diff --git a/.env b/.env
new file mode 100644
index 0000000..57119ac
--- /dev/null
+++ b/.env
@@ -0,0 +1,16 @@
+##### GENERAL ENVS #####
+GPU_SUPPORTED=True
+GPU_ENABLED=True
+
+##### AUTH ENVS #####
+AUTH_ENABLED=False
+REQUIRED_AUTH_ROLES=video-synthesis
+
+##### BACKEND ENVS #####
+FFMPEG_DOCKER=True # Python will use a different version of ffmpeg, so we need to redirect to /usr/bin/ffmpeg
+BACKEND_URL=http://localhost:5000
+
+#### AVATAR SETTINGS ####
+AVAILABLE_AVATARS="" # Comma-separated list of all Avatar names
+AVATAR_MODELS=neuralVoice,motionGan # All options: "neuralVoice,motionGan"
+
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 0000000..a1842ad
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,13 @@
+*.jpg filter=lfs diff=lfs merge=lfs -text
+*.pkl filter=lfs diff=lfs merge=lfs -text
+*.pth filter=lfs diff=lfs merge=lfs -text
+*.tar filter=lfs diff=lfs merge=lfs -text
+*.zip filter=lfs diff=lfs merge=lfs -text
+*.mp4 filter=lfs diff=lfs merge=lfs -text
+*.pb filter=lfs diff=lfs merge=lfs -text
+*.npy filter=lfs diff=lfs merge=lfs -text
+*.mat filter=lfs diff=lfs merge=lfs -text
+*.bin filter=lfs diff=lfs merge=lfs -text
+*.h5 filter=lfs diff=lfs merge=lfs -text
+*.npz filter=lfs diff=lfs merge=lfs -text
+
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..a52f1ed
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,34 @@
+## Ignore Visual Studio temporary files, build results, and
+## files generated by popular Visual Studio add-ons.
+##
+## Get latest from https://github.com/github/gitignore/blob/master/VisualStudio.gitignore
+
+.idea
+
+# Logging
+error.log
+
+# Visual Studio 2015/2017 cache/options directory
+.vscode/
+
+# PyCharm directory
+avatar_backend_api/.idea/
+
+# log folder
+sbatch_log/
+.out
+
+# Python Tools for Visual Studio (PTVS)
+__pycache__/
+*.pyc
+
+# train cluster
+*.job
+# *.sh
+
+# all macOS files ._
+._*
+*.DS_Store
+
+# Data dir
+data/**
diff --git a/AUTHORS.txt b/AUTHORS.txt
new file mode 100644
index 0000000..df870c1
--- /dev/null
+++ b/AUTHORS.txt
@@ -0,0 +1,7 @@
+Contributions:
+
+* Alberto Pennino : Research and backend development.
+* Thomas Steinmann : Frontend and API development.
+* Marc Willhaus : Frontend development.
+* Clara Labrador-Fernandez : Supervision.
+
diff --git a/Dockerfile-motion-gan b/Dockerfile-motion-gan
new file mode 100644
index 0000000..a472c83
--- /dev/null
+++ b/Dockerfile-motion-gan
@@ -0,0 +1,89 @@
+FROM nvidia/cuda:11.4.1-base-ubuntu20.04 as base
+ENV DEBIAN_FRONTEND noninteractive
+
+ENV PATH /opt/conda/bin:$PATH
+
+# Update & install packages
+RUN apt-get -qq update --fix-missing && \
+    apt-get -qq install -y bzip2 ca-certificates wget curl git build-essential && \
+    apt-get -qq clean && \
+    rm -rf /var/lib/apt/lists/*
+
+SHELL ["/bin/bash", "-c"]
+WORKDIR /app
+
+# Install miniconda
+RUN wget --quiet https://repo.anaconda.com/miniconda/Miniconda3-py38_4.12.0-Linux-x86_64.sh -O ~/miniconda.sh && \
+    /bin/bash ~/miniconda.sh -b -p /opt/conda && \
+    rm ~/miniconda.sh && \
+    /opt/conda/bin/conda clean --quiet -tipsy && \
+    ln -s /opt/conda/etc/profile.d/conda.sh /etc/profile.d/conda.sh && \
+    echo ". /opt/conda/etc/profile.d/conda.sh" >> ~/.bashrc && \
+    echo "conda activate base" >> ~/.bashrc
+
+# Set python env vars
+ENV PYTHONDONTWRITEBYTECODE 1
+ENV PYTHONUNBUFFERED 1
+ENV PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
+ENV FORCE_CUDA 1
+ENV PYTORCH_CUDA_ALLOC_CONF max_split_size_mb:512
+
+RUN apt-get -qq update && \
+    apt-get -qq install -y python3-opencv libsm6 libxext6 && \
+    apt-get -qq clean && \
+    rm -rf /var/lib/apt/lists/*
+
+FROM base as backend
+WORKDIR /app
+
+### deepspeech ###
+RUN conda create -n deepspeech python=3.7
+# Make run command use the new environment
+SHELL ["conda", "run", "-n", "deepspeech", "/bin/bash", "-c"]
+
+RUN conda install pysoundfile -c conda-forge && \
+    pip install torch==1.9.1+cu111 torchvision==0.10.1+cu111 torchaudio==0.9.1 -f https://download.pytorch.org/whl/torch_stable.html && \
+    conda install x264=='1!152.20180717' ffmpeg=4.0.2 -c conda-forge
+
+COPY requirements_deepspeech.txt .
+RUN pip install -r requirements_deepspeech.txt
+
+##############################################
+
+### pyenv ###
+RUN conda create -n pyenv python=3.9
+
+# Make run command use the new environment
+SHELL ["conda", "run", "-n", "pyenv", "/bin/bash", "-c"]
+
+RUN conda install pysoundfile -c conda-forge && \
+    conda install pytorch=1.13.0 torchvision torchaudio pytorch-cuda=11.6 -c pytorch -c nvidia && \
+    conda install -c fvcore -c iopath -c conda-forge fvcore iopath && \
+    conda install x264=='1!152.20180717' ffmpeg=4.0.2 -c conda-forge && \
+    conda install pytorch3d -c pytorch3d
+
+COPY requirements.txt .
+RUN pip install -r requirements.txt
+
+# Make run command use the new environment
+SHELL ["conda", "run", "-n", "base", "/bin/bash", "-c"]
+
+COPY requirements.api.txt .
+RUN pip install -r requirements.api.txt
+
+COPY avatar-api ./avatar-api
+RUN pip install ./avatar-api --use-feature=in-tree-build && \
+    rm -rf avatar-api
+
+# continue
+COPY motion-gan-pipeline/GFPGAN ./GFPGAN
+COPY motion-gan-pipeline/preprocessing ./preprocessing
+COPY motion-gan-pipeline/motion-generation ./motion-generation
+COPY motion-gan-pipeline/ImageToImage ./ImageToImage
+
+COPY motion-gan-pipeline/gunicorn.conf.py .
+COPY motion-gan-pipeline/motion_gan_backend_api ./motion_gan_backend_api
+COPY motion-gan-pipeline/full_pipeline* ./
+COPY motion-gan-pipeline/*.py ./
+
+CMD [ "conda", "run", "--no-capture-output", "-n", "base", "gunicorn", "-c", "gunicorn.conf.py", "--chdir", "./motion_gan_backend_api", "-k", "uvicorn.workers.UvicornWorker", "app:app" ]
diff --git a/Dockerfile-neural-voice b/Dockerfile-neural-voice
new file mode 100644
index 0000000..e327d33
--- /dev/null
+++ b/Dockerfile-neural-voice
@@ -0,0 +1,83 @@
+FROM nvidia/cuda:11.4.1-base-ubuntu20.04 as base
+ENV DEBIAN_FRONTEND noninteractive
+
+ENV PATH /opt/conda/bin:$PATH
+
+# Update & install packages
+RUN apt-get -qq update --fix-missing && \
+    apt-get -qq install -y bzip2 ca-certificates wget curl git build-essential && \
+    apt-get -qq clean && \
+    rm -rf /var/lib/apt/lists/*
+
+SHELL ["/bin/bash", "-c"]
+WORKDIR /app
+
+# Install miniconda
+RUN wget --quiet https://repo.anaconda.com/miniconda/Miniconda3-py38_4.12.0-Linux-x86_64.sh -O ~/miniconda.sh && \
+    /bin/bash ~/miniconda.sh -b -p /opt/conda && \
+    rm ~/miniconda.sh && \
+    /opt/conda/bin/conda clean --quiet -tipsy && \
+    ln -s /opt/conda/etc/profile.d/conda.sh /etc/profile.d/conda.sh && \
+    echo ". /opt/conda/etc/profile.d/conda.sh" >> ~/.bashrc && \
+    echo "conda activate base" >> ~/.bashrc
+
+# Set python env vars
+ENV PYTHONDONTWRITEBYTECODE 1
+ENV PYTHONUNBUFFERED 1
+ENV PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
+ENV FORCE_CUDA 1
+ENV PYTORCH_CUDA_ALLOC_CONF max_split_size_mb:512
+
+RUN apt-get -qq update && \
+    apt-get -qq install -y python3-opencv libsm6 libxext6 && \
+    apt-get -qq clean && \
+    rm -rf /var/lib/apt/lists/*
+
+FROM base as backend
+WORKDIR /app
+
+### deepspeech ###
+RUN conda create -n deepspeech python=3.7
+# Make run command use the new environment
+SHELL ["conda", "run", "-n", "deepspeech", "/bin/bash", "-c"]
+
+RUN conda install pysoundfile -c conda-forge && \
+    pip install torch==1.9.1+cu111 torchvision==0.10.1+cu111 torchaudio==0.9.1 -f https://download.pytorch.org/whl/torch_stable.html && \
+    conda install x264=='1!152.20180717' ffmpeg=4.0.2 -c conda-forge
+
+COPY requirements_deepspeech.txt .
+RUN pip install -r requirements_deepspeech.txt
+
+##############################################
+
+### pyenv ###
+RUN conda create -n pyenv python=3.9
+
+# Make run command use the new environment
+SHELL ["conda", "run", "-n", "pyenv", "/bin/bash", "-c"]
+
+RUN conda install pysoundfile -c conda-forge && \
+    conda install pytorch=1.13.0 torchvision torchaudio pytorch-cuda=11.6 -c pytorch -c nvidia && \
+    conda install -c fvcore -c iopath -c conda-forge fvcore iopath && \
+    conda install x264=='1!152.20180717' ffmpeg=4.0.2 -c conda-forge && \
+    conda install pytorch3d -c pytorch3d
+
+COPY requirements.txt .
+RUN pip install -r requirements.txt
+
+# Make run command use the new environment
+SHELL ["conda", "run", "-n", "base", "/bin/bash", "-c"]
+
+COPY requirements.api.txt .
+RUN pip install -r requirements.api.txt
+
+COPY avatar-api ./avatar-api
+RUN pip install ./avatar-api --use-feature=in-tree-build && \
+    rm -rf avatar-api
+
+COPY NeuralVoicePuppetry/neural-code ./neural-code
+
+COPY NeuralVoicePuppetry/neural_voice_backend_api ./neural_voice_backend_api
+
+COPY NeuralVoicePuppetry/gunicorn.conf.py .
+CMD [ "conda", "run", "--no-capture-output", "-n", "base", "gunicorn", "-c", "gunicorn.conf.py", "--chdir", "./neural_voice_backend_api", "-k", "uvicorn.workers.UvicornWorker", "app:app" ]
diff --git a/NeuralVoicePuppetry/.dockerignore b/NeuralVoicePuppetry/.dockerignore
new file mode 100644
index 0000000..0a71680
--- /dev/null
+++ b/NeuralVoicePuppetry/.dockerignore
@@ -0,0 +1,5 @@
+.idea
+
+**__pycache__**
+
+**.git
diff --git a/NeuralVoicePuppetry/.env b/NeuralVoicePuppetry/.env
new file mode 100644
index 0000000..b17fd6d
--- /dev/null
+++ b/NeuralVoicePuppetry/.env
@@ -0,0 +1,13 @@
+##### GENERAL ENVS #####
+GPU_SUPPORTED=True
+GPU_ENABLED=True
+
+##### AUTH ENVS #####
+AUTH_ENABLED=False
+REQUIRED_AUTH_ROLES=video-synthesis
+
+##### BACKEND ENVS #####
+FFMPEG_DOCKER=True # Python will use a different version of ffmpeg, so we need to redirect to /usr/bin/ffmpeg
+
+##### REACT ENVS #####
+REACT_APP_ENVIRONMENT=prod
diff --git a/NeuralVoicePuppetry/AUTHORS.txt b/NeuralVoicePuppetry/AUTHORS.txt
new file mode 100644
index 0000000..df870c1
--- /dev/null
+++ b/NeuralVoicePuppetry/AUTHORS.txt
@@ -0,0 +1,7 @@
+Contributions:
+
+* Alberto Pennino : Research and backend development.
+* Thomas Steinmann : Frontend and API development.
+* Marc Willhaus : Frontend development.
+* Clara Labrador-Fernandez : Supervision.
+
diff --git a/NeuralVoicePuppetry/LICENSE b/NeuralVoicePuppetry/LICENSE
new file mode 100644
index 0000000..46aadda
--- /dev/null
+++ b/NeuralVoicePuppetry/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2022 ETH Zurich
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/NeuralVoicePuppetry/README.md b/NeuralVoicePuppetry/README.md
new file mode 100644
index 0000000..545186c
--- /dev/null
+++ b/NeuralVoicePuppetry/README.md
@@ -0,0 +1,199 @@
+# NeuralVoicePuppetry
+
+This repository contains the end-to-end "Neural Voice Puppetry" pipeline for the Audio-driven Video Synthesis project.
+
+You can find additional information on the method in the following [paper](https://arxiv.org/abs/1912.05566).
+
+![NeuralVoicePuppetry](media/pipeline.png "NeuralVoicePuppetry")
+
+## Requirements
+Create a new environment:
+
+```bash
+# Deepspeech environment
+conda create -n deepspeech python=3.7
+conda activate deepspeech
+conda install pysoundfile -c conda-forge
+pip install torch==1.9.1+cu111 torchvision==0.10.1+cu111 torchaudio==0.9.1 -f https://download.pytorch.org/whl/torch_stable.html
+conda install x264=='1!152.20180717' ffmpeg=4.0.2 -c conda-forge
+pip install -r requirements_deepspeech.txt
+conda deactivate
+
+# pyenv environment
+conda create -n pyenv python=3.9
+conda activate pyenv
+conda install pysoundfile -c conda-forge
+conda install pytorch=1.13.0 torchvision torchaudio pytorch-cuda=11.6 -c pytorch -c nvidia
+conda install -c fvcore -c iopath -c conda-forge fvcore iopath
+conda install x264=='1!152.20180717' ffmpeg=4.0.2 -c conda-forge
+conda install pytorch3d -c pytorch3d
+pip install -r requirements.txt
+```
+
+## Data
+If you plan on using this code with the already available, pre-trained moderators, you will only have to provide the audio data.
+
+Please follow these instructions on data quality:
+- Audio
+  - Provide a recording of a person speaking (audio of any duration is accepted).
+  - The cleaner the audio signal the better: audio with background noise will result in mismatched lip-sync.
+  - Avoid recording multiple people talking: the model is unable to distinguish between multiple voice signals.
+- Video
+  - Provide a video of your desired avatar character talking.
+  - Minimum video duration: 3 minutes.
+  - Longer videos will result in longer training time.
+  - The background is irrelevant.
+
+## Data structure
+
+Place your input data using the following structure:
+
+```
+input_data
+│
+└───video
+│   └─── NAME_VIDEO_FILE
+│   │    └─ NAME_VIDEO_FILE.mp4
+│   |...
+│
+└───audio
+│   └─── NAME_AUDIO_FILE
+│   |    └─ NAME_AUDIO_FILE.mp3/wav/mp4
+│   |...
+│
+```
+
+Video files can only be mp4. Moreover, for the time being, input videos must be 25 fps.
+
+Input audio files can have the following formats: mp4, mp3 or wav.
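+
+As a convenience, the following minimal sketch (file names are illustrative, not part of the pipeline) stages one audio/video pair into this layout:
+
+```python
+import os
+import shutil
+
+DATAROOT = 'input_data'  # illustrative; any path later passed as $DATAROOT works
+
+# copy each source file into DATAROOT/<subfolder>/<name>/<file>
+for subfolder, src in [('video', 'my_new_avatar.mp4'), ('audio', 'my_audio_sample.wav')]:
+    name = os.path.splitext(os.path.basename(src))[0]
+    target_dir = os.path.join(DATAROOT, subfolder, name)
+    os.makedirs(target_dir, exist_ok=True)  # e.g. input_data/video/my_new_avatar/
+    shutil.copy(src, os.path.join(target_dir, os.path.basename(src)))
+```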
+
+## Usage
+The same command can be used for both training and inference. If the trained checkpoints for the requested avatar are not found, the code will launch the training procedure.
+
+Use the following call to launch the code:
+```bash
+bash full_pipeline_nvp.sh $AUDIO_NAME $VIDEO_NAME $DATAROOT
+```
+Where:
+- $AUDIO_NAME: name of the audio file (without extension).
+- $VIDEO_NAME: name of the video file (without extension). This will also be the name of the Avatar.
+- $DATAROOT: path to the data folder.
+
+## Train new Avatars
+In order to train a new avatar, follow these simple steps:
+
+1. Record a new video following the instructions above. \
+   Eg: VIDEONAME = my_new_avatar
+2. Move your file to your data folder inside the video subfolder.
+
+3. Choose any audio file. \
+   Eg: AUDIONAME = my_audio_sample
+
+4. Start training (and inference) with the following command:
+   ```bash
+   bash full_pipeline_nvp.sh my_audio_sample my_new_avatar $DATAROOT
+   ```
+
+5. Wait until it's finished (if training, the process will be long).
+6. Enjoy your new Avatar! \
+   You can now use the same avatar with any other audio file.
+
+## Run using pretrained model
+In order to run the model on a pretrained (video) identity, you need the following data:
+- `./NeuralRenderingNetwork/checkpoints/DynamicNeuralTextures-NAME_VIDEO_FILE` - this should contain the pretrained model
+- `./output_data/features/NAME_VIDEO_FILE` - this should contain the DECA codedicts, the original video frames, an h5 file and the tform.npy
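+
+A minimal sketch (NAME_VIDEO_FILE is a placeholder, as above) to verify both locations exist before launching:
+
+```python
+import os
+
+name = 'NAME_VIDEO_FILE'  # placeholder for your avatar/video name
+for path in (f'./NeuralRenderingNetwork/checkpoints/DynamicNeuralTextures-{name}',
+             f'./output_data/features/{name}'):
+    # both directories must be present to skip training and run inference only
+    print(path, '->', 'found' if os.path.isdir(path) else 'MISSING')
+```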
+
+## Computational time
+Estimated computational times:
+* Preprocessing: approx 30 minutes for a 5 minutes video. Varies depending on the frame size.
+* Audio2Expression Training: approx 5 minutes for a 5 minutes video.
+* Audio2Expression Inference: approx 30 seconds for a 5 minutes video.
+* Neural Rendering Training: approx 8 hours for a 5 minutes video. Change the number of epochs for longer videos to keep training time constant.
+* Neural Rendering Inference: approx 15 minutes for a 5 minutes video.
+* Postprocessing: approx 20 minutes for a 5 minutes video.
+
+## LICENSE
+
+This implementation of the NVP pipeline is free and open source! All code in this repository is licensed under:
+
+* [MIT](LICENSE) License.
+
+This pipeline relies on and is inspired by the following works, and therefore refers to their individual licenses:
+
+- [Neural Voice Puppetry](https://web.archive.org/web/20201113014501/https://github.com/JustusThies/NeuralVoicePuppetry): [license](https://gitlab.ethz.ch/mtc/video-synthesis/.NeuralVoicePuppetry/-/blob/0fc75ba2edfdfd5655984f1515bfe06de880d91d/LICENSE), internal path `/neural-code/Audio2ExpressionNet`, `/neural-code/NeuralRenderingNetwork`.
+- [DECA](https://github.com/YadiraF/DECA): [license](https://github.com/YadiraF/DECA/blob/master/LICENSE), internal path `/neural-code/third/DECA`.
+- [VOCA](https://github.com/TimoBolkart/voca): [license](https://voca.is.tue.mpg.de/license), used internally by DECA.
+- [DeepSpeech](https://github.com/mozilla/DeepSpeech): [license](https://github.com/mozilla/DeepSpeech/blob/master/LICENSE), used by VOCA.
+- [CycleGAN and Pix2Pix](https://github.com/junyanz/pytorch-CycleGAN-and-pix2pix): [license](https://github.com/junyanz/pytorch-CycleGAN-and-pix2pix/blob/master/LICENSE), used as the skeleton for GAN training.
+
+
+## Contact information
+
+Alberto Pennino: [alberto.pennino@inf.ethz.ch](mailto:alberto.pennino@inf.ethz.ch)
+
+
+## Reproduce env from scratch
+
+1. Install [gcc](https://linuxize.com/post/how-to-install-gcc-compiler-on-ubuntu-18-04/)
+
+2. Disable the [Nouveau kernel driver](https://linuxconfig.org/how-to-disable-nouveau-nvidia-driver-on-ubuntu-18-04-bionic-beaver-linux/) - always reboot afterwards
+
+3. Install [CUDA 11.1](https://developer.nvidia.com/cuda-11.1.0-download-archive?target_os=Linux&target_arch=x86_64&target_distro=Ubuntu&target_version=1804&target_type=runfilelocal)
+
+Run the following:
+
+```
+$ wget https://developer.download.nvidia.com/compute/cuda/11.1.0/local_installers/cuda_11.1.0_455.23.05_linux.run
+$ sudo sh cuda_11.1.0_455.23.05_linux.run
+$ export PATH="/usr/local/cuda-11.1/bin:$PATH"
+$ export LD_LIBRARY_PATH="/usr/local/cuda-11.1/lib64:$LD_LIBRARY_PATH"
+```
+
+4. Clone the repo: `git clone https://gitlab.ethz.ch/mtc/NeuralVoicePuppetry.git`
+
+5. Install [Miniconda](https://docs.conda.io/projects/conda/en/latest/user-guide/install/linux.html)
+
+6. Reproduce the env
+
+```
+conda create -n NAME python=3.8
+conda activate NAME
+conda install numpy
+conda install -c anaconda scipy
+conda install -c conda-forge opencv
+conda install -c conda-forge tqdm
+conda install -c conda-forge resampy
+conda install scikit-image
+conda install h5py
+pip install --upgrade tensorflow
+pip install python_speech_features
+conda install -c conda-forge moviepy
+conda install -c conda-forge pydub
+conda install pytorch torchvision torchaudio cudatoolkit=10.2 -c pytorch
+conda install -c conda-forge librosa
+conda install -c fvcore -c iopath -c conda-forge fvcore iopath
+conda install -c bottler nvidiacub
+pip install "git+https://github.com/facebookresearch/pytorch3d.git"
+pip install face-alignment
+conda install -c conda-forge dominate
+pip install chumpy
+conda install -c conda-forge progressbar2
+pip install flask
+chmod a+x full_pipeline_nvp.sh
+pip install gunicorn
+```
diff --git a/NeuralVoicePuppetry/gunicorn.conf.py b/NeuralVoicePuppetry/gunicorn.conf.py
new file mode 100644
index 0000000..9bc8d9c
--- /dev/null
+++ b/NeuralVoicePuppetry/gunicorn.conf.py
@@ -0,0 +1,19 @@
+# https://github.com/benoitc/gunicorn/blob/master/examples/example_config.py
+
+# Bind & deployment
+
+bind = '0.0.0.0:5000'
+reload = False
+
+# Connections
+# The dashboard backend should be capable of supporting multiple workers;
+# however, initialization is currently an issue when running in multiple threads.
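+# Until that is resolved, a single worker is used and concurrency comes from
+# the `threads` setting below.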
+workers = 1 # if DashboardConfig.debug else 4
+threads = 4
+backlog = 64
+
+timeout = 300
+
+# Logging
+loglevel = 'info'
diff --git a/NeuralVoicePuppetry/media/audio2expression_net.jpg b/NeuralVoicePuppetry/media/audio2expression_net.jpg
new file mode 100644
index 0000000..f5298b3
--- /dev/null
+++ b/NeuralVoicePuppetry/media/audio2expression_net.jpg
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:27b5086adab995c64b451d19623a436a9c0b5724e98d49751aea256eb85e4aff
+size 236375
diff --git a/NeuralVoicePuppetry/media/pipeline.png b/NeuralVoicePuppetry/media/pipeline.png
new file mode 100644
index 0000000..7328917
Binary files /dev/null and b/NeuralVoicePuppetry/media/pipeline.png differ
diff --git a/NeuralVoicePuppetry/media/renderer_net.jpg b/NeuralVoicePuppetry/media/renderer_net.jpg
new file mode 100644
index 0000000..bfd0840
--- /dev/null
+++ b/NeuralVoicePuppetry/media/renderer_net.jpg
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c08150f27c2fcfb0a063b848761e1ad6f47d75efe7c9f4fa465d49357a94c108
+size 335239
diff --git a/NeuralVoicePuppetry/media/rendering_pipeline.jpg b/NeuralVoicePuppetry/media/rendering_pipeline.jpg
new file mode 100644
index 0000000..3eb7498
--- /dev/null
+++ b/NeuralVoicePuppetry/media/rendering_pipeline.jpg
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4ff3699d933a57739e820e63847e45ffa9e71ab65622919092e5d648ee443153
+size 332511
diff --git a/NeuralVoicePuppetry/media/teaser.jpg b/NeuralVoicePuppetry/media/teaser.jpg
new file mode 100644
index 0000000..1d2108b
--- /dev/null
+++ b/NeuralVoicePuppetry/media/teaser.jpg
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f085e8851a012b69e3aba2d69196d2e69a55b1daec3f5215ab6f8a4794a4b4db
+size 543929
diff --git a/NeuralVoicePuppetry/neural-code/.gitignore b/NeuralVoicePuppetry/neural-code/.gitignore
new file mode 100644
index 0000000..2f0d06f
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/.gitignore
@@ -0,0 +1 @@
+output_data
\ No newline at end of file
diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/BaselModel/__init__.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/BaselModel/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/BaselModel/basel_model.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/BaselModel/basel_model.py
new file mode 100644
index 0000000..26a50c0
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/BaselModel/basel_model.py
@@ -0,0 +1,94 @@
+import os
+import torch
+import torch.nn as nn
+import torchvision.transforms as transforms
+import numpy as np
+
+#########################
+N_EXPRESSIONS=76 # <<<<<< NEEDS TO BE SPECIFIED ACCORDING TO THE USED FACE MODEL
+# N_EXPRESSIONS=299
+#########################
+
+#import soft_renderer as sr
+#
+#class MorphableModel(nn.Module):
+#    def __init__(self, filename_average=''):
+#        super(MorphableModel, self).__init__()
+#
+#        print('Load Morphable Model (Basel)')
+#
+#        #filename_mesh = os.path.join(opt.dataroot, opt.phase + '/average_model.obj')
+#        filename_mesh = filename_average
+#        if filename_average=='':
+#            print('use default identity')
+#            filename_mesh = './BaselModel/average.obj'
+#        mesh = sr.Mesh.from_obj(filename_mesh, normalization=False, load_texture=True)
+#        self.average_vertices = mesh.vertices[0]
+#        self.faces = mesh.faces[0]
+#        self.average_vertices = self.average_vertices[None, :, :] # [num_vertices, XYZ] -> [batch_size=1, num_vertices, XYZ]
+#        self.faces = self.faces[None, :, :] # [num_faces, 3] -> [batch_size=1, num_faces, 3]
+#        self.textures = mesh.textures
+#
+#        self.num_vertices = self.average_vertices.shape[1]
+#        self.num_faces = self.faces.shape[1]
+#        print('vertices:', self.average_vertices.shape)
+#        print('faces:', self.faces.shape)
+#
+#        ## basis function
+#        self.expression_basis = np.memmap('./BaselModel/ExpressionBasis.matrix', dtype='float32', mode='r').__array__()[1:] # first entry is the size
+#        self.expression_basis = np.resize(self.expression_basis, (N_EXPRESSIONS, self.num_vertices, 4))[:,:,0:3]
+#        self.expression_basis = torch.tensor(self.expression_basis.astype(np.float32)).cuda() # N_EXPRESSIONS x num_vertices x 3
+#        self.expression_basis = torch.transpose(self.expression_basis,0,2) # transpose for matmul
+#        print('expression_basis', self.expression_basis.shape)
+#
+#        # default expression
+#        zeroExpr = torch.zeros(1, N_EXPRESSIONS, dtype=torch.float32).cuda()
+#        self.morph(zeroExpr)
+#
+#
+#    def save_model_to_obj_file(self, filename, mask=None):
+#        faces_cpu = self.faces.detach().cpu().numpy()
+#        vertices_cpu = self.vertices.detach().cpu().numpy()
+#
+#        mask_cpu = None
+#        if not type(mask) == type(None):
+#            mask_cpu = mask.detach().cpu().numpy()
+#
+#        f = open(filename, 'w')
+#        if type(mask) == type(None):
+#            for i in range(0, self.num_vertices):
+#                f.write('v ' + str(vertices_cpu[0, i, 0]) + ' ' + str(vertices_cpu[0, i, 1]) + ' ' + str(vertices_cpu[0, i, 2]) + '\n')
+#        else:
+#            for i in range(0, self.num_vertices):
+#                f.write('v ' + str(vertices_cpu[0, i, 0]) + ' ' + str(vertices_cpu[0, i, 1]) + ' ' + str(vertices_cpu[0, i, 2]) + ' ' + str(mask_cpu[i]) + ' ' + str(mask_cpu[i]) + ' ' + str(mask_cpu[i]) + ' 1'+ '\n')
+#
+#        for i in range(0, self.num_faces):
+#            f.write('f ' + str(faces_cpu[0, i, 0]+1) + '// ' + str(faces_cpu[0, i, 1]+1) + '// ' + str(faces_cpu[0, i, 2]+1) + '//\n')
+#
+#        f.close()
+#
+#    def compute_expression_delta(self, expressions):
+#        return torch.transpose(torch.matmul(self.expression_basis, torch.transpose(expressions, 0,1)), 0, 2) # note that matmul wants to have this order: (a x b x c) x (c x m) => (a x b x m)
+#
+#    def morph(self, expressions):
+#        self.vertices = self.average_vertices + self.compute_expression_delta(expressions)
+#        return self.vertices
+#
+#
+#
+#    def LoadMask(self, filename=''):
+#        if filename=='':
+#            print('use default mask')
+#            filename = './BaselModel/mask/defaultMask_mouth.obj'
+#
+#        mask = np.zeros(self.num_vertices)
+#        file = open(filename, 'r')
+#        i=0
+#        for line in file:
+#            if line[0] == 'v':
+#                floats = [float(x) for x in line[1:].split()]
+#                if floats[3] == 1.0 and floats[4] == 0.0 and floats[5] == 0.0:
+#                    mask[i] = 1.0
+#                i += 1
+#        file.close()
+#        return torch.tensor(mask.astype(np.float32)).cuda()
\ No newline at end of file
diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/ReadMe.txt b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/ReadMe.txt
new file mode 100644
index 0000000..9f5dd53
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/ReadMe.txt
@@ -0,0 +1,21 @@
+This code runs the inference given a source audio and a target video that has been tracked.
+The process is started using "transfer.sh" (where you can also specify the target sequences -> TARGET_ACTOR_LIST).
+In the "transfer.py" you can specify the source sequences (search for "source_actors").
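+As a sketch (the names here are illustrative): set source_actors = ['my_source_audio'] in
+ "transfer.py", put your tracked target sequence into TARGET_ACTOR_LIST in "transfer.sh",
+ and then run "transfer.sh".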
+Make sure that you specify the dimensions of your blendshape model in "BaselModel/basel_model.py" (-> N_EXPRESSIONS).
+
+Note that you have to extract the deepspeech features in a preprocessing step.
+In the datasets folder you will find an example of how the data should look.
+The deepspeech features are provided as npy files, while for the target sequence you also have to provide
+ the expressions (visually tracked blendshape coefficients).
+If you have a different data format, you need to adapt the data loader (data/face_dataset.py).
+
+Once you have the prepared data you can run the script.
+It will optimize for the mapping from the audio-expression space to your blendshape model space.
+The mapping is stored in the "mappings" folder (note that it caches the mappings there and reuses them for the next run;
+ if you change something, you need to delete this cache).
+The final output is stored in the "datasets/TRANSFERS" folder as a list of estimated expressions using the
+ source audio features.
+
+Given these expressions you need to generate new uv maps for the target video using the rigid pose
+ of the target video (only replacing the expressions).
+These can then be used in the deferred neural rendering framework.
\ No newline at end of file
diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/__init__.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/audio2expr_with_map.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/audio2expr_with_map.py
new file mode 100644
index 0000000..fd323c2
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/audio2expr_with_map.py
@@ -0,0 +1,107 @@
+import os
+import os.path
+from options.transfer_options import Audio2ExprOptions
+from data import CreateDataLoader
+from data.face_dataset import FaceDataset
+from data.audio_dataset import AudioDataset
+from models import create_model
+from util.visualizer import save_images
+from util import html
+import torch
+import torch.nn as nn
+import torchvision.transforms as transforms
+import numpy as np
+from PIL import Image
+import time
+import random
+import progressbar
+import copy
+from shutil import copyfile
+
+from BaselModel.basel_model import *
+
+
+def load_model(opt):
+    opt.output_audio_expressions = True
+    opt.nTrainObjects = 116
+
+    print('#train objects = %d' % opt.nTrainObjects)
+
+    print('>>> create model <<<')
+    model = create_model(opt)
+    print('>>> setup model <<<')
+    model.setup(opt)
+
+    return model
+
+def load_source_sequence(opt):
+    opt_source = copy.copy(opt) # create a clone
+    opt_source.dataroot = opt.source_actor # overwrite root directory
+    print(opt_source.dataroot)
+    opt_source.dataset_mode = 'audio'
+    opt_source.phase = 'train'
+
+    dataset_source = AudioDataset()
+
+    dataset_source.initialize(opt_source)
+
+    dataloader = torch.utils.data.DataLoader(
+        dataset_source,
+        batch_size=opt.batch_size,
+        shuffle=not opt.serial_batches,
+        num_workers=int(opt.num_threads))
+
+    return dataset_source, dataloader
+
+
+if __name__ == '__main__':
+    # read options
+    opt = Audio2ExprOptions().parse()
+
+    # load model
+    model = load_model(opt)
+    print('model version:', opt.name)
+
+    if opt.use_mapping:
+        # read mapping
+        mapping_fn = opt.mapping_path
+
+        # load mapping from file
+        map_cpu = np.load(mapping_fn + '.npy')
+        mapping = torch.tensor(map_cpu.astype(np.float32)).cuda()
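+        # `mapping` is the cached audio-expression -> blendshape matrix produced by
+        # the transfer optimization (see the Audio2ExpressionNet ReadMe); it is
+        # applied below as torch.matmul(mapping, audio_expression).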
+        print('loaded mapping from file', mapping.shape)
+
+    # make outdir
+    source_name = opt.source_actor.split("/")[-1]
+    out_dir = opt.out_dir + source_name
+    os.makedirs(out_dir, exist_ok=True)
+    audio2expr_dir = os.path.join(out_dir, 'audio2expr')
+    os.makedirs(audio2expr_dir, exist_ok=True)
+    if opt.use_mapping:
+        expr_dir = os.path.join(out_dir, 'expr')
+        os.makedirs(expr_dir, exist_ok=True)
+
+    # read source sequence
+    dataset_source, data_loader_source = load_source_sequence(opt)
+    dataset_source_size = len(dataset_source)
+    print('#source_actor frames = %d' % dataset_source_size)
+
+    expression_multiplier = 1.0 # default
+
+    # run over data
+    with progressbar.ProgressBar(max_value=len(dataset_source)) as bar:
+        for i, data in enumerate(data_loader_source):
+            bar.update(i)
+            model.set_input(data)
+            model.test()
+            audio_expression = model.fake_expressions.data[0, :, 0]
+
+            if opt.use_mapping:
+                expression = expression_multiplier * 10.0 * torch.matmul(mapping, audio_expression)
+                expression = expression[None, :]
+                np.save(os.path.join(expr_dir, f'expr_{i}.npy'), expression.cpu().numpy())
+
+            audio_expression = audio_expression[None, :]
+            np.save(os.path.join(audio2expr_dir, f'audioexpr_{i}.npy'), audio_expression.cpu().numpy())
+
+    exit()
\ No newline at end of file
diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/10_netG.pth b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/10_netG.pth
new file mode 100644
index 0000000..86f917f
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/10_netG.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:450ac758c0b1c86078f6c22a3971d1ada8b4f7b8bec59e0facb34a1d34b36623
+size 1314132
diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/15_netG.pth b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/15_netG.pth
new file mode 100644
index 0000000..867717f
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/15_netG.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e64c2e037305352fd15233ca3b7db7ed5dd205f2c70e14ccff24db0e644858d5
+size 1314132
diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/20_netG.pth b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/20_netG.pth
new file mode 100644
index 0000000..4759659
--- /dev/null
+++ 
b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/20_netG.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:40ebd6414acdede17d4b329af1e784876da3940b62a63c4560c047351fd04b25 +size 1314132 diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/25_netG.pth b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/25_netG.pth new file mode 100644 index 0000000..0cee87c --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/25_netG.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3d65789804c76602e57d25c200a1b4f0758ac4cc2f85c190a989fda9fda7fb30 +size 1314132 diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/30_netG.pth b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/30_netG.pth new file mode 100644 index 0000000..c1cf871 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/30_netG.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:31d87402f3adc96d14b20645935be68f0f38fd1acc0797f035114c098001bd15 +size 1314132 diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/35_netG.pth b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/35_netG.pth new file mode 100644 index 0000000..f8eafcc --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/35_netG.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1a59642e0450ed198d4107a4a31b688bdfa03bcd586f4b3ad8b0dfc78c889521 +size 1314132 diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/40_netG.pth b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/40_netG.pth new file mode 100644 index 0000000..5ae6c21 --- /dev/null +++ 
b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/40_netG.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6040b71bf8abea82dc58fc1b36a3a8af9fa4a88f6bfb5676a90cf0987612376a +size 1314132 diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/45_netG.pth b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/45_netG.pth new file mode 100644 index 0000000..0d98d7c --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/45_netG.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ba76a02cb27239dab8c2a1e231d0c6728c5283b17c2ab4c811be13a20a8115bc +size 1314132 diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/50_netG.pth b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/50_netG.pth new file mode 100644 index 0000000..d2866a5 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/50_netG.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c143f554a882250cc74b80649c5a8d4499b4962d71bbf124dbcac8b888d21914 +size 1314132 diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/5_netG.pth b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/5_netG.pth new file mode 100644 index 0000000..5cf58e6 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/5_netG.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:58b3419dd86e8643bcee26409d5329d58e6dd08ce596e43bb85c18885d421b3e +size 1314132 diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/latest_netG.pth b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/latest_netG.pth new file mode 100644 index 0000000..159e954 --- /dev/null +++ 
b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/latest_netG.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:64b408c2a7d19300c7d1c5a828a0780e1cb1748d16105f6f3338ce9f01d411e2 +size 1314132 diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/loss_log.txt b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/loss_log.txt new file mode 100644 index 0000000..7431bb2 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/loss_log.txt @@ -0,0 +1,7569 @@ +================ Training Loss (Tue Nov 5 11:55:26 2019) ================ +(epoch: 1, iters: 2000, time: 0.527, data: 0.543) G_L1: 20.533 G_L1_ABSOLUTE: 6.121 G_L1_RELATIVE: 14.412 G_Regularizer: 0.000 validation_error: 26.195 +(epoch: 1, iters: 4000, time: 0.519, data: 0.000) G_L1: 22.297 G_L1_ABSOLUTE: 7.213 G_L1_RELATIVE: 15.085 G_Regularizer: 0.000 validation_error: 26.295 +(epoch: 1, iters: 6000, time: 0.523, data: 0.000) G_L1: 26.460 G_L1_ABSOLUTE: 7.193 G_L1_RELATIVE: 19.267 G_Regularizer: 0.000 validation_error: 26.111 +(epoch: 1, iters: 8000, time: 0.525, data: 0.000) G_L1: 25.699 G_L1_ABSOLUTE: 6.721 G_L1_RELATIVE: 18.978 G_Regularizer: 0.000 validation_error: 25.961 +(epoch: 1, iters: 10000, time: 0.522, data: 0.000) G_L1: 25.790 G_L1_ABSOLUTE: 6.689 G_L1_RELATIVE: 19.102 G_Regularizer: 0.000 validation_error: 26.259 +(epoch: 1, iters: 12000, time: 0.524, data: 0.000) G_L1: 21.361 G_L1_ABSOLUTE: 6.775 G_L1_RELATIVE: 14.586 G_Regularizer: 0.000 validation_error: 25.803 +(epoch: 1, iters: 14000, time: 0.524, data: 0.000) G_L1: 26.366 G_L1_ABSOLUTE: 6.800 G_L1_RELATIVE: 19.566 G_Regularizer: 0.000 validation_error: 26.035 +(epoch: 1, iters: 16000, time: 0.521, data: 0.000) G_L1: 22.563 G_L1_ABSOLUTE: 6.146 G_L1_RELATIVE: 16.416 G_Regularizer: 0.000 validation_error: 25.958 +(epoch: 1, iters: 18000, time: 0.521, data: 0.000) G_L1: 23.015 G_L1_ABSOLUTE: 6.598 G_L1_RELATIVE: 16.417 G_Regularizer: 0.000 validation_error: 26.145 +(epoch: 1, iters: 20000, time: 0.521, data: 0.000) G_L1: 27.604 G_L1_ABSOLUTE: 6.411 G_L1_RELATIVE: 21.193 G_Regularizer: 0.000 validation_error: 26.835 +(epoch: 1, iters: 22000, time: 0.524, data: 0.000) G_L1: 23.692 G_L1_ABSOLUTE: 6.645 G_L1_RELATIVE: 17.046 G_Regularizer: 0.000 validation_error: 26.400 +(epoch: 1, iters: 24000, time: 0.525, data: 0.000) G_L1: 31.754 G_L1_ABSOLUTE: 6.221 G_L1_RELATIVE: 25.533 G_Regularizer: 0.000 validation_error: 25.967 +(epoch: 1, iters: 26000, time: 0.534, data: 0.000) G_L1: 20.770 G_L1_ABSOLUTE: 6.981 G_L1_RELATIVE: 13.789 G_Regularizer: 0.000 validation_error: 26.519 +(epoch: 1, iters: 28000, time: 0.521, data: 0.000) G_L1: 21.386 G_L1_ABSOLUTE: 5.921 G_L1_RELATIVE: 15.465 G_Regularizer: 0.000 validation_error: 26.218 +(epoch: 1, iters: 30000, time: 0.527, data: 0.000) G_L1: 20.580 G_L1_ABSOLUTE: 5.905 G_L1_RELATIVE: 14.675 G_Regularizer: 0.000 validation_error: 26.542 +(epoch: 1, iters: 32000, time: 0.528, data: 0.000) 
G_L1: 22.306 G_L1_ABSOLUTE: 5.767 G_L1_RELATIVE: 16.538 G_Regularizer: 0.000 validation_error: 25.869 +(epoch: 1, iters: 34000, time: 0.530, data: 0.000) G_L1: 27.841 G_L1_ABSOLUTE: 6.845 G_L1_RELATIVE: 20.997 G_Regularizer: 0.000 validation_error: 25.692 +(epoch: 1, iters: 36000, time: 0.528, data: 0.000) G_L1: 24.150 G_L1_ABSOLUTE: 6.119 G_L1_RELATIVE: 18.030 G_Regularizer: 0.000 validation_error: 25.631 +(epoch: 1, iters: 38000, time: 0.530, data: 0.000) G_L1: 28.152 G_L1_ABSOLUTE: 5.918 G_L1_RELATIVE: 22.234 G_Regularizer: 0.000 validation_error: 26.156 +(epoch: 1, iters: 40000, time: 0.525, data: 0.000) G_L1: 25.895 G_L1_ABSOLUTE: 5.195 G_L1_RELATIVE: 20.700 G_Regularizer: 0.000 validation_error: 25.479 +(epoch: 1, iters: 42000, time: 0.530, data: 0.000) G_L1: 23.754 G_L1_ABSOLUTE: 5.785 G_L1_RELATIVE: 17.969 G_Regularizer: 0.000 validation_error: 25.697 +(epoch: 1, iters: 44000, time: 0.524, data: 0.000) G_L1: 23.181 G_L1_ABSOLUTE: 5.283 G_L1_RELATIVE: 17.897 G_Regularizer: 0.000 validation_error: 25.307 +(epoch: 1, iters: 46000, time: 0.528, data: 0.000) G_L1: 23.469 G_L1_ABSOLUTE: 6.536 G_L1_RELATIVE: 16.933 G_Regularizer: 0.000 validation_error: 25.563 +(epoch: 1, iters: 48000, time: 0.532, data: 0.000) G_L1: 24.980 G_L1_ABSOLUTE: 6.330 G_L1_RELATIVE: 18.649 G_Regularizer: 0.000 validation_error: 25.355 +(epoch: 1, iters: 50000, time: 0.528, data: 0.000) G_L1: 22.681 G_L1_ABSOLUTE: 5.501 G_L1_RELATIVE: 17.180 G_Regularizer: 0.000 validation_error: 25.428 +(epoch: 1, iters: 52000, time: 0.528, data: 0.000) G_L1: 18.113 G_L1_ABSOLUTE: 5.679 G_L1_RELATIVE: 12.434 G_Regularizer: 0.000 validation_error: 25.698 +(epoch: 1, iters: 54000, time: 0.527, data: 0.000) G_L1: 19.100 G_L1_ABSOLUTE: 5.162 G_L1_RELATIVE: 13.939 G_Regularizer: 0.000 validation_error: 25.779 +(epoch: 1, iters: 56000, time: 0.522, data: 0.000) G_L1: 24.206 G_L1_ABSOLUTE: 5.879 G_L1_RELATIVE: 18.327 G_Regularizer: 0.000 validation_error: 26.205 +(epoch: 1, iters: 58000, time: 0.532, data: 0.000) G_L1: 27.497 G_L1_ABSOLUTE: 5.718 G_L1_RELATIVE: 21.779 G_Regularizer: 0.000 validation_error: 25.873 +(epoch: 1, iters: 60000, time: 0.528, data: 0.000) G_L1: 23.159 G_L1_ABSOLUTE: 6.134 G_L1_RELATIVE: 17.025 G_Regularizer: 0.000 validation_error: 26.379 +(epoch: 1, iters: 62000, time: 0.533, data: 0.000) G_L1: 19.434 G_L1_ABSOLUTE: 4.600 G_L1_RELATIVE: 14.834 G_Regularizer: 0.000 validation_error: 25.891 +(epoch: 1, iters: 64000, time: 0.525, data: 0.001) G_L1: 23.563 G_L1_ABSOLUTE: 5.571 G_L1_RELATIVE: 17.992 G_Regularizer: 0.000 validation_error: 26.009 +(epoch: 1, iters: 66000, time: 0.531, data: 0.000) G_L1: 20.647 G_L1_ABSOLUTE: 5.063 G_L1_RELATIVE: 15.585 G_Regularizer: 0.000 validation_error: 25.800 +(epoch: 1, iters: 68000, time: 0.532, data: 0.000) G_L1: 24.282 G_L1_ABSOLUTE: 5.969 G_L1_RELATIVE: 18.312 G_Regularizer: 0.000 validation_error: 25.929 +(epoch: 1, iters: 70000, time: 0.530, data: 0.000) G_L1: 23.425 G_L1_ABSOLUTE: 5.420 G_L1_RELATIVE: 18.005 G_Regularizer: 0.000 validation_error: 26.189 +(epoch: 1, iters: 72000, time: 0.523, data: 0.000) G_L1: 19.160 G_L1_ABSOLUTE: 5.065 G_L1_RELATIVE: 14.095 G_Regularizer: 0.000 validation_error: 25.471 +(epoch: 1, iters: 74000, time: 0.526, data: 0.000) G_L1: 21.785 G_L1_ABSOLUTE: 5.365 G_L1_RELATIVE: 16.420 G_Regularizer: 0.000 validation_error: 25.427 +(epoch: 1, iters: 76000, time: 0.531, data: 0.000) G_L1: 24.412 G_L1_ABSOLUTE: 4.703 G_L1_RELATIVE: 19.709 G_Regularizer: 0.000 validation_error: 25.933 +(epoch: 1, iters: 78000, time: 0.523, data: 0.000) G_L1: 20.388 
G_L1_ABSOLUTE: 4.958 G_L1_RELATIVE: 15.430 G_Regularizer: 0.000 validation_error: 25.459 +(epoch: 1, iters: 80000, time: 0.527, data: 0.000) G_L1: 24.715 G_L1_ABSOLUTE: 4.889 G_L1_RELATIVE: 19.826 G_Regularizer: 0.000 validation_error: 25.478 +(epoch: 1, iters: 82000, time: 0.531, data: 0.000) G_L1: 17.904 G_L1_ABSOLUTE: 4.281 G_L1_RELATIVE: 13.622 G_Regularizer: 0.000 validation_error: 25.462 +(epoch: 1, iters: 84000, time: 0.525, data: 0.000) G_L1: 23.077 G_L1_ABSOLUTE: 4.637 G_L1_RELATIVE: 18.440 G_Regularizer: 0.000 validation_error: 25.672 +(epoch: 1, iters: 86000, time: 0.538, data: 0.000) G_L1: 25.415 G_L1_ABSOLUTE: 5.170 G_L1_RELATIVE: 20.246 G_Regularizer: 0.000 validation_error: 26.016 +(epoch: 1, iters: 88000, time: 0.525, data: 0.000) G_L1: 21.505 G_L1_ABSOLUTE: 4.944 G_L1_RELATIVE: 16.561 G_Regularizer: 0.000 validation_error: 25.608 +(epoch: 1, iters: 90000, time: 0.522, data: 0.000) G_L1: 22.340 G_L1_ABSOLUTE: 4.808 G_L1_RELATIVE: 17.533 G_Regularizer: 0.000 validation_error: 25.641 +(epoch: 1, iters: 92000, time: 0.528, data: 0.000) G_L1: 17.184 G_L1_ABSOLUTE: 5.004 G_L1_RELATIVE: 12.179 G_Regularizer: 0.000 validation_error: 25.229 +(epoch: 1, iters: 94000, time: 0.526, data: 0.000) G_L1: 22.259 G_L1_ABSOLUTE: 4.649 G_L1_RELATIVE: 17.610 G_Regularizer: 0.000 validation_error: 25.663 +(epoch: 1, iters: 96000, time: 0.531, data: 0.000) G_L1: 20.482 G_L1_ABSOLUTE: 4.829 G_L1_RELATIVE: 15.653 G_Regularizer: 0.000 validation_error: 25.542 +(epoch: 1, iters: 98000, time: 0.526, data: 0.000) G_L1: 20.044 G_L1_ABSOLUTE: 4.605 G_L1_RELATIVE: 15.439 G_Regularizer: 0.000 validation_error: 25.474 +(epoch: 1, iters: 100000, time: 0.524, data: 0.000) G_L1: 24.498 G_L1_ABSOLUTE: 4.666 G_L1_RELATIVE: 19.831 G_Regularizer: 0.000 validation_error: 25.764 +(epoch: 1, iters: 102000, time: 0.529, data: 0.000) G_L1: 28.585 G_L1_ABSOLUTE: 5.306 G_L1_RELATIVE: 23.278 G_Regularizer: 0.000 validation_error: 25.472 +(epoch: 1, iters: 104000, time: 0.525, data: 0.000) G_L1: 22.490 G_L1_ABSOLUTE: 4.104 G_L1_RELATIVE: 18.385 G_Regularizer: 0.000 validation_error: 25.019 +(epoch: 1, iters: 106000, time: 0.530, data: 0.000) G_L1: 23.004 G_L1_ABSOLUTE: 5.039 G_L1_RELATIVE: 17.965 G_Regularizer: 0.000 validation_error: 25.303 +(epoch: 1, iters: 108000, time: 0.530, data: 0.000) G_L1: 27.354 G_L1_ABSOLUTE: 4.710 G_L1_RELATIVE: 22.644 G_Regularizer: 0.000 validation_error: 25.447 +(epoch: 1, iters: 110000, time: 0.523, data: 0.000) G_L1: 21.328 G_L1_ABSOLUTE: 4.957 G_L1_RELATIVE: 16.371 G_Regularizer: 0.000 validation_error: 25.714 +(epoch: 1, iters: 112000, time: 0.525, data: 0.000) G_L1: 19.985 G_L1_ABSOLUTE: 4.154 G_L1_RELATIVE: 15.831 G_Regularizer: 0.000 validation_error: 25.122 +(epoch: 1, iters: 114000, time: 0.529, data: 0.000) G_L1: 22.220 G_L1_ABSOLUTE: 4.470 G_L1_RELATIVE: 17.750 G_Regularizer: 0.000 validation_error: 25.690 +(epoch: 1, iters: 116000, time: 0.527, data: 0.000) G_L1: 20.264 G_L1_ABSOLUTE: 3.744 G_L1_RELATIVE: 16.520 G_Regularizer: 0.000 validation_error: 25.616 +(epoch: 1, iters: 118000, time: 0.524, data: 0.000) G_L1: 23.164 G_L1_ABSOLUTE: 4.250 G_L1_RELATIVE: 18.915 G_Regularizer: 0.000 validation_error: 25.461 +(epoch: 1, iters: 120000, time: 0.535, data: 0.000) G_L1: 24.064 G_L1_ABSOLUTE: 4.503 G_L1_RELATIVE: 19.561 G_Regularizer: 0.000 validation_error: 25.242 +(epoch: 1, iters: 122000, time: 0.524, data: 0.000) G_L1: 20.198 G_L1_ABSOLUTE: 3.989 G_L1_RELATIVE: 16.209 G_Regularizer: 0.000 validation_error: 25.856 +(epoch: 1, iters: 124000, time: 0.535, data: 0.000) G_L1: 22.604 
G_L1_ABSOLUTE: 4.396 G_L1_RELATIVE: 18.208 G_Regularizer: 0.000 validation_error: 25.615
+[... several hundred loss-log entries, one every 2000 iterations through epochs 1-4, each of the form "(epoch: E, iters: N, time: T, data: D) G_L1: ... G_L1_ABSOLUTE: ... G_L1_RELATIVE: ... G_Regularizer: 0.000 validation_error: ..."; G_Regularizer stays at 0.000 throughout, and validation_error falls from about 25.6 early in epoch 1 to roughly 20.3-21.2 by epoch 4 ...]
+(epoch: 4, iters: 179744, time: 0.526, data: 0.000) G_L1: 15.732 G_L1_ABSOLUTE: 2.769
G_L1_RELATIVE: 12.963 G_Regularizer: 0.000 validation_error: 21.321 +(epoch: 4, iters: 181744, time: 0.530, data: 0.000) G_L1: 17.038 G_L1_ABSOLUTE: 3.448 G_L1_RELATIVE: 13.590 G_Regularizer: 0.000 validation_error: 20.673 +(epoch: 4, iters: 183744, time: 0.527, data: 0.000) G_L1: 17.348 G_L1_ABSOLUTE: 2.737 G_L1_RELATIVE: 14.611 G_Regularizer: 0.000 validation_error: 21.898 +(epoch: 4, iters: 185744, time: 0.528, data: 0.000) G_L1: 16.733 G_L1_ABSOLUTE: 3.018 G_L1_RELATIVE: 13.715 G_Regularizer: 0.000 validation_error: 21.037 +(epoch: 4, iters: 187744, time: 0.537, data: 0.000) G_L1: 15.632 G_L1_ABSOLUTE: 3.297 G_L1_RELATIVE: 12.335 G_Regularizer: 0.000 validation_error: 21.629 +(epoch: 4, iters: 189744, time: 0.527, data: 0.000) G_L1: 15.297 G_L1_ABSOLUTE: 2.958 G_L1_RELATIVE: 12.340 G_Regularizer: 0.000 validation_error: 21.024 +(epoch: 4, iters: 191744, time: 0.529, data: 0.000) G_L1: 17.970 G_L1_ABSOLUTE: 3.312 G_L1_RELATIVE: 14.657 G_Regularizer: 0.000 validation_error: 20.910 +(epoch: 4, iters: 193744, time: 0.527, data: 0.000) G_L1: 16.879 G_L1_ABSOLUTE: 3.324 G_L1_RELATIVE: 13.555 G_Regularizer: 0.000 validation_error: 20.430 +(epoch: 4, iters: 195744, time: 0.527, data: 0.000) G_L1: 15.272 G_L1_ABSOLUTE: 2.650 G_L1_RELATIVE: 12.623 G_Regularizer: 0.000 validation_error: 20.946 +(epoch: 4, iters: 197744, time: 0.533, data: 0.000) G_L1: 17.341 G_L1_ABSOLUTE: 3.230 G_L1_RELATIVE: 14.112 G_Regularizer: 0.000 validation_error: 21.608 +(epoch: 4, iters: 199744, time: 0.529, data: 0.000) G_L1: 17.586 G_L1_ABSOLUTE: 2.879 G_L1_RELATIVE: 14.707 G_Regularizer: 0.000 validation_error: 21.232 +(epoch: 4, iters: 201744, time: 0.542, data: 0.000) G_L1: 14.913 G_L1_ABSOLUTE: 3.138 G_L1_RELATIVE: 11.774 G_Regularizer: 0.000 validation_error: 21.487 +(epoch: 4, iters: 203744, time: 0.530, data: 0.000) G_L1: 15.769 G_L1_ABSOLUTE: 2.777 G_L1_RELATIVE: 12.992 G_Regularizer: 0.000 validation_error: 21.039 +(epoch: 4, iters: 205744, time: 0.532, data: 0.000) G_L1: 19.274 G_L1_ABSOLUTE: 2.757 G_L1_RELATIVE: 16.516 G_Regularizer: 0.000 validation_error: 21.382 +(epoch: 4, iters: 207744, time: 0.529, data: 0.000) G_L1: 16.524 G_L1_ABSOLUTE: 3.157 G_L1_RELATIVE: 13.367 G_Regularizer: 0.000 validation_error: 21.272 +(epoch: 4, iters: 209744, time: 0.529, data: 0.000) G_L1: 14.329 G_L1_ABSOLUTE: 2.659 G_L1_RELATIVE: 11.671 G_Regularizer: 0.000 validation_error: 21.864 +(epoch: 4, iters: 211744, time: 0.530, data: 0.000) G_L1: 18.949 G_L1_ABSOLUTE: 2.921 G_L1_RELATIVE: 16.028 G_Regularizer: 0.000 validation_error: 21.542 +(epoch: 4, iters: 213744, time: 0.532, data: 0.000) G_L1: 15.176 G_L1_ABSOLUTE: 3.045 G_L1_RELATIVE: 12.131 G_Regularizer: 0.000 validation_error: 20.877 +(epoch: 4, iters: 215744, time: 0.529, data: 0.000) G_L1: 21.322 G_L1_ABSOLUTE: 2.912 G_L1_RELATIVE: 18.410 G_Regularizer: 0.000 validation_error: 21.861 +(epoch: 4, iters: 217744, time: 0.527, data: 0.000) G_L1: 17.395 G_L1_ABSOLUTE: 3.124 G_L1_RELATIVE: 14.271 G_Regularizer: 0.000 validation_error: 21.358 +(epoch: 4, iters: 219744, time: 0.527, data: 0.000) G_L1: 21.469 G_L1_ABSOLUTE: 2.615 G_L1_RELATIVE: 18.855 G_Regularizer: 0.000 validation_error: 21.423 +(epoch: 4, iters: 221744, time: 0.532, data: 0.000) G_L1: 14.882 G_L1_ABSOLUTE: 3.062 G_L1_RELATIVE: 11.820 G_Regularizer: 0.000 validation_error: 21.286 +(epoch: 4, iters: 223744, time: 0.527, data: 0.000) G_L1: 17.217 G_L1_ABSOLUTE: 2.650 G_L1_RELATIVE: 14.567 G_Regularizer: 0.000 validation_error: 21.546 +(epoch: 4, iters: 225744, time: 0.529, data: 0.000) G_L1: 19.188 
G_L1_ABSOLUTE: 2.544 G_L1_RELATIVE: 16.644 G_Regularizer: 0.000 validation_error: 20.981 +(epoch: 4, iters: 227744, time: 0.534, data: 0.000) G_L1: 17.153 G_L1_ABSOLUTE: 3.044 G_L1_RELATIVE: 14.110 G_Regularizer: 0.000 validation_error: 21.080 +(epoch: 4, iters: 229744, time: 0.524, data: 0.000) G_L1: 17.809 G_L1_ABSOLUTE: 3.119 G_L1_RELATIVE: 14.690 G_Regularizer: 0.000 validation_error: 21.191 +(epoch: 4, iters: 231744, time: 0.528, data: 0.000) G_L1: 19.376 G_L1_ABSOLUTE: 2.941 G_L1_RELATIVE: 16.436 G_Regularizer: 0.000 validation_error: 21.467 +(epoch: 4, iters: 233744, time: 0.536, data: 0.000) G_L1: 16.162 G_L1_ABSOLUTE: 2.989 G_L1_RELATIVE: 13.173 G_Regularizer: 0.000 validation_error: 20.596 +(epoch: 4, iters: 235744, time: 0.529, data: 0.000) G_L1: 15.474 G_L1_ABSOLUTE: 2.760 G_L1_RELATIVE: 12.714 G_Regularizer: 0.000 validation_error: 21.654 +(epoch: 4, iters: 237744, time: 0.528, data: 0.000) G_L1: 14.889 G_L1_ABSOLUTE: 2.779 G_L1_RELATIVE: 12.110 G_Regularizer: 0.000 validation_error: 21.755 +(epoch: 4, iters: 239744, time: 0.527, data: 0.000) G_L1: 15.106 G_L1_ABSOLUTE: 2.891 G_L1_RELATIVE: 12.215 G_Regularizer: 0.000 validation_error: 21.126 +(epoch: 4, iters: 241744, time: 0.530, data: 0.000) G_L1: 13.297 G_L1_ABSOLUTE: 3.214 G_L1_RELATIVE: 10.083 G_Regularizer: 0.000 validation_error: 20.855 +(epoch: 4, iters: 243744, time: 0.535, data: 0.000) G_L1: 17.786 G_L1_ABSOLUTE: 3.253 G_L1_RELATIVE: 14.533 G_Regularizer: 0.000 validation_error: 21.769 +(epoch: 4, iters: 245744, time: 0.523, data: 0.000) G_L1: 17.610 G_L1_ABSOLUTE: 3.050 G_L1_RELATIVE: 14.560 G_Regularizer: 0.000 validation_error: 20.860 +(epoch: 4, iters: 247744, time: 0.532, data: 0.000) G_L1: 18.648 G_L1_ABSOLUTE: 3.270 G_L1_RELATIVE: 15.378 G_Regularizer: 0.000 validation_error: 21.160 +(epoch: 4, iters: 249744, time: 0.529, data: 0.000) G_L1: 15.973 G_L1_ABSOLUTE: 2.744 G_L1_RELATIVE: 13.229 G_Regularizer: 0.000 validation_error: 20.643 +(epoch: 4, iters: 251744, time: 0.527, data: 0.000) G_L1: 18.756 G_L1_ABSOLUTE: 3.282 G_L1_RELATIVE: 15.474 G_Regularizer: 0.000 validation_error: 21.351 +(epoch: 4, iters: 253744, time: 0.530, data: 0.000) G_L1: 14.392 G_L1_ABSOLUTE: 2.802 G_L1_RELATIVE: 11.590 G_Regularizer: 0.000 validation_error: 21.784 +(epoch: 4, iters: 255744, time: 0.528, data: 0.000) G_L1: 18.928 G_L1_ABSOLUTE: 3.237 G_L1_RELATIVE: 15.691 G_Regularizer: 0.000 validation_error: 21.813 +(epoch: 4, iters: 257744, time: 0.532, data: 0.000) G_L1: 15.665 G_L1_ABSOLUTE: 2.824 G_L1_RELATIVE: 12.840 G_Regularizer: 0.000 validation_error: 20.761 +(epoch: 4, iters: 259744, time: 0.531, data: 0.000) G_L1: 15.276 G_L1_ABSOLUTE: 2.671 G_L1_RELATIVE: 12.605 G_Regularizer: 0.000 validation_error: 20.587 +(epoch: 4, iters: 261744, time: 0.526, data: 0.000) G_L1: 16.180 G_L1_ABSOLUTE: 3.049 G_L1_RELATIVE: 13.132 G_Regularizer: 0.000 validation_error: 21.627 +(epoch: 4, iters: 263744, time: 0.527, data: 0.000) G_L1: 14.778 G_L1_ABSOLUTE: 2.682 G_L1_RELATIVE: 12.097 G_Regularizer: 0.000 validation_error: 20.331 +(epoch: 4, iters: 265744, time: 0.532, data: 0.000) G_L1: 15.793 G_L1_ABSOLUTE: 2.893 G_L1_RELATIVE: 12.900 G_Regularizer: 0.000 validation_error: 21.787 +(epoch: 4, iters: 267744, time: 0.531, data: 0.000) G_L1: 16.914 G_L1_ABSOLUTE: 2.660 G_L1_RELATIVE: 14.253 G_Regularizer: 0.000 validation_error: 21.357 +(epoch: 4, iters: 269744, time: 0.531, data: 0.000) G_L1: 13.687 G_L1_ABSOLUTE: 3.093 G_L1_RELATIVE: 10.594 G_Regularizer: 0.000 validation_error: 21.418 +(epoch: 4, iters: 271744, time: 0.529, data: 0.000) 
G_L1: 15.516 G_L1_ABSOLUTE: 3.515 G_L1_RELATIVE: 12.001 G_Regularizer: 0.000 validation_error: 21.004 +(epoch: 4, iters: 273744, time: 0.534, data: 0.000) G_L1: 16.192 G_L1_ABSOLUTE: 3.194 G_L1_RELATIVE: 12.998 G_Regularizer: 0.000 validation_error: 20.915 +(epoch: 4, iters: 275744, time: 0.531, data: 0.000) G_L1: 18.883 G_L1_ABSOLUTE: 2.637 G_L1_RELATIVE: 16.246 G_Regularizer: 0.000 validation_error: 20.991 +(epoch: 4, iters: 277744, time: 0.527, data: 0.000) G_L1: 15.228 G_L1_ABSOLUTE: 2.586 G_L1_RELATIVE: 12.642 G_Regularizer: 0.000 validation_error: 21.353 +(epoch: 4, iters: 279744, time: 0.533, data: 0.000) G_L1: 19.209 G_L1_ABSOLUTE: 3.341 G_L1_RELATIVE: 15.868 G_Regularizer: 0.000 validation_error: 20.725 +(epoch: 4, iters: 281744, time: 0.533, data: 0.000) G_L1: 17.870 G_L1_ABSOLUTE: 2.652 G_L1_RELATIVE: 15.218 G_Regularizer: 0.000 validation_error: 20.906 +(epoch: 4, iters: 283744, time: 0.527, data: 0.000) G_L1: 18.653 G_L1_ABSOLUTE: 3.365 G_L1_RELATIVE: 15.287 G_Regularizer: 0.000 validation_error: 21.291 +(epoch: 4, iters: 285744, time: 0.527, data: 0.000) G_L1: 12.783 G_L1_ABSOLUTE: 2.857 G_L1_RELATIVE: 9.926 G_Regularizer: 0.000 validation_error: 21.252 +(epoch: 4, iters: 287744, time: 0.530, data: 0.000) G_L1: 16.768 G_L1_ABSOLUTE: 3.043 G_L1_RELATIVE: 13.724 G_Regularizer: 0.000 validation_error: 21.249 +(epoch: 4, iters: 289744, time: 0.521, data: 0.000) G_L1: 18.966 G_L1_ABSOLUTE: 3.215 G_L1_RELATIVE: 15.751 G_Regularizer: 0.000 validation_error: 21.186 +(epoch: 4, iters: 291744, time: 0.531, data: 0.000) G_L1: 19.073 G_L1_ABSOLUTE: 3.205 G_L1_RELATIVE: 15.868 G_Regularizer: 0.000 validation_error: 20.856 +(epoch: 4, iters: 293744, time: 0.531, data: 0.000) G_L1: 16.995 G_L1_ABSOLUTE: 2.892 G_L1_RELATIVE: 14.102 G_Regularizer: 0.000 validation_error: 20.483 +(epoch: 4, iters: 295744, time: 0.527, data: 0.000) G_L1: 14.628 G_L1_ABSOLUTE: 2.556 G_L1_RELATIVE: 12.072 G_Regularizer: 0.000 validation_error: 21.348 +(epoch: 4, iters: 297744, time: 0.530, data: 0.000) G_L1: 16.006 G_L1_ABSOLUTE: 3.033 G_L1_RELATIVE: 12.973 G_Regularizer: 0.000 validation_error: 21.497 +(epoch: 4, iters: 299744, time: 0.526, data: 0.000) G_L1: 18.891 G_L1_ABSOLUTE: 3.243 G_L1_RELATIVE: 15.647 G_Regularizer: 0.000 validation_error: 20.483 +(epoch: 4, iters: 301744, time: 0.525, data: 0.000) G_L1: 17.897 G_L1_ABSOLUTE: 3.047 G_L1_RELATIVE: 14.850 G_Regularizer: 0.000 validation_error: 20.988 +(epoch: 5, iters: 992, time: 0.534, data: 0.000) G_L1: 14.018 G_L1_ABSOLUTE: 3.353 G_L1_RELATIVE: 10.665 G_Regularizer: 0.000 validation_error: 21.132 +(epoch: 5, iters: 2992, time: 0.537, data: 0.000) G_L1: 16.055 G_L1_ABSOLUTE: 2.983 G_L1_RELATIVE: 13.072 G_Regularizer: 0.000 validation_error: 22.075 +(epoch: 5, iters: 4992, time: 0.533, data: 0.000) G_L1: 17.554 G_L1_ABSOLUTE: 2.966 G_L1_RELATIVE: 14.588 G_Regularizer: 0.000 validation_error: 21.824 +(epoch: 5, iters: 6992, time: 0.536, data: 0.000) G_L1: 16.833 G_L1_ABSOLUTE: 2.699 G_L1_RELATIVE: 14.133 G_Regularizer: 0.000 validation_error: 21.336 +(epoch: 5, iters: 8992, time: 0.527, data: 0.000) G_L1: 17.394 G_L1_ABSOLUTE: 2.753 G_L1_RELATIVE: 14.640 G_Regularizer: 0.000 validation_error: 20.883 +(epoch: 5, iters: 10992, time: 0.528, data: 0.000) G_L1: 15.535 G_L1_ABSOLUTE: 3.328 G_L1_RELATIVE: 12.207 G_Regularizer: 0.000 validation_error: 21.223 +(epoch: 5, iters: 12992, time: 0.531, data: 0.000) G_L1: 14.813 G_L1_ABSOLUTE: 2.557 G_L1_RELATIVE: 12.256 G_Regularizer: 0.000 validation_error: 21.005 +(epoch: 5, iters: 14992, time: 0.536, data: 0.000) 
G_L1: 19.054 G_L1_ABSOLUTE: 3.761 G_L1_RELATIVE: 15.293 G_Regularizer: 0.000 validation_error: 21.727 +(epoch: 5, iters: 16992, time: 0.528, data: 0.000) G_L1: 16.415 G_L1_ABSOLUTE: 2.591 G_L1_RELATIVE: 13.823 G_Regularizer: 0.000 validation_error: 21.115 +(epoch: 5, iters: 18992, time: 0.529, data: 0.000) G_L1: 12.564 G_L1_ABSOLUTE: 2.649 G_L1_RELATIVE: 9.915 G_Regularizer: 0.000 validation_error: 20.410 +(epoch: 5, iters: 20992, time: 0.533, data: 0.000) G_L1: 15.131 G_L1_ABSOLUTE: 2.705 G_L1_RELATIVE: 12.426 G_Regularizer: 0.000 validation_error: 20.820 +(epoch: 5, iters: 22992, time: 0.533, data: 0.000) G_L1: 15.767 G_L1_ABSOLUTE: 2.759 G_L1_RELATIVE: 13.008 G_Regularizer: 0.000 validation_error: 21.057 +(epoch: 5, iters: 24992, time: 0.530, data: 0.000) G_L1: 14.083 G_L1_ABSOLUTE: 2.993 G_L1_RELATIVE: 11.090 G_Regularizer: 0.000 validation_error: 20.696 +(epoch: 5, iters: 26992, time: 0.528, data: 0.000) G_L1: 16.393 G_L1_ABSOLUTE: 2.693 G_L1_RELATIVE: 13.701 G_Regularizer: 0.000 validation_error: 21.019 +(epoch: 5, iters: 28992, time: 0.531, data: 0.000) G_L1: 14.112 G_L1_ABSOLUTE: 2.931 G_L1_RELATIVE: 11.180 G_Regularizer: 0.000 validation_error: 21.705 +(epoch: 5, iters: 30992, time: 0.531, data: 0.000) G_L1: 17.419 G_L1_ABSOLUTE: 2.870 G_L1_RELATIVE: 14.549 G_Regularizer: 0.000 validation_error: 21.454 +(epoch: 5, iters: 32992, time: 0.529, data: 0.000) G_L1: 17.585 G_L1_ABSOLUTE: 3.425 G_L1_RELATIVE: 14.160 G_Regularizer: 0.000 validation_error: 21.233 +(epoch: 5, iters: 34992, time: 0.530, data: 0.000) G_L1: 14.840 G_L1_ABSOLUTE: 2.803 G_L1_RELATIVE: 12.037 G_Regularizer: 0.000 validation_error: 21.131 +(epoch: 5, iters: 36992, time: 0.528, data: 0.000) G_L1: 14.474 G_L1_ABSOLUTE: 2.846 G_L1_RELATIVE: 11.628 G_Regularizer: 0.000 validation_error: 21.431 +(epoch: 5, iters: 38992, time: 0.531, data: 0.000) G_L1: 20.901 G_L1_ABSOLUTE: 3.795 G_L1_RELATIVE: 17.106 G_Regularizer: 0.000 validation_error: 21.382 +(epoch: 5, iters: 40992, time: 0.531, data: 0.000) G_L1: 13.466 G_L1_ABSOLUTE: 2.484 G_L1_RELATIVE: 10.982 G_Regularizer: 0.000 validation_error: 21.642 +(epoch: 5, iters: 42992, time: 0.533, data: 0.000) G_L1: 14.916 G_L1_ABSOLUTE: 2.738 G_L1_RELATIVE: 12.178 G_Regularizer: 0.000 validation_error: 20.706 +(epoch: 5, iters: 44992, time: 0.527, data: 0.000) G_L1: 13.906 G_L1_ABSOLUTE: 3.123 G_L1_RELATIVE: 10.784 G_Regularizer: 0.000 validation_error: 21.438 +(epoch: 5, iters: 46992, time: 0.532, data: 0.000) G_L1: 12.649 G_L1_ABSOLUTE: 2.341 G_L1_RELATIVE: 10.308 G_Regularizer: 0.000 validation_error: 20.613 +(epoch: 5, iters: 48992, time: 0.534, data: 0.000) G_L1: 15.013 G_L1_ABSOLUTE: 2.636 G_L1_RELATIVE: 12.377 G_Regularizer: 0.000 validation_error: 21.041 +(epoch: 5, iters: 50992, time: 0.532, data: 0.000) G_L1: 17.935 G_L1_ABSOLUTE: 3.055 G_L1_RELATIVE: 14.881 G_Regularizer: 0.000 validation_error: 21.096 +(epoch: 5, iters: 52992, time: 0.534, data: 0.000) G_L1: 18.116 G_L1_ABSOLUTE: 3.148 G_L1_RELATIVE: 14.968 G_Regularizer: 0.000 validation_error: 21.188 +(epoch: 5, iters: 54992, time: 0.537, data: 0.000) G_L1: 15.619 G_L1_ABSOLUTE: 3.478 G_L1_RELATIVE: 12.141 G_Regularizer: 0.000 validation_error: 21.112 +(epoch: 5, iters: 56992, time: 0.531, data: 0.000) G_L1: 16.092 G_L1_ABSOLUTE: 2.721 G_L1_RELATIVE: 13.371 G_Regularizer: 0.000 validation_error: 21.418 +(epoch: 5, iters: 58992, time: 0.520, data: 0.000) G_L1: 19.706 G_L1_ABSOLUTE: 3.103 G_L1_RELATIVE: 16.603 G_Regularizer: 0.000 validation_error: 22.348 +(epoch: 5, iters: 60992, time: 0.524, data: 0.000) G_L1: 14.858 
G_L1_ABSOLUTE: 2.913 G_L1_RELATIVE: 11.945 G_Regularizer: 0.000 validation_error: 20.937 +(epoch: 5, iters: 62992, time: 0.525, data: 0.000) G_L1: 16.800 G_L1_ABSOLUTE: 3.368 G_L1_RELATIVE: 13.432 G_Regularizer: 0.000 validation_error: 21.463 +(epoch: 5, iters: 64992, time: 0.525, data: 0.000) G_L1: 15.168 G_L1_ABSOLUTE: 2.686 G_L1_RELATIVE: 12.483 G_Regularizer: 0.000 validation_error: 21.306 +(epoch: 5, iters: 66992, time: 0.521, data: 0.000) G_L1: 14.167 G_L1_ABSOLUTE: 3.052 G_L1_RELATIVE: 11.115 G_Regularizer: 0.000 validation_error: 20.676 +(epoch: 5, iters: 68992, time: 0.524, data: 0.000) G_L1: 16.224 G_L1_ABSOLUTE: 3.014 G_L1_RELATIVE: 13.210 G_Regularizer: 0.000 validation_error: 21.593 +(epoch: 5, iters: 70992, time: 0.525, data: 0.000) G_L1: 16.905 G_L1_ABSOLUTE: 2.849 G_L1_RELATIVE: 14.057 G_Regularizer: 0.000 validation_error: 21.296 +(epoch: 5, iters: 72992, time: 0.526, data: 0.000) G_L1: 15.040 G_L1_ABSOLUTE: 2.568 G_L1_RELATIVE: 12.472 G_Regularizer: 0.000 validation_error: 20.781 +(epoch: 5, iters: 74992, time: 0.526, data: 0.000) G_L1: 15.518 G_L1_ABSOLUTE: 2.873 G_L1_RELATIVE: 12.645 G_Regularizer: 0.000 validation_error: 20.984 +(epoch: 5, iters: 76992, time: 0.532, data: 0.000) G_L1: 15.827 G_L1_ABSOLUTE: 2.922 G_L1_RELATIVE: 12.904 G_Regularizer: 0.000 validation_error: 20.976 +(epoch: 5, iters: 78992, time: 0.530, data: 0.000) G_L1: 14.993 G_L1_ABSOLUTE: 2.621 G_L1_RELATIVE: 12.372 G_Regularizer: 0.000 validation_error: 21.610 +(epoch: 5, iters: 80992, time: 0.530, data: 0.000) G_L1: 15.138 G_L1_ABSOLUTE: 2.687 G_L1_RELATIVE: 12.451 G_Regularizer: 0.000 validation_error: 20.853 +(epoch: 5, iters: 82992, time: 0.532, data: 0.000) G_L1: 16.424 G_L1_ABSOLUTE: 3.520 G_L1_RELATIVE: 12.904 G_Regularizer: 0.000 validation_error: 21.014 +(epoch: 5, iters: 84992, time: 0.527, data: 0.000) G_L1: 13.955 G_L1_ABSOLUTE: 2.840 G_L1_RELATIVE: 11.115 G_Regularizer: 0.000 validation_error: 20.625 +(epoch: 5, iters: 86992, time: 0.534, data: 0.000) G_L1: 19.030 G_L1_ABSOLUTE: 3.118 G_L1_RELATIVE: 15.912 G_Regularizer: 0.000 validation_error: 21.587 +(epoch: 5, iters: 88992, time: 0.528, data: 0.000) G_L1: 14.833 G_L1_ABSOLUTE: 2.819 G_L1_RELATIVE: 12.014 G_Regularizer: 0.000 validation_error: 20.833 +(epoch: 5, iters: 90992, time: 0.532, data: 0.000) G_L1: 17.091 G_L1_ABSOLUTE: 2.781 G_L1_RELATIVE: 14.310 G_Regularizer: 0.000 validation_error: 20.106 +(epoch: 5, iters: 92992, time: 0.530, data: 0.000) G_L1: 17.745 G_L1_ABSOLUTE: 2.801 G_L1_RELATIVE: 14.944 G_Regularizer: 0.000 validation_error: 21.000 +(epoch: 5, iters: 94992, time: 0.531, data: 0.000) G_L1: 17.968 G_L1_ABSOLUTE: 3.460 G_L1_RELATIVE: 14.508 G_Regularizer: 0.000 validation_error: 21.294 +(epoch: 5, iters: 96992, time: 0.516, data: 0.000) G_L1: 18.130 G_L1_ABSOLUTE: 3.123 G_L1_RELATIVE: 15.007 G_Regularizer: 0.000 validation_error: 20.783 +(epoch: 5, iters: 98992, time: 0.520, data: 0.000) G_L1: 17.821 G_L1_ABSOLUTE: 2.968 G_L1_RELATIVE: 14.853 G_Regularizer: 0.000 validation_error: 21.699 +(epoch: 5, iters: 100992, time: 0.517, data: 0.000) G_L1: 15.793 G_L1_ABSOLUTE: 2.487 G_L1_RELATIVE: 13.306 G_Regularizer: 0.000 validation_error: 21.338 +(epoch: 5, iters: 102992, time: 0.516, data: 0.000) G_L1: 18.522 G_L1_ABSOLUTE: 2.822 G_L1_RELATIVE: 15.700 G_Regularizer: 0.000 validation_error: 20.696 +(epoch: 5, iters: 104992, time: 0.536, data: 0.000) G_L1: 18.343 G_L1_ABSOLUTE: 3.039 G_L1_RELATIVE: 15.304 G_Regularizer: 0.000 validation_error: 21.680 +(epoch: 5, iters: 106992, time: 0.525, data: 0.000) G_L1: 16.912 
G_L1_ABSOLUTE: 2.909 G_L1_RELATIVE: 14.002 G_Regularizer: 0.000 validation_error: 21.150 +(epoch: 5, iters: 108992, time: 0.524, data: 0.000) G_L1: 19.879 G_L1_ABSOLUTE: 3.014 G_L1_RELATIVE: 16.865 G_Regularizer: 0.000 validation_error: 21.211 +(epoch: 5, iters: 110992, time: 0.521, data: 0.000) G_L1: 17.852 G_L1_ABSOLUTE: 3.022 G_L1_RELATIVE: 14.829 G_Regularizer: 0.000 validation_error: 20.677 +(epoch: 5, iters: 112992, time: 0.524, data: 0.000) G_L1: 15.363 G_L1_ABSOLUTE: 3.192 G_L1_RELATIVE: 12.171 G_Regularizer: 0.000 validation_error: 20.461 +(epoch: 5, iters: 114992, time: 0.521, data: 0.000) G_L1: 14.951 G_L1_ABSOLUTE: 2.564 G_L1_RELATIVE: 12.386 G_Regularizer: 0.000 validation_error: 20.140 +(epoch: 5, iters: 116992, time: 0.528, data: 0.000) G_L1: 15.852 G_L1_ABSOLUTE: 3.393 G_L1_RELATIVE: 12.459 G_Regularizer: 0.000 validation_error: 20.762 +(epoch: 5, iters: 118992, time: 0.526, data: 0.000) G_L1: 15.606 G_L1_ABSOLUTE: 2.689 G_L1_RELATIVE: 12.917 G_Regularizer: 0.000 validation_error: 21.023 +(epoch: 5, iters: 120992, time: 0.527, data: 0.000) G_L1: 17.720 G_L1_ABSOLUTE: 2.948 G_L1_RELATIVE: 14.772 G_Regularizer: 0.000 validation_error: 21.456 +(epoch: 5, iters: 122992, time: 0.537, data: 0.000) G_L1: 14.697 G_L1_ABSOLUTE: 2.932 G_L1_RELATIVE: 11.765 G_Regularizer: 0.000 validation_error: 20.899 +(epoch: 5, iters: 124992, time: 0.535, data: 0.000) G_L1: 13.932 G_L1_ABSOLUTE: 2.952 G_L1_RELATIVE: 10.980 G_Regularizer: 0.000 validation_error: 21.008 +(epoch: 5, iters: 126992, time: 0.522, data: 0.000) G_L1: 16.197 G_L1_ABSOLUTE: 3.104 G_L1_RELATIVE: 13.093 G_Regularizer: 0.000 validation_error: 20.928 +(epoch: 5, iters: 128992, time: 0.527, data: 0.000) G_L1: 14.652 G_L1_ABSOLUTE: 2.826 G_L1_RELATIVE: 11.826 G_Regularizer: 0.000 validation_error: 21.165 +(epoch: 5, iters: 130992, time: 0.527, data: 0.000) G_L1: 17.918 G_L1_ABSOLUTE: 3.116 G_L1_RELATIVE: 14.802 G_Regularizer: 0.000 validation_error: 20.602 +(epoch: 5, iters: 132992, time: 0.525, data: 0.000) G_L1: 17.076 G_L1_ABSOLUTE: 2.908 G_L1_RELATIVE: 14.167 G_Regularizer: 0.000 validation_error: 20.011 +(epoch: 5, iters: 134992, time: 0.529, data: 0.000) G_L1: 16.228 G_L1_ABSOLUTE: 2.605 G_L1_RELATIVE: 13.623 G_Regularizer: 0.000 validation_error: 20.966 +(epoch: 5, iters: 136992, time: 0.526, data: 0.000) G_L1: 15.628 G_L1_ABSOLUTE: 3.199 G_L1_RELATIVE: 12.429 G_Regularizer: 0.000 validation_error: 21.596 +(epoch: 5, iters: 138992, time: 0.528, data: 0.000) G_L1: 16.223 G_L1_ABSOLUTE: 3.144 G_L1_RELATIVE: 13.080 G_Regularizer: 0.000 validation_error: 20.500 +(epoch: 5, iters: 140992, time: 0.532, data: 0.000) G_L1: 19.719 G_L1_ABSOLUTE: 2.660 G_L1_RELATIVE: 17.058 G_Regularizer: 0.000 validation_error: 21.485 +(epoch: 5, iters: 142992, time: 0.532, data: 0.000) G_L1: 18.376 G_L1_ABSOLUTE: 2.734 G_L1_RELATIVE: 15.642 G_Regularizer: 0.000 validation_error: 21.511 +(epoch: 5, iters: 144992, time: 0.528, data: 0.000) G_L1: 16.820 G_L1_ABSOLUTE: 2.649 G_L1_RELATIVE: 14.171 G_Regularizer: 0.000 validation_error: 20.644 +(epoch: 5, iters: 146992, time: 0.528, data: 0.000) G_L1: 15.625 G_L1_ABSOLUTE: 3.514 G_L1_RELATIVE: 12.111 G_Regularizer: 0.000 validation_error: 20.464 +(epoch: 5, iters: 148992, time: 0.534, data: 0.000) G_L1: 15.255 G_L1_ABSOLUTE: 2.739 G_L1_RELATIVE: 12.516 G_Regularizer: 0.000 validation_error: 20.891 +(epoch: 5, iters: 150992, time: 0.525, data: 0.000) G_L1: 15.172 G_L1_ABSOLUTE: 2.842 G_L1_RELATIVE: 12.329 G_Regularizer: 0.000 validation_error: 20.742 +(epoch: 5, iters: 152992, time: 0.525, data: 0.000) 
G_L1: 17.636 G_L1_ABSOLUTE: 3.128 G_L1_RELATIVE: 14.509 G_Regularizer: 0.000 validation_error: 20.232 +(epoch: 5, iters: 154992, time: 0.529, data: 0.000) G_L1: 19.175 G_L1_ABSOLUTE: 3.514 G_L1_RELATIVE: 15.661 G_Regularizer: 0.000 validation_error: 20.722 +(epoch: 5, iters: 156992, time: 0.529, data: 0.000) G_L1: 16.192 G_L1_ABSOLUTE: 2.670 G_L1_RELATIVE: 13.521 G_Regularizer: 0.000 validation_error: 21.077 +(epoch: 5, iters: 158992, time: 0.533, data: 0.000) G_L1: 16.512 G_L1_ABSOLUTE: 2.416 G_L1_RELATIVE: 14.096 G_Regularizer: 0.000 validation_error: 21.003 +(epoch: 5, iters: 160992, time: 0.538, data: 0.000) G_L1: 17.621 G_L1_ABSOLUTE: 2.973 G_L1_RELATIVE: 14.649 G_Regularizer: 0.000 validation_error: 20.191 +(epoch: 5, iters: 162992, time: 0.527, data: 0.000) G_L1: 18.010 G_L1_ABSOLUTE: 3.079 G_L1_RELATIVE: 14.931 G_Regularizer: 0.000 validation_error: 20.685 +(epoch: 5, iters: 164992, time: 0.532, data: 0.000) G_L1: 12.928 G_L1_ABSOLUTE: 2.614 G_L1_RELATIVE: 10.315 G_Regularizer: 0.000 validation_error: 21.217 +(epoch: 5, iters: 166992, time: 0.527, data: 0.000) G_L1: 15.709 G_L1_ABSOLUTE: 3.197 G_L1_RELATIVE: 12.512 G_Regularizer: 0.000 validation_error: 21.032 +(epoch: 5, iters: 168992, time: 0.530, data: 0.000) G_L1: 14.896 G_L1_ABSOLUTE: 2.696 G_L1_RELATIVE: 12.200 G_Regularizer: 0.000 validation_error: 20.744 +(epoch: 5, iters: 170992, time: 0.531, data: 0.000) G_L1: 17.794 G_L1_ABSOLUTE: 2.842 G_L1_RELATIVE: 14.951 G_Regularizer: 0.000 validation_error: 20.515 +(epoch: 5, iters: 172992, time: 0.535, data: 0.000) G_L1: 16.485 G_L1_ABSOLUTE: 2.786 G_L1_RELATIVE: 13.700 G_Regularizer: 0.000 validation_error: 21.376 +(epoch: 5, iters: 174992, time: 0.530, data: 0.000) G_L1: 16.218 G_L1_ABSOLUTE: 3.070 G_L1_RELATIVE: 13.148 G_Regularizer: 0.000 validation_error: 20.978 +(epoch: 5, iters: 176992, time: 0.528, data: 0.000) G_L1: 17.142 G_L1_ABSOLUTE: 2.713 G_L1_RELATIVE: 14.428 G_Regularizer: 0.000 validation_error: 20.700 +(epoch: 5, iters: 178992, time: 0.526, data: 0.000) G_L1: 14.390 G_L1_ABSOLUTE: 2.877 G_L1_RELATIVE: 11.513 G_Regularizer: 0.000 validation_error: 20.437 +(epoch: 5, iters: 180992, time: 0.528, data: 0.000) G_L1: 17.505 G_L1_ABSOLUTE: 2.642 G_L1_RELATIVE: 14.864 G_Regularizer: 0.000 validation_error: 20.833 +(epoch: 5, iters: 182992, time: 0.530, data: 0.000) G_L1: 16.481 G_L1_ABSOLUTE: 2.732 G_L1_RELATIVE: 13.749 G_Regularizer: 0.000 validation_error: 21.315 +(epoch: 5, iters: 184992, time: 0.530, data: 0.000) G_L1: 15.490 G_L1_ABSOLUTE: 3.180 G_L1_RELATIVE: 12.310 G_Regularizer: 0.000 validation_error: 20.652 +(epoch: 5, iters: 186992, time: 0.523, data: 0.000) G_L1: 19.083 G_L1_ABSOLUTE: 2.669 G_L1_RELATIVE: 16.414 G_Regularizer: 0.000 validation_error: 21.482 +(epoch: 5, iters: 188992, time: 0.522, data: 0.000) G_L1: 12.842 G_L1_ABSOLUTE: 2.722 G_L1_RELATIVE: 10.120 G_Regularizer: 0.000 validation_error: 21.589 +(epoch: 5, iters: 190992, time: 0.523, data: 0.000) G_L1: 17.215 G_L1_ABSOLUTE: 3.183 G_L1_RELATIVE: 14.031 G_Regularizer: 0.000 validation_error: 20.766 +(epoch: 5, iters: 192992, time: 0.532, data: 0.000) G_L1: 17.388 G_L1_ABSOLUTE: 3.215 G_L1_RELATIVE: 14.173 G_Regularizer: 0.000 validation_error: 20.514 +(epoch: 5, iters: 194992, time: 0.523, data: 0.000) G_L1: 15.192 G_L1_ABSOLUTE: 3.067 G_L1_RELATIVE: 12.125 G_Regularizer: 0.000 validation_error: 21.642 +(epoch: 5, iters: 196992, time: 0.524, data: 0.000) G_L1: 13.670 G_L1_ABSOLUTE: 2.589 G_L1_RELATIVE: 11.081 G_Regularizer: 0.000 validation_error: 20.876 +(epoch: 5, iters: 198992, time: 0.531, 
data: 0.000) G_L1: 13.167 G_L1_ABSOLUTE: 2.790 G_L1_RELATIVE: 10.378 G_Regularizer: 0.000 validation_error: 20.930 +(epoch: 5, iters: 200992, time: 0.549, data: 0.000) G_L1: 17.505 G_L1_ABSOLUTE: 3.022 G_L1_RELATIVE: 14.482 G_Regularizer: 0.000 validation_error: 21.435 +(epoch: 5, iters: 202992, time: 0.543, data: 0.000) G_L1: 16.052 G_L1_ABSOLUTE: 2.844 G_L1_RELATIVE: 13.207 G_Regularizer: 0.000 validation_error: 19.793 +(epoch: 5, iters: 204992, time: 0.549, data: 0.000) G_L1: 16.025 G_L1_ABSOLUTE: 3.137 G_L1_RELATIVE: 12.888 G_Regularizer: 0.000 validation_error: 20.918 +(epoch: 5, iters: 206992, time: 0.545, data: 0.001) G_L1: 15.921 G_L1_ABSOLUTE: 3.198 G_L1_RELATIVE: 12.723 G_Regularizer: 0.000 validation_error: 21.422 +(epoch: 5, iters: 208992, time: 0.550, data: 0.000) G_L1: 17.209 G_L1_ABSOLUTE: 3.196 G_L1_RELATIVE: 14.013 G_Regularizer: 0.000 validation_error: 20.998 +(epoch: 5, iters: 210992, time: 0.544, data: 0.000) G_L1: 15.802 G_L1_ABSOLUTE: 2.725 G_L1_RELATIVE: 13.077 G_Regularizer: 0.000 validation_error: 20.975 +(epoch: 5, iters: 212992, time: 0.551, data: 0.000) G_L1: 20.727 G_L1_ABSOLUTE: 3.232 G_L1_RELATIVE: 17.495 G_Regularizer: 0.000 validation_error: 21.001 +(epoch: 5, iters: 214992, time: 0.546, data: 0.000) G_L1: 15.088 G_L1_ABSOLUTE: 2.792 G_L1_RELATIVE: 12.297 G_Regularizer: 0.000 validation_error: 21.220 +(epoch: 5, iters: 216992, time: 0.542, data: 0.000) G_L1: 19.945 G_L1_ABSOLUTE: 2.997 G_L1_RELATIVE: 16.947 G_Regularizer: 0.000 validation_error: 21.522 +(epoch: 5, iters: 218992, time: 0.541, data: 0.000) G_L1: 16.690 G_L1_ABSOLUTE: 3.029 G_L1_RELATIVE: 13.661 G_Regularizer: 0.000 validation_error: 20.675 +(epoch: 5, iters: 220992, time: 0.542, data: 0.000) G_L1: 15.623 G_L1_ABSOLUTE: 2.757 G_L1_RELATIVE: 12.866 G_Regularizer: 0.000 validation_error: 20.291 +(epoch: 5, iters: 222992, time: 0.549, data: 0.000) G_L1: 14.722 G_L1_ABSOLUTE: 3.237 G_L1_RELATIVE: 11.485 G_Regularizer: 0.000 validation_error: 21.328 +(epoch: 5, iters: 224992, time: 0.543, data: 0.000) G_L1: 15.755 G_L1_ABSOLUTE: 2.394 G_L1_RELATIVE: 13.361 G_Regularizer: 0.000 validation_error: 21.384 +(epoch: 5, iters: 226992, time: 0.541, data: 0.000) G_L1: 15.782 G_L1_ABSOLUTE: 3.088 G_L1_RELATIVE: 12.694 G_Regularizer: 0.000 validation_error: 21.200 +(epoch: 5, iters: 228992, time: 0.542, data: 0.000) G_L1: 14.813 G_L1_ABSOLUTE: 2.869 G_L1_RELATIVE: 11.944 G_Regularizer: 0.000 validation_error: 21.240 +(epoch: 5, iters: 230992, time: 0.545, data: 0.000) G_L1: 17.611 G_L1_ABSOLUTE: 2.796 G_L1_RELATIVE: 14.816 G_Regularizer: 0.000 validation_error: 21.975 +(epoch: 5, iters: 232992, time: 0.549, data: 0.000) G_L1: 14.335 G_L1_ABSOLUTE: 2.866 G_L1_RELATIVE: 11.470 G_Regularizer: 0.000 validation_error: 21.296 +(epoch: 5, iters: 234992, time: 0.543, data: 0.000) G_L1: 16.163 G_L1_ABSOLUTE: 3.306 G_L1_RELATIVE: 12.857 G_Regularizer: 0.000 validation_error: 21.280 +(epoch: 5, iters: 236992, time: 0.544, data: 0.000) G_L1: 13.168 G_L1_ABSOLUTE: 2.510 G_L1_RELATIVE: 10.658 G_Regularizer: 0.000 validation_error: 21.811 +(epoch: 5, iters: 238992, time: 0.544, data: 0.000) G_L1: 15.478 G_L1_ABSOLUTE: 2.908 G_L1_RELATIVE: 12.570 G_Regularizer: 0.000 validation_error: 21.095 +(epoch: 5, iters: 240992, time: 0.537, data: 0.000) G_L1: 16.343 G_L1_ABSOLUTE: 2.980 G_L1_RELATIVE: 13.363 G_Regularizer: 0.000 validation_error: 21.650 +(epoch: 5, iters: 242992, time: 0.543, data: 0.000) G_L1: 17.232 G_L1_ABSOLUTE: 2.674 G_L1_RELATIVE: 14.557 G_Regularizer: 0.000 validation_error: 21.806 +(epoch: 5, iters: 244992, 
time: 0.541, data: 0.000) G_L1: 12.887 G_L1_ABSOLUTE: 2.793 G_L1_RELATIVE: 10.095 G_Regularizer: 0.000 validation_error: 21.543 +(epoch: 5, iters: 246992, time: 0.551, data: 0.000) G_L1: 18.945 G_L1_ABSOLUTE: 2.895 G_L1_RELATIVE: 16.051 G_Regularizer: 0.000 validation_error: 21.097 +(epoch: 5, iters: 248992, time: 0.543, data: 0.000) G_L1: 17.340 G_L1_ABSOLUTE: 2.822 G_L1_RELATIVE: 14.518 G_Regularizer: 0.000 validation_error: 20.655 +(epoch: 5, iters: 250992, time: 0.546, data: 0.000) G_L1: 13.879 G_L1_ABSOLUTE: 2.608 G_L1_RELATIVE: 11.272 G_Regularizer: 0.000 validation_error: 21.294 +(epoch: 5, iters: 252992, time: 0.548, data: 0.000) G_L1: 16.900 G_L1_ABSOLUTE: 2.870 G_L1_RELATIVE: 14.029 G_Regularizer: 0.000 validation_error: 21.443 +(epoch: 5, iters: 254992, time: 0.545, data: 0.001) G_L1: 15.761 G_L1_ABSOLUTE: 2.905 G_L1_RELATIVE: 12.856 G_Regularizer: 0.000 validation_error: 21.263 +(epoch: 5, iters: 256992, time: 0.547, data: 0.000) G_L1: 15.379 G_L1_ABSOLUTE: 2.989 G_L1_RELATIVE: 12.390 G_Regularizer: 0.000 validation_error: 21.476 +(epoch: 5, iters: 258992, time: 0.549, data: 0.000) G_L1: 18.406 G_L1_ABSOLUTE: 3.294 G_L1_RELATIVE: 15.112 G_Regularizer: 0.000 validation_error: 20.858 +(epoch: 5, iters: 260992, time: 0.554, data: 0.000) G_L1: 15.002 G_L1_ABSOLUTE: 2.748 G_L1_RELATIVE: 12.255 G_Regularizer: 0.000 validation_error: 21.839 +(epoch: 5, iters: 262992, time: 0.541, data: 0.000) G_L1: 18.592 G_L1_ABSOLUTE: 2.828 G_L1_RELATIVE: 15.764 G_Regularizer: 0.000 validation_error: 21.559 +(epoch: 5, iters: 264992, time: 0.548, data: 0.000) G_L1: 16.400 G_L1_ABSOLUTE: 2.888 G_L1_RELATIVE: 13.512 G_Regularizer: 0.000 validation_error: 21.228 +(epoch: 5, iters: 266992, time: 0.539, data: 0.000) G_L1: 16.922 G_L1_ABSOLUTE: 3.311 G_L1_RELATIVE: 13.611 G_Regularizer: 0.000 validation_error: 20.668 +(epoch: 5, iters: 268992, time: 0.547, data: 0.000) G_L1: 16.334 G_L1_ABSOLUTE: 3.304 G_L1_RELATIVE: 13.030 G_Regularizer: 0.000 validation_error: 21.511 +(epoch: 5, iters: 270992, time: 0.545, data: 0.000) G_L1: 16.556 G_L1_ABSOLUTE: 2.924 G_L1_RELATIVE: 13.632 G_Regularizer: 0.000 validation_error: 20.518 +(epoch: 5, iters: 272992, time: 0.555, data: 0.000) G_L1: 20.760 G_L1_ABSOLUTE: 3.002 G_L1_RELATIVE: 17.758 G_Regularizer: 0.000 validation_error: 20.601 +(epoch: 5, iters: 274992, time: 0.545, data: 0.000) G_L1: 17.560 G_L1_ABSOLUTE: 3.227 G_L1_RELATIVE: 14.333 G_Regularizer: 0.000 validation_error: 21.247 +(epoch: 5, iters: 276992, time: 0.540, data: 0.000) G_L1: 14.889 G_L1_ABSOLUTE: 2.448 G_L1_RELATIVE: 12.441 G_Regularizer: 0.000 validation_error: 21.005 +(epoch: 5, iters: 278992, time: 0.544, data: 0.000) G_L1: 13.757 G_L1_ABSOLUTE: 2.810 G_L1_RELATIVE: 10.947 G_Regularizer: 0.000 validation_error: 21.299 +(epoch: 5, iters: 280992, time: 0.533, data: 0.000) G_L1: 18.719 G_L1_ABSOLUTE: 2.892 G_L1_RELATIVE: 15.828 G_Regularizer: 0.000 validation_error: 20.717 +(epoch: 5, iters: 282992, time: 0.544, data: 0.000) G_L1: 19.028 G_L1_ABSOLUTE: 3.058 G_L1_RELATIVE: 15.970 G_Regularizer: 0.000 validation_error: 20.760 +(epoch: 5, iters: 284992, time: 0.540, data: 0.000) G_L1: 16.437 G_L1_ABSOLUTE: 3.398 G_L1_RELATIVE: 13.039 G_Regularizer: 0.000 validation_error: 21.061 +(epoch: 5, iters: 286992, time: 0.544, data: 0.000) G_L1: 16.851 G_L1_ABSOLUTE: 2.656 G_L1_RELATIVE: 14.196 G_Regularizer: 0.000 validation_error: 20.939 +(epoch: 5, iters: 288992, time: 0.544, data: 0.000) G_L1: 17.301 G_L1_ABSOLUTE: 2.647 G_L1_RELATIVE: 14.654 G_Regularizer: 0.000 validation_error: 20.711 +(epoch: 5, 
iters: 290992, time: 0.538, data: 0.000) G_L1: 18.381 G_L1_ABSOLUTE: 3.082 G_L1_RELATIVE: 15.299 G_Regularizer: 0.000 validation_error: 21.061 +(epoch: 5, iters: 292992, time: 0.539, data: 0.000) G_L1: 19.436 G_L1_ABSOLUTE: 3.374 G_L1_RELATIVE: 16.062 G_Regularizer: 0.000 validation_error: 21.706 +(epoch: 5, iters: 294992, time: 0.548, data: 0.000) G_L1: 15.186 G_L1_ABSOLUTE: 2.892 G_L1_RELATIVE: 12.294 G_Regularizer: 0.000 validation_error: 20.577 +(epoch: 5, iters: 296992, time: 0.535, data: 0.000) G_L1: 16.207 G_L1_ABSOLUTE: 2.579 G_L1_RELATIVE: 13.628 G_Regularizer: 0.000 validation_error: 21.000 +(epoch: 5, iters: 298992, time: 0.543, data: 0.000) G_L1: 15.561 G_L1_ABSOLUTE: 3.015 G_L1_RELATIVE: 12.546 G_Regularizer: 0.000 validation_error: 20.748 +(epoch: 5, iters: 300992, time: 0.545, data: 0.000) G_L1: 17.729 G_L1_ABSOLUTE: 2.823 G_L1_RELATIVE: 14.906 G_Regularizer: 0.000 validation_error: 20.768 +(epoch: 6, iters: 240, time: 0.542, data: 0.000) G_L1: 16.193 G_L1_ABSOLUTE: 2.686 G_L1_RELATIVE: 13.507 G_Regularizer: 0.000 validation_error: 21.195 +(epoch: 6, iters: 2240, time: 0.549, data: 0.000) G_L1: 15.056 G_L1_ABSOLUTE: 2.957 G_L1_RELATIVE: 12.099 G_Regularizer: 0.000 validation_error: 20.636 +(epoch: 6, iters: 4240, time: 0.548, data: 0.000) G_L1: 17.886 G_L1_ABSOLUTE: 2.863 G_L1_RELATIVE: 15.023 G_Regularizer: 0.000 validation_error: 21.398 +(epoch: 6, iters: 6240, time: 0.543, data: 0.000) G_L1: 18.168 G_L1_ABSOLUTE: 2.751 G_L1_RELATIVE: 15.416 G_Regularizer: 0.000 validation_error: 20.790 +(epoch: 6, iters: 8240, time: 0.547, data: 0.000) G_L1: 15.935 G_L1_ABSOLUTE: 3.023 G_L1_RELATIVE: 12.912 G_Regularizer: 0.000 validation_error: 20.934 +(epoch: 6, iters: 10240, time: 0.540, data: 0.000) G_L1: 19.362 G_L1_ABSOLUTE: 3.176 G_L1_RELATIVE: 16.186 G_Regularizer: 0.000 validation_error: 22.686 +(epoch: 6, iters: 12240, time: 0.546, data: 0.000) G_L1: 14.796 G_L1_ABSOLUTE: 3.008 G_L1_RELATIVE: 11.788 G_Regularizer: 0.000 validation_error: 20.897 +(epoch: 6, iters: 14240, time: 0.540, data: 0.001) G_L1: 16.099 G_L1_ABSOLUTE: 2.815 G_L1_RELATIVE: 13.284 G_Regularizer: 0.000 validation_error: 21.051 +(epoch: 6, iters: 16240, time: 0.543, data: 0.000) G_L1: 16.948 G_L1_ABSOLUTE: 2.767 G_L1_RELATIVE: 14.180 G_Regularizer: 0.000 validation_error: 21.257 +(epoch: 6, iters: 18240, time: 0.550, data: 0.000) G_L1: 16.655 G_L1_ABSOLUTE: 3.055 G_L1_RELATIVE: 13.601 G_Regularizer: 0.000 validation_error: 20.445 +(epoch: 6, iters: 20240, time: 0.542, data: 0.000) G_L1: 15.364 G_L1_ABSOLUTE: 2.522 G_L1_RELATIVE: 12.842 G_Regularizer: 0.000 validation_error: 21.024 +(epoch: 6, iters: 22240, time: 0.543, data: 0.000) G_L1: 17.821 G_L1_ABSOLUTE: 2.992 G_L1_RELATIVE: 14.829 G_Regularizer: 0.000 validation_error: 20.789 +(epoch: 6, iters: 24240, time: 0.543, data: 0.000) G_L1: 14.774 G_L1_ABSOLUTE: 2.638 G_L1_RELATIVE: 12.135 G_Regularizer: 0.000 validation_error: 20.975 +(epoch: 6, iters: 26240, time: 0.542, data: 0.000) G_L1: 21.330 G_L1_ABSOLUTE: 3.396 G_L1_RELATIVE: 17.934 G_Regularizer: 0.000 validation_error: 21.287 +(epoch: 6, iters: 28240, time: 0.539, data: 0.000) G_L1: 17.168 G_L1_ABSOLUTE: 3.573 G_L1_RELATIVE: 13.594 G_Regularizer: 0.000 validation_error: 20.952 +(epoch: 6, iters: 30240, time: 0.534, data: 0.000) G_L1: 13.083 G_L1_ABSOLUTE: 2.807 G_L1_RELATIVE: 10.276 G_Regularizer: 0.000 validation_error: 20.281 +(epoch: 6, iters: 32240, time: 0.546, data: 0.000) G_L1: 14.334 G_L1_ABSOLUTE: 2.988 G_L1_RELATIVE: 11.347 G_Regularizer: 0.000 validation_error: 21.197 +(epoch: 6, iters: 
34240, time: 0.545, data: 0.000) G_L1: 15.807 G_L1_ABSOLUTE: 2.894 G_L1_RELATIVE: 12.913 G_Regularizer: 0.000 validation_error: 21.105 +(epoch: 6, iters: 36240, time: 0.540, data: 0.000) G_L1: 16.104 G_L1_ABSOLUTE: 3.217 G_L1_RELATIVE: 12.887 G_Regularizer: 0.000 validation_error: 20.790 +(epoch: 6, iters: 38240, time: 0.540, data: 0.000) G_L1: 13.744 G_L1_ABSOLUTE: 2.733 G_L1_RELATIVE: 11.011 G_Regularizer: 0.000 validation_error: 21.162 +(epoch: 6, iters: 40240, time: 0.546, data: 0.000) G_L1: 16.080 G_L1_ABSOLUTE: 3.224 G_L1_RELATIVE: 12.856 G_Regularizer: 0.000 validation_error: 20.700 +(epoch: 6, iters: 42240, time: 0.544, data: 0.000) G_L1: 13.914 G_L1_ABSOLUTE: 2.249 G_L1_RELATIVE: 11.665 G_Regularizer: 0.000 validation_error: 20.292 +(epoch: 6, iters: 44240, time: 0.546, data: 0.000) G_L1: 17.629 G_L1_ABSOLUTE: 2.979 G_L1_RELATIVE: 14.650 G_Regularizer: 0.000 validation_error: 20.891 +(epoch: 6, iters: 46240, time: 0.547, data: 0.000) G_L1: 17.605 G_L1_ABSOLUTE: 2.720 G_L1_RELATIVE: 14.884 G_Regularizer: 0.000 validation_error: 21.015 +(epoch: 6, iters: 48240, time: 0.546, data: 0.000) G_L1: 15.357 G_L1_ABSOLUTE: 3.188 G_L1_RELATIVE: 12.169 G_Regularizer: 0.000 validation_error: 21.207 +(epoch: 6, iters: 50240, time: 0.549, data: 0.000) G_L1: 16.898 G_L1_ABSOLUTE: 3.118 G_L1_RELATIVE: 13.780 G_Regularizer: 0.000 validation_error: 21.300 +(epoch: 6, iters: 52240, time: 0.541, data: 0.000) G_L1: 19.373 G_L1_ABSOLUTE: 3.246 G_L1_RELATIVE: 16.127 G_Regularizer: 0.000 validation_error: 20.442 +(epoch: 6, iters: 54240, time: 0.547, data: 0.000) G_L1: 18.230 G_L1_ABSOLUTE: 2.957 G_L1_RELATIVE: 15.272 G_Regularizer: 0.000 validation_error: 21.316 +(epoch: 6, iters: 56240, time: 0.543, data: 0.000) G_L1: 14.731 G_L1_ABSOLUTE: 2.554 G_L1_RELATIVE: 12.177 G_Regularizer: 0.000 validation_error: 20.653 +(epoch: 6, iters: 58240, time: 0.549, data: 0.000) G_L1: 17.763 G_L1_ABSOLUTE: 3.005 G_L1_RELATIVE: 14.758 G_Regularizer: 0.000 validation_error: 20.795 +(epoch: 6, iters: 60240, time: 0.539, data: 0.000) G_L1: 14.467 G_L1_ABSOLUTE: 2.906 G_L1_RELATIVE: 11.561 G_Regularizer: 0.000 validation_error: 21.407 +(epoch: 6, iters: 62240, time: 0.547, data: 0.000) G_L1: 16.870 G_L1_ABSOLUTE: 2.657 G_L1_RELATIVE: 14.213 G_Regularizer: 0.000 validation_error: 20.559 +(epoch: 6, iters: 64240, time: 0.546, data: 0.000) G_L1: 18.308 G_L1_ABSOLUTE: 2.776 G_L1_RELATIVE: 15.531 G_Regularizer: 0.000 validation_error: 20.864 +(epoch: 6, iters: 66240, time: 0.541, data: 0.000) G_L1: 16.301 G_L1_ABSOLUTE: 2.725 G_L1_RELATIVE: 13.576 G_Regularizer: 0.000 validation_error: 21.027 +(epoch: 6, iters: 68240, time: 0.547, data: 0.000) G_L1: 17.460 G_L1_ABSOLUTE: 2.747 G_L1_RELATIVE: 14.712 G_Regularizer: 0.000 validation_error: 21.079 +(epoch: 6, iters: 70240, time: 0.549, data: 0.000) G_L1: 20.230 G_L1_ABSOLUTE: 2.877 G_L1_RELATIVE: 17.353 G_Regularizer: 0.000 validation_error: 21.018 +(epoch: 6, iters: 72240, time: 0.547, data: 0.000) G_L1: 18.004 G_L1_ABSOLUTE: 3.023 G_L1_RELATIVE: 14.981 G_Regularizer: 0.000 validation_error: 20.934 +(epoch: 6, iters: 74240, time: 0.548, data: 0.000) G_L1: 15.911 G_L1_ABSOLUTE: 2.925 G_L1_RELATIVE: 12.986 G_Regularizer: 0.000 validation_error: 20.854 +(epoch: 6, iters: 76240, time: 0.542, data: 0.000) G_L1: 16.619 G_L1_ABSOLUTE: 2.946 G_L1_RELATIVE: 13.673 G_Regularizer: 0.000 validation_error: 20.799 +(epoch: 6, iters: 78240, time: 0.546, data: 0.000) G_L1: 15.331 G_L1_ABSOLUTE: 2.588 G_L1_RELATIVE: 12.743 G_Regularizer: 0.000 validation_error: 21.296 +(epoch: 6, iters: 80240, time: 
0.542, data: 0.000) G_L1: 14.614 G_L1_ABSOLUTE: 2.569 G_L1_RELATIVE: 12.046 G_Regularizer: 0.000 validation_error: 21.117 +(epoch: 6, iters: 82240, time: 0.547, data: 0.000) G_L1: 15.960 G_L1_ABSOLUTE: 2.543 G_L1_RELATIVE: 13.417 G_Regularizer: 0.000 validation_error: 20.625 +(epoch: 6, iters: 84240, time: 0.550, data: 0.000) G_L1: 15.596 G_L1_ABSOLUTE: 2.662 G_L1_RELATIVE: 12.934 G_Regularizer: 0.000 validation_error: 21.694 +(epoch: 6, iters: 86240, time: 0.542, data: 0.000) G_L1: 16.010 G_L1_ABSOLUTE: 2.647 G_L1_RELATIVE: 13.362 G_Regularizer: 0.000 validation_error: 21.115 +(epoch: 6, iters: 88240, time: 0.544, data: 0.000) G_L1: 18.031 G_L1_ABSOLUTE: 3.014 G_L1_RELATIVE: 15.016 G_Regularizer: 0.000 validation_error: 21.035 +(epoch: 6, iters: 90240, time: 0.546, data: 0.001) G_L1: 17.389 G_L1_ABSOLUTE: 2.650 G_L1_RELATIVE: 14.739 G_Regularizer: 0.000 validation_error: 20.744 +(epoch: 6, iters: 92240, time: 0.542, data: 0.000) G_L1: 14.932 G_L1_ABSOLUTE: 2.692 G_L1_RELATIVE: 12.240 G_Regularizer: 0.000 validation_error: 20.896 +(epoch: 6, iters: 94240, time: 0.548, data: 0.000) G_L1: 15.241 G_L1_ABSOLUTE: 3.093 G_L1_RELATIVE: 12.147 G_Regularizer: 0.000 validation_error: 20.566 +(epoch: 6, iters: 96240, time: 0.547, data: 0.000) G_L1: 18.588 G_L1_ABSOLUTE: 2.930 G_L1_RELATIVE: 15.658 G_Regularizer: 0.000 validation_error: 21.328 +(epoch: 6, iters: 98240, time: 0.544, data: 0.000) G_L1: 18.605 G_L1_ABSOLUTE: 2.857 G_L1_RELATIVE: 15.748 G_Regularizer: 0.000 validation_error: 20.860 +(epoch: 6, iters: 100240, time: 0.543, data: 0.000) G_L1: 15.938 G_L1_ABSOLUTE: 2.886 G_L1_RELATIVE: 13.053 G_Regularizer: 0.000 validation_error: 20.752 +(epoch: 6, iters: 102240, time: 0.542, data: 0.000) G_L1: 15.017 G_L1_ABSOLUTE: 2.691 G_L1_RELATIVE: 12.326 G_Regularizer: 0.000 validation_error: 21.923 +(epoch: 6, iters: 104240, time: 0.555, data: 0.000) G_L1: 17.065 G_L1_ABSOLUTE: 3.598 G_L1_RELATIVE: 13.467 G_Regularizer: 0.000 validation_error: 21.277 +(epoch: 6, iters: 106240, time: 0.542, data: 0.000) G_L1: 16.373 G_L1_ABSOLUTE: 2.733 G_L1_RELATIVE: 13.639 G_Regularizer: 0.000 validation_error: 20.512 +(epoch: 6, iters: 108240, time: 0.547, data: 0.000) G_L1: 17.522 G_L1_ABSOLUTE: 2.490 G_L1_RELATIVE: 15.031 G_Regularizer: 0.000 validation_error: 20.971 +(epoch: 6, iters: 110240, time: 0.548, data: 0.000) G_L1: 16.326 G_L1_ABSOLUTE: 3.191 G_L1_RELATIVE: 13.135 G_Regularizer: 0.000 validation_error: 21.113 +(epoch: 6, iters: 112240, time: 0.542, data: 0.000) G_L1: 15.629 G_L1_ABSOLUTE: 3.486 G_L1_RELATIVE: 12.143 G_Regularizer: 0.000 validation_error: 21.058 +(epoch: 6, iters: 114240, time: 0.544, data: 0.000) G_L1: 18.839 G_L1_ABSOLUTE: 2.812 G_L1_RELATIVE: 16.026 G_Regularizer: 0.000 validation_error: 21.161 +(epoch: 6, iters: 116240, time: 0.541, data: 0.000) G_L1: 13.580 G_L1_ABSOLUTE: 2.425 G_L1_RELATIVE: 11.155 G_Regularizer: 0.000 validation_error: 21.138 +(epoch: 6, iters: 118240, time: 0.546, data: 0.000) G_L1: 16.520 G_L1_ABSOLUTE: 2.534 G_L1_RELATIVE: 13.986 G_Regularizer: 0.000 validation_error: 21.329 +(epoch: 6, iters: 120240, time: 0.549, data: 0.000) G_L1: 16.249 G_L1_ABSOLUTE: 2.978 G_L1_RELATIVE: 13.270 G_Regularizer: 0.000 validation_error: 20.401 +(epoch: 6, iters: 122240, time: 0.544, data: 0.000) G_L1: 14.663 G_L1_ABSOLUTE: 2.624 G_L1_RELATIVE: 12.039 G_Regularizer: 0.000 validation_error: 20.699 +(epoch: 6, iters: 124240, time: 0.536, data: 0.000) G_L1: 15.925 G_L1_ABSOLUTE: 3.021 G_L1_RELATIVE: 12.903 G_Regularizer: 0.000 validation_error: 21.166 +(epoch: 6, iters: 126240, 
time: 0.539, data: 0.000) G_L1: 21.046 G_L1_ABSOLUTE: 2.758 G_L1_RELATIVE: 18.287 G_Regularizer: 0.000 validation_error: 21.553 +(epoch: 6, iters: 128240, time: 0.547, data: 0.000) G_L1: 16.882 G_L1_ABSOLUTE: 3.099 G_L1_RELATIVE: 13.783 G_Regularizer: 0.000 validation_error: 20.784 +(epoch: 6, iters: 130240, time: 0.555, data: 0.000) G_L1: 16.164 G_L1_ABSOLUTE: 2.908 G_L1_RELATIVE: 13.256 G_Regularizer: 0.000 validation_error: 20.737 +(epoch: 6, iters: 132240, time: 0.549, data: 0.000) G_L1: 14.700 G_L1_ABSOLUTE: 2.571 G_L1_RELATIVE: 12.128 G_Regularizer: 0.000 validation_error: 21.349 +(epoch: 6, iters: 134240, time: 0.541, data: 0.000) G_L1: 15.702 G_L1_ABSOLUTE: 2.670 G_L1_RELATIVE: 13.032 G_Regularizer: 0.000 validation_error: 21.009 +(epoch: 6, iters: 136240, time: 0.553, data: 0.000) G_L1: 21.830 G_L1_ABSOLUTE: 3.493 G_L1_RELATIVE: 18.337 G_Regularizer: 0.000 validation_error: 20.923 +(epoch: 6, iters: 138240, time: 0.549, data: 0.000) G_L1: 16.293 G_L1_ABSOLUTE: 3.088 G_L1_RELATIVE: 13.205 G_Regularizer: 0.000 validation_error: 21.205 +(epoch: 6, iters: 140240, time: 0.542, data: 0.000) G_L1: 13.667 G_L1_ABSOLUTE: 2.286 G_L1_RELATIVE: 11.381 G_Regularizer: 0.000 validation_error: 21.096 +(epoch: 6, iters: 142240, time: 0.552, data: 0.000) G_L1: 21.931 G_L1_ABSOLUTE: 2.770 G_L1_RELATIVE: 19.161 G_Regularizer: 0.000 validation_error: 20.819 +(epoch: 6, iters: 144240, time: 0.543, data: 0.000) G_L1: 16.510 G_L1_ABSOLUTE: 2.922 G_L1_RELATIVE: 13.588 G_Regularizer: 0.000 validation_error: 20.791 +(epoch: 6, iters: 146240, time: 0.550, data: 0.000) G_L1: 15.841 G_L1_ABSOLUTE: 2.599 G_L1_RELATIVE: 13.241 G_Regularizer: 0.000 validation_error: 21.473 +(epoch: 6, iters: 148240, time: 0.541, data: 0.000) G_L1: 16.553 G_L1_ABSOLUTE: 2.738 G_L1_RELATIVE: 13.816 G_Regularizer: 0.000 validation_error: 21.066 +(epoch: 6, iters: 150240, time: 0.542, data: 0.000) G_L1: 15.483 G_L1_ABSOLUTE: 2.921 G_L1_RELATIVE: 12.562 G_Regularizer: 0.000 validation_error: 22.378 +(epoch: 6, iters: 152240, time: 0.547, data: 0.000) G_L1: 16.069 G_L1_ABSOLUTE: 3.265 G_L1_RELATIVE: 12.803 G_Regularizer: 0.000 validation_error: 21.070 +(epoch: 6, iters: 154240, time: 0.539, data: 0.000) G_L1: 16.541 G_L1_ABSOLUTE: 2.372 G_L1_RELATIVE: 14.169 G_Regularizer: 0.000 validation_error: 21.590 +(epoch: 6, iters: 156240, time: 0.548, data: 0.000) G_L1: 17.000 G_L1_ABSOLUTE: 2.788 G_L1_RELATIVE: 14.212 G_Regularizer: 0.000 validation_error: 21.530 +(epoch: 6, iters: 158240, time: 0.535, data: 0.000) G_L1: 13.460 G_L1_ABSOLUTE: 2.373 G_L1_RELATIVE: 11.087 G_Regularizer: 0.000 validation_error: 20.780 +(epoch: 6, iters: 160240, time: 0.543, data: 0.001) G_L1: 14.255 G_L1_ABSOLUTE: 2.447 G_L1_RELATIVE: 11.808 G_Regularizer: 0.000 validation_error: 21.350 +(epoch: 6, iters: 162240, time: 0.545, data: 0.000) G_L1: 17.160 G_L1_ABSOLUTE: 2.724 G_L1_RELATIVE: 14.436 G_Regularizer: 0.000 validation_error: 21.780 +(epoch: 6, iters: 164240, time: 0.540, data: 0.000) G_L1: 17.760 G_L1_ABSOLUTE: 2.646 G_L1_RELATIVE: 15.114 G_Regularizer: 0.000 validation_error: 20.557 +(epoch: 6, iters: 166240, time: 0.542, data: 0.000) G_L1: 15.203 G_L1_ABSOLUTE: 2.727 G_L1_RELATIVE: 12.476 G_Regularizer: 0.000 validation_error: 20.299 +(epoch: 6, iters: 168240, time: 0.542, data: 0.000) G_L1: 17.053 G_L1_ABSOLUTE: 3.043 G_L1_RELATIVE: 14.010 G_Regularizer: 0.000 validation_error: 21.099 +(epoch: 6, iters: 170240, time: 0.548, data: 0.000) G_L1: 17.217 G_L1_ABSOLUTE: 2.785 G_L1_RELATIVE: 14.432 G_Regularizer: 0.000 validation_error: 20.801 +(epoch: 6, 
+[Training loss log, condensed: this part of the added log file covers epochs 6 through 9, with one entry every 2,000 iterations (roughly 150 entries per epoch, epochs running to ~302,000 iterations). Each entry has the form
+(epoch: E, iters: I, time: T, data: D) G_L1: X G_L1_ABSOLUTE: A G_L1_RELATIVE: R G_Regularizer: 0.000 validation_error: V
+where G_L1 = G_L1_ABSOLUTE + G_L1_RELATIVE (within rounding). Over this span G_L1 fluctuates roughly between 10.7 and 25.6 with no clear trend, G_L1_ABSOLUTE stays near 2.2–4.1, G_Regularizer is always 0.000, validation_error hovers between about 19.4 and 22.0, per-iteration time is ~0.54 s with stretches at ~0.96 s, and data-loading time is ~0.000 s. The log continues in the same format.]
9, iters: 227984, time: 0.545, data: 0.001) G_L1: 13.509 G_L1_ABSOLUTE: 3.088 G_L1_RELATIVE: 10.421 G_Regularizer: 0.000 validation_error: 20.153 +(epoch: 9, iters: 229984, time: 0.546, data: 0.000) G_L1: 15.049 G_L1_ABSOLUTE: 2.466 G_L1_RELATIVE: 12.584 G_Regularizer: 0.000 validation_error: 20.307 +(epoch: 9, iters: 231984, time: 0.630, data: 0.000) G_L1: 13.619 G_L1_ABSOLUTE: 2.670 G_L1_RELATIVE: 10.949 G_Regularizer: 0.000 validation_error: 20.469 +(epoch: 9, iters: 233984, time: 0.624, data: 0.000) G_L1: 15.607 G_L1_ABSOLUTE: 2.590 G_L1_RELATIVE: 13.017 G_Regularizer: 0.000 validation_error: 20.949 +(epoch: 9, iters: 235984, time: 0.617, data: 0.000) G_L1: 14.109 G_L1_ABSOLUTE: 3.144 G_L1_RELATIVE: 10.965 G_Regularizer: 0.000 validation_error: 20.604 +(epoch: 9, iters: 237984, time: 0.548, data: 0.000) G_L1: 19.180 G_L1_ABSOLUTE: 2.959 G_L1_RELATIVE: 16.222 G_Regularizer: 0.000 validation_error: 20.905 +(epoch: 9, iters: 239984, time: 0.545, data: 0.000) G_L1: 13.808 G_L1_ABSOLUTE: 2.640 G_L1_RELATIVE: 11.168 G_Regularizer: 0.000 validation_error: 20.471 +(epoch: 9, iters: 241984, time: 0.550, data: 0.000) G_L1: 12.543 G_L1_ABSOLUTE: 2.620 G_L1_RELATIVE: 9.923 G_Regularizer: 0.000 validation_error: 21.082 +(epoch: 9, iters: 243984, time: 0.551, data: 0.000) G_L1: 15.811 G_L1_ABSOLUTE: 2.586 G_L1_RELATIVE: 13.225 G_Regularizer: 0.000 validation_error: 20.474 +(epoch: 9, iters: 245984, time: 0.545, data: 0.000) G_L1: 14.515 G_L1_ABSOLUTE: 3.198 G_L1_RELATIVE: 11.317 G_Regularizer: 0.000 validation_error: 20.509 +(epoch: 9, iters: 247984, time: 0.538, data: 0.000) G_L1: 12.108 G_L1_ABSOLUTE: 3.111 G_L1_RELATIVE: 8.996 G_Regularizer: 0.000 validation_error: 20.513 +(epoch: 9, iters: 249984, time: 0.540, data: 0.000) G_L1: 15.355 G_L1_ABSOLUTE: 2.145 G_L1_RELATIVE: 13.210 G_Regularizer: 0.000 validation_error: 20.125 +(epoch: 9, iters: 251984, time: 0.543, data: 0.000) G_L1: 16.205 G_L1_ABSOLUTE: 3.125 G_L1_RELATIVE: 13.081 G_Regularizer: 0.000 validation_error: 20.205 +(epoch: 9, iters: 253984, time: 0.548, data: 0.000) G_L1: 13.934 G_L1_ABSOLUTE: 2.552 G_L1_RELATIVE: 11.382 G_Regularizer: 0.000 validation_error: 20.574 +(epoch: 9, iters: 255984, time: 0.548, data: 0.000) G_L1: 12.349 G_L1_ABSOLUTE: 2.702 G_L1_RELATIVE: 9.647 G_Regularizer: 0.000 validation_error: 21.705 +(epoch: 9, iters: 257984, time: 0.545, data: 0.001) G_L1: 14.679 G_L1_ABSOLUTE: 2.703 G_L1_RELATIVE: 11.976 G_Regularizer: 0.000 validation_error: 20.335 +(epoch: 9, iters: 259984, time: 0.545, data: 0.000) G_L1: 13.881 G_L1_ABSOLUTE: 2.299 G_L1_RELATIVE: 11.582 G_Regularizer: 0.000 validation_error: 20.830 +(epoch: 9, iters: 261984, time: 0.543, data: 0.000) G_L1: 15.650 G_L1_ABSOLUTE: 2.610 G_L1_RELATIVE: 13.040 G_Regularizer: 0.000 validation_error: 20.567 +(epoch: 9, iters: 263984, time: 0.546, data: 0.000) G_L1: 15.620 G_L1_ABSOLUTE: 2.622 G_L1_RELATIVE: 12.998 G_Regularizer: 0.000 validation_error: 20.940 +(epoch: 9, iters: 265984, time: 0.549, data: 0.000) G_L1: 15.056 G_L1_ABSOLUTE: 2.268 G_L1_RELATIVE: 12.788 G_Regularizer: 0.000 validation_error: 20.753 +(epoch: 9, iters: 267984, time: 0.546, data: 0.000) G_L1: 15.834 G_L1_ABSOLUTE: 2.958 G_L1_RELATIVE: 12.875 G_Regularizer: 0.000 validation_error: 19.715 +(epoch: 9, iters: 269984, time: 0.546, data: 0.000) G_L1: 16.128 G_L1_ABSOLUTE: 2.894 G_L1_RELATIVE: 13.233 G_Regularizer: 0.000 validation_error: 20.263 +(epoch: 9, iters: 271984, time: 0.549, data: 0.000) G_L1: 15.477 G_L1_ABSOLUTE: 2.419 G_L1_RELATIVE: 13.058 G_Regularizer: 0.000 validation_error: 21.156 
+(epoch: 9, iters: 273984, time: 0.548, data: 0.000) G_L1: 14.259 G_L1_ABSOLUTE: 2.706 G_L1_RELATIVE: 11.553 G_Regularizer: 0.000 validation_error: 21.190 +(epoch: 9, iters: 275984, time: 0.545, data: 0.000) G_L1: 14.822 G_L1_ABSOLUTE: 2.796 G_L1_RELATIVE: 12.026 G_Regularizer: 0.000 validation_error: 20.103 +(epoch: 9, iters: 277984, time: 0.545, data: 0.000) G_L1: 14.809 G_L1_ABSOLUTE: 2.932 G_L1_RELATIVE: 11.877 G_Regularizer: 0.000 validation_error: 20.722 +(epoch: 9, iters: 279984, time: 0.546, data: 0.000) G_L1: 15.399 G_L1_ABSOLUTE: 2.528 G_L1_RELATIVE: 12.871 G_Regularizer: 0.000 validation_error: 20.763 +(epoch: 9, iters: 281984, time: 0.547, data: 0.000) G_L1: 15.017 G_L1_ABSOLUTE: 2.614 G_L1_RELATIVE: 12.403 G_Regularizer: 0.000 validation_error: 20.419 +(epoch: 9, iters: 283984, time: 0.542, data: 0.000) G_L1: 22.202 G_L1_ABSOLUTE: 2.630 G_L1_RELATIVE: 19.572 G_Regularizer: 0.000 validation_error: 20.691 +(epoch: 9, iters: 285984, time: 0.551, data: 0.000) G_L1: 15.115 G_L1_ABSOLUTE: 2.395 G_L1_RELATIVE: 12.721 G_Regularizer: 0.000 validation_error: 19.677 +(epoch: 9, iters: 287984, time: 0.546, data: 0.000) G_L1: 14.265 G_L1_ABSOLUTE: 3.070 G_L1_RELATIVE: 11.195 G_Regularizer: 0.000 validation_error: 20.434 +(epoch: 9, iters: 289984, time: 0.542, data: 0.000) G_L1: 15.141 G_L1_ABSOLUTE: 2.525 G_L1_RELATIVE: 12.616 G_Regularizer: 0.000 validation_error: 20.186 +(epoch: 9, iters: 291984, time: 0.547, data: 0.000) G_L1: 15.511 G_L1_ABSOLUTE: 2.730 G_L1_RELATIVE: 12.781 G_Regularizer: 0.000 validation_error: 20.067 +(epoch: 9, iters: 293984, time: 0.550, data: 0.000) G_L1: 15.141 G_L1_ABSOLUTE: 2.467 G_L1_RELATIVE: 12.674 G_Regularizer: 0.000 validation_error: 21.343 +(epoch: 9, iters: 295984, time: 0.548, data: 0.000) G_L1: 15.472 G_L1_ABSOLUTE: 2.683 G_L1_RELATIVE: 12.789 G_Regularizer: 0.000 validation_error: 20.261 +(epoch: 9, iters: 297984, time: 0.549, data: 0.000) G_L1: 17.730 G_L1_ABSOLUTE: 2.777 G_L1_RELATIVE: 14.953 G_Regularizer: 0.000 validation_error: 20.460 +(epoch: 9, iters: 299984, time: 0.548, data: 0.000) G_L1: 17.515 G_L1_ABSOLUTE: 2.872 G_L1_RELATIVE: 14.643 G_Regularizer: 0.000 validation_error: 20.756 +(epoch: 9, iters: 301984, time: 0.542, data: 0.000) G_L1: 17.828 G_L1_ABSOLUTE: 3.263 G_L1_RELATIVE: 14.565 G_Regularizer: 0.000 validation_error: 21.205 +(epoch: 10, iters: 1232, time: 0.544, data: 0.000) G_L1: 15.572 G_L1_ABSOLUTE: 2.912 G_L1_RELATIVE: 12.661 G_Regularizer: 0.000 validation_error: 20.653 +(epoch: 10, iters: 3232, time: 0.551, data: 0.000) G_L1: 17.238 G_L1_ABSOLUTE: 2.627 G_L1_RELATIVE: 14.611 G_Regularizer: 0.000 validation_error: 20.774 +(epoch: 10, iters: 5232, time: 0.545, data: 0.000) G_L1: 15.144 G_L1_ABSOLUTE: 2.691 G_L1_RELATIVE: 12.453 G_Regularizer: 0.000 validation_error: 20.923 +(epoch: 10, iters: 7232, time: 0.547, data: 0.001) G_L1: 15.155 G_L1_ABSOLUTE: 3.141 G_L1_RELATIVE: 12.014 G_Regularizer: 0.000 validation_error: 20.582 +(epoch: 10, iters: 9232, time: 0.548, data: 0.000) G_L1: 18.825 G_L1_ABSOLUTE: 2.671 G_L1_RELATIVE: 16.154 G_Regularizer: 0.000 validation_error: 20.148 +(epoch: 10, iters: 11232, time: 0.548, data: 0.000) G_L1: 14.238 G_L1_ABSOLUTE: 2.522 G_L1_RELATIVE: 11.716 G_Regularizer: 0.000 validation_error: 20.464 +(epoch: 10, iters: 13232, time: 0.539, data: 0.000) G_L1: 19.797 G_L1_ABSOLUTE: 2.519 G_L1_RELATIVE: 17.278 G_Regularizer: 0.000 validation_error: 20.471 +(epoch: 10, iters: 15232, time: 0.545, data: 0.001) G_L1: 15.785 G_L1_ABSOLUTE: 2.663 G_L1_RELATIVE: 13.122 G_Regularizer: 0.000 validation_error: 
20.022 +(epoch: 10, iters: 17232, time: 0.547, data: 0.000) G_L1: 14.396 G_L1_ABSOLUTE: 2.677 G_L1_RELATIVE: 11.719 G_Regularizer: 0.000 validation_error: 20.340 +(epoch: 10, iters: 19232, time: 0.550, data: 0.000) G_L1: 16.141 G_L1_ABSOLUTE: 3.529 G_L1_RELATIVE: 12.612 G_Regularizer: 0.000 validation_error: 20.958 +(epoch: 10, iters: 21232, time: 0.547, data: 0.000) G_L1: 17.689 G_L1_ABSOLUTE: 2.461 G_L1_RELATIVE: 15.227 G_Regularizer: 0.000 validation_error: 21.075 +(epoch: 10, iters: 23232, time: 0.540, data: 0.000) G_L1: 17.677 G_L1_ABSOLUTE: 2.750 G_L1_RELATIVE: 14.928 G_Regularizer: 0.000 validation_error: 21.038 +(epoch: 10, iters: 25232, time: 0.545, data: 0.000) G_L1: 15.482 G_L1_ABSOLUTE: 2.788 G_L1_RELATIVE: 12.693 G_Regularizer: 0.000 validation_error: 20.857 +(epoch: 10, iters: 27232, time: 0.542, data: 0.000) G_L1: 16.848 G_L1_ABSOLUTE: 2.528 G_L1_RELATIVE: 14.320 G_Regularizer: 0.000 validation_error: 20.402 +(epoch: 10, iters: 29232, time: 0.542, data: 0.000) G_L1: 18.471 G_L1_ABSOLUTE: 3.160 G_L1_RELATIVE: 15.311 G_Regularizer: 0.000 validation_error: 20.398 +(epoch: 10, iters: 31232, time: 0.550, data: 0.000) G_L1: 13.583 G_L1_ABSOLUTE: 2.638 G_L1_RELATIVE: 10.945 G_Regularizer: 0.000 validation_error: 21.071 +(epoch: 10, iters: 33232, time: 0.545, data: 0.000) G_L1: 18.439 G_L1_ABSOLUTE: 2.983 G_L1_RELATIVE: 15.456 G_Regularizer: 0.000 validation_error: 20.807 +(epoch: 10, iters: 35232, time: 0.547, data: 0.000) G_L1: 14.395 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 11.942 G_Regularizer: 0.000 validation_error: 21.390 +(epoch: 10, iters: 37232, time: 0.552, data: 0.000) G_L1: 15.150 G_L1_ABSOLUTE: 2.762 G_L1_RELATIVE: 12.388 G_Regularizer: 0.000 validation_error: 20.524 +(epoch: 10, iters: 39232, time: 0.538, data: 0.000) G_L1: 15.819 G_L1_ABSOLUTE: 2.938 G_L1_RELATIVE: 12.881 G_Regularizer: 0.000 validation_error: 21.298 +(epoch: 10, iters: 41232, time: 0.543, data: 0.000) G_L1: 14.680 G_L1_ABSOLUTE: 2.415 G_L1_RELATIVE: 12.264 G_Regularizer: 0.000 validation_error: 20.840 +(epoch: 10, iters: 43232, time: 0.553, data: 0.000) G_L1: 15.394 G_L1_ABSOLUTE: 2.847 G_L1_RELATIVE: 12.547 G_Regularizer: 0.000 validation_error: 20.938 +(epoch: 10, iters: 45232, time: 0.542, data: 0.000) G_L1: 17.995 G_L1_ABSOLUTE: 2.546 G_L1_RELATIVE: 15.449 G_Regularizer: 0.000 validation_error: 20.217 +(epoch: 10, iters: 47232, time: 0.547, data: 0.000) G_L1: 14.848 G_L1_ABSOLUTE: 2.600 G_L1_RELATIVE: 12.247 G_Regularizer: 0.000 validation_error: 20.282 +(epoch: 10, iters: 49232, time: 0.545, data: 0.000) G_L1: 15.107 G_L1_ABSOLUTE: 2.399 G_L1_RELATIVE: 12.708 G_Regularizer: 0.000 validation_error: 21.832 +(epoch: 10, iters: 51232, time: 0.541, data: 0.000) G_L1: 16.661 G_L1_ABSOLUTE: 2.961 G_L1_RELATIVE: 13.700 G_Regularizer: 0.000 validation_error: 20.843 +(epoch: 10, iters: 53232, time: 0.546, data: 0.000) G_L1: 15.706 G_L1_ABSOLUTE: 2.698 G_L1_RELATIVE: 13.008 G_Regularizer: 0.000 validation_error: 20.709 +(epoch: 10, iters: 55232, time: 0.546, data: 0.000) G_L1: 16.812 G_L1_ABSOLUTE: 2.569 G_L1_RELATIVE: 14.243 G_Regularizer: 0.000 validation_error: 21.406 +(epoch: 10, iters: 57232, time: 0.548, data: 0.000) G_L1: 14.104 G_L1_ABSOLUTE: 2.678 G_L1_RELATIVE: 11.426 G_Regularizer: 0.000 validation_error: 20.754 +(epoch: 10, iters: 59232, time: 0.548, data: 0.000) G_L1: 14.951 G_L1_ABSOLUTE: 2.589 G_L1_RELATIVE: 12.362 G_Regularizer: 0.000 validation_error: 20.704 +(epoch: 10, iters: 61232, time: 0.549, data: 0.000) G_L1: 14.276 G_L1_ABSOLUTE: 2.716 G_L1_RELATIVE: 11.560 G_Regularizer: 0.000 
validation_error: 20.417 +(epoch: 10, iters: 63232, time: 0.544, data: 0.000) G_L1: 16.909 G_L1_ABSOLUTE: 2.706 G_L1_RELATIVE: 14.203 G_Regularizer: 0.000 validation_error: 20.492 +(epoch: 10, iters: 65232, time: 0.543, data: 0.000) G_L1: 17.253 G_L1_ABSOLUTE: 2.288 G_L1_RELATIVE: 14.965 G_Regularizer: 0.000 validation_error: 20.813 +(epoch: 10, iters: 67232, time: 0.554, data: 0.000) G_L1: 14.807 G_L1_ABSOLUTE: 2.664 G_L1_RELATIVE: 12.144 G_Regularizer: 0.000 validation_error: 21.235 +(epoch: 10, iters: 69232, time: 0.542, data: 0.000) G_L1: 14.487 G_L1_ABSOLUTE: 2.508 G_L1_RELATIVE: 11.979 G_Regularizer: 0.000 validation_error: 20.401 +(epoch: 10, iters: 71232, time: 0.549, data: 0.000) G_L1: 14.774 G_L1_ABSOLUTE: 2.907 G_L1_RELATIVE: 11.868 G_Regularizer: 0.000 validation_error: 20.380 +(epoch: 10, iters: 73232, time: 0.542, data: 0.000) G_L1: 13.222 G_L1_ABSOLUTE: 2.564 G_L1_RELATIVE: 10.658 G_Regularizer: 0.000 validation_error: 21.290 +(epoch: 10, iters: 75232, time: 0.542, data: 0.001) G_L1: 14.659 G_L1_ABSOLUTE: 2.263 G_L1_RELATIVE: 12.395 G_Regularizer: 0.000 validation_error: 19.907 +(epoch: 10, iters: 77232, time: 0.550, data: 0.000) G_L1: 21.355 G_L1_ABSOLUTE: 3.249 G_L1_RELATIVE: 18.107 G_Regularizer: 0.000 validation_error: 20.249 +(epoch: 10, iters: 79232, time: 0.556, data: 0.000) G_L1: 15.954 G_L1_ABSOLUTE: 2.628 G_L1_RELATIVE: 13.326 G_Regularizer: 0.000 validation_error: 21.347 +(epoch: 10, iters: 81232, time: 0.547, data: 0.000) G_L1: 17.125 G_L1_ABSOLUTE: 2.677 G_L1_RELATIVE: 14.448 G_Regularizer: 0.000 validation_error: 20.140 +(epoch: 10, iters: 83232, time: 0.539, data: 0.000) G_L1: 16.122 G_L1_ABSOLUTE: 2.933 G_L1_RELATIVE: 13.189 G_Regularizer: 0.000 validation_error: 20.130 +(epoch: 10, iters: 85232, time: 0.544, data: 0.000) G_L1: 14.880 G_L1_ABSOLUTE: 3.152 G_L1_RELATIVE: 11.728 G_Regularizer: 0.000 validation_error: 20.672 +(epoch: 10, iters: 87232, time: 0.545, data: 0.000) G_L1: 18.042 G_L1_ABSOLUTE: 2.582 G_L1_RELATIVE: 15.459 G_Regularizer: 0.000 validation_error: 20.830 +(epoch: 10, iters: 89232, time: 0.543, data: 0.000) G_L1: 15.052 G_L1_ABSOLUTE: 2.379 G_L1_RELATIVE: 12.674 G_Regularizer: 0.000 validation_error: 22.012 +(epoch: 10, iters: 91232, time: 0.541, data: 0.000) G_L1: 14.189 G_L1_ABSOLUTE: 3.181 G_L1_RELATIVE: 11.009 G_Regularizer: 0.000 validation_error: 20.145 +(epoch: 10, iters: 93232, time: 0.545, data: 0.000) G_L1: 15.443 G_L1_ABSOLUTE: 2.619 G_L1_RELATIVE: 12.824 G_Regularizer: 0.000 validation_error: 20.939 +(epoch: 10, iters: 95232, time: 0.545, data: 0.000) G_L1: 16.188 G_L1_ABSOLUTE: 2.503 G_L1_RELATIVE: 13.685 G_Regularizer: 0.000 validation_error: 20.505 +(epoch: 10, iters: 97232, time: 0.544, data: 0.000) G_L1: 18.122 G_L1_ABSOLUTE: 3.304 G_L1_RELATIVE: 14.818 G_Regularizer: 0.000 validation_error: 20.263 +(epoch: 10, iters: 99232, time: 0.544, data: 0.000) G_L1: 19.433 G_L1_ABSOLUTE: 2.788 G_L1_RELATIVE: 16.645 G_Regularizer: 0.000 validation_error: 21.333 +(epoch: 10, iters: 101232, time: 0.539, data: 0.000) G_L1: 16.613 G_L1_ABSOLUTE: 2.299 G_L1_RELATIVE: 14.314 G_Regularizer: 0.000 validation_error: 20.554 +(epoch: 10, iters: 103232, time: 0.539, data: 0.000) G_L1: 22.337 G_L1_ABSOLUTE: 2.918 G_L1_RELATIVE: 19.419 G_Regularizer: 0.000 validation_error: 20.227 +(epoch: 10, iters: 105232, time: 0.548, data: 0.000) G_L1: 13.335 G_L1_ABSOLUTE: 2.349 G_L1_RELATIVE: 10.987 G_Regularizer: 0.000 validation_error: 20.627 +(epoch: 10, iters: 107232, time: 0.551, data: 0.000) G_L1: 17.967 G_L1_ABSOLUTE: 3.361 G_L1_RELATIVE: 14.606 
G_Regularizer: 0.000 validation_error: 21.174 +(epoch: 10, iters: 109232, time: 0.550, data: 0.000) G_L1: 18.124 G_L1_ABSOLUTE: 3.704 G_L1_RELATIVE: 14.420 G_Regularizer: 0.000 validation_error: 20.492 +(epoch: 10, iters: 111232, time: 0.544, data: 0.000) G_L1: 13.830 G_L1_ABSOLUTE: 2.778 G_L1_RELATIVE: 11.052 G_Regularizer: 0.000 validation_error: 19.846 +(epoch: 10, iters: 113232, time: 0.542, data: 0.000) G_L1: 15.722 G_L1_ABSOLUTE: 2.894 G_L1_RELATIVE: 12.828 G_Regularizer: 0.000 validation_error: 20.707 +(epoch: 10, iters: 115232, time: 0.542, data: 0.000) G_L1: 14.628 G_L1_ABSOLUTE: 2.464 G_L1_RELATIVE: 12.163 G_Regularizer: 0.000 validation_error: 20.289 +(epoch: 10, iters: 117232, time: 0.549, data: 0.000) G_L1: 16.276 G_L1_ABSOLUTE: 2.963 G_L1_RELATIVE: 13.313 G_Regularizer: 0.000 validation_error: 20.735 +(epoch: 10, iters: 119232, time: 0.545, data: 0.000) G_L1: 16.165 G_L1_ABSOLUTE: 2.805 G_L1_RELATIVE: 13.360 G_Regularizer: 0.000 validation_error: 20.819 +(epoch: 10, iters: 121232, time: 0.541, data: 0.000) G_L1: 15.414 G_L1_ABSOLUTE: 2.798 G_L1_RELATIVE: 12.617 G_Regularizer: 0.000 validation_error: 20.455 +(epoch: 10, iters: 123232, time: 0.541, data: 0.000) G_L1: 17.959 G_L1_ABSOLUTE: 3.439 G_L1_RELATIVE: 14.519 G_Regularizer: 0.000 validation_error: 21.011 +(epoch: 10, iters: 125232, time: 0.543, data: 0.000) G_L1: 16.408 G_L1_ABSOLUTE: 2.657 G_L1_RELATIVE: 13.750 G_Regularizer: 0.000 validation_error: 20.888 +(epoch: 10, iters: 127232, time: 0.549, data: 0.000) G_L1: 15.866 G_L1_ABSOLUTE: 2.544 G_L1_RELATIVE: 13.322 G_Regularizer: 0.000 validation_error: 20.803 +(epoch: 10, iters: 129232, time: 0.545, data: 0.000) G_L1: 14.576 G_L1_ABSOLUTE: 2.877 G_L1_RELATIVE: 11.699 G_Regularizer: 0.000 validation_error: 20.377 +(epoch: 10, iters: 131232, time: 0.552, data: 0.000) G_L1: 18.244 G_L1_ABSOLUTE: 2.518 G_L1_RELATIVE: 15.726 G_Regularizer: 0.000 validation_error: 21.169 +(epoch: 10, iters: 133232, time: 0.543, data: 0.000) G_L1: 14.349 G_L1_ABSOLUTE: 2.637 G_L1_RELATIVE: 11.711 G_Regularizer: 0.000 validation_error: 20.050 +(epoch: 10, iters: 135232, time: 0.546, data: 0.000) G_L1: 14.903 G_L1_ABSOLUTE: 2.811 G_L1_RELATIVE: 12.092 G_Regularizer: 0.000 validation_error: 20.898 +(epoch: 10, iters: 137232, time: 0.554, data: 0.000) G_L1: 17.455 G_L1_ABSOLUTE: 2.619 G_L1_RELATIVE: 14.836 G_Regularizer: 0.000 validation_error: 20.345 +(epoch: 10, iters: 139232, time: 0.547, data: 0.000) G_L1: 14.852 G_L1_ABSOLUTE: 2.513 G_L1_RELATIVE: 12.339 G_Regularizer: 0.000 validation_error: 20.586 +(epoch: 10, iters: 141232, time: 0.545, data: 0.000) G_L1: 14.591 G_L1_ABSOLUTE: 2.423 G_L1_RELATIVE: 12.168 G_Regularizer: 0.000 validation_error: 20.676 +(epoch: 10, iters: 143232, time: 0.542, data: 0.001) G_L1: 12.710 G_L1_ABSOLUTE: 2.120 G_L1_RELATIVE: 10.590 G_Regularizer: 0.000 validation_error: 20.434 +(epoch: 10, iters: 145232, time: 0.546, data: 0.000) G_L1: 12.996 G_L1_ABSOLUTE: 2.380 G_L1_RELATIVE: 10.617 G_Regularizer: 0.000 validation_error: 21.059 +(epoch: 10, iters: 147232, time: 0.550, data: 0.000) G_L1: 16.103 G_L1_ABSOLUTE: 2.660 G_L1_RELATIVE: 13.443 G_Regularizer: 0.000 validation_error: 20.179 +(epoch: 10, iters: 149232, time: 0.545, data: 0.000) G_L1: 15.736 G_L1_ABSOLUTE: 2.459 G_L1_RELATIVE: 13.277 G_Regularizer: 0.000 validation_error: 20.741 +(epoch: 10, iters: 151232, time: 0.548, data: 0.000) G_L1: 15.790 G_L1_ABSOLUTE: 2.614 G_L1_RELATIVE: 13.176 G_Regularizer: 0.000 validation_error: 20.842 +(epoch: 10, iters: 153232, time: 0.548, data: 0.000) G_L1: 19.871 
G_L1_ABSOLUTE: 3.389 G_L1_RELATIVE: 16.481 G_Regularizer: 0.000 validation_error: 20.670 +(epoch: 10, iters: 155232, time: 0.546, data: 0.000) G_L1: 14.880 G_L1_ABSOLUTE: 2.458 G_L1_RELATIVE: 12.421 G_Regularizer: 0.000 validation_error: 20.503 +(epoch: 10, iters: 157232, time: 0.544, data: 0.000) G_L1: 16.626 G_L1_ABSOLUTE: 2.906 G_L1_RELATIVE: 13.720 G_Regularizer: 0.000 validation_error: 20.956 +(epoch: 10, iters: 159232, time: 0.547, data: 0.000) G_L1: 14.700 G_L1_ABSOLUTE: 2.836 G_L1_RELATIVE: 11.864 G_Regularizer: 0.000 validation_error: 21.093 +(epoch: 10, iters: 161232, time: 0.541, data: 0.000) G_L1: 16.012 G_L1_ABSOLUTE: 2.472 G_L1_RELATIVE: 13.540 G_Regularizer: 0.000 validation_error: 20.968 +(epoch: 10, iters: 163232, time: 0.540, data: 0.000) G_L1: 16.534 G_L1_ABSOLUTE: 3.063 G_L1_RELATIVE: 13.471 G_Regularizer: 0.000 validation_error: 20.047 +(epoch: 10, iters: 165232, time: 0.555, data: 0.001) G_L1: 17.346 G_L1_ABSOLUTE: 3.091 G_L1_RELATIVE: 14.255 G_Regularizer: 0.000 validation_error: 20.836 +(epoch: 10, iters: 167232, time: 0.553, data: 0.000) G_L1: 14.109 G_L1_ABSOLUTE: 2.554 G_L1_RELATIVE: 11.556 G_Regularizer: 0.000 validation_error: 20.547 +(epoch: 10, iters: 169232, time: 0.544, data: 0.000) G_L1: 14.122 G_L1_ABSOLUTE: 2.731 G_L1_RELATIVE: 11.391 G_Regularizer: 0.000 validation_error: 20.928 +(epoch: 10, iters: 171232, time: 0.545, data: 0.000) G_L1: 12.755 G_L1_ABSOLUTE: 2.328 G_L1_RELATIVE: 10.427 G_Regularizer: 0.000 validation_error: 20.380 +(epoch: 10, iters: 173232, time: 0.549, data: 0.000) G_L1: 17.591 G_L1_ABSOLUTE: 2.674 G_L1_RELATIVE: 14.918 G_Regularizer: 0.000 validation_error: 20.791 +(epoch: 10, iters: 175232, time: 0.545, data: 0.000) G_L1: 16.113 G_L1_ABSOLUTE: 2.831 G_L1_RELATIVE: 13.282 G_Regularizer: 0.000 validation_error: 20.408 +(epoch: 10, iters: 177232, time: 0.549, data: 0.000) G_L1: 13.938 G_L1_ABSOLUTE: 2.756 G_L1_RELATIVE: 11.182 G_Regularizer: 0.000 validation_error: 21.442 +(epoch: 10, iters: 179232, time: 0.544, data: 0.000) G_L1: 12.992 G_L1_ABSOLUTE: 2.402 G_L1_RELATIVE: 10.590 G_Regularizer: 0.000 validation_error: 20.125 +(epoch: 10, iters: 181232, time: 0.544, data: 0.000) G_L1: 14.906 G_L1_ABSOLUTE: 3.289 G_L1_RELATIVE: 11.617 G_Regularizer: 0.000 validation_error: 21.198 +(epoch: 10, iters: 183232, time: 0.543, data: 0.000) G_L1: 14.732 G_L1_ABSOLUTE: 2.762 G_L1_RELATIVE: 11.970 G_Regularizer: 0.000 validation_error: 20.368 +(epoch: 10, iters: 185232, time: 0.547, data: 0.000) G_L1: 14.420 G_L1_ABSOLUTE: 3.417 G_L1_RELATIVE: 11.002 G_Regularizer: 0.000 validation_error: 20.052 +(epoch: 10, iters: 187232, time: 0.547, data: 0.000) G_L1: 14.213 G_L1_ABSOLUTE: 2.445 G_L1_RELATIVE: 11.768 G_Regularizer: 0.000 validation_error: 20.608 +(epoch: 10, iters: 189232, time: 0.549, data: 0.000) G_L1: 13.537 G_L1_ABSOLUTE: 2.356 G_L1_RELATIVE: 11.181 G_Regularizer: 0.000 validation_error: 20.941 +(epoch: 10, iters: 191232, time: 0.545, data: 0.000) G_L1: 18.029 G_L1_ABSOLUTE: 2.506 G_L1_RELATIVE: 15.523 G_Regularizer: 0.000 validation_error: 20.999 +(epoch: 10, iters: 193232, time: 0.546, data: 0.000) G_L1: 12.655 G_L1_ABSOLUTE: 2.248 G_L1_RELATIVE: 10.407 G_Regularizer: 0.000 validation_error: 21.423 +(epoch: 10, iters: 195232, time: 0.549, data: 0.000) G_L1: 14.996 G_L1_ABSOLUTE: 2.760 G_L1_RELATIVE: 12.236 G_Regularizer: 0.000 validation_error: 20.178 +(epoch: 10, iters: 197232, time: 0.548, data: 0.000) G_L1: 14.274 G_L1_ABSOLUTE: 3.032 G_L1_RELATIVE: 11.242 G_Regularizer: 0.000 validation_error: 20.695 +(epoch: 10, iters: 199232, time: 
0.545, data: 0.000) G_L1: 13.755 G_L1_ABSOLUTE: 2.391 G_L1_RELATIVE: 11.365 G_Regularizer: 0.000 validation_error: 20.429 +(epoch: 10, iters: 201232, time: 0.551, data: 0.000) G_L1: 14.799 G_L1_ABSOLUTE: 2.541 G_L1_RELATIVE: 12.258 G_Regularizer: 0.000 validation_error: 20.774 +(epoch: 10, iters: 203232, time: 0.545, data: 0.000) G_L1: 13.486 G_L1_ABSOLUTE: 2.614 G_L1_RELATIVE: 10.871 G_Regularizer: 0.000 validation_error: 20.114 +(epoch: 10, iters: 205232, time: 0.546, data: 0.000) G_L1: 15.124 G_L1_ABSOLUTE: 2.929 G_L1_RELATIVE: 12.194 G_Regularizer: 0.000 validation_error: 20.145 +(epoch: 10, iters: 207232, time: 0.553, data: 0.000) G_L1: 18.527 G_L1_ABSOLUTE: 3.160 G_L1_RELATIVE: 15.367 G_Regularizer: 0.000 validation_error: 20.649 +(epoch: 10, iters: 209232, time: 0.547, data: 0.000) G_L1: 14.778 G_L1_ABSOLUTE: 2.298 G_L1_RELATIVE: 12.479 G_Regularizer: 0.000 validation_error: 20.730 +(epoch: 10, iters: 211232, time: 0.547, data: 0.001) G_L1: 15.017 G_L1_ABSOLUTE: 2.509 G_L1_RELATIVE: 12.508 G_Regularizer: 0.000 validation_error: 20.552 +(epoch: 10, iters: 213232, time: 0.554, data: 0.000) G_L1: 17.296 G_L1_ABSOLUTE: 2.635 G_L1_RELATIVE: 14.661 G_Regularizer: 0.000 validation_error: 20.806 +(epoch: 10, iters: 215232, time: 0.552, data: 0.000) G_L1: 15.949 G_L1_ABSOLUTE: 2.191 G_L1_RELATIVE: 13.758 G_Regularizer: 0.000 validation_error: 20.483 +(epoch: 10, iters: 217232, time: 0.551, data: 0.000) G_L1: 14.761 G_L1_ABSOLUTE: 2.649 G_L1_RELATIVE: 12.112 G_Regularizer: 0.000 validation_error: 21.312 +(epoch: 10, iters: 219232, time: 0.553, data: 0.001) G_L1: 12.814 G_L1_ABSOLUTE: 2.382 G_L1_RELATIVE: 10.433 G_Regularizer: 0.000 validation_error: 20.617 +(epoch: 10, iters: 221232, time: 0.556, data: 0.000) G_L1: 15.400 G_L1_ABSOLUTE: 2.484 G_L1_RELATIVE: 12.916 G_Regularizer: 0.000 validation_error: 20.883 +(epoch: 10, iters: 223232, time: 0.549, data: 0.000) G_L1: 15.091 G_L1_ABSOLUTE: 2.339 G_L1_RELATIVE: 12.752 G_Regularizer: 0.000 validation_error: 21.217 +(epoch: 10, iters: 225232, time: 0.565, data: 0.000) G_L1: 17.512 G_L1_ABSOLUTE: 2.936 G_L1_RELATIVE: 14.576 G_Regularizer: 0.000 validation_error: 20.700 +(epoch: 10, iters: 227232, time: 0.553, data: 0.000) G_L1: 16.970 G_L1_ABSOLUTE: 2.738 G_L1_RELATIVE: 14.232 G_Regularizer: 0.000 validation_error: 20.806 +(epoch: 10, iters: 229232, time: 0.554, data: 0.000) G_L1: 17.336 G_L1_ABSOLUTE: 3.443 G_L1_RELATIVE: 13.893 G_Regularizer: 0.000 validation_error: 20.342 +(epoch: 10, iters: 231232, time: 0.554, data: 0.000) G_L1: 14.998 G_L1_ABSOLUTE: 2.436 G_L1_RELATIVE: 12.562 G_Regularizer: 0.000 validation_error: 20.045 +(epoch: 10, iters: 233232, time: 0.551, data: 0.000) G_L1: 16.179 G_L1_ABSOLUTE: 2.798 G_L1_RELATIVE: 13.381 G_Regularizer: 0.000 validation_error: 20.145 +(epoch: 10, iters: 235232, time: 0.552, data: 0.000) G_L1: 14.076 G_L1_ABSOLUTE: 2.573 G_L1_RELATIVE: 11.503 G_Regularizer: 0.000 validation_error: 20.510 +(epoch: 10, iters: 237232, time: 0.545, data: 0.000) G_L1: 16.731 G_L1_ABSOLUTE: 2.811 G_L1_RELATIVE: 13.920 G_Regularizer: 0.000 validation_error: 20.528 +(epoch: 10, iters: 239232, time: 0.553, data: 0.000) G_L1: 14.125 G_L1_ABSOLUTE: 2.992 G_L1_RELATIVE: 11.134 G_Regularizer: 0.000 validation_error: 20.737 +(epoch: 10, iters: 241232, time: 0.549, data: 0.000) G_L1: 15.174 G_L1_ABSOLUTE: 3.535 G_L1_RELATIVE: 11.639 G_Regularizer: 0.000 validation_error: 20.625 +(epoch: 10, iters: 243232, time: 0.545, data: 0.000) G_L1: 16.767 G_L1_ABSOLUTE: 2.897 G_L1_RELATIVE: 13.869 G_Regularizer: 0.000 validation_error: 20.551 
+(epoch: 10, iters: 245232, time: 0.553, data: 0.000) G_L1: 23.020 G_L1_ABSOLUTE: 2.900 G_L1_RELATIVE: 20.120 G_Regularizer: 0.000 validation_error: 19.363 +(epoch: 10, iters: 247232, time: 0.552, data: 0.000) G_L1: 17.410 G_L1_ABSOLUTE: 2.860 G_L1_RELATIVE: 14.550 G_Regularizer: 0.000 validation_error: 20.652 +(epoch: 10, iters: 249232, time: 0.545, data: 0.000) G_L1: 14.205 G_L1_ABSOLUTE: 2.802 G_L1_RELATIVE: 11.403 G_Regularizer: 0.000 validation_error: 19.963 +(epoch: 10, iters: 251232, time: 0.548, data: 0.000) G_L1: 17.507 G_L1_ABSOLUTE: 2.754 G_L1_RELATIVE: 14.753 G_Regularizer: 0.000 validation_error: 19.682 +(epoch: 10, iters: 253232, time: 0.555, data: 0.000) G_L1: 14.642 G_L1_ABSOLUTE: 2.669 G_L1_RELATIVE: 11.974 G_Regularizer: 0.000 validation_error: 20.507 +(epoch: 10, iters: 255232, time: 0.552, data: 0.000) G_L1: 14.878 G_L1_ABSOLUTE: 3.030 G_L1_RELATIVE: 11.847 G_Regularizer: 0.000 validation_error: 20.699 +(epoch: 10, iters: 257232, time: 0.547, data: 0.000) G_L1: 13.846 G_L1_ABSOLUTE: 2.307 G_L1_RELATIVE: 11.538 G_Regularizer: 0.000 validation_error: 21.463 +(epoch: 10, iters: 259232, time: 0.545, data: 0.000) G_L1: 11.461 G_L1_ABSOLUTE: 2.550 G_L1_RELATIVE: 8.911 G_Regularizer: 0.000 validation_error: 20.281 +(epoch: 10, iters: 261232, time: 0.545, data: 0.000) G_L1: 16.568 G_L1_ABSOLUTE: 2.540 G_L1_RELATIVE: 14.028 G_Regularizer: 0.000 validation_error: 19.805 +(epoch: 10, iters: 263232, time: 0.550, data: 0.000) G_L1: 14.315 G_L1_ABSOLUTE: 2.639 G_L1_RELATIVE: 11.676 G_Regularizer: 0.000 validation_error: 20.715 +(epoch: 10, iters: 265232, time: 0.541, data: 0.000) G_L1: 15.470 G_L1_ABSOLUTE: 2.918 G_L1_RELATIVE: 12.551 G_Regularizer: 0.000 validation_error: 20.637 +(epoch: 10, iters: 267232, time: 0.551, data: 0.000) G_L1: 15.038 G_L1_ABSOLUTE: 2.760 G_L1_RELATIVE: 12.278 G_Regularizer: 0.000 validation_error: 21.138 +(epoch: 10, iters: 269232, time: 0.549, data: 0.000) G_L1: 15.097 G_L1_ABSOLUTE: 2.815 G_L1_RELATIVE: 12.282 G_Regularizer: 0.000 validation_error: 20.063 +(epoch: 10, iters: 271232, time: 0.547, data: 0.000) G_L1: 16.487 G_L1_ABSOLUTE: 2.666 G_L1_RELATIVE: 13.820 G_Regularizer: 0.000 validation_error: 20.431 +(epoch: 10, iters: 273232, time: 0.545, data: 0.000) G_L1: 13.480 G_L1_ABSOLUTE: 2.707 G_L1_RELATIVE: 10.773 G_Regularizer: 0.000 validation_error: 20.835 +(epoch: 10, iters: 275232, time: 0.545, data: 0.000) G_L1: 17.855 G_L1_ABSOLUTE: 2.457 G_L1_RELATIVE: 15.399 G_Regularizer: 0.000 validation_error: 21.204 +(epoch: 10, iters: 277232, time: 0.547, data: 0.000) G_L1: 13.515 G_L1_ABSOLUTE: 2.416 G_L1_RELATIVE: 11.099 G_Regularizer: 0.000 validation_error: 20.197 +(epoch: 10, iters: 279232, time: 0.538, data: 0.000) G_L1: 18.028 G_L1_ABSOLUTE: 2.765 G_L1_RELATIVE: 15.263 G_Regularizer: 0.000 validation_error: 20.173 +(epoch: 10, iters: 281232, time: 0.553, data: 0.000) G_L1: 15.266 G_L1_ABSOLUTE: 2.705 G_L1_RELATIVE: 12.561 G_Regularizer: 0.000 validation_error: 20.950 +(epoch: 10, iters: 283232, time: 0.551, data: 0.000) G_L1: 15.726 G_L1_ABSOLUTE: 2.646 G_L1_RELATIVE: 13.080 G_Regularizer: 0.000 validation_error: 20.813 +(epoch: 10, iters: 285232, time: 0.549, data: 0.000) G_L1: 17.751 G_L1_ABSOLUTE: 3.109 G_L1_RELATIVE: 14.642 G_Regularizer: 0.000 validation_error: 20.437 +(epoch: 10, iters: 287232, time: 0.552, data: 0.001) G_L1: 14.509 G_L1_ABSOLUTE: 2.669 G_L1_RELATIVE: 11.840 G_Regularizer: 0.000 validation_error: 20.600 +(epoch: 10, iters: 289232, time: 0.536, data: 0.000) G_L1: 17.136 G_L1_ABSOLUTE: 2.856 G_L1_RELATIVE: 14.279 
G_Regularizer: 0.000 validation_error: 21.193 +(epoch: 10, iters: 291232, time: 0.545, data: 0.000) G_L1: 17.186 G_L1_ABSOLUTE: 2.701 G_L1_RELATIVE: 14.485 G_Regularizer: 0.000 validation_error: 21.151 +(epoch: 10, iters: 293232, time: 0.547, data: 0.000) G_L1: 14.177 G_L1_ABSOLUTE: 3.148 G_L1_RELATIVE: 11.029 G_Regularizer: 0.000 validation_error: 20.417 +(epoch: 10, iters: 295232, time: 0.550, data: 0.000) G_L1: 13.360 G_L1_ABSOLUTE: 2.162 G_L1_RELATIVE: 11.198 G_Regularizer: 0.000 validation_error: 19.833 +(epoch: 10, iters: 297232, time: 0.545, data: 0.000) G_L1: 13.799 G_L1_ABSOLUTE: 2.319 G_L1_RELATIVE: 11.479 G_Regularizer: 0.000 validation_error: 20.525 +(epoch: 10, iters: 299232, time: 0.546, data: 0.000) G_L1: 14.907 G_L1_ABSOLUTE: 2.691 G_L1_RELATIVE: 12.217 G_Regularizer: 0.000 validation_error: 20.635 +(epoch: 10, iters: 301232, time: 0.544, data: 0.000) G_L1: 13.607 G_L1_ABSOLUTE: 2.609 G_L1_RELATIVE: 10.997 G_Regularizer: 0.000 validation_error: 20.512 +(epoch: 11, iters: 480, time: 0.549, data: 0.000) G_L1: 16.376 G_L1_ABSOLUTE: 2.669 G_L1_RELATIVE: 13.707 G_Regularizer: 0.000 validation_error: 20.742 +(epoch: 11, iters: 2480, time: 0.546, data: 0.000) G_L1: 13.902 G_L1_ABSOLUTE: 2.501 G_L1_RELATIVE: 11.401 G_Regularizer: 0.000 validation_error: 21.817 +(epoch: 11, iters: 4480, time: 0.542, data: 0.000) G_L1: 18.020 G_L1_ABSOLUTE: 3.025 G_L1_RELATIVE: 14.994 G_Regularizer: 0.000 validation_error: 20.607 +(epoch: 11, iters: 6480, time: 0.547, data: 0.000) G_L1: 13.064 G_L1_ABSOLUTE: 2.252 G_L1_RELATIVE: 10.812 G_Regularizer: 0.000 validation_error: 20.303 +(epoch: 11, iters: 8480, time: 0.548, data: 0.000) G_L1: 17.276 G_L1_ABSOLUTE: 2.620 G_L1_RELATIVE: 14.656 G_Regularizer: 0.000 validation_error: 20.267 +(epoch: 11, iters: 10480, time: 0.549, data: 0.000) G_L1: 17.090 G_L1_ABSOLUTE: 2.688 G_L1_RELATIVE: 14.402 G_Regularizer: 0.000 validation_error: 21.273 +(epoch: 11, iters: 12480, time: 0.555, data: 0.000) G_L1: 14.087 G_L1_ABSOLUTE: 2.709 G_L1_RELATIVE: 11.378 G_Regularizer: 0.000 validation_error: 21.315 +(epoch: 11, iters: 14480, time: 0.554, data: 0.000) G_L1: 14.267 G_L1_ABSOLUTE: 2.474 G_L1_RELATIVE: 11.792 G_Regularizer: 0.000 validation_error: 20.320 +(epoch: 11, iters: 16480, time: 0.546, data: 0.001) G_L1: 15.473 G_L1_ABSOLUTE: 2.939 G_L1_RELATIVE: 12.534 G_Regularizer: 0.000 validation_error: 19.640 +(epoch: 11, iters: 18480, time: 0.540, data: 0.000) G_L1: 17.398 G_L1_ABSOLUTE: 2.633 G_L1_RELATIVE: 14.765 G_Regularizer: 0.000 validation_error: 20.853 +(epoch: 11, iters: 20480, time: 0.543, data: 0.000) G_L1: 14.396 G_L1_ABSOLUTE: 2.630 G_L1_RELATIVE: 11.765 G_Regularizer: 0.000 validation_error: 20.981 +(epoch: 11, iters: 22480, time: 0.546, data: 0.000) G_L1: 14.633 G_L1_ABSOLUTE: 2.623 G_L1_RELATIVE: 12.010 G_Regularizer: 0.000 validation_error: 22.118 +(epoch: 11, iters: 24480, time: 0.546, data: 0.000) G_L1: 13.481 G_L1_ABSOLUTE: 2.800 G_L1_RELATIVE: 10.681 G_Regularizer: 0.000 validation_error: 21.266 +(epoch: 11, iters: 26480, time: 0.553, data: 0.000) G_L1: 13.165 G_L1_ABSOLUTE: 2.745 G_L1_RELATIVE: 10.420 G_Regularizer: 0.000 validation_error: 20.287 +(epoch: 11, iters: 28480, time: 0.547, data: 0.001) G_L1: 14.899 G_L1_ABSOLUTE: 2.477 G_L1_RELATIVE: 12.422 G_Regularizer: 0.000 validation_error: 20.734 +(epoch: 11, iters: 30480, time: 0.543, data: 0.000) G_L1: 18.560 G_L1_ABSOLUTE: 3.533 G_L1_RELATIVE: 15.027 G_Regularizer: 0.000 validation_error: 20.532 +(epoch: 11, iters: 32480, time: 0.542, data: 0.000) G_L1: 15.031 G_L1_ABSOLUTE: 2.764 
G_L1_RELATIVE: 12.267 G_Regularizer: 0.000 validation_error: 20.572 +(epoch: 11, iters: 34480, time: 0.545, data: 0.000) G_L1: 17.692 G_L1_ABSOLUTE: 2.758 G_L1_RELATIVE: 14.934 G_Regularizer: 0.000 validation_error: 20.625 +(epoch: 11, iters: 36480, time: 0.549, data: 0.000) G_L1: 16.194 G_L1_ABSOLUTE: 2.515 G_L1_RELATIVE: 13.679 G_Regularizer: 0.000 validation_error: 20.206 +(epoch: 11, iters: 38480, time: 0.546, data: 0.000) G_L1: 19.133 G_L1_ABSOLUTE: 2.913 G_L1_RELATIVE: 16.220 G_Regularizer: 0.000 validation_error: 20.541 +(epoch: 11, iters: 40480, time: 0.543, data: 0.000) G_L1: 16.560 G_L1_ABSOLUTE: 2.455 G_L1_RELATIVE: 14.104 G_Regularizer: 0.000 validation_error: 20.879 +(epoch: 11, iters: 42480, time: 0.550, data: 0.000) G_L1: 15.410 G_L1_ABSOLUTE: 2.205 G_L1_RELATIVE: 13.205 G_Regularizer: 0.000 validation_error: 20.208 +(epoch: 11, iters: 44480, time: 0.543, data: 0.000) G_L1: 18.799 G_L1_ABSOLUTE: 3.002 G_L1_RELATIVE: 15.797 G_Regularizer: 0.000 validation_error: 20.021 +(epoch: 11, iters: 46480, time: 0.549, data: 0.000) G_L1: 16.024 G_L1_ABSOLUTE: 2.487 G_L1_RELATIVE: 13.537 G_Regularizer: 0.000 validation_error: 21.482 +(epoch: 11, iters: 48480, time: 0.546, data: 0.000) G_L1: 16.181 G_L1_ABSOLUTE: 2.806 G_L1_RELATIVE: 13.375 G_Regularizer: 0.000 validation_error: 20.675 +(epoch: 11, iters: 50480, time: 0.547, data: 0.000) G_L1: 15.226 G_L1_ABSOLUTE: 2.403 G_L1_RELATIVE: 12.823 G_Regularizer: 0.000 validation_error: 20.059 +(epoch: 11, iters: 52480, time: 0.551, data: 0.000) G_L1: 14.939 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 12.485 G_Regularizer: 0.000 validation_error: 20.214 +(epoch: 11, iters: 54480, time: 0.547, data: 0.000) G_L1: 18.968 G_L1_ABSOLUTE: 2.351 G_L1_RELATIVE: 16.616 G_Regularizer: 0.000 validation_error: 20.430 +(epoch: 11, iters: 56480, time: 0.543, data: 0.000) G_L1: 16.062 G_L1_ABSOLUTE: 2.405 G_L1_RELATIVE: 13.657 G_Regularizer: 0.000 validation_error: 20.357 +(epoch: 11, iters: 58480, time: 0.547, data: 0.001) G_L1: 14.969 G_L1_ABSOLUTE: 2.798 G_L1_RELATIVE: 12.172 G_Regularizer: 0.000 validation_error: 20.682 +(epoch: 11, iters: 60480, time: 0.547, data: 0.000) G_L1: 16.544 G_L1_ABSOLUTE: 2.696 G_L1_RELATIVE: 13.848 G_Regularizer: 0.000 validation_error: 20.776 +(epoch: 11, iters: 62480, time: 0.544, data: 0.000) G_L1: 15.426 G_L1_ABSOLUTE: 2.494 G_L1_RELATIVE: 12.932 G_Regularizer: 0.000 validation_error: 20.785 +(epoch: 11, iters: 64480, time: 0.549, data: 0.000) G_L1: 13.320 G_L1_ABSOLUTE: 2.551 G_L1_RELATIVE: 10.769 G_Regularizer: 0.000 validation_error: 20.560 +(epoch: 11, iters: 66480, time: 0.548, data: 0.000) G_L1: 15.972 G_L1_ABSOLUTE: 2.857 G_L1_RELATIVE: 13.115 G_Regularizer: 0.000 validation_error: 20.819 +(epoch: 11, iters: 68480, time: 0.545, data: 0.000) G_L1: 13.796 G_L1_ABSOLUTE: 2.442 G_L1_RELATIVE: 11.354 G_Regularizer: 0.000 validation_error: 20.698 +(epoch: 11, iters: 70480, time: 0.550, data: 0.000) G_L1: 14.291 G_L1_ABSOLUTE: 2.381 G_L1_RELATIVE: 11.909 G_Regularizer: 0.000 validation_error: 21.171 +(epoch: 11, iters: 72480, time: 0.542, data: 0.000) G_L1: 15.998 G_L1_ABSOLUTE: 2.923 G_L1_RELATIVE: 13.075 G_Regularizer: 0.000 validation_error: 21.129 +(epoch: 11, iters: 74480, time: 0.560, data: 0.000) G_L1: 13.909 G_L1_ABSOLUTE: 2.626 G_L1_RELATIVE: 11.284 G_Regularizer: 0.000 validation_error: 20.157 +(epoch: 11, iters: 76480, time: 0.547, data: 0.000) G_L1: 13.925 G_L1_ABSOLUTE: 2.875 G_L1_RELATIVE: 11.050 G_Regularizer: 0.000 validation_error: 20.952 +(epoch: 11, iters: 78480, time: 0.550, data: 0.000) G_L1: 16.667 
G_L1_ABSOLUTE: 2.733 G_L1_RELATIVE: 13.935 G_Regularizer: 0.000 validation_error: 20.937 +(epoch: 11, iters: 80480, time: 0.548, data: 0.000) G_L1: 18.635 G_L1_ABSOLUTE: 3.035 G_L1_RELATIVE: 15.600 G_Regularizer: 0.000 validation_error: 20.701 +(epoch: 11, iters: 82480, time: 0.539, data: 0.000) G_L1: 14.653 G_L1_ABSOLUTE: 2.347 G_L1_RELATIVE: 12.306 G_Regularizer: 0.000 validation_error: 21.124 +(epoch: 11, iters: 84480, time: 0.536, data: 0.000) G_L1: 15.658 G_L1_ABSOLUTE: 2.220 G_L1_RELATIVE: 13.438 G_Regularizer: 0.000 validation_error: 20.509 +(epoch: 11, iters: 86480, time: 0.548, data: 0.000) G_L1: 14.230 G_L1_ABSOLUTE: 2.821 G_L1_RELATIVE: 11.409 G_Regularizer: 0.000 validation_error: 20.718 +(epoch: 11, iters: 88480, time: 0.544, data: 0.000) G_L1: 15.662 G_L1_ABSOLUTE: 2.550 G_L1_RELATIVE: 13.112 G_Regularizer: 0.000 validation_error: 20.400 +(epoch: 11, iters: 90480, time: 0.544, data: 0.000) G_L1: 13.886 G_L1_ABSOLUTE: 2.798 G_L1_RELATIVE: 11.087 G_Regularizer: 0.000 validation_error: 19.992 +(epoch: 11, iters: 92480, time: 0.533, data: 0.000) G_L1: 12.838 G_L1_ABSOLUTE: 2.654 G_L1_RELATIVE: 10.183 G_Regularizer: 0.000 validation_error: 20.913 +(epoch: 11, iters: 94480, time: 0.546, data: 0.000) G_L1: 17.600 G_L1_ABSOLUTE: 3.057 G_L1_RELATIVE: 14.543 G_Regularizer: 0.000 validation_error: 20.485 +(epoch: 11, iters: 96480, time: 0.535, data: 0.000) G_L1: 14.893 G_L1_ABSOLUTE: 2.754 G_L1_RELATIVE: 12.139 G_Regularizer: 0.000 validation_error: 21.407 +(epoch: 11, iters: 98480, time: 0.530, data: 0.000) G_L1: 14.051 G_L1_ABSOLUTE: 3.233 G_L1_RELATIVE: 10.818 G_Regularizer: 0.000 validation_error: 21.251 +(epoch: 11, iters: 100480, time: 0.534, data: 0.000) G_L1: 13.616 G_L1_ABSOLUTE: 2.597 G_L1_RELATIVE: 11.019 G_Regularizer: 0.000 validation_error: 20.767 +(epoch: 11, iters: 102480, time: 0.536, data: 0.000) G_L1: 15.465 G_L1_ABSOLUTE: 2.761 G_L1_RELATIVE: 12.704 G_Regularizer: 0.000 validation_error: 20.061 +(epoch: 11, iters: 104480, time: 0.529, data: 0.000) G_L1: 16.854 G_L1_ABSOLUTE: 2.210 G_L1_RELATIVE: 14.644 G_Regularizer: 0.000 validation_error: 20.299 +(epoch: 11, iters: 106480, time: 0.544, data: 0.000) G_L1: 14.665 G_L1_ABSOLUTE: 2.601 G_L1_RELATIVE: 12.064 G_Regularizer: 0.000 validation_error: 20.782 +(epoch: 11, iters: 108480, time: 0.534, data: 0.000) G_L1: 14.821 G_L1_ABSOLUTE: 3.057 G_L1_RELATIVE: 11.764 G_Regularizer: 0.000 validation_error: 20.255 +(epoch: 11, iters: 110480, time: 0.526, data: 0.000) G_L1: 15.236 G_L1_ABSOLUTE: 2.433 G_L1_RELATIVE: 12.803 G_Regularizer: 0.000 validation_error: 20.619 +(epoch: 11, iters: 112480, time: 0.548, data: 0.000) G_L1: 14.774 G_L1_ABSOLUTE: 2.563 G_L1_RELATIVE: 12.212 G_Regularizer: 0.000 validation_error: 20.590 +(epoch: 11, iters: 114480, time: 0.539, data: 0.000) G_L1: 13.771 G_L1_ABSOLUTE: 2.478 G_L1_RELATIVE: 11.293 G_Regularizer: 0.000 validation_error: 19.755 +(epoch: 11, iters: 116480, time: 0.535, data: 0.000) G_L1: 16.205 G_L1_ABSOLUTE: 2.886 G_L1_RELATIVE: 13.318 G_Regularizer: 0.000 validation_error: 20.530 +(epoch: 11, iters: 118480, time: 0.522, data: 0.000) G_L1: 12.835 G_L1_ABSOLUTE: 2.276 G_L1_RELATIVE: 10.559 G_Regularizer: 0.000 validation_error: 20.497 +(epoch: 11, iters: 120480, time: 0.544, data: 0.000) G_L1: 16.972 G_L1_ABSOLUTE: 2.464 G_L1_RELATIVE: 14.508 G_Regularizer: 0.000 validation_error: 20.915 +(epoch: 11, iters: 122480, time: 0.542, data: 0.000) G_L1: 13.825 G_L1_ABSOLUTE: 3.029 G_L1_RELATIVE: 10.796 G_Regularizer: 0.000 validation_error: 21.800 +(epoch: 11, iters: 124480, time: 0.531, 
data: 0.000) G_L1: 16.790 G_L1_ABSOLUTE: 2.346 G_L1_RELATIVE: 14.443 G_Regularizer: 0.000 validation_error: 20.686 +(epoch: 11, iters: 126480, time: 0.536, data: 0.000) G_L1: 15.606 G_L1_ABSOLUTE: 2.775 G_L1_RELATIVE: 12.831 G_Regularizer: 0.000 validation_error: 21.010 +(epoch: 11, iters: 128480, time: 0.544, data: 0.000) G_L1: 13.453 G_L1_ABSOLUTE: 2.277 G_L1_RELATIVE: 11.176 G_Regularizer: 0.000 validation_error: 20.572 +(epoch: 11, iters: 130480, time: 0.540, data: 0.000) G_L1: 16.980 G_L1_ABSOLUTE: 2.972 G_L1_RELATIVE: 14.008 G_Regularizer: 0.000 validation_error: 21.207 +(epoch: 11, iters: 132480, time: 0.905, data: 0.000) G_L1: 13.569 G_L1_ABSOLUTE: 2.375 G_L1_RELATIVE: 11.194 G_Regularizer: 0.000 validation_error: 20.845 +(epoch: 11, iters: 134480, time: 0.918, data: 0.000) G_L1: 16.084 G_L1_ABSOLUTE: 2.718 G_L1_RELATIVE: 13.366 G_Regularizer: 0.000 validation_error: 20.218 +(epoch: 11, iters: 136480, time: 0.920, data: 0.000) G_L1: 14.828 G_L1_ABSOLUTE: 3.328 G_L1_RELATIVE: 11.500 G_Regularizer: 0.000 validation_error: 20.898 +(epoch: 11, iters: 138480, time: 0.920, data: 0.000) G_L1: 20.499 G_L1_ABSOLUTE: 2.876 G_L1_RELATIVE: 17.623 G_Regularizer: 0.000 validation_error: 20.491 +(epoch: 11, iters: 140480, time: 0.920, data: 0.000) G_L1: 14.632 G_L1_ABSOLUTE: 2.632 G_L1_RELATIVE: 12.000 G_Regularizer: 0.000 validation_error: 20.408 +(epoch: 11, iters: 142480, time: 0.919, data: 0.000) G_L1: 16.573 G_L1_ABSOLUTE: 2.894 G_L1_RELATIVE: 13.679 G_Regularizer: 0.000 validation_error: 20.700 +(epoch: 11, iters: 144480, time: 0.918, data: 0.000) G_L1: 16.041 G_L1_ABSOLUTE: 2.957 G_L1_RELATIVE: 13.084 G_Regularizer: 0.000 validation_error: 20.929 +(epoch: 11, iters: 146480, time: 0.925, data: 0.001) G_L1: 14.816 G_L1_ABSOLUTE: 2.283 G_L1_RELATIVE: 12.533 G_Regularizer: 0.000 validation_error: 21.000 +(epoch: 11, iters: 148480, time: 0.918, data: 0.000) G_L1: 17.811 G_L1_ABSOLUTE: 2.959 G_L1_RELATIVE: 14.853 G_Regularizer: 0.000 validation_error: 21.003 +(epoch: 11, iters: 150480, time: 0.919, data: 0.000) G_L1: 13.809 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 11.355 G_Regularizer: 0.000 validation_error: 20.796 +(epoch: 11, iters: 152480, time: 0.919, data: 0.000) G_L1: 13.772 G_L1_ABSOLUTE: 2.502 G_L1_RELATIVE: 11.270 G_Regularizer: 0.000 validation_error: 20.652 +(epoch: 11, iters: 154480, time: 0.923, data: 0.000) G_L1: 15.495 G_L1_ABSOLUTE: 2.855 G_L1_RELATIVE: 12.640 G_Regularizer: 0.000 validation_error: 20.562 +(epoch: 11, iters: 156480, time: 0.927, data: 0.000) G_L1: 14.256 G_L1_ABSOLUTE: 2.375 G_L1_RELATIVE: 11.881 G_Regularizer: 0.000 validation_error: 19.980 +(epoch: 11, iters: 158480, time: 0.920, data: 0.000) G_L1: 16.756 G_L1_ABSOLUTE: 2.562 G_L1_RELATIVE: 14.194 G_Regularizer: 0.000 validation_error: 20.596 +(epoch: 11, iters: 160480, time: 0.925, data: 0.000) G_L1: 17.864 G_L1_ABSOLUTE: 2.916 G_L1_RELATIVE: 14.947 G_Regularizer: 0.000 validation_error: 20.221 +(epoch: 11, iters: 162480, time: 0.917, data: 0.000) G_L1: 15.868 G_L1_ABSOLUTE: 2.927 G_L1_RELATIVE: 12.941 G_Regularizer: 0.000 validation_error: 20.288 +(epoch: 11, iters: 164480, time: 0.919, data: 0.000) G_L1: 12.488 G_L1_ABSOLUTE: 2.594 G_L1_RELATIVE: 9.894 G_Regularizer: 0.000 validation_error: 20.631 +(epoch: 11, iters: 166480, time: 0.925, data: 0.000) G_L1: 18.878 G_L1_ABSOLUTE: 2.629 G_L1_RELATIVE: 16.249 G_Regularizer: 0.000 validation_error: 20.800 +(epoch: 11, iters: 168480, time: 0.923, data: 0.000) G_L1: 15.654 G_L1_ABSOLUTE: 2.598 G_L1_RELATIVE: 13.055 G_Regularizer: 0.000 validation_error: 20.369 +(epoch: 
11, iters: 170480, time: 0.923, data: 0.000) G_L1: 15.099 G_L1_ABSOLUTE: 2.821 G_L1_RELATIVE: 12.279 G_Regularizer: 0.000 validation_error: 20.030 +(epoch: 11, iters: 172480, time: 0.922, data: 0.000) G_L1: 16.376 G_L1_ABSOLUTE: 2.560 G_L1_RELATIVE: 13.815 G_Regularizer: 0.000 validation_error: 20.749 +(epoch: 11, iters: 174480, time: 0.924, data: 0.000) G_L1: 13.603 G_L1_ABSOLUTE: 2.692 G_L1_RELATIVE: 10.911 G_Regularizer: 0.000 validation_error: 19.996 +(epoch: 11, iters: 176480, time: 0.930, data: 0.000) G_L1: 16.873 G_L1_ABSOLUTE: 2.692 G_L1_RELATIVE: 14.181 G_Regularizer: 0.000 validation_error: 20.608 +(epoch: 11, iters: 178480, time: 0.923, data: 0.000) G_L1: 19.851 G_L1_ABSOLUTE: 3.333 G_L1_RELATIVE: 16.518 G_Regularizer: 0.000 validation_error: 20.615 +(epoch: 11, iters: 180480, time: 0.928, data: 0.000) G_L1: 17.791 G_L1_ABSOLUTE: 2.742 G_L1_RELATIVE: 15.049 G_Regularizer: 0.000 validation_error: 20.285 +(epoch: 11, iters: 182480, time: 0.919, data: 0.000) G_L1: 19.061 G_L1_ABSOLUTE: 3.048 G_L1_RELATIVE: 16.013 G_Regularizer: 0.000 validation_error: 21.129 +(epoch: 11, iters: 184480, time: 0.915, data: 0.000) G_L1: 15.264 G_L1_ABSOLUTE: 2.627 G_L1_RELATIVE: 12.638 G_Regularizer: 0.000 validation_error: 20.644 +(epoch: 11, iters: 186480, time: 0.920, data: 0.000) G_L1: 13.892 G_L1_ABSOLUTE: 2.336 G_L1_RELATIVE: 11.555 G_Regularizer: 0.000 validation_error: 21.263 +(epoch: 11, iters: 188480, time: 0.921, data: 0.000) G_L1: 15.035 G_L1_ABSOLUTE: 2.827 G_L1_RELATIVE: 12.209 G_Regularizer: 0.000 validation_error: 20.722 +(epoch: 11, iters: 190480, time: 0.925, data: 0.000) G_L1: 15.177 G_L1_ABSOLUTE: 2.464 G_L1_RELATIVE: 12.713 G_Regularizer: 0.000 validation_error: 21.315 +(epoch: 11, iters: 192480, time: 0.924, data: 0.000) G_L1: 15.585 G_L1_ABSOLUTE: 2.265 G_L1_RELATIVE: 13.320 G_Regularizer: 0.000 validation_error: 21.604 +(epoch: 11, iters: 194480, time: 0.926, data: 0.000) G_L1: 12.338 G_L1_ABSOLUTE: 2.569 G_L1_RELATIVE: 9.768 G_Regularizer: 0.000 validation_error: 21.072 +(epoch: 11, iters: 196480, time: 0.921, data: 0.000) G_L1: 18.636 G_L1_ABSOLUTE: 2.819 G_L1_RELATIVE: 15.817 G_Regularizer: 0.000 validation_error: 20.552 +(epoch: 11, iters: 198480, time: 0.925, data: 0.000) G_L1: 14.545 G_L1_ABSOLUTE: 2.524 G_L1_RELATIVE: 12.021 G_Regularizer: 0.000 validation_error: 20.528 +(epoch: 11, iters: 200480, time: 0.920, data: 0.000) G_L1: 20.975 G_L1_ABSOLUTE: 3.117 G_L1_RELATIVE: 17.858 G_Regularizer: 0.000 validation_error: 20.569 +(epoch: 11, iters: 202480, time: 0.928, data: 0.000) G_L1: 13.589 G_L1_ABSOLUTE: 2.457 G_L1_RELATIVE: 11.132 G_Regularizer: 0.000 validation_error: 21.420 +(epoch: 11, iters: 204480, time: 0.921, data: 0.000) G_L1: 13.505 G_L1_ABSOLUTE: 2.495 G_L1_RELATIVE: 11.010 G_Regularizer: 0.000 validation_error: 20.684 +(epoch: 11, iters: 206480, time: 0.926, data: 0.000) G_L1: 16.471 G_L1_ABSOLUTE: 2.922 G_L1_RELATIVE: 13.549 G_Regularizer: 0.000 validation_error: 20.504 +(epoch: 11, iters: 208480, time: 0.921, data: 0.000) G_L1: 16.531 G_L1_ABSOLUTE: 2.666 G_L1_RELATIVE: 13.865 G_Regularizer: 0.000 validation_error: 20.447 +(epoch: 11, iters: 210480, time: 0.886, data: 0.000) G_L1: 15.495 G_L1_ABSOLUTE: 2.795 G_L1_RELATIVE: 12.700 G_Regularizer: 0.000 validation_error: 21.027 +(epoch: 11, iters: 212480, time: 0.557, data: 0.000) G_L1: 15.242 G_L1_ABSOLUTE: 3.111 G_L1_RELATIVE: 12.131 G_Regularizer: 0.000 validation_error: 21.078 +(epoch: 11, iters: 214480, time: 0.533, data: 0.000) G_L1: 17.768 G_L1_ABSOLUTE: 2.340 G_L1_RELATIVE: 15.428 G_Regularizer: 0.000 
validation_error: 21.018 +(epoch: 11, iters: 216480, time: 0.532, data: 0.001) G_L1: 17.826 G_L1_ABSOLUTE: 2.813 G_L1_RELATIVE: 15.012 G_Regularizer: 0.000 validation_error: 20.897 +(epoch: 11, iters: 218480, time: 0.529, data: 0.001) G_L1: 26.968 G_L1_ABSOLUTE: 3.524 G_L1_RELATIVE: 23.444 G_Regularizer: 0.000 validation_error: 20.216 +(epoch: 11, iters: 220480, time: 0.526, data: 0.000) G_L1: 16.722 G_L1_ABSOLUTE: 2.494 G_L1_RELATIVE: 14.228 G_Regularizer: 0.000 validation_error: 19.986 +(epoch: 11, iters: 222480, time: 0.529, data: 0.000) G_L1: 15.934 G_L1_ABSOLUTE: 2.429 G_L1_RELATIVE: 13.505 G_Regularizer: 0.000 validation_error: 20.684 +(epoch: 11, iters: 224480, time: 0.528, data: 0.000) G_L1: 16.382 G_L1_ABSOLUTE: 2.853 G_L1_RELATIVE: 13.529 G_Regularizer: 0.000 validation_error: 20.736 +(epoch: 11, iters: 226480, time: 0.530, data: 0.001) G_L1: 15.439 G_L1_ABSOLUTE: 2.626 G_L1_RELATIVE: 12.813 G_Regularizer: 0.000 validation_error: 21.076 +(epoch: 11, iters: 228480, time: 0.533, data: 0.000) G_L1: 14.611 G_L1_ABSOLUTE: 2.051 G_L1_RELATIVE: 12.560 G_Regularizer: 0.000 validation_error: 20.713 +(epoch: 11, iters: 230480, time: 0.528, data: 0.001) G_L1: 15.223 G_L1_ABSOLUTE: 2.678 G_L1_RELATIVE: 12.546 G_Regularizer: 0.000 validation_error: 20.168 +(epoch: 11, iters: 232480, time: 0.521, data: 0.000) G_L1: 18.622 G_L1_ABSOLUTE: 2.998 G_L1_RELATIVE: 15.624 G_Regularizer: 0.000 validation_error: 20.415 +(epoch: 11, iters: 234480, time: 0.530, data: 0.000) G_L1: 17.019 G_L1_ABSOLUTE: 2.676 G_L1_RELATIVE: 14.343 G_Regularizer: 0.000 validation_error: 19.730 +(epoch: 11, iters: 236480, time: 0.525, data: 0.000) G_L1: 11.582 G_L1_ABSOLUTE: 2.836 G_L1_RELATIVE: 8.746 G_Regularizer: 0.000 validation_error: 20.749 +(epoch: 11, iters: 238480, time: 0.528, data: 0.000) G_L1: 17.524 G_L1_ABSOLUTE: 3.012 G_L1_RELATIVE: 14.511 G_Regularizer: 0.000 validation_error: 20.177 +(epoch: 11, iters: 240480, time: 0.530, data: 0.000) G_L1: 16.383 G_L1_ABSOLUTE: 2.977 G_L1_RELATIVE: 13.406 G_Regularizer: 0.000 validation_error: 21.028 +(epoch: 11, iters: 242480, time: 0.531, data: 0.000) G_L1: 12.936 G_L1_ABSOLUTE: 3.339 G_L1_RELATIVE: 9.596 G_Regularizer: 0.000 validation_error: 20.097 +(epoch: 11, iters: 244480, time: 0.526, data: 0.000) G_L1: 16.490 G_L1_ABSOLUTE: 2.345 G_L1_RELATIVE: 14.145 G_Regularizer: 0.000 validation_error: 21.038 +(epoch: 11, iters: 246480, time: 0.526, data: 0.000) G_L1: 19.234 G_L1_ABSOLUTE: 2.781 G_L1_RELATIVE: 16.453 G_Regularizer: 0.000 validation_error: 21.165 +(epoch: 11, iters: 248480, time: 0.528, data: 0.000) G_L1: 13.478 G_L1_ABSOLUTE: 2.899 G_L1_RELATIVE: 10.579 G_Regularizer: 0.000 validation_error: 19.934 +(epoch: 11, iters: 250480, time: 0.530, data: 0.000) G_L1: 14.532 G_L1_ABSOLUTE: 2.582 G_L1_RELATIVE: 11.949 G_Regularizer: 0.000 validation_error: 20.152 +(epoch: 11, iters: 252480, time: 0.528, data: 0.000) G_L1: 12.507 G_L1_ABSOLUTE: 2.492 G_L1_RELATIVE: 10.015 G_Regularizer: 0.000 validation_error: 20.710 +(epoch: 11, iters: 254480, time: 0.526, data: 0.000) G_L1: 13.786 G_L1_ABSOLUTE: 2.626 G_L1_RELATIVE: 11.160 G_Regularizer: 0.000 validation_error: 20.327 +(epoch: 11, iters: 256480, time: 0.527, data: 0.000) G_L1: 14.052 G_L1_ABSOLUTE: 2.538 G_L1_RELATIVE: 11.514 G_Regularizer: 0.000 validation_error: 20.498 +(epoch: 11, iters: 258480, time: 0.530, data: 0.000) G_L1: 14.081 G_L1_ABSOLUTE: 2.356 G_L1_RELATIVE: 11.725 G_Regularizer: 0.000 validation_error: 20.433 +(epoch: 11, iters: 260480, time: 0.524, data: 0.000) G_L1: 15.754 G_L1_ABSOLUTE: 2.822 
G_L1_RELATIVE: 12.932 G_Regularizer: 0.000 validation_error: 20.470 +(epoch: 11, iters: 262480, time: 0.526, data: 0.000) G_L1: 16.718 G_L1_ABSOLUTE: 3.036 G_L1_RELATIVE: 13.682 G_Regularizer: 0.000 validation_error: 20.841 +(epoch: 11, iters: 264480, time: 0.530, data: 0.000) G_L1: 13.801 G_L1_ABSOLUTE: 2.472 G_L1_RELATIVE: 11.329 G_Regularizer: 0.000 validation_error: 20.918 +(epoch: 11, iters: 266480, time: 0.527, data: 0.000) G_L1: 14.400 G_L1_ABSOLUTE: 2.536 G_L1_RELATIVE: 11.864 G_Regularizer: 0.000 validation_error: 20.235 +(epoch: 11, iters: 268480, time: 0.533, data: 0.000) G_L1: 13.516 G_L1_ABSOLUTE: 2.510 G_L1_RELATIVE: 11.005 G_Regularizer: 0.000 validation_error: 20.992 +(epoch: 11, iters: 270480, time: 0.528, data: 0.000) G_L1: 16.431 G_L1_ABSOLUTE: 2.880 G_L1_RELATIVE: 13.550 G_Regularizer: 0.000 validation_error: 20.257 +(epoch: 11, iters: 272480, time: 0.526, data: 0.000) G_L1: 16.999 G_L1_ABSOLUTE: 3.061 G_L1_RELATIVE: 13.938 G_Regularizer: 0.000 validation_error: 21.031 +(epoch: 11, iters: 274480, time: 0.527, data: 0.000) G_L1: 14.973 G_L1_ABSOLUTE: 2.738 G_L1_RELATIVE: 12.235 G_Regularizer: 0.000 validation_error: 20.146 +(epoch: 11, iters: 276480, time: 0.528, data: 0.000) G_L1: 20.296 G_L1_ABSOLUTE: 2.959 G_L1_RELATIVE: 17.337 G_Regularizer: 0.000 validation_error: 21.501 +(epoch: 11, iters: 278480, time: 0.522, data: 0.000) G_L1: 13.399 G_L1_ABSOLUTE: 3.155 G_L1_RELATIVE: 10.243 G_Regularizer: 0.000 validation_error: 20.727 +(epoch: 11, iters: 280480, time: 0.529, data: 0.000) G_L1: 15.377 G_L1_ABSOLUTE: 2.767 G_L1_RELATIVE: 12.610 G_Regularizer: 0.000 validation_error: 21.688 +(epoch: 11, iters: 282480, time: 0.530, data: 0.000) G_L1: 17.750 G_L1_ABSOLUTE: 2.470 G_L1_RELATIVE: 15.281 G_Regularizer: 0.000 validation_error: 20.327 +(epoch: 11, iters: 284480, time: 0.531, data: 0.000) G_L1: 17.927 G_L1_ABSOLUTE: 2.972 G_L1_RELATIVE: 14.955 G_Regularizer: 0.000 validation_error: 20.068 +(epoch: 11, iters: 286480, time: 0.523, data: 0.000) G_L1: 13.003 G_L1_ABSOLUTE: 2.220 G_L1_RELATIVE: 10.782 G_Regularizer: 0.000 validation_error: 20.509 +(epoch: 11, iters: 288480, time: 0.530, data: 0.000) G_L1: 14.933 G_L1_ABSOLUTE: 2.707 G_L1_RELATIVE: 12.226 G_Regularizer: 0.000 validation_error: 21.162 +(epoch: 11, iters: 290480, time: 0.536, data: 0.000) G_L1: 17.147 G_L1_ABSOLUTE: 3.229 G_L1_RELATIVE: 13.917 G_Regularizer: 0.000 validation_error: 21.011 +(epoch: 11, iters: 292480, time: 0.525, data: 0.000) G_L1: 13.382 G_L1_ABSOLUTE: 2.441 G_L1_RELATIVE: 10.942 G_Regularizer: 0.000 validation_error: 20.436 +(epoch: 11, iters: 294480, time: 0.524, data: 0.000) G_L1: 19.117 G_L1_ABSOLUTE: 2.744 G_L1_RELATIVE: 16.372 G_Regularizer: 0.000 validation_error: 20.534 +(epoch: 11, iters: 296480, time: 0.529, data: 0.000) G_L1: 14.440 G_L1_ABSOLUTE: 3.084 G_L1_RELATIVE: 11.356 G_Regularizer: 0.000 validation_error: 20.279 +(epoch: 11, iters: 298480, time: 0.531, data: 0.000) G_L1: 14.145 G_L1_ABSOLUTE: 2.807 G_L1_RELATIVE: 11.338 G_Regularizer: 0.000 validation_error: 21.221 +(epoch: 11, iters: 300480, time: 0.530, data: 0.000) G_L1: 16.273 G_L1_ABSOLUTE: 2.911 G_L1_RELATIVE: 13.362 G_Regularizer: 0.000 validation_error: 20.766 +(epoch: 11, iters: 302480, time: 0.534, data: 0.000) G_L1: 15.915 G_L1_ABSOLUTE: 2.633 G_L1_RELATIVE: 13.282 G_Regularizer: 0.000 validation_error: 20.481 +(epoch: 12, iters: 1728, time: 0.522, data: 0.000) G_L1: 15.724 G_L1_ABSOLUTE: 2.513 G_L1_RELATIVE: 13.210 G_Regularizer: 0.000 validation_error: 21.135 +(epoch: 12, iters: 3728, time: 0.532, data: 0.000) 
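Since every entry follows the same fixed `key: value` layout, the log can be checked or plotted mechanically. Below is a minimal parsing sketch; the `loss_log.txt` file name and the helper itself are illustrative assumptions, not tooling from this repository. It also verifies the invariant visible above, namely that G_L1 equals G_L1_ABSOLUTE plus G_L1_RELATIVE:

```python
import re

# One entry per line, e.g.:
#   (epoch: 9, iters: 45984, time: 0.965, data: 0.000) G_L1: 17.597 ... validation_error: 20.175
# Using search() rather than match() skips the leading "+" that the diff adds to each line.
ENTRY = re.compile(
    r"\(epoch: (?P<epoch>\d+), iters: (?P<iters>\d+), "
    r"time: (?P<time>[\d.]+), data: (?P<data>[\d.]+)\) (?P<losses>.*)"
)
LOSS = re.compile(r"(\w+): ([\d.]+)")  # picks up every "NAME: value" pair after the header

def parse_log(path="loss_log.txt"):  # hypothetical file name
    """Return one dict per log entry, keyed by loss name plus epoch/iters."""
    entries = []
    with open(path) as fh:
        for line in fh:
            m = ENTRY.search(line)
            if not m:
                continue
            rec = {k: float(v) for k, v in LOSS.findall(m.group("losses"))}
            rec["epoch"] = int(m.group("epoch"))
            rec["iters"] = int(m.group("iters"))
            entries.append(rec)
    return entries

if __name__ == "__main__":
    entries = parse_log()
    for e in entries:
        # Sanity check implied by the entries above: G_L1 is the sum of its two components
        # (tolerance covers the 3-decimal rounding in the log).
        assert abs(e["G_L1"] - (e["G_L1_ABSOLUTE"] + e["G_L1_RELATIVE"])) < 1e-2
    print(f"{len(entries)} entries, "
          f"last validation_error: {entries[-1]['validation_error']:.3f}")
```

Splitting the work across two regexes keeps the sketch tolerant of extra loss terms: any new `NAME: value` column appended to the log would be picked up without changing the parser.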
G_L1: 17.741 G_L1_ABSOLUTE: 2.722 G_L1_RELATIVE: 15.019 G_Regularizer: 0.000 validation_error: 19.812 +(epoch: 12, iters: 5728, time: 0.535, data: 0.000) G_L1: 12.924 G_L1_ABSOLUTE: 2.351 G_L1_RELATIVE: 10.573 G_Regularizer: 0.000 validation_error: 20.099 +(epoch: 12, iters: 7728, time: 0.525, data: 0.000) G_L1: 14.210 G_L1_ABSOLUTE: 2.361 G_L1_RELATIVE: 11.850 G_Regularizer: 0.000 validation_error: 20.962 +(epoch: 12, iters: 9728, time: 0.527, data: 0.000) G_L1: 16.350 G_L1_ABSOLUTE: 2.480 G_L1_RELATIVE: 13.870 G_Regularizer: 0.000 validation_error: 21.565 +(epoch: 12, iters: 11728, time: 0.529, data: 0.000) G_L1: 13.257 G_L1_ABSOLUTE: 2.637 G_L1_RELATIVE: 10.620 G_Regularizer: 0.000 validation_error: 20.657 +(epoch: 12, iters: 13728, time: 0.533, data: 0.000) G_L1: 15.167 G_L1_ABSOLUTE: 2.471 G_L1_RELATIVE: 12.696 G_Regularizer: 0.000 validation_error: 22.384 +(epoch: 12, iters: 15728, time: 0.530, data: 0.000) G_L1: 13.293 G_L1_ABSOLUTE: 2.632 G_L1_RELATIVE: 10.660 G_Regularizer: 0.000 validation_error: 20.582 +(epoch: 12, iters: 17728, time: 0.526, data: 0.000) G_L1: 14.108 G_L1_ABSOLUTE: 2.909 G_L1_RELATIVE: 11.200 G_Regularizer: 0.000 validation_error: 20.206 +(epoch: 12, iters: 19728, time: 0.521, data: 0.000) G_L1: 15.960 G_L1_ABSOLUTE: 2.685 G_L1_RELATIVE: 13.274 G_Regularizer: 0.000 validation_error: 20.835 +(epoch: 12, iters: 21728, time: 0.523, data: 0.000) G_L1: 13.859 G_L1_ABSOLUTE: 2.388 G_L1_RELATIVE: 11.471 G_Regularizer: 0.000 validation_error: 20.943 +(epoch: 12, iters: 23728, time: 0.527, data: 0.000) G_L1: 14.965 G_L1_ABSOLUTE: 2.251 G_L1_RELATIVE: 12.714 G_Regularizer: 0.000 validation_error: 20.772 +(epoch: 12, iters: 25728, time: 0.532, data: 0.000) G_L1: 13.731 G_L1_ABSOLUTE: 2.560 G_L1_RELATIVE: 11.171 G_Regularizer: 0.000 validation_error: 20.535 +(epoch: 12, iters: 27728, time: 0.522, data: 0.000) G_L1: 15.238 G_L1_ABSOLUTE: 2.664 G_L1_RELATIVE: 12.575 G_Regularizer: 0.000 validation_error: 20.522 +(epoch: 12, iters: 29728, time: 0.523, data: 0.000) G_L1: 16.831 G_L1_ABSOLUTE: 2.886 G_L1_RELATIVE: 13.945 G_Regularizer: 0.000 validation_error: 20.735 +(epoch: 12, iters: 31728, time: 0.529, data: 0.000) G_L1: 14.177 G_L1_ABSOLUTE: 2.996 G_L1_RELATIVE: 11.181 G_Regularizer: 0.000 validation_error: 20.897 +(epoch: 12, iters: 33728, time: 0.527, data: 0.000) G_L1: 18.937 G_L1_ABSOLUTE: 2.977 G_L1_RELATIVE: 15.960 G_Regularizer: 0.000 validation_error: 21.125 +(epoch: 12, iters: 35728, time: 0.527, data: 0.000) G_L1: 14.674 G_L1_ABSOLUTE: 2.533 G_L1_RELATIVE: 12.141 G_Regularizer: 0.000 validation_error: 21.017 +(epoch: 12, iters: 37728, time: 0.522, data: 0.000) G_L1: 17.610 G_L1_ABSOLUTE: 2.907 G_L1_RELATIVE: 14.703 G_Regularizer: 0.000 validation_error: 20.417 +(epoch: 12, iters: 39728, time: 0.525, data: 0.001) G_L1: 15.945 G_L1_ABSOLUTE: 2.330 G_L1_RELATIVE: 13.615 G_Regularizer: 0.000 validation_error: 19.909 +(epoch: 12, iters: 41728, time: 0.523, data: 0.000) G_L1: 17.535 G_L1_ABSOLUTE: 2.929 G_L1_RELATIVE: 14.606 G_Regularizer: 0.000 validation_error: 20.520 +(epoch: 12, iters: 43728, time: 0.529, data: 0.000) G_L1: 16.762 G_L1_ABSOLUTE: 2.672 G_L1_RELATIVE: 14.090 G_Regularizer: 0.000 validation_error: 21.000 +(epoch: 12, iters: 45728, time: 0.533, data: 0.000) G_L1: 16.834 G_L1_ABSOLUTE: 2.582 G_L1_RELATIVE: 14.252 G_Regularizer: 0.000 validation_error: 20.778 +(epoch: 12, iters: 47728, time: 0.530, data: 0.000) G_L1: 17.534 G_L1_ABSOLUTE: 2.719 G_L1_RELATIVE: 14.815 G_Regularizer: 0.000 validation_error: 20.067 +(epoch: 12, iters: 49728, time: 0.529, data: 
0.000) G_L1: 15.151 G_L1_ABSOLUTE: 2.385 G_L1_RELATIVE: 12.765 G_Regularizer: 0.000 validation_error: 20.490 +(epoch: 12, iters: 51728, time: 0.526, data: 0.000) G_L1: 14.129 G_L1_ABSOLUTE: 2.848 G_L1_RELATIVE: 11.281 G_Regularizer: 0.000 validation_error: 20.452 +(epoch: 12, iters: 53728, time: 0.528, data: 0.000) G_L1: 14.174 G_L1_ABSOLUTE: 2.601 G_L1_RELATIVE: 11.572 G_Regularizer: 0.000 validation_error: 20.471 +(epoch: 12, iters: 55728, time: 0.528, data: 0.000) G_L1: 17.472 G_L1_ABSOLUTE: 2.875 G_L1_RELATIVE: 14.597 G_Regularizer: 0.000 validation_error: 21.149 +(epoch: 12, iters: 57728, time: 0.533, data: 0.000) G_L1: 16.179 G_L1_ABSOLUTE: 2.851 G_L1_RELATIVE: 13.328 G_Regularizer: 0.000 validation_error: 21.284 +(epoch: 12, iters: 59728, time: 0.534, data: 0.000) G_L1: 18.631 G_L1_ABSOLUTE: 2.775 G_L1_RELATIVE: 15.856 G_Regularizer: 0.000 validation_error: 20.998 +(epoch: 12, iters: 61728, time: 0.528, data: 0.000) G_L1: 14.799 G_L1_ABSOLUTE: 3.099 G_L1_RELATIVE: 11.700 G_Regularizer: 0.000 validation_error: 19.772 +(epoch: 12, iters: 63728, time: 0.525, data: 0.000) G_L1: 14.629 G_L1_ABSOLUTE: 2.606 G_L1_RELATIVE: 12.023 G_Regularizer: 0.000 validation_error: 21.674 +(epoch: 12, iters: 65728, time: 0.526, data: 0.001) G_L1: 15.409 G_L1_ABSOLUTE: 2.612 G_L1_RELATIVE: 12.797 G_Regularizer: 0.000 validation_error: 20.369 +(epoch: 12, iters: 67728, time: 0.528, data: 0.000) G_L1: 13.566 G_L1_ABSOLUTE: 2.931 G_L1_RELATIVE: 10.635 G_Regularizer: 0.000 validation_error: 20.993 +(epoch: 12, iters: 69728, time: 0.530, data: 0.000) G_L1: 15.074 G_L1_ABSOLUTE: 3.131 G_L1_RELATIVE: 11.943 G_Regularizer: 0.000 validation_error: 20.041 +(epoch: 12, iters: 71728, time: 0.526, data: 0.000) G_L1: 13.059 G_L1_ABSOLUTE: 2.664 G_L1_RELATIVE: 10.395 G_Regularizer: 0.000 validation_error: 20.370 +(epoch: 12, iters: 73728, time: 0.524, data: 0.000) G_L1: 12.749 G_L1_ABSOLUTE: 2.412 G_L1_RELATIVE: 10.337 G_Regularizer: 0.000 validation_error: 20.185 +(epoch: 12, iters: 75728, time: 0.527, data: 0.000) G_L1: 15.351 G_L1_ABSOLUTE: 2.779 G_L1_RELATIVE: 12.572 G_Regularizer: 0.000 validation_error: 20.555 +(epoch: 12, iters: 77728, time: 0.527, data: 0.000) G_L1: 13.469 G_L1_ABSOLUTE: 2.444 G_L1_RELATIVE: 11.025 G_Regularizer: 0.000 validation_error: 20.747 +(epoch: 12, iters: 79728, time: 0.530, data: 0.001) G_L1: 17.070 G_L1_ABSOLUTE: 2.727 G_L1_RELATIVE: 14.342 G_Regularizer: 0.000 validation_error: 20.950 +(epoch: 12, iters: 81728, time: 0.527, data: 0.000) G_L1: 17.607 G_L1_ABSOLUTE: 2.362 G_L1_RELATIVE: 15.244 G_Regularizer: 0.000 validation_error: 20.839 +(epoch: 12, iters: 83728, time: 0.527, data: 0.000) G_L1: 16.949 G_L1_ABSOLUTE: 2.584 G_L1_RELATIVE: 14.365 G_Regularizer: 0.000 validation_error: 20.403 +(epoch: 12, iters: 85728, time: 0.531, data: 0.000) G_L1: 15.851 G_L1_ABSOLUTE: 2.624 G_L1_RELATIVE: 13.227 G_Regularizer: 0.000 validation_error: 21.052 +(epoch: 12, iters: 87728, time: 0.527, data: 0.000) G_L1: 16.581 G_L1_ABSOLUTE: 2.746 G_L1_RELATIVE: 13.835 G_Regularizer: 0.000 validation_error: 20.655 +(epoch: 12, iters: 89728, time: 0.529, data: 0.000) G_L1: 13.167 G_L1_ABSOLUTE: 2.706 G_L1_RELATIVE: 10.461 G_Regularizer: 0.000 validation_error: 20.096 +(epoch: 12, iters: 91728, time: 0.532, data: 0.000) G_L1: 15.076 G_L1_ABSOLUTE: 2.414 G_L1_RELATIVE: 12.662 G_Regularizer: 0.000 validation_error: 21.315 +(epoch: 12, iters: 93728, time: 0.526, data: 0.000) G_L1: 17.040 G_L1_ABSOLUTE: 2.646 G_L1_RELATIVE: 14.394 G_Regularizer: 0.000 validation_error: 21.336 +(epoch: 12, iters: 95728, time: 
0.526, data: 0.000) G_L1: 12.777 G_L1_ABSOLUTE: 2.482 G_L1_RELATIVE: 10.295 G_Regularizer: 0.000 validation_error: 20.283 +(epoch: 12, iters: 97728, time: 0.529, data: 0.000) G_L1: 19.675 G_L1_ABSOLUTE: 3.133 G_L1_RELATIVE: 16.542 G_Regularizer: 0.000 validation_error: 20.522 +(epoch: 12, iters: 99728, time: 0.531, data: 0.000) G_L1: 18.210 G_L1_ABSOLUTE: 3.050 G_L1_RELATIVE: 15.160 G_Regularizer: 0.000 validation_error: 21.378 +(epoch: 12, iters: 101728, time: 0.528, data: 0.000) G_L1: 13.621 G_L1_ABSOLUTE: 2.500 G_L1_RELATIVE: 11.121 G_Regularizer: 0.000 validation_error: 20.751 +(epoch: 12, iters: 103728, time: 0.527, data: 0.000) G_L1: 14.532 G_L1_ABSOLUTE: 2.985 G_L1_RELATIVE: 11.547 G_Regularizer: 0.000 validation_error: 20.692 +(epoch: 12, iters: 105728, time: 0.526, data: 0.000) G_L1: 16.584 G_L1_ABSOLUTE: 2.626 G_L1_RELATIVE: 13.958 G_Regularizer: 0.000 validation_error: 21.020 +(epoch: 12, iters: 107728, time: 0.526, data: 0.000) G_L1: 15.125 G_L1_ABSOLUTE: 2.628 G_L1_RELATIVE: 12.497 G_Regularizer: 0.000 validation_error: 21.064 +(epoch: 12, iters: 109728, time: 0.526, data: 0.000) G_L1: 13.830 G_L1_ABSOLUTE: 2.643 G_L1_RELATIVE: 11.187 G_Regularizer: 0.000 validation_error: 20.117 +(epoch: 12, iters: 111728, time: 0.528, data: 0.000) G_L1: 14.338 G_L1_ABSOLUTE: 2.573 G_L1_RELATIVE: 11.765 G_Regularizer: 0.000 validation_error: 19.777 +(epoch: 12, iters: 113728, time: 0.529, data: 0.000) G_L1: 15.024 G_L1_ABSOLUTE: 2.434 G_L1_RELATIVE: 12.590 G_Regularizer: 0.000 validation_error: 20.720 +(epoch: 12, iters: 115728, time: 0.523, data: 0.000) G_L1: 14.143 G_L1_ABSOLUTE: 2.887 G_L1_RELATIVE: 11.256 G_Regularizer: 0.000 validation_error: 20.357 +(epoch: 12, iters: 117728, time: 0.531, data: 0.000) G_L1: 13.189 G_L1_ABSOLUTE: 2.657 G_L1_RELATIVE: 10.531 G_Regularizer: 0.000 validation_error: 20.949 +(epoch: 12, iters: 119728, time: 0.527, data: 0.000) G_L1: 15.810 G_L1_ABSOLUTE: 2.939 G_L1_RELATIVE: 12.871 G_Regularizer: 0.000 validation_error: 20.843 +(epoch: 12, iters: 121728, time: 0.527, data: 0.000) G_L1: 14.398 G_L1_ABSOLUTE: 2.257 G_L1_RELATIVE: 12.141 G_Regularizer: 0.000 validation_error: 20.695 +(epoch: 12, iters: 123728, time: 0.536, data: 0.000) G_L1: 18.415 G_L1_ABSOLUTE: 2.656 G_L1_RELATIVE: 15.760 G_Regularizer: 0.000 validation_error: 20.453 +(epoch: 12, iters: 125728, time: 0.525, data: 0.000) G_L1: 16.323 G_L1_ABSOLUTE: 2.793 G_L1_RELATIVE: 13.530 G_Regularizer: 0.000 validation_error: 20.003 +(epoch: 12, iters: 127728, time: 0.524, data: 0.000) G_L1: 17.923 G_L1_ABSOLUTE: 2.635 G_L1_RELATIVE: 15.289 G_Regularizer: 0.000 validation_error: 20.534 +(epoch: 12, iters: 129728, time: 0.529, data: 0.000) G_L1: 13.750 G_L1_ABSOLUTE: 2.822 G_L1_RELATIVE: 10.928 G_Regularizer: 0.000 validation_error: 20.490 +(epoch: 12, iters: 131728, time: 0.523, data: 0.000) G_L1: 12.818 G_L1_ABSOLUTE: 2.312 G_L1_RELATIVE: 10.506 G_Regularizer: 0.000 validation_error: 20.841 +(epoch: 12, iters: 133728, time: 0.527, data: 0.000) G_L1: 18.953 G_L1_ABSOLUTE: 3.169 G_L1_RELATIVE: 15.784 G_Regularizer: 0.000 validation_error: 20.982 +(epoch: 12, iters: 135728, time: 0.528, data: 0.000) G_L1: 16.005 G_L1_ABSOLUTE: 2.552 G_L1_RELATIVE: 13.453 G_Regularizer: 0.000 validation_error: 20.369 +(epoch: 12, iters: 137728, time: 0.526, data: 0.000) G_L1: 14.852 G_L1_ABSOLUTE: 2.664 G_L1_RELATIVE: 12.188 G_Regularizer: 0.000 validation_error: 20.990 +(epoch: 12, iters: 139728, time: 0.527, data: 0.000) G_L1: 14.041 G_L1_ABSOLUTE: 2.674 G_L1_RELATIVE: 11.367 G_Regularizer: 0.000 validation_error: 20.450 
+(epoch: 12, iters: 141728, time: 0.524, data: 0.000) G_L1: 15.628 G_L1_ABSOLUTE: 3.054 G_L1_RELATIVE: 12.574 G_Regularizer: 0.000 validation_error: 20.988 +(epoch: 12, iters: 143728, time: 0.532, data: 0.000) G_L1: 16.777 G_L1_ABSOLUTE: 2.605 G_L1_RELATIVE: 14.172 G_Regularizer: 0.000 validation_error: 19.954 +(epoch: 12, iters: 145728, time: 0.526, data: 0.000) G_L1: 16.162 G_L1_ABSOLUTE: 2.557 G_L1_RELATIVE: 13.605 G_Regularizer: 0.000 validation_error: 20.296 +(epoch: 12, iters: 147728, time: 0.531, data: 0.000) G_L1: 16.454 G_L1_ABSOLUTE: 2.851 G_L1_RELATIVE: 13.603 G_Regularizer: 0.000 validation_error: 20.313 +(epoch: 12, iters: 149728, time: 0.529, data: 0.000) G_L1: 15.254 G_L1_ABSOLUTE: 2.989 G_L1_RELATIVE: 12.265 G_Regularizer: 0.000 validation_error: 19.959 +(epoch: 12, iters: 151728, time: 0.529, data: 0.000) G_L1: 16.641 G_L1_ABSOLUTE: 3.272 G_L1_RELATIVE: 13.370 G_Regularizer: 0.000 validation_error: 21.006 +(epoch: 12, iters: 153728, time: 0.529, data: 0.000) G_L1: 15.600 G_L1_ABSOLUTE: 2.813 G_L1_RELATIVE: 12.787 G_Regularizer: 0.000 validation_error: 19.959 +(epoch: 12, iters: 155728, time: 0.528, data: 0.000) G_L1: 15.437 G_L1_ABSOLUTE: 3.343 G_L1_RELATIVE: 12.094 G_Regularizer: 0.000 validation_error: 20.715 +(epoch: 12, iters: 157728, time: 0.525, data: 0.000) G_L1: 20.543 G_L1_ABSOLUTE: 2.935 G_L1_RELATIVE: 17.608 G_Regularizer: 0.000 validation_error: 20.566 +(epoch: 12, iters: 159728, time: 0.530, data: 0.000) G_L1: 14.562 G_L1_ABSOLUTE: 2.534 G_L1_RELATIVE: 12.028 G_Regularizer: 0.000 validation_error: 20.600 +(epoch: 12, iters: 161728, time: 0.528, data: 0.000) G_L1: 12.041 G_L1_ABSOLUTE: 2.456 G_L1_RELATIVE: 9.585 G_Regularizer: 0.000 validation_error: 20.802 +(epoch: 12, iters: 163728, time: 0.529, data: 0.000) G_L1: 14.755 G_L1_ABSOLUTE: 3.024 G_L1_RELATIVE: 11.731 G_Regularizer: 0.000 validation_error: 20.393 +(epoch: 12, iters: 165728, time: 0.542, data: 0.000) G_L1: 12.314 G_L1_ABSOLUTE: 2.372 G_L1_RELATIVE: 9.943 G_Regularizer: 0.000 validation_error: 20.781 +(epoch: 12, iters: 167728, time: 0.578, data: 0.000) G_L1: 16.751 G_L1_ABSOLUTE: 2.829 G_L1_RELATIVE: 13.922 G_Regularizer: 0.000 validation_error: 21.163 +(epoch: 12, iters: 169728, time: 0.586, data: 0.000) G_L1: 15.225 G_L1_ABSOLUTE: 2.528 G_L1_RELATIVE: 12.697 G_Regularizer: 0.000 validation_error: 20.814 +(epoch: 12, iters: 171728, time: 0.577, data: 0.000) G_L1: 13.867 G_L1_ABSOLUTE: 2.657 G_L1_RELATIVE: 11.210 G_Regularizer: 0.000 validation_error: 20.297 +(epoch: 12, iters: 173728, time: 0.571, data: 0.000) G_L1: 15.300 G_L1_ABSOLUTE: 2.825 G_L1_RELATIVE: 12.475 G_Regularizer: 0.000 validation_error: 20.375 +(epoch: 12, iters: 175728, time: 0.570, data: 0.000) G_L1: 21.425 G_L1_ABSOLUTE: 2.535 G_L1_RELATIVE: 18.890 G_Regularizer: 0.000 validation_error: 20.098 +(epoch: 12, iters: 177728, time: 0.584, data: 0.000) G_L1: 13.583 G_L1_ABSOLUTE: 2.610 G_L1_RELATIVE: 10.974 G_Regularizer: 0.000 validation_error: 20.568 +(epoch: 12, iters: 179728, time: 0.580, data: 0.000) G_L1: 14.473 G_L1_ABSOLUTE: 2.358 G_L1_RELATIVE: 12.115 G_Regularizer: 0.000 validation_error: 20.723 +(epoch: 12, iters: 181728, time: 0.587, data: 0.000) G_L1: 16.857 G_L1_ABSOLUTE: 2.949 G_L1_RELATIVE: 13.908 G_Regularizer: 0.000 validation_error: 20.352 +(epoch: 12, iters: 183728, time: 0.586, data: 0.000) G_L1: 11.880 G_L1_ABSOLUTE: 2.180 G_L1_RELATIVE: 9.700 G_Regularizer: 0.000 validation_error: 20.911 +(epoch: 12, iters: 185728, time: 0.527, data: 0.000) G_L1: 16.344 G_L1_ABSOLUTE: 2.494 G_L1_RELATIVE: 13.850 G_Regularizer: 
0.000 validation_error: 20.724 +(epoch: 12, iters: 187728, time: 0.526, data: 0.000) G_L1: 13.986 G_L1_ABSOLUTE: 2.465 G_L1_RELATIVE: 11.522 G_Regularizer: 0.000 validation_error: 20.890 +(epoch: 12, iters: 189728, time: 0.525, data: 0.000) G_L1: 17.501 G_L1_ABSOLUTE: 3.148 G_L1_RELATIVE: 14.353 G_Regularizer: 0.000 validation_error: 21.521 +(epoch: 12, iters: 191728, time: 0.529, data: 0.000) G_L1: 13.334 G_L1_ABSOLUTE: 2.828 G_L1_RELATIVE: 10.505 G_Regularizer: 0.000 validation_error: 21.024 +(epoch: 12, iters: 193728, time: 0.526, data: 0.000) G_L1: 13.050 G_L1_ABSOLUTE: 2.528 G_L1_RELATIVE: 10.522 G_Regularizer: 0.000 validation_error: 20.246 +(epoch: 12, iters: 195728, time: 0.527, data: 0.000) G_L1: 15.878 G_L1_ABSOLUTE: 2.341 G_L1_RELATIVE: 13.538 G_Regularizer: 0.000 validation_error: 20.833 +(epoch: 12, iters: 197728, time: 0.527, data: 0.000) G_L1: 14.032 G_L1_ABSOLUTE: 2.179 G_L1_RELATIVE: 11.852 G_Regularizer: 0.000 validation_error: 20.655 +(epoch: 12, iters: 199728, time: 0.528, data: 0.000) G_L1: 15.836 G_L1_ABSOLUTE: 2.838 G_L1_RELATIVE: 12.998 G_Regularizer: 0.000 validation_error: 21.012 +(epoch: 12, iters: 201728, time: 0.526, data: 0.000) G_L1: 16.718 G_L1_ABSOLUTE: 2.281 G_L1_RELATIVE: 14.438 G_Regularizer: 0.000 validation_error: 20.926 +(epoch: 12, iters: 203728, time: 0.533, data: 0.000) G_L1: 17.392 G_L1_ABSOLUTE: 2.752 G_L1_RELATIVE: 14.640 G_Regularizer: 0.000 validation_error: 20.563 +(epoch: 12, iters: 205728, time: 0.529, data: 0.001) G_L1: 15.247 G_L1_ABSOLUTE: 2.769 G_L1_RELATIVE: 12.478 G_Regularizer: 0.000 validation_error: 21.035 +(epoch: 12, iters: 207728, time: 0.529, data: 0.000) G_L1: 15.448 G_L1_ABSOLUTE: 2.872 G_L1_RELATIVE: 12.576 G_Regularizer: 0.000 validation_error: 20.934 +(epoch: 12, iters: 209728, time: 0.537, data: 0.000) G_L1: 14.785 G_L1_ABSOLUTE: 2.614 G_L1_RELATIVE: 12.171 G_Regularizer: 0.000 validation_error: 20.838 +(epoch: 12, iters: 211728, time: 0.524, data: 0.000) G_L1: 17.222 G_L1_ABSOLUTE: 2.724 G_L1_RELATIVE: 14.498 G_Regularizer: 0.000 validation_error: 20.635 +(epoch: 12, iters: 213728, time: 0.532, data: 0.000) G_L1: 14.576 G_L1_ABSOLUTE: 2.595 G_L1_RELATIVE: 11.980 G_Regularizer: 0.000 validation_error: 19.973 +(epoch: 12, iters: 215728, time: 0.533, data: 0.000) G_L1: 16.782 G_L1_ABSOLUTE: 2.635 G_L1_RELATIVE: 14.147 G_Regularizer: 0.000 validation_error: 19.712 +(epoch: 12, iters: 217728, time: 0.528, data: 0.000) G_L1: 13.155 G_L1_ABSOLUTE: 2.285 G_L1_RELATIVE: 10.869 G_Regularizer: 0.000 validation_error: 20.357 +(epoch: 12, iters: 219728, time: 0.528, data: 0.000) G_L1: 14.675 G_L1_ABSOLUTE: 2.568 G_L1_RELATIVE: 12.107 G_Regularizer: 0.000 validation_error: 20.791 +(epoch: 12, iters: 221728, time: 0.527, data: 0.000) G_L1: 15.247 G_L1_ABSOLUTE: 2.784 G_L1_RELATIVE: 12.463 G_Regularizer: 0.000 validation_error: 21.062 +(epoch: 12, iters: 223728, time: 0.541, data: 0.000) G_L1: 15.439 G_L1_ABSOLUTE: 2.354 G_L1_RELATIVE: 13.085 G_Regularizer: 0.000 validation_error: 20.293 +(epoch: 12, iters: 225728, time: 0.544, data: 0.000) G_L1: 15.914 G_L1_ABSOLUTE: 2.332 G_L1_RELATIVE: 13.582 G_Regularizer: 0.000 validation_error: 20.887 +(epoch: 12, iters: 227728, time: 0.547, data: 0.000) G_L1: 14.425 G_L1_ABSOLUTE: 2.659 G_L1_RELATIVE: 11.766 G_Regularizer: 0.000 validation_error: 21.276 +(epoch: 12, iters: 229728, time: 0.555, data: 0.000) G_L1: 15.796 G_L1_ABSOLUTE: 2.877 G_L1_RELATIVE: 12.919 G_Regularizer: 0.000 validation_error: 20.359 +(epoch: 12, iters: 231728, time: 0.527, data: 0.000) G_L1: 13.090 G_L1_ABSOLUTE: 2.452 
G_L1_RELATIVE: 10.638 G_Regularizer: 0.000 validation_error: 19.841 +(epoch: 12, iters: 233728, time: 0.535, data: 0.000) G_L1: 14.464 G_L1_ABSOLUTE: 2.808 G_L1_RELATIVE: 11.656 G_Regularizer: 0.000 validation_error: 21.254 +(epoch: 12, iters: 235728, time: 0.554, data: 0.000) G_L1: 16.927 G_L1_ABSOLUTE: 2.803 G_L1_RELATIVE: 14.124 G_Regularizer: 0.000 validation_error: 20.020 +(epoch: 12, iters: 237728, time: 0.551, data: 0.000) G_L1: 18.946 G_L1_ABSOLUTE: 3.049 G_L1_RELATIVE: 15.897 G_Regularizer: 0.000 validation_error: 19.887 +(epoch: 12, iters: 239728, time: 0.544, data: 0.000) G_L1: 14.554 G_L1_ABSOLUTE: 2.433 G_L1_RELATIVE: 12.121 G_Regularizer: 0.000 validation_error: 20.766 +(epoch: 12, iters: 241728, time: 0.550, data: 0.000) G_L1: 14.955 G_L1_ABSOLUTE: 2.473 G_L1_RELATIVE: 12.482 G_Regularizer: 0.000 validation_error: 20.197 +(epoch: 12, iters: 243728, time: 0.556, data: 0.000) G_L1: 15.971 G_L1_ABSOLUTE: 2.989 G_L1_RELATIVE: 12.982 G_Regularizer: 0.000 validation_error: 20.488 +(epoch: 12, iters: 245728, time: 0.554, data: 0.000) G_L1: 17.786 G_L1_ABSOLUTE: 2.835 G_L1_RELATIVE: 14.951 G_Regularizer: 0.000 validation_error: 20.796 +(epoch: 12, iters: 247728, time: 0.559, data: 0.000) G_L1: 15.590 G_L1_ABSOLUTE: 2.716 G_L1_RELATIVE: 12.875 G_Regularizer: 0.000 validation_error: 20.715 +(epoch: 12, iters: 249728, time: 0.530, data: 0.001) G_L1: 15.817 G_L1_ABSOLUTE: 2.673 G_L1_RELATIVE: 13.144 G_Regularizer: 0.000 validation_error: 20.531 +(epoch: 12, iters: 251728, time: 0.545, data: 0.000) G_L1: 17.129 G_L1_ABSOLUTE: 2.779 G_L1_RELATIVE: 14.351 G_Regularizer: 0.000 validation_error: 20.710 +(epoch: 12, iters: 253728, time: 0.562, data: 0.000) G_L1: 15.595 G_L1_ABSOLUTE: 3.490 G_L1_RELATIVE: 12.105 G_Regularizer: 0.000 validation_error: 20.769 +(epoch: 12, iters: 255728, time: 0.554, data: 0.000) G_L1: 16.554 G_L1_ABSOLUTE: 2.666 G_L1_RELATIVE: 13.888 G_Regularizer: 0.000 validation_error: 20.803 +(epoch: 12, iters: 257728, time: 0.530, data: 0.000) G_L1: 14.065 G_L1_ABSOLUTE: 2.874 G_L1_RELATIVE: 11.191 G_Regularizer: 0.000 validation_error: 20.533 +(epoch: 12, iters: 259728, time: 0.559, data: 0.000) G_L1: 14.618 G_L1_ABSOLUTE: 2.899 G_L1_RELATIVE: 11.719 G_Regularizer: 0.000 validation_error: 21.422 +(epoch: 12, iters: 261728, time: 0.565, data: 0.000) G_L1: 14.307 G_L1_ABSOLUTE: 3.278 G_L1_RELATIVE: 11.029 G_Regularizer: 0.000 validation_error: 21.353 +(epoch: 12, iters: 263728, time: 0.556, data: 0.000) G_L1: 12.485 G_L1_ABSOLUTE: 2.318 G_L1_RELATIVE: 10.167 G_Regularizer: 0.000 validation_error: 20.369 +(epoch: 12, iters: 265728, time: 0.526, data: 0.000) G_L1: 14.852 G_L1_ABSOLUTE: 2.833 G_L1_RELATIVE: 12.019 G_Regularizer: 0.000 validation_error: 20.661 +(epoch: 12, iters: 267728, time: 0.532, data: 0.000) G_L1: 14.449 G_L1_ABSOLUTE: 2.676 G_L1_RELATIVE: 11.773 G_Regularizer: 0.000 validation_error: 20.399 +(epoch: 12, iters: 269728, time: 0.538, data: 0.000) G_L1: 14.787 G_L1_ABSOLUTE: 2.422 G_L1_RELATIVE: 12.365 G_Regularizer: 0.000 validation_error: 21.026 +(epoch: 12, iters: 271728, time: 0.550, data: 0.000) G_L1: 14.876 G_L1_ABSOLUTE: 2.736 G_L1_RELATIVE: 12.140 G_Regularizer: 0.000 validation_error: 20.831 +(epoch: 12, iters: 273728, time: 0.543, data: 0.001) G_L1: 14.125 G_L1_ABSOLUTE: 2.420 G_L1_RELATIVE: 11.704 G_Regularizer: 0.000 validation_error: 20.573 +(epoch: 12, iters: 275728, time: 0.542, data: 0.000) G_L1: 16.684 G_L1_ABSOLUTE: 3.195 G_L1_RELATIVE: 13.489 G_Regularizer: 0.000 validation_error: 20.960 +(epoch: 12, iters: 277728, time: 0.561, data: 0.000) 
G_L1: 16.461 G_L1_ABSOLUTE: 2.724 G_L1_RELATIVE: 13.737 G_Regularizer: 0.000 validation_error: 21.175 +(epoch: 12, iters: 279728, time: 0.537, data: 0.000) G_L1: 17.806 G_L1_ABSOLUTE: 2.990 G_L1_RELATIVE: 14.816 G_Regularizer: 0.000 validation_error: 21.170 +(epoch: 12, iters: 281728, time: 0.537, data: 0.000) G_L1: 14.342 G_L1_ABSOLUTE: 2.961 G_L1_RELATIVE: 11.381 G_Regularizer: 0.000 validation_error: 20.239 +(epoch: 12, iters: 283728, time: 0.532, data: 0.000) G_L1: 18.945 G_L1_ABSOLUTE: 2.579 G_L1_RELATIVE: 16.366 G_Regularizer: 0.000 validation_error: 20.480 +(epoch: 12, iters: 285728, time: 0.531, data: 0.000) G_L1: 12.938 G_L1_ABSOLUTE: 2.657 G_L1_RELATIVE: 10.281 G_Regularizer: 0.000 validation_error: 21.294 +(epoch: 12, iters: 287728, time: 0.547, data: 0.000) G_L1: 17.512 G_L1_ABSOLUTE: 2.974 G_L1_RELATIVE: 14.537 G_Regularizer: 0.000 validation_error: 20.894 +(epoch: 12, iters: 289728, time: 0.545, data: 0.000) G_L1: 16.963 G_L1_ABSOLUTE: 3.036 G_L1_RELATIVE: 13.927 G_Regularizer: 0.000 validation_error: 22.002 +(epoch: 12, iters: 291728, time: 0.540, data: 0.000) G_L1: 15.221 G_L1_ABSOLUTE: 2.525 G_L1_RELATIVE: 12.696 G_Regularizer: 0.000 validation_error: 20.624 +(epoch: 12, iters: 293728, time: 0.555, data: 0.001) G_L1: 14.782 G_L1_ABSOLUTE: 3.108 G_L1_RELATIVE: 11.674 G_Regularizer: 0.000 validation_error: 20.818 +(epoch: 12, iters: 295728, time: 0.554, data: 0.000) G_L1: 14.291 G_L1_ABSOLUTE: 2.499 G_L1_RELATIVE: 11.792 G_Regularizer: 0.000 validation_error: 20.754 +(epoch: 12, iters: 297728, time: 0.558, data: 0.000) G_L1: 17.145 G_L1_ABSOLUTE: 2.511 G_L1_RELATIVE: 14.634 G_Regularizer: 0.000 validation_error: 20.736 +(epoch: 12, iters: 299728, time: 0.549, data: 0.000) G_L1: 17.186 G_L1_ABSOLUTE: 2.870 G_L1_RELATIVE: 14.316 G_Regularizer: 0.000 validation_error: 20.579 +(epoch: 12, iters: 301728, time: 0.561, data: 0.000) G_L1: 13.919 G_L1_ABSOLUTE: 2.747 G_L1_RELATIVE: 11.172 G_Regularizer: 0.000 validation_error: 20.581 +(epoch: 13, iters: 976, time: 0.544, data: 0.000) G_L1: 16.858 G_L1_ABSOLUTE: 3.082 G_L1_RELATIVE: 13.777 G_Regularizer: 0.000 validation_error: 20.114 +(epoch: 13, iters: 2976, time: 0.562, data: 0.000) G_L1: 15.040 G_L1_ABSOLUTE: 2.553 G_L1_RELATIVE: 12.487 G_Regularizer: 0.000 validation_error: 20.042 +(epoch: 13, iters: 4976, time: 0.540, data: 0.000) G_L1: 15.250 G_L1_ABSOLUTE: 3.119 G_L1_RELATIVE: 12.131 G_Regularizer: 0.000 validation_error: 21.293 +(epoch: 13, iters: 6976, time: 0.554, data: 0.000) G_L1: 16.756 G_L1_ABSOLUTE: 2.657 G_L1_RELATIVE: 14.099 G_Regularizer: 0.000 validation_error: 20.715 +(epoch: 13, iters: 8976, time: 0.549, data: 0.000) G_L1: 15.237 G_L1_ABSOLUTE: 2.591 G_L1_RELATIVE: 12.647 G_Regularizer: 0.000 validation_error: 20.301 +(epoch: 13, iters: 10976, time: 0.550, data: 0.000) G_L1: 19.038 G_L1_ABSOLUTE: 3.167 G_L1_RELATIVE: 15.871 G_Regularizer: 0.000 validation_error: 20.470 +(epoch: 13, iters: 12976, time: 0.541, data: 0.000) G_L1: 17.155 G_L1_ABSOLUTE: 2.551 G_L1_RELATIVE: 14.604 G_Regularizer: 0.000 validation_error: 20.851 +(epoch: 13, iters: 14976, time: 0.549, data: 0.000) G_L1: 13.155 G_L1_ABSOLUTE: 2.163 G_L1_RELATIVE: 10.992 G_Regularizer: 0.000 validation_error: 20.383 +(epoch: 13, iters: 16976, time: 0.554, data: 0.000) G_L1: 12.324 G_L1_ABSOLUTE: 2.445 G_L1_RELATIVE: 9.879 G_Regularizer: 0.000 validation_error: 20.944 +(epoch: 13, iters: 18976, time: 0.555, data: 0.000) G_L1: 13.218 G_L1_ABSOLUTE: 2.453 G_L1_RELATIVE: 10.766 G_Regularizer: 0.000 validation_error: 21.233 +(epoch: 13, iters: 20976, time: 
0.548, data: 0.000) G_L1: 15.478 G_L1_ABSOLUTE: 2.890 G_L1_RELATIVE: 12.588 G_Regularizer: 0.000 validation_error: 20.501 +(epoch: 13, iters: 22976, time: 0.556, data: 0.000) G_L1: 15.290 G_L1_ABSOLUTE: 3.475 G_L1_RELATIVE: 11.815 G_Regularizer: 0.000 validation_error: 21.195 +(epoch: 13, iters: 24976, time: 0.561, data: 0.000) G_L1: 13.541 G_L1_ABSOLUTE: 2.797 G_L1_RELATIVE: 10.744 G_Regularizer: 0.000 validation_error: 20.109 +(epoch: 13, iters: 26976, time: 0.558, data: 0.000) G_L1: 13.602 G_L1_ABSOLUTE: 2.473 G_L1_RELATIVE: 11.129 G_Regularizer: 0.000 validation_error: 20.068 +(epoch: 13, iters: 28976, time: 0.526, data: 0.000) G_L1: 17.087 G_L1_ABSOLUTE: 2.764 G_L1_RELATIVE: 14.323 G_Regularizer: 0.000 validation_error: 20.833 +(epoch: 13, iters: 30976, time: 0.538, data: 0.000) G_L1: 15.520 G_L1_ABSOLUTE: 3.129 G_L1_RELATIVE: 12.391 G_Regularizer: 0.000 validation_error: 20.701 +(epoch: 13, iters: 32976, time: 0.556, data: 0.000) G_L1: 17.732 G_L1_ABSOLUTE: 2.746 G_L1_RELATIVE: 14.986 G_Regularizer: 0.000 validation_error: 21.526 +(epoch: 13, iters: 34976, time: 0.554, data: 0.000) G_L1: 14.897 G_L1_ABSOLUTE: 2.421 G_L1_RELATIVE: 12.477 G_Regularizer: 0.000 validation_error: 20.725 +(epoch: 13, iters: 36976, time: 0.564, data: 0.000) G_L1: 16.279 G_L1_ABSOLUTE: 2.770 G_L1_RELATIVE: 13.509 G_Regularizer: 0.000 validation_error: 21.424 +(epoch: 13, iters: 38976, time: 0.550, data: 0.001) G_L1: 16.573 G_L1_ABSOLUTE: 3.176 G_L1_RELATIVE: 13.397 G_Regularizer: 0.000 validation_error: 20.671 +(epoch: 13, iters: 40976, time: 0.549, data: 0.000) G_L1: 15.057 G_L1_ABSOLUTE: 2.589 G_L1_RELATIVE: 12.468 G_Regularizer: 0.000 validation_error: 20.276 +(epoch: 13, iters: 42976, time: 0.556, data: 0.000) G_L1: 15.123 G_L1_ABSOLUTE: 2.745 G_L1_RELATIVE: 12.377 G_Regularizer: 0.000 validation_error: 20.620 +(epoch: 13, iters: 44976, time: 0.556, data: 0.000) G_L1: 16.213 G_L1_ABSOLUTE: 2.662 G_L1_RELATIVE: 13.551 G_Regularizer: 0.000 validation_error: 20.859 +(epoch: 13, iters: 46976, time: 0.538, data: 0.000) G_L1: 12.090 G_L1_ABSOLUTE: 2.532 G_L1_RELATIVE: 9.558 G_Regularizer: 0.000 validation_error: 20.435 +(epoch: 13, iters: 48976, time: 0.531, data: 0.000) G_L1: 15.107 G_L1_ABSOLUTE: 2.581 G_L1_RELATIVE: 12.526 G_Regularizer: 0.000 validation_error: 20.920 +(epoch: 13, iters: 50976, time: 0.547, data: 0.000) G_L1: 16.637 G_L1_ABSOLUTE: 2.963 G_L1_RELATIVE: 13.675 G_Regularizer: 0.000 validation_error: 20.904 +(epoch: 13, iters: 52976, time: 0.551, data: 0.000) G_L1: 14.541 G_L1_ABSOLUTE: 2.298 G_L1_RELATIVE: 12.243 G_Regularizer: 0.000 validation_error: 20.662 +(epoch: 13, iters: 54976, time: 0.544, data: 0.001) G_L1: 16.875 G_L1_ABSOLUTE: 2.448 G_L1_RELATIVE: 14.427 G_Regularizer: 0.000 validation_error: 20.852 +(epoch: 13, iters: 56976, time: 0.537, data: 0.000) G_L1: 18.710 G_L1_ABSOLUTE: 3.124 G_L1_RELATIVE: 15.586 G_Regularizer: 0.000 validation_error: 20.432 +(epoch: 13, iters: 58976, time: 0.552, data: 0.000) G_L1: 17.448 G_L1_ABSOLUTE: 3.101 G_L1_RELATIVE: 14.347 G_Regularizer: 0.000 validation_error: 21.337 +(epoch: 13, iters: 60976, time: 0.555, data: 0.000) G_L1: 13.407 G_L1_ABSOLUTE: 2.411 G_L1_RELATIVE: 10.996 G_Regularizer: 0.000 validation_error: 20.581 +(epoch: 13, iters: 62976, time: 0.550, data: 0.000) G_L1: 16.745 G_L1_ABSOLUTE: 2.584 G_L1_RELATIVE: 14.161 G_Regularizer: 0.000 validation_error: 20.230 +(epoch: 13, iters: 64976, time: 0.540, data: 0.000) G_L1: 14.258 G_L1_ABSOLUTE: 2.364 G_L1_RELATIVE: 11.894 G_Regularizer: 0.000 validation_error: 20.502 +(epoch: 13, iters: 
66976, time: 0.530, data: 0.000) G_L1: 15.699 G_L1_ABSOLUTE: 2.821 G_L1_RELATIVE: 12.878 G_Regularizer: 0.000 validation_error: 20.006 +(epoch: 13, iters: 68976, time: 0.545, data: 0.000) G_L1: 13.681 G_L1_ABSOLUTE: 2.440 G_L1_RELATIVE: 11.241 G_Regularizer: 0.000 validation_error: 20.860 +(epoch: 13, iters: 70976, time: 0.539, data: 0.000) G_L1: 14.305 G_L1_ABSOLUTE: 2.851 G_L1_RELATIVE: 11.454 G_Regularizer: 0.000 validation_error: 20.435 +(epoch: 13, iters: 72976, time: 0.542, data: 0.000) G_L1: 13.469 G_L1_ABSOLUTE: 2.419 G_L1_RELATIVE: 11.050 G_Regularizer: 0.000 validation_error: 20.148 +(epoch: 13, iters: 74976, time: 0.543, data: 0.000) G_L1: 16.010 G_L1_ABSOLUTE: 2.335 G_L1_RELATIVE: 13.675 G_Regularizer: 0.000 validation_error: 20.880 +(epoch: 13, iters: 76976, time: 0.549, data: 0.000) G_L1: 14.021 G_L1_ABSOLUTE: 2.656 G_L1_RELATIVE: 11.366 G_Regularizer: 0.000 validation_error: 19.921 +(epoch: 13, iters: 78976, time: 0.554, data: 0.000) G_L1: 16.178 G_L1_ABSOLUTE: 2.365 G_L1_RELATIVE: 13.814 G_Regularizer: 0.000 validation_error: 20.473 +(epoch: 13, iters: 80976, time: 0.540, data: 0.000) G_L1: 14.161 G_L1_ABSOLUTE: 2.135 G_L1_RELATIVE: 12.027 G_Regularizer: 0.000 validation_error: 20.833 +(epoch: 13, iters: 82976, time: 0.539, data: 0.000) G_L1: 14.157 G_L1_ABSOLUTE: 2.843 G_L1_RELATIVE: 11.314 G_Regularizer: 0.000 validation_error: 20.412 +(epoch: 13, iters: 84976, time: 0.558, data: 0.000) G_L1: 16.261 G_L1_ABSOLUTE: 2.659 G_L1_RELATIVE: 13.602 G_Regularizer: 0.000 validation_error: 20.473 +(epoch: 13, iters: 86976, time: 0.555, data: 0.001) G_L1: 18.562 G_L1_ABSOLUTE: 2.332 G_L1_RELATIVE: 16.230 G_Regularizer: 0.000 validation_error: 21.262 +(epoch: 13, iters: 88976, time: 0.531, data: 0.000) G_L1: 13.676 G_L1_ABSOLUTE: 2.299 G_L1_RELATIVE: 11.377 G_Regularizer: 0.000 validation_error: 20.505 +(epoch: 13, iters: 90976, time: 0.552, data: 0.000) G_L1: 13.200 G_L1_ABSOLUTE: 2.987 G_L1_RELATIVE: 10.213 G_Regularizer: 0.000 validation_error: 20.622 +(epoch: 13, iters: 92976, time: 0.553, data: 0.000) G_L1: 14.376 G_L1_ABSOLUTE: 2.775 G_L1_RELATIVE: 11.601 G_Regularizer: 0.000 validation_error: 21.009 +(epoch: 13, iters: 94976, time: 0.554, data: 0.000) G_L1: 14.770 G_L1_ABSOLUTE: 2.562 G_L1_RELATIVE: 12.208 G_Regularizer: 0.000 validation_error: 20.313 +(epoch: 13, iters: 96976, time: 0.540, data: 0.000) G_L1: 15.290 G_L1_ABSOLUTE: 2.368 G_L1_RELATIVE: 12.922 G_Regularizer: 0.000 validation_error: 20.919 +(epoch: 13, iters: 98976, time: 0.535, data: 0.000) G_L1: 14.612 G_L1_ABSOLUTE: 2.939 G_L1_RELATIVE: 11.673 G_Regularizer: 0.000 validation_error: 20.456 +(epoch: 13, iters: 100976, time: 0.553, data: 0.000) G_L1: 17.365 G_L1_ABSOLUTE: 3.105 G_L1_RELATIVE: 14.260 G_Regularizer: 0.000 validation_error: 20.457 +(epoch: 13, iters: 102976, time: 0.551, data: 0.000) G_L1: 15.983 G_L1_ABSOLUTE: 3.114 G_L1_RELATIVE: 12.868 G_Regularizer: 0.000 validation_error: 20.552 +(epoch: 13, iters: 104976, time: 0.540, data: 0.000) G_L1: 12.562 G_L1_ABSOLUTE: 2.594 G_L1_RELATIVE: 9.968 G_Regularizer: 0.000 validation_error: 20.689 +(epoch: 13, iters: 106976, time: 0.550, data: 0.000) G_L1: 14.977 G_L1_ABSOLUTE: 2.884 G_L1_RELATIVE: 12.093 G_Regularizer: 0.000 validation_error: 20.986 +(epoch: 13, iters: 108976, time: 0.561, data: 0.001) G_L1: 15.194 G_L1_ABSOLUTE: 2.597 G_L1_RELATIVE: 12.597 G_Regularizer: 0.000 validation_error: 20.220 +(epoch: 13, iters: 110976, time: 0.556, data: 0.000) G_L1: 15.224 G_L1_ABSOLUTE: 2.526 G_L1_RELATIVE: 12.698 G_Regularizer: 0.000 validation_error: 20.510 
+(epoch: 13, iters: 112976, time: 0.554, data: 0.000) G_L1: 12.690 G_L1_ABSOLUTE: 2.413 G_L1_RELATIVE: 10.277 G_Regularizer: 0.000 validation_error: 20.942 +(epoch: 13, iters: 114976, time: 0.542, data: 0.000) G_L1: 19.616 G_L1_ABSOLUTE: 3.087 G_L1_RELATIVE: 16.529 G_Regularizer: 0.000 validation_error: 21.171 +(epoch: 13, iters: 116976, time: 0.553, data: 0.001) G_L1: 12.822 G_L1_ABSOLUTE: 2.315 G_L1_RELATIVE: 10.508 G_Regularizer: 0.000 validation_error: 20.274 +(epoch: 13, iters: 118976, time: 0.563, data: 0.000) G_L1: 17.822 G_L1_ABSOLUTE: 2.681 G_L1_RELATIVE: 15.141 G_Regularizer: 0.000 validation_error: 22.026 +(epoch: 13, iters: 120976, time: 0.562, data: 0.000) G_L1: 13.213 G_L1_ABSOLUTE: 2.549 G_L1_RELATIVE: 10.664 G_Regularizer: 0.000 validation_error: 20.834 +(epoch: 13, iters: 122976, time: 0.529, data: 0.000) G_L1: 15.571 G_L1_ABSOLUTE: 3.059 G_L1_RELATIVE: 12.512 G_Regularizer: 0.000 validation_error: 20.682 +(epoch: 13, iters: 124976, time: 0.533, data: 0.000) G_L1: 18.208 G_L1_ABSOLUTE: 2.408 G_L1_RELATIVE: 15.800 G_Regularizer: 0.000 validation_error: 21.545 +(epoch: 13, iters: 126976, time: 0.544, data: 0.000) G_L1: 14.760 G_L1_ABSOLUTE: 2.588 G_L1_RELATIVE: 12.172 G_Regularizer: 0.000 validation_error: 20.590 +(epoch: 13, iters: 128976, time: 0.562, data: 0.000) G_L1: 15.249 G_L1_ABSOLUTE: 2.510 G_L1_RELATIVE: 12.738 G_Regularizer: 0.000 validation_error: 20.782 +(epoch: 13, iters: 130976, time: 0.531, data: 0.000) G_L1: 15.656 G_L1_ABSOLUTE: 2.593 G_L1_RELATIVE: 13.063 G_Regularizer: 0.000 validation_error: 19.508 +(epoch: 13, iters: 132976, time: 0.543, data: 0.000) G_L1: 13.916 G_L1_ABSOLUTE: 2.509 G_L1_RELATIVE: 11.407 G_Regularizer: 0.000 validation_error: 20.651 +(epoch: 13, iters: 134976, time: 0.555, data: 0.000) G_L1: 13.957 G_L1_ABSOLUTE: 2.497 G_L1_RELATIVE: 11.460 G_Regularizer: 0.000 validation_error: 20.404 +(epoch: 13, iters: 136976, time: 0.554, data: 0.000) G_L1: 15.795 G_L1_ABSOLUTE: 2.290 G_L1_RELATIVE: 13.505 G_Regularizer: 0.000 validation_error: 20.251 +(epoch: 13, iters: 138976, time: 0.547, data: 0.000) G_L1: 14.870 G_L1_ABSOLUTE: 2.544 G_L1_RELATIVE: 12.327 G_Regularizer: 0.000 validation_error: 20.366 +(epoch: 13, iters: 140976, time: 0.550, data: 0.000) G_L1: 18.950 G_L1_ABSOLUTE: 3.091 G_L1_RELATIVE: 15.859 G_Regularizer: 0.000 validation_error: 21.057 +(epoch: 13, iters: 142976, time: 0.561, data: 0.000) G_L1: 15.685 G_L1_ABSOLUTE: 2.517 G_L1_RELATIVE: 13.168 G_Regularizer: 0.000 validation_error: 20.146 +(epoch: 13, iters: 144976, time: 0.552, data: 0.000) G_L1: 12.027 G_L1_ABSOLUTE: 2.339 G_L1_RELATIVE: 9.689 G_Regularizer: 0.000 validation_error: 20.240 +(epoch: 13, iters: 146976, time: 0.542, data: 0.000) G_L1: 15.488 G_L1_ABSOLUTE: 2.668 G_L1_RELATIVE: 12.819 G_Regularizer: 0.000 validation_error: 20.413 +(epoch: 13, iters: 148976, time: 0.560, data: 0.001) G_L1: 17.268 G_L1_ABSOLUTE: 2.956 G_L1_RELATIVE: 14.312 G_Regularizer: 0.000 validation_error: 20.271 +(epoch: 13, iters: 150976, time: 0.558, data: 0.000) G_L1: 16.290 G_L1_ABSOLUTE: 2.804 G_L1_RELATIVE: 13.486 G_Regularizer: 0.000 validation_error: 20.804 +(epoch: 13, iters: 152976, time: 0.563, data: 0.000) G_L1: 15.585 G_L1_ABSOLUTE: 2.929 G_L1_RELATIVE: 12.657 G_Regularizer: 0.000 validation_error: 20.342 +(epoch: 13, iters: 154976, time: 0.548, data: 0.000) G_L1: 15.288 G_L1_ABSOLUTE: 3.083 G_L1_RELATIVE: 12.205 G_Regularizer: 0.000 validation_error: 21.066 +(epoch: 13, iters: 156976, time: 0.535, data: 0.000) G_L1: 15.031 G_L1_ABSOLUTE: 2.307 G_L1_RELATIVE: 12.724 
G_Regularizer: 0.000 validation_error: 20.605 +(epoch: 13, iters: 158976, time: 0.560, data: 0.000) G_L1: 13.922 G_L1_ABSOLUTE: 2.702 G_L1_RELATIVE: 11.220 G_Regularizer: 0.000 validation_error: 20.862 +(epoch: 13, iters: 160976, time: 0.558, data: 0.000) G_L1: 14.386 G_L1_ABSOLUTE: 2.712 G_L1_RELATIVE: 11.674 G_Regularizer: 0.000 validation_error: 20.471 +(epoch: 13, iters: 162976, time: 0.553, data: 0.000) G_L1: 15.780 G_L1_ABSOLUTE: 2.932 G_L1_RELATIVE: 12.849 G_Regularizer: 0.000 validation_error: 20.530 +(epoch: 13, iters: 164976, time: 0.541, data: 0.000) G_L1: 17.217 G_L1_ABSOLUTE: 3.031 G_L1_RELATIVE: 14.186 G_Regularizer: 0.000 validation_error: 21.197 +(epoch: 13, iters: 166976, time: 0.532, data: 0.000) G_L1: 14.763 G_L1_ABSOLUTE: 2.820 G_L1_RELATIVE: 11.943 G_Regularizer: 0.000 validation_error: 20.441 +(epoch: 13, iters: 168976, time: 0.542, data: 0.000) G_L1: 12.840 G_L1_ABSOLUTE: 2.317 G_L1_RELATIVE: 10.523 G_Regularizer: 0.000 validation_error: 21.131 +(epoch: 13, iters: 170976, time: 0.532, data: 0.000) G_L1: 12.160 G_L1_ABSOLUTE: 2.677 G_L1_RELATIVE: 9.483 G_Regularizer: 0.000 validation_error: 20.751 +(epoch: 13, iters: 172976, time: 0.527, data: 0.000) G_L1: 15.002 G_L1_ABSOLUTE: 2.959 G_L1_RELATIVE: 12.044 G_Regularizer: 0.000 validation_error: 20.702 +(epoch: 13, iters: 174976, time: 0.544, data: 0.000) G_L1: 17.234 G_L1_ABSOLUTE: 2.703 G_L1_RELATIVE: 14.532 G_Regularizer: 0.000 validation_error: 20.904 +(epoch: 13, iters: 176976, time: 0.553, data: 0.000) G_L1: 15.465 G_L1_ABSOLUTE: 3.074 G_L1_RELATIVE: 12.391 G_Regularizer: 0.000 validation_error: 20.205 +(epoch: 13, iters: 178976, time: 0.558, data: 0.000) G_L1: 20.354 G_L1_ABSOLUTE: 3.295 G_L1_RELATIVE: 17.059 G_Regularizer: 0.000 validation_error: 20.416 +(epoch: 13, iters: 180976, time: 0.540, data: 0.000) G_L1: 13.678 G_L1_ABSOLUTE: 2.213 G_L1_RELATIVE: 11.465 G_Regularizer: 0.000 validation_error: 20.651 +(epoch: 13, iters: 182976, time: 0.548, data: 0.000) G_L1: 14.003 G_L1_ABSOLUTE: 2.626 G_L1_RELATIVE: 11.377 G_Regularizer: 0.000 validation_error: 20.396 +(epoch: 13, iters: 184976, time: 0.560, data: 0.000) G_L1: 13.682 G_L1_ABSOLUTE: 2.300 G_L1_RELATIVE: 11.382 G_Regularizer: 0.000 validation_error: 20.570 +(epoch: 13, iters: 186976, time: 0.558, data: 0.000) G_L1: 15.921 G_L1_ABSOLUTE: 2.462 G_L1_RELATIVE: 13.459 G_Regularizer: 0.000 validation_error: 20.711 +(epoch: 13, iters: 188976, time: 0.533, data: 0.000) G_L1: 14.549 G_L1_ABSOLUTE: 3.106 G_L1_RELATIVE: 11.443 G_Regularizer: 0.000 validation_error: 21.312 +(epoch: 13, iters: 190976, time: 0.549, data: 0.000) G_L1: 19.071 G_L1_ABSOLUTE: 3.293 G_L1_RELATIVE: 15.778 G_Regularizer: 0.000 validation_error: 20.925 +(epoch: 13, iters: 192976, time: 0.558, data: 0.000) G_L1: 12.730 G_L1_ABSOLUTE: 2.217 G_L1_RELATIVE: 10.513 G_Regularizer: 0.000 validation_error: 20.683 +(epoch: 13, iters: 194976, time: 0.563, data: 0.000) G_L1: 15.409 G_L1_ABSOLUTE: 2.370 G_L1_RELATIVE: 13.040 G_Regularizer: 0.000 validation_error: 20.395 +(epoch: 13, iters: 196976, time: 0.551, data: 0.000) G_L1: 11.787 G_L1_ABSOLUTE: 2.161 G_L1_RELATIVE: 9.626 G_Regularizer: 0.000 validation_error: 20.720 +(epoch: 13, iters: 198976, time: 0.554, data: 0.000) G_L1: 15.777 G_L1_ABSOLUTE: 3.023 G_L1_RELATIVE: 12.754 G_Regularizer: 0.000 validation_error: 20.496 +(epoch: 13, iters: 200976, time: 0.558, data: 0.000) G_L1: 15.247 G_L1_ABSOLUTE: 2.817 G_L1_RELATIVE: 12.430 G_Regularizer: 0.000 validation_error: 21.662 +(epoch: 13, iters: 202976, time: 0.555, data: 0.000) G_L1: 19.820 
G_L1_ABSOLUTE: 3.537 G_L1_RELATIVE: 16.283 G_Regularizer: 0.000 validation_error: 20.888 +(epoch: 13, iters: 204976, time: 0.552, data: 0.001) G_L1: 16.276 G_L1_ABSOLUTE: 2.706 G_L1_RELATIVE: 13.570 G_Regularizer: 0.000 validation_error: 20.453 +(epoch: 13, iters: 206976, time: 0.535, data: 0.000) G_L1: 14.404 G_L1_ABSOLUTE: 2.430 G_L1_RELATIVE: 11.974 G_Regularizer: 0.000 validation_error: 21.069 +(epoch: 13, iters: 208976, time: 0.528, data: 0.000) G_L1: 15.424 G_L1_ABSOLUTE: 2.781 G_L1_RELATIVE: 12.644 G_Regularizer: 0.000 validation_error: 20.246 +(epoch: 13, iters: 210976, time: 0.537, data: 0.001) G_L1: 13.346 G_L1_ABSOLUTE: 2.857 G_L1_RELATIVE: 10.489 G_Regularizer: 0.000 validation_error: 21.008 +(epoch: 13, iters: 212976, time: 0.559, data: 0.000) G_L1: 12.006 G_L1_ABSOLUTE: 2.509 G_L1_RELATIVE: 9.497 G_Regularizer: 0.000 validation_error: 20.972 +(epoch: 13, iters: 214976, time: 0.530, data: 0.000) G_L1: 14.671 G_L1_ABSOLUTE: 2.196 G_L1_RELATIVE: 12.475 G_Regularizer: 0.000 validation_error: 20.619 +(epoch: 13, iters: 216976, time: 0.553, data: 0.000) G_L1: 17.917 G_L1_ABSOLUTE: 3.110 G_L1_RELATIVE: 14.807 G_Regularizer: 0.000 validation_error: 20.803 +(epoch: 13, iters: 218976, time: 0.547, data: 0.000) G_L1: 15.271 G_L1_ABSOLUTE: 3.704 G_L1_RELATIVE: 11.568 G_Regularizer: 0.000 validation_error: 20.354 +(epoch: 13, iters: 220976, time: 0.561, data: 0.000) G_L1: 14.334 G_L1_ABSOLUTE: 2.335 G_L1_RELATIVE: 11.999 G_Regularizer: 0.000 validation_error: 21.103 +(epoch: 13, iters: 222976, time: 0.535, data: 0.000) G_L1: 15.187 G_L1_ABSOLUTE: 3.125 G_L1_RELATIVE: 12.062 G_Regularizer: 0.000 validation_error: 20.728 +(epoch: 13, iters: 224976, time: 0.559, data: 0.000) G_L1: 16.925 G_L1_ABSOLUTE: 3.393 G_L1_RELATIVE: 13.532 G_Regularizer: 0.000 validation_error: 20.367 +(epoch: 13, iters: 226976, time: 0.553, data: 0.000) G_L1: 15.152 G_L1_ABSOLUTE: 2.709 G_L1_RELATIVE: 12.443 G_Regularizer: 0.000 validation_error: 20.564 +(epoch: 13, iters: 228976, time: 0.551, data: 0.000) G_L1: 14.851 G_L1_ABSOLUTE: 2.810 G_L1_RELATIVE: 12.040 G_Regularizer: 0.000 validation_error: 20.986 +(epoch: 13, iters: 230976, time: 0.540, data: 0.000) G_L1: 19.207 G_L1_ABSOLUTE: 3.127 G_L1_RELATIVE: 16.080 G_Regularizer: 0.000 validation_error: 20.479 +(epoch: 13, iters: 232976, time: 0.544, data: 0.000) G_L1: 15.016 G_L1_ABSOLUTE: 2.878 G_L1_RELATIVE: 12.138 G_Regularizer: 0.000 validation_error: 20.984 +(epoch: 13, iters: 234976, time: 0.552, data: 0.001) G_L1: 14.377 G_L1_ABSOLUTE: 2.834 G_L1_RELATIVE: 11.543 G_Regularizer: 0.000 validation_error: 21.381 +(epoch: 13, iters: 236976, time: 0.552, data: 0.000) G_L1: 14.846 G_L1_ABSOLUTE: 2.729 G_L1_RELATIVE: 12.117 G_Regularizer: 0.000 validation_error: 20.464 +(epoch: 13, iters: 238976, time: 0.559, data: 0.000) G_L1: 15.752 G_L1_ABSOLUTE: 2.449 G_L1_RELATIVE: 13.303 G_Regularizer: 0.000 validation_error: 20.677 +(epoch: 13, iters: 240976, time: 0.552, data: 0.000) G_L1: 16.469 G_L1_ABSOLUTE: 3.031 G_L1_RELATIVE: 13.438 G_Regularizer: 0.000 validation_error: 20.025 +(epoch: 13, iters: 242976, time: 0.550, data: 0.000) G_L1: 15.704 G_L1_ABSOLUTE: 2.309 G_L1_RELATIVE: 13.395 G_Regularizer: 0.000 validation_error: 20.190 +(epoch: 13, iters: 244976, time: 0.555, data: 0.000) G_L1: 14.215 G_L1_ABSOLUTE: 2.459 G_L1_RELATIVE: 11.757 G_Regularizer: 0.000 validation_error: 20.452 +(epoch: 13, iters: 246976, time: 0.560, data: 0.000) G_L1: 14.174 G_L1_ABSOLUTE: 3.193 G_L1_RELATIVE: 10.982 G_Regularizer: 0.000 validation_error: 20.322 +(epoch: 13, iters: 248976, time: 
0.542, data: 0.000) G_L1: 14.630 G_L1_ABSOLUTE: 2.989 G_L1_RELATIVE: 11.641 G_Regularizer: 0.000 validation_error: 20.240 +(epoch: 13, iters: 250976, time: 0.547, data: 0.001) G_L1: 16.934 G_L1_ABSOLUTE: 2.794 G_L1_RELATIVE: 14.139 G_Regularizer: 0.000 validation_error: 20.842 +(epoch: 13, iters: 252976, time: 0.555, data: 0.000) G_L1: 17.732 G_L1_ABSOLUTE: 2.332 G_L1_RELATIVE: 15.401 G_Regularizer: 0.000 validation_error: 20.559 +(epoch: 13, iters: 254976, time: 0.565, data: 0.000) G_L1: 16.282 G_L1_ABSOLUTE: 2.964 G_L1_RELATIVE: 13.317 G_Regularizer: 0.000 validation_error: 20.568 +(epoch: 13, iters: 256976, time: 0.535, data: 0.000) G_L1: 15.360 G_L1_ABSOLUTE: 3.134 G_L1_RELATIVE: 12.226 G_Regularizer: 0.000 validation_error: 20.449 +(epoch: 13, iters: 258976, time: 0.531, data: 0.000) G_L1: 16.790 G_L1_ABSOLUTE: 2.637 G_L1_RELATIVE: 14.153 G_Regularizer: 0.000 validation_error: 20.742 +(epoch: 13, iters: 260976, time: 0.558, data: 0.001) G_L1: 13.683 G_L1_ABSOLUTE: 2.579 G_L1_RELATIVE: 11.105 G_Regularizer: 0.000 validation_error: 20.139 +(epoch: 13, iters: 262976, time: 0.557, data: 0.000) G_L1: 17.135 G_L1_ABSOLUTE: 2.128 G_L1_RELATIVE: 15.007 G_Regularizer: 0.000 validation_error: 20.347 +(epoch: 13, iters: 264976, time: 0.543, data: 0.000) G_L1: 18.879 G_L1_ABSOLUTE: 2.676 G_L1_RELATIVE: 16.203 G_Regularizer: 0.000 validation_error: 21.254 +(epoch: 13, iters: 266976, time: 0.534, data: 0.000) G_L1: 14.312 G_L1_ABSOLUTE: 2.873 G_L1_RELATIVE: 11.438 G_Regularizer: 0.000 validation_error: 20.760 +(epoch: 13, iters: 268976, time: 0.547, data: 0.001) G_L1: 17.528 G_L1_ABSOLUTE: 2.735 G_L1_RELATIVE: 14.792 G_Regularizer: 0.000 validation_error: 20.709 +(epoch: 13, iters: 270976, time: 0.557, data: 0.000) G_L1: 15.771 G_L1_ABSOLUTE: 2.601 G_L1_RELATIVE: 13.170 G_Regularizer: 0.000 validation_error: 20.679 +(epoch: 13, iters: 272976, time: 0.548, data: 0.000) G_L1: 17.449 G_L1_ABSOLUTE: 2.620 G_L1_RELATIVE: 14.829 G_Regularizer: 0.000 validation_error: 20.760 +(epoch: 13, iters: 274976, time: 0.552, data: 0.000) G_L1: 16.237 G_L1_ABSOLUTE: 2.641 G_L1_RELATIVE: 13.596 G_Regularizer: 0.000 validation_error: 20.163 +(epoch: 13, iters: 276976, time: 0.562, data: 0.000) G_L1: 16.604 G_L1_ABSOLUTE: 2.642 G_L1_RELATIVE: 13.963 G_Regularizer: 0.000 validation_error: 20.237 +(epoch: 13, iters: 278976, time: 0.549, data: 0.000) G_L1: 16.955 G_L1_ABSOLUTE: 2.614 G_L1_RELATIVE: 14.341 G_Regularizer: 0.000 validation_error: 21.085 +(epoch: 13, iters: 280976, time: 0.542, data: 0.000) G_L1: 16.656 G_L1_ABSOLUTE: 2.816 G_L1_RELATIVE: 13.839 G_Regularizer: 0.000 validation_error: 20.668 +(epoch: 13, iters: 282976, time: 0.533, data: 0.000) G_L1: 15.946 G_L1_ABSOLUTE: 2.557 G_L1_RELATIVE: 13.389 G_Regularizer: 0.000 validation_error: 20.876 +(epoch: 13, iters: 284976, time: 0.557, data: 0.000) G_L1: 16.758 G_L1_ABSOLUTE: 2.629 G_L1_RELATIVE: 14.129 G_Regularizer: 0.000 validation_error: 20.746 +(epoch: 13, iters: 286976, time: 0.549, data: 0.000) G_L1: 15.261 G_L1_ABSOLUTE: 2.632 G_L1_RELATIVE: 12.630 G_Regularizer: 0.000 validation_error: 21.089 +(epoch: 13, iters: 288976, time: 0.563, data: 0.000) G_L1: 13.659 G_L1_ABSOLUTE: 2.679 G_L1_RELATIVE: 10.980 G_Regularizer: 0.000 validation_error: 20.469 +(epoch: 13, iters: 290976, time: 0.531, data: 0.000) G_L1: 13.772 G_L1_ABSOLUTE: 2.601 G_L1_RELATIVE: 11.171 G_Regularizer: 0.000 validation_error: 21.097 +(epoch: 13, iters: 292976, time: 0.549, data: 0.000) G_L1: 12.840 G_L1_ABSOLUTE: 2.326 G_L1_RELATIVE: 10.514 G_Regularizer: 0.000 validation_error: 20.628 
+(epoch: 13, iters: 294976, time: 0.554, data: 0.000) G_L1: 15.227 G_L1_ABSOLUTE: 2.684 G_L1_RELATIVE: 12.543 G_Regularizer: 0.000 validation_error: 21.275 +(epoch: 13, iters: 296976, time: 0.553, data: 0.000) G_L1: 13.760 G_L1_ABSOLUTE: 2.628 G_L1_RELATIVE: 11.132 G_Regularizer: 0.000 validation_error: 20.595 +(epoch: 13, iters: 298976, time: 0.532, data: 0.001) G_L1: 13.696 G_L1_ABSOLUTE: 2.295 G_L1_RELATIVE: 11.401 G_Regularizer: 0.000 validation_error: 20.447 +(epoch: 13, iters: 300976, time: 0.538, data: 0.000) G_L1: 17.344 G_L1_ABSOLUTE: 3.016 G_L1_RELATIVE: 14.327 G_Regularizer: 0.000 validation_error: 20.968 +(epoch: 14, iters: 224, time: 0.532, data: 0.000) G_L1: 13.684 G_L1_ABSOLUTE: 2.511 G_L1_RELATIVE: 11.173 G_Regularizer: 0.000 validation_error: 20.417 +(epoch: 14, iters: 2224, time: 0.561, data: 0.000) G_L1: 14.627 G_L1_ABSOLUTE: 2.888 G_L1_RELATIVE: 11.739 G_Regularizer: 0.000 validation_error: 21.105 +(epoch: 14, iters: 4224, time: 0.542, data: 0.000) G_L1: 13.497 G_L1_ABSOLUTE: 2.343 G_L1_RELATIVE: 11.154 G_Regularizer: 0.000 validation_error: 20.458 +(epoch: 14, iters: 6224, time: 0.535, data: 0.000) G_L1: 13.816 G_L1_ABSOLUTE: 2.659 G_L1_RELATIVE: 11.157 G_Regularizer: 0.000 validation_error: 20.100 +(epoch: 14, iters: 8224, time: 0.558, data: 0.000) G_L1: 15.192 G_L1_ABSOLUTE: 2.650 G_L1_RELATIVE: 12.542 G_Regularizer: 0.000 validation_error: 21.564 +(epoch: 14, iters: 10224, time: 0.551, data: 0.000) G_L1: 15.258 G_L1_ABSOLUTE: 2.618 G_L1_RELATIVE: 12.640 G_Regularizer: 0.000 validation_error: 20.359 +(epoch: 14, iters: 12224, time: 0.551, data: 0.000) G_L1: 14.358 G_L1_ABSOLUTE: 2.842 G_L1_RELATIVE: 11.516 G_Regularizer: 0.000 validation_error: 20.897 +(epoch: 14, iters: 14224, time: 0.536, data: 0.000) G_L1: 13.070 G_L1_ABSOLUTE: 2.475 G_L1_RELATIVE: 10.595 G_Regularizer: 0.000 validation_error: 20.696 +(epoch: 14, iters: 16224, time: 0.561, data: 0.001) G_L1: 16.479 G_L1_ABSOLUTE: 2.830 G_L1_RELATIVE: 13.649 G_Regularizer: 0.000 validation_error: 20.582 +(epoch: 14, iters: 18224, time: 0.565, data: 0.000) G_L1: 16.760 G_L1_ABSOLUTE: 3.008 G_L1_RELATIVE: 13.753 G_Regularizer: 0.000 validation_error: 20.698 +(epoch: 14, iters: 20224, time: 0.548, data: 0.000) G_L1: 18.038 G_L1_ABSOLUTE: 3.077 G_L1_RELATIVE: 14.961 G_Regularizer: 0.000 validation_error: 20.699 +(epoch: 14, iters: 22224, time: 0.543, data: 0.000) G_L1: 15.946 G_L1_ABSOLUTE: 2.244 G_L1_RELATIVE: 13.703 G_Regularizer: 0.000 validation_error: 20.843 +(epoch: 14, iters: 24224, time: 0.554, data: 0.001) G_L1: 15.020 G_L1_ABSOLUTE: 2.939 G_L1_RELATIVE: 12.081 G_Regularizer: 0.000 validation_error: 21.037 +(epoch: 14, iters: 26224, time: 0.534, data: 0.000) G_L1: 14.694 G_L1_ABSOLUTE: 2.695 G_L1_RELATIVE: 12.000 G_Regularizer: 0.000 validation_error: 20.377 +(epoch: 14, iters: 28224, time: 0.561, data: 0.000) G_L1: 19.784 G_L1_ABSOLUTE: 3.016 G_L1_RELATIVE: 16.768 G_Regularizer: 0.000 validation_error: 20.063 +(epoch: 14, iters: 30224, time: 0.530, data: 0.000) G_L1: 14.794 G_L1_ABSOLUTE: 2.327 G_L1_RELATIVE: 12.467 G_Regularizer: 0.000 validation_error: 20.854 +(epoch: 14, iters: 32224, time: 0.549, data: 0.000) G_L1: 15.858 G_L1_ABSOLUTE: 2.976 G_L1_RELATIVE: 12.881 G_Regularizer: 0.000 validation_error: 20.735 +(epoch: 14, iters: 34224, time: 0.556, data: 0.000) G_L1: 14.560 G_L1_ABSOLUTE: 2.815 G_L1_RELATIVE: 11.745 G_Regularizer: 0.000 validation_error: 20.597 +(epoch: 14, iters: 36224, time: 0.559, data: 0.000) G_L1: 14.073 G_L1_ABSOLUTE: 2.445 G_L1_RELATIVE: 11.628 G_Regularizer: 0.000 
validation_error: 20.850 +(epoch: 14, iters: 38224, time: 0.541, data: 0.000) G_L1: 18.371 G_L1_ABSOLUTE: 3.390 G_L1_RELATIVE: 14.981 G_Regularizer: 0.000 validation_error: 20.252 +(epoch: 14, iters: 40224, time: 0.562, data: 0.000) G_L1: 16.922 G_L1_ABSOLUTE: 2.779 G_L1_RELATIVE: 14.143 G_Regularizer: 0.000 validation_error: 21.084 +(epoch: 14, iters: 42224, time: 0.557, data: 0.000) G_L1: 16.187 G_L1_ABSOLUTE: 2.554 G_L1_RELATIVE: 13.634 G_Regularizer: 0.000 validation_error: 20.217 +(epoch: 14, iters: 44224, time: 0.560, data: 0.000) G_L1: 14.905 G_L1_ABSOLUTE: 3.128 G_L1_RELATIVE: 11.777 G_Regularizer: 0.000 validation_error: 21.246 +(epoch: 14, iters: 46224, time: 0.537, data: 0.000) G_L1: 30.416 G_L1_ABSOLUTE: 2.586 G_L1_RELATIVE: 27.830 G_Regularizer: 0.000 validation_error: 20.581 +(epoch: 14, iters: 48224, time: 0.558, data: 0.000) G_L1: 14.523 G_L1_ABSOLUTE: 2.675 G_L1_RELATIVE: 11.848 G_Regularizer: 0.000 validation_error: 20.989 +(epoch: 14, iters: 50224, time: 0.556, data: 0.000) G_L1: 13.315 G_L1_ABSOLUTE: 2.341 G_L1_RELATIVE: 10.974 G_Regularizer: 0.000 validation_error: 20.577 +(epoch: 14, iters: 52224, time: 0.559, data: 0.000) G_L1: 15.785 G_L1_ABSOLUTE: 2.425 G_L1_RELATIVE: 13.360 G_Regularizer: 0.000 validation_error: 21.013 +(epoch: 14, iters: 54224, time: 0.544, data: 0.000) G_L1: 17.110 G_L1_ABSOLUTE: 3.062 G_L1_RELATIVE: 14.048 G_Regularizer: 0.000 validation_error: 21.070 +(epoch: 14, iters: 56224, time: 0.552, data: 0.000) G_L1: 14.542 G_L1_ABSOLUTE: 2.510 G_L1_RELATIVE: 12.032 G_Regularizer: 0.000 validation_error: 21.195 +(epoch: 14, iters: 58224, time: 0.562, data: 0.000) G_L1: 15.618 G_L1_ABSOLUTE: 2.733 G_L1_RELATIVE: 12.885 G_Regularizer: 0.000 validation_error: 21.401 +(epoch: 14, iters: 60224, time: 0.552, data: 0.000) G_L1: 14.941 G_L1_ABSOLUTE: 2.810 G_L1_RELATIVE: 12.131 G_Regularizer: 0.000 validation_error: 20.571 +(epoch: 14, iters: 62224, time: 0.549, data: 0.001) G_L1: 15.085 G_L1_ABSOLUTE: 2.569 G_L1_RELATIVE: 12.516 G_Regularizer: 0.000 validation_error: 20.368 +(epoch: 14, iters: 64224, time: 0.532, data: 0.000) G_L1: 12.951 G_L1_ABSOLUTE: 2.461 G_L1_RELATIVE: 10.490 G_Regularizer: 0.000 validation_error: 20.840 +(epoch: 14, iters: 66224, time: 0.546, data: 0.000) G_L1: 15.685 G_L1_ABSOLUTE: 2.564 G_L1_RELATIVE: 13.121 G_Regularizer: 0.000 validation_error: 20.866 +(epoch: 14, iters: 68224, time: 0.557, data: 0.000) G_L1: 14.376 G_L1_ABSOLUTE: 2.423 G_L1_RELATIVE: 11.954 G_Regularizer: 0.000 validation_error: 21.306 +(epoch: 14, iters: 70224, time: 0.567, data: 0.000) G_L1: 14.034 G_L1_ABSOLUTE: 2.706 G_L1_RELATIVE: 11.328 G_Regularizer: 0.000 validation_error: 20.702 +(epoch: 14, iters: 72224, time: 0.528, data: 0.000) G_L1: 12.247 G_L1_ABSOLUTE: 2.203 G_L1_RELATIVE: 10.043 G_Regularizer: 0.000 validation_error: 20.385 +(epoch: 14, iters: 74224, time: 0.531, data: 0.000) G_L1: 15.744 G_L1_ABSOLUTE: 2.821 G_L1_RELATIVE: 12.923 G_Regularizer: 0.000 validation_error: 21.137 +(epoch: 14, iters: 76224, time: 0.554, data: 0.000) G_L1: 16.642 G_L1_ABSOLUTE: 3.066 G_L1_RELATIVE: 13.575 G_Regularizer: 0.000 validation_error: 20.893 +(epoch: 14, iters: 78224, time: 0.558, data: 0.000) G_L1: 15.446 G_L1_ABSOLUTE: 2.734 G_L1_RELATIVE: 12.712 G_Regularizer: 0.000 validation_error: 21.316 +(epoch: 14, iters: 80224, time: 0.533, data: 0.000) G_L1: 11.685 G_L1_ABSOLUTE: 2.253 G_L1_RELATIVE: 9.432 G_Regularizer: 0.000 validation_error: 20.833 +(epoch: 14, iters: 82224, time: 0.536, data: 0.000) G_L1: 14.890 G_L1_ABSOLUTE: 2.411 G_L1_RELATIVE: 12.479 
G_Regularizer: 0.000 validation_error: 20.565
+(epoch: 14, iters: 84224, time: 0.548, data: 0.000) G_L1: 13.770 G_L1_ABSOLUTE: 2.394 G_L1_RELATIVE: 11.377 G_Regularizer: 0.000 validation_error: 20.631
[... several hundred further records for epochs 14 and 15 omitted here; every record has the exact shape shown above and is logged every 2000 training iterations. Across the omitted span G_Regularizer is 0.000 throughout, G_L1 is the sum of G_L1_ABSOLUTE and G_L1_RELATIVE (2.394 + 11.377 ≈ 13.770 above), and validation_error fluctuates between roughly 19.5 and 21.8 with no clear trend ...]
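Since every record in this log repeats the same fixed pattern, a short parser conveys its structure better than further raw pages. The following is a minimal sketch, not part of the diff: plain Python with no third-party dependencies, reading from a hypothetical loss_log.txt (the file's actual path is not shown in this excerpt), extracting each record's numeric fields, and printing a per-epoch mean of validation_error.

```python
import re

# Each record in the loss log has the shape:
#   (epoch: 14, iters: 84224, time: 0.548, data: 0.000) G_L1: 13.770
#   G_L1_ABSOLUTE: 2.394 G_L1_RELATIVE: 11.377 G_Regularizer: 0.000
#   validation_error: 20.631
# \s+ separators let the pattern tolerate records that wrap across lines.
RECORD = re.compile(
    r"\(epoch:\s*(?P<epoch>\d+),\s*iters:\s*(?P<iters>\d+),"
    r"\s*time:\s*(?P<time>[\d.]+),\s*data:\s*(?P<data>[\d.]+)\)"
    r"\s*G_L1:\s*(?P<G_L1>[\d.]+)"
    r"\s*G_L1_ABSOLUTE:\s*(?P<G_L1_ABSOLUTE>[\d.]+)"
    r"\s*G_L1_RELATIVE:\s*(?P<G_L1_RELATIVE>[\d.]+)"
    r"\s*G_Regularizer:\s*(?P<G_Regularizer>[\d.]+)"
    r"\s*validation_error:\s*(?P<validation_error>[\d.]+)"
)

def parse_loss_log(text):
    """Return one dict per record, with all fields converted to numbers."""
    records = []
    for m in RECORD.finditer(text):
        rec = {k: float(v) for k, v in m.groupdict().items()}
        rec["epoch"] = int(rec["epoch"])
        rec["iters"] = int(rec["iters"])
        records.append(rec)
    return records

if __name__ == "__main__":
    # "loss_log.txt" is a placeholder path, not one taken from this diff.
    with open("loss_log.txt") as fh:
        records = parse_loss_log(fh.read())
    by_epoch = {}
    for rec in records:
        by_epoch.setdefault(rec["epoch"], []).append(rec["validation_error"])
    for epoch in sorted(by_epoch):
        errs = by_epoch[epoch]
        print(f"epoch {epoch:3d}: {len(errs):4d} records, "
              f"mean validation_error = {sum(errs) / len(errs):.3f}")
```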
[... the log continues in the identical format through the remainder of epoch 15, all of epoch 16, and into epoch 17, with validation_error still fluctuating in the roughly 19.5-21.8 band ...]
+(epoch: 17, iters: 131968, time: 0.556, data: 0.000) G_L1: 15.260 G_L1_ABSOLUTE: 3.054 G_L1_RELATIVE: 12.206 G_Regularizer: 0.000
validation_error: 20.449 +(epoch: 17, iters: 133968, time: 0.565, data: 0.000) G_L1: 16.629 G_L1_ABSOLUTE: 2.715 G_L1_RELATIVE: 13.914 G_Regularizer: 0.000 validation_error: 21.062 +(epoch: 17, iters: 135968, time: 0.554, data: 0.001) G_L1: 15.744 G_L1_ABSOLUTE: 2.753 G_L1_RELATIVE: 12.991 G_Regularizer: 0.000 validation_error: 21.411 +(epoch: 17, iters: 137968, time: 0.545, data: 0.001) G_L1: 15.926 G_L1_ABSOLUTE: 2.805 G_L1_RELATIVE: 13.121 G_Regularizer: 0.000 validation_error: 20.982 +(epoch: 17, iters: 139968, time: 0.552, data: 0.000) G_L1: 15.891 G_L1_ABSOLUTE: 2.971 G_L1_RELATIVE: 12.919 G_Regularizer: 0.000 validation_error: 20.543 +(epoch: 17, iters: 141968, time: 0.562, data: 0.000) G_L1: 13.408 G_L1_ABSOLUTE: 2.770 G_L1_RELATIVE: 10.638 G_Regularizer: 0.000 validation_error: 20.457 +(epoch: 17, iters: 143968, time: 0.564, data: 0.000) G_L1: 14.026 G_L1_ABSOLUTE: 2.425 G_L1_RELATIVE: 11.601 G_Regularizer: 0.000 validation_error: 20.320 +(epoch: 17, iters: 145968, time: 0.548, data: 0.000) G_L1: 16.352 G_L1_ABSOLUTE: 2.402 G_L1_RELATIVE: 13.951 G_Regularizer: 0.000 validation_error: 20.968 +(epoch: 17, iters: 147968, time: 0.555, data: 0.000) G_L1: 13.706 G_L1_ABSOLUTE: 2.541 G_L1_RELATIVE: 11.165 G_Regularizer: 0.000 validation_error: 20.780 +(epoch: 17, iters: 149968, time: 0.548, data: 0.000) G_L1: 14.768 G_L1_ABSOLUTE: 2.574 G_L1_RELATIVE: 12.194 G_Regularizer: 0.000 validation_error: 20.698 +(epoch: 17, iters: 151968, time: 0.551, data: 0.000) G_L1: 14.084 G_L1_ABSOLUTE: 2.679 G_L1_RELATIVE: 11.405 G_Regularizer: 0.000 validation_error: 20.028 +(epoch: 17, iters: 153968, time: 0.560, data: 0.000) G_L1: 13.537 G_L1_ABSOLUTE: 2.460 G_L1_RELATIVE: 11.076 G_Regularizer: 0.000 validation_error: 20.499 +(epoch: 17, iters: 155968, time: 0.538, data: 0.000) G_L1: 15.545 G_L1_ABSOLUTE: 2.484 G_L1_RELATIVE: 13.061 G_Regularizer: 0.000 validation_error: 20.798 +(epoch: 17, iters: 157968, time: 0.555, data: 0.000) G_L1: 14.579 G_L1_ABSOLUTE: 2.288 G_L1_RELATIVE: 12.291 G_Regularizer: 0.000 validation_error: 21.051 +(epoch: 17, iters: 159968, time: 0.557, data: 0.000) G_L1: 15.402 G_L1_ABSOLUTE: 3.150 G_L1_RELATIVE: 12.252 G_Regularizer: 0.000 validation_error: 20.325 +(epoch: 17, iters: 161968, time: 0.558, data: 0.000) G_L1: 16.114 G_L1_ABSOLUTE: 2.646 G_L1_RELATIVE: 13.468 G_Regularizer: 0.000 validation_error: 20.220 +(epoch: 17, iters: 163968, time: 0.536, data: 0.001) G_L1: 16.297 G_L1_ABSOLUTE: 2.810 G_L1_RELATIVE: 13.486 G_Regularizer: 0.000 validation_error: 20.723 +(epoch: 17, iters: 165968, time: 0.540, data: 0.000) G_L1: 12.516 G_L1_ABSOLUTE: 2.671 G_L1_RELATIVE: 9.845 G_Regularizer: 0.000 validation_error: 22.236 +(epoch: 17, iters: 167968, time: 0.553, data: 0.000) G_L1: 14.838 G_L1_ABSOLUTE: 2.850 G_L1_RELATIVE: 11.989 G_Regularizer: 0.000 validation_error: 20.175 +(epoch: 17, iters: 169968, time: 0.557, data: 0.000) G_L1: 16.682 G_L1_ABSOLUTE: 2.734 G_L1_RELATIVE: 13.948 G_Regularizer: 0.000 validation_error: 20.221 +(epoch: 17, iters: 171968, time: 0.543, data: 0.000) G_L1: 13.896 G_L1_ABSOLUTE: 2.610 G_L1_RELATIVE: 11.286 G_Regularizer: 0.000 validation_error: 20.696 +(epoch: 17, iters: 173968, time: 0.553, data: 0.000) G_L1: 13.196 G_L1_ABSOLUTE: 2.551 G_L1_RELATIVE: 10.646 G_Regularizer: 0.000 validation_error: 21.175 +(epoch: 17, iters: 175968, time: 0.551, data: 0.000) G_L1: 17.553 G_L1_ABSOLUTE: 2.859 G_L1_RELATIVE: 14.695 G_Regularizer: 0.000 validation_error: 20.714 +(epoch: 17, iters: 177968, time: 0.540, data: 0.000) G_L1: 16.289 G_L1_ABSOLUTE: 2.686 
G_L1_RELATIVE: 13.603 G_Regularizer: 0.000 validation_error: 20.633 +(epoch: 17, iters: 179968, time: 0.540, data: 0.001) G_L1: 13.996 G_L1_ABSOLUTE: 2.446 G_L1_RELATIVE: 11.551 G_Regularizer: 0.000 validation_error: 20.850 +(epoch: 17, iters: 181968, time: 0.546, data: 0.000) G_L1: 12.996 G_L1_ABSOLUTE: 2.567 G_L1_RELATIVE: 10.430 G_Regularizer: 0.000 validation_error: 20.439 +(epoch: 17, iters: 183968, time: 0.545, data: 0.000) G_L1: 14.641 G_L1_ABSOLUTE: 2.577 G_L1_RELATIVE: 12.064 G_Regularizer: 0.000 validation_error: 20.698 +(epoch: 17, iters: 185968, time: 0.565, data: 0.000) G_L1: 15.966 G_L1_ABSOLUTE: 2.979 G_L1_RELATIVE: 12.987 G_Regularizer: 0.000 validation_error: 20.597 +(epoch: 17, iters: 187968, time: 0.540, data: 0.000) G_L1: 16.219 G_L1_ABSOLUTE: 2.773 G_L1_RELATIVE: 13.446 G_Regularizer: 0.000 validation_error: 20.709 +(epoch: 17, iters: 189968, time: 0.550, data: 0.000) G_L1: 13.807 G_L1_ABSOLUTE: 2.921 G_L1_RELATIVE: 10.887 G_Regularizer: 0.000 validation_error: 21.272 +(epoch: 17, iters: 191968, time: 0.557, data: 0.000) G_L1: 14.659 G_L1_ABSOLUTE: 2.465 G_L1_RELATIVE: 12.194 G_Regularizer: 0.000 validation_error: 20.120 +(epoch: 17, iters: 193968, time: 0.560, data: 0.000) G_L1: 14.254 G_L1_ABSOLUTE: 2.490 G_L1_RELATIVE: 11.764 G_Regularizer: 0.000 validation_error: 20.234 +(epoch: 17, iters: 195968, time: 0.555, data: 0.000) G_L1: 15.314 G_L1_ABSOLUTE: 2.357 G_L1_RELATIVE: 12.957 G_Regularizer: 0.000 validation_error: 20.490 +(epoch: 17, iters: 197968, time: 0.532, data: 0.000) G_L1: 16.058 G_L1_ABSOLUTE: 2.497 G_L1_RELATIVE: 13.562 G_Regularizer: 0.000 validation_error: 20.564 +(epoch: 17, iters: 199968, time: 0.552, data: 0.000) G_L1: 19.091 G_L1_ABSOLUTE: 3.121 G_L1_RELATIVE: 15.970 G_Regularizer: 0.000 validation_error: 20.167 +(epoch: 17, iters: 201968, time: 0.560, data: 0.000) G_L1: 13.293 G_L1_ABSOLUTE: 2.840 G_L1_RELATIVE: 10.453 G_Regularizer: 0.000 validation_error: 20.874 +(epoch: 17, iters: 203968, time: 0.560, data: 0.000) G_L1: 13.949 G_L1_ABSOLUTE: 2.581 G_L1_RELATIVE: 11.367 G_Regularizer: 0.000 validation_error: 20.357 +(epoch: 17, iters: 205968, time: 0.530, data: 0.000) G_L1: 16.915 G_L1_ABSOLUTE: 3.088 G_L1_RELATIVE: 13.826 G_Regularizer: 0.000 validation_error: 20.849 +(epoch: 17, iters: 207968, time: 0.555, data: 0.000) G_L1: 16.828 G_L1_ABSOLUTE: 3.042 G_L1_RELATIVE: 13.785 G_Regularizer: 0.000 validation_error: 20.137 +(epoch: 17, iters: 209968, time: 0.560, data: 0.000) G_L1: 14.972 G_L1_ABSOLUTE: 2.276 G_L1_RELATIVE: 12.695 G_Regularizer: 0.000 validation_error: 20.365 +(epoch: 17, iters: 211968, time: 0.557, data: 0.000) G_L1: 14.074 G_L1_ABSOLUTE: 2.511 G_L1_RELATIVE: 11.563 G_Regularizer: 0.000 validation_error: 19.955 +(epoch: 17, iters: 213968, time: 0.528, data: 0.000) G_L1: 15.085 G_L1_ABSOLUTE: 2.703 G_L1_RELATIVE: 12.382 G_Regularizer: 0.000 validation_error: 20.617 +(epoch: 17, iters: 215968, time: 0.554, data: 0.000) G_L1: 14.957 G_L1_ABSOLUTE: 2.600 G_L1_RELATIVE: 12.357 G_Regularizer: 0.000 validation_error: 20.985 +(epoch: 17, iters: 217968, time: 0.562, data: 0.000) G_L1: 16.228 G_L1_ABSOLUTE: 2.734 G_L1_RELATIVE: 13.494 G_Regularizer: 0.000 validation_error: 19.508 +(epoch: 17, iters: 219968, time: 0.541, data: 0.000) G_L1: 26.527 G_L1_ABSOLUTE: 2.708 G_L1_RELATIVE: 23.819 G_Regularizer: 0.000 validation_error: 20.338 +(epoch: 17, iters: 221968, time: 0.546, data: 0.000) G_L1: 15.703 G_L1_ABSOLUTE: 2.708 G_L1_RELATIVE: 12.995 G_Regularizer: 0.000 validation_error: 20.376 +(epoch: 17, iters: 223968, time: 0.531, data: 0.000) 
G_L1: 14.644 G_L1_ABSOLUTE: 2.561 G_L1_RELATIVE: 12.083 G_Regularizer: 0.000 validation_error: 20.094 +(epoch: 17, iters: 225968, time: 0.535, data: 0.000) G_L1: 16.403 G_L1_ABSOLUTE: 2.984 G_L1_RELATIVE: 13.419 G_Regularizer: 0.000 validation_error: 20.410 +(epoch: 17, iters: 227968, time: 0.539, data: 0.000) G_L1: 13.928 G_L1_ABSOLUTE: 2.539 G_L1_RELATIVE: 11.389 G_Regularizer: 0.000 validation_error: 20.420 +(epoch: 17, iters: 229968, time: 0.537, data: 0.000) G_L1: 17.750 G_L1_ABSOLUTE: 2.798 G_L1_RELATIVE: 14.952 G_Regularizer: 0.000 validation_error: 20.428 +(epoch: 17, iters: 231968, time: 0.543, data: 0.000) G_L1: 12.163 G_L1_ABSOLUTE: 2.332 G_L1_RELATIVE: 9.831 G_Regularizer: 0.000 validation_error: 21.205 +(epoch: 17, iters: 233968, time: 0.558, data: 0.000) G_L1: 13.506 G_L1_ABSOLUTE: 2.424 G_L1_RELATIVE: 11.082 G_Regularizer: 0.000 validation_error: 20.072 +(epoch: 17, iters: 235968, time: 0.557, data: 0.001) G_L1: 12.007 G_L1_ABSOLUTE: 2.375 G_L1_RELATIVE: 9.632 G_Regularizer: 0.000 validation_error: 21.491 +(epoch: 17, iters: 237968, time: 0.562, data: 0.000) G_L1: 15.590 G_L1_ABSOLUTE: 2.750 G_L1_RELATIVE: 12.839 G_Regularizer: 0.000 validation_error: 20.424 +(epoch: 17, iters: 239968, time: 0.540, data: 0.000) G_L1: 17.080 G_L1_ABSOLUTE: 2.790 G_L1_RELATIVE: 14.290 G_Regularizer: 0.000 validation_error: 21.168 +(epoch: 17, iters: 241968, time: 0.543, data: 0.000) G_L1: 13.154 G_L1_ABSOLUTE: 2.204 G_L1_RELATIVE: 10.950 G_Regularizer: 0.000 validation_error: 21.441 +(epoch: 17, iters: 243968, time: 0.542, data: 0.000) G_L1: 15.872 G_L1_ABSOLUTE: 3.244 G_L1_RELATIVE: 12.628 G_Regularizer: 0.000 validation_error: 20.419 +(epoch: 17, iters: 245968, time: 0.549, data: 0.000) G_L1: 14.376 G_L1_ABSOLUTE: 2.427 G_L1_RELATIVE: 11.949 G_Regularizer: 0.000 validation_error: 20.284 +(epoch: 17, iters: 247968, time: 0.540, data: 0.000) G_L1: 15.255 G_L1_ABSOLUTE: 2.612 G_L1_RELATIVE: 12.643 G_Regularizer: 0.000 validation_error: 20.853 +(epoch: 17, iters: 249968, time: 0.529, data: 0.000) G_L1: 17.225 G_L1_ABSOLUTE: 3.111 G_L1_RELATIVE: 14.114 G_Regularizer: 0.000 validation_error: 21.042 +(epoch: 17, iters: 251968, time: 0.563, data: 0.000) G_L1: 15.128 G_L1_ABSOLUTE: 3.274 G_L1_RELATIVE: 11.854 G_Regularizer: 0.000 validation_error: 19.942 +(epoch: 17, iters: 253968, time: 0.537, data: 0.000) G_L1: 14.670 G_L1_ABSOLUTE: 2.979 G_L1_RELATIVE: 11.692 G_Regularizer: 0.000 validation_error: 20.444 +(epoch: 17, iters: 255968, time: 0.553, data: 0.000) G_L1: 15.665 G_L1_ABSOLUTE: 2.648 G_L1_RELATIVE: 13.016 G_Regularizer: 0.000 validation_error: 20.738 +(epoch: 17, iters: 257968, time: 0.558, data: 0.000) G_L1: 16.477 G_L1_ABSOLUTE: 2.783 G_L1_RELATIVE: 13.694 G_Regularizer: 0.000 validation_error: 20.482 +(epoch: 17, iters: 259968, time: 0.563, data: 0.000) G_L1: 13.218 G_L1_ABSOLUTE: 2.695 G_L1_RELATIVE: 10.524 G_Regularizer: 0.000 validation_error: 20.617 +(epoch: 17, iters: 261968, time: 0.552, data: 0.001) G_L1: 16.081 G_L1_ABSOLUTE: 2.652 G_L1_RELATIVE: 13.429 G_Regularizer: 0.000 validation_error: 20.404 +(epoch: 17, iters: 263968, time: 0.548, data: 0.000) G_L1: 14.530 G_L1_ABSOLUTE: 2.636 G_L1_RELATIVE: 11.894 G_Regularizer: 0.000 validation_error: 21.071 +(epoch: 17, iters: 265968, time: 0.544, data: 0.000) G_L1: 14.587 G_L1_ABSOLUTE: 2.779 G_L1_RELATIVE: 11.809 G_Regularizer: 0.000 validation_error: 20.227 +(epoch: 17, iters: 267968, time: 0.562, data: 0.000) G_L1: 12.878 G_L1_ABSOLUTE: 2.621 G_L1_RELATIVE: 10.257 G_Regularizer: 0.000 validation_error: 20.968 +(epoch: 17, iters: 
269968, time: 0.562, data: 0.000) G_L1: 15.606 G_L1_ABSOLUTE: 2.597 G_L1_RELATIVE: 13.009 G_Regularizer: 0.000 validation_error: 20.416 +(epoch: 17, iters: 271968, time: 0.556, data: 0.000) G_L1: 15.309 G_L1_ABSOLUTE: 2.597 G_L1_RELATIVE: 12.712 G_Regularizer: 0.000 validation_error: 20.253 +(epoch: 17, iters: 273968, time: 0.554, data: 0.000) G_L1: 15.118 G_L1_ABSOLUTE: 2.678 G_L1_RELATIVE: 12.440 G_Regularizer: 0.000 validation_error: 20.747 +(epoch: 17, iters: 275968, time: 0.558, data: 0.000) G_L1: 16.377 G_L1_ABSOLUTE: 2.755 G_L1_RELATIVE: 13.623 G_Regularizer: 0.000 validation_error: 20.456 +(epoch: 17, iters: 277968, time: 0.558, data: 0.001) G_L1: 13.261 G_L1_ABSOLUTE: 2.674 G_L1_RELATIVE: 10.586 G_Regularizer: 0.000 validation_error: 19.896 +(epoch: 17, iters: 279968, time: 0.560, data: 0.000) G_L1: 14.669 G_L1_ABSOLUTE: 2.541 G_L1_RELATIVE: 12.129 G_Regularizer: 0.000 validation_error: 20.038 +(epoch: 17, iters: 281968, time: 0.545, data: 0.000) G_L1: 13.214 G_L1_ABSOLUTE: 2.384 G_L1_RELATIVE: 10.830 G_Regularizer: 0.000 validation_error: 20.605 +(epoch: 17, iters: 283968, time: 0.559, data: 0.000) G_L1: 16.798 G_L1_ABSOLUTE: 2.556 G_L1_RELATIVE: 14.242 G_Regularizer: 0.000 validation_error: 20.781 +(epoch: 17, iters: 285968, time: 0.560, data: 0.000) G_L1: 15.719 G_L1_ABSOLUTE: 2.668 G_L1_RELATIVE: 13.051 G_Regularizer: 0.000 validation_error: 20.663 +(epoch: 17, iters: 287968, time: 0.559, data: 0.000) G_L1: 16.138 G_L1_ABSOLUTE: 2.985 G_L1_RELATIVE: 13.153 G_Regularizer: 0.000 validation_error: 20.877 +(epoch: 17, iters: 289968, time: 0.531, data: 0.000) G_L1: 14.275 G_L1_ABSOLUTE: 2.264 G_L1_RELATIVE: 12.011 G_Regularizer: 0.000 validation_error: 20.113 +(epoch: 17, iters: 291968, time: 0.553, data: 0.000) G_L1: 15.133 G_L1_ABSOLUTE: 2.614 G_L1_RELATIVE: 12.519 G_Regularizer: 0.000 validation_error: 20.685 +(epoch: 17, iters: 293968, time: 0.555, data: 0.000) G_L1: 15.072 G_L1_ABSOLUTE: 3.032 G_L1_RELATIVE: 12.040 G_Regularizer: 0.000 validation_error: 20.587 +(epoch: 17, iters: 295968, time: 0.564, data: 0.000) G_L1: 16.255 G_L1_ABSOLUTE: 2.704 G_L1_RELATIVE: 13.551 G_Regularizer: 0.000 validation_error: 20.987 +(epoch: 17, iters: 297968, time: 0.549, data: 0.000) G_L1: 14.665 G_L1_ABSOLUTE: 2.529 G_L1_RELATIVE: 12.136 G_Regularizer: 0.000 validation_error: 20.633 +(epoch: 17, iters: 299968, time: 0.559, data: 0.000) G_L1: 14.294 G_L1_ABSOLUTE: 2.365 G_L1_RELATIVE: 11.928 G_Regularizer: 0.000 validation_error: 20.942 +(epoch: 17, iters: 301968, time: 0.560, data: 0.001) G_L1: 17.263 G_L1_ABSOLUTE: 3.306 G_L1_RELATIVE: 13.958 G_Regularizer: 0.000 validation_error: 20.609 +(epoch: 18, iters: 1216, time: 0.558, data: 0.000) G_L1: 16.176 G_L1_ABSOLUTE: 2.839 G_L1_RELATIVE: 13.337 G_Regularizer: 0.000 validation_error: 20.759 +(epoch: 18, iters: 3216, time: 0.543, data: 0.000) G_L1: 14.646 G_L1_ABSOLUTE: 2.604 G_L1_RELATIVE: 12.042 G_Regularizer: 0.000 validation_error: 21.444 +(epoch: 18, iters: 5216, time: 0.563, data: 0.000) G_L1: 14.041 G_L1_ABSOLUTE: 2.406 G_L1_RELATIVE: 11.635 G_Regularizer: 0.000 validation_error: 20.387 +(epoch: 18, iters: 7216, time: 0.559, data: 0.000) G_L1: 13.743 G_L1_ABSOLUTE: 2.601 G_L1_RELATIVE: 11.142 G_Regularizer: 0.000 validation_error: 20.406 +(epoch: 18, iters: 9216, time: 0.558, data: 0.000) G_L1: 12.023 G_L1_ABSOLUTE: 2.420 G_L1_RELATIVE: 9.603 G_Regularizer: 0.000 validation_error: 20.997 +(epoch: 18, iters: 11216, time: 0.543, data: 0.000) G_L1: 14.247 G_L1_ABSOLUTE: 2.898 G_L1_RELATIVE: 11.349 G_Regularizer: 0.000 validation_error: 
20.785 +(epoch: 18, iters: 13216, time: 0.545, data: 0.000) G_L1: 12.613 G_L1_ABSOLUTE: 2.496 G_L1_RELATIVE: 10.117 G_Regularizer: 0.000 validation_error: 20.375 +(epoch: 18, iters: 15216, time: 0.555, data: 0.000) G_L1: 13.729 G_L1_ABSOLUTE: 2.590 G_L1_RELATIVE: 11.139 G_Regularizer: 0.000 validation_error: 20.739 +(epoch: 18, iters: 17216, time: 0.568, data: 0.001) G_L1: 14.390 G_L1_ABSOLUTE: 2.548 G_L1_RELATIVE: 11.842 G_Regularizer: 0.000 validation_error: 20.540 +(epoch: 18, iters: 19216, time: 0.553, data: 0.000) G_L1: 12.291 G_L1_ABSOLUTE: 2.082 G_L1_RELATIVE: 10.208 G_Regularizer: 0.000 validation_error: 20.345 +(epoch: 18, iters: 21216, time: 0.534, data: 0.000) G_L1: 16.179 G_L1_ABSOLUTE: 3.110 G_L1_RELATIVE: 13.069 G_Regularizer: 0.000 validation_error: 21.002 +(epoch: 18, iters: 23216, time: 0.557, data: 0.000) G_L1: 12.535 G_L1_ABSOLUTE: 2.210 G_L1_RELATIVE: 10.325 G_Regularizer: 0.000 validation_error: 20.621 +(epoch: 18, iters: 25216, time: 0.564, data: 0.000) G_L1: 15.132 G_L1_ABSOLUTE: 2.215 G_L1_RELATIVE: 12.917 G_Regularizer: 0.000 validation_error: 20.598 +(epoch: 18, iters: 27216, time: 0.541, data: 0.001) G_L1: 13.412 G_L1_ABSOLUTE: 2.727 G_L1_RELATIVE: 10.684 G_Regularizer: 0.000 validation_error: 21.135 +(epoch: 18, iters: 29216, time: 0.533, data: 0.000) G_L1: 14.811 G_L1_ABSOLUTE: 2.539 G_L1_RELATIVE: 12.272 G_Regularizer: 0.000 validation_error: 20.606 +(epoch: 18, iters: 31216, time: 0.556, data: 0.000) G_L1: 14.716 G_L1_ABSOLUTE: 2.598 G_L1_RELATIVE: 12.119 G_Regularizer: 0.000 validation_error: 20.696 +(epoch: 18, iters: 33216, time: 0.556, data: 0.000) G_L1: 16.690 G_L1_ABSOLUTE: 2.576 G_L1_RELATIVE: 14.114 G_Regularizer: 0.000 validation_error: 20.367 +(epoch: 18, iters: 35216, time: 0.561, data: 0.000) G_L1: 14.212 G_L1_ABSOLUTE: 3.041 G_L1_RELATIVE: 11.171 G_Regularizer: 0.000 validation_error: 20.480 +(epoch: 18, iters: 37216, time: 0.538, data: 0.000) G_L1: 15.275 G_L1_ABSOLUTE: 2.991 G_L1_RELATIVE: 12.284 G_Regularizer: 0.000 validation_error: 21.030 +(epoch: 18, iters: 39216, time: 0.561, data: 0.000) G_L1: 12.672 G_L1_ABSOLUTE: 2.341 G_L1_RELATIVE: 10.331 G_Regularizer: 0.000 validation_error: 21.241 +(epoch: 18, iters: 41216, time: 0.555, data: 0.000) G_L1: 13.011 G_L1_ABSOLUTE: 2.340 G_L1_RELATIVE: 10.671 G_Regularizer: 0.000 validation_error: 20.723 +(epoch: 18, iters: 43216, time: 0.557, data: 0.000) G_L1: 15.643 G_L1_ABSOLUTE: 2.542 G_L1_RELATIVE: 13.101 G_Regularizer: 0.000 validation_error: 21.163 +(epoch: 18, iters: 45216, time: 0.530, data: 0.000) G_L1: 13.586 G_L1_ABSOLUTE: 2.984 G_L1_RELATIVE: 10.603 G_Regularizer: 0.000 validation_error: 20.926 +(epoch: 18, iters: 47216, time: 0.556, data: 0.000) G_L1: 15.771 G_L1_ABSOLUTE: 2.789 G_L1_RELATIVE: 12.982 G_Regularizer: 0.000 validation_error: 20.293 +(epoch: 18, iters: 49216, time: 0.562, data: 0.000) G_L1: 14.453 G_L1_ABSOLUTE: 2.726 G_L1_RELATIVE: 11.727 G_Regularizer: 0.000 validation_error: 20.249 +(epoch: 18, iters: 51216, time: 0.564, data: 0.000) G_L1: 11.702 G_L1_ABSOLUTE: 2.542 G_L1_RELATIVE: 9.160 G_Regularizer: 0.000 validation_error: 20.759 +(epoch: 18, iters: 53216, time: 0.546, data: 0.000) G_L1: 15.676 G_L1_ABSOLUTE: 2.442 G_L1_RELATIVE: 13.234 G_Regularizer: 0.000 validation_error: 20.780 +(epoch: 18, iters: 55216, time: 0.546, data: 0.000) G_L1: 17.108 G_L1_ABSOLUTE: 2.877 G_L1_RELATIVE: 14.231 G_Regularizer: 0.000 validation_error: 21.528 +(epoch: 18, iters: 57216, time: 0.562, data: 0.001) G_L1: 16.457 G_L1_ABSOLUTE: 2.358 G_L1_RELATIVE: 14.099 G_Regularizer: 0.000 
validation_error: 21.925 +(epoch: 18, iters: 59216, time: 0.561, data: 0.000) G_L1: 17.877 G_L1_ABSOLUTE: 2.496 G_L1_RELATIVE: 15.381 G_Regularizer: 0.000 validation_error: 20.492 +(epoch: 18, iters: 61216, time: 0.549, data: 0.000) G_L1: 15.558 G_L1_ABSOLUTE: 2.826 G_L1_RELATIVE: 12.732 G_Regularizer: 0.000 validation_error: 20.249 +(epoch: 18, iters: 63216, time: 0.533, data: 0.000) G_L1: 14.077 G_L1_ABSOLUTE: 2.436 G_L1_RELATIVE: 11.640 G_Regularizer: 0.000 validation_error: 20.995 +(epoch: 18, iters: 65216, time: 0.553, data: 0.000) G_L1: 15.755 G_L1_ABSOLUTE: 3.205 G_L1_RELATIVE: 12.550 G_Regularizer: 0.000 validation_error: 20.008 +(epoch: 18, iters: 67216, time: 0.546, data: 0.000) G_L1: 11.631 G_L1_ABSOLUTE: 2.483 G_L1_RELATIVE: 9.148 G_Regularizer: 0.000 validation_error: 20.645 +(epoch: 18, iters: 69216, time: 0.540, data: 0.000) G_L1: 14.422 G_L1_ABSOLUTE: 2.810 G_L1_RELATIVE: 11.612 G_Regularizer: 0.000 validation_error: 20.674 +(epoch: 18, iters: 71216, time: 0.542, data: 0.000) G_L1: 15.464 G_L1_ABSOLUTE: 3.263 G_L1_RELATIVE: 12.201 G_Regularizer: 0.000 validation_error: 20.669 +(epoch: 18, iters: 73216, time: 0.549, data: 0.000) G_L1: 14.880 G_L1_ABSOLUTE: 3.110 G_L1_RELATIVE: 11.770 G_Regularizer: 0.000 validation_error: 21.127 +(epoch: 18, iters: 75216, time: 0.563, data: 0.000) G_L1: 17.239 G_L1_ABSOLUTE: 2.684 G_L1_RELATIVE: 14.555 G_Regularizer: 0.000 validation_error: 20.222 +(epoch: 18, iters: 77216, time: 0.558, data: 0.000) G_L1: 13.780 G_L1_ABSOLUTE: 2.734 G_L1_RELATIVE: 11.046 G_Regularizer: 0.000 validation_error: 20.310 +(epoch: 18, iters: 79216, time: 0.552, data: 0.000) G_L1: 15.787 G_L1_ABSOLUTE: 2.597 G_L1_RELATIVE: 13.190 G_Regularizer: 0.000 validation_error: 20.734 +(epoch: 18, iters: 81216, time: 0.554, data: 0.000) G_L1: 14.247 G_L1_ABSOLUTE: 2.417 G_L1_RELATIVE: 11.830 G_Regularizer: 0.000 validation_error: 21.327 +(epoch: 18, iters: 83216, time: 0.531, data: 0.000) G_L1: 15.223 G_L1_ABSOLUTE: 3.059 G_L1_RELATIVE: 12.164 G_Regularizer: 0.000 validation_error: 20.062 +(epoch: 18, iters: 85216, time: 0.559, data: 0.000) G_L1: 18.024 G_L1_ABSOLUTE: 3.053 G_L1_RELATIVE: 14.971 G_Regularizer: 0.000 validation_error: 20.288 +(epoch: 18, iters: 87216, time: 0.532, data: 0.001) G_L1: 13.467 G_L1_ABSOLUTE: 2.174 G_L1_RELATIVE: 11.293 G_Regularizer: 0.000 validation_error: 20.703 +(epoch: 18, iters: 89216, time: 0.531, data: 0.000) G_L1: 17.445 G_L1_ABSOLUTE: 2.942 G_L1_RELATIVE: 14.503 G_Regularizer: 0.000 validation_error: 20.483 +(epoch: 18, iters: 91216, time: 0.573, data: 0.000) G_L1: 13.877 G_L1_ABSOLUTE: 2.609 G_L1_RELATIVE: 11.268 G_Regularizer: 0.000 validation_error: 20.346 +(epoch: 18, iters: 93216, time: 0.557, data: 0.000) G_L1: 13.989 G_L1_ABSOLUTE: 2.430 G_L1_RELATIVE: 11.559 G_Regularizer: 0.000 validation_error: 21.003 +(epoch: 18, iters: 95216, time: 0.546, data: 0.000) G_L1: 15.038 G_L1_ABSOLUTE: 2.583 G_L1_RELATIVE: 12.455 G_Regularizer: 0.000 validation_error: 20.855 +(epoch: 18, iters: 97216, time: 0.566, data: 0.000) G_L1: 14.544 G_L1_ABSOLUTE: 2.414 G_L1_RELATIVE: 12.130 G_Regularizer: 0.000 validation_error: 21.269 +(epoch: 18, iters: 99216, time: 0.546, data: 0.000) G_L1: 17.851 G_L1_ABSOLUTE: 2.675 G_L1_RELATIVE: 15.176 G_Regularizer: 0.000 validation_error: 20.850 +(epoch: 18, iters: 101216, time: 0.547, data: 0.000) G_L1: 15.127 G_L1_ABSOLUTE: 2.890 G_L1_RELATIVE: 12.237 G_Regularizer: 0.000 validation_error: 20.463 +(epoch: 18, iters: 103216, time: 0.552, data: 0.000) G_L1: 14.622 G_L1_ABSOLUTE: 2.396 G_L1_RELATIVE: 12.226 
G_Regularizer: 0.000 validation_error: 20.927 +(epoch: 18, iters: 105216, time: 0.549, data: 0.000) G_L1: 16.605 G_L1_ABSOLUTE: 2.787 G_L1_RELATIVE: 13.818 G_Regularizer: 0.000 validation_error: 20.429 +(epoch: 18, iters: 107216, time: 0.560, data: 0.001) G_L1: 17.746 G_L1_ABSOLUTE: 3.226 G_L1_RELATIVE: 14.520 G_Regularizer: 0.000 validation_error: 21.239 +(epoch: 18, iters: 109216, time: 0.556, data: 0.000) G_L1: 16.168 G_L1_ABSOLUTE: 2.622 G_L1_RELATIVE: 13.546 G_Regularizer: 0.000 validation_error: 20.418 +(epoch: 18, iters: 111216, time: 0.541, data: 0.000) G_L1: 15.366 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 12.912 G_Regularizer: 0.000 validation_error: 20.817 +(epoch: 18, iters: 113216, time: 0.536, data: 0.000) G_L1: 13.260 G_L1_ABSOLUTE: 2.265 G_L1_RELATIVE: 10.995 G_Regularizer: 0.000 validation_error: 20.334 +(epoch: 18, iters: 115216, time: 0.560, data: 0.000) G_L1: 16.590 G_L1_ABSOLUTE: 2.903 G_L1_RELATIVE: 13.686 G_Regularizer: 0.000 validation_error: 20.425 +(epoch: 18, iters: 117216, time: 0.560, data: 0.000) G_L1: 14.155 G_L1_ABSOLUTE: 2.491 G_L1_RELATIVE: 11.664 G_Regularizer: 0.000 validation_error: 21.314 +(epoch: 18, iters: 119216, time: 0.566, data: 0.000) G_L1: 14.299 G_L1_ABSOLUTE: 2.628 G_L1_RELATIVE: 11.671 G_Regularizer: 0.000 validation_error: 20.630 +(epoch: 18, iters: 121216, time: 0.537, data: 0.000) G_L1: 14.783 G_L1_ABSOLUTE: 2.787 G_L1_RELATIVE: 11.995 G_Regularizer: 0.000 validation_error: 19.758 +(epoch: 18, iters: 123216, time: 0.558, data: 0.000) G_L1: 14.921 G_L1_ABSOLUTE: 3.135 G_L1_RELATIVE: 11.787 G_Regularizer: 0.000 validation_error: 21.099 +(epoch: 18, iters: 125216, time: 0.564, data: 0.000) G_L1: 17.450 G_L1_ABSOLUTE: 2.546 G_L1_RELATIVE: 14.905 G_Regularizer: 0.000 validation_error: 20.258 +(epoch: 18, iters: 127216, time: 0.554, data: 0.000) G_L1: 13.985 G_L1_ABSOLUTE: 2.928 G_L1_RELATIVE: 11.058 G_Regularizer: 0.000 validation_error: 20.694 +(epoch: 18, iters: 129216, time: 0.531, data: 0.000) G_L1: 15.816 G_L1_ABSOLUTE: 2.597 G_L1_RELATIVE: 13.220 G_Regularizer: 0.000 validation_error: 20.407 +(epoch: 18, iters: 131216, time: 0.543, data: 0.000) G_L1: 16.696 G_L1_ABSOLUTE: 2.707 G_L1_RELATIVE: 13.989 G_Regularizer: 0.000 validation_error: 20.761 +(epoch: 18, iters: 133216, time: 0.565, data: 0.000) G_L1: 18.754 G_L1_ABSOLUTE: 3.401 G_L1_RELATIVE: 15.354 G_Regularizer: 0.000 validation_error: 21.752 +(epoch: 18, iters: 135216, time: 0.561, data: 0.001) G_L1: 14.709 G_L1_ABSOLUTE: 2.240 G_L1_RELATIVE: 12.470 G_Regularizer: 0.000 validation_error: 20.276 +(epoch: 18, iters: 137216, time: 0.536, data: 0.001) G_L1: 13.303 G_L1_ABSOLUTE: 2.597 G_L1_RELATIVE: 10.706 G_Regularizer: 0.000 validation_error: 20.342 +(epoch: 18, iters: 139216, time: 0.559, data: 0.000) G_L1: 15.003 G_L1_ABSOLUTE: 3.174 G_L1_RELATIVE: 11.829 G_Regularizer: 0.000 validation_error: 20.982 +(epoch: 18, iters: 141216, time: 0.559, data: 0.001) G_L1: 17.664 G_L1_ABSOLUTE: 2.363 G_L1_RELATIVE: 15.301 G_Regularizer: 0.000 validation_error: 20.793 +(epoch: 18, iters: 143216, time: 0.556, data: 0.000) G_L1: 12.305 G_L1_ABSOLUTE: 2.076 G_L1_RELATIVE: 10.228 G_Regularizer: 0.000 validation_error: 20.056 +(epoch: 18, iters: 145216, time: 0.553, data: 0.000) G_L1: 15.035 G_L1_ABSOLUTE: 2.376 G_L1_RELATIVE: 12.659 G_Regularizer: 0.000 validation_error: 21.290 +(epoch: 18, iters: 147216, time: 0.559, data: 0.000) G_L1: 20.920 G_L1_ABSOLUTE: 2.934 G_L1_RELATIVE: 17.987 G_Regularizer: 0.000 validation_error: 20.857 +(epoch: 18, iters: 149216, time: 0.558, data: 0.000) G_L1: 14.125 
G_L1_ABSOLUTE: 2.350 G_L1_RELATIVE: 11.775 G_Regularizer: 0.000 validation_error: 21.245 +(epoch: 18, iters: 151216, time: 0.553, data: 0.000) G_L1: 15.524 G_L1_ABSOLUTE: 2.483 G_L1_RELATIVE: 13.041 G_Regularizer: 0.000 validation_error: 21.081 +(epoch: 18, iters: 153216, time: 0.554, data: 0.000) G_L1: 23.109 G_L1_ABSOLUTE: 2.811 G_L1_RELATIVE: 20.298 G_Regularizer: 0.000 validation_error: 20.291 +(epoch: 18, iters: 155216, time: 0.545, data: 0.001) G_L1: 16.714 G_L1_ABSOLUTE: 2.723 G_L1_RELATIVE: 13.991 G_Regularizer: 0.000 validation_error: 20.635 +(epoch: 18, iters: 157216, time: 0.560, data: 0.000) G_L1: 15.369 G_L1_ABSOLUTE: 2.881 G_L1_RELATIVE: 12.488 G_Regularizer: 0.000 validation_error: 20.535 +(epoch: 18, iters: 159216, time: 0.549, data: 0.000) G_L1: 14.507 G_L1_ABSOLUTE: 2.494 G_L1_RELATIVE: 12.013 G_Regularizer: 0.000 validation_error: 20.067 +(epoch: 18, iters: 161216, time: 0.552, data: 0.001) G_L1: 17.323 G_L1_ABSOLUTE: 3.133 G_L1_RELATIVE: 14.190 G_Regularizer: 0.000 validation_error: 20.533 +(epoch: 18, iters: 163216, time: 0.561, data: 0.000) G_L1: 11.816 G_L1_ABSOLUTE: 2.844 G_L1_RELATIVE: 8.972 G_Regularizer: 0.000 validation_error: 20.425 +(epoch: 18, iters: 165216, time: 0.558, data: 0.000) G_L1: 14.764 G_L1_ABSOLUTE: 2.602 G_L1_RELATIVE: 12.162 G_Regularizer: 0.000 validation_error: 20.955 +(epoch: 18, iters: 167216, time: 0.549, data: 0.000) G_L1: 11.201 G_L1_ABSOLUTE: 2.072 G_L1_RELATIVE: 9.129 G_Regularizer: 0.000 validation_error: 21.024 +(epoch: 18, iters: 169216, time: 0.553, data: 0.000) G_L1: 17.063 G_L1_ABSOLUTE: 2.799 G_L1_RELATIVE: 14.264 G_Regularizer: 0.000 validation_error: 20.831 +(epoch: 18, iters: 171216, time: 0.551, data: 0.001) G_L1: 17.420 G_L1_ABSOLUTE: 2.971 G_L1_RELATIVE: 14.449 G_Regularizer: 0.000 validation_error: 20.160 +(epoch: 18, iters: 173216, time: 0.539, data: 0.000) G_L1: 17.074 G_L1_ABSOLUTE: 2.622 G_L1_RELATIVE: 14.453 G_Regularizer: 0.000 validation_error: 20.329 +(epoch: 18, iters: 175216, time: 0.555, data: 0.000) G_L1: 14.226 G_L1_ABSOLUTE: 2.760 G_L1_RELATIVE: 11.465 G_Regularizer: 0.000 validation_error: 21.001 +(epoch: 18, iters: 177216, time: 0.553, data: 0.000) G_L1: 16.457 G_L1_ABSOLUTE: 3.215 G_L1_RELATIVE: 13.242 G_Regularizer: 0.000 validation_error: 20.255 +(epoch: 18, iters: 179216, time: 0.550, data: 0.000) G_L1: 17.378 G_L1_ABSOLUTE: 2.798 G_L1_RELATIVE: 14.580 G_Regularizer: 0.000 validation_error: 20.267 +(epoch: 18, iters: 181216, time: 0.560, data: 0.001) G_L1: 15.439 G_L1_ABSOLUTE: 2.861 G_L1_RELATIVE: 12.578 G_Regularizer: 0.000 validation_error: 20.372 +(epoch: 18, iters: 183216, time: 0.561, data: 0.000) G_L1: 12.987 G_L1_ABSOLUTE: 2.340 G_L1_RELATIVE: 10.647 G_Regularizer: 0.000 validation_error: 20.784 +(epoch: 18, iters: 185216, time: 0.541, data: 0.000) G_L1: 17.012 G_L1_ABSOLUTE: 2.427 G_L1_RELATIVE: 14.585 G_Regularizer: 0.000 validation_error: 20.660 +(epoch: 18, iters: 187216, time: 0.553, data: 0.000) G_L1: 15.516 G_L1_ABSOLUTE: 2.618 G_L1_RELATIVE: 12.898 G_Regularizer: 0.000 validation_error: 20.450 +(epoch: 18, iters: 189216, time: 0.559, data: 0.001) G_L1: 16.233 G_L1_ABSOLUTE: 2.288 G_L1_RELATIVE: 13.945 G_Regularizer: 0.000 validation_error: 20.676 +(epoch: 18, iters: 191216, time: 0.557, data: 0.000) G_L1: 14.026 G_L1_ABSOLUTE: 2.815 G_L1_RELATIVE: 11.212 G_Regularizer: 0.000 validation_error: 20.025 +(epoch: 18, iters: 193216, time: 0.558, data: 0.000) G_L1: 15.920 G_L1_ABSOLUTE: 2.838 G_L1_RELATIVE: 13.082 G_Regularizer: 0.000 validation_error: 20.295 +(epoch: 18, iters: 195216, time: 
0.540, data: 0.000) G_L1: 15.112 G_L1_ABSOLUTE: 2.263 G_L1_RELATIVE: 12.849 G_Regularizer: 0.000 validation_error: 20.416 +(epoch: 18, iters: 197216, time: 0.539, data: 0.000) G_L1: 11.663 G_L1_ABSOLUTE: 2.368 G_L1_RELATIVE: 9.295 G_Regularizer: 0.000 validation_error: 19.968 +(epoch: 18, iters: 199216, time: 0.556, data: 0.001) G_L1: 17.890 G_L1_ABSOLUTE: 2.643 G_L1_RELATIVE: 15.247 G_Regularizer: 0.000 validation_error: 20.845 +(epoch: 18, iters: 201216, time: 0.554, data: 0.000) G_L1: 11.252 G_L1_ABSOLUTE: 2.302 G_L1_RELATIVE: 8.950 G_Regularizer: 0.000 validation_error: 20.830 +(epoch: 18, iters: 203216, time: 0.552, data: 0.000) G_L1: 14.577 G_L1_ABSOLUTE: 2.894 G_L1_RELATIVE: 11.683 G_Regularizer: 0.000 validation_error: 20.569 +(epoch: 18, iters: 205216, time: 0.543, data: 0.000) G_L1: 15.436 G_L1_ABSOLUTE: 2.744 G_L1_RELATIVE: 12.692 G_Regularizer: 0.000 validation_error: 20.725 +(epoch: 18, iters: 207216, time: 0.559, data: 0.000) G_L1: 12.824 G_L1_ABSOLUTE: 2.281 G_L1_RELATIVE: 10.544 G_Regularizer: 0.000 validation_error: 20.739 +(epoch: 18, iters: 209216, time: 0.564, data: 0.000) G_L1: 13.086 G_L1_ABSOLUTE: 2.781 G_L1_RELATIVE: 10.305 G_Regularizer: 0.000 validation_error: 19.755 +(epoch: 18, iters: 211216, time: 0.548, data: 0.000) G_L1: 14.105 G_L1_ABSOLUTE: 2.559 G_L1_RELATIVE: 11.545 G_Regularizer: 0.000 validation_error: 21.111 +(epoch: 18, iters: 213216, time: 0.538, data: 0.000) G_L1: 13.234 G_L1_ABSOLUTE: 2.415 G_L1_RELATIVE: 10.819 G_Regularizer: 0.000 validation_error: 20.550 +(epoch: 18, iters: 215216, time: 0.549, data: 0.000) G_L1: 13.325 G_L1_ABSOLUTE: 2.155 G_L1_RELATIVE: 11.170 G_Regularizer: 0.000 validation_error: 21.060 +(epoch: 18, iters: 217216, time: 0.538, data: 0.000) G_L1: 15.643 G_L1_ABSOLUTE: 2.683 G_L1_RELATIVE: 12.960 G_Regularizer: 0.000 validation_error: 20.599 +(epoch: 18, iters: 219216, time: 0.550, data: 0.000) G_L1: 12.902 G_L1_ABSOLUTE: 2.505 G_L1_RELATIVE: 10.397 G_Regularizer: 0.000 validation_error: 20.178 +(epoch: 18, iters: 221216, time: 0.538, data: 0.000) G_L1: 12.255 G_L1_ABSOLUTE: 2.327 G_L1_RELATIVE: 9.928 G_Regularizer: 0.000 validation_error: 20.984 +(epoch: 18, iters: 223216, time: 0.548, data: 0.001) G_L1: 16.002 G_L1_ABSOLUTE: 3.029 G_L1_RELATIVE: 12.973 G_Regularizer: 0.000 validation_error: 20.392 +(epoch: 18, iters: 225216, time: 0.563, data: 0.000) G_L1: 17.544 G_L1_ABSOLUTE: 2.881 G_L1_RELATIVE: 14.664 G_Regularizer: 0.000 validation_error: 20.049 +(epoch: 18, iters: 227216, time: 0.553, data: 0.000) G_L1: 13.862 G_L1_ABSOLUTE: 2.504 G_L1_RELATIVE: 11.358 G_Regularizer: 0.000 validation_error: 20.255 +(epoch: 18, iters: 229216, time: 0.538, data: 0.000) G_L1: 14.428 G_L1_ABSOLUTE: 2.832 G_L1_RELATIVE: 11.596 G_Regularizer: 0.000 validation_error: 20.107 +(epoch: 18, iters: 231216, time: 0.559, data: 0.000) G_L1: 17.832 G_L1_ABSOLUTE: 2.660 G_L1_RELATIVE: 15.172 G_Regularizer: 0.000 validation_error: 21.467 +(epoch: 18, iters: 233216, time: 0.556, data: 0.000) G_L1: 13.670 G_L1_ABSOLUTE: 2.761 G_L1_RELATIVE: 10.909 G_Regularizer: 0.000 validation_error: 20.486 +(epoch: 18, iters: 235216, time: 0.538, data: 0.000) G_L1: 16.803 G_L1_ABSOLUTE: 2.797 G_L1_RELATIVE: 14.006 G_Regularizer: 0.000 validation_error: 20.393 +(epoch: 18, iters: 237216, time: 0.561, data: 0.000) G_L1: 13.685 G_L1_ABSOLUTE: 2.064 G_L1_RELATIVE: 11.620 G_Regularizer: 0.000 validation_error: 21.200 +(epoch: 18, iters: 239216, time: 0.531, data: 0.000) G_L1: 13.574 G_L1_ABSOLUTE: 2.571 G_L1_RELATIVE: 11.003 G_Regularizer: 0.000 validation_error: 20.065 
+(epoch: 18, iters: 241216, time: 0.556, data: 0.000) G_L1: 16.014 G_L1_ABSOLUTE: 3.533 G_L1_RELATIVE: 12.481 G_Regularizer: 0.000 validation_error: 20.607 +(epoch: 18, iters: 243216, time: 0.546, data: 0.000) G_L1: 13.984 G_L1_ABSOLUTE: 2.339 G_L1_RELATIVE: 11.645 G_Regularizer: 0.000 validation_error: 20.289 +(epoch: 18, iters: 245216, time: 0.562, data: 0.000) G_L1: 18.028 G_L1_ABSOLUTE: 2.604 G_L1_RELATIVE: 15.424 G_Regularizer: 0.000 validation_error: 21.240 +(epoch: 18, iters: 247216, time: 0.537, data: 0.000) G_L1: 13.711 G_L1_ABSOLUTE: 2.177 G_L1_RELATIVE: 11.534 G_Regularizer: 0.000 validation_error: 21.157 +(epoch: 18, iters: 249216, time: 0.559, data: 0.000) G_L1: 16.200 G_L1_ABSOLUTE: 2.771 G_L1_RELATIVE: 13.429 G_Regularizer: 0.000 validation_error: 20.827 +(epoch: 18, iters: 251216, time: 0.553, data: 0.000) G_L1: 15.787 G_L1_ABSOLUTE: 2.586 G_L1_RELATIVE: 13.201 G_Regularizer: 0.000 validation_error: 20.544 +(epoch: 18, iters: 253216, time: 0.553, data: 0.000) G_L1: 15.760 G_L1_ABSOLUTE: 2.779 G_L1_RELATIVE: 12.981 G_Regularizer: 0.000 validation_error: 21.431 +(epoch: 18, iters: 255216, time: 0.542, data: 0.000) G_L1: 30.076 G_L1_ABSOLUTE: 2.713 G_L1_RELATIVE: 27.364 G_Regularizer: 0.000 validation_error: 21.041 +(epoch: 18, iters: 257216, time: 0.551, data: 0.000) G_L1: 18.217 G_L1_ABSOLUTE: 2.901 G_L1_RELATIVE: 15.316 G_Regularizer: 0.000 validation_error: 20.549 +(epoch: 18, iters: 259216, time: 0.547, data: 0.000) G_L1: 21.037 G_L1_ABSOLUTE: 3.265 G_L1_RELATIVE: 17.772 G_Regularizer: 0.000 validation_error: 20.288 +(epoch: 18, iters: 261216, time: 0.559, data: 0.000) G_L1: 14.459 G_L1_ABSOLUTE: 2.270 G_L1_RELATIVE: 12.190 G_Regularizer: 0.000 validation_error: 20.007 +(epoch: 18, iters: 263216, time: 0.539, data: 0.000) G_L1: 15.024 G_L1_ABSOLUTE: 2.546 G_L1_RELATIVE: 12.478 G_Regularizer: 0.000 validation_error: 20.560 +(epoch: 18, iters: 265216, time: 0.556, data: 0.002) G_L1: 15.508 G_L1_ABSOLUTE: 2.664 G_L1_RELATIVE: 12.844 G_Regularizer: 0.000 validation_error: 20.646 +(epoch: 18, iters: 267216, time: 0.559, data: 0.000) G_L1: 15.475 G_L1_ABSOLUTE: 2.701 G_L1_RELATIVE: 12.774 G_Regularizer: 0.000 validation_error: 20.175 +(epoch: 18, iters: 269216, time: 0.559, data: 0.000) G_L1: 14.271 G_L1_ABSOLUTE: 2.354 G_L1_RELATIVE: 11.916 G_Regularizer: 0.000 validation_error: 21.079 +(epoch: 18, iters: 271216, time: 0.540, data: 0.000) G_L1: 18.815 G_L1_ABSOLUTE: 2.441 G_L1_RELATIVE: 16.374 G_Regularizer: 0.000 validation_error: 20.886 +(epoch: 18, iters: 273216, time: 0.553, data: 0.000) G_L1: 18.615 G_L1_ABSOLUTE: 2.833 G_L1_RELATIVE: 15.782 G_Regularizer: 0.000 validation_error: 20.525 +(epoch: 18, iters: 275216, time: 0.537, data: 0.000) G_L1: 15.630 G_L1_ABSOLUTE: 2.646 G_L1_RELATIVE: 12.985 G_Regularizer: 0.000 validation_error: 21.069 +(epoch: 18, iters: 277216, time: 0.540, data: 0.000) G_L1: 14.254 G_L1_ABSOLUTE: 2.586 G_L1_RELATIVE: 11.667 G_Regularizer: 0.000 validation_error: 20.609 +(epoch: 18, iters: 279216, time: 0.548, data: 0.000) G_L1: 16.233 G_L1_ABSOLUTE: 2.405 G_L1_RELATIVE: 13.828 G_Regularizer: 0.000 validation_error: 20.647 +(epoch: 18, iters: 281216, time: 0.555, data: 0.000) G_L1: 15.125 G_L1_ABSOLUTE: 2.497 G_L1_RELATIVE: 12.628 G_Regularizer: 0.000 validation_error: 20.525 +(epoch: 18, iters: 283216, time: 0.561, data: 0.000) G_L1: 16.377 G_L1_ABSOLUTE: 2.289 G_L1_RELATIVE: 14.088 G_Regularizer: 0.000 validation_error: 21.426 +(epoch: 18, iters: 285216, time: 0.561, data: 0.000) G_L1: 14.843 G_L1_ABSOLUTE: 2.748 G_L1_RELATIVE: 12.095 
G_Regularizer: 0.000 validation_error: 20.694 +(epoch: 18, iters: 287216, time: 0.560, data: 0.000) G_L1: 15.489 G_L1_ABSOLUTE: 2.866 G_L1_RELATIVE: 12.623 G_Regularizer: 0.000 validation_error: 21.113 +(epoch: 18, iters: 289216, time: 0.537, data: 0.000) G_L1: 18.288 G_L1_ABSOLUTE: 2.683 G_L1_RELATIVE: 15.604 G_Regularizer: 0.000 validation_error: 20.504 +(epoch: 18, iters: 291216, time: 0.565, data: 0.000) G_L1: 17.055 G_L1_ABSOLUTE: 3.277 G_L1_RELATIVE: 13.779 G_Regularizer: 0.000 validation_error: 21.628 +(epoch: 18, iters: 293216, time: 0.552, data: 0.000) G_L1: 15.469 G_L1_ABSOLUTE: 2.556 G_L1_RELATIVE: 12.913 G_Regularizer: 0.000 validation_error: 20.241 +(epoch: 18, iters: 295216, time: 0.534, data: 0.000) G_L1: 18.294 G_L1_ABSOLUTE: 2.542 G_L1_RELATIVE: 15.752 G_Regularizer: 0.000 validation_error: 20.582 +(epoch: 18, iters: 297216, time: 0.537, data: 0.000) G_L1: 13.243 G_L1_ABSOLUTE: 2.302 G_L1_RELATIVE: 10.940 G_Regularizer: 0.000 validation_error: 20.117 +(epoch: 18, iters: 299216, time: 0.553, data: 0.000) G_L1: 14.747 G_L1_ABSOLUTE: 2.515 G_L1_RELATIVE: 12.232 G_Regularizer: 0.000 validation_error: 20.449 +(epoch: 18, iters: 301216, time: 0.556, data: 0.000) G_L1: 16.174 G_L1_ABSOLUTE: 2.869 G_L1_RELATIVE: 13.305 G_Regularizer: 0.000 validation_error: 21.015 +(epoch: 19, iters: 464, time: 0.551, data: 0.000) G_L1: 15.367 G_L1_ABSOLUTE: 3.116 G_L1_RELATIVE: 12.251 G_Regularizer: 0.000 validation_error: 20.358 +(epoch: 19, iters: 2464, time: 0.540, data: 0.000) G_L1: 15.152 G_L1_ABSOLUTE: 2.432 G_L1_RELATIVE: 12.720 G_Regularizer: 0.000 validation_error: 20.692 +(epoch: 19, iters: 4464, time: 0.556, data: 0.000) G_L1: 12.386 G_L1_ABSOLUTE: 2.500 G_L1_RELATIVE: 9.886 G_Regularizer: 0.000 validation_error: 20.871 +(epoch: 19, iters: 6464, time: 0.549, data: 0.000) G_L1: 14.758 G_L1_ABSOLUTE: 2.293 G_L1_RELATIVE: 12.465 G_Regularizer: 0.000 validation_error: 20.409 +(epoch: 19, iters: 8464, time: 0.550, data: 0.000) G_L1: 16.797 G_L1_ABSOLUTE: 2.572 G_L1_RELATIVE: 14.225 G_Regularizer: 0.000 validation_error: 21.302 +(epoch: 19, iters: 10464, time: 0.547, data: 0.000) G_L1: 14.510 G_L1_ABSOLUTE: 2.373 G_L1_RELATIVE: 12.137 G_Regularizer: 0.000 validation_error: 20.913 +(epoch: 19, iters: 12464, time: 0.533, data: 0.000) G_L1: 22.304 G_L1_ABSOLUTE: 3.062 G_L1_RELATIVE: 19.242 G_Regularizer: 0.000 validation_error: 20.399 +(epoch: 19, iters: 14464, time: 0.546, data: 0.000) G_L1: 14.920 G_L1_ABSOLUTE: 2.618 G_L1_RELATIVE: 12.302 G_Regularizer: 0.000 validation_error: 20.402 +(epoch: 19, iters: 16464, time: 0.558, data: 0.000) G_L1: 14.168 G_L1_ABSOLUTE: 2.758 G_L1_RELATIVE: 11.410 G_Regularizer: 0.000 validation_error: 20.291 +(epoch: 19, iters: 18464, time: 0.545, data: 0.000) G_L1: 13.241 G_L1_ABSOLUTE: 2.970 G_L1_RELATIVE: 10.271 G_Regularizer: 0.000 validation_error: 20.706 +(epoch: 19, iters: 20464, time: 0.547, data: 0.000) G_L1: 13.955 G_L1_ABSOLUTE: 2.664 G_L1_RELATIVE: 11.291 G_Regularizer: 0.000 validation_error: 20.888 +(epoch: 19, iters: 22464, time: 0.553, data: 0.000) G_L1: 16.244 G_L1_ABSOLUTE: 3.079 G_L1_RELATIVE: 13.165 G_Regularizer: 0.000 validation_error: 20.496 +(epoch: 19, iters: 24464, time: 0.553, data: 0.000) G_L1: 14.189 G_L1_ABSOLUTE: 2.424 G_L1_RELATIVE: 11.765 G_Regularizer: 0.000 validation_error: 21.137 +(epoch: 19, iters: 26464, time: 0.554, data: 0.000) G_L1: 17.305 G_L1_ABSOLUTE: 2.921 G_L1_RELATIVE: 14.384 G_Regularizer: 0.000 validation_error: 20.158 +(epoch: 19, iters: 28464, time: 0.539, data: 0.000) G_L1: 14.232 G_L1_ABSOLUTE: 2.454 
G_L1_RELATIVE: 11.778 G_Regularizer: 0.000 validation_error: 20.618 +(epoch: 19, iters: 30464, time: 0.561, data: 0.000) G_L1: 18.771 G_L1_ABSOLUTE: 2.572 G_L1_RELATIVE: 16.199 G_Regularizer: 0.000 validation_error: 20.516 +(epoch: 19, iters: 32464, time: 0.565, data: 0.000) G_L1: 16.101 G_L1_ABSOLUTE: 3.052 G_L1_RELATIVE: 13.049 G_Regularizer: 0.000 validation_error: 20.404 +(epoch: 19, iters: 34464, time: 0.552, data: 0.001) G_L1: 15.848 G_L1_ABSOLUTE: 2.790 G_L1_RELATIVE: 13.057 G_Regularizer: 0.000 validation_error: 20.877 +(epoch: 19, iters: 36464, time: 0.541, data: 0.000) G_L1: 12.889 G_L1_ABSOLUTE: 2.677 G_L1_RELATIVE: 10.212 G_Regularizer: 0.000 validation_error: 21.344 +(epoch: 19, iters: 38464, time: 0.557, data: 0.000) G_L1: 12.939 G_L1_ABSOLUTE: 2.697 G_L1_RELATIVE: 10.241 G_Regularizer: 0.000 validation_error: 20.546 +(epoch: 19, iters: 40464, time: 0.562, data: 0.000) G_L1: 12.421 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 9.967 G_Regularizer: 0.000 validation_error: 20.007 +(epoch: 19, iters: 42464, time: 0.546, data: 0.000) G_L1: 14.580 G_L1_ABSOLUTE: 2.762 G_L1_RELATIVE: 11.818 G_Regularizer: 0.000 validation_error: 20.814 +(epoch: 19, iters: 44464, time: 0.537, data: 0.000) G_L1: 14.803 G_L1_ABSOLUTE: 2.668 G_L1_RELATIVE: 12.135 G_Regularizer: 0.000 validation_error: 20.194 +(epoch: 19, iters: 46464, time: 0.560, data: 0.000) G_L1: 15.727 G_L1_ABSOLUTE: 2.470 G_L1_RELATIVE: 13.256 G_Regularizer: 0.000 validation_error: 20.996 +(epoch: 19, iters: 48464, time: 0.558, data: 0.000) G_L1: 16.829 G_L1_ABSOLUTE: 2.591 G_L1_RELATIVE: 14.238 G_Regularizer: 0.000 validation_error: 20.325 +(epoch: 19, iters: 50464, time: 0.553, data: 0.000) G_L1: 14.140 G_L1_ABSOLUTE: 2.648 G_L1_RELATIVE: 11.492 G_Regularizer: 0.000 validation_error: 20.430 +(epoch: 19, iters: 52464, time: 0.530, data: 0.000) G_L1: 16.743 G_L1_ABSOLUTE: 2.757 G_L1_RELATIVE: 13.986 G_Regularizer: 0.000 validation_error: 20.868 +(epoch: 19, iters: 54464, time: 0.560, data: 0.000) G_L1: 16.116 G_L1_ABSOLUTE: 2.839 G_L1_RELATIVE: 13.277 G_Regularizer: 0.000 validation_error: 20.520 +(epoch: 19, iters: 56464, time: 0.552, data: 0.000) G_L1: 13.345 G_L1_ABSOLUTE: 2.587 G_L1_RELATIVE: 10.758 G_Regularizer: 0.000 validation_error: 21.104 +(epoch: 19, iters: 58464, time: 0.558, data: 0.000) G_L1: 15.797 G_L1_ABSOLUTE: 2.276 G_L1_RELATIVE: 13.521 G_Regularizer: 0.000 validation_error: 20.150 +(epoch: 19, iters: 60464, time: 0.533, data: 0.000) G_L1: 16.582 G_L1_ABSOLUTE: 2.884 G_L1_RELATIVE: 13.698 G_Regularizer: 0.000 validation_error: 20.565 +(epoch: 19, iters: 62464, time: 0.531, data: 0.000) G_L1: 14.153 G_L1_ABSOLUTE: 2.283 G_L1_RELATIVE: 11.871 G_Regularizer: 0.000 validation_error: 20.806 +(epoch: 19, iters: 64464, time: 0.551, data: 0.000) G_L1: 12.850 G_L1_ABSOLUTE: 2.180 G_L1_RELATIVE: 10.670 G_Regularizer: 0.000 validation_error: 20.508 +(epoch: 19, iters: 66464, time: 0.558, data: 0.000) G_L1: 14.965 G_L1_ABSOLUTE: 2.978 G_L1_RELATIVE: 11.987 G_Regularizer: 0.000 validation_error: 20.688 +(epoch: 19, iters: 68464, time: 0.537, data: 0.000) G_L1: 15.289 G_L1_ABSOLUTE: 2.597 G_L1_RELATIVE: 12.692 G_Regularizer: 0.000 validation_error: 20.697 +(epoch: 19, iters: 70464, time: 0.546, data: 0.001) G_L1: 12.357 G_L1_ABSOLUTE: 2.241 G_L1_RELATIVE: 10.116 G_Regularizer: 0.000 validation_error: 20.994 +(epoch: 19, iters: 72464, time: 0.563, data: 0.001) G_L1: 14.091 G_L1_ABSOLUTE: 2.339 G_L1_RELATIVE: 11.752 G_Regularizer: 0.000 validation_error: 20.434 +(epoch: 19, iters: 74464, time: 0.557, data: 0.000) G_L1: 14.433 
G_L1_ABSOLUTE: 2.324 G_L1_RELATIVE: 12.109 G_Regularizer: 0.000 validation_error: 21.035 +(epoch: 19, iters: 76464, time: 0.563, data: 0.000) G_L1: 17.195 G_L1_ABSOLUTE: 2.205 G_L1_RELATIVE: 14.990 G_Regularizer: 0.000 validation_error: 20.965 +(epoch: 19, iters: 78464, time: 0.552, data: 0.000) G_L1: 15.788 G_L1_ABSOLUTE: 2.661 G_L1_RELATIVE: 13.126 G_Regularizer: 0.000 validation_error: 21.358 +(epoch: 19, iters: 80464, time: 0.564, data: 0.000) G_L1: 14.927 G_L1_ABSOLUTE: 2.598 G_L1_RELATIVE: 12.329 G_Regularizer: 0.000 validation_error: 20.387 +(epoch: 19, iters: 82464, time: 0.561, data: 0.000) G_L1: 19.138 G_L1_ABSOLUTE: 2.419 G_L1_RELATIVE: 16.718 G_Regularizer: 0.000 validation_error: 20.237 +(epoch: 19, iters: 84464, time: 0.557, data: 0.000) G_L1: 16.363 G_L1_ABSOLUTE: 2.794 G_L1_RELATIVE: 13.569 G_Regularizer: 0.000 validation_error: 21.018 +(epoch: 19, iters: 86464, time: 0.548, data: 0.000) G_L1: 15.052 G_L1_ABSOLUTE: 2.933 G_L1_RELATIVE: 12.119 G_Regularizer: 0.000 validation_error: 20.841 +(epoch: 19, iters: 88464, time: 0.554, data: 0.000) G_L1: 16.366 G_L1_ABSOLUTE: 2.758 G_L1_RELATIVE: 13.608 G_Regularizer: 0.000 validation_error: 21.035 +(epoch: 19, iters: 90464, time: 0.561, data: 0.000) G_L1: 16.883 G_L1_ABSOLUTE: 2.534 G_L1_RELATIVE: 14.348 G_Regularizer: 0.000 validation_error: 20.783 +(epoch: 19, iters: 92464, time: 0.534, data: 0.000) G_L1: 13.485 G_L1_ABSOLUTE: 2.834 G_L1_RELATIVE: 10.651 G_Regularizer: 0.000 validation_error: 20.450 +(epoch: 19, iters: 94464, time: 0.536, data: 0.000) G_L1: 14.322 G_L1_ABSOLUTE: 3.047 G_L1_RELATIVE: 11.275 G_Regularizer: 0.000 validation_error: 21.266 +(epoch: 19, iters: 96464, time: 0.538, data: 0.000) G_L1: 13.121 G_L1_ABSOLUTE: 2.336 G_L1_RELATIVE: 10.785 G_Regularizer: 0.000 validation_error: 20.367 +(epoch: 19, iters: 98464, time: 0.551, data: 0.000) G_L1: 16.684 G_L1_ABSOLUTE: 2.853 G_L1_RELATIVE: 13.831 G_Regularizer: 0.000 validation_error: 20.864 +(epoch: 19, iters: 100464, time: 0.563, data: 0.000) G_L1: 13.774 G_L1_ABSOLUTE: 2.797 G_L1_RELATIVE: 10.977 G_Regularizer: 0.000 validation_error: 19.794 +(epoch: 19, iters: 102464, time: 0.547, data: 0.000) G_L1: 18.478 G_L1_ABSOLUTE: 3.217 G_L1_RELATIVE: 15.261 G_Regularizer: 0.000 validation_error: 20.950 +(epoch: 19, iters: 104464, time: 0.546, data: 0.001) G_L1: 15.116 G_L1_ABSOLUTE: 3.028 G_L1_RELATIVE: 12.088 G_Regularizer: 0.000 validation_error: 20.502 +(epoch: 19, iters: 106464, time: 0.560, data: 0.000) G_L1: 16.216 G_L1_ABSOLUTE: 3.163 G_L1_RELATIVE: 13.053 G_Regularizer: 0.000 validation_error: 20.441 +(epoch: 19, iters: 108464, time: 0.559, data: 0.000) G_L1: 15.052 G_L1_ABSOLUTE: 2.522 G_L1_RELATIVE: 12.529 G_Regularizer: 0.000 validation_error: 20.667 +(epoch: 19, iters: 110464, time: 0.543, data: 0.000) G_L1: 18.565 G_L1_ABSOLUTE: 2.574 G_L1_RELATIVE: 15.991 G_Regularizer: 0.000 validation_error: 20.589 +(epoch: 19, iters: 112464, time: 0.562, data: 0.000) G_L1: 14.327 G_L1_ABSOLUTE: 2.583 G_L1_RELATIVE: 11.744 G_Regularizer: 0.000 validation_error: 20.810 +(epoch: 19, iters: 114464, time: 0.561, data: 0.000) G_L1: 13.605 G_L1_ABSOLUTE: 2.453 G_L1_RELATIVE: 11.152 G_Regularizer: 0.000 validation_error: 20.575 +(epoch: 19, iters: 116464, time: 0.557, data: 0.000) G_L1: 14.028 G_L1_ABSOLUTE: 2.467 G_L1_RELATIVE: 11.562 G_Regularizer: 0.000 validation_error: 20.554 +(epoch: 19, iters: 118464, time: 0.552, data: 0.000) G_L1: 16.283 G_L1_ABSOLUTE: 2.549 G_L1_RELATIVE: 13.734 G_Regularizer: 0.000 validation_error: 20.895 +(epoch: 19, iters: 120464, time: 0.532, 
+[... training loss log continues through epochs 19–22, one entry per 2,000 iterations. Each line has the form "(epoch: E, iters: I, time: T, data: D) G_L1: x G_L1_ABSOLUTE: a G_L1_RELATIVE: r G_Regularizer: g validation_error: v", where G_L1 is the sum of G_L1_ABSOLUTE and G_L1_RELATIVE, G_Regularizer is 0.000 throughout, validation_error stays roughly in the 19.1–21.7 range, per-iteration time is mostly 0.53–0.60 s, and data-loading time is ~0.000–0.001 s ...]
170208, time: 0.568, data: 0.000) G_L1: 16.265 G_L1_ABSOLUTE: 2.737 G_L1_RELATIVE: 13.528 G_Regularizer: 0.000 validation_error: 20.855 +(epoch: 22, iters: 172208, time: 0.572, data: 0.000) G_L1: 15.112 G_L1_ABSOLUTE: 2.228 G_L1_RELATIVE: 12.884 G_Regularizer: 0.000 validation_error: 20.878 +(epoch: 22, iters: 174208, time: 0.595, data: 0.000) G_L1: 15.892 G_L1_ABSOLUTE: 2.274 G_L1_RELATIVE: 13.619 G_Regularizer: 0.000 validation_error: 20.315 +(epoch: 22, iters: 176208, time: 0.571, data: 0.000) G_L1: 15.324 G_L1_ABSOLUTE: 2.517 G_L1_RELATIVE: 12.807 G_Regularizer: 0.000 validation_error: 20.197 +(epoch: 22, iters: 178208, time: 0.575, data: 0.000) G_L1: 14.730 G_L1_ABSOLUTE: 2.616 G_L1_RELATIVE: 12.115 G_Regularizer: 0.000 validation_error: 21.161 +(epoch: 22, iters: 180208, time: 0.566, data: 0.000) G_L1: 14.645 G_L1_ABSOLUTE: 2.510 G_L1_RELATIVE: 12.135 G_Regularizer: 0.000 validation_error: 21.628 +(epoch: 22, iters: 182208, time: 0.585, data: 0.000) G_L1: 14.469 G_L1_ABSOLUTE: 2.841 G_L1_RELATIVE: 11.628 G_Regularizer: 0.000 validation_error: 20.338 +(epoch: 22, iters: 184208, time: 0.573, data: 0.000) G_L1: 15.051 G_L1_ABSOLUTE: 2.596 G_L1_RELATIVE: 12.455 G_Regularizer: 0.000 validation_error: 20.761 +(epoch: 22, iters: 186208, time: 0.577, data: 0.001) G_L1: 14.330 G_L1_ABSOLUTE: 2.601 G_L1_RELATIVE: 11.729 G_Regularizer: 0.000 validation_error: 21.162 +(epoch: 22, iters: 188208, time: 0.571, data: 0.000) G_L1: 16.372 G_L1_ABSOLUTE: 3.071 G_L1_RELATIVE: 13.301 G_Regularizer: 0.000 validation_error: 20.983 +(epoch: 22, iters: 190208, time: 0.585, data: 0.000) G_L1: 17.225 G_L1_ABSOLUTE: 2.806 G_L1_RELATIVE: 14.418 G_Regularizer: 0.000 validation_error: 20.821 +(epoch: 22, iters: 192208, time: 0.574, data: 0.000) G_L1: 12.951 G_L1_ABSOLUTE: 2.870 G_L1_RELATIVE: 10.081 G_Regularizer: 0.000 validation_error: 20.781 +(epoch: 22, iters: 194208, time: 0.571, data: 0.000) G_L1: 13.193 G_L1_ABSOLUTE: 2.964 G_L1_RELATIVE: 10.229 G_Regularizer: 0.000 validation_error: 20.918 +(epoch: 22, iters: 196208, time: 0.579, data: 0.000) G_L1: 13.304 G_L1_ABSOLUTE: 2.997 G_L1_RELATIVE: 10.307 G_Regularizer: 0.000 validation_error: 20.415 +(epoch: 22, iters: 198208, time: 0.576, data: 0.000) G_L1: 16.164 G_L1_ABSOLUTE: 2.856 G_L1_RELATIVE: 13.308 G_Regularizer: 0.000 validation_error: 20.463 +(epoch: 22, iters: 200208, time: 0.568, data: 0.000) G_L1: 13.908 G_L1_ABSOLUTE: 2.363 G_L1_RELATIVE: 11.545 G_Regularizer: 0.000 validation_error: 20.277 +(epoch: 22, iters: 202208, time: 0.579, data: 0.000) G_L1: 16.757 G_L1_ABSOLUTE: 2.518 G_L1_RELATIVE: 14.239 G_Regularizer: 0.000 validation_error: 20.318 +(epoch: 22, iters: 204208, time: 0.572, data: 0.000) G_L1: 14.372 G_L1_ABSOLUTE: 2.412 G_L1_RELATIVE: 11.960 G_Regularizer: 0.000 validation_error: 20.490 +(epoch: 22, iters: 206208, time: 0.566, data: 0.000) G_L1: 16.698 G_L1_ABSOLUTE: 2.770 G_L1_RELATIVE: 13.927 G_Regularizer: 0.000 validation_error: 21.097 +(epoch: 22, iters: 208208, time: 0.585, data: 0.000) G_L1: 14.706 G_L1_ABSOLUTE: 2.890 G_L1_RELATIVE: 11.816 G_Regularizer: 0.000 validation_error: 21.336 +(epoch: 22, iters: 210208, time: 0.574, data: 0.000) G_L1: 15.137 G_L1_ABSOLUTE: 2.482 G_L1_RELATIVE: 12.655 G_Regularizer: 0.000 validation_error: 21.339 +(epoch: 22, iters: 212208, time: 0.573, data: 0.000) G_L1: 15.206 G_L1_ABSOLUTE: 2.720 G_L1_RELATIVE: 12.485 G_Regularizer: 0.000 validation_error: 20.796 +(epoch: 22, iters: 214208, time: 0.578, data: 0.000) G_L1: 19.794 G_L1_ABSOLUTE: 2.301 G_L1_RELATIVE: 17.494 G_Regularizer: 0.000 
validation_error: 20.760 +(epoch: 22, iters: 216208, time: 0.574, data: 0.000) G_L1: 14.741 G_L1_ABSOLUTE: 2.642 G_L1_RELATIVE: 12.098 G_Regularizer: 0.000 validation_error: 21.149 +(epoch: 22, iters: 218208, time: 0.578, data: 0.000) G_L1: 15.228 G_L1_ABSOLUTE: 2.157 G_L1_RELATIVE: 13.071 G_Regularizer: 0.000 validation_error: 20.668 +(epoch: 22, iters: 220208, time: 0.562, data: 0.000) G_L1: 14.777 G_L1_ABSOLUTE: 2.588 G_L1_RELATIVE: 12.189 G_Regularizer: 0.000 validation_error: 20.619 +(epoch: 22, iters: 222208, time: 0.566, data: 0.000) G_L1: 16.897 G_L1_ABSOLUTE: 2.551 G_L1_RELATIVE: 14.347 G_Regularizer: 0.000 validation_error: 20.969 +(epoch: 22, iters: 224208, time: 0.584, data: 0.000) G_L1: 13.679 G_L1_ABSOLUTE: 2.550 G_L1_RELATIVE: 11.128 G_Regularizer: 0.000 validation_error: 20.558 +(epoch: 22, iters: 226208, time: 0.575, data: 0.000) G_L1: 12.343 G_L1_ABSOLUTE: 2.407 G_L1_RELATIVE: 9.936 G_Regularizer: 0.000 validation_error: 20.332 +(epoch: 22, iters: 228208, time: 0.569, data: 0.000) G_L1: 15.391 G_L1_ABSOLUTE: 3.176 G_L1_RELATIVE: 12.214 G_Regularizer: 0.000 validation_error: 20.709 +(epoch: 22, iters: 230208, time: 0.578, data: 0.000) G_L1: 13.300 G_L1_ABSOLUTE: 2.473 G_L1_RELATIVE: 10.827 G_Regularizer: 0.000 validation_error: 20.924 +(epoch: 22, iters: 232208, time: 0.574, data: 0.000) G_L1: 13.914 G_L1_ABSOLUTE: 2.269 G_L1_RELATIVE: 11.645 G_Regularizer: 0.000 validation_error: 20.849 +(epoch: 22, iters: 234208, time: 0.565, data: 0.000) G_L1: 15.530 G_L1_ABSOLUTE: 2.619 G_L1_RELATIVE: 12.911 G_Regularizer: 0.000 validation_error: 20.703 +(epoch: 22, iters: 236208, time: 0.575, data: 0.000) G_L1: 16.802 G_L1_ABSOLUTE: 2.525 G_L1_RELATIVE: 14.277 G_Regularizer: 0.000 validation_error: 21.191 +(epoch: 22, iters: 238208, time: 0.564, data: 0.001) G_L1: 15.094 G_L1_ABSOLUTE: 3.021 G_L1_RELATIVE: 12.073 G_Regularizer: 0.000 validation_error: 20.721 +(epoch: 22, iters: 240208, time: 0.576, data: 0.000) G_L1: 17.860 G_L1_ABSOLUTE: 2.807 G_L1_RELATIVE: 15.053 G_Regularizer: 0.000 validation_error: 20.864 +(epoch: 22, iters: 242208, time: 0.576, data: 0.000) G_L1: 14.657 G_L1_ABSOLUTE: 2.773 G_L1_RELATIVE: 11.884 G_Regularizer: 0.000 validation_error: 20.423 +(epoch: 22, iters: 244208, time: 0.568, data: 0.000) G_L1: 14.489 G_L1_ABSOLUTE: 2.718 G_L1_RELATIVE: 11.771 G_Regularizer: 0.000 validation_error: 20.614 +(epoch: 22, iters: 246208, time: 0.572, data: 0.000) G_L1: 15.719 G_L1_ABSOLUTE: 3.348 G_L1_RELATIVE: 12.371 G_Regularizer: 0.000 validation_error: 20.354 +(epoch: 22, iters: 248208, time: 0.572, data: 0.000) G_L1: 13.449 G_L1_ABSOLUTE: 2.302 G_L1_RELATIVE: 11.147 G_Regularizer: 0.000 validation_error: 20.401 +(epoch: 22, iters: 250208, time: 0.572, data: 0.000) G_L1: 19.439 G_L1_ABSOLUTE: 2.923 G_L1_RELATIVE: 16.516 G_Regularizer: 0.000 validation_error: 20.596 +(epoch: 22, iters: 252208, time: 0.576, data: 0.000) G_L1: 20.473 G_L1_ABSOLUTE: 2.544 G_L1_RELATIVE: 17.929 G_Regularizer: 0.000 validation_error: 20.345 +(epoch: 22, iters: 254208, time: 0.578, data: 0.000) G_L1: 13.250 G_L1_ABSOLUTE: 2.361 G_L1_RELATIVE: 10.889 G_Regularizer: 0.000 validation_error: 19.922 +(epoch: 22, iters: 256208, time: 0.578, data: 0.000) G_L1: 14.300 G_L1_ABSOLUTE: 2.381 G_L1_RELATIVE: 11.919 G_Regularizer: 0.000 validation_error: 21.463 +(epoch: 22, iters: 258208, time: 0.594, data: 0.000) G_L1: 16.455 G_L1_ABSOLUTE: 2.439 G_L1_RELATIVE: 14.016 G_Regularizer: 0.000 validation_error: 21.107 +(epoch: 22, iters: 260208, time: 0.574, data: 0.000) G_L1: 17.511 G_L1_ABSOLUTE: 3.236 
G_L1_RELATIVE: 14.275 G_Regularizer: 0.000 validation_error: 20.033 +(epoch: 22, iters: 262208, time: 0.580, data: 0.000) G_L1: 16.892 G_L1_ABSOLUTE: 3.088 G_L1_RELATIVE: 13.803 G_Regularizer: 0.000 validation_error: 21.205 +(epoch: 22, iters: 264208, time: 0.576, data: 0.000) G_L1: 16.247 G_L1_ABSOLUTE: 2.733 G_L1_RELATIVE: 13.515 G_Regularizer: 0.000 validation_error: 20.622 +(epoch: 22, iters: 266208, time: 0.575, data: 0.000) G_L1: 11.013 G_L1_ABSOLUTE: 2.451 G_L1_RELATIVE: 8.562 G_Regularizer: 0.000 validation_error: 20.900 +(epoch: 22, iters: 268208, time: 0.579, data: 0.000) G_L1: 12.126 G_L1_ABSOLUTE: 2.097 G_L1_RELATIVE: 10.029 G_Regularizer: 0.000 validation_error: 20.689 +(epoch: 22, iters: 270208, time: 0.559, data: 0.000) G_L1: 13.119 G_L1_ABSOLUTE: 2.491 G_L1_RELATIVE: 10.628 G_Regularizer: 0.000 validation_error: 20.985 +(epoch: 22, iters: 272208, time: 0.554, data: 0.000) G_L1: 16.110 G_L1_ABSOLUTE: 2.555 G_L1_RELATIVE: 13.555 G_Regularizer: 0.000 validation_error: 20.491 +(epoch: 22, iters: 274208, time: 0.578, data: 0.000) G_L1: 14.058 G_L1_ABSOLUTE: 2.773 G_L1_RELATIVE: 11.284 G_Regularizer: 0.000 validation_error: 20.390 +(epoch: 22, iters: 276208, time: 0.564, data: 0.000) G_L1: 13.771 G_L1_ABSOLUTE: 2.291 G_L1_RELATIVE: 11.480 G_Regularizer: 0.000 validation_error: 21.177 +(epoch: 22, iters: 278208, time: 0.551, data: 0.000) G_L1: 15.467 G_L1_ABSOLUTE: 2.640 G_L1_RELATIVE: 12.827 G_Regularizer: 0.000 validation_error: 20.649 +(epoch: 22, iters: 280208, time: 0.560, data: 0.000) G_L1: 12.856 G_L1_ABSOLUTE: 2.265 G_L1_RELATIVE: 10.591 G_Regularizer: 0.000 validation_error: 20.974 +(epoch: 22, iters: 282208, time: 0.566, data: 0.000) G_L1: 15.769 G_L1_ABSOLUTE: 2.542 G_L1_RELATIVE: 13.227 G_Regularizer: 0.000 validation_error: 20.404 +(epoch: 22, iters: 284208, time: 0.555, data: 0.000) G_L1: 14.615 G_L1_ABSOLUTE: 2.806 G_L1_RELATIVE: 11.809 G_Regularizer: 0.000 validation_error: 20.317 +(epoch: 22, iters: 286208, time: 0.559, data: 0.000) G_L1: 13.354 G_L1_ABSOLUTE: 2.439 G_L1_RELATIVE: 10.915 G_Regularizer: 0.000 validation_error: 20.728 +(epoch: 22, iters: 288208, time: 0.556, data: 0.000) G_L1: 14.620 G_L1_ABSOLUTE: 2.573 G_L1_RELATIVE: 12.047 G_Regularizer: 0.000 validation_error: 20.308 +(epoch: 22, iters: 290208, time: 0.560, data: 0.000) G_L1: 14.149 G_L1_ABSOLUTE: 2.548 G_L1_RELATIVE: 11.601 G_Regularizer: 0.000 validation_error: 20.183 +(epoch: 22, iters: 292208, time: 0.565, data: 0.001) G_L1: 14.548 G_L1_ABSOLUTE: 2.387 G_L1_RELATIVE: 12.161 G_Regularizer: 0.000 validation_error: 21.161 +(epoch: 22, iters: 294208, time: 0.565, data: 0.000) G_L1: 17.412 G_L1_ABSOLUTE: 2.760 G_L1_RELATIVE: 14.653 G_Regularizer: 0.000 validation_error: 21.128 +(epoch: 22, iters: 296208, time: 0.568, data: 0.000) G_L1: 16.536 G_L1_ABSOLUTE: 2.874 G_L1_RELATIVE: 13.662 G_Regularizer: 0.000 validation_error: 21.395 +(epoch: 22, iters: 298208, time: 0.559, data: 0.000) G_L1: 13.809 G_L1_ABSOLUTE: 2.528 G_L1_RELATIVE: 11.281 G_Regularizer: 0.000 validation_error: 21.273 +(epoch: 22, iters: 300208, time: 0.556, data: 0.000) G_L1: 17.023 G_L1_ABSOLUTE: 2.865 G_L1_RELATIVE: 14.158 G_Regularizer: 0.000 validation_error: 20.576 +(epoch: 22, iters: 302208, time: 0.554, data: 0.000) G_L1: 13.653 G_L1_ABSOLUTE: 2.480 G_L1_RELATIVE: 11.173 G_Regularizer: 0.000 validation_error: 20.612 +(epoch: 23, iters: 1456, time: 0.553, data: 0.000) G_L1: 14.903 G_L1_ABSOLUTE: 2.769 G_L1_RELATIVE: 12.133 G_Regularizer: 0.000 validation_error: 20.371 +(epoch: 23, iters: 3456, time: 0.556, data: 0.000) G_L1: 
15.112 G_L1_ABSOLUTE: 3.050 G_L1_RELATIVE: 12.061 G_Regularizer: 0.000 validation_error: 21.096 +(epoch: 23, iters: 5456, time: 0.564, data: 0.000) G_L1: 15.330 G_L1_ABSOLUTE: 2.528 G_L1_RELATIVE: 12.802 G_Regularizer: 0.000 validation_error: 21.416 +(epoch: 23, iters: 7456, time: 0.562, data: 0.000) G_L1: 12.981 G_L1_ABSOLUTE: 2.582 G_L1_RELATIVE: 10.399 G_Regularizer: 0.000 validation_error: 20.816 +(epoch: 23, iters: 9456, time: 0.558, data: 0.000) G_L1: 15.927 G_L1_ABSOLUTE: 2.717 G_L1_RELATIVE: 13.210 G_Regularizer: 0.000 validation_error: 20.710 +(epoch: 23, iters: 11456, time: 0.549, data: 0.000) G_L1: 14.318 G_L1_ABSOLUTE: 2.150 G_L1_RELATIVE: 12.168 G_Regularizer: 0.000 validation_error: 20.687 +(epoch: 23, iters: 13456, time: 0.556, data: 0.000) G_L1: 11.802 G_L1_ABSOLUTE: 2.034 G_L1_RELATIVE: 9.767 G_Regularizer: 0.000 validation_error: 20.625 +(epoch: 23, iters: 15456, time: 0.564, data: 0.000) G_L1: 16.303 G_L1_ABSOLUTE: 2.599 G_L1_RELATIVE: 13.704 G_Regularizer: 0.000 validation_error: 20.654 +(epoch: 23, iters: 17456, time: 0.561, data: 0.000) G_L1: 14.655 G_L1_ABSOLUTE: 2.718 G_L1_RELATIVE: 11.937 G_Regularizer: 0.000 validation_error: 21.117 +(epoch: 23, iters: 19456, time: 0.563, data: 0.000) G_L1: 14.235 G_L1_ABSOLUTE: 2.757 G_L1_RELATIVE: 11.478 G_Regularizer: 0.000 validation_error: 20.900 +(epoch: 23, iters: 21456, time: 0.562, data: 0.000) G_L1: 13.999 G_L1_ABSOLUTE: 2.486 G_L1_RELATIVE: 11.514 G_Regularizer: 0.000 validation_error: 20.555 +(epoch: 23, iters: 23456, time: 0.569, data: 0.000) G_L1: 15.028 G_L1_ABSOLUTE: 2.495 G_L1_RELATIVE: 12.534 G_Regularizer: 0.000 validation_error: 20.498 +(epoch: 23, iters: 25456, time: 0.557, data: 0.000) G_L1: 15.079 G_L1_ABSOLUTE: 2.439 G_L1_RELATIVE: 12.639 G_Regularizer: 0.000 validation_error: 20.540 +(epoch: 23, iters: 27456, time: 0.554, data: 0.000) G_L1: 17.180 G_L1_ABSOLUTE: 2.900 G_L1_RELATIVE: 14.281 G_Regularizer: 0.000 validation_error: 21.392 +(epoch: 23, iters: 29456, time: 0.568, data: 0.000) G_L1: 16.802 G_L1_ABSOLUTE: 2.933 G_L1_RELATIVE: 13.869 G_Regularizer: 0.000 validation_error: 20.398 +(epoch: 23, iters: 31456, time: 0.558, data: 0.000) G_L1: 16.203 G_L1_ABSOLUTE: 2.560 G_L1_RELATIVE: 13.643 G_Regularizer: 0.000 validation_error: 20.441 +(epoch: 23, iters: 33456, time: 0.553, data: 0.000) G_L1: 13.631 G_L1_ABSOLUTE: 2.598 G_L1_RELATIVE: 11.033 G_Regularizer: 0.000 validation_error: 20.890 +(epoch: 23, iters: 35456, time: 0.559, data: 0.000) G_L1: 11.032 G_L1_ABSOLUTE: 2.333 G_L1_RELATIVE: 8.699 G_Regularizer: 0.000 validation_error: 21.133 +(epoch: 23, iters: 37456, time: 0.555, data: 0.000) G_L1: 12.836 G_L1_ABSOLUTE: 2.362 G_L1_RELATIVE: 10.473 G_Regularizer: 0.000 validation_error: 20.687 +(epoch: 23, iters: 39456, time: 0.566, data: 0.000) G_L1: 13.384 G_L1_ABSOLUTE: 2.917 G_L1_RELATIVE: 10.467 G_Regularizer: 0.000 validation_error: 20.393 +(epoch: 23, iters: 41456, time: 0.547, data: 0.000) G_L1: 14.499 G_L1_ABSOLUTE: 2.476 G_L1_RELATIVE: 12.023 G_Regularizer: 0.000 validation_error: 20.368 +(epoch: 23, iters: 43456, time: 0.555, data: 0.000) G_L1: 14.552 G_L1_ABSOLUTE: 2.870 G_L1_RELATIVE: 11.681 G_Regularizer: 0.000 validation_error: 20.330 +(epoch: 23, iters: 45456, time: 0.552, data: 0.000) G_L1: 15.767 G_L1_ABSOLUTE: 2.509 G_L1_RELATIVE: 13.259 G_Regularizer: 0.000 validation_error: 20.593 +(epoch: 23, iters: 47456, time: 0.562, data: 0.000) G_L1: 20.930 G_L1_ABSOLUTE: 2.365 G_L1_RELATIVE: 18.565 G_Regularizer: 0.000 validation_error: 20.489 +(epoch: 23, iters: 49456, time: 0.555, data: 0.000) 
G_L1: 19.710 G_L1_ABSOLUTE: 2.696 G_L1_RELATIVE: 17.014 G_Regularizer: 0.000 validation_error: 19.938 +(epoch: 23, iters: 51456, time: 0.555, data: 0.000) G_L1: 13.471 G_L1_ABSOLUTE: 2.478 G_L1_RELATIVE: 10.993 G_Regularizer: 0.000 validation_error: 20.495 +(epoch: 23, iters: 53456, time: 0.560, data: 0.000) G_L1: 15.839 G_L1_ABSOLUTE: 3.018 G_L1_RELATIVE: 12.820 G_Regularizer: 0.000 validation_error: 20.584 +(epoch: 23, iters: 55456, time: 0.566, data: 0.000) G_L1: 13.349 G_L1_ABSOLUTE: 2.331 G_L1_RELATIVE: 11.018 G_Regularizer: 0.000 validation_error: 21.056 +(epoch: 23, iters: 57456, time: 0.571, data: 0.000) G_L1: 14.110 G_L1_ABSOLUTE: 2.279 G_L1_RELATIVE: 11.831 G_Regularizer: 0.000 validation_error: 20.386 +(epoch: 23, iters: 59456, time: 0.554, data: 0.000) G_L1: 15.851 G_L1_ABSOLUTE: 2.618 G_L1_RELATIVE: 13.233 G_Regularizer: 0.000 validation_error: 20.577 +(epoch: 23, iters: 61456, time: 0.556, data: 0.001) G_L1: 13.561 G_L1_ABSOLUTE: 2.773 G_L1_RELATIVE: 10.788 G_Regularizer: 0.000 validation_error: 20.307 +(epoch: 23, iters: 63456, time: 0.561, data: 0.000) G_L1: 17.033 G_L1_ABSOLUTE: 2.769 G_L1_RELATIVE: 14.264 G_Regularizer: 0.000 validation_error: 20.742 +(epoch: 23, iters: 65456, time: 0.559, data: 0.000) G_L1: 16.582 G_L1_ABSOLUTE: 2.705 G_L1_RELATIVE: 13.878 G_Regularizer: 0.000 validation_error: 20.233 +(epoch: 23, iters: 67456, time: 0.558, data: 0.000) G_L1: 15.147 G_L1_ABSOLUTE: 2.896 G_L1_RELATIVE: 12.250 G_Regularizer: 0.000 validation_error: 20.009 +(epoch: 23, iters: 69456, time: 0.557, data: 0.000) G_L1: 14.285 G_L1_ABSOLUTE: 2.814 G_L1_RELATIVE: 11.471 G_Regularizer: 0.000 validation_error: 20.364 +(epoch: 23, iters: 71456, time: 0.564, data: 0.000) G_L1: 13.552 G_L1_ABSOLUTE: 2.203 G_L1_RELATIVE: 11.349 G_Regularizer: 0.000 validation_error: 21.166 +(epoch: 23, iters: 73456, time: 0.578, data: 0.000) G_L1: 15.057 G_L1_ABSOLUTE: 2.341 G_L1_RELATIVE: 12.716 G_Regularizer: 0.000 validation_error: 20.451 +(epoch: 23, iters: 75456, time: 0.558, data: 0.000) G_L1: 13.736 G_L1_ABSOLUTE: 2.658 G_L1_RELATIVE: 11.078 G_Regularizer: 0.000 validation_error: 20.577 +(epoch: 23, iters: 77456, time: 0.557, data: 0.000) G_L1: 14.478 G_L1_ABSOLUTE: 2.577 G_L1_RELATIVE: 11.901 G_Regularizer: 0.000 validation_error: 20.507 +(epoch: 23, iters: 79456, time: 0.559, data: 0.000) G_L1: 15.038 G_L1_ABSOLUTE: 2.735 G_L1_RELATIVE: 12.303 G_Regularizer: 0.000 validation_error: 20.919 +(epoch: 23, iters: 81456, time: 0.552, data: 0.000) G_L1: 13.107 G_L1_ABSOLUTE: 2.556 G_L1_RELATIVE: 10.551 G_Regularizer: 0.000 validation_error: 20.512 +(epoch: 23, iters: 83456, time: 0.547, data: 0.000) G_L1: 16.529 G_L1_ABSOLUTE: 2.367 G_L1_RELATIVE: 14.161 G_Regularizer: 0.000 validation_error: 20.509 +(epoch: 23, iters: 85456, time: 0.561, data: 0.000) G_L1: 15.137 G_L1_ABSOLUTE: 2.435 G_L1_RELATIVE: 12.701 G_Regularizer: 0.000 validation_error: 20.605 +(epoch: 23, iters: 87456, time: 0.559, data: 0.000) G_L1: 12.414 G_L1_ABSOLUTE: 2.475 G_L1_RELATIVE: 9.939 G_Regularizer: 0.000 validation_error: 20.514 +(epoch: 23, iters: 89456, time: 0.558, data: 0.000) G_L1: 15.780 G_L1_ABSOLUTE: 2.596 G_L1_RELATIVE: 13.184 G_Regularizer: 0.000 validation_error: 20.927 +(epoch: 23, iters: 91456, time: 0.560, data: 0.000) G_L1: 14.742 G_L1_ABSOLUTE: 2.749 G_L1_RELATIVE: 11.993 G_Regularizer: 0.000 validation_error: 20.667 +(epoch: 23, iters: 93456, time: 0.557, data: 0.000) G_L1: 13.654 G_L1_ABSOLUTE: 2.863 G_L1_RELATIVE: 10.791 G_Regularizer: 0.000 validation_error: 20.079 +(epoch: 23, iters: 95456, time: 0.551, 
data: 0.000) G_L1: 14.073 G_L1_ABSOLUTE: 2.333 G_L1_RELATIVE: 11.740 G_Regularizer: 0.000 validation_error: 21.042 +(epoch: 23, iters: 97456, time: 0.558, data: 0.000) G_L1: 15.919 G_L1_ABSOLUTE: 2.517 G_L1_RELATIVE: 13.402 G_Regularizer: 0.000 validation_error: 20.174 +(epoch: 23, iters: 99456, time: 0.557, data: 0.000) G_L1: 12.953 G_L1_ABSOLUTE: 2.450 G_L1_RELATIVE: 10.503 G_Regularizer: 0.000 validation_error: 20.749 +(epoch: 23, iters: 101456, time: 0.553, data: 0.000) G_L1: 15.671 G_L1_ABSOLUTE: 2.698 G_L1_RELATIVE: 12.973 G_Regularizer: 0.000 validation_error: 20.712 +(epoch: 23, iters: 103456, time: 0.557, data: 0.000) G_L1: 14.997 G_L1_ABSOLUTE: 2.321 G_L1_RELATIVE: 12.676 G_Regularizer: 0.000 validation_error: 20.744 +(epoch: 23, iters: 105456, time: 0.561, data: 0.000) G_L1: 13.304 G_L1_ABSOLUTE: 2.198 G_L1_RELATIVE: 11.106 G_Regularizer: 0.000 validation_error: 20.886 +(epoch: 23, iters: 107456, time: 0.582, data: 0.000) G_L1: 13.757 G_L1_ABSOLUTE: 2.504 G_L1_RELATIVE: 11.253 G_Regularizer: 0.000 validation_error: 21.773 +(epoch: 23, iters: 109456, time: 0.552, data: 0.000) G_L1: 14.261 G_L1_ABSOLUTE: 2.629 G_L1_RELATIVE: 11.632 G_Regularizer: 0.000 validation_error: 20.621 +(epoch: 23, iters: 111456, time: 0.563, data: 0.000) G_L1: 15.512 G_L1_ABSOLUTE: 2.704 G_L1_RELATIVE: 12.808 G_Regularizer: 0.000 validation_error: 20.513 +(epoch: 23, iters: 113456, time: 0.561, data: 0.000) G_L1: 17.521 G_L1_ABSOLUTE: 2.831 G_L1_RELATIVE: 14.690 G_Regularizer: 0.000 validation_error: 20.707 +(epoch: 23, iters: 115456, time: 0.550, data: 0.000) G_L1: 13.892 G_L1_ABSOLUTE: 2.260 G_L1_RELATIVE: 11.632 G_Regularizer: 0.000 validation_error: 20.582 +(epoch: 23, iters: 117456, time: 0.556, data: 0.000) G_L1: 13.963 G_L1_ABSOLUTE: 2.811 G_L1_RELATIVE: 11.151 G_Regularizer: 0.000 validation_error: 20.216 +(epoch: 23, iters: 119456, time: 0.547, data: 0.000) G_L1: 13.331 G_L1_ABSOLUTE: 2.329 G_L1_RELATIVE: 11.002 G_Regularizer: 0.000 validation_error: 21.011 +(epoch: 23, iters: 121456, time: 0.557, data: 0.000) G_L1: 14.324 G_L1_ABSOLUTE: 3.214 G_L1_RELATIVE: 11.110 G_Regularizer: 0.000 validation_error: 20.091 +(epoch: 23, iters: 123456, time: 0.565, data: 0.000) G_L1: 15.333 G_L1_ABSOLUTE: 2.590 G_L1_RELATIVE: 12.743 G_Regularizer: 0.000 validation_error: 20.622 +(epoch: 23, iters: 125456, time: 0.578, data: 0.000) G_L1: 14.077 G_L1_ABSOLUTE: 2.447 G_L1_RELATIVE: 11.630 G_Regularizer: 0.000 validation_error: 20.743 +(epoch: 23, iters: 127456, time: 0.557, data: 0.000) G_L1: 14.390 G_L1_ABSOLUTE: 2.422 G_L1_RELATIVE: 11.969 G_Regularizer: 0.000 validation_error: 20.859 +(epoch: 23, iters: 129456, time: 0.559, data: 0.000) G_L1: 17.098 G_L1_ABSOLUTE: 2.857 G_L1_RELATIVE: 14.241 G_Regularizer: 0.000 validation_error: 20.732 +(epoch: 23, iters: 131456, time: 0.562, data: 0.000) G_L1: 18.305 G_L1_ABSOLUTE: 3.021 G_L1_RELATIVE: 15.283 G_Regularizer: 0.000 validation_error: 20.391 +(epoch: 23, iters: 133456, time: 0.556, data: 0.000) G_L1: 14.170 G_L1_ABSOLUTE: 2.576 G_L1_RELATIVE: 11.593 G_Regularizer: 0.000 validation_error: 20.354 +(epoch: 23, iters: 135456, time: 0.555, data: 0.000) G_L1: 13.717 G_L1_ABSOLUTE: 2.467 G_L1_RELATIVE: 11.250 G_Regularizer: 0.000 validation_error: 20.454 +(epoch: 23, iters: 137456, time: 0.553, data: 0.000) G_L1: 14.577 G_L1_ABSOLUTE: 2.543 G_L1_RELATIVE: 12.034 G_Regularizer: 0.000 validation_error: 20.170 +(epoch: 23, iters: 139456, time: 0.564, data: 0.000) G_L1: 13.808 G_L1_ABSOLUTE: 2.546 G_L1_RELATIVE: 11.262 G_Regularizer: 0.000 validation_error: 21.105 +(epoch: 
23, iters: 141456, time: 0.574, data: 0.000) G_L1: 15.080 G_L1_ABSOLUTE: 2.428 G_L1_RELATIVE: 12.651 G_Regularizer: 0.000 validation_error: 20.779 +(epoch: 23, iters: 143456, time: 0.557, data: 0.000) G_L1: 14.016 G_L1_ABSOLUTE: 2.885 G_L1_RELATIVE: 11.131 G_Regularizer: 0.000 validation_error: 21.800 +(epoch: 23, iters: 145456, time: 0.555, data: 0.000) G_L1: 17.481 G_L1_ABSOLUTE: 3.489 G_L1_RELATIVE: 13.992 G_Regularizer: 0.000 validation_error: 20.616 +(epoch: 23, iters: 147456, time: 0.561, data: 0.000) G_L1: 13.330 G_L1_ABSOLUTE: 2.602 G_L1_RELATIVE: 10.727 G_Regularizer: 0.000 validation_error: 20.591 +(epoch: 23, iters: 149456, time: 0.563, data: 0.000) G_L1: 23.808 G_L1_ABSOLUTE: 2.649 G_L1_RELATIVE: 21.158 G_Regularizer: 0.000 validation_error: 19.820 +(epoch: 23, iters: 151456, time: 0.561, data: 0.000) G_L1: 14.457 G_L1_ABSOLUTE: 2.683 G_L1_RELATIVE: 11.774 G_Regularizer: 0.000 validation_error: 20.619 +(epoch: 23, iters: 153456, time: 0.559, data: 0.000) G_L1: 13.211 G_L1_ABSOLUTE: 2.696 G_L1_RELATIVE: 10.516 G_Regularizer: 0.000 validation_error: 21.164 +(epoch: 23, iters: 155456, time: 0.559, data: 0.000) G_L1: 16.166 G_L1_ABSOLUTE: 2.289 G_L1_RELATIVE: 13.876 G_Regularizer: 0.000 validation_error: 20.810 +(epoch: 23, iters: 157456, time: 0.552, data: 0.000) G_L1: 12.921 G_L1_ABSOLUTE: 2.515 G_L1_RELATIVE: 10.406 G_Regularizer: 0.000 validation_error: 20.562 +(epoch: 23, iters: 159456, time: 0.578, data: 0.000) G_L1: 16.799 G_L1_ABSOLUTE: 2.521 G_L1_RELATIVE: 14.279 G_Regularizer: 0.000 validation_error: 21.566 +(epoch: 23, iters: 161456, time: 0.552, data: 0.000) G_L1: 13.658 G_L1_ABSOLUTE: 2.384 G_L1_RELATIVE: 11.274 G_Regularizer: 0.000 validation_error: 21.284 +(epoch: 23, iters: 163456, time: 0.558, data: 0.000) G_L1: 16.607 G_L1_ABSOLUTE: 2.571 G_L1_RELATIVE: 14.036 G_Regularizer: 0.000 validation_error: 20.736 +(epoch: 23, iters: 165456, time: 0.563, data: 0.000) G_L1: 15.910 G_L1_ABSOLUTE: 2.343 G_L1_RELATIVE: 13.567 G_Regularizer: 0.000 validation_error: 22.094 +(epoch: 23, iters: 167456, time: 0.555, data: 0.000) G_L1: 14.702 G_L1_ABSOLUTE: 2.472 G_L1_RELATIVE: 12.230 G_Regularizer: 0.000 validation_error: 20.756 +(epoch: 23, iters: 169456, time: 0.561, data: 0.000) G_L1: 13.708 G_L1_ABSOLUTE: 2.481 G_L1_RELATIVE: 11.227 G_Regularizer: 0.000 validation_error: 21.033 +(epoch: 23, iters: 171456, time: 0.552, data: 0.001) G_L1: 14.870 G_L1_ABSOLUTE: 2.187 G_L1_RELATIVE: 12.682 G_Regularizer: 0.000 validation_error: 20.329 +(epoch: 23, iters: 173456, time: 0.566, data: 0.000) G_L1: 15.420 G_L1_ABSOLUTE: 2.660 G_L1_RELATIVE: 12.760 G_Regularizer: 0.000 validation_error: 20.710 +(epoch: 23, iters: 175456, time: 0.574, data: 0.000) G_L1: 16.386 G_L1_ABSOLUTE: 2.722 G_L1_RELATIVE: 13.664 G_Regularizer: 0.000 validation_error: 20.932 +(epoch: 23, iters: 177456, time: 0.554, data: 0.000) G_L1: 15.572 G_L1_ABSOLUTE: 2.737 G_L1_RELATIVE: 12.835 G_Regularizer: 0.000 validation_error: 20.771 +(epoch: 23, iters: 179456, time: 0.558, data: 0.000) G_L1: 16.214 G_L1_ABSOLUTE: 2.304 G_L1_RELATIVE: 13.910 G_Regularizer: 0.000 validation_error: 21.309 +(epoch: 23, iters: 181456, time: 0.557, data: 0.000) G_L1: 12.737 G_L1_ABSOLUTE: 2.292 G_L1_RELATIVE: 10.446 G_Regularizer: 0.000 validation_error: 21.051 +(epoch: 23, iters: 183456, time: 0.559, data: 0.000) G_L1: 17.185 G_L1_ABSOLUTE: 2.792 G_L1_RELATIVE: 14.393 G_Regularizer: 0.000 validation_error: 20.941 +(epoch: 23, iters: 185456, time: 0.561, data: 0.000) G_L1: 14.596 G_L1_ABSOLUTE: 2.447 G_L1_RELATIVE: 12.149 G_Regularizer: 0.000 
validation_error: 21.241 +(epoch: 23, iters: 187456, time: 0.561, data: 0.000) G_L1: 12.859 G_L1_ABSOLUTE: 2.998 G_L1_RELATIVE: 9.861 G_Regularizer: 0.000 validation_error: 20.328 +(epoch: 23, iters: 189456, time: 0.563, data: 0.000) G_L1: 17.788 G_L1_ABSOLUTE: 2.530 G_L1_RELATIVE: 15.258 G_Regularizer: 0.000 validation_error: 20.757 +(epoch: 23, iters: 191456, time: 0.558, data: 0.000) G_L1: 12.839 G_L1_ABSOLUTE: 2.311 G_L1_RELATIVE: 10.529 G_Regularizer: 0.000 validation_error: 20.343 +(epoch: 23, iters: 193456, time: 0.574, data: 0.000) G_L1: 17.296 G_L1_ABSOLUTE: 3.072 G_L1_RELATIVE: 14.224 G_Regularizer: 0.000 validation_error: 21.222 +(epoch: 23, iters: 195456, time: 0.561, data: 0.000) G_L1: 13.738 G_L1_ABSOLUTE: 2.210 G_L1_RELATIVE: 11.528 G_Regularizer: 0.000 validation_error: 21.024 +(epoch: 23, iters: 197456, time: 0.561, data: 0.000) G_L1: 13.004 G_L1_ABSOLUTE: 2.232 G_L1_RELATIVE: 10.772 G_Regularizer: 0.000 validation_error: 21.131 +(epoch: 23, iters: 199456, time: 0.559, data: 0.000) G_L1: 13.265 G_L1_ABSOLUTE: 2.392 G_L1_RELATIVE: 10.872 G_Regularizer: 0.000 validation_error: 20.959 +(epoch: 23, iters: 201456, time: 0.561, data: 0.000) G_L1: 13.755 G_L1_ABSOLUTE: 2.520 G_L1_RELATIVE: 11.235 G_Regularizer: 0.000 validation_error: 20.346 +(epoch: 23, iters: 203456, time: 0.577, data: 0.000) G_L1: 15.805 G_L1_ABSOLUTE: 2.347 G_L1_RELATIVE: 13.458 G_Regularizer: 0.000 validation_error: 20.843 +(epoch: 23, iters: 205456, time: 0.551, data: 0.000) G_L1: 14.326 G_L1_ABSOLUTE: 2.478 G_L1_RELATIVE: 11.848 G_Regularizer: 0.000 validation_error: 19.950 +(epoch: 23, iters: 207456, time: 0.557, data: 0.000) G_L1: 20.621 G_L1_ABSOLUTE: 2.730 G_L1_RELATIVE: 17.891 G_Regularizer: 0.000 validation_error: 20.447 +(epoch: 23, iters: 209456, time: 0.570, data: 0.000) G_L1: 18.799 G_L1_ABSOLUTE: 2.679 G_L1_RELATIVE: 16.120 G_Regularizer: 0.000 validation_error: 20.761 +(epoch: 23, iters: 211456, time: 0.557, data: 0.000) G_L1: 16.145 G_L1_ABSOLUTE: 3.031 G_L1_RELATIVE: 13.114 G_Regularizer: 0.000 validation_error: 20.679 +(epoch: 23, iters: 213456, time: 0.560, data: 0.000) G_L1: 14.570 G_L1_ABSOLUTE: 2.822 G_L1_RELATIVE: 11.748 G_Regularizer: 0.000 validation_error: 20.910 +(epoch: 23, iters: 215456, time: 0.565, data: 0.000) G_L1: 15.458 G_L1_ABSOLUTE: 2.407 G_L1_RELATIVE: 13.050 G_Regularizer: 0.000 validation_error: 20.251 +(epoch: 23, iters: 217456, time: 0.562, data: 0.000) G_L1: 14.459 G_L1_ABSOLUTE: 2.562 G_L1_RELATIVE: 11.897 G_Regularizer: 0.000 validation_error: 20.465 +(epoch: 23, iters: 219456, time: 0.559, data: 0.000) G_L1: 14.497 G_L1_ABSOLUTE: 2.884 G_L1_RELATIVE: 11.614 G_Regularizer: 0.000 validation_error: 21.918 +(epoch: 23, iters: 221456, time: 0.561, data: 0.000) G_L1: 13.540 G_L1_ABSOLUTE: 2.752 G_L1_RELATIVE: 10.788 G_Regularizer: 0.000 validation_error: 20.700 +(epoch: 23, iters: 223456, time: 0.577, data: 0.000) G_L1: 11.678 G_L1_ABSOLUTE: 2.463 G_L1_RELATIVE: 9.215 G_Regularizer: 0.000 validation_error: 21.082 +(epoch: 23, iters: 225456, time: 0.556, data: 0.000) G_L1: 16.451 G_L1_ABSOLUTE: 2.525 G_L1_RELATIVE: 13.925 G_Regularizer: 0.000 validation_error: 20.154 +(epoch: 23, iters: 227456, time: 0.574, data: 0.000) G_L1: 15.848 G_L1_ABSOLUTE: 2.786 G_L1_RELATIVE: 13.063 G_Regularizer: 0.000 validation_error: 20.640 +(epoch: 23, iters: 229456, time: 0.559, data: 0.000) G_L1: 16.631 G_L1_ABSOLUTE: 3.117 G_L1_RELATIVE: 13.514 G_Regularizer: 0.000 validation_error: 20.021 +(epoch: 23, iters: 231456, time: 0.566, data: 0.000) G_L1: 15.471 G_L1_ABSOLUTE: 2.906 
G_L1_RELATIVE: 12.565 G_Regularizer: 0.000 validation_error: 20.376 +(epoch: 23, iters: 233456, time: 0.563, data: 0.000) G_L1: 13.764 G_L1_ABSOLUTE: 2.370 G_L1_RELATIVE: 11.394 G_Regularizer: 0.000 validation_error: 20.704 +(epoch: 23, iters: 235456, time: 0.556, data: 0.000) G_L1: 16.741 G_L1_ABSOLUTE: 2.755 G_L1_RELATIVE: 13.986 G_Regularizer: 0.000 validation_error: 20.704 +(epoch: 23, iters: 237456, time: 0.557, data: 0.000) G_L1: 13.736 G_L1_ABSOLUTE: 2.152 G_L1_RELATIVE: 11.584 G_Regularizer: 0.000 validation_error: 20.593 +(epoch: 23, iters: 239456, time: 0.564, data: 0.000) G_L1: 16.643 G_L1_ABSOLUTE: 2.461 G_L1_RELATIVE: 14.182 G_Regularizer: 0.000 validation_error: 20.247 +(epoch: 23, iters: 241456, time: 0.556, data: 0.000) G_L1: 14.922 G_L1_ABSOLUTE: 2.171 G_L1_RELATIVE: 12.751 G_Regularizer: 0.000 validation_error: 20.554 +(epoch: 23, iters: 243456, time: 0.567, data: 0.000) G_L1: 14.584 G_L1_ABSOLUTE: 2.762 G_L1_RELATIVE: 11.823 G_Regularizer: 0.000 validation_error: 20.128 +(epoch: 23, iters: 245456, time: 0.556, data: 0.000) G_L1: 12.958 G_L1_ABSOLUTE: 2.480 G_L1_RELATIVE: 10.478 G_Regularizer: 0.000 validation_error: 20.678 +(epoch: 23, iters: 247456, time: 0.562, data: 0.000) G_L1: 16.286 G_L1_ABSOLUTE: 2.927 G_L1_RELATIVE: 13.359 G_Regularizer: 0.000 validation_error: 20.105 +(epoch: 23, iters: 249456, time: 0.548, data: 0.000) G_L1: 16.571 G_L1_ABSOLUTE: 2.710 G_L1_RELATIVE: 13.861 G_Regularizer: 0.000 validation_error: 20.146 +(epoch: 23, iters: 251456, time: 0.554, data: 0.000) G_L1: 15.254 G_L1_ABSOLUTE: 2.652 G_L1_RELATIVE: 12.602 G_Regularizer: 0.000 validation_error: 20.185 +(epoch: 23, iters: 253456, time: 0.563, data: 0.000) G_L1: 14.676 G_L1_ABSOLUTE: 2.965 G_L1_RELATIVE: 11.711 G_Regularizer: 0.000 validation_error: 20.606 +(epoch: 23, iters: 255456, time: 0.565, data: 0.000) G_L1: 16.715 G_L1_ABSOLUTE: 2.566 G_L1_RELATIVE: 14.149 G_Regularizer: 0.000 validation_error: 20.952 +(epoch: 23, iters: 257456, time: 0.564, data: 0.000) G_L1: 13.777 G_L1_ABSOLUTE: 2.262 G_L1_RELATIVE: 11.515 G_Regularizer: 0.000 validation_error: 20.578 +(epoch: 23, iters: 259456, time: 0.551, data: 0.000) G_L1: 15.047 G_L1_ABSOLUTE: 2.455 G_L1_RELATIVE: 12.592 G_Regularizer: 0.000 validation_error: 20.812 +(epoch: 23, iters: 261456, time: 0.577, data: 0.000) G_L1: 15.325 G_L1_ABSOLUTE: 2.617 G_L1_RELATIVE: 12.709 G_Regularizer: 0.000 validation_error: 20.329 +(epoch: 23, iters: 263456, time: 0.559, data: 0.000) G_L1: 12.947 G_L1_ABSOLUTE: 2.281 G_L1_RELATIVE: 10.666 G_Regularizer: 0.000 validation_error: 20.885 +(epoch: 23, iters: 265456, time: 0.563, data: 0.000) G_L1: 15.869 G_L1_ABSOLUTE: 2.801 G_L1_RELATIVE: 13.068 G_Regularizer: 0.000 validation_error: 20.419 +(epoch: 23, iters: 267456, time: 0.554, data: 0.000) G_L1: 20.493 G_L1_ABSOLUTE: 2.564 G_L1_RELATIVE: 17.929 G_Regularizer: 0.000 validation_error: 20.783 +(epoch: 23, iters: 269456, time: 0.562, data: 0.000) G_L1: 12.860 G_L1_ABSOLUTE: 2.218 G_L1_RELATIVE: 10.642 G_Regularizer: 0.000 validation_error: 20.666 +(epoch: 23, iters: 271456, time: 0.557, data: 0.000) G_L1: 16.405 G_L1_ABSOLUTE: 2.676 G_L1_RELATIVE: 13.729 G_Regularizer: 0.000 validation_error: 20.689 +(epoch: 23, iters: 273456, time: 0.564, data: 0.000) G_L1: 14.032 G_L1_ABSOLUTE: 2.633 G_L1_RELATIVE: 11.398 G_Regularizer: 0.000 validation_error: 21.418 +(epoch: 23, iters: 275456, time: 0.553, data: 0.000) G_L1: 13.872 G_L1_ABSOLUTE: 2.631 G_L1_RELATIVE: 11.240 G_Regularizer: 0.000 validation_error: 20.897 +(epoch: 23, iters: 277456, time: 0.572, data: 0.000) 
G_L1: 16.094 G_L1_ABSOLUTE: 2.359 G_L1_RELATIVE: 13.735 G_Regularizer: 0.000 validation_error: 20.825 +(epoch: 23, iters: 279456, time: 0.555, data: 0.000) G_L1: 16.458 G_L1_ABSOLUTE: 2.399 G_L1_RELATIVE: 14.059 G_Regularizer: 0.000 validation_error: 20.906 +(epoch: 23, iters: 281456, time: 0.556, data: 0.000) G_L1: 15.113 G_L1_ABSOLUTE: 2.594 G_L1_RELATIVE: 12.519 G_Regularizer: 0.000 validation_error: 20.267 +(epoch: 23, iters: 283456, time: 0.562, data: 0.000) G_L1: 16.488 G_L1_ABSOLUTE: 2.581 G_L1_RELATIVE: 13.908 G_Regularizer: 0.000 validation_error: 20.325 +(epoch: 23, iters: 285456, time: 0.559, data: 0.000) G_L1: 13.824 G_L1_ABSOLUTE: 2.510 G_L1_RELATIVE: 11.314 G_Regularizer: 0.000 validation_error: 20.373 +(epoch: 23, iters: 287456, time: 0.560, data: 0.000) G_L1: 13.868 G_L1_ABSOLUTE: 2.622 G_L1_RELATIVE: 11.246 G_Regularizer: 0.000 validation_error: 20.758 +(epoch: 23, iters: 289456, time: 0.558, data: 0.000) G_L1: 14.106 G_L1_ABSOLUTE: 2.512 G_L1_RELATIVE: 11.594 G_Regularizer: 0.000 validation_error: 21.119 +(epoch: 23, iters: 291456, time: 0.558, data: 0.000) G_L1: 15.317 G_L1_ABSOLUTE: 2.259 G_L1_RELATIVE: 13.058 G_Regularizer: 0.000 validation_error: 20.786 +(epoch: 23, iters: 293456, time: 0.557, data: 0.000) G_L1: 14.033 G_L1_ABSOLUTE: 2.377 G_L1_RELATIVE: 11.657 G_Regularizer: 0.000 validation_error: 21.267 +(epoch: 23, iters: 295456, time: 0.581, data: 0.000) G_L1: 13.531 G_L1_ABSOLUTE: 2.518 G_L1_RELATIVE: 11.013 G_Regularizer: 0.000 validation_error: 20.395 +(epoch: 23, iters: 297456, time: 0.558, data: 0.000) G_L1: 14.668 G_L1_ABSOLUTE: 2.621 G_L1_RELATIVE: 12.047 G_Regularizer: 0.000 validation_error: 20.163 +(epoch: 23, iters: 299456, time: 0.562, data: 0.000) G_L1: 18.088 G_L1_ABSOLUTE: 2.745 G_L1_RELATIVE: 15.343 G_Regularizer: 0.000 validation_error: 20.351 +(epoch: 23, iters: 301456, time: 0.561, data: 0.000) G_L1: 14.324 G_L1_ABSOLUTE: 2.444 G_L1_RELATIVE: 11.879 G_Regularizer: 0.000 validation_error: 20.642 +(epoch: 24, iters: 704, time: 0.559, data: 0.000) G_L1: 16.026 G_L1_ABSOLUTE: 2.725 G_L1_RELATIVE: 13.300 G_Regularizer: 0.000 validation_error: 21.425 +(epoch: 24, iters: 2704, time: 0.566, data: 0.000) G_L1: 15.383 G_L1_ABSOLUTE: 2.535 G_L1_RELATIVE: 12.848 G_Regularizer: 0.000 validation_error: 20.120 +(epoch: 24, iters: 4704, time: 0.564, data: 0.000) G_L1: 14.147 G_L1_ABSOLUTE: 2.973 G_L1_RELATIVE: 11.173 G_Regularizer: 0.000 validation_error: 21.167 +(epoch: 24, iters: 6704, time: 0.560, data: 0.000) G_L1: 15.940 G_L1_ABSOLUTE: 2.589 G_L1_RELATIVE: 13.350 G_Regularizer: 0.000 validation_error: 20.962 +(epoch: 24, iters: 8704, time: 0.574, data: 0.001) G_L1: 12.715 G_L1_ABSOLUTE: 2.611 G_L1_RELATIVE: 10.104 G_Regularizer: 0.000 validation_error: 21.032 +(epoch: 24, iters: 10704, time: 0.561, data: 0.000) G_L1: 15.848 G_L1_ABSOLUTE: 2.455 G_L1_RELATIVE: 13.393 G_Regularizer: 0.000 validation_error: 20.456 +(epoch: 24, iters: 12704, time: 0.558, data: 0.000) G_L1: 15.634 G_L1_ABSOLUTE: 2.523 G_L1_RELATIVE: 13.111 G_Regularizer: 0.000 validation_error: 20.739 +(epoch: 24, iters: 14704, time: 0.557, data: 0.000) G_L1: 16.633 G_L1_ABSOLUTE: 2.690 G_L1_RELATIVE: 13.942 G_Regularizer: 0.000 validation_error: 21.277 +(epoch: 24, iters: 16704, time: 0.559, data: 0.000) G_L1: 14.336 G_L1_ABSOLUTE: 2.237 G_L1_RELATIVE: 12.099 G_Regularizer: 0.000 validation_error: 21.086 +(epoch: 24, iters: 18704, time: 0.555, data: 0.000) G_L1: 19.221 G_L1_ABSOLUTE: 2.835 G_L1_RELATIVE: 16.386 G_Regularizer: 0.000 validation_error: 20.361 +(epoch: 24, iters: 20704, time: 
0.555, data: 0.000) G_L1: 14.520 G_L1_ABSOLUTE: 2.267 G_L1_RELATIVE: 12.254 G_Regularizer: 0.000 validation_error: 21.255 +(epoch: 24, iters: 22704, time: 0.565, data: 0.000) G_L1: 12.642 G_L1_ABSOLUTE: 2.881 G_L1_RELATIVE: 9.760 G_Regularizer: 0.000 validation_error: 20.331 +(epoch: 24, iters: 24704, time: 0.555, data: 0.000) G_L1: 13.993 G_L1_ABSOLUTE: 2.450 G_L1_RELATIVE: 11.543 G_Regularizer: 0.000 validation_error: 20.677 +(epoch: 24, iters: 26704, time: 0.584, data: 0.000) G_L1: 15.209 G_L1_ABSOLUTE: 2.596 G_L1_RELATIVE: 12.614 G_Regularizer: 0.000 validation_error: 21.081 +(epoch: 24, iters: 28704, time: 0.556, data: 0.000) G_L1: 12.063 G_L1_ABSOLUTE: 3.180 G_L1_RELATIVE: 8.883 G_Regularizer: 0.000 validation_error: 20.429 +(epoch: 24, iters: 30704, time: 0.555, data: 0.000) G_L1: 14.699 G_L1_ABSOLUTE: 2.586 G_L1_RELATIVE: 12.113 G_Regularizer: 0.000 validation_error: 20.719 +(epoch: 24, iters: 32704, time: 0.554, data: 0.000) G_L1: 15.142 G_L1_ABSOLUTE: 2.385 G_L1_RELATIVE: 12.757 G_Regularizer: 0.000 validation_error: 20.726 +(epoch: 24, iters: 34704, time: 0.556, data: 0.000) G_L1: 13.133 G_L1_ABSOLUTE: 2.471 G_L1_RELATIVE: 10.662 G_Regularizer: 0.000 validation_error: 21.045 +(epoch: 24, iters: 36704, time: 0.551, data: 0.000) G_L1: 13.107 G_L1_ABSOLUTE: 2.373 G_L1_RELATIVE: 10.734 G_Regularizer: 0.000 validation_error: 21.077 +(epoch: 24, iters: 38704, time: 0.562, data: 0.000) G_L1: 15.375 G_L1_ABSOLUTE: 2.497 G_L1_RELATIVE: 12.878 G_Regularizer: 0.000 validation_error: 21.026 +(epoch: 24, iters: 40704, time: 0.559, data: 0.000) G_L1: 14.742 G_L1_ABSOLUTE: 2.600 G_L1_RELATIVE: 12.142 G_Regularizer: 0.000 validation_error: 20.803 +(epoch: 24, iters: 42704, time: 0.577, data: 0.000) G_L1: 13.693 G_L1_ABSOLUTE: 2.612 G_L1_RELATIVE: 11.081 G_Regularizer: 0.000 validation_error: 20.630 +(epoch: 24, iters: 44704, time: 0.555, data: 0.000) G_L1: 14.181 G_L1_ABSOLUTE: 2.554 G_L1_RELATIVE: 11.627 G_Regularizer: 0.000 validation_error: 20.984 +(epoch: 24, iters: 46704, time: 0.567, data: 0.000) G_L1: 15.434 G_L1_ABSOLUTE: 2.539 G_L1_RELATIVE: 12.895 G_Regularizer: 0.000 validation_error: 20.114 +(epoch: 24, iters: 48704, time: 0.559, data: 0.000) G_L1: 13.183 G_L1_ABSOLUTE: 2.606 G_L1_RELATIVE: 10.577 G_Regularizer: 0.000 validation_error: 20.422 +(epoch: 24, iters: 50704, time: 0.561, data: 0.002) G_L1: 14.344 G_L1_ABSOLUTE: 2.557 G_L1_RELATIVE: 11.788 G_Regularizer: 0.000 validation_error: 20.565 +(epoch: 24, iters: 52704, time: 0.561, data: 0.000) G_L1: 13.094 G_L1_ABSOLUTE: 2.274 G_L1_RELATIVE: 10.820 G_Regularizer: 0.000 validation_error: 20.629 +(epoch: 24, iters: 54704, time: 0.561, data: 0.000) G_L1: 12.628 G_L1_ABSOLUTE: 1.923 G_L1_RELATIVE: 10.705 G_Regularizer: 0.000 validation_error: 20.560 +(epoch: 24, iters: 56704, time: 0.554, data: 0.000) G_L1: 15.932 G_L1_ABSOLUTE: 2.577 G_L1_RELATIVE: 13.355 G_Regularizer: 0.000 validation_error: 20.712 +(epoch: 24, iters: 58704, time: 0.558, data: 0.000) G_L1: 15.586 G_L1_ABSOLUTE: 2.535 G_L1_RELATIVE: 13.050 G_Regularizer: 0.000 validation_error: 20.571 +(epoch: 24, iters: 60704, time: 0.581, data: 0.000) G_L1: 14.326 G_L1_ABSOLUTE: 2.542 G_L1_RELATIVE: 11.784 G_Regularizer: 0.000 validation_error: 20.850 +(epoch: 24, iters: 62704, time: 0.564, data: 0.000) G_L1: 12.218 G_L1_ABSOLUTE: 2.064 G_L1_RELATIVE: 10.154 G_Regularizer: 0.000 validation_error: 20.559 +(epoch: 24, iters: 64704, time: 0.557, data: 0.000) G_L1: 13.757 G_L1_ABSOLUTE: 2.603 G_L1_RELATIVE: 11.154 G_Regularizer: 0.000 validation_error: 20.413 +(epoch: 24, iters: 
66704, time: 0.562, data: 0.000) G_L1: 15.032 G_L1_ABSOLUTE: 2.466 G_L1_RELATIVE: 12.566 G_Regularizer: 0.000 validation_error: 20.705 +(epoch: 24, iters: 68704, time: 0.567, data: 0.000) G_L1: 13.397 G_L1_ABSOLUTE: 2.519 G_L1_RELATIVE: 10.878 G_Regularizer: 0.000 validation_error: 20.308 +(epoch: 24, iters: 70704, time: 0.556, data: 0.000) G_L1: 15.283 G_L1_ABSOLUTE: 3.241 G_L1_RELATIVE: 12.042 G_Regularizer: 0.000 validation_error: 20.116 +(epoch: 24, iters: 72704, time: 0.554, data: 0.000) G_L1: 12.996 G_L1_ABSOLUTE: 2.060 G_L1_RELATIVE: 10.935 G_Regularizer: 0.000 validation_error: 20.231 +(epoch: 24, iters: 74704, time: 0.558, data: 0.000) G_L1: 15.953 G_L1_ABSOLUTE: 3.092 G_L1_RELATIVE: 12.861 G_Regularizer: 0.000 validation_error: 20.589 +(epoch: 24, iters: 76704, time: 0.563, data: 0.000) G_L1: 17.268 G_L1_ABSOLUTE: 2.513 G_L1_RELATIVE: 14.755 G_Regularizer: 0.000 validation_error: 20.748 +(epoch: 24, iters: 78704, time: 0.559, data: 0.000) G_L1: 19.120 G_L1_ABSOLUTE: 2.663 G_L1_RELATIVE: 16.456 G_Regularizer: 0.000 validation_error: 20.496 +(epoch: 24, iters: 80704, time: 0.559, data: 0.000) G_L1: 14.164 G_L1_ABSOLUTE: 2.212 G_L1_RELATIVE: 11.953 G_Regularizer: 0.000 validation_error: 20.508 +(epoch: 24, iters: 82704, time: 0.553, data: 0.000) G_L1: 16.816 G_L1_ABSOLUTE: 2.918 G_L1_RELATIVE: 13.898 G_Regularizer: 0.000 validation_error: 20.589 +(epoch: 24, iters: 84704, time: 0.572, data: 0.000) G_L1: 15.151 G_L1_ABSOLUTE: 2.905 G_L1_RELATIVE: 12.246 G_Regularizer: 0.000 validation_error: 20.725 +(epoch: 24, iters: 86704, time: 0.557, data: 0.000) G_L1: 13.900 G_L1_ABSOLUTE: 2.553 G_L1_RELATIVE: 11.347 G_Regularizer: 0.000 validation_error: 21.016 +(epoch: 24, iters: 88704, time: 0.566, data: 0.000) G_L1: 13.983 G_L1_ABSOLUTE: 2.503 G_L1_RELATIVE: 11.480 G_Regularizer: 0.000 validation_error: 20.134 +(epoch: 24, iters: 90704, time: 0.549, data: 0.000) G_L1: 14.020 G_L1_ABSOLUTE: 2.363 G_L1_RELATIVE: 11.658 G_Regularizer: 0.000 validation_error: 20.553 +(epoch: 24, iters: 92704, time: 0.560, data: 0.000) G_L1: 13.211 G_L1_ABSOLUTE: 2.418 G_L1_RELATIVE: 10.793 G_Regularizer: 0.000 validation_error: 20.799 +(epoch: 24, iters: 94704, time: 0.574, data: 0.000) G_L1: 17.891 G_L1_ABSOLUTE: 2.742 G_L1_RELATIVE: 15.149 G_Regularizer: 0.000 validation_error: 20.398 +(epoch: 24, iters: 96704, time: 0.572, data: 0.000) G_L1: 13.850 G_L1_ABSOLUTE: 2.585 G_L1_RELATIVE: 11.265 G_Regularizer: 0.000 validation_error: 20.770 +(epoch: 24, iters: 98704, time: 0.558, data: 0.000) G_L1: 16.148 G_L1_ABSOLUTE: 3.617 G_L1_RELATIVE: 12.531 G_Regularizer: 0.000 validation_error: 20.564 +(epoch: 24, iters: 100704, time: 0.559, data: 0.000) G_L1: 17.273 G_L1_ABSOLUTE: 2.462 G_L1_RELATIVE: 14.810 G_Regularizer: 0.000 validation_error: 19.873 +(epoch: 24, iters: 102704, time: 0.570, data: 0.000) G_L1: 12.051 G_L1_ABSOLUTE: 2.248 G_L1_RELATIVE: 9.803 G_Regularizer: 0.000 validation_error: 21.108 +(epoch: 24, iters: 104704, time: 0.562, data: 0.000) G_L1: 14.975 G_L1_ABSOLUTE: 2.632 G_L1_RELATIVE: 12.343 G_Regularizer: 0.000 validation_error: 20.948 +(epoch: 24, iters: 106704, time: 0.563, data: 0.000) G_L1: 15.431 G_L1_ABSOLUTE: 2.432 G_L1_RELATIVE: 12.998 G_Regularizer: 0.000 validation_error: 21.456 +(epoch: 24, iters: 108704, time: 0.551, data: 0.000) G_L1: 15.512 G_L1_ABSOLUTE: 1.910 G_L1_RELATIVE: 13.602 G_Regularizer: 0.000 validation_error: 20.784 +(epoch: 24, iters: 110704, time: 0.571, data: 0.000) G_L1: 16.515 G_L1_ABSOLUTE: 2.681 G_L1_RELATIVE: 13.834 G_Regularizer: 0.000 validation_error: 20.112 
+(epoch: 24, iters: 112704, time: 0.563, data: 0.000) G_L1: 14.358 G_L1_ABSOLUTE: 2.551 G_L1_RELATIVE: 11.807 G_Regularizer: 0.000 validation_error: 20.933 +(epoch: 24, iters: 114704, time: 0.560, data: 0.000) G_L1: 13.907 G_L1_ABSOLUTE: 2.308 G_L1_RELATIVE: 11.599 G_Regularizer: 0.000 validation_error: 20.020 +(epoch: 24, iters: 116704, time: 0.563, data: 0.000) G_L1: 19.225 G_L1_ABSOLUTE: 2.515 G_L1_RELATIVE: 16.711 G_Regularizer: 0.000 validation_error: 20.375 +(epoch: 24, iters: 118704, time: 0.569, data: 0.000) G_L1: 15.135 G_L1_ABSOLUTE: 2.683 G_L1_RELATIVE: 12.452 G_Regularizer: 0.000 validation_error: 21.361 +(epoch: 24, iters: 120704, time: 0.561, data: 0.000) G_L1: 15.855 G_L1_ABSOLUTE: 2.738 G_L1_RELATIVE: 13.118 G_Regularizer: 0.000 validation_error: 20.683 +(epoch: 24, iters: 122704, time: 0.571, data: 0.000) G_L1: 16.073 G_L1_ABSOLUTE: 2.484 G_L1_RELATIVE: 13.589 G_Regularizer: 0.000 validation_error: 20.551 +(epoch: 24, iters: 124704, time: 0.554, data: 0.000) G_L1: 12.422 G_L1_ABSOLUTE: 2.508 G_L1_RELATIVE: 9.913 G_Regularizer: 0.000 validation_error: 20.863 +(epoch: 24, iters: 126704, time: 0.566, data: 0.000) G_L1: 12.164 G_L1_ABSOLUTE: 2.316 G_L1_RELATIVE: 9.848 G_Regularizer: 0.000 validation_error: 20.869 +(epoch: 24, iters: 128704, time: 0.576, data: 0.000) G_L1: 12.460 G_L1_ABSOLUTE: 2.468 G_L1_RELATIVE: 9.992 G_Regularizer: 0.000 validation_error: 20.926 +(epoch: 24, iters: 130704, time: 0.573, data: 0.000) G_L1: 14.777 G_L1_ABSOLUTE: 2.229 G_L1_RELATIVE: 12.548 G_Regularizer: 0.000 validation_error: 20.534 +(epoch: 24, iters: 132704, time: 0.563, data: 0.000) G_L1: 15.096 G_L1_ABSOLUTE: 2.949 G_L1_RELATIVE: 12.147 G_Regularizer: 0.000 validation_error: 20.202 +(epoch: 24, iters: 134704, time: 0.563, data: 0.000) G_L1: 15.063 G_L1_ABSOLUTE: 2.403 G_L1_RELATIVE: 12.660 G_Regularizer: 0.000 validation_error: 20.100 +(epoch: 24, iters: 136704, time: 0.558, data: 0.000) G_L1: 13.086 G_L1_ABSOLUTE: 2.492 G_L1_RELATIVE: 10.594 G_Regularizer: 0.000 validation_error: 20.726 +(epoch: 24, iters: 138704, time: 0.570, data: 0.000) G_L1: 14.226 G_L1_ABSOLUTE: 2.808 G_L1_RELATIVE: 11.418 G_Regularizer: 0.000 validation_error: 20.070 +(epoch: 24, iters: 140704, time: 0.561, data: 0.000) G_L1: 13.786 G_L1_ABSOLUTE: 2.623 G_L1_RELATIVE: 11.163 G_Regularizer: 0.000 validation_error: 20.635 +(epoch: 24, iters: 142704, time: 0.566, data: 0.000) G_L1: 18.055 G_L1_ABSOLUTE: 3.263 G_L1_RELATIVE: 14.792 G_Regularizer: 0.000 validation_error: 21.038 +(epoch: 24, iters: 144704, time: 0.576, data: 0.000) G_L1: 14.387 G_L1_ABSOLUTE: 2.588 G_L1_RELATIVE: 11.799 G_Regularizer: 0.000 validation_error: 20.728 +(epoch: 24, iters: 146704, time: 0.557, data: 0.000) G_L1: 15.297 G_L1_ABSOLUTE: 2.255 G_L1_RELATIVE: 13.042 G_Regularizer: 0.000 validation_error: 20.861 +(epoch: 24, iters: 148704, time: 0.555, data: 0.000) G_L1: 16.317 G_L1_ABSOLUTE: 2.551 G_L1_RELATIVE: 13.766 G_Regularizer: 0.000 validation_error: 21.019 +(epoch: 24, iters: 150704, time: 0.573, data: 0.000) G_L1: 14.623 G_L1_ABSOLUTE: 3.099 G_L1_RELATIVE: 11.524 G_Regularizer: 0.000 validation_error: 20.980 +(epoch: 24, iters: 152704, time: 0.557, data: 0.000) G_L1: 13.387 G_L1_ABSOLUTE: 2.403 G_L1_RELATIVE: 10.985 G_Regularizer: 0.000 validation_error: 20.129 +(epoch: 24, iters: 154704, time: 0.563, data: 0.000) G_L1: 15.086 G_L1_ABSOLUTE: 2.477 G_L1_RELATIVE: 12.609 G_Regularizer: 0.000 validation_error: 20.962 +(epoch: 24, iters: 156704, time: 0.555, data: 0.000) G_L1: 15.020 G_L1_ABSOLUTE: 2.575 G_L1_RELATIVE: 12.446 G_Regularizer: 
0.000 validation_error: 20.750
+(epoch: 24, iters: 158704, time: 0.558, data: 0.001) G_L1: 15.231 G_L1_ABSOLUTE: 2.576 G_L1_RELATIVE: 12.655 G_Regularizer: 0.000 validation_error: 20.801
+[... training loss log continues in the same format, one entry every 2000 iterations through epochs 24-27 (up to ~302,704 iterations per epoch); per-entry fields are time, data, G_L1, G_L1_ABSOLUTE, G_L1_RELATIVE, G_Regularizer, and validation_error; over this span G_L1 stays roughly within 10.6-28.2, G_Regularizer is 0.000 throughout, and validation_error fluctuates around 19.8-21.8 ...]
+(epoch: 27, iters: 162448, time:
0.544, data: 0.000) G_L1: 14.725 G_L1_ABSOLUTE: 2.118 G_L1_RELATIVE: 12.607 G_Regularizer: 0.000 validation_error: 20.928 +(epoch: 27, iters: 164448, time: 0.545, data: 0.000) G_L1: 13.966 G_L1_ABSOLUTE: 2.592 G_L1_RELATIVE: 11.374 G_Regularizer: 0.000 validation_error: 20.835 +(epoch: 27, iters: 166448, time: 0.545, data: 0.000) G_L1: 15.247 G_L1_ABSOLUTE: 2.805 G_L1_RELATIVE: 12.443 G_Regularizer: 0.000 validation_error: 20.427 +(epoch: 27, iters: 168448, time: 0.547, data: 0.001) G_L1: 14.234 G_L1_ABSOLUTE: 2.726 G_L1_RELATIVE: 11.509 G_Regularizer: 0.000 validation_error: 21.210 +(epoch: 27, iters: 170448, time: 0.545, data: 0.000) G_L1: 14.903 G_L1_ABSOLUTE: 2.548 G_L1_RELATIVE: 12.355 G_Regularizer: 0.000 validation_error: 21.277 +(epoch: 27, iters: 172448, time: 0.548, data: 0.000) G_L1: 15.826 G_L1_ABSOLUTE: 3.004 G_L1_RELATIVE: 12.823 G_Regularizer: 0.000 validation_error: 20.780 +(epoch: 27, iters: 174448, time: 0.544, data: 0.000) G_L1: 13.223 G_L1_ABSOLUTE: 2.516 G_L1_RELATIVE: 10.707 G_Regularizer: 0.000 validation_error: 20.918 +(epoch: 27, iters: 176448, time: 0.546, data: 0.000) G_L1: 13.849 G_L1_ABSOLUTE: 2.571 G_L1_RELATIVE: 11.277 G_Regularizer: 0.000 validation_error: 20.594 +(epoch: 27, iters: 178448, time: 0.545, data: 0.000) G_L1: 13.122 G_L1_ABSOLUTE: 2.342 G_L1_RELATIVE: 10.780 G_Regularizer: 0.000 validation_error: 20.940 +(epoch: 27, iters: 180448, time: 0.542, data: 0.000) G_L1: 14.672 G_L1_ABSOLUTE: 2.358 G_L1_RELATIVE: 12.315 G_Regularizer: 0.000 validation_error: 20.516 +(epoch: 27, iters: 182448, time: 0.538, data: 0.000) G_L1: 15.489 G_L1_ABSOLUTE: 2.620 G_L1_RELATIVE: 12.869 G_Regularizer: 0.000 validation_error: 20.424 +(epoch: 27, iters: 184448, time: 0.549, data: 0.000) G_L1: 12.518 G_L1_ABSOLUTE: 2.310 G_L1_RELATIVE: 10.208 G_Regularizer: 0.000 validation_error: 20.958 +(epoch: 27, iters: 186448, time: 0.538, data: 0.000) G_L1: 16.759 G_L1_ABSOLUTE: 3.110 G_L1_RELATIVE: 13.649 G_Regularizer: 0.000 validation_error: 20.663 +(epoch: 27, iters: 188448, time: 0.548, data: 0.000) G_L1: 13.250 G_L1_ABSOLUTE: 2.497 G_L1_RELATIVE: 10.752 G_Regularizer: 0.000 validation_error: 20.682 +(epoch: 27, iters: 190448, time: 0.546, data: 0.000) G_L1: 11.783 G_L1_ABSOLUTE: 2.440 G_L1_RELATIVE: 9.343 G_Regularizer: 0.000 validation_error: 21.755 +(epoch: 27, iters: 192448, time: 0.543, data: 0.000) G_L1: 14.930 G_L1_ABSOLUTE: 2.977 G_L1_RELATIVE: 11.953 G_Regularizer: 0.000 validation_error: 21.039 +(epoch: 27, iters: 194448, time: 0.546, data: 0.000) G_L1: 14.583 G_L1_ABSOLUTE: 2.805 G_L1_RELATIVE: 11.778 G_Regularizer: 0.000 validation_error: 21.003 +(epoch: 27, iters: 196448, time: 0.550, data: 0.001) G_L1: 12.034 G_L1_ABSOLUTE: 2.512 G_L1_RELATIVE: 9.522 G_Regularizer: 0.000 validation_error: 20.044 +(epoch: 27, iters: 198448, time: 0.549, data: 0.000) G_L1: 15.646 G_L1_ABSOLUTE: 2.915 G_L1_RELATIVE: 12.731 G_Regularizer: 0.000 validation_error: 20.383 +(epoch: 27, iters: 200448, time: 0.539, data: 0.000) G_L1: 13.578 G_L1_ABSOLUTE: 2.793 G_L1_RELATIVE: 10.785 G_Regularizer: 0.000 validation_error: 20.301 +(epoch: 27, iters: 202448, time: 0.542, data: 0.000) G_L1: 13.811 G_L1_ABSOLUTE: 2.938 G_L1_RELATIVE: 10.872 G_Regularizer: 0.000 validation_error: 20.993 +(epoch: 27, iters: 204448, time: 0.545, data: 0.000) G_L1: 13.987 G_L1_ABSOLUTE: 2.421 G_L1_RELATIVE: 11.565 G_Regularizer: 0.000 validation_error: 20.841 +(epoch: 27, iters: 206448, time: 0.543, data: 0.000) G_L1: 14.170 G_L1_ABSOLUTE: 2.335 G_L1_RELATIVE: 11.835 G_Regularizer: 0.000 validation_error: 20.462 
+(epoch: 27, iters: 208448, time: 0.548, data: 0.000) G_L1: 14.103 G_L1_ABSOLUTE: 2.510 G_L1_RELATIVE: 11.594 G_Regularizer: 0.000 validation_error: 20.570 +(epoch: 27, iters: 210448, time: 0.547, data: 0.000) G_L1: 13.652 G_L1_ABSOLUTE: 2.368 G_L1_RELATIVE: 11.284 G_Regularizer: 0.000 validation_error: 20.731 +(epoch: 27, iters: 212448, time: 0.550, data: 0.000) G_L1: 13.366 G_L1_ABSOLUTE: 3.042 G_L1_RELATIVE: 10.324 G_Regularizer: 0.000 validation_error: 20.398 +(epoch: 27, iters: 214448, time: 0.543, data: 0.000) G_L1: 14.637 G_L1_ABSOLUTE: 2.191 G_L1_RELATIVE: 12.447 G_Regularizer: 0.000 validation_error: 20.476 +(epoch: 27, iters: 216448, time: 0.547, data: 0.000) G_L1: 17.143 G_L1_ABSOLUTE: 2.806 G_L1_RELATIVE: 14.337 G_Regularizer: 0.000 validation_error: 20.695 +(epoch: 27, iters: 218448, time: 0.544, data: 0.000) G_L1: 15.904 G_L1_ABSOLUTE: 3.098 G_L1_RELATIVE: 12.806 G_Regularizer: 0.000 validation_error: 20.495 +(epoch: 27, iters: 220448, time: 0.550, data: 0.000) G_L1: 12.901 G_L1_ABSOLUTE: 2.363 G_L1_RELATIVE: 10.539 G_Regularizer: 0.000 validation_error: 20.827 +(epoch: 27, iters: 222448, time: 0.551, data: 0.000) G_L1: 14.060 G_L1_ABSOLUTE: 2.507 G_L1_RELATIVE: 11.553 G_Regularizer: 0.000 validation_error: 21.018 +(epoch: 27, iters: 224448, time: 0.540, data: 0.000) G_L1: 12.161 G_L1_ABSOLUTE: 2.621 G_L1_RELATIVE: 9.540 G_Regularizer: 0.000 validation_error: 20.206 +(epoch: 27, iters: 226448, time: 0.540, data: 0.000) G_L1: 15.672 G_L1_ABSOLUTE: 2.249 G_L1_RELATIVE: 13.423 G_Regularizer: 0.000 validation_error: 20.362 +(epoch: 27, iters: 228448, time: 0.546, data: 0.000) G_L1: 11.906 G_L1_ABSOLUTE: 2.193 G_L1_RELATIVE: 9.713 G_Regularizer: 0.000 validation_error: 20.819 +(epoch: 27, iters: 230448, time: 0.537, data: 0.000) G_L1: 16.354 G_L1_ABSOLUTE: 2.884 G_L1_RELATIVE: 13.470 G_Regularizer: 0.000 validation_error: 20.611 +(epoch: 27, iters: 232448, time: 0.547, data: 0.000) G_L1: 14.434 G_L1_ABSOLUTE: 2.295 G_L1_RELATIVE: 12.138 G_Regularizer: 0.000 validation_error: 20.799 +(epoch: 27, iters: 234448, time: 0.541, data: 0.000) G_L1: 15.380 G_L1_ABSOLUTE: 2.731 G_L1_RELATIVE: 12.649 G_Regularizer: 0.000 validation_error: 21.126 +(epoch: 27, iters: 236448, time: 0.545, data: 0.000) G_L1: 12.197 G_L1_ABSOLUTE: 2.084 G_L1_RELATIVE: 10.113 G_Regularizer: 0.000 validation_error: 20.922 +(epoch: 27, iters: 238448, time: 0.538, data: 0.000) G_L1: 11.949 G_L1_ABSOLUTE: 2.570 G_L1_RELATIVE: 9.379 G_Regularizer: 0.000 validation_error: 20.277 +(epoch: 27, iters: 240448, time: 0.541, data: 0.000) G_L1: 14.986 G_L1_ABSOLUTE: 2.196 G_L1_RELATIVE: 12.791 G_Regularizer: 0.000 validation_error: 21.303 +(epoch: 27, iters: 242448, time: 0.549, data: 0.001) G_L1: 13.793 G_L1_ABSOLUTE: 2.422 G_L1_RELATIVE: 11.371 G_Regularizer: 0.000 validation_error: 20.495 +(epoch: 27, iters: 244448, time: 0.542, data: 0.000) G_L1: 16.133 G_L1_ABSOLUTE: 2.576 G_L1_RELATIVE: 13.557 G_Regularizer: 0.000 validation_error: 22.036 +(epoch: 27, iters: 246448, time: 0.543, data: 0.000) G_L1: 13.984 G_L1_ABSOLUTE: 2.799 G_L1_RELATIVE: 11.184 G_Regularizer: 0.000 validation_error: 20.576 +(epoch: 27, iters: 248448, time: 0.544, data: 0.000) G_L1: 12.124 G_L1_ABSOLUTE: 2.311 G_L1_RELATIVE: 9.813 G_Regularizer: 0.000 validation_error: 20.739 +(epoch: 27, iters: 250448, time: 0.549, data: 0.000) G_L1: 13.900 G_L1_ABSOLUTE: 2.314 G_L1_RELATIVE: 11.586 G_Regularizer: 0.000 validation_error: 20.706 +(epoch: 27, iters: 252448, time: 0.548, data: 0.000) G_L1: 16.025 G_L1_ABSOLUTE: 2.730 G_L1_RELATIVE: 13.295 G_Regularizer: 
0.000 validation_error: 20.446 +(epoch: 27, iters: 254448, time: 0.541, data: 0.000) G_L1: 16.162 G_L1_ABSOLUTE: 2.884 G_L1_RELATIVE: 13.277 G_Regularizer: 0.000 validation_error: 20.478 +(epoch: 27, iters: 256448, time: 0.546, data: 0.000) G_L1: 17.459 G_L1_ABSOLUTE: 3.209 G_L1_RELATIVE: 14.250 G_Regularizer: 0.000 validation_error: 21.225 +(epoch: 27, iters: 258448, time: 0.547, data: 0.000) G_L1: 13.863 G_L1_ABSOLUTE: 2.740 G_L1_RELATIVE: 11.123 G_Regularizer: 0.000 validation_error: 20.138 +(epoch: 27, iters: 260448, time: 0.540, data: 0.000) G_L1: 15.102 G_L1_ABSOLUTE: 2.726 G_L1_RELATIVE: 12.376 G_Regularizer: 0.000 validation_error: 21.196 +(epoch: 27, iters: 262448, time: 0.539, data: 0.000) G_L1: 14.970 G_L1_ABSOLUTE: 2.698 G_L1_RELATIVE: 12.273 G_Regularizer: 0.000 validation_error: 21.031 +(epoch: 27, iters: 264448, time: 0.547, data: 0.000) G_L1: 16.592 G_L1_ABSOLUTE: 2.861 G_L1_RELATIVE: 13.731 G_Regularizer: 0.000 validation_error: 20.563 +(epoch: 27, iters: 266448, time: 0.546, data: 0.000) G_L1: 14.695 G_L1_ABSOLUTE: 2.580 G_L1_RELATIVE: 12.115 G_Regularizer: 0.000 validation_error: 20.870 +(epoch: 27, iters: 268448, time: 0.537, data: 0.000) G_L1: 11.287 G_L1_ABSOLUTE: 2.338 G_L1_RELATIVE: 8.950 G_Regularizer: 0.000 validation_error: 20.159 +(epoch: 27, iters: 270448, time: 0.539, data: 0.000) G_L1: 11.582 G_L1_ABSOLUTE: 2.382 G_L1_RELATIVE: 9.200 G_Regularizer: 0.000 validation_error: 21.043 +(epoch: 27, iters: 272448, time: 0.547, data: 0.000) G_L1: 13.537 G_L1_ABSOLUTE: 3.089 G_L1_RELATIVE: 10.447 G_Regularizer: 0.000 validation_error: 20.648 +(epoch: 27, iters: 274448, time: 0.555, data: 0.000) G_L1: 16.325 G_L1_ABSOLUTE: 2.857 G_L1_RELATIVE: 13.468 G_Regularizer: 0.000 validation_error: 21.605 +(epoch: 27, iters: 276448, time: 0.551, data: 0.001) G_L1: 14.123 G_L1_ABSOLUTE: 2.673 G_L1_RELATIVE: 11.450 G_Regularizer: 0.000 validation_error: 21.040 +(epoch: 27, iters: 278448, time: 0.541, data: 0.000) G_L1: 14.788 G_L1_ABSOLUTE: 2.702 G_L1_RELATIVE: 12.086 G_Regularizer: 0.000 validation_error: 20.084 +(epoch: 27, iters: 280448, time: 0.547, data: 0.000) G_L1: 15.491 G_L1_ABSOLUTE: 2.453 G_L1_RELATIVE: 13.038 G_Regularizer: 0.000 validation_error: 21.260 +(epoch: 27, iters: 282448, time: 0.552, data: 0.000) G_L1: 15.821 G_L1_ABSOLUTE: 2.494 G_L1_RELATIVE: 13.327 G_Regularizer: 0.000 validation_error: 20.699 +(epoch: 27, iters: 284448, time: 0.537, data: 0.000) G_L1: 15.459 G_L1_ABSOLUTE: 2.478 G_L1_RELATIVE: 12.982 G_Regularizer: 0.000 validation_error: 20.975 +(epoch: 27, iters: 286448, time: 0.543, data: 0.000) G_L1: 16.494 G_L1_ABSOLUTE: 2.798 G_L1_RELATIVE: 13.696 G_Regularizer: 0.000 validation_error: 20.754 +(epoch: 27, iters: 288448, time: 0.550, data: 0.000) G_L1: 15.917 G_L1_ABSOLUTE: 2.648 G_L1_RELATIVE: 13.268 G_Regularizer: 0.000 validation_error: 20.794 +(epoch: 27, iters: 290448, time: 0.545, data: 0.000) G_L1: 12.894 G_L1_ABSOLUTE: 2.172 G_L1_RELATIVE: 10.722 G_Regularizer: 0.000 validation_error: 20.757 +(epoch: 27, iters: 292448, time: 0.542, data: 0.000) G_L1: 16.555 G_L1_ABSOLUTE: 2.445 G_L1_RELATIVE: 14.110 G_Regularizer: 0.000 validation_error: 20.301 +(epoch: 27, iters: 294448, time: 0.541, data: 0.000) G_L1: 14.297 G_L1_ABSOLUTE: 2.728 G_L1_RELATIVE: 11.569 G_Regularizer: 0.000 validation_error: 21.617 +(epoch: 27, iters: 296448, time: 0.540, data: 0.000) G_L1: 12.925 G_L1_ABSOLUTE: 2.315 G_L1_RELATIVE: 10.610 G_Regularizer: 0.000 validation_error: 21.087 +(epoch: 27, iters: 298448, time: 0.550, data: 0.000) G_L1: 12.797 G_L1_ABSOLUTE: 2.247 
G_L1_RELATIVE: 10.551 G_Regularizer: 0.000 validation_error: 20.300 +(epoch: 27, iters: 300448, time: 0.548, data: 0.000) G_L1: 13.827 G_L1_ABSOLUTE: 2.470 G_L1_RELATIVE: 11.357 G_Regularizer: 0.000 validation_error: 20.479 +(epoch: 27, iters: 302448, time: 0.547, data: 0.000) G_L1: 14.407 G_L1_ABSOLUTE: 2.792 G_L1_RELATIVE: 11.615 G_Regularizer: 0.000 validation_error: 21.435 +(epoch: 28, iters: 1696, time: 0.541, data: 0.000) G_L1: 16.847 G_L1_ABSOLUTE: 2.748 G_L1_RELATIVE: 14.099 G_Regularizer: 0.000 validation_error: 20.432 +(epoch: 28, iters: 3696, time: 0.553, data: 0.000) G_L1: 14.083 G_L1_ABSOLUTE: 2.612 G_L1_RELATIVE: 11.472 G_Regularizer: 0.000 validation_error: 20.624 +(epoch: 28, iters: 5696, time: 0.546, data: 0.000) G_L1: 14.381 G_L1_ABSOLUTE: 2.612 G_L1_RELATIVE: 11.769 G_Regularizer: 0.000 validation_error: 20.811 +(epoch: 28, iters: 7696, time: 0.544, data: 0.000) G_L1: 15.810 G_L1_ABSOLUTE: 2.408 G_L1_RELATIVE: 13.402 G_Regularizer: 0.000 validation_error: 21.154 +(epoch: 28, iters: 9696, time: 0.546, data: 0.000) G_L1: 16.450 G_L1_ABSOLUTE: 2.709 G_L1_RELATIVE: 13.741 G_Regularizer: 0.000 validation_error: 21.007 +(epoch: 28, iters: 11696, time: 0.548, data: 0.000) G_L1: 13.376 G_L1_ABSOLUTE: 2.235 G_L1_RELATIVE: 11.141 G_Regularizer: 0.000 validation_error: 21.241 +(epoch: 28, iters: 13696, time: 0.542, data: 0.000) G_L1: 13.447 G_L1_ABSOLUTE: 2.324 G_L1_RELATIVE: 11.123 G_Regularizer: 0.000 validation_error: 20.833 +(epoch: 28, iters: 15696, time: 0.549, data: 0.000) G_L1: 14.224 G_L1_ABSOLUTE: 2.453 G_L1_RELATIVE: 11.771 G_Regularizer: 0.000 validation_error: 21.019 +(epoch: 28, iters: 17696, time: 0.543, data: 0.000) G_L1: 15.751 G_L1_ABSOLUTE: 2.707 G_L1_RELATIVE: 13.043 G_Regularizer: 0.000 validation_error: 21.133 +(epoch: 28, iters: 19696, time: 0.549, data: 0.000) G_L1: 12.690 G_L1_ABSOLUTE: 2.415 G_L1_RELATIVE: 10.275 G_Regularizer: 0.000 validation_error: 21.085 +(epoch: 28, iters: 21696, time: 0.544, data: 0.000) G_L1: 17.179 G_L1_ABSOLUTE: 2.701 G_L1_RELATIVE: 14.478 G_Regularizer: 0.000 validation_error: 20.453 +(epoch: 28, iters: 23696, time: 0.544, data: 0.000) G_L1: 16.326 G_L1_ABSOLUTE: 2.437 G_L1_RELATIVE: 13.888 G_Regularizer: 0.000 validation_error: 20.654 +(epoch: 28, iters: 25696, time: 0.541, data: 0.000) G_L1: 15.768 G_L1_ABSOLUTE: 2.574 G_L1_RELATIVE: 13.194 G_Regularizer: 0.000 validation_error: 20.834 +(epoch: 28, iters: 27696, time: 0.538, data: 0.000) G_L1: 14.396 G_L1_ABSOLUTE: 2.668 G_L1_RELATIVE: 11.728 G_Regularizer: 0.000 validation_error: 20.641 +(epoch: 28, iters: 29696, time: 0.544, data: 0.000) G_L1: 15.326 G_L1_ABSOLUTE: 2.714 G_L1_RELATIVE: 12.611 G_Regularizer: 0.000 validation_error: 20.606 +(epoch: 28, iters: 31696, time: 0.540, data: 0.000) G_L1: 15.457 G_L1_ABSOLUTE: 2.717 G_L1_RELATIVE: 12.740 G_Regularizer: 0.000 validation_error: 20.689 +(epoch: 28, iters: 33696, time: 0.544, data: 0.000) G_L1: 11.802 G_L1_ABSOLUTE: 2.405 G_L1_RELATIVE: 9.398 G_Regularizer: 0.000 validation_error: 20.653 +(epoch: 28, iters: 35696, time: 0.544, data: 0.000) G_L1: 17.464 G_L1_ABSOLUTE: 2.888 G_L1_RELATIVE: 14.576 G_Regularizer: 0.000 validation_error: 20.222 +(epoch: 28, iters: 37696, time: 0.545, data: 0.000) G_L1: 15.017 G_L1_ABSOLUTE: 2.888 G_L1_RELATIVE: 12.128 G_Regularizer: 0.000 validation_error: 20.323 +(epoch: 28, iters: 39696, time: 0.543, data: 0.001) G_L1: 13.474 G_L1_ABSOLUTE: 2.671 G_L1_RELATIVE: 10.803 G_Regularizer: 0.000 validation_error: 21.069 +(epoch: 28, iters: 41696, time: 0.545, data: 0.000) G_L1: 15.910 G_L1_ABSOLUTE: 
2.959 G_L1_RELATIVE: 12.951 G_Regularizer: 0.000 validation_error: 21.087 +(epoch: 28, iters: 43696, time: 0.537, data: 0.000) G_L1: 14.471 G_L1_ABSOLUTE: 2.743 G_L1_RELATIVE: 11.728 G_Regularizer: 0.000 validation_error: 20.553 +(epoch: 28, iters: 45696, time: 0.551, data: 0.001) G_L1: 13.407 G_L1_ABSOLUTE: 2.800 G_L1_RELATIVE: 10.607 G_Regularizer: 0.000 validation_error: 20.382 +(epoch: 28, iters: 47696, time: 0.548, data: 0.000) G_L1: 13.171 G_L1_ABSOLUTE: 2.423 G_L1_RELATIVE: 10.748 G_Regularizer: 0.000 validation_error: 20.489 +(epoch: 28, iters: 49696, time: 0.536, data: 0.000) G_L1: 14.030 G_L1_ABSOLUTE: 2.342 G_L1_RELATIVE: 11.688 G_Regularizer: 0.000 validation_error: 20.698 +(epoch: 28, iters: 51696, time: 0.546, data: 0.000) G_L1: 13.750 G_L1_ABSOLUTE: 2.596 G_L1_RELATIVE: 11.154 G_Regularizer: 0.000 validation_error: 21.091 +(epoch: 28, iters: 53696, time: 0.540, data: 0.000) G_L1: 15.999 G_L1_ABSOLUTE: 2.796 G_L1_RELATIVE: 13.202 G_Regularizer: 0.000 validation_error: 21.351 +(epoch: 28, iters: 55696, time: 0.544, data: 0.000) G_L1: 13.970 G_L1_ABSOLUTE: 2.330 G_L1_RELATIVE: 11.640 G_Regularizer: 0.000 validation_error: 20.650 +(epoch: 28, iters: 57696, time: 0.545, data: 0.000) G_L1: 15.411 G_L1_ABSOLUTE: 2.917 G_L1_RELATIVE: 12.494 G_Regularizer: 0.000 validation_error: 20.712 +(epoch: 28, iters: 59696, time: 0.547, data: 0.000) G_L1: 12.091 G_L1_ABSOLUTE: 2.335 G_L1_RELATIVE: 9.756 G_Regularizer: 0.000 validation_error: 20.388 +(epoch: 28, iters: 61696, time: 0.541, data: 0.000) G_L1: 19.008 G_L1_ABSOLUTE: 2.717 G_L1_RELATIVE: 16.291 G_Regularizer: 0.000 validation_error: 21.013 +(epoch: 28, iters: 63696, time: 0.548, data: 0.000) G_L1: 12.466 G_L1_ABSOLUTE: 2.445 G_L1_RELATIVE: 10.021 G_Regularizer: 0.000 validation_error: 21.262 +(epoch: 28, iters: 65696, time: 0.543, data: 0.000) G_L1: 14.919 G_L1_ABSOLUTE: 2.692 G_L1_RELATIVE: 12.227 G_Regularizer: 0.000 validation_error: 20.309 +(epoch: 28, iters: 67696, time: 0.557, data: 0.000) G_L1: 15.407 G_L1_ABSOLUTE: 2.394 G_L1_RELATIVE: 13.013 G_Regularizer: 0.000 validation_error: 20.815 +(epoch: 28, iters: 69696, time: 0.536, data: 0.000) G_L1: 13.233 G_L1_ABSOLUTE: 3.164 G_L1_RELATIVE: 10.069 G_Regularizer: 0.000 validation_error: 21.023 +(epoch: 28, iters: 71696, time: 0.539, data: 0.000) G_L1: 15.579 G_L1_ABSOLUTE: 2.808 G_L1_RELATIVE: 12.771 G_Regularizer: 0.000 validation_error: 20.812 +(epoch: 28, iters: 73696, time: 0.544, data: 0.000) G_L1: 13.295 G_L1_ABSOLUTE: 2.108 G_L1_RELATIVE: 11.187 G_Regularizer: 0.000 validation_error: 20.788 +(epoch: 28, iters: 75696, time: 0.552, data: 0.000) G_L1: 14.383 G_L1_ABSOLUTE: 2.348 G_L1_RELATIVE: 12.035 G_Regularizer: 0.000 validation_error: 20.303 +(epoch: 28, iters: 77696, time: 0.539, data: 0.000) G_L1: 14.285 G_L1_ABSOLUTE: 3.004 G_L1_RELATIVE: 11.281 G_Regularizer: 0.000 validation_error: 21.360 +(epoch: 28, iters: 79696, time: 0.551, data: 0.000) G_L1: 13.302 G_L1_ABSOLUTE: 2.622 G_L1_RELATIVE: 10.679 G_Regularizer: 0.000 validation_error: 20.893 +(epoch: 28, iters: 81696, time: 0.546, data: 0.000) G_L1: 16.662 G_L1_ABSOLUTE: 2.483 G_L1_RELATIVE: 14.179 G_Regularizer: 0.000 validation_error: 20.967 +(epoch: 28, iters: 83696, time: 0.545, data: 0.000) G_L1: 11.143 G_L1_ABSOLUTE: 2.483 G_L1_RELATIVE: 8.660 G_Regularizer: 0.000 validation_error: 21.236 +(epoch: 28, iters: 85696, time: 0.551, data: 0.000) G_L1: 15.131 G_L1_ABSOLUTE: 3.040 G_L1_RELATIVE: 12.091 G_Regularizer: 0.000 validation_error: 20.850 +(epoch: 28, iters: 87696, time: 0.550, data: 0.000) G_L1: 15.833 
G_L1_ABSOLUTE: 2.318 G_L1_RELATIVE: 13.515 G_Regularizer: 0.000 validation_error: 20.851 +(epoch: 28, iters: 89696, time: 0.548, data: 0.000) G_L1: 13.583 G_L1_ABSOLUTE: 2.531 G_L1_RELATIVE: 11.053 G_Regularizer: 0.000 validation_error: 20.189 +(epoch: 28, iters: 91696, time: 0.547, data: 0.000) G_L1: 15.882 G_L1_ABSOLUTE: 2.734 G_L1_RELATIVE: 13.148 G_Regularizer: 0.000 validation_error: 20.874 +(epoch: 28, iters: 93696, time: 0.540, data: 0.000) G_L1: 14.932 G_L1_ABSOLUTE: 2.548 G_L1_RELATIVE: 12.384 G_Regularizer: 0.000 validation_error: 21.236 +(epoch: 28, iters: 95696, time: 0.548, data: 0.000) G_L1: 14.063 G_L1_ABSOLUTE: 2.340 G_L1_RELATIVE: 11.723 G_Regularizer: 0.000 validation_error: 21.194 +(epoch: 28, iters: 97696, time: 0.544, data: 0.000) G_L1: 16.807 G_L1_ABSOLUTE: 2.565 G_L1_RELATIVE: 14.241 G_Regularizer: 0.000 validation_error: 20.648 +(epoch: 28, iters: 99696, time: 0.547, data: 0.000) G_L1: 13.905 G_L1_ABSOLUTE: 2.631 G_L1_RELATIVE: 11.275 G_Regularizer: 0.000 validation_error: 20.733 +(epoch: 28, iters: 101696, time: 0.546, data: 0.001) G_L1: 15.005 G_L1_ABSOLUTE: 2.474 G_L1_RELATIVE: 12.531 G_Regularizer: 0.000 validation_error: 20.923 +(epoch: 28, iters: 103696, time: 0.543, data: 0.000) G_L1: 12.282 G_L1_ABSOLUTE: 2.587 G_L1_RELATIVE: 9.695 G_Regularizer: 0.000 validation_error: 20.544 +(epoch: 28, iters: 105696, time: 0.543, data: 0.001) G_L1: 16.014 G_L1_ABSOLUTE: 3.374 G_L1_RELATIVE: 12.640 G_Regularizer: 0.000 validation_error: 20.079 +(epoch: 28, iters: 107696, time: 0.552, data: 0.000) G_L1: 14.152 G_L1_ABSOLUTE: 2.450 G_L1_RELATIVE: 11.703 G_Regularizer: 0.000 validation_error: 20.851 +(epoch: 28, iters: 109696, time: 0.544, data: 0.000) G_L1: 12.903 G_L1_ABSOLUTE: 2.608 G_L1_RELATIVE: 10.295 G_Regularizer: 0.000 validation_error: 20.711 +(epoch: 28, iters: 111696, time: 0.555, data: 0.000) G_L1: 16.142 G_L1_ABSOLUTE: 2.652 G_L1_RELATIVE: 13.489 G_Regularizer: 0.000 validation_error: 21.031 +(epoch: 28, iters: 113696, time: 0.543, data: 0.000) G_L1: 15.277 G_L1_ABSOLUTE: 2.449 G_L1_RELATIVE: 12.828 G_Regularizer: 0.000 validation_error: 20.679 +(epoch: 28, iters: 115696, time: 0.546, data: 0.000) G_L1: 16.657 G_L1_ABSOLUTE: 2.753 G_L1_RELATIVE: 13.904 G_Regularizer: 0.000 validation_error: 21.056 +(epoch: 28, iters: 117696, time: 0.543, data: 0.000) G_L1: 16.223 G_L1_ABSOLUTE: 2.495 G_L1_RELATIVE: 13.728 G_Regularizer: 0.000 validation_error: 20.284 +(epoch: 28, iters: 119696, time: 0.546, data: 0.000) G_L1: 16.767 G_L1_ABSOLUTE: 2.844 G_L1_RELATIVE: 13.923 G_Regularizer: 0.000 validation_error: 21.318 +(epoch: 28, iters: 121696, time: 0.545, data: 0.000) G_L1: 18.802 G_L1_ABSOLUTE: 3.270 G_L1_RELATIVE: 15.531 G_Regularizer: 0.000 validation_error: 20.844 +(epoch: 28, iters: 123696, time: 0.550, data: 0.001) G_L1: 12.158 G_L1_ABSOLUTE: 2.338 G_L1_RELATIVE: 9.820 G_Regularizer: 0.000 validation_error: 20.888 +(epoch: 28, iters: 125696, time: 0.543, data: 0.000) G_L1: 14.648 G_L1_ABSOLUTE: 2.831 G_L1_RELATIVE: 11.817 G_Regularizer: 0.000 validation_error: 20.965 +(epoch: 28, iters: 127696, time: 0.543, data: 0.000) G_L1: 13.391 G_L1_ABSOLUTE: 2.109 G_L1_RELATIVE: 11.282 G_Regularizer: 0.000 validation_error: 20.107 +(epoch: 28, iters: 129696, time: 0.544, data: 0.000) G_L1: 12.770 G_L1_ABSOLUTE: 2.372 G_L1_RELATIVE: 10.397 G_Regularizer: 0.000 validation_error: 21.369 +(epoch: 28, iters: 131696, time: 0.540, data: 0.000) G_L1: 13.142 G_L1_ABSOLUTE: 2.480 G_L1_RELATIVE: 10.662 G_Regularizer: 0.000 validation_error: 20.485 +(epoch: 28, iters: 133696, time: 0.541, 
data: 0.000) G_L1: 16.728 G_L1_ABSOLUTE: 2.791 G_L1_RELATIVE: 13.937 G_Regularizer: 0.000 validation_error: 20.931 +(epoch: 28, iters: 135696, time: 0.542, data: 0.000) G_L1: 16.139 G_L1_ABSOLUTE: 2.568 G_L1_RELATIVE: 13.571 G_Regularizer: 0.000 validation_error: 21.212 +(epoch: 28, iters: 137696, time: 0.547, data: 0.000) G_L1: 14.723 G_L1_ABSOLUTE: 2.853 G_L1_RELATIVE: 11.869 G_Regularizer: 0.000 validation_error: 20.898 +(epoch: 28, iters: 139696, time: 0.543, data: 0.000) G_L1: 15.737 G_L1_ABSOLUTE: 2.973 G_L1_RELATIVE: 12.763 G_Regularizer: 0.000 validation_error: 20.598 +(epoch: 28, iters: 141696, time: 0.547, data: 0.000) G_L1: 13.670 G_L1_ABSOLUTE: 2.783 G_L1_RELATIVE: 10.887 G_Regularizer: 0.000 validation_error: 20.596 +(epoch: 28, iters: 143696, time: 0.537, data: 0.000) G_L1: 15.923 G_L1_ABSOLUTE: 2.633 G_L1_RELATIVE: 13.290 G_Regularizer: 0.000 validation_error: 20.572 +(epoch: 28, iters: 145696, time: 0.546, data: 0.000) G_L1: 12.602 G_L1_ABSOLUTE: 2.260 G_L1_RELATIVE: 10.342 G_Regularizer: 0.000 validation_error: 20.853 +(epoch: 28, iters: 147696, time: 0.538, data: 0.000) G_L1: 18.570 G_L1_ABSOLUTE: 3.407 G_L1_RELATIVE: 15.163 G_Regularizer: 0.000 validation_error: 20.364 +(epoch: 28, iters: 149696, time: 0.539, data: 0.000) G_L1: 13.627 G_L1_ABSOLUTE: 2.449 G_L1_RELATIVE: 11.178 G_Regularizer: 0.000 validation_error: 20.043 +(epoch: 28, iters: 151696, time: 0.547, data: 0.000) G_L1: 14.241 G_L1_ABSOLUTE: 2.629 G_L1_RELATIVE: 11.612 G_Regularizer: 0.000 validation_error: 19.906 +(epoch: 28, iters: 153696, time: 0.549, data: 0.000) G_L1: 14.662 G_L1_ABSOLUTE: 2.474 G_L1_RELATIVE: 12.188 G_Regularizer: 0.000 validation_error: 21.253 +(epoch: 28, iters: 155696, time: 0.541, data: 0.001) G_L1: 13.812 G_L1_ABSOLUTE: 2.473 G_L1_RELATIVE: 11.339 G_Regularizer: 0.000 validation_error: 21.256 +(epoch: 28, iters: 157696, time: 0.541, data: 0.000) G_L1: 15.984 G_L1_ABSOLUTE: 2.547 G_L1_RELATIVE: 13.437 G_Regularizer: 0.000 validation_error: 20.676 +(epoch: 28, iters: 159696, time: 0.542, data: 0.001) G_L1: 15.607 G_L1_ABSOLUTE: 2.677 G_L1_RELATIVE: 12.930 G_Regularizer: 0.000 validation_error: 21.162 +(epoch: 28, iters: 161696, time: 0.546, data: 0.000) G_L1: 16.120 G_L1_ABSOLUTE: 2.577 G_L1_RELATIVE: 13.543 G_Regularizer: 0.000 validation_error: 20.749 +(epoch: 28, iters: 163696, time: 0.549, data: 0.001) G_L1: 15.788 G_L1_ABSOLUTE: 2.628 G_L1_RELATIVE: 13.160 G_Regularizer: 0.000 validation_error: 20.257 +(epoch: 28, iters: 165696, time: 0.544, data: 0.000) G_L1: 13.577 G_L1_ABSOLUTE: 2.396 G_L1_RELATIVE: 11.180 G_Regularizer: 0.000 validation_error: 21.215 +(epoch: 28, iters: 167696, time: 0.546, data: 0.000) G_L1: 17.069 G_L1_ABSOLUTE: 3.256 G_L1_RELATIVE: 13.813 G_Regularizer: 0.000 validation_error: 20.437 +(epoch: 28, iters: 169696, time: 0.542, data: 0.000) G_L1: 16.515 G_L1_ABSOLUTE: 2.658 G_L1_RELATIVE: 13.857 G_Regularizer: 0.000 validation_error: 21.392 +(epoch: 28, iters: 171696, time: 0.549, data: 0.000) G_L1: 14.691 G_L1_ABSOLUTE: 2.545 G_L1_RELATIVE: 12.146 G_Regularizer: 0.000 validation_error: 21.302 +(epoch: 28, iters: 173696, time: 0.543, data: 0.000) G_L1: 12.286 G_L1_ABSOLUTE: 2.025 G_L1_RELATIVE: 10.260 G_Regularizer: 0.000 validation_error: 20.610 +(epoch: 28, iters: 175696, time: 0.545, data: 0.000) G_L1: 13.660 G_L1_ABSOLUTE: 2.754 G_L1_RELATIVE: 10.906 G_Regularizer: 0.000 validation_error: 20.891 +(epoch: 28, iters: 177696, time: 0.540, data: 0.000) G_L1: 15.050 G_L1_ABSOLUTE: 2.944 G_L1_RELATIVE: 12.106 G_Regularizer: 0.000 validation_error: 21.179 
+(epoch: 28, iters: 179696, time: 0.539, data: 0.000) G_L1: 10.556 G_L1_ABSOLUTE: 1.943 G_L1_RELATIVE: 8.614 G_Regularizer: 0.000 validation_error: 20.422 +(epoch: 28, iters: 181696, time: 0.539, data: 0.000) G_L1: 13.187 G_L1_ABSOLUTE: 2.520 G_L1_RELATIVE: 10.667 G_Regularizer: 0.000 validation_error: 20.857 +(epoch: 28, iters: 183696, time: 0.540, data: 0.000) G_L1: 12.725 G_L1_ABSOLUTE: 2.459 G_L1_RELATIVE: 10.266 G_Regularizer: 0.000 validation_error: 19.901 +(epoch: 28, iters: 185696, time: 0.545, data: 0.000) G_L1: 15.240 G_L1_ABSOLUTE: 2.315 G_L1_RELATIVE: 12.925 G_Regularizer: 0.000 validation_error: 20.545 +(epoch: 28, iters: 187696, time: 0.547, data: 0.000) G_L1: 14.390 G_L1_ABSOLUTE: 2.604 G_L1_RELATIVE: 11.787 G_Regularizer: 0.000 validation_error: 20.530 +(epoch: 28, iters: 189696, time: 0.543, data: 0.000) G_L1: 18.476 G_L1_ABSOLUTE: 2.205 G_L1_RELATIVE: 16.271 G_Regularizer: 0.000 validation_error: 20.297 +(epoch: 28, iters: 191696, time: 0.539, data: 0.000) G_L1: 12.897 G_L1_ABSOLUTE: 2.257 G_L1_RELATIVE: 10.640 G_Regularizer: 0.000 validation_error: 20.538 +(epoch: 28, iters: 193696, time: 0.548, data: 0.000) G_L1: 12.322 G_L1_ABSOLUTE: 2.430 G_L1_RELATIVE: 9.892 G_Regularizer: 0.000 validation_error: 20.819 +(epoch: 28, iters: 195696, time: 0.537, data: 0.000) G_L1: 16.990 G_L1_ABSOLUTE: 2.434 G_L1_RELATIVE: 14.556 G_Regularizer: 0.000 validation_error: 20.455 +(epoch: 28, iters: 197696, time: 0.552, data: 0.000) G_L1: 16.008 G_L1_ABSOLUTE: 2.495 G_L1_RELATIVE: 13.513 G_Regularizer: 0.000 validation_error: 20.299 +(epoch: 28, iters: 199696, time: 0.547, data: 0.000) G_L1: 14.336 G_L1_ABSOLUTE: 2.393 G_L1_RELATIVE: 11.943 G_Regularizer: 0.000 validation_error: 20.653 +(epoch: 28, iters: 201696, time: 0.549, data: 0.000) G_L1: 12.775 G_L1_ABSOLUTE: 2.163 G_L1_RELATIVE: 10.612 G_Regularizer: 0.000 validation_error: 20.864 +(epoch: 28, iters: 203696, time: 0.536, data: 0.000) G_L1: 17.169 G_L1_ABSOLUTE: 3.224 G_L1_RELATIVE: 13.945 G_Regularizer: 0.000 validation_error: 20.959 +(epoch: 28, iters: 205696, time: 0.548, data: 0.000) G_L1: 13.948 G_L1_ABSOLUTE: 2.376 G_L1_RELATIVE: 11.571 G_Regularizer: 0.000 validation_error: 20.287 +(epoch: 28, iters: 207696, time: 0.542, data: 0.000) G_L1: 13.001 G_L1_ABSOLUTE: 2.416 G_L1_RELATIVE: 10.585 G_Regularizer: 0.000 validation_error: 20.037 +(epoch: 28, iters: 209696, time: 0.541, data: 0.000) G_L1: 15.556 G_L1_ABSOLUTE: 2.369 G_L1_RELATIVE: 13.187 G_Regularizer: 0.000 validation_error: 20.703 +(epoch: 28, iters: 211696, time: 0.545, data: 0.000) G_L1: 16.728 G_L1_ABSOLUTE: 2.497 G_L1_RELATIVE: 14.231 G_Regularizer: 0.000 validation_error: 20.442 +(epoch: 28, iters: 213696, time: 0.540, data: 0.000) G_L1: 17.605 G_L1_ABSOLUTE: 2.516 G_L1_RELATIVE: 15.090 G_Regularizer: 0.000 validation_error: 20.871 +(epoch: 28, iters: 215696, time: 0.537, data: 0.000) G_L1: 14.490 G_L1_ABSOLUTE: 2.338 G_L1_RELATIVE: 12.152 G_Regularizer: 0.000 validation_error: 20.379 +(epoch: 28, iters: 217696, time: 0.547, data: 0.000) G_L1: 16.151 G_L1_ABSOLUTE: 2.725 G_L1_RELATIVE: 13.426 G_Regularizer: 0.000 validation_error: 20.759 +(epoch: 28, iters: 219696, time: 0.553, data: 0.000) G_L1: 14.544 G_L1_ABSOLUTE: 2.905 G_L1_RELATIVE: 11.639 G_Regularizer: 0.000 validation_error: 20.343 +(epoch: 28, iters: 221696, time: 0.548, data: 0.000) G_L1: 15.586 G_L1_ABSOLUTE: 2.798 G_L1_RELATIVE: 12.788 G_Regularizer: 0.000 validation_error: 20.168 +(epoch: 28, iters: 223696, time: 0.550, data: 0.000) G_L1: 13.016 G_L1_ABSOLUTE: 2.277 G_L1_RELATIVE: 10.738 G_Regularizer: 
0.000 validation_error: 21.314 +(epoch: 28, iters: 225696, time: 0.547, data: 0.000) G_L1: 16.028 G_L1_ABSOLUTE: 2.266 G_L1_RELATIVE: 13.763 G_Regularizer: 0.000 validation_error: 20.992 +(epoch: 28, iters: 227696, time: 0.550, data: 0.000) G_L1: 13.567 G_L1_ABSOLUTE: 2.718 G_L1_RELATIVE: 10.849 G_Regularizer: 0.000 validation_error: 21.062 +(epoch: 28, iters: 229696, time: 0.541, data: 0.000) G_L1: 16.846 G_L1_ABSOLUTE: 3.201 G_L1_RELATIVE: 13.645 G_Regularizer: 0.000 validation_error: 20.825 +(epoch: 28, iters: 231696, time: 0.554, data: 0.000) G_L1: 14.361 G_L1_ABSOLUTE: 2.632 G_L1_RELATIVE: 11.730 G_Regularizer: 0.000 validation_error: 20.692 +(epoch: 28, iters: 233696, time: 0.552, data: 0.000) G_L1: 14.936 G_L1_ABSOLUTE: 2.510 G_L1_RELATIVE: 12.426 G_Regularizer: 0.000 validation_error: 20.567 +(epoch: 28, iters: 235696, time: 0.540, data: 0.000) G_L1: 17.507 G_L1_ABSOLUTE: 2.825 G_L1_RELATIVE: 14.682 G_Regularizer: 0.000 validation_error: 20.866 +(epoch: 28, iters: 237696, time: 0.540, data: 0.000) G_L1: 14.675 G_L1_ABSOLUTE: 2.500 G_L1_RELATIVE: 12.175 G_Regularizer: 0.000 validation_error: 20.389 +(epoch: 28, iters: 239696, time: 0.548, data: 0.000) G_L1: 14.594 G_L1_ABSOLUTE: 2.638 G_L1_RELATIVE: 11.957 G_Regularizer: 0.000 validation_error: 20.475 +(epoch: 28, iters: 241696, time: 0.542, data: 0.000) G_L1: 12.876 G_L1_ABSOLUTE: 2.159 G_L1_RELATIVE: 10.718 G_Regularizer: 0.000 validation_error: 20.781 +(epoch: 28, iters: 243696, time: 0.545, data: 0.000) G_L1: 16.207 G_L1_ABSOLUTE: 2.997 G_L1_RELATIVE: 13.210 G_Regularizer: 0.000 validation_error: 20.389 +(epoch: 28, iters: 245696, time: 0.559, data: 0.000) G_L1: 15.270 G_L1_ABSOLUTE: 2.656 G_L1_RELATIVE: 12.614 G_Regularizer: 0.000 validation_error: 20.234 +(epoch: 28, iters: 247696, time: 0.541, data: 0.000) G_L1: 13.788 G_L1_ABSOLUTE: 2.706 G_L1_RELATIVE: 11.082 G_Regularizer: 0.000 validation_error: 20.507 +(epoch: 28, iters: 249696, time: 0.546, data: 0.000) G_L1: 14.391 G_L1_ABSOLUTE: 2.584 G_L1_RELATIVE: 11.806 G_Regularizer: 0.000 validation_error: 20.532 +(epoch: 28, iters: 251696, time: 0.548, data: 0.000) G_L1: 15.266 G_L1_ABSOLUTE: 2.308 G_L1_RELATIVE: 12.958 G_Regularizer: 0.000 validation_error: 20.312 +(epoch: 28, iters: 253696, time: 0.556, data: 0.000) G_L1: 13.227 G_L1_ABSOLUTE: 2.366 G_L1_RELATIVE: 10.861 G_Regularizer: 0.000 validation_error: 21.064 +(epoch: 28, iters: 255696, time: 0.546, data: 0.000) G_L1: 13.636 G_L1_ABSOLUTE: 2.574 G_L1_RELATIVE: 11.062 G_Regularizer: 0.000 validation_error: 20.653 +(epoch: 28, iters: 257696, time: 0.547, data: 0.001) G_L1: 16.406 G_L1_ABSOLUTE: 2.750 G_L1_RELATIVE: 13.656 G_Regularizer: 0.000 validation_error: 20.421 +(epoch: 28, iters: 259696, time: 0.547, data: 0.000) G_L1: 12.322 G_L1_ABSOLUTE: 2.228 G_L1_RELATIVE: 10.094 G_Regularizer: 0.000 validation_error: 20.397 +(epoch: 28, iters: 261696, time: 0.543, data: 0.000) G_L1: 14.052 G_L1_ABSOLUTE: 2.234 G_L1_RELATIVE: 11.818 G_Regularizer: 0.000 validation_error: 21.172 +(epoch: 28, iters: 263696, time: 0.544, data: 0.000) G_L1: 15.915 G_L1_ABSOLUTE: 2.879 G_L1_RELATIVE: 13.036 G_Regularizer: 0.000 validation_error: 20.868 +(epoch: 28, iters: 265696, time: 0.559, data: 0.000) G_L1: 14.742 G_L1_ABSOLUTE: 3.041 G_L1_RELATIVE: 11.701 G_Regularizer: 0.000 validation_error: 21.300 +(epoch: 28, iters: 267696, time: 0.542, data: 0.000) G_L1: 17.566 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 15.112 G_Regularizer: 0.000 validation_error: 20.634 +(epoch: 28, iters: 269696, time: 0.541, data: 0.000) G_L1: 16.793 G_L1_ABSOLUTE: 2.987 
G_L1_RELATIVE: 13.806 G_Regularizer: 0.000 validation_error: 20.529 +(epoch: 28, iters: 271696, time: 0.540, data: 0.000) G_L1: 11.008 G_L1_ABSOLUTE: 2.054 G_L1_RELATIVE: 8.955 G_Regularizer: 0.000 validation_error: 20.748 +(epoch: 28, iters: 273696, time: 0.550, data: 0.001) G_L1: 14.266 G_L1_ABSOLUTE: 2.293 G_L1_RELATIVE: 11.974 G_Regularizer: 0.000 validation_error: 21.194 +(epoch: 28, iters: 275696, time: 0.547, data: 0.000) G_L1: 11.475 G_L1_ABSOLUTE: 2.141 G_L1_RELATIVE: 9.334 G_Regularizer: 0.000 validation_error: 20.854 +(epoch: 28, iters: 277696, time: 0.539, data: 0.000) G_L1: 14.498 G_L1_ABSOLUTE: 2.345 G_L1_RELATIVE: 12.153 G_Regularizer: 0.000 validation_error: 20.576 +(epoch: 28, iters: 279696, time: 0.558, data: 0.000) G_L1: 15.444 G_L1_ABSOLUTE: 2.525 G_L1_RELATIVE: 12.919 G_Regularizer: 0.000 validation_error: 21.060 +(epoch: 28, iters: 281696, time: 0.538, data: 0.001) G_L1: 18.425 G_L1_ABSOLUTE: 2.313 G_L1_RELATIVE: 16.113 G_Regularizer: 0.000 validation_error: 20.793 +(epoch: 28, iters: 283696, time: 0.549, data: 0.000) G_L1: 15.748 G_L1_ABSOLUTE: 2.484 G_L1_RELATIVE: 13.264 G_Regularizer: 0.000 validation_error: 21.097 +(epoch: 28, iters: 285696, time: 0.541, data: 0.000) G_L1: 13.062 G_L1_ABSOLUTE: 2.600 G_L1_RELATIVE: 10.463 G_Regularizer: 0.000 validation_error: 20.927 +(epoch: 28, iters: 287696, time: 0.552, data: 0.001) G_L1: 13.333 G_L1_ABSOLUTE: 2.738 G_L1_RELATIVE: 10.596 G_Regularizer: 0.000 validation_error: 20.768 +(epoch: 28, iters: 289696, time: 0.542, data: 0.000) G_L1: 14.396 G_L1_ABSOLUTE: 2.584 G_L1_RELATIVE: 11.813 G_Regularizer: 0.000 validation_error: 20.292 +(epoch: 28, iters: 291696, time: 0.548, data: 0.000) G_L1: 15.197 G_L1_ABSOLUTE: 2.581 G_L1_RELATIVE: 12.616 G_Regularizer: 0.000 validation_error: 20.672 +(epoch: 28, iters: 293696, time: 0.542, data: 0.000) G_L1: 15.412 G_L1_ABSOLUTE: 2.270 G_L1_RELATIVE: 13.142 G_Regularizer: 0.000 validation_error: 20.288 +(epoch: 28, iters: 295696, time: 0.546, data: 0.000) G_L1: 17.772 G_L1_ABSOLUTE: 3.231 G_L1_RELATIVE: 14.542 G_Regularizer: 0.000 validation_error: 20.325 +(epoch: 28, iters: 297696, time: 0.545, data: 0.000) G_L1: 15.716 G_L1_ABSOLUTE: 2.556 G_L1_RELATIVE: 13.160 G_Regularizer: 0.000 validation_error: 20.643 +(epoch: 28, iters: 299696, time: 0.549, data: 0.000) G_L1: 14.640 G_L1_ABSOLUTE: 2.205 G_L1_RELATIVE: 12.435 G_Regularizer: 0.000 validation_error: 20.207 +(epoch: 28, iters: 301696, time: 0.538, data: 0.000) G_L1: 15.122 G_L1_ABSOLUTE: 3.334 G_L1_RELATIVE: 11.788 G_Regularizer: 0.000 validation_error: 20.309 +(epoch: 29, iters: 944, time: 0.538, data: 0.000) G_L1: 15.783 G_L1_ABSOLUTE: 3.073 G_L1_RELATIVE: 12.710 G_Regularizer: 0.000 validation_error: 21.086 +(epoch: 29, iters: 2944, time: 0.544, data: 0.000) G_L1: 13.689 G_L1_ABSOLUTE: 2.536 G_L1_RELATIVE: 11.153 G_Regularizer: 0.000 validation_error: 21.223 +(epoch: 29, iters: 4944, time: 0.548, data: 0.000) G_L1: 14.162 G_L1_ABSOLUTE: 2.917 G_L1_RELATIVE: 11.245 G_Regularizer: 0.000 validation_error: 19.976 +(epoch: 29, iters: 6944, time: 0.545, data: 0.001) G_L1: 14.831 G_L1_ABSOLUTE: 3.379 G_L1_RELATIVE: 11.452 G_Regularizer: 0.000 validation_error: 21.496 +(epoch: 29, iters: 8944, time: 0.543, data: 0.000) G_L1: 14.772 G_L1_ABSOLUTE: 2.529 G_L1_RELATIVE: 12.243 G_Regularizer: 0.000 validation_error: 20.530 +(epoch: 29, iters: 10944, time: 0.549, data: 0.000) G_L1: 14.717 G_L1_ABSOLUTE: 2.571 G_L1_RELATIVE: 12.146 G_Regularizer: 0.000 validation_error: 20.939 +(epoch: 29, iters: 12944, time: 0.541, data: 0.000) G_L1: 12.274 
G_L1_ABSOLUTE: 2.660 G_L1_RELATIVE: 9.614 G_Regularizer: 0.000 validation_error: 20.661 +(epoch: 29, iters: 14944, time: 0.551, data: 0.000) G_L1: 13.604 G_L1_ABSOLUTE: 2.359 G_L1_RELATIVE: 11.245 G_Regularizer: 0.000 validation_error: 20.429 +(epoch: 29, iters: 16944, time: 0.541, data: 0.000) G_L1: 12.651 G_L1_ABSOLUTE: 2.541 G_L1_RELATIVE: 10.110 G_Regularizer: 0.000 validation_error: 21.190 +(epoch: 29, iters: 18944, time: 0.546, data: 0.001) G_L1: 14.415 G_L1_ABSOLUTE: 2.626 G_L1_RELATIVE: 11.789 G_Regularizer: 0.000 validation_error: 20.413 +(epoch: 29, iters: 20944, time: 0.537, data: 0.000) G_L1: 14.138 G_L1_ABSOLUTE: 2.465 G_L1_RELATIVE: 11.673 G_Regularizer: 0.000 validation_error: 20.475 +(epoch: 29, iters: 22944, time: 0.545, data: 0.000) G_L1: 17.081 G_L1_ABSOLUTE: 2.906 G_L1_RELATIVE: 14.175 G_Regularizer: 0.000 validation_error: 20.827 +(epoch: 29, iters: 24944, time: 0.551, data: 0.000) G_L1: 15.952 G_L1_ABSOLUTE: 2.722 G_L1_RELATIVE: 13.230 G_Regularizer: 0.000 validation_error: 21.090 +(epoch: 29, iters: 26944, time: 0.546, data: 0.000) G_L1: 24.085 G_L1_ABSOLUTE: 2.497 G_L1_RELATIVE: 21.588 G_Regularizer: 0.000 validation_error: 20.798 +(epoch: 29, iters: 28944, time: 0.544, data: 0.000) G_L1: 14.598 G_L1_ABSOLUTE: 2.324 G_L1_RELATIVE: 12.274 G_Regularizer: 0.000 validation_error: 20.920 +(epoch: 29, iters: 30944, time: 0.534, data: 0.000) G_L1: 16.065 G_L1_ABSOLUTE: 2.812 G_L1_RELATIVE: 13.253 G_Regularizer: 0.000 validation_error: 20.915 +(epoch: 29, iters: 32944, time: 0.559, data: 0.000) G_L1: 13.906 G_L1_ABSOLUTE: 2.862 G_L1_RELATIVE: 11.044 G_Regularizer: 0.000 validation_error: 21.089 +(epoch: 29, iters: 34944, time: 0.541, data: 0.001) G_L1: 12.031 G_L1_ABSOLUTE: 2.347 G_L1_RELATIVE: 9.684 G_Regularizer: 0.000 validation_error: 21.318 +(epoch: 29, iters: 36944, time: 0.539, data: 0.000) G_L1: 12.281 G_L1_ABSOLUTE: 2.623 G_L1_RELATIVE: 9.658 G_Regularizer: 0.000 validation_error: 20.602 +(epoch: 29, iters: 38944, time: 0.549, data: 0.000) G_L1: 14.355 G_L1_ABSOLUTE: 2.330 G_L1_RELATIVE: 12.025 G_Regularizer: 0.000 validation_error: 20.459 +(epoch: 29, iters: 40944, time: 0.547, data: 0.000) G_L1: 15.190 G_L1_ABSOLUTE: 2.479 G_L1_RELATIVE: 12.711 G_Regularizer: 0.000 validation_error: 20.674 +(epoch: 29, iters: 42944, time: 0.544, data: 0.000) G_L1: 17.793 G_L1_ABSOLUTE: 3.034 G_L1_RELATIVE: 14.759 G_Regularizer: 0.000 validation_error: 20.339 +(epoch: 29, iters: 44944, time: 0.548, data: 0.000) G_L1: 13.982 G_L1_ABSOLUTE: 2.896 G_L1_RELATIVE: 11.086 G_Regularizer: 0.000 validation_error: 20.673 +(epoch: 29, iters: 46944, time: 0.551, data: 0.000) G_L1: 16.979 G_L1_ABSOLUTE: 2.934 G_L1_RELATIVE: 14.046 G_Regularizer: 0.000 validation_error: 20.854 +(epoch: 29, iters: 48944, time: 0.547, data: 0.000) G_L1: 14.975 G_L1_ABSOLUTE: 3.475 G_L1_RELATIVE: 11.500 G_Regularizer: 0.000 validation_error: 20.206 +(epoch: 29, iters: 50944, time: 0.544, data: 0.000) G_L1: 14.249 G_L1_ABSOLUTE: 2.163 G_L1_RELATIVE: 12.086 G_Regularizer: 0.000 validation_error: 20.958 +(epoch: 29, iters: 52944, time: 0.548, data: 0.000) G_L1: 14.180 G_L1_ABSOLUTE: 2.477 G_L1_RELATIVE: 11.703 G_Regularizer: 0.000 validation_error: 20.597 +(epoch: 29, iters: 54944, time: 0.547, data: 0.000) G_L1: 15.496 G_L1_ABSOLUTE: 2.495 G_L1_RELATIVE: 13.001 G_Regularizer: 0.000 validation_error: 21.246 +(epoch: 29, iters: 56944, time: 0.543, data: 0.000) G_L1: 14.402 G_L1_ABSOLUTE: 2.654 G_L1_RELATIVE: 11.748 G_Regularizer: 0.000 validation_error: 20.891 +(epoch: 29, iters: 58944, time: 0.543, data: 0.000) G_L1: 
16.604 G_L1_ABSOLUTE: 2.797 G_L1_RELATIVE: 13.806 G_Regularizer: 0.000 validation_error: 20.258 +(epoch: 29, iters: 60944, time: 0.544, data: 0.000) G_L1: 13.965 G_L1_ABSOLUTE: 2.766 G_L1_RELATIVE: 11.199 G_Regularizer: 0.000 validation_error: 21.063 +(epoch: 29, iters: 62944, time: 0.549, data: 0.000) G_L1: 13.309 G_L1_ABSOLUTE: 2.634 G_L1_RELATIVE: 10.674 G_Regularizer: 0.000 validation_error: 20.424 +(epoch: 29, iters: 64944, time: 0.539, data: 0.000) G_L1: 11.475 G_L1_ABSOLUTE: 2.254 G_L1_RELATIVE: 9.221 G_Regularizer: 0.000 validation_error: 20.989 +(epoch: 29, iters: 66944, time: 0.538, data: 0.000) G_L1: 18.099 G_L1_ABSOLUTE: 2.493 G_L1_RELATIVE: 15.606 G_Regularizer: 0.000 validation_error: 20.943 +(epoch: 29, iters: 68944, time: 0.541, data: 0.000) G_L1: 16.482 G_L1_ABSOLUTE: 2.690 G_L1_RELATIVE: 13.793 G_Regularizer: 0.000 validation_error: 20.681 +(epoch: 29, iters: 70944, time: 0.546, data: 0.000) G_L1: 13.784 G_L1_ABSOLUTE: 2.608 G_L1_RELATIVE: 11.176 G_Regularizer: 0.000 validation_error: 20.380 +(epoch: 29, iters: 72944, time: 0.541, data: 0.000) G_L1: 13.097 G_L1_ABSOLUTE: 2.109 G_L1_RELATIVE: 10.988 G_Regularizer: 0.000 validation_error: 21.043 +(epoch: 29, iters: 74944, time: 0.559, data: 0.001) G_L1: 17.087 G_L1_ABSOLUTE: 2.419 G_L1_RELATIVE: 14.669 G_Regularizer: 0.000 validation_error: 20.383 +(epoch: 29, iters: 76944, time: 0.539, data: 0.000) G_L1: 14.200 G_L1_ABSOLUTE: 2.532 G_L1_RELATIVE: 11.667 G_Regularizer: 0.000 validation_error: 20.374 +(epoch: 29, iters: 78944, time: 0.541, data: 0.000) G_L1: 14.180 G_L1_ABSOLUTE: 3.126 G_L1_RELATIVE: 11.053 G_Regularizer: 0.000 validation_error: 20.989 +(epoch: 29, iters: 80944, time: 0.553, data: 0.000) G_L1: 15.635 G_L1_ABSOLUTE: 2.621 G_L1_RELATIVE: 13.014 G_Regularizer: 0.000 validation_error: 20.649 +(epoch: 29, iters: 82944, time: 0.543, data: 0.000) G_L1: 14.037 G_L1_ABSOLUTE: 2.649 G_L1_RELATIVE: 11.388 G_Regularizer: 0.000 validation_error: 20.553 +(epoch: 29, iters: 84944, time: 0.539, data: 0.000) G_L1: 11.372 G_L1_ABSOLUTE: 2.244 G_L1_RELATIVE: 9.129 G_Regularizer: 0.000 validation_error: 20.674 +(epoch: 29, iters: 86944, time: 0.542, data: 0.000) G_L1: 12.872 G_L1_ABSOLUTE: 2.556 G_L1_RELATIVE: 10.316 G_Regularizer: 0.000 validation_error: 20.368 +(epoch: 29, iters: 88944, time: 0.551, data: 0.000) G_L1: 12.699 G_L1_ABSOLUTE: 2.588 G_L1_RELATIVE: 10.110 G_Regularizer: 0.000 validation_error: 20.846 +(epoch: 29, iters: 90944, time: 0.548, data: 0.000) G_L1: 13.365 G_L1_ABSOLUTE: 2.744 G_L1_RELATIVE: 10.621 G_Regularizer: 0.000 validation_error: 20.527 +(epoch: 29, iters: 92944, time: 0.545, data: 0.000) G_L1: 14.232 G_L1_ABSOLUTE: 2.637 G_L1_RELATIVE: 11.595 G_Regularizer: 0.000 validation_error: 20.229 +(epoch: 29, iters: 94944, time: 0.545, data: 0.000) G_L1: 20.259 G_L1_ABSOLUTE: 2.669 G_L1_RELATIVE: 17.590 G_Regularizer: 0.000 validation_error: 20.850 +(epoch: 29, iters: 96944, time: 0.544, data: 0.000) G_L1: 15.416 G_L1_ABSOLUTE: 2.761 G_L1_RELATIVE: 12.655 G_Regularizer: 0.000 validation_error: 20.038 +(epoch: 29, iters: 98944, time: 0.537, data: 0.000) G_L1: 13.431 G_L1_ABSOLUTE: 2.708 G_L1_RELATIVE: 10.723 G_Regularizer: 0.000 validation_error: 21.228 +(epoch: 29, iters: 100944, time: 0.545, data: 0.000) G_L1: 14.055 G_L1_ABSOLUTE: 2.427 G_L1_RELATIVE: 11.627 G_Regularizer: 0.000 validation_error: 20.605 +(epoch: 29, iters: 102944, time: 0.547, data: 0.001) G_L1: 15.922 G_L1_ABSOLUTE: 2.526 G_L1_RELATIVE: 13.396 G_Regularizer: 0.000 validation_error: 20.323 +(epoch: 29, iters: 104944, time: 0.548, data: 
0.000) G_L1: 16.599 G_L1_ABSOLUTE: 2.506 G_L1_RELATIVE: 14.093 G_Regularizer: 0.000 validation_error: 20.027 +(epoch: 29, iters: 106944, time: 0.542, data: 0.000) G_L1: 16.517 G_L1_ABSOLUTE: 2.755 G_L1_RELATIVE: 13.763 G_Regularizer: 0.000 validation_error: 20.421 +(epoch: 29, iters: 108944, time: 0.551, data: 0.000) G_L1: 14.025 G_L1_ABSOLUTE: 2.267 G_L1_RELATIVE: 11.758 G_Regularizer: 0.000 validation_error: 21.238 +(epoch: 29, iters: 110944, time: 0.540, data: 0.000) G_L1: 16.654 G_L1_ABSOLUTE: 2.540 G_L1_RELATIVE: 14.114 G_Regularizer: 0.000 validation_error: 19.846 +(epoch: 29, iters: 112944, time: 0.549, data: 0.000) G_L1: 12.593 G_L1_ABSOLUTE: 2.354 G_L1_RELATIVE: 10.239 G_Regularizer: 0.000 validation_error: 20.408 +(epoch: 29, iters: 114944, time: 0.549, data: 0.000) G_L1: 12.488 G_L1_ABSOLUTE: 2.353 G_L1_RELATIVE: 10.135 G_Regularizer: 0.000 validation_error: 21.152 +(epoch: 29, iters: 116944, time: 0.545, data: 0.000) G_L1: 16.239 G_L1_ABSOLUTE: 2.614 G_L1_RELATIVE: 13.625 G_Regularizer: 0.000 validation_error: 20.584 +(epoch: 29, iters: 118944, time: 0.547, data: 0.000) G_L1: 13.338 G_L1_ABSOLUTE: 2.710 G_L1_RELATIVE: 10.629 G_Regularizer: 0.000 validation_error: 21.249 +(epoch: 29, iters: 120944, time: 0.548, data: 0.001) G_L1: 13.866 G_L1_ABSOLUTE: 2.735 G_L1_RELATIVE: 11.131 G_Regularizer: 0.000 validation_error: 20.988 +(epoch: 29, iters: 122944, time: 0.554, data: 0.000) G_L1: 13.125 G_L1_ABSOLUTE: 2.204 G_L1_RELATIVE: 10.921 G_Regularizer: 0.000 validation_error: 20.622 +(epoch: 29, iters: 124944, time: 0.542, data: 0.000) G_L1: 12.550 G_L1_ABSOLUTE: 2.686 G_L1_RELATIVE: 9.864 G_Regularizer: 0.000 validation_error: 20.743 +(epoch: 29, iters: 126944, time: 0.554, data: 0.000) G_L1: 15.320 G_L1_ABSOLUTE: 2.575 G_L1_RELATIVE: 12.745 G_Regularizer: 0.000 validation_error: 20.972 +(epoch: 29, iters: 128944, time: 0.544, data: 0.000) G_L1: 11.659 G_L1_ABSOLUTE: 2.812 G_L1_RELATIVE: 8.847 G_Regularizer: 0.000 validation_error: 21.224 +(epoch: 29, iters: 130944, time: 0.545, data: 0.000) G_L1: 13.934 G_L1_ABSOLUTE: 2.400 G_L1_RELATIVE: 11.534 G_Regularizer: 0.000 validation_error: 21.205 +(epoch: 29, iters: 132944, time: 0.538, data: 0.000) G_L1: 14.750 G_L1_ABSOLUTE: 2.401 G_L1_RELATIVE: 12.349 G_Regularizer: 0.000 validation_error: 20.697 +(epoch: 29, iters: 134944, time: 0.539, data: 0.000) G_L1: 14.704 G_L1_ABSOLUTE: 2.714 G_L1_RELATIVE: 11.990 G_Regularizer: 0.000 validation_error: 20.915 +(epoch: 29, iters: 136944, time: 0.544, data: 0.000) G_L1: 14.151 G_L1_ABSOLUTE: 2.241 G_L1_RELATIVE: 11.910 G_Regularizer: 0.000 validation_error: 20.357 +(epoch: 29, iters: 138944, time: 0.549, data: 0.000) G_L1: 16.096 G_L1_ABSOLUTE: 2.628 G_L1_RELATIVE: 13.467 G_Regularizer: 0.000 validation_error: 20.364 +(epoch: 29, iters: 140944, time: 0.549, data: 0.000) G_L1: 13.122 G_L1_ABSOLUTE: 2.155 G_L1_RELATIVE: 10.967 G_Regularizer: 0.000 validation_error: 20.773 +(epoch: 29, iters: 142944, time: 0.543, data: 0.000) G_L1: 13.117 G_L1_ABSOLUTE: 2.667 G_L1_RELATIVE: 10.450 G_Regularizer: 0.000 validation_error: 20.813 +(epoch: 29, iters: 144944, time: 0.543, data: 0.000) G_L1: 13.839 G_L1_ABSOLUTE: 2.704 G_L1_RELATIVE: 11.135 G_Regularizer: 0.000 validation_error: 20.747 +(epoch: 29, iters: 146944, time: 0.542, data: 0.000) G_L1: 14.990 G_L1_ABSOLUTE: 2.468 G_L1_RELATIVE: 12.522 G_Regularizer: 0.000 validation_error: 21.075 +(epoch: 29, iters: 148944, time: 0.552, data: 0.000) G_L1: 15.735 G_L1_ABSOLUTE: 2.742 G_L1_RELATIVE: 12.993 G_Regularizer: 0.000 validation_error: 20.867 +(epoch: 29, 
iters: 150944, time: 0.538, data: 0.000) G_L1: 15.098 G_L1_ABSOLUTE: 2.711 G_L1_RELATIVE: 12.387 G_Regularizer: 0.000 validation_error: 21.192 +(epoch: 29, iters: 152944, time: 0.542, data: 0.000) G_L1: 14.173 G_L1_ABSOLUTE: 2.258 G_L1_RELATIVE: 11.915 G_Regularizer: 0.000 validation_error: 20.855 +(epoch: 29, iters: 154944, time: 0.538, data: 0.000) G_L1: 14.265 G_L1_ABSOLUTE: 2.617 G_L1_RELATIVE: 11.649 G_Regularizer: 0.000 validation_error: 20.730 +(epoch: 29, iters: 156944, time: 0.569, data: 0.000) G_L1: 13.822 G_L1_ABSOLUTE: 2.170 G_L1_RELATIVE: 11.651 G_Regularizer: 0.000 validation_error: 20.981 +(epoch: 29, iters: 158944, time: 0.539, data: 0.000) G_L1: 12.739 G_L1_ABSOLUTE: 2.467 G_L1_RELATIVE: 10.272 G_Regularizer: 0.000 validation_error: 20.793 +(epoch: 29, iters: 160944, time: 0.549, data: 0.001) G_L1: 17.458 G_L1_ABSOLUTE: 2.625 G_L1_RELATIVE: 14.833 G_Regularizer: 0.000 validation_error: 21.370 +(epoch: 29, iters: 162944, time: 0.541, data: 0.000) G_L1: 12.440 G_L1_ABSOLUTE: 2.258 G_L1_RELATIVE: 10.182 G_Regularizer: 0.000 validation_error: 20.377 +(epoch: 29, iters: 164944, time: 0.547, data: 0.000) G_L1: 16.039 G_L1_ABSOLUTE: 2.683 G_L1_RELATIVE: 13.356 G_Regularizer: 0.000 validation_error: 20.830 +(epoch: 29, iters: 166944, time: 0.553, data: 0.000) G_L1: 14.918 G_L1_ABSOLUTE: 2.632 G_L1_RELATIVE: 12.286 G_Regularizer: 0.000 validation_error: 20.835 +(epoch: 29, iters: 168944, time: 0.546, data: 0.000) G_L1: 14.001 G_L1_ABSOLUTE: 2.389 G_L1_RELATIVE: 11.612 G_Regularizer: 0.000 validation_error: 21.360 +(epoch: 29, iters: 170944, time: 0.546, data: 0.000) G_L1: 12.809 G_L1_ABSOLUTE: 2.608 G_L1_RELATIVE: 10.202 G_Regularizer: 0.000 validation_error: 20.626 +(epoch: 29, iters: 172944, time: 0.547, data: 0.000) G_L1: 14.471 G_L1_ABSOLUTE: 2.335 G_L1_RELATIVE: 12.136 G_Regularizer: 0.000 validation_error: 20.508 +(epoch: 29, iters: 174944, time: 0.549, data: 0.000) G_L1: 15.480 G_L1_ABSOLUTE: 3.167 G_L1_RELATIVE: 12.313 G_Regularizer: 0.000 validation_error: 20.181 +(epoch: 29, iters: 176944, time: 0.552, data: 0.000) G_L1: 13.173 G_L1_ABSOLUTE: 2.341 G_L1_RELATIVE: 10.832 G_Regularizer: 0.000 validation_error: 20.915 +(epoch: 29, iters: 178944, time: 0.540, data: 0.000) G_L1: 14.595 G_L1_ABSOLUTE: 2.693 G_L1_RELATIVE: 11.902 G_Regularizer: 0.000 validation_error: 20.968 +(epoch: 29, iters: 180944, time: 0.555, data: 0.000) G_L1: 13.217 G_L1_ABSOLUTE: 2.460 G_L1_RELATIVE: 10.757 G_Regularizer: 0.000 validation_error: 20.630 +(epoch: 29, iters: 182944, time: 0.546, data: 0.000) G_L1: 13.267 G_L1_ABSOLUTE: 2.334 G_L1_RELATIVE: 10.932 G_Regularizer: 0.000 validation_error: 20.617 +(epoch: 29, iters: 184944, time: 0.550, data: 0.000) G_L1: 13.577 G_L1_ABSOLUTE: 2.570 G_L1_RELATIVE: 11.006 G_Regularizer: 0.000 validation_error: 20.813 +(epoch: 29, iters: 186944, time: 0.541, data: 0.000) G_L1: 13.685 G_L1_ABSOLUTE: 2.218 G_L1_RELATIVE: 11.467 G_Regularizer: 0.000 validation_error: 20.883 +(epoch: 29, iters: 188944, time: 0.540, data: 0.000) G_L1: 16.126 G_L1_ABSOLUTE: 2.730 G_L1_RELATIVE: 13.396 G_Regularizer: 0.000 validation_error: 20.788 +(epoch: 29, iters: 190944, time: 0.543, data: 0.000) G_L1: 15.471 G_L1_ABSOLUTE: 2.942 G_L1_RELATIVE: 12.530 G_Regularizer: 0.000 validation_error: 20.708 +(epoch: 29, iters: 192944, time: 0.538, data: 0.000) G_L1: 14.545 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 12.091 G_Regularizer: 0.000 validation_error: 20.161 +(epoch: 29, iters: 194944, time: 0.555, data: 0.000) G_L1: 12.990 G_L1_ABSOLUTE: 2.089 G_L1_RELATIVE: 10.901 G_Regularizer: 0.000 
validation_error: 20.503
+(epoch: 29, iters: 196944, time: 0.545, data: 0.000) G_L1: 13.842 G_L1_ABSOLUTE: 2.788 G_L1_RELATIVE: 11.054 G_Regularizer: 0.000 validation_error: 20.507
[... several hundred further per-iteration log entries for epochs 29 through 32 (iters logged every 2000, ~0.54 s per step): G_L1 ranges roughly 11–29, G_L1_ABSOLUTE roughly 1.9–3.6, G_L1_RELATIVE roughly 8–26, G_Regularizer is 0.000 throughout, and validation_error stays between roughly 19.9 and 21.8 ...]
+(epoch: 32, iters: 200688, time:
0.540, data: 0.000) G_L1: 16.914 G_L1_ABSOLUTE: 3.042 G_L1_RELATIVE: 13.872 G_Regularizer: 0.000 validation_error: 20.594 +(epoch: 32, iters: 202688, time: 0.557, data: 0.000) G_L1: 13.284 G_L1_ABSOLUTE: 2.478 G_L1_RELATIVE: 10.806 G_Regularizer: 0.000 validation_error: 20.559 +(epoch: 32, iters: 204688, time: 0.544, data: 0.000) G_L1: 14.461 G_L1_ABSOLUTE: 2.492 G_L1_RELATIVE: 11.968 G_Regularizer: 0.000 validation_error: 20.536 +(epoch: 32, iters: 206688, time: 0.547, data: 0.000) G_L1: 12.262 G_L1_ABSOLUTE: 2.528 G_L1_RELATIVE: 9.734 G_Regularizer: 0.000 validation_error: 20.905 +(epoch: 32, iters: 208688, time: 0.544, data: 0.000) G_L1: 13.697 G_L1_ABSOLUTE: 2.333 G_L1_RELATIVE: 11.365 G_Regularizer: 0.000 validation_error: 21.244 +(epoch: 32, iters: 210688, time: 0.544, data: 0.000) G_L1: 15.230 G_L1_ABSOLUTE: 3.028 G_L1_RELATIVE: 12.202 G_Regularizer: 0.000 validation_error: 20.524 +(epoch: 32, iters: 212688, time: 0.544, data: 0.000) G_L1: 16.565 G_L1_ABSOLUTE: 2.479 G_L1_RELATIVE: 14.086 G_Regularizer: 0.000 validation_error: 20.650 +(epoch: 32, iters: 214688, time: 0.544, data: 0.001) G_L1: 13.447 G_L1_ABSOLUTE: 2.104 G_L1_RELATIVE: 11.343 G_Regularizer: 0.000 validation_error: 20.896 +(epoch: 32, iters: 216688, time: 0.544, data: 0.000) G_L1: 16.394 G_L1_ABSOLUTE: 2.576 G_L1_RELATIVE: 13.818 G_Regularizer: 0.000 validation_error: 20.612 +(epoch: 32, iters: 218688, time: 0.542, data: 0.000) G_L1: 15.679 G_L1_ABSOLUTE: 2.361 G_L1_RELATIVE: 13.318 G_Regularizer: 0.000 validation_error: 20.983 +(epoch: 32, iters: 220688, time: 0.545, data: 0.000) G_L1: 13.574 G_L1_ABSOLUTE: 2.254 G_L1_RELATIVE: 11.320 G_Regularizer: 0.000 validation_error: 20.820 +(epoch: 32, iters: 222688, time: 0.550, data: 0.000) G_L1: 15.023 G_L1_ABSOLUTE: 2.485 G_L1_RELATIVE: 12.538 G_Regularizer: 0.000 validation_error: 21.031 +(epoch: 32, iters: 224688, time: 0.544, data: 0.000) G_L1: 14.424 G_L1_ABSOLUTE: 2.545 G_L1_RELATIVE: 11.878 G_Regularizer: 0.000 validation_error: 20.659 +(epoch: 32, iters: 226688, time: 0.544, data: 0.000) G_L1: 12.131 G_L1_ABSOLUTE: 2.021 G_L1_RELATIVE: 10.110 G_Regularizer: 0.000 validation_error: 21.043 +(epoch: 32, iters: 228688, time: 0.540, data: 0.000) G_L1: 13.827 G_L1_ABSOLUTE: 2.357 G_L1_RELATIVE: 11.470 G_Regularizer: 0.000 validation_error: 20.767 +(epoch: 32, iters: 230688, time: 0.549, data: 0.001) G_L1: 15.114 G_L1_ABSOLUTE: 2.554 G_L1_RELATIVE: 12.560 G_Regularizer: 0.000 validation_error: 21.387 +(epoch: 32, iters: 232688, time: 0.555, data: 0.000) G_L1: 13.571 G_L1_ABSOLUTE: 2.296 G_L1_RELATIVE: 11.275 G_Regularizer: 0.000 validation_error: 21.091 +(epoch: 32, iters: 234688, time: 0.546, data: 0.000) G_L1: 12.361 G_L1_ABSOLUTE: 2.519 G_L1_RELATIVE: 9.842 G_Regularizer: 0.000 validation_error: 20.831 +(epoch: 32, iters: 236688, time: 0.550, data: 0.001) G_L1: 14.613 G_L1_ABSOLUTE: 2.368 G_L1_RELATIVE: 12.245 G_Regularizer: 0.000 validation_error: 21.153 +(epoch: 32, iters: 238688, time: 0.542, data: 0.000) G_L1: 17.433 G_L1_ABSOLUTE: 2.306 G_L1_RELATIVE: 15.126 G_Regularizer: 0.000 validation_error: 21.147 +(epoch: 32, iters: 240688, time: 0.554, data: 0.000) G_L1: 13.187 G_L1_ABSOLUTE: 2.725 G_L1_RELATIVE: 10.462 G_Regularizer: 0.000 validation_error: 21.063 +(epoch: 32, iters: 242688, time: 0.541, data: 0.000) G_L1: 12.326 G_L1_ABSOLUTE: 2.554 G_L1_RELATIVE: 9.773 G_Regularizer: 0.000 validation_error: 20.958 +(epoch: 32, iters: 244688, time: 0.544, data: 0.000) G_L1: 13.364 G_L1_ABSOLUTE: 2.566 G_L1_RELATIVE: 10.798 G_Regularizer: 0.000 validation_error: 21.338 
+(epoch: 32, iters: 246688, time: 0.547, data: 0.000) G_L1: 11.285 G_L1_ABSOLUTE: 2.042 G_L1_RELATIVE: 9.243 G_Regularizer: 0.000 validation_error: 20.714 +(epoch: 32, iters: 248688, time: 0.555, data: 0.000) G_L1: 17.801 G_L1_ABSOLUTE: 2.968 G_L1_RELATIVE: 14.833 G_Regularizer: 0.000 validation_error: 20.783 +(epoch: 32, iters: 250688, time: 0.539, data: 0.000) G_L1: 17.351 G_L1_ABSOLUTE: 2.420 G_L1_RELATIVE: 14.931 G_Regularizer: 0.000 validation_error: 20.701 +(epoch: 32, iters: 252688, time: 0.547, data: 0.000) G_L1: 13.652 G_L1_ABSOLUTE: 2.135 G_L1_RELATIVE: 11.517 G_Regularizer: 0.000 validation_error: 20.911 +(epoch: 32, iters: 254688, time: 0.547, data: 0.000) G_L1: 15.311 G_L1_ABSOLUTE: 2.953 G_L1_RELATIVE: 12.358 G_Regularizer: 0.000 validation_error: 21.229 +(epoch: 32, iters: 256688, time: 0.541, data: 0.000) G_L1: 15.517 G_L1_ABSOLUTE: 2.499 G_L1_RELATIVE: 13.018 G_Regularizer: 0.000 validation_error: 20.504 +(epoch: 32, iters: 258688, time: 0.539, data: 0.000) G_L1: 16.318 G_L1_ABSOLUTE: 2.645 G_L1_RELATIVE: 13.673 G_Regularizer: 0.000 validation_error: 20.908 +(epoch: 32, iters: 260688, time: 0.542, data: 0.000) G_L1: 12.333 G_L1_ABSOLUTE: 2.387 G_L1_RELATIVE: 9.946 G_Regularizer: 0.000 validation_error: 20.305 +(epoch: 32, iters: 262688, time: 0.536, data: 0.000) G_L1: 11.829 G_L1_ABSOLUTE: 2.377 G_L1_RELATIVE: 9.452 G_Regularizer: 0.000 validation_error: 20.405 +(epoch: 32, iters: 264688, time: 0.543, data: 0.000) G_L1: 12.642 G_L1_ABSOLUTE: 2.167 G_L1_RELATIVE: 10.474 G_Regularizer: 0.000 validation_error: 20.689 +(epoch: 32, iters: 266688, time: 0.548, data: 0.000) G_L1: 13.674 G_L1_ABSOLUTE: 2.241 G_L1_RELATIVE: 11.433 G_Regularizer: 0.000 validation_error: 20.621 +(epoch: 32, iters: 268688, time: 0.553, data: 0.000) G_L1: 15.524 G_L1_ABSOLUTE: 2.504 G_L1_RELATIVE: 13.021 G_Regularizer: 0.000 validation_error: 20.496 +(epoch: 32, iters: 270688, time: 0.540, data: 0.000) G_L1: 13.685 G_L1_ABSOLUTE: 2.714 G_L1_RELATIVE: 10.971 G_Regularizer: 0.000 validation_error: 20.748 +(epoch: 32, iters: 272688, time: 0.549, data: 0.000) G_L1: 16.412 G_L1_ABSOLUTE: 3.004 G_L1_RELATIVE: 13.408 G_Regularizer: 0.000 validation_error: 20.673 +(epoch: 32, iters: 274688, time: 0.552, data: 0.000) G_L1: 17.846 G_L1_ABSOLUTE: 3.076 G_L1_RELATIVE: 14.770 G_Regularizer: 0.000 validation_error: 20.763 +(epoch: 32, iters: 276688, time: 0.542, data: 0.000) G_L1: 15.899 G_L1_ABSOLUTE: 2.741 G_L1_RELATIVE: 13.158 G_Regularizer: 0.000 validation_error: 21.252 +(epoch: 32, iters: 278688, time: 0.542, data: 0.000) G_L1: 12.479 G_L1_ABSOLUTE: 2.373 G_L1_RELATIVE: 10.106 G_Regularizer: 0.000 validation_error: 20.841 +(epoch: 32, iters: 280688, time: 0.549, data: 0.000) G_L1: 14.473 G_L1_ABSOLUTE: 2.430 G_L1_RELATIVE: 12.042 G_Regularizer: 0.000 validation_error: 20.848 +(epoch: 32, iters: 282688, time: 0.549, data: 0.000) G_L1: 16.495 G_L1_ABSOLUTE: 2.655 G_L1_RELATIVE: 13.840 G_Regularizer: 0.000 validation_error: 20.730 +(epoch: 32, iters: 284688, time: 0.545, data: 0.000) G_L1: 15.645 G_L1_ABSOLUTE: 2.620 G_L1_RELATIVE: 13.025 G_Regularizer: 0.000 validation_error: 20.976 +(epoch: 32, iters: 286688, time: 0.551, data: 0.000) G_L1: 14.766 G_L1_ABSOLUTE: 2.251 G_L1_RELATIVE: 12.515 G_Regularizer: 0.000 validation_error: 20.485 +(epoch: 32, iters: 288688, time: 0.553, data: 0.000) G_L1: 14.682 G_L1_ABSOLUTE: 2.593 G_L1_RELATIVE: 12.090 G_Regularizer: 0.000 validation_error: 21.310 +(epoch: 32, iters: 290688, time: 0.544, data: 0.000) G_L1: 14.097 G_L1_ABSOLUTE: 2.503 G_L1_RELATIVE: 11.595 G_Regularizer: 
0.000 validation_error: 21.100 +(epoch: 32, iters: 292688, time: 0.550, data: 0.000) G_L1: 15.115 G_L1_ABSOLUTE: 2.806 G_L1_RELATIVE: 12.309 G_Regularizer: 0.000 validation_error: 20.581 +(epoch: 32, iters: 294688, time: 0.548, data: 0.000) G_L1: 14.533 G_L1_ABSOLUTE: 2.757 G_L1_RELATIVE: 11.776 G_Regularizer: 0.000 validation_error: 20.845 +(epoch: 32, iters: 296688, time: 0.545, data: 0.000) G_L1: 12.034 G_L1_ABSOLUTE: 2.400 G_L1_RELATIVE: 9.635 G_Regularizer: 0.000 validation_error: 20.831 +(epoch: 32, iters: 298688, time: 0.550, data: 0.000) G_L1: 14.842 G_L1_ABSOLUTE: 2.447 G_L1_RELATIVE: 12.395 G_Regularizer: 0.000 validation_error: 20.706 +(epoch: 32, iters: 300688, time: 0.554, data: 0.000) G_L1: 18.641 G_L1_ABSOLUTE: 2.774 G_L1_RELATIVE: 15.867 G_Regularizer: 0.000 validation_error: 21.033 +(epoch: 32, iters: 302688, time: 0.542, data: 0.001) G_L1: 13.871 G_L1_ABSOLUTE: 2.088 G_L1_RELATIVE: 11.783 G_Regularizer: 0.000 validation_error: 20.503 +(epoch: 33, iters: 1936, time: 0.546, data: 0.000) G_L1: 13.409 G_L1_ABSOLUTE: 2.122 G_L1_RELATIVE: 11.287 G_Regularizer: 0.000 validation_error: 20.866 +(epoch: 33, iters: 3936, time: 0.547, data: 0.000) G_L1: 15.309 G_L1_ABSOLUTE: 2.929 G_L1_RELATIVE: 12.379 G_Regularizer: 0.000 validation_error: 20.475 +(epoch: 33, iters: 5936, time: 0.551, data: 0.000) G_L1: 13.525 G_L1_ABSOLUTE: 2.148 G_L1_RELATIVE: 11.377 G_Regularizer: 0.000 validation_error: 20.178 +(epoch: 33, iters: 7936, time: 0.541, data: 0.000) G_L1: 14.559 G_L1_ABSOLUTE: 2.296 G_L1_RELATIVE: 12.263 G_Regularizer: 0.000 validation_error: 21.116 +(epoch: 33, iters: 9936, time: 0.545, data: 0.000) G_L1: 15.649 G_L1_ABSOLUTE: 2.904 G_L1_RELATIVE: 12.745 G_Regularizer: 0.000 validation_error: 20.965 +(epoch: 33, iters: 11936, time: 0.545, data: 0.000) G_L1: 12.280 G_L1_ABSOLUTE: 2.120 G_L1_RELATIVE: 10.159 G_Regularizer: 0.000 validation_error: 20.880 +(epoch: 33, iters: 13936, time: 0.543, data: 0.000) G_L1: 14.211 G_L1_ABSOLUTE: 2.932 G_L1_RELATIVE: 11.279 G_Regularizer: 0.000 validation_error: 20.371 +(epoch: 33, iters: 15936, time: 0.552, data: 0.000) G_L1: 14.902 G_L1_ABSOLUTE: 2.506 G_L1_RELATIVE: 12.396 G_Regularizer: 0.000 validation_error: 20.809 +(epoch: 33, iters: 17936, time: 0.544, data: 0.000) G_L1: 15.253 G_L1_ABSOLUTE: 2.207 G_L1_RELATIVE: 13.046 G_Regularizer: 0.000 validation_error: 20.782 +(epoch: 33, iters: 19936, time: 0.552, data: 0.000) G_L1: 16.224 G_L1_ABSOLUTE: 3.257 G_L1_RELATIVE: 12.967 G_Regularizer: 0.000 validation_error: 20.950 +(epoch: 33, iters: 21936, time: 0.547, data: 0.000) G_L1: 13.255 G_L1_ABSOLUTE: 2.884 G_L1_RELATIVE: 10.371 G_Regularizer: 0.000 validation_error: 20.807 +(epoch: 33, iters: 23936, time: 0.553, data: 0.000) G_L1: 15.811 G_L1_ABSOLUTE: 2.209 G_L1_RELATIVE: 13.601 G_Regularizer: 0.000 validation_error: 20.809 +(epoch: 33, iters: 25936, time: 0.544, data: 0.000) G_L1: 17.588 G_L1_ABSOLUTE: 2.400 G_L1_RELATIVE: 15.189 G_Regularizer: 0.000 validation_error: 21.231 +(epoch: 33, iters: 27936, time: 0.553, data: 0.000) G_L1: 12.939 G_L1_ABSOLUTE: 2.721 G_L1_RELATIVE: 10.218 G_Regularizer: 0.000 validation_error: 20.705 +(epoch: 33, iters: 29936, time: 0.556, data: 0.000) G_L1: 24.473 G_L1_ABSOLUTE: 2.506 G_L1_RELATIVE: 21.968 G_Regularizer: 0.000 validation_error: 20.922 +(epoch: 33, iters: 31936, time: 0.554, data: 0.000) G_L1: 19.335 G_L1_ABSOLUTE: 3.288 G_L1_RELATIVE: 16.048 G_Regularizer: 0.000 validation_error: 21.261 +(epoch: 33, iters: 33936, time: 0.541, data: 0.000) G_L1: 15.943 G_L1_ABSOLUTE: 2.469 G_L1_RELATIVE: 13.474 
G_Regularizer: 0.000 validation_error: 20.788 +(epoch: 33, iters: 35936, time: 0.545, data: 0.000) G_L1: 17.470 G_L1_ABSOLUTE: 2.192 G_L1_RELATIVE: 15.279 G_Regularizer: 0.000 validation_error: 21.097 +(epoch: 33, iters: 37936, time: 0.546, data: 0.000) G_L1: 14.971 G_L1_ABSOLUTE: 2.586 G_L1_RELATIVE: 12.385 G_Regularizer: 0.000 validation_error: 21.008 +(epoch: 33, iters: 39936, time: 0.544, data: 0.000) G_L1: 17.201 G_L1_ABSOLUTE: 2.417 G_L1_RELATIVE: 14.784 G_Regularizer: 0.000 validation_error: 20.776 +(epoch: 33, iters: 41936, time: 0.550, data: 0.000) G_L1: 12.462 G_L1_ABSOLUTE: 2.370 G_L1_RELATIVE: 10.092 G_Regularizer: 0.000 validation_error: 21.161 +(epoch: 33, iters: 43936, time: 0.543, data: 0.000) G_L1: 15.364 G_L1_ABSOLUTE: 2.592 G_L1_RELATIVE: 12.773 G_Regularizer: 0.000 validation_error: 21.223 +(epoch: 33, iters: 45936, time: 0.547, data: 0.000) G_L1: 16.958 G_L1_ABSOLUTE: 3.378 G_L1_RELATIVE: 13.580 G_Regularizer: 0.000 validation_error: 21.549 +(epoch: 33, iters: 47936, time: 0.546, data: 0.000) G_L1: 14.058 G_L1_ABSOLUTE: 2.596 G_L1_RELATIVE: 11.461 G_Regularizer: 0.000 validation_error: 20.539 +(epoch: 33, iters: 49936, time: 0.546, data: 0.000) G_L1: 18.969 G_L1_ABSOLUTE: 2.619 G_L1_RELATIVE: 16.350 G_Regularizer: 0.000 validation_error: 20.480 +(epoch: 33, iters: 51936, time: 0.546, data: 0.000) G_L1: 16.446 G_L1_ABSOLUTE: 2.781 G_L1_RELATIVE: 13.664 G_Regularizer: 0.000 validation_error: 20.637 +(epoch: 33, iters: 53936, time: 0.546, data: 0.000) G_L1: 16.173 G_L1_ABSOLUTE: 2.419 G_L1_RELATIVE: 13.754 G_Regularizer: 0.000 validation_error: 20.461 +(epoch: 33, iters: 55936, time: 0.545, data: 0.000) G_L1: 12.979 G_L1_ABSOLUTE: 2.533 G_L1_RELATIVE: 10.446 G_Regularizer: 0.000 validation_error: 21.299 +(epoch: 33, iters: 57936, time: 0.552, data: 0.000) G_L1: 15.505 G_L1_ABSOLUTE: 2.717 G_L1_RELATIVE: 12.788 G_Regularizer: 0.000 validation_error: 21.066 +(epoch: 33, iters: 59936, time: 0.544, data: 0.000) G_L1: 11.402 G_L1_ABSOLUTE: 2.333 G_L1_RELATIVE: 9.069 G_Regularizer: 0.000 validation_error: 21.067 +(epoch: 33, iters: 61936, time: 0.537, data: 0.000) G_L1: 15.358 G_L1_ABSOLUTE: 2.590 G_L1_RELATIVE: 12.768 G_Regularizer: 0.000 validation_error: 20.327 +(epoch: 33, iters: 63936, time: 0.553, data: 0.000) G_L1: 13.175 G_L1_ABSOLUTE: 2.568 G_L1_RELATIVE: 10.607 G_Regularizer: 0.000 validation_error: 20.918 +(epoch: 33, iters: 65936, time: 0.550, data: 0.000) G_L1: 13.051 G_L1_ABSOLUTE: 2.435 G_L1_RELATIVE: 10.616 G_Regularizer: 0.000 validation_error: 20.650 +(epoch: 33, iters: 67936, time: 0.543, data: 0.000) G_L1: 14.593 G_L1_ABSOLUTE: 2.239 G_L1_RELATIVE: 12.354 G_Regularizer: 0.000 validation_error: 20.772 +(epoch: 33, iters: 69936, time: 0.546, data: 0.000) G_L1: 14.735 G_L1_ABSOLUTE: 2.039 G_L1_RELATIVE: 12.696 G_Regularizer: 0.000 validation_error: 20.400 +(epoch: 33, iters: 71936, time: 0.551, data: 0.000) G_L1: 13.781 G_L1_ABSOLUTE: 2.526 G_L1_RELATIVE: 11.255 G_Regularizer: 0.000 validation_error: 21.063 +(epoch: 33, iters: 73936, time: 0.543, data: 0.000) G_L1: 13.518 G_L1_ABSOLUTE: 2.291 G_L1_RELATIVE: 11.227 G_Regularizer: 0.000 validation_error: 20.804 +(epoch: 33, iters: 75936, time: 0.554, data: 0.000) G_L1: 14.428 G_L1_ABSOLUTE: 2.331 G_L1_RELATIVE: 12.098 G_Regularizer: 0.000 validation_error: 20.703 +(epoch: 33, iters: 77936, time: 0.541, data: 0.000) G_L1: 14.429 G_L1_ABSOLUTE: 2.239 G_L1_RELATIVE: 12.190 G_Regularizer: 0.000 validation_error: 20.545 +(epoch: 33, iters: 79936, time: 0.549, data: 0.000) G_L1: 15.632 G_L1_ABSOLUTE: 2.880 
G_L1_RELATIVE: 12.752 G_Regularizer: 0.000 validation_error: 21.076 +(epoch: 33, iters: 81936, time: 0.550, data: 0.000) G_L1: 15.446 G_L1_ABSOLUTE: 3.144 G_L1_RELATIVE: 12.302 G_Regularizer: 0.000 validation_error: 20.878 +(epoch: 33, iters: 83936, time: 0.546, data: 0.000) G_L1: 15.278 G_L1_ABSOLUTE: 2.415 G_L1_RELATIVE: 12.863 G_Regularizer: 0.000 validation_error: 20.957 +(epoch: 33, iters: 85936, time: 0.539, data: 0.000) G_L1: 13.841 G_L1_ABSOLUTE: 2.265 G_L1_RELATIVE: 11.576 G_Regularizer: 0.000 validation_error: 21.012 +(epoch: 33, iters: 87936, time: 0.545, data: 0.000) G_L1: 17.099 G_L1_ABSOLUTE: 2.781 G_L1_RELATIVE: 14.318 G_Regularizer: 0.000 validation_error: 21.172 +(epoch: 33, iters: 89936, time: 0.560, data: 0.000) G_L1: 13.707 G_L1_ABSOLUTE: 2.424 G_L1_RELATIVE: 11.282 G_Regularizer: 0.000 validation_error: 20.949 +(epoch: 33, iters: 91936, time: 0.558, data: 0.000) G_L1: 14.734 G_L1_ABSOLUTE: 2.415 G_L1_RELATIVE: 12.319 G_Regularizer: 0.000 validation_error: 21.062 +(epoch: 33, iters: 93936, time: 0.547, data: 0.000) G_L1: 13.566 G_L1_ABSOLUTE: 2.524 G_L1_RELATIVE: 11.041 G_Regularizer: 0.000 validation_error: 21.175 +(epoch: 33, iters: 95936, time: 0.547, data: 0.000) G_L1: 13.512 G_L1_ABSOLUTE: 2.634 G_L1_RELATIVE: 10.877 G_Regularizer: 0.000 validation_error: 20.773 +(epoch: 33, iters: 97936, time: 0.547, data: 0.000) G_L1: 12.153 G_L1_ABSOLUTE: 2.462 G_L1_RELATIVE: 9.691 G_Regularizer: 0.000 validation_error: 20.497 +(epoch: 33, iters: 99936, time: 0.539, data: 0.000) G_L1: 15.071 G_L1_ABSOLUTE: 2.651 G_L1_RELATIVE: 12.420 G_Regularizer: 0.000 validation_error: 20.688 +(epoch: 33, iters: 101936, time: 0.542, data: 0.000) G_L1: 15.074 G_L1_ABSOLUTE: 3.011 G_L1_RELATIVE: 12.063 G_Regularizer: 0.000 validation_error: 20.456 +(epoch: 33, iters: 103936, time: 0.549, data: 0.000) G_L1: 14.412 G_L1_ABSOLUTE: 2.628 G_L1_RELATIVE: 11.784 G_Regularizer: 0.000 validation_error: 20.985 +(epoch: 33, iters: 105936, time: 0.547, data: 0.000) G_L1: 14.999 G_L1_ABSOLUTE: 2.567 G_L1_RELATIVE: 12.431 G_Regularizer: 0.000 validation_error: 20.996 +(epoch: 33, iters: 107936, time: 0.544, data: 0.000) G_L1: 15.014 G_L1_ABSOLUTE: 2.379 G_L1_RELATIVE: 12.635 G_Regularizer: 0.000 validation_error: 20.601 +(epoch: 33, iters: 109936, time: 0.550, data: 0.000) G_L1: 14.181 G_L1_ABSOLUTE: 2.618 G_L1_RELATIVE: 11.564 G_Regularizer: 0.000 validation_error: 20.711 +(epoch: 33, iters: 111936, time: 0.546, data: 0.001) G_L1: 13.038 G_L1_ABSOLUTE: 2.409 G_L1_RELATIVE: 10.629 G_Regularizer: 0.000 validation_error: 20.660 +(epoch: 33, iters: 113936, time: 0.542, data: 0.000) G_L1: 14.570 G_L1_ABSOLUTE: 2.834 G_L1_RELATIVE: 11.736 G_Regularizer: 0.000 validation_error: 21.018 +(epoch: 33, iters: 115936, time: 0.546, data: 0.000) G_L1: 13.842 G_L1_ABSOLUTE: 2.340 G_L1_RELATIVE: 11.502 G_Regularizer: 0.000 validation_error: 20.886 +(epoch: 33, iters: 117936, time: 0.556, data: 0.000) G_L1: 13.724 G_L1_ABSOLUTE: 2.497 G_L1_RELATIVE: 11.228 G_Regularizer: 0.000 validation_error: 20.815 +(epoch: 33, iters: 119936, time: 0.547, data: 0.000) G_L1: 14.401 G_L1_ABSOLUTE: 2.771 G_L1_RELATIVE: 11.630 G_Regularizer: 0.000 validation_error: 20.812 +(epoch: 33, iters: 121936, time: 0.539, data: 0.000) G_L1: 15.212 G_L1_ABSOLUTE: 2.654 G_L1_RELATIVE: 12.558 G_Regularizer: 0.000 validation_error: 20.507 +(epoch: 33, iters: 123936, time: 0.545, data: 0.000) G_L1: 13.879 G_L1_ABSOLUTE: 2.723 G_L1_RELATIVE: 11.156 G_Regularizer: 0.000 validation_error: 20.503 +(epoch: 33, iters: 125936, time: 0.550, data: 0.000) G_L1: 
14.953 G_L1_ABSOLUTE: 2.741 G_L1_RELATIVE: 12.212 G_Regularizer: 0.000 validation_error: 20.975 +(epoch: 33, iters: 127936, time: 0.545, data: 0.000) G_L1: 16.142 G_L1_ABSOLUTE: 2.385 G_L1_RELATIVE: 13.757 G_Regularizer: 0.000 validation_error: 20.790 +(epoch: 33, iters: 129936, time: 0.547, data: 0.000) G_L1: 14.769 G_L1_ABSOLUTE: 2.714 G_L1_RELATIVE: 12.055 G_Regularizer: 0.000 validation_error: 20.349 +(epoch: 33, iters: 131936, time: 0.549, data: 0.000) G_L1: 16.240 G_L1_ABSOLUTE: 2.823 G_L1_RELATIVE: 13.417 G_Regularizer: 0.000 validation_error: 21.006 +(epoch: 33, iters: 133936, time: 0.542, data: 0.000) G_L1: 12.864 G_L1_ABSOLUTE: 2.258 G_L1_RELATIVE: 10.605 G_Regularizer: 0.000 validation_error: 20.890 +(epoch: 33, iters: 135936, time: 0.548, data: 0.000) G_L1: 13.053 G_L1_ABSOLUTE: 2.286 G_L1_RELATIVE: 10.768 G_Regularizer: 0.000 validation_error: 20.183 +(epoch: 33, iters: 137936, time: 0.546, data: 0.001) G_L1: 11.928 G_L1_ABSOLUTE: 2.198 G_L1_RELATIVE: 9.730 G_Regularizer: 0.000 validation_error: 20.681 +(epoch: 33, iters: 139936, time: 0.541, data: 0.000) G_L1: 15.216 G_L1_ABSOLUTE: 2.871 G_L1_RELATIVE: 12.345 G_Regularizer: 0.000 validation_error: 20.998 +(epoch: 33, iters: 141936, time: 0.537, data: 0.000) G_L1: 13.808 G_L1_ABSOLUTE: 2.652 G_L1_RELATIVE: 11.156 G_Regularizer: 0.000 validation_error: 20.828 +(epoch: 33, iters: 143936, time: 0.545, data: 0.000) G_L1: 13.653 G_L1_ABSOLUTE: 2.846 G_L1_RELATIVE: 10.808 G_Regularizer: 0.000 validation_error: 20.879 +(epoch: 33, iters: 145936, time: 0.542, data: 0.000) G_L1: 14.972 G_L1_ABSOLUTE: 2.465 G_L1_RELATIVE: 12.506 G_Regularizer: 0.000 validation_error: 20.934 +(epoch: 33, iters: 147936, time: 0.553, data: 0.000) G_L1: 10.777 G_L1_ABSOLUTE: 1.930 G_L1_RELATIVE: 8.848 G_Regularizer: 0.000 validation_error: 20.377 +(epoch: 33, iters: 149936, time: 0.544, data: 0.000) G_L1: 14.503 G_L1_ABSOLUTE: 2.182 G_L1_RELATIVE: 12.321 G_Regularizer: 0.000 validation_error: 20.391 +(epoch: 33, iters: 151936, time: 0.544, data: 0.000) G_L1: 16.129 G_L1_ABSOLUTE: 2.560 G_L1_RELATIVE: 13.569 G_Regularizer: 0.000 validation_error: 21.069 +(epoch: 33, iters: 153936, time: 0.547, data: 0.001) G_L1: 16.473 G_L1_ABSOLUTE: 2.450 G_L1_RELATIVE: 14.024 G_Regularizer: 0.000 validation_error: 20.519 +(epoch: 33, iters: 155936, time: 0.541, data: 0.000) G_L1: 15.486 G_L1_ABSOLUTE: 2.412 G_L1_RELATIVE: 13.075 G_Regularizer: 0.000 validation_error: 21.350 +(epoch: 33, iters: 157936, time: 0.550, data: 0.000) G_L1: 12.938 G_L1_ABSOLUTE: 2.128 G_L1_RELATIVE: 10.810 G_Regularizer: 0.000 validation_error: 20.957 +(epoch: 33, iters: 159936, time: 0.547, data: 0.000) G_L1: 13.856 G_L1_ABSOLUTE: 2.402 G_L1_RELATIVE: 11.454 G_Regularizer: 0.000 validation_error: 20.527 +(epoch: 33, iters: 161936, time: 0.563, data: 0.000) G_L1: 13.801 G_L1_ABSOLUTE: 2.691 G_L1_RELATIVE: 11.110 G_Regularizer: 0.000 validation_error: 20.780 +(epoch: 33, iters: 163936, time: 0.545, data: 0.000) G_L1: 15.292 G_L1_ABSOLUTE: 2.314 G_L1_RELATIVE: 12.978 G_Regularizer: 0.000 validation_error: 20.854 +(epoch: 33, iters: 165936, time: 0.546, data: 0.000) G_L1: 12.393 G_L1_ABSOLUTE: 2.320 G_L1_RELATIVE: 10.073 G_Regularizer: 0.000 validation_error: 20.827 +(epoch: 33, iters: 167936, time: 0.543, data: 0.000) G_L1: 14.948 G_L1_ABSOLUTE: 2.944 G_L1_RELATIVE: 12.004 G_Regularizer: 0.000 validation_error: 20.858 +(epoch: 33, iters: 169936, time: 0.547, data: 0.000) G_L1: 16.082 G_L1_ABSOLUTE: 3.403 G_L1_RELATIVE: 12.679 G_Regularizer: 0.000 validation_error: 20.945 +(epoch: 33, iters: 171936, 
time: 0.544, data: 0.000) G_L1: 13.112 G_L1_ABSOLUTE: 2.369 G_L1_RELATIVE: 10.744 G_Regularizer: 0.000 validation_error: 20.815 +(epoch: 33, iters: 173936, time: 0.546, data: 0.000) G_L1: 14.250 G_L1_ABSOLUTE: 2.368 G_L1_RELATIVE: 11.883 G_Regularizer: 0.000 validation_error: 21.099 +(epoch: 33, iters: 175936, time: 0.547, data: 0.000) G_L1: 15.091 G_L1_ABSOLUTE: 2.539 G_L1_RELATIVE: 12.553 G_Regularizer: 0.000 validation_error: 20.785 +(epoch: 33, iters: 177936, time: 0.554, data: 0.000) G_L1: 15.981 G_L1_ABSOLUTE: 2.838 G_L1_RELATIVE: 13.143 G_Regularizer: 0.000 validation_error: 21.251 +(epoch: 33, iters: 179936, time: 0.541, data: 0.000) G_L1: 15.346 G_L1_ABSOLUTE: 2.533 G_L1_RELATIVE: 12.812 G_Regularizer: 0.000 validation_error: 21.156 +(epoch: 33, iters: 181936, time: 0.546, data: 0.000) G_L1: 15.805 G_L1_ABSOLUTE: 2.912 G_L1_RELATIVE: 12.893 G_Regularizer: 0.000 validation_error: 20.565 +(epoch: 33, iters: 183936, time: 0.546, data: 0.000) G_L1: 13.610 G_L1_ABSOLUTE: 2.287 G_L1_RELATIVE: 11.323 G_Regularizer: 0.000 validation_error: 21.227 +(epoch: 33, iters: 185936, time: 0.544, data: 0.000) G_L1: 17.934 G_L1_ABSOLUTE: 2.626 G_L1_RELATIVE: 15.308 G_Regularizer: 0.000 validation_error: 20.973 +(epoch: 33, iters: 187936, time: 0.549, data: 0.000) G_L1: 14.328 G_L1_ABSOLUTE: 2.276 G_L1_RELATIVE: 12.051 G_Regularizer: 0.000 validation_error: 20.671 +(epoch: 33, iters: 189936, time: 0.548, data: 0.001) G_L1: 15.319 G_L1_ABSOLUTE: 2.678 G_L1_RELATIVE: 12.641 G_Regularizer: 0.000 validation_error: 20.847 +(epoch: 33, iters: 191936, time: 0.547, data: 0.000) G_L1: 12.611 G_L1_ABSOLUTE: 2.571 G_L1_RELATIVE: 10.040 G_Regularizer: 0.000 validation_error: 20.551 +(epoch: 33, iters: 193936, time: 0.549, data: 0.000) G_L1: 9.905 G_L1_ABSOLUTE: 2.347 G_L1_RELATIVE: 7.558 G_Regularizer: 0.000 validation_error: 21.098 +(epoch: 33, iters: 195936, time: 0.544, data: 0.000) G_L1: 15.420 G_L1_ABSOLUTE: 2.518 G_L1_RELATIVE: 12.902 G_Regularizer: 0.000 validation_error: 20.569 +(epoch: 33, iters: 197936, time: 0.545, data: 0.000) G_L1: 14.987 G_L1_ABSOLUTE: 2.732 G_L1_RELATIVE: 12.256 G_Regularizer: 0.000 validation_error: 20.461 +(epoch: 33, iters: 199936, time: 0.540, data: 0.000) G_L1: 15.013 G_L1_ABSOLUTE: 2.484 G_L1_RELATIVE: 12.530 G_Regularizer: 0.000 validation_error: 21.404 +(epoch: 33, iters: 201936, time: 0.550, data: 0.000) G_L1: 12.909 G_L1_ABSOLUTE: 2.763 G_L1_RELATIVE: 10.145 G_Regularizer: 0.000 validation_error: 20.532 +(epoch: 33, iters: 203936, time: 0.546, data: 0.000) G_L1: 14.679 G_L1_ABSOLUTE: 2.249 G_L1_RELATIVE: 12.430 G_Regularizer: 0.000 validation_error: 20.864 +(epoch: 33, iters: 205936, time: 0.551, data: 0.000) G_L1: 14.088 G_L1_ABSOLUTE: 2.493 G_L1_RELATIVE: 11.596 G_Regularizer: 0.000 validation_error: 20.743 +(epoch: 33, iters: 207936, time: 0.549, data: 0.000) G_L1: 14.133 G_L1_ABSOLUTE: 2.317 G_L1_RELATIVE: 11.816 G_Regularizer: 0.000 validation_error: 20.555 +(epoch: 33, iters: 209936, time: 0.550, data: 0.000) G_L1: 14.676 G_L1_ABSOLUTE: 2.340 G_L1_RELATIVE: 12.336 G_Regularizer: 0.000 validation_error: 20.860 +(epoch: 33, iters: 211936, time: 0.546, data: 0.000) G_L1: 17.943 G_L1_ABSOLUTE: 2.398 G_L1_RELATIVE: 15.544 G_Regularizer: 0.000 validation_error: 21.004 +(epoch: 33, iters: 213936, time: 0.538, data: 0.000) G_L1: 14.725 G_L1_ABSOLUTE: 2.539 G_L1_RELATIVE: 12.186 G_Regularizer: 0.000 validation_error: 21.211 +(epoch: 33, iters: 215936, time: 0.545, data: 0.000) G_L1: 13.449 G_L1_ABSOLUTE: 2.239 G_L1_RELATIVE: 11.210 G_Regularizer: 0.000 validation_error: 
20.598 +(epoch: 33, iters: 217936, time: 0.540, data: 0.000) G_L1: 14.112 G_L1_ABSOLUTE: 2.272 G_L1_RELATIVE: 11.839 G_Regularizer: 0.000 validation_error: 21.074 +(epoch: 33, iters: 219936, time: 0.547, data: 0.000) G_L1: 12.961 G_L1_ABSOLUTE: 2.393 G_L1_RELATIVE: 10.568 G_Regularizer: 0.000 validation_error: 21.010 +(epoch: 33, iters: 221936, time: 0.553, data: 0.000) G_L1: 12.151 G_L1_ABSOLUTE: 1.813 G_L1_RELATIVE: 10.338 G_Regularizer: 0.000 validation_error: 20.610 +(epoch: 33, iters: 223936, time: 0.548, data: 0.000) G_L1: 14.776 G_L1_ABSOLUTE: 2.899 G_L1_RELATIVE: 11.877 G_Regularizer: 0.000 validation_error: 20.877 +(epoch: 33, iters: 225936, time: 0.540, data: 0.000) G_L1: 13.661 G_L1_ABSOLUTE: 2.449 G_L1_RELATIVE: 11.212 G_Regularizer: 0.000 validation_error: 20.868 +(epoch: 33, iters: 227936, time: 0.549, data: 0.000) G_L1: 17.337 G_L1_ABSOLUTE: 2.937 G_L1_RELATIVE: 14.400 G_Regularizer: 0.000 validation_error: 20.762 +(epoch: 33, iters: 229936, time: 0.549, data: 0.001) G_L1: 14.612 G_L1_ABSOLUTE: 2.632 G_L1_RELATIVE: 11.980 G_Regularizer: 0.000 validation_error: 20.713 +(epoch: 33, iters: 231936, time: 0.543, data: 0.000) G_L1: 13.629 G_L1_ABSOLUTE: 2.407 G_L1_RELATIVE: 11.222 G_Regularizer: 0.000 validation_error: 20.381 +(epoch: 33, iters: 233936, time: 0.543, data: 0.000) G_L1: 16.069 G_L1_ABSOLUTE: 2.805 G_L1_RELATIVE: 13.264 G_Regularizer: 0.000 validation_error: 20.574 +(epoch: 33, iters: 235936, time: 0.543, data: 0.000) G_L1: 14.173 G_L1_ABSOLUTE: 2.430 G_L1_RELATIVE: 11.743 G_Regularizer: 0.000 validation_error: 19.987 +(epoch: 33, iters: 237936, time: 0.547, data: 0.000) G_L1: 14.772 G_L1_ABSOLUTE: 2.175 G_L1_RELATIVE: 12.597 G_Regularizer: 0.000 validation_error: 20.548 +(epoch: 33, iters: 239936, time: 0.543, data: 0.001) G_L1: 12.691 G_L1_ABSOLUTE: 2.241 G_L1_RELATIVE: 10.450 G_Regularizer: 0.000 validation_error: 20.628 +(epoch: 33, iters: 241936, time: 0.549, data: 0.000) G_L1: 16.423 G_L1_ABSOLUTE: 2.598 G_L1_RELATIVE: 13.824 G_Regularizer: 0.000 validation_error: 20.484 +(epoch: 33, iters: 243936, time: 0.542, data: 0.000) G_L1: 15.230 G_L1_ABSOLUTE: 2.197 G_L1_RELATIVE: 13.033 G_Regularizer: 0.000 validation_error: 20.605 +(epoch: 33, iters: 245936, time: 0.548, data: 0.000) G_L1: 12.223 G_L1_ABSOLUTE: 2.254 G_L1_RELATIVE: 9.969 G_Regularizer: 0.000 validation_error: 21.022 +(epoch: 33, iters: 247936, time: 0.546, data: 0.000) G_L1: 13.160 G_L1_ABSOLUTE: 2.338 G_L1_RELATIVE: 10.822 G_Regularizer: 0.000 validation_error: 19.880 +(epoch: 33, iters: 249936, time: 0.543, data: 0.000) G_L1: 15.686 G_L1_ABSOLUTE: 2.428 G_L1_RELATIVE: 13.258 G_Regularizer: 0.000 validation_error: 20.715 +(epoch: 33, iters: 251936, time: 0.548, data: 0.000) G_L1: 14.987 G_L1_ABSOLUTE: 2.749 G_L1_RELATIVE: 12.239 G_Regularizer: 0.000 validation_error: 20.648 +(epoch: 33, iters: 253936, time: 0.545, data: 0.000) G_L1: 16.745 G_L1_ABSOLUTE: 2.497 G_L1_RELATIVE: 14.249 G_Regularizer: 0.000 validation_error: 20.515 +(epoch: 33, iters: 255936, time: 0.546, data: 0.000) G_L1: 12.479 G_L1_ABSOLUTE: 2.385 G_L1_RELATIVE: 10.094 G_Regularizer: 0.000 validation_error: 20.843 +(epoch: 33, iters: 257936, time: 0.550, data: 0.000) G_L1: 14.069 G_L1_ABSOLUTE: 2.627 G_L1_RELATIVE: 11.442 G_Regularizer: 0.000 validation_error: 20.298 +(epoch: 33, iters: 259936, time: 0.540, data: 0.001) G_L1: 14.659 G_L1_ABSOLUTE: 2.442 G_L1_RELATIVE: 12.217 G_Regularizer: 0.000 validation_error: 20.893 +(epoch: 33, iters: 261936, time: 0.559, data: 0.001) G_L1: 14.342 G_L1_ABSOLUTE: 3.172 G_L1_RELATIVE: 11.170 
G_Regularizer: 0.000 validation_error: 20.637 +(epoch: 33, iters: 263936, time: 0.546, data: 0.000) G_L1: 13.462 G_L1_ABSOLUTE: 2.732 G_L1_RELATIVE: 10.730 G_Regularizer: 0.000 validation_error: 21.515 +(epoch: 33, iters: 265936, time: 0.544, data: 0.000) G_L1: 14.168 G_L1_ABSOLUTE: 2.290 G_L1_RELATIVE: 11.877 G_Regularizer: 0.000 validation_error: 20.901 +(epoch: 33, iters: 267936, time: 0.553, data: 0.000) G_L1: 15.194 G_L1_ABSOLUTE: 2.806 G_L1_RELATIVE: 12.389 G_Regularizer: 0.000 validation_error: 20.408 +(epoch: 33, iters: 269936, time: 0.551, data: 0.000) G_L1: 15.737 G_L1_ABSOLUTE: 2.642 G_L1_RELATIVE: 13.095 G_Regularizer: 0.000 validation_error: 20.986 +(epoch: 33, iters: 271936, time: 0.543, data: 0.000) G_L1: 13.815 G_L1_ABSOLUTE: 2.195 G_L1_RELATIVE: 11.620 G_Regularizer: 0.000 validation_error: 20.741 +(epoch: 33, iters: 273936, time: 0.538, data: 0.000) G_L1: 16.555 G_L1_ABSOLUTE: 2.521 G_L1_RELATIVE: 14.033 G_Regularizer: 0.000 validation_error: 20.767 +(epoch: 33, iters: 275936, time: 0.548, data: 0.000) G_L1: 15.634 G_L1_ABSOLUTE: 2.574 G_L1_RELATIVE: 13.060 G_Regularizer: 0.000 validation_error: 20.625 +(epoch: 33, iters: 277936, time: 0.542, data: 0.000) G_L1: 13.981 G_L1_ABSOLUTE: 2.491 G_L1_RELATIVE: 11.490 G_Regularizer: 0.000 validation_error: 21.032 +(epoch: 33, iters: 279936, time: 0.540, data: 0.000) G_L1: 16.058 G_L1_ABSOLUTE: 2.686 G_L1_RELATIVE: 13.372 G_Regularizer: 0.000 validation_error: 20.660 +(epoch: 33, iters: 281936, time: 0.552, data: 0.000) G_L1: 14.263 G_L1_ABSOLUTE: 2.555 G_L1_RELATIVE: 11.707 G_Regularizer: 0.000 validation_error: 20.355 +(epoch: 33, iters: 283936, time: 0.545, data: 0.000) G_L1: 17.923 G_L1_ABSOLUTE: 2.480 G_L1_RELATIVE: 15.443 G_Regularizer: 0.000 validation_error: 20.141 +(epoch: 33, iters: 285936, time: 0.551, data: 0.001) G_L1: 11.887 G_L1_ABSOLUTE: 1.917 G_L1_RELATIVE: 9.970 G_Regularizer: 0.000 validation_error: 20.588 +(epoch: 33, iters: 287936, time: 0.550, data: 0.000) G_L1: 12.751 G_L1_ABSOLUTE: 2.320 G_L1_RELATIVE: 10.431 G_Regularizer: 0.000 validation_error: 20.350 +(epoch: 33, iters: 289936, time: 0.553, data: 0.000) G_L1: 13.549 G_L1_ABSOLUTE: 2.508 G_L1_RELATIVE: 11.041 G_Regularizer: 0.000 validation_error: 20.995 +(epoch: 33, iters: 291936, time: 0.542, data: 0.000) G_L1: 14.457 G_L1_ABSOLUTE: 2.215 G_L1_RELATIVE: 12.242 G_Regularizer: 0.000 validation_error: 20.367 +(epoch: 33, iters: 293936, time: 0.544, data: 0.000) G_L1: 16.487 G_L1_ABSOLUTE: 2.092 G_L1_RELATIVE: 14.394 G_Regularizer: 0.000 validation_error: 20.445 +(epoch: 33, iters: 295936, time: 0.548, data: 0.001) G_L1: 13.280 G_L1_ABSOLUTE: 2.634 G_L1_RELATIVE: 10.646 G_Regularizer: 0.000 validation_error: 20.554 +(epoch: 33, iters: 297936, time: 0.546, data: 0.000) G_L1: 15.739 G_L1_ABSOLUTE: 2.655 G_L1_RELATIVE: 13.083 G_Regularizer: 0.000 validation_error: 20.799 +(epoch: 33, iters: 299936, time: 0.546, data: 0.000) G_L1: 14.956 G_L1_ABSOLUTE: 2.533 G_L1_RELATIVE: 12.422 G_Regularizer: 0.000 validation_error: 20.673 +(epoch: 33, iters: 301936, time: 0.546, data: 0.000) G_L1: 12.875 G_L1_ABSOLUTE: 2.111 G_L1_RELATIVE: 10.764 G_Regularizer: 0.000 validation_error: 21.353 +(epoch: 34, iters: 1184, time: 0.548, data: 0.000) G_L1: 15.059 G_L1_ABSOLUTE: 2.510 G_L1_RELATIVE: 12.549 G_Regularizer: 0.000 validation_error: 20.452 +(epoch: 34, iters: 3184, time: 0.548, data: 0.000) G_L1: 15.093 G_L1_ABSOLUTE: 2.269 G_L1_RELATIVE: 12.825 G_Regularizer: 0.000 validation_error: 20.701 +(epoch: 34, iters: 5184, time: 0.546, data: 0.000) G_L1: 15.700 G_L1_ABSOLUTE: 
2.730 G_L1_RELATIVE: 12.969 G_Regularizer: 0.000 validation_error: 20.665 +(epoch: 34, iters: 7184, time: 0.544, data: 0.000) G_L1: 14.791 G_L1_ABSOLUTE: 2.761 G_L1_RELATIVE: 12.030 G_Regularizer: 0.000 validation_error: 20.731 +(epoch: 34, iters: 9184, time: 0.548, data: 0.000) G_L1: 17.464 G_L1_ABSOLUTE: 2.725 G_L1_RELATIVE: 14.739 G_Regularizer: 0.000 validation_error: 20.687 +(epoch: 34, iters: 11184, time: 0.545, data: 0.000) G_L1: 16.011 G_L1_ABSOLUTE: 2.457 G_L1_RELATIVE: 13.554 G_Regularizer: 0.000 validation_error: 20.906 +(epoch: 34, iters: 13184, time: 0.549, data: 0.000) G_L1: 17.306 G_L1_ABSOLUTE: 2.631 G_L1_RELATIVE: 14.675 G_Regularizer: 0.000 validation_error: 20.656 +(epoch: 34, iters: 15184, time: 0.547, data: 0.000) G_L1: 16.258 G_L1_ABSOLUTE: 2.596 G_L1_RELATIVE: 13.662 G_Regularizer: 0.000 validation_error: 20.587 +(epoch: 34, iters: 17184, time: 0.542, data: 0.000) G_L1: 15.093 G_L1_ABSOLUTE: 2.516 G_L1_RELATIVE: 12.576 G_Regularizer: 0.000 validation_error: 21.060 +(epoch: 34, iters: 19184, time: 0.547, data: 0.000) G_L1: 12.898 G_L1_ABSOLUTE: 2.379 G_L1_RELATIVE: 10.519 G_Regularizer: 0.000 validation_error: 20.657 +(epoch: 34, iters: 21184, time: 0.547, data: 0.000) G_L1: 14.618 G_L1_ABSOLUTE: 2.439 G_L1_RELATIVE: 12.179 G_Regularizer: 0.000 validation_error: 21.031 +(epoch: 34, iters: 23184, time: 0.551, data: 0.000) G_L1: 14.822 G_L1_ABSOLUTE: 2.358 G_L1_RELATIVE: 12.464 G_Regularizer: 0.000 validation_error: 21.133 +(epoch: 34, iters: 25184, time: 0.544, data: 0.000) G_L1: 15.176 G_L1_ABSOLUTE: 2.995 G_L1_RELATIVE: 12.181 G_Regularizer: 0.000 validation_error: 20.549 +(epoch: 34, iters: 27184, time: 0.547, data: 0.000) G_L1: 16.341 G_L1_ABSOLUTE: 2.851 G_L1_RELATIVE: 13.490 G_Regularizer: 0.000 validation_error: 20.652 +(epoch: 34, iters: 29184, time: 0.552, data: 0.000) G_L1: 12.301 G_L1_ABSOLUTE: 2.320 G_L1_RELATIVE: 9.981 G_Regularizer: 0.000 validation_error: 20.261 +(epoch: 34, iters: 31184, time: 0.547, data: 0.000) G_L1: 14.375 G_L1_ABSOLUTE: 2.729 G_L1_RELATIVE: 11.646 G_Regularizer: 0.000 validation_error: 20.669 +(epoch: 34, iters: 33184, time: 0.547, data: 0.000) G_L1: 15.799 G_L1_ABSOLUTE: 2.453 G_L1_RELATIVE: 13.346 G_Regularizer: 0.000 validation_error: 20.635 +(epoch: 34, iters: 35184, time: 0.546, data: 0.000) G_L1: 14.437 G_L1_ABSOLUTE: 2.631 G_L1_RELATIVE: 11.806 G_Regularizer: 0.000 validation_error: 20.594 +(epoch: 34, iters: 37184, time: 0.536, data: 0.000) G_L1: 11.891 G_L1_ABSOLUTE: 2.225 G_L1_RELATIVE: 9.666 G_Regularizer: 0.000 validation_error: 20.839 +(epoch: 34, iters: 39184, time: 0.550, data: 0.000) G_L1: 14.314 G_L1_ABSOLUTE: 2.474 G_L1_RELATIVE: 11.840 G_Regularizer: 0.000 validation_error: 20.521 +(epoch: 34, iters: 41184, time: 0.537, data: 0.000) G_L1: 15.686 G_L1_ABSOLUTE: 2.901 G_L1_RELATIVE: 12.785 G_Regularizer: 0.000 validation_error: 20.693 +(epoch: 34, iters: 43184, time: 0.551, data: 0.000) G_L1: 15.393 G_L1_ABSOLUTE: 2.878 G_L1_RELATIVE: 12.514 G_Regularizer: 0.000 validation_error: 20.347 +(epoch: 34, iters: 45184, time: 0.544, data: 0.000) G_L1: 15.652 G_L1_ABSOLUTE: 2.504 G_L1_RELATIVE: 13.148 G_Regularizer: 0.000 validation_error: 20.462 +(epoch: 34, iters: 47184, time: 0.546, data: 0.000) G_L1: 13.669 G_L1_ABSOLUTE: 2.572 G_L1_RELATIVE: 11.097 G_Regularizer: 0.000 validation_error: 20.546 +(epoch: 34, iters: 49184, time: 0.548, data: 0.000) G_L1: 11.164 G_L1_ABSOLUTE: 2.088 G_L1_RELATIVE: 9.076 G_Regularizer: 0.000 validation_error: 20.449 +(epoch: 34, iters: 51184, time: 0.538, data: 0.000) G_L1: 12.824 
G_L1_ABSOLUTE: 2.247 G_L1_RELATIVE: 10.577 G_Regularizer: 0.000 validation_error: 20.624 +(epoch: 34, iters: 53184, time: 0.551, data: 0.000) G_L1: 18.164 G_L1_ABSOLUTE: 2.530 G_L1_RELATIVE: 15.634 G_Regularizer: 0.000 validation_error: 20.475 +(epoch: 34, iters: 55184, time: 0.544, data: 0.000) G_L1: 16.480 G_L1_ABSOLUTE: 2.774 G_L1_RELATIVE: 13.706 G_Regularizer: 0.000 validation_error: 20.626 +(epoch: 34, iters: 57184, time: 0.550, data: 0.000) G_L1: 17.145 G_L1_ABSOLUTE: 2.526 G_L1_RELATIVE: 14.619 G_Regularizer: 0.000 validation_error: 20.431 +(epoch: 34, iters: 59184, time: 0.547, data: 0.000) G_L1: 13.581 G_L1_ABSOLUTE: 2.575 G_L1_RELATIVE: 11.006 G_Regularizer: 0.000 validation_error: 20.176 +(epoch: 34, iters: 61184, time: 0.543, data: 0.000) G_L1: 13.479 G_L1_ABSOLUTE: 2.418 G_L1_RELATIVE: 11.061 G_Regularizer: 0.000 validation_error: 21.035 +(epoch: 34, iters: 63184, time: 0.543, data: 0.001) G_L1: 14.007 G_L1_ABSOLUTE: 2.587 G_L1_RELATIVE: 11.420 G_Regularizer: 0.000 validation_error: 20.566 +(epoch: 34, iters: 65184, time: 0.545, data: 0.000) G_L1: 11.177 G_L1_ABSOLUTE: 1.836 G_L1_RELATIVE: 9.341 G_Regularizer: 0.000 validation_error: 20.820 +(epoch: 34, iters: 67184, time: 0.549, data: 0.000) G_L1: 13.192 G_L1_ABSOLUTE: 2.545 G_L1_RELATIVE: 10.647 G_Regularizer: 0.000 validation_error: 20.567 +(epoch: 34, iters: 69184, time: 0.552, data: 0.000) G_L1: 13.467 G_L1_ABSOLUTE: 2.301 G_L1_RELATIVE: 11.166 G_Regularizer: 0.000 validation_error: 21.206 +(epoch: 34, iters: 71184, time: 0.538, data: 0.000) G_L1: 22.214 G_L1_ABSOLUTE: 2.170 G_L1_RELATIVE: 20.045 G_Regularizer: 0.000 validation_error: 20.506 +(epoch: 34, iters: 73184, time: 0.551, data: 0.000) G_L1: 15.430 G_L1_ABSOLUTE: 2.222 G_L1_RELATIVE: 13.208 G_Regularizer: 0.000 validation_error: 20.887 +(epoch: 34, iters: 75184, time: 0.543, data: 0.001) G_L1: 11.367 G_L1_ABSOLUTE: 2.394 G_L1_RELATIVE: 8.973 G_Regularizer: 0.000 validation_error: 19.999 +(epoch: 34, iters: 77184, time: 0.543, data: 0.001) G_L1: 13.287 G_L1_ABSOLUTE: 2.277 G_L1_RELATIVE: 11.010 G_Regularizer: 0.000 validation_error: 20.755 +(epoch: 34, iters: 79184, time: 0.556, data: 0.000) G_L1: 15.083 G_L1_ABSOLUTE: 2.538 G_L1_RELATIVE: 12.545 G_Regularizer: 0.000 validation_error: 20.891 +(epoch: 34, iters: 81184, time: 0.549, data: 0.000) G_L1: 14.232 G_L1_ABSOLUTE: 2.496 G_L1_RELATIVE: 11.736 G_Regularizer: 0.000 validation_error: 20.913 +(epoch: 34, iters: 83184, time: 0.545, data: 0.000) G_L1: 12.045 G_L1_ABSOLUTE: 2.357 G_L1_RELATIVE: 9.688 G_Regularizer: 0.000 validation_error: 20.740 +(epoch: 34, iters: 85184, time: 0.543, data: 0.000) G_L1: 11.533 G_L1_ABSOLUTE: 2.659 G_L1_RELATIVE: 8.874 G_Regularizer: 0.000 validation_error: 20.309 +(epoch: 34, iters: 87184, time: 0.550, data: 0.000) G_L1: 16.227 G_L1_ABSOLUTE: 2.760 G_L1_RELATIVE: 13.467 G_Regularizer: 0.000 validation_error: 20.386 +(epoch: 34, iters: 89184, time: 0.550, data: 0.000) G_L1: 13.429 G_L1_ABSOLUTE: 2.688 G_L1_RELATIVE: 10.741 G_Regularizer: 0.000 validation_error: 20.672 +(epoch: 34, iters: 91184, time: 0.556, data: 0.000) G_L1: 16.072 G_L1_ABSOLUTE: 2.416 G_L1_RELATIVE: 13.657 G_Regularizer: 0.000 validation_error: 20.612 +(epoch: 34, iters: 93184, time: 0.542, data: 0.000) G_L1: 14.348 G_L1_ABSOLUTE: 2.814 G_L1_RELATIVE: 11.534 G_Regularizer: 0.000 validation_error: 20.359 +(epoch: 34, iters: 95184, time: 0.545, data: 0.000) G_L1: 13.312 G_L1_ABSOLUTE: 2.171 G_L1_RELATIVE: 11.140 G_Regularizer: 0.000 validation_error: 20.706 +(epoch: 34, iters: 97184, time: 0.545, data: 0.000) G_L1: 
16.660 G_L1_ABSOLUTE: 2.660 G_L1_RELATIVE: 14.000 G_Regularizer: 0.000 validation_error: 20.793 +(epoch: 34, iters: 99184, time: 0.559, data: 0.000) G_L1: 13.822 G_L1_ABSOLUTE: 2.540 G_L1_RELATIVE: 11.282 G_Regularizer: 0.000 validation_error: 20.460 +(epoch: 34, iters: 101184, time: 0.550, data: 0.000) G_L1: 13.258 G_L1_ABSOLUTE: 2.350 G_L1_RELATIVE: 10.908 G_Regularizer: 0.000 validation_error: 20.412 +(epoch: 34, iters: 103184, time: 0.553, data: 0.000) G_L1: 13.720 G_L1_ABSOLUTE: 2.506 G_L1_RELATIVE: 11.214 G_Regularizer: 0.000 validation_error: 21.352 +(epoch: 34, iters: 105184, time: 0.539, data: 0.000) G_L1: 14.298 G_L1_ABSOLUTE: 2.367 G_L1_RELATIVE: 11.931 G_Regularizer: 0.000 validation_error: 20.267 +(epoch: 34, iters: 107184, time: 0.563, data: 0.000) G_L1: 17.242 G_L1_ABSOLUTE: 3.000 G_L1_RELATIVE: 14.242 G_Regularizer: 0.000 validation_error: 20.878 +(epoch: 34, iters: 109184, time: 0.547, data: 0.001) G_L1: 14.901 G_L1_ABSOLUTE: 3.010 G_L1_RELATIVE: 11.891 G_Regularizer: 0.000 validation_error: 20.586 +(epoch: 34, iters: 111184, time: 0.539, data: 0.000) G_L1: 13.142 G_L1_ABSOLUTE: 2.692 G_L1_RELATIVE: 10.449 G_Regularizer: 0.000 validation_error: 20.900 +(epoch: 34, iters: 113184, time: 0.545, data: 0.000) G_L1: 11.338 G_L1_ABSOLUTE: 2.367 G_L1_RELATIVE: 8.971 G_Regularizer: 0.000 validation_error: 20.786 +(epoch: 34, iters: 115184, time: 0.553, data: 0.000) G_L1: 13.905 G_L1_ABSOLUTE: 2.282 G_L1_RELATIVE: 11.622 G_Regularizer: 0.000 validation_error: 20.718 +(epoch: 34, iters: 117184, time: 0.540, data: 0.000) G_L1: 15.692 G_L1_ABSOLUTE: 2.255 G_L1_RELATIVE: 13.437 G_Regularizer: 0.000 validation_error: 20.455 +(epoch: 34, iters: 119184, time: 0.548, data: 0.000) G_L1: 12.918 G_L1_ABSOLUTE: 2.523 G_L1_RELATIVE: 10.395 G_Regularizer: 0.000 validation_error: 20.601 +(epoch: 34, iters: 121184, time: 0.547, data: 0.000) G_L1: 15.171 G_L1_ABSOLUTE: 2.926 G_L1_RELATIVE: 12.245 G_Regularizer: 0.000 validation_error: 20.844 +(epoch: 34, iters: 123184, time: 0.541, data: 0.000) G_L1: 10.889 G_L1_ABSOLUTE: 1.943 G_L1_RELATIVE: 8.945 G_Regularizer: 0.000 validation_error: 20.569 +(epoch: 34, iters: 125184, time: 0.541, data: 0.000) G_L1: 15.342 G_L1_ABSOLUTE: 2.738 G_L1_RELATIVE: 12.603 G_Regularizer: 0.000 validation_error: 20.602 +(epoch: 34, iters: 127184, time: 0.543, data: 0.000) G_L1: 14.748 G_L1_ABSOLUTE: 2.621 G_L1_RELATIVE: 12.128 G_Regularizer: 0.000 validation_error: 20.888 +(epoch: 34, iters: 129184, time: 0.548, data: 0.000) G_L1: 15.600 G_L1_ABSOLUTE: 2.733 G_L1_RELATIVE: 12.867 G_Regularizer: 0.000 validation_error: 21.193 +(epoch: 34, iters: 131184, time: 0.548, data: 0.000) G_L1: 15.209 G_L1_ABSOLUTE: 2.145 G_L1_RELATIVE: 13.064 G_Regularizer: 0.000 validation_error: 20.852 +(epoch: 34, iters: 133184, time: 0.553, data: 0.000) G_L1: 14.867 G_L1_ABSOLUTE: 2.668 G_L1_RELATIVE: 12.199 G_Regularizer: 0.000 validation_error: 20.539 +(epoch: 34, iters: 135184, time: 0.542, data: 0.000) G_L1: 13.998 G_L1_ABSOLUTE: 2.219 G_L1_RELATIVE: 11.779 G_Regularizer: 0.000 validation_error: 20.718 +(epoch: 34, iters: 137184, time: 0.543, data: 0.000) G_L1: 15.199 G_L1_ABSOLUTE: 2.369 G_L1_RELATIVE: 12.830 G_Regularizer: 0.000 validation_error: 20.399 +(epoch: 34, iters: 139184, time: 0.549, data: 0.000) G_L1: 15.387 G_L1_ABSOLUTE: 2.467 G_L1_RELATIVE: 12.920 G_Regularizer: 0.000 validation_error: 20.858 +(epoch: 34, iters: 141184, time: 0.554, data: 0.000) G_L1: 12.215 G_L1_ABSOLUTE: 2.471 G_L1_RELATIVE: 9.744 G_Regularizer: 0.000 validation_error: 20.514 +(epoch: 34, iters: 143184, 
time: 0.544, data: 0.000) G_L1: 13.411 G_L1_ABSOLUTE: 2.694 G_L1_RELATIVE: 10.717 G_Regularizer: 0.000 validation_error: 20.515 +(epoch: 34, iters: 145184, time: 0.545, data: 0.000) G_L1: 11.331 G_L1_ABSOLUTE: 2.312 G_L1_RELATIVE: 9.018 G_Regularizer: 0.000 validation_error: 20.903 +(epoch: 34, iters: 147184, time: 0.547, data: 0.000) G_L1: 14.001 G_L1_ABSOLUTE: 2.502 G_L1_RELATIVE: 11.499 G_Regularizer: 0.000 validation_error: 20.455 +(epoch: 34, iters: 149184, time: 0.541, data: 0.000) G_L1: 15.105 G_L1_ABSOLUTE: 2.318 G_L1_RELATIVE: 12.787 G_Regularizer: 0.000 validation_error: 20.884 +(epoch: 34, iters: 151184, time: 0.544, data: 0.000) G_L1: 12.803 G_L1_ABSOLUTE: 2.196 G_L1_RELATIVE: 10.607 G_Regularizer: 0.000 validation_error: 20.567 +(epoch: 34, iters: 153184, time: 0.548, data: 0.000) G_L1: 15.405 G_L1_ABSOLUTE: 2.761 G_L1_RELATIVE: 12.644 G_Regularizer: 0.000 validation_error: 21.112 +(epoch: 34, iters: 155184, time: 0.547, data: 0.000) G_L1: 13.326 G_L1_ABSOLUTE: 2.528 G_L1_RELATIVE: 10.798 G_Regularizer: 0.000 validation_error: 21.130 +(epoch: 34, iters: 157184, time: 0.547, data: 0.000) G_L1: 13.622 G_L1_ABSOLUTE: 2.266 G_L1_RELATIVE: 11.356 G_Regularizer: 0.000 validation_error: 20.214 +(epoch: 34, iters: 159184, time: 0.552, data: 0.000) G_L1: 12.488 G_L1_ABSOLUTE: 2.462 G_L1_RELATIVE: 10.027 G_Regularizer: 0.000 validation_error: 20.626 +(epoch: 34, iters: 161184, time: 0.544, data: 0.001) G_L1: 13.222 G_L1_ABSOLUTE: 2.221 G_L1_RELATIVE: 11.000 G_Regularizer: 0.000 validation_error: 20.814 +(epoch: 34, iters: 163184, time: 0.549, data: 0.000) G_L1: 14.559 G_L1_ABSOLUTE: 2.209 G_L1_RELATIVE: 12.350 G_Regularizer: 0.000 validation_error: 20.851 +(epoch: 34, iters: 165184, time: 0.544, data: 0.001) G_L1: 15.095 G_L1_ABSOLUTE: 2.360 G_L1_RELATIVE: 12.735 G_Regularizer: 0.000 validation_error: 20.625 +(epoch: 34, iters: 167184, time: 0.550, data: 0.001) G_L1: 16.368 G_L1_ABSOLUTE: 2.757 G_L1_RELATIVE: 13.611 G_Regularizer: 0.000 validation_error: 21.242 +(epoch: 34, iters: 169184, time: 0.545, data: 0.000) G_L1: 11.490 G_L1_ABSOLUTE: 2.431 G_L1_RELATIVE: 9.059 G_Regularizer: 0.000 validation_error: 20.427 +(epoch: 34, iters: 171184, time: 0.550, data: 0.000) G_L1: 14.567 G_L1_ABSOLUTE: 2.702 G_L1_RELATIVE: 11.865 G_Regularizer: 0.000 validation_error: 20.485 +(epoch: 34, iters: 173184, time: 0.559, data: 0.000) G_L1: 16.130 G_L1_ABSOLUTE: 2.526 G_L1_RELATIVE: 13.604 G_Regularizer: 0.000 validation_error: 20.821 +(epoch: 34, iters: 175184, time: 0.544, data: 0.000) G_L1: 12.206 G_L1_ABSOLUTE: 2.402 G_L1_RELATIVE: 9.803 G_Regularizer: 0.000 validation_error: 21.013 +(epoch: 34, iters: 177184, time: 0.554, data: 0.000) G_L1: 14.025 G_L1_ABSOLUTE: 2.808 G_L1_RELATIVE: 11.217 G_Regularizer: 0.000 validation_error: 21.048 +(epoch: 34, iters: 179184, time: 0.541, data: 0.000) G_L1: 12.317 G_L1_ABSOLUTE: 2.238 G_L1_RELATIVE: 10.079 G_Regularizer: 0.000 validation_error: 20.844 +(epoch: 34, iters: 181184, time: 0.549, data: 0.000) G_L1: 12.714 G_L1_ABSOLUTE: 2.368 G_L1_RELATIVE: 10.346 G_Regularizer: 0.000 validation_error: 20.701 +(epoch: 34, iters: 183184, time: 0.543, data: 0.000) G_L1: 14.825 G_L1_ABSOLUTE: 2.609 G_L1_RELATIVE: 12.216 G_Regularizer: 0.000 validation_error: 21.106 +(epoch: 34, iters: 185184, time: 0.539, data: 0.000) G_L1: 17.890 G_L1_ABSOLUTE: 2.836 G_L1_RELATIVE: 15.053 G_Regularizer: 0.000 validation_error: 20.726 +(epoch: 34, iters: 187184, time: 0.543, data: 0.000) G_L1: 14.988 G_L1_ABSOLUTE: 2.621 G_L1_RELATIVE: 12.366 G_Regularizer: 0.000 validation_error: 
20.665 +(epoch: 34, iters: 189184, time: 0.550, data: 0.000) G_L1: 14.471 G_L1_ABSOLUTE: 2.711 G_L1_RELATIVE: 11.760 G_Regularizer: 0.000 validation_error: 20.363 +(epoch: 34, iters: 191184, time: 0.544, data: 0.000) G_L1: 13.362 G_L1_ABSOLUTE: 3.077 G_L1_RELATIVE: 10.284 G_Regularizer: 0.000 validation_error: 20.799 +(epoch: 34, iters: 193184, time: 0.547, data: 0.001) G_L1: 14.773 G_L1_ABSOLUTE: 3.029 G_L1_RELATIVE: 11.744 G_Regularizer: 0.000 validation_error: 21.097 +(epoch: 34, iters: 195184, time: 0.554, data: 0.000) G_L1: 16.561 G_L1_ABSOLUTE: 2.692 G_L1_RELATIVE: 13.869 G_Regularizer: 0.000 validation_error: 20.945 +(epoch: 34, iters: 197184, time: 0.542, data: 0.000) G_L1: 14.922 G_L1_ABSOLUTE: 2.223 G_L1_RELATIVE: 12.699 G_Regularizer: 0.000 validation_error: 20.464 +(epoch: 34, iters: 199184, time: 0.550, data: 0.000) G_L1: 15.048 G_L1_ABSOLUTE: 3.136 G_L1_RELATIVE: 11.912 G_Regularizer: 0.000 validation_error: 21.065 +(epoch: 34, iters: 201184, time: 0.546, data: 0.000) G_L1: 16.401 G_L1_ABSOLUTE: 2.343 G_L1_RELATIVE: 14.058 G_Regularizer: 0.000 validation_error: 20.811 +(epoch: 34, iters: 203184, time: 0.543, data: 0.000) G_L1: 15.534 G_L1_ABSOLUTE: 3.041 G_L1_RELATIVE: 12.493 G_Regularizer: 0.000 validation_error: 20.836 +(epoch: 34, iters: 205184, time: 0.551, data: 0.000) G_L1: 13.815 G_L1_ABSOLUTE: 2.003 G_L1_RELATIVE: 11.812 G_Regularizer: 0.000 validation_error: 20.949 +(epoch: 34, iters: 207184, time: 0.556, data: 0.000) G_L1: 15.092 G_L1_ABSOLUTE: 2.551 G_L1_RELATIVE: 12.542 G_Regularizer: 0.000 validation_error: 20.656 +(epoch: 34, iters: 209184, time: 0.543, data: 0.000) G_L1: 13.630 G_L1_ABSOLUTE: 2.329 G_L1_RELATIVE: 11.301 G_Regularizer: 0.000 validation_error: 20.981 +(epoch: 34, iters: 211184, time: 0.542, data: 0.000) G_L1: 14.107 G_L1_ABSOLUTE: 2.200 G_L1_RELATIVE: 11.907 G_Regularizer: 0.000 validation_error: 20.936 +(epoch: 34, iters: 213184, time: 0.541, data: 0.000) G_L1: 16.203 G_L1_ABSOLUTE: 2.777 G_L1_RELATIVE: 13.426 G_Regularizer: 0.000 validation_error: 20.789 +(epoch: 34, iters: 215184, time: 0.555, data: 0.000) G_L1: 12.858 G_L1_ABSOLUTE: 1.922 G_L1_RELATIVE: 10.935 G_Regularizer: 0.000 validation_error: 20.816 +(epoch: 34, iters: 217184, time: 0.547, data: 0.000) G_L1: 15.288 G_L1_ABSOLUTE: 3.170 G_L1_RELATIVE: 12.118 G_Regularizer: 0.000 validation_error: 21.492 +(epoch: 34, iters: 219184, time: 0.560, data: 0.000) G_L1: 13.499 G_L1_ABSOLUTE: 2.332 G_L1_RELATIVE: 11.167 G_Regularizer: 0.000 validation_error: 20.552 +(epoch: 34, iters: 221184, time: 0.541, data: 0.000) G_L1: 13.337 G_L1_ABSOLUTE: 2.437 G_L1_RELATIVE: 10.900 G_Regularizer: 0.000 validation_error: 21.290 +(epoch: 34, iters: 223184, time: 0.550, data: 0.000) G_L1: 15.404 G_L1_ABSOLUTE: 2.694 G_L1_RELATIVE: 12.710 G_Regularizer: 0.000 validation_error: 20.922 +(epoch: 34, iters: 225184, time: 0.558, data: 0.000) G_L1: 14.200 G_L1_ABSOLUTE: 2.462 G_L1_RELATIVE: 11.737 G_Regularizer: 0.000 validation_error: 20.498 +(epoch: 34, iters: 227184, time: 0.556, data: 0.000) G_L1: 14.652 G_L1_ABSOLUTE: 2.451 G_L1_RELATIVE: 12.201 G_Regularizer: 0.000 validation_error: 20.614 +(epoch: 34, iters: 229184, time: 0.546, data: 0.000) G_L1: 12.858 G_L1_ABSOLUTE: 2.863 G_L1_RELATIVE: 9.995 G_Regularizer: 0.000 validation_error: 21.277 +(epoch: 34, iters: 231184, time: 0.543, data: 0.000) G_L1: 13.539 G_L1_ABSOLUTE: 2.303 G_L1_RELATIVE: 11.236 G_Regularizer: 0.000 validation_error: 20.814 +(epoch: 34, iters: 233184, time: 0.547, data: 0.000) G_L1: 14.691 G_L1_ABSOLUTE: 2.533 G_L1_RELATIVE: 12.158 
G_Regularizer: 0.000 validation_error: 20.492
+[... several hundred further per-iteration loss-log entries of the same form, covering epochs 34-37, omitted: G_L1 stays roughly in the 10-27 range, G_Regularizer is 0.000 throughout, and validation_error hovers around 20-21 ...]
+(epoch: 37, iters: 282928, time: 0.542, data: 0.000) G_L1: 13.533 G_L1_ABSOLUTE: 2.872 G_L1_RELATIVE: 10.661 G_Regularizer: 0.000
validation_error: 20.480 +(epoch: 37, iters: 284928, time: 0.547, data: 0.000) G_L1: 13.805 G_L1_ABSOLUTE: 2.621 G_L1_RELATIVE: 11.185 G_Regularizer: 0.000 validation_error: 20.942 +(epoch: 37, iters: 286928, time: 0.540, data: 0.000) G_L1: 13.148 G_L1_ABSOLUTE: 2.249 G_L1_RELATIVE: 10.899 G_Regularizer: 0.000 validation_error: 20.903 +(epoch: 37, iters: 288928, time: 0.545, data: 0.001) G_L1: 14.926 G_L1_ABSOLUTE: 2.410 G_L1_RELATIVE: 12.516 G_Regularizer: 0.000 validation_error: 21.186 +(epoch: 37, iters: 290928, time: 0.541, data: 0.000) G_L1: 12.482 G_L1_ABSOLUTE: 2.349 G_L1_RELATIVE: 10.134 G_Regularizer: 0.000 validation_error: 20.584 +(epoch: 37, iters: 292928, time: 0.548, data: 0.000) G_L1: 10.319 G_L1_ABSOLUTE: 2.453 G_L1_RELATIVE: 7.866 G_Regularizer: 0.000 validation_error: 20.395 +(epoch: 37, iters: 294928, time: 0.546, data: 0.000) G_L1: 12.993 G_L1_ABSOLUTE: 2.418 G_L1_RELATIVE: 10.575 G_Regularizer: 0.000 validation_error: 20.502 +(epoch: 37, iters: 296928, time: 0.549, data: 0.001) G_L1: 13.037 G_L1_ABSOLUTE: 2.141 G_L1_RELATIVE: 10.896 G_Regularizer: 0.000 validation_error: 20.658 +(epoch: 37, iters: 298928, time: 0.548, data: 0.000) G_L1: 12.847 G_L1_ABSOLUTE: 2.641 G_L1_RELATIVE: 10.207 G_Regularizer: 0.000 validation_error: 21.357 +(epoch: 37, iters: 300928, time: 0.548, data: 0.000) G_L1: 14.922 G_L1_ABSOLUTE: 2.531 G_L1_RELATIVE: 12.391 G_Regularizer: 0.000 validation_error: 20.658 +(epoch: 38, iters: 176, time: 0.547, data: 0.000) G_L1: 15.220 G_L1_ABSOLUTE: 2.898 G_L1_RELATIVE: 12.323 G_Regularizer: 0.000 validation_error: 21.216 +(epoch: 38, iters: 2176, time: 0.552, data: 0.001) G_L1: 13.925 G_L1_ABSOLUTE: 2.545 G_L1_RELATIVE: 11.380 G_Regularizer: 0.000 validation_error: 21.018 +(epoch: 38, iters: 4176, time: 0.546, data: 0.000) G_L1: 14.061 G_L1_ABSOLUTE: 2.466 G_L1_RELATIVE: 11.595 G_Regularizer: 0.000 validation_error: 21.230 +(epoch: 38, iters: 6176, time: 0.544, data: 0.000) G_L1: 12.640 G_L1_ABSOLUTE: 2.332 G_L1_RELATIVE: 10.307 G_Regularizer: 0.000 validation_error: 20.613 +(epoch: 38, iters: 8176, time: 0.548, data: 0.000) G_L1: 15.937 G_L1_ABSOLUTE: 2.716 G_L1_RELATIVE: 13.221 G_Regularizer: 0.000 validation_error: 20.719 +(epoch: 38, iters: 10176, time: 0.547, data: 0.000) G_L1: 12.983 G_L1_ABSOLUTE: 2.647 G_L1_RELATIVE: 10.336 G_Regularizer: 0.000 validation_error: 20.941 +(epoch: 38, iters: 12176, time: 0.543, data: 0.000) G_L1: 13.620 G_L1_ABSOLUTE: 2.399 G_L1_RELATIVE: 11.221 G_Regularizer: 0.000 validation_error: 20.323 +(epoch: 38, iters: 14176, time: 0.549, data: 0.000) G_L1: 11.918 G_L1_ABSOLUTE: 2.336 G_L1_RELATIVE: 9.582 G_Regularizer: 0.000 validation_error: 21.061 +(epoch: 38, iters: 16176, time: 0.555, data: 0.000) G_L1: 14.488 G_L1_ABSOLUTE: 2.314 G_L1_RELATIVE: 12.173 G_Regularizer: 0.000 validation_error: 20.841 +(epoch: 38, iters: 18176, time: 0.541, data: 0.000) G_L1: 17.008 G_L1_ABSOLUTE: 2.314 G_L1_RELATIVE: 14.693 G_Regularizer: 0.000 validation_error: 20.512 +(epoch: 38, iters: 20176, time: 0.552, data: 0.000) G_L1: 15.017 G_L1_ABSOLUTE: 2.730 G_L1_RELATIVE: 12.287 G_Regularizer: 0.000 validation_error: 20.705 +(epoch: 38, iters: 22176, time: 0.540, data: 0.000) G_L1: 12.597 G_L1_ABSOLUTE: 2.501 G_L1_RELATIVE: 10.096 G_Regularizer: 0.000 validation_error: 20.877 +(epoch: 38, iters: 24176, time: 0.548, data: 0.000) G_L1: 14.606 G_L1_ABSOLUTE: 2.591 G_L1_RELATIVE: 12.015 G_Regularizer: 0.000 validation_error: 20.457 +(epoch: 38, iters: 26176, time: 0.551, data: 0.001) G_L1: 14.208 G_L1_ABSOLUTE: 2.188 G_L1_RELATIVE: 12.020 
G_Regularizer: 0.000 validation_error: 21.010 +(epoch: 38, iters: 28176, time: 0.552, data: 0.000) G_L1: 16.070 G_L1_ABSOLUTE: 2.551 G_L1_RELATIVE: 13.519 G_Regularizer: 0.000 validation_error: 20.982 +(epoch: 38, iters: 30176, time: 0.547, data: 0.000) G_L1: 15.815 G_L1_ABSOLUTE: 2.842 G_L1_RELATIVE: 12.972 G_Regularizer: 0.000 validation_error: 21.146 +(epoch: 38, iters: 32176, time: 0.550, data: 0.000) G_L1: 13.450 G_L1_ABSOLUTE: 2.386 G_L1_RELATIVE: 11.064 G_Regularizer: 0.000 validation_error: 20.603 +(epoch: 38, iters: 34176, time: 0.564, data: 0.000) G_L1: 15.928 G_L1_ABSOLUTE: 2.372 G_L1_RELATIVE: 13.556 G_Regularizer: 0.000 validation_error: 20.770 +(epoch: 38, iters: 36176, time: 0.551, data: 0.000) G_L1: 14.883 G_L1_ABSOLUTE: 2.444 G_L1_RELATIVE: 12.439 G_Regularizer: 0.000 validation_error: 20.989 +(epoch: 38, iters: 38176, time: 0.550, data: 0.000) G_L1: 18.228 G_L1_ABSOLUTE: 3.090 G_L1_RELATIVE: 15.138 G_Regularizer: 0.000 validation_error: 20.903 +(epoch: 38, iters: 40176, time: 0.549, data: 0.000) G_L1: 18.201 G_L1_ABSOLUTE: 2.257 G_L1_RELATIVE: 15.943 G_Regularizer: 0.000 validation_error: 21.069 +(epoch: 38, iters: 42176, time: 0.552, data: 0.000) G_L1: 12.995 G_L1_ABSOLUTE: 2.371 G_L1_RELATIVE: 10.625 G_Regularizer: 0.000 validation_error: 20.852 +(epoch: 38, iters: 44176, time: 0.544, data: 0.000) G_L1: 11.206 G_L1_ABSOLUTE: 2.240 G_L1_RELATIVE: 8.966 G_Regularizer: 0.000 validation_error: 20.710 +(epoch: 38, iters: 46176, time: 0.546, data: 0.000) G_L1: 15.798 G_L1_ABSOLUTE: 2.773 G_L1_RELATIVE: 13.025 G_Regularizer: 0.000 validation_error: 20.374 +(epoch: 38, iters: 48176, time: 0.550, data: 0.001) G_L1: 16.572 G_L1_ABSOLUTE: 2.351 G_L1_RELATIVE: 14.221 G_Regularizer: 0.000 validation_error: 20.766 +(epoch: 38, iters: 50176, time: 0.548, data: 0.000) G_L1: 14.366 G_L1_ABSOLUTE: 2.449 G_L1_RELATIVE: 11.917 G_Regularizer: 0.000 validation_error: 20.815 +(epoch: 38, iters: 52176, time: 0.546, data: 0.000) G_L1: 15.267 G_L1_ABSOLUTE: 2.556 G_L1_RELATIVE: 12.711 G_Regularizer: 0.000 validation_error: 20.840 +(epoch: 38, iters: 54176, time: 0.555, data: 0.001) G_L1: 11.563 G_L1_ABSOLUTE: 2.029 G_L1_RELATIVE: 9.534 G_Regularizer: 0.000 validation_error: 20.532 +(epoch: 38, iters: 56176, time: 0.550, data: 0.000) G_L1: 13.524 G_L1_ABSOLUTE: 2.307 G_L1_RELATIVE: 11.218 G_Regularizer: 0.000 validation_error: 20.949 +(epoch: 38, iters: 58176, time: 0.549, data: 0.000) G_L1: 15.926 G_L1_ABSOLUTE: 2.998 G_L1_RELATIVE: 12.928 G_Regularizer: 0.000 validation_error: 20.916 +(epoch: 38, iters: 60176, time: 0.539, data: 0.000) G_L1: 13.947 G_L1_ABSOLUTE: 2.515 G_L1_RELATIVE: 11.431 G_Regularizer: 0.000 validation_error: 21.002 +(epoch: 38, iters: 62176, time: 0.554, data: 0.000) G_L1: 14.023 G_L1_ABSOLUTE: 2.335 G_L1_RELATIVE: 11.688 G_Regularizer: 0.000 validation_error: 20.842 +(epoch: 38, iters: 64176, time: 0.543, data: 0.000) G_L1: 14.440 G_L1_ABSOLUTE: 1.935 G_L1_RELATIVE: 12.505 G_Regularizer: 0.000 validation_error: 21.267 +(epoch: 38, iters: 66176, time: 0.544, data: 0.001) G_L1: 14.853 G_L1_ABSOLUTE: 2.784 G_L1_RELATIVE: 12.068 G_Regularizer: 0.000 validation_error: 21.002 +(epoch: 38, iters: 68176, time: 0.557, data: 0.000) G_L1: 14.449 G_L1_ABSOLUTE: 2.832 G_L1_RELATIVE: 11.617 G_Regularizer: 0.000 validation_error: 20.816 +(epoch: 38, iters: 70176, time: 0.547, data: 0.000) G_L1: 13.842 G_L1_ABSOLUTE: 2.629 G_L1_RELATIVE: 11.213 G_Regularizer: 0.000 validation_error: 20.643 +(epoch: 38, iters: 72176, time: 0.550, data: 0.000) G_L1: 14.803 G_L1_ABSOLUTE: 3.007 
G_L1_RELATIVE: 11.796 G_Regularizer: 0.000 validation_error: 20.978 +(epoch: 38, iters: 74176, time: 0.542, data: 0.000) G_L1: 12.441 G_L1_ABSOLUTE: 2.293 G_L1_RELATIVE: 10.147 G_Regularizer: 0.000 validation_error: 20.966 +(epoch: 38, iters: 76176, time: 0.552, data: 0.000) G_L1: 14.937 G_L1_ABSOLUTE: 2.407 G_L1_RELATIVE: 12.530 G_Regularizer: 0.000 validation_error: 21.077 +(epoch: 38, iters: 78176, time: 0.552, data: 0.000) G_L1: 12.647 G_L1_ABSOLUTE: 2.550 G_L1_RELATIVE: 10.097 G_Regularizer: 0.000 validation_error: 21.110 +(epoch: 38, iters: 80176, time: 0.558, data: 0.000) G_L1: 14.093 G_L1_ABSOLUTE: 2.669 G_L1_RELATIVE: 11.424 G_Regularizer: 0.000 validation_error: 20.789 +(epoch: 38, iters: 82176, time: 0.541, data: 0.000) G_L1: 13.347 G_L1_ABSOLUTE: 2.484 G_L1_RELATIVE: 10.862 G_Regularizer: 0.000 validation_error: 20.813 +(epoch: 38, iters: 84176, time: 0.588, data: 0.000) G_L1: 17.222 G_L1_ABSOLUTE: 2.678 G_L1_RELATIVE: 14.544 G_Regularizer: 0.000 validation_error: 20.682 +(epoch: 38, iters: 86176, time: 0.606, data: 0.000) G_L1: 19.185 G_L1_ABSOLUTE: 2.863 G_L1_RELATIVE: 16.322 G_Regularizer: 0.000 validation_error: 20.995 +(epoch: 38, iters: 88176, time: 0.588, data: 0.000) G_L1: 13.624 G_L1_ABSOLUTE: 2.164 G_L1_RELATIVE: 11.460 G_Regularizer: 0.000 validation_error: 20.514 +(epoch: 38, iters: 90176, time: 0.611, data: 0.000) G_L1: 10.437 G_L1_ABSOLUTE: 2.200 G_L1_RELATIVE: 8.237 G_Regularizer: 0.000 validation_error: 20.801 +(epoch: 38, iters: 92176, time: 0.608, data: 0.000) G_L1: 12.581 G_L1_ABSOLUTE: 2.290 G_L1_RELATIVE: 10.291 G_Regularizer: 0.000 validation_error: 21.365 +(epoch: 38, iters: 94176, time: 0.604, data: 0.000) G_L1: 12.218 G_L1_ABSOLUTE: 2.196 G_L1_RELATIVE: 10.021 G_Regularizer: 0.000 validation_error: 21.034 +(epoch: 38, iters: 96176, time: 0.610, data: 0.000) G_L1: 15.028 G_L1_ABSOLUTE: 2.139 G_L1_RELATIVE: 12.889 G_Regularizer: 0.000 validation_error: 21.223 +(epoch: 38, iters: 98176, time: 0.611, data: 0.000) G_L1: 16.481 G_L1_ABSOLUTE: 2.721 G_L1_RELATIVE: 13.761 G_Regularizer: 0.000 validation_error: 20.594 +(epoch: 38, iters: 100176, time: 0.567, data: 0.000) G_L1: 14.994 G_L1_ABSOLUTE: 2.375 G_L1_RELATIVE: 12.619 G_Regularizer: 0.000 validation_error: 20.650 +(epoch: 38, iters: 102176, time: 0.614, data: 0.000) G_L1: 16.942 G_L1_ABSOLUTE: 2.725 G_L1_RELATIVE: 14.217 G_Regularizer: 0.000 validation_error: 20.803 +(epoch: 38, iters: 104176, time: 0.608, data: 0.000) G_L1: 13.322 G_L1_ABSOLUTE: 2.333 G_L1_RELATIVE: 10.989 G_Regularizer: 0.000 validation_error: 20.751 +(epoch: 38, iters: 106176, time: 0.610, data: 0.000) G_L1: 14.850 G_L1_ABSOLUTE: 3.199 G_L1_RELATIVE: 11.652 G_Regularizer: 0.000 validation_error: 20.902 +(epoch: 38, iters: 108176, time: 0.594, data: 0.000) G_L1: 12.929 G_L1_ABSOLUTE: 2.412 G_L1_RELATIVE: 10.516 G_Regularizer: 0.000 validation_error: 20.934 +(epoch: 38, iters: 110176, time: 0.601, data: 0.000) G_L1: 14.459 G_L1_ABSOLUTE: 2.967 G_L1_RELATIVE: 11.493 G_Regularizer: 0.000 validation_error: 20.935 +(epoch: 38, iters: 112176, time: 0.606, data: 0.000) G_L1: 11.923 G_L1_ABSOLUTE: 2.062 G_L1_RELATIVE: 9.861 G_Regularizer: 0.000 validation_error: 20.849 +(epoch: 38, iters: 114176, time: 0.607, data: 0.000) G_L1: 15.638 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 13.184 G_Regularizer: 0.000 validation_error: 20.907 +(epoch: 38, iters: 116176, time: 0.608, data: 0.000) G_L1: 13.054 G_L1_ABSOLUTE: 2.235 G_L1_RELATIVE: 10.819 G_Regularizer: 0.000 validation_error: 20.546 +(epoch: 38, iters: 118176, time: 0.612, data: 0.000) G_L1: 13.862 
G_L1_ABSOLUTE: 2.720 G_L1_RELATIVE: 11.142 G_Regularizer: 0.000 validation_error: 21.039 +(epoch: 38, iters: 120176, time: 0.561, data: 0.000) G_L1: 12.793 G_L1_ABSOLUTE: 2.822 G_L1_RELATIVE: 9.971 G_Regularizer: 0.000 validation_error: 20.898 +(epoch: 38, iters: 122176, time: 0.604, data: 0.000) G_L1: 16.206 G_L1_ABSOLUTE: 2.309 G_L1_RELATIVE: 13.897 G_Regularizer: 0.000 validation_error: 21.165 +(epoch: 38, iters: 124176, time: 0.578, data: 0.000) G_L1: 14.435 G_L1_ABSOLUTE: 3.003 G_L1_RELATIVE: 11.432 G_Regularizer: 0.000 validation_error: 20.695 +(epoch: 38, iters: 126176, time: 0.607, data: 0.001) G_L1: 14.751 G_L1_ABSOLUTE: 2.706 G_L1_RELATIVE: 12.045 G_Regularizer: 0.000 validation_error: 20.532 +(epoch: 38, iters: 128176, time: 0.608, data: 0.000) G_L1: 15.299 G_L1_ABSOLUTE: 2.661 G_L1_RELATIVE: 12.638 G_Regularizer: 0.000 validation_error: 20.691 +(epoch: 38, iters: 130176, time: 0.608, data: 0.000) G_L1: 14.575 G_L1_ABSOLUTE: 2.953 G_L1_RELATIVE: 11.623 G_Regularizer: 0.000 validation_error: 20.925 +(epoch: 38, iters: 132176, time: 0.613, data: 0.000) G_L1: 14.889 G_L1_ABSOLUTE: 2.298 G_L1_RELATIVE: 12.591 G_Regularizer: 0.000 validation_error: 21.008 +(epoch: 38, iters: 134176, time: 0.609, data: 0.000) G_L1: 13.422 G_L1_ABSOLUTE: 2.774 G_L1_RELATIVE: 10.648 G_Regularizer: 0.000 validation_error: 20.563 +(epoch: 38, iters: 136176, time: 0.611, data: 0.000) G_L1: 16.604 G_L1_ABSOLUTE: 2.991 G_L1_RELATIVE: 13.613 G_Regularizer: 0.000 validation_error: 21.018 +(epoch: 38, iters: 138176, time: 0.613, data: 0.000) G_L1: 12.279 G_L1_ABSOLUTE: 2.547 G_L1_RELATIVE: 9.732 G_Regularizer: 0.000 validation_error: 21.006 +(epoch: 38, iters: 140176, time: 0.589, data: 0.000) G_L1: 12.235 G_L1_ABSOLUTE: 2.340 G_L1_RELATIVE: 9.895 G_Regularizer: 0.000 validation_error: 20.599 +(epoch: 38, iters: 142176, time: 0.603, data: 0.000) G_L1: 12.970 G_L1_ABSOLUTE: 2.398 G_L1_RELATIVE: 10.572 G_Regularizer: 0.000 validation_error: 21.033 +(epoch: 38, iters: 144176, time: 0.610, data: 0.000) G_L1: 15.953 G_L1_ABSOLUTE: 2.832 G_L1_RELATIVE: 13.121 G_Regularizer: 0.000 validation_error: 20.615 +(epoch: 38, iters: 146176, time: 0.599, data: 0.000) G_L1: 13.399 G_L1_ABSOLUTE: 2.409 G_L1_RELATIVE: 10.990 G_Regularizer: 0.000 validation_error: 20.866 +(epoch: 38, iters: 148176, time: 0.546, data: 0.000) G_L1: 18.064 G_L1_ABSOLUTE: 3.088 G_L1_RELATIVE: 14.976 G_Regularizer: 0.000 validation_error: 20.892 +(epoch: 38, iters: 150176, time: 0.541, data: 0.000) G_L1: 12.437 G_L1_ABSOLUTE: 2.596 G_L1_RELATIVE: 9.840 G_Regularizer: 0.000 validation_error: 20.738 +(epoch: 38, iters: 152176, time: 0.541, data: 0.000) G_L1: 15.600 G_L1_ABSOLUTE: 3.037 G_L1_RELATIVE: 12.564 G_Regularizer: 0.000 validation_error: 20.686 +(epoch: 38, iters: 154176, time: 0.540, data: 0.000) G_L1: 17.176 G_L1_ABSOLUTE: 3.004 G_L1_RELATIVE: 14.172 G_Regularizer: 0.000 validation_error: 21.127 +(epoch: 38, iters: 156176, time: 0.546, data: 0.000) G_L1: 14.082 G_L1_ABSOLUTE: 2.366 G_L1_RELATIVE: 11.716 G_Regularizer: 0.000 validation_error: 20.837 +(epoch: 38, iters: 158176, time: 0.534, data: 0.000) G_L1: 15.048 G_L1_ABSOLUTE: 2.661 G_L1_RELATIVE: 12.387 G_Regularizer: 0.000 validation_error: 21.406 +(epoch: 38, iters: 160176, time: 0.543, data: 0.000) G_L1: 16.212 G_L1_ABSOLUTE: 2.737 G_L1_RELATIVE: 13.475 G_Regularizer: 0.000 validation_error: 21.026 +(epoch: 38, iters: 162176, time: 0.544, data: 0.000) G_L1: 14.247 G_L1_ABSOLUTE: 2.489 G_L1_RELATIVE: 11.758 G_Regularizer: 0.000 validation_error: 20.876 +(epoch: 38, iters: 164176, time: 
0.538, data: 0.000) G_L1: 14.005 G_L1_ABSOLUTE: 2.313 G_L1_RELATIVE: 11.692 G_Regularizer: 0.000 validation_error: 20.803 +(epoch: 38, iters: 166176, time: 0.533, data: 0.000) G_L1: 15.798 G_L1_ABSOLUTE: 2.724 G_L1_RELATIVE: 13.074 G_Regularizer: 0.000 validation_error: 20.482 +(epoch: 38, iters: 168176, time: 0.540, data: 0.000) G_L1: 15.040 G_L1_ABSOLUTE: 2.310 G_L1_RELATIVE: 12.730 G_Regularizer: 0.000 validation_error: 21.048 +(epoch: 38, iters: 170176, time: 0.544, data: 0.000) G_L1: 13.183 G_L1_ABSOLUTE: 2.228 G_L1_RELATIVE: 10.955 G_Regularizer: 0.000 validation_error: 20.877 +(epoch: 38, iters: 172176, time: 0.549, data: 0.000) G_L1: 14.113 G_L1_ABSOLUTE: 2.628 G_L1_RELATIVE: 11.484 G_Regularizer: 0.000 validation_error: 21.258 +(epoch: 38, iters: 174176, time: 0.532, data: 0.000) G_L1: 13.811 G_L1_ABSOLUTE: 2.924 G_L1_RELATIVE: 10.887 G_Regularizer: 0.000 validation_error: 20.970 +(epoch: 38, iters: 176176, time: 0.540, data: 0.000) G_L1: 14.389 G_L1_ABSOLUTE: 2.496 G_L1_RELATIVE: 11.893 G_Regularizer: 0.000 validation_error: 20.716 +(epoch: 38, iters: 178176, time: 0.547, data: 0.000) G_L1: 14.586 G_L1_ABSOLUTE: 2.687 G_L1_RELATIVE: 11.898 G_Regularizer: 0.000 validation_error: 20.760 +(epoch: 38, iters: 180176, time: 0.542, data: 0.000) G_L1: 13.691 G_L1_ABSOLUTE: 2.300 G_L1_RELATIVE: 11.391 G_Regularizer: 0.000 validation_error: 20.580 +(epoch: 38, iters: 182176, time: 0.543, data: 0.000) G_L1: 14.191 G_L1_ABSOLUTE: 2.073 G_L1_RELATIVE: 12.119 G_Regularizer: 0.000 validation_error: 21.025 +(epoch: 38, iters: 184176, time: 0.532, data: 0.000) G_L1: 13.878 G_L1_ABSOLUTE: 2.601 G_L1_RELATIVE: 11.277 G_Regularizer: 0.000 validation_error: 20.737 +(epoch: 38, iters: 186176, time: 0.539, data: 0.000) G_L1: 12.997 G_L1_ABSOLUTE: 2.258 G_L1_RELATIVE: 10.739 G_Regularizer: 0.000 validation_error: 21.015 +(epoch: 38, iters: 188176, time: 0.538, data: 0.000) G_L1: 14.409 G_L1_ABSOLUTE: 2.284 G_L1_RELATIVE: 12.125 G_Regularizer: 0.000 validation_error: 20.923 +(epoch: 38, iters: 190176, time: 0.544, data: 0.000) G_L1: 14.019 G_L1_ABSOLUTE: 2.574 G_L1_RELATIVE: 11.444 G_Regularizer: 0.000 validation_error: 20.952 +(epoch: 38, iters: 192176, time: 0.536, data: 0.001) G_L1: 13.911 G_L1_ABSOLUTE: 2.259 G_L1_RELATIVE: 11.653 G_Regularizer: 0.000 validation_error: 21.366 +(epoch: 38, iters: 194176, time: 0.542, data: 0.000) G_L1: 15.196 G_L1_ABSOLUTE: 2.908 G_L1_RELATIVE: 12.288 G_Regularizer: 0.000 validation_error: 20.830 +(epoch: 38, iters: 196176, time: 0.537, data: 0.000) G_L1: 20.692 G_L1_ABSOLUTE: 2.904 G_L1_RELATIVE: 17.788 G_Regularizer: 0.000 validation_error: 20.936 +(epoch: 38, iters: 198176, time: 0.547, data: 0.000) G_L1: 12.780 G_L1_ABSOLUTE: 2.524 G_L1_RELATIVE: 10.256 G_Regularizer: 0.000 validation_error: 20.985 +(epoch: 38, iters: 200176, time: 0.537, data: 0.000) G_L1: 13.877 G_L1_ABSOLUTE: 2.513 G_L1_RELATIVE: 11.364 G_Regularizer: 0.000 validation_error: 20.700 +(epoch: 38, iters: 202176, time: 0.541, data: 0.000) G_L1: 14.712 G_L1_ABSOLUTE: 2.688 G_L1_RELATIVE: 12.024 G_Regularizer: 0.000 validation_error: 21.329 +(epoch: 38, iters: 204176, time: 0.540, data: 0.000) G_L1: 14.898 G_L1_ABSOLUTE: 2.442 G_L1_RELATIVE: 12.456 G_Regularizer: 0.000 validation_error: 20.877 +(epoch: 38, iters: 206176, time: 0.543, data: 0.000) G_L1: 12.426 G_L1_ABSOLUTE: 2.502 G_L1_RELATIVE: 9.924 G_Regularizer: 0.000 validation_error: 20.964 +(epoch: 38, iters: 208176, time: 0.539, data: 0.000) G_L1: 13.448 G_L1_ABSOLUTE: 2.503 G_L1_RELATIVE: 10.946 G_Regularizer: 0.000 validation_error: 20.979 
+(epoch: 38, iters: 210176, time: 0.530, data: 0.000) G_L1: 13.728 G_L1_ABSOLUTE: 2.368 G_L1_RELATIVE: 11.361 G_Regularizer: 0.000 validation_error: 20.651 +(epoch: 38, iters: 212176, time: 0.543, data: 0.000) G_L1: 13.261 G_L1_ABSOLUTE: 2.193 G_L1_RELATIVE: 11.068 G_Regularizer: 0.000 validation_error: 20.801 +(epoch: 38, iters: 214176, time: 0.538, data: 0.001) G_L1: 16.541 G_L1_ABSOLUTE: 2.826 G_L1_RELATIVE: 13.715 G_Regularizer: 0.000 validation_error: 21.139 +(epoch: 38, iters: 216176, time: 0.539, data: 0.000) G_L1: 16.186 G_L1_ABSOLUTE: 2.818 G_L1_RELATIVE: 13.367 G_Regularizer: 0.000 validation_error: 20.649 +(epoch: 38, iters: 218176, time: 0.538, data: 0.000) G_L1: 14.750 G_L1_ABSOLUTE: 2.226 G_L1_RELATIVE: 12.523 G_Regularizer: 0.000 validation_error: 20.657 +(epoch: 38, iters: 220176, time: 0.539, data: 0.000) G_L1: 14.231 G_L1_ABSOLUTE: 2.224 G_L1_RELATIVE: 12.008 G_Regularizer: 0.000 validation_error: 20.469 +(epoch: 38, iters: 222176, time: 0.539, data: 0.000) G_L1: 13.746 G_L1_ABSOLUTE: 3.117 G_L1_RELATIVE: 10.629 G_Regularizer: 0.000 validation_error: 21.262 +(epoch: 38, iters: 224176, time: 0.542, data: 0.000) G_L1: 15.530 G_L1_ABSOLUTE: 2.473 G_L1_RELATIVE: 13.057 G_Regularizer: 0.000 validation_error: 20.800 +(epoch: 38, iters: 226176, time: 0.535, data: 0.000) G_L1: 15.602 G_L1_ABSOLUTE: 2.447 G_L1_RELATIVE: 13.155 G_Regularizer: 0.000 validation_error: 20.651 +(epoch: 38, iters: 228176, time: 0.541, data: 0.000) G_L1: 20.649 G_L1_ABSOLUTE: 2.326 G_L1_RELATIVE: 18.323 G_Regularizer: 0.000 validation_error: 20.685 +(epoch: 38, iters: 230176, time: 0.542, data: 0.000) G_L1: 15.236 G_L1_ABSOLUTE: 2.586 G_L1_RELATIVE: 12.649 G_Regularizer: 0.000 validation_error: 20.386 +(epoch: 38, iters: 232176, time: 0.539, data: 0.000) G_L1: 15.066 G_L1_ABSOLUTE: 2.473 G_L1_RELATIVE: 12.592 G_Regularizer: 0.000 validation_error: 20.581 +(epoch: 38, iters: 234176, time: 0.539, data: 0.001) G_L1: 14.797 G_L1_ABSOLUTE: 2.434 G_L1_RELATIVE: 12.363 G_Regularizer: 0.000 validation_error: 21.187 +(epoch: 38, iters: 236176, time: 0.545, data: 0.000) G_L1: 12.323 G_L1_ABSOLUTE: 2.594 G_L1_RELATIVE: 9.729 G_Regularizer: 0.000 validation_error: 20.940 +(epoch: 38, iters: 238176, time: 0.541, data: 0.000) G_L1: 13.334 G_L1_ABSOLUTE: 2.436 G_L1_RELATIVE: 10.898 G_Regularizer: 0.000 validation_error: 20.373 +(epoch: 38, iters: 240176, time: 0.546, data: 0.000) G_L1: 13.343 G_L1_ABSOLUTE: 2.408 G_L1_RELATIVE: 10.935 G_Regularizer: 0.000 validation_error: 20.674 +(epoch: 38, iters: 242176, time: 0.541, data: 0.000) G_L1: 14.764 G_L1_ABSOLUTE: 2.568 G_L1_RELATIVE: 12.196 G_Regularizer: 0.000 validation_error: 21.050 +(epoch: 38, iters: 244176, time: 0.547, data: 0.000) G_L1: 15.758 G_L1_ABSOLUTE: 2.386 G_L1_RELATIVE: 13.371 G_Regularizer: 0.000 validation_error: 20.384 +(epoch: 38, iters: 246176, time: 0.546, data: 0.000) G_L1: 13.677 G_L1_ABSOLUTE: 2.165 G_L1_RELATIVE: 11.512 G_Regularizer: 0.000 validation_error: 20.889 +(epoch: 38, iters: 248176, time: 0.541, data: 0.000) G_L1: 12.901 G_L1_ABSOLUTE: 2.298 G_L1_RELATIVE: 10.603 G_Regularizer: 0.000 validation_error: 20.969 +(epoch: 38, iters: 250176, time: 0.547, data: 0.000) G_L1: 16.396 G_L1_ABSOLUTE: 2.352 G_L1_RELATIVE: 14.044 G_Regularizer: 0.000 validation_error: 20.816 +(epoch: 38, iters: 252176, time: 0.542, data: 0.000) G_L1: 14.126 G_L1_ABSOLUTE: 2.738 G_L1_RELATIVE: 11.388 G_Regularizer: 0.000 validation_error: 20.640 +(epoch: 38, iters: 254176, time: 0.539, data: 0.000) G_L1: 15.742 G_L1_ABSOLUTE: 2.522 G_L1_RELATIVE: 13.220 
G_Regularizer: 0.000 validation_error: 20.652 +(epoch: 38, iters: 256176, time: 0.539, data: 0.000) G_L1: 14.959 G_L1_ABSOLUTE: 2.574 G_L1_RELATIVE: 12.385 G_Regularizer: 0.000 validation_error: 20.692 +(epoch: 38, iters: 258176, time: 0.542, data: 0.000) G_L1: 14.760 G_L1_ABSOLUTE: 2.733 G_L1_RELATIVE: 12.027 G_Regularizer: 0.000 validation_error: 20.957 +(epoch: 38, iters: 260176, time: 0.543, data: 0.000) G_L1: 14.052 G_L1_ABSOLUTE: 2.584 G_L1_RELATIVE: 11.468 G_Regularizer: 0.000 validation_error: 20.870 +(epoch: 38, iters: 262176, time: 0.541, data: 0.000) G_L1: 15.193 G_L1_ABSOLUTE: 2.811 G_L1_RELATIVE: 12.382 G_Regularizer: 0.000 validation_error: 20.782 +(epoch: 38, iters: 264176, time: 0.541, data: 0.000) G_L1: 13.122 G_L1_ABSOLUTE: 2.188 G_L1_RELATIVE: 10.934 G_Regularizer: 0.000 validation_error: 20.849 +(epoch: 38, iters: 266176, time: 0.538, data: 0.000) G_L1: 13.409 G_L1_ABSOLUTE: 2.139 G_L1_RELATIVE: 11.270 G_Regularizer: 0.000 validation_error: 21.042 +(epoch: 38, iters: 268176, time: 0.546, data: 0.001) G_L1: 13.196 G_L1_ABSOLUTE: 2.338 G_L1_RELATIVE: 10.858 G_Regularizer: 0.000 validation_error: 20.970 +(epoch: 38, iters: 270176, time: 0.534, data: 0.000) G_L1: 15.325 G_L1_ABSOLUTE: 2.526 G_L1_RELATIVE: 12.800 G_Regularizer: 0.000 validation_error: 20.736 +(epoch: 38, iters: 272176, time: 0.543, data: 0.000) G_L1: 15.162 G_L1_ABSOLUTE: 2.823 G_L1_RELATIVE: 12.339 G_Regularizer: 0.000 validation_error: 21.010 +(epoch: 38, iters: 274176, time: 0.546, data: 0.000) G_L1: 11.841 G_L1_ABSOLUTE: 2.259 G_L1_RELATIVE: 9.581 G_Regularizer: 0.000 validation_error: 21.324 +(epoch: 38, iters: 276176, time: 0.549, data: 0.000) G_L1: 13.307 G_L1_ABSOLUTE: 2.098 G_L1_RELATIVE: 11.208 G_Regularizer: 0.000 validation_error: 20.700 +(epoch: 38, iters: 278176, time: 0.524, data: 0.000) G_L1: 13.719 G_L1_ABSOLUTE: 2.780 G_L1_RELATIVE: 10.939 G_Regularizer: 0.000 validation_error: 20.843 +(epoch: 38, iters: 280176, time: 0.534, data: 0.000) G_L1: 14.010 G_L1_ABSOLUTE: 3.018 G_L1_RELATIVE: 10.992 G_Regularizer: 0.000 validation_error: 20.643 +(epoch: 38, iters: 282176, time: 0.538, data: 0.000) G_L1: 15.020 G_L1_ABSOLUTE: 2.414 G_L1_RELATIVE: 12.606 G_Regularizer: 0.000 validation_error: 20.707 +(epoch: 38, iters: 284176, time: 0.541, data: 0.000) G_L1: 13.529 G_L1_ABSOLUTE: 2.504 G_L1_RELATIVE: 11.026 G_Regularizer: 0.000 validation_error: 20.613 +(epoch: 38, iters: 286176, time: 0.542, data: 0.000) G_L1: 15.800 G_L1_ABSOLUTE: 2.616 G_L1_RELATIVE: 13.184 G_Regularizer: 0.000 validation_error: 20.520 +(epoch: 38, iters: 288176, time: 0.544, data: 0.000) G_L1: 12.757 G_L1_ABSOLUTE: 2.453 G_L1_RELATIVE: 10.304 G_Regularizer: 0.000 validation_error: 21.168 +(epoch: 38, iters: 290176, time: 0.551, data: 0.000) G_L1: 13.296 G_L1_ABSOLUTE: 2.371 G_L1_RELATIVE: 10.925 G_Regularizer: 0.000 validation_error: 20.865 +(epoch: 38, iters: 292176, time: 0.536, data: 0.000) G_L1: 14.027 G_L1_ABSOLUTE: 2.939 G_L1_RELATIVE: 11.089 G_Regularizer: 0.000 validation_error: 20.893 +(epoch: 38, iters: 294176, time: 0.534, data: 0.000) G_L1: 11.779 G_L1_ABSOLUTE: 1.975 G_L1_RELATIVE: 9.804 G_Regularizer: 0.000 validation_error: 20.676 +(epoch: 38, iters: 296176, time: 0.541, data: 0.000) G_L1: 15.051 G_L1_ABSOLUTE: 2.427 G_L1_RELATIVE: 12.624 G_Regularizer: 0.000 validation_error: 20.644 +(epoch: 38, iters: 298176, time: 0.544, data: 0.000) G_L1: 12.588 G_L1_ABSOLUTE: 2.134 G_L1_RELATIVE: 10.455 G_Regularizer: 0.000 validation_error: 20.460 +(epoch: 38, iters: 300176, time: 0.545, data: 0.000) G_L1: 14.091 
G_L1_ABSOLUTE: 2.384 G_L1_RELATIVE: 11.708 G_Regularizer: 0.000 validation_error: 21.418 +(epoch: 38, iters: 302176, time: 0.545, data: 0.000) G_L1: 14.047 G_L1_ABSOLUTE: 2.150 G_L1_RELATIVE: 11.897 G_Regularizer: 0.000 validation_error: 20.683 +(epoch: 39, iters: 1424, time: 0.532, data: 0.000) G_L1: 14.844 G_L1_ABSOLUTE: 2.183 G_L1_RELATIVE: 12.661 G_Regularizer: 0.000 validation_error: 20.911 +(epoch: 39, iters: 3424, time: 0.548, data: 0.000) G_L1: 12.174 G_L1_ABSOLUTE: 2.487 G_L1_RELATIVE: 9.687 G_Regularizer: 0.000 validation_error: 20.792 +(epoch: 39, iters: 5424, time: 0.542, data: 0.000) G_L1: 11.743 G_L1_ABSOLUTE: 1.980 G_L1_RELATIVE: 9.763 G_Regularizer: 0.000 validation_error: 20.802 +(epoch: 39, iters: 7424, time: 0.548, data: 0.000) G_L1: 14.694 G_L1_ABSOLUTE: 2.125 G_L1_RELATIVE: 12.569 G_Regularizer: 0.000 validation_error: 20.925 +(epoch: 39, iters: 9424, time: 0.538, data: 0.001) G_L1: 14.174 G_L1_ABSOLUTE: 2.285 G_L1_RELATIVE: 11.889 G_Regularizer: 0.000 validation_error: 21.087 +(epoch: 39, iters: 11424, time: 0.543, data: 0.001) G_L1: 14.830 G_L1_ABSOLUTE: 2.478 G_L1_RELATIVE: 12.352 G_Regularizer: 0.000 validation_error: 20.884 +(epoch: 39, iters: 13424, time: 0.533, data: 0.000) G_L1: 13.771 G_L1_ABSOLUTE: 2.865 G_L1_RELATIVE: 10.906 G_Regularizer: 0.000 validation_error: 20.962 +(epoch: 39, iters: 15424, time: 0.546, data: 0.000) G_L1: 14.888 G_L1_ABSOLUTE: 2.388 G_L1_RELATIVE: 12.499 G_Regularizer: 0.000 validation_error: 20.576 +(epoch: 39, iters: 17424, time: 0.547, data: 0.000) G_L1: 13.508 G_L1_ABSOLUTE: 2.982 G_L1_RELATIVE: 10.526 G_Regularizer: 0.000 validation_error: 20.941 +(epoch: 39, iters: 19424, time: 0.537, data: 0.000) G_L1: 13.126 G_L1_ABSOLUTE: 2.513 G_L1_RELATIVE: 10.613 G_Regularizer: 0.000 validation_error: 20.712 +(epoch: 39, iters: 21424, time: 0.545, data: 0.000) G_L1: 14.570 G_L1_ABSOLUTE: 3.111 G_L1_RELATIVE: 11.459 G_Regularizer: 0.000 validation_error: 20.592 +(epoch: 39, iters: 23424, time: 0.540, data: 0.000) G_L1: 13.792 G_L1_ABSOLUTE: 2.380 G_L1_RELATIVE: 11.412 G_Regularizer: 0.000 validation_error: 21.383 +(epoch: 39, iters: 25424, time: 0.546, data: 0.000) G_L1: 14.204 G_L1_ABSOLUTE: 2.680 G_L1_RELATIVE: 11.524 G_Regularizer: 0.000 validation_error: 20.432 +(epoch: 39, iters: 27424, time: 0.541, data: 0.000) G_L1: 13.481 G_L1_ABSOLUTE: 2.737 G_L1_RELATIVE: 10.744 G_Regularizer: 0.000 validation_error: 21.035 +(epoch: 39, iters: 29424, time: 0.541, data: 0.000) G_L1: 14.124 G_L1_ABSOLUTE: 2.770 G_L1_RELATIVE: 11.354 G_Regularizer: 0.000 validation_error: 20.827 +(epoch: 39, iters: 31424, time: 0.541, data: 0.000) G_L1: 14.669 G_L1_ABSOLUTE: 2.580 G_L1_RELATIVE: 12.089 G_Regularizer: 0.000 validation_error: 20.729 +(epoch: 39, iters: 33424, time: 0.542, data: 0.000) G_L1: 15.470 G_L1_ABSOLUTE: 2.955 G_L1_RELATIVE: 12.515 G_Regularizer: 0.000 validation_error: 21.068 +(epoch: 39, iters: 35424, time: 0.541, data: 0.000) G_L1: 13.683 G_L1_ABSOLUTE: 2.937 G_L1_RELATIVE: 10.746 G_Regularizer: 0.000 validation_error: 20.987 +(epoch: 39, iters: 37424, time: 0.544, data: 0.000) G_L1: 13.217 G_L1_ABSOLUTE: 2.403 G_L1_RELATIVE: 10.815 G_Regularizer: 0.000 validation_error: 20.899 +(epoch: 39, iters: 39424, time: 0.538, data: 0.000) G_L1: 24.063 G_L1_ABSOLUTE: 2.740 G_L1_RELATIVE: 21.323 G_Regularizer: 0.000 validation_error: 20.666 +(epoch: 39, iters: 41424, time: 0.541, data: 0.000) G_L1: 16.581 G_L1_ABSOLUTE: 2.452 G_L1_RELATIVE: 14.129 G_Regularizer: 0.000 validation_error: 20.890 +(epoch: 39, iters: 43424, time: 0.542, data: 0.001) G_L1: 
14.113 G_L1_ABSOLUTE: 2.447 G_L1_RELATIVE: 11.666 G_Regularizer: 0.000 validation_error: 21.416 +(epoch: 39, iters: 45424, time: 0.545, data: 0.000) G_L1: 11.419 G_L1_ABSOLUTE: 2.373 G_L1_RELATIVE: 9.046 G_Regularizer: 0.000 validation_error: 20.668 +(epoch: 39, iters: 47424, time: 0.538, data: 0.001) G_L1: 12.732 G_L1_ABSOLUTE: 2.255 G_L1_RELATIVE: 10.476 G_Regularizer: 0.000 validation_error: 21.097 +(epoch: 39, iters: 49424, time: 0.541, data: 0.000) G_L1: 15.876 G_L1_ABSOLUTE: 2.573 G_L1_RELATIVE: 13.302 G_Regularizer: 0.000 validation_error: 21.171 +(epoch: 39, iters: 51424, time: 0.541, data: 0.000) G_L1: 14.098 G_L1_ABSOLUTE: 2.911 G_L1_RELATIVE: 11.186 G_Regularizer: 0.000 validation_error: 20.933 +(epoch: 39, iters: 53424, time: 0.544, data: 0.000) G_L1: 13.286 G_L1_ABSOLUTE: 2.540 G_L1_RELATIVE: 10.746 G_Regularizer: 0.000 validation_error: 20.466 +(epoch: 39, iters: 55424, time: 0.542, data: 0.000) G_L1: 12.995 G_L1_ABSOLUTE: 2.659 G_L1_RELATIVE: 10.336 G_Regularizer: 0.000 validation_error: 21.227 +(epoch: 39, iters: 57424, time: 0.542, data: 0.001) G_L1: 17.724 G_L1_ABSOLUTE: 2.370 G_L1_RELATIVE: 15.354 G_Regularizer: 0.000 validation_error: 20.386 +(epoch: 39, iters: 59424, time: 0.540, data: 0.000) G_L1: 15.137 G_L1_ABSOLUTE: 2.376 G_L1_RELATIVE: 12.761 G_Regularizer: 0.000 validation_error: 20.851 +(epoch: 39, iters: 61424, time: 0.537, data: 0.000) G_L1: 14.749 G_L1_ABSOLUTE: 2.437 G_L1_RELATIVE: 12.311 G_Regularizer: 0.000 validation_error: 20.721 +(epoch: 39, iters: 63424, time: 0.543, data: 0.001) G_L1: 13.935 G_L1_ABSOLUTE: 2.259 G_L1_RELATIVE: 11.676 G_Regularizer: 0.000 validation_error: 21.064 +(epoch: 39, iters: 65424, time: 0.543, data: 0.000) G_L1: 12.096 G_L1_ABSOLUTE: 2.153 G_L1_RELATIVE: 9.943 G_Regularizer: 0.000 validation_error: 20.509 +(epoch: 39, iters: 67424, time: 0.545, data: 0.000) G_L1: 14.398 G_L1_ABSOLUTE: 2.332 G_L1_RELATIVE: 12.065 G_Regularizer: 0.000 validation_error: 20.826 +(epoch: 39, iters: 69424, time: 0.537, data: 0.001) G_L1: 14.968 G_L1_ABSOLUTE: 3.076 G_L1_RELATIVE: 11.892 G_Regularizer: 0.000 validation_error: 20.800 +(epoch: 39, iters: 71424, time: 0.536, data: 0.001) G_L1: 14.318 G_L1_ABSOLUTE: 2.327 G_L1_RELATIVE: 11.991 G_Regularizer: 0.000 validation_error: 21.104 +(epoch: 39, iters: 73424, time: 0.542, data: 0.000) G_L1: 13.797 G_L1_ABSOLUTE: 2.821 G_L1_RELATIVE: 10.976 G_Regularizer: 0.000 validation_error: 20.900 +(epoch: 39, iters: 75424, time: 0.544, data: 0.001) G_L1: 15.121 G_L1_ABSOLUTE: 3.140 G_L1_RELATIVE: 11.981 G_Regularizer: 0.000 validation_error: 21.007 +(epoch: 39, iters: 77424, time: 0.540, data: 0.000) G_L1: 13.510 G_L1_ABSOLUTE: 2.519 G_L1_RELATIVE: 10.991 G_Regularizer: 0.000 validation_error: 20.778 +(epoch: 39, iters: 79424, time: 0.544, data: 0.000) G_L1: 14.367 G_L1_ABSOLUTE: 2.323 G_L1_RELATIVE: 12.044 G_Regularizer: 0.000 validation_error: 20.743 +(epoch: 39, iters: 81424, time: 0.540, data: 0.000) G_L1: 15.942 G_L1_ABSOLUTE: 2.936 G_L1_RELATIVE: 13.006 G_Regularizer: 0.000 validation_error: 20.634 +(epoch: 39, iters: 83424, time: 0.542, data: 0.000) G_L1: 14.078 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 11.624 G_Regularizer: 0.000 validation_error: 20.850 +(epoch: 39, iters: 85424, time: 0.542, data: 0.000) G_L1: 14.920 G_L1_ABSOLUTE: 2.122 G_L1_RELATIVE: 12.797 G_Regularizer: 0.000 validation_error: 20.996 +(epoch: 39, iters: 87424, time: 0.540, data: 0.000) G_L1: 14.167 G_L1_ABSOLUTE: 2.752 G_L1_RELATIVE: 11.415 G_Regularizer: 0.000 validation_error: 20.539 +(epoch: 39, iters: 89424, time: 0.545, data: 
0.000) G_L1: 11.949 G_L1_ABSOLUTE: 2.503 G_L1_RELATIVE: 9.446 G_Regularizer: 0.000 validation_error: 20.946 +(epoch: 39, iters: 91424, time: 0.537, data: 0.000) G_L1: 11.065 G_L1_ABSOLUTE: 2.293 G_L1_RELATIVE: 8.772 G_Regularizer: 0.000 validation_error: 21.169 +(epoch: 39, iters: 93424, time: 0.545, data: 0.000) G_L1: 11.189 G_L1_ABSOLUTE: 2.569 G_L1_RELATIVE: 8.620 G_Regularizer: 0.000 validation_error: 21.112 +(epoch: 39, iters: 95424, time: 0.549, data: 0.001) G_L1: 15.107 G_L1_ABSOLUTE: 2.877 G_L1_RELATIVE: 12.230 G_Regularizer: 0.000 validation_error: 20.553 +(epoch: 39, iters: 97424, time: 0.542, data: 0.000) G_L1: 15.318 G_L1_ABSOLUTE: 2.518 G_L1_RELATIVE: 12.800 G_Regularizer: 0.000 validation_error: 20.927 +(epoch: 39, iters: 99424, time: 0.543, data: 0.000) G_L1: 13.345 G_L1_ABSOLUTE: 2.138 G_L1_RELATIVE: 11.207 G_Regularizer: 0.000 validation_error: 21.212 +(epoch: 39, iters: 101424, time: 0.543, data: 0.001) G_L1: 13.989 G_L1_ABSOLUTE: 2.742 G_L1_RELATIVE: 11.247 G_Regularizer: 0.000 validation_error: 21.026 +(epoch: 39, iters: 103424, time: 0.540, data: 0.000) G_L1: 14.894 G_L1_ABSOLUTE: 3.144 G_L1_RELATIVE: 11.750 G_Regularizer: 0.000 validation_error: 20.530 +(epoch: 39, iters: 105424, time: 0.539, data: 0.000) G_L1: 13.646 G_L1_ABSOLUTE: 2.516 G_L1_RELATIVE: 11.131 G_Regularizer: 0.000 validation_error: 20.566 +(epoch: 39, iters: 107424, time: 0.542, data: 0.000) G_L1: 12.621 G_L1_ABSOLUTE: 2.228 G_L1_RELATIVE: 10.393 G_Regularizer: 0.000 validation_error: 21.103 +(epoch: 39, iters: 109424, time: 0.538, data: 0.000) G_L1: 14.827 G_L1_ABSOLUTE: 2.721 G_L1_RELATIVE: 12.107 G_Regularizer: 0.000 validation_error: 20.616 +(epoch: 39, iters: 111424, time: 0.542, data: 0.000) G_L1: 14.047 G_L1_ABSOLUTE: 2.810 G_L1_RELATIVE: 11.237 G_Regularizer: 0.000 validation_error: 21.190 +(epoch: 39, iters: 113424, time: 0.524, data: 0.001) G_L1: 14.037 G_L1_ABSOLUTE: 2.321 G_L1_RELATIVE: 11.716 G_Regularizer: 0.000 validation_error: 20.892 +(epoch: 39, iters: 115424, time: 0.540, data: 0.000) G_L1: 15.471 G_L1_ABSOLUTE: 2.827 G_L1_RELATIVE: 12.644 G_Regularizer: 0.000 validation_error: 21.159 +(epoch: 39, iters: 117424, time: 0.544, data: 0.000) G_L1: 15.030 G_L1_ABSOLUTE: 2.504 G_L1_RELATIVE: 12.525 G_Regularizer: 0.000 validation_error: 20.924 +(epoch: 39, iters: 119424, time: 0.542, data: 0.000) G_L1: 15.885 G_L1_ABSOLUTE: 2.369 G_L1_RELATIVE: 13.516 G_Regularizer: 0.000 validation_error: 21.244 +(epoch: 39, iters: 121424, time: 0.535, data: 0.000) G_L1: 15.028 G_L1_ABSOLUTE: 2.390 G_L1_RELATIVE: 12.639 G_Regularizer: 0.000 validation_error: 20.652 +(epoch: 39, iters: 123424, time: 0.541, data: 0.000) G_L1: 14.175 G_L1_ABSOLUTE: 2.421 G_L1_RELATIVE: 11.754 G_Regularizer: 0.000 validation_error: 21.165 +(epoch: 39, iters: 125424, time: 0.541, data: 0.000) G_L1: 12.162 G_L1_ABSOLUTE: 2.291 G_L1_RELATIVE: 9.870 G_Regularizer: 0.000 validation_error: 21.172 +(epoch: 39, iters: 127424, time: 0.543, data: 0.000) G_L1: 16.847 G_L1_ABSOLUTE: 3.025 G_L1_RELATIVE: 13.822 G_Regularizer: 0.000 validation_error: 20.511 +(epoch: 39, iters: 129424, time: 0.542, data: 0.000) G_L1: 12.637 G_L1_ABSOLUTE: 2.076 G_L1_RELATIVE: 10.561 G_Regularizer: 0.000 validation_error: 20.976 +(epoch: 39, iters: 131424, time: 0.536, data: 0.000) G_L1: 13.061 G_L1_ABSOLUTE: 2.519 G_L1_RELATIVE: 10.543 G_Regularizer: 0.000 validation_error: 21.331 +(epoch: 39, iters: 133424, time: 0.543, data: 0.000) G_L1: 14.440 G_L1_ABSOLUTE: 2.567 G_L1_RELATIVE: 11.873 G_Regularizer: 0.000 validation_error: 20.919 +(epoch: 39, iters: 
135424, time: 0.541, data: 0.000) G_L1: 15.554 G_L1_ABSOLUTE: 2.368 G_L1_RELATIVE: 13.186 G_Regularizer: 0.000 validation_error: 20.914 +(epoch: 39, iters: 137424, time: 0.543, data: 0.000) G_L1: 13.646 G_L1_ABSOLUTE: 2.584 G_L1_RELATIVE: 11.062 G_Regularizer: 0.000 validation_error: 20.576 +(epoch: 39, iters: 139424, time: 0.535, data: 0.000) G_L1: 12.292 G_L1_ABSOLUTE: 2.402 G_L1_RELATIVE: 9.890 G_Regularizer: 0.000 validation_error: 21.135 +(epoch: 39, iters: 141424, time: 0.538, data: 0.000) G_L1: 13.164 G_L1_ABSOLUTE: 2.659 G_L1_RELATIVE: 10.505 G_Regularizer: 0.000 validation_error: 20.527 +(epoch: 39, iters: 143424, time: 0.542, data: 0.000) G_L1: 14.892 G_L1_ABSOLUTE: 2.322 G_L1_RELATIVE: 12.571 G_Regularizer: 0.000 validation_error: 21.454 +(epoch: 39, iters: 145424, time: 0.541, data: 0.000) G_L1: 12.512 G_L1_ABSOLUTE: 2.360 G_L1_RELATIVE: 10.151 G_Regularizer: 0.000 validation_error: 20.518 +(epoch: 39, iters: 147424, time: 0.537, data: 0.000) G_L1: 15.870 G_L1_ABSOLUTE: 2.426 G_L1_RELATIVE: 13.444 G_Regularizer: 0.000 validation_error: 20.766 +(epoch: 39, iters: 149424, time: 0.540, data: 0.000) G_L1: 15.958 G_L1_ABSOLUTE: 2.425 G_L1_RELATIVE: 13.533 G_Regularizer: 0.000 validation_error: 20.188 +(epoch: 39, iters: 151424, time: 0.544, data: 0.000) G_L1: 16.256 G_L1_ABSOLUTE: 2.642 G_L1_RELATIVE: 13.614 G_Regularizer: 0.000 validation_error: 20.715 +(epoch: 39, iters: 153424, time: 0.542, data: 0.000) G_L1: 14.178 G_L1_ABSOLUTE: 2.736 G_L1_RELATIVE: 11.442 G_Regularizer: 0.000 validation_error: 21.381 +(epoch: 39, iters: 155424, time: 0.539, data: 0.000) G_L1: 12.463 G_L1_ABSOLUTE: 2.547 G_L1_RELATIVE: 9.916 G_Regularizer: 0.000 validation_error: 20.877 +(epoch: 39, iters: 157424, time: 0.530, data: 0.000) G_L1: 13.798 G_L1_ABSOLUTE: 2.397 G_L1_RELATIVE: 11.401 G_Regularizer: 0.000 validation_error: 20.627 +(epoch: 39, iters: 159424, time: 0.538, data: 0.000) G_L1: 14.811 G_L1_ABSOLUTE: 2.334 G_L1_RELATIVE: 12.477 G_Regularizer: 0.000 validation_error: 20.909 +(epoch: 39, iters: 161424, time: 0.539, data: 0.000) G_L1: 14.122 G_L1_ABSOLUTE: 2.425 G_L1_RELATIVE: 11.697 G_Regularizer: 0.000 validation_error: 20.701 +(epoch: 39, iters: 163424, time: 0.541, data: 0.000) G_L1: 13.110 G_L1_ABSOLUTE: 2.624 G_L1_RELATIVE: 10.486 G_Regularizer: 0.000 validation_error: 20.894 +(epoch: 39, iters: 165424, time: 0.537, data: 0.000) G_L1: 14.945 G_L1_ABSOLUTE: 2.279 G_L1_RELATIVE: 12.666 G_Regularizer: 0.000 validation_error: 21.020 +(epoch: 39, iters: 167424, time: 0.544, data: 0.000) G_L1: 12.862 G_L1_ABSOLUTE: 2.391 G_L1_RELATIVE: 10.472 G_Regularizer: 0.000 validation_error: 20.842 +(epoch: 39, iters: 169424, time: 0.543, data: 0.000) G_L1: 11.935 G_L1_ABSOLUTE: 2.635 G_L1_RELATIVE: 9.300 G_Regularizer: 0.000 validation_error: 21.083 +(epoch: 39, iters: 171424, time: 0.544, data: 0.000) G_L1: 14.485 G_L1_ABSOLUTE: 2.476 G_L1_RELATIVE: 12.008 G_Regularizer: 0.000 validation_error: 20.601 +(epoch: 39, iters: 173424, time: 0.539, data: 0.001) G_L1: 14.638 G_L1_ABSOLUTE: 2.559 G_L1_RELATIVE: 12.079 G_Regularizer: 0.000 validation_error: 20.932 +(epoch: 39, iters: 175424, time: 0.537, data: 0.000) G_L1: 15.872 G_L1_ABSOLUTE: 2.561 G_L1_RELATIVE: 13.311 G_Regularizer: 0.000 validation_error: 20.554 +(epoch: 39, iters: 177424, time: 0.549, data: 0.000) G_L1: 11.961 G_L1_ABSOLUTE: 2.120 G_L1_RELATIVE: 9.841 G_Regularizer: 0.000 validation_error: 20.623 +(epoch: 39, iters: 179424, time: 0.538, data: 0.000) G_L1: 15.014 G_L1_ABSOLUTE: 2.561 G_L1_RELATIVE: 12.453 G_Regularizer: 0.000 
validation_error: 20.888 +(epoch: 39, iters: 181424, time: 0.535, data: 0.000) G_L1: 15.708 G_L1_ABSOLUTE: 2.773 G_L1_RELATIVE: 12.935 G_Regularizer: 0.000 validation_error: 20.790 +(epoch: 39, iters: 183424, time: 0.537, data: 0.001) G_L1: 12.920 G_L1_ABSOLUTE: 2.462 G_L1_RELATIVE: 10.458 G_Regularizer: 0.000 validation_error: 20.887 +(epoch: 39, iters: 185424, time: 0.539, data: 0.000) G_L1: 13.314 G_L1_ABSOLUTE: 2.636 G_L1_RELATIVE: 10.678 G_Regularizer: 0.000 validation_error: 20.872 +(epoch: 39, iters: 187424, time: 0.540, data: 0.001) G_L1: 12.708 G_L1_ABSOLUTE: 2.051 G_L1_RELATIVE: 10.657 G_Regularizer: 0.000 validation_error: 20.636 +(epoch: 39, iters: 189424, time: 0.546, data: 0.000) G_L1: 16.990 G_L1_ABSOLUTE: 2.686 G_L1_RELATIVE: 14.304 G_Regularizer: 0.000 validation_error: 20.905 +(epoch: 39, iters: 191424, time: 0.536, data: 0.000) G_L1: 13.874 G_L1_ABSOLUTE: 2.781 G_L1_RELATIVE: 11.093 G_Regularizer: 0.000 validation_error: 20.304 +(epoch: 39, iters: 193424, time: 0.539, data: 0.000) G_L1: 14.722 G_L1_ABSOLUTE: 2.543 G_L1_RELATIVE: 12.179 G_Regularizer: 0.000 validation_error: 20.487 +(epoch: 39, iters: 195424, time: 0.543, data: 0.000) G_L1: 15.065 G_L1_ABSOLUTE: 2.550 G_L1_RELATIVE: 12.515 G_Regularizer: 0.000 validation_error: 20.906 +(epoch: 39, iters: 197424, time: 0.547, data: 0.000) G_L1: 11.881 G_L1_ABSOLUTE: 2.171 G_L1_RELATIVE: 9.711 G_Regularizer: 0.000 validation_error: 20.955 +(epoch: 39, iters: 199424, time: 0.539, data: 0.000) G_L1: 13.134 G_L1_ABSOLUTE: 2.774 G_L1_RELATIVE: 10.359 G_Regularizer: 0.000 validation_error: 21.057 +(epoch: 39, iters: 201424, time: 0.544, data: 0.000) G_L1: 15.879 G_L1_ABSOLUTE: 2.406 G_L1_RELATIVE: 13.473 G_Regularizer: 0.000 validation_error: 20.651 +(epoch: 39, iters: 203424, time: 0.545, data: 0.000) G_L1: 14.215 G_L1_ABSOLUTE: 2.685 G_L1_RELATIVE: 11.530 G_Regularizer: 0.000 validation_error: 21.004 +(epoch: 39, iters: 205424, time: 0.539, data: 0.000) G_L1: 15.453 G_L1_ABSOLUTE: 2.174 G_L1_RELATIVE: 13.279 G_Regularizer: 0.000 validation_error: 20.961 +(epoch: 39, iters: 207424, time: 0.538, data: 0.000) G_L1: 15.155 G_L1_ABSOLUTE: 2.450 G_L1_RELATIVE: 12.705 G_Regularizer: 0.000 validation_error: 20.873 +(epoch: 39, iters: 209424, time: 0.538, data: 0.000) G_L1: 14.244 G_L1_ABSOLUTE: 2.443 G_L1_RELATIVE: 11.801 G_Regularizer: 0.000 validation_error: 20.584 +(epoch: 39, iters: 211424, time: 0.547, data: 0.001) G_L1: 13.553 G_L1_ABSOLUTE: 2.480 G_L1_RELATIVE: 11.073 G_Regularizer: 0.000 validation_error: 20.709 +(epoch: 39, iters: 213424, time: 0.546, data: 0.000) G_L1: 12.236 G_L1_ABSOLUTE: 2.089 G_L1_RELATIVE: 10.148 G_Regularizer: 0.000 validation_error: 20.642 +(epoch: 39, iters: 215424, time: 0.544, data: 0.000) G_L1: 14.512 G_L1_ABSOLUTE: 2.095 G_L1_RELATIVE: 12.417 G_Regularizer: 0.000 validation_error: 20.438 +(epoch: 39, iters: 217424, time: 0.537, data: 0.000) G_L1: 12.221 G_L1_ABSOLUTE: 2.572 G_L1_RELATIVE: 9.649 G_Regularizer: 0.000 validation_error: 20.855 +(epoch: 39, iters: 219424, time: 0.550, data: 0.000) G_L1: 14.180 G_L1_ABSOLUTE: 2.905 G_L1_RELATIVE: 11.275 G_Regularizer: 0.000 validation_error: 20.983 +(epoch: 39, iters: 221424, time: 0.558, data: 0.000) G_L1: 14.195 G_L1_ABSOLUTE: 2.742 G_L1_RELATIVE: 11.453 G_Regularizer: 0.000 validation_error: 20.826 +(epoch: 39, iters: 223424, time: 0.556, data: 0.001) G_L1: 13.123 G_L1_ABSOLUTE: 2.378 G_L1_RELATIVE: 10.745 G_Regularizer: 0.000 validation_error: 20.433 +(epoch: 39, iters: 225424, time: 0.538, data: 0.000) G_L1: 15.493 G_L1_ABSOLUTE: 2.954 
G_L1_RELATIVE: 12.539 G_Regularizer: 0.000 validation_error: 20.701 +(epoch: 39, iters: 227424, time: 0.554, data: 0.000) G_L1: 14.385 G_L1_ABSOLUTE: 2.381 G_L1_RELATIVE: 12.004 G_Regularizer: 0.000 validation_error: 20.602 +(epoch: 39, iters: 229424, time: 0.543, data: 0.000) G_L1: 14.897 G_L1_ABSOLUTE: 2.407 G_L1_RELATIVE: 12.490 G_Regularizer: 0.000 validation_error: 21.317 +(epoch: 39, iters: 231424, time: 0.556, data: 0.000) G_L1: 13.115 G_L1_ABSOLUTE: 2.210 G_L1_RELATIVE: 10.905 G_Regularizer: 0.000 validation_error: 20.676 +(epoch: 39, iters: 233424, time: 0.548, data: 0.000) G_L1: 13.395 G_L1_ABSOLUTE: 2.605 G_L1_RELATIVE: 10.790 G_Regularizer: 0.000 validation_error: 20.487 +(epoch: 39, iters: 235424, time: 0.551, data: 0.001) G_L1: 13.899 G_L1_ABSOLUTE: 2.452 G_L1_RELATIVE: 11.446 G_Regularizer: 0.000 validation_error: 20.531 +(epoch: 39, iters: 237424, time: 0.551, data: 0.000) G_L1: 13.591 G_L1_ABSOLUTE: 2.766 G_L1_RELATIVE: 10.825 G_Regularizer: 0.000 validation_error: 20.647 +(epoch: 39, iters: 239424, time: 0.551, data: 0.000) G_L1: 14.379 G_L1_ABSOLUTE: 2.382 G_L1_RELATIVE: 11.997 G_Regularizer: 0.000 validation_error: 20.565 +(epoch: 39, iters: 241424, time: 0.547, data: 0.000) G_L1: 15.929 G_L1_ABSOLUTE: 2.936 G_L1_RELATIVE: 12.993 G_Regularizer: 0.000 validation_error: 20.898 +(epoch: 39, iters: 243424, time: 0.556, data: 0.000) G_L1: 12.747 G_L1_ABSOLUTE: 2.742 G_L1_RELATIVE: 10.005 G_Regularizer: 0.000 validation_error: 20.890 +(epoch: 39, iters: 245424, time: 0.555, data: 0.000) G_L1: 12.735 G_L1_ABSOLUTE: 2.261 G_L1_RELATIVE: 10.474 G_Regularizer: 0.000 validation_error: 21.088 +(epoch: 39, iters: 247424, time: 0.554, data: 0.000) G_L1: 13.421 G_L1_ABSOLUTE: 2.934 G_L1_RELATIVE: 10.488 G_Regularizer: 0.000 validation_error: 21.091 +(epoch: 39, iters: 249424, time: 0.555, data: 0.000) G_L1: 14.009 G_L1_ABSOLUTE: 2.316 G_L1_RELATIVE: 11.693 G_Regularizer: 0.000 validation_error: 20.810 +(epoch: 39, iters: 251424, time: 0.563, data: 0.000) G_L1: 17.074 G_L1_ABSOLUTE: 2.921 G_L1_RELATIVE: 14.154 G_Regularizer: 0.000 validation_error: 20.656 +(epoch: 39, iters: 253424, time: 0.551, data: 0.000) G_L1: 15.411 G_L1_ABSOLUTE: 2.546 G_L1_RELATIVE: 12.864 G_Regularizer: 0.000 validation_error: 20.711 +(epoch: 39, iters: 255424, time: 0.555, data: 0.000) G_L1: 13.196 G_L1_ABSOLUTE: 2.400 G_L1_RELATIVE: 10.796 G_Regularizer: 0.000 validation_error: 20.738 +(epoch: 39, iters: 257424, time: 0.554, data: 0.000) G_L1: 14.129 G_L1_ABSOLUTE: 2.794 G_L1_RELATIVE: 11.335 G_Regularizer: 0.000 validation_error: 21.329 +(epoch: 39, iters: 259424, time: 0.548, data: 0.000) G_L1: 14.082 G_L1_ABSOLUTE: 2.616 G_L1_RELATIVE: 11.466 G_Regularizer: 0.000 validation_error: 20.817 +(epoch: 39, iters: 261424, time: 0.548, data: 0.000) G_L1: 15.934 G_L1_ABSOLUTE: 2.841 G_L1_RELATIVE: 13.093 G_Regularizer: 0.000 validation_error: 21.010 +(epoch: 39, iters: 263424, time: 0.550, data: 0.000) G_L1: 15.937 G_L1_ABSOLUTE: 2.938 G_L1_RELATIVE: 12.999 G_Regularizer: 0.000 validation_error: 20.652 +(epoch: 39, iters: 265424, time: 0.555, data: 0.000) G_L1: 15.392 G_L1_ABSOLUTE: 2.669 G_L1_RELATIVE: 12.723 G_Regularizer: 0.000 validation_error: 20.996 +(epoch: 39, iters: 267424, time: 0.550, data: 0.000) G_L1: 12.420 G_L1_ABSOLUTE: 2.322 G_L1_RELATIVE: 10.098 G_Regularizer: 0.000 validation_error: 20.970 +(epoch: 39, iters: 269424, time: 0.553, data: 0.000) G_L1: 15.722 G_L1_ABSOLUTE: 2.528 G_L1_RELATIVE: 13.194 G_Regularizer: 0.000 validation_error: 20.483 +(epoch: 39, iters: 271424, time: 0.556, data: 0.000) 
G_L1: 13.586 G_L1_ABSOLUTE: 2.402 G_L1_RELATIVE: 11.184 G_Regularizer: 0.000 validation_error: 20.636 +(epoch: 39, iters: 273424, time: 0.541, data: 0.000) G_L1: 14.064 G_L1_ABSOLUTE: 2.439 G_L1_RELATIVE: 11.625 G_Regularizer: 0.000 validation_error: 20.721 +(epoch: 39, iters: 275424, time: 0.556, data: 0.000) G_L1: 15.616 G_L1_ABSOLUTE: 2.130 G_L1_RELATIVE: 13.486 G_Regularizer: 0.000 validation_error: 20.670 +(epoch: 39, iters: 277424, time: 0.549, data: 0.000) G_L1: 12.700 G_L1_ABSOLUTE: 2.359 G_L1_RELATIVE: 10.342 G_Regularizer: 0.000 validation_error: 20.523 +(epoch: 39, iters: 279424, time: 0.555, data: 0.000) G_L1: 13.904 G_L1_ABSOLUTE: 2.601 G_L1_RELATIVE: 11.303 G_Regularizer: 0.000 validation_error: 20.694 +(epoch: 39, iters: 281424, time: 0.558, data: 0.000) G_L1: 12.649 G_L1_ABSOLUTE: 2.434 G_L1_RELATIVE: 10.214 G_Regularizer: 0.000 validation_error: 20.998 +(epoch: 39, iters: 283424, time: 0.547, data: 0.000) G_L1: 13.580 G_L1_ABSOLUTE: 2.217 G_L1_RELATIVE: 11.362 G_Regularizer: 0.000 validation_error: 20.658 +(epoch: 39, iters: 285424, time: 0.549, data: 0.000) G_L1: 13.243 G_L1_ABSOLUTE: 2.195 G_L1_RELATIVE: 11.048 G_Regularizer: 0.000 validation_error: 20.981 +(epoch: 39, iters: 287424, time: 0.561, data: 0.001) G_L1: 17.081 G_L1_ABSOLUTE: 2.482 G_L1_RELATIVE: 14.599 G_Regularizer: 0.000 validation_error: 20.767 +(epoch: 39, iters: 289424, time: 0.652, data: 0.000) G_L1: 14.443 G_L1_ABSOLUTE: 2.716 G_L1_RELATIVE: 11.727 G_Regularizer: 0.000 validation_error: 20.579 +(epoch: 39, iters: 291424, time: 0.647, data: 0.000) G_L1: 12.757 G_L1_ABSOLUTE: 2.921 G_L1_RELATIVE: 9.836 G_Regularizer: 0.000 validation_error: 21.050 +(epoch: 39, iters: 293424, time: 0.592, data: 0.000) G_L1: 14.040 G_L1_ABSOLUTE: 2.755 G_L1_RELATIVE: 11.286 G_Regularizer: 0.000 validation_error: 20.725 +(epoch: 39, iters: 295424, time: 0.614, data: 0.000) G_L1: 14.847 G_L1_ABSOLUTE: 2.515 G_L1_RELATIVE: 12.332 G_Regularizer: 0.000 validation_error: 20.660 +(epoch: 39, iters: 297424, time: 0.563, data: 0.000) G_L1: 12.949 G_L1_ABSOLUTE: 2.536 G_L1_RELATIVE: 10.413 G_Regularizer: 0.000 validation_error: 20.735 +(epoch: 39, iters: 299424, time: 0.619, data: 0.000) G_L1: 13.104 G_L1_ABSOLUTE: 2.890 G_L1_RELATIVE: 10.214 G_Regularizer: 0.000 validation_error: 20.746 +(epoch: 39, iters: 301424, time: 0.606, data: 0.000) G_L1: 11.411 G_L1_ABSOLUTE: 2.484 G_L1_RELATIVE: 8.928 G_Regularizer: 0.000 validation_error: 20.500 +(epoch: 40, iters: 672, time: 0.619, data: 0.000) G_L1: 13.078 G_L1_ABSOLUTE: 2.410 G_L1_RELATIVE: 10.667 G_Regularizer: 0.000 validation_error: 20.737 +(epoch: 40, iters: 2672, time: 0.603, data: 0.000) G_L1: 16.802 G_L1_ABSOLUTE: 2.754 G_L1_RELATIVE: 14.047 G_Regularizer: 0.000 validation_error: 20.812 +(epoch: 40, iters: 4672, time: 0.614, data: 0.000) G_L1: 13.812 G_L1_ABSOLUTE: 2.429 G_L1_RELATIVE: 11.383 G_Regularizer: 0.000 validation_error: 20.909 +(epoch: 40, iters: 6672, time: 0.612, data: 0.000) G_L1: 14.037 G_L1_ABSOLUTE: 2.805 G_L1_RELATIVE: 11.233 G_Regularizer: 0.000 validation_error: 20.957 +(epoch: 40, iters: 8672, time: 0.611, data: 0.000) G_L1: 12.319 G_L1_ABSOLUTE: 2.634 G_L1_RELATIVE: 9.685 G_Regularizer: 0.000 validation_error: 20.668 +(epoch: 40, iters: 10672, time: 0.575, data: 0.000) G_L1: 12.714 G_L1_ABSOLUTE: 2.422 G_L1_RELATIVE: 10.292 G_Regularizer: 0.000 validation_error: 20.798 +(epoch: 40, iters: 12672, time: 0.605, data: 0.000) G_L1: 13.719 G_L1_ABSOLUTE: 2.304 G_L1_RELATIVE: 11.415 G_Regularizer: 0.000 validation_error: 20.610 +(epoch: 40, iters: 14672, time: 
0.610, data: 0.000) G_L1: 13.007 G_L1_ABSOLUTE: 2.563 G_L1_RELATIVE: 10.444 G_Regularizer: 0.000 validation_error: 21.015 +(epoch: 40, iters: 16672, time: 0.610, data: 0.000) G_L1: 13.234 G_L1_ABSOLUTE: 2.254 G_L1_RELATIVE: 10.980 G_Regularizer: 0.000 validation_error: 20.843 +(epoch: 40, iters: 18672, time: 0.600, data: 0.000) G_L1: 13.697 G_L1_ABSOLUTE: 2.343 G_L1_RELATIVE: 11.354 G_Regularizer: 0.000 validation_error: 21.176 +(epoch: 40, iters: 20672, time: 0.581, data: 0.000) G_L1: 13.538 G_L1_ABSOLUTE: 2.391 G_L1_RELATIVE: 11.147 G_Regularizer: 0.000 validation_error: 20.493 +(epoch: 40, iters: 22672, time: 0.601, data: 0.000) G_L1: 12.109 G_L1_ABSOLUTE: 2.192 G_L1_RELATIVE: 9.917 G_Regularizer: 0.000 validation_error: 20.897 +(epoch: 40, iters: 24672, time: 0.576, data: 0.000) G_L1: 14.281 G_L1_ABSOLUTE: 2.115 G_L1_RELATIVE: 12.166 G_Regularizer: 0.000 validation_error: 20.974 +(epoch: 40, iters: 26672, time: 0.614, data: 0.000) G_L1: 13.699 G_L1_ABSOLUTE: 2.651 G_L1_RELATIVE: 11.047 G_Regularizer: 0.000 validation_error: 20.784 +(epoch: 40, iters: 28672, time: 0.618, data: 0.000) G_L1: 16.170 G_L1_ABSOLUTE: 3.070 G_L1_RELATIVE: 13.100 G_Regularizer: 0.000 validation_error: 20.391 +(epoch: 40, iters: 30672, time: 0.611, data: 0.000) G_L1: 15.237 G_L1_ABSOLUTE: 2.194 G_L1_RELATIVE: 13.043 G_Regularizer: 0.000 validation_error: 21.147 +(epoch: 40, iters: 32672, time: 0.603, data: 0.000) G_L1: 14.975 G_L1_ABSOLUTE: 2.369 G_L1_RELATIVE: 12.606 G_Regularizer: 0.000 validation_error: 21.032 +(epoch: 40, iters: 34672, time: 0.579, data: 0.000) G_L1: 14.283 G_L1_ABSOLUTE: 2.943 G_L1_RELATIVE: 11.340 G_Regularizer: 0.000 validation_error: 20.688 +(epoch: 40, iters: 36672, time: 0.619, data: 0.000) G_L1: 14.336 G_L1_ABSOLUTE: 2.618 G_L1_RELATIVE: 11.718 G_Regularizer: 0.000 validation_error: 21.384 +(epoch: 40, iters: 38672, time: 0.604, data: 0.000) G_L1: 11.601 G_L1_ABSOLUTE: 2.046 G_L1_RELATIVE: 9.555 G_Regularizer: 0.000 validation_error: 20.885 +(epoch: 40, iters: 40672, time: 0.620, data: 0.000) G_L1: 14.937 G_L1_ABSOLUTE: 2.649 G_L1_RELATIVE: 12.288 G_Regularizer: 0.000 validation_error: 20.961 +(epoch: 40, iters: 42672, time: 0.593, data: 0.000) G_L1: 14.351 G_L1_ABSOLUTE: 2.741 G_L1_RELATIVE: 11.610 G_Regularizer: 0.000 validation_error: 21.124 +(epoch: 40, iters: 44672, time: 0.615, data: 0.000) G_L1: 17.192 G_L1_ABSOLUTE: 2.689 G_L1_RELATIVE: 14.503 G_Regularizer: 0.000 validation_error: 20.587 +(epoch: 40, iters: 46672, time: 0.611, data: 0.000) G_L1: 14.012 G_L1_ABSOLUTE: 3.186 G_L1_RELATIVE: 10.826 G_Regularizer: 0.000 validation_error: 20.939 +(epoch: 40, iters: 48672, time: 0.597, data: 0.000) G_L1: 15.074 G_L1_ABSOLUTE: 2.732 G_L1_RELATIVE: 12.341 G_Regularizer: 0.000 validation_error: 20.858 +(epoch: 40, iters: 50672, time: 0.529, data: 0.000) G_L1: 14.695 G_L1_ABSOLUTE: 2.835 G_L1_RELATIVE: 11.859 G_Regularizer: 0.000 validation_error: 20.762 +(epoch: 40, iters: 52672, time: 0.537, data: 0.000) G_L1: 12.661 G_L1_ABSOLUTE: 2.759 G_L1_RELATIVE: 9.902 G_Regularizer: 0.000 validation_error: 20.823 +(epoch: 40, iters: 54672, time: 0.545, data: 0.000) G_L1: 12.950 G_L1_ABSOLUTE: 2.466 G_L1_RELATIVE: 10.484 G_Regularizer: 0.000 validation_error: 21.123 +(epoch: 40, iters: 56672, time: 0.539, data: 0.000) G_L1: 15.742 G_L1_ABSOLUTE: 2.654 G_L1_RELATIVE: 13.089 G_Regularizer: 0.000 validation_error: 20.958 +(epoch: 40, iters: 58672, time: 0.540, data: 0.001) G_L1: 15.573 G_L1_ABSOLUTE: 2.784 G_L1_RELATIVE: 12.789 G_Regularizer: 0.000 validation_error: 20.952 +(epoch: 40, iters: 
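Because every entry follows the same "key: value" layout, the component identity noted above is easy to spot-check mechanically. Below is a minimal, hypothetical Python sketch (illustrative only, not a file in this diff); the sample line is copied verbatim from the elided span:

import re

# One entry copied verbatim from the loss log above.
line = ("(epoch: 37, iters: 102928, time: 0.548, data: 0.000) "
        "G_L1: 16.499 G_L1_ABSOLUTE: 3.352 G_L1_RELATIVE: 13.147 "
        "G_Regularizer: 0.000 validation_error: 20.868")

# Parse every "key: value" pair into a dict of floats.
values = {k: float(v) for k, v in re.findall(r"(\w+): ([\d.]+)", line)}

# The logged components should sum to the total generator loss.
total = values["G_L1_ABSOLUTE"] + values["G_L1_RELATIVE"] + values["G_Regularizer"]
assert abs(values["G_L1"] - total) < 1e-6
print(f"epoch {values['epoch']:.0f}, iters {values['iters']:.0f}: "
      f"G_L1 = {total:.3f}, validation_error = {values['validation_error']}")

Run as-is, this prints "epoch 37, iters 102928: G_L1 = 16.499, validation_error = 20.868", confirming the decomposition for that entry.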
60672, time: 0.540, data: 0.000) G_L1: 13.603 G_L1_ABSOLUTE: 2.635 G_L1_RELATIVE: 10.968 G_Regularizer: 0.000 validation_error: 20.793 +(epoch: 40, iters: 62672, time: 0.531, data: 0.000) G_L1: 14.146 G_L1_ABSOLUTE: 2.317 G_L1_RELATIVE: 11.829 G_Regularizer: 0.000 validation_error: 20.672 +(epoch: 40, iters: 64672, time: 0.538, data: 0.000) G_L1: 16.657 G_L1_ABSOLUTE: 2.562 G_L1_RELATIVE: 14.094 G_Regularizer: 0.000 validation_error: 20.753 +(epoch: 40, iters: 66672, time: 0.550, data: 0.000) G_L1: 15.610 G_L1_ABSOLUTE: 2.224 G_L1_RELATIVE: 13.386 G_Regularizer: 0.000 validation_error: 21.139 +(epoch: 40, iters: 68672, time: 0.543, data: 0.001) G_L1: 13.831 G_L1_ABSOLUTE: 2.748 G_L1_RELATIVE: 11.082 G_Regularizer: 0.000 validation_error: 20.678 +(epoch: 40, iters: 70672, time: 0.528, data: 0.000) G_L1: 12.770 G_L1_ABSOLUTE: 2.141 G_L1_RELATIVE: 10.630 G_Regularizer: 0.000 validation_error: 20.957 +(epoch: 40, iters: 72672, time: 0.545, data: 0.000) G_L1: 13.850 G_L1_ABSOLUTE: 2.395 G_L1_RELATIVE: 11.454 G_Regularizer: 0.000 validation_error: 20.949 +(epoch: 40, iters: 74672, time: 0.542, data: 0.000) G_L1: 13.191 G_L1_ABSOLUTE: 2.313 G_L1_RELATIVE: 10.878 G_Regularizer: 0.000 validation_error: 20.920 +(epoch: 40, iters: 76672, time: 0.538, data: 0.000) G_L1: 15.712 G_L1_ABSOLUTE: 2.755 G_L1_RELATIVE: 12.957 G_Regularizer: 0.000 validation_error: 20.996 +(epoch: 40, iters: 78672, time: 0.537, data: 0.000) G_L1: 14.055 G_L1_ABSOLUTE: 2.440 G_L1_RELATIVE: 11.615 G_Regularizer: 0.000 validation_error: 20.979 +(epoch: 40, iters: 80672, time: 0.542, data: 0.000) G_L1: 15.825 G_L1_ABSOLUTE: 2.470 G_L1_RELATIVE: 13.355 G_Regularizer: 0.000 validation_error: 21.083 +(epoch: 40, iters: 82672, time: 0.539, data: 0.000) G_L1: 14.311 G_L1_ABSOLUTE: 2.486 G_L1_RELATIVE: 11.825 G_Regularizer: 0.000 validation_error: 20.988 +(epoch: 40, iters: 84672, time: 0.543, data: 0.000) G_L1: 12.777 G_L1_ABSOLUTE: 2.236 G_L1_RELATIVE: 10.541 G_Regularizer: 0.000 validation_error: 20.581 +(epoch: 40, iters: 86672, time: 0.546, data: 0.000) G_L1: 12.341 G_L1_ABSOLUTE: 2.767 G_L1_RELATIVE: 9.574 G_Regularizer: 0.000 validation_error: 20.757 +(epoch: 40, iters: 88672, time: 0.548, data: 0.000) G_L1: 13.642 G_L1_ABSOLUTE: 2.673 G_L1_RELATIVE: 10.969 G_Regularizer: 0.000 validation_error: 20.780 +(epoch: 40, iters: 90672, time: 0.538, data: 0.000) G_L1: 16.614 G_L1_ABSOLUTE: 2.944 G_L1_RELATIVE: 13.670 G_Regularizer: 0.000 validation_error: 21.009 +(epoch: 40, iters: 92672, time: 0.539, data: 0.000) G_L1: 15.413 G_L1_ABSOLUTE: 2.399 G_L1_RELATIVE: 13.014 G_Regularizer: 0.000 validation_error: 21.162 +(epoch: 40, iters: 94672, time: 0.538, data: 0.000) G_L1: 14.949 G_L1_ABSOLUTE: 2.417 G_L1_RELATIVE: 12.532 G_Regularizer: 0.000 validation_error: 21.158 +(epoch: 40, iters: 96672, time: 0.549, data: 0.000) G_L1: 14.418 G_L1_ABSOLUTE: 2.789 G_L1_RELATIVE: 11.629 G_Regularizer: 0.000 validation_error: 20.830 +(epoch: 40, iters: 98672, time: 0.548, data: 0.000) G_L1: 15.354 G_L1_ABSOLUTE: 2.512 G_L1_RELATIVE: 12.842 G_Regularizer: 0.000 validation_error: 21.181 +(epoch: 40, iters: 100672, time: 0.542, data: 0.000) G_L1: 13.249 G_L1_ABSOLUTE: 2.329 G_L1_RELATIVE: 10.920 G_Regularizer: 0.000 validation_error: 21.020 +(epoch: 40, iters: 102672, time: 0.548, data: 0.000) G_L1: 14.609 G_L1_ABSOLUTE: 2.167 G_L1_RELATIVE: 12.442 G_Regularizer: 0.000 validation_error: 20.653 +(epoch: 40, iters: 104672, time: 0.544, data: 0.001) G_L1: 12.020 G_L1_ABSOLUTE: 2.183 G_L1_RELATIVE: 9.837 G_Regularizer: 0.000 validation_error: 21.367 +(epoch: 
40, iters: 106672, time: 0.541, data: 0.000) G_L1: 14.735 G_L1_ABSOLUTE: 3.030 G_L1_RELATIVE: 11.705 G_Regularizer: 0.000 validation_error: 20.710 +(epoch: 40, iters: 108672, time: 0.542, data: 0.000) G_L1: 17.364 G_L1_ABSOLUTE: 2.400 G_L1_RELATIVE: 14.964 G_Regularizer: 0.000 validation_error: 20.902 +(epoch: 40, iters: 110672, time: 0.548, data: 0.000) G_L1: 13.443 G_L1_ABSOLUTE: 2.898 G_L1_RELATIVE: 10.545 G_Regularizer: 0.000 validation_error: 21.269 +(epoch: 40, iters: 112672, time: 0.543, data: 0.000) G_L1: 13.810 G_L1_ABSOLUTE: 2.238 G_L1_RELATIVE: 11.572 G_Regularizer: 0.000 validation_error: 21.411 +(epoch: 40, iters: 114672, time: 0.552, data: 0.000) G_L1: 12.922 G_L1_ABSOLUTE: 2.191 G_L1_RELATIVE: 10.731 G_Regularizer: 0.000 validation_error: 21.044 +(epoch: 40, iters: 116672, time: 0.545, data: 0.000) G_L1: 15.168 G_L1_ABSOLUTE: 2.331 G_L1_RELATIVE: 12.837 G_Regularizer: 0.000 validation_error: 20.801 +(epoch: 40, iters: 118672, time: 0.545, data: 0.001) G_L1: 14.672 G_L1_ABSOLUTE: 2.811 G_L1_RELATIVE: 11.861 G_Regularizer: 0.000 validation_error: 20.812 +(epoch: 40, iters: 120672, time: 0.536, data: 0.000) G_L1: 13.127 G_L1_ABSOLUTE: 2.198 G_L1_RELATIVE: 10.929 G_Regularizer: 0.000 validation_error: 20.922 +(epoch: 40, iters: 122672, time: 0.534, data: 0.000) G_L1: 12.248 G_L1_ABSOLUTE: 2.144 G_L1_RELATIVE: 10.105 G_Regularizer: 0.000 validation_error: 20.431 +(epoch: 40, iters: 124672, time: 0.547, data: 0.000) G_L1: 11.208 G_L1_ABSOLUTE: 2.328 G_L1_RELATIVE: 8.881 G_Regularizer: 0.000 validation_error: 21.215 +(epoch: 40, iters: 126672, time: 0.542, data: 0.000) G_L1: 17.507 G_L1_ABSOLUTE: 2.695 G_L1_RELATIVE: 14.812 G_Regularizer: 0.000 validation_error: 21.008 +(epoch: 40, iters: 128672, time: 0.549, data: 0.000) G_L1: 14.393 G_L1_ABSOLUTE: 2.539 G_L1_RELATIVE: 11.854 G_Regularizer: 0.000 validation_error: 20.710 +(epoch: 40, iters: 130672, time: 0.550, data: 0.001) G_L1: 14.427 G_L1_ABSOLUTE: 2.316 G_L1_RELATIVE: 12.111 G_Regularizer: 0.000 validation_error: 20.815 +(epoch: 40, iters: 132672, time: 0.539, data: 0.000) G_L1: 13.453 G_L1_ABSOLUTE: 2.589 G_L1_RELATIVE: 10.864 G_Regularizer: 0.000 validation_error: 20.755 +(epoch: 40, iters: 134672, time: 0.544, data: 0.000) G_L1: 13.472 G_L1_ABSOLUTE: 2.077 G_L1_RELATIVE: 11.394 G_Regularizer: 0.000 validation_error: 20.781 +(epoch: 40, iters: 136672, time: 0.542, data: 0.001) G_L1: 13.380 G_L1_ABSOLUTE: 2.859 G_L1_RELATIVE: 10.521 G_Regularizer: 0.000 validation_error: 21.014 +(epoch: 40, iters: 138672, time: 0.537, data: 0.000) G_L1: 11.557 G_L1_ABSOLUTE: 2.163 G_L1_RELATIVE: 9.394 G_Regularizer: 0.000 validation_error: 21.187 +(epoch: 40, iters: 140672, time: 0.538, data: 0.000) G_L1: 12.548 G_L1_ABSOLUTE: 2.412 G_L1_RELATIVE: 10.137 G_Regularizer: 0.000 validation_error: 20.837 +(epoch: 40, iters: 142672, time: 0.541, data: 0.000) G_L1: 12.271 G_L1_ABSOLUTE: 2.212 G_L1_RELATIVE: 10.058 G_Regularizer: 0.000 validation_error: 20.815 +(epoch: 40, iters: 144672, time: 0.547, data: 0.000) G_L1: 13.786 G_L1_ABSOLUTE: 2.563 G_L1_RELATIVE: 11.223 G_Regularizer: 0.000 validation_error: 20.367 +(epoch: 40, iters: 146672, time: 0.543, data: 0.000) G_L1: 13.371 G_L1_ABSOLUTE: 2.360 G_L1_RELATIVE: 11.011 G_Regularizer: 0.000 validation_error: 20.734 +(epoch: 40, iters: 148672, time: 0.541, data: 0.000) G_L1: 14.804 G_L1_ABSOLUTE: 2.627 G_L1_RELATIVE: 12.177 G_Regularizer: 0.000 validation_error: 20.700 +(epoch: 40, iters: 150672, time: 0.539, data: 0.000) G_L1: 17.266 G_L1_ABSOLUTE: 2.642 G_L1_RELATIVE: 14.624 G_Regularizer: 0.000 
validation_error: 20.918 +(epoch: 40, iters: 152672, time: 0.544, data: 0.000) G_L1: 13.602 G_L1_ABSOLUTE: 1.813 G_L1_RELATIVE: 11.789 G_Regularizer: 0.000 validation_error: 20.726 +(epoch: 40, iters: 154672, time: 0.544, data: 0.000) G_L1: 14.020 G_L1_ABSOLUTE: 2.707 G_L1_RELATIVE: 11.313 G_Regularizer: 0.000 validation_error: 20.752 +(epoch: 40, iters: 156672, time: 0.530, data: 0.001) G_L1: 13.597 G_L1_ABSOLUTE: 2.676 G_L1_RELATIVE: 10.921 G_Regularizer: 0.000 validation_error: 20.748 +(epoch: 40, iters: 158672, time: 0.540, data: 0.000) G_L1: 14.663 G_L1_ABSOLUTE: 2.511 G_L1_RELATIVE: 12.152 G_Regularizer: 0.000 validation_error: 20.959 +(epoch: 40, iters: 160672, time: 0.537, data: 0.000) G_L1: 18.136 G_L1_ABSOLUTE: 3.774 G_L1_RELATIVE: 14.362 G_Regularizer: 0.000 validation_error: 20.522 +(epoch: 40, iters: 162672, time: 0.543, data: 0.000) G_L1: 11.588 G_L1_ABSOLUTE: 2.505 G_L1_RELATIVE: 9.083 G_Regularizer: 0.000 validation_error: 20.674 +(epoch: 40, iters: 164672, time: 0.538, data: 0.000) G_L1: 12.127 G_L1_ABSOLUTE: 2.460 G_L1_RELATIVE: 9.668 G_Regularizer: 0.000 validation_error: 20.725 +(epoch: 40, iters: 166672, time: 0.543, data: 0.000) G_L1: 13.542 G_L1_ABSOLUTE: 2.139 G_L1_RELATIVE: 11.403 G_Regularizer: 0.000 validation_error: 21.041 +(epoch: 40, iters: 168672, time: 0.545, data: 0.000) G_L1: 15.490 G_L1_ABSOLUTE: 2.401 G_L1_RELATIVE: 13.089 G_Regularizer: 0.000 validation_error: 20.516 +(epoch: 40, iters: 170672, time: 0.538, data: 0.000) G_L1: 15.693 G_L1_ABSOLUTE: 2.652 G_L1_RELATIVE: 13.041 G_Regularizer: 0.000 validation_error: 21.042 +(epoch: 40, iters: 172672, time: 0.543, data: 0.000) G_L1: 14.590 G_L1_ABSOLUTE: 2.598 G_L1_RELATIVE: 11.992 G_Regularizer: 0.000 validation_error: 20.740 +(epoch: 40, iters: 174672, time: 0.539, data: 0.000) G_L1: 13.389 G_L1_ABSOLUTE: 2.208 G_L1_RELATIVE: 11.180 G_Regularizer: 0.000 validation_error: 20.573 +(epoch: 40, iters: 176672, time: 0.546, data: 0.000) G_L1: 13.479 G_L1_ABSOLUTE: 2.329 G_L1_RELATIVE: 11.150 G_Regularizer: 0.000 validation_error: 20.793 +(epoch: 40, iters: 178672, time: 0.544, data: 0.000) G_L1: 14.187 G_L1_ABSOLUTE: 2.435 G_L1_RELATIVE: 11.752 G_Regularizer: 0.000 validation_error: 20.777 +(epoch: 40, iters: 180672, time: 0.543, data: 0.000) G_L1: 14.545 G_L1_ABSOLUTE: 2.892 G_L1_RELATIVE: 11.653 G_Regularizer: 0.000 validation_error: 21.057 +(epoch: 40, iters: 182672, time: 0.541, data: 0.000) G_L1: 12.671 G_L1_ABSOLUTE: 2.576 G_L1_RELATIVE: 10.095 G_Regularizer: 0.000 validation_error: 20.176 +(epoch: 40, iters: 184672, time: 0.537, data: 0.000) G_L1: 13.153 G_L1_ABSOLUTE: 2.064 G_L1_RELATIVE: 11.088 G_Regularizer: 0.000 validation_error: 21.239 +(epoch: 40, iters: 186672, time: 0.537, data: 0.000) G_L1: 14.732 G_L1_ABSOLUTE: 2.470 G_L1_RELATIVE: 12.262 G_Regularizer: 0.000 validation_error: 20.284 +(epoch: 40, iters: 188672, time: 0.550, data: 0.000) G_L1: 15.211 G_L1_ABSOLUTE: 2.514 G_L1_RELATIVE: 12.697 G_Regularizer: 0.000 validation_error: 20.796 +(epoch: 40, iters: 190672, time: 0.548, data: 0.000) G_L1: 11.419 G_L1_ABSOLUTE: 2.291 G_L1_RELATIVE: 9.128 G_Regularizer: 0.000 validation_error: 21.177 +(epoch: 40, iters: 192672, time: 0.547, data: 0.000) G_L1: 13.893 G_L1_ABSOLUTE: 2.324 G_L1_RELATIVE: 11.569 G_Regularizer: 0.000 validation_error: 20.715 +(epoch: 40, iters: 194672, time: 0.544, data: 0.000) G_L1: 12.214 G_L1_ABSOLUTE: 2.067 G_L1_RELATIVE: 10.147 G_Regularizer: 0.000 validation_error: 20.994 +(epoch: 40, iters: 196672, time: 0.539, data: 0.001) G_L1: 15.905 G_L1_ABSOLUTE: 2.409 
G_L1_RELATIVE: 13.496 G_Regularizer: 0.000 validation_error: 21.317 +(epoch: 40, iters: 198672, time: 0.544, data: 0.001) G_L1: 11.825 G_L1_ABSOLUTE: 2.354 G_L1_RELATIVE: 9.471 G_Regularizer: 0.000 validation_error: 20.759 +(epoch: 40, iters: 200672, time: 0.545, data: 0.000) G_L1: 13.308 G_L1_ABSOLUTE: 2.302 G_L1_RELATIVE: 11.007 G_Regularizer: 0.000 validation_error: 21.083 +(epoch: 40, iters: 202672, time: 0.540, data: 0.000) G_L1: 16.156 G_L1_ABSOLUTE: 2.920 G_L1_RELATIVE: 13.236 G_Regularizer: 0.000 validation_error: 20.905 +(epoch: 40, iters: 204672, time: 0.539, data: 0.000) G_L1: 14.554 G_L1_ABSOLUTE: 2.446 G_L1_RELATIVE: 12.108 G_Regularizer: 0.000 validation_error: 20.764 +(epoch: 40, iters: 206672, time: 0.554, data: 0.001) G_L1: 14.706 G_L1_ABSOLUTE: 2.231 G_L1_RELATIVE: 12.475 G_Regularizer: 0.000 validation_error: 20.710 +(epoch: 40, iters: 208672, time: 0.538, data: 0.000) G_L1: 11.998 G_L1_ABSOLUTE: 2.351 G_L1_RELATIVE: 9.646 G_Regularizer: 0.000 validation_error: 20.898 +(epoch: 40, iters: 210672, time: 0.540, data: 0.000) G_L1: 13.910 G_L1_ABSOLUTE: 2.301 G_L1_RELATIVE: 11.609 G_Regularizer: 0.000 validation_error: 20.627 +(epoch: 40, iters: 212672, time: 0.533, data: 0.000) G_L1: 12.840 G_L1_ABSOLUTE: 2.173 G_L1_RELATIVE: 10.667 G_Regularizer: 0.000 validation_error: 20.881 +(epoch: 40, iters: 214672, time: 0.542, data: 0.000) G_L1: 15.175 G_L1_ABSOLUTE: 2.748 G_L1_RELATIVE: 12.427 G_Regularizer: 0.000 validation_error: 20.575 +(epoch: 40, iters: 216672, time: 0.537, data: 0.000) G_L1: 12.762 G_L1_ABSOLUTE: 2.530 G_L1_RELATIVE: 10.232 G_Regularizer: 0.000 validation_error: 20.701 +(epoch: 40, iters: 218672, time: 0.544, data: 0.000) G_L1: 13.906 G_L1_ABSOLUTE: 2.252 G_L1_RELATIVE: 11.655 G_Regularizer: 0.000 validation_error: 21.040 +(epoch: 40, iters: 220672, time: 0.544, data: 0.000) G_L1: 14.043 G_L1_ABSOLUTE: 2.255 G_L1_RELATIVE: 11.788 G_Regularizer: 0.000 validation_error: 20.538 +(epoch: 40, iters: 222672, time: 0.541, data: 0.000) G_L1: 11.845 G_L1_ABSOLUTE: 1.986 G_L1_RELATIVE: 9.859 G_Regularizer: 0.000 validation_error: 20.758 +(epoch: 40, iters: 224672, time: 0.532, data: 0.000) G_L1: 13.444 G_L1_ABSOLUTE: 2.394 G_L1_RELATIVE: 11.050 G_Regularizer: 0.000 validation_error: 20.874 +(epoch: 40, iters: 226672, time: 0.546, data: 0.000) G_L1: 12.270 G_L1_ABSOLUTE: 2.637 G_L1_RELATIVE: 9.633 G_Regularizer: 0.000 validation_error: 20.882 +(epoch: 40, iters: 228672, time: 0.541, data: 0.001) G_L1: 11.422 G_L1_ABSOLUTE: 2.458 G_L1_RELATIVE: 8.964 G_Regularizer: 0.000 validation_error: 20.953 +(epoch: 40, iters: 230672, time: 0.541, data: 0.000) G_L1: 16.320 G_L1_ABSOLUTE: 2.982 G_L1_RELATIVE: 13.339 G_Regularizer: 0.000 validation_error: 21.021 +(epoch: 40, iters: 232672, time: 0.550, data: 0.001) G_L1: 13.957 G_L1_ABSOLUTE: 2.147 G_L1_RELATIVE: 11.810 G_Regularizer: 0.000 validation_error: 20.812 +(epoch: 40, iters: 234672, time: 0.539, data: 0.000) G_L1: 14.591 G_L1_ABSOLUTE: 2.640 G_L1_RELATIVE: 11.951 G_Regularizer: 0.000 validation_error: 21.000 +(epoch: 40, iters: 236672, time: 0.546, data: 0.000) G_L1: 12.607 G_L1_ABSOLUTE: 2.228 G_L1_RELATIVE: 10.379 G_Regularizer: 0.000 validation_error: 20.884 +(epoch: 40, iters: 238672, time: 0.537, data: 0.000) G_L1: 13.054 G_L1_ABSOLUTE: 2.198 G_L1_RELATIVE: 10.856 G_Regularizer: 0.000 validation_error: 20.992 +(epoch: 40, iters: 240672, time: 0.545, data: 0.000) G_L1: 17.009 G_L1_ABSOLUTE: 2.141 G_L1_RELATIVE: 14.868 G_Regularizer: 0.000 validation_error: 20.990 +(epoch: 40, iters: 242672, time: 0.528, data: 0.000) G_L1: 
13.186 G_L1_ABSOLUTE: 2.741 G_L1_RELATIVE: 10.445 G_Regularizer: 0.000 validation_error: 21.145 +(epoch: 40, iters: 244672, time: 0.539, data: 0.000) G_L1: 14.491 G_L1_ABSOLUTE: 2.609 G_L1_RELATIVE: 11.882 G_Regularizer: 0.000 validation_error: 21.205 +(epoch: 40, iters: 246672, time: 0.543, data: 0.001) G_L1: 15.506 G_L1_ABSOLUTE: 2.285 G_L1_RELATIVE: 13.220 G_Regularizer: 0.000 validation_error: 20.753 +(epoch: 40, iters: 248672, time: 0.542, data: 0.000) G_L1: 15.389 G_L1_ABSOLUTE: 2.875 G_L1_RELATIVE: 12.515 G_Regularizer: 0.000 validation_error: 21.071 +(epoch: 40, iters: 250672, time: 0.542, data: 0.000) G_L1: 14.497 G_L1_ABSOLUTE: 2.666 G_L1_RELATIVE: 11.831 G_Regularizer: 0.000 validation_error: 20.545 +(epoch: 40, iters: 252672, time: 0.538, data: 0.000) G_L1: 13.287 G_L1_ABSOLUTE: 2.378 G_L1_RELATIVE: 10.910 G_Regularizer: 0.000 validation_error: 20.472 +(epoch: 40, iters: 254672, time: 0.543, data: 0.000) G_L1: 12.912 G_L1_ABSOLUTE: 2.517 G_L1_RELATIVE: 10.395 G_Regularizer: 0.000 validation_error: 20.498 +(epoch: 40, iters: 256672, time: 0.547, data: 0.000) G_L1: 12.527 G_L1_ABSOLUTE: 2.082 G_L1_RELATIVE: 10.445 G_Regularizer: 0.000 validation_error: 20.510 +(epoch: 40, iters: 258672, time: 0.544, data: 0.000) G_L1: 14.253 G_L1_ABSOLUTE: 3.104 G_L1_RELATIVE: 11.148 G_Regularizer: 0.000 validation_error: 21.270 +(epoch: 40, iters: 260672, time: 0.543, data: 0.000) G_L1: 14.204 G_L1_ABSOLUTE: 2.434 G_L1_RELATIVE: 11.769 G_Regularizer: 0.000 validation_error: 20.915 +(epoch: 40, iters: 262672, time: 0.545, data: 0.001) G_L1: 12.295 G_L1_ABSOLUTE: 2.261 G_L1_RELATIVE: 10.034 G_Regularizer: 0.000 validation_error: 20.487 +(epoch: 40, iters: 264672, time: 0.542, data: 0.000) G_L1: 12.326 G_L1_ABSOLUTE: 2.399 G_L1_RELATIVE: 9.928 G_Regularizer: 0.000 validation_error: 20.840 +(epoch: 40, iters: 266672, time: 0.543, data: 0.000) G_L1: 16.281 G_L1_ABSOLUTE: 2.497 G_L1_RELATIVE: 13.784 G_Regularizer: 0.000 validation_error: 20.484 +(epoch: 40, iters: 268672, time: 0.526, data: 0.000) G_L1: 15.234 G_L1_ABSOLUTE: 2.525 G_L1_RELATIVE: 12.709 G_Regularizer: 0.000 validation_error: 20.772 +(epoch: 40, iters: 270672, time: 0.546, data: 0.000) G_L1: 13.575 G_L1_ABSOLUTE: 2.276 G_L1_RELATIVE: 11.299 G_Regularizer: 0.000 validation_error: 20.425 +(epoch: 40, iters: 272672, time: 0.544, data: 0.000) G_L1: 12.616 G_L1_ABSOLUTE: 2.759 G_L1_RELATIVE: 9.858 G_Regularizer: 0.000 validation_error: 20.838 +(epoch: 40, iters: 274672, time: 0.547, data: 0.000) G_L1: 15.339 G_L1_ABSOLUTE: 2.598 G_L1_RELATIVE: 12.741 G_Regularizer: 0.000 validation_error: 20.866 +(epoch: 40, iters: 276672, time: 0.537, data: 0.000) G_L1: 15.659 G_L1_ABSOLUTE: 2.887 G_L1_RELATIVE: 12.772 G_Regularizer: 0.000 validation_error: 20.846 +(epoch: 40, iters: 278672, time: 0.547, data: 0.000) G_L1: 11.873 G_L1_ABSOLUTE: 2.636 G_L1_RELATIVE: 9.237 G_Regularizer: 0.000 validation_error: 20.778 +(epoch: 40, iters: 280672, time: 0.547, data: 0.000) G_L1: 14.515 G_L1_ABSOLUTE: 2.237 G_L1_RELATIVE: 12.278 G_Regularizer: 0.000 validation_error: 20.531 +(epoch: 40, iters: 282672, time: 0.545, data: 0.000) G_L1: 13.618 G_L1_ABSOLUTE: 2.633 G_L1_RELATIVE: 10.985 G_Regularizer: 0.000 validation_error: 20.891 +(epoch: 40, iters: 284672, time: 0.544, data: 0.000) G_L1: 13.044 G_L1_ABSOLUTE: 2.959 G_L1_RELATIVE: 10.085 G_Regularizer: 0.000 validation_error: 20.799 +(epoch: 40, iters: 286672, time: 0.540, data: 0.000) G_L1: 15.020 G_L1_ABSOLUTE: 2.983 G_L1_RELATIVE: 12.037 G_Regularizer: 0.000 validation_error: 20.667 +(epoch: 40, iters: 288672, 
time: 0.543, data: 0.000) G_L1: 15.352 G_L1_ABSOLUTE: 2.187 G_L1_RELATIVE: 13.165 G_Regularizer: 0.000 validation_error: 21.306 +(epoch: 40, iters: 290672, time: 0.543, data: 0.000) G_L1: 15.308 G_L1_ABSOLUTE: 2.506 G_L1_RELATIVE: 12.802 G_Regularizer: 0.000 validation_error: 20.716 +(epoch: 40, iters: 292672, time: 0.541, data: 0.000) G_L1: 13.123 G_L1_ABSOLUTE: 2.323 G_L1_RELATIVE: 10.800 G_Regularizer: 0.000 validation_error: 20.827 +(epoch: 40, iters: 294672, time: 0.540, data: 0.000) G_L1: 15.378 G_L1_ABSOLUTE: 2.410 G_L1_RELATIVE: 12.967 G_Regularizer: 0.000 validation_error: 20.975 +(epoch: 40, iters: 296672, time: 0.548, data: 0.000) G_L1: 15.537 G_L1_ABSOLUTE: 2.519 G_L1_RELATIVE: 13.017 G_Regularizer: 0.000 validation_error: 20.813 +(epoch: 40, iters: 298672, time: 0.540, data: 0.000) G_L1: 13.935 G_L1_ABSOLUTE: 2.483 G_L1_RELATIVE: 11.452 G_Regularizer: 0.000 validation_error: 20.384 +(epoch: 40, iters: 300672, time: 0.546, data: 0.000) G_L1: 14.812 G_L1_ABSOLUTE: 2.608 G_L1_RELATIVE: 12.204 G_Regularizer: 0.000 validation_error: 20.659 +(epoch: 40, iters: 302672, time: 0.531, data: 0.000) G_L1: 13.715 G_L1_ABSOLUTE: 2.541 G_L1_RELATIVE: 11.174 G_Regularizer: 0.000 validation_error: 20.809 +(epoch: 41, iters: 1920, time: 0.546, data: 0.000) G_L1: 15.368 G_L1_ABSOLUTE: 2.668 G_L1_RELATIVE: 12.701 G_Regularizer: 0.000 validation_error: 20.638 +(epoch: 41, iters: 3920, time: 0.539, data: 0.000) G_L1: 14.953 G_L1_ABSOLUTE: 2.552 G_L1_RELATIVE: 12.402 G_Regularizer: 0.000 validation_error: 20.904 +(epoch: 41, iters: 5920, time: 0.545, data: 0.000) G_L1: 14.879 G_L1_ABSOLUTE: 2.790 G_L1_RELATIVE: 12.090 G_Regularizer: 0.000 validation_error: 20.423 +(epoch: 41, iters: 7920, time: 0.539, data: 0.000) G_L1: 13.578 G_L1_ABSOLUTE: 2.475 G_L1_RELATIVE: 11.103 G_Regularizer: 0.000 validation_error: 20.697 +(epoch: 41, iters: 9920, time: 0.545, data: 0.000) G_L1: 12.208 G_L1_ABSOLUTE: 2.343 G_L1_RELATIVE: 9.864 G_Regularizer: 0.000 validation_error: 20.894 +(epoch: 41, iters: 11920, time: 0.540, data: 0.000) G_L1: 15.097 G_L1_ABSOLUTE: 2.601 G_L1_RELATIVE: 12.496 G_Regularizer: 0.000 validation_error: 20.888 +(epoch: 41, iters: 13920, time: 0.542, data: 0.000) G_L1: 13.298 G_L1_ABSOLUTE: 2.372 G_L1_RELATIVE: 10.927 G_Regularizer: 0.000 validation_error: 20.474 +(epoch: 41, iters: 15920, time: 0.546, data: 0.000) G_L1: 12.808 G_L1_ABSOLUTE: 2.524 G_L1_RELATIVE: 10.285 G_Regularizer: 0.000 validation_error: 20.903 +(epoch: 41, iters: 17920, time: 0.539, data: 0.000) G_L1: 13.554 G_L1_ABSOLUTE: 2.361 G_L1_RELATIVE: 11.192 G_Regularizer: 0.000 validation_error: 20.824 +(epoch: 41, iters: 19920, time: 0.541, data: 0.000) G_L1: 13.445 G_L1_ABSOLUTE: 2.018 G_L1_RELATIVE: 11.428 G_Regularizer: 0.000 validation_error: 20.846 +(epoch: 41, iters: 21920, time: 0.540, data: 0.000) G_L1: 14.210 G_L1_ABSOLUTE: 2.352 G_L1_RELATIVE: 11.858 G_Regularizer: 0.000 validation_error: 21.121 +(epoch: 41, iters: 23920, time: 0.539, data: 0.000) G_L1: 13.030 G_L1_ABSOLUTE: 1.957 G_L1_RELATIVE: 11.073 G_Regularizer: 0.000 validation_error: 20.562 +(epoch: 41, iters: 25920, time: 0.536, data: 0.000) G_L1: 13.076 G_L1_ABSOLUTE: 2.304 G_L1_RELATIVE: 10.773 G_Regularizer: 0.000 validation_error: 20.987 +(epoch: 41, iters: 27920, time: 0.545, data: 0.000) G_L1: 13.921 G_L1_ABSOLUTE: 2.284 G_L1_RELATIVE: 11.638 G_Regularizer: 0.000 validation_error: 20.648 +(epoch: 41, iters: 29920, time: 0.547, data: 0.001) G_L1: 15.518 G_L1_ABSOLUTE: 2.599 G_L1_RELATIVE: 12.920 G_Regularizer: 0.000 validation_error: 20.678 +(epoch: 41, 
iters: 31920, time: 0.538, data: 0.001) G_L1: 11.288 G_L1_ABSOLUTE: 2.333 G_L1_RELATIVE: 8.955 G_Regularizer: 0.000 validation_error: 20.720 +(epoch: 41, iters: 33920, time: 0.534, data: 0.000) G_L1: 13.922 G_L1_ABSOLUTE: 2.318 G_L1_RELATIVE: 11.604 G_Regularizer: 0.000 validation_error: 20.954 +(epoch: 41, iters: 35920, time: 0.544, data: 0.000) G_L1: 14.840 G_L1_ABSOLUTE: 2.489 G_L1_RELATIVE: 12.352 G_Regularizer: 0.000 validation_error: 20.980 +(epoch: 41, iters: 37920, time: 0.542, data: 0.001) G_L1: 12.792 G_L1_ABSOLUTE: 2.536 G_L1_RELATIVE: 10.256 G_Regularizer: 0.000 validation_error: 21.192 +(epoch: 41, iters: 39920, time: 0.545, data: 0.000) G_L1: 12.906 G_L1_ABSOLUTE: 2.685 G_L1_RELATIVE: 10.220 G_Regularizer: 0.000 validation_error: 20.895 +(epoch: 41, iters: 41920, time: 0.537, data: 0.000) G_L1: 14.645 G_L1_ABSOLUTE: 2.596 G_L1_RELATIVE: 12.049 G_Regularizer: 0.000 validation_error: 20.856 +(epoch: 41, iters: 43920, time: 0.546, data: 0.000) G_L1: 24.490 G_L1_ABSOLUTE: 2.920 G_L1_RELATIVE: 21.570 G_Regularizer: 0.000 validation_error: 20.166 +(epoch: 41, iters: 45920, time: 0.541, data: 0.000) G_L1: 13.116 G_L1_ABSOLUTE: 2.418 G_L1_RELATIVE: 10.699 G_Regularizer: 0.000 validation_error: 21.015 +(epoch: 41, iters: 47920, time: 0.546, data: 0.000) G_L1: 14.506 G_L1_ABSOLUTE: 3.201 G_L1_RELATIVE: 11.305 G_Regularizer: 0.000 validation_error: 21.069 +(epoch: 41, iters: 49920, time: 0.542, data: 0.001) G_L1: 13.297 G_L1_ABSOLUTE: 2.431 G_L1_RELATIVE: 10.866 G_Regularizer: 0.000 validation_error: 20.848 +(epoch: 41, iters: 51920, time: 0.528, data: 0.000) G_L1: 13.154 G_L1_ABSOLUTE: 2.795 G_L1_RELATIVE: 10.359 G_Regularizer: 0.000 validation_error: 20.620 +(epoch: 41, iters: 53920, time: 0.543, data: 0.000) G_L1: 12.910 G_L1_ABSOLUTE: 2.192 G_L1_RELATIVE: 10.719 G_Regularizer: 0.000 validation_error: 20.772 +(epoch: 41, iters: 55920, time: 0.546, data: 0.000) G_L1: 14.887 G_L1_ABSOLUTE: 2.379 G_L1_RELATIVE: 12.508 G_Regularizer: 0.000 validation_error: 21.139 +(epoch: 41, iters: 57920, time: 0.535, data: 0.000) G_L1: 13.194 G_L1_ABSOLUTE: 2.249 G_L1_RELATIVE: 10.945 G_Regularizer: 0.000 validation_error: 21.099 +(epoch: 41, iters: 59920, time: 0.532, data: 0.000) G_L1: 14.837 G_L1_ABSOLUTE: 3.009 G_L1_RELATIVE: 11.828 G_Regularizer: 0.000 validation_error: 21.175 +(epoch: 41, iters: 61920, time: 0.540, data: 0.000) G_L1: 14.714 G_L1_ABSOLUTE: 2.491 G_L1_RELATIVE: 12.223 G_Regularizer: 0.000 validation_error: 20.840 +(epoch: 41, iters: 63920, time: 0.532, data: 0.000) G_L1: 13.255 G_L1_ABSOLUTE: 2.515 G_L1_RELATIVE: 10.739 G_Regularizer: 0.000 validation_error: 20.677 +(epoch: 41, iters: 65920, time: 0.546, data: 0.000) G_L1: 14.609 G_L1_ABSOLUTE: 2.171 G_L1_RELATIVE: 12.439 G_Regularizer: 0.000 validation_error: 20.621 +(epoch: 41, iters: 67920, time: 0.535, data: 0.000) G_L1: 14.591 G_L1_ABSOLUTE: 2.468 G_L1_RELATIVE: 12.124 G_Regularizer: 0.000 validation_error: 20.740 +(epoch: 41, iters: 69920, time: 0.542, data: 0.000) G_L1: 13.949 G_L1_ABSOLUTE: 2.343 G_L1_RELATIVE: 11.606 G_Regularizer: 0.000 validation_error: 20.591 +(epoch: 41, iters: 71920, time: 0.547, data: 0.001) G_L1: 12.103 G_L1_ABSOLUTE: 2.557 G_L1_RELATIVE: 9.546 G_Regularizer: 0.000 validation_error: 20.997 +(epoch: 41, iters: 73920, time: 0.545, data: 0.000) G_L1: 12.799 G_L1_ABSOLUTE: 2.468 G_L1_RELATIVE: 10.331 G_Regularizer: 0.000 validation_error: 20.815 +(epoch: 41, iters: 75920, time: 0.544, data: 0.000) G_L1: 16.727 G_L1_ABSOLUTE: 2.726 G_L1_RELATIVE: 14.001 G_Regularizer: 0.000 validation_error: 20.832 
+(epoch: 41, iters: 77920, time: 0.538, data: 0.000) G_L1: 13.721 G_L1_ABSOLUTE: 2.541 G_L1_RELATIVE: 11.180 G_Regularizer: 0.000 validation_error: 20.975 +(epoch: 41, iters: 79920, time: 0.551, data: 0.000) G_L1: 13.400 G_L1_ABSOLUTE: 2.433 G_L1_RELATIVE: 10.968 G_Regularizer: 0.000 validation_error: 20.853 +(epoch: 41, iters: 81920, time: 0.541, data: 0.000) G_L1: 12.837 G_L1_ABSOLUTE: 2.221 G_L1_RELATIVE: 10.616 G_Regularizer: 0.000 validation_error: 21.051 +(epoch: 41, iters: 83920, time: 0.547, data: 0.000) G_L1: 13.586 G_L1_ABSOLUTE: 2.349 G_L1_RELATIVE: 11.237 G_Regularizer: 0.000 validation_error: 21.085 +(epoch: 41, iters: 85920, time: 0.540, data: 0.000) G_L1: 12.719 G_L1_ABSOLUTE: 2.080 G_L1_RELATIVE: 10.639 G_Regularizer: 0.000 validation_error: 21.063 +(epoch: 41, iters: 87920, time: 0.545, data: 0.000) G_L1: 13.034 G_L1_ABSOLUTE: 2.061 G_L1_RELATIVE: 10.973 G_Regularizer: 0.000 validation_error: 20.951 +(epoch: 41, iters: 89920, time: 0.542, data: 0.000) G_L1: 13.021 G_L1_ABSOLUTE: 2.285 G_L1_RELATIVE: 10.736 G_Regularizer: 0.000 validation_error: 20.670 +(epoch: 41, iters: 91920, time: 0.550, data: 0.000) G_L1: 12.626 G_L1_ABSOLUTE: 2.369 G_L1_RELATIVE: 10.256 G_Regularizer: 0.000 validation_error: 21.345 +(epoch: 41, iters: 93920, time: 0.543, data: 0.000) G_L1: 13.590 G_L1_ABSOLUTE: 2.696 G_L1_RELATIVE: 10.894 G_Regularizer: 0.000 validation_error: 21.147 +(epoch: 41, iters: 95920, time: 0.541, data: 0.000) G_L1: 13.292 G_L1_ABSOLUTE: 2.247 G_L1_RELATIVE: 11.045 G_Regularizer: 0.000 validation_error: 20.942 +(epoch: 41, iters: 97920, time: 0.546, data: 0.000) G_L1: 14.344 G_L1_ABSOLUTE: 2.431 G_L1_RELATIVE: 11.913 G_Regularizer: 0.000 validation_error: 20.630 +(epoch: 41, iters: 99920, time: 0.542, data: 0.000) G_L1: 16.477 G_L1_ABSOLUTE: 2.156 G_L1_RELATIVE: 14.321 G_Regularizer: 0.000 validation_error: 20.626 +(epoch: 41, iters: 101920, time: 0.544, data: 0.000) G_L1: 13.518 G_L1_ABSOLUTE: 2.693 G_L1_RELATIVE: 10.825 G_Regularizer: 0.000 validation_error: 20.939 +(epoch: 41, iters: 103920, time: 0.538, data: 0.001) G_L1: 14.605 G_L1_ABSOLUTE: 1.966 G_L1_RELATIVE: 12.639 G_Regularizer: 0.000 validation_error: 21.306 +(epoch: 41, iters: 105920, time: 0.546, data: 0.000) G_L1: 13.908 G_L1_ABSOLUTE: 2.015 G_L1_RELATIVE: 11.893 G_Regularizer: 0.000 validation_error: 20.950 +(epoch: 41, iters: 107920, time: 0.547, data: 0.001) G_L1: 14.094 G_L1_ABSOLUTE: 2.450 G_L1_RELATIVE: 11.643 G_Regularizer: 0.000 validation_error: 20.709 +(epoch: 41, iters: 109920, time: 0.536, data: 0.000) G_L1: 16.367 G_L1_ABSOLUTE: 3.152 G_L1_RELATIVE: 13.216 G_Regularizer: 0.000 validation_error: 21.013 +(epoch: 41, iters: 111920, time: 0.541, data: 0.000) G_L1: 12.847 G_L1_ABSOLUTE: 2.349 G_L1_RELATIVE: 10.498 G_Regularizer: 0.000 validation_error: 20.709 +(epoch: 41, iters: 113920, time: 0.542, data: 0.000) G_L1: 14.352 G_L1_ABSOLUTE: 2.395 G_L1_RELATIVE: 11.957 G_Regularizer: 0.000 validation_error: 20.966 +(epoch: 41, iters: 115920, time: 0.541, data: 0.000) G_L1: 15.487 G_L1_ABSOLUTE: 2.558 G_L1_RELATIVE: 12.929 G_Regularizer: 0.000 validation_error: 20.606 +(epoch: 41, iters: 117920, time: 0.542, data: 0.001) G_L1: 14.679 G_L1_ABSOLUTE: 2.434 G_L1_RELATIVE: 12.245 G_Regularizer: 0.000 validation_error: 21.258 +(epoch: 41, iters: 119920, time: 0.534, data: 0.000) G_L1: 14.116 G_L1_ABSOLUTE: 2.723 G_L1_RELATIVE: 11.394 G_Regularizer: 0.000 validation_error: 21.093 +(epoch: 41, iters: 121920, time: 0.550, data: 0.001) G_L1: 13.570 G_L1_ABSOLUTE: 2.396 G_L1_RELATIVE: 11.174 G_Regularizer: 0.000 
validation_error: 21.086 +(epoch: 41, iters: 123920, time: 0.549, data: 0.000) G_L1: 12.472 G_L1_ABSOLUTE: 2.378 G_L1_RELATIVE: 10.093 G_Regularizer: 0.000 validation_error: 20.858 +(epoch: 41, iters: 125920, time: 0.545, data: 0.000) G_L1: 12.734 G_L1_ABSOLUTE: 2.725 G_L1_RELATIVE: 10.010 G_Regularizer: 0.000 validation_error: 20.888 +(epoch: 41, iters: 127920, time: 0.538, data: 0.001) G_L1: 14.107 G_L1_ABSOLUTE: 2.609 G_L1_RELATIVE: 11.498 G_Regularizer: 0.000 validation_error: 21.108 +(epoch: 41, iters: 129920, time: 0.533, data: 0.001) G_L1: 13.977 G_L1_ABSOLUTE: 2.252 G_L1_RELATIVE: 11.725 G_Regularizer: 0.000 validation_error: 20.764 +(epoch: 41, iters: 131920, time: 0.537, data: 0.000) G_L1: 14.809 G_L1_ABSOLUTE: 2.817 G_L1_RELATIVE: 11.993 G_Regularizer: 0.000 validation_error: 20.915 +(epoch: 41, iters: 133920, time: 0.544, data: 0.001) G_L1: 13.871 G_L1_ABSOLUTE: 2.355 G_L1_RELATIVE: 11.517 G_Regularizer: 0.000 validation_error: 21.073 +(epoch: 41, iters: 135920, time: 0.541, data: 0.000) G_L1: 15.046 G_L1_ABSOLUTE: 2.563 G_L1_RELATIVE: 12.483 G_Regularizer: 0.000 validation_error: 20.761 +(epoch: 41, iters: 137920, time: 0.539, data: 0.000) G_L1: 13.278 G_L1_ABSOLUTE: 2.595 G_L1_RELATIVE: 10.683 G_Regularizer: 0.000 validation_error: 20.890 +(epoch: 41, iters: 139920, time: 0.541, data: 0.000) G_L1: 14.789 G_L1_ABSOLUTE: 2.509 G_L1_RELATIVE: 12.281 G_Regularizer: 0.000 validation_error: 21.143 +(epoch: 41, iters: 141920, time: 0.546, data: 0.000) G_L1: 14.524 G_L1_ABSOLUTE: 2.325 G_L1_RELATIVE: 12.199 G_Regularizer: 0.000 validation_error: 20.468 +(epoch: 41, iters: 143920, time: 0.539, data: 0.000) G_L1: 14.327 G_L1_ABSOLUTE: 2.805 G_L1_RELATIVE: 11.522 G_Regularizer: 0.000 validation_error: 21.061 +(epoch: 41, iters: 145920, time: 0.541, data: 0.001) G_L1: 12.166 G_L1_ABSOLUTE: 2.512 G_L1_RELATIVE: 9.653 G_Regularizer: 0.000 validation_error: 20.534 +(epoch: 41, iters: 147920, time: 0.540, data: 0.001) G_L1: 13.152 G_L1_ABSOLUTE: 2.506 G_L1_RELATIVE: 10.646 G_Regularizer: 0.000 validation_error: 20.662 +(epoch: 41, iters: 149920, time: 0.539, data: 0.000) G_L1: 13.092 G_L1_ABSOLUTE: 2.489 G_L1_RELATIVE: 10.603 G_Regularizer: 0.000 validation_error: 20.882 +(epoch: 41, iters: 151920, time: 0.544, data: 0.000) G_L1: 14.673 G_L1_ABSOLUTE: 2.445 G_L1_RELATIVE: 12.228 G_Regularizer: 0.000 validation_error: 21.000 +(epoch: 41, iters: 153920, time: 0.538, data: 0.000) G_L1: 13.313 G_L1_ABSOLUTE: 2.423 G_L1_RELATIVE: 10.891 G_Regularizer: 0.000 validation_error: 20.577 +(epoch: 41, iters: 155920, time: 0.548, data: 0.000) G_L1: 12.737 G_L1_ABSOLUTE: 2.336 G_L1_RELATIVE: 10.402 G_Regularizer: 0.000 validation_error: 20.679 +(epoch: 41, iters: 157920, time: 0.537, data: 0.000) G_L1: 13.158 G_L1_ABSOLUTE: 2.168 G_L1_RELATIVE: 10.990 G_Regularizer: 0.000 validation_error: 20.602 +(epoch: 41, iters: 159920, time: 0.541, data: 0.000) G_L1: 10.480 G_L1_ABSOLUTE: 2.154 G_L1_RELATIVE: 8.326 G_Regularizer: 0.000 validation_error: 20.770 +(epoch: 41, iters: 161920, time: 0.540, data: 0.000) G_L1: 15.453 G_L1_ABSOLUTE: 2.885 G_L1_RELATIVE: 12.568 G_Regularizer: 0.000 validation_error: 20.809 +(epoch: 41, iters: 163920, time: 0.543, data: 0.000) G_L1: 14.635 G_L1_ABSOLUTE: 2.249 G_L1_RELATIVE: 12.386 G_Regularizer: 0.000 validation_error: 20.865 +(epoch: 41, iters: 165920, time: 0.540, data: 0.000) G_L1: 16.296 G_L1_ABSOLUTE: 2.389 G_L1_RELATIVE: 13.907 G_Regularizer: 0.000 validation_error: 20.604 +(epoch: 41, iters: 167920, time: 0.544, data: 0.000) G_L1: 12.955 G_L1_ABSOLUTE: 2.604 
G_L1_RELATIVE: 10.351 G_Regularizer: 0.000 validation_error: 20.777 +(epoch: 41, iters: 169920, time: 0.542, data: 0.000) G_L1: 14.811 G_L1_ABSOLUTE: 2.987 G_L1_RELATIVE: 11.824 G_Regularizer: 0.000 validation_error: 20.996 +(epoch: 41, iters: 171920, time: 0.532, data: 0.001) G_L1: 13.791 G_L1_ABSOLUTE: 2.773 G_L1_RELATIVE: 11.018 G_Regularizer: 0.000 validation_error: 20.930 +(epoch: 41, iters: 173920, time: 0.546, data: 0.000) G_L1: 14.836 G_L1_ABSOLUTE: 2.892 G_L1_RELATIVE: 11.944 G_Regularizer: 0.000 validation_error: 21.214 +(epoch: 41, iters: 175920, time: 0.546, data: 0.000) G_L1: 14.719 G_L1_ABSOLUTE: 2.535 G_L1_RELATIVE: 12.184 G_Regularizer: 0.000 validation_error: 20.965 +(epoch: 41, iters: 177920, time: 0.543, data: 0.000) G_L1: 15.604 G_L1_ABSOLUTE: 2.841 G_L1_RELATIVE: 12.763 G_Regularizer: 0.000 validation_error: 21.019 +(epoch: 41, iters: 179920, time: 0.539, data: 0.000) G_L1: 15.723 G_L1_ABSOLUTE: 2.433 G_L1_RELATIVE: 13.291 G_Regularizer: 0.000 validation_error: 20.985 +(epoch: 41, iters: 181920, time: 0.543, data: 0.000) G_L1: 14.724 G_L1_ABSOLUTE: 2.385 G_L1_RELATIVE: 12.339 G_Regularizer: 0.000 validation_error: 21.010 +(epoch: 41, iters: 183920, time: 0.541, data: 0.000) G_L1: 14.425 G_L1_ABSOLUTE: 2.274 G_L1_RELATIVE: 12.151 G_Regularizer: 0.000 validation_error: 20.587 +(epoch: 41, iters: 185920, time: 0.547, data: 0.000) G_L1: 15.123 G_L1_ABSOLUTE: 2.515 G_L1_RELATIVE: 12.608 G_Regularizer: 0.000 validation_error: 20.639 +(epoch: 41, iters: 187920, time: 0.541, data: 0.000) G_L1: 16.249 G_L1_ABSOLUTE: 2.143 G_L1_RELATIVE: 14.106 G_Regularizer: 0.000 validation_error: 21.005 +(epoch: 41, iters: 189920, time: 0.545, data: 0.000) G_L1: 14.550 G_L1_ABSOLUTE: 2.359 G_L1_RELATIVE: 12.192 G_Regularizer: 0.000 validation_error: 21.313 +(epoch: 41, iters: 191920, time: 0.545, data: 0.000) G_L1: 13.311 G_L1_ABSOLUTE: 2.207 G_L1_RELATIVE: 11.104 G_Regularizer: 0.000 validation_error: 21.030 +(epoch: 41, iters: 193920, time: 0.538, data: 0.000) G_L1: 16.444 G_L1_ABSOLUTE: 2.292 G_L1_RELATIVE: 14.153 G_Regularizer: 0.000 validation_error: 20.825 +(epoch: 41, iters: 195920, time: 0.541, data: 0.000) G_L1: 13.255 G_L1_ABSOLUTE: 2.377 G_L1_RELATIVE: 10.877 G_Regularizer: 0.000 validation_error: 20.568 +(epoch: 41, iters: 197920, time: 0.544, data: 0.000) G_L1: 16.000 G_L1_ABSOLUTE: 2.616 G_L1_RELATIVE: 13.385 G_Regularizer: 0.000 validation_error: 20.832 +(epoch: 41, iters: 199920, time: 0.543, data: 0.001) G_L1: 12.830 G_L1_ABSOLUTE: 2.635 G_L1_RELATIVE: 10.194 G_Regularizer: 0.000 validation_error: 20.808 +(epoch: 41, iters: 201920, time: 0.537, data: 0.000) G_L1: 11.620 G_L1_ABSOLUTE: 2.116 G_L1_RELATIVE: 9.504 G_Regularizer: 0.000 validation_error: 20.923 +(epoch: 41, iters: 203920, time: 0.545, data: 0.000) G_L1: 14.841 G_L1_ABSOLUTE: 2.767 G_L1_RELATIVE: 12.074 G_Regularizer: 0.000 validation_error: 20.740 +(epoch: 41, iters: 205920, time: 0.538, data: 0.001) G_L1: 15.624 G_L1_ABSOLUTE: 2.320 G_L1_RELATIVE: 13.305 G_Regularizer: 0.000 validation_error: 21.363 +(epoch: 41, iters: 207920, time: 0.538, data: 0.000) G_L1: 16.028 G_L1_ABSOLUTE: 2.486 G_L1_RELATIVE: 13.542 G_Regularizer: 0.000 validation_error: 20.617 +(epoch: 41, iters: 209920, time: 0.543, data: 0.000) G_L1: 13.495 G_L1_ABSOLUTE: 2.493 G_L1_RELATIVE: 11.002 G_Regularizer: 0.000 validation_error: 21.293 +(epoch: 41, iters: 211920, time: 0.548, data: 0.000) G_L1: 15.630 G_L1_ABSOLUTE: 2.469 G_L1_RELATIVE: 13.162 G_Regularizer: 0.000 validation_error: 20.874 +(epoch: 41, iters: 213920, time: 0.536, data: 0.000) 
G_L1: 14.036 G_L1_ABSOLUTE: 2.114 G_L1_RELATIVE: 11.922 G_Regularizer: 0.000 validation_error: 20.402 +(epoch: 41, iters: 215920, time: 0.538, data: 0.001) G_L1: 14.125 G_L1_ABSOLUTE: 2.827 G_L1_RELATIVE: 11.298 G_Regularizer: 0.000 validation_error: 21.380 +(epoch: 41, iters: 217920, time: 0.537, data: 0.000) G_L1: 11.927 G_L1_ABSOLUTE: 2.317 G_L1_RELATIVE: 9.610 G_Regularizer: 0.000 validation_error: 20.697 +(epoch: 41, iters: 219920, time: 0.540, data: 0.000) G_L1: 14.071 G_L1_ABSOLUTE: 2.184 G_L1_RELATIVE: 11.887 G_Regularizer: 0.000 validation_error: 20.640 +(epoch: 41, iters: 221920, time: 0.547, data: 0.000) G_L1: 16.713 G_L1_ABSOLUTE: 3.001 G_L1_RELATIVE: 13.712 G_Regularizer: 0.000 validation_error: 20.948 +(epoch: 41, iters: 223920, time: 0.538, data: 0.000) G_L1: 14.712 G_L1_ABSOLUTE: 2.756 G_L1_RELATIVE: 11.956 G_Regularizer: 0.000 validation_error: 20.969 +(epoch: 41, iters: 225920, time: 0.539, data: 0.000) G_L1: 15.197 G_L1_ABSOLUTE: 2.752 G_L1_RELATIVE: 12.445 G_Regularizer: 0.000 validation_error: 21.130 +(epoch: 41, iters: 227920, time: 0.533, data: 0.000) G_L1: 13.016 G_L1_ABSOLUTE: 2.374 G_L1_RELATIVE: 10.642 G_Regularizer: 0.000 validation_error: 20.963 +(epoch: 41, iters: 229920, time: 0.535, data: 0.000) G_L1: 14.113 G_L1_ABSOLUTE: 2.476 G_L1_RELATIVE: 11.637 G_Regularizer: 0.000 validation_error: 21.294 +(epoch: 41, iters: 231920, time: 0.532, data: 0.000) G_L1: 12.419 G_L1_ABSOLUTE: 2.596 G_L1_RELATIVE: 9.824 G_Regularizer: 0.000 validation_error: 20.974 +(epoch: 41, iters: 233920, time: 0.547, data: 0.000) G_L1: 12.612 G_L1_ABSOLUTE: 2.595 G_L1_RELATIVE: 10.017 G_Regularizer: 0.000 validation_error: 20.909 +(epoch: 41, iters: 235920, time: 0.542, data: 0.000) G_L1: 12.278 G_L1_ABSOLUTE: 2.330 G_L1_RELATIVE: 9.948 G_Regularizer: 0.000 validation_error: 20.801 +(epoch: 41, iters: 237920, time: 0.548, data: 0.000) G_L1: 16.593 G_L1_ABSOLUTE: 2.698 G_L1_RELATIVE: 13.895 G_Regularizer: 0.000 validation_error: 20.644 +(epoch: 41, iters: 239920, time: 0.543, data: 0.000) G_L1: 15.663 G_L1_ABSOLUTE: 2.962 G_L1_RELATIVE: 12.701 G_Regularizer: 0.000 validation_error: 20.776 +(epoch: 41, iters: 241920, time: 0.541, data: 0.000) G_L1: 13.155 G_L1_ABSOLUTE: 2.092 G_L1_RELATIVE: 11.063 G_Regularizer: 0.000 validation_error: 20.825 +(epoch: 41, iters: 243920, time: 0.544, data: 0.000) G_L1: 17.743 G_L1_ABSOLUTE: 2.744 G_L1_RELATIVE: 14.999 G_Regularizer: 0.000 validation_error: 20.726 +(epoch: 41, iters: 245920, time: 0.540, data: 0.000) G_L1: 14.676 G_L1_ABSOLUTE: 3.065 G_L1_RELATIVE: 11.610 G_Regularizer: 0.000 validation_error: 20.962 +(epoch: 41, iters: 247920, time: 0.531, data: 0.000) G_L1: 15.601 G_L1_ABSOLUTE: 2.466 G_L1_RELATIVE: 13.135 G_Regularizer: 0.000 validation_error: 20.779 +(epoch: 41, iters: 249920, time: 0.535, data: 0.001) G_L1: 11.738 G_L1_ABSOLUTE: 2.413 G_L1_RELATIVE: 9.325 G_Regularizer: 0.000 validation_error: 20.633 +(epoch: 41, iters: 251920, time: 0.546, data: 0.000) G_L1: 13.309 G_L1_ABSOLUTE: 2.403 G_L1_RELATIVE: 10.906 G_Regularizer: 0.000 validation_error: 20.736 +(epoch: 41, iters: 253920, time: 0.547, data: 0.000) G_L1: 13.060 G_L1_ABSOLUTE: 2.286 G_L1_RELATIVE: 10.774 G_Regularizer: 0.000 validation_error: 20.934 +(epoch: 41, iters: 255920, time: 0.547, data: 0.000) G_L1: 12.577 G_L1_ABSOLUTE: 2.263 G_L1_RELATIVE: 10.313 G_Regularizer: 0.000 validation_error: 20.806 +(epoch: 41, iters: 257920, time: 0.538, data: 0.000) G_L1: 12.586 G_L1_ABSOLUTE: 2.592 G_L1_RELATIVE: 9.994 G_Regularizer: 0.000 validation_error: 20.743 +(epoch: 41, iters: 
259920, time: 0.545, data: 0.000) G_L1: 14.196 G_L1_ABSOLUTE: 2.590 G_L1_RELATIVE: 11.606 G_Regularizer: 0.000 validation_error: 20.918 +(epoch: 41, iters: 261920, time: 0.546, data: 0.001) G_L1: 13.981 G_L1_ABSOLUTE: 2.486 G_L1_RELATIVE: 11.494 G_Regularizer: 0.000 validation_error: 20.965 +(epoch: 41, iters: 263920, time: 0.541, data: 0.000) G_L1: 15.267 G_L1_ABSOLUTE: 2.216 G_L1_RELATIVE: 13.050 G_Regularizer: 0.000 validation_error: 20.783 +(epoch: 41, iters: 265920, time: 0.535, data: 0.000) G_L1: 13.087 G_L1_ABSOLUTE: 2.185 G_L1_RELATIVE: 10.902 G_Regularizer: 0.000 validation_error: 20.737 +(epoch: 41, iters: 267920, time: 0.548, data: 0.000) G_L1: 15.162 G_L1_ABSOLUTE: 2.510 G_L1_RELATIVE: 12.652 G_Regularizer: 0.000 validation_error: 20.601 +(epoch: 41, iters: 269920, time: 0.545, data: 0.000) G_L1: 14.791 G_L1_ABSOLUTE: 2.368 G_L1_RELATIVE: 12.424 G_Regularizer: 0.000 validation_error: 20.777 +(epoch: 41, iters: 271920, time: 0.543, data: 0.000) G_L1: 14.261 G_L1_ABSOLUTE: 2.489 G_L1_RELATIVE: 11.773 G_Regularizer: 0.000 validation_error: 20.947 +(epoch: 41, iters: 273920, time: 0.540, data: 0.000) G_L1: 13.169 G_L1_ABSOLUTE: 2.254 G_L1_RELATIVE: 10.915 G_Regularizer: 0.000 validation_error: 20.943 +(epoch: 41, iters: 275920, time: 0.546, data: 0.000) G_L1: 15.720 G_L1_ABSOLUTE: 2.861 G_L1_RELATIVE: 12.859 G_Regularizer: 0.000 validation_error: 20.641 +(epoch: 41, iters: 277920, time: 0.544, data: 0.001) G_L1: 13.598 G_L1_ABSOLUTE: 2.333 G_L1_RELATIVE: 11.265 G_Regularizer: 0.000 validation_error: 21.320 +(epoch: 41, iters: 279920, time: 0.546, data: 0.000) G_L1: 13.220 G_L1_ABSOLUTE: 2.482 G_L1_RELATIVE: 10.738 G_Regularizer: 0.000 validation_error: 21.036 +(epoch: 41, iters: 281920, time: 0.553, data: 0.000) G_L1: 11.818 G_L1_ABSOLUTE: 2.428 G_L1_RELATIVE: 9.391 G_Regularizer: 0.000 validation_error: 21.010 +(epoch: 41, iters: 283920, time: 0.543, data: 0.000) G_L1: 13.965 G_L1_ABSOLUTE: 2.436 G_L1_RELATIVE: 11.529 G_Regularizer: 0.000 validation_error: 20.989 +(epoch: 41, iters: 285920, time: 0.543, data: 0.000) G_L1: 14.259 G_L1_ABSOLUTE: 2.816 G_L1_RELATIVE: 11.443 G_Regularizer: 0.000 validation_error: 20.210 +(epoch: 41, iters: 287920, time: 0.542, data: 0.001) G_L1: 15.088 G_L1_ABSOLUTE: 2.583 G_L1_RELATIVE: 12.505 G_Regularizer: 0.000 validation_error: 20.523 +(epoch: 41, iters: 289920, time: 0.538, data: 0.001) G_L1: 14.560 G_L1_ABSOLUTE: 2.550 G_L1_RELATIVE: 12.010 G_Regularizer: 0.000 validation_error: 20.776 +(epoch: 41, iters: 291920, time: 0.530, data: 0.000) G_L1: 12.680 G_L1_ABSOLUTE: 2.665 G_L1_RELATIVE: 10.015 G_Regularizer: 0.000 validation_error: 21.195 +(epoch: 41, iters: 293920, time: 0.529, data: 0.000) G_L1: 14.786 G_L1_ABSOLUTE: 2.143 G_L1_RELATIVE: 12.643 G_Regularizer: 0.000 validation_error: 20.592 +(epoch: 41, iters: 295920, time: 0.528, data: 0.000) G_L1: 12.972 G_L1_ABSOLUTE: 2.631 G_L1_RELATIVE: 10.342 G_Regularizer: 0.000 validation_error: 20.831 +(epoch: 41, iters: 297920, time: 0.538, data: 0.001) G_L1: 12.231 G_L1_ABSOLUTE: 2.615 G_L1_RELATIVE: 9.616 G_Regularizer: 0.000 validation_error: 20.778 +(epoch: 41, iters: 299920, time: 0.531, data: 0.001) G_L1: 15.068 G_L1_ABSOLUTE: 3.068 G_L1_RELATIVE: 12.000 G_Regularizer: 0.000 validation_error: 20.842 +(epoch: 41, iters: 301920, time: 0.538, data: 0.000) G_L1: 12.908 G_L1_ABSOLUTE: 2.222 G_L1_RELATIVE: 10.686 G_Regularizer: 0.000 validation_error: 21.135 +(epoch: 42, iters: 1168, time: 0.531, data: 0.000) G_L1: 14.898 G_L1_ABSOLUTE: 3.260 G_L1_RELATIVE: 11.639 G_Regularizer: 0.000 
validation_error: 21.053 +(epoch: 42, iters: 3168, time: 0.531, data: 0.001) G_L1: 15.221 G_L1_ABSOLUTE: 2.762 G_L1_RELATIVE: 12.459 G_Regularizer: 0.000 validation_error: 20.907 +(epoch: 42, iters: 5168, time: 0.533, data: 0.001) G_L1: 13.587 G_L1_ABSOLUTE: 2.573 G_L1_RELATIVE: 11.015 G_Regularizer: 0.000 validation_error: 20.834 +(epoch: 42, iters: 7168, time: 0.531, data: 0.001) G_L1: 13.430 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 10.976 G_Regularizer: 0.000 validation_error: 20.492 +(epoch: 42, iters: 9168, time: 0.537, data: 0.000) G_L1: 13.689 G_L1_ABSOLUTE: 2.412 G_L1_RELATIVE: 11.277 G_Regularizer: 0.000 validation_error: 21.145 +(epoch: 42, iters: 11168, time: 0.531, data: 0.000) G_L1: 13.307 G_L1_ABSOLUTE: 2.495 G_L1_RELATIVE: 10.811 G_Regularizer: 0.000 validation_error: 20.508 +(epoch: 42, iters: 13168, time: 0.531, data: 0.000) G_L1: 14.809 G_L1_ABSOLUTE: 2.673 G_L1_RELATIVE: 12.135 G_Regularizer: 0.000 validation_error: 20.649 +(epoch: 42, iters: 15168, time: 0.544, data: 0.000) G_L1: 12.463 G_L1_ABSOLUTE: 2.347 G_L1_RELATIVE: 10.116 G_Regularizer: 0.000 validation_error: 21.055 +(epoch: 42, iters: 17168, time: 0.532, data: 0.001) G_L1: 16.653 G_L1_ABSOLUTE: 2.919 G_L1_RELATIVE: 13.734 G_Regularizer: 0.000 validation_error: 20.716 +(epoch: 42, iters: 19168, time: 0.531, data: 0.001) G_L1: 13.070 G_L1_ABSOLUTE: 2.505 G_L1_RELATIVE: 10.565 G_Regularizer: 0.000 validation_error: 20.881 +(epoch: 42, iters: 21168, time: 0.529, data: 0.000) G_L1: 14.784 G_L1_ABSOLUTE: 2.793 G_L1_RELATIVE: 11.991 G_Regularizer: 0.000 validation_error: 21.078 +(epoch: 42, iters: 23168, time: 0.527, data: 0.000) G_L1: 14.689 G_L1_ABSOLUTE: 2.784 G_L1_RELATIVE: 11.905 G_Regularizer: 0.000 validation_error: 20.825 +(epoch: 42, iters: 25168, time: 0.540, data: 0.000) G_L1: 15.421 G_L1_ABSOLUTE: 2.785 G_L1_RELATIVE: 12.636 G_Regularizer: 0.000 validation_error: 20.679 +(epoch: 42, iters: 27168, time: 0.533, data: 0.000) G_L1: 12.748 G_L1_ABSOLUTE: 2.213 G_L1_RELATIVE: 10.535 G_Regularizer: 0.000 validation_error: 20.655 +(epoch: 42, iters: 29168, time: 0.532, data: 0.000) G_L1: 12.323 G_L1_ABSOLUTE: 2.462 G_L1_RELATIVE: 9.861 G_Regularizer: 0.000 validation_error: 20.561 +(epoch: 42, iters: 31168, time: 0.541, data: 0.000) G_L1: 16.265 G_L1_ABSOLUTE: 2.633 G_L1_RELATIVE: 13.632 G_Regularizer: 0.000 validation_error: 20.547 +(epoch: 42, iters: 33168, time: 0.536, data: 0.000) G_L1: 15.132 G_L1_ABSOLUTE: 2.547 G_L1_RELATIVE: 12.585 G_Regularizer: 0.000 validation_error: 20.740 +(epoch: 42, iters: 35168, time: 0.535, data: 0.000) G_L1: 13.779 G_L1_ABSOLUTE: 2.440 G_L1_RELATIVE: 11.339 G_Regularizer: 0.000 validation_error: 21.196 +(epoch: 42, iters: 37168, time: 0.540, data: 0.001) G_L1: 14.525 G_L1_ABSOLUTE: 2.646 G_L1_RELATIVE: 11.879 G_Regularizer: 0.000 validation_error: 20.928 +(epoch: 42, iters: 39168, time: 0.526, data: 0.000) G_L1: 13.796 G_L1_ABSOLUTE: 2.499 G_L1_RELATIVE: 11.297 G_Regularizer: 0.000 validation_error: 20.857 +(epoch: 42, iters: 41168, time: 0.534, data: 0.000) G_L1: 14.898 G_L1_ABSOLUTE: 2.029 G_L1_RELATIVE: 12.869 G_Regularizer: 0.000 validation_error: 20.560 +(epoch: 42, iters: 43168, time: 0.532, data: 0.001) G_L1: 11.622 G_L1_ABSOLUTE: 2.598 G_L1_RELATIVE: 9.024 G_Regularizer: 0.000 validation_error: 20.952 +(epoch: 42, iters: 45168, time: 0.534, data: 0.000) G_L1: 12.213 G_L1_ABSOLUTE: 2.163 G_L1_RELATIVE: 10.050 G_Regularizer: 0.000 validation_error: 21.198 +(epoch: 42, iters: 47168, time: 0.529, data: 0.000) G_L1: 15.624 G_L1_ABSOLUTE: 2.482 G_L1_RELATIVE: 13.142 G_Regularizer: 
0.000 validation_error: 20.831 +(epoch: 42, iters: 49168, time: 0.531, data: 0.000) G_L1: 16.454 G_L1_ABSOLUTE: 2.238 G_L1_RELATIVE: 14.216 G_Regularizer: 0.000 validation_error: 20.464 +(epoch: 42, iters: 51168, time: 0.537, data: 0.000) G_L1: 24.276 G_L1_ABSOLUTE: 2.892 G_L1_RELATIVE: 21.384 G_Regularizer: 0.000 validation_error: 20.800 +(epoch: 42, iters: 53168, time: 0.532, data: 0.000) G_L1: 15.067 G_L1_ABSOLUTE: 2.696 G_L1_RELATIVE: 12.371 G_Regularizer: 0.000 validation_error: 20.690 +(epoch: 42, iters: 55168, time: 0.536, data: 0.000) G_L1: 16.208 G_L1_ABSOLUTE: 2.264 G_L1_RELATIVE: 13.944 G_Regularizer: 0.000 validation_error: 20.967 +(epoch: 42, iters: 57168, time: 0.532, data: 0.000) G_L1: 14.603 G_L1_ABSOLUTE: 2.669 G_L1_RELATIVE: 11.933 G_Regularizer: 0.000 validation_error: 21.144 +(epoch: 42, iters: 59168, time: 0.531, data: 0.000) G_L1: 17.747 G_L1_ABSOLUTE: 2.259 G_L1_RELATIVE: 15.488 G_Regularizer: 0.000 validation_error: 20.725 +(epoch: 42, iters: 61168, time: 0.534, data: 0.001) G_L1: 12.610 G_L1_ABSOLUTE: 2.378 G_L1_RELATIVE: 10.232 G_Regularizer: 0.000 validation_error: 20.825 +(epoch: 42, iters: 63168, time: 0.531, data: 0.000) G_L1: 15.618 G_L1_ABSOLUTE: 2.413 G_L1_RELATIVE: 13.205 G_Regularizer: 0.000 validation_error: 20.886 +(epoch: 42, iters: 65168, time: 0.534, data: 0.000) G_L1: 13.746 G_L1_ABSOLUTE: 2.204 G_L1_RELATIVE: 11.542 G_Regularizer: 0.000 validation_error: 20.819 +(epoch: 42, iters: 67168, time: 0.533, data: 0.000) G_L1: 13.690 G_L1_ABSOLUTE: 2.490 G_L1_RELATIVE: 11.201 G_Regularizer: 0.000 validation_error: 20.837 +(epoch: 42, iters: 69168, time: 0.532, data: 0.000) G_L1: 14.227 G_L1_ABSOLUTE: 2.462 G_L1_RELATIVE: 11.764 G_Regularizer: 0.000 validation_error: 20.709 +(epoch: 42, iters: 71168, time: 0.530, data: 0.000) G_L1: 11.916 G_L1_ABSOLUTE: 2.613 G_L1_RELATIVE: 9.303 G_Regularizer: 0.000 validation_error: 20.973 +(epoch: 42, iters: 73168, time: 0.543, data: 0.000) G_L1: 14.582 G_L1_ABSOLUTE: 2.620 G_L1_RELATIVE: 11.962 G_Regularizer: 0.000 validation_error: 21.435 +(epoch: 42, iters: 75168, time: 0.535, data: 0.000) G_L1: 11.818 G_L1_ABSOLUTE: 2.541 G_L1_RELATIVE: 9.277 G_Regularizer: 0.000 validation_error: 20.875 +(epoch: 42, iters: 77168, time: 0.530, data: 0.000) G_L1: 13.539 G_L1_ABSOLUTE: 2.305 G_L1_RELATIVE: 11.234 G_Regularizer: 0.000 validation_error: 20.995 +(epoch: 42, iters: 79168, time: 0.533, data: 0.000) G_L1: 13.848 G_L1_ABSOLUTE: 2.305 G_L1_RELATIVE: 11.543 G_Regularizer: 0.000 validation_error: 20.962 +(epoch: 42, iters: 81168, time: 0.530, data: 0.000) G_L1: 13.983 G_L1_ABSOLUTE: 2.908 G_L1_RELATIVE: 11.074 G_Regularizer: 0.000 validation_error: 20.759 +(epoch: 42, iters: 83168, time: 0.531, data: 0.000) G_L1: 14.572 G_L1_ABSOLUTE: 2.507 G_L1_RELATIVE: 12.066 G_Regularizer: 0.000 validation_error: 20.920 +(epoch: 42, iters: 85168, time: 0.534, data: 0.000) G_L1: 12.953 G_L1_ABSOLUTE: 2.362 G_L1_RELATIVE: 10.591 G_Regularizer: 0.000 validation_error: 20.667 +(epoch: 42, iters: 87168, time: 0.537, data: 0.001) G_L1: 17.711 G_L1_ABSOLUTE: 2.592 G_L1_RELATIVE: 15.119 G_Regularizer: 0.000 validation_error: 20.995 +(epoch: 42, iters: 89168, time: 0.532, data: 0.000) G_L1: 13.802 G_L1_ABSOLUTE: 2.610 G_L1_RELATIVE: 11.192 G_Regularizer: 0.000 validation_error: 20.903 +(epoch: 42, iters: 91168, time: 0.535, data: 0.000) G_L1: 11.532 G_L1_ABSOLUTE: 2.116 G_L1_RELATIVE: 9.416 G_Regularizer: 0.000 validation_error: 21.163 +(epoch: 42, iters: 93168, time: 0.539, data: 0.000) G_L1: 15.567 G_L1_ABSOLUTE: 2.933 G_L1_RELATIVE: 12.634 
G_Regularizer: 0.000 validation_error: 20.611 +(epoch: 42, iters: 95168, time: 0.531, data: 0.001) G_L1: 15.912 G_L1_ABSOLUTE: 2.703 G_L1_RELATIVE: 13.210 G_Regularizer: 0.000 validation_error: 20.876 +(epoch: 42, iters: 97168, time: 0.535, data: 0.000) G_L1: 11.685 G_L1_ABSOLUTE: 2.273 G_L1_RELATIVE: 9.412 G_Regularizer: 0.000 validation_error: 20.996 +(epoch: 42, iters: 99168, time: 0.531, data: 0.000) G_L1: 15.493 G_L1_ABSOLUTE: 3.470 G_L1_RELATIVE: 12.023 G_Regularizer: 0.000 validation_error: 20.968 +(epoch: 42, iters: 101168, time: 0.528, data: 0.000) G_L1: 14.381 G_L1_ABSOLUTE: 2.266 G_L1_RELATIVE: 12.115 G_Regularizer: 0.000 validation_error: 20.898 +(epoch: 42, iters: 103168, time: 0.537, data: 0.000) G_L1: 11.017 G_L1_ABSOLUTE: 2.265 G_L1_RELATIVE: 8.752 G_Regularizer: 0.000 validation_error: 20.761 +(epoch: 42, iters: 105168, time: 0.532, data: 0.000) G_L1: 12.457 G_L1_ABSOLUTE: 2.651 G_L1_RELATIVE: 9.805 G_Regularizer: 0.000 validation_error: 21.221 +(epoch: 42, iters: 107168, time: 0.530, data: 0.000) G_L1: 13.843 G_L1_ABSOLUTE: 2.045 G_L1_RELATIVE: 11.798 G_Regularizer: 0.000 validation_error: 21.109 +(epoch: 42, iters: 109168, time: 0.529, data: 0.000) G_L1: 12.759 G_L1_ABSOLUTE: 2.396 G_L1_RELATIVE: 10.363 G_Regularizer: 0.000 validation_error: 20.935 +(epoch: 42, iters: 111168, time: 0.536, data: 0.001) G_L1: 13.539 G_L1_ABSOLUTE: 2.504 G_L1_RELATIVE: 11.035 G_Regularizer: 0.000 validation_error: 21.256 +(epoch: 42, iters: 113168, time: 0.543, data: 0.001) G_L1: 16.596 G_L1_ABSOLUTE: 2.692 G_L1_RELATIVE: 13.904 G_Regularizer: 0.000 validation_error: 20.728 +(epoch: 42, iters: 115168, time: 0.535, data: 0.000) G_L1: 13.268 G_L1_ABSOLUTE: 2.834 G_L1_RELATIVE: 10.434 G_Regularizer: 0.000 validation_error: 21.127 +(epoch: 42, iters: 117168, time: 0.534, data: 0.000) G_L1: 15.633 G_L1_ABSOLUTE: 2.717 G_L1_RELATIVE: 12.916 G_Regularizer: 0.000 validation_error: 21.236 +(epoch: 42, iters: 119168, time: 0.536, data: 0.000) G_L1: 14.495 G_L1_ABSOLUTE: 2.701 G_L1_RELATIVE: 11.794 G_Regularizer: 0.000 validation_error: 20.746 +(epoch: 42, iters: 121168, time: 0.539, data: 0.000) G_L1: 14.744 G_L1_ABSOLUTE: 2.457 G_L1_RELATIVE: 12.287 G_Regularizer: 0.000 validation_error: 20.786 +(epoch: 42, iters: 123168, time: 0.536, data: 0.000) G_L1: 13.618 G_L1_ABSOLUTE: 2.255 G_L1_RELATIVE: 11.363 G_Regularizer: 0.000 validation_error: 20.840 +(epoch: 42, iters: 125168, time: 0.537, data: 0.000) G_L1: 15.296 G_L1_ABSOLUTE: 2.450 G_L1_RELATIVE: 12.846 G_Regularizer: 0.000 validation_error: 20.784 +(epoch: 42, iters: 127168, time: 0.539, data: 0.000) G_L1: 14.226 G_L1_ABSOLUTE: 3.105 G_L1_RELATIVE: 11.121 G_Regularizer: 0.000 validation_error: 21.070 +(epoch: 42, iters: 129168, time: 0.535, data: 0.000) G_L1: 12.424 G_L1_ABSOLUTE: 2.550 G_L1_RELATIVE: 9.873 G_Regularizer: 0.000 validation_error: 21.070 +(epoch: 42, iters: 131168, time: 0.535, data: 0.000) G_L1: 15.546 G_L1_ABSOLUTE: 2.198 G_L1_RELATIVE: 13.347 G_Regularizer: 0.000 validation_error: 20.971 +(epoch: 42, iters: 133168, time: 0.534, data: 0.000) G_L1: 11.841 G_L1_ABSOLUTE: 2.184 G_L1_RELATIVE: 9.657 G_Regularizer: 0.000 validation_error: 20.994 +(epoch: 42, iters: 135168, time: 0.541, data: 0.000) G_L1: 14.553 G_L1_ABSOLUTE: 2.785 G_L1_RELATIVE: 11.768 G_Regularizer: 0.000 validation_error: 20.985 +(epoch: 42, iters: 137168, time: 0.532, data: 0.000) G_L1: 12.794 G_L1_ABSOLUTE: 2.530 G_L1_RELATIVE: 10.265 G_Regularizer: 0.000 validation_error: 20.961 +(epoch: 42, iters: 139168, time: 0.532, data: 0.000) G_L1: 12.897 G_L1_ABSOLUTE: 
2.615 G_L1_RELATIVE: 10.282 G_Regularizer: 0.000 validation_error: 20.951
[loss log compressed: several hundred per-iteration training records, one diff line per record, spanning epoch 42 (iters 141168-301168) through epoch 45 (iters 912-186912), each of the form
+(epoch: E, iters: I, time: T, data: D) G_L1: ... G_L1_ABSOLUTE: ... G_L1_RELATIVE: ... G_Regularizer: 0.000 validation_error: ...
where iters advances by 2000 per record. Over this span G_L1 fluctuates roughly between 10 and 18 with isolated spikes up to ~24, G_Regularizer is 0.000 throughout, and validation_error stays flat within roughly 20.3-21.5.]
+(epoch: 45, iters: 188912, time: 0.545, data: 0.000) G_L1: 14.098 G_L1_ABSOLUTE: 2.920
G_L1_RELATIVE: 11.178 G_Regularizer: 0.000 validation_error: 20.911 +(epoch: 45, iters: 190912, time: 0.529, data: 0.000) G_L1: 14.718 G_L1_ABSOLUTE: 2.954 G_L1_RELATIVE: 11.764 G_Regularizer: 0.000 validation_error: 20.850 +(epoch: 45, iters: 192912, time: 0.541, data: 0.000) G_L1: 15.457 G_L1_ABSOLUTE: 2.716 G_L1_RELATIVE: 12.741 G_Regularizer: 0.000 validation_error: 21.023 +(epoch: 45, iters: 194912, time: 0.559, data: 0.000) G_L1: 13.891 G_L1_ABSOLUTE: 2.592 G_L1_RELATIVE: 11.298 G_Regularizer: 0.000 validation_error: 20.662 +(epoch: 45, iters: 196912, time: 0.544, data: 0.000) G_L1: 12.764 G_L1_ABSOLUTE: 2.551 G_L1_RELATIVE: 10.213 G_Regularizer: 0.000 validation_error: 21.146 +(epoch: 45, iters: 198912, time: 0.552, data: 0.000) G_L1: 13.252 G_L1_ABSOLUTE: 2.681 G_L1_RELATIVE: 10.571 G_Regularizer: 0.000 validation_error: 20.953 +(epoch: 45, iters: 200912, time: 0.555, data: 0.000) G_L1: 13.043 G_L1_ABSOLUTE: 2.666 G_L1_RELATIVE: 10.376 G_Regularizer: 0.000 validation_error: 21.076 +(epoch: 45, iters: 202912, time: 0.565, data: 0.000) G_L1: 14.355 G_L1_ABSOLUTE: 2.178 G_L1_RELATIVE: 12.177 G_Regularizer: 0.000 validation_error: 20.969 +(epoch: 45, iters: 204912, time: 0.537, data: 0.000) G_L1: 14.718 G_L1_ABSOLUTE: 2.462 G_L1_RELATIVE: 12.256 G_Regularizer: 0.000 validation_error: 20.837 +(epoch: 45, iters: 206912, time: 0.558, data: 0.000) G_L1: 12.417 G_L1_ABSOLUTE: 2.207 G_L1_RELATIVE: 10.210 G_Regularizer: 0.000 validation_error: 21.011 +(epoch: 45, iters: 208912, time: 0.535, data: 0.000) G_L1: 13.597 G_L1_ABSOLUTE: 2.475 G_L1_RELATIVE: 11.122 G_Regularizer: 0.000 validation_error: 20.974 +(epoch: 45, iters: 210912, time: 0.543, data: 0.000) G_L1: 13.592 G_L1_ABSOLUTE: 2.345 G_L1_RELATIVE: 11.247 G_Regularizer: 0.000 validation_error: 20.889 +(epoch: 45, iters: 212912, time: 0.538, data: 0.000) G_L1: 13.934 G_L1_ABSOLUTE: 2.375 G_L1_RELATIVE: 11.559 G_Regularizer: 0.000 validation_error: 21.025 +(epoch: 45, iters: 214912, time: 0.560, data: 0.000) G_L1: 14.406 G_L1_ABSOLUTE: 2.197 G_L1_RELATIVE: 12.209 G_Regularizer: 0.000 validation_error: 20.963 +(epoch: 45, iters: 216912, time: 0.546, data: 0.000) G_L1: 14.902 G_L1_ABSOLUTE: 2.740 G_L1_RELATIVE: 12.162 G_Regularizer: 0.000 validation_error: 21.038 +(epoch: 45, iters: 218912, time: 0.552, data: 0.000) G_L1: 13.231 G_L1_ABSOLUTE: 2.238 G_L1_RELATIVE: 10.993 G_Regularizer: 0.000 validation_error: 20.905 +(epoch: 45, iters: 220912, time: 0.555, data: 0.000) G_L1: 14.574 G_L1_ABSOLUTE: 2.387 G_L1_RELATIVE: 12.188 G_Regularizer: 0.000 validation_error: 20.837 +(epoch: 45, iters: 222912, time: 0.558, data: 0.000) G_L1: 13.484 G_L1_ABSOLUTE: 2.647 G_L1_RELATIVE: 10.837 G_Regularizer: 0.000 validation_error: 20.931 +(epoch: 45, iters: 224912, time: 0.538, data: 0.001) G_L1: 13.402 G_L1_ABSOLUTE: 2.285 G_L1_RELATIVE: 11.116 G_Regularizer: 0.000 validation_error: 21.026 +(epoch: 45, iters: 226912, time: 0.536, data: 0.000) G_L1: 13.634 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 11.179 G_Regularizer: 0.000 validation_error: 20.888 +(epoch: 45, iters: 228912, time: 0.558, data: 0.000) G_L1: 14.295 G_L1_ABSOLUTE: 2.634 G_L1_RELATIVE: 11.661 G_Regularizer: 0.000 validation_error: 20.833 +(epoch: 45, iters: 230912, time: 0.542, data: 0.000) G_L1: 15.519 G_L1_ABSOLUTE: 2.535 G_L1_RELATIVE: 12.984 G_Regularizer: 0.000 validation_error: 20.767 +(epoch: 45, iters: 232912, time: 0.544, data: 0.000) G_L1: 13.650 G_L1_ABSOLUTE: 2.282 G_L1_RELATIVE: 11.368 G_Regularizer: 0.000 validation_error: 21.155 +(epoch: 45, iters: 234912, time: 0.549, data: 0.000) 
G_L1: 16.477 G_L1_ABSOLUTE: 3.072 G_L1_RELATIVE: 13.405 G_Regularizer: 0.000 validation_error: 20.811 +(epoch: 45, iters: 236912, time: 0.542, data: 0.000) G_L1: 15.186 G_L1_ABSOLUTE: 2.572 G_L1_RELATIVE: 12.614 G_Regularizer: 0.000 validation_error: 20.755 +(epoch: 45, iters: 238912, time: 0.540, data: 0.000) G_L1: 12.063 G_L1_ABSOLUTE: 2.262 G_L1_RELATIVE: 9.800 G_Regularizer: 0.000 validation_error: 21.243 +(epoch: 45, iters: 240912, time: 0.544, data: 0.000) G_L1: 13.855 G_L1_ABSOLUTE: 2.261 G_L1_RELATIVE: 11.594 G_Regularizer: 0.000 validation_error: 20.679 +(epoch: 45, iters: 242912, time: 0.549, data: 0.000) G_L1: 14.809 G_L1_ABSOLUTE: 2.801 G_L1_RELATIVE: 12.008 G_Regularizer: 0.000 validation_error: 20.826 +(epoch: 45, iters: 244912, time: 0.544, data: 0.000) G_L1: 15.088 G_L1_ABSOLUTE: 2.434 G_L1_RELATIVE: 12.654 G_Regularizer: 0.000 validation_error: 20.604 +(epoch: 45, iters: 246912, time: 0.544, data: 0.000) G_L1: 13.458 G_L1_ABSOLUTE: 2.312 G_L1_RELATIVE: 11.146 G_Regularizer: 0.000 validation_error: 20.959 +(epoch: 45, iters: 248912, time: 0.527, data: 0.000) G_L1: 12.984 G_L1_ABSOLUTE: 2.859 G_L1_RELATIVE: 10.126 G_Regularizer: 0.000 validation_error: 20.954 +(epoch: 45, iters: 250912, time: 0.541, data: 0.000) G_L1: 14.836 G_L1_ABSOLUTE: 2.780 G_L1_RELATIVE: 12.056 G_Regularizer: 0.000 validation_error: 20.647 +(epoch: 45, iters: 252912, time: 0.526, data: 0.000) G_L1: 15.372 G_L1_ABSOLUTE: 2.773 G_L1_RELATIVE: 12.599 G_Regularizer: 0.000 validation_error: 21.062 +(epoch: 45, iters: 254912, time: 0.551, data: 0.000) G_L1: 15.427 G_L1_ABSOLUTE: 3.341 G_L1_RELATIVE: 12.086 G_Regularizer: 0.000 validation_error: 20.957 +(epoch: 45, iters: 256912, time: 0.552, data: 0.000) G_L1: 14.768 G_L1_ABSOLUTE: 2.920 G_L1_RELATIVE: 11.848 G_Regularizer: 0.000 validation_error: 20.996 +(epoch: 45, iters: 258912, time: 0.536, data: 0.000) G_L1: 15.252 G_L1_ABSOLUTE: 2.477 G_L1_RELATIVE: 12.775 G_Regularizer: 0.000 validation_error: 20.969 +(epoch: 45, iters: 260912, time: 0.553, data: 0.000) G_L1: 11.715 G_L1_ABSOLUTE: 2.337 G_L1_RELATIVE: 9.379 G_Regularizer: 0.000 validation_error: 20.698 +(epoch: 45, iters: 262912, time: 0.556, data: 0.000) G_L1: 14.512 G_L1_ABSOLUTE: 3.017 G_L1_RELATIVE: 11.495 G_Regularizer: 0.000 validation_error: 20.700 +(epoch: 45, iters: 264912, time: 0.535, data: 0.000) G_L1: 13.746 G_L1_ABSOLUTE: 2.006 G_L1_RELATIVE: 11.740 G_Regularizer: 0.000 validation_error: 20.944 +(epoch: 45, iters: 266912, time: 0.548, data: 0.000) G_L1: 16.370 G_L1_ABSOLUTE: 2.886 G_L1_RELATIVE: 13.484 G_Regularizer: 0.000 validation_error: 20.845 +(epoch: 45, iters: 268912, time: 0.555, data: 0.001) G_L1: 14.656 G_L1_ABSOLUTE: 2.702 G_L1_RELATIVE: 11.954 G_Regularizer: 0.000 validation_error: 20.656 +(epoch: 45, iters: 270912, time: 0.549, data: 0.000) G_L1: 10.988 G_L1_ABSOLUTE: 2.283 G_L1_RELATIVE: 8.705 G_Regularizer: 0.000 validation_error: 21.072 +(epoch: 45, iters: 272912, time: 0.538, data: 0.000) G_L1: 14.963 G_L1_ABSOLUTE: 2.398 G_L1_RELATIVE: 12.566 G_Regularizer: 0.000 validation_error: 20.968 +(epoch: 45, iters: 274912, time: 0.545, data: 0.000) G_L1: 16.807 G_L1_ABSOLUTE: 2.512 G_L1_RELATIVE: 14.295 G_Regularizer: 0.000 validation_error: 21.297 +(epoch: 45, iters: 276912, time: 0.538, data: 0.000) G_L1: 13.994 G_L1_ABSOLUTE: 2.622 G_L1_RELATIVE: 11.372 G_Regularizer: 0.000 validation_error: 21.251 +(epoch: 45, iters: 278912, time: 0.559, data: 0.000) G_L1: 17.085 G_L1_ABSOLUTE: 2.708 G_L1_RELATIVE: 14.377 G_Regularizer: 0.000 validation_error: 21.353 +(epoch: 45, iters: 
280912, time: 0.555, data: 0.001) G_L1: 14.177 G_L1_ABSOLUTE: 2.142 G_L1_RELATIVE: 12.034 G_Regularizer: 0.000 validation_error: 20.624 +(epoch: 45, iters: 282912, time: 0.541, data: 0.000) G_L1: 12.332 G_L1_ABSOLUTE: 2.509 G_L1_RELATIVE: 9.823 G_Regularizer: 0.000 validation_error: 21.077 +(epoch: 45, iters: 284912, time: 0.534, data: 0.000) G_L1: 14.091 G_L1_ABSOLUTE: 2.139 G_L1_RELATIVE: 11.951 G_Regularizer: 0.000 validation_error: 21.042 +(epoch: 45, iters: 286912, time: 0.548, data: 0.000) G_L1: 14.436 G_L1_ABSOLUTE: 2.676 G_L1_RELATIVE: 11.761 G_Regularizer: 0.000 validation_error: 20.703 +(epoch: 45, iters: 288912, time: 0.559, data: 0.001) G_L1: 14.049 G_L1_ABSOLUTE: 2.724 G_L1_RELATIVE: 11.325 G_Regularizer: 0.000 validation_error: 20.906 +(epoch: 45, iters: 290912, time: 0.536, data: 0.000) G_L1: 14.785 G_L1_ABSOLUTE: 2.681 G_L1_RELATIVE: 12.104 G_Regularizer: 0.000 validation_error: 21.153 +(epoch: 45, iters: 292912, time: 0.560, data: 0.000) G_L1: 14.719 G_L1_ABSOLUTE: 2.355 G_L1_RELATIVE: 12.364 G_Regularizer: 0.000 validation_error: 20.808 +(epoch: 45, iters: 294912, time: 0.552, data: 0.000) G_L1: 13.824 G_L1_ABSOLUTE: 2.706 G_L1_RELATIVE: 11.118 G_Regularizer: 0.000 validation_error: 21.014 +(epoch: 45, iters: 296912, time: 0.557, data: 0.000) G_L1: 13.322 G_L1_ABSOLUTE: 2.711 G_L1_RELATIVE: 10.610 G_Regularizer: 0.000 validation_error: 20.630 +(epoch: 45, iters: 298912, time: 0.533, data: 0.000) G_L1: 12.308 G_L1_ABSOLUTE: 2.373 G_L1_RELATIVE: 9.935 G_Regularizer: 0.000 validation_error: 20.860 +(epoch: 45, iters: 300912, time: 0.548, data: 0.000) G_L1: 13.233 G_L1_ABSOLUTE: 2.283 G_L1_RELATIVE: 10.950 G_Regularizer: 0.000 validation_error: 20.959 +(epoch: 46, iters: 160, time: 0.546, data: 0.000) G_L1: 12.205 G_L1_ABSOLUTE: 2.495 G_L1_RELATIVE: 9.709 G_Regularizer: 0.000 validation_error: 20.883 +(epoch: 46, iters: 2160, time: 0.553, data: 0.000) G_L1: 15.929 G_L1_ABSOLUTE: 2.362 G_L1_RELATIVE: 13.567 G_Regularizer: 0.000 validation_error: 20.650 +(epoch: 46, iters: 4160, time: 0.535, data: 0.000) G_L1: 11.617 G_L1_ABSOLUTE: 2.253 G_L1_RELATIVE: 9.364 G_Regularizer: 0.000 validation_error: 20.588 +(epoch: 46, iters: 6160, time: 0.556, data: 0.001) G_L1: 14.480 G_L1_ABSOLUTE: 3.309 G_L1_RELATIVE: 11.171 G_Regularizer: 0.000 validation_error: 21.056 +(epoch: 46, iters: 8160, time: 0.545, data: 0.000) G_L1: 10.192 G_L1_ABSOLUTE: 2.131 G_L1_RELATIVE: 8.061 G_Regularizer: 0.000 validation_error: 21.174 +(epoch: 46, iters: 10160, time: 0.558, data: 0.000) G_L1: 14.256 G_L1_ABSOLUTE: 2.127 G_L1_RELATIVE: 12.130 G_Regularizer: 0.000 validation_error: 20.474 +(epoch: 46, iters: 12160, time: 0.547, data: 0.000) G_L1: 14.516 G_L1_ABSOLUTE: 2.773 G_L1_RELATIVE: 11.743 G_Regularizer: 0.000 validation_error: 20.864 +(epoch: 46, iters: 14160, time: 0.548, data: 0.000) G_L1: 15.049 G_L1_ABSOLUTE: 2.365 G_L1_RELATIVE: 12.684 G_Regularizer: 0.000 validation_error: 20.625 +(epoch: 46, iters: 16160, time: 0.547, data: 0.001) G_L1: 13.996 G_L1_ABSOLUTE: 2.256 G_L1_RELATIVE: 11.739 G_Regularizer: 0.000 validation_error: 21.088 +(epoch: 46, iters: 18160, time: 0.555, data: 0.000) G_L1: 12.049 G_L1_ABSOLUTE: 2.536 G_L1_RELATIVE: 9.513 G_Regularizer: 0.000 validation_error: 20.817 +(epoch: 46, iters: 20160, time: 0.565, data: 0.000) G_L1: 14.241 G_L1_ABSOLUTE: 2.366 G_L1_RELATIVE: 11.875 G_Regularizer: 0.000 validation_error: 21.029 +(epoch: 46, iters: 22160, time: 0.533, data: 0.000) G_L1: 14.137 G_L1_ABSOLUTE: 2.444 G_L1_RELATIVE: 11.692 G_Regularizer: 0.000 validation_error: 20.766 +(epoch: 
46, iters: 24160, time: 0.556, data: 0.000) G_L1: 14.732 G_L1_ABSOLUTE: 2.414 G_L1_RELATIVE: 12.318 G_Regularizer: 0.000 validation_error: 20.957 +(epoch: 46, iters: 26160, time: 0.556, data: 0.000) G_L1: 13.747 G_L1_ABSOLUTE: 2.566 G_L1_RELATIVE: 11.180 G_Regularizer: 0.000 validation_error: 20.637 +(epoch: 46, iters: 28160, time: 0.566, data: 0.000) G_L1: 15.750 G_L1_ABSOLUTE: 2.433 G_L1_RELATIVE: 13.317 G_Regularizer: 0.000 validation_error: 20.781 +(epoch: 46, iters: 30160, time: 0.532, data: 0.000) G_L1: 25.271 G_L1_ABSOLUTE: 2.881 G_L1_RELATIVE: 22.390 G_Regularizer: 0.000 validation_error: 20.710 +(epoch: 46, iters: 32160, time: 0.556, data: 0.000) G_L1: 14.843 G_L1_ABSOLUTE: 2.430 G_L1_RELATIVE: 12.413 G_Regularizer: 0.000 validation_error: 21.285 +(epoch: 46, iters: 34160, time: 0.555, data: 0.000) G_L1: 13.749 G_L1_ABSOLUTE: 2.953 G_L1_RELATIVE: 10.797 G_Regularizer: 0.000 validation_error: 20.968 +(epoch: 46, iters: 36160, time: 0.544, data: 0.000) G_L1: 14.151 G_L1_ABSOLUTE: 2.894 G_L1_RELATIVE: 11.258 G_Regularizer: 0.000 validation_error: 20.933 +(epoch: 46, iters: 38160, time: 0.552, data: 0.000) G_L1: 14.118 G_L1_ABSOLUTE: 2.383 G_L1_RELATIVE: 11.735 G_Regularizer: 0.000 validation_error: 20.854 +(epoch: 46, iters: 40160, time: 0.559, data: 0.000) G_L1: 16.428 G_L1_ABSOLUTE: 2.354 G_L1_RELATIVE: 14.075 G_Regularizer: 0.000 validation_error: 20.967 +(epoch: 46, iters: 42160, time: 0.550, data: 0.000) G_L1: 14.367 G_L1_ABSOLUTE: 2.628 G_L1_RELATIVE: 11.739 G_Regularizer: 0.000 validation_error: 20.955 +(epoch: 46, iters: 44160, time: 0.534, data: 0.000) G_L1: 14.633 G_L1_ABSOLUTE: 2.413 G_L1_RELATIVE: 12.220 G_Regularizer: 0.000 validation_error: 20.939 +(epoch: 46, iters: 46160, time: 0.534, data: 0.000) G_L1: 14.090 G_L1_ABSOLUTE: 2.537 G_L1_RELATIVE: 11.553 G_Regularizer: 0.000 validation_error: 20.788 +(epoch: 46, iters: 48160, time: 0.544, data: 0.000) G_L1: 11.956 G_L1_ABSOLUTE: 2.245 G_L1_RELATIVE: 9.711 G_Regularizer: 0.000 validation_error: 20.656 +(epoch: 46, iters: 50160, time: 0.552, data: 0.000) G_L1: 13.873 G_L1_ABSOLUTE: 2.403 G_L1_RELATIVE: 11.470 G_Regularizer: 0.000 validation_error: 20.763 +(epoch: 46, iters: 52160, time: 0.542, data: 0.000) G_L1: 11.227 G_L1_ABSOLUTE: 2.296 G_L1_RELATIVE: 8.930 G_Regularizer: 0.000 validation_error: 20.846 +(epoch: 46, iters: 54160, time: 0.549, data: 0.000) G_L1: 13.779 G_L1_ABSOLUTE: 2.202 G_L1_RELATIVE: 11.577 G_Regularizer: 0.000 validation_error: 20.968 +(epoch: 46, iters: 56160, time: 0.554, data: 0.000) G_L1: 13.107 G_L1_ABSOLUTE: 2.196 G_L1_RELATIVE: 10.911 G_Regularizer: 0.000 validation_error: 20.868 +(epoch: 46, iters: 58160, time: 0.553, data: 0.000) G_L1: 14.125 G_L1_ABSOLUTE: 2.971 G_L1_RELATIVE: 11.153 G_Regularizer: 0.000 validation_error: 20.899 +(epoch: 46, iters: 60160, time: 0.532, data: 0.000) G_L1: 17.177 G_L1_ABSOLUTE: 2.618 G_L1_RELATIVE: 14.559 G_Regularizer: 0.000 validation_error: 20.812 +(epoch: 46, iters: 62160, time: 0.550, data: 0.000) G_L1: 13.997 G_L1_ABSOLUTE: 2.146 G_L1_RELATIVE: 11.852 G_Regularizer: 0.000 validation_error: 20.569 +(epoch: 46, iters: 64160, time: 0.541, data: 0.000) G_L1: 13.882 G_L1_ABSOLUTE: 2.179 G_L1_RELATIVE: 11.703 G_Regularizer: 0.000 validation_error: 20.878 +(epoch: 46, iters: 66160, time: 0.553, data: 0.001) G_L1: 15.299 G_L1_ABSOLUTE: 2.868 G_L1_RELATIVE: 12.431 G_Regularizer: 0.000 validation_error: 20.820 +(epoch: 46, iters: 68160, time: 0.551, data: 0.000) G_L1: 12.648 G_L1_ABSOLUTE: 2.501 G_L1_RELATIVE: 10.147 G_Regularizer: 0.000 validation_error: 20.932 
+(epoch: 46, iters: 70160, time: 0.554, data: 0.000) G_L1: 14.140 G_L1_ABSOLUTE: 2.184 G_L1_RELATIVE: 11.956 G_Regularizer: 0.000 validation_error: 20.836 +(epoch: 46, iters: 72160, time: 0.550, data: 0.000) G_L1: 15.911 G_L1_ABSOLUTE: 2.952 G_L1_RELATIVE: 12.960 G_Regularizer: 0.000 validation_error: 20.775 +(epoch: 46, iters: 74160, time: 0.558, data: 0.000) G_L1: 14.518 G_L1_ABSOLUTE: 2.647 G_L1_RELATIVE: 11.871 G_Regularizer: 0.000 validation_error: 21.132 +(epoch: 46, iters: 76160, time: 0.555, data: 0.000) G_L1: 14.460 G_L1_ABSOLUTE: 2.728 G_L1_RELATIVE: 11.733 G_Regularizer: 0.000 validation_error: 20.789 +(epoch: 46, iters: 78160, time: 0.549, data: 0.000) G_L1: 13.665 G_L1_ABSOLUTE: 2.350 G_L1_RELATIVE: 11.314 G_Regularizer: 0.000 validation_error: 20.926 +(epoch: 46, iters: 80160, time: 0.534, data: 0.000) G_L1: 14.599 G_L1_ABSOLUTE: 2.837 G_L1_RELATIVE: 11.762 G_Regularizer: 0.000 validation_error: 20.742 +(epoch: 46, iters: 82160, time: 0.562, data: 0.000) G_L1: 13.851 G_L1_ABSOLUTE: 2.481 G_L1_RELATIVE: 11.370 G_Regularizer: 0.000 validation_error: 20.887 +(epoch: 46, iters: 84160, time: 0.554, data: 0.000) G_L1: 16.545 G_L1_ABSOLUTE: 2.398 G_L1_RELATIVE: 14.147 G_Regularizer: 0.000 validation_error: 21.086 +(epoch: 46, iters: 86160, time: 0.540, data: 0.000) G_L1: 14.633 G_L1_ABSOLUTE: 2.339 G_L1_RELATIVE: 12.294 G_Regularizer: 0.000 validation_error: 20.669 +(epoch: 46, iters: 88160, time: 0.541, data: 0.000) G_L1: 17.945 G_L1_ABSOLUTE: 3.102 G_L1_RELATIVE: 14.843 G_Regularizer: 0.000 validation_error: 20.847 +(epoch: 46, iters: 90160, time: 0.549, data: 0.000) G_L1: 14.786 G_L1_ABSOLUTE: 2.467 G_L1_RELATIVE: 12.318 G_Regularizer: 0.000 validation_error: 20.767 +(epoch: 46, iters: 92160, time: 0.553, data: 0.000) G_L1: 13.286 G_L1_ABSOLUTE: 2.315 G_L1_RELATIVE: 10.971 G_Regularizer: 0.000 validation_error: 21.193 +(epoch: 46, iters: 94160, time: 0.552, data: 0.001) G_L1: 15.157 G_L1_ABSOLUTE: 2.667 G_L1_RELATIVE: 12.490 G_Regularizer: 0.000 validation_error: 21.021 +(epoch: 46, iters: 96160, time: 0.534, data: 0.000) G_L1: 13.560 G_L1_ABSOLUTE: 2.905 G_L1_RELATIVE: 10.655 G_Regularizer: 0.000 validation_error: 21.010 +(epoch: 46, iters: 98160, time: 0.546, data: 0.000) G_L1: 13.281 G_L1_ABSOLUTE: 1.972 G_L1_RELATIVE: 11.309 G_Regularizer: 0.000 validation_error: 21.015 +(epoch: 46, iters: 100160, time: 0.562, data: 0.000) G_L1: 14.769 G_L1_ABSOLUTE: 2.134 G_L1_RELATIVE: 12.635 G_Regularizer: 0.000 validation_error: 20.974 +(epoch: 46, iters: 102160, time: 0.546, data: 0.000) G_L1: 14.546 G_L1_ABSOLUTE: 2.841 G_L1_RELATIVE: 11.705 G_Regularizer: 0.000 validation_error: 20.829 +(epoch: 46, iters: 104160, time: 0.537, data: 0.000) G_L1: 13.381 G_L1_ABSOLUTE: 2.561 G_L1_RELATIVE: 10.820 G_Regularizer: 0.000 validation_error: 20.961 +(epoch: 46, iters: 106160, time: 0.534, data: 0.000) G_L1: 14.442 G_L1_ABSOLUTE: 2.432 G_L1_RELATIVE: 12.010 G_Regularizer: 0.000 validation_error: 21.104 +(epoch: 46, iters: 108160, time: 0.557, data: 0.000) G_L1: 14.541 G_L1_ABSOLUTE: 2.403 G_L1_RELATIVE: 12.138 G_Regularizer: 0.000 validation_error: 21.040 +(epoch: 46, iters: 110160, time: 0.558, data: 0.000) G_L1: 13.826 G_L1_ABSOLUTE: 2.876 G_L1_RELATIVE: 10.949 G_Regularizer: 0.000 validation_error: 20.863 +(epoch: 46, iters: 112160, time: 0.549, data: 0.001) G_L1: 13.467 G_L1_ABSOLUTE: 2.501 G_L1_RELATIVE: 10.966 G_Regularizer: 0.000 validation_error: 21.108 +(epoch: 46, iters: 114160, time: 0.545, data: 0.000) G_L1: 15.713 G_L1_ABSOLUTE: 2.229 G_L1_RELATIVE: 13.484 G_Regularizer: 0.000 
validation_error: 20.711 +(epoch: 46, iters: 116160, time: 0.531, data: 0.000) G_L1: 14.656 G_L1_ABSOLUTE: 2.739 G_L1_RELATIVE: 11.917 G_Regularizer: 0.000 validation_error: 21.011 +(epoch: 46, iters: 118160, time: 0.548, data: 0.000) G_L1: 14.223 G_L1_ABSOLUTE: 2.705 G_L1_RELATIVE: 11.518 G_Regularizer: 0.000 validation_error: 20.834 +(epoch: 46, iters: 120160, time: 0.556, data: 0.000) G_L1: 13.219 G_L1_ABSOLUTE: 2.332 G_L1_RELATIVE: 10.887 G_Regularizer: 0.000 validation_error: 20.862 +(epoch: 46, iters: 122160, time: 0.555, data: 0.000) G_L1: 14.154 G_L1_ABSOLUTE: 2.537 G_L1_RELATIVE: 11.617 G_Regularizer: 0.000 validation_error: 20.845 +(epoch: 46, iters: 124160, time: 0.543, data: 0.000) G_L1: 16.266 G_L1_ABSOLUTE: 2.377 G_L1_RELATIVE: 13.889 G_Regularizer: 0.000 validation_error: 20.906 +(epoch: 46, iters: 126160, time: 0.561, data: 0.000) G_L1: 12.310 G_L1_ABSOLUTE: 2.092 G_L1_RELATIVE: 10.219 G_Regularizer: 0.000 validation_error: 20.818 +(epoch: 46, iters: 128160, time: 0.551, data: 0.000) G_L1: 13.216 G_L1_ABSOLUTE: 2.218 G_L1_RELATIVE: 10.998 G_Regularizer: 0.000 validation_error: 20.884 +(epoch: 46, iters: 130160, time: 0.541, data: 0.001) G_L1: 14.190 G_L1_ABSOLUTE: 3.018 G_L1_RELATIVE: 11.171 G_Regularizer: 0.000 validation_error: 20.936 +(epoch: 46, iters: 132160, time: 0.528, data: 0.001) G_L1: 13.241 G_L1_ABSOLUTE: 2.424 G_L1_RELATIVE: 10.817 G_Regularizer: 0.000 validation_error: 20.885 +(epoch: 46, iters: 134160, time: 0.565, data: 0.001) G_L1: 15.186 G_L1_ABSOLUTE: 2.175 G_L1_RELATIVE: 13.011 G_Regularizer: 0.000 validation_error: 21.184 +(epoch: 46, iters: 136160, time: 0.562, data: 0.000) G_L1: 13.719 G_L1_ABSOLUTE: 2.700 G_L1_RELATIVE: 11.019 G_Regularizer: 0.000 validation_error: 21.035 +(epoch: 46, iters: 138160, time: 0.541, data: 0.000) G_L1: 12.734 G_L1_ABSOLUTE: 2.716 G_L1_RELATIVE: 10.019 G_Regularizer: 0.000 validation_error: 20.759 +(epoch: 46, iters: 140160, time: 0.544, data: 0.000) G_L1: 15.116 G_L1_ABSOLUTE: 2.363 G_L1_RELATIVE: 12.753 G_Regularizer: 0.000 validation_error: 20.928 +(epoch: 46, iters: 142160, time: 0.539, data: 0.000) G_L1: 13.739 G_L1_ABSOLUTE: 2.695 G_L1_RELATIVE: 11.044 G_Regularizer: 0.000 validation_error: 20.889 +(epoch: 46, iters: 144160, time: 0.548, data: 0.001) G_L1: 15.581 G_L1_ABSOLUTE: 2.391 G_L1_RELATIVE: 13.190 G_Regularizer: 0.000 validation_error: 20.764 +(epoch: 46, iters: 146160, time: 0.539, data: 0.000) G_L1: 14.450 G_L1_ABSOLUTE: 2.364 G_L1_RELATIVE: 12.086 G_Regularizer: 0.000 validation_error: 20.796 +(epoch: 46, iters: 148160, time: 0.542, data: 0.000) G_L1: 15.531 G_L1_ABSOLUTE: 2.241 G_L1_RELATIVE: 13.290 G_Regularizer: 0.000 validation_error: 20.739 +(epoch: 46, iters: 150160, time: 0.538, data: 0.000) G_L1: 14.299 G_L1_ABSOLUTE: 2.581 G_L1_RELATIVE: 11.719 G_Regularizer: 0.000 validation_error: 20.934 +(epoch: 46, iters: 152160, time: 0.537, data: 0.000) G_L1: 14.310 G_L1_ABSOLUTE: 2.494 G_L1_RELATIVE: 11.816 G_Regularizer: 0.000 validation_error: 20.883 +(epoch: 46, iters: 154160, time: 0.538, data: 0.000) G_L1: 16.373 G_L1_ABSOLUTE: 3.089 G_L1_RELATIVE: 13.284 G_Regularizer: 0.000 validation_error: 20.934 +(epoch: 46, iters: 156160, time: 0.554, data: 0.000) G_L1: 13.353 G_L1_ABSOLUTE: 2.432 G_L1_RELATIVE: 10.922 G_Regularizer: 0.000 validation_error: 21.252 +(epoch: 46, iters: 158160, time: 0.543, data: 0.001) G_L1: 14.793 G_L1_ABSOLUTE: 2.087 G_L1_RELATIVE: 12.706 G_Regularizer: 0.000 validation_error: 20.867 +(epoch: 46, iters: 160160, time: 0.549, data: 0.000) G_L1: 14.896 G_L1_ABSOLUTE: 2.366 
G_L1_RELATIVE: 12.529 G_Regularizer: 0.000 validation_error: 20.796 +(epoch: 46, iters: 162160, time: 0.546, data: 0.000) G_L1: 12.189 G_L1_ABSOLUTE: 2.496 G_L1_RELATIVE: 9.693 G_Regularizer: 0.000 validation_error: 21.266 +(epoch: 46, iters: 164160, time: 0.551, data: 0.000) G_L1: 17.667 G_L1_ABSOLUTE: 2.486 G_L1_RELATIVE: 15.181 G_Regularizer: 0.000 validation_error: 20.800 +(epoch: 46, iters: 166160, time: 0.537, data: 0.000) G_L1: 14.345 G_L1_ABSOLUTE: 2.521 G_L1_RELATIVE: 11.825 G_Regularizer: 0.000 validation_error: 21.061 +(epoch: 46, iters: 168160, time: 0.555, data: 0.000) G_L1: 14.146 G_L1_ABSOLUTE: 2.367 G_L1_RELATIVE: 11.779 G_Regularizer: 0.000 validation_error: 21.224 +(epoch: 46, iters: 170160, time: 0.558, data: 0.000) G_L1: 16.307 G_L1_ABSOLUTE: 2.694 G_L1_RELATIVE: 13.613 G_Regularizer: 0.000 validation_error: 20.980 +(epoch: 46, iters: 172160, time: 0.549, data: 0.001) G_L1: 16.617 G_L1_ABSOLUTE: 2.468 G_L1_RELATIVE: 14.148 G_Regularizer: 0.000 validation_error: 21.002 +(epoch: 46, iters: 174160, time: 0.541, data: 0.001) G_L1: 14.651 G_L1_ABSOLUTE: 2.484 G_L1_RELATIVE: 12.167 G_Regularizer: 0.000 validation_error: 20.741 +(epoch: 46, iters: 176160, time: 0.562, data: 0.001) G_L1: 14.125 G_L1_ABSOLUTE: 2.553 G_L1_RELATIVE: 11.573 G_Regularizer: 0.000 validation_error: 20.583 +(epoch: 46, iters: 178160, time: 0.558, data: 0.000) G_L1: 13.338 G_L1_ABSOLUTE: 2.928 G_L1_RELATIVE: 10.410 G_Regularizer: 0.000 validation_error: 20.872 +(epoch: 46, iters: 180160, time: 0.535, data: 0.000) G_L1: 17.832 G_L1_ABSOLUTE: 2.859 G_L1_RELATIVE: 14.973 G_Regularizer: 0.000 validation_error: 20.949 +(epoch: 46, iters: 182160, time: 0.537, data: 0.000) G_L1: 12.465 G_L1_ABSOLUTE: 1.986 G_L1_RELATIVE: 10.479 G_Regularizer: 0.000 validation_error: 20.926 +(epoch: 46, iters: 184160, time: 0.551, data: 0.000) G_L1: 14.832 G_L1_ABSOLUTE: 3.165 G_L1_RELATIVE: 11.667 G_Regularizer: 0.000 validation_error: 20.987 +(epoch: 46, iters: 186160, time: 0.561, data: 0.000) G_L1: 15.628 G_L1_ABSOLUTE: 2.987 G_L1_RELATIVE: 12.640 G_Regularizer: 0.000 validation_error: 20.814 +(epoch: 46, iters: 188160, time: 0.530, data: 0.000) G_L1: 16.298 G_L1_ABSOLUTE: 2.880 G_L1_RELATIVE: 13.418 G_Regularizer: 0.000 validation_error: 20.774 +(epoch: 46, iters: 190160, time: 0.543, data: 0.000) G_L1: 13.556 G_L1_ABSOLUTE: 2.482 G_L1_RELATIVE: 11.074 G_Regularizer: 0.000 validation_error: 20.660 +(epoch: 46, iters: 192160, time: 0.533, data: 0.000) G_L1: 15.481 G_L1_ABSOLUTE: 2.325 G_L1_RELATIVE: 13.156 G_Regularizer: 0.000 validation_error: 20.974 +(epoch: 46, iters: 194160, time: 0.552, data: 0.000) G_L1: 11.629 G_L1_ABSOLUTE: 2.094 G_L1_RELATIVE: 9.535 G_Regularizer: 0.000 validation_error: 20.934 +(epoch: 46, iters: 196160, time: 0.535, data: 0.000) G_L1: 13.976 G_L1_ABSOLUTE: 2.792 G_L1_RELATIVE: 11.184 G_Regularizer: 0.000 validation_error: 20.909 +(epoch: 46, iters: 198160, time: 0.557, data: 0.000) G_L1: 13.653 G_L1_ABSOLUTE: 2.084 G_L1_RELATIVE: 11.569 G_Regularizer: 0.000 validation_error: 20.878 +(epoch: 46, iters: 200160, time: 0.535, data: 0.000) G_L1: 17.706 G_L1_ABSOLUTE: 3.464 G_L1_RELATIVE: 14.242 G_Regularizer: 0.000 validation_error: 20.887 +(epoch: 46, iters: 202160, time: 0.550, data: 0.000) G_L1: 14.049 G_L1_ABSOLUTE: 2.198 G_L1_RELATIVE: 11.850 G_Regularizer: 0.000 validation_error: 20.837 +(epoch: 46, iters: 204160, time: 0.557, data: 0.001) G_L1: 11.970 G_L1_ABSOLUTE: 2.198 G_L1_RELATIVE: 9.772 G_Regularizer: 0.000 validation_error: 20.909 +(epoch: 46, iters: 206160, time: 0.561, data: 0.000) 
G_L1: 11.703 G_L1_ABSOLUTE: 2.385 G_L1_RELATIVE: 9.318 G_Regularizer: 0.000 validation_error: 21.151 +(epoch: 46, iters: 208160, time: 0.553, data: 0.000) G_L1: 12.781 G_L1_ABSOLUTE: 2.301 G_L1_RELATIVE: 10.480 G_Regularizer: 0.000 validation_error: 20.872 +(epoch: 46, iters: 210160, time: 0.532, data: 0.000) G_L1: 13.413 G_L1_ABSOLUTE: 2.385 G_L1_RELATIVE: 11.029 G_Regularizer: 0.000 validation_error: 21.278 +(epoch: 46, iters: 212160, time: 0.548, data: 0.000) G_L1: 11.529 G_L1_ABSOLUTE: 2.538 G_L1_RELATIVE: 8.991 G_Regularizer: 0.000 validation_error: 20.657 +(epoch: 46, iters: 214160, time: 0.554, data: 0.000) G_L1: 13.389 G_L1_ABSOLUTE: 2.427 G_L1_RELATIVE: 10.962 G_Regularizer: 0.000 validation_error: 20.976 +(epoch: 46, iters: 216160, time: 0.560, data: 0.000) G_L1: 13.063 G_L1_ABSOLUTE: 2.418 G_L1_RELATIVE: 10.646 G_Regularizer: 0.000 validation_error: 20.571 +(epoch: 46, iters: 218160, time: 0.547, data: 0.000) G_L1: 15.455 G_L1_ABSOLUTE: 2.289 G_L1_RELATIVE: 13.167 G_Regularizer: 0.000 validation_error: 21.003 +(epoch: 46, iters: 220160, time: 0.555, data: 0.000) G_L1: 14.340 G_L1_ABSOLUTE: 2.357 G_L1_RELATIVE: 11.984 G_Regularizer: 0.000 validation_error: 20.940 +(epoch: 46, iters: 222160, time: 0.557, data: 0.000) G_L1: 13.434 G_L1_ABSOLUTE: 2.537 G_L1_RELATIVE: 10.896 G_Regularizer: 0.000 validation_error: 20.832 +(epoch: 46, iters: 224160, time: 0.551, data: 0.000) G_L1: 12.900 G_L1_ABSOLUTE: 2.189 G_L1_RELATIVE: 10.711 G_Regularizer: 0.000 validation_error: 20.905 +(epoch: 46, iters: 226160, time: 0.541, data: 0.000) G_L1: 16.123 G_L1_ABSOLUTE: 2.524 G_L1_RELATIVE: 13.599 G_Regularizer: 0.000 validation_error: 21.003 +(epoch: 46, iters: 228160, time: 0.557, data: 0.001) G_L1: 13.802 G_L1_ABSOLUTE: 2.938 G_L1_RELATIVE: 10.864 G_Regularizer: 0.000 validation_error: 21.177 +(epoch: 46, iters: 230160, time: 0.532, data: 0.000) G_L1: 17.382 G_L1_ABSOLUTE: 2.641 G_L1_RELATIVE: 14.741 G_Regularizer: 0.000 validation_error: 20.930 +(epoch: 46, iters: 232160, time: 0.555, data: 0.001) G_L1: 13.207 G_L1_ABSOLUTE: 2.587 G_L1_RELATIVE: 10.620 G_Regularizer: 0.000 validation_error: 20.881 +(epoch: 46, iters: 234160, time: 0.542, data: 0.000) G_L1: 12.219 G_L1_ABSOLUTE: 2.216 G_L1_RELATIVE: 10.003 G_Regularizer: 0.000 validation_error: 20.818 +(epoch: 46, iters: 236160, time: 0.545, data: 0.000) G_L1: 13.487 G_L1_ABSOLUTE: 2.839 G_L1_RELATIVE: 10.649 G_Regularizer: 0.000 validation_error: 21.078 +(epoch: 46, iters: 238160, time: 0.533, data: 0.001) G_L1: 12.732 G_L1_ABSOLUTE: 2.509 G_L1_RELATIVE: 10.224 G_Regularizer: 0.000 validation_error: 20.840 +(epoch: 46, iters: 240160, time: 0.548, data: 0.000) G_L1: 14.951 G_L1_ABSOLUTE: 3.240 G_L1_RELATIVE: 11.710 G_Regularizer: 0.000 validation_error: 20.503 +(epoch: 46, iters: 242160, time: 0.557, data: 0.000) G_L1: 14.910 G_L1_ABSOLUTE: 2.601 G_L1_RELATIVE: 12.309 G_Regularizer: 0.000 validation_error: 20.888 +(epoch: 46, iters: 244160, time: 0.560, data: 0.000) G_L1: 16.740 G_L1_ABSOLUTE: 2.667 G_L1_RELATIVE: 14.073 G_Regularizer: 0.000 validation_error: 21.231 +(epoch: 46, iters: 246160, time: 0.539, data: 0.000) G_L1: 12.884 G_L1_ABSOLUTE: 2.280 G_L1_RELATIVE: 10.604 G_Regularizer: 0.000 validation_error: 20.706 +(epoch: 46, iters: 248160, time: 0.563, data: 0.000) G_L1: 12.877 G_L1_ABSOLUTE: 2.182 G_L1_RELATIVE: 10.696 G_Regularizer: 0.000 validation_error: 20.676 +(epoch: 46, iters: 250160, time: 0.557, data: 0.000) G_L1: 11.678 G_L1_ABSOLUTE: 2.133 G_L1_RELATIVE: 9.545 G_Regularizer: 0.000 validation_error: 21.010 +(epoch: 46, iters: 
252160, time: 0.533, data: 0.000) G_L1: 15.731 G_L1_ABSOLUTE: 2.355 G_L1_RELATIVE: 13.376 G_Regularizer: 0.000 validation_error: 20.932 +(epoch: 46, iters: 254160, time: 0.541, data: 0.000) G_L1: 14.257 G_L1_ABSOLUTE: 2.421 G_L1_RELATIVE: 11.837 G_Regularizer: 0.000 validation_error: 21.001 +(epoch: 46, iters: 256160, time: 0.561, data: 0.000) G_L1: 13.311 G_L1_ABSOLUTE: 2.333 G_L1_RELATIVE: 10.978 G_Regularizer: 0.000 validation_error: 20.886 +(epoch: 46, iters: 258160, time: 0.556, data: 0.000) G_L1: 12.909 G_L1_ABSOLUTE: 2.710 G_L1_RELATIVE: 10.199 G_Regularizer: 0.000 validation_error: 20.865 +(epoch: 46, iters: 260160, time: 0.545, data: 0.001) G_L1: 14.195 G_L1_ABSOLUTE: 2.399 G_L1_RELATIVE: 11.797 G_Regularizer: 0.000 validation_error: 20.604 +(epoch: 46, iters: 262160, time: 0.543, data: 0.000) G_L1: 19.543 G_L1_ABSOLUTE: 2.997 G_L1_RELATIVE: 16.546 G_Regularizer: 0.000 validation_error: 20.672 +(epoch: 46, iters: 264160, time: 0.526, data: 0.000) G_L1: 12.245 G_L1_ABSOLUTE: 2.226 G_L1_RELATIVE: 10.019 G_Regularizer: 0.000 validation_error: 21.087 +(epoch: 46, iters: 266160, time: 0.533, data: 0.000) G_L1: 13.813 G_L1_ABSOLUTE: 2.365 G_L1_RELATIVE: 11.448 G_Regularizer: 0.000 validation_error: 20.860 +(epoch: 46, iters: 268160, time: 0.547, data: 0.001) G_L1: 12.655 G_L1_ABSOLUTE: 2.151 G_L1_RELATIVE: 10.504 G_Regularizer: 0.000 validation_error: 20.675 +(epoch: 46, iters: 270160, time: 0.525, data: 0.000) G_L1: 14.083 G_L1_ABSOLUTE: 2.580 G_L1_RELATIVE: 11.503 G_Regularizer: 0.000 validation_error: 21.072 +(epoch: 46, iters: 272160, time: 0.554, data: 0.000) G_L1: 11.637 G_L1_ABSOLUTE: 2.529 G_L1_RELATIVE: 9.109 G_Regularizer: 0.000 validation_error: 20.900 +(epoch: 46, iters: 274160, time: 0.552, data: 0.000) G_L1: 14.829 G_L1_ABSOLUTE: 2.682 G_L1_RELATIVE: 12.147 G_Regularizer: 0.000 validation_error: 20.881 +(epoch: 46, iters: 276160, time: 0.540, data: 0.000) G_L1: 14.370 G_L1_ABSOLUTE: 2.481 G_L1_RELATIVE: 11.890 G_Regularizer: 0.000 validation_error: 20.979 +(epoch: 46, iters: 278160, time: 0.531, data: 0.000) G_L1: 15.222 G_L1_ABSOLUTE: 2.584 G_L1_RELATIVE: 12.638 G_Regularizer: 0.000 validation_error: 20.747 +(epoch: 46, iters: 280160, time: 0.535, data: 0.000) G_L1: 12.378 G_L1_ABSOLUTE: 2.154 G_L1_RELATIVE: 10.223 G_Regularizer: 0.000 validation_error: 20.916 +(epoch: 46, iters: 282160, time: 0.554, data: 0.000) G_L1: 16.971 G_L1_ABSOLUTE: 2.186 G_L1_RELATIVE: 14.784 G_Regularizer: 0.000 validation_error: 21.177 +(epoch: 46, iters: 284160, time: 0.547, data: 0.001) G_L1: 11.308 G_L1_ABSOLUTE: 2.242 G_L1_RELATIVE: 9.066 G_Regularizer: 0.000 validation_error: 20.738 +(epoch: 46, iters: 286160, time: 0.545, data: 0.000) G_L1: 14.867 G_L1_ABSOLUTE: 2.600 G_L1_RELATIVE: 12.267 G_Regularizer: 0.000 validation_error: 20.828 +(epoch: 46, iters: 288160, time: 0.543, data: 0.000) G_L1: 15.869 G_L1_ABSOLUTE: 2.365 G_L1_RELATIVE: 13.505 G_Regularizer: 0.000 validation_error: 21.118 +(epoch: 46, iters: 290160, time: 0.552, data: 0.000) G_L1: 13.980 G_L1_ABSOLUTE: 2.664 G_L1_RELATIVE: 11.315 G_Regularizer: 0.000 validation_error: 20.773 +(epoch: 46, iters: 292160, time: 0.558, data: 0.000) G_L1: 14.178 G_L1_ABSOLUTE: 2.456 G_L1_RELATIVE: 11.722 G_Regularizer: 0.000 validation_error: 21.035 +(epoch: 46, iters: 294160, time: 0.555, data: 0.000) G_L1: 13.050 G_L1_ABSOLUTE: 2.416 G_L1_RELATIVE: 10.633 G_Regularizer: 0.000 validation_error: 20.986 +(epoch: 46, iters: 296160, time: 0.542, data: 0.000) G_L1: 13.002 G_L1_ABSOLUTE: 2.295 G_L1_RELATIVE: 10.707 G_Regularizer: 0.000 
validation_error: 20.811 +(epoch: 46, iters: 298160, time: 0.557, data: 0.000) G_L1: 13.162 G_L1_ABSOLUTE: 2.383 G_L1_RELATIVE: 10.778 G_Regularizer: 0.000 validation_error: 20.795 +(epoch: 46, iters: 300160, time: 0.557, data: 0.000) G_L1: 17.280 G_L1_ABSOLUTE: 2.757 G_L1_RELATIVE: 14.523 G_Regularizer: 0.000 validation_error: 20.776 +(epoch: 46, iters: 302160, time: 0.538, data: 0.001) G_L1: 13.382 G_L1_ABSOLUTE: 2.413 G_L1_RELATIVE: 10.968 G_Regularizer: 0.000 validation_error: 21.015 +(epoch: 47, iters: 1408, time: 0.532, data: 0.000) G_L1: 14.104 G_L1_ABSOLUTE: 2.306 G_L1_RELATIVE: 11.798 G_Regularizer: 0.000 validation_error: 20.891 +(epoch: 47, iters: 3408, time: 0.559, data: 0.001) G_L1: 16.565 G_L1_ABSOLUTE: 2.892 G_L1_RELATIVE: 13.672 G_Regularizer: 0.000 validation_error: 21.022 +(epoch: 47, iters: 5408, time: 0.552, data: 0.000) G_L1: 11.763 G_L1_ABSOLUTE: 1.965 G_L1_RELATIVE: 9.797 G_Regularizer: 0.000 validation_error: 20.668 +(epoch: 47, iters: 7408, time: 0.545, data: 0.000) G_L1: 14.375 G_L1_ABSOLUTE: 2.851 G_L1_RELATIVE: 11.524 G_Regularizer: 0.000 validation_error: 20.879 +(epoch: 47, iters: 9408, time: 0.545, data: 0.000) G_L1: 17.191 G_L1_ABSOLUTE: 2.702 G_L1_RELATIVE: 14.489 G_Regularizer: 0.000 validation_error: 20.860 +(epoch: 47, iters: 11408, time: 0.532, data: 0.000) G_L1: 13.082 G_L1_ABSOLUTE: 2.263 G_L1_RELATIVE: 10.819 G_Regularizer: 0.000 validation_error: 20.829 +(epoch: 47, iters: 13408, time: 0.533, data: 0.000) G_L1: 14.603 G_L1_ABSOLUTE: 2.560 G_L1_RELATIVE: 12.043 G_Regularizer: 0.000 validation_error: 21.083 +(epoch: 47, iters: 15408, time: 0.553, data: 0.000) G_L1: 12.431 G_L1_ABSOLUTE: 2.249 G_L1_RELATIVE: 10.182 G_Regularizer: 0.000 validation_error: 20.953 +(epoch: 47, iters: 17408, time: 0.553, data: 0.000) G_L1: 13.540 G_L1_ABSOLUTE: 2.430 G_L1_RELATIVE: 11.110 G_Regularizer: 0.000 validation_error: 20.836 +(epoch: 47, iters: 19408, time: 0.535, data: 0.000) G_L1: 13.446 G_L1_ABSOLUTE: 2.476 G_L1_RELATIVE: 10.970 G_Regularizer: 0.000 validation_error: 21.010 +(epoch: 47, iters: 21408, time: 0.548, data: 0.000) G_L1: 16.318 G_L1_ABSOLUTE: 2.630 G_L1_RELATIVE: 13.688 G_Regularizer: 0.000 validation_error: 20.886 +(epoch: 47, iters: 23408, time: 0.555, data: 0.000) G_L1: 14.793 G_L1_ABSOLUTE: 2.883 G_L1_RELATIVE: 11.909 G_Regularizer: 0.000 validation_error: 20.976 +(epoch: 47, iters: 25408, time: 0.535, data: 0.000) G_L1: 15.210 G_L1_ABSOLUTE: 2.455 G_L1_RELATIVE: 12.755 G_Regularizer: 0.000 validation_error: 20.927 +(epoch: 47, iters: 27408, time: 0.534, data: 0.000) G_L1: 12.666 G_L1_ABSOLUTE: 2.704 G_L1_RELATIVE: 9.962 G_Regularizer: 0.000 validation_error: 21.033 +(epoch: 47, iters: 29408, time: 0.548, data: 0.000) G_L1: 12.741 G_L1_ABSOLUTE: 2.327 G_L1_RELATIVE: 10.414 G_Regularizer: 0.000 validation_error: 20.667 +(epoch: 47, iters: 31408, time: 0.555, data: 0.000) G_L1: 14.120 G_L1_ABSOLUTE: 2.753 G_L1_RELATIVE: 11.367 G_Regularizer: 0.000 validation_error: 20.983 +(epoch: 47, iters: 33408, time: 0.548, data: 0.000) G_L1: 14.030 G_L1_ABSOLUTE: 2.397 G_L1_RELATIVE: 11.633 G_Regularizer: 0.000 validation_error: 20.762 +(epoch: 47, iters: 35408, time: 0.571, data: 0.000) G_L1: 11.744 G_L1_ABSOLUTE: 2.369 G_L1_RELATIVE: 9.375 G_Regularizer: 0.000 validation_error: 20.800 +(epoch: 47, iters: 37408, time: 0.557, data: 0.000) G_L1: 14.338 G_L1_ABSOLUTE: 3.048 G_L1_RELATIVE: 11.290 G_Regularizer: 0.000 validation_error: 20.922 +(epoch: 47, iters: 39408, time: 0.551, data: 0.000) G_L1: 14.526 G_L1_ABSOLUTE: 2.318 G_L1_RELATIVE: 12.208 G_Regularizer: 
0.000 validation_error: 21.129 +(epoch: 47, iters: 41408, time: 0.559, data: 0.000) G_L1: 12.454 G_L1_ABSOLUTE: 2.343 G_L1_RELATIVE: 10.111 G_Regularizer: 0.000 validation_error: 20.961 +(epoch: 47, iters: 43408, time: 0.538, data: 0.001) G_L1: 12.883 G_L1_ABSOLUTE: 1.771 G_L1_RELATIVE: 11.111 G_Regularizer: 0.000 validation_error: 20.852 +(epoch: 47, iters: 45408, time: 0.551, data: 0.000) G_L1: 15.913 G_L1_ABSOLUTE: 2.632 G_L1_RELATIVE: 13.281 G_Regularizer: 0.000 validation_error: 21.107 +(epoch: 47, iters: 47408, time: 0.555, data: 0.000) G_L1: 15.051 G_L1_ABSOLUTE: 2.609 G_L1_RELATIVE: 12.442 G_Regularizer: 0.000 validation_error: 20.951 +(epoch: 47, iters: 49408, time: 0.546, data: 0.000) G_L1: 14.884 G_L1_ABSOLUTE: 2.665 G_L1_RELATIVE: 12.219 G_Regularizer: 0.000 validation_error: 20.933 +(epoch: 47, iters: 51408, time: 0.550, data: 0.000) G_L1: 13.978 G_L1_ABSOLUTE: 2.826 G_L1_RELATIVE: 11.151 G_Regularizer: 0.000 validation_error: 20.844 +(epoch: 47, iters: 53408, time: 0.536, data: 0.000) G_L1: 12.330 G_L1_ABSOLUTE: 2.164 G_L1_RELATIVE: 10.166 G_Regularizer: 0.000 validation_error: 20.773 +(epoch: 47, iters: 55408, time: 0.559, data: 0.000) G_L1: 12.410 G_L1_ABSOLUTE: 1.940 G_L1_RELATIVE: 10.470 G_Regularizer: 0.000 validation_error: 20.863 +(epoch: 47, iters: 57408, time: 0.534, data: 0.000) G_L1: 13.164 G_L1_ABSOLUTE: 2.632 G_L1_RELATIVE: 10.532 G_Regularizer: 0.000 validation_error: 20.908 +(epoch: 47, iters: 59408, time: 0.539, data: 0.001) G_L1: 15.502 G_L1_ABSOLUTE: 3.134 G_L1_RELATIVE: 12.368 G_Regularizer: 0.000 validation_error: 21.133 +(epoch: 47, iters: 61408, time: 0.547, data: 0.000) G_L1: 12.568 G_L1_ABSOLUTE: 2.426 G_L1_RELATIVE: 10.142 G_Regularizer: 0.000 validation_error: 21.083 +(epoch: 47, iters: 63408, time: 0.553, data: 0.000) G_L1: 11.820 G_L1_ABSOLUTE: 2.179 G_L1_RELATIVE: 9.641 G_Regularizer: 0.000 validation_error: 20.942 +(epoch: 47, iters: 65408, time: 0.556, data: 0.000) G_L1: 12.960 G_L1_ABSOLUTE: 2.482 G_L1_RELATIVE: 10.478 G_Regularizer: 0.000 validation_error: 20.723 +(epoch: 47, iters: 67408, time: 0.550, data: 0.000) G_L1: 12.496 G_L1_ABSOLUTE: 2.078 G_L1_RELATIVE: 10.418 G_Regularizer: 0.000 validation_error: 20.696 +(epoch: 47, iters: 69408, time: 0.554, data: 0.001) G_L1: 13.477 G_L1_ABSOLUTE: 2.433 G_L1_RELATIVE: 11.044 G_Regularizer: 0.000 validation_error: 20.746 +(epoch: 47, iters: 71408, time: 0.541, data: 0.001) G_L1: 13.205 G_L1_ABSOLUTE: 2.069 G_L1_RELATIVE: 11.136 G_Regularizer: 0.000 validation_error: 20.914 +(epoch: 47, iters: 73408, time: 0.557, data: 0.000) G_L1: 15.519 G_L1_ABSOLUTE: 2.488 G_L1_RELATIVE: 13.032 G_Regularizer: 0.000 validation_error: 20.805 +(epoch: 47, iters: 75408, time: 0.574, data: 0.001) G_L1: 16.120 G_L1_ABSOLUTE: 2.933 G_L1_RELATIVE: 13.187 G_Regularizer: 0.000 validation_error: 21.053 +(epoch: 47, iters: 77408, time: 0.550, data: 0.000) G_L1: 13.096 G_L1_ABSOLUTE: 2.284 G_L1_RELATIVE: 10.812 G_Regularizer: 0.000 validation_error: 20.917 +(epoch: 47, iters: 79408, time: 0.551, data: 0.001) G_L1: 13.960 G_L1_ABSOLUTE: 2.556 G_L1_RELATIVE: 11.405 G_Regularizer: 0.000 validation_error: 20.940 +(epoch: 47, iters: 81408, time: 0.548, data: 0.000) G_L1: 14.104 G_L1_ABSOLUTE: 2.587 G_L1_RELATIVE: 11.517 G_Regularizer: 0.000 validation_error: 20.754 +(epoch: 47, iters: 83408, time: 0.551, data: 0.000) G_L1: 14.406 G_L1_ABSOLUTE: 2.400 G_L1_RELATIVE: 12.006 G_Regularizer: 0.000 validation_error: 20.836 +(epoch: 47, iters: 85408, time: 0.537, data: 0.001) G_L1: 13.791 G_L1_ABSOLUTE: 2.565 G_L1_RELATIVE: 11.226 
G_Regularizer: 0.000 validation_error: 21.014 +(epoch: 47, iters: 87408, time: 0.554, data: 0.000) G_L1: 13.983 G_L1_ABSOLUTE: 2.356 G_L1_RELATIVE: 11.628 G_Regularizer: 0.000 validation_error: 20.841 +(epoch: 47, iters: 89408, time: 0.550, data: 0.000) G_L1: 12.670 G_L1_ABSOLUTE: 2.426 G_L1_RELATIVE: 10.244 G_Regularizer: 0.000 validation_error: 20.840 +(epoch: 47, iters: 91408, time: 0.553, data: 0.000) G_L1: 11.894 G_L1_ABSOLUTE: 2.374 G_L1_RELATIVE: 9.520 G_Regularizer: 0.000 validation_error: 20.807 +(epoch: 47, iters: 93408, time: 0.533, data: 0.000) G_L1: 16.572 G_L1_ABSOLUTE: 2.815 G_L1_RELATIVE: 13.757 G_Regularizer: 0.000 validation_error: 20.982 +(epoch: 47, iters: 95408, time: 0.541, data: 0.000) G_L1: 13.587 G_L1_ABSOLUTE: 2.822 G_L1_RELATIVE: 10.766 G_Regularizer: 0.000 validation_error: 20.878 +(epoch: 47, iters: 97408, time: 0.538, data: 0.000) G_L1: 14.780 G_L1_ABSOLUTE: 2.896 G_L1_RELATIVE: 11.883 G_Regularizer: 0.000 validation_error: 20.678 +(epoch: 47, iters: 99408, time: 0.548, data: 0.000) G_L1: 14.149 G_L1_ABSOLUTE: 2.896 G_L1_RELATIVE: 11.252 G_Regularizer: 0.000 validation_error: 20.726 +(epoch: 47, iters: 101408, time: 0.540, data: 0.001) G_L1: 12.224 G_L1_ABSOLUTE: 2.414 G_L1_RELATIVE: 9.809 G_Regularizer: 0.000 validation_error: 20.555 +(epoch: 47, iters: 103408, time: 0.543, data: 0.000) G_L1: 12.564 G_L1_ABSOLUTE: 2.255 G_L1_RELATIVE: 10.309 G_Regularizer: 0.000 validation_error: 21.009 +(epoch: 47, iters: 105408, time: 0.545, data: 0.000) G_L1: 13.255 G_L1_ABSOLUTE: 2.357 G_L1_RELATIVE: 10.898 G_Regularizer: 0.000 validation_error: 20.789 +(epoch: 47, iters: 107408, time: 0.553, data: 0.000) G_L1: 15.414 G_L1_ABSOLUTE: 2.688 G_L1_RELATIVE: 12.727 G_Regularizer: 0.000 validation_error: 20.649 +(epoch: 47, iters: 109408, time: 0.552, data: 0.000) G_L1: 12.213 G_L1_ABSOLUTE: 2.370 G_L1_RELATIVE: 9.843 G_Regularizer: 0.000 validation_error: 20.833 +(epoch: 47, iters: 111408, time: 0.529, data: 0.000) G_L1: 14.098 G_L1_ABSOLUTE: 2.876 G_L1_RELATIVE: 11.222 G_Regularizer: 0.000 validation_error: 20.828 +(epoch: 47, iters: 113408, time: 0.546, data: 0.000) G_L1: 13.347 G_L1_ABSOLUTE: 2.702 G_L1_RELATIVE: 10.645 G_Regularizer: 0.000 validation_error: 20.376 +(epoch: 47, iters: 115408, time: 0.557, data: 0.000) G_L1: 15.296 G_L1_ABSOLUTE: 2.452 G_L1_RELATIVE: 12.844 G_Regularizer: 0.000 validation_error: 20.962 +(epoch: 47, iters: 117408, time: 0.549, data: 0.000) G_L1: 12.658 G_L1_ABSOLUTE: 2.486 G_L1_RELATIVE: 10.172 G_Regularizer: 0.000 validation_error: 20.875 +(epoch: 47, iters: 119408, time: 0.540, data: 0.000) G_L1: 13.671 G_L1_ABSOLUTE: 2.249 G_L1_RELATIVE: 11.422 G_Regularizer: 0.000 validation_error: 20.993 +(epoch: 47, iters: 121408, time: 0.556, data: 0.000) G_L1: 14.940 G_L1_ABSOLUTE: 2.508 G_L1_RELATIVE: 12.431 G_Regularizer: 0.000 validation_error: 21.183 +(epoch: 47, iters: 123408, time: 0.558, data: 0.000) G_L1: 12.359 G_L1_ABSOLUTE: 2.606 G_L1_RELATIVE: 9.752 G_Regularizer: 0.000 validation_error: 20.996 +(epoch: 47, iters: 125408, time: 0.549, data: 0.000) G_L1: 11.674 G_L1_ABSOLUTE: 2.459 G_L1_RELATIVE: 9.216 G_Regularizer: 0.000 validation_error: 20.695 +(epoch: 47, iters: 127408, time: 0.540, data: 0.000) G_L1: 13.552 G_L1_ABSOLUTE: 2.420 G_L1_RELATIVE: 11.132 G_Regularizer: 0.000 validation_error: 20.981 +(epoch: 47, iters: 129408, time: 0.563, data: 0.000) G_L1: 14.697 G_L1_ABSOLUTE: 2.450 G_L1_RELATIVE: 12.247 G_Regularizer: 0.000 validation_error: 20.953 +(epoch: 47, iters: 131408, time: 0.558, data: 0.000) G_L1: 13.827 G_L1_ABSOLUTE: 2.598 
G_L1_RELATIVE: 11.228 G_Regularizer: 0.000 validation_error: 20.777 +(epoch: 47, iters: 133408, time: 0.562, data: 0.000) G_L1: 11.654 G_L1_ABSOLUTE: 2.198 G_L1_RELATIVE: 9.455 G_Regularizer: 0.000 validation_error: 20.774 +(epoch: 47, iters: 135408, time: 0.534, data: 0.000) G_L1: 14.003 G_L1_ABSOLUTE: 2.298 G_L1_RELATIVE: 11.705 G_Regularizer: 0.000 validation_error: 21.049 +(epoch: 47, iters: 137408, time: 0.571, data: 0.000) G_L1: 12.175 G_L1_ABSOLUTE: 2.192 G_L1_RELATIVE: 9.984 G_Regularizer: 0.000 validation_error: 20.626 +(epoch: 47, iters: 139408, time: 0.537, data: 0.000) G_L1: 16.023 G_L1_ABSOLUTE: 2.206 G_L1_RELATIVE: 13.817 G_Regularizer: 0.000 validation_error: 20.589 +(epoch: 47, iters: 141408, time: 0.550, data: 0.000) G_L1: 14.130 G_L1_ABSOLUTE: 2.198 G_L1_RELATIVE: 11.931 G_Regularizer: 0.000 validation_error: 21.031 +(epoch: 47, iters: 143408, time: 0.546, data: 0.000) G_L1: 12.628 G_L1_ABSOLUTE: 2.525 G_L1_RELATIVE: 10.104 G_Regularizer: 0.000 validation_error: 20.943 +(epoch: 47, iters: 145408, time: 0.542, data: 0.000) G_L1: 14.340 G_L1_ABSOLUTE: 2.133 G_L1_RELATIVE: 12.208 G_Regularizer: 0.000 validation_error: 20.536 +(epoch: 47, iters: 147408, time: 0.556, data: 0.000) G_L1: 12.750 G_L1_ABSOLUTE: 2.630 G_L1_RELATIVE: 10.120 G_Regularizer: 0.000 validation_error: 20.969 +(epoch: 47, iters: 149408, time: 0.560, data: 0.000) G_L1: 15.616 G_L1_ABSOLUTE: 2.586 G_L1_RELATIVE: 13.030 G_Regularizer: 0.000 validation_error: 21.173 +(epoch: 47, iters: 151408, time: 0.532, data: 0.001) G_L1: 13.719 G_L1_ABSOLUTE: 2.321 G_L1_RELATIVE: 11.398 G_Regularizer: 0.000 validation_error: 21.127 +(epoch: 47, iters: 153408, time: 0.536, data: 0.000) G_L1: 15.255 G_L1_ABSOLUTE: 2.518 G_L1_RELATIVE: 12.737 G_Regularizer: 0.000 validation_error: 21.000 +(epoch: 47, iters: 155408, time: 0.548, data: 0.000) G_L1: 12.808 G_L1_ABSOLUTE: 2.530 G_L1_RELATIVE: 10.278 G_Regularizer: 0.000 validation_error: 20.800 +(epoch: 47, iters: 157408, time: 0.549, data: 0.001) G_L1: 15.417 G_L1_ABSOLUTE: 2.334 G_L1_RELATIVE: 13.083 G_Regularizer: 0.000 validation_error: 20.931 +(epoch: 47, iters: 159408, time: 0.545, data: 0.001) G_L1: 20.813 G_L1_ABSOLUTE: 2.850 G_L1_RELATIVE: 17.963 G_Regularizer: 0.000 validation_error: 20.958 +(epoch: 47, iters: 161408, time: 0.541, data: 0.001) G_L1: 13.836 G_L1_ABSOLUTE: 2.956 G_L1_RELATIVE: 10.880 G_Regularizer: 0.000 validation_error: 21.045 +(epoch: 47, iters: 163408, time: 0.555, data: 0.000) G_L1: 12.773 G_L1_ABSOLUTE: 2.361 G_L1_RELATIVE: 10.413 G_Regularizer: 0.000 validation_error: 20.807 +(epoch: 47, iters: 165408, time: 0.554, data: 0.000) G_L1: 13.057 G_L1_ABSOLUTE: 2.618 G_L1_RELATIVE: 10.439 G_Regularizer: 0.000 validation_error: 21.013 +(epoch: 47, iters: 167408, time: 0.547, data: 0.000) G_L1: 12.377 G_L1_ABSOLUTE: 2.324 G_L1_RELATIVE: 10.052 G_Regularizer: 0.000 validation_error: 20.937 +(epoch: 47, iters: 169408, time: 0.551, data: 0.000) G_L1: 11.914 G_L1_ABSOLUTE: 2.110 G_L1_RELATIVE: 9.804 G_Regularizer: 0.000 validation_error: 20.855 +(epoch: 47, iters: 171408, time: 0.553, data: 0.000) G_L1: 13.415 G_L1_ABSOLUTE: 2.844 G_L1_RELATIVE: 10.572 G_Regularizer: 0.000 validation_error: 20.860 +(epoch: 47, iters: 173408, time: 0.551, data: 0.000) G_L1: 14.099 G_L1_ABSOLUTE: 2.115 G_L1_RELATIVE: 11.984 G_Regularizer: 0.000 validation_error: 21.054 +(epoch: 47, iters: 175408, time: 0.549, data: 0.000) G_L1: 14.728 G_L1_ABSOLUTE: 2.429 G_L1_RELATIVE: 12.299 G_Regularizer: 0.000 validation_error: 20.989 +(epoch: 47, iters: 177408, time: 0.558, data: 0.000) 
G_L1: 15.051 G_L1_ABSOLUTE: 2.777 G_L1_RELATIVE: 12.274 G_Regularizer: 0.000 validation_error: 20.934
[... committed loss log continues in the same format through epoch 50 (last entry in this span: iters 227152), one entry every 2000 iterations: step time holds at ~0.53-0.57 s with a single outlier at 0.679 s, data-loading time stays at ~0.000-0.002 s, G_L1 fluctuates between ~9.9 and ~18 with occasional spikes (23.3, 26.9, 27.4), G_L1_ABSOLUTE stays in the ~1.9-3.5 range, G_L1_RELATIVE in ~7.7-24.8, G_Regularizer is constantly 0.000, and validation_error oscillates in the ~20.5-21.3 band with no sustained improvement ...]
13.907 G_Regularizer: 0.000 validation_error: 21.000 +(epoch: 50, iters: 229152, time: 0.553, data: 0.000) G_L1: 14.365 G_L1_ABSOLUTE: 2.567 G_L1_RELATIVE: 11.798 G_Regularizer: 0.000 validation_error: 20.863 +(epoch: 50, iters: 231152, time: 0.554, data: 0.000) G_L1: 13.529 G_L1_ABSOLUTE: 2.409 G_L1_RELATIVE: 11.120 G_Regularizer: 0.000 validation_error: 20.823 +(epoch: 50, iters: 233152, time: 0.541, data: 0.000) G_L1: 13.512 G_L1_ABSOLUTE: 2.270 G_L1_RELATIVE: 11.242 G_Regularizer: 0.000 validation_error: 20.953 +(epoch: 50, iters: 235152, time: 0.546, data: 0.000) G_L1: 14.784 G_L1_ABSOLUTE: 2.499 G_L1_RELATIVE: 12.284 G_Regularizer: 0.000 validation_error: 20.963 +(epoch: 50, iters: 237152, time: 0.540, data: 0.001) G_L1: 17.443 G_L1_ABSOLUTE: 2.476 G_L1_RELATIVE: 14.966 G_Regularizer: 0.000 validation_error: 20.952 +(epoch: 50, iters: 239152, time: 0.536, data: 0.000) G_L1: 14.884 G_L1_ABSOLUTE: 2.760 G_L1_RELATIVE: 12.125 G_Regularizer: 0.000 validation_error: 20.934 +(epoch: 50, iters: 241152, time: 0.546, data: 0.000) G_L1: 13.389 G_L1_ABSOLUTE: 2.895 G_L1_RELATIVE: 10.495 G_Regularizer: 0.000 validation_error: 20.825 +(epoch: 50, iters: 243152, time: 0.544, data: 0.000) G_L1: 17.264 G_L1_ABSOLUTE: 2.650 G_L1_RELATIVE: 14.614 G_Regularizer: 0.000 validation_error: 20.975 +(epoch: 50, iters: 245152, time: 0.563, data: 0.000) G_L1: 13.888 G_L1_ABSOLUTE: 2.428 G_L1_RELATIVE: 11.461 G_Regularizer: 0.000 validation_error: 20.784 +(epoch: 50, iters: 247152, time: 0.559, data: 0.000) G_L1: 12.697 G_L1_ABSOLUTE: 2.565 G_L1_RELATIVE: 10.132 G_Regularizer: 0.000 validation_error: 21.009 +(epoch: 50, iters: 249152, time: 0.539, data: 0.000) G_L1: 14.719 G_L1_ABSOLUTE: 2.404 G_L1_RELATIVE: 12.315 G_Regularizer: 0.000 validation_error: 20.901 +(epoch: 50, iters: 251152, time: 0.540, data: 0.000) G_L1: 14.773 G_L1_ABSOLUTE: 2.320 G_L1_RELATIVE: 12.453 G_Regularizer: 0.000 validation_error: 20.998 +(epoch: 50, iters: 253152, time: 0.541, data: 0.000) G_L1: 14.338 G_L1_ABSOLUTE: 2.660 G_L1_RELATIVE: 11.679 G_Regularizer: 0.000 validation_error: 20.889 +(epoch: 50, iters: 255152, time: 0.562, data: 0.000) G_L1: 15.671 G_L1_ABSOLUTE: 2.504 G_L1_RELATIVE: 13.166 G_Regularizer: 0.000 validation_error: 20.839 +(epoch: 50, iters: 257152, time: 0.546, data: 0.001) G_L1: 14.734 G_L1_ABSOLUTE: 2.952 G_L1_RELATIVE: 11.783 G_Regularizer: 0.000 validation_error: 20.976 +(epoch: 50, iters: 259152, time: 0.546, data: 0.000) G_L1: 14.127 G_L1_ABSOLUTE: 2.734 G_L1_RELATIVE: 11.393 G_Regularizer: 0.000 validation_error: 20.826 +(epoch: 50, iters: 261152, time: 0.560, data: 0.000) G_L1: 21.629 G_L1_ABSOLUTE: 2.627 G_L1_RELATIVE: 19.002 G_Regularizer: 0.000 validation_error: 20.832 +(epoch: 50, iters: 263152, time: 0.561, data: 0.000) G_L1: 11.584 G_L1_ABSOLUTE: 2.200 G_L1_RELATIVE: 9.384 G_Regularizer: 0.000 validation_error: 20.842 +(epoch: 50, iters: 265152, time: 0.547, data: 0.000) G_L1: 10.555 G_L1_ABSOLUTE: 2.069 G_L1_RELATIVE: 8.486 G_Regularizer: 0.000 validation_error: 20.883 +(epoch: 50, iters: 267152, time: 0.545, data: 0.000) G_L1: 11.750 G_L1_ABSOLUTE: 2.598 G_L1_RELATIVE: 9.152 G_Regularizer: 0.000 validation_error: 20.791 +(epoch: 50, iters: 269152, time: 0.542, data: 0.000) G_L1: 13.131 G_L1_ABSOLUTE: 2.469 G_L1_RELATIVE: 10.662 G_Regularizer: 0.000 validation_error: 20.953 +(epoch: 50, iters: 271152, time: 0.551, data: 0.000) G_L1: 15.054 G_L1_ABSOLUTE: 2.363 G_L1_RELATIVE: 12.691 G_Regularizer: 0.000 validation_error: 20.928 +(epoch: 50, iters: 273152, time: 0.553, data: 0.000) G_L1: 14.395 
G_L1_ABSOLUTE: 2.375 G_L1_RELATIVE: 12.021 G_Regularizer: 0.000 validation_error: 20.935 +(epoch: 50, iters: 275152, time: 0.531, data: 0.000) G_L1: 16.199 G_L1_ABSOLUTE: 2.816 G_L1_RELATIVE: 13.383 G_Regularizer: 0.000 validation_error: 21.037 +(epoch: 50, iters: 277152, time: 0.537, data: 0.000) G_L1: 13.642 G_L1_ABSOLUTE: 2.463 G_L1_RELATIVE: 11.180 G_Regularizer: 0.000 validation_error: 20.772 +(epoch: 50, iters: 279152, time: 0.554, data: 0.000) G_L1: 15.279 G_L1_ABSOLUTE: 2.476 G_L1_RELATIVE: 12.803 G_Regularizer: 0.000 validation_error: 20.792 +(epoch: 50, iters: 281152, time: 0.552, data: 0.000) G_L1: 13.569 G_L1_ABSOLUTE: 2.366 G_L1_RELATIVE: 11.202 G_Regularizer: 0.000 validation_error: 20.918 +(epoch: 50, iters: 283152, time: 0.550, data: 0.001) G_L1: 14.213 G_L1_ABSOLUTE: 2.670 G_L1_RELATIVE: 11.543 G_Regularizer: 0.000 validation_error: 20.986 +(epoch: 50, iters: 285152, time: 0.555, data: 0.000) G_L1: 15.793 G_L1_ABSOLUTE: 2.404 G_L1_RELATIVE: 13.390 G_Regularizer: 0.000 validation_error: 20.636 +(epoch: 50, iters: 287152, time: 0.553, data: 0.000) G_L1: 12.423 G_L1_ABSOLUTE: 2.377 G_L1_RELATIVE: 10.046 G_Regularizer: 0.000 validation_error: 20.906 +(epoch: 50, iters: 289152, time: 0.563, data: 0.000) G_L1: 12.481 G_L1_ABSOLUTE: 2.242 G_L1_RELATIVE: 10.239 G_Regularizer: 0.000 validation_error: 20.723 +(epoch: 50, iters: 291152, time: 0.554, data: 0.000) G_L1: 14.564 G_L1_ABSOLUTE: 2.944 G_L1_RELATIVE: 11.619 G_Regularizer: 0.000 validation_error: 20.637 +(epoch: 50, iters: 293152, time: 0.540, data: 0.000) G_L1: 13.002 G_L1_ABSOLUTE: 2.353 G_L1_RELATIVE: 10.648 G_Regularizer: 0.000 validation_error: 20.926 +(epoch: 50, iters: 295152, time: 0.550, data: 0.001) G_L1: 15.001 G_L1_ABSOLUTE: 2.648 G_L1_RELATIVE: 12.353 G_Regularizer: 0.000 validation_error: 20.943 +(epoch: 50, iters: 297152, time: 0.560, data: 0.001) G_L1: 13.265 G_L1_ABSOLUTE: 2.233 G_L1_RELATIVE: 11.032 G_Regularizer: 0.000 validation_error: 21.106 +(epoch: 50, iters: 299152, time: 0.539, data: 0.000) G_L1: 14.484 G_L1_ABSOLUTE: 2.598 G_L1_RELATIVE: 11.886 G_Regularizer: 0.000 validation_error: 20.849 +(epoch: 50, iters: 301152, time: 0.535, data: 0.000) G_L1: 15.200 G_L1_ABSOLUTE: 2.283 G_L1_RELATIVE: 12.917 G_Regularizer: 0.000 validation_error: 20.902 diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/opt.txt b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/opt.txt new file mode 100644 index 0000000..4c31766 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/opt.txt @@ -0,0 +1,60 @@ +----------------- Options --------------- + aspect_ratio: 1.0 + audio_window_size: 16 + batch_size: 1 + cached_images: False + checkpoints_dir: ./checkpoints + dataroot: /ARD_ZDF [default: None] + dataset_mode: multi_face_audio_eq_tmp_cached [default: aligned] + direction: AtoB + display_winsize: 512 [default: 256] + epoch: latest + erosionFactor: 1.0 + eval: False + fineSize: 512 + fix_renderer: False + gpu_ids: 0 + hierarchicalTex: False + init_gain: 0.02 + init_type: xavier + input_nc: 3 + isTrain: False [default: None] + loadSize: 512 + 
load_iter: 0 [default: 0] + look_ahead: True [default: False] + lossType: RMS [default: L1] + max_dataset_size: inf + model: audio2ExpressionsAttentionTMP4 [default: test] + n_layers_D: 3 + name: audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead [default: experiment_name] + ndf: 64 + netD: basic + netG: unet_256 + ngf: 64 + no_augmentation: False + no_dropout: False + norm: instance + ntest: inf + num_test: 50 + num_threads: 4 + output_audio_expressions: False + output_nc: 3 + phase: test + renderer: no_renderer + rendererType: estimatorAttention [default: UNET_5_level] + resize_or_crop: resize_and_crop + results_dir: ./results/ + seq_len: 8 [default: 1] + serial_batches: False + source_actor: /code/neural-code/output_data/features/fgsdfsdf [default: ] + source_dir: ./datasets/ + suffix: + target_actor: /code/neural-code/output_data/features/Jennifer_355_9415 [default: ] + tex_dim: 256 + tex_features: 16 +tex_features_intermediate: 16 + textureModel: DynamicNeuralTextureAudio + transfer_path: /code/neural-code/output_data/TRANSFERS/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead [default: None] + verbose: False + write_no_images: True [default: False] +----------------- End ------------------- diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4/opt.txt b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4/opt.txt new file mode 100644 index 0000000..caf26b3 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/checkpoints/audio2ExpressionsAttentionTMP4/opt.txt @@ -0,0 +1,59 @@ +----------------- Options --------------- + aspect_ratio: 1.0 + audio_window_size: 16 + batch_size: 1 + cached_images: False + checkpoints_dir: ./checkpoints + dataroot: /ARD_ZDF [default: None] + dataset_mode: multi_face_audio_eq_tmp_cached [default: aligned] + direction: AtoB + display_winsize: 512 [default: 256] + epoch: latest + erosionFactor: 1.0 + eval: False + fineSize: 512 + fix_renderer: False + gpu_ids: 0 + hierarchicalTex: False + init_gain: 0.02 + init_type: xavier + input_nc: 3 + isTrain: False [default: None] + loadSize: 512 + load_iter: 0 [default: 0] + look_ahead: True [default: False] + lossType: RMS [default: L1] + max_dataset_size: inf + model: audio2ExpressionsAttentionTMP4 [default: test] + n_layers_D: 3 + name: audio2ExpressionsAttentionTMP4 [default: experiment_name] + ndf: 64 + netD: basic + netG: unet_256 + ngf: 64 + no_augmentation: False + no_dropout: False + norm: instance + ntest: inf + num_test: 50 + num_threads: 4 + output_audio_expressions: False + output_nc: 3 + phase: test + renderer: no_renderer + rendererType: estimatorAttention [default: UNET_5_level] + resize_or_crop: resize_and_crop + results_dir: ./results/ + seq_len: 8 [default: 1] + serial_batches: False + source_actor: /home/alberto/NeuralVoicePuppetry/datasets//External/Sekunden_Wissen_01 [default: ] + source_dir: ./datasets/ + suffix: + target_actor: /home/alberto/NeuralVoicePuppetry/datasets//External/Russian_guy [default: ] + tex_dim: 256 + tex_features: 16 +tex_features_intermediate: 16 + textureModel: DynamicNeuralTextureAudio + verbose: False + write_no_images: True [default: False] +----------------- End ------------------- diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/__init__.py 
b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/__init__.py new file mode 100644 index 0000000..3735ac1 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/__init__.py @@ -0,0 +1,90 @@ +import importlib +import torch.utils.data +from data.base_data_loader import BaseDataLoader +from data.base_dataset import BaseDataset + + +def find_dataset_using_name(dataset_name): + # Given the option --dataset_mode [datasetname], + # the file "data/datasetname_dataset.py" + # will be imported. + dataset_filename = "data." + dataset_name + "_dataset" + datasetlib = importlib.import_module(dataset_filename) + + # In the file, the class called DatasetNameDataset() will + # be instantiated. It has to be a subclass of BaseDataset, + # and it is case-insensitive. + dataset = None + target_dataset_name = dataset_name.replace('_', '') + 'dataset' + for name, cls in datasetlib.__dict__.items(): + if name.lower() == target_dataset_name.lower() \ + and issubclass(cls, BaseDataset): + dataset = cls + + if dataset is None: + print("In %s.py, there should be a subclass of BaseDataset with class name that matches %s in lowercase." % (dataset_filename, target_dataset_name)) + exit(0) + + return dataset + + +def get_option_setter(dataset_name): + dataset_class = find_dataset_using_name(dataset_name) + return dataset_class.modify_commandline_options + + +def create_dataset(opt): + dataset = find_dataset_using_name(opt.dataset_mode) + instance = dataset() + instance.initialize(opt) + print("dataset [%s] was created" % (instance.name())) + return instance + + +def CreateDataLoader(opt): + data_loader = CustomDatasetDataLoader() + data_loader.initialize(opt) + return data_loader + + +# Wrapper class of Dataset class that performs +# multi-threaded data loading +class CustomDatasetDataLoader(BaseDataLoader): + def name(self): + return 'CustomDatasetDataLoader' + + def initialize(self, opt): + BaseDataLoader.initialize(self, opt) + self.dataset = create_dataset(opt) + if opt.serial_batches: + self.dataloader = torch.utils.data.DataLoader( + self.dataset, + batch_size=opt.batch_size, + shuffle=not opt.serial_batches, + num_workers=int(opt.num_threads)) + else: + #weights = make_weights_for_balanced_classes(dataset_train.imgs, len(dataset_train.classes)) + weights = self.dataset.getSampleWeights() + weights = torch.DoubleTensor(weights) + sampler = torch.utils.data.sampler.WeightedRandomSampler(weights, len(weights)) + + self.dataloader = torch.utils.data.DataLoader( + self.dataset, + batch_size=opt.batch_size, + #shuffle=True, + sampler=sampler, + pin_memory=True, + num_workers=int(opt.num_threads)) + + + def load_data(self): + return self + + def __len__(self): + return min(len(self.dataset), self.opt.max_dataset_size) + + def __iter__(self): + for i, data in enumerate(self.dataloader): + if i * self.opt.batch_size >= self.opt.max_dataset_size: + break + yield data diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/audio.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/audio.py new file mode 100644 index 0000000..4932c94 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/audio.py @@ -0,0 +1,77 @@ +import time +import random +import math + +import numpy as np + +import torch +import torch.nn as nn +import torch.nn.functional as F +import torchvision.transforms as transforms + +import torchaudio +import torchaudio.transforms + +import librosa +import scipy.signal +import librosa.display 
+import matplotlib.pyplot as plt + + +class Audio(): + def name(self): + return 'Audio' + + def __init__(self, filename, write_mel_spectogram = False): + self.n_mels=128 + self.fmax=8000 + self.hop_length_ms = 20 + + sound, sample_rate = librosa.load(filename)#torchaudio.load(filename) + self.raw_audio = sound + self.sample_rate = sample_rate + print('sample_rate = %d' % self.sample_rate) + self.n_samples = sound.shape[0] + self.time_total = self.n_samples / self.sample_rate + print('length = %ds' % self.time_total) + + print('compute mel spectrogram...') + self.hop_length = int(sample_rate / 1000.0 * self.hop_length_ms) + print('hop_length: ', self.hop_length) + self.mel_spectrogram = librosa.feature.melspectrogram(y=self.raw_audio, sr=self.sample_rate, hop_length=self.hop_length, n_mels=self.n_mels, fmax=self.fmax) + + + if write_mel_spectogram: + print('write spectrogram to file') + plt.figure(figsize=(100, 15)) + librosa.display.specshow(librosa.power_to_db(self.mel_spectrogram, ref=np.max), y_axis='mel', fmax=self.fmax, x_axis='time') + plt.colorbar(format='%+2.0f dB') + plt.title('Mel spectrogram') + plt.tight_layout() + plt.savefig('mel_features.png', dpi=None, facecolor='w', edgecolor='w', orientation='portrait', papertype=None, format=None, transparent=False, bbox_inches=None, pad_inches=0.1, frameon=None, metadata=None) + + print('mel: ', self.mel_spectrogram.shape) # (128, 18441) + self.n_mel_frames = self.mel_spectrogram.shape[1] + self.mel_sample_rate = self.mel_spectrogram.shape[1] / self.time_total + print('n_mel_frames: ', self.n_mel_frames) + print('mel_sample_rate: ', self.mel_sample_rate) + + # convert to torch + self.mel_spectrogram = torch.FloatTensor(self.mel_spectrogram) + + def getWindow(self, mel_frame_idx, window_size): + # get audio mel sample window + audio_start = mel_frame_idx - (window_size//2) + audio_end = mel_frame_idx + (window_size//2) + if audio_start < 0: + audio_input = self.mel_spectrogram[0:self.n_mels, 0:audio_end] + zeros = torch.zeros((self.n_mels,-audio_start)) + audio_input = torch.cat([zeros, audio_input], 1) + elif audio_end >= self.n_mel_frames: + audio_input = self.mel_spectrogram[:, audio_start:-1] + zeros = torch.zeros((self.n_mels,audio_end-self.n_mel_frames + 1)) + audio_input = torch.cat([audio_input, zeros], 1) + else: + audio_input = self.mel_spectrogram[:, audio_start:audio_end] + + return torch.reshape(audio_input, (1, 1, self.n_mels, window_size)) \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/audio_dataset.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/audio_dataset.py new file mode 100644 index 0000000..3ff5e14 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/audio_dataset.py @@ -0,0 +1,143 @@ +import os.path +import random +import torchvision.transforms as transforms +import torch +import numpy as np +from data.base_dataset import BaseDataset +from data.audio import Audio +#from data.image_folder import make_dataset +from PIL import Image +import h5py + +def make_dataset(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.deepspeech.npy']): + id_str = fname[:-15] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + + for id in ids: + fname=str(id)+'.deepspeech.npy' + path = os.path.join(root, fname) + images.append(path) + return images 
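For reference, Audio.getWindow in data/audio.py above extracts a fixed-size mel window centered on a mel frame index, zero-padding whenever the window overruns the start or end of the clip, and returns a 1 x 1 x n_mels x window_size tensor. A minimal, slightly regularized sketch of the same logic (the helper name centered_mel_window and the toy shapes are illustrative, not part of the repo):

import torch

def centered_mel_window(mel, frame_idx, window_size):
    # mel: (n_mels, n_frames); returns (1, 1, n_mels, window_size), zero-padded at clip edges
    n_mels, n_frames = mel.shape
    start = frame_idx - window_size // 2
    end = start + window_size
    pad_left = max(0, -start)
    pad_right = max(0, end - n_frames)
    window = mel[:, max(0, start):min(n_frames, end)]
    window = torch.cat([torch.zeros(n_mels, pad_left), window, torch.zeros(n_mels, pad_right)], dim=1)
    return window.reshape(1, 1, n_mels, window_size)

mel = torch.randn(128, 500)                   # 128 mel bins, 500 frames at the 20 ms hop used above
print(centered_mel_window(mel, 3, 16).shape)  # torch.Size([1, 1, 128, 16])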
+ +def make_ids(paths, root_dir): + ids = [] + + for fname in paths: + l = fname.rfind('/') + #id_str = fname[l+1:-4] + id_str = fname[l+1:-15] + i = int(id_str) + #print(fname, ': ', i) + ids.append(i) + return ids + +class AudioDataset(BaseDataset): + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + self.opt = opt + self.root = opt.dataroot + self.h5py_path = os.path.join(opt.dataroot, opt.dataroot.split("/")[-1]+'.h5') + + print('\th5py_path:', self.h5py_path) + self.data = h5py.File(self.h5py_path, 'r') + + opt.nObjects = 1 + opt.nTrainObjects = 116 # TODO + opt.nTestObjects = 1 + opt.test_sequence_names = [[opt.dataroot.split("/")[-1], 'test']] + assert(opt.resize_or_crop == 'resize_and_crop') + + if opt.isTrain: + print('ERROR: audio_dataset only allowed for test') + exit() + + def getSampleWeights(self): + weights = np.ones((len(self.frame_paths))) + return weights + + def getAudioFilename(self): + return os.path.join(self.root, 'audio.wav') + + def getAudioFeatureFilename(self, idx): + return self.frame_paths[idx % len(self.frame_paths)] + + def __getitem__(self, index): + + #print('GET ITEM: ', index) + + # load deepspeech feature + feature_array = self.data["dsf"][index] + dsf_np = np.resize(feature_array, (16,29,1)) + dsf = transforms.ToTensor()(dsf_np.astype(np.float32)) + + + # load sequence data if necessary + if self.opt.look_ahead:# use prev and following frame infos + r = self.opt.seq_len//2 + for i in range(1,r): # prev frames + index_seq = index - i + if index_seq < 0: index_seq = 0 + + feature_array = self.data["dsf"][index_seq] + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + for i in range(1,self.opt.seq_len - r + 1): # following frames + index_seq = index + i + max_idx = len(self.data["dsf"])-1 + if index_seq > max_idx: index_seq = max_idx + + feature_array = self.data["dsf"][index_seq] + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf, dsf_seq], 0) # seq_len x 16 x 29 + # note the ordering [old ... current ... future] + else: + for i in range(1,self.opt.seq_len): + index_seq = index - i + if index_seq < 0: index_seq = 0 + + feature_array = self.data["dsf"][index_seq] + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... 
current] + + + ################################# + zeroIdentity = torch.zeros(100) + zeroExpressions = torch.zeros(76) + + target_id = -1 + internal_sequence_id = 0 + + weight = 1.0 / len(self.data["dsf"]) + + return {'paths': '', + 'expressions': zeroExpressions, + 'identity': zeroIdentity, + 'intrinsics': np.zeros((4)), + 'extrinsics': np.zeros((4,4)), + 'audio_deepspeech': dsf, # deepspeech feature + 'target_id':target_id, + 'internal_id':internal_sequence_id, + 'weight': np.array([weight]).astype(np.float32)} + + def __len__(self): + return len(self.data["dsf"]) + + def name(self): + return 'AudioDataset' diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/base_data_loader.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/base_data_loader.py new file mode 100644 index 0000000..ae5a168 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/base_data_loader.py @@ -0,0 +1,10 @@ +class BaseDataLoader(): + def __init__(self): + pass + + def initialize(self, opt): + self.opt = opt + pass + + def load_data(self): + return None diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/base_dataset.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/base_dataset.py new file mode 100644 index 0000000..25c7c8c --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/base_dataset.py @@ -0,0 +1,105 @@ +import torch +import torch.utils.data as data +from PIL import Image +import torchvision.transforms as transforms + + +class BaseDataset(data.Dataset): + def __init__(self): + super(BaseDataset, self).__init__() + + def name(self): + return 'BaseDataset' + + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + pass + + def getSampleWeights(self): + return torch.ones((len(self))) + + def __len__(self): + return 0 + + +def get_transform(opt): + transform_list = [] + if opt.resize_or_crop == 'resize_and_crop': + osize = [opt.loadSize, opt.loadSize] + transform_list.append(transforms.Resize(osize, Image.BICUBIC)) + transform_list.append(transforms.RandomCrop(opt.fineSize)) + elif opt.resize_or_crop == 'crop': + transform_list.append(transforms.RandomCrop(opt.fineSize)) + elif opt.resize_or_crop == 'scale_width': + transform_list.append(transforms.Lambda( + lambda img: __scale_width(img, opt.fineSize))) + elif opt.resize_or_crop == 'scale_width_and_crop': + transform_list.append(transforms.Lambda( + lambda img: __scale_width(img, opt.loadSize))) + transform_list.append(transforms.RandomCrop(opt.fineSize)) + elif opt.resize_or_crop == 'none': + transform_list.append(transforms.Lambda( + lambda img: __adjust(img))) + else: + raise ValueError('--resize_or_crop %s is not a valid option.'
% opt.resize_or_crop) + + if opt.isTrain and not opt.no_flip: + transform_list.append(transforms.RandomHorizontalFlip()) + + transform_list += [transforms.ToTensor(), + transforms.Normalize((0.5, 0.5, 0.5), + (0.5, 0.5, 0.5))] + return transforms.Compose(transform_list) + + +# just modify the width and height to be multiple of 4 +def __adjust(img): + ow, oh = img.size + + # the size needs to be a multiple of this number, + # because going through generator network may change img size + # and eventually cause size mismatch error + mult = 4 + if ow % mult == 0 and oh % mult == 0: + return img + w = (ow - 1) // mult + w = (w + 1) * mult + h = (oh - 1) // mult + h = (h + 1) * mult + + if ow != w or oh != h: + __print_size_warning(ow, oh, w, h) + + return img.resize((w, h), Image.BICUBIC) + + +def __scale_width(img, target_width): + ow, oh = img.size + + # the size needs to be a multiple of this number, + # because going through generator network may change img size + # and eventually cause size mismatch error + mult = 4 + assert target_width % mult == 0, "the target width needs to be multiple of %d." % mult + if (ow == target_width and oh % mult == 0): + return img + w = target_width + target_height = int(target_width * oh / ow) + m = (target_height - 1) // mult + h = (m + 1) * mult + + if target_height != h: + __print_size_warning(target_width, target_height, w, h) + + return img.resize((w, h), Image.BICUBIC) + + +def __print_size_warning(ow, oh, w, h): + if not hasattr(__print_size_warning, 'has_printed'): + print("The image size needs to be a multiple of 4. " + "The loaded image size was (%d, %d), so it was adjusted to " + "(%d, %d). This adjustment will be done to all images " + "whose sizes are not multiples of 4" % (ow, oh, w, h)) + __print_size_warning.has_printed = True diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/face_dataset.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/face_dataset.py new file mode 100644 index 0000000..66abe90 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/face_dataset.py @@ -0,0 +1,179 @@ +import os.path +import random +import torchvision.transforms as transforms +import torch +import numpy as np +from data.base_dataset import BaseDataset +from data.audio import Audio +from PIL import Image +from util import util +import h5py + + +def make_dataset(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.npy', '.NPY']): + #.deepspeech.npy + id_str = fname[:-15] #4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + + for id in ids: + fname=str(id)+'.deepspeech.npy' + path = os.path.join(root, fname) + images.append(path) + return images + +def make_expr_dataset(dir): + expressions = [] + num_expr = len([name for name in os.listdir(dir) if os.path.isfile(os.path.join(dir, name))]) + for i in range(num_expr): + fname = f'expr_{i}.npy' + path = os.path.join(dir, fname) + expressions.append(path) + return expressions + +def make_ids(paths, root_dir): + ids = [] + + for fname in paths: + l = fname.rfind('/') + id_str = fname[l+1:-15]#4] + i = int(id_str) + #print(fname, ': ', i) + ids.append(i) + return ids + + +# def load_expressions(input_dir): +# file = open(input_dir+"/expressions/", "r") +# expressions = [[float(x) for x in line.split()] for line in file] +# file.close() +# return expressions + +class
FaceDataset(BaseDataset): + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + self.opt = opt + + # directories + self.dataroot = opt.dataroot + self.h5py_path = os.path.join(opt.dataroot, opt.dataroot.split("/")[-1]+'.h5') + + # debug print + print('load sequence:', self.dataroot) + print('\th5py_path:', self.h5py_path) + + self.data = h5py.File(self.h5py_path, 'r') + + # set data + self.n_frames_total = min(len(self.data["dsf"]), len(self.data["ep"])) + + print('\tnum frames:', self.n_frames_total) + + + opt.nTrainObjects = 1 + opt.nValObjects = 1 + opt.nTestObjects = 1 + + opt.test_sequence_names = [[opt.dataroot.split("/")[-1], 'train']] + assert(opt.resize_or_crop == 'resize_and_crop') + + def getSampleWeights(self): + weights = np.ones((self.n_frames_total)) + return weights + + + def getAudioFilename(self): + return os.path.join(self.dataroot, 'audio.wav') + + def getAudioFeatureFilename(self, idx): + #return self.frame_paths[idx % len(self.frame_paths)] + audio_id = self.audio_ids[idx] + return os.path.join(self.audio_feature_dir, str(audio_id) + '.deepspeech.npy') + + + def __getitem__(self, global_index): + # select frame from sequence + index = global_index + + # intrinsics and extrinsics + intrinsics = np.zeros((4)) # not used + extrinsics = np.zeros((4,4))# not used + + # expressions + expressions = self.data["ep"][index] + expressions = torch.tensor(expressions) + + # identity + identity = torch.zeros(100) # not used + + # load deepspeech feature + feature_array = self.data["dsf"][index] + dsf_np = np.resize(feature_array, (16,29,1)) + dsf = transforms.ToTensor()(dsf_np.astype(np.float32)) + + + # load sequence data if necessary + if self.opt.look_ahead:# use prev and following frame infos + r = self.opt.seq_len//2 + for i in range(1,r): # prev frames + index_seq = index - i + if index_seq < 0: index_seq = 0 + + feature_array = self.data["dsf"][index_seq] + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + for i in range(1,self.opt.seq_len - r + 1): # following frames + index_seq = index + i + max_idx = len(self.data["dsf"])-1 + if index_seq > max_idx: index_seq = max_idx + + feature_array = self.data["dsf"][index_seq] + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf, dsf_seq], 0) # seq_len x 16 x 29 + # note the ordering [old ... current ... future] + else: + for i in range(1,self.opt.seq_len): + index_seq = index - i + if index_seq < 0: index_seq = 0 + + feature_array = self.data["dsf"][index_seq] + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... 
current] + + + ################################# + weight = 1.0 / self.n_frames_total + + return {#'TARGET': TARGET, 'UV': UV, + 'paths': '', #img_path, + 'intrinsics': np.array(intrinsics), + 'extrinsics': np.array(extrinsics), + 'expressions': expressions, + 'identity': identity, + 'audio_deepspeech': dsf, # deepspeech feature + 'target_id': -1, + 'internal_id': 0, + 'weight': np.array([weight]).astype(np.float32)} + + def __len__(self): + return self.n_frames_total + + def name(self): + return 'FaceDataset' diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/multi_face_audio_eq_tmp_cached_dataset.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/multi_face_audio_eq_tmp_cached_dataset.py new file mode 100644 index 0000000..59c4a89 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/data/multi_face_audio_eq_tmp_cached_dataset.py @@ -0,0 +1,458 @@ +import os.path +import random +import torchvision.transforms as transforms +import torch +import numpy as np +from data.base_dataset import BaseDataset +from data.audio import Audio +#from data.image_folder import make_dataset +from PIL import Image +from tqdm import tqdm + +#def make_dataset(dir): +# images = [] +# assert os.path.isdir(dir), '%s is not a valid directory' % dir +# for root, _, fnames in sorted(os.walk(dir)): +# for fname in fnames: +# if any(fname.endswith(extension) for extension in ['.bin', '.BIN']): +# path = os.path.join(root, fname) +# images.append(path) +# return sorted(images) + +def make_dataset(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.npy', '.NPY']): + #.deepspeech.npy + id_str = fname[:-15] #4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + + for id in ids: + fname=str(id)+'.deepspeech.npy' + path = os.path.join(root, fname) + images.append(path) + return images + +def make_ids(paths, root_dir): + ids = [] + + for fname in paths: + l = fname.rfind('/') + id_str = fname[l+1:-15]#4] + i = int(id_str) + #print(fname, ': ', i) + ids.append(i) + return ids + +def make_dataset_ids_png(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.png', '.png']): + id_str = fname[:-4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + return ids + +def load_intrinsics(input_dir): + file = open(input_dir+"/intrinsics.txt", "r") + intrinsics = [[(float(x) for x in line.split())] for line in file] + file.close() + intrinsics = list(intrinsics[0][0]) + return intrinsics + +def load_rigids(input_dir): + file = open(input_dir+"/rigid.txt", "r") + rigid_floats = [[float(x) for x in line.split()] for line in file] # note that it stores 5 lines per matrix (blank line) + file.close() + all_rigids = [ [rigid_floats[4*idx + 0],rigid_floats[4*idx + 1],rigid_floats[4*idx + 2],rigid_floats[4*idx + 3]] for idx in range(0, len(rigid_floats)//4) ] + return all_rigids + +def load_expressions(input_dir): + file = open(input_dir+"/expression.txt", "r") + expressions = [[float(x) for x in line.split()] for line in file] + file.close() + return expressions + +def load_identity(input_dir): + file = open(input_dir+"/identities.txt", "r") + identities = [[float(x) for x in line.split()] for line in file] + file.close() + 
return identities + + +class MultiFaceAudioEQTmpCachedDataset(BaseDataset): + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + self.opt = opt + self.root = opt.dataroot + + # read dataset file that contains the filenames for the train, val and test lists + file = open(self.root+"/dataset.txt", "r") + self.filename_train_list, self.filename_val_list, self.filename_test_list = [str(line) for line in file] + file.close() + if self.filename_train_list[-1] == '\n': self.filename_train_list = self.filename_train_list[:-1] + if self.filename_val_list[-1] == '\n': self.filename_val_list = self.filename_val_list[:-1] + if self.filename_test_list[-1] == '\n': self.filename_test_list = self.filename_test_list[:-1] + + + + # get list of train sequences + file = open(self.root+"/" + self.filename_train_list, "r") + self.train_sequence_names = [str(line) for line in file] + file.close() + for i in range(0,len(self.train_sequence_names)): + if self.train_sequence_names[i][-1] == '\n': + self.train_sequence_names[i] = self.train_sequence_names[i][:-1] + + # get list of val sequences + file = open(self.root+"/" + self.filename_val_list, "r") + self.val_sequence_names = [[str(w) for w in line.split()] for line in file] + file.close() + for i in range(0,len(self.val_sequence_names)): + if self.val_sequence_names[i][0][-1] == '\n': self.val_sequence_names[i][0] = self.val_sequence_names[i][0][:-1] + if self.val_sequence_names[i][1][-1] == '\n': self.val_sequence_names[i][1] = self.val_sequence_names[i][1][:-1] + + # get list of test sequences + file = open(self.root+"/" + self.filename_test_list, "r") + self.test_sequence_names = [[str(w) for w in line.split()] for line in file] + if opt.output_audio_expressions: self.test_sequence_names = self.test_sequence_names[0:1] + file.close() + for i in range(0,len(self.test_sequence_names)): + if self.test_sequence_names[i][0][-1] == '\n': self.test_sequence_names[i][0] = self.test_sequence_names[i][0][:-1] + if self.test_sequence_names[i][1][-1] == '\n': self.test_sequence_names[i][1] = self.test_sequence_names[i][1][:-1] + + # print some stats + print('filename_train_list:', self.filename_train_list) + print('\tnum_seq:', len(self.train_sequence_names)) + print('filename_val_list: ', self.filename_val_list) + print('\tnum_seq:', len(self.val_sequence_names)) + print('filename_test_list: ', self.filename_test_list) + print('\tnum_seq:', len(self.test_sequence_names)) + + opt.train_sequence_names = self.train_sequence_names + opt.val_sequence_names = self.val_sequence_names + opt.test_sequence_names = self.test_sequence_names + + # search mapping from val, test to train sequences that are used as targets + self.val_sequence_targets = [] + for i in range(0,len(self.val_sequence_names)): + target_name = self.val_sequence_names[i][1] + target_id = -1 + for j in range(0,len(self.train_sequence_names)): + if self.train_sequence_names[j] == target_name: + target_id = j + break + if target_id == -1: + print('Target sequence not in train set! ', target_name) + exit() + self.val_sequence_targets.append(target_id) + + self.test_sequence_targets = [] + for i in range(0,len(self.test_sequence_names)): + target_name = self.test_sequence_names[i][1] + target_id = -1 + for j in range(0,len(self.train_sequence_names)): + if self.train_sequence_names[j] == target_name: + target_id = j + break + if target_id == -1: + print('Target sequence not in train set! 
', target_name) + exit() + self.test_sequence_targets.append(target_id) + print('test: ', self.test_sequence_names[i]) + print('\t target:', target_id) + + # store len values + opt.nTrainObjects = len(self.train_sequence_names) + opt.nValObjects = len(self.val_sequence_names) + opt.nTestObjects = len(self.test_sequence_names) + + ################################################ + ################################################ + ################################################ + + # prepare dataloader paths / data + self.audio_feature_dir = [] + self.image_dir = [] + self.uvs_dir = [] + self.audio_ids = [] + self.image_ids = [] + self.intrinsics = [] + self.extrinsics = [] + self.expressions = [] + self.identities = [] + self.target_id = [] + self.n_frames_total = 0 + + if opt.phase == 'train': + self.sequence_names = self.train_sequence_names + for i in range(0,len(self.train_sequence_names)): + dataroot = os.path.join(opt.dataroot, self.train_sequence_names[i]) + audio_feature_dir = os.path.join(opt.dataroot, self.train_sequence_names[i], 'audio_feature') + image_dir = os.path.join(opt.dataroot, self.train_sequence_names[i], 'images') + uvs_dir = os.path.join(opt.dataroot, self.train_sequence_names[i], 'uvs') + print('load train sequence:', self.train_sequence_names[i]) + print('\tidentity_dir:', dataroot) + print('\taudio_feature_dir:', audio_feature_dir) + print('\timage_dir:', image_dir) + print('\tuvs_dir:', uvs_dir) + + audio_ids = make_ids(make_dataset(audio_feature_dir), dataroot) + image_ids = make_dataset_ids_png(image_dir) # [-1] * len(audio_ids) #make_ids(make_dataset(image_dir), dataroot) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(dataroot) + identity = load_identity(dataroot) + + min_len = min(len(audio_ids), len(image_ids), len(extrinsics), len(expressions)) + + self.audio_feature_dir.append(audio_feature_dir) + self.image_dir.append(image_dir) + self.uvs_dir.append(uvs_dir) + self.audio_ids.append(audio_ids[:min_len]) + self.image_ids.append(image_ids[:min_len]) + self.intrinsics.append(intrinsics) + self.extrinsics.append(extrinsics[:min_len]) + self.expressions.append(expressions[:min_len]) + self.identities.append(identity[:min_len]) + self.target_id.append(i) + + self.n_frames_total += min_len + elif opt.phase == 'val': + for i in range(0,len(self.val_sequence_names)): + target_id = self.val_sequence_targets[i] + dataroot = os.path.join(opt.dataroot, self.train_sequence_names[target_id]) + audio_feature_dir = os.path.join(opt.dataroot, self.val_sequence_names[i][0], 'audio_feature') + image_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'images') + uvs_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'uvs') + print('load val sequence:', self.val_sequence_names[i]) + print('\tidentity_dir:', dataroot) + print('\taudio_feature_dir:', audio_feature_dir) + print('\timage_dir:', image_dir) + print('\tuvs_dir:', uvs_dir) + audio_ids = make_ids(make_dataset(audio_feature_dir), os.path.join(opt.dataroot, self.val_sequence_names[i][0])) + image_ids = make_dataset_ids_png(image_dir) # [-1] * len(audio_ids) #make_ids(make_dataset(image_dir), dataroot) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(os.path.join(opt.dataroot, self.val_sequence_names[i][0])) + identity = load_identity(dataroot) + + min_len = min(len(audio_ids), len(image_ids), len(extrinsics), len(expressions)) + + 
self.audio_feature_dir.append(audio_feature_dir) + self.image_dir.append(image_dir) + self.uvs_dir.append(uvs_dir) + self.audio_ids.append(audio_ids[:min_len]) + self.image_ids.append(image_ids[:min_len]) + self.intrinsics.append(intrinsics) + self.extrinsics.append(extrinsics[:min_len]) + self.expressions.append(expressions[:min_len]) + self.identities.append(identity[:min_len]) + self.target_id.append(target_id) + + self.n_frames_total += min_len + else: # test + for i in range(0,len(self.test_sequence_names)): + target_id = self.test_sequence_targets[i] + dataroot = os.path.join(opt.dataroot, self.train_sequence_names[target_id]) + audio_feature_dir = os.path.join(opt.dataroot, self.test_sequence_names[i][0], 'audio_feature') + image_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'images') + uvs_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'uvs') + print('load test sequence:', self.test_sequence_names[i]) + print('\tidentity_dir:', dataroot) + print('\taudio_feature_dir:', audio_feature_dir) + print('\timage_dir:', image_dir) + print('\tuvs_dir:', uvs_dir) + audio_ids = make_ids(make_dataset(audio_feature_dir), os.path.join(opt.dataroot, self.test_sequence_names[i][0])) + image_ids = make_dataset_ids_png(image_dir) # [-1] * len(audio_ids) #make_ids(make_dataset(image_dir), dataroot) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(os.path.join(opt.dataroot, self.test_sequence_names[i][0])) + identity = load_identity(dataroot) + + min_len = min(len(audio_ids), len(image_ids), len(extrinsics), len(expressions)) + + self.audio_feature_dir.append(audio_feature_dir) + self.image_dir.append(image_dir) + self.uvs_dir.append(uvs_dir) + self.audio_ids.append(audio_ids[:min_len]) + self.image_ids.append(image_ids[:min_len]) + self.intrinsics.append(intrinsics) + self.extrinsics.append(extrinsics[:min_len]) + self.expressions.append(expressions[:min_len]) + self.identities.append(identity[:min_len]) + self.target_id.append(target_id) + + self.n_frames_total += min_len + + + print('frames_total:', self.n_frames_total) + + + #global_target_ids = [] + #for i in range(0,len(self.audio_ids)): + # for j in range(0,len(self.audio_ids[i])): + # global_target_ids.append(self.target_id[i]) + #global_target_ids=np.array(global_target_ids) + #self.weights = np.where(global_target_ids==2, 1.0 * np.ones((self.n_frames_total)), 0.01 * np.ones((self.n_frames_total)) ) + self.weights = [] + for i in range(0,len(self.audio_ids)): + l = len(self.audio_ids[i]) + for j in range(0,l): + self.weights.append(1.0 / l) + self.weights = np.array(self.weights) + + assert(opt.resize_or_crop == 'resize_and_crop') + + # mapping global to internal + self.mapping_global2internal = [] + self.mapping_global2internal_offset = [] + self.dsf = [] + offset = 0 + + for i in tqdm(range(0,len(self.audio_ids))): + l = len(self.audio_ids[i]) + dsf_seq = [] + for k in range(0,l): + self.mapping_global2internal.append(i) + self.mapping_global2internal_offset.append(offset) + dsf_fname = os.path.join(self.audio_feature_dir[i], str(self.audio_ids[i][k]) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq.append(dsf_np.astype(np.float32)) + self.dsf.append(dsf_seq) + offset += l + + + def getSampleWeights(self): + return self.weights + + def getitem(self, global_index): + + # select sequence + internal_sequence_id = 
self.mapping_global2internal[global_index] + sum_frames = self.mapping_global2internal_offset[global_index] + + # select frame from sequence + index = (global_index-sum_frames) % len(self.audio_ids[internal_sequence_id]) + + # get data ids + audio_id = self.audio_ids[internal_sequence_id][index] + image_id = self.image_ids[internal_sequence_id][index] + + #print('GET ITEM: ', index) + #img_path = self.frame_paths[sequence_id][index] + + # intrinsics and extrinsics + intrinsics = self.intrinsics[internal_sequence_id] + extrinsics = self.extrinsics[internal_sequence_id][image_id] + + # expressions + expressions = np.asarray(self.expressions[internal_sequence_id][audio_id], dtype=np.float32) + #print('expressions:', expressions.shape) + expressions[32] *= 0.0 # remove eye brow movements + expressions[41] *= 0.0 # remove eye brow movements + expressions[71:75] *= 0.0 # remove eye brow movements + expressions = torch.tensor(expressions) + + # identity + identity = torch.tensor(self.identities[internal_sequence_id][image_id]) + target_id = self.target_id[internal_sequence_id] # sequence id refers to the target sequence (of the training corpus) + + # load deepspeech feature + #print('audio_id', audio_id) + dsf_fname = os.path.join(self.audio_feature_dir[internal_sequence_id], str(audio_id) + '.deepspeech.npy') + + dsf_np = self.dsf[internal_sequence_id][index] + dsf = transforms.ToTensor()(dsf_np) + + # load sequence data if necessary + if self.opt.look_ahead:# use prev and following frame infos + r = self.opt.seq_len//2 + for i in range(1,r): # prev frames + index_seq = index - i + if index_seq < 0: index_seq = 0 + dsf_np = self.dsf[internal_sequence_id][index_seq] + dsf_seq = transforms.ToTensor()(dsf_np) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + for i in range(1,self.opt.seq_len - r + 1): # following frames + index_seq = index + i + max_idx = len(self.audio_ids[internal_sequence_id])-1 + if index_seq > max_idx: index_seq = max_idx + dsf_np = self.dsf[internal_sequence_id][index_seq] + dsf_seq = transforms.ToTensor()(dsf_np) # 1 x 16 x 29 + dsf = torch.cat([dsf, dsf_seq], 0) # seq_len x 16 x 29 + # note the ordering [old ... current ... future] + else: + for i in range(1,self.opt.seq_len): + index_seq = index - i + if index_seq < 0: index_seq = 0 + dsf_np = self.dsf[internal_sequence_id][index_seq] + dsf_seq = transforms.ToTensor()(dsf_np) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... 
current] + + #weight = 1.0 / len(self.audio_feature_dir[internal_sequence_id]) + weight = self.weights[global_index] + + return {'paths': dsf_fname, #img_path, + 'intrinsics': np.array(intrinsics), + 'extrinsics': np.array(extrinsics), + 'expressions': expressions, + 'identity': identity, + 'audio_deepspeech': dsf, # deepspeech feature + 'target_id':target_id, + 'internal_id':internal_sequence_id, + + 'weight': np.array([weight]).astype(np.float32)} + + + def __getitem__(self, global_index): + # select frame from sequence + index = global_index + current = self.getitem(index) + prv = self.getitem(max(index-1, 0)) + nxt = self.getitem(min(index+1, self.n_frames_total-1)) + + return { + 'paths': current['paths'], #img_path, + 'target_id': current['target_id'], + 'internal_id': current['internal_id'], + 'weight': current['weight'], + 'identity': current['identity'], + 'intrinsics': current['intrinsics'], + + 'extrinsics': current['extrinsics'], + 'expressions': current['expressions'], + 'audio_deepspeech': current['audio_deepspeech'], + + 'extrinsics_prv': prv['extrinsics'], + 'expressions_prv': prv['expressions'], + 'audio_deepspeech_prv': prv['audio_deepspeech'], + + 'extrinsics_nxt': nxt['extrinsics'], + 'expressions_nxt': nxt['expressions'], + 'audio_deepspeech_nxt': nxt['audio_deepspeech'], + } + + + def __len__(self): + return self.n_frames_total #len(self.frame_paths[0]) + + def name(self): + return 'MultiFaceAudioEQTmpCachedDataset' diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/models/__init__.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/models/__init__.py new file mode 100644 index 0000000..4d92091 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/models/__init__.py @@ -0,0 +1,39 @@ +import importlib +from models.base_model import BaseModel + + +def find_model_using_name(model_name): + # Given the option --model [modelname], + # the file "models/modelname_model.py" + # will be imported. + model_filename = "models." + model_name + "_model" + modellib = importlib.import_module(model_filename) + + # In the file, the class called ModelNameModel() will + # be instantiated. It has to be a subclass of BaseModel, + # and it is case-insensitive. + model = None + target_model_name = model_name.replace('_', '') + 'model' + for name, cls in modellib.__dict__.items(): + if name.lower() == target_model_name.lower() \ + and issubclass(cls, BaseModel): + model = cls + + if model is None: + print("In %s.py, there should be a subclass of BaseModel with class name that matches %s in lowercase." % (model_filename, target_model_name)) + exit(0) + + return model + + +def get_option_setter(model_name): + model_class = find_model_using_name(model_name) + return model_class.modify_commandline_options + + +def create_model(opt): + model = find_model_using_name(opt.model) + instance = model() + instance.initialize(opt) + print("model [%s] was created" % (instance.name())) + return instance diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/models/audio2ExpressionsAttentionTMP4_model.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/models/audio2ExpressionsAttentionTMP4_model.py new file mode 100644 index 0000000..8c8b6f2 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/models/audio2ExpressionsAttentionTMP4_model.py @@ -0,0 +1,342 @@ +import torch +import torch.nn as nn +from util.image_pool import ImagePool +from .base_model import BaseModel +from . 
import networks + +from BaselModel.basel_model import * + +INVALID_UV = -1.0 + + +class ExpressionEstimator_Attention(nn.Module): + def __init__(self, n_output_expressions, nIdentities, seq_len, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False): + super(ExpressionEstimator_Attention, self).__init__() + print('Estimator Attention') + ################################# + ######## audio net ########## + ################################# + self.seq_len = seq_len + + dropout_rate = 0.0 + if use_dropout == True: + #dropout_rate = 0.5 + dropout_rate = 0.25 + + self.convNet = nn.Sequential( + nn.Conv2d(29, 32, kernel_size=(3,1), stride=(2,1), padding=(1,0), bias=True), # 29 x 16 x 1 => 32 x 8 x 1 + nn.LeakyReLU(0.02, True), + nn.Conv2d(32, 32, kernel_size=(3,1), stride=(2,1), padding=(1,0), bias=True), # 32 x 8 x 1 => 32 x 4 x 1 + nn.LeakyReLU(0.02, True), + nn.Conv2d(32, 64, kernel_size=(3,1), stride=(2,1), padding=(1,0), bias=True), # 32 x 4 x 1 => 64 x 2 x 1 + nn.LeakyReLU(0.2, True), + nn.Conv2d(64, 64, kernel_size=(3,1), stride=(2,1), padding=(1,0), bias=True), # 64 x 2 x 1 => 64 x 1 x 1 + nn.LeakyReLU(0.2, True), + ) + + fullNet_input_size = 64 + + self.subspace_dim = 32 # number of audio expressions + print('fullNet_input_size: ', fullNet_input_size) + self.fullNet = nn.Sequential( + nn.Linear(in_features = fullNet_input_size, out_features=128, bias = True), + nn.LeakyReLU(0.02), + + nn.Linear(in_features = 128, out_features=64, bias = True), + nn.LeakyReLU(0.02), + + nn.Linear(in_features = 64, out_features=self.subspace_dim, bias = True), + nn.Tanh() + ) + + + # mapping from subspace to full expression space + self.register_parameter('mapping', torch.nn.Parameter(torch.randn(1, nIdentities, N_EXPRESSIONS, self.subspace_dim, requires_grad=True))) + + # attention + self.attentionConvNet = nn.Sequential( # b x subspace_dim x seq_len + nn.Conv1d(self.subspace_dim, 16, kernel_size=3, stride=1, padding=1, bias=True), + nn.LeakyReLU(0.02, True), + nn.Conv1d(16, 8, kernel_size=3, stride=1, padding=1, bias=True), + nn.LeakyReLU(0.02, True), + nn.Conv1d(8, 4, kernel_size=3, stride=1, padding=1, bias=True), + nn.LeakyReLU(0.02, True), + nn.Conv1d(4, 2, kernel_size=3, stride=1, padding=1, bias=True), + nn.LeakyReLU(0.02, True), + nn.Conv1d(2, 1, kernel_size=3, stride=1, padding=1, bias=True), + nn.LeakyReLU(0.02, True) + ) + self.attentionNet = nn.Sequential( + nn.Linear(in_features = self.seq_len, out_features=self.seq_len, bias = True), + nn.Softmax(dim=1) + ) + #self.hidden2subspace = nn.Linear(self.subspace_dim,self.subspace_dim) + + def forward_internal(self, audio_features_sequence, identity_id): + result_subspace, intermediate_expression = self.getAudioExpressions_internal(audio_features_sequence) + mapping = torch.index_select(self.mapping[0], dim = 0, index = identity_id) + result = 10.0 * torch.bmm(mapping, result_subspace)[:,:,0] + result_intermediate = 10.0 * torch.bmm(mapping, intermediate_expression)[:,:,0] + #exit(-1) + return result, result_intermediate + + def forward(self, audio_features_sequence, identity_id): + result_subspace = self.getAudioExpressions(audio_features_sequence) + mapping = torch.index_select(self.mapping[0], dim = 0, index = identity_id) + result = torch.bmm(mapping, result_subspace)[:,:,0] + #exit(-1) + return 10.0 * result + + def getAudioExpressions_internal(self, audio_features_sequence): + # audio_features_sequence: b x seq_len x 16 x 29 + b = audio_features_sequence.shape[0] # batchsize + audio_features_sequence = audio_features_sequence.view(b * 
self.seq_len, 1, 16, 29) # b * seq_len x 1 x 16 x 29 + audio_features_sequence = torch.transpose(audio_features_sequence, 1, 3) # b* seq_len x 29 x 16 x 1 + conv_res = self.convNet( audio_features_sequence ) + conv_res = torch.reshape( conv_res, (b * self.seq_len, 1, -1)) + result_subspace = self.fullNet(conv_res)[:,0,:] # b * seq_len x subspace_dim + result_subspace = result_subspace.view(b, self.seq_len, self.subspace_dim)# b x seq_len x subspace_dim + + ################# + ### attention ### + ################# + result_subspace_T = torch.transpose(result_subspace, 1, 2) # b x subspace_dim x seq_len + intermediate_expression = result_subspace_T[:,:,(self.seq_len // 2):(self.seq_len // 2) + 1] + att_conv_res = self.attentionConvNet(result_subspace_T) + #print('att_conv_res', att_conv_res.shape) + attention = self.attentionNet(att_conv_res.view(b, self.seq_len)).view(b, self.seq_len, 1) # b x seq_len x 1 + #print('attention', attention.shape) + # pooling along the sequence dimension + result_subspace = torch.bmm(result_subspace_T, attention) + #print('result_subspace', result_subspace.shape) + ### + + return result_subspace.view(b, self.subspace_dim, 1), intermediate_expression + + def getAudioExpressions(self, audio_features_sequence): + expr, _ = self.getAudioExpressions_internal(audio_features_sequence) + return expr + + def regularizer(self): + #reg = torch.norm(self.mapping) + reg_mapping = torch.mean(torch.abs(self.mapping)) + + # one could also enforce orthogonality here + + # s_browExpressions[] = { 32, 41, 71, 72, 73, 74, 75 }; + reg_eye_brow = torch.mean(torch.abs( self.mapping[0,:,[32, 41, 71, 72, 73, 74, 75],:] )) + #return 0.01 * reg_mapping + 1.0 * reg_eye_brow + return 0.0 * reg_mapping + + + +def define_ExpressionEstimator(estimatorType='estimatorDefault', nIdentities=1, seq_len=1, norm='batch', use_dropout=False, init_type='normal', init_gain=0.02, gpu_ids=[]): + net = None + print('EstimatorType: ', estimatorType) + if estimatorType=='estimatorAttention': net = ExpressionEstimator_Attention(N_EXPRESSIONS,nIdentities, seq_len) + + return networks.init_net(net, init_type, init_gain, gpu_ids) + + +class Audio2ExpressionsAttentionTMP4Model(BaseModel): + def name(self): + return 'Audio2ExpressionsAttentionTMP4Model' + + @staticmethod + def modify_commandline_options(parser, is_train=True): + + # changing the default values to match the pix2pix paper + # (https://phillipi.github.io/pix2pix/) + #parser.set_defaults(norm='batch', netG='unet_256') + parser.set_defaults(norm='instance', netG='unet_256') + parser.set_defaults(dataset_mode='aligned') + if is_train: + parser.set_defaults(pool_size=0, no_lsgan=True) + parser.add_argument('--lambda_L1', type=float, default=100.0, help='weight for L1 loss') + + return parser + + def initialize(self, opt): + BaseModel.initialize(self, opt) + self.isTrain = opt.isTrain + + self.trainRenderer = not opt.fix_renderer + + # specify the training losses you want to print out. The program will call base_model.get_current_losses + self.loss_names = ['G_L1','G_L1_ABSOLUTE','G_L1_RELATIVE', 'G_Regularizer'] + + # specify the images you want to save/display. The program will call base_model.get_current_visuals + #self.visual_names = ['input_uv', 'fake', 'target'] + self.visual_names = ['zeros'] + self.zeros = torch.zeros(1,3,2,2) + + # specify the models you want to save to the disk. 
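+        # (a sketch of the contract, not new behaviour: every entry 'X' in
+        # self.model_names must have a matching attribute self.X, because
+        # base_model.save_networks/load_networks resolve it via getattr(self, 'X')
+        # and read/write '<epoch>_X.pth' under the checkpoints directory)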
The program will call base_model.save_networks and base_model.load_networks + if self.isTrain: + self.model_names = ['netG'] + else: # during test time, only load Gs + self.model_names = ['netG'] + + self.fake_expressions = None + self.fake_expressions_prv = None + self.fake_expressions_nxt = None + + if self.isTrain: + self.morphable_model = MorphableModel() + self.mask = self.morphable_model.LoadMask() + + nIdentities=opt.nTrainObjects + + # load/define networks + self.netG = define_ExpressionEstimator(estimatorType=opt.rendererType, nIdentities=nIdentities, seq_len=opt.seq_len, gpu_ids=self.gpu_ids) + + if self.isTrain: + use_sigmoid = opt.no_lsgan + self.fake_AB_pool = ImagePool(opt.pool_size) + + # define loss functions + self.criterionL1 = torch.nn.L1Loss() + self.criterionL1Smooth = torch.nn.SmoothL1Loss() + self.criterionL2 = torch.nn.MSELoss() + + # initialize optimizers + self.optimizers = [] + #self.optimizer_G = torch.optim.Adam(self.netG.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999), weight_decay=0.1 )#10.0) + self.optimizer_G = torch.optim.Adam(self.netG.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999), weight_decay=0.0 )#10.0) + self.optimizers.append(self.optimizer_G) + + + def set_input(self, input): + #self.image_paths = input['paths'] + + self.expressions = input['expressions'].cuda() + self.audio_features = input['audio_deepspeech'].cuda() # b x seq_len x 16 x 29 + + if self.isTrain: + self.expressions_prv = input['expressions_prv'].cuda() + self.audio_features_prv = input['audio_deepspeech_prv'].cuda() # b x seq_len x 16 x 29 + + self.expressions_nxt = input['expressions_nxt'].cuda() + self.audio_features_nxt = input['audio_deepspeech_nxt'].cuda() # b x seq_len x 16 x 29 + + self.target_id = input['target_id'].cuda() + + + def forward(self): + # estimate expressions + if self.opt.output_audio_expressions: #self.opt.dataset_mode=='audio': + self.fake_expressions = self.netG.getAudioExpressions(self.audio_features) + if self.isTrain: + self.fake_expressions_prv = self.netG.getAudioExpressions(self.audio_features_prv) + self.fake_expressions_nxt = self.netG.getAudioExpressions(self.audio_features_nxt) + else: + self.fake_expressions, self.fake_expressions_intermediate = self.netG.forward_internal(self.audio_features, self.target_id) + if self.isTrain: + self.fake_expressions_prv = self.netG(self.audio_features_prv, self.target_id) + self.fake_expressions_nxt = self.netG(self.audio_features_nxt, self.target_id) + + + + def backward_G(self, epoch): + + # Second, G(A) = B + #self.loss_G_L1 = self.criterionL1(self.fake_expressions, self.expressions) + + # difference in vertex space + mask = torch.cat([self.mask[:,None],self.mask[:,None],self.mask[:,None]], 1) + mask = mask + 0.1 * torch.ones_like(mask) # priority for the mask region, but other region should also be constrained + + # absolute (single timesteps) + diff_expression = self.fake_expressions - self.expressions + diff_vertices = self.morphable_model.compute_expression_delta(diff_expression) + + + diff_expression_intermediate = self.fake_expressions_intermediate - self.expressions + diff_vertices_intermediate = self.morphable_model.compute_expression_delta(diff_expression_intermediate) + + + diff_expression_prv = self.fake_expressions_prv - self.expressions_prv + diff_vertices_prv = self.morphable_model.compute_expression_delta(diff_expression_prv) + + diff_expression_nxt = self.fake_expressions_nxt - self.expressions_nxt + diff_vertices_nxt = self.morphable_model.compute_expression_delta(diff_expression_nxt) + + 
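+        # Sketch of the idea behind the temporal terms assembled below: they
+        # penalise differences of frame-to-frame *changes*, not of the frames
+        # themselves, e.g. for the cur/nxt pair
+        #   d = (fake_cur - fake_nxt) - (gt_cur - gt_nxt)
+        # Per-frame offsets are already handled by the absolute terms above;
+        # these terms suppress temporal jitter in the predicted expressions.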
# relative (temporal 1 timestep) cur - nxt and prv - cur + diff_expression_tmp_cur_nxt = (self.fake_expressions - self.fake_expressions_nxt) - (self.expressions - self.expressions_nxt) + diff_vertices_tmp_cur_nxt = self.morphable_model.compute_expression_delta(diff_expression_tmp_cur_nxt) + diff_expression_tmp_prv_cur = (self.fake_expressions_prv - self.fake_expressions) - (self.expressions_prv - self.expressions) + diff_vertices_tmp_prv_cur = self.morphable_model.compute_expression_delta(diff_expression_tmp_prv_cur) + + # relative (temporal 2 timesteps) nxt - prv + diff_expression_tmp_nxt_prv = (self.fake_expressions_nxt - self.fake_expressions_prv) - (self.expressions_nxt - self.expressions_prv) + diff_vertices_tmp_nxt_prv = self.morphable_model.compute_expression_delta(diff_expression_tmp_nxt_prv) + + #print('mask: ', mask.shape) + #print('diff_vertices: ', diff_vertices.shape) + + self.loss_G_L1_ABSOLUTE = 0.0 + self.loss_G_L1_RELATIVE = 0.0 + if self.opt.lossType == 'L1': + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * torch.abs(diff_vertices)) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * torch.abs(diff_vertices_prv)) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * torch.abs(diff_vertices_nxt)) + + self.loss_G_L1_ABSOLUTE += 3000.0 * torch.mean(mask * torch.abs(diff_vertices_intermediate)) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * torch.abs(diff_vertices_tmp_cur_nxt)) + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * torch.abs(diff_vertices_tmp_prv_cur)) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * torch.abs(diff_vertices_tmp_nxt_prv)) + + elif self.opt.lossType == 'L2': + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * diff_vertices * diff_vertices) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * diff_vertices_prv * diff_vertices_prv) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * diff_vertices_nxt * diff_vertices_nxt) + + self.loss_G_L1_ABSOLUTE += 3000.0 * torch.mean(mask * diff_vertices_intermediate * diff_vertices_intermediate) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * diff_vertices_tmp_cur_nxt * diff_vertices_tmp_cur_nxt) + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * diff_vertices_tmp_prv_cur * diff_vertices_tmp_prv_cur) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * diff_vertices_tmp_nxt_prv * diff_vertices_tmp_nxt_prv) + + elif self.opt.lossType == 'RMS': + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.sqrt(torch.mean(mask * diff_vertices * diff_vertices)) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.sqrt(torch.mean(mask * diff_vertices_prv * diff_vertices_prv)) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.sqrt(torch.mean(mask * diff_vertices_nxt * diff_vertices_nxt)) + + self.loss_G_L1_ABSOLUTE += 3000.0 * torch.sqrt(torch.mean(mask * diff_vertices_intermediate * diff_vertices_intermediate)) + + + self.loss_G_L1_RELATIVE += 20000.0 * torch.sqrt(torch.mean(mask * diff_vertices_tmp_cur_nxt * diff_vertices_tmp_cur_nxt)) + self.loss_G_L1_RELATIVE += 20000.0 * torch.sqrt(torch.mean(mask * diff_vertices_tmp_prv_cur * diff_vertices_tmp_prv_cur)) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.sqrt(torch.mean(mask * diff_vertices_tmp_nxt_prv * diff_vertices_tmp_nxt_prv)) + + else: # L1 + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * torch.abs(diff_vertices)) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * torch.abs(diff_vertices_prv)) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * torch.abs(diff_vertices_nxt)) + + self.loss_G_L1_ABSOLUTE += 
3000.0 * torch.mean(mask * torch.abs(diff_vertices_intermediate)) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * torch.abs(diff_vertices_tmp_cur_nxt)) + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * torch.abs(diff_vertices_tmp_prv_cur)) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * torch.abs(diff_vertices_tmp_nxt_prv)) + + self.loss_G_L1 = self.loss_G_L1_ABSOLUTE + self.loss_G_L1_RELATIVE + self.loss_G_Regularizer = self.netG.regularizer() + + self.loss_G = self.loss_G_L1 + self.loss_G_Regularizer #self.loss_G_GAN + self.loss_G_L1 + self.regularizerTex + + + self.loss_G.backward() + + def optimize_parameters(self, epoch_iter): + self.forward() + + # update Generator + self.optimizer_G.zero_grad() + self.backward_G(epoch_iter) + self.optimizer_G.step() + diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/models/base_model.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/models/base_model.py new file mode 100644 index 0000000..04489f6 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/models/base_model.py @@ -0,0 +1,222 @@ +import os +import torch +from collections import OrderedDict +from . import networks +import numpy as np +from PIL import Image + +def save_tensor_image(input_image, image_path): + if isinstance(input_image, torch.Tensor): + image_tensor = input_image.data + image_numpy = image_tensor[0].cpu().float().numpy() + if image_numpy.shape[0] == 1: + image_numpy = np.tile(image_numpy, (3, 1, 1)) + image_numpy = (np.transpose(image_numpy, (1, 2, 0)) + 1) / 2.0 * 255.0 + else: + image_numpy = input_image + image_numpy = image_numpy.astype(np.uint8) + image_pil = Image.fromarray(image_numpy) + image_pil.save(image_path) + +class BaseModel(): + + # modify parser to add command line options, + # and also change the default values if needed + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def name(self): + return 'BaseModel' + + def initialize(self, opt): + self.opt = opt + self.gpu_ids = opt.gpu_ids + self.isTrain = opt.isTrain + self.device = torch.device('cuda:{}'.format(self.gpu_ids[0])) if self.gpu_ids else torch.device('cpu') + self.load_dir = os.path.join(opt.checkpoints_dir, opt.name) + self.save_dir = os.path.join(opt.checkpoints_dir, opt.name) + if opt.resize_or_crop != 'scale_width': + torch.backends.cudnn.benchmark = True + self.loss_names = [] + self.model_names = [] + self.visual_names = [] + self.image_paths = [] + + def set_input(self, input): + pass + + def forward(self): + pass + + # load and print networks; create schedulers + def setup(self, opt, parser=None): + if self.isTrain: + self.schedulers = [networks.get_scheduler(optimizer, opt) for optimizer in self.optimizers] + if not self.isTrain or opt.continue_train: + load_suffix = 'iter_%d' % opt.load_iter if opt.load_iter > 0 else opt.epoch + self.load_networks(load_suffix) + self.print_networks(opt.verbose) + + + + # load specific moudles + def loadModules(self, opt, model_name, module_names): + for name in module_names: + if isinstance(name, str): + load_dir = os.path.join(opt.checkpoints_dir, model_name) + load_filename = 'latest_%s.pth' % (name) + load_path = os.path.join(load_dir, load_filename) + net = getattr(self, name) + if isinstance(net, torch.Tensor): + print('loading the tensor from %s' % load_path) + net_loaded = torch.load(load_path, map_location=str(self.device)) + net.copy_(net_loaded) + else: + # if isinstance(net, torch.nn.DataParallel): + # net = 
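+                # Example usage (sketch; 'some_pretrained_run' is a hypothetical
+                # checkpoint directory under opt.checkpoints_dir):
+                #   model.loadModules(opt, 'some_pretrained_run', ['netG'])
+                # would load checkpoints/some_pretrained_run/latest_netG.pth into self.netG.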
net.module + print('loading the module from %s' % load_path) + # if you are using PyTorch newer than 0.4 (e.g., built from + # GitHub source), you can remove str() on self.device + state_dict = torch.load(load_path, map_location=str(self.device)) + if hasattr(state_dict, '_metadata'): + del state_dict._metadata + + # patch InstanceNorm checkpoints prior to 0.4 + for key in list(state_dict.keys()): # need to copy keys here because we mutate in loop + self.__patch_instance_norm_state_dict(state_dict, net, key.split('.')) + net.load_state_dict(state_dict) + + + + + # make models eval mode during test time + def eval(self): + for name in self.model_names: + if isinstance(name, str): + net = getattr(self, name) + net.eval() + + # used in test time, wrapping `forward` in no_grad() so we don't save + # intermediate steps for backprop + def test(self): + with torch.no_grad(): + self.forward() + + # get image paths + def get_image_paths(self): + return self.image_paths + + def optimize_parameters(self): + pass + + # update learning rate (called once every epoch) + def update_learning_rate(self): + for scheduler in self.schedulers: + scheduler.step() + lr = self.optimizers[0].param_groups[0]['lr'] + print('learning rate = %.7f' % lr) + + # return visualization images. train.py will display these images, and save the images to a html + def get_current_visuals(self): + visual_ret = OrderedDict() + for name in self.visual_names: + if isinstance(name, str): + visual_ret[name] = getattr(self, name) + return visual_ret + + # return traning losses/errors. train.py will print out these errors as debugging information + def get_current_losses(self): + errors_ret = OrderedDict() + for name in self.loss_names: + if isinstance(name, str): + # float(...) works for both scalar tensor and float number + errors_ret[name] = float(getattr(self, 'loss_' + name)) + return errors_ret + + # save models to the disk + def save_networks(self, epoch): + for name in self.model_names: + if isinstance(name, str): + save_filename = '%s_%s.pth' % (epoch, name) + save_path = os.path.join(self.save_dir, save_filename) + net = getattr(self, name) + + if isinstance(net, torch.Tensor): + #torch.save(net.state_dict(), save_path) + torch.save(net, save_path) + for i in range(0, list(net.size())[0]): + save_tensor_image(net[i:i+1,0:3,:,:], save_path+str(i)+'.png') + else: + if len(self.gpu_ids) > 0 and torch.cuda.is_available(): + #torch.save(net.module.cpu().state_dict(), save_path) # << original + torch.save(net.cpu().state_dict(), save_path) + net.cuda(self.gpu_ids[0]) + else: + torch.save(net.cpu().state_dict(), save_path) + + def __patch_instance_norm_state_dict(self, state_dict, module, keys, i=0): + key = keys[i] + if i + 1 == len(keys): # at the end, pointing to a parameter/buffer + if module.__class__.__name__.startswith('InstanceNorm') and \ + (key == 'running_mean' or key == 'running_var'): + if getattr(module, key) is None: + state_dict.pop('.'.join(keys)) + if module.__class__.__name__.startswith('InstanceNorm') and \ + (key == 'num_batches_tracked'): + state_dict.pop('.'.join(keys)) + else: + self.__patch_instance_norm_state_dict(state_dict, getattr(module, key), keys, i + 1) + + # load models from the disk + def load_networks(self, epoch): + for name in self.model_names: + if isinstance(name, str): + load_filename = '%s_%s.pth' % (epoch, name) + load_path = os.path.join(self.load_dir, load_filename) + net = getattr(self, name) + if isinstance(net, torch.Tensor): + print('loading the tensor from %s' % load_path) + 
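+                    # map_location=str(self.device) remaps tensors saved on GPU
+                    # onto the current device, so checkpoints trained on CUDA can
+                    # still be loaded for CPU-only inference.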
net_loaded = torch.load(load_path, map_location=str(self.device)) + net.copy_(net_loaded) + else: + # if isinstance(net, torch.nn.DataParallel): + # net = net.module + print('loading the module from %s' % load_path) + # if you are using PyTorch newer than 0.4 (e.g., built from + # GitHub source), you can remove str() on self.device + state_dict = torch.load(load_path, map_location=str(self.device)) + if hasattr(state_dict, '_metadata'): + del state_dict._metadata + + # patch InstanceNorm checkpoints prior to 0.4 + for key in list(state_dict.keys()): # need to copy keys here because we mutate in loop + self.__patch_instance_norm_state_dict(state_dict, net, key.split('.')) + net.load_state_dict(state_dict) + + # print network information + def print_networks(self, verbose): + print('---------- Networks initialized -------------') + for name in self.model_names: + if isinstance(name, str): + net = getattr(self, name) + if isinstance(net, torch.Tensor): + num_params = net.numel() + print('[Tensor %s] Total number of parameters : %.3f M' % (name, num_params / 1e6)) + else: + num_params = 0 + for param in net.parameters(): + num_params += param.numel() + if verbose: + print(net) + print('[Network %s] Total number of parameters : %.3f M' % (name, num_params / 1e6)) + print('-----------------------------------------------') + + # set requies_grad=False to avoid computation + def set_requires_grad(self, nets, requires_grad=False): + if not isinstance(nets, list): + nets = [nets] + for net in nets: + if net is not None: + for param in net.parameters(): + param.requires_grad = requires_grad diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/models/networks.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/models/networks.py new file mode 100644 index 0000000..a8c6505 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/models/networks.py @@ -0,0 +1,383 @@ +import torch +import torch.nn as nn +from torch.nn import init +import functools +from torch.optim import lr_scheduler + +############################################################################### +# Helper Functions +############################################################################### + + +def get_norm_layer(norm_type='instance'): + if norm_type == 'batch': + norm_layer = functools.partial(nn.BatchNorm2d, affine=True) + elif norm_type == 'instance': + norm_layer = functools.partial(nn.InstanceNorm2d, affine=False, track_running_stats=False) + elif norm_type == 'none': + norm_layer = None + else: + raise NotImplementedError('normalization layer [%s] is not found' % norm_type) + return norm_layer + + +def get_scheduler(optimizer, opt): + if opt.lr_policy == 'lambda': + def lambda_rule(epoch): + lr_l = 1.0 - max(0, epoch + opt.epoch_count - opt.niter) / float(opt.niter_decay + 1) + return lr_l + scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda_rule) + elif opt.lr_policy == 'step': + scheduler = lr_scheduler.StepLR(optimizer, step_size=opt.lr_decay_iters, gamma=0.1) + elif opt.lr_policy == 'plateau': + scheduler = lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.2, threshold=0.01, patience=5) + elif opt.lr_policy == 'cosine': + scheduler = lr_scheduler.CosineAnnealingLR(optimizer, T_max=opt.niter, eta_min=0) + else: + return NotImplementedError('learning rate policy [%s] is not implemented', opt.lr_policy) + return scheduler + + +def init_weights(net, init_type='normal', gain=0.02): + def init_func(m): + classname = m.__class__.__name__ + 
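+        # layers are matched by class-name substring: Conv*/Linear modules get
+        # the chosen weight init below, while BatchNorm2d weights are
+        # initialised around 1.0 in a separate branch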
+        if hasattr(m, 'weight') and (classname.find('Conv') != -1 or classname.find('Linear') != -1):
+            if init_type == 'normal':
+                init.normal_(m.weight.data, 0.0, gain)
+            elif init_type == 'xavier':
+                init.xavier_normal_(m.weight.data, gain=gain)
+            elif init_type == 'kaiming':
+                init.kaiming_normal_(m.weight.data, a=0, mode='fan_in')
+            elif init_type == 'orthogonal':
+                init.orthogonal_(m.weight.data, gain=gain)
+            else:
+                raise NotImplementedError('initialization method [%s] is not implemented' % init_type)
+            if hasattr(m, 'bias') and m.bias is not None:
+                init.constant_(m.bias.data, 0.0)
+        elif classname.find('BatchNorm2d') != -1:
+            init.normal_(m.weight.data, 1.0, gain)
+            init.constant_(m.bias.data, 0.0)
+
+    print('initialize network with %s' % init_type)
+    net.apply(init_func)
+
+
+def init_net(net, init_type='normal', init_gain=0.02, gpu_ids=[]):
+    if len(gpu_ids) > 0:
+        assert(torch.cuda.is_available())
+        net.to(gpu_ids[0])
+        #net = torch.nn.DataParallel(net, gpu_ids)
+    init_weights(net, init_type, gain=init_gain)
+    return net
+
+
+def define_G(input_nc, output_nc, ngf, netG, norm='batch', use_dropout=False, init_type='normal', init_gain=0.02, gpu_ids=[]):
+    net = None
+    norm_layer = get_norm_layer(norm_type=norm)
+
+    if netG == 'resnet_9blocks':
+        net = ResnetGenerator(input_nc, output_nc, ngf, norm_layer=norm_layer, use_dropout=use_dropout, n_blocks=9)
+    elif netG == 'resnet_6blocks':
+        net = ResnetGenerator(input_nc, output_nc, ngf, norm_layer=norm_layer, use_dropout=use_dropout, n_blocks=6)
+    elif netG == 'unet_128':
+        net = UnetGenerator(input_nc, output_nc, 7, ngf, norm_layer=norm_layer, use_dropout=use_dropout)
+    elif netG == 'unet_256':
+        net = UnetGenerator(input_nc, output_nc, 8, ngf, norm_layer=norm_layer, use_dropout=use_dropout)
+    else:
+        raise NotImplementedError('Generator model name [%s] is not recognized' % netG)
+    return init_net(net, init_type, init_gain, gpu_ids)
+
+
+def define_D(input_nc, ndf, netD,
+             n_layers_D=3, norm='batch', use_sigmoid=False, init_type='normal', init_gain=0.02, gpu_ids=[]):
+    net = None
+    norm_layer = get_norm_layer(norm_type=norm)
+
+    if netD == 'basic':
+        net = NLayerDiscriminator(input_nc, ndf, n_layers=3, norm_layer=norm_layer, use_sigmoid=use_sigmoid)
+    elif netD == 'n_layers':
+        net = NLayerDiscriminator(input_nc, ndf, n_layers_D, norm_layer=norm_layer, use_sigmoid=use_sigmoid)
+    elif netD == 'pixel':
+        net = PixelDiscriminator(input_nc, ndf, norm_layer=norm_layer, use_sigmoid=use_sigmoid)
+    else:
+        raise NotImplementedError('Discriminator model name [%s] is not recognized' % netD)
+    return init_net(net, init_type, init_gain, gpu_ids)
+
+
+##############################################################################
+# Classes
+##############################################################################
+
+
+# Defines the GAN loss which uses either LSGAN or the regular GAN.
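+# A usage sketch (netD, real_AB and fake_AB are assumed to exist in the
+# surrounding training code, as in the pix2pix-style models):
+#   criterionGAN = GANLoss(use_lsgan=True)
+#   loss_D_real = criterionGAN(netD(real_AB), True)
+#   loss_D_fake = criterionGAN(netD(fake_AB.detach()), False)
+#   loss_D = 0.5 * (loss_D_real + loss_D_fake)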
+# When LSGAN is used, it is basically same as MSELoss, +# but it abstracts away the need to create the target label tensor +# that has the same size as the input +class GANLoss(nn.Module): + def __init__(self, use_lsgan=True, target_real_label=1.0, target_fake_label=0.0): + super(GANLoss, self).__init__() + self.register_buffer('real_label', torch.tensor(target_real_label)) + self.register_buffer('fake_label', torch.tensor(target_fake_label)) + if use_lsgan: + self.loss = nn.MSELoss() + else: + self.loss = nn.BCELoss() + + def get_target_tensor(self, input, target_is_real): + if target_is_real: + target_tensor = self.real_label + else: + target_tensor = self.fake_label + return target_tensor.expand_as(input) + + def __call__(self, input, target_is_real): + target_tensor = self.get_target_tensor(input, target_is_real) + return self.loss(input, target_tensor) + + +# Defines the generator that consists of Resnet blocks between a few +# downsampling/upsampling operations. +# Code and idea originally from Justin Johnson's architecture. +# https://github.com/jcjohnson/fast-neural-style/ +class ResnetGenerator(nn.Module): + def __init__(self, input_nc, output_nc, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False, n_blocks=6, padding_type='reflect'): + assert(n_blocks >= 0) + super(ResnetGenerator, self).__init__() + self.input_nc = input_nc + self.output_nc = output_nc + self.ngf = ngf + if type(norm_layer) == functools.partial: + use_bias = norm_layer.func == nn.InstanceNorm2d + else: + use_bias = norm_layer == nn.InstanceNorm2d + + model = [nn.ReflectionPad2d(3), + nn.Conv2d(input_nc, ngf, kernel_size=7, padding=0, + bias=use_bias), + norm_layer(ngf), + nn.ReLU(True)] + + n_downsampling = 2 + for i in range(n_downsampling): + mult = 2**i + model += [nn.Conv2d(ngf * mult, ngf * mult * 2, kernel_size=3, + stride=2, padding=1, bias=use_bias), + norm_layer(ngf * mult * 2), + nn.ReLU(True)] + + mult = 2**n_downsampling + for i in range(n_blocks): + model += [ResnetBlock(ngf * mult, padding_type=padding_type, norm_layer=norm_layer, use_dropout=use_dropout, use_bias=use_bias)] + + for i in range(n_downsampling): + mult = 2**(n_downsampling - i) + model += [nn.ConvTranspose2d(ngf * mult, int(ngf * mult / 2), + kernel_size=3, stride=2, + padding=1, output_padding=1, + bias=use_bias), + norm_layer(int(ngf * mult / 2)), + nn.ReLU(True)] + model += [nn.ReflectionPad2d(3)] + model += [nn.Conv2d(ngf, output_nc, kernel_size=7, padding=0)] + model += [nn.Tanh()] + + self.model = nn.Sequential(*model) + + def forward(self, input): + return self.model(input) + + +# Define a resnet block +class ResnetBlock(nn.Module): + def __init__(self, dim, padding_type, norm_layer, use_dropout, use_bias): + super(ResnetBlock, self).__init__() + self.conv_block = self.build_conv_block(dim, padding_type, norm_layer, use_dropout, use_bias) + + def build_conv_block(self, dim, padding_type, norm_layer, use_dropout, use_bias): + conv_block = [] + p = 0 + if padding_type == 'reflect': + conv_block += [nn.ReflectionPad2d(1)] + elif padding_type == 'replicate': + conv_block += [nn.ReplicationPad2d(1)] + elif padding_type == 'zero': + p = 1 + else: + raise NotImplementedError('padding [%s] is not implemented' % padding_type) + + conv_block += [nn.Conv2d(dim, dim, kernel_size=3, padding=p, bias=use_bias), + norm_layer(dim), + nn.ReLU(True)] + if use_dropout: + conv_block += [nn.Dropout(0.5)] + + p = 0 + if padding_type == 'reflect': + conv_block += [nn.ReflectionPad2d(1)] + elif padding_type == 'replicate': + conv_block += 
[nn.ReplicationPad2d(1)] + elif padding_type == 'zero': + p = 1 + else: + raise NotImplementedError('padding [%s] is not implemented' % padding_type) + conv_block += [nn.Conv2d(dim, dim, kernel_size=3, padding=p, bias=use_bias), + norm_layer(dim)] + + return nn.Sequential(*conv_block) + + def forward(self, x): + out = x + self.conv_block(x) + return out + + +# Defines the Unet generator. +# |num_downs|: number of downsamplings in UNet. For example, +# if |num_downs| == 7, image of size 128x128 will become of size 1x1 +# at the bottleneck +class UnetGenerator(nn.Module): + def __init__(self, input_nc, output_nc, num_downs, ngf=64, + norm_layer=nn.BatchNorm2d, use_dropout=False): + super(UnetGenerator, self).__init__() + + # construct unet structure + unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer, innermost=True) + for i in range(num_downs - 5): + unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer, use_dropout=use_dropout) + unet_block = UnetSkipConnectionBlock(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(ngf * 2, ngf * 4, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True, norm_layer=norm_layer) + + self.model = unet_block + + def forward(self, input): + return self.model(input) + + +# Defines the submodule with skip connection. +# X -------------------identity---------------------- X +# |-- downsampling -- |submodule| -- upsampling --| +class UnetSkipConnectionBlock(nn.Module): + def __init__(self, outer_nc, inner_nc, input_nc=None, + submodule=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d, use_dropout=False): + super(UnetSkipConnectionBlock, self).__init__() + self.outermost = outermost + if type(norm_layer) == functools.partial: + use_bias = norm_layer.func == nn.InstanceNorm2d + else: + use_bias = norm_layer == nn.InstanceNorm2d + if input_nc is None: + input_nc = outer_nc + downconv = nn.Conv2d(input_nc, inner_nc, kernel_size=4, + stride=2, padding=1, bias=use_bias) + downrelu = nn.LeakyReLU(0.2, True) + downnorm = norm_layer(inner_nc) + uprelu = nn.ReLU(True) + upnorm = norm_layer(outer_nc) + + if outermost: + upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, + kernel_size=4, stride=2, + padding=1) + down = [downconv] + up = [uprelu, upconv, nn.Tanh()] + model = down + [submodule] + up + elif innermost: + upconv = nn.ConvTranspose2d(inner_nc, outer_nc, + kernel_size=4, stride=2, + padding=1, bias=use_bias) + down = [downrelu, downconv] + up = [uprelu, upconv, upnorm] + model = down + up + else: + upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, + kernel_size=4, stride=2, + padding=1, bias=use_bias) + down = [downrelu, downconv, downnorm] + up = [uprelu, upconv, upnorm] + + if use_dropout: + model = down + [submodule] + up + [nn.Dropout(0.5)] + else: + model = down + [submodule] + up + + self.model = nn.Sequential(*model) + + def forward(self, x): + if self.outermost: + return self.model(x) + else: + return torch.cat([x, self.model(x)], 1) + + +# Defines the PatchGAN discriminator with the specified arguments. 
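+# Example (sketch): with the defaults used by define_D ('basic', n_layers=3,
+# kernel size 4, stride-2 convolutions), this is the 70x70-receptive-field
+# PatchGAN from pix2pix, e.g.
+#   netD = define_D(input_nc=6, ndf=64, netD='basic', norm='instance')
+# where input_nc=6 is a hypothetical conditional setup (input image and
+# generated/real image concatenated along the channel axis).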
+class NLayerDiscriminator(nn.Module): + def __init__(self, input_nc, ndf=64, n_layers=3, norm_layer=nn.BatchNorm2d, use_sigmoid=False): + super(NLayerDiscriminator, self).__init__() + if type(norm_layer) == functools.partial: + use_bias = norm_layer.func == nn.InstanceNorm2d + else: + use_bias = norm_layer == nn.InstanceNorm2d + + kw = 4 + padw = 1 + sequence = [ + nn.Conv2d(input_nc, ndf, kernel_size=kw, stride=2, padding=padw), + nn.LeakyReLU(0.2, True) + ] + + nf_mult = 1 + nf_mult_prev = 1 + for n in range(1, n_layers): + nf_mult_prev = nf_mult + nf_mult = min(2**n, 8) + sequence += [ + nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult, + kernel_size=kw, stride=2, padding=padw, bias=use_bias), + norm_layer(ndf * nf_mult), + nn.LeakyReLU(0.2, True) + ] + + nf_mult_prev = nf_mult + nf_mult = min(2**n_layers, 8) + sequence += [ + nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult, + kernel_size=kw, stride=1, padding=padw, bias=use_bias), + norm_layer(ndf * nf_mult), + nn.LeakyReLU(0.2, True) + ] + + sequence += [nn.Conv2d(ndf * nf_mult, 1, kernel_size=kw, stride=1, padding=padw)] + + if use_sigmoid: + sequence += [nn.Sigmoid()] + + self.model = nn.Sequential(*sequence) + + def forward(self, input): + return self.model(input) + + +class PixelDiscriminator(nn.Module): + def __init__(self, input_nc, ndf=64, norm_layer=nn.BatchNorm2d, use_sigmoid=False): + super(PixelDiscriminator, self).__init__() + if type(norm_layer) == functools.partial: + use_bias = norm_layer.func == nn.InstanceNorm2d + else: + use_bias = norm_layer == nn.InstanceNorm2d + + self.net = [ + nn.Conv2d(input_nc, ndf, kernel_size=1, stride=1, padding=0), + nn.LeakyReLU(0.2, True), + nn.Conv2d(ndf, ndf * 2, kernel_size=1, stride=1, padding=0, bias=use_bias), + norm_layer(ndf * 2), + nn.LeakyReLU(0.2, True), + nn.Conv2d(ndf * 2, 1, kernel_size=1, stride=1, padding=0, bias=use_bias)] + + if use_sigmoid: + self.net.append(nn.Sigmoid()) + + self.net = nn.Sequential(*self.net) + + def forward(self, input): + return self.net(input) diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/options/__init__.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/options/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/options/base_options.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/options/base_options.py new file mode 100644 index 0000000..b85c6ce --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/options/base_options.py @@ -0,0 +1,138 @@ +import argparse +import os +from util import util +import torch +import models +import data + + +class BaseOptions(): + def __init__(self): + self.initialized = False + + def initialize(self, parser): + parser.add_argument('--dataroot', required=True, help='path to images (should have subfolders trainA, trainB, valA, valB, etc)') + parser.add_argument('--batch_size', type=int, default=1, help='input batch size') + parser.add_argument('--seq_len', type=int, default=1, help='sequence length (if applicable)') + parser.add_argument('--fineSize', type=int, default=512, help='then crop to this size') + parser.add_argument('--display_winsize', type=int, default=256, help='display window size for both visdom and HTML') + parser.add_argument('--input_nc', type=int, default=3, help='# of input image channels') + parser.add_argument('--output_nc', type=int, default=3, help='# of output image channels') + parser.add_argument('--ngf', type=int, default=64, 
help='# of gen filters in first conv layer') + parser.add_argument('--ndf', type=int, default=64, help='# of discrim filters in first conv layer') + parser.add_argument('--netD', type=str, default='basic', help='selects model to use for netD') + parser.add_argument('--netG', type=str, default='resnet_9blocks', help='selects model to use for netG') + parser.add_argument('--n_layers_D', type=int, default=3, help='only used if netD==n_layers') + parser.add_argument('--gpu_ids', type=str, default='0', help='gpu ids: e.g. 0 0,1,2, 0,2. use -1 for CPU') + parser.add_argument('--name', type=str, default='experiment_name', help='name of the experiment. It decides where to store samples and models') + parser.add_argument('--renderer', type=str, default='no_renderer', help='name of the renderer to load the models from') + parser.add_argument('--fix_renderer', action='store_true', help='renderer is fixed') + parser.add_argument('--dataset_mode', type=str, default='aligned', help='chooses how datasets are loaded. [aligned | multi]') + parser.add_argument('--model', type=str, default='cycle_gan', help='chooses which model to use. cycle_gan, pix2pix, test') + parser.add_argument('--direction', type=str, default='AtoB', help='AtoB or BtoA') + parser.add_argument('--epoch', type=str, default='latest', help='which epoch to load? set to latest to use latest cached model') + parser.add_argument('--load_iter', type=int, default='0', help='which iteration to load? if load_iter > 0, the code will load models by iter_[load_iter]; otherwise, the code will load models by [epoch]') + parser.add_argument('--num_threads', default=4, type=int, help='# threads for loading data') + parser.add_argument('--checkpoints_dir', type=str, default='./checkpoints', help='models are saved here') + parser.add_argument('--norm', type=str, default='instance', help='instance normalization or batch normalization') + parser.add_argument('--serial_batches', action='store_true', help='if true, takes images in order to make batches, otherwise takes them randomly') + parser.add_argument('--no_dropout', action='store_true', help='no dropout for the generator') + parser.add_argument('--max_dataset_size', type=int, default=float("inf"), help='Maximum number of samples allowed per dataset. 
If the dataset directory contains more than max_dataset_size, only a subset is loaded.')
+        parser.add_argument('--resize_or_crop', type=str, default='resize_and_crop', help='scaling and cropping of images at load time [resize_and_crop|crop|scale_width|scale_width_and_crop|none]')
+        parser.add_argument('--no_augmentation', action='store_true', help='if specified, no data augmentation')
+        #parser.add_argument('--init_type', type=str, default='normal', help='network initialization [normal|xavier|kaiming|orthogonal]')
+        parser.add_argument('--init_type', type=str, default='xavier', help='network initialization [normal|xavier|kaiming|orthogonal]')
+        parser.add_argument('--init_gain', type=float, default=0.02, help='scaling factor for normal, xavier and orthogonal.')
+        parser.add_argument('--verbose', action='store_true', help='if specified, print more debugging information')
+        parser.add_argument('--suffix', default='', type=str, help='customized suffix: opt.name = opt.name + suffix: e.g., {model}_{netG}_size{loadSize}')
+        parser.add_argument('--tex_dim', type=int, default=256, help='neural texture dimensions')
+        parser.add_argument('--tex_features_intermediate', type=int, default=16, help='# intermediate neural texture features when using dynamic textures')
+        parser.add_argument('--tex_features', type=int, default=16, help='# neural texture features')
+        parser.add_argument('--textureModel', type=str, default='DynamicNeuralTextureAudio', help='texture model')
+        parser.add_argument('--rendererType', type=str, default='UNET_5_level', help='neural renderer network')
+        parser.add_argument('--lossType', type=str, default='L1', help='loss type for the final output')
+
+        parser.add_argument('--hierarchicalTex', action='store_true', help='if specified, hierarchical neural textures are used')
+
+        parser.add_argument('--output_audio_expressions', action='store_true', help='if specified, the network outputs audio expression coefficients directly')
+
+        parser.add_argument('--erosionFactor', type=float, default=1.0, help='scaling factor for erosion of the background.')
+
+        parser.add_argument('--audio_window_size', type=int, default=16, help='audio window size = #mel feature bins')
+
+        parser.add_argument('--look_ahead', action='store_true', help='use previous and following audio frames (sequence window of --seq_len)')
+
+        parser.add_argument('--cached_images', action='store_true', help='cache images in numpy format')
+
+        self.initialized = True
+        return parser
+
+    def gather_options(self):
+        # initialize parser with basic options
+        if not self.initialized:
+            parser = argparse.ArgumentParser(
+                formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+            parser = self.initialize(parser)
+
+        # get the basic options
+        opt, _ = parser.parse_known_args()
+
+        # modify model-related parser options
+        model_name = opt.model
+        model_option_setter = models.get_option_setter(model_name)
+        parser = model_option_setter(parser, self.isTrain)
+        opt, _ = parser.parse_known_args()  # parse again with the new defaults
+
+        # modify dataset-related parser options
+        dataset_name = opt.dataset_mode
+        dataset_option_setter = data.get_option_setter(dataset_name)
+        parser = dataset_option_setter(parser, self.isTrain)
+
+        self.parser = parser
+
+        return parser.parse_args()
+
+    def print_options(self, opt):
+        message = ''
+        message += '----------------- Options ---------------\n'
+        for k, v in sorted(vars(opt).items()):
+            comment = ''
+            default = self.parser.get_default(k)
+            if v != default:
+                comment = '\t[default: %s]' % str(default)
+            message += '{:>25}: {:<30}{}\n'.format(str(k), str(v), comment)
+        message += '----------------- End -------------------'
+        print(message)
+
+        # save to the disk
+        expr_dir = os.path.join(opt.checkpoints_dir, opt.name)
+        util.mkdirs(expr_dir)
+        file_name = os.path.join(expr_dir, 'opt.txt')
+        with open(file_name, 'wt') as opt_file:
+            opt_file.write(message)
+            opt_file.write('\n')
+
+    def parse(self):
+
+        opt = self.gather_options()
+        opt.isTrain = self.isTrain   # train or test
+
+        # process opt.suffix
+        if opt.suffix:
+            suffix = ('_' + opt.suffix.format(**vars(opt))) if opt.suffix != '' else ''
+            opt.name = opt.name + suffix
+
+        self.print_options(opt)
+
+        # set gpu ids
+        str_ids = opt.gpu_ids.split(',')
+        opt.gpu_ids = []
+        for str_id in str_ids:
+            id = int(str_id)
+            if id >= 0:
+                opt.gpu_ids.append(id)
+        if len(opt.gpu_ids) > 0:
+            torch.cuda.set_device(opt.gpu_ids[0])
+
+        self.opt = opt
+        return self.opt
diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/options/test_options.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/options/test_options.py
new file mode 100644
index 0000000..d21da35
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/options/test_options.py
@@ -0,0 +1,27 @@
+from .base_options import BaseOptions
+
+
+class TestOptions(BaseOptions):
+    def initialize(self, parser):
+        parser = BaseOptions.initialize(self, parser)
+        parser.add_argument('--ntest', type=int, default=float("inf"), help='# of test examples.')
+        parser.add_argument('--results_dir', type=str, default='./results/', help='saves results here.')
+        parser.add_argument('--aspect_ratio', type=float, default=1.0, help='aspect ratio of result images')
+        parser.add_argument('--phase', type=str, default='test', help='train, val, test, etc')
+        # Dropout and BatchNorm have different behaviour during training and test; --eval switches to eval mode.
+        parser.add_argument('--eval', action='store_true', help='use eval mode during test time.')
+        parser.add_argument('--num_test', type=int, default=50, help='how many test images to run')
+
+        parser.add_argument('--write_no_images', action='store_true', help='if specified, do not write output images')
+
+        parser.add_argument('--write_video', action='store_true', help='write video')
+        parser.add_argument('--video_fps', type=float, default=25.0, help='video fps')
+
+
+        parser.add_argument('--source_dir', type=str, default='./datasets/', help='loads source files (expressions, audio, uvs).')
+
+        parser.set_defaults(model='test')
+        # To avoid cropping, the loadSize should be the same as fineSize
+        parser.set_defaults(loadSize=parser.get_default('fineSize'))
+        self.isTrain = False
+        return parser
diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/options/train_options.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/options/train_options.py
new file mode 100644
index 0000000..c91d50d
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/options/train_options.py
@@ -0,0 +1,37 @@
+from .base_options import BaseOptions
+
+
+class TrainOptions(BaseOptions):
+    def initialize(self, parser):
+        parser = BaseOptions.initialize(self, parser)
+        parser.add_argument('--display_freq', type=int, default=400, help='frequency of showing training results on screen')
+        parser.add_argument('--display_ncols', type=int, default=4, help='if positive, display all images in a single visdom web panel with certain number of images per row.')
+        parser.add_argument('--display_id', type=int, default=1, help='window id of the web display')
+        parser.add_argument('--display_server', type=str, default="http://localhost", help='visdom server of the web display')
+        parser.add_argument('--display_env', type=str, default='main', help='visdom display environment name (default is "main")')
+        parser.add_argument('--display_port', type=int, default=8097, help='visdom port of the web display')
+
+        parser.add_argument('--compute_val', action='store_true', help='compute validation')
+        parser.add_argument('--input_noise_augmentation', action='store_true', help='add input noise')
+
+        parser.add_argument('--update_html_freq', type=int, default=1000, help='frequency of saving training results to html')
+        parser.add_argument('--print_freq', type=int, default=500, help='frequency of showing training results on console')
+        parser.add_argument('--save_latest_freq', type=int, default=5000, help='frequency of saving the latest results')
+        parser.add_argument('--save_epoch_freq', type=int, default=5, help='frequency of saving checkpoints at the end of epochs')
+        parser.add_argument('--save_by_iter', action='store_true', help='whether saves model by iteration')
+
+        parser.add_argument('--continue_train', action='store_true', help='continue training: load the latest model')
+        parser.add_argument('--epoch_count', type=int, default=1, help='the starting epoch count, we save the model by <epoch_count>, <epoch_count>+<save_latest_freq>, ...')
+        parser.add_argument('--phase', type=str, default='train', help='train, val, test, etc')
+        parser.add_argument('--niter', type=int, default=100, help='# of iter at starting learning rate')
+        parser.add_argument('--niter_decay', type=int, default=100, help='# of iter to linearly decay learning rate to zero')
+        parser.add_argument('--beta1', type=float, default=0.5, help='momentum term of adam')
+        parser.add_argument('--lr', type=float, default=0.0002, help='initial learning rate for adam')
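+        # With the default 'lambda' policy in networks.get_scheduler, the rate
+        # stays at --lr for --niter epochs and then decays linearly to zero over
+        # --niter_decay epochs (e.g. niter=100, niter_decay=100 reaches ~0 at
+        # epoch 200).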
+        parser.add_argument('--no_lsgan', action='store_true', help='do *not* use least square GAN; if false, use vanilla GAN')
+        parser.add_argument('--pool_size', type=int, default=50, help='the size of image buffer that stores previously generated images')
+        parser.add_argument('--no_html', action='store_true', help='do not save intermediate training results to [opt.checkpoints_dir]/[opt.name]/web/')
+        parser.add_argument('--lr_policy', type=str, default='lambda', help='learning rate policy: lambda|step|plateau|cosine')
+        parser.add_argument('--lr_decay_iters', type=int, default=50, help='multiply by a gamma every lr_decay_iters iterations')
+
+        self.isTrain = True
+        return parser
diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/options/transfer_options.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/options/transfer_options.py
new file mode 100644
index 0000000..25e6832
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/options/transfer_options.py
@@ -0,0 +1,42 @@
+from .base_options import BaseOptions
+
+
+class TransferOptions(BaseOptions):
+    def initialize(self, parser):
+        parser = BaseOptions.initialize(self, parser)
+        parser.add_argument('--ntest', type=int, default=float("inf"), help='# of test examples.')
+        parser.add_argument('--results_dir', type=str, default='./results/', help='saves results here.')
+        parser.add_argument('--aspect_ratio', type=float, default=1.0, help='aspect ratio of result images')
+        parser.add_argument('--phase', type=str, default='test', help='train, val, test, etc')
+        # Dropout and BatchNorm have different behaviour during training and test.
+        parser.add_argument('--eval', action='store_true', help='use eval mode during test time.')
+        parser.add_argument('--num_test', type=int, default=50, help='how many test images to run')
+
+        parser.add_argument('--write_no_images', action='store_true', help='if specified, do not write output images')
+
+
+        parser.add_argument('--source_dir', type=str, default='./datasets/', help='loads source files (expressions, audio, uvs).')
+
+        parser.add_argument('--source_actor', type=str, default='', help='source actor directory')
+        parser.add_argument('--target_actor', type=str, default='', help='target actor directory')
+
+        parser.add_argument('--transfer_path', type=str, help='path to output the transfer files')
+        parser.add_argument('--base_path', type=str, default='../..', help='path for mappings folder')
+
+        parser.set_defaults(model='test')
+        # To avoid cropping, the loadSize should be the same as fineSize
+        parser.set_defaults(loadSize=parser.get_default('fineSize'))
+        self.isTrain = False
+        return parser
+
+class Audio2ExprOptions(TransferOptions):
+    def initialize(self, parser):
+        parser = TransferOptions.initialize(self, parser)
+        parser.add_argument('--use_mapping', action='store_true', help='use mapping function.')
+        parser.add_argument('--mapping_path', type=str, default='', help='path to mapping function.')
+        parser.add_argument('--out_dir', type=str, default='', help='path to output directory.')
+        parser.set_defaults(model='test')
+        # To avoid cropping, the loadSize should be the same as fineSize
+        parser.set_defaults(loadSize=parser.get_default('fineSize'))
+        self.isTrain = False
+        return parser
\ No newline at end of file
diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/transfer.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/transfer.py
new file mode 100644
index 0000000..1532d82
--- /dev/null
+++ 
b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/transfer.py @@ -0,0 +1,247 @@ +import os +import os.path +from options.transfer_options import TransferOptions +from data.face_dataset import FaceDataset +from data.audio_dataset import AudioDataset +from models import create_model +import torch +import numpy as np +import random +from tqdm import tqdm +import copy + +from BaselModel.basel_model import * + + +def load_model(opt): + opt.output_audio_expressions = True + opt.nTrainObjects = 116 + + print('#train objects = %d' % opt.nTrainObjects) + + print('>>> create model <<<') + model = create_model(opt) + print('>>> setup model <<<') + model.setup(opt) + + return model + +def load_target_sequence(opt): + opt_target = copy.copy(opt) # create a clone + opt_target.dataroot = opt.target_actor # overwrite root directory + opt_target.dataset_mode = 'face' + opt_target.phase = 'train' + dataset_target = FaceDataset() + dataset_target.initialize(opt_target) + + dataloader = torch.utils.data.DataLoader( + dataset_target, + batch_size=opt.batch_size, + shuffle=not opt.serial_batches, + num_workers=int(opt.num_threads)) + + return dataset_target, dataloader + + +def load_source_sequence(opt): + opt_source = copy.copy(opt) # create a clone + opt_source.dataroot = opt.source_actor # overwrite root directory + print(opt_source.dataroot) + opt_source.dataset_mode = 'audio' + opt_source.phase = 'train' + + dataset_source = AudioDataset() + + dataset_source.initialize(opt_source) + + dataloader = torch.utils.data.DataLoader( + dataset_source, + batch_size=opt.batch_size, + shuffle=not opt.serial_batches, + num_workers=int(opt.num_threads)) + + return dataset_source, dataloader + + +if __name__ == '__main__': + # read options + opt = TransferOptions().parse() + + target_name = opt.target_actor.split("/")[-1] + + # hard-code some parameters for test + opt.num_threads = 1 # test code only supports num_threads = 1 + opt.batch_size = 1 # test code only supports batch_size = 1 + opt.serial_batches = True # no shuffle + opt.no_augmentation = True # no flip + opt.display_id = -1 # no visdom display + + + # load model + model = load_model(opt) + print('model version:', opt.name) + + + # # load face model + # morphable_model = MorphableModel() + # mask = morphable_model.LoadMask() + # mask = mask + 0.1 * torch.ones_like(mask) + + + # read target sequence + dataset_target, data_loader_target = load_target_sequence(opt) + dataset_target_size = len(dataset_target) + print('#target_actor frames = %d' % dataset_target_size) + + ################################## + ####### create mapping ####### + ################################## + base_path = opt.base_path + # base_path = '../..' 
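+    # What follows fits a person-specific mapping (a sketch of its shape):
+    # X is an (n_expr x subspace_dim) matrix with expression ~ 10 * X @ audio_expression.
+    # It is cached as mappings/<opt.name>/mapping_<target_name>.npy and reused
+    # on later runs instead of being re-fitted.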
#'/home/alberto/NeuralVoicePuppetry' + + os.makedirs(base_path+'/mappings/'+opt.name, exist_ok=True) + mapping_fn = base_path+'/mappings/'+opt.name+'/'+'mapping_'+target_name + print(mapping_fn) + #not_exists = True + not_exists = not os.path.exists(mapping_fn+'.npy') + if not_exists: + # collect data + print('collect data') + audio_expressions = None + gt_expressions = None + for i, data in tqdm(enumerate(data_loader_target)): + model.set_input(data) + model.test() + + ae = model.fake_expressions.data[:,:,0] + if type(audio_expressions) == type(None): + audio_expressions = ae + e = model.expressions.data + gt_expressions = e + else: + audio_expressions = torch.cat([audio_expressions,ae],dim=0) + e = model.expressions.data + gt_expressions = torch.cat([gt_expressions,e],dim=0) + + # solve for mapping + print('solve for mapping') + optimize_in_parameter_space = True #False + if optimize_in_parameter_space: +# A = audio_expressions +# B = gt_expressions +# # solve lstsq ||AX - B|| +# X, _ = torch.gels(B, A, out=None) +# #X, _ = torch.lstsq(B, A) # requires pytorch 1.2 +# X = X[0:A.shape[1],:] +# mapping = X.t() + + # use gradient descent method + n = audio_expressions.shape[0] + print(n) + subspace_dim = 32 + + # TODO: patch + n_expr = 53 + + X = torch.nn.Parameter(torch.randn(n_expr, subspace_dim, requires_grad=True).cuda()) + optimizer = torch.optim.Adam([X], lr=0.01) + lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=30, gamma=0.1) + num_epochs = 90 + random_range = [k for k in range(0,n)] + + for ep in tqdm(range(0,num_epochs)): + random.shuffle(random_range) + for j in random_range: + expressions = gt_expressions[j] + fake_expressions = 10.0 * torch.matmul(X, audio_expressions[j]) + diff_expression = fake_expressions - expressions + loss = torch.mean(diff_expression * diff_expression) # L2 + optimizer.zero_grad() + loss.backward() + optimizer.step() + lr_scheduler.step() + mapping = X.data + + + +# else: # optimize in vertex space +# # use gradient descent method +# n = audio_expressions.shape[0] +# subspace_dim = 32 +# X = torch.nn.Parameter(torch.randn(N_EXPRESSIONS, subspace_dim, requires_grad=True).cuda()) +# optimizer = torch.optim.Adam([X], lr=0.01) +# lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=30, gamma=0.1) +# num_epochs = 90 +# random_range = [k for k in range(0,n)] +# with progressbar.ProgressBar(max_value=num_epochs) as bar: +# for ep in range(0,num_epochs): +# bar.update(ep) +# random.shuffle(random_range) +# for j in random_range: +# expressions = gt_expressions[j] +# fake_expressions = 10.0 * torch.matmul(X, audio_expressions[j]) +# diff_expression = fake_expressions - expressions +# diff_vertices = torch.matmul(morphable_model.expression_basis, diff_expression) +# #loss = torch.sqrt(torch.mean(mask * diff_vertices * diff_vertices)) # RMS +# loss = torch.mean(mask * diff_vertices * diff_vertices) # L2 +# # +# optimizer.zero_grad() +# loss.backward() +# optimizer.step() +# lr_scheduler.step() +# +# mapping = X.data + + map_cpu = mapping.data.cpu().numpy() + print(map_cpu.shape) + file_out=open(mapping_fn+'.txt', 'w') + np.savetxt(file_out, map_cpu, delimiter=' ') + file_out.close() + np.save(mapping_fn+'.npy', map_cpu) + else: + # load mapping from file + map_cpu = np.load(mapping_fn+'.npy') + mapping = torch.tensor(map_cpu.astype(np.float32)).cuda() + print('loaded mapping from file', mapping.shape) + + # ############# + # map_cpu = np.load(base_path + '/mappings/' + opt.name + '/' + 'mapping_Close_355_9105.npy') + # mapping = 
torch.tensor(map_cpu.astype(np.float32)).cuda() + # print('loaded mapping from file', mapping.shape) + # ############# + + # process source sequence (opt.source_actor) + source_actors = [opt.source_actor] + + os.makedirs(opt.transfer_path, exist_ok=True) + list_transfer = open(opt.transfer_path+'/list_transfer.txt', "a") + + target_actor_offset = 0 # default + expression_multiplier = 1.0 # default + + if target_actor_offset != 0.0: + target_name = target_name + '--offset' + if expression_multiplier != 1.0: + target_name = target_name + '-X' + + for source_actor in source_actors: + opt.source_actor = source_actor + source_name = opt.source_actor.split("/")[-1] + # read source sequence + dataset_source, data_loader_source = load_source_sequence(opt) + dataset_source_size = len(dataset_source) + print('#source_actor frames = %d' % dataset_source_size) + list_transfer.write(source_name+'--'+target_name+'\n') + out_dir = opt.transfer_path + '/' + source_name+'--'+target_name+'/expressions' + if not os.path.isdir(out_dir): + os.makedirs(out_dir, exist_ok=True) + for i, data in tqdm(enumerate(data_loader_source)): + model.set_input(data) + model.test() + audio_expression = model.fake_expressions.data[0,:,0] + expression = expression_multiplier * 10.0 * torch.matmul(mapping, audio_expression) + expression = expression[None,:] + np.save(os.path.join(out_dir, f'expr_{i}.npy'), expression.cpu().numpy()) + + list_transfer.close() + exit() diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/transfer.sh b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/transfer.sh new file mode 100644 index 0000000..2726e84 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/transfer.sh @@ -0,0 +1,57 @@ +set -ex +# . 
transfer.sh & +GPUID=0 + +###################################################### +################## SPECIFY MODEL ################## +###################################################### + +## audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead # <<<<<<<<<<<<< +DATASETS_DIR=/home/alberto/NeuralVoicePuppetry/datasets +OBJECT=ARD_ZDF +LR=0.00001 +N_ITER=150 +N_ITER_LR_DECAY=50 +RENDERER=$OBJECT +EROSION=1.0 +BATCH_SIZE=16 +MODEL=audio2ExpressionsAttentionTMP4 +RENDERER_TYPE=estimatorAttention +DATASET_MODE=multi_face_audio_eq_tmp_cached +LOSS=RMS +SEQ_LEN=8 +DATE_WITH_TIME=20191105-115332 +NAME=$MODEL-$RENDERER_TYPE-SL$SEQ_LEN-BS$BATCH_SIZE-$OBJECT-$DATASET_MODE-$LOSS-$DATE_WITH_TIME-look_ahead + +# --look_ahead +EPOCH=latest + +############################################################### +###################### SPECIFY TARGET ###################### +############################################################### + +# target actors +#SOURCE_ACTOR_LIST=(/home/alberto/NeuralVoicePuppetry/datasets/External/Alberto_videos_uno /home/alberto/NeuralVoicePuppetry/datasets/External/Alberto_videos_due /home/alberto/NeuralVoicePuppetry/datasets/External/Alberto_videos_tre) +SOURCE_ACTOR_LIST=(/home/alberto/NeuralVoicePuppetry/datasets/External/Clara_audios_one /home/alberto/NeuralVoicePuppetry/datasets/External/Clara_audios_two /home/alberto/NeuralVoicePuppetry/datasets/External/Clara_audios_three) + +#TARGET_ACTOR_LIST[1]=/home/alberto/NeuralVoicePuppetry/datasets/External/Youtube_Russian_guy +TARGET_ACTOR_LIST[1]=/home/alberto/NeuralVoicePuppetry/datasets/SRF_anchor_short/Halbtotale_355_9415 + + +rm -f ./datasets/TRANSFERS/$NAME/list_transfer.txt +for TARGET_ACTOR in "${TARGET_ACTOR_LIST[@]}" +do + echo $TARGET_ACTOR + + for SOURCE_ACTOR in "${SOURCE_ACTOR_LIST[@]}" + do + echo $SOURCE_ACTOR + # --look_ahead + python transfer.py --look_ahead --seq_len $SEQ_LEN --source_actor $SOURCE_ACTOR --target_actor $TARGET_ACTOR --write_no_images --name $NAME --erosionFactor $EROSION --epoch $EPOCH --display_winsize 512 --rendererType $RENDERER_TYPE --lossType $LOSS --dataroot $DATASETS_DIR/$OBJECT --model $MODEL --netG unet_256 --dataset_mode $DATASET_MODE --norm instance --gpu_ids $GPUID + done +done + + +############################################################### +############################################################### +############################################################### \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/util/__init__.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/util/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/util/get_data.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/util/get_data.py new file mode 100644 index 0000000..6325605 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/util/get_data.py @@ -0,0 +1,115 @@ +from __future__ import print_function +import os +import tarfile +import requests +from warnings import warn +from zipfile import ZipFile +from bs4 import BeautifulSoup +from os.path import abspath, isdir, join, basename + + +class GetData(object): + """ + + Download CycleGAN or Pix2Pix Data. + + Args: + technique : str + One of: 'cyclegan' or 'pix2pix'. + verbose : bool + If True, print additional information. 
+ + Examples: + >>> from util.get_data import GetData + >>> gd = GetData(technique='cyclegan') + >>> new_data_path = gd.get(save_path='./datasets') # options will be displayed. + + """ + + def __init__(self, technique='cyclegan', verbose=True): + url_dict = { + 'pix2pix': 'https://people.eecs.berkeley.edu/~tinghuiz/projects/pix2pix/datasets', + 'cyclegan': 'https://people.eecs.berkeley.edu/~taesung_park/CycleGAN/datasets' + } + self.url = url_dict.get(technique.lower()) + self._verbose = verbose + + def _print(self, text): + if self._verbose: + print(text) + + @staticmethod + def _get_options(r): + soup = BeautifulSoup(r.text, 'lxml') + options = [h.text for h in soup.find_all('a', href=True) + if h.text.endswith(('.zip', 'tar.gz'))] + return options + + def _present_options(self): + r = requests.get(self.url) + options = self._get_options(r) + print('Options:\n') + for i, o in enumerate(options): + print("{0}: {1}".format(i, o)) + choice = input("\nPlease enter the number of the " + "dataset above you wish to download:") + return options[int(choice)] + + def _download_data(self, dataset_url, save_path): + if not isdir(save_path): + os.makedirs(save_path) + + base = basename(dataset_url) + temp_save_path = join(save_path, base) + + with open(temp_save_path, "wb") as f: + r = requests.get(dataset_url) + f.write(r.content) + + if base.endswith('.tar.gz'): + obj = tarfile.open(temp_save_path) + elif base.endswith('.zip'): + obj = ZipFile(temp_save_path, 'r') + else: + raise ValueError("Unknown File Type: {0}.".format(base)) + + self._print("Unpacking Data...") + obj.extractall(save_path) + obj.close() + os.remove(temp_save_path) + + def get(self, save_path, dataset=None): + """ + + Download a dataset. + + Args: + save_path : str + A directory to save the data to. + dataset : str, optional + A specific dataset to download. + Note: this must include the file extension. + If None, options will be presented for you + to choose from. + + Returns: + save_path_full : str + The absolute path to the downloaded data. + + """ + if dataset is None: + selected_dataset = self._present_options() + else: + selected_dataset = dataset + + save_path_full = join(save_path, selected_dataset.split('.')[0]) + + if isdir(save_path_full): + warn("\n'{0}' already exists. 
Voiding Download.".format( + save_path_full)) + else: + self._print('Downloading Data...') + url = "{0}/{1}".format(self.url, selected_dataset) + self._download_data(url, save_path=save_path) + + return abspath(save_path_full) diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/util/html.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/util/html.py new file mode 100644 index 0000000..1e7aab9 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/util/html.py @@ -0,0 +1,64 @@ +import dominate +from dominate.tags import meta, h3, table, tr, td, p, a, img, br +import os + + +class HTML: + def __init__(self, web_dir, title, reflesh=0): + self.title = title + self.web_dir = web_dir + self.img_dir = os.path.join(self.web_dir, 'images') + if not os.path.exists(self.web_dir): + os.makedirs(self.web_dir) + if not os.path.exists(self.img_dir): + os.makedirs(self.img_dir) + # print(self.img_dir) + + self.doc = dominate.document(title=title) + if reflesh > 0: + with self.doc.head: + meta(http_equiv="reflesh", content=str(reflesh)) + + def get_image_dir(self): + return self.img_dir + + def add_header(self, str): + with self.doc: + h3(str) + + def add_table(self, border=1): + self.t = table(border=border, style="table-layout: fixed;") + self.doc.add(self.t) + + def add_images(self, ims, txts, links, width=400): + self.add_table() + with self.t: + with tr(): + for im, txt, link in zip(ims, txts, links): + with td(style="word-wrap: break-word;", halign="center", valign="top"): + with p(): + with a(href=os.path.join('images', link)): + img(style="width:%dpx" % width, src=os.path.join('images', im)) + br() + p(txt) + + def save(self): + html_file = '%s/index.html' % self.web_dir + f = open(html_file, 'wt') + f.write(self.doc.render()) + f.close() + + +if __name__ == '__main__': + html = HTML('web/', 'test_html') + html.add_header('hello world') + + ims = [] + txts = [] + links = [] + for n in range(4): + ims.append('image_%d.png' % n) + txts.append('text_%d' % n) + links.append('image_%d.png' % n) + html.add_images(ims, txts, links) + html.save() diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/util/image_pool.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/util/image_pool.py new file mode 100644 index 0000000..52413e0 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/util/image_pool.py @@ -0,0 +1,32 @@ +import random +import torch + + +class ImagePool(): + def __init__(self, pool_size): + self.pool_size = pool_size + if self.pool_size > 0: + self.num_imgs = 0 + self.images = [] + + def query(self, images): + if self.pool_size == 0: + return images + return_images = [] + for image in images: + image = torch.unsqueeze(image.data, 0) + if self.num_imgs < self.pool_size: + self.num_imgs = self.num_imgs + 1 + self.images.append(image) + return_images.append(image) + else: + p = random.uniform(0, 1) + if p > 0.5: + random_id = random.randint(0, self.pool_size - 1) # randint is inclusive + tmp = self.images[random_id].clone() + self.images[random_id] = image + return_images.append(tmp) + else: + return_images.append(image) + return_images = torch.cat(return_images, 0) + return return_images diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/util/util.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/util/util.py new file mode 100644 index 0000000..df72b66 --- /dev/null +++ 
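`ImagePool` above is the generated-image history buffer from CycleGAN-style training: once the pool is full, each queried image is swapped with a random buffered sample half of the time, so the discriminator also sees older generator outputs. A minimal usage sketch (`netG`, `netD`, and `real` are hypothetical):

pool = ImagePool(pool_size=50)
fake = netG(real)                  # current generator output, NCHW tensor
fake_for_D = pool.query(fake)      # mix of current and buffered fakes
pred = netD(fake_for_D.detach())   # discriminator step uses the mixed batch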
b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/util/util.py @@ -0,0 +1,82 @@ +from __future__ import print_function +import torch +import numpy as np +from PIL import Image +import os +import sys +import array +# import OpenEXR +# import Imath + +'''def load_exr(image_path): + # Open the input file + file = OpenEXR.InputFile(image_path) + + # Compute the size + dw = file.header()['dataWindow'] + w, h = (dw.max.x - dw.min.x + 1, dw.max.y - dw.min.y + 1) + + # Read the three color channels as 32-bit floats + FLOAT = Imath.PixelType(Imath.PixelType.FLOAT) + #(R,G,B) = [np.array(array.array('f', file.channel(Chan, FLOAT)).tolist()).reshape((w, h, 1)) for Chan in ("R", "G", "B") ] + + (r, g, b) = file.channels("RGB") + R = np.array(array.array('f', r).tolist()).reshape((w, h, 1)) + G = np.array(array.array('f', g).tolist()).reshape((w, h, 1)) + B = np.array(array.array('f', b).tolist()).reshape((w, h, 1)) + + return np.concatenate((R, G, B), axis=2)''' + +# Converts a Tensor into an image array (numpy) +# |imtype|: the desired type of the converted numpy array +def tensor2im(input_image, imtype=np.uint8): + if isinstance(input_image, torch.Tensor): + input_image = torch.clamp(input_image, -1.0, 1.0) + image_tensor = input_image.data + else: + return input_image + image_numpy = image_tensor[0].cpu().float().numpy() + if image_numpy.shape[0] == 1: + image_numpy = np.tile(image_numpy, (3, 1, 1)) + image_numpy = (np.transpose(image_numpy, (1, 2, 0)) + 1) / 2.0 * 255.0 + return image_numpy.astype(imtype) + + +def diagnose_network(net, name='network'): + mean = 0.0 + count = 0 + for param in net.parameters(): + if param.grad is not None: + mean += torch.mean(torch.abs(param.grad.data)) + count += 1 + if count > 0: + mean = mean / count + print(name) + print(mean) + + +def save_image(image_numpy, image_path): + image_pil = Image.fromarray(image_numpy) + image_pil.save(image_path) + +def print_numpy(x, val=True, shp=False): + x = x.astype(np.float64) + if shp: + print('shape,', x.shape) + if val: + x = x.flatten() + print('mean = %3.3f, min = %3.3f, max = %3.3f, median = %3.3f, std=%3.3f' % ( + np.mean(x), np.min(x), np.max(x), np.median(x), np.std(x))) + + +def mkdirs(paths): + if isinstance(paths, list) and not isinstance(paths, str): + for path in paths: + mkdir(path) + else: + mkdir(paths) + + +def mkdir(path): + if not os.path.exists(path): + os.makedirs(path) diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/util/visualizer.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/util/visualizer.py new file mode 100644 index 0000000..7cb7750 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Inference/util/visualizer.py @@ -0,0 +1,207 @@ +import numpy as np +import os +import sys +import ntpath +import time +from . import util +from . 
import html +#from scipy.misc import imresize +import cv2 + +if sys.version_info[0] == 2: + VisdomExceptionBase = Exception +else: + VisdomExceptionBase = ConnectionError + + +# save image to the disk +def save_images(webpage, visuals, image_path, aspect_ratio=1.0, width=256): + image_dir = webpage.get_image_dir() + short_path = ntpath.basename(image_path[0]) + name = os.path.splitext(short_path)[0] + + webpage.add_header(name) + ims, txts, links = [], [], [] + + for label, im_data in visuals.items(): + im = util.tensor2im(im_data) + image_name = '%s_%s.png' % (name, label) + save_path = os.path.join(image_dir, image_name) + h, w, _ = im.shape + + height = int(width * h / float(w)) + #im = imresize(im, (height,width), interp='bicubic') + im = cv2.resize(src=im, dsize=(height,width), interpolation=cv2.INTER_CUBIC) + + #im = imresize(im, (height,widht), interp='bicubic') + #if aspect_ratio > 1.0: + # im = imresize(im, (h, int(w * aspect_ratio)), interp='bicubic') + #if aspect_ratio < 1.0: + # im = imresize(im, (int(h / aspect_ratio), w), interp='bicubic') + + util.save_image(im, save_path) + + ims.append(image_name) + txts.append(label) + links.append(image_name) + webpage.add_images(ims, txts, links, width=width) + + +class Visualizer(): + def __init__(self, opt): + self.display_id = opt.display_id + self.use_html = opt.isTrain and not opt.no_html + self.win_size = opt.display_winsize + self.name = opt.name + self.opt = opt + self.saved = False + if self.display_id > 0: + import visdom + self.ncols = opt.display_ncols + self.vis = visdom.Visdom(server=opt.display_server, port=opt.display_port, env=opt.display_env, raise_exceptions=True) + + if self.use_html: + self.web_dir = os.path.join(opt.checkpoints_dir, opt.name, 'web') + self.img_dir = os.path.join(self.web_dir, 'images') + print('create web directory %s...' % self.web_dir) + util.mkdirs([self.web_dir, self.img_dir]) + self.log_name = os.path.join(opt.checkpoints_dir, opt.name, 'loss_log.txt') + with open(self.log_name, "a") as log_file: + now = time.strftime("%c") + log_file.write('================ Training Loss (%s) ================\n' % now) + + def reset(self): + self.saved = False + + def throw_visdom_connection_error(self): + print('\n\nCould not connect to Visdom server (https://github.com/facebookresearch/visdom) for displaying training progress.\nYou can suppress connection to Visdom using the option --display_id -1. 
To install visdom, run \n$ pip install visdom\n, and start the server by \n$ python -m visdom.server.\n\n') + exit(1) + + # |visuals|: dictionary of images to display or save + def display_current_results(self, visuals, epoch, save_result, aspect_ratio=1.0, width=256): + if self.display_id > 0: # show images in the browser + ncols = self.ncols + if ncols > 0: + ncols = min(ncols, len(visuals)) + h, w = next(iter(visuals.values())).shape[2:4] + height = int(width * h / float(w)) + h = height + w = width + table_css = """""" % (w, h) + title = self.name + label_html = '' + label_html_row = '' + images = [] + idx = 0 + for label, image in visuals.items(): + # + image_numpy = util.tensor2im(image) + #image_numpy = imresize(image_numpy, (h, w), interp='bicubic') + image_numpy = cv2.resize(src=image_numpy, dsize=(h, w), interpolation=cv2.INTER_CUBIC) + image_numpy = image_numpy.transpose([2, 0, 1]) + label_html_row += '%s' % label + images.append(image_numpy) + idx += 1 + if idx % ncols == 0: + label_html += '%s' % label_html_row + label_html_row = '' + white_image = np.ones_like(image_numpy) * 255 + while idx % ncols != 0: + images.append(white_image) + label_html_row += '' + idx += 1 + if label_html_row != '': + label_html += '%s' % label_html_row + # pane col = image row + try: + self.vis.images(images, nrow=ncols, win=self.display_id + 1, padding=2, opts=dict(title=title + ' images')) + label_html = '%s
' % label_html + self.vis.text(table_css + label_html, win=self.display_id + 2, + opts=dict(title=title + ' labels')) + except VisdomExceptionBase: + self.throw_visdom_connection_error() + + else: + idx = 1 + for label, image in visuals.items(): + image_numpy = util.tensor2im(image) + self.vis.image(image_numpy.transpose([2, 0, 1]), opts=dict(title=label), + win=self.display_id + idx) + idx += 1 + + if self.use_html and (save_result or not self.saved): # save images to a html file + self.saved = True + for label, image in visuals.items(): + image_numpy = util.tensor2im(image) + img_path = os.path.join(self.img_dir, 'epoch%.3d_%s.png' % (epoch, label)) + util.save_image(image_numpy, img_path) + # update website + webpage = html.HTML(self.web_dir, 'Experiment name = %s' % self.name, reflesh=1) + for n in range(epoch, 0, -1): + webpage.add_header('epoch [%d]' % n) + ims, txts, links = [], [], [] + + for label, image_numpy in visuals.items(): + image_numpy = util.tensor2im(image) + img_path = 'epoch%.3d_%s.png' % (n, label) + ims.append(img_path) + txts.append(label) + links.append(img_path) + webpage.add_images(ims, txts, links, width=self.win_size) + webpage.save() + + # losses: dictionary of error labels and values + def plot_current_losses(self, epoch, counter_ratio, opt, losses): + if not hasattr(self, 'plot_data'): + self.plot_data = {'X': [], 'Y': [], 'legend': list(losses.keys())} + self.plot_data['X'].append(epoch + counter_ratio) + self.plot_data['Y'].append([losses[k] for k in self.plot_data['legend']]) + try: + self.vis.line( + X=np.stack([np.array(self.plot_data['X'])] * len(self.plot_data['legend']), 1), + Y=np.array(self.plot_data['Y']), + opts={ + 'title': self.name + ' loss over time', + 'legend': self.plot_data['legend'], + 'xlabel': 'epoch', + 'ylabel': 'loss'}, + win=self.display_id) + except VisdomExceptionBase: + self.throw_visdom_connection_error() + + # losses: same format as |losses| of plot_current_losses + def print_current_losses(self, epoch, i, losses, t, t_data): + message = '(epoch: %d, iters: %d, time: %.3f, data: %.3f) ' % (epoch, i, t, t_data) + for k, v in losses.items(): + message += '%s: %.3f ' % (k, v) + + print(message) + with open(self.log_name, "a") as log_file: + log_file.write('%s\n' % message) + + + + + + + # losses: dictionary of error labels and values + def plot_current_validation_error(self, epoch, counter_ratio, losses): + if not hasattr(self, 'plot_validation_data'): + self.plot_validation_data = {'X': [], 'Y': [], 'legend': list(losses.keys())} + self.plot_validation_data['X'].append(epoch + counter_ratio) + self.plot_validation_data['Y'].append([losses[k] for k in self.plot_validation_data['legend']]) + try: + self.vis.line( + X=np.stack([np.array(self.plot_validation_data['X'])] * len(self.plot_validation_data['legend']), 1), + Y=np.array(self.plot_validation_data['Y']), + opts={ + 'title': self.name + ' validation error over time', + 'legend': self.plot_validation_data['legend'], + 'xlabel': 'epoch', + 'ylabel': 'error'}, + win=self.display_id+1) + except VisdomExceptionBase: + self.throw_visdom_connection_error() \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/BaselModel/__init__.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/BaselModel/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/BaselModel/basel_model.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training 
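For reference, `print_current_losses()` above writes one line per logging step to both stdout and `loss_log.txt`; with the RMS loss used in this pipeline a line looks like this (values hypothetical):

# (epoch: 12, iters: 400, time: 0.052, data: 0.004) RMS: 0.031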
Code/BaselModel/basel_model.py new file mode 100644 index 0000000..876b871 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/BaselModel/basel_model.py @@ -0,0 +1,106 @@ +import os +import torch +import torch.nn as nn +import torchvision.transforms as transforms +import numpy as np + +#import soft_renderer as sr +import renderer as sr + +N_EXPRESSIONS=76 + +class MorphableModel(nn.Module): + def __init__(self, filename_average=''): + super(MorphableModel, self).__init__() + + print('Load Morphable Model (Basel)') + + #filename_mesh = os.path.join(opt.dataroot, opt.phase + '/average_model.obj') + filename_mesh = filename_average + if filename_average=='': + print('use default identity') + filename_mesh = './BaselModel/average.obj' + mesh = sr.Mesh.from_obj(filename_mesh, normalization=False, load_texture=True) + self.average_vertices = mesh.vertices[0] + self.faces = mesh.faces[0] + self.average_vertices = self.average_vertices[None, :, :] # [num_vertices, XYZ] -> [batch_size=1, num_vertices, XYZ] + self.faces = self.faces[None, :, :] # [num_faces, 3] -> [batch_size=1, num_faces, 3] + self.textures = mesh.textures + + self.num_vertices = self.average_vertices.shape[1] + self.num_faces = self.faces.shape[1] + print('vertices:', self.average_vertices.shape) + print('faces:', self.faces.shape) + + ## basis function + self.expression_basis = np.memmap('./BaselModel/ExpressionBasis.matrix', dtype='float32', mode='r').__array__()[1:] # first entry is the size + self.expression_basis = np.resize(self.expression_basis, (N_EXPRESSIONS, self.num_vertices, 4))[:,:,0:3] + self.expression_basis = torch.tensor(self.expression_basis.astype(np.float32)).cuda() # N_EXPRESSIONS x num_vertices x 3 + self.expression_basis = torch.transpose(self.expression_basis,0,2) # transpose for matmul + print('expression_basis', self.expression_basis.shape) + + + #texture_size = 2 + #self.textures = torch.ones(1, self.faces.shape[1], texture_size, texture_size, texture_size, 3, dtype=torch.float32).cuda() + #print('textures:', self.textures.shape) + + ## ## debug + ## zeroExpr = torch.zeros(1, N_EXPRESSIONS, dtype=torch.float32).cuda() + ## self.morph(zeroExpr) + ## self.save_model_to_obj_file('model_zero_expression.obj') + ## ## + ## onesExpr = torch.ones(1, N_EXPRESSIONS, dtype=torch.float32).cuda() + ## self.morph(onesExpr) + ## self.save_model_to_obj_file('model_ones_expression.obj') + ## exit() + ## ## + + # default expression + zeroExpr = torch.zeros(1, N_EXPRESSIONS, dtype=torch.float32).cuda() + self.morph(zeroExpr) + + + def save_model_to_obj_file(self, filename, mask=None): + faces_cpu = self.faces.detach().cpu().numpy() + vertices_cpu = self.vertices.detach().cpu().numpy() + + mask_cpu = None + if not type(mask) == type(None): + mask_cpu = mask.detach().cpu().numpy() + + f = open(filename, 'w') + if type(mask) == type(None): + for i in range(0, self.num_vertices): + f.write('v ' + str(vertices_cpu[0, i, 0]) + ' ' + str(vertices_cpu[0, i, 1]) + ' ' + str(vertices_cpu[0, i, 2]) + '\n') + else: + for i in range(0, self.num_vertices): + f.write('v ' + str(vertices_cpu[0, i, 0]) + ' ' + str(vertices_cpu[0, i, 1]) + ' ' + str(vertices_cpu[0, i, 2]) + ' ' + str(mask_cpu[i]) + ' ' + str(mask_cpu[i]) + ' ' + str(mask_cpu[i]) + ' 1'+ '\n') + + for i in range(0, self.num_faces): + f.write('f ' + str(faces_cpu[0, i, 0]+1) + '// ' + str(faces_cpu[0, i, 1]+1) + '// ' + str(faces_cpu[0, i, 2]+1) + '//\n') + + f.close() + + def compute_expression_delta(self, expressions): + return 
torch.transpose(torch.matmul(self.expression_basis, torch.transpose(expressions, 0,1)), 0, 2) # note that matmul wants to have this order: (a x b x c) x (c x m) => (a x b x m) + + def morph(self, expressions): + self.vertices = self.average_vertices + self.compute_expression_delta(expressions) + return self.vertices + + def LoadMask(self, filename=''): + if filename=='': + print('use default mask') + filename = './BaselModel/mask/defaultMask_mouth.obj' + + mask = np.zeros(self.num_vertices) + file = open(filename, 'r') + i=0 + for line in file: + if line[0] == 'v': + floats = [float(x) for x in line[1:].split()] + if floats[3] == 1.0 and floats[4] == 0.0 and floats[5] == 0.0: + mask[i] = 1.0 + i += 1 + file.close() + return torch.tensor(mask.astype(np.float32)).cuda() \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/ReadMe.md b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/ReadMe.md new file mode 100644 index 0000000..4165ff6 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/ReadMe.md @@ -0,0 +1,6 @@ +This code is used to train the audio to expression network. +-> train_audio2expressionsAttentionTMP.sh +You need to provide the training data that fits your face model. +The face model is defined in "BaselModel", you need to provide the average model, a mask for the mouth and the basis vectors. + +As training data you need to provide data that fits the "data/multi_face_audio_eq_tmp_cached_dataset.py" data loader. \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/__init__.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/alphabet.txt b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/alphabet.txt new file mode 100644 index 0000000..46aa35e --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/alphabet.txt @@ -0,0 +1,33 @@ +# Each line in this file represents the Unicode codepoint (UTF-8 encoded) +# associated with a numeric label. +# A line that starts with # is a comment. You can escape it with \# if you wish +# to use '#' as a label. + +a +b +c +d +e +f +g +h +i +j +k +l +m +n +o +p +q +r +s +t +u +v +w +x +y +z +' +# The last (non-comment) line needs to end with a newline. diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/__init__.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/__init__.py new file mode 100644 index 0000000..3735ac1 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/__init__.py @@ -0,0 +1,90 @@ +import importlib +import torch.utils.data +from data.base_data_loader import BaseDataLoader +from data.base_dataset import BaseDataset + + +def find_dataset_using_name(dataset_name): + # Given the option --dataset_mode [datasetname], + # the file "data/datasetname_dataset.py" + # will be imported. + dataset_filename = "data." + dataset_name + "_dataset" + datasetlib = importlib.import_module(dataset_filename) + + # In the file, the class called DatasetNameDataset() will + # be instantiated. It has to be a subclass of BaseDataset, + # and it is case-insensitive. 
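The transpose/matmul chain in `compute_expression_delta()` above implements the standard linear blendshape model: vertices = average shape + basis-weighted expression offsets. A small sketch of the same computation with explicit shapes (the vertex count is made up here; the real one comes from the loaded mesh):

import torch

E, V = 76, 1000                      # N_EXPRESSIONS, num_vertices (V hypothetical)
basis = torch.randn(E, V, 3)         # expression basis before the transpose trick
expressions = torch.randn(1, E)      # one batch of blendshape weights
average = torch.randn(1, V, 3)

delta = torch.einsum('be,evc->bvc', expressions, basis)  # == compute_expression_delta()
vertices = average + delta                               # == morph()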
+ dataset = None + target_dataset_name = dataset_name.replace('_', '') + 'dataset' + for name, cls in datasetlib.__dict__.items(): + if name.lower() == target_dataset_name.lower() \ + and issubclass(cls, BaseDataset): + dataset = cls + + if dataset is None: + print("In %s.py, there should be a subclass of BaseDataset with class name that matches %s in lowercase." % (dataset_filename, target_dataset_name)) + exit(0) + + return dataset + + +def get_option_setter(dataset_name): + dataset_class = find_dataset_using_name(dataset_name) + return dataset_class.modify_commandline_options + + +def create_dataset(opt): + dataset = find_dataset_using_name(opt.dataset_mode) + instance = dataset() + instance.initialize(opt) + print("dataset [%s] was created" % (instance.name())) + return instance + + +def CreateDataLoader(opt): + data_loader = CustomDatasetDataLoader() + data_loader.initialize(opt) + return data_loader + + +# Wrapper class of Dataset class that performs +# multi-threaded data loading +class CustomDatasetDataLoader(BaseDataLoader): + def name(self): + return 'CustomDatasetDataLoader' + + def initialize(self, opt): + BaseDataLoader.initialize(self, opt) + self.dataset = create_dataset(opt) + if opt.serial_batches: + self.dataloader = torch.utils.data.DataLoader( + self.dataset, + batch_size=opt.batch_size, + shuffle=not opt.serial_batches, + num_workers=int(opt.num_threads)) + else: + #weights = make_weights_for_balanced_classes(dataset_train.imgs, len(dataset_train.classes)) + weights = self.dataset.getSampleWeights() + weights = torch.DoubleTensor(weights) + sampler = torch.utils.data.sampler.WeightedRandomSampler(weights, len(weights)) + + self.dataloader = torch.utils.data.DataLoader( + self.dataset, + batch_size=opt.batch_size, + #shuffle=True, + sampler=sampler, + pin_memory=True, + num_workers=int(opt.num_threads)) + + + def load_data(self): + return self + + def __len__(self): + return min(len(self.dataset), self.opt.max_dataset_size) + + def __iter__(self): + for i, data in enumerate(self.dataloader): + if i * self.opt.batch_size >= self.opt.max_dataset_size: + break + yield data diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/aligned_dataset.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/aligned_dataset.py new file mode 100644 index 0000000..b4cfa17 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/aligned_dataset.py @@ -0,0 +1,208 @@ +import os.path +import random +import torchvision.transforms as transforms +import torch +import numpy as np +from data.base_dataset import BaseDataset +from data.audio import Audio +#from data.image_folder import make_dataset +from PIL import Image + +#def make_dataset(dir): +# images = [] +# assert os.path.isdir(dir), '%s is not a valid directory' % dir +# for root, _, fnames in sorted(os.walk(dir)): +# for fname in fnames: +# if any(fname.endswith(extension) for extension in ['.bin', '.BIN']): +# path = os.path.join(root, fname) +# images.append(path) +# return sorted(images) + +def make_dataset(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.bin', '.BIN']): + id_str = fname[:-4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + + for id in ids: + fname=str(id)+'.bin' + path = os.path.join(root, fname) + images.append(path) + return images + +def 
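Putting the pieces of `data/__init__.py` together, the wrapper is used like this from a training or test script; a minimal sketch, assuming `opt` is the namespace produced by the options parser (so `opt.dataset_mode`, `opt.batch_size`, etc. are already set):

from data import CreateDataLoader

data_loader = CreateDataLoader(opt)   # picks data/<dataset_mode>_dataset.py
dataset = data_loader.load_data()     # the wrapper itself is the iterable
print('#samples = %d' % len(data_loader))
for i, batch in enumerate(dataset):
    pass                              # batch is the dict built by __getitem__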
load_intrinsics(input_dir): + file = open(input_dir+"/intrinsics.txt", "r") + intrinsics = [[(float(x) for x in line.split())] for line in file] + file.close() + intrinsics = list(intrinsics[0][0]) + return intrinsics + +def load_rigids(input_dir): + file = open(input_dir+"/rigid.txt", "r") + rigid_floats = [[float(x) for x in line.split()] for line in file] # note that it stores 5 lines per matrix (blank line) + file.close() + all_rigids = [ [rigid_floats[4*idx + 0],rigid_floats[4*idx + 1],rigid_floats[4*idx + 2],rigid_floats[4*idx + 3]] for idx in range(0, len(rigid_floats)//4) ] + return all_rigids + +def load_expressions(input_dir): + file = open(input_dir+"/expression.txt", "r") + expressions = [[float(x) for x in line.split()] for line in file] + file.close() + return expressions + +def load_audio(input_dir): + audio = Audio(input_dir + '/audio.mp3', write_mel_spectogram = False) + #audio = Audio(input_dir + '/audio.mp3', write_mel_spectogram = True) + + return audio + +def make_ids(paths, root_dir): + ids = [] + + for fname in paths: + l = fname.rfind('/') + id_str = fname[l+1:-4] + i = int(id_str) + #print(fname, ': ', i) + ids.append(i) + return ids + +class AlignedDataset(BaseDataset): + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + self.opt = opt + self.root = opt.dataroot + self.data_dir = os.path.join(opt.dataroot, opt.phase) + self.frame_paths = make_dataset(self.data_dir) + self.frame_ids = make_ids(self.frame_paths, self.root) + self.intrinsics = load_intrinsics(self.data_dir) + self.extrinsics = load_rigids(self.data_dir) + self.expressions = load_expressions(self.data_dir) + + self.audio = load_audio(self.data_dir) + self.audio_window_size = opt.audio_window_size + + opt.nObjects = 1 + assert(opt.resize_or_crop == 'resize_and_crop') + + def __getitem__(self, index): + + # get video data + frame_id = index + + #print('GET ITEM: ', index) + img_path = self.frame_paths[index] + frame_id = self.frame_ids[index] + + # intrinsics and extrinsics + intrinsics = self.intrinsics + extrinsics = self.extrinsics[frame_id] + + # get audio mel sample window + frame_rate = 24#29.97 + #frame_rate = len(self.expressions) / self.audio.time_total + mel_frame_idx = int((frame_id / frame_rate) * self.audio.mel_sample_rate) + mels = self.audio.getWindow(mel_frame_idx, self.audio_window_size) + + + # expressions + expressions = torch.tensor(self.expressions[frame_id]) + + # default image dimensions + IMG_DIM_X = 512 + IMG_DIM_Y = 512 + + # load image data + #assert(IMG_DIM == self.opt.fineSize) + img_array = np.memmap(img_path, dtype='float32', mode='r').__array__() + if img_array.size != IMG_DIM_X * IMG_DIM_Y * 5: + IMG_DIM_X = int(img_array[0]) + IMG_DIM_Y = int(img_array[1]) + img_array = img_array[2:] + intrinsics = img_array[0:4] + img_array = img_array[4:] + + img_array = np.clip(img_array, 0.0, 1.0) + img = np.resize(img_array, (IMG_DIM_Y, IMG_DIM_X, 5)) + A = img[:,:,0:3] + B = img[:,:,3:5] + B = np.concatenate((B, np.zeros((IMG_DIM_Y, IMG_DIM_X, 1))), axis=2) + + TARGET = transforms.ToTensor()(A.astype(np.float32)) + UV = transforms.ToTensor()(B.astype(np.float32)) + + TARGET = 2.0 * TARGET - 1.0 + UV = 2.0 * UV - 1.0 + + + # load deepspeech feature + dsf_fname = img_path[:-4] + '.deepspeech.npy' +# print('dsf_fname:', dsf_fname) + feature_array = np.load(dsf_fname) + #feature_array = np.memmap(dsf_fname, dtype='float32', mode='r').__array__() +# print('feature_array shape: ', feature_array.shape) + dsf_np = 
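The audio/video alignment above is worth spelling out: with the 24 fps frame rate hard-coded in `__getitem__` and the 20 ms mel hop from `Audio`, each video frame maps to roughly two mel frames, and `getWindow()` zero-pads the window at clip boundaries. A small worked example under those defaults:

frame_rate = 24                  # fps assumed by AlignedDataset.__getitem__
mel_sample_rate = 1000 / 20      # ~50 mel frames/s for a 20 ms hop
frame_id = 48                    # video frame at t = 2.0 s
mel_frame_idx = int((frame_id / frame_rate) * mel_sample_rate)   # -> 100
window_size = 16                 # opt.audio_window_size
lo, hi = mel_frame_idx - window_size // 2, mel_frame_idx + window_size // 2
assert (lo, hi) == (92, 108)     # getWindow() slices mels[:, 92:108]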
np.resize(feature_array, (16,29,1)) + dsf = transforms.ToTensor()(dsf_np.astype(np.float32)) + + ################################# + ####### apply augmentation ###### + ################################# + if not self.opt.no_augmentation: + # random dimensions + new_dim_x = np.random.randint(int(IMG_DIM_X * 0.75), IMG_DIM_X+1) + new_dim_y = np.random.randint(int(IMG_DIM_Y * 0.75), IMG_DIM_Y+1) + new_dim_x = int(np.floor(new_dim_x / 64.0) * 64 ) # << dependent on the network structure !! 64 => 6 layers + new_dim_y = int(np.floor(new_dim_y / 64.0) * 64 ) + if new_dim_x > IMG_DIM_X: new_dim_x -= 64 + if new_dim_y > IMG_DIM_Y: new_dim_y -= 64 + + # random pos + if IMG_DIM_X == new_dim_x: offset_x = 0 + else: offset_x = np.random.randint(0, IMG_DIM_X-new_dim_x) + if IMG_DIM_Y == new_dim_y: offset_y = 0 + else: offset_y = np.random.randint(0, IMG_DIM_Y-new_dim_y) + + # select subwindow + TARGET = TARGET[:, offset_y:offset_y+new_dim_y, offset_x:offset_x+new_dim_x] + UV = UV[:, offset_y:offset_y+new_dim_y, offset_x:offset_x+new_dim_x] + + # compute new intrinsics + # TODO: atm not needed but maybe later + + else: + new_dim_x = int(np.floor(IMG_DIM_X / 64.0) * 64 ) # << dependent on the network structure !! 64 => 6 layers + new_dim_y = int(np.floor(IMG_DIM_Y / 64.0) * 64 ) + offset_x = 0 + offset_y = 0 + # select subwindow + TARGET = TARGET[:, offset_y:offset_y+new_dim_y, offset_x:offset_x+new_dim_x] + UV = UV[:, offset_y:offset_y+new_dim_y, offset_x:offset_x+new_dim_x] + + # compute new intrinsics + # TODO: atm not needed but maybe later + + ################################# + + return {'TARGET': TARGET, 'UV': UV, + 'paths': self.frame_paths[index],#img_path, + 'intrinsics': intrinsics, + 'extrinsics': extrinsics, + 'expressions': expressions, + 'audio_mels': mels, + 'audio_deepspeech': dsf, # deepspeech feature + 'object_id':0} + + def __len__(self): + return len(self.frame_paths) + + def name(self): + return 'AlignedDataset' diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/audio.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/audio.py new file mode 100644 index 0000000..4932c94 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/audio.py @@ -0,0 +1,77 @@ +import time +import random +import math + +import numpy as np + +import torch +import torch.nn as nn +import torch.nn.functional as F +import torchvision.transforms as transforms + +import torchaudio +import torchaudio.transforms + +import librosa +import scipy.signal +import librosa.display +import matplotlib.pyplot as plt + + +class Audio(): + def name(self): + return 'Audio' + + def __init__(self, filename, write_mel_spectogram = False): + self.n_mels=128 + self.fmax=8000 + self.hop_length_ms = 20 + + sound, sample_rate = librosa.load(filename)#torchaudio.load(filename) + self.raw_audio = sound + self.sample_rate = sample_rate + print('sample_rate = %d' % self.sample_rate) + self.n_samples = sound.shape[0] + self.time_total = self.n_samples / self.sample_rate + print('length = %ds' % self.time_total) + + print('compute mel spectrogram...') + self.hop_length = int(sample_rate / 1000.0 * self.hop_length_ms) + print('hop_length: ', self.hop_length) + self.mel_spectrogram = librosa.feature.melspectrogram(y=self.raw_audio, sr=self.sample_rate, hop_length=self.hop_length, n_mels=self.n_mels, fmax=self.fmax) + + + if write_mel_spectogram: + print('write spectrogram to file') + plt.figure(figsize=(100, 15)) + 
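The augmentation above always snaps the crop size down to a multiple of 64 because, as the inline comment notes, a 6-level encoder halves the resolution six times (2^6 = 64). A tiny sketch of that rule:

import numpy as np

def snap64(dim):
    # largest multiple of 64 that fits, as in the augmentation code above
    return int(np.floor(dim / 64.0) * 64)

assert snap64(512) == 512   # already aligned
assert snap64(600) == 576
assert snap64(127) == 64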
librosa.display.specshow(librosa.power_to_db(self.mel_spectrogram, ref=np.max), y_axis='mel', fmax=self.fmax, x_axis='time') + plt.colorbar(format='%+2.0f dB') + plt.title('Mel spectrogram') + plt.tight_layout() + plt.savefig('mel_features.png', dpi=None, facecolor='w', edgecolor='w', orientation='portrait', papertype=None, format=None, transparent=False, bbox_inches=None, pad_inches=0.1, frameon=None, metadata=None) + + print('mel: ', self.mel_spectrogram.shape) # (128, 18441) + self.n_mel_frames = self.mel_spectrogram.shape[1] + self.mel_sample_rate = self.mel_spectrogram.shape[1] / self.time_total + print('n_mel_frames: ', self.n_mel_frames) + print('mel_sample_rate: ', self.mel_sample_rate) + + # convert to torch + self.mel_spectrogram = torch.FloatTensor(self.mel_spectrogram) + + def getWindow(self, mel_frame_idx, window_size): + # get audio mel sample window + audio_start = mel_frame_idx - (window_size//2) + audio_end = mel_frame_idx + (window_size//2) + if audio_start < 0: + audio_input = self.mel_spectrogram[0:self.n_mels, 0:audio_end] + zeros = torch.zeros((self.n_mels,-audio_start)) + audio_input = torch.cat([zeros, audio_input], 1) + elif audio_end >= self.n_mel_frames: + audio_input = self.mel_spectrogram[:, audio_start:-1] + zeros = torch.zeros((self.n_mels,audio_end-self.n_mel_frames + 1)) + audio_input = torch.cat([audio_input, zeros], 1) + else: + audio_input = self.mel_spectrogram[:, audio_start:audio_end] + + return torch.reshape(audio_input, (1, 1, self.n_mels, window_size)) \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/audio_dataset.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/audio_dataset.py new file mode 100644 index 0000000..ba0d603 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/audio_dataset.py @@ -0,0 +1,145 @@ +import os.path +import random +import torchvision.transforms as transforms +import torch +import numpy as np +from data.base_dataset import BaseDataset +from data.audio import Audio +#from data.image_folder import make_dataset +from PIL import Image + +def make_dataset(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.deepspeech.npy']): + id_str = fname[:-15] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + + for id in ids: + fname=str(id)+'.deepspeech.npy' + path = os.path.join(root, fname) + images.append(path) + return images + +def make_ids(paths, root_dir): + ids = [] + + for fname in paths: + l = fname.rfind('/') + #id_str = fname[l+1:-4] + id_str = fname[l+1:-15] + i = int(id_str) + #print(fname, ': ', i) + ids.append(i) + return ids + +class AudioDataset(BaseDataset): + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + self.opt = opt + self.root = opt.dataroot + self.data_dir = os.path.join(opt.dataroot, 'audio_feature') + self.frame_paths = make_dataset(self.data_dir) + self.frame_ids = make_ids(self.frame_paths, self.root) + + opt.nObjects = 1 + opt.nTrainObjects = 116 # TODO + opt.nTestObjects = 1 + opt.test_sequence_names = [[opt.dataroot.split("/")[-1], 'test']] + assert(opt.resize_or_crop == 'resize_and_crop') + + if opt.isTrain: + print('ERROR: audio_dataset only allowed for test') + exit() + + def getSampleWeights(self): + weights = 
np.ones((len(self.frame_paths))) + return weights + + def getAudioFilename(self): + return os.path.join(self.root, 'audio.wav') + + def getAudioFeatureFilename(self, idx): + return self.frame_paths[idx % len(self.frame_paths)] + + def __getitem__(self, index): + + #print('GET ITEM: ', index) + frame_path = self.frame_paths[index] + frame_id = self.frame_ids[index] + + # load deepspeech feature + feature_array = np.load(frame_path) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf = transforms.ToTensor()(dsf_np.astype(np.float32)) + + + # load sequence data if necessary + if self.opt.look_ahead:# use prev and following frame infos + r = self.opt.seq_len//2 + for i in range(1,r): # prev frames + index_seq = index - i + if index_seq < 0: index_seq = 0 + + dsf_fname = self.frame_paths[index_seq] + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + for i in range(1,self.opt.seq_len - r + 1): # following frames + index_seq = index + i + max_idx = len(self.frame_paths)-1 + if index_seq > max_idx: index_seq = max_idx + + dsf_fname = self.frame_paths[index_seq] + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf, dsf_seq], 0) # seq_len x 16 x 29 + # note the ordering [old ... current ... future] + else: + for i in range(1,self.opt.seq_len): + index_seq = index - i + if index_seq < 0: index_seq = 0 + dsf_fname = self.frame_paths[index_seq] + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... 
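With the SEQ_LEN=8 used in transfer.sh and `--look_ahead`, the loop above gathers a fixed temporal window of DeepSpeech features around the current frame, clamping out-of-range indices to the clip boundaries. The offsets it visits, as a quick check:

seq_len = 8
r = seq_len // 2
offsets = [-i for i in range(r - 1, 0, -1)] + [0] + list(range(1, seq_len - r + 1))
assert offsets == [-3, -2, -1, 0, 1, 2, 3, 4]   # [old ... current ... future]
# each offset contributes one (16, 29) feature map -> dsf is (8, 16, 29)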
current] + + + ################################# + zeroIdentity = torch.zeros(100) + zeroExpressions = torch.zeros(76) + + target_id = -1 + internal_sequence_id = 0 + + weight = 1.0 / len(self.frame_paths) + + return {'paths': frame_path, + 'expressions': zeroExpressions, + 'identity': zeroIdentity, + 'intrinsics': np.zeros((4)), + 'extrinsics': np.zeros((4,4)), + 'audio_deepspeech': dsf, # deepspeech feature + 'target_id':target_id, + 'internal_id':internal_sequence_id, + 'weight': np.array([weight]).astype(np.float32)} + + def __len__(self): + return len(self.frame_paths) + + def name(self): + return 'AudioDataset' diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/base_data_loader.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/base_data_loader.py new file mode 100644 index 0000000..ae5a168 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/base_data_loader.py @@ -0,0 +1,10 @@ +class BaseDataLoader(): + def __init__(self): + pass + + def initialize(self, opt): + self.opt = opt + pass + + def load_data(): + return None diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/base_dataset.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/base_dataset.py new file mode 100644 index 0000000..25c7c8c --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/base_dataset.py @@ -0,0 +1,105 @@ +import torch.utils.data as data +from PIL import Image +import torchvision.transforms as transforms + + +class BaseDataset(data.Dataset): + def __init__(self): + super(BaseDataset, self).__init__() + + def name(self): + return 'BaseDataset' + + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + pass + + def getSampleWeights(self): + return torch.ones((len(self))) + + def __len__(self): + return 0 + + +def get_transform(opt): + transform_list = [] + if opt.resize_or_crop == 'resize_and_crop': + osize = [opt.loadSize, opt.loadSize] + transform_list.append(transforms.Resize(osize, Image.BICUBIC)) + transform_list.append(transforms.RandomCrop(opt.fineSize)) + elif opt.resize_or_crop == 'crop': + transform_list.append(transforms.RandomCrop(opt.fineSize)) + elif opt.resize_or_crop == 'scale_width': + transform_list.append(transforms.Lambda( + lambda img: __scale_width(img, opt.fineSize))) + elif opt.resize_or_crop == 'scale_width_and_crop': + transform_list.append(transforms.Lambda( + lambda img: __scale_width(img, opt.loadSize))) + transform_list.append(transforms.RandomCrop(opt.fineSize)) + elif opt.resize_or_crop == 'none': + transform_list.append(transforms.Lambda( + lambda img: __adjust(img))) + else: + raise ValueError('--resize_or_crop %s is not a valid option.' 
% opt.resize_or_crop) + + if opt.isTrain and not opt.no_flip: + transform_list.append(transforms.RandomHorizontalFlip()) + + transform_list += [transforms.ToTensor(), + transforms.Normalize((0.5, 0.5, 0.5), + (0.5, 0.5, 0.5))] + return transforms.Compose(transform_list) + + +# just modify the width and height to be multiple of 4 +def __adjust(img): + ow, oh = img.size + + # the size needs to be a multiple of this number, + # because going through generator network may change img size + # and eventually cause size mismatch error + mult = 4 + if ow % mult == 0 and oh % mult == 0: + return img + w = (ow - 1) // mult + w = (w + 1) * mult + h = (oh - 1) // mult + h = (h + 1) * mult + + if ow != w or oh != h: + __print_size_warning(ow, oh, w, h) + + return img.resize((w, h), Image.BICUBIC) + + +def __scale_width(img, target_width): + ow, oh = img.size + + # the size needs to be a multiple of this number, + # because going through generator network may change img size + # and eventually cause size mismatch error + mult = 4 + assert target_width % mult == 0, "the target width needs to be multiple of %d." % mult + if (ow == target_width and oh % mult == 0): + return img + w = target_width + target_height = int(target_width * oh / ow) + m = (target_height - 1) // mult + h = (m + 1) * mult + + if target_height != h: + __print_size_warning(target_width, target_height, w, h) + + return img.resize((w, h), Image.BICUBIC) + + +def __print_size_warning(ow, oh, w, h): + if not hasattr(__print_size_warning, 'has_printed'): + print("The image size needs to be a multiple of 4. " + "The loaded image size was (%d, %d), so it was adjusted to " + "(%d, %d). This adjustment will be done to all images " + "whose sizes are not multiples of 4" % (ow, oh, w, h)) + __print_size_warning.has_printed = True diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/face_dataset.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/face_dataset.py new file mode 100644 index 0000000..a89c931 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/face_dataset.py @@ -0,0 +1,382 @@ +import os.path +import random +import torchvision.transforms as transforms +import torch +import numpy as np +from data.base_dataset import BaseDataset +from data.audio import Audio +#from data.image_folder import make_dataset +from PIL import Image +from util import util + +#def make_dataset(dir): +# images = [] +# assert os.path.isdir(dir), '%s is not a valid directory' % dir +# for root, _, fnames in sorted(os.walk(dir)): +# for fname in fnames: +# if any(fname.endswith(extension) for extension in ['.bin', '.BIN']): +# path = os.path.join(root, fname) +# images.append(path) +# return sorted(images) + +def make_dataset(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.npy', '.NPY']): + #.deepspeech.npy + id_str = fname[:-15] #4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + + for id in ids: + fname=str(id)+'.deepspeech.npy' + path = os.path.join(root, fname) + images.append(path) + return images + +def make_ids(paths, root_dir): + ids = [] + + for fname in paths: + l = fname.rfind('/') + id_str = fname[l+1:-15]#4] + i = int(id_str) + #print(fname, ': ', i) + ids.append(i) + return ids + +def make_dataset_png_ids(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a 
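`get_transform()` above builds the standard torchvision preprocessing chain; for the `resize_and_crop` mode required by these datasets it resizes to `loadSize`, random-crops to `fineSize`, optionally flips during training, and normalizes to [-1, 1]. A usage sketch (`opt` and `path` assumed to come from the caller):

from PIL import Image

transform = get_transform(opt)                    # e.g. resize_and_crop mode
img = transform(Image.open(path).convert('RGB'))  # float tensor in [-1, 1]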
valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.png', '.PNG']): + id_str = fname[:-4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + return ids + +def make_dataset_exr_ids(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.exr', '.EXR']): + id_str = fname[:-4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + return ids + + +def load_intrinsics(input_dir): + file = open(input_dir+"/intrinsics.txt", "r") + intrinsics = [[(float(x) for x in line.split())] for line in file] + file.close() + intrinsics = list(intrinsics[0][0]) + return intrinsics + +def load_rigids(input_dir): + file = open(input_dir+"/rigid.txt", "r") + rigid_floats = [[float(x) for x in line.split()] for line in file] # note that it stores 5 lines per matrix (blank line) + file.close() + all_rigids = [ [rigid_floats[4*idx + 0],rigid_floats[4*idx + 1],rigid_floats[4*idx + 2],rigid_floats[4*idx + 3]] for idx in range(0, len(rigid_floats)//4) ] + return all_rigids + +def load_expressions(input_dir): + file = open(input_dir+"/expression.txt", "r") + expressions = [[float(x) for x in line.split()] for line in file] + file.close() + return expressions + +def load_identity(input_dir): + file = open(input_dir+"/identities.txt", "r") + identity = [[float(x) for x in line.split()] for line in file] + file.close() + return identity + + + +class FaceDataset(BaseDataset): + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + self.opt = opt + + # directories + self.dataroot = opt.dataroot + self.audio_feature_dir = os.path.join(opt.dataroot, 'audio_feature') + self.image_dir = os.path.join(opt.dataroot, 'images') + self.uvs_dir = os.path.join(opt.dataroot, 'uvs') + + # debug print + print('load sequence:', self.dataroot) + print('\taudio_feature_dir:', self.audio_feature_dir) + print('\timage_dir:', self.image_dir) + print('\tuvs_dir:', self.uvs_dir) + + # generate index maps + audio_ids = make_ids(make_dataset(self.audio_feature_dir), self.dataroot) + image_ids = make_dataset_png_ids(self.image_dir) + uvs_ids = make_dataset_exr_ids(self.uvs_dir) + + # get model parameters + intrinsics = load_intrinsics(self.dataroot) + extrinsics = load_rigids(self.dataroot) + expressions = load_expressions(self.dataroot) + identities = load_identity(self.dataroot) + + if opt.phase == 'test': # test overwrites the audio and uv files, as well as expressions + print('Test mode. 
Overwriting audio, uv and expressions') + print('source sequence:', opt.source_dir) + dataroot = opt.source_dir + self.audio_feature_dir = os.path.join(dataroot, 'audio_feature') + self.uvs_dir = os.path.join(dataroot, 'uvs') + audio_ids = make_ids(make_dataset(self.audio_feature_dir), dataroot) + uvs_ids = make_dataset_exr_ids(self.uvs_dir) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(dataroot) + identities = load_identity(dataroot) + + print('\tnum audio_ids:', len(audio_ids)) + print('\tnum image_ids:', len(image_ids)) + print('\tnum uvs_ids:', len(uvs_ids)) + + + # set data + min_len = min(len(audio_ids), len(image_ids), len(uvs_ids), len(extrinsics), len(expressions)) + self.audio_ids = audio_ids[:min_len] + self.image_ids = image_ids[:min_len] + self.uvs_ids = uvs_ids[:min_len] + self.intrinsics = intrinsics + self.extrinsics = extrinsics[:] #extrinsics[:min_len] + self.expressions = expressions[:] #expressions[:min_len] + self.identities = identities[:] #identities[:min_len] + self.n_frames_total = min_len + + print('\tnum frames:', self.n_frames_total) + + + opt.nTrainObjects = 1 + opt.nValObjects = 1 + opt.nTestObjects = 1 + + + opt.test_sequence_names = [[opt.dataroot.split("/")[-1], 'train']] + if opt.phase == 'test': + opt.test_sequence_names = [[opt.source_dir.split("/")[-1], 'test']] + print('test:', opt.test_sequence_names) + + assert(opt.resize_or_crop == 'resize_and_crop') + + def getSampleWeights(self): + weights = np.ones((self.n_frames_total)) + return weights + + def getExtrinsics(self, idx): + return self.extrinsics[self.uvs_ids[idx % self.n_frames_total]] + def getIntrinsics(self, idx): + return self.intrinsics + def getIdentities(self, idx): + return self.identities[self.uvs_ids[idx % self.n_frames_total]] + def getExpressions(self, idx): + return self.expressions[self.uvs_ids[idx % self.n_frames_total]] + + def getAudioFilename(self): + return os.path.join(self.dataroot, 'audio.wav') + + def getImageFilename(self, idx): + image_id = self.image_ids[idx % self.n_frames_total] + img_fname = os.path.join(self.image_dir, str(image_id).zfill(5) + '.png') + return img_fname + #img_numpy = np.asarray(Image.open(img_fname)) + #TARGET = 2.0 * transforms.ToTensor()(img_numpy.astype(np.float32))/255.0 - 1.0 + + def getAudioFeatureFilename(self, idx): + #return self.frame_paths[idx % len(self.frame_paths)] + audio_id = self.audio_ids[idx] + return os.path.join(self.audio_feature_dir, str(audio_id) + '.deepspeech.npy') + + + def computeCrop(self, mask, MULTIPLE_OF=64, random_size=False): + IMG_DIM_X = mask.shape[2] + IMG_DIM_Y = mask.shape[1] + if random_size: + # random dimensions + new_dim_x = np.random.randint(int(IMG_DIM_X * 0.75), IMG_DIM_X+1) + new_dim_y = np.random.randint(int(IMG_DIM_Y * 0.75), IMG_DIM_Y+1) + new_dim_x = int(np.floor(new_dim_x / float(MULTIPLE_OF)) * MULTIPLE_OF ) + new_dim_y = int(np.floor(new_dim_y / float(MULTIPLE_OF)) * MULTIPLE_OF ) + else: + new_dim_x = 3 * MULTIPLE_OF + new_dim_y = 3 * MULTIPLE_OF + + # check dims + if new_dim_x > IMG_DIM_X: new_dim_x -= MULTIPLE_OF + if new_dim_y > IMG_DIM_Y: new_dim_y -= MULTIPLE_OF + + # random pos + mask_indices = torch.nonzero(mask) + _, bb_mid_point_y, bb_mid_point_x = mask_indices[np.random.randint(0, mask_indices.shape[0])].data.cpu() + #print('bb_mid_point', bb_mid_point_x, bb_mid_point_y) + + offset_x = bb_mid_point_x - new_dim_x/2 + offset_y = bb_mid_point_y - new_dim_y/2 + + + if IMG_DIM_X == new_dim_x: offset_x = 0 + if offset_x < 0: 
offset_x = 0 + if offset_x+new_dim_x >= IMG_DIM_X: offset_x = IMG_DIM_X-new_dim_x + + if IMG_DIM_Y == new_dim_y: offset_y = 0 + if offset_y < 0: offset_y = 0 + if offset_y+new_dim_y >= IMG_DIM_Y: offset_y = IMG_DIM_Y-new_dim_y + + return np.array([int(offset_x),int(offset_y),int(new_dim_x), int(new_dim_y)]) + + + def __getitem__(self, global_index): + # select frame from sequence + index = global_index + + # get data ids + audio_id = self.audio_ids[index] + image_id = self.image_ids[index] + uv_id = self.uvs_ids[index] + + + + #print('GET ITEM: ', index) + img_fname = os.path.join(self.image_dir, str(image_id).zfill(5) + '.png') + img_numpy = np.asarray(Image.open(img_fname)) + TARGET = 2.0 * transforms.ToTensor()(img_numpy.astype(np.float32))/255.0 - 1.0 + + uv_fname = os.path.join(self.uvs_dir, str(uv_id).zfill(5) + '.exr') + uv_numpy = util.load_exr(uv_fname) + UV = transforms.ToTensor()(uv_numpy.astype(np.float32)) + UV = torch.where(UV > 1.0, torch.zeros_like(UV), UV) + UV = torch.where(UV < 0.0, torch.zeros_like(UV), UV) + UV = 2.0 * UV - 1.0 + + #print('img_fname:', img_fname) + #print('uv_fname:', uv_fname) + + # intrinsics and extrinsics + intrinsics = self.intrinsics + extrinsics = self.extrinsics[uv_id] + + # expressions + expressions = np.asarray(self.expressions[audio_id], dtype=np.float32) + #print('expressions:', expressions.shape) + expressions[32] *= 0.0 # remove eye brow movements + expressions[41] *= 0.0 # remove eye brow movements + expressions[71:75] *= 0.0 # remove eye brow movements + expressions = torch.tensor(expressions) + + # identity + identity = torch.tensor(self.identities[audio_id]) + + # load deepspeech feature + dsf_fname = os.path.join(self.audio_feature_dir, str(audio_id) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf = transforms.ToTensor()(dsf_np.astype(np.float32)) + + + # load sequence data if necessary + if self.opt.look_ahead:# use prev and following frame infos + r = self.opt.seq_len//2 + last_valid_idx = audio_id + for i in range(1,r): # prev frames + index_seq = index - i + if index_seq < 0: index_seq = 0 + audio_id_seq = self.audio_ids[index_seq] + if audio_id_seq == audio_id - i: last_valid_idx = audio_id_seq + else: audio_id_seq = last_valid_idx + + dsf_fname = os.path.join(self.audio_feature_dir, str(audio_id_seq) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + last_valid_idx = audio_id + for i in range(1,self.opt.seq_len - r + 1): # following frames + index_seq = index + i + max_idx = len(self.audio_ids)-1 + if index_seq > max_idx: index_seq = max_idx + audio_id_seq = self.audio_ids[index_seq] + if audio_id_seq == audio_id + i: last_valid_idx = audio_id_seq + else: audio_id_seq = last_valid_idx + + dsf_fname = os.path.join(self.audio_feature_dir, str(audio_id_seq) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf, dsf_seq], 0) # seq_len x 16 x 29 + # note the ordering [old ... current ... 
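A minimal sketch of the DeepSpeech window assembly used in __getitem__ above: seq_len feature frames (16 x 29 each) are concatenated along the channel axis in [oldest ... current ... future] order when look_ahead is set, clamping at the sequence boundaries. `load_dsf` is a hypothetical helper, and the sketch assumes contiguous frame ids (the original additionally falls back to the last valid id when audio ids have gaps).

import torch

def sketch_audio_window(load_dsf, index, n_frames, seq_len):
    # load_dsf(i) -> 1 x 16 x 29 tensor for frame i (hypothetical helper).
    r = seq_len // 2
    offsets = range(-(r - 1), seq_len - r + 1)   # seq_len offsets, past..future
    clamp = lambda i: min(max(i, 0), n_frames - 1)
    return torch.cat([load_dsf(clamp(index + o)) for o in offsets], dim=0)
    # result: seq_len x 16 x 29, matching the torch.cat chain above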
future] + else: + last_valid_idx = audio_id + for i in range(1,self.opt.seq_len): + index_seq = index - i + if index_seq < 0: index_seq = 0 + audio_id_seq = self.audio_ids[index_seq] + if audio_id_seq == audio_id - i: last_valid_idx = audio_id_seq + else: audio_id_seq = last_valid_idx + + dsf_fname = os.path.join(self.audio_feature_dir, str(audio_id_seq) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + + ################################# + ####### apply augmentation ###### + ################################# + crop = np.array([0,0,UV.shape[2],UV.shape[1]]) + if not self.opt.no_augmentation: + INVALID_UV = -1 + mask = ( (UV[0:1,:,:] != INVALID_UV) | (UV[1:2,:,:] != INVALID_UV) ) + crop = self.computeCrop(mask, MULTIPLE_OF=64) # << dependent on the network structure !! 64 => 6 layers + + offset_x,offset_y,new_dim_x, new_dim_y = crop + # select subwindow + TARGET = TARGET[:, offset_y:offset_y+new_dim_y, offset_x:offset_x+new_dim_x] + UV = UV[:, offset_y:offset_y+new_dim_y, offset_x:offset_x+new_dim_x] + + # compute new intrinsics + # TODO: atm not needed but maybe later + + ################################# + weight = 1.0 / self.n_frames_total + + return {'TARGET': TARGET, 'UV': UV, + 'paths': dsf_fname, #img_path, + 'intrinsics': np.array(intrinsics), + 'extrinsics': np.array(extrinsics), + 'expressions': expressions, + 'identity': identity, + 'audio_deepspeech': dsf, # deepspeech feature + 'target_id': -1, + 'crop': crop, + 'internal_id': 0, + 'weight': np.array([weight]).astype(np.float32)} + + def __len__(self): + return self.n_frames_total + + def name(self): + return 'FaceDataset' diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/facetmp_dataset.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/facetmp_dataset.py new file mode 100644 index 0000000..bf2baeb --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/facetmp_dataset.py @@ -0,0 +1,394 @@ +import os.path +import random +import torchvision.transforms as transforms +import torch +import numpy as np +from data.base_dataset import BaseDataset +from data.audio import Audio +#from data.image_folder import make_dataset +from PIL import Image +from util import util + +#def make_dataset(dir): +# images = [] +# assert os.path.isdir(dir), '%s is not a valid directory' % dir +# for root, _, fnames in sorted(os.walk(dir)): +# for fname in fnames: +# if any(fname.endswith(extension) for extension in ['.bin', '.BIN']): +# path = os.path.join(root, fname) +# images.append(path) +# return sorted(images) + +def make_dataset(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.npy', '.NPY']): + #.deepspeech.npy + id_str = fname[:-15] #4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + + for id in ids: + fname=str(id)+'.deepspeech.npy' + path = os.path.join(root, fname) + images.append(path) + return images + +def make_ids(paths, root_dir): + ids = [] + + for fname in paths: + l = fname.rfind('/') + id_str = fname[l+1:-15]#4] + i = int(id_str) + #print(fname, ': ', i) + ids.append(i) + return ids + +def make_dataset_png_ids(dir): + images = [] + ids = [] 
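The make_dataset/make_ids pair above recovers integer frame ids by slicing the fixed 15-character ".deepspeech.npy" suffix (fname[:-15]). A minimal sketch of the same convention with an explicit pattern; the `sketch_` name is illustrative only (Python 3.8+ for the walrus operator):

import os
import re

def sketch_collect_deepspeech_ids(audio_feature_dir):
    # '12.deepspeech.npy' -> 12; mirrors the fname[:-15] slicing above.
    pat = re.compile(r'^(\d+)\.deepspeech\.npy$')
    ids = [int(m.group(1)) for f in os.listdir(audio_feature_dir)
           if (m := pat.match(f))]
    return sorted(ids)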
+ assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.png', '.PNG']): + id_str = fname[:-4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + return ids + +def make_dataset_exr_ids(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.exr', '.EXR']): + id_str = fname[:-4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + return ids + + +def load_intrinsics(input_dir): + file = open(input_dir+"/intrinsics.txt", "r") + intrinsics = [[(float(x) for x in line.split())] for line in file] + file.close() + intrinsics = list(intrinsics[0][0]) + return intrinsics + +def load_rigids(input_dir): + file = open(input_dir+"/rigid.txt", "r") + rigid_floats = [[float(x) for x in line.split()] for line in file] # note that it stores 5 lines per matrix (blank line) + file.close() + all_rigids = [ [rigid_floats[4*idx + 0],rigid_floats[4*idx + 1],rigid_floats[4*idx + 2],rigid_floats[4*idx + 3]] for idx in range(0, len(rigid_floats)//4) ] + return all_rigids + +def load_expressions(input_dir): + file = open(input_dir+"/expression.txt", "r") + expressions = [[float(x) for x in line.split()] for line in file] + file.close() + return expressions + +def load_identity(input_dir): + file = open(input_dir+"/identities.txt", "r") + identity = [[float(x) for x in line.split()] for line in file] + file.close() + return identity + + + +class FaceTmpDataset(BaseDataset): + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + self.opt = opt + + # directories + self.dataroot = opt.dataroot + self.audio_feature_dir = os.path.join(opt.dataroot, 'audio_feature') + self.image_dir = os.path.join(opt.dataroot, 'images') + self.uvs_dir = os.path.join(opt.dataroot, 'uvs') + + # debug print + print('load sequence:', self.dataroot) + print('\taudio_feature_dir:', self.audio_feature_dir) + print('\timage_dir:', self.image_dir) + print('\tuvs_dir:', self.uvs_dir) + + # generate index maps + audio_ids = make_ids(make_dataset(self.audio_feature_dir), self.dataroot) + image_ids = make_dataset_png_ids(self.image_dir) + uvs_ids = make_dataset_exr_ids(self.uvs_dir) + + # get model parameters + intrinsics = load_intrinsics(self.dataroot) + extrinsics = load_rigids(self.dataroot) + expressions = load_expressions(self.dataroot) + identities = load_identity(self.dataroot) + + if opt.phase == 'test': # test overwrites the audio and uv files, as well as expressions + print('Test mode. 
Overwriting audio, uv and expressions') + print('source sequence:', opt.source_dir) + dataroot = opt.source_dir + self.audio_feature_dir = os.path.join(dataroot, 'audio_feature') + self.uvs_dir = os.path.join(dataroot, 'uvs') + audio_ids = make_ids(make_dataset(self.audio_feature_dir), dataroot) + uvs_ids = make_dataset_exr_ids(self.uvs_dir) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(dataroot) + identities = load_identity(dataroot) + + print('\tnum audio_ids:', len(audio_ids)) + print('\tnum image_ids:', len(image_ids)) + print('\tnum uvs_ids:', len(uvs_ids)) + + + # set data + min_len = min(len(audio_ids), len(image_ids), len(uvs_ids), len(extrinsics), len(expressions)) + self.audio_ids = audio_ids[:min_len] + self.image_ids = image_ids[:min_len] + self.uvs_ids = uvs_ids[:min_len] + self.intrinsics = intrinsics + self.extrinsics = extrinsics[:] # extrinsics[:min_len] + self.expressions = expressions[:] # expressions[:min_len] + self.identities = identities[:] #identities[:min_len] + self.n_frames_total = min_len + + print('\tnum frames:', self.n_frames_total) + + + opt.nTrainObjects = 1 + opt.nValObjects = 1 + opt.nTestObjects = 1 + + + opt.test_sequence_names = [[opt.dataroot.split("/")[-1], 'train']] + if opt.phase == 'test': + + opt.test_sequence_names = [[opt.source_dir.split("/")[-1], 'test']] + print('test:', opt.test_sequence_names) + + assert(opt.resize_or_crop == 'resize_and_crop') + + def getSampleWeights(self): + weights = np.ones((self.n_frames_total)) + return weights + + def computeCrop(self, mask, MULTIPLE_OF=64, random_size=False): + IMG_DIM_X = mask.shape[2] + IMG_DIM_Y = mask.shape[1] + if random_size: + # random dimensions + new_dim_x = np.random.randint(int(IMG_DIM_X * 0.75), IMG_DIM_X+1) + new_dim_y = np.random.randint(int(IMG_DIM_Y * 0.75), IMG_DIM_Y+1) + new_dim_x = int(np.floor(new_dim_x / float(MULTIPLE_OF)) * MULTIPLE_OF ) + new_dim_y = int(np.floor(new_dim_y / float(MULTIPLE_OF)) * MULTIPLE_OF ) + else: + new_dim_x = 2 * MULTIPLE_OF + new_dim_y = 2 * MULTIPLE_OF + + # check dims + if new_dim_x > IMG_DIM_X: new_dim_x -= MULTIPLE_OF + if new_dim_y > IMG_DIM_Y: new_dim_y -= MULTIPLE_OF + + # random pos + mask_indices = torch.nonzero(mask) + _, bb_mid_point_y, bb_mid_point_x = mask_indices[np.random.randint(0, mask_indices.shape[0])].data.cpu() + #print('bb_mid_point', bb_mid_point_x, bb_mid_point_y) + + offset_x = bb_mid_point_x - new_dim_x/2 + offset_y = bb_mid_point_y - new_dim_y/2 + + + if IMG_DIM_X == new_dim_x: offset_x = 0 + if offset_x < 0: offset_x = 0 + if offset_x+new_dim_x >= IMG_DIM_X: offset_x = IMG_DIM_X-new_dim_x + + if IMG_DIM_Y == new_dim_y: offset_y = 0 + if offset_y < 0: offset_y = 0 + if offset_y+new_dim_y >= IMG_DIM_Y: offset_y = IMG_DIM_Y-new_dim_y + + + return np.array([int(offset_x),int(offset_y),int(new_dim_x), int(new_dim_y)]) + + def getitem(self, global_index, crop=None): + # select frame from sequence + index = global_index + + # get data ids + audio_id = self.audio_ids[index] + image_id = self.image_ids[index] + uv_id = self.uvs_ids[index] + + #print('GET ITEM: ', index) + img_fname = os.path.join(self.image_dir, str(image_id).zfill(5) + '.png') + img_numpy = np.asarray(Image.open(img_fname)) + TARGET = 2.0 * transforms.ToTensor()(img_numpy.astype(np.float32))/255.0 - 1.0 + + uv_fname = os.path.join(self.uvs_dir, str(uv_id).zfill(5) + '.exr') + uv_numpy = util.load_exr(uv_fname) + UV = transforms.ToTensor()(uv_numpy.astype(np.float32)) + UV = torch.where(UV > 1.0, 
torch.zeros_like(UV), UV) + UV = torch.where(UV < 0.0, torch.zeros_like(UV), UV) + UV = 2.0 * UV - 1.0 + + + # intrinsics and extrinsics + intrinsics = self.intrinsics + extrinsics = self.extrinsics[uv_id] + + # expressions + expressions = np.asarray(self.expressions[audio_id], dtype=np.float32) + #print('expressions:', expressions.shape) + expressions[32] *= 0.0 # remove eye brow movements + expressions[41] *= 0.0 # remove eye brow movements + expressions[71:75] *= 0.0 # remove eye brow movements + expressions = torch.tensor(expressions) + + # identity + identity = torch.tensor(self.identities[audio_id]) + + # load deepspeech feature + dsf_fname = os.path.join(self.audio_feature_dir, str(audio_id) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf = transforms.ToTensor()(dsf_np.astype(np.float32)) + + + # load sequence data if necessary + if self.opt.look_ahead:# use prev and following frame infos + r = self.opt.seq_len//2 + last_valid_idx = audio_id + for i in range(1,r): # prev frames + index_seq = index - i + if index_seq < 0: index_seq = 0 + audio_id_seq = self.audio_ids[index_seq] + if audio_id_seq == audio_id - i: last_valid_idx = audio_id_seq + else: audio_id_seq = last_valid_idx + + dsf_fname = os.path.join(self.audio_feature_dir, str(audio_id_seq) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + last_valid_idx = audio_id + for i in range(1,self.opt.seq_len - r + 1): # following frames + index_seq = index + i + max_idx = len(self.audio_ids)-1 + if index_seq > max_idx: index_seq = max_idx + audio_id_seq = self.audio_ids[index_seq] + if audio_id_seq == audio_id + i: last_valid_idx = audio_id_seq + else: audio_id_seq = last_valid_idx + + dsf_fname = os.path.join(self.audio_feature_dir, str(audio_id_seq) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf, dsf_seq], 0) # seq_len x 16 x 29 + # note the ordering [old ... current ... future] + else: + last_valid_idx = audio_id + for i in range(1,self.opt.seq_len): + index_seq = index - i + if index_seq < 0: index_seq = 0 + audio_id_seq = self.audio_ids[index_seq] + if audio_id_seq == audio_id - i: last_valid_idx = audio_id_seq + else: audio_id_seq = last_valid_idx + + dsf_fname = os.path.join(self.audio_feature_dir, str(audio_id_seq) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + ################################# + ####### apply augmentation ###### + ################################# + + if not self.opt.no_augmentation: + if type(crop) == type(None): + INVALID_UV = -1 + mask = ( (UV[0:1,:,:] != INVALID_UV) | (UV[1:2,:,:] != INVALID_UV) ) + crop = self.computeCrop(mask, MULTIPLE_OF=64) # << dependent on the network structure !! 
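A short sketch of the UV normalisation performed above, combining the two sequential torch.where calls into one equivalent mask: out-of-range texels are zeroed, then the map is rescaled from [0, 1] to [-1, 1]. The function name is illustrative.

import torch

def sketch_normalize_uv(uv):
    # uv: 2 x H x W float tensor of texture coordinates
    uv = torch.where((uv > 1.0) | (uv < 0.0), torch.zeros_like(uv), uv)
    return 2.0 * uv - 1.0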
64 => 6 layers + + offset_x,offset_y,new_dim_x, new_dim_y = crop + + # select subwindow + TARGET = TARGET[:, offset_y:offset_y+new_dim_y, offset_x:offset_x+new_dim_x] + UV = UV[:, offset_y:offset_y+new_dim_y, offset_x:offset_x+new_dim_x] + + # compute new intrinsics + # TODO: atm not needed but maybe later + + ################################# + + + return {'TARGET': TARGET, 'UV': UV, + 'paths': dsf_fname, #img_path, + 'intrinsics': np.array(intrinsics), + 'extrinsics': np.array(extrinsics), + 'expressions': expressions, + 'identity': identity, + 'audio_deepspeech': dsf, # deepspeech feature + #'target_id':target_id, + 'crop': crop, + + 'internal_id': 0} + + + def __getitem__(self, global_index): + # select frame from sequence + index = global_index + + current = self.getitem(index) + crop = current['crop'] + prv = self.getitem(max(index-1, 0), crop) + nxt = self.getitem(min(index+1, self.n_frames_total-1), crop) + + if type(crop) == type(None): + crop = np.array([0,0,current['UV'].shape[2],current['UV'].shape[1]]) + + weight = 1.0 / self.n_frames_total + + return {'TARGET': current['TARGET'], + 'UV': current['UV'], + 'paths': current['paths'], + 'intrinsics': current['intrinsics'], + 'extrinsics': current['extrinsics'], + 'expressions': current['expressions'], + 'identity': current['identity'], + 'audio_deepspeech': current['audio_deepspeech'], + + 'prv_TARGET': prv['TARGET'], + 'prv_UV': prv['UV'], + 'prv_audio_deepspeech': prv['audio_deepspeech'], + 'prv_expressions': prv['expressions'], + + 'nxt_TARGET': nxt['TARGET'], + 'nxt_UV': nxt['UV'], + 'nxt_audio_deepspeech': nxt['audio_deepspeech'], + 'nxt_expressions': nxt['expressions'], + + 'crop': crop, + + 'internal_id': current['internal_id'], + 'weight': np.array([weight]).astype(np.float32)} + + def __len__(self): + return self.n_frames_total + + def name(self): + return 'FaceTmpDataset' diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/multi_face_audio_eq_tmp_cached_dataset.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/multi_face_audio_eq_tmp_cached_dataset.py new file mode 100644 index 0000000..9bb4504 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/multi_face_audio_eq_tmp_cached_dataset.py @@ -0,0 +1,458 @@ +import os.path +import random +import torchvision.transforms as transforms +import torch +import numpy as np +from data.base_dataset import BaseDataset +from data.audio import Audio +#from data.image_folder import make_dataset +from PIL import Image +import progressbar + +#def make_dataset(dir): +# images = [] +# assert os.path.isdir(dir), '%s is not a valid directory' % dir +# for root, _, fnames in sorted(os.walk(dir)): +# for fname in fnames: +# if any(fname.endswith(extension) for extension in ['.bin', '.BIN']): +# path = os.path.join(root, fname) +# images.append(path) +# return sorted(images) + +def make_dataset(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.npy', '.NPY']): + #.deepspeech.npy + id_str = fname[:-15] #4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + + for id in ids: + fname=str(id)+'.deepspeech.npy' + path = os.path.join(root, fname) + images.append(path) + return images + +def make_ids(paths, root_dir): + ids = [] + + for fname in paths: + l = fname.rfind('/') + id_str = fname[l+1:-15]#4] + i = int(id_str) + #print(fname, 
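A sketch of the shared-crop triplet sampling that FaceTmpDataset.__getitem__ below performs: the crop window is computed once for the current frame and passed to the previous/next lookups so all three tensors stay spatially aligned. `sketch_triplet` is an illustrative wrapper around the dataset's own getitem(index, crop) method.

def sketch_triplet(dataset, index):
    # Reuse the current frame's crop for neighbours so windows line up.
    current = dataset.getitem(index)
    crop = current['crop']
    prv = dataset.getitem(max(index - 1, 0), crop)
    nxt = dataset.getitem(min(index + 1, dataset.n_frames_total - 1), crop)
    return prv, current, nxt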
': ', i) + ids.append(i) + return ids + +def make_dataset_ids_png(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.png', '.png']): + id_str = fname[:-4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + return ids + +def load_intrinsics(input_dir): + file = open(input_dir+"/intrinsics.txt", "r") + intrinsics = [[(float(x) for x in line.split())] for line in file] + file.close() + intrinsics = list(intrinsics[0][0]) + return intrinsics + +def load_rigids(input_dir): + file = open(input_dir+"/rigid.txt", "r") + rigid_floats = [[float(x) for x in line.split()] for line in file] # note that it stores 5 lines per matrix (blank line) + file.close() + all_rigids = [ [rigid_floats[4*idx + 0],rigid_floats[4*idx + 1],rigid_floats[4*idx + 2],rigid_floats[4*idx + 3]] for idx in range(0, len(rigid_floats)//4) ] + return all_rigids + +def load_expressions(input_dir): + file = open(input_dir+"/expression.txt", "r") + expressions = [[float(x) for x in line.split()] for line in file] + file.close() + return expressions + +def load_identity(input_dir): + file = open(input_dir+"/identities.txt", "r") + identities = [[float(x) for x in line.split()] for line in file] + file.close() + return identities + + +class MultiFaceAudioEQTmpCachedDataset(BaseDataset): + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + self.opt = opt + self.root = opt.dataroot + + # read dataset file that contains the filenames for the train, val and test lists + file = open(self.root+"/dataset.txt", "r") + self.filename_train_list, self.filename_val_list, self.filename_test_list = [str(line) for line in file] + file.close() + if self.filename_train_list[-1] == '\n': self.filename_train_list = self.filename_train_list[:-1] + if self.filename_val_list[-1] == '\n': self.filename_val_list = self.filename_val_list[:-1] + if self.filename_test_list[-1] == '\n': self.filename_test_list = self.filename_test_list[:-1] + + + + # get list of train sequences + file = open(self.root+"/" + self.filename_train_list, "r") + self.train_sequence_names = [str(line) for line in file] + file.close() + for i in range(0,len(self.train_sequence_names)): + if self.train_sequence_names[i][-1] == '\n': + self.train_sequence_names[i] = self.train_sequence_names[i][:-1] + + # get list of val sequences + file = open(self.root+"/" + self.filename_val_list, "r") + self.val_sequence_names = [[str(w) for w in line.split()] for line in file] + file.close() + for i in range(0,len(self.val_sequence_names)): + if self.val_sequence_names[i][0][-1] == '\n': self.val_sequence_names[i][0] = self.val_sequence_names[i][0][:-1] + if self.val_sequence_names[i][1][-1] == '\n': self.val_sequence_names[i][1] = self.val_sequence_names[i][1][:-1] + + # get list of test sequences + file = open(self.root+"/" + self.filename_test_list, "r") + self.test_sequence_names = [[str(w) for w in line.split()] for line in file] + if opt.output_audio_expressions: self.test_sequence_names = self.test_sequence_names[0:1] + file.close() + for i in range(0,len(self.test_sequence_names)): + if self.test_sequence_names[i][0][-1] == '\n': self.test_sequence_names[i][0] = self.test_sequence_names[i][0][:-1] + if self.test_sequence_names[i][1][-1] == '\n': self.test_sequence_names[i][1] = self.test_sequence_names[i][1][:-1] + + # print some stats + 
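The initialize code below infers the following plain-text index layout (reconstructed from the parsing logic; the concrete filenames are whatever dataset.txt names, shown here only as placeholders):

# dataset.txt      -> exactly three lines: <train_list> <val_list> <test_list>
# <train_list>     -> one training sequence name per line
# <val/test_list>  -> "<source_sequence> <target_train_sequence>" per line
def sketch_read_lines(path):
    with open(path) as f:
        return [line.rstrip('\n') for line in f]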
print('filename_train_list:', self.filename_train_list) + print('\tnum_seq:', len(self.train_sequence_names)) + print('filename_val_list: ', self.filename_val_list) + print('\tnum_seq:', len(self.val_sequence_names)) + print('filename_test_list: ', self.filename_test_list) + print('\tnum_seq:', len(self.test_sequence_names)) + + opt.train_sequence_names = self.train_sequence_names + opt.val_sequence_names = self.val_sequence_names + opt.test_sequence_names = self.test_sequence_names + + # search mapping from val, test to train sequences that are used as targets + self.val_sequence_targets = [] + for i in range(0,len(self.val_sequence_names)): + target_name = self.val_sequence_names[i][1] + target_id = -1 + for j in range(0,len(self.train_sequence_names)): + if self.train_sequence_names[j] == target_name: + target_id = j + break + if target_id == -1: + print('Target sequence not in train set! ', target_name) + exit() + self.val_sequence_targets.append(target_id) + + self.test_sequence_targets = [] + for i in range(0,len(self.test_sequence_names)): + target_name = self.test_sequence_names[i][1] + target_id = -1 + for j in range(0,len(self.train_sequence_names)): + if self.train_sequence_names[j] == target_name: + target_id = j + break + if target_id == -1: + print('Target sequence not in train set! ', target_name) + exit() + self.test_sequence_targets.append(target_id) + print('test: ', self.test_sequence_names[i]) + print('\t target:', target_id) + + # store len values + opt.nTrainObjects = len(self.train_sequence_names) + opt.nValObjects = len(self.val_sequence_names) + opt.nTestObjects = len(self.test_sequence_names) + + ################################################ + ################################################ + ################################################ + + # prepare dataloader paths / data + self.audio_feature_dir = [] + self.image_dir = [] + self.uvs_dir = [] + self.audio_ids = [] + self.image_ids = [] + self.intrinsics = [] + self.extrinsics = [] + self.expressions = [] + self.identities = [] + self.target_id = [] + self.n_frames_total = 0 + + if opt.phase == 'train': + self.sequence_names = self.train_sequence_names + for i in range(0,len(self.train_sequence_names)): + dataroot = os.path.join(opt.dataroot, self.train_sequence_names[i]) + audio_feature_dir = os.path.join(opt.dataroot, self.train_sequence_names[i], 'audio_feature') + image_dir = os.path.join(opt.dataroot, self.train_sequence_names[i], 'images') + uvs_dir = os.path.join(opt.dataroot, self.train_sequence_names[i], 'uvs') + print('load train sequence:', self.train_sequence_names[i]) + print('\tidentity_dir:', dataroot) + print('\taudio_feature_dir:', audio_feature_dir) + print('\timage_dir:', image_dir) + print('\tuvs_dir:', uvs_dir) + + audio_ids = make_ids(make_dataset(audio_feature_dir), dataroot) + image_ids = make_dataset_ids_png(image_dir) # [-1] * len(audio_ids) #make_ids(make_dataset(image_dir), dataroot) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(dataroot) + identity = load_identity(dataroot) + + min_len = min(len(audio_ids), len(image_ids), len(extrinsics), len(expressions)) + + self.audio_feature_dir.append(audio_feature_dir) + self.image_dir.append(image_dir) + self.uvs_dir.append(uvs_dir) + self.audio_ids.append(audio_ids[:min_len]) + self.image_ids.append(image_ids[:min_len]) + self.intrinsics.append(intrinsics) + self.extrinsics.append(extrinsics[:min_len]) + self.expressions.append(expressions[:min_len]) + 
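The val/test-to-train target search above is a linear scan per entry that exits on a miss. A behaviour-preserving sketch of the same mapping via a dict (illustrative rewrite, raising SystemExit instead of print + exit()):

def sketch_map_targets(pairs, train_names):
    # pairs: [(source_name, target_name), ...] from the val/test list files.
    index = {name: j for j, name in enumerate(train_names)}
    missing = [t for _, t in pairs if t not in index]
    if missing:
        raise SystemExit('Target sequence not in train set! %s' % missing)
    return [index[t] for _, t in pairs]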
self.identities.append(identity[:min_len]) + self.target_id.append(i) + + self.n_frames_total += min_len + elif opt.phase == 'val': + for i in range(0,len(self.val_sequence_names)): + target_id = self.val_sequence_targets[i] + dataroot = os.path.join(opt.dataroot, self.train_sequence_names[target_id]) + audio_feature_dir = os.path.join(opt.dataroot, self.val_sequence_names[i][0], 'audio_feature') + image_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'images') + uvs_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'uvs') + print('load val sequence:', self.val_sequence_names[i]) + print('\tidentity_dir:', dataroot) + print('\taudio_feature_dir:', audio_feature_dir) + print('\timage_dir:', image_dir) + print('\tuvs_dir:', uvs_dir) + audio_ids = make_ids(make_dataset(audio_feature_dir), os.path.join(opt.dataroot, self.val_sequence_names[i][0])) + image_ids = make_dataset_ids_png(image_dir) # [-1] * len(audio_ids) #make_ids(make_dataset(image_dir), dataroot) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(os.path.join(opt.dataroot, self.val_sequence_names[i][0])) + identity = load_identity(dataroot) + + min_len = min(len(audio_ids), len(image_ids), len(extrinsics), len(expressions)) + + self.audio_feature_dir.append(audio_feature_dir) + self.image_dir.append(image_dir) + self.uvs_dir.append(uvs_dir) + self.audio_ids.append(audio_ids[:min_len]) + self.image_ids.append(image_ids[:min_len]) + self.intrinsics.append(intrinsics) + self.extrinsics.append(extrinsics[:min_len]) + self.expressions.append(expressions[:min_len]) + self.identities.append(identity[:min_len]) + self.target_id.append(target_id) + + self.n_frames_total += min_len + else: # test + for i in range(0,len(self.test_sequence_names)): + target_id = self.test_sequence_targets[i] + dataroot = os.path.join(opt.dataroot, self.train_sequence_names[target_id]) + audio_feature_dir = os.path.join(opt.dataroot, self.test_sequence_names[i][0], 'audio_feature') + image_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'images') + uvs_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'uvs') + print('load test sequence:', self.test_sequence_names[i]) + print('\tidentity_dir:', dataroot) + print('\taudio_feature_dir:', audio_feature_dir) + print('\timage_dir:', image_dir) + print('\tuvs_dir:', uvs_dir) + audio_ids = make_ids(make_dataset(audio_feature_dir), os.path.join(opt.dataroot, self.test_sequence_names[i][0])) + image_ids = make_dataset_ids_png(image_dir) # [-1] * len(audio_ids) #make_ids(make_dataset(image_dir), dataroot) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(os.path.join(opt.dataroot, self.test_sequence_names[i][0])) + identity = load_identity(dataroot) + + min_len = min(len(audio_ids), len(image_ids), len(extrinsics), len(expressions)) + + self.audio_feature_dir.append(audio_feature_dir) + self.image_dir.append(image_dir) + self.uvs_dir.append(uvs_dir) + self.audio_ids.append(audio_ids[:min_len]) + self.image_ids.append(image_ids[:min_len]) + self.intrinsics.append(intrinsics) + self.extrinsics.append(extrinsics[:min_len]) + self.expressions.append(expressions[:min_len]) + self.identities.append(identity[:min_len]) + self.target_id.append(target_id) + + self.n_frames_total += min_len + + + print('frames_total:', self.n_frames_total) + + + #global_target_ids = [] + #for i in range(0,len(self.audio_ids)): + # for j in 
range(0,len(self.audio_ids[i])): + # global_target_ids.append(self.target_id[i]) + #global_target_ids=np.array(global_target_ids) + #self.weights = np.where(global_target_ids==2, 1.0 * np.ones((self.n_frames_total)), 0.01 * np.ones((self.n_frames_total)) ) + self.weights = [] + for i in range(0,len(self.audio_ids)): + l = len(self.audio_ids[i]) + for j in range(0,l): + self.weights.append(1.0 / l) + self.weights = np.array(self.weights) + + assert(opt.resize_or_crop == 'resize_and_crop') + + # mapping global to internal + self.mapping_global2internal = [] + self.mapping_global2internal_offset = [] + self.dsf = [] + offset = 0 + with progressbar.ProgressBar(max_value=self.n_frames_total) as bar: + for i in range(0,len(self.audio_ids)): + l = len(self.audio_ids[i]) + dsf_seq = [] + for k in range(0,l): + self.mapping_global2internal.append(i) + self.mapping_global2internal_offset.append(offset) + dsf_fname = os.path.join(self.audio_feature_dir[i], str(self.audio_ids[i][k]) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq.append(dsf_np.astype(np.float32)) + bar.update(offset + k) + self.dsf.append(dsf_seq) + offset += l + + + def getSampleWeights(self): + return self.weights + + def getitem(self, global_index): + + # select sequence + internal_sequence_id = self.mapping_global2internal[global_index] + sum_frames = self.mapping_global2internal_offset[global_index] + + # select frame from sequence + index = (global_index-sum_frames) % len(self.audio_ids[internal_sequence_id]) + + # get data ids + audio_id = self.audio_ids[internal_sequence_id][index] + image_id = self.image_ids[internal_sequence_id][index] + + #print('GET ITEM: ', index) + #img_path = self.frame_paths[sequence_id][index] + + # intrinsics and extrinsics + intrinsics = self.intrinsics[internal_sequence_id] + extrinsics = self.extrinsics[internal_sequence_id][image_id] + + # expressions + expressions = np.asarray(self.expressions[internal_sequence_id][audio_id], dtype=np.float32) + #print('expressions:', expressions.shape) + expressions[32] *= 0.0 # remove eye brow movements + expressions[41] *= 0.0 # remove eye brow movements + expressions[71:75] *= 0.0 # remove eye brow movements + expressions = torch.tensor(expressions) + + # identity + identity = torch.tensor(self.identities[internal_sequence_id][image_id]) + target_id = self.target_id[internal_sequence_id] # sequence id refers to the target sequence (of the training corpus) + + # load deepspeech feature + #print('audio_id', audio_id) + dsf_fname = os.path.join(self.audio_feature_dir[internal_sequence_id], str(audio_id) + '.deepspeech.npy') + + dsf_np = self.dsf[internal_sequence_id][index] + dsf = transforms.ToTensor()(dsf_np) + + # load sequence data if necessary + if self.opt.look_ahead:# use prev and following frame infos + r = self.opt.seq_len//2 + for i in range(1,r): # prev frames + index_seq = index - i + if index_seq < 0: index_seq = 0 + dsf_np = self.dsf[internal_sequence_id][index_seq] + dsf_seq = transforms.ToTensor()(dsf_np) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... 
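The mapping_global2internal / mapping_global2internal_offset arrays built below give O(1) lookup of (sequence id, local frame index) per global index, in contrast to the per-item linear scan in multi_face_audio_eq_tmp_dataset.py later in this diff. A minimal sketch of that precomputation (names illustrative):

def sketch_build_index(audio_ids_per_seq):
    seq_of, offset_of, offset = [], [], 0
    for seq_id, ids in enumerate(audio_ids_per_seq):
        seq_of.extend([seq_id] * len(ids))
        offset_of.extend([offset] * len(ids))
        offset += len(ids)
    return seq_of, offset_of
# lookup: seq = seq_of[g]; local = g - offset_of[g]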
current] + + for i in range(1,self.opt.seq_len - r + 1): # following frames + index_seq = index + i + max_idx = len(self.audio_ids[internal_sequence_id])-1 + if index_seq > max_idx: index_seq = max_idx + dsf_np = self.dsf[internal_sequence_id][index_seq] + dsf_seq = transforms.ToTensor()(dsf_np) # 1 x 16 x 29 + dsf = torch.cat([dsf, dsf_seq], 0) # seq_len x 16 x 29 + # note the ordering [old ... current ... future] + else: + for i in range(1,self.opt.seq_len): + index_seq = index - i + if index_seq < 0: index_seq = 0 + dsf_np = self.dsf[internal_sequence_id][index_seq] + dsf_seq = transforms.ToTensor()(dsf_np) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + #weight = 1.0 / len(self.audio_feature_dir[internal_sequence_id]) + weight = self.weights[global_index] + + return {'paths': dsf_fname, #img_path, + 'intrinsics': np.array(intrinsics), + 'extrinsics': np.array(extrinsics), + 'expressions': expressions, + 'identity': identity, + 'audio_deepspeech': dsf, # deepspeech feature + 'target_id':target_id, + 'internal_id':internal_sequence_id, + + 'weight': np.array([weight]).astype(np.float32)} + + + def __getitem__(self, global_index): + # select frame from sequence + index = global_index + current = self.getitem(index) + prv = self.getitem(max(index-1, 0)) + nxt = self.getitem(min(index+1, self.n_frames_total-1)) + + return { + 'paths': current['paths'], #img_path, + 'target_id': current['target_id'], + 'internal_id': current['internal_id'], + 'weight': current['weight'], + 'identity': current['identity'], + 'intrinsics': current['intrinsics'], + + 'extrinsics': current['extrinsics'], + 'expressions': current['expressions'], + 'audio_deepspeech': current['audio_deepspeech'], + + 'extrinsics_prv': prv['extrinsics'], + 'expressions_prv': prv['expressions'], + 'audio_deepspeech_prv': prv['audio_deepspeech'], + + 'extrinsics_nxt': nxt['extrinsics'], + 'expressions_nxt': nxt['expressions'], + 'audio_deepspeech_nxt': nxt['audio_deepspeech'], + } + + + def __len__(self): + return self.n_frames_total #len(self.frame_paths[0]) + + def name(self): + return 'MultiFaceAudioEQTmpCachedDataset' diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/multi_face_audio_eq_tmp_dataset.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/multi_face_audio_eq_tmp_dataset.py new file mode 100644 index 0000000..e42f155 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/data/multi_face_audio_eq_tmp_dataset.py @@ -0,0 +1,435 @@ +import os.path +import random +import torchvision.transforms as transforms +import torch +import numpy as np +from data.base_dataset import BaseDataset +from data.audio import Audio +#from data.image_folder import make_dataset +from PIL import Image + +#def make_dataset(dir): +# images = [] +# assert os.path.isdir(dir), '%s is not a valid directory' % dir +# for root, _, fnames in sorted(os.walk(dir)): +# for fname in fnames: +# if any(fname.endswith(extension) for extension in ['.bin', '.BIN']): +# path = os.path.join(root, fname) +# images.append(path) +# return sorted(images) + +def make_dataset(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.npy', '.NPY']): + #.deepspeech.npy + id_str = fname[:-15] #4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + + for id in 
ids: + fname=str(id)+'.deepspeech.npy' + path = os.path.join(root, fname) + images.append(path) + return images + +def make_ids(paths, root_dir): + ids = [] + + for fname in paths: + l = fname.rfind('/') + id_str = fname[l+1:-15]#4] + i = int(id_str) + #print(fname, ': ', i) + ids.append(i) + return ids + +def make_dataset_ids_png(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.png', '.png']): + id_str = fname[:-4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + return ids + +def load_intrinsics(input_dir): + file = open(input_dir+"/intrinsics.txt", "r") + intrinsics = [[(float(x) for x in line.split())] for line in file] + file.close() + intrinsics = list(intrinsics[0][0]) + return intrinsics + +def load_rigids(input_dir): + file = open(input_dir+"/rigid.txt", "r") + rigid_floats = [[float(x) for x in line.split()] for line in file] # note that it stores 5 lines per matrix (blank line) + file.close() + all_rigids = [ [rigid_floats[4*idx + 0],rigid_floats[4*idx + 1],rigid_floats[4*idx + 2],rigid_floats[4*idx + 3]] for idx in range(0, len(rigid_floats)//4) ] + return all_rigids + +def load_expressions(input_dir): + file = open(input_dir+"/expression.txt", "r") + expressions = [[float(x) for x in line.split()] for line in file] + file.close() + return expressions + +def load_identity(input_dir): + file = open(input_dir+"/identities.txt", "r") + identities = [[float(x) for x in line.split()] for line in file] + file.close() + return identities + + +class MultiFaceAudioEQTmpDataset(BaseDataset): + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + self.opt = opt + self.root = opt.dataroot + + # read dataset file that contains the filenames for the train, val and test lists + file = open(self.root+"/dataset.txt", "r") + self.filename_train_list, self.filename_val_list, self.filename_test_list = [str(line) for line in file] + file.close() + if self.filename_train_list[-1] == '\n': self.filename_train_list = self.filename_train_list[:-1] + if self.filename_val_list[-1] == '\n': self.filename_val_list = self.filename_val_list[:-1] + if self.filename_test_list[-1] == '\n': self.filename_test_list = self.filename_test_list[:-1] + + + + # get list of train sequences + file = open(self.root+"/" + self.filename_train_list, "r") + self.train_sequence_names = [str(line) for line in file] + file.close() + for i in range(0,len(self.train_sequence_names)): + if self.train_sequence_names[i][-1] == '\n': + self.train_sequence_names[i] = self.train_sequence_names[i][:-1] + + # get list of val sequences + file = open(self.root+"/" + self.filename_val_list, "r") + self.val_sequence_names = [[str(w) for w in line.split()] for line in file] + file.close() + for i in range(0,len(self.val_sequence_names)): + if self.val_sequence_names[i][0][-1] == '\n': self.val_sequence_names[i][0] = self.val_sequence_names[i][0][:-1] + if self.val_sequence_names[i][1][-1] == '\n': self.val_sequence_names[i][1] = self.val_sequence_names[i][1][:-1] + + # get list of test sequences + file = open(self.root+"/" + self.filename_test_list, "r") + self.test_sequence_names = [[str(w) for w in line.split()] for line in file] + if opt.output_audio_expressions: self.test_sequence_names = self.test_sequence_names[0:1] + file.close() + for i in range(0,len(self.test_sequence_names)): + if 
self.test_sequence_names[i][0][-1] == '\n': self.test_sequence_names[i][0] = self.test_sequence_names[i][0][:-1] + if self.test_sequence_names[i][1][-1] == '\n': self.test_sequence_names[i][1] = self.test_sequence_names[i][1][:-1] + + # print some stats + print('filename_train_list:', self.filename_train_list) + print('\tnum_seq:', len(self.train_sequence_names)) + print('filename_val_list: ', self.filename_val_list) + print('\tnum_seq:', len(self.val_sequence_names)) + print('filename_test_list: ', self.filename_test_list) + print('\tnum_seq:', len(self.test_sequence_names)) + + opt.train_sequence_names = self.train_sequence_names + opt.val_sequence_names = self.val_sequence_names + opt.test_sequence_names = self.test_sequence_names + + # search mapping from val, test to train sequences that are used as targets + self.val_sequence_targets = [] + for i in range(0,len(self.val_sequence_names)): + target_name = self.val_sequence_names[i][1] + target_id = -1 + for j in range(0,len(self.train_sequence_names)): + if self.train_sequence_names[j] == target_name: + target_id = j + break + if target_id == -1: + print('Target sequence not in train set! ', target_name) + exit() + self.val_sequence_targets.append(target_id) + + self.test_sequence_targets = [] + for i in range(0,len(self.test_sequence_names)): + target_name = self.test_sequence_names[i][1] + target_id = -1 + for j in range(0,len(self.train_sequence_names)): + if self.train_sequence_names[j] == target_name: + target_id = j + break + if target_id == -1: + print('Target sequence not in train set! ', target_name) + exit() + self.test_sequence_targets.append(target_id) + print('test: ', self.test_sequence_names[i]) + print('\t target:', target_id) + + # store len values + opt.nTrainObjects = len(self.train_sequence_names) + opt.nValObjects = len(self.val_sequence_names) + opt.nTestObjects = len(self.test_sequence_names) + + ################################################ + ################################################ + ################################################ + + # prepare dataloader paths / data + self.audio_feature_dir = [] + self.image_dir = [] + self.uvs_dir = [] + self.audio_ids = [] + self.image_ids = [] + self.intrinsics = [] + self.extrinsics = [] + self.expressions = [] + self.identities = [] + self.target_id = [] + self.n_frames_total = 0 + + if opt.phase == 'train': + self.sequence_names = self.train_sequence_names + for i in range(0,len(self.train_sequence_names)): + dataroot = os.path.join(opt.dataroot, self.train_sequence_names[i]) + audio_feature_dir = os.path.join(opt.dataroot, self.train_sequence_names[i], 'audio_feature') + image_dir = os.path.join(opt.dataroot, self.train_sequence_names[i], 'images') + uvs_dir = os.path.join(opt.dataroot, self.train_sequence_names[i], 'uvs') + print('load train sequence:', self.train_sequence_names[i]) + print('\tidentity_dir:', dataroot) + print('\taudio_feature_dir:', audio_feature_dir) + print('\timage_dir:', image_dir) + print('\tuvs_dir:', uvs_dir) + + audio_ids = make_ids(make_dataset(audio_feature_dir), dataroot) + image_ids = make_dataset_ids_png(image_dir) # [-1] * len(audio_ids) #make_ids(make_dataset(image_dir), dataroot) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(dataroot) + identity = load_identity(dataroot) + + min_len = min(len(audio_ids), len(image_ids), len(extrinsics), len(expressions)) + + self.audio_feature_dir.append(audio_feature_dir) + self.image_dir.append(image_dir) + 
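In the val/test branches below, audio features come from the *source* sequence while images, uvs and identity come from the mapped *target* training sequence, so a new voice drives a known face. A sketch of just that directory resolution (function name illustrative):

import os

def sketch_resolve_dirs(dataroot, source_name, target_name):
    # audio from the source sequence, appearance from the target sequence
    return {
        'audio_feature': os.path.join(dataroot, source_name, 'audio_feature'),
        'images': os.path.join(dataroot, target_name, 'images'),
        'uvs': os.path.join(dataroot, target_name, 'uvs'),
    }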
self.uvs_dir.append(uvs_dir) + self.audio_ids.append(audio_ids[:min_len]) + self.image_ids.append(image_ids[:min_len]) + self.intrinsics.append(intrinsics) + self.extrinsics.append(extrinsics[:min_len]) + self.expressions.append(expressions[:min_len]) + self.identities.append(identity[:min_len]) + self.target_id.append(i) + + self.n_frames_total += min_len + elif opt.phase == 'val': + for i in range(0,len(self.val_sequence_names)): + target_id = self.val_sequence_targets[i] + dataroot = os.path.join(opt.dataroot, self.train_sequence_names[target_id]) + audio_feature_dir = os.path.join(opt.dataroot, self.val_sequence_names[i][0], 'audio_feature') + image_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'images') + uvs_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'uvs') + print('load val sequence:', self.val_sequence_names[i]) + print('\tidentity_dir:', dataroot) + print('\taudio_feature_dir:', audio_feature_dir) + print('\timage_dir:', image_dir) + print('\tuvs_dir:', uvs_dir) + audio_ids = make_ids(make_dataset(audio_feature_dir), os.path.join(opt.dataroot, self.val_sequence_names[i][0])) + image_ids = make_dataset_ids_png(image_dir) # [-1] * len(audio_ids) #make_ids(make_dataset(image_dir), dataroot) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(os.path.join(opt.dataroot, self.val_sequence_names[i][0])) + identity = load_identity(dataroot) + + min_len = min(len(audio_ids), len(image_ids), len(extrinsics), len(expressions)) + + self.audio_feature_dir.append(audio_feature_dir) + self.image_dir.append(image_dir) + self.uvs_dir.append(uvs_dir) + self.audio_ids.append(audio_ids[:min_len]) + self.image_ids.append(image_ids[:min_len]) + self.intrinsics.append(intrinsics) + self.extrinsics.append(extrinsics[:min_len]) + self.expressions.append(expressions[:min_len]) + self.identities.append(identity[:min_len]) + self.target_id.append(target_id) + + self.n_frames_total += min_len + else: # test + for i in range(0,len(self.test_sequence_names)): + target_id = self.test_sequence_targets[i] + dataroot = os.path.join(opt.dataroot, self.train_sequence_names[target_id]) + audio_feature_dir = os.path.join(opt.dataroot, self.test_sequence_names[i][0], 'audio_feature') + image_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'images') + uvs_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'uvs') + print('load test sequence:', self.test_sequence_names[i]) + print('\tidentity_dir:', dataroot) + print('\taudio_feature_dir:', audio_feature_dir) + print('\timage_dir:', image_dir) + print('\tuvs_dir:', uvs_dir) + audio_ids = make_ids(make_dataset(audio_feature_dir), os.path.join(opt.dataroot, self.test_sequence_names[i][0])) + image_ids = make_dataset_ids_png(image_dir) # [-1] * len(audio_ids) #make_ids(make_dataset(image_dir), dataroot) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(os.path.join(opt.dataroot, self.test_sequence_names[i][0])) + identity = load_identity(dataroot) + + min_len = min(len(audio_ids), len(image_ids), len(extrinsics), len(expressions)) + + self.audio_feature_dir.append(audio_feature_dir) + self.image_dir.append(image_dir) + self.uvs_dir.append(uvs_dir) + self.audio_ids.append(audio_ids[:min_len]) + self.image_ids.append(image_ids[:min_len]) + self.intrinsics.append(intrinsics) + self.extrinsics.append(extrinsics[:min_len]) + 
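The weight vector computed just below assigns each frame 1/len(sequence), so every sequence contributes equally to sampling regardless of its length. A sketch of how such weights could feed torch's WeightedRandomSampler; the sampler hookup is an assumption, since the diff only exposes getSampleWeights():

import numpy as np
import torch
from torch.utils.data import WeightedRandomSampler

def sketch_make_sampler(seq_lengths):
    weights = np.concatenate([np.full(l, 1.0 / l) for l in seq_lengths])
    return WeightedRandomSampler(torch.from_numpy(weights),
                                 num_samples=len(weights))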
self.expressions.append(expressions[:min_len]) + self.identities.append(identity[:min_len]) + self.target_id.append(target_id) + + self.n_frames_total += min_len + + + print('frames_total:', self.n_frames_total) + + + #global_target_ids = [] + #for i in range(0,len(self.audio_ids)): + # for j in range(0,len(self.audio_ids[i])): + # global_target_ids.append(self.target_id[i]) + #global_target_ids=np.array(global_target_ids) + #self.weights = np.where(global_target_ids==2, 1.0 * np.ones((self.n_frames_total)), 0.01 * np.ones((self.n_frames_total)) ) + self.weights = [] + for i in range(0,len(self.audio_ids)): + l = len(self.audio_ids[i]) + for j in range(0,l): + self.weights.append(1.0 / l) + self.weights = np.array(self.weights) + + assert(opt.resize_or_crop == 'resize_and_crop') + + def getSampleWeights(self): + return self.weights + + def getitem(self, global_index): + + # select sequence + internal_sequence_id = 0 + sum_frames = 0 + for i in range(0,len(self.audio_ids)): + l = len(self.audio_ids[i]) + if (global_index-sum_frames) < l: + internal_sequence_id = i + break + else: + sum_frames += len(self.audio_ids[i]) + + # select frame from sequence + index = (global_index-sum_frames) % len(self.audio_ids[internal_sequence_id]) + + # get data ids + audio_id = self.audio_ids[internal_sequence_id][index] + image_id = self.image_ids[internal_sequence_id][index] + + #print('GET ITEM: ', index) + #img_path = self.frame_paths[sequence_id][index] + + # intrinsics and extrinsics + intrinsics = self.intrinsics[internal_sequence_id] + extrinsics = self.extrinsics[internal_sequence_id][image_id] + + # expressions + expressions = np.asarray(self.expressions[internal_sequence_id][audio_id], dtype=np.float32) + #print('expressions:', expressions.shape) + expressions[32] *= 0.0 # remove eye brow movements + expressions[41] *= 0.0 # remove eye brow movements + expressions[71:75] *= 0.0 # remove eye brow movements + expressions = torch.tensor(expressions) + + + + # identity + identity = torch.tensor(self.identities[internal_sequence_id][image_id]) + target_id = self.target_id[internal_sequence_id] # sequence id refers to the target sequence (of the training corpus) + + # load deepspeech feature + #print('audio_id', audio_id) + dsf_fname = os.path.join(self.audio_feature_dir[internal_sequence_id], str(audio_id) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf = transforms.ToTensor()(dsf_np.astype(np.float32)) + + # load sequence data if necessary + last_valid_idx = audio_id + for i in range(1,self.opt.seq_len): + index_seq = index - i + if index_seq < 0: index_seq = 0 + audio_id_seq = self.audio_ids[internal_sequence_id][index_seq] + if audio_id_seq == audio_id - i: last_valid_idx = audio_id_seq + else: audio_id_seq = last_valid_idx + + dsf_fname = os.path.join(self.audio_feature_dir[internal_sequence_id], str(audio_id_seq) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... 
current] + + + + #weight = 1.0 / len(self.audio_feature_dir[internal_sequence_id]) + weight = self.weights[global_index] + + return {'paths': dsf_fname, #img_path, + 'intrinsics': np.array(intrinsics), + 'extrinsics': np.array(extrinsics), + 'expressions': expressions, + 'identity': identity, + 'audio_deepspeech': dsf, # deepspeech feature + 'target_id':target_id, + 'internal_id':internal_sequence_id, + + 'weight': np.array([weight]).astype(np.float32)} + + + def __getitem__(self, global_index): + # select frame from sequence + index = global_index + current = self.getitem(index) + prv = self.getitem(max(index-1, 0)) + nxt = self.getitem(min(index+1, self.n_frames_total-1)) + + return { + 'paths': current['paths'], #img_path, + 'target_id': current['target_id'], + 'internal_id': current['internal_id'], + 'weight': current['weight'], + 'identity': current['identity'], + 'intrinsics': current['intrinsics'], + + 'extrinsics': current['extrinsics'], + 'expressions': current['expressions'], + 'audio_deepspeech': current['audio_deepspeech'], + + 'extrinsics_prv': prv['extrinsics'], + 'expressions_prv': prv['expressions'], + 'audio_deepspeech_prv': prv['audio_deepspeech'], + + 'extrinsics_nxt': nxt['extrinsics'], + 'expressions_nxt': nxt['expressions'], + 'audio_deepspeech_nxt': nxt['audio_deepspeech'], + } + + + def __len__(self): + return self.n_frames_total #len(self.frame_paths[0]) + + def name(self): + return 'MultiFaceAudioEQTmpDataset' diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/models/__init__.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/models/__init__.py new file mode 100644 index 0000000..4d92091 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/models/__init__.py @@ -0,0 +1,39 @@ +import importlib +from models.base_model import BaseModel + + +def find_model_using_name(model_name): + # Given the option --model [modelname], + # the file "models/modelname_model.py" + # will be imported. + model_filename = "models." + model_name + "_model" + modellib = importlib.import_module(model_filename) + + # In the file, the class called ModelNameModel() will + # be instantiated. It has to be a subclass of BaseModel, + # and it is case-insensitive. + model = None + target_model_name = model_name.replace('_', '') + 'model' + for name, cls in modellib.__dict__.items(): + if name.lower() == target_model_name.lower() \ + and issubclass(cls, BaseModel): + model = cls + + if model is None: + print("In %s.py, there should be a subclass of BaseModel with class name that matches %s in lowercase." 
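A self-contained sketch of the name-based model discovery in models/__init__.py below: "--model foo_bar" imports models/foo_bar_model.py and picks the class whose lowercased name equals "foobarmodel". The real code additionally requires issubclass(cls, BaseModel); this sketch only checks for a class to stay import-free.

import importlib

def sketch_find_model(model_name):
    # e.g. 'audio2ExpressionsAttentionTMP4' -> models.audio2ExpressionsAttentionTMP4_model
    module = importlib.import_module('models.%s_model' % model_name)
    wanted = model_name.replace('_', '') + 'model'
    for name, cls in vars(module).items():
        if isinstance(cls, type) and name.lower() == wanted.lower():
            return cls
    raise ImportError('no model class matching %s' % wanted)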
% (model_filename, target_model_name)) + exit(0) + + return model + + +def get_option_setter(model_name): + model_class = find_model_using_name(model_name) + return model_class.modify_commandline_options + + +def create_model(opt): + model = find_model_using_name(opt.model) + instance = model() + instance.initialize(opt) + print("model [%s] was created" % (instance.name())) + return instance diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/models/audio2ExpressionsAttentionTMP4_model.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/models/audio2ExpressionsAttentionTMP4_model.py new file mode 100644 index 0000000..7ba8d49 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/models/audio2ExpressionsAttentionTMP4_model.py @@ -0,0 +1,342 @@ +import torch +import torch.nn as nn +from util.image_pool import ImagePool +from .base_model import BaseModel +from . import networks + +from BaselModel.basel_model import * + +################ +### HELPER ### +################ + +INVALID_UV = -1.0 + + +class ExpressionEstimator_Attention(nn.Module): + def __init__(self, n_output_expressions, nIdentities, seq_len, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False): + super(ExpressionEstimator_Attention, self).__init__() + print('Estimator Attention') + ################################# + ######## audio net ########## + ################################# + self.seq_len = seq_len + + dropout_rate = 0.0 + if use_dropout == True: + #dropout_rate = 0.5 + dropout_rate = 0.25 + + self.convNet = nn.Sequential( + nn.Conv2d(29, 32, kernel_size=(3,1), stride=(2,1), padding=(1,0), bias=True), # 29 x 16 x 1 => 32 x 8 x 1 + nn.LeakyReLU(0.02, True), + nn.Conv2d(32, 32, kernel_size=(3,1), stride=(2,1), padding=(1,0), bias=True), # 32 x 8 x 1 => 32 x 4 x 1 + nn.LeakyReLU(0.02, True), + nn.Conv2d(32, 64, kernel_size=(3,1), stride=(2,1), padding=(1,0), bias=True), # 32 x 4 x 1 => 64 x 2 x 1 + nn.LeakyReLU(0.2, True), + nn.Conv2d(64, 64, kernel_size=(3,1), stride=(2,1), padding=(1,0), bias=True), # 64 x 2 x 1 => 64 x 1 x 1 + nn.LeakyReLU(0.2, True), + ) + + fullNet_input_size = 64 + + self.subspace_dim = 32 # number of audio expressions + print('fullNet_input_size: ', fullNet_input_size) + self.fullNet = nn.Sequential( + nn.Linear(in_features = fullNet_input_size, out_features=128, bias = True), + nn.LeakyReLU(0.02), + + nn.Linear(in_features = 128, out_features=64, bias = True), + nn.LeakyReLU(0.02), + + nn.Linear(in_features = 64, out_features=self.subspace_dim, bias = True), + nn.Tanh() + ) + + + # mapping from subspace to full expression space + self.register_parameter('mapping', torch.nn.Parameter(torch.randn(1, nIdentities, N_EXPRESSIONS, self.subspace_dim, requires_grad=True))) + + # attention + self.attentionConvNet = nn.Sequential( # b x subspace_dim x seq_len + nn.Conv1d(self.subspace_dim, 16, kernel_size=3, stride=1, padding=1, bias=True), + nn.LeakyReLU(0.02, True), + nn.Conv1d(16, 8, kernel_size=3, stride=1, padding=1, bias=True), + nn.LeakyReLU(0.02, True), + nn.Conv1d(8, 4, kernel_size=3, stride=1, padding=1, bias=True), + nn.LeakyReLU(0.02, True), + nn.Conv1d(4, 2, kernel_size=3, stride=1, padding=1, bias=True), + nn.LeakyReLU(0.02, True), + nn.Conv1d(2, 1, kernel_size=3, stride=1, padding=1, bias=True), + nn.LeakyReLU(0.02, True) + ) + self.attentionNet = nn.Sequential( + nn.Linear(in_features = self.seq_len, out_features=self.seq_len, bias = True), + nn.Softmax(dim=1) + ) + #self.hidden2subspace = 
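A minimal sketch of the attention pooling that getAudioExpressions_internal below performs: per-frame 32-d subspace vectors are scored by the small attention conv net, softmax-normalised over time, and combined into a single vector by a batched matmul. Names and shapes here are illustrative; only the pooling step is shown.

import torch

def sketch_attention_pool(subspace_seq, attention_logits):
    # subspace_seq: b x seq_len x 32; attention_logits: b x seq_len
    attn = torch.softmax(attention_logits, dim=1).unsqueeze(2)   # b x L x 1
    pooled = torch.bmm(subspace_seq.transpose(1, 2), attn)       # b x 32 x 1
    return pooled.squeeze(2)                                     # b x 32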
nn.Linear(self.subspace_dim,self.subspace_dim) + + def forward_internal(self, audio_features_sequence, identity_id): + result_subspace, intermediate_expression = self.getAudioExpressions_internal(audio_features_sequence) + mapping = torch.index_select(self.mapping[0], dim = 0, index = identity_id) + result = 10.0 * torch.bmm(mapping, result_subspace)[:,:,0] + result_intermediate = 10.0 * torch.bmm(mapping, intermediate_expression)[:,:,0] + return result, result_intermediate + + def forward(self, audio_features_sequence, identity_id): + result_subspace = self.getAudioExpressions(audio_features_sequence) + mapping = torch.index_select(self.mapping[0], dim = 0, index = identity_id) + result = torch.bmm(mapping, result_subspace)[:,:,0] + return 10.0 * result + + def getAudioExpressions_internal(self, audio_features_sequence): + # audio_features_sequence: b x seq_len x 16 x 29 + b = audio_features_sequence.shape[0] # batchsize + audio_features_sequence = audio_features_sequence.view(b * self.seq_len, 1, 16, 29) # b * seq_len x 1 x 16 x 29 + audio_features_sequence = torch.transpose(audio_features_sequence, 1, 3) # b* seq_len x 29 x 16 x 1 + conv_res = self.convNet( audio_features_sequence ) + conv_res = torch.reshape( conv_res, (b * self.seq_len, 1, -1)) + result_subspace = self.fullNet(conv_res)[:,0,:] # b * seq_len x subspace_dim + result_subspace = result_subspace.view(b, self.seq_len, self.subspace_dim)# b x seq_len x subspace_dim + + ################# + ### attention ### + ################# + result_subspace_T = torch.transpose(result_subspace, 1, 2) # b x subspace_dim x seq_len + intermediate_expression = result_subspace_T[:,:,(self.seq_len // 2):(self.seq_len // 2) + 1] + att_conv_res = self.attentionConvNet(result_subspace_T) + #print('att_conv_res', att_conv_res.shape) + attention = self.attentionNet(att_conv_res.view(b, self.seq_len)).view(b, self.seq_len, 1) # b x seq_len x 1 + #print('attention', attention.shape) + # pooling along the sequence dimension + result_subspace = torch.bmm(result_subspace_T, attention) + #print('result_subspace', result_subspace.shape) + ### + + return result_subspace.view(b, self.subspace_dim, 1), intermediate_expression + + def getAudioExpressions(self, audio_features_sequence): + expr, _ = self.getAudioExpressions_internal(audio_features_sequence) + return expr + + def regularizer(self): + #reg = torch.norm(self.mapping) + reg_mapping = torch.mean(torch.abs(self.mapping)) + + # one could also enforce orthogonality here + + # s_browExpressions[] = { 32, 41, 71, 72, 73, 74, 75 }; + reg_eye_brow = torch.mean(torch.abs( self.mapping[0,:,[32, 41, 71, 72, 73, 74, 75],:] )) + #return 0.01 * reg_mapping + 1.0 * reg_eye_brow + return 0.0 * reg_mapping + + + +def define_ExpressionEstimator(estimatorType='estimatorDefault', nIdentities=1, seq_len=1, norm='batch', use_dropout=False, init_type='normal', init_gain=0.02, gpu_ids=[]): + net = None + print('EstimatorType: ', estimatorType) + if estimatorType=='estimatorAttention': net = ExpressionEstimator_Attention(N_EXPRESSIONS,nIdentities, seq_len) + + return networks.init_net(net, init_type, init_gain, gpu_ids) + + +class Audio2ExpressionsAttentionTMP4Model(BaseModel): + def name(self): + return 'Audio2ExpressionsAttentionTMP4Model' + + @staticmethod + def modify_commandline_options(parser, is_train=True): + + # changing the default values to match the pix2pix paper + # (https://phillipi.github.io/pix2pix/) + #parser.set_defaults(norm='batch', netG='unet_256') + parser.set_defaults(norm='instance', netG='unet_256') 
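+        # (Editorial note, not part of the original patch: a minimal usage
+        #  sketch of how these defaults are consumed, mirroring
+        #  train_audio2expressionsAttentionTMP.sh later in this diff:
+        #      python train.py --dataroot ./datasets/ARD_ZDF \
+        #          --model audio2ExpressionsAttentionTMP4 \
+        #          --dataset_mode multi_face_audio_eq_tmp_cached \
+        #          --rendererType estimatorAttention --lossType RMS \
+        #          --seq_len 8 --batch_size 16 --norm instance --no_lsgan
+        #  Anything set via set_defaults here can still be overridden on the
+        #  command line.)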
+ parser.set_defaults(dataset_mode='aligned') + if is_train: + parser.set_defaults(pool_size=0, no_lsgan=True) + parser.add_argument('--lambda_L1', type=float, default=100.0, help='weight for L1 loss') + + return parser + + def initialize(self, opt): + BaseModel.initialize(self, opt) + self.isTrain = opt.isTrain + + self.trainRenderer = not opt.fix_renderer + + # specify the training losses you want to print out. The program will call base_model.get_current_losses + self.loss_names = ['G_L1','G_L1_ABSOLUTE','G_L1_RELATIVE', 'G_Regularizer'] + + # specify the images you want to save/display. The program will call base_model.get_current_visuals + #self.visual_names = ['input_uv', 'fake', 'target'] + self.visual_names = ['zeros'] + self.zeros = torch.zeros(1,3,2,2) + + # specify the models you want to save to the disk. The program will call base_model.save_networks and base_model.load_networks + if self.isTrain: + self.model_names = ['netG'] + else: # during test time, only load Gs + self.model_names = ['netG'] + + self.fake_expressions = None + self.fake_expressions_prv = None + self.fake_expressions_nxt = None + + self.morphable_model = MorphableModel() + self.mask = self.morphable_model.LoadMask() + + nIdentities=opt.nTrainObjects + + # load/define networks + self.netG = define_ExpressionEstimator(estimatorType=opt.rendererType, nIdentities=nIdentities, seq_len=opt.seq_len, gpu_ids=self.gpu_ids) + + if self.isTrain: + use_sigmoid = opt.no_lsgan + self.fake_AB_pool = ImagePool(opt.pool_size) + + # define loss functions + self.criterionL1 = torch.nn.L1Loss() + self.criterionL1Smooth = torch.nn.SmoothL1Loss() + self.criterionL2 = torch.nn.MSELoss() + + # initialize optimizers + self.optimizers = [] + self.optimizer_G = torch.optim.Adam(self.netG.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999), weight_decay=0.0 ) + self.optimizers.append(self.optimizer_G) + + + def set_input(self, input): + self.image_paths = input['paths'] + + self.expressions = input['expressions'].cuda() + self.audio_features = input['audio_deepspeech'].cuda() # b x seq_len x 16 x 29 + + if self.isTrain: + self.expressions_prv = input['expressions_prv'].cuda() + self.audio_features_prv = input['audio_deepspeech_prv'].cuda() # b x seq_len x 16 x 29 + + self.expressions_nxt = input['expressions_nxt'].cuda() + self.audio_features_nxt = input['audio_deepspeech_nxt'].cuda() # b x seq_len x 16 x 29 + + self.target_id = input['target_id'].cuda() + + + def forward(self): + # estimate expressions + if self.opt.output_audio_expressions: #self.opt.dataset_mode=='audio': + self.fake_expressions = self.netG.getAudioExpressions(self.audio_features) + if self.isTrain: + self.fake_expressions_prv = self.netG.getAudioExpressions(self.audio_features_prv) + self.fake_expressions_nxt = self.netG.getAudioExpressions(self.audio_features_nxt) + else: + self.fake_expressions, self.fake_expressions_intermediate = self.netG.forward_internal(self.audio_features, self.target_id) + if self.isTrain: + self.fake_expressions_prv = self.netG(self.audio_features_prv, self.target_id) + self.fake_expressions_nxt = self.netG(self.audio_features_nxt, self.target_id) + + + + def backward_G(self, epoch): + + # Second, G(A) = B + #self.loss_G_L1 = self.criterionL1(self.fake_expressions, self.expressions) + + # difference in vertex space + mask = torch.cat([self.mask[:,None],self.mask[:,None],self.mask[:,None]], 1) + mask = mask + 0.1 * torch.ones_like(mask) # priority for the mask region, but other region should also be constrained + + # absolute (single 
timesteps) + diff_expression = self.fake_expressions - self.expressions + diff_vertices = self.morphable_model.compute_expression_delta(diff_expression) + + + diff_expression_intermediate = self.fake_expressions_intermediate - self.expressions + diff_vertices_intermediate = self.morphable_model.compute_expression_delta(diff_expression_intermediate) + + + diff_expression_prv = self.fake_expressions_prv - self.expressions_prv + diff_vertices_prv = self.morphable_model.compute_expression_delta(diff_expression_prv) + + diff_expression_nxt = self.fake_expressions_nxt - self.expressions_nxt + diff_vertices_nxt = self.morphable_model.compute_expression_delta(diff_expression_nxt) + + # relative (temporal 1 timestep) cur - nxt and prv - cur + diff_expression_tmp_cur_nxt = (self.fake_expressions - self.fake_expressions_nxt) - (self.expressions - self.expressions_nxt) + diff_vertices_tmp_cur_nxt = self.morphable_model.compute_expression_delta(diff_expression_tmp_cur_nxt) + diff_expression_tmp_prv_cur = (self.fake_expressions_prv - self.fake_expressions) - (self.expressions_prv - self.expressions) + diff_vertices_tmp_prv_cur = self.morphable_model.compute_expression_delta(diff_expression_tmp_prv_cur) + + # relative (temporal 2 timesteps) nxt - prv + diff_expression_tmp_nxt_prv = (self.fake_expressions_nxt - self.fake_expressions_prv) - (self.expressions_nxt - self.expressions_prv) + diff_vertices_tmp_nxt_prv = self.morphable_model.compute_expression_delta(diff_expression_tmp_nxt_prv) + + #print('mask: ', mask.shape) + #print('diff_vertices: ', diff_vertices.shape) + + self.loss_G_L1_ABSOLUTE = 0.0 + self.loss_G_L1_RELATIVE = 0.0 + if self.opt.lossType == 'L1': + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * torch.abs(diff_vertices)) # scale brings meters to millimeters + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * torch.abs(diff_vertices_prv)) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * torch.abs(diff_vertices_nxt)) + + self.loss_G_L1_ABSOLUTE += 3000.0 * torch.mean(mask * torch.abs(diff_vertices_intermediate)) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * torch.abs(diff_vertices_tmp_cur_nxt)) + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * torch.abs(diff_vertices_tmp_prv_cur)) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * torch.abs(diff_vertices_tmp_nxt_prv)) + + elif self.opt.lossType == 'L2': + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * diff_vertices * diff_vertices) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * diff_vertices_prv * diff_vertices_prv) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * diff_vertices_nxt * diff_vertices_nxt) + + self.loss_G_L1_ABSOLUTE += 3000.0 * torch.mean(mask * diff_vertices_intermediate * diff_vertices_intermediate) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * diff_vertices_tmp_cur_nxt * diff_vertices_tmp_cur_nxt) + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * diff_vertices_tmp_prv_cur * diff_vertices_tmp_prv_cur) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * diff_vertices_tmp_nxt_prv * diff_vertices_tmp_nxt_prv) + + elif self.opt.lossType == 'RMS': + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.sqrt(torch.mean(mask * diff_vertices * diff_vertices)) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.sqrt(torch.mean(mask * diff_vertices_prv * diff_vertices_prv)) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.sqrt(torch.mean(mask * diff_vertices_nxt * diff_vertices_nxt)) + + self.loss_G_L1_ABSOLUTE += 3000.0 * torch.sqrt(torch.mean(mask * 
diff_vertices_intermediate * diff_vertices_intermediate)) + + + self.loss_G_L1_RELATIVE += 20000.0 * torch.sqrt(torch.mean(mask * diff_vertices_tmp_cur_nxt * diff_vertices_tmp_cur_nxt)) + self.loss_G_L1_RELATIVE += 20000.0 * torch.sqrt(torch.mean(mask * diff_vertices_tmp_prv_cur * diff_vertices_tmp_prv_cur)) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.sqrt(torch.mean(mask * diff_vertices_tmp_nxt_prv * diff_vertices_tmp_nxt_prv)) + + else: # L1 + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * torch.abs(diff_vertices)) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * torch.abs(diff_vertices_prv)) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * torch.abs(diff_vertices_nxt)) + + self.loss_G_L1_ABSOLUTE += 3000.0 * torch.mean(mask * torch.abs(diff_vertices_intermediate)) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * torch.abs(diff_vertices_tmp_cur_nxt)) + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * torch.abs(diff_vertices_tmp_prv_cur)) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * torch.abs(diff_vertices_tmp_nxt_prv)) + + self.loss_G_L1 = self.loss_G_L1_ABSOLUTE + self.loss_G_L1_RELATIVE + self.loss_G_Regularizer = self.netG.regularizer() + + self.loss_G = self.loss_G_L1 + self.loss_G_Regularizer + + + self.loss_G.backward() + + def optimize_parameters(self, epoch_iter): + self.forward() + + # update Generator + self.optimizer_G.zero_grad() + self.backward_G(epoch_iter) + self.optimizer_G.step() + diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/models/base_model.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/models/base_model.py new file mode 100644 index 0000000..04489f6 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/models/base_model.py @@ -0,0 +1,222 @@ +import os +import torch +from collections import OrderedDict +from . 
import networks +import numpy as np +from PIL import Image + +def save_tensor_image(input_image, image_path): + if isinstance(input_image, torch.Tensor): + image_tensor = input_image.data + image_numpy = image_tensor[0].cpu().float().numpy() + if image_numpy.shape[0] == 1: + image_numpy = np.tile(image_numpy, (3, 1, 1)) + image_numpy = (np.transpose(image_numpy, (1, 2, 0)) + 1) / 2.0 * 255.0 + else: + image_numpy = input_image + image_numpy = image_numpy.astype(np.uint8) + image_pil = Image.fromarray(image_numpy) + image_pil.save(image_path) + +class BaseModel(): + + # modify parser to add command line options, + # and also change the default values if needed + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def name(self): + return 'BaseModel' + + def initialize(self, opt): + self.opt = opt + self.gpu_ids = opt.gpu_ids + self.isTrain = opt.isTrain + self.device = torch.device('cuda:{}'.format(self.gpu_ids[0])) if self.gpu_ids else torch.device('cpu') + self.load_dir = os.path.join(opt.checkpoints_dir, opt.name) + self.save_dir = os.path.join(opt.checkpoints_dir, opt.name) + if opt.resize_or_crop != 'scale_width': + torch.backends.cudnn.benchmark = True + self.loss_names = [] + self.model_names = [] + self.visual_names = [] + self.image_paths = [] + + def set_input(self, input): + pass + + def forward(self): + pass + + # load and print networks; create schedulers + def setup(self, opt, parser=None): + if self.isTrain: + self.schedulers = [networks.get_scheduler(optimizer, opt) for optimizer in self.optimizers] + if not self.isTrain or opt.continue_train: + load_suffix = 'iter_%d' % opt.load_iter if opt.load_iter > 0 else opt.epoch + self.load_networks(load_suffix) + self.print_networks(opt.verbose) + + + + # load specific moudles + def loadModules(self, opt, model_name, module_names): + for name in module_names: + if isinstance(name, str): + load_dir = os.path.join(opt.checkpoints_dir, model_name) + load_filename = 'latest_%s.pth' % (name) + load_path = os.path.join(load_dir, load_filename) + net = getattr(self, name) + if isinstance(net, torch.Tensor): + print('loading the tensor from %s' % load_path) + net_loaded = torch.load(load_path, map_location=str(self.device)) + net.copy_(net_loaded) + else: + # if isinstance(net, torch.nn.DataParallel): + # net = net.module + print('loading the module from %s' % load_path) + # if you are using PyTorch newer than 0.4 (e.g., built from + # GitHub source), you can remove str() on self.device + state_dict = torch.load(load_path, map_location=str(self.device)) + if hasattr(state_dict, '_metadata'): + del state_dict._metadata + + # patch InstanceNorm checkpoints prior to 0.4 + for key in list(state_dict.keys()): # need to copy keys here because we mutate in loop + self.__patch_instance_norm_state_dict(state_dict, net, key.split('.')) + net.load_state_dict(state_dict) + + + + + # make models eval mode during test time + def eval(self): + for name in self.model_names: + if isinstance(name, str): + net = getattr(self, name) + net.eval() + + # used in test time, wrapping `forward` in no_grad() so we don't save + # intermediate steps for backprop + def test(self): + with torch.no_grad(): + self.forward() + + # get image paths + def get_image_paths(self): + return self.image_paths + + def optimize_parameters(self): + pass + + # update learning rate (called once every epoch) + def update_learning_rate(self): + for scheduler in self.schedulers: + scheduler.step() + lr = self.optimizers[0].param_groups[0]['lr'] + 
print('learning rate = %.7f' % lr) + + # return visualization images. train.py will display these images, and save the images to a html + def get_current_visuals(self): + visual_ret = OrderedDict() + for name in self.visual_names: + if isinstance(name, str): + visual_ret[name] = getattr(self, name) + return visual_ret + + # return traning losses/errors. train.py will print out these errors as debugging information + def get_current_losses(self): + errors_ret = OrderedDict() + for name in self.loss_names: + if isinstance(name, str): + # float(...) works for both scalar tensor and float number + errors_ret[name] = float(getattr(self, 'loss_' + name)) + return errors_ret + + # save models to the disk + def save_networks(self, epoch): + for name in self.model_names: + if isinstance(name, str): + save_filename = '%s_%s.pth' % (epoch, name) + save_path = os.path.join(self.save_dir, save_filename) + net = getattr(self, name) + + if isinstance(net, torch.Tensor): + #torch.save(net.state_dict(), save_path) + torch.save(net, save_path) + for i in range(0, list(net.size())[0]): + save_tensor_image(net[i:i+1,0:3,:,:], save_path+str(i)+'.png') + else: + if len(self.gpu_ids) > 0 and torch.cuda.is_available(): + #torch.save(net.module.cpu().state_dict(), save_path) # << original + torch.save(net.cpu().state_dict(), save_path) + net.cuda(self.gpu_ids[0]) + else: + torch.save(net.cpu().state_dict(), save_path) + + def __patch_instance_norm_state_dict(self, state_dict, module, keys, i=0): + key = keys[i] + if i + 1 == len(keys): # at the end, pointing to a parameter/buffer + if module.__class__.__name__.startswith('InstanceNorm') and \ + (key == 'running_mean' or key == 'running_var'): + if getattr(module, key) is None: + state_dict.pop('.'.join(keys)) + if module.__class__.__name__.startswith('InstanceNorm') and \ + (key == 'num_batches_tracked'): + state_dict.pop('.'.join(keys)) + else: + self.__patch_instance_norm_state_dict(state_dict, getattr(module, key), keys, i + 1) + + # load models from the disk + def load_networks(self, epoch): + for name in self.model_names: + if isinstance(name, str): + load_filename = '%s_%s.pth' % (epoch, name) + load_path = os.path.join(self.load_dir, load_filename) + net = getattr(self, name) + if isinstance(net, torch.Tensor): + print('loading the tensor from %s' % load_path) + net_loaded = torch.load(load_path, map_location=str(self.device)) + net.copy_(net_loaded) + else: + # if isinstance(net, torch.nn.DataParallel): + # net = net.module + print('loading the module from %s' % load_path) + # if you are using PyTorch newer than 0.4 (e.g., built from + # GitHub source), you can remove str() on self.device + state_dict = torch.load(load_path, map_location=str(self.device)) + if hasattr(state_dict, '_metadata'): + del state_dict._metadata + + # patch InstanceNorm checkpoints prior to 0.4 + for key in list(state_dict.keys()): # need to copy keys here because we mutate in loop + self.__patch_instance_norm_state_dict(state_dict, net, key.split('.')) + net.load_state_dict(state_dict) + + # print network information + def print_networks(self, verbose): + print('---------- Networks initialized -------------') + for name in self.model_names: + if isinstance(name, str): + net = getattr(self, name) + if isinstance(net, torch.Tensor): + num_params = net.numel() + print('[Tensor %s] Total number of parameters : %.3f M' % (name, num_params / 1e6)) + else: + num_params = 0 + for param in net.parameters(): + num_params += param.numel() + if verbose: + print(net) + print('[Network %s] 
Total number of parameters : %.3f M' % (name, num_params / 1e6)) + print('-----------------------------------------------') + + # set requies_grad=False to avoid computation + def set_requires_grad(self, nets, requires_grad=False): + if not isinstance(nets, list): + nets = [nets] + for net in nets: + if net is not None: + for param in net.parameters(): + param.requires_grad = requires_grad diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/models/networks.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/models/networks.py new file mode 100644 index 0000000..a8c6505 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/models/networks.py @@ -0,0 +1,383 @@ +import torch +import torch.nn as nn +from torch.nn import init +import functools +from torch.optim import lr_scheduler + +############################################################################### +# Helper Functions +############################################################################### + + +def get_norm_layer(norm_type='instance'): + if norm_type == 'batch': + norm_layer = functools.partial(nn.BatchNorm2d, affine=True) + elif norm_type == 'instance': + norm_layer = functools.partial(nn.InstanceNorm2d, affine=False, track_running_stats=False) + elif norm_type == 'none': + norm_layer = None + else: + raise NotImplementedError('normalization layer [%s] is not found' % norm_type) + return norm_layer + + +def get_scheduler(optimizer, opt): + if opt.lr_policy == 'lambda': + def lambda_rule(epoch): + lr_l = 1.0 - max(0, epoch + opt.epoch_count - opt.niter) / float(opt.niter_decay + 1) + return lr_l + scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda_rule) + elif opt.lr_policy == 'step': + scheduler = lr_scheduler.StepLR(optimizer, step_size=opt.lr_decay_iters, gamma=0.1) + elif opt.lr_policy == 'plateau': + scheduler = lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.2, threshold=0.01, patience=5) + elif opt.lr_policy == 'cosine': + scheduler = lr_scheduler.CosineAnnealingLR(optimizer, T_max=opt.niter, eta_min=0) + else: + return NotImplementedError('learning rate policy [%s] is not implemented', opt.lr_policy) + return scheduler + + +def init_weights(net, init_type='normal', gain=0.02): + def init_func(m): + classname = m.__class__.__name__ + if hasattr(m, 'weight') and (classname.find('Conv') != -1 or classname.find('Linear') != -1): + if init_type == 'normal': + init.normal_(m.weight.data, 0.0, gain) + elif init_type == 'xavier': + init.xavier_normal_(m.weight.data, gain=gain) + elif init_type == 'kaiming': + init.kaiming_normal_(m.weight.data, a=0, mode='fan_in') + elif init_type == 'orthogonal': + init.orthogonal_(m.weight.data, gain=gain) + else: + raise NotImplementedError('initialization method [%s] is not implemented' % init_type) + if hasattr(m, 'bias') and m.bias is not None: + init.constant_(m.bias.data, 0.0) + elif classname.find('BatchNorm2d') != -1: + init.normal_(m.weight.data, 1.0, gain) + init.constant_(m.bias.data, 0.0) + + print('initialize network with %s' % init_type) + net.apply(init_func) + + +def init_net(net, init_type='normal', init_gain=0.02, gpu_ids=[]): + if len(gpu_ids) > 0: + assert(torch.cuda.is_available()) + net.to(gpu_ids[0]) + #net = torch.nn.DataParallel(net, gpu_ids) + init_weights(net, init_type, gain=init_gain) + return net + + +def define_G(input_nc, output_nc, ngf, netG, norm='batch', use_dropout=False, init_type='normal', init_gain=0.02, gpu_ids=[]): + net = None + norm_layer = 
get_norm_layer(norm_type=norm) + + if netG == 'resnet_9blocks': + net = ResnetGenerator(input_nc, output_nc, ngf, norm_layer=norm_layer, use_dropout=use_dropout, n_blocks=9) + elif netG == 'resnet_6blocks': + net = ResnetGenerator(input_nc, output_nc, ngf, norm_layer=norm_layer, use_dropout=use_dropout, n_blocks=6) + elif netG == 'unet_128': + net = UnetGenerator(input_nc, output_nc, 7, ngf, norm_layer=norm_layer, use_dropout=use_dropout) + elif netG == 'unet_256': + net = UnetGenerator(input_nc, output_nc, 8, ngf, norm_layer=norm_layer, use_dropout=use_dropout) + else: + raise NotImplementedError('Generator model name [%s] is not recognized' % netG) + return init_net(net, init_type, init_gain, gpu_ids) + + +def define_D(input_nc, ndf, netD, + n_layers_D=3, norm='batch', use_sigmoid=False, init_type='normal', init_gain=0.02, gpu_ids=[]): + net = None + norm_layer = get_norm_layer(norm_type=norm) + + if netD == 'basic': + net = NLayerDiscriminator(input_nc, ndf, n_layers=3, norm_layer=norm_layer, use_sigmoid=use_sigmoid) + elif netD == 'n_layers': + net = NLayerDiscriminator(input_nc, ndf, n_layers_D, norm_layer=norm_layer, use_sigmoid=use_sigmoid) + elif netD == 'pixel': + net = PixelDiscriminator(input_nc, ndf, norm_layer=norm_layer, use_sigmoid=use_sigmoid) + else: + raise NotImplementedError('Discriminator model name [%s] is not recognized' % net) + return init_net(net, init_type, init_gain, gpu_ids) + + +############################################################################## +# Classes +############################################################################## + + +# Defines the GAN loss which uses either LSGAN or the regular GAN. +# When LSGAN is used, it is basically same as MSELoss, +# but it abstracts away the need to create the target label tensor +# that has the same size as the input +class GANLoss(nn.Module): + def __init__(self, use_lsgan=True, target_real_label=1.0, target_fake_label=0.0): + super(GANLoss, self).__init__() + self.register_buffer('real_label', torch.tensor(target_real_label)) + self.register_buffer('fake_label', torch.tensor(target_fake_label)) + if use_lsgan: + self.loss = nn.MSELoss() + else: + self.loss = nn.BCELoss() + + def get_target_tensor(self, input, target_is_real): + if target_is_real: + target_tensor = self.real_label + else: + target_tensor = self.fake_label + return target_tensor.expand_as(input) + + def __call__(self, input, target_is_real): + target_tensor = self.get_target_tensor(input, target_is_real) + return self.loss(input, target_tensor) + + +# Defines the generator that consists of Resnet blocks between a few +# downsampling/upsampling operations. +# Code and idea originally from Justin Johnson's architecture. 
+# https://github.com/jcjohnson/fast-neural-style/ +class ResnetGenerator(nn.Module): + def __init__(self, input_nc, output_nc, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False, n_blocks=6, padding_type='reflect'): + assert(n_blocks >= 0) + super(ResnetGenerator, self).__init__() + self.input_nc = input_nc + self.output_nc = output_nc + self.ngf = ngf + if type(norm_layer) == functools.partial: + use_bias = norm_layer.func == nn.InstanceNorm2d + else: + use_bias = norm_layer == nn.InstanceNorm2d + + model = [nn.ReflectionPad2d(3), + nn.Conv2d(input_nc, ngf, kernel_size=7, padding=0, + bias=use_bias), + norm_layer(ngf), + nn.ReLU(True)] + + n_downsampling = 2 + for i in range(n_downsampling): + mult = 2**i + model += [nn.Conv2d(ngf * mult, ngf * mult * 2, kernel_size=3, + stride=2, padding=1, bias=use_bias), + norm_layer(ngf * mult * 2), + nn.ReLU(True)] + + mult = 2**n_downsampling + for i in range(n_blocks): + model += [ResnetBlock(ngf * mult, padding_type=padding_type, norm_layer=norm_layer, use_dropout=use_dropout, use_bias=use_bias)] + + for i in range(n_downsampling): + mult = 2**(n_downsampling - i) + model += [nn.ConvTranspose2d(ngf * mult, int(ngf * mult / 2), + kernel_size=3, stride=2, + padding=1, output_padding=1, + bias=use_bias), + norm_layer(int(ngf * mult / 2)), + nn.ReLU(True)] + model += [nn.ReflectionPad2d(3)] + model += [nn.Conv2d(ngf, output_nc, kernel_size=7, padding=0)] + model += [nn.Tanh()] + + self.model = nn.Sequential(*model) + + def forward(self, input): + return self.model(input) + + +# Define a resnet block +class ResnetBlock(nn.Module): + def __init__(self, dim, padding_type, norm_layer, use_dropout, use_bias): + super(ResnetBlock, self).__init__() + self.conv_block = self.build_conv_block(dim, padding_type, norm_layer, use_dropout, use_bias) + + def build_conv_block(self, dim, padding_type, norm_layer, use_dropout, use_bias): + conv_block = [] + p = 0 + if padding_type == 'reflect': + conv_block += [nn.ReflectionPad2d(1)] + elif padding_type == 'replicate': + conv_block += [nn.ReplicationPad2d(1)] + elif padding_type == 'zero': + p = 1 + else: + raise NotImplementedError('padding [%s] is not implemented' % padding_type) + + conv_block += [nn.Conv2d(dim, dim, kernel_size=3, padding=p, bias=use_bias), + norm_layer(dim), + nn.ReLU(True)] + if use_dropout: + conv_block += [nn.Dropout(0.5)] + + p = 0 + if padding_type == 'reflect': + conv_block += [nn.ReflectionPad2d(1)] + elif padding_type == 'replicate': + conv_block += [nn.ReplicationPad2d(1)] + elif padding_type == 'zero': + p = 1 + else: + raise NotImplementedError('padding [%s] is not implemented' % padding_type) + conv_block += [nn.Conv2d(dim, dim, kernel_size=3, padding=p, bias=use_bias), + norm_layer(dim)] + + return nn.Sequential(*conv_block) + + def forward(self, x): + out = x + self.conv_block(x) + return out + + +# Defines the Unet generator. +# |num_downs|: number of downsamplings in UNet. 
For example, +# if |num_downs| == 7, image of size 128x128 will become of size 1x1 +# at the bottleneck +class UnetGenerator(nn.Module): + def __init__(self, input_nc, output_nc, num_downs, ngf=64, + norm_layer=nn.BatchNorm2d, use_dropout=False): + super(UnetGenerator, self).__init__() + + # construct unet structure + unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer, innermost=True) + for i in range(num_downs - 5): + unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer, use_dropout=use_dropout) + unet_block = UnetSkipConnectionBlock(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(ngf * 2, ngf * 4, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True, norm_layer=norm_layer) + + self.model = unet_block + + def forward(self, input): + return self.model(input) + + +# Defines the submodule with skip connection. +# X -------------------identity---------------------- X +# |-- downsampling -- |submodule| -- upsampling --| +class UnetSkipConnectionBlock(nn.Module): + def __init__(self, outer_nc, inner_nc, input_nc=None, + submodule=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d, use_dropout=False): + super(UnetSkipConnectionBlock, self).__init__() + self.outermost = outermost + if type(norm_layer) == functools.partial: + use_bias = norm_layer.func == nn.InstanceNorm2d + else: + use_bias = norm_layer == nn.InstanceNorm2d + if input_nc is None: + input_nc = outer_nc + downconv = nn.Conv2d(input_nc, inner_nc, kernel_size=4, + stride=2, padding=1, bias=use_bias) + downrelu = nn.LeakyReLU(0.2, True) + downnorm = norm_layer(inner_nc) + uprelu = nn.ReLU(True) + upnorm = norm_layer(outer_nc) + + if outermost: + upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, + kernel_size=4, stride=2, + padding=1) + down = [downconv] + up = [uprelu, upconv, nn.Tanh()] + model = down + [submodule] + up + elif innermost: + upconv = nn.ConvTranspose2d(inner_nc, outer_nc, + kernel_size=4, stride=2, + padding=1, bias=use_bias) + down = [downrelu, downconv] + up = [uprelu, upconv, upnorm] + model = down + up + else: + upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, + kernel_size=4, stride=2, + padding=1, bias=use_bias) + down = [downrelu, downconv, downnorm] + up = [uprelu, upconv, upnorm] + + if use_dropout: + model = down + [submodule] + up + [nn.Dropout(0.5)] + else: + model = down + [submodule] + up + + self.model = nn.Sequential(*model) + + def forward(self, x): + if self.outermost: + return self.model(x) + else: + return torch.cat([x, self.model(x)], 1) + + +# Defines the PatchGAN discriminator with the specified arguments. 
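+# (Editorial sketch, not part of the original patch: with the defaults below
+#  (kw=4, padw=1, n_layers=3 as used by define_D('basic')) and a 256x256 input,
+#  the stack is three stride-2 convolutions (64, 128, 256 filters) followed by
+#  a stride-1 convolution (512 filters) and a final stride-1 convolution with
+#  one output channel. That yields a 30x30 map of real/fake scores, each score
+#  covering a 70x70 receptive field: the classic pix2pix "70x70 PatchGAN".
+#  Since GANLoss averages over this score map, the discriminator penalizes
+#  structure at the patch scale rather than judging the image globally.)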
+class NLayerDiscriminator(nn.Module): + def __init__(self, input_nc, ndf=64, n_layers=3, norm_layer=nn.BatchNorm2d, use_sigmoid=False): + super(NLayerDiscriminator, self).__init__() + if type(norm_layer) == functools.partial: + use_bias = norm_layer.func == nn.InstanceNorm2d + else: + use_bias = norm_layer == nn.InstanceNorm2d + + kw = 4 + padw = 1 + sequence = [ + nn.Conv2d(input_nc, ndf, kernel_size=kw, stride=2, padding=padw), + nn.LeakyReLU(0.2, True) + ] + + nf_mult = 1 + nf_mult_prev = 1 + for n in range(1, n_layers): + nf_mult_prev = nf_mult + nf_mult = min(2**n, 8) + sequence += [ + nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult, + kernel_size=kw, stride=2, padding=padw, bias=use_bias), + norm_layer(ndf * nf_mult), + nn.LeakyReLU(0.2, True) + ] + + nf_mult_prev = nf_mult + nf_mult = min(2**n_layers, 8) + sequence += [ + nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult, + kernel_size=kw, stride=1, padding=padw, bias=use_bias), + norm_layer(ndf * nf_mult), + nn.LeakyReLU(0.2, True) + ] + + sequence += [nn.Conv2d(ndf * nf_mult, 1, kernel_size=kw, stride=1, padding=padw)] + + if use_sigmoid: + sequence += [nn.Sigmoid()] + + self.model = nn.Sequential(*sequence) + + def forward(self, input): + return self.model(input) + + +class PixelDiscriminator(nn.Module): + def __init__(self, input_nc, ndf=64, norm_layer=nn.BatchNorm2d, use_sigmoid=False): + super(PixelDiscriminator, self).__init__() + if type(norm_layer) == functools.partial: + use_bias = norm_layer.func == nn.InstanceNorm2d + else: + use_bias = norm_layer == nn.InstanceNorm2d + + self.net = [ + nn.Conv2d(input_nc, ndf, kernel_size=1, stride=1, padding=0), + nn.LeakyReLU(0.2, True), + nn.Conv2d(ndf, ndf * 2, kernel_size=1, stride=1, padding=0, bias=use_bias), + norm_layer(ndf * 2), + nn.LeakyReLU(0.2, True), + nn.Conv2d(ndf * 2, 1, kernel_size=1, stride=1, padding=0, bias=use_bias)] + + if use_sigmoid: + self.net.append(nn.Sigmoid()) + + self.net = nn.Sequential(*self.net) + + def forward(self, input): + return self.net(input) diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/options/__init__.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/options/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/options/base_options.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/options/base_options.py new file mode 100644 index 0000000..b85c6ce --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/options/base_options.py @@ -0,0 +1,138 @@ +import argparse +import os +from util import util +import torch +import models +import data + + +class BaseOptions(): + def __init__(self): + self.initialized = False + + def initialize(self, parser): + parser.add_argument('--dataroot', required=True, help='path to images (should have subfolders trainA, trainB, valA, valB, etc)') + parser.add_argument('--batch_size', type=int, default=1, help='input batch size') + parser.add_argument('--seq_len', type=int, default=1, help='sequence length (if applicable)') + parser.add_argument('--fineSize', type=int, default=512, help='then crop to this size') + parser.add_argument('--display_winsize', type=int, default=256, help='display window size for both visdom and HTML') + parser.add_argument('--input_nc', type=int, default=3, help='# of input image channels') + parser.add_argument('--output_nc', type=int, default=3, help='# of output image channels') + parser.add_argument('--ngf', 
type=int, default=64, help='# of gen filters in first conv layer') + parser.add_argument('--ndf', type=int, default=64, help='# of discrim filters in first conv layer') + parser.add_argument('--netD', type=str, default='basic', help='selects model to use for netD') + parser.add_argument('--netG', type=str, default='resnet_9blocks', help='selects model to use for netG') + parser.add_argument('--n_layers_D', type=int, default=3, help='only used if netD==n_layers') + parser.add_argument('--gpu_ids', type=str, default='0', help='gpu ids: e.g. 0 0,1,2, 0,2. use -1 for CPU') + parser.add_argument('--name', type=str, default='experiment_name', help='name of the experiment. It decides where to store samples and models') + parser.add_argument('--renderer', type=str, default='no_renderer', help='name of the renderer to load the models from') + parser.add_argument('--fix_renderer', action='store_true', help='renderer is fixed') + parser.add_argument('--dataset_mode', type=str, default='aligned', help='chooses how datasets are loaded. [aligned | multi]') + parser.add_argument('--model', type=str, default='cycle_gan', help='chooses which model to use. cycle_gan, pix2pix, test') + parser.add_argument('--direction', type=str, default='AtoB', help='AtoB or BtoA') + parser.add_argument('--epoch', type=str, default='latest', help='which epoch to load? set to latest to use latest cached model') + parser.add_argument('--load_iter', type=int, default='0', help='which iteration to load? if load_iter > 0, the code will load models by iter_[load_iter]; otherwise, the code will load models by [epoch]') + parser.add_argument('--num_threads', default=4, type=int, help='# threads for loading data') + parser.add_argument('--checkpoints_dir', type=str, default='./checkpoints', help='models are saved here') + parser.add_argument('--norm', type=str, default='instance', help='instance normalization or batch normalization') + parser.add_argument('--serial_batches', action='store_true', help='if true, takes images in order to make batches, otherwise takes them randomly') + parser.add_argument('--no_dropout', action='store_true', help='no dropout for the generator') + parser.add_argument('--max_dataset_size', type=int, default=float("inf"), help='Maximum number of samples allowed per dataset. 
If the dataset directory contains more than max_dataset_size, only a subset is loaded.')
+        parser.add_argument('--resize_or_crop', type=str, default='resize_and_crop', help='scaling and cropping of images at load time [resize_and_crop|crop|scale_width|scale_width_and_crop|none]')
+        parser.add_argument('--no_augmentation', action='store_true', help='if specified, no data augmentation')
+        #parser.add_argument('--init_type', type=str, default='normal', help='network initialization [normal|xavier|kaiming|orthogonal]')
+        parser.add_argument('--init_type', type=str, default='xavier', help='network initialization [normal|xavier|kaiming|orthogonal]')
+        parser.add_argument('--init_gain', type=float, default=0.02, help='scaling factor for normal, xavier and orthogonal.')
+        parser.add_argument('--verbose', action='store_true', help='if specified, print more debugging information')
+        parser.add_argument('--suffix', default='', type=str, help='customized suffix: opt.name = opt.name + suffix: e.g., {model}_{netG}_size{loadSize}')
+        parser.add_argument('--tex_dim', type=int, default=256, help='neural texture dimensions')
+        parser.add_argument('--tex_features_intermediate', type=int, default=16, help='# intermediate neural texture features when using dynamic textures')
+        parser.add_argument('--tex_features', type=int, default=16, help='# neural texture features')
+        parser.add_argument('--textureModel', type=str, default='DynamicNeuralTextureAudio', help='texture model')
+        parser.add_argument('--rendererType', type=str, default='UNET_5_level', help='neural renderer network')
+        parser.add_argument('--lossType', type=str, default='L1', help='loss type for the final output')
+
+        parser.add_argument('--hierarchicalTex', action='store_true', help='if specified, hierarchical neural textures are used')
+
+        parser.add_argument('--output_audio_expressions', action='store_true', help='if specified, output the raw audio-expression (subspace) coefficients instead of person-specific expressions')
+
+        parser.add_argument('--erosionFactor', type=float, default=1.0, help='scaling factor for erosion of the background.')
+
+        parser.add_argument('--audio_window_size', type=float, default=16, help='audio window size = #mel feature bins')
+
+        parser.add_argument('--look_ahead', action='store_true', help='cache images in numpy format')
+
+        parser.add_argument('--cached_images', action='store_true', help='cache images in numpy format')
+
+        self.initialized = True
+        return parser
+
+    def gather_options(self):
+        # initialize parser with basic options
+        if not self.initialized:
+            parser = argparse.ArgumentParser(
+                formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+            parser = self.initialize(parser)
+
+        # get the basic options
+        opt, _ = parser.parse_known_args()
+
+        # modify model-related parser options
+        model_name = opt.model
+        model_option_setter = models.get_option_setter(model_name)
+        parser = model_option_setter(parser, self.isTrain)
+        opt, _ = parser.parse_known_args()  # parse again with the new defaults
+
+        # modify dataset-related parser options
+        dataset_name = opt.dataset_mode
+        dataset_option_setter = data.get_option_setter(dataset_name)
+        parser = dataset_option_setter(parser, self.isTrain)
+
+        self.parser = parser
+
+        return parser.parse_args()
+
+    def print_options(self, opt):
+        message = ''
+        message += '----------------- Options ---------------\n'
+        for k, v in sorted(vars(opt).items()):
+            comment = ''
+            default = self.parser.get_default(k)
+            if v != default:
+                comment = '\t[default: %s]' % str(default)
+            message += '{:>25}: {:<30}{}\n'.format(str(k), str(v), comment)
+        message += '----------------- End -------------------'
+        print(message)
+
+        # save to the disk
+        expr_dir = os.path.join(opt.checkpoints_dir, opt.name)
+        util.mkdirs(expr_dir)
+        file_name = os.path.join(expr_dir, 'opt.txt')
+        with open(file_name, 'wt') as opt_file:
+            opt_file.write(message)
+            opt_file.write('\n')
+
+    def parse(self):
+
+        opt = self.gather_options()
+        opt.isTrain = self.isTrain  # train or test
+
+        # process opt.suffix
+        if opt.suffix:
+            suffix = ('_' + opt.suffix.format(**vars(opt))) if opt.suffix != '' else ''
+            opt.name = opt.name + suffix
+
+        self.print_options(opt)
+
+        # set gpu ids
+        str_ids = opt.gpu_ids.split(',')
+        opt.gpu_ids = []
+        for str_id in str_ids:
+            id = int(str_id)
+            if id >= 0:
+                opt.gpu_ids.append(id)
+        if len(opt.gpu_ids) > 0:
+            torch.cuda.set_device(opt.gpu_ids[0])
+
+        self.opt = opt
+        return self.opt
diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/options/test_options.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/options/test_options.py
new file mode 100644
index 0000000..d21da35
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/options/test_options.py
@@ -0,0 +1,27 @@
+from .base_options import BaseOptions
+
+
+class TestOptions(BaseOptions):
+    def initialize(self, parser):
+        parser = BaseOptions.initialize(self, parser)
+        parser.add_argument('--ntest', type=int, default=float("inf"), help='# of test examples.')
+        parser.add_argument('--results_dir', type=str, default='./results/', help='saves results here.')
+        parser.add_argument('--aspect_ratio', type=float, default=1.0, help='aspect ratio of result images')
+        parser.add_argument('--phase', type=str, default='test', help='train, val, test, etc')
+        # Dropout and BatchNorm behave differently during training and test.
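+        # (Editorial note: this is why the --eval flag below exists; when set,
+        #  test.py calls model.eval(), so BatchNorm layers use their running
+        #  statistics and Dropout is disabled at inference time.)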
+        parser.add_argument('--eval', action='store_true', help='use eval mode during test time.')
+        parser.add_argument('--num_test', type=int, default=50, help='how many test images to run')
+
+        parser.add_argument('--write_no_images', action='store_true', help='compute validation')
+
+        parser.add_argument('--write_video', action='store_true', help='write video')
+        parser.add_argument('--video_fps', type=float, default=25.0, help='video fps')
+
+
+        parser.add_argument('--source_dir', type=str, default='./datasets/', help='loads source files (expressions, audio, uvs).')
+
+        parser.set_defaults(model='test')
+        # To avoid cropping, the loadSize should be the same as fineSize
+        parser.set_defaults(loadSize=parser.get_default('fineSize'))
+        self.isTrain = False
+        return parser
diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/options/train_options.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/options/train_options.py
new file mode 100644
index 0000000..c91d50d
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/options/train_options.py
@@ -0,0 +1,37 @@
+from .base_options import BaseOptions
+
+
+class TrainOptions(BaseOptions):
+    def initialize(self, parser):
+        parser = BaseOptions.initialize(self, parser)
+        parser.add_argument('--display_freq', type=int, default=400, help='frequency of showing training results on screen')
+        parser.add_argument('--display_ncols', type=int, default=4, help='if positive, display all images in a single visdom web panel with certain number of images per row.')
+        parser.add_argument('--display_id', type=int, default=1, help='window id of the web display')
+        parser.add_argument('--display_server', type=str, default="http://localhost", help='visdom server of the web display')
+        parser.add_argument('--display_env', type=str, default='main', help='visdom display environment name (default is "main")')
+        parser.add_argument('--display_port', type=int, default=8097, help='visdom port of the web display')
+
+        parser.add_argument('--compute_val', action='store_true', help='compute validation')
+        parser.add_argument('--input_noise_augmentation', action='store_true', help='add input noise')
+
+        parser.add_argument('--update_html_freq', type=int, default=1000, help='frequency of saving training results to html')
+        parser.add_argument('--print_freq', type=int, default=500, help='frequency of showing training results on console')
+        parser.add_argument('--save_latest_freq', type=int, default=5000, help='frequency of saving the latest results')
+        parser.add_argument('--save_epoch_freq', type=int, default=5, help='frequency of saving checkpoints at the end of epochs')
+        parser.add_argument('--save_by_iter', action='store_true', help='whether saves model by iteration')
+
+        parser.add_argument('--continue_train', action='store_true', help='continue training: load the latest model')
+        parser.add_argument('--epoch_count', type=int, default=1, help='the starting epoch count, we save the model by <epoch_count>, <epoch_count>+<save_latest_freq>, ...')
+        parser.add_argument('--phase', type=str, default='train', help='train, val, test, etc')
+        parser.add_argument('--niter', type=int, default=100, help='# of iter at starting learning rate')
+        parser.add_argument('--niter_decay', type=int, default=100, help='# of iter to linearly decay learning rate to zero')
+        parser.add_argument('--beta1', type=float, default=0.5, help='momentum term of adam')
+        parser.add_argument('--lr', type=float, default=0.0002, help='initial learning rate for adam')
+        parser.add_argument('--no_lsgan', action='store_true', help='do *not* use least square GAN, if false, use vanilla GAN')
+        parser.add_argument('--pool_size', type=int, default=50, help='the size of image buffer that stores previously generated images')
+        parser.add_argument('--no_html', action='store_true', help='do not save intermediate training results to [opt.checkpoints_dir]/[opt.name]/web/')
+        parser.add_argument('--lr_policy', type=str, default='lambda', help='learning rate policy: lambda|step|plateau|cosine')
+        parser.add_argument('--lr_decay_iters', type=int, default=50, help='multiply by a gamma every lr_decay_iters iterations')
+
+        self.isTrain = True
+        return parser
diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/options/transfer_options.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/options/transfer_options.py
new file mode 100644
index 0000000..3fb0709
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/options/transfer_options.py
@@ -0,0 +1,30 @@
+from .base_options import BaseOptions
+
+
+class TransferOptions(BaseOptions):
+    def initialize(self, parser):
+        parser = BaseOptions.initialize(self, parser)
+        parser.add_argument('--ntest', type=int, default=float("inf"), help='# of test examples.')
+        parser.add_argument('--results_dir', type=str, default='./results/', help='saves results here.')
+        parser.add_argument('--aspect_ratio', type=float, default=1.0, help='aspect ratio of result images')
+        parser.add_argument('--phase', type=str, default='test', help='train, val, test, etc')
+        # Dropout and BatchNorm behave differently during training and test.
+        parser.add_argument('--eval', action='store_true', help='use eval mode during test time.')
+        parser.add_argument('--num_test', type=int, default=50, help='how many test images to run')
+
+        parser.add_argument('--write_no_images', action='store_true', help='compute validation')
+
+
+        parser.add_argument('--source_dir', type=str, default='./datasets/', help='loads source files (expressions, audio, uvs).')
+
+
+
+        parser.add_argument('--source_actor', type=str, default='', help='source actor directory')
+        parser.add_argument('--target_actor', type=str, default='', help='target actor directory')
+
+
+        parser.set_defaults(model='test')
+        # To avoid cropping, the loadSize should be the same as fineSize
+        parser.set_defaults(loadSize=parser.get_default('fineSize'))
+        self.isTrain = False
+        return parser
diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/req.txt b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/req.txt
new file mode 100644
index 0000000..5252532
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/req.txt
@@ -0,0 +1,60 @@
+audioread==2.1.6
+certifi==2018.11.29
+cffi==1.12.1
+chardet==3.0.4
+chumpy==0.68
+cycler==0.10.0
+decorator==4.3.2
+dominate==2.3.5
+EasyProcess==0.2.5
+freetype-py==2.1.0.post1
+future==0.17.1
+idna==2.8
+imageio==2.5.0
+joblib==0.13.2
+kiwisolver==1.0.1
+librosa==0.6.3
+llvmlite==0.27.1
+matplotlib==3.0.3
+mkl-fft==1.0.6
+mkl-random==1.0.1
+moderngl==5.5.0
+networkx==2.2
+neural-renderer-pytorch==1.1.3
+numba==0.42.1
+numpy==1.16.1
+olefile==0.46
+opencv-python==4.1.1.26
+OpenEXR==1.3.2
+Pillow==5.4.1
+progressbar2==3.46.1
+pycparser==2.19
+pyglet==1.4.0b1
+PyOpenGL==3.1.0
+pyparsing==2.3.1
+pyrender==0.1.24
+python-dateutil==2.8.0
+python-Levenshtein==0.12.0
+python-utils==2.3.0
+PyVirtualDisplay==0.2.1
+PyWavelets==1.0.3
+pyzmq==18.0.0 +requests==2.21.0 +resampy==0.2.1 +scikit-image==0.15.0 +scikit-learn==0.20.2 +scipy==1.2.1 +Shapely==1.6.4.post2 +six==1.12.0 +soft-renderer==1.0.0 +torch==1.0.1.post2 +torchaudio==0.2 +torchfile==0.1.0 +torchvision==0.2.1 +tornado==5.1.1 +tqdm==4.31.1 +trimesh==2.38.19 +urllib3==1.24.1 +visdom==0.1.8.8 +websocket-client==0.54.0 +wget==3.2 diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/requirements.txt b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/requirements.txt new file mode 100644 index 0000000..072d027 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/requirements.txt @@ -0,0 +1,4 @@ +torch>=0.4.0 +torchvision>=0.2.1 +dominate>=2.3.1 +visdom>=0.1.8.3 diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/test.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/test.py new file mode 100644 index 0000000..13c1bf7 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/test.py @@ -0,0 +1,169 @@ +import os +from options.test_options import TestOptions +from data import CreateDataLoader +from models import create_model +from util.visualizer import save_images +from util import html +from util import util +from scipy.misc import imresize + +import torch +import numpy as np +from PIL import Image +import time +import cv2 + + +def save_tensor_image(input_image, image_path): + if isinstance(input_image, torch.Tensor): + image_tensor = input_image.data + image_numpy = image_tensor[0].cpu().float().numpy() + if image_numpy.shape[0] == 1: + image_numpy = np.tile(image_numpy, (3, 1, 1)) + image_numpy = (np.transpose(image_numpy, (1, 2, 0)) + 1) / 2.0 * 255.0 + else: + image_numpy = input_image + image_numpy = image_numpy.astype(np.uint8) + image_pil = Image.fromarray(image_numpy) + image_pil.save(image_path) + +if __name__ == '__main__': + opt = TestOptions().parse() + # hard-code some parameters for test + opt.num_threads = 1 # test code only supports num_threads = 1 + opt.batch_size = 1 # test code only supports batch_size = 1 + opt.serial_batches = True # no shuffle + opt.no_augmentation = True # no flip + opt.display_id = -1 # no visdom display + + data_loader = CreateDataLoader(opt) + dataset = data_loader.load_data() + dataset_size = len(data_loader) + print('#test images = %d' % dataset_size) + print('#train objects = %d' % opt.nTrainObjects) + print('#test objects = %d' % opt.nTestObjects) + + + print('>>> create model <<<') + model = create_model(opt) + print('>>> setup model <<<') + model.setup(opt) + #save_tensor_image(model.texture.data[0:1,0:3,:,:], 'load_test1.png') + + + sum_time = 0 + total_runs = dataset_size + warm_up = 50 + + # create a website + web_dirs = [] + webpages = [] + file_expressions = [] + file_fake_expressions = [] + file_rigids = [] + file_intrinsics = [] + file_identities = [] + video_writer = [] + + print('>>> create a websites and output directories <<<') + for i in range(0, opt.nTestObjects): + #web_dir = os.path.join(opt.results_dir, opt.name, '%s--%s__%s_%s' % (opt.test_sequence_names[i][0], opt.test_sequence_names[i][1], opt.phase, opt.epoch) ) + web_dir = os.path.join(opt.results_dir, opt.name, '%s--%s' % (opt.test_sequence_names[i][0], opt.test_sequence_names[i][1]) ) + webpage = html.HTML(web_dir, 'Experiment = %s, Phase = %s, Epoch = %s' % (opt.name, opt.phase, opt.epoch)) + + ## + output_file_expressions=None + output_file_fake_expressions=None + output_file_rigids = None + 
output_file_intrinsics = None + output_file_identities = None + if hasattr(model, 'fake_expressions'): + output_file_expressions=open(os.path.join(web_dir, 'gt_expressions.txt'), 'w') + output_file_fake_expressions=open(os.path.join(web_dir, 'expression.txt'), 'w') + output_file_rigids=open(os.path.join(web_dir, 'rigid.txt'), 'w') + output_file_intrinsics=open(os.path.join(web_dir, 'intrinsics.txt'), 'w') + output_file_identities=open(os.path.join(web_dir, 'identities.txt'), 'w') + ## + + web_dirs.append(web_dir) + webpages.append(webpage) + file_expressions.append(output_file_expressions) + file_fake_expressions.append(output_file_fake_expressions) + file_rigids.append(output_file_rigids) + file_intrinsics.append(output_file_intrinsics) + file_identities.append(output_file_identities) + + if opt.write_video: + writer = cv2.VideoWriter(web_dir+'.mp4', cv2.VideoWriter_fourcc(*"mp4v"), 25.0,(opt.display_winsize,opt.display_winsize)) + video_writer.append(writer) + + # test with eval mode. This only affects layers like batchnorm and dropout. + # pix2pix: we use batchnorm and dropout in the original pix2pix. You can experiment it with and without eval() mode. + # CycleGAN: It should not affect CycleGAN as CycleGAN uses instancenorm without dropout. + if opt.eval: + model.eval() + for i, data in enumerate(dataset): + #if i >= opt.num_test: + # break + model.set_input(data) + test_sequence_id = data['internal_id'].cpu() + + + torch.cuda.synchronize() + a = time.perf_counter() + + model.test() + + torch.cuda.synchronize() # added sync + b = time.perf_counter() + + if i > warm_up: # give torch some time to warm up + sum_time += ((b-a) * 1000) + + visuals = model.get_current_visuals() + img_path = model.get_image_paths() + if i % 5 == 0: + print('processing (%04d)-th image... 
%s' % (i, img_path)) + #if not hasattr(model, 'fake_expressions'): + if not opt.write_no_images: + save_images(webpages[test_sequence_id], visuals, img_path, aspect_ratio=opt.aspect_ratio, width=opt.display_winsize) + + if opt.write_video: + fake = visuals['fake'] + im = util.tensor2im(fake) + im = imresize(im, (opt.display_winsize,opt.display_winsize), interp='bicubic') + im = np.concatenate([im[:,:,2:3], im[:,:,1:2], im[:,:,0:1]],axis=2) + #video_writer[test_sequence_id].write(np.random.randint(0, 255, (opt.display_winsize,opt.display_winsize,3)).astype('uint8')) + video_writer[test_sequence_id].write(im.astype('uint8')) + + if hasattr(model, 'fake_expressions'): + #print('contains fake expressions') + np.savetxt(file_fake_expressions[test_sequence_id], model.fake_expressions.data.cpu().numpy(), delimiter=' ') + if hasattr(model, 'expressions'): + np.savetxt(file_expressions[test_sequence_id], model.expressions.data.cpu().numpy(), delimiter=' ') + + np.savetxt(file_rigids[test_sequence_id], data['extrinsics'][0].data.cpu().numpy(), delimiter=' ') + np.savetxt(file_intrinsics[test_sequence_id], data['intrinsics'].data.cpu().numpy(), delimiter=' ') + np.savetxt(file_identities[test_sequence_id], data['identity'].data.cpu().numpy(), delimiter=' ') + + + #if i < 50: + # if hasattr(model, 'face_model'): + # image_dir = webpage.get_image_dir() + # name = os.path.splitext(short_path)[0] + # + # filename_mesh = name + '.obj' + # #filename_tex = name + '_audio_texture.png' + # #filename_mat = name + '.mtl' + # #model.face_model.save_model_to_obj_file(image_dir, filename_mesh, filename_mat, filename_tex) + # model.face_model.save_model_to_obj_file(image_dir, filename_mesh) + + print('mean eval time (ms): ', (sum_time / (total_runs - warm_up))) + + # save the website + for webpage in webpages: + webpage.save() + + if opt.write_video: + for writer in video_writer: + writer.release() diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/test.sh b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/test.sh new file mode 100644 index 0000000..2a5d23a --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/test.sh @@ -0,0 +1,2 @@ +set -ex +python test.py --dataroot ./datasets/facades --name facades_pix2pix --model neuralRenderer --netG unet_256 --direction BtoA --dataset_mode aligned --norm batch diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/train.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/train.py new file mode 100644 index 0000000..dfdd454 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/train.py @@ -0,0 +1,114 @@ +import time +import copy +import torch +from options.train_options import TrainOptions +from data import CreateDataLoader +from models import create_model +from util.visualizer import Visualizer + +if __name__ == '__main__': + # training dataset + opt = TrainOptions().parse() + data_loader = CreateDataLoader(opt) + dataset = data_loader.load_data() + dataset_size = len(data_loader) + print('#training images = %d' % dataset_size) + print('#training objects = %d' % opt.nTrainObjects) + + ## validation dataset + if opt.compute_val: + opt_validation = copy.copy(opt) # create a clone + opt_validation.phase = 'val' + opt_validation.serial_batches = True + opt_validation.isTrain = False + data_loader_validation = CreateDataLoader(opt_validation) + dataset_validation = data_loader_validation.load_data() + dataset_size_validation = 
len(data_loader_validation) + print('#validation images = %d' % dataset_size_validation) + print('#validation objects = %d' % opt_validation.nValObjects) + + # model + model = create_model(opt) + model.setup(opt) + + if opt.renderer != 'no_renderer': + print('load renderer') + model.loadModules(opt, opt.renderer, ['netD', 'netG']) + + visualizer = Visualizer(opt) + total_steps = 0 + + for epoch in range(opt.epoch_count, opt.niter + opt.niter_decay + 1): + epoch_start_time = time.time() + iter_data_time = time.time() + epoch_iter = 0 # iterator within an epoch + + for i, data in enumerate(dataset): + iter_start_time = time.time() + if total_steps % opt.print_freq == 0: + t_data = iter_start_time - iter_data_time + visualizer.reset() + total_steps += opt.batch_size + epoch_iter += opt.batch_size + + model.set_input(data) + model.optimize_parameters(epoch) + + if total_steps % opt.display_freq == 0: + save_result = total_steps % opt.update_html_freq == 0 + visualizer.display_current_results(model.get_current_visuals(), epoch, save_result) + + if total_steps % opt.print_freq == 0: + losses = model.get_current_losses() + + if opt.compute_val: + validation_error = 0 + cnt = 0 + for val_data in dataset_validation: + model.set_input(val_data) + model.forward() + model.backward_G(epoch) # be careful with the gradients (they are zeroed in the optimization step) + validation_error += model.loss_G.detach().cpu() + cnt += 1.0 + validation_error /= cnt + #print('Validation Error:', validation_error) + #visualizer.plot_current_validation_error(epoch, float(epoch_iter) / dataset_size, {'validation_error': validation_error}) + losses.update({'validation_error': validation_error}) + + t = (time.time() - iter_start_time) / opt.batch_size + visualizer.print_current_losses(epoch, epoch_iter, losses, t, t_data) + if opt.display_id > 0: + visualizer.plot_current_losses(epoch, float(epoch_iter) / dataset_size, opt, losses) + + if total_steps % opt.save_latest_freq == 0: + print('saving the latest model (epoch %d, total_steps %d)' % (epoch, total_steps)) + save_suffix = 'iter_%d' % total_steps if opt.save_by_iter else 'latest' + model.save_networks(save_suffix) + + iter_data_time = time.time() + + if epoch % opt.save_epoch_freq == 0: + print('saving the model at the end of epoch %d, iters %d' % (epoch, total_steps)) + model.save_networks('latest') + model.save_networks(epoch) + + print('End of epoch %d / %d \t Time Taken: %d sec' % + (epoch, opt.niter + opt.niter_decay, time.time() - epoch_start_time)) + model.update_learning_rate() + + # run validation + #if epoch % opt.save_epoch_freq == 0: + # if opt.compute_val: + # validation_error = 0 + # cnt = 0 + # for i, data in enumerate(dataset_validation): + # model.set_input(data) + # model.forward() + # model.backward_G(epoch) + # validation_error += model.loss_G.detach().cpu() + # cnt += 1.0 + # + # validation_error /= cnt + # print('Validation Error:', validation_error) + # visualizer.plot_current_validation_error(epoch, {'validation_error': validation_error}) diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/train_audio2expressionsAttentionTMP.sh b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/train_audio2expressionsAttentionTMP.sh new file mode 100644 index 0000000..520edae --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/train_audio2expressionsAttentionTMP.sh @@ -0,0 +1,58 @@ +set -ex +# . 
train_audio2expressionsAttentionTMP.sh & +GPUID=0 +DATASETS_DIR=./datasets +DATASET_MODE=multi_face_audio_eq_tmp_cached +OBJECT=ARD_ZDF + +# neural texture, not used here +TEX_DIM=128 +TEX_FEATURES=16 + +# loss +#LOSS=VGG +#LOSS=L1 +LOSS=RMS +#LOSS=L4 + +# models +MODEL=audio2ExpressionsAttentionTMP4 +RENDERER_TYPE=estimatorAttention + + +# optimizer parameters +#LR=0.00001 +LR=0.0001 + +#N_ITER=150 +#N_ITER_LR_DECAY=50 + +N_ITER=20 +N_ITER_LR_DECAY=30 + +BATCH_SIZE=16 +SEQ_LEN=8 + + +RENDERER=$OBJECT +EROSION=1.0 + +################################################################################ +################################################################################ +################################################################################ +DATE_WITH_TIME=`date "+%Y%m%d-%H%M%S"` +NAME=$MODEL-$RENDERER_TYPE-SL$SEQ_LEN-BS$BATCH_SIZE-$OBJECT-$DATASET_MODE-$LOSS-$DATE_WITH_TIME-look_ahead +DISPLAY_NAME=${MODEL}-${DATASET_MODE}_${OBJECT}-${RENDERER_TYPE}-SL$SEQ_LEN-BS$BATCH_SIZE-${LOSS}-look_ahead + + +# training +# --input_noise_augmentation +python train.py --look_ahead --seq_len $SEQ_LEN --save_latest_freq 100000 --no_augmentation --compute_val --name $NAME --erosionFactor $EROSION --tex_dim $TEX_DIM --tex_features $TEX_FEATURES --rendererType $RENDERER_TYPE --lossType $LOSS --display_env $DISPLAY_NAME --niter $N_ITER --niter_decay $N_ITER_LR_DECAY --dataroot $DATASETS_DIR/$OBJECT --model $MODEL --netG unet_256 --lambda_L1 100 --dataset_mode $DATASET_MODE --no_lsgan --norm instance --pool_size 0 --gpu_ids $GPUID --lr $LR --batch_size $BATCH_SIZE + +# # testing +#EPOCH=latest +#python test.py --seq_len $SEQ_LEN --write_no_images --name $NAME --erosionFactor $EROSION --epoch $EPOCH --display_winsize 512 --tex_dim $TEX_DIM --tex_features $TEX_FEATURES --rendererType $RENDERER_TYPE --lossType $LOSS --dataroot $DATASETS_DIR/$OBJECT --model $MODEL --netG unet_256 --dataset_mode $DATASET_MODE --norm instance --gpu_ids $GPUID + +################################################################################ +################################################################################ +################################################################################ \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/util/__init__.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/util/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/util/get_data.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/util/get_data.py new file mode 100644 index 0000000..6325605 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/util/get_data.py @@ -0,0 +1,115 @@ +from __future__ import print_function +import os +import tarfile +import requests +from warnings import warn +from zipfile import ZipFile +from bs4 import BeautifulSoup +from os.path import abspath, isdir, join, basename + + +class GetData(object): + """ + + Download CycleGAN or Pix2Pix Data. + + Args: + technique : str + One of: 'cyclegan' or 'pix2pix'. + verbose : bool + If True, print additional information. + + Examples: + >>> from util.get_data import GetData + >>> gd = GetData(technique='cyclegan') + >>> new_data_path = gd.get(save_path='./datasets') # options will be displayed. 
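+ >>> # non-interactive variant; 'facades.zip' is an example archive name and + >>> # the name passed to ``dataset`` must include the file extension: + >>> new_data_path = gd.get(save_path='./datasets', dataset='facades.zip')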
+ + """ + + def __init__(self, technique='cyclegan', verbose=True): + url_dict = { + 'pix2pix': 'https://people.eecs.berkeley.edu/~tinghuiz/projects/pix2pix/datasets', + 'cyclegan': 'https://people.eecs.berkeley.edu/~taesung_park/CycleGAN/datasets' + } + self.url = url_dict.get(technique.lower()) + self._verbose = verbose + + def _print(self, text): + if self._verbose: + print(text) + + @staticmethod + def _get_options(r): + soup = BeautifulSoup(r.text, 'lxml') + options = [h.text for h in soup.find_all('a', href=True) + if h.text.endswith(('.zip', 'tar.gz'))] + return options + + def _present_options(self): + r = requests.get(self.url) + options = self._get_options(r) + print('Options:\n') + for i, o in enumerate(options): + print("{0}: {1}".format(i, o)) + choice = input("\nPlease enter the number of the " + "dataset above you wish to download:") + return options[int(choice)] + + def _download_data(self, dataset_url, save_path): + if not isdir(save_path): + os.makedirs(save_path) + + base = basename(dataset_url) + temp_save_path = join(save_path, base) + + with open(temp_save_path, "wb") as f: + r = requests.get(dataset_url) + f.write(r.content) + + if base.endswith('.tar.gz'): + obj = tarfile.open(temp_save_path) + elif base.endswith('.zip'): + obj = ZipFile(temp_save_path, 'r') + else: + raise ValueError("Unknown File Type: {0}.".format(base)) + + self._print("Unpacking Data...") + obj.extractall(save_path) + obj.close() + os.remove(temp_save_path) + + def get(self, save_path, dataset=None): + """ + + Download a dataset. + + Args: + save_path : str + A directory to save the data to. + dataset : str, optional + A specific dataset to download. + Note: this must include the file extension. + If None, options will be presented for you + to choose from. + + Returns: + save_path_full : str + The absolute path to the downloaded data. + + """ + if dataset is None: + selected_dataset = self._present_options() + else: + selected_dataset = dataset + + save_path_full = join(save_path, selected_dataset.split('.')[0]) + + if isdir(save_path_full): + warn("\n'{0}' already exists. 
Skipping download.".format( + save_path_full)) + else: + self._print('Downloading Data...') + url = "{0}/{1}".format(self.url, selected_dataset) + self._download_data(url, save_path=save_path) + + return abspath(save_path_full) diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/util/html.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/util/html.py new file mode 100644 index 0000000..1e7aab9 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/util/html.py @@ -0,0 +1,64 @@ +import dominate +from dominate.tags import meta, h3, table, tr, td, p, a, img, br +import os + + +class HTML: + def __init__(self, web_dir, title, reflesh=0): + self.title = title + self.web_dir = web_dir + self.img_dir = os.path.join(self.web_dir, 'images') + if not os.path.exists(self.web_dir): + os.makedirs(self.web_dir) + if not os.path.exists(self.img_dir): + os.makedirs(self.img_dir) + # print(self.img_dir) + + self.doc = dominate.document(title=title) + if reflesh > 0: + with self.doc.head: + meta(http_equiv="refresh", content=str(reflesh)) + + def get_image_dir(self): + return self.img_dir + + def add_header(self, text): + with self.doc: + h3(text) + + def add_table(self, border=1): + self.t = table(border=border, style="table-layout: fixed;") + self.doc.add(self.t) + + def add_images(self, ims, txts, links, width=400): + self.add_table() + with self.t: + with tr(): + for im, txt, link in zip(ims, txts, links): + with td(style="word-wrap: break-word;", halign="center", valign="top"): + with p(): + with a(href=os.path.join('images', link)): + img(style="width:%dpx" % width, src=os.path.join('images', im)) + br() + p(txt) + + def save(self): + html_file = '%s/index.html' % self.web_dir + f = open(html_file, 'wt') + f.write(self.doc.render()) + f.close() + + +if __name__ == '__main__': + html = HTML('web/', 'test_html') + html.add_header('hello world') + + ims = [] + txts = [] + links = [] + for n in range(4): + ims.append('image_%d.png' % n) + txts.append('text_%d' % n) + links.append('image_%d.png' % n) + html.add_images(ims, txts, links) + html.save() diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/util/image_pool.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/util/image_pool.py new file mode 100644 index 0000000..52413e0 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/util/image_pool.py @@ -0,0 +1,32 @@ +import random +import torch + + +class ImagePool(): + def __init__(self, pool_size): + self.pool_size = pool_size + if self.pool_size > 0: + self.num_imgs = 0 + self.images = [] + + def query(self, images): + if self.pool_size == 0: + return images + return_images = [] + for image in images: + image = torch.unsqueeze(image.data, 0) + if self.num_imgs < self.pool_size: + self.num_imgs = self.num_imgs + 1 + self.images.append(image) + return_images.append(image) + else: + p = random.uniform(0, 1) + if p > 0.5: + random_id = random.randint(0, self.pool_size - 1) # randint is inclusive + tmp = self.images[random_id].clone() + self.images[random_id] = image + return_images.append(tmp) + else: + return_images.append(image) + return_images = torch.cat(return_images, 0) + return return_images diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/util/util.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/util/util.py new file mode 100644 index 0000000..4f5ca3b --- /dev/null +++ 
b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/util/util.py @@ -0,0 +1,82 @@ +from __future__ import print_function +import torch +import numpy as np +from PIL import Image +import os +import sys +import array +import OpenEXR +import Imath + +def load_exr(image_path): + # Open the input file + file = OpenEXR.InputFile(image_path) + + # Compute the size + dw = file.header()['dataWindow'] + w, h = (dw.max.x - dw.min.x + 1, dw.max.y - dw.min.y + 1) + + # Read the three color channels as 32-bit floats + FLOAT = Imath.PixelType(Imath.PixelType.FLOAT) + #(R,G,B) = [np.array(array.array('f', file.channel(Chan, FLOAT)).tolist()).reshape((w, h, 1)) for Chan in ("R", "G", "B") ] + + (r, g, b) = file.channels("RGB") + R = np.array(array.array('f', r).tolist()).reshape((w, h, 1)) + G = np.array(array.array('f', g).tolist()).reshape((w, h, 1)) + B = np.array(array.array('f', b).tolist()).reshape((w, h, 1)) + + return np.concatenate((R, G, B), axis=2) + +# Converts a Tensor into an image array (numpy) +# |imtype|: the desired type of the converted numpy array +def tensor2im(input_image, imtype=np.uint8): + if isinstance(input_image, torch.Tensor): + input_image = torch.clamp(input_image, -1.0, 1.0) + image_tensor = input_image.data + else: + return input_image + image_numpy = image_tensor[0].cpu().float().numpy() + if image_numpy.shape[0] == 1: + image_numpy = np.tile(image_numpy, (3, 1, 1)) + image_numpy = (np.transpose(image_numpy, (1, 2, 0)) + 1) / 2.0 * 255.0 + return image_numpy.astype(imtype) + + +def diagnose_network(net, name='network'): + mean = 0.0 + count = 0 + for param in net.parameters(): + if param.grad is not None: + mean += torch.mean(torch.abs(param.grad.data)) + count += 1 + if count > 0: + mean = mean / count + print(name) + print(mean) + + +def save_image(image_numpy, image_path): + image_pil = Image.fromarray(image_numpy) + image_pil.save(image_path) + +def print_numpy(x, val=True, shp=False): + x = x.astype(np.float64) + if shp: + print('shape,', x.shape) + if val: + x = x.flatten() + print('mean = %3.3f, min = %3.3f, max = %3.3f, median = %3.3f, std=%3.3f' % ( + np.mean(x), np.min(x), np.max(x), np.median(x), np.std(x))) + + +def mkdirs(paths): + if isinstance(paths, list) and not isinstance(paths, str): + for path in paths: + mkdir(path) + else: + mkdir(paths) + + +def mkdir(path): + if not os.path.exists(path): + os.makedirs(path) diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/util/visualizer.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/util/visualizer.py new file mode 100644 index 0000000..7cb7750 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/Training Code/util/visualizer.py @@ -0,0 +1,207 @@ +import numpy as np +import os +import sys +import ntpath +import time +from . import util +from . 
import html +#from scipy.misc import imresize +import cv2 + +if sys.version_info[0] == 2: + VisdomExceptionBase = Exception +else: + VisdomExceptionBase = ConnectionError + + +# save image to the disk +def save_images(webpage, visuals, image_path, aspect_ratio=1.0, width=256): + image_dir = webpage.get_image_dir() + short_path = ntpath.basename(image_path[0]) + name = os.path.splitext(short_path)[0] + + webpage.add_header(name) + ims, txts, links = [], [], [] + + for label, im_data in visuals.items(): + im = util.tensor2im(im_data) + image_name = '%s_%s.png' % (name, label) + save_path = os.path.join(image_dir, image_name) + h, w, _ = im.shape + + height = int(width * h / float(w)) + #im = imresize(im, (height,width), interp='bicubic') + im = cv2.resize(src=im, dsize=(width, height), interpolation=cv2.INTER_CUBIC) # cv2 expects dsize as (width, height) + + #if aspect_ratio > 1.0: + # im = imresize(im, (h, int(w * aspect_ratio)), interp='bicubic') + #if aspect_ratio < 1.0: + # im = imresize(im, (int(h / aspect_ratio), w), interp='bicubic') + + util.save_image(im, save_path) + + ims.append(image_name) + txts.append(label) + links.append(image_name) + webpage.add_images(ims, txts, links, width=width) + + +class Visualizer(): + def __init__(self, opt): + self.display_id = opt.display_id + self.use_html = opt.isTrain and not opt.no_html + self.win_size = opt.display_winsize + self.name = opt.name + self.opt = opt + self.saved = False + if self.display_id > 0: + import visdom + self.ncols = opt.display_ncols + self.vis = visdom.Visdom(server=opt.display_server, port=opt.display_port, env=opt.display_env, raise_exceptions=True) + + if self.use_html: + self.web_dir = os.path.join(opt.checkpoints_dir, opt.name, 'web') + self.img_dir = os.path.join(self.web_dir, 'images') + print('create web directory %s...' % self.web_dir) + util.mkdirs([self.web_dir, self.img_dir]) + self.log_name = os.path.join(opt.checkpoints_dir, opt.name, 'loss_log.txt') + with open(self.log_name, "a") as log_file: + now = time.strftime("%c") + log_file.write('================ Training Loss (%s) ================\n' % now) + + def reset(self): + self.saved = False + + def throw_visdom_connection_error(self): + print('\n\nCould not connect to Visdom server (https://github.com/facebookresearch/visdom) for displaying training progress.\nYou can suppress connection to Visdom using the option --display_id -1. 
To install visdom, run \n$ pip install visdom\n, and start the server by \n$ python -m visdom.server.\n\n') + exit(1) + + # |visuals|: dictionary of images to display or save + def display_current_results(self, visuals, epoch, save_result, aspect_ratio=1.0, width=256): + if self.display_id > 0: # show images in the browser + ncols = self.ncols + if ncols > 0: + ncols = min(ncols, len(visuals)) + h, w = next(iter(visuals.values())).shape[2:4] + height = int(width * h / float(w)) + h = height + w = width + table_css = """<style> + table {border-collapse: separate; border-spacing: 4px; white-space: nowrap; text-align: center} + table td {width: %dpx; height: %dpx; padding: 4px; outline: 4px solid black} + </style>""" % (w, h) + title = self.name + label_html = '' + label_html_row = '' + images = [] + idx = 0 + for label, image in visuals.items(): + image_numpy = util.tensor2im(image) + image_numpy = cv2.resize(src=image_numpy, dsize=(w, h), interpolation=cv2.INTER_CUBIC) # dsize is (width, height) + image_numpy = image_numpy.transpose([2, 0, 1]) + label_html_row += '<td>%s</td>' % label + images.append(image_numpy) + idx += 1 + if idx % ncols == 0: + label_html += '<tr>%s</tr>' % label_html_row + label_html_row = '' + white_image = np.ones_like(image_numpy) * 255 + while idx % ncols != 0: + images.append(white_image) + label_html_row += '<td></td>' + idx += 1 + if label_html_row != '': + label_html += '<tr>%s</tr>' % label_html_row + # pane col = image row + try: + self.vis.images(images, nrow=ncols, win=self.display_id + 1, padding=2, opts=dict(title=title + ' images')) + label_html = '<table>%s</table>
' % label_html + self.vis.text(table_css + label_html, win=self.display_id + 2, + opts=dict(title=title + ' labels')) + except VisdomExceptionBase: + self.throw_visdom_connection_error() + + else: + idx = 1 + for label, image in visuals.items(): + image_numpy = util.tensor2im(image) + self.vis.image(image_numpy.transpose([2, 0, 1]), opts=dict(title=label), + win=self.display_id + idx) + idx += 1 + + if self.use_html and (save_result or not self.saved): # save images to a html file + self.saved = True + for label, image in visuals.items(): + image_numpy = util.tensor2im(image) + img_path = os.path.join(self.img_dir, 'epoch%.3d_%s.png' % (epoch, label)) + util.save_image(image_numpy, img_path) + # update website + webpage = html.HTML(self.web_dir, 'Experiment name = %s' % self.name, reflesh=1) + for n in range(epoch, 0, -1): + webpage.add_header('epoch [%d]' % n) + ims, txts, links = [], [], [] + + for label, image_numpy in visuals.items(): + image_numpy = util.tensor2im(image_numpy) + img_path = 'epoch%.3d_%s.png' % (n, label) + ims.append(img_path) + txts.append(label) + links.append(img_path) + webpage.add_images(ims, txts, links, width=self.win_size) + webpage.save() + + # losses: dictionary of error labels and values + def plot_current_losses(self, epoch, counter_ratio, opt, losses): + if not hasattr(self, 'plot_data'): + self.plot_data = {'X': [], 'Y': [], 'legend': list(losses.keys())} + self.plot_data['X'].append(epoch + counter_ratio) + self.plot_data['Y'].append([losses[k] for k in self.plot_data['legend']]) + try: + self.vis.line( + X=np.stack([np.array(self.plot_data['X'])] * len(self.plot_data['legend']), 1), + Y=np.array(self.plot_data['Y']), + opts={ + 'title': self.name + ' loss over time', + 'legend': self.plot_data['legend'], + 'xlabel': 'epoch', + 'ylabel': 'loss'}, + win=self.display_id) + except VisdomExceptionBase: + self.throw_visdom_connection_error() + + # losses: same format as |losses| of plot_current_losses + def print_current_losses(self, epoch, i, losses, t, t_data): + message = '(epoch: %d, iters: %d, time: %.3f, data: %.3f) ' % (epoch, i, t, t_data) + for k, v in losses.items(): + message += '%s: %.3f ' % (k, v) + + print(message) + with open(self.log_name, "a") as log_file: + log_file.write('%s\n' % message) + + # losses: dictionary of error labels and values + def plot_current_validation_error(self, epoch, counter_ratio, losses): + if not hasattr(self, 'plot_validation_data'): + self.plot_validation_data = {'X': [], 'Y': [], 'legend': list(losses.keys())} + self.plot_validation_data['X'].append(epoch + counter_ratio) + self.plot_validation_data['Y'].append([losses[k] for k in self.plot_validation_data['legend']]) + try: + self.vis.line( + X=np.stack([np.array(self.plot_validation_data['X'])] * len(self.plot_validation_data['legend']), 1), + Y=np.array(self.plot_validation_data['Y']), + opts={ + 'title': self.name + ' validation error over time', + 'legend': self.plot_validation_data['legend'], + 'xlabel': 'epoch', + 'ylabel': 'error'}, + win=self.display_id+1) + except VisdomExceptionBase: + self.throw_visdom_connection_error() \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/__init__.py b/NeuralVoicePuppetry/neural-code/Audio2ExpressionNet/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/DECA/FLAME2020.zip b/NeuralVoicePuppetry/neural-code/DECA/FLAME2020.zip new file mode 100644 index 0000000..3a00762 --- /dev/null +++ 
b/NeuralVoicePuppetry/neural-code/DECA/FLAME2020.zip @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9bfa073763c38d02068e26f72818aec8401d5d306c56b42503af4d8bffac2225 +size 153808252 diff --git a/NeuralVoicePuppetry/neural-code/DECA/deca_model.tar b/NeuralVoicePuppetry/neural-code/DECA/deca_model.tar new file mode 100644 index 0000000..c04160f --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/DECA/deca_model.tar @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e714ed293054cba5eea9c96bd3b6b57880074cd84b3fd00d606cbaf0bee7c5c2 +size 434142943 diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/.gitignore b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/.gitignore new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/BaselModel/__init__.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/BaselModel/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/BaselModel/basel_model.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/BaselModel/basel_model.py new file mode 100644 index 0000000..c84016d --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/BaselModel/basel_model.py @@ -0,0 +1,108 @@ +import os +import torch +import torch.nn as nn +import torchvision.transforms as transforms +import numpy as np + +# import soft_renderer as sr +import renderer as sr + +N_EXPRESSIONS=76 + +class MorphableModel(nn.Module): + def __init__(self, filename_average=''): + super(MorphableModel, self).__init__() + + print('Load Morphable Model (Basel)') + + #filename_mesh = os.path.join(opt.dataroot, opt.phase + '/average_model.obj') + filename_mesh = filename_average + if filename_average=='': + print('use default identity') + filename_mesh = './BaselModel/average.obj' + mesh = sr.Mesh.from_obj(filename_mesh, normalization=False, load_texture=True) + self.average_vertices = mesh.vertices[0] + self.faces = mesh.faces[0] + self.average_vertices = self.average_vertices[None, :, :] # [num_vertices, XYZ] -> [batch_size=1, num_vertices, XYZ] + self.faces = self.faces[None, :, :] # [num_faces, 3] -> [batch_size=1, num_faces, 3] + self.textures = mesh.textures + + self.num_vertices = self.average_vertices.shape[1] + self.num_faces = self.faces.shape[1] + print('vertices:', self.average_vertices.shape) + print('faces:', self.faces.shape) + + ## basis function + self.expression_basis = np.memmap('./BaselModel/ExpressionBasis.matrix', dtype='float32', mode='r').__array__()[1:] # first entry is the size + self.expression_basis = np.resize(self.expression_basis, (N_EXPRESSIONS, self.num_vertices, 4))[:,:,0:3] + self.expression_basis = torch.tensor(self.expression_basis.astype(np.float32)).cuda() # N_EXPRESSIONS x num_vertices x 3 + self.expression_basis = torch.transpose(self.expression_basis,0,2) # transpose for matmul + print('expression_basis', self.expression_basis.shape) + + + #texture_size = 2 + #self.textures = torch.ones(1, self.faces.shape[1], texture_size, texture_size, texture_size, 3, dtype=torch.float32).cuda() + #print('textures:', self.textures.shape) + + ## ## debug + ## zeroExpr = torch.zeros(1, N_EXPRESSIONS, dtype=torch.float32).cuda() + ## self.morph(zeroExpr) + ## self.save_model_to_obj_file('model_zero_expression.obj') + ## ## + ## onesExpr = torch.ones(1, N_EXPRESSIONS, dtype=torch.float32).cuda() + ## self.morph(onesExpr) + ## 
self.save_model_to_obj_file('model_ones_expression.obj') + ## exit() + ## ## + + # default expression + zeroExpr = torch.zeros(1, N_EXPRESSIONS, dtype=torch.float32).cuda() + self.morph(zeroExpr) + + + def save_model_to_obj_file(self, filename, mask=None): + faces_cpu = self.faces.detach().cpu().numpy() + vertices_cpu = self.vertices.detach().cpu().numpy() + + mask_cpu = None + if mask is not None: + mask_cpu = mask.detach().cpu().numpy() + + f = open(filename, 'w') + if mask is None: + for i in range(0, self.num_vertices): + f.write('v ' + str(vertices_cpu[0, i, 0]) + ' ' + str(vertices_cpu[0, i, 1]) + ' ' + str(vertices_cpu[0, i, 2]) + '\n') + else: + for i in range(0, self.num_vertices): + f.write('v ' + str(vertices_cpu[0, i, 0]) + ' ' + str(vertices_cpu[0, i, 1]) + ' ' + str(vertices_cpu[0, i, 2]) + ' ' + str(mask_cpu[i]) + ' ' + str(mask_cpu[i]) + ' ' + str(mask_cpu[i]) + ' 1'+ '\n') + + for i in range(0, self.num_faces): + f.write('f ' + str(faces_cpu[0, i, 0]+1) + '// ' + str(faces_cpu[0, i, 1]+1) + '// ' + str(faces_cpu[0, i, 2]+1) + '//\n') + + f.close() + + def compute_expression_delta(self, expressions): + return torch.transpose(torch.matmul(self.expression_basis, torch.transpose(expressions, 0,1)), 0, 2) # note that matmul wants to have this order: (a x b x c) x (c x m) => (a x b x m) + + def morph(self, expressions): + self.vertices = self.average_vertices + self.compute_expression_delta(expressions) + return self.vertices + + def LoadMask(self, filename=''): + if filename=='': + print('use default mask') + filename = './BaselModel/mask/defaultMask_mouth.obj' + + mask = np.zeros(self.num_vertices) + file = open(filename, 'r') + i=0 + for line in file: + if line[0] == 'v': + floats = [float(x) for x in line[1:].split()] + if floats[3] == 1.0 and floats[4] == 0.0 and floats[5] == 0.0: + mask[i] = 1.0 + i += 1 + file.close() + return torch.tensor(mask.astype(np.float32)).cuda() \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/ReadMe.md b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/ReadMe.md new file mode 100644 index 0000000..4165ff6 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/ReadMe.md @@ -0,0 +1,6 @@ +This code trains the audio-to-expression network. +-> train_audio2expressionsAttentionTMP.sh +You need to provide training data that fits your face model. +The face model is defined in "BaselModel"; you must provide the average model, a mask for the mouth, and the expression basis vectors. + +As training data, provide data that fits the "data/multi_face_audio_eq_tmp_cached_dataset.py" data loader. \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/__init__.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/__init__.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/__init__.py new file mode 100644 index 0000000..3735ac1 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/__init__.py @@ -0,0 +1,90 @@ +import importlib +import torch.utils.data +from data.base_data_loader import BaseDataLoader +from data.base_dataset import BaseDataset + + +def find_dataset_using_name(dataset_name): + # Given the option --dataset_mode [datasetname], + # the file "data/datasetname_dataset.py" + # will be imported. + dataset_filename = "data." 
+ dataset_name + "_dataset" + datasetlib = importlib.import_module(dataset_filename) + + # In the file, the class called DatasetNameDataset() will + # be instantiated. It has to be a subclass of BaseDataset, + # and it is case-insensitive. + dataset = None + target_dataset_name = dataset_name.replace('_', '') + 'dataset' + for name, cls in datasetlib.__dict__.items(): + if name.lower() == target_dataset_name.lower() \ + and issubclass(cls, BaseDataset): + dataset = cls + + if dataset is None: + print("In %s.py, there should be a subclass of BaseDataset with class name that matches %s in lowercase." % (dataset_filename, target_dataset_name)) + exit(0) + + return dataset + + +def get_option_setter(dataset_name): + dataset_class = find_dataset_using_name(dataset_name) + return dataset_class.modify_commandline_options + + +def create_dataset(opt): + dataset = find_dataset_using_name(opt.dataset_mode) + instance = dataset() + instance.initialize(opt) + print("dataset [%s] was created" % (instance.name())) + return instance + + +def CreateDataLoader(opt): + data_loader = CustomDatasetDataLoader() + data_loader.initialize(opt) + return data_loader + + +# Wrapper class of Dataset class that performs +# multi-threaded data loading +class CustomDatasetDataLoader(BaseDataLoader): + def name(self): + return 'CustomDatasetDataLoader' + + def initialize(self, opt): + BaseDataLoader.initialize(self, opt) + self.dataset = create_dataset(opt) + if opt.serial_batches: + self.dataloader = torch.utils.data.DataLoader( + self.dataset, + batch_size=opt.batch_size, + shuffle=not opt.serial_batches, + num_workers=int(opt.num_threads)) + else: + #weights = make_weights_for_balanced_classes(dataset_train.imgs, len(dataset_train.classes)) + weights = self.dataset.getSampleWeights() + weights = torch.DoubleTensor(weights) + sampler = torch.utils.data.sampler.WeightedRandomSampler(weights, len(weights)) + + self.dataloader = torch.utils.data.DataLoader( + self.dataset, + batch_size=opt.batch_size, + #shuffle=True, + sampler=sampler, + pin_memory=True, + num_workers=int(opt.num_threads)) + + + def load_data(self): + return self + + def __len__(self): + return min(len(self.dataset), self.opt.max_dataset_size) + + def __iter__(self): + for i, data in enumerate(self.dataloader): + if i * self.opt.batch_size >= self.opt.max_dataset_size: + break + yield data diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/aligned_dataset.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/aligned_dataset.py new file mode 100644 index 0000000..cfc1a74 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/aligned_dataset.py @@ -0,0 +1,209 @@ +import os.path +import random +import torchvision.transforms as transforms +import torch +import numpy as np +from data.base_dataset import BaseDataset +from data.audio import Audio +#from data.image_folder import make_dataset +from PIL import Image + +#def make_dataset(dir): +# images = [] +# assert os.path.isdir(dir), '%s is not a valid directory' % dir +# for root, _, fnames in sorted(os.walk(dir)): +# for fname in fnames: +# if any(fname.endswith(extension) for extension in ['.bin', '.BIN']): +# path = os.path.join(root, fname) +# images.append(path) +# return sorted(images) + +def make_dataset(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.bin', '.BIN']): + id_str 
= fname[:-4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + + for id in ids: + fname=str(id)+'.bin' + path = os.path.join(root, fname) + images.append(path) + return images + +def load_intrinsics(input_dir): + file = open(input_dir+"/intrinsics.txt", "r") + intrinsics = [[(float(x) for x in line.split())] for line in file] + file.close() + intrinsics = list(intrinsics[0][0]) + return intrinsics + +def load_rigids(input_dir): + file = open(input_dir+"/rigid.txt", "r") + rigid_floats = [[float(x) for x in line.split()] for line in file] # note that it stores 5 lines per matrix (blank line) + file.close() + all_rigids = [ [rigid_floats[4*idx + 0],rigid_floats[4*idx + 1],rigid_floats[4*idx + 2],rigid_floats[4*idx + 3]] for idx in range(0, len(rigid_floats)//4) ] + return all_rigids + +def load_expressions(input_dir): + file = open(input_dir+"/expression.txt", "r") + expressions = [[float(x) for x in line.split()] for line in file] + file.close() + return expressions + +def load_audio(input_dir): + audio = Audio(input_dir + '/audio.mp3', write_mel_spectogram = False) + #audio = Audio(input_dir + '/audio.mp3', write_mel_spectogram = True) + + return audio + +def make_ids(paths, root_dir): + ids = [] + + for fname in paths: + l = fname.rfind('/') + id_str = fname[l+1:-4] + i = int(id_str) + #print(fname, ': ', i) + ids.append(i) + return ids + +class AlignedDataset(BaseDataset): + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + self.opt = opt + self.root = opt.dataroot + self.data_dir = os.path.join(opt.dataroot, opt.phase) + self.frame_paths = make_dataset(self.data_dir) + self.frame_ids = make_ids(self.frame_paths, self.root) + self.intrinsics = load_intrinsics(self.data_dir) + self.extrinsics = load_rigids(self.data_dir) + self.expressions = load_expressions(self.data_dir) + + self.audio = load_audio(self.data_dir) + self.audio_window_size = opt.audio_window_size + + opt.nObjects = 1 + assert(opt.resize_or_crop == 'resize_and_crop') + + def __getitem__(self, index): + + # get video data + frame_id = index + + #print('GET ITEM: ', index) + img_path = self.frame_paths[index] + frame_id = self.frame_ids[index] + + # intrinsics and extrinsics + intrinsics = self.intrinsics + extrinsics = self.extrinsics[frame_id] + + # get audio mel sample window + frame_rate = 24#29.97 + #frame_rate = len(self.expressions) / self.audio.time_total + mel_frame_idx = int((frame_id / frame_rate) * self.audio.mel_sample_rate) + mels = self.audio.getWindow(mel_frame_idx, self.audio_window_size) + + + # expressions + expressions = torch.tensor(self.expressions[frame_id]) + + # default image dimensions + IMG_DIM_X = 512 + IMG_DIM_Y = 512 + + # load image data + #assert(IMG_DIM == self.opt.fineSize) + img_array = np.memmap(img_path, dtype='float32', mode='r').__array__() + if img_array.size != IMG_DIM_X * IMG_DIM_Y * 5: + IMG_DIM_X = int(img_array[0]) + IMG_DIM_Y = int(img_array[1]) + img_array = img_array[2:] + intrinsics = img_array[0:4] + img_array = img_array[4:] + + img_array = np.clip(img_array, 0.0, 1.0) + img = np.resize(img_array, (IMG_DIM_Y, IMG_DIM_X, 5)) + A = img[:,:,0:3] + B = img[:,:,3:5] + B = np.concatenate((B, np.zeros((IMG_DIM_Y, IMG_DIM_X, 1))), axis=2) + + TARGET = transforms.ToTensor()(A.astype(np.float32)) + UV = transforms.ToTensor()(B.astype(np.float32)) + + TARGET = 2.0 * TARGET - 1.0 + UV = 2.0 * UV - 1.0 + + + # load deepspeech feature + dsf_fname = img_path[:-4] + '.deepspeech.npy' +# print('dsf_fname:', dsf_fname) + 
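# every frame stores a precomputed DeepSpeech feature file next to its .bin + # image; it is reshaped below to a fixed 16x29 window (time steps x + # DeepSpeech output channels) and fed to the network as a 1-channel image + 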
feature_array = np.load(dsf_fname) + #feature_array = np.memmap(dsf_fname, dtype='float32', mode='r').__array__() +# print('feature_array shape: ', feature_array.shape) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf = transforms.ToTensor()(dsf_np.astype(np.float32)) + + ################################# + ####### apply augmentation ###### + ################################# + if not self.opt.no_augmentation: + # random dimensions + new_dim_x = np.random.randint(int(IMG_DIM_X * 0.75), IMG_DIM_X+1) + new_dim_y = np.random.randint(int(IMG_DIM_Y * 0.75), IMG_DIM_Y+1) + new_dim_x = int(np.floor(new_dim_x / 64.0) * 64 ) # << dependent on the network structure !! 64 => 6 layers + new_dim_y = int(np.floor(new_dim_y / 64.0) * 64 ) + if new_dim_x > IMG_DIM_X: new_dim_x -= 64 + if new_dim_y > IMG_DIM_Y: new_dim_y -= 64 + + # random pos + if IMG_DIM_X == new_dim_x: offset_x = 0 + else: offset_x = np.random.randint(0, IMG_DIM_X-new_dim_x) + if IMG_DIM_Y == new_dim_y: offset_y = 0 + else: offset_y = np.random.randint(0, IMG_DIM_Y-new_dim_y) + + # select subwindow + TARGET = TARGET[:, offset_y:offset_y+new_dim_y, offset_x:offset_x+new_dim_x] + UV = UV[:, offset_y:offset_y+new_dim_y, offset_x:offset_x+new_dim_x] + + # compute new intrinsics + # TODO: atm not needed but maybe later + + else: + new_dim_x = int(np.floor(IMG_DIM_X / 64.0) * 64 ) # << dependent on the network structure !! 64 => 6 layers + new_dim_y = int(np.floor(IMG_DIM_Y / 64.0) * 64 ) + offset_x = 0 + offset_y = 0 + # select subwindow + TARGET = TARGET[:, offset_y:offset_y+new_dim_y, offset_x:offset_x+new_dim_x] + UV = UV[:, offset_y:offset_y+new_dim_y, offset_x:offset_x+new_dim_x] + + # compute new intrinsics + # TODO: atm not needed but maybe later + + ################################# + + return {'TARGET': TARGET, + 'UV': UV, + 'paths': self.frame_paths[index],#img_path, + 'intrinsics': intrinsics, + 'extrinsics': extrinsics, + 'expressions': expressions, + 'audio_mels': mels, + 'audio_deepspeech': dsf, # deepspeech feature + 'object_id':0} + + def __len__(self): + return len(self.frame_paths) + + def name(self): + return 'AlignedDataset' diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/audio.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/audio.py new file mode 100644 index 0000000..4932c94 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/audio.py @@ -0,0 +1,77 @@ +import time +import random +import math + +import numpy as np + +import torch +import torch.nn as nn +import torch.nn.functional as F +import torchvision.transforms as transforms + +import torchaudio +import torchaudio.transforms + +import librosa +import scipy.signal +import librosa.display +import matplotlib.pyplot as plt + + +class Audio(): + def name(self): + return 'Audio' + + def __init__(self, filename, write_mel_spectogram = False): + self.n_mels=128 + self.fmax=8000 + self.hop_length_ms = 20 + + sound, sample_rate = librosa.load(filename)#torchaudio.load(filename) + self.raw_audio = sound + self.sample_rate = sample_rate + print('sample_rate = %d' % self.sample_rate) + self.n_samples = sound.shape[0] + self.time_total = self.n_samples / self.sample_rate + print('length = %ds' % self.time_total) + + print('compute mel spectrogram...') + self.hop_length = int(sample_rate / 1000.0 * self.hop_length_ms) + print('hop_length: ', self.hop_length) + self.mel_spectrogram = librosa.feature.melspectrogram(y=self.raw_audio, sr=self.sample_rate, hop_length=self.hop_length, n_mels=self.n_mels, 
fmax=self.fmax) + + if write_mel_spectogram: + print('write spectrogram to file') + plt.figure(figsize=(100, 15)) + librosa.display.specshow(librosa.power_to_db(self.mel_spectrogram, ref=np.max), y_axis='mel', fmax=self.fmax, x_axis='time') + plt.colorbar(format='%+2.0f dB') + plt.title('Mel spectrogram') + plt.tight_layout() + plt.savefig('mel_features.png') + + print('mel: ', self.mel_spectrogram.shape) # (128, 18441) + self.n_mel_frames = self.mel_spectrogram.shape[1] + self.mel_sample_rate = self.mel_spectrogram.shape[1] / self.time_total + print('n_mel_frames: ', self.n_mel_frames) + print('mel_sample_rate: ', self.mel_sample_rate) + + # convert to torch + self.mel_spectrogram = torch.FloatTensor(self.mel_spectrogram) + + def getWindow(self, mel_frame_idx, window_size): + # get audio mel sample window + audio_start = mel_frame_idx - (window_size//2) + audio_end = mel_frame_idx + (window_size//2) + if audio_start < 0: + audio_input = self.mel_spectrogram[0:self.n_mels, 0:audio_end] + zeros = torch.zeros((self.n_mels,-audio_start)) + audio_input = torch.cat([zeros, audio_input], 1) + elif audio_end >= self.n_mel_frames: + audio_input = self.mel_spectrogram[:, audio_start:-1] + zeros = torch.zeros((self.n_mels,audio_end-self.n_mel_frames + 1)) + audio_input = torch.cat([audio_input, zeros], 1) + else: + audio_input = self.mel_spectrogram[:, audio_start:audio_end] + + return torch.reshape(audio_input, (1, 1, self.n_mels, window_size)) \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/audio_dataset.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/audio_dataset.py new file mode 100644 index 0000000..ba0d603 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/audio_dataset.py @@ -0,0 +1,145 @@ +import os.path +import random +import torchvision.transforms as transforms +import torch +import numpy as np +from data.base_dataset import BaseDataset +from data.audio import Audio +#from data.image_folder import make_dataset +from PIL import Image + +def make_dataset(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.deepspeech.npy']): + id_str = fname[:-15] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + + for id in ids: + fname=str(id)+'.deepspeech.npy' + path = os.path.join(root, fname) + images.append(path) + return images + +def make_ids(paths, root_dir): + ids = [] + + for fname in paths: + l = fname.rfind('/') + #id_str = fname[l+1:-4] + id_str = fname[l+1:-15] + i = int(id_str) + #print(fname, ': ', i) + ids.append(i) + return ids + +class AudioDataset(BaseDataset): + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + self.opt = opt + self.root = opt.dataroot + self.data_dir = os.path.join(opt.dataroot, 'audio_feature') + self.frame_paths = make_dataset(self.data_dir) + self.frame_ids = make_ids(self.frame_paths, self.root) + + opt.nObjects = 1 + opt.nTrainObjects = 116 # TODO + opt.nTestObjects = 1 + opt.test_sequence_names = [[opt.dataroot.split("/")[-1], 'test']] + assert(opt.resize_or_crop == 'resize_and_crop') + + if opt.isTrain: + print('ERROR: audio_dataset is only allowed for testing') 
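+ # this loader only yields DeepSpeech feature windows plus zeroed + # identity/expression placeholders (see __getitem__ below), so it can + # only drive inference, never training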
+ exit() + + def getSampleWeights(self): + weights = np.ones((len(self.frame_paths))) + return weights + + def getAudioFilename(self): + return os.path.join(self.root, 'audio.wav') + + def getAudioFeatureFilename(self, idx): + return self.frame_paths[idx % len(self.frame_paths)] + + def __getitem__(self, index): + + #print('GET ITEM: ', index) + frame_path = self.frame_paths[index] + frame_id = self.frame_ids[index] + + # load deepspeech feature + feature_array = np.load(frame_path) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf = transforms.ToTensor()(dsf_np.astype(np.float32)) + + + # load sequence data if necessary + if self.opt.look_ahead:# use prev and following frame infos + r = self.opt.seq_len//2 + for i in range(1,r): # prev frames + index_seq = index - i + if index_seq < 0: index_seq = 0 + + dsf_fname = self.frame_paths[index_seq] + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + for i in range(1,self.opt.seq_len - r + 1): # following frames + index_seq = index + i + max_idx = len(self.frame_paths)-1 + if index_seq > max_idx: index_seq = max_idx + + dsf_fname = self.frame_paths[index_seq] + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf, dsf_seq], 0) # seq_len x 16 x 29 + # note the ordering [old ... current ... future] + else: + for i in range(1,self.opt.seq_len): + index_seq = index - i + if index_seq < 0: index_seq = 0 + dsf_fname = self.frame_paths[index_seq] + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... 
current] + + ################################# + zeroIdentity = torch.zeros(100) + zeroExpressions = torch.zeros(76) + + target_id = -1 + internal_sequence_id = 0 + + weight = 1.0 / len(self.frame_paths) + + return {'paths': frame_path, + 'expressions': zeroExpressions, + 'identity': zeroIdentity, + 'intrinsics': np.zeros((4)), + 'extrinsics': np.zeros((4,4)), + 'audio_deepspeech': dsf, # deepspeech feature + 'target_id':target_id, + 'internal_id':internal_sequence_id, + 'weight': np.array([weight]).astype(np.float32)} + + def __len__(self): + return len(self.frame_paths) + + def name(self): + return 'AudioDataset' diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/base_data_loader.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/base_data_loader.py new file mode 100644 index 0000000..ae5a168 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/base_data_loader.py @@ -0,0 +1,10 @@ +class BaseDataLoader(): + def __init__(self): + pass + + def initialize(self, opt): + self.opt = opt + + def load_data(self): + return None diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/base_dataset.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/base_dataset.py new file mode 100644 index 0000000..4a714be --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/base_dataset.py @@ -0,0 +1,106 @@ +import torch.utils.data as data +from PIL import Image +import torchvision.transforms as transforms +import torch + + +class BaseDataset(data.Dataset): + def __init__(self): + super(BaseDataset, self).__init__() + + def name(self): + return 'BaseDataset' + + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + pass + + def getSampleWeights(self): + return torch.ones((len(self))).double() + + def __len__(self): + return 0 + + +def get_transform(opt): + transform_list = [] + if opt.resize_or_crop == 'resize_and_crop': + osize = [opt.loadSize, opt.loadSize] + transform_list.append(transforms.Resize(osize, Image.BICUBIC)) + transform_list.append(transforms.RandomCrop(opt.fineSize)) + elif opt.resize_or_crop == 'crop': + transform_list.append(transforms.RandomCrop(opt.fineSize)) + elif opt.resize_or_crop == 'scale_width': + transform_list.append(transforms.Lambda( + lambda img: __scale_width(img, opt.fineSize))) + elif opt.resize_or_crop == 'scale_width_and_crop': + transform_list.append(transforms.Lambda( + lambda img: __scale_width(img, opt.loadSize))) + transform_list.append(transforms.RandomCrop(opt.fineSize)) + elif opt.resize_or_crop == 'none': + transform_list.append(transforms.Lambda( + lambda img: __adjust(img))) + else: + raise ValueError('--resize_or_crop %s is not a valid option.' 
% opt.resize_or_crop) + + if opt.isTrain and not opt.no_flip: + transform_list.append(transforms.RandomHorizontalFlip()) + + transform_list += [transforms.ToTensor(), + transforms.Normalize((0.5, 0.5, 0.5), + (0.5, 0.5, 0.5))] + return transforms.Compose(transform_list) + + +# just modify the width and height to be multiple of 4 +def __adjust(img): + ow, oh = img.size + + # the size needs to be a multiple of this number, + # because going through generator network may change img size + # and eventually cause size mismatch error + mult = 4 + if ow % mult == 0 and oh % mult == 0: + return img + w = (ow - 1) // mult + w = (w + 1) * mult + h = (oh - 1) // mult + h = (h + 1) * mult + + if ow != w or oh != h: + __print_size_warning(ow, oh, w, h) + + return img.resize((w, h), Image.BICUBIC) + + +def __scale_width(img, target_width): + ow, oh = img.size + + # the size needs to be a multiple of this number, + # because going through generator network may change img size + # and eventually cause size mismatch error + mult = 4 + assert target_width % mult == 0, "the target width needs to be multiple of %d." % mult + if (ow == target_width and oh % mult == 0): + return img + w = target_width + target_height = int(target_width * oh / ow) + m = (target_height - 1) // mult + h = (m + 1) * mult + + if target_height != h: + __print_size_warning(target_width, target_height, w, h) + + return img.resize((w, h), Image.BICUBIC) + + +def __print_size_warning(ow, oh, w, h): + if not hasattr(__print_size_warning, 'has_printed'): + print("The image size needs to be a multiple of 4. " + "The loaded image size was (%d, %d), so it was adjusted to " + "(%d, %d). This adjustment will be done to all images " + "whose sizes are not multiples of 4" % (ow, oh, w, h)) + __print_size_warning.has_printed = True diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/custom_aligned_dataset.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/custom_aligned_dataset.py new file mode 100644 index 0000000..a88669e --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/custom_aligned_dataset.py @@ -0,0 +1,238 @@ +import os.path + +import h5py +import random +import torchvision.transforms as transforms +import torch +import numpy as np + +# from mutil.bfm2017 import BFM2017 +# from mutil.np_util import adjust_width + +from data.base_dataset import BaseDataset +from data.audio import Audio +import cv2 +from scipy.ndimage.morphology import binary_fill_holes +from scipy.ndimage.morphology import binary_erosion +#from data.image_folder import make_dataset +from PIL import Image + +#def make_dataset(dir): +# images = [] +# assert os.path.isdir(dir), '%s is not a valid directory' % dir +# for root, _, fnames in sorted(os.walk(dir)): +# for fname in fnames: +# if any(fname.endswith(extension) for extension in ['.bin', '.BIN']): +# path = os.path.join(root, fname) +# images.append(path) +# return sorted(images) +# from mutil.pytorch_utils import to_tensor + + +def make_dataset(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.bin', '.BIN']): + id_str = fname[:-4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + + for id in ids: + fname=str(id)+'.bin' + path = os.path.join(root, fname) + images.append(path) + return images + +def load_intrinsics(input_dir): + file = open(input_dir+"/intrinsics.txt", "r") + 
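# intrinsics.txt holds a single whitespace-separated line of camera + # parameters; only that first line is parsed below. rigid.txt, read next, + # stores one 4x4 pose matrix per frame (5 lines each, including a blank line) + 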
intrinsics = [[(float(x) for x in line.split())] for line in file] + file.close() + intrinsics = list(intrinsics[0][0]) + return intrinsics + +def load_rigids(input_dir): + file = open(input_dir+"/rigid.txt", "r") + rigid_floats = [[float(x) for x in line.split()] for line in file] # note that it stores 5 lines per matrix (blank line) + file.close() + all_rigids = [ [rigid_floats[4*idx + 0],rigid_floats[4*idx + 1],rigid_floats[4*idx + 2],rigid_floats[4*idx + 3]] for idx in range(0, len(rigid_floats)//4) ] + return all_rigids + +def load_expressions(input_dir): + file = open(input_dir+"/expression.txt", "r") + expressions = [[float(x) for x in line.split()] for line in file] + file.close() + return expressions + +def load_audio(input_dir): + audio = Audio(input_dir + '/audio.mp3', write_mel_spectogram = False) + #audio = Audio(input_dir + '/audio.mp3', write_mel_spectogram = True) + + return audio + +def make_ids(paths, root_dir): + ids = [] + + for fname in paths: + l = fname.rfind('/') + id_str = fname[l+1:-4] + i = int(id_str) + #print(fname, ': ', i) + ids.append(i) + return ids + +class CustomAlignedDataset(BaseDataset): + IMG_DIM_Y = 256 + IMG_DIM_X = 256 + + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + self.opt = opt + self.root = opt.dataroot + + self.data = h5py.File(self.root, 'r') + print(self.data.keys()) + # self.audio = load_audio(self.data_dir) + # self.audio_window_size = opt.audio_window_size + + opt.nObjects = 1 + + def preprocess_image(self, frame_id): + img = self.data["frame"][frame_id] + img = img.astype(np.float32) / 255 # np.float was removed from NumPy; use an explicit dtype + if img.shape[0] != self.IMG_DIM_Y or img.shape[1] != self.IMG_DIM_X: + # print("Image needs resizing from {} to {},{}".format(img.shape, self.IMG_DIM_Y, self.IMG_DIM_X)) + img = cv2.resize(img, (self.IMG_DIM_X, self.IMG_DIM_Y)) # cv2 dsize is (width, height) + + assert 0 <= img.min() and img.max() <= 1 + img_tensor = transforms.ToTensor()(img.astype(np.float32)) + img_tensor = 2.0 * img_tensor - 1.0 + return img_tensor + + def preprocess_uv(self, frame_id): + uv = self.data["uv"][frame_id] # TODO: how to store uv? Do we need the same dims? + + if uv.shape[0] != self.IMG_DIM_Y or uv.shape[1] != self.IMG_DIM_X: + # print("UV needs resizing from {} to {},{}".format(uv.shape, self.IMG_DIM_Y, self.IMG_DIM_X)) + uv = cv2.resize(uv, (self.IMG_DIM_X, self.IMG_DIM_Y)) + + # issue only happens with multiple concurrent reads + if not (-1 <= uv.min() and uv.max() <= 1): + print("frame invalid", frame_id) + uv_tensor = transforms.ToTensor()(uv.astype(np.float32)) + + assert -1 <= uv.min() and uv.max() <= 1, "UV not in range [-1, 1]! min: {} max: {}".format(uv.min(), uv.max()) + return uv_tensor + + def preprocess_expressions(self, frame_id): + return torch.tensor(self.data['ep'][frame_id]) + + def preprocess_audio_features(self, frame_id): + # load deepspeech feature + feature_array = self.data["dsf"][frame_id] + assert feature_array.shape == (16, 29) + dsf_np = np.expand_dims(feature_array, 2) + dsf_tensor = transforms.ToTensor()(dsf_np.astype(np.float32)) + return dsf_tensor + + def preprocess_deca_details(self, frame_id): + deca_details = self.data["deca_details"][frame_id] # TODO: how to store uv? Do we need the same dims? 
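+ # like the other map preprocessors above, resize to the fixed IMG_DIM + # (256x256 by default) before converting to a CHW float tensor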
+ + if deca_details.shape[0] != self.IMG_DIM_Y or deca_details.shape[1] != self.IMG_DIM_X: + # print("UV needs resizing from {} to {},{}".format(uv.shape, self.IMG_DIM_Y, self.IMG_DIM_X)) + deca_details = cv2.resize(deca_details, (self.IMG_DIM_Y, self.IMG_DIM_X)) # TODO: np.resize vs. cv2.resize + + deca_details_tensor = transforms.ToTensor()(deca_details.astype(np.float32)) + return deca_details_tensor + + def preprocess_mask(self, frame_id): + mask = self.data["mask"][frame_id] # TODO: how to store uv? Do we need the same dims? + + if mask.shape[0] != self.IMG_DIM_Y or mask.shape[1] != self.IMG_DIM_X: + # print("UV needs resizing from {} to {},{}".format(uv.shape, self.IMG_DIM_Y, self.IMG_DIM_X)) + mask = cv2.resize(mask, (self.IMG_DIM_Y, self.IMG_DIM_X)) + + mask = mask>0 + mask = binary_fill_holes(mask).astype(np.float32) + mask = binary_erosion(mask, iterations=8) + mask_tensor = transforms.ToTensor()(mask.astype(np.float32)) + + return mask_tensor + + def __getitem__(self, index): + frame_id = index + # expressions + expression_tensor = self.preprocess_expressions(frame_id) + img_tensor = self.preprocess_image(frame_id) + uv_tensor = self.preprocess_uv(frame_id) + mask_tensor = self.preprocess_mask(frame_id) + dsf_tensor = self.preprocess_audio_features(frame_id) + + if self.opt.deca_details: + deca_details_tensor = self.preprocess_deca_details(frame_id) + + ################################# + ####### apply augmentation ###### + ################################# + if not self.opt.no_augmentation: + # random dimensions + new_dim_x = np.random.randint(int(self.IMG_DIM_X * 0.75), self.IMG_DIM_X+1) + new_dim_y = np.random.randint(int(self.IMG_DIM_Y * 0.75), self.IMG_DIM_Y+1) + new_dim_x = int(np.floor(new_dim_x / 64.0) * 64 ) # << dependent on the network structure !! 64 => 6 layers + new_dim_y = int(np.floor(new_dim_y / 64.0) * 64 ) + if new_dim_x > self.IMG_DIM_X: new_dim_x -= 64 + if new_dim_y > self.IMG_DIM_Y: new_dim_y -= 64 + + # random pos + if self.IMG_DIM_X == new_dim_x: offset_x = 0 + else: offset_x = np.random.randint(0, self.IMG_DIM_X-new_dim_x) + if self.IMG_DIM_Y == new_dim_y: offset_y = 0 + else: offset_y = np.random.randint(0, self.IMG_DIM_Y-new_dim_y) + + # select subwindow + img_tensor = img_tensor[:, offset_y:offset_y+new_dim_y, offset_x:offset_x+new_dim_x] + uv_tensor = uv_tensor[:, offset_y:offset_y+new_dim_y, offset_x:offset_x+new_dim_x] + + # compute new intrinsics + # TODO: atm not needed but maybe later + + else: + new_dim_x = int(np.floor(self.IMG_DIM_X / 64.0) * 64 ) # << dependent on the network structure !! 
64 => 6 layers + new_dim_y = int(np.floor(self.IMG_DIM_Y / 64.0) * 64 ) + offset_x = 0 + offset_y = 0 + # select subwindow + img_tensor = img_tensor[:, offset_y:offset_y+new_dim_y, offset_x:offset_x+new_dim_x] + uv_tensor = uv_tensor[:, offset_y:offset_y+new_dim_y, offset_x:offset_x+new_dim_x] + + # compute new intrinsics + # TODO: atm not needed but maybe later + + ################################# + return_dict = {'TARGET': img_tensor, + 'UV': uv_tensor * mask_tensor, + # 'paths': self.frame_paths[index],#img_path, + # 'intrinsics': intrinsics, + # 'extrinsics': extrinsics, + 'expressions': expression_tensor, + # 'audio_mels': mels, + 'audio_deepspeech': dsf_tensor, # deepspeech feature + } + + if self.opt.deca_details: + return_dict['deca_details'] = deca_details_tensor * mask_tensor + + return return_dict + + def __len__(self): + return self.data["dsf"].shape[0] + + def name(self): + return 'CustomAlignedDataset' + diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/custom_aligned_inference.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/custom_aligned_inference.py new file mode 100644 index 0000000..2626e9c --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/custom_aligned_inference.py @@ -0,0 +1,78 @@ +import os.path + +import h5py +import random +import torchvision.transforms as transforms +import torch +import numpy as np + +from mutil.bfm2017 import BFM2017 +from mutil.np_util import adjust_width + +from data.base_dataset import BaseDataset +from data.audio import Audio +import cv2 +from pytorch3d.renderer import look_at_view_transform +from pytorch3d.structures import Meshes +#from data.image_folder import make_dataset +from PIL import Image + +from data.custom_aligned_dataset import CustomAlignedDataset + + +class CustomAlignedInferenceDataset(CustomAlignedDataset): + def initialize(self, opt): + super().initialize(opt) + self.data_source = h5py.File(opt.dataroot_source, 'r') + + self.bfm = BFM2017(opt.path_bfm) + self.uv_renderer = None + + def create_uv_renderer(self, height): + from mutil.renderer import BFMUVRenderer + R, T = look_at_view_transform(eye=((0, 0, 0),), at=((0, 0, -1),), up=((0, 1, 0),)) + # return the renderer instead of assigning it here, otherwise the assignment in render_uv would reset self.uv_renderer to None + return BFMUVRenderer(self.opt.path_uv, device="cuda" if torch.cuda.is_available() else "cpu", fov=63, image_size=height, R=R, T=T) + + + def render_uv(self, mesh, height, width): + if self.uv_renderer is None: + # the renderer is created lazily because the image height is only known now + self.uv_renderer = self.create_uv_renderer(height) + + rendered_uv_np = self.uv_renderer.render(mesh) + rendered_uv_np = adjust_width(rendered_uv_np, width, is_uv=True) + return rendered_uv_np + + def __getitem__(self, index): + item = super().__getitem__(index) + frame_id = index + + # # Expressions + # source_expressions_tensor = torch.tensor(self.data_source['ep'][frame_id]) + # item['expressions'] = source_expressions_tensor + # + # # Audio features + # feature_array = self.data_source["dsf"][frame_id] + # dsf_np = np.expand_dims(feature_array, 2) + # dsf_tensor = transforms.ToTensor()(dsf_np.astype(np.float32)) + # item['audio_deepspeech'] = dsf_tensor # deepspeech feature + # + # # image and UV texture map (rendered) + # new_dim_x = int(np.floor(self.IMG_DIM_X / 64.0) * 64) # << dependent on the network structure !! 
64 => 6 layers + # new_dim_y = int(np.floor(self.IMG_DIM_Y / 64.0) * 64) + # img_source = self.data_source["frame"][frame_id] + # img_tensor_source = self.preprocess_image(img_source) + # item['SOURCE'] = img_tensor_source + + shape_coeff = self.data['sp'][frame_id] + expression_coeff = self.data_source['ep'][frame_id] + verts = self.bfm.generate_vertices(shape_coeff, expression_coeff) + R, t, s = self.data['R'][frame_id], self.data['t'][frame_id], self.data['s'][frame_id] + verts = self.bfm.apply_Rts(verts, R, t, s) + mesh = Meshes([verts], [self.bfm.faces]) + + # uv = self.render_uv(mesh, img_source.shape[0], img_source.shape[1]) + # uv_tensor = transforms.ToTensor()(uv.astype(np.float32)) + # uv_tensor = uv_tensor[:, 0: new_dim_y, 0:new_dim_x] + # item['UV'] = uv_tensor + return item \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/custom_aligned_inference_dataset.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/custom_aligned_inference_dataset.py new file mode 100644 index 0000000..d570542 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/custom_aligned_inference_dataset.py @@ -0,0 +1,77 @@ +import os.path + +import h5py +import random +import torchvision.transforms as transforms +import torch +import numpy as np + + +from data.base_dataset import BaseDataset +from data.audio import Audio +import cv2 +from pytorch3d.renderer import look_at_view_transform +from pytorch3d.structures import Meshes +#from data.image_folder import make_dataset +from PIL import Image + +from data.custom_aligned_dataset import CustomAlignedDataset + + +class CustomAlignedInferenceDataset(CustomAlignedDataset): + def initialize(self, opt): + super().initialize(opt) + self.data_source = h5py.File(opt.dataroot_source, 'r') + + # self.bfm = BFM2017(opt.path_bfm) + # self.uv_renderer = None + # + # def create_uv_renderer(self, height): + # from mutil.renderer import BFMUVRenderer + # R, T = look_at_view_transform(eye=((0, 0, 0),), at=((0, 0, -1),), up=((0, 1, 0),)) + # self.uv_renderer = BFMUVRenderer(self.opt.path_uv, device="cuda" if torch.cuda.is_available() else "cpu", fov=63, image_size=height, R=R, T=T) + # + # + # def render_uv(self, mesh, height, width): + # if self.uv_renderer is None: + # # We do this because we only now the height now. + # self.uv_renderer = self.create_uv_renderer(height) + # + # rendered_uv_np = self.uv_renderer.render(mesh) + # rendered_uv_np = adjust_width(rendered_uv_np, width, is_uv=True) + # return rendered_uv_np + + def __getitem__(self, index): + item = super().__getitem__(index) + frame_id = index + # + # # Expressions + # source_expressions_tensor = torch.tensor(self.data_source['ep'][frame_id]) + # item['expressions'] = source_expressions_tensor + # + # # Audio features + # feature_array = self.data_source["dsf"][frame_id] + # dsf_np = np.expand_dims(feature_array, 2) + # dsf_tensor = transforms.ToTensor()(dsf_np.astype(np.float32)) + # item['audio_deepspeech'] = dsf_tensor # deepspeech feature + # + # # image and UV texture map (rendered) + # new_dim_x = int(np.floor(self.IMG_DIM_X / 64.0) * 64) # << dependent on the network structure !! 
64 => 6 layers + # new_dim_y = int(np.floor(self.IMG_DIM_Y / 64.0) * 64) + # img_source = self.data_source["frame"][frame_id] + # img_tensor_source = self.preprocess_image(img_source) + # item['SOURCE'] = img_tensor_source + # + # shape_coeff = self.data['sp'][frame_id] + # expression_coeff = self.data_source['ep'][frame_id] + # verts = self.bfm.generate_vertices(shape_coeff, expression_coeff) + # R, t, s = self.data['R'][frame_id], self.data['t'][frame_id], self.data['s'][frame_id] + # verts = self.bfm.apply_Rts(verts, R, t, s).astype(np.float) + # mesh = Meshes([to_tensor(verts, "cuda")], [to_tensor(self.bfm.faces, "cuda").long()]) + # + # uv = self.render_uv(mesh, item["TARGET"].shape[-2], item["TARGET"].shape[-1]) + + # uv_tensor = transforms.ToTensor()(uv.astype(np.float32)) + # uv_tensor = uv_tensor[:, 0: new_dim_y, 0:new_dim_x] + # item['UV'] = uv_tensor + return item \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/face_dataset.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/face_dataset.py new file mode 100644 index 0000000..619c088 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/face_dataset.py @@ -0,0 +1,382 @@ +import os.path +import random +import torchvision.transforms as transforms +import torch +import numpy as np +from data.base_dataset import BaseDataset +from data.audio import Audio +#from data.image_folder import make_dataset +from PIL import Image +from util import util + +#def make_dataset(dir): +# images = [] +# assert os.path.isdir(dir), '%s is not a valid directory' % dir +# for root, _, fnames in sorted(os.walk(dir)): +# for fname in fnames: +# if any(fname.endswith(extension) for extension in ['.bin', '.BIN']): +# path = os.path.join(root, fname) +# images.append(path) +# return sorted(images) + +def make_dataset(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.npy', '.NPY']): + #.deepspeech.npy + id_str = fname[:-15] #4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + + for id in ids: + fname=str(id)+'.deepspeech.npy' + path = os.path.join(root, fname) + images.append(path) + return images + +def make_ids(paths, root_dir): + ids = [] + + for fname in paths: + l = fname.rfind('/') + id_str = fname[l+1:-15]#4] + i = int(id_str) + #print(fname, ': ', i) + ids.append(i) + return ids + +def make_dataset_png_ids(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.png', '.PNG']): + id_str = fname[:-4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + return ids + +def make_dataset_exr_ids(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.exr', '.EXR']): + id_str = fname[:-4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + return ids + + +def load_intrinsics(input_dir): + file = open(input_dir+"/intrinsics.txt", "r") + intrinsics = [[(float(x) for x in line.split())] for line in file] + file.close() + intrinsics = list(intrinsics[0][0]) + return intrinsics + +def load_rigids(input_dir): + file = open(input_dir+"/rigid.txt", "r") + rigid_floats = 
[[float(x) for x in line.split()] for line in file] # note that it stores 5 lines per matrix (blank line) + file.close() + all_rigids = [ [rigid_floats[4*idx + 0],rigid_floats[4*idx + 1],rigid_floats[4*idx + 2],rigid_floats[4*idx + 3]] for idx in range(0, len(rigid_floats)//4) ] + return all_rigids + +def load_expressions(input_dir): + file = open(input_dir+"/expression.txt", "r") + expressions = [[float(x) for x in line.split()] for line in file] + file.close() + return expressions + +def load_identity(input_dir): + file = open(input_dir+"/identities.txt", "r") + identity = [[float(x) for x in line.split()] for line in file] + file.close() + return identity + + + +class FaceDataset(BaseDataset): + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + self.opt = opt + + # directories + self.dataroot = opt.dataroot + self.audio_feature_dir = os.path.join(opt.dataroot, 'audio_feature') + self.image_dir = os.path.join(opt.dataroot, 'images') + self.uvs_dir = os.path.join(opt.dataroot, 'uvs') + + # debug print + print('load sequence:', self.dataroot) + print('\taudio_feature_dir:', self.audio_feature_dir) + print('\timage_dir:', self.image_dir) + print('\tuvs_dir:', self.uvs_dir) + + # generate index maps + audio_ids = make_ids(make_dataset(self.audio_feature_dir), self.dataroot) + image_ids = make_dataset_png_ids(self.image_dir) + uvs_ids = make_dataset_exr_ids(self.uvs_dir) + + # get model parameters + intrinsics = load_intrinsics(self.dataroot) + extrinsics = load_rigids(self.dataroot) + expressions = load_expressions(self.dataroot) + identities = load_identity(self.dataroot) + + if opt.phase == 'test': # test overwrites the audio and uv files, as well as expressions + print('Test mode. Overwriting audio, uv and expressions') + print('source sequence:', opt.source_dir) + dataroot = opt.source_dir + self.audio_feature_dir = os.path.join(dataroot, 'audio_feature') + self.uvs_dir = os.path.join(dataroot, 'uvs') + audio_ids = make_ids(make_dataset(self.audio_feature_dir), dataroot) + uvs_ids = make_dataset_exr_ids(self.uvs_dir) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(dataroot) + identities = load_identity(dataroot) + + print('\tnum audio_ids:', len(audio_ids)) + print('\tnum image_ids:', len(image_ids)) + print('\tnum uvs_ids:', len(uvs_ids)) + + + # set data + min_len = min(len(audio_ids), len(image_ids), len(uvs_ids), len(extrinsics), len(expressions)) + self.audio_ids = audio_ids[:min_len] + self.image_ids = image_ids[:min_len] + self.uvs_ids = uvs_ids[:min_len] + self.intrinsics = intrinsics + self.extrinsics = extrinsics[:] #extrinsics[:min_len] + self.expressions = expressions[:] #expressions[:min_len] + self.identities = identities[:] #identities[:min_len] + self.n_frames_total = min_len + + print('\tnum frames:', self.n_frames_total) + + + opt.nTrainObjects = 1 + opt.nValObjects = 1 + opt.nTestObjects = 1 + + + opt.test_sequence_names = [[opt.dataroot.split("/")[-1], 'train']] + if opt.phase == 'test': + opt.test_sequence_names = [[opt.source_dir.split("/")[-1], 'test']] + print('test:', opt.test_sequence_names) + + assert(opt.resize_or_crop == 'resize_and_crop') + + def getSampleWeights(self): + weights = np.ones((self.n_frames_total)) + return weights + + def getExtrinsics(self, idx): + return self.extrinsics[self.uvs_ids[idx % self.n_frames_total]] + def getIntrinsics(self, idx): + return self.intrinsics + def getIdentities(self, idx): + return 
self.identities[self.uvs_ids[idx % self.n_frames_total]] + def getExpressions(self, idx): + return self.expressions[self.uvs_ids[idx % self.n_frames_total]] + + def getAudioFilename(self): + return os.path.join(self.dataroot, 'audio.wav') + + def getImageFilename(self, idx): + image_id = self.image_ids[idx % self.n_frames_total] + img_fname = os.path.join(self.image_dir, str(image_id).zfill(5) + '.png') + return img_fname + #img_numpy = np.asarray(Image.open(img_fname)) + #TARGET = 2.0 * transforms.ToTensor()(img_numpy.astype(np.float32))/255.0 - 1.0 + + def getAudioFeatureFilename(self, idx): + #return self.frame_paths[idx % len(self.frame_paths)] + audio_id = self.audio_ids[idx] + return os.path.join(self.audio_feature_dir, str(audio_id) + '.deepspeech.npy') + + + def computeCrop(self, mask, MULTIPLE_OF=64, random_size=False): + IMG_DIM_X = mask.shape[2] + IMG_DIM_Y = mask.shape[1] + if random_size: + # random dimensions + new_dim_x = np.random.randint(int(IMG_DIM_X * 0.75), IMG_DIM_X+1) + new_dim_y = np.random.randint(int(IMG_DIM_Y * 0.75), IMG_DIM_Y+1) + new_dim_x = int(np.floor(new_dim_x / float(MULTIPLE_OF)) * MULTIPLE_OF ) + new_dim_y = int(np.floor(new_dim_y / float(MULTIPLE_OF)) * MULTIPLE_OF ) + else: + new_dim_x = 3 * MULTIPLE_OF + new_dim_y = 3 * MULTIPLE_OF + + # check dims + if new_dim_x > IMG_DIM_X: new_dim_x -= MULTIPLE_OF + if new_dim_y > IMG_DIM_Y: new_dim_y -= MULTIPLE_OF + + # random pos: center the crop on a random foreground (mask) pixel + mask_indices = torch.nonzero(mask) + _, bb_mid_point_y, bb_mid_point_x = mask_indices[np.random.randint(0, mask_indices.shape[0])].data.cpu() + #print('bb_mid_point', bb_mid_point_x, bb_mid_point_y) + + offset_x = bb_mid_point_x - new_dim_x/2 + offset_y = bb_mid_point_y - new_dim_y/2 + + + if IMG_DIM_X == new_dim_x: offset_x = 0 + if offset_x < 0: offset_x = 0 + if offset_x+new_dim_x >= IMG_DIM_X: offset_x = IMG_DIM_X-new_dim_x + + if IMG_DIM_Y == new_dim_y: offset_y = 0 + if offset_y < 0: offset_y = 0 + if offset_y+new_dim_y >= IMG_DIM_Y: offset_y = IMG_DIM_Y-new_dim_y + + return np.array([int(offset_x),int(offset_y),int(new_dim_x), int(new_dim_y)]) + + + def __getitem__(self, global_index): + # select frame from sequence + index = global_index + + # get data ids + audio_id = self.audio_ids[index] + image_id = self.image_ids[index] + uv_id = self.uvs_ids[index] + + + + #print('GET ITEM: ', index) + img_fname = os.path.join(self.image_dir, str(image_id).zfill(5) + '.png') + img_numpy = np.asarray(Image.open(img_fname)) + TARGET = 2.0 * transforms.ToTensor()(img_numpy.astype(np.float32))/255.0 - 1.0 + + uv_fname = os.path.join(self.uvs_dir, str(uv_id).zfill(5) + '.exr') + uv_numpy = util.load_exr(uv_fname) + UV = transforms.ToTensor()(uv_numpy.astype(np.float32)) + UV = torch.where(UV > 1.0, torch.zeros_like(UV), UV) + UV = torch.where(UV < 0.0, torch.zeros_like(UV), UV) + UV = 2.0 * UV - 1.0 + + #print('img_fname:', img_fname) + #print('uv_fname:', uv_fname) + + # intrinsics and extrinsics + intrinsics = self.intrinsics + extrinsics = self.extrinsics[uv_id] + + # expressions + expressions = np.asarray(self.expressions[audio_id], dtype=np.float32) + #print('expressions:', expressions.shape) + expressions[32] *= 0.0 # remove eye brow movements + expressions[41] *= 0.0 # remove eye brow movements + expressions[71:75] *= 0.0 # remove eye brow movements + expressions = torch.tensor(expressions) + + # identity + identity = torch.tensor(self.identities[audio_id]) + + # load deepspeech feature + dsf_fname = os.path.join(self.audio_feature_dir, str(audio_id) + '.deepspeech.npy') + 
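# each .deepspeech.npy holds a 16x29 feature window for this frame (presumably 16 audio time steps x 29 DeepSpeech character scores); np.resize below appends a channel dimension + 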
feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf = transforms.ToTensor()(dsf_np.astype(np.float32)) + + + # load sequence data if necessary + if self.opt.look_ahead:# use prev and following frame infos + r = self.opt.seq_len//2 + last_valid_idx = audio_id + for i in range(1,r): # prev frames + index_seq = index - i + if index_seq < 0: index_seq = 0 + audio_id_seq = self.audio_ids[index_seq] + if audio_id_seq == audio_id - i: last_valid_idx = audio_id_seq + else: audio_id_seq = last_valid_idx + + dsf_fname = os.path.join(self.audio_feature_dir, str(audio_id_seq) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + last_valid_idx = audio_id + for i in range(1,self.opt.seq_len - r + 1): # following frames + index_seq = index + i + max_idx = len(self.audio_ids)-1 + if index_seq > max_idx: index_seq = max_idx + audio_id_seq = self.audio_ids[index_seq] + if audio_id_seq == audio_id + i: last_valid_idx = audio_id_seq + else: audio_id_seq = last_valid_idx + + dsf_fname = os.path.join(self.audio_feature_dir, str(audio_id_seq) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf, dsf_seq], 0) # seq_len x 16 x 29 + # note the ordering [old ... current ... future] + else: + last_valid_idx = audio_id + for i in range(1,self.opt.seq_len): + index_seq = index - i + if index_seq < 0: index_seq = 0 + audio_id_seq = self.audio_ids[index_seq] + if audio_id_seq == audio_id - i: last_valid_idx = audio_id_seq + else: audio_id_seq = last_valid_idx + + dsf_fname = os.path.join(self.audio_feature_dir, str(audio_id_seq) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + + ################################# + ####### apply augmentation ###### + ################################# + crop = np.array([0,0,UV.shape[2],UV.shape[1]]) + if not self.opt.no_augmentation: + INVALID_UV = -1 + mask = ( (UV[0:1,:,:] != INVALID_UV) | (UV[1:2,:,:] != INVALID_UV) ) + crop = self.computeCrop(mask, MULTIPLE_OF=64) # << dependent on the network structure !! 
64 => 6 layers + + offset_x,offset_y,new_dim_x, new_dim_y = crop + # select subwindow + TARGET = TARGET[:, offset_y:offset_y+new_dim_y, offset_x:offset_x+new_dim_x] + UV = UV[:, offset_y:offset_y+new_dim_y, offset_x:offset_x+new_dim_x] + + # compute new intrinsics + # TODO: atm not needed but maybe later + + ################################# + weight = 1.0 / self.n_frames_total + + return {'TARGET': TARGET, 'UV': UV, + 'paths': dsf_fname, #img_path, + 'intrinsics': np.array(intrinsics), + 'extrinsics': np.array(extrinsics), + 'expressions': expressions, + 'identity': identity, + 'audio_deepspeech': dsf, # deepspeech feature + 'target_id': -1, + 'crop': crop, + 'internal_id': 0, + 'weight': np.array([weight]).astype(np.float32)} + + def __len__(self): + return self.n_frames_total + + def name(self): + return 'FaceDataset' diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/facetmp_dataset.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/facetmp_dataset.py new file mode 100644 index 0000000..d771116 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/facetmp_dataset.py @@ -0,0 +1,394 @@ +import os.path +import random +import torchvision.transforms as transforms +import torch +import numpy as np +from data.base_dataset import BaseDataset +from data.audio import Audio +#from data.image_folder import make_dataset +from PIL import Image +from util import util + +#def make_dataset(dir): +# images = [] +# assert os.path.isdir(dir), '%s is not a valid directory' % dir +# for root, _, fnames in sorted(os.walk(dir)): +# for fname in fnames: +# if any(fname.endswith(extension) for extension in ['.bin', '.BIN']): +# path = os.path.join(root, fname) +# images.append(path) +# return sorted(images) + +def make_dataset(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.npy', '.NPY']): + #.deepspeech.npy + id_str = fname[:-15] #4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + + for id in ids: + fname=str(id)+'.deepspeech.npy' + path = os.path.join(root, fname) + images.append(path) + return images + +def make_ids(paths, root_dir): + ids = [] + + for fname in paths: + l = fname.rfind('/') + id_str = fname[l+1:-15]#4] + i = int(id_str) + #print(fname, ': ', i) + ids.append(i) + return ids + +def make_dataset_png_ids(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.png', '.PNG']): + id_str = fname[:-4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + return ids + +def make_dataset_exr_ids(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.exr', '.EXR']): + id_str = fname[:-4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + return ids + + +def load_intrinsics(input_dir): + file = open(input_dir+"/intrinsics.txt", "r") + intrinsics = [[(float(x) for x in line.split())] for line in file] + file.close() + intrinsics = list(intrinsics[0][0]) + return intrinsics + +def load_rigids(input_dir): + file = open(input_dir+"/rigid.txt", "r") + rigid_floats = [[float(x) for x in line.split()] for line in file] # note that it stores 5 
lines per matrix (blank line) + file.close() + all_rigids = [ [rigid_floats[4*idx + 0],rigid_floats[4*idx + 1],rigid_floats[4*idx + 2],rigid_floats[4*idx + 3]] for idx in range(0, len(rigid_floats)//4) ] + return all_rigids + +def load_expressions(input_dir): + file = open(input_dir+"/expression.txt", "r") + expressions = [[float(x) for x in line.split()] for line in file] + file.close() + return expressions + +def load_identity(input_dir): + file = open(input_dir+"/identities.txt", "r") + identity = [[float(x) for x in line.split()] for line in file] + file.close() + return identity + + + +class FaceTmpDataset(BaseDataset): + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + self.opt = opt + + # directories + self.dataroot = opt.dataroot + self.audio_feature_dir = os.path.join(opt.dataroot, 'audio_feature') + self.image_dir = os.path.join(opt.dataroot, 'images') + self.uvs_dir = os.path.join(opt.dataroot, 'uvs') + + # debug print + print('load sequence:', self.dataroot) + print('\taudio_feature_dir:', self.audio_feature_dir) + print('\timage_dir:', self.image_dir) + print('\tuvs_dir:', self.uvs_dir) + + # generate index maps + audio_ids = make_ids(make_dataset(self.audio_feature_dir), self.dataroot) + image_ids = make_dataset_png_ids(self.image_dir) + uvs_ids = make_dataset_exr_ids(self.uvs_dir) + + # get model parameters + intrinsics = load_intrinsics(self.dataroot) + extrinsics = load_rigids(self.dataroot) + expressions = load_expressions(self.dataroot) + identities = load_identity(self.dataroot) + + if opt.phase == 'test': # test overwrites the audio and uv files, as well as expressions + print('Test mode. Overwriting audio, uv and expressions') + print('source sequence:', opt.source_dir) + dataroot = opt.source_dir + self.audio_feature_dir = os.path.join(dataroot, 'audio_feature') + self.uvs_dir = os.path.join(dataroot, 'uvs') + audio_ids = make_ids(make_dataset(self.audio_feature_dir), dataroot) + uvs_ids = make_dataset_exr_ids(self.uvs_dir) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(dataroot) + identities = load_identity(dataroot) + + print('\tnum audio_ids:', len(audio_ids)) + print('\tnum image_ids:', len(image_ids)) + print('\tnum uvs_ids:', len(uvs_ids)) + + + # set data + min_len = min(len(audio_ids), len(image_ids), len(uvs_ids), len(extrinsics), len(expressions)) + self.audio_ids = audio_ids[:min_len] + self.image_ids = image_ids[:min_len] + self.uvs_ids = uvs_ids[:min_len] + self.intrinsics = intrinsics + self.extrinsics = extrinsics[:] # extrinsics[:min_len] + self.expressions = expressions[:] # expressions[:min_len] + self.identities = identities[:] #identities[:min_len] + self.n_frames_total = min_len + + print('\tnum frames:', self.n_frames_total) + + + opt.nTrainObjects = 1 + opt.nValObjects = 1 + opt.nTestObjects = 1 + + + opt.test_sequence_names = [[opt.dataroot.split("/")[-1], 'train']] + if opt.phase == 'test': + + opt.test_sequence_names = [[opt.source_dir.split("/")[-1], 'test']] + print('test:', opt.test_sequence_names) + + assert(opt.resize_or_crop == 'resize_and_crop') + + def getSampleWeights(self): + weights = np.ones((self.n_frames_total)) + return weights + + def computeCrop(self, mask, MULTIPLE_OF=64, random_size=False): + IMG_DIM_X = mask.shape[2] + IMG_DIM_Y = mask.shape[1] + if random_size: + # random dimensions + new_dim_x = np.random.randint(int(IMG_DIM_X * 0.75), IMG_DIM_X+1) + new_dim_y = 
np.random.randint(int(IMG_DIM_Y * 0.75), IMG_DIM_Y+1) + new_dim_x = int(np.floor(new_dim_x / float(MULTIPLE_OF)) * MULTIPLE_OF ) + new_dim_y = int(np.floor(new_dim_y / float(MULTIPLE_OF)) * MULTIPLE_OF ) + else: + new_dim_x = 2 * MULTIPLE_OF + new_dim_y = 2 * MULTIPLE_OF + + # check dims + if new_dim_x > IMG_DIM_X: new_dim_x -= MULTIPLE_OF + if new_dim_y > IMG_DIM_Y: new_dim_y -= MULTIPLE_OF + + # random pos: center the crop on a random foreground (mask) pixel + mask_indices = torch.nonzero(mask) + _, bb_mid_point_y, bb_mid_point_x = mask_indices[np.random.randint(0, mask_indices.shape[0])].data.cpu() + #print('bb_mid_point', bb_mid_point_x, bb_mid_point_y) + + offset_x = bb_mid_point_x - new_dim_x/2 + offset_y = bb_mid_point_y - new_dim_y/2 + + + if IMG_DIM_X == new_dim_x: offset_x = 0 + if offset_x < 0: offset_x = 0 + if offset_x+new_dim_x >= IMG_DIM_X: offset_x = IMG_DIM_X-new_dim_x + + if IMG_DIM_Y == new_dim_y: offset_y = 0 + if offset_y < 0: offset_y = 0 + if offset_y+new_dim_y >= IMG_DIM_Y: offset_y = IMG_DIM_Y-new_dim_y + + + return np.array([int(offset_x),int(offset_y),int(new_dim_x), int(new_dim_y)]) + + def getitem(self, global_index, crop=None): + # select frame from sequence + index = global_index + + # get data ids + audio_id = self.audio_ids[index] + image_id = self.image_ids[index] + uv_id = self.uvs_ids[index] + + #print('GET ITEM: ', index) + img_fname = os.path.join(self.image_dir, str(image_id).zfill(5) + '.png') + img_numpy = np.asarray(Image.open(img_fname)) + TARGET = 2.0 * transforms.ToTensor()(img_numpy.astype(np.float32))/255.0 - 1.0 + + uv_fname = os.path.join(self.uvs_dir, str(uv_id).zfill(5) + '.exr') + uv_numpy = util.load_exr(uv_fname) + UV = transforms.ToTensor()(uv_numpy.astype(np.float32)) + UV = torch.where(UV > 1.0, torch.zeros_like(UV), UV) + UV = torch.where(UV < 0.0, torch.zeros_like(UV), UV) + UV = 2.0 * UV - 1.0 + + + # intrinsics and extrinsics + intrinsics = self.intrinsics + extrinsics = self.extrinsics[uv_id] + + # expressions + expressions = np.asarray(self.expressions[audio_id], dtype=np.float32) + #print('expressions:', expressions.shape) + expressions[32] *= 0.0 # remove eye brow movements + expressions[41] *= 0.0 # remove eye brow movements + expressions[71:75] *= 0.0 # remove eye brow movements + expressions = torch.tensor(expressions) + + # identity + identity = torch.tensor(self.identities[audio_id]) + + # load deepspeech feature + dsf_fname = os.path.join(self.audio_feature_dir, str(audio_id) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf = transforms.ToTensor()(dsf_np.astype(np.float32)) + + + # load sequence data if necessary + if self.opt.look_ahead: # use prev and following frame infos + r = self.opt.seq_len//2 + last_valid_idx = audio_id + for i in range(1,r): # prev frames + index_seq = index - i + if index_seq < 0: index_seq = 0 + audio_id_seq = self.audio_ids[index_seq] + if audio_id_seq == audio_id - i: last_valid_idx = audio_id_seq + else: audio_id_seq = last_valid_idx + + dsf_fname = os.path.join(self.audio_feature_dir, str(audio_id_seq) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... 
current] + + last_valid_idx = audio_id + for i in range(1,self.opt.seq_len - r + 1): # following frames + index_seq = index + i + max_idx = len(self.audio_ids)-1 + if index_seq > max_idx: index_seq = max_idx + audio_id_seq = self.audio_ids[index_seq] + if audio_id_seq == audio_id + i: last_valid_idx = audio_id_seq + else: audio_id_seq = last_valid_idx + + dsf_fname = os.path.join(self.audio_feature_dir, str(audio_id_seq) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf, dsf_seq], 0) # seq_len x 16 x 29 + # note the ordering [old ... current ... future] + else: + last_valid_idx = audio_id + for i in range(1,self.opt.seq_len): + index_seq = index - i + if index_seq < 0: index_seq = 0 + audio_id_seq = self.audio_ids[index_seq] + if audio_id_seq == audio_id - i: last_valid_idx = audio_id_seq + else: audio_id_seq = last_valid_idx + + dsf_fname = os.path.join(self.audio_feature_dir, str(audio_id_seq) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + ################################# + ####### apply augmentation ###### + ################################# + + if not self.opt.no_augmentation: + if type(crop) == type(None): + INVALID_UV = -1 + mask = ( (UV[0:1,:,:] != INVALID_UV) | (UV[1:2,:,:] != INVALID_UV) ) + crop = self.computeCrop(mask, MULTIPLE_OF=64) # << dependent on the network structure !! 64 => 6 layers + + offset_x,offset_y,new_dim_x, new_dim_y = crop + + # select subwindow + TARGET = TARGET[:, offset_y:offset_y+new_dim_y, offset_x:offset_x+new_dim_x] + UV = UV[:, offset_y:offset_y+new_dim_y, offset_x:offset_x+new_dim_x] + + # compute new intrinsics + # TODO: atm not needed but maybe later + + ################################# + + + return {'TARGET': TARGET, 'UV': UV, + 'paths': dsf_fname, #img_path, + 'intrinsics': np.array(intrinsics), + 'extrinsics': np.array(extrinsics), + 'expressions': expressions, + 'identity': identity, + 'audio_deepspeech': dsf, # deepspeech feature + #'target_id':target_id, + 'crop': crop, + + 'internal_id': 0} + + + def __getitem__(self, global_index): + # select frame from sequence + index = global_index + + current = self.getitem(index) + crop = current['crop'] + prv = self.getitem(max(index-1, 0), crop) + nxt = self.getitem(min(index+1, self.n_frames_total-1), crop) + + if type(crop) == type(None): + crop = np.array([0,0,current['UV'].shape[2],current['UV'].shape[1]]) + + weight = 1.0 / self.n_frames_total + + return {'TARGET': current['TARGET'], + 'UV': current['UV'], + 'paths': current['paths'], + 'intrinsics': current['intrinsics'], + 'extrinsics': current['extrinsics'], + 'expressions': current['expressions'], + 'identity': current['identity'], + 'audio_deepspeech': current['audio_deepspeech'], + + 'prv_TARGET': prv['TARGET'], + 'prv_UV': prv['UV'], + 'prv_audio_deepspeech': prv['audio_deepspeech'], + 'prv_expressions': prv['expressions'], + + 'nxt_TARGET': nxt['TARGET'], + 'nxt_UV': nxt['UV'], + 'nxt_audio_deepspeech': nxt['audio_deepspeech'], + 'nxt_expressions': nxt['expressions'], + + 'crop': crop, + + 'internal_id': current['internal_id'], + 'weight': np.array([weight]).astype(np.float32)} + + def __len__(self): + return self.n_frames_total + + def name(self): + return 
'FaceTmpDataset' diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/multi_face_audio_eq_tmp_cached_dataset.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/multi_face_audio_eq_tmp_cached_dataset.py new file mode 100644 index 0000000..9bb4504 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/multi_face_audio_eq_tmp_cached_dataset.py @@ -0,0 +1,458 @@ +import os.path +import random +import torchvision.transforms as transforms +import torch +import numpy as np +from data.base_dataset import BaseDataset +from data.audio import Audio +#from data.image_folder import make_dataset +from PIL import Image +import progressbar + +#def make_dataset(dir): +# images = [] +# assert os.path.isdir(dir), '%s is not a valid directory' % dir +# for root, _, fnames in sorted(os.walk(dir)): +# for fname in fnames: +# if any(fname.endswith(extension) for extension in ['.bin', '.BIN']): +# path = os.path.join(root, fname) +# images.append(path) +# return sorted(images) + +def make_dataset(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.npy', '.NPY']): + #.deepspeech.npy + id_str = fname[:-15] #4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + + for id in ids: + fname=str(id)+'.deepspeech.npy' + path = os.path.join(root, fname) + images.append(path) + return images + +def make_ids(paths, root_dir): + ids = [] + + for fname in paths: + l = fname.rfind('/') + id_str = fname[l+1:-15]#4] + i = int(id_str) + #print(fname, ': ', i) + ids.append(i) + return ids + +def make_dataset_ids_png(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.png', '.png']): + id_str = fname[:-4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + return ids + +def load_intrinsics(input_dir): + file = open(input_dir+"/intrinsics.txt", "r") + intrinsics = [[(float(x) for x in line.split())] for line in file] + file.close() + intrinsics = list(intrinsics[0][0]) + return intrinsics + +def load_rigids(input_dir): + file = open(input_dir+"/rigid.txt", "r") + rigid_floats = [[float(x) for x in line.split()] for line in file] # note that it stores 5 lines per matrix (blank line) + file.close() + all_rigids = [ [rigid_floats[4*idx + 0],rigid_floats[4*idx + 1],rigid_floats[4*idx + 2],rigid_floats[4*idx + 3]] for idx in range(0, len(rigid_floats)//4) ] + return all_rigids + +def load_expressions(input_dir): + file = open(input_dir+"/expression.txt", "r") + expressions = [[float(x) for x in line.split()] for line in file] + file.close() + return expressions + +def load_identity(input_dir): + file = open(input_dir+"/identities.txt", "r") + identities = [[float(x) for x in line.split()] for line in file] + file.close() + return identities + + +class MultiFaceAudioEQTmpCachedDataset(BaseDataset): + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + self.opt = opt + self.root = opt.dataroot + + # read dataset file that contains the filenames for the train, val and test lists + file = open(self.root+"/dataset.txt", "r") + self.filename_train_list, self.filename_val_list, self.filename_test_list = [str(line) for line in file] + file.close() + if self.filename_train_list[-1] == 
'\n': self.filename_train_list = self.filename_train_list[:-1] + if self.filename_val_list[-1] == '\n': self.filename_val_list = self.filename_val_list[:-1] + if self.filename_test_list[-1] == '\n': self.filename_test_list = self.filename_test_list[:-1] + + + + # get list of train sequences + file = open(self.root+"/" + self.filename_train_list, "r") + self.train_sequence_names = [str(line) for line in file] + file.close() + for i in range(0,len(self.train_sequence_names)): + if self.train_sequence_names[i][-1] == '\n': + self.train_sequence_names[i] = self.train_sequence_names[i][:-1] + + # get list of val sequences + file = open(self.root+"/" + self.filename_val_list, "r") + self.val_sequence_names = [[str(w) for w in line.split()] for line in file] + file.close() + for i in range(0,len(self.val_sequence_names)): + if self.val_sequence_names[i][0][-1] == '\n': self.val_sequence_names[i][0] = self.val_sequence_names[i][0][:-1] + if self.val_sequence_names[i][1][-1] == '\n': self.val_sequence_names[i][1] = self.val_sequence_names[i][1][:-1] + + # get list of test sequences + file = open(self.root+"/" + self.filename_test_list, "r") + self.test_sequence_names = [[str(w) for w in line.split()] for line in file] + if opt.output_audio_expressions: self.test_sequence_names = self.test_sequence_names[0:1] + file.close() + for i in range(0,len(self.test_sequence_names)): + if self.test_sequence_names[i][0][-1] == '\n': self.test_sequence_names[i][0] = self.test_sequence_names[i][0][:-1] + if self.test_sequence_names[i][1][-1] == '\n': self.test_sequence_names[i][1] = self.test_sequence_names[i][1][:-1] + + # print some stats + print('filename_train_list:', self.filename_train_list) + print('\tnum_seq:', len(self.train_sequence_names)) + print('filename_val_list: ', self.filename_val_list) + print('\tnum_seq:', len(self.val_sequence_names)) + print('filename_test_list: ', self.filename_test_list) + print('\tnum_seq:', len(self.test_sequence_names)) + + opt.train_sequence_names = self.train_sequence_names + opt.val_sequence_names = self.val_sequence_names + opt.test_sequence_names = self.test_sequence_names + + # search mapping from val, test to train sequences that are used as targets + self.val_sequence_targets = [] + for i in range(0,len(self.val_sequence_names)): + target_name = self.val_sequence_names[i][1] + target_id = -1 + for j in range(0,len(self.train_sequence_names)): + if self.train_sequence_names[j] == target_name: + target_id = j + break + if target_id == -1: + print('Target sequence not in train set! ', target_name) + exit() + self.val_sequence_targets.append(target_id) + + self.test_sequence_targets = [] + for i in range(0,len(self.test_sequence_names)): + target_name = self.test_sequence_names[i][1] + target_id = -1 + for j in range(0,len(self.train_sequence_names)): + if self.train_sequence_names[j] == target_name: + target_id = j + break + if target_id == -1: + print('Target sequence not in train set! 
', target_name) + exit() + self.test_sequence_targets.append(target_id) + print('test: ', self.test_sequence_names[i]) + print('\t target:', target_id) + + # store len values + opt.nTrainObjects = len(self.train_sequence_names) + opt.nValObjects = len(self.val_sequence_names) + opt.nTestObjects = len(self.test_sequence_names) + + ################################################ + ################################################ + ################################################ + + # prepare dataloader paths / data + self.audio_feature_dir = [] + self.image_dir = [] + self.uvs_dir = [] + self.audio_ids = [] + self.image_ids = [] + self.intrinsics = [] + self.extrinsics = [] + self.expressions = [] + self.identities = [] + self.target_id = [] + self.n_frames_total = 0 + + if opt.phase == 'train': + self.sequence_names = self.train_sequence_names + for i in range(0,len(self.train_sequence_names)): + dataroot = os.path.join(opt.dataroot, self.train_sequence_names[i]) + audio_feature_dir = os.path.join(opt.dataroot, self.train_sequence_names[i], 'audio_feature') + image_dir = os.path.join(opt.dataroot, self.train_sequence_names[i], 'images') + uvs_dir = os.path.join(opt.dataroot, self.train_sequence_names[i], 'uvs') + print('load train sequence:', self.train_sequence_names[i]) + print('\tidentity_dir:', dataroot) + print('\taudio_feature_dir:', audio_feature_dir) + print('\timage_dir:', image_dir) + print('\tuvs_dir:', uvs_dir) + + audio_ids = make_ids(make_dataset(audio_feature_dir), dataroot) + image_ids = make_dataset_ids_png(image_dir) # [-1] * len(audio_ids) #make_ids(make_dataset(image_dir), dataroot) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(dataroot) + identity = load_identity(dataroot) + + min_len = min(len(audio_ids), len(image_ids), len(extrinsics), len(expressions)) + + self.audio_feature_dir.append(audio_feature_dir) + self.image_dir.append(image_dir) + self.uvs_dir.append(uvs_dir) + self.audio_ids.append(audio_ids[:min_len]) + self.image_ids.append(image_ids[:min_len]) + self.intrinsics.append(intrinsics) + self.extrinsics.append(extrinsics[:min_len]) + self.expressions.append(expressions[:min_len]) + self.identities.append(identity[:min_len]) + self.target_id.append(i) + + self.n_frames_total += min_len + elif opt.phase == 'val': + for i in range(0,len(self.val_sequence_names)): + target_id = self.val_sequence_targets[i] + dataroot = os.path.join(opt.dataroot, self.train_sequence_names[target_id]) + audio_feature_dir = os.path.join(opt.dataroot, self.val_sequence_names[i][0], 'audio_feature') + image_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'images') + uvs_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'uvs') + print('load val sequence:', self.val_sequence_names[i]) + print('\tidentity_dir:', dataroot) + print('\taudio_feature_dir:', audio_feature_dir) + print('\timage_dir:', image_dir) + print('\tuvs_dir:', uvs_dir) + audio_ids = make_ids(make_dataset(audio_feature_dir), os.path.join(opt.dataroot, self.val_sequence_names[i][0])) + image_ids = make_dataset_ids_png(image_dir) # [-1] * len(audio_ids) #make_ids(make_dataset(image_dir), dataroot) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(os.path.join(opt.dataroot, self.val_sequence_names[i][0])) + identity = load_identity(dataroot) + + min_len = min(len(audio_ids), len(image_ids), len(extrinsics), len(expressions)) + + 
self.audio_feature_dir.append(audio_feature_dir) + self.image_dir.append(image_dir) + self.uvs_dir.append(uvs_dir) + self.audio_ids.append(audio_ids[:min_len]) + self.image_ids.append(image_ids[:min_len]) + self.intrinsics.append(intrinsics) + self.extrinsics.append(extrinsics[:min_len]) + self.expressions.append(expressions[:min_len]) + self.identities.append(identity[:min_len]) + self.target_id.append(target_id) + + self.n_frames_total += min_len + else: # test + for i in range(0,len(self.test_sequence_names)): + target_id = self.test_sequence_targets[i] + dataroot = os.path.join(opt.dataroot, self.train_sequence_names[target_id]) + audio_feature_dir = os.path.join(opt.dataroot, self.test_sequence_names[i][0], 'audio_feature') + image_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'images') + uvs_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'uvs') + print('load test sequence:', self.test_sequence_names[i]) + print('\tidentity_dir:', dataroot) + print('\taudio_feature_dir:', audio_feature_dir) + print('\timage_dir:', image_dir) + print('\tuvs_dir:', uvs_dir) + audio_ids = make_ids(make_dataset(audio_feature_dir), os.path.join(opt.dataroot, self.test_sequence_names[i][0])) + image_ids = make_dataset_ids_png(image_dir) # [-1] * len(audio_ids) #make_ids(make_dataset(image_dir), dataroot) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(os.path.join(opt.dataroot, self.test_sequence_names[i][0])) + identity = load_identity(dataroot) + + min_len = min(len(audio_ids), len(image_ids), len(extrinsics), len(expressions)) + + self.audio_feature_dir.append(audio_feature_dir) + self.image_dir.append(image_dir) + self.uvs_dir.append(uvs_dir) + self.audio_ids.append(audio_ids[:min_len]) + self.image_ids.append(image_ids[:min_len]) + self.intrinsics.append(intrinsics) + self.extrinsics.append(extrinsics[:min_len]) + self.expressions.append(expressions[:min_len]) + self.identities.append(identity[:min_len]) + self.target_id.append(target_id) + + self.n_frames_total += min_len + + + print('frames_total:', self.n_frames_total) + + + #global_target_ids = [] + #for i in range(0,len(self.audio_ids)): + # for j in range(0,len(self.audio_ids[i])): + # global_target_ids.append(self.target_id[i]) + #global_target_ids=np.array(global_target_ids) + #self.weights = np.where(global_target_ids==2, 1.0 * np.ones((self.n_frames_total)), 0.01 * np.ones((self.n_frames_total)) ) + self.weights = [] + for i in range(0,len(self.audio_ids)): + l = len(self.audio_ids[i]) + for j in range(0,l): + self.weights.append(1.0 / l) + self.weights = np.array(self.weights) + + assert(opt.resize_or_crop == 'resize_and_crop') + + # mapping global to internal + self.mapping_global2internal = [] + self.mapping_global2internal_offset = [] + self.dsf = [] + offset = 0 + with progressbar.ProgressBar(max_value=self.n_frames_total) as bar: + for i in range(0,len(self.audio_ids)): + l = len(self.audio_ids[i]) + dsf_seq = [] + for k in range(0,l): + self.mapping_global2internal.append(i) + self.mapping_global2internal_offset.append(offset) + dsf_fname = os.path.join(self.audio_feature_dir[i], str(self.audio_ids[i][k]) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq.append(dsf_np.astype(np.float32)) + bar.update(offset + k) + self.dsf.append(dsf_seq) + offset += l + + + def getSampleWeights(self): + return self.weights + + def getitem(self, global_index): + + # select 
sequence + internal_sequence_id = self.mapping_global2internal[global_index] + sum_frames = self.mapping_global2internal_offset[global_index] + + # select frame from sequence + index = (global_index-sum_frames) % len(self.audio_ids[internal_sequence_id]) + + # get data ids + audio_id = self.audio_ids[internal_sequence_id][index] + image_id = self.image_ids[internal_sequence_id][index] + + #print('GET ITEM: ', index) + #img_path = self.frame_paths[sequence_id][index] + + # intrinsics and extrinsics + intrinsics = self.intrinsics[internal_sequence_id] + extrinsics = self.extrinsics[internal_sequence_id][image_id] + + # expressions + expressions = np.asarray(self.expressions[internal_sequence_id][audio_id], dtype=np.float32) + #print('expressions:', expressions.shape) + expressions[32] *= 0.0 # remove eye brow movements + expressions[41] *= 0.0 # remove eye brow movements + expressions[71:75] *= 0.0 # remove eye brow movements + expressions = torch.tensor(expressions) + + # identity + identity = torch.tensor(self.identities[internal_sequence_id][image_id]) + target_id = self.target_id[internal_sequence_id] # sequence id refers to the target sequence (of the training corpus) + + # load deepspeech feature + #print('audio_id', audio_id) + dsf_fname = os.path.join(self.audio_feature_dir[internal_sequence_id], str(audio_id) + '.deepspeech.npy') + + dsf_np = self.dsf[internal_sequence_id][index] + dsf = transforms.ToTensor()(dsf_np) + + # load sequence data if necessary + if self.opt.look_ahead:# use prev and following frame infos + r = self.opt.seq_len//2 + for i in range(1,r): # prev frames + index_seq = index - i + if index_seq < 0: index_seq = 0 + dsf_np = self.dsf[internal_sequence_id][index_seq] + dsf_seq = transforms.ToTensor()(dsf_np) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + for i in range(1,self.opt.seq_len - r + 1): # following frames + index_seq = index + i + max_idx = len(self.audio_ids[internal_sequence_id])-1 + if index_seq > max_idx: index_seq = max_idx + dsf_np = self.dsf[internal_sequence_id][index_seq] + dsf_seq = transforms.ToTensor()(dsf_np) # 1 x 16 x 29 + dsf = torch.cat([dsf, dsf_seq], 0) # seq_len x 16 x 29 + # note the ordering [old ... current ... future] + else: + for i in range(1,self.opt.seq_len): + index_seq = index - i + if index_seq < 0: index_seq = 0 + dsf_np = self.dsf[internal_sequence_id][index_seq] + dsf_seq = transforms.ToTensor()(dsf_np) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... 
current] + + #weight = 1.0 / len(self.audio_feature_dir[internal_sequence_id]) + weight = self.weights[global_index] + + return {'paths': dsf_fname, #img_path, + 'intrinsics': np.array(intrinsics), + 'extrinsics': np.array(extrinsics), + 'expressions': expressions, + 'identity': identity, + 'audio_deepspeech': dsf, # deepspeech feature + 'target_id':target_id, + 'internal_id':internal_sequence_id, + + 'weight': np.array([weight]).astype(np.float32)} + + + def __getitem__(self, global_index): + # select frame from sequence + index = global_index + current = self.getitem(index) + prv = self.getitem(max(index-1, 0)) + nxt = self.getitem(min(index+1, self.n_frames_total-1)) + + return { + 'paths': current['paths'], #img_path, + 'target_id': current['target_id'], + 'internal_id': current['internal_id'], + 'weight': current['weight'], + 'identity': current['identity'], + 'intrinsics': current['intrinsics'], + + 'extrinsics': current['extrinsics'], + 'expressions': current['expressions'], + 'audio_deepspeech': current['audio_deepspeech'], + + 'extrinsics_prv': prv['extrinsics'], + 'expressions_prv': prv['expressions'], + 'audio_deepspeech_prv': prv['audio_deepspeech'], + + 'extrinsics_nxt': nxt['extrinsics'], + 'expressions_nxt': nxt['expressions'], + 'audio_deepspeech_nxt': nxt['audio_deepspeech'], + } + + + def __len__(self): + return self.n_frames_total #len(self.frame_paths[0]) + + def name(self): + return 'MultiFaceAudioEQTmpCachedDataset' diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/multi_face_audio_eq_tmp_dataset.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/multi_face_audio_eq_tmp_dataset.py new file mode 100644 index 0000000..e42f155 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/data/multi_face_audio_eq_tmp_dataset.py @@ -0,0 +1,435 @@ +import os.path +import random +import torchvision.transforms as transforms +import torch +import numpy as np +from data.base_dataset import BaseDataset +from data.audio import Audio +#from data.image_folder import make_dataset +from PIL import Image + +#def make_dataset(dir): +# images = [] +# assert os.path.isdir(dir), '%s is not a valid directory' % dir +# for root, _, fnames in sorted(os.walk(dir)): +# for fname in fnames: +# if any(fname.endswith(extension) for extension in ['.bin', '.BIN']): +# path = os.path.join(root, fname) +# images.append(path) +# return sorted(images) + +def make_dataset(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.npy', '.NPY']): + #.deepspeech.npy + id_str = fname[:-15] #4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + + for id in ids: + fname=str(id)+'.deepspeech.npy' + path = os.path.join(root, fname) + images.append(path) + return images + +def make_ids(paths, root_dir): + ids = [] + + for fname in paths: + l = fname.rfind('/') + id_str = fname[l+1:-15]#4] + i = int(id_str) + #print(fname, ': ', i) + ids.append(i) + return ids + +def make_dataset_ids_png(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.png', '.png']): + id_str = fname[:-4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + return ids + +def load_intrinsics(input_dir): + file = open(input_dir+"/intrinsics.txt", "r") + 
intrinsics = [[(float(x) for x in line.split())] for line in file] + file.close() + intrinsics = list(intrinsics[0][0]) + return intrinsics + +def load_rigids(input_dir): + file = open(input_dir+"/rigid.txt", "r") + rigid_floats = [[float(x) for x in line.split()] for line in file] # note that it stores 5 lines per matrix (blank line) + file.close() + all_rigids = [ [rigid_floats[4*idx + 0],rigid_floats[4*idx + 1],rigid_floats[4*idx + 2],rigid_floats[4*idx + 3]] for idx in range(0, len(rigid_floats)//4) ] + return all_rigids + +def load_expressions(input_dir): + file = open(input_dir+"/expression.txt", "r") + expressions = [[float(x) for x in line.split()] for line in file] + file.close() + return expressions + +def load_identity(input_dir): + file = open(input_dir+"/identities.txt", "r") + identities = [[float(x) for x in line.split()] for line in file] + file.close() + return identities + + +class MultiFaceAudioEQTmpDataset(BaseDataset): + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + self.opt = opt + self.root = opt.dataroot + + # read dataset file that contains the filenames for the train, val and test lists + file = open(self.root+"/dataset.txt", "r") + self.filename_train_list, self.filename_val_list, self.filename_test_list = [str(line) for line in file] + file.close() + if self.filename_train_list[-1] == '\n': self.filename_train_list = self.filename_train_list[:-1] + if self.filename_val_list[-1] == '\n': self.filename_val_list = self.filename_val_list[:-1] + if self.filename_test_list[-1] == '\n': self.filename_test_list = self.filename_test_list[:-1] + + + + # get list of train sequences + file = open(self.root+"/" + self.filename_train_list, "r") + self.train_sequence_names = [str(line) for line in file] + file.close() + for i in range(0,len(self.train_sequence_names)): + if self.train_sequence_names[i][-1] == '\n': + self.train_sequence_names[i] = self.train_sequence_names[i][:-1] + + # get list of val sequences + file = open(self.root+"/" + self.filename_val_list, "r") + self.val_sequence_names = [[str(w) for w in line.split()] for line in file] + file.close() + for i in range(0,len(self.val_sequence_names)): + if self.val_sequence_names[i][0][-1] == '\n': self.val_sequence_names[i][0] = self.val_sequence_names[i][0][:-1] + if self.val_sequence_names[i][1][-1] == '\n': self.val_sequence_names[i][1] = self.val_sequence_names[i][1][:-1] + + # get list of test sequences + file = open(self.root+"/" + self.filename_test_list, "r") + self.test_sequence_names = [[str(w) for w in line.split()] for line in file] + if opt.output_audio_expressions: self.test_sequence_names = self.test_sequence_names[0:1] + file.close() + for i in range(0,len(self.test_sequence_names)): + if self.test_sequence_names[i][0][-1] == '\n': self.test_sequence_names[i][0] = self.test_sequence_names[i][0][:-1] + if self.test_sequence_names[i][1][-1] == '\n': self.test_sequence_names[i][1] = self.test_sequence_names[i][1][:-1] + + # print some stats + print('filename_train_list:', self.filename_train_list) + print('\tnum_seq:', len(self.train_sequence_names)) + print('filename_val_list: ', self.filename_val_list) + print('\tnum_seq:', len(self.val_sequence_names)) + print('filename_test_list: ', self.filename_test_list) + print('\tnum_seq:', len(self.test_sequence_names)) + + opt.train_sequence_names = self.train_sequence_names + opt.val_sequence_names = self.val_sequence_names + opt.test_sequence_names = self.test_sequence_names + + # search 
mapping from val, test to train sequences that are used as targets + self.val_sequence_targets = [] + for i in range(0,len(self.val_sequence_names)): + target_name = self.val_sequence_names[i][1] + target_id = -1 + for j in range(0,len(self.train_sequence_names)): + if self.train_sequence_names[j] == target_name: + target_id = j + break + if target_id == -1: + print('Target sequence not in train set! ', target_name) + exit() + self.val_sequence_targets.append(target_id) + + self.test_sequence_targets = [] + for i in range(0,len(self.test_sequence_names)): + target_name = self.test_sequence_names[i][1] + target_id = -1 + for j in range(0,len(self.train_sequence_names)): + if self.train_sequence_names[j] == target_name: + target_id = j + break + if target_id == -1: + print('Target sequence not in train set! ', target_name) + exit() + self.test_sequence_targets.append(target_id) + print('test: ', self.test_sequence_names[i]) + print('\t target:', target_id) + + # store len values + opt.nTrainObjects = len(self.train_sequence_names) + opt.nValObjects = len(self.val_sequence_names) + opt.nTestObjects = len(self.test_sequence_names) + + ################################################ + ################################################ + ################################################ + + # prepare dataloader paths / data + self.audio_feature_dir = [] + self.image_dir = [] + self.uvs_dir = [] + self.audio_ids = [] + self.image_ids = [] + self.intrinsics = [] + self.extrinsics = [] + self.expressions = [] + self.identities = [] + self.target_id = [] + self.n_frames_total = 0 + + if opt.phase == 'train': + self.sequence_names = self.train_sequence_names + for i in range(0,len(self.train_sequence_names)): + dataroot = os.path.join(opt.dataroot, self.train_sequence_names[i]) + audio_feature_dir = os.path.join(opt.dataroot, self.train_sequence_names[i], 'audio_feature') + image_dir = os.path.join(opt.dataroot, self.train_sequence_names[i], 'images') + uvs_dir = os.path.join(opt.dataroot, self.train_sequence_names[i], 'uvs') + print('load train sequence:', self.train_sequence_names[i]) + print('\tidentity_dir:', dataroot) + print('\taudio_feature_dir:', audio_feature_dir) + print('\timage_dir:', image_dir) + print('\tuvs_dir:', uvs_dir) + + audio_ids = make_ids(make_dataset(audio_feature_dir), dataroot) + image_ids = make_dataset_ids_png(image_dir) # [-1] * len(audio_ids) #make_ids(make_dataset(image_dir), dataroot) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(dataroot) + identity = load_identity(dataroot) + + min_len = min(len(audio_ids), len(image_ids), len(extrinsics), len(expressions)) + + self.audio_feature_dir.append(audio_feature_dir) + self.image_dir.append(image_dir) + self.uvs_dir.append(uvs_dir) + self.audio_ids.append(audio_ids[:min_len]) + self.image_ids.append(image_ids[:min_len]) + self.intrinsics.append(intrinsics) + self.extrinsics.append(extrinsics[:min_len]) + self.expressions.append(expressions[:min_len]) + self.identities.append(identity[:min_len]) + self.target_id.append(i) + + self.n_frames_total += min_len + elif opt.phase == 'val': + for i in range(0,len(self.val_sequence_names)): + target_id = self.val_sequence_targets[i] + dataroot = os.path.join(opt.dataroot, self.train_sequence_names[target_id]) + audio_feature_dir = os.path.join(opt.dataroot, self.val_sequence_names[i][0], 'audio_feature') + image_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'images') + uvs_dir = 
os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'uvs') + print('load val sequence:', self.val_sequence_names[i]) + print('\tidentity_dir:', dataroot) + print('\taudio_feature_dir:', audio_feature_dir) + print('\timage_dir:', image_dir) + print('\tuvs_dir:', uvs_dir) + audio_ids = make_ids(make_dataset(audio_feature_dir), os.path.join(opt.dataroot, self.val_sequence_names[i][0])) + image_ids = make_dataset_ids_png(image_dir) # [-1] * len(audio_ids) #make_ids(make_dataset(image_dir), dataroot) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(os.path.join(opt.dataroot, self.val_sequence_names[i][0])) + identity = load_identity(dataroot) + + min_len = min(len(audio_ids), len(image_ids), len(extrinsics), len(expressions)) + + self.audio_feature_dir.append(audio_feature_dir) + self.image_dir.append(image_dir) + self.uvs_dir.append(uvs_dir) + self.audio_ids.append(audio_ids[:min_len]) + self.image_ids.append(image_ids[:min_len]) + self.intrinsics.append(intrinsics) + self.extrinsics.append(extrinsics[:min_len]) + self.expressions.append(expressions[:min_len]) + self.identities.append(identity[:min_len]) + self.target_id.append(target_id) + + self.n_frames_total += min_len + else: # test + for i in range(0,len(self.test_sequence_names)): + target_id = self.test_sequence_targets[i] + dataroot = os.path.join(opt.dataroot, self.train_sequence_names[target_id]) + audio_feature_dir = os.path.join(opt.dataroot, self.test_sequence_names[i][0], 'audio_feature') + image_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'images') + uvs_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'uvs') + print('load test sequence:', self.test_sequence_names[i]) + print('\tidentity_dir:', dataroot) + print('\taudio_feature_dir:', audio_feature_dir) + print('\timage_dir:', image_dir) + print('\tuvs_dir:', uvs_dir) + audio_ids = make_ids(make_dataset(audio_feature_dir), os.path.join(opt.dataroot, self.test_sequence_names[i][0])) + image_ids = make_dataset_ids_png(image_dir) # [-1] * len(audio_ids) #make_ids(make_dataset(image_dir), dataroot) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(os.path.join(opt.dataroot, self.test_sequence_names[i][0])) + identity = load_identity(dataroot) + + min_len = min(len(audio_ids), len(image_ids), len(extrinsics), len(expressions)) + + self.audio_feature_dir.append(audio_feature_dir) + self.image_dir.append(image_dir) + self.uvs_dir.append(uvs_dir) + self.audio_ids.append(audio_ids[:min_len]) + self.image_ids.append(image_ids[:min_len]) + self.intrinsics.append(intrinsics) + self.extrinsics.append(extrinsics[:min_len]) + self.expressions.append(expressions[:min_len]) + self.identities.append(identity[:min_len]) + self.target_id.append(target_id) + + self.n_frames_total += min_len + + + print('frames_total:', self.n_frames_total) + + + #global_target_ids = [] + #for i in range(0,len(self.audio_ids)): + # for j in range(0,len(self.audio_ids[i])): + # global_target_ids.append(self.target_id[i]) + #global_target_ids=np.array(global_target_ids) + #self.weights = np.where(global_target_ids==2, 1.0 * np.ones((self.n_frames_total)), 0.01 * np.ones((self.n_frames_total)) ) + self.weights = [] + for i in range(0,len(self.audio_ids)): + l = len(self.audio_ids[i]) + for j in range(0,l): + self.weights.append(1.0 / l) + self.weights = np.array(self.weights) + + assert(opt.resize_or_crop == 'resize_and_crop') + + def 
getSampleWeights(self): + return self.weights + + def getitem(self, global_index): + + # select sequence + internal_sequence_id = 0 + sum_frames = 0 + for i in range(0,len(self.audio_ids)): + l = len(self.audio_ids[i]) + if (global_index-sum_frames) < l: + internal_sequence_id = i + break + else: + sum_frames += len(self.audio_ids[i]) + + # select frame from sequence + index = (global_index-sum_frames) % len(self.audio_ids[internal_sequence_id]) + + # get data ids + audio_id = self.audio_ids[internal_sequence_id][index] + image_id = self.image_ids[internal_sequence_id][index] + + #print('GET ITEM: ', index) + #img_path = self.frame_paths[sequence_id][index] + + # intrinsics and extrinsics + intrinsics = self.intrinsics[internal_sequence_id] + extrinsics = self.extrinsics[internal_sequence_id][image_id] + + # expressions + expressions = np.asarray(self.expressions[internal_sequence_id][audio_id], dtype=np.float32) + #print('expressions:', expressions.shape) + expressions[32] *= 0.0 # remove eye brow movements + expressions[41] *= 0.0 # remove eye brow movements + expressions[71:75] *= 0.0 # remove eye brow movements + expressions = torch.tensor(expressions) + + + + # identity + identity = torch.tensor(self.identities[internal_sequence_id][image_id]) + target_id = self.target_id[internal_sequence_id] # sequence id refers to the target sequence (of the training corpus) + + # load deepspeech feature + #print('audio_id', audio_id) + dsf_fname = os.path.join(self.audio_feature_dir[internal_sequence_id], str(audio_id) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf = transforms.ToTensor()(dsf_np.astype(np.float32)) + + # load sequence data if necessary + last_valid_idx = audio_id + for i in range(1,self.opt.seq_len): + index_seq = index - i + if index_seq < 0: index_seq = 0 + audio_id_seq = self.audio_ids[internal_sequence_id][index_seq] + if audio_id_seq == audio_id - i: last_valid_idx = audio_id_seq + else: audio_id_seq = last_valid_idx + + dsf_fname = os.path.join(self.audio_feature_dir[internal_sequence_id], str(audio_id_seq) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... 
current] + + + + #weight = 1.0 / len(self.audio_feature_dir[internal_sequence_id]) + weight = self.weights[global_index] + + return {'paths': dsf_fname, #img_path, + 'intrinsics': np.array(intrinsics), + 'extrinsics': np.array(extrinsics), + 'expressions': expressions, + 'identity': identity, + 'audio_deepspeech': dsf, # deepspeech feature + 'target_id':target_id, + 'internal_id':internal_sequence_id, + + 'weight': np.array([weight]).astype(np.float32)} + + + def __getitem__(self, global_index): + # select frame from sequence + index = global_index + current = self.getitem(index) + prv = self.getitem(max(index-1, 0)) + nxt = self.getitem(min(index+1, self.n_frames_total-1)) + + return { + 'paths': current['paths'], #img_path, + 'target_id': current['target_id'], + 'internal_id': current['internal_id'], + 'weight': current['weight'], + 'identity': current['identity'], + 'intrinsics': current['intrinsics'], + + 'extrinsics': current['extrinsics'], + 'expressions': current['expressions'], + 'audio_deepspeech': current['audio_deepspeech'], + + 'extrinsics_prv': prv['extrinsics'], + 'expressions_prv': prv['expressions'], + 'audio_deepspeech_prv': prv['audio_deepspeech'], + + 'extrinsics_nxt': nxt['extrinsics'], + 'expressions_nxt': nxt['expressions'], + 'audio_deepspeech_nxt': nxt['audio_deepspeech'], + } + + + def __len__(self): + return self.n_frames_total #len(self.frame_paths[0]) + + def name(self): + return 'MultiFaceAudioEQTmpDataset' diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/inference_renderer.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/inference_renderer.py new file mode 100644 index 0000000..a8ceb25 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/inference_renderer.py @@ -0,0 +1,221 @@ +import copy +import numpy as np +import torch +import torchvision.transforms as transforms +import sys +import os +import cv2 + +from tqdm import tqdm +from scipy.ndimage import binary_fill_holes +from scipy.ndimage import binary_erosion + +from data.custom_aligned_dataset import CustomAlignedDataset +from options.test_options import TestOptions +from models import create_model +from util.visualizer import InferenceVisualizer + +# deca import +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../third/DECA/'))) +from decalib.deca import * +from decalib.utils.config import cfg as deca_cfg + +class InferenceManager: + + def __init__(self, opt): + self.opt = opt + + deca_cfg.model.use_tex = False + self.deca = DECA(config=deca_cfg, device="cuda" if torch.cuda.is_available() else "cpu") + + self.model = create_model(opt) + self.model.setup(opt) + + # self.new_expressions = np.loadtxt(opt.expr_path) + # print(self.new_expressions.shape) + + if opt.textureModel == 'DynamicNeuralTextureAudio': + self.load_source = True + opt_source = copy.copy(opt) + opt_source.dataroot = opt.source_dataroot + self.dataset_source = CustomAlignedDataset() + self.dataset_source.initialize(opt_source) + opt_target = copy.copy(opt) + opt_target.dataroot = opt.target_dataroot + self.dataset_target = CustomAlignedDataset() + self.dataset_target.initialize(opt_target) + + else: + self.load_source = False + opt_target = copy.copy(opt) + opt_target.dataroot = opt.target_dataroot + self.dataset_target = CustomAlignedDataset() + self.dataset_target.initialize(opt_target) + opt_source = copy.copy(opt) + opt_source.dataroot = opt.source_dataroot + self.dataset_source = CustomAlignedDataset() + self.dataset_source.initialize(opt_source) + + + 
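+        # frame_id_source / frame_id_target of -1 mean "render every frame";
+        # when both are >= 0 they are treated as the start and end of the
+        # frame range instead (see run_inference below).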
self.frame_id_source = opt.frame_id_source
+        self.frame_id_target = opt.frame_id_target
+        print("Running inference with the following config:")
+        print("SOURCE: frame {} in {}".format(self.frame_id_source, opt.source_dataroot))
+        print("TARGET: frame {} in {}".format(self.frame_id_target, opt.target_dataroot))
+
+        self.visualizer = InferenceVisualizer(opt)
+
+
+    def get_codedict(self, frame_id):
+
+        codedict = torch.load(os.path.join(self.opt.dataroot, 'DECA_codedicts', f'codedict_{frame_id}.pt'))
+
+        return codedict
+
+    def get_expr(self, frame_id):
+
+        expr = np.load(os.path.join(self.opt.expr_path, f'expr_{frame_id}.npy'))[0]
+
+        return expr
+
+    def preprocess_uv(self, uv):
+        IMG_DIM_Y = 256
+        IMG_DIM_X = 256
+
+        if uv.shape[0] != IMG_DIM_Y or uv.shape[1] != IMG_DIM_X:
+            # print("UV needs resizing from {} to {},{}".format(uv.shape, IMG_DIM_Y, IMG_DIM_X))
+            uv = cv2.resize(uv, (IMG_DIM_Y, IMG_DIM_X))
+
+        # issue only happens with multiple concurrent reads
+        if not (-1 <= uv.min() and uv.max() <= 1):
+            print("invalid UV range: min {} max {}".format(uv.min(), uv.max()))
+
+        assert -1 <= uv.min() and uv.max() <= 1, "UV not in range [-1, 1]! min: {} max: {}".format(uv.min(), uv.max())
+        uv_tensor = transforms.ToTensor()(uv.astype(np.float32))
+
+        return uv_tensor.cuda()
+
+    def preprocess_deca_details(self, deca_details):
+        IMG_DIM_Y = 256
+        IMG_DIM_X = 256
+
+        if deca_details.shape[0] != IMG_DIM_Y or deca_details.shape[1] != IMG_DIM_X:
+            # print("DECA details need resizing from {} to {},{}".format(deca_details.shape, IMG_DIM_Y, IMG_DIM_X))
+            deca_details = cv2.resize(deca_details, (IMG_DIM_Y, IMG_DIM_X))
+
+        deca_details_tensor = transforms.ToTensor()(deca_details.astype(np.float32))
+
+        return deca_details_tensor.cuda()
+
+    def preprocess_mask(self, mask):
+        IMG_DIM_Y = 256
+        IMG_DIM_X = 256
+
+        if mask.shape[0] != IMG_DIM_Y or mask.shape[1] != IMG_DIM_X:
+            # print("Mask needs resizing from {} to {},{}".format(mask.shape, IMG_DIM_Y, IMG_DIM_X))
+            mask = cv2.resize(mask, (IMG_DIM_Y, IMG_DIM_X))
+
+        mask = mask > 0
+        mask = binary_fill_holes(mask).astype(np.float32)
+        mask = binary_erosion(mask, iterations=8)
+        mask_tensor = transforms.ToTensor()(mask.astype(np.float32))
+
+        return mask_tensor.cuda()
+
+    def run_frame(self, frame_id_source, frame_id_target):
+
+        new_expr = self.get_expr(frame_id_source)
+        codedict = self.get_codedict(frame_id_target)
+
+        # Render new UV map with shape and pose from target, and expression from source
+        exp = torch.tensor(new_expr[:-3], dtype=torch.float32).cuda().unsqueeze(0)
+
+        pose = codedict['pose'][:, :-3]
+        jaw_pose = torch.tensor(new_expr[-3:], dtype=torch.float32).cuda().unsqueeze(0)
+        new_pose = torch.cat((pose, jaw_pose), dim=1).cuda()
+
+        if self.opt.deca_details:
+            uv, deca_details = self.deca.render_uv_details(codedict, exp, new_pose)
+            mask = self.deca.render_mask(uv)
+            mask = self.preprocess_mask(mask.cpu().detach().numpy())
+            uv = self.preprocess_uv(uv[0].cpu().numpy())
+            deca_details = self.preprocess_deca_details(deca_details[0].permute(1, 2, 0).cpu().detach().numpy())
+
+            uv = uv * mask
+            deca_details = deca_details * mask
+
+        else:
+            uv = self.deca.render_uv(codedict, exp, new_pose)[0]
+            uv = self.preprocess_uv(uv.cpu().numpy())
+
+        if self.load_source:
+            source = self.dataset_source[frame_id_source]
+            target = self.dataset_target[frame_id_target]
+
+            item = source
+
+            item["SOURCE"] = source['TARGET'].cuda().unsqueeze(0)
+            item["TARGET"] = target['TARGET'].cuda().unsqueeze(0)
+            item["UV"] = uv.cuda().unsqueeze(0)
+            item["expressions"] =
torch.tensor(new_expr).cuda().unsqueeze(0) + item["audio_deepspeech"] = item["audio_deepspeech"].cuda().unsqueeze(0) + + else: + target = self.dataset_target[frame_id_target] + + item = target + + item["SOURCE"] = target['TARGET'].cuda().unsqueeze(0) + item["TARGET"] = target['TARGET'].cuda().unsqueeze(0) + item["UV"] = uv.cuda().unsqueeze(0) + item["expressions"] = torch.tensor(new_expr).cuda().unsqueeze(0) + item["audio_deepspeech"] = item["audio_deepspeech"].cuda().unsqueeze(0) + + if self.opt.deca_details: + item["deca_details"] = deca_details.cuda().unsqueeze(0) + + self.model.set_input(item) + self.model.forward() + results = self.model.get_current_visuals() + + # results = None + + return results + + + def run_inference(self): + + # Run inference for all frames + try: + n_frames = min(len(self.dataset_source), len(self.dataset_target)) + + except AttributeError: + n_frames = len(self.dataset_target) + + if self.frame_id_source >= 0 and self.frame_id_target >= 0: + + for frame_id in tqdm(range(self.frame_id_source, self.frame_id_target)): + results = self.run_frame(frame_id, frame_id) + self.visualizer.reset() + self.visualizer.display_current_results(results, frame_id, save_result=True) + + else: + + for frame_id in tqdm(range(n_frames)): + results = self.run_frame(frame_id, frame_id) + self.visualizer.reset() + self.visualizer.display_current_results(results, frame_id, save_result=True) + + +if __name__ == '__main__': + # training dataset + opt = TestOptions().parse() + opt.serial_batches = True + opt.display_id = 0 + + # model + manager = InferenceManager(opt) + manager.run_inference() + + + diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/inference_renderer.sh b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/inference_renderer.sh new file mode 100644 index 0000000..d2cb02f --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/inference_renderer.sh @@ -0,0 +1,91 @@ +set -ex + +DP=/home/alberto/NeuralVoicePuppetry/datasets/SRF_anchor_short +DT=/home/alberto/NeuralVoicePuppetry/datasets/TRANSFERS/ +TRANSFER_PATH=$DT/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead +CHECKPOINT_DIR=/home/alberto/NeuralVoicePuppetry/NeuralRenderingNetwork/checkpoints + +FILE_ID_SOURCE=Italian_vicky_audio +FILE_ID_TARGET=Halbtotale_355_9415 + +# . 
train_audio2expressionsAttentionTMP.sh & +#EROSION=0.2 +NAME=DynamicNeuralTextures-UNET_6_level-DynamicNeuralTextureExpression-SL8-BS8-Halbtotale_355_9415.h5-custom_aligned-VGG-20210302-144030-look_ahead +EROSION=0.6 + +GPUID=0 +DATASET_MODE=custom_aligned + +DATAROOT=$DP/$FILE_ID_TARGET/ +TARGET_DATAROOT=$DP/$FILE_ID_TARGET/$FILE_ID_TARGET.h5 +SOURCE_DATAROOT=$DP/$FILE_ID_SOURCE/$FILE_ID_SOURCE.h5 +EXPR_PATH=$TRANSFER_PATH/$FILE_ID_SOURCE--$FILE_ID_TARGET/expression.txt + +IMAGES_TARGET_DIR=/home/alberto/NeuralVoicePuppetry/results/inference/$NAME/$FILE_ID_SOURCE"_to_"$FILE_ID_TARGET + +# neural texture, not used here +TEX_DIM=128 +TEX_FEATURES=16 +TEXTUREMODEL=DynamicNeuralTextureExpression +INPUT_NC=2 +NUM_THREADS=1 + +LOSS=VGG + +# models +MODEL=DynamicNeuralTextures +RENDERER_TYPE=UNET_6_level # There are many more of these TODO + +BATCH_SIZE=1 +SEQ_LEN=8 + +################################################################################ +################################################################################ +################################################################################ + + +#DISPLAY_NAME=DynamicNeuralTextures-UNET_6_level-SL8-BS16-201125_TV-20190114-2245-5701_24fps_cropped_short.h5-custom_aligned-RMS-20201123-185215-look_ahead +DISPLAY_ID=0 + +FRAME_ID_SOURCE=-1 +FRAME_ID_TARGET=-1 +#FRAME_ID_SOURCE=6540 +#FRAME_ID_TARGET=6540 + +# training +# --input_noise_augmentation +echo "---------" + +CUDA_VISIBLE_DEVICES=0 python \ +inference_renderer.py \ +--num_threads $NUM_THREADS \ +--input_nc $INPUT_NC \ +--look_ahead \ +--seq_len $SEQ_LEN \ +--no_augmentation \ +--name $NAME \ +--checkpoints_dir $CHECKPOINT_DIR \ +--erosionFactor $EROSION \ +--tex_dim $TEX_DIM \ +--tex_features $TEX_FEATURES \ +--rendererType $RENDERER_TYPE \ +--lossType $LOSS \ +--model $MODEL \ +--netG unet_256 \ +--dataset_mode $DATASET_MODE \ +--norm instance \ +--gpu_ids $GPUID \ +--batch_size $BATCH_SIZE \ +--epoch latest \ +--textureModel $TEXTUREMODEL \ +--dataroot $DATAROOT \ +--target_dataroot $TARGET_DATAROOT \ +--source_dataroot $SOURCE_DATAROOT \ +--expr_path $EXPR_PATH \ +--images_target_dir $IMAGES_TARGET_DIR \ +--frame_id_source $FRAME_ID_SOURCE \ +--frame_id_target $FRAME_ID_TARGET \ + +################################################################################ +################################################################################ +################################################################################ diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/inference_renderer_deca_details.sh b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/inference_renderer_deca_details.sh new file mode 100644 index 0000000..f2cffde --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/inference_renderer_deca_details.sh @@ -0,0 +1,113 @@ +set -ex + +DP=/home/alberto/NeuralVoicePuppetry/datasets/SRF_anchor_short +DPS=/home/alberto/NeuralVoicePuppetry/datasets/External +DT=/home/alberto/NeuralVoicePuppetry/datasets/TRANSFERS/ +TRANSFER_PATH=$DT/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead +CHECKPOINT_DIR=/home/alberto/NeuralVoicePuppetry/NeuralRenderingNetwork/checkpoints + +#FILE_ID_SOURCE=Halbtotale_355_9414 +#FILE_ID_SOURCE=Italian_vicky_audio + +FILE_ID_SOURCE_LIST=(Alberto_videos_uno Alberto_videos_due Alberto_videos_tre) +#FILE_ID_SOURCE_LIST=(Clara_audios_two) +#FILE_ID_TARGET=Halbtotale_355_9415 +FILE_ID_TARGET=Youtube_Russian_guy + +# . 
train_audio2expressionsAttentionTMP.sh & +#EROSION=0.2 +# Name for standard model +NAME=DynamicNeuralTextures-UNET_6_level-DynamicNeuralTextureExpression-SL8-BS8-Halbtotale_355_9415.h5-custom_aligned-VGG-20210302-144030-look_ahead +# Name for deca details model +NAME=DynamicNeuralTextures-UNET_6_level-DynamicNeuralTextureExpression-SL8-BS8-Halbtotale_355_9415.h5-custom_aligned-VGG-20210308-004445-look_ahead +# Name for deca details masked model +NAME=DynamicNeuralTextures-UNET_6_level-DynamicNeuralTextureExpression-SL8-BS8-Halbtotale_355_9415.h5-custom_aligned-VGG-20210309-184531-look_ahead_masked +# Name for deca details of masked mouth SRF moderator +#NAME=DynamicNeuralTextures-UNET_6_level-DynamicNeuralTextureExpression-SL8-BS8-Halbtotale_355_9415.h5-custom_aligned-VGG-20210311-095742-look_ahead_mask_mouth +# Name for deca details of masked mouth Russian guy +#NAME=DynamicNeuralTextures-UNET_6_level-DynamicNeuralTextureExpression-SL8-BS8-Youtube_Russian_guy.h5-custom_aligned-VGG-20210315-231511-look_ahead_mask_mouth + +EROSION=0.6 + +GPUID=0 +DATASET_MODE=custom_aligned + +for FILE_ID_SOURCE in "${FILE_ID_SOURCE_LIST[@]}" +do + DATAROOT=$DPS/$FILE_ID_TARGET/ + TARGET_DATAROOT=$DPS/$FILE_ID_TARGET/$FILE_ID_TARGET.h5 + #SOURCE_DATAROOT=$DP/$FILE_ID_SOURCE/$FILE_ID_SOURCE.h5 + SOURCE_DATAROOT=$DPS/$FILE_ID_SOURCE/$FILE_ID_SOURCE.h5 + EXPR_PATH=$TRANSFER_PATH/$FILE_ID_SOURCE--$FILE_ID_TARGET/expression.txt + + IMAGES_TARGET_DIR=/home/alberto/NeuralVoicePuppetry/results/inference/$NAME/$FILE_ID_SOURCE"_to_"$FILE_ID_TARGET + + # neural texture, not used here + TEX_DIM=128 + TEX_FEATURES=16 + TEXTUREMODEL=DynamicNeuralTextureExpression + INPUT_NC=5 + NUM_THREADS=1 + + LOSS=VGG + + # models + MODEL=DynamicNeuralTextures + RENDERER_TYPE=UNET_6_level # There are many more of these TODO + + BATCH_SIZE=1 + SEQ_LEN=8 + + ################################################################################ + ################################################################################ + ################################################################################ + + + #DISPLAY_NAME=DynamicNeuralTextures-UNET_6_level-SL8-BS16-201125_TV-20190114-2245-5701_24fps_cropped_short.h5-custom_aligned-RMS-20201123-185215-look_ahead + DISPLAY_ID=0 + + # Used as start and end + FRAME_ID_SOURCE=-1 + FRAME_ID_TARGET=-1 + + + # training + # --input_noise_augmentation + echo "---------" + + CUDA_VISIBLE_DEVICES=0 python \ + inference_renderer.py \ + --num_threads $NUM_THREADS \ + --input_nc $INPUT_NC \ + --look_ahead \ + --seq_len $SEQ_LEN \ + --no_augmentation \ + --name $NAME \ + --checkpoints_dir $CHECKPOINT_DIR \ + --erosionFactor $EROSION \ + --tex_dim $TEX_DIM \ + --tex_features $TEX_FEATURES \ + --rendererType $RENDERER_TYPE \ + --lossType $LOSS \ + --model $MODEL \ + --netG unet_256 \ + --dataset_mode $DATASET_MODE \ + --norm instance \ + --gpu_ids $GPUID \ + --batch_size $BATCH_SIZE \ + --epoch latest \ + --textureModel $TEXTUREMODEL \ + --dataroot $DATAROOT \ + --target_dataroot $TARGET_DATAROOT \ + --source_dataroot $SOURCE_DATAROOT \ + --expr_path $EXPR_PATH \ + --images_target_dir $IMAGES_TARGET_DIR \ + --frame_id_source $FRAME_ID_SOURCE \ + --frame_id_target $FRAME_ID_TARGET \ + --deca_details + +done + +################################################################################ +################################################################################ +################################################################################ diff --git 
a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/DynamicNeuralTextures_model.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/DynamicNeuralTextures_model.py new file mode 100644 index 0000000..8cf232b --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/DynamicNeuralTextures_model.py @@ -0,0 +1,328 @@ +from util.image_pool import ImagePool +import torch +################ +### HELPER ### +################ +# from BaselModel.basel_model import * +from . import NeuralTexture +from . import UNET +from . import VGG_LOSS +from . import networks +from .base_model import BaseModel +import matplotlib.pyplot as plt +import numpy as np + +INVALID_UV = 0 + + +def define_Texture(opt, gpu_ids=[]): + net = None + + if opt.textureModel == 'DynamicNeuralTextureAudio': + net = NeuralTexture.DynamicNeuralTextureAudio(texture_dimensions=opt.tex_dim, texture_features_intermediate=opt.tex_features_intermediate, texture_features=opt.tex_features) + elif opt.textureModel == 'DynamicNeuralTextureExpression': + net = NeuralTexture.DynamicNeuralTextureExpression(texture_dimensions=opt.tex_dim, texture_features_intermediate=opt.tex_features_intermediate, texture_features=opt.tex_features) + elif opt.textureModel == 'StaticNeuralTexture': + net = NeuralTexture.StaticNeuralTexture(texture_dimensions=opt.tex_dim, texture_features=opt.tex_features) + + return networks.init_net(net, opt.init_type, opt.init_gain, gpu_ids) + + +def define_TextureDecoder(renderer, n_feature, ngf, norm='batch', use_dropout=False, init_type='normal', init_gain=0.02, gpu_ids=[]): + net = None + norm_layer = networks.get_norm_layer(norm_type=norm) + N_OUT = 3 + #renderer=='UNET_5_level' + net = UNET.UnetRenderer(renderer, n_feature, N_OUT, ngf, norm_layer=norm_layer, use_dropout=use_dropout) + + return networks.init_net(net, init_type, init_gain, gpu_ids) + + +def define_Inpainter(renderer, n_feature, ngf, norm='batch', use_dropout=False, init_type='normal', init_gain=0.02, gpu_ids=[]): + net = None + norm_layer = networks.get_norm_layer(norm_type=norm) + N_OUT = 3 + #renderer=='UNET_5_level' + net = UNET.UnetRenderer(renderer, n_feature, N_OUT, ngf, norm_layer=norm_layer, use_dropout=use_dropout) + + return networks.init_net(net, init_type, init_gain, gpu_ids) + + + +class DynamicNeuralTexturesModel(BaseModel): + def name(self): + return 'DynamicNeuralTexturesModel' + + @staticmethod + def modify_commandline_options(parser, is_train=True): + + # changing the default values to match the pix2pix paper + # (https://phillipi.github.io/pix2pix/) + #parser.set_defaults(norm='batch', netG='unet_256') + parser.set_defaults(norm='instance', netG='unet_256') + parser.set_defaults(dataset_mode='aligned') + if is_train: + parser.set_defaults(pool_size=0, no_lsgan=True) + parser.add_argument('--lambda_L1', type=float, default=100.0, help='weight for L1 loss') + + return parser + + def initialize(self, opt): + BaseModel.initialize(self, opt) + self.isTrain = opt.isTrain + + self.trainRenderer = not opt.fix_renderer + + # specify the training losses you want to print out. The program will call base_model.get_current_losses + self.loss_names = ['G_total', 'G_L1_Rendering', 'G_VGG_Rendering', 'G_GAN'] + + # specify the images you want to save/display. 
The program will call base_model.get_current_visuals + if self.isTrain: + self.visual_names = ['input_uv', 'features', 'fake', 'target'] + else: + self.visual_names = ['input_uv', 'fake', 'target', 'source'] + + # specify the models you want to save to the disk. The program will call base_model.save_networks and base_model.load_networks + if self.isTrain: + self.model_names = ['texture', 'texture_decoder', 'inpainter' ,'netD'] + else: # during test time, only load Gs + self.model_names = ['texture', 'texture_decoder', 'inpainter'] + + # load/define networks + self.texture = define_Texture(opt, self.gpu_ids) + if self.opt.deca_details: + self.texture_decoder = define_TextureDecoder(opt.rendererType, opt.tex_features + 6, opt.ngf, opt.norm, not opt.no_dropout, opt.init_type, opt.init_gain, self.gpu_ids) + else: + self.texture_decoder = define_TextureDecoder(opt.rendererType, opt.tex_features + 3, opt.ngf, opt.norm, + not opt.no_dropout, opt.init_type, opt.init_gain, self.gpu_ids) + self.inpainter = define_Inpainter(opt.rendererType, 6, opt.ngf, opt.norm, not opt.no_dropout, opt.init_type, opt.init_gain, self.gpu_ids) + + + # optimizer + self.loss_G_GAN = 0.0 + + if self.isTrain: + use_sigmoid = opt.no_lsgan + print(opt.input_nc) + print(opt.output_nc) + print(opt.input_nc + opt.output_nc) + self.netD = networks.define_D(opt.input_nc + opt.output_nc, opt.ndf, opt.netD, opt.n_layers_D, opt.norm, use_sigmoid, opt.init_type, opt.init_gain, self.gpu_ids) + self.fake_AB_pool = ImagePool(opt.pool_size) + # define loss functions + self.criterionGAN = networks.GANLoss(use_lsgan=not opt.no_lsgan).to(self.device) + self.criterionL1 = torch.nn.L1Loss(reduction='mean') + self.criterionL1Smooth = torch.nn.SmoothL1Loss(reduction='mean') + self.criterionL2 = torch.nn.MSELoss(reduction='mean') + + if self.opt.lossType == 'VGG': + self.vggloss = VGG_LOSS.VGGLOSS().to(self.device) + + # initialize optimizers + self.optimizers = [] + + self.optimizer_texture_decoder = torch.optim.Adam(self.texture_decoder.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999)) + self.optimizers.append(self.optimizer_texture_decoder) + + self.optimizer_inpainter = torch.optim.Adam(self.inpainter.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999)) + self.optimizers.append(self.optimizer_inpainter) + + self.optimizer_D = torch.optim.Adam(self.netD.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999)) + self.optimizers.append(self.optimizer_D) + + self.optimizer_T = torch.optim.Adam(self.texture.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999)) + self.optimizers.append(self.optimizer_T) + + + def maskErosion(self, mask, erosionFactor): + offsetY = int(erosionFactor * 40) + # throat + mask2 = mask[:,:,0:-offsetY,:] + mask2 = torch.cat([torch.ones_like(mask[:,:,0:offsetY,:]), mask2], 2) + # forehead + offsetY = int(erosionFactor * 8) #<<<< + mask3 = mask[:,:,offsetY:,:] + mask3 = torch.cat([mask3, torch.ones_like(mask[:,:,0:offsetY,:])], 2) + mask = mask * mask2 * mask3 + + offsetX = int(erosionFactor * 15) + # left + mask4 = mask[:,:,:,0:-offsetX] + mask4 = torch.cat([torch.ones_like(mask[:,:,:,0:offsetX]), mask4], 3) + # right + mask5 = mask[:,:,:,offsetX:] + mask5 = torch.cat([mask5,torch.ones_like(mask[:,:,:,0:offsetX])], 3) + return mask * mask4 * mask5 + + def set_input(self, input): + self.target = input['TARGET'].to(self.device) + self.input_uv = input['UV'].to(self.device) + # self.intrinsics = input['intrinsics'] + # self.extrinsics = input['extrinsics'] + self.expressions = input['expressions'].cuda() + if self.opt.deca_details : 
+ self.deca_details = input['deca_details'].cuda() + + # self.image_paths = input['paths'] + self.audio_features = input['audio_deepspeech'].cuda() + + if "SOURCE" in input: + self.source = input["SOURCE"].to(self.device) + if "REFERENCE_POSE" in input: + self.reference_pose = input["REFERENCE_POSE"].to(self.device) + + ## in training phase introduce some noise + #if self.isTrain: + # if self.opt.input_noise_augmentation: + # audio_noise = torch.randn_like(self.audio_features)*0.05 # check magnitude of noise + # self.audio_features = self.audio_features + audio_noise + + + def forward(self, alpha=1.0): + # background + mask = (self.input_uv[:,0:1,:,:] == INVALID_UV) & (self.input_uv[:,1:2,:,:] == INVALID_UV) + mask = self.maskErosion(mask, self.opt.erosionFactor) + mask = torch.cat([mask,mask,mask], 1) + self.background = torch.where(mask, self.target, torch.zeros_like(self.target)) + + if 0: + fig = plt.figure() + fig.add_subplot(241) + plt.imshow(self.input_uv[0,0:1,:,:].permute(1, 2, 0).cpu().numpy()) + fig.add_subplot(242) + plt.imshow(self.input_uv[0, 1:2, :, :].permute(1, 2, 0).cpu().numpy()) + fig.add_subplot(243) + plt.imshow(mask[0].permute(1, 2, 0).cpu().numpy().astype(np.float32)) + fig.add_subplot(244) + plt.imshow((((self.background[0].permute(1, 2, 0).cpu().numpy() + 1.) / 2.) * 255.).astype(np.uint8)) + fig.add_subplot(245) + plt.imshow((((self.target[0].permute(1, 2, 0).cpu().numpy() + 1.) / 2.) * 255.).astype(np.uint8)) + fig.add_subplot(246) + plt.imshow(torch.zeros_like(self.target)[0].permute(1, 2, 0).cpu().numpy()) + plt.show() + + # loop over batch elements + batch_size = self.target.shape[0] + self.features = [] + self.intermediate_fake = [] + self.fake = [] + for b in range(0,batch_size): + feat = self.texture(self.expressions[b:b+1], self.audio_features[b:b+1], self.input_uv[b:b+1]) + + if self.opt.deca_details: + feat = torch.cat((feat, self.deca_details[b:b+1]), dim=1) + + + self.features.append(feat) + + + intermediate_fake = self.texture_decoder(self.expressions[b:b+1], self.audio_features[b:b+1], feat, self.background[b:b+1]) + self.intermediate_fake.append(intermediate_fake) + + fake = self.inpainter(self.expressions[b:b+1], self.audio_features[b:b+1], intermediate_fake, self.background[b:b+1]) + self.fake.append(fake) + + + self.features = torch.cat(self.features, dim=0) + self.intermediate_fake = torch.cat(self.intermediate_fake, dim=0) + self.fake = torch.cat(self.fake, dim=0) + + self.fake = torch.where(mask, self.background, self.fake) + + + def backward_D(self): + mask = ( (self.input_uv[:,0:1,:,:] != INVALID_UV) | (self.input_uv[:,1:2,:,:] != INVALID_UV) ) + mask = torch.cat([mask,mask,mask], 1) + + def masked(img): + return torch.where(mask, img, torch.zeros_like(img)) + # Fake + # stop backprop to the generator by detaching fake_B + if self.opt.deca_details: + fake_AB = self.fake_AB_pool.query(torch.cat((self.input_uv, self.deca_details, masked(self.fake)), 1)) + + else: + fake_AB = self.fake_AB_pool.query(torch.cat((self.input_uv, masked(self.fake)), 1)) + + pred_fake = self.netD(fake_AB.detach()) + self.loss_D_fake = self.criterionGAN(pred_fake, False) + + + # Real + if self.opt.deca_details: + real_AB = torch.cat((self.input_uv, self.deca_details, masked(self.target)), 1) + + else: + real_AB = torch.cat((self.input_uv, masked(self.target)), 1) + + pred_real = self.netD(real_AB) + self.loss_D_real = self.criterionGAN(pred_real, True) + + + # Combined loss + self.loss_D = (self.loss_D_fake + self.loss_D_real) * 0.5 + + self.loss_D.backward() + + def 
backward_G(self, epoch): + + mask = ( (self.input_uv[:,0:1,:,:] != INVALID_UV) | (self.input_uv[:,1:2,:,:] != INVALID_UV) ) + sum_mask = torch.sum(mask) + d = mask.shape[1] + mask_weight = (d*d) / sum_mask + mask = torch.cat([mask,mask,mask], 1) + def masked(img): + return torch.where(mask, img, torch.zeros_like(img)) + + # First, G(A) should fake the discriminator + if self.opt.deca_details: + fake_AB = torch.cat((self.input_uv, self.deca_details, masked(self.fake)), 1) + + else: + fake_AB = torch.cat((self.input_uv, masked(self.fake)), 1) + + pred_fake = self.netD(fake_AB) + self.loss_G_GAN = self.criterionGAN(pred_fake, True) * 0.0 # disabled GAN + + + # Second, G(A) = B + self.loss_G_L1_Rendering = 0.0 + self.loss_G_L1_Rendering = 1.0 * self.criterionL1(masked(self.features[:,0:3,:,:]), masked(self.target) ) * mask_weight + self.loss_G_L1_Rendering += 5.0 * self.criterionL1(masked(self.intermediate_fake), masked(self.target) ) * mask_weight + self.loss_G_L1_Rendering += 10.0 * self.criterionL1(self.fake, self.target) + + self.loss_G_VGG_Rendering = 0.0 + if self.opt.lossType == 'VGG': + self.loss_G_VGG_Rendering += 10.0 * self.vggloss(self.fake, self.target) + + self.loss_G_total = self.loss_G_L1_Rendering + self.loss_G_VGG_Rendering + self.loss_G_GAN + + self.loss_G_total.backward() + + def optimize_parameters(self, epoch_iter): + alpha = (epoch_iter-5) / 50.0 + if alpha < 0.0: alpha = 0.0 + if alpha > 1.0: alpha = 1.0 + self.forward(alpha) + + + updateDiscriminator = self.loss_G_GAN < 1.0#0.1 + + # update Discriminator + if updateDiscriminator: + self.set_requires_grad(self.netD, True) + self.optimizer_D.zero_grad() + self.backward_D() + self.optimizer_D.step() + + # update Generator + self.set_requires_grad(self.netD, False) + self.optimizer_texture_decoder.zero_grad() + self.optimizer_inpainter.zero_grad() + self.optimizer_T.zero_grad() + + self.backward_G(epoch_iter) + + self.optimizer_texture_decoder.step() + self.optimizer_inpainter.step() + self.optimizer_T.step() + diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/NeuralTexture.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/NeuralTexture.py new file mode 100644 index 0000000..0f0b56a --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/NeuralTexture.py @@ -0,0 +1,134 @@ +import os +import torch +import torch.nn as nn +import torchvision.transforms as transforms +from util.image_pool import ImagePool +from .base_model import BaseModel +from . 
import networks
+import numpy as np
+import functools
+from PIL import Image
+from util import util
+from torchvision import models
+from collections import namedtuple
+
+################
+###  HELPER  ###
+################
+# from BaselModel.basel_model import *
+INVALID_UV = -1.0
+
+
+
+#####################################
+########  static texture  #########
+#####################################
+class StaticNeuralTexture(nn.Module):
+    def __init__(self, texture_dimensions, texture_features):
+        super(StaticNeuralTexture, self).__init__()
+        self.texture_dimensions = texture_dimensions  #256 #texture dimensions
+        self.out_ch = texture_features  # output feature, after evaluating the texture
+
+        self.register_parameter('data', torch.nn.Parameter(torch.randn(1, self.out_ch, self.texture_dimensions, self.texture_dimensions, requires_grad=True)))
+
+    def forward(self, expressions, audio_features, uv_inputs):
+        b = audio_features.shape[0]  # batchsize
+        if b != 1:
+            print('ERROR: NeuralTexture forward only implemented for batchsize==1')
+            exit(-1)
+        # sample the learned texture at the rasterized UV coordinates
+        uvs = torch.stack([uv_inputs[:,0,:,:], uv_inputs[:,1,:,:]], 3)
+        return torch.nn.functional.grid_sample(self.data, uvs, mode='bilinear', padding_mode='border')
+
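+#####################################
+# How the dynamic textures below work: a small network maps the per-frame
+# conditioning signal (a DeepSpeech audio window, or expression coefficients)
+# to the weights of a 4x4 convolution kernel; that predicted kernel is run
+# over the learned latent texture ('data') and the result is sampled at the
+# rasterized UV coordinates, just like the static texture above.  Shape
+# sketch for the audio variant (batch size 1 is the only supported case):
+# audio 1x16x29 -> transpose -> 29x16x1 -> convNet -> 64 -> fullNet ->
+# out_ch*tex_feat*4*4 -> reshape into a conv filter (out_ch, tex_feat, 4, 4).
+#####################################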
+
+#####################################
+########   audio texture   ##########
+#####################################
+class DynamicNeuralTextureAudio(nn.Module):
+    def __init__(self, texture_dimensions, texture_features_intermediate, texture_features):
+        super(DynamicNeuralTextureAudio, self).__init__()
+        self.texture_features_intermediate = texture_features_intermediate  #16 #features stored in texture
+        self.texture_dimensions = texture_dimensions  #256 #texture dimensions
+        self.out_ch = texture_features  # output feature, after evaluating the texture
+
+        # input 16 x 29
+        self.convNet = nn.Sequential(
+            nn.Conv2d(29, 32, kernel_size=(3,1), stride=(2,1), padding=(1,0), bias=True),  # 29 x 16 x 1 => 32 x 8 x 1
+            nn.LeakyReLU(0.02, True),
+            nn.Conv2d(32, 32, kernel_size=(3,1), stride=(2,1), padding=(1,0), bias=True),  # 32 x 8 x 1 => 32 x 4 x 1
+            nn.LeakyReLU(0.02, True),
+            nn.Conv2d(32, 64, kernel_size=(3,1), stride=(2,1), padding=(1,0), bias=True),  # 32 x 4 x 1 => 64 x 2 x 1
+            nn.LeakyReLU(0.2, True),
+            nn.Conv2d(64, 64, kernel_size=(3,1), stride=(2,1), padding=(1,0), bias=True),  # 64 x 2 x 1 => 64 x 1 x 1
+            nn.LeakyReLU(0.2, True),
+        )
+        conv_output_size = 64
+        self.fullNet = nn.Sequential(
+            nn.Linear(in_features=conv_output_size, out_features=128, bias=True),
+            nn.LeakyReLU(0.02),
+            nn.Linear(in_features=128, out_features=64, bias=True),
+            nn.LeakyReLU(0.02),
+            nn.Linear(in_features=64, out_features=self.out_ch*4*4*self.texture_features_intermediate, bias=True),
+            nn.Tanh()
+        )
+
+        self.register_parameter('data', torch.nn.Parameter(torch.randn(1, self.texture_features_intermediate, self.texture_dimensions, self.texture_dimensions, requires_grad=True)))
+
+    def forward(self, expressions, audio_features, uv_inputs):
+        b = audio_features.shape[0]  # batchsize
+        if b != 1:
+            print('ERROR: NeuralTexture forward only implemented for batchsize==1')
+            exit(-1)
+        # b x 1 x 16 x 29 --> transpose
+        audio_features = torch.transpose(audio_features, 1, 3)
+        audio_conv_res = self.convNet(audio_features)
+        conv_filter = torch.reshape(self.fullNet(torch.reshape(audio_conv_res, (b,1,-1))), (self.out_ch, self.texture_features_intermediate, 4, 4))
+        self.tex_eval = nn.functional.conv2d(self.data, conv_filter, stride=1, padding=2)
+        uvs = torch.stack([uv_inputs[:,0,:,:], uv_inputs[:,1,:,:]], 3)
+
+        return torch.nn.functional.grid_sample(self.tex_eval, uvs, mode='bilinear', padding_mode='border')
+
+
+#####################################
+######  expression texture  #######
+#####################################
+
+class DynamicNeuralTextureExpression(nn.Module):
+    def __init__(self, texture_dimensions, texture_features_intermediate, texture_features):
+        super(DynamicNeuralTextureExpression, self).__init__()
+        self.texture_features_intermediate = texture_features_intermediate  #16 #features stored in texture
+        self.texture_dimensions = texture_dimensions  #256 #texture dimensions
+        self.out_ch = texture_features  # output feature, after evaluating the texture
+
+        # input: 53 coefficients (50 expression + 3 jaw-pose values, cf. run_frame in inference_renderer.py)
+        input_size = 53
+        self.fullNet = nn.Sequential(
+            nn.Linear(in_features=input_size, out_features=128, bias=True),
+            nn.LeakyReLU(0.02),
+            nn.Linear(in_features=128, out_features=64, bias=True),
+            nn.LeakyReLU(0.02),
+            nn.Linear(in_features=64, out_features=self.out_ch*4*4*self.texture_features_intermediate, bias=True),
+            nn.Tanh()
+        ).cuda()
+
+        self.register_parameter('data', torch.nn.Parameter(torch.randn(1, self.texture_features_intermediate, self.texture_dimensions, self.texture_dimensions, requires_grad=True)))
+
+    def forward(self, expressions, audio_features, uv_inputs):
+        b = expressions.shape[0]  # batchsize
+        if b != 1:
+            print('ERROR: NeuralTexture forward only implemented for batchsize==1')
+            exit(-1)
+
+        expressions = expressions.type(torch.FloatTensor)
+
+        tmp = self.fullNet(torch.reshape(expressions, (1,1,-1)).cuda())
+
+        conv_filter = torch.reshape(tmp, (self.out_ch, self.texture_features_intermediate, 4, 4))
+        self.tex_eval = nn.functional.conv2d(self.data, conv_filter, stride=1, padding=2)
+        uvs = torch.stack([uv_inputs[:,0,:,:], uv_inputs[:,1,:,:]], 3)
+
+        return torch.nn.functional.grid_sample(self.tex_eval, uvs, mode='bilinear', padding_mode='border')
+
diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/UNET.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/UNET.py
new file mode 100644
index 0000000..0b0724d
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/UNET.py
@@ -0,0 +1,316 @@
+import os
+import torch
+import torch.nn as nn
+import torchvision.transforms as transforms
+from util.image_pool import ImagePool
+from .base_model import BaseModel
+from . import networks
+import numpy as np
+import functools
+from PIL import Image
+from util import util
+from torchvision import models
+from collections import namedtuple
+
+
+class UnetSkipConnectionBlock(nn.Module):
+    def __init__(self, outer_nc, inner_nc, input_nc=None, submodule=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d, use_dropout=False):
+        super(UnetSkipConnectionBlock, self).__init__()
+        self.outermost = outermost
+        if type(norm_layer) == functools.partial:
+            use_bias = norm_layer.func == nn.InstanceNorm2d
+        else:
+            use_bias = norm_layer == nn.InstanceNorm2d
+        if input_nc is None:
+            input_nc = outer_nc
+
+        #use_norm = False
+        use_norm = True
+
+        downconv = nn.Conv2d(input_nc, inner_nc, kernel_size=4, stride=2, padding=1, bias=use_bias)
+        downrelu = nn.LeakyReLU(0.2, True)
+
+        uprelu = nn.ReLU(True)
+        if use_norm: upnorm = norm_layer(outer_nc)
+
+        if outermost:
+            upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, kernel_size=4, stride=2, padding=1)
+            down = [downconv]
+            up = [uprelu, upconv, nn.Tanh()]
+            model = down + [submodule] + up
+        elif innermost:
+            upconv = nn.ConvTranspose2d(inner_nc, outer_nc, kernel_size=4, stride=2, padding=1, bias=use_bias)
+            down = [downrelu, downconv]
+            if use_norm: up = [uprelu, upconv, upnorm]
+            else: up = [uprelu, upconv]
+            model = down + up
+        else:
+            upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, kernel_size=4, stride=2, padding=1, bias=use_bias)
+            down = [downrelu, downconv]  # no norm layer on the down path
+            if use_norm: up = [uprelu, upconv, upnorm]
+            else: up = [uprelu, upconv]
+
+            if use_dropout:
+                model = down + [submodule] + up + [nn.Dropout(0.5)]
+            else:
+                model = down + [submodule] + up
+
+        self.model = nn.Sequential(*model)
+
+    def forward(self, x):
+        if self.outermost:
+            return self.model(x)
+        else:
+            return torch.cat([x, self.model(x)], 1)
+
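+# The renderer below (UnetRenderer) composes these blocks recursively,
+# innermost block first.  Every non-outermost block returns
+# torch.cat([x, self.model(x)], 1), so each level's skip connection doubles
+# the channel count seen by the decoder, which is why the up-convolutions
+# take inner_nc * 2 input channels.
+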
+# with bilinear upsampling
+class UnetSkipConnectionBlock_BU(nn.Module):
+    def __init__(self, outer_nc, inner_nc, input_nc=None, submodule=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d, use_dropout=False):
+        super(UnetSkipConnectionBlock_BU, self).__init__()
+        self.outermost = outermost
+        if type(norm_layer) == functools.partial:
+            use_bias = norm_layer.func == nn.InstanceNorm2d
+        else:
+            use_bias = norm_layer == nn.InstanceNorm2d
+        if input_nc is None:
+            input_nc = outer_nc
+
+        #use_norm = False
+        use_norm = True
+
+        downconv = nn.Conv2d(input_nc, inner_nc, kernel_size=4, stride=2, padding=1, bias=use_bias)
+        downrelu = nn.LeakyReLU(0.2, True)
+
+        uprelu = nn.ReLU(True)
+        if use_norm: upnorm = norm_layer(outer_nc)
+
+        if outermost:
+            #upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, kernel_size=4, stride=2, padding=1)
+            upconv = nn.Sequential(
+                nn.Upsample(scale_factor=2, mode="bilinear", align_corners=False),
+                nn.Conv2d(inner_nc * 2, outer_nc, kernel_size=3, stride=1, padding=1, bias=use_bias),
+                nn.LeakyReLU(0.2, True)
+            )
+
+            down = [downconv]
+            up = [uprelu, upconv, nn.Tanh()]
+            model = down + [submodule] + up
+        elif innermost:
+            #upconv = nn.ConvTranspose2d(inner_nc, outer_nc, kernel_size=4, stride=2, padding=1, bias=use_bias)
+            upconv = nn.Sequential(
+                nn.Upsample(scale_factor=2, mode="bilinear", align_corners=False),
+                nn.Conv2d(inner_nc, outer_nc, kernel_size=3, stride=1, padding=1, bias=use_bias),
+                nn.LeakyReLU(0.2, True)
+            )
+
+            down = [downrelu, downconv]
+            if use_norm: up = [uprelu, upconv, upnorm]
+            else: up = [uprelu, upconv]
+            model = down + up
+        else:
+            #upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, kernel_size=4, stride=2, padding=1, bias=use_bias)
+            upconv = nn.Sequential(
+                nn.Upsample(scale_factor=2, mode="bilinear", align_corners=False),
+                nn.Conv2d(inner_nc * 2, outer_nc, kernel_size=3, stride=1, padding=1, bias=use_bias),
+                nn.LeakyReLU(0.2, True)
+            )
+
+            down = [downrelu, downconv]  # no norm layer on the down path
+            if use_norm: up = [uprelu, upconv, upnorm]
+            else: up = [uprelu, upconv]
+
+            if use_dropout:
+                model = down + [submodule] + up + [nn.Dropout(0.5)]
+            else:
+                model = down + [submodule] + up
+
+        self.model = nn.Sequential(*model)
+
+    def forward(self, x):
+        if self.outermost:
+            return self.model(x)
+        else:
+            return torch.cat([x, self.model(x)], 1)
+
+
+# dilated convs, without downsampling
+class UnetSkipConnectionBlock_DC(nn.Module):
+    def __init__(self, outer_nc, inner_nc, input_nc=None, submodule=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d, use_dropout=False, dilation=1):
+        super(UnetSkipConnectionBlock_DC, self).__init__()
+        self.outermost = outermost
+        if type(norm_layer) == functools.partial:
+            use_bias = norm_layer.func == nn.InstanceNorm2d
+        else:
+            use_bias = norm_layer == nn.InstanceNorm2d
+        if input_nc is None:
+            input_nc = outer_nc
+
+        #use_norm = False
+        use_norm = True
+
+        #downconv = nn.Conv2d(input_nc, inner_nc, kernel_size=4, stride=2, dilation=dilation, padding=1, bias=use_bias)
+        downconv = nn.Conv2d(input_nc, inner_nc, kernel_size=3, stride=1, dilation=dilation, padding=1*dilation, bias=use_bias)
+        downrelu = nn.LeakyReLU(0.2, True)
+
+        uprelu = nn.ReLU(True)
+        if use_norm: upnorm = norm_layer(outer_nc)
+
+        if outermost:
+            #upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, kernel_size=4, stride=2, padding=1)
+            upconv = nn.Sequential(
+                nn.Conv2d(inner_nc * 2, outer_nc, kernel_size=3, stride=1, dilation=1, padding=1, bias=use_bias),
+                nn.LeakyReLU(0.2, True)
+            )
+
+            down = [downconv]
+            up = [uprelu, upconv, nn.Tanh()]
+            model = down + [submodule] + up
+        elif innermost:
+            #upconv = nn.ConvTranspose2d(inner_nc, outer_nc, kernel_size=4, stride=2, padding=1, bias=use_bias)
+            upconv = nn.Sequential(
+                nn.Conv2d(inner_nc, outer_nc, kernel_size=3, stride=1, dilation=1, padding=1, bias=use_bias),
+                nn.LeakyReLU(0.2, True)
+            )
+
+            down = [downrelu, downconv]
+            if use_norm: up = [uprelu, upconv, upnorm]
+            else: up = [uprelu, upconv]
+            model = down + up
+        else:
+            #upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, kernel_size=4, stride=2, padding=1, bias=use_bias)
+            upconv = nn.Sequential(
+                nn.Conv2d(inner_nc * 2, outer_nc, kernel_size=3, stride=1, dilation=1, padding=1, bias=use_bias),
+                nn.LeakyReLU(0.2, True)
+            )
+
+            down = [downrelu, downconv]  # no norm layer on the down path
+            if use_norm: up = [uprelu, upconv, upnorm]
+            else: up = [uprelu, upconv]
+
+            if use_dropout:
+                model = down + [submodule] + up + [nn.Dropout(0.5)]
+            else:
+                model = down + [submodule] + up
+
+        self.model = nn.Sequential(*model)
+
+    def forward(self, x):
+        if self.outermost:
+            return self.model(x)
+        else:
+            return torch.cat([x, self.model(x)], 1)
+
+
+class UnetRenderer(nn.Module):
+    def __init__(self, renderer, input_nc, output_nc, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False):
+        super(UnetRenderer, self).__init__()
+        # construct unet structure
+        if renderer=='UNET_8_level_BU':
+            print('>>>> UNET_8_level_BU <<<<')
+            num_downs = 8
+            unet_block =
UnetSkipConnectionBlock_BU(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer, innermost=True) + for i in range(num_downs - 5): + unet_block = UnetSkipConnectionBlock_BU(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer, use_dropout=use_dropout) + unet_block = UnetSkipConnectionBlock_BU(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock_BU(ngf * 2, ngf * 4, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock_BU(ngf, ngf * 2, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock_BU(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True, norm_layer=norm_layer) + elif renderer=='UNET_6_level_BU': + print('>>>> UNET_6_level_BU <<<<') + num_downs = 6 + unet_block = UnetSkipConnectionBlock_BU(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer, innermost=True) + for i in range(num_downs - 5): + unet_block = UnetSkipConnectionBlock_BU(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer, use_dropout=use_dropout) + unet_block = UnetSkipConnectionBlock_BU(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock_BU(ngf * 2, ngf * 4, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock_BU(ngf, ngf * 2, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock_BU(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True, norm_layer=norm_layer) + elif renderer=='UNET_5_level_BU': + print('>>>> UNET_5_level_BU <<<<') + num_downs = 5 + unet_block = UnetSkipConnectionBlock_BU(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer, innermost=True) + for i in range(num_downs - 5): + unet_block = UnetSkipConnectionBlock_BU(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer, use_dropout=use_dropout) + unet_block = UnetSkipConnectionBlock_BU(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock_BU(ngf * 2, ngf * 4, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock_BU(ngf, ngf * 2, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock_BU(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True, norm_layer=norm_layer) + elif renderer=='UNET_3_level_BU': + print('>>>> UNET_3_level_BU <<<<') + unet_block = UnetSkipConnectionBlock_BU(ngf * 2, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer, innermost=True) + unet_block = UnetSkipConnectionBlock_BU(ngf, ngf * 2, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock_BU(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True, norm_layer=norm_layer) + + elif renderer=='UNET_8_level': + print('>>>> UNET_8_level <<<<') + num_downs = 8 + unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer, innermost=True) + for i in range(num_downs - 5): + unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer, use_dropout=use_dropout) + unet_block = UnetSkipConnectionBlock(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = 
UnetSkipConnectionBlock(ngf * 2, ngf * 4, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True, norm_layer=norm_layer) + elif renderer=='UNET_6_level': + print('>>>> UNET_6_level <<<<') + num_downs = 6 + unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer, innermost=True) + for i in range(num_downs - 5): + unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer, use_dropout=use_dropout) + unet_block = UnetSkipConnectionBlock(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(ngf * 2, ngf * 4, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True, norm_layer=norm_layer) + elif renderer=='UNET_5_level': + print('>>>> UNET_5_level <<<<') + num_downs = 5 + unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer, innermost=True) + for i in range(num_downs - 5): + unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer, use_dropout=use_dropout) + unet_block = UnetSkipConnectionBlock(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(ngf * 2, ngf * 4, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True, norm_layer=norm_layer) + elif renderer=='UNET_3_level': + print('>>>> UNET_3_level <<<<') + unet_block = UnetSkipConnectionBlock(ngf * 2, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer, innermost=True) + unet_block = UnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True, norm_layer=norm_layer) + + elif renderer=='UNET_5_level_DC': + print('>>>> UNET_5_level_DC <<<<') + num_downs = 5 + dilation = 1 + unet_block = UnetSkipConnectionBlock_DC(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer, innermost=True, dilation=dilation) + for i in range(num_downs - 5): + dilation *= 2 + unet_block = UnetSkipConnectionBlock_DC(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer, use_dropout=use_dropout, dilation=dilation) + dilation *= 2 + unet_block = UnetSkipConnectionBlock_DC(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer, dilation=dilation) + dilation *= 2 + unet_block = UnetSkipConnectionBlock_DC(ngf * 2, ngf * 4, input_nc=None, submodule=unet_block, norm_layer=norm_layer, dilation=dilation) + dilation *= 2 + unet_block = UnetSkipConnectionBlock_DC(ngf, ngf * 2, input_nc=None, submodule=unet_block, norm_layer=norm_layer, dilation=dilation) + dilation *= 2 + unet_block = UnetSkipConnectionBlock_DC(output_nc, ngf, input_nc=input_nc, 
submodule=unet_block, outermost=True, norm_layer=norm_layer, dilation=dilation) + elif renderer=='UNET_3_level_DC': + print('>>>> UNET_3_level_DC <<<<') + dilation = 1 + unet_block = UnetSkipConnectionBlock_DC(ngf * 2, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer, innermost=True, dilation=dilation) + dilation *= 2 + unet_block = UnetSkipConnectionBlock_DC(ngf, ngf * 2, input_nc=None, submodule=unet_block, norm_layer=norm_layer, dilation=dilation) + dilation *= 2 + unet_block = UnetSkipConnectionBlock_DC(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True, norm_layer=norm_layer, dilation=dilation) + + self.model = unet_block + + def forward(self, expressions, audio_features, features, background): + unet_input = torch.cat([features, background], 1) + return self.model(unet_input) \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/VGG_LOSS.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/VGG_LOSS.py new file mode 100644 index 0000000..f426c37 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/VGG_LOSS.py @@ -0,0 +1,79 @@ +import os +import torch +import torch.nn as nn +import torchvision.transforms as transforms +from util.image_pool import ImagePool +from .base_model import BaseModel +from . import networks +import numpy as np +import functools +from torchvision import models +from collections import namedtuple + +class VGG16(torch.nn.Module): + def __init__(self, requires_grad=False): + super(VGG16, self).__init__() + vgg_pretrained_features = models.vgg16(pretrained=True).features + self.slice1 = torch.nn.Sequential() + self.slice2 = torch.nn.Sequential() + self.slice3 = torch.nn.Sequential() + self.slice4 = torch.nn.Sequential() + for x in range(4): + self.slice1.add_module(str(x), vgg_pretrained_features[x]) + for x in range(4, 9): + self.slice2.add_module(str(x), vgg_pretrained_features[x]) + for x in range(9, 16): + self.slice3.add_module(str(x), vgg_pretrained_features[x]) + for x in range(16, 23): + self.slice4.add_module(str(x), vgg_pretrained_features[x]) + if not requires_grad: + for param in self.parameters(): + param.requires_grad = False + + def forward(self, X): + #normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]) + #X=normalize(X) + X = 0.5 * (X + 1.0) # map to [0,1] + + h = self.slice1(X) + h_relu1_2 = h + h = self.slice2(h) + h_relu2_2 = h + h = self.slice3(h) + h_relu3_3 = h + h = self.slice4(h) + h_relu4_3 = h + vgg_outputs = namedtuple("VggOutputs", ['relu1_2', 'relu2_2', 'relu3_3', 'relu4_3']) + out = vgg_outputs(h_relu1_2, h_relu2_2, h_relu3_3, h_relu4_3) + return out + +def gram_matrix(y): + (b, ch, h, w) = y.size() + features = y.view(b, ch, w * h) + features_t = features.transpose(1, 2) + gram = features.bmm(features_t) / (ch * h * w) + return gram + + + +class VGGLOSS(torch.nn.Module): + def __init__(self): + super(VGGLOSS, self).__init__() + self.model = VGG16() + self.criterionL2 = torch.nn.MSELoss(reduction='mean') + + def forward(self, fake, target, content_weight = 1.0, style_weight = 1.0): + vgg_fake = self.model(fake) + vgg_target = self.model(target) + + content_loss = self.criterionL2(vgg_target.relu2_2, vgg_fake.relu2_2) + + # gram_matrix + gram_style = [gram_matrix(y) for y in vgg_target] + style_loss = 0.0 + for ft_y, gm_s in zip(vgg_fake, gram_style): + gm_y = gram_matrix(ft_y) + style_loss += self.criterionL2(gm_y, gm_s) + + total_loss = content_weight * content_loss + 
style_weight * style_loss + return total_loss \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/__init__.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/__init__.py new file mode 100644 index 0000000..4d92091 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/__init__.py @@ -0,0 +1,39 @@ +import importlib +from models.base_model import BaseModel + + +def find_model_using_name(model_name): + # Given the option --model [modelname], + # the file "models/modelname_model.py" + # will be imported. + model_filename = "models." + model_name + "_model" + modellib = importlib.import_module(model_filename) + + # In the file, the class called ModelNameModel() will + # be instantiated. It has to be a subclass of BaseModel, + # and it is case-insensitive. + model = None + target_model_name = model_name.replace('_', '') + 'model' + for name, cls in modellib.__dict__.items(): + if name.lower() == target_model_name.lower() \ + and issubclass(cls, BaseModel): + model = cls + + if model is None: + print("In %s.py, there should be a subclass of BaseModel with class name that matches %s in lowercase." % (model_filename, target_model_name)) + exit(0) + + return model + + +def get_option_setter(model_name): + model_class = find_model_using_name(model_name) + return model_class.modify_commandline_options + + +def create_model(opt): + model = find_model_using_name(opt.model) + instance = model() + instance.initialize(opt) + print("model [%s] was created" % (instance.name())) + return instance diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/base_model.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/base_model.py new file mode 100644 index 0000000..5d3f9fa --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/base_model.py @@ -0,0 +1,254 @@ +import os +import torch +from collections import OrderedDict +from . 
import networks +import numpy as np +from PIL import Image +import shutil + +def save_tensor_image(input_image, image_path): + if isinstance(input_image, torch.Tensor): + image_tensor = input_image.data + image_numpy = image_tensor[0].cpu().float().numpy() + if image_numpy.shape[0] == 1: + image_numpy = np.tile(image_numpy, (3, 1, 1)) + image_numpy = (np.transpose(image_numpy, (1, 2, 0)) + 1) / 2.0 * 255.0 + else: + image_numpy = input_image + image_numpy = image_numpy.astype(np.uint8) + image_pil = Image.fromarray(image_numpy) + image_pil.save(image_path) + +class BaseModel(): + + # modify parser to add command line options, + # and also change the default values if needed + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def name(self): + return 'BaseModel' + + def initialize(self, opt): + self.opt = opt + self.gpu_ids = opt.gpu_ids + self.isTrain = opt.isTrain + self.device = torch.device('cuda:{}'.format(self.gpu_ids[0])) if self.gpu_ids else torch.device('cpu') + self.load_dir = os.path.join(opt.checkpoints_dir, opt.name) + self.save_dir = os.path.join(opt.checkpoints_dir, opt.name) + if opt.resize_or_crop != 'scale_width': + torch.backends.cudnn.benchmark = True + self.loss_names = [] + self.model_names = [] + self.visual_names = [] + self.image_paths = [] + + def set_input(self, input): + pass + + def forward(self): + pass + + # load and print networks; create schedulers + def setup(self, opt, parser=None): + if self.isTrain: + self.schedulers = [networks.get_scheduler(optimizer, opt) for optimizer in self.optimizers] + if not self.isTrain or opt.continue_train: + load_suffix = 'iter_%d' % opt.load_iter if opt.load_iter > 0 else opt.epoch + self.load_networks(load_suffix) + self.print_networks(opt.verbose) + + + + # load specific moudles + def loadModules(self, opt, model_name, module_names): + for name in module_names: + if isinstance(name, str): + load_dir = os.path.join(opt.checkpoints_dir, model_name) + load_filename = 'latest_%s.pth' % (name) + load_path = os.path.join(load_dir, load_filename) + net = getattr(self, name) + if isinstance(net, torch.Tensor): + print('loading the tensor from %s' % load_path) + net_loaded = torch.load(load_path, map_location=str(self.device)) + net.copy_(net_loaded) + else: + # if isinstance(net, torch.nn.DataParallel): + # net = net.module + print('loading the module from %s' % load_path) + # if you are using PyTorch newer than 0.4 (e.g., built from + # GitHub source), you can remove str() on self.device + state_dict = torch.load(load_path, map_location=str(self.device)) + if hasattr(state_dict, '_metadata'): + del state_dict._metadata + + # patch InstanceNorm checkpoints prior to 0.4 + for key in list(state_dict.keys()): # need to copy keys here because we mutate in loop + self.__patch_instance_norm_state_dict(state_dict, net, key.split('.')) + net.load_state_dict(state_dict) + + + + + # make models eval mode during test time + def eval(self): + for name in self.model_names: + if isinstance(name, str): + net = getattr(self, name) + net.eval() + + # used in test time, wrapping `forward` in no_grad() so we don't save + # intermediate steps for backprop + def test(self): + with torch.no_grad(): + self.forward() + + # get image paths + def get_image_paths(self): + return self.image_paths + + def optimize_parameters(self): + pass + + # update learning rate (called once every epoch) + def update_learning_rate(self): + for scheduler in self.schedulers: + scheduler.step() + lr = 
self.optimizers[0].param_groups[0]['lr'] + print('learning rate = %.7f' % lr) + + # return visualization images. train.py will display these images, and save the images to a html + def get_current_visuals(self): + visual_ret = OrderedDict() + for name in self.visual_names: + if isinstance(name, str): + visual_ret[name] = getattr(self, name) + return visual_ret + + # return traning losses/errors. train.py will print out these errors as debugging information + def get_current_losses(self): + errors_ret = OrderedDict() + for name in self.loss_names: + if isinstance(name, str): + # float(...) works for both scalar tensor and float number + errors_ret[name] = float(getattr(self, 'loss_' + name)) + return errors_ret + + # save models to the disk + def save_networks(self, epoch): + for name in self.model_names: + if isinstance(name, str): + save_filename = '%s_%s.pth' % (epoch, name) + save_path = os.path.join(self.save_dir, save_filename) + net = getattr(self, name) + torch.save(net.state_dict(), save_path) + + # if isinstance(net, torch.Tensor): + # #torch.save(net.state_dict(), save_path) + # torch.save(net, save_path) + # for i in range(0, list(net.size())[0]): + # save_tensor_image(net[i:i+1,0:3,:,:], save_path+str(i)+'.png') + # else: + # if len(self.gpu_ids) > 0 and torch.cuda.is_available(): + # #torch.save(net.module.cpu().state_dict(), save_path) # << original + # torch.save(net.cpu().state_dict(), save_path) + # net.cuda(self.gpu_ids[0]) + # else: + # torch.save(net.cpu().state_dict(), save_path) + + # clean checkpoints + def clean_checkpoints(self, total_epochs): + for name in self.model_names: + if isinstance(name, str): + for epoch in range(total_epochs): + delete_filename = '%s_%s.pth' % (epoch, name) + save_path = os.path.join(self.save_dir, delete_filename) + if os.path.isfile(save_path): + os.remove(save_path) + + shutil.rmtree(os.path.join(self.save_dir, 'web')) + + + + def __patch_instance_norm_state_dict(self, state_dict, module, keys, i=0): + key = keys[i] + if i + 1 == len(keys): # at the end, pointing to a parameter/buffer + if module.__class__.__name__.startswith('InstanceNorm') and \ + (key == 'running_mean' or key == 'running_var'): + if getattr(module, key) is None: + state_dict.pop('.'.join(keys)) + if module.__class__.__name__.startswith('InstanceNorm') and \ + (key == 'num_batches_tracked'): + state_dict.pop('.'.join(keys)) + else: + self.__patch_instance_norm_state_dict(state_dict, getattr(module, key), keys, i + 1) + + # load models from the disk + def load_networks(self, epoch): + for name in self.model_names: + if isinstance(name, str): + load_filename = '%s_%s.pth' % (epoch, name) + load_path = os.path.join(self.load_dir, load_filename) + net = getattr(self, name) + net_loaded = torch.load(load_path, map_location=str(self.device)) + net.load_state_dict(net_loaded) + + + # print('loading the tensor from %s' % load_path) + # net_loaded = torch.load(load_path, map_location=str(self.device)) + + # for i, k in zip(net.state_dict().keys(), net_loaded.keys()): + # print(i, k) + # + # net.state_dict()[i] = net_loaded[k] + + + # if isinstance(net, torch.Tensor): + # print('loading the tensor from %s' % load_path) + # net_loaded = torch.load(load_path, map_location=str(self.device)) + # net.copy_(net_loaded) + # + # else: + # # if isinstance(net, torch.nn.DataParallel): + # # net = net.module + # print('loading the module from %s' % load_path) + # # if you are using PyTorch newer than 0.4 (e.g., built from + # # GitHub source), you can remove str() on self.device + # 
state_dict = torch.load(load_path, map_location=str(self.device)) + # if hasattr(state_dict, '_metadata'): + # del state_dict._metadata + # + # # patch InstanceNorm checkpoints prior to 0.4 + # for key in list(state_dict.keys()): # need to copy keys here because we mutate in loop + # print(key) + # self.__patch_instance_norm_state_dict(state_dict, net, key.split('.')) + # + # net.load_state_dict(net_loaded.state_dict()) + + # print network information + def print_networks(self, verbose): + print('---------- Networks initialized -------------') + for name in self.model_names: + if isinstance(name, str): + net = getattr(self, name) + if isinstance(net, torch.Tensor): + num_params = net.numel() + print('[Tensor %s] Total number of parameters : %.3f M' % (name, num_params / 1e6)) + else: + num_params = 0 + for param in net.parameters(): + num_params += param.numel() + if verbose: + print(net) + print('[Network %s] Total number of parameters : %.3f M' % (name, num_params / 1e6)) + print('-----------------------------------------------') + + # set requies_grad=False to avoid computation + def set_requires_grad(self, nets, requires_grad=False): + if not isinstance(nets, list): + nets = [nets] + for net in nets: + if net is not None: + for param in net.parameters(): + param.requires_grad = requires_grad diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/networks.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/networks.py new file mode 100644 index 0000000..a8c6505 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/models/networks.py @@ -0,0 +1,383 @@ +import torch +import torch.nn as nn +from torch.nn import init +import functools +from torch.optim import lr_scheduler + +############################################################################### +# Helper Functions +############################################################################### + + +def get_norm_layer(norm_type='instance'): + if norm_type == 'batch': + norm_layer = functools.partial(nn.BatchNorm2d, affine=True) + elif norm_type == 'instance': + norm_layer = functools.partial(nn.InstanceNorm2d, affine=False, track_running_stats=False) + elif norm_type == 'none': + norm_layer = None + else: + raise NotImplementedError('normalization layer [%s] is not found' % norm_type) + return norm_layer + + +def get_scheduler(optimizer, opt): + if opt.lr_policy == 'lambda': + def lambda_rule(epoch): + lr_l = 1.0 - max(0, epoch + opt.epoch_count - opt.niter) / float(opt.niter_decay + 1) + return lr_l + scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda_rule) + elif opt.lr_policy == 'step': + scheduler = lr_scheduler.StepLR(optimizer, step_size=opt.lr_decay_iters, gamma=0.1) + elif opt.lr_policy == 'plateau': + scheduler = lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.2, threshold=0.01, patience=5) + elif opt.lr_policy == 'cosine': + scheduler = lr_scheduler.CosineAnnealingLR(optimizer, T_max=opt.niter, eta_min=0) + else: + return NotImplementedError('learning rate policy [%s] is not implemented', opt.lr_policy) + return scheduler + + +def init_weights(net, init_type='normal', gain=0.02): + def init_func(m): + classname = m.__class__.__name__ + if hasattr(m, 'weight') and (classname.find('Conv') != -1 or classname.find('Linear') != -1): + if init_type == 'normal': + init.normal_(m.weight.data, 0.0, gain) + elif init_type == 'xavier': + init.xavier_normal_(m.weight.data, gain=gain) + elif init_type == 'kaiming': + init.kaiming_normal_(m.weight.data, 
a=0, mode='fan_in')
+            elif init_type == 'orthogonal':
+                init.orthogonal_(m.weight.data, gain=gain)
+            else:
+                raise NotImplementedError('initialization method [%s] is not implemented' % init_type)
+            if hasattr(m, 'bias') and m.bias is not None:
+                init.constant_(m.bias.data, 0.0)
+        elif classname.find('BatchNorm2d') != -1:
+            init.normal_(m.weight.data, 1.0, gain)
+            init.constant_(m.bias.data, 0.0)
+
+    print('initialize network with %s' % init_type)
+    net.apply(init_func)
+
+
+def init_net(net, init_type='normal', init_gain=0.02, gpu_ids=[]):
+    if len(gpu_ids) > 0:
+        assert(torch.cuda.is_available())
+        net.to(gpu_ids[0])
+        #net = torch.nn.DataParallel(net, gpu_ids)
+    init_weights(net, init_type, gain=init_gain)
+    return net
+
+
+def define_G(input_nc, output_nc, ngf, netG, norm='batch', use_dropout=False, init_type='normal', init_gain=0.02, gpu_ids=[]):
+    net = None
+    norm_layer = get_norm_layer(norm_type=norm)
+
+    if netG == 'resnet_9blocks':
+        net = ResnetGenerator(input_nc, output_nc, ngf, norm_layer=norm_layer, use_dropout=use_dropout, n_blocks=9)
+    elif netG == 'resnet_6blocks':
+        net = ResnetGenerator(input_nc, output_nc, ngf, norm_layer=norm_layer, use_dropout=use_dropout, n_blocks=6)
+    elif netG == 'unet_128':
+        net = UnetGenerator(input_nc, output_nc, 7, ngf, norm_layer=norm_layer, use_dropout=use_dropout)
+    elif netG == 'unet_256':
+        net = UnetGenerator(input_nc, output_nc, 8, ngf, norm_layer=norm_layer, use_dropout=use_dropout)
+    else:
+        raise NotImplementedError('Generator model name [%s] is not recognized' % netG)
+    return init_net(net, init_type, init_gain, gpu_ids)
+
+
+def define_D(input_nc, ndf, netD,
+             n_layers_D=3, norm='batch', use_sigmoid=False, init_type='normal', init_gain=0.02, gpu_ids=[]):
+    net = None
+    norm_layer = get_norm_layer(norm_type=norm)
+
+    if netD == 'basic':
+        net = NLayerDiscriminator(input_nc, ndf, n_layers=3, norm_layer=norm_layer, use_sigmoid=use_sigmoid)
+    elif netD == 'n_layers':
+        net = NLayerDiscriminator(input_nc, ndf, n_layers_D, norm_layer=norm_layer, use_sigmoid=use_sigmoid)
+    elif netD == 'pixel':
+        net = PixelDiscriminator(input_nc, ndf, norm_layer=norm_layer, use_sigmoid=use_sigmoid)
+    else:
+        raise NotImplementedError('Discriminator model name [%s] is not recognized' % netD)
+    return init_net(net, init_type, init_gain, gpu_ids)
+
+
+##############################################################################
+# Classes
+##############################################################################
+
+
+# Defines the GAN loss which uses either LSGAN or the regular GAN.
+# When LSGAN is used, it is basically the same as MSELoss,
+# but it abstracts away the need to create the target label tensor
+# that has the same size as the input
+class GANLoss(nn.Module):
+    def __init__(self, use_lsgan=True, target_real_label=1.0, target_fake_label=0.0):
+        super(GANLoss, self).__init__()
+        self.register_buffer('real_label', torch.tensor(target_real_label))
+        self.register_buffer('fake_label', torch.tensor(target_fake_label))
+        if use_lsgan:
+            self.loss = nn.MSELoss()
+        else:
+            self.loss = nn.BCELoss()
+
+    def get_target_tensor(self, input, target_is_real):
+        if target_is_real:
+            target_tensor = self.real_label
+        else:
+            target_tensor = self.fake_label
+        return target_tensor.expand_as(input)
+
+    def __call__(self, input, target_is_real):
+        target_tensor = self.get_target_tensor(input, target_is_real)
+        return self.loss(input, target_tensor)
+
+
+# Defines the generator that consists of Resnet blocks between a few
+# downsampling/upsampling operations.
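+# A usage sketch (added note, not from the original code; assumes ngf=64 and
+# a 3-channel 256x256 input, matching the defaults used by define_G above):
+#
+#     netG = ResnetGenerator(input_nc=3, output_nc=3, ngf=64, n_blocks=9)
+#     out = netG(torch.randn(1, 3, 256, 256))  # -> torch.Size([1, 3, 256, 256])
+#
+# The two stride-2 convolutions take the input to 256 channels at 64x64, the
+# residual blocks operate at that resolution, and the transposed convolutions
+# upsample back to the original size.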
+# Code and idea originally from Justin Johnson's architecture. +# https://github.com/jcjohnson/fast-neural-style/ +class ResnetGenerator(nn.Module): + def __init__(self, input_nc, output_nc, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False, n_blocks=6, padding_type='reflect'): + assert(n_blocks >= 0) + super(ResnetGenerator, self).__init__() + self.input_nc = input_nc + self.output_nc = output_nc + self.ngf = ngf + if type(norm_layer) == functools.partial: + use_bias = norm_layer.func == nn.InstanceNorm2d + else: + use_bias = norm_layer == nn.InstanceNorm2d + + model = [nn.ReflectionPad2d(3), + nn.Conv2d(input_nc, ngf, kernel_size=7, padding=0, + bias=use_bias), + norm_layer(ngf), + nn.ReLU(True)] + + n_downsampling = 2 + for i in range(n_downsampling): + mult = 2**i + model += [nn.Conv2d(ngf * mult, ngf * mult * 2, kernel_size=3, + stride=2, padding=1, bias=use_bias), + norm_layer(ngf * mult * 2), + nn.ReLU(True)] + + mult = 2**n_downsampling + for i in range(n_blocks): + model += [ResnetBlock(ngf * mult, padding_type=padding_type, norm_layer=norm_layer, use_dropout=use_dropout, use_bias=use_bias)] + + for i in range(n_downsampling): + mult = 2**(n_downsampling - i) + model += [nn.ConvTranspose2d(ngf * mult, int(ngf * mult / 2), + kernel_size=3, stride=2, + padding=1, output_padding=1, + bias=use_bias), + norm_layer(int(ngf * mult / 2)), + nn.ReLU(True)] + model += [nn.ReflectionPad2d(3)] + model += [nn.Conv2d(ngf, output_nc, kernel_size=7, padding=0)] + model += [nn.Tanh()] + + self.model = nn.Sequential(*model) + + def forward(self, input): + return self.model(input) + + +# Define a resnet block +class ResnetBlock(nn.Module): + def __init__(self, dim, padding_type, norm_layer, use_dropout, use_bias): + super(ResnetBlock, self).__init__() + self.conv_block = self.build_conv_block(dim, padding_type, norm_layer, use_dropout, use_bias) + + def build_conv_block(self, dim, padding_type, norm_layer, use_dropout, use_bias): + conv_block = [] + p = 0 + if padding_type == 'reflect': + conv_block += [nn.ReflectionPad2d(1)] + elif padding_type == 'replicate': + conv_block += [nn.ReplicationPad2d(1)] + elif padding_type == 'zero': + p = 1 + else: + raise NotImplementedError('padding [%s] is not implemented' % padding_type) + + conv_block += [nn.Conv2d(dim, dim, kernel_size=3, padding=p, bias=use_bias), + norm_layer(dim), + nn.ReLU(True)] + if use_dropout: + conv_block += [nn.Dropout(0.5)] + + p = 0 + if padding_type == 'reflect': + conv_block += [nn.ReflectionPad2d(1)] + elif padding_type == 'replicate': + conv_block += [nn.ReplicationPad2d(1)] + elif padding_type == 'zero': + p = 1 + else: + raise NotImplementedError('padding [%s] is not implemented' % padding_type) + conv_block += [nn.Conv2d(dim, dim, kernel_size=3, padding=p, bias=use_bias), + norm_layer(dim)] + + return nn.Sequential(*conv_block) + + def forward(self, x): + out = x + self.conv_block(x) + return out + + +# Defines the Unet generator. +# |num_downs|: number of downsamplings in UNet. 
For example, +# if |num_downs| == 7, image of size 128x128 will become of size 1x1 +# at the bottleneck +class UnetGenerator(nn.Module): + def __init__(self, input_nc, output_nc, num_downs, ngf=64, + norm_layer=nn.BatchNorm2d, use_dropout=False): + super(UnetGenerator, self).__init__() + + # construct unet structure + unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer, innermost=True) + for i in range(num_downs - 5): + unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer, use_dropout=use_dropout) + unet_block = UnetSkipConnectionBlock(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(ngf * 2, ngf * 4, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True, norm_layer=norm_layer) + + self.model = unet_block + + def forward(self, input): + return self.model(input) + + +# Defines the submodule with skip connection. +# X -------------------identity---------------------- X +# |-- downsampling -- |submodule| -- upsampling --| +class UnetSkipConnectionBlock(nn.Module): + def __init__(self, outer_nc, inner_nc, input_nc=None, + submodule=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d, use_dropout=False): + super(UnetSkipConnectionBlock, self).__init__() + self.outermost = outermost + if type(norm_layer) == functools.partial: + use_bias = norm_layer.func == nn.InstanceNorm2d + else: + use_bias = norm_layer == nn.InstanceNorm2d + if input_nc is None: + input_nc = outer_nc + downconv = nn.Conv2d(input_nc, inner_nc, kernel_size=4, + stride=2, padding=1, bias=use_bias) + downrelu = nn.LeakyReLU(0.2, True) + downnorm = norm_layer(inner_nc) + uprelu = nn.ReLU(True) + upnorm = norm_layer(outer_nc) + + if outermost: + upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, + kernel_size=4, stride=2, + padding=1) + down = [downconv] + up = [uprelu, upconv, nn.Tanh()] + model = down + [submodule] + up + elif innermost: + upconv = nn.ConvTranspose2d(inner_nc, outer_nc, + kernel_size=4, stride=2, + padding=1, bias=use_bias) + down = [downrelu, downconv] + up = [uprelu, upconv, upnorm] + model = down + up + else: + upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, + kernel_size=4, stride=2, + padding=1, bias=use_bias) + down = [downrelu, downconv, downnorm] + up = [uprelu, upconv, upnorm] + + if use_dropout: + model = down + [submodule] + up + [nn.Dropout(0.5)] + else: + model = down + [submodule] + up + + self.model = nn.Sequential(*model) + + def forward(self, x): + if self.outermost: + return self.model(x) + else: + return torch.cat([x, self.model(x)], 1) + + +# Defines the PatchGAN discriminator with the specified arguments. 
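+# Rather than producing one logit per image, the stacked stride-2 convolutions
+# return a grid of logits, each scoring one overlapping input patch (the
+# classic 70x70 receptive field when n_layers=3). A usage sketch (added note,
+# not from the original code; assumes a 3-channel 256x256 input):
+#
+#     netD = NLayerDiscriminator(input_nc=3, ndf=64, n_layers=3)
+#     logits = netD(torch.randn(1, 3, 256, 256))  # -> torch.Size([1, 1, 30, 30])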
+class NLayerDiscriminator(nn.Module): + def __init__(self, input_nc, ndf=64, n_layers=3, norm_layer=nn.BatchNorm2d, use_sigmoid=False): + super(NLayerDiscriminator, self).__init__() + if type(norm_layer) == functools.partial: + use_bias = norm_layer.func == nn.InstanceNorm2d + else: + use_bias = norm_layer == nn.InstanceNorm2d + + kw = 4 + padw = 1 + sequence = [ + nn.Conv2d(input_nc, ndf, kernel_size=kw, stride=2, padding=padw), + nn.LeakyReLU(0.2, True) + ] + + nf_mult = 1 + nf_mult_prev = 1 + for n in range(1, n_layers): + nf_mult_prev = nf_mult + nf_mult = min(2**n, 8) + sequence += [ + nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult, + kernel_size=kw, stride=2, padding=padw, bias=use_bias), + norm_layer(ndf * nf_mult), + nn.LeakyReLU(0.2, True) + ] + + nf_mult_prev = nf_mult + nf_mult = min(2**n_layers, 8) + sequence += [ + nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult, + kernel_size=kw, stride=1, padding=padw, bias=use_bias), + norm_layer(ndf * nf_mult), + nn.LeakyReLU(0.2, True) + ] + + sequence += [nn.Conv2d(ndf * nf_mult, 1, kernel_size=kw, stride=1, padding=padw)] + + if use_sigmoid: + sequence += [nn.Sigmoid()] + + self.model = nn.Sequential(*sequence) + + def forward(self, input): + return self.model(input) + + +class PixelDiscriminator(nn.Module): + def __init__(self, input_nc, ndf=64, norm_layer=nn.BatchNorm2d, use_sigmoid=False): + super(PixelDiscriminator, self).__init__() + if type(norm_layer) == functools.partial: + use_bias = norm_layer.func == nn.InstanceNorm2d + else: + use_bias = norm_layer == nn.InstanceNorm2d + + self.net = [ + nn.Conv2d(input_nc, ndf, kernel_size=1, stride=1, padding=0), + nn.LeakyReLU(0.2, True), + nn.Conv2d(ndf, ndf * 2, kernel_size=1, stride=1, padding=0, bias=use_bias), + norm_layer(ndf * 2), + nn.LeakyReLU(0.2, True), + nn.Conv2d(ndf * 2, 1, kernel_size=1, stride=1, padding=0, bias=use_bias)] + + if use_sigmoid: + self.net.append(nn.Sigmoid()) + + self.net = nn.Sequential(*self.net) + + def forward(self, input): + return self.net(input) diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/options/__init__.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/options/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/options/base_options.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/options/base_options.py new file mode 100644 index 0000000..e56ae40 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/options/base_options.py @@ -0,0 +1,141 @@ +import argparse +import os +from util import util +import torch +import models +import data + + +class BaseOptions(): + def __init__(self): + self.initialized = False + + def initialize(self, parser): + parser.add_argument('--dataroot', required=True, help='path to images (should have subfolders trainA, trainB, valA, valB, etc)') + parser.add_argument('--batch_size', type=int, default=1, help='input batch size') + parser.add_argument('--seq_len', type=int, default=1, help='sequence length (if applicable)') + parser.add_argument('--fineSize', type=int, default=512, help='then crop to this size') + parser.add_argument('--display_winsize', type=int, default=256, help='display window size for both visdom and HTML') + parser.add_argument('--input_nc', type=int, default=3, help='# of input image channels') + parser.add_argument('--output_nc', type=int, default=3, help='# of output image channels') + parser.add_argument('--ngf', type=int, default=64, help='# of gen filters in first conv 
layer') + parser.add_argument('--ndf', type=int, default=64, help='# of discrim filters in first conv layer') + parser.add_argument('--netD', type=str, default='basic', help='selects model to use for netD') + parser.add_argument('--netG', type=str, default='resnet_9blocks', help='selects model to use for netG') + parser.add_argument('--n_layers_D', type=int, default=3, help='only used if netD==n_layers') + parser.add_argument('--gpu_ids', type=str, default='0', help='gpu ids: e.g. 0 0,1,2, 0,2. use -1 for CPU') + parser.add_argument('--name', type=str, default='experiment_name', help='name of the experiment. It decides where to store samples and models') + parser.add_argument('--renderer', type=str, default='no_renderer', help='name of the renderer to load the models from') + parser.add_argument('--fix_renderer', action='store_true', help='renderer is fixed') + parser.add_argument('--dataset_mode', type=str, default='aligned', help='chooses how datasets are loaded. [aligned | multi]') + parser.add_argument('--model', type=str, default='cycle_gan', help='chooses which model to use. cycle_gan, pix2pix, test') + parser.add_argument('--direction', type=str, default='AtoB', help='AtoB or BtoA') + parser.add_argument('--epoch', type=str, default='latest', help='which epoch to load? set to latest to use latest cached model') + parser.add_argument('--load_iter', type=int, default='0', help='which iteration to load? if load_iter > 0, the code will load models by iter_[load_iter]; otherwise, the code will load models by [epoch]') + parser.add_argument('--num_threads', default=4, type=int, help='# threads for loading data') + parser.add_argument('--checkpoints_dir', type=str, default='./checkpoints', help='models are saved here') + parser.add_argument('--norm', type=str, default='instance', help='instance normalization or batch normalization') + parser.add_argument('--serial_batches', action='store_true', help='if true, takes images in order to make batches, otherwise takes them randomly') + parser.add_argument('--no_dropout', action='store_true', help='no dropout for the generator') + parser.add_argument('--max_dataset_size', type=int, default=float("inf"), help='Maximum number of samples allowed per dataset. 
If the dataset directory contains more than max_dataset_size, only a subset is loaded.')
+        parser.add_argument('--resize_or_crop', type=str, default='resize_and_crop', help='scaling and cropping of images at load time [resize_and_crop|crop|scale_width|scale_width_and_crop|none]')
+        parser.add_argument('--no_augmentation', action='store_true', help='if specified, no data augmentation')
+        #parser.add_argument('--init_type', type=str, default='normal', help='network initialization [normal|xavier|kaiming|orthogonal]')
+        parser.add_argument('--init_type', type=str, default='xavier', help='network initialization [normal|xavier|kaiming|orthogonal]')
+        parser.add_argument('--init_gain', type=float, default=0.02, help='scaling factor for normal, xavier and orthogonal.')
+        parser.add_argument('--verbose', action='store_true', help='if specified, print more debugging information')
+        parser.add_argument('--suffix', default='', type=str, help='customized suffix: opt.name = opt.name + suffix: e.g., {model}_{netG}_size{loadSize}')
+        parser.add_argument('--tex_dim', type=int, default=256, help='neural texture dimensions')
+        parser.add_argument('--tex_features_intermediate', type=int, default=16, help='# intermediate neural texture features when using dynamic textures')
+        parser.add_argument('--tex_features', type=int, default=16, help='# neural texture features')
+        parser.add_argument('--textureModel', type=str, default='DynamicNeuralTextureAudio', help='texture model')
+        parser.add_argument('--rendererType', type=str, default='UNET_5_level', help='neural renderer network')
+        parser.add_argument('--lossType', type=str, default='L1', help='loss type for the final output')
+
+        parser.add_argument('--hierarchicalTex', action='store_true', help='if specified, hierarchical neural textures are used')
+
+        parser.add_argument('--output_audio_expressions', action='store_true', help='if specified, output audio expressions')
+
+        parser.add_argument('--erosionFactor', type=float, default=1.0, help='scaling factor for erosion of the background.')
+
+        parser.add_argument('--audio_window_size', type=float, default=16, help='audio window size = #mel feature bins')
+
+        parser.add_argument('--look_ahead', action='store_true', help='if specified, use look-ahead in the input sequence')
+
+        parser.add_argument('--cached_images', action='store_true', help='cache images in numpy format')
+
+        # add option for deca details
+        parser.add_argument('--deca_details', action='store_true', help='use DECA details as additional input')
+
+        self.initialized = True
+        return parser
+
+    def gather_options(self):
+        # initialize parser with basic options
+        if not self.initialized:
+            parser = argparse.ArgumentParser(
+                formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+            parser = self.initialize(parser)
+
+        # get the basic options
+        opt, _ = parser.parse_known_args()
+
+        # modify model-related parser options
+        model_name = opt.model
+        model_option_setter = models.get_option_setter(model_name)
+        parser = model_option_setter(parser, self.isTrain)
+        opt, _ = parser.parse_known_args()  # parse again with the new defaults
+
+        # modify dataset-related parser options
+        dataset_name = opt.dataset_mode
+        dataset_option_setter = data.get_option_setter(dataset_name)
+        parser = dataset_option_setter(parser, self.isTrain)
+
+        self.parser = parser
+
+        return parser.parse_args()
+
+    def print_options(self, opt):
+        message = ''
+        message += '----------------- Options ---------------\n'
+        for k, v in sorted(vars(opt).items()):
+            comment = ''
+            default = self.parser.get_default(k)
+            if v != default:
+                comment = '\t[default: %s]' % str(default)
+            message += '{:>25}: {:<30}{}\n'.format(str(k), str(v), comment)
+        message += '----------------- End -------------------'
+        print(message)
+
+        # save to the disk
+        expr_dir = os.path.join(opt.checkpoints_dir, opt.name)
+        util.mkdirs(expr_dir)
+        file_name = os.path.join(expr_dir, 'opt.txt')
+        with open(file_name, 'wt') as opt_file:
+            opt_file.write(message)
+            opt_file.write('\n')
+
+    def parse(self):
+
+        opt = self.gather_options()
+        opt.isTrain = self.isTrain  # train or test
+
+        # process opt.suffix
+        if opt.suffix:
+            suffix = ('_' + opt.suffix.format(**vars(opt))) if opt.suffix != '' else ''
+            opt.name = opt.name + suffix
+
+        self.print_options(opt)
+
+        # set gpu ids
+        str_ids = opt.gpu_ids.split(',')
+        opt.gpu_ids = []
+        for str_id in str_ids:
+            id = int(str_id)
+            if id >= 0:
+                opt.gpu_ids.append(id)
+        if len(opt.gpu_ids) > 0:
+            torch.cuda.set_device(opt.gpu_ids[0])
+
+        self.opt = opt
+        return self.opt
diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/options/test_options.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/options/test_options.py
new file mode 100644
index 0000000..2c35acf
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/options/test_options.py
@@ -0,0 +1,32 @@
+from .base_options import BaseOptions
+
+
+class TestOptions(BaseOptions):
+    def initialize(self, parser):
+        parser = BaseOptions.initialize(self, parser)
+        parser.add_argument('--ntest', type=int, default=float("inf"), help='# of test examples.')
+        parser.add_argument('--results_dir', type=str, default='./results/', help='saves results here.')
+        parser.add_argument('--aspect_ratio', type=float, default=1.0, help='aspect ratio of result images')
+        parser.add_argument('--phase', type=str, default='test', help='train, val, test, etc')
+        # Dropout and BatchNorm have different behaviour during training and test.
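+        # (Passing --eval below calls net.eval(), which disables dropout and
+        # switches normalisation layers to their inference behaviour, so test
+        # outputs are deterministic.)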
+        parser.add_argument('--eval', action='store_true', help='use eval mode during test time.')
+        parser.add_argument('--num_test', type=int, default=50, help='how many test images to run')
+
+        parser.add_argument('--write_no_images', action='store_true', help='if specified, do not write output images')
+
+        parser.add_argument('--write_video', action='store_true', help='write video')
+        parser.add_argument('--video_fps', type=float, default=25.0, help='video fps')
+
+        parser.add_argument('--target_dataroot', type=str, default=None)
+        parser.add_argument('--source_dataroot', type=str, default='./datasets/', help='loads source files (expressions, audio, uvs).')
+        parser.add_argument('--expr_path', type=str, default=None)
+        parser.add_argument('--images_target_dir', type=str, default=None, help='path to save images.')
+
+        parser.add_argument('--frame_id_source', type=int, default=-1)
+        parser.add_argument('--frame_id_target', type=int, default=-1)
+
+        parser.set_defaults(model='test')
+        # To avoid cropping, the loadSize should be the same as fineSize
+        parser.set_defaults(loadSize=parser.get_default('fineSize'))
+        self.isTrain = False
+        return parser
diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/options/train_options.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/options/train_options.py
new file mode 100644
index 0000000..c91d50d
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/options/train_options.py
@@ -0,0 +1,37 @@
+from .base_options import BaseOptions
+
+
+class TrainOptions(BaseOptions):
+    def initialize(self, parser):
+        parser = BaseOptions.initialize(self, parser)
+        parser.add_argument('--display_freq', type=int, default=400, help='frequency of showing training results on screen')
+        parser.add_argument('--display_ncols', type=int, default=4, help='if positive, display all images in a single visdom web panel with certain number of images per row.')
+        parser.add_argument('--display_id', type=int, default=1, help='window id of the web display')
+        parser.add_argument('--display_server', type=str, default="http://localhost", help='visdom server of the web display')
+        parser.add_argument('--display_env', type=str, default='main', help='visdom display environment name (default is "main")')
+        parser.add_argument('--display_port', type=int, default=8097, help='visdom port of the web display')
+
+        parser.add_argument('--compute_val', action='store_true', help='compute validation')
+        parser.add_argument('--input_noise_augmentation', action='store_true', help='add input noise')
+
+        parser.add_argument('--update_html_freq', type=int, default=1000, help='frequency of saving training results to html')
+        parser.add_argument('--print_freq', type=int, default=500, help='frequency of showing training results on console')
+        parser.add_argument('--save_latest_freq', type=int, default=5000, help='frequency of saving the latest results')
+        parser.add_argument('--save_epoch_freq', type=int, default=5, help='frequency of saving checkpoints at the end of epochs')
+        parser.add_argument('--save_by_iter', action='store_true', help='whether to save the model by iteration')
+
+        parser.add_argument('--continue_train', action='store_true', help='continue training: load the latest model')
+        parser.add_argument('--epoch_count', type=int, default=1, help='the starting epoch count, we save the model by <epoch_count>, <epoch_count>+<save_latest_freq>, ...')
+        parser.add_argument('--phase', type=str, default='train', help='train, val, test, etc')
+        parser.add_argument('--niter', type=int, default=100, help='# of iter at starting learning rate')
+        parser.add_argument('--niter_decay', type=int, default=100, help='# of iter to linearly decay learning rate to zero')
+        parser.add_argument('--beta1', type=float, default=0.5, help='momentum term of adam')
+        parser.add_argument('--lr', type=float, default=0.0002, help='initial learning rate for adam')
+        parser.add_argument('--no_lsgan', action='store_true', help='if specified, use vanilla GAN loss instead of least-squares GAN (LSGAN)')
+        parser.add_argument('--pool_size', type=int, default=50, help='the size of image buffer that stores previously generated images')
+        parser.add_argument('--no_html', action='store_true', help='do not save intermediate training results to [opt.checkpoints_dir]/[opt.name]/web/')
+        parser.add_argument('--lr_policy', type=str, default='lambda', help='learning rate policy: lambda|step|plateau|cosine')
+        parser.add_argument('--lr_decay_iters', type=int, default=50, help='multiply by a gamma every lr_decay_iters iterations')
+
+        self.isTrain = True
+        return parser
diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/options/transfer_options.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/options/transfer_options.py
new file mode 100644
index 0000000..3fb0709
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/options/transfer_options.py
@@ -0,0 +1,30 @@
+from .base_options import BaseOptions
+
+
+class TransferOptions(BaseOptions):
+    def initialize(self, parser):
+        parser = BaseOptions.initialize(self, parser)
+        parser.add_argument('--ntest', type=int, default=float("inf"), help='# of test examples.')
+        parser.add_argument('--results_dir', type=str, default='./results/', help='saves results here.')
+        parser.add_argument('--aspect_ratio', type=float, default=1.0, help='aspect ratio of result images')
+        parser.add_argument('--phase', type=str, default='test', help='train, val, test, etc')
+        # Dropout and BatchNorm have different behaviour during training and test.
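+        # (TransferOptions mirrors TestOptions; instead of the test dataroots
+        # it takes the --source_actor and --target_actor directories added
+        # below, so a source actor's expressions/audio can drive the target.)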
+ parser.add_argument('--eval', action='store_true', help='use eval mode during test time.') + parser.add_argument('--num_test', type=int, default=50, help='how many test images to run') + + parser.add_argument('--write_no_images', action='store_true', help='compute validation') + + + parser.add_argument('--source_dir', type=str, default='./datasets/', help='loads source files (expressions, audio, uvs).') + + + + parser.add_argument('--source_actor', type=str, default='', help='source actor directory') + parser.add_argument('--target_actor', type=str, default='', help='target actor directory') + + + parser.set_defaults(model='test') + # To avoid cropping, the loadSize should be the same as fineSize + parser.set_defaults(loadSize=parser.get_default('fineSize')) + self.isTrain = False + return parser diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/req.txt b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/req.txt new file mode 100644 index 0000000..5252532 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/req.txt @@ -0,0 +1,60 @@ +audioread==2.1.6 +certifi==2018.11.29 +cffi==1.12.1 +chardet==3.0.4 +chumpy==0.68 +cycler==0.10.0 +decorator==4.3.2 +dominate==2.3.5 +EasyProcess==0.2.5 +freetype-py==2.1.0.post1 +future==0.17.1 +idna==2.8 +imageio==2.5.0 +joblib==0.13.2 +kiwisolver==1.0.1 +librosa==0.6.3 +llvmlite==0.27.1 +matplotlib==3.0.3 +mkl-fft==1.0.6 +mkl-random==1.0.1 +moderngl==5.5.0 +networkx==2.2 +neural-renderer-pytorch==1.1.3 +numba==0.42.1 +numpy==1.16.1 +olefile==0.46 +opencv-python==4.1.1.26 +OpenEXR==1.3.2 +Pillow==5.4.1 +progressbar2==3.46.1 +pycparser==2.19 +pyglet==1.4.0b1 +PyOpenGL==3.1.0 +pyparsing==2.3.1 +pyrender==0.1.24 +python-dateutil==2.8.0 +python-Levenshtein==0.12.0 +python-utils==2.3.0 +PyVirtualDisplay==0.2.1 +PyWavelets==1.0.3 +pyzmq==18.0.0 +requests==2.21.0 +resampy==0.2.1 +scikit-image==0.15.0 +scikit-learn==0.20.2 +scipy==1.2.1 +Shapely==1.6.4.post2 +six==1.12.0 +soft-renderer==1.0.0 +torch==1.0.1.post2 +torchaudio==0.2 +torchfile==0.1.0 +torchvision==0.2.1 +tornado==5.1.1 +tqdm==4.31.1 +trimesh==2.38.19 +urllib3==1.24.1 +visdom==0.1.8.8 +websocket-client==0.54.0 +wget==3.2 diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/requirements.txt b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/requirements.txt new file mode 100644 index 0000000..072d027 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/requirements.txt @@ -0,0 +1,4 @@ +torch>=0.4.0 +torchvision>=0.2.1 +dominate>=2.3.1 +visdom>=0.1.8.3 diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/train_renderer.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/train_renderer.py new file mode 100644 index 0000000..f9911b5 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/train_renderer.py @@ -0,0 +1,105 @@ +import time +import copy +from options.train_options import TrainOptions +from data import CreateDataLoader +from models import create_model +from util.visualizer import Visualizer + + +if __name__ == '__main__': + # training dataset + opt = TrainOptions().parse() + data_loader = CreateDataLoader(opt) + dataset = data_loader.load_data() + dataset_size = len(data_loader) + print('#training images = %d' % dataset_size) + # print('#training objects = %d' % opt.nTrainObjects) + + ## validation dataset + if opt.compute_val: + opt_validation = copy.copy(opt) # create a clone + opt_validation.phase = 'val' + opt_validation.serial_batches = True + 
opt_validation.isTrain = False + data_loader_validation = CreateDataLoader(opt_validation) + dataset_validation = data_loader_validation.load_data() + dataset_size_validation = len(data_loader_validation) + print('#validation images = %d' % dataset_size_validation) + print('#validation objects = %d' % opt_validation.nValObjects) + + # model + model = create_model(opt) + model.setup(opt) + + if opt.renderer != 'no_renderer': + print('load renderer') + model.loadModules(opt, opt.renderer, ['netD','netG']) + + visualizer = Visualizer(opt) + total_steps = 0 + + for epoch in range(opt.epoch_count, opt.niter + opt.niter_decay + 1): + epoch_start_time = time.time() + iter_data_time = time.time() + epoch_iter = 0 # iterator within an epoch + + for i, data in enumerate(dataset): + iter_start_time = time.time() + if total_steps % opt.print_freq == 0: + t_data = iter_start_time - iter_data_time + visualizer.reset() + total_steps += opt.batch_size + epoch_iter += opt.batch_size + + model.set_input(data) + model.optimize_parameters(epoch) + + if total_steps % opt.display_freq == 0: + save_result = total_steps % opt.update_html_freq == 0 + visualizer.display_current_results(model.get_current_visuals(), epoch, save_result) + + if total_steps % opt.print_freq == 0: + losses = model.get_current_losses() + + + if opt.compute_val: + validation_error = 0 + cnt = 0 + for i, data in enumerate(dataset_validation): + model.set_input(data) + model.forward() + model.backward_G(epoch) # be carefull with the gradients (are zeroed in the optimization step) + validation_error += model.loss_G.detach().cpu() + cnt += 1.0 + validation_error /= cnt + #print('Validation Error:', validation_error) + #visualizer.plot_current_validation_error(epoch, float(epoch_iter) / dataset_size, {'validation_error': validation_error}) + losses.update({'validation_error': validation_error}) + + t = (time.time() - iter_start_time) / opt.batch_size + visualizer.print_current_losses(epoch, epoch_iter, losses, t, t_data) + if opt.display_id > 0: + visualizer.plot_current_losses(epoch, float(epoch_iter) / dataset_size, opt, losses) + + if total_steps % opt.save_latest_freq == 0: + print('saving the latest model (epoch %d, total_steps %d)' % (epoch, total_steps)) + save_suffix = 'iter_%d' % total_steps if opt.save_by_iter else 'latest' + model.save_networks(save_suffix) + + iter_data_time = time.time() + + + + if epoch % opt.save_epoch_freq == 0: + print('saving the model at the end of epoch %d, iters %d' % (epoch, total_steps)) + model.save_networks('latest') + model.save_networks(epoch) + + print('End of epoch %d / %d \t Time Taken: %d sec' % + (epoch, opt.niter + opt.niter_decay, time.time() - epoch_start_time)) + model.update_learning_rate() + + model.save_networks('latest') + model.save_networks(epoch) + model.clean_checkpoints(opt.niter + opt.niter_decay + 1) + diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/train_renderer.sh b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/train_renderer.sh new file mode 100644 index 0000000..dbe4dcd --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/train_renderer.sh @@ -0,0 +1,70 @@ +set -ex +# . 
train_audio2expressionsAttentionTMP.sh & +GPUID=0 + +DP=/home/alberto/NeuralVoicePuppetry/datasets/SRF_anchor_short +SUB=Halbtotale_355_9415 + +DATASETS_DIR=$DP/$SUB +DATASET_MODE=custom_aligned +OBJECT=Halbtotale_355_9415.h5 + +# neural texture, not used here +TEX_DIM=128 +TEX_FEATURES=16 + +INPUT_NC=2 +NUM_THREADS=1 + +# loss +LOSS=VGG +#LOSS=L1 +#LOSS=RMS +#LOSS=L4 + +# models +TEXTUREMODEL=DynamicNeuralTextureExpression +#TEXTUREMODEL=DynamicNeuralTextureAudio +MODEL=DynamicNeuralTextures +RENDERER_TYPE=UNET_6_level # There are many more of these TODO + + +# optimizer parameters +#LR=0.00001 +LR=0.0001 + +#N_ITER=150 #50 #N_ITER=150 +#N_ITER_LR_DECAY=50 + +N_ITER=50 #50 #N_ITER=150 +N_ITER_LR_DECAY=50 + +BATCH_SIZE=8 +SEQ_LEN=8 + + +RENDERER=$OBJECT +EROSION=0.6 + +################################################################################ +################################################################################ +################################################################################ +DATE_WITH_TIME=`date "+%Y%m%d-%H%M%S"` +NAME=$MODEL-$RENDERER_TYPE-$TEXTUREMODEL-SL$SEQ_LEN-BS$BATCH_SIZE-$OBJECT-$DATASET_MODE-$LOSS-$DATE_WITH_TIME-look_ahead +DISPLAY_NAME=${MODEL}-$DATASET_MODE_${OBJECT}-${RENDERER_TYPE}-SL$SEQ_LEN-BS$BATCH_SIZE-${LOSS}-look_ahead +DISPLAY_ID=0 + + + +# training +# --input_noise_augmentation +echo "---------" +CUDA_VISIBLE_DEVICES=0 python train_renderer.py --textureModel $TEXTUREMODEL --num_threads $NUM_THREADS --input_nc $INPUT_NC --display_id $DISPLAY_ID --look_ahead --seq_len $SEQ_LEN --save_latest_freq 100000 --no_augmentation --name $NAME --erosionFactor $EROSION --tex_dim $TEX_DIM --tex_features $TEX_FEATURES --rendererType $RENDERER_TYPE --lossType $LOSS --display_env $DISPLAY_NAME --niter $N_ITER --niter_decay $N_ITER_LR_DECAY --dataroot $DATASETS_DIR/$OBJECT --model $MODEL --netG unet_256 --lambda_L1 100 --dataset_mode $DATASET_MODE --no_lsgan --norm instance --pool_size 0 --gpu_ids $GPUID --lr $LR --batch_size $BATCH_SIZE + +# # testing +#EPOCH=latest +#python test.py --seq_len $SEQ_LEN --write_no_images --name $NAME --erosionFactor $EROSION --epoch $EPOCH --display_winsize 512 --tex_dim $TEX_DIM --tex_features $TEX_FEATURES --rendererType $RENDERER_TYPE --lossType $LOSS --dataroot $DATASETS_DIR/$OBJECT --model $MODEL --netG unet_256 --dataset_mode $DATASET_MODE --norm instance --gpu_ids $GPUID + +################################################################################ +################################################################################ +################################################################################ diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/train_renderer_deca_details.sh b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/train_renderer_deca_details.sh new file mode 100644 index 0000000..1f5a191 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/train_renderer_deca_details.sh @@ -0,0 +1,104 @@ +set -ex +# . 
train_audio2expressionsAttentionTMP.sh & +GPUID=0 + +DP=/home/alberto/NeuralVoicePuppetry/datasets/External +SUB=Youtube_Russian_guy + +DATASETS_DIR=$DP/$SUB +DATASET_MODE=custom_aligned +OBJECT=Youtube_Russian_guy.h5 + +# neural texture, not used here +TEX_DIM=128 +TEX_FEATURES=16 + +INPUT_NC=5 +NUM_THREADS=1 + +# loss +LOSS=VGG +#LOSS=L1 +#LOSS=RMS +#LOSS=L4 + +# models +TEXTUREMODEL=DynamicNeuralTextureExpression +#TEXTUREMODEL=DynamicNeuralTextureAudio +MODEL=DynamicNeuralTextures +RENDERER_TYPE=UNET_6_level # There are many more of these TODO + + +# optimizer parameters +#LR=0.00001 +LR=0.0001 + +#N_ITER=150 #50 #N_ITER=150 +#N_ITER_LR_DECAY=50 + +N_ITER=50 #50 #N_ITER=150 +N_ITER_LR_DECAY=50 + +BATCH_SIZE=8 +SEQ_LEN=8 + + +RENDERER=$OBJECT +EROSION=0.6 + +################################################################################ +################################################################################ +################################################################################ +DATE_WITH_TIME=`date "+%Y%m%d-%H%M%S"` +NAME=$MODEL-$RENDERER_TYPE-$TEXTUREMODEL-SL$SEQ_LEN-BS$BATCH_SIZE-$OBJECT-$DATASET_MODE-$LOSS-$DATE_WITH_TIME-look_ahead_mask_mouth +#NAME=DynamicNeuralTextures-UNET_6_level-DynamicNeuralTextureExpression-SL8-BS8-Halbtotale_355_9415.h5-custom_aligned-VGG-20210308-004445-look_ahead +#NAME=DynamicNeuralTextures-UNET_6_level-DynamicNeuralTextureExpression-SL8-BS8-Halbtotale_355_9415.h5-custom_aligned-VGG-20210309-184531-look_ahead_masked +DISPLAY_NAME=${MODEL}-$DATASET_MODE_${OBJECT}-${RENDERER_TYPE}-SL$SEQ_LEN-BS$BATCH_SIZE-${LOSS}-look_ahead +DISPLAY_ID=0 + + + +# training +# --input_noise_augmentation +echo "---------" +CUDA_VISIBLE_DEVICES=0 python \ +train_renderer.py \ +--textureModel $TEXTUREMODEL \ +--num_threads $NUM_THREADS \ +--input_nc $INPUT_NC \ +--display_id $DISPLAY_ID \ +--look_ahead \ +--seq_len $SEQ_LEN \ +--save_latest_freq 100000 \ +--no_augmentation \ +--name $NAME \ +--erosionFactor $EROSION \ +--tex_dim $TEX_DIM \ +--tex_features $TEX_FEATURES \ +--rendererType $RENDERER_TYPE \ +--lossType $LOSS \ +--display_env $DISPLAY_NAME \ +--niter $N_ITER \ +--niter_decay $N_ITER_LR_DECAY \ +--dataroot $DATASETS_DIR/$OBJECT \ +--model $MODEL \ +--netG unet_256 \ +--lambda_L1 100 \ +--dataset_mode $DATASET_MODE \ +--no_lsgan \ +--norm instance \ +--pool_size 0 \ +--gpu_ids $GPUID \ +--lr $LR \ +--batch_size $BATCH_SIZE \ +--deca_details \ +#--continue_train \ +#--epoch_count 15 \ + +# # testing +#EPOCH=latest +#python test.py --seq_len $SEQ_LEN --write_no_images --name $NAME --erosionFactor $EROSION --epoch $EPOCH --display_winsize 512 --tex_dim $TEX_DIM --tex_features $TEX_FEATURES --rendererType $RENDERER_TYPE --lossType $LOSS --dataroot $DATASETS_DIR/$OBJECT --model $MODEL --netG unet_256 --dataset_mode $DATASET_MODE --norm instance --gpu_ids $GPUID + +################################################################################ +################################################################################ +################################################################################ diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/util/__init__.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/util/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/util/get_data.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/util/get_data.py new file mode 100644 index 0000000..6325605 --- /dev/null +++ 
b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/util/get_data.py @@ -0,0 +1,115 @@ +from __future__ import print_function +import os +import tarfile +import requests +from warnings import warn +from zipfile import ZipFile +from bs4 import BeautifulSoup +from os.path import abspath, isdir, join, basename + + +class GetData(object): + """ + + Download CycleGAN or Pix2Pix Data. + + Args: + technique : str + One of: 'cyclegan' or 'pix2pix'. + verbose : bool + If True, print additional information. + + Examples: + >>> from util.get_data import GetData + >>> gd = GetData(technique='cyclegan') + >>> new_data_path = gd.get(save_path='./datasets') # options will be displayed. + + """ + + def __init__(self, technique='cyclegan', verbose=True): + url_dict = { + 'pix2pix': 'https://people.eecs.berkeley.edu/~tinghuiz/projects/pix2pix/datasets', + 'cyclegan': 'https://people.eecs.berkeley.edu/~taesung_park/CycleGAN/datasets' + } + self.url = url_dict.get(technique.lower()) + self._verbose = verbose + + def _print(self, text): + if self._verbose: + print(text) + + @staticmethod + def _get_options(r): + soup = BeautifulSoup(r.text, 'lxml') + options = [h.text for h in soup.find_all('a', href=True) + if h.text.endswith(('.zip', 'tar.gz'))] + return options + + def _present_options(self): + r = requests.get(self.url) + options = self._get_options(r) + print('Options:\n') + for i, o in enumerate(options): + print("{0}: {1}".format(i, o)) + choice = input("\nPlease enter the number of the " + "dataset above you wish to download:") + return options[int(choice)] + + def _download_data(self, dataset_url, save_path): + if not isdir(save_path): + os.makedirs(save_path) + + base = basename(dataset_url) + temp_save_path = join(save_path, base) + + with open(temp_save_path, "wb") as f: + r = requests.get(dataset_url) + f.write(r.content) + + if base.endswith('.tar.gz'): + obj = tarfile.open(temp_save_path) + elif base.endswith('.zip'): + obj = ZipFile(temp_save_path, 'r') + else: + raise ValueError("Unknown File Type: {0}.".format(base)) + + self._print("Unpacking Data...") + obj.extractall(save_path) + obj.close() + os.remove(temp_save_path) + + def get(self, save_path, dataset=None): + """ + + Download a dataset. + + Args: + save_path : str + A directory to save the data to. + dataset : str, optional + A specific dataset to download. + Note: this must include the file extension. + If None, options will be presented for you + to choose from. + + Returns: + save_path_full : str + The absolute path to the downloaded data. + + """ + if dataset is None: + selected_dataset = self._present_options() + else: + selected_dataset = dataset + + save_path_full = join(save_path, selected_dataset.split('.')[0]) + + if isdir(save_path_full): + warn("\n'{0}' already exists. 
Skipping download.".format(
+                save_path_full))
+        else:
+            self._print('Downloading Data...')
+            url = "{0}/{1}".format(self.url, selected_dataset)
+            self._download_data(url, save_path=save_path)
+
+        return abspath(save_path_full)
diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/util/image_pool.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/util/image_pool.py
new file mode 100644
index 0000000..4a525fc
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/util/image_pool.py
@@ -0,0 +1,32 @@
+import random
+import torch
+
+
+class ImagePool():
+    def __init__(self, pool_size):
+        self.pool_size = pool_size
+        if self.pool_size > 0:
+            self.num_imgs = 0
+            self.images = []
+
+    def query(self, images):
+        if self.pool_size == 0:
+            return images
+        return_images = []
+        for image in images:
+            image = torch.unsqueeze(image.data, 0)  # .data drops the autograd graph
+            if self.num_imgs < self.pool_size:
+                self.num_imgs = self.num_imgs + 1
+                self.images.append(image)
+                return_images.append(image)
+            else:
+                p = random.uniform(0, 1)
+                if p > 0.5:
+                    random_id = random.randint(0, self.pool_size - 1)  # randint is inclusive
+                    tmp = self.images[random_id].clone()
+                    self.images[random_id] = image
+                    return_images.append(tmp)
+                else:
+                    return_images.append(image)
+        return_images = torch.cat(return_images, 0)
+        return return_images
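+
+
+if __name__ == '__main__':
+    # Minimal usage sketch (added; not part of the original training code):
+    # the pool is typically fed generator outputs so the discriminator sees a
+    # mix of current and historical fakes. `fake_images` is a random stand-in.
+    pool = ImagePool(pool_size=50)
+    fake_images = torch.randn(4, 3, 16, 16)
+    mixed = pool.query(fake_images)   # 50/50 mix of new and buffered fakes
+    print(mixed.shape)                # torch.Size([4, 3, 16, 16])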
diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/util/ownhtml.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/util/ownhtml.py
new file mode 100644
index 0000000..41eabdd
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/util/ownhtml.py
@@ -0,0 +1,64 @@
+import dominate
+from dominate.tags import meta, h3, table, tr, td, p, a, img, br
+import os
+
+
+class OwnHTML:
+    def __init__(self, web_dir, title, refresh=0):
+        self.title = title
+        self.web_dir = web_dir
+        self.img_dir = os.path.join(self.web_dir, 'images')
+        if not os.path.exists(self.web_dir):
+            os.makedirs(self.web_dir)
+        if not os.path.exists(self.img_dir):
+            os.makedirs(self.img_dir)
+        # print(self.img_dir)
+
+        self.doc = dominate.document(title=title)
+        if refresh > 0:
+            # auto-reload the page every `refresh` seconds
+            with self.doc.head:
+                meta(http_equiv="refresh", content=str(refresh))
+
+    def get_image_dir(self):
+        return self.img_dir
+
+    def add_header(self, str):
+        with self.doc:
+            h3(str)
+
+    def add_table(self, border=1):
+        self.t = table(border=border, style="table-layout: fixed;")
+        self.doc.add(self.t)
+
+    def add_images(self, ims, txts, links, width=400):
+        self.add_table()
+        with self.t:
+            with tr():
+                for im, txt, link in zip(ims, txts, links):
+                    with td(style="word-wrap: break-word;", halign="center", valign="top"):
+                        with p():
+                            with a(href=os.path.join('images', link)):
+                                img(style="width:%dpx" % width, src=os.path.join('images', im))
+                            br()
+                            p(txt)
+
+    def save(self):
+        html_file = '%s/index.html' % self.web_dir
+        f = open(html_file, 'wt')
+        f.write(self.doc.render())
+        f.close()
+
+
+if __name__ == '__main__':
+    html = OwnHTML('web/', 'test_html')
+    html.add_header('hello world')
+
+    ims = []
+    txts = []
+    links = []
+    for n in range(4):
+        ims.append('image_%d.png' % n)
+        txts.append('text_%d' % n)
+        links.append('image_%d.png' % n)
+    html.add_images(ims, txts, links)
+    html.save()
diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/util/util.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/util/util.py
new file mode 100644
index 0000000..0509edf
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/util/util.py
@@ -0,0 +1,82 @@
+from __future__ import print_function
+import torch
+import numpy as np
+from PIL import Image
+import os
+import sys
+import array
+# import OpenEXR
+# import Imath
+
+# def load_exr(image_path):
+#     # Open the input file
+#     file = OpenEXR.InputFile(image_path)
+#
+#     # Compute the size
+#     dw = file.header()['dataWindow']
+#     w, h = (dw.max.x - dw.min.x + 1, dw.max.y - dw.min.y + 1)
+#
+#     # Read the three color channels as 32-bit floats
+#     FLOAT = Imath.PixelType(Imath.PixelType.FLOAT)
+#     #(R,G,B) = [np.array(array.array('f', file.channel(Chan, FLOAT)).tolist()).reshape((w, h, 1)) for Chan in ("R", "G", "B") ]
+#
+#     (r, g, b) = file.channels("RGB")
+#     R = np.array(array.array('f', r).tolist()).reshape((w, h, 1))
+#     G = np.array(array.array('f', g).tolist()).reshape((w, h, 1))
+#     B = np.array(array.array('f', b).tolist()).reshape((w, h, 1))
+#
+#     return np.concatenate((R, G, B), axis=2)
+
+
+# Converts a Tensor into an image array (numpy)
+# |imtype|: the desired type of the converted numpy array
+def tensor2im(input_image, imtype=np.uint8):
+    if isinstance(input_image, torch.Tensor):
+        input_image = torch.clamp(input_image, -1.0, 1.0)
+        image_tensor = input_image.data
+    else:
+        return input_image
+    image_numpy = image_tensor[0].cpu().float().numpy()
+    if image_numpy.shape[0] == 1:
+        image_numpy = np.tile(image_numpy, (3, 1, 1))
+    image_numpy = (np.transpose(image_numpy, (1, 2, 0)) + 1) / 2.0 * 255.0
+    return image_numpy.astype(imtype)
+
+
+def diagnose_network(net, name='network'):
+    mean = 0.0
+    count = 0
+    for param in net.parameters():
+        if param.grad is not None:
+            mean += torch.mean(torch.abs(param.grad.data))
+            count += 1
+    if count > 0:
+        mean = mean / count
+    print(name)
+    print(mean)
+
+
+def save_image(image_numpy, image_path):
+    image_pil = Image.fromarray(image_numpy)
+    image_pil.save(image_path)
+
+
+def print_numpy(x, val=True, shp=False):
+    x = x.astype(np.float64)
+    if shp:
+        print('shape,', x.shape)
+    if val:
+        x = x.flatten()
+        print('mean = %3.3f, min = %3.3f, max = %3.3f, median = %3.3f, std=%3.3f' % (
+            np.mean(x), np.min(x), np.max(x), np.median(x), np.std(x)))
+
+
+def mkdirs(paths):
+    if isinstance(paths, list) and not isinstance(paths, str):
+        for path in paths:
+            mkdir(path)
+    else:
+        mkdir(paths)
+
+
+def mkdir(path):
+    if not os.path.exists(path):
+        os.makedirs(path)
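+
+
+if __name__ == '__main__':
+    # Small self-check (added sketch, not in the original file): round-trip a
+    # dummy tensor in [-1, 1] through tensor2im and save_image; the output
+    # directory name is arbitrary.
+    t = torch.rand(1, 3, 8, 8) * 2.0 - 1.0
+    img = tensor2im(t)  # uint8 HWC array in [0, 255]
+    print_numpy(img, val=True, shp=True)
+    mkdir('./tmp_util_demo')
+    save_image(img, './tmp_util_demo/demo.png')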
diff --git a/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/util/visualizer.py b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/util/visualizer.py
new file mode 100644
index 0000000..cc301d3
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/NeuralRenderingNetwork/util/visualizer.py
@@ -0,0 +1,233 @@
+import numpy as np
+import os
+import sys
+import ntpath
+import time
+from PIL import Image
+from . import util
+from . import ownhtml
+# from scipy.misc import imresize
+
+# scipy.misc.imresize is gone from recent SciPy; PIL-based replacement
+def imresize(arr, size, interp='bicubic'):
+    method = Image.BICUBIC if interp == 'bicubic' else Image.BILINEAR
+    return np.array(Image.fromarray(arr).resize(size[::-1], method))
+
+
+if sys.version_info[0] == 2:
+    VisdomExceptionBase = Exception
+else:
+    VisdomExceptionBase = ConnectionError
+
+
+# # save image to the disk
+# def save_images(webpage, visuals, image_path, aspect_ratio=1.0, width=256):
+#     image_dir = webpage.get_image_dir()
+#     short_path = ntpath.basename(image_path[0])
+#     name = os.path.splitext(short_path)[0]
+#
+#     webpage.add_header(name)
+#     ims, txts, links = [], [], []
+#
+#     for label, im_data in visuals.items():
+#         im = util.tensor2im(im_data)
+#         image_name = '%s_%s.png' % (name, label)
+#         save_path = os.path.join(image_dir, image_name)
+#         h, w, _ = im.shape
+#
+#         height = int(width * h / float(w))
+#         im = imresize(im, (height, width), interp='bicubic')
+#
+#         #im = imresize(im, (height, width), interp='bicubic')
+#         #if aspect_ratio > 1.0:
+#         #    im = imresize(im, (h, int(w * aspect_ratio)), interp='bicubic')
+#         #if aspect_ratio < 1.0:
+#         #    im = imresize(im, (int(h / aspect_ratio), w), interp='bicubic')
+#
+#         util.save_image(im, save_path)
+#
+#         ims.append(image_name)
+#         txts.append(label)
+#         links.append(image_name)
+#     webpage.add_images(ims, txts, links, width=width)
+
+def save_images(webpage, visuals, image_path, aspect_ratio=1.0, width=256):
+    image_dir = webpage.get_image_dir()
+    short_path = ntpath.basename(image_path[0])
+    name = os.path.splitext(short_path)[0]
+
+    ims, txts, links = [], [], []
+
+    for label, im_data in visuals.items():
+        im = util.tensor2im(im_data)
+        image_name = '%s_%s.png' % (name, label)
+        save_path = os.path.join(image_dir, image_name)
+        h, w, _ = im.shape
+
+        height = int(width * h / float(w))
+        im = imresize(im, (height, width), interp='bicubic')
+        util.save_image(im, save_path)
+
+
+class Visualizer():
+    def __init__(self, opt):
+        print("vis")
+        self.display_id = opt.display_id
+        # self.use_html = opt.isTrain and not opt.no_html
+        self.use_html = True
+        self.win_size = opt.display_winsize
+        self.name = opt.name
+        self.opt = opt
+        self.saved = False
+        if self.display_id > 0:
+            import visdom
+            self.ncols = opt.display_ncols
+            self.vis = visdom.Visdom(server=opt.display_server, port=opt.display_port, env=opt.display_env, raise_exceptions=True)
+
+        if self.use_html:
+            self.web_dir = os.path.join(opt.checkpoints_dir, opt.name, 'web', opt.phase)
+            if "images_target_dir" in opt:
+                self.img_dir = os.path.join(opt.images_target_dir, 'images')
+            else:
+                self.img_dir = os.path.join(self.web_dir, 'images')
+            print('create web directory %s...' % self.web_dir)
+            util.mkdirs([self.web_dir, self.img_dir])
+        self.log_name = os.path.join(opt.checkpoints_dir, opt.name, 'loss_log.txt')
+        with open(self.log_name, "a") as log_file:
+            now = time.strftime("%c")
+            log_file.write('================ Training Loss (%s) ================\n' % now)
+
+    def reset(self):
+        self.saved = False
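+
+    # Note on the two output paths: when display_id > 0 the visuals are pushed
+    # to a running visdom server (display_current_results below); use_html is
+    # forced on in this fork, so the same images are also written under
+    # <checkpoints_dir>/<name>/web/<phase> and indexed via OwnHTML, which makes
+    # training inspectable without a visdom connection.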
+    def throw_visdom_connection_error(self):
+        print('\n\nCould not connect to Visdom server (https://github.com/facebookresearch/visdom) for displaying training progress.\n'
+              'You can suppress connection to Visdom using the option --display_id -1.\n'
+              'To install visdom, run\n$ pip install visdom\n'
+              'and start the server with\n$ python -m visdom.server\n\n')
+        exit(1)
+
+    # |visuals|: dictionary of images to display or save
+    def display_current_results(self, visuals, epoch, save_result, aspect_ratio=1.0, width=256):
+        if self.display_id > 0:  # show images in the browser
+            ncols = self.ncols
+            if ncols > 0:
+                ncols = min(ncols, len(visuals))
+                h, w = next(iter(visuals.values())).shape[2:4]
+                height = int(width * h / float(w))
+                h = height
+                w = width
+                table_css = """<style>
+                    table {border-collapse: separate; border-spacing: 4px; white-space: nowrap; text-align: center}
+                    table td {width: %dpx; height: %dpx; padding: 4px; outline: 4px solid black}
+                    </style>""" % (w, h)
+                title = self.name
+                label_html = ''
+                label_html_row = ''
+                images = []
+                idx = 0
+                for label, image in visuals.items():
+                    image_numpy = util.tensor2im(image)
+                    image_numpy = imresize(image_numpy, (h, w), interp='bicubic')
+                    image_numpy = image_numpy.transpose([2, 0, 1])
+                    label_html_row += '<td>%s</td>' % label
+                    images.append(image_numpy)
+                    idx += 1
+                    if idx % ncols == 0:
+                        label_html += '<tr>%s</tr>' % label_html_row
+                        label_html_row = ''
+                white_image = np.ones_like(image_numpy) * 255
+                while idx % ncols != 0:
+                    images.append(white_image)
+                    label_html_row += '<td></td>'
+                    idx += 1
+                if label_html_row != '':
+                    label_html += '<tr>%s</tr>' % label_html_row
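+                # The visdom pane tiles `images` ncols per row; the HTML table
+                # assembled above mirrors that grid with one <td> label per
+                # image, padded with blank cells so the rows stay aligned.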
+                # pane col = image row
+                try:
+                    self.vis.images(images, nrow=ncols, win=self.display_id + 1, padding=2, opts=dict(title=title + ' images'))
+                    label_html = '<table>%s</table>' % label_html
+                    self.vis.text(table_css + label_html, win=self.display_id + 2,
+                                  opts=dict(title=title + ' labels'))
+                except VisdomExceptionBase:
+                    self.throw_visdom_connection_error()
+
+            else:
+                idx = 1
+                for label, image in visuals.items():
+                    image_numpy = util.tensor2im(image)
+                    self.vis.image(image_numpy.transpose([2, 0, 1]), opts=dict(title=label),
+                                   win=self.display_id + idx)
+                    idx += 1
+
+        if self.use_html and (save_result or not self.saved):  # save images to a html file
+            self.saved = True
+            for label, image in visuals.items():
+                if label == "features":
+                    image = image[:, :3]
+                image_numpy = util.tensor2im(image)
+                img_path = os.path.join(self.img_dir, 'epoch%.5d_%s.png' % (epoch, label))
+                util.save_image(image_numpy, img_path)
+            # update website
+            webpage = ownhtml.OwnHTML(self.web_dir, 'Experiment name = %s' % self.name, refresh=1)
+            for n in range(epoch, 0, -1):
+                webpage.add_header('epoch [%d]' % n)
+                ims, txts, links = [], [], []
+
+                for label, image_numpy in visuals.items():
+                    image_numpy = util.tensor2im(image_numpy)
+                    img_path = 'epoch%.5d_%s.png' % (n, label)
+                    ims.append(img_path)
+                    txts.append(label)
+                    links.append(img_path)
+                webpage.add_images(ims, txts, links, width=self.win_size)
+            webpage.save()
+
+    # losses: dictionary of error labels and values
+    def plot_current_losses(self, epoch, counter_ratio, opt, losses):
+        if not hasattr(self, 'plot_data'):
+            self.plot_data = {'X': [], 'Y': [], 'legend': list(losses.keys())}
+        self.plot_data['X'].append(epoch + counter_ratio)
+        self.plot_data['Y'].append([losses[k] for k in self.plot_data['legend']])
+        try:
+            self.vis.line(
+                X=np.stack([np.array(self.plot_data['X'])] * len(self.plot_data['legend']), 1),
+                Y=np.array(self.plot_data['Y']),
+                opts={
+                    'title': self.name + ' loss over time',
+                    'legend': self.plot_data['legend'],
+                    'xlabel': 'epoch',
+                    'ylabel': 'loss'},
+                win=self.display_id)
+        except VisdomExceptionBase:
+            self.throw_visdom_connection_error()
+
+    # losses: same format as |losses| of plot_current_losses
+    def print_current_losses(self, epoch, i, losses, t, t_data):
+        message = '(epoch: %d, iters: %d, time: %.3f, data: %.3f) ' % (epoch, i, t, t_data)
+        for k, v in losses.items():
+            message += '%s: %.3f ' % (k, v)
+
+        print(message)
+        with open(self.log_name, "a") as log_file:
+            log_file.write('%s\n' % message)
+
+    # losses: dictionary of error labels and values
+    def plot_current_validation_error(self, epoch, counter_ratio, losses):
+        if not hasattr(self, 'plot_validation_data'):
+            self.plot_validation_data = {'X': [], 'Y': [], 'legend': list(losses.keys())}
+        self.plot_validation_data['X'].append(epoch + counter_ratio)
+        self.plot_validation_data['Y'].append([losses[k] for k in self.plot_validation_data['legend']])
+        try:
+            self.vis.line(
+                X=np.stack([np.array(self.plot_validation_data['X'])] * len(self.plot_validation_data['legend']), 1),
+                Y=np.array(self.plot_validation_data['Y']),
+                opts={
+                    'title': self.name + ' validation error over time',
+                    'legend': self.plot_validation_data['legend'],
+                    'xlabel': 'epoch',
+                    'ylabel': 'error'},
+                win=self.display_id + 1)
+        except VisdomExceptionBase:
+            self.throw_visdom_connection_error()
+
+
+class InferenceVisualizer(Visualizer):
+    def display_current_results(self, visuals, epoch, save_result, aspect_ratio=1.0, width=256):
+        image = visuals['fake']
+        image_numpy = util.tensor2im(image)
+        img_path = os.path.join(self.img_dir, '%05d.png' % epoch)
+        util.save_image(image_numpy, img_path)
\ No newline at end of file
diff --git a/NeuralVoicePuppetry/neural-code/__init__.py 
b/NeuralVoicePuppetry/neural-code/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/api_test.sh b/NeuralVoicePuppetry/neural-code/api_test.sh new file mode 100755 index 0000000..55aeac9 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/api_test.sh @@ -0,0 +1,13 @@ +#!/bin/bash +echo 'Hello World' + +NAME_AUDIO_FILE=$1 +NAME_VIDEO_FILE=$2 +echo "NAME_AUDIO_FILE: $1" +echo "NAME_VIDEO_FILE: $2" + +cd ../neural-code + +BASE=`pwd` +echo "BASE DIRECTORY: "$BASE + diff --git a/NeuralVoicePuppetry/neural-code/autils/__init__.py b/NeuralVoicePuppetry/neural-code/autils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/autils/combine_rendering_frames.py b/NeuralVoicePuppetry/neural-code/autils/combine_rendering_frames.py new file mode 100644 index 0000000..f56db5e --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/autils/combine_rendering_frames.py @@ -0,0 +1,59 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import os +import numpy as np +import cv2 +from tqdm import tqdm +import matplotlib.pyplot as plt +from skimage.transform import warp +from skimage.transform import SimilarityTransform + +def warp_back(image, oldimage, tform): + + alpha = 0.6 + + oldimage = oldimage.astype(np.float64) /255. + new_size = oldimage.shape + + dst_image = warp(image, tform, output_shape=new_size) + + # Mask of non-black pixels. + mask = np.where(np.all(dst_image == [0, 0, 0], axis=-1)) + dst_image[mask] = oldimage[mask] + + res = cv2.addWeighted(oldimage, 1 - alpha, dst_image, alpha, 0) + res = res[:, :, ::-1] + + return res + +images_out_path = 'results/face_reconstruction/images' +os.makedirs(images_out_path, exist_ok=True) + +tform_path = '/home/alberto/NeuralVoicePuppetry/datasets/SRF_anchor_short/Halbtotale_355_9415/tform.npy' + +frames_path = '/home/alberto/data/videosynth/SRF_anchor_short/Halbtotale/355_9415' + +combined_images_out_path = 'results/face_reconstruction/combines_images' +os.makedirs(combined_images_out_path, exist_ok=True) + +tform = np.load(tform_path) + +for image_path, frame in zip(tqdm(sorted(os.listdir(images_out_path))), sorted(os.listdir(frames_path))): + + index = int(image_path[:-4]) + f_tform = SimilarityTransform(matrix=tform[index]) + + image = cv2.imread(os.path.join(images_out_path,image_path)) + old_image = cv2.imread(os.path.join(frames_path, frame)) + + res = warp_back(image, old_image, f_tform) + res = res * 255. 
+ res = res.astype('uint8') + res = cv2.cvtColor(res, cv2.COLOR_BGR2RGB) + + + file_name = os.path.join(combined_images_out_path, '%04d.jpg' % index) + cv2.imwrite(file_name, res) + + diff --git a/NeuralVoicePuppetry/neural-code/autils/crop_videos.py b/NeuralVoicePuppetry/neural-code/autils/crop_videos.py new file mode 100644 index 0000000..1878699 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/autils/crop_videos.py @@ -0,0 +1,43 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +from moviepy.video.io.ffmpeg_tools import ffmpeg_extract_subclip +import os +from moviepy.editor import * +from subprocess import call +from math import ceil + +dataset = 'External' +data_path = f'/home/alberto/data/videosynth/{dataset}' +target_base = f'/home/alberto/data/videosynth/{dataset}' + +for sub in ['Close', 'Halbtotale', 'Totale']: + os.makedirs(os.path.join(target_base, sub), exist_ok=True) + +video_list = ['Youtube/Russian_guy.mp4', ] + +# Start and end time in minutes +start_time = 0 +end_time = 5 + +for video in video_list: + input_file = os.path.join(data_path, video) + output_file = os.path.join(target_base, video) + + clip = VideoFileClip(input_file) + print('FPS: ', clip.fps) + # end_time = ceil((7500/clip.fps))/60 + # print('End time: ', end_time) + + try: + cut = clip.subclip(start_time*60, int(end_time*60)) + + cut.write_videofile(output_file) + + except ValueError: + start_time = 0 + end_time = 5 + + cut = clip.subclip(start_time * 60, int(end_time*60)) + cut.write_videofile(output_file) + diff --git a/NeuralVoicePuppetry/neural-code/autils/deca_flame_fitting.py b/NeuralVoicePuppetry/neural-code/autils/deca_flame_fitting.py new file mode 100644 index 0000000..5247128 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/autils/deca_flame_fitting.py @@ -0,0 +1,63 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import os, sys +import cv2 +import numpy as np +from time import time +from scipy.io import savemat +import argparse +from tqdm import tqdm +import matplotlib.pyplot as plt +from skimage.transform import estimate_transform, warp, resize, rescale +import torch + +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../third/DECA/'))) +from decalib.deca import DECA +from decalib.datasets import datasets +from decalib.utils import util +from decalib.utils.config import cfg as deca_cfg + + +def warp_back(image, oldimage, tform): + + alpha = 0.6 + + oldimage = oldimage.astype(np.float64) /255. + new_size = oldimage.shape + + dst_image = warp(image, tform, output_shape=new_size) + + # Mask of non-black pixels. 
+ mask = np.where(np.all(dst_image == [0, 0, 0], axis=-1)) + dst_image[mask] = oldimage[mask] + + res = cv2.addWeighted(oldimage, 1 - alpha, dst_image, alpha, 0) + res = res[:, :, ::-1] + + return res + + +class DECA_tracker: + def __init__(self, video_path, target_dir=None): + + # load test images + self.testdata = datasets.TestData(video_path, iscrop=True, face_detector='fan', target_dir=target_dir) + self.device = "cuda" if torch.cuda.is_available() else "cpu" + + # run DECA + deca_cfg.model.use_tex = False + self.deca = DECA(config=deca_cfg, device=self.device) + + def __call__(self, images, tform=None): + + codedict = self.deca.encode(images) + + opdict, visdict = self.deca.decode(codedict, tform) + + mask = self.deca.render_mask(opdict['grid']) + + # for key in opdict.keys(): + # print(key, opdict[key].size()) + + return codedict, opdict, mask diff --git a/NeuralVoicePuppetry/neural-code/autils/deepspeech_features.py b/NeuralVoicePuppetry/neural-code/autils/deepspeech_features.py new file mode 100644 index 0000000..a16f0f7 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/autils/deepspeech_features.py @@ -0,0 +1,351 @@ +""" +20.10.14 +Code collection from VOCA: +https://github.com/TimoBolkart/voca +""" +import argparse +import re +import subprocess + +from scipy.io import wavfile +import os +import copy +import resampy +import numpy as np +import tensorflow as tf +from python_speech_features import mfcc +import moviepy.editor as mp +from pydub import AudioSegment +from subprocess import call + +class AudioHandler: + def __init__(self, config): + self.config = config + self.audio_feature_type = config['audio_feature_type'] + self.num_audio_features = config['num_audio_features'] + self.audio_window_size = config['audio_window_size'] + self.audio_window_stride = config['audio_window_stride'] + self.target_fps = config["target_fps"] + + def process(self, audio): + if self.audio_feature_type.lower() == "none": + return None + elif self.audio_feature_type.lower() == 'deepspeech': + return self.convert_to_deepspeech(audio) + else: + raise NotImplementedError("Audio features not supported") + + def convert_to_deepspeech(self, audio): + def audioToInputVector(audio, fs, numcep, numcontext): + # Get mfcc coefficients + features = mfcc(audio, samplerate=fs, numcep=numcep) + + # We only keep every second feature (BiRNN stride = 2) + features = features[::2] + + # One stride per time step in the input + num_strides = len(features) + + # Add empty initial and final contexts + empty_context = np.zeros((numcontext, numcep), dtype=features.dtype) + features = np.concatenate((empty_context, features, empty_context)) + + # Create a view into the array with overlapping strides of size + # numcontext (past) + 1 (present) + numcontext (future) + window_size = 2 * numcontext + 1 + train_inputs = np.lib.stride_tricks.as_strided( + features, + (num_strides, window_size, numcep), + (features.strides[0], features.strides[0], features.strides[1]), + writeable=False) + + # Flatten the second and third dimensions + train_inputs = np.reshape(train_inputs, [num_strides, -1]) + + train_inputs = np.copy(train_inputs) + train_inputs = (train_inputs - np.mean(train_inputs)) / np.std(train_inputs) + + # Return results + return train_inputs + + if type(audio) == dict: + pass + else: + raise ValueError('Wrong type for audio') + + # Load graph and place_holders + + with tf.io.gfile.GFile(self.config['deepspeech_graph_fname'], "rb") as f: + graph_def = tf.compat.v1.GraphDef() + graph_def.ParseFromString(f.read()) + + 
from tensorflow.python.framework.ops import get_default_graph + graph = get_default_graph() + tf.import_graph_def(graph_def, name="deepspeech") + input_tensor = graph.get_tensor_by_name('deepspeech/input_node:0') + # input_tensor = graph.get_tensor_by_name('input_node:0') + seq_length = graph.get_tensor_by_name('deepspeech/input_lengths:0') + layer_6 = graph.get_tensor_by_name('deepspeech/logits:0') + + n_input = 26 + n_context = 9 + + processed_audio = copy.deepcopy(audio) + with tf.compat.v1.Session(graph=graph) as sess: + for subj in audio.keys(): + for seq in audio[subj].keys(): + print('process audio: %s - %s' % (subj, seq)) + + audio_sample = audio[subj][seq]['audio'] + sample_rate = audio[subj][seq]['sample_rate'] + resampled_audio = resampy.resample(audio_sample.astype(float), sample_rate, 16000) + input_vector = audioToInputVector(resampled_audio.astype('int16'), 16000, n_input, n_context) + + network_output = sess.run(layer_6, feed_dict={input_tensor: input_vector[np.newaxis, ...], + seq_length: [input_vector.shape[0]]}) + + # Resample network output from 50 fps to 60 fps + audio_len_s = float(audio_sample.shape[0]) / sample_rate + num_frames = int(round(audio_len_s * self.target_fps)) + network_output = self.interpolate_features(network_output[:, 0], 50, self.target_fps, + output_len=num_frames) + + # Make windows + zero_pad = np.zeros((int(self.audio_window_size / 2), network_output.shape[1])) + network_output = np.concatenate((zero_pad, network_output, zero_pad), axis=0) + windows = [] + for window_index in range(0, network_output.shape[0] - self.audio_window_size, + self.audio_window_stride): + windows.append(network_output[window_index:window_index + self.audio_window_size]) + + processed_audio[subj][seq]['audio'] = np.array(windows) + return processed_audio + + def interpolate_features(self, features, input_rate, output_rate, output_len=None): + num_features = features.shape[1] + input_len = features.shape[0] + seq_len = input_len / float(input_rate) + if output_len is None: + output_len = int(seq_len * output_rate) + input_timestamps = np.arange(input_len) / float(input_rate) + output_timestamps = np.arange(output_len) / float(output_rate) + output_features = np.zeros((output_len, num_features)) + for feat in range(num_features): + output_features[:, feat] = np.interp(output_timestamps, + input_timestamps, + features[:, feat]) + return output_features + + +class AudioFeatures: + def __init__(self, config): + self.audio_handler = AudioHandler(config) + + def process_audio(self, audio, sample_rate): + tmp_audio = {'subj': {'seq': {'audio': audio, 'sample_rate': sample_rate}}} + return self.audio_handler.process(tmp_audio)['subj']['seq']['audio'] + + def inference_interpolate_styles(self, audio_fname): + # sample_rate, audio = wavfile.read(audio_fname) + import soundfile as sf + import librosa + import wavio + + x,_ = librosa.load(audio_fname, sr=16000) + sf.write(audio_fname, x, 16000) + print('Successfully converted.') + + wav = wavio.read(audio_fname) + audio, sample_rate = wav.data, wav.rate + print("sample rate: ", sample_rate) + if audio.ndim != 1: + print('Audio has multiple channels, only first channel is considered') + audio = audio[:, 0] + + processed_audio = self.process_audio(audio, sample_rate) + return processed_audio + + def run(self, audio_fname): + features = self.inference_interpolate_styles(audio_fname) + return features + + +class FFMPEGMetaReader: + """ + Uses ffprobe to extract metadata from audio and video + """ + + def __init__(cls, fps): + 
cls.DEFAULTS = {
+            "samplerate": 48000,
+            "bitrate": 96,
+            "duration": "00",
+            "fps": fps
+        }
+
+        cls.PATTERNS = {
+            "sample_and_bitrate": r'Audio:.*\s(\d+)\sHz.*\s(\d+)\skb/s',
+            "duration": r"Duration:\s(\d{2}):(\d{2}):(\d{2})\.(\d{2})",
+            "fps": r"Video:.*\s(\d+\.?\d*)\sfps"
+        }
+
+    # note: the methods below are ordinary instance methods; `cls` is simply
+    # used in place of the conventional `self`
+    def _run_ffmpeg(cls, path):
+        # ffprobe writes its stream/format report to stderr, so that is parsed
+        process = subprocess.Popen(['ffprobe', '-i', path, '-hide_banner'], stdout=subprocess.PIPE,
+                                   stderr=subprocess.PIPE)
+        stdout, stderr = process.communicate()
+        output = stderr.decode('utf-8')
+        return output
+
+    def _sample_and_bitrate(cls, path, default_samplerate=None, default_bitrate=None, output=None):
+        if output is None:
+            output = cls._run_ffmpeg(path)
+        match = re.search(cls.PATTERNS["sample_and_bitrate"], output)
+
+        samplerate = default_samplerate if default_samplerate else cls.DEFAULTS["samplerate"]
+        bitrate = default_bitrate if default_bitrate else cls.DEFAULTS["bitrate"]
+
+        if match:
+            samplerate = match.group(1) if match.group(1) else samplerate
+            bitrate = match.group(2) if match.group(2) else bitrate
+        return samplerate, bitrate
+
+    def _duration(cls, path, output=None):
+        if output is None:
+            output = cls._run_ffmpeg(path)
+
+        duration = cls.DEFAULTS["duration"]
+
+        match = re.search(cls.PATTERNS["duration"], output)
+
+        if match and match.group(4):
+            duration = "{}:{}:{}.{}".format(*[match.group(i) if match.group(i) else duration for i in range(1, 5)])
+        return duration
+
+    def _fps(cls, video_path, output=None):
+        if output is None:
+            output = cls._run_ffmpeg(video_path)
+
+        fps = cls.DEFAULTS["fps"]
+
+        match = re.search(cls.PATTERNS["fps"], output)
+
+        if match:
+            fps = match.group(1) if match.group(1) else fps
+        else:
+            raise Warning("No fps found.")
+        return fps
+
+    def extract_meta(cls, path):
+        output = cls._run_ffmpeg(path)
+        samplerate, bitrate = cls._sample_and_bitrate(path, output=output)
+        duration = cls._duration(path, output=output)
+        fps = cls._fps(path, output=output)
+        return {
+            "samplerate": samplerate,
+            "bitrate": bitrate,
+            "duration": duration,
+            "fps": fps
+        }
+
+
+def get_parser():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--dataset", type=str, required=True)
+    parser.add_argument("--video_id", type=str, required=True)
+    return parser
+
+
+def get_config():
+    config = {}
+    config['deepspeech_graph_fname'] = os.path.join(
+        "./third/DeepSpeech/models/", "output_graph.pb")
+    config['audio_feature_type'] = 'deepspeech'
+    config['num_audio_features'] = 29
+
+    config['audio_window_size'] = 16
+    config['audio_window_stride'] = 1
+    # config['target_fps'] = target_fps
+    return config
+
+
+def mkdir(path):
+    if not os.path.exists(path):
+        os.makedirs(path, exist_ok=True)
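+
+
+# Example invocation (added sketch; the paths are hypothetical and the
+# DeepSpeech graph referenced by get_config() must be present for this to run):
+#
+#   extract_ds(folder_videos='/data/videos', file_id='clip01',
+#              target_base='/data/features', target_name='clip01',
+#              type='video', target_fps=25)
+#
+# This converts clip01.mp4 to clip01.wav if needed, then writes one
+# <i>.deepspeech.npy window per output video frame under
+# /data/features/clip01/audio_feature.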
+
+def extract_ds(folder_videos, file_id, target_base, target_name, type, target_fps):
+
+    config = get_config()
+
+    # First convert mp4 to wav
+    if type == 'video':
+
+        path_wav = '{}/{}.wav'.format(folder_videos, file_id)
+        if not os.path.isfile(path_wav):
+            path_video_in = '{}/{}.mp4'.format(folder_videos, file_id)
+
+            if not os.path.isfile(path_video_in):
+                path_video_in = '{}/{}.avi'.format(folder_videos, file_id)
+
+            # cmd = "ffmpeg -i input-video.avi -vn -acodec copy output-audio.aac"
+
+            cmd = ('ffmpeg' + f' -i {path_video_in} {path_wav}').split()
+            call(cmd)
+
+    else:
+        path_wav = '{}/{}.wav'.format(folder_videos, file_id)
+        if not os.path.isfile(path_wav):
+            try:
+                path_mp3 = '{}/{}.mp3'.format(folder_videos, file_id)
+                sound = AudioSegment.from_mp3(path_mp3)
+                sound.export(path_wav, format="wav")
+
+            except FileNotFoundError:
+                print('Audio file not found. File format should be mp3 or wav.')
+
+    config["target_fps"] = target_fps
+
+    # Compute features
+    features = AudioFeatures(config=config).run(path_wav)
+    folder_out = os.path.join(target_base, target_name, "audio_feature")
+
+    # Save features
+    mkdir(folder_out)
+    for i, feature in enumerate(features):
+        fn_out = "{}.deepspeech.npy".format(i)
+        np.save(os.path.join(folder_out, fn_out), feature)
+    print("Written {} files to '{}'".format(features.shape[0], folder_out))
+
+
+if __name__ == "__main__":
+    args = get_parser().parse_args()
+    folder_videos = args.dataset
+    file_id = args.video_id
+    folder_nvp = "TARGETS"
+
+    # First convert mp4 to wav
+    path_video_in = '{}/{}.mp4'.format(folder_videos, file_id)
+    path_wav = '{}/{}.wav'.format(folder_videos, file_id)
+    path_features = os.path.join(folder_videos, "deepspeech", "{}.npy".format(file_id))
+    clip = mp.VideoFileClip(path_video_in)
+    clip.audio.write_audiofile(path_wav)
+
+    # Get the metadata via ffmpeg; extract_meta is an instance method, so the
+    # reader must be instantiated (the fps argument is only a fallback used
+    # when ffprobe reports no fps line)
+    config = get_config()
+    metadata = FFMPEGMetaReader(fps=None).extract_meta(path_video_in)
+    fps = float(metadata["fps"])
+    if fps != int(fps):
+        raise Warning("Careful: fps is not an integer ({})".format(metadata["fps"]))
+    config["target_fps"] = int(fps)
+
+    # Compute features
+    features = AudioFeatures(config=config).run(path_wav)
+    folder_out = os.path.join(
+        './NeuralVoicePuppetry/Audio2ExpressionNet/Inference/datasets/{}'.format(
+            folder_nvp), file_id, "audio_feature")
+
+    # Save features
+    mkdir(folder_out)
+    for i, feature in enumerate(features):
+        fn_out = "{}.deepspeech.npy".format(i)
+        np.save(os.path.join(folder_out, fn_out), feature)
+    print("Written {} files to '{}'".format(features.shape[0], folder_out))
diff --git a/NeuralVoicePuppetry/neural-code/autils/eos_tracker.py b/NeuralVoicePuppetry/neural-code/autils/eos_tracker.py
new file mode 100644
index 0000000..ae1403e
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/autils/eos_tracker.py
@@ -0,0 +1,168 @@
+import numpy as np
+import dlib
+import eos
+import os
+from imageio import imread, imwrite
+from skimage.draw import disk
+import matplotlib.pyplot as plt
+import cv2
+import trimesh
+
+PREDICTOR_PATH = 'tracker/eos/shape_predictor_68_face_landmarks.dat'
+PATH_TO_EOS = 'tracker/eos/share'
+
+# get viewport matrix
+def viewport_matrix(w, h):
+    viewport = np.array([0, h, w, -h])
+
+    # scale
+    S = np.identity(4, dtype=np.float32)
+    S[0][0] *= viewport[2] / 2
+    S[1][1] *= viewport[3] / 2
+    S[2][2] *= 0.5
+
+    # translate
+    T = np.identity(4, dtype=np.float32)
+    T[3][0] = viewport[0] + (viewport[2] / 2)
+    T[3][1] = viewport[1] + (viewport[3] / 2)
+    T[3][2] = 0.5
+    return S @ T
+
+
+def draw_circle(canvas, x, y, r=8, color=(255, 255, 255)):
+    rr, cc = disk((x, y), r, shape=canvas.shape)
+    canvas[rr, cc] = color
+
+
+class EOS_Tracker:
+    def __init__(self, path_to_eos, predictor_path):
+
+        # Dlib tracker
+        self.detector = dlib.get_frontal_face_detector()
+        self.shape_predictor = dlib.shape_predictor(predictor_path)
+
+        # EOS model
+        self.model = eos.morphablemodel.load_model(f"{path_to_eos}/scripts/bfm2017-1_bfm_nomouth.bin")
+        self.landmark_mapper = eos.core.LandmarkMapper(f'{path_to_eos}/ibug_to_bfm2017-1_bfm_nomouth.txt')
+        self.edge_topology = eos.morphablemodel.load_edge_topology(f'{path_to_eos}/scripts/bfm2017.json')
+        self.contour_landmarks = eos.fitting.ContourLandmarks.load(f'{path_to_eos}/ibug_to_bfm2017-1_bfm_nomouth.txt')
+        self.model_contour = eos.fitting.ModelContour.load(f'{path_to_eos}/bfm2017-1_bfm_nomouth_model_contours.json')
+
+    def 
get_facial_landmarks(self, img): + # get bounding box and facial landmarks + boxes = self.detector(img) + lp = [] + for box in boxes: + shape = self.shape_predictor(img, box) + index = 1 + landmarks = [] + for part in shape.parts(): + landmarks.append(eos.core.Landmark(str(index), [float(part.y), float(part.x)])) + index += 1 + lp.append(landmarks) + return lp + + def fit_shape_and_pose(self, img, landmarks): + h, w = img.shape[:2] + mesh, pose, shape_coeffs, blendshape_coeffs = eos.fitting.fit_shape_and_pose(self.model, + landmarks, + self.landmark_mapper, + w, + h, + self.edge_topology, + self.contour_landmarks, + self.model_contour) + + return mesh, pose, shape_coeffs, blendshape_coeffs + + def project_on_img(self, img, mesh, pose): + canvas = img.copy() + h, w = img.shape[:2] + p, mv, vm, fm = self.get_transformation(img, pose) + + # get vertices from mesh + sampled_verts = mesh.vertices + for i in sampled_verts: + tmp = fm @ np.append(i, 1) + # disregard z and draw 2d pt + x, y = (int(w / 2 + tmp[0]), int(h / 2 + tmp[1])) + draw_circle(canvas, x, y) + return canvas + + def get_transformation(self, img, pose): + h, w = img.shape[:2] + vm = viewport_matrix(w, h) + + # get pose and transform to img space + p = pose.get_projection() # from world coordinates to view coordinates + mv = pose.get_modelview() # From object to world coordinates + fm = vm @ p @ mv + + return p, mv, vm, fm + + def get_mesh_from_coeffs(self, shape_coeffs, expr_coeffs): + mesh = self.model.draw_sample(shape_coeffs, expr_coeffs) + return mesh + + def __call__(self, img, show_on_img=False, show_landmarks=False, save_ply=False): + + # call landmark detector + landmarks = self.get_facial_landmarks(img) + + if show_landmarks: + canvas = img.copy() + for lp in landmarks: + for point in lp: draw_circle(canvas, point.coordinates[0], point.coordinates[1]) + plt.imshow(canvas) + plt.show() + + # call eos fitter + try: + landmarks = landmarks[0] + mesh, pose, shape_coeffs, blendshape_coeffs = self.fit_shape_and_pose(img, landmarks) + # Mesh vertices + vertices = mesh.vertices + # print(f'vertices: {len(vertices)}') + + # Triangle vertex indices + faces = mesh.tvi + # print(f'faces: {len(faces)}') + + if save_ply: + mesh_tri = trimesh.Trimesh(vertices=vertices, faces=faces) + mesh_tri.export('mesh.ply') + + # Get pose + # rotation = pose.get_rotation() + # print(f'rotation:\n{rotation}') + # projection = pose.get_projection() + # print(f'projection:\n{projection}') + + # print(f'shape_coeffs: {len(shape_coeffs)}') + # print(f'blendshape_coeffs: {len(blendshape_coeffs)}') + + if show_on_img: + img_with_mesh = self.project_on_img(img, mesh, pose) + plt.imshow(img_with_mesh) + plt.show() + + return mesh, pose, shape_coeffs, blendshape_coeffs + + except IndexError: + # plt.imshow(img) + # plt.show() + imwrite('error.png', img) + + return None, None, np.zeros(100), np.zeros(199) + + +if __name__ == "__main__": + img_path = 'IMG_3046.JPG' + # img = cv2.imread(img_path) + img = imread(img_path) + tracker = EOS_Tracker(PATH_TO_EOS, PREDICTOR_PATH) + + mesh, pose, shape_coeffs, blendshape_coeffs = tracker(img) + + + diff --git a/NeuralVoicePuppetry/neural-code/autils/face12.json b/NeuralVoicePuppetry/neural-code/autils/face12.json new file mode 100644 index 0000000..252b542 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/autils/face12.json @@ -0,0 +1,10 @@ +{ + "triangles": [[0, 1, 130], [0, 130, 129], [1, 2, 130], [2, 131, 130], [2, 3, 132], [2, 132, 131], [3, 4, 132], [4, 133, 132], [4, 5, 134], [4, 134, 133], [5, 6, 134], [6, 
135, 134], [6, 7, 136], [6, 136, 135], [7, 8, 136], [8, 137, 136], [8, 9, 138], [8, 138, 137], [9, 10, 138], [10, 139, 138], [10, 11, 140], [10, 140, 139], [11, 12, 140], [12, 141, 140], [12, 13, 142], [12, 142, 141], [13, 14, 142], [14, 143, 142], [14, 15, 144], [14, 144, 143], [15, 16, 144], [16, 145, 144], [16, 17, 146], [16, 146, 145], [17, 18, 146], [18, 147, 146], [18, 19, 148], [18, 148, 147], [19, 20, 148], [20, 149, 148], [20, 21, 150], [20, 150, 149], [21, 22, 150], [22, 151, 150], [22, 23, 152], [22, 152, 151], [23, 24, 152], [24, 153, 152], [24, 25, 154], [24, 154, 153], [25, 26, 154], [26, 155, 154], [26, 27, 156], [26, 156, 155], [27, 28, 156], [28, 157, 156], [28, 29, 158], [28, 158, 157], [29, 30, 158], [30, 159, 158], [30, 31, 160], [30, 160, 159], [31, 32, 160], [32, 161, 160], [32, 33, 162], [32, 162, 161], [33, 34, 162], [34, 163, 162], [34, 35, 164], [34, 164, 163], [35, 36, 164], [36, 165, 164], [36, 37, 166], [36, 166, 165], [37, 38, 166], [38, 167, 166], [38, 39, 168], [38, 168, 167], [39, 40, 168], [40, 169, 168], [40, 41, 170], [40, 170, 169], [41, 42, 170], [42, 171, 170], [42, 43, 172], [42, 172, 171], [43, 44, 172], [44, 173, 172], [44, 45, 174], [44, 174, 173], [45, 46, 174], [46, 175, 174], [46, 47, 176], [46, 176, 175], [47, 48, 176], [48, 177, 176], [48, 49, 178], [48, 178, 177], [49, 50, 178], [50, 179, 178], [50, 51, 180], [50, 180, 179], [51, 52, 180], [52, 181, 180], [52, 53, 182], [52, 182, 181], [53, 54, 182], [54, 183, 182], [54, 55, 184], [54, 184, 183], [55, 56, 184], [56, 185, 184], [56, 57, 186], [56, 186, 185], [57, 58, 186], [58, 187, 186], [58, 59, 188], [58, 188, 187], [59, 60, 188], [60, 189, 188], [60, 61, 190], [60, 190, 189], [61, 62, 190], [62, 191, 190], [62, 63, 192], [62, 192, 191], [63, 64, 192], [64, 193, 192], [64, 65, 194], [64, 194, 193], [65, 66, 194], [66, 195, 194], [66, 67, 196], [66, 196, 195], [67, 68, 196], [68, 197, 196], [68, 69, 198], [68, 198, 197], [69, 70, 198], [70, 199, 198], [70, 71, 200], [70, 200, 199], [71, 72, 200], [72, 201, 200], [72, 73, 202], [72, 202, 201], [73, 74, 202], [74, 203, 202], [74, 75, 204], [74, 204, 203], [75, 76, 204], [76, 205, 204], [76, 77, 206], [76, 206, 205], [77, 78, 206], [78, 207, 206], [78, 79, 208], [78, 208, 207], [79, 80, 208], [80, 209, 208], [80, 81, 210], [80, 210, 209], [81, 82, 210], [82, 211, 210], [82, 83, 212], [82, 212, 211], [83, 84, 212], [84, 213, 212], [84, 85, 214], [84, 214, 213], [85, 86, 214], [86, 215, 214], [86, 87, 216], [86, 216, 215], [87, 88, 216], [88, 217, 216], [88, 89, 218], [88, 218, 217], [89, 90, 218], [90, 219, 218], [90, 91, 220], [90, 220, 219], [91, 92, 220], [92, 221, 220], [92, 93, 222], [92, 222, 221], [93, 94, 222], [94, 223, 222], [94, 95, 224], [94, 224, 223], [95, 96, 224], [96, 225, 224], [96, 97, 226], [96, 226, 225], [97, 98, 226], [98, 227, 226], [98, 99, 228], [98, 228, 227], [99, 100, 228], [100, 229, 228], [100, 101, 230], [100, 230, 229], [101, 102, 230], [102, 231, 230], [102, 103, 232], [102, 232, 231], [103, 104, 232], [104, 233, 232], [104, 105, 234], [104, 234, 233], [105, 106, 234], [106, 235, 234], [106, 107, 236], [106, 236, 235], [107, 108, 236], [108, 237, 236], [108, 109, 238], [108, 238, 237], [109, 110, 238], [110, 239, 238], [110, 111, 240], [110, 240, 239], [111, 112, 240], [112, 241, 240], [112, 113, 242], [112, 242, 241], [113, 114, 242], [114, 243, 242], [114, 115, 244], [114, 244, 243], [115, 116, 244], [116, 245, 244], [116, 117, 246], [116, 246, 245], [117, 118, 246], [118, 247, 246], [118, 119, 248], [118, 
248, 247], [119, 120, 248], [120, 249, 248], [120, 121, 250], [120, 250, 249], [121, 122, 250], [122, 251, 250], [122, 123, 252], [122, 252, 251], [123, 124, 252], [124, 253, 252], [124, 125, 254], [124, 254, 253], [125, 126, 254], [126, 255, 254], [126, 127, 256], [126, 256, 255], [127, 128, 256], [128, 257, 256], [129, 130, 258], [130, 259, 258], [130, 131, 260], [130, 260, 259], [131, 132, 260], [132, 261, 260], [132, 133, 262], [132, 262, 261], [133, 134, 262], [134, 263, 262], [134, 135, 264], [134, 264, 263], [135, 136, 264], [136, 265, 264], [136, 137, 266], [136, 266, 265], [137, 138, 266], [138, 267, 266], [138, 139, 268], [138, 268, 267], [139, 140, 268], [140, 269, 268], [140, 141, 270], [140, 270, 269], [141, 142, 270], [142, 271, 270], [142, 143, 272], [142, 272, 271], [143, 144, 272], [144, 273, 272], [144, 145, 274], [144, 274, 273], [145, 146, 274], [146, 275, 274], [146, 147, 276], [146, 276, 275], [147, 148, 276], [148, 277, 276], [148, 149, 278], [148, 278, 277], [149, 150, 278], [150, 279, 278], [150, 151, 280], [150, 280, 279], [151, 152, 280], [152, 281, 280], [152, 153, 282], [152, 282, 281], [153, 154, 282], [154, 283, 282], [154, 155, 284], [154, 284, 283], [155, 156, 284], [156, 285, 284], [156, 157, 286], [156, 286, 285], [157, 158, 286], [158, 287, 286], [158, 159, 288], [158, 288, 287], [159, 160, 288], [160, 289, 288], [160, 161, 290], [160, 290, 289], [161, 162, 290], [162, 291, 290], [162, 163, 292], [162, 292, 291], [163, 164, 292], [164, 293, 292], [164, 165, 294], [164, 294, 293], [165, 166, 294], [166, 295, 294], [166, 167, 296], [166, 296, 295], [167, 168, 296], [168, 297, 296], [168, 169, 298], [168, 298, 297], [169, 170, 298], [170, 299, 298], [170, 171, 300], [170, 300, 299], [171, 172, 300], [172, 301, 300], [172, 173, 302], [172, 302, 301], [173, 174, 302], [174, 303, 302], [174, 175, 304], [174, 304, 303], [175, 176, 304], [176, 305, 304], [176, 177, 306], [176, 306, 305], [177, 178, 306], [178, 307, 306], [178, 179, 308], [178, 308, 307], [179, 180, 308], [180, 309, 308], [180, 181, 310], [180, 310, 309], [181, 182, 310], [182, 311, 310], [182, 183, 312], [182, 312, 311], [183, 184, 312], [184, 313, 312], [184, 185, 314], [184, 314, 313], [185, 186, 314], [186, 315, 314], [186, 187, 316], [186, 316, 315], [187, 188, 316], [188, 317, 316], [188, 189, 318], [188, 318, 317], [189, 190, 318], [190, 319, 318], [190, 191, 320], [190, 320, 319], [191, 192, 320], [192, 321, 320], [192, 193, 322], [192, 322, 321], [193, 194, 322], [194, 323, 322], [194, 195, 324], [194, 324, 323], [195, 196, 324], [196, 325, 324], [196, 197, 326], [196, 326, 325], [197, 198, 326], [198, 327, 326], [198, 199, 328], [198, 328, 327], [199, 200, 328], [200, 329, 328], [200, 201, 330], [200, 330, 329], [201, 202, 330], [202, 331, 330], [202, 203, 332], [202, 332, 331], [203, 204, 332], [204, 333, 332], [204, 205, 334], [204, 334, 333], [205, 206, 334], [206, 335, 334], [206, 207, 336], [206, 336, 335], [207, 208, 336], [208, 337, 336], [208, 209, 338], [208, 338, 337], [209, 210, 338], [210, 339, 338], [210, 211, 340], [210, 340, 339], [211, 212, 340], [212, 341, 340], [212, 213, 342], [212, 342, 341], [213, 214, 342], [214, 343, 342], [214, 215, 344], [214, 344, 343], [215, 216, 344], [216, 345, 344], [216, 217, 346], [216, 346, 345], [217, 218, 346], [218, 347, 346], [218, 219, 348], [218, 348, 347], [219, 220, 348], [220, 349, 348], [220, 221, 350], [220, 350, 349], [221, 222, 350], [222, 351, 350], [222, 223, 352], [222, 352, 351], [223, 224, 352], [224, 353, 352], [224, 
225, 354], [224, 354, 353], [225, 226, 354], [226, 355, 354], [226, 227, 356], [226, 356, 355], [227, 228, 356], [228, 357, 356], [228, 229, 358], [228, 358, 357], [229, 230, 358], [230, 359, 358], [230, 231, 360], [230, 360, 359], [231, 232, 360], [232, 361, 360], [232, 233, 362], [232, 362, 361], [233, 234, 362], [234, 363, 362], [234, 235, 364], [234, 364, 363], [235, 236, 364], [236, 365, 364], [236, 237, 366], [236, 366, 365], [237, 238, 366], [238, 367, 366], [238, 239, 368], [238, 368, 367], [239, 240, 368], [240, 369, 368], [240, 241, 370], [240, 370, 369], [241, 242, 370], [242, 371, 370], [242, 243, 372], [242, 372, 371], [243, 244, 372], [244, 373, 372], [244, 245, 374], [244, 374, 373], [245, 246, 374], [246, 375, 374], [246, 247, 376], [246, 376, 375], [247, 248, 376], [248, 377, 376], [248, 249, 378], [248, 378, 377], [249, 250, 378], [250, 379, 378], [250, 251, 380], [250, 380, 379], [251, 252, 380], [252, 381, 380], [252, 253, 382], [252, 382, 381], [253, 254, 382], [254, 383, 382], [254, 255, 384], [254, 384, 383], [255, 256, 384], [256, 385, 384], [256, 257, 386], [256, 386, 385], [258, 259, 388], [258, 388, 387], [259, 260, 388], [260, 389, 388], [260, 261, 390], [260, 390, 389], [261, 262, 390], [262, 391, 390], [262, 263, 392], [262, 392, 391], [263, 264, 392], [264, 393, 392], [264, 265, 394], [264, 394, 393], [265, 266, 394], [266, 395, 394], [266, 267, 396], [266, 396, 395], [267, 268, 396], [268, 397, 396], [268, 269, 398], [268, 398, 397], [269, 270, 398], [270, 399, 398], [270, 271, 400], [270, 400, 399], [271, 272, 400], [272, 401, 400], [272, 273, 402], [272, 402, 401], [273, 274, 402], [274, 403, 402], [274, 275, 404], [274, 404, 403], [275, 276, 404], [276, 405, 404], [276, 277, 406], [276, 406, 405], [277, 278, 406], [278, 407, 406], [278, 279, 408], [278, 408, 407], [279, 280, 408], [280, 409, 408], [280, 281, 410], [280, 410, 409], [281, 282, 410], [282, 411, 410], [282, 283, 412], [282, 412, 411], [283, 284, 412], [284, 413, 412], [284, 285, 414], [284, 414, 413], [285, 286, 414], [286, 415, 414], [286, 287, 416], [286, 416, 415], [287, 288, 416], [288, 417, 416], [288, 289, 418], [288, 418, 417], [289, 290, 418], [290, 419, 418], [290, 291, 420], [290, 420, 419], [291, 292, 420], [292, 421, 420], [292, 293, 422], [292, 422, 421], [293, 294, 422], [294, 423, 422], [294, 295, 424], [294, 424, 423], [295, 296, 424], [296, 425, 424], [296, 297, 426], [296, 426, 425], [297, 298, 426], [298, 427, 426], [298, 299, 428], [298, 428, 427], [299, 300, 428], [300, 429, 428], [300, 301, 430], [300, 430, 429], [301, 302, 430], [302, 431, 430], [302, 303, 432], [302, 432, 431], [303, 304, 432], [304, 433, 432], [304, 305, 434], [304, 434, 433], [305, 306, 434], [306, 435, 434], [306, 307, 436], [306, 436, 435], [307, 308, 436], [308, 437, 436], [308, 309, 438], [308, 438, 437], [309, 310, 438], [310, 439, 438], [310, 311, 440], [310, 440, 439], [311, 312, 440], [312, 441, 440], [312, 313, 442], [312, 442, 441], [313, 314, 442], [314, 443, 442], [314, 315, 444], [314, 444, 443], [315, 316, 444], [316, 445, 444], [316, 317, 446], [316, 446, 445], [317, 318, 446], [318, 447, 446], [318, 319, 448], [318, 448, 447], [319, 320, 448], [320, 449, 448], [320, 321, 450], [320, 450, 449], [321, 322, 450], [322, 451, 450], [322, 323, 452], [322, 452, 451], [323, 324, 452], [324, 453, 452], [324, 325, 454], [324, 454, 453], [325, 326, 454], [326, 455, 454], [326, 327, 456], [326, 456, 455], [327, 328, 456], [328, 457, 456], [328, 329, 458], [328, 458, 457], [329, 330, 458], [330, 
459, 458], [330, 331, 460], [330, 460, 459], [331, 332, 460], [332, 461, 460], [332, 333, 462], [332, 462, 461], [333, 334, 462], [334, 463, 462], [334, 335, 464], [334, 464, 463], [335, 336, 464], [336, 465, 464], [336, 337, 466], [336, 466, 465], [337, 338, 466], [338, 467, 466], [338, 339, 468], [338, 468, 467], [339, 340, 468], [340, 469, 468], [340, 341, 470], [340, 470, 469], [341, 342, 470], [342, 471, 470], [342, 343, 472], [342, 472, 471], [343, 344, 472], [344, 473, 472], [344, 345, 474], [344, 474, 473], [345, 346, 474], [346, 475, 474], [346, 347, 476], [346, 476, 475], [347, 348, 476], [348, 477, 476], [348, 349, 478], [348, 478, 477], [349, 350, 478], [350, 479, 478], [350, 351, 480], [350, 480, 479], [351, 352, 480], [352, 481, 480], [352, 353, 482], [352, 482, 481], [353, 354, 482], [354, 483, 482], [354, 355, 484], [354, 484, 483], [355, 356, 484], [356, 485, 484], [356, 357, 486], [356, 486, 485], [357, 358, 486], [358, 487, 486], [358, 359, 488], [358, 488, 487], [359, 360, 488], [360, 489, 488], [360, 361, 490], [360, 490, 489], [361, 362, 490], [362, 491, 490], [362, 363, 492], [362, 492, 491], [363, 364, 492], [364, 493, 492], [364, 365, 494], [364, 494, 493], [365, 366, 494], [366, 495, 494], [366, 367, 496], [366, 496, 495], [367, 368, 496], [368, 497, 496], [368, 369, 498], [368, 498, 497], [369, 370, 498], [370, 499, 498], [370, 371, 500], [370, 500, 499], [371, 372, 500], [372, 501, 500], [372, 373, 502], [372, 502, 501], [373, 374, 502], [374, 503, 502], [374, 375, 504], [374, 504, 503], [375, 376, 504], [376, 505, 504], [376, 377, 506], [376, 506, 505], [377, 378, 506], [378, 507, 506], [378, 379, 508], [378, 508, 507], [379, 380, 508], [380, 509, 508], [380, 381, 510], [380, 510, 509], [381, 382, 510], [382, 511, 510], [382, 383, 512], [382, 512, 511], [383, 384, 512], [384, 513, 512], [384, 385, 514], [384, 514, 513], [385, 386, 514], [386, 515, 514], [387, 388, 516], [388, 517, 516], [388, 389, 518], [388, 518, 517], [389, 390, 518], [390, 519, 518], [390, 391, 520], [390, 520, 519], [391, 392, 520], [392, 521, 520], [392, 393, 522], [392, 522, 521], [393, 394, 522], [394, 523, 522], [394, 395, 524], [394, 524, 523], [395, 396, 524], [396, 525, 524], [396, 397, 526], [396, 526, 525], [397, 398, 526], [398, 527, 526], [398, 399, 528], [398, 528, 527], [399, 400, 528], [400, 529, 528], [400, 401, 530], [400, 530, 529], [401, 402, 530], [402, 531, 530], [402, 403, 532], [402, 532, 531], [403, 404, 532], [404, 533, 532], [404, 405, 534], [404, 534, 533], [405, 406, 534], [406, 535, 534], [406, 407, 536], [406, 536, 535], [407, 408, 536], [408, 537, 536], [408, 409, 538], [408, 538, 537], [409, 410, 538], [410, 539, 538], [410, 411, 540], [410, 540, 539], [411, 412, 540], [412, 541, 540], [412, 413, 542], [412, 542, 541], [413, 414, 542], [414, 543, 542], [414, 415, 544], [414, 544, 543], [415, 416, 544], [416, 545, 544], [416, 417, 546], [416, 546, 545], [417, 418, 546], [418, 547, 546], [418, 419, 548], [418, 548, 547], [419, 420, 548], [420, 549, 548], [420, 421, 550], [420, 550, 549], [421, 422, 550], [422, 551, 550], [422, 423, 552], [422, 552, 551], [423, 424, 552], [424, 553, 552], [424, 425, 554], [424, 554, 553], [425, 426, 554], [426, 555, 554], [426, 427, 556], [426, 556, 555], [427, 428, 556], [428, 557, 556], [428, 429, 558], [428, 558, 557], [429, 430, 558], [430, 559, 558], [430, 431, 560], [430, 560, 559], [431, 432, 560], [432, 561, 560], [432, 433, 562], [432, 562, 561], [433, 434, 562], [434, 563, 562], [434, 435, 564], [434, 564, 563], [435, 
436, 564], [436, 565, 564], [436, 437, 566], [436, 566, 565], [437, 438, 566], [438, 567, 566], [438, 439, 568], [438, 568, 567], [439, 440, 568], [440, 569, 568], [440, 441, 570], [440, 570, 569], [441, 442, 570], [442, 571, 570], [442, 443, 572], [442, 572, 571], [443, 444, 572], [444, 573, 572], [444, 445, 574], [444, 574, 573], [445, 446, 574], [446, 575, 574], [446, 447, 576], [446, 576, 575], [447, 448, 576], [448, 577, 576], [448, 449, 578], [448, 578, 577], [449, 450, 578], [450, 579, 578], [450, 451, 580], [450, 580, 579], [451, 452, 580], [452, 581, 580], [452, 453, 582], [452, 582, 581], [453, 454, 582], [454, 583, 582], [454, 455, 584], [454, 584, 583], [455, 456, 584], [456, 585, 584], [456, 457, 586], [456, 586, 585], [457, 458, 586], [458, 587, 586], [458, 459, 588], [458, 588, 587], [459, 460, 588], [460, 589, 588], [460, 461, 590], [460, 590, 589], [461, 462, 590], [462, 591, 590], [462, 463, 592], [462, 592, 591], [463, 464, 592], [464, 593, 592], [464, 465, 594], [464, 594, 593], [465, 466, 594], [466, 595, 594], [466, 467, 596], [466, 596, 595], [467, 468, 596], [468, 597, 596], [468, 469, 598], [468, 598, 597], [469, 470, 598], [470, 599, 598], [470, 471, 600], [470, 600, 599], [471, 472, 600], [472, 601, 600], [472, 473, 602], [472, 602, 601], [473, 474, 602], [474, 603, 602], [474, 475, 604], [474, 604, 603], [475, 476, 604], [476, 605, 604], [476, 477, 606], [476, 606, 605], [477, 478, 606], [478, 607, 606], [478, 479, 608], [478, 608, 607], [479, 480, 608], [480, 609, 608], [480, 481, 610], [480, 610, 609], [481, 482, 610], [482, 611, 610], [482, 483, 612], [482, 612, 611], [483, 484, 612], [484, 613, 612], [484, 485, 614], [484, 614, 613], [485, 486, 614], [486, 615, 614], [486, 487, 616], [486, 616, 615], [487, 488, 616], [488, 617, 616], [488, 489, 618], [488, 618, 617], [489, 490, 618], [490, 619, 618], [490, 491, 620], [490, 620, 619], [491, 492, 620], [492, 621, 620], [492, 493, 622], [492, 622, 621], [493, 494, 622], [494, 623, 622], [494, 495, 624], [494, 624, 623], [495, 496, 624], [496, 625, 624], [496, 497, 626], [496, 626, 625], [497, 498, 626], [498, 627, 626], [498, 499, 628], [498, 628, 627], [499, 500, 628], [500, 629, 628], [500, 501, 630], [500, 630, 629], [501, 502, 630], [502, 631, 630], [502, 503, 632], [502, 632, 631], [503, 504, 632], [504, 633, 632], [504, 505, 634], [504, 634, 633], [505, 506, 634], [506, 635, 634], [506, 507, 636], [506, 636, 635], [507, 508, 636], [508, 637, 636], [508, 509, 638], [508, 638, 637], [509, 510, 638], [510, 639, 638], [510, 511, 640], [510, 640, 639], [511, 512, 640], [512, 641, 640], [512, 513, 642], [512, 642, 641], [513, 514, 642], [514, 643, 642], [514, 515, 644], [514, 644, 643], [516, 517, 646], [516, 646, 645], [517, 518, 646], [518, 647, 646], [518, 519, 648], [518, 648, 647], [519, 520, 648], [520, 649, 648], [520, 521, 650], [520, 650, 649], [521, 522, 650], [522, 651, 650], [522, 523, 652], [522, 652, 651], [523, 524, 652], [524, 653, 652], [524, 525, 654], [524, 654, 653], [525, 526, 654], [526, 655, 654], [526, 527, 656], [526, 656, 655], [527, 528, 656], [528, 657, 656], [528, 529, 658], [528, 658, 657], [529, 530, 658], [530, 659, 658], [530, 531, 660], [530, 660, 659], [531, 532, 660], [532, 661, 660], [532, 533, 662], [532, 662, 661], [533, 534, 662], [534, 663, 662], [534, 535, 664], [534, 664, 663], [535, 536, 664], [536, 665, 664], [536, 537, 666], [536, 666, 665], [537, 538, 666], [538, 667, 666], [538, 539, 668], [538, 668, 667], [539, 540, 668], [540, 669, 668], [540, 541, 670], [540, 
670, 669], [541, 542, 670], [542, 671, 670], [542, 543, 672], [542, 672, 671], [543, 544, 672], [544, 673, 672], [544, 545, 674], [544, 674, 673], [545, 546, 674], [546, 675, 674], [546, 547, 676], [546, 676, 675], [547, 548, 676], [548, 677, 676], [548, 549, 678], [548, 678, 677], [549, 550, 678], [550, 679, 678], [550, 551, 680], [550, 680, 679], [551, 552, 680], [552, 681, 680], [552, 553, 682], [552, 682, 681], [553, 554, 682], [554, 683, 682], [554, 555, 684], [554, 684, 683], [555, 556, 684], [556, 685, 684], [556, 557, 686], [556, 686, 685], [557, 558, 686], [558, 687, 686], [558, 559, 688], [558, 688, 687], [559, 560, 688], [560, 689, 688], [560, 561, 690], [560, 690, 689], [561, 562, 690], [562, 691, 690], [562, 563, 692], [562, 692, 691], [563, 564, 692], [564, 693, 692], [564, 565, 694], [564, 694, 693], [565, 566, 694], [566, 695, 694], [566, 567, 696], [566, 696, 695], [567, 568, 696], [568, 697, 696], [568, 569, 698], [568, 698, 697], [569, 570, 698], [570, 699, 698], [570, 571, 700], [570, 700, 699], [571, 572, 700], [572, 701, 700], [572, 573, 702], [572, 702, 701], [573, 574, 702], [574, 703, 702], [574, 575, 704], [574, 704, 703], [575, 576, 704], [576, 705, 704], [576, 577, 706], [576, 706, 705], [577, 578, 706], [578, 707, 706], [578, 579, 708], [578, 708, 707], [579, 580, 708], [580, 709, 708], [580, 581, 710], [580, 710, 709], [581, 582, 710], [582, 711, 710], [582, 583, 712], [582, 712, 711], [583, 584, 712], [584, 713, 712], [584, 585, 714], [584, 714, 713], [585, 586, 714], [586, 715, 714], [586, 587, 716], [586, 716, 715], [587, 588, 716], [588, 717, 716], [588, 589, 718], [588, 718, 717], [589, 590, 718], [590, 719, 718], [590, 591, 720], [590, 720, 719], [591, 592, 720], [592, 721, 720], [592, 593, 722], [592, 722, 721], [593, 594, 722], [594, 723, 722], [594, 595, 724], [594, 724, 723], [595, 596, 724], [596, 725, 724], [596, 597, 726], [596, 726, 725], [597, 598, 726], [598, 727, 726], [598, 599, 728], [598, 728, 727], [599, 600, 728], [600, 729, 728], [600, 601, 730], [600, 730, 729], [601, 602, 730], [602, 731, 730], [602, 603, 732], [602, 732, 731], [603, 604, 732], [604, 733, 732], [604, 605, 734], [604, 734, 733], [605, 606, 734], [606, 735, 734], [606, 607, 736], [606, 736, 735], [607, 608, 736], [608, 737, 736], [608, 609, 738], [608, 738, 737], [609, 610, 738], [610, 739, 738], [610, 611, 740], [610, 740, 739], [611, 612, 740], [612, 741, 740], [612, 613, 742], [612, 742, 741], [613, 614, 742], [614, 743, 742], [614, 615, 744], [614, 744, 743], [615, 616, 744], [616, 745, 744], [616, 617, 746], [616, 746, 745], [617, 618, 746], [618, 747, 746], [618, 619, 748], [618, 748, 747], [619, 620, 748], [620, 749, 748], [620, 621, 750], [620, 750, 749], [621, 622, 750], [622, 751, 750], [622, 623, 752], [622, 752, 751], [623, 624, 752], [624, 753, 752], [624, 625, 754], [624, 754, 753], [625, 626, 754], [626, 755, 754], [626, 627, 756], [626, 756, 755], [627, 628, 756], [628, 757, 756], [628, 629, 758], [628, 758, 757], [629, 630, 758], [630, 759, 758], [630, 631, 760], [630, 760, 759], [631, 632, 760], [632, 761, 760], [632, 633, 762], [632, 762, 761], [633, 634, 762], [634, 763, 762], [634, 635, 764], [634, 764, 763], [635, 636, 764], [636, 765, 764], [636, 637, 766], [636, 766, 765], [637, 638, 766], [638, 767, 766], [638, 639, 768], [638, 768, 767], [639, 640, 768], [640, 769, 768], [640, 641, 770], [640, 770, 769], [641, 642, 770], [642, 771, 770], [642, 643, 772], [642, 772, 771], [643, 644, 772], [644, 773, 772], [645, 646, 774], [646, 775, 774], [646, 
647, 776], [646, 776, 775], [647, 648, 776], [648, 777, 776], [648, 649, 778], [648, 778, 777], [649, 650, 778], [650, 779, 778], [650, 651, 780], [650, 780, 779], [651, 652, 780], [652, 781, 780], [652, 653, 782], [652, 782, 781], [653, 654, 782], [654, 783, 782], [654, 655, 784], [654, 784, 783], [655, 656, 784], [656, 785, 784], [656, 657, 786], [656, 786, 785], [657, 658, 786], [658, 787, 786], [658, 659, 788], [658, 788, 787], [659, 660, 788], [660, 789, 788], [660, 661, 790], [660, 790, 789], [661, 662, 790], [662, 791, 790], [662, 663, 792], [662, 792, 791], [663, 664, 792], [664, 793, 792], [664, 665, 794], [664, 794, 793], [665, 666, 794], [666, 795, 794], [666, 667, 796], [666, 796, 795], [667, 668, 796], [668, 797, 796], [668, 669, 798], [668, 798, 797], [669, 670, 798], [670, 799, 798], [670, 671, 800], [670, 800, 799], [671, 672, 800], [672, 801, 800], [672, 673, 802], [672, 802, 801], [673, 674, 802], [674, 803, 802], [674, 675, 804], [674, 804, 803], [675, 676, 804], [676, 805, 804], [676, 677, 806], [676, 806, 805], [677, 678, 806], [678, 807, 806], [678, 679, 808], [678, 808, 807], [679, 680, 808], [680, 809, 808], [680, 681, 810], [680, 810, 809], [681, 682, 810], [682, 811, 810], [682, 683, 812], [682, 812, 811], [683, 684, 812], [684, 813, 812], [684, 685, 814], [684, 814, 813], [685, 686, 814], [686, 815, 814], [686, 687, 816], [686, 816, 815], [687, 688, 816], [688, 817, 816], [688, 689, 818], [688, 818, 817], [689, 690, 818], [690, 819, 818], [690, 691, 820], [690, 820, 819], [691, 692, 820], [692, 821, 820], [692, 693, 822], [692, 822, 821], [693, 694, 822], [694, 823, 822], [694, 695, 824], [694, 824, 823], [695, 696, 824], [696, 825, 824], [696, 697, 826], [696, 826, 825], [697, 698, 826], [698, 827, 826], [698, 699, 828], [698, 828, 827], [699, 700, 828], [700, 829, 828], [700, 701, 830], [700, 830, 829], [701, 702, 830], [702, 831, 830], [702, 703, 832], [702, 832, 831], [703, 704, 832], [704, 833, 832], [704, 705, 834], [704, 834, 833], [705, 706, 834], [706, 835, 834], [706, 707, 836], [706, 836, 835], [707, 708, 836], [708, 837, 836], [708, 709, 838], [708, 838, 837], [709, 710, 838], [710, 839, 838], [710, 711, 840], [710, 840, 839], [711, 712, 840], [712, 841, 840], [712, 713, 842], [712, 842, 841], [713, 714, 842], [714, 843, 842], [714, 715, 844], [714, 844, 843], [715, 716, 844], [716, 845, 844], [716, 717, 846], [716, 846, 845], [717, 718, 846], [718, 847, 846], [718, 719, 848], [718, 848, 847], [719, 720, 848], [720, 849, 848], [720, 721, 850], [720, 850, 849], [721, 722, 850], [722, 851, 850], [722, 723, 852], [722, 852, 851], [723, 724, 852], [724, 853, 852], [724, 725, 854], [724, 854, 853], [725, 726, 854], [726, 855, 854], [726, 727, 856], [726, 856, 855], [727, 728, 856], [728, 857, 856], [728, 729, 858], [728, 858, 857], [729, 730, 858], [730, 859, 858], [730, 731, 860], [730, 860, 859], [731, 732, 860], [732, 861, 860], [732, 733, 862], [732, 862, 861], [733, 734, 862], [734, 863, 862], [734, 735, 864], [734, 864, 863], [735, 736, 864], [736, 865, 864], [736, 737, 866], [736, 866, 865], [737, 738, 866], [738, 867, 866], [738, 739, 868], [738, 868, 867], [739, 740, 868], [740, 869, 868], [740, 741, 870], [740, 870, 869], [741, 742, 870], [742, 871, 870], [742, 743, 872], [742, 872, 871], [743, 744, 872], [744, 873, 872], [744, 745, 874], [744, 874, 873], [745, 746, 874], [746, 875, 874], [746, 747, 876], [746, 876, 875], [747, 748, 876], [748, 877, 876], [748, 749, 878], [748, 878, 877], [749, 750, 878], [750, 879, 878], [750, 751, 880], [750, 
880, 879], [751, 752, 880], [752, 881, 880], [752, 753, 882], [752, 882, 881], [753, 754, 882], [754, 883, 882], [754, 755, 884], [754, 884, 883], [755, 756, 884], [756, 885, 884], [756, 757, 886], [756, 886, 885], [757, 758, 886], [758, 887, 886], [758, 759, 888], [758, 888, 887], [759, 760, 888], [760, 889, 888], [760, 761, 890], [760, 890, 889], [761, 762, 890], [762, 891, 890], [762, 763, 892], [762, 892, 891], [763, 764, 892], [764, 893, 892], [764, 765, 894], [764, 894, 893], [765, 766, 894], [766, 895, 894], [766, 767, 896], [766, 896, 895], [767, 768, 896], [768, 897, 896], [768, 769, 898], [768, 898, 897], [769, 770, 898], [770, 899, 898], [770, 771, 900], [770, 900, 899], [771, 772, 900], [772, 901, 900], [772, 773, 902], [772, 902, 901], [774, 775, 904], [774, 904, 903], [775, 776, 904], [776, 905, 904], [776, 777, 906], [776, 906, 905], [777, 778, 906], [778, 907, 906], [778, 779, 908], [778, 908, 907], [779, 780, 908], [780, 909, 908], [780, 781, 910], [780, 910, 909], [781, 782, 910], [782, 911, 910], [782, 783, 912], [782, 912, 911], [783, 784, 912], [784, 913, 912], [784, 785, 914], [784, 914, 913], [785, 786, 914], [786, 915, 914], [786, 787, 916], [786, 916, 915], [787, 788, 916], [788, 917, 916], [788, 789, 918], [788, 918, 917], [789, 790, 918], [790, 919, 918], [790, 791, 920], [790, 920, 919], [791, 792, 920], [792, 921, 920], [792, 793, 922], [792, 922, 921], [793, 794, 922], [794, 923, 922], [794, 795, 924], [794, 924, 923], [795, 796, 924], [796, 925, 924], [796, 797, 926], [796, 926, 925], [797, 798, 926], [798, 927, 926], [798, 799, 928], [798, 928, 927], [799, 800, 928], [800, 929, 928], [800, 801, 930], [800, 930, 929], [801, 802, 930], [802, 931, 930], [802, 803, 932], [802, 932, 931], [803, 804, 932], [804, 933, 932], [804, 805, 934], [804, 934, 933], [805, 806, 934], [806, 935, 934], [806, 807, 936], [806, 936, 935], [807, 808, 936], [808, 937, 936], [808, 809, 938], [808, 938, 937], [809, 810, 938], [810, 939, 938], [810, 811, 940], [810, 940, 939], [811, 812, 940], [812, 941, 940], [812, 813, 942], [812, 942, 941], [813, 814, 942], [814, 943, 942], [814, 815, 944], [814, 944, 943], [815, 816, 944], [816, 945, 944], [816, 817, 946], [816, 946, 945], [817, 818, 946], [818, 947, 946], [818, 819, 948], [818, 948, 947], [819, 820, 948], [820, 949, 948], [820, 821, 950], [820, 950, 949], [821, 822, 950], [822, 951, 950], [822, 823, 952], [822, 952, 951], [823, 824, 952], [824, 953, 952], [824, 825, 954], [824, 954, 953], [825, 826, 954], [826, 955, 954], [826, 827, 956], [826, 956, 955], [827, 828, 956], [828, 957, 956], [828, 829, 958], [828, 958, 957], [829, 830, 958], [830, 959, 958], [830, 831, 960], [830, 960, 959], [831, 832, 960], [832, 961, 960], [832, 833, 962], [832, 962, 961], [833, 834, 962], [834, 963, 962], [834, 835, 964], [834, 964, 963], [835, 836, 964], [836, 965, 964], [836, 837, 966], [836, 966, 965], [837, 838, 966], [838, 967, 966], [838, 839, 968], [838, 968, 967], [839, 840, 968], [840, 969, 968], [840, 841, 970], [840, 970, 969], [841, 842, 970], [842, 971, 970], [842, 843, 972], [842, 972, 971], [843, 844, 972], [844, 973, 972], [844, 845, 974], [844, 974, 973], [845, 846, 974], [846, 975, 974], [846, 847, 976], [846, 976, 975], [847, 848, 976], [848, 977, 976], [848, 849, 978], [848, 978, 977], [849, 850, 978], [850, 979, 978], [850, 851, 980], [850, 980, 979], [851, 852, 980], [852, 981, 980], [852, 853, 982], [852, 982, 981], [853, 854, 982], [854, 983, 982], [854, 855, 984], [854, 984, 983], [855, 856, 984], [856, 985, 984], [856, 
857, 986], [856, 986, 985], [857, 858, 986], [858, 987, 986], [858, 859, 988], [858, 988, 987], [859, 860, 988], [860, 989, 988], [860, 861, 990], [860, 990, 989], [861, 862, 990], [862, 991, 990], [862, 863, 992], [862, 992, 991], [863, 864, 992], [864, 993, 992], [864, 865, 994], [864, 994, 993], [865, 866, 994], [866, 995, 994], [866, 867, 996], [866, 996, 995], [867, 868, 996], [868, 997, 996], [868, 869, 998], [868, 998, 997], [869, 870, 998], [870, 999, 998], [870, 871, 1000], [870, 1000, 999], [871, 872, 1000], [872, 1001, 1000], [872, 873, 1002], [872, 1002, 1001], [873, 874, 1002], [874, 1003, 1002], [874, 875, 1004], [874, 1004, 1003], [875, 876, 1004], [876, 1005, 1004], [876, 877, 1006], [876, 1006, 1005], [877, 878, 1006], [878, 1007, 1006], [878, 879, 1008], [878, 1008, 1007], [879, 880, 1008], [880, 1009, 1008], [880, 881, 1010], [880, 1010, 1009], [881, 882, 1010], [882, 1011, 1010], [882, 883, 1012], [882, 1012, 1011], [883, 884, 1012], [884, 1013, 1012], [884, 885, 1014], [884, 1014, 1013], [885, 886, 1014], [886, 1015, 1014], [886, 887, 1016], [886, 1016, 1015], [887, 888, 1016], [888, 1017, 1016], [888, 889, 1018], [888, 1018, 1017], [889, 890, 1018], [890, 1019, 1018], [890, 891, 1020], [890, 1020, 1019], [891, 892, 1020], [892, 1021, 1020], [892, 893, 1022], [892, 1022, 1021], [893, 894, 1022], [894, 1023, 1022], [894, 895, 1024], [894, 1024, 1023], [895, 896, 1024], [896, 1025, 1024], [896, 897, 1026], [896, 1026, 1025], [897, 898, 1026], [898, 1027, 1026], [898, 899, 1028], [898, 1028, 1027], [899, 900, 1028], [900, 1029, 1028], [900, 901, 1030], [900, 1030, 1029], [901, 902, 1030], [902, 1031, 1030], [903, 904, 1032], [904, 1033, 1032], [904, 905, 1034], [904, 1034, 1033], [905, 906, 1034], [906, 1035, 1034], [906, 907, 1036], [906, 1036, 1035], [907, 908, 1036], [908, 1037, 1036], [908, 909, 1038], [908, 1038, 1037], [909, 910, 1038], [910, 1039, 1038], [910, 911, 1040], [910, 1040, 1039], [911, 912, 1040], [912, 1041, 1040], [912, 913, 1042], [912, 1042, 1041], [913, 914, 1042], [914, 1043, 1042], [914, 915, 1044], [914, 1044, 1043], [915, 916, 1044], [916, 1045, 1044], [916, 917, 1046], [916, 1046, 1045], [917, 918, 1046], [918, 1047, 1046], [918, 919, 1048], [918, 1048, 1047], [919, 920, 1048], [920, 1049, 1048], [920, 921, 1050], [920, 1050, 1049], [921, 922, 1050], [922, 1051, 1050], [922, 923, 1052], [922, 1052, 1051], [923, 924, 1052], [924, 1053, 1052], [924, 925, 1054], [924, 1054, 1053], [925, 926, 1054], [926, 1055, 1054], [926, 927, 1056], [926, 1056, 1055], [927, 928, 1056], [928, 1057, 1056], [928, 929, 1058], [928, 1058, 1057], [929, 930, 1058], [930, 1059, 1058], [930, 931, 1060], [930, 1060, 1059], [931, 932, 1060], [932, 1061, 1060], [932, 933, 1062], [932, 1062, 1061], [933, 934, 1062], [934, 1063, 1062], [934, 935, 1064], [934, 1064, 1063], [935, 936, 1064], [936, 1065, 1064], [936, 937, 1066], [936, 1066, 1065], [937, 938, 1066], [938, 1067, 1066], [938, 939, 1068], [938, 1068, 1067], [939, 940, 1068], [940, 1069, 1068], [940, 941, 1070], [940, 1070, 1069], [941, 942, 1070], [942, 1071, 1070], [942, 943, 1072], [942, 1072, 1071], [943, 944, 1072], [944, 1073, 1072], [944, 945, 1074], [944, 1074, 1073], [945, 946, 1074], [946, 1075, 1074], [946, 947, 1076], [946, 1076, 1075], [947, 948, 1076], [948, 1077, 1076], [948, 949, 1078], [948, 1078, 1077], [949, 950, 1078], [950, 1079, 1078], [950, 951, 1080], [950, 1080, 1079], [951, 952, 1080], [952, 1081, 1080], [952, 953, 1082], [952, 1082, 1081], [953, 954, 1082], [954, 1083, 1082], [954, 955, 
1084], [954, 1084, 1083], [955, 956, 1084], [956, 1085, 1084], [956, 957, 1086], [956, 1086, 1085], [957, 958, 1086], [958, 1087, 1086], [958, 959, 1088], [958, 1088, 1087], [959, 960, 1088], [960, 1089, 1088], [960, 961, 1090], [960, 1090, 1089], [961, 962, 1090], [962, 1091, 1090], [962, 963, 1092], [962, 1092, 1091], [963, 964, 1092], [964, 1093, 1092], [964, 965, 1094], [964, 1094, 1093], [965, 966, 1094], [966, 1095, 1094], [966, 967, 1096], [966, 1096, 1095], [967, 968, 1096], [968, 1097, 1096], [968, 969, 1098], [968, 1098, 1097], [969, 970, 1098], [970, 1099, 1098], [970, 971, 1100], [970, 1100, 1099], [971, 972, 1100], [972, 1101, 1100], [972, 973, 1102], [972, 1102, 1101], [973, 974, 1102], [974, 1103, 1102], [974, 975, 1104], [974, 1104, 1103], [975, 976, 1104], [976, 1105, 1104], [976, 977, 1106], [976, 1106, 1105], [977, 978, 1106], [978, 1107, 1106], [978, 979, 1108], [978, 1108, 1107], [979, 980, 1108], [980, 1109, 1108], [980, 981, 1110], [980, 1110, 1109], [981, 982, 1110], [982, 1111, 1110], [982, 983, 1112], [982, 1112, 1111], [983, 984, 1112], [984, 1113, 1112], [984, 985, 1114], [984, 1114, 1113], [985, 986, 1114], [986, 1115, 1114], [986, 987, 1116], [986, 1116, 1115], [987, 988, 1116], [988, 1117, 1116], [988, 989, 1118], [988, 1118, 1117], [989, 990, 1118], [990, 1119, 1118], [990, 991, 1120], [990, 1120, 1119], [991, 992, 1120], [992, 1121, 1120], [992, 993, 1122], [992, 1122, 1121], [993, 994, 1122], [994, 1123, 1122], [994, 995, 1124], [994, 1124, 1123], [995, 996, 1124], [996, 1125, 1124], [996, 997, 1126], [996, 1126, 1125], [997, 998, 1126], [998, 1127, 1126], [998, 999, 1128], [998, 1128, 1127], [999, 1000, 1128], [1000, 1129, 1128], [1000, 1001, 1130], [1000, 1130, 1129], [1001, 1002, 1130], [1002, 1131, 1130], [1002, 1003, 1132], [1002, 1132, 1131], [1003, 1004, 1132], [1004, 1133, 1132], [1004, 1005, 1134], [1004, 1134, 1133], [1005, 1006, 1134], [1006, 1135, 1134], [1006, 1007, 1136], [1006, 1136, 1135], [1007, 1008, 1136], [1008, 1137, 1136], [1008, 1009, 1138], [1008, 1138, 1137], [1009, 1010, 1138], [1010, 1139, 1138], [1010, 1011, 1140], [1010, 1140, 1139], [1011, 1012, 1140], [1012, 1141, 1140], [1012, 1013, 1142], [1012, 1142, 1141], [1013, 1014, 1142], [1014, 1143, 1142], [1014, 1015, 1144], [1014, 1144, 1143], [1015, 1016, 1144], [1016, 1145, 1144], [1016, 1017, 1146], [1016, 1146, 1145], [1017, 1018, 1146], [1018, 1147, 1146], [1018, 1019, 1148], [1018, 1148, 1147], [1019, 1020, 1148], [1020, 1149, 1148], [1020, 1021, 1150], [1020, 1150, 1149], [1021, 1022, 1150], [1022, 1151, 1150], [1022, 1023, 1152], [1022, 1152, 1151], [1023, 1024, 1152], [1024, 1153, 1152], [1024, 1025, 1154], [1024, 1154, 1153], [1025, 1026, 1154], [1026, 1155, 1154], [1026, 1027, 1156], [1026, 1156, 1155], [1027, 1028, 1156], [1028, 1157, 1156], [1028, 1029, 1158], [1028, 1158, 1157], [1029, 1030, 1158], [1030, 1159, 1158], [1030, 1031, 1160], [1030, 1160, 1159], [1032, 1033, 1162], [1032, 1162, 1161], [1033, 1034, 1162], [1034, 1163, 1162], [1034, 1035, 1164], [1034, 1164, 1163], [1035, 1036, 1164], [1036, 1165, 1164], [1036, 1037, 1166], [1036, 1166, 1165], [1037, 1038, 1166], [1038, 1167, 1166], [1038, 1039, 1168], [1038, 1168, 1167], [1039, 1040, 1168], [1040, 1169, 1168], [1040, 1041, 1170], [1040, 1170, 1169], [1041, 1042, 1170], [1042, 1171, 1170], [1042, 1043, 1172], [1042, 1172, 1171], [1043, 1044, 1172], [1044, 1173, 1172], [1044, 1045, 1174], [1044, 1174, 1173], [1045, 1046, 1174], [1046, 1175, 1174], [1046, 1047, 1176], [1046, 1176, 1175], [1047, 1048, 1176], 
[1048, 1177, 1176], [1048, 1049, 1178], [1048, 1178, 1177], [1049, 1050, 1178], [1050, 1179, 1178], [1050, 1051, 1180], [1050, 1180, 1179], [1051, 1052, 1180], [1052, 1181, 1180], [1052, 1053, 1182], [1052, 1182, 1181], [1053, 1054, 1182], [1054, 1183, 1182], [1054, 1055, 1184], [1054, 1184, 1183], [1055, 1056, 1184], [1056, 1185, 1184], [1056, 1057, 1186], [1056, 1186, 1185], [1057, 1058, 1186], [1058, 1187, 1186], [1058, 1059, 1188], [1058, 1188, 1187], [1059, 1060, 1188], [1060, 1189, 1188], [1060, 1061, 1190], [1060, 1190, 1189], [1061, 1062, 1190], [1062, 1191, 1190], [1062, 1063, 1192], [1062, 1192, 1191], [1063, 1064, 1192], [1064, 1193, 1192], [1064, 1065, 1194], [1064, 1194, 1193], [1065, 1066, 1194], [1066, 1195, 1194], [1066, 1067, 1196], [1066, 1196, 1195], [1067, 1068, 1196], [1068, 1197, 1196], [1068, 1069, 1198], [1068, 1198, 1197], [1069, 1070, 1198], [1070, 1199, 1198], [1070, 1071, 1200], [1070, 1200, 1199], [1071, 1072, 1200], [1072, 1201, 1200], [1072, 1073, 1202], [1072, 1202, 1201], [1073, 1074, 1202], [1074, 1203, 1202], [1074, 1075, 1204], [1074, 1204, 1203], [1075, 1076, 1204], [1076, 1205, 1204], [1076, 1077, 1206], [1076, 1206, 1205], [1077, 1078, 1206], [1078, 1207, 1206], [1078, 1079, 1208], [1078, 1208, 1207], [1079, 1080, 1208], [1080, 1209, 1208], [1080, 1081, 1210], [1080, 1210, 1209], [1081, 1082, 1210], [1082, 1211, 1210], [1082, 1083, 1212], [1082, 1212, 1211], [1083, 1084, 1212], [1084, 1213, 1212], [1084, 1085, 1214], [1084, 1214, 1213], [1085, 1086, 1214], [1086, 1215, 1214], [1086, 1087, 1216], [1086, 1216, 1215], [1087, 1088, 1216], [1088, 1217, 1216], [1088, 1089, 1218], [1088, 1218, 1217], [1089, 1090, 1218], [1090, 1219, 1218], [1090, 1091, 1220], [1090, 1220, 1219], [1091, 1092, 1220], [1092, 1221, 1220], [1092, 1093, 1222], [1092, 1222, 1221], [1093, 1094, 1222], [1094, 1223, 1222], [1094, 1095, 1224], [1094, 1224, 1223], [1095, 1096, 1224], [1096, 1225, 1224], [1096, 1097, 1226], [1096, 1226, 1225], [1097, 1098, 1226], [1098, 1227, 1226], [1098, 1099, 1228], [1098, 1228, 1227], [1099, 1100, 1228], [1100, 1229, 1228], [1100, 1101, 1230], [1100, 1230, 1229], [1101, 1102, 1230], [1102, 1231, 1230], [1102, 1103, 1232], [1102, 1232, 1231], [1103, 1104, 1232], [1104, 1233, 1232], [1104, 1105, 1234], [1104, 1234, 1233], [1105, 1106, 1234], [1106, 1235, 1234], [1106, 1107, 1236], [1106, 1236, 1235], [1107, 1108, 1236], [1108, 1237, 1236], [1108, 1109, 1238], [1108, 1238, 1237], [1109, 1110, 1238], [1110, 1239, 1238], [1110, 1111, 1240], [1110, 1240, 1239], [1111, 1112, 1240], [1112, 1241, 1240], [1112, 1113, 1242], [1112, 1242, 1241], [1113, 1114, 1242], [1114, 1243, 1242], [1114, 1115, 1244], [1114, 1244, 1243], [1115, 1116, 1244], [1116, 1245, 1244], [1116, 1117, 1246], [1116, 1246, 1245], [1117, 1118, 1246], [1118, 1247, 1246], [1118, 1119, 1248], [1118, 1248, 1247], [1119, 1120, 1248], [1120, 1249, 1248], [1120, 1121, 1250], [1120, 1250, 1249], [1121, 1122, 1250], [1122, 1251, 1250], [1122, 1123, 1252], [1122, 1252, 1251], [1123, 1124, 1252], [1124, 1253, 1252], [1124, 1125, 1254], [1124, 1254, 1253], [1125, 1126, 1254], [1126, 1255, 1254], [1126, 1127, 1256], [1126, 1256, 1255], [1127, 1128, 1256], [1128, 1257, 1256], [1128, 1129, 1258], [1128, 1258, 1257], [1129, 1130, 1258], [1130, 1259, 1258], [1130, 1131, 1260], [1130, 1260, 1259], [1131, 1132, 1260], [1132, 1261, 1260], [1132, 1133, 1262], [1132, 1262, 1261], [1133, 1134, 1262], [1134, 1263, 1262], [1134, 1135, 1264], [1134, 1264, 1263], [1135, 1136, 1264], [1136, 1265, 1264], [1136, 1137, 
1266], [1136, 1266, 1265], [1137, 1138, 1266], [1138, 1267, 1266], [1138, 1139, 1268], [1138, 1268, 1267], [1139, 1140, 1268], [1140, 1269, 1268], [1140, 1141, 1270], [1140, 1270, 1269], [1141, 1142, 1270], [1142, 1271, 1270], [1142, 1143, 1272], [1142, 1272, 1271], [1143, 1144, 1272], [1144, 1273, 1272], [1144, 1145, 1274], [1144, 1274, 1273], [1145, 1146, 1274], [1146, 1275, 1274], [1146, 1147, 1276], [1146, 1276, 1275], [1147, 1148, 1276], [1148, 1277, 1276], [1148, 1149, 1278], [1148, 1278, 1277], [1149, 1150, 1278], [1150, 1279, 1278], [1150, 1151, 1280], [1150, 1280, 1279], [1151, 1152, 1280], [1152, 1281, 1280], [1152, 1153, 1282], [1152, 1282, 1281], [1153, 1154, 1282], [1154, 1283, 1282], [1154, 1155, 1284], [1154, 1284, 1283], [1155, 1156, 1284], [1156, 1285, 1284], [1156, 1157, 1286], [1156, 1286, 1285], [1157, 1158, 1286], [1158, 1287, 1286], [1158, 1159, 1288], [1158, 1288, 1287], [1159, 1160, 1288], [1160, 1289, 1288], [1161, 1162, 1290], [1162, 1291, 1290], [1162, 1163, 1292], [1162, 1292, 1291], [1163, 1164, 1292], [1164, 1293, 1292], [1164, 1165, 1294], [1164, 1294, 1293], [1165, 1166, 1294], [1166, 1295, 1294], [1166, 1167, 1296], [1166, 1296, 1295], [1167, 1168, 1296], [1168, 1297, 1296], [1168, 1169, 1298], [1168, 1298, 1297], [1169, 1170, 1298], [1170, 1299, 1298], [1170, 1171, 1300], [1170, 1300, 1299], [1171, 1172, 1300], [1172, 1301, 1300], [1172, 1173, 1302], [1172, 1302, 1301], [1173, 1174, 1302], [1174, 1303, 1302], [1174, 1175, 1304], [1174, 1304, 1303], [1175, 1176, 1304], [1176, 1305, 1304], [1176, 1177, 1306], [1176, 1306, 1305], [1177, 1178, 1306], [1178, 1307, 1306], [1178, 1179, 1308], [1178, 1308, 1307], [1179, 1180, 1308], [1180, 1309, 1308], [1180, 1181, 1310], [1180, 1310, 1309], [1181, 1182, 1310], [1182, 1311, 1310], [1182, 1183, 1312], [1182, 1312, 1311], [1183, 1184, 1312], [1184, 1313, 1312], [1184, 1185, 1314], [1184, 1314, 1313], [1185, 1186, 1314], [1186, 1315, 1314], [1186, 1187, 1316], [1186, 1316, 1315], [1187, 1188, 1316], [1188, 1317, 1316], [1188, 1189, 1318], [1188, 1318, 1317], [1189, 1190, 1318], [1190, 1319, 1318], [1190, 1191, 1320], [1190, 1320, 1319], [1191, 1192, 1320], [1192, 1321, 1320], [1192, 1193, 1322], [1192, 1322, 1321], [1193, 1194, 1322], [1194, 1323, 1322], [1194, 1195, 1324], [1194, 1324, 1323], [1195, 1196, 1324], [1196, 1325, 1324], [1196, 1197, 1326], [1196, 1326, 1325], [1197, 1198, 1326], [1198, 1327, 1326], [1198, 1199, 1328], [1198, 1328, 1327], [1199, 1200, 1328], [1200, 1329, 1328], [1200, 1201, 1330], [1200, 1330, 1329], [1201, 1202, 1330], [1202, 1331, 1330], [1202, 1203, 1332], [1202, 1332, 1331], [1203, 1204, 1332], [1204, 1333, 1332], [1204, 1205, 1334], [1204, 1334, 1333], [1205, 1206, 1334], [1206, 1335, 1334], [1206, 1207, 1336], [1206, 1336, 1335], [1207, 1208, 1336], [1208, 1337, 1336], [1208, 1209, 1338], [1208, 1338, 1337], [1209, 1210, 1338], [1210, 1339, 1338], [1210, 1211, 1340], [1210, 1340, 1339], [1211, 1212, 1340], [1212, 1341, 1340], [1212, 1213, 1342], [1212, 1342, 1341], [1213, 1214, 1342], [1214, 1343, 1342], [1214, 1215, 1344], [1214, 1344, 1343], [1215, 1216, 1344], [1216, 1345, 1344], [1216, 1217, 1346], [1216, 1346, 1345], [1217, 1218, 1346], [1218, 1347, 1346], [1218, 1219, 1348], [1218, 1348, 1347], [1219, 1220, 1348], [1220, 1349, 1348], [1220, 1221, 1350], [1220, 1350, 1349], [1221, 1222, 1350], [1222, 1351, 1350], [1222, 1223, 1352], [1222, 1352, 1351], [1223, 1224, 1352], [1224, 1353, 1352], [1224, 1225, 1354], [1224, 1354, 1353], [1225, 1226, 1354], [1226, 1355, 1354], [1226, 
1227, 1356], [1226, 1356, 1355], [1227, 1228, 1356], [1228, 1357, 1356], [1228, 1229, 1358], [1228, 1358, 1357], [1229, 1230, 1358], [1230, 1359, 1358], [1230, 1231, 1360], [1230, 1360, 1359], [1231, 1232, 1360], [1232, 1361, 1360], [1232, 1233, 1362], [1232, 1362, 1361], [1233, 1234, 1362], [1234, 1363, 1362], [1234, 1235, 1364], [1234, 1364, 1363], [1235, 1236, 1364], [1236, 1365, 1364], [1236, 1237, 1366], [1236, 1366, 1365], [1237, 1238, 1366], [1238, 1367, 1366], [1238, 1239, 1368], [1238, 1368, 1367], [1239, 1240, 1368], [1240, 1369, 1368], [1240, 1241, 1370], [1240, 1370, 1369], [1241, 1242, 1370], [1242, 1371, 1370], [1242, 1243, 1372], [1242, 1372, 1371], [1243, 1244, 1372], [1244, 1373, 1372], [1244, 1245, 1374], [1244, 1374, 1373], [1245, 1246, 1374], [1246, 1375, 1374], [1246, 1247, 1376], [1246, 1376, 1375], [1247, 1248, 1376], [1248, 1377, 1376], [1248, 1249, 1378], [1248, 1378, 1377], [1249, 1250, 1378], [1250, 1379, 1378], [1250, 1251, 1380], [1250, 1380, 1379], [1251, 1252, 1380], [1252, 1381, 1380], [1252, 1253, 1382], [1252, 1382, 1381], [1253, 1254, 1382], [1254, 1383, 1382], [1254, 1255, 1384], [1254, 1384, 1383], [1255, 1256, 1384], [1256, 1385, 1384], [1256, 1257, 1386], [1256, 1386, 1385], [1257, 1258, 1386], [1258, 1387, 1386], [1258, 1259, 1388], [1258, 1388, 1387], [1259, 1260, 1388], [1260, 1389, 1388], [1260, 1261, 1390], [1260, 1390, 1389], [1261, 1262, 1390], [1262, 1391, 1390], [1262, 1263, 1392], [1262, 1392, 1391], [1263, 1264, 1392], [1264, 1393, 1392], [1264, 1265, 1394], [1264, 1394, 1393], [1265, 1266, 1394], [1266, 1395, 1394], [1266, 1267, 1396], [1266, 1396, 1395], [1267, 1268, 1396], [1268, 1397, 1396], [1268, 1269, 1398], [1268, 1398, 1397], [1269, 1270, 1398], [1270, 1399, 1398], [1270, 1271, 1400], [1270, 1400, 1399], [1271, 1272, 1400], [1272, 1401, 1400], [1272, 1273, 1402], [1272, 1402, 1401], [1273, 1274, 1402], [1274, 1403, 1402], [1274, 1275, 1404], [1274, 1404, 1403], [1275, 1276, 1404], [1276, 1405, 1404], [1276, 1277, 1406], [1276, 1406, 1405], [1277, 1278, 1406], [1278, 1407, 1406], [1278, 1279, 1408], [1278, 1408, 1407], [1279, 1280, 1408], [1280, 1409, 1408], [1280, 1281, 1410], [1280, 1410, 1409], [1281, 1282, 1410], [1282, 1411, 1410], [1282, 1283, 1412], [1282, 1412, 1411], [1283, 1284, 1412], [1284, 1413, 1412], [1284, 1285, 1414], [1284, 1414, 1413], [1285, 1286, 1414], [1286, 1415, 1414], [1286, 1287, 1416], [1286, 1416, 1415], [1287, 1288, 1416], [1288, 1417, 1416], [1288, 1289, 1418], [1288, 1418, 1417], [1290, 1291, 1420], [1290, 1420, 1419], [1291, 1292, 1420], [1292, 1421, 1420], [1292, 1293, 1422], [1292, 1422, 1421], [1293, 1294, 1422], [1294, 1423, 1422], [1294, 1295, 1424], [1294, 1424, 1423], [1295, 1296, 1424], [1296, 1425, 1424], [1296, 1297, 1426], [1296, 1426, 1425], [1297, 1298, 1426], [1298, 1427, 1426], [1298, 1299, 1428], [1298, 1428, 1427], [1299, 1300, 1428], [1300, 1429, 1428], [1300, 1301, 1430], [1300, 1430, 1429], [1301, 1302, 1430], [1302, 1431, 1430], [1302, 1303, 1432], [1302, 1432, 1431], [1303, 1304, 1432], [1304, 1433, 1432], [1304, 1305, 1434], [1304, 1434, 1433], [1305, 1306, 1434], [1306, 1435, 1434], [1306, 1307, 1436], [1306, 1436, 1435], [1307, 1308, 1436], [1308, 1437, 1436], [1308, 1309, 1438], [1308, 1438, 1437], [1309, 1310, 1438], [1310, 1439, 1438], [1310, 1311, 1440], [1310, 1440, 1439], [1311, 1312, 1440], [1312, 1441, 1440], [1312, 1313, 1442], [1312, 1442, 1441], [1313, 1314, 1442], [1314, 1443, 1442], [1314, 1315, 1444], [1314, 1444, 1443], [1315, 1316, 1444], [1316, 1445, 1444], 
[1316, 1317, 1446], [1316, 1446, 1445], [1317, 1318, 1446], [1318, 1447, 1446], [1318, 1319, 1448], [1318, 1448, 1447], [1319, 1320, 1448], [1320, 1449, 1448], [1320, 1321, 1450], [1320, 1450, 1449], [1321, 1322, 1450], [1322, 1451, 1450], [1322, 1323, 1452], [1322, 1452, 1451], [1323, 1324, 1452], [1324, 1453, 1452], [1324, 1325, 1454], [1324, 1454, 1453], [1325, 1326, 1454], [1326, 1455, 1454], [1326, 1327, 1456], [1326, 1456, 1455], [1327, 1328, 1456], [1328, 1457, 1456], [1328, 1329, 1458], [1328, 1458, 1457], [1329, 1330, 1458], [1330, 1459, 1458], [1330, 1331, 1460], [1330, 1460, 1459], [1331, 1332, 1460], [1332, 1461, 1460], [1332, 1333, 1462], [1332, 1462, 1461], [1333, 1334, 1462], [1334, 1463, 1462], [1334, 1335, 1464], [1334, 1464, 1463], [1335, 1336, 1464], [1336, 1465, 1464], [1336, 1337, 1466], [1336, 1466, 1465], [1337, 1338, 1466], [1338, 1467, 1466], [1338, 1339, 1468], [1338, 1468, 1467], [1339, 1340, 1468], [1340, 1469, 1468], [1340, 1341, 1470], [1340, 1470, 1469], [1341, 1342, 1470], [1342, 1471, 1470], [1342, 1343, 1472], [1342, 1472, 1471], [1343, 1344, 1472], [1344, 1473, 1472], [1344, 1345, 1474], [1344, 1474, 1473], [1345, 1346, 1474], [1346, 1475, 1474], [1346, 1347, 1476], [1346, 1476, 1475], [1347, 1348, 1476], [1348, 1477, 1476], [1348, 1349, 1478], [1348, 1478, 1477], [1349, 1350, 1478], [1350, 1479, 1478], [1350, 1351, 1480], [1350, 1480, 1479], [1351, 1352, 1480], [1352, 1481, 1480], [1352, 1353, 1482], [1352, 1482, 1481], [1353, 1354, 1482], [1354, 1483, 1482], [1354, 1355, 1484], [1354, 1484, 1483], [1355, 1356, 1484], [1356, 1485, 1484], [1356, 1357, 1486], [1356, 1486, 1485], [1357, 1358, 1486], [1358, 1487, 1486], [1358, 1359, 1488], [1358, 1488, 1487], [1359, 1360, 1488], [1360, 1489, 1488], [1360, 1361, 1490], [1360, 1490, 1489], [1361, 1362, 1490], [1362, 1491, 1490], [1362, 1363, 1492], [1362, 1492, 1491], [1363, 1364, 1492], [1364, 1493, 1492], [1364, 1365, 1494], [1364, 1494, 1493], [1365, 1366, 1494], [1366, 1495, 1494], [1366, 1367, 1496], [1366, 1496, 1495], [1367, 1368, 1496], [1368, 1497, 1496], [1368, 1369, 1498], [1368, 1498, 1497], [1369, 1370, 1498], [1370, 1499, 1498], [1370, 1371, 1500], [1370, 1500, 1499], [1371, 1372, 1500], [1372, 1501, 1500], [1372, 1373, 1502], [1372, 1502, 1501], [1373, 1374, 1502], [1374, 1503, 1502], [1374, 1375, 1504], [1374, 1504, 1503], [1375, 1376, 1504], [1376, 1505, 1504], [1376, 1377, 1506], [1376, 1506, 1505], [1377, 1378, 1506], [1378, 1507, 1506], [1378, 1379, 1508], [1378, 1508, 1507], [1379, 1380, 1508], [1380, 1509, 1508], [1380, 1381, 1510], [1380, 1510, 1509], [1381, 1382, 1510], [1382, 1511, 1510], [1382, 1383, 1512], [1382, 1512, 1511], [1383, 1384, 1512], [1384, 1513, 1512], [1384, 1385, 1514], [1384, 1514, 1513], [1385, 1386, 1514], [1386, 1515, 1514], [1386, 1387, 1516], [1386, 1516, 1515], [1387, 1388, 1516], [1388, 1517, 1516], [1388, 1389, 1518], [1388, 1518, 1517], [1389, 1390, 1518], [1390, 1519, 1518], [1390, 1391, 1520], [1390, 1520, 1519], [1391, 1392, 1520], [1392, 1521, 1520], [1392, 1393, 1522], [1392, 1522, 1521], [1393, 1394, 1522], [1394, 1523, 1522], [1394, 1395, 1524], [1394, 1524, 1523], [1395, 1396, 1524], [1396, 1525, 1524], [1396, 1397, 1526], [1396, 1526, 1525], [1397, 1398, 1526], [1398, 1527, 1526], [1398, 1399, 1528], [1398, 1528, 1527], [1399, 1400, 1528], [1400, 1529, 1528], [1400, 1401, 1530], [1400, 1530, 1529], [1401, 1402, 1530], [1402, 1531, 1530], [1402, 1403, 1532], [1402, 1532, 1531], [1403, 1404, 1532], [1404, 1533, 1532], [1404, 1405, 1534], [1404, 1534, 
1533], [1405, 1406, 1534], [1406, 1535, 1534], [1406, 1407, 1536], [1406, 1536, 1535], [1407, 1408, 1536], [1408, 1537, 1536], [1408, 1409, 1538], [1408, 1538, 1537], [1409, 1410, 1538], [1410, 1539, 1538], [1410, 1411, 1540], [1410, 1540, 1539], [1411, 1412, 1540], [1412, 1541, 1540], [1412, 1413, 1542], [1412, 1542, 1541], [1413, 1414, 1542], [1414, 1543, 1542], [1414, 1415, 1544], [1414, 1544, 1543], [1415, 1416, 1544], [1416, 1545, 1544], [1416, 1417, 1546], [1416, 1546, 1545], [1417, 1418, 1546], [1418, 1547, 1546], [1419, 1420, 1548], [1420, 1549, 1548], [1420, 1421, 1550], [1420, 1550, 1549], [1421, 1422, 1550], [1422, 1551, 1550], [1422, 1423, 1552], [1422, 1552, 1551], [1423, 1424, 1552], [1424, 1553, 1552], [1424, 1425, 1554], [1424, 1554, 1553], [1425, 1426, 1554], [1426, 1555, 1554], [1426, 1427, 1556], [1426, 1556, 1555], [1427, 1428, 1556], [1428, 1557, 1556], [1428, 1429, 1558], [1428, 1558, 1557], [1429, 1430, 1558], [1430, 1559, 1558], [1430, 1431, 1560], [1430, 1560, 1559], [1431, 1432, 1560], [1432, 1561, 1560], [1432, 1433, 1562], [1432, 1562, 1561], [1433, 1434, 1562], [1434, 1563, 1562], [1434, 1435, 1564], [1434, 1564, 1563], [1435, 1436, 1564], [1436, 1565, 1564], [1436, 1437, 1566], [1436, 1566, 1565], [1437, 1438, 1566], [1438, 1567, 1566], [1438, 1439, 1568], [1438, 1568, 1567], [1439, 1440, 1568], [1440, 1569, 1568], [1440, 1441, 1570], [1440, 1570, 1569], [1441, 1442, 1570], [1442, 1571, 1570], [1442, 1443, 1572], [1442, 1572, 1571], [1443, 1444, 1572], [1444, 1573, 1572], [1444, 1445, 1574], [1444, 1574, 1573], [1445, 1446, 1574], [1446, 1575, 1574], [1446, 1447, 1576], [1446, 1576, 1575], [1447, 1448, 1576], [1448, 1577, 1576], [1448, 1449, 1578], [1448, 1578, 1577], [1449, 1450, 1578], [1450, 1579, 1578], [1450, 1451, 1580], [1450, 1580, 1579], [1451, 1452, 1580], [1452, 1581, 1580], [1452, 1453, 1582], [1452, 1582, 1581], [1453, 1454, 1582], [1454, 1583, 1582], [1454, 1455, 1584], [1454, 1584, 1583], [1455, 1456, 1584], [1456, 1585, 1584], [1456, 1457, 1586], [1456, 1586, 1585], [1457, 1458, 1586], [1458, 1587, 1586], [1458, 1459, 1588], [1458, 1588, 1587], [1459, 1460, 1588], [1460, 1589, 1588], [1460, 1461, 1590], [1460, 1590, 1589], [1461, 1462, 1590], [1462, 1591, 1590], [1462, 1463, 1592], [1462, 1592, 1591], [1463, 1464, 1592], [1464, 1593, 1592], [1464, 1465, 1594], [1464, 1594, 1593], [1465, 1466, 1594], [1466, 1595, 1594], [1466, 1467, 1596], [1466, 1596, 1595], [1467, 1468, 1596], [1468, 1597, 1596], [1468, 1469, 1598], [1468, 1598, 1597], [1469, 1470, 1598], [1470, 1599, 1598], [1470, 1471, 1600], [1470, 1600, 1599], [1471, 1472, 1600], [1472, 1601, 1600], [1472, 1473, 1602], [1472, 1602, 1601], [1473, 1474, 1602], [1474, 1603, 1602], [1474, 1475, 1604], [1474, 1604, 1603], [1475, 1476, 1604], [1476, 1605, 1604], [1476, 1477, 1606], [1476, 1606, 1605], [1477, 1478, 1606], [1478, 1607, 1606], [1478, 1479, 1608], [1478, 1608, 1607], [1479, 1480, 1608], [1480, 1609, 1608], [1480, 1481, 1610], [1480, 1610, 1609], [1481, 1482, 1610], [1482, 1611, 1610], [1482, 1483, 1612], [1482, 1612, 1611], [1483, 1484, 1612], [1484, 1613, 1612], [1484, 1485, 1614], [1484, 1614, 1613], [1485, 1486, 1614], [1486, 1615, 1614], [1486, 1487, 1616], [1486, 1616, 1615], [1487, 1488, 1616], [1488, 1617, 1616], [1488, 1489, 1618], [1488, 1618, 1617], [1489, 1490, 1618], [1490, 1619, 1618], [1490, 1491, 1620], [1490, 1620, 1619], [1491, 1492, 1620], [1492, 1621, 1620], [1492, 1493, 1622], [1492, 1622, 1621], [1493, 1494, 1622], [1494, 1623, 1622], [1494, 1495, 1624], [1494, 
1624, 1623], [1495, 1496, 1624], [1496, 1625, 1624], [1496, 1497, 1626], [1496, 1626, 1625], [1497, 1498, 1626], [1498, 1627, 1626], [1498, 1499, 1628], [1498, 1628, 1627], [1499, 1500, 1628], [1500, 1629, 1628], [1500, 1501, 1630], [1500, 1630, 1629], [1501, 1502, 1630], [1502, 1631, 1630], [1502, 1503, 1632], [1502, 1632, 1631], [1503, 1504, 1632], [1504, 1633, 1632], [1504, 1505, 1634], [1504, 1634, 1633], [1505, 1506, 1634], [1506, 1635, 1634], [1506, 1507, 1636], [1506, 1636, 1635], [1507, 1508, 1636], [1508, 1637, 1636], [1508, 1509, 1638], [1508, 1638, 1637], [1509, 1510, 1638], [1510, 1639, 1638], [1510, 1511, 1640], [1510, 1640, 1639], [1511, 1512, 1640], [1512, 1641, 1640], [1512, 1513, 1642], [1512, 1642, 1641], [1513, 1514, 1642], [1514, 1643, 1642], [1514, 1515, 1644], [1514, 1644, 1643], [1515, 1516, 1644], [1516, 1645, 1644], [1516, 1517, 1646], [1516, 1646, 1645], [1517, 1518, 1646], [1518, 1647, 1646], [1518, 1519, 1648], [1518, 1648, 1647], [1519, 1520, 1648], [1520, 1649, 1648], [1520, 1521, 1650], [1520, 1650, 1649], [1521, 1522, 1650], [1522, 1651, 1650], [1522, 1523, 1652], [1522, 1652, 1651], [1523, 1524, 1652], [1524, 1653, 1652], [1524, 1525, 1654], [1524, 1654, 1653], [1525, 1526, 1654], [1526, 1655, 1654], [1526, 1527, 1656], [1526, 1656, 1655], [1527, 1528, 1656], [1528, 1657, 1656], [1528, 1529, 1658], [1528, 1658, 1657], [1529, 1530, 1658], [1530, 1659, 1658], [1530, 1531, 1660], [1530, 1660, 1659], [1531, 1532, 1660], [1532, 1661, 1660], [1532, 1533, 1662], [1532, 1662, 1661], [1533, 1534, 1662], [1534, 1663, 1662], [1534, 1535, 1664], [1534, 1664, 1663], [1535, 1536, 1664], [1536, 1665, 1664], [1536, 1537, 1666], [1536, 1666, 1665], [1537, 1538, 1666], [1538, 1667, 1666], [1538, 1539, 1668], [1538, 1668, 1667], [1539, 1540, 1668], [1540, 1669, 1668], [1540, 1541, 1670], [1540, 1670, 1669], [1541, 1542, 1670], [1542, 1671, 1670], [1542, 1543, 1672], [1542, 1672, 1671], [1543, 1544, 1672], [1544, 1673, 1672], [1544, 1545, 1674], [1544, 1674, 1673], [1545, 1546, 1674], [1546, 1675, 1674], [1546, 1547, 1676], [1546, 1676, 1675], [1548, 1549, 1678], [1548, 1678, 1677], [1549, 1550, 1678], [1550, 1679, 1678], [1550, 1551, 1680], [1550, 1680, 1679], [1551, 1552, 1680], [1552, 1681, 1680], [1552, 1553, 1682], [1552, 1682, 1681], [1553, 1554, 1682], [1554, 1683, 1682], [1554, 1555, 1684], [1554, 1684, 1683], [1555, 1556, 1684], [1556, 1685, 1684], [1556, 1557, 1686], [1556, 1686, 1685], [1557, 1558, 1686], [1558, 1687, 1686], [1558, 1559, 1688], [1558, 1688, 1687], [1559, 1560, 1688], [1560, 1689, 1688], [1560, 1561, 1690], [1560, 1690, 1689], [1561, 1562, 1690], [1562, 1691, 1690], [1562, 1563, 1692], [1562, 1692, 1691], [1563, 1564, 1692], [1564, 1693, 1692], [1564, 1565, 1694], [1564, 1694, 1693], [1565, 1566, 1694], [1566, 1695, 1694], [1566, 1567, 1696], [1566, 1696, 1695], [1567, 1568, 1696], [1568, 1697, 1696], [1568, 1569, 1698], [1568, 1698, 1697], [1569, 1570, 1698], [1570, 1699, 1698], [1570, 1571, 1700], [1570, 1700, 1699], [1571, 1572, 1700], [1572, 1701, 1700], [1572, 1573, 1702], [1572, 1702, 1701], [1573, 1574, 1702], [1574, 1703, 1702], [1574, 1575, 1704], [1574, 1704, 1703], [1575, 1576, 1704], [1576, 1705, 1704], [1576, 1577, 1706], [1576, 1706, 1705], [1577, 1578, 1706], [1578, 1707, 1706], [1578, 1579, 1708], [1578, 1708, 1707], [1579, 1580, 1708], [1580, 1709, 1708], [1580, 1581, 1710], [1580, 1710, 1709], [1581, 1582, 1710], [1582, 1711, 1710], [1582, 1583, 1712], [1582, 1712, 1711], [1583, 1584, 1712], [1584, 1713, 1712], [1584, 1585, 1714], 
[1584, 1714, 1713], [1585, 1586, 1714], [1586, 1715, 1714], [1586, 1587, 1716], [1586, 1716, 1715], [1587, 1588, 1716], [1588, 1717, 1716], [1588, 1589, 1718], [1588, 1718, 1717], [1589, 1590, 1718], [1590, 1719, 1718], [1590, 1591, 1720], [1590, 1720, 1719], [1591, 1592, 1720], [1592, 1721, 1720], [1592, 1593, 1722], [1592, 1722, 1721], [1593, 1594, 1722], [1594, 1723, 1722], [1594, 1595, 1724], [1594, 1724, 1723], [1595, 1596, 1724], [1596, 1725, 1724], [1596, 1597, 1726], [1596, 1726, 1725], [1597, 1598, 1726], [1598, 1727, 1726], [1598, 1599, 1728], [1598, 1728, 1727], [1599, 1600, 1728], [1600, 1729, 1728], [1600, 1601, 1730], [1600, 1730, 1729], [1601, 1602, 1730], [1602, 1731, 1730], [1602, 1603, 1732], [1602, 1732, 1731], [1603, 1604, 1732], [1604, 1733, 1732], [1604, 1605, 1734], [1604, 1734, 1733], [1605, 1606, 1734], [1606, 1735, 1734], [1606, 1607, 1736], [1606, 1736, 1735], [1607, 1608, 1736], [1608, 1737, 1736], [1608, 1609, 1738], [1608, 1738, 1737], [1609, 1610, 1738], [1610, 1739, 1738], [1610, 1611, 1740], [1610, 1740, 1739], [1611, 1612, 1740], [1612, 1741, 1740], [1612, 1613, 1742], [1612, 1742, 1741], [1613, 1614, 1742], [1614, 1743, 1742], [1614, 1615, 1744], [1614, 1744, 1743], [1615, 1616, 1744], [1616, 1745, 1744], [1616, 1617, 1746], [1616, 1746, 1745], [1617, 1618, 1746], [1618, 1747, 1746], [1618, 1619, 1748], [1618, 1748, 1747], [1619, 1620, 1748], [1620, 1749, 1748], [1620, 1621, 1750], [1620, 1750, 1749], [1621, 1622, 1750], [1622, 1751, 1750], [1622, 1623, 1752], [1622, 1752, 1751], [1623, 1624, 1752], [1624, 1753, 1752], [1624, 1625, 1754], [1624, 1754, 1753], [1625, 1626, 1754], [1626, 1755, 1754], [1626, 1627, 1756], [1626, 1756, 1755], [1627, 1628, 1756], [1628, 1757, 1756], [1628, 1629, 1758], [1628, 1758, 1757], [1629, 1630, 1758], [1630, 1759, 1758], [1630, 1631, 1760], [1630, 1760, 1759], [1631, 1632, 1760], [1632, 1761, 1760], [1632, 1633, 1762], [1632, 1762, 1761], [1633, 1634, 1762], [1634, 1763, 1762], [1634, 1635, 1764], [1634, 1764, 1763], [1635, 1636, 1764], [1636, 1765, 1764], [1636, 1637, 1766], [1636, 1766, 1765], [1637, 1638, 1766], [1638, 1767, 1766], [1638, 1639, 1768], [1638, 1768, 1767], [1639, 1640, 1768], [1640, 1769, 1768], [1640, 1641, 1770], [1640, 1770, 1769], [1641, 1642, 1770], [1642, 1771, 1770], [1642, 1643, 1772], [1642, 1772, 1771], [1643, 1644, 1772], [1644, 1773, 1772], [1644, 1645, 1774], [1644, 1774, 1773], [1645, 1646, 1774], [1646, 1775, 1774], [1646, 1647, 1776], [1646, 1776, 1775], [1647, 1648, 1776], [1648, 1777, 1776], [1648, 1649, 1778], [1648, 1778, 1777], [1649, 1650, 1778], [1650, 1779, 1778], [1650, 1651, 1780], [1650, 1780, 1779], [1651, 1652, 1780], [1652, 1781, 1780], [1652, 1653, 1782], [1652, 1782, 1781], [1653, 1654, 1782], [1654, 1783, 1782], [1654, 1655, 1784], [1654, 1784, 1783], [1655, 1656, 1784], [1656, 1785, 1784], [1656, 1657, 1786], [1656, 1786, 1785], [1657, 1658, 1786], [1658, 1787, 1786], [1658, 1659, 1788], [1658, 1788, 1787], [1659, 1660, 1788], [1660, 1789, 1788], [1660, 1661, 1790], [1660, 1790, 1789], [1661, 1662, 1790], [1662, 1791, 1790], [1662, 1663, 1792], [1662, 1792, 1791], [1663, 1664, 1792], [1664, 1793, 1792], [1664, 1665, 1794], [1664, 1794, 1793], [1665, 1666, 1794], [1666, 1795, 1794], [1666, 1667, 1796], [1666, 1796, 1795], [1667, 1668, 1796], [1668, 1797, 1796], [1668, 1669, 1798], [1668, 1798, 1797], [1669, 1670, 1798], [1670, 1799, 1798], [1670, 1671, 1800], [1670, 1800, 1799], [1671, 1672, 1800], [1672, 1801, 1800], [1672, 1673, 1802], [1672, 1802, 1801], [1673, 1674, 
1802], [1674, 1803, 1802], [1674, 1675, 1804], [1674, 1804, 1803], [1675, 1676, 1804], [1676, 1805, 1804], [1677, 1678, 1806], [1678, 1807, 1806], [1678, 1679, 1808], [1678, 1808, 1807], [1679, 1680, 1808], [1680, 1809, 1808], [1680, 1681, 1810], [1680, 1810, 1809], [1681, 1682, 1810], [1682, 1811, 1810], [1682, 1683, 1812], [1682, 1812, 1811], [1683, 1684, 1812], [1684, 1813, 1812], [1684, 1685, 1814], [1684, 1814, 1813], [1685, 1686, 1814], [1686, 1815, 1814], [1686, 1687, 1816], [1686, 1816, 1815], [1687, 1688, 1816], [1688, 1817, 1816], [1688, 1689, 1818], [1688, 1818, 1817], [1689, 1690, 1818], [1690, 1819, 1818], [1690, 1691, 1820], [1690, 1820, 1819], [1691, 1692, 1820], [1692, 1821, 1820], [1692, 1693, 1822], [1692, 1822, 1821], [1693, 1694, 1822], [1694, 1823, 1822], [1694, 1695, 1824], [1694, 1824, 1823], [1695, 1696, 1824], [1696, 1825, 1824], [1696, 1697, 1826], [1696, 1826, 1825], [1697, 1698, 1826], [1698, 1827, 1826], [1698, 1699, 1828], [1698, 1828, 1827], [1699, 1700, 1828], [1700, 1829, 1828], [1700, 1701, 1830], [1700, 1830, 1829], [1701, 1702, 1830], [1702, 1831, 1830], [1702, 1703, 1832], [1702, 1832, 1831], [1703, 1704, 1832], [1704, 1833, 1832], [1704, 1705, 1834], [1704, 1834, 1833], [1705, 1706, 1834], [1706, 1835, 1834], [1706, 1707, 1836], [1706, 1836, 1835], [1707, 1708, 1836], [1708, 1837, 1836], [1708, 1709, 1838], [1708, 1838, 1837], [1709, 1710, 1838], [1710, 1839, 1838], [1710, 1711, 1840], [1710, 1840, 1839], [1711, 1712, 1840], [1712, 1841, 1840], [1712, 1713, 1842], [1712, 1842, 1841], [1713, 1714, 1842], [1714, 1843, 1842], [1714, 1715, 1844], [1714, 1844, 1843], [1715, 1716, 1844], [1716, 1845, 1844], [1716, 1717, 1846], [1716, 1846, 1845], [1717, 1718, 1846], [1718, 1847, 1846], [1718, 1719, 1848], [1718, 1848, 1847], [1719, 1720, 1848], [1720, 1849, 1848], [1720, 1721, 1850], [1720, 1850, 1849], [1721, 1722, 1850], [1722, 1851, 1850], [1722, 1723, 1852], [1722, 1852, 1851], [1723, 1724, 1852], [1724, 1853, 1852], [1724, 1725, 1854], [1724, 1854, 1853], [1725, 1726, 1854], [1726, 1855, 1854], [1726, 1727, 1856], [1726, 1856, 1855], [1727, 1728, 1856], [1728, 1857, 1856], [1728, 1729, 1858], [1728, 1858, 1857], [1729, 1730, 1858], [1730, 1859, 1858], [1730, 1731, 1860], [1730, 1860, 1859], [1731, 1732, 1860], [1732, 1861, 1860], [1732, 1733, 1862], [1732, 1862, 1861], [1733, 1734, 1862], [1734, 1863, 1862], [1734, 1735, 1864], [1734, 1864, 1863], [1735, 1736, 1864], [1736, 1865, 1864], [1736, 1737, 1866], [1736, 1866, 1865], [1737, 1738, 1866], [1738, 1867, 1866], [1738, 1739, 1868], [1738, 1868, 1867], [1739, 1740, 1868], [1740, 1869, 1868], [1740, 1741, 1870], [1740, 1870, 1869], [1741, 1742, 1870], [1742, 1871, 1870], [1742, 1743, 1872], [1742, 1872, 1871], [1743, 1744, 1872], [1744, 1873, 1872], [1744, 1745, 1874], [1744, 1874, 1873], [1745, 1746, 1874], [1746, 1875, 1874], [1746, 1747, 1876], [1746, 1876, 1875], [1747, 1748, 1876], [1748, 1877, 1876], [1748, 1749, 1878], [1748, 1878, 1877], [1749, 1750, 1878], [1750, 1879, 1878], [1750, 1751, 1880], [1750, 1880, 1879], [1751, 1752, 1880], [1752, 1881, 1880], [1752, 1753, 1882], [1752, 1882, 1881], [1753, 1754, 1882], [1754, 1883, 1882], [1754, 1755, 1884], [1754, 1884, 1883], [1755, 1756, 1884], [1756, 1885, 1884], [1756, 1757, 1886], [1756, 1886, 1885], [1757, 1758, 1886], [1758, 1887, 1886], [1758, 1759, 1888], [1758, 1888, 1887], [1759, 1760, 1888], [1760, 1889, 1888], [1760, 1761, 1890], [1760, 1890, 1889], [1761, 1762, 1890], [1762, 1891, 1890], [1762, 1763, 1892], [1762, 1892, 1891], [1763, 
1764, 1892], [1764, 1893, 1892], [1764, 1765, 1894], [1764, 1894, 1893], [1765, 1766, 1894], [1766, 1895, 1894], [1766, 1767, 1896], [1766, 1896, 1895], [1767, 1768, 1896], [1768, 1897, 1896], [1768, 1769, 1898], [1768, 1898, 1897], [1769, 1770, 1898], [1770, 1899, 1898], [1770, 1771, 1900], [1770, 1900, 1899], [1771, 1772, 1900], [1772, 1901, 1900], [1772, 1773, 1902], [1772, 1902, 1901], [1773, 1774, 1902], [1774, 1903, 1902], [1774, 1775, 1904], [1774, 1904, 1903], [1775, 1776, 1904], [1776, 1905, 1904], [1776, 1777, 1906], [1776, 1906, 1905], [1777, 1778, 1906], [1778, 1907, 1906], [1778, 1779, 1908], [1778, 1908, 1907], [1779, 1780, 1908], [1780, 1909, 1908], [1780, 1781, 1910], [1780, 1910, 1909], [1781, 1782, 1910], [1782, 1911, 1910], [1782, 1783, 1912], [1782, 1912, 1911], [1783, 1784, 1912], [1784, 1913, 1912], [1784, 1785, 1914], [1784, 1914, 1913], [1785, 1786, 1914], [1786, 1915, 1914], [1786, 1787, 1916], [1786, 1916, 1915], [1787, 1788, 1916], [1788, 1917, 1916], [1788, 1789, 1918], [1788, 1918, 1917], [1789, 1790, 1918], [1790, 1919, 1918], [1790, 1791, 1920], [1790, 1920, 1919], [1791, 1792, 1920], [1792, 1921, 1920], [1792, 1793, 1922], [1792, 1922, 1921], [1793, 1794, 1922], [1794, 1923, 1922], [1794, 1795, 1924], [1794, 1924, 1923], [1795, 1796, 1924], [1796, 1925, 1924], [1796, 1797, 1926], [1796, 1926, 1925], [1797, 1798, 1926], [1798, 1927, 1926], [1798, 1799, 1928], [1798, 1928, 1927], [1799, 1800, 1928], [1800, 1929, 1928], [1800, 1801, 1930], [1800, 1930, 1929], [1801, 1802, 1930], [1802, 1931, 1930], [1802, 1803, 1932], [1802, 1932, 1931], [1803, 1804, 1932], [1804, 1933, 1932], [1804, 1805, 1934], [1804, 1934, 1933], [1806, 1807, 1936], [1806, 1936, 1935], [1807, 1808, 1936], [1808, 1937, 1936], [1808, 1809, 1938], [1808, 1938, 1937], [1809, 1810, 1938], [1810, 1939, 1938], [1810, 1811, 1940], [1810, 1940, 1939], [1811, 1812, 1940], [1812, 1941, 1940], [1812, 1813, 1942], [1812, 1942, 1941], [1813, 1814, 1942], [1814, 1943, 1942], [1814, 1815, 1944], [1814, 1944, 1943], [1815, 1816, 1944], [1816, 1945, 1944], [1816, 1817, 1946], [1816, 1946, 1945], [1817, 1818, 1946], [1818, 1947, 1946], [1818, 1819, 1948], [1818, 1948, 1947], [1819, 1820, 1948], [1820, 1949, 1948], [1820, 1821, 1950], [1820, 1950, 1949], [1821, 1822, 1950], [1822, 1951, 1950], [1822, 1823, 1952], [1822, 1952, 1951], [1823, 1824, 1952], [1824, 1953, 1952], [1824, 1825, 1954], [1824, 1954, 1953], [1825, 1826, 1954], [1826, 1955, 1954], [1826, 1827, 1956], [1826, 1956, 1955], [1827, 1828, 1956], [1828, 1957, 1956], [1828, 1829, 1958], [1828, 1958, 1957], [1829, 1830, 1958], [1830, 1959, 1958], [1830, 1831, 1960], [1830, 1960, 1959], [1831, 1832, 1960], [1832, 1961, 1960], [1832, 1833, 1962], [1832, 1962, 1961], [1833, 1834, 1962], [1834, 1963, 1962], [1834, 1835, 1964], [1834, 1964, 1963], [1835, 1836, 1964], [1836, 1965, 1964], [1836, 1837, 1966], [1836, 1966, 1965], [1837, 1838, 1966], [1838, 1967, 1966], [1838, 1839, 1968], [1838, 1968, 1967], [1839, 1840, 1968], [1840, 1969, 1968], [1840, 1841, 1970], [1840, 1970, 1969], [1841, 1842, 1970], [1842, 1971, 1970], [1842, 1843, 1972], [1842, 1972, 1971], [1843, 1844, 1972], [1844, 1973, 1972], [1844, 1845, 1974], [1844, 1974, 1973], [1845, 1846, 1974], [1846, 1975, 1974], [1846, 1847, 1976], [1846, 1976, 1975], [1847, 1848, 1976], [1848, 1977, 1976], [1848, 1849, 1978], [1848, 1978, 1977], [1849, 1850, 1978], [1850, 1979, 1978], [1850, 1851, 1980], [1850, 1980, 1979], [1851, 1852, 1980], [1852, 1981, 1980], [1852, 1853, 1982], [1852, 1982, 1981], 
[1853, 1854, 1982], [1854, 1983, 1982], [1854, 1855, 1984], [1854, 1984, 1983], [1855, 1856, 1984], [1856, 1985, 1984], [1856, 1857, 1986], [1856, 1986, 1985], [1857, 1858, 1986], [1858, 1987, 1986], [1858, 1859, 1988], [1858, 1988, 1987], [1859, 1860, 1988], [1860, 1989, 1988], [1860, 1861, 1990], [1860, 1990, 1989], [1861, 1862, 1990], [1862, 1991, 1990], [1862, 1863, 1992], [1862, 1992, 1991], [1863, 1864, 1992], [1864, 1993, 1992], [1864, 1865, 1994], [1864, 1994, 1993], [1865, 1866, 1994], [1866, 1995, 1994], [1866, 1867, 1996], [1866, 1996, 1995], [1867, 1868, 1996], [1868, 1997, 1996], [1868, 1869, 1998], [1868, 1998, 1997], [1869, 1870, 1998], [1870, 1999, 1998], [1870, 1871, 2000], [1870, 2000, 1999], [1871, 1872, 2000], [1872, 2001, 2000], [1872, 1873, 2002], [1872, 2002, 2001], [1873, 1874, 2002], [1874, 2003, 2002], [1874, 1875, 2004], [1874, 2004, 2003], [1875, 1876, 2004], [1876, 2005, 2004], [1876, 1877, 2006], [1876, 2006, 2005], [1877, 1878, 2006], [1878, 2007, 2006], [1878, 1879, 2008], [1878, 2008, 2007], [1879, 1880, 2008], [1880, 2009, 2008], [1880, 1881, 2010], [1880, 2010, 2009], [1881, 1882, 2010], [1882, 2011, 2010], [1882, 1883, 2012], [1882, 2012, 2011], [1883, 1884, 2012], [1884, 2013, 2012], [1884, 1885, 2014], [1884, 2014, 2013], [1885, 1886, 2014], [1886, 2015, 2014], [1886, 1887, 2016], [1886, 2016, 2015], [1887, 1888, 2016], [1888, 2017, 2016], [1888, 1889, 2018], [1888, 2018, 2017], [1889, 1890, 2018], [1890, 2019, 2018], [1890, 1891, 2020], [1890, 2020, 2019], [1891, 1892, 2020], [1892, 2021, 2020], [1892, 1893, 2022], [1892, 2022, 2021], [1893, 1894, 2022], [1894, 2023, 2022], [1894, 1895, 2024], [1894, 2024, 2023], [1895, 1896, 2024], [1896, 2025, 2024], [1896, 1897, 2026], [1896, 2026, 2025], [1897, 1898, 2026], [1898, 2027, 2026], [1898, 1899, 2028], [1898, 2028, 2027], [1899, 1900, 2028], [1900, 2029, 2028], [1900, 1901, 2030], [1900, 2030, 2029], [1901, 1902, 2030], [1902, 2031, 2030], [1902, 1903, 2032], [1902, 2032, 2031], [1903, 1904, 2032], [1904, 2033, 2032], [1904, 1905, 2034], [1904, 2034, 2033], [1905, 1906, 2034], [1906, 2035, 2034], [1906, 1907, 2036], [1906, 2036, 2035], [1907, 1908, 2036], [1908, 2037, 2036], [1908, 1909, 2038], [1908, 2038, 2037], [1909, 1910, 2038], [1910, 2039, 2038], [1910, 1911, 2040], [1910, 2040, 2039], [1911, 1912, 2040], [1912, 2041, 2040], [1912, 1913, 2042], [1912, 2042, 2041], [1913, 1914, 2042], [1914, 2043, 2042], [1914, 1915, 2044], [1914, 2044, 2043], [1915, 1916, 2044], [1916, 2045, 2044], [1916, 1917, 2046], [1916, 2046, 2045], [1917, 1918, 2046], [1918, 2047, 2046], [1918, 1919, 2048], [1918, 2048, 2047], [1919, 1920, 2048], [1920, 2049, 2048], [1920, 1921, 2050], [1920, 2050, 2049], [1921, 1922, 2050], [1922, 2051, 2050], [1922, 1923, 2052], [1922, 2052, 2051], [1923, 1924, 2052], [1924, 2053, 2052], [1924, 1925, 2054], [1924, 2054, 2053], [1925, 1926, 2054], [1926, 2055, 2054], [1926, 1927, 2056], [1926, 2056, 2055], [1927, 1928, 2056], [1928, 2057, 2056], [1928, 1929, 2058], [1928, 2058, 2057], [1929, 1930, 2058], [1930, 2059, 2058], [1930, 1931, 2060], [1930, 2060, 2059], [1931, 1932, 2060], [1932, 2061, 2060], [1932, 1933, 2062], [1932, 2062, 2061], [1933, 1934, 2062], [1934, 2063, 2062], [1935, 1936, 2064], [1936, 2065, 2064], [1936, 1937, 2066], [1936, 2066, 2065], [1937, 1938, 2066], [1938, 2067, 2066], [1938, 1939, 2068], [1938, 2068, 2067], [1939, 1940, 2068], [1940, 2069, 2068], [1940, 1941, 2070], [1940, 2070, 2069], [1941, 1942, 2070], [1942, 2071, 2070], [1942, 1943, 2072], [1942, 2072, 
2071], [1943, 1944, 2072], [1944, 2073, 2072], [1944, 1945, 2074], [1944, 2074, 2073], [1945, 1946, 2074], [1946, 2075, 2074], [1946, 1947, 2076], [1946, 2076, 2075], [1947, 1948, 2076], [1948, 2077, 2076], [1948, 1949, 2078], [1948, 2078, 2077], [1949, 1950, 2078], [1950, 2079, 2078], [1950, 1951, 2080], [1950, 2080, 2079], [1951, 1952, 2080], [1952, 2081, 2080], [1952, 1953, 2082], [1952, 2082, 2081], [1953, 1954, 2082], [1954, 2083, 2082], [1954, 1955, 2084], [1954, 2084, 2083], [1955, 1956, 2084], [1956, 2085, 2084], [1956, 1957, 2086], [1956, 2086, 2085], [1957, 1958, 2086], [1958, 2087, 2086], [1958, 1959, 2088], [1958, 2088, 2087], [1959, 1960, 2088], [1960, 2089, 2088], [1960, 1961, 2090], [1960, 2090, 2089], [1961, 1962, 2090], [1962, 2091, 2090], [1962, 1963, 2092], [1962, 2092, 2091], [1963, 1964, 2092], [1964, 2093, 2092], [1964, 1965, 2094], [1964, 2094, 2093], [1965, 1966, 2094], [1966, 2095, 2094], [1966, 1967, 2096], [1966, 2096, 2095], [1967, 1968, 2096], [1968, 2097, 2096], [1968, 1969, 2098], [1968, 2098, 2097], [1969, 1970, 2098], [1970, 2099, 2098], [1970, 1971, 2100], [1970, 2100, 2099], [1971, 1972, 2100], [1972, 2101, 2100], [1972, 1973, 2102], [1972, 2102, 2101], [1973, 1974, 2102], [1974, 2103, 2102], [1974, 1975, 2104], [1974, 2104, 2103], [1975, 1976, 2104], [1976, 2105, 2104], [1976, 1977, 2106], [1976, 2106, 2105], [1977, 1978, 2106], [1978, 2107, 2106], [1978, 1979, 2108], [1978, 2108, 2107], [1979, 1980, 2108], [1980, 2109, 2108], [1980, 1981, 2110], [1980, 2110, 2109], [1981, 1982, 2110], [1982, 2111, 2110], [1982, 1983, 2112], [1982, 2112, 2111], [1983, 1984, 2112], [1984, 2113, 2112], [1984, 1985, 2114], [1984, 2114, 2113], [1985, 1986, 2114], [1986, 2115, 2114], [1986, 1987, 2116], [1986, 2116, 2115], [1987, 1988, 2116], [1988, 2117, 2116], [1988, 1989, 2118], [1988, 2118, 2117], [1989, 1990, 2118], [1990, 2119, 2118], [1990, 1991, 2120], [1990, 2120, 2119], [1991, 1992, 2120], [1992, 2121, 2120], [1992, 1993, 2122], [1992, 2122, 2121], [1993, 1994, 2122], [1994, 2123, 2122], [1994, 1995, 2124], [1994, 2124, 2123], [1995, 1996, 2124], [1996, 2125, 2124], [1996, 1997, 2126], [1996, 2126, 2125], [1997, 1998, 2126], [1998, 2127, 2126], [1998, 1999, 2128], [1998, 2128, 2127], [1999, 2000, 2128], [2000, 2129, 2128], [2000, 2001, 2130], [2000, 2130, 2129], [2001, 2002, 2130], [2002, 2131, 2130], [2002, 2003, 2132], [2002, 2132, 2131], [2003, 2004, 2132], [2004, 2133, 2132], [2004, 2005, 2134], [2004, 2134, 2133], [2005, 2006, 2134], [2006, 2135, 2134], [2006, 2007, 2136], [2006, 2136, 2135], [2007, 2008, 2136], [2008, 2137, 2136], [2008, 2009, 2138], [2008, 2138, 2137], [2009, 2010, 2138], [2010, 2139, 2138], [2010, 2011, 2140], [2010, 2140, 2139], [2011, 2012, 2140], [2012, 2141, 2140], [2012, 2013, 2142], [2012, 2142, 2141], [2013, 2014, 2142], [2014, 2143, 2142], [2014, 2015, 2144], [2014, 2144, 2143], [2015, 2016, 2144], [2016, 2145, 2144], [2016, 2017, 2146], [2016, 2146, 2145], [2017, 2018, 2146], [2018, 2147, 2146], [2018, 2019, 2148], [2018, 2148, 2147], [2019, 2020, 2148], [2020, 2149, 2148], [2020, 2021, 2150], [2020, 2150, 2149], [2021, 2022, 2150], [2022, 2151, 2150], [2022, 2023, 2152], [2022, 2152, 2151], [2023, 2024, 2152], [2024, 2153, 2152], [2024, 2025, 2154], [2024, 2154, 2153], [2025, 2026, 2154], [2026, 2155, 2154], [2026, 2027, 2156], [2026, 2156, 2155], [2027, 2028, 2156], [2028, 2157, 2156], [2028, 2029, 2158], [2028, 2158, 2157], [2029, 2030, 2158], [2030, 2159, 2158], [2030, 2031, 2160], [2030, 2160, 2159], [2031, 2032, 2160], [2032, 
2161, 2160], [2032, 2033, 2162], [2032, 2162, 2161], [2033, 2034, 2162], [2034, 2163, 2162], [2034, 2035, 2164], [2034, 2164, 2163], [2035, 2036, 2164], [2036, 2165, 2164], [2036, 2037, 2166], [2036, 2166, 2165], [2037, 2038, 2166], [2038, 2167, 2166], [2038, 2039, 2168], [2038, 2168, 2167], [2039, 2040, 2168], [2040, 2169, 2168], [2040, 2041, 2170], [2040, 2170, 2169], [2041, 2042, 2170], [2042, 2171, 2170], [2042, 2043, 2172], [2042, 2172, 2171], [2043, 2044, 2172], [2044, 2173, 2172], [2044, 2045, 2174], [2044, 2174, 2173], [2045, 2046, 2174], [2046, 2175, 2174], [2046, 2047, 2176], [2046, 2176, 2175], [2047, 2048, 2176], [2048, 2177, 2176], [2048, 2049, 2178], [2048, 2178, 2177], [2049, 2050, 2178], [2050, 2179, 2178], [2050, 2051, 2180], [2050, 2180, 2179], [2051, 2052, 2180], [2052, 2181, 2180], [2052, 2053, 2182], [2052, 2182, 2181], [2053, 2054, 2182], [2054, 2183, 2182], [2054, 2055, 2184], [2054, 2184, 2183], [2055, 2056, 2184], [2056, 2185, 2184], [2056, 2057, 2186], [2056, 2186, 2185], [2057, 2058, 2186], [2058, 2187, 2186], [2058, 2059, 2188], [2058, 2188, 2187], [2059, 2060, 2188], [2060, 2189, 2188], [2060, 2061, 2190], [2060, 2190, 2189], [2061, 2062, 2190], [2062, 2191, 2190], [2062, 2063, 2192], [2062, 2192, 2191], [2064, 2065, 2194], [2064, 2194, 2193], [2065, 2066, 2194], [2066, 2195, 2194], [2066, 2067, 2196], [2066, 2196, 2195], [2067, 2068, 2196], [2068, 2197, 2196], [2068, 2069, 2198], [2068, 2198, 2197], [2069, 2070, 2198], [2070, 2199, 2198], [2070, 2071, 2200], [2070, 2200, 2199], [2071, 2072, 2200], [2072, 2201, 2200], [2072, 2073, 2202], [2072, 2202, 2201], [2073, 2074, 2202], [2074, 2203, 2202], [2074, 2075, 2204], [2074, 2204, 2203], [2075, 2076, 2204], [2076, 2205, 2204], [2076, 2077, 2206], [2076, 2206, 2205], [2077, 2078, 2206], [2078, 2207, 2206], [2078, 2079, 2208], [2078, 2208, 2207], [2079, 2080, 2208], [2080, 2209, 2208], [2080, 2081, 2210], [2080, 2210, 2209], [2081, 2082, 2210], [2082, 2211, 2210], [2082, 2083, 2212], [2082, 2212, 2211], [2083, 2084, 2212], [2084, 2213, 2212], [2084, 2085, 2214], [2084, 2214, 2213], [2085, 2086, 2214], [2086, 2215, 2214], [2086, 2087, 2216], [2086, 2216, 2215], [2087, 2088, 2216], [2088, 2217, 2216], [2088, 2089, 2218], [2088, 2218, 2217], [2089, 2090, 2218], [2090, 2219, 2218], [2090, 2091, 2220], [2090, 2220, 2219], [2091, 2092, 2220], [2092, 2221, 2220], [2092, 2093, 2222], [2092, 2222, 2221], [2093, 2094, 2222], [2094, 2223, 2222], [2094, 2095, 2224], [2094, 2224, 2223], [2095, 2096, 2224], [2096, 2225, 2224], [2096, 2097, 2226], [2096, 2226, 2225], [2097, 2098, 2226], [2098, 2227, 2226], [2098, 2099, 2228], [2098, 2228, 2227], [2099, 2100, 2228], [2100, 2229, 2228], [2100, 2101, 2230], [2100, 2230, 2229], [2101, 2102, 2230], [2102, 2231, 2230], [2102, 2103, 2232], [2102, 2232, 2231], [2103, 2104, 2232], [2104, 2233, 2232], [2104, 2105, 2234], [2104, 2234, 2233], [2105, 2106, 2234], [2106, 2235, 2234], [2106, 2107, 2236], [2106, 2236, 2235], [2107, 2108, 2236], [2108, 2237, 2236], [2108, 2109, 2238], [2108, 2238, 2237], [2109, 2110, 2238], [2110, 2239, 2238], [2110, 2111, 2240], [2110, 2240, 2239], [2111, 2112, 2240], [2112, 2241, 2240], [2112, 2113, 2242], [2112, 2242, 2241], [2113, 2114, 2242], [2114, 2243, 2242], [2114, 2115, 2244], [2114, 2244, 2243], [2115, 2116, 2244], [2116, 2245, 2244], [2116, 2117, 2246], [2116, 2246, 2245], [2117, 2118, 2246], [2118, 2247, 2246], [2118, 2119, 2248], [2118, 2248, 2247], [2119, 2120, 2248], [2120, 2249, 2248], [2120, 2121, 2250], [2120, 2250, 2249], [2121, 2122, 2250], 
[2122, 2251, 2250], [2122, 2123, 2252], [2122, 2252, 2251], [2123, 2124, 2252], [2124, 2253, 2252], [2124, 2125, 2254], [2124, 2254, 2253], [2125, 2126, 2254], [2126, 2255, 2254], [2126, 2127, 2256], [2126, 2256, 2255], [2127, 2128, 2256], [2128, 2257, 2256], [2128, 2129, 2258], [2128, 2258, 2257], [2129, 2130, 2258], [2130, 2259, 2258], [2130, 2131, 2260], [2130, 2260, 2259], [2131, 2132, 2260], [2132, 2261, 2260], [2132, 2133, 2262], [2132, 2262, 2261], [2133, 2134, 2262], [2134, 2263, 2262], [2134, 2135, 2264], [2134, 2264, 2263], [2135, 2136, 2264], [2136, 2265, 2264], [2136, 2137, 2266], [2136, 2266, 2265], [2137, 2138, 2266], [2138, 2267, 2266], [2138, 2139, 2268], [2138, 2268, 2267], [2139, 2140, 2268], [2140, 2269, 2268], [2140, 2141, 2270], [2140, 2270, 2269], [2141, 2142, 2270], [2142, 2271, 2270], [2142, 2143, 2272], [2142, 2272, 2271], [2143, 2144, 2272], [2144, 2273, 2272], [2144, 2145, 2274], [2144, 2274, 2273], [2145, 2146, 2274], [2146, 2275, 2274], [2146, 2147, 2276], [2146, 2276, 2275], [2147, 2148, 2276], [2148, 2277, 2276], [2148, 2149, 2278], [2148, 2278, 2277], [2149, 2150, 2278], [2150, 2279, 2278], [2150, 2151, 2280], [2150, 2280, 2279], [2151, 2152, 2280], [2152, 2281, 2280], [2152, 2153, 2282], [2152, 2282, 2281], [2153, 2154, 2282], [2154, 2283, 2282], [2154, 2155, 2284], [2154, 2284, 2283], [2155, 2156, 2284], [2156, 2285, 2284], [2156, 2157, 2286], [2156, 2286, 2285], [2157, 2158, 2286], [2158, 2287, 2286], [2158, 2159, 2288], [2158, 2288, 2287], [2159, 2160, 2288], [2160, 2289, 2288], [2160, 2161, 2290], [2160, 2290, 2289], [2161, 2162, 2290], [2162, 2291, 2290], [2162, 2163, 2292], [2162, 2292, 2291], [2163, 2164, 2292], [2164, 2293, 2292], [2164, 2165, 2294], [2164, 2294, 2293], [2165, 2166, 2294], [2166, 2295, 2294], [2166, 2167, 2296], [2166, 2296, 2295], [2167, 2168, 2296], [2168, 2297, 2296], [2168, 2169, 2298], [2168, 2298, 2297], [2169, 2170, 2298], [2170, 2299, 2298], [2170, 2171, 2300], [2170, 2300, 2299], [2171, 2172, 2300], [2172, 2301, 2300], [2172, 2173, 2302], [2172, 2302, 2301], [2173, 2174, 2302], [2174, 2303, 2302], [2174, 2175, 2304], [2174, 2304, 2303], [2175, 2176, 2304], [2176, 2305, 2304], [2176, 2177, 2306], [2176, 2306, 2305], [2177, 2178, 2306], [2178, 2307, 2306], [2178, 2179, 2308], [2178, 2308, 2307], [2179, 2180, 2308], [2180, 2309, 2308], [2180, 2181, 2310], [2180, 2310, 2309], [2181, 2182, 2310], [2182, 2311, 2310], [2182, 2183, 2312], [2182, 2312, 2311], [2183, 2184, 2312], [2184, 2313, 2312], [2184, 2185, 2314], [2184, 2314, 2313], [2185, 2186, 2314], [2186, 2315, 2314], [2186, 2187, 2316], [2186, 2316, 2315], [2187, 2188, 2316], [2188, 2317, 2316], [2188, 2189, 2318], [2188, 2318, 2317], [2189, 2190, 2318], [2190, 2319, 2318], [2190, 2191, 2320], [2190, 2320, 2319], [2191, 2192, 2320], [2192, 2321, 2320], [2193, 2194, 2322], [2194, 2323, 2322], [2194, 2195, 2324], [2194, 2324, 2323], [2195, 2196, 2324], [2196, 2325, 2324], [2196, 2197, 2326], [2196, 2326, 2325], [2197, 2198, 2326], [2198, 2327, 2326], [2198, 2199, 2328], [2198, 2328, 2327], [2199, 2200, 2328], [2200, 2329, 2328], [2200, 2201, 2330], [2200, 2330, 2329], [2201, 2202, 2330], [2202, 2331, 2330], [2202, 2203, 2332], [2202, 2332, 2331], [2203, 2204, 2332], [2204, 2333, 2332], [2204, 2205, 2334], [2204, 2334, 2333], [2205, 2206, 2334], [2206, 2335, 2334], [2206, 2207, 2336], [2206, 2336, 2335], [2207, 2208, 2336], [2208, 2337, 2336], [2208, 2209, 2338], [2208, 2338, 2337], [2209, 2210, 2338], [2210, 2339, 2338], [2210, 2211, 2340], [2210, 2340, 2339], [2211, 2212, 
2340], [2212, 2341, 2340], [2212, 2213, 2342], [2212, 2342, 2341], [2213, 2214, 2342], [2214, 2343, 2342], [2214, 2215, 2344], [2214, 2344, 2343], [2215, 2216, 2344], [2216, 2345, 2344], [2216, 2217, 2346], [2216, 2346, 2345], [2217, 2218, 2346], [2218, 2347, 2346], [2218, 2219, 2348], [2218, 2348, 2347], [2219, 2220, 2348], [2220, 2349, 2348], [2220, 2221, 2350], [2220, 2350, 2349], [2221, 2222, 2350], [2222, 2351, 2350], [2222, 2223, 2352], [2222, 2352, 2351], [2223, 2224, 2352], [2224, 2353, 2352], [2224, 2225, 2354], [2224, 2354, 2353], [2225, 2226, 2354], [2226, 2355, 2354], [2226, 2227, 2356], [2226, 2356, 2355], [2227, 2228, 2356], [2228, 2357, 2356], [2228, 2229, 2358], [2228, 2358, 2357], [2229, 2230, 2358], [2230, 2359, 2358], [2230, 2231, 2360], [2230, 2360, 2359], [2231, 2232, 2360], [2232, 2361, 2360], [2232, 2233, 2362], [2232, 2362, 2361], [2233, 2234, 2362], [2234, 2363, 2362], [2234, 2235, 2364], [2234, 2364, 2363], [2235, 2236, 2364], [2236, 2365, 2364], [2236, 2237, 2366], [2236, 2366, 2365], [2237, 2238, 2366], [2238, 2367, 2366], [2238, 2239, 2368], [2238, 2368, 2367], [2239, 2240, 2368], [2240, 2369, 2368], [2240, 2241, 2370], [2240, 2370, 2369], [2241, 2242, 2370], [2242, 2371, 2370], [2242, 2243, 2372], [2242, 2372, 2371], [2243, 2244, 2372], [2244, 2373, 2372], [2244, 2245, 2374], [2244, 2374, 2373], [2245, 2246, 2374], [2246, 2375, 2374], [2246, 2247, 2376], [2246, 2376, 2375], [2247, 2248, 2376], [2248, 2377, 2376], [2248, 2249, 2378], [2248, 2378, 2377], [2249, 2250, 2378], [2250, 2379, 2378], [2250, 2251, 2380], [2250, 2380, 2379], [2251, 2252, 2380], [2252, 2381, 2380], [2252, 2253, 2382], [2252, 2382, 2381], [2253, 2254, 2382], [2254, 2383, 2382], [2254, 2255, 2384], [2254, 2384, 2383], [2255, 2256, 2384], [2256, 2385, 2384], [2256, 2257, 2386], [2256, 2386, 2385], [2257, 2258, 2386], [2258, 2387, 2386], [2258, 2259, 2388], [2258, 2388, 2387], [2259, 2260, 2388], [2260, 2389, 2388], [2260, 2261, 2390], [2260, 2390, 2389], [2261, 2262, 2390], [2262, 2391, 2390], [2262, 2263, 2392], [2262, 2392, 2391], [2263, 2264, 2392], [2264, 2393, 2392], [2264, 2265, 2394], [2264, 2394, 2393], [2265, 2266, 2394], [2266, 2395, 2394], [2266, 2267, 2396], [2266, 2396, 2395], [2267, 2268, 2396], [2268, 2397, 2396], [2268, 2269, 2398], [2268, 2398, 2397], [2269, 2270, 2398], [2270, 2399, 2398], [2270, 2271, 2400], [2270, 2400, 2399], [2271, 2272, 2400], [2272, 2401, 2400], [2272, 2273, 2402], [2272, 2402, 2401], [2273, 2274, 2402], [2274, 2403, 2402], [2274, 2275, 2404], [2274, 2404, 2403], [2275, 2276, 2404], [2276, 2405, 2404], [2276, 2277, 2406], [2276, 2406, 2405], [2277, 2278, 2406], [2278, 2407, 2406], [2278, 2279, 2408], [2278, 2408, 2407], [2279, 2280, 2408], [2280, 2409, 2408], [2280, 2281, 2410], [2280, 2410, 2409], [2281, 2282, 2410], [2282, 2411, 2410], [2282, 2283, 2412], [2282, 2412, 2411], [2283, 2284, 2412], [2284, 2413, 2412], [2284, 2285, 2414], [2284, 2414, 2413], [2285, 2286, 2414], [2286, 2415, 2414], [2286, 2287, 2416], [2286, 2416, 2415], [2287, 2288, 2416], [2288, 2417, 2416], [2288, 2289, 2418], [2288, 2418, 2417], [2289, 2290, 2418], [2290, 2419, 2418], [2290, 2291, 2420], [2290, 2420, 2419], [2291, 2292, 2420], [2292, 2421, 2420], [2292, 2293, 2422], [2292, 2422, 2421], [2293, 2294, 2422], [2294, 2423, 2422], [2294, 2295, 2424], [2294, 2424, 2423], [2295, 2296, 2424], [2296, 2425, 2424], [2296, 2297, 2426], [2296, 2426, 2425], [2297, 2298, 2426], [2298, 2427, 2426], [2298, 2299, 2428], [2298, 2428, 2427], [2299, 2300, 2428], [2300, 2429, 2428], [2300, 
2301, 2430], [2300, 2430, 2429], [2301, 2302, 2430], [2302, 2431, 2430], [2302, 2303, 2432], [2302, 2432, 2431], [2303, 2304, 2432], [2304, 2433, 2432], [2304, 2305, 2434], [2304, 2434, 2433], [2305, 2306, 2434], [2306, 2435, 2434], [2306, 2307, 2436], [2306, 2436, 2435], [2307, 2308, 2436], [2308, 2437, 2436], [2308, 2309, 2438], [2308, 2438, 2437], [2309, 2310, 2438], [2310, 2439, 2438], [2310, 2311, 2440], [2310, 2440, 2439], [2311, 2312, 2440], [2312, 2441, 2440], [2312, 2313, 2442], [2312, 2442, 2441], [2313, 2314, 2442], [2314, 2443, 2442], [2314, 2315, 2444], [2314, 2444, 2443], [2315, 2316, 2444], [2316, 2445, 2444], [2316, 2317, 2446], [2316, 2446, 2445], [2317, 2318, 2446], [2318, 2447, 2446], [2318, 2319, 2448], [2318, 2448, 2447], [2319, 2320, 2448], [2320, 2449, 2448], [2320, 2321, 2450], [2320, 2450, 2449], [2322, 2323, 2452], [2322, 2452, 2451], [2323, 2324, 2452], [2324, 2453, 2452], [2324, 2325, 2454], [2324, 2454, 2453], [2325, 2326, 2454], [2326, 2455, 2454], [2326, 2327, 2456], [2326, 2456, 2455], [2327, 2328, 2456], [2328, 2457, 2456], [2328, 2329, 2458], [2328, 2458, 2457], [2329, 2330, 2458], [2330, 2459, 2458], [2330, 2331, 2460], [2330, 2460, 2459], [2331, 2332, 2460], [2332, 2461, 2460], [2332, 2333, 2462], [2332, 2462, 2461], [2333, 2334, 2462], [2334, 2463, 2462], [2334, 2335, 2464], [2334, 2464, 2463], [2335, 2336, 2464], [2336, 2465, 2464], [2336, 2337, 2466], [2336, 2466, 2465], [2337, 2338, 2466], [2338, 2467, 2466], [2338, 2339, 2468], [2338, 2468, 2467], [2339, 2340, 2468], [2340, 2469, 2468], [2340, 2341, 2470], [2340, 2470, 2469], [2341, 2342, 2470], [2342, 2471, 2470], [2342, 2343, 2472], [2342, 2472, 2471], [2343, 2344, 2472], [2344, 2473, 2472], [2344, 2345, 2474], [2344, 2474, 2473], [2345, 2346, 2474], [2346, 2475, 2474], [2346, 2347, 2476], [2346, 2476, 2475], [2347, 2348, 2476], [2348, 2477, 2476], [2348, 2349, 2478], [2348, 2478, 2477], [2349, 2350, 2478], [2350, 2479, 2478], [2350, 2351, 2480], [2350, 2480, 2479], [2351, 2352, 2480], [2352, 2481, 2480], [2352, 2353, 2482], [2352, 2482, 2481], [2353, 2354, 2482], [2354, 2483, 2482], [2354, 2355, 2484], [2354, 2484, 2483], [2355, 2356, 2484], [2356, 2485, 2484], [2356, 2357, 2486], [2356, 2486, 2485], [2357, 2358, 2486], [2358, 2487, 2486], [2358, 2359, 2488], [2358, 2488, 2487], [2359, 2360, 2488], [2360, 2489, 2488], [2360, 2361, 2490], [2360, 2490, 2489], [2361, 2362, 2490], [2362, 2491, 2490], [2362, 2363, 2492], [2362, 2492, 2491], [2363, 2364, 2492], [2364, 2493, 2492], [2364, 2365, 2494], [2364, 2494, 2493], [2365, 2366, 2494], [2366, 2495, 2494], [2366, 2367, 2496], [2366, 2496, 2495], [2367, 2368, 2496], [2368, 2497, 2496], [2368, 2369, 2498], [2368, 2498, 2497], [2369, 2370, 2498], [2370, 2499, 2498], [2370, 2371, 2500], [2370, 2500, 2499], [2371, 2372, 2500], [2372, 2501, 2500], [2372, 2373, 2502], [2372, 2502, 2501], [2373, 2374, 2502], [2374, 2503, 2502], [2374, 2375, 2504], [2374, 2504, 2503], [2375, 2376, 2504], [2376, 2505, 2504], [2376, 2377, 2506], [2376, 2506, 2505], [2377, 2378, 2506], [2378, 2507, 2506], [2378, 2379, 2508], [2378, 2508, 2507], [2379, 2380, 2508], [2380, 2509, 2508], [2380, 2381, 2510], [2380, 2510, 2509], [2381, 2382, 2510], [2382, 2511, 2510], [2382, 2383, 2512], [2382, 2512, 2511], [2383, 2384, 2512], [2384, 2513, 2512], [2384, 2385, 2514], [2384, 2514, 2513], [2385, 2386, 2514], [2386, 2515, 2514], [2386, 2387, 2516], [2386, 2516, 2515], [2387, 2388, 2516], [2388, 2517, 2516], [2388, 2389, 2518], [2388, 2518, 2517], [2389, 2390, 2518], [2390, 2519, 2518], 
[2390, 2391, 2520], [2390, 2520, 2519], [2391, 2392, 2520], [2392, 2521, 2520], [2392, 2393, 2522], [2392, 2522, 2521], [2393, 2394, 2522], [2394, 2523, 2522], [2394, 2395, 2524], [2394, 2524, 2523], [2395, 2396, 2524], [2396, 2525, 2524], [2396, 2397, 2526], [2396, 2526, 2525], [2397, 2398, 2526], [2398, 2527, 2526], [2398, 2399, 2528], [2398, 2528, 2527], [2399, 2400, 2528], [2400, 2529, 2528], [2400, 2401, 2530], [2400, 2530, 2529], [2401, 2402, 2530], [2402, 2531, 2530], [2402, 2403, 2532], [2402, 2532, 2531], [2403, 2404, 2532], [2404, 2533, 2532], [2404, 2405, 2534], [2404, 2534, 2533], [2405, 2406, 2534], [2406, 2535, 2534], [2406, 2407, 2536], [2406, 2536, 2535], [2407, 2408, 2536], [2408, 2537, 2536], [2408, 2409, 2538], [2408, 2538, 2537], [2409, 2410, 2538], [2410, 2539, 2538], [2410, 2411, 2540], [2410, 2540, 2539], [2411, 2412, 2540], [2412, 2541, 2540], [2412, 2413, 2542], [2412, 2542, 2541], [2413, 2414, 2542], [2414, 2543, 2542], [2414, 2415, 2544], [2414, 2544, 2543], [2415, 2416, 2544], [2416, 2545, 2544], [2416, 2417, 2546], [2416, 2546, 2545], [2417, 2418, 2546], [2418, 2547, 2546], [2418, 2419, 2548], [2418, 2548, 2547], [2419, 2420, 2548], [2420, 2549, 2548], [2420, 2421, 2550], [2420, 2550, 2549], [2421, 2422, 2550], [2422, 2551, 2550], [2422, 2423, 2552], [2422, 2552, 2551], [2423, 2424, 2552], [2424, 2553, 2552], [2424, 2425, 2554], [2424, 2554, 2553], [2425, 2426, 2554], [2426, 2555, 2554], [2426, 2427, 2556], [2426, 2556, 2555], [2427, 2428, 2556], [2428, 2557, 2556], [2428, 2429, 2558], [2428, 2558, 2557], [2429, 2430, 2558], [2430, 2559, 2558], [2430, 2431, 2560], [2430, 2560, 2559], [2431, 2432, 2560], [2432, 2561, 2560], [2432, 2433, 2562], [2432, 2562, 2561], [2433, 2434, 2562], [2434, 2563, 2562], [2434, 2435, 2564], [2434, 2564, 2563], [2435, 2436, 2564], [2436, 2565, 2564], [2436, 2437, 2566], [2436, 2566, 2565], [2437, 2438, 2566], [2438, 2567, 2566], [2438, 2439, 2568], [2438, 2568, 2567], [2439, 2440, 2568], [2440, 2569, 2568], [2440, 2441, 2570], [2440, 2570, 2569], [2441, 2442, 2570], [2442, 2571, 2570], [2442, 2443, 2572], [2442, 2572, 2571], [2443, 2444, 2572], [2444, 2573, 2572], [2444, 2445, 2574], [2444, 2574, 2573], [2445, 2446, 2574], [2446, 2575, 2574], [2446, 2447, 2576], [2446, 2576, 2575], [2447, 2448, 2576], [2448, 2577, 2576], [2448, 2449, 2578], [2448, 2578, 2577], [2449, 2450, 2578], [2450, 2579, 2578], [2451, 2452, 2580], [2452, 2581, 2580], [2452, 2453, 2582], [2452, 2582, 2581], [2453, 2454, 2582], [2454, 2583, 2582], [2454, 2455, 2584], [2454, 2584, 2583], [2455, 2456, 2584], [2456, 2585, 2584], [2456, 2457, 2586], [2456, 2586, 2585], [2457, 2458, 2586], [2458, 2587, 2586], [2458, 2459, 2588], [2458, 2588, 2587], [2459, 2460, 2588], [2460, 2589, 2588], [2460, 2461, 2590], [2460, 2590, 2589], [2461, 2462, 2590], [2462, 2591, 2590], [2462, 2463, 2592], [2462, 2592, 2591], [2463, 2464, 2592], [2464, 2593, 2592], [2464, 2465, 2594], [2464, 2594, 2593], [2465, 2466, 2594], [2466, 2595, 2594], [2466, 2467, 2596], [2466, 2596, 2595], [2467, 2468, 2596], [2468, 2597, 2596], [2468, 2469, 2598], [2468, 2598, 2597], [2469, 2470, 2598], [2470, 2599, 2598], [2470, 2471, 2600], [2470, 2600, 2599], [2471, 2472, 2600], [2472, 2601, 2600], [2472, 2473, 2602], [2472, 2602, 2601], [2473, 2474, 2602], [2474, 2603, 2602], [2474, 2475, 2604], [2474, 2604, 2603], [2475, 2476, 2604], [2476, 2605, 2604], [2476, 2477, 2606], [2476, 2606, 2605], [2477, 2478, 2606], [2478, 2607, 2606], [2478, 2479, 2608], [2478, 2608, 2607], [2479, 2480, 2608], [2480, 2609, 
2608], [2480, 2481, 2610], [2480, 2610, 2609], [2481, 2482, 2610], [2482, 2611, 2610], [2482, 2483, 2612], [2482, 2612, 2611], [2483, 2484, 2612], [2484, 2613, 2612], [2484, 2485, 2614], [2484, 2614, 2613], [2485, 2486, 2614], [2486, 2615, 2614], [2486, 2487, 2616], [2486, 2616, 2615], [2487, 2488, 2616], [2488, 2617, 2616], [2488, 2489, 2618], [2488, 2618, 2617], [2489, 2490, 2618], [2490, 2619, 2618], [2490, 2491, 2620], [2490, 2620, 2619], [2491, 2492, 2620], [2492, 2621, 2620], [2492, 2493, 2622], [2492, 2622, 2621], [2493, 2494, 2622], [2494, 2623, 2622], [2494, 2495, 2624], [2494, 2624, 2623], [2495, 2496, 2624], [2496, 2625, 2624], [2496, 2497, 2626], [2496, 2626, 2625], [2497, 2498, 2626], [2498, 2627, 2626], [2498, 2499, 2628], [2498, 2628, 2627], [2499, 2500, 2628], [2500, 2629, 2628], [2500, 2501, 2630], [2500, 2630, 2629], [2501, 2502, 2630], [2502, 2631, 2630], [2502, 2503, 2632], [2502, 2632, 2631], [2503, 2504, 2632], [2504, 2633, 2632], [2504, 2505, 2634], [2504, 2634, 2633], [2505, 2506, 2634], [2506, 2635, 2634], [2506, 2507, 2636], [2506, 2636, 2635], [2507, 2508, 2636], [2508, 2637, 2636], [2508, 2509, 2638], [2508, 2638, 2637], [2509, 2510, 2638], [2510, 2639, 2638], [2510, 2511, 2640], [2510, 2640, 2639], [2511, 2512, 2640], [2512, 2641, 2640], [2512, 2513, 2642], [2512, 2642, 2641], [2513, 2514, 2642], [2514, 2643, 2642], [2514, 2515, 2644], [2514, 2644, 2643], [2515, 2516, 2644], [2516, 2645, 2644], [2516, 2517, 2646], [2516, 2646, 2645], [2517, 2518, 2646], [2518, 2647, 2646], [2518, 2519, 2648], [2518, 2648, 2647], [2519, 2520, 2648], [2520, 2649, 2648], [2520, 2521, 2650], [2520, 2650, 2649], [2521, 2522, 2650], [2522, 2651, 2650], [2522, 2523, 2652], [2522, 2652, 2651], [2523, 2524, 2652], [2524, 2653, 2652], [2524, 2525, 2654], [2524, 2654, 2653], [2525, 2526, 2654], [2526, 2655, 2654], [2526, 2527, 2656], [2526, 2656, 2655], [2527, 2528, 2656], [2528, 2657, 2656], [2528, 2529, 2658], [2528, 2658, 2657], [2529, 2530, 2658], [2530, 2659, 2658], [2530, 2531, 2660], [2530, 2660, 2659], [2531, 2532, 2660], [2532, 2661, 2660], [2532, 2533, 2662], [2532, 2662, 2661], [2533, 2534, 2662], [2534, 2663, 2662], [2534, 2535, 2664], [2534, 2664, 2663], [2535, 2536, 2664], [2536, 2665, 2664], [2536, 2537, 2666], [2536, 2666, 2665], [2537, 2538, 2666], [2538, 2667, 2666], [2538, 2539, 2668], [2538, 2668, 2667], [2539, 2540, 2668], [2540, 2669, 2668], [2540, 2541, 2670], [2540, 2670, 2669], [2541, 2542, 2670], [2542, 2671, 2670], [2542, 2543, 2672], [2542, 2672, 2671], [2543, 2544, 2672], [2544, 2673, 2672], [2544, 2545, 2674], [2544, 2674, 2673], [2545, 2546, 2674], [2546, 2675, 2674], [2546, 2547, 2676], [2546, 2676, 2675], [2547, 2548, 2676], [2548, 2677, 2676], [2548, 2549, 2678], [2548, 2678, 2677], [2549, 2550, 2678], [2550, 2679, 2678], [2550, 2551, 2680], [2550, 2680, 2679], [2551, 2552, 2680], [2552, 2681, 2680], [2552, 2553, 2682], [2552, 2682, 2681], [2553, 2554, 2682], [2554, 2683, 2682], [2554, 2555, 2684], [2554, 2684, 2683], [2555, 2556, 2684], [2556, 2685, 2684], [2556, 2557, 2686], [2556, 2686, 2685], [2557, 2558, 2686], [2558, 2687, 2686], [2558, 2559, 2688], [2558, 2688, 2687], [2559, 2560, 2688], [2560, 2689, 2688], [2560, 2561, 2690], [2560, 2690, 2689], [2561, 2562, 2690], [2562, 2691, 2690], [2562, 2563, 2692], [2562, 2692, 2691], [2563, 2564, 2692], [2564, 2693, 2692], [2564, 2565, 2694], [2564, 2694, 2693], [2565, 2566, 2694], [2566, 2695, 2694], [2566, 2567, 2696], [2566, 2696, 2695], [2567, 2568, 2696], [2568, 2697, 2696], [2568, 2569, 2698], [2568, 
2698, 2697], [2569, 2570, 2698], [2570, 2699, 2698], [2570, 2571, 2700], [2570, 2700, 2699], [2571, 2572, 2700], [2572, 2701, 2700], [2572, 2573, 2702], [2572, 2702, 2701], [2573, 2574, 2702], [2574, 2703, 2702], [2574, 2575, 2704], [2574, 2704, 2703], [2575, 2576, 2704], [2576, 2705, 2704], [2576, 2577, 2706], [2576, 2706, 2705], [2577, 2578, 2706], [2578, 2707, 2706], [2578, 2579, 2708], [2578, 2708, 2707], [2580, 2581, 2710], [2580, 2710, 2709], [2581, 2582, 2710], [2582, 2711, 2710], [2582, 2583, 2712], [2582, 2712, 2711], [2583, 2584, 2712], [2584, 2713, 2712], [2584, 2585, 2714], [2584, 2714, 2713], [2585, 2586, 2714], [2586, 2715, 2714], [2586, 2587, 2716], [2586, 2716, 2715], [2587, 2588, 2716], [2588, 2717, 2716], [2588, 2589, 2718], [2588, 2718, 2717], [2589, 2590, 2718], [2590, 2719, 2718], [2590, 2591, 2720], [2590, 2720, 2719], [2591, 2592, 2720], [2592, 2721, 2720], [2592, 2593, 2722], [2592, 2722, 2721], [2593, 2594, 2722], [2594, 2723, 2722], [2594, 2595, 2724], [2594, 2724, 2723], [2595, 2596, 2724], [2596, 2725, 2724], [2596, 2597, 2726], [2596, 2726, 2725], [2597, 2598, 2726], [2598, 2727, 2726], [2598, 2599, 2728], [2598, 2728, 2727], [2599, 2600, 2728], [2600, 2729, 2728], [2600, 2601, 2730], [2600, 2730, 2729], [2601, 2602, 2730], [2602, 2731, 2730], [2602, 2603, 2732], [2602, 2732, 2731], [2603, 2604, 2732], [2604, 2733, 2732], [2604, 2605, 2734], [2604, 2734, 2733], [2605, 2606, 2734], [2606, 2735, 2734], [2606, 2607, 2736], [2606, 2736, 2735], [2607, 2608, 2736], [2608, 2737, 2736], [2608, 2609, 2738], [2608, 2738, 2737], [2609, 2610, 2738], [2610, 2739, 2738], [2610, 2611, 2740], [2610, 2740, 2739], [2611, 2612, 2740], [2612, 2741, 2740], [2612, 2613, 2742], [2612, 2742, 2741], [2613, 2614, 2742], [2614, 2743, 2742], [2614, 2615, 2744], [2614, 2744, 2743], [2615, 2616, 2744], [2616, 2745, 2744], [2616, 2617, 2746], [2616, 2746, 2745], [2617, 2618, 2746], [2618, 2747, 2746], [2618, 2619, 2748], [2618, 2748, 2747], [2619, 2620, 2748], [2620, 2749, 2748], [2620, 2621, 2750], [2620, 2750, 2749], [2621, 2622, 2750], [2622, 2751, 2750], [2622, 2623, 2752], [2622, 2752, 2751], [2623, 2624, 2752], [2624, 2753, 2752], [2624, 2625, 2754], [2624, 2754, 2753], [2625, 2626, 2754], [2626, 2755, 2754], [2626, 2627, 2756], [2626, 2756, 2755], [2627, 2628, 2756], [2628, 2757, 2756], [2628, 2629, 2758], [2628, 2758, 2757], [2629, 2630, 2758], [2630, 2759, 2758], [2630, 2631, 2760], [2630, 2760, 2759], [2631, 2632, 2760], [2632, 2761, 2760], [2632, 2633, 2762], [2632, 2762, 2761], [2633, 2634, 2762], [2634, 2763, 2762], [2634, 2635, 2764], [2634, 2764, 2763], [2635, 2636, 2764], [2636, 2765, 2764], [2636, 2637, 2766], [2636, 2766, 2765], [2637, 2638, 2766], [2638, 2767, 2766], [2638, 2639, 2768], [2638, 2768, 2767], [2639, 2640, 2768], [2640, 2769, 2768], [2640, 2641, 2770], [2640, 2770, 2769], [2641, 2642, 2770], [2642, 2771, 2770], [2642, 2643, 2772], [2642, 2772, 2771], [2643, 2644, 2772], [2644, 2773, 2772], [2644, 2645, 2774], [2644, 2774, 2773], [2645, 2646, 2774], [2646, 2775, 2774], [2646, 2647, 2776], [2646, 2776, 2775], [2647, 2648, 2776], [2648, 2777, 2776], [2648, 2649, 2778], [2648, 2778, 2777], [2649, 2650, 2778], [2650, 2779, 2778], [2650, 2651, 2780], [2650, 2780, 2779], [2651, 2652, 2780], [2652, 2781, 2780], [2652, 2653, 2782], [2652, 2782, 2781], [2653, 2654, 2782], [2654, 2783, 2782], [2654, 2655, 2784], [2654, 2784, 2783], [2655, 2656, 2784], [2656, 2785, 2784], [2656, 2657, 2786], [2656, 2786, 2785], [2657, 2658, 2786], [2658, 2787, 2786], [2658, 2659, 2788], 
[2658, 2788, 2787], [2659, 2660, 2788], [2660, 2789, 2788], [2660, 2661, 2790], [2660, 2790, 2789], [2661, 2662, 2790], [2662, 2791, 2790], [2662, 2663, 2792], [2662, 2792, 2791], [2663, 2664, 2792], [2664, 2793, 2792], [2664, 2665, 2794], [2664, 2794, 2793], [2665, 2666, 2794], [2666, 2795, 2794], [2666, 2667, 2796], [2666, 2796, 2795], [2667, 2668, 2796], [2668, 2797, 2796], [2668, 2669, 2798], [2668, 2798, 2797], [2669, 2670, 2798], [2670, 2799, 2798], [2670, 2671, 2800], [2670, 2800, 2799], [2671, 2672, 2800], [2672, 2801, 2800], [2672, 2673, 2802], [2672, 2802, 2801], [2673, 2674, 2802], [2674, 2803, 2802], [2674, 2675, 2804], [2674, 2804, 2803], [2675, 2676, 2804], [2676, 2805, 2804], [2676, 2677, 2806], [2676, 2806, 2805], [2677, 2678, 2806], [2678, 2807, 2806], [2678, 2679, 2808], [2678, 2808, 2807], [2679, 2680, 2808], [2680, 2809, 2808], [2680, 2681, 2810], [2680, 2810, 2809], [2681, 2682, 2810], [2682, 2811, 2810], [2682, 2683, 2812], [2682, 2812, 2811], [2683, 2684, 2812], [2684, 2813, 2812], [2684, 2685, 2814], [2684, 2814, 2813], [2685, 2686, 2814], [2686, 2815, 2814], [2686, 2687, 2816], [2686, 2816, 2815], [2687, 2688, 2816], [2688, 2817, 2816], [2688, 2689, 2818], [2688, 2818, 2817], [2689, 2690, 2818], [2690, 2819, 2818], [2690, 2691, 2820], [2690, 2820, 2819], [2691, 2692, 2820], [2692, 2821, 2820], [2692, 2693, 2822], [2692, 2822, 2821], [2693, 2694, 2822], [2694, 2823, 2822], [2694, 2695, 2824], [2694, 2824, 2823], [2695, 2696, 2824], [2696, 2825, 2824], [2696, 2697, 2826], [2696, 2826, 2825], [2697, 2698, 2826], [2698, 2827, 2826], [2698, 2699, 2828], [2698, 2828, 2827], [2699, 2700, 2828], [2700, 2829, 2828], [2700, 2701, 2830], [2700, 2830, 2829], [2701, 2702, 2830], [2702, 2831, 2830], [2702, 2703, 2832], [2702, 2832, 2831], [2703, 2704, 2832], [2704, 2833, 2832], [2704, 2705, 2834], [2704, 2834, 2833], [2705, 2706, 2834], [2706, 2835, 2834], [2706, 2707, 2836], [2706, 2836, 2835], [2707, 2708, 2836], [2708, 2837, 2836], [2709, 2710, 2838], [2710, 2839, 2838], [2710, 2711, 2840], [2710, 2840, 2839], [2711, 2712, 2840], [2712, 2841, 2840], [2712, 2713, 2842], [2712, 2842, 2841], [2713, 2714, 2842], [2714, 2843, 2842], [2714, 2715, 2844], [2714, 2844, 2843], [2715, 2716, 2844], [2716, 2845, 2844], [2716, 2717, 2846], [2716, 2846, 2845], [2717, 2718, 2846], [2718, 2847, 2846], [2718, 2719, 2848], [2718, 2848, 2847], [2719, 2720, 2848], [2720, 2849, 2848], [2720, 2721, 2850], [2720, 2850, 2849], [2721, 2722, 2850], [2722, 2851, 2850], [2722, 2723, 2852], [2722, 2852, 2851], [2723, 2724, 2852], [2724, 2853, 2852], [2724, 2725, 2854], [2724, 2854, 2853], [2725, 2726, 2854], [2726, 2855, 2854], [2726, 2727, 2856], [2726, 2856, 2855], [2727, 2728, 2856], [2728, 2857, 2856], [2728, 2729, 2858], [2728, 2858, 2857], [2729, 2730, 2858], [2730, 2859, 2858], [2730, 2731, 2860], [2730, 2860, 2859], [2731, 2732, 2860], [2732, 2861, 2860], [2732, 2733, 2862], [2732, 2862, 2861], [2733, 2734, 2862], [2734, 2863, 2862], [2734, 2735, 2864], [2734, 2864, 2863], [2735, 2736, 2864], [2736, 2865, 2864], [2736, 2737, 2866], [2736, 2866, 2865], [2737, 2738, 2866], [2738, 2867, 2866], [2738, 2739, 2868], [2738, 2868, 2867], [2739, 2740, 2868], [2740, 2869, 2868], [2740, 2741, 2870], [2740, 2870, 2869], [2741, 2742, 2870], [2742, 2871, 2870], [2742, 2743, 2872], [2742, 2872, 2871], [2743, 2744, 2872], [2744, 2873, 2872], [2744, 2745, 2874], [2744, 2874, 2873], [2745, 2746, 2874], [2746, 2875, 2874], [2746, 2747, 2876], [2746, 2876, 2875], [2747, 2748, 2876], [2748, 2877, 2876], [2748, 2749, 
2878], [2748, 2878, 2877], [2749, 2750, 2878], [2750, 2879, 2878], [2750, 2751, 2880], [2750, 2880, 2879], [2751, 2752, 2880], [2752, 2881, 2880], [2752, 2753, 2882], [2752, 2882, 2881], [2753, 2754, 2882], [2754, 2883, 2882], [2754, 2755, 2884], [2754, 2884, 2883], [2755, 2756, 2884], [2756, 2885, 2884], [2756, 2757, 2886], [2756, 2886, 2885], [2757, 2758, 2886], [2758, 2887, 2886], [2758, 2759, 2888], [2758, 2888, 2887], [2759, 2760, 2888], [2760, 2889, 2888], [2760, 2761, 2890], [2760, 2890, 2889], [2761, 2762, 2890], [2762, 2891, 2890], [2762, 2763, 2892], [2762, 2892, 2891], [2763, 2764, 2892], [2764, 2893, 2892], [2764, 2765, 2894], [2764, 2894, 2893], [2765, 2766, 2894], [2766, 2895, 2894], [2766, 2767, 2896], [2766, 2896, 2895], [2767, 2768, 2896], [2768, 2897, 2896], [2768, 2769, 2898], [2768, 2898, 2897], [2769, 2770, 2898], [2770, 2899, 2898], [2770, 2771, 2900], [2770, 2900, 2899], [2771, 2772, 2900], [2772, 2901, 2900], [2772, 2773, 2902], [2772, 2902, 2901], [2773, 2774, 2902], [2774, 2903, 2902], [2774, 2775, 2904], [2774, 2904, 2903], [2775, 2776, 2904], [2776, 2905, 2904], [2776, 2777, 2906], [2776, 2906, 2905], [2777, 2778, 2906], [2778, 2907, 2906], [2778, 2779, 2908], [2778, 2908, 2907], [2779, 2780, 2908], [2780, 2909, 2908], [2780, 2781, 2910], [2780, 2910, 2909], [2781, 2782, 2910], [2782, 2911, 2910], [2782, 2783, 2912], [2782, 2912, 2911], [2783, 2784, 2912], [2784, 2913, 2912], [2784, 2785, 2914], [2784, 2914, 2913], [2785, 2786, 2914], [2786, 2915, 2914], [2786, 2787, 2916], [2786, 2916, 2915], [2787, 2788, 2916], [2788, 2917, 2916], [2788, 2789, 2918], [2788, 2918, 2917], [2789, 2790, 2918], [2790, 2919, 2918], [2790, 2791, 2920], [2790, 2920, 2919], [2791, 2792, 2920], [2792, 2921, 2920], [2792, 2793, 2922], [2792, 2922, 2921], [2793, 2794, 2922], [2794, 2923, 2922], [2794, 2795, 2924], [2794, 2924, 2923], [2795, 2796, 2924], [2796, 2925, 2924], [2796, 2797, 2926], [2796, 2926, 2925], [2797, 2798, 2926], [2798, 2927, 2926], [2798, 2799, 2928], [2798, 2928, 2927], [2799, 2800, 2928], [2800, 2929, 2928], [2800, 2801, 2930], [2800, 2930, 2929], [2801, 2802, 2930], [2802, 2931, 2930], [2802, 2803, 2932], [2802, 2932, 2931], [2803, 2804, 2932], [2804, 2933, 2932], [2804, 2805, 2934], [2804, 2934, 2933], [2805, 2806, 2934], [2806, 2935, 2934], [2806, 2807, 2936], [2806, 2936, 2935], [2807, 2808, 2936], [2808, 2937, 2936], [2808, 2809, 2938], [2808, 2938, 2937], [2809, 2810, 2938], [2810, 2939, 2938], [2810, 2811, 2940], [2810, 2940, 2939], [2811, 2812, 2940], [2812, 2941, 2940], [2812, 2813, 2942], [2812, 2942, 2941], [2813, 2814, 2942], [2814, 2943, 2942], [2814, 2815, 2944], [2814, 2944, 2943], [2815, 2816, 2944], [2816, 2945, 2944], [2816, 2817, 2946], [2816, 2946, 2945], [2817, 2818, 2946], [2818, 2947, 2946], [2818, 2819, 2948], [2818, 2948, 2947], [2819, 2820, 2948], [2820, 2949, 2948], [2820, 2821, 2950], [2820, 2950, 2949], [2821, 2822, 2950], [2822, 2951, 2950], [2822, 2823, 2952], [2822, 2952, 2951], [2823, 2824, 2952], [2824, 2953, 2952], [2824, 2825, 2954], [2824, 2954, 2953], [2825, 2826, 2954], [2826, 2955, 2954], [2826, 2827, 2956], [2826, 2956, 2955], [2827, 2828, 2956], [2828, 2957, 2956], [2828, 2829, 2958], [2828, 2958, 2957], [2829, 2830, 2958], [2830, 2959, 2958], [2830, 2831, 2960], [2830, 2960, 2959], [2831, 2832, 2960], [2832, 2961, 2960], [2832, 2833, 2962], [2832, 2962, 2961], [2833, 2834, 2962], [2834, 2963, 2962], [2834, 2835, 2964], [2834, 2964, 2963], [2835, 2836, 2964], [2836, 2965, 2964], [2836, 2837, 2966], [2836, 2966, 2965], [2838, 
2839, 2968], [2838, 2968, 2967], [2839, 2840, 2968], [2840, 2969, 2968], [2840, 2841, 2970], [2840, 2970, 2969], [2841, 2842, 2970], [2842, 2971, 2970], [2842, 2843, 2972], [2842, 2972, 2971], [2843, 2844, 2972], [2844, 2973, 2972], [2844, 2845, 2974], [2844, 2974, 2973], [2845, 2846, 2974], [2846, 2975, 2974], [2846, 2847, 2976], [2846, 2976, 2975], [2847, 2848, 2976], [2848, 2977, 2976], [2848, 2849, 2978], [2848, 2978, 2977], [2849, 2850, 2978], [2850, 2979, 2978], [2850, 2851, 2980], [2850, 2980, 2979], [2851, 2852, 2980], [2852, 2981, 2980], [2852, 2853, 2982], [2852, 2982, 2981], [2853, 2854, 2982], [2854, 2983, 2982], [2854, 2855, 2984], [2854, 2984, 2983], [2855, 2856, 2984], [2856, 2985, 2984], [2856, 2857, 2986], [2856, 2986, 2985], [2857, 2858, 2986], [2858, 2987, 2986], [2858, 2859, 2988], [2858, 2988, 2987], [2859, 2860, 2988], [2860, 2989, 2988], [2860, 2861, 2990], [2860, 2990, 2989], [2861, 2862, 2990], [2862, 2991, 2990], [2862, 2863, 2992], [2862, 2992, 2991], [2863, 2864, 2992], [2864, 2993, 2992], [2864, 2865, 2994], [2864, 2994, 2993], [2865, 2866, 2994], [2866, 2995, 2994], [2866, 2867, 2996], [2866, 2996, 2995], [2867, 2868, 2996], [2868, 2997, 2996], [2868, 2869, 2998], [2868, 2998, 2997], [2869, 2870, 2998], [2870, 2999, 2998], [2870, 2871, 3000], [2870, 3000, 2999], [2871, 2872, 3000], [2872, 3001, 3000], [2872, 2873, 3002], [2872, 3002, 3001], [2873, 2874, 3002], [2874, 3003, 3002], [2874, 2875, 3004], [2874, 3004, 3003], [2875, 2876, 3004], [2876, 3005, 3004], [2876, 2877, 3006], [2876, 3006, 3005], [2877, 2878, 3006], [2878, 3007, 3006], [2878, 2879, 3008], [2878, 3008, 3007], [2879, 2880, 3008], [2880, 3009, 3008], [2880, 2881, 3010], [2880, 3010, 3009], [2881, 2882, 3010], [2882, 3011, 3010], [2882, 2883, 3012], [2882, 3012, 3011], [2883, 2884, 3012], [2884, 3013, 3012], [2884, 2885, 3014], [2884, 3014, 3013], [2885, 2886, 3014], [2886, 3015, 3014], [2886, 2887, 3016], [2886, 3016, 3015], [2887, 2888, 3016], [2888, 3017, 3016], [2888, 2889, 3018], [2888, 3018, 3017], [2889, 2890, 3018], [2890, 3019, 3018], [2890, 2891, 3020], [2890, 3020, 3019], [2891, 2892, 3020], [2892, 3021, 3020], [2892, 2893, 3022], [2892, 3022, 3021], [2893, 2894, 3022], [2894, 3023, 3022], [2894, 2895, 3024], [2894, 3024, 3023], [2895, 2896, 3024], [2896, 3025, 3024], [2896, 2897, 3026], [2896, 3026, 3025], [2897, 2898, 3026], [2898, 3027, 3026], [2898, 2899, 3028], [2898, 3028, 3027], [2899, 2900, 3028], [2900, 3029, 3028], [2900, 2901, 3030], [2900, 3030, 3029], [2901, 2902, 3030], [2902, 3031, 3030], [2902, 2903, 3032], [2902, 3032, 3031], [2903, 2904, 3032], [2904, 3033, 3032], [2904, 2905, 3034], [2904, 3034, 3033], [2905, 2906, 3034], [2906, 3035, 3034], [2906, 2907, 3036], [2906, 3036, 3035], [2907, 2908, 3036], [2908, 3037, 3036], [2908, 2909, 3038], [2908, 3038, 3037], [2909, 2910, 3038], [2910, 3039, 3038], [2910, 2911, 3040], [2910, 3040, 3039], [2911, 2912, 3040], [2912, 3041, 3040], [2912, 2913, 3042], [2912, 3042, 3041], [2913, 2914, 3042], [2914, 3043, 3042], [2914, 2915, 3044], [2914, 3044, 3043], [2915, 2916, 3044], [2916, 3045, 3044], [2916, 2917, 3046], [2916, 3046, 3045], [2917, 2918, 3046], [2918, 3047, 3046], [2918, 2919, 3048], [2918, 3048, 3047], [2919, 2920, 3048], [2920, 3049, 3048], [2920, 2921, 3050], [2920, 3050, 3049], [2921, 2922, 3050], [2922, 3051, 3050], [2922, 2923, 3052], [2922, 3052, 3051], [2923, 2924, 3052], [2924, 3053, 3052], [2924, 2925, 3054], [2924, 3054, 3053], [2925, 2926, 3054], [2926, 3055, 3054], [2926, 2927, 3056], [2926, 3056, 3055], 
[2927, 2928, 3056], [2928, 3057, 3056], [2928, 2929, 3058], [2928, 3058, 3057], [2929, 2930, 3058], [2930, 3059, 3058], [2930, 2931, 3060], [2930, 3060, 3059], [2931, 2932, 3060], [2932, 3061, 3060], [2932, 2933, 3062], [2932, 3062, 3061], [2933, 2934, 3062], [2934, 3063, 3062], [2934, 2935, 3064], [2934, 3064, 3063], [2935, 2936, 3064], [2936, 3065, 3064], [2936, 2937, 3066], [2936, 3066, 3065], [2937, 2938, 3066], [2938, 3067, 3066], [2938, 2939, 3068], [2938, 3068, 3067], [2939, 2940, 3068], [2940, 3069, 3068], [2940, 2941, 3070], [2940, 3070, 3069], [2941, 2942, 3070], [2942, 3071, 3070], [2942, 2943, 3072], [2942, 3072, 3071], [2943, 2944, 3072], [2944, 3073, 3072], [2944, 2945, 3074], [2944, 3074, 3073], [2945, 2946, 3074], [2946, 3075, 3074], [2946, 2947, 3076], [2946, 3076, 3075], [2947, 2948, 3076], [2948, 3077, 3076], [2948, 2949, 3078], [2948, 3078, 3077], [2949, 2950, 3078], [2950, 3079, 3078], [2950, 2951, 3080], [2950, 3080, 3079], [2951, 2952, 3080], [2952, 3081, 3080], [2952, 2953, 3082], [2952, 3082, 3081], [2953, 2954, 3082], [2954, 3083, 3082], [2954, 2955, 3084], [2954, 3084, 3083], [2955, 2956, 3084], [2956, 3085, 3084], [2956, 2957, 3086], [2956, 3086, 3085], [2957, 2958, 3086], [2958, 3087, 3086], [2958, 2959, 3088], [2958, 3088, 3087], [2959, 2960, 3088], [2960, 3089, 3088], [2960, 2961, 3090], [2960, 3090, 3089], [2961, 2962, 3090], [2962, 3091, 3090], [2962, 2963, 3092], [2962, 3092, 3091], [2963, 2964, 3092], [2964, 3093, 3092], [2964, 2965, 3094], [2964, 3094, 3093], [2965, 2966, 3094], [2966, 3095, 3094], [2967, 2968, 3096], [2968, 3097, 3096], [2968, 2969, 3098], [2968, 3098, 3097], [2969, 2970, 3098], [2970, 3099, 3098], [2970, 2971, 3100], [2970, 3100, 3099], [2971, 2972, 3100], [2972, 3101, 3100], [2972, 2973, 3102], [2972, 3102, 3101], [2973, 2974, 3102], [2974, 3103, 3102], [2974, 2975, 3104], [2974, 3104, 3103], [2975, 2976, 3104], [2976, 3105, 3104], [2976, 2977, 3106], [2976, 3106, 3105], [2977, 2978, 3106], [2978, 3107, 3106], [2978, 2979, 3108], [2978, 3108, 3107], [2979, 2980, 3108], [2980, 3109, 3108], [2980, 2981, 3110], [2980, 3110, 3109], [2981, 2982, 3110], [2982, 3111, 3110], [2982, 2983, 3112], [2982, 3112, 3111], [2983, 2984, 3112], [2984, 3113, 3112], [2984, 2985, 3114], [2984, 3114, 3113], [2985, 2986, 3114], [2986, 3115, 3114], [2986, 2987, 3116], [2986, 3116, 3115], [2987, 2988, 3116], [2988, 3117, 3116], [2988, 2989, 3118], [2988, 3118, 3117], [2989, 2990, 3118], [2990, 3119, 3118], [2990, 2991, 3120], [2990, 3120, 3119], [2991, 2992, 3120], [2992, 3121, 3120], [2992, 2993, 3122], [2992, 3122, 3121], [2993, 2994, 3122], [2994, 3123, 3122], [2994, 2995, 3124], [2994, 3124, 3123], [2995, 2996, 3124], [2996, 3125, 3124], [2996, 2997, 3126], [2996, 3126, 3125], [2997, 2998, 3126], [2998, 3127, 3126], [2998, 2999, 3128], [2998, 3128, 3127], [2999, 3000, 3128], [3000, 3129, 3128], [3000, 3001, 3130], [3000, 3130, 3129], [3001, 3002, 3130], [3002, 3131, 3130], [3002, 3003, 3132], [3002, 3132, 3131], [3003, 3004, 3132], [3004, 3133, 3132], [3004, 3005, 3134], [3004, 3134, 3133], [3005, 3006, 3134], [3006, 3135, 3134], [3006, 3007, 3136], [3006, 3136, 3135], [3007, 3008, 3136], [3008, 3137, 3136], [3008, 3009, 3138], [3008, 3138, 3137], [3009, 3010, 3138], [3010, 3139, 3138], [3010, 3011, 3140], [3010, 3140, 3139], [3011, 3012, 3140], [3012, 3141, 3140], [3012, 3013, 3142], [3012, 3142, 3141], [3013, 3014, 3142], [3014, 3143, 3142], [3014, 3015, 3144], [3014, 3144, 3143], [3015, 3016, 3144], [3016, 3145, 3144], [3016, 3017, 3146], [3016, 3146, 
3145], [3017, 3018, 3146], [3018, 3147, 3146], [3018, 3019, 3148], [3018, 3148, 3147], [3019, 3020, 3148], [3020, 3149, 3148], [3020, 3021, 3150], [3020, 3150, 3149], [3021, 3022, 3150], [3022, 3151, 3150], [3022, 3023, 3152], [3022, 3152, 3151], [3023, 3024, 3152], [3024, 3153, 3152], [3024, 3025, 3154], [3024, 3154, 3153], [3025, 3026, 3154], [3026, 3155, 3154], [3026, 3027, 3156], [3026, 3156, 3155], [3027, 3028, 3156], [3028, 3157, 3156], [3028, 3029, 3158], [3028, 3158, 3157], [3029, 3030, 3158], [3030, 3159, 3158], [3030, 3031, 3160], [3030, 3160, 3159], [3031, 3032, 3160], [3032, 3161, 3160], [3032, 3033, 3162], [3032, 3162, 3161], [3033, 3034, 3162], [3034, 3163, 3162], [3034, 3035, 3164], [3034, 3164, 3163], [3035, 3036, 3164], [3036, 3165, 3164], [3036, 3037, 3166], [3036, 3166, 3165], [3037, 3038, 3166], [3038, 3167, 3166], [3038, 3039, 3168], [3038, 3168, 3167], [3039, 3040, 3168], [3040, 3169, 3168], [3040, 3041, 3170], [3040, 3170, 3169], [3041, 3042, 3170], [3042, 3171, 3170], [3042, 3043, 3172], [3042, 3172, 3171], [3043, 3044, 3172], [3044, 3173, 3172], [3044, 3045, 3174], [3044, 3174, 3173], [3045, 3046, 3174], [3046, 3175, 3174], [3046, 3047, 3176], [3046, 3176, 3175], [3047, 3048, 3176], [3048, 3177, 3176], [3048, 3049, 3178], [3048, 3178, 3177], [3049, 3050, 3178], [3050, 3179, 3178], [3050, 3051, 3180], [3050, 3180, 3179], [3051, 3052, 3180], [3052, 3181, 3180], [3052, 3053, 3182], [3052, 3182, 3181], [3053, 3054, 3182], [3054, 3183, 3182], [3054, 3055, 3184], [3054, 3184, 3183], [3055, 3056, 3184], [3056, 3185, 3184], [3056, 3057, 3186], [3056, 3186, 3185], [3057, 3058, 3186], [3058, 3187, 3186], [3058, 3059, 3188], [3058, 3188, 3187], [3059, 3060, 3188], [3060, 3189, 3188], [3060, 3061, 3190], [3060, 3190, 3189], [3061, 3062, 3190], [3062, 3191, 3190], [3062, 3063, 3192], [3062, 3192, 3191], [3063, 3064, 3192], [3064, 3193, 3192], [3064, 3065, 3194], [3064, 3194, 3193], [3065, 3066, 3194], [3066, 3195, 3194], [3066, 3067, 3196], [3066, 3196, 3195], [3067, 3068, 3196], [3068, 3197, 3196], [3068, 3069, 3198], [3068, 3198, 3197], [3069, 3070, 3198], [3070, 3199, 3198], [3070, 3071, 3200], [3070, 3200, 3199], [3071, 3072, 3200], [3072, 3201, 3200], [3072, 3073, 3202], [3072, 3202, 3201], [3073, 3074, 3202], [3074, 3203, 3202], [3074, 3075, 3204], [3074, 3204, 3203], [3075, 3076, 3204], [3076, 3205, 3204], [3076, 3077, 3206], [3076, 3206, 3205], [3077, 3078, 3206], [3078, 3207, 3206], [3078, 3079, 3208], [3078, 3208, 3207], [3079, 3080, 3208], [3080, 3209, 3208], [3080, 3081, 3210], [3080, 3210, 3209], [3081, 3082, 3210], [3082, 3211, 3210], [3082, 3083, 3212], [3082, 3212, 3211], [3083, 3084, 3212], [3084, 3213, 3212], [3084, 3085, 3214], [3084, 3214, 3213], [3085, 3086, 3214], [3086, 3215, 3214], [3086, 3087, 3216], [3086, 3216, 3215], [3087, 3088, 3216], [3088, 3217, 3216], [3088, 3089, 3218], [3088, 3218, 3217], [3089, 3090, 3218], [3090, 3219, 3218], [3090, 3091, 3220], [3090, 3220, 3219], [3091, 3092, 3220], [3092, 3221, 3220], [3092, 3093, 3222], [3092, 3222, 3221], [3093, 3094, 3222], [3094, 3223, 3222], [3094, 3095, 3224], [3094, 3224, 3223], [3096, 3097, 3226], [3096, 3226, 3225], [3097, 3098, 3226], [3098, 3227, 3226], [3098, 3099, 3228], [3098, 3228, 3227], [3099, 3100, 3228], [3100, 3229, 3228], [3100, 3101, 3230], [3100, 3230, 3229], [3101, 3102, 3230], [3102, 3231, 3230], [3102, 3103, 3232], [3102, 3232, 3231], [3103, 3104, 3232], [3104, 3233, 3232], [3104, 3105, 3234], [3104, 3234, 3233], [3105, 3106, 3234], [3106, 3235, 3234], [3106, 3107, 3236], [3106, 
3236, 3235], [3107, 3108, 3236], [3108, 3237, 3236], [3108, 3109, 3238], [3108, 3238, 3237], [3109, 3110, 3238], [3110, 3239, 3238], [3110, 3111, 3240], [3110, 3240, 3239], [3111, 3112, 3240], [3112, 3241, 3240], [3112, 3113, 3242], [3112, 3242, 3241], [3113, 3114, 3242], [3114, 3243, 3242], [3114, 3115, 3244], [3114, 3244, 3243], [3115, 3116, 3244], [3116, 3245, 3244], [3116, 3117, 3246], [3116, 3246, 3245], [3117, 3118, 3246], [3118, 3247, 3246], [3118, 3119, 3248], [3118, 3248, 3247], [3119, 3120, 3248], [3120, 3249, 3248], [3120, 3121, 3250], [3120, 3250, 3249], [3121, 3122, 3250], [3122, 3251, 3250], [3122, 3123, 3252], [3122, 3252, 3251], [3123, 3124, 3252], [3124, 3253, 3252], [3124, 3125, 3254], [3124, 3254, 3253], [3125, 3126, 3254], [3126, 3255, 3254], [3126, 3127, 3256], [3126, 3256, 3255], [3127, 3128, 3256], [3128, 3257, 3256], [3128, 3129, 3258], [3128, 3258, 3257], [3129, 3130, 3258], [3130, 3259, 3258], [3130, 3131, 3260], [3130, 3260, 3259], [3131, 3132, 3260], [3132, 3261, 3260], [3132, 3133, 3262], [3132, 3262, 3261], [3133, 3134, 3262], [3134, 3263, 3262], [3134, 3135, 3264], [3134, 3264, 3263], [3135, 3136, 3264], [3136, 3265, 3264], [3136, 3137, 3266], [3136, 3266, 3265], [3137, 3138, 3266], [3138, 3267, 3266], [3138, 3139, 3268], [3138, 3268, 3267], [3139, 3140, 3268], [3140, 3269, 3268], [3140, 3141, 3270], [3140, 3270, 3269], [3141, 3142, 3270], [3142, 3271, 3270], [3142, 3143, 3272], [3142, 3272, 3271], [3143, 3144, 3272], [3144, 3273, 3272], [3144, 3145, 3274], [3144, 3274, 3273], [3145, 3146, 3274], [3146, 3275, 3274], [3146, 3147, 3276], [3146, 3276, 3275], [3147, 3148, 3276], [3148, 3277, 3276], [3148, 3149, 3278], [3148, 3278, 3277], [3149, 3150, 3278], [3150, 3279, 3278], [3150, 3151, 3280], [3150, 3280, 3279], [3151, 3152, 3280], [3152, 3281, 3280], [3152, 3153, 3282], [3152, 3282, 3281], [3153, 3154, 3282], [3154, 3283, 3282], [3154, 3155, 3284], [3154, 3284, 3283], [3155, 3156, 3284], [3156, 3285, 3284], [3156, 3157, 3286], [3156, 3286, 3285], [3157, 3158, 3286], [3158, 3287, 3286], [3158, 3159, 3288], [3158, 3288, 3287], [3159, 3160, 3288], [3160, 3289, 3288], [3160, 3161, 3290], [3160, 3290, 3289], [3161, 3162, 3290], [3162, 3291, 3290], [3162, 3163, 3292], [3162, 3292, 3291], [3163, 3164, 3292], [3164, 3293, 3292], [3164, 3165, 3294], [3164, 3294, 3293], [3165, 3166, 3294], [3166, 3295, 3294], [3166, 3167, 3296], [3166, 3296, 3295], [3167, 3168, 3296], [3168, 3297, 3296], [3168, 3169, 3298], [3168, 3298, 3297], [3169, 3170, 3298], [3170, 3299, 3298], [3170, 3171, 3300], [3170, 3300, 3299], [3171, 3172, 3300], [3172, 3301, 3300], [3172, 3173, 3302], [3172, 3302, 3301], [3173, 3174, 3302], [3174, 3303, 3302], [3174, 3175, 3304], [3174, 3304, 3303], [3175, 3176, 3304], [3176, 3305, 3304], [3176, 3177, 3306], [3176, 3306, 3305], [3177, 3178, 3306], [3178, 3307, 3306], [3178, 3179, 3308], [3178, 3308, 3307], [3179, 3180, 3308], [3180, 3309, 3308], [3180, 3181, 3310], [3180, 3310, 3309], [3181, 3182, 3310], [3182, 3311, 3310], [3182, 3183, 3312], [3182, 3312, 3311], [3183, 3184, 3312], [3184, 3313, 3312], [3184, 3185, 3314], [3184, 3314, 3313], [3185, 3186, 3314], [3186, 3315, 3314], [3186, 3187, 3316], [3186, 3316, 3315], [3187, 3188, 3316], [3188, 3317, 3316], [3188, 3189, 3318], [3188, 3318, 3317], [3189, 3190, 3318], [3190, 3319, 3318], [3190, 3191, 3320], [3190, 3320, 3319], [3191, 3192, 3320], [3192, 3321, 3320], [3192, 3193, 3322], [3192, 3322, 3321], [3193, 3194, 3322], [3194, 3323, 3322], [3194, 3195, 3324], [3194, 3324, 3323], [3195, 3196, 3324], 
[3196, 3325, 3324], [3196, 3197, 3326], [3196, 3326, 3325], [3197, 3198, 3326], [3198, 3327, 3326], [3198, 3199, 3328], [3198, 3328, 3327], [3199, 3200, 3328], [3200, 3329, 3328], [3200, 3201, 3330], [3200, 3330, 3329], [3201, 3202, 3330], [3202, 3331, 3330], [3202, 3203, 3332], [3202, 3332, 3331], [3203, 3204, 3332], [3204, 3333, 3332], [3204, 3205, 3334], [3204, 3334, 3333], [3205, 3206, 3334], [3206, 3335, 3334], [3206, 3207, 3336], [3206, 3336, 3335], [3207, 3208, 3336], [3208, 3337, 3336], [3208, 3209, 3338], [3208, 3338, 3337], [3209, 3210, 3338], [3210, 3339, 3338], [3210, 3211, 3340], [3210, 3340, 3339], [3211, 3212, 3340], [3212, 3341, 3340], [3212, 3213, 3342], [3212, 3342, 3341], [3213, 3214, 3342], [3214, 3343, 3342], [3214, 3215, 3344], [3214, 3344, 3343], [3215, 3216, 3344], [3216, 3345, 3344], [3216, 3217, 3346], [3216, 3346, 3345], [3217, 3218, 3346], [3218, 3347, 3346], [3218, 3219, 3348], [3218, 3348, 3347], [3219, 3220, 3348], [3220, 3349, 3348], [3220, 3221, 3350], [3220, 3350, 3349], [3221, 3222, 3350], [3222, 3351, 3350], [3222, 3223, 3352], [3222, 3352, 3351], [3223, 3224, 3352], [3224, 3353, 3352], [3225, 3226, 3354], [3226, 3355, 3354], [3226, 3227, 3356], [3226, 3356, 3355], [3227, 3228, 3356], [3228, 3357, 3356], [3228, 3229, 3358], [3228, 3358, 3357], [3229, 3230, 3358], [3230, 3359, 3358], [3230, 3231, 3360], [3230, 3360, 3359], [3231, 3232, 3360], [3232, 3361, 3360], [3232, 3233, 3362], [3232, 3362, 3361], [3233, 3234, 3362], [3234, 3363, 3362], [3234, 3235, 3364], [3234, 3364, 3363], [3235, 3236, 3364], [3236, 3365, 3364], [3236, 3237, 3366], [3236, 3366, 3365], [3237, 3238, 3366], [3238, 3367, 3366], [3238, 3239, 3368], [3238, 3368, 3367], [3239, 3240, 3368], [3240, 3369, 3368], [3240, 3241, 3370], [3240, 3370, 3369], [3241, 3242, 3370], [3242, 3371, 3370], [3242, 3243, 3372], [3242, 3372, 3371], [3243, 3244, 3372], [3244, 3373, 3372], [3244, 3245, 3374], [3244, 3374, 3373], [3245, 3246, 3374], [3246, 3375, 3374], [3246, 3247, 3376], [3246, 3376, 3375], [3247, 3248, 3376], [3248, 3377, 3376], [3248, 3249, 3378], [3248, 3378, 3377], [3249, 3250, 3378], [3250, 3379, 3378], [3250, 3251, 3380], [3250, 3380, 3379], [3251, 3252, 3380], [3252, 3381, 3380], [3252, 3253, 3382], [3252, 3382, 3381], [3253, 3254, 3382], [3254, 3383, 3382], [3254, 3255, 3384], [3254, 3384, 3383], [3255, 3256, 3384], [3256, 3385, 3384], [3256, 3257, 3386], [3256, 3386, 3385], [3257, 3258, 3386], [3258, 3387, 3386], [3258, 3259, 3388], [3258, 3388, 3387], [3259, 3260, 3388], [3260, 3389, 3388], [3260, 3261, 3390], [3260, 3390, 3389], [3261, 3262, 3390], [3262, 3391, 3390], [3262, 3263, 3392], [3262, 3392, 3391], [3263, 3264, 3392], [3264, 3393, 3392], [3264, 3265, 3394], [3264, 3394, 3393], [3265, 3266, 3394], [3266, 3395, 3394], [3266, 3267, 3396], [3266, 3396, 3395], [3267, 3268, 3396], [3268, 3397, 3396], [3268, 3269, 3398], [3268, 3398, 3397], [3269, 3270, 3398], [3270, 3399, 3398], [3270, 3271, 3400], [3270, 3400, 3399], [3271, 3272, 3400], [3272, 3401, 3400], [3272, 3273, 3402], [3272, 3402, 3401], [3273, 3274, 3402], [3274, 3403, 3402], [3274, 3275, 3404], [3274, 3404, 3403], [3275, 3276, 3404], [3276, 3405, 3404], [3276, 3277, 3406], [3276, 3406, 3405], [3277, 3278, 3406], [3278, 3407, 3406], [3278, 3279, 3408], [3278, 3408, 3407], [3279, 3280, 3408], [3280, 3409, 3408], [3280, 3281, 3410], [3280, 3410, 3409], [3281, 3282, 3410], [3282, 3411, 3410], [3282, 3283, 3412], [3282, 3412, 3411], [3283, 3284, 3412], [3284, 3413, 3412], [3284, 3285, 3414], [3284, 3414, 3413], [3285, 3286, 
3414], [3286, 3415, 3414], [3286, 3287, 3416], [3286, 3416, 3415], [3287, 3288, 3416], [3288, 3417, 3416], [3288, 3289, 3418], [3288, 3418, 3417], [3289, 3290, 3418], [3290, 3419, 3418], [3290, 3291, 3420], [3290, 3420, 3419], [3291, 3292, 3420], [3292, 3421, 3420], [3292, 3293, 3422], [3292, 3422, 3421], [3293, 3294, 3422], [3294, 3423, 3422], [3294, 3295, 3424], [3294, 3424, 3423], [3295, 3296, 3424], [3296, 3425, 3424], [3296, 3297, 3426], [3296, 3426, 3425], [3297, 3298, 3426], [3298, 3427, 3426], [3298, 3299, 3428], [3298, 3428, 3427], [3299, 3300, 3428], [3300, 3429, 3428], [3300, 3301, 3430], [3300, 3430, 3429], [3301, 3302, 3430], [3302, 3431, 3430], [3302, 3303, 3432], [3302, 3432, 3431], [3303, 3304, 3432], [3304, 3433, 3432], [3304, 3305, 3434], [3304, 3434, 3433], [3305, 3306, 3434], [3306, 3435, 3434], [3306, 3307, 3436], [3306, 3436, 3435], [3307, 3308, 3436], [3308, 3437, 3436], [3308, 3309, 3438], [3308, 3438, 3437], [3309, 3310, 3438], [3310, 3439, 3438], [3310, 3311, 3440], [3310, 3440, 3439], [3311, 3312, 3440], [3312, 3441, 3440], [3312, 3313, 3442], [3312, 3442, 3441], [3313, 3314, 3442], [3314, 3443, 3442], [3314, 3315, 3444], [3314, 3444, 3443], [3315, 3316, 3444], [3316, 3445, 3444], [3316, 3317, 3446], [3316, 3446, 3445], [3317, 3318, 3446], [3318, 3447, 3446], [3318, 3319, 3448], [3318, 3448, 3447], [3319, 3320, 3448], [3320, 3449, 3448], [3320, 3321, 3450], [3320, 3450, 3449], [3321, 3322, 3450], [3322, 3451, 3450], [3322, 3323, 3452], [3322, 3452, 3451], [3323, 3324, 3452], [3324, 3453, 3452], [3324, 3325, 3454], [3324, 3454, 3453], [3325, 3326, 3454], [3326, 3455, 3454], [3326, 3327, 3456], [3326, 3456, 3455], [3327, 3328, 3456], [3328, 3457, 3456], [3328, 3329, 3458], [3328, 3458, 3457], [3329, 3330, 3458], [3330, 3459, 3458], [3330, 3331, 3460], [3330, 3460, 3459], [3331, 3332, 3460], [3332, 3461, 3460], [3332, 3333, 3462], [3332, 3462, 3461], [3333, 3334, 3462], [3334, 3463, 3462], [3334, 3335, 3464], [3334, 3464, 3463], [3335, 3336, 3464], [3336, 3465, 3464], [3336, 3337, 3466], [3336, 3466, 3465], [3337, 3338, 3466], [3338, 3467, 3466], [3338, 3339, 3468], [3338, 3468, 3467], [3339, 3340, 3468], [3340, 3469, 3468], [3340, 3341, 3470], [3340, 3470, 3469], [3341, 3342, 3470], [3342, 3471, 3470], [3342, 3343, 3472], [3342, 3472, 3471], [3343, 3344, 3472], [3344, 3473, 3472], [3344, 3345, 3474], [3344, 3474, 3473], [3345, 3346, 3474], [3346, 3475, 3474], [3346, 3347, 3476], [3346, 3476, 3475], [3347, 3348, 3476], [3348, 3477, 3476], [3348, 3349, 3478], [3348, 3478, 3477], [3349, 3350, 3478], [3350, 3479, 3478], [3350, 3351, 3480], [3350, 3480, 3479], [3351, 3352, 3480], [3352, 3481, 3480], [3352, 3353, 3482], [3352, 3482, 3481], [3354, 3355, 3484], [3354, 3484, 3483], [3355, 3356, 3484], [3356, 3485, 3484], [3356, 3357, 3486], [3356, 3486, 3485], [3357, 3358, 3486], [3358, 3487, 3486], [3358, 3359, 3488], [3358, 3488, 3487], [3359, 3360, 3488], [3360, 3489, 3488], [3360, 3361, 3490], [3360, 3490, 3489], [3361, 3362, 3490], [3362, 3491, 3490], [3362, 3363, 3492], [3362, 3492, 3491], [3363, 3364, 3492], [3364, 3493, 3492], [3364, 3365, 3494], [3364, 3494, 3493], [3365, 3366, 3494], [3366, 3495, 3494], [3366, 3367, 3496], [3366, 3496, 3495], [3367, 3368, 3496], [3368, 3497, 3496], [3368, 3369, 3498], [3368, 3498, 3497], [3369, 3370, 3498], [3370, 3499, 3498], [3370, 3371, 3500], [3370, 3500, 3499], [3371, 3372, 3500], [3372, 3501, 3500], [3372, 3373, 3502], [3372, 3502, 3501], [3373, 3374, 3502], [3374, 3503, 3502], [3374, 3375, 3504], [3374, 3504, 3503], [3375, 
3376, 3504], [3376, 3505, 3504], [3376, 3377, 3506], [3376, 3506, 3505], [3377, 3378, 3506], [3378, 3507, 3506], [3378, 3379, 3508], [3378, 3508, 3507], [3379, 3380, 3508], [3380, 3509, 3508], [3380, 3381, 3510], [3380, 3510, 3509], [3381, 3382, 3510], [3382, 3511, 3510], [3382, 3383, 3512], [3382, 3512, 3511], [3383, 3384, 3512], [3384, 3513, 3512], [3384, 3385, 3514], [3384, 3514, 3513], [3385, 3386, 3514], [3386, 3515, 3514], [3386, 3387, 3516], [3386, 3516, 3515], [3387, 3388, 3516], [3388, 3517, 3516], [3388, 3389, 3518], [3388, 3518, 3517], [3389, 3390, 3518], [3390, 3519, 3518], [3390, 3391, 3520], [3390, 3520, 3519], [3391, 3392, 3520], [3392, 3521, 3520], [3392, 3393, 3522], [3392, 3522, 3521], [3393, 3394, 3522], [3394, 3523, 3522], [3394, 3395, 3524], [3394, 3524, 3523], [3395, 3396, 3524], [3396, 3525, 3524], [3396, 3397, 3526], [3396, 3526, 3525], [3397, 3398, 3526], [3398, 3527, 3526], [3398, 3399, 3528], [3398, 3528, 3527], [3399, 3400, 3528], [3400, 3529, 3528], [3400, 3401, 3530], [3400, 3530, 3529], [3401, 3402, 3530], [3402, 3531, 3530], [3402, 3403, 3532], [3402, 3532, 3531], [3403, 3404, 3532], [3404, 3533, 3532], [3404, 3405, 3534], [3404, 3534, 3533], [3405, 3406, 3534], [3406, 3535, 3534], [3406, 3407, 3536], [3406, 3536, 3535], [3407, 3408, 3536], [3408, 3537, 3536], [3408, 3409, 3538], [3408, 3538, 3537], [3409, 3410, 3538], [3410, 3539, 3538], [3410, 3411, 3540], [3410, 3540, 3539], [3411, 3412, 3540], [3412, 3541, 3540], [3412, 3413, 3542], [3412, 3542, 3541], [3413, 3414, 3542], [3414, 3543, 3542], [3414, 3415, 3544], [3414, 3544, 3543], [3415, 3416, 3544], [3416, 3545, 3544], [3416, 3417, 3546], [3416, 3546, 3545], [3417, 3418, 3546], [3418, 3547, 3546], [3418, 3419, 3548], [3418, 3548, 3547], [3419, 3420, 3548], [3420, 3549, 3548], [3420, 3421, 3550], [3420, 3550, 3549], [3421, 3422, 3550], [3422, 3551, 3550], [3422, 3423, 3552], [3422, 3552, 3551], [3423, 3424, 3552], [3424, 3553, 3552], [3424, 3425, 3554], [3424, 3554, 3553], [3425, 3426, 3554], [3426, 3555, 3554], [3426, 3427, 3556], [3426, 3556, 3555], [3427, 3428, 3556], [3428, 3557, 3556], [3428, 3429, 3558], [3428, 3558, 3557], [3429, 3430, 3558], [3430, 3559, 3558], [3430, 3431, 3560], [3430, 3560, 3559], [3431, 3432, 3560], [3432, 3561, 3560], [3432, 3433, 3562], [3432, 3562, 3561], [3433, 3434, 3562], [3434, 3563, 3562], [3434, 3435, 3564], [3434, 3564, 3563], [3435, 3436, 3564], [3436, 3565, 3564], [3436, 3437, 3566], [3436, 3566, 3565], [3437, 3438, 3566], [3438, 3567, 3566], [3438, 3439, 3568], [3438, 3568, 3567], [3439, 3440, 3568], [3440, 3569, 3568], [3440, 3441, 3570], [3440, 3570, 3569], [3441, 3442, 3570], [3442, 3571, 3570], [3442, 3443, 3572], [3442, 3572, 3571], [3443, 3444, 3572], [3444, 3573, 3572], [3444, 3445, 3574], [3444, 3574, 3573], [3445, 3446, 3574], [3446, 3575, 3574], [3446, 3447, 3576], [3446, 3576, 3575], [3447, 3448, 3576], [3448, 3577, 3576], [3448, 3449, 3578], [3448, 3578, 3577], [3449, 3450, 3578], [3450, 3579, 3578], [3450, 3451, 3580], [3450, 3580, 3579], [3451, 3452, 3580], [3452, 3581, 3580], [3452, 3453, 3582], [3452, 3582, 3581], [3453, 3454, 3582], [3454, 3583, 3582], [3454, 3455, 3584], [3454, 3584, 3583], [3455, 3456, 3584], [3456, 3585, 3584], [3456, 3457, 3586], [3456, 3586, 3585], [3457, 3458, 3586], [3458, 3587, 3586], [3458, 3459, 3588], [3458, 3588, 3587], [3459, 3460, 3588], [3460, 3589, 3588], [3460, 3461, 3590], [3460, 3590, 3589], [3461, 3462, 3590], [3462, 3591, 3590], [3462, 3463, 3592], [3462, 3592, 3591], [3463, 3464, 3592], [3464, 3593, 3592], 
[3464, 3465, 3594], [3464, 3594, 3593], [3465, 3466, 3594], [3466, 3595, 3594], [3466, 3467, 3596], [3466, 3596, 3595], [3467, 3468, 3596], [3468, 3597, 3596], [3468, 3469, 3598], [3468, 3598, 3597], [3469, 3470, 3598], [3470, 3599, 3598], [3470, 3471, 3600], [3470, 3600, 3599], [3471, 3472, 3600], [3472, 3601, 3600], [3472, 3473, 3602], [3472, 3602, 3601], [3473, 3474, 3602], [3474, 3603, 3602], [3474, 3475, 3604], [3474, 3604, 3603], [3475, 3476, 3604], [3476, 3605, 3604], [3476, 3477, 3606], [3476, 3606, 3605], [3477, 3478, 3606], [3478, 3607, 3606], [3478, 3479, 3608], [3478, 3608, 3607], [3479, 3480, 3608], [3480, 3609, 3608], [3480, 3481, 3610], [3480, 3610, 3609], [3481, 3482, 3610], [3482, 3611, 3610], [3483, 3484, 3612], [3484, 3613, 3612], [3484, 3485, 3614], [3484, 3614, 3613], [3485, 3486, 3614], [3486, 3615, 3614], [3486, 3487, 3616], [3486, 3616, 3615], [3487, 3488, 3616], [3488, 3617, 3616], [3488, 3489, 3618], [3488, 3618, 3617], [3489, 3490, 3618], [3490, 3619, 3618], [3490, 3491, 3620], [3490, 3620, 3619], [3491, 3492, 3620], [3492, 3621, 3620], [3492, 3493, 3622], [3492, 3622, 3621], [3493, 3494, 3622], [3494, 3623, 3622], [3494, 3495, 3624], [3494, 3624, 3623], [3495, 3496, 3624], [3496, 3625, 3624], [3496, 3497, 3626], [3496, 3626, 3625], [3497, 3498, 3626], [3498, 3627, 3626], [3498, 3499, 3628], [3498, 3628, 3627], [3499, 3500, 3628], [3500, 3629, 3628], [3500, 3501, 3630], [3500, 3630, 3629], [3501, 3502, 3630], [3502, 3631, 3630], [3502, 3503, 3632], [3502, 3632, 3631], [3503, 3504, 3632], [3504, 3633, 3632], [3504, 3505, 3634], [3504, 3634, 3633], [3505, 3506, 3634], [3506, 3635, 3634], [3506, 3507, 3636], [3506, 3636, 3635], [3507, 3508, 3636], [3508, 3637, 3636], [3508, 3509, 3638], [3508, 3638, 3637], [3509, 3510, 3638], [3510, 3639, 3638], [3510, 3511, 3640], [3510, 3640, 3639], [3511, 3512, 3640], [3512, 3641, 3640], [3512, 3513, 3642], [3512, 3642, 3641], [3513, 3514, 3642], [3514, 3643, 3642], [3514, 3515, 3644], [3514, 3644, 3643], [3515, 3516, 3644], [3516, 3645, 3644], [3516, 3517, 3646], [3516, 3646, 3645], [3517, 3518, 3646], [3518, 3647, 3646], [3518, 3519, 3648], [3518, 3648, 3647], [3519, 3520, 3648], [3520, 3649, 3648], [3520, 3521, 3650], [3520, 3650, 3649], [3521, 3522, 3650], [3522, 3651, 3650], [3522, 3523, 3652], [3522, 3652, 3651], [3523, 3524, 3652], [3524, 3653, 3652], [3524, 3525, 3654], [3524, 3654, 3653], [3525, 3526, 3654], [3526, 3655, 3654], [3526, 3527, 3656], [3526, 3656, 3655], [3527, 3528, 3656], [3528, 3657, 3656], [3528, 3529, 3658], [3528, 3658, 3657], [3529, 3530, 3658], [3530, 3659, 3658], [3530, 3531, 3660], [3530, 3660, 3659], [3531, 3532, 3660], [3532, 3661, 3660], [3532, 3533, 3662], [3532, 3662, 3661], [3533, 3534, 3662], [3534, 3663, 3662], [3534, 3535, 3664], [3534, 3664, 3663], [3535, 3536, 3664], [3536, 3665, 3664], [3536, 3537, 3666], [3536, 3666, 3665], [3537, 3538, 3666], [3538, 3667, 3666], [3538, 3539, 3668], [3538, 3668, 3667], [3539, 3540, 3668], [3540, 3669, 3668], [3540, 3541, 3670], [3540, 3670, 3669], [3541, 3542, 3670], [3542, 3671, 3670], [3542, 3543, 3672], [3542, 3672, 3671], [3543, 3544, 3672], [3544, 3673, 3672], [3544, 3545, 3674], [3544, 3674, 3673], [3545, 3546, 3674], [3546, 3675, 3674], [3546, 3547, 3676], [3546, 3676, 3675], [3547, 3548, 3676], [3548, 3677, 3676], [3548, 3549, 3678], [3548, 3678, 3677], [3549, 3550, 3678], [3550, 3679, 3678], [3550, 3551, 3680], [3550, 3680, 3679], [3551, 3552, 3680], [3552, 3681, 3680], [3552, 3553, 3682], [3552, 3682, 3681], [3553, 3554, 3682], [3554, 3683, 
3682], [3554, 3555, 3684], [3554, 3684, 3683], [3555, 3556, 3684], [3556, 3685, 3684], [3556, 3557, 3686], [3556, 3686, 3685], [3557, 3558, 3686], [3558, 3687, 3686], [3558, 3559, 3688], [3558, 3688, 3687], [3559, 3560, 3688], [3560, 3689, 3688], [3560, 3561, 3690], [3560, 3690, 3689], [3561, 3562, 3690], [3562, 3691, 3690], [3562, 3563, 3692], [3562, 3692, 3691], [3563, 3564, 3692], [3564, 3693, 3692], [3564, 3565, 3694], [3564, 3694, 3693], [3565, 3566, 3694], [3566, 3695, 3694], [3566, 3567, 3696], [3566, 3696, 3695], [3567, 3568, 3696], [3568, 3697, 3696], [3568, 3569, 3698], [3568, 3698, 3697], [3569, 3570, 3698], [3570, 3699, 3698], [3570, 3571, 3700], [3570, 3700, 3699], [3571, 3572, 3700], [3572, 3701, 3700], [3572, 3573, 3702], [3572, 3702, 3701], [3573, 3574, 3702], [3574, 3703, 3702], [3574, 3575, 3704], [3574, 3704, 3703], [3575, 3576, 3704], [3576, 3705, 3704], [3576, 3577, 3706], [3576, 3706, 3705], [3577, 3578, 3706], [3578, 3707, 3706], [3578, 3579, 3708], [3578, 3708, 3707], [3579, 3580, 3708], [3580, 3709, 3708], [3580, 3581, 3710], [3580, 3710, 3709], [3581, 3582, 3710], [3582, 3711, 3710], [3582, 3583, 3712], [3582, 3712, 3711], [3583, 3584, 3712], [3584, 3713, 3712], [3584, 3585, 3714], [3584, 3714, 3713], [3585, 3586, 3714], [3586, 3715, 3714], [3586, 3587, 3716], [3586, 3716, 3715], [3587, 3588, 3716], [3588, 3717, 3716], [3588, 3589, 3718], [3588, 3718, 3717], [3589, 3590, 3718], [3590, 3719, 3718], [3590, 3591, 3720], [3590, 3720, 3719], [3591, 3592, 3720], [3592, 3721, 3720], [3592, 3593, 3722], [3592, 3722, 3721], [3593, 3594, 3722], [3594, 3723, 3722], [3594, 3595, 3724], [3594, 3724, 3723], [3595, 3596, 3724], [3596, 3725, 3724], [3596, 3597, 3726], [3596, 3726, 3725], [3597, 3598, 3726], [3598, 3727, 3726], [3598, 3599, 3728], [3598, 3728, 3727], [3599, 3600, 3728], [3600, 3729, 3728], [3600, 3601, 3730], [3600, 3730, 3729], [3601, 3602, 3730], [3602, 3731, 3730], [3602, 3603, 3732], [3602, 3732, 3731], [3603, 3604, 3732], [3604, 3733, 3732], [3604, 3605, 3734], [3604, 3734, 3733], [3605, 3606, 3734], [3606, 3735, 3734], [3606, 3607, 3736], [3606, 3736, 3735], [3607, 3608, 3736], [3608, 3737, 3736], [3608, 3609, 3738], [3608, 3738, 3737], [3609, 3610, 3738], [3610, 3739, 3738], [3610, 3611, 3740], [3610, 3740, 3739], [3612, 3613, 3742], [3612, 3742, 3741], [3613, 3614, 3742], [3614, 3743, 3742], [3614, 3615, 3744], [3614, 3744, 3743], [3615, 3616, 3744], [3616, 3745, 3744], [3616, 3617, 3746], [3616, 3746, 3745], [3617, 3618, 3746], [3618, 3747, 3746], [3618, 3619, 3748], [3618, 3748, 3747], [3619, 3620, 3748], [3620, 3749, 3748], [3620, 3621, 3750], [3620, 3750, 3749], [3621, 3622, 3750], [3622, 3751, 3750], [3622, 3623, 3752], [3622, 3752, 3751], [3623, 3624, 3752], [3624, 3753, 3752], [3624, 3625, 3754], [3624, 3754, 3753], [3625, 3626, 3754], [3626, 3755, 3754], [3626, 3627, 3756], [3626, 3756, 3755], [3627, 3628, 3756], [3628, 3757, 3756], [3628, 3629, 3758], [3628, 3758, 3757], [3629, 3630, 3758], [3630, 3759, 3758], [3630, 3631, 3760], [3630, 3760, 3759], [3631, 3632, 3760], [3632, 3761, 3760], [3632, 3633, 3762], [3632, 3762, 3761], [3633, 3634, 3762], [3634, 3763, 3762], [3634, 3635, 3764], [3634, 3764, 3763], [3635, 3636, 3764], [3636, 3765, 3764], [3636, 3637, 3766], [3636, 3766, 3765], [3637, 3638, 3766], [3638, 3767, 3766], [3638, 3639, 3768], [3638, 3768, 3767], [3639, 3640, 3768], [3640, 3769, 3768], [3640, 3641, 3770], [3640, 3770, 3769], [3641, 3642, 3770], [3642, 3771, 3770], [3642, 3643, 3772], [3642, 3772, 3771], [3643, 3644, 3772], [3644, 
3773, 3772], [3644, 3645, 3774], [3644, 3774, 3773], [3645, 3646, 3774], [3646, 3775, 3774], [3646, 3647, 3776], [3646, 3776, 3775], [3647, 3648, 3776], [3648, 3777, 3776], [3648, 3649, 3778], [3648, 3778, 3777], [3649, 3650, 3778], [3650, 3779, 3778], [3650, 3651, 3780], [3650, 3780, 3779], [3651, 3652, 3780], [3652, 3781, 3780], [3652, 3653, 3782], [3652, 3782, 3781], [3653, 3654, 3782], [3654, 3783, 3782], [3654, 3655, 3784], [3654, 3784, 3783], [3655, 3656, 3784], [3656, 3785, 3784], [3656, 3657, 3786], [3656, 3786, 3785], [3657, 3658, 3786], [3658, 3787, 3786], [3658, 3659, 3788], [3658, 3788, 3787], [3659, 3660, 3788], [3660, 3789, 3788], [3660, 3661, 3790], [3660, 3790, 3789], [3661, 3662, 3790], [3662, 3791, 3790], [3662, 3663, 3792], [3662, 3792, 3791], [3663, 3664, 3792], [3664, 3793, 3792], [3664, 3665, 3794], [3664, 3794, 3793], [3665, 3666, 3794], [3666, 3795, 3794], [3666, 3667, 3796], [3666, 3796, 3795], [3667, 3668, 3796], [3668, 3797, 3796], [3668, 3669, 3798], [3668, 3798, 3797], [3669, 3670, 3798], [3670, 3799, 3798], [3670, 3671, 3800], [3670, 3800, 3799], [3671, 3672, 3800], [3672, 3801, 3800], [3672, 3673, 3802], [3672, 3802, 3801], [3673, 3674, 3802], [3674, 3803, 3802], [3674, 3675, 3804], [3674, 3804, 3803], [3675, 3676, 3804], [3676, 3805, 3804], [3676, 3677, 3806], [3676, 3806, 3805], [3677, 3678, 3806], [3678, 3807, 3806], [3678, 3679, 3808], [3678, 3808, 3807], [3679, 3680, 3808], [3680, 3809, 3808], [3680, 3681, 3810], [3680, 3810, 3809], [3681, 3682, 3810], [3682, 3811, 3810], [3682, 3683, 3812], [3682, 3812, 3811], [3683, 3684, 3812], [3684, 3813, 3812], [3684, 3685, 3814], [3684, 3814, 3813], [3685, 3686, 3814], [3686, 3815, 3814], [3686, 3687, 3816], [3686, 3816, 3815], [3687, 3688, 3816], [3688, 3817, 3816], [3688, 3689, 3818], [3688, 3818, 3817], [3689, 3690, 3818], [3690, 3819, 3818], [3690, 3691, 3820], [3690, 3820, 3819], [3691, 3692, 3820], [3692, 3821, 3820], [3692, 3693, 3822], [3692, 3822, 3821], [3693, 3694, 3822], [3694, 3823, 3822], [3694, 3695, 3824], [3694, 3824, 3823], [3695, 3696, 3824], [3696, 3825, 3824], [3696, 3697, 3826], [3696, 3826, 3825], [3697, 3698, 3826], [3698, 3827, 3826], [3698, 3699, 3828], [3698, 3828, 3827], [3699, 3700, 3828], [3700, 3829, 3828], [3700, 3701, 3830], [3700, 3830, 3829], [3701, 3702, 3830], [3702, 3831, 3830], [3702, 3703, 3832], [3702, 3832, 3831], [3703, 3704, 3832], [3704, 3833, 3832], [3704, 3705, 3834], [3704, 3834, 3833], [3705, 3706, 3834], [3706, 3835, 3834], [3706, 3707, 3836], [3706, 3836, 3835], [3707, 3708, 3836], [3708, 3837, 3836], [3708, 3709, 3838], [3708, 3838, 3837], [3709, 3710, 3838], [3710, 3839, 3838], [3710, 3711, 3840], [3710, 3840, 3839], [3711, 3712, 3840], [3712, 3841, 3840], [3712, 3713, 3842], [3712, 3842, 3841], [3713, 3714, 3842], [3714, 3843, 3842], [3714, 3715, 3844], [3714, 3844, 3843], [3715, 3716, 3844], [3716, 3845, 3844], [3716, 3717, 3846], [3716, 3846, 3845], [3717, 3718, 3846], [3718, 3847, 3846], [3718, 3719, 3848], [3718, 3848, 3847], [3719, 3720, 3848], [3720, 3849, 3848], [3720, 3721, 3850], [3720, 3850, 3849], [3721, 3722, 3850], [3722, 3851, 3850], [3722, 3723, 3852], [3722, 3852, 3851], [3723, 3724, 3852], [3724, 3853, 3852], [3724, 3725, 3854], [3724, 3854, 3853], [3725, 3726, 3854], [3726, 3855, 3854], [3726, 3727, 3856], [3726, 3856, 3855], [3727, 3728, 3856], [3728, 3857, 3856], [3728, 3729, 3858], [3728, 3858, 3857], [3729, 3730, 3858], [3730, 3859, 3858], [3730, 3731, 3860], [3730, 3860, 3859], [3731, 3732, 3860], [3732, 3861, 3860], [3732, 3733, 3862], 
[3732, 3862, 3861], [3733, 3734, 3862], [3734, 3863, 3862], [3734, 3735, 3864], [3734, 3864, 3863], [3735, 3736, 3864], [3736, 3865, 3864], [3736, 3737, 3866], [3736, 3866, 3865], [3737, 3738, 3866], [3738, 3867, 3866], [3738, 3739, 3868], [3738, 3868, 3867], [3739, 3740, 3868], [3740, 3869, 3868], [3741, 3742, 3870], [3742, 3871, 3870], [3742, 3743, 3872], [3742, 3872, 3871], [3743, 3744, 3872], [3744, 3873, 3872], [3744, 3745, 3874], [3744, 3874, 3873], [3745, 3746, 3874], [3746, 3875, 3874], [3746, 3747, 3876], [3746, 3876, 3875], [3747, 3748, 3876], [3748, 3877, 3876], [3748, 3749, 3878], [3748, 3878, 3877], [3749, 3750, 3878], [3750, 3879, 3878], [3750, 3751, 3880], [3750, 3880, 3879], [3751, 3752, 3880], [3752, 3881, 3880], [3752, 3753, 3882], [3752, 3882, 3881], [3753, 3754, 3882], [3754, 3883, 3882], [3754, 3755, 3884], [3754, 3884, 3883], [3755, 3756, 3884], [3756, 3885, 3884], [3756, 3757, 3886], [3756, 3886, 3885], [3757, 3758, 3886], [3758, 3887, 3886], [3758, 3759, 3888], [3758, 3888, 3887], [3759, 3760, 3888], [3760, 3889, 3888], [3760, 3761, 3890], [3760, 3890, 3889], [3761, 3762, 3890], [3762, 3891, 3890], [3762, 3763, 3892], [3762, 3892, 3891], [3763, 3764, 3892], [3764, 3893, 3892], [3764, 3765, 3894], [3764, 3894, 3893], [3765, 3766, 3894], [3766, 3895, 3894], [3766, 3767, 3896], [3766, 3896, 3895], [3767, 3768, 3896], [3768, 3897, 3896], [3768, 3769, 3898], [3768, 3898, 3897], [3769, 3770, 3898], [3770, 3899, 3898], [3770, 3771, 3900], [3770, 3900, 3899], [3771, 3772, 3900], [3772, 3901, 3900], [3772, 3773, 3902], [3772, 3902, 3901], [3773, 3774, 3902], [3774, 3903, 3902], [3774, 3775, 3904], [3774, 3904, 3903], [3775, 3776, 3904], [3776, 3905, 3904], [3776, 3777, 3906], [3776, 3906, 3905], [3777, 3778, 3906], [3778, 3907, 3906], [3778, 3779, 3908], [3778, 3908, 3907], [3779, 3780, 3908], [3780, 3909, 3908], [3780, 3781, 3910], [3780, 3910, 3909], [3781, 3782, 3910], [3782, 3911, 3910], [3782, 3783, 3912], [3782, 3912, 3911], [3783, 3784, 3912], [3784, 3913, 3912], [3784, 3785, 3914], [3784, 3914, 3913], [3785, 3786, 3914], [3786, 3915, 3914], [3786, 3787, 3916], [3786, 3916, 3915], [3787, 3788, 3916], [3788, 3917, 3916], [3788, 3789, 3918], [3788, 3918, 3917], [3789, 3790, 3918], [3790, 3919, 3918], [3790, 3791, 3920], [3790, 3920, 3919], [3791, 3792, 3920], [3792, 3921, 3920], [3792, 3793, 3922], [3792, 3922, 3921], [3793, 3794, 3922], [3794, 3923, 3922], [3794, 3795, 3924], [3794, 3924, 3923], [3795, 3796, 3924], [3796, 3925, 3924], [3796, 3797, 3926], [3796, 3926, 3925], [3797, 3798, 3926], [3798, 3927, 3926], [3798, 3799, 3928], [3798, 3928, 3927], [3799, 3800, 3928], [3800, 3929, 3928], [3800, 3801, 3930], [3800, 3930, 3929], [3801, 3802, 3930], [3802, 3931, 3930], [3802, 3803, 3932], [3802, 3932, 3931], [3803, 3804, 3932], [3804, 3933, 3932], [3804, 3805, 3934], [3804, 3934, 3933], [3805, 3806, 3934], [3806, 3935, 3934], [3806, 3807, 3936], [3806, 3936, 3935], [3807, 3808, 3936], [3808, 3937, 3936], [3808, 3809, 3938], [3808, 3938, 3937], [3809, 3810, 3938], [3810, 3939, 3938], [3810, 3811, 3940], [3810, 3940, 3939], [3811, 3812, 3940], [3812, 3941, 3940], [3812, 3813, 3942], [3812, 3942, 3941], [3813, 3814, 3942], [3814, 3943, 3942], [3814, 3815, 3944], [3814, 3944, 3943], [3815, 3816, 3944], [3816, 3945, 3944], [3816, 3817, 3946], [3816, 3946, 3945], [3817, 3818, 3946], [3818, 3947, 3946], [3818, 3819, 3948], [3818, 3948, 3947], [3819, 3820, 3948], [3820, 3949, 3948], [3820, 3821, 3950], [3820, 3950, 3949], [3821, 3822, 3950], [3822, 3951, 3950], [3822, 3823, 
3952], [3822, 3952, 3951], [3823, 3824, 3952], [3824, 3953, 3952], [3824, 3825, 3954], [3824, 3954, 3953], [3825, 3826, 3954], [3826, 3955, 3954], [3826, 3827, 3956], [3826, 3956, 3955], [3827, 3828, 3956], [3828, 3957, 3956], [3828, 3829, 3958], [3828, 3958, 3957], [3829, 3830, 3958], [3830, 3959, 3958], [3830, 3831, 3960], [3830, 3960, 3959], [3831, 3832, 3960], [3832, 3961, 3960], [3832, 3833, 3962], [3832, 3962, 3961], [3833, 3834, 3962], [3834, 3963, 3962], [3834, 3835, 3964], [3834, 3964, 3963], [3835, 3836, 3964], [3836, 3965, 3964], [3836, 3837, 3966], [3836, 3966, 3965], [3837, 3838, 3966], [3838, 3967, 3966], [3838, 3839, 3968], [3838, 3968, 3967], [3839, 3840, 3968], [3840, 3969, 3968], [3840, 3841, 3970], [3840, 3970, 3969], [3841, 3842, 3970], [3842, 3971, 3970], [3842, 3843, 3972], [3842, 3972, 3971], [3843, 3844, 3972], [3844, 3973, 3972], [3844, 3845, 3974], [3844, 3974, 3973], [3845, 3846, 3974], [3846, 3975, 3974], [3846, 3847, 3976], [3846, 3976, 3975], [3847, 3848, 3976], [3848, 3977, 3976], [3848, 3849, 3978], [3848, 3978, 3977], [3849, 3850, 3978], [3850, 3979, 3978], [3850, 3851, 3980], [3850, 3980, 3979], [3851, 3852, 3980], [3852, 3981, 3980], [3852, 3853, 3982], [3852, 3982, 3981], [3853, 3854, 3982], [3854, 3983, 3982], [3854, 3855, 3984], [3854, 3984, 3983], [3855, 3856, 3984], [3856, 3985, 3984], [3856, 3857, 3986], [3856, 3986, 3985], [3857, 3858, 3986], [3858, 3987, 3986], [3858, 3859, 3988], [3858, 3988, 3987], [3859, 3860, 3988], [3860, 3989, 3988], [3860, 3861, 3990], [3860, 3990, 3989], [3861, 3862, 3990], [3862, 3991, 3990], [3862, 3863, 3992], [3862, 3992, 3991], [3863, 3864, 3992], [3864, 3993, 3992], [3864, 3865, 3994], [3864, 3994, 3993], [3865, 3866, 3994], [3866, 3995, 3994], [3866, 3867, 3996], [3866, 3996, 3995], [3867, 3868, 3996], [3868, 3997, 3996], [3868, 3869, 3998], [3868, 3998, 3997], [3870, 3871, 4000], [3870, 4000, 3999], [3871, 3872, 4000], [3872, 4001, 4000], [3872, 3873, 4002], [3872, 4002, 4001], [3873, 3874, 4002], [3874, 4003, 4002], [3874, 3875, 4004], [3874, 4004, 4003], [3875, 3876, 4004], [3876, 4005, 4004], [3876, 3877, 4006], [3876, 4006, 4005], [3877, 3878, 4006], [3878, 4007, 4006], [3878, 3879, 4008], [3878, 4008, 4007], [3879, 3880, 4008], [3880, 4009, 4008], [3880, 3881, 4010], [3880, 4010, 4009], [3881, 3882, 4010], [3882, 4011, 4010], [3882, 3883, 4012], [3882, 4012, 4011], [3883, 3884, 4012], [3884, 4013, 4012], [3884, 3885, 4014], [3884, 4014, 4013], [3885, 3886, 4014], [3886, 4015, 4014], [3886, 3887, 4016], [3886, 4016, 4015], [3887, 3888, 4016], [3888, 4017, 4016], [3888, 3889, 4018], [3888, 4018, 4017], [3889, 3890, 4018], [3890, 4019, 4018], [3890, 3891, 4020], [3890, 4020, 4019], [3891, 3892, 4020], [3892, 4021, 4020], [3892, 3893, 4022], [3892, 4022, 4021], [3893, 3894, 4022], [3894, 4023, 4022], [3894, 3895, 4024], [3894, 4024, 4023], [3895, 3896, 4024], [3896, 4025, 4024], [3896, 3897, 4026], [3896, 4026, 4025], [3897, 3898, 4026], [3898, 4027, 4026], [3898, 3899, 4028], [3898, 4028, 4027], [3899, 3900, 4028], [3900, 4029, 4028], [3900, 3901, 4030], [3900, 4030, 4029], [3901, 3902, 4030], [3902, 4031, 4030], [3902, 3903, 4032], [3902, 4032, 4031], [3903, 3904, 4032], [3904, 4033, 4032], [3904, 3905, 4034], [3904, 4034, 4033], [3905, 3906, 4034], [3906, 4035, 4034], [3906, 3907, 4036], [3906, 4036, 4035], [3907, 3908, 4036], [3908, 4037, 4036], [3908, 3909, 4038], [3908, 4038, 4037], [3909, 3910, 4038], [3910, 4039, 4038], [3910, 3911, 4040], [3910, 4040, 4039], [3911, 3912, 4040], [3912, 4041, 4040], [3912, 
3913, 4042], [3912, 4042, 4041], [3913, 3914, 4042], [3914, 4043, 4042], [3914, 3915, 4044], [3914, 4044, 4043], [3915, 3916, 4044], [3916, 4045, 4044], [3916, 3917, 4046], [3916, 4046, 4045], [3917, 3918, 4046], [3918, 4047, 4046], [3918, 3919, 4048], [3918, 4048, 4047], [3919, 3920, 4048], [3920, 4049, 4048], [3920, 3921, 4050], [3920, 4050, 4049], [3921, 3922, 4050], [3922, 4051, 4050], [3922, 3923, 4052], [3922, 4052, 4051], [3923, 3924, 4052], [3924, 4053, 4052], [3924, 3925, 4054], [3924, 4054, 4053], [3925, 3926, 4054], [3926, 4055, 4054], [3926, 3927, 4056], [3926, 4056, 4055], [3927, 3928, 4056], [3928, 4057, 4056], [3928, 3929, 4058], [3928, 4058, 4057], [3929, 3930, 4058], [3930, 4059, 4058], [3930, 3931, 4060], [3930, 4060, 4059], [3931, 3932, 4060], [3932, 4061, 4060], [3932, 3933, 4062], [3932, 4062, 4061], [3933, 3934, 4062], [3934, 4063, 4062], [3934, 3935, 4064], [3934, 4064, 4063], [3935, 3936, 4064], [3936, 4065, 4064], [3936, 3937, 4066], [3936, 4066, 4065], [3937, 3938, 4066], [3938, 4067, 4066], [3938, 3939, 4068], [3938, 4068, 4067], [3939, 3940, 4068], [3940, 4069, 4068], [3940, 3941, 4070], [3940, 4070, 4069], [3941, 3942, 4070], [3942, 4071, 4070], [3942, 3943, 4072], [3942, 4072, 4071], [3943, 3944, 4072], [3944, 4073, 4072], [3944, 3945, 4074], [3944, 4074, 4073], [3945, 3946, 4074], [3946, 4075, 4074], [3946, 3947, 4076], [3946, 4076, 4075], [3947, 3948, 4076], [3948, 4077, 4076], [3948, 3949, 4078], [3948, 4078, 4077], [3949, 3950, 4078], [3950, 4079, 4078], [3950, 3951, 4080], [3950, 4080, 4079], [3951, 3952, 4080], [3952, 4081, 4080], [3952, 3953, 4082], [3952, 4082, 4081], [3953, 3954, 4082], [3954, 4083, 4082], [3954, 3955, 4084], [3954, 4084, 4083], [3955, 3956, 4084], [3956, 4085, 4084], [3956, 3957, 4086], [3956, 4086, 4085], [3957, 3958, 4086], [3958, 4087, 4086], [3958, 3959, 4088], [3958, 4088, 4087], [3959, 3960, 4088], [3960, 4089, 4088], [3960, 3961, 4090], [3960, 4090, 4089], [3961, 3962, 4090], [3962, 4091, 4090], [3962, 3963, 4092], [3962, 4092, 4091], [3963, 3964, 4092], [3964, 4093, 4092], [3964, 3965, 4094], [3964, 4094, 4093], [3965, 3966, 4094], [3966, 4095, 4094], [3966, 3967, 4096], [3966, 4096, 4095], [3967, 3968, 4096], [3968, 4097, 4096], [3968, 3969, 4098], [3968, 4098, 4097], [3969, 3970, 4098], [3970, 4099, 4098], [3970, 3971, 4100], [3970, 4100, 4099], [3971, 3972, 4100], [3972, 4101, 4100], [3972, 3973, 4102], [3972, 4102, 4101], [3973, 3974, 4102], [3974, 4103, 4102], [3974, 3975, 4104], [3974, 4104, 4103], [3975, 3976, 4104], [3976, 4105, 4104], [3976, 3977, 4106], [3976, 4106, 4105], [3977, 3978, 4106], [3978, 4107, 4106], [3978, 3979, 4108], [3978, 4108, 4107], [3979, 3980, 4108], [3980, 4109, 4108], [3980, 3981, 4110], [3980, 4110, 4109], [3981, 3982, 4110], [3982, 4111, 4110], [3982, 3983, 4112], [3982, 4112, 4111], [3983, 3984, 4112], [3984, 4113, 4112], [3984, 3985, 4114], [3984, 4114, 4113], [3985, 3986, 4114], [3986, 4115, 4114], [3986, 3987, 4116], [3986, 4116, 4115], [3987, 3988, 4116], [3988, 4117, 4116], [3988, 3989, 4118], [3988, 4118, 4117], [3989, 3990, 4118], [3990, 4119, 4118], [3990, 3991, 4120], [3990, 4120, 4119], [3991, 3992, 4120], [3992, 4121, 4120], [3992, 3993, 4122], [3992, 4122, 4121], [3993, 3994, 4122], [3994, 4123, 4122], [3994, 3995, 4124], [3994, 4124, 4123], [3995, 3996, 4124], [3996, 4125, 4124], [3996, 3997, 4126], [3996, 4126, 4125], [3997, 3998, 4126], [3998, 4127, 4126], [3999, 4000, 4128], [4000, 4129, 4128], [4000, 4001, 4130], [4000, 4130, 4129], [4001, 4002, 4130], [4002, 4131, 4130], 
[4002, 4003, 4132], [4002, 4132, 4131], [4003, 4004, 4132], [4004, 4133, 4132], [4004, 4005, 4134], [4004, 4134, 4133], [4005, 4006, 4134], [4006, 4135, 4134], [4006, 4007, 4136], [4006, 4136, 4135], [4007, 4008, 4136], [4008, 4137, 4136], [4008, 4009, 4138], [4008, 4138, 4137], [4009, 4010, 4138], [4010, 4139, 4138], [4010, 4011, 4140], [4010, 4140, 4139], [4011, 4012, 4140], [4012, 4141, 4140], [4012, 4013, 4142], [4012, 4142, 4141], [4013, 4014, 4142], [4014, 4143, 4142], [4014, 4015, 4144], [4014, 4144, 4143], [4015, 4016, 4144], [4016, 4145, 4144], [4016, 4017, 4146], [4016, 4146, 4145], [4017, 4018, 4146], [4018, 4147, 4146], [4018, 4019, 4148], [4018, 4148, 4147], [4019, 4020, 4148], [4020, 4149, 4148], [4020, 4021, 4150], [4020, 4150, 4149], [4021, 4022, 4150], [4022, 4151, 4150], [4022, 4023, 4152], [4022, 4152, 4151], [4023, 4024, 4152], [4024, 4153, 4152], [4024, 4025, 4154], [4024, 4154, 4153], [4025, 4026, 4154], [4026, 4155, 4154], [4026, 4027, 4156], [4026, 4156, 4155], [4027, 4028, 4156], [4028, 4157, 4156], [4028, 4029, 4158], [4028, 4158, 4157], [4029, 4030, 4158], [4030, 4159, 4158], [4030, 4031, 4160], [4030, 4160, 4159], [4031, 4032, 4160], [4032, 4161, 4160], [4032, 4033, 4162], [4032, 4162, 4161], [4033, 4034, 4162], [4034, 4163, 4162], [4034, 4035, 4164], [4034, 4164, 4163], [4035, 4036, 4164], [4036, 4165, 4164], [4036, 4037, 4166], [4036, 4166, 4165], [4037, 4038, 4166], [4038, 4167, 4166], [4038, 4039, 4168], [4038, 4168, 4167], [4039, 4040, 4168], [4040, 4169, 4168], [4040, 4041, 4170], [4040, 4170, 4169], [4041, 4042, 4170], [4042, 4171, 4170], [4042, 4043, 4172], [4042, 4172, 4171], [4043, 4044, 4172], [4044, 4173, 4172], [4044, 4045, 4174], [4044, 4174, 4173], [4045, 4046, 4174], [4046, 4175, 4174], [4046, 4047, 4176], [4046, 4176, 4175], [4047, 4048, 4176], [4048, 4177, 4176], [4048, 4049, 4178], [4048, 4178, 4177], [4049, 4050, 4178], [4050, 4179, 4178], [4050, 4051, 4180], [4050, 4180, 4179], [4051, 4052, 4180], [4052, 4181, 4180], [4052, 4053, 4182], [4052, 4182, 4181], [4053, 4054, 4182], [4054, 4183, 4182], [4054, 4055, 4184], [4054, 4184, 4183], [4055, 4056, 4184], [4056, 4185, 4184], [4056, 4057, 4186], [4056, 4186, 4185], [4057, 4058, 4186], [4058, 4187, 4186], [4058, 4059, 4188], [4058, 4188, 4187], [4059, 4060, 4188], [4060, 4189, 4188], [4060, 4061, 4190], [4060, 4190, 4189], [4061, 4062, 4190], [4062, 4191, 4190], [4062, 4063, 4192], [4062, 4192, 4191], [4063, 4064, 4192], [4064, 4193, 4192], [4064, 4065, 4194], [4064, 4194, 4193], [4065, 4066, 4194], [4066, 4195, 4194], [4066, 4067, 4196], [4066, 4196, 4195], [4067, 4068, 4196], [4068, 4197, 4196], [4068, 4069, 4198], [4068, 4198, 4197], [4069, 4070, 4198], [4070, 4199, 4198], [4070, 4071, 4200], [4070, 4200, 4199], [4071, 4072, 4200], [4072, 4201, 4200], [4072, 4073, 4202], [4072, 4202, 4201], [4073, 4074, 4202], [4074, 4203, 4202], [4074, 4075, 4204], [4074, 4204, 4203], [4075, 4076, 4204], [4076, 4205, 4204], [4076, 4077, 4206], [4076, 4206, 4205], [4077, 4078, 4206], [4078, 4207, 4206], [4078, 4079, 4208], [4078, 4208, 4207], [4079, 4080, 4208], [4080, 4209, 4208], [4080, 4081, 4210], [4080, 4210, 4209], [4081, 4082, 4210], [4082, 4211, 4210], [4082, 4083, 4212], [4082, 4212, 4211], [4083, 4084, 4212], [4084, 4213, 4212], [4084, 4085, 4214], [4084, 4214, 4213], [4085, 4086, 4214], [4086, 4215, 4214], [4086, 4087, 4216], [4086, 4216, 4215], [4087, 4088, 4216], [4088, 4217, 4216], [4088, 4089, 4218], [4088, 4218, 4217], [4089, 4090, 4218], [4090, 4219, 4218], [4090, 4091, 4220], [4090, 4220, 
4219], [4091, 4092, 4220], [4092, 4221, 4220], [4092, 4093, 4222], [4092, 4222, 4221], [4093, 4094, 4222], [4094, 4223, 4222], [4094, 4095, 4224], [4094, 4224, 4223], [4095, 4096, 4224], [4096, 4225, 4224], [4096, 4097, 4226], [4096, 4226, 4225], [4097, 4098, 4226], [4098, 4227, 4226], [4098, 4099, 4228], [4098, 4228, 4227], [4099, 4100, 4228], [4100, 4229, 4228], [4100, 4101, 4230], [4100, 4230, 4229], [4101, 4102, 4230], [4102, 4231, 4230], [4102, 4103, 4232], [4102, 4232, 4231], [4103, 4104, 4232], [4104, 4233, 4232], [4104, 4105, 4234], [4104, 4234, 4233], [4105, 4106, 4234], [4106, 4235, 4234], [4106, 4107, 4236], [4106, 4236, 4235], [4107, 4108, 4236], [4108, 4237, 4236], [4108, 4109, 4238], [4108, 4238, 4237], [4109, 4110, 4238], [4110, 4239, 4238], [4110, 4111, 4240], [4110, 4240, 4239], [4111, 4112, 4240], [4112, 4241, 4240], [4112, 4113, 4242], [4112, 4242, 4241], [4113, 4114, 4242], [4114, 4243, 4242], [4114, 4115, 4244], [4114, 4244, 4243], [4115, 4116, 4244], [4116, 4245, 4244], [4116, 4117, 4246], [4116, 4246, 4245], [4117, 4118, 4246], [4118, 4247, 4246], [4118, 4119, 4248], [4118, 4248, 4247], [4119, 4120, 4248], [4120, 4249, 4248], [4120, 4121, 4250], [4120, 4250, 4249], [4121, 4122, 4250], [4122, 4251, 4250], [4122, 4123, 4252], [4122, 4252, 4251], [4123, 4124, 4252], [4124, 4253, 4252], [4124, 4125, 4254], [4124, 4254, 4253], [4125, 4126, 4254], [4126, 4255, 4254], [4126, 4127, 4256], [4126, 4256, 4255], [4128, 4129, 4258], [4128, 4258, 4257], [4129, 4130, 4258], [4130, 4259, 4258], [4130, 4131, 4260], [4130, 4260, 4259], [4131, 4132, 4260], [4132, 4261, 4260], [4132, 4133, 4262], [4132, 4262, 4261], [4133, 4134, 4262], [4134, 4263, 4262], [4134, 4135, 4264], [4134, 4264, 4263], [4135, 4136, 4264], [4136, 4265, 4264], [4136, 4137, 4266], [4136, 4266, 4265], [4137, 4138, 4266], [4138, 4267, 4266], [4138, 4139, 4268], [4138, 4268, 4267], [4139, 4140, 4268], [4140, 4269, 4268], [4140, 4141, 4270], [4140, 4270, 4269], [4141, 4142, 4270], [4142, 4271, 4270], [4142, 4143, 4272], [4142, 4272, 4271], [4143, 4144, 4272], [4144, 4273, 4272], [4144, 4145, 4274], [4144, 4274, 4273], [4145, 4146, 4274], [4146, 4275, 4274], [4146, 4147, 4276], [4146, 4276, 4275], [4147, 4148, 4276], [4148, 4277, 4276], [4148, 4149, 4278], [4148, 4278, 4277], [4149, 4150, 4278], [4150, 4279, 4278], [4150, 4151, 4280], [4150, 4280, 4279], [4151, 4152, 4280], [4152, 4281, 4280], [4152, 4153, 4282], [4152, 4282, 4281], [4153, 4154, 4282], [4154, 4283, 4282], [4154, 4155, 4284], [4154, 4284, 4283], [4155, 4156, 4284], [4156, 4285, 4284], [4156, 4157, 4286], [4156, 4286, 4285], [4157, 4158, 4286], [4158, 4287, 4286], [4158, 4159, 4288], [4158, 4288, 4287], [4159, 4160, 4288], [4160, 4289, 4288], [4160, 4161, 4290], [4160, 4290, 4289], [4161, 4162, 4290], [4162, 4291, 4290], [4162, 4163, 4292], [4162, 4292, 4291], [4163, 4164, 4292], [4164, 4293, 4292], [4164, 4165, 4294], [4164, 4294, 4293], [4165, 4166, 4294], [4166, 4295, 4294], [4166, 4167, 4296], [4166, 4296, 4295], [4167, 4168, 4296], [4168, 4297, 4296], [4168, 4169, 4298], [4168, 4298, 4297], [4169, 4170, 4298], [4170, 4299, 4298], [4170, 4171, 4300], [4170, 4300, 4299], [4171, 4172, 4300], [4172, 4301, 4300], [4172, 4173, 4302], [4172, 4302, 4301], [4173, 4174, 4302], [4174, 4303, 4302], [4174, 4175, 4304], [4174, 4304, 4303], [4175, 4176, 4304], [4176, 4305, 4304], [4176, 4177, 4306], [4176, 4306, 4305], [4177, 4178, 4306], [4178, 4307, 4306], [4178, 4179, 4308], [4178, 4308, 4307], [4179, 4180, 4308], [4180, 4309, 4308], [4180, 4181, 4310], [4180, 
4310, 4309], [4181, 4182, 4310], [4182, 4311, 4310], [4182, 4183, 4312], [4182, 4312, 4311], [4183, 4184, 4312], [4184, 4313, 4312], [4184, 4185, 4314], [4184, 4314, 4313], [4185, 4186, 4314], [4186, 4315, 4314], [4186, 4187, 4316], [4186, 4316, 4315], [4187, 4188, 4316], [4188, 4317, 4316], [4188, 4189, 4318], [4188, 4318, 4317], [4189, 4190, 4318], [4190, 4319, 4318], [4190, 4191, 4320], [4190, 4320, 4319], [4191, 4192, 4320], [4192, 4321, 4320], [4192, 4193, 4322], [4192, 4322, 4321], [4193, 4194, 4322], [4194, 4323, 4322], [4194, 4195, 4324], [4194, 4324, 4323], [4195, 4196, 4324], [4196, 4325, 4324], [4196, 4197, 4326], [4196, 4326, 4325], [4197, 4198, 4326], [4198, 4327, 4326], [4198, 4199, 4328], [4198, 4328, 4327], [4199, 4200, 4328], [4200, 4329, 4328], [4200, 4201, 4330], [4200, 4330, 4329], [4201, 4202, 4330], [4202, 4331, 4330], [4202, 4203, 4332], [4202, 4332, 4331], [4203, 4204, 4332], [4204, 4333, 4332], [4204, 4205, 4334], [4204, 4334, 4333], [4205, 4206, 4334], [4206, 4335, 4334], [4206, 4207, 4336], [4206, 4336, 4335], [4207, 4208, 4336], [4208, 4337, 4336], [4208, 4209, 4338], [4208, 4338, 4337], [4209, 4210, 4338], [4210, 4339, 4338], [4210, 4211, 4340], [4210, 4340, 4339], [4211, 4212, 4340], [4212, 4341, 4340], [4212, 4213, 4342], [4212, 4342, 4341], [4213, 4214, 4342], [4214, 4343, 4342], [4214, 4215, 4344], [4214, 4344, 4343], [4215, 4216, 4344], [4216, 4345, 4344], [4216, 4217, 4346], [4216, 4346, 4345], [4217, 4218, 4346], [4218, 4347, 4346], [4218, 4219, 4348], [4218, 4348, 4347], [4219, 4220, 4348], [4220, 4349, 4348], [4220, 4221, 4350], [4220, 4350, 4349], [4221, 4222, 4350], [4222, 4351, 4350], [4222, 4223, 4352], [4222, 4352, 4351], [4223, 4224, 4352], [4224, 4353, 4352], [4224, 4225, 4354], [4224, 4354, 4353], [4225, 4226, 4354], [4226, 4355, 4354], [4226, 4227, 4356], [4226, 4356, 4355], [4227, 4228, 4356], [4228, 4357, 4356], [4228, 4229, 4358], [4228, 4358, 4357], [4229, 4230, 4358], [4230, 4359, 4358], [4230, 4231, 4360], [4230, 4360, 4359], [4231, 4232, 4360], [4232, 4361, 4360], [4232, 4233, 4362], [4232, 4362, 4361], [4233, 4234, 4362], [4234, 4363, 4362], [4234, 4235, 4364], [4234, 4364, 4363], [4235, 4236, 4364], [4236, 4365, 4364], [4236, 4237, 4366], [4236, 4366, 4365], [4237, 4238, 4366], [4238, 4367, 4366], [4238, 4239, 4368], [4238, 4368, 4367], [4239, 4240, 4368], [4240, 4369, 4368], [4240, 4241, 4370], [4240, 4370, 4369], [4241, 4242, 4370], [4242, 4371, 4370], [4242, 4243, 4372], [4242, 4372, 4371], [4243, 4244, 4372], [4244, 4373, 4372], [4244, 4245, 4374], [4244, 4374, 4373], [4245, 4246, 4374], [4246, 4375, 4374], [4246, 4247, 4376], [4246, 4376, 4375], [4247, 4248, 4376], [4248, 4377, 4376], [4248, 4249, 4378], [4248, 4378, 4377], [4249, 4250, 4378], [4250, 4379, 4378], [4250, 4251, 4380], [4250, 4380, 4379], [4251, 4252, 4380], [4252, 4381, 4380], [4252, 4253, 4382], [4252, 4382, 4381], [4253, 4254, 4382], [4254, 4383, 4382], [4254, 4255, 4384], [4254, 4384, 4383], [4255, 4256, 4384], [4256, 4385, 4384], [4257, 4258, 4386], [4258, 4387, 4386], [4258, 4259, 4388], [4258, 4388, 4387], [4259, 4260, 4388], [4260, 4389, 4388], [4260, 4261, 4390], [4260, 4390, 4389], [4261, 4262, 4390], [4262, 4391, 4390], [4262, 4263, 4392], [4262, 4392, 4391], [4263, 4264, 4392], [4264, 4393, 4392], [4264, 4265, 4394], [4264, 4394, 4393], [4265, 4266, 4394], [4266, 4395, 4394], [4266, 4267, 4396], [4266, 4396, 4395], [4267, 4268, 4396], [4268, 4397, 4396], [4268, 4269, 4398], [4268, 4398, 4397], [4269, 4270, 4398], [4270, 4399, 4398], [4270, 4271, 4400], 
[4270, 4400, 4399], [4271, 4272, 4400], [4272, 4401, 4400], [4272, 4273, 4402], [4272, 4402, 4401], [4273, 4274, 4402], [4274, 4403, 4402], [4274, 4275, 4404], [4274, 4404, 4403], [4275, 4276, 4404], [4276, 4405, 4404], [4276, 4277, 4406], [4276, 4406, 4405], [4277, 4278, 4406], [4278, 4407, 4406], [4278, 4279, 4408], [4278, 4408, 4407], [4279, 4280, 4408], [4280, 4409, 4408], [4280, 4281, 4410], [4280, 4410, 4409], [4281, 4282, 4410], [4282, 4411, 4410], [4282, 4283, 4412], [4282, 4412, 4411], [4283, 4284, 4412], [4284, 4413, 4412], [4284, 4285, 4414], [4284, 4414, 4413], [4285, 4286, 4414], [4286, 4415, 4414], [4286, 4287, 4416], [4286, 4416, 4415], [4287, 4288, 4416], [4288, 4417, 4416], [4288, 4289, 4418], [4288, 4418, 4417], [4289, 4290, 4418], [4290, 4419, 4418], [4290, 4291, 4420], [4290, 4420, 4419], [4291, 4292, 4420], [4292, 4421, 4420], [4292, 4293, 4422], [4292, 4422, 4421], [4293, 4294, 4422], [4294, 4423, 4422], [4294, 4295, 4424], [4294, 4424, 4423], [4295, 4296, 4424], [4296, 4425, 4424], [4296, 4297, 4426], [4296, 4426, 4425], [4297, 4298, 4426], [4298, 4427, 4426], [4298, 4299, 4428], [4298, 4428, 4427], [4299, 4300, 4428], [4300, 4429, 4428], [4300, 4301, 4430], [4300, 4430, 4429], [4301, 4302, 4430], [4302, 4431, 4430], [4302, 4303, 4432], [4302, 4432, 4431], [4303, 4304, 4432], [4304, 4433, 4432], [4304, 4305, 4434], [4304, 4434, 4433], [4305, 4306, 4434], [4306, 4435, 4434], [4306, 4307, 4436], [4306, 4436, 4435], [4307, 4308, 4436], [4308, 4437, 4436], [4308, 4309, 4438], [4308, 4438, 4437], [4309, 4310, 4438], [4310, 4439, 4438], [4310, 4311, 4440], [4310, 4440, 4439], [4311, 4312, 4440], [4312, 4441, 4440], [4312, 4313, 4442], [4312, 4442, 4441], [4313, 4314, 4442], [4314, 4443, 4442], [4314, 4315, 4444], [4314, 4444, 4443], [4315, 4316, 4444], [4316, 4445, 4444], [4316, 4317, 4446], [4316, 4446, 4445], [4317, 4318, 4446], [4318, 4447, 4446], [4318, 4319, 4448], [4318, 4448, 4447], [4319, 4320, 4448], [4320, 4449, 4448], [4320, 4321, 4450], [4320, 4450, 4449], [4321, 4322, 4450], [4322, 4451, 4450], [4322, 4323, 4452], [4322, 4452, 4451], [4323, 4324, 4452], [4324, 4453, 4452], [4324, 4325, 4454], [4324, 4454, 4453], [4325, 4326, 4454], [4326, 4455, 4454], [4326, 4327, 4456], [4326, 4456, 4455], [4327, 4328, 4456], [4328, 4457, 4456], [4328, 4329, 4458], [4328, 4458, 4457], [4329, 4330, 4458], [4330, 4459, 4458], [4330, 4331, 4460], [4330, 4460, 4459], [4331, 4332, 4460], [4332, 4461, 4460], [4332, 4333, 4462], [4332, 4462, 4461], [4333, 4334, 4462], [4334, 4463, 4462], [4334, 4335, 4464], [4334, 4464, 4463], [4335, 4336, 4464], [4336, 4465, 4464], [4336, 4337, 4466], [4336, 4466, 4465], [4337, 4338, 4466], [4338, 4467, 4466], [4338, 4339, 4468], [4338, 4468, 4467], [4339, 4340, 4468], [4340, 4469, 4468], [4340, 4341, 4470], [4340, 4470, 4469], [4341, 4342, 4470], [4342, 4471, 4470], [4342, 4343, 4472], [4342, 4472, 4471], [4343, 4344, 4472], [4344, 4473, 4472], [4344, 4345, 4474], [4344, 4474, 4473], [4345, 4346, 4474], [4346, 4475, 4474], [4346, 4347, 4476], [4346, 4476, 4475], [4347, 4348, 4476], [4348, 4477, 4476], [4348, 4349, 4478], [4348, 4478, 4477], [4349, 4350, 4478], [4350, 4479, 4478], [4350, 4351, 4480], [4350, 4480, 4479], [4351, 4352, 4480], [4352, 4481, 4480], [4352, 4353, 4482], [4352, 4482, 4481], [4353, 4354, 4482], [4354, 4483, 4482], [4354, 4355, 4484], [4354, 4484, 4483], [4355, 4356, 4484], [4356, 4485, 4484], [4356, 4357, 4486], [4356, 4486, 4485], [4357, 4358, 4486], [4358, 4487, 4486], [4358, 4359, 4488], [4358, 4488, 4487], [4359, 4360, 
4488], [4360, 4489, 4488], [4360, 4361, 4490], [4360, 4490, 4489], [4361, 4362, 4490], [4362, 4491, 4490], [4362, 4363, 4492], [4362, 4492, 4491], [4363, 4364, 4492], [4364, 4493, 4492], [4364, 4365, 4494], [4364, 4494, 4493], [4365, 4366, 4494], [4366, 4495, 4494], [4366, 4367, 4496], [4366, 4496, 4495], [4367, 4368, 4496], [4368, 4497, 4496], [4368, 4369, 4498], [4368, 4498, 4497], [4369, 4370, 4498], [4370, 4499, 4498], [4370, 4371, 4500], [4370, 4500, 4499], [4371, 4372, 4500], [4372, 4501, 4500], [4372, 4373, 4502], [4372, 4502, 4501], [4373, 4374, 4502], [4374, 4503, 4502], [4374, 4375, 4504], [4374, 4504, 4503], [4375, 4376, 4504], [4376, 4505, 4504], [4376, 4377, 4506], [4376, 4506, 4505], [4377, 4378, 4506], [4378, 4507, 4506], [4378, 4379, 4508], [4378, 4508, 4507], [4379, 4380, 4508], [4380, 4509, 4508], [4380, 4381, 4510], [4380, 4510, 4509], [4381, 4382, 4510], [4382, 4511, 4510], [4382, 4383, 4512], [4382, 4512, 4511], [4383, 4384, 4512], [4384, 4513, 4512], [4384, 4385, 4514], [4384, 4514, 4513], [4386, 4387, 4516], [4386, 4516, 4515], [4387, 4388, 4516], [4388, 4517, 4516], [4388, 4389, 4518], [4388, 4518, 4517], [4389, 4390, 4518], [4390, 4519, 4518], [4390, 4391, 4520], [4390, 4520, 4519], [4391, 4392, 4520], [4392, 4521, 4520], [4392, 4393, 4522], [4392, 4522, 4521], [4393, 4394, 4522], [4394, 4523, 4522], [4394, 4395, 4524], [4394, 4524, 4523], [4395, 4396, 4524], [4396, 4525, 4524], [4396, 4397, 4526], [4396, 4526, 4525], [4397, 4398, 4526], [4398, 4527, 4526], [4398, 4399, 4528], [4398, 4528, 4527], [4399, 4400, 4528], [4400, 4529, 4528], [4400, 4401, 4530], [4400, 4530, 4529], [4401, 4402, 4530], [4402, 4531, 4530], [4402, 4403, 4532], [4402, 4532, 4531], [4403, 4404, 4532], [4404, 4533, 4532], [4404, 4405, 4534], [4404, 4534, 4533], [4405, 4406, 4534], [4406, 4535, 4534], [4406, 4407, 4536], [4406, 4536, 4535], [4407, 4408, 4536], [4408, 4537, 4536], [4408, 4409, 4538], [4408, 4538, 4537], [4409, 4410, 4538], [4410, 4539, 4538], [4410, 4411, 4540], [4410, 4540, 4539], [4411, 4412, 4540], [4412, 4541, 4540], [4412, 4413, 4542], [4412, 4542, 4541], [4413, 4414, 4542], [4414, 4543, 4542], [4414, 4415, 4544], [4414, 4544, 4543], [4415, 4416, 4544], [4416, 4545, 4544], [4416, 4417, 4546], [4416, 4546, 4545], [4417, 4418, 4546], [4418, 4547, 4546], [4418, 4419, 4548], [4418, 4548, 4547], [4419, 4420, 4548], [4420, 4549, 4548], [4420, 4421, 4550], [4420, 4550, 4549], [4421, 4422, 4550], [4422, 4551, 4550], [4422, 4423, 4552], [4422, 4552, 4551], [4423, 4424, 4552], [4424, 4553, 4552], [4424, 4425, 4554], [4424, 4554, 4553], [4425, 4426, 4554], [4426, 4555, 4554], [4426, 4427, 4556], [4426, 4556, 4555], [4427, 4428, 4556], [4428, 4557, 4556], [4428, 4429, 4558], [4428, 4558, 4557], [4429, 4430, 4558], [4430, 4559, 4558], [4430, 4431, 4560], [4430, 4560, 4559], [4431, 4432, 4560], [4432, 4561, 4560], [4432, 4433, 4562], [4432, 4562, 4561], [4433, 4434, 4562], [4434, 4563, 4562], [4434, 4435, 4564], [4434, 4564, 4563], [4435, 4436, 4564], [4436, 4565, 4564], [4436, 4437, 4566], [4436, 4566, 4565], [4437, 4438, 4566], [4438, 4567, 4566], [4438, 4439, 4568], [4438, 4568, 4567], [4439, 4440, 4568], [4440, 4569, 4568], [4440, 4441, 4570], [4440, 4570, 4569], [4441, 4442, 4570], [4442, 4571, 4570], [4442, 4443, 4572], [4442, 4572, 4571], [4443, 4444, 4572], [4444, 4573, 4572], [4444, 4445, 4574], [4444, 4574, 4573], [4445, 4446, 4574], [4446, 4575, 4574], [4446, 4447, 4576], [4446, 4576, 4575], [4447, 4448, 4576], [4448, 4577, 4576], [4448, 4449, 4578], [4448, 4578, 4577], [4449, 
4450, 4578], [4450, 4579, 4578], [4450, 4451, 4580], [4450, 4580, 4579], [4451, 4452, 4580], [4452, 4581, 4580], [4452, 4453, 4582], [4452, 4582, 4581], [4453, 4454, 4582], [4454, 4583, 4582], [4454, 4455, 4584], [4454, 4584, 4583], [4455, 4456, 4584], [4456, 4585, 4584], [4456, 4457, 4586], [4456, 4586, 4585], [4457, 4458, 4586], [4458, 4587, 4586], [4458, 4459, 4588], [4458, 4588, 4587], [4459, 4460, 4588], [4460, 4589, 4588], [4460, 4461, 4590], [4460, 4590, 4589], [4461, 4462, 4590], [4462, 4591, 4590], [4462, 4463, 4592], [4462, 4592, 4591], [4463, 4464, 4592], [4464, 4593, 4592], [4464, 4465, 4594], [4464, 4594, 4593], [4465, 4466, 4594], [4466, 4595, 4594], [4466, 4467, 4596], [4466, 4596, 4595], [4467, 4468, 4596], [4468, 4597, 4596], [4468, 4469, 4598], [4468, 4598, 4597], [4469, 4470, 4598], [4470, 4599, 4598], [4470, 4471, 4600], [4470, 4600, 4599], [4471, 4472, 4600], [4472, 4601, 4600], [4472, 4473, 4602], [4472, 4602, 4601], [4473, 4474, 4602], [4474, 4603, 4602], [4474, 4475, 4604], [4474, 4604, 4603], [4475, 4476, 4604], [4476, 4605, 4604], [4476, 4477, 4606], [4476, 4606, 4605], [4477, 4478, 4606], [4478, 4607, 4606], [4478, 4479, 4608], [4478, 4608, 4607], [4479, 4480, 4608], [4480, 4609, 4608], [4480, 4481, 4610], [4480, 4610, 4609], [4481, 4482, 4610], [4482, 4611, 4610], [4482, 4483, 4612], [4482, 4612, 4611], [4483, 4484, 4612], [4484, 4613, 4612], [4484, 4485, 4614], [4484, 4614, 4613], [4485, 4486, 4614], [4486, 4615, 4614], [4486, 4487, 4616], [4486, 4616, 4615], [4487, 4488, 4616], [4488, 4617, 4616], [4488, 4489, 4618], [4488, 4618, 4617], [4489, 4490, 4618], [4490, 4619, 4618], [4490, 4491, 4620], [4490, 4620, 4619], [4491, 4492, 4620], [4492, 4621, 4620], [4492, 4493, 4622], [4492, 4622, 4621], [4493, 4494, 4622], [4494, 4623, 4622], [4494, 4495, 4624], [4494, 4624, 4623], [4495, 4496, 4624], [4496, 4625, 4624], [4496, 4497, 4626], [4496, 4626, 4625], [4497, 4498, 4626], [4498, 4627, 4626], [4498, 4499, 4628], [4498, 4628, 4627], [4499, 4500, 4628], [4500, 4629, 4628], [4500, 4501, 4630], [4500, 4630, 4629], [4501, 4502, 4630], [4502, 4631, 4630], [4502, 4503, 4632], [4502, 4632, 4631], [4503, 4504, 4632], [4504, 4633, 4632], [4504, 4505, 4634], [4504, 4634, 4633], [4505, 4506, 4634], [4506, 4635, 4634], [4506, 4507, 4636], [4506, 4636, 4635], [4507, 4508, 4636], [4508, 4637, 4636], [4508, 4509, 4638], [4508, 4638, 4637], [4509, 4510, 4638], [4510, 4639, 4638], [4510, 4511, 4640], [4510, 4640, 4639], [4511, 4512, 4640], [4512, 4641, 4640], [4512, 4513, 4642], [4512, 4642, 4641], [4513, 4514, 4642], [4514, 4643, 4642], [4515, 4516, 4644], [4516, 4645, 4644], [4516, 4517, 4646], [4516, 4646, 4645], [4517, 4518, 4646], [4518, 4647, 4646], [4518, 4519, 4648], [4518, 4648, 4647], [4519, 4520, 4648], [4520, 4649, 4648], [4520, 4521, 4650], [4520, 4650, 4649], [4521, 4522, 4650], [4522, 4651, 4650], [4522, 4523, 4652], [4522, 4652, 4651], [4523, 4524, 4652], [4524, 4653, 4652], [4524, 4525, 4654], [4524, 4654, 4653], [4525, 4526, 4654], [4526, 4655, 4654], [4526, 4527, 4656], [4526, 4656, 4655], [4527, 4528, 4656], [4528, 4657, 4656], [4528, 4529, 4658], [4528, 4658, 4657], [4529, 4530, 4658], [4530, 4659, 4658], [4530, 4531, 4660], [4530, 4660, 4659], [4531, 4532, 4660], [4532, 4661, 4660], [4532, 4533, 4662], [4532, 4662, 4661], [4533, 4534, 4662], [4534, 4663, 4662], [4534, 4535, 4664], [4534, 4664, 4663], [4535, 4536, 4664], [4536, 4665, 4664], [4536, 4537, 4666], [4536, 4666, 4665], [4537, 4538, 4666], [4538, 4667, 4666], [4538, 4539, 4668], [4538, 4668, 4667], 
[4539, 4540, 4668], [4540, 4669, 4668], [4540, 4541, 4670], [4540, 4670, 4669], [4541, 4542, 4670], [4542, 4671, 4670], [4542, 4543, 4672], [4542, 4672, 4671], [4543, 4544, 4672], [4544, 4673, 4672], [4544, 4545, 4674], [4544, 4674, 4673], [4545, 4546, 4674], [4546, 4675, 4674], [4546, 4547, 4676], [4546, 4676, 4675], [4547, 4548, 4676], [4548, 4677, 4676], [4548, 4549, 4678], [4548, 4678, 4677], [4549, 4550, 4678], [4550, 4679, 4678], [4550, 4551, 4680], [4550, 4680, 4679], [4551, 4552, 4680], [4552, 4681, 4680], [4552, 4553, 4682], [4552, 4682, 4681], [4553, 4554, 4682], [4554, 4683, 4682], [4554, 4555, 4684], [4554, 4684, 4683], [4555, 4556, 4684], [4556, 4685, 4684], [4556, 4557, 4686], [4556, 4686, 4685], [4557, 4558, 4686], [4558, 4687, 4686], [4558, 4559, 4688], [4558, 4688, 4687], [4559, 4560, 4688], [4560, 4689, 4688], [4560, 4561, 4690], [4560, 4690, 4689], [4561, 4562, 4690], [4562, 4691, 4690], [4562, 4563, 4692], [4562, 4692, 4691], [4563, 4564, 4692], [4564, 4693, 4692], [4564, 4565, 4694], [4564, 4694, 4693], [4565, 4566, 4694], [4566, 4695, 4694], [4566, 4567, 4696], [4566, 4696, 4695], [4567, 4568, 4696], [4568, 4697, 4696], [4568, 4569, 4698], [4568, 4698, 4697], [4569, 4570, 4698], [4570, 4699, 4698], [4570, 4571, 4700], [4570, 4700, 4699], [4571, 4572, 4700], [4572, 4701, 4700], [4572, 4573, 4702], [4572, 4702, 4701], [4573, 4574, 4702], [4574, 4703, 4702], [4574, 4575, 4704], [4574, 4704, 4703], [4575, 4576, 4704], [4576, 4705, 4704], [4576, 4577, 4706], [4576, 4706, 4705], [4577, 4578, 4706], [4578, 4707, 4706], [4578, 4579, 4708], [4578, 4708, 4707], [4579, 4580, 4708], [4580, 4709, 4708], [4580, 4581, 4710], [4580, 4710, 4709], [4581, 4582, 4710], [4582, 4711, 4710], [4582, 4583, 4712], [4582, 4712, 4711], [4583, 4584, 4712], [4584, 4713, 4712], [4584, 4585, 4714], [4584, 4714, 4713], [4585, 4586, 4714], [4586, 4715, 4714], [4586, 4587, 4716], [4586, 4716, 4715], [4587, 4588, 4716], [4588, 4717, 4716], [4588, 4589, 4718], [4588, 4718, 4717], [4589, 4590, 4718], [4590, 4719, 4718], [4590, 4591, 4720], [4590, 4720, 4719], [4591, 4592, 4720], [4592, 4721, 4720], [4592, 4593, 4722], [4592, 4722, 4721], [4593, 4594, 4722], [4594, 4723, 4722], [4594, 4595, 4724], [4594, 4724, 4723], [4595, 4596, 4724], [4596, 4725, 4724], [4596, 4597, 4726], [4596, 4726, 4725], [4597, 4598, 4726], [4598, 4727, 4726], [4598, 4599, 4728], [4598, 4728, 4727], [4599, 4600, 4728], [4600, 4729, 4728], [4600, 4601, 4730], [4600, 4730, 4729], [4601, 4602, 4730], [4602, 4731, 4730], [4602, 4603, 4732], [4602, 4732, 4731], [4603, 4604, 4732], [4604, 4733, 4732], [4604, 4605, 4734], [4604, 4734, 4733], [4605, 4606, 4734], [4606, 4735, 4734], [4606, 4607, 4736], [4606, 4736, 4735], [4607, 4608, 4736], [4608, 4737, 4736], [4608, 4609, 4738], [4608, 4738, 4737], [4609, 4610, 4738], [4610, 4739, 4738], [4610, 4611, 4740], [4610, 4740, 4739], [4611, 4612, 4740], [4612, 4741, 4740], [4612, 4613, 4742], [4612, 4742, 4741], [4613, 4614, 4742], [4614, 4743, 4742], [4614, 4615, 4744], [4614, 4744, 4743], [4615, 4616, 4744], [4616, 4745, 4744], [4616, 4617, 4746], [4616, 4746, 4745], [4617, 4618, 4746], [4618, 4747, 4746], [4618, 4619, 4748], [4618, 4748, 4747], [4619, 4620, 4748], [4620, 4749, 4748], [4620, 4621, 4750], [4620, 4750, 4749], [4621, 4622, 4750], [4622, 4751, 4750], [4622, 4623, 4752], [4622, 4752, 4751], [4623, 4624, 4752], [4624, 4753, 4752], [4624, 4625, 4754], [4624, 4754, 4753], [4625, 4626, 4754], [4626, 4755, 4754], [4626, 4627, 4756], [4626, 4756, 4755], [4627, 4628, 4756], [4628, 4757, 
4756], [4628, 4629, 4758], [4628, 4758, 4757], [4629, 4630, 4758], [4630, 4759, 4758], [4630, 4631, 4760], [4630, 4760, 4759], [4631, 4632, 4760], [4632, 4761, 4760], [4632, 4633, 4762], [4632, 4762, 4761], [4633, 4634, 4762], [4634, 4763, 4762], [4634, 4635, 4764], [4634, 4764, 4763], [4635, 4636, 4764], [4636, 4765, 4764], [4636, 4637, 4766], [4636, 4766, 4765], [4637, 4638, 4766], [4638, 4767, 4766], [4638, 4639, 4768], [4638, 4768, 4767], [4639, 4640, 4768], [4640, 4769, 4768], [4640, 4641, 4770], [4640, 4770, 4769], [4641, 4642, 4770], [4642, 4771, 4770], [4642, 4643, 4772], [4642, 4772, 4771], [4644, 4645, 4774], [4644, 4774, 4773], [4645, 4646, 4774], [4646, 4775, 4774], [4646, 4647, 4776], [4646, 4776, 4775], [4647, 4648, 4776], [4648, 4777, 4776], [4648, 4649, 4778], [4648, 4778, 4777], [4649, 4650, 4778], [4650, 4779, 4778], [4650, 4651, 4780], [4650, 4780, 4779], [4651, 4652, 4780], [4652, 4781, 4780], [4652, 4653, 4782], [4652, 4782, 4781], [4653, 4654, 4782], [4654, 4783, 4782], [4654, 4655, 4784], [4654, 4784, 4783], [4655, 4656, 4784], [4656, 4785, 4784], [4656, 4657, 4786], [4656, 4786, 4785], [4657, 4658, 4786], [4658, 4787, 4786], [4658, 4659, 4788], [4658, 4788, 4787], [4659, 4660, 4788], [4660, 4789, 4788], [4660, 4661, 4790], [4660, 4790, 4789], [4661, 4662, 4790], [4662, 4791, 4790], [4662, 4663, 4792], [4662, 4792, 4791], [4663, 4664, 4792], [4664, 4793, 4792], [4664, 4665, 4794], [4664, 4794, 4793], [4665, 4666, 4794], [4666, 4795, 4794], [4666, 4667, 4796], [4666, 4796, 4795], [4667, 4668, 4796], [4668, 4797, 4796], [4668, 4669, 4798], [4668, 4798, 4797], [4669, 4670, 4798], [4670, 4799, 4798], [4670, 4671, 4800], [4670, 4800, 4799], [4671, 4672, 4800], [4672, 4801, 4800], [4672, 4673, 4802], [4672, 4802, 4801], [4673, 4674, 4802], [4674, 4803, 4802], [4674, 4675, 4804], [4674, 4804, 4803], [4675, 4676, 4804], [4676, 4805, 4804], [4676, 4677, 4806], [4676, 4806, 4805], [4677, 4678, 4806], [4678, 4807, 4806], [4678, 4679, 4808], [4678, 4808, 4807], [4679, 4680, 4808], [4680, 4809, 4808], [4680, 4681, 4810], [4680, 4810, 4809], [4681, 4682, 4810], [4682, 4811, 4810], [4682, 4683, 4812], [4682, 4812, 4811], [4683, 4684, 4812], [4684, 4813, 4812], [4684, 4685, 4814], [4684, 4814, 4813], [4685, 4686, 4814], [4686, 4815, 4814], [4686, 4687, 4816], [4686, 4816, 4815], [4687, 4688, 4816], [4688, 4817, 4816], [4688, 4689, 4818], [4688, 4818, 4817], [4689, 4690, 4818], [4690, 4819, 4818], [4690, 4691, 4820], [4690, 4820, 4819], [4691, 4692, 4820], [4692, 4821, 4820], [4692, 4693, 4822], [4692, 4822, 4821], [4693, 4694, 4822], [4694, 4823, 4822], [4694, 4695, 4824], [4694, 4824, 4823], [4695, 4696, 4824], [4696, 4825, 4824], [4696, 4697, 4826], [4696, 4826, 4825], [4697, 4698, 4826], [4698, 4827, 4826], [4698, 4699, 4828], [4698, 4828, 4827], [4699, 4700, 4828], [4700, 4829, 4828], [4700, 4701, 4830], [4700, 4830, 4829], [4701, 4702, 4830], [4702, 4831, 4830], [4702, 4703, 4832], [4702, 4832, 4831], [4703, 4704, 4832], [4704, 4833, 4832], [4704, 4705, 4834], [4704, 4834, 4833], [4705, 4706, 4834], [4706, 4835, 4834], [4706, 4707, 4836], [4706, 4836, 4835], [4707, 4708, 4836], [4708, 4837, 4836], [4708, 4709, 4838], [4708, 4838, 4837], [4709, 4710, 4838], [4710, 4839, 4838], [4710, 4711, 4840], [4710, 4840, 4839], [4711, 4712, 4840], [4712, 4841, 4840], [4712, 4713, 4842], [4712, 4842, 4841], [4713, 4714, 4842], [4714, 4843, 4842], [4714, 4715, 4844], [4714, 4844, 4843], [4715, 4716, 4844], [4716, 4845, 4844], [4716, 4717, 4846], [4716, 4846, 4845], [4717, 4718, 4846], [4718, 
4847, 4846], [4718, 4719, 4848], [4718, 4848, 4847], [4719, 4720, 4848], [4720, 4849, 4848], [4720, 4721, 4850], [4720, 4850, 4849], [4721, 4722, 4850], [4722, 4851, 4850], [4722, 4723, 4852], [4722, 4852, 4851], [4723, 4724, 4852], [4724, 4853, 4852], [4724, 4725, 4854], [4724, 4854, 4853], [4725, 4726, 4854], [4726, 4855, 4854], [4726, 4727, 4856], [4726, 4856, 4855], [4727, 4728, 4856], [4728, 4857, 4856], [4728, 4729, 4858], [4728, 4858, 4857], [4729, 4730, 4858], [4730, 4859, 4858], [4730, 4731, 4860], [4730, 4860, 4859], [4731, 4732, 4860], [4732, 4861, 4860], [4732, 4733, 4862], [4732, 4862, 4861], [4733, 4734, 4862], [4734, 4863, 4862], [4734, 4735, 4864], [4734, 4864, 4863], [4735, 4736, 4864], [4736, 4865, 4864], [4736, 4737, 4866], [4736, 4866, 4865], [4737, 4738, 4866], [4738, 4867, 4866], [4738, 4739, 4868], [4738, 4868, 4867], [4739, 4740, 4868], [4740, 4869, 4868], [4740, 4741, 4870], [4740, 4870, 4869], [4741, 4742, 4870], [4742, 4871, 4870], [4742, 4743, 4872], [4742, 4872, 4871], [4743, 4744, 4872], [4744, 4873, 4872], [4744, 4745, 4874], [4744, 4874, 4873], [4745, 4746, 4874], [4746, 4875, 4874], [4746, 4747, 4876], [4746, 4876, 4875], [4747, 4748, 4876], [4748, 4877, 4876], [4748, 4749, 4878], [4748, 4878, 4877], [4749, 4750, 4878], [4750, 4879, 4878], [4750, 4751, 4880], [4750, 4880, 4879], [4751, 4752, 4880], [4752, 4881, 4880], [4752, 4753, 4882], [4752, 4882, 4881], [4753, 4754, 4882], [4754, 4883, 4882], [4754, 4755, 4884], [4754, 4884, 4883], [4755, 4756, 4884], [4756, 4885, 4884], [4756, 4757, 4886], [4756, 4886, 4885], [4757, 4758, 4886], [4758, 4887, 4886], [4758, 4759, 4888], [4758, 4888, 4887], [4759, 4760, 4888], [4760, 4889, 4888], [4760, 4761, 4890], [4760, 4890, 4889], [4761, 4762, 4890], [4762, 4891, 4890], [4762, 4763, 4892], [4762, 4892, 4891], [4763, 4764, 4892], [4764, 4893, 4892], [4764, 4765, 4894], [4764, 4894, 4893], [4765, 4766, 4894], [4766, 4895, 4894], [4766, 4767, 4896], [4766, 4896, 4895], [4767, 4768, 4896], [4768, 4897, 4896], [4768, 4769, 4898], [4768, 4898, 4897], [4769, 4770, 4898], [4770, 4899, 4898], [4770, 4771, 4900], [4770, 4900, 4899], [4771, 4772, 4900], [4772, 4901, 4900], [4773, 4774, 4902], [4774, 4903, 4902], [4774, 4775, 4904], [4774, 4904, 4903], [4775, 4776, 4904], [4776, 4905, 4904], [4776, 4777, 4906], [4776, 4906, 4905], [4777, 4778, 4906], [4778, 4907, 4906], [4778, 4779, 4908], [4778, 4908, 4907], [4779, 4780, 4908], [4780, 4909, 4908], [4780, 4781, 4910], [4780, 4910, 4909], [4781, 4782, 4910], [4782, 4911, 4910], [4782, 4783, 4912], [4782, 4912, 4911], [4783, 4784, 4912], [4784, 4913, 4912], [4784, 4785, 4914], [4784, 4914, 4913], [4785, 4786, 4914], [4786, 4915, 4914], [4786, 4787, 4916], [4786, 4916, 4915], [4787, 4788, 4916], [4788, 4917, 4916], [4788, 4789, 4918], [4788, 4918, 4917], [4789, 4790, 4918], [4790, 4919, 4918], [4790, 4791, 4920], [4790, 4920, 4919], [4791, 4792, 4920], [4792, 4921, 4920], [4792, 4793, 4922], [4792, 4922, 4921], [4793, 4794, 4922], [4794, 4923, 4922], [4794, 4795, 4924], [4794, 4924, 4923], [4795, 4796, 4924], [4796, 4925, 4924], [4796, 4797, 4926], [4796, 4926, 4925], [4797, 4798, 4926], [4798, 4927, 4926], [4798, 4799, 4928], [4798, 4928, 4927], [4799, 4800, 4928], [4800, 4929, 4928], [4800, 4801, 4930], [4800, 4930, 4929], [4801, 4802, 4930], [4802, 4931, 4930], [4802, 4803, 4932], [4802, 4932, 4931], [4803, 4804, 4932], [4804, 4933, 4932], [4804, 4805, 4934], [4804, 4934, 4933], [4805, 4806, 4934], [4806, 4935, 4934], [4806, 4807, 4936], [4806, 4936, 4935], [4807, 4808, 4936], 
[4808, 4937, 4936], [4808, 4809, 4938], [4808, 4938, 4937], [4809, 4810, 4938], [4810, 4939, 4938], [4810, 4811, 4940], [4810, 4940, 4939], [4811, 4812, 4940], [4812, 4941, 4940], [4812, 4813, 4942], [4812, 4942, 4941], [4813, 4814, 4942], [4814, 4943, 4942], [4814, 4815, 4944], [4814, 4944, 4943], [4815, 4816, 4944], [4816, 4945, 4944], [4816, 4817, 4946], [4816, 4946, 4945], [4817, 4818, 4946], [4818, 4947, 4946], [4818, 4819, 4948], [4818, 4948, 4947], [4819, 4820, 4948], [4820, 4949, 4948], [4820, 4821, 4950], [4820, 4950, 4949], [4821, 4822, 4950], [4822, 4951, 4950], [4822, 4823, 4952], [4822, 4952, 4951], [4823, 4824, 4952], [4824, 4953, 4952], [4824, 4825, 4954], [4824, 4954, 4953], [4825, 4826, 4954], [4826, 4955, 4954], [4826, 4827, 4956], [4826, 4956, 4955], [4827, 4828, 4956], [4828, 4957, 4956], [4828, 4829, 4958], [4828, 4958, 4957], [4829, 4830, 4958], [4830, 4959, 4958], [4830, 4831, 4960], [4830, 4960, 4959], [4831, 4832, 4960], [4832, 4961, 4960], [4832, 4833, 4962], [4832, 4962, 4961], [4833, 4834, 4962], [4834, 4963, 4962], [4834, 4835, 4964], [4834, 4964, 4963], [4835, 4836, 4964], [4836, 4965, 4964], [4836, 4837, 4966], [4836, 4966, 4965], [4837, 4838, 4966], [4838, 4967, 4966], [4838, 4839, 4968], [4838, 4968, 4967], [4839, 4840, 4968], [4840, 4969, 4968], [4840, 4841, 4970], [4840, 4970, 4969], [4841, 4842, 4970], [4842, 4971, 4970], [4842, 4843, 4972], [4842, 4972, 4971], [4843, 4844, 4972], [4844, 4973, 4972], [4844, 4845, 4974], [4844, 4974, 4973], [4845, 4846, 4974], [4846, 4975, 4974], [4846, 4847, 4976], [4846, 4976, 4975], [4847, 4848, 4976], [4848, 4977, 4976], [4848, 4849, 4978], [4848, 4978, 4977], [4849, 4850, 4978], [4850, 4979, 4978], [4850, 4851, 4980], [4850, 4980, 4979], [4851, 4852, 4980], [4852, 4981, 4980], [4852, 4853, 4982], [4852, 4982, 4981], [4853, 4854, 4982], [4854, 4983, 4982], [4854, 4855, 4984], [4854, 4984, 4983], [4855, 4856, 4984], [4856, 4985, 4984], [4856, 4857, 4986], [4856, 4986, 4985], [4857, 4858, 4986], [4858, 4987, 4986], [4858, 4859, 4988], [4858, 4988, 4987], [4859, 4860, 4988], [4860, 4989, 4988], [4860, 4861, 4990], [4860, 4990, 4989], [4861, 4862, 4990], [4862, 4991, 4990], [4862, 4863, 4992], [4862, 4992, 4991], [4863, 4864, 4992], [4864, 4993, 4992], [4864, 4865, 4994], [4864, 4994, 4993], [4865, 4866, 4994], [4866, 4995, 4994], [4866, 4867, 4996], [4866, 4996, 4995], [4867, 4868, 4996], [4868, 4997, 4996], [4868, 4869, 4998], [4868, 4998, 4997], [4869, 4870, 4998], [4870, 4999, 4998], [4870, 4871, 5000], [4870, 5000, 4999], [4871, 4872, 5000], [4872, 5001, 5000], [4872, 4873, 5002], [4872, 5002, 5001], [4873, 4874, 5002], [4874, 5003, 5002], [4874, 4875, 5004], [4874, 5004, 5003], [4875, 4876, 5004], [4876, 5005, 5004], [4876, 4877, 5006], [4876, 5006, 5005], [4877, 4878, 5006], [4878, 5007, 5006], [4878, 4879, 5008], [4878, 5008, 5007], [4879, 4880, 5008], [4880, 5009, 5008], [4880, 4881, 5010], [4880, 5010, 5009], [4881, 4882, 5010], [4882, 5011, 5010], [4882, 4883, 5012], [4882, 5012, 5011], [4883, 4884, 5012], [4884, 5013, 5012], [4884, 4885, 5014], [4884, 5014, 5013], [4885, 4886, 5014], [4886, 5015, 5014], [4886, 4887, 5016], [4886, 5016, 5015], [4887, 4888, 5016], [4888, 5017, 5016], [4888, 4889, 5018], [4888, 5018, 5017], [4889, 4890, 5018], [4890, 5019, 5018], [4890, 4891, 5020], [4890, 5020, 5019], [4891, 4892, 5020], [4892, 5021, 5020], [4892, 4893, 5022], [4892, 5022, 5021], [4893, 4894, 5022], [4894, 5023, 5022], [4894, 4895, 5024], [4894, 5024, 5023], [4895, 4896, 5024], [4896, 5025, 5024], [4896, 4897, 
5026], [4896, 5026, 5025], [4897, 4898, 5026], [4898, 5027, 5026], [4898, 4899, 5028], [4898, 5028, 5027], [4899, 4900, 5028], [4900, 5029, 5028], [4900, 4901, 5030], [4900, 5030, 5029], [4902, 4903, 5032], [4902, 5032, 5031], [4903, 4904, 5032], [4904, 5033, 5032], [4904, 4905, 5034], [4904, 5034, 5033], [4905, 4906, 5034], [4906, 5035, 5034], [4906, 4907, 5036], [4906, 5036, 5035], [4907, 4908, 5036], [4908, 5037, 5036], [4908, 4909, 5038], [4908, 5038, 5037], [4909, 4910, 5038], [4910, 5039, 5038], [4910, 4911, 5040], [4910, 5040, 5039], [4911, 4912, 5040], [4912, 5041, 5040], [4912, 4913, 5042], [4912, 5042, 5041], [4913, 4914, 5042], [4914, 5043, 5042], [4914, 4915, 5044], [4914, 5044, 5043], [4915, 4916, 5044], [4916, 5045, 5044], [4916, 4917, 5046], [4916, 5046, 5045], [4917, 4918, 5046], [4918, 5047, 5046], [4918, 4919, 5048], [4918, 5048, 5047], [4919, 4920, 5048], [4920, 5049, 5048], [4920, 4921, 5050], [4920, 5050, 5049], [4921, 4922, 5050], [4922, 5051, 5050], [4922, 4923, 5052], [4922, 5052, 5051], [4923, 4924, 5052], [4924, 5053, 5052], [4924, 4925, 5054], [4924, 5054, 5053], [4925, 4926, 5054], [4926, 5055, 5054], [4926, 4927, 5056], [4926, 5056, 5055], [4927, 4928, 5056], [4928, 5057, 5056], [4928, 4929, 5058], [4928, 5058, 5057], [4929, 4930, 5058], [4930, 5059, 5058], [4930, 4931, 5060], [4930, 5060, 5059], [4931, 4932, 5060], [4932, 5061, 5060], [4932, 4933, 5062], [4932, 5062, 5061], [4933, 4934, 5062], [4934, 5063, 5062], [4934, 4935, 5064], [4934, 5064, 5063], [4935, 4936, 5064], [4936, 5065, 5064], [4936, 4937, 5066], [4936, 5066, 5065], [4937, 4938, 5066], [4938, 5067, 5066], [4938, 4939, 5068], [4938, 5068, 5067], [4939, 4940, 5068], [4940, 5069, 5068], [4940, 4941, 5070], [4940, 5070, 5069], [4941, 4942, 5070], [4942, 5071, 5070], [4942, 4943, 5072], [4942, 5072, 5071], [4943, 4944, 5072], [4944, 5073, 5072], [4944, 4945, 5074], [4944, 5074, 5073], [4945, 4946, 5074], [4946, 5075, 5074], [4946, 4947, 5076], [4946, 5076, 5075], [4947, 4948, 5076], [4948, 5077, 5076], [4948, 4949, 5078], [4948, 5078, 5077], [4949, 4950, 5078], [4950, 5079, 5078], [4950, 4951, 5080], [4950, 5080, 5079], [4951, 4952, 5080], [4952, 5081, 5080], [4952, 4953, 5082], [4952, 5082, 5081], [4953, 4954, 5082], [4954, 5083, 5082], [4954, 4955, 5084], [4954, 5084, 5083], [4955, 4956, 5084], [4956, 5085, 5084], [4956, 4957, 5086], [4956, 5086, 5085], [4957, 4958, 5086], [4958, 5087, 5086], [4958, 4959, 5088], [4958, 5088, 5087], [4959, 4960, 5088], [4960, 5089, 5088], [4960, 4961, 5090], [4960, 5090, 5089], [4961, 4962, 5090], [4962, 5091, 5090], [4962, 4963, 5092], [4962, 5092, 5091], [4963, 4964, 5092], [4964, 5093, 5092], [4964, 4965, 5094], [4964, 5094, 5093], [4965, 4966, 5094], [4966, 5095, 5094], [4966, 4967, 5096], [4966, 5096, 5095], [4967, 4968, 5096], [4968, 5097, 5096], [4968, 4969, 5098], [4968, 5098, 5097], [4969, 4970, 5098], [4970, 5099, 5098], [4970, 4971, 5100], [4970, 5100, 5099], [4971, 4972, 5100], [4972, 5101, 5100], [4972, 4973, 5102], [4972, 5102, 5101], [4973, 4974, 5102], [4974, 5103, 5102], [4974, 4975, 5104], [4974, 5104, 5103], [4975, 4976, 5104], [4976, 5105, 5104], [4976, 4977, 5106], [4976, 5106, 5105], [4977, 4978, 5106], [4978, 5107, 5106], [4978, 4979, 5108], [4978, 5108, 5107], [4979, 4980, 5108], [4980, 5109, 5108], [4980, 4981, 5110], [4980, 5110, 5109], [4981, 4982, 5110], [4982, 5111, 5110], [4982, 4983, 5112], [4982, 5112, 5111], [4983, 4984, 5112], [4984, 5113, 5112], [4984, 4985, 5114], [4984, 5114, 5113], [4985, 4986, 5114], [4986, 5115, 5114], [4986, 
4987, 5116], [4986, 5116, 5115], [4987, 4988, 5116], [4988, 5117, 5116], [4988, 4989, 5118], [4988, 5118, 5117], [4989, 4990, 5118], [4990, 5119, 5118], [4990, 4991, 5120], [4990, 5120, 5119], [4991, 4992, 5120], [4992, 5121, 5120], [4992, 4993, 5122], [4992, 5122, 5121], [4993, 4994, 5122], [4994, 5123, 5122], [4994, 4995, 5124], [4994, 5124, 5123], [4995, 4996, 5124], [4996, 5125, 5124], [4996, 4997, 5126], [4996, 5126, 5125], [4997, 4998, 5126], [4998, 5127, 5126], [4998, 4999, 5128], [4998, 5128, 5127], [4999, 5000, 5128], [5000, 5129, 5128], [5000, 5001, 5130], [5000, 5130, 5129], [5001, 5002, 5130], [5002, 5131, 5130], [5002, 5003, 5132], [5002, 5132, 5131], [5003, 5004, 5132], [5004, 5133, 5132], [5004, 5005, 5134], [5004, 5134, 5133], [5005, 5006, 5134], [5006, 5135, 5134], [5006, 5007, 5136], [5006, 5136, 5135], [5007, 5008, 5136], [5008, 5137, 5136], [5008, 5009, 5138], [5008, 5138, 5137], [5009, 5010, 5138], [5010, 5139, 5138], [5010, 5011, 5140], [5010, 5140, 5139], [5011, 5012, 5140], [5012, 5141, 5140], [5012, 5013, 5142], [5012, 5142, 5141], [5013, 5014, 5142], [5014, 5143, 5142], [5014, 5015, 5144], [5014, 5144, 5143], [5015, 5016, 5144], [5016, 5145, 5144], [5016, 5017, 5146], [5016, 5146, 5145], [5017, 5018, 5146], [5018, 5147, 5146], [5018, 5019, 5148], [5018, 5148, 5147], [5019, 5020, 5148], [5020, 5149, 5148], [5020, 5021, 5150], [5020, 5150, 5149], [5021, 5022, 5150], [5022, 5151, 5150], [5022, 5023, 5152], [5022, 5152, 5151], [5023, 5024, 5152], [5024, 5153, 5152], [5024, 5025, 5154], [5024, 5154, 5153], [5025, 5026, 5154], [5026, 5155, 5154], [5026, 5027, 5156], [5026, 5156, 5155], [5027, 5028, 5156], [5028, 5157, 5156], [5028, 5029, 5158], [5028, 5158, 5157], [5029, 5030, 5158], [5030, 5159, 5158], [5031, 5032, 5160], [5032, 5161, 5160], [5032, 5033, 5162], [5032, 5162, 5161], [5033, 5034, 5162], [5034, 5163, 5162], [5034, 5035, 5164], [5034, 5164, 5163], [5035, 5036, 5164], [5036, 5165, 5164], [5036, 5037, 5166], [5036, 5166, 5165], [5037, 5038, 5166], [5038, 5167, 5166], [5038, 5039, 5168], [5038, 5168, 5167], [5039, 5040, 5168], [5040, 5169, 5168], [5040, 5041, 5170], [5040, 5170, 5169], [5041, 5042, 5170], [5042, 5171, 5170], [5042, 5043, 5172], [5042, 5172, 5171], [5043, 5044, 5172], [5044, 5173, 5172], [5044, 5045, 5174], [5044, 5174, 5173], [5045, 5046, 5174], [5046, 5175, 5174], [5046, 5047, 5176], [5046, 5176, 5175], [5047, 5048, 5176], [5048, 5177, 5176], [5048, 5049, 5178], [5048, 5178, 5177], [5049, 5050, 5178], [5050, 5179, 5178], [5050, 5051, 5180], [5050, 5180, 5179], [5051, 5052, 5180], [5052, 5181, 5180], [5052, 5053, 5182], [5052, 5182, 5181], [5053, 5054, 5182], [5054, 5183, 5182], [5054, 5055, 5184], [5054, 5184, 5183], [5055, 5056, 5184], [5056, 5185, 5184], [5056, 5057, 5186], [5056, 5186, 5185], [5057, 5058, 5186], [5058, 5187, 5186], [5058, 5059, 5188], [5058, 5188, 5187], [5059, 5060, 5188], [5060, 5189, 5188], [5060, 5061, 5190], [5060, 5190, 5189], [5061, 5062, 5190], [5062, 5191, 5190], [5062, 5063, 5192], [5062, 5192, 5191], [5063, 5064, 5192], [5064, 5193, 5192], [5064, 5065, 5194], [5064, 5194, 5193], [5065, 5066, 5194], [5066, 5195, 5194], [5066, 5067, 5196], [5066, 5196, 5195], [5067, 5068, 5196], [5068, 5197, 5196], [5068, 5069, 5198], [5068, 5198, 5197], [5069, 5070, 5198], [5070, 5199, 5198], [5070, 5071, 5200], [5070, 5200, 5199], [5071, 5072, 5200], [5072, 5201, 5200], [5072, 5073, 5202], [5072, 5202, 5201], [5073, 5074, 5202], [5074, 5203, 5202], [5074, 5075, 5204], [5074, 5204, 5203], [5075, 5076, 5204], [5076, 5205, 5204], 
[5076, 5077, 5206], [5076, 5206, 5205], [5077, 5078, 5206], [5078, 5207, 5206], [5078, 5079, 5208], [5078, 5208, 5207], [5079, 5080, 5208], [5080, 5209, 5208], [5080, 5081, 5210], [5080, 5210, 5209], [5081, 5082, 5210], [5082, 5211, 5210], [5082, 5083, 5212], [5082, 5212, 5211], [5083, 5084, 5212], [5084, 5213, 5212], [5084, 5085, 5214], [5084, 5214, 5213], [5085, 5086, 5214], [5086, 5215, 5214], [5086, 5087, 5216], [5086, 5216, 5215], [5087, 5088, 5216], [5088, 5217, 5216], [5088, 5089, 5218], [5088, 5218, 5217], [5089, 5090, 5218], [5090, 5219, 5218], [5090, 5091, 5220], [5090, 5220, 5219], [5091, 5092, 5220], [5092, 5221, 5220], [5092, 5093, 5222], [5092, 5222, 5221], [5093, 5094, 5222], [5094, 5223, 5222], [5094, 5095, 5224], [5094, 5224, 5223], [5095, 5096, 5224], [5096, 5225, 5224], [5096, 5097, 5226], [5096, 5226, 5225], [5097, 5098, 5226], [5098, 5227, 5226], [5098, 5099, 5228], [5098, 5228, 5227], [5099, 5100, 5228], [5100, 5229, 5228], [5100, 5101, 5230], [5100, 5230, 5229], [5101, 5102, 5230], [5102, 5231, 5230], [5102, 5103, 5232], [5102, 5232, 5231], [5103, 5104, 5232], [5104, 5233, 5232], [5104, 5105, 5234], [5104, 5234, 5233], [5105, 5106, 5234], [5106, 5235, 5234], [5106, 5107, 5236], [5106, 5236, 5235], [5107, 5108, 5236], [5108, 5237, 5236], [5108, 5109, 5238], [5108, 5238, 5237], [5109, 5110, 5238], [5110, 5239, 5238], [5110, 5111, 5240], [5110, 5240, 5239], [5111, 5112, 5240], [5112, 5241, 5240], [5112, 5113, 5242], [5112, 5242, 5241], [5113, 5114, 5242], [5114, 5243, 5242], [5114, 5115, 5244], [5114, 5244, 5243], [5115, 5116, 5244], [5116, 5245, 5244], [5116, 5117, 5246], [5116, 5246, 5245], [5117, 5118, 5246], [5118, 5247, 5246], [5118, 5119, 5248], [5118, 5248, 5247], [5119, 5120, 5248], [5120, 5249, 5248], [5120, 5121, 5250], [5120, 5250, 5249], [5121, 5122, 5250], [5122, 5251, 5250], [5122, 5123, 5252], [5122, 5252, 5251], [5123, 5124, 5252], [5124, 5253, 5252], [5124, 5125, 5254], [5124, 5254, 5253], [5125, 5126, 5254], [5126, 5255, 5254], [5126, 5127, 5256], [5126, 5256, 5255], [5127, 5128, 5256], [5128, 5257, 5256], [5128, 5129, 5258], [5128, 5258, 5257], [5129, 5130, 5258], [5130, 5259, 5258], [5130, 5131, 5260], [5130, 5260, 5259], [5131, 5132, 5260], [5132, 5261, 5260], [5132, 5133, 5262], [5132, 5262, 5261], [5133, 5134, 5262], [5134, 5263, 5262], [5134, 5135, 5264], [5134, 5264, 5263], [5135, 5136, 5264], [5136, 5265, 5264], [5136, 5137, 5266], [5136, 5266, 5265], [5137, 5138, 5266], [5138, 5267, 5266], [5138, 5139, 5268], [5138, 5268, 5267], [5139, 5140, 5268], [5140, 5269, 5268], [5140, 5141, 5270], [5140, 5270, 5269], [5141, 5142, 5270], [5142, 5271, 5270], [5142, 5143, 5272], [5142, 5272, 5271], [5143, 5144, 5272], [5144, 5273, 5272], [5144, 5145, 5274], [5144, 5274, 5273], [5145, 5146, 5274], [5146, 5275, 5274], [5146, 5147, 5276], [5146, 5276, 5275], [5147, 5148, 5276], [5148, 5277, 5276], [5148, 5149, 5278], [5148, 5278, 5277], [5149, 5150, 5278], [5150, 5279, 5278], [5150, 5151, 5280], [5150, 5280, 5279], [5151, 5152, 5280], [5152, 5281, 5280], [5152, 5153, 5282], [5152, 5282, 5281], [5153, 5154, 5282], [5154, 5283, 5282], [5154, 5155, 5284], [5154, 5284, 5283], [5155, 5156, 5284], [5156, 5285, 5284], [5156, 5157, 5286], [5156, 5286, 5285], [5157, 5158, 5286], [5158, 5287, 5286], [5158, 5159, 5288], [5158, 5288, 5287], [5160, 5161, 5290], [5160, 5290, 5289], [5161, 5162, 5290], [5162, 5291, 5290], [5162, 5163, 5292], [5162, 5292, 5291], [5163, 5164, 5292], [5164, 5293, 5292], [5164, 5165, 5294], [5164, 5294, 5293], [5165, 5166, 5294], [5166, 5295, 
5294], [5166, 5167, 5296], [5166, 5296, 5295], [5167, 5168, 5296], [5168, 5297, 5296], [5168, 5169, 5298], [5168, 5298, 5297], [5169, 5170, 5298], [5170, 5299, 5298], [5170, 5171, 5300], [5170, 5300, 5299], [5171, 5172, 5300], [5172, 5301, 5300], [5172, 5173, 5302], [5172, 5302, 5301], [5173, 5174, 5302], [5174, 5303, 5302], [5174, 5175, 5304], [5174, 5304, 5303], [5175, 5176, 5304], [5176, 5305, 5304], [5176, 5177, 5306], [5176, 5306, 5305], [5177, 5178, 5306], [5178, 5307, 5306], [5178, 5179, 5308], [5178, 5308, 5307], [5179, 5180, 5308], [5180, 5309, 5308], [5180, 5181, 5310], [5180, 5310, 5309], [5181, 5182, 5310], [5182, 5311, 5310], [5182, 5183, 5312], [5182, 5312, 5311], [5183, 5184, 5312], [5184, 5313, 5312], [5184, 5185, 5314], [5184, 5314, 5313], [5185, 5186, 5314], [5186, 5315, 5314], [5186, 5187, 5316], [5186, 5316, 5315], [5187, 5188, 5316], [5188, 5317, 5316], [5188, 5189, 5318], [5188, 5318, 5317], [5189, 5190, 5318], [5190, 5319, 5318], [5190, 5191, 5320], [5190, 5320, 5319], [5191, 5192, 5320], [5192, 5321, 5320], [5192, 5193, 5322], [5192, 5322, 5321], [5193, 5194, 5322], [5194, 5323, 5322], [5194, 5195, 5324], [5194, 5324, 5323], [5195, 5196, 5324], [5196, 5325, 5324], [5196, 5197, 5326], [5196, 5326, 5325], [5197, 5198, 5326], [5198, 5327, 5326], [5198, 5199, 5328], [5198, 5328, 5327], [5199, 5200, 5328], [5200, 5329, 5328], [5200, 5201, 5330], [5200, 5330, 5329], [5201, 5202, 5330], [5202, 5331, 5330], [5202, 5203, 5332], [5202, 5332, 5331], [5203, 5204, 5332], [5204, 5333, 5332], [5204, 5205, 5334], [5204, 5334, 5333], [5205, 5206, 5334], [5206, 5335, 5334], [5206, 5207, 5336], [5206, 5336, 5335], [5207, 5208, 5336], [5208, 5337, 5336], [5208, 5209, 5338], [5208, 5338, 5337], [5209, 5210, 5338], [5210, 5339, 5338], [5210, 5211, 5340], [5210, 5340, 5339], [5211, 5212, 5340], [5212, 5341, 5340], [5212, 5213, 5342], [5212, 5342, 5341], [5213, 5214, 5342], [5214, 5343, 5342], [5214, 5215, 5344], [5214, 5344, 5343], [5215, 5216, 5344], [5216, 5345, 5344], [5216, 5217, 5346], [5216, 5346, 5345], [5217, 5218, 5346], [5218, 5347, 5346], [5218, 5219, 5348], [5218, 5348, 5347], [5219, 5220, 5348], [5220, 5349, 5348], [5220, 5221, 5350], [5220, 5350, 5349], [5221, 5222, 5350], [5222, 5351, 5350], [5222, 5223, 5352], [5222, 5352, 5351], [5223, 5224, 5352], [5224, 5353, 5352], [5224, 5225, 5354], [5224, 5354, 5353], [5225, 5226, 5354], [5226, 5355, 5354], [5226, 5227, 5356], [5226, 5356, 5355], [5227, 5228, 5356], [5228, 5357, 5356], [5228, 5229, 5358], [5228, 5358, 5357], [5229, 5230, 5358], [5230, 5359, 5358], [5230, 5231, 5360], [5230, 5360, 5359], [5231, 5232, 5360], [5232, 5361, 5360], [5232, 5233, 5362], [5232, 5362, 5361], [5233, 5234, 5362], [5234, 5363, 5362], [5234, 5235, 5364], [5234, 5364, 5363], [5235, 5236, 5364], [5236, 5365, 5364], [5236, 5237, 5366], [5236, 5366, 5365], [5237, 5238, 5366], [5238, 5367, 5366], [5238, 5239, 5368], [5238, 5368, 5367], [5239, 5240, 5368], [5240, 5369, 5368], [5240, 5241, 5370], [5240, 5370, 5369], [5241, 5242, 5370], [5242, 5371, 5370], [5242, 5243, 5372], [5242, 5372, 5371], [5243, 5244, 5372], [5244, 5373, 5372], [5244, 5245, 5374], [5244, 5374, 5373], [5245, 5246, 5374], [5246, 5375, 5374], [5246, 5247, 5376], [5246, 5376, 5375], [5247, 5248, 5376], [5248, 5377, 5376], [5248, 5249, 5378], [5248, 5378, 5377], [5249, 5250, 5378], [5250, 5379, 5378], [5250, 5251, 5380], [5250, 5380, 5379], [5251, 5252, 5380], [5252, 5381, 5380], [5252, 5253, 5382], [5252, 5382, 5381], [5253, 5254, 5382], [5254, 5383, 5382], [5254, 5255, 5384], [5254, 
5384, 5383], [5255, 5256, 5384], [5256, 5385, 5384], [5256, 5257, 5386], [5256, 5386, 5385], [5257, 5258, 5386], [5258, 5387, 5386], [5258, 5259, 5388], [5258, 5388, 5387], [5259, 5260, 5388], [5260, 5389, 5388], [5260, 5261, 5390], [5260, 5390, 5389], [5261, 5262, 5390], [5262, 5391, 5390], [5262, 5263, 5392], [5262, 5392, 5391], [5263, 5264, 5392], [5264, 5393, 5392], [5264, 5265, 5394], [5264, 5394, 5393], [5265, 5266, 5394], [5266, 5395, 5394], [5266, 5267, 5396], [5266, 5396, 5395], [5267, 5268, 5396], [5268, 5397, 5396], [5268, 5269, 5398], [5268, 5398, 5397], [5269, 5270, 5398], [5270, 5399, 5398], [5270, 5271, 5400], [5270, 5400, 5399], [5271, 5272, 5400], [5272, 5401, 5400], [5272, 5273, 5402], [5272, 5402, 5401], [5273, 5274, 5402], [5274, 5403, 5402], [5274, 5275, 5404], [5274, 5404, 5403], [5275, 5276, 5404], [5276, 5405, 5404], [5276, 5277, 5406], [5276, 5406, 5405], [5277, 5278, 5406], [5278, 5407, 5406], [5278, 5279, 5408], [5278, 5408, 5407], [5279, 5280, 5408], [5280, 5409, 5408], [5280, 5281, 5410], [5280, 5410, 5409], [5281, 5282, 5410], [5282, 5411, 5410], [5282, 5283, 5412], [5282, 5412, 5411], [5283, 5284, 5412], [5284, 5413, 5412], [5284, 5285, 5414], [5284, 5414, 5413], [5285, 5286, 5414], [5286, 5415, 5414], [5286, 5287, 5416], [5286, 5416, 5415], [5287, 5288, 5416], [5288, 5417, 5416], [5289, 5290, 5418], [5290, 5419, 5418], [5290, 5291, 5420], [5290, 5420, 5419], [5291, 5292, 5420], [5292, 5421, 5420], [5292, 5293, 5422], [5292, 5422, 5421], [5293, 5294, 5422], [5294, 5423, 5422], [5294, 5295, 5424], [5294, 5424, 5423], [5295, 5296, 5424], [5296, 5425, 5424], [5296, 5297, 5426], [5296, 5426, 5425], [5297, 5298, 5426], [5298, 5427, 5426], [5298, 5299, 5428], [5298, 5428, 5427], [5299, 5300, 5428], [5300, 5429, 5428], [5300, 5301, 5430], [5300, 5430, 5429], [5301, 5302, 5430], [5302, 5431, 5430], [5302, 5303, 5432], [5302, 5432, 5431], [5303, 5304, 5432], [5304, 5433, 5432], [5304, 5305, 5434], [5304, 5434, 5433], [5305, 5306, 5434], [5306, 5435, 5434], [5306, 5307, 5436], [5306, 5436, 5435], [5307, 5308, 5436], [5308, 5437, 5436], [5308, 5309, 5438], [5308, 5438, 5437], [5309, 5310, 5438], [5310, 5439, 5438], [5310, 5311, 5440], [5310, 5440, 5439], [5311, 5312, 5440], [5312, 5441, 5440], [5312, 5313, 5442], [5312, 5442, 5441], [5313, 5314, 5442], [5314, 5443, 5442], [5314, 5315, 5444], [5314, 5444, 5443], [5315, 5316, 5444], [5316, 5445, 5444], [5316, 5317, 5446], [5316, 5446, 5445], [5317, 5318, 5446], [5318, 5447, 5446], [5318, 5319, 5448], [5318, 5448, 5447], [5319, 5320, 5448], [5320, 5449, 5448], [5320, 5321, 5450], [5320, 5450, 5449], [5321, 5322, 5450], [5322, 5451, 5450], [5322, 5323, 5452], [5322, 5452, 5451], [5323, 5324, 5452], [5324, 5453, 5452], [5324, 5325, 5454], [5324, 5454, 5453], [5325, 5326, 5454], [5326, 5455, 5454], [5326, 5327, 5456], [5326, 5456, 5455], [5327, 5328, 5456], [5328, 5457, 5456], [5328, 5329, 5458], [5328, 5458, 5457], [5329, 5330, 5458], [5330, 5459, 5458], [5330, 5331, 5460], [5330, 5460, 5459], [5331, 5332, 5460], [5332, 5461, 5460], [5332, 5333, 5462], [5332, 5462, 5461], [5333, 5334, 5462], [5334, 5463, 5462], [5334, 5335, 5464], [5334, 5464, 5463], [5335, 5336, 5464], [5336, 5465, 5464], [5336, 5337, 5466], [5336, 5466, 5465], [5337, 5338, 5466], [5338, 5467, 5466], [5338, 5339, 5468], [5338, 5468, 5467], [5339, 5340, 5468], [5340, 5469, 5468], [5340, 5341, 5470], [5340, 5470, 5469], [5341, 5342, 5470], [5342, 5471, 5470], [5342, 5343, 5472], [5342, 5472, 5471], [5343, 5344, 5472], [5344, 5473, 5472], [5344, 5345, 5474], 
[5344, 5474, 5473], [5345, 5346, 5474], [5346, 5475, 5474], [5346, 5347, 5476], [5346, 5476, 5475], [5347, 5348, 5476], [5348, 5477, 5476], [5348, 5349, 5478], [5348, 5478, 5477], [5349, 5350, 5478], [5350, 5479, 5478], [5350, 5351, 5480], [5350, 5480, 5479], [5351, 5352, 5480], [5352, 5481, 5480], [5352, 5353, 5482], [5352, 5482, 5481], [5353, 5354, 5482], [5354, 5483, 5482], [5354, 5355, 5484], [5354, 5484, 5483], [5355, 5356, 5484], [5356, 5485, 5484], [5356, 5357, 5486], [5356, 5486, 5485], [5357, 5358, 5486], [5358, 5487, 5486], [5358, 5359, 5488], [5358, 5488, 5487], [5359, 5360, 5488], [5360, 5489, 5488], [5360, 5361, 5490], [5360, 5490, 5489], [5361, 5362, 5490], [5362, 5491, 5490], [5362, 5363, 5492], [5362, 5492, 5491], [5363, 5364, 5492], [5364, 5493, 5492], [5364, 5365, 5494], [5364, 5494, 5493], [5365, 5366, 5494], [5366, 5495, 5494], [5366, 5367, 5496], [5366, 5496, 5495], [5367, 5368, 5496], [5368, 5497, 5496], [5368, 5369, 5498], [5368, 5498, 5497], [5369, 5370, 5498], [5370, 5499, 5498], [5370, 5371, 5500], [5370, 5500, 5499], [5371, 5372, 5500], [5372, 5501, 5500], [5372, 5373, 5502], [5372, 5502, 5501], [5373, 5374, 5502], [5374, 5503, 5502], [5374, 5375, 5504], [5374, 5504, 5503], [5375, 5376, 5504], [5376, 5505, 5504], [5376, 5377, 5506], [5376, 5506, 5505], [5377, 5378, 5506], [5378, 5507, 5506], [5378, 5379, 5508], [5378, 5508, 5507], [5379, 5380, 5508], [5380, 5509, 5508], [5380, 5381, 5510], [5380, 5510, 5509], [5381, 5382, 5510], [5382, 5511, 5510], [5382, 5383, 5512], [5382, 5512, 5511], [5383, 5384, 5512], [5384, 5513, 5512], [5384, 5385, 5514], [5384, 5514, 5513], [5385, 5386, 5514], [5386, 5515, 5514], [5386, 5387, 5516], [5386, 5516, 5515], [5387, 5388, 5516], [5388, 5517, 5516], [5388, 5389, 5518], [5388, 5518, 5517], [5389, 5390, 5518], [5390, 5519, 5518], [5390, 5391, 5520], [5390, 5520, 5519], [5391, 5392, 5520], [5392, 5521, 5520], [5392, 5393, 5522], [5392, 5522, 5521], [5393, 5394, 5522], [5394, 5523, 5522], [5394, 5395, 5524], [5394, 5524, 5523], [5395, 5396, 5524], [5396, 5525, 5524], [5396, 5397, 5526], [5396, 5526, 5525], [5397, 5398, 5526], [5398, 5527, 5526], [5398, 5399, 5528], [5398, 5528, 5527], [5399, 5400, 5528], [5400, 5529, 5528], [5400, 5401, 5530], [5400, 5530, 5529], [5401, 5402, 5530], [5402, 5531, 5530], [5402, 5403, 5532], [5402, 5532, 5531], [5403, 5404, 5532], [5404, 5533, 5532], [5404, 5405, 5534], [5404, 5534, 5533], [5405, 5406, 5534], [5406, 5535, 5534], [5406, 5407, 5536], [5406, 5536, 5535], [5407, 5408, 5536], [5408, 5537, 5536], [5408, 5409, 5538], [5408, 5538, 5537], [5409, 5410, 5538], [5410, 5539, 5538], [5410, 5411, 5540], [5410, 5540, 5539], [5411, 5412, 5540], [5412, 5541, 5540], [5412, 5413, 5542], [5412, 5542, 5541], [5413, 5414, 5542], [5414, 5543, 5542], [5414, 5415, 5544], [5414, 5544, 5543], [5415, 5416, 5544], [5416, 5545, 5544], [5416, 5417, 5546], [5416, 5546, 5545], [5418, 5419, 5548], [5418, 5548, 5547], [5419, 5420, 5548], [5420, 5549, 5548], [5420, 5421, 5550], [5420, 5550, 5549], [5421, 5422, 5550], [5422, 5551, 5550], [5422, 5423, 5552], [5422, 5552, 5551], [5423, 5424, 5552], [5424, 5553, 5552], [5424, 5425, 5554], [5424, 5554, 5553], [5425, 5426, 5554], [5426, 5555, 5554], [5426, 5427, 5556], [5426, 5556, 5555], [5427, 5428, 5556], [5428, 5557, 5556], [5428, 5429, 5558], [5428, 5558, 5557], [5429, 5430, 5558], [5430, 5559, 5558], [5430, 5431, 5560], [5430, 5560, 5559], [5431, 5432, 5560], [5432, 5561, 5560], [5432, 5433, 5562], [5432, 5562, 5561], [5433, 5434, 5562], [5434, 5563, 5562], [5434, 5435, 
5564], [5434, 5564, 5563], [5435, 5436, 5564], [5436, 5565, 5564], [5436, 5437, 5566], [5436, 5566, 5565], [5437, 5438, 5566], [5438, 5567, 5566], [5438, 5439, 5568], [5438, 5568, 5567], [5439, 5440, 5568], [5440, 5569, 5568], [5440, 5441, 5570], [5440, 5570, 5569], [5441, 5442, 5570], [5442, 5571, 5570], [5442, 5443, 5572], [5442, 5572, 5571], [5443, 5444, 5572], [5444, 5573, 5572], [5444, 5445, 5574], [5444, 5574, 5573], [5445, 5446, 5574], [5446, 5575, 5574], [5446, 5447, 5576], [5446, 5576, 5575], [5447, 5448, 5576], [5448, 5577, 5576], [5448, 5449, 5578], [5448, 5578, 5577], [5449, 5450, 5578], [5450, 5579, 5578], [5450, 5451, 5580], [5450, 5580, 5579], [5451, 5452, 5580], [5452, 5581, 5580], [5452, 5453, 5582], [5452, 5582, 5581], [5453, 5454, 5582], [5454, 5583, 5582], [5454, 5455, 5584], [5454, 5584, 5583], [5455, 5456, 5584], [5456, 5585, 5584], [5456, 5457, 5586], [5456, 5586, 5585], [5457, 5458, 5586], [5458, 5587, 5586], [5458, 5459, 5588], [5458, 5588, 5587], [5459, 5460, 5588], [5460, 5589, 5588], [5460, 5461, 5590], [5460, 5590, 5589], [5461, 5462, 5590], [5462, 5591, 5590], [5462, 5463, 5592], [5462, 5592, 5591], [5463, 5464, 5592], [5464, 5593, 5592], [5464, 5465, 5594], [5464, 5594, 5593], [5465, 5466, 5594], [5466, 5595, 5594], [5466, 5467, 5596], [5466, 5596, 5595], [5467, 5468, 5596], [5468, 5597, 5596], [5468, 5469, 5598], [5468, 5598, 5597], [5469, 5470, 5598], [5470, 5599, 5598], [5470, 5471, 5600], [5470, 5600, 5599], [5471, 5472, 5600], [5472, 5601, 5600], [5472, 5473, 5602], [5472, 5602, 5601], [5473, 5474, 5602], [5474, 5603, 5602], [5474, 5475, 5604], [5474, 5604, 5603], [5475, 5476, 5604], [5476, 5605, 5604], [5476, 5477, 5606], [5476, 5606, 5605], [5477, 5478, 5606], [5478, 5607, 5606], [5478, 5479, 5608], [5478, 5608, 5607], [5479, 5480, 5608], [5480, 5609, 5608], [5480, 5481, 5610], [5480, 5610, 5609], [5481, 5482, 5610], [5482, 5611, 5610], [5482, 5483, 5612], [5482, 5612, 5611], [5483, 5484, 5612], [5484, 5613, 5612], [5484, 5485, 5614], [5484, 5614, 5613], [5485, 5486, 5614], [5486, 5615, 5614], [5486, 5487, 5616], [5486, 5616, 5615], [5487, 5488, 5616], [5488, 5617, 5616], [5488, 5489, 5618], [5488, 5618, 5617], [5489, 5490, 5618], [5490, 5619, 5618], [5490, 5491, 5620], [5490, 5620, 5619], [5491, 5492, 5620], [5492, 5621, 5620], [5492, 5493, 5622], [5492, 5622, 5621], [5493, 5494, 5622], [5494, 5623, 5622], [5494, 5495, 5624], [5494, 5624, 5623], [5495, 5496, 5624], [5496, 5625, 5624], [5496, 5497, 5626], [5496, 5626, 5625], [5497, 5498, 5626], [5498, 5627, 5626], [5498, 5499, 5628], [5498, 5628, 5627], [5499, 5500, 5628], [5500, 5629, 5628], [5500, 5501, 5630], [5500, 5630, 5629], [5501, 5502, 5630], [5502, 5631, 5630], [5502, 5503, 5632], [5502, 5632, 5631], [5503, 5504, 5632], [5504, 5633, 5632], [5504, 5505, 5634], [5504, 5634, 5633], [5505, 5506, 5634], [5506, 5635, 5634], [5506, 5507, 5636], [5506, 5636, 5635], [5507, 5508, 5636], [5508, 5637, 5636], [5508, 5509, 5638], [5508, 5638, 5637], [5509, 5510, 5638], [5510, 5639, 5638], [5510, 5511, 5640], [5510, 5640, 5639], [5511, 5512, 5640], [5512, 5641, 5640], [5512, 5513, 5642], [5512, 5642, 5641], [5513, 5514, 5642], [5514, 5643, 5642], [5514, 5515, 5644], [5514, 5644, 5643], [5515, 5516, 5644], [5516, 5645, 5644], [5516, 5517, 5646], [5516, 5646, 5645], [5517, 5518, 5646], [5518, 5647, 5646], [5518, 5519, 5648], [5518, 5648, 5647], [5519, 5520, 5648], [5520, 5649, 5648], [5520, 5521, 5650], [5520, 5650, 5649], [5521, 5522, 5650], [5522, 5651, 5650], [5522, 5523, 5652], [5522, 5652, 5651], [5523, 
5524, 5652], [5524, 5653, 5652], [5524, 5525, 5654], [5524, 5654, 5653], [5525, 5526, 5654], [5526, 5655, 5654], [5526, 5527, 5656], [5526, 5656, 5655], [5527, 5528, 5656], [5528, 5657, 5656], [5528, 5529, 5658], [5528, 5658, 5657], [5529, 5530, 5658], [5530, 5659, 5658], [5530, 5531, 5660], [5530, 5660, 5659], [5531, 5532, 5660], [5532, 5661, 5660], [5532, 5533, 5662], [5532, 5662, 5661], [5533, 5534, 5662], [5534, 5663, 5662], [5534, 5535, 5664], [5534, 5664, 5663], [5535, 5536, 5664], [5536, 5665, 5664], [5536, 5537, 5666], [5536, 5666, 5665], [5537, 5538, 5666], [5538, 5667, 5666], [5538, 5539, 5668], [5538, 5668, 5667], [5539, 5540, 5668], [5540, 5669, 5668], [5540, 5541, 5670], [5540, 5670, 5669], [5541, 5542, 5670], [5542, 5671, 5670], [5542, 5543, 5672], [5542, 5672, 5671], [5543, 5544, 5672], [5544, 5673, 5672], [5544, 5545, 5674], [5544, 5674, 5673], [5545, 5546, 5674], [5546, 5675, 5674], [5547, 5548, 5676], [5548, 5677, 5676], [5548, 5549, 5678], [5548, 5678, 5677], [5549, 5550, 5678], [5550, 5679, 5678], [5550, 5551, 5680], [5550, 5680, 5679], [5551, 5552, 5680], [5552, 5681, 5680], [5552, 5553, 5682], [5552, 5682, 5681], [5553, 5554, 5682], [5554, 5683, 5682], [5554, 5555, 5684], [5554, 5684, 5683], [5555, 5556, 5684], [5556, 5685, 5684], [5556, 5557, 5686], [5556, 5686, 5685], [5557, 5558, 5686], [5558, 5687, 5686], [5558, 5559, 5688], [5558, 5688, 5687], [5559, 5560, 5688], [5560, 5689, 5688], [5560, 5561, 5690], [5560, 5690, 5689], [5561, 5562, 5690], [5562, 5691, 5690], [5562, 5563, 5692], [5562, 5692, 5691], [5563, 5564, 5692], [5564, 5693, 5692], [5564, 5565, 5694], [5564, 5694, 5693], [5565, 5566, 5694], [5566, 5695, 5694], [5566, 5567, 5696], [5566, 5696, 5695], [5567, 5568, 5696], [5568, 5697, 5696], [5568, 5569, 5698], [5568, 5698, 5697], [5569, 5570, 5698], [5570, 5699, 5698], [5570, 5571, 5700], [5570, 5700, 5699], [5571, 5572, 5700], [5572, 5701, 5700], [5572, 5573, 5702], [5572, 5702, 5701], [5573, 5574, 5702], [5574, 5703, 5702], [5574, 5575, 5704], [5574, 5704, 5703], [5575, 5576, 5704], [5576, 5705, 5704], [5576, 5577, 5706], [5576, 5706, 5705], [5577, 5578, 5706], [5578, 5707, 5706], [5578, 5579, 5708], [5578, 5708, 5707], [5579, 5580, 5708], [5580, 5709, 5708], [5580, 5581, 5710], [5580, 5710, 5709], [5581, 5582, 5710], [5582, 5711, 5710], [5582, 5583, 5712], [5582, 5712, 5711], [5583, 5584, 5712], [5584, 5713, 5712], [5584, 5585, 5714], [5584, 5714, 5713], [5585, 5586, 5714], [5586, 5715, 5714], [5586, 5587, 5716], [5586, 5716, 5715], [5587, 5588, 5716], [5588, 5717, 5716], [5588, 5589, 5718], [5588, 5718, 5717], [5589, 5590, 5718], [5590, 5719, 5718], [5590, 5591, 5720], [5590, 5720, 5719], [5591, 5592, 5720], [5592, 5721, 5720], [5592, 5593, 5722], [5592, 5722, 5721], [5593, 5594, 5722], [5594, 5723, 5722], [5594, 5595, 5724], [5594, 5724, 5723], [5595, 5596, 5724], [5596, 5725, 5724], [5596, 5597, 5726], [5596, 5726, 5725], [5597, 5598, 5726], [5598, 5727, 5726], [5598, 5599, 5728], [5598, 5728, 5727], [5599, 5600, 5728], [5600, 5729, 5728], [5600, 5601, 5730], [5600, 5730, 5729], [5601, 5602, 5730], [5602, 5731, 5730], [5602, 5603, 5732], [5602, 5732, 5731], [5603, 5604, 5732], [5604, 5733, 5732], [5604, 5605, 5734], [5604, 5734, 5733], [5605, 5606, 5734], [5606, 5735, 5734], [5606, 5607, 5736], [5606, 5736, 5735], [5607, 5608, 5736], [5608, 5737, 5736], [5608, 5609, 5738], [5608, 5738, 5737], [5609, 5610, 5738], [5610, 5739, 5738], [5610, 5611, 5740], [5610, 5740, 5739], [5611, 5612, 5740], [5612, 5741, 5740], [5612, 5613, 5742], [5612, 5742, 5741], 
[5613, 5614, 5742], [5614, 5743, 5742], [5614, 5615, 5744], [5614, 5744, 5743], [5615, 5616, 5744], [5616, 5745, 5744], [5616, 5617, 5746], [5616, 5746, 5745], [5617, 5618, 5746], [5618, 5747, 5746], [5618, 5619, 5748], [5618, 5748, 5747], [5619, 5620, 5748], [5620, 5749, 5748], [5620, 5621, 5750], [5620, 5750, 5749], [5621, 5622, 5750], [5622, 5751, 5750], [5622, 5623, 5752], [5622, 5752, 5751], [5623, 5624, 5752], [5624, 5753, 5752], [5624, 5625, 5754], [5624, 5754, 5753], [5625, 5626, 5754], [5626, 5755, 5754], [5626, 5627, 5756], [5626, 5756, 5755], [5627, 5628, 5756], [5628, 5757, 5756], [5628, 5629, 5758], [5628, 5758, 5757], [5629, 5630, 5758], [5630, 5759, 5758], [5630, 5631, 5760], [5630, 5760, 5759], [5631, 5632, 5760], [5632, 5761, 5760], [5632, 5633, 5762], [5632, 5762, 5761], [5633, 5634, 5762], [5634, 5763, 5762], [5634, 5635, 5764], [5634, 5764, 5763], [5635, 5636, 5764], [5636, 5765, 5764], [5636, 5637, 5766], [5636, 5766, 5765], [5637, 5638, 5766], [5638, 5767, 5766], [5638, 5639, 5768], [5638, 5768, 5767], [5639, 5640, 5768], [5640, 5769, 5768], [5640, 5641, 5770], [5640, 5770, 5769], [5641, 5642, 5770], [5642, 5771, 5770], [5642, 5643, 5772], [5642, 5772, 5771], [5643, 5644, 5772], [5644, 5773, 5772], [5644, 5645, 5774], [5644, 5774, 5773], [5645, 5646, 5774], [5646, 5775, 5774], [5646, 5647, 5776], [5646, 5776, 5775], [5647, 5648, 5776], [5648, 5777, 5776], [5648, 5649, 5778], [5648, 5778, 5777], [5649, 5650, 5778], [5650, 5779, 5778], [5650, 5651, 5780], [5650, 5780, 5779], [5651, 5652, 5780], [5652, 5781, 5780], [5652, 5653, 5782], [5652, 5782, 5781], [5653, 5654, 5782], [5654, 5783, 5782], [5654, 5655, 5784], [5654, 5784, 5783], [5655, 5656, 5784], [5656, 5785, 5784], [5656, 5657, 5786], [5656, 5786, 5785], [5657, 5658, 5786], [5658, 5787, 5786], [5658, 5659, 5788], [5658, 5788, 5787], [5659, 5660, 5788], [5660, 5789, 5788], [5660, 5661, 5790], [5660, 5790, 5789], [5661, 5662, 5790], [5662, 5791, 5790], [5662, 5663, 5792], [5662, 5792, 5791], [5663, 5664, 5792], [5664, 5793, 5792], [5664, 5665, 5794], [5664, 5794, 5793], [5665, 5666, 5794], [5666, 5795, 5794], [5666, 5667, 5796], [5666, 5796, 5795], [5667, 5668, 5796], [5668, 5797, 5796], [5668, 5669, 5798], [5668, 5798, 5797], [5669, 5670, 5798], [5670, 5799, 5798], [5670, 5671, 5800], [5670, 5800, 5799], [5671, 5672, 5800], [5672, 5801, 5800], [5672, 5673, 5802], [5672, 5802, 5801], [5673, 5674, 5802], [5674, 5803, 5802], [5674, 5675, 5804], [5674, 5804, 5803], [5676, 5677, 5806], [5676, 5806, 5805], [5677, 5678, 5806], [5678, 5807, 5806], [5678, 5679, 5808], [5678, 5808, 5807], [5679, 5680, 5808], [5680, 5809, 5808], [5680, 5681, 5810], [5680, 5810, 5809], [5681, 5682, 5810], [5682, 5811, 5810], [5682, 5683, 5812], [5682, 5812, 5811], [5683, 5684, 5812], [5684, 5813, 5812], [5684, 5685, 5814], [5684, 5814, 5813], [5685, 5686, 5814], [5686, 5815, 5814], [5686, 5687, 5816], [5686, 5816, 5815], [5687, 5688, 5816], [5688, 5817, 5816], [5688, 5689, 5818], [5688, 5818, 5817], [5689, 5690, 5818], [5690, 5819, 5818], [5690, 5691, 5820], [5690, 5820, 5819], [5691, 5692, 5820], [5692, 5821, 5820], [5692, 5693, 5822], [5692, 5822, 5821], [5693, 5694, 5822], [5694, 5823, 5822], [5694, 5695, 5824], [5694, 5824, 5823], [5695, 5696, 5824], [5696, 5825, 5824], [5696, 5697, 5826], [5696, 5826, 5825], [5697, 5698, 5826], [5698, 5827, 5826], [5698, 5699, 5828], [5698, 5828, 5827], [5699, 5700, 5828], [5700, 5829, 5828], [5700, 5701, 5830], [5700, 5830, 5829], [5701, 5702, 5830], [5702, 5831, 5830], [5702, 5703, 5832], [5702, 5832, 
5831], [5703, 5704, 5832], [5704, 5833, 5832], [5704, 5705, 5834], [5704, 5834, 5833], [5705, 5706, 5834], [5706, 5835, 5834], [5706, 5707, 5836], [5706, 5836, 5835], [5707, 5708, 5836], [5708, 5837, 5836], [5708, 5709, 5838], [5708, 5838, 5837], [5709, 5710, 5838], [5710, 5839, 5838], [5710, 5711, 5840], [5710, 5840, 5839], [5711, 5712, 5840], [5712, 5841, 5840], [5712, 5713, 5842], [5712, 5842, 5841], [5713, 5714, 5842], [5714, 5843, 5842], [5714, 5715, 5844], [5714, 5844, 5843], [5715, 5716, 5844], [5716, 5845, 5844], [5716, 5717, 5846], [5716, 5846, 5845], [5717, 5718, 5846], [5718, 5847, 5846], [5718, 5719, 5848], [5718, 5848, 5847], [5719, 5720, 5848], [5720, 5849, 5848], [5720, 5721, 5850], [5720, 5850, 5849], [5721, 5722, 5850], [5722, 5851, 5850], [5722, 5723, 5852], [5722, 5852, 5851], [5723, 5724, 5852], [5724, 5853, 5852], [5724, 5725, 5854], [5724, 5854, 5853], [5725, 5726, 5854], [5726, 5855, 5854], [5726, 5727, 5856], [5726, 5856, 5855], [5727, 5728, 5856], [5728, 5857, 5856], [5728, 5729, 5858], [5728, 5858, 5857], [5729, 5730, 5858], [5730, 5859, 5858], [5730, 5731, 5860], [5730, 5860, 5859], [5731, 5732, 5860], [5732, 5861, 5860], [5732, 5733, 5862], [5732, 5862, 5861], [5733, 5734, 5862], [5734, 5863, 5862], [5734, 5735, 5864], [5734, 5864, 5863], [5735, 5736, 5864], [5736, 5865, 5864], [5736, 5737, 5866], [5736, 5866, 5865], [5737, 5738, 5866], [5738, 5867, 5866], [5738, 5739, 5868], [5738, 5868, 5867], [5739, 5740, 5868], [5740, 5869, 5868], [5740, 5741, 5870], [5740, 5870, 5869], [5741, 5742, 5870], [5742, 5871, 5870], [5742, 5743, 5872], [5742, 5872, 5871], [5743, 5744, 5872], [5744, 5873, 5872], [5744, 5745, 5874], [5744, 5874, 5873], [5745, 5746, 5874], [5746, 5875, 5874], [5746, 5747, 5876], [5746, 5876, 5875], [5747, 5748, 5876], [5748, 5877, 5876], [5748, 5749, 5878], [5748, 5878, 5877], [5749, 5750, 5878], [5750, 5879, 5878], [5750, 5751, 5880], [5750, 5880, 5879], [5751, 5752, 5880], [5752, 5881, 5880], [5752, 5753, 5882], [5752, 5882, 5881], [5753, 5754, 5882], [5754, 5883, 5882], [5754, 5755, 5884], [5754, 5884, 5883], [5755, 5756, 5884], [5756, 5885, 5884], [5756, 5757, 5886], [5756, 5886, 5885], [5757, 5758, 5886], [5758, 5887, 5886], [5758, 5759, 5888], [5758, 5888, 5887], [5759, 5760, 5888], [5760, 5889, 5888], [5760, 5761, 5890], [5760, 5890, 5889], [5761, 5762, 5890], [5762, 5891, 5890], [5762, 5763, 5892], [5762, 5892, 5891], [5763, 5764, 5892], [5764, 5893, 5892], [5764, 5765, 5894], [5764, 5894, 5893], [5765, 5766, 5894], [5766, 5895, 5894], [5766, 5767, 5896], [5766, 5896, 5895], [5767, 5768, 5896], [5768, 5897, 5896], [5768, 5769, 5898], [5768, 5898, 5897], [5769, 5770, 5898], [5770, 5899, 5898], [5770, 5771, 5900], [5770, 5900, 5899], [5771, 5772, 5900], [5772, 5901, 5900], [5772, 5773, 5902], [5772, 5902, 5901], [5773, 5774, 5902], [5774, 5903, 5902], [5774, 5775, 5904], [5774, 5904, 5903], [5775, 5776, 5904], [5776, 5905, 5904], [5776, 5777, 5906], [5776, 5906, 5905], [5777, 5778, 5906], [5778, 5907, 5906], [5778, 5779, 5908], [5778, 5908, 5907], [5779, 5780, 5908], [5780, 5909, 5908], [5780, 5781, 5910], [5780, 5910, 5909], [5781, 5782, 5910], [5782, 5911, 5910], [5782, 5783, 5912], [5782, 5912, 5911], [5783, 5784, 5912], [5784, 5913, 5912], [5784, 5785, 5914], [5784, 5914, 5913], [5785, 5786, 5914], [5786, 5915, 5914], [5786, 5787, 5916], [5786, 5916, 5915], [5787, 5788, 5916], [5788, 5917, 5916], [5788, 5789, 5918], [5788, 5918, 5917], [5789, 5790, 5918], [5790, 5919, 5918], [5790, 5791, 5920], [5790, 5920, 5919], [5791, 5792, 5920], [5792, 
5921, 5920], [5792, 5793, 5922], [5792, 5922, 5921], [5793, 5794, 5922], [5794, 5923, 5922], [5794, 5795, 5924], [5794, 5924, 5923], [5795, 5796, 5924], [5796, 5925, 5924], [5796, 5797, 5926], [5796, 5926, 5925], [5797, 5798, 5926], [5798, 5927, 5926], [5798, 5799, 5928], [5798, 5928, 5927], [5799, 5800, 5928], [5800, 5929, 5928], [5800, 5801, 5930], [5800, 5930, 5929], [5801, 5802, 5930], [5802, 5931, 5930], [5802, 5803, 5932], [5802, 5932, 5931], [5803, 5804, 5932], [5804, 5933, 5932], [5805, 5806, 5934], [5806, 5935, 5934], [5806, 5807, 5936], [5806, 5936, 5935], [5807, 5808, 5936], [5808, 5937, 5936], [5808, 5809, 5938], [5808, 5938, 5937], [5809, 5810, 5938], [5810, 5939, 5938], [5810, 5811, 5940], [5810, 5940, 5939], [5811, 5812, 5940], [5812, 5941, 5940], [5812, 5813, 5942], [5812, 5942, 5941], [5813, 5814, 5942], [5814, 5943, 5942], [5814, 5815, 5944], [5814, 5944, 5943], [5815, 5816, 5944], [5816, 5945, 5944], [5816, 5817, 5946], [5816, 5946, 5945], [5817, 5818, 5946], [5818, 5947, 5946], [5818, 5819, 5948], [5818, 5948, 5947], [5819, 5820, 5948], [5820, 5949, 5948], [5820, 5821, 5950], [5820, 5950, 5949], [5821, 5822, 5950], [5822, 5951, 5950], [5822, 5823, 5952], [5822, 5952, 5951], [5823, 5824, 5952], [5824, 5953, 5952], [5824, 5825, 5954], [5824, 5954, 5953], [5825, 5826, 5954], [5826, 5955, 5954], [5826, 5827, 5956], [5826, 5956, 5955], [5827, 5828, 5956], [5828, 5957, 5956], [5828, 5829, 5958], [5828, 5958, 5957], [5829, 5830, 5958], [5830, 5959, 5958], [5830, 5831, 5960], [5830, 5960, 5959], [5831, 5832, 5960], [5832, 5961, 5960], [5832, 5833, 5962], [5832, 5962, 5961], [5833, 5834, 5962], [5834, 5963, 5962], [5834, 5835, 5964], [5834, 5964, 5963], [5835, 5836, 5964], [5836, 5965, 5964], [5836, 5837, 5966], [5836, 5966, 5965], [5837, 5838, 5966], [5838, 5967, 5966], [5838, 5839, 5968], [5838, 5968, 5967], [5839, 5840, 5968], [5840, 5969, 5968], [5840, 5841, 5970], [5840, 5970, 5969], [5841, 5842, 5970], [5842, 5971, 5970], [5842, 5843, 5972], [5842, 5972, 5971], [5843, 5844, 5972], [5844, 5973, 5972], [5844, 5845, 5974], [5844, 5974, 5973], [5845, 5846, 5974], [5846, 5975, 5974], [5846, 5847, 5976], [5846, 5976, 5975], [5847, 5848, 5976], [5848, 5977, 5976], [5848, 5849, 5978], [5848, 5978, 5977], [5849, 5850, 5978], [5850, 5979, 5978], [5850, 5851, 5980], [5850, 5980, 5979], [5851, 5852, 5980], [5852, 5981, 5980], [5852, 5853, 5982], [5852, 5982, 5981], [5853, 5854, 5982], [5854, 5983, 5982], [5854, 5855, 5984], [5854, 5984, 5983], [5855, 5856, 5984], [5856, 5985, 5984], [5856, 5857, 5986], [5856, 5986, 5985], [5857, 5858, 5986], [5858, 5987, 5986], [5858, 5859, 5988], [5858, 5988, 5987], [5859, 5860, 5988], [5860, 5989, 5988], [5860, 5861, 5990], [5860, 5990, 5989], [5861, 5862, 5990], [5862, 5991, 5990], [5862, 5863, 5992], [5862, 5992, 5991], [5863, 5864, 5992], [5864, 5993, 5992], [5864, 5865, 5994], [5864, 5994, 5993], [5865, 5866, 5994], [5866, 5995, 5994], [5866, 5867, 5996], [5866, 5996, 5995], [5867, 5868, 5996], [5868, 5997, 5996], [5868, 5869, 5998], [5868, 5998, 5997], [5869, 5870, 5998], [5870, 5999, 5998], [5870, 5871, 6000], [5870, 6000, 5999], [5871, 5872, 6000], [5872, 6001, 6000], [5872, 5873, 6002], [5872, 6002, 6001], [5873, 5874, 6002], [5874, 6003, 6002], [5874, 5875, 6004], [5874, 6004, 6003], [5875, 5876, 6004], [5876, 6005, 6004], [5876, 5877, 6006], [5876, 6006, 6005], [5877, 5878, 6006], [5878, 6007, 6006], [5878, 5879, 6008], [5878, 6008, 6007], [5879, 5880, 6008], [5880, 6009, 6008], [5880, 5881, 6010], [5880, 6010, 6009], [5881, 5882, 6010], 
[5882, 6011, 6010], [5882, 5883, 6012], [5882, 6012, 6011], [5883, 5884, 6012], [5884, 6013, 6012], [5884, 5885, 6014], [5884, 6014, 6013], [5885, 5886, 6014], [5886, 6015, 6014], [5886, 5887, 6016], [5886, 6016, 6015], [5887, 5888, 6016], [5888, 6017, 6016], [5888, 5889, 6018], [5888, 6018, 6017], [5889, 5890, 6018], [5890, 6019, 6018], [5890, 5891, 6020], [5890, 6020, 6019], [5891, 5892, 6020], [5892, 6021, 6020], [5892, 5893, 6022], [5892, 6022, 6021], [5893, 5894, 6022], [5894, 6023, 6022], [5894, 5895, 6024], [5894, 6024, 6023], [5895, 5896, 6024], [5896, 6025, 6024], [5896, 5897, 6026], [5896, 6026, 6025], [5897, 5898, 6026], [5898, 6027, 6026], [5898, 5899, 6028], [5898, 6028, 6027], [5899, 5900, 6028], [5900, 6029, 6028], [5900, 5901, 6030], [5900, 6030, 6029], [5901, 5902, 6030], [5902, 6031, 6030], [5902, 5903, 6032], [5902, 6032, 6031], [5903, 5904, 6032], [5904, 6033, 6032], [5904, 5905, 6034], [5904, 6034, 6033], [5905, 5906, 6034], [5906, 6035, 6034], [5906, 5907, 6036], [5906, 6036, 6035], [5907, 5908, 6036], [5908, 6037, 6036], [5908, 5909, 6038], [5908, 6038, 6037], [5909, 5910, 6038], [5910, 6039, 6038], [5910, 5911, 6040], [5910, 6040, 6039], [5911, 5912, 6040], [5912, 6041, 6040], [5912, 5913, 6042], [5912, 6042, 6041], [5913, 5914, 6042], [5914, 6043, 6042], [5914, 5915, 6044], [5914, 6044, 6043], [5915, 5916, 6044], [5916, 6045, 6044], [5916, 5917, 6046], [5916, 6046, 6045], [5917, 5918, 6046], [5918, 6047, 6046], [5918, 5919, 6048], [5918, 6048, 6047], [5919, 5920, 6048], [5920, 6049, 6048], [5920, 5921, 6050], [5920, 6050, 6049], [5921, 5922, 6050], [5922, 6051, 6050], [5922, 5923, 6052], [5922, 6052, 6051], [5923, 5924, 6052], [5924, 6053, 6052], [5924, 5925, 6054], [5924, 6054, 6053], [5925, 5926, 6054], [5926, 6055, 6054], [5926, 5927, 6056], [5926, 6056, 6055], [5927, 5928, 6056], [5928, 6057, 6056], [5928, 5929, 6058], [5928, 6058, 6057], [5929, 5930, 6058], [5930, 6059, 6058], [5930, 5931, 6060], [5930, 6060, 6059], [5931, 5932, 6060], [5932, 6061, 6060], [5932, 5933, 6062], [5932, 6062, 6061], [5934, 5935, 6064], [5934, 6064, 6063], [5935, 5936, 6064], [5936, 6065, 6064], [5936, 5937, 6066], [5936, 6066, 6065], [5937, 5938, 6066], [5938, 6067, 6066], [5938, 5939, 6068], [5938, 6068, 6067], [5939, 5940, 6068], [5940, 6069, 6068], [5940, 5941, 6070], [5940, 6070, 6069], [5941, 5942, 6070], [5942, 6071, 6070], [5942, 5943, 6072], [5942, 6072, 6071], [5943, 5944, 6072], [5944, 6073, 6072], [5944, 5945, 6074], [5944, 6074, 6073], [5945, 5946, 6074], [5946, 6075, 6074], [5946, 5947, 6076], [5946, 6076, 6075], [5947, 5948, 6076], [5948, 6077, 6076], [5948, 5949, 6078], [5948, 6078, 6077], [5949, 5950, 6078], [5950, 6079, 6078], [5950, 5951, 6080], [5950, 6080, 6079], [5951, 5952, 6080], [5952, 6081, 6080], [5952, 5953, 6082], [5952, 6082, 6081], [5953, 5954, 6082], [5954, 6083, 6082], [5954, 5955, 6084], [5954, 6084, 6083], [5955, 5956, 6084], [5956, 6085, 6084], [5956, 5957, 6086], [5956, 6086, 6085], [5957, 5958, 6086], [5958, 6087, 6086], [5958, 5959, 6088], [5958, 6088, 6087], [5959, 5960, 6088], [5960, 6089, 6088], [5960, 5961, 6090], [5960, 6090, 6089], [5961, 5962, 6090], [5962, 6091, 6090], [5962, 5963, 6092], [5962, 6092, 6091], [5963, 5964, 6092], [5964, 6093, 6092], [5964, 5965, 6094], [5964, 6094, 6093], [5965, 5966, 6094], [5966, 6095, 6094], [5966, 5967, 6096], [5966, 6096, 6095], [5967, 5968, 6096], [5968, 6097, 6096], [5968, 5969, 6098], [5968, 6098, 6097], [5969, 5970, 6098], [5970, 6099, 6098], [5970, 5971, 6100], [5970, 6100, 6099], [5971, 5972, 
6100], [5972, 6101, 6100], [5972, 5973, 6102], [5972, 6102, 6101], [5973, 5974, 6102], [5974, 6103, 6102], [5974, 5975, 6104], [5974, 6104, 6103], [5975, 5976, 6104], [5976, 6105, 6104], [5976, 5977, 6106], [5976, 6106, 6105], [5977, 5978, 6106], [5978, 6107, 6106], [5978, 5979, 6108], [5978, 6108, 6107], [5979, 5980, 6108], [5980, 6109, 6108], [5980, 5981, 6110], [5980, 6110, 6109], [5981, 5982, 6110], [5982, 6111, 6110], [5982, 5983, 6112], [5982, 6112, 6111], [5983, 5984, 6112], [5984, 6113, 6112], [5984, 5985, 6114], [5984, 6114, 6113], [5985, 5986, 6114], [5986, 6115, 6114], [5986, 5987, 6116], [5986, 6116, 6115], [5987, 5988, 6116], [5988, 6117, 6116], [5988, 5989, 6118], [5988, 6118, 6117], [5989, 5990, 6118], [5990, 6119, 6118], [5990, 5991, 6120], [5990, 6120, 6119], [5991, 5992, 6120], [5992, 6121, 6120], [5992, 5993, 6122], [5992, 6122, 6121], [5993, 5994, 6122], [5994, 6123, 6122], [5994, 5995, 6124], [5994, 6124, 6123], [5995, 5996, 6124], [5996, 6125, 6124], [5996, 5997, 6126], [5996, 6126, 6125], [5997, 5998, 6126], [5998, 6127, 6126], [5998, 5999, 6128], [5998, 6128, 6127], [5999, 6000, 6128], [6000, 6129, 6128], [6000, 6001, 6130], [6000, 6130, 6129], [6001, 6002, 6130], [6002, 6131, 6130], [6002, 6003, 6132], [6002, 6132, 6131], [6003, 6004, 6132], [6004, 6133, 6132], [6004, 6005, 6134], [6004, 6134, 6133], [6005, 6006, 6134], [6006, 6135, 6134], [6006, 6007, 6136], [6006, 6136, 6135], [6007, 6008, 6136], [6008, 6137, 6136], [6008, 6009, 6138], [6008, 6138, 6137], [6009, 6010, 6138], [6010, 6139, 6138], [6010, 6011, 6140], [6010, 6140, 6139], [6011, 6012, 6140], [6012, 6141, 6140], [6012, 6013, 6142], [6012, 6142, 6141], [6013, 6014, 6142], [6014, 6143, 6142], [6014, 6015, 6144], [6014, 6144, 6143], [6015, 6016, 6144], [6016, 6145, 6144], [6016, 6017, 6146], [6016, 6146, 6145], [6017, 6018, 6146], [6018, 6147, 6146], [6018, 6019, 6148], [6018, 6148, 6147], [6019, 6020, 6148], [6020, 6149, 6148], [6020, 6021, 6150], [6020, 6150, 6149], [6021, 6022, 6150], [6022, 6151, 6150], [6022, 6023, 6152], [6022, 6152, 6151], [6023, 6024, 6152], [6024, 6153, 6152], [6024, 6025, 6154], [6024, 6154, 6153], [6025, 6026, 6154], [6026, 6155, 6154], [6026, 6027, 6156], [6026, 6156, 6155], [6027, 6028, 6156], [6028, 6157, 6156], [6028, 6029, 6158], [6028, 6158, 6157], [6029, 6030, 6158], [6030, 6159, 6158], [6030, 6031, 6160], [6030, 6160, 6159], [6031, 6032, 6160], [6032, 6161, 6160], [6032, 6033, 6162], [6032, 6162, 6161], [6033, 6034, 6162], [6034, 6163, 6162], [6036, 6037, 6164], [6037, 6038, 6164], [6038, 6165, 6164], [6038, 6039, 6166], [6038, 6166, 6165], [6039, 6040, 6166], [6040, 6167, 6166], [6040, 6041, 6168], [6040, 6168, 6167], [6041, 6042, 6168], [6042, 6169, 6168], [6042, 6043, 6170], [6042, 6170, 6169], [6043, 6044, 6170], [6044, 6171, 6170], [6044, 6045, 6172], [6044, 6172, 6171], [6045, 6046, 6172], [6046, 6173, 6172], [6046, 6047, 6174], [6046, 6174, 6173], [6047, 6048, 6174], [6048, 6175, 6174], [6048, 6049, 6176], [6048, 6176, 6175], [6049, 6050, 6176], [6050, 6177, 6176], [6050, 6051, 6178], [6050, 6178, 6177], [6051, 6052, 6178], [6052, 6179, 6178], [6052, 6053, 6180], [6052, 6180, 6179], [6053, 6054, 6180], [6054, 6181, 6180], [6054, 6055, 6182], [6054, 6182, 6181], [6055, 6056, 6182], [6056, 6183, 6182], [6056, 6057, 6184], [6056, 6184, 6183], [6057, 6058, 6184], [6058, 6185, 6184], [6058, 6059, 6186], [6058, 6186, 6185], [6059, 6060, 6186], [6060, 6187, 6186], [6060, 6061, 6188], [6060, 6188, 6187], [6061, 6062, 6188], [6062, 6189, 6188], [6063, 6064, 6190], [6064, 
6191, 6190], [6064, 6065, 6192], [6064, 6192, 6191], [6065, 6066, 6192], [6066, 6193, 6192], [6066, 6067, 6194], [6066, 6194, 6193], [6067, 6068, 6194], [6068, 6195, 6194], [6068, 6069, 6196], [6068, 6196, 6195], [6069, 6070, 6196], [6070, 6197, 6196], [6070, 6071, 6198], [6070, 6198, 6197], [6071, 6072, 6198], [6072, 6199, 6198], [6072, 6073, 6200], [6072, 6200, 6199], [6073, 6074, 6200], [6074, 6201, 6200], [6074, 6075, 6202], [6074, 6202, 6201], [6075, 6076, 6202], [6076, 6203, 6202], [6076, 6077, 6204], [6076, 6204, 6203], [6077, 6078, 6204], [6078, 6205, 6204], [6078, 6079, 6206], [6078, 6206, 6205], [6079, 6080, 6206], [6080, 6207, 6206], [6080, 6081, 6208], [6080, 6208, 6207], [6081, 6082, 6208], [6082, 6209, 6208], [6082, 6083, 6210], [6082, 6210, 6209], [6083, 6084, 6210], [6084, 6211, 6210], [6084, 6085, 6212], [6084, 6212, 6211], [6085, 6086, 6212], [6086, 6213, 6212], [6086, 6087, 6214], [6086, 6214, 6213], [6087, 6088, 6214], [6088, 6215, 6214], [6088, 6089, 6216], [6088, 6216, 6215], [6089, 6090, 6216], [6090, 6217, 6216], [6090, 6091, 6218], [6090, 6218, 6217], [6091, 6092, 6218], [6092, 6219, 6218], [6092, 6093, 6220], [6092, 6220, 6219], [6093, 6094, 6220], [6094, 6221, 6220], [6094, 6095, 6222], [6094, 6222, 6221], [6095, 6096, 6222], [6096, 6223, 6222], [6096, 6097, 6224], [6096, 6224, 6223], [6097, 6098, 6224], [6098, 6225, 6224], [6098, 6099, 6226], [6098, 6226, 6225], [6099, 6100, 6226], [6100, 6227, 6226], [6100, 6101, 6228], [6100, 6228, 6227], [6101, 6102, 6228], [6102, 6229, 6228], [6102, 6103, 6230], [6102, 6230, 6229], [6103, 6104, 6230], [6104, 6231, 6230], [6104, 6105, 6232], [6104, 6232, 6231], [6105, 6106, 6232], [6106, 6233, 6232], [6106, 6107, 6234], [6106, 6234, 6233], [6107, 6108, 6234], [6108, 6235, 6234], [6108, 6109, 6236], [6108, 6236, 6235], [6109, 6110, 6236], [6110, 6237, 6236], [6110, 6111, 6238], [6110, 6238, 6237], [6111, 6112, 6238], [6112, 6239, 6238], [6112, 6113, 6240], [6112, 6240, 6239], [6113, 6114, 6240], [6114, 6241, 6240], [6114, 6115, 6242], [6114, 6242, 6241], [6115, 6116, 6242], [6116, 6243, 6242], [6116, 6117, 6244], [6116, 6244, 6243], [6117, 6118, 6244], [6118, 6245, 6244], [6118, 6119, 6246], [6118, 6246, 6245], [6119, 6120, 6246], [6120, 6247, 6246], [6120, 6121, 6248], [6120, 6248, 6247], [6121, 6122, 6248], [6122, 6249, 6248], [6122, 6123, 6250], [6122, 6250, 6249], [6123, 6124, 6250], [6124, 6251, 6250], [6124, 6125, 6252], [6124, 6252, 6251], [6125, 6126, 6252], [6126, 6253, 6252], [6126, 6127, 6254], [6126, 6254, 6253], [6127, 6128, 6254], [6128, 6255, 6254], [6128, 6129, 6256], [6128, 6256, 6255], [6129, 6130, 6256], [6130, 6257, 6256], [6130, 6131, 6258], [6130, 6258, 6257], [6131, 6132, 6258], [6132, 6259, 6258], [6132, 6133, 6260], [6132, 6260, 6259], [6133, 6134, 6260], [6134, 6261, 6260], [6134, 6135, 6262], [6134, 6262, 6261], [6135, 6136, 6262], [6136, 6263, 6262], [6136, 6137, 6264], [6136, 6264, 6263], [6137, 6138, 6264], [6138, 6265, 6264], [6138, 6139, 6266], [6138, 6266, 6265], [6139, 6140, 6266], [6140, 6267, 6266], [6140, 6141, 6268], [6140, 6268, 6267], [6141, 6142, 6268], [6142, 6269, 6268], [6142, 6143, 6270], [6142, 6270, 6269], [6143, 6144, 6270], [6144, 6271, 6270], [6144, 6145, 6272], [6144, 6272, 6271], [6145, 6146, 6272], [6146, 6273, 6272], [6146, 6147, 6274], [6146, 6274, 6273], [6147, 6148, 6274], [6148, 6275, 6274], [6148, 6149, 6276], [6148, 6276, 6275], [6149, 6150, 6276], [6150, 6277, 6276], [6150, 6151, 6278], [6150, 6278, 6277], [6151, 6152, 6278], [6152, 6279, 6278], [6152, 6153, 6280], 
[6152, 6280, 6279], [6153, 6154, 6280], [6154, 6281, 6280], [6154, 6155, 6282], [6154, 6282, 6281], [6155, 6156, 6282], [6156, 6283, 6282], [6156, 6157, 6284], [6156, 6284, 6283], [6157, 6158, 6284], [6158, 6285, 6284], [6158, 6159, 6286], [6158, 6286, 6285], [6159, 6160, 6286], [6160, 6287, 6286], [6160, 6161, 6288], [6160, 6288, 6287], [6161, 6162, 6288], [6162, 6289, 6288], [6162, 6163, 6290], [6162, 6290, 6289], [6166, 6167, 6292], [6166, 6292, 6291], [6167, 6168, 6292], [6168, 6293, 6292], [6168, 6169, 6294], [6168, 6294, 6293], [6169, 6170, 6294], [6170, 6295, 6294], [6170, 6171, 6296], [6170, 6296, 6295], [6171, 6172, 6296], [6172, 6297, 6296], [6172, 6173, 6298], [6172, 6298, 6297], [6173, 6174, 6298], [6174, 6299, 6298], [6174, 6175, 6300], [6174, 6300, 6299], [6175, 6176, 6300], [6176, 6301, 6300], [6176, 6177, 6302], [6176, 6302, 6301], [6177, 6178, 6302], [6178, 6303, 6302], [6178, 6179, 6304], [6178, 6304, 6303], [6179, 6180, 6304], [6180, 6305, 6304], [6180, 6181, 6306], [6180, 6306, 6305], [6181, 6182, 6306], [6182, 6307, 6306], [6182, 6183, 6308], [6182, 6308, 6307], [6183, 6184, 6308], [6184, 6309, 6308], [6184, 6185, 6310], [6184, 6310, 6309], [6185, 6186, 6310], [6186, 6311, 6310], [6186, 6187, 6312], [6186, 6312, 6311], [6187, 6188, 6312], [6188, 6313, 6312], [6188, 6189, 6314], [6188, 6314, 6313], [6190, 6191, 6316], [6190, 6316, 6315], [6191, 6192, 6316], [6192, 6317, 6316], [6192, 6193, 6318], [6192, 6318, 6317], [6193, 6194, 6318], [6194, 6319, 6318], [6194, 6195, 6320], [6194, 6320, 6319], [6195, 6196, 6320], [6196, 6321, 6320], [6196, 6197, 6322], [6196, 6322, 6321], [6197, 6198, 6322], [6198, 6323, 6322], [6198, 6199, 6324], [6198, 6324, 6323], [6199, 6200, 6324], [6200, 6325, 6324], [6200, 6201, 6326], [6200, 6326, 6325], [6201, 6202, 6326], [6202, 6327, 6326], [6202, 6203, 6328], [6202, 6328, 6327], [6203, 6204, 6328], [6204, 6329, 6328], [6204, 6205, 6330], [6204, 6330, 6329], [6205, 6206, 6330], [6206, 6331, 6330], [6206, 6207, 6332], [6206, 6332, 6331], [6207, 6208, 6332], [6208, 6333, 6332], [6208, 6209, 6334], [6208, 6334, 6333], [6209, 6210, 6334], [6210, 6335, 6334], [6210, 6211, 6336], [6210, 6336, 6335], [6211, 6212, 6336], [6212, 6337, 6336], [6212, 6213, 6338], [6212, 6338, 6337], [6213, 6214, 6338], [6214, 6339, 6338], [6214, 6215, 6340], [6214, 6340, 6339], [6215, 6216, 6340], [6216, 6341, 6340], [6216, 6217, 6342], [6216, 6342, 6341], [6217, 6218, 6342], [6218, 6343, 6342], [6218, 6219, 6344], [6218, 6344, 6343], [6219, 6220, 6344], [6220, 6345, 6344], [6220, 6221, 6346], [6220, 6346, 6345], [6221, 6222, 6346], [6222, 6347, 6346], [6222, 6223, 6348], [6222, 6348, 6347], [6223, 6224, 6348], [6224, 6349, 6348], [6224, 6225, 6350], [6224, 6350, 6349], [6225, 6226, 6350], [6226, 6351, 6350], [6226, 6227, 6352], [6226, 6352, 6351], [6227, 6228, 6352], [6228, 6353, 6352], [6228, 6229, 6354], [6228, 6354, 6353], [6229, 6230, 6354], [6230, 6355, 6354], [6230, 6231, 6356], [6230, 6356, 6355], [6231, 6232, 6356], [6232, 6357, 6356], [6232, 6233, 6358], [6232, 6358, 6357], [6233, 6234, 6358], [6234, 6359, 6358], [6234, 6235, 6360], [6234, 6360, 6359], [6235, 6236, 6360], [6236, 6361, 6360], [6236, 6237, 6362], [6236, 6362, 6361], [6237, 6238, 6362], [6238, 6363, 6362], [6238, 6239, 6364], [6238, 6364, 6363], [6239, 6240, 6364], [6240, 6365, 6364], [6240, 6241, 6366], [6240, 6366, 6365], [6241, 6242, 6366], [6242, 6367, 6366], [6242, 6243, 6368], [6242, 6368, 6367], [6243, 6244, 6368], [6244, 6369, 6368], [6244, 6245, 6370], [6244, 6370, 6369], [6245, 6246, 
6370], [6246, 6371, 6370], [6246, 6247, 6372], [6246, 6372, 6371], [6247, 6248, 6372], [6248, 6373, 6372], [6248, 6249, 6374], [6248, 6374, 6373], [6249, 6250, 6374], [6250, 6375, 6374], [6250, 6251, 6376], [6250, 6376, 6375], [6251, 6252, 6376], [6252, 6377, 6376], [6252, 6253, 6378], [6252, 6378, 6377], [6253, 6254, 6378], [6254, 6379, 6378], [6254, 6255, 6380], [6254, 6380, 6379], [6255, 6256, 6380], [6256, 6381, 6380], [6256, 6257, 6382], [6256, 6382, 6381], [6257, 6258, 6382], [6258, 6383, 6382], [6258, 6259, 6384], [6258, 6384, 6383], [6259, 6260, 6384], [6260, 6385, 6384], [6260, 6261, 6386], [6260, 6386, 6385], [6261, 6262, 6386], [6262, 6387, 6386], [6262, 6263, 6388], [6262, 6388, 6387], [6263, 6264, 6388], [6264, 6389, 6388], [6264, 6265, 6390], [6264, 6390, 6389], [6265, 6266, 6390], [6266, 6391, 6390], [6266, 6267, 6392], [6266, 6392, 6391], [6267, 6268, 6392], [6268, 6393, 6392], [6268, 6269, 6394], [6268, 6394, 6393], [6269, 6270, 6394], [6270, 6395, 6394], [6270, 6271, 6396], [6270, 6396, 6395], [6271, 6272, 6396], [6272, 6397, 6396], [6272, 6273, 6398], [6272, 6398, 6397], [6273, 6274, 6398], [6274, 6399, 6398], [6274, 6275, 6400], [6274, 6400, 6399], [6275, 6276, 6400], [6276, 6401, 6400], [6276, 6277, 6402], [6276, 6402, 6401], [6277, 6278, 6402], [6278, 6403, 6402], [6278, 6279, 6404], [6278, 6404, 6403], [6279, 6280, 6404], [6280, 6405, 6404], [6280, 6281, 6406], [6280, 6406, 6405], [6281, 6282, 6406], [6282, 6407, 6406], [6282, 6283, 6408], [6282, 6408, 6407], [6283, 6284, 6408], [6284, 6409, 6408], [6284, 6285, 6410], [6284, 6410, 6409], [6285, 6286, 6410], [6286, 6411, 6410], [6286, 6287, 6412], [6286, 6412, 6411], [6287, 6288, 6412], [6288, 6413, 6412], [6291, 6292, 6414], [6292, 6415, 6414], [6292, 6293, 6416], [6292, 6416, 6415], [6293, 6294, 6416], [6294, 6417, 6416], [6294, 6295, 6418], [6294, 6418, 6417], [6295, 6296, 6418], [6296, 6419, 6418], [6296, 6297, 6420], [6296, 6420, 6419], [6297, 6298, 6420], [6298, 6421, 6420], [6298, 6299, 6422], [6298, 6422, 6421], [6299, 6300, 6422], [6300, 6423, 6422], [6300, 6301, 6424], [6300, 6424, 6423], [6301, 6302, 6424], [6302, 6425, 6424], [6302, 6303, 6426], [6302, 6426, 6425], [6303, 6304, 6426], [6304, 6427, 6426], [6304, 6305, 6428], [6304, 6428, 6427], [6305, 6306, 6428], [6306, 6429, 6428], [6306, 6307, 6430], [6306, 6430, 6429], [6307, 6308, 6430], [6308, 6431, 6430], [6308, 6309, 6432], [6308, 6432, 6431], [6309, 6310, 6432], [6310, 6433, 6432], [6310, 6311, 6434], [6310, 6434, 6433], [6311, 6312, 6434], [6312, 6435, 6434], [6312, 6313, 6436], [6312, 6436, 6435], [6313, 6314, 6436], [6314, 6437, 6436], [6315, 6316, 6438], [6316, 6439, 6438], [6316, 6317, 6440], [6316, 6440, 6439], [6317, 6318, 6440], [6318, 6441, 6440], [6318, 6319, 6442], [6318, 6442, 6441], [6319, 6320, 6442], [6320, 6443, 6442], [6320, 6321, 6444], [6320, 6444, 6443], [6321, 6322, 6444], [6322, 6445, 6444], [6322, 6323, 6446], [6322, 6446, 6445], [6323, 6324, 6446], [6324, 6447, 6446], [6324, 6325, 6448], [6324, 6448, 6447], [6325, 6326, 6448], [6326, 6449, 6448], [6326, 6327, 6450], [6326, 6450, 6449], [6327, 6328, 6450], [6328, 6451, 6450], [6328, 6329, 6452], [6328, 6452, 6451], [6329, 6330, 6452], [6330, 6453, 6452], [6330, 6331, 6454], [6330, 6454, 6453], [6331, 6332, 6454], [6332, 6455, 6454], [6332, 6333, 6456], [6332, 6456, 6455], [6333, 6334, 6456], [6334, 6457, 6456], [6334, 6335, 6458], [6334, 6458, 6457], [6335, 6336, 6458], [6336, 6459, 6458], [6336, 6337, 6460], [6336, 6460, 6459], [6337, 6338, 6460], [6338, 6461, 6460], [6338, 
6339, 6462], [6338, 6462, 6461], [6339, 6340, 6462], [6340, 6463, 6462], [6340, 6341, 6464], [6340, 6464, 6463], [6341, 6342, 6464], [6342, 6465, 6464], [6342, 6343, 6466], [6342, 6466, 6465], [6343, 6344, 6466], [6344, 6467, 6466], [6344, 6345, 6468], [6344, 6468, 6467], [6345, 6346, 6468], [6346, 6469, 6468], [6346, 6347, 6470], [6346, 6470, 6469], [6347, 6348, 6470], [6348, 6471, 6470], [6348, 6349, 6472], [6348, 6472, 6471], [6349, 6350, 6472], [6350, 6473, 6472], [6350, 6351, 6474], [6350, 6474, 6473], [6351, 6352, 6474], [6352, 6475, 6474], [6352, 6353, 6476], [6352, 6476, 6475], [6353, 6354, 6476], [6354, 6477, 6476], [6354, 6355, 6478], [6354, 6478, 6477], [6355, 6356, 6478], [6356, 6479, 6478], [6356, 6357, 6480], [6356, 6480, 6479], [6357, 6358, 6480], [6358, 6481, 6480], [6358, 6359, 6482], [6358, 6482, 6481], [6359, 6360, 6482], [6360, 6483, 6482], [6360, 6361, 6484], [6360, 6484, 6483], [6361, 6362, 6484], [6362, 6485, 6484], [6362, 6363, 6486], [6362, 6486, 6485], [6363, 6364, 6486], [6364, 6487, 6486], [6364, 6365, 6488], [6364, 6488, 6487], [6365, 6366, 6488], [6366, 6489, 6488], [6366, 6367, 6490], [6366, 6490, 6489], [6367, 6368, 6490], [6368, 6491, 6490], [6368, 6369, 6492], [6368, 6492, 6491], [6369, 6370, 6492], [6370, 6493, 6492], [6370, 6371, 6494], [6370, 6494, 6493], [6371, 6372, 6494], [6372, 6495, 6494], [6372, 6373, 6496], [6372, 6496, 6495], [6373, 6374, 6496], [6374, 6497, 6496], [6374, 6375, 6498], [6374, 6498, 6497], [6375, 6376, 6498], [6376, 6499, 6498], [6376, 6377, 6500], [6376, 6500, 6499], [6377, 6378, 6500], [6378, 6501, 6500], [6378, 6379, 6502], [6378, 6502, 6501], [6379, 6380, 6502], [6380, 6503, 6502], [6380, 6381, 6504], [6380, 6504, 6503], [6381, 6382, 6504], [6382, 6505, 6504], [6382, 6383, 6506], [6382, 6506, 6505], [6383, 6384, 6506], [6384, 6507, 6506], [6384, 6385, 6508], [6384, 6508, 6507], [6385, 6386, 6508], [6386, 6509, 6508], [6386, 6387, 6510], [6386, 6510, 6509], [6387, 6388, 6510], [6388, 6511, 6510], [6388, 6389, 6512], [6388, 6512, 6511], [6389, 6390, 6512], [6390, 6513, 6512], [6390, 6391, 6514], [6390, 6514, 6513], [6391, 6392, 6514], [6392, 6515, 6514], [6392, 6393, 6516], [6392, 6516, 6515], [6393, 6394, 6516], [6394, 6517, 6516], [6394, 6395, 6518], [6394, 6518, 6517], [6395, 6396, 6518], [6396, 6519, 6518], [6396, 6397, 6520], [6396, 6520, 6519], [6397, 6398, 6520], [6398, 6521, 6520], [6398, 6399, 6522], [6398, 6522, 6521], [6399, 6400, 6522], [6400, 6523, 6522], [6400, 6401, 6524], [6400, 6524, 6523], [6401, 6402, 6524], [6402, 6525, 6524], [6402, 6403, 6526], [6402, 6526, 6525], [6403, 6404, 6526], [6404, 6527, 6526], [6404, 6405, 6528], [6404, 6528, 6527], [6405, 6406, 6528], [6406, 6529, 6528], [6406, 6407, 6530], [6406, 6530, 6529], [6407, 6408, 6530], [6408, 6531, 6530], [6408, 6409, 6532], [6408, 6532, 6531], [6409, 6410, 6532], [6410, 6533, 6532], [6410, 6411, 6534], [6410, 6534, 6533], [6411, 6412, 6534], [6412, 6535, 6534], [6412, 6413, 6536], [6412, 6536, 6535], [6416, 6417, 6537], [6417, 6418, 6537], [6418, 6538, 6537], [6418, 6419, 6539], [6418, 6539, 6538], [6419, 6420, 6539], [6420, 6540, 6539], [6420, 6421, 6541], [6420, 6541, 6540], [6421, 6422, 6541], [6422, 6542, 6541], [6422, 6423, 6543], [6422, 6543, 6542], [6423, 6424, 6543], [6424, 6544, 6543], [6424, 6425, 6545], [6424, 6545, 6544], [6425, 6426, 6545], [6426, 6546, 6545], [6426, 6427, 6547], [6426, 6547, 6546], [6427, 6428, 6547], [6428, 6548, 6547], [6428, 6429, 6549], [6428, 6549, 6548], [6429, 6430, 6549], [6430, 6550, 6549], [6430, 6431, 6551], 
[6430, 6551, 6550], [6431, 6432, 6551], [6432, 6552, 6551], [6432, 6433, 6553], [6432, 6553, 6552], [6433, 6434, 6553], [6434, 6554, 6553], [6434, 6435, 6555], [6434, 6555, 6554], [6435, 6436, 6555], [6436, 6556, 6555], [6436, 6437, 6557], [6436, 6557, 6556], [6438, 6439, 6559], [6438, 6559, 6558], [6439, 6440, 6559], [6440, 6560, 6559], [6440, 6441, 6561], [6440, 6561, 6560], [6441, 6442, 6561], [6442, 6562, 6561], [6442, 6443, 6563], [6442, 6563, 6562], [6443, 6444, 6563], [6444, 6564, 6563], [6444, 6445, 6565], [6444, 6565, 6564], [6445, 6446, 6565], [6446, 6566, 6565], [6446, 6447, 6567], [6446, 6567, 6566], [6447, 6448, 6567], [6448, 6568, 6567], [6448, 6449, 6569], [6448, 6569, 6568], [6449, 6450, 6569], [6450, 6570, 6569], [6450, 6451, 6571], [6450, 6571, 6570], [6451, 6452, 6571], [6452, 6572, 6571], [6452, 6453, 6573], [6452, 6573, 6572], [6453, 6454, 6573], [6454, 6574, 6573], [6454, 6455, 6575], [6454, 6575, 6574], [6455, 6456, 6575], [6456, 6576, 6575], [6456, 6457, 6577], [6456, 6577, 6576], [6457, 6458, 6577], [6458, 6578, 6577], [6458, 6459, 6579], [6458, 6579, 6578], [6459, 6460, 6579], [6460, 6580, 6579], [6460, 6461, 6581], [6460, 6581, 6580], [6461, 6462, 6581], [6462, 6582, 6581], [6462, 6463, 6583], [6462, 6583, 6582], [6463, 6464, 6583], [6464, 6584, 6583], [6464, 6465, 6585], [6464, 6585, 6584], [6465, 6466, 6585], [6466, 6586, 6585], [6466, 6467, 6587], [6466, 6587, 6586], [6467, 6468, 6587], [6468, 6588, 6587], [6468, 6469, 6589], [6468, 6589, 6588], [6469, 6470, 6589], [6470, 6590, 6589], [6470, 6471, 6591], [6470, 6591, 6590], [6471, 6472, 6591], [6472, 6592, 6591], [6472, 6473, 6593], [6472, 6593, 6592], [6473, 6474, 6593], [6474, 6594, 6593], [6474, 6475, 6595], [6474, 6595, 6594], [6475, 6476, 6595], [6476, 6596, 6595], [6476, 6477, 6597], [6476, 6597, 6596], [6477, 6478, 6597], [6478, 6598, 6597], [6478, 6479, 6599], [6478, 6599, 6598], [6479, 6480, 6599], [6480, 6600, 6599], [6480, 6481, 6601], [6480, 6601, 6600], [6481, 6482, 6601], [6482, 6602, 6601], [6482, 6483, 6603], [6482, 6603, 6602], [6483, 6484, 6603], [6484, 6604, 6603], [6484, 6485, 6605], [6484, 6605, 6604], [6485, 6486, 6605], [6486, 6606, 6605], [6486, 6487, 6607], [6486, 6607, 6606], [6487, 6488, 6607], [6488, 6608, 6607], [6488, 6489, 6609], [6488, 6609, 6608], [6489, 6490, 6609], [6490, 6610, 6609], [6490, 6491, 6611], [6490, 6611, 6610], [6491, 6492, 6611], [6492, 6612, 6611], [6492, 6493, 6613], [6492, 6613, 6612], [6493, 6494, 6613], [6494, 6614, 6613], [6494, 6495, 6615], [6494, 6615, 6614], [6495, 6496, 6615], [6496, 6616, 6615], [6496, 6497, 6617], [6496, 6617, 6616], [6497, 6498, 6617], [6498, 6618, 6617], [6498, 6499, 6619], [6498, 6619, 6618], [6499, 6500, 6619], [6500, 6620, 6619], [6500, 6501, 6621], [6500, 6621, 6620], [6501, 6502, 6621], [6502, 6622, 6621], [6502, 6503, 6623], [6502, 6623, 6622], [6503, 6504, 6623], [6504, 6624, 6623], [6504, 6505, 6625], [6504, 6625, 6624], [6505, 6506, 6625], [6506, 6626, 6625], [6506, 6507, 6627], [6506, 6627, 6626], [6507, 6508, 6627], [6508, 6628, 6627], [6508, 6509, 6629], [6508, 6629, 6628], [6509, 6510, 6629], [6510, 6630, 6629], [6510, 6511, 6631], [6510, 6631, 6630], [6511, 6512, 6631], [6512, 6632, 6631], [6512, 6513, 6633], [6512, 6633, 6632], [6513, 6514, 6633], [6514, 6634, 6633], [6514, 6515, 6635], [6514, 6635, 6634], [6515, 6516, 6635], [6516, 6636, 6635], [6516, 6517, 6637], [6516, 6637, 6636], [6517, 6518, 6637], [6518, 6638, 6637], [6518, 6519, 6639], [6518, 6639, 6638], [6519, 6520, 6639], [6520, 6640, 6639], [6520, 6521, 
6641], [6520, 6641, 6640], [6521, 6522, 6641], [6522, 6642, 6641], [6522, 6523, 6643], [6522, 6643, 6642], [6523, 6524, 6643], [6524, 6644, 6643], [6524, 6525, 6645], [6524, 6645, 6644], [6525, 6526, 6645], [6526, 6646, 6645], [6526, 6527, 6647], [6526, 6647, 6646], [6527, 6528, 6647], [6528, 6648, 6647], [6528, 6529, 6649], [6528, 6649, 6648], [6529, 6530, 6649], [6530, 6650, 6649], [6530, 6531, 6651], [6530, 6651, 6650], [6531, 6532, 6651], [6532, 6652, 6651], [6532, 6533, 6653], [6532, 6653, 6652], [6533, 6534, 6653], [6534, 6654, 6653], [6534, 6535, 6655], [6534, 6655, 6654], [6535, 6536, 6655], [6536, 6656, 6655], [6539, 6540, 6658], [6539, 6658, 6657], [6540, 6541, 6658], [6541, 6659, 6658], [6541, 6542, 6660], [6541, 6660, 6659], [6542, 6543, 6660], [6543, 6661, 6660], [6543, 6544, 6662], [6543, 6662, 6661], [6544, 6545, 6662], [6545, 6663, 6662], [6545, 6546, 6664], [6545, 6664, 6663], [6546, 6547, 6664], [6547, 6665, 6664], [6547, 6548, 6666], [6547, 6666, 6665], [6548, 6549, 6666], [6549, 6667, 6666], [6549, 6550, 6668], [6549, 6668, 6667], [6550, 6551, 6668], [6551, 6669, 6668], [6551, 6552, 6670], [6551, 6670, 6669], [6552, 6553, 6670], [6553, 6671, 6670], [6553, 6554, 6672], [6553, 6672, 6671], [6554, 6555, 6672], [6555, 6673, 6672], [6555, 6556, 6674], [6555, 6674, 6673], [6556, 6557, 6674], [6557, 6675, 6674], [6558, 6559, 6676], [6559, 6677, 6676], [6559, 6560, 6678], [6559, 6678, 6677], [6560, 6561, 6678], [6561, 6679, 6678], [6561, 6562, 6680], [6561, 6680, 6679], [6562, 6563, 6680], [6563, 6681, 6680], [6563, 6564, 6682], [6563, 6682, 6681], [6564, 6565, 6682], [6565, 6683, 6682], [6565, 6566, 6684], [6565, 6684, 6683], [6566, 6567, 6684], [6567, 6685, 6684], [6567, 6568, 6686], [6567, 6686, 6685], [6568, 6569, 6686], [6569, 6687, 6686], [6569, 6570, 6688], [6569, 6688, 6687], [6570, 6571, 6688], [6571, 6689, 6688], [6571, 6572, 6690], [6571, 6690, 6689], [6572, 6573, 6690], [6573, 6691, 6690], [6573, 6574, 6692], [6573, 6692, 6691], [6574, 6575, 6692], [6575, 6693, 6692], [6575, 6576, 6694], [6575, 6694, 6693], [6576, 6577, 6694], [6577, 6695, 6694], [6577, 6578, 6696], [6577, 6696, 6695], [6578, 6579, 6696], [6579, 6697, 6696], [6579, 6580, 6698], [6579, 6698, 6697], [6580, 6581, 6698], [6581, 6699, 6698], [6581, 6582, 6700], [6581, 6700, 6699], [6582, 6583, 6700], [6583, 6701, 6700], [6583, 6584, 6702], [6583, 6702, 6701], [6584, 6585, 6702], [6585, 6703, 6702], [6585, 6586, 6704], [6585, 6704, 6703], [6586, 6587, 6704], [6587, 6705, 6704], [6587, 6588, 6706], [6587, 6706, 6705], [6588, 6589, 6706], [6589, 6707, 6706], [6589, 6590, 6708], [6589, 6708, 6707], [6590, 6591, 6708], [6591, 6709, 6708], [6591, 6592, 6710], [6591, 6710, 6709], [6592, 6593, 6710], [6593, 6711, 6710], [6593, 6594, 6712], [6593, 6712, 6711], [6594, 6595, 6712], [6595, 6713, 6712], [6595, 6596, 6714], [6595, 6714, 6713], [6596, 6597, 6714], [6597, 6715, 6714], [6597, 6598, 6716], [6597, 6716, 6715], [6598, 6599, 6716], [6599, 6717, 6716], [6599, 6600, 6718], [6599, 6718, 6717], [6600, 6601, 6718], [6601, 6719, 6718], [6601, 6602, 6720], [6601, 6720, 6719], [6602, 6603, 6720], [6603, 6721, 6720], [6603, 6604, 6722], [6603, 6722, 6721], [6604, 6605, 6722], [6605, 6723, 6722], [6605, 6606, 6724], [6605, 6724, 6723], [6606, 6607, 6724], [6607, 6725, 6724], [6607, 6608, 6726], [6607, 6726, 6725], [6608, 6609, 6726], [6609, 6727, 6726], [6609, 6610, 6728], [6609, 6728, 6727], [6610, 6611, 6728], [6611, 6729, 6728], [6611, 6612, 6730], [6611, 6730, 6729], [6612, 6613, 6730], [6613, 6731, 6730], [6613, 
6614, 6732], [6613, 6732, 6731], [6614, 6615, 6732], [6615, 6733, 6732], [6615, 6616, 6734], [6615, 6734, 6733], [6616, 6617, 6734], [6617, 6735, 6734], [6617, 6618, 6736], [6617, 6736, 6735], [6618, 6619, 6736], [6619, 6737, 6736], [6619, 6620, 6738], [6619, 6738, 6737], [6620, 6621, 6738], [6621, 6739, 6738], [6621, 6622, 6740], [6621, 6740, 6739], [6622, 6623, 6740], [6623, 6741, 6740], [6623, 6624, 6742], [6623, 6742, 6741], [6624, 6625, 6742], [6625, 6743, 6742], [6625, 6626, 6744], [6625, 6744, 6743], [6626, 6627, 6744], [6627, 6745, 6744], [6627, 6628, 6746], [6627, 6746, 6745], [6628, 6629, 6746], [6629, 6747, 6746], [6629, 6630, 6748], [6629, 6748, 6747], [6630, 6631, 6748], [6631, 6749, 6748], [6631, 6632, 6750], [6631, 6750, 6749], [6632, 6633, 6750], [6633, 6751, 6750], [6633, 6634, 6752], [6633, 6752, 6751], [6634, 6635, 6752], [6635, 6753, 6752], [6635, 6636, 6754], [6635, 6754, 6753], [6636, 6637, 6754], [6637, 6755, 6754], [6637, 6638, 6756], [6637, 6756, 6755], [6638, 6639, 6756], [6639, 6757, 6756], [6639, 6640, 6758], [6639, 6758, 6757], [6640, 6641, 6758], [6641, 6759, 6758], [6641, 6642, 6760], [6641, 6760, 6759], [6642, 6643, 6760], [6643, 6761, 6760], [6643, 6644, 6762], [6643, 6762, 6761], [6644, 6645, 6762], [6645, 6763, 6762], [6645, 6646, 6764], [6645, 6764, 6763], [6646, 6647, 6764], [6647, 6765, 6764], [6647, 6648, 6766], [6647, 6766, 6765], [6648, 6649, 6766], [6649, 6767, 6766], [6649, 6650, 6768], [6649, 6768, 6767], [6650, 6651, 6768], [6651, 6769, 6768], [6651, 6652, 6770], [6651, 6770, 6769], [6652, 6653, 6770], [6653, 6771, 6770], [6653, 6654, 6772], [6653, 6772, 6771], [6654, 6655, 6772], [6655, 6773, 6772], [6655, 6656, 6774], [6655, 6774, 6773], [6657, 6658, 6775], [6658, 6776, 6775], [6658, 6659, 6777], [6658, 6777, 6776], [6659, 6660, 6777], [6660, 6778, 6777], [6660, 6661, 6779], [6660, 6779, 6778], [6661, 6662, 6779], [6662, 6780, 6779], [6662, 6663, 6781], [6662, 6781, 6780], [6663, 6664, 6781], [6664, 6782, 6781], [6664, 6665, 6783], [6664, 6783, 6782], [6665, 6666, 6783], [6666, 6784, 6783], [6666, 6667, 6785], [6666, 6785, 6784], [6667, 6668, 6785], [6668, 6786, 6785], [6668, 6669, 6787], [6668, 6787, 6786], [6669, 6670, 6787], [6670, 6788, 6787], [6670, 6671, 6789], [6670, 6789, 6788], [6671, 6672, 6789], [6672, 6790, 6789], [6672, 6673, 6791], [6672, 6791, 6790], [6673, 6674, 6791], [6674, 6792, 6791], [6674, 6675, 6793], [6674, 6793, 6792], [6676, 6677, 6795], [6676, 6795, 6794], [6677, 6678, 6795], [6678, 6796, 6795], [6678, 6679, 6797], [6678, 6797, 6796], [6679, 6680, 6797], [6680, 6798, 6797], [6680, 6681, 6799], [6680, 6799, 6798], [6681, 6682, 6799], [6682, 6800, 6799], [6682, 6683, 6801], [6682, 6801, 6800], [6683, 6684, 6801], [6684, 6802, 6801], [6684, 6685, 6803], [6684, 6803, 6802], [6685, 6686, 6803], [6686, 6804, 6803], [6686, 6687, 6805], [6686, 6805, 6804], [6687, 6688, 6805], [6688, 6806, 6805], [6688, 6689, 6807], [6688, 6807, 6806], [6689, 6690, 6807], [6690, 6808, 6807], [6690, 6691, 6809], [6690, 6809, 6808], [6691, 6692, 6809], [6692, 6810, 6809], [6692, 6693, 6811], [6692, 6811, 6810], [6693, 6694, 6811], [6694, 6812, 6811], [6694, 6695, 6813], [6694, 6813, 6812], [6695, 6696, 6813], [6696, 6814, 6813], [6696, 6697, 6815], [6696, 6815, 6814], [6697, 6698, 6815], [6698, 6816, 6815], [6698, 6699, 6817], [6698, 6817, 6816], [6699, 6700, 6817], [6700, 6818, 6817], [6700, 6701, 6819], [6700, 6819, 6818], [6701, 6702, 6819], [6702, 6820, 6819], [6702, 6703, 6821], [6702, 6821, 6820], [6703, 6704, 6821], [6704, 6822, 6821], 
[6704, 6705, 6823], [6704, 6823, 6822], [6705, 6706, 6823], [6706, 6824, 6823], [6706, 6707, 6825], [6706, 6825, 6824], [6707, 6708, 6825], [6708, 6826, 6825], [6708, 6709, 6827], [6708, 6827, 6826], [6709, 6710, 6827], [6710, 6828, 6827], [6710, 6711, 6829], [6710, 6829, 6828], [6711, 6712, 6829], [6712, 6830, 6829], [6712, 6713, 6831], [6712, 6831, 6830], [6713, 6714, 6831], [6714, 6832, 6831], [6714, 6715, 6833], [6714, 6833, 6832], [6715, 6716, 6833], [6716, 6834, 6833], [6716, 6717, 6835], [6716, 6835, 6834], [6717, 6718, 6835], [6718, 6836, 6835], [6718, 6719, 6837], [6718, 6837, 6836], [6719, 6720, 6837], [6720, 6838, 6837], [6720, 6721, 6839], [6720, 6839, 6838], [6721, 6722, 6839], [6722, 6840, 6839], [6722, 6723, 6841], [6722, 6841, 6840], [6723, 6724, 6841], [6724, 6842, 6841], [6724, 6725, 6843], [6724, 6843, 6842], [6725, 6726, 6843], [6726, 6844, 6843], [6726, 6727, 6845], [6726, 6845, 6844], [6727, 6728, 6845], [6728, 6846, 6845], [6728, 6729, 6847], [6728, 6847, 6846], [6729, 6730, 6847], [6730, 6848, 6847], [6730, 6731, 6849], [6730, 6849, 6848], [6731, 6732, 6849], [6732, 6850, 6849], [6732, 6733, 6851], [6732, 6851, 6850], [6733, 6734, 6851], [6734, 6852, 6851], [6734, 6735, 6853], [6734, 6853, 6852], [6735, 6736, 6853], [6736, 6854, 6853], [6736, 6737, 6855], [6736, 6855, 6854], [6737, 6738, 6855], [6738, 6856, 6855], [6738, 6739, 6857], [6738, 6857, 6856], [6739, 6740, 6857], [6740, 6858, 6857], [6740, 6741, 6859], [6740, 6859, 6858], [6741, 6742, 6859], [6742, 6860, 6859], [6742, 6743, 6861], [6742, 6861, 6860], [6743, 6744, 6861], [6744, 6862, 6861], [6744, 6745, 6863], [6744, 6863, 6862], [6745, 6746, 6863], [6746, 6864, 6863], [6746, 6747, 6865], [6746, 6865, 6864], [6747, 6748, 6865], [6748, 6866, 6865], [6748, 6749, 6867], [6748, 6867, 6866], [6749, 6750, 6867], [6750, 6868, 6867], [6750, 6751, 6869], [6750, 6869, 6868], [6751, 6752, 6869], [6752, 6870, 6869], [6752, 6753, 6871], [6752, 6871, 6870], [6753, 6754, 6871], [6754, 6872, 6871], [6754, 6755, 6873], [6754, 6873, 6872], [6755, 6756, 6873], [6756, 6874, 6873], [6756, 6757, 6875], [6756, 6875, 6874], [6757, 6758, 6875], [6758, 6876, 6875], [6758, 6759, 6877], [6758, 6877, 6876], [6759, 6760, 6877], [6760, 6878, 6877], [6760, 6761, 6879], [6760, 6879, 6878], [6761, 6762, 6879], [6762, 6880, 6879], [6762, 6763, 6881], [6762, 6881, 6880], [6763, 6764, 6881], [6764, 6882, 6881], [6764, 6765, 6883], [6764, 6883, 6882], [6765, 6766, 6883], [6766, 6884, 6883], [6766, 6767, 6885], [6766, 6885, 6884], [6767, 6768, 6885], [6768, 6886, 6885], [6768, 6769, 6887], [6768, 6887, 6886], [6769, 6770, 6887], [6770, 6888, 6887], [6770, 6771, 6889], [6770, 6889, 6888], [6771, 6772, 6889], [6772, 6890, 6889], [6772, 6773, 6891], [6772, 6891, 6890], [6773, 6774, 6891], [6774, 6892, 6891], [6775, 6776, 6894], [6775, 6894, 6893], [6776, 6777, 6894], [6777, 6895, 6894], [6777, 6778, 6896], [6777, 6896, 6895], [6778, 6779, 6896], [6779, 6897, 6896], [6779, 6780, 6898], [6779, 6898, 6897], [6780, 6781, 6898], [6781, 6899, 6898], [6781, 6782, 6900], [6781, 6900, 6899], [6782, 6783, 6900], [6783, 6901, 6900], [6783, 6784, 6902], [6783, 6902, 6901], [6784, 6785, 6902], [6785, 6903, 6902], [6785, 6786, 6904], [6785, 6904, 6903], [6786, 6787, 6904], [6787, 6905, 6904], [6787, 6788, 6906], [6787, 6906, 6905], [6788, 6789, 6906], [6789, 6907, 6906], [6789, 6790, 6908], [6789, 6908, 6907], [6790, 6791, 6908], [6791, 6909, 6908], [6791, 6792, 6910], [6791, 6910, 6909], [6792, 6793, 6910], [6793, 6911, 6910], [6794, 6795, 6912], [6795, 6913, 
6912], [6795, 6796, 6914], [6795, 6914, 6913], [6796, 6797, 6914], [6797, 6915, 6914], [6797, 6798, 6916], [6797, 6916, 6915], [6798, 6799, 6916], [6799, 6917, 6916], [6799, 6800, 6918], [6799, 6918, 6917], [6800, 6801, 6918], [6801, 6919, 6918], [6801, 6802, 6920], [6801, 6920, 6919], [6802, 6803, 6920], [6803, 6921, 6920], [6803, 6804, 6922], [6803, 6922, 6921], [6804, 6805, 6922], [6805, 6923, 6922], [6805, 6806, 6924], [6805, 6924, 6923], [6806, 6807, 6924], [6807, 6925, 6924], [6807, 6808, 6926], [6807, 6926, 6925], [6808, 6809, 6926], [6809, 6927, 6926], [6809, 6810, 6928], [6809, 6928, 6927], [6810, 6811, 6928], [6811, 6929, 6928], [6811, 6812, 6930], [6811, 6930, 6929], [6812, 6813, 6930], [6813, 6931, 6930], [6813, 6814, 6932], [6813, 6932, 6931], [6814, 6815, 6932], [6815, 6933, 6932], [6815, 6816, 6934], [6815, 6934, 6933], [6816, 6817, 6934], [6817, 6935, 6934], [6817, 6818, 6936], [6817, 6936, 6935], [6818, 6819, 6936], [6819, 6937, 6936], [6819, 6820, 6938], [6819, 6938, 6937], [6820, 6821, 6938], [6821, 6939, 6938], [6821, 6822, 6940], [6821, 6940, 6939], [6822, 6823, 6940], [6823, 6941, 6940], [6823, 6824, 6942], [6823, 6942, 6941], [6824, 6825, 6942], [6825, 6943, 6942], [6825, 6826, 6944], [6825, 6944, 6943], [6826, 6827, 6944], [6827, 6945, 6944], [6827, 6828, 6946], [6827, 6946, 6945], [6828, 6829, 6946], [6829, 6947, 6946], [6829, 6830, 6948], [6829, 6948, 6947], [6830, 6831, 6948], [6831, 6949, 6948], [6831, 6832, 6950], [6831, 6950, 6949], [6832, 6833, 6950], [6833, 6951, 6950], [6833, 6834, 6952], [6833, 6952, 6951], [6834, 6835, 6952], [6835, 6953, 6952], [6835, 6836, 6954], [6835, 6954, 6953], [6836, 6837, 6954], [6837, 6955, 6954], [6837, 6838, 6956], [6837, 6956, 6955], [6838, 6839, 6956], [6839, 6957, 6956], [6839, 6840, 6958], [6839, 6958, 6957], [6840, 6841, 6958], [6841, 6959, 6958], [6841, 6842, 6960], [6841, 6960, 6959], [6842, 6843, 6960], [6843, 6961, 6960], [6843, 6844, 6962], [6843, 6962, 6961], [6844, 6845, 6962], [6845, 6963, 6962], [6845, 6846, 6964], [6845, 6964, 6963], [6846, 6847, 6964], [6847, 6965, 6964], [6847, 6848, 6966], [6847, 6966, 6965], [6848, 6849, 6966], [6849, 6967, 6966], [6849, 6850, 6968], [6849, 6968, 6967], [6850, 6851, 6968], [6851, 6969, 6968], [6851, 6852, 6970], [6851, 6970, 6969], [6852, 6853, 6970], [6853, 6971, 6970], [6853, 6854, 6972], [6853, 6972, 6971], [6854, 6855, 6972], [6855, 6973, 6972], [6855, 6856, 6974], [6855, 6974, 6973], [6856, 6857, 6974], [6857, 6975, 6974], [6857, 6858, 6976], [6857, 6976, 6975], [6858, 6859, 6976], [6859, 6977, 6976], [6859, 6860, 6978], [6859, 6978, 6977], [6860, 6861, 6978], [6861, 6979, 6978], [6861, 6862, 6980], [6861, 6980, 6979], [6862, 6863, 6980], [6863, 6981, 6980], [6863, 6864, 6982], [6863, 6982, 6981], [6864, 6865, 6982], [6865, 6983, 6982], [6865, 6866, 6984], [6865, 6984, 6983], [6866, 6867, 6984], [6867, 6985, 6984], [6867, 6868, 6986], [6867, 6986, 6985], [6868, 6869, 6986], [6869, 6987, 6986], [6869, 6870, 6988], [6869, 6988, 6987], [6870, 6871, 6988], [6871, 6989, 6988], [6871, 6872, 6990], [6871, 6990, 6989], [6872, 6873, 6990], [6873, 6991, 6990], [6873, 6874, 6992], [6873, 6992, 6991], [6874, 6875, 6992], [6875, 6993, 6992], [6875, 6876, 6994], [6875, 6994, 6993], [6876, 6877, 6994], [6877, 6995, 6994], [6877, 6878, 6996], [6877, 6996, 6995], [6878, 6879, 6996], [6879, 6997, 6996], [6879, 6880, 6998], [6879, 6998, 6997], [6880, 6881, 6998], [6881, 6999, 6998], [6881, 6882, 7000], [6881, 7000, 6999], [6882, 6883, 7000], [6883, 7001, 7000], [6883, 6884, 7002], [6883, 
7002, 7001], [6884, 6885, 7002], [6885, 7003, 7002], [6885, 6886, 7004], [6885, 7004, 7003], [6886, 6887, 7004], [6887, 7005, 7004], [6887, 6888, 7006], [6887, 7006, 7005], [6888, 6889, 7006], [6889, 7007, 7006], [6889, 6890, 7008], [6889, 7008, 7007], [6890, 6891, 7008], [6891, 7009, 7008], [6891, 6892, 7010], [6891, 7010, 7009], [6893, 6894, 7011], [6894, 7012, 7011], [6894, 6895, 7013], [6894, 7013, 7012], [6895, 6896, 7013], [6896, 7014, 7013], [6896, 6897, 7015], [6896, 7015, 7014], [6897, 6898, 7015], [6898, 7016, 7015], [6898, 6899, 7017], [6898, 7017, 7016], [6899, 6900, 7017], [6900, 7018, 7017], [6900, 6901, 7019], [6900, 7019, 7018], [6901, 6902, 7019], [6902, 7020, 7019], [6902, 6903, 7021], [6902, 7021, 7020], [6903, 6904, 7021], [6904, 7022, 7021], [6904, 6905, 7023], [6904, 7023, 7022], [6905, 6906, 7023], [6906, 7024, 7023], [6906, 6907, 7025], [6906, 7025, 7024], [6907, 6908, 7025], [6908, 7026, 7025], [6908, 6909, 7027], [6908, 7027, 7026], [6909, 6910, 7027], [6910, 7028, 7027], [6910, 6911, 7029], [6910, 7029, 7028], [6912, 6913, 7031], [6912, 7031, 7030], [6913, 6914, 7031], [6914, 7032, 7031], [6914, 6915, 7033], [6914, 7033, 7032], [6915, 6916, 7033], [6916, 7034, 7033], [6916, 6917, 7035], [6916, 7035, 7034], [6917, 6918, 7035], [6918, 7036, 7035], [6918, 6919, 7037], [6918, 7037, 7036], [6919, 6920, 7037], [6920, 7038, 7037], [6920, 6921, 7039], [6920, 7039, 7038], [6921, 6922, 7039], [6922, 7040, 7039], [6922, 6923, 7041], [6922, 7041, 7040], [6923, 6924, 7041], [6924, 7042, 7041], [6924, 6925, 7043], [6924, 7043, 7042], [6925, 6926, 7043], [6926, 7044, 7043], [6926, 6927, 7045], [6926, 7045, 7044], [6927, 6928, 7045], [6928, 7046, 7045], [6928, 6929, 7047], [6928, 7047, 7046], [6929, 6930, 7047], [6930, 7048, 7047], [6930, 6931, 7049], [6930, 7049, 7048], [6931, 6932, 7049], [6932, 7050, 7049], [6932, 6933, 7051], [6932, 7051, 7050], [6933, 6934, 7051], [6934, 7052, 7051], [6934, 6935, 7053], [6934, 7053, 7052], [6935, 6936, 7053], [6936, 7054, 7053], [6936, 6937, 7055], [6936, 7055, 7054], [6937, 6938, 7055], [6938, 7056, 7055], [6938, 6939, 7057], [6938, 7057, 7056], [6939, 6940, 7057], [6940, 7058, 7057], [6940, 6941, 7059], [6940, 7059, 7058], [6941, 6942, 7059], [6942, 7060, 7059], [6942, 6943, 7061], [6942, 7061, 7060], [6943, 6944, 7061], [6944, 7062, 7061], [6944, 6945, 7063], [6944, 7063, 7062], [6945, 6946, 7063], [6946, 7064, 7063], [6946, 6947, 7065], [6946, 7065, 7064], [6947, 6948, 7065], [6948, 7066, 7065], [6948, 6949, 7067], [6948, 7067, 7066], [6949, 6950, 7067], [6950, 7068, 7067], [6950, 6951, 7069], [6950, 7069, 7068], [6951, 6952, 7069], [6952, 7070, 7069], [6952, 6953, 7071], [6952, 7071, 7070], [6953, 6954, 7071], [6954, 7072, 7071], [6954, 6955, 7073], [6954, 7073, 7072], [6955, 6956, 7073], [6956, 7074, 7073], [6956, 6957, 7075], [6956, 7075, 7074], [6957, 6958, 7075], [6958, 7076, 7075], [6958, 6959, 7077], [6958, 7077, 7076], [6959, 6960, 7077], [6960, 7078, 7077], [6960, 6961, 7079], [6960, 7079, 7078], [6961, 6962, 7079], [6962, 7080, 7079], [6962, 6963, 7081], [6962, 7081, 7080], [6963, 6964, 7081], [6964, 7082, 7081], [6964, 6965, 7083], [6964, 7083, 7082], [6965, 6966, 7083], [6966, 7084, 7083], [6966, 6967, 7085], [6966, 7085, 7084], [6967, 6968, 7085], [6968, 7086, 7085], [6968, 6969, 7087], [6968, 7087, 7086], [6969, 6970, 7087], [6970, 7088, 7087], [6970, 6971, 7089], [6970, 7089, 7088], [6971, 6972, 7089], [6972, 7090, 7089], [6972, 6973, 7091], [6972, 7091, 7090], [6973, 6974, 7091], [6974, 7092, 7091], [6974, 6975, 7093], 
[6974, 7093, 7092], [6975, 6976, 7093], [6976, 7094, 7093], [6976, 6977, 7095], [6976, 7095, 7094], [6977, 6978, 7095], [6978, 7096, 7095], [6978, 6979, 7097], [6978, 7097, 7096], [6979, 6980, 7097], [6980, 7098, 7097], [6980, 6981, 7099], [6980, 7099, 7098], [6981, 6982, 7099], [6982, 7100, 7099], [6982, 6983, 7101], [6982, 7101, 7100], [6983, 6984, 7101], [6984, 7102, 7101], [6984, 6985, 7103], [6984, 7103, 7102], [6985, 6986, 7103], [6986, 7104, 7103], [6986, 6987, 7105], [6986, 7105, 7104], [6987, 6988, 7105], [6988, 7106, 7105], [6988, 6989, 7107], [6988, 7107, 7106], [6989, 6990, 7107], [6990, 7108, 7107], [6990, 6991, 7109], [6990, 7109, 7108], [6991, 6992, 7109], [6992, 7110, 7109], [6992, 6993, 7111], [6992, 7111, 7110], [6993, 6994, 7111], [6994, 7112, 7111], [6994, 6995, 7113], [6994, 7113, 7112], [6995, 6996, 7113], [6996, 7114, 7113], [6996, 6997, 7115], [6996, 7115, 7114], [6997, 6998, 7115], [6998, 7116, 7115], [6998, 6999, 7117], [6998, 7117, 7116], [6999, 7000, 7117], [7000, 7118, 7117], [7000, 7001, 7119], [7000, 7119, 7118], [7001, 7002, 7119], [7002, 7120, 7119], [7002, 7003, 7121], [7002, 7121, 7120], [7003, 7004, 7121], [7004, 7122, 7121], [7004, 7005, 7123], [7004, 7123, 7122], [7005, 7006, 7123], [7006, 7124, 7123], [7006, 7007, 7125], [7006, 7125, 7124], [7007, 7008, 7125], [7008, 7126, 7125], [7008, 7009, 7127], [7008, 7127, 7126], [7009, 7010, 7127], [7010, 7128, 7127], [7011, 7012, 7130], [7011, 7130, 7129], [7012, 7013, 7130], [7013, 7131, 7130], [7013, 7014, 7132], [7013, 7132, 7131], [7014, 7015, 7132], [7015, 7133, 7132], [7015, 7016, 7134], [7015, 7134, 7133], [7016, 7017, 7134], [7017, 7135, 7134], [7017, 7018, 7136], [7017, 7136, 7135], [7018, 7019, 7136], [7019, 7137, 7136], [7019, 7020, 7138], [7019, 7138, 7137], [7020, 7021, 7138], [7021, 7139, 7138], [7021, 7022, 7140], [7021, 7140, 7139], [7022, 7023, 7140], [7023, 7141, 7140], [7023, 7024, 7142], [7023, 7142, 7141], [7024, 7025, 7142], [7025, 7143, 7142], [7025, 7026, 7144], [7025, 7144, 7143], [7026, 7027, 7144], [7027, 7145, 7144], [7027, 7028, 7146], [7027, 7146, 7145], [7028, 7029, 7146], [7029, 7147, 7146], [7030, 7031, 7148], [7031, 7149, 7148], [7031, 7032, 7150], [7031, 7150, 7149], [7032, 7033, 7150], [7033, 7151, 7150], [7033, 7034, 7152], [7033, 7152, 7151], [7034, 7035, 7152], [7035, 7153, 7152], [7035, 7036, 7154], [7035, 7154, 7153], [7036, 7037, 7154], [7037, 7155, 7154], [7037, 7038, 7156], [7037, 7156, 7155], [7038, 7039, 7156], [7039, 7157, 7156], [7039, 7040, 7158], [7039, 7158, 7157], [7040, 7041, 7158], [7041, 7159, 7158], [7041, 7042, 7160], [7041, 7160, 7159], [7042, 7043, 7160], [7043, 7161, 7160], [7043, 7044, 7162], [7043, 7162, 7161], [7044, 7045, 7162], [7045, 7163, 7162], [7045, 7046, 7164], [7045, 7164, 7163], [7046, 7047, 7164], [7047, 7165, 7164], [7047, 7048, 7166], [7047, 7166, 7165], [7048, 7049, 7166], [7049, 7167, 7166], [7049, 7050, 7168], [7049, 7168, 7167], [7050, 7051, 7168], [7051, 7169, 7168], [7051, 7052, 7170], [7051, 7170, 7169], [7052, 7053, 7170], [7053, 7171, 7170], [7053, 7054, 7172], [7053, 7172, 7171], [7054, 7055, 7172], [7055, 7173, 7172], [7055, 7056, 7174], [7055, 7174, 7173], [7056, 7057, 7174], [7057, 7175, 7174], [7057, 7058, 7176], [7057, 7176, 7175], [7058, 7059, 7176], [7059, 7177, 7176], [7059, 7060, 7178], [7059, 7178, 7177], [7060, 7061, 7178], [7061, 7179, 7178], [7061, 7062, 7180], [7061, 7180, 7179], [7062, 7063, 7180], [7063, 7181, 7180], [7063, 7064, 7182], [7063, 7182, 7181], [7064, 7065, 7182], [7065, 7183, 7182], [7065, 7066, 
7184], [7065, 7184, 7183], [7066, 7067, 7184], [7067, 7185, 7184], [7067, 7068, 7186], [7067, 7186, 7185], [7068, 7069, 7186], [7069, 7187, 7186], [7069, 7070, 7188], [7069, 7188, 7187], [7070, 7071, 7188], [7071, 7189, 7188], [7071, 7072, 7190], [7071, 7190, 7189], [7072, 7073, 7190], [7073, 7191, 7190], [7073, 7074, 7192], [7073, 7192, 7191], [7074, 7075, 7192], [7075, 7193, 7192], [7075, 7076, 7194], [7075, 7194, 7193], [7076, 7077, 7194], [7077, 7195, 7194], [7077, 7078, 7196], [7077, 7196, 7195], [7078, 7079, 7196], [7079, 7197, 7196], [7079, 7080, 7198], [7079, 7198, 7197], [7080, 7081, 7198], [7081, 7199, 7198], [7081, 7082, 7200], [7081, 7200, 7199], [7082, 7083, 7200], [7083, 7201, 7200], [7083, 7084, 7202], [7083, 7202, 7201], [7084, 7085, 7202], [7085, 7203, 7202], [7085, 7086, 7204], [7085, 7204, 7203], [7086, 7087, 7204], [7087, 7205, 7204], [7087, 7088, 7206], [7087, 7206, 7205], [7088, 7089, 7206], [7089, 7207, 7206], [7089, 7090, 7208], [7089, 7208, 7207], [7090, 7091, 7208], [7091, 7209, 7208], [7091, 7092, 7210], [7091, 7210, 7209], [7092, 7093, 7210], [7093, 7211, 7210], [7093, 7094, 7212], [7093, 7212, 7211], [7094, 7095, 7212], [7095, 7213, 7212], [7095, 7096, 7214], [7095, 7214, 7213], [7096, 7097, 7214], [7097, 7215, 7214], [7097, 7098, 7216], [7097, 7216, 7215], [7098, 7099, 7216], [7099, 7217, 7216], [7099, 7100, 7218], [7099, 7218, 7217], [7100, 7101, 7218], [7101, 7219, 7218], [7101, 7102, 7220], [7101, 7220, 7219], [7102, 7103, 7220], [7103, 7221, 7220], [7103, 7104, 7222], [7103, 7222, 7221], [7104, 7105, 7222], [7105, 7223, 7222], [7105, 7106, 7224], [7105, 7224, 7223], [7106, 7107, 7224], [7107, 7225, 7224], [7107, 7108, 7226], [7107, 7226, 7225], [7108, 7109, 7226], [7109, 7227, 7226], [7109, 7110, 7228], [7109, 7228, 7227], [7110, 7111, 7228], [7111, 7229, 7228], [7111, 7112, 7230], [7111, 7230, 7229], [7112, 7113, 7230], [7113, 7231, 7230], [7113, 7114, 7232], [7113, 7232, 7231], [7114, 7115, 7232], [7115, 7233, 7232], [7115, 7116, 7234], [7115, 7234, 7233], [7116, 7117, 7234], [7117, 7235, 7234], [7117, 7118, 7236], [7117, 7236, 7235], [7118, 7119, 7236], [7119, 7237, 7236], [7119, 7120, 7238], [7119, 7238, 7237], [7120, 7121, 7238], [7121, 7239, 7238], [7121, 7122, 7240], [7121, 7240, 7239], [7122, 7123, 7240], [7123, 7241, 7240], [7123, 7124, 7242], [7123, 7242, 7241], [7124, 7125, 7242], [7125, 7243, 7242], [7125, 7126, 7244], [7125, 7244, 7243], [7126, 7127, 7244], [7127, 7245, 7244], [7127, 7128, 7246], [7127, 7246, 7245], [7129, 7130, 7247], [7130, 7248, 7247], [7130, 7131, 7249], [7130, 7249, 7248], [7131, 7132, 7249], [7132, 7250, 7249], [7132, 7133, 7251], [7132, 7251, 7250], [7133, 7134, 7251], [7134, 7252, 7251], [7134, 7135, 7253], [7134, 7253, 7252], [7135, 7136, 7253], [7136, 7254, 7253], [7136, 7137, 7255], [7136, 7255, 7254], [7137, 7138, 7255], [7138, 7256, 7255], [7138, 7139, 7257], [7138, 7257, 7256], [7139, 7140, 7257], [7140, 7258, 7257], [7140, 7141, 7259], [7140, 7259, 7258], [7141, 7142, 7259], [7142, 7260, 7259], [7142, 7143, 7261], [7142, 7261, 7260], [7143, 7144, 7261], [7144, 7262, 7261], [7144, 7145, 7263], [7144, 7263, 7262], [7145, 7146, 7263], [7146, 7264, 7263], [7146, 7147, 7265], [7146, 7265, 7264], [7148, 7149, 7267], [7148, 7267, 7266], [7149, 7150, 7267], [7150, 7268, 7267], [7150, 7151, 7269], [7150, 7269, 7268], [7151, 7152, 7269], [7152, 7270, 7269], [7152, 7153, 7271], [7152, 7271, 7270], [7153, 7154, 7271], [7154, 7272, 7271], [7154, 7155, 7273], [7154, 7273, 7272], [7155, 7156, 7273], [7156, 7274, 7273], [7156, 
7157, 7275], [7156, 7275, 7274], [7157, 7158, 7275], [7158, 7276, 7275], [7158, 7159, 7277], [7158, 7277, 7276], [7159, 7160, 7277], [7160, 7278, 7277], [7160, 7161, 7279], [7160, 7279, 7278], [7161, 7162, 7279], [7162, 7280, 7279], [7162, 7163, 7281], [7162, 7281, 7280], [7163, 7164, 7281], [7164, 7282, 7281], [7164, 7165, 7283], [7164, 7283, 7282], [7165, 7166, 7283], [7166, 7284, 7283], [7166, 7167, 7285], [7166, 7285, 7284], [7167, 7168, 7285], [7168, 7286, 7285], [7168, 7169, 7287], [7168, 7287, 7286], [7169, 7170, 7287], [7170, 7288, 7287], [7170, 7171, 7289], [7170, 7289, 7288], [7171, 7172, 7289], [7172, 7290, 7289], [7172, 7173, 7291], [7172, 7291, 7290], [7173, 7174, 7291], [7174, 7292, 7291], [7174, 7175, 7293], [7174, 7293, 7292], [7175, 7176, 7293], [7176, 7294, 7293], [7176, 7177, 7295], [7176, 7295, 7294], [7177, 7178, 7295], [7178, 7296, 7295], [7178, 7179, 7297], [7178, 7297, 7296], [7179, 7180, 7297], [7180, 7298, 7297], [7180, 7181, 7299], [7180, 7299, 7298], [7181, 7182, 7299], [7182, 7300, 7299], [7182, 7183, 7301], [7182, 7301, 7300], [7183, 7184, 7301], [7184, 7302, 7301], [7184, 7185, 7303], [7184, 7303, 7302], [7185, 7186, 7303], [7186, 7304, 7303], [7186, 7187, 7305], [7186, 7305, 7304], [7187, 7188, 7305], [7188, 7306, 7305], [7188, 7189, 7307], [7188, 7307, 7306], [7189, 7190, 7307], [7190, 7308, 7307], [7190, 7191, 7309], [7190, 7309, 7308], [7191, 7192, 7309], [7192, 7310, 7309], [7192, 7193, 7311], [7192, 7311, 7310], [7193, 7194, 7311], [7194, 7312, 7311], [7194, 7195, 7313], [7194, 7313, 7312], [7195, 7196, 7313], [7196, 7314, 7313], [7196, 7197, 7315], [7196, 7315, 7314], [7197, 7198, 7315], [7198, 7316, 7315], [7198, 7199, 7317], [7198, 7317, 7316], [7199, 7200, 7317], [7200, 7318, 7317], [7200, 7201, 7319], [7200, 7319, 7318], [7201, 7202, 7319], [7202, 7320, 7319], [7202, 7203, 7321], [7202, 7321, 7320], [7203, 7204, 7321], [7204, 7322, 7321], [7204, 7205, 7323], [7204, 7323, 7322], [7205, 7206, 7323], [7206, 7324, 7323], [7206, 7207, 7325], [7206, 7325, 7324], [7207, 7208, 7325], [7208, 7326, 7325], [7208, 7209, 7327], [7208, 7327, 7326], [7209, 7210, 7327], [7210, 7328, 7327], [7210, 7211, 7329], [7210, 7329, 7328], [7211, 7212, 7329], [7212, 7330, 7329], [7212, 7213, 7331], [7212, 7331, 7330], [7213, 7214, 7331], [7214, 7332, 7331], [7214, 7215, 7333], [7214, 7333, 7332], [7215, 7216, 7333], [7216, 7334, 7333], [7216, 7217, 7335], [7216, 7335, 7334], [7217, 7218, 7335], [7218, 7336, 7335], [7218, 7219, 7337], [7218, 7337, 7336], [7219, 7220, 7337], [7220, 7338, 7337], [7220, 7221, 7339], [7220, 7339, 7338], [7221, 7222, 7339], [7222, 7340, 7339], [7222, 7223, 7341], [7222, 7341, 7340], [7223, 7224, 7341], [7224, 7342, 7341], [7224, 7225, 7343], [7224, 7343, 7342], [7225, 7226, 7343], [7226, 7344, 7343], [7226, 7227, 7345], [7226, 7345, 7344], [7227, 7228, 7345], [7228, 7346, 7345], [7228, 7229, 7347], [7228, 7347, 7346], [7229, 7230, 7347], [7230, 7348, 7347], [7230, 7231, 7349], [7230, 7349, 7348], [7231, 7232, 7349], [7232, 7350, 7349], [7232, 7233, 7351], [7232, 7351, 7350], [7233, 7234, 7351], [7234, 7352, 7351], [7234, 7235, 7353], [7234, 7353, 7352], [7235, 7236, 7353], [7236, 7354, 7353], [7236, 7237, 7355], [7236, 7355, 7354], [7237, 7238, 7355], [7238, 7356, 7355], [7238, 7239, 7357], [7238, 7357, 7356], [7239, 7240, 7357], [7240, 7358, 7357], [7240, 7241, 7359], [7240, 7359, 7358], [7241, 7242, 7359], [7242, 7360, 7359], [7242, 7243, 7361], [7242, 7361, 7360], [7243, 7244, 7361], [7244, 7362, 7361], [7244, 7245, 7363], [7244, 7363, 7362], 
[7245, 7246, 7363], [7246, 7364, 7363], [7247, 7248, 7366], [7247, 7366, 7365], [7248, 7249, 7366], [7249, 7367, 7366], [7249, 7250, 7368], [7249, 7368, 7367], [7250, 7251, 7368], [7251, 7369, 7368], [7251, 7252, 7370], [7251, 7370, 7369], [7252, 7253, 7370], [7253, 7371, 7370], [7253, 7254, 7372], [7253, 7372, 7371], [7254, 7255, 7372], [7255, 7373, 7372], [7255, 7256, 7374], [7255, 7374, 7373], [7256, 7257, 7374], [7257, 7375, 7374], [7257, 7258, 7376], [7257, 7376, 7375], [7258, 7259, 7376], [7259, 7377, 7376], [7259, 7260, 7378], [7259, 7378, 7377], [7260, 7261, 7378], [7261, 7379, 7378], [7261, 7262, 7380], [7261, 7380, 7379], [7262, 7263, 7380], [7263, 7381, 7380], [7263, 7264, 7382], [7263, 7382, 7381], [7264, 7265, 7382], [7265, 7383, 7382], [7266, 7267, 7384], [7267, 7385, 7384], [7267, 7268, 7386], [7267, 7386, 7385], [7268, 7269, 7386], [7269, 7387, 7386], [7269, 7270, 7388], [7269, 7388, 7387], [7270, 7271, 7388], [7271, 7389, 7388], [7271, 7272, 7390], [7271, 7390, 7389], [7272, 7273, 7390], [7273, 7391, 7390], [7273, 7274, 7392], [7273, 7392, 7391], [7274, 7275, 7392], [7275, 7393, 7392], [7275, 7276, 7394], [7275, 7394, 7393], [7276, 7277, 7394], [7277, 7395, 7394], [7277, 7278, 7396], [7277, 7396, 7395], [7278, 7279, 7396], [7279, 7397, 7396], [7279, 7280, 7398], [7279, 7398, 7397], [7280, 7281, 7398], [7281, 7399, 7398], [7281, 7282, 7400], [7281, 7400, 7399], [7282, 7283, 7400], [7283, 7401, 7400], [7283, 7284, 7402], [7283, 7402, 7401], [7284, 7285, 7402], [7285, 7403, 7402], [7285, 7286, 7404], [7285, 7404, 7403], [7286, 7287, 7404], [7287, 7405, 7404], [7287, 7288, 7406], [7287, 7406, 7405], [7288, 7289, 7406], [7289, 7407, 7406], [7289, 7290, 7408], [7289, 7408, 7407], [7290, 7291, 7408], [7291, 7409, 7408], [7291, 7292, 7410], [7291, 7410, 7409], [7292, 7293, 7410], [7293, 7411, 7410], [7293, 7294, 7412], [7293, 7412, 7411], [7294, 7295, 7412], [7295, 7413, 7412], [7295, 7296, 7414], [7295, 7414, 7413], [7296, 7297, 7414], [7297, 7415, 7414], [7297, 7298, 7416], [7297, 7416, 7415], [7298, 7299, 7416], [7299, 7417, 7416], [7299, 7300, 7418], [7299, 7418, 7417], [7300, 7301, 7418], [7301, 7419, 7418], [7301, 7302, 7420], [7301, 7420, 7419], [7302, 7303, 7420], [7303, 7421, 7420], [7303, 7304, 7422], [7303, 7422, 7421], [7304, 7305, 7422], [7305, 7423, 7422], [7305, 7306, 7424], [7305, 7424, 7423], [7306, 7307, 7424], [7307, 7425, 7424], [7307, 7308, 7426], [7307, 7426, 7425], [7308, 7309, 7426], [7309, 7427, 7426], [7309, 7310, 7428], [7309, 7428, 7427], [7310, 7311, 7428], [7311, 7429, 7428], [7311, 7312, 7430], [7311, 7430, 7429], [7312, 7313, 7430], [7313, 7431, 7430], [7313, 7314, 7432], [7313, 7432, 7431], [7314, 7315, 7432], [7315, 7433, 7432], [7315, 7316, 7434], [7315, 7434, 7433], [7316, 7317, 7434], [7317, 7435, 7434], [7317, 7318, 7436], [7317, 7436, 7435], [7318, 7319, 7436], [7319, 7437, 7436], [7319, 7320, 7438], [7319, 7438, 7437], [7320, 7321, 7438], [7321, 7439, 7438], [7321, 7322, 7440], [7321, 7440, 7439], [7322, 7323, 7440], [7323, 7441, 7440], [7323, 7324, 7442], [7323, 7442, 7441], [7324, 7325, 7442], [7325, 7443, 7442], [7325, 7326, 7444], [7325, 7444, 7443], [7326, 7327, 7444], [7327, 7445, 7444], [7327, 7328, 7446], [7327, 7446, 7445], [7328, 7329, 7446], [7329, 7447, 7446], [7329, 7330, 7448], [7329, 7448, 7447], [7330, 7331, 7448], [7331, 7449, 7448], [7331, 7332, 7450], [7331, 7450, 7449], [7332, 7333, 7450], [7333, 7451, 7450], [7333, 7334, 7452], [7333, 7452, 7451], [7334, 7335, 7452], [7335, 7453, 7452], [7335, 7336, 7454], [7335, 7454, 
7453], [7336, 7337, 7454], [7337, 7455, 7454], [7337, 7338, 7456], [7337, 7456, 7455], [7338, 7339, 7456], [7339, 7457, 7456], [7339, 7340, 7458], [7339, 7458, 7457], [7340, 7341, 7458], [7341, 7459, 7458], [7341, 7342, 7460], [7341, 7460, 7459], [7342, 7343, 7460], [7343, 7461, 7460], [7343, 7344, 7462], [7343, 7462, 7461], [7344, 7345, 7462], [7345, 7463, 7462], [7345, 7346, 7464], [7345, 7464, 7463], [7346, 7347, 7464], [7347, 7465, 7464], [7347, 7348, 7466], [7347, 7466, 7465], [7348, 7349, 7466], [7349, 7467, 7466], [7349, 7350, 7468], [7349, 7468, 7467], [7350, 7351, 7468], [7351, 7469, 7468], [7351, 7352, 7470], [7351, 7470, 7469], [7352, 7353, 7470], [7353, 7471, 7470], [7353, 7354, 7472], [7353, 7472, 7471], [7354, 7355, 7472], [7355, 7473, 7472], [7355, 7356, 7474], [7355, 7474, 7473], [7356, 7357, 7474], [7357, 7475, 7474], [7357, 7358, 7476], [7357, 7476, 7475], [7358, 7359, 7476], [7359, 7477, 7476], [7359, 7360, 7478], [7359, 7478, 7477], [7360, 7361, 7478], [7361, 7479, 7478], [7361, 7362, 7480], [7361, 7480, 7479], [7362, 7363, 7480], [7363, 7481, 7480], [7363, 7364, 7482], [7363, 7482, 7481], [7365, 7366, 7483], [7366, 7484, 7483], [7366, 7367, 7485], [7366, 7485, 7484], [7367, 7368, 7485], [7368, 7486, 7485], [7368, 7369, 7487], [7368, 7487, 7486], [7369, 7370, 7487], [7370, 7488, 7487], [7370, 7371, 7489], [7370, 7489, 7488], [7371, 7372, 7489], [7372, 7490, 7489], [7372, 7373, 7491], [7372, 7491, 7490], [7373, 7374, 7491], [7374, 7492, 7491], [7374, 7375, 7493], [7374, 7493, 7492], [7375, 7376, 7493], [7376, 7494, 7493], [7376, 7377, 7495], [7376, 7495, 7494], [7377, 7378, 7495], [7378, 7496, 7495], [7378, 7379, 7497], [7378, 7497, 7496], [7379, 7380, 7497], [7380, 7498, 7497], [7380, 7381, 7499], [7380, 7499, 7498], [7381, 7382, 7499], [7382, 7500, 7499], [7382, 7383, 7501], [7382, 7501, 7500], [7384, 7385, 7503], [7384, 7503, 7502], [7385, 7386, 7503], [7386, 7504, 7503], [7386, 7387, 7505], [7386, 7505, 7504], [7387, 7388, 7505], [7388, 7506, 7505], [7388, 7389, 7507], [7388, 7507, 7506], [7389, 7390, 7507], [7390, 7508, 7507], [7390, 7391, 7509], [7390, 7509, 7508], [7391, 7392, 7509], [7392, 7510, 7509], [7392, 7393, 7511], [7392, 7511, 7510], [7393, 7394, 7511], [7394, 7512, 7511], [7394, 7395, 7513], [7394, 7513, 7512], [7395, 7396, 7513], [7396, 7514, 7513], [7396, 7397, 7515], [7396, 7515, 7514], [7397, 7398, 7515], [7398, 7516, 7515], [7398, 7399, 7517], [7398, 7517, 7516], [7399, 7400, 7517], [7400, 7518, 7517], [7400, 7401, 7519], [7400, 7519, 7518], [7401, 7402, 7519], [7402, 7520, 7519], [7402, 7403, 7521], [7402, 7521, 7520], [7403, 7404, 7521], [7404, 7522, 7521], [7404, 7405, 7523], [7404, 7523, 7522], [7405, 7406, 7523], [7406, 7524, 7523], [7406, 7407, 7525], [7406, 7525, 7524], [7407, 7408, 7525], [7408, 7526, 7525], [7408, 7409, 7527], [7408, 7527, 7526], [7409, 7410, 7527], [7410, 7528, 7527], [7410, 7411, 7529], [7410, 7529, 7528], [7411, 7412, 7529], [7412, 7530, 7529], [7412, 7413, 7531], [7412, 7531, 7530], [7413, 7414, 7531], [7414, 7532, 7531], [7414, 7415, 7533], [7414, 7533, 7532], [7415, 7416, 7533], [7416, 7534, 7533], [7416, 7417, 7535], [7416, 7535, 7534], [7417, 7418, 7535], [7418, 7536, 7535], [7418, 7419, 7537], [7418, 7537, 7536], [7419, 7420, 7537], [7420, 7538, 7537], [7420, 7421, 7539], [7420, 7539, 7538], [7421, 7422, 7539], [7422, 7540, 7539], [7422, 7423, 7541], [7422, 7541, 7540], [7423, 7424, 7541], [7424, 7542, 7541], [7424, 7425, 7543], [7424, 7543, 7542], [7425, 7426, 7543], [7426, 7544, 7543], [7426, 7427, 7545], [7426, 
7545, 7544], [7427, 7428, 7545], [7428, 7546, 7545], [7428, 7429, 7547], [7428, 7547, 7546], [7429, 7430, 7547], [7430, 7548, 7547], [7430, 7431, 7549], [7430, 7549, 7548], [7431, 7432, 7549], [7432, 7550, 7549], [7432, 7433, 7551], [7432, 7551, 7550], [7433, 7434, 7551], [7434, 7552, 7551], [7434, 7435, 7553], [7434, 7553, 7552], [7435, 7436, 7553], [7436, 7554, 7553], [7436, 7437, 7555], [7436, 7555, 7554], [7437, 7438, 7555], [7438, 7556, 7555], [7438, 7439, 7557], [7438, 7557, 7556], [7439, 7440, 7557], [7440, 7558, 7557], [7440, 7441, 7559], [7440, 7559, 7558], [7441, 7442, 7559], [7442, 7560, 7559], [7442, 7443, 7561], [7442, 7561, 7560], [7443, 7444, 7561], [7444, 7562, 7561], [7444, 7445, 7563], [7444, 7563, 7562], [7445, 7446, 7563], [7446, 7564, 7563], [7446, 7447, 7565], [7446, 7565, 7564], [7447, 7448, 7565], [7448, 7566, 7565], [7448, 7449, 7567], [7448, 7567, 7566], [7449, 7450, 7567], [7450, 7568, 7567], [7450, 7451, 7569], [7450, 7569, 7568], [7451, 7452, 7569], [7452, 7570, 7569], [7452, 7453, 7571], [7452, 7571, 7570], [7453, 7454, 7571], [7454, 7572, 7571], [7454, 7455, 7573], [7454, 7573, 7572], [7455, 7456, 7573], [7456, 7574, 7573], [7456, 7457, 7575], [7456, 7575, 7574], [7457, 7458, 7575], [7458, 7576, 7575], [7458, 7459, 7577], [7458, 7577, 7576], [7459, 7460, 7577], [7460, 7578, 7577], [7460, 7461, 7579], [7460, 7579, 7578], [7461, 7462, 7579], [7462, 7580, 7579], [7462, 7463, 7581], [7462, 7581, 7580], [7463, 7464, 7581], [7464, 7582, 7581], [7464, 7465, 7583], [7464, 7583, 7582], [7465, 7466, 7583], [7466, 7584, 7583], [7466, 7467, 7585], [7466, 7585, 7584], [7467, 7468, 7585], [7468, 7586, 7585], [7468, 7469, 7587], [7468, 7587, 7586], [7469, 7470, 7587], [7470, 7588, 7587], [7470, 7471, 7589], [7470, 7589, 7588], [7471, 7472, 7589], [7472, 7590, 7589], [7472, 7473, 7591], [7472, 7591, 7590], [7473, 7474, 7591], [7474, 7592, 7591], [7474, 7475, 7593], [7474, 7593, 7592], [7475, 7476, 7593], [7476, 7594, 7593], [7476, 7477, 7595], [7476, 7595, 7594], [7477, 7478, 7595], [7478, 7596, 7595], [7478, 7479, 7597], [7478, 7597, 7596], [7479, 7480, 7597], [7480, 7598, 7597], [7480, 7481, 7599], [7480, 7599, 7598], [7481, 7482, 7599], [7482, 7600, 7599], [7483, 7484, 7602], [7483, 7602, 7601], [7484, 7485, 7602], [7485, 7603, 7602], [7485, 7486, 7604], [7485, 7604, 7603], [7486, 7487, 7604], [7487, 7605, 7604], [7487, 7488, 7606], [7487, 7606, 7605], [7488, 7489, 7606], [7489, 7607, 7606], [7489, 7490, 7608], [7489, 7608, 7607], [7490, 7491, 7608], [7491, 7609, 7608], [7491, 7492, 7610], [7491, 7610, 7609], [7492, 7493, 7610], [7493, 7611, 7610], [7493, 7494, 7612], [7493, 7612, 7611], [7494, 7495, 7612], [7495, 7613, 7612], [7495, 7496, 7614], [7495, 7614, 7613], [7496, 7497, 7614], [7497, 7615, 7614], [7497, 7498, 7616], [7497, 7616, 7615], [7498, 7499, 7616], [7499, 7617, 7616], [7499, 7500, 7618], [7499, 7618, 7617], [7500, 7501, 7618], [7501, 7619, 7618], [7502, 7503, 7620], [7503, 7621, 7620], [7503, 7504, 7622], [7503, 7622, 7621], [7504, 7505, 7622], [7505, 7623, 7622], [7505, 7506, 7624], [7505, 7624, 7623], [7506, 7507, 7624], [7507, 7625, 7624], [7507, 7508, 7626], [7507, 7626, 7625], [7508, 7509, 7626], [7509, 7627, 7626], [7509, 7510, 7628], [7509, 7628, 7627], [7510, 7511, 7628], [7511, 7629, 7628], [7511, 7512, 7630], [7511, 7630, 7629], [7512, 7513, 7630], [7513, 7631, 7630], [7513, 7514, 7632], [7513, 7632, 7631], [7514, 7515, 7632], [7515, 7633, 7632], [7515, 7516, 7634], [7515, 7634, 7633], [7516, 7517, 7634], [7517, 7635, 7634], [7517, 7518, 7636], 
[7517, 7636, 7635], [7518, 7519, 7636], [7519, 7637, 7636], [7519, 7520, 7638], [7519, 7638, 7637], [7520, 7521, 7638], [7521, 7639, 7638], [7521, 7522, 7640], [7521, 7640, 7639], [7522, 7523, 7640], [7523, 7641, 7640], [7523, 7524, 7642], [7523, 7642, 7641], [7524, 7525, 7642], [7525, 7643, 7642], [7525, 7526, 7644], [7525, 7644, 7643], [7526, 7527, 7644], [7527, 7645, 7644], [7527, 7528, 7646], [7527, 7646, 7645], [7528, 7529, 7646], [7529, 7647, 7646], [7529, 7530, 7648], [7529, 7648, 7647], [7530, 7531, 7648], [7531, 7649, 7648], [7531, 7532, 7650], [7531, 7650, 7649], [7532, 7533, 7650], [7533, 7651, 7650], [7533, 7534, 7652], [7533, 7652, 7651], [7534, 7535, 7652], [7535, 7653, 7652], [7535, 7536, 7654], [7535, 7654, 7653], [7536, 7537, 7654], [7537, 7655, 7654], [7537, 7538, 7656], [7537, 7656, 7655], [7538, 7539, 7656], [7539, 7657, 7656], [7539, 7540, 7658], [7539, 7658, 7657], [7540, 7541, 7658], [7541, 7659, 7658], [7541, 7542, 7660], [7541, 7660, 7659], [7542, 7543, 7660], [7543, 7661, 7660], [7543, 7544, 7662], [7543, 7662, 7661], [7544, 7545, 7662], [7545, 7663, 7662], [7545, 7546, 7664], [7545, 7664, 7663], [7546, 7547, 7664], [7547, 7665, 7664], [7547, 7548, 7666], [7547, 7666, 7665], [7548, 7549, 7666], [7549, 7667, 7666], [7549, 7550, 7668], [7549, 7668, 7667], [7550, 7551, 7668], [7551, 7669, 7668], [7551, 7552, 7670], [7551, 7670, 7669], [7552, 7553, 7670], [7553, 7671, 7670], [7553, 7554, 7672], [7553, 7672, 7671], [7554, 7555, 7672], [7555, 7673, 7672], [7555, 7556, 7674], [7555, 7674, 7673], [7556, 7557, 7674], [7557, 7675, 7674], [7557, 7558, 7676], [7557, 7676, 7675], [7558, 7559, 7676], [7559, 7677, 7676], [7559, 7560, 7678], [7559, 7678, 7677], [7560, 7561, 7678], [7561, 7679, 7678], [7561, 7562, 7680], [7561, 7680, 7679], [7562, 7563, 7680], [7563, 7681, 7680], [7563, 7564, 7682], [7563, 7682, 7681], [7564, 7565, 7682], [7565, 7683, 7682], [7565, 7566, 7684], [7565, 7684, 7683], [7566, 7567, 7684], [7567, 7685, 7684], [7567, 7568, 7686], [7567, 7686, 7685], [7568, 7569, 7686], [7569, 7687, 7686], [7569, 7570, 7688], [7569, 7688, 7687], [7570, 7571, 7688], [7571, 7689, 7688], [7571, 7572, 7690], [7571, 7690, 7689], [7572, 7573, 7690], [7573, 7691, 7690], [7573, 7574, 7692], [7573, 7692, 7691], [7574, 7575, 7692], [7575, 7693, 7692], [7575, 7576, 7694], [7575, 7694, 7693], [7576, 7577, 7694], [7577, 7695, 7694], [7577, 7578, 7696], [7577, 7696, 7695], [7578, 7579, 7696], [7579, 7697, 7696], [7579, 7580, 7698], [7579, 7698, 7697], [7580, 7581, 7698], [7581, 7699, 7698], [7581, 7582, 7700], [7581, 7700, 7699], [7582, 7583, 7700], [7583, 7701, 7700], [7583, 7584, 7702], [7583, 7702, 7701], [7584, 7585, 7702], [7585, 7703, 7702], [7585, 7586, 7704], [7585, 7704, 7703], [7586, 7587, 7704], [7587, 7705, 7704], [7587, 7588, 7706], [7587, 7706, 7705], [7588, 7589, 7706], [7589, 7707, 7706], [7589, 7590, 7708], [7589, 7708, 7707], [7590, 7591, 7708], [7591, 7709, 7708], [7591, 7592, 7710], [7591, 7710, 7709], [7592, 7593, 7710], [7593, 7711, 7710], [7593, 7594, 7712], [7593, 7712, 7711], [7594, 7595, 7712], [7595, 7713, 7712], [7595, 7596, 7714], [7595, 7714, 7713], [7596, 7597, 7714], [7597, 7715, 7714], [7597, 7598, 7716], [7597, 7716, 7715], [7598, 7599, 7716], [7599, 7717, 7716], [7599, 7600, 7718], [7599, 7718, 7717], [7601, 7602, 7719], [7602, 7720, 7719], [7602, 7603, 7721], [7602, 7721, 7720], [7603, 7604, 7721], [7604, 7722, 7721], [7604, 7605, 7723], [7604, 7723, 7722], [7605, 7606, 7723], [7606, 7724, 7723], [7606, 7607, 7725], [7606, 7725, 7724], [7607, 7608, 
7725], [7608, 7726, 7725], [7608, 7609, 7727], [7608, 7727, 7726], [7609, 7610, 7727], [7610, 7728, 7727], [7610, 7611, 7729], [7610, 7729, 7728], [7611, 7612, 7729], [7612, 7730, 7729], [7612, 7613, 7731], [7612, 7731, 7730], [7613, 7614, 7731], [7614, 7732, 7731], [7614, 7615, 7733], [7614, 7733, 7732], [7615, 7616, 7733], [7616, 7734, 7733], [7616, 7617, 7735], [7616, 7735, 7734], [7617, 7618, 7735], [7618, 7736, 7735], [7618, 7619, 7737], [7618, 7737, 7736], [7620, 7621, 7739], [7620, 7739, 7738], [7621, 7622, 7739], [7622, 7740, 7739], [7622, 7623, 7741], [7622, 7741, 7740], [7623, 7624, 7741], [7624, 7742, 7741], [7624, 7625, 7743], [7624, 7743, 7742], [7625, 7626, 7743], [7626, 7744, 7743], [7626, 7627, 7745], [7626, 7745, 7744], [7627, 7628, 7745], [7628, 7746, 7745], [7628, 7629, 7747], [7628, 7747, 7746], [7629, 7630, 7747], [7630, 7748, 7747], [7630, 7631, 7749], [7630, 7749, 7748], [7631, 7632, 7749], [7632, 7750, 7749], [7632, 7633, 7751], [7632, 7751, 7750], [7633, 7634, 7751], [7634, 7752, 7751], [7634, 7635, 7753], [7634, 7753, 7752], [7635, 7636, 7753], [7636, 7754, 7753], [7636, 7637, 7755], [7636, 7755, 7754], [7637, 7638, 7755], [7638, 7756, 7755], [7638, 7639, 7757], [7638, 7757, 7756], [7639, 7640, 7757], [7640, 7758, 7757], [7640, 7641, 7759], [7640, 7759, 7758], [7641, 7642, 7759], [7642, 7760, 7759], [7642, 7643, 7761], [7642, 7761, 7760], [7643, 7644, 7761], [7644, 7762, 7761], [7644, 7645, 7763], [7644, 7763, 7762], [7645, 7646, 7763], [7646, 7764, 7763], [7646, 7647, 7765], [7646, 7765, 7764], [7647, 7648, 7765], [7648, 7766, 7765], [7648, 7649, 7767], [7648, 7767, 7766], [7649, 7650, 7767], [7650, 7768, 7767], [7650, 7651, 7769], [7650, 7769, 7768], [7651, 7652, 7769], [7652, 7770, 7769], [7652, 7653, 7771], [7652, 7771, 7770], [7653, 7654, 7771], [7654, 7772, 7771], [7654, 7655, 7773], [7654, 7773, 7772], [7655, 7656, 7773], [7656, 7774, 7773], [7656, 7657, 7775], [7656, 7775, 7774], [7657, 7658, 7775], [7658, 7776, 7775], [7658, 7659, 7777], [7658, 7777, 7776], [7659, 7660, 7777], [7660, 7778, 7777], [7660, 7661, 7779], [7660, 7779, 7778], [7661, 7662, 7779], [7662, 7780, 7779], [7662, 7663, 7781], [7662, 7781, 7780], [7663, 7664, 7781], [7664, 7782, 7781], [7664, 7665, 7783], [7664, 7783, 7782], [7665, 7666, 7783], [7666, 7784, 7783], [7666, 7667, 7785], [7666, 7785, 7784], [7667, 7668, 7785], [7668, 7786, 7785], [7668, 7669, 7787], [7668, 7787, 7786], [7669, 7670, 7787], [7670, 7788, 7787], [7670, 7671, 7789], [7670, 7789, 7788], [7671, 7672, 7789], [7672, 7790, 7789], [7672, 7673, 7791], [7672, 7791, 7790], [7673, 7674, 7791], [7674, 7792, 7791], [7674, 7675, 7793], [7674, 7793, 7792], [7675, 7676, 7793], [7676, 7794, 7793], [7676, 7677, 7795], [7676, 7795, 7794], [7677, 7678, 7795], [7678, 7796, 7795], [7678, 7679, 7797], [7678, 7797, 7796], [7679, 7680, 7797], [7680, 7798, 7797], [7680, 7681, 7799], [7680, 7799, 7798], [7681, 7682, 7799], [7682, 7800, 7799], [7682, 7683, 7801], [7682, 7801, 7800], [7683, 7684, 7801], [7684, 7802, 7801], [7684, 7685, 7803], [7684, 7803, 7802], [7685, 7686, 7803], [7686, 7804, 7803], [7686, 7687, 7805], [7686, 7805, 7804], [7687, 7688, 7805], [7688, 7806, 7805], [7688, 7689, 7807], [7688, 7807, 7806], [7689, 7690, 7807], [7690, 7808, 7807], [7690, 7691, 7809], [7690, 7809, 7808], [7691, 7692, 7809], [7692, 7810, 7809], [7692, 7693, 7811], [7692, 7811, 7810], [7693, 7694, 7811], [7694, 7812, 7811], [7694, 7695, 7813], [7694, 7813, 7812], [7695, 7696, 7813], [7696, 7814, 7813], [7696, 7697, 7815], [7696, 7815, 7814], [7697, 
7698, 7815], [7698, 7816, 7815], [7698, 7699, 7817], [7698, 7817, 7816], [7699, 7700, 7817], [7700, 7818, 7817], [7700, 7701, 7819], [7700, 7819, 7818], [7701, 7702, 7819], [7702, 7820, 7819], [7702, 7703, 7821], [7702, 7821, 7820], [7703, 7704, 7821], [7704, 7822, 7821], [7704, 7705, 7823], [7704, 7823, 7822], [7705, 7706, 7823], [7706, 7824, 7823], [7706, 7707, 7825], [7706, 7825, 7824], [7707, 7708, 7825], [7708, 7826, 7825], [7708, 7709, 7827], [7708, 7827, 7826], [7709, 7710, 7827], [7710, 7828, 7827], [7710, 7711, 7829], [7710, 7829, 7828], [7711, 7712, 7829], [7712, 7830, 7829], [7712, 7713, 7831], [7712, 7831, 7830], [7713, 7714, 7831], [7714, 7832, 7831], [7714, 7715, 7833], [7714, 7833, 7832], [7715, 7716, 7833], [7716, 7834, 7833], [7716, 7717, 7835], [7716, 7835, 7834], [7717, 7718, 7835], [7718, 7836, 7835], [7719, 7720, 7838], [7719, 7838, 7837], [7720, 7721, 7838], [7721, 7839, 7838], [7721, 7722, 7840], [7721, 7840, 7839], [7722, 7723, 7840], [7723, 7841, 7840], [7723, 7724, 7842], [7723, 7842, 7841], [7724, 7725, 7842], [7725, 7843, 7842], [7725, 7726, 7844], [7725, 7844, 7843], [7726, 7727, 7844], [7727, 7845, 7844], [7727, 7728, 7846], [7727, 7846, 7845], [7728, 7729, 7846], [7729, 7847, 7846], [7729, 7730, 7848], [7729, 7848, 7847], [7730, 7731, 7848], [7731, 7849, 7848], [7731, 7732, 7850], [7731, 7850, 7849], [7732, 7733, 7850], [7733, 7851, 7850], [7733, 7734, 7852], [7733, 7852, 7851], [7734, 7735, 7852], [7735, 7853, 7852], [7735, 7736, 7854], [7735, 7854, 7853], [7736, 7737, 7854], [7737, 7855, 7854], [7738, 7739, 7856], [7739, 7857, 7856], [7739, 7740, 7858], [7739, 7858, 7857], [7740, 7741, 7858], [7741, 7859, 7858], [7741, 7742, 7860], [7741, 7860, 7859], [7742, 7743, 7860], [7743, 7861, 7860], [7743, 7744, 7862], [7743, 7862, 7861], [7744, 7745, 7862], [7745, 7863, 7862], [7745, 7746, 7864], [7745, 7864, 7863], [7746, 7747, 7864], [7747, 7865, 7864], [7747, 7748, 7866], [7747, 7866, 7865], [7748, 7749, 7866], [7749, 7867, 7866], [7749, 7750, 7868], [7749, 7868, 7867], [7750, 7751, 7868], [7751, 7869, 7868], [7751, 7752, 7870], [7751, 7870, 7869], [7752, 7753, 7870], [7753, 7871, 7870], [7753, 7754, 7872], [7753, 7872, 7871], [7754, 7755, 7872], [7755, 7873, 7872], [7755, 7756, 7874], [7755, 7874, 7873], [7756, 7757, 7874], [7757, 7875, 7874], [7757, 7758, 7876], [7757, 7876, 7875], [7758, 7759, 7876], [7759, 7877, 7876], [7759, 7760, 7878], [7759, 7878, 7877], [7760, 7761, 7878], [7761, 7879, 7878], [7761, 7762, 7880], [7761, 7880, 7879], [7762, 7763, 7880], [7763, 7881, 7880], [7763, 7764, 7882], [7763, 7882, 7881], [7764, 7765, 7882], [7765, 7883, 7882], [7765, 7766, 7884], [7765, 7884, 7883], [7766, 7767, 7884], [7767, 7885, 7884], [7767, 7768, 7886], [7767, 7886, 7885], [7768, 7769, 7886], [7769, 7887, 7886], [7769, 7770, 7888], [7769, 7888, 7887], [7770, 7771, 7888], [7771, 7889, 7888], [7771, 7772, 7890], [7771, 7890, 7889], [7772, 7773, 7890], [7773, 7891, 7890], [7773, 7774, 7892], [7773, 7892, 7891], [7774, 7775, 7892], [7775, 7893, 7892], [7775, 7776, 7894], [7775, 7894, 7893], [7776, 7777, 7894], [7777, 7895, 7894], [7777, 7778, 7896], [7777, 7896, 7895], [7778, 7779, 7896], [7779, 7897, 7896], [7779, 7780, 7898], [7779, 7898, 7897], [7780, 7781, 7898], [7781, 7899, 7898], [7781, 7782, 7900], [7781, 7900, 7899], [7782, 7783, 7900], [7783, 7901, 7900], [7783, 7784, 7902], [7783, 7902, 7901], [7784, 7785, 7902], [7785, 7903, 7902], [7785, 7786, 7904], [7785, 7904, 7903], [7786, 7787, 7904], [7787, 7905, 7904], [7787, 7788, 7906], [7787, 7906, 7905], 
[7788, 7789, 7906], [7789, 7907, 7906], [7789, 7790, 7908], [7789, 7908, 7907], [7790, 7791, 7908], [7791, 7909, 7908], [7791, 7792, 7910], [7791, 7910, 7909], [7792, 7793, 7910], [7793, 7911, 7910], [7793, 7794, 7912], [7793, 7912, 7911], [7794, 7795, 7912], [7795, 7913, 7912], [7795, 7796, 7914], [7795, 7914, 7913], [7796, 7797, 7914], [7797, 7915, 7914], [7797, 7798, 7916], [7797, 7916, 7915], [7798, 7799, 7916], [7799, 7917, 7916], [7799, 7800, 7918], [7799, 7918, 7917], [7800, 7801, 7918], [7801, 7919, 7918], [7801, 7802, 7920], [7801, 7920, 7919], [7802, 7803, 7920], [7803, 7921, 7920], [7803, 7804, 7922], [7803, 7922, 7921], [7804, 7805, 7922], [7805, 7923, 7922], [7805, 7806, 7924], [7805, 7924, 7923], [7806, 7807, 7924], [7807, 7925, 7924], [7807, 7808, 7926], [7807, 7926, 7925], [7808, 7809, 7926], [7809, 7927, 7926], [7809, 7810, 7928], [7809, 7928, 7927], [7810, 7811, 7928], [7811, 7929, 7928], [7811, 7812, 7930], [7811, 7930, 7929], [7812, 7813, 7930], [7813, 7931, 7930], [7813, 7814, 7932], [7813, 7932, 7931], [7814, 7815, 7932], [7815, 7933, 7932], [7815, 7816, 7934], [7815, 7934, 7933], [7816, 7817, 7934], [7817, 7935, 7934], [7817, 7818, 7936], [7817, 7936, 7935], [7818, 7819, 7936], [7819, 7937, 7936], [7819, 7820, 7938], [7819, 7938, 7937], [7820, 7821, 7938], [7821, 7939, 7938], [7821, 7822, 7940], [7821, 7940, 7939], [7822, 7823, 7940], [7823, 7941, 7940], [7823, 7824, 7942], [7823, 7942, 7941], [7824, 7825, 7942], [7825, 7943, 7942], [7825, 7826, 7944], [7825, 7944, 7943], [7826, 7827, 7944], [7827, 7945, 7944], [7827, 7828, 7946], [7827, 7946, 7945], [7828, 7829, 7946], [7829, 7947, 7946], [7829, 7830, 7948], [7829, 7948, 7947], [7830, 7831, 7948], [7831, 7949, 7948], [7831, 7832, 7950], [7831, 7950, 7949], [7832, 7833, 7950], [7833, 7951, 7950], [7833, 7834, 7952], [7833, 7952, 7951], [7834, 7835, 7952], [7835, 7953, 7952], [7835, 7836, 7954], [7835, 7954, 7953], [7837, 7838, 7955], [7838, 7956, 7955], [7838, 7839, 7957], [7838, 7957, 7956], [7839, 7840, 7957], [7840, 7958, 7957], [7840, 7841, 7959], [7840, 7959, 7958], [7841, 7842, 7959], [7842, 7960, 7959], [7842, 7843, 7961], [7842, 7961, 7960], [7843, 7844, 7961], [7844, 7962, 7961], [7844, 7845, 7963], [7844, 7963, 7962], [7845, 7846, 7963], [7846, 7964, 7963], [7846, 7847, 7965], [7846, 7965, 7964], [7847, 7848, 7965], [7848, 7966, 7965], [7848, 7849, 7967], [7848, 7967, 7966], [7849, 7850, 7967], [7850, 7968, 7967], [7850, 7851, 7969], [7850, 7969, 7968], [7851, 7852, 7969], [7852, 7970, 7969], [7852, 7853, 7971], [7852, 7971, 7970], [7853, 7854, 7971], [7854, 7972, 7971], [7854, 7855, 7973], [7854, 7973, 7972], [7856, 7857, 7975], [7856, 7975, 7974], [7857, 7858, 7975], [7858, 7976, 7975], [7858, 7859, 7977], [7858, 7977, 7976], [7859, 7860, 7977], [7860, 7978, 7977], [7860, 7861, 7979], [7860, 7979, 7978], [7861, 7862, 7979], [7862, 7980, 7979], [7862, 7863, 7981], [7862, 7981, 7980], [7863, 7864, 7981], [7864, 7982, 7981], [7864, 7865, 7983], [7864, 7983, 7982], [7865, 7866, 7983], [7866, 7984, 7983], [7866, 7867, 7985], [7866, 7985, 7984], [7867, 7868, 7985], [7868, 7986, 7985], [7868, 7869, 7987], [7868, 7987, 7986], [7869, 7870, 7987], [7870, 7988, 7987], [7870, 7871, 7989], [7870, 7989, 7988], [7871, 7872, 7989], [7872, 7990, 7989], [7872, 7873, 7991], [7872, 7991, 7990], [7873, 7874, 7991], [7874, 7992, 7991], [7874, 7875, 7993], [7874, 7993, 7992], [7875, 7876, 7993], [7876, 7994, 7993], [7876, 7877, 7995], [7876, 7995, 7994], [7877, 7878, 7995], [7878, 7996, 7995], [7878, 7879, 7997], [7878, 7997, 
7996], [7879, 7880, 7997], [7880, 7998, 7997], [7880, 7881, 7999], [7880, 7999, 7998], [7881, 7882, 7999], [7882, 8000, 7999], [7882, 7883, 8001], [7882, 8001, 8000], [7883, 7884, 8001], [7884, 8002, 8001], [7884, 7885, 8003], [7884, 8003, 8002], [7885, 7886, 8003], [7886, 8004, 8003], [7886, 7887, 8005], [7886, 8005, 8004], [7887, 7888, 8005], [7888, 8006, 8005], [7888, 7889, 8007], [7888, 8007, 8006], [7889, 7890, 8007], [7890, 8008, 8007], [7890, 7891, 8009], [7890, 8009, 8008], [7891, 7892, 8009], [7892, 8010, 8009], [7892, 7893, 8011], [7892, 8011, 8010], [7893, 7894, 8011], [7894, 8012, 8011], [7894, 7895, 8013], [7894, 8013, 8012], [7895, 7896, 8013], [7896, 8014, 8013], [7896, 7897, 8015], [7896, 8015, 8014], [7897, 7898, 8015], [7898, 8016, 8015], [7898, 7899, 8017], [7898, 8017, 8016], [7899, 7900, 8017], [7900, 8018, 8017], [7900, 7901, 8019], [7900, 8019, 8018], [7901, 7902, 8019], [7902, 8020, 8019], [7902, 7903, 8021], [7902, 8021, 8020], [7903, 7904, 8021], [7904, 8022, 8021], [7904, 7905, 8023], [7904, 8023, 8022], [7905, 7906, 8023], [7906, 8024, 8023], [7906, 7907, 8025], [7906, 8025, 8024], [7907, 7908, 8025], [7908, 8026, 8025], [7908, 7909, 8027], [7908, 8027, 8026], [7909, 7910, 8027], [7910, 8028, 8027], [7910, 7911, 8029], [7910, 8029, 8028], [7911, 7912, 8029], [7912, 8030, 8029], [7912, 7913, 8031], [7912, 8031, 8030], [7913, 7914, 8031], [7914, 8032, 8031], [7914, 7915, 8033], [7914, 8033, 8032], [7915, 7916, 8033], [7916, 8034, 8033], [7916, 7917, 8035], [7916, 8035, 8034], [7917, 7918, 8035], [7918, 8036, 8035], [7918, 7919, 8037], [7918, 8037, 8036], [7919, 7920, 8037], [7920, 8038, 8037], [7920, 7921, 8039], [7920, 8039, 8038], [7921, 7922, 8039], [7922, 8040, 8039], [7922, 7923, 8041], [7922, 8041, 8040], [7923, 7924, 8041], [7924, 8042, 8041], [7924, 7925, 8043], [7924, 8043, 8042], [7925, 7926, 8043], [7926, 8044, 8043], [7926, 7927, 8045], [7926, 8045, 8044], [7927, 7928, 8045], [7928, 8046, 8045], [7928, 7929, 8047], [7928, 8047, 8046], [7929, 7930, 8047], [7930, 8048, 8047], [7930, 7931, 8049], [7930, 8049, 8048], [7931, 7932, 8049], [7932, 8050, 8049], [7932, 7933, 8051], [7932, 8051, 8050], [7933, 7934, 8051], [7934, 8052, 8051], [7934, 7935, 8053], [7934, 8053, 8052], [7935, 7936, 8053], [7936, 8054, 8053], [7936, 7937, 8055], [7936, 8055, 8054], [7937, 7938, 8055], [7938, 8056, 8055], [7938, 7939, 8057], [7938, 8057, 8056], [7939, 7940, 8057], [7940, 8058, 8057], [7940, 7941, 8059], [7940, 8059, 8058], [7941, 7942, 8059], [7942, 8060, 8059], [7942, 7943, 8061], [7942, 8061, 8060], [7943, 7944, 8061], [7944, 8062, 8061], [7944, 7945, 8063], [7944, 8063, 8062], [7945, 7946, 8063], [7946, 8064, 8063], [7946, 7947, 8065], [7946, 8065, 8064], [7947, 7948, 8065], [7948, 8066, 8065], [7948, 7949, 8067], [7948, 8067, 8066], [7949, 7950, 8067], [7950, 8068, 8067], [7950, 7951, 8069], [7950, 8069, 8068], [7951, 7952, 8069], [7952, 8070, 8069], [7952, 7953, 8071], [7952, 8071, 8070], [7953, 7954, 8071], [7954, 8072, 8071], [7955, 7956, 8074], [7955, 8074, 8073], [7956, 7957, 8074], [7957, 8075, 8074], [7957, 7958, 8076], [7957, 8076, 8075], [7958, 7959, 8076], [7959, 8077, 8076], [7959, 7960, 8078], [7959, 8078, 8077], [7960, 7961, 8078], [7961, 8079, 8078], [7961, 7962, 8080], [7961, 8080, 8079], [7962, 7963, 8080], [7963, 8081, 8080], [7963, 7964, 8082], [7963, 8082, 8081], [7964, 7965, 8082], [7965, 8083, 8082], [7965, 7966, 8084], [7965, 8084, 8083], [7966, 7967, 8084], [7967, 8085, 8084], [7967, 7968, 8086], [7967, 8086, 8085], [7968, 7969, 8086], [7969, 
8087, 8086], [7969, 7970, 8088], [7969, 8088, 8087], [7970, 7971, 8088], [7971, 8089, 8088], [7971, 7972, 8090], [7971, 8090, 8089], [7972, 7973, 8090], [7973, 8091, 8090], [7974, 7975, 8092], [7975, 8093, 8092], [7975, 7976, 8094], [7975, 8094, 8093], [7976, 7977, 8094], [7977, 8095, 8094], [7977, 7978, 8096], [7977, 8096, 8095], [7978, 7979, 8096], [7979, 8097, 8096], [7979, 7980, 8098], [7979, 8098, 8097], [7980, 7981, 8098], [7981, 8099, 8098], [7981, 7982, 8100], [7981, 8100, 8099], [7982, 7983, 8100], [7983, 8101, 8100], [7983, 7984, 8102], [7983, 8102, 8101], [7984, 7985, 8102], [7985, 8103, 8102], [7985, 7986, 8104], [7985, 8104, 8103], [7986, 7987, 8104], [7987, 8105, 8104], [7987, 7988, 8106], [7987, 8106, 8105], [7988, 7989, 8106], [7989, 8107, 8106], [7989, 7990, 8108], [7989, 8108, 8107], [7990, 7991, 8108], [7991, 8109, 8108], [7991, 7992, 8110], [7991, 8110, 8109], [7992, 7993, 8110], [7993, 8111, 8110], [7993, 7994, 8112], [7993, 8112, 8111], [7994, 7995, 8112], [7995, 8113, 8112], [7995, 7996, 8114], [7995, 8114, 8113], [7996, 7997, 8114], [7997, 8115, 8114], [7997, 7998, 8116], [7997, 8116, 8115], [7998, 7999, 8116], [7999, 8117, 8116], [7999, 8000, 8118], [7999, 8118, 8117], [8000, 8001, 8118], [8001, 8119, 8118], [8001, 8002, 8120], [8001, 8120, 8119], [8002, 8003, 8120], [8003, 8121, 8120], [8003, 8004, 8122], [8003, 8122, 8121], [8004, 8005, 8122], [8005, 8123, 8122], [8005, 8006, 8124], [8005, 8124, 8123], [8006, 8007, 8124], [8007, 8125, 8124], [8007, 8008, 8126], [8007, 8126, 8125], [8008, 8009, 8126], [8009, 8127, 8126], [8009, 8010, 8128], [8009, 8128, 8127], [8010, 8011, 8128], [8011, 8129, 8128], [8011, 8012, 8130], [8011, 8130, 8129], [8012, 8013, 8130], [8013, 8131, 8130], [8013, 8014, 8132], [8013, 8132, 8131], [8014, 8015, 8132], [8015, 8133, 8132], [8015, 8016, 8134], [8015, 8134, 8133], [8016, 8017, 8134], [8017, 8135, 8134], [8017, 8018, 8136], [8017, 8136, 8135], [8018, 8019, 8136], [8019, 8137, 8136], [8019, 8020, 8138], [8019, 8138, 8137], [8020, 8021, 8138], [8021, 8139, 8138], [8021, 8022, 8140], [8021, 8140, 8139], [8022, 8023, 8140], [8023, 8141, 8140], [8023, 8024, 8142], [8023, 8142, 8141], [8024, 8025, 8142], [8025, 8143, 8142], [8025, 8026, 8144], [8025, 8144, 8143], [8026, 8027, 8144], [8027, 8145, 8144], [8027, 8028, 8146], [8027, 8146, 8145], [8028, 8029, 8146], [8029, 8147, 8146], [8029, 8030, 8148], [8029, 8148, 8147], [8030, 8031, 8148], [8031, 8149, 8148], [8031, 8032, 8150], [8031, 8150, 8149], [8032, 8033, 8150], [8033, 8151, 8150], [8033, 8034, 8152], [8033, 8152, 8151], [8034, 8035, 8152], [8035, 8153, 8152], [8035, 8036, 8154], [8035, 8154, 8153], [8036, 8037, 8154], [8037, 8155, 8154], [8037, 8038, 8156], [8037, 8156, 8155], [8038, 8039, 8156], [8039, 8157, 8156], [8039, 8040, 8158], [8039, 8158, 8157], [8040, 8041, 8158], [8041, 8159, 8158], [8041, 8042, 8160], [8041, 8160, 8159], [8042, 8043, 8160], [8043, 8161, 8160], [8043, 8044, 8162], [8043, 8162, 8161], [8044, 8045, 8162], [8045, 8163, 8162], [8045, 8046, 8164], [8045, 8164, 8163], [8046, 8047, 8164], [8047, 8165, 8164], [8047, 8048, 8166], [8047, 8166, 8165], [8048, 8049, 8166], [8049, 8167, 8166], [8049, 8050, 8168], [8049, 8168, 8167], [8050, 8051, 8168], [8051, 8169, 8168], [8051, 8052, 8170], [8051, 8170, 8169], [8052, 8053, 8170], [8053, 8171, 8170], [8053, 8054, 8172], [8053, 8172, 8171], [8054, 8055, 8172], [8055, 8173, 8172], [8055, 8056, 8174], [8055, 8174, 8173], [8056, 8057, 8174], [8057, 8175, 8174], [8057, 8058, 8176], [8057, 8176, 8175], [8058, 8059, 8176], 
[8059, 8177, 8176], [8059, 8060, 8178], [8059, 8178, 8177], [8060, 8061, 8178], [8061, 8179, 8178], [8061, 8062, 8180], [8061, 8180, 8179], [8062, 8063, 8180], [8063, 8181, 8180], [8063, 8064, 8182], [8063, 8182, 8181], [8064, 8065, 8182], [8065, 8183, 8182], [8065, 8066, 8184], [8065, 8184, 8183], [8066, 8067, 8184], [8067, 8185, 8184], [8067, 8068, 8186], [8067, 8186, 8185], [8068, 8069, 8186], [8069, 8187, 8186], [8069, 8070, 8188], [8069, 8188, 8187], [8070, 8071, 8188], [8071, 8189, 8188], [8071, 8072, 8190], [8071, 8190, 8189], [8073, 8074, 8191], [8074, 8192, 8191], [8074, 8075, 8193], [8074, 8193, 8192], [8075, 8076, 8193], [8076, 8194, 8193], [8076, 8077, 8195], [8076, 8195, 8194], [8077, 8078, 8195], [8078, 8196, 8195], [8078, 8079, 8197], [8078, 8197, 8196], [8079, 8080, 8197], [8080, 8198, 8197], [8080, 8081, 8199], [8080, 8199, 8198], [8081, 8082, 8199], [8082, 8200, 8199], [8082, 8083, 8201], [8082, 8201, 8200], [8083, 8084, 8201], [8084, 8202, 8201], [8084, 8085, 8203], [8084, 8203, 8202], [8085, 8086, 8203], [8086, 8204, 8203], [8086, 8087, 8205], [8086, 8205, 8204], [8087, 8088, 8205], [8088, 8206, 8205], [8088, 8089, 8207], [8088, 8207, 8206], [8089, 8090, 8207], [8090, 8208, 8207], [8090, 8091, 8209], [8090, 8209, 8208], [8092, 8093, 8211], [8092, 8211, 8210], [8093, 8094, 8211], [8094, 8212, 8211], [8094, 8095, 8213], [8094, 8213, 8212], [8095, 8096, 8213], [8096, 8214, 8213], [8096, 8097, 8215], [8096, 8215, 8214], [8097, 8098, 8215], [8098, 8216, 8215], [8098, 8099, 8217], [8098, 8217, 8216], [8099, 8100, 8217], [8100, 8218, 8217], [8100, 8101, 8219], [8100, 8219, 8218], [8101, 8102, 8219], [8102, 8220, 8219], [8102, 8103, 8221], [8102, 8221, 8220], [8103, 8104, 8221], [8104, 8222, 8221], [8104, 8105, 8223], [8104, 8223, 8222], [8105, 8106, 8223], [8106, 8224, 8223], [8106, 8107, 8225], [8106, 8225, 8224], [8107, 8108, 8225], [8108, 8226, 8225], [8108, 8109, 8227], [8108, 8227, 8226], [8109, 8110, 8227], [8110, 8228, 8227], [8110, 8111, 8229], [8110, 8229, 8228], [8111, 8112, 8229], [8112, 8230, 8229], [8112, 8113, 8231], [8112, 8231, 8230], [8113, 8114, 8231], [8114, 8232, 8231], [8114, 8115, 8233], [8114, 8233, 8232], [8115, 8116, 8233], [8116, 8234, 8233], [8116, 8117, 8235], [8116, 8235, 8234], [8117, 8118, 8235], [8118, 8236, 8235], [8118, 8119, 8237], [8118, 8237, 8236], [8119, 8120, 8237], [8120, 8238, 8237], [8120, 8121, 8239], [8120, 8239, 8238], [8121, 8122, 8239], [8122, 8240, 8239], [8122, 8123, 8241], [8122, 8241, 8240], [8123, 8124, 8241], [8124, 8242, 8241], [8124, 8125, 8243], [8124, 8243, 8242], [8125, 8126, 8243], [8126, 8244, 8243], [8126, 8127, 8245], [8126, 8245, 8244], [8127, 8128, 8245], [8128, 8246, 8245], [8128, 8129, 8247], [8128, 8247, 8246], [8129, 8130, 8247], [8130, 8248, 8247], [8130, 8131, 8249], [8130, 8249, 8248], [8131, 8132, 8249], [8132, 8250, 8249], [8132, 8133, 8251], [8132, 8251, 8250], [8133, 8134, 8251], [8134, 8252, 8251], [8134, 8135, 8253], [8134, 8253, 8252], [8135, 8136, 8253], [8136, 8254, 8253], [8136, 8137, 8255], [8136, 8255, 8254], [8137, 8138, 8255], [8138, 8256, 8255], [8138, 8139, 8257], [8138, 8257, 8256], [8139, 8140, 8257], [8140, 8258, 8257], [8140, 8141, 8259], [8140, 8259, 8258], [8141, 8142, 8259], [8142, 8260, 8259], [8142, 8143, 8261], [8142, 8261, 8260], [8143, 8144, 8261], [8144, 8262, 8261], [8144, 8145, 8263], [8144, 8263, 8262], [8145, 8146, 8263], [8146, 8264, 8263], [8146, 8147, 8265], [8146, 8265, 8264], [8147, 8148, 8265], [8148, 8266, 8265], [8148, 8149, 8267], [8148, 8267, 8266], [8149, 8150, 
8267], [8150, 8268, 8267], [8150, 8151, 8269], [8150, 8269, 8268], [8151, 8152, 8269], [8152, 8270, 8269], [8152, 8153, 8271], [8152, 8271, 8270], [8153, 8154, 8271], [8154, 8272, 8271], [8154, 8155, 8273], [8154, 8273, 8272], [8155, 8156, 8273], [8156, 8274, 8273], [8156, 8157, 8275], [8156, 8275, 8274], [8157, 8158, 8275], [8158, 8276, 8275], [8158, 8159, 8277], [8158, 8277, 8276], [8159, 8160, 8277], [8160, 8278, 8277], [8160, 8161, 8279], [8160, 8279, 8278], [8161, 8162, 8279], [8162, 8280, 8279], [8162, 8163, 8281], [8162, 8281, 8280], [8163, 8164, 8281], [8164, 8282, 8281], [8164, 8165, 8283], [8164, 8283, 8282], [8165, 8166, 8283], [8166, 8284, 8283], [8166, 8167, 8285], [8166, 8285, 8284], [8167, 8168, 8285], [8168, 8286, 8285], [8168, 8169, 8287], [8168, 8287, 8286], [8169, 8170, 8287], [8170, 8288, 8287], [8170, 8171, 8289], [8170, 8289, 8288], [8171, 8172, 8289], [8172, 8290, 8289], [8172, 8173, 8291], [8172, 8291, 8290], [8173, 8174, 8291], [8174, 8292, 8291], [8174, 8175, 8293], [8174, 8293, 8292], [8175, 8176, 8293], [8176, 8294, 8293], [8176, 8177, 8295], [8176, 8295, 8294], [8177, 8178, 8295], [8178, 8296, 8295], [8178, 8179, 8297], [8178, 8297, 8296], [8179, 8180, 8297], [8180, 8298, 8297], [8180, 8181, 8299], [8180, 8299, 8298], [8181, 8182, 8299], [8182, 8300, 8299], [8182, 8183, 8301], [8182, 8301, 8300], [8183, 8184, 8301], [8184, 8302, 8301], [8184, 8185, 8303], [8184, 8303, 8302], [8185, 8186, 8303], [8186, 8304, 8303], [8186, 8187, 8305], [8186, 8305, 8304], [8187, 8188, 8305], [8188, 8306, 8305], [8188, 8189, 8307], [8188, 8307, 8306], [8189, 8190, 8307], [8190, 8308, 8307], [8191, 8192, 8310], [8191, 8310, 8309], [8192, 8193, 8310], [8193, 8311, 8310], [8193, 8194, 8312], [8193, 8312, 8311], [8194, 8195, 8312], [8195, 8313, 8312], [8195, 8196, 8314], [8195, 8314, 8313], [8196, 8197, 8314], [8197, 8315, 8314], [8197, 8198, 8316], [8197, 8316, 8315], [8198, 8199, 8316], [8199, 8317, 8316], [8199, 8200, 8318], [8199, 8318, 8317], [8200, 8201, 8318], [8201, 8319, 8318], [8201, 8202, 8320], [8201, 8320, 8319], [8202, 8203, 8320], [8203, 8321, 8320], [8203, 8204, 8322], [8203, 8322, 8321], [8204, 8205, 8322], [8205, 8323, 8322], [8205, 8206, 8324], [8205, 8324, 8323], [8206, 8207, 8324], [8207, 8325, 8324], [8207, 8208, 8326], [8207, 8326, 8325], [8208, 8209, 8326], [8209, 8327, 8326], [8210, 8211, 8328], [8211, 8329, 8328], [8211, 8212, 8330], [8211, 8330, 8329], [8212, 8213, 8330], [8213, 8331, 8330], [8213, 8214, 8332], [8213, 8332, 8331], [8214, 8215, 8332], [8215, 8333, 8332], [8215, 8216, 8334], [8215, 8334, 8333], [8216, 8217, 8334], [8217, 8335, 8334], [8217, 8218, 8336], [8217, 8336, 8335], [8218, 8219, 8336], [8219, 8337, 8336], [8219, 8220, 8338], [8219, 8338, 8337], [8220, 8221, 8338], [8221, 8339, 8338], [8221, 8222, 8340], [8221, 8340, 8339], [8222, 8223, 8340], [8223, 8341, 8340], [8223, 8224, 8342], [8223, 8342, 8341], [8224, 8225, 8342], [8225, 8343, 8342], [8225, 8226, 8344], [8225, 8344, 8343], [8226, 8227, 8344], [8227, 8345, 8344], [8227, 8228, 8346], [8227, 8346, 8345], [8228, 8229, 8346], [8229, 8347, 8346], [8229, 8230, 8348], [8229, 8348, 8347], [8230, 8231, 8348], [8231, 8349, 8348], [8231, 8232, 8350], [8231, 8350, 8349], [8232, 8233, 8350], [8233, 8351, 8350], [8233, 8234, 8352], [8233, 8352, 8351], [8234, 8235, 8352], [8235, 8353, 8352], [8235, 8236, 8354], [8235, 8354, 8353], [8236, 8237, 8354], [8237, 8355, 8354], [8237, 8238, 8356], [8237, 8356, 8355], [8238, 8239, 8356], [8239, 8357, 8356], [8239, 8240, 8358], [8239, 8358, 8357], [8240, 
8241, 8358], [8241, 8359, 8358], [8241, 8242, 8360], [8241, 8360, 8359], [8242, 8243, 8360], [8243, 8361, 8360], [8243, 8244, 8362], [8243, 8362, 8361], [8244, 8245, 8362], [8245, 8363, 8362], [8245, 8246, 8364], [8245, 8364, 8363], [8246, 8247, 8364], [8247, 8365, 8364], [8247, 8248, 8366], [8247, 8366, 8365], [8248, 8249, 8366], [8249, 8367, 8366], [8249, 8250, 8368], [8249, 8368, 8367], [8250, 8251, 8368], [8251, 8369, 8368], [8251, 8252, 8370], [8251, 8370, 8369], [8252, 8253, 8370], [8253, 8371, 8370], [8253, 8254, 8372], [8253, 8372, 8371], [8254, 8255, 8372], [8255, 8373, 8372], [8255, 8256, 8374], [8255, 8374, 8373], [8256, 8257, 8374], [8257, 8375, 8374], [8257, 8258, 8376], [8257, 8376, 8375], [8258, 8259, 8376], [8259, 8377, 8376], [8259, 8260, 8378], [8259, 8378, 8377], [8260, 8261, 8378], [8261, 8379, 8378], [8261, 8262, 8380], [8261, 8380, 8379], [8262, 8263, 8380], [8263, 8381, 8380], [8263, 8264, 8382], [8263, 8382, 8381], [8264, 8265, 8382], [8265, 8383, 8382], [8265, 8266, 8384], [8265, 8384, 8383], [8266, 8267, 8384], [8267, 8385, 8384], [8267, 8268, 8386], [8267, 8386, 8385], [8268, 8269, 8386], [8269, 8387, 8386], [8269, 8270, 8388], [8269, 8388, 8387], [8270, 8271, 8388], [8271, 8389, 8388], [8271, 8272, 8390], [8271, 8390, 8389], [8272, 8273, 8390], [8273, 8391, 8390], [8273, 8274, 8392], [8273, 8392, 8391], [8274, 8275, 8392], [8275, 8393, 8392], [8275, 8276, 8394], [8275, 8394, 8393], [8276, 8277, 8394], [8277, 8395, 8394], [8277, 8278, 8396], [8277, 8396, 8395], [8278, 8279, 8396], [8279, 8397, 8396], [8279, 8280, 8398], [8279, 8398, 8397], [8280, 8281, 8398], [8281, 8399, 8398], [8281, 8282, 8400], [8281, 8400, 8399], [8282, 8283, 8400], [8283, 8401, 8400], [8283, 8284, 8402], [8283, 8402, 8401], [8284, 8285, 8402], [8285, 8403, 8402], [8285, 8286, 8404], [8285, 8404, 8403], [8286, 8287, 8404], [8287, 8405, 8404], [8287, 8288, 8406], [8287, 8406, 8405], [8288, 8289, 8406], [8289, 8407, 8406], [8289, 8290, 8408], [8289, 8408, 8407], [8290, 8291, 8408], [8291, 8409, 8408], [8291, 8292, 8410], [8291, 8410, 8409], [8292, 8293, 8410], [8293, 8411, 8410], [8293, 8294, 8412], [8293, 8412, 8411], [8294, 8295, 8412], [8295, 8413, 8412], [8295, 8296, 8414], [8295, 8414, 8413], [8296, 8297, 8414], [8297, 8415, 8414], [8297, 8298, 8416], [8297, 8416, 8415], [8298, 8299, 8416], [8299, 8417, 8416], [8299, 8300, 8418], [8299, 8418, 8417], [8300, 8301, 8418], [8301, 8419, 8418], [8301, 8302, 8420], [8301, 8420, 8419], [8302, 8303, 8420], [8303, 8421, 8420], [8303, 8304, 8422], [8303, 8422, 8421], [8304, 8305, 8422], [8305, 8423, 8422], [8305, 8306, 8424], [8305, 8424, 8423], [8306, 8307, 8424], [8307, 8425, 8424], [8307, 8308, 8426], [8307, 8426, 8425], [8309, 8310, 8427], [8310, 8428, 8427], [8310, 8311, 8429], [8310, 8429, 8428], [8311, 8312, 8429], [8312, 8430, 8429], [8312, 8313, 8431], [8312, 8431, 8430], [8313, 8314, 8431], [8314, 8432, 8431], [8314, 8315, 8433], [8314, 8433, 8432], [8315, 8316, 8433], [8316, 8434, 8433], [8316, 8317, 8435], [8316, 8435, 8434], [8317, 8318, 8435], [8318, 8436, 8435], [8318, 8319, 8437], [8318, 8437, 8436], [8319, 8320, 8437], [8320, 8438, 8437], [8320, 8321, 8439], [8320, 8439, 8438], [8321, 8322, 8439], [8322, 8440, 8439], [8322, 8323, 8441], [8322, 8441, 8440], [8323, 8324, 8441], [8324, 8442, 8441], [8324, 8325, 8443], [8324, 8443, 8442], [8325, 8326, 8443], [8326, 8444, 8443], [8326, 8327, 8445], [8326, 8445, 8444], [8328, 8329, 8447], [8328, 8447, 8446], [8329, 8330, 8447], [8330, 8448, 8447], [8330, 8331, 8449], [8330, 8449, 8448], 
[8331, 8332, 8449], [8332, 8450, 8449], [8332, 8333, 8451], [8332, 8451, 8450], [8333, 8334, 8451], [8334, 8452, 8451], [8334, 8335, 8453], [8334, 8453, 8452], [8335, 8336, 8453], [8336, 8454, 8453], [8336, 8337, 8455], [8336, 8455, 8454], [8337, 8338, 8455], [8338, 8456, 8455], [8338, 8339, 8457], [8338, 8457, 8456], [8339, 8340, 8457], [8340, 8458, 8457], [8340, 8341, 8459], [8340, 8459, 8458], [8341, 8342, 8459], [8342, 8460, 8459], [8342, 8343, 8461], [8342, 8461, 8460], [8343, 8344, 8461], [8344, 8462, 8461], [8344, 8345, 8463], [8344, 8463, 8462], [8345, 8346, 8463], [8346, 8464, 8463], [8346, 8347, 8465], [8346, 8465, 8464], [8347, 8348, 8465], [8348, 8466, 8465], [8348, 8349, 8467], [8348, 8467, 8466], [8349, 8350, 8467], [8350, 8468, 8467], [8350, 8351, 8469], [8350, 8469, 8468], [8351, 8352, 8469], [8352, 8470, 8469], [8352, 8353, 8471], [8352, 8471, 8470], [8353, 8354, 8471], [8354, 8472, 8471], [8354, 8355, 8473], [8354, 8473, 8472], [8355, 8356, 8473], [8356, 8474, 8473], [8356, 8357, 8475], [8356, 8475, 8474], [8357, 8358, 8475], [8358, 8476, 8475], [8358, 8359, 8477], [8358, 8477, 8476], [8359, 8360, 8477], [8360, 8478, 8477], [8360, 8361, 8479], [8360, 8479, 8478], [8361, 8362, 8479], [8362, 8480, 8479], [8362, 8363, 8481], [8362, 8481, 8480], [8363, 8364, 8481], [8364, 8482, 8481], [8364, 8365, 8483], [8364, 8483, 8482], [8365, 8366, 8483], [8366, 8484, 8483], [8366, 8367, 8485], [8366, 8485, 8484], [8367, 8368, 8485], [8368, 8486, 8485], [8368, 8369, 8487], [8368, 8487, 8486], [8369, 8370, 8487], [8370, 8488, 8487], [8370, 8371, 8489], [8370, 8489, 8488], [8371, 8372, 8489], [8372, 8490, 8489], [8372, 8373, 8491], [8372, 8491, 8490], [8373, 8374, 8491], [8374, 8492, 8491], [8374, 8375, 8493], [8374, 8493, 8492], [8375, 8376, 8493], [8376, 8494, 8493], [8376, 8377, 8495], [8376, 8495, 8494], [8377, 8378, 8495], [8378, 8496, 8495], [8378, 8379, 8497], [8378, 8497, 8496], [8379, 8380, 8497], [8380, 8498, 8497], [8380, 8381, 8499], [8380, 8499, 8498], [8381, 8382, 8499], [8382, 8500, 8499], [8382, 8383, 8501], [8382, 8501, 8500], [8383, 8384, 8501], [8384, 8502, 8501], [8384, 8385, 8503], [8384, 8503, 8502], [8385, 8386, 8503], [8386, 8504, 8503], [8386, 8387, 8505], [8386, 8505, 8504], [8387, 8388, 8505], [8388, 8506, 8505], [8388, 8389, 8507], [8388, 8507, 8506], [8389, 8390, 8507], [8390, 8508, 8507], [8390, 8391, 8509], [8390, 8509, 8508], [8391, 8392, 8509], [8392, 8510, 8509], [8392, 8393, 8511], [8392, 8511, 8510], [8393, 8394, 8511], [8394, 8512, 8511], [8394, 8395, 8513], [8394, 8513, 8512], [8395, 8396, 8513], [8396, 8514, 8513], [8396, 8397, 8515], [8396, 8515, 8514], [8397, 8398, 8515], [8398, 8516, 8515], [8398, 8399, 8517], [8398, 8517, 8516], [8399, 8400, 8517], [8400, 8518, 8517], [8400, 8401, 8519], [8400, 8519, 8518], [8401, 8402, 8519], [8402, 8520, 8519], [8402, 8403, 8521], [8402, 8521, 8520], [8403, 8404, 8521], [8404, 8522, 8521], [8404, 8405, 8523], [8404, 8523, 8522], [8405, 8406, 8523], [8406, 8524, 8523], [8406, 8407, 8525], [8406, 8525, 8524], [8407, 8408, 8525], [8408, 8526, 8525], [8408, 8409, 8527], [8408, 8527, 8526], [8409, 8410, 8527], [8410, 8528, 8527], [8410, 8411, 8529], [8410, 8529, 8528], [8411, 8412, 8529], [8412, 8530, 8529], [8412, 8413, 8531], [8412, 8531, 8530], [8413, 8414, 8531], [8414, 8532, 8531], [8414, 8415, 8533], [8414, 8533, 8532], [8415, 8416, 8533], [8416, 8534, 8533], [8416, 8417, 8535], [8416, 8535, 8534], [8417, 8418, 8535], [8418, 8536, 8535], [8418, 8419, 8537], [8418, 8537, 8536], [8419, 8420, 8537], [8420, 8538, 
8537], [8420, 8421, 8539], [8420, 8539, 8538], [8421, 8422, 8539], [8422, 8540, 8539], [8422, 8423, 8541], [8422, 8541, 8540], [8423, 8424, 8541], [8424, 8542, 8541], [8424, 8425, 8543], [8424, 8543, 8542], [8425, 8426, 8543], [8426, 8544, 8543], [8427, 8428, 8546], [8427, 8546, 8545], [8428, 8429, 8546], [8429, 8547, 8546], [8429, 8430, 8548], [8429, 8548, 8547], [8430, 8431, 8548], [8431, 8549, 8548], [8431, 8432, 8550], [8431, 8550, 8549], [8432, 8433, 8550], [8433, 8551, 8550], [8433, 8434, 8552], [8433, 8552, 8551], [8434, 8435, 8552], [8435, 8553, 8552], [8435, 8436, 8554], [8435, 8554, 8553], [8436, 8437, 8554], [8437, 8555, 8554], [8437, 8438, 8556], [8437, 8556, 8555], [8438, 8439, 8556], [8439, 8557, 8556], [8439, 8440, 8558], [8439, 8558, 8557], [8440, 8441, 8558], [8441, 8559, 8558], [8441, 8442, 8560], [8441, 8560, 8559], [8442, 8443, 8560], [8443, 8561, 8560], [8443, 8444, 8562], [8443, 8562, 8561], [8444, 8445, 8562], [8445, 8563, 8562], [8446, 8447, 8564], [8447, 8565, 8564], [8447, 8448, 8566], [8447, 8566, 8565], [8448, 8449, 8566], [8449, 8567, 8566], [8449, 8450, 8568], [8449, 8568, 8567], [8450, 8451, 8568], [8451, 8569, 8568], [8451, 8452, 8570], [8451, 8570, 8569], [8452, 8453, 8570], [8453, 8571, 8570], [8453, 8454, 8572], [8453, 8572, 8571], [8454, 8455, 8572], [8455, 8573, 8572], [8455, 8456, 8574], [8455, 8574, 8573], [8456, 8457, 8574], [8457, 8575, 8574], [8457, 8458, 8576], [8457, 8576, 8575], [8458, 8459, 8576], [8459, 8577, 8576], [8459, 8460, 8578], [8459, 8578, 8577], [8460, 8461, 8578], [8461, 8579, 8578], [8461, 8462, 8580], [8461, 8580, 8579], [8462, 8463, 8580], [8463, 8581, 8580], [8463, 8464, 8582], [8463, 8582, 8581], [8464, 8465, 8582], [8465, 8583, 8582], [8465, 8466, 8584], [8465, 8584, 8583], [8466, 8467, 8584], [8467, 8585, 8584], [8467, 8468, 8586], [8467, 8586, 8585], [8468, 8469, 8586], [8469, 8587, 8586], [8469, 8470, 8588], [8469, 8588, 8587], [8470, 8471, 8588], [8471, 8589, 8588], [8471, 8472, 8590], [8471, 8590, 8589], [8472, 8473, 8590], [8473, 8591, 8590], [8473, 8474, 8592], [8473, 8592, 8591], [8474, 8475, 8592], [8475, 8593, 8592], [8475, 8476, 8594], [8475, 8594, 8593], [8476, 8477, 8594], [8477, 8595, 8594], [8477, 8478, 8596], [8477, 8596, 8595], [8478, 8479, 8596], [8479, 8597, 8596], [8479, 8480, 8598], [8479, 8598, 8597], [8480, 8481, 8598], [8481, 8599, 8598], [8481, 8482, 8600], [8481, 8600, 8599], [8482, 8483, 8600], [8483, 8601, 8600], [8483, 8484, 8602], [8483, 8602, 8601], [8484, 8485, 8602], [8485, 8603, 8602], [8485, 8486, 8604], [8485, 8604, 8603], [8486, 8487, 8604], [8487, 8605, 8604], [8487, 8488, 8606], [8487, 8606, 8605], [8488, 8489, 8606], [8489, 8607, 8606], [8489, 8490, 8608], [8489, 8608, 8607], [8490, 8491, 8608], [8491, 8609, 8608], [8491, 8492, 8610], [8491, 8610, 8609], [8492, 8493, 8610], [8493, 8611, 8610], [8493, 8494, 8612], [8493, 8612, 8611], [8494, 8495, 8612], [8495, 8613, 8612], [8495, 8496, 8614], [8495, 8614, 8613], [8496, 8497, 8614], [8497, 8615, 8614], [8497, 8498, 8616], [8497, 8616, 8615], [8498, 8499, 8616], [8499, 8617, 8616], [8499, 8500, 8618], [8499, 8618, 8617], [8500, 8501, 8618], [8501, 8619, 8618], [8501, 8502, 8620], [8501, 8620, 8619], [8502, 8503, 8620], [8503, 8621, 8620], [8503, 8504, 8622], [8503, 8622, 8621], [8504, 8505, 8622], [8505, 8623, 8622], [8505, 8506, 8624], [8505, 8624, 8623], [8506, 8507, 8624], [8507, 8625, 8624], [8507, 8508, 8626], [8507, 8626, 8625], [8508, 8509, 8626], [8509, 8627, 8626], [8509, 8510, 8628], [8509, 8628, 8627], [8510, 8511, 8628], [8511, 
8629, 8628], [8511, 8512, 8630], [8511, 8630, 8629], [8512, 8513, 8630], [8513, 8631, 8630], [8513, 8514, 8632], [8513, 8632, 8631], [8514, 8515, 8632], [8515, 8633, 8632], [8515, 8516, 8634], [8515, 8634, 8633], [8516, 8517, 8634], [8517, 8635, 8634], [8517, 8518, 8636], [8517, 8636, 8635], [8518, 8519, 8636], [8519, 8637, 8636], [8519, 8520, 8638], [8519, 8638, 8637], [8520, 8521, 8638], [8521, 8639, 8638], [8521, 8522, 8640], [8521, 8640, 8639], [8522, 8523, 8640], [8523, 8641, 8640], [8523, 8524, 8642], [8523, 8642, 8641], [8524, 8525, 8642], [8525, 8643, 8642], [8525, 8526, 8644], [8525, 8644, 8643], [8526, 8527, 8644], [8527, 8645, 8644], [8527, 8528, 8646], [8527, 8646, 8645], [8528, 8529, 8646], [8529, 8647, 8646], [8529, 8530, 8648], [8529, 8648, 8647], [8530, 8531, 8648], [8531, 8649, 8648], [8531, 8532, 8650], [8531, 8650, 8649], [8532, 8533, 8650], [8533, 8651, 8650], [8533, 8534, 8652], [8533, 8652, 8651], [8534, 8535, 8652], [8535, 8653, 8652], [8535, 8536, 8654], [8535, 8654, 8653], [8536, 8537, 8654], [8537, 8655, 8654], [8537, 8538, 8656], [8537, 8656, 8655], [8538, 8539, 8656], [8539, 8657, 8656], [8539, 8540, 8658], [8539, 8658, 8657], [8540, 8541, 8658], [8541, 8659, 8658], [8541, 8542, 8660], [8541, 8660, 8659], [8542, 8543, 8660], [8543, 8661, 8660], [8543, 8544, 8662], [8543, 8662, 8661], [8545, 8546, 8663], [8546, 8664, 8663], [8546, 8547, 8665], [8546, 8665, 8664], [8547, 8548, 8665], [8548, 8666, 8665], [8548, 8549, 8667], [8548, 8667, 8666], [8549, 8550, 8667], [8550, 8668, 8667], [8550, 8551, 8669], [8550, 8669, 8668], [8551, 8552, 8669], [8552, 8670, 8669], [8552, 8553, 8671], [8552, 8671, 8670], [8553, 8554, 8671], [8554, 8672, 8671], [8554, 8555, 8673], [8554, 8673, 8672], [8555, 8556, 8673], [8556, 8674, 8673], [8556, 8557, 8675], [8556, 8675, 8674], [8557, 8558, 8675], [8558, 8676, 8675], [8558, 8559, 8677], [8558, 8677, 8676], [8559, 8560, 8677], [8560, 8678, 8677], [8560, 8561, 8679], [8560, 8679, 8678], [8561, 8562, 8679], [8562, 8680, 8679], [8562, 8563, 8681], [8562, 8681, 8680], [8564, 8565, 8683], [8564, 8683, 8682], [8565, 8566, 8683], [8566, 8684, 8683], [8566, 8567, 8685], [8566, 8685, 8684], [8567, 8568, 8685], [8568, 8686, 8685], [8568, 8569, 8687], [8568, 8687, 8686], [8569, 8570, 8687], [8570, 8688, 8687], [8570, 8571, 8689], [8570, 8689, 8688], [8571, 8572, 8689], [8572, 8690, 8689], [8572, 8573, 8691], [8572, 8691, 8690], [8573, 8574, 8691], [8574, 8692, 8691], [8574, 8575, 8693], [8574, 8693, 8692], [8575, 8576, 8693], [8576, 8694, 8693], [8576, 8577, 8695], [8576, 8695, 8694], [8577, 8578, 8695], [8578, 8696, 8695], [8578, 8579, 8697], [8578, 8697, 8696], [8579, 8580, 8697], [8580, 8698, 8697], [8580, 8581, 8699], [8580, 8699, 8698], [8581, 8582, 8699], [8582, 8700, 8699], [8582, 8583, 8701], [8582, 8701, 8700], [8583, 8584, 8701], [8584, 8702, 8701], [8584, 8585, 8703], [8584, 8703, 8702], [8585, 8586, 8703], [8586, 8704, 8703], [8586, 8587, 8705], [8586, 8705, 8704], [8587, 8588, 8705], [8588, 8706, 8705], [8588, 8589, 8707], [8588, 8707, 8706], [8589, 8590, 8707], [8590, 8708, 8707], [8590, 8591, 8709], [8590, 8709, 8708], [8591, 8592, 8709], [8592, 8710, 8709], [8592, 8593, 8711], [8592, 8711, 8710], [8593, 8594, 8711], [8594, 8712, 8711], [8594, 8595, 8713], [8594, 8713, 8712], [8595, 8596, 8713], [8596, 8714, 8713], [8596, 8597, 8715], [8596, 8715, 8714], [8597, 8598, 8715], [8598, 8716, 8715], [8598, 8599, 8717], [8598, 8717, 8716], [8599, 8600, 8717], [8600, 8718, 8717], [8600, 8601, 8719], [8600, 8719, 8718], [8601, 8602, 8719], 
[8602, 8720, 8719], [8602, 8603, 8721], [8602, 8721, 8720], [8603, 8604, 8721], [8604, 8722, 8721], [8604, 8605, 8723], [8604, 8723, 8722], [8605, 8606, 8723], [8606, 8724, 8723], [8606, 8607, 8725], [8606, 8725, 8724], [8607, 8608, 8725], [8608, 8726, 8725], [8608, 8609, 8727], [8608, 8727, 8726], [8609, 8610, 8727], [8610, 8728, 8727], [8610, 8611, 8729], [8610, 8729, 8728], [8611, 8612, 8729], [8612, 8730, 8729], [8612, 8613, 8731], [8612, 8731, 8730], [8613, 8614, 8731], [8614, 8732, 8731], [8614, 8615, 8733], [8614, 8733, 8732], [8615, 8616, 8733], [8616, 8734, 8733], [8616, 8617, 8735], [8616, 8735, 8734], [8617, 8618, 8735], [8618, 8736, 8735], [8618, 8619, 8737], [8618, 8737, 8736], [8619, 8620, 8737], [8620, 8738, 8737], [8620, 8621, 8739], [8620, 8739, 8738], [8621, 8622, 8739], [8622, 8740, 8739], [8622, 8623, 8741], [8622, 8741, 8740], [8623, 8624, 8741], [8624, 8742, 8741], [8624, 8625, 8743], [8624, 8743, 8742], [8625, 8626, 8743], [8626, 8744, 8743], [8626, 8627, 8745], [8626, 8745, 8744], [8627, 8628, 8745], [8628, 8746, 8745], [8628, 8629, 8747], [8628, 8747, 8746], [8629, 8630, 8747], [8630, 8748, 8747], [8630, 8631, 8749], [8630, 8749, 8748], [8631, 8632, 8749], [8632, 8750, 8749], [8632, 8633, 8751], [8632, 8751, 8750], [8633, 8634, 8751], [8634, 8752, 8751], [8634, 8635, 8753], [8634, 8753, 8752], [8635, 8636, 8753], [8636, 8754, 8753], [8636, 8637, 8755], [8636, 8755, 8754], [8637, 8638, 8755], [8638, 8756, 8755], [8638, 8639, 8757], [8638, 8757, 8756], [8639, 8640, 8757], [8640, 8758, 8757], [8640, 8641, 8759], [8640, 8759, 8758], [8641, 8642, 8759], [8642, 8760, 8759], [8642, 8643, 8761], [8642, 8761, 8760], [8643, 8644, 8761], [8644, 8762, 8761], [8644, 8645, 8763], [8644, 8763, 8762], [8645, 8646, 8763], [8646, 8764, 8763], [8646, 8647, 8765], [8646, 8765, 8764], [8647, 8648, 8765], [8648, 8766, 8765], [8648, 8649, 8767], [8648, 8767, 8766], [8649, 8650, 8767], [8650, 8768, 8767], [8650, 8651, 8769], [8650, 8769, 8768], [8651, 8652, 8769], [8652, 8770, 8769], [8652, 8653, 8771], [8652, 8771, 8770], [8653, 8654, 8771], [8654, 8772, 8771], [8654, 8655, 8773], [8654, 8773, 8772], [8655, 8656, 8773], [8656, 8774, 8773], [8656, 8657, 8775], [8656, 8775, 8774], [8657, 8658, 8775], [8658, 8776, 8775], [8658, 8659, 8777], [8658, 8777, 8776], [8659, 8660, 8777], [8660, 8778, 8777], [8660, 8661, 8779], [8660, 8779, 8778], [8661, 8662, 8779], [8662, 8780, 8779], [8663, 8664, 8782], [8663, 8782, 8781], [8664, 8665, 8782], [8665, 8783, 8782], [8665, 8666, 8784], [8665, 8784, 8783], [8666, 8667, 8784], [8667, 8785, 8784], [8667, 8668, 8786], [8667, 8786, 8785], [8668, 8669, 8786], [8669, 8787, 8786], [8669, 8670, 8788], [8669, 8788, 8787], [8670, 8671, 8788], [8671, 8789, 8788], [8671, 8672, 8790], [8671, 8790, 8789], [8672, 8673, 8790], [8673, 8791, 8790], [8673, 8674, 8792], [8673, 8792, 8791], [8674, 8675, 8792], [8675, 8793, 8792], [8675, 8676, 8794], [8675, 8794, 8793], [8676, 8677, 8794], [8677, 8795, 8794], [8677, 8678, 8796], [8677, 8796, 8795], [8678, 8679, 8796], [8679, 8797, 8796], [8679, 8680, 8798], [8679, 8798, 8797], [8680, 8681, 8798], [8681, 8799, 8798], [8682, 8683, 8800], [8683, 8801, 8800], [8683, 8684, 8802], [8683, 8802, 8801], [8684, 8685, 8802], [8685, 8803, 8802], [8685, 8686, 8804], [8685, 8804, 8803], [8686, 8687, 8804], [8687, 8805, 8804], [8687, 8688, 8806], [8687, 8806, 8805], [8688, 8689, 8806], [8689, 8807, 8806], [8689, 8690, 8808], [8689, 8808, 8807], [8690, 8691, 8808], [8691, 8809, 8808], [8691, 8692, 8810], [8691, 8810, 8809], [8692, 8693, 
8810], [8693, 8811, 8810], [8693, 8694, 8812], [8693, 8812, 8811], [8694, 8695, 8812], [8695, 8813, 8812], [8695, 8696, 8814], [8695, 8814, 8813], [8696, 8697, 8814], [8697, 8815, 8814], [8697, 8698, 8816], [8697, 8816, 8815], [8698, 8699, 8816], [8699, 8817, 8816], [8699, 8700, 8818], [8699, 8818, 8817], [8700, 8701, 8818], [8701, 8819, 8818], [8701, 8702, 8820], [8701, 8820, 8819], [8702, 8703, 8820], [8703, 8821, 8820], [8703, 8704, 8822], [8703, 8822, 8821], [8704, 8705, 8822], [8705, 8823, 8822], [8705, 8706, 8824], [8705, 8824, 8823], [8706, 8707, 8824], [8707, 8825, 8824], [8707, 8708, 8826], [8707, 8826, 8825], [8708, 8709, 8826], [8709, 8827, 8826], [8709, 8710, 8828], [8709, 8828, 8827], [8710, 8711, 8828], [8711, 8829, 8828], [8711, 8712, 8830], [8711, 8830, 8829], [8712, 8713, 8830], [8713, 8831, 8830], [8713, 8714, 8832], [8713, 8832, 8831], [8714, 8715, 8832], [8715, 8833, 8832], [8715, 8716, 8834], [8715, 8834, 8833], [8716, 8717, 8834], [8717, 8835, 8834], [8717, 8718, 8836], [8717, 8836, 8835], [8718, 8719, 8836], [8719, 8837, 8836], [8719, 8720, 8838], [8719, 8838, 8837], [8720, 8721, 8838], [8721, 8839, 8838], [8721, 8722, 8840], [8721, 8840, 8839], [8722, 8723, 8840], [8723, 8841, 8840], [8723, 8724, 8842], [8723, 8842, 8841], [8724, 8725, 8842], [8725, 8843, 8842], [8725, 8726, 8844], [8725, 8844, 8843], [8726, 8727, 8844], [8727, 8845, 8844], [8727, 8728, 8846], [8727, 8846, 8845], [8728, 8729, 8846], [8729, 8847, 8846], [8729, 8730, 8848], [8729, 8848, 8847], [8730, 8731, 8848], [8731, 8849, 8848], [8731, 8732, 8850], [8731, 8850, 8849], [8732, 8733, 8850], [8733, 8851, 8850], [8733, 8734, 8852], [8733, 8852, 8851], [8734, 8735, 8852], [8735, 8853, 8852], [8735, 8736, 8854], [8735, 8854, 8853], [8736, 8737, 8854], [8737, 8855, 8854], [8737, 8738, 8856], [8737, 8856, 8855], [8738, 8739, 8856], [8739, 8857, 8856], [8739, 8740, 8858], [8739, 8858, 8857], [8740, 8741, 8858], [8741, 8859, 8858], [8741, 8742, 8860], [8741, 8860, 8859], [8742, 8743, 8860], [8743, 8861, 8860], [8743, 8744, 8862], [8743, 8862, 8861], [8744, 8745, 8862], [8745, 8863, 8862], [8745, 8746, 8864], [8745, 8864, 8863], [8746, 8747, 8864], [8747, 8865, 8864], [8747, 8748, 8866], [8747, 8866, 8865], [8748, 8749, 8866], [8749, 8867, 8866], [8749, 8750, 8868], [8749, 8868, 8867], [8750, 8751, 8868], [8751, 8869, 8868], [8751, 8752, 8870], [8751, 8870, 8869], [8752, 8753, 8870], [8753, 8871, 8870], [8753, 8754, 8872], [8753, 8872, 8871], [8754, 8755, 8872], [8755, 8873, 8872], [8755, 8756, 8874], [8755, 8874, 8873], [8756, 8757, 8874], [8757, 8875, 8874], [8757, 8758, 8876], [8757, 8876, 8875], [8758, 8759, 8876], [8759, 8877, 8876], [8759, 8760, 8878], [8759, 8878, 8877], [8760, 8761, 8878], [8761, 8879, 8878], [8761, 8762, 8880], [8761, 8880, 8879], [8762, 8763, 8880], [8763, 8881, 8880], [8763, 8764, 8882], [8763, 8882, 8881], [8764, 8765, 8882], [8765, 8883, 8882], [8765, 8766, 8884], [8765, 8884, 8883], [8766, 8767, 8884], [8767, 8885, 8884], [8767, 8768, 8886], [8767, 8886, 8885], [8768, 8769, 8886], [8769, 8887, 8886], [8769, 8770, 8888], [8769, 8888, 8887], [8770, 8771, 8888], [8771, 8889, 8888], [8771, 8772, 8890], [8771, 8890, 8889], [8772, 8773, 8890], [8773, 8891, 8890], [8773, 8774, 8892], [8773, 8892, 8891], [8774, 8775, 8892], [8775, 8893, 8892], [8775, 8776, 8894], [8775, 8894, 8893], [8776, 8777, 8894], [8777, 8895, 8894], [8777, 8778, 8896], [8777, 8896, 8895], [8778, 8779, 8896], [8779, 8897, 8896], [8779, 8780, 8898], [8779, 8898, 8897], [8781, 8782, 8899], [8782, 8900, 8899], [8782, 
8783, 8901], [8782, 8901, 8900], [8783, 8784, 8901], [8784, 8902, 8901], [8784, 8785, 8903], [8784, 8903, 8902], [8785, 8786, 8903], [8786, 8904, 8903], [8786, 8787, 8905], [8786, 8905, 8904], [8787, 8788, 8905], [8788, 8906, 8905], [8788, 8789, 8907], [8788, 8907, 8906], [8789, 8790, 8907], [8790, 8908, 8907], [8790, 8791, 8909], [8790, 8909, 8908], [8791, 8792, 8909], [8792, 8910, 8909], [8792, 8793, 8911], [8792, 8911, 8910], [8793, 8794, 8911], [8794, 8912, 8911], [8794, 8795, 8913], [8794, 8913, 8912], [8795, 8796, 8913], [8796, 8914, 8913], [8796, 8797, 8915], [8796, 8915, 8914], [8797, 8798, 8915], [8798, 8916, 8915], [8798, 8799, 8917], [8798, 8917, 8916], [8800, 8801, 8919], [8800, 8919, 8918], [8801, 8802, 8919], [8802, 8920, 8919], [8802, 8803, 8921], [8802, 8921, 8920], [8803, 8804, 8921], [8804, 8922, 8921], [8804, 8805, 8923], [8804, 8923, 8922], [8805, 8806, 8923], [8806, 8924, 8923], [8806, 8807, 8925], [8806, 8925, 8924], [8807, 8808, 8925], [8808, 8926, 8925], [8808, 8809, 8927], [8808, 8927, 8926], [8809, 8810, 8927], [8810, 8928, 8927], [8810, 8811, 8929], [8810, 8929, 8928], [8811, 8812, 8929], [8812, 8930, 8929], [8812, 8813, 8931], [8812, 8931, 8930], [8813, 8814, 8931], [8814, 8932, 8931], [8814, 8815, 8933], [8814, 8933, 8932], [8815, 8816, 8933], [8816, 8934, 8933], [8816, 8817, 8935], [8816, 8935, 8934], [8817, 8818, 8935], [8818, 8936, 8935], [8818, 8819, 8937], [8818, 8937, 8936], [8819, 8820, 8937], [8820, 8938, 8937], [8820, 8821, 8939], [8820, 8939, 8938], [8821, 8822, 8939], [8822, 8940, 8939], [8822, 8823, 8941], [8822, 8941, 8940], [8823, 8824, 8941], [8824, 8942, 8941], [8824, 8825, 8943], [8824, 8943, 8942], [8825, 8826, 8943], [8826, 8944, 8943], [8826, 8827, 8945], [8826, 8945, 8944], [8827, 8828, 8945], [8828, 8946, 8945], [8828, 8829, 8947], [8828, 8947, 8946], [8829, 8830, 8947], [8830, 8948, 8947], [8830, 8831, 8949], [8830, 8949, 8948], [8831, 8832, 8949], [8832, 8950, 8949], [8832, 8833, 8951], [8832, 8951, 8950], [8833, 8834, 8951], [8834, 8952, 8951], [8834, 8835, 8953], [8834, 8953, 8952], [8835, 8836, 8953], [8836, 8954, 8953], [8836, 8837, 8955], [8836, 8955, 8954], [8837, 8838, 8955], [8838, 8956, 8955], [8838, 8839, 8957], [8838, 8957, 8956], [8839, 8840, 8957], [8840, 8958, 8957], [8840, 8841, 8959], [8840, 8959, 8958], [8841, 8842, 8959], [8842, 8960, 8959], [8842, 8843, 8961], [8842, 8961, 8960], [8843, 8844, 8961], [8844, 8962, 8961], [8844, 8845, 8963], [8844, 8963, 8962], [8845, 8846, 8963], [8846, 8964, 8963], [8846, 8847, 8965], [8846, 8965, 8964], [8847, 8848, 8965], [8848, 8966, 8965], [8848, 8849, 8967], [8848, 8967, 8966], [8849, 8850, 8967], [8850, 8968, 8967], [8850, 8851, 8969], [8850, 8969, 8968], [8851, 8852, 8969], [8852, 8970, 8969], [8852, 8853, 8971], [8852, 8971, 8970], [8853, 8854, 8971], [8854, 8972, 8971], [8854, 8855, 8973], [8854, 8973, 8972], [8855, 8856, 8973], [8856, 8974, 8973], [8856, 8857, 8975], [8856, 8975, 8974], [8857, 8858, 8975], [8858, 8976, 8975], [8858, 8859, 8977], [8858, 8977, 8976], [8859, 8860, 8977], [8860, 8978, 8977], [8860, 8861, 8979], [8860, 8979, 8978], [8861, 8862, 8979], [8862, 8980, 8979], [8862, 8863, 8981], [8862, 8981, 8980], [8863, 8864, 8981], [8864, 8982, 8981], [8864, 8865, 8983], [8864, 8983, 8982], [8865, 8866, 8983], [8866, 8984, 8983], [8866, 8867, 8985], [8866, 8985, 8984], [8867, 8868, 8985], [8868, 8986, 8985], [8868, 8869, 8987], [8868, 8987, 8986], [8869, 8870, 8987], [8870, 8988, 8987], [8870, 8871, 8989], [8870, 8989, 8988], [8871, 8872, 8989], [8872, 8990, 8989], 
[8872, 8873, 8991], [8872, 8991, 8990], [8873, 8874, 8991], [8874, 8992, 8991], [8874, 8875, 8993], [8874, 8993, 8992], [8875, 8876, 8993], [8876, 8994, 8993], [8876, 8877, 8995], [8876, 8995, 8994], [8877, 8878, 8995], [8878, 8996, 8995], [8878, 8879, 8997], [8878, 8997, 8996], [8879, 8880, 8997], [8880, 8998, 8997], [8880, 8881, 8999], [8880, 8999, 8998], [8881, 8882, 8999], [8882, 9000, 8999], [8882, 8883, 9001], [8882, 9001, 9000], [8883, 8884, 9001], [8884, 9002, 9001], [8884, 8885, 9003], [8884, 9003, 9002], [8885, 8886, 9003], [8886, 9004, 9003], [8886, 8887, 9005], [8886, 9005, 9004], [8887, 8888, 9005], [8888, 9006, 9005], [8888, 8889, 9007], [8888, 9007, 9006], [8889, 8890, 9007], [8890, 9008, 9007], [8890, 8891, 9009], [8890, 9009, 9008], [8891, 8892, 9009], [8892, 9010, 9009], [8892, 8893, 9011], [8892, 9011, 9010], [8893, 8894, 9011], [8894, 9012, 9011], [8894, 8895, 9013], [8894, 9013, 9012], [8895, 8896, 9013], [8896, 9014, 9013], [8896, 8897, 9015], [8896, 9015, 9014], [8897, 8898, 9015], [8898, 9016, 9015], [8899, 8900, 9018], [8899, 9018, 9017], [8900, 8901, 9018], [8901, 9019, 9018], [8901, 8902, 9020], [8901, 9020, 9019], [8902, 8903, 9020], [8903, 9021, 9020], [8903, 8904, 9022], [8903, 9022, 9021], [8904, 8905, 9022], [8905, 9023, 9022], [8905, 8906, 9024], [8905, 9024, 9023], [8906, 8907, 9024], [8907, 9025, 9024], [8907, 8908, 9026], [8907, 9026, 9025], [8908, 8909, 9026], [8909, 9027, 9026], [8909, 8910, 9028], [8909, 9028, 9027], [8910, 8911, 9028], [8911, 9029, 9028], [8911, 8912, 9030], [8911, 9030, 9029], [8912, 8913, 9030], [8913, 9031, 9030], [8913, 8914, 9032], [8913, 9032, 9031], [8914, 8915, 9032], [8915, 9033, 9032], [8915, 8916, 9034], [8915, 9034, 9033], [8916, 8917, 9034], [8917, 9035, 9034], [8918, 8919, 9036], [8919, 9037, 9036], [8919, 8920, 9038], [8919, 9038, 9037], [8920, 8921, 9038], [8921, 9039, 9038], [8921, 8922, 9040], [8921, 9040, 9039], [8922, 8923, 9040], [8923, 9041, 9040], [8923, 8924, 9042], [8923, 9042, 9041], [8924, 8925, 9042], [8925, 9043, 9042], [8925, 8926, 9044], [8925, 9044, 9043], [8926, 8927, 9044], [8927, 9045, 9044], [8927, 8928, 9046], [8927, 9046, 9045], [8928, 8929, 9046], [8929, 9047, 9046], [8929, 8930, 9048], [8929, 9048, 9047], [8930, 8931, 9048], [8931, 9049, 9048], [8931, 8932, 9050], [8931, 9050, 9049], [8932, 8933, 9050], [8933, 9051, 9050], [8933, 8934, 9052], [8933, 9052, 9051], [8934, 8935, 9052], [8935, 9053, 9052], [8935, 8936, 9054], [8935, 9054, 9053], [8936, 8937, 9054], [8937, 9055, 9054], [8937, 8938, 9056], [8937, 9056, 9055], [8938, 8939, 9056], [8939, 9057, 9056], [8939, 8940, 9058], [8939, 9058, 9057], [8940, 8941, 9058], [8941, 9059, 9058], [8941, 8942, 9060], [8941, 9060, 9059], [8942, 8943, 9060], [8943, 9061, 9060], [8943, 8944, 9062], [8943, 9062, 9061], [8944, 8945, 9062], [8945, 9063, 9062], [8945, 8946, 9064], [8945, 9064, 9063], [8946, 8947, 9064], [8947, 9065, 9064], [8947, 8948, 9066], [8947, 9066, 9065], [8948, 8949, 9066], [8949, 9067, 9066], [8949, 8950, 9068], [8949, 9068, 9067], [8950, 8951, 9068], [8951, 9069, 9068], [8951, 8952, 9070], [8951, 9070, 9069], [8952, 8953, 9070], [8953, 9071, 9070], [8953, 8954, 9072], [8953, 9072, 9071], [8954, 8955, 9072], [8955, 9073, 9072], [8955, 8956, 9074], [8955, 9074, 9073], [8956, 8957, 9074], [8957, 9075, 9074], [8957, 8958, 9076], [8957, 9076, 9075], [8958, 8959, 9076], [8959, 9077, 9076], [8959, 8960, 9078], [8959, 9078, 9077], [8960, 8961, 9078], [8961, 9079, 9078], [8961, 8962, 9080], [8961, 9080, 9079], [8962, 8963, 9080], [8963, 9081, 
9080], [8963, 8964, 9082], [8963, 9082, 9081], [8964, 8965, 9082], [8965, 9083, 9082], [8965, 8966, 9084], [8965, 9084, 9083], [8966, 8967, 9084], [8967, 9085, 9084], [8967, 8968, 9086], [8967, 9086, 9085], [8968, 8969, 9086], [8969, 9087, 9086], [8969, 8970, 9088], [8969, 9088, 9087], [8970, 8971, 9088], [8971, 9089, 9088], [8971, 8972, 9090], [8971, 9090, 9089], [8972, 8973, 9090], [8973, 9091, 9090], [8973, 8974, 9092], [8973, 9092, 9091], [8974, 8975, 9092], [8975, 9093, 9092], [8975, 8976, 9094], [8975, 9094, 9093], [8976, 8977, 9094], [8977, 9095, 9094], [8977, 8978, 9096], [8977, 9096, 9095], [8978, 8979, 9096], [8979, 9097, 9096], [8979, 8980, 9098], [8979, 9098, 9097], [8980, 8981, 9098], [8981, 9099, 9098], [8981, 8982, 9100], [8981, 9100, 9099], [8982, 8983, 9100], [8983, 9101, 9100], [8983, 8984, 9102], [8983, 9102, 9101], [8984, 8985, 9102], [8985, 9103, 9102], [8985, 8986, 9104], [8985, 9104, 9103], [8986, 8987, 9104], [8987, 9105, 9104], [8987, 8988, 9106], [8987, 9106, 9105], [8988, 8989, 9106], [8989, 9107, 9106], [8989, 8990, 9108], [8989, 9108, 9107], [8990, 8991, 9108], [8991, 9109, 9108], [8991, 8992, 9110], [8991, 9110, 9109], [8992, 8993, 9110], [8993, 9111, 9110], [8993, 8994, 9112], [8993, 9112, 9111], [8994, 8995, 9112], [8995, 9113, 9112], [8995, 8996, 9114], [8995, 9114, 9113], [8996, 8997, 9114], [8997, 9115, 9114], [8997, 8998, 9116], [8997, 9116, 9115], [8998, 8999, 9116], [8999, 9117, 9116], [8999, 9000, 9118], [8999, 9118, 9117], [9000, 9001, 9118], [9001, 9119, 9118], [9001, 9002, 9120], [9001, 9120, 9119], [9002, 9003, 9120], [9003, 9121, 9120], [9003, 9004, 9122], [9003, 9122, 9121], [9004, 9005, 9122], [9005, 9123, 9122], [9005, 9006, 9124], [9005, 9124, 9123], [9006, 9007, 9124], [9007, 9125, 9124], [9007, 9008, 9126], [9007, 9126, 9125], [9008, 9009, 9126], [9009, 9127, 9126], [9009, 9010, 9128], [9009, 9128, 9127], [9010, 9011, 9128], [9011, 9129, 9128], [9011, 9012, 9130], [9011, 9130, 9129], [9012, 9013, 9130], [9013, 9131, 9130], [9013, 9014, 9132], [9013, 9132, 9131], [9014, 9015, 9132], [9015, 9133, 9132], [9015, 9016, 9134], [9015, 9134, 9133], [9017, 9018, 9135], [9018, 9136, 9135], [9018, 9019, 9137], [9018, 9137, 9136], [9019, 9020, 9137], [9020, 9138, 9137], [9020, 9021, 9139], [9020, 9139, 9138], [9021, 9022, 9139], [9022, 9140, 9139], [9022, 9023, 9141], [9022, 9141, 9140], [9023, 9024, 9141], [9024, 9142, 9141], [9024, 9025, 9143], [9024, 9143, 9142], [9025, 9026, 9143], [9026, 9144, 9143], [9026, 9027, 9145], [9026, 9145, 9144], [9027, 9028, 9145], [9028, 9146, 9145], [9028, 9029, 9147], [9028, 9147, 9146], [9029, 9030, 9147], [9030, 9148, 9147], [9030, 9031, 9149], [9030, 9149, 9148], [9031, 9032, 9149], [9032, 9150, 9149], [9032, 9033, 9151], [9032, 9151, 9150], [9033, 9034, 9151], [9034, 9152, 9151], [9034, 9035, 9153], [9034, 9153, 9152], [9036, 9037, 9155], [9036, 9155, 9154], [9037, 9038, 9155], [9038, 9156, 9155], [9038, 9039, 9157], [9038, 9157, 9156], [9039, 9040, 9157], [9040, 9158, 9157], [9040, 9041, 9159], [9040, 9159, 9158], [9041, 9042, 9159], [9042, 9160, 9159], [9042, 9043, 9161], [9042, 9161, 9160], [9043, 9044, 9161], [9044, 9162, 9161], [9044, 9045, 9163], [9044, 9163, 9162], [9045, 9046, 9163], [9046, 9164, 9163], [9046, 9047, 9165], [9046, 9165, 9164], [9047, 9048, 9165], [9048, 9166, 9165], [9048, 9049, 9167], [9048, 9167, 9166], [9049, 9050, 9167], [9050, 9168, 9167], [9050, 9051, 9169], [9050, 9169, 9168], [9051, 9052, 9169], [9052, 9170, 9169], [9052, 9053, 9171], [9052, 9171, 9170], [9053, 9054, 9171], [9054, 
9172, 9171], [9054, 9055, 9173], [9054, 9173, 9172], [9055, 9056, 9173], [9056, 9174, 9173], [9056, 9057, 9175], [9056, 9175, 9174], [9057, 9058, 9175], [9058, 9176, 9175], [9058, 9059, 9177], [9058, 9177, 9176], [9059, 9060, 9177], [9060, 9178, 9177], [9060, 9061, 9179], [9060, 9179, 9178], [9061, 9062, 9179], [9062, 9180, 9179], [9062, 9063, 9181], [9062, 9181, 9180], [9063, 9064, 9181], [9064, 9182, 9181], [9064, 9065, 9183], [9064, 9183, 9182], [9065, 9066, 9183], [9066, 9184, 9183], [9066, 9067, 9185], [9066, 9185, 9184], [9067, 9068, 9185], [9068, 9186, 9185], [9068, 9069, 9187], [9068, 9187, 9186], [9069, 9070, 9187], [9070, 9188, 9187], [9070, 9071, 9189], [9070, 9189, 9188], [9071, 9072, 9189], [9072, 9190, 9189], [9072, 9073, 9191], [9072, 9191, 9190], [9073, 9074, 9191], [9074, 9192, 9191], [9074, 9075, 9193], [9074, 9193, 9192], [9075, 9076, 9193], [9076, 9194, 9193], [9076, 9077, 9195], [9076, 9195, 9194], [9077, 9078, 9195], [9078, 9196, 9195], [9078, 9079, 9197], [9078, 9197, 9196], [9079, 9080, 9197], [9080, 9198, 9197], [9080, 9081, 9199], [9080, 9199, 9198], [9081, 9082, 9199], [9082, 9200, 9199], [9082, 9083, 9201], [9082, 9201, 9200], [9083, 9084, 9201], [9084, 9202, 9201], [9084, 9085, 9203], [9084, 9203, 9202], [9085, 9086, 9203], [9086, 9204, 9203], [9086, 9087, 9205], [9086, 9205, 9204], [9087, 9088, 9205], [9088, 9206, 9205], [9088, 9089, 9207], [9088, 9207, 9206], [9089, 9090, 9207], [9090, 9208, 9207], [9090, 9091, 9209], [9090, 9209, 9208], [9091, 9092, 9209], [9092, 9210, 9209], [9092, 9093, 9211], [9092, 9211, 9210], [9093, 9094, 9211], [9094, 9212, 9211], [9094, 9095, 9213], [9094, 9213, 9212], [9095, 9096, 9213], [9096, 9214, 9213], [9096, 9097, 9215], [9096, 9215, 9214], [9097, 9098, 9215], [9098, 9216, 9215], [9098, 9099, 9217], [9098, 9217, 9216], [9099, 9100, 9217], [9100, 9218, 9217], [9100, 9101, 9219], [9100, 9219, 9218], [9101, 9102, 9219], [9102, 9220, 9219], [9102, 9103, 9221], [9102, 9221, 9220], [9103, 9104, 9221], [9104, 9222, 9221], [9104, 9105, 9223], [9104, 9223, 9222], [9105, 9106, 9223], [9106, 9224, 9223], [9106, 9107, 9225], [9106, 9225, 9224], [9107, 9108, 9225], [9108, 9226, 9225], [9108, 9109, 9227], [9108, 9227, 9226], [9109, 9110, 9227], [9110, 9228, 9227], [9110, 9111, 9229], [9110, 9229, 9228], [9111, 9112, 9229], [9112, 9230, 9229], [9112, 9113, 9231], [9112, 9231, 9230], [9113, 9114, 9231], [9114, 9232, 9231], [9114, 9115, 9233], [9114, 9233, 9232], [9115, 9116, 9233], [9116, 9234, 9233], [9116, 9117, 9235], [9116, 9235, 9234], [9117, 9118, 9235], [9118, 9236, 9235], [9118, 9119, 9237], [9118, 9237, 9236], [9119, 9120, 9237], [9120, 9238, 9237], [9120, 9121, 9239], [9120, 9239, 9238], [9121, 9122, 9239], [9122, 9240, 9239], [9122, 9123, 9241], [9122, 9241, 9240], [9123, 9124, 9241], [9124, 9242, 9241], [9124, 9125, 9243], [9124, 9243, 9242], [9125, 9126, 9243], [9126, 9244, 9243], [9126, 9127, 9245], [9126, 9245, 9244], [9127, 9128, 9245], [9128, 9246, 9245], [9128, 9129, 9247], [9128, 9247, 9246], [9129, 9130, 9247], [9130, 9248, 9247], [9130, 9131, 9249], [9130, 9249, 9248], [9131, 9132, 9249], [9132, 9250, 9249], [9132, 9133, 9251], [9132, 9251, 9250], [9133, 9134, 9251], [9134, 9252, 9251], [9135, 9136, 9254], [9135, 9254, 9253], [9136, 9137, 9254], [9137, 9255, 9254], [9137, 9138, 9256], [9137, 9256, 9255], [9138, 9139, 9256], [9139, 9257, 9256], [9139, 9140, 9258], [9139, 9258, 9257], [9140, 9141, 9258], [9141, 9259, 9258], [9141, 9142, 9260], [9141, 9260, 9259], [9142, 9143, 9260], [9143, 9261, 9260], [9143, 9144, 9262], 
[9143, 9262, 9261], [9144, 9145, 9262], [9145, 9263, 9262], [9145, 9146, 9264], [9145, 9264, 9263], [9146, 9147, 9264], [9147, 9265, 9264], [9147, 9148, 9266], [9147, 9266, 9265], [9148, 9149, 9266], [9149, 9267, 9266], [9149, 9150, 9268], [9149, 9268, 9267], [9150, 9151, 9268], [9151, 9269, 9268], [9151, 9152, 9270], [9151, 9270, 9269], [9152, 9153, 9270], [9153, 9271, 9270], [9154, 9155, 9272], [9155, 9273, 9272], [9155, 9156, 9274], [9155, 9274, 9273], [9156, 9157, 9274], [9157, 9275, 9274], [9157, 9158, 9276], [9157, 9276, 9275], [9158, 9159, 9276], [9159, 9277, 9276], [9159, 9160, 9278], [9159, 9278, 9277], [9160, 9161, 9278], [9161, 9279, 9278], [9161, 9162, 9280], [9161, 9280, 9279], [9162, 9163, 9280], [9163, 9281, 9280], [9163, 9164, 9282], [9163, 9282, 9281], [9164, 9165, 9282], [9165, 9283, 9282], [9165, 9166, 9284], [9165, 9284, 9283], [9166, 9167, 9284], [9167, 9285, 9284], [9167, 9168, 9286], [9167, 9286, 9285], [9168, 9169, 9286], [9169, 9287, 9286], [9169, 9170, 9288], [9169, 9288, 9287], [9170, 9171, 9288], [9171, 9289, 9288], [9171, 9172, 9290], [9171, 9290, 9289], [9172, 9173, 9290], [9173, 9291, 9290], [9173, 9174, 9292], [9173, 9292, 9291], [9174, 9175, 9292], [9175, 9293, 9292], [9175, 9176, 9294], [9175, 9294, 9293], [9176, 9177, 9294], [9177, 9295, 9294], [9177, 9178, 9296], [9177, 9296, 9295], [9178, 9179, 9296], [9179, 9297, 9296], [9179, 9180, 9298], [9179, 9298, 9297], [9180, 9181, 9298], [9181, 9299, 9298], [9181, 9182, 9300], [9181, 9300, 9299], [9182, 9183, 9300], [9183, 9301, 9300], [9183, 9184, 9302], [9183, 9302, 9301], [9184, 9185, 9302], [9185, 9303, 9302], [9185, 9186, 9304], [9185, 9304, 9303], [9186, 9187, 9304], [9187, 9305, 9304], [9187, 9188, 9306], [9187, 9306, 9305], [9188, 9189, 9306], [9189, 9307, 9306], [9189, 9190, 9308], [9189, 9308, 9307], [9190, 9191, 9308], [9191, 9309, 9308], [9191, 9192, 9310], [9191, 9310, 9309], [9192, 9193, 9310], [9193, 9311, 9310], [9193, 9194, 9312], [9193, 9312, 9311], [9194, 9195, 9312], [9195, 9313, 9312], [9195, 9196, 9314], [9195, 9314, 9313], [9196, 9197, 9314], [9197, 9315, 9314], [9197, 9198, 9316], [9197, 9316, 9315], [9198, 9199, 9316], [9199, 9317, 9316], [9199, 9200, 9318], [9199, 9318, 9317], [9200, 9201, 9318], [9201, 9319, 9318], [9201, 9202, 9320], [9201, 9320, 9319], [9202, 9203, 9320], [9203, 9321, 9320], [9203, 9204, 9322], [9203, 9322, 9321], [9204, 9205, 9322], [9205, 9323, 9322], [9205, 9206, 9324], [9205, 9324, 9323], [9206, 9207, 9324], [9207, 9325, 9324], [9207, 9208, 9326], [9207, 9326, 9325], [9208, 9209, 9326], [9209, 9327, 9326], [9209, 9210, 9328], [9209, 9328, 9327], [9210, 9211, 9328], [9211, 9329, 9328], [9211, 9212, 9330], [9211, 9330, 9329], [9212, 9213, 9330], [9213, 9331, 9330], [9213, 9214, 9332], [9213, 9332, 9331], [9214, 9215, 9332], [9215, 9333, 9332], [9215, 9216, 9334], [9215, 9334, 9333], [9216, 9217, 9334], [9217, 9335, 9334], [9217, 9218, 9336], [9217, 9336, 9335], [9218, 9219, 9336], [9219, 9337, 9336], [9219, 9220, 9338], [9219, 9338, 9337], [9220, 9221, 9338], [9221, 9339, 9338], [9221, 9222, 9340], [9221, 9340, 9339], [9222, 9223, 9340], [9223, 9341, 9340], [9223, 9224, 9342], [9223, 9342, 9341], [9224, 9225, 9342], [9225, 9343, 9342], [9225, 9226, 9344], [9225, 9344, 9343], [9226, 9227, 9344], [9227, 9345, 9344], [9227, 9228, 9346], [9227, 9346, 9345], [9228, 9229, 9346], [9229, 9347, 9346], [9229, 9230, 9348], [9229, 9348, 9347], [9230, 9231, 9348], [9231, 9349, 9348], [9231, 9232, 9350], [9231, 9350, 9349], [9232, 9233, 9350], [9233, 9351, 9350], [9233, 9234, 
9352], [9233, 9352, 9351], [9234, 9235, 9352], [9235, 9353, 9352], [9235, 9236, 9354], [9235, 9354, 9353], [9236, 9237, 9354], [9237, 9355, 9354], [9237, 9238, 9356], [9237, 9356, 9355], [9238, 9239, 9356], [9239, 9357, 9356], [9239, 9240, 9358], [9239, 9358, 9357], [9240, 9241, 9358], [9241, 9359, 9358], [9241, 9242, 9360], [9241, 9360, 9359], [9242, 9243, 9360], [9243, 9361, 9360], [9243, 9244, 9362], [9243, 9362, 9361], [9244, 9245, 9362], [9245, 9363, 9362], [9245, 9246, 9364], [9245, 9364, 9363], [9246, 9247, 9364], [9247, 9365, 9364], [9247, 9248, 9366], [9247, 9366, 9365], [9248, 9249, 9366], [9249, 9367, 9366], [9249, 9250, 9368], [9249, 9368, 9367], [9250, 9251, 9368], [9251, 9369, 9368], [9251, 9252, 9370], [9251, 9370, 9369], [9253, 9254, 9371], [9254, 9372, 9371], [9254, 9255, 9373], [9254, 9373, 9372], [9255, 9256, 9373], [9256, 9374, 9373], [9256, 9257, 9375], [9256, 9375, 9374], [9257, 9258, 9375], [9258, 9376, 9375], [9258, 9259, 9377], [9258, 9377, 9376], [9259, 9260, 9377], [9260, 9378, 9377], [9260, 9261, 9379], [9260, 9379, 9378], [9261, 9262, 9379], [9262, 9380, 9379], [9262, 9263, 9381], [9262, 9381, 9380], [9263, 9264, 9381], [9264, 9382, 9381], [9264, 9265, 9383], [9264, 9383, 9382], [9265, 9266, 9383], [9266, 9384, 9383], [9266, 9267, 9385], [9266, 9385, 9384], [9267, 9268, 9385], [9268, 9386, 9385], [9268, 9269, 9387], [9268, 9387, 9386], [9269, 9270, 9387], [9270, 9388, 9387], [9270, 9271, 9389], [9270, 9389, 9388], [9272, 9273, 9391], [9272, 9391, 9390], [9273, 9274, 9391], [9274, 9392, 9391], [9274, 9275, 9393], [9274, 9393, 9392], [9275, 9276, 9393], [9276, 9394, 9393], [9276, 9277, 9395], [9276, 9395, 9394], [9277, 9278, 9395], [9278, 9396, 9395], [9278, 9279, 9397], [9278, 9397, 9396], [9279, 9280, 9397], [9280, 9398, 9397], [9280, 9281, 9399], [9280, 9399, 9398], [9281, 9282, 9399], [9282, 9400, 9399], [9282, 9283, 9401], [9282, 9401, 9400], [9283, 9284, 9401], [9284, 9402, 9401], [9284, 9285, 9403], [9284, 9403, 9402], [9285, 9286, 9403], [9286, 9404, 9403], [9286, 9287, 9405], [9286, 9405, 9404], [9287, 9288, 9405], [9288, 9406, 9405], [9288, 9289, 9407], [9288, 9407, 9406], [9289, 9290, 9407], [9290, 9408, 9407], [9290, 9291, 9409], [9290, 9409, 9408], [9291, 9292, 9409], [9292, 9410, 9409], [9292, 9293, 9411], [9292, 9411, 9410], [9293, 9294, 9411], [9294, 9412, 9411], [9294, 9295, 9413], [9294, 9413, 9412], [9295, 9296, 9413], [9296, 9414, 9413], [9296, 9297, 9415], [9296, 9415, 9414], [9297, 9298, 9415], [9298, 9416, 9415], [9298, 9299, 9417], [9298, 9417, 9416], [9299, 9300, 9417], [9300, 9418, 9417], [9300, 9301, 9419], [9300, 9419, 9418], [9301, 9302, 9419], [9302, 9420, 9419], [9302, 9303, 9421], [9302, 9421, 9420], [9303, 9304, 9421], [9304, 9422, 9421], [9304, 9305, 9423], [9304, 9423, 9422], [9305, 9306, 9423], [9306, 9424, 9423], [9306, 9307, 9425], [9306, 9425, 9424], [9307, 9308, 9425], [9308, 9426, 9425], [9308, 9309, 9427], [9308, 9427, 9426], [9309, 9310, 9427], [9310, 9428, 9427], [9310, 9311, 9429], [9310, 9429, 9428], [9311, 9312, 9429], [9312, 9430, 9429], [9312, 9313, 9431], [9312, 9431, 9430], [9313, 9314, 9431], [9314, 9432, 9431], [9314, 9315, 9433], [9314, 9433, 9432], [9315, 9316, 9433], [9316, 9434, 9433], [9316, 9317, 9435], [9316, 9435, 9434], [9317, 9318, 9435], [9318, 9436, 9435], [9318, 9319, 9437], [9318, 9437, 9436], [9319, 9320, 9437], [9320, 9438, 9437], [9320, 9321, 9439], [9320, 9439, 9438], [9321, 9322, 9439], [9322, 9440, 9439], [9322, 9323, 9441], [9322, 9441, 9440], [9323, 9324, 9441], [9324, 9442, 9441], [9324, 
9325, 9443], [9324, 9443, 9442], [9325, 9326, 9443], [9326, 9444, 9443], [9326, 9327, 9445], [9326, 9445, 9444], [9327, 9328, 9445], [9328, 9446, 9445], [9328, 9329, 9447], [9328, 9447, 9446], [9329, 9330, 9447], [9330, 9448, 9447], [9330, 9331, 9449], [9330, 9449, 9448], [9331, 9332, 9449], [9332, 9450, 9449], [9332, 9333, 9451], [9332, 9451, 9450], [9333, 9334, 9451], [9334, 9452, 9451], [9334, 9335, 9453], [9334, 9453, 9452], [9335, 9336, 9453], [9336, 9454, 9453], [9336, 9337, 9455], [9336, 9455, 9454], [9337, 9338, 9455], [9338, 9456, 9455], [9338, 9339, 9457], [9338, 9457, 9456], [9339, 9340, 9457], [9340, 9458, 9457], [9340, 9341, 9459], [9340, 9459, 9458], [9341, 9342, 9459], [9342, 9460, 9459], [9342, 9343, 9461], [9342, 9461, 9460], [9343, 9344, 9461], [9344, 9462, 9461], [9344, 9345, 9463], [9344, 9463, 9462], [9345, 9346, 9463], [9346, 9464, 9463], [9346, 9347, 9465], [9346, 9465, 9464], [9347, 9348, 9465], [9348, 9466, 9465], [9348, 9349, 9467], [9348, 9467, 9466], [9349, 9350, 9467], [9350, 9468, 9467], [9350, 9351, 9469], [9350, 9469, 9468], [9351, 9352, 9469], [9352, 9470, 9469], [9352, 9353, 9471], [9352, 9471, 9470], [9353, 9354, 9471], [9354, 9472, 9471], [9354, 9355, 9473], [9354, 9473, 9472], [9355, 9356, 9473], [9356, 9474, 9473], [9356, 9357, 9475], [9356, 9475, 9474], [9357, 9358, 9475], [9358, 9476, 9475], [9358, 9359, 9477], [9358, 9477, 9476], [9359, 9360, 9477], [9360, 9478, 9477], [9360, 9361, 9479], [9360, 9479, 9478], [9361, 9362, 9479], [9362, 9480, 9479], [9362, 9363, 9481], [9362, 9481, 9480], [9363, 9364, 9481], [9364, 9482, 9481], [9364, 9365, 9483], [9364, 9483, 9482], [9365, 9366, 9483], [9366, 9484, 9483], [9366, 9367, 9485], [9366, 9485, 9484], [9367, 9368, 9485], [9368, 9486, 9485], [9368, 9369, 9487], [9368, 9487, 9486], [9369, 9370, 9487], [9370, 9488, 9487], [9371, 9372, 9490], [9371, 9490, 9489], [9372, 9373, 9490], [9373, 9491, 9490], [9373, 9374, 9492], [9373, 9492, 9491], [9374, 9375, 9492], [9375, 9493, 9492], [9375, 9376, 9494], [9375, 9494, 9493], [9376, 9377, 9494], [9377, 9495, 9494], [9377, 9378, 9496], [9377, 9496, 9495], [9378, 9379, 9496], [9379, 9497, 9496], [9379, 9380, 9498], [9379, 9498, 9497], [9380, 9381, 9498], [9381, 9499, 9498], [9381, 9382, 9500], [9381, 9500, 9499], [9382, 9383, 9500], [9383, 9501, 9500], [9383, 9384, 9502], [9383, 9502, 9501], [9384, 9385, 9502], [9385, 9503, 9502], [9385, 9386, 9504], [9385, 9504, 9503], [9386, 9387, 9504], [9387, 9505, 9504], [9387, 9388, 9506], [9387, 9506, 9505], [9388, 9389, 9506], [9389, 9507, 9506], [9390, 9391, 9508], [9391, 9509, 9508], [9391, 9392, 9510], [9391, 9510, 9509], [9392, 9393, 9510], [9393, 9511, 9510], [9393, 9394, 9512], [9393, 9512, 9511], [9394, 9395, 9512], [9395, 9513, 9512], [9395, 9396, 9514], [9395, 9514, 9513], [9396, 9397, 9514], [9397, 9515, 9514], [9397, 9398, 9516], [9397, 9516, 9515], [9398, 9399, 9516], [9399, 9517, 9516], [9399, 9400, 9518], [9399, 9518, 9517], [9400, 9401, 9518], [9401, 9519, 9518], [9401, 9402, 9520], [9401, 9520, 9519], [9402, 9403, 9520], [9403, 9521, 9520], [9403, 9404, 9522], [9403, 9522, 9521], [9404, 9405, 9522], [9405, 9523, 9522], [9405, 9406, 9524], [9405, 9524, 9523], [9406, 9407, 9524], [9407, 9525, 9524], [9407, 9408, 9526], [9407, 9526, 9525], [9408, 9409, 9526], [9409, 9527, 9526], [9409, 9410, 9528], [9409, 9528, 9527], [9410, 9411, 9528], [9411, 9529, 9528], [9411, 9412, 9530], [9411, 9530, 9529], [9412, 9413, 9530], [9413, 9531, 9530], [9413, 9414, 9532], [9413, 9532, 9531], [9414, 9415, 9532], [9415, 9533, 9532], 
[9415, 9416, 9534], [9415, 9534, 9533], [9416, 9417, 9534], [9417, 9535, 9534], [9417, 9418, 9536], [9417, 9536, 9535], [9418, 9419, 9536], [9419, 9537, 9536], [9419, 9420, 9538], [9419, 9538, 9537], [9420, 9421, 9538], [9421, 9539, 9538], [9421, 9422, 9540], [9421, 9540, 9539], [9422, 9423, 9540], [9423, 9541, 9540], [9423, 9424, 9542], [9423, 9542, 9541], [9424, 9425, 9542], [9425, 9543, 9542], [9425, 9426, 9544], [9425, 9544, 9543], [9426, 9427, 9544], [9427, 9545, 9544], [9427, 9428, 9546], [9427, 9546, 9545], [9428, 9429, 9546], [9429, 9547, 9546], [9429, 9430, 9548], [9429, 9548, 9547], [9430, 9431, 9548], [9431, 9549, 9548], [9431, 9432, 9550], [9431, 9550, 9549], [9432, 9433, 9550], [9433, 9551, 9550], [9433, 9434, 9552], [9433, 9552, 9551], [9434, 9435, 9552], [9435, 9553, 9552], [9435, 9436, 9554], [9435, 9554, 9553], [9436, 9437, 9554], [9437, 9555, 9554], [9437, 9438, 9556], [9437, 9556, 9555], [9438, 9439, 9556], [9439, 9557, 9556], [9439, 9440, 9558], [9439, 9558, 9557], [9440, 9441, 9558], [9441, 9559, 9558], [9441, 9442, 9560], [9441, 9560, 9559], [9442, 9443, 9560], [9443, 9561, 9560], [9443, 9444, 9562], [9443, 9562, 9561], [9444, 9445, 9562], [9445, 9563, 9562], [9445, 9446, 9564], [9445, 9564, 9563], [9446, 9447, 9564], [9447, 9565, 9564], [9447, 9448, 9566], [9447, 9566, 9565], [9448, 9449, 9566], [9449, 9567, 9566], [9449, 9450, 9568], [9449, 9568, 9567], [9450, 9451, 9568], [9451, 9569, 9568], [9451, 9452, 9570], [9451, 9570, 9569], [9452, 9453, 9570], [9453, 9571, 9570], [9453, 9454, 9572], [9453, 9572, 9571], [9454, 9455, 9572], [9455, 9573, 9572], [9455, 9456, 9574], [9455, 9574, 9573], [9456, 9457, 9574], [9457, 9575, 9574], [9457, 9458, 9576], [9457, 9576, 9575], [9458, 9459, 9576], [9459, 9577, 9576], [9459, 9460, 9578], [9459, 9578, 9577], [9460, 9461, 9578], [9461, 9579, 9578], [9461, 9462, 9580], [9461, 9580, 9579], [9462, 9463, 9580], [9463, 9581, 9580], [9463, 9464, 9582], [9463, 9582, 9581], [9464, 9465, 9582], [9465, 9583, 9582], [9465, 9466, 9584], [9465, 9584, 9583], [9466, 9467, 9584], [9467, 9585, 9584], [9467, 9468, 9586], [9467, 9586, 9585], [9468, 9469, 9586], [9469, 9587, 9586], [9469, 9470, 9588], [9469, 9588, 9587], [9470, 9471, 9588], [9471, 9589, 9588], [9471, 9472, 9590], [9471, 9590, 9589], [9472, 9473, 9590], [9473, 9591, 9590], [9473, 9474, 9592], [9473, 9592, 9591], [9474, 9475, 9592], [9475, 9593, 9592], [9475, 9476, 9594], [9475, 9594, 9593], [9476, 9477, 9594], [9477, 9595, 9594], [9477, 9478, 9596], [9477, 9596, 9595], [9478, 9479, 9596], [9479, 9597, 9596], [9479, 9480, 9598], [9479, 9598, 9597], [9480, 9481, 9598], [9481, 9599, 9598], [9481, 9482, 9600], [9481, 9600, 9599], [9482, 9483, 9600], [9483, 9601, 9600], [9483, 9484, 9602], [9483, 9602, 9601], [9484, 9485, 9602], [9485, 9603, 9602], [9485, 9486, 9604], [9485, 9604, 9603], [9486, 9487, 9604], [9487, 9605, 9604], [9487, 9488, 9606], [9487, 9606, 9605], [9489, 9490, 9607], [9490, 9608, 9607], [9490, 9491, 9609], [9490, 9609, 9608], [9491, 9492, 9609], [9492, 9610, 9609], [9492, 9493, 9611], [9492, 9611, 9610], [9493, 9494, 9611], [9494, 9612, 9611], [9494, 9495, 9613], [9494, 9613, 9612], [9495, 9496, 9613], [9496, 9614, 9613], [9496, 9497, 9615], [9496, 9615, 9614], [9497, 9498, 9615], [9498, 9616, 9615], [9498, 9499, 9617], [9498, 9617, 9616], [9499, 9500, 9617], [9500, 9618, 9617], [9500, 9501, 9619], [9500, 9619, 9618], [9501, 9502, 9619], [9502, 9620, 9619], [9502, 9503, 9621], [9502, 9621, 9620], [9503, 9504, 9621], [9504, 9622, 9621], [9504, 9505, 9623], [9504, 9623, 
9622], [9505, 9506, 9623], [9506, 9624, 9623], [9506, 9507, 9625], [9506, 9625, 9624], [9508, 9509, 9627], [9508, 9627, 9626], [9509, 9510, 9627], [9510, 9628, 9627], [9510, 9511, 9629], [9510, 9629, 9628], [9511, 9512, 9629], [9512, 9630, 9629], [9512, 9513, 9631], [9512, 9631, 9630], [9513, 9514, 9631], [9514, 9632, 9631], [9514, 9515, 9633], [9514, 9633, 9632], [9515, 9516, 9633], [9516, 9634, 9633], [9516, 9517, 9635], [9516, 9635, 9634], [9517, 9518, 9635], [9518, 9636, 9635], [9518, 9519, 9637], [9518, 9637, 9636], [9519, 9520, 9637], [9520, 9638, 9637], [9520, 9521, 9639], [9520, 9639, 9638], [9521, 9522, 9639], [9522, 9640, 9639], [9522, 9523, 9641], [9522, 9641, 9640], [9523, 9524, 9641], [9524, 9642, 9641], [9524, 9525, 9643], [9524, 9643, 9642], [9525, 9526, 9643], [9526, 9644, 9643], [9526, 9527, 9645], [9526, 9645, 9644], [9527, 9528, 9645], [9528, 9646, 9645], [9528, 9529, 9647], [9528, 9647, 9646], [9529, 9530, 9647], [9530, 9648, 9647], [9530, 9531, 9649], [9530, 9649, 9648], [9531, 9532, 9649], [9532, 9650, 9649], [9532, 9533, 9651], [9532, 9651, 9650], [9533, 9534, 9651], [9534, 9652, 9651], [9534, 9535, 9653], [9534, 9653, 9652], [9535, 9536, 9653], [9536, 9654, 9653], [9536, 9537, 9655], [9536, 9655, 9654], [9537, 9538, 9655], [9538, 9656, 9655], [9538, 9539, 9657], [9538, 9657, 9656], [9539, 9540, 9657], [9540, 9658, 9657], [9540, 9541, 9659], [9540, 9659, 9658], [9541, 9542, 9659], [9542, 9660, 9659], [9542, 9543, 9661], [9542, 9661, 9660], [9543, 9544, 9661], [9544, 9662, 9661], [9544, 9545, 9663], [9544, 9663, 9662], [9545, 9546, 9663], [9546, 9664, 9663], [9546, 9547, 9665], [9546, 9665, 9664], [9547, 9548, 9665], [9548, 9666, 9665], [9548, 9549, 9667], [9548, 9667, 9666], [9549, 9550, 9667], [9550, 9668, 9667], [9550, 9551, 9669], [9550, 9669, 9668], [9551, 9552, 9669], [9552, 9670, 9669], [9552, 9553, 9671], [9552, 9671, 9670], [9553, 9554, 9671], [9554, 9672, 9671], [9554, 9555, 9673], [9554, 9673, 9672], [9555, 9556, 9673], [9556, 9674, 9673], [9556, 9557, 9675], [9556, 9675, 9674], [9557, 9558, 9675], [9558, 9676, 9675], [9558, 9559, 9677], [9558, 9677, 9676], [9559, 9560, 9677], [9560, 9678, 9677], [9560, 9561, 9679], [9560, 9679, 9678], [9561, 9562, 9679], [9562, 9680, 9679], [9562, 9563, 9681], [9562, 9681, 9680], [9563, 9564, 9681], [9564, 9682, 9681], [9564, 9565, 9683], [9564, 9683, 9682], [9565, 9566, 9683], [9566, 9684, 9683], [9566, 9567, 9685], [9566, 9685, 9684], [9567, 9568, 9685], [9568, 9686, 9685], [9568, 9569, 9687], [9568, 9687, 9686], [9569, 9570, 9687], [9570, 9688, 9687], [9570, 9571, 9689], [9570, 9689, 9688], [9571, 9572, 9689], [9572, 9690, 9689], [9572, 9573, 9691], [9572, 9691, 9690], [9573, 9574, 9691], [9574, 9692, 9691], [9574, 9575, 9693], [9574, 9693, 9692], [9575, 9576, 9693], [9576, 9694, 9693], [9576, 9577, 9695], [9576, 9695, 9694], [9577, 9578, 9695], [9578, 9696, 9695], [9578, 9579, 9697], [9578, 9697, 9696], [9579, 9580, 9697], [9580, 9698, 9697], [9580, 9581, 9699], [9580, 9699, 9698], [9581, 9582, 9699], [9582, 9700, 9699], [9582, 9583, 9701], [9582, 9701, 9700], [9583, 9584, 9701], [9584, 9702, 9701], [9584, 9585, 9703], [9584, 9703, 9702], [9585, 9586, 9703], [9586, 9704, 9703], [9586, 9587, 9705], [9586, 9705, 9704], [9587, 9588, 9705], [9588, 9706, 9705], [9588, 9589, 9707], [9588, 9707, 9706], [9589, 9590, 9707], [9590, 9708, 9707], [9590, 9591, 9709], [9590, 9709, 9708], [9591, 9592, 9709], [9592, 9710, 9709], [9592, 9593, 9711], [9592, 9711, 9710], [9593, 9594, 9711], [9594, 9712, 9711], [9594, 9595, 9713], [9594, 
9713, 9712], [9595, 9596, 9713], [9596, 9714, 9713], [9596, 9597, 9715], [9596, 9715, 9714], [9597, 9598, 9715], [9598, 9716, 9715], [9598, 9599, 9717], [9598, 9717, 9716], [9599, 9600, 9717], [9600, 9718, 9717], [9600, 9601, 9719], [9600, 9719, 9718], [9601, 9602, 9719], [9602, 9720, 9719], [9602, 9603, 9721], [9602, 9721, 9720], [9603, 9604, 9721], [9604, 9722, 9721], [9604, 9605, 9723], [9604, 9723, 9722], [9605, 9606, 9723], [9606, 9724, 9723], [9607, 9608, 9726], [9607, 9726, 9725], [9608, 9609, 9726], [9609, 9727, 9726], [9609, 9610, 9728], [9609, 9728, 9727], [9610, 9611, 9728], [9611, 9729, 9728], [9611, 9612, 9730], [9611, 9730, 9729], [9612, 9613, 9730], [9613, 9731, 9730], [9613, 9614, 9732], [9613, 9732, 9731], [9614, 9615, 9732], [9615, 9733, 9732], [9615, 9616, 9734], [9615, 9734, 9733], [9616, 9617, 9734], [9617, 9735, 9734], [9617, 9618, 9736], [9617, 9736, 9735], [9618, 9619, 9736], [9619, 9737, 9736], [9619, 9620, 9738], [9619, 9738, 9737], [9620, 9621, 9738], [9621, 9739, 9738], [9621, 9622, 9740], [9621, 9740, 9739], [9622, 9623, 9740], [9623, 9741, 9740], [9623, 9624, 9742], [9623, 9742, 9741], [9624, 9625, 9742], [9625, 9743, 9742], [9626, 9627, 9744], [9627, 9745, 9744], [9627, 9628, 9746], [9627, 9746, 9745], [9628, 9629, 9746], [9629, 9747, 9746], [9629, 9630, 9748], [9629, 9748, 9747], [9630, 9631, 9748], [9631, 9749, 9748], [9631, 9632, 9750], [9631, 9750, 9749], [9632, 9633, 9750], [9633, 9751, 9750], [9633, 9634, 9752], [9633, 9752, 9751], [9634, 9635, 9752], [9635, 9753, 9752], [9635, 9636, 9754], [9635, 9754, 9753], [9636, 9637, 9754], [9637, 9755, 9754], [9637, 9638, 9756], [9637, 9756, 9755], [9638, 9639, 9756], [9639, 9757, 9756], [9639, 9640, 9758], [9639, 9758, 9757], [9640, 9641, 9758], [9641, 9759, 9758], [9641, 9642, 9760], [9641, 9760, 9759], [9642, 9643, 9760], [9643, 9761, 9760], [9643, 9644, 9762], [9643, 9762, 9761], [9644, 9645, 9762], [9645, 9763, 9762], [9645, 9646, 9764], [9645, 9764, 9763], [9646, 9647, 9764], [9647, 9765, 9764], [9647, 9648, 9766], [9647, 9766, 9765], [9648, 9649, 9766], [9649, 9767, 9766], [9649, 9650, 9768], [9649, 9768, 9767], [9650, 9651, 9768], [9651, 9769, 9768], [9651, 9652, 9770], [9651, 9770, 9769], [9652, 9653, 9770], [9653, 9771, 9770], [9653, 9654, 9772], [9653, 9772, 9771], [9654, 9655, 9772], [9655, 9773, 9772], [9655, 9656, 9774], [9655, 9774, 9773], [9656, 9657, 9774], [9657, 9775, 9774], [9657, 9658, 9776], [9657, 9776, 9775], [9658, 9659, 9776], [9659, 9777, 9776], [9659, 9660, 9778], [9659, 9778, 9777], [9660, 9661, 9778], [9661, 9779, 9778], [9661, 9662, 9780], [9661, 9780, 9779], [9662, 9663, 9780], [9663, 9781, 9780], [9663, 9664, 9782], [9663, 9782, 9781], [9664, 9665, 9782], [9665, 9783, 9782], [9665, 9666, 9784], [9665, 9784, 9783], [9666, 9667, 9784], [9667, 9785, 9784], [9667, 9668, 9786], [9667, 9786, 9785], [9668, 9669, 9786], [9669, 9787, 9786], [9669, 9670, 9788], [9669, 9788, 9787], [9670, 9671, 9788], [9671, 9789, 9788], [9671, 9672, 9790], [9671, 9790, 9789], [9672, 9673, 9790], [9673, 9791, 9790], [9673, 9674, 9792], [9673, 9792, 9791], [9674, 9675, 9792], [9675, 9793, 9792], [9675, 9676, 9794], [9675, 9794, 9793], [9676, 9677, 9794], [9677, 9795, 9794], [9677, 9678, 9796], [9677, 9796, 9795], [9678, 9679, 9796], [9679, 9797, 9796], [9679, 9680, 9798], [9679, 9798, 9797], [9680, 9681, 9798], [9681, 9799, 9798], [9681, 9682, 9800], [9681, 9800, 9799], [9682, 9683, 9800], [9683, 9801, 9800], [9683, 9684, 9802], [9683, 9802, 9801], [9684, 9685, 9802], [9685, 9803, 9802], [9685, 9686, 9804], 
[9685, 9804, 9803], [9686, 9687, 9804], [9687, 9805, 9804], [9687, 9688, 9806], [9687, 9806, 9805], [9688, 9689, 9806], [9689, 9807, 9806], [9689, 9690, 9808], [9689, 9808, 9807], [9690, 9691, 9808], [9691, 9809, 9808], [9691, 9692, 9810], [9691, 9810, 9809], [9692, 9693, 9810], [9693, 9811, 9810], [9693, 9694, 9812], [9693, 9812, 9811], [9694, 9695, 9812], [9695, 9813, 9812], [9695, 9696, 9814], [9695, 9814, 9813], [9696, 9697, 9814], [9697, 9815, 9814], [9697, 9698, 9816], [9697, 9816, 9815], [9698, 9699, 9816], [9699, 9817, 9816], [9699, 9700, 9818], [9699, 9818, 9817], [9700, 9701, 9818], [9701, 9819, 9818], [9701, 9702, 9820], [9701, 9820, 9819], [9702, 9703, 9820], [9703, 9821, 9820], [9703, 9704, 9822], [9703, 9822, 9821], [9704, 9705, 9822], [9705, 9823, 9822], [9705, 9706, 9824], [9705, 9824, 9823], [9706, 9707, 9824], [9707, 9825, 9824], [9707, 9708, 9826], [9707, 9826, 9825], [9708, 9709, 9826], [9709, 9827, 9826], [9709, 9710, 9828], [9709, 9828, 9827], [9710, 9711, 9828], [9711, 9829, 9828], [9711, 9712, 9830], [9711, 9830, 9829], [9712, 9713, 9830], [9713, 9831, 9830], [9713, 9714, 9832], [9713, 9832, 9831], [9714, 9715, 9832], [9715, 9833, 9832], [9715, 9716, 9834], [9715, 9834, 9833], [9716, 9717, 9834], [9717, 9835, 9834], [9717, 9718, 9836], [9717, 9836, 9835], [9718, 9719, 9836], [9719, 9837, 9836], [9719, 9720, 9838], [9719, 9838, 9837], [9720, 9721, 9838], [9721, 9839, 9838], [9721, 9722, 9840], [9721, 9840, 9839], [9722, 9723, 9840], [9723, 9841, 9840], [9723, 9724, 9842], [9723, 9842, 9841], [9725, 9726, 9845], [9726, 9846, 9845], [9726, 9727, 9847], [9726, 9847, 9846], [9727, 9728, 9847], [9728, 9848, 9847], [9728, 9729, 9849], [9728, 9849, 9848], [9729, 9730, 9849], [9730, 9850, 9849], [9730, 9731, 9851], [9730, 9851, 9850], [9731, 9732, 9851], [9732, 9852, 9851], [9732, 9733, 9853], [9732, 9853, 9852], [9733, 9734, 9853], [9734, 9854, 9853], [9734, 9735, 9855], [9734, 9855, 9854], [9735, 9736, 9855], [9736, 9856, 9855], [9736, 9737, 9857], [9736, 9857, 9856], [9737, 9738, 9857], [9738, 9858, 9857], [9738, 9739, 9859], [9738, 9859, 9858], [9739, 9740, 9859], [9740, 9860, 9859], [9740, 9741, 9861], [9740, 9861, 9860], [9741, 9742, 9861], [9742, 9862, 9861], [9742, 9743, 9863], [9742, 9863, 9862], [9744, 9745, 9865], [9744, 9865, 9864], [9745, 9746, 9865], [9746, 9866, 9865], [9746, 9747, 9867], [9746, 9867, 9866], [9747, 9748, 9867], [9748, 9868, 9867], [9748, 9749, 9869], [9748, 9869, 9868], [9749, 9750, 9869], [9750, 9870, 9869], [9750, 9751, 9871], [9750, 9871, 9870], [9751, 9752, 9871], [9752, 9872, 9871], [9752, 9753, 9873], [9752, 9873, 9872], [9753, 9754, 9873], [9754, 9874, 9873], [9754, 9755, 9875], [9754, 9875, 9874], [9755, 9756, 9875], [9756, 9876, 9875], [9756, 9757, 9877], [9756, 9877, 9876], [9757, 9758, 9877], [9758, 9878, 9877], [9758, 9759, 9879], [9758, 9879, 9878], [9759, 9760, 9879], [9760, 9880, 9879], [9760, 9761, 9881], [9760, 9881, 9880], [9761, 9762, 9881], [9762, 9882, 9881], [9762, 9763, 9883], [9762, 9883, 9882], [9763, 9764, 9883], [9764, 9884, 9883], [9764, 9765, 9885], [9764, 9885, 9884], [9765, 9766, 9885], [9766, 9886, 9885], [9766, 9767, 9887], [9766, 9887, 9886], [9767, 9768, 9887], [9768, 9888, 9887], [9768, 9769, 9889], [9768, 9889, 9888], [9769, 9770, 9889], [9770, 9890, 9889], [9770, 9771, 9891], [9770, 9891, 9890], [9771, 9772, 9891], [9772, 9892, 9891], [9772, 9773, 9893], [9772, 9893, 9892], [9773, 9774, 9893], [9774, 9894, 9893], [9774, 9775, 9895], [9774, 9895, 9894], [9775, 9776, 9895], [9776, 9896, 9895], [9776, 9777, 
9897], [9776, 9897, 9896], [9777, 9778, 9897], [9778, 9898, 9897], [9778, 9779, 9899], [9778, 9899, 9898], [9779, 9780, 9899], [9780, 9900, 9899], [9780, 9781, 9901], [9780, 9901, 9900], [9781, 9782, 9901], [9782, 9902, 9901], [9782, 9783, 9903], [9782, 9903, 9902], [9783, 9784, 9903], [9784, 9904, 9903], [9784, 9785, 9905], [9784, 9905, 9904], [9785, 9786, 9905], [9786, 9906, 9905], [9786, 9787, 9907], [9786, 9907, 9906], [9787, 9788, 9907], [9788, 9908, 9907], [9788, 9789, 9909], [9788, 9909, 9908], [9789, 9790, 9909], [9790, 9910, 9909], [9790, 9791, 9911], [9790, 9911, 9910], [9791, 9792, 9911], [9792, 9912, 9911], [9792, 9793, 9913], [9792, 9913, 9912], [9793, 9794, 9913], [9794, 9914, 9913], [9794, 9795, 9915], [9794, 9915, 9914], [9795, 9796, 9915], [9796, 9916, 9915], [9796, 9797, 9917], [9796, 9917, 9916], [9797, 9798, 9917], [9798, 9918, 9917], [9798, 9799, 9919], [9798, 9919, 9918], [9799, 9800, 9919], [9800, 9920, 9919], [9800, 9801, 9921], [9800, 9921, 9920], [9801, 9802, 9921], [9802, 9922, 9921], [9802, 9803, 9923], [9802, 9923, 9922], [9803, 9804, 9923], [9804, 9924, 9923], [9804, 9805, 9925], [9804, 9925, 9924], [9805, 9806, 9925], [9806, 9926, 9925], [9806, 9807, 9927], [9806, 9927, 9926], [9807, 9808, 9927], [9808, 9928, 9927], [9808, 9809, 9929], [9808, 9929, 9928], [9809, 9810, 9929], [9810, 9930, 9929], [9810, 9811, 9931], [9810, 9931, 9930], [9811, 9812, 9931], [9812, 9932, 9931], [9812, 9813, 9933], [9812, 9933, 9932], [9813, 9814, 9933], [9814, 9934, 9933], [9814, 9815, 9935], [9814, 9935, 9934], [9815, 9816, 9935], [9816, 9936, 9935], [9816, 9817, 9937], [9816, 9937, 9936], [9817, 9818, 9937], [9818, 9938, 9937], [9818, 9819, 9939], [9818, 9939, 9938], [9819, 9820, 9939], [9820, 9940, 9939], [9820, 9821, 9941], [9820, 9941, 9940], [9821, 9822, 9941], [9822, 9942, 9941], [9822, 9823, 9943], [9822, 9943, 9942], [9823, 9824, 9943], [9824, 9944, 9943], [9824, 9825, 9945], [9824, 9945, 9944], [9825, 9826, 9945], [9826, 9946, 9945], [9826, 9827, 9947], [9826, 9947, 9946], [9827, 9828, 9947], [9828, 9948, 9947], [9828, 9829, 9949], [9828, 9949, 9948], [9829, 9830, 9949], [9830, 9950, 9949], [9830, 9831, 9951], [9830, 9951, 9950], [9831, 9832, 9951], [9832, 9952, 9951], [9832, 9833, 9953], [9832, 9953, 9952], [9833, 9834, 9953], [9834, 9954, 9953], [9834, 9835, 9955], [9834, 9955, 9954], [9835, 9836, 9955], [9836, 9956, 9955], [9836, 9837, 9957], [9836, 9957, 9956], [9837, 9838, 9957], [9838, 9958, 9957], [9838, 9839, 9959], [9838, 9959, 9958], [9839, 9840, 9959], [9840, 9960, 9959], [9840, 9841, 9961], [9840, 9961, 9960], [9841, 9842, 9961], [9842, 9962, 9961], [9843, 9964, 9963], [9843, 9844, 9965], [9843, 9965, 9964], [9844, 9845, 9965], [9845, 9966, 9965], [9845, 9846, 9967], [9845, 9967, 9966], [9846, 9847, 9967], [9847, 9968, 9967], [9847, 9848, 9969], [9847, 9969, 9968], [9848, 9849, 9969], [9849, 9970, 9969], [9849, 9850, 9971], [9849, 9971, 9970], [9850, 9851, 9971], [9851, 9972, 9971], [9851, 9852, 9973], [9851, 9973, 9972], [9852, 9853, 9973], [9853, 9974, 9973], [9853, 9854, 9975], [9853, 9975, 9974], [9854, 9855, 9975], [9855, 9976, 9975], [9855, 9856, 9977], [9855, 9977, 9976], [9856, 9857, 9977], [9857, 9978, 9977], [9857, 9858, 9979], [9857, 9979, 9978], [9858, 9859, 9979], [9859, 9980, 9979], [9859, 9860, 9981], [9859, 9981, 9980], [9860, 9861, 9981], [9861, 9982, 9981], [9861, 9862, 9983], [9861, 9983, 9982], [9862, 9863, 9983], [9863, 9984, 9983], [9864, 9865, 9985], [9865, 9986, 9985], [9865, 9866, 9987], [9865, 9987, 9986], [9866, 9867, 9987], [9867, 
9988, 9987], [9867, 9868, 9989], [9867, 9989, 9988], [9868, 9869, 9989], [9869, 9990, 9989], [9869, 9870, 9991], [9869, 9991, 9990], [9870, 9871, 9991], [9871, 9992, 9991], [9871, 9872, 9993], [9871, 9993, 9992], [9872, 9873, 9993], [9873, 9994, 9993], [9873, 9874, 9995], [9873, 9995, 9994], [9874, 9875, 9995], [9875, 9996, 9995], [9875, 9876, 9997], [9875, 9997, 9996], [9876, 9877, 9997], [9877, 9998, 9997], [9877, 9878, 9999], [9877, 9999, 9998], [9878, 9879, 9999], [9879, 10000, 9999], [9879, 9880, 10001], [9879, 10001, 10000], [9880, 9881, 10001], [9881, 10002, 10001], [9881, 9882, 10003], [9881, 10003, 10002], [9882, 9883, 10003], [9883, 10004, 10003], [9883, 9884, 10005], [9883, 10005, 10004], [9884, 9885, 10005], [9885, 10006, 10005], [9885, 9886, 10007], [9885, 10007, 10006], [9886, 9887, 10007], [9887, 10008, 10007], [9887, 9888, 10009], [9887, 10009, 10008], [9888, 9889, 10009], [9889, 10010, 10009], [9889, 9890, 10011], [9889, 10011, 10010], [9890, 9891, 10011], [9891, 10012, 10011], [9891, 9892, 10013], [9891, 10013, 10012], [9892, 9893, 10013], [9893, 10014, 10013], [9893, 9894, 10015], [9893, 10015, 10014], [9894, 9895, 10015], [9895, 10016, 10015], [9895, 9896, 10017], [9895, 10017, 10016], [9896, 9897, 10017], [9897, 10018, 10017], [9897, 9898, 10019], [9897, 10019, 10018], [9898, 9899, 10019], [9899, 10020, 10019], [9899, 9900, 10021], [9899, 10021, 10020], [9900, 9901, 10021], [9901, 10022, 10021], [9901, 9902, 10023], [9901, 10023, 10022], [9902, 9903, 10023], [9903, 10024, 10023], [9903, 9904, 10025], [9903, 10025, 10024], [9904, 9905, 10025], [9905, 10026, 10025], [9905, 9906, 10027], [9905, 10027, 10026], [9906, 9907, 10027], [9907, 10028, 10027], [9907, 9908, 10029], [9907, 10029, 10028], [9908, 9909, 10029], [9909, 10030, 10029], [9909, 9910, 10031], [9909, 10031, 10030], [9910, 9911, 10031], [9911, 10032, 10031], [9911, 9912, 10033], [9911, 10033, 10032], [9912, 9913, 10033], [9913, 10034, 10033], [9913, 9914, 10035], [9913, 10035, 10034], [9914, 9915, 10035], [9915, 10036, 10035], [9915, 9916, 10037], [9915, 10037, 10036], [9916, 9917, 10037], [9917, 10038, 10037], [9917, 9918, 10039], [9917, 10039, 10038], [9918, 9919, 10039], [9919, 10040, 10039], [9919, 9920, 10041], [9919, 10041, 10040], [9920, 9921, 10041], [9921, 10042, 10041], [9921, 9922, 10043], [9921, 10043, 10042], [9922, 9923, 10043], [9923, 10044, 10043], [9923, 9924, 10045], [9923, 10045, 10044], [9924, 9925, 10045], [9925, 10046, 10045], [9925, 9926, 10047], [9925, 10047, 10046], [9926, 9927, 10047], [9927, 10048, 10047], [9927, 9928, 10049], [9927, 10049, 10048], [9928, 9929, 10049], [9929, 10050, 10049], [9929, 9930, 10051], [9929, 10051, 10050], [9930, 9931, 10051], [9931, 10052, 10051], [9931, 9932, 10053], [9931, 10053, 10052], [9932, 9933, 10053], [9933, 10054, 10053], [9933, 9934, 10055], [9933, 10055, 10054], [9934, 9935, 10055], [9935, 10056, 10055], [9935, 9936, 10057], [9935, 10057, 10056], [9936, 9937, 10057], [9937, 10058, 10057], [9937, 9938, 10059], [9937, 10059, 10058], [9938, 9939, 10059], [9939, 10060, 10059], [9939, 9940, 10061], [9939, 10061, 10060], [9940, 9941, 10061], [9941, 10062, 10061], [9941, 9942, 10063], [9941, 10063, 10062], [9942, 9943, 10063], [9943, 10064, 10063], [9943, 9944, 10065], [9943, 10065, 10064], [9944, 9945, 10065], [9945, 10066, 10065], [9945, 9946, 10067], [9945, 10067, 10066], [9946, 9947, 10067], [9947, 10068, 10067], [9947, 9948, 10069], [9947, 10069, 10068], [9948, 9949, 10069], [9949, 10070, 10069], [9949, 9950, 10071], [9949, 10071, 10070], [9950, 
9951, 10071], [9951, 10072, 10071], [9951, 9952, 10073], [9951, 10073, 10072], [9952, 9953, 10073], [9953, 10074, 10073], [9953, 9954, 10075], [9953, 10075, 10074], [9954, 9955, 10075], [9955, 10076, 10075], [9955, 9956, 10077], [9955, 10077, 10076], [9956, 9957, 10077], [9957, 10078, 10077], [9957, 9958, 10079], [9957, 10079, 10078], [9958, 9959, 10079], [9959, 10080, 10079], [9959, 9960, 10081], [9959, 10081, 10080], [9960, 9961, 10081], [9961, 10082, 10081], [9961, 9962, 10083], [9961, 10083, 10082], [9963, 10089, 10088], [9963, 9964, 10090], [9963, 10090, 10089], [9964, 9965, 10090], [9965, 10091, 10090], [9965, 9966, 10092], [9965, 10092, 10091], [9966, 9967, 10092], [9967, 10093, 10092], [9967, 9968, 10094], [9967, 10094, 10093], [9968, 9969, 10094], [9969, 10095, 10094], [9969, 9970, 10096], [9969, 10096, 10095], [9970, 9971, 10096], [9971, 10097, 10096], [9971, 9972, 10098], [9971, 10098, 10097], [9972, 9973, 10098], [9973, 10099, 10098], [9973, 9974, 10100], [9973, 10100, 10099], [9974, 9975, 10100], [9975, 10101, 10100], [9975, 9976, 10102], [9975, 10102, 10101], [9976, 9977, 10102], [9977, 10103, 10102], [9977, 9978, 10104], [9977, 10104, 10103], [9978, 9979, 10104], [9979, 10105, 10104], [9979, 9980, 10106], [9979, 10106, 10105], [9980, 9981, 10106], [9981, 10107, 10106], [9981, 9982, 10108], [9981, 10108, 10107], [9982, 9983, 10108], [9983, 10109, 10108], [9983, 9984, 10110], [9983, 10110, 10109], [9985, 9986, 10112], [9985, 10112, 10111], [9986, 9987, 10112], [9987, 10113, 10112], [9987, 9988, 10114], [9987, 10114, 10113], [9988, 9989, 10114], [9989, 10115, 10114], [9989, 9990, 10116], [9989, 10116, 10115], [9990, 9991, 10116], [9991, 10117, 10116], [9991, 9992, 10118], [9991, 10118, 10117], [9992, 9993, 10118], [9993, 10119, 10118], [9993, 9994, 10120], [9993, 10120, 10119], [9994, 9995, 10120], [9995, 10121, 10120], [9995, 9996, 10122], [9995, 10122, 10121], [9996, 9997, 10122], [9997, 10123, 10122], [9997, 9998, 10124], [9997, 10124, 10123], [9998, 9999, 10124], [9999, 10125, 10124], [9999, 10000, 10126], [9999, 10126, 10125], [10000, 10001, 10126], [10001, 10127, 10126], [10001, 10002, 10128], [10001, 10128, 10127], [10002, 10003, 10128], [10003, 10129, 10128], [10003, 10004, 10130], [10003, 10130, 10129], [10004, 10005, 10130], [10005, 10131, 10130], [10005, 10006, 10132], [10005, 10132, 10131], [10006, 10007, 10132], [10007, 10133, 10132], [10007, 10008, 10134], [10007, 10134, 10133], [10008, 10009, 10134], [10009, 10135, 10134], [10009, 10010, 10136], [10009, 10136, 10135], [10010, 10011, 10136], [10011, 10137, 10136], [10011, 10012, 10138], [10011, 10138, 10137], [10012, 10013, 10138], [10013, 10139, 10138], [10013, 10014, 10140], [10013, 10140, 10139], [10014, 10015, 10140], [10015, 10141, 10140], [10015, 10016, 10142], [10015, 10142, 10141], [10016, 10017, 10142], [10017, 10143, 10142], [10017, 10018, 10144], [10017, 10144, 10143], [10018, 10019, 10144], [10019, 10145, 10144], [10019, 10020, 10146], [10019, 10146, 10145], [10020, 10021, 10146], [10021, 10147, 10146], [10021, 10022, 10148], [10021, 10148, 10147], [10022, 10023, 10148], [10023, 10149, 10148], [10023, 10024, 10150], [10023, 10150, 10149], [10024, 10025, 10150], [10025, 10151, 10150], [10025, 10026, 10152], [10025, 10152, 10151], [10026, 10027, 10152], [10027, 10153, 10152], [10027, 10028, 10154], [10027, 10154, 10153], [10028, 10029, 10154], [10029, 10155, 10154], [10029, 10030, 10156], [10029, 10156, 10155], [10030, 10031, 10156], [10031, 10157, 10156], [10031, 10032, 10158], [10031, 10158, 10157], 
[10032, 10033, 10158], [10033, 10159, 10158], [10033, 10034, 10160], [10033, 10160, 10159], [10034, 10035, 10160], [10035, 10161, 10160], [10035, 10036, 10162], [10035, 10162, 10161], [10036, 10037, 10162], [10037, 10163, 10162], [10037, 10038, 10164], [10037, 10164, 10163], [10038, 10039, 10164], [10039, 10165, 10164], [10039, 10040, 10166], [10039, 10166, 10165], [10040, 10041, 10166], [10041, 10167, 10166], [10041, 10042, 10168], [10041, 10168, 10167], [10042, 10043, 10168], [10043, 10169, 10168], [10043, 10044, 10170], [10043, 10170, 10169], [10044, 10045, 10170], [10045, 10171, 10170], [10045, 10046, 10172], [10045, 10172, 10171], [10046, 10047, 10172], [10047, 10173, 10172], [10047, 10048, 10174], [10047, 10174, 10173], [10048, 10049, 10174], [10049, 10175, 10174], [10049, 10050, 10176], [10049, 10176, 10175], [10050, 10051, 10176], [10051, 10177, 10176], [10051, 10052, 10178], [10051, 10178, 10177], [10052, 10053, 10178], [10053, 10179, 10178], [10053, 10054, 10180], [10053, 10180, 10179], [10054, 10055, 10180], [10055, 10181, 10180], [10055, 10056, 10182], [10055, 10182, 10181], [10056, 10057, 10182], [10057, 10183, 10182], [10057, 10058, 10184], [10057, 10184, 10183], [10058, 10059, 10184], [10059, 10185, 10184], [10059, 10060, 10186], [10059, 10186, 10185], [10060, 10061, 10186], [10061, 10187, 10186], [10061, 10062, 10188], [10061, 10188, 10187], [10062, 10063, 10188], [10063, 10189, 10188], [10063, 10064, 10190], [10063, 10190, 10189], [10064, 10065, 10190], [10065, 10191, 10190], [10065, 10066, 10192], [10065, 10192, 10191], [10066, 10067, 10192], [10067, 10193, 10192], [10067, 10068, 10194], [10067, 10194, 10193], [10068, 10069, 10194], [10069, 10195, 10194], [10069, 10070, 10196], [10069, 10196, 10195], [10070, 10071, 10196], [10071, 10197, 10196], [10071, 10072, 10198], [10071, 10198, 10197], [10072, 10073, 10198], [10073, 10199, 10198], [10073, 10074, 10200], [10073, 10200, 10199], [10074, 10075, 10200], [10075, 10201, 10200], [10075, 10076, 10202], [10075, 10202, 10201], [10076, 10077, 10202], [10077, 10203, 10202], [10077, 10078, 10204], [10077, 10204, 10203], [10078, 10079, 10204], [10079, 10205, 10204], [10079, 10080, 10206], [10079, 10206, 10205], [10080, 10081, 10206], [10081, 10207, 10206], [10081, 10082, 10208], [10081, 10208, 10207], [10082, 10083, 10208], [10083, 10209, 10208], [10083, 10084, 10210], [10083, 10210, 10209], [10084, 10085, 10210], [10085, 10211, 10210], [10086, 10087, 10213], [10086, 10213, 10212], [10087, 10088, 10213], [10088, 10214, 10213], [10088, 10089, 10215], [10088, 10215, 10214], [10089, 10090, 10215], [10090, 10216, 10215], [10090, 10091, 10217], [10090, 10217, 10216], [10091, 10092, 10217], [10092, 10218, 10217], [10092, 10093, 10219], [10092, 10219, 10218], [10093, 10094, 10219], [10094, 10220, 10219], [10094, 10095, 10221], [10094, 10221, 10220], [10095, 10096, 10221], [10096, 10222, 10221], [10096, 10097, 10223], [10096, 10223, 10222], [10097, 10098, 10223], [10098, 10224, 10223], [10098, 10099, 10225], [10098, 10225, 10224], [10099, 10100, 10225], [10100, 10226, 10225], [10100, 10101, 10227], [10100, 10227, 10226], [10101, 10102, 10227], [10102, 10228, 10227], [10102, 10103, 10229], [10102, 10229, 10228], [10103, 10104, 10229], [10104, 10230, 10229], [10104, 10105, 10231], [10104, 10231, 10230], [10105, 10106, 10231], [10106, 10232, 10231], [10106, 10107, 10233], [10106, 10233, 10232], [10107, 10108, 10233], [10108, 10234, 10233], [10108, 10109, 10235], [10108, 10235, 10234], [10109, 10110, 10235], [10110, 10236, 10235], [10111, 
10112, 10237], [10112, 10238, 10237], [10112, 10113, 10239], [10112, 10239, 10238], [10113, 10114, 10239], [10114, 10240, 10239], [10114, 10115, 10241], [10114, 10241, 10240], [10115, 10116, 10241], [10116, 10242, 10241], [10116, 10117, 10243], [10116, 10243, 10242], [10117, 10118, 10243], [10118, 10244, 10243], [10118, 10119, 10245], [10118, 10245, 10244], [10119, 10120, 10245], [10120, 10246, 10245], [10120, 10121, 10247], [10120, 10247, 10246], [10121, 10122, 10247], [10122, 10248, 10247], [10122, 10123, 10249], [10122, 10249, 10248], [10123, 10124, 10249], [10124, 10250, 10249], [10124, 10125, 10251], [10124, 10251, 10250], [10125, 10126, 10251], [10126, 10252, 10251], [10126, 10127, 10253], [10126, 10253, 10252], [10127, 10128, 10253], [10128, 10254, 10253], [10128, 10129, 10255], [10128, 10255, 10254], [10129, 10130, 10255], [10130, 10256, 10255], [10130, 10131, 10257], [10130, 10257, 10256], [10131, 10132, 10257], [10132, 10258, 10257], [10132, 10133, 10259], [10132, 10259, 10258], [10133, 10134, 10259], [10134, 10260, 10259], [10134, 10135, 10261], [10134, 10261, 10260], [10135, 10136, 10261], [10136, 10262, 10261], [10136, 10137, 10263], [10136, 10263, 10262], [10137, 10138, 10263], [10138, 10264, 10263], [10138, 10139, 10265], [10138, 10265, 10264], [10139, 10140, 10265], [10140, 10266, 10265], [10140, 10141, 10267], [10140, 10267, 10266], [10141, 10142, 10267], [10142, 10268, 10267], [10142, 10143, 10269], [10142, 10269, 10268], [10143, 10144, 10269], [10144, 10270, 10269], [10144, 10145, 10271], [10144, 10271, 10270], [10145, 10146, 10271], [10146, 10272, 10271], [10146, 10147, 10273], [10146, 10273, 10272], [10147, 10148, 10273], [10148, 10274, 10273], [10148, 10149, 10275], [10148, 10275, 10274], [10149, 10150, 10275], [10150, 10276, 10275], [10150, 10151, 10277], [10150, 10277, 10276], [10151, 10152, 10277], [10152, 10278, 10277], [10152, 10153, 10279], [10152, 10279, 10278], [10153, 10154, 10279], [10154, 10280, 10279], [10154, 10155, 10281], [10154, 10281, 10280], [10155, 10156, 10281], [10156, 10282, 10281], [10156, 10157, 10283], [10156, 10283, 10282], [10157, 10158, 10283], [10158, 10284, 10283], [10158, 10159, 10285], [10158, 10285, 10284], [10159, 10160, 10285], [10160, 10286, 10285], [10160, 10161, 10287], [10160, 10287, 10286], [10161, 10162, 10287], [10162, 10288, 10287], [10162, 10163, 10289], [10162, 10289, 10288], [10163, 10164, 10289], [10164, 10290, 10289], [10164, 10165, 10291], [10164, 10291, 10290], [10165, 10166, 10291], [10166, 10292, 10291], [10166, 10167, 10293], [10166, 10293, 10292], [10167, 10168, 10293], [10168, 10294, 10293], [10168, 10169, 10295], [10168, 10295, 10294], [10169, 10170, 10295], [10170, 10296, 10295], [10170, 10171, 10297], [10170, 10297, 10296], [10171, 10172, 10297], [10172, 10298, 10297], [10172, 10173, 10299], [10172, 10299, 10298], [10173, 10174, 10299], [10174, 10300, 10299], [10174, 10175, 10301], [10174, 10301, 10300], [10175, 10176, 10301], [10176, 10302, 10301], [10176, 10177, 10303], [10176, 10303, 10302], [10177, 10178, 10303], [10178, 10304, 10303], [10178, 10179, 10305], [10178, 10305, 10304], [10179, 10180, 10305], [10180, 10306, 10305], [10180, 10181, 10307], [10180, 10307, 10306], [10181, 10182, 10307], [10182, 10308, 10307], [10182, 10183, 10309], [10182, 10309, 10308], [10183, 10184, 10309], [10184, 10310, 10309], [10184, 10185, 10311], [10184, 10311, 10310], [10185, 10186, 10311], [10186, 10312, 10311], [10186, 10187, 10313], [10186, 10313, 10312], [10187, 10188, 10313], [10188, 10314, 10313], [10188, 10189, 
10315], [10188, 10315, 10314], [10189, 10190, 10315], [10190, 10316, 10315], [10190, 10191, 10317], [10190, 10317, 10316], [10191, 10192, 10317], [10192, 10318, 10317], [10192, 10193, 10319], [10192, 10319, 10318], [10193, 10194, 10319], [10194, 10320, 10319], [10194, 10195, 10321], [10194, 10321, 10320], [10195, 10196, 10321], [10196, 10322, 10321], [10196, 10197, 10323], [10196, 10323, 10322], [10197, 10198, 10323], [10198, 10324, 10323], [10198, 10199, 10325], [10198, 10325, 10324], [10199, 10200, 10325], [10200, 10326, 10325], [10200, 10201, 10327], [10200, 10327, 10326], [10201, 10202, 10327], [10202, 10328, 10327], [10202, 10203, 10329], [10202, 10329, 10328], [10203, 10204, 10329], [10204, 10330, 10329], [10204, 10205, 10331], [10204, 10331, 10330], [10205, 10206, 10331], [10206, 10332, 10331], [10206, 10207, 10333], [10206, 10333, 10332], [10207, 10208, 10333], [10208, 10334, 10333], [10208, 10209, 10335], [10208, 10335, 10334], [10209, 10210, 10335], [10210, 10336, 10335], [10210, 10211, 10337], [10210, 10337, 10336], [10212, 10213, 10341], [10213, 10342, 10341], [10213, 10214, 10343], [10213, 10343, 10342], [10214, 10215, 10343], [10215, 10344, 10343], [10215, 10216, 10345], [10215, 10345, 10344], [10216, 10217, 10345], [10217, 10346, 10345], [10217, 10218, 10347], [10217, 10347, 10346], [10218, 10219, 10347], [10219, 10348, 10347], [10219, 10220, 10349], [10219, 10349, 10348], [10220, 10221, 10349], [10221, 10350, 10349], [10221, 10222, 10351], [10221, 10351, 10350], [10222, 10223, 10351], [10223, 10352, 10351], [10223, 10224, 10353], [10223, 10353, 10352], [10224, 10225, 10353], [10225, 10354, 10353], [10225, 10226, 10355], [10225, 10355, 10354], [10226, 10227, 10355], [10227, 10356, 10355], [10227, 10228, 10357], [10227, 10357, 10356], [10228, 10229, 10357], [10229, 10358, 10357], [10229, 10230, 10359], [10229, 10359, 10358], [10230, 10231, 10359], [10231, 10360, 10359], [10231, 10232, 10361], [10231, 10361, 10360], [10232, 10233, 10361], [10233, 10362, 10361], [10233, 10234, 10363], [10233, 10363, 10362], [10234, 10235, 10363], [10235, 10364, 10363], [10235, 10236, 10365], [10235, 10365, 10364], [10237, 10238, 10367], [10237, 10367, 10366], [10238, 10239, 10367], [10239, 10368, 10367], [10239, 10240, 10369], [10239, 10369, 10368], [10240, 10241, 10369], [10241, 10370, 10369], [10241, 10242, 10371], [10241, 10371, 10370], [10242, 10243, 10371], [10243, 10372, 10371], [10243, 10244, 10373], [10243, 10373, 10372], [10244, 10245, 10373], [10245, 10374, 10373], [10245, 10246, 10375], [10245, 10375, 10374], [10246, 10247, 10375], [10247, 10376, 10375], [10247, 10248, 10377], [10247, 10377, 10376], [10248, 10249, 10377], [10249, 10378, 10377], [10249, 10250, 10379], [10249, 10379, 10378], [10250, 10251, 10379], [10251, 10380, 10379], [10251, 10252, 10381], [10251, 10381, 10380], [10252, 10253, 10381], [10253, 10382, 10381], [10253, 10254, 10383], [10253, 10383, 10382], [10254, 10255, 10383], [10255, 10384, 10383], [10255, 10256, 10385], [10255, 10385, 10384], [10256, 10257, 10385], [10257, 10386, 10385], [10257, 10258, 10387], [10257, 10387, 10386], [10258, 10259, 10387], [10259, 10388, 10387], [10259, 10260, 10389], [10259, 10389, 10388], [10260, 10261, 10389], [10261, 10390, 10389], [10261, 10262, 10391], [10261, 10391, 10390], [10262, 10263, 10391], [10263, 10392, 10391], [10263, 10264, 10393], [10263, 10393, 10392], [10264, 10265, 10393], [10265, 10394, 10393], [10265, 10266, 10395], [10265, 10395, 10394], [10266, 10267, 10395], [10267, 10396, 10395], [10267, 10268, 10397], 
[10267, 10397, 10396], [10268, 10269, 10397], [10269, 10398, 10397], [10269, 10270, 10399], [10269, 10399, 10398], [10270, 10271, 10399], [10271, 10400, 10399], [10271, 10272, 10401], [10271, 10401, 10400], [10272, 10273, 10401], [10273, 10402, 10401], [10273, 10274, 10403], [10273, 10403, 10402], [10274, 10275, 10403], [10275, 10404, 10403], [10275, 10276, 10405], [10275, 10405, 10404], [10276, 10277, 10405], [10277, 10406, 10405], [10277, 10278, 10407], [10277, 10407, 10406], [10278, 10279, 10407], [10279, 10408, 10407], [10279, 10280, 10409], [10279, 10409, 10408], [10280, 10281, 10409], [10281, 10410, 10409], [10281, 10282, 10411], [10281, 10411, 10410], [10282, 10283, 10411], [10283, 10412, 10411], [10283, 10284, 10413], [10283, 10413, 10412], [10284, 10285, 10413], [10285, 10414, 10413], [10285, 10286, 10415], [10285, 10415, 10414], [10286, 10287, 10415], [10287, 10416, 10415], [10287, 10288, 10417], [10287, 10417, 10416], [10288, 10289, 10417], [10289, 10418, 10417], [10289, 10290, 10419], [10289, 10419, 10418], [10290, 10291, 10419], [10291, 10420, 10419], [10291, 10292, 10421], [10291, 10421, 10420], [10292, 10293, 10421], [10293, 10422, 10421], [10293, 10294, 10423], [10293, 10423, 10422], [10294, 10295, 10423], [10295, 10424, 10423], [10295, 10296, 10425], [10295, 10425, 10424], [10296, 10297, 10425], [10297, 10426, 10425], [10297, 10298, 10427], [10297, 10427, 10426], [10298, 10299, 10427], [10299, 10428, 10427], [10299, 10300, 10429], [10299, 10429, 10428], [10300, 10301, 10429], [10301, 10430, 10429], [10301, 10302, 10431], [10301, 10431, 10430], [10302, 10303, 10431], [10303, 10432, 10431], [10303, 10304, 10433], [10303, 10433, 10432], [10304, 10305, 10433], [10305, 10434, 10433], [10305, 10306, 10435], [10305, 10435, 10434], [10306, 10307, 10435], [10307, 10436, 10435], [10307, 10308, 10437], [10307, 10437, 10436], [10308, 10309, 10437], [10309, 10438, 10437], [10309, 10310, 10439], [10309, 10439, 10438], [10310, 10311, 10439], [10311, 10440, 10439], [10311, 10312, 10441], [10311, 10441, 10440], [10312, 10313, 10441], [10313, 10442, 10441], [10313, 10314, 10443], [10313, 10443, 10442], [10314, 10315, 10443], [10315, 10444, 10443], [10315, 10316, 10445], [10315, 10445, 10444], [10316, 10317, 10445], [10317, 10446, 10445], [10317, 10318, 10447], [10317, 10447, 10446], [10318, 10319, 10447], [10319, 10448, 10447], [10319, 10320, 10449], [10319, 10449, 10448], [10320, 10321, 10449], [10321, 10450, 10449], [10321, 10322, 10451], [10321, 10451, 10450], [10322, 10323, 10451], [10323, 10452, 10451], [10323, 10324, 10453], [10323, 10453, 10452], [10324, 10325, 10453], [10325, 10454, 10453], [10325, 10326, 10455], [10325, 10455, 10454], [10326, 10327, 10455], [10327, 10456, 10455], [10327, 10328, 10457], [10327, 10457, 10456], [10328, 10329, 10457], [10329, 10458, 10457], [10329, 10330, 10459], [10329, 10459, 10458], [10330, 10331, 10459], [10331, 10460, 10459], [10331, 10332, 10461], [10331, 10461, 10460], [10332, 10333, 10461], [10333, 10462, 10461], [10333, 10334, 10463], [10333, 10463, 10462], [10334, 10335, 10463], [10335, 10464, 10463], [10335, 10336, 10465], [10335, 10465, 10464], [10336, 10337, 10465], [10337, 10466, 10465], [10337, 10338, 10467], [10337, 10467, 10466], [10338, 10339, 10467], [10339, 10468, 10467], [10339, 10340, 10469], [10339, 10469, 10468], [10340, 10341, 10469], [10341, 10470, 10469], [10341, 10342, 10471], [10341, 10471, 10470], [10342, 10343, 10471], [10343, 10472, 10471], [10343, 10344, 10473], [10343, 10473, 10472], [10344, 10345, 10473], [10345, 
10474, 10473], [10345, 10346, 10475], [10345, 10475, 10474], [10346, 10347, 10475], [10347, 10476, 10475], [10347, 10348, 10477], [10347, 10477, 10476], [10348, 10349, 10477], [10349, 10478, 10477], [10349, 10350, 10479], [10349, 10479, 10478], [10350, 10351, 10479], [10351, 10480, 10479], [10351, 10352, 10481], [10351, 10481, 10480], [10352, 10353, 10481], [10353, 10482, 10481], [10353, 10354, 10483], [10353, 10483, 10482], [10354, 10355, 10483], [10355, 10484, 10483], [10355, 10356, 10485], [10355, 10485, 10484], [10356, 10357, 10485], [10357, 10486, 10485], [10357, 10358, 10487], [10357, 10487, 10486], [10358, 10359, 10487], [10359, 10488, 10487], [10359, 10360, 10489], [10359, 10489, 10488], [10360, 10361, 10489], [10361, 10490, 10489], [10361, 10362, 10491], [10361, 10491, 10490], [10362, 10363, 10491], [10363, 10492, 10491], [10363, 10364, 10493], [10363, 10493, 10492], [10364, 10365, 10493], [10365, 10494, 10493], [10366, 10367, 10495], [10367, 10496, 10495], [10367, 10368, 10497], [10367, 10497, 10496], [10368, 10369, 10497], [10369, 10498, 10497], [10369, 10370, 10499], [10369, 10499, 10498], [10370, 10371, 10499], [10371, 10500, 10499], [10371, 10372, 10501], [10371, 10501, 10500], [10372, 10373, 10501], [10373, 10502, 10501], [10373, 10374, 10503], [10373, 10503, 10502], [10374, 10375, 10503], [10375, 10504, 10503], [10375, 10376, 10505], [10375, 10505, 10504], [10376, 10377, 10505], [10377, 10506, 10505], [10377, 10378, 10507], [10377, 10507, 10506], [10378, 10379, 10507], [10379, 10508, 10507], [10379, 10380, 10509], [10379, 10509, 10508], [10380, 10381, 10509], [10381, 10510, 10509], [10381, 10382, 10511], [10381, 10511, 10510], [10382, 10383, 10511], [10383, 10512, 10511], [10383, 10384, 10513], [10383, 10513, 10512], [10384, 10385, 10513], [10385, 10514, 10513], [10385, 10386, 10515], [10385, 10515, 10514], [10386, 10387, 10515], [10387, 10516, 10515], [10387, 10388, 10517], [10387, 10517, 10516], [10388, 10389, 10517], [10389, 10518, 10517], [10389, 10390, 10519], [10389, 10519, 10518], [10390, 10391, 10519], [10391, 10520, 10519], [10391, 10392, 10521], [10391, 10521, 10520], [10392, 10393, 10521], [10393, 10522, 10521], [10393, 10394, 10523], [10393, 10523, 10522], [10394, 10395, 10523], [10395, 10524, 10523], [10395, 10396, 10525], [10395, 10525, 10524], [10396, 10397, 10525], [10397, 10526, 10525], [10397, 10398, 10527], [10397, 10527, 10526], [10398, 10399, 10527], [10399, 10528, 10527], [10399, 10400, 10529], [10399, 10529, 10528], [10400, 10401, 10529], [10401, 10530, 10529], [10401, 10402, 10531], [10401, 10531, 10530], [10402, 10403, 10531], [10403, 10532, 10531], [10403, 10404, 10533], [10403, 10533, 10532], [10404, 10405, 10533], [10405, 10534, 10533], [10405, 10406, 10535], [10405, 10535, 10534], [10406, 10407, 10535], [10407, 10536, 10535], [10407, 10408, 10537], [10407, 10537, 10536], [10408, 10409, 10537], [10409, 10538, 10537], [10409, 10410, 10539], [10409, 10539, 10538], [10410, 10411, 10539], [10411, 10540, 10539], [10411, 10412, 10541], [10411, 10541, 10540], [10412, 10413, 10541], [10413, 10542, 10541], [10413, 10414, 10543], [10413, 10543, 10542], [10414, 10415, 10543], [10415, 10544, 10543], [10415, 10416, 10545], [10415, 10545, 10544], [10416, 10417, 10545], [10417, 10546, 10545], [10417, 10418, 10547], [10417, 10547, 10546], [10418, 10419, 10547], [10419, 10548, 10547], [10419, 10420, 10549], [10419, 10549, 10548], [10420, 10421, 10549], [10421, 10550, 10549], [10421, 10422, 10551], [10421, 10551, 10550], [10422, 10423, 10551], [10423, 10552, 
10551], [10423, 10424, 10553], [10423, 10553, 10552], [10424, 10425, 10553], [10425, 10554, 10553], [10425, 10426, 10555], [10425, 10555, 10554], [10426, 10427, 10555], [10427, 10556, 10555], [10427, 10428, 10557], [10427, 10557, 10556], [10428, 10429, 10557], [10429, 10558, 10557], [10429, 10430, 10559], [10429, 10559, 10558], [10430, 10431, 10559], [10431, 10560, 10559], [10431, 10432, 10561], [10431, 10561, 10560], [10432, 10433, 10561], [10433, 10562, 10561], [10433, 10434, 10563], [10433, 10563, 10562], [10434, 10435, 10563], [10435, 10564, 10563], [10435, 10436, 10565], [10435, 10565, 10564], [10436, 10437, 10565], [10437, 10566, 10565], [10437, 10438, 10567], [10437, 10567, 10566], [10438, 10439, 10567], [10439, 10568, 10567], [10439, 10440, 10569], [10439, 10569, 10568], [10440, 10441, 10569], [10441, 10570, 10569], [10441, 10442, 10571], [10441, 10571, 10570], [10442, 10443, 10571], [10443, 10572, 10571], [10443, 10444, 10573], [10443, 10573, 10572], [10444, 10445, 10573], [10445, 10574, 10573], [10445, 10446, 10575], [10445, 10575, 10574], [10446, 10447, 10575], [10447, 10576, 10575], [10447, 10448, 10577], [10447, 10577, 10576], [10448, 10449, 10577], [10449, 10578, 10577], [10449, 10450, 10579], [10449, 10579, 10578], [10450, 10451, 10579], [10451, 10580, 10579], [10451, 10452, 10581], [10451, 10581, 10580], [10452, 10453, 10581], [10453, 10582, 10581], [10453, 10454, 10583], [10453, 10583, 10582], [10454, 10455, 10583], [10455, 10584, 10583], [10455, 10456, 10585], [10455, 10585, 10584], [10456, 10457, 10585], [10457, 10586, 10585], [10457, 10458, 10587], [10457, 10587, 10586], [10458, 10459, 10587], [10459, 10588, 10587], [10459, 10460, 10589], [10459, 10589, 10588], [10460, 10461, 10589], [10461, 10590, 10589], [10461, 10462, 10591], [10461, 10591, 10590], [10462, 10463, 10591], [10463, 10592, 10591], [10463, 10464, 10593], [10463, 10593, 10592], [10464, 10465, 10593], [10465, 10594, 10593], [10465, 10466, 10595], [10465, 10595, 10594], [10466, 10467, 10595], [10467, 10596, 10595], [10467, 10468, 10597], [10467, 10597, 10596], [10468, 10469, 10597], [10469, 10598, 10597], [10469, 10470, 10599], [10469, 10599, 10598], [10470, 10471, 10599], [10471, 10600, 10599], [10471, 10472, 10601], [10471, 10601, 10600], [10472, 10473, 10601], [10473, 10602, 10601], [10473, 10474, 10603], [10473, 10603, 10602], [10474, 10475, 10603], [10475, 10604, 10603], [10475, 10476, 10605], [10475, 10605, 10604], [10476, 10477, 10605], [10477, 10606, 10605], [10477, 10478, 10607], [10477, 10607, 10606], [10478, 10479, 10607], [10479, 10608, 10607], [10479, 10480, 10609], [10479, 10609, 10608], [10480, 10481, 10609], [10481, 10610, 10609], [10481, 10482, 10611], [10481, 10611, 10610], [10482, 10483, 10611], [10483, 10612, 10611], [10483, 10484, 10613], [10483, 10613, 10612], [10484, 10485, 10613], [10485, 10614, 10613], [10485, 10486, 10615], [10485, 10615, 10614], [10486, 10487, 10615], [10487, 10616, 10615], [10487, 10488, 10617], [10487, 10617, 10616], [10488, 10489, 10617], [10489, 10618, 10617], [10489, 10490, 10619], [10489, 10619, 10618], [10490, 10491, 10619], [10491, 10620, 10619], [10491, 10492, 10621], [10491, 10621, 10620], [10492, 10493, 10621], [10493, 10622, 10621], [10493, 10494, 10623], [10493, 10623, 10622], [10495, 10496, 10625], [10495, 10625, 10624], [10496, 10497, 10625], [10497, 10626, 10625], [10497, 10498, 10627], [10497, 10627, 10626], [10498, 10499, 10627], [10499, 10628, 10627], [10499, 10500, 10629], [10499, 10629, 10628], [10500, 10501, 10629], [10501, 10630, 10629], 
[10501, 10502, 10631], [10501, 10631, 10630], [10502, 10503, 10631], [10503, 10632, 10631], [10503, 10504, 10633], [10503, 10633, 10632], [10504, 10505, 10633], [10505, 10634, 10633], [10505, 10506, 10635], [10505, 10635, 10634], [10506, 10507, 10635], [10507, 10636, 10635], [10507, 10508, 10637], [10507, 10637, 10636], [10508, 10509, 10637], [10509, 10638, 10637], [10509, 10510, 10639], [10509, 10639, 10638], [10510, 10511, 10639], [10511, 10640, 10639], [10511, 10512, 10641], [10511, 10641, 10640], [10512, 10513, 10641], [10513, 10642, 10641], [10513, 10514, 10643], [10513, 10643, 10642], [10514, 10515, 10643], [10515, 10644, 10643], [10515, 10516, 10645], [10515, 10645, 10644], [10516, 10517, 10645], [10517, 10646, 10645], [10517, 10518, 10647], [10517, 10647, 10646], [10518, 10519, 10647], [10519, 10648, 10647], [10519, 10520, 10649], [10519, 10649, 10648], [10520, 10521, 10649], [10521, 10650, 10649], [10521, 10522, 10651], [10521, 10651, 10650], [10522, 10523, 10651], [10523, 10652, 10651], [10523, 10524, 10653], [10523, 10653, 10652], [10524, 10525, 10653], [10525, 10654, 10653], [10525, 10526, 10655], [10525, 10655, 10654], [10526, 10527, 10655], [10527, 10656, 10655], [10527, 10528, 10657], [10527, 10657, 10656], [10528, 10529, 10657], [10529, 10658, 10657], [10529, 10530, 10659], [10529, 10659, 10658], [10530, 10531, 10659], [10531, 10660, 10659], [10531, 10532, 10661], [10531, 10661, 10660], [10532, 10533, 10661], [10533, 10662, 10661], [10533, 10534, 10663], [10533, 10663, 10662], [10534, 10535, 10663], [10535, 10664, 10663], [10535, 10536, 10665], [10535, 10665, 10664], [10536, 10537, 10665], [10537, 10666, 10665], [10537, 10538, 10667], [10537, 10667, 10666], [10538, 10539, 10667], [10539, 10668, 10667], [10539, 10540, 10669], [10539, 10669, 10668], [10540, 10541, 10669], [10541, 10670, 10669], [10541, 10542, 10671], [10541, 10671, 10670], [10542, 10543, 10671], [10543, 10672, 10671], [10543, 10544, 10673], [10543, 10673, 10672], [10544, 10545, 10673], [10545, 10674, 10673], [10545, 10546, 10675], [10545, 10675, 10674], [10546, 10547, 10675], [10547, 10676, 10675], [10547, 10548, 10677], [10547, 10677, 10676], [10548, 10549, 10677], [10549, 10678, 10677], [10549, 10550, 10679], [10549, 10679, 10678], [10550, 10551, 10679], [10551, 10680, 10679], [10551, 10552, 10681], [10551, 10681, 10680], [10552, 10553, 10681], [10553, 10682, 10681], [10553, 10554, 10683], [10553, 10683, 10682], [10554, 10555, 10683], [10555, 10684, 10683], [10555, 10556, 10685], [10555, 10685, 10684], [10556, 10557, 10685], [10557, 10686, 10685], [10557, 10558, 10687], [10557, 10687, 10686], [10558, 10559, 10687], [10559, 10688, 10687], [10559, 10560, 10689], [10559, 10689, 10688], [10560, 10561, 10689], [10561, 10690, 10689], [10561, 10562, 10691], [10561, 10691, 10690], [10562, 10563, 10691], [10563, 10692, 10691], [10563, 10564, 10693], [10563, 10693, 10692], [10564, 10565, 10693], [10565, 10694, 10693], [10565, 10566, 10695], [10565, 10695, 10694], [10566, 10567, 10695], [10567, 10696, 10695], [10567, 10568, 10697], [10567, 10697, 10696], [10568, 10569, 10697], [10569, 10698, 10697], [10569, 10570, 10699], [10569, 10699, 10698], [10570, 10571, 10699], [10571, 10700, 10699], [10571, 10572, 10701], [10571, 10701, 10700], [10572, 10573, 10701], [10573, 10702, 10701], [10573, 10574, 10703], [10573, 10703, 10702], [10574, 10575, 10703], [10575, 10704, 10703], [10575, 10576, 10705], [10575, 10705, 10704], [10576, 10577, 10705], [10577, 10706, 10705], [10577, 10578, 10707], [10577, 10707, 10706], [10578, 
10579, 10707], [10579, 10708, 10707], [10579, 10580, 10709], [10579, 10709, 10708], [10580, 10581, 10709], [10581, 10710, 10709], [10581, 10582, 10711], [10581, 10711, 10710], [10582, 10583, 10711], [10583, 10712, 10711], [10583, 10584, 10713], [10583, 10713, 10712], [10584, 10585, 10713], [10585, 10714, 10713], [10585, 10586, 10715], [10585, 10715, 10714], [10586, 10587, 10715], [10587, 10716, 10715], [10587, 10588, 10717], [10587, 10717, 10716], [10588, 10589, 10717], [10589, 10718, 10717], [10589, 10590, 10719], [10589, 10719, 10718], [10590, 10591, 10719], [10591, 10720, 10719], [10591, 10592, 10721], [10591, 10721, 10720], [10592, 10593, 10721], [10593, 10722, 10721], [10593, 10594, 10723], [10593, 10723, 10722], [10594, 10595, 10723], [10595, 10724, 10723], [10595, 10596, 10725], [10595, 10725, 10724], [10596, 10597, 10725], [10597, 10726, 10725], [10597, 10598, 10727], [10597, 10727, 10726], [10598, 10599, 10727], [10599, 10728, 10727], [10599, 10600, 10729], [10599, 10729, 10728], [10600, 10601, 10729], [10601, 10730, 10729], [10601, 10602, 10731], [10601, 10731, 10730], [10602, 10603, 10731], [10603, 10732, 10731], [10603, 10604, 10733], [10603, 10733, 10732], [10604, 10605, 10733], [10605, 10734, 10733], [10605, 10606, 10735], [10605, 10735, 10734], [10606, 10607, 10735], [10607, 10736, 10735], [10607, 10608, 10737], [10607, 10737, 10736], [10608, 10609, 10737], [10609, 10738, 10737], [10609, 10610, 10739], [10609, 10739, 10738], [10610, 10611, 10739], [10611, 10740, 10739], [10611, 10612, 10741], [10611, 10741, 10740], [10612, 10613, 10741], [10613, 10742, 10741], [10613, 10614, 10743], [10613, 10743, 10742], [10614, 10615, 10743], [10615, 10744, 10743], [10615, 10616, 10745], [10615, 10745, 10744], [10616, 10617, 10745], [10617, 10746, 10745], [10617, 10618, 10747], [10617, 10747, 10746], [10618, 10619, 10747], [10619, 10748, 10747], [10619, 10620, 10749], [10619, 10749, 10748], [10620, 10621, 10749], [10621, 10750, 10749], [10621, 10622, 10751], [10621, 10751, 10750], [10622, 10623, 10751], [10623, 10752, 10751], [10624, 10625, 10753], [10625, 10754, 10753], [10625, 10626, 10755], [10625, 10755, 10754], [10626, 10627, 10755], [10627, 10756, 10755], [10627, 10628, 10757], [10627, 10757, 10756], [10628, 10629, 10757], [10629, 10758, 10757], [10629, 10630, 10759], [10629, 10759, 10758], [10630, 10631, 10759], [10631, 10760, 10759], [10631, 10632, 10761], [10631, 10761, 10760], [10632, 10633, 10761], [10633, 10762, 10761], [10633, 10634, 10763], [10633, 10763, 10762], [10634, 10635, 10763], [10635, 10764, 10763], [10635, 10636, 10765], [10635, 10765, 10764], [10636, 10637, 10765], [10637, 10766, 10765], [10637, 10638, 10767], [10637, 10767, 10766], [10638, 10639, 10767], [10639, 10768, 10767], [10639, 10640, 10769], [10639, 10769, 10768], [10640, 10641, 10769], [10641, 10770, 10769], [10641, 10642, 10771], [10641, 10771, 10770], [10642, 10643, 10771], [10643, 10772, 10771], [10643, 10644, 10773], [10643, 10773, 10772], [10644, 10645, 10773], [10645, 10774, 10773], [10645, 10646, 10775], [10645, 10775, 10774], [10646, 10647, 10775], [10647, 10776, 10775], [10647, 10648, 10777], [10647, 10777, 10776], [10648, 10649, 10777], [10649, 10778, 10777], [10649, 10650, 10779], [10649, 10779, 10778], [10650, 10651, 10779], [10651, 10780, 10779], [10651, 10652, 10781], [10651, 10781, 10780], [10652, 10653, 10781], [10653, 10782, 10781], [10653, 10654, 10783], [10653, 10783, 10782], [10654, 10655, 10783], [10655, 10784, 10783], [10655, 10656, 10785], [10655, 10785, 10784], [10656, 10657, 
10785], [10657, 10786, 10785], [10657, 10658, 10787], [10657, 10787, 10786], [10658, 10659, 10787], [10659, 10788, 10787], [10659, 10660, 10789], [10659, 10789, 10788], [10660, 10661, 10789], [10661, 10790, 10789], [10661, 10662, 10791], [10661, 10791, 10790], [10662, 10663, 10791], [10663, 10792, 10791], [10663, 10664, 10793], [10663, 10793, 10792], [10664, 10665, 10793], [10665, 10794, 10793], [10665, 10666, 10795], [10665, 10795, 10794], [10666, 10667, 10795], [10667, 10796, 10795], [10667, 10668, 10797], [10667, 10797, 10796], [10668, 10669, 10797], [10669, 10798, 10797], [10669, 10670, 10799], [10669, 10799, 10798], [10670, 10671, 10799], [10671, 10800, 10799], [10671, 10672, 10801], [10671, 10801, 10800], [10672, 10673, 10801], [10673, 10802, 10801], [10673, 10674, 10803], [10673, 10803, 10802], [10674, 10675, 10803], [10675, 10804, 10803], [10675, 10676, 10805], [10675, 10805, 10804], [10676, 10677, 10805], [10677, 10806, 10805], [10677, 10678, 10807], [10677, 10807, 10806], [10678, 10679, 10807], [10679, 10808, 10807], [10679, 10680, 10809], [10679, 10809, 10808], [10680, 10681, 10809], [10681, 10810, 10809], [10681, 10682, 10811], [10681, 10811, 10810], [10682, 10683, 10811], [10683, 10812, 10811], [10683, 10684, 10813], [10683, 10813, 10812], [10684, 10685, 10813], [10685, 10814, 10813], [10685, 10686, 10815], [10685, 10815, 10814], [10686, 10687, 10815], [10687, 10816, 10815], [10687, 10688, 10817], [10687, 10817, 10816], [10688, 10689, 10817], [10689, 10818, 10817], [10689, 10690, 10819], [10689, 10819, 10818], [10690, 10691, 10819], [10691, 10820, 10819], [10691, 10692, 10821], [10691, 10821, 10820], [10692, 10693, 10821], [10693, 10822, 10821], [10693, 10694, 10823], [10693, 10823, 10822], [10694, 10695, 10823], [10695, 10824, 10823], [10695, 10696, 10825], [10695, 10825, 10824], [10696, 10697, 10825], [10697, 10826, 10825], [10697, 10698, 10827], [10697, 10827, 10826], [10698, 10699, 10827], [10699, 10828, 10827], [10699, 10700, 10829], [10699, 10829, 10828], [10700, 10701, 10829], [10701, 10830, 10829], [10701, 10702, 10831], [10701, 10831, 10830], [10702, 10703, 10831], [10703, 10832, 10831], [10703, 10704, 10833], [10703, 10833, 10832], [10704, 10705, 10833], [10705, 10834, 10833], [10705, 10706, 10835], [10705, 10835, 10834], [10706, 10707, 10835], [10707, 10836, 10835], [10707, 10708, 10837], [10707, 10837, 10836], [10708, 10709, 10837], [10709, 10838, 10837], [10709, 10710, 10839], [10709, 10839, 10838], [10710, 10711, 10839], [10711, 10840, 10839], [10711, 10712, 10841], [10711, 10841, 10840], [10712, 10713, 10841], [10713, 10842, 10841], [10713, 10714, 10843], [10713, 10843, 10842], [10714, 10715, 10843], [10715, 10844, 10843], [10715, 10716, 10845], [10715, 10845, 10844], [10716, 10717, 10845], [10717, 10846, 10845], [10717, 10718, 10847], [10717, 10847, 10846], [10718, 10719, 10847], [10719, 10848, 10847], [10719, 10720, 10849], [10719, 10849, 10848], [10720, 10721, 10849], [10721, 10850, 10849], [10721, 10722, 10851], [10721, 10851, 10850], [10722, 10723, 10851], [10723, 10852, 10851], [10723, 10724, 10853], [10723, 10853, 10852], [10724, 10725, 10853], [10725, 10854, 10853], [10725, 10726, 10855], [10725, 10855, 10854], [10726, 10727, 10855], [10727, 10856, 10855], [10727, 10728, 10857], [10727, 10857, 10856], [10728, 10729, 10857], [10729, 10858, 10857], [10729, 10730, 10859], [10729, 10859, 10858], [10730, 10731, 10859], [10731, 10860, 10859], [10731, 10732, 10861], [10731, 10861, 10860], [10732, 10733, 10861], [10733, 10862, 10861], [10733, 10734, 10863], 
[10733, 10863, 10862], [10734, 10735, 10863], [10735, 10864, 10863], [10735, 10736, 10865], [10735, 10865, 10864], [10736, 10737, 10865], [10737, 10866, 10865], [10737, 10738, 10867], [10737, 10867, 10866], [10738, 10739, 10867], [10739, 10868, 10867], [10739, 10740, 10869], [10739, 10869, 10868], [10740, 10741, 10869], [10741, 10870, 10869], [10741, 10742, 10871], [10741, 10871, 10870], [10742, 10743, 10871], [10743, 10872, 10871], [10743, 10744, 10873], [10743, 10873, 10872], [10744, 10745, 10873], [10745, 10874, 10873], [10745, 10746, 10875], [10745, 10875, 10874], [10746, 10747, 10875], [10747, 10876, 10875], [10747, 10748, 10877], [10747, 10877, 10876], [10748, 10749, 10877], [10749, 10878, 10877], [10749, 10750, 10879], [10749, 10879, 10878], [10750, 10751, 10879], [10751, 10880, 10879], [10751, 10752, 10881], [10751, 10881, 10880], [10753, 10754, 10883], [10753, 10883, 10882], [10754, 10755, 10883], [10755, 10884, 10883], [10755, 10756, 10885], [10755, 10885, 10884], [10756, 10757, 10885], [10757, 10886, 10885], [10757, 10758, 10887], [10757, 10887, 10886], [10758, 10759, 10887], [10759, 10888, 10887], [10759, 10760, 10889], [10759, 10889, 10888], [10760, 10761, 10889], [10761, 10890, 10889], [10761, 10762, 10891], [10761, 10891, 10890], [10762, 10763, 10891], [10763, 10892, 10891], [10763, 10764, 10893], [10763, 10893, 10892], [10764, 10765, 10893], [10765, 10894, 10893], [10765, 10766, 10895], [10765, 10895, 10894], [10766, 10767, 10895], [10767, 10896, 10895], [10767, 10768, 10897], [10767, 10897, 10896], [10768, 10769, 10897], [10769, 10898, 10897], [10769, 10770, 10899], [10769, 10899, 10898], [10770, 10771, 10899], [10771, 10900, 10899], [10771, 10772, 10901], [10771, 10901, 10900], [10772, 10773, 10901], [10773, 10902, 10901], [10773, 10774, 10903], [10773, 10903, 10902], [10774, 10775, 10903], [10775, 10904, 10903], [10775, 10776, 10905], [10775, 10905, 10904], [10776, 10777, 10905], [10777, 10906, 10905], [10777, 10778, 10907], [10777, 10907, 10906], [10778, 10779, 10907], [10779, 10908, 10907], [10779, 10780, 10909], [10779, 10909, 10908], [10780, 10781, 10909], [10781, 10910, 10909], [10781, 10782, 10911], [10781, 10911, 10910], [10782, 10783, 10911], [10783, 10912, 10911], [10783, 10784, 10913], [10783, 10913, 10912], [10784, 10785, 10913], [10785, 10914, 10913], [10785, 10786, 10915], [10785, 10915, 10914], [10786, 10787, 10915], [10787, 10916, 10915], [10787, 10788, 10917], [10787, 10917, 10916], [10788, 10789, 10917], [10789, 10918, 10917], [10789, 10790, 10919], [10789, 10919, 10918], [10790, 10791, 10919], [10791, 10920, 10919], [10791, 10792, 10921], [10791, 10921, 10920], [10792, 10793, 10921], [10793, 10922, 10921], [10793, 10794, 10923], [10793, 10923, 10922], [10794, 10795, 10923], [10795, 10924, 10923], [10795, 10796, 10925], [10795, 10925, 10924], [10796, 10797, 10925], [10797, 10926, 10925], [10797, 10798, 10927], [10797, 10927, 10926], [10798, 10799, 10927], [10799, 10928, 10927], [10799, 10800, 10929], [10799, 10929, 10928], [10800, 10801, 10929], [10801, 10930, 10929], [10801, 10802, 10931], [10801, 10931, 10930], [10802, 10803, 10931], [10803, 10932, 10931], [10803, 10804, 10933], [10803, 10933, 10932], [10804, 10805, 10933], [10805, 10934, 10933], [10805, 10806, 10935], [10805, 10935, 10934], [10806, 10807, 10935], [10807, 10936, 10935], [10807, 10808, 10937], [10807, 10937, 10936], [10808, 10809, 10937], [10809, 10938, 10937], [10809, 10810, 10939], [10809, 10939, 10938], [10810, 10811, 10939], [10811, 10940, 10939], [10811, 10812, 10941], [10811, 
10941, 10940], [10812, 10813, 10941], [10813, 10942, 10941], [10813, 10814, 10943], [10813, 10943, 10942], [10814, 10815, 10943], [10815, 10944, 10943], [10815, 10816, 10945], [10815, 10945, 10944], [10816, 10817, 10945], [10817, 10946, 10945], [10817, 10818, 10947], [10817, 10947, 10946], [10818, 10819, 10947], [10819, 10948, 10947], [10819, 10820, 10949], [10819, 10949, 10948], [10820, 10821, 10949], [10821, 10950, 10949], [10821, 10822, 10951], [10821, 10951, 10950], [10822, 10823, 10951], [10823, 10952, 10951], [10823, 10824, 10953], [10823, 10953, 10952], [10824, 10825, 10953], [10825, 10954, 10953], [10825, 10826, 10955], [10825, 10955, 10954], [10826, 10827, 10955], [10827, 10956, 10955], [10827, 10828, 10957], [10827, 10957, 10956], [10828, 10829, 10957], [10829, 10958, 10957], [10829, 10830, 10959], [10829, 10959, 10958], [10830, 10831, 10959], [10831, 10960, 10959], [10831, 10832, 10961], [10831, 10961, 10960], [10832, 10833, 10961], [10833, 10962, 10961], [10833, 10834, 10963], [10833, 10963, 10962], [10834, 10835, 10963], [10835, 10964, 10963], [10835, 10836, 10965], [10835, 10965, 10964], [10836, 10837, 10965], [10837, 10966, 10965], [10837, 10838, 10967], [10837, 10967, 10966], [10838, 10839, 10967], [10839, 10968, 10967], [10839, 10840, 10969], [10839, 10969, 10968], [10840, 10841, 10969], [10841, 10970, 10969], [10841, 10842, 10971], [10841, 10971, 10970], [10842, 10843, 10971], [10843, 10972, 10971], [10843, 10844, 10973], [10843, 10973, 10972], [10844, 10845, 10973], [10845, 10974, 10973], [10845, 10846, 10975], [10845, 10975, 10974], [10846, 10847, 10975], [10847, 10976, 10975], [10847, 10848, 10977], [10847, 10977, 10976], [10848, 10849, 10977], [10849, 10978, 10977], [10849, 10850, 10979], [10849, 10979, 10978], [10850, 10851, 10979], [10851, 10980, 10979], [10851, 10852, 10981], [10851, 10981, 10980], [10852, 10853, 10981], [10853, 10982, 10981], [10853, 10854, 10983], [10853, 10983, 10982], [10854, 10855, 10983], [10855, 10984, 10983], [10855, 10856, 10985], [10855, 10985, 10984], [10856, 10857, 10985], [10857, 10986, 10985], [10857, 10858, 10987], [10857, 10987, 10986], [10858, 10859, 10987], [10859, 10988, 10987], [10859, 10860, 10989], [10859, 10989, 10988], [10860, 10861, 10989], [10861, 10990, 10989], [10861, 10862, 10991], [10861, 10991, 10990], [10862, 10863, 10991], [10863, 10992, 10991], [10863, 10864, 10993], [10863, 10993, 10992], [10864, 10865, 10993], [10865, 10994, 10993], [10865, 10866, 10995], [10865, 10995, 10994], [10866, 10867, 10995], [10867, 10996, 10995], [10867, 10868, 10997], [10867, 10997, 10996], [10868, 10869, 10997], [10869, 10998, 10997], [10869, 10870, 10999], [10869, 10999, 10998], [10870, 10871, 10999], [10871, 11000, 10999], [10871, 10872, 11001], [10871, 11001, 11000], [10872, 10873, 11001], [10873, 11002, 11001], [10873, 10874, 11003], [10873, 11003, 11002], [10874, 10875, 11003], [10875, 11004, 11003], [10875, 10876, 11005], [10875, 11005, 11004], [10876, 10877, 11005], [10877, 11006, 11005], [10877, 10878, 11007], [10877, 11007, 11006], [10878, 10879, 11007], [10879, 11008, 11007], [10879, 10880, 11009], [10879, 11009, 11008], [10880, 10881, 11009], [10881, 11010, 11009], [10882, 10883, 11011], [10883, 11012, 11011], [10883, 10884, 11013], [10883, 11013, 11012], [10884, 10885, 11013], [10885, 11014, 11013], [10885, 10886, 11015], [10885, 11015, 11014], [10886, 10887, 11015], [10887, 11016, 11015], [10887, 10888, 11017], [10887, 11017, 11016], [10888, 10889, 11017], [10889, 11018, 11017], [10889, 10890, 11019], [10889, 11019, 
11018], [10890, 10891, 11019], [10891, 11020, 11019], [10891, 10892, 11021], [10891, 11021, 11020], [10892, 10893, 11021], [10893, 11022, 11021], [10893, 10894, 11023], [10893, 11023, 11022], [10894, 10895, 11023], [10895, 11024, 11023], [10895, 10896, 11025], [10895, 11025, 11024], [10896, 10897, 11025], [10897, 11026, 11025], [10897, 10898, 11027], [10897, 11027, 11026], [10898, 10899, 11027], [10899, 11028, 11027], [10899, 10900, 11029], [10899, 11029, 11028], [10900, 10901, 11029], [10901, 11030, 11029], [10901, 10902, 11031], [10901, 11031, 11030], [10902, 10903, 11031], [10903, 11032, 11031], [10903, 10904, 11033], [10903, 11033, 11032], [10904, 10905, 11033], [10905, 11034, 11033], [10905, 10906, 11035], [10905, 11035, 11034], [10906, 10907, 11035], [10907, 11036, 11035], [10907, 10908, 11037], [10907, 11037, 11036], [10908, 10909, 11037], [10909, 11038, 11037], [10909, 10910, 11039], [10909, 11039, 11038], [10910, 10911, 11039], [10911, 11040, 11039], [10911, 10912, 11041], [10911, 11041, 11040], [10912, 10913, 11041], [10913, 11042, 11041], [10913, 10914, 11043], [10913, 11043, 11042], [10914, 10915, 11043], [10915, 11044, 11043], [10915, 10916, 11045], [10915, 11045, 11044], [10916, 10917, 11045], [10917, 11046, 11045], [10917, 10918, 11047], [10917, 11047, 11046], [10918, 10919, 11047], [10919, 11048, 11047], [10919, 10920, 11049], [10919, 11049, 11048], [10920, 10921, 11049], [10921, 11050, 11049], [10921, 10922, 11051], [10921, 11051, 11050], [10922, 10923, 11051], [10923, 11052, 11051], [10923, 10924, 11053], [10923, 11053, 11052], [10924, 10925, 11053], [10925, 11054, 11053], [10925, 10926, 11055], [10925, 11055, 11054], [10926, 10927, 11055], [10927, 11056, 11055], [10927, 10928, 11057], [10927, 11057, 11056], [10928, 10929, 11057], [10929, 11058, 11057], [10929, 10930, 11059], [10929, 11059, 11058], [10930, 10931, 11059], [10931, 11060, 11059], [10931, 10932, 11061], [10931, 11061, 11060], [10932, 10933, 11061], [10933, 11062, 11061], [10933, 10934, 11063], [10933, 11063, 11062], [10934, 10935, 11063], [10935, 11064, 11063], [10935, 10936, 11065], [10935, 11065, 11064], [10936, 10937, 11065], [10937, 11066, 11065], [10937, 10938, 11067], [10937, 11067, 11066], [10938, 10939, 11067], [10939, 11068, 11067], [10939, 10940, 11069], [10939, 11069, 11068], [10940, 10941, 11069], [10941, 11070, 11069], [10941, 10942, 11071], [10941, 11071, 11070], [10942, 10943, 11071], [10943, 11072, 11071], [10943, 10944, 11073], [10943, 11073, 11072], [10944, 10945, 11073], [10945, 11074, 11073], [10945, 10946, 11075], [10945, 11075, 11074], [10946, 10947, 11075], [10947, 11076, 11075], [10947, 10948, 11077], [10947, 11077, 11076], [10948, 10949, 11077], [10949, 11078, 11077], [10949, 10950, 11079], [10949, 11079, 11078], [10950, 10951, 11079], [10951, 11080, 11079], [10951, 10952, 11081], [10951, 11081, 11080], [10952, 10953, 11081], [10953, 11082, 11081], [10953, 10954, 11083], [10953, 11083, 11082], [10954, 10955, 11083], [10955, 11084, 11083], [10955, 10956, 11085], [10955, 11085, 11084], [10956, 10957, 11085], [10957, 11086, 11085], [10957, 10958, 11087], [10957, 11087, 11086], [10958, 10959, 11087], [10959, 11088, 11087], [10959, 10960, 11089], [10959, 11089, 11088], [10960, 10961, 11089], [10961, 11090, 11089], [10961, 10962, 11091], [10961, 11091, 11090], [10962, 10963, 11091], [10963, 11092, 11091], [10963, 10964, 11093], [10963, 11093, 11092], [10964, 10965, 11093], [10965, 11094, 11093], [10965, 10966, 11095], [10965, 11095, 11094], [10966, 10967, 11095], [10967, 11096, 11095], 
[10967, 10968, 11097], [10967, 11097, 11096], [10968, 10969, 11097], [10969, 11098, 11097], [10969, 10970, 11099], [10969, 11099, 11098], [10970, 10971, 11099], [10971, 11100, 11099], [10971, 10972, 11101], [10971, 11101, 11100], [10972, 10973, 11101], [10973, 11102, 11101], [10973, 10974, 11103], [10973, 11103, 11102], [10974, 10975, 11103], [10975, 11104, 11103], [10975, 10976, 11105], [10975, 11105, 11104], [10976, 10977, 11105], [10977, 11106, 11105], [10977, 10978, 11107], [10977, 11107, 11106], [10978, 10979, 11107], [10979, 11108, 11107], [10979, 10980, 11109], [10979, 11109, 11108], [10980, 10981, 11109], [10981, 11110, 11109], [10981, 10982, 11111], [10981, 11111, 11110], [10982, 10983, 11111], [10983, 11112, 11111], [10983, 10984, 11113], [10983, 11113, 11112], [10984, 10985, 11113], [10985, 11114, 11113], [10985, 10986, 11115], [10985, 11115, 11114], [10986, 10987, 11115], [10987, 11116, 11115], [10987, 10988, 11117], [10987, 11117, 11116], [10988, 10989, 11117], [10989, 11118, 11117], [10989, 10990, 11119], [10989, 11119, 11118], [10990, 10991, 11119], [10991, 11120, 11119], [10991, 10992, 11121], [10991, 11121, 11120], [10992, 10993, 11121], [10993, 11122, 11121], [10993, 10994, 11123], [10993, 11123, 11122], [10994, 10995, 11123], [10995, 11124, 11123], [10995, 10996, 11125], [10995, 11125, 11124], [10996, 10997, 11125], [10997, 11126, 11125], [10997, 10998, 11127], [10997, 11127, 11126], [10998, 10999, 11127], [10999, 11128, 11127], [10999, 11000, 11129], [10999, 11129, 11128], [11000, 11001, 11129], [11001, 11130, 11129], [11001, 11002, 11131], [11001, 11131, 11130], [11002, 11003, 11131], [11003, 11132, 11131], [11003, 11004, 11133], [11003, 11133, 11132], [11004, 11005, 11133], [11005, 11134, 11133], [11005, 11006, 11135], [11005, 11135, 11134], [11006, 11007, 11135], [11007, 11136, 11135], [11007, 11008, 11137], [11007, 11137, 11136], [11008, 11009, 11137], [11009, 11138, 11137], [11009, 11010, 11139], [11009, 11139, 11138], [11011, 11012, 11141], [11011, 11141, 11140], [11012, 11013, 11141], [11013, 11142, 11141], [11013, 11014, 11143], [11013, 11143, 11142], [11014, 11015, 11143], [11015, 11144, 11143], [11015, 11016, 11145], [11015, 11145, 11144], [11016, 11017, 11145], [11017, 11146, 11145], [11017, 11018, 11147], [11017, 11147, 11146], [11018, 11019, 11147], [11019, 11148, 11147], [11019, 11020, 11149], [11019, 11149, 11148], [11020, 11021, 11149], [11021, 11150, 11149], [11021, 11022, 11151], [11021, 11151, 11150], [11022, 11023, 11151], [11023, 11152, 11151], [11023, 11024, 11153], [11023, 11153, 11152], [11024, 11025, 11153], [11025, 11154, 11153], [11025, 11026, 11155], [11025, 11155, 11154], [11026, 11027, 11155], [11027, 11156, 11155], [11027, 11028, 11157], [11027, 11157, 11156], [11028, 11029, 11157], [11029, 11158, 11157], [11029, 11030, 11159], [11029, 11159, 11158], [11030, 11031, 11159], [11031, 11160, 11159], [11031, 11032, 11161], [11031, 11161, 11160], [11032, 11033, 11161], [11033, 11162, 11161], [11033, 11034, 11163], [11033, 11163, 11162], [11034, 11035, 11163], [11035, 11164, 11163], [11035, 11036, 11165], [11035, 11165, 11164], [11036, 11037, 11165], [11037, 11166, 11165], [11037, 11038, 11167], [11037, 11167, 11166], [11038, 11039, 11167], [11039, 11168, 11167], [11039, 11040, 11169], [11039, 11169, 11168], [11040, 11041, 11169], [11041, 11170, 11169], [11041, 11042, 11171], [11041, 11171, 11170], [11042, 11043, 11171], [11043, 11172, 11171], [11043, 11044, 11173], [11043, 11173, 11172], [11044, 11045, 11173], [11045, 11174, 11173], [11045, 
11046, 11175], [11045, 11175, 11174], [11046, 11047, 11175], [11047, 11176, 11175], [11047, 11048, 11177], [11047, 11177, 11176], [11048, 11049, 11177], [11049, 11178, 11177], [11049, 11050, 11179], [11049, 11179, 11178], [11050, 11051, 11179], [11051, 11180, 11179], [11051, 11052, 11181], [11051, 11181, 11180], [11052, 11053, 11181], [11053, 11182, 11181], [11053, 11054, 11183], [11053, 11183, 11182], [11054, 11055, 11183], [11055, 11184, 11183], [11055, 11056, 11185], [11055, 11185, 11184], [11056, 11057, 11185], [11057, 11186, 11185], [11057, 11058, 11187], [11057, 11187, 11186], [11058, 11059, 11187], [11059, 11188, 11187], [11059, 11060, 11189], [11059, 11189, 11188], [11060, 11061, 11189], [11061, 11190, 11189], [11061, 11062, 11191], [11061, 11191, 11190], [11062, 11063, 11191], [11063, 11192, 11191], [11063, 11064, 11193], [11063, 11193, 11192], [11064, 11065, 11193], [11065, 11194, 11193], [11065, 11066, 11195], [11065, 11195, 11194], [11066, 11067, 11195], [11067, 11196, 11195], [11067, 11068, 11197], [11067, 11197, 11196], [11068, 11069, 11197], [11069, 11198, 11197], [11069, 11070, 11199], [11069, 11199, 11198], [11070, 11071, 11199], [11071, 11200, 11199], [11071, 11072, 11201], [11071, 11201, 11200], [11072, 11073, 11201], [11073, 11202, 11201], [11073, 11074, 11203], [11073, 11203, 11202], [11074, 11075, 11203], [11075, 11204, 11203], [11075, 11076, 11205], [11075, 11205, 11204], [11076, 11077, 11205], [11077, 11206, 11205], [11077, 11078, 11207], [11077, 11207, 11206], [11078, 11079, 11207], [11079, 11208, 11207], [11079, 11080, 11209], [11079, 11209, 11208], [11080, 11081, 11209], [11081, 11210, 11209], [11081, 11082, 11211], [11081, 11211, 11210], [11082, 11083, 11211], [11083, 11212, 11211], [11083, 11084, 11213], [11083, 11213, 11212], [11084, 11085, 11213], [11085, 11214, 11213], [11085, 11086, 11215], [11085, 11215, 11214], [11086, 11087, 11215], [11087, 11216, 11215], [11087, 11088, 11217], [11087, 11217, 11216], [11088, 11089, 11217], [11089, 11218, 11217], [11089, 11090, 11219], [11089, 11219, 11218], [11090, 11091, 11219], [11091, 11220, 11219], [11091, 11092, 11221], [11091, 11221, 11220], [11092, 11093, 11221], [11093, 11222, 11221], [11093, 11094, 11223], [11093, 11223, 11222], [11094, 11095, 11223], [11095, 11224, 11223], [11095, 11096, 11225], [11095, 11225, 11224], [11096, 11097, 11225], [11097, 11226, 11225], [11097, 11098, 11227], [11097, 11227, 11226], [11098, 11099, 11227], [11099, 11228, 11227], [11099, 11100, 11229], [11099, 11229, 11228], [11100, 11101, 11229], [11101, 11230, 11229], [11101, 11102, 11231], [11101, 11231, 11230], [11102, 11103, 11231], [11103, 11232, 11231], [11103, 11104, 11233], [11103, 11233, 11232], [11104, 11105, 11233], [11105, 11234, 11233], [11105, 11106, 11235], [11105, 11235, 11234], [11106, 11107, 11235], [11107, 11236, 11235], [11107, 11108, 11237], [11107, 11237, 11236], [11108, 11109, 11237], [11109, 11238, 11237], [11109, 11110, 11239], [11109, 11239, 11238], [11110, 11111, 11239], [11111, 11240, 11239], [11111, 11112, 11241], [11111, 11241, 11240], [11112, 11113, 11241], [11113, 11242, 11241], [11113, 11114, 11243], [11113, 11243, 11242], [11114, 11115, 11243], [11115, 11244, 11243], [11115, 11116, 11245], [11115, 11245, 11244], [11116, 11117, 11245], [11117, 11246, 11245], [11117, 11118, 11247], [11117, 11247, 11246], [11118, 11119, 11247], [11119, 11248, 11247], [11119, 11120, 11249], [11119, 11249, 11248], [11120, 11121, 11249], [11121, 11250, 11249], [11121, 11122, 11251], [11121, 11251, 11250], [11122, 11123, 
11251], [11123, 11252, 11251], [11123, 11124, 11253], [11123, 11253, 11252], [11124, 11125, 11253], [11125, 11254, 11253], [11125, 11126, 11255], [11125, 11255, 11254], [11126, 11127, 11255], [11127, 11256, 11255], [11127, 11128, 11257], [11127, 11257, 11256], [11128, 11129, 11257], [11129, 11258, 11257], [11129, 11130, 11259], [11129, 11259, 11258], [11130, 11131, 11259], [11131, 11260, 11259], [11131, 11132, 11261], [11131, 11261, 11260], [11132, 11133, 11261], [11133, 11262, 11261], [11133, 11134, 11263], [11133, 11263, 11262], [11134, 11135, 11263], [11135, 11264, 11263], [11135, 11136, 11265], [11135, 11265, 11264], [11136, 11137, 11265], [11137, 11266, 11265], [11137, 11138, 11267], [11137, 11267, 11266], [11138, 11139, 11267], [11139, 11268, 11267], [11140, 11141, 11269], [11141, 11270, 11269], [11141, 11142, 11271], [11141, 11271, 11270], [11142, 11143, 11271], [11143, 11272, 11271], [11143, 11144, 11273], [11143, 11273, 11272], [11144, 11145, 11273], [11145, 11274, 11273], [11145, 11146, 11275], [11145, 11275, 11274], [11146, 11147, 11275], [11147, 11276, 11275], [11147, 11148, 11277], [11147, 11277, 11276], [11148, 11149, 11277], [11149, 11278, 11277], [11149, 11150, 11279], [11149, 11279, 11278], [11150, 11151, 11279], [11151, 11280, 11279], [11151, 11152, 11281], [11151, 11281, 11280], [11152, 11153, 11281], [11153, 11282, 11281], [11153, 11154, 11283], [11153, 11283, 11282], [11154, 11155, 11283], [11155, 11284, 11283], [11155, 11156, 11285], [11155, 11285, 11284], [11156, 11157, 11285], [11157, 11286, 11285], [11157, 11158, 11287], [11157, 11287, 11286], [11158, 11159, 11287], [11159, 11288, 11287], [11159, 11160, 11289], [11159, 11289, 11288], [11160, 11161, 11289], [11161, 11290, 11289], [11161, 11162, 11291], [11161, 11291, 11290], [11162, 11163, 11291], [11163, 11292, 11291], [11163, 11164, 11293], [11163, 11293, 11292], [11164, 11165, 11293], [11165, 11294, 11293], [11165, 11166, 11295], [11165, 11295, 11294], [11166, 11167, 11295], [11167, 11296, 11295], [11167, 11168, 11297], [11167, 11297, 11296], [11168, 11169, 11297], [11169, 11298, 11297], [11169, 11170, 11299], [11169, 11299, 11298], [11170, 11171, 11299], [11171, 11300, 11299], [11171, 11172, 11301], [11171, 11301, 11300], [11172, 11173, 11301], [11173, 11302, 11301], [11173, 11174, 11303], [11173, 11303, 11302], [11174, 11175, 11303], [11175, 11304, 11303], [11175, 11176, 11305], [11175, 11305, 11304], [11176, 11177, 11305], [11177, 11306, 11305], [11177, 11178, 11307], [11177, 11307, 11306], [11178, 11179, 11307], [11179, 11308, 11307], [11179, 11180, 11309], [11179, 11309, 11308], [11180, 11181, 11309], [11181, 11310, 11309], [11181, 11182, 11311], [11181, 11311, 11310], [11182, 11183, 11311], [11183, 11312, 11311], [11183, 11184, 11313], [11183, 11313, 11312], [11184, 11185, 11313], [11185, 11314, 11313], [11185, 11186, 11315], [11185, 11315, 11314], [11186, 11187, 11315], [11187, 11316, 11315], [11187, 11188, 11317], [11187, 11317, 11316], [11188, 11189, 11317], [11189, 11318, 11317], [11189, 11190, 11319], [11189, 11319, 11318], [11190, 11191, 11319], [11191, 11320, 11319], [11191, 11192, 11321], [11191, 11321, 11320], [11192, 11193, 11321], [11193, 11322, 11321], [11193, 11194, 11323], [11193, 11323, 11322], [11194, 11195, 11323], [11195, 11324, 11323], [11195, 11196, 11325], [11195, 11325, 11324], [11196, 11197, 11325], [11197, 11326, 11325], [11197, 11198, 11327], [11197, 11327, 11326], [11198, 11199, 11327], [11199, 11328, 11327], [11199, 11200, 11329], [11199, 11329, 11328], [11200, 11201, 11329], 
[11201, 11330, 11329], [11201, 11202, 11331], [11201, 11331, 11330], [11202, 11203, 11331], [11203, 11332, 11331], [11203, 11204, 11333], [11203, 11333, 11332], [11204, 11205, 11333], [11205, 11334, 11333], [11205, 11206, 11335], [11205, 11335, 11334], [11206, 11207, 11335], [11207, 11336, 11335], [11207, 11208, 11337], [11207, 11337, 11336], [11208, 11209, 11337], [11209, 11338, 11337], [11209, 11210, 11339], [11209, 11339, 11338], [11210, 11211, 11339], [11211, 11340, 11339], [11211, 11212, 11341], [11211, 11341, 11340], [11212, 11213, 11341], [11213, 11342, 11341], [11213, 11214, 11343], [11213, 11343, 11342], [11214, 11215, 11343], [11215, 11344, 11343], [11215, 11216, 11345], [11215, 11345, 11344], [11216, 11217, 11345], [11217, 11346, 11345], [11217, 11218, 11347], [11217, 11347, 11346], [11218, 11219, 11347], [11219, 11348, 11347], [11219, 11220, 11349], [11219, 11349, 11348], [11220, 11221, 11349], [11221, 11350, 11349], [11221, 11222, 11351], [11221, 11351, 11350], [11222, 11223, 11351], [11223, 11352, 11351], [11223, 11224, 11353], [11223, 11353, 11352], [11224, 11225, 11353], [11225, 11354, 11353], [11225, 11226, 11355], [11225, 11355, 11354], [11226, 11227, 11355], [11227, 11356, 11355], [11227, 11228, 11357], [11227, 11357, 11356], [11228, 11229, 11357], [11229, 11358, 11357], [11229, 11230, 11359], [11229, 11359, 11358], [11230, 11231, 11359], [11231, 11360, 11359], [11231, 11232, 11361], [11231, 11361, 11360], [11232, 11233, 11361], [11233, 11362, 11361], [11233, 11234, 11363], [11233, 11363, 11362], [11234, 11235, 11363], [11235, 11364, 11363], [11235, 11236, 11365], [11235, 11365, 11364], [11236, 11237, 11365], [11237, 11366, 11365], [11237, 11238, 11367], [11237, 11367, 11366], [11238, 11239, 11367], [11239, 11368, 11367], [11239, 11240, 11369], [11239, 11369, 11368], [11240, 11241, 11369], [11241, 11370, 11369], [11241, 11242, 11371], [11241, 11371, 11370], [11242, 11243, 11371], [11243, 11372, 11371], [11243, 11244, 11373], [11243, 11373, 11372], [11244, 11245, 11373], [11245, 11374, 11373], [11245, 11246, 11375], [11245, 11375, 11374], [11246, 11247, 11375], [11247, 11376, 11375], [11247, 11248, 11377], [11247, 11377, 11376], [11248, 11249, 11377], [11249, 11378, 11377], [11249, 11250, 11379], [11249, 11379, 11378], [11250, 11251, 11379], [11251, 11380, 11379], [11251, 11252, 11381], [11251, 11381, 11380], [11252, 11253, 11381], [11253, 11382, 11381], [11253, 11254, 11383], [11253, 11383, 11382], [11254, 11255, 11383], [11255, 11384, 11383], [11255, 11256, 11385], [11255, 11385, 11384], [11256, 11257, 11385], [11257, 11386, 11385], [11257, 11258, 11387], [11257, 11387, 11386], [11258, 11259, 11387], [11259, 11388, 11387], [11259, 11260, 11389], [11259, 11389, 11388], [11260, 11261, 11389], [11261, 11390, 11389], [11261, 11262, 11391], [11261, 11391, 11390], [11262, 11263, 11391], [11263, 11392, 11391], [11263, 11264, 11393], [11263, 11393, 11392], [11264, 11265, 11393], [11265, 11394, 11393], [11265, 11266, 11395], [11265, 11395, 11394], [11266, 11267, 11395], [11267, 11396, 11395], [11267, 11268, 11397], [11267, 11397, 11396], [11269, 11270, 11399], [11269, 11399, 11398], [11270, 11271, 11399], [11271, 11400, 11399], [11271, 11272, 11401], [11271, 11401, 11400], [11272, 11273, 11401], [11273, 11402, 11401], [11273, 11274, 11403], [11273, 11403, 11402], [11274, 11275, 11403], [11275, 11404, 11403], [11275, 11276, 11405], [11275, 11405, 11404], [11276, 11277, 11405], [11277, 11406, 11405], [11277, 11278, 11407], [11277, 11407, 11406], [11278, 11279, 11407], [11279, 
11408, 11407], [11279, 11280, 11409], [11279, 11409, 11408], [11280, 11281, 11409], [11281, 11410, 11409], [11281, 11282, 11411], [11281, 11411, 11410], [11282, 11283, 11411], [11283, 11412, 11411], [11283, 11284, 11413], [11283, 11413, 11412], [11284, 11285, 11413], [11285, 11414, 11413], [11285, 11286, 11415], [11285, 11415, 11414], [11286, 11287, 11415], [11287, 11416, 11415], [11287, 11288, 11417], [11287, 11417, 11416], [11288, 11289, 11417], [11289, 11418, 11417], [11289, 11290, 11419], [11289, 11419, 11418], [11290, 11291, 11419], [11291, 11420, 11419], [11291, 11292, 11421], [11291, 11421, 11420], [11292, 11293, 11421], [11293, 11422, 11421], [11293, 11294, 11423], [11293, 11423, 11422], [11294, 11295, 11423], [11295, 11424, 11423], [11295, 11296, 11425], [11295, 11425, 11424], [11296, 11297, 11425], [11297, 11426, 11425], [11297, 11298, 11427], [11297, 11427, 11426], [11298, 11299, 11427], [11299, 11428, 11427], [11299, 11300, 11429], [11299, 11429, 11428], [11300, 11301, 11429], [11301, 11430, 11429], [11301, 11302, 11431], [11301, 11431, 11430], [11302, 11303, 11431], [11303, 11432, 11431], [11303, 11304, 11433], [11303, 11433, 11432], [11304, 11305, 11433], [11305, 11434, 11433], [11305, 11306, 11435], [11305, 11435, 11434], [11306, 11307, 11435], [11307, 11436, 11435], [11307, 11308, 11437], [11307, 11437, 11436], [11308, 11309, 11437], [11309, 11438, 11437], [11309, 11310, 11439], [11309, 11439, 11438], [11310, 11311, 11439], [11311, 11440, 11439], [11311, 11312, 11441], [11311, 11441, 11440], [11312, 11313, 11441], [11313, 11442, 11441], [11313, 11314, 11443], [11313, 11443, 11442], [11314, 11315, 11443], [11315, 11444, 11443], [11315, 11316, 11445], [11315, 11445, 11444], [11316, 11317, 11445], [11317, 11446, 11445], [11317, 11318, 11447], [11317, 11447, 11446], [11318, 11319, 11447], [11319, 11448, 11447], [11319, 11320, 11449], [11319, 11449, 11448], [11320, 11321, 11449], [11321, 11450, 11449], [11321, 11322, 11451], [11321, 11451, 11450], [11322, 11323, 11451], [11323, 11452, 11451], [11323, 11324, 11453], [11323, 11453, 11452], [11324, 11325, 11453], [11325, 11454, 11453], [11325, 11326, 11455], [11325, 11455, 11454], [11326, 11327, 11455], [11327, 11456, 11455], [11327, 11328, 11457], [11327, 11457, 11456], [11328, 11329, 11457], [11329, 11458, 11457], [11329, 11330, 11459], [11329, 11459, 11458], [11330, 11331, 11459], [11331, 11460, 11459], [11331, 11332, 11461], [11331, 11461, 11460], [11332, 11333, 11461], [11333, 11462, 11461], [11333, 11334, 11463], [11333, 11463, 11462], [11334, 11335, 11463], [11335, 11464, 11463], [11335, 11336, 11465], [11335, 11465, 11464], [11336, 11337, 11465], [11337, 11466, 11465], [11337, 11338, 11467], [11337, 11467, 11466], [11338, 11339, 11467], [11339, 11468, 11467], [11339, 11340, 11469], [11339, 11469, 11468], [11340, 11341, 11469], [11341, 11470, 11469], [11341, 11342, 11471], [11341, 11471, 11470], [11342, 11343, 11471], [11343, 11472, 11471], [11343, 11344, 11473], [11343, 11473, 11472], [11344, 11345, 11473], [11345, 11474, 11473], [11345, 11346, 11475], [11345, 11475, 11474], [11346, 11347, 11475], [11347, 11476, 11475], [11347, 11348, 11477], [11347, 11477, 11476], [11348, 11349, 11477], [11349, 11478, 11477], [11349, 11350, 11479], [11349, 11479, 11478], [11350, 11351, 11479], [11351, 11480, 11479], [11351, 11352, 11481], [11351, 11481, 11480], [11352, 11353, 11481], [11353, 11482, 11481], [11353, 11354, 11483], [11353, 11483, 11482], [11354, 11355, 11483], [11355, 11484, 11483], [11355, 11356, 11485], [11355, 11485, 
11484], [11356, 11357, 11485], [11357, 11486, 11485], [11357, 11358, 11487], [11357, 11487, 11486], [11358, 11359, 11487], [11359, 11488, 11487], [11359, 11360, 11489], [11359, 11489, 11488], [11360, 11361, 11489], [11361, 11490, 11489], [11361, 11362, 11491], [11361, 11491, 11490], [11362, 11363, 11491], [11363, 11492, 11491], [11363, 11364, 11493], [11363, 11493, 11492], [11364, 11365, 11493], [11365, 11494, 11493], [11365, 11366, 11495], [11365, 11495, 11494], [11366, 11367, 11495], [11367, 11496, 11495], [11367, 11368, 11497], [11367, 11497, 11496], [11368, 11369, 11497], [11369, 11498, 11497], [11369, 11370, 11499], [11369, 11499, 11498], [11370, 11371, 11499], [11371, 11500, 11499], [11371, 11372, 11501], [11371, 11501, 11500], [11372, 11373, 11501], [11373, 11502, 11501], [11373, 11374, 11503], [11373, 11503, 11502], [11374, 11375, 11503], [11375, 11504, 11503], [11375, 11376, 11505], [11375, 11505, 11504], [11376, 11377, 11505], [11377, 11506, 11505], [11377, 11378, 11507], [11377, 11507, 11506], [11378, 11379, 11507], [11379, 11508, 11507], [11379, 11380, 11509], [11379, 11509, 11508], [11380, 11381, 11509], [11381, 11510, 11509], [11381, 11382, 11511], [11381, 11511, 11510], [11382, 11383, 11511], [11383, 11512, 11511], [11383, 11384, 11513], [11383, 11513, 11512], [11384, 11385, 11513], [11385, 11514, 11513], [11385, 11386, 11515], [11385, 11515, 11514], [11386, 11387, 11515], [11387, 11516, 11515], [11387, 11388, 11517], [11387, 11517, 11516], [11388, 11389, 11517], [11389, 11518, 11517], [11389, 11390, 11519], [11389, 11519, 11518], [11390, 11391, 11519], [11391, 11520, 11519], [11391, 11392, 11521], [11391, 11521, 11520], [11392, 11393, 11521], [11393, 11522, 11521], [11393, 11394, 11523], [11393, 11523, 11522], [11394, 11395, 11523], [11395, 11524, 11523], [11395, 11396, 11525], [11395, 11525, 11524], [11396, 11397, 11525], [11397, 11526, 11525], [11398, 11399, 11527], [11399, 11528, 11527], [11399, 11400, 11529], [11399, 11529, 11528], [11400, 11401, 11529], [11401, 11530, 11529], [11401, 11402, 11531], [11401, 11531, 11530], [11402, 11403, 11531], [11403, 11532, 11531], [11403, 11404, 11533], [11403, 11533, 11532], [11404, 11405, 11533], [11405, 11534, 11533], [11405, 11406, 11535], [11405, 11535, 11534], [11406, 11407, 11535], [11407, 11536, 11535], [11407, 11408, 11537], [11407, 11537, 11536], [11408, 11409, 11537], [11409, 11538, 11537], [11409, 11410, 11539], [11409, 11539, 11538], [11410, 11411, 11539], [11411, 11540, 11539], [11411, 11412, 11541], [11411, 11541, 11540], [11412, 11413, 11541], [11413, 11542, 11541], [11413, 11414, 11543], [11413, 11543, 11542], [11414, 11415, 11543], [11415, 11544, 11543], [11415, 11416, 11545], [11415, 11545, 11544], [11416, 11417, 11545], [11417, 11546, 11545], [11417, 11418, 11547], [11417, 11547, 11546], [11418, 11419, 11547], [11419, 11548, 11547], [11419, 11420, 11549], [11419, 11549, 11548], [11420, 11421, 11549], [11421, 11550, 11549], [11421, 11422, 11551], [11421, 11551, 11550], [11422, 11423, 11551], [11423, 11552, 11551], [11423, 11424, 11553], [11423, 11553, 11552], [11424, 11425, 11553], [11425, 11554, 11553], [11425, 11426, 11555], [11425, 11555, 11554], [11426, 11427, 11555], [11427, 11556, 11555], [11427, 11428, 11557], [11427, 11557, 11556], [11428, 11429, 11557], [11429, 11558, 11557], [11429, 11430, 11559], [11429, 11559, 11558], [11430, 11431, 11559], [11431, 11560, 11559], [11431, 11432, 11561], [11431, 11561, 11560], [11432, 11433, 11561], [11433, 11562, 11561], [11433, 11434, 11563], [11433, 11563, 11562], 
[11434, 11435, 11563], [11435, 11564, 11563], [11435, 11436, 11565], [11435, 11565, 11564], [11436, 11437, 11565], [11437, 11566, 11565], [11437, 11438, 11567], [11437, 11567, 11566], [11438, 11439, 11567], [11439, 11568, 11567], [11439, 11440, 11569], [11439, 11569, 11568], [11440, 11441, 11569], [11441, 11570, 11569], [11441, 11442, 11571], [11441, 11571, 11570], [11442, 11443, 11571], [11443, 11572, 11571], [11443, 11444, 11573], [11443, 11573, 11572], [11444, 11445, 11573], [11445, 11574, 11573], [11445, 11446, 11575], [11445, 11575, 11574], [11446, 11447, 11575], [11447, 11576, 11575], [11447, 11448, 11577], [11447, 11577, 11576], [11448, 11449, 11577], [11449, 11578, 11577], [11449, 11450, 11579], [11449, 11579, 11578], [11450, 11451, 11579], [11451, 11580, 11579], [11451, 11452, 11581], [11451, 11581, 11580], [11452, 11453, 11581], [11453, 11582, 11581], [11453, 11454, 11583], [11453, 11583, 11582], [11454, 11455, 11583], [11455, 11584, 11583], [11455, 11456, 11585], [11455, 11585, 11584], [11456, 11457, 11585], [11457, 11586, 11585], [11457, 11458, 11587], [11457, 11587, 11586], [11458, 11459, 11587], [11459, 11588, 11587], [11459, 11460, 11589], [11459, 11589, 11588], [11460, 11461, 11589], [11461, 11590, 11589], [11461, 11462, 11591], [11461, 11591, 11590], [11462, 11463, 11591], [11463, 11592, 11591], [11463, 11464, 11593], [11463, 11593, 11592], [11464, 11465, 11593], [11465, 11594, 11593], [11465, 11466, 11595], [11465, 11595, 11594], [11466, 11467, 11595], [11467, 11596, 11595], [11467, 11468, 11597], [11467, 11597, 11596], [11468, 11469, 11597], [11469, 11598, 11597], [11469, 11470, 11599], [11469, 11599, 11598], [11470, 11471, 11599], [11471, 11600, 11599], [11471, 11472, 11601], [11471, 11601, 11600], [11472, 11473, 11601], [11473, 11602, 11601], [11473, 11474, 11603], [11473, 11603, 11602], [11474, 11475, 11603], [11475, 11604, 11603], [11475, 11476, 11605], [11475, 11605, 11604], [11476, 11477, 11605], [11477, 11606, 11605], [11477, 11478, 11607], [11477, 11607, 11606], [11478, 11479, 11607], [11479, 11608, 11607], [11479, 11480, 11609], [11479, 11609, 11608], [11480, 11481, 11609], [11481, 11610, 11609], [11481, 11482, 11611], [11481, 11611, 11610], [11482, 11483, 11611], [11483, 11612, 11611], [11483, 11484, 11613], [11483, 11613, 11612], [11484, 11485, 11613], [11485, 11614, 11613], [11485, 11486, 11615], [11485, 11615, 11614], [11486, 11487, 11615], [11487, 11616, 11615], [11487, 11488, 11617], [11487, 11617, 11616], [11488, 11489, 11617], [11489, 11618, 11617], [11489, 11490, 11619], [11489, 11619, 11618], [11490, 11491, 11619], [11491, 11620, 11619], [11491, 11492, 11621], [11491, 11621, 11620], [11492, 11493, 11621], [11493, 11622, 11621], [11493, 11494, 11623], [11493, 11623, 11622], [11494, 11495, 11623], [11495, 11624, 11623], [11495, 11496, 11625], [11495, 11625, 11624], [11496, 11497, 11625], [11497, 11626, 11625], [11497, 11498, 11627], [11497, 11627, 11626], [11498, 11499, 11627], [11499, 11628, 11627], [11499, 11500, 11629], [11499, 11629, 11628], [11500, 11501, 11629], [11501, 11630, 11629], [11501, 11502, 11631], [11501, 11631, 11630], [11502, 11503, 11631], [11503, 11632, 11631], [11503, 11504, 11633], [11503, 11633, 11632], [11504, 11505, 11633], [11505, 11634, 11633], [11505, 11506, 11635], [11505, 11635, 11634], [11506, 11507, 11635], [11507, 11636, 11635], [11507, 11508, 11637], [11507, 11637, 11636], [11508, 11509, 11637], [11509, 11638, 11637], [11509, 11510, 11639], [11509, 11639, 11638], [11510, 11511, 11639], [11511, 11640, 11639], [11511, 
11512, 11641], [11511, 11641, 11640], [11512, 11513, 11641], [11513, 11642, 11641], [11513, 11514, 11643], [11513, 11643, 11642], [11514, 11515, 11643], [11515, 11644, 11643], [11515, 11516, 11645], [11515, 11645, 11644], [11516, 11517, 11645], [11517, 11646, 11645], [11517, 11518, 11647], [11517, 11647, 11646], [11518, 11519, 11647], [11519, 11648, 11647], [11519, 11520, 11649], [11519, 11649, 11648], [11520, 11521, 11649], [11521, 11650, 11649], [11521, 11522, 11651], [11521, 11651, 11650], [11522, 11523, 11651], [11523, 11652, 11651], [11523, 11524, 11653], [11523, 11653, 11652], [11524, 11525, 11653], [11525, 11654, 11653], [11525, 11526, 11655], [11525, 11655, 11654], [11527, 11528, 11657], [11527, 11657, 11656], [11528, 11529, 11657], [11529, 11658, 11657], [11529, 11530, 11659], [11529, 11659, 11658], [11530, 11531, 11659], [11531, 11660, 11659], [11531, 11532, 11661], [11531, 11661, 11660], [11532, 11533, 11661], [11533, 11662, 11661], [11533, 11534, 11663], [11533, 11663, 11662], [11534, 11535, 11663], [11535, 11664, 11663], [11535, 11536, 11665], [11535, 11665, 11664], [11536, 11537, 11665], [11537, 11666, 11665], [11537, 11538, 11667], [11537, 11667, 11666], [11538, 11539, 11667], [11539, 11668, 11667], [11539, 11540, 11669], [11539, 11669, 11668], [11540, 11541, 11669], [11541, 11670, 11669], [11541, 11542, 11671], [11541, 11671, 11670], [11542, 11543, 11671], [11543, 11672, 11671], [11543, 11544, 11673], [11543, 11673, 11672], [11544, 11545, 11673], [11545, 11674, 11673], [11545, 11546, 11675], [11545, 11675, 11674], [11546, 11547, 11675], [11547, 11676, 11675], [11547, 11548, 11677], [11547, 11677, 11676], [11548, 11549, 11677], [11549, 11678, 11677], [11549, 11550, 11679], [11549, 11679, 11678], [11550, 11551, 11679], [11551, 11680, 11679], [11551, 11552, 11681], [11551, 11681, 11680], [11552, 11553, 11681], [11553, 11682, 11681], [11553, 11554, 11683], [11553, 11683, 11682], [11554, 11555, 11683], [11555, 11684, 11683], [11555, 11556, 11685], [11555, 11685, 11684], [11556, 11557, 11685], [11557, 11686, 11685], [11557, 11558, 11687], [11557, 11687, 11686], [11558, 11559, 11687], [11559, 11688, 11687], [11559, 11560, 11689], [11559, 11689, 11688], [11560, 11561, 11689], [11561, 11690, 11689], [11561, 11562, 11691], [11561, 11691, 11690], [11562, 11563, 11691], [11563, 11692, 11691], [11563, 11564, 11693], [11563, 11693, 11692], [11564, 11565, 11693], [11565, 11694, 11693], [11565, 11566, 11695], [11565, 11695, 11694], [11566, 11567, 11695], [11567, 11696, 11695], [11567, 11568, 11697], [11567, 11697, 11696], [11568, 11569, 11697], [11569, 11698, 11697], [11569, 11570, 11699], [11569, 11699, 11698], [11570, 11571, 11699], [11571, 11700, 11699], [11571, 11572, 11701], [11571, 11701, 11700], [11572, 11573, 11701], [11573, 11702, 11701], [11573, 11574, 11703], [11573, 11703, 11702], [11574, 11575, 11703], [11575, 11704, 11703], [11575, 11576, 11705], [11575, 11705, 11704], [11576, 11577, 11705], [11577, 11706, 11705], [11577, 11578, 11707], [11577, 11707, 11706], [11578, 11579, 11707], [11579, 11708, 11707], [11579, 11580, 11709], [11579, 11709, 11708], [11580, 11581, 11709], [11581, 11710, 11709], [11581, 11582, 11711], [11581, 11711, 11710], [11582, 11583, 11711], [11583, 11712, 11711], [11583, 11584, 11713], [11583, 11713, 11712], [11584, 11585, 11713], [11585, 11714, 11713], [11585, 11586, 11715], [11585, 11715, 11714], [11586, 11587, 11715], [11587, 11716, 11715], [11587, 11588, 11717], [11587, 11717, 11716], [11588, 11589, 11717], [11589, 11718, 11717], [11589, 11590, 
11719], [11589, 11719, 11718], [11590, 11591, 11719], [11591, 11720, 11719], [11591, 11592, 11721], [11591, 11721, 11720], [11592, 11593, 11721], [11593, 11722, 11721], [11593, 11594, 11723], [11593, 11723, 11722], [11594, 11595, 11723], [11595, 11724, 11723], [11595, 11596, 11725], [11595, 11725, 11724], [11596, 11597, 11725], [11597, 11726, 11725], [11597, 11598, 11727], [11597, 11727, 11726], [11598, 11599, 11727], [11599, 11728, 11727], [11599, 11600, 11729], [11599, 11729, 11728], [11600, 11601, 11729], [11601, 11730, 11729], [11601, 11602, 11731], [11601, 11731, 11730], [11602, 11603, 11731], [11603, 11732, 11731], [11603, 11604, 11733], [11603, 11733, 11732], [11604, 11605, 11733], [11605, 11734, 11733], [11605, 11606, 11735], [11605, 11735, 11734], [11606, 11607, 11735], [11607, 11736, 11735], [11607, 11608, 11737], [11607, 11737, 11736], [11608, 11609, 11737], [11609, 11738, 11737], [11609, 11610, 11739], [11609, 11739, 11738], [11610, 11611, 11739], [11611, 11740, 11739], [11611, 11612, 11741], [11611, 11741, 11740], [11612, 11613, 11741], [11613, 11742, 11741], [11613, 11614, 11743], [11613, 11743, 11742], [11614, 11615, 11743], [11615, 11744, 11743], [11615, 11616, 11745], [11615, 11745, 11744], [11616, 11617, 11745], [11617, 11746, 11745], [11617, 11618, 11747], [11617, 11747, 11746], [11618, 11619, 11747], [11619, 11748, 11747], [11619, 11620, 11749], [11619, 11749, 11748], [11620, 11621, 11749], [11621, 11750, 11749], [11621, 11622, 11751], [11621, 11751, 11750], [11622, 11623, 11751], [11623, 11752, 11751], [11623, 11624, 11753], [11623, 11753, 11752], [11624, 11625, 11753], [11625, 11754, 11753], [11625, 11626, 11755], [11625, 11755, 11754], [11626, 11627, 11755], [11627, 11756, 11755], [11627, 11628, 11757], [11627, 11757, 11756], [11628, 11629, 11757], [11629, 11758, 11757], [11629, 11630, 11759], [11629, 11759, 11758], [11630, 11631, 11759], [11631, 11760, 11759], [11631, 11632, 11761], [11631, 11761, 11760], [11632, 11633, 11761], [11633, 11762, 11761], [11633, 11634, 11763], [11633, 11763, 11762], [11634, 11635, 11763], [11635, 11764, 11763], [11635, 11636, 11765], [11635, 11765, 11764], [11636, 11637, 11765], [11637, 11766, 11765], [11637, 11638, 11767], [11637, 11767, 11766], [11638, 11639, 11767], [11639, 11768, 11767], [11639, 11640, 11769], [11639, 11769, 11768], [11640, 11641, 11769], [11641, 11770, 11769], [11641, 11642, 11771], [11641, 11771, 11770], [11642, 11643, 11771], [11643, 11772, 11771], [11643, 11644, 11773], [11643, 11773, 11772], [11644, 11645, 11773], [11645, 11774, 11773], [11645, 11646, 11775], [11645, 11775, 11774], [11646, 11647, 11775], [11647, 11776, 11775], [11647, 11648, 11777], [11647, 11777, 11776], [11648, 11649, 11777], [11649, 11778, 11777], [11649, 11650, 11779], [11649, 11779, 11778], [11650, 11651, 11779], [11651, 11780, 11779], [11651, 11652, 11781], [11651, 11781, 11780], [11652, 11653, 11781], [11653, 11782, 11781], [11653, 11654, 11783], [11653, 11783, 11782], [11654, 11655, 11783], [11655, 11784, 11783], [11656, 11657, 11785], [11657, 11786, 11785], [11657, 11658, 11787], [11657, 11787, 11786], [11658, 11659, 11787], [11659, 11788, 11787], [11659, 11660, 11789], [11659, 11789, 11788], [11660, 11661, 11789], [11661, 11790, 11789], [11661, 11662, 11791], [11661, 11791, 11790], [11662, 11663, 11791], [11663, 11792, 11791], [11663, 11664, 11793], [11663, 11793, 11792], [11664, 11665, 11793], [11665, 11794, 11793], [11665, 11666, 11795], [11665, 11795, 11794], [11666, 11667, 11795], [11667, 11796, 11795], [11667, 11668, 11797], 
[11667, 11797, 11796], [11668, 11669, 11797], [11669, 11798, 11797], [11669, 11670, 11799], [11669, 11799, 11798], [11670, 11671, 11799], [11671, 11800, 11799], [11671, 11672, 11801], [11671, 11801, 11800], [11672, 11673, 11801], [11673, 11802, 11801], [11673, 11674, 11803], [11673, 11803, 11802], [11674, 11675, 11803], [11675, 11804, 11803], [11675, 11676, 11805], [11675, 11805, 11804], [11676, 11677, 11805], [11677, 11806, 11805], [11677, 11678, 11807], [11677, 11807, 11806], [11678, 11679, 11807], [11679, 11808, 11807], [11679, 11680, 11809], [11679, 11809, 11808], [11680, 11681, 11809], [11681, 11810, 11809], [11681, 11682, 11811], [11681, 11811, 11810], [11682, 11683, 11811], [11683, 11812, 11811], [11683, 11684, 11813], [11683, 11813, 11812], [11684, 11685, 11813], [11685, 11814, 11813], [11685, 11686, 11815], [11685, 11815, 11814], [11686, 11687, 11815], [11687, 11816, 11815], [11687, 11688, 11817], [11687, 11817, 11816], [11688, 11689, 11817], [11689, 11818, 11817], [11689, 11690, 11819], [11689, 11819, 11818], [11690, 11691, 11819], [11691, 11820, 11819], [11691, 11692, 11821], [11691, 11821, 11820], [11692, 11693, 11821], [11693, 11822, 11821], [11693, 11694, 11823], [11693, 11823, 11822], [11694, 11695, 11823], [11695, 11824, 11823], [11695, 11696, 11825], [11695, 11825, 11824], [11696, 11697, 11825], [11697, 11826, 11825], [11697, 11698, 11827], [11697, 11827, 11826], [11698, 11699, 11827], [11699, 11828, 11827], [11699, 11700, 11829], [11699, 11829, 11828], [11700, 11701, 11829], [11701, 11830, 11829], [11701, 11702, 11831], [11701, 11831, 11830], [11702, 11703, 11831], [11703, 11832, 11831], [11703, 11704, 11833], [11703, 11833, 11832], [11704, 11705, 11833], [11705, 11834, 11833], [11705, 11706, 11835], [11705, 11835, 11834], [11706, 11707, 11835], [11707, 11836, 11835], [11707, 11708, 11837], [11707, 11837, 11836], [11708, 11709, 11837], [11709, 11838, 11837], [11709, 11710, 11839], [11709, 11839, 11838], [11710, 11711, 11839], [11711, 11840, 11839], [11711, 11712, 11841], [11711, 11841, 11840], [11712, 11713, 11841], [11713, 11842, 11841], [11713, 11714, 11843], [11713, 11843, 11842], [11714, 11715, 11843], [11715, 11844, 11843], [11715, 11716, 11845], [11715, 11845, 11844], [11716, 11717, 11845], [11717, 11846, 11845], [11717, 11718, 11847], [11717, 11847, 11846], [11718, 11719, 11847], [11719, 11848, 11847], [11719, 11720, 11849], [11719, 11849, 11848], [11720, 11721, 11849], [11721, 11850, 11849], [11721, 11722, 11851], [11721, 11851, 11850], [11722, 11723, 11851], [11723, 11852, 11851], [11723, 11724, 11853], [11723, 11853, 11852], [11724, 11725, 11853], [11725, 11854, 11853], [11725, 11726, 11855], [11725, 11855, 11854], [11726, 11727, 11855], [11727, 11856, 11855], [11727, 11728, 11857], [11727, 11857, 11856], [11728, 11729, 11857], [11729, 11858, 11857], [11729, 11730, 11859], [11729, 11859, 11858], [11730, 11731, 11859], [11731, 11860, 11859], [11731, 11732, 11861], [11731, 11861, 11860], [11732, 11733, 11861], [11733, 11862, 11861], [11733, 11734, 11863], [11733, 11863, 11862], [11734, 11735, 11863], [11735, 11864, 11863], [11735, 11736, 11865], [11735, 11865, 11864], [11736, 11737, 11865], [11737, 11866, 11865], [11737, 11738, 11867], [11737, 11867, 11866], [11738, 11739, 11867], [11739, 11868, 11867], [11739, 11740, 11869], [11739, 11869, 11868], [11740, 11741, 11869], [11741, 11870, 11869], [11741, 11742, 11871], [11741, 11871, 11870], [11742, 11743, 11871], [11743, 11872, 11871], [11743, 11744, 11873], [11743, 11873, 11872], [11744, 11745, 11873], [11745, 
11874, 11873], [11745, 11746, 11875], [11745, 11875, 11874], [11746, 11747, 11875], [11747, 11876, 11875], [11747, 11748, 11877], [11747, 11877, 11876], [11748, 11749, 11877], [11749, 11878, 11877], [11749, 11750, 11879], [11749, 11879, 11878], [11750, 11751, 11879], [11751, 11880, 11879], [11751, 11752, 11881], [11751, 11881, 11880], [11752, 11753, 11881], [11753, 11882, 11881], [11753, 11754, 11883], [11753, 11883, 11882], [11754, 11755, 11883], [11755, 11884, 11883], [11755, 11756, 11885], [11755, 11885, 11884], [11756, 11757, 11885], [11757, 11886, 11885], [11757, 11758, 11887], [11757, 11887, 11886], [11758, 11759, 11887], [11759, 11888, 11887], [11759, 11760, 11889], [11759, 11889, 11888], [11760, 11761, 11889], [11761, 11890, 11889], [11761, 11762, 11891], [11761, 11891, 11890], [11762, 11763, 11891], [11763, 11892, 11891], [11763, 11764, 11893], [11763, 11893, 11892], [11764, 11765, 11893], [11765, 11894, 11893], [11765, 11766, 11895], [11765, 11895, 11894], [11766, 11767, 11895], [11767, 11896, 11895], [11767, 11768, 11897], [11767, 11897, 11896], [11768, 11769, 11897], [11769, 11898, 11897], [11769, 11770, 11899], [11769, 11899, 11898], [11770, 11771, 11899], [11771, 11900, 11899], [11771, 11772, 11901], [11771, 11901, 11900], [11772, 11773, 11901], [11773, 11902, 11901], [11773, 11774, 11903], [11773, 11903, 11902], [11774, 11775, 11903], [11775, 11904, 11903], [11775, 11776, 11905], [11775, 11905, 11904], [11776, 11777, 11905], [11777, 11906, 11905], [11777, 11778, 11907], [11777, 11907, 11906], [11778, 11779, 11907], [11779, 11908, 11907], [11779, 11780, 11909], [11779, 11909, 11908], [11780, 11781, 11909], [11781, 11910, 11909], [11781, 11782, 11911], [11781, 11911, 11910], [11782, 11783, 11911], [11783, 11912, 11911], [11783, 11784, 11913], [11783, 11913, 11912], [11785, 11786, 11915], [11785, 11915, 11914], [11786, 11787, 11915], [11787, 11916, 11915], [11787, 11788, 11917], [11787, 11917, 11916], [11788, 11789, 11917], [11789, 11918, 11917], [11789, 11790, 11919], [11789, 11919, 11918], [11790, 11791, 11919], [11791, 11920, 11919], [11791, 11792, 11921], [11791, 11921, 11920], [11792, 11793, 11921], [11793, 11922, 11921], [11793, 11794, 11923], [11793, 11923, 11922], [11794, 11795, 11923], [11795, 11924, 11923], [11795, 11796, 11925], [11795, 11925, 11924], [11796, 11797, 11925], [11797, 11926, 11925], [11797, 11798, 11927], [11797, 11927, 11926], [11798, 11799, 11927], [11799, 11928, 11927], [11799, 11800, 11929], [11799, 11929, 11928], [11800, 11801, 11929], [11801, 11930, 11929], [11801, 11802, 11931], [11801, 11931, 11930], [11802, 11803, 11931], [11803, 11932, 11931], [11803, 11804, 11933], [11803, 11933, 11932], [11804, 11805, 11933], [11805, 11934, 11933], [11805, 11806, 11935], [11805, 11935, 11934], [11806, 11807, 11935], [11807, 11936, 11935], [11807, 11808, 11937], [11807, 11937, 11936], [11808, 11809, 11937], [11809, 11938, 11937], [11809, 11810, 11939], [11809, 11939, 11938], [11810, 11811, 11939], [11811, 11940, 11939], [11811, 11812, 11941], [11811, 11941, 11940], [11812, 11813, 11941], [11813, 11942, 11941], [11813, 11814, 11943], [11813, 11943, 11942], [11814, 11815, 11943], [11815, 11944, 11943], [11815, 11816, 11945], [11815, 11945, 11944], [11816, 11817, 11945], [11817, 11946, 11945], [11817, 11818, 11947], [11817, 11947, 11946], [11818, 11819, 11947], [11819, 11948, 11947], [11819, 11820, 11949], [11819, 11949, 11948], [11820, 11821, 11949], [11821, 11950, 11949], [11821, 11822, 11951], [11821, 11951, 11950], [11822, 11823, 11951], [11823, 11952, 
11951], [11823, 11824, 11953], [11823, 11953, 11952], [11824, 11825, 11953], [11825, 11954, 11953], [11825, 11826, 11955], [11825, 11955, 11954], [11826, 11827, 11955], [11827, 11956, 11955], [11827, 11828, 11957], [11827, 11957, 11956], [11828, 11829, 11957], [11829, 11958, 11957], [11829, 11830, 11959], [11829, 11959, 11958], [11830, 11831, 11959], [11831, 11960, 11959], [11831, 11832, 11961], [11831, 11961, 11960], [11832, 11833, 11961], [11833, 11962, 11961], [11833, 11834, 11963], [11833, 11963, 11962], [11834, 11835, 11963], [11835, 11964, 11963], [11835, 11836, 11965], [11835, 11965, 11964], [11836, 11837, 11965], [11837, 11966, 11965], [11837, 11838, 11967], [11837, 11967, 11966], [11838, 11839, 11967], [11839, 11968, 11967], [11839, 11840, 11969], [11839, 11969, 11968], [11840, 11841, 11969], [11841, 11970, 11969], [11841, 11842, 11971], [11841, 11971, 11970], [11842, 11843, 11971], [11843, 11972, 11971], [11843, 11844, 11973], [11843, 11973, 11972], [11844, 11845, 11973], [11845, 11974, 11973], [11845, 11846, 11975], [11845, 11975, 11974], [11846, 11847, 11975], [11847, 11976, 11975], [11847, 11848, 11977], [11847, 11977, 11976], [11848, 11849, 11977], [11849, 11978, 11977], [11849, 11850, 11979], [11849, 11979, 11978], [11850, 11851, 11979], [11851, 11980, 11979], [11851, 11852, 11981], [11851, 11981, 11980], [11852, 11853, 11981], [11853, 11982, 11981], [11853, 11854, 11983], [11853, 11983, 11982], [11854, 11855, 11983], [11855, 11984, 11983], [11855, 11856, 11985], [11855, 11985, 11984], [11856, 11857, 11985], [11857, 11986, 11985], [11857, 11858, 11987], [11857, 11987, 11986], [11858, 11859, 11987], [11859, 11988, 11987], [11859, 11860, 11989], [11859, 11989, 11988], [11860, 11861, 11989], [11861, 11990, 11989], [11861, 11862, 11991], [11861, 11991, 11990], [11862, 11863, 11991], [11863, 11992, 11991], [11863, 11864, 11993], [11863, 11993, 11992], [11864, 11865, 11993], [11865, 11994, 11993], [11865, 11866, 11995], [11865, 11995, 11994], [11866, 11867, 11995], [11867, 11996, 11995], [11867, 11868, 11997], [11867, 11997, 11996], [11868, 11869, 11997], [11869, 11998, 11997], [11869, 11870, 11999], [11869, 11999, 11998], [11870, 11871, 11999], [11871, 12000, 11999], [11871, 11872, 12001], [11871, 12001, 12000], [11872, 11873, 12001], [11873, 12002, 12001], [11873, 11874, 12003], [11873, 12003, 12002], [11874, 11875, 12003], [11875, 12004, 12003], [11875, 11876, 12005], [11875, 12005, 12004], [11876, 11877, 12005], [11877, 12006, 12005], [11877, 11878, 12007], [11877, 12007, 12006], [11878, 11879, 12007], [11879, 12008, 12007], [11879, 11880, 12009], [11879, 12009, 12008], [11880, 11881, 12009], [11881, 12010, 12009], [11881, 11882, 12011], [11881, 12011, 12010], [11882, 11883, 12011], [11883, 12012, 12011], [11883, 11884, 12013], [11883, 12013, 12012], [11884, 11885, 12013], [11885, 12014, 12013], [11885, 11886, 12015], [11885, 12015, 12014], [11886, 11887, 12015], [11887, 12016, 12015], [11887, 11888, 12017], [11887, 12017, 12016], [11888, 11889, 12017], [11889, 12018, 12017], [11889, 11890, 12019], [11889, 12019, 12018], [11890, 11891, 12019], [11891, 12020, 12019], [11891, 11892, 12021], [11891, 12021, 12020], [11892, 11893, 12021], [11893, 12022, 12021], [11893, 11894, 12023], [11893, 12023, 12022], [11894, 11895, 12023], [11895, 12024, 12023], [11895, 11896, 12025], [11895, 12025, 12024], [11896, 11897, 12025], [11897, 12026, 12025], [11897, 11898, 12027], [11897, 12027, 12026], [11898, 11899, 12027], [11899, 12028, 12027], [11899, 11900, 12029], [11899, 12029, 12028], 
[11900, 11901, 12029], [11901, 12030, 12029], [11901, 11902, 12031], [11901, 12031, 12030], [11902, 11903, 12031], [11903, 12032, 12031], [11903, 11904, 12033], [11903, 12033, 12032], [11904, 11905, 12033], [11905, 12034, 12033], [11905, 11906, 12035], [11905, 12035, 12034], [11906, 11907, 12035], [11907, 12036, 12035], [11907, 11908, 12037], [11907, 12037, 12036], [11908, 11909, 12037], [11909, 12038, 12037], [11909, 11910, 12039], [11909, 12039, 12038], [11910, 11911, 12039], [11911, 12040, 12039], [11911, 11912, 12041], [11911, 12041, 12040], [11912, 11913, 12041], [11913, 12042, 12041], [11914, 11915, 12043], [11915, 12044, 12043], [11915, 11916, 12045], [11915, 12045, 12044], [11916, 11917, 12045], [11917, 12046, 12045], [11917, 11918, 12047], [11917, 12047, 12046], [11918, 11919, 12047], [11919, 12048, 12047], [11919, 11920, 12049], [11919, 12049, 12048], [11920, 11921, 12049], [11921, 12050, 12049], [11921, 11922, 12051], [11921, 12051, 12050], [11922, 11923, 12051], [11923, 12052, 12051], [11923, 11924, 12053], [11923, 12053, 12052], [11924, 11925, 12053], [11925, 12054, 12053], [11925, 11926, 12055], [11925, 12055, 12054], [11926, 11927, 12055], [11927, 12056, 12055], [11927, 11928, 12057], [11927, 12057, 12056], [11928, 11929, 12057], [11929, 12058, 12057], [11929, 11930, 12059], [11929, 12059, 12058], [11930, 11931, 12059], [11931, 12060, 12059], [11931, 11932, 12061], [11931, 12061, 12060], [11932, 11933, 12061], [11933, 12062, 12061], [11933, 11934, 12063], [11933, 12063, 12062], [11934, 11935, 12063], [11935, 12064, 12063], [11935, 11936, 12065], [11935, 12065, 12064], [11936, 11937, 12065], [11937, 12066, 12065], [11937, 11938, 12067], [11937, 12067, 12066], [11938, 11939, 12067], [11939, 12068, 12067], [11939, 11940, 12069], [11939, 12069, 12068], [11940, 11941, 12069], [11941, 12070, 12069], [11941, 11942, 12071], [11941, 12071, 12070], [11942, 11943, 12071], [11943, 12072, 12071], [11943, 11944, 12073], [11943, 12073, 12072], [11944, 11945, 12073], [11945, 12074, 12073], [11945, 11946, 12075], [11945, 12075, 12074], [11946, 11947, 12075], [11947, 12076, 12075], [11947, 11948, 12077], [11947, 12077, 12076], [11948, 11949, 12077], [11949, 12078, 12077], [11949, 11950, 12079], [11949, 12079, 12078], [11950, 11951, 12079], [11951, 12080, 12079], [11951, 11952, 12081], [11951, 12081, 12080], [11952, 11953, 12081], [11953, 12082, 12081], [11953, 11954, 12083], [11953, 12083, 12082], [11954, 11955, 12083], [11955, 12084, 12083], [11955, 11956, 12085], [11955, 12085, 12084], [11956, 11957, 12085], [11957, 12086, 12085], [11957, 11958, 12087], [11957, 12087, 12086], [11958, 11959, 12087], [11959, 12088, 12087], [11959, 11960, 12089], [11959, 12089, 12088], [11960, 11961, 12089], [11961, 12090, 12089], [11961, 11962, 12091], [11961, 12091, 12090], [11962, 11963, 12091], [11963, 12092, 12091], [11963, 11964, 12093], [11963, 12093, 12092], [11964, 11965, 12093], [11965, 12094, 12093], [11965, 11966, 12095], [11965, 12095, 12094], [11966, 11967, 12095], [11967, 12096, 12095], [11967, 11968, 12097], [11967, 12097, 12096], [11968, 11969, 12097], [11969, 12098, 12097], [11969, 11970, 12099], [11969, 12099, 12098], [11970, 11971, 12099], [11971, 12100, 12099], [11971, 11972, 12101], [11971, 12101, 12100], [11972, 11973, 12101], [11973, 12102, 12101], [11973, 11974, 12103], [11973, 12103, 12102], [11974, 11975, 12103], [11975, 12104, 12103], [11975, 11976, 12105], [11975, 12105, 12104], [11976, 11977, 12105], [11977, 12106, 12105], [11977, 11978, 12107], [11977, 12107, 12106], [11978, 
11979, 12107], [11979, 12108, 12107], [11979, 11980, 12109], [11979, 12109, 12108], [11980, 11981, 12109], [11981, 12110, 12109], [11981, 11982, 12111], [11981, 12111, 12110], [11982, 11983, 12111], [11983, 12112, 12111], [11983, 11984, 12113], [11983, 12113, 12112], [11984, 11985, 12113], [11985, 12114, 12113], [11985, 11986, 12115], [11985, 12115, 12114], [11986, 11987, 12115], [11987, 12116, 12115], [11987, 11988, 12117], [11987, 12117, 12116], [11988, 11989, 12117], [11989, 12118, 12117], [11989, 11990, 12119], [11989, 12119, 12118], [11990, 11991, 12119], [11991, 12120, 12119], [11991, 11992, 12121], [11991, 12121, 12120], [11992, 11993, 12121], [11993, 12122, 12121], [11993, 11994, 12123], [11993, 12123, 12122], [11994, 11995, 12123], [11995, 12124, 12123], [11995, 11996, 12125], [11995, 12125, 12124], [11996, 11997, 12125], [11997, 12126, 12125], [11997, 11998, 12127], [11997, 12127, 12126], [11998, 11999, 12127], [11999, 12128, 12127], [11999, 12000, 12129], [11999, 12129, 12128], [12000, 12001, 12129], [12001, 12130, 12129], [12001, 12002, 12131], [12001, 12131, 12130], [12002, 12003, 12131], [12003, 12132, 12131], [12003, 12004, 12133], [12003, 12133, 12132], [12004, 12005, 12133], [12005, 12134, 12133], [12005, 12006, 12135], [12005, 12135, 12134], [12006, 12007, 12135], [12007, 12136, 12135], [12007, 12008, 12137], [12007, 12137, 12136], [12008, 12009, 12137], [12009, 12138, 12137], [12009, 12010, 12139], [12009, 12139, 12138], [12010, 12011, 12139], [12011, 12140, 12139], [12011, 12012, 12141], [12011, 12141, 12140], [12012, 12013, 12141], [12013, 12142, 12141], [12013, 12014, 12143], [12013, 12143, 12142], [12014, 12015, 12143], [12015, 12144, 12143], [12015, 12016, 12145], [12015, 12145, 12144], [12016, 12017, 12145], [12017, 12146, 12145], [12017, 12018, 12147], [12017, 12147, 12146], [12018, 12019, 12147], [12019, 12148, 12147], [12019, 12020, 12149], [12019, 12149, 12148], [12020, 12021, 12149], [12021, 12150, 12149], [12021, 12022, 12151], [12021, 12151, 12150], [12022, 12023, 12151], [12023, 12152, 12151], [12023, 12024, 12153], [12023, 12153, 12152], [12024, 12025, 12153], [12025, 12154, 12153], [12025, 12026, 12155], [12025, 12155, 12154], [12026, 12027, 12155], [12027, 12156, 12155], [12027, 12028, 12157], [12027, 12157, 12156], [12028, 12029, 12157], [12029, 12158, 12157], [12029, 12030, 12159], [12029, 12159, 12158], [12030, 12031, 12159], [12031, 12160, 12159], [12031, 12032, 12161], [12031, 12161, 12160], [12032, 12033, 12161], [12033, 12162, 12161], [12033, 12034, 12163], [12033, 12163, 12162], [12034, 12035, 12163], [12035, 12164, 12163], [12035, 12036, 12165], [12035, 12165, 12164], [12036, 12037, 12165], [12037, 12166, 12165], [12037, 12038, 12167], [12037, 12167, 12166], [12038, 12039, 12167], [12039, 12168, 12167], [12039, 12040, 12169], [12039, 12169, 12168], [12040, 12041, 12169], [12041, 12170, 12169], [12041, 12042, 12171], [12041, 12171, 12170], [12043, 12044, 12173], [12043, 12173, 12172], [12044, 12045, 12173], [12045, 12174, 12173], [12045, 12046, 12175], [12045, 12175, 12174], [12046, 12047, 12175], [12047, 12176, 12175], [12047, 12048, 12177], [12047, 12177, 12176], [12048, 12049, 12177], [12049, 12178, 12177], [12049, 12050, 12179], [12049, 12179, 12178], [12050, 12051, 12179], [12051, 12180, 12179], [12051, 12052, 12181], [12051, 12181, 12180], [12052, 12053, 12181], [12053, 12182, 12181], [12053, 12054, 12183], [12053, 12183, 12182], [12054, 12055, 12183], [12055, 12184, 12183], [12055, 12056, 12185], [12055, 12185, 12184], [12056, 12057, 
12185], [12057, 12186, 12185], [12057, 12058, 12187], [12057, 12187, 12186], [12058, 12059, 12187], [12059, 12188, 12187], [12059, 12060, 12189], [12059, 12189, 12188], [12060, 12061, 12189], [12061, 12190, 12189], [12061, 12062, 12191], [12061, 12191, 12190], [12062, 12063, 12191], [12063, 12192, 12191], [12063, 12064, 12193], [12063, 12193, 12192], [12064, 12065, 12193], [12065, 12194, 12193], [12065, 12066, 12195], [12065, 12195, 12194], [12066, 12067, 12195], [12067, 12196, 12195], [12067, 12068, 12197], [12067, 12197, 12196], [12068, 12069, 12197], [12069, 12198, 12197], [12069, 12070, 12199], [12069, 12199, 12198], [12070, 12071, 12199], [12071, 12200, 12199], [12071, 12072, 12201], [12071, 12201, 12200], [12072, 12073, 12201], [12073, 12202, 12201], [12073, 12074, 12203], [12073, 12203, 12202], [12074, 12075, 12203], [12075, 12204, 12203], [12075, 12076, 12205], [12075, 12205, 12204], [12076, 12077, 12205], [12077, 12206, 12205], [12077, 12078, 12207], [12077, 12207, 12206], [12078, 12079, 12207], [12079, 12208, 12207], [12079, 12080, 12209], [12079, 12209, 12208], [12080, 12081, 12209], [12081, 12210, 12209], [12081, 12082, 12211], [12081, 12211, 12210], [12082, 12083, 12211], [12083, 12212, 12211], [12083, 12084, 12213], [12083, 12213, 12212], [12084, 12085, 12213], [12085, 12214, 12213], [12085, 12086, 12215], [12085, 12215, 12214], [12086, 12087, 12215], [12087, 12216, 12215], [12087, 12088, 12217], [12087, 12217, 12216], [12088, 12089, 12217], [12089, 12218, 12217], [12089, 12090, 12219], [12089, 12219, 12218], [12090, 12091, 12219], [12091, 12220, 12219], [12091, 12092, 12221], [12091, 12221, 12220], [12092, 12093, 12221], [12093, 12222, 12221], [12093, 12094, 12223], [12093, 12223, 12222], [12094, 12095, 12223], [12095, 12224, 12223], [12095, 12096, 12225], [12095, 12225, 12224], [12096, 12097, 12225], [12097, 12226, 12225], [12097, 12098, 12227], [12097, 12227, 12226], [12098, 12099, 12227], [12099, 12228, 12227], [12099, 12100, 12229], [12099, 12229, 12228], [12100, 12101, 12229], [12101, 12230, 12229], [12101, 12102, 12231], [12101, 12231, 12230], [12102, 12103, 12231], [12103, 12232, 12231], [12103, 12104, 12233], [12103, 12233, 12232], [12104, 12105, 12233], [12105, 12234, 12233], [12105, 12106, 12235], [12105, 12235, 12234], [12106, 12107, 12235], [12107, 12236, 12235], [12107, 12108, 12237], [12107, 12237, 12236], [12108, 12109, 12237], [12109, 12238, 12237], [12109, 12110, 12239], [12109, 12239, 12238], [12110, 12111, 12239], [12111, 12240, 12239], [12111, 12112, 12241], [12111, 12241, 12240], [12112, 12113, 12241], [12113, 12242, 12241], [12113, 12114, 12243], [12113, 12243, 12242], [12114, 12115, 12243], [12115, 12244, 12243], [12115, 12116, 12245], [12115, 12245, 12244], [12116, 12117, 12245], [12117, 12246, 12245], [12117, 12118, 12247], [12117, 12247, 12246], [12118, 12119, 12247], [12119, 12248, 12247], [12119, 12120, 12249], [12119, 12249, 12248], [12120, 12121, 12249], [12121, 12250, 12249], [12121, 12122, 12251], [12121, 12251, 12250], [12122, 12123, 12251], [12123, 12252, 12251], [12123, 12124, 12253], [12123, 12253, 12252], [12124, 12125, 12253], [12125, 12254, 12253], [12125, 12126, 12255], [12125, 12255, 12254], [12126, 12127, 12255], [12127, 12256, 12255], [12127, 12128, 12257], [12127, 12257, 12256], [12128, 12129, 12257], [12129, 12258, 12257], [12129, 12130, 12259], [12129, 12259, 12258], [12130, 12131, 12259], [12131, 12260, 12259], [12131, 12132, 12261], [12131, 12261, 12260], [12132, 12133, 12261], [12133, 12262, 12261], [12133, 12134, 12263], 
[12133, 12263, 12262], [12134, 12135, 12263], [12135, 12264, 12263], [12135, 12136, 12265], [12135, 12265, 12264], [12136, 12137, 12265], [12137, 12266, 12265], [12137, 12138, 12267], [12137, 12267, 12266], [12138, 12139, 12267], [12139, 12268, 12267], [12139, 12140, 12269], [12139, 12269, 12268], [12140, 12141, 12269], [12141, 12270, 12269], [12141, 12142, 12271], [12141, 12271, 12270], [12142, 12143, 12271], [12143, 12272, 12271], [12143, 12144, 12273], [12143, 12273, 12272], [12144, 12145, 12273], [12145, 12274, 12273], [12145, 12146, 12275], [12145, 12275, 12274], [12146, 12147, 12275], [12147, 12276, 12275], [12147, 12148, 12277], [12147, 12277, 12276], [12148, 12149, 12277], [12149, 12278, 12277], [12149, 12150, 12279], [12149, 12279, 12278], [12150, 12151, 12279], [12151, 12280, 12279], [12151, 12152, 12281], [12151, 12281, 12280], [12152, 12153, 12281], [12153, 12282, 12281], [12153, 12154, 12283], [12153, 12283, 12282], [12154, 12155, 12283], [12155, 12284, 12283], [12155, 12156, 12285], [12155, 12285, 12284], [12156, 12157, 12285], [12157, 12286, 12285], [12157, 12158, 12287], [12157, 12287, 12286], [12158, 12159, 12287], [12159, 12288, 12287], [12159, 12160, 12289], [12159, 12289, 12288], [12160, 12161, 12289], [12161, 12290, 12289], [12161, 12162, 12291], [12161, 12291, 12290], [12162, 12163, 12291], [12163, 12292, 12291], [12163, 12164, 12293], [12163, 12293, 12292], [12164, 12165, 12293], [12165, 12294, 12293], [12165, 12166, 12295], [12165, 12295, 12294], [12166, 12167, 12295], [12167, 12296, 12295], [12167, 12168, 12297], [12167, 12297, 12296], [12168, 12169, 12297], [12169, 12298, 12297], [12169, 12170, 12299], [12169, 12299, 12298], [12170, 12171, 12299], [12171, 12300, 12299], [12172, 12173, 12301], [12173, 12302, 12301], [12173, 12174, 12303], [12173, 12303, 12302], [12174, 12175, 12303], [12175, 12304, 12303], [12175, 12176, 12305], [12175, 12305, 12304], [12176, 12177, 12305], [12177, 12306, 12305], [12177, 12178, 12307], [12177, 12307, 12306], [12178, 12179, 12307], [12179, 12308, 12307], [12179, 12180, 12309], [12179, 12309, 12308], [12180, 12181, 12309], [12181, 12310, 12309], [12181, 12182, 12311], [12181, 12311, 12310], [12182, 12183, 12311], [12183, 12312, 12311], [12183, 12184, 12313], [12183, 12313, 12312], [12184, 12185, 12313], [12185, 12314, 12313], [12185, 12186, 12315], [12185, 12315, 12314], [12186, 12187, 12315], [12187, 12316, 12315], [12187, 12188, 12317], [12187, 12317, 12316], [12188, 12189, 12317], [12189, 12318, 12317], [12189, 12190, 12319], [12189, 12319, 12318], [12190, 12191, 12319], [12191, 12320, 12319], [12191, 12192, 12321], [12191, 12321, 12320], [12192, 12193, 12321], [12193, 12322, 12321], [12193, 12194, 12323], [12193, 12323, 12322], [12194, 12195, 12323], [12195, 12324, 12323], [12195, 12196, 12325], [12195, 12325, 12324], [12196, 12197, 12325], [12197, 12326, 12325], [12197, 12198, 12327], [12197, 12327, 12326], [12198, 12199, 12327], [12199, 12328, 12327], [12199, 12200, 12329], [12199, 12329, 12328], [12200, 12201, 12329], [12201, 12330, 12329], [12201, 12202, 12331], [12201, 12331, 12330], [12202, 12203, 12331], [12203, 12332, 12331], [12203, 12204, 12333], [12203, 12333, 12332], [12204, 12205, 12333], [12205, 12334, 12333], [12205, 12206, 12335], [12205, 12335, 12334], [12206, 12207, 12335], [12207, 12336, 12335], [12207, 12208, 12337], [12207, 12337, 12336], [12208, 12209, 12337], [12209, 12338, 12337], [12209, 12210, 12339], [12209, 12339, 12338], [12210, 12211, 12339], [12211, 12340, 12339], [12211, 12212, 12341], [12211, 
12341, 12340], [12212, 12213, 12341], [12213, 12342, 12341], [12213, 12214, 12343], [12213, 12343, 12342], [12214, 12215, 12343], [12215, 12344, 12343], [12215, 12216, 12345], [12215, 12345, 12344], [12216, 12217, 12345], [12217, 12346, 12345], [12217, 12218, 12347], [12217, 12347, 12346], [12218, 12219, 12347], [12219, 12348, 12347], [12219, 12220, 12349], [12219, 12349, 12348], [12220, 12221, 12349], [12221, 12350, 12349], [12221, 12222, 12351], [12221, 12351, 12350], [12222, 12223, 12351], [12223, 12352, 12351], [12223, 12224, 12353], [12223, 12353, 12352], [12224, 12225, 12353], [12225, 12354, 12353], [12225, 12226, 12355], [12225, 12355, 12354], [12226, 12227, 12355], [12227, 12356, 12355], [12227, 12228, 12357], [12227, 12357, 12356], [12228, 12229, 12357], [12229, 12358, 12357], [12229, 12230, 12359], [12229, 12359, 12358], [12230, 12231, 12359], [12231, 12360, 12359], [12231, 12232, 12361], [12231, 12361, 12360], [12232, 12233, 12361], [12233, 12362, 12361], [12233, 12234, 12363], [12233, 12363, 12362], [12234, 12235, 12363], [12235, 12364, 12363], [12235, 12236, 12365], [12235, 12365, 12364], [12236, 12237, 12365], [12237, 12366, 12365], [12237, 12238, 12367], [12237, 12367, 12366], [12238, 12239, 12367], [12239, 12368, 12367], [12239, 12240, 12369], [12239, 12369, 12368], [12240, 12241, 12369], [12241, 12370, 12369], [12241, 12242, 12371], [12241, 12371, 12370], [12242, 12243, 12371], [12243, 12372, 12371], [12243, 12244, 12373], [12243, 12373, 12372], [12244, 12245, 12373], [12245, 12374, 12373], [12245, 12246, 12375], [12245, 12375, 12374], [12246, 12247, 12375], [12247, 12376, 12375], [12247, 12248, 12377], [12247, 12377, 12376], [12248, 12249, 12377], [12249, 12378, 12377], [12249, 12250, 12379], [12249, 12379, 12378], [12250, 12251, 12379], [12251, 12380, 12379], [12251, 12252, 12381], [12251, 12381, 12380], [12252, 12253, 12381], [12253, 12382, 12381], [12253, 12254, 12383], [12253, 12383, 12382], [12254, 12255, 12383], [12255, 12384, 12383], [12255, 12256, 12385], [12255, 12385, 12384], [12256, 12257, 12385], [12257, 12386, 12385], [12257, 12258, 12387], [12257, 12387, 12386], [12258, 12259, 12387], [12259, 12388, 12387], [12259, 12260, 12389], [12259, 12389, 12388], [12260, 12261, 12389], [12261, 12390, 12389], [12261, 12262, 12391], [12261, 12391, 12390], [12262, 12263, 12391], [12263, 12392, 12391], [12263, 12264, 12393], [12263, 12393, 12392], [12264, 12265, 12393], [12265, 12394, 12393], [12265, 12266, 12395], [12265, 12395, 12394], [12266, 12267, 12395], [12267, 12396, 12395], [12267, 12268, 12397], [12267, 12397, 12396], [12268, 12269, 12397], [12269, 12398, 12397], [12269, 12270, 12399], [12269, 12399, 12398], [12270, 12271, 12399], [12271, 12400, 12399], [12271, 12272, 12401], [12271, 12401, 12400], [12272, 12273, 12401], [12273, 12402, 12401], [12273, 12274, 12403], [12273, 12403, 12402], [12274, 12275, 12403], [12275, 12404, 12403], [12275, 12276, 12405], [12275, 12405, 12404], [12276, 12277, 12405], [12277, 12406, 12405], [12277, 12278, 12407], [12277, 12407, 12406], [12278, 12279, 12407], [12279, 12408, 12407], [12279, 12280, 12409], [12279, 12409, 12408], [12280, 12281, 12409], [12281, 12410, 12409], [12281, 12282, 12411], [12281, 12411, 12410], [12282, 12283, 12411], [12283, 12412, 12411], [12283, 12284, 12413], [12283, 12413, 12412], [12284, 12285, 12413], [12285, 12414, 12413], [12285, 12286, 12415], [12285, 12415, 12414], [12286, 12287, 12415], [12287, 12416, 12415], [12287, 12288, 12417], [12287, 12417, 12416], [12288, 12289, 12417], [12289, 12418, 
12417], [12289, 12290, 12419], [12289, 12419, 12418], [12290, 12291, 12419], [12291, 12420, 12419], [12291, 12292, 12421], [12291, 12421, 12420], [12292, 12293, 12421], [12293, 12422, 12421], [12293, 12294, 12423], [12293, 12423, 12422], [12294, 12295, 12423], [12295, 12424, 12423], [12295, 12296, 12425], [12295, 12425, 12424], [12296, 12297, 12425], [12297, 12426, 12425], [12297, 12298, 12427], [12297, 12427, 12426], [12298, 12299, 12427], [12299, 12428, 12427], [12299, 12300, 12429], [12299, 12429, 12428], [12301, 12302, 12431], [12301, 12431, 12430], [12302, 12303, 12431], [12303, 12432, 12431], [12303, 12304, 12433], [12303, 12433, 12432], [12304, 12305, 12433], [12305, 12434, 12433], [12305, 12306, 12435], [12305, 12435, 12434], [12306, 12307, 12435], [12307, 12436, 12435], [12307, 12308, 12437], [12307, 12437, 12436], [12308, 12309, 12437], [12309, 12438, 12437], [12309, 12310, 12439], [12309, 12439, 12438], [12310, 12311, 12439], [12311, 12440, 12439], [12311, 12312, 12441], [12311, 12441, 12440], [12312, 12313, 12441], [12313, 12442, 12441], [12313, 12314, 12443], [12313, 12443, 12442], [12314, 12315, 12443], [12315, 12444, 12443], [12315, 12316, 12445], [12315, 12445, 12444], [12316, 12317, 12445], [12317, 12446, 12445], [12317, 12318, 12447], [12317, 12447, 12446], [12318, 12319, 12447], [12319, 12448, 12447], [12319, 12320, 12449], [12319, 12449, 12448], [12320, 12321, 12449], [12321, 12450, 12449], [12321, 12322, 12451], [12321, 12451, 12450], [12322, 12323, 12451], [12323, 12452, 12451], [12323, 12324, 12453], [12323, 12453, 12452], [12324, 12325, 12453], [12325, 12454, 12453], [12325, 12326, 12455], [12325, 12455, 12454], [12326, 12327, 12455], [12327, 12456, 12455], [12327, 12328, 12457], [12327, 12457, 12456], [12328, 12329, 12457], [12329, 12458, 12457], [12329, 12330, 12459], [12329, 12459, 12458], [12330, 12331, 12459], [12331, 12460, 12459], [12331, 12332, 12461], [12331, 12461, 12460], [12332, 12333, 12461], [12333, 12462, 12461], [12333, 12334, 12463], [12333, 12463, 12462], [12334, 12335, 12463], [12335, 12464, 12463], [12335, 12336, 12465], [12335, 12465, 12464], [12336, 12337, 12465], [12337, 12466, 12465], [12337, 12338, 12467], [12337, 12467, 12466], [12338, 12339, 12467], [12339, 12468, 12467], [12339, 12340, 12469], [12339, 12469, 12468], [12340, 12341, 12469], [12341, 12470, 12469], [12341, 12342, 12471], [12341, 12471, 12470], [12342, 12343, 12471], [12343, 12472, 12471], [12343, 12344, 12473], [12343, 12473, 12472], [12344, 12345, 12473], [12345, 12474, 12473], [12345, 12346, 12475], [12345, 12475, 12474], [12346, 12347, 12475], [12347, 12476, 12475], [12347, 12348, 12477], [12347, 12477, 12476], [12348, 12349, 12477], [12349, 12478, 12477], [12349, 12350, 12479], [12349, 12479, 12478], [12350, 12351, 12479], [12351, 12480, 12479], [12351, 12352, 12481], [12351, 12481, 12480], [12352, 12353, 12481], [12353, 12482, 12481], [12353, 12354, 12483], [12353, 12483, 12482], [12354, 12355, 12483], [12355, 12484, 12483], [12355, 12356, 12485], [12355, 12485, 12484], [12356, 12357, 12485], [12357, 12486, 12485], [12357, 12358, 12487], [12357, 12487, 12486], [12358, 12359, 12487], [12359, 12488, 12487], [12359, 12360, 12489], [12359, 12489, 12488], [12360, 12361, 12489], [12361, 12490, 12489], [12361, 12362, 12491], [12361, 12491, 12490], [12362, 12363, 12491], [12363, 12492, 12491], [12363, 12364, 12493], [12363, 12493, 12492], [12364, 12365, 12493], [12365, 12494, 12493], [12365, 12366, 12495], [12365, 12495, 12494], [12366, 12367, 12495], [12367, 12496, 12495], 
[12367, 12368, 12497], [12367, 12497, 12496], [12368, 12369, 12497], [12369, 12498, 12497], [12369, 12370, 12499], [12369, 12499, 12498], [12370, 12371, 12499], [12371, 12500, 12499], [12371, 12372, 12501], [12371, 12501, 12500], [12372, 12373, 12501], [12373, 12502, 12501], [12373, 12374, 12503], [12373, 12503, 12502], [12374, 12375, 12503], [12375, 12504, 12503], [12375, 12376, 12505], [12375, 12505, 12504], [12376, 12377, 12505], [12377, 12506, 12505], [12377, 12378, 12507], [12377, 12507, 12506], [12378, 12379, 12507], [12379, 12508, 12507], [12379, 12380, 12509], [12379, 12509, 12508], [12380, 12381, 12509], [12381, 12510, 12509], [12381, 12382, 12511], [12381, 12511, 12510], [12382, 12383, 12511], [12383, 12512, 12511], [12383, 12384, 12513], [12383, 12513, 12512], [12384, 12385, 12513], [12385, 12514, 12513], [12385, 12386, 12515], [12385, 12515, 12514], [12386, 12387, 12515], [12387, 12516, 12515], [12387, 12388, 12517], [12387, 12517, 12516], [12388, 12389, 12517], [12389, 12518, 12517], [12389, 12390, 12519], [12389, 12519, 12518], [12390, 12391, 12519], [12391, 12520, 12519], [12391, 12392, 12521], [12391, 12521, 12520], [12392, 12393, 12521], [12393, 12522, 12521], [12393, 12394, 12523], [12393, 12523, 12522], [12394, 12395, 12523], [12395, 12524, 12523], [12395, 12396, 12525], [12395, 12525, 12524], [12396, 12397, 12525], [12397, 12526, 12525], [12397, 12398, 12527], [12397, 12527, 12526], [12398, 12399, 12527], [12399, 12528, 12527], [12399, 12400, 12529], [12399, 12529, 12528], [12400, 12401, 12529], [12401, 12530, 12529], [12401, 12402, 12531], [12401, 12531, 12530], [12402, 12403, 12531], [12403, 12532, 12531], [12403, 12404, 12533], [12403, 12533, 12532], [12404, 12405, 12533], [12405, 12534, 12533], [12405, 12406, 12535], [12405, 12535, 12534], [12406, 12407, 12535], [12407, 12536, 12535], [12407, 12408, 12537], [12407, 12537, 12536], [12408, 12409, 12537], [12409, 12538, 12537], [12409, 12410, 12539], [12409, 12539, 12538], [12410, 12411, 12539], [12411, 12540, 12539], [12411, 12412, 12541], [12411, 12541, 12540], [12412, 12413, 12541], [12413, 12542, 12541], [12413, 12414, 12543], [12413, 12543, 12542], [12414, 12415, 12543], [12415, 12544, 12543], [12415, 12416, 12545], [12415, 12545, 12544], [12416, 12417, 12545], [12417, 12546, 12545], [12417, 12418, 12547], [12417, 12547, 12546], [12418, 12419, 12547], [12419, 12548, 12547], [12419, 12420, 12549], [12419, 12549, 12548], [12420, 12421, 12549], [12421, 12550, 12549], [12421, 12422, 12551], [12421, 12551, 12550], [12422, 12423, 12551], [12423, 12552, 12551], [12423, 12424, 12553], [12423, 12553, 12552], [12424, 12425, 12553], [12425, 12554, 12553], [12425, 12426, 12555], [12425, 12555, 12554], [12426, 12427, 12555], [12427, 12556, 12555], [12427, 12428, 12557], [12427, 12557, 12556], [12428, 12429, 12557], [12429, 12558, 12557], [12430, 12431, 12559], [12431, 12560, 12559], [12431, 12432, 12561], [12431, 12561, 12560], [12432, 12433, 12561], [12433, 12562, 12561], [12433, 12434, 12563], [12433, 12563, 12562], [12434, 12435, 12563], [12435, 12564, 12563], [12435, 12436, 12565], [12435, 12565, 12564], [12436, 12437, 12565], [12437, 12566, 12565], [12437, 12438, 12567], [12437, 12567, 12566], [12438, 12439, 12567], [12439, 12568, 12567], [12439, 12440, 12569], [12439, 12569, 12568], [12440, 12441, 12569], [12441, 12570, 12569], [12441, 12442, 12571], [12441, 12571, 12570], [12442, 12443, 12571], [12443, 12572, 12571], [12443, 12444, 12573], [12443, 12573, 12572], [12444, 12445, 12573], [12445, 12574, 12573], [12445, 
12446, 12575], [12445, 12575, 12574], [12446, 12447, 12575], [12447, 12576, 12575], [12447, 12448, 12577], [12447, 12577, 12576], [12448, 12449, 12577], [12449, 12578, 12577], [12449, 12450, 12579], [12449, 12579, 12578], [12450, 12451, 12579], [12451, 12580, 12579], [12451, 12452, 12581], [12451, 12581, 12580], [12452, 12453, 12581], [12453, 12582, 12581], [12453, 12454, 12583], [12453, 12583, 12582], [12454, 12455, 12583], [12455, 12584, 12583], [12455, 12456, 12585], [12455, 12585, 12584], [12456, 12457, 12585], [12457, 12586, 12585], [12457, 12458, 12587], [12457, 12587, 12586], [12458, 12459, 12587], [12459, 12588, 12587], [12459, 12460, 12589], [12459, 12589, 12588], [12460, 12461, 12589], [12461, 12590, 12589], [12461, 12462, 12591], [12461, 12591, 12590], [12462, 12463, 12591], [12463, 12592, 12591], [12463, 12464, 12593], [12463, 12593, 12592], [12464, 12465, 12593], [12465, 12594, 12593], [12465, 12466, 12595], [12465, 12595, 12594], [12466, 12467, 12595], [12467, 12596, 12595], [12467, 12468, 12597], [12467, 12597, 12596], [12468, 12469, 12597], [12469, 12598, 12597], [12469, 12470, 12599], [12469, 12599, 12598], [12470, 12471, 12599], [12471, 12600, 12599], [12471, 12472, 12601], [12471, 12601, 12600], [12472, 12473, 12601], [12473, 12602, 12601], [12473, 12474, 12603], [12473, 12603, 12602], [12474, 12475, 12603], [12475, 12604, 12603], [12475, 12476, 12605], [12475, 12605, 12604], [12476, 12477, 12605], [12477, 12606, 12605], [12477, 12478, 12607], [12477, 12607, 12606], [12478, 12479, 12607], [12479, 12608, 12607], [12479, 12480, 12609], [12479, 12609, 12608], [12480, 12481, 12609], [12481, 12610, 12609], [12481, 12482, 12611], [12481, 12611, 12610], [12482, 12483, 12611], [12483, 12612, 12611], [12483, 12484, 12613], [12483, 12613, 12612], [12484, 12485, 12613], [12485, 12614, 12613], [12485, 12486, 12615], [12485, 12615, 12614], [12486, 12487, 12615], [12487, 12616, 12615], [12487, 12488, 12617], [12487, 12617, 12616], [12488, 12489, 12617], [12489, 12618, 12617], [12489, 12490, 12619], [12489, 12619, 12618], [12490, 12491, 12619], [12491, 12620, 12619], [12491, 12492, 12621], [12491, 12621, 12620], [12492, 12493, 12621], [12493, 12622, 12621], [12493, 12494, 12623], [12493, 12623, 12622], [12494, 12495, 12623], [12495, 12624, 12623], [12495, 12496, 12625], [12495, 12625, 12624], [12496, 12497, 12625], [12497, 12626, 12625], [12497, 12498, 12627], [12497, 12627, 12626], [12498, 12499, 12627], [12499, 12628, 12627], [12499, 12500, 12629], [12499, 12629, 12628], [12500, 12501, 12629], [12501, 12630, 12629], [12501, 12502, 12631], [12501, 12631, 12630], [12502, 12503, 12631], [12503, 12632, 12631], [12503, 12504, 12633], [12503, 12633, 12632], [12504, 12505, 12633], [12505, 12634, 12633], [12505, 12506, 12635], [12505, 12635, 12634], [12506, 12507, 12635], [12507, 12636, 12635], [12507, 12508, 12637], [12507, 12637, 12636], [12508, 12509, 12637], [12509, 12638, 12637], [12509, 12510, 12639], [12509, 12639, 12638], [12510, 12511, 12639], [12511, 12640, 12639], [12511, 12512, 12641], [12511, 12641, 12640], [12512, 12513, 12641], [12513, 12642, 12641], [12513, 12514, 12643], [12513, 12643, 12642], [12514, 12515, 12643], [12515, 12644, 12643], [12515, 12516, 12645], [12515, 12645, 12644], [12516, 12517, 12645], [12517, 12646, 12645], [12517, 12518, 12647], [12517, 12647, 12646], [12518, 12519, 12647], [12519, 12648, 12647], [12519, 12520, 12649], [12519, 12649, 12648], [12520, 12521, 12649], [12521, 12650, 12649], [12521, 12522, 12651], [12521, 12651, 12650], [12522, 12523, 
12651], [12523, 12652, 12651], [12523, 12524, 12653], [12523, 12653, 12652], [12524, 12525, 12653], [12525, 12654, 12653], [12525, 12526, 12655], [12525, 12655, 12654], [12526, 12527, 12655], [12527, 12656, 12655], [12527, 12528, 12657], [12527, 12657, 12656], [12528, 12529, 12657], [12529, 12658, 12657], [12529, 12530, 12659], [12529, 12659, 12658], [12530, 12531, 12659], [12531, 12660, 12659], [12531, 12532, 12661], [12531, 12661, 12660], [12532, 12533, 12661], [12533, 12662, 12661], [12533, 12534, 12663], [12533, 12663, 12662], [12534, 12535, 12663], [12535, 12664, 12663], [12535, 12536, 12665], [12535, 12665, 12664], [12536, 12537, 12665], [12537, 12666, 12665], [12537, 12538, 12667], [12537, 12667, 12666], [12538, 12539, 12667], [12539, 12668, 12667], [12539, 12540, 12669], [12539, 12669, 12668], [12540, 12541, 12669], [12541, 12670, 12669], [12541, 12542, 12671], [12541, 12671, 12670], [12542, 12543, 12671], [12543, 12672, 12671], [12543, 12544, 12673], [12543, 12673, 12672], [12544, 12545, 12673], [12545, 12674, 12673], [12545, 12546, 12675], [12545, 12675, 12674], [12546, 12547, 12675], [12547, 12676, 12675], [12547, 12548, 12677], [12547, 12677, 12676], [12548, 12549, 12677], [12549, 12678, 12677], [12549, 12550, 12679], [12549, 12679, 12678], [12550, 12551, 12679], [12551, 12680, 12679], [12551, 12552, 12681], [12551, 12681, 12680], [12552, 12553, 12681], [12553, 12682, 12681], [12553, 12554, 12683], [12553, 12683, 12682], [12554, 12555, 12683], [12555, 12684, 12683], [12555, 12556, 12685], [12555, 12685, 12684], [12556, 12557, 12685], [12557, 12686, 12685], [12557, 12558, 12687], [12557, 12687, 12686], [12559, 12560, 12689], [12559, 12689, 12688], [12560, 12561, 12689], [12561, 12690, 12689], [12561, 12562, 12691], [12561, 12691, 12690], [12562, 12563, 12691], [12563, 12692, 12691], [12563, 12564, 12693], [12563, 12693, 12692], [12564, 12565, 12693], [12565, 12694, 12693], [12565, 12566, 12695], [12565, 12695, 12694], [12566, 12567, 12695], [12567, 12696, 12695], [12567, 12568, 12697], [12567, 12697, 12696], [12568, 12569, 12697], [12569, 12698, 12697], [12569, 12570, 12699], [12569, 12699, 12698], [12570, 12571, 12699], [12571, 12700, 12699], [12571, 12572, 12701], [12571, 12701, 12700], [12572, 12573, 12701], [12573, 12702, 12701], [12573, 12574, 12703], [12573, 12703, 12702], [12574, 12575, 12703], [12575, 12704, 12703], [12575, 12576, 12705], [12575, 12705, 12704], [12576, 12577, 12705], [12577, 12706, 12705], [12577, 12578, 12707], [12577, 12707, 12706], [12578, 12579, 12707], [12579, 12708, 12707], [12579, 12580, 12709], [12579, 12709, 12708], [12580, 12581, 12709], [12581, 12710, 12709], [12581, 12582, 12711], [12581, 12711, 12710], [12582, 12583, 12711], [12583, 12712, 12711], [12583, 12584, 12713], [12583, 12713, 12712], [12584, 12585, 12713], [12585, 12714, 12713], [12585, 12586, 12715], [12585, 12715, 12714], [12586, 12587, 12715], [12587, 12716, 12715], [12587, 12588, 12717], [12587, 12717, 12716], [12588, 12589, 12717], [12589, 12718, 12717], [12589, 12590, 12719], [12589, 12719, 12718], [12590, 12591, 12719], [12591, 12720, 12719], [12591, 12592, 12721], [12591, 12721, 12720], [12592, 12593, 12721], [12593, 12722, 12721], [12593, 12594, 12723], [12593, 12723, 12722], [12594, 12595, 12723], [12595, 12724, 12723], [12595, 12596, 12725], [12595, 12725, 12724], [12596, 12597, 12725], [12597, 12726, 12725], [12597, 12598, 12727], [12597, 12727, 12726], [12598, 12599, 12727], [12599, 12728, 12727], [12599, 12600, 12729], [12599, 12729, 12728], [12600, 12601, 12729], 
[12601, 12730, 12729], [12601, 12602, 12731], [12601, 12731, 12730], [12602, 12603, 12731], [12603, 12732, 12731], [12603, 12604, 12733], [12603, 12733, 12732], [12604, 12605, 12733], [12605, 12734, 12733], [12605, 12606, 12735], [12605, 12735, 12734], [12606, 12607, 12735], [12607, 12736, 12735], [12607, 12608, 12737], [12607, 12737, 12736], [12608, 12609, 12737], [12609, 12738, 12737], [12609, 12610, 12739], [12609, 12739, 12738], [12610, 12611, 12739], [12611, 12740, 12739], [12611, 12612, 12741], [12611, 12741, 12740], [12612, 12613, 12741], [12613, 12742, 12741], [12613, 12614, 12743], [12613, 12743, 12742], [12614, 12615, 12743], [12615, 12744, 12743], [12615, 12616, 12745], [12615, 12745, 12744], [12616, 12617, 12745], [12617, 12746, 12745], [12617, 12618, 12747], [12617, 12747, 12746], [12618, 12619, 12747], [12619, 12748, 12747], [12619, 12620, 12749], [12619, 12749, 12748], [12620, 12621, 12749], [12621, 12750, 12749], [12621, 12622, 12751], [12621, 12751, 12750], [12622, 12623, 12751], [12623, 12752, 12751], [12623, 12624, 12753], [12623, 12753, 12752], [12624, 12625, 12753], [12625, 12754, 12753], [12625, 12626, 12755], [12625, 12755, 12754], [12626, 12627, 12755], [12627, 12756, 12755], [12627, 12628, 12757], [12627, 12757, 12756], [12628, 12629, 12757], [12629, 12758, 12757], [12629, 12630, 12759], [12629, 12759, 12758], [12630, 12631, 12759], [12631, 12760, 12759], [12631, 12632, 12761], [12631, 12761, 12760], [12632, 12633, 12761], [12633, 12762, 12761], [12633, 12634, 12763], [12633, 12763, 12762], [12634, 12635, 12763], [12635, 12764, 12763], [12635, 12636, 12765], [12635, 12765, 12764], [12636, 12637, 12765], [12637, 12766, 12765], [12637, 12638, 12767], [12637, 12767, 12766], [12638, 12639, 12767], [12639, 12768, 12767], [12639, 12640, 12769], [12639, 12769, 12768], [12640, 12641, 12769], [12641, 12770, 12769], [12641, 12642, 12771], [12641, 12771, 12770], [12642, 12643, 12771], [12643, 12772, 12771], [12643, 12644, 12773], [12643, 12773, 12772], [12644, 12645, 12773], [12645, 12774, 12773], [12645, 12646, 12775], [12645, 12775, 12774], [12646, 12647, 12775], [12647, 12776, 12775], [12647, 12648, 12777], [12647, 12777, 12776], [12648, 12649, 12777], [12649, 12778, 12777], [12649, 12650, 12779], [12649, 12779, 12778], [12650, 12651, 12779], [12651, 12780, 12779], [12651, 12652, 12781], [12651, 12781, 12780], [12652, 12653, 12781], [12653, 12782, 12781], [12653, 12654, 12783], [12653, 12783, 12782], [12654, 12655, 12783], [12655, 12784, 12783], [12655, 12656, 12785], [12655, 12785, 12784], [12656, 12657, 12785], [12657, 12786, 12785], [12657, 12658, 12787], [12657, 12787, 12786], [12658, 12659, 12787], [12659, 12788, 12787], [12659, 12660, 12789], [12659, 12789, 12788], [12660, 12661, 12789], [12661, 12790, 12789], [12661, 12662, 12791], [12661, 12791, 12790], [12662, 12663, 12791], [12663, 12792, 12791], [12663, 12664, 12793], [12663, 12793, 12792], [12664, 12665, 12793], [12665, 12794, 12793], [12665, 12666, 12795], [12665, 12795, 12794], [12666, 12667, 12795], [12667, 12796, 12795], [12667, 12668, 12797], [12667, 12797, 12796], [12668, 12669, 12797], [12669, 12798, 12797], [12669, 12670, 12799], [12669, 12799, 12798], [12670, 12671, 12799], [12671, 12800, 12799], [12671, 12672, 12801], [12671, 12801, 12800], [12672, 12673, 12801], [12673, 12802, 12801], [12673, 12674, 12803], [12673, 12803, 12802], [12674, 12675, 12803], [12675, 12804, 12803], [12675, 12676, 12805], [12675, 12805, 12804], [12676, 12677, 12805], [12677, 12806, 12805], [12677, 12678, 12807], [12677, 
12807, 12806], [12678, 12679, 12807], [12679, 12808, 12807], [12679, 12680, 12809], [12679, 12809, 12808], [12680, 12681, 12809], [12681, 12810, 12809], [12681, 12682, 12811], [12681, 12811, 12810], [12682, 12683, 12811], [12683, 12812, 12811], [12683, 12684, 12813], [12683, 12813, 12812], [12684, 12685, 12813], [12685, 12814, 12813], [12685, 12686, 12815], [12685, 12815, 12814], [12686, 12687, 12815], [12687, 12816, 12815], [12688, 12689, 12817], [12689, 12818, 12817], [12689, 12690, 12819], [12689, 12819, 12818], [12690, 12691, 12819], [12691, 12820, 12819], [12691, 12692, 12821], [12691, 12821, 12820], [12692, 12693, 12821], [12693, 12822, 12821], [12693, 12694, 12823], [12693, 12823, 12822], [12694, 12695, 12823], [12695, 12824, 12823], [12695, 12696, 12825], [12695, 12825, 12824], [12696, 12697, 12825], [12697, 12826, 12825], [12697, 12698, 12827], [12697, 12827, 12826], [12698, 12699, 12827], [12699, 12828, 12827], [12699, 12700, 12829], [12699, 12829, 12828], [12700, 12701, 12829], [12701, 12830, 12829], [12701, 12702, 12831], [12701, 12831, 12830], [12702, 12703, 12831], [12703, 12832, 12831], [12703, 12704, 12833], [12703, 12833, 12832], [12704, 12705, 12833], [12705, 12834, 12833], [12705, 12706, 12835], [12705, 12835, 12834], [12706, 12707, 12835], [12707, 12836, 12835], [12707, 12708, 12837], [12707, 12837, 12836], [12708, 12709, 12837], [12709, 12838, 12837], [12709, 12710, 12839], [12709, 12839, 12838], [12710, 12711, 12839], [12711, 12840, 12839], [12711, 12712, 12841], [12711, 12841, 12840], [12712, 12713, 12841], [12713, 12842, 12841], [12713, 12714, 12843], [12713, 12843, 12842], [12714, 12715, 12843], [12715, 12844, 12843], [12715, 12716, 12845], [12715, 12845, 12844], [12716, 12717, 12845], [12717, 12846, 12845], [12717, 12718, 12847], [12717, 12847, 12846], [12718, 12719, 12847], [12719, 12848, 12847], [12719, 12720, 12849], [12719, 12849, 12848], [12720, 12721, 12849], [12721, 12850, 12849], [12721, 12722, 12851], [12721, 12851, 12850], [12722, 12723, 12851], [12723, 12852, 12851], [12723, 12724, 12853], [12723, 12853, 12852], [12724, 12725, 12853], [12725, 12854, 12853], [12725, 12726, 12855], [12725, 12855, 12854], [12726, 12727, 12855], [12727, 12856, 12855], [12727, 12728, 12857], [12727, 12857, 12856], [12728, 12729, 12857], [12729, 12858, 12857], [12729, 12730, 12859], [12729, 12859, 12858], [12730, 12731, 12859], [12731, 12860, 12859], [12731, 12732, 12861], [12731, 12861, 12860], [12732, 12733, 12861], [12733, 12862, 12861], [12733, 12734, 12863], [12733, 12863, 12862], [12734, 12735, 12863], [12735, 12864, 12863], [12735, 12736, 12865], [12735, 12865, 12864], [12736, 12737, 12865], [12737, 12866, 12865], [12737, 12738, 12867], [12737, 12867, 12866], [12738, 12739, 12867], [12739, 12868, 12867], [12739, 12740, 12869], [12739, 12869, 12868], [12740, 12741, 12869], [12741, 12870, 12869], [12741, 12742, 12871], [12741, 12871, 12870], [12742, 12743, 12871], [12743, 12872, 12871], [12743, 12744, 12873], [12743, 12873, 12872], [12744, 12745, 12873], [12745, 12874, 12873], [12745, 12746, 12875], [12745, 12875, 12874], [12746, 12747, 12875], [12747, 12876, 12875], [12747, 12748, 12877], [12747, 12877, 12876], [12748, 12749, 12877], [12749, 12878, 12877], [12749, 12750, 12879], [12749, 12879, 12878], [12750, 12751, 12879], [12751, 12880, 12879], [12751, 12752, 12881], [12751, 12881, 12880], [12752, 12753, 12881], [12753, 12882, 12881], [12753, 12754, 12883], [12753, 12883, 12882], [12754, 12755, 12883], [12755, 12884, 12883], [12755, 12756, 12885], [12755, 12885, 
12884], [12756, 12757, 12885], [12757, 12886, 12885], [12757, 12758, 12887], [12757, 12887, 12886], [12758, 12759, 12887], [12759, 12888, 12887], [12759, 12760, 12889], [12759, 12889, 12888], [12760, 12761, 12889], [12761, 12890, 12889], [12761, 12762, 12891], [12761, 12891, 12890], [12762, 12763, 12891], [12763, 12892, 12891], [12763, 12764, 12893], [12763, 12893, 12892], [12764, 12765, 12893], [12765, 12894, 12893], [12765, 12766, 12895], [12765, 12895, 12894], [12766, 12767, 12895], [12767, 12896, 12895], [12767, 12768, 12897], [12767, 12897, 12896], [12768, 12769, 12897], [12769, 12898, 12897], [12769, 12770, 12899], [12769, 12899, 12898], [12770, 12771, 12899], [12771, 12900, 12899], [12771, 12772, 12901], [12771, 12901, 12900], [12772, 12773, 12901], [12773, 12902, 12901], [12773, 12774, 12903], [12773, 12903, 12902], [12774, 12775, 12903], [12775, 12904, 12903], [12775, 12776, 12905], [12775, 12905, 12904], [12776, 12777, 12905], [12777, 12906, 12905], [12777, 12778, 12907], [12777, 12907, 12906], [12778, 12779, 12907], [12779, 12908, 12907], [12779, 12780, 12909], [12779, 12909, 12908], [12780, 12781, 12909], [12781, 12910, 12909], [12781, 12782, 12911], [12781, 12911, 12910], [12782, 12783, 12911], [12783, 12912, 12911], [12783, 12784, 12913], [12783, 12913, 12912], [12784, 12785, 12913], [12785, 12914, 12913], [12785, 12786, 12915], [12785, 12915, 12914], [12786, 12787, 12915], [12787, 12916, 12915], [12787, 12788, 12917], [12787, 12917, 12916], [12788, 12789, 12917], [12789, 12918, 12917], [12789, 12790, 12919], [12789, 12919, 12918], [12790, 12791, 12919], [12791, 12920, 12919], [12791, 12792, 12921], [12791, 12921, 12920], [12792, 12793, 12921], [12793, 12922, 12921], [12793, 12794, 12923], [12793, 12923, 12922], [12794, 12795, 12923], [12795, 12924, 12923], [12795, 12796, 12925], [12795, 12925, 12924], [12796, 12797, 12925], [12797, 12926, 12925], [12797, 12798, 12927], [12797, 12927, 12926], [12798, 12799, 12927], [12799, 12928, 12927], [12799, 12800, 12929], [12799, 12929, 12928], [12800, 12801, 12929], [12801, 12930, 12929], [12801, 12802, 12931], [12801, 12931, 12930], [12802, 12803, 12931], [12803, 12932, 12931], [12803, 12804, 12933], [12803, 12933, 12932], [12804, 12805, 12933], [12805, 12934, 12933], [12805, 12806, 12935], [12805, 12935, 12934], [12806, 12807, 12935], [12807, 12936, 12935], [12807, 12808, 12937], [12807, 12937, 12936], [12808, 12809, 12937], [12809, 12938, 12937], [12809, 12810, 12939], [12809, 12939, 12938], [12810, 12811, 12939], [12811, 12940, 12939], [12811, 12812, 12941], [12811, 12941, 12940], [12812, 12813, 12941], [12813, 12942, 12941], [12813, 12814, 12943], [12813, 12943, 12942], [12814, 12815, 12943], [12815, 12944, 12943], [12815, 12816, 12945], [12815, 12945, 12944], [12817, 12818, 12947], [12817, 12947, 12946], [12818, 12819, 12947], [12819, 12948, 12947], [12819, 12820, 12949], [12819, 12949, 12948], [12820, 12821, 12949], [12821, 12950, 12949], [12821, 12822, 12951], [12821, 12951, 12950], [12822, 12823, 12951], [12823, 12952, 12951], [12823, 12824, 12953], [12823, 12953, 12952], [12824, 12825, 12953], [12825, 12954, 12953], [12825, 12826, 12955], [12825, 12955, 12954], [12826, 12827, 12955], [12827, 12956, 12955], [12827, 12828, 12957], [12827, 12957, 12956], [12828, 12829, 12957], [12829, 12958, 12957], [12829, 12830, 12959], [12829, 12959, 12958], [12830, 12831, 12959], [12831, 12960, 12959], [12831, 12832, 12961], [12831, 12961, 12960], [12832, 12833, 12961], [12833, 12962, 12961], [12833, 12834, 12963], [12833, 12963, 12962], 
[12834, 12835, 12963], [12835, 12964, 12963], [12835, 12836, 12965], [12835, 12965, 12964], [12836, 12837, 12965], [12837, 12966, 12965], [12837, 12838, 12967], [12837, 12967, 12966], [12838, 12839, 12967], [12839, 12968, 12967], [12839, 12840, 12969], [12839, 12969, 12968], [12840, 12841, 12969], [12841, 12970, 12969], [12841, 12842, 12971], [12841, 12971, 12970], [12842, 12843, 12971], [12843, 12972, 12971], [12843, 12844, 12973], [12843, 12973, 12972], [12844, 12845, 12973], [12845, 12974, 12973], [12845, 12846, 12975], [12845, 12975, 12974], [12846, 12847, 12975], [12847, 12976, 12975], [12847, 12848, 12977], [12847, 12977, 12976], [12848, 12849, 12977], [12849, 12978, 12977], [12849, 12850, 12979], [12849, 12979, 12978], [12850, 12851, 12979], [12851, 12980, 12979], [12851, 12852, 12981], [12851, 12981, 12980], [12852, 12853, 12981], [12853, 12982, 12981], [12853, 12854, 12983], [12853, 12983, 12982], [12854, 12855, 12983], [12855, 12984, 12983], [12855, 12856, 12985], [12855, 12985, 12984], [12856, 12857, 12985], [12857, 12986, 12985], [12857, 12858, 12987], [12857, 12987, 12986], [12858, 12859, 12987], [12859, 12988, 12987], [12859, 12860, 12989], [12859, 12989, 12988], [12860, 12861, 12989], [12861, 12990, 12989], [12861, 12862, 12991], [12861, 12991, 12990], [12862, 12863, 12991], [12863, 12992, 12991], [12863, 12864, 12993], [12863, 12993, 12992], [12864, 12865, 12993], [12865, 12994, 12993], [12865, 12866, 12995], [12865, 12995, 12994], [12866, 12867, 12995], [12867, 12996, 12995], [12867, 12868, 12997], [12867, 12997, 12996], [12868, 12869, 12997], [12869, 12998, 12997], [12869, 12870, 12999], [12869, 12999, 12998], [12870, 12871, 12999], [12871, 13000, 12999], [12871, 12872, 13001], [12871, 13001, 13000], [12872, 12873, 13001], [12873, 13002, 13001], [12873, 12874, 13003], [12873, 13003, 13002], [12874, 12875, 13003], [12875, 13004, 13003], [12875, 12876, 13005], [12875, 13005, 13004], [12876, 12877, 13005], [12877, 13006, 13005], [12877, 12878, 13007], [12877, 13007, 13006], [12878, 12879, 13007], [12879, 13008, 13007], [12879, 12880, 13009], [12879, 13009, 13008], [12880, 12881, 13009], [12881, 13010, 13009], [12881, 12882, 13011], [12881, 13011, 13010], [12882, 12883, 13011], [12883, 13012, 13011], [12883, 12884, 13013], [12883, 13013, 13012], [12884, 12885, 13013], [12885, 13014, 13013], [12885, 12886, 13015], [12885, 13015, 13014], [12886, 12887, 13015], [12887, 13016, 13015], [12887, 12888, 13017], [12887, 13017, 13016], [12888, 12889, 13017], [12889, 13018, 13017], [12889, 12890, 13019], [12889, 13019, 13018], [12890, 12891, 13019], [12891, 13020, 13019], [12891, 12892, 13021], [12891, 13021, 13020], [12892, 12893, 13021], [12893, 13022, 13021], [12893, 12894, 13023], [12893, 13023, 13022], [12894, 12895, 13023], [12895, 13024, 13023], [12895, 12896, 13025], [12895, 13025, 13024], [12896, 12897, 13025], [12897, 13026, 13025], [12897, 12898, 13027], [12897, 13027, 13026], [12898, 12899, 13027], [12899, 13028, 13027], [12899, 12900, 13029], [12899, 13029, 13028], [12900, 12901, 13029], [12901, 13030, 13029], [12901, 12902, 13031], [12901, 13031, 13030], [12902, 12903, 13031], [12903, 13032, 13031], [12903, 12904, 13033], [12903, 13033, 13032], [12904, 12905, 13033], [12905, 13034, 13033], [12905, 12906, 13035], [12905, 13035, 13034], [12906, 12907, 13035], [12907, 13036, 13035], [12907, 12908, 13037], [12907, 13037, 13036], [12908, 12909, 13037], [12909, 13038, 13037], [12909, 12910, 13039], [12909, 13039, 13038], [12910, 12911, 13039], [12911, 13040, 13039], [12911, 
12912, 13041], [12911, 13041, 13040], [12912, 12913, 13041], [12913, 13042, 13041], [12913, 12914, 13043], [12913, 13043, 13042], [12914, 12915, 13043], [12915, 13044, 13043], [12915, 12916, 13045], [12915, 13045, 13044], [12916, 12917, 13045], [12917, 13046, 13045], [12917, 12918, 13047], [12917, 13047, 13046], [12918, 12919, 13047], [12919, 13048, 13047], [12919, 12920, 13049], [12919, 13049, 13048], [12920, 12921, 13049], [12921, 13050, 13049], [12921, 12922, 13051], [12921, 13051, 13050], [12922, 12923, 13051], [12923, 13052, 13051], [12923, 12924, 13053], [12923, 13053, 13052], [12924, 12925, 13053], [12925, 13054, 13053], [12925, 12926, 13055], [12925, 13055, 13054], [12926, 12927, 13055], [12927, 13056, 13055], [12927, 12928, 13057], [12927, 13057, 13056], [12928, 12929, 13057], [12929, 13058, 13057], [12929, 12930, 13059], [12929, 13059, 13058], [12930, 12931, 13059], [12931, 13060, 13059], [12931, 12932, 13061], [12931, 13061, 13060], [12932, 12933, 13061], [12933, 13062, 13061], [12933, 12934, 13063], [12933, 13063, 13062], [12934, 12935, 13063], [12935, 13064, 13063], [12935, 12936, 13065], [12935, 13065, 13064], [12936, 12937, 13065], [12937, 13066, 13065], [12937, 12938, 13067], [12937, 13067, 13066], [12938, 12939, 13067], [12939, 13068, 13067], [12939, 12940, 13069], [12939, 13069, 13068], [12940, 12941, 13069], [12941, 13070, 13069], [12941, 12942, 13071], [12941, 13071, 13070], [12942, 12943, 13071], [12943, 13072, 13071], [12943, 12944, 13073], [12943, 13073, 13072], [12944, 12945, 13073], [12945, 13074, 13073], [12946, 12947, 13075], [12947, 13076, 13075], [12947, 12948, 13077], [12947, 13077, 13076], [12948, 12949, 13077], [12949, 13078, 13077], [12949, 12950, 13079], [12949, 13079, 13078], [12950, 12951, 13079], [12951, 13080, 13079], [12951, 12952, 13081], [12951, 13081, 13080], [12952, 12953, 13081], [12953, 13082, 13081], [12953, 12954, 13083], [12953, 13083, 13082], [12954, 12955, 13083], [12955, 13084, 13083], [12955, 12956, 13085], [12955, 13085, 13084], [12956, 12957, 13085], [12957, 13086, 13085], [12957, 12958, 13087], [12957, 13087, 13086], [12958, 12959, 13087], [12959, 13088, 13087], [12959, 12960, 13089], [12959, 13089, 13088], [12960, 12961, 13089], [12961, 13090, 13089], [12961, 12962, 13091], [12961, 13091, 13090], [12962, 12963, 13091], [12963, 13092, 13091], [12963, 12964, 13093], [12963, 13093, 13092], [12964, 12965, 13093], [12965, 13094, 13093], [12965, 12966, 13095], [12965, 13095, 13094], [12966, 12967, 13095], [12967, 13096, 13095], [12967, 12968, 13097], [12967, 13097, 13096], [12968, 12969, 13097], [12969, 13098, 13097], [12969, 12970, 13099], [12969, 13099, 13098], [12970, 12971, 13099], [12971, 13100, 13099], [12971, 12972, 13101], [12971, 13101, 13100], [12972, 12973, 13101], [12973, 13102, 13101], [12973, 12974, 13103], [12973, 13103, 13102], [12974, 12975, 13103], [12975, 13104, 13103], [12975, 12976, 13105], [12975, 13105, 13104], [12976, 12977, 13105], [12977, 13106, 13105], [12977, 12978, 13107], [12977, 13107, 13106], [12978, 12979, 13107], [12979, 13108, 13107], [12979, 12980, 13109], [12979, 13109, 13108], [12980, 12981, 13109], [12981, 13110, 13109], [12981, 12982, 13111], [12981, 13111, 13110], [12982, 12983, 13111], [12983, 13112, 13111], [12983, 12984, 13113], [12983, 13113, 13112], [12984, 12985, 13113], [12985, 13114, 13113], [12985, 12986, 13115], [12985, 13115, 13114], [12986, 12987, 13115], [12987, 13116, 13115], [12987, 12988, 13117], [12987, 13117, 13116], [12988, 12989, 13117], [12989, 13118, 13117], [12989, 12990, 
13119], [12989, 13119, 13118], [12990, 12991, 13119], [12991, 13120, 13119], [12991, 12992, 13121], [12991, 13121, 13120], [12992, 12993, 13121], [12993, 13122, 13121], [12993, 12994, 13123], [12993, 13123, 13122], [12994, 12995, 13123], [12995, 13124, 13123], [12995, 12996, 13125], [12995, 13125, 13124], [12996, 12997, 13125], [12997, 13126, 13125], [12997, 12998, 13127], [12997, 13127, 13126], [12998, 12999, 13127], [12999, 13128, 13127], [12999, 13000, 13129], [12999, 13129, 13128], [13000, 13001, 13129], [13001, 13130, 13129], [13001, 13002, 13131], [13001, 13131, 13130], [13002, 13003, 13131], [13003, 13132, 13131], [13003, 13004, 13133], [13003, 13133, 13132], [13004, 13005, 13133], [13005, 13134, 13133], [13005, 13006, 13135], [13005, 13135, 13134], [13006, 13007, 13135], [13007, 13136, 13135], [13007, 13008, 13137], [13007, 13137, 13136], [13008, 13009, 13137], [13009, 13138, 13137], [13009, 13010, 13139], [13009, 13139, 13138], [13010, 13011, 13139], [13011, 13140, 13139], [13011, 13012, 13141], [13011, 13141, 13140], [13012, 13013, 13141], [13013, 13142, 13141], [13013, 13014, 13143], [13013, 13143, 13142], [13014, 13015, 13143], [13015, 13144, 13143], [13015, 13016, 13145], [13015, 13145, 13144], [13016, 13017, 13145], [13017, 13146, 13145], [13017, 13018, 13147], [13017, 13147, 13146], [13018, 13019, 13147], [13019, 13148, 13147], [13019, 13020, 13149], [13019, 13149, 13148], [13020, 13021, 13149], [13021, 13150, 13149], [13021, 13022, 13151], [13021, 13151, 13150], [13022, 13023, 13151], [13023, 13152, 13151], [13023, 13024, 13153], [13023, 13153, 13152], [13024, 13025, 13153], [13025, 13154, 13153], [13025, 13026, 13155], [13025, 13155, 13154], [13026, 13027, 13155], [13027, 13156, 13155], [13027, 13028, 13157], [13027, 13157, 13156], [13028, 13029, 13157], [13029, 13158, 13157], [13029, 13030, 13159], [13029, 13159, 13158], [13030, 13031, 13159], [13031, 13160, 13159], [13031, 13032, 13161], [13031, 13161, 13160], [13032, 13033, 13161], [13033, 13162, 13161], [13033, 13034, 13163], [13033, 13163, 13162], [13034, 13035, 13163], [13035, 13164, 13163], [13035, 13036, 13165], [13035, 13165, 13164], [13036, 13037, 13165], [13037, 13166, 13165], [13037, 13038, 13167], [13037, 13167, 13166], [13038, 13039, 13167], [13039, 13168, 13167], [13039, 13040, 13169], [13039, 13169, 13168], [13040, 13041, 13169], [13041, 13170, 13169], [13041, 13042, 13171], [13041, 13171, 13170], [13042, 13043, 13171], [13043, 13172, 13171], [13043, 13044, 13173], [13043, 13173, 13172], [13044, 13045, 13173], [13045, 13174, 13173], [13045, 13046, 13175], [13045, 13175, 13174], [13046, 13047, 13175], [13047, 13176, 13175], [13047, 13048, 13177], [13047, 13177, 13176], [13048, 13049, 13177], [13049, 13178, 13177], [13049, 13050, 13179], [13049, 13179, 13178], [13050, 13051, 13179], [13051, 13180, 13179], [13051, 13052, 13181], [13051, 13181, 13180], [13052, 13053, 13181], [13053, 13182, 13181], [13053, 13054, 13183], [13053, 13183, 13182], [13054, 13055, 13183], [13055, 13184, 13183], [13055, 13056, 13185], [13055, 13185, 13184], [13056, 13057, 13185], [13057, 13186, 13185], [13057, 13058, 13187], [13057, 13187, 13186], [13058, 13059, 13187], [13059, 13188, 13187], [13059, 13060, 13189], [13059, 13189, 13188], [13060, 13061, 13189], [13061, 13190, 13189], [13061, 13062, 13191], [13061, 13191, 13190], [13062, 13063, 13191], [13063, 13192, 13191], [13063, 13064, 13193], [13063, 13193, 13192], [13064, 13065, 13193], [13065, 13194, 13193], [13065, 13066, 13195], [13065, 13195, 13194], [13066, 13067, 13195], 
[13067, 13196, 13195], [13067, 13068, 13197], [13067, 13197, 13196], [13068, 13069, 13197], [13069, 13198, 13197], [13069, 13070, 13199], [13069, 13199, 13198], [13070, 13071, 13199], [13071, 13200, 13199], [13071, 13072, 13201], [13071, 13201, 13200], [13072, 13073, 13201], [13073, 13202, 13201], [13073, 13074, 13203], [13073, 13203, 13202], [13075, 13076, 13205], [13075, 13205, 13204], [13076, 13077, 13205], [13077, 13206, 13205], [13077, 13078, 13207], [13077, 13207, 13206], [13078, 13079, 13207], [13079, 13208, 13207], [13079, 13080, 13209], [13079, 13209, 13208], [13080, 13081, 13209], [13081, 13210, 13209], [13081, 13082, 13211], [13081, 13211, 13210], [13082, 13083, 13211], [13083, 13212, 13211], [13083, 13084, 13213], [13083, 13213, 13212], [13084, 13085, 13213], [13085, 13214, 13213], [13085, 13086, 13215], [13085, 13215, 13214], [13086, 13087, 13215], [13087, 13216, 13215], [13087, 13088, 13217], [13087, 13217, 13216], [13088, 13089, 13217], [13089, 13218, 13217], [13089, 13090, 13219], [13089, 13219, 13218], [13090, 13091, 13219], [13091, 13220, 13219], [13091, 13092, 13221], [13091, 13221, 13220], [13092, 13093, 13221], [13093, 13222, 13221], [13093, 13094, 13223], [13093, 13223, 13222], [13094, 13095, 13223], [13095, 13224, 13223], [13095, 13096, 13225], [13095, 13225, 13224], [13096, 13097, 13225], [13097, 13226, 13225], [13097, 13098, 13227], [13097, 13227, 13226], [13098, 13099, 13227], [13099, 13228, 13227], [13099, 13100, 13229], [13099, 13229, 13228], [13100, 13101, 13229], [13101, 13230, 13229], [13101, 13102, 13231], [13101, 13231, 13230], [13102, 13103, 13231], [13103, 13232, 13231], [13103, 13104, 13233], [13103, 13233, 13232], [13104, 13105, 13233], [13105, 13234, 13233], [13105, 13106, 13235], [13105, 13235, 13234], [13106, 13107, 13235], [13107, 13236, 13235], [13107, 13108, 13237], [13107, 13237, 13236], [13108, 13109, 13237], [13109, 13238, 13237], [13109, 13110, 13239], [13109, 13239, 13238], [13110, 13111, 13239], [13111, 13240, 13239], [13111, 13112, 13241], [13111, 13241, 13240], [13112, 13113, 13241], [13113, 13242, 13241], [13113, 13114, 13243], [13113, 13243, 13242], [13114, 13115, 13243], [13115, 13244, 13243], [13115, 13116, 13245], [13115, 13245, 13244], [13116, 13117, 13245], [13117, 13246, 13245], [13117, 13118, 13247], [13117, 13247, 13246], [13118, 13119, 13247], [13119, 13248, 13247], [13119, 13120, 13249], [13119, 13249, 13248], [13120, 13121, 13249], [13121, 13250, 13249], [13121, 13122, 13251], [13121, 13251, 13250], [13122, 13123, 13251], [13123, 13252, 13251], [13123, 13124, 13253], [13123, 13253, 13252], [13124, 13125, 13253], [13125, 13254, 13253], [13125, 13126, 13255], [13125, 13255, 13254], [13126, 13127, 13255], [13127, 13256, 13255], [13127, 13128, 13257], [13127, 13257, 13256], [13128, 13129, 13257], [13129, 13258, 13257], [13129, 13130, 13259], [13129, 13259, 13258], [13130, 13131, 13259], [13131, 13260, 13259], [13131, 13132, 13261], [13131, 13261, 13260], [13132, 13133, 13261], [13133, 13262, 13261], [13133, 13134, 13263], [13133, 13263, 13262], [13134, 13135, 13263], [13135, 13264, 13263], [13135, 13136, 13265], [13135, 13265, 13264], [13136, 13137, 13265], [13137, 13266, 13265], [13137, 13138, 13267], [13137, 13267, 13266], [13138, 13139, 13267], [13139, 13268, 13267], [13139, 13140, 13269], [13139, 13269, 13268], [13140, 13141, 13269], [13141, 13270, 13269], [13141, 13142, 13271], [13141, 13271, 13270], [13142, 13143, 13271], [13143, 13272, 13271], [13143, 13144, 13273], [13143, 13273, 13272], [13144, 13145, 13273], [13145, 
13274, 13273], [13145, 13146, 13275], [13145, 13275, 13274], [13146, 13147, 13275], [13147, 13276, 13275], [13147, 13148, 13277], [13147, 13277, 13276], [13148, 13149, 13277], [13149, 13278, 13277], [13149, 13150, 13279], [13149, 13279, 13278], [13150, 13151, 13279], [13151, 13280, 13279], [13151, 13152, 13281], [13151, 13281, 13280], [13152, 13153, 13281], [13153, 13282, 13281], [13153, 13154, 13283], [13153, 13283, 13282], [13154, 13155, 13283], [13155, 13284, 13283], [13155, 13156, 13285], [13155, 13285, 13284], [13156, 13157, 13285], [13157, 13286, 13285], [13157, 13158, 13287], [13157, 13287, 13286], [13158, 13159, 13287], [13159, 13288, 13287], [13159, 13160, 13289], [13159, 13289, 13288], [13160, 13161, 13289], [13161, 13290, 13289], [13161, 13162, 13291], [13161, 13291, 13290], [13162, 13163, 13291], [13163, 13292, 13291], [13163, 13164, 13293], [13163, 13293, 13292], [13164, 13165, 13293], [13165, 13294, 13293], [13165, 13166, 13295], [13165, 13295, 13294], [13166, 13167, 13295], [13167, 13296, 13295], [13167, 13168, 13297], [13167, 13297, 13296], [13168, 13169, 13297], [13169, 13298, 13297], [13169, 13170, 13299], [13169, 13299, 13298], [13170, 13171, 13299], [13171, 13300, 13299], [13171, 13172, 13301], [13171, 13301, 13300], [13172, 13173, 13301], [13173, 13302, 13301], [13173, 13174, 13303], [13173, 13303, 13302], [13174, 13175, 13303], [13175, 13304, 13303], [13175, 13176, 13305], [13175, 13305, 13304], [13176, 13177, 13305], [13177, 13306, 13305], [13177, 13178, 13307], [13177, 13307, 13306], [13178, 13179, 13307], [13179, 13308, 13307], [13179, 13180, 13309], [13179, 13309, 13308], [13180, 13181, 13309], [13181, 13310, 13309], [13181, 13182, 13311], [13181, 13311, 13310], [13182, 13183, 13311], [13183, 13312, 13311], [13183, 13184, 13313], [13183, 13313, 13312], [13184, 13185, 13313], [13185, 13314, 13313], [13185, 13186, 13315], [13185, 13315, 13314], [13186, 13187, 13315], [13187, 13316, 13315], [13187, 13188, 13317], [13187, 13317, 13316], [13188, 13189, 13317], [13189, 13318, 13317], [13189, 13190, 13319], [13189, 13319, 13318], [13190, 13191, 13319], [13191, 13320, 13319], [13191, 13192, 13321], [13191, 13321, 13320], [13192, 13193, 13321], [13193, 13322, 13321], [13193, 13194, 13323], [13193, 13323, 13322], [13194, 13195, 13323], [13195, 13324, 13323], [13195, 13196, 13325], [13195, 13325, 13324], [13196, 13197, 13325], [13197, 13326, 13325], [13197, 13198, 13327], [13197, 13327, 13326], [13198, 13199, 13327], [13199, 13328, 13327], [13199, 13200, 13329], [13199, 13329, 13328], [13200, 13201, 13329], [13201, 13330, 13329], [13201, 13202, 13331], [13201, 13331, 13330], [13202, 13203, 13331], [13203, 13332, 13331], [13204, 13205, 13333], [13205, 13334, 13333], [13205, 13206, 13335], [13205, 13335, 13334], [13206, 13207, 13335], [13207, 13336, 13335], [13207, 13208, 13337], [13207, 13337, 13336], [13208, 13209, 13337], [13209, 13338, 13337], [13209, 13210, 13339], [13209, 13339, 13338], [13210, 13211, 13339], [13211, 13340, 13339], [13211, 13212, 13341], [13211, 13341, 13340], [13212, 13213, 13341], [13213, 13342, 13341], [13213, 13214, 13343], [13213, 13343, 13342], [13214, 13215, 13343], [13215, 13344, 13343], [13215, 13216, 13345], [13215, 13345, 13344], [13216, 13217, 13345], [13217, 13346, 13345], [13217, 13218, 13347], [13217, 13347, 13346], [13218, 13219, 13347], [13219, 13348, 13347], [13219, 13220, 13349], [13219, 13349, 13348], [13220, 13221, 13349], [13221, 13350, 13349], [13221, 13222, 13351], [13221, 13351, 13350], [13222, 13223, 13351], [13223, 13352, 
13351], [13223, 13224, 13353], [13223, 13353, 13352], [13224, 13225, 13353], [13225, 13354, 13353], [13225, 13226, 13355], [13225, 13355, 13354], [13226, 13227, 13355], [13227, 13356, 13355], [13227, 13228, 13357], [13227, 13357, 13356], [13228, 13229, 13357], [13229, 13358, 13357], [13229, 13230, 13359], [13229, 13359, 13358], [13230, 13231, 13359], [13231, 13360, 13359], [13231, 13232, 13361], [13231, 13361, 13360], [13232, 13233, 13361], [13233, 13362, 13361], [13233, 13234, 13363], [13233, 13363, 13362], [13234, 13235, 13363], [13235, 13364, 13363], [13235, 13236, 13365], [13235, 13365, 13364], [13236, 13237, 13365], [13237, 13366, 13365], [13237, 13238, 13367], [13237, 13367, 13366], [13238, 13239, 13367], [13239, 13368, 13367], [13239, 13240, 13369], [13239, 13369, 13368], [13240, 13241, 13369], [13241, 13370, 13369], [13241, 13242, 13371], [13241, 13371, 13370], [13242, 13243, 13371], [13243, 13372, 13371], [13243, 13244, 13373], [13243, 13373, 13372], [13244, 13245, 13373], [13245, 13374, 13373], [13245, 13246, 13375], [13245, 13375, 13374], [13246, 13247, 13375], [13247, 13376, 13375], [13247, 13248, 13377], [13247, 13377, 13376], [13248, 13249, 13377], [13249, 13378, 13377], [13249, 13250, 13379], [13249, 13379, 13378], [13250, 13251, 13379], [13251, 13380, 13379], [13251, 13252, 13381], [13251, 13381, 13380], [13252, 13253, 13381], [13253, 13382, 13381], [13253, 13254, 13383], [13253, 13383, 13382], [13254, 13255, 13383], [13255, 13384, 13383], [13255, 13256, 13385], [13255, 13385, 13384], [13256, 13257, 13385], [13257, 13386, 13385], [13257, 13258, 13387], [13257, 13387, 13386], [13258, 13259, 13387], [13259, 13388, 13387], [13259, 13260, 13389], [13259, 13389, 13388], [13260, 13261, 13389], [13261, 13390, 13389], [13261, 13262, 13391], [13261, 13391, 13390], [13262, 13263, 13391], [13263, 13392, 13391], [13263, 13264, 13393], [13263, 13393, 13392], [13264, 13265, 13393], [13265, 13394, 13393], [13265, 13266, 13395], [13265, 13395, 13394], [13266, 13267, 13395], [13267, 13396, 13395], [13267, 13268, 13397], [13267, 13397, 13396], [13268, 13269, 13397], [13269, 13398, 13397], [13269, 13270, 13399], [13269, 13399, 13398], [13270, 13271, 13399], [13271, 13400, 13399], [13271, 13272, 13401], [13271, 13401, 13400], [13272, 13273, 13401], [13273, 13402, 13401], [13273, 13274, 13403], [13273, 13403, 13402], [13274, 13275, 13403], [13275, 13404, 13403], [13275, 13276, 13405], [13275, 13405, 13404], [13276, 13277, 13405], [13277, 13406, 13405], [13277, 13278, 13407], [13277, 13407, 13406], [13278, 13279, 13407], [13279, 13408, 13407], [13279, 13280, 13409], [13279, 13409, 13408], [13280, 13281, 13409], [13281, 13410, 13409], [13281, 13282, 13411], [13281, 13411, 13410], [13282, 13283, 13411], [13283, 13412, 13411], [13283, 13284, 13413], [13283, 13413, 13412], [13284, 13285, 13413], [13285, 13414, 13413], [13285, 13286, 13415], [13285, 13415, 13414], [13286, 13287, 13415], [13287, 13416, 13415], [13287, 13288, 13417], [13287, 13417, 13416], [13288, 13289, 13417], [13289, 13418, 13417], [13289, 13290, 13419], [13289, 13419, 13418], [13290, 13291, 13419], [13291, 13420, 13419], [13291, 13292, 13421], [13291, 13421, 13420], [13292, 13293, 13421], [13293, 13422, 13421], [13293, 13294, 13423], [13293, 13423, 13422], [13294, 13295, 13423], [13295, 13424, 13423], [13295, 13296, 13425], [13295, 13425, 13424], [13296, 13297, 13425], [13297, 13426, 13425], [13297, 13298, 13427], [13297, 13427, 13426], [13298, 13299, 13427], [13299, 13428, 13427], [13299, 13300, 13429], [13299, 13429, 13428], 
[13300, 13301, 13429], [13301, 13430, 13429], [13301, 13302, 13431], [13301, 13431, 13430], [13302, 13303, 13431], [13303, 13432, 13431], [13303, 13304, 13433], [13303, 13433, 13432], [13304, 13305, 13433], [13305, 13434, 13433], [13305, 13306, 13435], [13305, 13435, 13434], [13306, 13307, 13435], [13307, 13436, 13435], [13307, 13308, 13437], [13307, 13437, 13436], [13308, 13309, 13437], [13309, 13438, 13437], [13309, 13310, 13439], [13309, 13439, 13438], [13310, 13311, 13439], [13311, 13440, 13439], [13311, 13312, 13441], [13311, 13441, 13440], [13312, 13313, 13441], [13313, 13442, 13441], [13313, 13314, 13443], [13313, 13443, 13442], [13314, 13315, 13443], [13315, 13444, 13443], [13315, 13316, 13445], [13315, 13445, 13444], [13316, 13317, 13445], [13317, 13446, 13445], [13317, 13318, 13447], [13317, 13447, 13446], [13318, 13319, 13447], [13319, 13448, 13447], [13319, 13320, 13449], [13319, 13449, 13448], [13320, 13321, 13449], [13321, 13450, 13449], [13321, 13322, 13451], [13321, 13451, 13450], [13322, 13323, 13451], [13323, 13452, 13451], [13323, 13324, 13453], [13323, 13453, 13452], [13324, 13325, 13453], [13325, 13454, 13453], [13325, 13326, 13455], [13325, 13455, 13454], [13326, 13327, 13455], [13327, 13456, 13455], [13327, 13328, 13457], [13327, 13457, 13456], [13328, 13329, 13457], [13329, 13458, 13457], [13329, 13330, 13459], [13329, 13459, 13458], [13330, 13331, 13459], [13331, 13460, 13459], [13331, 13332, 13461], [13331, 13461, 13460], [13333, 13334, 13463], [13333, 13463, 13462], [13334, 13335, 13463], [13335, 13464, 13463], [13335, 13336, 13465], [13335, 13465, 13464], [13336, 13337, 13465], [13337, 13466, 13465], [13337, 13338, 13467], [13337, 13467, 13466], [13338, 13339, 13467], [13339, 13468, 13467], [13339, 13340, 13469], [13339, 13469, 13468], [13340, 13341, 13469], [13341, 13470, 13469], [13341, 13342, 13471], [13341, 13471, 13470], [13342, 13343, 13471], [13343, 13472, 13471], [13343, 13344, 13473], [13343, 13473, 13472], [13344, 13345, 13473], [13345, 13474, 13473], [13345, 13346, 13475], [13345, 13475, 13474], [13346, 13347, 13475], [13347, 13476, 13475], [13347, 13348, 13477], [13347, 13477, 13476], [13348, 13349, 13477], [13349, 13478, 13477], [13349, 13350, 13479], [13349, 13479, 13478], [13350, 13351, 13479], [13351, 13480, 13479], [13351, 13352, 13481], [13351, 13481, 13480], [13352, 13353, 13481], [13353, 13482, 13481], [13353, 13354, 13483], [13353, 13483, 13482], [13354, 13355, 13483], [13355, 13484, 13483], [13355, 13356, 13485], [13355, 13485, 13484], [13356, 13357, 13485], [13357, 13486, 13485], [13357, 13358, 13487], [13357, 13487, 13486], [13358, 13359, 13487], [13359, 13488, 13487], [13359, 13360, 13489], [13359, 13489, 13488], [13360, 13361, 13489], [13361, 13490, 13489], [13361, 13362, 13491], [13361, 13491, 13490], [13362, 13363, 13491], [13363, 13492, 13491], [13363, 13364, 13493], [13363, 13493, 13492], [13364, 13365, 13493], [13365, 13494, 13493], [13365, 13366, 13495], [13365, 13495, 13494], [13366, 13367, 13495], [13367, 13496, 13495], [13367, 13368, 13497], [13367, 13497, 13496], [13368, 13369, 13497], [13369, 13498, 13497], [13369, 13370, 13499], [13369, 13499, 13498], [13370, 13371, 13499], [13371, 13500, 13499], [13371, 13372, 13501], [13371, 13501, 13500], [13372, 13373, 13501], [13373, 13502, 13501], [13373, 13374, 13503], [13373, 13503, 13502], [13374, 13375, 13503], [13375, 13504, 13503], [13375, 13376, 13505], [13375, 13505, 13504], [13376, 13377, 13505], [13377, 13506, 13505], [13377, 13378, 13507], [13377, 13507, 13506], [13378, 
13379, 13507], [13379, 13508, 13507], [13379, 13380, 13509], [13379, 13509, 13508], [13380, 13381, 13509], [13381, 13510, 13509], [13381, 13382, 13511], [13381, 13511, 13510], [13382, 13383, 13511], [13383, 13512, 13511], [13383, 13384, 13513], [13383, 13513, 13512], [13384, 13385, 13513], [13385, 13514, 13513], [13385, 13386, 13515], [13385, 13515, 13514], [13386, 13387, 13515], [13387, 13516, 13515], [13387, 13388, 13517], [13387, 13517, 13516], [13388, 13389, 13517], [13389, 13518, 13517], [13389, 13390, 13519], [13389, 13519, 13518], [13390, 13391, 13519], [13391, 13520, 13519], [13391, 13392, 13521], [13391, 13521, 13520], [13392, 13393, 13521], [13393, 13522, 13521], [13393, 13394, 13523], [13393, 13523, 13522], [13394, 13395, 13523], [13395, 13524, 13523], [13395, 13396, 13525], [13395, 13525, 13524], [13396, 13397, 13525], [13397, 13526, 13525], [13397, 13398, 13527], [13397, 13527, 13526], [13398, 13399, 13527], [13399, 13528, 13527], [13399, 13400, 13529], [13399, 13529, 13528], [13400, 13401, 13529], [13401, 13530, 13529], [13401, 13402, 13531], [13401, 13531, 13530], [13402, 13403, 13531], [13403, 13532, 13531], [13403, 13404, 13533], [13403, 13533, 13532], [13404, 13405, 13533], [13405, 13534, 13533], [13405, 13406, 13535], [13405, 13535, 13534], [13406, 13407, 13535], [13407, 13536, 13535], [13407, 13408, 13537], [13407, 13537, 13536], [13408, 13409, 13537], [13409, 13538, 13537], [13409, 13410, 13539], [13409, 13539, 13538], [13410, 13411, 13539], [13411, 13540, 13539], [13411, 13412, 13541], [13411, 13541, 13540], [13412, 13413, 13541], [13413, 13542, 13541], [13413, 13414, 13543], [13413, 13543, 13542], [13414, 13415, 13543], [13415, 13544, 13543], [13415, 13416, 13545], [13415, 13545, 13544], [13416, 13417, 13545], [13417, 13546, 13545], [13417, 13418, 13547], [13417, 13547, 13546], [13418, 13419, 13547], [13419, 13548, 13547], [13419, 13420, 13549], [13419, 13549, 13548], [13420, 13421, 13549], [13421, 13550, 13549], [13421, 13422, 13551], [13421, 13551, 13550], [13422, 13423, 13551], [13423, 13552, 13551], [13423, 13424, 13553], [13423, 13553, 13552], [13424, 13425, 13553], [13425, 13554, 13553], [13425, 13426, 13555], [13425, 13555, 13554], [13426, 13427, 13555], [13427, 13556, 13555], [13427, 13428, 13557], [13427, 13557, 13556], [13428, 13429, 13557], [13429, 13558, 13557], [13429, 13430, 13559], [13429, 13559, 13558], [13430, 13431, 13559], [13431, 13560, 13559], [13431, 13432, 13561], [13431, 13561, 13560], [13432, 13433, 13561], [13433, 13562, 13561], [13433, 13434, 13563], [13433, 13563, 13562], [13434, 13435, 13563], [13435, 13564, 13563], [13435, 13436, 13565], [13435, 13565, 13564], [13436, 13437, 13565], [13437, 13566, 13565], [13437, 13438, 13567], [13437, 13567, 13566], [13438, 13439, 13567], [13439, 13568, 13567], [13439, 13440, 13569], [13439, 13569, 13568], [13440, 13441, 13569], [13441, 13570, 13569], [13441, 13442, 13571], [13441, 13571, 13570], [13442, 13443, 13571], [13443, 13572, 13571], [13443, 13444, 13573], [13443, 13573, 13572], [13444, 13445, 13573], [13445, 13574, 13573], [13445, 13446, 13575], [13445, 13575, 13574], [13446, 13447, 13575], [13447, 13576, 13575], [13447, 13448, 13577], [13447, 13577, 13576], [13448, 13449, 13577], [13449, 13578, 13577], [13449, 13450, 13579], [13449, 13579, 13578], [13450, 13451, 13579], [13451, 13580, 13579], [13451, 13452, 13581], [13451, 13581, 13580], [13452, 13453, 13581], [13453, 13582, 13581], [13453, 13454, 13583], [13453, 13583, 13582], [13454, 13455, 13583], [13455, 13584, 13583], [13455, 13456, 
13585], [13455, 13585, 13584], [13456, 13457, 13585], [13457, 13586, 13585], [13457, 13458, 13587], [13457, 13587, 13586], [13458, 13459, 13587], [13459, 13588, 13587], [13459, 13460, 13589], [13459, 13589, 13588], [13460, 13461, 13589], [13461, 13590, 13589], [13462, 13463, 13591], [13463, 13592, 13591], [13463, 13464, 13593], [13463, 13593, 13592], [13464, 13465, 13593], [13465, 13594, 13593], [13465, 13466, 13595], [13465, 13595, 13594], [13466, 13467, 13595], [13467, 13596, 13595], [13467, 13468, 13597], [13467, 13597, 13596], [13468, 13469, 13597], [13469, 13598, 13597], [13469, 13470, 13599], [13469, 13599, 13598], [13470, 13471, 13599], [13471, 13600, 13599], [13471, 13472, 13601], [13471, 13601, 13600], [13472, 13473, 13601], [13473, 13602, 13601], [13473, 13474, 13603], [13473, 13603, 13602], [13474, 13475, 13603], [13475, 13604, 13603], [13475, 13476, 13605], [13475, 13605, 13604], [13476, 13477, 13605], [13477, 13606, 13605], [13477, 13478, 13607], [13477, 13607, 13606], [13478, 13479, 13607], [13479, 13608, 13607], [13479, 13480, 13609], [13479, 13609, 13608], [13480, 13481, 13609], [13481, 13610, 13609], [13481, 13482, 13611], [13481, 13611, 13610], [13482, 13483, 13611], [13483, 13612, 13611], [13483, 13484, 13613], [13483, 13613, 13612], [13484, 13485, 13613], [13485, 13614, 13613], [13485, 13486, 13615], [13485, 13615, 13614], [13486, 13487, 13615], [13487, 13616, 13615], [13487, 13488, 13617], [13487, 13617, 13616], [13488, 13489, 13617], [13489, 13618, 13617], [13489, 13490, 13619], [13489, 13619, 13618], [13490, 13491, 13619], [13491, 13620, 13619], [13491, 13492, 13621], [13491, 13621, 13620], [13492, 13493, 13621], [13493, 13622, 13621], [13493, 13494, 13623], [13493, 13623, 13622], [13494, 13495, 13623], [13495, 13624, 13623], [13495, 13496, 13625], [13495, 13625, 13624], [13496, 13497, 13625], [13497, 13626, 13625], [13497, 13498, 13627], [13497, 13627, 13626], [13498, 13499, 13627], [13499, 13628, 13627], [13499, 13500, 13629], [13499, 13629, 13628], [13500, 13501, 13629], [13501, 13630, 13629], [13501, 13502, 13631], [13501, 13631, 13630], [13502, 13503, 13631], [13503, 13632, 13631], [13503, 13504, 13633], [13503, 13633, 13632], [13504, 13505, 13633], [13505, 13634, 13633], [13505, 13506, 13635], [13505, 13635, 13634], [13506, 13507, 13635], [13507, 13636, 13635], [13507, 13508, 13637], [13507, 13637, 13636], [13508, 13509, 13637], [13509, 13638, 13637], [13509, 13510, 13639], [13509, 13639, 13638], [13510, 13511, 13639], [13511, 13640, 13639], [13511, 13512, 13641], [13511, 13641, 13640], [13512, 13513, 13641], [13513, 13642, 13641], [13513, 13514, 13643], [13513, 13643, 13642], [13514, 13515, 13643], [13515, 13644, 13643], [13515, 13516, 13645], [13515, 13645, 13644], [13516, 13517, 13645], [13517, 13646, 13645], [13517, 13518, 13647], [13517, 13647, 13646], [13518, 13519, 13647], [13519, 13648, 13647], [13519, 13520, 13649], [13519, 13649, 13648], [13520, 13521, 13649], [13521, 13650, 13649], [13521, 13522, 13651], [13521, 13651, 13650], [13522, 13523, 13651], [13523, 13652, 13651], [13523, 13524, 13653], [13523, 13653, 13652], [13524, 13525, 13653], [13525, 13654, 13653], [13525, 13526, 13655], [13525, 13655, 13654], [13526, 13527, 13655], [13527, 13656, 13655], [13527, 13528, 13657], [13527, 13657, 13656], [13528, 13529, 13657], [13529, 13658, 13657], [13529, 13530, 13659], [13529, 13659, 13658], [13530, 13531, 13659], [13531, 13660, 13659], [13531, 13532, 13661], [13531, 13661, 13660], [13532, 13533, 13661], [13533, 13662, 13661], [13533, 13534, 13663], 
[13533, 13663, 13662], [13534, 13535, 13663], [13535, 13664, 13663], [13535, 13536, 13665], [13535, 13665, 13664], [13536, 13537, 13665], [13537, 13666, 13665], [13537, 13538, 13667], [13537, 13667, 13666], [13538, 13539, 13667], [13539, 13668, 13667], [13539, 13540, 13669], [13539, 13669, 13668], [13540, 13541, 13669], [13541, 13670, 13669], [13541, 13542, 13671], [13541, 13671, 13670], [13542, 13543, 13671], [13543, 13672, 13671], [13543, 13544, 13673], [13543, 13673, 13672], [13544, 13545, 13673], [13545, 13674, 13673], [13545, 13546, 13675], [13545, 13675, 13674], [13546, 13547, 13675], [13547, 13676, 13675], [13547, 13548, 13677], [13547, 13677, 13676], [13548, 13549, 13677], [13549, 13678, 13677], [13549, 13550, 13679], [13549, 13679, 13678], [13550, 13551, 13679], [13551, 13680, 13679], [13551, 13552, 13681], [13551, 13681, 13680], [13552, 13553, 13681], [13553, 13682, 13681], [13553, 13554, 13683], [13553, 13683, 13682], [13554, 13555, 13683], [13555, 13684, 13683], [13555, 13556, 13685], [13555, 13685, 13684], [13556, 13557, 13685], [13557, 13686, 13685], [13557, 13558, 13687], [13557, 13687, 13686], [13558, 13559, 13687], [13559, 13688, 13687], [13559, 13560, 13689], [13559, 13689, 13688], [13560, 13561, 13689], [13561, 13690, 13689], [13561, 13562, 13691], [13561, 13691, 13690], [13562, 13563, 13691], [13563, 13692, 13691], [13563, 13564, 13693], [13563, 13693, 13692], [13564, 13565, 13693], [13565, 13694, 13693], [13565, 13566, 13695], [13565, 13695, 13694], [13566, 13567, 13695], [13567, 13696, 13695], [13567, 13568, 13697], [13567, 13697, 13696], [13568, 13569, 13697], [13569, 13698, 13697], [13569, 13570, 13699], [13569, 13699, 13698], [13570, 13571, 13699], [13571, 13700, 13699], [13571, 13572, 13701], [13571, 13701, 13700], [13572, 13573, 13701], [13573, 13702, 13701], [13573, 13574, 13703], [13573, 13703, 13702], [13574, 13575, 13703], [13575, 13704, 13703], [13575, 13576, 13705], [13575, 13705, 13704], [13576, 13577, 13705], [13577, 13706, 13705], [13577, 13578, 13707], [13577, 13707, 13706], [13578, 13579, 13707], [13579, 13708, 13707], [13579, 13580, 13709], [13579, 13709, 13708], [13580, 13581, 13709], [13581, 13710, 13709], [13581, 13582, 13711], [13581, 13711, 13710], [13582, 13583, 13711], [13583, 13712, 13711], [13583, 13584, 13713], [13583, 13713, 13712], [13584, 13585, 13713], [13585, 13714, 13713], [13585, 13586, 13715], [13585, 13715, 13714], [13586, 13587, 13715], [13587, 13716, 13715], [13587, 13588, 13717], [13587, 13717, 13716], [13588, 13589, 13717], [13589, 13718, 13717], [13589, 13590, 13719], [13589, 13719, 13718], [13591, 13592, 13721], [13591, 13721, 13720], [13592, 13593, 13721], [13593, 13722, 13721], [13593, 13594, 13723], [13593, 13723, 13722], [13594, 13595, 13723], [13595, 13724, 13723], [13595, 13596, 13725], [13595, 13725, 13724], [13596, 13597, 13725], [13597, 13726, 13725], [13597, 13598, 13727], [13597, 13727, 13726], [13598, 13599, 13727], [13599, 13728, 13727], [13599, 13600, 13729], [13599, 13729, 13728], [13600, 13601, 13729], [13601, 13730, 13729], [13601, 13602, 13731], [13601, 13731, 13730], [13602, 13603, 13731], [13603, 13732, 13731], [13603, 13604, 13733], [13603, 13733, 13732], [13604, 13605, 13733], [13605, 13734, 13733], [13605, 13606, 13735], [13605, 13735, 13734], [13606, 13607, 13735], [13607, 13736, 13735], [13607, 13608, 13737], [13607, 13737, 13736], [13608, 13609, 13737], [13609, 13738, 13737], [13609, 13610, 13739], [13609, 13739, 13738], [13610, 13611, 13739], [13611, 13740, 13739], [13611, 13612, 13741], [13611, 
13741, 13740], [13612, 13613, 13741], [13613, 13742, 13741], [13613, 13614, 13743], [13613, 13743, 13742], [13614, 13615, 13743], [13615, 13744, 13743], [13615, 13616, 13745], [13615, 13745, 13744], [13616, 13617, 13745], [13617, 13746, 13745], [13617, 13618, 13747], [13617, 13747, 13746], [13618, 13619, 13747], [13619, 13748, 13747], [13619, 13620, 13749], [13619, 13749, 13748], [13620, 13621, 13749], [13621, 13750, 13749], [13621, 13622, 13751], [13621, 13751, 13750], [13622, 13623, 13751], [13623, 13752, 13751], [13623, 13624, 13753], [13623, 13753, 13752], [13624, 13625, 13753], [13625, 13754, 13753], [13625, 13626, 13755], [13625, 13755, 13754], [13626, 13627, 13755], [13627, 13756, 13755], [13627, 13628, 13757], [13627, 13757, 13756], [13628, 13629, 13757], [13629, 13758, 13757], [13629, 13630, 13759], [13629, 13759, 13758], [13630, 13631, 13759], [13631, 13760, 13759], [13631, 13632, 13761], [13631, 13761, 13760], [13632, 13633, 13761], [13633, 13762, 13761], [13633, 13634, 13763], [13633, 13763, 13762], [13634, 13635, 13763], [13635, 13764, 13763], [13635, 13636, 13765], [13635, 13765, 13764], [13636, 13637, 13765], [13637, 13766, 13765], [13637, 13638, 13767], [13637, 13767, 13766], [13638, 13639, 13767], [13639, 13768, 13767], [13639, 13640, 13769], [13639, 13769, 13768], [13640, 13641, 13769], [13641, 13770, 13769], [13641, 13642, 13771], [13641, 13771, 13770], [13642, 13643, 13771], [13643, 13772, 13771], [13643, 13644, 13773], [13643, 13773, 13772], [13644, 13645, 13773], [13645, 13774, 13773], [13645, 13646, 13775], [13645, 13775, 13774], [13646, 13647, 13775], [13647, 13776, 13775], [13647, 13648, 13777], [13647, 13777, 13776], [13648, 13649, 13777], [13649, 13778, 13777], [13649, 13650, 13779], [13649, 13779, 13778], [13650, 13651, 13779], [13651, 13780, 13779], [13651, 13652, 13781], [13651, 13781, 13780], [13652, 13653, 13781], [13653, 13782, 13781], [13653, 13654, 13783], [13653, 13783, 13782], [13654, 13655, 13783], [13655, 13784, 13783], [13655, 13656, 13785], [13655, 13785, 13784], [13656, 13657, 13785], [13657, 13786, 13785], [13657, 13658, 13787], [13657, 13787, 13786], [13658, 13659, 13787], [13659, 13788, 13787], [13659, 13660, 13789], [13659, 13789, 13788], [13660, 13661, 13789], [13661, 13790, 13789], [13661, 13662, 13791], [13661, 13791, 13790], [13662, 13663, 13791], [13663, 13792, 13791], [13663, 13664, 13793], [13663, 13793, 13792], [13664, 13665, 13793], [13665, 13794, 13793], [13665, 13666, 13795], [13665, 13795, 13794], [13666, 13667, 13795], [13667, 13796, 13795], [13667, 13668, 13797], [13667, 13797, 13796], [13668, 13669, 13797], [13669, 13798, 13797], [13669, 13670, 13799], [13669, 13799, 13798], [13670, 13671, 13799], [13671, 13800, 13799], [13671, 13672, 13801], [13671, 13801, 13800], [13672, 13673, 13801], [13673, 13802, 13801], [13673, 13674, 13803], [13673, 13803, 13802], [13674, 13675, 13803], [13675, 13804, 13803], [13675, 13676, 13805], [13675, 13805, 13804], [13676, 13677, 13805], [13677, 13806, 13805], [13677, 13678, 13807], [13677, 13807, 13806], [13678, 13679, 13807], [13679, 13808, 13807], [13679, 13680, 13809], [13679, 13809, 13808], [13680, 13681, 13809], [13681, 13810, 13809], [13681, 13682, 13811], [13681, 13811, 13810], [13682, 13683, 13811], [13683, 13812, 13811], [13683, 13684, 13813], [13683, 13813, 13812], [13684, 13685, 13813], [13685, 13814, 13813], [13685, 13686, 13815], [13685, 13815, 13814], [13686, 13687, 13815], [13687, 13816, 13815], [13687, 13688, 13817], [13687, 13817, 13816], [13688, 13689, 13817], [13689, 13818, 
13817], [13689, 13690, 13819], [13689, 13819, 13818], [13690, 13691, 13819], [13691, 13820, 13819], [13691, 13692, 13821], [13691, 13821, 13820], [13692, 13693, 13821], [13693, 13822, 13821], [13693, 13694, 13823], [13693, 13823, 13822], [13694, 13695, 13823], [13695, 13824, 13823], [13695, 13696, 13825], [13695, 13825, 13824], [13696, 13697, 13825], [13697, 13826, 13825], [13697, 13698, 13827], [13697, 13827, 13826], [13698, 13699, 13827], [13699, 13828, 13827], [13699, 13700, 13829], [13699, 13829, 13828], [13700, 13701, 13829], [13701, 13830, 13829], [13701, 13702, 13831], [13701, 13831, 13830], [13702, 13703, 13831], [13703, 13832, 13831], [13703, 13704, 13833], [13703, 13833, 13832], [13704, 13705, 13833], [13705, 13834, 13833], [13705, 13706, 13835], [13705, 13835, 13834], [13706, 13707, 13835], [13707, 13836, 13835], [13707, 13708, 13837], [13707, 13837, 13836], [13708, 13709, 13837], [13709, 13838, 13837], [13709, 13710, 13839], [13709, 13839, 13838], [13710, 13711, 13839], [13711, 13840, 13839], [13711, 13712, 13841], [13711, 13841, 13840], [13712, 13713, 13841], [13713, 13842, 13841], [13713, 13714, 13843], [13713, 13843, 13842], [13714, 13715, 13843], [13715, 13844, 13843], [13715, 13716, 13845], [13715, 13845, 13844], [13716, 13717, 13845], [13717, 13846, 13845], [13717, 13718, 13847], [13717, 13847, 13846], [13718, 13719, 13847], [13719, 13848, 13847], [13720, 13721, 13849], [13721, 13850, 13849], [13721, 13722, 13851], [13721, 13851, 13850], [13722, 13723, 13851], [13723, 13852, 13851], [13723, 13724, 13853], [13723, 13853, 13852], [13724, 13725, 13853], [13725, 13854, 13853], [13725, 13726, 13855], [13725, 13855, 13854], [13726, 13727, 13855], [13727, 13856, 13855], [13727, 13728, 13857], [13727, 13857, 13856], [13728, 13729, 13857], [13729, 13858, 13857], [13729, 13730, 13859], [13729, 13859, 13858], [13730, 13731, 13859], [13731, 13860, 13859], [13731, 13732, 13861], [13731, 13861, 13860], [13732, 13733, 13861], [13733, 13862, 13861], [13733, 13734, 13863], [13733, 13863, 13862], [13734, 13735, 13863], [13735, 13864, 13863], [13735, 13736, 13865], [13735, 13865, 13864], [13736, 13737, 13865], [13737, 13866, 13865], [13737, 13738, 13867], [13737, 13867, 13866], [13738, 13739, 13867], [13739, 13868, 13867], [13739, 13740, 13869], [13739, 13869, 13868], [13740, 13741, 13869], [13741, 13870, 13869], [13741, 13742, 13871], [13741, 13871, 13870], [13742, 13743, 13871], [13743, 13872, 13871], [13743, 13744, 13873], [13743, 13873, 13872], [13744, 13745, 13873], [13745, 13874, 13873], [13745, 13746, 13875], [13745, 13875, 13874], [13746, 13747, 13875], [13747, 13876, 13875], [13747, 13748, 13877], [13747, 13877, 13876], [13748, 13749, 13877], [13749, 13878, 13877], [13749, 13750, 13879], [13749, 13879, 13878], [13750, 13751, 13879], [13751, 13880, 13879], [13751, 13752, 13881], [13751, 13881, 13880], [13752, 13753, 13881], [13753, 13882, 13881], [13753, 13754, 13883], [13753, 13883, 13882], [13754, 13755, 13883], [13755, 13884, 13883], [13755, 13756, 13885], [13755, 13885, 13884], [13756, 13757, 13885], [13757, 13886, 13885], [13757, 13758, 13887], [13757, 13887, 13886], [13758, 13759, 13887], [13759, 13888, 13887], [13759, 13760, 13889], [13759, 13889, 13888], [13760, 13761, 13889], [13761, 13890, 13889], [13761, 13762, 13891], [13761, 13891, 13890], [13762, 13763, 13891], [13763, 13892, 13891], [13763, 13764, 13893], [13763, 13893, 13892], [13764, 13765, 13893], [13765, 13894, 13893], [13765, 13766, 13895], [13765, 13895, 13894], [13766, 13767, 13895], [13767, 13896, 13895], 
[13767, 13768, 13897], [13767, 13897, 13896], [13768, 13769, 13897], [13769, 13898, 13897], [13769, 13770, 13899], [13769, 13899, 13898], [13770, 13771, 13899], [13771, 13900, 13899], [13771, 13772, 13901], [13771, 13901, 13900], [13772, 13773, 13901], [13773, 13902, 13901], [13773, 13774, 13903], [13773, 13903, 13902], [13774, 13775, 13903], [13775, 13904, 13903], [13775, 13776, 13905], [13775, 13905, 13904], [13776, 13777, 13905], [13777, 13906, 13905], [13777, 13778, 13907], [13777, 13907, 13906], [13778, 13779, 13907], [13779, 13908, 13907], [13779, 13780, 13909], [13779, 13909, 13908], [13780, 13781, 13909], [13781, 13910, 13909], [13781, 13782, 13911], [13781, 13911, 13910], [13782, 13783, 13911], [13783, 13912, 13911], [13783, 13784, 13913], [13783, 13913, 13912], [13784, 13785, 13913], [13785, 13914, 13913], [13785, 13786, 13915], [13785, 13915, 13914], [13786, 13787, 13915], [13787, 13916, 13915], [13787, 13788, 13917], [13787, 13917, 13916], [13788, 13789, 13917], [13789, 13918, 13917], [13789, 13790, 13919], [13789, 13919, 13918], [13790, 13791, 13919], [13791, 13920, 13919], [13791, 13792, 13921], [13791, 13921, 13920], [13792, 13793, 13921], [13793, 13922, 13921], [13793, 13794, 13923], [13793, 13923, 13922], [13794, 13795, 13923], [13795, 13924, 13923], [13795, 13796, 13925], [13795, 13925, 13924], [13796, 13797, 13925], [13797, 13926, 13925], [13797, 13798, 13927], [13797, 13927, 13926], [13798, 13799, 13927], [13799, 13928, 13927], [13799, 13800, 13929], [13799, 13929, 13928], [13800, 13801, 13929], [13801, 13930, 13929], [13801, 13802, 13931], [13801, 13931, 13930], [13802, 13803, 13931], [13803, 13932, 13931], [13803, 13804, 13933], [13803, 13933, 13932], [13804, 13805, 13933], [13805, 13934, 13933], [13805, 13806, 13935], [13805, 13935, 13934], [13806, 13807, 13935], [13807, 13936, 13935], [13807, 13808, 13937], [13807, 13937, 13936], [13808, 13809, 13937], [13809, 13938, 13937], [13809, 13810, 13939], [13809, 13939, 13938], [13810, 13811, 13939], [13811, 13940, 13939], [13811, 13812, 13941], [13811, 13941, 13940], [13812, 13813, 13941], [13813, 13942, 13941], [13813, 13814, 13943], [13813, 13943, 13942], [13814, 13815, 13943], [13815, 13944, 13943], [13815, 13816, 13945], [13815, 13945, 13944], [13816, 13817, 13945], [13817, 13946, 13945], [13817, 13818, 13947], [13817, 13947, 13946], [13818, 13819, 13947], [13819, 13948, 13947], [13819, 13820, 13949], [13819, 13949, 13948], [13820, 13821, 13949], [13821, 13950, 13949], [13821, 13822, 13951], [13821, 13951, 13950], [13822, 13823, 13951], [13823, 13952, 13951], [13823, 13824, 13953], [13823, 13953, 13952], [13824, 13825, 13953], [13825, 13954, 13953], [13825, 13826, 13955], [13825, 13955, 13954], [13826, 13827, 13955], [13827, 13956, 13955], [13827, 13828, 13957], [13827, 13957, 13956], [13828, 13829, 13957], [13829, 13958, 13957], [13829, 13830, 13959], [13829, 13959, 13958], [13830, 13831, 13959], [13831, 13960, 13959], [13831, 13832, 13961], [13831, 13961, 13960], [13832, 13833, 13961], [13833, 13962, 13961], [13833, 13834, 13963], [13833, 13963, 13962], [13834, 13835, 13963], [13835, 13964, 13963], [13835, 13836, 13965], [13835, 13965, 13964], [13836, 13837, 13965], [13837, 13966, 13965], [13837, 13838, 13967], [13837, 13967, 13966], [13838, 13839, 13967], [13839, 13968, 13967], [13839, 13840, 13969], [13839, 13969, 13968], [13840, 13841, 13969], [13841, 13970, 13969], [13841, 13842, 13971], [13841, 13971, 13970], [13842, 13843, 13971], [13843, 13972, 13971], [13843, 13844, 13973], [13843, 13973, 13972], [13844, 
13845, 13973], [13845, 13974, 13973], [13845, 13846, 13975], [13845, 13975, 13974], [13846, 13847, 13975], [13847, 13976, 13975], [13847, 13848, 13977], [13847, 13977, 13976], [13849, 13850, 13979], [13849, 13979, 13978], [13850, 13851, 13979], [13851, 13980, 13979], [13851, 13852, 13981], [13851, 13981, 13980], [13852, 13853, 13981], [13853, 13982, 13981], [13853, 13854, 13983], [13853, 13983, 13982], [13854, 13855, 13983], [13855, 13984, 13983], [13855, 13856, 13985], [13855, 13985, 13984], [13856, 13857, 13985], [13857, 13986, 13985], [13857, 13858, 13987], [13857, 13987, 13986], [13858, 13859, 13987], [13859, 13988, 13987], [13859, 13860, 13989], [13859, 13989, 13988], [13860, 13861, 13989], [13861, 13990, 13989], [13861, 13862, 13991], [13861, 13991, 13990], [13862, 13863, 13991], [13863, 13992, 13991], [13863, 13864, 13993], [13863, 13993, 13992], [13864, 13865, 13993], [13865, 13994, 13993], [13865, 13866, 13995], [13865, 13995, 13994], [13866, 13867, 13995], [13867, 13996, 13995], [13867, 13868, 13997], [13867, 13997, 13996], [13868, 13869, 13997], [13869, 13998, 13997], [13869, 13870, 13999], [13869, 13999, 13998], [13870, 13871, 13999], [13871, 14000, 13999], [13871, 13872, 14001], [13871, 14001, 14000], [13872, 13873, 14001], [13873, 14002, 14001], [13873, 13874, 14003], [13873, 14003, 14002], [13874, 13875, 14003], [13875, 14004, 14003], [13875, 13876, 14005], [13875, 14005, 14004], [13876, 13877, 14005], [13877, 14006, 14005], [13877, 13878, 14007], [13877, 14007, 14006], [13878, 13879, 14007], [13879, 14008, 14007], [13879, 13880, 14009], [13879, 14009, 14008], [13880, 13881, 14009], [13881, 14010, 14009], [13881, 13882, 14011], [13881, 14011, 14010], [13882, 13883, 14011], [13883, 14012, 14011], [13883, 13884, 14013], [13883, 14013, 14012], [13884, 13885, 14013], [13885, 14014, 14013], [13885, 13886, 14015], [13885, 14015, 14014], [13886, 13887, 14015], [13887, 14016, 14015], [13887, 13888, 14017], [13887, 14017, 14016], [13888, 13889, 14017], [13889, 14018, 14017], [13889, 13890, 14019], [13889, 14019, 14018], [13890, 13891, 14019], [13891, 14020, 14019], [13891, 13892, 14021], [13891, 14021, 14020], [13892, 13893, 14021], [13893, 14022, 14021], [13893, 13894, 14023], [13893, 14023, 14022], [13894, 13895, 14023], [13895, 14024, 14023], [13895, 13896, 14025], [13895, 14025, 14024], [13896, 13897, 14025], [13897, 14026, 14025], [13897, 13898, 14027], [13897, 14027, 14026], [13898, 13899, 14027], [13899, 14028, 14027], [13899, 13900, 14029], [13899, 14029, 14028], [13900, 13901, 14029], [13901, 14030, 14029], [13901, 13902, 14031], [13901, 14031, 14030], [13902, 13903, 14031], [13903, 14032, 14031], [13903, 13904, 14033], [13903, 14033, 14032], [13904, 13905, 14033], [13905, 14034, 14033], [13905, 13906, 14035], [13905, 14035, 14034], [13906, 13907, 14035], [13907, 14036, 14035], [13907, 13908, 14037], [13907, 14037, 14036], [13908, 13909, 14037], [13909, 14038, 14037], [13909, 13910, 14039], [13909, 14039, 14038], [13910, 13911, 14039], [13911, 14040, 14039], [13911, 13912, 14041], [13911, 14041, 14040], [13912, 13913, 14041], [13913, 14042, 14041], [13913, 13914, 14043], [13913, 14043, 14042], [13914, 13915, 14043], [13915, 14044, 14043], [13915, 13916, 14045], [13915, 14045, 14044], [13916, 13917, 14045], [13917, 14046, 14045], [13917, 13918, 14047], [13917, 14047, 14046], [13918, 13919, 14047], [13919, 14048, 14047], [13919, 13920, 14049], [13919, 14049, 14048], [13920, 13921, 14049], [13921, 14050, 14049], [13921, 13922, 14051], [13921, 14051, 14050], [13922, 13923, 
14051], [13923, 14052, 14051], [13923, 13924, 14053], [13923, 14053, 14052], [13924, 13925, 14053], [13925, 14054, 14053], [13925, 13926, 14055], [13925, 14055, 14054], [13926, 13927, 14055], [13927, 14056, 14055], [13927, 13928, 14057], [13927, 14057, 14056], [13928, 13929, 14057], [13929, 14058, 14057], [13929, 13930, 14059], [13929, 14059, 14058], [13930, 13931, 14059], [13931, 14060, 14059], [13931, 13932, 14061], [13931, 14061, 14060], [13932, 13933, 14061], [13933, 14062, 14061], [13933, 13934, 14063], [13933, 14063, 14062], [13934, 13935, 14063], [13935, 14064, 14063], [13935, 13936, 14065], [13935, 14065, 14064], [13936, 13937, 14065], [13937, 14066, 14065], [13937, 13938, 14067], [13937, 14067, 14066], [13938, 13939, 14067], [13939, 14068, 14067], [13939, 13940, 14069], [13939, 14069, 14068], [13940, 13941, 14069], [13941, 14070, 14069], [13941, 13942, 14071], [13941, 14071, 14070], [13942, 13943, 14071], [13943, 14072, 14071], [13943, 13944, 14073], [13943, 14073, 14072], [13944, 13945, 14073], [13945, 14074, 14073], [13945, 13946, 14075], [13945, 14075, 14074], [13946, 13947, 14075], [13947, 14076, 14075], [13947, 13948, 14077], [13947, 14077, 14076], [13948, 13949, 14077], [13949, 14078, 14077], [13949, 13950, 14079], [13949, 14079, 14078], [13950, 13951, 14079], [13951, 14080, 14079], [13951, 13952, 14081], [13951, 14081, 14080], [13952, 13953, 14081], [13953, 14082, 14081], [13953, 13954, 14083], [13953, 14083, 14082], [13954, 13955, 14083], [13955, 14084, 14083], [13955, 13956, 14085], [13955, 14085, 14084], [13956, 13957, 14085], [13957, 14086, 14085], [13957, 13958, 14087], [13957, 14087, 14086], [13958, 13959, 14087], [13959, 14088, 14087], [13959, 13960, 14089], [13959, 14089, 14088], [13960, 13961, 14089], [13961, 14090, 14089], [13961, 13962, 14091], [13961, 14091, 14090], [13962, 13963, 14091], [13963, 14092, 14091], [13963, 13964, 14093], [13963, 14093, 14092], [13964, 13965, 14093], [13965, 14094, 14093], [13965, 13966, 14095], [13965, 14095, 14094], [13966, 13967, 14095], [13967, 14096, 14095], [13967, 13968, 14097], [13967, 14097, 14096], [13968, 13969, 14097], [13969, 14098, 14097], [13969, 13970, 14099], [13969, 14099, 14098], [13970, 13971, 14099], [13971, 14100, 14099], [13971, 13972, 14101], [13971, 14101, 14100], [13972, 13973, 14101], [13973, 14102, 14101], [13973, 13974, 14103], [13973, 14103, 14102], [13974, 13975, 14103], [13975, 14104, 14103], [13975, 13976, 14105], [13975, 14105, 14104], [13976, 13977, 14105], [13977, 14106, 14105], [13978, 13979, 14107], [13979, 14108, 14107], [13979, 13980, 14109], [13979, 14109, 14108], [13980, 13981, 14109], [13981, 14110, 14109], [13981, 13982, 14111], [13981, 14111, 14110], [13982, 13983, 14111], [13983, 14112, 14111], [13983, 13984, 14113], [13983, 14113, 14112], [13984, 13985, 14113], [13985, 14114, 14113], [13985, 13986, 14115], [13985, 14115, 14114], [13986, 13987, 14115], [13987, 14116, 14115], [13987, 13988, 14117], [13987, 14117, 14116], [13988, 13989, 14117], [13989, 14118, 14117], [13989, 13990, 14119], [13989, 14119, 14118], [13990, 13991, 14119], [13991, 14120, 14119], [13991, 13992, 14121], [13991, 14121, 14120], [13992, 13993, 14121], [13993, 14122, 14121], [13993, 13994, 14123], [13993, 14123, 14122], [13994, 13995, 14123], [13995, 14124, 14123], [13995, 13996, 14125], [13995, 14125, 14124], [13996, 13997, 14125], [13997, 14126, 14125], [13997, 13998, 14127], [13997, 14127, 14126], [13998, 13999, 14127], [13999, 14128, 14127], [13999, 14000, 14129], [13999, 14129, 14128], [14000, 14001, 14129], 
[14001, 14130, 14129], [14001, 14002, 14131], [14001, 14131, 14130], [14002, 14003, 14131], [14003, 14132, 14131], [14003, 14004, 14133], [14003, 14133, 14132], [14004, 14005, 14133], [14005, 14134, 14133], [14005, 14006, 14135], [14005, 14135, 14134], [14006, 14007, 14135], [14007, 14136, 14135], [14007, 14008, 14137], [14007, 14137, 14136], [14008, 14009, 14137], [14009, 14138, 14137], [14009, 14010, 14139], [14009, 14139, 14138], [14010, 14011, 14139], [14011, 14140, 14139], [14011, 14012, 14141], [14011, 14141, 14140], [14012, 14013, 14141], [14013, 14142, 14141], [14013, 14014, 14143], [14013, 14143, 14142], [14014, 14015, 14143], [14015, 14144, 14143], [14015, 14016, 14145], [14015, 14145, 14144], [14016, 14017, 14145], [14017, 14146, 14145], [14017, 14018, 14147], [14017, 14147, 14146], [14018, 14019, 14147], [14019, 14148, 14147], [14019, 14020, 14149], [14019, 14149, 14148], [14020, 14021, 14149], [14021, 14150, 14149], [14021, 14022, 14151], [14021, 14151, 14150], [14022, 14023, 14151], [14023, 14152, 14151], [14023, 14024, 14153], [14023, 14153, 14152], [14024, 14025, 14153], [14025, 14154, 14153], [14025, 14026, 14155], [14025, 14155, 14154], [14026, 14027, 14155], [14027, 14156, 14155], [14027, 14028, 14157], [14027, 14157, 14156], [14028, 14029, 14157], [14029, 14158, 14157], [14029, 14030, 14159], [14029, 14159, 14158], [14030, 14031, 14159], [14031, 14160, 14159], [14031, 14032, 14161], [14031, 14161, 14160], [14032, 14033, 14161], [14033, 14162, 14161], [14033, 14034, 14163], [14033, 14163, 14162], [14034, 14035, 14163], [14035, 14164, 14163], [14035, 14036, 14165], [14035, 14165, 14164], [14036, 14037, 14165], [14037, 14166, 14165], [14037, 14038, 14167], [14037, 14167, 14166], [14038, 14039, 14167], [14039, 14168, 14167], [14039, 14040, 14169], [14039, 14169, 14168], [14040, 14041, 14169], [14041, 14170, 14169], [14041, 14042, 14171], [14041, 14171, 14170], [14042, 14043, 14171], [14043, 14172, 14171], [14043, 14044, 14173], [14043, 14173, 14172], [14044, 14045, 14173], [14045, 14174, 14173], [14045, 14046, 14175], [14045, 14175, 14174], [14046, 14047, 14175], [14047, 14176, 14175], [14047, 14048, 14177], [14047, 14177, 14176], [14048, 14049, 14177], [14049, 14178, 14177], [14049, 14050, 14179], [14049, 14179, 14178], [14050, 14051, 14179], [14051, 14180, 14179], [14051, 14052, 14181], [14051, 14181, 14180], [14052, 14053, 14181], [14053, 14182, 14181], [14053, 14054, 14183], [14053, 14183, 14182], [14054, 14055, 14183], [14055, 14184, 14183], [14055, 14056, 14185], [14055, 14185, 14184], [14056, 14057, 14185], [14057, 14186, 14185], [14057, 14058, 14187], [14057, 14187, 14186], [14058, 14059, 14187], [14059, 14188, 14187], [14059, 14060, 14189], [14059, 14189, 14188], [14060, 14061, 14189], [14061, 14190, 14189], [14061, 14062, 14191], [14061, 14191, 14190], [14062, 14063, 14191], [14063, 14192, 14191], [14063, 14064, 14193], [14063, 14193, 14192], [14064, 14065, 14193], [14065, 14194, 14193], [14065, 14066, 14195], [14065, 14195, 14194], [14066, 14067, 14195], [14067, 14196, 14195], [14067, 14068, 14197], [14067, 14197, 14196], [14068, 14069, 14197], [14069, 14198, 14197], [14069, 14070, 14199], [14069, 14199, 14198], [14070, 14071, 14199], [14071, 14200, 14199], [14071, 14072, 14201], [14071, 14201, 14200], [14072, 14073, 14201], [14073, 14202, 14201], [14073, 14074, 14203], [14073, 14203, 14202], [14074, 14075, 14203], [14075, 14204, 14203], [14075, 14076, 14205], [14075, 14205, 14204], [14076, 14077, 14205], [14077, 14206, 14205], [14077, 14078, 14207], [14077, 
14207, 14206], [14078, 14079, 14207], [14079, 14208, 14207], [14079, 14080, 14209], [14079, 14209, 14208], [14080, 14081, 14209], [14081, 14210, 14209], [14081, 14082, 14211], [14081, 14211, 14210], [14082, 14083, 14211], [14083, 14212, 14211], [14083, 14084, 14213], [14083, 14213, 14212], [14084, 14085, 14213], [14085, 14214, 14213], [14085, 14086, 14215], [14085, 14215, 14214], [14086, 14087, 14215], [14087, 14216, 14215], [14087, 14088, 14217], [14087, 14217, 14216], [14088, 14089, 14217], [14089, 14218, 14217], [14089, 14090, 14219], [14089, 14219, 14218], [14090, 14091, 14219], [14091, 14220, 14219], [14091, 14092, 14221], [14091, 14221, 14220], [14092, 14093, 14221], [14093, 14222, 14221], [14093, 14094, 14223], [14093, 14223, 14222], [14094, 14095, 14223], [14095, 14224, 14223], [14095, 14096, 14225], [14095, 14225, 14224], [14096, 14097, 14225], [14097, 14226, 14225], [14097, 14098, 14227], [14097, 14227, 14226], [14098, 14099, 14227], [14099, 14228, 14227], [14099, 14100, 14229], [14099, 14229, 14228], [14100, 14101, 14229], [14101, 14230, 14229], [14101, 14102, 14231], [14101, 14231, 14230], [14102, 14103, 14231], [14103, 14232, 14231], [14103, 14104, 14233], [14103, 14233, 14232], [14104, 14105, 14233], [14105, 14234, 14233], [14105, 14106, 14235], [14105, 14235, 14234], [14107, 14108, 14237], [14107, 14237, 14236], [14108, 14109, 14237], [14109, 14238, 14237], [14109, 14110, 14239], [14109, 14239, 14238], [14110, 14111, 14239], [14111, 14240, 14239], [14111, 14112, 14241], [14111, 14241, 14240], [14112, 14113, 14241], [14113, 14242, 14241], [14113, 14114, 14243], [14113, 14243, 14242], [14114, 14115, 14243], [14115, 14244, 14243], [14115, 14116, 14245], [14115, 14245, 14244], [14116, 14117, 14245], [14117, 14246, 14245], [14117, 14118, 14247], [14117, 14247, 14246], [14118, 14119, 14247], [14119, 14248, 14247], [14119, 14120, 14249], [14119, 14249, 14248], [14120, 14121, 14249], [14121, 14250, 14249], [14121, 14122, 14251], [14121, 14251, 14250], [14122, 14123, 14251], [14123, 14252, 14251], [14123, 14124, 14253], [14123, 14253, 14252], [14124, 14125, 14253], [14125, 14254, 14253], [14125, 14126, 14255], [14125, 14255, 14254], [14126, 14127, 14255], [14127, 14256, 14255], [14127, 14128, 14257], [14127, 14257, 14256], [14128, 14129, 14257], [14129, 14258, 14257], [14129, 14130, 14259], [14129, 14259, 14258], [14130, 14131, 14259], [14131, 14260, 14259], [14131, 14132, 14261], [14131, 14261, 14260], [14132, 14133, 14261], [14133, 14262, 14261], [14133, 14134, 14263], [14133, 14263, 14262], [14134, 14135, 14263], [14135, 14264, 14263], [14135, 14136, 14265], [14135, 14265, 14264], [14136, 14137, 14265], [14137, 14266, 14265], [14137, 14138, 14267], [14137, 14267, 14266], [14138, 14139, 14267], [14139, 14268, 14267], [14139, 14140, 14269], [14139, 14269, 14268], [14140, 14141, 14269], [14141, 14270, 14269], [14141, 14142, 14271], [14141, 14271, 14270], [14142, 14143, 14271], [14143, 14272, 14271], [14143, 14144, 14273], [14143, 14273, 14272], [14144, 14145, 14273], [14145, 14274, 14273], [14145, 14146, 14275], [14145, 14275, 14274], [14146, 14147, 14275], [14147, 14276, 14275], [14147, 14148, 14277], [14147, 14277, 14276], [14148, 14149, 14277], [14149, 14278, 14277], [14149, 14150, 14279], [14149, 14279, 14278], [14150, 14151, 14279], [14151, 14280, 14279], [14151, 14152, 14281], [14151, 14281, 14280], [14152, 14153, 14281], [14153, 14282, 14281], [14153, 14154, 14283], [14153, 14283, 14282], [14154, 14155, 14283], [14155, 14284, 14283], [14155, 14156, 14285], [14155, 14285, 
14284], [14156, 14157, 14285], [14157, 14286, 14285], [14157, 14158, 14287], [14157, 14287, 14286], [14158, 14159, 14287], [14159, 14288, 14287], [14159, 14160, 14289], [14159, 14289, 14288], [14160, 14161, 14289], [14161, 14290, 14289], [14161, 14162, 14291], [14161, 14291, 14290], [14162, 14163, 14291], [14163, 14292, 14291], [14163, 14164, 14293], [14163, 14293, 14292], [14164, 14165, 14293], [14165, 14294, 14293], [14165, 14166, 14295], [14165, 14295, 14294], [14166, 14167, 14295], [14167, 14296, 14295], [14167, 14168, 14297], [14167, 14297, 14296], [14168, 14169, 14297], [14169, 14298, 14297], [14169, 14170, 14299], [14169, 14299, 14298], [14170, 14171, 14299], [14171, 14300, 14299], [14171, 14172, 14301], [14171, 14301, 14300], [14172, 14173, 14301], [14173, 14302, 14301], [14173, 14174, 14303], [14173, 14303, 14302], [14174, 14175, 14303], [14175, 14304, 14303], [14175, 14176, 14305], [14175, 14305, 14304], [14176, 14177, 14305], [14177, 14306, 14305], [14177, 14178, 14307], [14177, 14307, 14306], [14178, 14179, 14307], [14179, 14308, 14307], [14179, 14180, 14309], [14179, 14309, 14308], [14180, 14181, 14309], [14181, 14310, 14309], [14181, 14182, 14311], [14181, 14311, 14310], [14182, 14183, 14311], [14183, 14312, 14311], [14183, 14184, 14313], [14183, 14313, 14312], [14184, 14185, 14313], [14185, 14314, 14313], [14185, 14186, 14315], [14185, 14315, 14314], [14186, 14187, 14315], [14187, 14316, 14315], [14187, 14188, 14317], [14187, 14317, 14316], [14188, 14189, 14317], [14189, 14318, 14317], [14189, 14190, 14319], [14189, 14319, 14318], [14190, 14191, 14319], [14191, 14320, 14319], [14191, 14192, 14321], [14191, 14321, 14320], [14192, 14193, 14321], [14193, 14322, 14321], [14193, 14194, 14323], [14193, 14323, 14322], [14194, 14195, 14323], [14195, 14324, 14323], [14195, 14196, 14325], [14195, 14325, 14324], [14196, 14197, 14325], [14197, 14326, 14325], [14197, 14198, 14327], [14197, 14327, 14326], [14198, 14199, 14327], [14199, 14328, 14327], [14199, 14200, 14329], [14199, 14329, 14328], [14200, 14201, 14329], [14201, 14330, 14329], [14201, 14202, 14331], [14201, 14331, 14330], [14202, 14203, 14331], [14203, 14332, 14331], [14203, 14204, 14333], [14203, 14333, 14332], [14204, 14205, 14333], [14205, 14334, 14333], [14205, 14206, 14335], [14205, 14335, 14334], [14206, 14207, 14335], [14207, 14336, 14335], [14207, 14208, 14337], [14207, 14337, 14336], [14208, 14209, 14337], [14209, 14338, 14337], [14209, 14210, 14339], [14209, 14339, 14338], [14210, 14211, 14339], [14211, 14340, 14339], [14211, 14212, 14341], [14211, 14341, 14340], [14212, 14213, 14341], [14213, 14342, 14341], [14213, 14214, 14343], [14213, 14343, 14342], [14214, 14215, 14343], [14215, 14344, 14343], [14215, 14216, 14345], [14215, 14345, 14344], [14216, 14217, 14345], [14217, 14346, 14345], [14217, 14218, 14347], [14217, 14347, 14346], [14218, 14219, 14347], [14219, 14348, 14347], [14219, 14220, 14349], [14219, 14349, 14348], [14220, 14221, 14349], [14221, 14350, 14349], [14221, 14222, 14351], [14221, 14351, 14350], [14222, 14223, 14351], [14223, 14352, 14351], [14223, 14224, 14353], [14223, 14353, 14352], [14224, 14225, 14353], [14225, 14354, 14353], [14225, 14226, 14355], [14225, 14355, 14354], [14226, 14227, 14355], [14227, 14356, 14355], [14227, 14228, 14357], [14227, 14357, 14356], [14228, 14229, 14357], [14229, 14358, 14357], [14229, 14230, 14359], [14229, 14359, 14358], [14230, 14231, 14359], [14231, 14360, 14359], [14231, 14232, 14361], [14231, 14361, 14360], [14232, 14233, 14361], [14233, 14362, 14361], 
[14233, 14234, 14363], [14233, 14363, 14362], [14234, 14235, 14363], [14235, 14364, 14363], [14236, 14237, 14365], [14237, 14366, 14365], [14237, 14238, 14367], [14237, 14367, 14366], [14238, 14239, 14367], [14239, 14368, 14367], [14239, 14240, 14369], [14239, 14369, 14368], [14240, 14241, 14369], [14241, 14370, 14369], [14241, 14242, 14371], [14241, 14371, 14370], [14242, 14243, 14371], [14243, 14372, 14371], [14243, 14244, 14373], [14243, 14373, 14372], [14244, 14245, 14373], [14245, 14374, 14373], [14245, 14246, 14375], [14245, 14375, 14374], [14246, 14247, 14375], [14247, 14376, 14375], [14247, 14248, 14377], [14247, 14377, 14376], [14248, 14249, 14377], [14249, 14378, 14377], [14249, 14250, 14379], [14249, 14379, 14378], [14250, 14251, 14379], [14251, 14380, 14379], [14251, 14252, 14381], [14251, 14381, 14380], [14252, 14253, 14381], [14253, 14382, 14381], [14253, 14254, 14383], [14253, 14383, 14382], [14254, 14255, 14383], [14255, 14384, 14383], [14255, 14256, 14385], [14255, 14385, 14384], [14256, 14257, 14385], [14257, 14386, 14385], [14257, 14258, 14387], [14257, 14387, 14386], [14258, 14259, 14387], [14259, 14388, 14387], [14259, 14260, 14389], [14259, 14389, 14388], [14260, 14261, 14389], [14261, 14390, 14389], [14261, 14262, 14391], [14261, 14391, 14390], [14262, 14263, 14391], [14263, 14392, 14391], [14263, 14264, 14393], [14263, 14393, 14392], [14264, 14265, 14393], [14265, 14394, 14393], [14265, 14266, 14395], [14265, 14395, 14394], [14266, 14267, 14395], [14267, 14396, 14395], [14267, 14268, 14397], [14267, 14397, 14396], [14268, 14269, 14397], [14269, 14398, 14397], [14269, 14270, 14399], [14269, 14399, 14398], [14270, 14271, 14399], [14271, 14400, 14399], [14271, 14272, 14401], [14271, 14401, 14400], [14272, 14273, 14401], [14273, 14402, 14401], [14273, 14274, 14403], [14273, 14403, 14402], [14274, 14275, 14403], [14275, 14404, 14403], [14275, 14276, 14405], [14275, 14405, 14404], [14276, 14277, 14405], [14277, 14406, 14405], [14277, 14278, 14407], [14277, 14407, 14406], [14278, 14279, 14407], [14279, 14408, 14407], [14279, 14280, 14409], [14279, 14409, 14408], [14280, 14281, 14409], [14281, 14410, 14409], [14281, 14282, 14411], [14281, 14411, 14410], [14282, 14283, 14411], [14283, 14412, 14411], [14283, 14284, 14413], [14283, 14413, 14412], [14284, 14285, 14413], [14285, 14414, 14413], [14285, 14286, 14415], [14285, 14415, 14414], [14286, 14287, 14415], [14287, 14416, 14415], [14287, 14288, 14417], [14287, 14417, 14416], [14288, 14289, 14417], [14289, 14418, 14417], [14289, 14290, 14419], [14289, 14419, 14418], [14290, 14291, 14419], [14291, 14420, 14419], [14291, 14292, 14421], [14291, 14421, 14420], [14292, 14293, 14421], [14293, 14422, 14421], [14293, 14294, 14423], [14293, 14423, 14422], [14294, 14295, 14423], [14295, 14424, 14423], [14295, 14296, 14425], [14295, 14425, 14424], [14296, 14297, 14425], [14297, 14426, 14425], [14297, 14298, 14427], [14297, 14427, 14426], [14298, 14299, 14427], [14299, 14428, 14427], [14299, 14300, 14429], [14299, 14429, 14428], [14300, 14301, 14429], [14301, 14430, 14429], [14301, 14302, 14431], [14301, 14431, 14430], [14302, 14303, 14431], [14303, 14432, 14431], [14303, 14304, 14433], [14303, 14433, 14432], [14304, 14305, 14433], [14305, 14434, 14433], [14305, 14306, 14435], [14305, 14435, 14434], [14306, 14307, 14435], [14307, 14436, 14435], [14307, 14308, 14437], [14307, 14437, 14436], [14308, 14309, 14437], [14309, 14438, 14437], [14309, 14310, 14439], [14309, 14439, 14438], [14310, 14311, 14439], [14311, 14440, 14439], [14311, 
14312, 14441], [14311, 14441, 14440], [14312, 14313, 14441], [14313, 14442, 14441], [14313, 14314, 14443], [14313, 14443, 14442], [14314, 14315, 14443], [14315, 14444, 14443], [14315, 14316, 14445], [14315, 14445, 14444], [14316, 14317, 14445], [14317, 14446, 14445], [14317, 14318, 14447], [14317, 14447, 14446], [14318, 14319, 14447], [14319, 14448, 14447], [14319, 14320, 14449], [14319, 14449, 14448], [14320, 14321, 14449], [14321, 14450, 14449], [14321, 14322, 14451], [14321, 14451, 14450], [14322, 14323, 14451], [14323, 14452, 14451], [14323, 14324, 14453], [14323, 14453, 14452], [14324, 14325, 14453], [14325, 14454, 14453], [14325, 14326, 14455], [14325, 14455, 14454], [14326, 14327, 14455], [14327, 14456, 14455], [14327, 14328, 14457], [14327, 14457, 14456], [14328, 14329, 14457], [14329, 14458, 14457], [14329, 14330, 14459], [14329, 14459, 14458], [14330, 14331, 14459], [14331, 14460, 14459], [14331, 14332, 14461], [14331, 14461, 14460], [14332, 14333, 14461], [14333, 14462, 14461], [14333, 14334, 14463], [14333, 14463, 14462], [14334, 14335, 14463], [14335, 14464, 14463], [14335, 14336, 14465], [14335, 14465, 14464], [14336, 14337, 14465], [14337, 14466, 14465], [14337, 14338, 14467], [14337, 14467, 14466], [14338, 14339, 14467], [14339, 14468, 14467], [14339, 14340, 14469], [14339, 14469, 14468], [14340, 14341, 14469], [14341, 14470, 14469], [14341, 14342, 14471], [14341, 14471, 14470], [14342, 14343, 14471], [14343, 14472, 14471], [14343, 14344, 14473], [14343, 14473, 14472], [14344, 14345, 14473], [14345, 14474, 14473], [14345, 14346, 14475], [14345, 14475, 14474], [14346, 14347, 14475], [14347, 14476, 14475], [14347, 14348, 14477], [14347, 14477, 14476], [14348, 14349, 14477], [14349, 14478, 14477], [14349, 14350, 14479], [14349, 14479, 14478], [14350, 14351, 14479], [14351, 14480, 14479], [14351, 14352, 14481], [14351, 14481, 14480], [14352, 14353, 14481], [14353, 14482, 14481], [14353, 14354, 14483], [14353, 14483, 14482], [14354, 14355, 14483], [14355, 14484, 14483], [14355, 14356, 14485], [14355, 14485, 14484], [14356, 14357, 14485], [14357, 14486, 14485], [14357, 14358, 14487], [14357, 14487, 14486], [14358, 14359, 14487], [14359, 14488, 14487], [14359, 14360, 14489], [14359, 14489, 14488], [14360, 14361, 14489], [14361, 14490, 14489], [14361, 14362, 14491], [14361, 14491, 14490], [14362, 14363, 14491], [14363, 14492, 14491], [14363, 14364, 14493], [14363, 14493, 14492], [14365, 14366, 14495], [14365, 14495, 14494], [14366, 14367, 14495], [14367, 14496, 14495], [14367, 14368, 14497], [14367, 14497, 14496], [14368, 14369, 14497], [14369, 14498, 14497], [14369, 14370, 14499], [14369, 14499, 14498], [14370, 14371, 14499], [14371, 14500, 14499], [14371, 14372, 14501], [14371, 14501, 14500], [14372, 14373, 14501], [14373, 14502, 14501], [14373, 14374, 14503], [14373, 14503, 14502], [14374, 14375, 14503], [14375, 14504, 14503], [14375, 14376, 14505], [14375, 14505, 14504], [14376, 14377, 14505], [14377, 14506, 14505], [14377, 14378, 14507], [14377, 14507, 14506], [14378, 14379, 14507], [14379, 14508, 14507], [14379, 14380, 14509], [14379, 14509, 14508], [14380, 14381, 14509], [14381, 14510, 14509], [14381, 14382, 14511], [14381, 14511, 14510], [14382, 14383, 14511], [14383, 14512, 14511], [14383, 14384, 14513], [14383, 14513, 14512], [14384, 14385, 14513], [14385, 14514, 14513], [14385, 14386, 14515], [14385, 14515, 14514], [14386, 14387, 14515], [14387, 14516, 14515], [14387, 14388, 14517], [14387, 14517, 14516], [14388, 14389, 14517], [14389, 14518, 14517], [14389, 14390, 
14519], [14389, 14519, 14518], [14390, 14391, 14519], [14391, 14520, 14519], [14391, 14392, 14521], [14391, 14521, 14520], [14392, 14393, 14521], [14393, 14522, 14521], [14393, 14394, 14523], [14393, 14523, 14522], [14394, 14395, 14523], [14395, 14524, 14523], [14395, 14396, 14525], [14395, 14525, 14524], [14396, 14397, 14525], [14397, 14526, 14525], [14397, 14398, 14527], [14397, 14527, 14526], [14398, 14399, 14527], [14399, 14528, 14527], [14399, 14400, 14529], [14399, 14529, 14528], [14400, 14401, 14529], [14401, 14530, 14529], [14401, 14402, 14531], [14401, 14531, 14530], [14402, 14403, 14531], [14403, 14532, 14531], [14403, 14404, 14533], [14403, 14533, 14532], [14404, 14405, 14533], [14405, 14534, 14533], [14405, 14406, 14535], [14405, 14535, 14534], [14406, 14407, 14535], [14407, 14536, 14535], [14407, 14408, 14537], [14407, 14537, 14536], [14408, 14409, 14537], [14409, 14538, 14537], [14409, 14410, 14539], [14409, 14539, 14538], [14410, 14411, 14539], [14411, 14540, 14539], [14411, 14412, 14541], [14411, 14541, 14540], [14412, 14413, 14541], [14413, 14542, 14541], [14413, 14414, 14543], [14413, 14543, 14542], [14414, 14415, 14543], [14415, 14544, 14543], [14415, 14416, 14545], [14415, 14545, 14544], [14416, 14417, 14545], [14417, 14546, 14545], [14417, 14418, 14547], [14417, 14547, 14546], [14418, 14419, 14547], [14419, 14548, 14547], [14419, 14420, 14549], [14419, 14549, 14548], [14420, 14421, 14549], [14421, 14550, 14549], [14421, 14422, 14551], [14421, 14551, 14550], [14422, 14423, 14551], [14423, 14552, 14551], [14423, 14424, 14553], [14423, 14553, 14552], [14424, 14425, 14553], [14425, 14554, 14553], [14425, 14426, 14555], [14425, 14555, 14554], [14426, 14427, 14555], [14427, 14556, 14555], [14427, 14428, 14557], [14427, 14557, 14556], [14428, 14429, 14557], [14429, 14558, 14557], [14429, 14430, 14559], [14429, 14559, 14558], [14430, 14431, 14559], [14431, 14560, 14559], [14431, 14432, 14561], [14431, 14561, 14560], [14432, 14433, 14561], [14433, 14562, 14561], [14433, 14434, 14563], [14433, 14563, 14562], [14434, 14435, 14563], [14435, 14564, 14563], [14435, 14436, 14565], [14435, 14565, 14564], [14436, 14437, 14565], [14437, 14566, 14565], [14437, 14438, 14567], [14437, 14567, 14566], [14438, 14439, 14567], [14439, 14568, 14567], [14439, 14440, 14569], [14439, 14569, 14568], [14440, 14441, 14569], [14441, 14570, 14569], [14441, 14442, 14571], [14441, 14571, 14570], [14442, 14443, 14571], [14443, 14572, 14571], [14443, 14444, 14573], [14443, 14573, 14572], [14444, 14445, 14573], [14445, 14574, 14573], [14445, 14446, 14575], [14445, 14575, 14574], [14446, 14447, 14575], [14447, 14576, 14575], [14447, 14448, 14577], [14447, 14577, 14576], [14448, 14449, 14577], [14449, 14578, 14577], [14449, 14450, 14579], [14449, 14579, 14578], [14450, 14451, 14579], [14451, 14580, 14579], [14451, 14452, 14581], [14451, 14581, 14580], [14452, 14453, 14581], [14453, 14582, 14581], [14453, 14454, 14583], [14453, 14583, 14582], [14454, 14455, 14583], [14455, 14584, 14583], [14455, 14456, 14585], [14455, 14585, 14584], [14456, 14457, 14585], [14457, 14586, 14585], [14457, 14458, 14587], [14457, 14587, 14586], [14458, 14459, 14587], [14459, 14588, 14587], [14459, 14460, 14589], [14459, 14589, 14588], [14460, 14461, 14589], [14461, 14590, 14589], [14461, 14462, 14591], [14461, 14591, 14590], [14462, 14463, 14591], [14463, 14592, 14591], [14463, 14464, 14593], [14463, 14593, 14592], [14464, 14465, 14593], [14465, 14594, 14593], [14465, 14466, 14595], [14465, 14595, 14594], [14466, 14467, 14595], 
[14467, 14596, 14595], [14467, 14468, 14597], [14467, 14597, 14596], [14468, 14469, 14597], [14469, 14598, 14597], [14469, 14470, 14599], [14469, 14599, 14598], [14470, 14471, 14599], [14471, 14600, 14599], [14471, 14472, 14601], [14471, 14601, 14600], [14472, 14473, 14601], [14473, 14602, 14601], [14473, 14474, 14603], [14473, 14603, 14602], [14474, 14475, 14603], [14475, 14604, 14603], [14475, 14476, 14605], [14475, 14605, 14604], [14476, 14477, 14605], [14477, 14606, 14605], [14477, 14478, 14607], [14477, 14607, 14606], [14478, 14479, 14607], [14479, 14608, 14607], [14479, 14480, 14609], [14479, 14609, 14608], [14480, 14481, 14609], [14481, 14610, 14609], [14481, 14482, 14611], [14481, 14611, 14610], [14482, 14483, 14611], [14483, 14612, 14611], [14483, 14484, 14613], [14483, 14613, 14612], [14484, 14485, 14613], [14485, 14614, 14613], [14485, 14486, 14615], [14485, 14615, 14614], [14486, 14487, 14615], [14487, 14616, 14615], [14487, 14488, 14617], [14487, 14617, 14616], [14488, 14489, 14617], [14489, 14618, 14617], [14489, 14490, 14619], [14489, 14619, 14618], [14490, 14491, 14619], [14491, 14620, 14619], [14491, 14492, 14621], [14491, 14621, 14620], [14492, 14493, 14621], [14493, 14622, 14621], [14494, 14495, 14623], [14495, 14624, 14623], [14495, 14496, 14625], [14495, 14625, 14624], [14496, 14497, 14625], [14497, 14626, 14625], [14497, 14498, 14627], [14497, 14627, 14626], [14498, 14499, 14627], [14499, 14628, 14627], [14499, 14500, 14629], [14499, 14629, 14628], [14500, 14501, 14629], [14501, 14630, 14629], [14501, 14502, 14631], [14501, 14631, 14630], [14502, 14503, 14631], [14503, 14632, 14631], [14503, 14504, 14633], [14503, 14633, 14632], [14504, 14505, 14633], [14505, 14634, 14633], [14505, 14506, 14635], [14505, 14635, 14634], [14506, 14507, 14635], [14507, 14636, 14635], [14507, 14508, 14637], [14507, 14637, 14636], [14508, 14509, 14637], [14509, 14638, 14637], [14509, 14510, 14639], [14509, 14639, 14638], [14510, 14511, 14639], [14511, 14640, 14639], [14511, 14512, 14641], [14511, 14641, 14640], [14512, 14513, 14641], [14513, 14642, 14641], [14513, 14514, 14643], [14513, 14643, 14642], [14514, 14515, 14643], [14515, 14644, 14643], [14515, 14516, 14645], [14515, 14645, 14644], [14516, 14517, 14645], [14517, 14646, 14645], [14517, 14518, 14647], [14517, 14647, 14646], [14518, 14519, 14647], [14519, 14648, 14647], [14519, 14520, 14649], [14519, 14649, 14648], [14520, 14521, 14649], [14521, 14650, 14649], [14521, 14522, 14651], [14521, 14651, 14650], [14522, 14523, 14651], [14523, 14652, 14651], [14523, 14524, 14653], [14523, 14653, 14652], [14524, 14525, 14653], [14525, 14654, 14653], [14525, 14526, 14655], [14525, 14655, 14654], [14526, 14527, 14655], [14527, 14656, 14655], [14527, 14528, 14657], [14527, 14657, 14656], [14528, 14529, 14657], [14529, 14658, 14657], [14529, 14530, 14659], [14529, 14659, 14658], [14530, 14531, 14659], [14531, 14660, 14659], [14531, 14532, 14661], [14531, 14661, 14660], [14532, 14533, 14661], [14533, 14662, 14661], [14533, 14534, 14663], [14533, 14663, 14662], [14534, 14535, 14663], [14535, 14664, 14663], [14535, 14536, 14665], [14535, 14665, 14664], [14536, 14537, 14665], [14537, 14666, 14665], [14537, 14538, 14667], [14537, 14667, 14666], [14538, 14539, 14667], [14539, 14668, 14667], [14539, 14540, 14669], [14539, 14669, 14668], [14540, 14541, 14669], [14541, 14670, 14669], [14541, 14542, 14671], [14541, 14671, 14670], [14542, 14543, 14671], [14543, 14672, 14671], [14543, 14544, 14673], [14543, 14673, 14672], [14544, 14545, 14673], [14545, 
14674, 14673], [14545, 14546, 14675], [14545, 14675, 14674], [14546, 14547, 14675], [14547, 14676, 14675], [14547, 14548, 14677], [14547, 14677, 14676], [14548, 14549, 14677], [14549, 14678, 14677], [14549, 14550, 14679], [14549, 14679, 14678], [14550, 14551, 14679], [14551, 14680, 14679], [14551, 14552, 14681], [14551, 14681, 14680], [14552, 14553, 14681], [14553, 14682, 14681], [14553, 14554, 14683], [14553, 14683, 14682], [14554, 14555, 14683], [14555, 14684, 14683], [14555, 14556, 14685], [14555, 14685, 14684], [14556, 14557, 14685], [14557, 14686, 14685], [14557, 14558, 14687], [14557, 14687, 14686], [14558, 14559, 14687], [14559, 14688, 14687], [14559, 14560, 14689], [14559, 14689, 14688], [14560, 14561, 14689], [14561, 14690, 14689], [14561, 14562, 14691], [14561, 14691, 14690], [14562, 14563, 14691], [14563, 14692, 14691], [14563, 14564, 14693], [14563, 14693, 14692], [14564, 14565, 14693], [14565, 14694, 14693], [14565, 14566, 14695], [14565, 14695, 14694], [14566, 14567, 14695], [14567, 14696, 14695], [14567, 14568, 14697], [14567, 14697, 14696], [14568, 14569, 14697], [14569, 14698, 14697], [14569, 14570, 14699], [14569, 14699, 14698], [14570, 14571, 14699], [14571, 14700, 14699], [14571, 14572, 14701], [14571, 14701, 14700], [14572, 14573, 14701], [14573, 14702, 14701], [14573, 14574, 14703], [14573, 14703, 14702], [14574, 14575, 14703], [14575, 14704, 14703], [14575, 14576, 14705], [14575, 14705, 14704], [14576, 14577, 14705], [14577, 14706, 14705], [14577, 14578, 14707], [14577, 14707, 14706], [14578, 14579, 14707], [14579, 14708, 14707], [14579, 14580, 14709], [14579, 14709, 14708], [14580, 14581, 14709], [14581, 14710, 14709], [14581, 14582, 14711], [14581, 14711, 14710], [14582, 14583, 14711], [14583, 14712, 14711], [14583, 14584, 14713], [14583, 14713, 14712], [14584, 14585, 14713], [14585, 14714, 14713], [14585, 14586, 14715], [14585, 14715, 14714], [14586, 14587, 14715], [14587, 14716, 14715], [14587, 14588, 14717], [14587, 14717, 14716], [14588, 14589, 14717], [14589, 14718, 14717], [14589, 14590, 14719], [14589, 14719, 14718], [14590, 14591, 14719], [14591, 14720, 14719], [14591, 14592, 14721], [14591, 14721, 14720], [14592, 14593, 14721], [14593, 14722, 14721], [14593, 14594, 14723], [14593, 14723, 14722], [14594, 14595, 14723], [14595, 14724, 14723], [14595, 14596, 14725], [14595, 14725, 14724], [14596, 14597, 14725], [14597, 14726, 14725], [14597, 14598, 14727], [14597, 14727, 14726], [14598, 14599, 14727], [14599, 14728, 14727], [14599, 14600, 14729], [14599, 14729, 14728], [14600, 14601, 14729], [14601, 14730, 14729], [14601, 14602, 14731], [14601, 14731, 14730], [14602, 14603, 14731], [14603, 14732, 14731], [14603, 14604, 14733], [14603, 14733, 14732], [14604, 14605, 14733], [14605, 14734, 14733], [14605, 14606, 14735], [14605, 14735, 14734], [14606, 14607, 14735], [14607, 14736, 14735], [14607, 14608, 14737], [14607, 14737, 14736], [14608, 14609, 14737], [14609, 14738, 14737], [14609, 14610, 14739], [14609, 14739, 14738], [14610, 14611, 14739], [14611, 14740, 14739], [14611, 14612, 14741], [14611, 14741, 14740], [14612, 14613, 14741], [14613, 14742, 14741], [14613, 14614, 14743], [14613, 14743, 14742], [14614, 14615, 14743], [14615, 14744, 14743], [14615, 14616, 14745], [14615, 14745, 14744], [14616, 14617, 14745], [14617, 14746, 14745], [14617, 14618, 14747], [14617, 14747, 14746], [14618, 14619, 14747], [14619, 14748, 14747], [14619, 14620, 14749], [14619, 14749, 14748], [14620, 14621, 14749], [14621, 14750, 14749], [14621, 14622, 14751], [14621, 14751, 
14750], [14623, 14624, 14753], [14623, 14753, 14752], [14624, 14625, 14753], [14625, 14754, 14753], [14625, 14626, 14755], [14625, 14755, 14754], [14626, 14627, 14755], [14627, 14756, 14755], [14627, 14628, 14757], [14627, 14757, 14756], [14628, 14629, 14757], [14629, 14758, 14757], [14629, 14630, 14759], [14629, 14759, 14758], [14630, 14631, 14759], [14631, 14760, 14759], [14631, 14632, 14761], [14631, 14761, 14760], [14632, 14633, 14761], [14633, 14762, 14761], [14633, 14634, 14763], [14633, 14763, 14762], [14634, 14635, 14763], [14635, 14764, 14763], [14635, 14636, 14765], [14635, 14765, 14764], [14636, 14637, 14765], [14637, 14766, 14765], [14637, 14638, 14767], [14637, 14767, 14766], [14638, 14639, 14767], [14639, 14768, 14767], [14639, 14640, 14769], [14639, 14769, 14768], [14640, 14641, 14769], [14641, 14770, 14769], [14641, 14642, 14771], [14641, 14771, 14770], [14642, 14643, 14771], [14643, 14772, 14771], [14643, 14644, 14773], [14643, 14773, 14772], [14644, 14645, 14773], [14645, 14774, 14773], [14645, 14646, 14775], [14645, 14775, 14774], [14646, 14647, 14775], [14647, 14776, 14775], [14647, 14648, 14777], [14647, 14777, 14776], [14648, 14649, 14777], [14649, 14778, 14777], [14649, 14650, 14779], [14649, 14779, 14778], [14650, 14651, 14779], [14651, 14780, 14779], [14651, 14652, 14781], [14651, 14781, 14780], [14652, 14653, 14781], [14653, 14782, 14781], [14653, 14654, 14783], [14653, 14783, 14782], [14654, 14655, 14783], [14655, 14784, 14783], [14655, 14656, 14785], [14655, 14785, 14784], [14656, 14657, 14785], [14657, 14786, 14785], [14657, 14658, 14787], [14657, 14787, 14786], [14658, 14659, 14787], [14659, 14788, 14787], [14659, 14660, 14789], [14659, 14789, 14788], [14660, 14661, 14789], [14661, 14790, 14789], [14661, 14662, 14791], [14661, 14791, 14790], [14662, 14663, 14791], [14663, 14792, 14791], [14663, 14664, 14793], [14663, 14793, 14792], [14664, 14665, 14793], [14665, 14794, 14793], [14665, 14666, 14795], [14665, 14795, 14794], [14666, 14667, 14795], [14667, 14796, 14795], [14667, 14668, 14797], [14667, 14797, 14796], [14668, 14669, 14797], [14669, 14798, 14797], [14669, 14670, 14799], [14669, 14799, 14798], [14670, 14671, 14799], [14671, 14800, 14799], [14671, 14672, 14801], [14671, 14801, 14800], [14672, 14673, 14801], [14673, 14802, 14801], [14673, 14674, 14803], [14673, 14803, 14802], [14674, 14675, 14803], [14675, 14804, 14803], [14675, 14676, 14805], [14675, 14805, 14804], [14676, 14677, 14805], [14677, 14806, 14805], [14677, 14678, 14807], [14677, 14807, 14806], [14678, 14679, 14807], [14679, 14808, 14807], [14679, 14680, 14809], [14679, 14809, 14808], [14680, 14681, 14809], [14681, 14810, 14809], [14681, 14682, 14811], [14681, 14811, 14810], [14682, 14683, 14811], [14683, 14812, 14811], [14683, 14684, 14813], [14683, 14813, 14812], [14684, 14685, 14813], [14685, 14814, 14813], [14685, 14686, 14815], [14685, 14815, 14814], [14686, 14687, 14815], [14687, 14816, 14815], [14687, 14688, 14817], [14687, 14817, 14816], [14688, 14689, 14817], [14689, 14818, 14817], [14689, 14690, 14819], [14689, 14819, 14818], [14690, 14691, 14819], [14691, 14820, 14819], [14691, 14692, 14821], [14691, 14821, 14820], [14692, 14693, 14821], [14693, 14822, 14821], [14693, 14694, 14823], [14693, 14823, 14822], [14694, 14695, 14823], [14695, 14824, 14823], [14695, 14696, 14825], [14695, 14825, 14824], [14696, 14697, 14825], [14697, 14826, 14825], [14697, 14698, 14827], [14697, 14827, 14826], [14698, 14699, 14827], [14699, 14828, 14827], [14699, 14700, 14829], [14699, 14829, 14828], 
[14700, 14701, 14829], [14701, 14830, 14829], [14701, 14702, 14831], [14701, 14831, 14830], [14702, 14703, 14831], [14703, 14832, 14831], [14703, 14704, 14833], [14703, 14833, 14832], [14704, 14705, 14833], [14705, 14834, 14833], [14705, 14706, 14835], [14705, 14835, 14834], [14706, 14707, 14835], [14707, 14836, 14835], [14707, 14708, 14837], [14707, 14837, 14836], [14708, 14709, 14837], [14709, 14838, 14837], [14709, 14710, 14839], [14709, 14839, 14838], [14710, 14711, 14839], [14711, 14840, 14839], [14711, 14712, 14841], [14711, 14841, 14840], [14712, 14713, 14841], [14713, 14842, 14841], [14713, 14714, 14843], [14713, 14843, 14842], [14714, 14715, 14843], [14715, 14844, 14843], [14715, 14716, 14845], [14715, 14845, 14844], [14716, 14717, 14845], [14717, 14846, 14845], [14717, 14718, 14847], [14717, 14847, 14846], [14718, 14719, 14847], [14719, 14848, 14847], [14719, 14720, 14849], [14719, 14849, 14848], [14720, 14721, 14849], [14721, 14850, 14849], [14721, 14722, 14851], [14721, 14851, 14850], [14722, 14723, 14851], [14723, 14852, 14851], [14723, 14724, 14853], [14723, 14853, 14852], [14724, 14725, 14853], [14725, 14854, 14853], [14725, 14726, 14855], [14725, 14855, 14854], [14726, 14727, 14855], [14727, 14856, 14855], [14727, 14728, 14857], [14727, 14857, 14856], [14728, 14729, 14857], [14729, 14858, 14857], [14729, 14730, 14859], [14729, 14859, 14858], [14730, 14731, 14859], [14731, 14860, 14859], [14731, 14732, 14861], [14731, 14861, 14860], [14732, 14733, 14861], [14733, 14862, 14861], [14733, 14734, 14863], [14733, 14863, 14862], [14734, 14735, 14863], [14735, 14864, 14863], [14735, 14736, 14865], [14735, 14865, 14864], [14736, 14737, 14865], [14737, 14866, 14865], [14737, 14738, 14867], [14737, 14867, 14866], [14738, 14739, 14867], [14739, 14868, 14867], [14739, 14740, 14869], [14739, 14869, 14868], [14740, 14741, 14869], [14741, 14870, 14869], [14741, 14742, 14871], [14741, 14871, 14870], [14742, 14743, 14871], [14743, 14872, 14871], [14743, 14744, 14873], [14743, 14873, 14872], [14744, 14745, 14873], [14745, 14874, 14873], [14745, 14746, 14875], [14745, 14875, 14874], [14746, 14747, 14875], [14747, 14876, 14875], [14747, 14748, 14877], [14747, 14877, 14876], [14748, 14749, 14877], [14749, 14878, 14877], [14749, 14750, 14879], [14749, 14879, 14878], [14750, 14751, 14879], [14751, 14880, 14879], [14752, 14753, 14881], [14753, 14882, 14881], [14753, 14754, 14883], [14753, 14883, 14882], [14754, 14755, 14883], [14755, 14884, 14883], [14755, 14756, 14885], [14755, 14885, 14884], [14756, 14757, 14885], [14757, 14886, 14885], [14757, 14758, 14887], [14757, 14887, 14886], [14758, 14759, 14887], [14759, 14888, 14887], [14759, 14760, 14889], [14759, 14889, 14888], [14760, 14761, 14889], [14761, 14890, 14889], [14761, 14762, 14891], [14761, 14891, 14890], [14762, 14763, 14891], [14763, 14892, 14891], [14763, 14764, 14893], [14763, 14893, 14892], [14764, 14765, 14893], [14765, 14894, 14893], [14765, 14766, 14895], [14765, 14895, 14894], [14766, 14767, 14895], [14767, 14896, 14895], [14767, 14768, 14897], [14767, 14897, 14896], [14768, 14769, 14897], [14769, 14898, 14897], [14769, 14770, 14899], [14769, 14899, 14898], [14770, 14771, 14899], [14771, 14900, 14899], [14771, 14772, 14901], [14771, 14901, 14900], [14772, 14773, 14901], [14773, 14902, 14901], [14773, 14774, 14903], [14773, 14903, 14902], [14774, 14775, 14903], [14775, 14904, 14903], [14775, 14776, 14905], [14775, 14905, 14904], [14776, 14777, 14905], [14777, 14906, 14905], [14777, 14778, 14907], [14777, 14907, 14906], [14778, 
14779, 14907], [14779, 14908, 14907], [14779, 14780, 14909], [14779, 14909, 14908], [14780, 14781, 14909], [14781, 14910, 14909], [14781, 14782, 14911], [14781, 14911, 14910], [14782, 14783, 14911], [14783, 14912, 14911], [14783, 14784, 14913], [14783, 14913, 14912], [14784, 14785, 14913], [14785, 14914, 14913], [14785, 14786, 14915], [14785, 14915, 14914], [14786, 14787, 14915], [14787, 14916, 14915], [14787, 14788, 14917], [14787, 14917, 14916], [14788, 14789, 14917], [14789, 14918, 14917], [14789, 14790, 14919], [14789, 14919, 14918], [14790, 14791, 14919], [14791, 14920, 14919], [14791, 14792, 14921], [14791, 14921, 14920], [14792, 14793, 14921], [14793, 14922, 14921], [14793, 14794, 14923], [14793, 14923, 14922], [14794, 14795, 14923], [14795, 14924, 14923], [14795, 14796, 14925], [14795, 14925, 14924], [14796, 14797, 14925], [14797, 14926, 14925], [14797, 14798, 14927], [14797, 14927, 14926], [14798, 14799, 14927], [14799, 14928, 14927], [14799, 14800, 14929], [14799, 14929, 14928], [14800, 14801, 14929], [14801, 14930, 14929], [14801, 14802, 14931], [14801, 14931, 14930], [14802, 14803, 14931], [14803, 14932, 14931], [14803, 14804, 14933], [14803, 14933, 14932], [14804, 14805, 14933], [14805, 14934, 14933], [14805, 14806, 14935], [14805, 14935, 14934], [14806, 14807, 14935], [14807, 14936, 14935], [14807, 14808, 14937], [14807, 14937, 14936], [14808, 14809, 14937], [14809, 14938, 14937], [14809, 14810, 14939], [14809, 14939, 14938], [14810, 14811, 14939], [14811, 14940, 14939], [14811, 14812, 14941], [14811, 14941, 14940], [14812, 14813, 14941], [14813, 14942, 14941], [14813, 14814, 14943], [14813, 14943, 14942], [14814, 14815, 14943], [14815, 14944, 14943], [14815, 14816, 14945], [14815, 14945, 14944], [14816, 14817, 14945], [14817, 14946, 14945], [14817, 14818, 14947], [14817, 14947, 14946], [14818, 14819, 14947], [14819, 14948, 14947], [14819, 14820, 14949], [14819, 14949, 14948], [14820, 14821, 14949], [14821, 14950, 14949], [14821, 14822, 14951], [14821, 14951, 14950], [14822, 14823, 14951], [14823, 14952, 14951], [14823, 14824, 14953], [14823, 14953, 14952], [14824, 14825, 14953], [14825, 14954, 14953], [14825, 14826, 14955], [14825, 14955, 14954], [14826, 14827, 14955], [14827, 14956, 14955], [14827, 14828, 14957], [14827, 14957, 14956], [14828, 14829, 14957], [14829, 14958, 14957], [14829, 14830, 14959], [14829, 14959, 14958], [14830, 14831, 14959], [14831, 14960, 14959], [14831, 14832, 14961], [14831, 14961, 14960], [14832, 14833, 14961], [14833, 14962, 14961], [14833, 14834, 14963], [14833, 14963, 14962], [14834, 14835, 14963], [14835, 14964, 14963], [14835, 14836, 14965], [14835, 14965, 14964], [14836, 14837, 14965], [14837, 14966, 14965], [14837, 14838, 14967], [14837, 14967, 14966], [14838, 14839, 14967], [14839, 14968, 14967], [14839, 14840, 14969], [14839, 14969, 14968], [14840, 14841, 14969], [14841, 14970, 14969], [14841, 14842, 14971], [14841, 14971, 14970], [14842, 14843, 14971], [14843, 14972, 14971], [14843, 14844, 14973], [14843, 14973, 14972], [14844, 14845, 14973], [14845, 14974, 14973], [14845, 14846, 14975], [14845, 14975, 14974], [14846, 14847, 14975], [14847, 14976, 14975], [14847, 14848, 14977], [14847, 14977, 14976], [14848, 14849, 14977], [14849, 14978, 14977], [14849, 14850, 14979], [14849, 14979, 14978], [14850, 14851, 14979], [14851, 14980, 14979], [14851, 14852, 14981], [14851, 14981, 14980], [14852, 14853, 14981], [14853, 14982, 14981], [14853, 14854, 14983], [14853, 14983, 14982], [14854, 14855, 14983], [14855, 14984, 14983], [14855, 14856, 
14985], [14855, 14985, 14984], [14856, 14857, 14985], [14857, 14986, 14985], [14857, 14858, 14987], [14857, 14987, 14986], [14858, 14859, 14987], [14859, 14988, 14987], [14859, 14860, 14989], [14859, 14989, 14988], [14860, 14861, 14989], [14861, 14990, 14989], [14861, 14862, 14991], [14861, 14991, 14990], [14862, 14863, 14991], [14863, 14992, 14991], [14863, 14864, 14993], [14863, 14993, 14992], [14864, 14865, 14993], [14865, 14994, 14993], [14865, 14866, 14995], [14865, 14995, 14994], [14866, 14867, 14995], [14867, 14996, 14995], [14867, 14868, 14997], [14867, 14997, 14996], [14868, 14869, 14997], [14869, 14998, 14997], [14869, 14870, 14999], [14869, 14999, 14998], [14870, 14871, 14999], [14871, 15000, 14999], [14871, 14872, 15001], [14871, 15001, 15000], [14872, 14873, 15001], [14873, 15002, 15001], [14873, 14874, 15003], [14873, 15003, 15002], [14874, 14875, 15003], [14875, 15004, 15003], [14875, 14876, 15005], [14875, 15005, 15004], [14876, 14877, 15005], [14877, 15006, 15005], [14877, 14878, 15007], [14877, 15007, 15006], [14878, 14879, 15007], [14879, 15008, 15007], [14879, 14880, 15009], [14879, 15009, 15008], [14881, 14882, 15011], [14881, 15011, 15010], [14882, 14883, 15011], [14883, 15012, 15011], [14883, 14884, 15013], [14883, 15013, 15012], [14884, 14885, 15013], [14885, 15014, 15013], [14885, 14886, 15015], [14885, 15015, 15014], [14886, 14887, 15015], [14887, 15016, 15015], [14887, 14888, 15017], [14887, 15017, 15016], [14888, 14889, 15017], [14889, 15018, 15017], [14889, 14890, 15019], [14889, 15019, 15018], [14890, 14891, 15019], [14891, 15020, 15019], [14891, 14892, 15021], [14891, 15021, 15020], [14892, 14893, 15021], [14893, 15022, 15021], [14893, 14894, 15023], [14893, 15023, 15022], [14894, 14895, 15023], [14895, 15024, 15023], [14895, 14896, 15025], [14895, 15025, 15024], [14896, 14897, 15025], [14897, 15026, 15025], [14897, 14898, 15027], [14897, 15027, 15026], [14898, 14899, 15027], [14899, 15028, 15027], [14899, 14900, 15029], [14899, 15029, 15028], [14900, 14901, 15029], [14901, 15030, 15029], [14901, 14902, 15031], [14901, 15031, 15030], [14902, 14903, 15031], [14903, 15032, 15031], [14903, 14904, 15033], [14903, 15033, 15032], [14904, 14905, 15033], [14905, 15034, 15033], [14905, 14906, 15035], [14905, 15035, 15034], [14906, 14907, 15035], [14907, 15036, 15035], [14907, 14908, 15037], [14907, 15037, 15036], [14908, 14909, 15037], [14909, 15038, 15037], [14909, 14910, 15039], [14909, 15039, 15038], [14910, 14911, 15039], [14911, 15040, 15039], [14911, 14912, 15041], [14911, 15041, 15040], [14912, 14913, 15041], [14913, 15042, 15041], [14913, 14914, 15043], [14913, 15043, 15042], [14914, 14915, 15043], [14915, 15044, 15043], [14915, 14916, 15045], [14915, 15045, 15044], [14916, 14917, 15045], [14917, 15046, 15045], [14917, 14918, 15047], [14917, 15047, 15046], [14918, 14919, 15047], [14919, 15048, 15047], [14919, 14920, 15049], [14919, 15049, 15048], [14920, 14921, 15049], [14921, 15050, 15049], [14921, 14922, 15051], [14921, 15051, 15050], [14922, 14923, 15051], [14923, 15052, 15051], [14923, 14924, 15053], [14923, 15053, 15052], [14924, 14925, 15053], [14925, 15054, 15053], [14925, 14926, 15055], [14925, 15055, 15054], [14926, 14927, 15055], [14927, 15056, 15055], [14927, 14928, 15057], [14927, 15057, 15056], [14928, 14929, 15057], [14929, 15058, 15057], [14929, 14930, 15059], [14929, 15059, 15058], [14930, 14931, 15059], [14931, 15060, 15059], [14931, 14932, 15061], [14931, 15061, 15060], [14932, 14933, 15061], [14933, 15062, 15061], [14933, 14934, 15063], 
[14933, 15063, 15062], [14934, 14935, 15063], [14935, 15064, 15063], [14935, 14936, 15065], [14935, 15065, 15064], [14936, 14937, 15065], [14937, 15066, 15065], [14937, 14938, 15067], [14937, 15067, 15066], [14938, 14939, 15067], [14939, 15068, 15067], [14939, 14940, 15069], [14939, 15069, 15068], [14940, 14941, 15069], [14941, 15070, 15069], [14941, 14942, 15071], [14941, 15071, 15070], [14942, 14943, 15071], [14943, 15072, 15071], [14943, 14944, 15073], [14943, 15073, 15072], [14944, 14945, 15073], [14945, 15074, 15073], [14945, 14946, 15075], [14945, 15075, 15074], [14946, 14947, 15075], [14947, 15076, 15075], [14947, 14948, 15077], [14947, 15077, 15076], [14948, 14949, 15077], [14949, 15078, 15077], [14949, 14950, 15079], [14949, 15079, 15078], [14950, 14951, 15079], [14951, 15080, 15079], [14951, 14952, 15081], [14951, 15081, 15080], [14952, 14953, 15081], [14953, 15082, 15081], [14953, 14954, 15083], [14953, 15083, 15082], [14954, 14955, 15083], [14955, 15084, 15083], [14955, 14956, 15085], [14955, 15085, 15084], [14956, 14957, 15085], [14957, 15086, 15085], [14957, 14958, 15087], [14957, 15087, 15086], [14958, 14959, 15087], [14959, 15088, 15087], [14959, 14960, 15089], [14959, 15089, 15088], [14960, 14961, 15089], [14961, 15090, 15089], [14961, 14962, 15091], [14961, 15091, 15090], [14962, 14963, 15091], [14963, 15092, 15091], [14963, 14964, 15093], [14963, 15093, 15092], [14964, 14965, 15093], [14965, 15094, 15093], [14965, 14966, 15095], [14965, 15095, 15094], [14966, 14967, 15095], [14967, 15096, 15095], [14967, 14968, 15097], [14967, 15097, 15096], [14968, 14969, 15097], [14969, 15098, 15097], [14969, 14970, 15099], [14969, 15099, 15098], [14970, 14971, 15099], [14971, 15100, 15099], [14971, 14972, 15101], [14971, 15101, 15100], [14972, 14973, 15101], [14973, 15102, 15101], [14973, 14974, 15103], [14973, 15103, 15102], [14974, 14975, 15103], [14975, 15104, 15103], [14975, 14976, 15105], [14975, 15105, 15104], [14976, 14977, 15105], [14977, 15106, 15105], [14977, 14978, 15107], [14977, 15107, 15106], [14978, 14979, 15107], [14979, 15108, 15107], [14979, 14980, 15109], [14979, 15109, 15108], [14980, 14981, 15109], [14981, 15110, 15109], [14981, 14982, 15111], [14981, 15111, 15110], [14982, 14983, 15111], [14983, 15112, 15111], [14983, 14984, 15113], [14983, 15113, 15112], [14984, 14985, 15113], [14985, 15114, 15113], [14985, 14986, 15115], [14985, 15115, 15114], [14986, 14987, 15115], [14987, 15116, 15115], [14987, 14988, 15117], [14987, 15117, 15116], [14988, 14989, 15117], [14989, 15118, 15117], [14989, 14990, 15119], [14989, 15119, 15118], [14990, 14991, 15119], [14991, 15120, 15119], [14991, 14992, 15121], [14991, 15121, 15120], [14992, 14993, 15121], [14993, 15122, 15121], [14993, 14994, 15123], [14993, 15123, 15122], [14994, 14995, 15123], [14995, 15124, 15123], [14995, 14996, 15125], [14995, 15125, 15124], [14996, 14997, 15125], [14997, 15126, 15125], [14997, 14998, 15127], [14997, 15127, 15126], [14998, 14999, 15127], [14999, 15128, 15127], [14999, 15000, 15129], [14999, 15129, 15128], [15000, 15001, 15129], [15001, 15130, 15129], [15001, 15002, 15131], [15001, 15131, 15130], [15002, 15003, 15131], [15003, 15132, 15131], [15003, 15004, 15133], [15003, 15133, 15132], [15004, 15005, 15133], [15005, 15134, 15133], [15005, 15006, 15135], [15005, 15135, 15134], [15006, 15007, 15135], [15007, 15136, 15135], [15007, 15008, 15137], [15007, 15137, 15136], [15008, 15009, 15137], [15009, 15138, 15137], [15010, 15011, 15139], [15011, 15140, 15139], [15011, 15012, 15141], [15011, 
15141, 15140], [15012, 15013, 15141], [15013, 15142, 15141], [15013, 15014, 15143], [15013, 15143, 15142], [15014, 15015, 15143], [15015, 15144, 15143], [15015, 15016, 15145], [15015, 15145, 15144], [15016, 15017, 15145], [15017, 15146, 15145], [15017, 15018, 15147], [15017, 15147, 15146], [15018, 15019, 15147], [15019, 15148, 15147], [15019, 15020, 15149], [15019, 15149, 15148], [15020, 15021, 15149], [15021, 15150, 15149], [15021, 15022, 15151], [15021, 15151, 15150], [15022, 15023, 15151], [15023, 15152, 15151], [15023, 15024, 15153], [15023, 15153, 15152], [15024, 15025, 15153], [15025, 15154, 15153], [15025, 15026, 15155], [15025, 15155, 15154], [15026, 15027, 15155], [15027, 15156, 15155], [15027, 15028, 15157], [15027, 15157, 15156], [15028, 15029, 15157], [15029, 15158, 15157], [15029, 15030, 15159], [15029, 15159, 15158], [15030, 15031, 15159], [15031, 15160, 15159], [15031, 15032, 15161], [15031, 15161, 15160], [15032, 15033, 15161], [15033, 15162, 15161], [15033, 15034, 15163], [15033, 15163, 15162], [15034, 15035, 15163], [15035, 15164, 15163], [15035, 15036, 15165], [15035, 15165, 15164], [15036, 15037, 15165], [15037, 15166, 15165], [15037, 15038, 15167], [15037, 15167, 15166], [15038, 15039, 15167], [15039, 15168, 15167], [15039, 15040, 15169], [15039, 15169, 15168], [15040, 15041, 15169], [15041, 15170, 15169], [15041, 15042, 15171], [15041, 15171, 15170], [15042, 15043, 15171], [15043, 15172, 15171], [15043, 15044, 15173], [15043, 15173, 15172], [15044, 15045, 15173], [15045, 15174, 15173], [15045, 15046, 15175], [15045, 15175, 15174], [15046, 15047, 15175], [15047, 15176, 15175], [15047, 15048, 15177], [15047, 15177, 15176], [15048, 15049, 15177], [15049, 15178, 15177], [15049, 15050, 15179], [15049, 15179, 15178], [15050, 15051, 15179], [15051, 15180, 15179], [15051, 15052, 15181], [15051, 15181, 15180], [15052, 15053, 15181], [15053, 15182, 15181], [15053, 15054, 15183], [15053, 15183, 15182], [15054, 15055, 15183], [15055, 15184, 15183], [15055, 15056, 15185], [15055, 15185, 15184], [15056, 15057, 15185], [15057, 15186, 15185], [15057, 15058, 15187], [15057, 15187, 15186], [15058, 15059, 15187], [15059, 15188, 15187], [15059, 15060, 15189], [15059, 15189, 15188], [15060, 15061, 15189], [15061, 15190, 15189], [15061, 15062, 15191], [15061, 15191, 15190], [15062, 15063, 15191], [15063, 15192, 15191], [15063, 15064, 15193], [15063, 15193, 15192], [15064, 15065, 15193], [15065, 15194, 15193], [15065, 15066, 15195], [15065, 15195, 15194], [15066, 15067, 15195], [15067, 15196, 15195], [15067, 15068, 15197], [15067, 15197, 15196], [15068, 15069, 15197], [15069, 15198, 15197], [15069, 15070, 15199], [15069, 15199, 15198], [15070, 15071, 15199], [15071, 15200, 15199], [15071, 15072, 15201], [15071, 15201, 15200], [15072, 15073, 15201], [15073, 15202, 15201], [15073, 15074, 15203], [15073, 15203, 15202], [15074, 15075, 15203], [15075, 15204, 15203], [15075, 15076, 15205], [15075, 15205, 15204], [15076, 15077, 15205], [15077, 15206, 15205], [15077, 15078, 15207], [15077, 15207, 15206], [15078, 15079, 15207], [15079, 15208, 15207], [15079, 15080, 15209], [15079, 15209, 15208], [15080, 15081, 15209], [15081, 15210, 15209], [15081, 15082, 15211], [15081, 15211, 15210], [15082, 15083, 15211], [15083, 15212, 15211], [15083, 15084, 15213], [15083, 15213, 15212], [15084, 15085, 15213], [15085, 15214, 15213], [15085, 15086, 15215], [15085, 15215, 15214], [15086, 15087, 15215], [15087, 15216, 15215], [15087, 15088, 15217], [15087, 15217, 15216], [15088, 15089, 15217], [15089, 15218, 
15217], [15089, 15090, 15219], [15089, 15219, 15218], [15090, 15091, 15219], [15091, 15220, 15219], [15091, 15092, 15221], [15091, 15221, 15220], [15092, 15093, 15221], [15093, 15222, 15221], [15093, 15094, 15223], [15093, 15223, 15222], [15094, 15095, 15223], [15095, 15224, 15223], [15095, 15096, 15225], [15095, 15225, 15224], [15096, 15097, 15225], [15097, 15226, 15225], [15097, 15098, 15227], [15097, 15227, 15226], [15098, 15099, 15227], [15099, 15228, 15227], [15099, 15100, 15229], [15099, 15229, 15228], [15100, 15101, 15229], [15101, 15230, 15229], [15101, 15102, 15231], [15101, 15231, 15230], [15102, 15103, 15231], [15103, 15232, 15231], [15103, 15104, 15233], [15103, 15233, 15232], [15104, 15105, 15233], [15105, 15234, 15233], [15105, 15106, 15235], [15105, 15235, 15234], [15106, 15107, 15235], [15107, 15236, 15235], [15107, 15108, 15237], [15107, 15237, 15236], [15108, 15109, 15237], [15109, 15238, 15237], [15109, 15110, 15239], [15109, 15239, 15238], [15110, 15111, 15239], [15111, 15240, 15239], [15111, 15112, 15241], [15111, 15241, 15240], [15112, 15113, 15241], [15113, 15242, 15241], [15113, 15114, 15243], [15113, 15243, 15242], [15114, 15115, 15243], [15115, 15244, 15243], [15115, 15116, 15245], [15115, 15245, 15244], [15116, 15117, 15245], [15117, 15246, 15245], [15117, 15118, 15247], [15117, 15247, 15246], [15118, 15119, 15247], [15119, 15248, 15247], [15119, 15120, 15249], [15119, 15249, 15248], [15120, 15121, 15249], [15121, 15250, 15249], [15121, 15122, 15251], [15121, 15251, 15250], [15122, 15123, 15251], [15123, 15252, 15251], [15123, 15124, 15253], [15123, 15253, 15252], [15124, 15125, 15253], [15125, 15254, 15253], [15125, 15126, 15255], [15125, 15255, 15254], [15126, 15127, 15255], [15127, 15256, 15255], [15127, 15128, 15257], [15127, 15257, 15256], [15128, 15129, 15257], [15129, 15258, 15257], [15129, 15130, 15259], [15129, 15259, 15258], [15130, 15131, 15259], [15131, 15260, 15259], [15131, 15132, 15261], [15131, 15261, 15260], [15132, 15133, 15261], [15133, 15262, 15261], [15133, 15134, 15263], [15133, 15263, 15262], [15134, 15135, 15263], [15135, 15264, 15263], [15135, 15136, 15265], [15135, 15265, 15264], [15136, 15137, 15265], [15137, 15266, 15265], [15137, 15138, 15267], [15137, 15267, 15266], [15139, 15140, 15269], [15139, 15269, 15268], [15140, 15141, 15269], [15141, 15270, 15269], [15141, 15142, 15271], [15141, 15271, 15270], [15142, 15143, 15271], [15143, 15272, 15271], [15143, 15144, 15273], [15143, 15273, 15272], [15144, 15145, 15273], [15145, 15274, 15273], [15145, 15146, 15275], [15145, 15275, 15274], [15146, 15147, 15275], [15147, 15276, 15275], [15147, 15148, 15277], [15147, 15277, 15276], [15148, 15149, 15277], [15149, 15278, 15277], [15149, 15150, 15279], [15149, 15279, 15278], [15150, 15151, 15279], [15151, 15280, 15279], [15151, 15152, 15281], [15151, 15281, 15280], [15152, 15153, 15281], [15153, 15282, 15281], [15153, 15154, 15283], [15153, 15283, 15282], [15154, 15155, 15283], [15155, 15284, 15283], [15155, 15156, 15285], [15155, 15285, 15284], [15156, 15157, 15285], [15157, 15286, 15285], [15157, 15158, 15287], [15157, 15287, 15286], [15158, 15159, 15287], [15159, 15288, 15287], [15159, 15160, 15289], [15159, 15289, 15288], [15160, 15161, 15289], [15161, 15290, 15289], [15161, 15162, 15291], [15161, 15291, 15290], [15162, 15163, 15291], [15163, 15292, 15291], [15163, 15164, 15293], [15163, 15293, 15292], [15164, 15165, 15293], [15165, 15294, 15293], [15165, 15166, 15295], [15165, 15295, 15294], [15166, 15167, 15295], [15167, 15296, 15295], 
[15167, 15168, 15297], [15167, 15297, 15296], [15168, 15169, 15297], [15169, 15298, 15297], [15169, 15170, 15299], [15169, 15299, 15298], [15170, 15171, 15299], [15171, 15300, 15299], [15171, 15172, 15301], [15171, 15301, 15300], [15172, 15173, 15301], [15173, 15302, 15301], [15173, 15174, 15303], [15173, 15303, 15302], [15174, 15175, 15303], [15175, 15304, 15303], [15175, 15176, 15305], [15175, 15305, 15304], [15176, 15177, 15305], [15177, 15306, 15305], [15177, 15178, 15307], [15177, 15307, 15306], [15178, 15179, 15307], [15179, 15308, 15307], [15179, 15180, 15309], [15179, 15309, 15308], [15180, 15181, 15309], [15181, 15310, 15309], [15181, 15182, 15311], [15181, 15311, 15310], [15182, 15183, 15311], [15183, 15312, 15311], [15183, 15184, 15313], [15183, 15313, 15312], [15184, 15185, 15313], [15185, 15314, 15313], [15185, 15186, 15315], [15185, 15315, 15314], [15186, 15187, 15315], [15187, 15316, 15315], [15187, 15188, 15317], [15187, 15317, 15316], [15188, 15189, 15317], [15189, 15318, 15317], [15189, 15190, 15319], [15189, 15319, 15318], [15190, 15191, 15319], [15191, 15320, 15319], [15191, 15192, 15321], [15191, 15321, 15320], [15192, 15193, 15321], [15193, 15322, 15321], [15193, 15194, 15323], [15193, 15323, 15322], [15194, 15195, 15323], [15195, 15324, 15323], [15195, 15196, 15325], [15195, 15325, 15324], [15196, 15197, 15325], [15197, 15326, 15325], [15197, 15198, 15327], [15197, 15327, 15326], [15198, 15199, 15327], [15199, 15328, 15327], [15199, 15200, 15329], [15199, 15329, 15328], [15200, 15201, 15329], [15201, 15330, 15329], [15201, 15202, 15331], [15201, 15331, 15330], [15202, 15203, 15331], [15203, 15332, 15331], [15203, 15204, 15333], [15203, 15333, 15332], [15204, 15205, 15333], [15205, 15334, 15333], [15205, 15206, 15335], [15205, 15335, 15334], [15206, 15207, 15335], [15207, 15336, 15335], [15207, 15208, 15337], [15207, 15337, 15336], [15208, 15209, 15337], [15209, 15338, 15337], [15209, 15210, 15339], [15209, 15339, 15338], [15210, 15211, 15339], [15211, 15340, 15339], [15211, 15212, 15341], [15211, 15341, 15340], [15212, 15213, 15341], [15213, 15342, 15341], [15213, 15214, 15343], [15213, 15343, 15342], [15214, 15215, 15343], [15215, 15344, 15343], [15215, 15216, 15345], [15215, 15345, 15344], [15216, 15217, 15345], [15217, 15346, 15345], [15217, 15218, 15347], [15217, 15347, 15346], [15218, 15219, 15347], [15219, 15348, 15347], [15219, 15220, 15349], [15219, 15349, 15348], [15220, 15221, 15349], [15221, 15350, 15349], [15221, 15222, 15351], [15221, 15351, 15350], [15222, 15223, 15351], [15223, 15352, 15351], [15223, 15224, 15353], [15223, 15353, 15352], [15224, 15225, 15353], [15225, 15354, 15353], [15225, 15226, 15355], [15225, 15355, 15354], [15226, 15227, 15355], [15227, 15356, 15355], [15227, 15228, 15357], [15227, 15357, 15356], [15228, 15229, 15357], [15229, 15358, 15357], [15229, 15230, 15359], [15229, 15359, 15358], [15230, 15231, 15359], [15231, 15360, 15359], [15231, 15232, 15361], [15231, 15361, 15360], [15232, 15233, 15361], [15233, 15362, 15361], [15233, 15234, 15363], [15233, 15363, 15362], [15234, 15235, 15363], [15235, 15364, 15363], [15235, 15236, 15365], [15235, 15365, 15364], [15236, 15237, 15365], [15237, 15366, 15365], [15237, 15238, 15367], [15237, 15367, 15366], [15238, 15239, 15367], [15239, 15368, 15367], [15239, 15240, 15369], [15239, 15369, 15368], [15240, 15241, 15369], [15241, 15370, 15369], [15241, 15242, 15371], [15241, 15371, 15370], [15242, 15243, 15371], [15243, 15372, 15371], [15243, 15244, 15373], [15243, 15373, 15372], [15244, 
15245, 15373], [15245, 15374, 15373], [15245, 15246, 15375], [15245, 15375, 15374], [15246, 15247, 15375], [15247, 15376, 15375], [15247, 15248, 15377], [15247, 15377, 15376], [15248, 15249, 15377], [15249, 15378, 15377], [15249, 15250, 15379], [15249, 15379, 15378], [15250, 15251, 15379], [15251, 15380, 15379], [15251, 15252, 15381], [15251, 15381, 15380], [15252, 15253, 15381], [15253, 15382, 15381], [15253, 15254, 15383], [15253, 15383, 15382], [15254, 15255, 15383], [15255, 15384, 15383], [15255, 15256, 15385], [15255, 15385, 15384], [15256, 15257, 15385], [15257, 15386, 15385], [15257, 15258, 15387], [15257, 15387, 15386], [15258, 15259, 15387], [15259, 15388, 15387], [15259, 15260, 15389], [15259, 15389, 15388], [15260, 15261, 15389], [15261, 15390, 15389], [15261, 15262, 15391], [15261, 15391, 15390], [15262, 15263, 15391], [15263, 15392, 15391], [15263, 15264, 15393], [15263, 15393, 15392], [15264, 15265, 15393], [15265, 15394, 15393], [15265, 15266, 15395], [15265, 15395, 15394], [15266, 15267, 15395], [15267, 15396, 15395], [15268, 15269, 15397], [15269, 15398, 15397], [15269, 15270, 15399], [15269, 15399, 15398], [15270, 15271, 15399], [15271, 15400, 15399], [15271, 15272, 15401], [15271, 15401, 15400], [15272, 15273, 15401], [15273, 15402, 15401], [15273, 15274, 15403], [15273, 15403, 15402], [15274, 15275, 15403], [15275, 15404, 15403], [15275, 15276, 15405], [15275, 15405, 15404], [15276, 15277, 15405], [15277, 15406, 15405], [15277, 15278, 15407], [15277, 15407, 15406], [15278, 15279, 15407], [15279, 15408, 15407], [15279, 15280, 15409], [15279, 15409, 15408], [15280, 15281, 15409], [15281, 15410, 15409], [15281, 15282, 15411], [15281, 15411, 15410], [15282, 15283, 15411], [15283, 15412, 15411], [15283, 15284, 15413], [15283, 15413, 15412], [15284, 15285, 15413], [15285, 15414, 15413], [15285, 15286, 15415], [15285, 15415, 15414], [15286, 15287, 15415], [15287, 15416, 15415], [15287, 15288, 15417], [15287, 15417, 15416], [15288, 15289, 15417], [15289, 15418, 15417], [15289, 15290, 15419], [15289, 15419, 15418], [15290, 15291, 15419], [15291, 15420, 15419], [15291, 15292, 15421], [15291, 15421, 15420], [15292, 15293, 15421], [15293, 15422, 15421], [15293, 15294, 15423], [15293, 15423, 15422], [15294, 15295, 15423], [15295, 15424, 15423], [15295, 15296, 15425], [15295, 15425, 15424], [15296, 15297, 15425], [15297, 15426, 15425], [15297, 15298, 15427], [15297, 15427, 15426], [15298, 15299, 15427], [15299, 15428, 15427], [15299, 15300, 15429], [15299, 15429, 15428], [15300, 15301, 15429], [15301, 15430, 15429], [15301, 15302, 15431], [15301, 15431, 15430], [15302, 15303, 15431], [15303, 15432, 15431], [15303, 15304, 15433], [15303, 15433, 15432], [15304, 15305, 15433], [15305, 15434, 15433], [15305, 15306, 15435], [15305, 15435, 15434], [15306, 15307, 15435], [15307, 15436, 15435], [15307, 15308, 15437], [15307, 15437, 15436], [15308, 15309, 15437], [15309, 15438, 15437], [15309, 15310, 15439], [15309, 15439, 15438], [15310, 15311, 15439], [15311, 15440, 15439], [15311, 15312, 15441], [15311, 15441, 15440], [15312, 15313, 15441], [15313, 15442, 15441], [15313, 15314, 15443], [15313, 15443, 15442], [15314, 15315, 15443], [15315, 15444, 15443], [15315, 15316, 15445], [15315, 15445, 15444], [15316, 15317, 15445], [15317, 15446, 15445], [15317, 15318, 15447], [15317, 15447, 15446], [15318, 15319, 15447], [15319, 15448, 15447], [15319, 15320, 15449], [15319, 15449, 15448], [15320, 15321, 15449], [15321, 15450, 15449], [15321, 15322, 15451], [15321, 15451, 15450], [15322, 15323, 
15451], [15323, 15452, 15451], [15323, 15324, 15453], [15323, 15453, 15452], [15324, 15325, 15453], [15325, 15454, 15453], [15325, 15326, 15455], [15325, 15455, 15454], [15326, 15327, 15455], [15327, 15456, 15455], [15327, 15328, 15457], [15327, 15457, 15456], [15328, 15329, 15457], [15329, 15458, 15457], [15329, 15330, 15459], [15329, 15459, 15458], [15330, 15331, 15459], [15331, 15460, 15459], [15331, 15332, 15461], [15331, 15461, 15460], [15332, 15333, 15461], [15333, 15462, 15461], [15333, 15334, 15463], [15333, 15463, 15462], [15334, 15335, 15463], [15335, 15464, 15463], [15335, 15336, 15465], [15335, 15465, 15464], [15336, 15337, 15465], [15337, 15466, 15465], [15337, 15338, 15467], [15337, 15467, 15466], [15338, 15339, 15467], [15339, 15468, 15467], [15339, 15340, 15469], [15339, 15469, 15468], [15340, 15341, 15469], [15341, 15470, 15469], [15341, 15342, 15471], [15341, 15471, 15470], [15342, 15343, 15471], [15343, 15472, 15471], [15343, 15344, 15473], [15343, 15473, 15472], [15344, 15345, 15473], [15345, 15474, 15473], [15345, 15346, 15475], [15345, 15475, 15474], [15346, 15347, 15475], [15347, 15476, 15475], [15347, 15348, 15477], [15347, 15477, 15476], [15348, 15349, 15477], [15349, 15478, 15477], [15349, 15350, 15479], [15349, 15479, 15478], [15350, 15351, 15479], [15351, 15480, 15479], [15351, 15352, 15481], [15351, 15481, 15480], [15352, 15353, 15481], [15353, 15482, 15481], [15353, 15354, 15483], [15353, 15483, 15482], [15354, 15355, 15483], [15355, 15484, 15483], [15355, 15356, 15485], [15355, 15485, 15484], [15356, 15357, 15485], [15357, 15486, 15485], [15357, 15358, 15487], [15357, 15487, 15486], [15358, 15359, 15487], [15359, 15488, 15487], [15359, 15360, 15489], [15359, 15489, 15488], [15360, 15361, 15489], [15361, 15490, 15489], [15361, 15362, 15491], [15361, 15491, 15490], [15362, 15363, 15491], [15363, 15492, 15491], [15363, 15364, 15493], [15363, 15493, 15492], [15364, 15365, 15493], [15365, 15494, 15493], [15365, 15366, 15495], [15365, 15495, 15494], [15366, 15367, 15495], [15367, 15496, 15495], [15367, 15368, 15497], [15367, 15497, 15496], [15368, 15369, 15497], [15369, 15498, 15497], [15369, 15370, 15499], [15369, 15499, 15498], [15370, 15371, 15499], [15371, 15500, 15499], [15371, 15372, 15501], [15371, 15501, 15500], [15372, 15373, 15501], [15373, 15502, 15501], [15373, 15374, 15503], [15373, 15503, 15502], [15374, 15375, 15503], [15375, 15504, 15503], [15375, 15376, 15505], [15375, 15505, 15504], [15376, 15377, 15505], [15377, 15506, 15505], [15377, 15378, 15507], [15377, 15507, 15506], [15378, 15379, 15507], [15379, 15508, 15507], [15379, 15380, 15509], [15379, 15509, 15508], [15380, 15381, 15509], [15381, 15510, 15509], [15381, 15382, 15511], [15381, 15511, 15510], [15382, 15383, 15511], [15383, 15512, 15511], [15383, 15384, 15513], [15383, 15513, 15512], [15384, 15385, 15513], [15385, 15514, 15513], [15385, 15386, 15515], [15385, 15515, 15514], [15386, 15387, 15515], [15387, 15516, 15515], [15387, 15388, 15517], [15387, 15517, 15516], [15388, 15389, 15517], [15389, 15518, 15517], [15389, 15390, 15519], [15389, 15519, 15518], [15390, 15391, 15519], [15391, 15520, 15519], [15391, 15392, 15521], [15391, 15521, 15520], [15392, 15393, 15521], [15393, 15522, 15521], [15393, 15394, 15523], [15393, 15523, 15522], [15394, 15395, 15523], [15395, 15524, 15523], [15395, 15396, 15525], [15395, 15525, 15524], [15397, 15398, 15527], [15397, 15527, 15526], [15398, 15399, 15527], [15399, 15528, 15527], [15399, 15400, 15529], [15399, 15529, 15528], [15400, 15401, 15529], 
[15401, 15530, 15529], [15401, 15402, 15531], [15401, 15531, 15530], [15402, 15403, 15531], [15403, 15532, 15531], [15403, 15404, 15533], [15403, 15533, 15532], [15404, 15405, 15533], [15405, 15534, 15533], [15405, 15406, 15535], [15405, 15535, 15534], [15406, 15407, 15535], [15407, 15536, 15535], [15407, 15408, 15537], [15407, 15537, 15536], [15408, 15409, 15537], [15409, 15538, 15537], [15409, 15410, 15539], [15409, 15539, 15538], [15410, 15411, 15539], [15411, 15540, 15539], [15411, 15412, 15541], [15411, 15541, 15540], [15412, 15413, 15541], [15413, 15542, 15541], [15413, 15414, 15543], [15413, 15543, 15542], [15414, 15415, 15543], [15415, 15544, 15543], [15415, 15416, 15545], [15415, 15545, 15544], [15416, 15417, 15545], [15417, 15546, 15545], [15417, 15418, 15547], [15417, 15547, 15546], [15418, 15419, 15547], [15419, 15548, 15547], [15419, 15420, 15549], [15419, 15549, 15548], [15420, 15421, 15549], [15421, 15550, 15549], [15421, 15422, 15551], [15421, 15551, 15550], [15422, 15423, 15551], [15423, 15552, 15551], [15423, 15424, 15553], [15423, 15553, 15552], [15424, 15425, 15553], [15425, 15554, 15553], [15425, 15426, 15555], [15425, 15555, 15554], [15426, 15427, 15555], [15427, 15556, 15555], [15427, 15428, 15557], [15427, 15557, 15556], [15428, 15429, 15557], [15429, 15558, 15557], [15429, 15430, 15559], [15429, 15559, 15558], [15430, 15431, 15559], [15431, 15560, 15559], [15431, 15432, 15561], [15431, 15561, 15560], [15432, 15433, 15561], [15433, 15562, 15561], [15433, 15434, 15563], [15433, 15563, 15562], [15434, 15435, 15563], [15435, 15564, 15563], [15435, 15436, 15565], [15435, 15565, 15564], [15436, 15437, 15565], [15437, 15566, 15565], [15437, 15438, 15567], [15437, 15567, 15566], [15438, 15439, 15567], [15439, 15568, 15567], [15439, 15440, 15569], [15439, 15569, 15568], [15440, 15441, 15569], [15441, 15570, 15569], [15441, 15442, 15571], [15441, 15571, 15570], [15442, 15443, 15571], [15443, 15572, 15571], [15443, 15444, 15573], [15443, 15573, 15572], [15444, 15445, 15573], [15445, 15574, 15573], [15445, 15446, 15575], [15445, 15575, 15574], [15446, 15447, 15575], [15447, 15576, 15575], [15447, 15448, 15577], [15447, 15577, 15576], [15448, 15449, 15577], [15449, 15578, 15577], [15449, 15450, 15579], [15449, 15579, 15578], [15450, 15451, 15579], [15451, 15580, 15579], [15451, 15452, 15581], [15451, 15581, 15580], [15452, 15453, 15581], [15453, 15582, 15581], [15453, 15454, 15583], [15453, 15583, 15582], [15454, 15455, 15583], [15455, 15584, 15583], [15455, 15456, 15585], [15455, 15585, 15584], [15456, 15457, 15585], [15457, 15586, 15585], [15457, 15458, 15587], [15457, 15587, 15586], [15458, 15459, 15587], [15459, 15588, 15587], [15459, 15460, 15589], [15459, 15589, 15588], [15460, 15461, 15589], [15461, 15590, 15589], [15461, 15462, 15591], [15461, 15591, 15590], [15462, 15463, 15591], [15463, 15592, 15591], [15463, 15464, 15593], [15463, 15593, 15592], [15464, 15465, 15593], [15465, 15594, 15593], [15465, 15466, 15595], [15465, 15595, 15594], [15466, 15467, 15595], [15467, 15596, 15595], [15467, 15468, 15597], [15467, 15597, 15596], [15468, 15469, 15597], [15469, 15598, 15597], [15469, 15470, 15599], [15469, 15599, 15598], [15470, 15471, 15599], [15471, 15600, 15599], [15471, 15472, 15601], [15471, 15601, 15600], [15472, 15473, 15601], [15473, 15602, 15601], [15473, 15474, 15603], [15473, 15603, 15602], [15474, 15475, 15603], [15475, 15604, 15603], [15475, 15476, 15605], [15475, 15605, 15604], [15476, 15477, 15605], [15477, 15606, 15605], [15477, 15478, 15607], [15477, 
15607, 15606], [15478, 15479, 15607], [15479, 15608, 15607], [15479, 15480, 15609], [15479, 15609, 15608], [15480, 15481, 15609], [15481, 15610, 15609], [15481, 15482, 15611], [15481, 15611, 15610], [15482, 15483, 15611], [15483, 15612, 15611], [15483, 15484, 15613], [15483, 15613, 15612], [15484, 15485, 15613], [15485, 15614, 15613], [15485, 15486, 15615], [15485, 15615, 15614], [15486, 15487, 15615], [15487, 15616, 15615], [15487, 15488, 15617], [15487, 15617, 15616], [15488, 15489, 15617], [15489, 15618, 15617], [15489, 15490, 15619], [15489, 15619, 15618], [15490, 15491, 15619], [15491, 15620, 15619], [15491, 15492, 15621], [15491, 15621, 15620], [15492, 15493, 15621], [15493, 15622, 15621], [15493, 15494, 15623], [15493, 15623, 15622], [15494, 15495, 15623], [15495, 15624, 15623], [15495, 15496, 15625], [15495, 15625, 15624], [15496, 15497, 15625], [15497, 15626, 15625], [15497, 15498, 15627], [15497, 15627, 15626], [15498, 15499, 15627], [15499, 15628, 15627], [15499, 15500, 15629], [15499, 15629, 15628], [15500, 15501, 15629], [15501, 15630, 15629], [15501, 15502, 15631], [15501, 15631, 15630], [15502, 15503, 15631], [15503, 15632, 15631], [15503, 15504, 15633], [15503, 15633, 15632], [15504, 15505, 15633], [15505, 15634, 15633], [15505, 15506, 15635], [15505, 15635, 15634], [15506, 15507, 15635], [15507, 15636, 15635], [15507, 15508, 15637], [15507, 15637, 15636], [15508, 15509, 15637], [15509, 15638, 15637], [15509, 15510, 15639], [15509, 15639, 15638], [15510, 15511, 15639], [15511, 15640, 15639], [15511, 15512, 15641], [15511, 15641, 15640], [15512, 15513, 15641], [15513, 15642, 15641], [15513, 15514, 15643], [15513, 15643, 15642], [15514, 15515, 15643], [15515, 15644, 15643], [15515, 15516, 15645], [15515, 15645, 15644], [15516, 15517, 15645], [15517, 15646, 15645], [15517, 15518, 15647], [15517, 15647, 15646], [15518, 15519, 15647], [15519, 15648, 15647], [15519, 15520, 15649], [15519, 15649, 15648], [15520, 15521, 15649], [15521, 15650, 15649], [15521, 15522, 15651], [15521, 15651, 15650], [15522, 15523, 15651], [15523, 15652, 15651], [15523, 15524, 15653], [15523, 15653, 15652], [15524, 15525, 15653], [15525, 15654, 15653], [15526, 15527, 15655], [15527, 15656, 15655], [15527, 15528, 15657], [15527, 15657, 15656], [15528, 15529, 15657], [15529, 15658, 15657], [15529, 15530, 15659], [15529, 15659, 15658], [15530, 15531, 15659], [15531, 15660, 15659], [15531, 15532, 15661], [15531, 15661, 15660], [15532, 15533, 15661], [15533, 15662, 15661], [15533, 15534, 15663], [15533, 15663, 15662], [15534, 15535, 15663], [15535, 15664, 15663], [15535, 15536, 15665], [15535, 15665, 15664], [15536, 15537, 15665], [15537, 15666, 15665], [15537, 15538, 15667], [15537, 15667, 15666], [15538, 15539, 15667], [15539, 15668, 15667], [15539, 15540, 15669], [15539, 15669, 15668], [15540, 15541, 15669], [15541, 15670, 15669], [15541, 15542, 15671], [15541, 15671, 15670], [15542, 15543, 15671], [15543, 15672, 15671], [15543, 15544, 15673], [15543, 15673, 15672], [15544, 15545, 15673], [15545, 15674, 15673], [15545, 15546, 15675], [15545, 15675, 15674], [15546, 15547, 15675], [15547, 15676, 15675], [15547, 15548, 15677], [15547, 15677, 15676], [15548, 15549, 15677], [15549, 15678, 15677], [15549, 15550, 15679], [15549, 15679, 15678], [15550, 15551, 15679], [15551, 15680, 15679], [15551, 15552, 15681], [15551, 15681, 15680], [15552, 15553, 15681], [15553, 15682, 15681], [15553, 15554, 15683], [15553, 15683, 15682], [15554, 15555, 15683], [15555, 15684, 15683], [15555, 15556, 15685], [15555, 15685, 
15684], [15556, 15557, 15685], [15557, 15686, 15685], [15557, 15558, 15687], [15557, 15687, 15686], [15558, 15559, 15687], [15559, 15688, 15687], [15559, 15560, 15689], [15559, 15689, 15688], [15560, 15561, 15689], [15561, 15690, 15689], [15561, 15562, 15691], [15561, 15691, 15690], [15562, 15563, 15691], [15563, 15692, 15691], [15563, 15564, 15693], [15563, 15693, 15692], [15564, 15565, 15693], [15565, 15694, 15693], [15565, 15566, 15695], [15565, 15695, 15694], [15566, 15567, 15695], [15567, 15696, 15695], [15567, 15568, 15697], [15567, 15697, 15696], [15568, 15569, 15697], [15569, 15698, 15697], [15569, 15570, 15699], [15569, 15699, 15698], [15570, 15571, 15699], [15571, 15700, 15699], [15571, 15572, 15701], [15571, 15701, 15700], [15572, 15573, 15701], [15573, 15702, 15701], [15573, 15574, 15703], [15573, 15703, 15702], [15574, 15575, 15703], [15575, 15704, 15703], [15575, 15576, 15705], [15575, 15705, 15704], [15576, 15577, 15705], [15577, 15706, 15705], [15577, 15578, 15707], [15577, 15707, 15706], [15578, 15579, 15707], [15579, 15708, 15707], [15579, 15580, 15709], [15579, 15709, 15708], [15580, 15581, 15709], [15581, 15710, 15709], [15581, 15582, 15711], [15581, 15711, 15710], [15582, 15583, 15711], [15583, 15712, 15711], [15583, 15584, 15713], [15583, 15713, 15712], [15584, 15585, 15713], [15585, 15714, 15713], [15585, 15586, 15715], [15585, 15715, 15714], [15586, 15587, 15715], [15587, 15716, 15715], [15587, 15588, 15717], [15587, 15717, 15716], [15588, 15589, 15717], [15589, 15718, 15717], [15589, 15590, 15719], [15589, 15719, 15718], [15590, 15591, 15719], [15591, 15720, 15719], [15591, 15592, 15721], [15591, 15721, 15720], [15592, 15593, 15721], [15593, 15722, 15721], [15593, 15594, 15723], [15593, 15723, 15722], [15594, 15595, 15723], [15595, 15724, 15723], [15595, 15596, 15725], [15595, 15725, 15724], [15596, 15597, 15725], [15597, 15726, 15725], [15597, 15598, 15727], [15597, 15727, 15726], [15598, 15599, 15727], [15599, 15728, 15727], [15599, 15600, 15729], [15599, 15729, 15728], [15600, 15601, 15729], [15601, 15730, 15729], [15601, 15602, 15731], [15601, 15731, 15730], [15602, 15603, 15731], [15603, 15732, 15731], [15603, 15604, 15733], [15603, 15733, 15732], [15604, 15605, 15733], [15605, 15734, 15733], [15605, 15606, 15735], [15605, 15735, 15734], [15606, 15607, 15735], [15607, 15736, 15735], [15607, 15608, 15737], [15607, 15737, 15736], [15608, 15609, 15737], [15609, 15738, 15737], [15609, 15610, 15739], [15609, 15739, 15738], [15610, 15611, 15739], [15611, 15740, 15739], [15611, 15612, 15741], [15611, 15741, 15740], [15612, 15613, 15741], [15613, 15742, 15741], [15613, 15614, 15743], [15613, 15743, 15742], [15614, 15615, 15743], [15615, 15744, 15743], [15615, 15616, 15745], [15615, 15745, 15744], [15616, 15617, 15745], [15617, 15746, 15745], [15617, 15618, 15747], [15617, 15747, 15746], [15618, 15619, 15747], [15619, 15748, 15747], [15619, 15620, 15749], [15619, 15749, 15748], [15620, 15621, 15749], [15621, 15750, 15749], [15621, 15622, 15751], [15621, 15751, 15750], [15622, 15623, 15751], [15623, 15752, 15751], [15623, 15624, 15753], [15623, 15753, 15752], [15624, 15625, 15753], [15625, 15754, 15753], [15625, 15626, 15755], [15625, 15755, 15754], [15626, 15627, 15755], [15627, 15756, 15755], [15627, 15628, 15757], [15627, 15757, 15756], [15628, 15629, 15757], [15629, 15758, 15757], [15629, 15630, 15759], [15629, 15759, 15758], [15630, 15631, 15759], [15631, 15760, 15759], [15631, 15632, 15761], [15631, 15761, 15760], [15632, 15633, 15761], [15633, 15762, 15761], 
[15633, 15634, 15763], [15633, 15763, 15762], [15634, 15635, 15763], [15635, 15764, 15763], [15635, 15636, 15765], [15635, 15765, 15764], [15636, 15637, 15765], [15637, 15766, 15765], [15637, 15638, 15767], [15637, 15767, 15766], [15638, 15639, 15767], [15639, 15768, 15767], [15639, 15640, 15769], [15639, 15769, 15768], [15640, 15641, 15769], [15641, 15770, 15769], [15641, 15642, 15771], [15641, 15771, 15770], [15642, 15643, 15771], [15643, 15772, 15771], [15643, 15644, 15773], [15643, 15773, 15772], [15644, 15645, 15773], [15645, 15774, 15773], [15645, 15646, 15775], [15645, 15775, 15774], [15646, 15647, 15775], [15647, 15776, 15775], [15647, 15648, 15777], [15647, 15777, 15776], [15648, 15649, 15777], [15649, 15778, 15777], [15649, 15650, 15779], [15649, 15779, 15778], [15650, 15651, 15779], [15651, 15780, 15779], [15651, 15652, 15781], [15651, 15781, 15780], [15652, 15653, 15781], [15653, 15782, 15781], [15653, 15654, 15783], [15653, 15783, 15782], [15655, 15656, 15785], [15655, 15785, 15784], [15656, 15657, 15785], [15657, 15786, 15785], [15657, 15658, 15787], [15657, 15787, 15786], [15658, 15659, 15787], [15659, 15788, 15787], [15659, 15660, 15789], [15659, 15789, 15788], [15660, 15661, 15789], [15661, 15790, 15789], [15661, 15662, 15791], [15661, 15791, 15790], [15662, 15663, 15791], [15663, 15792, 15791], [15663, 15664, 15793], [15663, 15793, 15792], [15664, 15665, 15793], [15665, 15794, 15793], [15665, 15666, 15795], [15665, 15795, 15794], [15666, 15667, 15795], [15667, 15796, 15795], [15667, 15668, 15797], [15667, 15797, 15796], [15668, 15669, 15797], [15669, 15798, 15797], [15669, 15670, 15799], [15669, 15799, 15798], [15670, 15671, 15799], [15671, 15800, 15799], [15671, 15672, 15801], [15671, 15801, 15800], [15672, 15673, 15801], [15673, 15802, 15801], [15673, 15674, 15803], [15673, 15803, 15802], [15674, 15675, 15803], [15675, 15804, 15803], [15675, 15676, 15805], [15675, 15805, 15804], [15676, 15677, 15805], [15677, 15806, 15805], [15677, 15678, 15807], [15677, 15807, 15806], [15678, 15679, 15807], [15679, 15808, 15807], [15679, 15680, 15809], [15679, 15809, 15808], [15680, 15681, 15809], [15681, 15810, 15809], [15681, 15682, 15811], [15681, 15811, 15810], [15682, 15683, 15811], [15683, 15812, 15811], [15683, 15684, 15813], [15683, 15813, 15812], [15684, 15685, 15813], [15685, 15814, 15813], [15685, 15686, 15815], [15685, 15815, 15814], [15686, 15687, 15815], [15687, 15816, 15815], [15687, 15688, 15817], [15687, 15817, 15816], [15688, 15689, 15817], [15689, 15818, 15817], [15689, 15690, 15819], [15689, 15819, 15818], [15690, 15691, 15819], [15691, 15820, 15819], [15691, 15692, 15821], [15691, 15821, 15820], [15692, 15693, 15821], [15693, 15822, 15821], [15693, 15694, 15823], [15693, 15823, 15822], [15694, 15695, 15823], [15695, 15824, 15823], [15695, 15696, 15825], [15695, 15825, 15824], [15696, 15697, 15825], [15697, 15826, 15825], [15697, 15698, 15827], [15697, 15827, 15826], [15698, 15699, 15827], [15699, 15828, 15827], [15699, 15700, 15829], [15699, 15829, 15828], [15700, 15701, 15829], [15701, 15830, 15829], [15701, 15702, 15831], [15701, 15831, 15830], [15702, 15703, 15831], [15703, 15832, 15831], [15703, 15704, 15833], [15703, 15833, 15832], [15704, 15705, 15833], [15705, 15834, 15833], [15705, 15706, 15835], [15705, 15835, 15834], [15706, 15707, 15835], [15707, 15836, 15835], [15707, 15708, 15837], [15707, 15837, 15836], [15708, 15709, 15837], [15709, 15838, 15837], [15709, 15710, 15839], [15709, 15839, 15838], [15710, 15711, 15839], [15711, 15840, 15839], [15711, 
15712, 15841], [15711, 15841, 15840], [15712, 15713, 15841], [15713, 15842, 15841], [15713, 15714, 15843], [15713, 15843, 15842], [15714, 15715, 15843], [15715, 15844, 15843], [15715, 15716, 15845], [15715, 15845, 15844], [15716, 15717, 15845], [15717, 15846, 15845], [15717, 15718, 15847], [15717, 15847, 15846], [15718, 15719, 15847], [15719, 15848, 15847], [15719, 15720, 15849], [15719, 15849, 15848], [15720, 15721, 15849], [15721, 15850, 15849], [15721, 15722, 15851], [15721, 15851, 15850], [15722, 15723, 15851], [15723, 15852, 15851], [15723, 15724, 15853], [15723, 15853, 15852], [15724, 15725, 15853], [15725, 15854, 15853], [15725, 15726, 15855], [15725, 15855, 15854], [15726, 15727, 15855], [15727, 15856, 15855], [15727, 15728, 15857], [15727, 15857, 15856], [15728, 15729, 15857], [15729, 15858, 15857], [15729, 15730, 15859], [15729, 15859, 15858], [15730, 15731, 15859], [15731, 15860, 15859], [15731, 15732, 15861], [15731, 15861, 15860], [15732, 15733, 15861], [15733, 15862, 15861], [15733, 15734, 15863], [15733, 15863, 15862], [15734, 15735, 15863], [15735, 15864, 15863], [15735, 15736, 15865], [15735, 15865, 15864], [15736, 15737, 15865], [15737, 15866, 15865], [15737, 15738, 15867], [15737, 15867, 15866], [15738, 15739, 15867], [15739, 15868, 15867], [15739, 15740, 15869], [15739, 15869, 15868], [15740, 15741, 15869], [15741, 15870, 15869], [15741, 15742, 15871], [15741, 15871, 15870], [15742, 15743, 15871], [15743, 15872, 15871], [15743, 15744, 15873], [15743, 15873, 15872], [15744, 15745, 15873], [15745, 15874, 15873], [15745, 15746, 15875], [15745, 15875, 15874], [15746, 15747, 15875], [15747, 15876, 15875], [15747, 15748, 15877], [15747, 15877, 15876], [15748, 15749, 15877], [15749, 15878, 15877], [15749, 15750, 15879], [15749, 15879, 15878], [15750, 15751, 15879], [15751, 15880, 15879], [15751, 15752, 15881], [15751, 15881, 15880], [15752, 15753, 15881], [15753, 15882, 15881], [15753, 15754, 15883], [15753, 15883, 15882], [15754, 15755, 15883], [15755, 15884, 15883], [15755, 15756, 15885], [15755, 15885, 15884], [15756, 15757, 15885], [15757, 15886, 15885], [15757, 15758, 15887], [15757, 15887, 15886], [15758, 15759, 15887], [15759, 15888, 15887], [15759, 15760, 15889], [15759, 15889, 15888], [15760, 15761, 15889], [15761, 15890, 15889], [15761, 15762, 15891], [15761, 15891, 15890], [15762, 15763, 15891], [15763, 15892, 15891], [15763, 15764, 15893], [15763, 15893, 15892], [15764, 15765, 15893], [15765, 15894, 15893], [15765, 15766, 15895], [15765, 15895, 15894], [15766, 15767, 15895], [15767, 15896, 15895], [15767, 15768, 15897], [15767, 15897, 15896], [15768, 15769, 15897], [15769, 15898, 15897], [15769, 15770, 15899], [15769, 15899, 15898], [15770, 15771, 15899], [15771, 15900, 15899], [15771, 15772, 15901], [15771, 15901, 15900], [15772, 15773, 15901], [15773, 15902, 15901], [15773, 15774, 15903], [15773, 15903, 15902], [15774, 15775, 15903], [15775, 15904, 15903], [15775, 15776, 15905], [15775, 15905, 15904], [15776, 15777, 15905], [15777, 15906, 15905], [15777, 15778, 15907], [15777, 15907, 15906], [15778, 15779, 15907], [15779, 15908, 15907], [15779, 15780, 15909], [15779, 15909, 15908], [15780, 15781, 15909], [15781, 15910, 15909], [15781, 15782, 15911], [15781, 15911, 15910], [15782, 15783, 15911], [15783, 15912, 15911], [15784, 15785, 15913], [15785, 15914, 15913], [15785, 15786, 15915], [15785, 15915, 15914], [15786, 15787, 15915], [15787, 15916, 15915], [15787, 15788, 15917], [15787, 15917, 15916], [15788, 15789, 15917], [15789, 15918, 15917], [15789, 15790, 
15919], [15789, 15919, 15918], [15790, 15791, 15919], [15791, 15920, 15919], [15791, 15792, 15921], [15791, 15921, 15920], [15792, 15793, 15921], [15793, 15922, 15921], [15793, 15794, 15923], [15793, 15923, 15922], [15794, 15795, 15923], [15795, 15924, 15923], [15795, 15796, 15925], [15795, 15925, 15924], [15796, 15797, 15925], [15797, 15926, 15925], [15797, 15798, 15927], [15797, 15927, 15926], [15798, 15799, 15927], [15799, 15928, 15927], [15799, 15800, 15929], [15799, 15929, 15928], [15800, 15801, 15929], [15801, 15930, 15929], [15801, 15802, 15931], [15801, 15931, 15930], [15802, 15803, 15931], [15803, 15932, 15931], [15803, 15804, 15933], [15803, 15933, 15932], [15804, 15805, 15933], [15805, 15934, 15933], [15805, 15806, 15935], [15805, 15935, 15934], [15806, 15807, 15935], [15807, 15936, 15935], [15807, 15808, 15937], [15807, 15937, 15936], [15808, 15809, 15937], [15809, 15938, 15937], [15809, 15810, 15939], [15809, 15939, 15938], [15810, 15811, 15939], [15811, 15940, 15939], [15811, 15812, 15941], [15811, 15941, 15940], [15812, 15813, 15941], [15813, 15942, 15941], [15813, 15814, 15943], [15813, 15943, 15942], [15814, 15815, 15943], [15815, 15944, 15943], [15815, 15816, 15945], [15815, 15945, 15944], [15816, 15817, 15945], [15817, 15946, 15945], [15817, 15818, 15947], [15817, 15947, 15946], [15818, 15819, 15947], [15819, 15948, 15947], [15819, 15820, 15949], [15819, 15949, 15948], [15820, 15821, 15949], [15821, 15950, 15949], [15821, 15822, 15951], [15821, 15951, 15950], [15822, 15823, 15951], [15823, 15952, 15951], [15823, 15824, 15953], [15823, 15953, 15952], [15824, 15825, 15953], [15825, 15954, 15953], [15825, 15826, 15955], [15825, 15955, 15954], [15826, 15827, 15955], [15827, 15956, 15955], [15827, 15828, 15957], [15827, 15957, 15956], [15828, 15829, 15957], [15829, 15958, 15957], [15829, 15830, 15959], [15829, 15959, 15958], [15830, 15831, 15959], [15831, 15960, 15959], [15831, 15832, 15961], [15831, 15961, 15960], [15832, 15833, 15961], [15833, 15962, 15961], [15833, 15834, 15963], [15833, 15963, 15962], [15834, 15835, 15963], [15835, 15964, 15963], [15835, 15836, 15965], [15835, 15965, 15964], [15836, 15837, 15965], [15837, 15966, 15965], [15837, 15838, 15967], [15837, 15967, 15966], [15838, 15839, 15967], [15839, 15968, 15967], [15839, 15840, 15969], [15839, 15969, 15968], [15840, 15841, 15969], [15841, 15970, 15969], [15841, 15842, 15971], [15841, 15971, 15970], [15842, 15843, 15971], [15843, 15972, 15971], [15843, 15844, 15973], [15843, 15973, 15972], [15844, 15845, 15973], [15845, 15974, 15973], [15845, 15846, 15975], [15845, 15975, 15974], [15846, 15847, 15975], [15847, 15976, 15975], [15847, 15848, 15977], [15847, 15977, 15976], [15848, 15849, 15977], [15849, 15978, 15977], [15849, 15850, 15979], [15849, 15979, 15978], [15850, 15851, 15979], [15851, 15980, 15979], [15851, 15852, 15981], [15851, 15981, 15980], [15852, 15853, 15981], [15853, 15982, 15981], [15853, 15854, 15983], [15853, 15983, 15982], [15854, 15855, 15983], [15855, 15984, 15983], [15855, 15856, 15985], [15855, 15985, 15984], [15856, 15857, 15985], [15857, 15986, 15985], [15857, 15858, 15987], [15857, 15987, 15986], [15858, 15859, 15987], [15859, 15988, 15987], [15859, 15860, 15989], [15859, 15989, 15988], [15860, 15861, 15989], [15861, 15990, 15989], [15861, 15862, 15991], [15861, 15991, 15990], [15862, 15863, 15991], [15863, 15992, 15991], [15863, 15864, 15993], [15863, 15993, 15992], [15864, 15865, 15993], [15865, 15994, 15993], [15865, 15866, 15995], [15865, 15995, 15994], [15866, 15867, 15995], 
[15867, 15996, 15995], [15867, 15868, 15997], [15867, 15997, 15996], [15868, 15869, 15997], [15869, 15998, 15997], [15869, 15870, 15999], [15869, 15999, 15998], [15870, 15871, 15999], [15871, 16000, 15999], [15871, 15872, 16001], [15871, 16001, 16000], [15872, 15873, 16001], [15873, 16002, 16001], [15873, 15874, 16003], [15873, 16003, 16002], [15874, 15875, 16003], [15875, 16004, 16003], [15875, 15876, 16005], [15875, 16005, 16004], [15876, 15877, 16005], [15877, 16006, 16005], [15877, 15878, 16007], [15877, 16007, 16006], [15878, 15879, 16007], [15879, 16008, 16007], [15879, 15880, 16009], [15879, 16009, 16008], [15880, 15881, 16009], [15881, 16010, 16009], [15881, 15882, 16011], [15881, 16011, 16010], [15882, 15883, 16011], [15883, 16012, 16011], [15883, 15884, 16013], [15883, 16013, 16012], [15884, 15885, 16013], [15885, 16014, 16013], [15885, 15886, 16015], [15885, 16015, 16014], [15886, 15887, 16015], [15887, 16016, 16015], [15887, 15888, 16017], [15887, 16017, 16016], [15888, 15889, 16017], [15889, 16018, 16017], [15889, 15890, 16019], [15889, 16019, 16018], [15890, 15891, 16019], [15891, 16020, 16019], [15891, 15892, 16021], [15891, 16021, 16020], [15892, 15893, 16021], [15893, 16022, 16021], [15893, 15894, 16023], [15893, 16023, 16022], [15894, 15895, 16023], [15895, 16024, 16023], [15895, 15896, 16025], [15895, 16025, 16024], [15896, 15897, 16025], [15897, 16026, 16025], [15897, 15898, 16027], [15897, 16027, 16026], [15898, 15899, 16027], [15899, 16028, 16027], [15899, 15900, 16029], [15899, 16029, 16028], [15900, 15901, 16029], [15901, 16030, 16029], [15901, 15902, 16031], [15901, 16031, 16030], [15902, 15903, 16031], [15903, 16032, 16031], [15903, 15904, 16033], [15903, 16033, 16032], [15904, 15905, 16033], [15905, 16034, 16033], [15905, 15906, 16035], [15905, 16035, 16034], [15906, 15907, 16035], [15907, 16036, 16035], [15907, 15908, 16037], [15907, 16037, 16036], [15908, 15909, 16037], [15909, 16038, 16037], [15909, 15910, 16039], [15909, 16039, 16038], [15910, 15911, 16039], [15911, 16040, 16039], [15911, 15912, 16041], [15911, 16041, 16040], [15913, 15914, 16043], [15913, 16043, 16042], [15914, 15915, 16043], [15915, 16044, 16043], [15915, 15916, 16045], [15915, 16045, 16044], [15916, 15917, 16045], [15917, 16046, 16045], [15917, 15918, 16047], [15917, 16047, 16046], [15918, 15919, 16047], [15919, 16048, 16047], [15919, 15920, 16049], [15919, 16049, 16048], [15920, 15921, 16049], [15921, 16050, 16049], [15921, 15922, 16051], [15921, 16051, 16050], [15922, 15923, 16051], [15923, 16052, 16051], [15923, 15924, 16053], [15923, 16053, 16052], [15924, 15925, 16053], [15925, 16054, 16053], [15925, 15926, 16055], [15925, 16055, 16054], [15926, 15927, 16055], [15927, 16056, 16055], [15927, 15928, 16057], [15927, 16057, 16056], [15928, 15929, 16057], [15929, 16058, 16057], [15929, 15930, 16059], [15929, 16059, 16058], [15930, 15931, 16059], [15931, 16060, 16059], [15931, 15932, 16061], [15931, 16061, 16060], [15932, 15933, 16061], [15933, 16062, 16061], [15933, 15934, 16063], [15933, 16063, 16062], [15934, 15935, 16063], [15935, 16064, 16063], [15935, 15936, 16065], [15935, 16065, 16064], [15936, 15937, 16065], [15937, 16066, 16065], [15937, 15938, 16067], [15937, 16067, 16066], [15938, 15939, 16067], [15939, 16068, 16067], [15939, 15940, 16069], [15939, 16069, 16068], [15940, 15941, 16069], [15941, 16070, 16069], [15941, 15942, 16071], [15941, 16071, 16070], [15942, 15943, 16071], [15943, 16072, 16071], [15943, 15944, 16073], [15943, 16073, 16072], [15944, 15945, 16073], [15945, 
16074, 16073], [15945, 15946, 16075], [15945, 16075, 16074], [15946, 15947, 16075], [15947, 16076, 16075], [15947, 15948, 16077], [15947, 16077, 16076], [15948, 15949, 16077], [15949, 16078, 16077], [15949, 15950, 16079], [15949, 16079, 16078], [15950, 15951, 16079], [15951, 16080, 16079], [15951, 15952, 16081], [15951, 16081, 16080], [15952, 15953, 16081], [15953, 16082, 16081], [15953, 15954, 16083], [15953, 16083, 16082], [15954, 15955, 16083], [15955, 16084, 16083], [15955, 15956, 16085], [15955, 16085, 16084], [15956, 15957, 16085], [15957, 16086, 16085], [15957, 15958, 16087], [15957, 16087, 16086], [15958, 15959, 16087], [15959, 16088, 16087], [15959, 15960, 16089], [15959, 16089, 16088], [15960, 15961, 16089], [15961, 16090, 16089], [15961, 15962, 16091], [15961, 16091, 16090], [15962, 15963, 16091], [15963, 16092, 16091], [15963, 15964, 16093], [15963, 16093, 16092], [15964, 15965, 16093], [15965, 16094, 16093], [15965, 15966, 16095], [15965, 16095, 16094], [15966, 15967, 16095], [15967, 16096, 16095], [15967, 15968, 16097], [15967, 16097, 16096], [15968, 15969, 16097], [15969, 16098, 16097], [15969, 15970, 16099], [15969, 16099, 16098], [15970, 15971, 16099], [15971, 16100, 16099], [15971, 15972, 16101], [15971, 16101, 16100], [15972, 15973, 16101], [15973, 16102, 16101], [15973, 15974, 16103], [15973, 16103, 16102], [15974, 15975, 16103], [15975, 16104, 16103], [15975, 15976, 16105], [15975, 16105, 16104], [15976, 15977, 16105], [15977, 16106, 16105], [15977, 15978, 16107], [15977, 16107, 16106], [15978, 15979, 16107], [15979, 16108, 16107], [15979, 15980, 16109], [15979, 16109, 16108], [15980, 15981, 16109], [15981, 16110, 16109], [15981, 15982, 16111], [15981, 16111, 16110], [15982, 15983, 16111], [15983, 16112, 16111], [15983, 15984, 16113], [15983, 16113, 16112], [15984, 15985, 16113], [15985, 16114, 16113], [15985, 15986, 16115], [15985, 16115, 16114], [15986, 15987, 16115], [15987, 16116, 16115], [15987, 15988, 16117], [15987, 16117, 16116], [15988, 15989, 16117], [15989, 16118, 16117], [15989, 15990, 16119], [15989, 16119, 16118], [15990, 15991, 16119], [15991, 16120, 16119], [15991, 15992, 16121], [15991, 16121, 16120], [15992, 15993, 16121], [15993, 16122, 16121], [15993, 15994, 16123], [15993, 16123, 16122], [15994, 15995, 16123], [15995, 16124, 16123], [15995, 15996, 16125], [15995, 16125, 16124], [15996, 15997, 16125], [15997, 16126, 16125], [15997, 15998, 16127], [15997, 16127, 16126], [15998, 15999, 16127], [15999, 16128, 16127], [15999, 16000, 16129], [15999, 16129, 16128], [16000, 16001, 16129], [16001, 16130, 16129], [16001, 16002, 16131], [16001, 16131, 16130], [16002, 16003, 16131], [16003, 16132, 16131], [16003, 16004, 16133], [16003, 16133, 16132], [16004, 16005, 16133], [16005, 16134, 16133], [16005, 16006, 16135], [16005, 16135, 16134], [16006, 16007, 16135], [16007, 16136, 16135], [16007, 16008, 16137], [16007, 16137, 16136], [16008, 16009, 16137], [16009, 16138, 16137], [16009, 16010, 16139], [16009, 16139, 16138], [16010, 16011, 16139], [16011, 16140, 16139], [16011, 16012, 16141], [16011, 16141, 16140], [16012, 16013, 16141], [16013, 16142, 16141], [16013, 16014, 16143], [16013, 16143, 16142], [16014, 16015, 16143], [16015, 16144, 16143], [16015, 16016, 16145], [16015, 16145, 16144], [16016, 16017, 16145], [16017, 16146, 16145], [16017, 16018, 16147], [16017, 16147, 16146], [16018, 16019, 16147], [16019, 16148, 16147], [16019, 16020, 16149], [16019, 16149, 16148], [16020, 16021, 16149], [16021, 16150, 16149], [16021, 16022, 16151], [16021, 16151, 
16150], [16022, 16023, 16151], [16023, 16152, 16151], [16023, 16024, 16153], [16023, 16153, 16152], [16024, 16025, 16153], [16025, 16154, 16153], [16025, 16026, 16155], [16025, 16155, 16154], [16026, 16027, 16155], [16027, 16156, 16155], [16027, 16028, 16157], [16027, 16157, 16156], [16028, 16029, 16157], [16029, 16158, 16157], [16029, 16030, 16159], [16029, 16159, 16158], [16030, 16031, 16159], [16031, 16160, 16159], [16031, 16032, 16161], [16031, 16161, 16160], [16032, 16033, 16161], [16033, 16162, 16161], [16033, 16034, 16163], [16033, 16163, 16162], [16034, 16035, 16163], [16035, 16164, 16163], [16035, 16036, 16165], [16035, 16165, 16164], [16036, 16037, 16165], [16037, 16166, 16165], [16037, 16038, 16167], [16037, 16167, 16166], [16038, 16039, 16167], [16039, 16168, 16167], [16039, 16040, 16169], [16039, 16169, 16168], [16040, 16041, 16169], [16041, 16170, 16169], [16042, 16043, 19267], [16043, 19268, 19267], [16043, 16044, 19269], [16043, 19269, 19268], [16044, 16045, 19269], [16045, 19270, 19269], [16045, 16046, 19271], [16045, 19271, 19270], [16046, 16047, 19271], [16047, 19272, 19271], [16047, 16048, 19273], [16047, 19273, 19272], [16048, 16049, 19273], [16049, 19274, 19273], [16049, 16050, 19275], [16049, 19275, 19274], [16050, 16051, 19275], [16051, 19276, 19275], [16051, 16052, 19277], [16051, 19277, 19276], [16052, 16053, 19277], [16053, 19278, 19277], [16053, 16054, 19279], [16053, 19279, 19278], [16054, 16055, 19279], [16055, 19280, 19279], [16055, 16056, 19281], [16055, 19281, 19280], [16056, 16057, 19281], [16057, 19282, 19281], [16057, 16058, 19283], [16057, 19283, 19282], [16058, 16059, 19283], [16059, 19284, 19283], [16059, 16060, 19285], [16059, 19285, 19284], [16060, 16061, 19285], [16061, 19286, 19285], [16061, 16062, 19287], [16061, 19287, 19286], [16062, 16063, 19287], [16063, 19288, 19287], [16063, 16064, 19289], [16063, 19289, 19288], [16064, 16065, 19289], [16065, 19290, 19289], [16065, 16066, 19291], [16065, 19291, 19290], [16066, 16067, 19291], [16067, 19292, 19291], [16067, 16068, 19293], [16067, 19293, 19292], [16068, 16069, 19293], [16069, 19294, 19293], [16069, 16070, 19295], [16069, 19295, 19294], [16070, 16071, 19295], [16071, 19296, 19295], [16071, 16072, 19297], [16071, 19297, 19296], [16072, 16073, 19297], [16073, 19298, 19297], [16073, 16074, 19299], [16073, 19299, 19298], [16074, 16075, 19299], [16075, 19300, 19299], [16075, 16076, 19301], [16075, 19301, 19300], [16076, 16077, 19301], [16077, 19302, 19301], [16077, 16078, 19303], [16077, 19303, 19302], [16078, 16079, 19303], [16079, 19304, 19303], [16079, 16080, 19305], [16079, 19305, 19304], [16080, 16081, 19305], [16081, 19306, 19305], [16081, 16082, 19307], [16081, 19307, 19306], [16082, 16083, 19307], [16083, 19308, 19307], [16083, 16084, 19309], [16083, 19309, 19308], [16084, 16085, 19309], [16085, 19310, 19309], [16085, 16086, 19311], [16085, 19311, 19310], [16086, 16087, 19311], [16087, 19312, 19311], [16087, 16088, 19313], [16087, 19313, 19312], [16088, 16089, 19313], [16089, 19314, 19313], [16089, 16090, 19315], [16089, 19315, 19314], [16090, 16091, 19315], [16091, 19316, 19315], [16091, 16092, 19317], [16091, 19317, 19316], [16092, 16093, 19317], [16093, 19318, 19317], [16093, 16094, 19319], [16093, 19319, 19318], [16094, 16095, 19319], [16095, 19320, 19319], [16095, 16096, 19321], [16095, 19321, 19320], [16096, 16097, 19321], [16097, 19322, 19321], [16097, 16098, 19323], [16097, 19323, 19322], [16098, 16099, 19323], [16099, 19324, 19323], [16099, 16100, 19325], [16099, 19325, 19324], 
[16100, 16101, 19325], [16101, 19326, 19325], [16101, 16102, 19327], [16101, 19327, 19326], [16102, 16103, 19327], [16103, 19328, 19327], [16103, 16104, 19329], [16103, 19329, 19328], [16104, 16105, 19329], [16105, 19330, 19329], [16105, 16106, 19331], [16105, 19331, 19330], [16106, 16107, 19331], [16107, 19332, 19331], [16107, 16108, 19333], [16107, 19333, 19332], [16108, 16109, 19333], [16109, 19334, 19333], [16109, 16110, 19335], [16109, 19335, 19334], [16110, 16111, 19335], [16111, 19336, 19335], [16111, 16112, 19337], [16111, 19337, 19336], [16112, 16113, 19337], [16113, 19338, 19337], [16113, 16114, 19339], [16113, 19339, 19338], [16114, 16115, 19339], [16115, 19340, 19339], [16115, 16116, 19341], [16115, 19341, 19340], [16116, 16117, 19341], [16117, 19342, 19341], [16117, 16118, 19343], [16117, 19343, 19342], [16118, 16119, 19343], [16119, 19344, 19343], [16119, 16120, 19345], [16119, 19345, 19344], [16120, 16121, 19345], [16121, 19346, 19345], [16121, 16122, 19347], [16121, 19347, 19346], [16122, 16123, 19347], [16123, 19348, 19347], [16123, 16124, 19349], [16123, 19349, 19348], [16124, 16125, 19349], [16125, 19350, 19349], [16125, 16126, 19351], [16125, 19351, 19350], [16126, 16127, 19351], [16127, 19352, 19351], [16127, 16128, 19353], [16127, 19353, 19352], [16128, 16129, 19353], [16129, 19354, 19353], [16129, 16130, 19355], [16129, 19355, 19354], [16130, 16131, 19355], [16131, 19356, 19355], [16131, 16132, 19357], [16131, 19357, 19356], [16132, 16133, 19357], [16133, 19358, 19357], [16133, 16134, 19359], [16133, 19359, 19358], [16134, 16135, 19359], [16135, 19360, 19359], [16135, 16136, 19361], [16135, 19361, 19360], [16136, 16137, 19361], [16137, 19362, 19361], [16137, 16138, 19363], [16137, 19363, 19362], [16138, 16139, 19363], [16139, 19364, 19363], [16139, 16140, 19365], [16139, 19365, 19364], [16140, 16141, 19365], [16141, 19366, 19365], [16141, 16142, 19367], [16141, 19367, 19366], [16142, 16143, 19367], [16143, 19368, 19367], [16143, 16144, 19369], [16143, 19369, 19368], [16144, 16145, 19369], [16145, 19370, 19369], [16145, 16146, 19371], [16145, 19371, 19370], [16146, 16147, 19371], [16147, 19372, 19371], [16147, 16148, 19373], [16147, 19373, 19372], [16148, 16149, 19373], [16149, 19374, 19373], [16149, 16150, 19375], [16149, 19375, 19374], [16150, 16151, 19375], [16151, 19376, 19375], [16151, 16152, 19377], [16151, 19377, 19376], [16152, 16153, 19377], [16153, 19378, 19377], [16153, 16154, 19379], [16153, 19379, 19378], [16154, 16155, 19379], [16155, 19380, 19379], [16155, 16156, 19381], [16155, 19381, 19380], [16156, 16157, 19381], [16157, 19382, 19381], [16157, 16158, 19383], [16157, 19383, 19382], [16158, 16159, 19383], [16159, 19384, 19383], [16159, 16160, 19385], [16159, 19385, 19384], [16160, 16161, 19385], [16161, 19386, 19385], [16161, 16162, 19387], [16161, 19387, 19386], [16162, 16163, 19387], [16163, 19388, 19387], [16163, 16164, 19389], [16163, 19389, 19388], [16164, 16165, 19389], [16165, 19390, 19389], [16165, 16166, 19391], [16165, 19391, 19390], [16166, 16167, 19391], [16167, 19392, 19391], [16167, 16168, 19393], [16167, 19393, 19392], [16168, 16169, 19393], [16169, 19394, 19393], [16169, 16170, 19395], [16169, 19395, 19394], [16171, 16172, 16301], [16171, 16301, 16300], [16172, 16173, 16301], [16173, 16302, 16301], [16173, 16174, 16303], [16173, 16303, 16302], [16174, 16175, 16303], [16175, 16304, 16303], [16175, 16176, 16305], [16175, 16305, 16304], [16176, 16177, 16305], [16177, 16306, 16305], [16177, 16178, 16307], [16177, 16307, 16306], [16178, 
16179, 16307], [16179, 16308, 16307], [16179, 16180, 16309], [16179, 16309, 16308], [16180, 16181, 16309], [16181, 16310, 16309], [16181, 16182, 16311], [16181, 16311, 16310], [16182, 16183, 16311], [16183, 16312, 16311], [16183, 16184, 16313], [16183, 16313, 16312], [16184, 16185, 16313], [16185, 16314, 16313], [16185, 16186, 16315], [16185, 16315, 16314], [16186, 16187, 16315], [16187, 16316, 16315], [16187, 16188, 16317], [16187, 16317, 16316], [16188, 16189, 16317], [16189, 16318, 16317], [16189, 16190, 16319], [16189, 16319, 16318], [16190, 16191, 16319], [16191, 16320, 16319], [16191, 16192, 16321], [16191, 16321, 16320], [16192, 16193, 16321], [16193, 16322, 16321], [16193, 16194, 16323], [16193, 16323, 16322], [16194, 16195, 16323], [16195, 16324, 16323], [16195, 16196, 16325], [16195, 16325, 16324], [16196, 16197, 16325], [16197, 16326, 16325], [16197, 16198, 16327], [16197, 16327, 16326], [16198, 16199, 16327], [16199, 16328, 16327], [16199, 16200, 16329], [16199, 16329, 16328], [16200, 16201, 16329], [16201, 16330, 16329], [16201, 16202, 16331], [16201, 16331, 16330], [16202, 16203, 16331], [16203, 16332, 16331], [16203, 16204, 16333], [16203, 16333, 16332], [16204, 16205, 16333], [16205, 16334, 16333], [16205, 16206, 16335], [16205, 16335, 16334], [16206, 16207, 16335], [16207, 16336, 16335], [16207, 16208, 16337], [16207, 16337, 16336], [16208, 16209, 16337], [16209, 16338, 16337], [16209, 16210, 16339], [16209, 16339, 16338], [16210, 16211, 16339], [16211, 16340, 16339], [16211, 16212, 16341], [16211, 16341, 16340], [16212, 16213, 16341], [16213, 16342, 16341], [16213, 16214, 16343], [16213, 16343, 16342], [16214, 16215, 16343], [16215, 16344, 16343], [16215, 16216, 16345], [16215, 16345, 16344], [16216, 16217, 16345], [16217, 16346, 16345], [16217, 16218, 16347], [16217, 16347, 16346], [16218, 16219, 16347], [16219, 16348, 16347], [16219, 16220, 16349], [16219, 16349, 16348], [16220, 16221, 16349], [16221, 16350, 16349], [16221, 16222, 16351], [16221, 16351, 16350], [16222, 16223, 16351], [16223, 16352, 16351], [16223, 16224, 16353], [16223, 16353, 16352], [16224, 16225, 16353], [16225, 16354, 16353], [16225, 16226, 16355], [16225, 16355, 16354], [16226, 16227, 16355], [16227, 16356, 16355], [16227, 16228, 16357], [16227, 16357, 16356], [16228, 16229, 16357], [16229, 16358, 16357], [16229, 16230, 16359], [16229, 16359, 16358], [16230, 16231, 16359], [16231, 16360, 16359], [16231, 16232, 16361], [16231, 16361, 16360], [16232, 16233, 16361], [16233, 16362, 16361], [16233, 16234, 16363], [16233, 16363, 16362], [16234, 16235, 16363], [16235, 16364, 16363], [16235, 16236, 16365], [16235, 16365, 16364], [16236, 16237, 16365], [16237, 16366, 16365], [16237, 16238, 16367], [16237, 16367, 16366], [16238, 16239, 16367], [16239, 16368, 16367], [16239, 16240, 16369], [16239, 16369, 16368], [16240, 16241, 16369], [16241, 16370, 16369], [16241, 16242, 16371], [16241, 16371, 16370], [16242, 16243, 16371], [16243, 16372, 16371], [16243, 16244, 16373], [16243, 16373, 16372], [16244, 16245, 16373], [16245, 16374, 16373], [16245, 16246, 16375], [16245, 16375, 16374], [16246, 16247, 16375], [16247, 16376, 16375], [16247, 16248, 16377], [16247, 16377, 16376], [16248, 16249, 16377], [16249, 16378, 16377], [16249, 16250, 16379], [16249, 16379, 16378], [16250, 16251, 16379], [16251, 16380, 16379], [16251, 16252, 16381], [16251, 16381, 16380], [16252, 16253, 16381], [16253, 16382, 16381], [16253, 16254, 16383], [16253, 16383, 16382], [16254, 16255, 16383], [16255, 16384, 16383], [16255, 16256, 
16385], [16255, 16385, 16384], [16256, 16257, 16385], [16257, 16386, 16385], [16257, 16258, 16387], [16257, 16387, 16386], [16258, 16259, 16387], [16259, 16388, 16387], [16259, 16260, 16389], [16259, 16389, 16388], [16260, 16261, 16389], [16261, 16390, 16389], [16261, 16262, 16391], [16261, 16391, 16390], [16262, 16263, 16391], [16263, 16392, 16391], [16263, 16264, 16393], [16263, 16393, 16392], [16264, 16265, 16393], [16265, 16394, 16393], [16265, 16266, 16395], [16265, 16395, 16394], [16266, 16267, 16395], [16267, 16396, 16395], [16267, 16268, 16397], [16267, 16397, 16396], [16268, 16269, 16397], [16269, 16398, 16397], [16269, 16270, 16399], [16269, 16399, 16398], [16270, 16271, 16399], [16271, 16400, 16399], [16271, 16272, 16401], [16271, 16401, 16400], [16272, 16273, 16401], [16273, 16402, 16401], [16273, 16274, 16403], [16273, 16403, 16402], [16274, 16275, 16403], [16275, 16404, 16403], [16275, 16276, 16405], [16275, 16405, 16404], [16276, 16277, 16405], [16277, 16406, 16405], [16277, 16278, 16407], [16277, 16407, 16406], [16278, 16279, 16407], [16279, 16408, 16407], [16279, 16280, 16409], [16279, 16409, 16408], [16280, 16281, 16409], [16281, 16410, 16409], [16281, 16282, 16411], [16281, 16411, 16410], [16282, 16283, 16411], [16283, 16412, 16411], [16283, 16284, 16413], [16283, 16413, 16412], [16284, 16285, 16413], [16285, 16414, 16413], [16285, 16286, 16415], [16285, 16415, 16414], [16286, 16287, 16415], [16287, 16416, 16415], [16287, 16288, 16417], [16287, 16417, 16416], [16288, 16289, 16417], [16289, 16418, 16417], [16289, 16290, 16419], [16289, 16419, 16418], [16290, 16291, 16419], [16291, 16420, 16419], [16291, 16292, 16421], [16291, 16421, 16420], [16292, 16293, 16421], [16293, 16422, 16421], [16293, 16294, 16423], [16293, 16423, 16422], [16294, 16295, 16423], [16295, 16424, 16423], [16295, 16296, 16425], [16295, 16425, 16424], [16296, 16297, 16425], [16297, 16426, 16425], [16297, 16298, 16427], [16297, 16427, 16426], [16298, 16299, 16427], [16299, 16428, 16427], [16300, 16301, 16429], [16301, 16430, 16429], [16301, 16302, 16431], [16301, 16431, 16430], [16302, 16303, 16431], [16303, 16432, 16431], [16303, 16304, 16433], [16303, 16433, 16432], [16304, 16305, 16433], [16305, 16434, 16433], [16305, 16306, 16435], [16305, 16435, 16434], [16306, 16307, 16435], [16307, 16436, 16435], [16307, 16308, 16437], [16307, 16437, 16436], [16308, 16309, 16437], [16309, 16438, 16437], [16309, 16310, 16439], [16309, 16439, 16438], [16310, 16311, 16439], [16311, 16440, 16439], [16311, 16312, 16441], [16311, 16441, 16440], [16312, 16313, 16441], [16313, 16442, 16441], [16313, 16314, 16443], [16313, 16443, 16442], [16314, 16315, 16443], [16315, 16444, 16443], [16315, 16316, 16445], [16315, 16445, 16444], [16316, 16317, 16445], [16317, 16446, 16445], [16317, 16318, 16447], [16317, 16447, 16446], [16318, 16319, 16447], [16319, 16448, 16447], [16319, 16320, 16449], [16319, 16449, 16448], [16320, 16321, 16449], [16321, 16450, 16449], [16321, 16322, 16451], [16321, 16451, 16450], [16322, 16323, 16451], [16323, 16452, 16451], [16323, 16324, 16453], [16323, 16453, 16452], [16324, 16325, 16453], [16325, 16454, 16453], [16325, 16326, 16455], [16325, 16455, 16454], [16326, 16327, 16455], [16327, 16456, 16455], [16327, 16328, 16457], [16327, 16457, 16456], [16328, 16329, 16457], [16329, 16458, 16457], [16329, 16330, 16459], [16329, 16459, 16458], [16330, 16331, 16459], [16331, 16460, 16459], [16331, 16332, 16461], [16331, 16461, 16460], [16332, 16333, 16461], [16333, 16462, 16461], [16333, 16334, 16463], 
[16333, 16463, 16462], [16334, 16335, 16463], [16335, 16464, 16463], [16335, 16336, 16465], [16335, 16465, 16464], [16336, 16337, 16465], [16337, 16466, 16465], [16337, 16338, 16467], [16337, 16467, 16466], [16338, 16339, 16467], [16339, 16468, 16467], [16339, 16340, 16469], [16339, 16469, 16468], [16340, 16341, 16469], [16341, 16470, 16469], [16341, 16342, 16471], [16341, 16471, 16470], [16342, 16343, 16471], [16343, 16472, 16471], [16343, 16344, 16473], [16343, 16473, 16472], [16344, 16345, 16473], [16345, 16474, 16473], [16345, 16346, 16475], [16345, 16475, 16474], [16346, 16347, 16475], [16347, 16476, 16475], [16347, 16348, 16477], [16347, 16477, 16476], [16348, 16349, 16477], [16349, 16478, 16477], [16349, 16350, 16479], [16349, 16479, 16478], [16350, 16351, 16479], [16351, 16480, 16479], [16351, 16352, 16481], [16351, 16481, 16480], [16352, 16353, 16481], [16353, 16482, 16481], [16353, 16354, 16483], [16353, 16483, 16482], [16354, 16355, 16483], [16355, 16484, 16483], [16355, 16356, 16485], [16355, 16485, 16484], [16356, 16357, 16485], [16357, 16486, 16485], [16357, 16358, 16487], [16357, 16487, 16486], [16358, 16359, 16487], [16359, 16488, 16487], [16359, 16360, 16489], [16359, 16489, 16488], [16360, 16361, 16489], [16361, 16490, 16489], [16361, 16362, 16491], [16361, 16491, 16490], [16362, 16363, 16491], [16363, 16492, 16491], [16363, 16364, 16493], [16363, 16493, 16492], [16364, 16365, 16493], [16365, 16494, 16493], [16365, 16366, 16495], [16365, 16495, 16494], [16366, 16367, 16495], [16367, 16496, 16495], [16367, 16368, 16497], [16367, 16497, 16496], [16368, 16369, 16497], [16369, 16498, 16497], [16369, 16370, 16499], [16369, 16499, 16498], [16370, 16371, 16499], [16371, 16500, 16499], [16371, 16372, 16501], [16371, 16501, 16500], [16372, 16373, 16501], [16373, 16502, 16501], [16373, 16374, 16503], [16373, 16503, 16502], [16374, 16375, 16503], [16375, 16504, 16503], [16375, 16376, 16505], [16375, 16505, 16504], [16376, 16377, 16505], [16377, 16506, 16505], [16377, 16378, 16507], [16377, 16507, 16506], [16378, 16379, 16507], [16379, 16508, 16507], [16379, 16380, 16509], [16379, 16509, 16508], [16380, 16381, 16509], [16381, 16510, 16509], [16381, 16382, 16511], [16381, 16511, 16510], [16382, 16383, 16511], [16383, 16512, 16511], [16383, 16384, 16513], [16383, 16513, 16512], [16384, 16385, 16513], [16385, 16514, 16513], [16385, 16386, 16515], [16385, 16515, 16514], [16386, 16387, 16515], [16387, 16516, 16515], [16387, 16388, 16517], [16387, 16517, 16516], [16388, 16389, 16517], [16389, 16518, 16517], [16389, 16390, 16519], [16389, 16519, 16518], [16390, 16391, 16519], [16391, 16520, 16519], [16391, 16392, 16521], [16391, 16521, 16520], [16392, 16393, 16521], [16393, 16522, 16521], [16393, 16394, 16523], [16393, 16523, 16522], [16394, 16395, 16523], [16395, 16524, 16523], [16395, 16396, 16525], [16395, 16525, 16524], [16396, 16397, 16525], [16397, 16526, 16525], [16397, 16398, 16527], [16397, 16527, 16526], [16398, 16399, 16527], [16399, 16528, 16527], [16399, 16400, 16529], [16399, 16529, 16528], [16400, 16401, 16529], [16401, 16530, 16529], [16401, 16402, 16531], [16401, 16531, 16530], [16402, 16403, 16531], [16403, 16532, 16531], [16403, 16404, 16533], [16403, 16533, 16532], [16404, 16405, 16533], [16405, 16534, 16533], [16405, 16406, 16535], [16405, 16535, 16534], [16406, 16407, 16535], [16407, 16536, 16535], [16407, 16408, 16537], [16407, 16537, 16536], [16408, 16409, 16537], [16409, 16538, 16537], [16409, 16410, 16539], [16409, 16539, 16538], [16410, 16411, 16539], [16411, 
16540, 16539], [16411, 16412, 16541], [16411, 16541, 16540], [16412, 16413, 16541], [16413, 16542, 16541], [16413, 16414, 16543], [16413, 16543, 16542], [16414, 16415, 16543], [16415, 16544, 16543], [16415, 16416, 16545], [16415, 16545, 16544], [16416, 16417, 16545], [16417, 16546, 16545], [16417, 16418, 16547], [16417, 16547, 16546], [16418, 16419, 16547], [16419, 16548, 16547], [16419, 16420, 16549], [16419, 16549, 16548], [16420, 16421, 16549], [16421, 16550, 16549], [16421, 16422, 16551], [16421, 16551, 16550], [16422, 16423, 16551], [16423, 16552, 16551], [16423, 16424, 16553], [16423, 16553, 16552], [16424, 16425, 16553], [16425, 16554, 16553], [16425, 16426, 16555], [16425, 16555, 16554], [16426, 16427, 16555], [16427, 16556, 16555], [16427, 16428, 16557], [16427, 16557, 16556], [16429, 16430, 16559], [16429, 16559, 16558], [16430, 16431, 16559], [16431, 16560, 16559], [16431, 16432, 16561], [16431, 16561, 16560], [16432, 16433, 16561], [16433, 16562, 16561], [16433, 16434, 16563], [16433, 16563, 16562], [16434, 16435, 16563], [16435, 16564, 16563], [16435, 16436, 16565], [16435, 16565, 16564], [16436, 16437, 16565], [16437, 16566, 16565], [16437, 16438, 16567], [16437, 16567, 16566], [16438, 16439, 16567], [16439, 16568, 16567], [16439, 16440, 16569], [16439, 16569, 16568], [16440, 16441, 16569], [16441, 16570, 16569], [16441, 16442, 16571], [16441, 16571, 16570], [16442, 16443, 16571], [16443, 16572, 16571], [16443, 16444, 16573], [16443, 16573, 16572], [16444, 16445, 16573], [16445, 16574, 16573], [16445, 16446, 16575], [16445, 16575, 16574], [16446, 16447, 16575], [16447, 16576, 16575], [16447, 16448, 16577], [16447, 16577, 16576], [16448, 16449, 16577], [16449, 16578, 16577], [16449, 16450, 16579], [16449, 16579, 16578], [16450, 16451, 16579], [16451, 16580, 16579], [16451, 16452, 16581], [16451, 16581, 16580], [16452, 16453, 16581], [16453, 16582, 16581], [16453, 16454, 16583], [16453, 16583, 16582], [16454, 16455, 16583], [16455, 16584, 16583], [16455, 16456, 16585], [16455, 16585, 16584], [16456, 16457, 16585], [16457, 16586, 16585], [16457, 16458, 16587], [16457, 16587, 16586], [16458, 16459, 16587], [16459, 16588, 16587], [16459, 16460, 16589], [16459, 16589, 16588], [16460, 16461, 16589], [16461, 16590, 16589], [16461, 16462, 16591], [16461, 16591, 16590], [16462, 16463, 16591], [16463, 16592, 16591], [16463, 16464, 16593], [16463, 16593, 16592], [16464, 16465, 16593], [16465, 16594, 16593], [16465, 16466, 16595], [16465, 16595, 16594], [16466, 16467, 16595], [16467, 16596, 16595], [16467, 16468, 16597], [16467, 16597, 16596], [16468, 16469, 16597], [16469, 16598, 16597], [16469, 16470, 16599], [16469, 16599, 16598], [16470, 16471, 16599], [16471, 16600, 16599], [16471, 16472, 16601], [16471, 16601, 16600], [16472, 16473, 16601], [16473, 16602, 16601], [16473, 16474, 16603], [16473, 16603, 16602], [16474, 16475, 16603], [16475, 16604, 16603], [16475, 16476, 16605], [16475, 16605, 16604], [16476, 16477, 16605], [16477, 16606, 16605], [16477, 16478, 16607], [16477, 16607, 16606], [16478, 16479, 16607], [16479, 16608, 16607], [16479, 16480, 16609], [16479, 16609, 16608], [16480, 16481, 16609], [16481, 16610, 16609], [16481, 16482, 16611], [16481, 16611, 16610], [16482, 16483, 16611], [16483, 16612, 16611], [16483, 16484, 16613], [16483, 16613, 16612], [16484, 16485, 16613], [16485, 16614, 16613], [16485, 16486, 16615], [16485, 16615, 16614], [16486, 16487, 16615], [16487, 16616, 16615], [16487, 16488, 16617], [16487, 16617, 16616], [16488, 16489, 16617], [16489, 16618, 
16617], [16489, 16490, 16619], [16489, 16619, 16618], [16490, 16491, 16619], [16491, 16620, 16619], [16491, 16492, 16621], [16491, 16621, 16620], [16492, 16493, 16621], [16493, 16622, 16621], [16493, 16494, 16623], [16493, 16623, 16622], [16494, 16495, 16623], [16495, 16624, 16623], [16495, 16496, 16625], [16495, 16625, 16624], [16496, 16497, 16625], [16497, 16626, 16625], [16497, 16498, 16627], [16497, 16627, 16626], [16498, 16499, 16627], [16499, 16628, 16627], [16499, 16500, 16629], [16499, 16629, 16628], [16500, 16501, 16629], [16501, 16630, 16629], [16501, 16502, 16631], [16501, 16631, 16630], [16502, 16503, 16631], [16503, 16632, 16631], [16503, 16504, 16633], [16503, 16633, 16632], [16504, 16505, 16633], [16505, 16634, 16633], [16505, 16506, 16635], [16505, 16635, 16634], [16506, 16507, 16635], [16507, 16636, 16635], [16507, 16508, 16637], [16507, 16637, 16636], [16508, 16509, 16637], [16509, 16638, 16637], [16509, 16510, 16639], [16509, 16639, 16638], [16510, 16511, 16639], [16511, 16640, 16639], [16511, 16512, 16641], [16511, 16641, 16640], [16512, 16513, 16641], [16513, 16642, 16641], [16513, 16514, 16643], [16513, 16643, 16642], [16514, 16515, 16643], [16515, 16644, 16643], [16515, 16516, 16645], [16515, 16645, 16644], [16516, 16517, 16645], [16517, 16646, 16645], [16517, 16518, 16647], [16517, 16647, 16646], [16518, 16519, 16647], [16519, 16648, 16647], [16519, 16520, 16649], [16519, 16649, 16648], [16520, 16521, 16649], [16521, 16650, 16649], [16521, 16522, 16651], [16521, 16651, 16650], [16522, 16523, 16651], [16523, 16652, 16651], [16523, 16524, 16653], [16523, 16653, 16652], [16524, 16525, 16653], [16525, 16654, 16653], [16525, 16526, 16655], [16525, 16655, 16654], [16526, 16527, 16655], [16527, 16656, 16655], [16527, 16528, 16657], [16527, 16657, 16656], [16528, 16529, 16657], [16529, 16658, 16657], [16529, 16530, 16659], [16529, 16659, 16658], [16530, 16531, 16659], [16531, 16660, 16659], [16531, 16532, 16661], [16531, 16661, 16660], [16532, 16533, 16661], [16533, 16662, 16661], [16533, 16534, 16663], [16533, 16663, 16662], [16534, 16535, 16663], [16535, 16664, 16663], [16535, 16536, 16665], [16535, 16665, 16664], [16536, 16537, 16665], [16537, 16666, 16665], [16537, 16538, 16667], [16537, 16667, 16666], [16538, 16539, 16667], [16539, 16668, 16667], [16539, 16540, 16669], [16539, 16669, 16668], [16540, 16541, 16669], [16541, 16670, 16669], [16541, 16542, 16671], [16541, 16671, 16670], [16542, 16543, 16671], [16543, 16672, 16671], [16543, 16544, 16673], [16543, 16673, 16672], [16544, 16545, 16673], [16545, 16674, 16673], [16545, 16546, 16675], [16545, 16675, 16674], [16546, 16547, 16675], [16547, 16676, 16675], [16547, 16548, 16677], [16547, 16677, 16676], [16548, 16549, 16677], [16549, 16678, 16677], [16549, 16550, 16679], [16549, 16679, 16678], [16550, 16551, 16679], [16551, 16680, 16679], [16551, 16552, 16681], [16551, 16681, 16680], [16552, 16553, 16681], [16553, 16682, 16681], [16553, 16554, 16683], [16553, 16683, 16682], [16554, 16555, 16683], [16555, 16684, 16683], [16555, 16556, 16685], [16555, 16685, 16684], [16556, 16557, 16685], [16557, 16686, 16685], [16558, 16559, 16687], [16559, 16688, 16687], [16559, 16560, 16689], [16559, 16689, 16688], [16560, 16561, 16689], [16561, 16690, 16689], [16561, 16562, 16691], [16561, 16691, 16690], [16562, 16563, 16691], [16563, 16692, 16691], [16563, 16564, 16693], [16563, 16693, 16692], [16564, 16565, 16693], [16565, 16694, 16693], [16565, 16566, 16695], [16565, 16695, 16694], [16566, 16567, 16695], [16567, 16696, 16695], 
[16567, 16568, 16697], [16567, 16697, 16696], [16568, 16569, 16697], [16569, 16698, 16697], [16569, 16570, 16699], [16569, 16699, 16698], [16570, 16571, 16699], [16571, 16700, 16699], [16571, 16572, 16701], [16571, 16701, 16700], [16572, 16573, 16701], [16573, 16702, 16701], [16573, 16574, 16703], [16573, 16703, 16702], [16574, 16575, 16703], [16575, 16704, 16703], [16575, 16576, 16705], [16575, 16705, 16704], [16576, 16577, 16705], [16577, 16706, 16705], [16577, 16578, 16707], [16577, 16707, 16706], [16578, 16579, 16707], [16579, 16708, 16707], [16579, 16580, 16709], [16579, 16709, 16708], [16580, 16581, 16709], [16581, 16710, 16709], [16581, 16582, 16711], [16581, 16711, 16710], [16582, 16583, 16711], [16583, 16712, 16711], [16583, 16584, 16713], [16583, 16713, 16712], [16584, 16585, 16713], [16585, 16714, 16713], [16585, 16586, 16715], [16585, 16715, 16714], [16586, 16587, 16715], [16587, 16716, 16715], [16587, 16588, 16717], [16587, 16717, 16716], [16588, 16589, 16717], [16589, 16718, 16717], [16589, 16590, 16719], [16589, 16719, 16718], [16590, 16591, 16719], [16591, 16720, 16719], [16591, 16592, 16721], [16591, 16721, 16720], [16592, 16593, 16721], [16593, 16722, 16721], [16593, 16594, 16723], [16593, 16723, 16722], [16594, 16595, 16723], [16595, 16724, 16723], [16595, 16596, 16725], [16595, 16725, 16724], [16596, 16597, 16725], [16597, 16726, 16725], [16597, 16598, 16727], [16597, 16727, 16726], [16598, 16599, 16727], [16599, 16728, 16727], [16599, 16600, 16729], [16599, 16729, 16728], [16600, 16601, 16729], [16601, 16730, 16729], [16601, 16602, 16731], [16601, 16731, 16730], [16602, 16603, 16731], [16603, 16732, 16731], [16603, 16604, 16733], [16603, 16733, 16732], [16604, 16605, 16733], [16605, 16734, 16733], [16605, 16606, 16735], [16605, 16735, 16734], [16606, 16607, 16735], [16607, 16736, 16735], [16607, 16608, 16737], [16607, 16737, 16736], [16608, 16609, 16737], [16609, 16738, 16737], [16609, 16610, 16739], [16609, 16739, 16738], [16610, 16611, 16739], [16611, 16740, 16739], [16611, 16612, 16741], [16611, 16741, 16740], [16612, 16613, 16741], [16613, 16742, 16741], [16613, 16614, 16743], [16613, 16743, 16742], [16614, 16615, 16743], [16615, 16744, 16743], [16615, 16616, 16745], [16615, 16745, 16744], [16616, 16617, 16745], [16617, 16746, 16745], [16617, 16618, 16747], [16617, 16747, 16746], [16618, 16619, 16747], [16619, 16748, 16747], [16619, 16620, 16749], [16619, 16749, 16748], [16620, 16621, 16749], [16621, 16750, 16749], [16621, 16622, 16751], [16621, 16751, 16750], [16622, 16623, 16751], [16623, 16752, 16751], [16623, 16624, 16753], [16623, 16753, 16752], [16624, 16625, 16753], [16625, 16754, 16753], [16625, 16626, 16755], [16625, 16755, 16754], [16626, 16627, 16755], [16627, 16756, 16755], [16627, 16628, 16757], [16627, 16757, 16756], [16628, 16629, 16757], [16629, 16758, 16757], [16629, 16630, 16759], [16629, 16759, 16758], [16630, 16631, 16759], [16631, 16760, 16759], [16631, 16632, 16761], [16631, 16761, 16760], [16632, 16633, 16761], [16633, 16762, 16761], [16633, 16634, 16763], [16633, 16763, 16762], [16634, 16635, 16763], [16635, 16764, 16763], [16635, 16636, 16765], [16635, 16765, 16764], [16636, 16637, 16765], [16637, 16766, 16765], [16637, 16638, 16767], [16637, 16767, 16766], [16638, 16639, 16767], [16639, 16768, 16767], [16639, 16640, 16769], [16639, 16769, 16768], [16640, 16641, 16769], [16641, 16770, 16769], [16641, 16642, 16771], [16641, 16771, 16770], [16642, 16643, 16771], [16643, 16772, 16771], [16643, 16644, 16773], [16643, 16773, 16772], [16644, 
16645, 16773], [16645, 16774, 16773], [16645, 16646, 16775], [16645, 16775, 16774], [16646, 16647, 16775], [16647, 16776, 16775], [16647, 16648, 16777], [16647, 16777, 16776], [16648, 16649, 16777], [16649, 16778, 16777], [16649, 16650, 16779], [16649, 16779, 16778], [16650, 16651, 16779], [16651, 16780, 16779], [16651, 16652, 16781], [16651, 16781, 16780], [16652, 16653, 16781], [16653, 16782, 16781], [16653, 16654, 16783], [16653, 16783, 16782], [16654, 16655, 16783], [16655, 16784, 16783], [16655, 16656, 16785], [16655, 16785, 16784], [16656, 16657, 16785], [16657, 16786, 16785], [16657, 16658, 16787], [16657, 16787, 16786], [16658, 16659, 16787], [16659, 16788, 16787], [16659, 16660, 16789], [16659, 16789, 16788], [16660, 16661, 16789], [16661, 16790, 16789], [16661, 16662, 16791], [16661, 16791, 16790], [16662, 16663, 16791], [16663, 16792, 16791], [16663, 16664, 16793], [16663, 16793, 16792], [16664, 16665, 16793], [16665, 16794, 16793], [16665, 16666, 16795], [16665, 16795, 16794], [16666, 16667, 16795], [16667, 16796, 16795], [16667, 16668, 16797], [16667, 16797, 16796], [16668, 16669, 16797], [16669, 16798, 16797], [16669, 16670, 16799], [16669, 16799, 16798], [16670, 16671, 16799], [16671, 16800, 16799], [16671, 16672, 16801], [16671, 16801, 16800], [16672, 16673, 16801], [16673, 16802, 16801], [16673, 16674, 16803], [16673, 16803, 16802], [16674, 16675, 16803], [16675, 16804, 16803], [16675, 16676, 16805], [16675, 16805, 16804], [16676, 16677, 16805], [16677, 16806, 16805], [16677, 16678, 16807], [16677, 16807, 16806], [16678, 16679, 16807], [16679, 16808, 16807], [16679, 16680, 16809], [16679, 16809, 16808], [16680, 16681, 16809], [16681, 16810, 16809], [16681, 16682, 16811], [16681, 16811, 16810], [16682, 16683, 16811], [16683, 16812, 16811], [16683, 16684, 16813], [16683, 16813, 16812], [16684, 16685, 16813], [16685, 16814, 16813], [16685, 16686, 16815], [16685, 16815, 16814], [16687, 16688, 16817], [16687, 16817, 16816], [16688, 16689, 16817], [16689, 16818, 16817], [16689, 16690, 16819], [16689, 16819, 16818], [16690, 16691, 16819], [16691, 16820, 16819], [16691, 16692, 16821], [16691, 16821, 16820], [16692, 16693, 16821], [16693, 16822, 16821], [16693, 16694, 16823], [16693, 16823, 16822], [16694, 16695, 16823], [16695, 16824, 16823], [16695, 16696, 16825], [16695, 16825, 16824], [16696, 16697, 16825], [16697, 16826, 16825], [16697, 16698, 16827], [16697, 16827, 16826], [16698, 16699, 16827], [16699, 16828, 16827], [16699, 16700, 16829], [16699, 16829, 16828], [16700, 16701, 16829], [16701, 16830, 16829], [16701, 16702, 16831], [16701, 16831, 16830], [16702, 16703, 16831], [16703, 16832, 16831], [16703, 16704, 16833], [16703, 16833, 16832], [16704, 16705, 16833], [16705, 16834, 16833], [16705, 16706, 16835], [16705, 16835, 16834], [16706, 16707, 16835], [16707, 16836, 16835], [16707, 16708, 16837], [16707, 16837, 16836], [16708, 16709, 16837], [16709, 16838, 16837], [16709, 16710, 16839], [16709, 16839, 16838], [16710, 16711, 16839], [16711, 16840, 16839], [16711, 16712, 16841], [16711, 16841, 16840], [16712, 16713, 16841], [16713, 16842, 16841], [16713, 16714, 16843], [16713, 16843, 16842], [16714, 16715, 16843], [16715, 16844, 16843], [16715, 16716, 16845], [16715, 16845, 16844], [16716, 16717, 16845], [16717, 16846, 16845], [16717, 16718, 16847], [16717, 16847, 16846], [16718, 16719, 16847], [16719, 16848, 16847], [16719, 16720, 16849], [16719, 16849, 16848], [16720, 16721, 16849], [16721, 16850, 16849], [16721, 16722, 16851], [16721, 16851, 16850], [16722, 16723, 
16851], [16723, 16852, 16851], [16723, 16724, 16853], [16723, 16853, 16852], [16724, 16725, 16853], [16725, 16854, 16853], [16725, 16726, 16855], [16725, 16855, 16854], [16726, 16727, 16855], [16727, 16856, 16855], [16727, 16728, 16857], [16727, 16857, 16856], [16728, 16729, 16857], [16729, 16858, 16857], [16729, 16730, 16859], [16729, 16859, 16858], [16730, 16731, 16859], [16731, 16860, 16859], [16731, 16732, 16861], [16731, 16861, 16860], [16732, 16733, 16861], [16733, 16862, 16861], [16733, 16734, 16863], [16733, 16863, 16862], [16734, 16735, 16863], [16735, 16864, 16863], [16735, 16736, 16865], [16735, 16865, 16864], [16736, 16737, 16865], [16737, 16866, 16865], [16737, 16738, 16867], [16737, 16867, 16866], [16738, 16739, 16867], [16739, 16868, 16867], [16739, 16740, 16869], [16739, 16869, 16868], [16740, 16741, 16869], [16741, 16870, 16869], [16741, 16742, 16871], [16741, 16871, 16870], [16742, 16743, 16871], [16743, 16872, 16871], [16743, 16744, 16873], [16743, 16873, 16872], [16744, 16745, 16873], [16745, 16874, 16873], [16745, 16746, 16875], [16745, 16875, 16874], [16746, 16747, 16875], [16747, 16876, 16875], [16747, 16748, 16877], [16747, 16877, 16876], [16748, 16749, 16877], [16749, 16878, 16877], [16749, 16750, 16879], [16749, 16879, 16878], [16750, 16751, 16879], [16751, 16880, 16879], [16751, 16752, 16881], [16751, 16881, 16880], [16752, 16753, 16881], [16753, 16882, 16881], [16753, 16754, 16883], [16753, 16883, 16882], [16754, 16755, 16883], [16755, 16884, 16883], [16755, 16756, 16885], [16755, 16885, 16884], [16756, 16757, 16885], [16757, 16886, 16885], [16757, 16758, 16887], [16757, 16887, 16886], [16758, 16759, 16887], [16759, 16888, 16887], [16759, 16760, 16889], [16759, 16889, 16888], [16760, 16761, 16889], [16761, 16890, 16889], [16761, 16762, 16891], [16761, 16891, 16890], [16762, 16763, 16891], [16763, 16892, 16891], [16763, 16764, 16893], [16763, 16893, 16892], [16764, 16765, 16893], [16765, 16894, 16893], [16765, 16766, 16895], [16765, 16895, 16894], [16766, 16767, 16895], [16767, 16896, 16895], [16767, 16768, 16897], [16767, 16897, 16896], [16768, 16769, 16897], [16769, 16898, 16897], [16769, 16770, 16899], [16769, 16899, 16898], [16770, 16771, 16899], [16771, 16900, 16899], [16771, 16772, 16901], [16771, 16901, 16900], [16772, 16773, 16901], [16773, 16902, 16901], [16773, 16774, 16903], [16773, 16903, 16902], [16774, 16775, 16903], [16775, 16904, 16903], [16775, 16776, 16905], [16775, 16905, 16904], [16776, 16777, 16905], [16777, 16906, 16905], [16777, 16778, 16907], [16777, 16907, 16906], [16778, 16779, 16907], [16779, 16908, 16907], [16779, 16780, 16909], [16779, 16909, 16908], [16780, 16781, 16909], [16781, 16910, 16909], [16781, 16782, 16911], [16781, 16911, 16910], [16782, 16783, 16911], [16783, 16912, 16911], [16783, 16784, 16913], [16783, 16913, 16912], [16784, 16785, 16913], [16785, 16914, 16913], [16785, 16786, 16915], [16785, 16915, 16914], [16786, 16787, 16915], [16787, 16916, 16915], [16787, 16788, 16917], [16787, 16917, 16916], [16788, 16789, 16917], [16789, 16918, 16917], [16789, 16790, 16919], [16789, 16919, 16918], [16790, 16791, 16919], [16791, 16920, 16919], [16791, 16792, 16921], [16791, 16921, 16920], [16792, 16793, 16921], [16793, 16922, 16921], [16793, 16794, 16923], [16793, 16923, 16922], [16794, 16795, 16923], [16795, 16924, 16923], [16795, 16796, 16925], [16795, 16925, 16924], [16796, 16797, 16925], [16797, 16926, 16925], [16797, 16798, 16927], [16797, 16927, 16926], [16798, 16799, 16927], [16799, 16928, 16927], [16799, 16800, 16929], 
[16799, 16929, 16928], [16800, 16801, 16929], [16801, 16930, 16929], [16801, 16802, 16931], [16801, 16931, 16930], [16802, 16803, 16931], [16803, 16932, 16931], [16803, 16804, 16933], [16803, 16933, 16932], [16804, 16805, 16933], [16805, 16934, 16933], [16805, 16806, 16935], [16805, 16935, 16934], [16806, 16807, 16935], [16807, 16936, 16935], [16807, 16808, 16937], [16807, 16937, 16936], [16808, 16809, 16937], [16809, 16938, 16937], [16809, 16810, 16939], [16809, 16939, 16938], [16810, 16811, 16939], [16811, 16940, 16939], [16811, 16812, 16941], [16811, 16941, 16940], [16812, 16813, 16941], [16813, 16942, 16941], [16813, 16814, 16943], [16813, 16943, 16942], [16814, 16815, 16943], [16815, 16944, 16943], [16816, 16817, 16945], [16817, 16946, 16945], [16817, 16818, 16947], [16817, 16947, 16946], [16818, 16819, 16947], [16819, 16948, 16947], [16819, 16820, 16949], [16819, 16949, 16948], [16820, 16821, 16949], [16821, 16950, 16949], [16821, 16822, 16951], [16821, 16951, 16950], [16822, 16823, 16951], [16823, 16952, 16951], [16823, 16824, 16953], [16823, 16953, 16952], [16824, 16825, 16953], [16825, 16954, 16953], [16825, 16826, 16955], [16825, 16955, 16954], [16826, 16827, 16955], [16827, 16956, 16955], [16827, 16828, 16957], [16827, 16957, 16956], [16828, 16829, 16957], [16829, 16958, 16957], [16829, 16830, 16959], [16829, 16959, 16958], [16830, 16831, 16959], [16831, 16960, 16959], [16831, 16832, 16961], [16831, 16961, 16960], [16832, 16833, 16961], [16833, 16962, 16961], [16833, 16834, 16963], [16833, 16963, 16962], [16834, 16835, 16963], [16835, 16964, 16963], [16835, 16836, 16965], [16835, 16965, 16964], [16836, 16837, 16965], [16837, 16966, 16965], [16837, 16838, 16967], [16837, 16967, 16966], [16838, 16839, 16967], [16839, 16968, 16967], [16839, 16840, 16969], [16839, 16969, 16968], [16840, 16841, 16969], [16841, 16970, 16969], [16841, 16842, 16971], [16841, 16971, 16970], [16842, 16843, 16971], [16843, 16972, 16971], [16843, 16844, 16973], [16843, 16973, 16972], [16844, 16845, 16973], [16845, 16974, 16973], [16845, 16846, 16975], [16845, 16975, 16974], [16846, 16847, 16975], [16847, 16976, 16975], [16847, 16848, 16977], [16847, 16977, 16976], [16848, 16849, 16977], [16849, 16978, 16977], [16849, 16850, 16979], [16849, 16979, 16978], [16850, 16851, 16979], [16851, 16980, 16979], [16851, 16852, 16981], [16851, 16981, 16980], [16852, 16853, 16981], [16853, 16982, 16981], [16853, 16854, 16983], [16853, 16983, 16982], [16854, 16855, 16983], [16855, 16984, 16983], [16855, 16856, 16985], [16855, 16985, 16984], [16856, 16857, 16985], [16857, 16986, 16985], [16857, 16858, 16987], [16857, 16987, 16986], [16858, 16859, 16987], [16859, 16988, 16987], [16859, 16860, 16989], [16859, 16989, 16988], [16860, 16861, 16989], [16861, 16990, 16989], [16861, 16862, 16991], [16861, 16991, 16990], [16862, 16863, 16991], [16863, 16992, 16991], [16863, 16864, 16993], [16863, 16993, 16992], [16864, 16865, 16993], [16865, 16994, 16993], [16865, 16866, 16995], [16865, 16995, 16994], [16866, 16867, 16995], [16867, 16996, 16995], [16867, 16868, 16997], [16867, 16997, 16996], [16868, 16869, 16997], [16869, 16998, 16997], [16869, 16870, 16999], [16869, 16999, 16998], [16870, 16871, 16999], [16871, 17000, 16999], [16871, 16872, 17001], [16871, 17001, 17000], [16872, 16873, 17001], [16873, 17002, 17001], [16873, 16874, 17003], [16873, 17003, 17002], [16874, 16875, 17003], [16875, 17004, 17003], [16875, 16876, 17005], [16875, 17005, 17004], [16876, 16877, 17005], [16877, 17006, 17005], [16877, 16878, 17007], [16877, 
17007, 17006], [16878, 16879, 17007], [16879, 17008, 17007], [16879, 16880, 17009], [16879, 17009, 17008], [16880, 16881, 17009], [16881, 17010, 17009], [16881, 16882, 17011], [16881, 17011, 17010], [16882, 16883, 17011], [16883, 17012, 17011], [16883, 16884, 17013], [16883, 17013, 17012], [16884, 16885, 17013], [16885, 17014, 17013], [16885, 16886, 17015], [16885, 17015, 17014], [16886, 16887, 17015], [16887, 17016, 17015], [16887, 16888, 17017], [16887, 17017, 17016], [16888, 16889, 17017], [16889, 17018, 17017], [16889, 16890, 17019], [16889, 17019, 17018], [16890, 16891, 17019], [16891, 17020, 17019], [16891, 16892, 17021], [16891, 17021, 17020], [16892, 16893, 17021], [16893, 17022, 17021], [16893, 16894, 17023], [16893, 17023, 17022], [16894, 16895, 17023], [16895, 17024, 17023], [16895, 16896, 17025], [16895, 17025, 17024], [16896, 16897, 17025], [16897, 17026, 17025], [16897, 16898, 17027], [16897, 17027, 17026], [16898, 16899, 17027], [16899, 17028, 17027], [16899, 16900, 17029], [16899, 17029, 17028], [16900, 16901, 17029], [16901, 17030, 17029], [16901, 16902, 17031], [16901, 17031, 17030], [16902, 16903, 17031], [16903, 17032, 17031], [16903, 16904, 17033], [16903, 17033, 17032], [16904, 16905, 17033], [16905, 17034, 17033], [16905, 16906, 17035], [16905, 17035, 17034], [16906, 16907, 17035], [16907, 17036, 17035], [16907, 16908, 17037], [16907, 17037, 17036], [16908, 16909, 17037], [16909, 17038, 17037], [16909, 16910, 17039], [16909, 17039, 17038], [16910, 16911, 17039], [16911, 17040, 17039], [16911, 16912, 17041], [16911, 17041, 17040], [16912, 16913, 17041], [16913, 17042, 17041], [16913, 16914, 17043], [16913, 17043, 17042], [16914, 16915, 17043], [16915, 17044, 17043], [16915, 16916, 17045], [16915, 17045, 17044], [16916, 16917, 17045], [16917, 17046, 17045], [16917, 16918, 17047], [16917, 17047, 17046], [16918, 16919, 17047], [16919, 17048, 17047], [16919, 16920, 17049], [16919, 17049, 17048], [16920, 16921, 17049], [16921, 17050, 17049], [16921, 16922, 17051], [16921, 17051, 17050], [16922, 16923, 17051], [16923, 17052, 17051], [16923, 16924, 17053], [16923, 17053, 17052], [16924, 16925, 17053], [16925, 17054, 17053], [16925, 16926, 17055], [16925, 17055, 17054], [16926, 16927, 17055], [16927, 17056, 17055], [16927, 16928, 17057], [16927, 17057, 17056], [16928, 16929, 17057], [16929, 17058, 17057], [16929, 16930, 17059], [16929, 17059, 17058], [16930, 16931, 17059], [16931, 17060, 17059], [16931, 16932, 17061], [16931, 17061, 17060], [16932, 16933, 17061], [16933, 17062, 17061], [16933, 16934, 17063], [16933, 17063, 17062], [16934, 16935, 17063], [16935, 17064, 17063], [16935, 16936, 17065], [16935, 17065, 17064], [16936, 16937, 17065], [16937, 17066, 17065], [16937, 16938, 17067], [16937, 17067, 17066], [16938, 16939, 17067], [16939, 17068, 17067], [16939, 16940, 17069], [16939, 17069, 17068], [16940, 16941, 17069], [16941, 17070, 17069], [16941, 16942, 17071], [16941, 17071, 17070], [16942, 16943, 17071], [16943, 17072, 17071], [16943, 16944, 17073], [16943, 17073, 17072], [16945, 16946, 17075], [16945, 17075, 17074], [16946, 16947, 17075], [16947, 17076, 17075], [16947, 16948, 17077], [16947, 17077, 17076], [16948, 16949, 17077], [16949, 17078, 17077], [16949, 16950, 17079], [16949, 17079, 17078], [16950, 16951, 17079], [16951, 17080, 17079], [16951, 16952, 17081], [16951, 17081, 17080], [16952, 16953, 17081], [16953, 17082, 17081], [16953, 16954, 17083], [16953, 17083, 17082], [16954, 16955, 17083], [16955, 17084, 17083], [16955, 16956, 17085], [16955, 17085, 
17084], [16956, 16957, 17085], [16957, 17086, 17085], [16957, 16958, 17087], [16957, 17087, 17086], [16958, 16959, 17087], [16959, 17088, 17087], [16959, 16960, 17089], [16959, 17089, 17088], [16960, 16961, 17089], [16961, 17090, 17089], [16961, 16962, 17091], [16961, 17091, 17090], [16962, 16963, 17091], [16963, 17092, 17091], [16963, 16964, 17093], [16963, 17093, 17092], [16964, 16965, 17093], [16965, 17094, 17093], [16965, 16966, 17095], [16965, 17095, 17094], [16966, 16967, 17095], [16967, 17096, 17095], [16967, 16968, 17097], [16967, 17097, 17096], [16968, 16969, 17097], [16969, 17098, 17097], [16969, 16970, 17099], [16969, 17099, 17098], [16970, 16971, 17099], [16971, 17100, 17099], [16971, 16972, 17101], [16971, 17101, 17100], [16972, 16973, 17101], [16973, 17102, 17101], [16973, 16974, 17103], [16973, 17103, 17102], [16974, 16975, 17103], [16975, 17104, 17103], [16975, 16976, 17105], [16975, 17105, 17104], [16976, 16977, 17105], [16977, 17106, 17105], [16977, 16978, 17107], [16977, 17107, 17106], [16978, 16979, 17107], [16979, 17108, 17107], [16979, 16980, 17109], [16979, 17109, 17108], [16980, 16981, 17109], [16981, 17110, 17109], [16981, 16982, 17111], [16981, 17111, 17110], [16982, 16983, 17111], [16983, 17112, 17111], [16983, 16984, 17113], [16983, 17113, 17112], [16984, 16985, 17113], [16985, 17114, 17113], [16985, 16986, 17115], [16985, 17115, 17114], [16986, 16987, 17115], [16987, 17116, 17115], [16987, 16988, 17117], [16987, 17117, 17116], [16988, 16989, 17117], [16989, 17118, 17117], [16989, 16990, 17119], [16989, 17119, 17118], [16990, 16991, 17119], [16991, 17120, 17119], [16991, 16992, 17121], [16991, 17121, 17120], [16992, 16993, 17121], [16993, 17122, 17121], [16993, 16994, 17123], [16993, 17123, 17122], [16994, 16995, 17123], [16995, 17124, 17123], [16995, 16996, 17125], [16995, 17125, 17124], [16996, 16997, 17125], [16997, 17126, 17125], [16997, 16998, 17127], [16997, 17127, 17126], [16998, 16999, 17127], [16999, 17128, 17127], [16999, 17000, 17129], [16999, 17129, 17128], [17000, 17001, 17129], [17001, 17130, 17129], [17001, 17002, 17131], [17001, 17131, 17130], [17002, 17003, 17131], [17003, 17132, 17131], [17003, 17004, 17133], [17003, 17133, 17132], [17004, 17005, 17133], [17005, 17134, 17133], [17005, 17006, 17135], [17005, 17135, 17134], [17006, 17007, 17135], [17007, 17136, 17135], [17007, 17008, 17137], [17007, 17137, 17136], [17008, 17009, 17137], [17009, 17138, 17137], [17009, 17010, 17139], [17009, 17139, 17138], [17010, 17011, 17139], [17011, 17140, 17139], [17011, 17012, 17141], [17011, 17141, 17140], [17012, 17013, 17141], [17013, 17142, 17141], [17013, 17014, 17143], [17013, 17143, 17142], [17014, 17015, 17143], [17015, 17144, 17143], [17015, 17016, 17145], [17015, 17145, 17144], [17016, 17017, 17145], [17017, 17146, 17145], [17017, 17018, 17147], [17017, 17147, 17146], [17018, 17019, 17147], [17019, 17148, 17147], [17019, 17020, 17149], [17019, 17149, 17148], [17020, 17021, 17149], [17021, 17150, 17149], [17021, 17022, 17151], [17021, 17151, 17150], [17022, 17023, 17151], [17023, 17152, 17151], [17023, 17024, 17153], [17023, 17153, 17152], [17024, 17025, 17153], [17025, 17154, 17153], [17025, 17026, 17155], [17025, 17155, 17154], [17026, 17027, 17155], [17027, 17156, 17155], [17027, 17028, 17157], [17027, 17157, 17156], [17028, 17029, 17157], [17029, 17158, 17157], [17029, 17030, 17159], [17029, 17159, 17158], [17030, 17031, 17159], [17031, 17160, 17159], [17031, 17032, 17161], [17031, 17161, 17160], [17032, 17033, 17161], [17033, 17162, 17161], 
[17033, 17034, 17163], [17033, 17163, 17162], [17034, 17035, 17163], [17035, 17164, 17163], [17035, 17036, 17165], [17035, 17165, 17164], [17036, 17037, 17165], [17037, 17166, 17165], [17037, 17038, 17167], [17037, 17167, 17166], [17038, 17039, 17167], [17039, 17168, 17167], [17039, 17040, 17169], [17039, 17169, 17168], [17040, 17041, 17169], [17041, 17170, 17169], [17041, 17042, 17171], [17041, 17171, 17170], [17042, 17043, 17171], [17043, 17172, 17171], [17043, 17044, 17173], [17043, 17173, 17172], [17044, 17045, 17173], [17045, 17174, 17173], [17045, 17046, 17175], [17045, 17175, 17174], [17046, 17047, 17175], [17047, 17176, 17175], [17047, 17048, 17177], [17047, 17177, 17176], [17048, 17049, 17177], [17049, 17178, 17177], [17049, 17050, 17179], [17049, 17179, 17178], [17050, 17051, 17179], [17051, 17180, 17179], [17051, 17052, 17181], [17051, 17181, 17180], [17052, 17053, 17181], [17053, 17182, 17181], [17053, 17054, 17183], [17053, 17183, 17182], [17054, 17055, 17183], [17055, 17184, 17183], [17055, 17056, 17185], [17055, 17185, 17184], [17056, 17057, 17185], [17057, 17186, 17185], [17057, 17058, 17187], [17057, 17187, 17186], [17058, 17059, 17187], [17059, 17188, 17187], [17059, 17060, 17189], [17059, 17189, 17188], [17060, 17061, 17189], [17061, 17190, 17189], [17061, 17062, 17191], [17061, 17191, 17190], [17062, 17063, 17191], [17063, 17192, 17191], [17063, 17064, 17193], [17063, 17193, 17192], [17064, 17065, 17193], [17065, 17194, 17193], [17065, 17066, 17195], [17065, 17195, 17194], [17066, 17067, 17195], [17067, 17196, 17195], [17067, 17068, 17197], [17067, 17197, 17196], [17068, 17069, 17197], [17069, 17198, 17197], [17069, 17070, 17199], [17069, 17199, 17198], [17070, 17071, 17199], [17071, 17200, 17199], [17071, 17072, 17201], [17071, 17201, 17200], [17072, 17073, 17201], [17073, 17202, 17201], [17074, 17075, 17203], [17075, 17204, 17203], [17075, 17076, 17205], [17075, 17205, 17204], [17076, 17077, 17205], [17077, 17206, 17205], [17077, 17078, 17207], [17077, 17207, 17206], [17078, 17079, 17207], [17079, 17208, 17207], [17079, 17080, 17209], [17079, 17209, 17208], [17080, 17081, 17209], [17081, 17210, 17209], [17081, 17082, 17211], [17081, 17211, 17210], [17082, 17083, 17211], [17083, 17212, 17211], [17083, 17084, 17213], [17083, 17213, 17212], [17084, 17085, 17213], [17085, 17214, 17213], [17085, 17086, 17215], [17085, 17215, 17214], [17086, 17087, 17215], [17087, 17216, 17215], [17087, 17088, 17217], [17087, 17217, 17216], [17088, 17089, 17217], [17089, 17218, 17217], [17089, 17090, 17219], [17089, 17219, 17218], [17090, 17091, 17219], [17091, 17220, 17219], [17091, 17092, 17221], [17091, 17221, 17220], [17092, 17093, 17221], [17093, 17222, 17221], [17093, 17094, 17223], [17093, 17223, 17222], [17094, 17095, 17223], [17095, 17224, 17223], [17095, 17096, 17225], [17095, 17225, 17224], [17096, 17097, 17225], [17097, 17226, 17225], [17097, 17098, 17227], [17097, 17227, 17226], [17098, 17099, 17227], [17099, 17228, 17227], [17099, 17100, 17229], [17099, 17229, 17228], [17100, 17101, 17229], [17101, 17230, 17229], [17101, 17102, 17231], [17101, 17231, 17230], [17102, 17103, 17231], [17103, 17232, 17231], [17103, 17104, 17233], [17103, 17233, 17232], [17104, 17105, 17233], [17105, 17234, 17233], [17105, 17106, 17235], [17105, 17235, 17234], [17106, 17107, 17235], [17107, 17236, 17235], [17107, 17108, 17237], [17107, 17237, 17236], [17108, 17109, 17237], [17109, 17238, 17237], [17109, 17110, 17239], [17109, 17239, 17238], [17110, 17111, 17239], [17111, 17240, 17239], [17111, 
17112, 17241], [17111, 17241, 17240], [17112, 17113, 17241], [17113, 17242, 17241], [17113, 17114, 17243], [17113, 17243, 17242], [17114, 17115, 17243], [17115, 17244, 17243], [17115, 17116, 17245], [17115, 17245, 17244], [17116, 17117, 17245], [17117, 17246, 17245], [17117, 17118, 17247], [17117, 17247, 17246], [17118, 17119, 17247], [17119, 17248, 17247], [17119, 17120, 17249], [17119, 17249, 17248], [17120, 17121, 17249], [17121, 17250, 17249], [17121, 17122, 17251], [17121, 17251, 17250], [17122, 17123, 17251], [17123, 17252, 17251], [17123, 17124, 17253], [17123, 17253, 17252], [17124, 17125, 17253], [17125, 17254, 17253], [17125, 17126, 17255], [17125, 17255, 17254], [17126, 17127, 17255], [17127, 17256, 17255], [17127, 17128, 17257], [17127, 17257, 17256], [17128, 17129, 17257], [17129, 17258, 17257], [17129, 17130, 17259], [17129, 17259, 17258], [17130, 17131, 17259], [17131, 17260, 17259], [17131, 17132, 17261], [17131, 17261, 17260], [17132, 17133, 17261], [17133, 17262, 17261], [17133, 17134, 17263], [17133, 17263, 17262], [17134, 17135, 17263], [17135, 17264, 17263], [17135, 17136, 17265], [17135, 17265, 17264], [17136, 17137, 17265], [17137, 17266, 17265], [17137, 17138, 17267], [17137, 17267, 17266], [17138, 17139, 17267], [17139, 17268, 17267], [17139, 17140, 17269], [17139, 17269, 17268], [17140, 17141, 17269], [17141, 17270, 17269], [17141, 17142, 17271], [17141, 17271, 17270], [17142, 17143, 17271], [17143, 17272, 17271], [17143, 17144, 17273], [17143, 17273, 17272], [17144, 17145, 17273], [17145, 17274, 17273], [17145, 17146, 17275], [17145, 17275, 17274], [17146, 17147, 17275], [17147, 17276, 17275], [17147, 17148, 17277], [17147, 17277, 17276], [17148, 17149, 17277], [17149, 17278, 17277], [17149, 17150, 17279], [17149, 17279, 17278], [17150, 17151, 17279], [17151, 17280, 17279], [17151, 17152, 17281], [17151, 17281, 17280], [17152, 17153, 17281], [17153, 17282, 17281], [17153, 17154, 17283], [17153, 17283, 17282], [17154, 17155, 17283], [17155, 17284, 17283], [17155, 17156, 17285], [17155, 17285, 17284], [17156, 17157, 17285], [17157, 17286, 17285], [17157, 17158, 17287], [17157, 17287, 17286], [17158, 17159, 17287], [17159, 17288, 17287], [17159, 17160, 17289], [17159, 17289, 17288], [17160, 17161, 17289], [17161, 17290, 17289], [17161, 17162, 17291], [17161, 17291, 17290], [17162, 17163, 17291], [17163, 17292, 17291], [17163, 17164, 17293], [17163, 17293, 17292], [17164, 17165, 17293], [17165, 17294, 17293], [17165, 17166, 17295], [17165, 17295, 17294], [17166, 17167, 17295], [17167, 17296, 17295], [17167, 17168, 17297], [17167, 17297, 17296], [17168, 17169, 17297], [17169, 17298, 17297], [17169, 17170, 17299], [17169, 17299, 17298], [17170, 17171, 17299], [17171, 17300, 17299], [17171, 17172, 17301], [17171, 17301, 17300], [17172, 17173, 17301], [17173, 17302, 17301], [17173, 17174, 17303], [17173, 17303, 17302], [17174, 17175, 17303], [17175, 17304, 17303], [17175, 17176, 17305], [17175, 17305, 17304], [17176, 17177, 17305], [17177, 17306, 17305], [17177, 17178, 17307], [17177, 17307, 17306], [17178, 17179, 17307], [17179, 17308, 17307], [17179, 17180, 17309], [17179, 17309, 17308], [17180, 17181, 17309], [17181, 17310, 17309], [17181, 17182, 17311], [17181, 17311, 17310], [17182, 17183, 17311], [17183, 17312, 17311], [17183, 17184, 17313], [17183, 17313, 17312], [17184, 17185, 17313], [17185, 17314, 17313], [17185, 17186, 17315], [17185, 17315, 17314], [17186, 17187, 17315], [17187, 17316, 17315], [17187, 17188, 17317], [17187, 17317, 17316], [17188, 17189, 
17317], [17189, 17318, 17317], [17189, 17190, 17319], [17189, 17319, 17318], [17190, 17191, 17319], [17191, 17320, 17319], [17191, 17192, 17321], [17191, 17321, 17320], [17192, 17193, 17321], [17193, 17322, 17321], [17193, 17194, 17323], [17193, 17323, 17322], [17194, 17195, 17323], [17195, 17324, 17323], [17195, 17196, 17325], [17195, 17325, 17324], [17196, 17197, 17325], [17197, 17326, 17325], [17197, 17198, 17327], [17197, 17327, 17326], [17198, 17199, 17327], [17199, 17328, 17327], [17199, 17200, 17329], [17199, 17329, 17328], [17200, 17201, 17329], [17201, 17330, 17329], [17201, 17202, 17331], [17201, 17331, 17330], [17203, 17204, 17333], [17203, 17333, 17332], [17204, 17205, 17333], [17205, 17334, 17333], [17205, 17206, 17335], [17205, 17335, 17334], [17206, 17207, 17335], [17207, 17336, 17335], [17207, 17208, 17337], [17207, 17337, 17336], [17208, 17209, 17337], [17209, 17338, 17337], [17209, 17210, 17339], [17209, 17339, 17338], [17210, 17211, 17339], [17211, 17340, 17339], [17211, 17212, 17341], [17211, 17341, 17340], [17212, 17213, 17341], [17213, 17342, 17341], [17213, 17214, 17343], [17213, 17343, 17342], [17214, 17215, 17343], [17215, 17344, 17343], [17215, 17216, 17345], [17215, 17345, 17344], [17216, 17217, 17345], [17217, 17346, 17345], [17217, 17218, 17347], [17217, 17347, 17346], [17218, 17219, 17347], [17219, 17348, 17347], [17219, 17220, 17349], [17219, 17349, 17348], [17220, 17221, 17349], [17221, 17350, 17349], [17221, 17222, 17351], [17221, 17351, 17350], [17222, 17223, 17351], [17223, 17352, 17351], [17223, 17224, 17353], [17223, 17353, 17352], [17224, 17225, 17353], [17225, 17354, 17353], [17225, 17226, 17355], [17225, 17355, 17354], [17226, 17227, 17355], [17227, 17356, 17355], [17227, 17228, 17357], [17227, 17357, 17356], [17228, 17229, 17357], [17229, 17358, 17357], [17229, 17230, 17359], [17229, 17359, 17358], [17230, 17231, 17359], [17231, 17360, 17359], [17231, 17232, 17361], [17231, 17361, 17360], [17232, 17233, 17361], [17233, 17362, 17361], [17233, 17234, 17363], [17233, 17363, 17362], [17234, 17235, 17363], [17235, 17364, 17363], [17235, 17236, 17365], [17235, 17365, 17364], [17236, 17237, 17365], [17237, 17366, 17365], [17237, 17238, 17367], [17237, 17367, 17366], [17238, 17239, 17367], [17239, 17368, 17367], [17239, 17240, 17369], [17239, 17369, 17368], [17240, 17241, 17369], [17241, 17370, 17369], [17241, 17242, 17371], [17241, 17371, 17370], [17242, 17243, 17371], [17243, 17372, 17371], [17243, 17244, 17373], [17243, 17373, 17372], [17244, 17245, 17373], [17245, 17374, 17373], [17245, 17246, 17375], [17245, 17375, 17374], [17246, 17247, 17375], [17247, 17376, 17375], [17247, 17248, 17377], [17247, 17377, 17376], [17248, 17249, 17377], [17249, 17378, 17377], [17249, 17250, 17379], [17249, 17379, 17378], [17250, 17251, 17379], [17251, 17380, 17379], [17251, 17252, 17381], [17251, 17381, 17380], [17252, 17253, 17381], [17253, 17382, 17381], [17253, 17254, 17383], [17253, 17383, 17382], [17254, 17255, 17383], [17255, 17384, 17383], [17255, 17256, 17385], [17255, 17385, 17384], [17256, 17257, 17385], [17257, 17386, 17385], [17257, 17258, 17387], [17257, 17387, 17386], [17258, 17259, 17387], [17259, 17388, 17387], [17259, 17260, 17389], [17259, 17389, 17388], [17260, 17261, 17389], [17261, 17390, 17389], [17261, 17262, 17391], [17261, 17391, 17390], [17262, 17263, 17391], [17263, 17392, 17391], [17263, 17264, 17393], [17263, 17393, 17392], [17264, 17265, 17393], [17265, 17394, 17393], [17265, 17266, 17395], [17265, 17395, 17394], [17266, 17267, 17395], 
[17267, 17396, 17395], [17267, 17268, 17397], [17267, 17397, 17396], [17268, 17269, 17397], [17269, 17398, 17397], [17269, 17270, 17399], [17269, 17399, 17398], [17270, 17271, 17399], [17271, 17400, 17399], [17271, 17272, 17401], [17271, 17401, 17400], [17272, 17273, 17401], [17273, 17402, 17401], [17273, 17274, 17403], [17273, 17403, 17402], [17274, 17275, 17403], [17275, 17404, 17403], [17275, 17276, 17405], [17275, 17405, 17404], [17276, 17277, 17405], [17277, 17406, 17405], [17277, 17278, 17407], [17277, 17407, 17406], [17278, 17279, 17407], [17279, 17408, 17407], [17279, 17280, 17409], [17279, 17409, 17408], [17280, 17281, 17409], [17281, 17410, 17409], [17281, 17282, 17411], [17281, 17411, 17410], [17282, 17283, 17411], [17283, 17412, 17411], [17283, 17284, 17413], [17283, 17413, 17412], [17284, 17285, 17413], [17285, 17414, 17413], [17285, 17286, 17415], [17285, 17415, 17414], [17286, 17287, 17415], [17287, 17416, 17415], [17287, 17288, 17417], [17287, 17417, 17416], [17288, 17289, 17417], [17289, 17418, 17417], [17289, 17290, 17419], [17289, 17419, 17418], [17290, 17291, 17419], [17291, 17420, 17419], [17291, 17292, 17421], [17291, 17421, 17420], [17292, 17293, 17421], [17293, 17422, 17421], [17293, 17294, 17423], [17293, 17423, 17422], [17294, 17295, 17423], [17295, 17424, 17423], [17295, 17296, 17425], [17295, 17425, 17424], [17296, 17297, 17425], [17297, 17426, 17425], [17297, 17298, 17427], [17297, 17427, 17426], [17298, 17299, 17427], [17299, 17428, 17427], [17299, 17300, 17429], [17299, 17429, 17428], [17300, 17301, 17429], [17301, 17430, 17429], [17301, 17302, 17431], [17301, 17431, 17430], [17302, 17303, 17431], [17303, 17432, 17431], [17303, 17304, 17433], [17303, 17433, 17432], [17304, 17305, 17433], [17305, 17434, 17433], [17305, 17306, 17435], [17305, 17435, 17434], [17306, 17307, 17435], [17307, 17436, 17435], [17307, 17308, 17437], [17307, 17437, 17436], [17308, 17309, 17437], [17309, 17438, 17437], [17309, 17310, 17439], [17309, 17439, 17438], [17310, 17311, 17439], [17311, 17440, 17439], [17311, 17312, 17441], [17311, 17441, 17440], [17312, 17313, 17441], [17313, 17442, 17441], [17313, 17314, 17443], [17313, 17443, 17442], [17314, 17315, 17443], [17315, 17444, 17443], [17315, 17316, 17445], [17315, 17445, 17444], [17316, 17317, 17445], [17317, 17446, 17445], [17317, 17318, 17447], [17317, 17447, 17446], [17318, 17319, 17447], [17319, 17448, 17447], [17319, 17320, 17449], [17319, 17449, 17448], [17320, 17321, 17449], [17321, 17450, 17449], [17321, 17322, 17451], [17321, 17451, 17450], [17322, 17323, 17451], [17323, 17452, 17451], [17323, 17324, 17453], [17323, 17453, 17452], [17324, 17325, 17453], [17325, 17454, 17453], [17325, 17326, 17455], [17325, 17455, 17454], [17326, 17327, 17455], [17327, 17456, 17455], [17327, 17328, 17457], [17327, 17457, 17456], [17328, 17329, 17457], [17329, 17458, 17457], [17329, 17330, 17459], [17329, 17459, 17458], [17330, 17331, 17459], [17331, 17460, 17459], [17332, 17333, 17461], [17333, 17462, 17461], [17333, 17334, 17463], [17333, 17463, 17462], [17334, 17335, 17463], [17335, 17464, 17463], [17335, 17336, 17465], [17335, 17465, 17464], [17336, 17337, 17465], [17337, 17466, 17465], [17337, 17338, 17467], [17337, 17467, 17466], [17338, 17339, 17467], [17339, 17468, 17467], [17339, 17340, 17469], [17339, 17469, 17468], [17340, 17341, 17469], [17341, 17470, 17469], [17341, 17342, 17471], [17341, 17471, 17470], [17342, 17343, 17471], [17343, 17472, 17471], [17343, 17344, 17473], [17343, 17473, 17472], [17344, 17345, 17473], [17345, 
17474, 17473], [17345, 17346, 17475], [17345, 17475, 17474], [17346, 17347, 17475], [17347, 17476, 17475], [17347, 17348, 17477], [17347, 17477, 17476], [17348, 17349, 17477], [17349, 17478, 17477], [17349, 17350, 17479], [17349, 17479, 17478], [17350, 17351, 17479], [17351, 17480, 17479], [17351, 17352, 17481], [17351, 17481, 17480], [17352, 17353, 17481], [17353, 17482, 17481], [17353, 17354, 17483], [17353, 17483, 17482], [17354, 17355, 17483], [17355, 17484, 17483], [17355, 17356, 17485], [17355, 17485, 17484], [17356, 17357, 17485], [17357, 17486, 17485], [17357, 17358, 17487], [17357, 17487, 17486], [17358, 17359, 17487], [17359, 17488, 17487], [17359, 17360, 17489], [17359, 17489, 17488], [17360, 17361, 17489], [17361, 17490, 17489], [17361, 17362, 17491], [17361, 17491, 17490], [17362, 17363, 17491], [17363, 17492, 17491], [17363, 17364, 17493], [17363, 17493, 17492], [17364, 17365, 17493], [17365, 17494, 17493], [17365, 17366, 17495], [17365, 17495, 17494], [17366, 17367, 17495], [17367, 17496, 17495], [17367, 17368, 17497], [17367, 17497, 17496], [17368, 17369, 17497], [17369, 17498, 17497], [17369, 17370, 17499], [17369, 17499, 17498], [17370, 17371, 17499], [17371, 17500, 17499], [17371, 17372, 17501], [17371, 17501, 17500], [17372, 17373, 17501], [17373, 17502, 17501], [17373, 17374, 17503], [17373, 17503, 17502], [17374, 17375, 17503], [17375, 17504, 17503], [17375, 17376, 17505], [17375, 17505, 17504], [17376, 17377, 17505], [17377, 17506, 17505], [17377, 17378, 17507], [17377, 17507, 17506], [17378, 17379, 17507], [17379, 17508, 17507], [17379, 17380, 17509], [17379, 17509, 17508], [17380, 17381, 17509], [17381, 17510, 17509], [17381, 17382, 17511], [17381, 17511, 17510], [17382, 17383, 17511], [17383, 17512, 17511], [17383, 17384, 17513], [17383, 17513, 17512], [17384, 17385, 17513], [17385, 17514, 17513], [17385, 17386, 17515], [17385, 17515, 17514], [17386, 17387, 17515], [17387, 17516, 17515], [17387, 17388, 17517], [17387, 17517, 17516], [17388, 17389, 17517], [17389, 17518, 17517], [17389, 17390, 17519], [17389, 17519, 17518], [17390, 17391, 17519], [17391, 17520, 17519], [17391, 17392, 17521], [17391, 17521, 17520], [17392, 17393, 17521], [17393, 17522, 17521], [17393, 17394, 17523], [17393, 17523, 17522], [17394, 17395, 17523], [17395, 17524, 17523], [17395, 17396, 17525], [17395, 17525, 17524], [17396, 17397, 17525], [17397, 17526, 17525], [17397, 17398, 17527], [17397, 17527, 17526], [17398, 17399, 17527], [17399, 17528, 17527], [17399, 17400, 17529], [17399, 17529, 17528], [17400, 17401, 17529], [17401, 17530, 17529], [17401, 17402, 17531], [17401, 17531, 17530], [17402, 17403, 17531], [17403, 17532, 17531], [17403, 17404, 17533], [17403, 17533, 17532], [17404, 17405, 17533], [17405, 17534, 17533], [17405, 17406, 17535], [17405, 17535, 17534], [17406, 17407, 17535], [17407, 17536, 17535], [17407, 17408, 17537], [17407, 17537, 17536], [17408, 17409, 17537], [17409, 17538, 17537], [17409, 17410, 17539], [17409, 17539, 17538], [17410, 17411, 17539], [17411, 17540, 17539], [17411, 17412, 17541], [17411, 17541, 17540], [17412, 17413, 17541], [17413, 17542, 17541], [17413, 17414, 17543], [17413, 17543, 17542], [17414, 17415, 17543], [17415, 17544, 17543], [17415, 17416, 17545], [17415, 17545, 17544], [17416, 17417, 17545], [17417, 17546, 17545], [17417, 17418, 17547], [17417, 17547, 17546], [17418, 17419, 17547], [17419, 17548, 17547], [17419, 17420, 17549], [17419, 17549, 17548], [17420, 17421, 17549], [17421, 17550, 17549], [17421, 17422, 17551], [17421, 17551, 
17550], [17422, 17423, 17551], [17423, 17552, 17551], [17423, 17424, 17553], [17423, 17553, 17552], [17424, 17425, 17553], [17425, 17554, 17553], [17425, 17426, 17555], [17425, 17555, 17554], [17426, 17427, 17555], [17427, 17556, 17555], [17427, 17428, 17557], [17427, 17557, 17556], [17428, 17429, 17557], [17429, 17558, 17557], [17429, 17430, 17559], [17429, 17559, 17558], [17430, 17431, 17559], [17431, 17560, 17559], [17431, 17432, 17561], [17431, 17561, 17560], [17432, 17433, 17561], [17433, 17562, 17561], [17433, 17434, 17563], [17433, 17563, 17562], [17434, 17435, 17563], [17435, 17564, 17563], [17435, 17436, 17565], [17435, 17565, 17564], [17436, 17437, 17565], [17437, 17566, 17565], [17437, 17438, 17567], [17437, 17567, 17566], [17438, 17439, 17567], [17439, 17568, 17567], [17439, 17440, 17569], [17439, 17569, 17568], [17440, 17441, 17569], [17441, 17570, 17569], [17441, 17442, 17571], [17441, 17571, 17570], [17442, 17443, 17571], [17443, 17572, 17571], [17443, 17444, 17573], [17443, 17573, 17572], [17444, 17445, 17573], [17445, 17574, 17573], [17445, 17446, 17575], [17445, 17575, 17574], [17446, 17447, 17575], [17447, 17576, 17575], [17447, 17448, 17577], [17447, 17577, 17576], [17448, 17449, 17577], [17449, 17578, 17577], [17449, 17450, 17579], [17449, 17579, 17578], [17450, 17451, 17579], [17451, 17580, 17579], [17451, 17452, 17581], [17451, 17581, 17580], [17452, 17453, 17581], [17453, 17582, 17581], [17453, 17454, 17583], [17453, 17583, 17582], [17454, 17455, 17583], [17455, 17584, 17583], [17455, 17456, 17585], [17455, 17585, 17584], [17456, 17457, 17585], [17457, 17586, 17585], [17457, 17458, 17587], [17457, 17587, 17586], [17458, 17459, 17587], [17459, 17588, 17587], [17459, 17460, 17589], [17459, 17589, 17588], [17461, 17462, 17591], [17461, 17591, 17590], [17462, 17463, 17591], [17463, 17592, 17591], [17463, 17464, 17593], [17463, 17593, 17592], [17464, 17465, 17593], [17465, 17594, 17593], [17465, 17466, 17595], [17465, 17595, 17594], [17466, 17467, 17595], [17467, 17596, 17595], [17467, 17468, 17597], [17467, 17597, 17596], [17468, 17469, 17597], [17469, 17598, 17597], [17469, 17470, 17599], [17469, 17599, 17598], [17470, 17471, 17599], [17471, 17600, 17599], [17471, 17472, 17601], [17471, 17601, 17600], [17472, 17473, 17601], [17473, 17602, 17601], [17473, 17474, 17603], [17473, 17603, 17602], [17474, 17475, 17603], [17475, 17604, 17603], [17475, 17476, 17605], [17475, 17605, 17604], [17476, 17477, 17605], [17477, 17606, 17605], [17477, 17478, 17607], [17477, 17607, 17606], [17478, 17479, 17607], [17479, 17608, 17607], [17479, 17480, 17609], [17479, 17609, 17608], [17480, 17481, 17609], [17481, 17610, 17609], [17481, 17482, 17611], [17481, 17611, 17610], [17482, 17483, 17611], [17483, 17612, 17611], [17483, 17484, 17613], [17483, 17613, 17612], [17484, 17485, 17613], [17485, 17614, 17613], [17485, 17486, 17615], [17485, 17615, 17614], [17486, 17487, 17615], [17487, 17616, 17615], [17487, 17488, 17617], [17487, 17617, 17616], [17488, 17489, 17617], [17489, 17618, 17617], [17489, 17490, 17619], [17489, 17619, 17618], [17490, 17491, 17619], [17491, 17620, 17619], [17491, 17492, 17621], [17491, 17621, 17620], [17492, 17493, 17621], [17493, 17622, 17621], [17493, 17494, 17623], [17493, 17623, 17622], [17494, 17495, 17623], [17495, 17624, 17623], [17495, 17496, 17625], [17495, 17625, 17624], [17496, 17497, 17625], [17497, 17626, 17625], [17497, 17498, 17627], [17497, 17627, 17626], [17498, 17499, 17627], [17499, 17628, 17627], [17499, 17500, 17629], [17499, 17629, 17628], 
[17500, 17501, 17629], [17501, 17630, 17629], [17501, 17502, 17631], [17501, 17631, 17630], [17502, 17503, 17631], [17503, 17632, 17631], [17503, 17504, 17633], [17503, 17633, 17632], [17504, 17505, 17633], [17505, 17634, 17633], [17505, 17506, 17635], [17505, 17635, 17634], [17506, 17507, 17635], [17507, 17636, 17635], [17507, 17508, 17637], [17507, 17637, 17636], [17508, 17509, 17637], [17509, 17638, 17637], [17509, 17510, 17639], [17509, 17639, 17638], [17510, 17511, 17639], [17511, 17640, 17639], [17511, 17512, 17641], [17511, 17641, 17640], [17512, 17513, 17641], [17513, 17642, 17641], [17513, 17514, 17643], [17513, 17643, 17642], [17514, 17515, 17643], [17515, 17644, 17643], [17515, 17516, 17645], [17515, 17645, 17644], [17516, 17517, 17645], [17517, 17646, 17645], [17517, 17518, 17647], [17517, 17647, 17646], [17518, 17519, 17647], [17519, 17648, 17647], [17519, 17520, 17649], [17519, 17649, 17648], [17520, 17521, 17649], [17521, 17650, 17649], [17521, 17522, 17651], [17521, 17651, 17650], [17522, 17523, 17651], [17523, 17652, 17651], [17523, 17524, 17653], [17523, 17653, 17652], [17524, 17525, 17653], [17525, 17654, 17653], [17525, 17526, 17655], [17525, 17655, 17654], [17526, 17527, 17655], [17527, 17656, 17655], [17527, 17528, 17657], [17527, 17657, 17656], [17528, 17529, 17657], [17529, 17658, 17657], [17529, 17530, 17659], [17529, 17659, 17658], [17530, 17531, 17659], [17531, 17660, 17659], [17531, 17532, 17661], [17531, 17661, 17660], [17532, 17533, 17661], [17533, 17662, 17661], [17533, 17534, 17663], [17533, 17663, 17662], [17534, 17535, 17663], [17535, 17664, 17663], [17535, 17536, 17665], [17535, 17665, 17664], [17536, 17537, 17665], [17537, 17666, 17665], [17537, 17538, 17667], [17537, 17667, 17666], [17538, 17539, 17667], [17539, 17668, 17667], [17539, 17540, 17669], [17539, 17669, 17668], [17540, 17541, 17669], [17541, 17670, 17669], [17541, 17542, 17671], [17541, 17671, 17670], [17542, 17543, 17671], [17543, 17672, 17671], [17543, 17544, 17673], [17543, 17673, 17672], [17544, 17545, 17673], [17545, 17674, 17673], [17545, 17546, 17675], [17545, 17675, 17674], [17546, 17547, 17675], [17547, 17676, 17675], [17547, 17548, 17677], [17547, 17677, 17676], [17548, 17549, 17677], [17549, 17678, 17677], [17549, 17550, 17679], [17549, 17679, 17678], [17550, 17551, 17679], [17551, 17680, 17679], [17551, 17552, 17681], [17551, 17681, 17680], [17552, 17553, 17681], [17553, 17682, 17681], [17553, 17554, 17683], [17553, 17683, 17682], [17554, 17555, 17683], [17555, 17684, 17683], [17555, 17556, 17685], [17555, 17685, 17684], [17556, 17557, 17685], [17557, 17686, 17685], [17557, 17558, 17687], [17557, 17687, 17686], [17558, 17559, 17687], [17559, 17688, 17687], [17559, 17560, 17689], [17559, 17689, 17688], [17560, 17561, 17689], [17561, 17690, 17689], [17561, 17562, 17691], [17561, 17691, 17690], [17562, 17563, 17691], [17563, 17692, 17691], [17563, 17564, 17693], [17563, 17693, 17692], [17564, 17565, 17693], [17565, 17694, 17693], [17565, 17566, 17695], [17565, 17695, 17694], [17566, 17567, 17695], [17567, 17696, 17695], [17567, 17568, 17697], [17567, 17697, 17696], [17568, 17569, 17697], [17569, 17698, 17697], [17569, 17570, 17699], [17569, 17699, 17698], [17570, 17571, 17699], [17571, 17700, 17699], [17571, 17572, 17701], [17571, 17701, 17700], [17572, 17573, 17701], [17573, 17702, 17701], [17573, 17574, 17703], [17573, 17703, 17702], [17574, 17575, 17703], [17575, 17704, 17703], [17575, 17576, 17705], [17575, 17705, 17704], [17576, 17577, 17705], [17577, 17706, 17705], [17577, 
17578, 17707], [17577, 17707, 17706], [17578, 17579, 17707], [17579, 17708, 17707], [17579, 17580, 17709], [17579, 17709, 17708], [17580, 17581, 17709], [17581, 17710, 17709], [17581, 17582, 17711], [17581, 17711, 17710], [17582, 17583, 17711], [17583, 17712, 17711], [17583, 17584, 17713], [17583, 17713, 17712], [17584, 17585, 17713], [17585, 17714, 17713], [17585, 17586, 17715], [17585, 17715, 17714], [17586, 17587, 17715], [17587, 17716, 17715], [17587, 17588, 17717], [17587, 17717, 17716], [17588, 17589, 17717], [17589, 17718, 17717], [17590, 17591, 17719], [17591, 17720, 17719], [17591, 17592, 17721], [17591, 17721, 17720], [17592, 17593, 17721], [17593, 17722, 17721], [17593, 17594, 17723], [17593, 17723, 17722], [17594, 17595, 17723], [17595, 17724, 17723], [17595, 17596, 17725], [17595, 17725, 17724], [17596, 17597, 17725], [17597, 17726, 17725], [17597, 17598, 17727], [17597, 17727, 17726], [17598, 17599, 17727], [17599, 17728, 17727], [17599, 17600, 17729], [17599, 17729, 17728], [17600, 17601, 17729], [17601, 17730, 17729], [17601, 17602, 17731], [17601, 17731, 17730], [17602, 17603, 17731], [17603, 17732, 17731], [17603, 17604, 17733], [17603, 17733, 17732], [17604, 17605, 17733], [17605, 17734, 17733], [17605, 17606, 17735], [17605, 17735, 17734], [17606, 17607, 17735], [17607, 17736, 17735], [17607, 17608, 17737], [17607, 17737, 17736], [17608, 17609, 17737], [17609, 17738, 17737], [17609, 17610, 17739], [17609, 17739, 17738], [17610, 17611, 17739], [17611, 17740, 17739], [17611, 17612, 17741], [17611, 17741, 17740], [17612, 17613, 17741], [17613, 17742, 17741], [17613, 17614, 17743], [17613, 17743, 17742], [17614, 17615, 17743], [17615, 17744, 17743], [17615, 17616, 17745], [17615, 17745, 17744], [17616, 17617, 17745], [17617, 17746, 17745], [17617, 17618, 17747], [17617, 17747, 17746], [17618, 17619, 17747], [17619, 17748, 17747], [17619, 17620, 17749], [17619, 17749, 17748], [17620, 17621, 17749], [17621, 17750, 17749], [17621, 17622, 17751], [17621, 17751, 17750], [17622, 17623, 17751], [17623, 17752, 17751], [17623, 17624, 17753], [17623, 17753, 17752], [17624, 17625, 17753], [17625, 17754, 17753], [17625, 17626, 17755], [17625, 17755, 17754], [17626, 17627, 17755], [17627, 17756, 17755], [17627, 17628, 17757], [17627, 17757, 17756], [17628, 17629, 17757], [17629, 17758, 17757], [17629, 17630, 17759], [17629, 17759, 17758], [17630, 17631, 17759], [17631, 17760, 17759], [17631, 17632, 17761], [17631, 17761, 17760], [17632, 17633, 17761], [17633, 17762, 17761], [17633, 17634, 17763], [17633, 17763, 17762], [17634, 17635, 17763], [17635, 17764, 17763], [17635, 17636, 17765], [17635, 17765, 17764], [17636, 17637, 17765], [17637, 17766, 17765], [17637, 17638, 17767], [17637, 17767, 17766], [17638, 17639, 17767], [17639, 17768, 17767], [17639, 17640, 17769], [17639, 17769, 17768], [17640, 17641, 17769], [17641, 17770, 17769], [17641, 17642, 17771], [17641, 17771, 17770], [17642, 17643, 17771], [17643, 17772, 17771], [17643, 17644, 17773], [17643, 17773, 17772], [17644, 17645, 17773], [17645, 17774, 17773], [17645, 17646, 17775], [17645, 17775, 17774], [17646, 17647, 17775], [17647, 17776, 17775], [17647, 17648, 17777], [17647, 17777, 17776], [17648, 17649, 17777], [17649, 17778, 17777], [17649, 17650, 17779], [17649, 17779, 17778], [17650, 17651, 17779], [17651, 17780, 17779], [17651, 17652, 17781], [17651, 17781, 17780], [17652, 17653, 17781], [17653, 17782, 17781], [17653, 17654, 17783], [17653, 17783, 17782], [17654, 17655, 17783], [17655, 17784, 17783], [17655, 17656, 
17785], [17655, 17785, 17784], [17656, 17657, 17785], [17657, 17786, 17785], [17657, 17658, 17787], [17657, 17787, 17786], [17658, 17659, 17787], [17659, 17788, 17787], [17659, 17660, 17789], [17659, 17789, 17788], [17660, 17661, 17789], [17661, 17790, 17789], [17661, 17662, 17791], [17661, 17791, 17790], [17662, 17663, 17791], [17663, 17792, 17791], [17663, 17664, 17793], [17663, 17793, 17792], [17664, 17665, 17793], [17665, 17794, 17793], [17665, 17666, 17795], [17665, 17795, 17794], [17666, 17667, 17795], [17667, 17796, 17795], [17667, 17668, 17797], [17667, 17797, 17796], [17668, 17669, 17797], [17669, 17798, 17797], [17669, 17670, 17799], [17669, 17799, 17798], [17670, 17671, 17799], [17671, 17800, 17799], [17671, 17672, 17801], [17671, 17801, 17800], [17672, 17673, 17801], [17673, 17802, 17801], [17673, 17674, 17803], [17673, 17803, 17802], [17674, 17675, 17803], [17675, 17804, 17803], [17675, 17676, 17805], [17675, 17805, 17804], [17676, 17677, 17805], [17677, 17806, 17805], [17677, 17678, 17807], [17677, 17807, 17806], [17678, 17679, 17807], [17679, 17808, 17807], [17679, 17680, 17809], [17679, 17809, 17808], [17680, 17681, 17809], [17681, 17810, 17809], [17681, 17682, 17811], [17681, 17811, 17810], [17682, 17683, 17811], [17683, 17812, 17811], [17683, 17684, 17813], [17683, 17813, 17812], [17684, 17685, 17813], [17685, 17814, 17813], [17685, 17686, 17815], [17685, 17815, 17814], [17686, 17687, 17815], [17687, 17816, 17815], [17687, 17688, 17817], [17687, 17817, 17816], [17688, 17689, 17817], [17689, 17818, 17817], [17689, 17690, 17819], [17689, 17819, 17818], [17690, 17691, 17819], [17691, 17820, 17819], [17691, 17692, 17821], [17691, 17821, 17820], [17692, 17693, 17821], [17693, 17822, 17821], [17693, 17694, 17823], [17693, 17823, 17822], [17694, 17695, 17823], [17695, 17824, 17823], [17695, 17696, 17825], [17695, 17825, 17824], [17696, 17697, 17825], [17697, 17826, 17825], [17697, 17698, 17827], [17697, 17827, 17826], [17698, 17699, 17827], [17699, 17828, 17827], [17699, 17700, 17829], [17699, 17829, 17828], [17700, 17701, 17829], [17701, 17830, 17829], [17701, 17702, 17831], [17701, 17831, 17830], [17702, 17703, 17831], [17703, 17832, 17831], [17703, 17704, 17833], [17703, 17833, 17832], [17704, 17705, 17833], [17705, 17834, 17833], [17705, 17706, 17835], [17705, 17835, 17834], [17706, 17707, 17835], [17707, 17836, 17835], [17707, 17708, 17837], [17707, 17837, 17836], [17708, 17709, 17837], [17709, 17838, 17837], [17709, 17710, 17839], [17709, 17839, 17838], [17710, 17711, 17839], [17711, 17840, 17839], [17711, 17712, 17841], [17711, 17841, 17840], [17712, 17713, 17841], [17713, 17842, 17841], [17713, 17714, 17843], [17713, 17843, 17842], [17714, 17715, 17843], [17715, 17844, 17843], [17715, 17716, 17845], [17715, 17845, 17844], [17716, 17717, 17845], [17717, 17846, 17845], [17717, 17718, 17847], [17717, 17847, 17846], [17719, 17720, 17849], [17719, 17849, 17848], [17720, 17721, 17849], [17721, 17850, 17849], [17721, 17722, 17851], [17721, 17851, 17850], [17722, 17723, 17851], [17723, 17852, 17851], [17723, 17724, 17853], [17723, 17853, 17852], [17724, 17725, 17853], [17725, 17854, 17853], [17725, 17726, 17855], [17725, 17855, 17854], [17726, 17727, 17855], [17727, 17856, 17855], [17727, 17728, 17857], [17727, 17857, 17856], [17728, 17729, 17857], [17729, 17858, 17857], [17729, 17730, 17859], [17729, 17859, 17858], [17730, 17731, 17859], [17731, 17860, 17859], [17731, 17732, 17861], [17731, 17861, 17860], [17732, 17733, 17861], [17733, 17862, 17861], [17733, 17734, 17863], 
[17733, 17863, 17862], [17734, 17735, 17863], [17735, 17864, 17863], [17735, 17736, 17865], [17735, 17865, 17864], [17736, 17737, 17865], [17737, 17866, 17865], [17737, 17738, 17867], [17737, 17867, 17866], [17738, 17739, 17867], [17739, 17868, 17867], [17739, 17740, 17869], [17739, 17869, 17868], [17740, 17741, 17869], [17741, 17870, 17869], [17741, 17742, 17871], [17741, 17871, 17870], [17742, 17743, 17871], [17743, 17872, 17871], [17743, 17744, 17873], [17743, 17873, 17872], [17744, 17745, 17873], [17745, 17874, 17873], [17745, 17746, 17875], [17745, 17875, 17874], [17746, 17747, 17875], [17747, 17876, 17875], [17747, 17748, 17877], [17747, 17877, 17876], [17748, 17749, 17877], [17749, 17878, 17877], [17749, 17750, 17879], [17749, 17879, 17878], [17750, 17751, 17879], [17751, 17880, 17879], [17751, 17752, 17881], [17751, 17881, 17880], [17752, 17753, 17881], [17753, 17882, 17881], [17753, 17754, 17883], [17753, 17883, 17882], [17754, 17755, 17883], [17755, 17884, 17883], [17755, 17756, 17885], [17755, 17885, 17884], [17756, 17757, 17885], [17757, 17886, 17885], [17757, 17758, 17887], [17757, 17887, 17886], [17758, 17759, 17887], [17759, 17888, 17887], [17759, 17760, 17889], [17759, 17889, 17888], [17760, 17761, 17889], [17761, 17890, 17889], [17761, 17762, 17891], [17761, 17891, 17890], [17762, 17763, 17891], [17763, 17892, 17891], [17763, 17764, 17893], [17763, 17893, 17892], [17764, 17765, 17893], [17765, 17894, 17893], [17765, 17766, 17895], [17765, 17895, 17894], [17766, 17767, 17895], [17767, 17896, 17895], [17767, 17768, 17897], [17767, 17897, 17896], [17768, 17769, 17897], [17769, 17898, 17897], [17769, 17770, 17899], [17769, 17899, 17898], [17770, 17771, 17899], [17771, 17900, 17899], [17771, 17772, 17901], [17771, 17901, 17900], [17772, 17773, 17901], [17773, 17902, 17901], [17773, 17774, 17903], [17773, 17903, 17902], [17774, 17775, 17903], [17775, 17904, 17903], [17775, 17776, 17905], [17775, 17905, 17904], [17776, 17777, 17905], [17777, 17906, 17905], [17777, 17778, 17907], [17777, 17907, 17906], [17778, 17779, 17907], [17779, 17908, 17907], [17779, 17780, 17909], [17779, 17909, 17908], [17780, 17781, 17909], [17781, 17910, 17909], [17781, 17782, 17911], [17781, 17911, 17910], [17782, 17783, 17911], [17783, 17912, 17911], [17783, 17784, 17913], [17783, 17913, 17912], [17784, 17785, 17913], [17785, 17914, 17913], [17785, 17786, 17915], [17785, 17915, 17914], [17786, 17787, 17915], [17787, 17916, 17915], [17787, 17788, 17917], [17787, 17917, 17916], [17788, 17789, 17917], [17789, 17918, 17917], [17789, 17790, 17919], [17789, 17919, 17918], [17790, 17791, 17919], [17791, 17920, 17919], [17791, 17792, 17921], [17791, 17921, 17920], [17792, 17793, 17921], [17793, 17922, 17921], [17793, 17794, 17923], [17793, 17923, 17922], [17794, 17795, 17923], [17795, 17924, 17923], [17795, 17796, 17925], [17795, 17925, 17924], [17796, 17797, 17925], [17797, 17926, 17925], [17797, 17798, 17927], [17797, 17927, 17926], [17798, 17799, 17927], [17799, 17928, 17927], [17799, 17800, 17929], [17799, 17929, 17928], [17800, 17801, 17929], [17801, 17930, 17929], [17801, 17802, 17931], [17801, 17931, 17930], [17802, 17803, 17931], [17803, 17932, 17931], [17803, 17804, 17933], [17803, 17933, 17932], [17804, 17805, 17933], [17805, 17934, 17933], [17805, 17806, 17935], [17805, 17935, 17934], [17806, 17807, 17935], [17807, 17936, 17935], [17807, 17808, 17937], [17807, 17937, 17936], [17808, 17809, 17937], [17809, 17938, 17937], [17809, 17810, 17939], [17809, 17939, 17938], [17810, 17811, 17939], [17811, 
17940, 17939], [17811, 17812, 17941], [17811, 17941, 17940], [17812, 17813, 17941], [17813, 17942, 17941], [17813, 17814, 17943], [17813, 17943, 17942], [17814, 17815, 17943], [17815, 17944, 17943], [17815, 17816, 17945], [17815, 17945, 17944], [17816, 17817, 17945], [17817, 17946, 17945], [17817, 17818, 17947], [17817, 17947, 17946], [17818, 17819, 17947], [17819, 17948, 17947], [17819, 17820, 17949], [17819, 17949, 17948], [17820, 17821, 17949], [17821, 17950, 17949], [17821, 17822, 17951], [17821, 17951, 17950], [17822, 17823, 17951], [17823, 17952, 17951], [17823, 17824, 17953], [17823, 17953, 17952], [17824, 17825, 17953], [17825, 17954, 17953], [17825, 17826, 17955], [17825, 17955, 17954], [17826, 17827, 17955], [17827, 17956, 17955], [17827, 17828, 17957], [17827, 17957, 17956], [17828, 17829, 17957], [17829, 17958, 17957], [17829, 17830, 17959], [17829, 17959, 17958], [17830, 17831, 17959], [17831, 17960, 17959], [17831, 17832, 17961], [17831, 17961, 17960], [17832, 17833, 17961], [17833, 17962, 17961], [17833, 17834, 17963], [17833, 17963, 17962], [17834, 17835, 17963], [17835, 17964, 17963], [17835, 17836, 17965], [17835, 17965, 17964], [17836, 17837, 17965], [17837, 17966, 17965], [17837, 17838, 17967], [17837, 17967, 17966], [17838, 17839, 17967], [17839, 17968, 17967], [17839, 17840, 17969], [17839, 17969, 17968], [17840, 17841, 17969], [17841, 17970, 17969], [17841, 17842, 17971], [17841, 17971, 17970], [17842, 17843, 17971], [17843, 17972, 17971], [17843, 17844, 17973], [17843, 17973, 17972], [17844, 17845, 17973], [17845, 17974, 17973], [17845, 17846, 17975], [17845, 17975, 17974], [17846, 17847, 17975], [17847, 17976, 17975], [17848, 17849, 17977], [17849, 17978, 17977], [17849, 17850, 17979], [17849, 17979, 17978], [17850, 17851, 17979], [17851, 17980, 17979], [17851, 17852, 17981], [17851, 17981, 17980], [17852, 17853, 17981], [17853, 17982, 17981], [17853, 17854, 17983], [17853, 17983, 17982], [17854, 17855, 17983], [17855, 17984, 17983], [17855, 17856, 17985], [17855, 17985, 17984], [17856, 17857, 17985], [17857, 17986, 17985], [17857, 17858, 17987], [17857, 17987, 17986], [17858, 17859, 17987], [17859, 17988, 17987], [17859, 17860, 17989], [17859, 17989, 17988], [17860, 17861, 17989], [17861, 17990, 17989], [17861, 17862, 17991], [17861, 17991, 17990], [17862, 17863, 17991], [17863, 17992, 17991], [17863, 17864, 17993], [17863, 17993, 17992], [17864, 17865, 17993], [17865, 17994, 17993], [17865, 17866, 17995], [17865, 17995, 17994], [17866, 17867, 17995], [17867, 17996, 17995], [17867, 17868, 17997], [17867, 17997, 17996], [17868, 17869, 17997], [17869, 17998, 17997], [17869, 17870, 17999], [17869, 17999, 17998], [17870, 17871, 17999], [17871, 18000, 17999], [17871, 17872, 18001], [17871, 18001, 18000], [17872, 17873, 18001], [17873, 18002, 18001], [17873, 17874, 18003], [17873, 18003, 18002], [17874, 17875, 18003], [17875, 18004, 18003], [17875, 17876, 18005], [17875, 18005, 18004], [17876, 17877, 18005], [17877, 18006, 18005], [17877, 17878, 18007], [17877, 18007, 18006], [17878, 17879, 18007], [17879, 18008, 18007], [17879, 17880, 18009], [17879, 18009, 18008], [17880, 17881, 18009], [17881, 18010, 18009], [17881, 17882, 18011], [17881, 18011, 18010], [17882, 17883, 18011], [17883, 18012, 18011], [17883, 17884, 18013], [17883, 18013, 18012], [17884, 17885, 18013], [17885, 18014, 18013], [17885, 17886, 18015], [17885, 18015, 18014], [17886, 17887, 18015], [17887, 18016, 18015], [17887, 17888, 18017], [17887, 18017, 18016], [17888, 17889, 18017], [17889, 18018, 
18017], [17889, 17890, 18019], [17889, 18019, 18018], [17890, 17891, 18019], [17891, 18020, 18019], [17891, 17892, 18021], [17891, 18021, 18020], [17892, 17893, 18021], [17893, 18022, 18021], [17893, 17894, 18023], [17893, 18023, 18022], [17894, 17895, 18023], [17895, 18024, 18023], [17895, 17896, 18025], [17895, 18025, 18024], [17896, 17897, 18025], [17897, 18026, 18025], [17897, 17898, 18027], [17897, 18027, 18026], [17898, 17899, 18027], [17899, 18028, 18027], [17899, 17900, 18029], [17899, 18029, 18028], [17900, 17901, 18029], [17901, 18030, 18029], [17901, 17902, 18031], [17901, 18031, 18030], [17902, 17903, 18031], [17903, 18032, 18031], [17903, 17904, 18033], [17903, 18033, 18032], [17904, 17905, 18033], [17905, 18034, 18033], [17905, 17906, 18035], [17905, 18035, 18034], [17906, 17907, 18035], [17907, 18036, 18035], [17907, 17908, 18037], [17907, 18037, 18036], [17908, 17909, 18037], [17909, 18038, 18037], [17909, 17910, 18039], [17909, 18039, 18038], [17910, 17911, 18039], [17911, 18040, 18039], [17911, 17912, 18041], [17911, 18041, 18040], [17912, 17913, 18041], [17913, 18042, 18041], [17913, 17914, 18043], [17913, 18043, 18042], [17914, 17915, 18043], [17915, 18044, 18043], [17915, 17916, 18045], [17915, 18045, 18044], [17916, 17917, 18045], [17917, 18046, 18045], [17917, 17918, 18047], [17917, 18047, 18046], [17918, 17919, 18047], [17919, 18048, 18047], [17919, 17920, 18049], [17919, 18049, 18048], [17920, 17921, 18049], [17921, 18050, 18049], [17921, 17922, 18051], [17921, 18051, 18050], [17922, 17923, 18051], [17923, 18052, 18051], [17923, 17924, 18053], [17923, 18053, 18052], [17924, 17925, 18053], [17925, 18054, 18053], [17925, 17926, 18055], [17925, 18055, 18054], [17926, 17927, 18055], [17927, 18056, 18055], [17927, 17928, 18057], [17927, 18057, 18056], [17928, 17929, 18057], [17929, 18058, 18057], [17929, 17930, 18059], [17929, 18059, 18058], [17930, 17931, 18059], [17931, 18060, 18059], [17931, 17932, 18061], [17931, 18061, 18060], [17932, 17933, 18061], [17933, 18062, 18061], [17933, 17934, 18063], [17933, 18063, 18062], [17934, 17935, 18063], [17935, 18064, 18063], [17935, 17936, 18065], [17935, 18065, 18064], [17936, 17937, 18065], [17937, 18066, 18065], [17937, 17938, 18067], [17937, 18067, 18066], [17938, 17939, 18067], [17939, 18068, 18067], [17939, 17940, 18069], [17939, 18069, 18068], [17940, 17941, 18069], [17941, 18070, 18069], [17941, 17942, 18071], [17941, 18071, 18070], [17942, 17943, 18071], [17943, 18072, 18071], [17943, 17944, 18073], [17943, 18073, 18072], [17944, 17945, 18073], [17945, 18074, 18073], [17945, 17946, 18075], [17945, 18075, 18074], [17946, 17947, 18075], [17947, 18076, 18075], [17947, 17948, 18077], [17947, 18077, 18076], [17948, 17949, 18077], [17949, 18078, 18077], [17949, 17950, 18079], [17949, 18079, 18078], [17950, 17951, 18079], [17951, 18080, 18079], [17951, 17952, 18081], [17951, 18081, 18080], [17952, 17953, 18081], [17953, 18082, 18081], [17953, 17954, 18083], [17953, 18083, 18082], [17954, 17955, 18083], [17955, 18084, 18083], [17955, 17956, 18085], [17955, 18085, 18084], [17956, 17957, 18085], [17957, 18086, 18085], [17957, 17958, 18087], [17957, 18087, 18086], [17958, 17959, 18087], [17959, 18088, 18087], [17959, 17960, 18089], [17959, 18089, 18088], [17960, 17961, 18089], [17961, 18090, 18089], [17961, 17962, 18091], [17961, 18091, 18090], [17962, 17963, 18091], [17963, 18092, 18091], [17963, 17964, 18093], [17963, 18093, 18092], [17964, 17965, 18093], [17965, 18094, 18093], [17965, 17966, 18095], [17965, 18095, 18094], 
[17966, 17967, 18095], [17967, 18096, 18095], [17967, 17968, 18097], [17967, 18097, 18096], [17968, 17969, 18097], [17969, 18098, 18097], [17969, 17970, 18099], [17969, 18099, 18098], [17970, 17971, 18099], [17971, 18100, 18099], [17971, 17972, 18101], [17971, 18101, 18100], [17972, 17973, 18101], [17973, 18102, 18101], [17973, 17974, 18103], [17973, 18103, 18102], [17974, 17975, 18103], [17975, 18104, 18103], [17975, 17976, 18105], [17975, 18105, 18104], [17977, 17978, 18107], [17977, 18107, 18106], [17978, 17979, 18107], [17979, 18108, 18107], [17979, 17980, 18109], [17979, 18109, 18108], [17980, 17981, 18109], [17981, 18110, 18109], [17981, 17982, 18111], [17981, 18111, 18110], [17982, 17983, 18111], [17983, 18112, 18111], [17983, 17984, 18113], [17983, 18113, 18112], [17984, 17985, 18113], [17985, 18114, 18113], [17985, 17986, 18115], [17985, 18115, 18114], [17986, 17987, 18115], [17987, 18116, 18115], [17987, 17988, 18117], [17987, 18117, 18116], [17988, 17989, 18117], [17989, 18118, 18117], [17989, 17990, 18119], [17989, 18119, 18118], [17990, 17991, 18119], [17991, 18120, 18119], [17991, 17992, 18121], [17991, 18121, 18120], [17992, 17993, 18121], [17993, 18122, 18121], [17993, 17994, 18123], [17993, 18123, 18122], [17994, 17995, 18123], [17995, 18124, 18123], [17995, 17996, 18125], [17995, 18125, 18124], [17996, 17997, 18125], [17997, 18126, 18125], [17997, 17998, 18127], [17997, 18127, 18126], [17998, 17999, 18127], [17999, 18128, 18127], [17999, 18000, 18129], [17999, 18129, 18128], [18000, 18001, 18129], [18001, 18130, 18129], [18001, 18002, 18131], [18001, 18131, 18130], [18002, 18003, 18131], [18003, 18132, 18131], [18003, 18004, 18133], [18003, 18133, 18132], [18004, 18005, 18133], [18005, 18134, 18133], [18005, 18006, 18135], [18005, 18135, 18134], [18006, 18007, 18135], [18007, 18136, 18135], [18007, 18008, 18137], [18007, 18137, 18136], [18008, 18009, 18137], [18009, 18138, 18137], [18009, 18010, 18139], [18009, 18139, 18138], [18010, 18011, 18139], [18011, 18140, 18139], [18011, 18012, 18141], [18011, 18141, 18140], [18012, 18013, 18141], [18013, 18142, 18141], [18013, 18014, 18143], [18013, 18143, 18142], [18014, 18015, 18143], [18015, 18144, 18143], [18015, 18016, 18145], [18015, 18145, 18144], [18016, 18017, 18145], [18017, 18146, 18145], [18017, 18018, 18147], [18017, 18147, 18146], [18018, 18019, 18147], [18019, 18148, 18147], [18019, 18020, 18149], [18019, 18149, 18148], [18020, 18021, 18149], [18021, 18150, 18149], [18021, 18022, 18151], [18021, 18151, 18150], [18022, 18023, 18151], [18023, 18152, 18151], [18023, 18024, 18153], [18023, 18153, 18152], [18024, 18025, 18153], [18025, 18154, 18153], [18025, 18026, 18155], [18025, 18155, 18154], [18026, 18027, 18155], [18027, 18156, 18155], [18027, 18028, 18157], [18027, 18157, 18156], [18028, 18029, 18157], [18029, 18158, 18157], [18029, 18030, 18159], [18029, 18159, 18158], [18030, 18031, 18159], [18031, 18160, 18159], [18031, 18032, 18161], [18031, 18161, 18160], [18032, 18033, 18161], [18033, 18162, 18161], [18033, 18034, 18163], [18033, 18163, 18162], [18034, 18035, 18163], [18035, 18164, 18163], [18035, 18036, 18165], [18035, 18165, 18164], [18036, 18037, 18165], [18037, 18166, 18165], [18037, 18038, 18167], [18037, 18167, 18166], [18038, 18039, 18167], [18039, 18168, 18167], [18039, 18040, 18169], [18039, 18169, 18168], [18040, 18041, 18169], [18041, 18170, 18169], [18041, 18042, 18171], [18041, 18171, 18170], [18042, 18043, 18171], [18043, 18172, 18171], [18043, 18044, 18173], [18043, 18173, 18172], [18044, 
18045, 18173], [18045, 18174, 18173], [18045, 18046, 18175], [18045, 18175, 18174], [18046, 18047, 18175], [18047, 18176, 18175], [18047, 18048, 18177], [18047, 18177, 18176], [18048, 18049, 18177], [18049, 18178, 18177], [18049, 18050, 18179], [18049, 18179, 18178], [18050, 18051, 18179], [18051, 18180, 18179], [18051, 18052, 18181], [18051, 18181, 18180], [18052, 18053, 18181], [18053, 18182, 18181], [18053, 18054, 18183], [18053, 18183, 18182], [18054, 18055, 18183], [18055, 18184, 18183], [18055, 18056, 18185], [18055, 18185, 18184], [18056, 18057, 18185], [18057, 18186, 18185], [18057, 18058, 18187], [18057, 18187, 18186], [18058, 18059, 18187], [18059, 18188, 18187], [18059, 18060, 18189], [18059, 18189, 18188], [18060, 18061, 18189], [18061, 18190, 18189], [18061, 18062, 18191], [18061, 18191, 18190], [18062, 18063, 18191], [18063, 18192, 18191], [18063, 18064, 18193], [18063, 18193, 18192], [18064, 18065, 18193], [18065, 18194, 18193], [18065, 18066, 18195], [18065, 18195, 18194], [18066, 18067, 18195], [18067, 18196, 18195], [18067, 18068, 18197], [18067, 18197, 18196], [18068, 18069, 18197], [18069, 18198, 18197], [18069, 18070, 18199], [18069, 18199, 18198], [18070, 18071, 18199], [18071, 18200, 18199], [18071, 18072, 18201], [18071, 18201, 18200], [18072, 18073, 18201], [18073, 18202, 18201], [18073, 18074, 18203], [18073, 18203, 18202], [18074, 18075, 18203], [18075, 18204, 18203], [18075, 18076, 18205], [18075, 18205, 18204], [18076, 18077, 18205], [18077, 18206, 18205], [18077, 18078, 18207], [18077, 18207, 18206], [18078, 18079, 18207], [18079, 18208, 18207], [18079, 18080, 18209], [18079, 18209, 18208], [18080, 18081, 18209], [18081, 18210, 18209], [18081, 18082, 18211], [18081, 18211, 18210], [18082, 18083, 18211], [18083, 18212, 18211], [18083, 18084, 18213], [18083, 18213, 18212], [18084, 18085, 18213], [18085, 18214, 18213], [18085, 18086, 18215], [18085, 18215, 18214], [18086, 18087, 18215], [18087, 18216, 18215], [18087, 18088, 18217], [18087, 18217, 18216], [18088, 18089, 18217], [18089, 18218, 18217], [18089, 18090, 18219], [18089, 18219, 18218], [18090, 18091, 18219], [18091, 18220, 18219], [18091, 18092, 18221], [18091, 18221, 18220], [18092, 18093, 18221], [18093, 18222, 18221], [18093, 18094, 18223], [18093, 18223, 18222], [18094, 18095, 18223], [18095, 18224, 18223], [18095, 18096, 18225], [18095, 18225, 18224], [18096, 18097, 18225], [18097, 18226, 18225], [18097, 18098, 18227], [18097, 18227, 18226], [18098, 18099, 18227], [18099, 18228, 18227], [18099, 18100, 18229], [18099, 18229, 18228], [18100, 18101, 18229], [18101, 18230, 18229], [18101, 18102, 18231], [18101, 18231, 18230], [18102, 18103, 18231], [18103, 18232, 18231], [18103, 18104, 18233], [18103, 18233, 18232], [18104, 18105, 18233], [18105, 18234, 18233], [18106, 18107, 18235], [18107, 18236, 18235], [18107, 18108, 18237], [18107, 18237, 18236], [18108, 18109, 18237], [18109, 18238, 18237], [18109, 18110, 18239], [18109, 18239, 18238], [18110, 18111, 18239], [18111, 18240, 18239], [18111, 18112, 18241], [18111, 18241, 18240], [18112, 18113, 18241], [18113, 18242, 18241], [18113, 18114, 18243], [18113, 18243, 18242], [18114, 18115, 18243], [18115, 18244, 18243], [18115, 18116, 18245], [18115, 18245, 18244], [18116, 18117, 18245], [18117, 18246, 18245], [18117, 18118, 18247], [18117, 18247, 18246], [18118, 18119, 18247], [18119, 18248, 18247], [18119, 18120, 18249], [18119, 18249, 18248], [18120, 18121, 18249], [18121, 18250, 18249], [18121, 18122, 18251], [18121, 18251, 18250], [18122, 18123, 
18251], [18123, 18252, 18251], [18123, 18124, 18253], [18123, 18253, 18252], [18124, 18125, 18253], [18125, 18254, 18253], [18125, 18126, 18255], [18125, 18255, 18254], [18126, 18127, 18255], [18127, 18256, 18255], [18127, 18128, 18257], [18127, 18257, 18256], [18128, 18129, 18257], [18129, 18258, 18257], [18129, 18130, 18259], [18129, 18259, 18258], [18130, 18131, 18259], [18131, 18260, 18259], [18131, 18132, 18261], [18131, 18261, 18260], [18132, 18133, 18261], [18133, 18262, 18261], [18133, 18134, 18263], [18133, 18263, 18262], [18134, 18135, 18263], [18135, 18264, 18263], [18135, 18136, 18265], [18135, 18265, 18264], [18136, 18137, 18265], [18137, 18266, 18265], [18137, 18138, 18267], [18137, 18267, 18266], [18138, 18139, 18267], [18139, 18268, 18267], [18139, 18140, 18269], [18139, 18269, 18268], [18140, 18141, 18269], [18141, 18270, 18269], [18141, 18142, 18271], [18141, 18271, 18270], [18142, 18143, 18271], [18143, 18272, 18271], [18143, 18144, 18273], [18143, 18273, 18272], [18144, 18145, 18273], [18145, 18274, 18273], [18145, 18146, 18275], [18145, 18275, 18274], [18146, 18147, 18275], [18147, 18276, 18275], [18147, 18148, 18277], [18147, 18277, 18276], [18148, 18149, 18277], [18149, 18278, 18277], [18149, 18150, 18279], [18149, 18279, 18278], [18150, 18151, 18279], [18151, 18280, 18279], [18151, 18152, 18281], [18151, 18281, 18280], [18152, 18153, 18281], [18153, 18282, 18281], [18153, 18154, 18283], [18153, 18283, 18282], [18154, 18155, 18283], [18155, 18284, 18283], [18155, 18156, 18285], [18155, 18285, 18284], [18156, 18157, 18285], [18157, 18286, 18285], [18157, 18158, 18287], [18157, 18287, 18286], [18158, 18159, 18287], [18159, 18288, 18287], [18159, 18160, 18289], [18159, 18289, 18288], [18160, 18161, 18289], [18161, 18290, 18289], [18161, 18162, 18291], [18161, 18291, 18290], [18162, 18163, 18291], [18163, 18292, 18291], [18163, 18164, 18293], [18163, 18293, 18292], [18164, 18165, 18293], [18165, 18294, 18293], [18165, 18166, 18295], [18165, 18295, 18294], [18166, 18167, 18295], [18167, 18296, 18295], [18167, 18168, 18297], [18167, 18297, 18296], [18168, 18169, 18297], [18169, 18298, 18297], [18169, 18170, 18299], [18169, 18299, 18298], [18170, 18171, 18299], [18171, 18300, 18299], [18171, 18172, 18301], [18171, 18301, 18300], [18172, 18173, 18301], [18173, 18302, 18301], [18173, 18174, 18303], [18173, 18303, 18302], [18174, 18175, 18303], [18175, 18304, 18303], [18175, 18176, 18305], [18175, 18305, 18304], [18176, 18177, 18305], [18177, 18306, 18305], [18177, 18178, 18307], [18177, 18307, 18306], [18178, 18179, 18307], [18179, 18308, 18307], [18179, 18180, 18309], [18179, 18309, 18308], [18180, 18181, 18309], [18181, 18310, 18309], [18181, 18182, 18311], [18181, 18311, 18310], [18182, 18183, 18311], [18183, 18312, 18311], [18183, 18184, 18313], [18183, 18313, 18312], [18184, 18185, 18313], [18185, 18314, 18313], [18185, 18186, 18315], [18185, 18315, 18314], [18186, 18187, 18315], [18187, 18316, 18315], [18187, 18188, 18317], [18187, 18317, 18316], [18188, 18189, 18317], [18189, 18318, 18317], [18189, 18190, 18319], [18189, 18319, 18318], [18190, 18191, 18319], [18191, 18320, 18319], [18191, 18192, 18321], [18191, 18321, 18320], [18192, 18193, 18321], [18193, 18322, 18321], [18193, 18194, 18323], [18193, 18323, 18322], [18194, 18195, 18323], [18195, 18324, 18323], [18195, 18196, 18325], [18195, 18325, 18324], [18196, 18197, 18325], [18197, 18326, 18325], [18197, 18198, 18327], [18197, 18327, 18326], [18198, 18199, 18327], [18199, 18328, 18327], [18199, 18200, 18329], 
[18199, 18329, 18328], [18200, 18201, 18329], [18201, 18330, 18329], [18201, 18202, 18331], [18201, 18331, 18330], [18202, 18203, 18331], [18203, 18332, 18331], [18203, 18204, 18333], [18203, 18333, 18332], [18204, 18205, 18333], [18205, 18334, 18333], [18205, 18206, 18335], [18205, 18335, 18334], [18206, 18207, 18335], [18207, 18336, 18335], [18207, 18208, 18337], [18207, 18337, 18336], [18208, 18209, 18337], [18209, 18338, 18337], [18209, 18210, 18339], [18209, 18339, 18338], [18210, 18211, 18339], [18211, 18340, 18339], [18211, 18212, 18341], [18211, 18341, 18340], [18212, 18213, 18341], [18213, 18342, 18341], [18213, 18214, 18343], [18213, 18343, 18342], [18214, 18215, 18343], [18215, 18344, 18343], [18215, 18216, 18345], [18215, 18345, 18344], [18216, 18217, 18345], [18217, 18346, 18345], [18217, 18218, 18347], [18217, 18347, 18346], [18218, 18219, 18347], [18219, 18348, 18347], [18219, 18220, 18349], [18219, 18349, 18348], [18220, 18221, 18349], [18221, 18350, 18349], [18221, 18222, 18351], [18221, 18351, 18350], [18222, 18223, 18351], [18223, 18352, 18351], [18223, 18224, 18353], [18223, 18353, 18352], [18224, 18225, 18353], [18225, 18354, 18353], [18225, 18226, 18355], [18225, 18355, 18354], [18226, 18227, 18355], [18227, 18356, 18355], [18227, 18228, 18357], [18227, 18357, 18356], [18228, 18229, 18357], [18229, 18358, 18357], [18229, 18230, 18359], [18229, 18359, 18358], [18230, 18231, 18359], [18231, 18360, 18359], [18231, 18232, 18361], [18231, 18361, 18360], [18232, 18233, 18361], [18233, 18362, 18361], [18233, 18234, 18363], [18233, 18363, 18362], [18235, 18236, 18365], [18235, 18365, 18364], [18236, 18237, 18365], [18237, 18366, 18365], [18237, 18238, 18367], [18237, 18367, 18366], [18238, 18239, 18367], [18239, 18368, 18367], [18239, 18240, 18369], [18239, 18369, 18368], [18240, 18241, 18369], [18241, 18370, 18369], [18241, 18242, 18371], [18241, 18371, 18370], [18242, 18243, 18371], [18243, 18372, 18371], [18243, 18244, 18373], [18243, 18373, 18372], [18244, 18245, 18373], [18245, 18374, 18373], [18245, 18246, 18375], [18245, 18375, 18374], [18246, 18247, 18375], [18247, 18376, 18375], [18247, 18248, 18377], [18247, 18377, 18376], [18248, 18249, 18377], [18249, 18378, 18377], [18249, 18250, 18379], [18249, 18379, 18378], [18250, 18251, 18379], [18251, 18380, 18379], [18251, 18252, 18381], [18251, 18381, 18380], [18252, 18253, 18381], [18253, 18382, 18381], [18253, 18254, 18383], [18253, 18383, 18382], [18254, 18255, 18383], [18255, 18384, 18383], [18255, 18256, 18385], [18255, 18385, 18384], [18256, 18257, 18385], [18257, 18386, 18385], [18257, 18258, 18387], [18257, 18387, 18386], [18258, 18259, 18387], [18259, 18388, 18387], [18259, 18260, 18389], [18259, 18389, 18388], [18260, 18261, 18389], [18261, 18390, 18389], [18261, 18262, 18391], [18261, 18391, 18390], [18262, 18263, 18391], [18263, 18392, 18391], [18263, 18264, 18393], [18263, 18393, 18392], [18264, 18265, 18393], [18265, 18394, 18393], [18265, 18266, 18395], [18265, 18395, 18394], [18266, 18267, 18395], [18267, 18396, 18395], [18267, 18268, 18397], [18267, 18397, 18396], [18268, 18269, 18397], [18269, 18398, 18397], [18269, 18270, 18399], [18269, 18399, 18398], [18270, 18271, 18399], [18271, 18400, 18399], [18271, 18272, 18401], [18271, 18401, 18400], [18272, 18273, 18401], [18273, 18402, 18401], [18273, 18274, 18403], [18273, 18403, 18402], [18274, 18275, 18403], [18275, 18404, 18403], [18275, 18276, 18405], [18275, 18405, 18404], [18276, 18277, 18405], [18277, 18406, 18405], [18277, 18278, 18407], [18277, 
18407, 18406], [18278, 18279, 18407], [18279, 18408, 18407], [18279, 18280, 18409], [18279, 18409, 18408], [18280, 18281, 18409], [18281, 18410, 18409], [18281, 18282, 18411], [18281, 18411, 18410], [18282, 18283, 18411], [18283, 18412, 18411], [18283, 18284, 18413], [18283, 18413, 18412], [18284, 18285, 18413], [18285, 18414, 18413], [18285, 18286, 18415], [18285, 18415, 18414], [18286, 18287, 18415], [18287, 18416, 18415], [18287, 18288, 18417], [18287, 18417, 18416], [18288, 18289, 18417], [18289, 18418, 18417], [18289, 18290, 18419], [18289, 18419, 18418], [18290, 18291, 18419], [18291, 18420, 18419], [18291, 18292, 18421], [18291, 18421, 18420], [18292, 18293, 18421], [18293, 18422, 18421], [18293, 18294, 18423], [18293, 18423, 18422], [18294, 18295, 18423], [18295, 18424, 18423], [18295, 18296, 18425], [18295, 18425, 18424], [18296, 18297, 18425], [18297, 18426, 18425], [18297, 18298, 18427], [18297, 18427, 18426], [18298, 18299, 18427], [18299, 18428, 18427], [18299, 18300, 18429], [18299, 18429, 18428], [18300, 18301, 18429], [18301, 18430, 18429], [18301, 18302, 18431], [18301, 18431, 18430], [18302, 18303, 18431], [18303, 18432, 18431], [18303, 18304, 18433], [18303, 18433, 18432], [18304, 18305, 18433], [18305, 18434, 18433], [18305, 18306, 18435], [18305, 18435, 18434], [18306, 18307, 18435], [18307, 18436, 18435], [18307, 18308, 18437], [18307, 18437, 18436], [18308, 18309, 18437], [18309, 18438, 18437], [18309, 18310, 18439], [18309, 18439, 18438], [18310, 18311, 18439], [18311, 18440, 18439], [18311, 18312, 18441], [18311, 18441, 18440], [18312, 18313, 18441], [18313, 18442, 18441], [18313, 18314, 18443], [18313, 18443, 18442], [18314, 18315, 18443], [18315, 18444, 18443], [18315, 18316, 18445], [18315, 18445, 18444], [18316, 18317, 18445], [18317, 18446, 18445], [18317, 18318, 18447], [18317, 18447, 18446], [18318, 18319, 18447], [18319, 18448, 18447], [18319, 18320, 18449], [18319, 18449, 18448], [18320, 18321, 18449], [18321, 18450, 18449], [18321, 18322, 18451], [18321, 18451, 18450], [18322, 18323, 18451], [18323, 18452, 18451], [18323, 18324, 18453], [18323, 18453, 18452], [18324, 18325, 18453], [18325, 18454, 18453], [18325, 18326, 18455], [18325, 18455, 18454], [18326, 18327, 18455], [18327, 18456, 18455], [18327, 18328, 18457], [18327, 18457, 18456], [18328, 18329, 18457], [18329, 18458, 18457], [18329, 18330, 18459], [18329, 18459, 18458], [18330, 18331, 18459], [18331, 18460, 18459], [18331, 18332, 18461], [18331, 18461, 18460], [18332, 18333, 18461], [18333, 18462, 18461], [18333, 18334, 18463], [18333, 18463, 18462], [18334, 18335, 18463], [18335, 18464, 18463], [18335, 18336, 18465], [18335, 18465, 18464], [18336, 18337, 18465], [18337, 18466, 18465], [18337, 18338, 18467], [18337, 18467, 18466], [18338, 18339, 18467], [18339, 18468, 18467], [18339, 18340, 18469], [18339, 18469, 18468], [18340, 18341, 18469], [18341, 18470, 18469], [18341, 18342, 18471], [18341, 18471, 18470], [18342, 18343, 18471], [18343, 18472, 18471], [18343, 18344, 18473], [18343, 18473, 18472], [18344, 18345, 18473], [18345, 18474, 18473], [18345, 18346, 18475], [18345, 18475, 18474], [18346, 18347, 18475], [18347, 18476, 18475], [18347, 18348, 18477], [18347, 18477, 18476], [18348, 18349, 18477], [18349, 18478, 18477], [18349, 18350, 18479], [18349, 18479, 18478], [18350, 18351, 18479], [18351, 18480, 18479], [18351, 18352, 18481], [18351, 18481, 18480], [18352, 18353, 18481], [18353, 18482, 18481], [18353, 18354, 18483], [18353, 18483, 18482], [18354, 18355, 18483], [18355, 18484, 
18483], [18355, 18356, 18485], [18355, 18485, 18484], [18356, 18357, 18485], [18357, 18486, 18485], [18357, 18358, 18487], [18357, 18487, 18486], [18358, 18359, 18487], [18359, 18488, 18487], [18359, 18360, 18489], [18359, 18489, 18488], [18360, 18361, 18489], [18361, 18490, 18489], [18361, 18362, 18491], [18361, 18491, 18490], [18362, 18363, 18491], [18363, 18492, 18491], [18364, 18365, 18493], [18365, 18494, 18493], [18365, 18366, 18495], [18365, 18495, 18494], [18366, 18367, 18495], [18367, 18496, 18495], [18367, 18368, 18497], [18367, 18497, 18496], [18368, 18369, 18497], [18369, 18498, 18497], [18369, 18370, 18499], [18369, 18499, 18498], [18370, 18371, 18499], [18371, 18500, 18499], [18371, 18372, 18501], [18371, 18501, 18500], [18372, 18373, 18501], [18373, 18502, 18501], [18373, 18374, 18503], [18373, 18503, 18502], [18374, 18375, 18503], [18375, 18504, 18503], [18375, 18376, 18505], [18375, 18505, 18504], [18376, 18377, 18505], [18377, 18506, 18505], [18377, 18378, 18507], [18377, 18507, 18506], [18378, 18379, 18507], [18379, 18508, 18507], [18379, 18380, 18509], [18379, 18509, 18508], [18380, 18381, 18509], [18381, 18510, 18509], [18381, 18382, 18511], [18381, 18511, 18510], [18382, 18383, 18511], [18383, 18512, 18511], [18383, 18384, 18513], [18383, 18513, 18512], [18384, 18385, 18513], [18385, 18514, 18513], [18385, 18386, 18515], [18385, 18515, 18514], [18386, 18387, 18515], [18387, 18516, 18515], [18387, 18388, 18517], [18387, 18517, 18516], [18388, 18389, 18517], [18389, 18518, 18517], [18389, 18390, 18519], [18389, 18519, 18518], [18390, 18391, 18519], [18391, 18520, 18519], [18391, 18392, 18521], [18391, 18521, 18520], [18392, 18393, 18521], [18393, 18522, 18521], [18393, 18394, 18523], [18393, 18523, 18522], [18394, 18395, 18523], [18395, 18524, 18523], [18395, 18396, 18525], [18395, 18525, 18524], [18396, 18397, 18525], [18397, 18526, 18525], [18397, 18398, 18527], [18397, 18527, 18526], [18398, 18399, 18527], [18399, 18528, 18527], [18399, 18400, 18529], [18399, 18529, 18528], [18400, 18401, 18529], [18401, 18530, 18529], [18401, 18402, 18531], [18401, 18531, 18530], [18402, 18403, 18531], [18403, 18532, 18531], [18403, 18404, 18533], [18403, 18533, 18532], [18404, 18405, 18533], [18405, 18534, 18533], [18405, 18406, 18535], [18405, 18535, 18534], [18406, 18407, 18535], [18407, 18536, 18535], [18407, 18408, 18537], [18407, 18537, 18536], [18408, 18409, 18537], [18409, 18538, 18537], [18409, 18410, 18539], [18409, 18539, 18538], [18410, 18411, 18539], [18411, 18540, 18539], [18411, 18412, 18541], [18411, 18541, 18540], [18412, 18413, 18541], [18413, 18542, 18541], [18413, 18414, 18543], [18413, 18543, 18542], [18414, 18415, 18543], [18415, 18544, 18543], [18415, 18416, 18545], [18415, 18545, 18544], [18416, 18417, 18545], [18417, 18546, 18545], [18417, 18418, 18547], [18417, 18547, 18546], [18418, 18419, 18547], [18419, 18548, 18547], [18419, 18420, 18549], [18419, 18549, 18548], [18420, 18421, 18549], [18421, 18550, 18549], [18421, 18422, 18551], [18421, 18551, 18550], [18422, 18423, 18551], [18423, 18552, 18551], [18423, 18424, 18553], [18423, 18553, 18552], [18424, 18425, 18553], [18425, 18554, 18553], [18425, 18426, 18555], [18425, 18555, 18554], [18426, 18427, 18555], [18427, 18556, 18555], [18427, 18428, 18557], [18427, 18557, 18556], [18428, 18429, 18557], [18429, 18558, 18557], [18429, 18430, 18559], [18429, 18559, 18558], [18430, 18431, 18559], [18431, 18560, 18559], [18431, 18432, 18561], [18431, 18561, 18560], [18432, 18433, 18561], [18433, 18562, 18561], 
[18433, 18434, 18563], [18433, 18563, 18562], [18434, 18435, 18563], [18435, 18564, 18563], [18435, 18436, 18565], [18435, 18565, 18564], [18436, 18437, 18565], [18437, 18566, 18565], [18437, 18438, 18567], [18437, 18567, 18566], [18438, 18439, 18567], [18439, 18568, 18567], [18439, 18440, 18569], [18439, 18569, 18568], [18440, 18441, 18569], [18441, 18570, 18569], [18441, 18442, 18571], [18441, 18571, 18570], [18442, 18443, 18571], [18443, 18572, 18571], [18443, 18444, 18573], [18443, 18573, 18572], [18444, 18445, 18573], [18445, 18574, 18573], [18445, 18446, 18575], [18445, 18575, 18574], [18446, 18447, 18575], [18447, 18576, 18575], [18447, 18448, 18577], [18447, 18577, 18576], [18448, 18449, 18577], [18449, 18578, 18577], [18449, 18450, 18579], [18449, 18579, 18578], [18450, 18451, 18579], [18451, 18580, 18579], [18451, 18452, 18581], [18451, 18581, 18580], [18452, 18453, 18581], [18453, 18582, 18581], [18453, 18454, 18583], [18453, 18583, 18582], [18454, 18455, 18583], [18455, 18584, 18583], [18455, 18456, 18585], [18455, 18585, 18584], [18456, 18457, 18585], [18457, 18586, 18585], [18457, 18458, 18587], [18457, 18587, 18586], [18458, 18459, 18587], [18459, 18588, 18587], [18459, 18460, 18589], [18459, 18589, 18588], [18460, 18461, 18589], [18461, 18590, 18589], [18461, 18462, 18591], [18461, 18591, 18590], [18462, 18463, 18591], [18463, 18592, 18591], [18463, 18464, 18593], [18463, 18593, 18592], [18464, 18465, 18593], [18465, 18594, 18593], [18465, 18466, 18595], [18465, 18595, 18594], [18466, 18467, 18595], [18467, 18596, 18595], [18467, 18468, 18597], [18467, 18597, 18596], [18468, 18469, 18597], [18469, 18598, 18597], [18469, 18470, 18599], [18469, 18599, 18598], [18470, 18471, 18599], [18471, 18600, 18599], [18471, 18472, 18601], [18471, 18601, 18600], [18472, 18473, 18601], [18473, 18602, 18601], [18473, 18474, 18603], [18473, 18603, 18602], [18474, 18475, 18603], [18475, 18604, 18603], [18475, 18476, 18605], [18475, 18605, 18604], [18476, 18477, 18605], [18477, 18606, 18605], [18477, 18478, 18607], [18477, 18607, 18606], [18478, 18479, 18607], [18479, 18608, 18607], [18479, 18480, 18609], [18479, 18609, 18608], [18480, 18481, 18609], [18481, 18610, 18609], [18481, 18482, 18611], [18481, 18611, 18610], [18482, 18483, 18611], [18483, 18612, 18611], [18483, 18484, 18613], [18483, 18613, 18612], [18484, 18485, 18613], [18485, 18614, 18613], [18485, 18486, 18615], [18485, 18615, 18614], [18486, 18487, 18615], [18487, 18616, 18615], [18487, 18488, 18617], [18487, 18617, 18616], [18488, 18489, 18617], [18489, 18618, 18617], [18489, 18490, 18619], [18489, 18619, 18618], [18490, 18491, 18619], [18491, 18620, 18619], [18491, 18492, 18621], [18491, 18621, 18620], [18493, 18494, 18623], [18493, 18623, 18622], [18494, 18495, 18623], [18495, 18624, 18623], [18495, 18496, 18625], [18495, 18625, 18624], [18496, 18497, 18625], [18497, 18626, 18625], [18497, 18498, 18627], [18497, 18627, 18626], [18498, 18499, 18627], [18499, 18628, 18627], [18499, 18500, 18629], [18499, 18629, 18628], [18500, 18501, 18629], [18501, 18630, 18629], [18501, 18502, 18631], [18501, 18631, 18630], [18502, 18503, 18631], [18503, 18632, 18631], [18503, 18504, 18633], [18503, 18633, 18632], [18504, 18505, 18633], [18505, 18634, 18633], [18505, 18506, 18635], [18505, 18635, 18634], [18506, 18507, 18635], [18507, 18636, 18635], [18507, 18508, 18637], [18507, 18637, 18636], [18508, 18509, 18637], [18509, 18638, 18637], [18509, 18510, 18639], [18509, 18639, 18638], [18510, 18511, 18639], [18511, 18640, 18639], [18511, 
18512, 18641], [18511, 18641, 18640], [18512, 18513, 18641], [18513, 18642, 18641], [18513, 18514, 18643], [18513, 18643, 18642], [18514, 18515, 18643], [18515, 18644, 18643], [18515, 18516, 18645], [18515, 18645, 18644], [18516, 18517, 18645], [18517, 18646, 18645], [18517, 18518, 18647], [18517, 18647, 18646], [18518, 18519, 18647], [18519, 18648, 18647], [18519, 18520, 18649], [18519, 18649, 18648], [18520, 18521, 18649], [18521, 18650, 18649], [18521, 18522, 18651], [18521, 18651, 18650], [18522, 18523, 18651], [18523, 18652, 18651], [18523, 18524, 18653], [18523, 18653, 18652], [18524, 18525, 18653], [18525, 18654, 18653], [18525, 18526, 18655], [18525, 18655, 18654], [18526, 18527, 18655], [18527, 18656, 18655], [18527, 18528, 18657], [18527, 18657, 18656], [18528, 18529, 18657], [18529, 18658, 18657], [18529, 18530, 18659], [18529, 18659, 18658], [18530, 18531, 18659], [18531, 18660, 18659], [18531, 18532, 18661], [18531, 18661, 18660], [18532, 18533, 18661], [18533, 18662, 18661], [18533, 18534, 18663], [18533, 18663, 18662], [18534, 18535, 18663], [18535, 18664, 18663], [18535, 18536, 18665], [18535, 18665, 18664], [18536, 18537, 18665], [18537, 18666, 18665], [18537, 18538, 18667], [18537, 18667, 18666], [18538, 18539, 18667], [18539, 18668, 18667], [18539, 18540, 18669], [18539, 18669, 18668], [18540, 18541, 18669], [18541, 18670, 18669], [18541, 18542, 18671], [18541, 18671, 18670], [18542, 18543, 18671], [18543, 18672, 18671], [18543, 18544, 18673], [18543, 18673, 18672], [18544, 18545, 18673], [18545, 18674, 18673], [18545, 18546, 18675], [18545, 18675, 18674], [18546, 18547, 18675], [18547, 18676, 18675], [18547, 18548, 18677], [18547, 18677, 18676], [18548, 18549, 18677], [18549, 18678, 18677], [18549, 18550, 18679], [18549, 18679, 18678], [18550, 18551, 18679], [18551, 18680, 18679], [18551, 18552, 18681], [18551, 18681, 18680], [18552, 18553, 18681], [18553, 18682, 18681], [18553, 18554, 18683], [18553, 18683, 18682], [18554, 18555, 18683], [18555, 18684, 18683], [18555, 18556, 18685], [18555, 18685, 18684], [18556, 18557, 18685], [18557, 18686, 18685], [18557, 18558, 18687], [18557, 18687, 18686], [18558, 18559, 18687], [18559, 18688, 18687], [18559, 18560, 18689], [18559, 18689, 18688], [18560, 18561, 18689], [18561, 18690, 18689], [18561, 18562, 18691], [18561, 18691, 18690], [18562, 18563, 18691], [18563, 18692, 18691], [18563, 18564, 18693], [18563, 18693, 18692], [18564, 18565, 18693], [18565, 18694, 18693], [18565, 18566, 18695], [18565, 18695, 18694], [18566, 18567, 18695], [18567, 18696, 18695], [18567, 18568, 18697], [18567, 18697, 18696], [18568, 18569, 18697], [18569, 18698, 18697], [18569, 18570, 18699], [18569, 18699, 18698], [18570, 18571, 18699], [18571, 18700, 18699], [18571, 18572, 18701], [18571, 18701, 18700], [18572, 18573, 18701], [18573, 18702, 18701], [18573, 18574, 18703], [18573, 18703, 18702], [18574, 18575, 18703], [18575, 18704, 18703], [18575, 18576, 18705], [18575, 18705, 18704], [18576, 18577, 18705], [18577, 18706, 18705], [18577, 18578, 18707], [18577, 18707, 18706], [18578, 18579, 18707], [18579, 18708, 18707], [18579, 18580, 18709], [18579, 18709, 18708], [18580, 18581, 18709], [18581, 18710, 18709], [18581, 18582, 18711], [18581, 18711, 18710], [18582, 18583, 18711], [18583, 18712, 18711], [18583, 18584, 18713], [18583, 18713, 18712], [18584, 18585, 18713], [18585, 18714, 18713], [18585, 18586, 18715], [18585, 18715, 18714], [18586, 18587, 18715], [18587, 18716, 18715], [18587, 18588, 18717], [18587, 18717, 18716], [18588, 18589, 
18717], [18589, 18718, 18717], [18589, 18590, 18719], [18589, 18719, 18718], [18590, 18591, 18719], [18591, 18720, 18719], [18591, 18592, 18721], [18591, 18721, 18720], [18592, 18593, 18721], [18593, 18722, 18721], [18593, 18594, 18723], [18593, 18723, 18722], [18594, 18595, 18723], [18595, 18724, 18723], [18595, 18596, 18725], [18595, 18725, 18724], [18596, 18597, 18725], [18597, 18726, 18725], [18597, 18598, 18727], [18597, 18727, 18726], [18598, 18599, 18727], [18599, 18728, 18727], [18599, 18600, 18729], [18599, 18729, 18728], [18600, 18601, 18729], [18601, 18730, 18729], [18601, 18602, 18731], [18601, 18731, 18730], [18602, 18603, 18731], [18603, 18732, 18731], [18603, 18604, 18733], [18603, 18733, 18732], [18604, 18605, 18733], [18605, 18734, 18733], [18605, 18606, 18735], [18605, 18735, 18734], [18606, 18607, 18735], [18607, 18736, 18735], [18607, 18608, 18737], [18607, 18737, 18736], [18608, 18609, 18737], [18609, 18738, 18737], [18609, 18610, 18739], [18609, 18739, 18738], [18610, 18611, 18739], [18611, 18740, 18739], [18611, 18612, 18741], [18611, 18741, 18740], [18612, 18613, 18741], [18613, 18742, 18741], [18613, 18614, 18743], [18613, 18743, 18742], [18614, 18615, 18743], [18615, 18744, 18743], [18615, 18616, 18745], [18615, 18745, 18744], [18616, 18617, 18745], [18617, 18746, 18745], [18617, 18618, 18747], [18617, 18747, 18746], [18618, 18619, 18747], [18619, 18748, 18747], [18619, 18620, 18749], [18619, 18749, 18748], [18620, 18621, 18749], [18621, 18750, 18749], [18622, 18623, 18751], [18623, 18752, 18751], [18623, 18624, 18753], [18623, 18753, 18752], [18624, 18625, 18753], [18625, 18754, 18753], [18625, 18626, 18755], [18625, 18755, 18754], [18626, 18627, 18755], [18627, 18756, 18755], [18627, 18628, 18757], [18627, 18757, 18756], [18628, 18629, 18757], [18629, 18758, 18757], [18629, 18630, 18759], [18629, 18759, 18758], [18630, 18631, 18759], [18631, 18760, 18759], [18631, 18632, 18761], [18631, 18761, 18760], [18632, 18633, 18761], [18633, 18762, 18761], [18633, 18634, 18763], [18633, 18763, 18762], [18634, 18635, 18763], [18635, 18764, 18763], [18635, 18636, 18765], [18635, 18765, 18764], [18636, 18637, 18765], [18637, 18766, 18765], [18637, 18638, 18767], [18637, 18767, 18766], [18638, 18639, 18767], [18639, 18768, 18767], [18639, 18640, 18769], [18639, 18769, 18768], [18640, 18641, 18769], [18641, 18770, 18769], [18641, 18642, 18771], [18641, 18771, 18770], [18642, 18643, 18771], [18643, 18772, 18771], [18643, 18644, 18773], [18643, 18773, 18772], [18644, 18645, 18773], [18645, 18774, 18773], [18645, 18646, 18775], [18645, 18775, 18774], [18646, 18647, 18775], [18647, 18776, 18775], [18647, 18648, 18777], [18647, 18777, 18776], [18648, 18649, 18777], [18649, 18778, 18777], [18649, 18650, 18779], [18649, 18779, 18778], [18650, 18651, 18779], [18651, 18780, 18779], [18651, 18652, 18781], [18651, 18781, 18780], [18652, 18653, 18781], [18653, 18782, 18781], [18653, 18654, 18783], [18653, 18783, 18782], [18654, 18655, 18783], [18655, 18784, 18783], [18655, 18656, 18785], [18655, 18785, 18784], [18656, 18657, 18785], [18657, 18786, 18785], [18657, 18658, 18787], [18657, 18787, 18786], [18658, 18659, 18787], [18659, 18788, 18787], [18659, 18660, 18789], [18659, 18789, 18788], [18660, 18661, 18789], [18661, 18790, 18789], [18661, 18662, 18791], [18661, 18791, 18790], [18662, 18663, 18791], [18663, 18792, 18791], [18663, 18664, 18793], [18663, 18793, 18792], [18664, 18665, 18793], [18665, 18794, 18793], [18665, 18666, 18795], [18665, 18795, 18794], [18666, 18667, 18795], 
[18667, 18796, 18795], [18667, 18668, 18797], [18667, 18797, 18796], [18668, 18669, 18797], [18669, 18798, 18797], [18669, 18670, 18799], [18669, 18799, 18798], [18670, 18671, 18799], [18671, 18800, 18799], [18671, 18672, 18801], [18671, 18801, 18800], [18672, 18673, 18801], [18673, 18802, 18801], [18673, 18674, 18803], [18673, 18803, 18802], [18674, 18675, 18803], [18675, 18804, 18803], [18675, 18676, 18805], [18675, 18805, 18804], [18676, 18677, 18805], [18677, 18806, 18805], [18677, 18678, 18807], [18677, 18807, 18806], [18678, 18679, 18807], [18679, 18808, 18807], [18679, 18680, 18809], [18679, 18809, 18808], [18680, 18681, 18809], [18681, 18810, 18809], [18681, 18682, 18811], [18681, 18811, 18810], [18682, 18683, 18811], [18683, 18812, 18811], [18683, 18684, 18813], [18683, 18813, 18812], [18684, 18685, 18813], [18685, 18814, 18813], [18685, 18686, 18815], [18685, 18815, 18814], [18686, 18687, 18815], [18687, 18816, 18815], [18687, 18688, 18817], [18687, 18817, 18816], [18688, 18689, 18817], [18689, 18818, 18817], [18689, 18690, 18819], [18689, 18819, 18818], [18690, 18691, 18819], [18691, 18820, 18819], [18691, 18692, 18821], [18691, 18821, 18820], [18692, 18693, 18821], [18693, 18822, 18821], [18693, 18694, 18823], [18693, 18823, 18822], [18694, 18695, 18823], [18695, 18824, 18823], [18695, 18696, 18825], [18695, 18825, 18824], [18696, 18697, 18825], [18697, 18826, 18825], [18697, 18698, 18827], [18697, 18827, 18826], [18698, 18699, 18827], [18699, 18828, 18827], [18699, 18700, 18829], [18699, 18829, 18828], [18700, 18701, 18829], [18701, 18830, 18829], [18701, 18702, 18831], [18701, 18831, 18830], [18702, 18703, 18831], [18703, 18832, 18831], [18703, 18704, 18833], [18703, 18833, 18832], [18704, 18705, 18833], [18705, 18834, 18833], [18705, 18706, 18835], [18705, 18835, 18834], [18706, 18707, 18835], [18707, 18836, 18835], [18707, 18708, 18837], [18707, 18837, 18836], [18708, 18709, 18837], [18709, 18838, 18837], [18709, 18710, 18839], [18709, 18839, 18838], [18710, 18711, 18839], [18711, 18840, 18839], [18711, 18712, 18841], [18711, 18841, 18840], [18712, 18713, 18841], [18713, 18842, 18841], [18713, 18714, 18843], [18713, 18843, 18842], [18714, 18715, 18843], [18715, 18844, 18843], [18715, 18716, 18845], [18715, 18845, 18844], [18716, 18717, 18845], [18717, 18846, 18845], [18717, 18718, 18847], [18717, 18847, 18846], [18718, 18719, 18847], [18719, 18848, 18847], [18719, 18720, 18849], [18719, 18849, 18848], [18720, 18721, 18849], [18721, 18850, 18849], [18721, 18722, 18851], [18721, 18851, 18850], [18722, 18723, 18851], [18723, 18852, 18851], [18723, 18724, 18853], [18723, 18853, 18852], [18724, 18725, 18853], [18725, 18854, 18853], [18725, 18726, 18855], [18725, 18855, 18854], [18726, 18727, 18855], [18727, 18856, 18855], [18727, 18728, 18857], [18727, 18857, 18856], [18728, 18729, 18857], [18729, 18858, 18857], [18729, 18730, 18859], [18729, 18859, 18858], [18730, 18731, 18859], [18731, 18860, 18859], [18731, 18732, 18861], [18731, 18861, 18860], [18732, 18733, 18861], [18733, 18862, 18861], [18733, 18734, 18863], [18733, 18863, 18862], [18734, 18735, 18863], [18735, 18864, 18863], [18735, 18736, 18865], [18735, 18865, 18864], [18736, 18737, 18865], [18737, 18866, 18865], [18737, 18738, 18867], [18737, 18867, 18866], [18738, 18739, 18867], [18739, 18868, 18867], [18739, 18740, 18869], [18739, 18869, 18868], [18740, 18741, 18869], [18741, 18870, 18869], [18741, 18742, 18871], [18741, 18871, 18870], [18742, 18743, 18871], [18743, 18872, 18871], [18743, 18744, 18873], [18743, 
18873, 18872], [18744, 18745, 18873], [18745, 18874, 18873], [18745, 18746, 18875], [18745, 18875, 18874], [18746, 18747, 18875], [18747, 18876, 18875], [18747, 18748, 18877], [18747, 18877, 18876], [18748, 18749, 18877], [18749, 18878, 18877], [18749, 18750, 18879], [18749, 18879, 18878], [18751, 18752, 18881], [18751, 18881, 18880], [18752, 18753, 18881], [18753, 18882, 18881], [18753, 18754, 18883], [18753, 18883, 18882], [18754, 18755, 18883], [18755, 18884, 18883], [18755, 18756, 18885], [18755, 18885, 18884], [18756, 18757, 18885], [18757, 18886, 18885], [18757, 18758, 18887], [18757, 18887, 18886], [18758, 18759, 18887], [18759, 18888, 18887], [18759, 18760, 18889], [18759, 18889, 18888], [18760, 18761, 18889], [18761, 18890, 18889], [18761, 18762, 18891], [18761, 18891, 18890], [18762, 18763, 18891], [18763, 18892, 18891], [18763, 18764, 18893], [18763, 18893, 18892], [18764, 18765, 18893], [18765, 18894, 18893], [18765, 18766, 18895], [18765, 18895, 18894], [18766, 18767, 18895], [18767, 18896, 18895], [18767, 18768, 18897], [18767, 18897, 18896], [18768, 18769, 18897], [18769, 18898, 18897], [18769, 18770, 18899], [18769, 18899, 18898], [18770, 18771, 18899], [18771, 18900, 18899], [18771, 18772, 18901], [18771, 18901, 18900], [18772, 18773, 18901], [18773, 18902, 18901], [18773, 18774, 18903], [18773, 18903, 18902], [18774, 18775, 18903], [18775, 18904, 18903], [18775, 18776, 18905], [18775, 18905, 18904], [18776, 18777, 18905], [18777, 18906, 18905], [18777, 18778, 18907], [18777, 18907, 18906], [18778, 18779, 18907], [18779, 18908, 18907], [18779, 18780, 18909], [18779, 18909, 18908], [18780, 18781, 18909], [18781, 18910, 18909], [18781, 18782, 18911], [18781, 18911, 18910], [18782, 18783, 18911], [18783, 18912, 18911], [18783, 18784, 18913], [18783, 18913, 18912], [18784, 18785, 18913], [18785, 18914, 18913], [18785, 18786, 18915], [18785, 18915, 18914], [18786, 18787, 18915], [18787, 18916, 18915], [18787, 18788, 18917], [18787, 18917, 18916], [18788, 18789, 18917], [18789, 18918, 18917], [18789, 18790, 18919], [18789, 18919, 18918], [18790, 18791, 18919], [18791, 18920, 18919], [18791, 18792, 18921], [18791, 18921, 18920], [18792, 18793, 18921], [18793, 18922, 18921], [18793, 18794, 18923], [18793, 18923, 18922], [18794, 18795, 18923], [18795, 18924, 18923], [18795, 18796, 18925], [18795, 18925, 18924], [18796, 18797, 18925], [18797, 18926, 18925], [18797, 18798, 18927], [18797, 18927, 18926], [18798, 18799, 18927], [18799, 18928, 18927], [18799, 18800, 18929], [18799, 18929, 18928], [18800, 18801, 18929], [18801, 18930, 18929], [18801, 18802, 18931], [18801, 18931, 18930], [18802, 18803, 18931], [18803, 18932, 18931], [18803, 18804, 18933], [18803, 18933, 18932], [18804, 18805, 18933], [18805, 18934, 18933], [18805, 18806, 18935], [18805, 18935, 18934], [18806, 18807, 18935], [18807, 18936, 18935], [18807, 18808, 18937], [18807, 18937, 18936], [18808, 18809, 18937], [18809, 18938, 18937], [18809, 18810, 18939], [18809, 18939, 18938], [18810, 18811, 18939], [18811, 18940, 18939], [18811, 18812, 18941], [18811, 18941, 18940], [18812, 18813, 18941], [18813, 18942, 18941], [18813, 18814, 18943], [18813, 18943, 18942], [18814, 18815, 18943], [18815, 18944, 18943], [18815, 18816, 18945], [18815, 18945, 18944], [18816, 18817, 18945], [18817, 18946, 18945], [18817, 18818, 18947], [18817, 18947, 18946], [18818, 18819, 18947], [18819, 18948, 18947], [18819, 18820, 18949], [18819, 18949, 18948], [18820, 18821, 18949], [18821, 18950, 18949], [18821, 18822, 18951], [18821, 18951, 
18950], [18822, 18823, 18951], [18823, 18952, 18951], [18823, 18824, 18953], [18823, 18953, 18952], [18824, 18825, 18953], [18825, 18954, 18953], [18825, 18826, 18955], [18825, 18955, 18954], [18826, 18827, 18955], [18827, 18956, 18955], [18827, 18828, 18957], [18827, 18957, 18956], [18828, 18829, 18957], [18829, 18958, 18957], [18829, 18830, 18959], [18829, 18959, 18958], [18830, 18831, 18959], [18831, 18960, 18959], [18831, 18832, 18961], [18831, 18961, 18960], [18832, 18833, 18961], [18833, 18962, 18961], [18833, 18834, 18963], [18833, 18963, 18962], [18834, 18835, 18963], [18835, 18964, 18963], [18835, 18836, 18965], [18835, 18965, 18964], [18836, 18837, 18965], [18837, 18966, 18965], [18837, 18838, 18967], [18837, 18967, 18966], [18838, 18839, 18967], [18839, 18968, 18967], [18839, 18840, 18969], [18839, 18969, 18968], [18840, 18841, 18969], [18841, 18970, 18969], [18841, 18842, 18971], [18841, 18971, 18970], [18842, 18843, 18971], [18843, 18972, 18971], [18843, 18844, 18973], [18843, 18973, 18972], [18844, 18845, 18973], [18845, 18974, 18973], [18845, 18846, 18975], [18845, 18975, 18974], [18846, 18847, 18975], [18847, 18976, 18975], [18847, 18848, 18977], [18847, 18977, 18976], [18848, 18849, 18977], [18849, 18978, 18977], [18849, 18850, 18979], [18849, 18979, 18978], [18850, 18851, 18979], [18851, 18980, 18979], [18851, 18852, 18981], [18851, 18981, 18980], [18852, 18853, 18981], [18853, 18982, 18981], [18853, 18854, 18983], [18853, 18983, 18982], [18854, 18855, 18983], [18855, 18984, 18983], [18855, 18856, 18985], [18855, 18985, 18984], [18856, 18857, 18985], [18857, 18986, 18985], [18857, 18858, 18987], [18857, 18987, 18986], [18858, 18859, 18987], [18859, 18988, 18987], [18859, 18860, 18989], [18859, 18989, 18988], [18860, 18861, 18989], [18861, 18990, 18989], [18861, 18862, 18991], [18861, 18991, 18990], [18862, 18863, 18991], [18863, 18992, 18991], [18863, 18864, 18993], [18863, 18993, 18992], [18864, 18865, 18993], [18865, 18994, 18993], [18865, 18866, 18995], [18865, 18995, 18994], [18866, 18867, 18995], [18867, 18996, 18995], [18867, 18868, 18997], [18867, 18997, 18996], [18868, 18869, 18997], [18869, 18998, 18997], [18869, 18870, 18999], [18869, 18999, 18998], [18870, 18871, 18999], [18871, 19000, 18999], [18871, 18872, 19001], [18871, 19001, 19000], [18872, 18873, 19001], [18873, 19002, 19001], [18873, 18874, 19003], [18873, 19003, 19002], [18874, 18875, 19003], [18875, 19004, 19003], [18875, 18876, 19005], [18875, 19005, 19004], [18876, 18877, 19005], [18877, 19006, 19005], [18877, 18878, 19007], [18877, 19007, 19006], [18878, 18879, 19007], [18879, 19008, 19007], [18880, 18881, 19009], [18881, 19010, 19009], [18881, 18882, 19011], [18881, 19011, 19010], [18882, 18883, 19011], [18883, 19012, 19011], [18883, 18884, 19013], [18883, 19013, 19012], [18884, 18885, 19013], [18885, 19014, 19013], [18885, 18886, 19015], [18885, 19015, 19014], [18886, 18887, 19015], [18887, 19016, 19015], [18887, 18888, 19017], [18887, 19017, 19016], [18888, 18889, 19017], [18889, 19018, 19017], [18889, 18890, 19019], [18889, 19019, 19018], [18890, 18891, 19019], [18891, 19020, 19019], [18891, 18892, 19021], [18891, 19021, 19020], [18892, 18893, 19021], [18893, 19022, 19021], [18893, 18894, 19023], [18893, 19023, 19022], [18894, 18895, 19023], [18895, 19024, 19023], [18895, 18896, 19025], [18895, 19025, 19024], [18896, 18897, 19025], [18897, 19026, 19025], [18897, 18898, 19027], [18897, 19027, 19026], [18898, 18899, 19027], [18899, 19028, 19027], [18899, 18900, 19029], [18899, 19029, 19028], 
[18900, 18901, 19029], [18901, 19030, 19029], [18901, 18902, 19031], [18901, 19031, 19030], [18902, 18903, 19031], [18903, 19032, 19031], [18903, 18904, 19033], [18903, 19033, 19032], [18904, 18905, 19033], [18905, 19034, 19033], [18905, 18906, 19035], [18905, 19035, 19034], [18906, 18907, 19035], [18907, 19036, 19035], [18907, 18908, 19037], [18907, 19037, 19036], [18908, 18909, 19037], [18909, 19038, 19037], [18909, 18910, 19039], [18909, 19039, 19038], [18910, 18911, 19039], [18911, 19040, 19039], [18911, 18912, 19041], [18911, 19041, 19040], [18912, 18913, 19041], [18913, 19042, 19041], [18913, 18914, 19043], [18913, 19043, 19042], [18914, 18915, 19043], [18915, 19044, 19043], [18915, 18916, 19045], [18915, 19045, 19044], [18916, 18917, 19045], [18917, 19046, 19045], [18917, 18918, 19047], [18917, 19047, 19046], [18918, 18919, 19047], [18919, 19048, 19047], [18919, 18920, 19049], [18919, 19049, 19048], [18920, 18921, 19049], [18921, 19050, 19049], [18921, 18922, 19051], [18921, 19051, 19050], [18922, 18923, 19051], [18923, 19052, 19051], [18923, 18924, 19053], [18923, 19053, 19052], [18924, 18925, 19053], [18925, 19054, 19053], [18925, 18926, 19055], [18925, 19055, 19054], [18926, 18927, 19055], [18927, 19056, 19055], [18927, 18928, 19057], [18927, 19057, 19056], [18928, 18929, 19057], [18929, 19058, 19057], [18929, 18930, 19059], [18929, 19059, 19058], [18930, 18931, 19059], [18931, 19060, 19059], [18931, 18932, 19061], [18931, 19061, 19060], [18932, 18933, 19061], [18933, 19062, 19061], [18933, 18934, 19063], [18933, 19063, 19062], [18934, 18935, 19063], [18935, 19064, 19063], [18935, 18936, 19065], [18935, 19065, 19064], [18936, 18937, 19065], [18937, 19066, 19065], [18937, 18938, 19067], [18937, 19067, 19066], [18938, 18939, 19067], [18939, 19068, 19067], [18939, 18940, 19069], [18939, 19069, 19068], [18940, 18941, 19069], [18941, 19070, 19069], [18941, 18942, 19071], [18941, 19071, 19070], [18942, 18943, 19071], [18943, 19072, 19071], [18943, 18944, 19073], [18943, 19073, 19072], [18944, 18945, 19073], [18945, 19074, 19073], [18945, 18946, 19075], [18945, 19075, 19074], [18946, 18947, 19075], [18947, 19076, 19075], [18947, 18948, 19077], [18947, 19077, 19076], [18948, 18949, 19077], [18949, 19078, 19077], [18949, 18950, 19079], [18949, 19079, 19078], [18950, 18951, 19079], [18951, 19080, 19079], [18951, 18952, 19081], [18951, 19081, 19080], [18952, 18953, 19081], [18953, 19082, 19081], [18953, 18954, 19083], [18953, 19083, 19082], [18954, 18955, 19083], [18955, 19084, 19083], [18955, 18956, 19085], [18955, 19085, 19084], [18956, 18957, 19085], [18957, 19086, 19085], [18957, 18958, 19087], [18957, 19087, 19086], [18958, 18959, 19087], [18959, 19088, 19087], [18959, 18960, 19089], [18959, 19089, 19088], [18960, 18961, 19089], [18961, 19090, 19089], [18961, 18962, 19091], [18961, 19091, 19090], [18962, 18963, 19091], [18963, 19092, 19091], [18963, 18964, 19093], [18963, 19093, 19092], [18964, 18965, 19093], [18965, 19094, 19093], [18965, 18966, 19095], [18965, 19095, 19094], [18966, 18967, 19095], [18967, 19096, 19095], [18967, 18968, 19097], [18967, 19097, 19096], [18968, 18969, 19097], [18969, 19098, 19097], [18969, 18970, 19099], [18969, 19099, 19098], [18970, 18971, 19099], [18971, 19100, 19099], [18971, 18972, 19101], [18971, 19101, 19100], [18972, 18973, 19101], [18973, 19102, 19101], [18973, 18974, 19103], [18973, 19103, 19102], [18974, 18975, 19103], [18975, 19104, 19103], [18975, 18976, 19105], [18975, 19105, 19104], [18976, 18977, 19105], [18977, 19106, 19105], [18977, 
18978, 19107], [18977, 19107, 19106], [18978, 18979, 19107], [18979, 19108, 19107], [18979, 18980, 19109], [18979, 19109, 19108], [18980, 18981, 19109], [18981, 19110, 19109], [18981, 18982, 19111], [18981, 19111, 19110], [18982, 18983, 19111], [18983, 19112, 19111], [18983, 18984, 19113], [18983, 19113, 19112], [18984, 18985, 19113], [18985, 19114, 19113], [18985, 18986, 19115], [18985, 19115, 19114], [18986, 18987, 19115], [18987, 19116, 19115], [18987, 18988, 19117], [18987, 19117, 19116], [18988, 18989, 19117], [18989, 19118, 19117], [18989, 18990, 19119], [18989, 19119, 19118], [18990, 18991, 19119], [18991, 19120, 19119], [18991, 18992, 19121], [18991, 19121, 19120], [18992, 18993, 19121], [18993, 19122, 19121], [18993, 18994, 19123], [18993, 19123, 19122], [18994, 18995, 19123], [18995, 19124, 19123], [18995, 18996, 19125], [18995, 19125, 19124], [18996, 18997, 19125], [18997, 19126, 19125], [18997, 18998, 19127], [18997, 19127, 19126], [18998, 18999, 19127], [18999, 19128, 19127], [18999, 19000, 19129], [18999, 19129, 19128], [19000, 19001, 19129], [19001, 19130, 19129], [19001, 19002, 19131], [19001, 19131, 19130], [19002, 19003, 19131], [19003, 19132, 19131], [19003, 19004, 19133], [19003, 19133, 19132], [19004, 19005, 19133], [19005, 19134, 19133], [19005, 19006, 19135], [19005, 19135, 19134], [19006, 19007, 19135], [19007, 19136, 19135], [19007, 19008, 19137], [19007, 19137, 19136], [19009, 19010, 19139], [19009, 19139, 19138], [19010, 19011, 19139], [19011, 19140, 19139], [19011, 19012, 19141], [19011, 19141, 19140], [19012, 19013, 19141], [19013, 19142, 19141], [19013, 19014, 19143], [19013, 19143, 19142], [19014, 19015, 19143], [19015, 19144, 19143], [19015, 19016, 19145], [19015, 19145, 19144], [19016, 19017, 19145], [19017, 19146, 19145], [19017, 19018, 19147], [19017, 19147, 19146], [19018, 19019, 19147], [19019, 19148, 19147], [19019, 19020, 19149], [19019, 19149, 19148], [19020, 19021, 19149], [19021, 19150, 19149], [19021, 19022, 19151], [19021, 19151, 19150], [19022, 19023, 19151], [19023, 19152, 19151], [19023, 19024, 19153], [19023, 19153, 19152], [19024, 19025, 19153], [19025, 19154, 19153], [19025, 19026, 19155], [19025, 19155, 19154], [19026, 19027, 19155], [19027, 19156, 19155], [19027, 19028, 19157], [19027, 19157, 19156], [19028, 19029, 19157], [19029, 19158, 19157], [19029, 19030, 19159], [19029, 19159, 19158], [19030, 19031, 19159], [19031, 19160, 19159], [19031, 19032, 19161], [19031, 19161, 19160], [19032, 19033, 19161], [19033, 19162, 19161], [19033, 19034, 19163], [19033, 19163, 19162], [19034, 19035, 19163], [19035, 19164, 19163], [19035, 19036, 19165], [19035, 19165, 19164], [19036, 19037, 19165], [19037, 19166, 19165], [19037, 19038, 19167], [19037, 19167, 19166], [19038, 19039, 19167], [19039, 19168, 19167], [19039, 19040, 19169], [19039, 19169, 19168], [19040, 19041, 19169], [19041, 19170, 19169], [19041, 19042, 19171], [19041, 19171, 19170], [19042, 19043, 19171], [19043, 19172, 19171], [19043, 19044, 19173], [19043, 19173, 19172], [19044, 19045, 19173], [19045, 19174, 19173], [19045, 19046, 19175], [19045, 19175, 19174], [19046, 19047, 19175], [19047, 19176, 19175], [19047, 19048, 19177], [19047, 19177, 19176], [19048, 19049, 19177], [19049, 19178, 19177], [19049, 19050, 19179], [19049, 19179, 19178], [19050, 19051, 19179], [19051, 19180, 19179], [19051, 19052, 19181], [19051, 19181, 19180], [19052, 19053, 19181], [19053, 19182, 19181], [19053, 19054, 19183], [19053, 19183, 19182], [19054, 19055, 19183], [19055, 19184, 19183], [19055, 19056, 
19185], [19055, 19185, 19184], [19056, 19057, 19185], [19057, 19186, 19185], [19057, 19058, 19187], [19057, 19187, 19186], [19058, 19059, 19187], [19059, 19188, 19187], [19059, 19060, 19189], [19059, 19189, 19188], [19060, 19061, 19189], [19061, 19190, 19189], [19061, 19062, 19191], [19061, 19191, 19190], [19062, 19063, 19191], [19063, 19192, 19191], [19063, 19064, 19193], [19063, 19193, 19192], [19064, 19065, 19193], [19065, 19194, 19193], [19065, 19066, 19195], [19065, 19195, 19194], [19066, 19067, 19195], [19067, 19196, 19195], [19067, 19068, 19197], [19067, 19197, 19196], [19068, 19069, 19197], [19069, 19198, 19197], [19069, 19070, 19199], [19069, 19199, 19198], [19070, 19071, 19199], [19071, 19200, 19199], [19071, 19072, 19201], [19071, 19201, 19200], [19072, 19073, 19201], [19073, 19202, 19201], [19073, 19074, 19203], [19073, 19203, 19202], [19074, 19075, 19203], [19075, 19204, 19203], [19075, 19076, 19205], [19075, 19205, 19204], [19076, 19077, 19205], [19077, 19206, 19205], [19077, 19078, 19207], [19077, 19207, 19206], [19078, 19079, 19207], [19079, 19208, 19207], [19079, 19080, 19209], [19079, 19209, 19208], [19080, 19081, 19209], [19081, 19210, 19209], [19081, 19082, 19211], [19081, 19211, 19210], [19082, 19083, 19211], [19083, 19212, 19211], [19083, 19084, 19213], [19083, 19213, 19212], [19084, 19085, 19213], [19085, 19214, 19213], [19085, 19086, 19215], [19085, 19215, 19214], [19086, 19087, 19215], [19087, 19216, 19215], [19087, 19088, 19217], [19087, 19217, 19216], [19088, 19089, 19217], [19089, 19218, 19217], [19089, 19090, 19219], [19089, 19219, 19218], [19090, 19091, 19219], [19091, 19220, 19219], [19091, 19092, 19221], [19091, 19221, 19220], [19092, 19093, 19221], [19093, 19222, 19221], [19093, 19094, 19223], [19093, 19223, 19222], [19094, 19095, 19223], [19095, 19224, 19223], [19095, 19096, 19225], [19095, 19225, 19224], [19096, 19097, 19225], [19097, 19226, 19225], [19097, 19098, 19227], [19097, 19227, 19226], [19098, 19099, 19227], [19099, 19228, 19227], [19099, 19100, 19229], [19099, 19229, 19228], [19100, 19101, 19229], [19101, 19230, 19229], [19101, 19102, 19231], [19101, 19231, 19230], [19102, 19103, 19231], [19103, 19232, 19231], [19103, 19104, 19233], [19103, 19233, 19232], [19104, 19105, 19233], [19105, 19234, 19233], [19105, 19106, 19235], [19105, 19235, 19234], [19106, 19107, 19235], [19107, 19236, 19235], [19107, 19108, 19237], [19107, 19237, 19236], [19108, 19109, 19237], [19109, 19238, 19237], [19109, 19110, 19239], [19109, 19239, 19238], [19110, 19111, 19239], [19111, 19240, 19239], [19111, 19112, 19241], [19111, 19241, 19240], [19112, 19113, 19241], [19113, 19242, 19241], [19113, 19114, 19243], [19113, 19243, 19242], [19114, 19115, 19243], [19115, 19244, 19243], [19115, 19116, 19245], [19115, 19245, 19244], [19116, 19117, 19245], [19117, 19246, 19245], [19117, 19118, 19247], [19117, 19247, 19246], [19118, 19119, 19247], [19119, 19248, 19247], [19119, 19120, 19249], [19119, 19249, 19248], [19120, 19121, 19249], [19121, 19250, 19249], [19121, 19122, 19251], [19121, 19251, 19250], [19122, 19123, 19251], [19123, 19252, 19251], [19123, 19124, 19253], [19123, 19253, 19252], [19124, 19125, 19253], [19125, 19254, 19253], [19125, 19126, 19255], [19125, 19255, 19254], [19126, 19127, 19255], [19127, 19256, 19255], [19127, 19128, 19257], [19127, 19257, 19256], [19128, 19129, 19257], [19129, 19258, 19257], [19129, 19130, 19259], [19129, 19259, 19258], [19130, 19131, 19259], [19131, 19260, 19259], [19131, 19132, 19261], [19131, 19261, 19260], [19132, 19133, 19261], 
[19133, 19262, 19261], [19133, 19134, 19263], [19133, 19263, 19262], [19134, 19135, 19263], [19135, 19264, 19263], [19135, 19136, 19265], [19135, 19265, 19264], [19136, 19137, 19265], [19137, 19266, 19265], [19138, 19139, 0], [19139, 1, 0], [19139, 19140, 2], [19139, 2, 1], [19140, 19141, 2], [19141, 3, 2], [19141, 19142, 4], [19141, 4, 3], [19142, 19143, 4], [19143, 5, 4], [19143, 19144, 6], [19143, 6, 5], [19144, 19145, 6], [19145, 7, 6], [19145, 19146, 8], [19145, 8, 7], [19146, 19147, 8], [19147, 9, 8], [19147, 19148, 10], [19147, 10, 9], [19148, 19149, 10], [19149, 11, 10], [19149, 19150, 12], [19149, 12, 11], [19150, 19151, 12], [19151, 13, 12], [19151, 19152, 14], [19151, 14, 13], [19152, 19153, 14], [19153, 15, 14], [19153, 19154, 16], [19153, 16, 15], [19154, 19155, 16], [19155, 17, 16], [19155, 19156, 18], [19155, 18, 17], [19156, 19157, 18], [19157, 19, 18], [19157, 19158, 20], [19157, 20, 19], [19158, 19159, 20], [19159, 21, 20], [19159, 19160, 22], [19159, 22, 21], [19160, 19161, 22], [19161, 23, 22], [19161, 19162, 24], [19161, 24, 23], [19162, 19163, 24], [19163, 25, 24], [19163, 19164, 26], [19163, 26, 25], [19164, 19165, 26], [19165, 27, 26], [19165, 19166, 28], [19165, 28, 27], [19166, 19167, 28], [19167, 29, 28], [19167, 19168, 30], [19167, 30, 29], [19168, 19169, 30], [19169, 31, 30], [19169, 19170, 32], [19169, 32, 31], [19170, 19171, 32], [19171, 33, 32], [19171, 19172, 34], [19171, 34, 33], [19172, 19173, 34], [19173, 35, 34], [19173, 19174, 36], [19173, 36, 35], [19174, 19175, 36], [19175, 37, 36], [19175, 19176, 38], [19175, 38, 37], [19176, 19177, 38], [19177, 39, 38], [19177, 19178, 40], [19177, 40, 39], [19178, 19179, 40], [19179, 41, 40], [19179, 19180, 42], [19179, 42, 41], [19180, 19181, 42], [19181, 43, 42], [19181, 19182, 44], [19181, 44, 43], [19182, 19183, 44], [19183, 45, 44], [19183, 19184, 46], [19183, 46, 45], [19184, 19185, 46], [19185, 47, 46], [19185, 19186, 48], [19185, 48, 47], [19186, 19187, 48], [19187, 49, 48], [19187, 19188, 50], [19187, 50, 49], [19188, 19189, 50], [19189, 51, 50], [19189, 19190, 52], [19189, 52, 51], [19190, 19191, 52], [19191, 53, 52], [19191, 19192, 54], [19191, 54, 53], [19192, 19193, 54], [19193, 55, 54], [19193, 19194, 56], [19193, 56, 55], [19194, 19195, 56], [19195, 57, 56], [19195, 19196, 58], [19195, 58, 57], [19196, 19197, 58], [19197, 59, 58], [19197, 19198, 60], [19197, 60, 59], [19198, 19199, 60], [19199, 61, 60], [19199, 19200, 62], [19199, 62, 61], [19200, 19201, 62], [19201, 63, 62], [19201, 19202, 64], [19201, 64, 63], [19202, 19203, 64], [19203, 65, 64], [19203, 19204, 66], [19203, 66, 65], [19204, 19205, 66], [19205, 67, 66], [19205, 19206, 68], [19205, 68, 67], [19206, 19207, 68], [19207, 69, 68], [19207, 19208, 70], [19207, 70, 69], [19208, 19209, 70], [19209, 71, 70], [19209, 19210, 72], [19209, 72, 71], [19210, 19211, 72], [19211, 73, 72], [19211, 19212, 74], [19211, 74, 73], [19212, 19213, 74], [19213, 75, 74], [19213, 19214, 76], [19213, 76, 75], [19214, 19215, 76], [19215, 77, 76], [19215, 19216, 78], [19215, 78, 77], [19216, 19217, 78], [19217, 79, 78], [19217, 19218, 80], [19217, 80, 79], [19218, 19219, 80], [19219, 81, 80], [19219, 19220, 82], [19219, 82, 81], [19220, 19221, 82], [19221, 83, 82], [19221, 19222, 84], [19221, 84, 83], [19222, 19223, 84], [19223, 85, 84], [19223, 19224, 86], [19223, 86, 85], [19224, 19225, 86], [19225, 87, 86], [19225, 19226, 88], [19225, 88, 87], [19226, 19227, 88], [19227, 89, 88], [19227, 19228, 90], [19227, 90, 89], [19228, 19229, 90], [19229, 91, 90], [19229, 
19230, 92], [19229, 92, 91], [19230, 19231, 92], [19231, 93, 92], [19231, 19232, 94], [19231, 94, 93], [19232, 19233, 94], [19233, 95, 94], [19233, 19234, 96], [19233, 96, 95], [19234, 19235, 96], [19235, 97, 96], [19235, 19236, 98], [19235, 98, 97], [19236, 19237, 98], [19237, 99, 98], [19237, 19238, 100], [19237, 100, 99], [19238, 19239, 100], [19239, 101, 100], [19239, 19240, 102], [19239, 102, 101], [19240, 19241, 102], [19241, 103, 102], [19241, 19242, 104], [19241, 104, 103], [19242, 19243, 104], [19243, 105, 104], [19243, 19244, 106], [19243, 106, 105], [19244, 19245, 106], [19245, 107, 106], [19245, 19246, 108], [19245, 108, 107], [19246, 19247, 108], [19247, 109, 108], [19247, 19248, 110], [19247, 110, 109], [19248, 19249, 110], [19249, 111, 110], [19249, 19250, 112], [19249, 112, 111], [19250, 19251, 112], [19251, 113, 112], [19251, 19252, 114], [19251, 114, 113], [19252, 19253, 114], [19253, 115, 114], [19253, 19254, 116], [19253, 116, 115], [19254, 19255, 116], [19255, 117, 116], [19255, 19256, 118], [19255, 118, 117], [19256, 19257, 118], [19257, 119, 118], [19257, 19258, 120], [19257, 120, 119], [19258, 19259, 120], [19259, 121, 120], [19259, 19260, 122], [19259, 122, 121], [19260, 19261, 122], [19261, 123, 122], [19261, 19262, 124], [19261, 124, 123], [19262, 19263, 124], [19263, 125, 124], [19263, 19264, 126], [19263, 126, 125], [19264, 19265, 126], [19265, 127, 126], [19265, 19266, 128], [19265, 128, 127], [19267, 19268, 19397], [19267, 19397, 19396], [19268, 19269, 19397], [19269, 19398, 19397], [19269, 19270, 19399], [19269, 19399, 19398], [19270, 19271, 19399], [19271, 19400, 19399], [19271, 19272, 19401], [19271, 19401, 19400], [19272, 19273, 19401], [19273, 19402, 19401], [19273, 19274, 19403], [19273, 19403, 19402], [19274, 19275, 19403], [19275, 19404, 19403], [19275, 19276, 19405], [19275, 19405, 19404], [19276, 19277, 19405], [19277, 19406, 19405], [19277, 19278, 19407], [19277, 19407, 19406], [19278, 19279, 19407], [19279, 19408, 19407], [19279, 19280, 19409], [19279, 19409, 19408], [19280, 19281, 19409], [19281, 19410, 19409], [19281, 19282, 19411], [19281, 19411, 19410], [19282, 19283, 19411], [19283, 19412, 19411], [19283, 19284, 19413], [19283, 19413, 19412], [19284, 19285, 19413], [19285, 19414, 19413], [19285, 19286, 19415], [19285, 19415, 19414], [19286, 19287, 19415], [19287, 19416, 19415], [19287, 19288, 19417], [19287, 19417, 19416], [19288, 19289, 19417], [19289, 19418, 19417], [19289, 19290, 19419], [19289, 19419, 19418], [19290, 19291, 19419], [19291, 19420, 19419], [19291, 19292, 19421], [19291, 19421, 19420], [19292, 19293, 19421], [19293, 19422, 19421], [19293, 19294, 19423], [19293, 19423, 19422], [19294, 19295, 19423], [19295, 19424, 19423], [19295, 19296, 19425], [19295, 19425, 19424], [19296, 19297, 19425], [19297, 19426, 19425], [19297, 19298, 19427], [19297, 19427, 19426], [19298, 19299, 19427], [19299, 19428, 19427], [19299, 19300, 19429], [19299, 19429, 19428], [19300, 19301, 19429], [19301, 19430, 19429], [19301, 19302, 19431], [19301, 19431, 19430], [19302, 19303, 19431], [19303, 19432, 19431], [19303, 19304, 19433], [19303, 19433, 19432], [19304, 19305, 19433], [19305, 19434, 19433], [19305, 19306, 19435], [19305, 19435, 19434], [19306, 19307, 19435], [19307, 19436, 19435], [19307, 19308, 19437], [19307, 19437, 19436], [19308, 19309, 19437], [19309, 19438, 19437], [19309, 19310, 19439], [19309, 19439, 19438], [19310, 19311, 19439], [19311, 19440, 19439], [19311, 19312, 19441], [19311, 19441, 19440], [19312, 19313, 19441], [19313, 19442, 
19441], [19313, 19314, 19443], [19313, 19443, 19442], [19314, 19315, 19443], [19315, 19444, 19443], [19315, 19316, 19445], [19315, 19445, 19444], [19316, 19317, 19445], [19317, 19446, 19445], [19317, 19318, 19447], [19317, 19447, 19446], [19318, 19319, 19447], [19319, 19448, 19447], [19319, 19320, 19449], [19319, 19449, 19448], [19320, 19321, 19449], [19321, 19450, 19449], [19321, 19322, 19451], [19321, 19451, 19450], [19322, 19323, 19451], [19323, 19452, 19451], [19323, 19324, 19453], [19323, 19453, 19452], [19324, 19325, 19453], [19325, 19454, 19453], [19325, 19326, 19455], [19325, 19455, 19454], [19326, 19327, 19455], [19327, 19456, 19455], [19327, 19328, 19457], [19327, 19457, 19456], [19328, 19329, 19457], [19329, 19458, 19457], [19329, 19330, 19459], [19329, 19459, 19458], [19330, 19331, 19459], [19331, 19460, 19459], [19331, 19332, 19461], [19331, 19461, 19460], [19332, 19333, 19461], [19333, 19462, 19461], [19333, 19334, 19463], [19333, 19463, 19462], [19334, 19335, 19463], [19335, 19464, 19463], [19335, 19336, 19465], [19335, 19465, 19464], [19336, 19337, 19465], [19337, 19466, 19465], [19337, 19338, 19467], [19337, 19467, 19466], [19338, 19339, 19467], [19339, 19468, 19467], [19339, 19340, 19469], [19339, 19469, 19468], [19340, 19341, 19469], [19341, 19470, 19469], [19341, 19342, 19471], [19341, 19471, 19470], [19342, 19343, 19471], [19343, 19472, 19471], [19343, 19344, 19473], [19343, 19473, 19472], [19344, 19345, 19473], [19345, 19474, 19473], [19345, 19346, 19475], [19345, 19475, 19474], [19346, 19347, 19475], [19347, 19476, 19475], [19347, 19348, 19477], [19347, 19477, 19476], [19348, 19349, 19477], [19349, 19478, 19477], [19349, 19350, 19479], [19349, 19479, 19478], [19350, 19351, 19479], [19351, 19480, 19479], [19351, 19352, 19481], [19351, 19481, 19480], [19352, 19353, 19481], [19353, 19482, 19481], [19353, 19354, 19483], [19353, 19483, 19482], [19354, 19355, 19483], [19355, 19484, 19483], [19355, 19356, 19485], [19355, 19485, 19484], [19356, 19357, 19485], [19357, 19486, 19485], [19357, 19358, 19487], [19357, 19487, 19486], [19358, 19359, 19487], [19359, 19488, 19487], [19359, 19360, 19489], [19359, 19489, 19488], [19360, 19361, 19489], [19361, 19490, 19489], [19361, 19362, 19491], [19361, 19491, 19490], [19362, 19363, 19491], [19363, 19492, 19491], [19363, 19364, 19493], [19363, 19493, 19492], [19364, 19365, 19493], [19365, 19494, 19493], [19365, 19366, 19495], [19365, 19495, 19494], [19366, 19367, 19495], [19367, 19496, 19495], [19367, 19368, 19497], [19367, 19497, 19496], [19368, 19369, 19497], [19369, 19498, 19497], [19369, 19370, 19499], [19369, 19499, 19498], [19370, 19371, 19499], [19371, 19500, 19499], [19371, 19372, 19501], [19371, 19501, 19500], [19372, 19373, 19501], [19373, 19502, 19501], [19373, 19374, 19503], [19373, 19503, 19502], [19374, 19375, 19503], [19375, 19504, 19503], [19375, 19376, 19505], [19375, 19505, 19504], [19376, 19377, 19505], [19377, 19506, 19505], [19377, 19378, 19507], [19377, 19507, 19506], [19378, 19379, 19507], [19379, 19508, 19507], [19379, 19380, 19509], [19379, 19509, 19508], [19380, 19381, 19509], [19381, 19510, 19509], [19381, 19382, 19511], [19381, 19511, 19510], [19382, 19383, 19511], [19383, 19512, 19511], [19383, 19384, 19513], [19383, 19513, 19512], [19384, 19385, 19513], [19385, 19514, 19513], [19385, 19386, 19515], [19385, 19515, 19514], [19386, 19387, 19515], [19387, 19516, 19515], [19387, 19388, 19517], [19387, 19517, 19516], [19388, 19389, 19517], [19389, 19518, 19517], [19389, 19390, 19519], [19389, 19519, 19518], 
[19390, 19391, 19519], [19391, 19520, 19519], [19391, 19392, 19521], [19391, 19521, 19520], [19392, 19393, 19521], [19393, 19522, 19521], [19393, 19394, 19523], [19393, 19523, 19522], [19394, 19395, 19523], [19395, 19524, 19523], [19396, 19397, 19525], [19397, 19526, 19525], [19397, 19398, 19527], [19397, 19527, 19526], [19398, 19399, 19527], [19399, 19528, 19527], [19399, 19400, 19529], [19399, 19529, 19528], [19400, 19401, 19529], [19401, 19530, 19529], [19401, 19402, 19531], [19401, 19531, 19530], [19402, 19403, 19531], [19403, 19532, 19531], [19403, 19404, 19533], [19403, 19533, 19532], [19404, 19405, 19533], [19405, 19534, 19533], [19405, 19406, 19535], [19405, 19535, 19534], [19406, 19407, 19535], [19407, 19536, 19535], [19407, 19408, 19537], [19407, 19537, 19536], [19408, 19409, 19537], [19409, 19538, 19537], [19409, 19410, 19539], [19409, 19539, 19538], [19410, 19411, 19539], [19411, 19540, 19539], [19411, 19412, 19541], [19411, 19541, 19540], [19412, 19413, 19541], [19413, 19542, 19541], [19413, 19414, 19543], [19413, 19543, 19542], [19414, 19415, 19543], [19415, 19544, 19543], [19415, 19416, 19545], [19415, 19545, 19544], [19416, 19417, 19545], [19417, 19546, 19545], [19417, 19418, 19547], [19417, 19547, 19546], [19418, 19419, 19547], [19419, 19548, 19547], [19419, 19420, 19549], [19419, 19549, 19548], [19420, 19421, 19549], [19421, 19550, 19549], [19421, 19422, 19551], [19421, 19551, 19550], [19422, 19423, 19551], [19423, 19552, 19551], [19423, 19424, 19553], [19423, 19553, 19552], [19424, 19425, 19553], [19425, 19554, 19553], [19425, 19426, 19555], [19425, 19555, 19554], [19426, 19427, 19555], [19427, 19556, 19555], [19427, 19428, 19557], [19427, 19557, 19556], [19428, 19429, 19557], [19429, 19558, 19557], [19429, 19430, 19559], [19429, 19559, 19558], [19430, 19431, 19559], [19431, 19560, 19559], [19431, 19432, 19561], [19431, 19561, 19560], [19432, 19433, 19561], [19433, 19562, 19561], [19433, 19434, 19563], [19433, 19563, 19562], [19434, 19435, 19563], [19435, 19564, 19563], [19435, 19436, 19565], [19435, 19565, 19564], [19436, 19437, 19565], [19437, 19566, 19565], [19437, 19438, 19567], [19437, 19567, 19566], [19438, 19439, 19567], [19439, 19568, 19567], [19439, 19440, 19569], [19439, 19569, 19568], [19440, 19441, 19569], [19441, 19570, 19569], [19441, 19442, 19571], [19441, 19571, 19570], [19442, 19443, 19571], [19443, 19572, 19571], [19443, 19444, 19573], [19443, 19573, 19572], [19444, 19445, 19573], [19445, 19574, 19573], [19445, 19446, 19575], [19445, 19575, 19574], [19446, 19447, 19575], [19447, 19576, 19575], [19447, 19448, 19577], [19447, 19577, 19576], [19448, 19449, 19577], [19449, 19578, 19577], [19449, 19450, 19579], [19449, 19579, 19578], [19450, 19451, 19579], [19451, 19580, 19579], [19451, 19452, 19581], [19451, 19581, 19580], [19452, 19453, 19581], [19453, 19582, 19581], [19453, 19454, 19583], [19453, 19583, 19582], [19454, 19455, 19583], [19455, 19584, 19583], [19455, 19456, 19585], [19455, 19585, 19584], [19456, 19457, 19585], [19457, 19586, 19585], [19457, 19458, 19587], [19457, 19587, 19586], [19458, 19459, 19587], [19459, 19588, 19587], [19459, 19460, 19589], [19459, 19589, 19588], [19460, 19461, 19589], [19461, 19590, 19589], [19461, 19462, 19591], [19461, 19591, 19590], [19462, 19463, 19591], [19463, 19592, 19591], [19463, 19464, 19593], [19463, 19593, 19592], [19464, 19465, 19593], [19465, 19594, 19593], [19465, 19466, 19595], [19465, 19595, 19594], [19466, 19467, 19595], [19467, 19596, 19595], [19467, 19468, 19597], [19467, 19597, 19596], [19468, 
19469, 19597], [19469, 19598, 19597], [19469, 19470, 19599], [19469, 19599, 19598], [19470, 19471, 19599], [19471, 19600, 19599], [19471, 19472, 19601], [19471, 19601, 19600], [19472, 19473, 19601], [19473, 19602, 19601], [19473, 19474, 19603], [19473, 19603, 19602], [19474, 19475, 19603], [19475, 19604, 19603], [19475, 19476, 19605], [19475, 19605, 19604], [19476, 19477, 19605], [19477, 19606, 19605], [19477, 19478, 19607], [19477, 19607, 19606], [19478, 19479, 19607], [19479, 19608, 19607], [19479, 19480, 19609], [19479, 19609, 19608], [19480, 19481, 19609], [19481, 19610, 19609], [19481, 19482, 19611], [19481, 19611, 19610], [19482, 19483, 19611], [19483, 19612, 19611], [19483, 19484, 19613], [19483, 19613, 19612], [19484, 19485, 19613], [19485, 19614, 19613], [19485, 19486, 19615], [19485, 19615, 19614], [19486, 19487, 19615], [19487, 19616, 19615], [19487, 19488, 19617], [19487, 19617, 19616], [19488, 19489, 19617], [19489, 19618, 19617], [19489, 19490, 19619], [19489, 19619, 19618], [19490, 19491, 19619], [19491, 19620, 19619], [19491, 19492, 19621], [19491, 19621, 19620], [19492, 19493, 19621], [19493, 19622, 19621], [19493, 19494, 19623], [19493, 19623, 19622], [19494, 19495, 19623], [19495, 19624, 19623], [19495, 19496, 19625], [19495, 19625, 19624], [19496, 19497, 19625], [19497, 19626, 19625], [19497, 19498, 19627], [19497, 19627, 19626], [19498, 19499, 19627], [19499, 19628, 19627], [19499, 19500, 19629], [19499, 19629, 19628], [19500, 19501, 19629], [19501, 19630, 19629], [19501, 19502, 19631], [19501, 19631, 19630], [19502, 19503, 19631], [19503, 19632, 19631], [19503, 19504, 19633], [19503, 19633, 19632], [19504, 19505, 19633], [19505, 19634, 19633], [19505, 19506, 19635], [19505, 19635, 19634], [19506, 19507, 19635], [19507, 19636, 19635], [19507, 19508, 19637], [19507, 19637, 19636], [19508, 19509, 19637], [19509, 19638, 19637], [19509, 19510, 19639], [19509, 19639, 19638], [19510, 19511, 19639], [19511, 19640, 19639], [19511, 19512, 19641], [19511, 19641, 19640], [19512, 19513, 19641], [19513, 19642, 19641], [19513, 19514, 19643], [19513, 19643, 19642], [19514, 19515, 19643], [19515, 19644, 19643], [19515, 19516, 19645], [19515, 19645, 19644], [19516, 19517, 19645], [19517, 19646, 19645], [19517, 19518, 19647], [19517, 19647, 19646], [19518, 19519, 19647], [19519, 19648, 19647], [19519, 19520, 19649], [19519, 19649, 19648], [19520, 19521, 19649], [19521, 19650, 19649], [19521, 19522, 19651], [19521, 19651, 19650], [19522, 19523, 19651], [19523, 19652, 19651], [19523, 19524, 19653], [19523, 19653, 19652], [19525, 19526, 19655], [19525, 19655, 19654], [19526, 19527, 19655], [19527, 19656, 19655], [19527, 19528, 19657], [19527, 19657, 19656], [19528, 19529, 19657], [19529, 19658, 19657], [19529, 19530, 19659], [19529, 19659, 19658], [19530, 19531, 19659], [19531, 19660, 19659], [19531, 19532, 19661], [19531, 19661, 19660], [19532, 19533, 19661], [19533, 19662, 19661], [19533, 19534, 19663], [19533, 19663, 19662], [19534, 19535, 19663], [19535, 19664, 19663], [19535, 19536, 19665], [19535, 19665, 19664], [19536, 19537, 19665], [19537, 19666, 19665], [19537, 19538, 19667], [19537, 19667, 19666], [19538, 19539, 19667], [19539, 19668, 19667], [19539, 19540, 19669], [19539, 19669, 19668], [19540, 19541, 19669], [19541, 19670, 19669], [19541, 19542, 19671], [19541, 19671, 19670], [19542, 19543, 19671], [19543, 19672, 19671], [19543, 19544, 19673], [19543, 19673, 19672], [19544, 19545, 19673], [19545, 19674, 19673], [19545, 19546, 19675], [19545, 19675, 19674], [19546, 19547, 
19675], [19547, 19676, 19675], [19547, 19548, 19677], [19547, 19677, 19676], [19548, 19549, 19677], [19549, 19678, 19677], [19549, 19550, 19679], [19549, 19679, 19678], [19550, 19551, 19679], [19551, 19680, 19679], [19551, 19552, 19681], [19551, 19681, 19680], [19552, 19553, 19681], [19553, 19682, 19681], [19553, 19554, 19683], [19553, 19683, 19682], [19554, 19555, 19683], [19555, 19684, 19683], [19555, 19556, 19685], [19555, 19685, 19684], [19556, 19557, 19685], [19557, 19686, 19685], [19557, 19558, 19687], [19557, 19687, 19686], [19558, 19559, 19687], [19559, 19688, 19687], [19559, 19560, 19689], [19559, 19689, 19688], [19560, 19561, 19689], [19561, 19690, 19689], [19561, 19562, 19691], [19561, 19691, 19690], [19562, 19563, 19691], [19563, 19692, 19691], [19563, 19564, 19693], [19563, 19693, 19692], [19564, 19565, 19693], [19565, 19694, 19693], [19565, 19566, 19695], [19565, 19695, 19694], [19566, 19567, 19695], [19567, 19696, 19695], [19567, 19568, 19697], [19567, 19697, 19696], [19568, 19569, 19697], [19569, 19698, 19697], [19569, 19570, 19699], [19569, 19699, 19698], [19570, 19571, 19699], [19571, 19700, 19699], [19571, 19572, 19701], [19571, 19701, 19700], [19572, 19573, 19701], [19573, 19702, 19701], [19573, 19574, 19703], [19573, 19703, 19702], [19574, 19575, 19703], [19575, 19704, 19703], [19575, 19576, 19705], [19575, 19705, 19704], [19576, 19577, 19705], [19577, 19706, 19705], [19577, 19578, 19707], [19577, 19707, 19706], [19578, 19579, 19707], [19579, 19708, 19707], [19579, 19580, 19709], [19579, 19709, 19708], [19580, 19581, 19709], [19581, 19710, 19709], [19581, 19582, 19711], [19581, 19711, 19710], [19582, 19583, 19711], [19583, 19712, 19711], [19583, 19584, 19713], [19583, 19713, 19712], [19584, 19585, 19713], [19585, 19714, 19713], [19585, 19586, 19715], [19585, 19715, 19714], [19586, 19587, 19715], [19587, 19716, 19715], [19587, 19588, 19717], [19587, 19717, 19716], [19588, 19589, 19717], [19589, 19718, 19717], [19589, 19590, 19719], [19589, 19719, 19718], [19590, 19591, 19719], [19591, 19720, 19719], [19591, 19592, 19721], [19591, 19721, 19720], [19592, 19593, 19721], [19593, 19722, 19721], [19593, 19594, 19723], [19593, 19723, 19722], [19594, 19595, 19723], [19595, 19724, 19723], [19595, 19596, 19725], [19595, 19725, 19724], [19596, 19597, 19725], [19597, 19726, 19725], [19597, 19598, 19727], [19597, 19727, 19726], [19598, 19599, 19727], [19599, 19728, 19727], [19599, 19600, 19729], [19599, 19729, 19728], [19600, 19601, 19729], [19601, 19730, 19729], [19601, 19602, 19731], [19601, 19731, 19730], [19602, 19603, 19731], [19603, 19732, 19731], [19603, 19604, 19733], [19603, 19733, 19732], [19604, 19605, 19733], [19605, 19734, 19733], [19605, 19606, 19735], [19605, 19735, 19734], [19606, 19607, 19735], [19607, 19736, 19735], [19607, 19608, 19737], [19607, 19737, 19736], [19608, 19609, 19737], [19609, 19738, 19737], [19609, 19610, 19739], [19609, 19739, 19738], [19610, 19611, 19739], [19611, 19740, 19739], [19611, 19612, 19741], [19611, 19741, 19740], [19612, 19613, 19741], [19613, 19742, 19741], [19613, 19614, 19743], [19613, 19743, 19742], [19614, 19615, 19743], [19615, 19744, 19743], [19615, 19616, 19745], [19615, 19745, 19744], [19616, 19617, 19745], [19617, 19746, 19745], [19617, 19618, 19747], [19617, 19747, 19746], [19618, 19619, 19747], [19619, 19748, 19747], [19619, 19620, 19749], [19619, 19749, 19748], [19620, 19621, 19749], [19621, 19750, 19749], [19621, 19622, 19751], [19621, 19751, 19750], [19622, 19623, 19751], [19623, 19752, 19751], [19623, 19624, 19753], 
[19623, 19753, 19752], [19624, 19625, 19753], [19625, 19754, 19753], [19625, 19626, 19755], [19625, 19755, 19754], [19626, 19627, 19755], [19627, 19756, 19755], [19627, 19628, 19757], [19627, 19757, 19756], [19628, 19629, 19757], [19629, 19758, 19757], [19629, 19630, 19759], [19629, 19759, 19758], [19630, 19631, 19759], [19631, 19760, 19759], [19631, 19632, 19761], [19631, 19761, 19760], [19632, 19633, 19761], [19633, 19762, 19761], [19633, 19634, 19763], [19633, 19763, 19762], [19634, 19635, 19763], [19635, 19764, 19763], [19635, 19636, 19765], [19635, 19765, 19764], [19636, 19637, 19765], [19637, 19766, 19765], [19637, 19638, 19767], [19637, 19767, 19766], [19638, 19639, 19767], [19639, 19768, 19767], [19639, 19640, 19769], [19639, 19769, 19768], [19640, 19641, 19769], [19641, 19770, 19769], [19641, 19642, 19771], [19641, 19771, 19770], [19642, 19643, 19771], [19643, 19772, 19771], [19643, 19644, 19773], [19643, 19773, 19772], [19644, 19645, 19773], [19645, 19774, 19773], [19645, 19646, 19775], [19645, 19775, 19774], [19646, 19647, 19775], [19647, 19776, 19775], [19647, 19648, 19777], [19647, 19777, 19776], [19648, 19649, 19777], [19649, 19778, 19777], [19649, 19650, 19779], [19649, 19779, 19778], [19650, 19651, 19779], [19651, 19780, 19779], [19651, 19652, 19781], [19651, 19781, 19780], [19652, 19653, 19781], [19653, 19782, 19781], [19654, 19655, 19783], [19655, 19784, 19783], [19655, 19656, 19785], [19655, 19785, 19784], [19656, 19657, 19785], [19657, 19786, 19785], [19657, 19658, 19787], [19657, 19787, 19786], [19658, 19659, 19787], [19659, 19788, 19787], [19659, 19660, 19789], [19659, 19789, 19788], [19660, 19661, 19789], [19661, 19790, 19789], [19661, 19662, 19791], [19661, 19791, 19790], [19662, 19663, 19791], [19663, 19792, 19791], [19663, 19664, 19793], [19663, 19793, 19792], [19664, 19665, 19793], [19665, 19794, 19793], [19665, 19666, 19795], [19665, 19795, 19794], [19666, 19667, 19795], [19667, 19796, 19795], [19667, 19668, 19797], [19667, 19797, 19796], [19668, 19669, 19797], [19669, 19798, 19797], [19669, 19670, 19799], [19669, 19799, 19798], [19670, 19671, 19799], [19671, 19800, 19799], [19671, 19672, 19801], [19671, 19801, 19800], [19672, 19673, 19801], [19673, 19802, 19801], [19673, 19674, 19803], [19673, 19803, 19802], [19674, 19675, 19803], [19675, 19804, 19803], [19675, 19676, 19805], [19675, 19805, 19804], [19676, 19677, 19805], [19677, 19806, 19805], [19677, 19678, 19807], [19677, 19807, 19806], [19678, 19679, 19807], [19679, 19808, 19807], [19679, 19680, 19809], [19679, 19809, 19808], [19680, 19681, 19809], [19681, 19810, 19809], [19681, 19682, 19811], [19681, 19811, 19810], [19682, 19683, 19811], [19683, 19812, 19811], [19683, 19684, 19813], [19683, 19813, 19812], [19684, 19685, 19813], [19685, 19814, 19813], [19685, 19686, 19815], [19685, 19815, 19814], [19686, 19687, 19815], [19687, 19816, 19815], [19687, 19688, 19817], [19687, 19817, 19816], [19688, 19689, 19817], [19689, 19818, 19817], [19689, 19690, 19819], [19689, 19819, 19818], [19690, 19691, 19819], [19691, 19820, 19819], [19691, 19692, 19821], [19691, 19821, 19820], [19692, 19693, 19821], [19693, 19822, 19821], [19693, 19694, 19823], [19693, 19823, 19822], [19694, 19695, 19823], [19695, 19824, 19823], [19695, 19696, 19825], [19695, 19825, 19824], [19696, 19697, 19825], [19697, 19826, 19825], [19697, 19698, 19827], [19697, 19827, 19826], [19698, 19699, 19827], [19699, 19828, 19827], [19699, 19700, 19829], [19699, 19829, 19828], [19700, 19701, 19829], [19701, 19830, 19829], [19701, 19702, 19831], [19701, 
19831, 19830], [19702, 19703, 19831], [19703, 19832, 19831], [19703, 19704, 19833], [19703, 19833, 19832], [19704, 19705, 19833], [19705, 19834, 19833], [19705, 19706, 19835], [19705, 19835, 19834], [19706, 19707, 19835], [19707, 19836, 19835], [19707, 19708, 19837], [19707, 19837, 19836], [19708, 19709, 19837], [19709, 19838, 19837], [19709, 19710, 19839], [19709, 19839, 19838], [19710, 19711, 19839], [19711, 19840, 19839], [19711, 19712, 19841], [19711, 19841, 19840], [19712, 19713, 19841], [19713, 19842, 19841], [19713, 19714, 19843], [19713, 19843, 19842], [19714, 19715, 19843], [19715, 19844, 19843], [19715, 19716, 19845], [19715, 19845, 19844], [19716, 19717, 19845], [19717, 19846, 19845], [19717, 19718, 19847], [19717, 19847, 19846], [19718, 19719, 19847], [19719, 19848, 19847], [19719, 19720, 19849], [19719, 19849, 19848], [19720, 19721, 19849], [19721, 19850, 19849], [19721, 19722, 19851], [19721, 19851, 19850], [19722, 19723, 19851], [19723, 19852, 19851], [19723, 19724, 19853], [19723, 19853, 19852], [19724, 19725, 19853], [19725, 19854, 19853], [19725, 19726, 19855], [19725, 19855, 19854], [19726, 19727, 19855], [19727, 19856, 19855], [19727, 19728, 19857], [19727, 19857, 19856], [19728, 19729, 19857], [19729, 19858, 19857], [19729, 19730, 19859], [19729, 19859, 19858], [19730, 19731, 19859], [19731, 19860, 19859], [19731, 19732, 19861], [19731, 19861, 19860], [19732, 19733, 19861], [19733, 19862, 19861], [19733, 19734, 19863], [19733, 19863, 19862], [19734, 19735, 19863], [19735, 19864, 19863], [19735, 19736, 19865], [19735, 19865, 19864], [19736, 19737, 19865], [19737, 19866, 19865], [19737, 19738, 19867], [19737, 19867, 19866], [19738, 19739, 19867], [19739, 19868, 19867], [19739, 19740, 19869], [19739, 19869, 19868], [19740, 19741, 19869], [19741, 19870, 19869], [19741, 19742, 19871], [19741, 19871, 19870], [19742, 19743, 19871], [19743, 19872, 19871], [19743, 19744, 19873], [19743, 19873, 19872], [19744, 19745, 19873], [19745, 19874, 19873], [19745, 19746, 19875], [19745, 19875, 19874], [19746, 19747, 19875], [19747, 19876, 19875], [19747, 19748, 19877], [19747, 19877, 19876], [19748, 19749, 19877], [19749, 19878, 19877], [19749, 19750, 19879], [19749, 19879, 19878], [19750, 19751, 19879], [19751, 19880, 19879], [19751, 19752, 19881], [19751, 19881, 19880], [19752, 19753, 19881], [19753, 19882, 19881], [19753, 19754, 19883], [19753, 19883, 19882], [19754, 19755, 19883], [19755, 19884, 19883], [19755, 19756, 19885], [19755, 19885, 19884], [19756, 19757, 19885], [19757, 19886, 19885], [19757, 19758, 19887], [19757, 19887, 19886], [19758, 19759, 19887], [19759, 19888, 19887], [19759, 19760, 19889], [19759, 19889, 19888], [19760, 19761, 19889], [19761, 19890, 19889], [19761, 19762, 19891], [19761, 19891, 19890], [19762, 19763, 19891], [19763, 19892, 19891], [19763, 19764, 19893], [19763, 19893, 19892], [19764, 19765, 19893], [19765, 19894, 19893], [19765, 19766, 19895], [19765, 19895, 19894], [19766, 19767, 19895], [19767, 19896, 19895], [19767, 19768, 19897], [19767, 19897, 19896], [19768, 19769, 19897], [19769, 19898, 19897], [19769, 19770, 19899], [19769, 19899, 19898], [19770, 19771, 19899], [19771, 19900, 19899], [19771, 19772, 19901], [19771, 19901, 19900], [19772, 19773, 19901], [19773, 19902, 19901], [19773, 19774, 19903], [19773, 19903, 19902], [19774, 19775, 19903], [19775, 19904, 19903], [19775, 19776, 19905], [19775, 19905, 19904], [19776, 19777, 19905], [19777, 19906, 19905], [19777, 19778, 19907], [19777, 19907, 19906], [19778, 19779, 19907], [19779, 19908, 
19907], [19779, 19780, 19909], [19779, 19909, 19908], [19780, 19781, 19909], [19781, 19910, 19909], [19781, 19782, 19911], [19781, 19911, 19910], [19783, 19784, 19913], [19783, 19913, 19912], [19784, 19785, 19913], [19785, 19914, 19913], [19785, 19786, 19915], [19785, 19915, 19914], [19786, 19787, 19915], [19787, 19916, 19915], [19787, 19788, 19917], [19787, 19917, 19916], [19788, 19789, 19917], [19789, 19918, 19917], [19789, 19790, 19919], [19789, 19919, 19918], [19790, 19791, 19919], [19791, 19920, 19919], [19791, 19792, 19921], [19791, 19921, 19920], [19792, 19793, 19921], [19793, 19922, 19921], [19793, 19794, 19923], [19793, 19923, 19922], [19794, 19795, 19923], [19795, 19924, 19923], [19795, 19796, 19925], [19795, 19925, 19924], [19796, 19797, 19925], [19797, 19926, 19925], [19797, 19798, 19927], [19797, 19927, 19926], [19798, 19799, 19927], [19799, 19928, 19927], [19799, 19800, 19929], [19799, 19929, 19928], [19800, 19801, 19929], [19801, 19930, 19929], [19801, 19802, 19931], [19801, 19931, 19930], [19802, 19803, 19931], [19803, 19932, 19931], [19803, 19804, 19933], [19803, 19933, 19932], [19804, 19805, 19933], [19805, 19934, 19933], [19805, 19806, 19935], [19805, 19935, 19934], [19806, 19807, 19935], [19807, 19936, 19935], [19807, 19808, 19937], [19807, 19937, 19936], [19808, 19809, 19937], [19809, 19938, 19937], [19809, 19810, 19939], [19809, 19939, 19938], [19810, 19811, 19939], [19811, 19940, 19939], [19811, 19812, 19941], [19811, 19941, 19940], [19812, 19813, 19941], [19813, 19942, 19941], [19813, 19814, 19943], [19813, 19943, 19942], [19814, 19815, 19943], [19815, 19944, 19943], [19815, 19816, 19945], [19815, 19945, 19944], [19816, 19817, 19945], [19817, 19946, 19945], [19817, 19818, 19947], [19817, 19947, 19946], [19818, 19819, 19947], [19819, 19948, 19947], [19819, 19820, 19949], [19819, 19949, 19948], [19820, 19821, 19949], [19821, 19950, 19949], [19821, 19822, 19951], [19821, 19951, 19950], [19822, 19823, 19951], [19823, 19952, 19951], [19823, 19824, 19953], [19823, 19953, 19952], [19824, 19825, 19953], [19825, 19954, 19953], [19825, 19826, 19955], [19825, 19955, 19954], [19826, 19827, 19955], [19827, 19956, 19955], [19827, 19828, 19957], [19827, 19957, 19956], [19828, 19829, 19957], [19829, 19958, 19957], [19829, 19830, 19959], [19829, 19959, 19958], [19830, 19831, 19959], [19831, 19960, 19959], [19831, 19832, 19961], [19831, 19961, 19960], [19832, 19833, 19961], [19833, 19962, 19961], [19833, 19834, 19963], [19833, 19963, 19962], [19834, 19835, 19963], [19835, 19964, 19963], [19835, 19836, 19965], [19835, 19965, 19964], [19836, 19837, 19965], [19837, 19966, 19965], [19837, 19838, 19967], [19837, 19967, 19966], [19838, 19839, 19967], [19839, 19968, 19967], [19839, 19840, 19969], [19839, 19969, 19968], [19840, 19841, 19969], [19841, 19970, 19969], [19841, 19842, 19971], [19841, 19971, 19970], [19842, 19843, 19971], [19843, 19972, 19971], [19843, 19844, 19973], [19843, 19973, 19972], [19844, 19845, 19973], [19845, 19974, 19973], [19845, 19846, 19975], [19845, 19975, 19974], [19846, 19847, 19975], [19847, 19976, 19975], [19847, 19848, 19977], [19847, 19977, 19976], [19848, 19849, 19977], [19849, 19978, 19977], [19849, 19850, 19979], [19849, 19979, 19978], [19850, 19851, 19979], [19851, 19980, 19979], [19851, 19852, 19981], [19851, 19981, 19980], [19852, 19853, 19981], [19853, 19982, 19981], [19853, 19854, 19983], [19853, 19983, 19982], [19854, 19855, 19983], [19855, 19984, 19983], [19855, 19856, 19985], [19855, 19985, 19984], [19856, 19857, 19985], [19857, 19986, 19985], 
[19857, 19858, 19987], [19857, 19987, 19986], [19858, 19859, 19987], [19859, 19988, 19987], [19859, 19860, 19989], [19859, 19989, 19988], [19860, 19861, 19989], [19861, 19990, 19989], [19861, 19862, 19991], [19861, 19991, 19990], [19862, 19863, 19991], [19863, 19992, 19991], [19863, 19864, 19993], [19863, 19993, 19992], [19864, 19865, 19993], [19865, 19994, 19993], [19865, 19866, 19995], [19865, 19995, 19994], [19866, 19867, 19995], [19867, 19996, 19995], [19867, 19868, 19997], [19867, 19997, 19996], [19868, 19869, 19997], [19869, 19998, 19997], [19869, 19870, 19999], [19869, 19999, 19998], [19870, 19871, 19999], [19871, 20000, 19999], [19871, 19872, 20001], [19871, 20001, 20000], [19872, 19873, 20001], [19873, 20002, 20001], [19873, 19874, 20003], [19873, 20003, 20002], [19874, 19875, 20003], [19875, 20004, 20003], [19875, 19876, 20005], [19875, 20005, 20004], [19876, 19877, 20005], [19877, 20006, 20005], [19877, 19878, 20007], [19877, 20007, 20006], [19878, 19879, 20007], [19879, 20008, 20007], [19879, 19880, 20009], [19879, 20009, 20008], [19880, 19881, 20009], [19881, 20010, 20009], [19881, 19882, 20011], [19881, 20011, 20010], [19882, 19883, 20011], [19883, 20012, 20011], [19883, 19884, 20013], [19883, 20013, 20012], [19884, 19885, 20013], [19885, 20014, 20013], [19885, 19886, 20015], [19885, 20015, 20014], [19886, 19887, 20015], [19887, 20016, 20015], [19887, 19888, 20017], [19887, 20017, 20016], [19888, 19889, 20017], [19889, 20018, 20017], [19889, 19890, 20019], [19889, 20019, 20018], [19890, 19891, 20019], [19891, 20020, 20019], [19891, 19892, 20021], [19891, 20021, 20020], [19892, 19893, 20021], [19893, 20022, 20021], [19893, 19894, 20023], [19893, 20023, 20022], [19894, 19895, 20023], [19895, 20024, 20023], [19895, 19896, 20025], [19895, 20025, 20024], [19896, 19897, 20025], [19897, 20026, 20025], [19897, 19898, 20027], [19897, 20027, 20026], [19898, 19899, 20027], [19899, 20028, 20027], [19899, 19900, 20029], [19899, 20029, 20028], [19900, 19901, 20029], [19901, 20030, 20029], [19901, 19902, 20031], [19901, 20031, 20030], [19902, 19903, 20031], [19903, 20032, 20031], [19903, 19904, 20033], [19903, 20033, 20032], [19904, 19905, 20033], [19905, 20034, 20033], [19905, 19906, 20035], [19905, 20035, 20034], [19906, 19907, 20035], [19907, 20036, 20035], [19907, 19908, 20037], [19907, 20037, 20036], [19908, 19909, 20037], [19909, 20038, 20037], [19909, 19910, 20039], [19909, 20039, 20038], [19910, 19911, 20039], [19911, 20040, 20039], [19912, 19913, 20041], [19913, 20042, 20041], [19913, 19914, 20043], [19913, 20043, 20042], [19914, 19915, 20043], [19915, 20044, 20043], [19915, 19916, 20045], [19915, 20045, 20044], [19916, 19917, 20045], [19917, 20046, 20045], [19917, 19918, 20047], [19917, 20047, 20046], [19918, 19919, 20047], [19919, 20048, 20047], [19919, 19920, 20049], [19919, 20049, 20048], [19920, 19921, 20049], [19921, 20050, 20049], [19921, 19922, 20051], [19921, 20051, 20050], [19922, 19923, 20051], [19923, 20052, 20051], [19923, 19924, 20053], [19923, 20053, 20052], [19924, 19925, 20053], [19925, 20054, 20053], [19925, 19926, 20055], [19925, 20055, 20054], [19926, 19927, 20055], [19927, 20056, 20055], [19927, 19928, 20057], [19927, 20057, 20056], [19928, 19929, 20057], [19929, 20058, 20057], [19929, 19930, 20059], [19929, 20059, 20058], [19930, 19931, 20059], [19931, 20060, 20059], [19931, 19932, 20061], [19931, 20061, 20060], [19932, 19933, 20061], [19933, 20062, 20061], [19933, 19934, 20063], [19933, 20063, 20062], [19934, 19935, 20063], [19935, 20064, 20063], [19935, 
19936, 20065], [19935, 20065, 20064], [19936, 19937, 20065], [19937, 20066, 20065], [19937, 19938, 20067], [19937, 20067, 20066], [19938, 19939, 20067], [19939, 20068, 20067], [19939, 19940, 20069], [19939, 20069, 20068], [19940, 19941, 20069], [19941, 20070, 20069], [19941, 19942, 20071], [19941, 20071, 20070], [19942, 19943, 20071], [19943, 20072, 20071], [19943, 19944, 20073], [19943, 20073, 20072], [19944, 19945, 20073], [19945, 20074, 20073], [19945, 19946, 20075], [19945, 20075, 20074], [19946, 19947, 20075], [19947, 20076, 20075], [19947, 19948, 20077], [19947, 20077, 20076], [19948, 19949, 20077], [19949, 20078, 20077], [19949, 19950, 20079], [19949, 20079, 20078], [19950, 19951, 20079], [19951, 20080, 20079], [19951, 19952, 20081], [19951, 20081, 20080], [19952, 19953, 20081], [19953, 20082, 20081], [19953, 19954, 20083], [19953, 20083, 20082], [19954, 19955, 20083], [19955, 20084, 20083], [19955, 19956, 20085], [19955, 20085, 20084], [19956, 19957, 20085], [19957, 20086, 20085], [19957, 19958, 20087], [19957, 20087, 20086], [19958, 19959, 20087], [19959, 20088, 20087], [19959, 19960, 20089], [19959, 20089, 20088], [19960, 19961, 20089], [19961, 20090, 20089], [19961, 19962, 20091], [19961, 20091, 20090], [19962, 19963, 20091], [19963, 20092, 20091], [19963, 19964, 20093], [19963, 20093, 20092], [19964, 19965, 20093], [19965, 20094, 20093], [19965, 19966, 20095], [19965, 20095, 20094], [19966, 19967, 20095], [19967, 20096, 20095], [19967, 19968, 20097], [19967, 20097, 20096], [19968, 19969, 20097], [19969, 20098, 20097], [19969, 19970, 20099], [19969, 20099, 20098], [19970, 19971, 20099], [19971, 20100, 20099], [19971, 19972, 20101], [19971, 20101, 20100], [19972, 19973, 20101], [19973, 20102, 20101], [19973, 19974, 20103], [19973, 20103, 20102], [19974, 19975, 20103], [19975, 20104, 20103], [19975, 19976, 20105], [19975, 20105, 20104], [19976, 19977, 20105], [19977, 20106, 20105], [19977, 19978, 20107], [19977, 20107, 20106], [19978, 19979, 20107], [19979, 20108, 20107], [19979, 19980, 20109], [19979, 20109, 20108], [19980, 19981, 20109], [19981, 20110, 20109], [19981, 19982, 20111], [19981, 20111, 20110], [19982, 19983, 20111], [19983, 20112, 20111], [19983, 19984, 20113], [19983, 20113, 20112], [19984, 19985, 20113], [19985, 20114, 20113], [19985, 19986, 20115], [19985, 20115, 20114], [19986, 19987, 20115], [19987, 20116, 20115], [19987, 19988, 20117], [19987, 20117, 20116], [19988, 19989, 20117], [19989, 20118, 20117], [19989, 19990, 20119], [19989, 20119, 20118], [19990, 19991, 20119], [19991, 20120, 20119], [19991, 19992, 20121], [19991, 20121, 20120], [19992, 19993, 20121], [19993, 20122, 20121], [19993, 19994, 20123], [19993, 20123, 20122], [19994, 19995, 20123], [19995, 20124, 20123], [19995, 19996, 20125], [19995, 20125, 20124], [19996, 19997, 20125], [19997, 20126, 20125], [19997, 19998, 20127], [19997, 20127, 20126], [19998, 19999, 20127], [19999, 20128, 20127], [19999, 20000, 20129], [19999, 20129, 20128], [20000, 20001, 20129], [20001, 20130, 20129], [20001, 20002, 20131], [20001, 20131, 20130], [20002, 20003, 20131], [20003, 20132, 20131], [20003, 20004, 20133], [20003, 20133, 20132], [20004, 20005, 20133], [20005, 20134, 20133], [20005, 20006, 20135], [20005, 20135, 20134], [20006, 20007, 20135], [20007, 20136, 20135], [20007, 20008, 20137], [20007, 20137, 20136], [20008, 20009, 20137], [20009, 20138, 20137], [20009, 20010, 20139], [20009, 20139, 20138], [20010, 20011, 20139], [20011, 20140, 20139], [20011, 20012, 20141], [20011, 20141, 20140], [20012, 20013, 
20141], [20013, 20142, 20141], [20013, 20014, 20143], [20013, 20143, 20142], [20014, 20015, 20143], [20015, 20144, 20143], [20015, 20016, 20145], [20015, 20145, 20144], [20016, 20017, 20145], [20017, 20146, 20145], [20017, 20018, 20147], [20017, 20147, 20146], [20018, 20019, 20147], [20019, 20148, 20147], [20019, 20020, 20149], [20019, 20149, 20148], [20020, 20021, 20149], [20021, 20150, 20149], [20021, 20022, 20151], [20021, 20151, 20150], [20022, 20023, 20151], [20023, 20152, 20151], [20023, 20024, 20153], [20023, 20153, 20152], [20024, 20025, 20153], [20025, 20154, 20153], [20025, 20026, 20155], [20025, 20155, 20154], [20026, 20027, 20155], [20027, 20156, 20155], [20027, 20028, 20157], [20027, 20157, 20156], [20028, 20029, 20157], [20029, 20158, 20157], [20029, 20030, 20159], [20029, 20159, 20158], [20030, 20031, 20159], [20031, 20160, 20159], [20031, 20032, 20161], [20031, 20161, 20160], [20032, 20033, 20161], [20033, 20162, 20161], [20033, 20034, 20163], [20033, 20163, 20162], [20034, 20035, 20163], [20035, 20164, 20163], [20035, 20036, 20165], [20035, 20165, 20164], [20036, 20037, 20165], [20037, 20166, 20165], [20037, 20038, 20167], [20037, 20167, 20166], [20038, 20039, 20167], [20039, 20168, 20167], [20039, 20040, 20169], [20039, 20169, 20168], [20041, 20042, 20171], [20041, 20171, 20170], [20042, 20043, 20171], [20043, 20172, 20171], [20043, 20044, 20173], [20043, 20173, 20172], [20044, 20045, 20173], [20045, 20174, 20173], [20045, 20046, 20175], [20045, 20175, 20174], [20046, 20047, 20175], [20047, 20176, 20175], [20047, 20048, 20177], [20047, 20177, 20176], [20048, 20049, 20177], [20049, 20178, 20177], [20049, 20050, 20179], [20049, 20179, 20178], [20050, 20051, 20179], [20051, 20180, 20179], [20051, 20052, 20181], [20051, 20181, 20180], [20052, 20053, 20181], [20053, 20182, 20181], [20053, 20054, 20183], [20053, 20183, 20182], [20054, 20055, 20183], [20055, 20184, 20183], [20055, 20056, 20185], [20055, 20185, 20184], [20056, 20057, 20185], [20057, 20186, 20185], [20057, 20058, 20187], [20057, 20187, 20186], [20058, 20059, 20187], [20059, 20188, 20187], [20059, 20060, 20189], [20059, 20189, 20188], [20060, 20061, 20189], [20061, 20190, 20189], [20061, 20062, 20191], [20061, 20191, 20190], [20062, 20063, 20191], [20063, 20192, 20191], [20063, 20064, 20193], [20063, 20193, 20192], [20064, 20065, 20193], [20065, 20194, 20193], [20065, 20066, 20195], [20065, 20195, 20194], [20066, 20067, 20195], [20067, 20196, 20195], [20067, 20068, 20197], [20067, 20197, 20196], [20068, 20069, 20197], [20069, 20198, 20197], [20069, 20070, 20199], [20069, 20199, 20198], [20070, 20071, 20199], [20071, 20200, 20199], [20071, 20072, 20201], [20071, 20201, 20200], [20072, 20073, 20201], [20073, 20202, 20201], [20073, 20074, 20203], [20073, 20203, 20202], [20074, 20075, 20203], [20075, 20204, 20203], [20075, 20076, 20205], [20075, 20205, 20204], [20076, 20077, 20205], [20077, 20206, 20205], [20077, 20078, 20207], [20077, 20207, 20206], [20078, 20079, 20207], [20079, 20208, 20207], [20079, 20080, 20209], [20079, 20209, 20208], [20080, 20081, 20209], [20081, 20210, 20209], [20081, 20082, 20211], [20081, 20211, 20210], [20082, 20083, 20211], [20083, 20212, 20211], [20083, 20084, 20213], [20083, 20213, 20212], [20084, 20085, 20213], [20085, 20214, 20213], [20085, 20086, 20215], [20085, 20215, 20214], [20086, 20087, 20215], [20087, 20216, 20215], [20087, 20088, 20217], [20087, 20217, 20216], [20088, 20089, 20217], [20089, 20218, 20217], [20089, 20090, 20219], [20089, 20219, 20218], [20090, 20091, 20219], 
[20091, 20220, 20219], [20091, 20092, 20221], [20091, 20221, 20220], [20092, 20093, 20221], [20093, 20222, 20221], [20093, 20094, 20223], [20093, 20223, 20222], [20094, 20095, 20223], [20095, 20224, 20223], [20095, 20096, 20225], [20095, 20225, 20224], [20096, 20097, 20225], [20097, 20226, 20225], [20097, 20098, 20227], [20097, 20227, 20226], [20098, 20099, 20227], [20099, 20228, 20227], [20099, 20100, 20229], [20099, 20229, 20228], [20100, 20101, 20229], [20101, 20230, 20229], [20101, 20102, 20231], [20101, 20231, 20230], [20102, 20103, 20231], [20103, 20232, 20231], [20103, 20104, 20233], [20103, 20233, 20232], [20104, 20105, 20233], [20105, 20234, 20233], [20105, 20106, 20235], [20105, 20235, 20234], [20106, 20107, 20235], [20107, 20236, 20235], [20107, 20108, 20237], [20107, 20237, 20236], [20108, 20109, 20237], [20109, 20238, 20237], [20109, 20110, 20239], [20109, 20239, 20238], [20110, 20111, 20239], [20111, 20240, 20239], [20111, 20112, 20241], [20111, 20241, 20240], [20112, 20113, 20241], [20113, 20242, 20241], [20113, 20114, 20243], [20113, 20243, 20242], [20114, 20115, 20243], [20115, 20244, 20243], [20115, 20116, 20245], [20115, 20245, 20244], [20116, 20117, 20245], [20117, 20246, 20245], [20117, 20118, 20247], [20117, 20247, 20246], [20118, 20119, 20247], [20119, 20248, 20247], [20119, 20120, 20249], [20119, 20249, 20248], [20120, 20121, 20249], [20121, 20250, 20249], [20121, 20122, 20251], [20121, 20251, 20250], [20122, 20123, 20251], [20123, 20252, 20251], [20123, 20124, 20253], [20123, 20253, 20252], [20124, 20125, 20253], [20125, 20254, 20253], [20125, 20126, 20255], [20125, 20255, 20254], [20126, 20127, 20255], [20127, 20256, 20255], [20127, 20128, 20257], [20127, 20257, 20256], [20128, 20129, 20257], [20129, 20258, 20257], [20129, 20130, 20259], [20129, 20259, 20258], [20130, 20131, 20259], [20131, 20260, 20259], [20131, 20132, 20261], [20131, 20261, 20260], [20132, 20133, 20261], [20133, 20262, 20261], [20133, 20134, 20263], [20133, 20263, 20262], [20134, 20135, 20263], [20135, 20264, 20263], [20135, 20136, 20265], [20135, 20265, 20264], [20136, 20137, 20265], [20137, 20266, 20265], [20137, 20138, 20267], [20137, 20267, 20266], [20138, 20139, 20267], [20139, 20268, 20267], [20139, 20140, 20269], [20139, 20269, 20268], [20140, 20141, 20269], [20141, 20270, 20269], [20141, 20142, 20271], [20141, 20271, 20270], [20142, 20143, 20271], [20143, 20272, 20271], [20143, 20144, 20273], [20143, 20273, 20272], [20144, 20145, 20273], [20145, 20274, 20273], [20145, 20146, 20275], [20145, 20275, 20274], [20146, 20147, 20275], [20147, 20276, 20275], [20147, 20148, 20277], [20147, 20277, 20276], [20148, 20149, 20277], [20149, 20278, 20277], [20149, 20150, 20279], [20149, 20279, 20278], [20150, 20151, 20279], [20151, 20280, 20279], [20151, 20152, 20281], [20151, 20281, 20280], [20152, 20153, 20281], [20153, 20282, 20281], [20153, 20154, 20283], [20153, 20283, 20282], [20154, 20155, 20283], [20155, 20284, 20283], [20155, 20156, 20285], [20155, 20285, 20284], [20156, 20157, 20285], [20157, 20286, 20285], [20157, 20158, 20287], [20157, 20287, 20286], [20158, 20159, 20287], [20159, 20288, 20287], [20159, 20160, 20289], [20159, 20289, 20288], [20160, 20161, 20289], [20161, 20290, 20289], [20161, 20162, 20291], [20161, 20291, 20290], [20162, 20163, 20291], [20163, 20292, 20291], [20163, 20164, 20293], [20163, 20293, 20292], [20164, 20165, 20293], [20165, 20294, 20293], [20165, 20166, 20295], [20165, 20295, 20294], [20166, 20167, 20295], [20167, 20296, 20295], [20167, 20168, 20297], [20167, 
20297, 20296], [20168, 20169, 20297], [20169, 20298, 20297], [20170, 20171, 20299], [20171, 20300, 20299], [20171, 20172, 20301], [20171, 20301, 20300], [20172, 20173, 20301], [20173, 20302, 20301], [20173, 20174, 20303], [20173, 20303, 20302], [20174, 20175, 20303], [20175, 20304, 20303], [20175, 20176, 20305], [20175, 20305, 20304], [20176, 20177, 20305], [20177, 20306, 20305], [20177, 20178, 20307], [20177, 20307, 20306], [20178, 20179, 20307], [20179, 20308, 20307], [20179, 20180, 20309], [20179, 20309, 20308], [20180, 20181, 20309], [20181, 20310, 20309], [20181, 20182, 20311], [20181, 20311, 20310], [20182, 20183, 20311], [20183, 20312, 20311], [20183, 20184, 20313], [20183, 20313, 20312], [20184, 20185, 20313], [20185, 20314, 20313], [20185, 20186, 20315], [20185, 20315, 20314], [20186, 20187, 20315], [20187, 20316, 20315], [20187, 20188, 20317], [20187, 20317, 20316], [20188, 20189, 20317], [20189, 20318, 20317], [20189, 20190, 20319], [20189, 20319, 20318], [20190, 20191, 20319], [20191, 20320, 20319], [20191, 20192, 20321], [20191, 20321, 20320], [20192, 20193, 20321], [20193, 20322, 20321], [20193, 20194, 20323], [20193, 20323, 20322], [20194, 20195, 20323], [20195, 20324, 20323], [20195, 20196, 20325], [20195, 20325, 20324], [20196, 20197, 20325], [20197, 20326, 20325], [20197, 20198, 20327], [20197, 20327, 20326], [20198, 20199, 20327], [20199, 20328, 20327], [20199, 20200, 20329], [20199, 20329, 20328], [20200, 20201, 20329], [20201, 20330, 20329], [20201, 20202, 20331], [20201, 20331, 20330], [20202, 20203, 20331], [20203, 20332, 20331], [20203, 20204, 20333], [20203, 20333, 20332], [20204, 20205, 20333], [20205, 20334, 20333], [20205, 20206, 20335], [20205, 20335, 20334], [20206, 20207, 20335], [20207, 20336, 20335], [20207, 20208, 20337], [20207, 20337, 20336], [20208, 20209, 20337], [20209, 20338, 20337], [20209, 20210, 20339], [20209, 20339, 20338], [20210, 20211, 20339], [20211, 20340, 20339], [20211, 20212, 20341], [20211, 20341, 20340], [20212, 20213, 20341], [20213, 20342, 20341], [20213, 20214, 20343], [20213, 20343, 20342], [20214, 20215, 20343], [20215, 20344, 20343], [20215, 20216, 20345], [20215, 20345, 20344], [20216, 20217, 20345], [20217, 20346, 20345], [20217, 20218, 20347], [20217, 20347, 20346], [20218, 20219, 20347], [20219, 20348, 20347], [20219, 20220, 20349], [20219, 20349, 20348], [20220, 20221, 20349], [20221, 20350, 20349], [20221, 20222, 20351], [20221, 20351, 20350], [20222, 20223, 20351], [20223, 20352, 20351], [20223, 20224, 20353], [20223, 20353, 20352], [20224, 20225, 20353], [20225, 20354, 20353], [20225, 20226, 20355], [20225, 20355, 20354], [20226, 20227, 20355], [20227, 20356, 20355], [20227, 20228, 20357], [20227, 20357, 20356], [20228, 20229, 20357], [20229, 20358, 20357], [20229, 20230, 20359], [20229, 20359, 20358], [20230, 20231, 20359], [20231, 20360, 20359], [20231, 20232, 20361], [20231, 20361, 20360], [20232, 20233, 20361], [20233, 20362, 20361], [20233, 20234, 20363], [20233, 20363, 20362], [20234, 20235, 20363], [20235, 20364, 20363], [20235, 20236, 20365], [20235, 20365, 20364], [20236, 20237, 20365], [20237, 20366, 20365], [20237, 20238, 20367], [20237, 20367, 20366], [20238, 20239, 20367], [20239, 20368, 20367], [20239, 20240, 20369], [20239, 20369, 20368], [20240, 20241, 20369], [20241, 20370, 20369], [20241, 20242, 20371], [20241, 20371, 20370], [20242, 20243, 20371], [20243, 20372, 20371], [20243, 20244, 20373], [20243, 20373, 20372], [20244, 20245, 20373], [20245, 20374, 20373], [20245, 20246, 20375], [20245, 20375, 
20374], [20246, 20247, 20375], [20247, 20376, 20375], [20247, 20248, 20377], [20247, 20377, 20376], [20248, 20249, 20377], [20249, 20378, 20377], [20249, 20250, 20379], [20249, 20379, 20378], [20250, 20251, 20379], [20251, 20380, 20379], [20251, 20252, 20381], [20251, 20381, 20380], [20252, 20253, 20381], [20253, 20382, 20381], [20253, 20254, 20383], [20253, 20383, 20382], [20254, 20255, 20383], [20255, 20384, 20383], [20255, 20256, 20385], [20255, 20385, 20384], [20256, 20257, 20385], [20257, 20386, 20385], [20257, 20258, 20387], [20257, 20387, 20386], [20258, 20259, 20387], [20259, 20388, 20387], [20259, 20260, 20389], [20259, 20389, 20388], [20260, 20261, 20389], [20261, 20390, 20389], [20261, 20262, 20391], [20261, 20391, 20390], [20262, 20263, 20391], [20263, 20392, 20391], [20263, 20264, 20393], [20263, 20393, 20392], [20264, 20265, 20393], [20265, 20394, 20393], [20265, 20266, 20395], [20265, 20395, 20394], [20266, 20267, 20395], [20267, 20396, 20395], [20267, 20268, 20397], [20267, 20397, 20396], [20268, 20269, 20397], [20269, 20398, 20397], [20269, 20270, 20399], [20269, 20399, 20398], [20270, 20271, 20399], [20271, 20400, 20399], [20271, 20272, 20401], [20271, 20401, 20400], [20272, 20273, 20401], [20273, 20402, 20401], [20273, 20274, 20403], [20273, 20403, 20402], [20274, 20275, 20403], [20275, 20404, 20403], [20275, 20276, 20405], [20275, 20405, 20404], [20276, 20277, 20405], [20277, 20406, 20405], [20277, 20278, 20407], [20277, 20407, 20406], [20278, 20279, 20407], [20279, 20408, 20407], [20279, 20280, 20409], [20279, 20409, 20408], [20280, 20281, 20409], [20281, 20410, 20409], [20281, 20282, 20411], [20281, 20411, 20410], [20282, 20283, 20411], [20283, 20412, 20411], [20283, 20284, 20413], [20283, 20413, 20412], [20284, 20285, 20413], [20285, 20414, 20413], [20285, 20286, 20415], [20285, 20415, 20414], [20286, 20287, 20415], [20287, 20416, 20415], [20287, 20288, 20417], [20287, 20417, 20416], [20288, 20289, 20417], [20289, 20418, 20417], [20289, 20290, 20419], [20289, 20419, 20418], [20290, 20291, 20419], [20291, 20420, 20419], [20291, 20292, 20421], [20291, 20421, 20420], [20292, 20293, 20421], [20293, 20422, 20421], [20293, 20294, 20423], [20293, 20423, 20422], [20294, 20295, 20423], [20295, 20424, 20423], [20295, 20296, 20425], [20295, 20425, 20424], [20296, 20297, 20425], [20297, 20426, 20425], [20297, 20298, 20427], [20297, 20427, 20426], [20299, 20300, 20429], [20299, 20429, 20428], [20300, 20301, 20429], [20301, 20430, 20429], [20301, 20302, 20431], [20301, 20431, 20430], [20302, 20303, 20431], [20303, 20432, 20431], [20303, 20304, 20433], [20303, 20433, 20432], [20304, 20305, 20433], [20305, 20434, 20433], [20305, 20306, 20435], [20305, 20435, 20434], [20306, 20307, 20435], [20307, 20436, 20435], [20307, 20308, 20437], [20307, 20437, 20436], [20308, 20309, 20437], [20309, 20438, 20437], [20309, 20310, 20439], [20309, 20439, 20438], [20310, 20311, 20439], [20311, 20440, 20439], [20311, 20312, 20441], [20311, 20441, 20440], [20312, 20313, 20441], [20313, 20442, 20441], [20313, 20314, 20443], [20313, 20443, 20442], [20314, 20315, 20443], [20315, 20444, 20443], [20315, 20316, 20445], [20315, 20445, 20444], [20316, 20317, 20445], [20317, 20446, 20445], [20317, 20318, 20447], [20317, 20447, 20446], [20318, 20319, 20447], [20319, 20448, 20447], [20319, 20320, 20449], [20319, 20449, 20448], [20320, 20321, 20449], [20321, 20450, 20449], [20321, 20322, 20451], [20321, 20451, 20450], [20322, 20323, 20451], [20323, 20452, 20451], [20323, 20324, 20453], [20323, 20453, 20452], 
[20324, 20325, 20453], [20325, 20454, 20453], [20325, 20326, 20455], [20325, 20455, 20454], [20326, 20327, 20455], [20327, 20456, 20455], [20327, 20328, 20457], [20327, 20457, 20456], [20328, 20329, 20457], [20329, 20458, 20457], [20329, 20330, 20459], [20329, 20459, 20458], [20330, 20331, 20459], [20331, 20460, 20459], [20331, 20332, 20461], [20331, 20461, 20460], [20332, 20333, 20461], [20333, 20462, 20461], [20333, 20334, 20463], [20333, 20463, 20462], [20334, 20335, 20463], [20335, 20464, 20463], [20335, 20336, 20465], [20335, 20465, 20464], [20336, 20337, 20465], [20337, 20466, 20465], [20337, 20338, 20467], [20337, 20467, 20466], [20338, 20339, 20467], [20339, 20468, 20467], [20339, 20340, 20469], [20339, 20469, 20468], [20340, 20341, 20469], [20341, 20470, 20469], [20341, 20342, 20471], [20341, 20471, 20470], [20342, 20343, 20471], [20343, 20472, 20471], [20343, 20344, 20473], [20343, 20473, 20472], [20344, 20345, 20473], [20345, 20474, 20473], [20345, 20346, 20475], [20345, 20475, 20474], [20346, 20347, 20475], [20347, 20476, 20475], [20347, 20348, 20477], [20347, 20477, 20476], [20348, 20349, 20477], [20349, 20478, 20477], [20349, 20350, 20479], [20349, 20479, 20478], [20350, 20351, 20479], [20351, 20480, 20479], [20351, 20352, 20481], [20351, 20481, 20480], [20352, 20353, 20481], [20353, 20482, 20481], [20353, 20354, 20483], [20353, 20483, 20482], [20354, 20355, 20483], [20355, 20484, 20483], [20355, 20356, 20485], [20355, 20485, 20484], [20356, 20357, 20485], [20357, 20486, 20485], [20357, 20358, 20487], [20357, 20487, 20486], [20358, 20359, 20487], [20359, 20488, 20487], [20359, 20360, 20489], [20359, 20489, 20488], [20360, 20361, 20489], [20361, 20490, 20489], [20361, 20362, 20491], [20361, 20491, 20490], [20362, 20363, 20491], [20363, 20492, 20491], [20363, 20364, 20493], [20363, 20493, 20492], [20364, 20365, 20493], [20365, 20494, 20493], [20365, 20366, 20495], [20365, 20495, 20494], [20366, 20367, 20495], [20367, 20496, 20495], [20367, 20368, 20497], [20367, 20497, 20496], [20368, 20369, 20497], [20369, 20498, 20497], [20369, 20370, 20499], [20369, 20499, 20498], [20370, 20371, 20499], [20371, 20500, 20499], [20371, 20372, 20501], [20371, 20501, 20500], [20372, 20373, 20501], [20373, 20502, 20501], [20373, 20374, 20503], [20373, 20503, 20502], [20374, 20375, 20503], [20375, 20504, 20503], [20375, 20376, 20505], [20375, 20505, 20504], [20376, 20377, 20505], [20377, 20506, 20505], [20377, 20378, 20507], [20377, 20507, 20506], [20378, 20379, 20507], [20379, 20508, 20507], [20379, 20380, 20509], [20379, 20509, 20508], [20380, 20381, 20509], [20381, 20510, 20509], [20381, 20382, 20511], [20381, 20511, 20510], [20382, 20383, 20511], [20383, 20512, 20511], [20383, 20384, 20513], [20383, 20513, 20512], [20384, 20385, 20513], [20385, 20514, 20513], [20385, 20386, 20515], [20385, 20515, 20514], [20386, 20387, 20515], [20387, 20516, 20515], [20387, 20388, 20517], [20387, 20517, 20516], [20388, 20389, 20517], [20389, 20518, 20517], [20389, 20390, 20519], [20389, 20519, 20518], [20390, 20391, 20519], [20391, 20520, 20519], [20391, 20392, 20521], [20391, 20521, 20520], [20392, 20393, 20521], [20393, 20522, 20521], [20393, 20394, 20523], [20393, 20523, 20522], [20394, 20395, 20523], [20395, 20524, 20523], [20395, 20396, 20525], [20395, 20525, 20524], [20396, 20397, 20525], [20397, 20526, 20525], [20397, 20398, 20527], [20397, 20527, 20526], [20398, 20399, 20527], [20399, 20528, 20527], [20399, 20400, 20529], [20399, 20529, 20528], [20400, 20401, 20529], [20401, 20530, 20529], [20401, 
20402, 20531], [20401, 20531, 20530], [20402, 20403, 20531], [20403, 20532, 20531], [20403, 20404, 20533], [20403, 20533, 20532], [20404, 20405, 20533], [20405, 20534, 20533], [20405, 20406, 20535], [20405, 20535, 20534], [20406, 20407, 20535], [20407, 20536, 20535], [20407, 20408, 20537], [20407, 20537, 20536], [20408, 20409, 20537], [20409, 20538, 20537], [20409, 20410, 20539], [20409, 20539, 20538], [20410, 20411, 20539], [20411, 20540, 20539], [20411, 20412, 20541], [20411, 20541, 20540], [20412, 20413, 20541], [20413, 20542, 20541], [20413, 20414, 20543], [20413, 20543, 20542], [20414, 20415, 20543], [20415, 20544, 20543], [20415, 20416, 20545], [20415, 20545, 20544], [20416, 20417, 20545], [20417, 20546, 20545], [20417, 20418, 20547], [20417, 20547, 20546], [20418, 20419, 20547], [20419, 20548, 20547], [20419, 20420, 20549], [20419, 20549, 20548], [20420, 20421, 20549], [20421, 20550, 20549], [20421, 20422, 20551], [20421, 20551, 20550], [20422, 20423, 20551], [20423, 20552, 20551], [20423, 20424, 20553], [20423, 20553, 20552], [20424, 20425, 20553], [20425, 20554, 20553], [20425, 20426, 20555], [20425, 20555, 20554], [20426, 20427, 20555], [20427, 20556, 20555], [20428, 20429, 20557], [20429, 20558, 20557], [20429, 20430, 20559], [20429, 20559, 20558], [20430, 20431, 20559], [20431, 20560, 20559], [20431, 20432, 20561], [20431, 20561, 20560], [20432, 20433, 20561], [20433, 20562, 20561], [20433, 20434, 20563], [20433, 20563, 20562], [20434, 20435, 20563], [20435, 20564, 20563], [20435, 20436, 20565], [20435, 20565, 20564], [20436, 20437, 20565], [20437, 20566, 20565], [20437, 20438, 20567], [20437, 20567, 20566], [20438, 20439, 20567], [20439, 20568, 20567], [20439, 20440, 20569], [20439, 20569, 20568], [20440, 20441, 20569], [20441, 20570, 20569], [20441, 20442, 20571], [20441, 20571, 20570], [20442, 20443, 20571], [20443, 20572, 20571], [20443, 20444, 20573], [20443, 20573, 20572], [20444, 20445, 20573], [20445, 20574, 20573], [20445, 20446, 20575], [20445, 20575, 20574], [20446, 20447, 20575], [20447, 20576, 20575], [20447, 20448, 20577], [20447, 20577, 20576], [20448, 20449, 20577], [20449, 20578, 20577], [20449, 20450, 20579], [20449, 20579, 20578], [20450, 20451, 20579], [20451, 20580, 20579], [20451, 20452, 20581], [20451, 20581, 20580], [20452, 20453, 20581], [20453, 20582, 20581], [20453, 20454, 20583], [20453, 20583, 20582], [20454, 20455, 20583], [20455, 20584, 20583], [20455, 20456, 20585], [20455, 20585, 20584], [20456, 20457, 20585], [20457, 20586, 20585], [20457, 20458, 20587], [20457, 20587, 20586], [20458, 20459, 20587], [20459, 20588, 20587], [20459, 20460, 20589], [20459, 20589, 20588], [20460, 20461, 20589], [20461, 20590, 20589], [20461, 20462, 20591], [20461, 20591, 20590], [20462, 20463, 20591], [20463, 20592, 20591], [20463, 20464, 20593], [20463, 20593, 20592], [20464, 20465, 20593], [20465, 20594, 20593], [20465, 20466, 20595], [20465, 20595, 20594], [20466, 20467, 20595], [20467, 20596, 20595], [20467, 20468, 20597], [20467, 20597, 20596], [20468, 20469, 20597], [20469, 20598, 20597], [20469, 20470, 20599], [20469, 20599, 20598], [20470, 20471, 20599], [20471, 20600, 20599], [20471, 20472, 20601], [20471, 20601, 20600], [20472, 20473, 20601], [20473, 20602, 20601], [20473, 20474, 20603], [20473, 20603, 20602], [20474, 20475, 20603], [20475, 20604, 20603], [20475, 20476, 20605], [20475, 20605, 20604], [20476, 20477, 20605], [20477, 20606, 20605], [20477, 20478, 20607], [20477, 20607, 20606], [20478, 20479, 20607], [20479, 20608, 20607], [20479, 20480, 
20609], [20479, 20609, 20608], [20480, 20481, 20609], [20481, 20610, 20609], [20481, 20482, 20611], [20481, 20611, 20610], [20482, 20483, 20611], [20483, 20612, 20611], [20483, 20484, 20613], [20483, 20613, 20612], [20484, 20485, 20613], [20485, 20614, 20613], [20485, 20486, 20615], [20485, 20615, 20614], [20486, 20487, 20615], [20487, 20616, 20615], [20487, 20488, 20617], [20487, 20617, 20616], [20488, 20489, 20617], [20489, 20618, 20617], [20489, 20490, 20619], [20489, 20619, 20618], [20490, 20491, 20619], [20491, 20620, 20619], [20491, 20492, 20621], [20491, 20621, 20620], [20492, 20493, 20621], [20493, 20622, 20621], [20493, 20494, 20623], [20493, 20623, 20622], [20494, 20495, 20623], [20495, 20624, 20623], [20495, 20496, 20625], [20495, 20625, 20624], [20496, 20497, 20625], [20497, 20626, 20625], [20497, 20498, 20627], [20497, 20627, 20626], [20498, 20499, 20627], [20499, 20628, 20627], [20499, 20500, 20629], [20499, 20629, 20628], [20500, 20501, 20629], [20501, 20630, 20629], [20501, 20502, 20631], [20501, 20631, 20630], [20502, 20503, 20631], [20503, 20632, 20631], [20503, 20504, 20633], [20503, 20633, 20632], [20504, 20505, 20633], [20505, 20634, 20633], [20505, 20506, 20635], [20505, 20635, 20634], [20506, 20507, 20635], [20507, 20636, 20635], [20507, 20508, 20637], [20507, 20637, 20636], [20508, 20509, 20637], [20509, 20638, 20637], [20509, 20510, 20639], [20509, 20639, 20638], [20510, 20511, 20639], [20511, 20640, 20639], [20511, 20512, 20641], [20511, 20641, 20640], [20512, 20513, 20641], [20513, 20642, 20641], [20513, 20514, 20643], [20513, 20643, 20642], [20514, 20515, 20643], [20515, 20644, 20643], [20515, 20516, 20645], [20515, 20645, 20644], [20516, 20517, 20645], [20517, 20646, 20645], [20517, 20518, 20647], [20517, 20647, 20646], [20518, 20519, 20647], [20519, 20648, 20647], [20519, 20520, 20649], [20519, 20649, 20648], [20520, 20521, 20649], [20521, 20650, 20649], [20521, 20522, 20651], [20521, 20651, 20650], [20522, 20523, 20651], [20523, 20652, 20651], [20523, 20524, 20653], [20523, 20653, 20652], [20524, 20525, 20653], [20525, 20654, 20653], [20525, 20526, 20655], [20525, 20655, 20654], [20526, 20527, 20655], [20527, 20656, 20655], [20527, 20528, 20657], [20527, 20657, 20656], [20528, 20529, 20657], [20529, 20658, 20657], [20529, 20530, 20659], [20529, 20659, 20658], [20530, 20531, 20659], [20531, 20660, 20659], [20531, 20532, 20661], [20531, 20661, 20660], [20532, 20533, 20661], [20533, 20662, 20661], [20533, 20534, 20663], [20533, 20663, 20662], [20534, 20535, 20663], [20535, 20664, 20663], [20535, 20536, 20665], [20535, 20665, 20664], [20536, 20537, 20665], [20537, 20666, 20665], [20537, 20538, 20667], [20537, 20667, 20666], [20538, 20539, 20667], [20539, 20668, 20667], [20539, 20540, 20669], [20539, 20669, 20668], [20540, 20541, 20669], [20541, 20670, 20669], [20541, 20542, 20671], [20541, 20671, 20670], [20542, 20543, 20671], [20543, 20672, 20671], [20543, 20544, 20673], [20543, 20673, 20672], [20544, 20545, 20673], [20545, 20674, 20673], [20545, 20546, 20675], [20545, 20675, 20674], [20546, 20547, 20675], [20547, 20676, 20675], [20547, 20548, 20677], [20547, 20677, 20676], [20548, 20549, 20677], [20549, 20678, 20677], [20549, 20550, 20679], [20549, 20679, 20678], [20550, 20551, 20679], [20551, 20680, 20679], [20551, 20552, 20681], [20551, 20681, 20680], [20552, 20553, 20681], [20553, 20682, 20681], [20553, 20554, 20683], [20553, 20683, 20682], [20554, 20555, 20683], [20555, 20684, 20683], [20555, 20556, 20685], [20555, 20685, 20684], [20557, 20558, 20687], 
[20557, 20687, 20686], [20558, 20559, 20687], [20559, 20688, 20687], [20559, 20560, 20689], [20559, 20689, 20688], [20560, 20561, 20689], [20561, 20690, 20689], [20561, 20562, 20691], [20561, 20691, 20690], [20562, 20563, 20691], [20563, 20692, 20691], [20563, 20564, 20693], [20563, 20693, 20692], [20564, 20565, 20693], [20565, 20694, 20693], [20565, 20566, 20695], [20565, 20695, 20694], [20566, 20567, 20695], [20567, 20696, 20695], [20567, 20568, 20697], [20567, 20697, 20696], [20568, 20569, 20697], [20569, 20698, 20697], [20569, 20570, 20699], [20569, 20699, 20698], [20570, 20571, 20699], [20571, 20700, 20699], [20571, 20572, 20701], [20571, 20701, 20700], [20572, 20573, 20701], [20573, 20702, 20701], [20573, 20574, 20703], [20573, 20703, 20702], [20574, 20575, 20703], [20575, 20704, 20703], [20575, 20576, 20705], [20575, 20705, 20704], [20576, 20577, 20705], [20577, 20706, 20705], [20577, 20578, 20707], [20577, 20707, 20706], [20578, 20579, 20707], [20579, 20708, 20707], [20579, 20580, 20709], [20579, 20709, 20708], [20580, 20581, 20709], [20581, 20710, 20709], [20581, 20582, 20711], [20581, 20711, 20710], [20582, 20583, 20711], [20583, 20712, 20711], [20583, 20584, 20713], [20583, 20713, 20712], [20584, 20585, 20713], [20585, 20714, 20713], [20585, 20586, 20715], [20585, 20715, 20714], [20586, 20587, 20715], [20587, 20716, 20715], [20587, 20588, 20717], [20587, 20717, 20716], [20588, 20589, 20717], [20589, 20718, 20717], [20589, 20590, 20719], [20589, 20719, 20718], [20590, 20591, 20719], [20591, 20720, 20719], [20591, 20592, 20721], [20591, 20721, 20720], [20592, 20593, 20721], [20593, 20722, 20721], [20593, 20594, 20723], [20593, 20723, 20722], [20594, 20595, 20723], [20595, 20724, 20723], [20595, 20596, 20725], [20595, 20725, 20724], [20596, 20597, 20725], [20597, 20726, 20725], [20597, 20598, 20727], [20597, 20727, 20726], [20598, 20599, 20727], [20599, 20728, 20727], [20599, 20600, 20729], [20599, 20729, 20728], [20600, 20601, 20729], [20601, 20730, 20729], [20601, 20602, 20731], [20601, 20731, 20730], [20602, 20603, 20731], [20603, 20732, 20731], [20603, 20604, 20733], [20603, 20733, 20732], [20604, 20605, 20733], [20605, 20734, 20733], [20605, 20606, 20735], [20605, 20735, 20734], [20606, 20607, 20735], [20607, 20736, 20735], [20607, 20608, 20737], [20607, 20737, 20736], [20608, 20609, 20737], [20609, 20738, 20737], [20609, 20610, 20739], [20609, 20739, 20738], [20610, 20611, 20739], [20611, 20740, 20739], [20611, 20612, 20741], [20611, 20741, 20740], [20612, 20613, 20741], [20613, 20742, 20741], [20613, 20614, 20743], [20613, 20743, 20742], [20614, 20615, 20743], [20615, 20744, 20743], [20615, 20616, 20745], [20615, 20745, 20744], [20616, 20617, 20745], [20617, 20746, 20745], [20617, 20618, 20747], [20617, 20747, 20746], [20618, 20619, 20747], [20619, 20748, 20747], [20619, 20620, 20749], [20619, 20749, 20748], [20620, 20621, 20749], [20621, 20750, 20749], [20621, 20622, 20751], [20621, 20751, 20750], [20622, 20623, 20751], [20623, 20752, 20751], [20623, 20624, 20753], [20623, 20753, 20752], [20624, 20625, 20753], [20625, 20754, 20753], [20625, 20626, 20755], [20625, 20755, 20754], [20626, 20627, 20755], [20627, 20756, 20755], [20627, 20628, 20757], [20627, 20757, 20756], [20628, 20629, 20757], [20629, 20758, 20757], [20629, 20630, 20759], [20629, 20759, 20758], [20630, 20631, 20759], [20631, 20760, 20759], [20631, 20632, 20761], [20631, 20761, 20760], [20632, 20633, 20761], [20633, 20762, 20761], [20633, 20634, 20763], [20633, 20763, 20762], [20634, 20635, 20763], [20635, 
20764, 20763], [20635, 20636, 20765], [20635, 20765, 20764], [20636, 20637, 20765], [20637, 20766, 20765], [20637, 20638, 20767], [20637, 20767, 20766], [20638, 20639, 20767], [20639, 20768, 20767], [20639, 20640, 20769], [20639, 20769, 20768], [20640, 20641, 20769], [20641, 20770, 20769], [20641, 20642, 20771], [20641, 20771, 20770], [20642, 20643, 20771], [20643, 20772, 20771], [20643, 20644, 20773], [20643, 20773, 20772], [20644, 20645, 20773], [20645, 20774, 20773], [20645, 20646, 20775], [20645, 20775, 20774], [20646, 20647, 20775], [20647, 20776, 20775], [20647, 20648, 20777], [20647, 20777, 20776], [20648, 20649, 20777], [20649, 20778, 20777], [20649, 20650, 20779], [20649, 20779, 20778], [20650, 20651, 20779], [20651, 20780, 20779], [20651, 20652, 20781], [20651, 20781, 20780], [20652, 20653, 20781], [20653, 20782, 20781], [20653, 20654, 20783], [20653, 20783, 20782], [20654, 20655, 20783], [20655, 20784, 20783], [20655, 20656, 20785], [20655, 20785, 20784], [20656, 20657, 20785], [20657, 20786, 20785], [20657, 20658, 20787], [20657, 20787, 20786], [20658, 20659, 20787], [20659, 20788, 20787], [20659, 20660, 20789], [20659, 20789, 20788], [20660, 20661, 20789], [20661, 20790, 20789], [20661, 20662, 20791], [20661, 20791, 20790], [20662, 20663, 20791], [20663, 20792, 20791], [20663, 20664, 20793], [20663, 20793, 20792], [20664, 20665, 20793], [20665, 20794, 20793], [20665, 20666, 20795], [20665, 20795, 20794], [20666, 20667, 20795], [20667, 20796, 20795], [20667, 20668, 20797], [20667, 20797, 20796], [20668, 20669, 20797], [20669, 20798, 20797], [20669, 20670, 20799], [20669, 20799, 20798], [20670, 20671, 20799], [20671, 20800, 20799], [20671, 20672, 20801], [20671, 20801, 20800], [20672, 20673, 20801], [20673, 20802, 20801], [20673, 20674, 20803], [20673, 20803, 20802], [20674, 20675, 20803], [20675, 20804, 20803], [20675, 20676, 20805], [20675, 20805, 20804], [20676, 20677, 20805], [20677, 20806, 20805], [20677, 20678, 20807], [20677, 20807, 20806], [20678, 20679, 20807], [20679, 20808, 20807], [20679, 20680, 20809], [20679, 20809, 20808], [20680, 20681, 20809], [20681, 20810, 20809], [20681, 20682, 20811], [20681, 20811, 20810], [20682, 20683, 20811], [20683, 20812, 20811], [20683, 20684, 20813], [20683, 20813, 20812], [20684, 20685, 20813], [20685, 20814, 20813], [20686, 20687, 20815], [20687, 20816, 20815], [20687, 20688, 20817], [20687, 20817, 20816], [20688, 20689, 20817], [20689, 20818, 20817], [20689, 20690, 20819], [20689, 20819, 20818], [20690, 20691, 20819], [20691, 20820, 20819], [20691, 20692, 20821], [20691, 20821, 20820], [20692, 20693, 20821], [20693, 20822, 20821], [20693, 20694, 20823], [20693, 20823, 20822], [20694, 20695, 20823], [20695, 20824, 20823], [20695, 20696, 20825], [20695, 20825, 20824], [20696, 20697, 20825], [20697, 20826, 20825], [20697, 20698, 20827], [20697, 20827, 20826], [20698, 20699, 20827], [20699, 20828, 20827], [20699, 20700, 20829], [20699, 20829, 20828], [20700, 20701, 20829], [20701, 20830, 20829], [20701, 20702, 20831], [20701, 20831, 20830], [20702, 20703, 20831], [20703, 20832, 20831], [20703, 20704, 20833], [20703, 20833, 20832], [20704, 20705, 20833], [20705, 20834, 20833], [20705, 20706, 20835], [20705, 20835, 20834], [20706, 20707, 20835], [20707, 20836, 20835], [20707, 20708, 20837], [20707, 20837, 20836], [20708, 20709, 20837], [20709, 20838, 20837], [20709, 20710, 20839], [20709, 20839, 20838], [20710, 20711, 20839], [20711, 20840, 20839], [20711, 20712, 20841], [20711, 20841, 20840], [20712, 20713, 20841], [20713, 20842, 
20841], [20713, 20714, 20843], [20713, 20843, 20842], [20714, 20715, 20843], [20715, 20844, 20843], [20715, 20716, 20845], [20715, 20845, 20844], [20716, 20717, 20845], [20717, 20846, 20845], [20717, 20718, 20847], [20717, 20847, 20846], [20718, 20719, 20847], [20719, 20848, 20847], [20719, 20720, 20849], [20719, 20849, 20848], [20720, 20721, 20849], [20721, 20850, 20849], [20721, 20722, 20851], [20721, 20851, 20850], [20722, 20723, 20851], [20723, 20852, 20851], [20723, 20724, 20853], [20723, 20853, 20852], [20724, 20725, 20853], [20725, 20854, 20853], [20725, 20726, 20855], [20725, 20855, 20854], [20726, 20727, 20855], [20727, 20856, 20855], [20727, 20728, 20857], [20727, 20857, 20856], [20728, 20729, 20857], [20729, 20858, 20857], [20729, 20730, 20859], [20729, 20859, 20858], [20730, 20731, 20859], [20731, 20860, 20859], [20731, 20732, 20861], [20731, 20861, 20860], [20732, 20733, 20861], [20733, 20862, 20861], [20733, 20734, 20863], [20733, 20863, 20862], [20734, 20735, 20863], [20735, 20864, 20863], [20735, 20736, 20865], [20735, 20865, 20864], [20736, 20737, 20865], [20737, 20866, 20865], [20737, 20738, 20867], [20737, 20867, 20866], [20738, 20739, 20867], [20739, 20868, 20867], [20739, 20740, 20869], [20739, 20869, 20868], [20740, 20741, 20869], [20741, 20870, 20869], [20741, 20742, 20871], [20741, 20871, 20870], [20742, 20743, 20871], [20743, 20872, 20871], [20743, 20744, 20873], [20743, 20873, 20872], [20744, 20745, 20873], [20745, 20874, 20873], [20745, 20746, 20875], [20745, 20875, 20874], [20746, 20747, 20875], [20747, 20876, 20875], [20747, 20748, 20877], [20747, 20877, 20876], [20748, 20749, 20877], [20749, 20878, 20877], [20749, 20750, 20879], [20749, 20879, 20878], [20750, 20751, 20879], [20751, 20880, 20879], [20751, 20752, 20881], [20751, 20881, 20880], [20752, 20753, 20881], [20753, 20882, 20881], [20753, 20754, 20883], [20753, 20883, 20882], [20754, 20755, 20883], [20755, 20884, 20883], [20755, 20756, 20885], [20755, 20885, 20884], [20756, 20757, 20885], [20757, 20886, 20885], [20757, 20758, 20887], [20757, 20887, 20886], [20758, 20759, 20887], [20759, 20888, 20887], [20759, 20760, 20889], [20759, 20889, 20888], [20760, 20761, 20889], [20761, 20890, 20889], [20761, 20762, 20891], [20761, 20891, 20890], [20762, 20763, 20891], [20763, 20892, 20891], [20763, 20764, 20893], [20763, 20893, 20892], [20764, 20765, 20893], [20765, 20894, 20893], [20765, 20766, 20895], [20765, 20895, 20894], [20766, 20767, 20895], [20767, 20896, 20895], [20767, 20768, 20897], [20767, 20897, 20896], [20768, 20769, 20897], [20769, 20898, 20897], [20769, 20770, 20899], [20769, 20899, 20898], [20770, 20771, 20899], [20771, 20900, 20899], [20771, 20772, 20901], [20771, 20901, 20900], [20772, 20773, 20901], [20773, 20902, 20901], [20773, 20774, 20903], [20773, 20903, 20902], [20774, 20775, 20903], [20775, 20904, 20903], [20775, 20776, 20905], [20775, 20905, 20904], [20776, 20777, 20905], [20777, 20906, 20905], [20777, 20778, 20907], [20777, 20907, 20906], [20778, 20779, 20907], [20779, 20908, 20907], [20779, 20780, 20909], [20779, 20909, 20908], [20780, 20781, 20909], [20781, 20910, 20909], [20781, 20782, 20911], [20781, 20911, 20910], [20782, 20783, 20911], [20783, 20912, 20911], [20783, 20784, 20913], [20783, 20913, 20912], [20784, 20785, 20913], [20785, 20914, 20913], [20785, 20786, 20915], [20785, 20915, 20914], [20786, 20787, 20915], [20787, 20916, 20915], [20787, 20788, 20917], [20787, 20917, 20916], [20788, 20789, 20917], [20789, 20918, 20917], [20789, 20790, 20919], [20789, 20919, 20918], 
[20790, 20791, 20919], [20791, 20920, 20919], [20791, 20792, 20921], [20791, 20921, 20920], [20792, 20793, 20921], [20793, 20922, 20921], [20793, 20794, 20923], [20793, 20923, 20922], [20794, 20795, 20923], [20795, 20924, 20923], [20795, 20796, 20925], [20795, 20925, 20924], [20796, 20797, 20925], [20797, 20926, 20925], [20797, 20798, 20927], [20797, 20927, 20926], [20798, 20799, 20927], [20799, 20928, 20927], [20799, 20800, 20929], [20799, 20929, 20928], [20800, 20801, 20929], [20801, 20930, 20929], [20801, 20802, 20931], [20801, 20931, 20930], [20802, 20803, 20931], [20803, 20932, 20931], [20803, 20804, 20933], [20803, 20933, 20932], [20804, 20805, 20933], [20805, 20934, 20933], [20805, 20806, 20935], [20805, 20935, 20934], [20806, 20807, 20935], [20807, 20936, 20935], [20807, 20808, 20937], [20807, 20937, 20936], [20808, 20809, 20937], [20809, 20938, 20937], [20809, 20810, 20939], [20809, 20939, 20938], [20810, 20811, 20939], [20811, 20940, 20939], [20811, 20812, 20941], [20811, 20941, 20940], [20812, 20813, 20941], [20813, 20942, 20941], [20813, 20814, 20943], [20813, 20943, 20942], [20815, 20816, 20945], [20815, 20945, 20944], [20816, 20817, 20945], [20817, 20946, 20945], [20817, 20818, 20947], [20817, 20947, 20946], [20818, 20819, 20947], [20819, 20948, 20947], [20819, 20820, 20949], [20819, 20949, 20948], [20820, 20821, 20949], [20821, 20950, 20949], [20821, 20822, 20951], [20821, 20951, 20950], [20822, 20823, 20951], [20823, 20952, 20951], [20823, 20824, 20953], [20823, 20953, 20952], [20824, 20825, 20953], [20825, 20954, 20953], [20825, 20826, 20955], [20825, 20955, 20954], [20826, 20827, 20955], [20827, 20956, 20955], [20827, 20828, 20957], [20827, 20957, 20956], [20828, 20829, 20957], [20829, 20958, 20957], [20829, 20830, 20959], [20829, 20959, 20958], [20830, 20831, 20959], [20831, 20960, 20959], [20831, 20832, 20961], [20831, 20961, 20960], [20832, 20833, 20961], [20833, 20962, 20961], [20833, 20834, 20963], [20833, 20963, 20962], [20834, 20835, 20963], [20835, 20964, 20963], [20835, 20836, 20965], [20835, 20965, 20964], [20836, 20837, 20965], [20837, 20966, 20965], [20837, 20838, 20967], [20837, 20967, 20966], [20838, 20839, 20967], [20839, 20968, 20967], [20839, 20840, 20969], [20839, 20969, 20968], [20840, 20841, 20969], [20841, 20970, 20969], [20841, 20842, 20971], [20841, 20971, 20970], [20842, 20843, 20971], [20843, 20972, 20971], [20843, 20844, 20973], [20843, 20973, 20972], [20844, 20845, 20973], [20845, 20974, 20973], [20845, 20846, 20975], [20845, 20975, 20974], [20846, 20847, 20975], [20847, 20976, 20975], [20847, 20848, 20977], [20847, 20977, 20976], [20848, 20849, 20977], [20849, 20978, 20977], [20849, 20850, 20979], [20849, 20979, 20978], [20850, 20851, 20979], [20851, 20980, 20979], [20851, 20852, 20981], [20851, 20981, 20980], [20852, 20853, 20981], [20853, 20982, 20981], [20853, 20854, 20983], [20853, 20983, 20982], [20854, 20855, 20983], [20855, 20984, 20983], [20855, 20856, 20985], [20855, 20985, 20984], [20856, 20857, 20985], [20857, 20986, 20985], [20857, 20858, 20987], [20857, 20987, 20986], [20858, 20859, 20987], [20859, 20988, 20987], [20859, 20860, 20989], [20859, 20989, 20988], [20860, 20861, 20989], [20861, 20990, 20989], [20861, 20862, 20991], [20861, 20991, 20990], [20862, 20863, 20991], [20863, 20992, 20991], [20863, 20864, 20993], [20863, 20993, 20992], [20864, 20865, 20993], [20865, 20994, 20993], [20865, 20866, 20995], [20865, 20995, 20994], [20866, 20867, 20995], [20867, 20996, 20995], [20867, 20868, 20997], [20867, 20997, 20996], [20868, 
20869, 20997], [20869, 20998, 20997], [20869, 20870, 20999], [20869, 20999, 20998], [20870, 20871, 20999], [20871, 21000, 20999], [20871, 20872, 21001], [20871, 21001, 21000], [20872, 20873, 21001], [20873, 21002, 21001], [20873, 20874, 21003], [20873, 21003, 21002], [20874, 20875, 21003], [20875, 21004, 21003], [20875, 20876, 21005], [20875, 21005, 21004], [20876, 20877, 21005], [20877, 21006, 21005], [20877, 20878, 21007], [20877, 21007, 21006], [20878, 20879, 21007], [20879, 21008, 21007], [20879, 20880, 21009], [20879, 21009, 21008], [20880, 20881, 21009], [20881, 21010, 21009], [20881, 20882, 21011], [20881, 21011, 21010], [20882, 20883, 21011], [20883, 21012, 21011], [20883, 20884, 21013], [20883, 21013, 21012], [20884, 20885, 21013], [20885, 21014, 21013], [20885, 20886, 21015], [20885, 21015, 21014], [20886, 20887, 21015], [20887, 21016, 21015], [20887, 20888, 21017], [20887, 21017, 21016], [20888, 20889, 21017], [20889, 21018, 21017], [20889, 20890, 21019], [20889, 21019, 21018], [20890, 20891, 21019], [20891, 21020, 21019], [20891, 20892, 21021], [20891, 21021, 21020], [20892, 20893, 21021], [20893, 21022, 21021], [20893, 20894, 21023], [20893, 21023, 21022], [20894, 20895, 21023], [20895, 21024, 21023], [20895, 20896, 21025], [20895, 21025, 21024], [20896, 20897, 21025], [20897, 21026, 21025], [20897, 20898, 21027], [20897, 21027, 21026], [20898, 20899, 21027], [20899, 21028, 21027], [20899, 20900, 21029], [20899, 21029, 21028], [20900, 20901, 21029], [20901, 21030, 21029], [20901, 20902, 21031], [20901, 21031, 21030], [20902, 20903, 21031], [20903, 21032, 21031], [20903, 20904, 21033], [20903, 21033, 21032], [20904, 20905, 21033], [20905, 21034, 21033], [20905, 20906, 21035], [20905, 21035, 21034], [20906, 20907, 21035], [20907, 21036, 21035], [20907, 20908, 21037], [20907, 21037, 21036], [20908, 20909, 21037], [20909, 21038, 21037], [20909, 20910, 21039], [20909, 21039, 21038], [20910, 20911, 21039], [20911, 21040, 21039], [20911, 20912, 21041], [20911, 21041, 21040], [20912, 20913, 21041], [20913, 21042, 21041], [20913, 20914, 21043], [20913, 21043, 21042], [20914, 20915, 21043], [20915, 21044, 21043], [20915, 20916, 21045], [20915, 21045, 21044], [20916, 20917, 21045], [20917, 21046, 21045], [20917, 20918, 21047], [20917, 21047, 21046], [20918, 20919, 21047], [20919, 21048, 21047], [20919, 20920, 21049], [20919, 21049, 21048], [20920, 20921, 21049], [20921, 21050, 21049], [20921, 20922, 21051], [20921, 21051, 21050], [20922, 20923, 21051], [20923, 21052, 21051], [20923, 20924, 21053], [20923, 21053, 21052], [20924, 20925, 21053], [20925, 21054, 21053], [20925, 20926, 21055], [20925, 21055, 21054], [20926, 20927, 21055], [20927, 21056, 21055], [20927, 20928, 21057], [20927, 21057, 21056], [20928, 20929, 21057], [20929, 21058, 21057], [20929, 20930, 21059], [20929, 21059, 21058], [20930, 20931, 21059], [20931, 21060, 21059], [20931, 20932, 21061], [20931, 21061, 21060], [20932, 20933, 21061], [20933, 21062, 21061], [20933, 20934, 21063], [20933, 21063, 21062], [20934, 20935, 21063], [20935, 21064, 21063], [20935, 20936, 21065], [20935, 21065, 21064], [20936, 20937, 21065], [20937, 21066, 21065], [20937, 20938, 21067], [20937, 21067, 21066], [20938, 20939, 21067], [20939, 21068, 21067], [20939, 20940, 21069], [20939, 21069, 21068], [20940, 20941, 21069], [20941, 21070, 21069], [20941, 20942, 21071], [20941, 21071, 21070], [20942, 20943, 21071], [20943, 21072, 21071], [20944, 20945, 21073], [20945, 21074, 21073], [20945, 20946, 21075], [20945, 21075, 21074], [20946, 20947, 
21075], [20947, 21076, 21075], [20947, 20948, 21077], [20947, 21077, 21076], [20948, 20949, 21077], [20949, 21078, 21077], [20949, 20950, 21079], [20949, 21079, 21078], [20950, 20951, 21079], [20951, 21080, 21079], [20951, 20952, 21081], [20951, 21081, 21080], [20952, 20953, 21081], [20953, 21082, 21081], [20953, 20954, 21083], [20953, 21083, 21082], [20954, 20955, 21083], [20955, 21084, 21083], [20955, 20956, 21085], [20955, 21085, 21084], [20956, 20957, 21085], [20957, 21086, 21085], [20957, 20958, 21087], [20957, 21087, 21086], [20958, 20959, 21087], [20959, 21088, 21087], [20959, 20960, 21089], [20959, 21089, 21088], [20960, 20961, 21089], [20961, 21090, 21089], [20961, 20962, 21091], [20961, 21091, 21090], [20962, 20963, 21091], [20963, 21092, 21091], [20963, 20964, 21093], [20963, 21093, 21092], [20964, 20965, 21093], [20965, 21094, 21093], [20965, 20966, 21095], [20965, 21095, 21094], [20966, 20967, 21095], [20967, 21096, 21095], [20967, 20968, 21097], [20967, 21097, 21096], [20968, 20969, 21097], [20969, 21098, 21097], [20969, 20970, 21099], [20969, 21099, 21098], [20970, 20971, 21099], [20971, 21100, 21099], [20971, 20972, 21101], [20971, 21101, 21100], [20972, 20973, 21101], [20973, 21102, 21101], [20973, 20974, 21103], [20973, 21103, 21102], [20974, 20975, 21103], [20975, 21104, 21103], [20975, 20976, 21105], [20975, 21105, 21104], [20976, 20977, 21105], [20977, 21106, 21105], [20977, 20978, 21107], [20977, 21107, 21106], [20978, 20979, 21107], [20979, 21108, 21107], [20979, 20980, 21109], [20979, 21109, 21108], [20980, 20981, 21109], [20981, 21110, 21109], [20981, 20982, 21111], [20981, 21111, 21110], [20982, 20983, 21111], [20983, 21112, 21111], [20983, 20984, 21113], [20983, 21113, 21112], [20984, 20985, 21113], [20985, 21114, 21113], [20985, 20986, 21115], [20985, 21115, 21114], [20986, 20987, 21115], [20987, 21116, 21115], [20987, 20988, 21117], [20987, 21117, 21116], [20988, 20989, 21117], [20989, 21118, 21117], [20989, 20990, 21119], [20989, 21119, 21118], [20990, 20991, 21119], [20991, 21120, 21119], [20991, 20992, 21121], [20991, 21121, 21120], [20992, 20993, 21121], [20993, 21122, 21121], [20993, 20994, 21123], [20993, 21123, 21122], [20994, 20995, 21123], [20995, 21124, 21123], [20995, 20996, 21125], [20995, 21125, 21124], [20996, 20997, 21125], [20997, 21126, 21125], [20997, 20998, 21127], [20997, 21127, 21126], [20998, 20999, 21127], [20999, 21128, 21127], [20999, 21000, 21129], [20999, 21129, 21128], [21000, 21001, 21129], [21001, 21130, 21129], [21001, 21002, 21131], [21001, 21131, 21130], [21002, 21003, 21131], [21003, 21132, 21131], [21003, 21004, 21133], [21003, 21133, 21132], [21004, 21005, 21133], [21005, 21134, 21133], [21005, 21006, 21135], [21005, 21135, 21134], [21006, 21007, 21135], [21007, 21136, 21135], [21007, 21008, 21137], [21007, 21137, 21136], [21008, 21009, 21137], [21009, 21138, 21137], [21009, 21010, 21139], [21009, 21139, 21138], [21010, 21011, 21139], [21011, 21140, 21139], [21011, 21012, 21141], [21011, 21141, 21140], [21012, 21013, 21141], [21013, 21142, 21141], [21013, 21014, 21143], [21013, 21143, 21142], [21014, 21015, 21143], [21015, 21144, 21143], [21015, 21016, 21145], [21015, 21145, 21144], [21016, 21017, 21145], [21017, 21146, 21145], [21017, 21018, 21147], [21017, 21147, 21146], [21018, 21019, 21147], [21019, 21148, 21147], [21019, 21020, 21149], [21019, 21149, 21148], [21020, 21021, 21149], [21021, 21150, 21149], [21021, 21022, 21151], [21021, 21151, 21150], [21022, 21023, 21151], [21023, 21152, 21151], [21023, 21024, 21153], 
[21023, 21153, 21152], [21024, 21025, 21153], [21025, 21154, 21153], [21025, 21026, 21155], [21025, 21155, 21154], [21026, 21027, 21155], [21027, 21156, 21155], [21027, 21028, 21157], [21027, 21157, 21156], [21028, 21029, 21157], [21029, 21158, 21157], [21029, 21030, 21159], [21029, 21159, 21158], [21030, 21031, 21159], [21031, 21160, 21159], [21031, 21032, 21161], [21031, 21161, 21160], [21032, 21033, 21161], [21033, 21162, 21161], [21033, 21034, 21163], [21033, 21163, 21162], [21034, 21035, 21163], [21035, 21164, 21163], [21035, 21036, 21165], [21035, 21165, 21164], [21036, 21037, 21165], [21037, 21166, 21165], [21037, 21038, 21167], [21037, 21167, 21166], [21038, 21039, 21167], [21039, 21168, 21167], [21039, 21040, 21169], [21039, 21169, 21168], [21040, 21041, 21169], [21041, 21170, 21169], [21041, 21042, 21171], [21041, 21171, 21170], [21042, 21043, 21171], [21043, 21172, 21171], [21043, 21044, 21173], [21043, 21173, 21172], [21044, 21045, 21173], [21045, 21174, 21173], [21045, 21046, 21175], [21045, 21175, 21174], [21046, 21047, 21175], [21047, 21176, 21175], [21047, 21048, 21177], [21047, 21177, 21176], [21048, 21049, 21177], [21049, 21178, 21177], [21049, 21050, 21179], [21049, 21179, 21178], [21050, 21051, 21179], [21051, 21180, 21179], [21051, 21052, 21181], [21051, 21181, 21180], [21052, 21053, 21181], [21053, 21182, 21181], [21053, 21054, 21183], [21053, 21183, 21182], [21054, 21055, 21183], [21055, 21184, 21183], [21055, 21056, 21185], [21055, 21185, 21184], [21056, 21057, 21185], [21057, 21186, 21185], [21057, 21058, 21187], [21057, 21187, 21186], [21058, 21059, 21187], [21059, 21188, 21187], [21059, 21060, 21189], [21059, 21189, 21188], [21060, 21061, 21189], [21061, 21190, 21189], [21061, 21062, 21191], [21061, 21191, 21190], [21062, 21063, 21191], [21063, 21192, 21191], [21063, 21064, 21193], [21063, 21193, 21192], [21064, 21065, 21193], [21065, 21194, 21193], [21065, 21066, 21195], [21065, 21195, 21194], [21066, 21067, 21195], [21067, 21196, 21195], [21067, 21068, 21197], [21067, 21197, 21196], [21068, 21069, 21197], [21069, 21198, 21197], [21069, 21070, 21199], [21069, 21199, 21198], [21070, 21071, 21199], [21071, 21200, 21199], [21071, 21072, 21201], [21071, 21201, 21200], [21073, 21074, 21203], [21073, 21203, 21202], [21074, 21075, 21203], [21075, 21204, 21203], [21075, 21076, 21205], [21075, 21205, 21204], [21076, 21077, 21205], [21077, 21206, 21205], [21077, 21078, 21207], [21077, 21207, 21206], [21078, 21079, 21207], [21079, 21208, 21207], [21079, 21080, 21209], [21079, 21209, 21208], [21080, 21081, 21209], [21081, 21210, 21209], [21081, 21082, 21211], [21081, 21211, 21210], [21082, 21083, 21211], [21083, 21212, 21211], [21083, 21084, 21213], [21083, 21213, 21212], [21084, 21085, 21213], [21085, 21214, 21213], [21085, 21086, 21215], [21085, 21215, 21214], [21086, 21087, 21215], [21087, 21216, 21215], [21087, 21088, 21217], [21087, 21217, 21216], [21088, 21089, 21217], [21089, 21218, 21217], [21089, 21090, 21219], [21089, 21219, 21218], [21090, 21091, 21219], [21091, 21220, 21219], [21091, 21092, 21221], [21091, 21221, 21220], [21092, 21093, 21221], [21093, 21222, 21221], [21093, 21094, 21223], [21093, 21223, 21222], [21094, 21095, 21223], [21095, 21224, 21223], [21095, 21096, 21225], [21095, 21225, 21224], [21096, 21097, 21225], [21097, 21226, 21225], [21097, 21098, 21227], [21097, 21227, 21226], [21098, 21099, 21227], [21099, 21228, 21227], [21099, 21100, 21229], [21099, 21229, 21228], [21100, 21101, 21229], [21101, 21230, 21229], [21101, 21102, 21231], [21101, 
21231, 21230], [21102, 21103, 21231], [21103, 21232, 21231], [21103, 21104, 21233], [21103, 21233, 21232], [21104, 21105, 21233], [21105, 21234, 21233], [21105, 21106, 21235], [21105, 21235, 21234], [21106, 21107, 21235], [21107, 21236, 21235], [21107, 21108, 21237], [21107, 21237, 21236], [21108, 21109, 21237], [21109, 21238, 21237], [21109, 21110, 21239], [21109, 21239, 21238], [21110, 21111, 21239], [21111, 21240, 21239], [21111, 21112, 21241], [21111, 21241, 21240], [21112, 21113, 21241], [21113, 21242, 21241], [21113, 21114, 21243], [21113, 21243, 21242], [21114, 21115, 21243], [21115, 21244, 21243], [21115, 21116, 21245], [21115, 21245, 21244], [21116, 21117, 21245], [21117, 21246, 21245], [21117, 21118, 21247], [21117, 21247, 21246], [21118, 21119, 21247], [21119, 21248, 21247], [21119, 21120, 21249], [21119, 21249, 21248], [21120, 21121, 21249], [21121, 21250, 21249], [21121, 21122, 21251], [21121, 21251, 21250], [21122, 21123, 21251], [21123, 21252, 21251], [21123, 21124, 21253], [21123, 21253, 21252], [21124, 21125, 21253], [21125, 21254, 21253], [21125, 21126, 21255], [21125, 21255, 21254], [21126, 21127, 21255], [21127, 21256, 21255], [21127, 21128, 21257], [21127, 21257, 21256], [21128, 21129, 21257], [21129, 21258, 21257], [21129, 21130, 21259], [21129, 21259, 21258], [21130, 21131, 21259], [21131, 21260, 21259], [21131, 21132, 21261], [21131, 21261, 21260], [21132, 21133, 21261], [21133, 21262, 21261], [21133, 21134, 21263], [21133, 21263, 21262], [21134, 21135, 21263], [21135, 21264, 21263], [21135, 21136, 21265], [21135, 21265, 21264], [21136, 21137, 21265], [21137, 21266, 21265], [21137, 21138, 21267], [21137, 21267, 21266], [21138, 21139, 21267], [21139, 21268, 21267], [21139, 21140, 21269], [21139, 21269, 21268], [21140, 21141, 21269], [21141, 21270, 21269], [21141, 21142, 21271], [21141, 21271, 21270], [21142, 21143, 21271], [21143, 21272, 21271], [21143, 21144, 21273], [21143, 21273, 21272], [21144, 21145, 21273], [21145, 21274, 21273], [21145, 21146, 21275], [21145, 21275, 21274], [21146, 21147, 21275], [21147, 21276, 21275], [21147, 21148, 21277], [21147, 21277, 21276], [21148, 21149, 21277], [21149, 21278, 21277], [21149, 21150, 21279], [21149, 21279, 21278], [21150, 21151, 21279], [21151, 21280, 21279], [21151, 21152, 21281], [21151, 21281, 21280], [21152, 21153, 21281], [21153, 21282, 21281], [21153, 21154, 21283], [21153, 21283, 21282], [21154, 21155, 21283], [21155, 21284, 21283], [21155, 21156, 21285], [21155, 21285, 21284], [21156, 21157, 21285], [21157, 21286, 21285], [21157, 21158, 21287], [21157, 21287, 21286], [21158, 21159, 21287], [21159, 21288, 21287], [21159, 21160, 21289], [21159, 21289, 21288], [21160, 21161, 21289], [21161, 21290, 21289], [21161, 21162, 21291], [21161, 21291, 21290], [21162, 21163, 21291], [21163, 21292, 21291], [21163, 21164, 21293], [21163, 21293, 21292], [21164, 21165, 21293], [21165, 21294, 21293], [21165, 21166, 21295], [21165, 21295, 21294], [21166, 21167, 21295], [21167, 21296, 21295], [21167, 21168, 21297], [21167, 21297, 21296], [21168, 21169, 21297], [21169, 21298, 21297], [21169, 21170, 21299], [21169, 21299, 21298], [21170, 21171, 21299], [21171, 21300, 21299], [21171, 21172, 21301], [21171, 21301, 21300], [21172, 21173, 21301], [21173, 21302, 21301], [21173, 21174, 21303], [21173, 21303, 21302], [21174, 21175, 21303], [21175, 21304, 21303], [21175, 21176, 21305], [21175, 21305, 21304], [21176, 21177, 21305], [21177, 21306, 21305], [21177, 21178, 21307], [21177, 21307, 21306], [21178, 21179, 21307], [21179, 21308, 
21307], [21179, 21180, 21309], [21179, 21309, 21308], [21180, 21181, 21309], [21181, 21310, 21309], [21181, 21182, 21311], [21181, 21311, 21310], [21182, 21183, 21311], [21183, 21312, 21311], [21183, 21184, 21313], [21183, 21313, 21312], [21184, 21185, 21313], [21185, 21314, 21313], [21185, 21186, 21315], [21185, 21315, 21314], [21186, 21187, 21315], [21187, 21316, 21315], [21187, 21188, 21317], [21187, 21317, 21316], [21188, 21189, 21317], [21189, 21318, 21317], [21189, 21190, 21319], [21189, 21319, 21318], [21190, 21191, 21319], [21191, 21320, 21319], [21191, 21192, 21321], [21191, 21321, 21320], [21192, 21193, 21321], [21193, 21322, 21321], [21193, 21194, 21323], [21193, 21323, 21322], [21194, 21195, 21323], [21195, 21324, 21323], [21195, 21196, 21325], [21195, 21325, 21324], [21196, 21197, 21325], [21197, 21326, 21325], [21197, 21198, 21327], [21197, 21327, 21326], [21198, 21199, 21327], [21199, 21328, 21327], [21199, 21200, 21329], [21199, 21329, 21328], [21200, 21201, 21329], [21201, 21330, 21329], [21202, 21203, 21331], [21203, 21332, 21331], [21203, 21204, 21333], [21203, 21333, 21332], [21204, 21205, 21333], [21205, 21334, 21333], [21205, 21206, 21335], [21205, 21335, 21334], [21206, 21207, 21335], [21207, 21336, 21335], [21207, 21208, 21337], [21207, 21337, 21336], [21208, 21209, 21337], [21209, 21338, 21337], [21209, 21210, 21339], [21209, 21339, 21338], [21210, 21211, 21339], [21211, 21340, 21339], [21211, 21212, 21341], [21211, 21341, 21340], [21212, 21213, 21341], [21213, 21342, 21341], [21213, 21214, 21343], [21213, 21343, 21342], [21214, 21215, 21343], [21215, 21344, 21343], [21215, 21216, 21345], [21215, 21345, 21344], [21216, 21217, 21345], [21217, 21346, 21345], [21217, 21218, 21347], [21217, 21347, 21346], [21218, 21219, 21347], [21219, 21348, 21347], [21219, 21220, 21349], [21219, 21349, 21348], [21220, 21221, 21349], [21221, 21350, 21349], [21221, 21222, 21351], [21221, 21351, 21350], [21222, 21223, 21351], [21223, 21352, 21351], [21223, 21224, 21353], [21223, 21353, 21352], [21224, 21225, 21353], [21225, 21354, 21353], [21225, 21226, 21355], [21225, 21355, 21354], [21226, 21227, 21355], [21227, 21356, 21355], [21227, 21228, 21357], [21227, 21357, 21356], [21228, 21229, 21357], [21229, 21358, 21357], [21229, 21230, 21359], [21229, 21359, 21358], [21230, 21231, 21359], [21231, 21360, 21359], [21231, 21232, 21361], [21231, 21361, 21360], [21232, 21233, 21361], [21233, 21362, 21361], [21233, 21234, 21363], [21233, 21363, 21362], [21234, 21235, 21363], [21235, 21364, 21363], [21235, 21236, 21365], [21235, 21365, 21364], [21236, 21237, 21365], [21237, 21366, 21365], [21237, 21238, 21367], [21237, 21367, 21366], [21238, 21239, 21367], [21239, 21368, 21367], [21239, 21240, 21369], [21239, 21369, 21368], [21240, 21241, 21369], [21241, 21370, 21369], [21241, 21242, 21371], [21241, 21371, 21370], [21242, 21243, 21371], [21243, 21372, 21371], [21243, 21244, 21373], [21243, 21373, 21372], [21244, 21245, 21373], [21245, 21374, 21373], [21245, 21246, 21375], [21245, 21375, 21374], [21246, 21247, 21375], [21247, 21376, 21375], [21247, 21248, 21377], [21247, 21377, 21376], [21248, 21249, 21377], [21249, 21378, 21377], [21249, 21250, 21379], [21249, 21379, 21378], [21250, 21251, 21379], [21251, 21380, 21379], [21251, 21252, 21381], [21251, 21381, 21380], [21252, 21253, 21381], [21253, 21382, 21381], [21253, 21254, 21383], [21253, 21383, 21382], [21254, 21255, 21383], [21255, 21384, 21383], [21255, 21256, 21385], [21255, 21385, 21384], [21256, 21257, 21385], [21257, 21386, 21385], 
[21257, 21258, 21387], [21257, 21387, 21386], [21258, 21259, 21387], [21259, 21388, 21387], [21259, 21260, 21389], [21259, 21389, 21388], [21260, 21261, 21389], [21261, 21390, 21389], [21261, 21262, 21391], [21261, 21391, 21390], [21262, 21263, 21391], [21263, 21392, 21391], [21263, 21264, 21393], [21263, 21393, 21392], [21264, 21265, 21393], [21265, 21394, 21393], [21265, 21266, 21395], [21265, 21395, 21394], [21266, 21267, 21395], [21267, 21396, 21395], [21267, 21268, 21397], [21267, 21397, 21396], [21268, 21269, 21397], [21269, 21398, 21397], [21269, 21270, 21399], [21269, 21399, 21398], [21270, 21271, 21399], [21271, 21400, 21399], [21271, 21272, 21401], [21271, 21401, 21400], [21272, 21273, 21401], [21273, 21402, 21401], [21273, 21274, 21403], [21273, 21403, 21402], [21274, 21275, 21403], [21275, 21404, 21403], [21275, 21276, 21405], [21275, 21405, 21404], [21276, 21277, 21405], [21277, 21406, 21405], [21277, 21278, 21407], [21277, 21407, 21406], [21278, 21279, 21407], [21279, 21408, 21407], [21279, 21280, 21409], [21279, 21409, 21408], [21280, 21281, 21409], [21281, 21410, 21409], [21281, 21282, 21411], [21281, 21411, 21410], [21282, 21283, 21411], [21283, 21412, 21411], [21283, 21284, 21413], [21283, 21413, 21412], [21284, 21285, 21413], [21285, 21414, 21413], [21285, 21286, 21415], [21285, 21415, 21414], [21286, 21287, 21415], [21287, 21416, 21415], [21287, 21288, 21417], [21287, 21417, 21416], [21288, 21289, 21417], [21289, 21418, 21417], [21289, 21290, 21419], [21289, 21419, 21418], [21290, 21291, 21419], [21291, 21420, 21419], [21291, 21292, 21421], [21291, 21421, 21420], [21292, 21293, 21421], [21293, 21422, 21421], [21293, 21294, 21423], [21293, 21423, 21422], [21294, 21295, 21423], [21295, 21424, 21423], [21295, 21296, 21425], [21295, 21425, 21424], [21296, 21297, 21425], [21297, 21426, 21425], [21297, 21298, 21427], [21297, 21427, 21426], [21298, 21299, 21427], [21299, 21428, 21427], [21299, 21300, 21429], [21299, 21429, 21428], [21300, 21301, 21429], [21301, 21430, 21429], [21301, 21302, 21431], [21301, 21431, 21430], [21302, 21303, 21431], [21303, 21432, 21431], [21303, 21304, 21433], [21303, 21433, 21432], [21304, 21305, 21433], [21305, 21434, 21433], [21305, 21306, 21435], [21305, 21435, 21434], [21306, 21307, 21435], [21307, 21436, 21435], [21307, 21308, 21437], [21307, 21437, 21436], [21308, 21309, 21437], [21309, 21438, 21437], [21309, 21310, 21439], [21309, 21439, 21438], [21310, 21311, 21439], [21311, 21440, 21439], [21311, 21312, 21441], [21311, 21441, 21440], [21312, 21313, 21441], [21313, 21442, 21441], [21313, 21314, 21443], [21313, 21443, 21442], [21314, 21315, 21443], [21315, 21444, 21443], [21315, 21316, 21445], [21315, 21445, 21444], [21316, 21317, 21445], [21317, 21446, 21445], [21317, 21318, 21447], [21317, 21447, 21446], [21318, 21319, 21447], [21319, 21448, 21447], [21319, 21320, 21449], [21319, 21449, 21448], [21320, 21321, 21449], [21321, 21450, 21449], [21321, 21322, 21451], [21321, 21451, 21450], [21322, 21323, 21451], [21323, 21452, 21451], [21323, 21324, 21453], [21323, 21453, 21452], [21324, 21325, 21453], [21325, 21454, 21453], [21325, 21326, 21455], [21325, 21455, 21454], [21326, 21327, 21455], [21327, 21456, 21455], [21327, 21328, 21457], [21327, 21457, 21456], [21328, 21329, 21457], [21329, 21458, 21457], [21329, 21330, 21459], [21329, 21459, 21458], [21331, 21332, 21461], [21331, 21461, 21460], [21332, 21333, 21461], [21333, 21462, 21461], [21333, 21334, 21463], [21333, 21463, 21462], [21334, 21335, 21463], [21335, 21464, 21463], [21335, 
21336, 21465], [21335, 21465, 21464], [21336, 21337, 21465], [21337, 21466, 21465], [21337, 21338, 21467], [21337, 21467, 21466], [21338, 21339, 21467], [21339, 21468, 21467], [21339, 21340, 21469], [21339, 21469, 21468], [21340, 21341, 21469], [21341, 21470, 21469], [21341, 21342, 21471], [21341, 21471, 21470], [21342, 21343, 21471], [21343, 21472, 21471], [21343, 21344, 21473], [21343, 21473, 21472], [21344, 21345, 21473], [21345, 21474, 21473], [21345, 21346, 21475], [21345, 21475, 21474], [21346, 21347, 21475], [21347, 21476, 21475], [21347, 21348, 21477], [21347, 21477, 21476], [21348, 21349, 21477], [21349, 21478, 21477], [21349, 21350, 21479], [21349, 21479, 21478], [21350, 21351, 21479], [21351, 21480, 21479], [21351, 21352, 21481], [21351, 21481, 21480], [21352, 21353, 21481], [21353, 21482, 21481], [21353, 21354, 21483], [21353, 21483, 21482], [21354, 21355, 21483], [21355, 21484, 21483], [21355, 21356, 21485], [21355, 21485, 21484], [21356, 21357, 21485], [21357, 21486, 21485], [21357, 21358, 21487], [21357, 21487, 21486], [21358, 21359, 21487], [21359, 21488, 21487], [21359, 21360, 21489], [21359, 21489, 21488], [21360, 21361, 21489], [21361, 21490, 21489], [21361, 21362, 21491], [21361, 21491, 21490], [21362, 21363, 21491], [21363, 21492, 21491], [21363, 21364, 21493], [21363, 21493, 21492], [21364, 21365, 21493], [21365, 21494, 21493], [21365, 21366, 21495], [21365, 21495, 21494], [21366, 21367, 21495], [21367, 21496, 21495], [21367, 21368, 21497], [21367, 21497, 21496], [21368, 21369, 21497], [21369, 21498, 21497], [21369, 21370, 21499], [21369, 21499, 21498], [21370, 21371, 21499], [21371, 21500, 21499], [21371, 21372, 21501], [21371, 21501, 21500], [21372, 21373, 21501], [21373, 21502, 21501], [21373, 21374, 21503], [21373, 21503, 21502], [21374, 21375, 21503], [21375, 21504, 21503], [21375, 21376, 21505], [21375, 21505, 21504], [21376, 21377, 21505], [21377, 21506, 21505], [21377, 21378, 21507], [21377, 21507, 21506], [21378, 21379, 21507], [21379, 21508, 21507], [21379, 21380, 21509], [21379, 21509, 21508], [21380, 21381, 21509], [21381, 21510, 21509], [21381, 21382, 21511], [21381, 21511, 21510], [21382, 21383, 21511], [21383, 21512, 21511], [21383, 21384, 21513], [21383, 21513, 21512], [21384, 21385, 21513], [21385, 21514, 21513], [21385, 21386, 21515], [21385, 21515, 21514], [21386, 21387, 21515], [21387, 21516, 21515], [21387, 21388, 21517], [21387, 21517, 21516], [21388, 21389, 21517], [21389, 21518, 21517], [21389, 21390, 21519], [21389, 21519, 21518], [21390, 21391, 21519], [21391, 21520, 21519], [21391, 21392, 21521], [21391, 21521, 21520], [21392, 21393, 21521], [21393, 21522, 21521], [21393, 21394, 21523], [21393, 21523, 21522], [21394, 21395, 21523], [21395, 21524, 21523], [21395, 21396, 21525], [21395, 21525, 21524], [21396, 21397, 21525], [21397, 21526, 21525], [21397, 21398, 21527], [21397, 21527, 21526], [21398, 21399, 21527], [21399, 21528, 21527], [21399, 21400, 21529], [21399, 21529, 21528], [21400, 21401, 21529], [21401, 21530, 21529], [21401, 21402, 21531], [21401, 21531, 21530], [21402, 21403, 21531], [21403, 21532, 21531], [21403, 21404, 21533], [21403, 21533, 21532], [21404, 21405, 21533], [21405, 21534, 21533], [21405, 21406, 21535], [21405, 21535, 21534], [21406, 21407, 21535], [21407, 21536, 21535], [21407, 21408, 21537], [21407, 21537, 21536], [21408, 21409, 21537], [21409, 21538, 21537], [21409, 21410, 21539], [21409, 21539, 21538], [21410, 21411, 21539], [21411, 21540, 21539], [21411, 21412, 21541], [21411, 21541, 21540], [21412, 21413, 
21541], [21413, 21542, 21541], [21413, 21414, 21543], [21413, 21543, 21542], [21414, 21415, 21543], [21415, 21544, 21543], [21415, 21416, 21545], [21415, 21545, 21544], [21416, 21417, 21545], [21417, 21546, 21545], [21417, 21418, 21547], [21417, 21547, 21546], [21418, 21419, 21547], [21419, 21548, 21547], [21419, 21420, 21549], [21419, 21549, 21548], [21420, 21421, 21549], [21421, 21550, 21549], [21421, 21422, 21551], [21421, 21551, 21550], [21422, 21423, 21551], [21423, 21552, 21551], [21423, 21424, 21553], [21423, 21553, 21552], [21424, 21425, 21553], [21425, 21554, 21553], [21425, 21426, 21555], [21425, 21555, 21554], [21426, 21427, 21555], [21427, 21556, 21555], [21427, 21428, 21557], [21427, 21557, 21556], [21428, 21429, 21557], [21429, 21558, 21557], [21429, 21430, 21559], [21429, 21559, 21558], [21430, 21431, 21559], [21431, 21560, 21559], [21431, 21432, 21561], [21431, 21561, 21560], [21432, 21433, 21561], [21433, 21562, 21561], [21433, 21434, 21563], [21433, 21563, 21562], [21434, 21435, 21563], [21435, 21564, 21563], [21435, 21436, 21565], [21435, 21565, 21564], [21436, 21437, 21565], [21437, 21566, 21565], [21437, 21438, 21567], [21437, 21567, 21566], [21438, 21439, 21567], [21439, 21568, 21567], [21439, 21440, 21569], [21439, 21569, 21568], [21440, 21441, 21569], [21441, 21570, 21569], [21441, 21442, 21571], [21441, 21571, 21570], [21442, 21443, 21571], [21443, 21572, 21571], [21443, 21444, 21573], [21443, 21573, 21572], [21444, 21445, 21573], [21445, 21574, 21573], [21445, 21446, 21575], [21445, 21575, 21574], [21446, 21447, 21575], [21447, 21576, 21575], [21447, 21448, 21577], [21447, 21577, 21576], [21448, 21449, 21577], [21449, 21578, 21577], [21449, 21450, 21579], [21449, 21579, 21578], [21450, 21451, 21579], [21451, 21580, 21579], [21451, 21452, 21581], [21451, 21581, 21580], [21452, 21453, 21581], [21453, 21582, 21581], [21453, 21454, 21583], [21453, 21583, 21582], [21454, 21455, 21583], [21455, 21584, 21583], [21455, 21456, 21585], [21455, 21585, 21584], [21456, 21457, 21585], [21457, 21586, 21585], [21457, 21458, 21587], [21457, 21587, 21586], [21458, 21459, 21587], [21459, 21588, 21587], [21460, 21461, 21589], [21461, 21590, 21589], [21461, 21462, 21591], [21461, 21591, 21590], [21462, 21463, 21591], [21463, 21592, 21591], [21463, 21464, 21593], [21463, 21593, 21592], [21464, 21465, 21593], [21465, 21594, 21593], [21465, 21466, 21595], [21465, 21595, 21594], [21466, 21467, 21595], [21467, 21596, 21595], [21467, 21468, 21597], [21467, 21597, 21596], [21468, 21469, 21597], [21469, 21598, 21597], [21469, 21470, 21599], [21469, 21599, 21598], [21470, 21471, 21599], [21471, 21600, 21599], [21471, 21472, 21601], [21471, 21601, 21600], [21472, 21473, 21601], [21473, 21602, 21601], [21473, 21474, 21603], [21473, 21603, 21602], [21474, 21475, 21603], [21475, 21604, 21603], [21475, 21476, 21605], [21475, 21605, 21604], [21476, 21477, 21605], [21477, 21606, 21605], [21477, 21478, 21607], [21477, 21607, 21606], [21478, 21479, 21607], [21479, 21608, 21607], [21479, 21480, 21609], [21479, 21609, 21608], [21480, 21481, 21609], [21481, 21610, 21609], [21481, 21482, 21611], [21481, 21611, 21610], [21482, 21483, 21611], [21483, 21612, 21611], [21483, 21484, 21613], [21483, 21613, 21612], [21484, 21485, 21613], [21485, 21614, 21613], [21485, 21486, 21615], [21485, 21615, 21614], [21486, 21487, 21615], [21487, 21616, 21615], [21487, 21488, 21617], [21487, 21617, 21616], [21488, 21489, 21617], [21489, 21618, 21617], [21489, 21490, 21619], [21489, 21619, 21618], [21490, 21491, 21619], 
[21491, 21620, 21619], [21491, 21492, 21621], [21491, 21621, 21620], [21492, 21493, 21621], [21493, 21622, 21621], [21493, 21494, 21623], [21493, 21623, 21622], [21494, 21495, 21623], [21495, 21624, 21623], [21495, 21496, 21625], [21495, 21625, 21624], [21496, 21497, 21625], [21497, 21626, 21625], [21497, 21498, 21627], [21497, 21627, 21626], [21498, 21499, 21627], [21499, 21628, 21627], [21499, 21500, 21629], [21499, 21629, 21628], [21500, 21501, 21629], [21501, 21630, 21629], [21501, 21502, 21631], [21501, 21631, 21630], [21502, 21503, 21631], [21503, 21632, 21631], [21503, 21504, 21633], [21503, 21633, 21632], [21504, 21505, 21633], [21505, 21634, 21633], [21505, 21506, 21635], [21505, 21635, 21634], [21506, 21507, 21635], [21507, 21636, 21635], [21507, 21508, 21637], [21507, 21637, 21636], [21508, 21509, 21637], [21509, 21638, 21637], [21509, 21510, 21639], [21509, 21639, 21638], [21510, 21511, 21639], [21511, 21640, 21639], [21511, 21512, 21641], [21511, 21641, 21640], [21512, 21513, 21641], [21513, 21642, 21641], [21513, 21514, 21643], [21513, 21643, 21642], [21514, 21515, 21643], [21515, 21644, 21643], [21515, 21516, 21645], [21515, 21645, 21644], [21516, 21517, 21645], [21517, 21646, 21645], [21517, 21518, 21647], [21517, 21647, 21646], [21518, 21519, 21647], [21519, 21648, 21647], [21519, 21520, 21649], [21519, 21649, 21648], [21520, 21521, 21649], [21521, 21650, 21649], [21521, 21522, 21651], [21521, 21651, 21650], [21522, 21523, 21651], [21523, 21652, 21651], [21523, 21524, 21653], [21523, 21653, 21652], [21524, 21525, 21653], [21525, 21654, 21653], [21525, 21526, 21655], [21525, 21655, 21654], [21526, 21527, 21655], [21527, 21656, 21655], [21527, 21528, 21657], [21527, 21657, 21656], [21528, 21529, 21657], [21529, 21658, 21657], [21529, 21530, 21659], [21529, 21659, 21658], [21530, 21531, 21659], [21531, 21660, 21659], [21531, 21532, 21661], [21531, 21661, 21660], [21532, 21533, 21661], [21533, 21662, 21661], [21533, 21534, 21663], [21533, 21663, 21662], [21534, 21535, 21663], [21535, 21664, 21663], [21535, 21536, 21665], [21535, 21665, 21664], [21536, 21537, 21665], [21537, 21666, 21665], [21537, 21538, 21667], [21537, 21667, 21666], [21538, 21539, 21667], [21539, 21668, 21667], [21539, 21540, 21669], [21539, 21669, 21668], [21540, 21541, 21669], [21541, 21670, 21669], [21541, 21542, 21671], [21541, 21671, 21670], [21542, 21543, 21671], [21543, 21672, 21671], [21543, 21544, 21673], [21543, 21673, 21672], [21544, 21545, 21673], [21545, 21674, 21673], [21545, 21546, 21675], [21545, 21675, 21674], [21546, 21547, 21675], [21547, 21676, 21675], [21547, 21548, 21677], [21547, 21677, 21676], [21548, 21549, 21677], [21549, 21678, 21677], [21549, 21550, 21679], [21549, 21679, 21678], [21550, 21551, 21679], [21551, 21680, 21679], [21551, 21552, 21681], [21551, 21681, 21680], [21552, 21553, 21681], [21553, 21682, 21681], [21553, 21554, 21683], [21553, 21683, 21682], [21554, 21555, 21683], [21555, 21684, 21683], [21555, 21556, 21685], [21555, 21685, 21684], [21556, 21557, 21685], [21557, 21686, 21685], [21557, 21558, 21687], [21557, 21687, 21686], [21558, 21559, 21687], [21559, 21688, 21687], [21559, 21560, 21689], [21559, 21689, 21688], [21560, 21561, 21689], [21561, 21690, 21689], [21561, 21562, 21691], [21561, 21691, 21690], [21562, 21563, 21691], [21563, 21692, 21691], [21563, 21564, 21693], [21563, 21693, 21692], [21564, 21565, 21693], [21565, 21694, 21693], [21565, 21566, 21695], [21565, 21695, 21694], [21566, 21567, 21695], [21567, 21696, 21695], [21567, 21568, 21697], [21567, 
21697, 21696], [21568, 21569, 21697], [21569, 21698, 21697], [21569, 21570, 21699], [21569, 21699, 21698], [21570, 21571, 21699], [21571, 21700, 21699], [21571, 21572, 21701], [21571, 21701, 21700], [21572, 21573, 21701], [21573, 21702, 21701], [21573, 21574, 21703], [21573, 21703, 21702], [21574, 21575, 21703], [21575, 21704, 21703], [21575, 21576, 21705], [21575, 21705, 21704], [21576, 21577, 21705], [21577, 21706, 21705], [21577, 21578, 21707], [21577, 21707, 21706], [21578, 21579, 21707], [21579, 21708, 21707], [21579, 21580, 21709], [21579, 21709, 21708], [21580, 21581, 21709], [21581, 21710, 21709], [21581, 21582, 21711], [21581, 21711, 21710], [21582, 21583, 21711], [21583, 21712, 21711], [21583, 21584, 21713], [21583, 21713, 21712], [21584, 21585, 21713], [21585, 21714, 21713], [21585, 21586, 21715], [21585, 21715, 21714], [21586, 21587, 21715], [21587, 21716, 21715], [21587, 21588, 21717], [21587, 21717, 21716], [21589, 21590, 21719], [21589, 21719, 21718], [21590, 21591, 21719], [21591, 21720, 21719], [21591, 21592, 21721], [21591, 21721, 21720], [21592, 21593, 21721], [21593, 21722, 21721], [21593, 21594, 21723], [21593, 21723, 21722], [21594, 21595, 21723], [21595, 21724, 21723], [21595, 21596, 21725], [21595, 21725, 21724], [21596, 21597, 21725], [21597, 21726, 21725], [21597, 21598, 21727], [21597, 21727, 21726], [21598, 21599, 21727], [21599, 21728, 21727], [21599, 21600, 21729], [21599, 21729, 21728], [21600, 21601, 21729], [21601, 21730, 21729], [21601, 21602, 21731], [21601, 21731, 21730], [21602, 21603, 21731], [21603, 21732, 21731], [21603, 21604, 21733], [21603, 21733, 21732], [21604, 21605, 21733], [21605, 21734, 21733], [21605, 21606, 21735], [21605, 21735, 21734], [21606, 21607, 21735], [21607, 21736, 21735], [21607, 21608, 21737], [21607, 21737, 21736], [21608, 21609, 21737], [21609, 21738, 21737], [21609, 21610, 21739], [21609, 21739, 21738], [21610, 21611, 21739], [21611, 21740, 21739], [21611, 21612, 21741], [21611, 21741, 21740], [21612, 21613, 21741], [21613, 21742, 21741], [21613, 21614, 21743], [21613, 21743, 21742], [21614, 21615, 21743], [21615, 21744, 21743], [21615, 21616, 21745], [21615, 21745, 21744], [21616, 21617, 21745], [21617, 21746, 21745], [21617, 21618, 21747], [21617, 21747, 21746], [21618, 21619, 21747], [21619, 21748, 21747], [21619, 21620, 21749], [21619, 21749, 21748], [21620, 21621, 21749], [21621, 21750, 21749], [21621, 21622, 21751], [21621, 21751, 21750], [21622, 21623, 21751], [21623, 21752, 21751], [21623, 21624, 21753], [21623, 21753, 21752], [21624, 21625, 21753], [21625, 21754, 21753], [21625, 21626, 21755], [21625, 21755, 21754], [21626, 21627, 21755], [21627, 21756, 21755], [21627, 21628, 21757], [21627, 21757, 21756], [21628, 21629, 21757], [21629, 21758, 21757], [21629, 21630, 21759], [21629, 21759, 21758], [21630, 21631, 21759], [21631, 21760, 21759], [21631, 21632, 21761], [21631, 21761, 21760], [21632, 21633, 21761], [21633, 21762, 21761], [21633, 21634, 21763], [21633, 21763, 21762], [21634, 21635, 21763], [21635, 21764, 21763], [21635, 21636, 21765], [21635, 21765, 21764], [21636, 21637, 21765], [21637, 21766, 21765], [21637, 21638, 21767], [21637, 21767, 21766], [21638, 21639, 21767], [21639, 21768, 21767], [21639, 21640, 21769], [21639, 21769, 21768], [21640, 21641, 21769], [21641, 21770, 21769], [21641, 21642, 21771], [21641, 21771, 21770], [21642, 21643, 21771], [21643, 21772, 21771], [21643, 21644, 21773], [21643, 21773, 21772], [21644, 21645, 21773], [21645, 21774, 21773], [21645, 21646, 21775], [21645, 21775, 
21774], [21646, 21647, 21775], [21647, 21776, 21775], [21647, 21648, 21777], [21647, 21777, 21776], [21648, 21649, 21777], [21649, 21778, 21777], [21649, 21650, 21779], [21649, 21779, 21778], [21650, 21651, 21779], [21651, 21780, 21779], [21651, 21652, 21781], [21651, 21781, 21780], [21652, 21653, 21781], [21653, 21782, 21781], [21653, 21654, 21783], [21653, 21783, 21782], [21654, 21655, 21783], [21655, 21784, 21783], [21655, 21656, 21785], [21655, 21785, 21784], [21656, 21657, 21785], [21657, 21786, 21785], [21657, 21658, 21787], [21657, 21787, 21786], [21658, 21659, 21787], [21659, 21788, 21787], [21659, 21660, 21789], [21659, 21789, 21788], [21660, 21661, 21789], [21661, 21790, 21789], [21661, 21662, 21791], [21661, 21791, 21790], [21662, 21663, 21791], [21663, 21792, 21791], [21663, 21664, 21793], [21663, 21793, 21792], [21664, 21665, 21793], [21665, 21794, 21793], [21665, 21666, 21795], [21665, 21795, 21794], [21666, 21667, 21795], [21667, 21796, 21795], [21667, 21668, 21797], [21667, 21797, 21796], [21668, 21669, 21797], [21669, 21798, 21797], [21669, 21670, 21799], [21669, 21799, 21798], [21670, 21671, 21799], [21671, 21800, 21799], [21671, 21672, 21801], [21671, 21801, 21800], [21672, 21673, 21801], [21673, 21802, 21801], [21673, 21674, 21803], [21673, 21803, 21802], [21674, 21675, 21803], [21675, 21804, 21803], [21675, 21676, 21805], [21675, 21805, 21804], [21676, 21677, 21805], [21677, 21806, 21805], [21677, 21678, 21807], [21677, 21807, 21806], [21678, 21679, 21807], [21679, 21808, 21807], [21679, 21680, 21809], [21679, 21809, 21808], [21680, 21681, 21809], [21681, 21810, 21809], [21681, 21682, 21811], [21681, 21811, 21810], [21682, 21683, 21811], [21683, 21812, 21811], [21683, 21684, 21813], [21683, 21813, 21812], [21684, 21685, 21813], [21685, 21814, 21813], [21685, 21686, 21815], [21685, 21815, 21814], [21686, 21687, 21815], [21687, 21816, 21815], [21687, 21688, 21817], [21687, 21817, 21816], [21688, 21689, 21817], [21689, 21818, 21817], [21689, 21690, 21819], [21689, 21819, 21818], [21690, 21691, 21819], [21691, 21820, 21819], [21691, 21692, 21821], [21691, 21821, 21820], [21692, 21693, 21821], [21693, 21822, 21821], [21693, 21694, 21823], [21693, 21823, 21822], [21694, 21695, 21823], [21695, 21824, 21823], [21695, 21696, 21825], [21695, 21825, 21824], [21696, 21697, 21825], [21697, 21826, 21825], [21697, 21698, 21827], [21697, 21827, 21826], [21698, 21699, 21827], [21699, 21828, 21827], [21699, 21700, 21829], [21699, 21829, 21828], [21700, 21701, 21829], [21701, 21830, 21829], [21701, 21702, 21831], [21701, 21831, 21830], [21702, 21703, 21831], [21703, 21832, 21831], [21703, 21704, 21833], [21703, 21833, 21832], [21704, 21705, 21833], [21705, 21834, 21833], [21705, 21706, 21835], [21705, 21835, 21834], [21706, 21707, 21835], [21707, 21836, 21835], [21707, 21708, 21837], [21707, 21837, 21836], [21708, 21709, 21837], [21709, 21838, 21837], [21709, 21710, 21839], [21709, 21839, 21838], [21710, 21711, 21839], [21711, 21840, 21839], [21711, 21712, 21841], [21711, 21841, 21840], [21712, 21713, 21841], [21713, 21842, 21841], [21713, 21714, 21843], [21713, 21843, 21842], [21714, 21715, 21843], [21715, 21844, 21843], [21715, 21716, 21845], [21715, 21845, 21844], [21716, 21717, 21845], [21717, 21846, 21845], [21718, 21719, 21847], [21719, 21848, 21847], [21719, 21720, 21849], [21719, 21849, 21848], [21720, 21721, 21849], [21721, 21850, 21849], [21721, 21722, 21851], [21721, 21851, 21850], [21722, 21723, 21851], [21723, 21852, 21851], [21723, 21724, 21853], [21723, 21853, 21852], 
[21724, 21725, 21853], [21725, 21854, 21853], [21725, 21726, 21855], [21725, 21855, 21854], [21726, 21727, 21855], [21727, 21856, 21855], [21727, 21728, 21857], [21727, 21857, 21856], [21728, 21729, 21857], [21729, 21858, 21857], [21729, 21730, 21859], [21729, 21859, 21858], [21730, 21731, 21859], [21731, 21860, 21859], [21731, 21732, 21861], [21731, 21861, 21860], [21732, 21733, 21861], [21733, 21862, 21861], [21733, 21734, 21863], [21733, 21863, 21862], [21734, 21735, 21863], [21735, 21864, 21863], [21735, 21736, 21865], [21735, 21865, 21864], [21736, 21737, 21865], [21737, 21866, 21865], [21737, 21738, 21867], [21737, 21867, 21866], [21738, 21739, 21867], [21739, 21868, 21867], [21739, 21740, 21869], [21739, 21869, 21868], [21740, 21741, 21869], [21741, 21870, 21869], [21741, 21742, 21871], [21741, 21871, 21870], [21742, 21743, 21871], [21743, 21872, 21871], [21743, 21744, 21873], [21743, 21873, 21872], [21744, 21745, 21873], [21745, 21874, 21873], [21745, 21746, 21875], [21745, 21875, 21874], [21746, 21747, 21875], [21747, 21876, 21875], [21747, 21748, 21877], [21747, 21877, 21876], [21748, 21749, 21877], [21749, 21878, 21877], [21749, 21750, 21879], [21749, 21879, 21878], [21750, 21751, 21879], [21751, 21880, 21879], [21751, 21752, 21881], [21751, 21881, 21880], [21752, 21753, 21881], [21753, 21882, 21881], [21753, 21754, 21883], [21753, 21883, 21882], [21754, 21755, 21883], [21755, 21884, 21883], [21755, 21756, 21885], [21755, 21885, 21884], [21756, 21757, 21885], [21757, 21886, 21885], [21757, 21758, 21887], [21757, 21887, 21886], [21758, 21759, 21887], [21759, 21888, 21887], [21759, 21760, 21889], [21759, 21889, 21888], [21760, 21761, 21889], [21761, 21890, 21889], [21761, 21762, 21891], [21761, 21891, 21890], [21762, 21763, 21891], [21763, 21892, 21891], [21763, 21764, 21893], [21763, 21893, 21892], [21764, 21765, 21893], [21765, 21894, 21893], [21765, 21766, 21895], [21765, 21895, 21894], [21766, 21767, 21895], [21767, 21896, 21895], [21767, 21768, 21897], [21767, 21897, 21896], [21768, 21769, 21897], [21769, 21898, 21897], [21769, 21770, 21899], [21769, 21899, 21898], [21770, 21771, 21899], [21771, 21900, 21899], [21771, 21772, 21901], [21771, 21901, 21900], [21772, 21773, 21901], [21773, 21902, 21901], [21773, 21774, 21903], [21773, 21903, 21902], [21774, 21775, 21903], [21775, 21904, 21903], [21775, 21776, 21905], [21775, 21905, 21904], [21776, 21777, 21905], [21777, 21906, 21905], [21777, 21778, 21907], [21777, 21907, 21906], [21778, 21779, 21907], [21779, 21908, 21907], [21779, 21780, 21909], [21779, 21909, 21908], [21780, 21781, 21909], [21781, 21910, 21909], [21781, 21782, 21911], [21781, 21911, 21910], [21782, 21783, 21911], [21783, 21912, 21911], [21783, 21784, 21913], [21783, 21913, 21912], [21784, 21785, 21913], [21785, 21914, 21913], [21785, 21786, 21915], [21785, 21915, 21914], [21786, 21787, 21915], [21787, 21916, 21915], [21787, 21788, 21917], [21787, 21917, 21916], [21788, 21789, 21917], [21789, 21918, 21917], [21789, 21790, 21919], [21789, 21919, 21918], [21790, 21791, 21919], [21791, 21920, 21919], [21791, 21792, 21921], [21791, 21921, 21920], [21792, 21793, 21921], [21793, 21922, 21921], [21793, 21794, 21923], [21793, 21923, 21922], [21794, 21795, 21923], [21795, 21924, 21923], [21795, 21796, 21925], [21795, 21925, 21924], [21796, 21797, 21925], [21797, 21926, 21925], [21797, 21798, 21927], [21797, 21927, 21926], [21798, 21799, 21927], [21799, 21928, 21927], [21799, 21800, 21929], [21799, 21929, 21928], [21800, 21801, 21929], [21801, 21930, 21929], [21801, 
21802, 21931], [21801, 21931, 21930], [21802, 21803, 21931], [21803, 21932, 21931], [21803, 21804, 21933], [21803, 21933, 21932], [21804, 21805, 21933], [21805, 21934, 21933], [21805, 21806, 21935], [21805, 21935, 21934], [21806, 21807, 21935], [21807, 21936, 21935], [21807, 21808, 21937], [21807, 21937, 21936], [21808, 21809, 21937], [21809, 21938, 21937], [21809, 21810, 21939], [21809, 21939, 21938], [21810, 21811, 21939], [21811, 21940, 21939], [21811, 21812, 21941], [21811, 21941, 21940], [21812, 21813, 21941], [21813, 21942, 21941], [21813, 21814, 21943], [21813, 21943, 21942], [21814, 21815, 21943], [21815, 21944, 21943], [21815, 21816, 21945], [21815, 21945, 21944], [21816, 21817, 21945], [21817, 21946, 21945], [21817, 21818, 21947], [21817, 21947, 21946], [21818, 21819, 21947], [21819, 21948, 21947], [21819, 21820, 21949], [21819, 21949, 21948], [21820, 21821, 21949], [21821, 21950, 21949], [21821, 21822, 21951], [21821, 21951, 21950], [21822, 21823, 21951], [21823, 21952, 21951], [21823, 21824, 21953], [21823, 21953, 21952], [21824, 21825, 21953], [21825, 21954, 21953], [21825, 21826, 21955], [21825, 21955, 21954], [21826, 21827, 21955], [21827, 21956, 21955], [21827, 21828, 21957], [21827, 21957, 21956], [21828, 21829, 21957], [21829, 21958, 21957], [21829, 21830, 21959], [21829, 21959, 21958], [21830, 21831, 21959], [21831, 21960, 21959], [21831, 21832, 21961], [21831, 21961, 21960], [21832, 21833, 21961], [21833, 21962, 21961], [21833, 21834, 21963], [21833, 21963, 21962], [21834, 21835, 21963], [21835, 21964, 21963], [21835, 21836, 21965], [21835, 21965, 21964], [21836, 21837, 21965], [21837, 21966, 21965], [21837, 21838, 21967], [21837, 21967, 21966], [21838, 21839, 21967], [21839, 21968, 21967], [21839, 21840, 21969], [21839, 21969, 21968], [21840, 21841, 21969], [21841, 21970, 21969], [21841, 21842, 21971], [21841, 21971, 21970], [21842, 21843, 21971], [21843, 21972, 21971], [21843, 21844, 21973], [21843, 21973, 21972], [21844, 21845, 21973], [21845, 21974, 21973], [21845, 21846, 21975], [21845, 21975, 21974], [21847, 21848, 21977], [21847, 21977, 21976], [21848, 21849, 21977], [21849, 21978, 21977], [21849, 21850, 21979], [21849, 21979, 21978], [21850, 21851, 21979], [21851, 21980, 21979], [21851, 21852, 21981], [21851, 21981, 21980], [21852, 21853, 21981], [21853, 21982, 21981], [21853, 21854, 21983], [21853, 21983, 21982], [21854, 21855, 21983], [21855, 21984, 21983], [21855, 21856, 21985], [21855, 21985, 21984], [21856, 21857, 21985], [21857, 21986, 21985], [21857, 21858, 21987], [21857, 21987, 21986], [21858, 21859, 21987], [21859, 21988, 21987], [21859, 21860, 21989], [21859, 21989, 21988], [21860, 21861, 21989], [21861, 21990, 21989], [21861, 21862, 21991], [21861, 21991, 21990], [21862, 21863, 21991], [21863, 21992, 21991], [21863, 21864, 21993], [21863, 21993, 21992], [21864, 21865, 21993], [21865, 21994, 21993], [21865, 21866, 21995], [21865, 21995, 21994], [21866, 21867, 21995], [21867, 21996, 21995], [21867, 21868, 21997], [21867, 21997, 21996], [21868, 21869, 21997], [21869, 21998, 21997], [21869, 21870, 21999], [21869, 21999, 21998], [21870, 21871, 21999], [21871, 22000, 21999], [21871, 21872, 22001], [21871, 22001, 22000], [21872, 21873, 22001], [21873, 22002, 22001], [21873, 21874, 22003], [21873, 22003, 22002], [21874, 21875, 22003], [21875, 22004, 22003], [21875, 21876, 22005], [21875, 22005, 22004], [21876, 21877, 22005], [21877, 22006, 22005], [21877, 21878, 22007], [21877, 22007, 22006], [21878, 21879, 22007], [21879, 22008, 22007], [21879, 21880, 
22009], [21879, 22009, 22008], [21880, 21881, 22009], [21881, 22010, 22009], [21881, 21882, 22011], [21881, 22011, 22010], [21882, 21883, 22011], [21883, 22012, 22011], [21883, 21884, 22013], [21883, 22013, 22012], [21884, 21885, 22013], [21885, 22014, 22013], [21885, 21886, 22015], [21885, 22015, 22014], [21886, 21887, 22015], [21887, 22016, 22015], [21887, 21888, 22017], [21887, 22017, 22016], [21888, 21889, 22017], [21889, 22018, 22017], [21889, 21890, 22019], [21889, 22019, 22018], [21890, 21891, 22019], [21891, 22020, 22019], [21891, 21892, 22021], [21891, 22021, 22020], [21892, 21893, 22021], [21893, 22022, 22021], [21893, 21894, 22023], [21893, 22023, 22022], [21894, 21895, 22023], [21895, 22024, 22023], [21895, 21896, 22025], [21895, 22025, 22024], [21896, 21897, 22025], [21897, 22026, 22025], [21897, 21898, 22027], [21897, 22027, 22026], [21898, 21899, 22027], [21899, 22028, 22027], [21899, 21900, 22029], [21899, 22029, 22028], [21900, 21901, 22029], [21901, 22030, 22029], [21901, 21902, 22031], [21901, 22031, 22030], [21902, 21903, 22031], [21903, 22032, 22031], [21903, 21904, 22033], [21903, 22033, 22032], [21904, 21905, 22033], [21905, 22034, 22033], [21905, 21906, 22035], [21905, 22035, 22034], [21906, 21907, 22035], [21907, 22036, 22035], [21907, 21908, 22037], [21907, 22037, 22036], [21908, 21909, 22037], [21909, 22038, 22037], [21909, 21910, 22039], [21909, 22039, 22038], [21910, 21911, 22039], [21911, 22040, 22039], [21911, 21912, 22041], [21911, 22041, 22040], [21912, 21913, 22041], [21913, 22042, 22041], [21913, 21914, 22043], [21913, 22043, 22042], [21914, 21915, 22043], [21915, 22044, 22043], [21915, 21916, 22045], [21915, 22045, 22044], [21916, 21917, 22045], [21917, 22046, 22045], [21917, 21918, 22047], [21917, 22047, 22046], [21918, 21919, 22047], [21919, 22048, 22047], [21919, 21920, 22049], [21919, 22049, 22048], [21920, 21921, 22049], [21921, 22050, 22049], [21921, 21922, 22051], [21921, 22051, 22050], [21922, 21923, 22051], [21923, 22052, 22051], [21923, 21924, 22053], [21923, 22053, 22052], [21924, 21925, 22053], [21925, 22054, 22053], [21925, 21926, 22055], [21925, 22055, 22054], [21926, 21927, 22055], [21927, 22056, 22055], [21927, 21928, 22057], [21927, 22057, 22056], [21928, 21929, 22057], [21929, 22058, 22057], [21929, 21930, 22059], [21929, 22059, 22058], [21930, 21931, 22059], [21931, 22060, 22059], [21931, 21932, 22061], [21931, 22061, 22060], [21932, 21933, 22061], [21933, 22062, 22061], [21933, 21934, 22063], [21933, 22063, 22062], [21934, 21935, 22063], [21935, 22064, 22063], [21935, 21936, 22065], [21935, 22065, 22064], [21936, 21937, 22065], [21937, 22066, 22065], [21937, 21938, 22067], [21937, 22067, 22066], [21938, 21939, 22067], [21939, 22068, 22067], [21939, 21940, 22069], [21939, 22069, 22068], [21940, 21941, 22069], [21941, 22070, 22069], [21941, 21942, 22071], [21941, 22071, 22070], [21942, 21943, 22071], [21943, 22072, 22071], [21943, 21944, 22073], [21943, 22073, 22072], [21944, 21945, 22073], [21945, 22074, 22073], [21945, 21946, 22075], [21945, 22075, 22074], [21946, 21947, 22075], [21947, 22076, 22075], [21947, 21948, 22077], [21947, 22077, 22076], [21948, 21949, 22077], [21949, 22078, 22077], [21949, 21950, 22079], [21949, 22079, 22078], [21950, 21951, 22079], [21951, 22080, 22079], [21951, 21952, 22081], [21951, 22081, 22080], [21952, 21953, 22081], [21953, 22082, 22081], [21953, 21954, 22083], [21953, 22083, 22082], [21954, 21955, 22083], [21955, 22084, 22083], [21955, 21956, 22085], [21955, 22085, 22084], [21956, 21957, 22085], 
[21957, 22086, 22085], [21957, 21958, 22087], [21957, 22087, 22086], [21958, 21959, 22087], [21959, 22088, 22087], [21959, 21960, 22089], [21959, 22089, 22088], [21960, 21961, 22089], [21961, 22090, 22089], [21961, 21962, 22091], [21961, 22091, 22090], [21962, 21963, 22091], [21963, 22092, 22091], [21963, 21964, 22093], [21963, 22093, 22092], [21964, 21965, 22093], [21965, 22094, 22093], [21965, 21966, 22095], [21965, 22095, 22094], [21966, 21967, 22095], [21967, 22096, 22095], [21967, 21968, 22097], [21967, 22097, 22096], [21968, 21969, 22097], [21969, 22098, 22097], [21969, 21970, 22099], [21969, 22099, 22098], [21970, 21971, 22099], [21971, 22100, 22099], [21971, 21972, 22101], [21971, 22101, 22100], [21972, 21973, 22101], [21973, 22102, 22101], [21973, 21974, 22103], [21973, 22103, 22102], [21974, 21975, 22103], [21975, 22104, 22103], [21976, 21977, 22105], [21977, 22106, 22105], [21977, 21978, 22107], [21977, 22107, 22106], [21978, 21979, 22107], [21979, 22108, 22107], [21979, 21980, 22109], [21979, 22109, 22108], [21980, 21981, 22109], [21981, 22110, 22109], [21981, 21982, 22111], [21981, 22111, 22110], [21982, 21983, 22111], [21983, 22112, 22111], [21983, 21984, 22113], [21983, 22113, 22112], [21984, 21985, 22113], [21985, 22114, 22113], [21985, 21986, 22115], [21985, 22115, 22114], [21986, 21987, 22115], [21987, 22116, 22115], [21987, 21988, 22117], [21987, 22117, 22116], [21988, 21989, 22117], [21989, 22118, 22117], [21989, 21990, 22119], [21989, 22119, 22118], [21990, 21991, 22119], [21991, 22120, 22119], [21991, 21992, 22121], [21991, 22121, 22120], [21992, 21993, 22121], [21993, 22122, 22121], [21993, 21994, 22123], [21993, 22123, 22122], [21994, 21995, 22123], [21995, 22124, 22123], [21995, 21996, 22125], [21995, 22125, 22124], [21996, 21997, 22125], [21997, 22126, 22125], [21997, 21998, 22127], [21997, 22127, 22126], [21998, 21999, 22127], [21999, 22128, 22127], [21999, 22000, 22129], [21999, 22129, 22128], [22000, 22001, 22129], [22001, 22130, 22129], [22001, 22002, 22131], [22001, 22131, 22130], [22002, 22003, 22131], [22003, 22132, 22131], [22003, 22004, 22133], [22003, 22133, 22132], [22004, 22005, 22133], [22005, 22134, 22133], [22005, 22006, 22135], [22005, 22135, 22134], [22006, 22007, 22135], [22007, 22136, 22135], [22007, 22008, 22137], [22007, 22137, 22136], [22008, 22009, 22137], [22009, 22138, 22137], [22009, 22010, 22139], [22009, 22139, 22138], [22010, 22011, 22139], [22011, 22140, 22139], [22011, 22012, 22141], [22011, 22141, 22140], [22012, 22013, 22141], [22013, 22142, 22141], [22013, 22014, 22143], [22013, 22143, 22142], [22014, 22015, 22143], [22015, 22144, 22143], [22015, 22016, 22145], [22015, 22145, 22144], [22016, 22017, 22145], [22017, 22146, 22145], [22017, 22018, 22147], [22017, 22147, 22146], [22018, 22019, 22147], [22019, 22148, 22147], [22019, 22020, 22149], [22019, 22149, 22148], [22020, 22021, 22149], [22021, 22150, 22149], [22021, 22022, 22151], [22021, 22151, 22150], [22022, 22023, 22151], [22023, 22152, 22151], [22023, 22024, 22153], [22023, 22153, 22152], [22024, 22025, 22153], [22025, 22154, 22153], [22025, 22026, 22155], [22025, 22155, 22154], [22026, 22027, 22155], [22027, 22156, 22155], [22027, 22028, 22157], [22027, 22157, 22156], [22028, 22029, 22157], [22029, 22158, 22157], [22029, 22030, 22159], [22029, 22159, 22158], [22030, 22031, 22159], [22031, 22160, 22159], [22031, 22032, 22161], [22031, 22161, 22160], [22032, 22033, 22161], [22033, 22162, 22161], [22033, 22034, 22163], [22033, 22163, 22162], [22034, 22035, 22163], [22035, 
22164, 22163], [22035, 22036, 22165], [22035, 22165, 22164], [22036, 22037, 22165], [22037, 22166, 22165], [22037, 22038, 22167], [22037, 22167, 22166], [22038, 22039, 22167], [22039, 22168, 22167], [22039, 22040, 22169], [22039, 22169, 22168], [22040, 22041, 22169], [22041, 22170, 22169], [22041, 22042, 22171], [22041, 22171, 22170], [22042, 22043, 22171], [22043, 22172, 22171], [22043, 22044, 22173], [22043, 22173, 22172], [22044, 22045, 22173], [22045, 22174, 22173], [22045, 22046, 22175], [22045, 22175, 22174], [22046, 22047, 22175], [22047, 22176, 22175], [22047, 22048, 22177], [22047, 22177, 22176], [22048, 22049, 22177], [22049, 22178, 22177], [22049, 22050, 22179], [22049, 22179, 22178], [22050, 22051, 22179], [22051, 22180, 22179], [22051, 22052, 22181], [22051, 22181, 22180], [22052, 22053, 22181], [22053, 22182, 22181], [22053, 22054, 22183], [22053, 22183, 22182], [22054, 22055, 22183], [22055, 22184, 22183], [22055, 22056, 22185], [22055, 22185, 22184], [22056, 22057, 22185], [22057, 22186, 22185], [22057, 22058, 22187], [22057, 22187, 22186], [22058, 22059, 22187], [22059, 22188, 22187], [22059, 22060, 22189], [22059, 22189, 22188], [22060, 22061, 22189], [22061, 22190, 22189], [22061, 22062, 22191], [22061, 22191, 22190], [22062, 22063, 22191], [22063, 22192, 22191], [22063, 22064, 22193], [22063, 22193, 22192], [22064, 22065, 22193], [22065, 22194, 22193], [22065, 22066, 22195], [22065, 22195, 22194], [22066, 22067, 22195], [22067, 22196, 22195], [22067, 22068, 22197], [22067, 22197, 22196], [22068, 22069, 22197], [22069, 22198, 22197], [22069, 22070, 22199], [22069, 22199, 22198], [22070, 22071, 22199], [22071, 22200, 22199], [22071, 22072, 22201], [22071, 22201, 22200], [22072, 22073, 22201], [22073, 22202, 22201], [22073, 22074, 22203], [22073, 22203, 22202], [22074, 22075, 22203], [22075, 22204, 22203], [22075, 22076, 22205], [22075, 22205, 22204], [22076, 22077, 22205], [22077, 22206, 22205], [22077, 22078, 22207], [22077, 22207, 22206], [22078, 22079, 22207], [22079, 22208, 22207], [22079, 22080, 22209], [22079, 22209, 22208], [22080, 22081, 22209], [22081, 22210, 22209], [22081, 22082, 22211], [22081, 22211, 22210], [22082, 22083, 22211], [22083, 22212, 22211], [22083, 22084, 22213], [22083, 22213, 22212], [22084, 22085, 22213], [22085, 22214, 22213], [22085, 22086, 22215], [22085, 22215, 22214], [22086, 22087, 22215], [22087, 22216, 22215], [22087, 22088, 22217], [22087, 22217, 22216], [22088, 22089, 22217], [22089, 22218, 22217], [22089, 22090, 22219], [22089, 22219, 22218], [22090, 22091, 22219], [22091, 22220, 22219], [22091, 22092, 22221], [22091, 22221, 22220], [22092, 22093, 22221], [22093, 22222, 22221], [22093, 22094, 22223], [22093, 22223, 22222], [22094, 22095, 22223], [22095, 22224, 22223], [22095, 22096, 22225], [22095, 22225, 22224], [22096, 22097, 22225], [22097, 22226, 22225], [22097, 22098, 22227], [22097, 22227, 22226], [22098, 22099, 22227], [22099, 22228, 22227], [22099, 22100, 22229], [22099, 22229, 22228], [22100, 22101, 22229], [22101, 22230, 22229], [22101, 22102, 22231], [22101, 22231, 22230], [22102, 22103, 22231], [22103, 22232, 22231], [22103, 22104, 22233], [22103, 22233, 22232], [22105, 22106, 22235], [22105, 22235, 22234], [22106, 22107, 22235], [22107, 22236, 22235], [22107, 22108, 22237], [22107, 22237, 22236], [22108, 22109, 22237], [22109, 22238, 22237], [22109, 22110, 22239], [22109, 22239, 22238], [22110, 22111, 22239], [22111, 22240, 22239], [22111, 22112, 22241], [22111, 22241, 22240], [22112, 22113, 22241], [22113, 22242, 
22241], [22113, 22114, 22243], [22113, 22243, 22242], [22114, 22115, 22243], [22115, 22244, 22243], [22115, 22116, 22245], [22115, 22245, 22244], [22116, 22117, 22245], [22117, 22246, 22245], [22117, 22118, 22247], [22117, 22247, 22246], [22118, 22119, 22247], [22119, 22248, 22247], [22119, 22120, 22249], [22119, 22249, 22248], [22120, 22121, 22249], [22121, 22250, 22249], [22121, 22122, 22251], [22121, 22251, 22250], [22122, 22123, 22251], [22123, 22252, 22251], [22123, 22124, 22253], [22123, 22253, 22252], [22124, 22125, 22253], [22125, 22254, 22253], [22125, 22126, 22255], [22125, 22255, 22254], [22126, 22127, 22255], [22127, 22256, 22255], [22127, 22128, 22257], [22127, 22257, 22256], [22128, 22129, 22257], [22129, 22258, 22257], [22129, 22130, 22259], [22129, 22259, 22258], [22130, 22131, 22259], [22131, 22260, 22259], [22131, 22132, 22261], [22131, 22261, 22260], [22132, 22133, 22261], [22133, 22262, 22261], [22133, 22134, 22263], [22133, 22263, 22262], [22134, 22135, 22263], [22135, 22264, 22263], [22135, 22136, 22265], [22135, 22265, 22264], [22136, 22137, 22265], [22137, 22266, 22265], [22137, 22138, 22267], [22137, 22267, 22266], [22138, 22139, 22267], [22139, 22268, 22267], [22139, 22140, 22269], [22139, 22269, 22268], [22140, 22141, 22269], [22141, 22270, 22269], [22141, 22142, 22271], [22141, 22271, 22270], [22142, 22143, 22271], [22143, 22272, 22271], [22143, 22144, 22273], [22143, 22273, 22272], [22144, 22145, 22273], [22145, 22274, 22273], [22145, 22146, 22275], [22145, 22275, 22274], [22146, 22147, 22275], [22147, 22276, 22275], [22147, 22148, 22277], [22147, 22277, 22276], [22148, 22149, 22277], [22149, 22278, 22277], [22149, 22150, 22279], [22149, 22279, 22278], [22150, 22151, 22279], [22151, 22280, 22279], [22151, 22152, 22281], [22151, 22281, 22280], [22152, 22153, 22281], [22153, 22282, 22281], [22153, 22154, 22283], [22153, 22283, 22282], [22154, 22155, 22283], [22155, 22284, 22283], [22155, 22156, 22285], [22155, 22285, 22284], [22156, 22157, 22285], [22157, 22286, 22285], [22157, 22158, 22287], [22157, 22287, 22286], [22158, 22159, 22287], [22159, 22288, 22287], [22159, 22160, 22289], [22159, 22289, 22288], [22160, 22161, 22289], [22161, 22290, 22289], [22161, 22162, 22291], [22161, 22291, 22290], [22162, 22163, 22291], [22163, 22292, 22291], [22163, 22164, 22293], [22163, 22293, 22292], [22164, 22165, 22293], [22165, 22294, 22293], [22165, 22166, 22295], [22165, 22295, 22294], [22166, 22167, 22295], [22167, 22296, 22295], [22167, 22168, 22297], [22167, 22297, 22296], [22168, 22169, 22297], [22169, 22298, 22297], [22169, 22170, 22299], [22169, 22299, 22298], [22170, 22171, 22299], [22171, 22300, 22299], [22171, 22172, 22301], [22171, 22301, 22300], [22172, 22173, 22301], [22173, 22302, 22301], [22173, 22174, 22303], [22173, 22303, 22302], [22174, 22175, 22303], [22175, 22304, 22303], [22175, 22176, 22305], [22175, 22305, 22304], [22176, 22177, 22305], [22177, 22306, 22305], [22177, 22178, 22307], [22177, 22307, 22306], [22178, 22179, 22307], [22179, 22308, 22307], [22179, 22180, 22309], [22179, 22309, 22308], [22180, 22181, 22309], [22181, 22310, 22309], [22181, 22182, 22311], [22181, 22311, 22310], [22182, 22183, 22311], [22183, 22312, 22311], [22183, 22184, 22313], [22183, 22313, 22312], [22184, 22185, 22313], [22185, 22314, 22313], [22185, 22186, 22315], [22185, 22315, 22314], [22186, 22187, 22315], [22187, 22316, 22315], [22187, 22188, 22317], [22187, 22317, 22316], [22188, 22189, 22317], [22189, 22318, 22317], [22189, 22190, 22319], [22189, 22319, 22318], 
[22190, 22191, 22319], [22191, 22320, 22319], [22191, 22192, 22321], [22191, 22321, 22320], [22192, 22193, 22321], [22193, 22322, 22321], [22193, 22194, 22323], [22193, 22323, 22322], [22194, 22195, 22323], [22195, 22324, 22323], [22195, 22196, 22325], [22195, 22325, 22324], [22196, 22197, 22325], [22197, 22326, 22325], [22197, 22198, 22327], [22197, 22327, 22326], [22198, 22199, 22327], [22199, 22328, 22327], [22199, 22200, 22329], [22199, 22329, 22328], [22200, 22201, 22329], [22201, 22330, 22329], [22201, 22202, 22331], [22201, 22331, 22330], [22202, 22203, 22331], [22203, 22332, 22331], [22203, 22204, 22333], [22203, 22333, 22332], [22204, 22205, 22333], [22205, 22334, 22333], [22205, 22206, 22335], [22205, 22335, 22334], [22206, 22207, 22335], [22207, 22336, 22335], [22207, 22208, 22337], [22207, 22337, 22336], [22208, 22209, 22337], [22209, 22338, 22337], [22209, 22210, 22339], [22209, 22339, 22338], [22210, 22211, 22339], [22211, 22340, 22339], [22211, 22212, 22341], [22211, 22341, 22340], [22212, 22213, 22341], [22213, 22342, 22341], [22213, 22214, 22343], [22213, 22343, 22342], [22214, 22215, 22343], [22215, 22344, 22343], [22215, 22216, 22345], [22215, 22345, 22344], [22216, 22217, 22345], [22217, 22346, 22345], [22217, 22218, 22347], [22217, 22347, 22346], [22218, 22219, 22347], [22219, 22348, 22347], [22219, 22220, 22349], [22219, 22349, 22348], [22220, 22221, 22349], [22221, 22350, 22349], [22221, 22222, 22351], [22221, 22351, 22350], [22222, 22223, 22351], [22223, 22352, 22351], [22223, 22224, 22353], [22223, 22353, 22352], [22224, 22225, 22353], [22225, 22354, 22353], [22225, 22226, 22355], [22225, 22355, 22354], [22226, 22227, 22355], [22227, 22356, 22355], [22227, 22228, 22357], [22227, 22357, 22356], [22228, 22229, 22357], [22229, 22358, 22357], [22229, 22230, 22359], [22229, 22359, 22358], [22230, 22231, 22359], [22231, 22360, 22359], [22231, 22232, 22361], [22231, 22361, 22360], [22232, 22233, 22361], [22233, 22362, 22361], [22234, 22235, 22363], [22235, 22364, 22363], [22235, 22236, 22365], [22235, 22365, 22364], [22236, 22237, 22365], [22237, 22366, 22365], [22237, 22238, 22367], [22237, 22367, 22366], [22238, 22239, 22367], [22239, 22368, 22367], [22239, 22240, 22369], [22239, 22369, 22368], [22240, 22241, 22369], [22241, 22370, 22369], [22241, 22242, 22371], [22241, 22371, 22370], [22242, 22243, 22371], [22243, 22372, 22371], [22243, 22244, 22373], [22243, 22373, 22372], [22244, 22245, 22373], [22245, 22374, 22373], [22245, 22246, 22375], [22245, 22375, 22374], [22246, 22247, 22375], [22247, 22376, 22375], [22247, 22248, 22377], [22247, 22377, 22376], [22248, 22249, 22377], [22249, 22378, 22377], [22249, 22250, 22379], [22249, 22379, 22378], [22250, 22251, 22379], [22251, 22380, 22379], [22251, 22252, 22381], [22251, 22381, 22380], [22252, 22253, 22381], [22253, 22382, 22381], [22253, 22254, 22383], [22253, 22383, 22382], [22254, 22255, 22383], [22255, 22384, 22383], [22255, 22256, 22385], [22255, 22385, 22384], [22256, 22257, 22385], [22257, 22386, 22385], [22257, 22258, 22387], [22257, 22387, 22386], [22258, 22259, 22387], [22259, 22388, 22387], [22259, 22260, 22389], [22259, 22389, 22388], [22260, 22261, 22389], [22261, 22390, 22389], [22261, 22262, 22391], [22261, 22391, 22390], [22262, 22263, 22391], [22263, 22392, 22391], [22263, 22264, 22393], [22263, 22393, 22392], [22264, 22265, 22393], [22265, 22394, 22393], [22265, 22266, 22395], [22265, 22395, 22394], [22266, 22267, 22395], [22267, 22396, 22395], [22267, 22268, 22397], [22267, 22397, 22396], [22268, 
22269, 22397], [22269, 22398, 22397], [22269, 22270, 22399], [22269, 22399, 22398], [22270, 22271, 22399], [22271, 22400, 22399], [22271, 22272, 22401], [22271, 22401, 22400], [22272, 22273, 22401], [22273, 22402, 22401], [22273, 22274, 22403], [22273, 22403, 22402], [22274, 22275, 22403], [22275, 22404, 22403], [22275, 22276, 22405], [22275, 22405, 22404], [22276, 22277, 22405], [22277, 22406, 22405], [22277, 22278, 22407], [22277, 22407, 22406], [22278, 22279, 22407], [22279, 22408, 22407], [22279, 22280, 22409], [22279, 22409, 22408], [22280, 22281, 22409], [22281, 22410, 22409], [22281, 22282, 22411], [22281, 22411, 22410], [22282, 22283, 22411], [22283, 22412, 22411], [22283, 22284, 22413], [22283, 22413, 22412], [22284, 22285, 22413], [22285, 22414, 22413], [22285, 22286, 22415], [22285, 22415, 22414], [22286, 22287, 22415], [22287, 22416, 22415], [22287, 22288, 22417], [22287, 22417, 22416], [22288, 22289, 22417], [22289, 22418, 22417], [22289, 22290, 22419], [22289, 22419, 22418], [22290, 22291, 22419], [22291, 22420, 22419], [22291, 22292, 22421], [22291, 22421, 22420], [22292, 22293, 22421], [22293, 22422, 22421], [22293, 22294, 22423], [22293, 22423, 22422], [22294, 22295, 22423], [22295, 22424, 22423], [22295, 22296, 22425], [22295, 22425, 22424], [22296, 22297, 22425], [22297, 22426, 22425], [22297, 22298, 22427], [22297, 22427, 22426], [22298, 22299, 22427], [22299, 22428, 22427], [22299, 22300, 22429], [22299, 22429, 22428], [22300, 22301, 22429], [22301, 22430, 22429], [22301, 22302, 22431], [22301, 22431, 22430], [22302, 22303, 22431], [22303, 22432, 22431], [22303, 22304, 22433], [22303, 22433, 22432], [22304, 22305, 22433], [22305, 22434, 22433], [22305, 22306, 22435], [22305, 22435, 22434], [22306, 22307, 22435], [22307, 22436, 22435], [22307, 22308, 22437], [22307, 22437, 22436], [22308, 22309, 22437], [22309, 22438, 22437], [22309, 22310, 22439], [22309, 22439, 22438], [22310, 22311, 22439], [22311, 22440, 22439], [22311, 22312, 22441], [22311, 22441, 22440], [22312, 22313, 22441], [22313, 22442, 22441], [22313, 22314, 22443], [22313, 22443, 22442], [22314, 22315, 22443], [22315, 22444, 22443], [22315, 22316, 22445], [22315, 22445, 22444], [22316, 22317, 22445], [22317, 22446, 22445], [22317, 22318, 22447], [22317, 22447, 22446], [22318, 22319, 22447], [22319, 22448, 22447], [22319, 22320, 22449], [22319, 22449, 22448], [22320, 22321, 22449], [22321, 22450, 22449], [22321, 22322, 22451], [22321, 22451, 22450], [22322, 22323, 22451], [22323, 22452, 22451], [22323, 22324, 22453], [22323, 22453, 22452], [22324, 22325, 22453], [22325, 22454, 22453], [22325, 22326, 22455], [22325, 22455, 22454], [22326, 22327, 22455], [22327, 22456, 22455], [22327, 22328, 22457], [22327, 22457, 22456], [22328, 22329, 22457], [22329, 22458, 22457], [22329, 22330, 22459], [22329, 22459, 22458], [22330, 22331, 22459], [22331, 22460, 22459], [22331, 22332, 22461], [22331, 22461, 22460], [22332, 22333, 22461], [22333, 22462, 22461], [22333, 22334, 22463], [22333, 22463, 22462], [22334, 22335, 22463], [22335, 22464, 22463], [22335, 22336, 22465], [22335, 22465, 22464], [22336, 22337, 22465], [22337, 22466, 22465], [22337, 22338, 22467], [22337, 22467, 22466], [22338, 22339, 22467], [22339, 22468, 22467], [22339, 22340, 22469], [22339, 22469, 22468], [22340, 22341, 22469], [22341, 22470, 22469], [22341, 22342, 22471], [22341, 22471, 22470], [22342, 22343, 22471], [22343, 22472, 22471], [22343, 22344, 22473], [22343, 22473, 22472], [22344, 22345, 22473], [22345, 22474, 22473], [22345, 22346, 
22475], [22345, 22475, 22474], [22346, 22347, 22475], [22347, 22476, 22475], [22347, 22348, 22477], [22347, 22477, 22476], [22348, 22349, 22477], [22349, 22478, 22477], [22349, 22350, 22479], [22349, 22479, 22478], [22350, 22351, 22479], [22351, 22480, 22479], [22351, 22352, 22481], [22351, 22481, 22480], [22352, 22353, 22481], [22353, 22482, 22481], [22353, 22354, 22483], [22353, 22483, 22482], [22354, 22355, 22483], [22355, 22484, 22483], [22355, 22356, 22485], [22355, 22485, 22484], [22356, 22357, 22485], [22357, 22486, 22485], [22357, 22358, 22487], [22357, 22487, 22486], [22358, 22359, 22487], [22359, 22488, 22487], [22359, 22360, 22489], [22359, 22489, 22488], [22360, 22361, 22489], [22361, 22490, 22489], [22361, 22362, 22491], [22361, 22491, 22490], [16171, 16300, 22493], [16171, 22493, 22492], [16300, 16429, 22493], [16429, 22494, 22493], [16429, 16558, 22495], [16429, 22495, 22494], [16558, 16687, 22495], [16687, 22496, 22495], [16687, 16816, 22497], [16687, 22497, 22496], [16816, 16945, 22497], [16945, 22498, 22497], [16945, 17074, 22499], [16945, 22499, 22498], [17074, 17203, 22499], [17203, 22500, 22499], [17203, 17332, 22501], [17203, 22501, 22500], [17332, 17461, 22501], [17461, 22502, 22501], [17461, 17590, 22503], [17461, 22503, 22502], [17590, 17719, 22503], [17719, 22504, 22503], [17719, 17848, 22505], [17719, 22505, 22504], [17848, 17977, 22505], [17977, 22506, 22505], [17977, 18106, 22507], [17977, 22507, 22506], [18106, 18235, 22507], [18235, 22508, 22507], [18235, 18364, 22509], [18235, 22509, 22508], [18364, 18493, 22509], [18493, 22510, 22509], [18493, 18622, 22511], [18493, 22511, 22510], [18622, 18751, 22511], [18751, 22512, 22511], [18751, 18880, 22513], [18751, 22513, 22512], [18880, 19009, 22513], [19009, 22514, 22513], [19009, 19138, 22515], [19009, 22515, 22514], [19138, 0, 22515], [0, 129, 22515], [22492, 22493, 22516], [22493, 22517, 22516], [22493, 22494, 22518], [22493, 22518, 22517], [22494, 22495, 22518], [22495, 22519, 22518], [22495, 22496, 22520], [22495, 22520, 22519], [22496, 22497, 22520], [22497, 22521, 22520], [22497, 22498, 22522], [22497, 22522, 22521], [22498, 22499, 22522], [22499, 22523, 22522], [22499, 22500, 22524], [22499, 22524, 22523], [22500, 22501, 22524], [22501, 22525, 22524], [22501, 22502, 22526], [22501, 22526, 22525], [22502, 22503, 22526], [22503, 22527, 22526], [22503, 22504, 22528], [22503, 22528, 22527], [22504, 22505, 22528], [22505, 22529, 22528], [22505, 22506, 22530], [22505, 22530, 22529], [22506, 22507, 22530], [22507, 22531, 22530], [22507, 22508, 22532], [22507, 22532, 22531], [22508, 22509, 22532], [22509, 22533, 22532], [22509, 22510, 22534], [22509, 22534, 22533], [22510, 22511, 22534], [22511, 22535, 22534], [22511, 22512, 22536], [22511, 22536, 22535], [22512, 22513, 22536], [22513, 22537, 22536], [22513, 22514, 22538], [22513, 22538, 22537], [22514, 22515, 22538], [22515, 22539, 22538], [22515, 129, 258], [22515, 258, 22539], [22516, 22517, 22541], [22516, 22541, 22540], [22517, 22518, 22541], [22518, 22542, 22541], [22518, 22519, 22543], [22518, 22543, 22542], [22519, 22520, 22543], [22520, 22544, 22543], [22520, 22521, 22545], [22520, 22545, 22544], [22521, 22522, 22545], [22522, 22546, 22545], [22522, 22523, 22547], [22522, 22547, 22546], [22523, 22524, 22547], [22524, 22548, 22547], [22524, 22525, 22549], [22524, 22549, 22548], [22525, 22526, 22549], [22526, 22550, 22549], [22526, 22527, 22551], [22526, 22551, 22550], [22527, 22528, 22551], [22528, 22552, 22551], [22528, 22529, 22553], [22528, 22553, 
22552], [22529, 22530, 22553], [22530, 22554, 22553], [22530, 22531, 22555], [22530, 22555, 22554], [22531, 22532, 22555], [22532, 22556, 22555], [22532, 22533, 22557], [22532, 22557, 22556], [22533, 22534, 22557], [22534, 22558, 22557], [22534, 22535, 22559], [22534, 22559, 22558], [22535, 22536, 22559], [22536, 22560, 22559], [22536, 22537, 22561], [22536, 22561, 22560], [22537, 22538, 22561], [22538, 22562, 22561], [22538, 22539, 22563], [22538, 22563, 22562], [22539, 258, 22563], [258, 387, 22563], [22540, 22541, 22564], [22541, 22565, 22564], [22541, 22542, 22566], [22541, 22566, 22565], [22542, 22543, 22566], [22543, 22567, 22566], [22543, 22544, 22568], [22543, 22568, 22567], [22544, 22545, 22568], [22545, 22569, 22568], [22545, 22546, 22570], [22545, 22570, 22569], [22546, 22547, 22570], [22547, 22571, 22570], [22547, 22548, 22572], [22547, 22572, 22571], [22548, 22549, 22572], [22549, 22573, 22572], [22549, 22550, 22574], [22549, 22574, 22573], [22550, 22551, 22574], [22551, 22575, 22574], [22551, 22552, 22576], [22551, 22576, 22575], [22552, 22553, 22576], [22553, 22577, 22576], [22553, 22554, 22578], [22553, 22578, 22577], [22554, 22555, 22578], [22555, 22579, 22578], [22555, 22556, 22580], [22555, 22580, 22579], [22556, 22557, 22580], [22557, 22581, 22580], [22557, 22558, 22582], [22557, 22582, 22581], [22558, 22559, 22582], [22559, 22583, 22582], [22559, 22560, 22584], [22559, 22584, 22583], [22560, 22561, 22584], [22561, 22585, 22584], [22561, 22562, 22586], [22561, 22586, 22585], [22562, 22563, 22586], [22563, 22587, 22586], [22563, 387, 516], [22563, 516, 22587], [22564, 22565, 22589], [22564, 22589, 22588], [22565, 22566, 22589], [22566, 22590, 22589], [22566, 22567, 22591], [22566, 22591, 22590], [22567, 22568, 22591], [22568, 22592, 22591], [22568, 22569, 22593], [22568, 22593, 22592], [22569, 22570, 22593], [22570, 22594, 22593], [22570, 22571, 22595], [22570, 22595, 22594], [22571, 22572, 22595], [22572, 22596, 22595], [22572, 22573, 22597], [22572, 22597, 22596], [22573, 22574, 22597], [22574, 22598, 22597], [22574, 22575, 22599], [22574, 22599, 22598], [22575, 22576, 22599], [22576, 22600, 22599], [22576, 22577, 22601], [22576, 22601, 22600], [22577, 22578, 22601], [22578, 22602, 22601], [22578, 22579, 22603], [22578, 22603, 22602], [22579, 22580, 22603], [22580, 22604, 22603], [22580, 22581, 22605], [22580, 22605, 22604], [22581, 22582, 22605], [22582, 22606, 22605], [22582, 22583, 22607], [22582, 22607, 22606], [22583, 22584, 22607], [22584, 22608, 22607], [22584, 22585, 22609], [22584, 22609, 22608], [22585, 22586, 22609], [22586, 22610, 22609], [22586, 22587, 22611], [22586, 22611, 22610], [22587, 516, 22611], [516, 645, 22611], [22588, 22589, 22612], [22589, 22613, 22612], [22589, 22590, 22614], [22589, 22614, 22613], [22590, 22591, 22614], [22591, 22615, 22614], [22591, 22592, 22616], [22591, 22616, 22615], [22592, 22593, 22616], [22593, 22617, 22616], [22593, 22594, 22618], [22593, 22618, 22617], [22594, 22595, 22618], [22595, 22619, 22618], [22595, 22596, 22620], [22595, 22620, 22619], [22596, 22597, 22620], [22597, 22621, 22620], [22597, 22598, 22622], [22597, 22622, 22621], [22598, 22599, 22622], [22599, 22623, 22622], [22599, 22600, 22624], [22599, 22624, 22623], [22600, 22601, 22624], [22601, 22625, 22624], [22601, 22602, 22626], [22601, 22626, 22625], [22602, 22603, 22626], [22603, 22627, 22626], [22603, 22604, 22628], [22603, 22628, 22627], [22604, 22605, 22628], [22605, 22629, 22628], [22605, 22606, 22630], [22605, 22630, 22629], [22606, 22607, 22630], 
[22607, 22631, 22630], [22607, 22608, 22632], [22607, 22632, 22631], [22608, 22609, 22632], [22609, 22633, 22632], [22609, 22610, 22634], [22609, 22634, 22633], [22610, 22611, 22634], [22611, 22635, 22634], [22611, 645, 774], [22611, 774, 22635], [22612, 22613, 22637], [22612, 22637, 22636], [22613, 22614, 22637], [22614, 22638, 22637], [22614, 22615, 22639], [22614, 22639, 22638], [22615, 22616, 22639], [22616, 22640, 22639], [22616, 22617, 22641], [22616, 22641, 22640], [22617, 22618, 22641], [22618, 22642, 22641], [22618, 22619, 22643], [22618, 22643, 22642], [22619, 22620, 22643], [22620, 22644, 22643], [22620, 22621, 22645], [22620, 22645, 22644], [22621, 22622, 22645], [22622, 22646, 22645], [22622, 22623, 22647], [22622, 22647, 22646], [22623, 22624, 22647], [22624, 22648, 22647], [22624, 22625, 22649], [22624, 22649, 22648], [22625, 22626, 22649], [22626, 22650, 22649], [22626, 22627, 22651], [22626, 22651, 22650], [22627, 22628, 22651], [22628, 22652, 22651], [22628, 22629, 22653], [22628, 22653, 22652], [22629, 22630, 22653], [22630, 22654, 22653], [22630, 22631, 22655], [22630, 22655, 22654], [22631, 22632, 22655], [22632, 22656, 22655], [22632, 22633, 22657], [22632, 22657, 22656], [22633, 22634, 22657], [22634, 22658, 22657], [22634, 22635, 22659], [22634, 22659, 22658], [22635, 774, 22659], [774, 903, 22659], [22636, 22637, 22660], [22637, 22661, 22660], [22637, 22638, 22662], [22637, 22662, 22661], [22638, 22639, 22662], [22639, 22663, 22662], [22639, 22640, 22664], [22639, 22664, 22663], [22640, 22641, 22664], [22641, 22665, 22664], [22641, 22642, 22666], [22641, 22666, 22665], [22642, 22643, 22666], [22643, 22667, 22666], [22643, 22644, 22668], [22643, 22668, 22667], [22644, 22645, 22668], [22645, 22669, 22668], [22645, 22646, 22670], [22645, 22670, 22669], [22646, 22647, 22670], [22647, 22671, 22670], [22647, 22648, 22672], [22647, 22672, 22671], [22648, 22649, 22672], [22649, 22673, 22672], [22649, 22650, 22674], [22649, 22674, 22673], [22650, 22651, 22674], [22651, 22675, 22674], [22651, 22652, 22676], [22651, 22676, 22675], [22652, 22653, 22676], [22653, 22677, 22676], [22653, 22654, 22678], [22653, 22678, 22677], [22654, 22655, 22678], [22655, 22679, 22678], [22655, 22656, 22680], [22655, 22680, 22679], [22656, 22657, 22680], [22657, 22681, 22680], [22657, 22658, 22682], [22657, 22682, 22681], [22658, 22659, 22682], [22659, 22683, 22682], [22659, 903, 1032], [22659, 1032, 22683], [22660, 22661, 22685], [22660, 22685, 22684], [22661, 22662, 22685], [22662, 22686, 22685], [22662, 22663, 22687], [22662, 22687, 22686], [22663, 22664, 22687], [22664, 22688, 22687], [22664, 22665, 22689], [22664, 22689, 22688], [22665, 22666, 22689], [22666, 22690, 22689], [22666, 22667, 22691], [22666, 22691, 22690], [22667, 22668, 22691], [22668, 22692, 22691], [22668, 22669, 22693], [22668, 22693, 22692], [22669, 22670, 22693], [22670, 22694, 22693], [22670, 22671, 22695], [22670, 22695, 22694], [22671, 22672, 22695], [22672, 22696, 22695], [22672, 22673, 22697], [22672, 22697, 22696], [22673, 22674, 22697], [22674, 22698, 22697], [22674, 22675, 22699], [22674, 22699, 22698], [22675, 22676, 22699], [22676, 22700, 22699], [22676, 22677, 22701], [22676, 22701, 22700], [22677, 22678, 22701], [22678, 22702, 22701], [22678, 22679, 22703], [22678, 22703, 22702], [22679, 22680, 22703], [22680, 22704, 22703], [22680, 22681, 22705], [22680, 22705, 22704], [22681, 22682, 22705], [22682, 22706, 22705], [22682, 22683, 22707], [22682, 22707, 22706], [22683, 1032, 22707], [1032, 1161, 22707], [22684, 
22685, 22708], [22685, 22709, 22708], [22685, 22686, 22710], [22685, 22710, 22709], [22686, 22687, 22710], [22687, 22711, 22710], [22687, 22688, 22712], [22687, 22712, 22711], [22688, 22689, 22712], [22689, 22713, 22712], [22689, 22690, 22714], [22689, 22714, 22713], [22690, 22691, 22714], [22691, 22715, 22714], [22691, 22692, 22716], [22691, 22716, 22715], [22692, 22693, 22716], [22693, 22717, 22716], [22693, 22694, 22718], [22693, 22718, 22717], [22694, 22695, 22718], [22695, 22719, 22718], [22695, 22696, 22720], [22695, 22720, 22719], [22696, 22697, 22720], [22697, 22721, 22720], [22697, 22698, 22722], [22697, 22722, 22721], [22698, 22699, 22722], [22699, 22723, 22722], [22699, 22700, 22724], [22699, 22724, 22723], [22700, 22701, 22724], [22701, 22725, 22724], [22701, 22702, 22726], [22701, 22726, 22725], [22702, 22703, 22726], [22703, 22727, 22726], [22703, 22704, 22728], [22703, 22728, 22727], [22704, 22705, 22728], [22705, 22729, 22728], [22705, 22706, 22730], [22705, 22730, 22729], [22706, 22707, 22730], [22707, 22731, 22730], [22707, 1161, 1290], [22707, 1290, 22731], [22708, 22709, 22733], [22708, 22733, 22732], [22709, 22710, 22733], [22710, 22734, 22733], [22710, 22711, 22735], [22710, 22735, 22734], [22711, 22712, 22735], [22712, 22736, 22735], [22712, 22713, 22737], [22712, 22737, 22736], [22713, 22714, 22737], [22714, 22738, 22737], [22714, 22715, 22739], [22714, 22739, 22738], [22715, 22716, 22739], [22716, 22740, 22739], [22716, 22717, 22741], [22716, 22741, 22740], [22717, 22718, 22741], [22718, 22742, 22741], [22718, 22719, 22743], [22718, 22743, 22742], [22719, 22720, 22743], [22720, 22744, 22743], [22720, 22721, 22745], [22720, 22745, 22744], [22721, 22722, 22745], [22722, 22746, 22745], [22722, 22723, 22747], [22722, 22747, 22746], [22723, 22724, 22747], [22724, 22748, 22747], [22724, 22725, 22749], [22724, 22749, 22748], [22725, 22726, 22749], [22726, 22750, 22749], [22726, 22727, 22751], [22726, 22751, 22750], [22727, 22728, 22751], [22728, 22752, 22751], [22728, 22729, 22753], [22728, 22753, 22752], [22729, 22730, 22753], [22730, 22754, 22753], [22730, 22731, 22755], [22730, 22755, 22754], [22731, 1290, 22755], [1290, 1419, 22755], [22732, 22733, 22756], [22733, 22757, 22756], [22733, 22734, 22758], [22733, 22758, 22757], [22734, 22735, 22758], [22735, 22759, 22758], [22735, 22736, 22760], [22735, 22760, 22759], [22736, 22737, 22760], [22737, 22761, 22760], [22737, 22738, 22762], [22737, 22762, 22761], [22738, 22739, 22762], [22739, 22763, 22762], [22739, 22740, 22764], [22739, 22764, 22763], [22740, 22741, 22764], [22741, 22765, 22764], [22741, 22742, 22766], [22741, 22766, 22765], [22742, 22743, 22766], [22743, 22767, 22766], [22743, 22744, 22768], [22743, 22768, 22767], [22744, 22745, 22768], [22745, 22769, 22768], [22745, 22746, 22770], [22745, 22770, 22769], [22746, 22747, 22770], [22747, 22771, 22770], [22747, 22748, 22772], [22747, 22772, 22771], [22748, 22749, 22772], [22749, 22773, 22772], [22749, 22750, 22774], [22749, 22774, 22773], [22750, 22751, 22774], [22751, 22775, 22774], [22751, 22752, 22776], [22751, 22776, 22775], [22752, 22753, 22776], [22753, 22777, 22776], [22753, 22754, 22778], [22753, 22778, 22777], [22754, 22755, 22778], [22755, 22779, 22778], [22755, 1419, 1548], [22755, 1548, 22779], [22756, 22757, 22781], [22756, 22781, 22780], [22757, 22758, 22781], [22758, 22782, 22781], [22758, 22759, 22783], [22758, 22783, 22782], [22759, 22760, 22783], [22760, 22784, 22783], [22760, 22761, 22785], [22760, 22785, 22784], [22761, 22762, 22785], 
[22762, 22786, 22785], [22762, 22763, 22787], [22762, 22787, 22786], [22763, 22764, 22787], [22764, 22788, 22787], [22764, 22765, 22789], [22764, 22789, 22788], [22765, 22766, 22789], [22766, 22790, 22789], [22766, 22767, 22791], [22766, 22791, 22790], [22767, 22768, 22791], [22768, 22792, 22791], [22768, 22769, 22793], [22768, 22793, 22792], [22769, 22770, 22793], [22770, 22794, 22793], [22770, 22771, 22795], [22770, 22795, 22794], [22771, 22772, 22795], [22772, 22796, 22795], [22772, 22773, 22797], [22772, 22797, 22796], [22773, 22774, 22797], [22774, 22798, 22797], [22774, 22775, 22799], [22774, 22799, 22798], [22775, 22776, 22799], [22776, 22800, 22799], [22776, 22777, 22801], [22776, 22801, 22800], [22777, 22778, 22801], [22778, 22802, 22801], [22778, 22779, 22803], [22778, 22803, 22802], [22779, 1548, 22803], [1548, 1677, 22803], [22780, 22781, 22804], [22781, 22805, 22804], [22781, 22782, 22806], [22781, 22806, 22805], [22782, 22783, 22806], [22783, 22807, 22806], [22783, 22784, 22808], [22783, 22808, 22807], [22784, 22785, 22808], [22785, 22809, 22808], [22785, 22786, 22810], [22785, 22810, 22809], [22786, 22787, 22810], [22787, 22811, 22810], [22787, 22788, 22812], [22787, 22812, 22811], [22788, 22789, 22812], [22789, 22813, 22812], [22789, 22790, 22814], [22789, 22814, 22813], [22790, 22791, 22814], [22791, 22815, 22814], [22791, 22792, 22816], [22791, 22816, 22815], [22792, 22793, 22816], [22793, 22817, 22816], [22793, 22794, 22818], [22793, 22818, 22817], [22794, 22795, 22818], [22795, 22819, 22818], [22795, 22796, 22820], [22795, 22820, 22819], [22796, 22797, 22820], [22797, 22821, 22820], [22797, 22798, 22822], [22797, 22822, 22821], [22798, 22799, 22822], [22799, 22823, 22822], [22799, 22800, 22824], [22799, 22824, 22823], [22800, 22801, 22824], [22801, 22825, 22824], [22801, 22802, 22826], [22801, 22826, 22825], [22802, 22803, 22826], [22803, 22827, 22826], [22803, 1677, 1806], [22803, 1806, 22827], [22804, 22805, 22829], [22804, 22829, 22828], [22805, 22806, 22829], [22806, 22830, 22829], [22806, 22807, 22831], [22806, 22831, 22830], [22807, 22808, 22831], [22808, 22832, 22831], [22808, 22809, 22833], [22808, 22833, 22832], [22809, 22810, 22833], [22810, 22834, 22833], [22810, 22811, 22835], [22810, 22835, 22834], [22811, 22812, 22835], [22812, 22836, 22835], [22812, 22813, 22837], [22812, 22837, 22836], [22813, 22814, 22837], [22814, 22838, 22837], [22814, 22815, 22839], [22814, 22839, 22838], [22815, 22816, 22839], [22816, 22840, 22839], [22816, 22817, 22841], [22816, 22841, 22840], [22817, 22818, 22841], [22818, 22842, 22841], [22818, 22819, 22843], [22818, 22843, 22842], [22819, 22820, 22843], [22820, 22844, 22843], [22820, 22821, 22845], [22820, 22845, 22844], [22821, 22822, 22845], [22822, 22846, 22845], [22822, 22823, 22847], [22822, 22847, 22846], [22823, 22824, 22847], [22824, 22848, 22847], [22824, 22825, 22849], [22824, 22849, 22848], [22825, 22826, 22849], [22826, 22850, 22849], [22826, 22827, 22851], [22826, 22851, 22850], [22827, 1806, 22851], [1806, 1935, 22851], [22828, 22829, 22852], [22829, 22853, 22852], [22829, 22830, 22854], [22829, 22854, 22853], [22830, 22831, 22854], [22831, 22855, 22854], [22831, 22832, 22856], [22831, 22856, 22855], [22832, 22833, 22856], [22833, 22857, 22856], [22833, 22834, 22858], [22833, 22858, 22857], [22834, 22835, 22858], [22835, 22859, 22858], [22835, 22836, 22860], [22835, 22860, 22859], [22836, 22837, 22860], [22837, 22861, 22860], [22837, 22838, 22862], [22837, 22862, 22861], [22838, 22839, 22862], [22839, 22863, 
22862], [22839, 22840, 22864], [22839, 22864, 22863], [22840, 22841, 22864], [22841, 22865, 22864], [22841, 22842, 22866], [22841, 22866, 22865], [22842, 22843, 22866], [22843, 22867, 22866], [22843, 22844, 22868], [22843, 22868, 22867], [22844, 22845, 22868], [22845, 22869, 22868], [22845, 22846, 22870], [22845, 22870, 22869], [22846, 22847, 22870], [22847, 22871, 22870], [22847, 22848, 22872], [22847, 22872, 22871], [22848, 22849, 22872], [22849, 22873, 22872], [22849, 22850, 22874], [22849, 22874, 22873], [22850, 22851, 22874], [22851, 22875, 22874], [22851, 1935, 2064], [22851, 2064, 22875], [22852, 22853, 22877], [22852, 22877, 22876], [22853, 22854, 22877], [22854, 22878, 22877], [22854, 22855, 22879], [22854, 22879, 22878], [22855, 22856, 22879], [22856, 22880, 22879], [22856, 22857, 22881], [22856, 22881, 22880], [22857, 22858, 22881], [22858, 22882, 22881], [22858, 22859, 22883], [22858, 22883, 22882], [22859, 22860, 22883], [22860, 22884, 22883], [22860, 22861, 22885], [22860, 22885, 22884], [22861, 22862, 22885], [22862, 22886, 22885], [22862, 22863, 22887], [22862, 22887, 22886], [22863, 22864, 22887], [22864, 22888, 22887], [22864, 22865, 22889], [22864, 22889, 22888], [22865, 22866, 22889], [22866, 22890, 22889], [22866, 22867, 22891], [22866, 22891, 22890], [22867, 22868, 22891], [22868, 22892, 22891], [22868, 22869, 22893], [22868, 22893, 22892], [22869, 22870, 22893], [22870, 22894, 22893], [22870, 22871, 22895], [22870, 22895, 22894], [22871, 22872, 22895], [22872, 22896, 22895], [22872, 22873, 22897], [22872, 22897, 22896], [22873, 22874, 22897], [22874, 22898, 22897], [22874, 22875, 22899], [22874, 22899, 22898], [22875, 2064, 22899], [2064, 2193, 22899], [22876, 22877, 22900], [22877, 22901, 22900], [22877, 22878, 22902], [22877, 22902, 22901], [22878, 22879, 22902], [22879, 22903, 22902], [22879, 22880, 22904], [22879, 22904, 22903], [22880, 22881, 22904], [22881, 22905, 22904], [22881, 22882, 22906], [22881, 22906, 22905], [22882, 22883, 22906], [22883, 22907, 22906], [22883, 22884, 22908], [22883, 22908, 22907], [22884, 22885, 22908], [22885, 22909, 22908], [22885, 22886, 22910], [22885, 22910, 22909], [22886, 22887, 22910], [22887, 22911, 22910], [22887, 22888, 22912], [22887, 22912, 22911], [22888, 22889, 22912], [22889, 22913, 22912], [22889, 22890, 22914], [22889, 22914, 22913], [22890, 22891, 22914], [22891, 22915, 22914], [22891, 22892, 22916], [22891, 22916, 22915], [22892, 22893, 22916], [22893, 22917, 22916], [22893, 22894, 22918], [22893, 22918, 22917], [22894, 22895, 22918], [22895, 22919, 22918], [22895, 22896, 22920], [22895, 22920, 22919], [22896, 22897, 22920], [22897, 22921, 22920], [22897, 22898, 22922], [22897, 22922, 22921], [22898, 22899, 22922], [22899, 22923, 22922], [22899, 2193, 2322], [22899, 2322, 22923], [22900, 22901, 22925], [22900, 22925, 22924], [22901, 22902, 22925], [22902, 22926, 22925], [22902, 22903, 22927], [22902, 22927, 22926], [22903, 22904, 22927], [22904, 22928, 22927], [22904, 22905, 22929], [22904, 22929, 22928], [22905, 22906, 22929], [22906, 22930, 22929], [22906, 22907, 22931], [22906, 22931, 22930], [22907, 22908, 22931], [22908, 22932, 22931], [22908, 22909, 22933], [22908, 22933, 22932], [22909, 22910, 22933], [22910, 22934, 22933], [22910, 22911, 22935], [22910, 22935, 22934], [22911, 22912, 22935], [22912, 22936, 22935], [22912, 22913, 22937], [22912, 22937, 22936], [22913, 22914, 22937], [22914, 22938, 22937], [22914, 22915, 22939], [22914, 22939, 22938], [22915, 22916, 22939], [22916, 22940, 22939], [22916, 
22917, 22941], [22916, 22941, 22940], [22917, 22918, 22941], [22918, 22942, 22941], [22918, 22919, 22943], [22918, 22943, 22942], [22919, 22920, 22943], [22920, 22944, 22943], [22920, 22921, 22945], [22920, 22945, 22944], [22921, 22922, 22945], [22922, 22946, 22945], [22922, 22923, 22947], [22922, 22947, 22946], [22923, 2322, 22947], [2322, 2451, 22947], [22924, 22925, 22948], [22925, 22949, 22948], [22925, 22926, 22950], [22925, 22950, 22949], [22926, 22927, 22950], [22927, 22951, 22950], [22927, 22928, 22952], [22927, 22952, 22951], [22928, 22929, 22952], [22929, 22953, 22952], [22929, 22930, 22954], [22929, 22954, 22953], [22930, 22931, 22954], [22931, 22955, 22954], [22931, 22932, 22956], [22931, 22956, 22955], [22932, 22933, 22956], [22933, 22957, 22956], [22933, 22934, 22958], [22933, 22958, 22957], [22934, 22935, 22958], [22935, 22959, 22958], [22935, 22936, 22960], [22935, 22960, 22959], [22936, 22937, 22960], [22937, 22961, 22960], [22937, 22938, 22962], [22937, 22962, 22961], [22938, 22939, 22962], [22939, 22963, 22962], [22939, 22940, 22964], [22939, 22964, 22963], [22940, 22941, 22964], [22941, 22965, 22964], [22941, 22942, 22966], [22941, 22966, 22965], [22942, 22943, 22966], [22943, 22967, 22966], [22943, 22944, 22968], [22943, 22968, 22967], [22944, 22945, 22968], [22945, 22969, 22968], [22945, 22946, 22970], [22945, 22970, 22969], [22946, 22947, 22970], [22947, 22971, 22970], [22947, 2451, 2580], [22947, 2580, 22971], [22948, 22949, 22973], [22948, 22973, 22972], [22949, 22950, 22973], [22950, 22974, 22973], [22950, 22951, 22975], [22950, 22975, 22974], [22951, 22952, 22975], [22952, 22976, 22975], [22952, 22953, 22977], [22952, 22977, 22976], [22953, 22954, 22977], [22954, 22978, 22977], [22954, 22955, 22979], [22954, 22979, 22978], [22955, 22956, 22979], [22956, 22980, 22979], [22956, 22957, 22981], [22956, 22981, 22980], [22957, 22958, 22981], [22958, 22982, 22981], [22958, 22959, 22983], [22958, 22983, 22982], [22959, 22960, 22983], [22960, 22984, 22983], [22960, 22961, 22985], [22960, 22985, 22984], [22961, 22962, 22985], [22962, 22986, 22985], [22962, 22963, 22987], [22962, 22987, 22986], [22963, 22964, 22987], [22964, 22988, 22987], [22964, 22965, 22989], [22964, 22989, 22988], [22965, 22966, 22989], [22966, 22990, 22989], [22966, 22967, 22991], [22966, 22991, 22990], [22967, 22968, 22991], [22968, 22992, 22991], [22968, 22969, 22993], [22968, 22993, 22992], [22969, 22970, 22993], [22970, 22994, 22993], [22970, 22971, 22995], [22970, 22995, 22994], [22971, 2580, 22995], [2580, 2709, 22995], [22972, 22973, 22996], [22973, 22997, 22996], [22973, 22974, 22998], [22973, 22998, 22997], [22974, 22975, 22998], [22975, 22999, 22998], [22975, 22976, 23000], [22975, 23000, 22999], [22976, 22977, 23000], [22977, 23001, 23000], [22977, 22978, 23002], [22977, 23002, 23001], [22978, 22979, 23002], [22979, 23003, 23002], [22979, 22980, 23004], [22979, 23004, 23003], [22980, 22981, 23004], [22981, 23005, 23004], [22981, 22982, 23006], [22981, 23006, 23005], [22982, 22983, 23006], [22983, 23007, 23006], [22983, 22984, 23008], [22983, 23008, 23007], [22984, 22985, 23008], [22985, 23009, 23008], [22985, 22986, 23010], [22985, 23010, 23009], [22986, 22987, 23010], [22987, 23011, 23010], [22987, 22988, 23012], [22987, 23012, 23011], [22988, 22989, 23012], [22989, 23013, 23012], [22989, 22990, 23014], [22989, 23014, 23013], [22990, 22991, 23014], [22991, 23015, 23014], [22991, 22992, 23016], [22991, 23016, 23015], [22992, 22993, 23016], [22993, 23017, 23016], [22993, 22994, 23018], 
[22993, 23018, 23017], [22994, 22995, 23018], [22995, 23019, 23018], [22995, 2709, 2838], [22995, 2838, 23019], [22996, 22997, 23021], [22996, 23021, 23020], [22997, 22998, 23021], [22998, 23022, 23021], [22998, 22999, 23023], [22998, 23023, 23022], [22999, 23000, 23023], [23000, 23024, 23023], [23000, 23001, 23025], [23000, 23025, 23024], [23001, 23002, 23025], [23002, 23026, 23025], [23002, 23003, 23027], [23002, 23027, 23026], [23003, 23004, 23027], [23004, 23028, 23027], [23004, 23005, 23029], [23004, 23029, 23028], [23005, 23006, 23029], [23006, 23030, 23029], [23006, 23007, 23031], [23006, 23031, 23030], [23007, 23008, 23031], [23008, 23032, 23031], [23008, 23009, 23033], [23008, 23033, 23032], [23009, 23010, 23033], [23010, 23034, 23033], [23010, 23011, 23035], [23010, 23035, 23034], [23011, 23012, 23035], [23012, 23036, 23035], [23012, 23013, 23037], [23012, 23037, 23036], [23013, 23014, 23037], [23014, 23038, 23037], [23014, 23015, 23039], [23014, 23039, 23038], [23015, 23016, 23039], [23016, 23040, 23039], [23016, 23017, 23041], [23016, 23041, 23040], [23017, 23018, 23041], [23018, 23042, 23041], [23018, 23019, 23043], [23018, 23043, 23042], [23019, 2838, 23043], [2838, 2967, 23043], [23020, 23021, 23044], [23021, 23045, 23044], [23021, 23022, 23046], [23021, 23046, 23045], [23022, 23023, 23046], [23023, 23047, 23046], [23023, 23024, 23048], [23023, 23048, 23047], [23024, 23025, 23048], [23025, 23049, 23048], [23025, 23026, 23050], [23025, 23050, 23049], [23026, 23027, 23050], [23027, 23051, 23050], [23027, 23028, 23052], [23027, 23052, 23051], [23028, 23029, 23052], [23029, 23053, 23052], [23029, 23030, 23054], [23029, 23054, 23053], [23030, 23031, 23054], [23031, 23055, 23054], [23031, 23032, 23056], [23031, 23056, 23055], [23032, 23033, 23056], [23033, 23057, 23056], [23033, 23034, 23058], [23033, 23058, 23057], [23034, 23035, 23058], [23035, 23059, 23058], [23035, 23036, 23060], [23035, 23060, 23059], [23036, 23037, 23060], [23037, 23061, 23060], [23037, 23038, 23062], [23037, 23062, 23061], [23038, 23039, 23062], [23039, 23063, 23062], [23039, 23040, 23064], [23039, 23064, 23063], [23040, 23041, 23064], [23041, 23065, 23064], [23041, 23042, 23066], [23041, 23066, 23065], [23042, 23043, 23066], [23043, 23067, 23066], [23043, 2967, 3096], [23043, 3096, 23067], [23044, 23045, 23069], [23044, 23069, 23068], [23045, 23046, 23069], [23046, 23070, 23069], [23046, 23047, 23071], [23046, 23071, 23070], [23047, 23048, 23071], [23048, 23072, 23071], [23048, 23049, 23073], [23048, 23073, 23072], [23049, 23050, 23073], [23050, 23074, 23073], [23050, 23051, 23075], [23050, 23075, 23074], [23051, 23052, 23075], [23052, 23076, 23075], [23052, 23053, 23077], [23052, 23077, 23076], [23053, 23054, 23077], [23054, 23078, 23077], [23054, 23055, 23079], [23054, 23079, 23078], [23055, 23056, 23079], [23056, 23080, 23079], [23056, 23057, 23081], [23056, 23081, 23080], [23057, 23058, 23081], [23058, 23082, 23081], [23058, 23059, 23083], [23058, 23083, 23082], [23059, 23060, 23083], [23060, 23084, 23083], [23060, 23061, 23085], [23060, 23085, 23084], [23061, 23062, 23085], [23062, 23086, 23085], [23062, 23063, 23087], [23062, 23087, 23086], [23063, 23064, 23087], [23064, 23088, 23087], [23064, 23065, 23089], [23064, 23089, 23088], [23065, 23066, 23089], [23066, 23090, 23089], [23066, 23067, 23091], [23066, 23091, 23090], [23067, 3096, 23091], [3096, 3225, 23091], [23068, 23069, 23092], [23069, 23093, 23092], [23069, 23070, 23094], [23069, 23094, 23093], [23070, 23071, 23094], [23071, 23095, 23094], 
[23071, 23072, 23096], [23071, 23096, 23095], [23072, 23073, 23096], [23073, 23097, 23096], [23073, 23074, 23098], [23073, 23098, 23097], [23074, 23075, 23098], [23075, 23099, 23098], [23075, 23076, 23100], [23075, 23100, 23099], [23076, 23077, 23100], [23077, 23101, 23100], [23077, 23078, 23102], [23077, 23102, 23101], [23078, 23079, 23102], [23079, 23103, 23102], [23079, 23080, 23104], [23079, 23104, 23103], [23080, 23081, 23104], [23081, 23105, 23104], [23081, 23082, 23106], [23081, 23106, 23105], [23082, 23083, 23106], [23083, 23107, 23106], [23083, 23084, 23108], [23083, 23108, 23107], [23084, 23085, 23108], [23085, 23109, 23108], [23085, 23086, 23110], [23085, 23110, 23109], [23086, 23087, 23110], [23087, 23111, 23110], [23087, 23088, 23112], [23087, 23112, 23111], [23088, 23089, 23112], [23089, 23113, 23112], [23089, 23090, 23114], [23089, 23114, 23113], [23090, 23091, 23114], [23091, 23115, 23114], [23091, 3225, 3354], [23091, 3354, 23115], [23092, 23093, 23117], [23092, 23117, 23116], [23093, 23094, 23117], [23094, 23118, 23117], [23094, 23095, 23119], [23094, 23119, 23118], [23095, 23096, 23119], [23096, 23120, 23119], [23096, 23097, 23121], [23096, 23121, 23120], [23097, 23098, 23121], [23098, 23122, 23121], [23098, 23099, 23123], [23098, 23123, 23122], [23099, 23100, 23123], [23100, 23124, 23123], [23100, 23101, 23125], [23100, 23125, 23124], [23101, 23102, 23125], [23102, 23126, 23125], [23102, 23103, 23127], [23102, 23127, 23126], [23103, 23104, 23127], [23104, 23128, 23127], [23104, 23105, 23129], [23104, 23129, 23128], [23105, 23106, 23129], [23106, 23130, 23129], [23106, 23107, 23131], [23106, 23131, 23130], [23107, 23108, 23131], [23108, 23132, 23131], [23108, 23109, 23133], [23108, 23133, 23132], [23109, 23110, 23133], [23110, 23134, 23133], [23110, 23111, 23135], [23110, 23135, 23134], [23111, 23112, 23135], [23112, 23136, 23135], [23112, 23113, 23137], [23112, 23137, 23136], [23113, 23114, 23137], [23114, 23138, 23137], [23114, 23115, 23139], [23114, 23139, 23138], [23115, 3354, 23139], [3354, 3483, 23139], [23116, 23117, 23140], [23117, 23141, 23140], [23117, 23118, 23142], [23117, 23142, 23141], [23118, 23119, 23142], [23119, 23143, 23142], [23119, 23120, 23144], [23119, 23144, 23143], [23120, 23121, 23144], [23121, 23145, 23144], [23121, 23122, 23146], [23121, 23146, 23145], [23122, 23123, 23146], [23123, 23147, 23146], [23123, 23124, 23148], [23123, 23148, 23147], [23124, 23125, 23148], [23125, 23149, 23148], [23125, 23126, 23150], [23125, 23150, 23149], [23126, 23127, 23150], [23127, 23151, 23150], [23127, 23128, 23152], [23127, 23152, 23151], [23128, 23129, 23152], [23129, 23153, 23152], [23129, 23130, 23154], [23129, 23154, 23153], [23130, 23131, 23154], [23131, 23155, 23154], [23131, 23132, 23156], [23131, 23156, 23155], [23132, 23133, 23156], [23133, 23157, 23156], [23133, 23134, 23158], [23133, 23158, 23157], [23134, 23135, 23158], [23135, 23159, 23158], [23135, 23136, 23160], [23135, 23160, 23159], [23136, 23137, 23160], [23137, 23161, 23160], [23137, 23138, 23162], [23137, 23162, 23161], [23138, 23139, 23162], [23139, 23163, 23162], [23139, 3483, 3612], [23139, 3612, 23163], [23140, 23141, 23165], [23140, 23165, 23164], [23141, 23142, 23165], [23142, 23166, 23165], [23142, 23143, 23167], [23142, 23167, 23166], [23143, 23144, 23167], [23144, 23168, 23167], [23144, 23145, 23169], [23144, 23169, 23168], [23145, 23146, 23169], [23146, 23170, 23169], [23146, 23147, 23171], [23146, 23171, 23170], [23147, 23148, 23171], [23148, 23172, 23171], [23148, 23149, 
23173], [23148, 23173, 23172], [23149, 23150, 23173], [23150, 23174, 23173], [23150, 23151, 23175], [23150, 23175, 23174], [23151, 23152, 23175], [23152, 23176, 23175], [23152, 23153, 23177], [23152, 23177, 23176], [23153, 23154, 23177], [23154, 23178, 23177], [23154, 23155, 23179], [23154, 23179, 23178], [23155, 23156, 23179], [23156, 23180, 23179], [23156, 23157, 23181], [23156, 23181, 23180], [23157, 23158, 23181], [23158, 23182, 23181], [23158, 23159, 23183], [23158, 23183, 23182], [23159, 23160, 23183], [23160, 23184, 23183], [23160, 23161, 23185], [23160, 23185, 23184], [23161, 23162, 23185], [23162, 23186, 23185], [23162, 23163, 23187], [23162, 23187, 23186], [23163, 3612, 23187], [3612, 3741, 23187], [23164, 23165, 23188], [23165, 23189, 23188], [23165, 23166, 23190], [23165, 23190, 23189], [23166, 23167, 23190], [23167, 23191, 23190], [23167, 23168, 23192], [23167, 23192, 23191], [23168, 23169, 23192], [23169, 23193, 23192], [23169, 23170, 23194], [23169, 23194, 23193], [23170, 23171, 23194], [23171, 23195, 23194], [23171, 23172, 23196], [23171, 23196, 23195], [23172, 23173, 23196], [23173, 23197, 23196], [23173, 23174, 23198], [23173, 23198, 23197], [23174, 23175, 23198], [23175, 23199, 23198], [23175, 23176, 23200], [23175, 23200, 23199], [23176, 23177, 23200], [23177, 23201, 23200], [23177, 23178, 23202], [23177, 23202, 23201], [23178, 23179, 23202], [23179, 23203, 23202], [23179, 23180, 23204], [23179, 23204, 23203], [23180, 23181, 23204], [23181, 23205, 23204], [23181, 23182, 23206], [23181, 23206, 23205], [23182, 23183, 23206], [23183, 23207, 23206], [23183, 23184, 23208], [23183, 23208, 23207], [23184, 23185, 23208], [23185, 23209, 23208], [23185, 23186, 23210], [23185, 23210, 23209], [23186, 23187, 23210], [23187, 23211, 23210], [23187, 3741, 3870], [23187, 3870, 23211], [23188, 23189, 23213], [23188, 23213, 23212], [23189, 23190, 23213], [23190, 23214, 23213], [23190, 23191, 23215], [23190, 23215, 23214], [23191, 23192, 23215], [23192, 23216, 23215], [23192, 23193, 23217], [23192, 23217, 23216], [23193, 23194, 23217], [23194, 23218, 23217], [23194, 23195, 23219], [23194, 23219, 23218], [23195, 23196, 23219], [23196, 23220, 23219], [23196, 23197, 23221], [23196, 23221, 23220], [23197, 23198, 23221], [23198, 23222, 23221], [23198, 23199, 23223], [23198, 23223, 23222], [23199, 23200, 23223], [23200, 23224, 23223], [23200, 23201, 23225], [23200, 23225, 23224], [23201, 23202, 23225], [23202, 23226, 23225], [23202, 23203, 23227], [23202, 23227, 23226], [23203, 23204, 23227], [23204, 23228, 23227], [23204, 23205, 23229], [23204, 23229, 23228], [23205, 23206, 23229], [23206, 23230, 23229], [23206, 23207, 23231], [23206, 23231, 23230], [23207, 23208, 23231], [23208, 23232, 23231], [23208, 23209, 23233], [23208, 23233, 23232], [23209, 23210, 23233], [23210, 23234, 23233], [23210, 23211, 23235], [23210, 23235, 23234], [23211, 3870, 23235], [3870, 3999, 23235], [23212, 23213, 23236], [23213, 23237, 23236], [23213, 23214, 23238], [23213, 23238, 23237], [23214, 23215, 23238], [23215, 23239, 23238], [23215, 23216, 23240], [23215, 23240, 23239], [23216, 23217, 23240], [23217, 23241, 23240], [23217, 23218, 23242], [23217, 23242, 23241], [23218, 23219, 23242], [23219, 23243, 23242], [23219, 23220, 23244], [23219, 23244, 23243], [23220, 23221, 23244], [23221, 23245, 23244], [23221, 23222, 23246], [23221, 23246, 23245], [23222, 23223, 23246], [23223, 23247, 23246], [23223, 23224, 23248], [23223, 23248, 23247], [23224, 23225, 23248], [23225, 23249, 23248], [23225, 23226, 23250], [23225, 
23250, 23249], [23226, 23227, 23250], [23227, 23251, 23250], [23227, 23228, 23252], [23227, 23252, 23251], [23228, 23229, 23252], [23229, 23253, 23252], [23229, 23230, 23254], [23229, 23254, 23253], [23230, 23231, 23254], [23231, 23255, 23254], [23231, 23232, 23256], [23231, 23256, 23255], [23232, 23233, 23256], [23233, 23257, 23256], [23233, 23234, 23258], [23233, 23258, 23257], [23234, 23235, 23258], [23235, 23259, 23258], [23235, 3999, 4128], [23235, 4128, 23259], [23236, 23237, 23261], [23236, 23261, 23260], [23237, 23238, 23261], [23238, 23262, 23261], [23238, 23239, 23263], [23238, 23263, 23262], [23239, 23240, 23263], [23240, 23264, 23263], [23240, 23241, 23265], [23240, 23265, 23264], [23241, 23242, 23265], [23242, 23266, 23265], [23242, 23243, 23267], [23242, 23267, 23266], [23243, 23244, 23267], [23244, 23268, 23267], [23244, 23245, 23269], [23244, 23269, 23268], [23245, 23246, 23269], [23246, 23270, 23269], [23246, 23247, 23271], [23246, 23271, 23270], [23247, 23248, 23271], [23248, 23272, 23271], [23248, 23249, 23273], [23248, 23273, 23272], [23249, 23250, 23273], [23250, 23274, 23273], [23250, 23251, 23275], [23250, 23275, 23274], [23251, 23252, 23275], [23252, 23276, 23275], [23252, 23253, 23277], [23252, 23277, 23276], [23253, 23254, 23277], [23254, 23278, 23277], [23254, 23255, 23279], [23254, 23279, 23278], [23255, 23256, 23279], [23256, 23280, 23279], [23256, 23257, 23281], [23256, 23281, 23280], [23257, 23258, 23281], [23258, 23282, 23281], [23258, 23259, 23283], [23258, 23283, 23282], [23259, 4128, 23283], [4128, 4257, 23283], [23260, 23261, 23284], [23261, 23285, 23284], [23261, 23262, 23286], [23261, 23286, 23285], [23262, 23263, 23286], [23263, 23287, 23286], [23263, 23264, 23288], [23263, 23288, 23287], [23264, 23265, 23288], [23265, 23289, 23288], [23265, 23266, 23290], [23265, 23290, 23289], [23266, 23267, 23290], [23267, 23291, 23290], [23267, 23268, 23292], [23267, 23292, 23291], [23268, 23269, 23292], [23269, 23293, 23292], [23269, 23270, 23294], [23269, 23294, 23293], [23270, 23271, 23294], [23271, 23295, 23294], [23271, 23272, 23296], [23271, 23296, 23295], [23272, 23273, 23296], [23273, 23297, 23296], [23273, 23274, 23298], [23273, 23298, 23297], [23274, 23275, 23298], [23275, 23299, 23298], [23275, 23276, 23300], [23275, 23300, 23299], [23276, 23277, 23300], [23277, 23301, 23300], [23277, 23278, 23302], [23277, 23302, 23301], [23278, 23279, 23302], [23279, 23303, 23302], [23279, 23280, 23304], [23279, 23304, 23303], [23280, 23281, 23304], [23281, 23305, 23304], [23281, 23282, 23306], [23281, 23306, 23305], [23282, 23283, 23306], [23283, 23307, 23306], [23283, 4257, 4386], [23283, 4386, 23307], [23284, 23285, 23309], [23284, 23309, 23308], [23285, 23286, 23309], [23286, 23310, 23309], [23286, 23287, 23311], [23286, 23311, 23310], [23287, 23288, 23311], [23288, 23312, 23311], [23288, 23289, 23313], [23288, 23313, 23312], [23289, 23290, 23313], [23290, 23314, 23313], [23290, 23291, 23315], [23290, 23315, 23314], [23291, 23292, 23315], [23292, 23316, 23315], [23292, 23293, 23317], [23292, 23317, 23316], [23293, 23294, 23317], [23294, 23318, 23317], [23294, 23295, 23319], [23294, 23319, 23318], [23295, 23296, 23319], [23296, 23320, 23319], [23296, 23297, 23321], [23296, 23321, 23320], [23297, 23298, 23321], [23298, 23322, 23321], [23298, 23299, 23323], [23298, 23323, 23322], [23299, 23300, 23323], [23300, 23324, 23323], [23300, 23301, 23325], [23300, 23325, 23324], [23301, 23302, 23325], [23302, 23326, 23325], [23302, 23303, 23327], [23302, 23327, 23326], 
[23303, 23304, 23327], [23304, 23328, 23327], [23304, 23305, 23329], [23304, 23329, 23328], [23305, 23306, 23329], [23306, 23330, 23329], [23306, 23307, 23331], [23306, 23331, 23330], [23307, 4386, 23331], [4386, 4515, 23331], [23308, 23309, 23332], [23309, 23333, 23332], [23309, 23310, 23334], [23309, 23334, 23333], [23310, 23311, 23334], [23311, 23335, 23334], [23311, 23312, 23336], [23311, 23336, 23335], [23312, 23313, 23336], [23313, 23337, 23336], [23313, 23314, 23338], [23313, 23338, 23337], [23314, 23315, 23338], [23315, 23339, 23338], [23315, 23316, 23340], [23315, 23340, 23339], [23316, 23317, 23340], [23317, 23341, 23340], [23317, 23318, 23342], [23317, 23342, 23341], [23318, 23319, 23342], [23319, 23343, 23342], [23319, 23320, 23344], [23319, 23344, 23343], [23320, 23321, 23344], [23321, 23345, 23344], [23321, 23322, 23346], [23321, 23346, 23345], [23322, 23323, 23346], [23323, 23347, 23346], [23323, 23324, 23348], [23323, 23348, 23347], [23324, 23325, 23348], [23325, 23349, 23348], [23325, 23326, 23350], [23325, 23350, 23349], [23326, 23327, 23350], [23327, 23351, 23350], [23327, 23328, 23352], [23327, 23352, 23351], [23328, 23329, 23352], [23329, 23353, 23352], [23329, 23330, 23354], [23329, 23354, 23353], [23330, 23331, 23354], [23331, 23355, 23354], [23331, 4515, 4644], [23331, 4644, 23355], [23332, 23333, 23357], [23332, 23357, 23356], [23333, 23334, 23357], [23334, 23358, 23357], [23334, 23335, 23359], [23334, 23359, 23358], [23335, 23336, 23359], [23336, 23360, 23359], [23336, 23337, 23361], [23336, 23361, 23360], [23337, 23338, 23361], [23338, 23362, 23361], [23338, 23339, 23363], [23338, 23363, 23362], [23339, 23340, 23363], [23340, 23364, 23363], [23340, 23341, 23365], [23340, 23365, 23364], [23341, 23342, 23365], [23342, 23366, 23365], [23342, 23343, 23367], [23342, 23367, 23366], [23343, 23344, 23367], [23344, 23368, 23367], [23344, 23345, 23369], [23344, 23369, 23368], [23345, 23346, 23369], [23346, 23370, 23369], [23346, 23347, 23371], [23346, 23371, 23370], [23347, 23348, 23371], [23348, 23372, 23371], [23348, 23349, 23373], [23348, 23373, 23372], [23349, 23350, 23373], [23350, 23374, 23373], [23350, 23351, 23375], [23350, 23375, 23374], [23351, 23352, 23375], [23352, 23376, 23375], [23352, 23353, 23377], [23352, 23377, 23376], [23353, 23354, 23377], [23354, 23378, 23377], [23354, 23355, 23379], [23354, 23379, 23378], [23355, 4644, 23379], [4644, 4773, 23379], [23356, 23357, 23380], [23357, 23381, 23380], [23357, 23358, 23382], [23357, 23382, 23381], [23358, 23359, 23382], [23359, 23383, 23382], [23359, 23360, 23384], [23359, 23384, 23383], [23360, 23361, 23384], [23361, 23385, 23384], [23361, 23362, 23386], [23361, 23386, 23385], [23362, 23363, 23386], [23363, 23387, 23386], [23363, 23364, 23388], [23363, 23388, 23387], [23364, 23365, 23388], [23365, 23389, 23388], [23365, 23366, 23390], [23365, 23390, 23389], [23366, 23367, 23390], [23367, 23391, 23390], [23367, 23368, 23392], [23367, 23392, 23391], [23368, 23369, 23392], [23369, 23393, 23392], [23369, 23370, 23394], [23369, 23394, 23393], [23370, 23371, 23394], [23371, 23395, 23394], [23371, 23372, 23396], [23371, 23396, 23395], [23372, 23373, 23396], [23373, 23397, 23396], [23373, 23374, 23398], [23373, 23398, 23397], [23374, 23375, 23398], [23375, 23399, 23398], [23375, 23376, 23400], [23375, 23400, 23399], [23376, 23377, 23400], [23377, 23401, 23400], [23377, 23378, 23402], [23377, 23402, 23401], [23378, 23379, 23402], [23379, 23403, 23402], [23379, 4773, 4902], [23379, 4902, 23403], [23380, 23381, 23405], 
[23380, 23405, 23404], [23381, 23382, 23405], [23382, 23406, 23405], [23382, 23383, 23407], [23382, 23407, 23406], [23383, 23384, 23407], [23384, 23408, 23407], [23384, 23385, 23409], [23384, 23409, 23408], [23385, 23386, 23409], [23386, 23410, 23409], [23386, 23387, 23411], [23386, 23411, 23410], [23387, 23388, 23411], [23388, 23412, 23411], [23388, 23389, 23413], [23388, 23413, 23412], [23389, 23390, 23413], [23390, 23414, 23413], [23390, 23391, 23415], [23390, 23415, 23414], [23391, 23392, 23415], [23392, 23416, 23415], [23392, 23393, 23417], [23392, 23417, 23416], [23393, 23394, 23417], [23394, 23418, 23417], [23394, 23395, 23419], [23394, 23419, 23418], [23395, 23396, 23419], [23396, 23420, 23419], [23396, 23397, 23421], [23396, 23421, 23420], [23397, 23398, 23421], [23398, 23422, 23421], [23398, 23399, 23423], [23398, 23423, 23422], [23399, 23400, 23423], [23400, 23424, 23423], [23400, 23401, 23425], [23400, 23425, 23424], [23401, 23402, 23425], [23402, 23426, 23425], [23402, 23403, 23427], [23402, 23427, 23426], [23403, 4902, 23427], [4902, 5031, 23427], [23404, 23405, 23428], [23405, 23429, 23428], [23405, 23406, 23430], [23405, 23430, 23429], [23406, 23407, 23430], [23407, 23431, 23430], [23407, 23408, 23432], [23407, 23432, 23431], [23408, 23409, 23432], [23409, 23433, 23432], [23409, 23410, 23434], [23409, 23434, 23433], [23410, 23411, 23434], [23411, 23435, 23434], [23411, 23412, 23436], [23411, 23436, 23435], [23412, 23413, 23436], [23413, 23437, 23436], [23413, 23414, 23438], [23413, 23438, 23437], [23414, 23415, 23438], [23415, 23439, 23438], [23415, 23416, 23440], [23415, 23440, 23439], [23416, 23417, 23440], [23417, 23441, 23440], [23417, 23418, 23442], [23417, 23442, 23441], [23418, 23419, 23442], [23419, 23443, 23442], [23419, 23420, 23444], [23419, 23444, 23443], [23420, 23421, 23444], [23421, 23445, 23444], [23421, 23422, 23446], [23421, 23446, 23445], [23422, 23423, 23446], [23423, 23447, 23446], [23423, 23424, 23448], [23423, 23448, 23447], [23424, 23425, 23448], [23425, 23449, 23448], [23425, 23426, 23450], [23425, 23450, 23449], [23426, 23427, 23450], [23427, 23451, 23450], [23427, 5031, 5160], [23427, 5160, 23451], [23428, 23429, 23453], [23428, 23453, 23452], [23429, 23430, 23453], [23430, 23454, 23453], [23430, 23431, 23455], [23430, 23455, 23454], [23431, 23432, 23455], [23432, 23456, 23455], [23432, 23433, 23457], [23432, 23457, 23456], [23433, 23434, 23457], [23434, 23458, 23457], [23434, 23435, 23459], [23434, 23459, 23458], [23435, 23436, 23459], [23436, 23460, 23459], [23436, 23437, 23461], [23436, 23461, 23460], [23437, 23438, 23461], [23438, 23462, 23461], [23438, 23439, 23463], [23438, 23463, 23462], [23439, 23440, 23463], [23440, 23464, 23463], [23440, 23441, 23465], [23440, 23465, 23464], [23441, 23442, 23465], [23442, 23466, 23465], [23442, 23443, 23467], [23442, 23467, 23466], [23443, 23444, 23467], [23444, 23468, 23467], [23444, 23445, 23469], [23444, 23469, 23468], [23445, 23446, 23469], [23446, 23470, 23469], [23446, 23447, 23471], [23446, 23471, 23470], [23447, 23448, 23471], [23448, 23472, 23471], [23448, 23449, 23473], [23448, 23473, 23472], [23449, 23450, 23473], [23450, 23474, 23473], [23450, 23451, 23475], [23450, 23475, 23474], [23451, 5160, 23475], [5160, 5289, 23475], [23452, 23453, 23476], [23453, 23477, 23476], [23453, 23454, 23478], [23453, 23478, 23477], [23454, 23455, 23478], [23455, 23479, 23478], [23455, 23456, 23480], [23455, 23480, 23479], [23456, 23457, 23480], [23457, 23481, 23480], [23457, 23458, 23482], [23457, 23482, 
23481], [23458, 23459, 23482], [23459, 23483, 23482], [23459, 23460, 23484], [23459, 23484, 23483], [23460, 23461, 23484], [23461, 23485, 23484], [23461, 23462, 23486], [23461, 23486, 23485], [23462, 23463, 23486], [23463, 23487, 23486], [23463, 23464, 23488], [23463, 23488, 23487], [23464, 23465, 23488], [23465, 23489, 23488], [23465, 23466, 23490], [23465, 23490, 23489], [23466, 23467, 23490], [23467, 23491, 23490], [23467, 23468, 23492], [23467, 23492, 23491], [23468, 23469, 23492], [23469, 23493, 23492], [23469, 23470, 23494], [23469, 23494, 23493], [23470, 23471, 23494], [23471, 23495, 23494], [23471, 23472, 23496], [23471, 23496, 23495], [23472, 23473, 23496], [23473, 23497, 23496], [23473, 23474, 23498], [23473, 23498, 23497], [23474, 23475, 23498], [23475, 23499, 23498], [23475, 5289, 5418], [23475, 5418, 23499], [23476, 23477, 23501], [23476, 23501, 23500], [23477, 23478, 23501], [23478, 23502, 23501], [23478, 23479, 23503], [23478, 23503, 23502], [23479, 23480, 23503], [23480, 23504, 23503], [23480, 23481, 23505], [23480, 23505, 23504], [23481, 23482, 23505], [23482, 23506, 23505], [23482, 23483, 23507], [23482, 23507, 23506], [23483, 23484, 23507], [23484, 23508, 23507], [23484, 23485, 23509], [23484, 23509, 23508], [23485, 23486, 23509], [23486, 23510, 23509], [23486, 23487, 23511], [23486, 23511, 23510], [23487, 23488, 23511], [23488, 23512, 23511], [23488, 23489, 23513], [23488, 23513, 23512], [23489, 23490, 23513], [23490, 23514, 23513], [23490, 23491, 23515], [23490, 23515, 23514], [23491, 23492, 23515], [23492, 23516, 23515], [23492, 23493, 23517], [23492, 23517, 23516], [23493, 23494, 23517], [23494, 23518, 23517], [23494, 23495, 23519], [23494, 23519, 23518], [23495, 23496, 23519], [23496, 23520, 23519], [23496, 23497, 23521], [23496, 23521, 23520], [23497, 23498, 23521], [23498, 23522, 23521], [23498, 23499, 23523], [23498, 23523, 23522], [23499, 5418, 23523], [5418, 5547, 23523], [23500, 23501, 23524], [23501, 23525, 23524], [23501, 23502, 23526], [23501, 23526, 23525], [23502, 23503, 23526], [23503, 23527, 23526], [23503, 23504, 23528], [23503, 23528, 23527], [23504, 23505, 23528], [23505, 23529, 23528], [23505, 23506, 23530], [23505, 23530, 23529], [23506, 23507, 23530], [23507, 23531, 23530], [23507, 23508, 23532], [23507, 23532, 23531], [23508, 23509, 23532], [23509, 23533, 23532], [23509, 23510, 23534], [23509, 23534, 23533], [23510, 23511, 23534], [23511, 23535, 23534], [23511, 23512, 23536], [23511, 23536, 23535], [23512, 23513, 23536], [23513, 23537, 23536], [23513, 23514, 23538], [23513, 23538, 23537], [23514, 23515, 23538], [23515, 23539, 23538], [23515, 23516, 23540], [23515, 23540, 23539], [23516, 23517, 23540], [23517, 23541, 23540], [23517, 23518, 23542], [23517, 23542, 23541], [23518, 23519, 23542], [23519, 23543, 23542], [23519, 23520, 23544], [23519, 23544, 23543], [23520, 23521, 23544], [23521, 23545, 23544], [23521, 23522, 23546], [23521, 23546, 23545], [23522, 23523, 23546], [23523, 23547, 23546], [23523, 5547, 5676], [23523, 5676, 23547], [23524, 23525, 23549], [23524, 23549, 23548], [23525, 23526, 23549], [23526, 23550, 23549], [23526, 23527, 23551], [23526, 23551, 23550], [23527, 23528, 23551], [23528, 23552, 23551], [23528, 23529, 23553], [23528, 23553, 23552], [23529, 23530, 23553], [23530, 23554, 23553], [23530, 23531, 23555], [23530, 23555, 23554], [23531, 23532, 23555], [23532, 23556, 23555], [23532, 23533, 23557], [23532, 23557, 23556], [23533, 23534, 23557], [23534, 23558, 23557], [23534, 23535, 23559], [23534, 23559, 23558], [23535, 
23536, 23559], [23536, 23560, 23559], [23536, 23537, 23561], [23536, 23561, 23560], [23537, 23538, 23561], [23538, 23562, 23561], [23538, 23539, 23563], [23538, 23563, 23562], [23539, 23540, 23563], [23540, 23564, 23563], [23540, 23541, 23565], [23540, 23565, 23564], [23541, 23542, 23565], [23542, 23566, 23565], [23542, 23543, 23567], [23542, 23567, 23566], [23543, 23544, 23567], [23544, 23568, 23567], [23544, 23545, 23569], [23544, 23569, 23568], [23545, 23546, 23569], [23546, 23570, 23569], [23546, 23547, 23571], [23546, 23571, 23570], [23547, 5676, 23571], [5676, 5805, 23571], [23548, 23549, 23572], [23549, 23573, 23572], [23549, 23550, 23574], [23549, 23574, 23573], [23550, 23551, 23574], [23551, 23575, 23574], [23551, 23552, 23576], [23551, 23576, 23575], [23552, 23553, 23576], [23553, 23577, 23576], [23553, 23554, 23578], [23553, 23578, 23577], [23554, 23555, 23578], [23555, 23579, 23578], [23555, 23556, 23580], [23555, 23580, 23579], [23556, 23557, 23580], [23557, 23581, 23580], [23557, 23558, 23582], [23557, 23582, 23581], [23558, 23559, 23582], [23559, 23583, 23582], [23559, 23560, 23584], [23559, 23584, 23583], [23560, 23561, 23584], [23561, 23585, 23584], [23561, 23562, 23586], [23561, 23586, 23585], [23562, 23563, 23586], [23563, 23587, 23586], [23563, 23564, 23588], [23563, 23588, 23587], [23564, 23565, 23588], [23565, 23589, 23588], [23565, 23566, 23590], [23565, 23590, 23589], [23566, 23567, 23590], [23567, 23591, 23590], [23567, 23568, 23592], [23567, 23592, 23591], [23568, 23569, 23592], [23569, 23593, 23592], [23569, 23570, 23594], [23569, 23594, 23593], [23570, 23571, 23594], [23571, 23595, 23594], [23571, 5805, 5934], [23571, 5934, 23595], [23572, 23573, 23597], [23572, 23597, 23596], [23573, 23574, 23597], [23574, 23598, 23597], [23574, 23575, 23599], [23574, 23599, 23598], [23575, 23576, 23599], [23576, 23600, 23599], [23576, 23577, 23601], [23576, 23601, 23600], [23577, 23578, 23601], [23578, 23602, 23601], [23578, 23579, 23603], [23578, 23603, 23602], [23579, 23580, 23603], [23580, 23604, 23603], [23580, 23581, 23605], [23580, 23605, 23604], [23581, 23582, 23605], [23582, 23606, 23605], [23582, 23583, 23607], [23582, 23607, 23606], [23583, 23584, 23607], [23584, 23608, 23607], [23584, 23585, 23609], [23584, 23609, 23608], [23585, 23586, 23609], [23586, 23610, 23609], [23586, 23587, 23611], [23586, 23611, 23610], [23587, 23588, 23611], [23588, 23612, 23611], [23588, 23589, 23613], [23588, 23613, 23612], [23589, 23590, 23613], [23590, 23614, 23613], [23590, 23591, 23615], [23590, 23615, 23614], [23591, 23592, 23615], [23592, 23616, 23615], [23592, 23593, 23617], [23592, 23617, 23616], [23593, 23594, 23617], [23594, 23618, 23617], [23594, 23595, 23619], [23594, 23619, 23618], [23595, 5934, 23619], [5934, 6063, 23619], [23596, 23597, 23620], [23597, 23621, 23620], [23597, 23598, 23622], [23597, 23622, 23621], [23598, 23599, 23622], [23599, 23623, 23622], [23599, 23600, 23624], [23599, 23624, 23623], [23600, 23601, 23624], [23601, 23625, 23624], [23601, 23602, 23626], [23601, 23626, 23625], [23602, 23603, 23626], [23603, 23627, 23626], [23603, 23604, 23628], [23603, 23628, 23627], [23604, 23605, 23628], [23605, 23629, 23628], [23605, 23606, 23630], [23605, 23630, 23629], [23606, 23607, 23630], [23607, 23631, 23630], [23607, 23608, 23632], [23607, 23632, 23631], [23608, 23609, 23632], [23609, 23633, 23632], [23609, 23610, 23634], [23609, 23634, 23633], [23610, 23611, 23634], [23611, 23635, 23634], [23611, 23612, 23636], [23611, 23636, 23635], [23612, 23613, 23636], 
[23613, 23637, 23636], [23613, 23614, 23638], [23613, 23638, 23637], [23614, 23615, 23638], [23615, 23639, 23638], [23615, 23616, 23640], [23615, 23640, 23639], [23616, 23617, 23640], [23617, 23641, 23640], [23617, 23618, 23642], [23617, 23642, 23641], [23618, 23619, 23642], [23619, 23643, 23642], [23619, 6063, 6190], [23619, 6190, 23643], [23620, 23621, 23645], [23620, 23645, 23644], [23621, 23622, 23645], [23622, 23646, 23645], [23622, 23623, 23647], [23622, 23647, 23646], [23623, 23624, 23647], [23624, 23648, 23647], [23624, 23625, 23649], [23624, 23649, 23648], [23625, 23626, 23649], [23626, 23650, 23649], [23626, 23627, 23651], [23626, 23651, 23650], [23627, 23628, 23651], [23628, 23652, 23651], [23628, 23629, 23653], [23628, 23653, 23652], [23629, 23630, 23653], [23630, 23654, 23653], [23630, 23631, 23655], [23630, 23655, 23654], [23631, 23632, 23655], [23632, 23656, 23655], [23632, 23633, 23657], [23632, 23657, 23656], [23633, 23634, 23657], [23634, 23658, 23657], [23634, 23635, 23659], [23634, 23659, 23658], [23635, 23636, 23659], [23636, 23660, 23659], [23636, 23637, 23661], [23636, 23661, 23660], [23637, 23638, 23661], [23638, 23662, 23661], [23638, 23639, 23663], [23638, 23663, 23662], [23639, 23640, 23663], [23640, 23664, 23663], [23640, 23641, 23665], [23640, 23665, 23664], [23641, 23642, 23665], [23642, 23666, 23665], [23642, 23643, 23667], [23642, 23667, 23666], [23643, 6190, 23667], [6190, 6315, 23667], [23644, 23645, 23668], [23645, 23669, 23668], [23645, 23646, 23670], [23645, 23670, 23669], [23646, 23647, 23670], [23647, 23671, 23670], [23647, 23648, 23672], [23647, 23672, 23671], [23648, 23649, 23672], [23649, 23673, 23672], [23649, 23650, 23674], [23649, 23674, 23673], [23650, 23651, 23674], [23651, 23675, 23674], [23651, 23652, 23676], [23651, 23676, 23675], [23652, 23653, 23676], [23653, 23677, 23676], [23653, 23654, 23678], [23653, 23678, 23677], [23654, 23655, 23678], [23655, 23679, 23678], [23655, 23656, 23680], [23655, 23680, 23679], [23656, 23657, 23680], [23657, 23681, 23680], [23657, 23658, 23682], [23657, 23682, 23681], [23658, 23659, 23682], [23659, 23683, 23682], [23659, 23660, 23684], [23659, 23684, 23683], [23660, 23661, 23684], [23661, 23685, 23684], [23661, 23662, 23686], [23661, 23686, 23685], [23662, 23663, 23686], [23663, 23687, 23686], [23663, 23664, 23688], [23663, 23688, 23687], [23664, 23665, 23688], [23665, 23689, 23688], [23665, 23666, 23690], [23665, 23690, 23689], [23666, 23667, 23690], [23667, 23691, 23690], [23667, 6315, 6438], [23667, 6438, 23691], [23668, 23669, 23693], [23668, 23693, 23692], [23669, 23670, 23693], [23670, 23694, 23693], [23670, 23671, 23695], [23670, 23695, 23694], [23671, 23672, 23695], [23672, 23696, 23695], [23672, 23673, 23697], [23672, 23697, 23696], [23673, 23674, 23697], [23674, 23698, 23697], [23674, 23675, 23699], [23674, 23699, 23698], [23675, 23676, 23699], [23676, 23700, 23699], [23676, 23677, 23701], [23676, 23701, 23700], [23677, 23678, 23701], [23678, 23702, 23701], [23678, 23679, 23703], [23678, 23703, 23702], [23679, 23680, 23703], [23680, 23704, 23703], [23680, 23681, 23705], [23680, 23705, 23704], [23681, 23682, 23705], [23682, 23706, 23705], [23682, 23683, 23707], [23682, 23707, 23706], [23683, 23684, 23707], [23684, 23708, 23707], [23684, 23685, 23709], [23684, 23709, 23708], [23685, 23686, 23709], [23686, 23710, 23709], [23686, 23687, 23711], [23686, 23711, 23710], [23687, 23688, 23711], [23688, 23712, 23711], [23688, 23689, 23713], [23688, 23713, 23712], [23689, 23690, 23713], [23690, 23714, 
23713], [23690, 23691, 23715], [23690, 23715, 23714], [23691, 6438, 23715], [6438, 6558, 23715], [23692, 23693, 23716], [23693, 23717, 23716], [23693, 23694, 23718], [23693, 23718, 23717], [23694, 23695, 23718], [23695, 23719, 23718], [23695, 23696, 23720], [23695, 23720, 23719], [23696, 23697, 23720], [23697, 23721, 23720], [23697, 23698, 23722], [23697, 23722, 23721], [23698, 23699, 23722], [23699, 23723, 23722], [23699, 23700, 23724], [23699, 23724, 23723], [23700, 23701, 23724], [23701, 23725, 23724], [23701, 23702, 23726], [23701, 23726, 23725], [23702, 23703, 23726], [23703, 23727, 23726], [23703, 23704, 23728], [23703, 23728, 23727], [23704, 23705, 23728], [23705, 23729, 23728], [23705, 23706, 23730], [23705, 23730, 23729], [23706, 23707, 23730], [23707, 23731, 23730], [23707, 23708, 23732], [23707, 23732, 23731], [23708, 23709, 23732], [23709, 23733, 23732], [23709, 23710, 23734], [23709, 23734, 23733], [23710, 23711, 23734], [23711, 23735, 23734], [23711, 23712, 23736], [23711, 23736, 23735], [23712, 23713, 23736], [23713, 23737, 23736], [23713, 23714, 23738], [23713, 23738, 23737], [23714, 23715, 23738], [23715, 23739, 23738], [23715, 6558, 6676], [23715, 6676, 23739], [23716, 23717, 23741], [23716, 23741, 23740], [23717, 23718, 23741], [23718, 23742, 23741], [23718, 23719, 23743], [23718, 23743, 23742], [23719, 23720, 23743], [23720, 23744, 23743], [23720, 23721, 23745], [23720, 23745, 23744], [23721, 23722, 23745], [23722, 23746, 23745], [23722, 23723, 23747], [23722, 23747, 23746], [23723, 23724, 23747], [23724, 23748, 23747], [23724, 23725, 23749], [23724, 23749, 23748], [23725, 23726, 23749], [23726, 23750, 23749], [23726, 23727, 23751], [23726, 23751, 23750], [23727, 23728, 23751], [23728, 23752, 23751], [23728, 23729, 23753], [23728, 23753, 23752], [23729, 23730, 23753], [23730, 23754, 23753], [23730, 23731, 23755], [23730, 23755, 23754], [23731, 23732, 23755], [23732, 23756, 23755], [23732, 23733, 23757], [23732, 23757, 23756], [23733, 23734, 23757], [23734, 23758, 23757], [23734, 23735, 23759], [23734, 23759, 23758], [23735, 23736, 23759], [23736, 23760, 23759], [23736, 23737, 23761], [23736, 23761, 23760], [23737, 23738, 23761], [23738, 23762, 23761], [23738, 23739, 23763], [23738, 23763, 23762], [23739, 6676, 23763], [6676, 6794, 23763], [23740, 23741, 23764], [23741, 23765, 23764], [23741, 23742, 23766], [23741, 23766, 23765], [23742, 23743, 23766], [23743, 23767, 23766], [23743, 23744, 23768], [23743, 23768, 23767], [23744, 23745, 23768], [23745, 23769, 23768], [23745, 23746, 23770], [23745, 23770, 23769], [23746, 23747, 23770], [23747, 23771, 23770], [23747, 23748, 23772], [23747, 23772, 23771], [23748, 23749, 23772], [23749, 23773, 23772], [23749, 23750, 23774], [23749, 23774, 23773], [23750, 23751, 23774], [23751, 23775, 23774], [23751, 23752, 23776], [23751, 23776, 23775], [23752, 23753, 23776], [23753, 23777, 23776], [23753, 23754, 23778], [23753, 23778, 23777], [23754, 23755, 23778], [23755, 23779, 23778], [23755, 23756, 23780], [23755, 23780, 23779], [23756, 23757, 23780], [23757, 23781, 23780], [23757, 23758, 23782], [23757, 23782, 23781], [23758, 23759, 23782], [23759, 23783, 23782], [23759, 23760, 23784], [23759, 23784, 23783], [23760, 23761, 23784], [23761, 23785, 23784], [23761, 23762, 23786], [23761, 23786, 23785], [23762, 23763, 23786], [23763, 23787, 23786], [23763, 6794, 6912], [23763, 6912, 23787], [23764, 23765, 23789], [23764, 23789, 23788], [23765, 23766, 23789], [23766, 23790, 23789], [23766, 23767, 23791], [23766, 23791, 23790], [23767, 23768, 
23791], [23768, 23792, 23791], [23768, 23769, 23793], [23768, 23793, 23792], [23769, 23770, 23793], [23770, 23794, 23793], [23770, 23771, 23795], [23770, 23795, 23794], [23771, 23772, 23795], [23772, 23796, 23795], [23772, 23773, 23797], [23772, 23797, 23796], [23773, 23774, 23797], [23774, 23798, 23797], [23774, 23775, 23799], [23774, 23799, 23798], [23775, 23776, 23799], [23776, 23800, 23799], [23776, 23777, 23801], [23776, 23801, 23800], [23777, 23778, 23801], [23778, 23802, 23801], [23778, 23779, 23803], [23778, 23803, 23802], [23779, 23780, 23803], [23780, 23804, 23803], [23780, 23781, 23805], [23780, 23805, 23804], [23781, 23782, 23805], [23782, 23806, 23805], [23782, 23783, 23807], [23782, 23807, 23806], [23783, 23784, 23807], [23784, 23808, 23807], [23784, 23785, 23809], [23784, 23809, 23808], [23785, 23786, 23809], [23786, 23810, 23809], [23786, 23787, 23811], [23786, 23811, 23810], [23787, 6912, 23811], [6912, 7030, 23811], [23788, 23789, 23812], [23789, 23813, 23812], [23789, 23790, 23814], [23789, 23814, 23813], [23790, 23791, 23814], [23791, 23815, 23814], [23791, 23792, 23816], [23791, 23816, 23815], [23792, 23793, 23816], [23793, 23817, 23816], [23793, 23794, 23818], [23793, 23818, 23817], [23794, 23795, 23818], [23795, 23819, 23818], [23795, 23796, 23820], [23795, 23820, 23819], [23796, 23797, 23820], [23797, 23821, 23820], [23797, 23798, 23822], [23797, 23822, 23821], [23798, 23799, 23822], [23799, 23823, 23822], [23799, 23800, 23824], [23799, 23824, 23823], [23800, 23801, 23824], [23801, 23825, 23824], [23801, 23802, 23826], [23801, 23826, 23825], [23802, 23803, 23826], [23803, 23827, 23826], [23803, 23804, 23828], [23803, 23828, 23827], [23804, 23805, 23828], [23805, 23829, 23828], [23805, 23806, 23830], [23805, 23830, 23829], [23806, 23807, 23830], [23807, 23831, 23830], [23807, 23808, 23832], [23807, 23832, 23831], [23808, 23809, 23832], [23809, 23833, 23832], [23809, 23810, 23834], [23809, 23834, 23833], [23810, 23811, 23834], [23811, 23835, 23834], [23811, 7030, 7148], [23811, 7148, 23835], [23812, 23813, 23837], [23812, 23837, 23836], [23813, 23814, 23837], [23814, 23838, 23837], [23814, 23815, 23839], [23814, 23839, 23838], [23815, 23816, 23839], [23816, 23840, 23839], [23816, 23817, 23841], [23816, 23841, 23840], [23817, 23818, 23841], [23818, 23842, 23841], [23818, 23819, 23843], [23818, 23843, 23842], [23819, 23820, 23843], [23820, 23844, 23843], [23820, 23821, 23845], [23820, 23845, 23844], [23821, 23822, 23845], [23822, 23846, 23845], [23822, 23823, 23847], [23822, 23847, 23846], [23823, 23824, 23847], [23824, 23848, 23847], [23824, 23825, 23849], [23824, 23849, 23848], [23825, 23826, 23849], [23826, 23850, 23849], [23826, 23827, 23851], [23826, 23851, 23850], [23827, 23828, 23851], [23828, 23852, 23851], [23828, 23829, 23853], [23828, 23853, 23852], [23829, 23830, 23853], [23830, 23854, 23853], [23830, 23831, 23855], [23830, 23855, 23854], [23831, 23832, 23855], [23832, 23856, 23855], [23832, 23833, 23857], [23832, 23857, 23856], [23833, 23834, 23857], [23834, 23858, 23857], [23834, 23835, 23859], [23834, 23859, 23858], [23835, 7148, 23859], [7148, 7266, 23859], [23836, 23837, 23860], [23837, 23861, 23860], [23837, 23838, 23862], [23837, 23862, 23861], [23838, 23839, 23862], [23839, 23863, 23862], [23839, 23840, 23864], [23839, 23864, 23863], [23840, 23841, 23864], [23841, 23865, 23864], [23841, 23842, 23866], [23841, 23866, 23865], [23842, 23843, 23866], [23843, 23867, 23866], [23843, 23844, 23868], [23843, 23868, 23867], [23844, 23845, 23868], [23845, 
23869, 23868], [23845, 23846, 23870], [23845, 23870, 23869], [23846, 23847, 23870], [23847, 23871, 23870], [23847, 23848, 23872], [23847, 23872, 23871], [23848, 23849, 23872], [23849, 23873, 23872], [23849, 23850, 23874], [23849, 23874, 23873], [23850, 23851, 23874], [23851, 23875, 23874], [23851, 23852, 23876], [23851, 23876, 23875], [23852, 23853, 23876], [23853, 23877, 23876], [23853, 23854, 23878], [23853, 23878, 23877], [23854, 23855, 23878], [23855, 23879, 23878], [23855, 23856, 23880], [23855, 23880, 23879], [23856, 23857, 23880], [23857, 23881, 23880], [23857, 23858, 23882], [23857, 23882, 23881], [23858, 23859, 23882], [23859, 23883, 23882], [23859, 7266, 7384], [23859, 7384, 23883], [23860, 23861, 23885], [23860, 23885, 23884], [23861, 23862, 23885], [23862, 23886, 23885], [23862, 23863, 23887], [23862, 23887, 23886], [23863, 23864, 23887], [23864, 23888, 23887], [23864, 23865, 23889], [23864, 23889, 23888], [23865, 23866, 23889], [23866, 23890, 23889], [23866, 23867, 23891], [23866, 23891, 23890], [23867, 23868, 23891], [23868, 23892, 23891], [23868, 23869, 23893], [23868, 23893, 23892], [23869, 23870, 23893], [23870, 23894, 23893], [23870, 23871, 23895], [23870, 23895, 23894], [23871, 23872, 23895], [23872, 23896, 23895], [23872, 23873, 23897], [23872, 23897, 23896], [23873, 23874, 23897], [23874, 23898, 23897], [23874, 23875, 23899], [23874, 23899, 23898], [23875, 23876, 23899], [23876, 23900, 23899], [23876, 23877, 23901], [23876, 23901, 23900], [23877, 23878, 23901], [23878, 23902, 23901], [23878, 23879, 23903], [23878, 23903, 23902], [23879, 23880, 23903], [23880, 23904, 23903], [23880, 23881, 23905], [23880, 23905, 23904], [23881, 23882, 23905], [23882, 23906, 23905], [23882, 23883, 23907], [23882, 23907, 23906], [23883, 7384, 23907], [7384, 7502, 23907], [23884, 23885, 23908], [23885, 23909, 23908], [23885, 23886, 23910], [23885, 23910, 23909], [23886, 23887, 23910], [23887, 23911, 23910], [23887, 23888, 23912], [23887, 23912, 23911], [23888, 23889, 23912], [23889, 23913, 23912], [23889, 23890, 23914], [23889, 23914, 23913], [23890, 23891, 23914], [23891, 23915, 23914], [23891, 23892, 23916], [23891, 23916, 23915], [23892, 23893, 23916], [23893, 23917, 23916], [23893, 23894, 23918], [23893, 23918, 23917], [23894, 23895, 23918], [23895, 23919, 23918], [23895, 23896, 23920], [23895, 23920, 23919], [23896, 23897, 23920], [23897, 23921, 23920], [23897, 23898, 23922], [23897, 23922, 23921], [23898, 23899, 23922], [23899, 23923, 23922], [23899, 23900, 23924], [23899, 23924, 23923], [23900, 23901, 23924], [23901, 23925, 23924], [23901, 23902, 23926], [23901, 23926, 23925], [23902, 23903, 23926], [23903, 23927, 23926], [23903, 23904, 23928], [23903, 23928, 23927], [23904, 23905, 23928], [23905, 23929, 23928], [23905, 23906, 23930], [23905, 23930, 23929], [23906, 23907, 23930], [23907, 23931, 23930], [23907, 7502, 7620], [23907, 7620, 23931], [23908, 23909, 23933], [23908, 23933, 23932], [23909, 23910, 23933], [23910, 23934, 23933], [23910, 23911, 23935], [23910, 23935, 23934], [23911, 23912, 23935], [23912, 23936, 23935], [23912, 23913, 23937], [23912, 23937, 23936], [23913, 23914, 23937], [23914, 23938, 23937], [23914, 23915, 23939], [23914, 23939, 23938], [23915, 23916, 23939], [23916, 23940, 23939], [23916, 23917, 23941], [23916, 23941, 23940], [23917, 23918, 23941], [23918, 23942, 23941], [23918, 23919, 23943], [23918, 23943, 23942], [23919, 23920, 23943], [23920, 23944, 23943], [23920, 23921, 23945], [23920, 23945, 23944], [23921, 23922, 23945], [23922, 23946, 23945], 
[23922, 23923, 23947], [23922, 23947, 23946], [23923, 23924, 23947], [23924, 23948, 23947], [23924, 23925, 23949], [23924, 23949, 23948], [23925, 23926, 23949], [23926, 23950, 23949], [23926, 23927, 23951], [23926, 23951, 23950], [23927, 23928, 23951], [23928, 23952, 23951], [23928, 23929, 23953], [23928, 23953, 23952], [23929, 23930, 23953], [23930, 23954, 23953], [23930, 23931, 23955], [23930, 23955, 23954], [23931, 7620, 23955], [7620, 7738, 23955], [23932, 23933, 23956], [23933, 23957, 23956], [23933, 23934, 23958], [23933, 23958, 23957], [23934, 23935, 23958], [23935, 23959, 23958], [23935, 23936, 23960], [23935, 23960, 23959], [23936, 23937, 23960], [23937, 23961, 23960], [23937, 23938, 23962], [23937, 23962, 23961], [23938, 23939, 23962], [23939, 23963, 23962], [23939, 23940, 23964], [23939, 23964, 23963], [23940, 23941, 23964], [23941, 23965, 23964], [23941, 23942, 23966], [23941, 23966, 23965], [23942, 23943, 23966], [23943, 23967, 23966], [23943, 23944, 23968], [23943, 23968, 23967], [23944, 23945, 23968], [23945, 23969, 23968], [23945, 23946, 23970], [23945, 23970, 23969], [23946, 23947, 23970], [23947, 23971, 23970], [23947, 23948, 23972], [23947, 23972, 23971], [23948, 23949, 23972], [23949, 23973, 23972], [23949, 23950, 23974], [23949, 23974, 23973], [23950, 23951, 23974], [23951, 23975, 23974], [23951, 23952, 23976], [23951, 23976, 23975], [23952, 23953, 23976], [23953, 23977, 23976], [23953, 23954, 23978], [23953, 23978, 23977], [23954, 23955, 23978], [23955, 23979, 23978], [23955, 7738, 7856], [23955, 7856, 23979], [23956, 23957, 23981], [23956, 23981, 23980], [23957, 23958, 23981], [23958, 23982, 23981], [23958, 23959, 23983], [23958, 23983, 23982], [23959, 23960, 23983], [23960, 23984, 23983], [23960, 23961, 23985], [23960, 23985, 23984], [23961, 23962, 23985], [23962, 23986, 23985], [23962, 23963, 23987], [23962, 23987, 23986], [23963, 23964, 23987], [23964, 23988, 23987], [23964, 23965, 23989], [23964, 23989, 23988], [23965, 23966, 23989], [23966, 23990, 23989], [23966, 23967, 23991], [23966, 23991, 23990], [23967, 23968, 23991], [23968, 23992, 23991], [23968, 23969, 23993], [23968, 23993, 23992], [23969, 23970, 23993], [23970, 23994, 23993], [23970, 23971, 23995], [23970, 23995, 23994], [23971, 23972, 23995], [23972, 23996, 23995], [23972, 23973, 23997], [23972, 23997, 23996], [23973, 23974, 23997], [23974, 23998, 23997], [23974, 23975, 23999], [23974, 23999, 23998], [23975, 23976, 23999], [23976, 24000, 23999], [23976, 23977, 24001], [23976, 24001, 24000], [23977, 23978, 24001], [23978, 24002, 24001], [23978, 23979, 24003], [23978, 24003, 24002], [23979, 7856, 24003], [7856, 7974, 24003], [23980, 23981, 24004], [23981, 24005, 24004], [23981, 23982, 24006], [23981, 24006, 24005], [23982, 23983, 24006], [23983, 24007, 24006], [23983, 23984, 24008], [23983, 24008, 24007], [23984, 23985, 24008], [23985, 24009, 24008], [23985, 23986, 24010], [23985, 24010, 24009], [23986, 23987, 24010], [23987, 24011, 24010], [23987, 23988, 24012], [23987, 24012, 24011], [23988, 23989, 24012], [23989, 24013, 24012], [23989, 23990, 24014], [23989, 24014, 24013], [23990, 23991, 24014], [23991, 24015, 24014], [23991, 23992, 24016], [23991, 24016, 24015], [23992, 23993, 24016], [23993, 24017, 24016], [23993, 23994, 24018], [23993, 24018, 24017], [23994, 23995, 24018], [23995, 24019, 24018], [23995, 23996, 24020], [23995, 24020, 24019], [23996, 23997, 24020], [23997, 24021, 24020], [23997, 23998, 24022], [23997, 24022, 24021], [23998, 23999, 24022], [23999, 24023, 24022], [23999, 24000, 
24024], [23999, 24024, 24023], [24000, 24001, 24024], [24001, 24025, 24024], [24001, 24002, 24026], [24001, 24026, 24025], [24002, 24003, 24026], [24003, 24027, 24026], [24003, 7974, 8092], [24003, 8092, 24027], [24004, 24005, 24029], [24004, 24029, 24028], [24005, 24006, 24029], [24006, 24030, 24029], [24006, 24007, 24031], [24006, 24031, 24030], [24007, 24008, 24031], [24008, 24032, 24031], [24008, 24009, 24033], [24008, 24033, 24032], [24009, 24010, 24033], [24010, 24034, 24033], [24010, 24011, 24035], [24010, 24035, 24034], [24011, 24012, 24035], [24012, 24036, 24035], [24012, 24013, 24037], [24012, 24037, 24036], [24013, 24014, 24037], [24014, 24038, 24037], [24014, 24015, 24039], [24014, 24039, 24038], [24015, 24016, 24039], [24016, 24040, 24039], [24016, 24017, 24041], [24016, 24041, 24040], [24017, 24018, 24041], [24018, 24042, 24041], [24018, 24019, 24043], [24018, 24043, 24042], [24019, 24020, 24043], [24020, 24044, 24043], [24020, 24021, 24045], [24020, 24045, 24044], [24021, 24022, 24045], [24022, 24046, 24045], [24022, 24023, 24047], [24022, 24047, 24046], [24023, 24024, 24047], [24024, 24048, 24047], [24024, 24025, 24049], [24024, 24049, 24048], [24025, 24026, 24049], [24026, 24050, 24049], [24026, 24027, 24051], [24026, 24051, 24050], [24027, 8092, 24051], [8092, 8210, 24051], [24028, 24029, 24052], [24029, 24053, 24052], [24029, 24030, 24054], [24029, 24054, 24053], [24030, 24031, 24054], [24031, 24055, 24054], [24031, 24032, 24056], [24031, 24056, 24055], [24032, 24033, 24056], [24033, 24057, 24056], [24033, 24034, 24058], [24033, 24058, 24057], [24034, 24035, 24058], [24035, 24059, 24058], [24035, 24036, 24060], [24035, 24060, 24059], [24036, 24037, 24060], [24037, 24061, 24060], [24037, 24038, 24062], [24037, 24062, 24061], [24038, 24039, 24062], [24039, 24063, 24062], [24039, 24040, 24064], [24039, 24064, 24063], [24040, 24041, 24064], [24041, 24065, 24064], [24041, 24042, 24066], [24041, 24066, 24065], [24042, 24043, 24066], [24043, 24067, 24066], [24043, 24044, 24068], [24043, 24068, 24067], [24044, 24045, 24068], [24045, 24069, 24068], [24045, 24046, 24070], [24045, 24070, 24069], [24046, 24047, 24070], [24047, 24071, 24070], [24047, 24048, 24072], [24047, 24072, 24071], [24048, 24049, 24072], [24049, 24073, 24072], [24049, 24050, 24074], [24049, 24074, 24073], [24050, 24051, 24074], [24051, 24075, 24074], [24051, 8210, 8328], [24051, 8328, 24075], [24052, 24053, 24077], [24052, 24077, 24076], [24053, 24054, 24077], [24054, 24078, 24077], [24054, 24055, 24079], [24054, 24079, 24078], [24055, 24056, 24079], [24056, 24080, 24079], [24056, 24057, 24081], [24056, 24081, 24080], [24057, 24058, 24081], [24058, 24082, 24081], [24058, 24059, 24083], [24058, 24083, 24082], [24059, 24060, 24083], [24060, 24084, 24083], [24060, 24061, 24085], [24060, 24085, 24084], [24061, 24062, 24085], [24062, 24086, 24085], [24062, 24063, 24087], [24062, 24087, 24086], [24063, 24064, 24087], [24064, 24088, 24087], [24064, 24065, 24089], [24064, 24089, 24088], [24065, 24066, 24089], [24066, 24090, 24089], [24066, 24067, 24091], [24066, 24091, 24090], [24067, 24068, 24091], [24068, 24092, 24091], [24068, 24069, 24093], [24068, 24093, 24092], [24069, 24070, 24093], [24070, 24094, 24093], [24070, 24071, 24095], [24070, 24095, 24094], [24071, 24072, 24095], [24072, 24096, 24095], [24072, 24073, 24097], [24072, 24097, 24096], [24073, 24074, 24097], [24074, 24098, 24097], [24074, 24075, 24099], [24074, 24099, 24098], [24075, 8328, 24099], [8328, 8446, 24099], [24076, 24077, 24100], [24077, 24101, 
24100], [24077, 24078, 24102], [24077, 24102, 24101], [24078, 24079, 24102], [24079, 24103, 24102], [24079, 24080, 24104], [24079, 24104, 24103], [24080, 24081, 24104], [24081, 24105, 24104], [24081, 24082, 24106], [24081, 24106, 24105], [24082, 24083, 24106], [24083, 24107, 24106], [24083, 24084, 24108], [24083, 24108, 24107], [24084, 24085, 24108], [24085, 24109, 24108], [24085, 24086, 24110], [24085, 24110, 24109], [24086, 24087, 24110], [24087, 24111, 24110], [24087, 24088, 24112], [24087, 24112, 24111], [24088, 24089, 24112], [24089, 24113, 24112], [24089, 24090, 24114], [24089, 24114, 24113], [24090, 24091, 24114], [24091, 24115, 24114], [24091, 24092, 24116], [24091, 24116, 24115], [24092, 24093, 24116], [24093, 24117, 24116], [24093, 24094, 24118], [24093, 24118, 24117], [24094, 24095, 24118], [24095, 24119, 24118], [24095, 24096, 24120], [24095, 24120, 24119], [24096, 24097, 24120], [24097, 24121, 24120], [24097, 24098, 24122], [24097, 24122, 24121], [24098, 24099, 24122], [24099, 24123, 24122], [24099, 8446, 8564], [24099, 8564, 24123], [24100, 24101, 24125], [24100, 24125, 24124], [24101, 24102, 24125], [24102, 24126, 24125], [24102, 24103, 24127], [24102, 24127, 24126], [24103, 24104, 24127], [24104, 24128, 24127], [24104, 24105, 24129], [24104, 24129, 24128], [24105, 24106, 24129], [24106, 24130, 24129], [24106, 24107, 24131], [24106, 24131, 24130], [24107, 24108, 24131], [24108, 24132, 24131], [24108, 24109, 24133], [24108, 24133, 24132], [24109, 24110, 24133], [24110, 24134, 24133], [24110, 24111, 24135], [24110, 24135, 24134], [24111, 24112, 24135], [24112, 24136, 24135], [24112, 24113, 24137], [24112, 24137, 24136], [24113, 24114, 24137], [24114, 24138, 24137], [24114, 24115, 24139], [24114, 24139, 24138], [24115, 24116, 24139], [24116, 24140, 24139], [24116, 24117, 24141], [24116, 24141, 24140], [24117, 24118, 24141], [24118, 24142, 24141], [24118, 24119, 24143], [24118, 24143, 24142], [24119, 24120, 24143], [24120, 24144, 24143], [24120, 24121, 24145], [24120, 24145, 24144], [24121, 24122, 24145], [24122, 24146, 24145], [24122, 24123, 24147], [24122, 24147, 24146], [24123, 8564, 24147], [8564, 8682, 24147], [24124, 24125, 24148], [24125, 24149, 24148], [24125, 24126, 24150], [24125, 24150, 24149], [24126, 24127, 24150], [24127, 24151, 24150], [24127, 24128, 24152], [24127, 24152, 24151], [24128, 24129, 24152], [24129, 24153, 24152], [24129, 24130, 24154], [24129, 24154, 24153], [24130, 24131, 24154], [24131, 24155, 24154], [24131, 24132, 24156], [24131, 24156, 24155], [24132, 24133, 24156], [24133, 24157, 24156], [24133, 24134, 24158], [24133, 24158, 24157], [24134, 24135, 24158], [24135, 24159, 24158], [24135, 24136, 24160], [24135, 24160, 24159], [24136, 24137, 24160], [24137, 24161, 24160], [24137, 24138, 24162], [24137, 24162, 24161], [24138, 24139, 24162], [24139, 24163, 24162], [24139, 24140, 24164], [24139, 24164, 24163], [24140, 24141, 24164], [24141, 24165, 24164], [24141, 24142, 24166], [24141, 24166, 24165], [24142, 24143, 24166], [24143, 24167, 24166], [24143, 24144, 24168], [24143, 24168, 24167], [24144, 24145, 24168], [24145, 24169, 24168], [24145, 24146, 24170], [24145, 24170, 24169], [24146, 24147, 24170], [24147, 24171, 24170], [24147, 8682, 8800], [24147, 8800, 24171], [24148, 24149, 24173], [24148, 24173, 24172], [24149, 24150, 24173], [24150, 24174, 24173], [24150, 24151, 24175], [24150, 24175, 24174], [24151, 24152, 24175], [24152, 24176, 24175], [24152, 24153, 24177], [24152, 24177, 24176], [24153, 24154, 24177], [24154, 24178, 24177], [24154, 
24155, 24179], [24154, 24179, 24178], [24155, 24156, 24179], [24156, 24180, 24179], [24156, 24157, 24181], [24156, 24181, 24180], [24157, 24158, 24181], [24158, 24182, 24181], [24158, 24159, 24183], [24158, 24183, 24182], [24159, 24160, 24183], [24160, 24184, 24183], [24160, 24161, 24185], [24160, 24185, 24184], [24161, 24162, 24185], [24162, 24186, 24185], [24162, 24163, 24187], [24162, 24187, 24186], [24163, 24164, 24187], [24164, 24188, 24187], [24164, 24165, 24189], [24164, 24189, 24188], [24165, 24166, 24189], [24166, 24190, 24189], [24166, 24167, 24191], [24166, 24191, 24190], [24167, 24168, 24191], [24168, 24192, 24191], [24168, 24169, 24193], [24168, 24193, 24192], [24169, 24170, 24193], [24170, 24194, 24193], [24170, 24171, 24195], [24170, 24195, 24194], [24171, 8800, 24195], [8800, 8918, 24195], [24172, 24173, 24196], [24173, 24197, 24196], [24173, 24174, 24198], [24173, 24198, 24197], [24174, 24175, 24198], [24175, 24199, 24198], [24175, 24176, 24200], [24175, 24200, 24199], [24176, 24177, 24200], [24177, 24201, 24200], [24177, 24178, 24202], [24177, 24202, 24201], [24178, 24179, 24202], [24179, 24203, 24202], [24179, 24180, 24204], [24179, 24204, 24203], [24180, 24181, 24204], [24181, 24205, 24204], [24181, 24182, 24206], [24181, 24206, 24205], [24182, 24183, 24206], [24183, 24207, 24206], [24183, 24184, 24208], [24183, 24208, 24207], [24184, 24185, 24208], [24185, 24209, 24208], [24185, 24186, 24210], [24185, 24210, 24209], [24186, 24187, 24210], [24187, 24211, 24210], [24187, 24188, 24212], [24187, 24212, 24211], [24188, 24189, 24212], [24189, 24213, 24212], [24189, 24190, 24214], [24189, 24214, 24213], [24190, 24191, 24214], [24191, 24215, 24214], [24191, 24192, 24216], [24191, 24216, 24215], [24192, 24193, 24216], [24193, 24217, 24216], [24193, 24194, 24218], [24193, 24218, 24217], [24194, 24195, 24218], [24195, 24219, 24218], [24195, 8918, 9036], [24195, 9036, 24219], [24196, 24197, 24221], [24196, 24221, 24220], [24197, 24198, 24221], [24198, 24222, 24221], [24198, 24199, 24223], [24198, 24223, 24222], [24199, 24200, 24223], [24200, 24224, 24223], [24200, 24201, 24225], [24200, 24225, 24224], [24201, 24202, 24225], [24202, 24226, 24225], [24202, 24203, 24227], [24202, 24227, 24226], [24203, 24204, 24227], [24204, 24228, 24227], [24204, 24205, 24229], [24204, 24229, 24228], [24205, 24206, 24229], [24206, 24230, 24229], [24206, 24207, 24231], [24206, 24231, 24230], [24207, 24208, 24231], [24208, 24232, 24231], [24208, 24209, 24233], [24208, 24233, 24232], [24209, 24210, 24233], [24210, 24234, 24233], [24210, 24211, 24235], [24210, 24235, 24234], [24211, 24212, 24235], [24212, 24236, 24235], [24212, 24213, 24237], [24212, 24237, 24236], [24213, 24214, 24237], [24214, 24238, 24237], [24214, 24215, 24239], [24214, 24239, 24238], [24215, 24216, 24239], [24216, 24240, 24239], [24216, 24217, 24241], [24216, 24241, 24240], [24217, 24218, 24241], [24218, 24242, 24241], [24218, 24219, 24243], [24218, 24243, 24242], [24219, 9036, 24243], [9036, 9154, 24243], [24220, 24221, 24244], [24221, 24245, 24244], [24221, 24222, 24246], [24221, 24246, 24245], [24222, 24223, 24246], [24223, 24247, 24246], [24223, 24224, 24248], [24223, 24248, 24247], [24224, 24225, 24248], [24225, 24249, 24248], [24225, 24226, 24250], [24225, 24250, 24249], [24226, 24227, 24250], [24227, 24251, 24250], [24227, 24228, 24252], [24227, 24252, 24251], [24228, 24229, 24252], [24229, 24253, 24252], [24229, 24230, 24254], [24229, 24254, 24253], [24230, 24231, 24254], [24231, 24255, 24254], [24231, 24232, 24256], 
[24231, 24256, 24255], [24232, 24233, 24256], [24233, 24257, 24256], [24233, 24234, 24258], [24233, 24258, 24257], [24234, 24235, 24258], [24235, 24259, 24258], [24235, 24236, 24260], [24235, 24260, 24259], [24236, 24237, 24260], [24237, 24261, 24260], [24237, 24238, 24262], [24237, 24262, 24261], [24238, 24239, 24262], [24239, 24263, 24262], [24239, 24240, 24264], [24239, 24264, 24263], [24240, 24241, 24264], [24241, 24265, 24264], [24241, 24242, 24266], [24241, 24266, 24265], [24242, 24243, 24266], [24243, 24267, 24266], [24243, 9154, 9272], [24243, 9272, 24267], [24244, 24245, 24269], [24244, 24269, 24268], [24245, 24246, 24269], [24246, 24270, 24269], [24246, 24247, 24271], [24246, 24271, 24270], [24247, 24248, 24271], [24248, 24272, 24271], [24248, 24249, 24273], [24248, 24273, 24272], [24249, 24250, 24273], [24250, 24274, 24273], [24250, 24251, 24275], [24250, 24275, 24274], [24251, 24252, 24275], [24252, 24276, 24275], [24252, 24253, 24277], [24252, 24277, 24276], [24253, 24254, 24277], [24254, 24278, 24277], [24254, 24255, 24279], [24254, 24279, 24278], [24255, 24256, 24279], [24256, 24280, 24279], [24256, 24257, 24281], [24256, 24281, 24280], [24257, 24258, 24281], [24258, 24282, 24281], [24258, 24259, 24283], [24258, 24283, 24282], [24259, 24260, 24283], [24260, 24284, 24283], [24260, 24261, 24285], [24260, 24285, 24284], [24261, 24262, 24285], [24262, 24286, 24285], [24262, 24263, 24287], [24262, 24287, 24286], [24263, 24264, 24287], [24264, 24288, 24287], [24264, 24265, 24289], [24264, 24289, 24288], [24265, 24266, 24289], [24266, 24290, 24289], [24266, 24267, 24291], [24266, 24291, 24290], [24267, 9272, 24291], [9272, 9390, 24291], [24268, 24269, 24292], [24269, 24293, 24292], [24269, 24270, 24294], [24269, 24294, 24293], [24270, 24271, 24294], [24271, 24295, 24294], [24271, 24272, 24296], [24271, 24296, 24295], [24272, 24273, 24296], [24273, 24297, 24296], [24273, 24274, 24298], [24273, 24298, 24297], [24274, 24275, 24298], [24275, 24299, 24298], [24275, 24276, 24300], [24275, 24300, 24299], [24276, 24277, 24300], [24277, 24301, 24300], [24277, 24278, 24302], [24277, 24302, 24301], [24278, 24279, 24302], [24279, 24303, 24302], [24279, 24280, 24304], [24279, 24304, 24303], [24280, 24281, 24304], [24281, 24305, 24304], [24281, 24282, 24306], [24281, 24306, 24305], [24282, 24283, 24306], [24283, 24307, 24306], [24283, 24284, 24308], [24283, 24308, 24307], [24284, 24285, 24308], [24285, 24309, 24308], [24285, 24286, 24310], [24285, 24310, 24309], [24286, 24287, 24310], [24287, 24311, 24310], [24287, 24288, 24312], [24287, 24312, 24311], [24288, 24289, 24312], [24289, 24313, 24312], [24289, 24290, 24314], [24289, 24314, 24313], [24290, 24291, 24314], [24291, 24315, 24314], [24291, 9390, 9508], [24291, 9508, 24315], [24292, 24293, 24317], [24292, 24317, 24316], [24293, 24294, 24317], [24294, 24318, 24317], [24294, 24295, 24319], [24294, 24319, 24318], [24295, 24296, 24319], [24296, 24320, 24319], [24296, 24297, 24321], [24296, 24321, 24320], [24297, 24298, 24321], [24298, 24322, 24321], [24298, 24299, 24323], [24298, 24323, 24322], [24299, 24300, 24323], [24300, 24324, 24323], [24300, 24301, 24325], [24300, 24325, 24324], [24301, 24302, 24325], [24302, 24326, 24325], [24302, 24303, 24327], [24302, 24327, 24326], [24303, 24304, 24327], [24304, 24328, 24327], [24304, 24305, 24329], [24304, 24329, 24328], [24305, 24306, 24329], [24306, 24330, 24329], [24306, 24307, 24331], [24306, 24331, 24330], [24307, 24308, 24331], [24308, 24332, 24331], [24308, 24309, 24333], [24308, 24333, 
24332], [24309, 24310, 24333], [24310, 24334, 24333], [24310, 24311, 24335], [24310, 24335, 24334], [24311, 24312, 24335], [24312, 24336, 24335], [24312, 24313, 24337], [24312, 24337, 24336], [24313, 24314, 24337], [24314, 24338, 24337], [24314, 24315, 24339], [24314, 24339, 24338], [24315, 9508, 24339], [9508, 9626, 24339], [24316, 24317, 24340], [24317, 24341, 24340], [24317, 24318, 24342], [24317, 24342, 24341], [24318, 24319, 24342], [24319, 24343, 24342], [24319, 24320, 24344], [24319, 24344, 24343], [24320, 24321, 24344], [24321, 24345, 24344], [24321, 24322, 24346], [24321, 24346, 24345], [24322, 24323, 24346], [24323, 24347, 24346], [24323, 24324, 24348], [24323, 24348, 24347], [24324, 24325, 24348], [24325, 24349, 24348], [24325, 24326, 24350], [24325, 24350, 24349], [24326, 24327, 24350], [24327, 24351, 24350], [24327, 24328, 24352], [24327, 24352, 24351], [24328, 24329, 24352], [24329, 24353, 24352], [24329, 24330, 24354], [24329, 24354, 24353], [24330, 24331, 24354], [24331, 24355, 24354], [24331, 24332, 24356], [24331, 24356, 24355], [24332, 24333, 24356], [24333, 24357, 24356], [24333, 24334, 24358], [24333, 24358, 24357], [24334, 24335, 24358], [24335, 24359, 24358], [24335, 24336, 24360], [24335, 24360, 24359], [24336, 24337, 24360], [24337, 24361, 24360], [24337, 24338, 24362], [24337, 24362, 24361], [24338, 24339, 24362], [24339, 24363, 24362], [24339, 9626, 9744], [24339, 9744, 24363], [24340, 24341, 24365], [24340, 24365, 24364], [24341, 24342, 24365], [24342, 24366, 24365], [24342, 24343, 24367], [24342, 24367, 24366], [24343, 24344, 24367], [24344, 24368, 24367], [24344, 24345, 24369], [24344, 24369, 24368], [24345, 24346, 24369], [24346, 24370, 24369], [24346, 24347, 24371], [24346, 24371, 24370], [24347, 24348, 24371], [24348, 24372, 24371], [24348, 24349, 24373], [24348, 24373, 24372], [24349, 24350, 24373], [24350, 24374, 24373], [24350, 24351, 24375], [24350, 24375, 24374], [24351, 24352, 24375], [24352, 24376, 24375], [24352, 24353, 24377], [24352, 24377, 24376], [24353, 24354, 24377], [24354, 24378, 24377], [24354, 24355, 24379], [24354, 24379, 24378], [24355, 24356, 24379], [24356, 24380, 24379], [24356, 24357, 24381], [24356, 24381, 24380], [24357, 24358, 24381], [24358, 24382, 24381], [24358, 24359, 24383], [24358, 24383, 24382], [24359, 24360, 24383], [24360, 24384, 24383], [24360, 24361, 24385], [24360, 24385, 24384], [24361, 24362, 24385], [24362, 24386, 24385], [24362, 24363, 24387], [24362, 24387, 24386], [24363, 9744, 24387], [9744, 9864, 24387], [24364, 24365, 24388], [24365, 24389, 24388], [24365, 24366, 24390], [24365, 24390, 24389], [24366, 24367, 24390], [24367, 24391, 24390], [24367, 24368, 24392], [24367, 24392, 24391], [24368, 24369, 24392], [24369, 24393, 24392], [24369, 24370, 24394], [24369, 24394, 24393], [24370, 24371, 24394], [24371, 24395, 24394], [24371, 24372, 24396], [24371, 24396, 24395], [24372, 24373, 24396], [24373, 24397, 24396], [24373, 24374, 24398], [24373, 24398, 24397], [24374, 24375, 24398], [24375, 24399, 24398], [24375, 24376, 24400], [24375, 24400, 24399], [24376, 24377, 24400], [24377, 24401, 24400], [24377, 24378, 24402], [24377, 24402, 24401], [24378, 24379, 24402], [24379, 24403, 24402], [24379, 24380, 24404], [24379, 24404, 24403], [24380, 24381, 24404], [24381, 24405, 24404], [24381, 24382, 24406], [24381, 24406, 24405], [24382, 24383, 24406], [24383, 24407, 24406], [24383, 24384, 24408], [24383, 24408, 24407], [24384, 24385, 24408], [24385, 24409, 24408], [24385, 24386, 24410], [24385, 24410, 24409], [24386, 
24387, 24410], [24387, 24411, 24410], [24387, 9864, 9985], [24387, 9985, 24411], [24388, 24389, 24413], [24388, 24413, 24412], [24389, 24390, 24413], [24390, 24414, 24413], [24390, 24391, 24415], [24390, 24415, 24414], [24391, 24392, 24415], [24392, 24416, 24415], [24392, 24393, 24417], [24392, 24417, 24416], [24393, 24394, 24417], [24394, 24418, 24417], [24394, 24395, 24419], [24394, 24419, 24418], [24395, 24396, 24419], [24396, 24420, 24419], [24396, 24397, 24421], [24396, 24421, 24420], [24397, 24398, 24421], [24398, 24422, 24421], [24398, 24399, 24423], [24398, 24423, 24422], [24399, 24400, 24423], [24400, 24424, 24423], [24400, 24401, 24425], [24400, 24425, 24424], [24401, 24402, 24425], [24402, 24426, 24425], [24402, 24403, 24427], [24402, 24427, 24426], [24403, 24404, 24427], [24404, 24428, 24427], [24404, 24405, 24429], [24404, 24429, 24428], [24405, 24406, 24429], [24406, 24430, 24429], [24406, 24407, 24431], [24406, 24431, 24430], [24407, 24408, 24431], [24408, 24432, 24431], [24408, 24409, 24433], [24408, 24433, 24432], [24409, 24410, 24433], [24410, 24434, 24433], [24410, 24411, 24435], [24410, 24435, 24434], [24411, 9985, 24435], [9985, 10111, 24435], [24412, 24413, 24436], [24413, 24437, 24436], [24413, 24414, 24438], [24413, 24438, 24437], [24414, 24415, 24438], [24415, 24439, 24438], [24415, 24416, 24440], [24415, 24440, 24439], [24416, 24417, 24440], [24417, 24441, 24440], [24417, 24418, 24442], [24417, 24442, 24441], [24418, 24419, 24442], [24419, 24443, 24442], [24419, 24420, 24444], [24419, 24444, 24443], [24420, 24421, 24444], [24421, 24445, 24444], [24421, 24422, 24446], [24421, 24446, 24445], [24422, 24423, 24446], [24423, 24447, 24446], [24423, 24424, 24448], [24423, 24448, 24447], [24424, 24425, 24448], [24425, 24449, 24448], [24425, 24426, 24450], [24425, 24450, 24449], [24426, 24427, 24450], [24427, 24451, 24450], [24427, 24428, 24452], [24427, 24452, 24451], [24428, 24429, 24452], [24429, 24453, 24452], [24429, 24430, 24454], [24429, 24454, 24453], [24430, 24431, 24454], [24431, 24455, 24454], [24431, 24432, 24456], [24431, 24456, 24455], [24432, 24433, 24456], [24433, 24457, 24456], [24433, 24434, 24458], [24433, 24458, 24457], [24434, 24435, 24458], [24435, 24459, 24458], [24435, 10111, 10237], [24435, 10237, 24459], [24436, 24437, 24461], [24436, 24461, 24460], [24437, 24438, 24461], [24438, 24462, 24461], [24438, 24439, 24463], [24438, 24463, 24462], [24439, 24440, 24463], [24440, 24464, 24463], [24440, 24441, 24465], [24440, 24465, 24464], [24441, 24442, 24465], [24442, 24466, 24465], [24442, 24443, 24467], [24442, 24467, 24466], [24443, 24444, 24467], [24444, 24468, 24467], [24444, 24445, 24469], [24444, 24469, 24468], [24445, 24446, 24469], [24446, 24470, 24469], [24446, 24447, 24471], [24446, 24471, 24470], [24447, 24448, 24471], [24448, 24472, 24471], [24448, 24449, 24473], [24448, 24473, 24472], [24449, 24450, 24473], [24450, 24474, 24473], [24450, 24451, 24475], [24450, 24475, 24474], [24451, 24452, 24475], [24452, 24476, 24475], [24452, 24453, 24477], [24452, 24477, 24476], [24453, 24454, 24477], [24454, 24478, 24477], [24454, 24455, 24479], [24454, 24479, 24478], [24455, 24456, 24479], [24456, 24480, 24479], [24456, 24457, 24481], [24456, 24481, 24480], [24457, 24458, 24481], [24458, 24482, 24481], [24458, 24459, 24483], [24458, 24483, 24482], [24459, 10237, 24483], [10237, 10366, 24483], [24460, 24461, 24484], [24461, 24485, 24484], [24461, 24462, 24486], [24461, 24486, 24485], [24462, 24463, 24486], [24463, 24487, 24486], [24463, 24464, 24488], 
[24463, 24488, 24487], [24464, 24465, 24488], [24465, 24489, 24488], [24465, 24466, 24490], [24465, 24490, 24489], [24466, 24467, 24490], [24467, 24491, 24490], [24467, 24468, 24492], [24467, 24492, 24491], [24468, 24469, 24492], [24469, 24493, 24492], [24469, 24470, 24494], [24469, 24494, 24493], [24470, 24471, 24494], [24471, 24495, 24494], [24471, 24472, 24496], [24471, 24496, 24495], [24472, 24473, 24496], [24473, 24497, 24496], [24473, 24474, 24498], [24473, 24498, 24497], [24474, 24475, 24498], [24475, 24499, 24498], [24475, 24476, 24500], [24475, 24500, 24499], [24476, 24477, 24500], [24477, 24501, 24500], [24477, 24478, 24502], [24477, 24502, 24501], [24478, 24479, 24502], [24479, 24503, 24502], [24479, 24480, 24504], [24479, 24504, 24503], [24480, 24481, 24504], [24481, 24505, 24504], [24481, 24482, 24506], [24481, 24506, 24505], [24482, 24483, 24506], [24483, 24507, 24506], [24483, 10366, 10495], [24483, 10495, 24507], [24484, 24485, 24509], [24484, 24509, 24508], [24485, 24486, 24509], [24486, 24510, 24509], [24486, 24487, 24511], [24486, 24511, 24510], [24487, 24488, 24511], [24488, 24512, 24511], [24488, 24489, 24513], [24488, 24513, 24512], [24489, 24490, 24513], [24490, 24514, 24513], [24490, 24491, 24515], [24490, 24515, 24514], [24491, 24492, 24515], [24492, 24516, 24515], [24492, 24493, 24517], [24492, 24517, 24516], [24493, 24494, 24517], [24494, 24518, 24517], [24494, 24495, 24519], [24494, 24519, 24518], [24495, 24496, 24519], [24496, 24520, 24519], [24496, 24497, 24521], [24496, 24521, 24520], [24497, 24498, 24521], [24498, 24522, 24521], [24498, 24499, 24523], [24498, 24523, 24522], [24499, 24500, 24523], [24500, 24524, 24523], [24500, 24501, 24525], [24500, 24525, 24524], [24501, 24502, 24525], [24502, 24526, 24525], [24502, 24503, 24527], [24502, 24527, 24526], [24503, 24504, 24527], [24504, 24528, 24527], [24504, 24505, 24529], [24504, 24529, 24528], [24505, 24506, 24529], [24506, 24530, 24529], [24506, 24507, 24531], [24506, 24531, 24530], [24507, 10495, 24531], [10495, 10624, 24531], [24508, 24509, 24532], [24509, 24533, 24532], [24509, 24510, 24534], [24509, 24534, 24533], [24510, 24511, 24534], [24511, 24535, 24534], [24511, 24512, 24536], [24511, 24536, 24535], [24512, 24513, 24536], [24513, 24537, 24536], [24513, 24514, 24538], [24513, 24538, 24537], [24514, 24515, 24538], [24515, 24539, 24538], [24515, 24516, 24540], [24515, 24540, 24539], [24516, 24517, 24540], [24517, 24541, 24540], [24517, 24518, 24542], [24517, 24542, 24541], [24518, 24519, 24542], [24519, 24543, 24542], [24519, 24520, 24544], [24519, 24544, 24543], [24520, 24521, 24544], [24521, 24545, 24544], [24521, 24522, 24546], [24521, 24546, 24545], [24522, 24523, 24546], [24523, 24547, 24546], [24523, 24524, 24548], [24523, 24548, 24547], [24524, 24525, 24548], [24525, 24549, 24548], [24525, 24526, 24550], [24525, 24550, 24549], [24526, 24527, 24550], [24527, 24551, 24550], [24527, 24528, 24552], [24527, 24552, 24551], [24528, 24529, 24552], [24529, 24553, 24552], [24529, 24530, 24554], [24529, 24554, 24553], [24530, 24531, 24554], [24531, 24555, 24554], [24531, 10624, 10753], [24531, 10753, 24555], [24532, 24533, 24557], [24532, 24557, 24556], [24533, 24534, 24557], [24534, 24558, 24557], [24534, 24535, 24559], [24534, 24559, 24558], [24535, 24536, 24559], [24536, 24560, 24559], [24536, 24537, 24561], [24536, 24561, 24560], [24537, 24538, 24561], [24538, 24562, 24561], [24538, 24539, 24563], [24538, 24563, 24562], [24539, 24540, 24563], [24540, 24564, 24563], [24540, 24541, 24565], [24540, 
24565, 24564], [24541, 24542, 24565], [24542, 24566, 24565], [24542, 24543, 24567], [24542, 24567, 24566], [24543, 24544, 24567], [24544, 24568, 24567], [24544, 24545, 24569], [24544, 24569, 24568], [24545, 24546, 24569], [24546, 24570, 24569], [24546, 24547, 24571], [24546, 24571, 24570], [24547, 24548, 24571], [24548, 24572, 24571], [24548, 24549, 24573], [24548, 24573, 24572], [24549, 24550, 24573], [24550, 24574, 24573], [24550, 24551, 24575], [24550, 24575, 24574], [24551, 24552, 24575], [24552, 24576, 24575], [24552, 24553, 24577], [24552, 24577, 24576], [24553, 24554, 24577], [24554, 24578, 24577], [24554, 24555, 24579], [24554, 24579, 24578], [24555, 10753, 24579], [10753, 10882, 24579], [24556, 24557, 24580], [24557, 24581, 24580], [24557, 24558, 24582], [24557, 24582, 24581], [24558, 24559, 24582], [24559, 24583, 24582], [24559, 24560, 24584], [24559, 24584, 24583], [24560, 24561, 24584], [24561, 24585, 24584], [24561, 24562, 24586], [24561, 24586, 24585], [24562, 24563, 24586], [24563, 24587, 24586], [24563, 24564, 24588], [24563, 24588, 24587], [24564, 24565, 24588], [24565, 24589, 24588], [24565, 24566, 24590], [24565, 24590, 24589], [24566, 24567, 24590], [24567, 24591, 24590], [24567, 24568, 24592], [24567, 24592, 24591], [24568, 24569, 24592], [24569, 24593, 24592], [24569, 24570, 24594], [24569, 24594, 24593], [24570, 24571, 24594], [24571, 24595, 24594], [24571, 24572, 24596], [24571, 24596, 24595], [24572, 24573, 24596], [24573, 24597, 24596], [24573, 24574, 24598], [24573, 24598, 24597], [24574, 24575, 24598], [24575, 24599, 24598], [24575, 24576, 24600], [24575, 24600, 24599], [24576, 24577, 24600], [24577, 24601, 24600], [24577, 24578, 24602], [24577, 24602, 24601], [24578, 24579, 24602], [24579, 24603, 24602], [24579, 10882, 11011], [24579, 11011, 24603], [24580, 24581, 24605], [24580, 24605, 24604], [24581, 24582, 24605], [24582, 24606, 24605], [24582, 24583, 24607], [24582, 24607, 24606], [24583, 24584, 24607], [24584, 24608, 24607], [24584, 24585, 24609], [24584, 24609, 24608], [24585, 24586, 24609], [24586, 24610, 24609], [24586, 24587, 24611], [24586, 24611, 24610], [24587, 24588, 24611], [24588, 24612, 24611], [24588, 24589, 24613], [24588, 24613, 24612], [24589, 24590, 24613], [24590, 24614, 24613], [24590, 24591, 24615], [24590, 24615, 24614], [24591, 24592, 24615], [24592, 24616, 24615], [24592, 24593, 24617], [24592, 24617, 24616], [24593, 24594, 24617], [24594, 24618, 24617], [24594, 24595, 24619], [24594, 24619, 24618], [24595, 24596, 24619], [24596, 24620, 24619], [24596, 24597, 24621], [24596, 24621, 24620], [24597, 24598, 24621], [24598, 24622, 24621], [24598, 24599, 24623], [24598, 24623, 24622], [24599, 24600, 24623], [24600, 24624, 24623], [24600, 24601, 24625], [24600, 24625, 24624], [24601, 24602, 24625], [24602, 24626, 24625], [24602, 24603, 24627], [24602, 24627, 24626], [24603, 11011, 24627], [11011, 11140, 24627], [24604, 24605, 24628], [24605, 24629, 24628], [24605, 24606, 24630], [24605, 24630, 24629], [24606, 24607, 24630], [24607, 24631, 24630], [24607, 24608, 24632], [24607, 24632, 24631], [24608, 24609, 24632], [24609, 24633, 24632], [24609, 24610, 24634], [24609, 24634, 24633], [24610, 24611, 24634], [24611, 24635, 24634], [24611, 24612, 24636], [24611, 24636, 24635], [24612, 24613, 24636], [24613, 24637, 24636], [24613, 24614, 24638], [24613, 24638, 24637], [24614, 24615, 24638], [24615, 24639, 24638], [24615, 24616, 24640], [24615, 24640, 24639], [24616, 24617, 24640], [24617, 24641, 24640], [24617, 24618, 24642], [24617, 24642, 
24641], [24618, 24619, 24642], [24619, 24643, 24642], [24619, 24620, 24644], [24619, 24644, 24643], [24620, 24621, 24644], [24621, 24645, 24644], [24621, 24622, 24646], [24621, 24646, 24645], [24622, 24623, 24646], [24623, 24647, 24646], [24623, 24624, 24648], [24623, 24648, 24647], [24624, 24625, 24648], [24625, 24649, 24648], [24625, 24626, 24650], [24625, 24650, 24649], [24626, 24627, 24650], [24627, 24651, 24650], [24627, 11140, 11269], [24627, 11269, 24651], [24628, 24629, 24653], [24628, 24653, 24652], [24629, 24630, 24653], [24630, 24654, 24653], [24630, 24631, 24655], [24630, 24655, 24654], [24631, 24632, 24655], [24632, 24656, 24655], [24632, 24633, 24657], [24632, 24657, 24656], [24633, 24634, 24657], [24634, 24658, 24657], [24634, 24635, 24659], [24634, 24659, 24658], [24635, 24636, 24659], [24636, 24660, 24659], [24636, 24637, 24661], [24636, 24661, 24660], [24637, 24638, 24661], [24638, 24662, 24661], [24638, 24639, 24663], [24638, 24663, 24662], [24639, 24640, 24663], [24640, 24664, 24663], [24640, 24641, 24665], [24640, 24665, 24664], [24641, 24642, 24665], [24642, 24666, 24665], [24642, 24643, 24667], [24642, 24667, 24666], [24643, 24644, 24667], [24644, 24668, 24667], [24644, 24645, 24669], [24644, 24669, 24668], [24645, 24646, 24669], [24646, 24670, 24669], [24646, 24647, 24671], [24646, 24671, 24670], [24647, 24648, 24671], [24648, 24672, 24671], [24648, 24649, 24673], [24648, 24673, 24672], [24649, 24650, 24673], [24650, 24674, 24673], [24650, 24651, 24675], [24650, 24675, 24674], [24651, 11269, 24675], [11269, 11398, 24675], [24652, 24653, 24676], [24653, 24677, 24676], [24653, 24654, 24678], [24653, 24678, 24677], [24654, 24655, 24678], [24655, 24679, 24678], [24655, 24656, 24680], [24655, 24680, 24679], [24656, 24657, 24680], [24657, 24681, 24680], [24657, 24658, 24682], [24657, 24682, 24681], [24658, 24659, 24682], [24659, 24683, 24682], [24659, 24660, 24684], [24659, 24684, 24683], [24660, 24661, 24684], [24661, 24685, 24684], [24661, 24662, 24686], [24661, 24686, 24685], [24662, 24663, 24686], [24663, 24687, 24686], [24663, 24664, 24688], [24663, 24688, 24687], [24664, 24665, 24688], [24665, 24689, 24688], [24665, 24666, 24690], [24665, 24690, 24689], [24666, 24667, 24690], [24667, 24691, 24690], [24667, 24668, 24692], [24667, 24692, 24691], [24668, 24669, 24692], [24669, 24693, 24692], [24669, 24670, 24694], [24669, 24694, 24693], [24670, 24671, 24694], [24671, 24695, 24694], [24671, 24672, 24696], [24671, 24696, 24695], [24672, 24673, 24696], [24673, 24697, 24696], [24673, 24674, 24698], [24673, 24698, 24697], [24674, 24675, 24698], [24675, 24699, 24698], [24675, 11398, 11527], [24675, 11527, 24699], [24676, 24677, 24701], [24676, 24701, 24700], [24677, 24678, 24701], [24678, 24702, 24701], [24678, 24679, 24703], [24678, 24703, 24702], [24679, 24680, 24703], [24680, 24704, 24703], [24680, 24681, 24705], [24680, 24705, 24704], [24681, 24682, 24705], [24682, 24706, 24705], [24682, 24683, 24707], [24682, 24707, 24706], [24683, 24684, 24707], [24684, 24708, 24707], [24684, 24685, 24709], [24684, 24709, 24708], [24685, 24686, 24709], [24686, 24710, 24709], [24686, 24687, 24711], [24686, 24711, 24710], [24687, 24688, 24711], [24688, 24712, 24711], [24688, 24689, 24713], [24688, 24713, 24712], [24689, 24690, 24713], [24690, 24714, 24713], [24690, 24691, 24715], [24690, 24715, 24714], [24691, 24692, 24715], [24692, 24716, 24715], [24692, 24693, 24717], [24692, 24717, 24716], [24693, 24694, 24717], [24694, 24718, 24717], [24694, 24695, 24719], [24694, 24719, 24718], 
[24695, 24696, 24719], [24696, 24720, 24719], [24696, 24697, 24721], [24696, 24721, 24720], [24697, 24698, 24721], [24698, 24722, 24721], [24698, 24699, 24723], [24698, 24723, 24722], [24699, 11527, 24723], [11527, 11656, 24723], [24700, 24701, 24724], [24701, 24725, 24724], [24701, 24702, 24726], [24701, 24726, 24725], [24702, 24703, 24726], [24703, 24727, 24726], [24703, 24704, 24728], [24703, 24728, 24727], [24704, 24705, 24728], [24705, 24729, 24728], [24705, 24706, 24730], [24705, 24730, 24729], [24706, 24707, 24730], [24707, 24731, 24730], [24707, 24708, 24732], [24707, 24732, 24731], [24708, 24709, 24732], [24709, 24733, 24732], [24709, 24710, 24734], [24709, 24734, 24733], [24710, 24711, 24734], [24711, 24735, 24734], [24711, 24712, 24736], [24711, 24736, 24735], [24712, 24713, 24736], [24713, 24737, 24736], [24713, 24714, 24738], [24713, 24738, 24737], [24714, 24715, 24738], [24715, 24739, 24738], [24715, 24716, 24740], [24715, 24740, 24739], [24716, 24717, 24740], [24717, 24741, 24740], [24717, 24718, 24742], [24717, 24742, 24741], [24718, 24719, 24742], [24719, 24743, 24742], [24719, 24720, 24744], [24719, 24744, 24743], [24720, 24721, 24744], [24721, 24745, 24744], [24721, 24722, 24746], [24721, 24746, 24745], [24722, 24723, 24746], [24723, 24747, 24746], [24723, 11656, 11785], [24723, 11785, 24747], [24724, 24725, 24749], [24724, 24749, 24748], [24725, 24726, 24749], [24726, 24750, 24749], [24726, 24727, 24751], [24726, 24751, 24750], [24727, 24728, 24751], [24728, 24752, 24751], [24728, 24729, 24753], [24728, 24753, 24752], [24729, 24730, 24753], [24730, 24754, 24753], [24730, 24731, 24755], [24730, 24755, 24754], [24731, 24732, 24755], [24732, 24756, 24755], [24732, 24733, 24757], [24732, 24757, 24756], [24733, 24734, 24757], [24734, 24758, 24757], [24734, 24735, 24759], [24734, 24759, 24758], [24735, 24736, 24759], [24736, 24760, 24759], [24736, 24737, 24761], [24736, 24761, 24760], [24737, 24738, 24761], [24738, 24762, 24761], [24738, 24739, 24763], [24738, 24763, 24762], [24739, 24740, 24763], [24740, 24764, 24763], [24740, 24741, 24765], [24740, 24765, 24764], [24741, 24742, 24765], [24742, 24766, 24765], [24742, 24743, 24767], [24742, 24767, 24766], [24743, 24744, 24767], [24744, 24768, 24767], [24744, 24745, 24769], [24744, 24769, 24768], [24745, 24746, 24769], [24746, 24770, 24769], [24746, 24747, 24771], [24746, 24771, 24770], [24747, 11785, 24771], [11785, 11914, 24771], [24748, 24749, 24772], [24749, 24773, 24772], [24749, 24750, 24774], [24749, 24774, 24773], [24750, 24751, 24774], [24751, 24775, 24774], [24751, 24752, 24776], [24751, 24776, 24775], [24752, 24753, 24776], [24753, 24777, 24776], [24753, 24754, 24778], [24753, 24778, 24777], [24754, 24755, 24778], [24755, 24779, 24778], [24755, 24756, 24780], [24755, 24780, 24779], [24756, 24757, 24780], [24757, 24781, 24780], [24757, 24758, 24782], [24757, 24782, 24781], [24758, 24759, 24782], [24759, 24783, 24782], [24759, 24760, 24784], [24759, 24784, 24783], [24760, 24761, 24784], [24761, 24785, 24784], [24761, 24762, 24786], [24761, 24786, 24785], [24762, 24763, 24786], [24763, 24787, 24786], [24763, 24764, 24788], [24763, 24788, 24787], [24764, 24765, 24788], [24765, 24789, 24788], [24765, 24766, 24790], [24765, 24790, 24789], [24766, 24767, 24790], [24767, 24791, 24790], [24767, 24768, 24792], [24767, 24792, 24791], [24768, 24769, 24792], [24769, 24793, 24792], [24769, 24770, 24794], [24769, 24794, 24793], [24770, 24771, 24794], [24771, 24795, 24794], [24771, 11914, 12043], [24771, 12043, 24795], [24772, 
24773, 24797], [24772, 24797, 24796], [24773, 24774, 24797], [24774, 24798, 24797], [24774, 24775, 24799], [24774, 24799, 24798], [24775, 24776, 24799], [24776, 24800, 24799], [24776, 24777, 24801], [24776, 24801, 24800], [24777, 24778, 24801], [24778, 24802, 24801], [24778, 24779, 24803], [24778, 24803, 24802], [24779, 24780, 24803], [24780, 24804, 24803], [24780, 24781, 24805], [24780, 24805, 24804], [24781, 24782, 24805], [24782, 24806, 24805], [24782, 24783, 24807], [24782, 24807, 24806], [24783, 24784, 24807], [24784, 24808, 24807], [24784, 24785, 24809], [24784, 24809, 24808], [24785, 24786, 24809], [24786, 24810, 24809], [24786, 24787, 24811], [24786, 24811, 24810], [24787, 24788, 24811], [24788, 24812, 24811], [24788, 24789, 24813], [24788, 24813, 24812], [24789, 24790, 24813], [24790, 24814, 24813], [24790, 24791, 24815], [24790, 24815, 24814], [24791, 24792, 24815], [24792, 24816, 24815], [24792, 24793, 24817], [24792, 24817, 24816], [24793, 24794, 24817], [24794, 24818, 24817], [24794, 24795, 24819], [24794, 24819, 24818], [24795, 12043, 24819], [12043, 12172, 24819], [24796, 24797, 24820], [24797, 24821, 24820], [24797, 24798, 24822], [24797, 24822, 24821], [24798, 24799, 24822], [24799, 24823, 24822], [24799, 24800, 24824], [24799, 24824, 24823], [24800, 24801, 24824], [24801, 24825, 24824], [24801, 24802, 24826], [24801, 24826, 24825], [24802, 24803, 24826], [24803, 24827, 24826], [24803, 24804, 24828], [24803, 24828, 24827], [24804, 24805, 24828], [24805, 24829, 24828], [24805, 24806, 24830], [24805, 24830, 24829], [24806, 24807, 24830], [24807, 24831, 24830], [24807, 24808, 24832], [24807, 24832, 24831], [24808, 24809, 24832], [24809, 24833, 24832], [24809, 24810, 24834], [24809, 24834, 24833], [24810, 24811, 24834], [24811, 24835, 24834], [24811, 24812, 24836], [24811, 24836, 24835], [24812, 24813, 24836], [24813, 24837, 24836], [24813, 24814, 24838], [24813, 24838, 24837], [24814, 24815, 24838], [24815, 24839, 24838], [24815, 24816, 24840], [24815, 24840, 24839], [24816, 24817, 24840], [24817, 24841, 24840], [24817, 24818, 24842], [24817, 24842, 24841], [24818, 24819, 24842], [24819, 24843, 24842], [24819, 12172, 12301], [24819, 12301, 24843], [24820, 24821, 24845], [24820, 24845, 24844], [24821, 24822, 24845], [24822, 24846, 24845], [24822, 24823, 24847], [24822, 24847, 24846], [24823, 24824, 24847], [24824, 24848, 24847], [24824, 24825, 24849], [24824, 24849, 24848], [24825, 24826, 24849], [24826, 24850, 24849], [24826, 24827, 24851], [24826, 24851, 24850], [24827, 24828, 24851], [24828, 24852, 24851], [24828, 24829, 24853], [24828, 24853, 24852], [24829, 24830, 24853], [24830, 24854, 24853], [24830, 24831, 24855], [24830, 24855, 24854], [24831, 24832, 24855], [24832, 24856, 24855], [24832, 24833, 24857], [24832, 24857, 24856], [24833, 24834, 24857], [24834, 24858, 24857], [24834, 24835, 24859], [24834, 24859, 24858], [24835, 24836, 24859], [24836, 24860, 24859], [24836, 24837, 24861], [24836, 24861, 24860], [24837, 24838, 24861], [24838, 24862, 24861], [24838, 24839, 24863], [24838, 24863, 24862], [24839, 24840, 24863], [24840, 24864, 24863], [24840, 24841, 24865], [24840, 24865, 24864], [24841, 24842, 24865], [24842, 24866, 24865], [24842, 24843, 24867], [24842, 24867, 24866], [24843, 12301, 24867], [12301, 12430, 24867], [24844, 24845, 24868], [24845, 24869, 24868], [24845, 24846, 24870], [24845, 24870, 24869], [24846, 24847, 24870], [24847, 24871, 24870], [24847, 24848, 24872], [24847, 24872, 24871], [24848, 24849, 24872], [24849, 24873, 24872], [24849, 24850, 
24874], [24849, 24874, 24873], [24850, 24851, 24874], [24851, 24875, 24874], [24851, 24852, 24876], [24851, 24876, 24875], [24852, 24853, 24876], [24853, 24877, 24876], [24853, 24854, 24878], [24853, 24878, 24877], [24854, 24855, 24878], [24855, 24879, 24878], [24855, 24856, 24880], [24855, 24880, 24879], [24856, 24857, 24880], [24857, 24881, 24880], [24857, 24858, 24882], [24857, 24882, 24881], [24858, 24859, 24882], [24859, 24883, 24882], [24859, 24860, 24884], [24859, 24884, 24883], [24860, 24861, 24884], [24861, 24885, 24884], [24861, 24862, 24886], [24861, 24886, 24885], [24862, 24863, 24886], [24863, 24887, 24886], [24863, 24864, 24888], [24863, 24888, 24887], [24864, 24865, 24888], [24865, 24889, 24888], [24865, 24866, 24890], [24865, 24890, 24889], [24866, 24867, 24890], [24867, 24891, 24890], [24867, 12430, 12559], [24867, 12559, 24891], [24868, 24869, 24893], [24868, 24893, 24892], [24869, 24870, 24893], [24870, 24894, 24893], [24870, 24871, 24895], [24870, 24895, 24894], [24871, 24872, 24895], [24872, 24896, 24895], [24872, 24873, 24897], [24872, 24897, 24896], [24873, 24874, 24897], [24874, 24898, 24897], [24874, 24875, 24899], [24874, 24899, 24898], [24875, 24876, 24899], [24876, 24900, 24899], [24876, 24877, 24901], [24876, 24901, 24900], [24877, 24878, 24901], [24878, 24902, 24901], [24878, 24879, 24903], [24878, 24903, 24902], [24879, 24880, 24903], [24880, 24904, 24903], [24880, 24881, 24905], [24880, 24905, 24904], [24881, 24882, 24905], [24882, 24906, 24905], [24882, 24883, 24907], [24882, 24907, 24906], [24883, 24884, 24907], [24884, 24908, 24907], [24884, 24885, 24909], [24884, 24909, 24908], [24885, 24886, 24909], [24886, 24910, 24909], [24886, 24887, 24911], [24886, 24911, 24910], [24887, 24888, 24911], [24888, 24912, 24911], [24888, 24889, 24913], [24888, 24913, 24912], [24889, 24890, 24913], [24890, 24914, 24913], [24890, 24891, 24915], [24890, 24915, 24914], [24891, 12559, 24915], [12559, 12688, 24915], [24892, 24893, 24916], [24893, 24917, 24916], [24893, 24894, 24918], [24893, 24918, 24917], [24894, 24895, 24918], [24895, 24919, 24918], [24895, 24896, 24920], [24895, 24920, 24919], [24896, 24897, 24920], [24897, 24921, 24920], [24897, 24898, 24922], [24897, 24922, 24921], [24898, 24899, 24922], [24899, 24923, 24922], [24899, 24900, 24924], [24899, 24924, 24923], [24900, 24901, 24924], [24901, 24925, 24924], [24901, 24902, 24926], [24901, 24926, 24925], [24902, 24903, 24926], [24903, 24927, 24926], [24903, 24904, 24928], [24903, 24928, 24927], [24904, 24905, 24928], [24905, 24929, 24928], [24905, 24906, 24930], [24905, 24930, 24929], [24906, 24907, 24930], [24907, 24931, 24930], [24907, 24908, 24932], [24907, 24932, 24931], [24908, 24909, 24932], [24909, 24933, 24932], [24909, 24910, 24934], [24909, 24934, 24933], [24910, 24911, 24934], [24911, 24935, 24934], [24911, 24912, 24936], [24911, 24936, 24935], [24912, 24913, 24936], [24913, 24937, 24936], [24913, 24914, 24938], [24913, 24938, 24937], [24914, 24915, 24938], [24915, 24939, 24938], [24915, 12688, 12817], [24915, 12817, 24939], [24916, 24917, 24941], [24916, 24941, 24940], [24917, 24918, 24941], [24918, 24942, 24941], [24918, 24919, 24943], [24918, 24943, 24942], [24919, 24920, 24943], [24920, 24944, 24943], [24920, 24921, 24945], [24920, 24945, 24944], [24921, 24922, 24945], [24922, 24946, 24945], [24922, 24923, 24947], [24922, 24947, 24946], [24923, 24924, 24947], [24924, 24948, 24947], [24924, 24925, 24949], [24924, 24949, 24948], [24925, 24926, 24949], [24926, 24950, 24949], [24926, 24927, 24951], 
[24926, 24951, 24950], [24927, 24928, 24951], [24928, 24952, 24951], [24928, 24929, 24953], [24928, 24953, 24952], [24929, 24930, 24953], [24930, 24954, 24953], [24930, 24931, 24955], [24930, 24955, 24954], [24931, 24932, 24955], [24932, 24956, 24955], [24932, 24933, 24957], [24932, 24957, 24956], [24933, 24934, 24957], [24934, 24958, 24957], [24934, 24935, 24959], [24934, 24959, 24958], [24935, 24936, 24959], [24936, 24960, 24959], [24936, 24937, 24961], [24936, 24961, 24960], [24937, 24938, 24961], [24938, 24962, 24961], [24938, 24939, 24963], [24938, 24963, 24962], [24939, 12817, 24963], [12817, 12946, 24963], [24940, 24941, 24964], [24941, 24965, 24964], [24941, 24942, 24966], [24941, 24966, 24965], [24942, 24943, 24966], [24943, 24967, 24966], [24943, 24944, 24968], [24943, 24968, 24967], [24944, 24945, 24968], [24945, 24969, 24968], [24945, 24946, 24970], [24945, 24970, 24969], [24946, 24947, 24970], [24947, 24971, 24970], [24947, 24948, 24972], [24947, 24972, 24971], [24948, 24949, 24972], [24949, 24973, 24972], [24949, 24950, 24974], [24949, 24974, 24973], [24950, 24951, 24974], [24951, 24975, 24974], [24951, 24952, 24976], [24951, 24976, 24975], [24952, 24953, 24976], [24953, 24977, 24976], [24953, 24954, 24978], [24953, 24978, 24977], [24954, 24955, 24978], [24955, 24979, 24978], [24955, 24956, 24980], [24955, 24980, 24979], [24956, 24957, 24980], [24957, 24981, 24980], [24957, 24958, 24982], [24957, 24982, 24981], [24958, 24959, 24982], [24959, 24983, 24982], [24959, 24960, 24984], [24959, 24984, 24983], [24960, 24961, 24984], [24961, 24985, 24984], [24961, 24962, 24986], [24961, 24986, 24985], [24962, 24963, 24986], [24963, 24987, 24986], [24963, 12946, 13075], [24963, 13075, 24987], [24964, 24965, 24989], [24964, 24989, 24988], [24965, 24966, 24989], [24966, 24990, 24989], [24966, 24967, 24991], [24966, 24991, 24990], [24967, 24968, 24991], [24968, 24992, 24991], [24968, 24969, 24993], [24968, 24993, 24992], [24969, 24970, 24993], [24970, 24994, 24993], [24970, 24971, 24995], [24970, 24995, 24994], [24971, 24972, 24995], [24972, 24996, 24995], [24972, 24973, 24997], [24972, 24997, 24996], [24973, 24974, 24997], [24974, 24998, 24997], [24974, 24975, 24999], [24974, 24999, 24998], [24975, 24976, 24999], [24976, 25000, 24999], [24976, 24977, 25001], [24976, 25001, 25000], [24977, 24978, 25001], [24978, 25002, 25001], [24978, 24979, 25003], [24978, 25003, 25002], [24979, 24980, 25003], [24980, 25004, 25003], [24980, 24981, 25005], [24980, 25005, 25004], [24981, 24982, 25005], [24982, 25006, 25005], [24982, 24983, 25007], [24982, 25007, 25006], [24983, 24984, 25007], [24984, 25008, 25007], [24984, 24985, 25009], [24984, 25009, 25008], [24985, 24986, 25009], [24986, 25010, 25009], [24986, 24987, 25011], [24986, 25011, 25010], [24987, 13075, 25011], [13075, 13204, 25011], [24988, 24989, 25012], [24989, 25013, 25012], [24989, 24990, 25014], [24989, 25014, 25013], [24990, 24991, 25014], [24991, 25015, 25014], [24991, 24992, 25016], [24991, 25016, 25015], [24992, 24993, 25016], [24993, 25017, 25016], [24993, 24994, 25018], [24993, 25018, 25017], [24994, 24995, 25018], [24995, 25019, 25018], [24995, 24996, 25020], [24995, 25020, 25019], [24996, 24997, 25020], [24997, 25021, 25020], [24997, 24998, 25022], [24997, 25022, 25021], [24998, 24999, 25022], [24999, 25023, 25022], [24999, 25000, 25024], [24999, 25024, 25023], [25000, 25001, 25024], [25001, 25025, 25024], [25001, 25002, 25026], [25001, 25026, 25025], [25002, 25003, 25026], [25003, 25027, 25026], [25003, 25004, 25028], [25003, 
25028, 25027], [25004, 25005, 25028], [25005, 25029, 25028], [25005, 25006, 25030], [25005, 25030, 25029], [25006, 25007, 25030], [25007, 25031, 25030], [25007, 25008, 25032], [25007, 25032, 25031], [25008, 25009, 25032], [25009, 25033, 25032], [25009, 25010, 25034], [25009, 25034, 25033], [25010, 25011, 25034], [25011, 25035, 25034], [25011, 13204, 13333], [25011, 13333, 25035], [25012, 25013, 25037], [25012, 25037, 25036], [25013, 25014, 25037], [25014, 25038, 25037], [25014, 25015, 25039], [25014, 25039, 25038], [25015, 25016, 25039], [25016, 25040, 25039], [25016, 25017, 25041], [25016, 25041, 25040], [25017, 25018, 25041], [25018, 25042, 25041], [25018, 25019, 25043], [25018, 25043, 25042], [25019, 25020, 25043], [25020, 25044, 25043], [25020, 25021, 25045], [25020, 25045, 25044], [25021, 25022, 25045], [25022, 25046, 25045], [25022, 25023, 25047], [25022, 25047, 25046], [25023, 25024, 25047], [25024, 25048, 25047], [25024, 25025, 25049], [25024, 25049, 25048], [25025, 25026, 25049], [25026, 25050, 25049], [25026, 25027, 25051], [25026, 25051, 25050], [25027, 25028, 25051], [25028, 25052, 25051], [25028, 25029, 25053], [25028, 25053, 25052], [25029, 25030, 25053], [25030, 25054, 25053], [25030, 25031, 25055], [25030, 25055, 25054], [25031, 25032, 25055], [25032, 25056, 25055], [25032, 25033, 25057], [25032, 25057, 25056], [25033, 25034, 25057], [25034, 25058, 25057], [25034, 25035, 25059], [25034, 25059, 25058], [25035, 13333, 25059], [13333, 13462, 25059], [25036, 25037, 25060], [25037, 25061, 25060], [25037, 25038, 25062], [25037, 25062, 25061], [25038, 25039, 25062], [25039, 25063, 25062], [25039, 25040, 25064], [25039, 25064, 25063], [25040, 25041, 25064], [25041, 25065, 25064], [25041, 25042, 25066], [25041, 25066, 25065], [25042, 25043, 25066], [25043, 25067, 25066], [25043, 25044, 25068], [25043, 25068, 25067], [25044, 25045, 25068], [25045, 25069, 25068], [25045, 25046, 25070], [25045, 25070, 25069], [25046, 25047, 25070], [25047, 25071, 25070], [25047, 25048, 25072], [25047, 25072, 25071], [25048, 25049, 25072], [25049, 25073, 25072], [25049, 25050, 25074], [25049, 25074, 25073], [25050, 25051, 25074], [25051, 25075, 25074], [25051, 25052, 25076], [25051, 25076, 25075], [25052, 25053, 25076], [25053, 25077, 25076], [25053, 25054, 25078], [25053, 25078, 25077], [25054, 25055, 25078], [25055, 25079, 25078], [25055, 25056, 25080], [25055, 25080, 25079], [25056, 25057, 25080], [25057, 25081, 25080], [25057, 25058, 25082], [25057, 25082, 25081], [25058, 25059, 25082], [25059, 25083, 25082], [25059, 13462, 13591], [25059, 13591, 25083], [25060, 25061, 25085], [25060, 25085, 25084], [25061, 25062, 25085], [25062, 25086, 25085], [25062, 25063, 25087], [25062, 25087, 25086], [25063, 25064, 25087], [25064, 25088, 25087], [25064, 25065, 25089], [25064, 25089, 25088], [25065, 25066, 25089], [25066, 25090, 25089], [25066, 25067, 25091], [25066, 25091, 25090], [25067, 25068, 25091], [25068, 25092, 25091], [25068, 25069, 25093], [25068, 25093, 25092], [25069, 25070, 25093], [25070, 25094, 25093], [25070, 25071, 25095], [25070, 25095, 25094], [25071, 25072, 25095], [25072, 25096, 25095], [25072, 25073, 25097], [25072, 25097, 25096], [25073, 25074, 25097], [25074, 25098, 25097], [25074, 25075, 25099], [25074, 25099, 25098], [25075, 25076, 25099], [25076, 25100, 25099], [25076, 25077, 25101], [25076, 25101, 25100], [25077, 25078, 25101], [25078, 25102, 25101], [25078, 25079, 25103], [25078, 25103, 25102], [25079, 25080, 25103], [25080, 25104, 25103], [25080, 25081, 25105], [25080, 25105, 
25104], [25081, 25082, 25105], [25082, 25106, 25105], [25082, 25083, 25107], [25082, 25107, 25106], [25083, 13591, 25107], [13591, 13720, 25107], [25084, 25085, 25108], [25085, 25109, 25108], [25085, 25086, 25110], [25085, 25110, 25109], [25086, 25087, 25110], [25087, 25111, 25110], [25087, 25088, 25112], [25087, 25112, 25111], [25088, 25089, 25112], [25089, 25113, 25112], [25089, 25090, 25114], [25089, 25114, 25113], [25090, 25091, 25114], [25091, 25115, 25114], [25091, 25092, 25116], [25091, 25116, 25115], [25092, 25093, 25116], [25093, 25117, 25116], [25093, 25094, 25118], [25093, 25118, 25117], [25094, 25095, 25118], [25095, 25119, 25118], [25095, 25096, 25120], [25095, 25120, 25119], [25096, 25097, 25120], [25097, 25121, 25120], [25097, 25098, 25122], [25097, 25122, 25121], [25098, 25099, 25122], [25099, 25123, 25122], [25099, 25100, 25124], [25099, 25124, 25123], [25100, 25101, 25124], [25101, 25125, 25124], [25101, 25102, 25126], [25101, 25126, 25125], [25102, 25103, 25126], [25103, 25127, 25126], [25103, 25104, 25128], [25103, 25128, 25127], [25104, 25105, 25128], [25105, 25129, 25128], [25105, 25106, 25130], [25105, 25130, 25129], [25106, 25107, 25130], [25107, 25131, 25130], [25107, 13720, 13849], [25107, 13849, 25131], [25108, 25109, 25133], [25108, 25133, 25132], [25109, 25110, 25133], [25110, 25134, 25133], [25110, 25111, 25135], [25110, 25135, 25134], [25111, 25112, 25135], [25112, 25136, 25135], [25112, 25113, 25137], [25112, 25137, 25136], [25113, 25114, 25137], [25114, 25138, 25137], [25114, 25115, 25139], [25114, 25139, 25138], [25115, 25116, 25139], [25116, 25140, 25139], [25116, 25117, 25141], [25116, 25141, 25140], [25117, 25118, 25141], [25118, 25142, 25141], [25118, 25119, 25143], [25118, 25143, 25142], [25119, 25120, 25143], [25120, 25144, 25143], [25120, 25121, 25145], [25120, 25145, 25144], [25121, 25122, 25145], [25122, 25146, 25145], [25122, 25123, 25147], [25122, 25147, 25146], [25123, 25124, 25147], [25124, 25148, 25147], [25124, 25125, 25149], [25124, 25149, 25148], [25125, 25126, 25149], [25126, 25150, 25149], [25126, 25127, 25151], [25126, 25151, 25150], [25127, 25128, 25151], [25128, 25152, 25151], [25128, 25129, 25153], [25128, 25153, 25152], [25129, 25130, 25153], [25130, 25154, 25153], [25130, 25131, 25155], [25130, 25155, 25154], [25131, 13849, 25155], [13849, 13978, 25155], [25132, 25133, 25156], [25133, 25157, 25156], [25133, 25134, 25158], [25133, 25158, 25157], [25134, 25135, 25158], [25135, 25159, 25158], [25135, 25136, 25160], [25135, 25160, 25159], [25136, 25137, 25160], [25137, 25161, 25160], [25137, 25138, 25162], [25137, 25162, 25161], [25138, 25139, 25162], [25139, 25163, 25162], [25139, 25140, 25164], [25139, 25164, 25163], [25140, 25141, 25164], [25141, 25165, 25164], [25141, 25142, 25166], [25141, 25166, 25165], [25142, 25143, 25166], [25143, 25167, 25166], [25143, 25144, 25168], [25143, 25168, 25167], [25144, 25145, 25168], [25145, 25169, 25168], [25145, 25146, 25170], [25145, 25170, 25169], [25146, 25147, 25170], [25147, 25171, 25170], [25147, 25148, 25172], [25147, 25172, 25171], [25148, 25149, 25172], [25149, 25173, 25172], [25149, 25150, 25174], [25149, 25174, 25173], [25150, 25151, 25174], [25151, 25175, 25174], [25151, 25152, 25176], [25151, 25176, 25175], [25152, 25153, 25176], [25153, 25177, 25176], [25153, 25154, 25178], [25153, 25178, 25177], [25154, 25155, 25178], [25155, 25179, 25178], [25155, 13978, 14107], [25155, 14107, 25179], [25156, 25157, 25181], [25156, 25181, 25180], [25157, 25158, 25181], [25158, 25182, 25181], 
[25158, 25159, 25183], [25158, 25183, 25182], [25159, 25160, 25183], [25160, 25184, 25183], [25160, 25161, 25185], [25160, 25185, 25184], [25161, 25162, 25185], [25162, 25186, 25185], [25162, 25163, 25187], [25162, 25187, 25186], [25163, 25164, 25187], [25164, 25188, 25187], [25164, 25165, 25189], [25164, 25189, 25188], [25165, 25166, 25189], [25166, 25190, 25189], [25166, 25167, 25191], [25166, 25191, 25190], [25167, 25168, 25191], [25168, 25192, 25191], [25168, 25169, 25193], [25168, 25193, 25192], [25169, 25170, 25193], [25170, 25194, 25193], [25170, 25171, 25195], [25170, 25195, 25194], [25171, 25172, 25195], [25172, 25196, 25195], [25172, 25173, 25197], [25172, 25197, 25196], [25173, 25174, 25197], [25174, 25198, 25197], [25174, 25175, 25199], [25174, 25199, 25198], [25175, 25176, 25199], [25176, 25200, 25199], [25176, 25177, 25201], [25176, 25201, 25200], [25177, 25178, 25201], [25178, 25202, 25201], [25178, 25179, 25203], [25178, 25203, 25202], [25179, 14107, 25203], [14107, 14236, 25203], [25180, 25181, 25204], [25181, 25205, 25204], [25181, 25182, 25206], [25181, 25206, 25205], [25182, 25183, 25206], [25183, 25207, 25206], [25183, 25184, 25208], [25183, 25208, 25207], [25184, 25185, 25208], [25185, 25209, 25208], [25185, 25186, 25210], [25185, 25210, 25209], [25186, 25187, 25210], [25187, 25211, 25210], [25187, 25188, 25212], [25187, 25212, 25211], [25188, 25189, 25212], [25189, 25213, 25212], [25189, 25190, 25214], [25189, 25214, 25213], [25190, 25191, 25214], [25191, 25215, 25214], [25191, 25192, 25216], [25191, 25216, 25215], [25192, 25193, 25216], [25193, 25217, 25216], [25193, 25194, 25218], [25193, 25218, 25217], [25194, 25195, 25218], [25195, 25219, 25218], [25195, 25196, 25220], [25195, 25220, 25219], [25196, 25197, 25220], [25197, 25221, 25220], [25197, 25198, 25222], [25197, 25222, 25221], [25198, 25199, 25222], [25199, 25223, 25222], [25199, 25200, 25224], [25199, 25224, 25223], [25200, 25201, 25224], [25201, 25225, 25224], [25201, 25202, 25226], [25201, 25226, 25225], [25202, 25203, 25226], [25203, 25227, 25226], [25203, 14236, 14365], [25203, 14365, 25227], [25204, 25205, 25229], [25204, 25229, 25228], [25205, 25206, 25229], [25206, 25230, 25229], [25206, 25207, 25231], [25206, 25231, 25230], [25207, 25208, 25231], [25208, 25232, 25231], [25208, 25209, 25233], [25208, 25233, 25232], [25209, 25210, 25233], [25210, 25234, 25233], [25210, 25211, 25235], [25210, 25235, 25234], [25211, 25212, 25235], [25212, 25236, 25235], [25212, 25213, 25237], [25212, 25237, 25236], [25213, 25214, 25237], [25214, 25238, 25237], [25214, 25215, 25239], [25214, 25239, 25238], [25215, 25216, 25239], [25216, 25240, 25239], [25216, 25217, 25241], [25216, 25241, 25240], [25217, 25218, 25241], [25218, 25242, 25241], [25218, 25219, 25243], [25218, 25243, 25242], [25219, 25220, 25243], [25220, 25244, 25243], [25220, 25221, 25245], [25220, 25245, 25244], [25221, 25222, 25245], [25222, 25246, 25245], [25222, 25223, 25247], [25222, 25247, 25246], [25223, 25224, 25247], [25224, 25248, 25247], [25224, 25225, 25249], [25224, 25249, 25248], [25225, 25226, 25249], [25226, 25250, 25249], [25226, 25227, 25251], [25226, 25251, 25250], [25227, 14365, 25251], [14365, 14494, 25251], [25228, 25229, 25252], [25229, 25253, 25252], [25229, 25230, 25254], [25229, 25254, 25253], [25230, 25231, 25254], [25231, 25255, 25254], [25231, 25232, 25256], [25231, 25256, 25255], [25232, 25233, 25256], [25233, 25257, 25256], [25233, 25234, 25258], [25233, 25258, 25257], [25234, 25235, 25258], [25235, 25259, 25258], [25235, 
25236, 25260], [25235, 25260, 25259], [25236, 25237, 25260], [25237, 25261, 25260], [25237, 25238, 25262], [25237, 25262, 25261], [25238, 25239, 25262], [25239, 25263, 25262], [25239, 25240, 25264], [25239, 25264, 25263], [25240, 25241, 25264], [25241, 25265, 25264], [25241, 25242, 25266], [25241, 25266, 25265], [25242, 25243, 25266], [25243, 25267, 25266], [25243, 25244, 25268], [25243, 25268, 25267], [25244, 25245, 25268], [25245, 25269, 25268], [25245, 25246, 25270], [25245, 25270, 25269], [25246, 25247, 25270], [25247, 25271, 25270], [25247, 25248, 25272], [25247, 25272, 25271], [25248, 25249, 25272], [25249, 25273, 25272], [25249, 25250, 25274], [25249, 25274, 25273], [25250, 25251, 25274], [25251, 25275, 25274], [25251, 14494, 14623], [25251, 14623, 25275], [25252, 25253, 25277], [25252, 25277, 25276], [25253, 25254, 25277], [25254, 25278, 25277], [25254, 25255, 25279], [25254, 25279, 25278], [25255, 25256, 25279], [25256, 25280, 25279], [25256, 25257, 25281], [25256, 25281, 25280], [25257, 25258, 25281], [25258, 25282, 25281], [25258, 25259, 25283], [25258, 25283, 25282], [25259, 25260, 25283], [25260, 25284, 25283], [25260, 25261, 25285], [25260, 25285, 25284], [25261, 25262, 25285], [25262, 25286, 25285], [25262, 25263, 25287], [25262, 25287, 25286], [25263, 25264, 25287], [25264, 25288, 25287], [25264, 25265, 25289], [25264, 25289, 25288], [25265, 25266, 25289], [25266, 25290, 25289], [25266, 25267, 25291], [25266, 25291, 25290], [25267, 25268, 25291], [25268, 25292, 25291], [25268, 25269, 25293], [25268, 25293, 25292], [25269, 25270, 25293], [25270, 25294, 25293], [25270, 25271, 25295], [25270, 25295, 25294], [25271, 25272, 25295], [25272, 25296, 25295], [25272, 25273, 25297], [25272, 25297, 25296], [25273, 25274, 25297], [25274, 25298, 25297], [25274, 25275, 25299], [25274, 25299, 25298], [25275, 14623, 25299], [14623, 14752, 25299], [25276, 25277, 25300], [25277, 25301, 25300], [25277, 25278, 25302], [25277, 25302, 25301], [25278, 25279, 25302], [25279, 25303, 25302], [25279, 25280, 25304], [25279, 25304, 25303], [25280, 25281, 25304], [25281, 25305, 25304], [25281, 25282, 25306], [25281, 25306, 25305], [25282, 25283, 25306], [25283, 25307, 25306], [25283, 25284, 25308], [25283, 25308, 25307], [25284, 25285, 25308], [25285, 25309, 25308], [25285, 25286, 25310], [25285, 25310, 25309], [25286, 25287, 25310], [25287, 25311, 25310], [25287, 25288, 25312], [25287, 25312, 25311], [25288, 25289, 25312], [25289, 25313, 25312], [25289, 25290, 25314], [25289, 25314, 25313], [25290, 25291, 25314], [25291, 25315, 25314], [25291, 25292, 25316], [25291, 25316, 25315], [25292, 25293, 25316], [25293, 25317, 25316], [25293, 25294, 25318], [25293, 25318, 25317], [25294, 25295, 25318], [25295, 25319, 25318], [25295, 25296, 25320], [25295, 25320, 25319], [25296, 25297, 25320], [25297, 25321, 25320], [25297, 25298, 25322], [25297, 25322, 25321], [25298, 25299, 25322], [25299, 25323, 25322], [25299, 14752, 14881], [25299, 14881, 25323], [25300, 25301, 25325], [25300, 25325, 25324], [25301, 25302, 25325], [25302, 25326, 25325], [25302, 25303, 25327], [25302, 25327, 25326], [25303, 25304, 25327], [25304, 25328, 25327], [25304, 25305, 25329], [25304, 25329, 25328], [25305, 25306, 25329], [25306, 25330, 25329], [25306, 25307, 25331], [25306, 25331, 25330], [25307, 25308, 25331], [25308, 25332, 25331], [25308, 25309, 25333], [25308, 25333, 25332], [25309, 25310, 25333], [25310, 25334, 25333], [25310, 25311, 25335], [25310, 25335, 25334], [25311, 25312, 25335], [25312, 25336, 25335], [25312, 25313, 
25337], [25312, 25337, 25336], [25313, 25314, 25337], [25314, 25338, 25337], [25314, 25315, 25339], [25314, 25339, 25338], [25315, 25316, 25339], [25316, 25340, 25339], [25316, 25317, 25341], [25316, 25341, 25340], [25317, 25318, 25341], [25318, 25342, 25341], [25318, 25319, 25343], [25318, 25343, 25342], [25319, 25320, 25343], [25320, 25344, 25343], [25320, 25321, 25345], [25320, 25345, 25344], [25321, 25322, 25345], [25322, 25346, 25345], [25322, 25323, 25347], [25322, 25347, 25346], [25323, 14881, 25347], [14881, 15010, 25347], [25324, 25325, 25348], [25325, 25349, 25348], [25325, 25326, 25350], [25325, 25350, 25349], [25326, 25327, 25350], [25327, 25351, 25350], [25327, 25328, 25352], [25327, 25352, 25351], [25328, 25329, 25352], [25329, 25353, 25352], [25329, 25330, 25354], [25329, 25354, 25353], [25330, 25331, 25354], [25331, 25355, 25354], [25331, 25332, 25356], [25331, 25356, 25355], [25332, 25333, 25356], [25333, 25357, 25356], [25333, 25334, 25358], [25333, 25358, 25357], [25334, 25335, 25358], [25335, 25359, 25358], [25335, 25336, 25360], [25335, 25360, 25359], [25336, 25337, 25360], [25337, 25361, 25360], [25337, 25338, 25362], [25337, 25362, 25361], [25338, 25339, 25362], [25339, 25363, 25362], [25339, 25340, 25364], [25339, 25364, 25363], [25340, 25341, 25364], [25341, 25365, 25364], [25341, 25342, 25366], [25341, 25366, 25365], [25342, 25343, 25366], [25343, 25367, 25366], [25343, 25344, 25368], [25343, 25368, 25367], [25344, 25345, 25368], [25345, 25369, 25368], [25345, 25346, 25370], [25345, 25370, 25369], [25346, 25347, 25370], [25347, 25371, 25370], [25347, 15010, 15139], [25347, 15139, 25371], [25348, 25349, 25373], [25348, 25373, 25372], [25349, 25350, 25373], [25350, 25374, 25373], [25350, 25351, 25375], [25350, 25375, 25374], [25351, 25352, 25375], [25352, 25376, 25375], [25352, 25353, 25377], [25352, 25377, 25376], [25353, 25354, 25377], [25354, 25378, 25377], [25354, 25355, 25379], [25354, 25379, 25378], [25355, 25356, 25379], [25356, 25380, 25379], [25356, 25357, 25381], [25356, 25381, 25380], [25357, 25358, 25381], [25358, 25382, 25381], [25358, 25359, 25383], [25358, 25383, 25382], [25359, 25360, 25383], [25360, 25384, 25383], [25360, 25361, 25385], [25360, 25385, 25384], [25361, 25362, 25385], [25362, 25386, 25385], [25362, 25363, 25387], [25362, 25387, 25386], [25363, 25364, 25387], [25364, 25388, 25387], [25364, 25365, 25389], [25364, 25389, 25388], [25365, 25366, 25389], [25366, 25390, 25389], [25366, 25367, 25391], [25366, 25391, 25390], [25367, 25368, 25391], [25368, 25392, 25391], [25368, 25369, 25393], [25368, 25393, 25392], [25369, 25370, 25393], [25370, 25394, 25393], [25370, 25371, 25395], [25370, 25395, 25394], [25371, 15139, 25395], [15139, 15268, 25395], [25372, 25373, 25396], [25373, 25397, 25396], [25373, 25374, 25398], [25373, 25398, 25397], [25374, 25375, 25398], [25375, 25399, 25398], [25375, 25376, 25400], [25375, 25400, 25399], [25376, 25377, 25400], [25377, 25401, 25400], [25377, 25378, 25402], [25377, 25402, 25401], [25378, 25379, 25402], [25379, 25403, 25402], [25379, 25380, 25404], [25379, 25404, 25403], [25380, 25381, 25404], [25381, 25405, 25404], [25381, 25382, 25406], [25381, 25406, 25405], [25382, 25383, 25406], [25383, 25407, 25406], [25383, 25384, 25408], [25383, 25408, 25407], [25384, 25385, 25408], [25385, 25409, 25408], [25385, 25386, 25410], [25385, 25410, 25409], [25386, 25387, 25410], [25387, 25411, 25410], [25387, 25388, 25412], [25387, 25412, 25411], [25388, 25389, 25412], [25389, 25413, 25412], [25389, 25390, 25414], 
[25389, 25414, 25413], [25390, 25391, 25414], [25391, 25415, 25414], [25391, 25392, 25416], [25391, 25416, 25415], [25392, 25393, 25416], [25393, 25417, 25416], [25393, 25394, 25418], [25393, 25418, 25417], [25394, 25395, 25418], [25395, 25419, 25418], [25395, 15268, 15397], [25395, 15397, 25419], [25396, 25397, 25421], [25396, 25421, 25420], [25397, 25398, 25421], [25398, 25422, 25421], [25398, 25399, 25423], [25398, 25423, 25422], [25399, 25400, 25423], [25400, 25424, 25423], [25400, 25401, 25425], [25400, 25425, 25424], [25401, 25402, 25425], [25402, 25426, 25425], [25402, 25403, 25427], [25402, 25427, 25426], [25403, 25404, 25427], [25404, 25428, 25427], [25404, 25405, 25429], [25404, 25429, 25428], [25405, 25406, 25429], [25406, 25430, 25429], [25406, 25407, 25431], [25406, 25431, 25430], [25407, 25408, 25431], [25408, 25432, 25431], [25408, 25409, 25433], [25408, 25433, 25432], [25409, 25410, 25433], [25410, 25434, 25433], [25410, 25411, 25435], [25410, 25435, 25434], [25411, 25412, 25435], [25412, 25436, 25435], [25412, 25413, 25437], [25412, 25437, 25436], [25413, 25414, 25437], [25414, 25438, 25437], [25414, 25415, 25439], [25414, 25439, 25438], [25415, 25416, 25439], [25416, 25440, 25439], [25416, 25417, 25441], [25416, 25441, 25440], [25417, 25418, 25441], [25418, 25442, 25441], [25418, 25419, 25443], [25418, 25443, 25442], [25419, 15397, 25443], [15397, 15526, 25443], [25420, 25421, 25444], [25421, 25445, 25444], [25421, 25422, 25446], [25421, 25446, 25445], [25422, 25423, 25446], [25423, 25447, 25446], [25423, 25424, 25448], [25423, 25448, 25447], [25424, 25425, 25448], [25425, 25449, 25448], [25425, 25426, 25450], [25425, 25450, 25449], [25426, 25427, 25450], [25427, 25451, 25450], [25427, 25428, 25452], [25427, 25452, 25451], [25428, 25429, 25452], [25429, 25453, 25452], [25429, 25430, 25454], [25429, 25454, 25453], [25430, 25431, 25454], [25431, 25455, 25454], [25431, 25432, 25456], [25431, 25456, 25455], [25432, 25433, 25456], [25433, 25457, 25456], [25433, 25434, 25458], [25433, 25458, 25457], [25434, 25435, 25458], [25435, 25459, 25458], [25435, 25436, 25460], [25435, 25460, 25459], [25436, 25437, 25460], [25437, 25461, 25460], [25437, 25438, 25462], [25437, 25462, 25461], [25438, 25439, 25462], [25439, 25463, 25462], [25439, 25440, 25464], [25439, 25464, 25463], [25440, 25441, 25464], [25441, 25465, 25464], [25441, 25442, 25466], [25441, 25466, 25465], [25442, 25443, 25466], [25443, 25467, 25466], [25443, 15526, 15655], [25443, 15655, 25467], [25444, 25445, 25469], [25444, 25469, 25468], [25445, 25446, 25469], [25446, 25470, 25469], [25446, 25447, 25471], [25446, 25471, 25470], [25447, 25448, 25471], [25448, 25472, 25471], [25448, 25449, 25473], [25448, 25473, 25472], [25449, 25450, 25473], [25450, 25474, 25473], [25450, 25451, 25475], [25450, 25475, 25474], [25451, 25452, 25475], [25452, 25476, 25475], [25452, 25453, 25477], [25452, 25477, 25476], [25453, 25454, 25477], [25454, 25478, 25477], [25454, 25455, 25479], [25454, 25479, 25478], [25455, 25456, 25479], [25456, 25480, 25479], [25456, 25457, 25481], [25456, 25481, 25480], [25457, 25458, 25481], [25458, 25482, 25481], [25458, 25459, 25483], [25458, 25483, 25482], [25459, 25460, 25483], [25460, 25484, 25483], [25460, 25461, 25485], [25460, 25485, 25484], [25461, 25462, 25485], [25462, 25486, 25485], [25462, 25463, 25487], [25462, 25487, 25486], [25463, 25464, 25487], [25464, 25488, 25487], [25464, 25465, 25489], [25464, 25489, 25488], [25465, 25466, 25489], [25466, 25490, 25489], [25466, 25467, 25491], [25466, 
25491, 25490], [25467, 15655, 25491], [15655, 15784, 25491], [25468, 25469, 25492], [25469, 25493, 25492], [25469, 25470, 25494], [25469, 25494, 25493], [25470, 25471, 25494], [25471, 25495, 25494], [25471, 25472, 25496], [25471, 25496, 25495], [25472, 25473, 25496], [25473, 25497, 25496], [25473, 25474, 25498], [25473, 25498, 25497], [25474, 25475, 25498], [25475, 25499, 25498], [25475, 25476, 25500], [25475, 25500, 25499], [25476, 25477, 25500], [25477, 25501, 25500], [25477, 25478, 25502], [25477, 25502, 25501], [25478, 25479, 25502], [25479, 25503, 25502], [25479, 25480, 25504], [25479, 25504, 25503], [25480, 25481, 25504], [25481, 25505, 25504], [25481, 25482, 25506], [25481, 25506, 25505], [25482, 25483, 25506], [25483, 25507, 25506], [25483, 25484, 25508], [25483, 25508, 25507], [25484, 25485, 25508], [25485, 25509, 25508], [25485, 25486, 25510], [25485, 25510, 25509], [25486, 25487, 25510], [25487, 25511, 25510], [25487, 25488, 25512], [25487, 25512, 25511], [25488, 25489, 25512], [25489, 25513, 25512], [25489, 25490, 25514], [25489, 25514, 25513], [25490, 25491, 25514], [25491, 25515, 25514], [25491, 15784, 15913], [25491, 15913, 25515], [25492, 25493, 25517], [25492, 25517, 25516], [25493, 25494, 25517], [25494, 25518, 25517], [25494, 25495, 25519], [25494, 25519, 25518], [25495, 25496, 25519], [25496, 25520, 25519], [25496, 25497, 25521], [25496, 25521, 25520], [25497, 25498, 25521], [25498, 25522, 25521], [25498, 25499, 25523], [25498, 25523, 25522], [25499, 25500, 25523], [25500, 25524, 25523], [25500, 25501, 25525], [25500, 25525, 25524], [25501, 25502, 25525], [25502, 25526, 25525], [25502, 25503, 25527], [25502, 25527, 25526], [25503, 25504, 25527], [25504, 25528, 25527], [25504, 25505, 25529], [25504, 25529, 25528], [25505, 25506, 25529], [25506, 25530, 25529], [25506, 25507, 25531], [25506, 25531, 25530], [25507, 25508, 25531], [25508, 25532, 25531], [25508, 25509, 25533], [25508, 25533, 25532], [25509, 25510, 25533], [25510, 25534, 25533], [25510, 25511, 25535], [25510, 25535, 25534], [25511, 25512, 25535], [25512, 25536, 25535], [25512, 25513, 25537], [25512, 25537, 25536], [25513, 25514, 25537], [25514, 25538, 25537], [25514, 25515, 25539], [25514, 25539, 25538], [25515, 15913, 25539], [15913, 16042, 25539], [25516, 25517, 22363], [25517, 22234, 22363], [25517, 25518, 22105], [25517, 22105, 22234], [25518, 25519, 22105], [25519, 21976, 22105], [25519, 25520, 21847], [25519, 21847, 21976], [25520, 25521, 21847], [25521, 21718, 21847], [25521, 25522, 21589], [25521, 21589, 21718], [25522, 25523, 21589], [25523, 21460, 21589], [25523, 25524, 21331], [25523, 21331, 21460], [25524, 25525, 21331], [25525, 21202, 21331], [25525, 25526, 21073], [25525, 21073, 21202], [25526, 25527, 21073], [25527, 20944, 21073], [25527, 25528, 20815], [25527, 20815, 20944], [25528, 25529, 20815], [25529, 20686, 20815], [25529, 25530, 20557], [25529, 20557, 20686], [25530, 25531, 20557], [25531, 20428, 20557], [25531, 25532, 20299], [25531, 20299, 20428], [25532, 25533, 20299], [25533, 20170, 20299], [25533, 25534, 20041], [25533, 20041, 20170], [25534, 25535, 20041], [25535, 19912, 20041], [25535, 25536, 19783], [25535, 19783, 19912], [25536, 25537, 19783], [25537, 19654, 19783], [25537, 25538, 19525], [25537, 19525, 19654], [25538, 25539, 19525], [25539, 19396, 19525], [25539, 16042, 19267], [25539, 19267, 19396], [128, 19266, 25540], [128, 25540, 257], [19266, 19137, 25540], [19137, 25541, 25540], [19137, 19008, 25542], [19137, 25542, 25541], [19008, 18879, 25542], [18879, 25543, 25542], 
[18879, 18750, 25544], [18879, 25544, 25543], [18750, 18621, 25544], [18621, 25545, 25544], [18621, 18492, 25546], [18621, 25546, 25545], [18492, 18363, 25546], [18363, 25547, 25546], [18363, 18234, 25548], [18363, 25548, 25547], [18234, 18105, 25548], [18105, 25549, 25548], [18105, 17976, 25550], [18105, 25550, 25549], [17976, 17847, 25550], [17847, 25551, 25550], [17847, 17718, 25552], [17847, 25552, 25551], [17718, 17589, 25552], [17589, 25553, 25552], [17589, 17460, 25554], [17589, 25554, 25553], [17460, 17331, 25554], [17331, 25555, 25554], [17331, 17202, 25556], [17331, 25556, 25555], [17202, 17073, 25556], [17073, 25557, 25556], [17073, 16944, 25558], [17073, 25558, 25557], [16944, 16815, 25558], [16815, 25559, 25558], [16815, 16686, 25560], [16815, 25560, 25559], [16686, 16557, 25560], [16557, 25561, 25560], [16557, 16428, 25562], [16557, 25562, 25561], [16428, 16299, 25562], [16299, 25563, 25562], [257, 25540, 386], [25540, 25564, 386], [25540, 25541, 25565], [25540, 25565, 25564], [25541, 25542, 25565], [25542, 25566, 25565], [25542, 25543, 25567], [25542, 25567, 25566], [25543, 25544, 25567], [25544, 25568, 25567], [25544, 25545, 25569], [25544, 25569, 25568], [25545, 25546, 25569], [25546, 25570, 25569], [25546, 25547, 25571], [25546, 25571, 25570], [25547, 25548, 25571], [25548, 25572, 25571], [25548, 25549, 25573], [25548, 25573, 25572], [25549, 25550, 25573], [25550, 25574, 25573], [25550, 25551, 25575], [25550, 25575, 25574], [25551, 25552, 25575], [25552, 25576, 25575], [25552, 25553, 25577], [25552, 25577, 25576], [25553, 25554, 25577], [25554, 25578, 25577], [25554, 25555, 25579], [25554, 25579, 25578], [25555, 25556, 25579], [25556, 25580, 25579], [25556, 25557, 25581], [25556, 25581, 25580], [25557, 25558, 25581], [25558, 25582, 25581], [25558, 25559, 25583], [25558, 25583, 25582], [25559, 25560, 25583], [25560, 25584, 25583], [25560, 25561, 25585], [25560, 25585, 25584], [25561, 25562, 25585], [25562, 25586, 25585], [25562, 25563, 25587], [25562, 25587, 25586], [386, 25564, 25588], [386, 25588, 515], [25564, 25565, 25588], [25565, 25589, 25588], [25565, 25566, 25590], [25565, 25590, 25589], [25566, 25567, 25590], [25567, 25591, 25590], [25567, 25568, 25592], [25567, 25592, 25591], [25568, 25569, 25592], [25569, 25593, 25592], [25569, 25570, 25594], [25569, 25594, 25593], [25570, 25571, 25594], [25571, 25595, 25594], [25571, 25572, 25596], [25571, 25596, 25595], [25572, 25573, 25596], [25573, 25597, 25596], [25573, 25574, 25598], [25573, 25598, 25597], [25574, 25575, 25598], [25575, 25599, 25598], [25575, 25576, 25600], [25575, 25600, 25599], [25576, 25577, 25600], [25577, 25601, 25600], [25577, 25578, 25602], [25577, 25602, 25601], [25578, 25579, 25602], [25579, 25603, 25602], [25579, 25580, 25604], [25579, 25604, 25603], [25580, 25581, 25604], [25581, 25605, 25604], [25581, 25582, 25606], [25581, 25606, 25605], [25582, 25583, 25606], [25583, 25607, 25606], [25583, 25584, 25608], [25583, 25608, 25607], [25584, 25585, 25608], [25585, 25609, 25608], [25585, 25586, 25610], [25585, 25610, 25609], [25586, 25587, 25610], [25587, 25611, 25610], [515, 25588, 644], [25588, 25612, 644], [25588, 25589, 25613], [25588, 25613, 25612], [25589, 25590, 25613], [25590, 25614, 25613], [25590, 25591, 25615], [25590, 25615, 25614], [25591, 25592, 25615], [25592, 25616, 25615], [25592, 25593, 25617], [25592, 25617, 25616], [25593, 25594, 25617], [25594, 25618, 25617], [25594, 25595, 25619], [25594, 25619, 25618], [25595, 25596, 25619], [25596, 25620, 25619], [25596, 25597, 25621], [25596, 
25621, 25620], [25597, 25598, 25621], [25598, 25622, 25621], [25598, 25599, 25623], [25598, 25623, 25622], [25599, 25600, 25623], [25600, 25624, 25623], [25600, 25601, 25625], [25600, 25625, 25624], [25601, 25602, 25625], [25602, 25626, 25625], [25602, 25603, 25627], [25602, 25627, 25626], [25603, 25604, 25627], [25604, 25628, 25627], [25604, 25605, 25629], [25604, 25629, 25628], [25605, 25606, 25629], [25606, 25630, 25629], [25606, 25607, 25631], [25606, 25631, 25630], [25607, 25608, 25631], [25608, 25632, 25631], [25608, 25609, 25633], [25608, 25633, 25632], [25609, 25610, 25633], [25610, 25634, 25633], [25610, 25611, 25635], [25610, 25635, 25634], [644, 25612, 25636], [644, 25636, 773], [25612, 25613, 25636], [25613, 25637, 25636], [25613, 25614, 25638], [25613, 25638, 25637], [25614, 25615, 25638], [25615, 25639, 25638], [25615, 25616, 25640], [25615, 25640, 25639], [25616, 25617, 25640], [25617, 25641, 25640], [25617, 25618, 25642], [25617, 25642, 25641], [25618, 25619, 25642], [25619, 25643, 25642], [25619, 25620, 25644], [25619, 25644, 25643], [25620, 25621, 25644], [25621, 25645, 25644], [25621, 25622, 25646], [25621, 25646, 25645], [25622, 25623, 25646], [25623, 25647, 25646], [25623, 25624, 25648], [25623, 25648, 25647], [25624, 25625, 25648], [25625, 25649, 25648], [25625, 25626, 25650], [25625, 25650, 25649], [25626, 25627, 25650], [25627, 25651, 25650], [25627, 25628, 25652], [25627, 25652, 25651], [25628, 25629, 25652], [25629, 25653, 25652], [25629, 25630, 25654], [25629, 25654, 25653], [25630, 25631, 25654], [25631, 25655, 25654], [25631, 25632, 25656], [25631, 25656, 25655], [25632, 25633, 25656], [25633, 25657, 25656], [25633, 25634, 25658], [25633, 25658, 25657], [25634, 25635, 25658], [25635, 25659, 25658], [773, 25636, 902], [25636, 25660, 902], [25636, 25637, 25661], [25636, 25661, 25660], [25637, 25638, 25661], [25638, 25662, 25661], [25638, 25639, 25663], [25638, 25663, 25662], [25639, 25640, 25663], [25640, 25664, 25663], [25640, 25641, 25665], [25640, 25665, 25664], [25641, 25642, 25665], [25642, 25666, 25665], [25642, 25643, 25667], [25642, 25667, 25666], [25643, 25644, 25667], [25644, 25668, 25667], [25644, 25645, 25669], [25644, 25669, 25668], [25645, 25646, 25669], [25646, 25670, 25669], [25646, 25647, 25671], [25646, 25671, 25670], [25647, 25648, 25671], [25648, 25672, 25671], [25648, 25649, 25673], [25648, 25673, 25672], [25649, 25650, 25673], [25650, 25674, 25673], [25650, 25651, 25675], [25650, 25675, 25674], [25651, 25652, 25675], [25652, 25676, 25675], [25652, 25653, 25677], [25652, 25677, 25676], [25653, 25654, 25677], [25654, 25678, 25677], [25654, 25655, 25679], [25654, 25679, 25678], [25655, 25656, 25679], [25656, 25680, 25679], [25656, 25657, 25681], [25656, 25681, 25680], [25657, 25658, 25681], [25658, 25682, 25681], [25658, 25659, 25683], [25658, 25683, 25682], [902, 25660, 25684], [902, 25684, 1031], [25660, 25661, 25684], [25661, 25685, 25684], [25661, 25662, 25686], [25661, 25686, 25685], [25662, 25663, 25686], [25663, 25687, 25686], [25663, 25664, 25688], [25663, 25688, 25687], [25664, 25665, 25688], [25665, 25689, 25688], [25665, 25666, 25690], [25665, 25690, 25689], [25666, 25667, 25690], [25667, 25691, 25690], [25667, 25668, 25692], [25667, 25692, 25691], [25668, 25669, 25692], [25669, 25693, 25692], [25669, 25670, 25694], [25669, 25694, 25693], [25670, 25671, 25694], [25671, 25695, 25694], [25671, 25672, 25696], [25671, 25696, 25695], [25672, 25673, 25696], [25673, 25697, 25696], [25673, 25674, 25698], [25673, 25698, 25697], [25674, 25675, 
25698], [25675, 25699, 25698], [25675, 25676, 25700], [25675, 25700, 25699], [25676, 25677, 25700], [25677, 25701, 25700], [25677, 25678, 25702], [25677, 25702, 25701], [25678, 25679, 25702], [25679, 25703, 25702], [25679, 25680, 25704], [25679, 25704, 25703], [25680, 25681, 25704], [25681, 25705, 25704], [25681, 25682, 25706], [25681, 25706, 25705], [25682, 25683, 25706], [25683, 25707, 25706], [1031, 25684, 1160], [25684, 25708, 1160], [25684, 25685, 25709], [25684, 25709, 25708], [25685, 25686, 25709], [25686, 25710, 25709], [25686, 25687, 25711], [25686, 25711, 25710], [25687, 25688, 25711], [25688, 25712, 25711], [25688, 25689, 25713], [25688, 25713, 25712], [25689, 25690, 25713], [25690, 25714, 25713], [25690, 25691, 25715], [25690, 25715, 25714], [25691, 25692, 25715], [25692, 25716, 25715], [25692, 25693, 25717], [25692, 25717, 25716], [25693, 25694, 25717], [25694, 25718, 25717], [25694, 25695, 25719], [25694, 25719, 25718], [25695, 25696, 25719], [25696, 25720, 25719], [25696, 25697, 25721], [25696, 25721, 25720], [25697, 25698, 25721], [25698, 25722, 25721], [25698, 25699, 25723], [25698, 25723, 25722], [25699, 25700, 25723], [25700, 25724, 25723], [25700, 25701, 25725], [25700, 25725, 25724], [25701, 25702, 25725], [25702, 25726, 25725], [25702, 25703, 25727], [25702, 25727, 25726], [25703, 25704, 25727], [25704, 25728, 25727], [25704, 25705, 25729], [25704, 25729, 25728], [25705, 25706, 25729], [25706, 25730, 25729], [25706, 25707, 25731], [25706, 25731, 25730], [1160, 25708, 25732], [1160, 25732, 1289], [25708, 25709, 25732], [25709, 25733, 25732], [25709, 25710, 25734], [25709, 25734, 25733], [25710, 25711, 25734], [25711, 25735, 25734], [25711, 25712, 25736], [25711, 25736, 25735], [25712, 25713, 25736], [25713, 25737, 25736], [25713, 25714, 25738], [25713, 25738, 25737], [25714, 25715, 25738], [25715, 25739, 25738], [25715, 25716, 25740], [25715, 25740, 25739], [25716, 25717, 25740], [25717, 25741, 25740], [25717, 25718, 25742], [25717, 25742, 25741], [25718, 25719, 25742], [25719, 25743, 25742], [25719, 25720, 25744], [25719, 25744, 25743], [25720, 25721, 25744], [25721, 25745, 25744], [25721, 25722, 25746], [25721, 25746, 25745], [25722, 25723, 25746], [25723, 25747, 25746], [25723, 25724, 25748], [25723, 25748, 25747], [25724, 25725, 25748], [25725, 25749, 25748], [25725, 25726, 25750], [25725, 25750, 25749], [25726, 25727, 25750], [25727, 25751, 25750], [25727, 25728, 25752], [25727, 25752, 25751], [25728, 25729, 25752], [25729, 25753, 25752], [25729, 25730, 25754], [25729, 25754, 25753], [25730, 25731, 25754], [25731, 25755, 25754], [1289, 25732, 1418], [25732, 25756, 1418], [25732, 25733, 25757], [25732, 25757, 25756], [25733, 25734, 25757], [25734, 25758, 25757], [25734, 25735, 25759], [25734, 25759, 25758], [25735, 25736, 25759], [25736, 25760, 25759], [25736, 25737, 25761], [25736, 25761, 25760], [25737, 25738, 25761], [25738, 25762, 25761], [25738, 25739, 25763], [25738, 25763, 25762], [25739, 25740, 25763], [25740, 25764, 25763], [25740, 25741, 25765], [25740, 25765, 25764], [25741, 25742, 25765], [25742, 25766, 25765], [25742, 25743, 25767], [25742, 25767, 25766], [25743, 25744, 25767], [25744, 25768, 25767], [25744, 25745, 25769], [25744, 25769, 25768], [25745, 25746, 25769], [25746, 25770, 25769], [25746, 25747, 25771], [25746, 25771, 25770], [25747, 25748, 25771], [25748, 25772, 25771], [25748, 25749, 25773], [25748, 25773, 25772], [25749, 25750, 25773], [25750, 25774, 25773], [25750, 25751, 25775], [25750, 25775, 25774], [25751, 25752, 25775], [25752, 
25776, 25775], [25752, 25753, 25777], [25752, 25777, 25776], [25753, 25754, 25777], [25754, 25778, 25777], [25754, 25755, 25779], [25754, 25779, 25778], [1418, 25756, 25780], [1418, 25780, 1547], [25756, 25757, 25780], [25757, 25781, 25780], [25757, 25758, 25782], [25757, 25782, 25781], [25758, 25759, 25782], [25759, 25783, 25782], [25759, 25760, 25784], [25759, 25784, 25783], [25760, 25761, 25784], [25761, 25785, 25784], [25761, 25762, 25786], [25761, 25786, 25785], [25762, 25763, 25786], [25763, 25787, 25786], [25763, 25764, 25788], [25763, 25788, 25787], [25764, 25765, 25788], [25765, 25789, 25788], [25765, 25766, 25790], [25765, 25790, 25789], [25766, 25767, 25790], [25767, 25791, 25790], [25767, 25768, 25792], [25767, 25792, 25791], [25768, 25769, 25792], [25769, 25793, 25792], [25769, 25770, 25794], [25769, 25794, 25793], [25770, 25771, 25794], [25771, 25795, 25794], [25771, 25772, 25796], [25771, 25796, 25795], [25772, 25773, 25796], [25773, 25797, 25796], [25773, 25774, 25798], [25773, 25798, 25797], [25774, 25775, 25798], [25775, 25799, 25798], [25775, 25776, 25800], [25775, 25800, 25799], [25776, 25777, 25800], [25777, 25801, 25800], [25777, 25778, 25802], [25777, 25802, 25801], [25778, 25779, 25802], [25779, 25803, 25802], [1547, 25780, 1676], [25780, 25804, 1676], [25780, 25781, 25805], [25780, 25805, 25804], [25781, 25782, 25805], [25782, 25806, 25805], [25782, 25783, 25807], [25782, 25807, 25806], [25783, 25784, 25807], [25784, 25808, 25807], [25784, 25785, 25809], [25784, 25809, 25808], [25785, 25786, 25809], [25786, 25810, 25809], [25786, 25787, 25811], [25786, 25811, 25810], [25787, 25788, 25811], [25788, 25812, 25811], [25788, 25789, 25813], [25788, 25813, 25812], [25789, 25790, 25813], [25790, 25814, 25813], [25790, 25791, 25815], [25790, 25815, 25814], [25791, 25792, 25815], [25792, 25816, 25815], [25792, 25793, 25817], [25792, 25817, 25816], [25793, 25794, 25817], [25794, 25818, 25817], [25794, 25795, 25819], [25794, 25819, 25818], [25795, 25796, 25819], [25796, 25820, 25819], [25796, 25797, 25821], [25796, 25821, 25820], [25797, 25798, 25821], [25798, 25822, 25821], [25798, 25799, 25823], [25798, 25823, 25822], [25799, 25800, 25823], [25800, 25824, 25823], [25800, 25801, 25825], [25800, 25825, 25824], [25801, 25802, 25825], [25802, 25826, 25825], [25802, 25803, 25827], [25802, 25827, 25826], [1676, 25804, 25828], [1676, 25828, 1805], [25804, 25805, 25828], [25805, 25829, 25828], [25805, 25806, 25830], [25805, 25830, 25829], [25806, 25807, 25830], [25807, 25831, 25830], [25807, 25808, 25832], [25807, 25832, 25831], [25808, 25809, 25832], [25809, 25833, 25832], [25809, 25810, 25834], [25809, 25834, 25833], [25810, 25811, 25834], [25811, 25835, 25834], [25811, 25812, 25836], [25811, 25836, 25835], [25812, 25813, 25836], [25813, 25837, 25836], [25813, 25814, 25838], [25813, 25838, 25837], [25814, 25815, 25838], [25815, 25839, 25838], [25815, 25816, 25840], [25815, 25840, 25839], [25816, 25817, 25840], [25817, 25841, 25840], [25817, 25818, 25842], [25817, 25842, 25841], [25818, 25819, 25842], [25819, 25843, 25842], [25819, 25820, 25844], [25819, 25844, 25843], [25820, 25821, 25844], [25821, 25845, 25844], [25821, 25822, 25846], [25821, 25846, 25845], [25822, 25823, 25846], [25823, 25847, 25846], [25823, 25824, 25848], [25823, 25848, 25847], [25824, 25825, 25848], [25825, 25849, 25848], [25825, 25826, 25850], [25825, 25850, 25849], [25826, 25827, 25850], [25827, 25851, 25850], [1805, 25828, 1934], [25828, 25852, 1934], [25828, 25829, 25853], [25828, 25853, 25852], [25829, 
25830, 25853], [25830, 25854, 25853], [25830, 25831, 25855], [25830, 25855, 25854], [25831, 25832, 25855], [25832, 25856, 25855], [25832, 25833, 25857], [25832, 25857, 25856], [25833, 25834, 25857], [25834, 25858, 25857], [25834, 25835, 25859], [25834, 25859, 25858], [25835, 25836, 25859], [25836, 25860, 25859], [25836, 25837, 25861], [25836, 25861, 25860], [25837, 25838, 25861], [25838, 25862, 25861], [25838, 25839, 25863], [25838, 25863, 25862], [25839, 25840, 25863], [25840, 25864, 25863], [25840, 25841, 25865], [25840, 25865, 25864], [25841, 25842, 25865], [25842, 25866, 25865], [25842, 25843, 25867], [25842, 25867, 25866], [25843, 25844, 25867], [25844, 25868, 25867], [25844, 25845, 25869], [25844, 25869, 25868], [25845, 25846, 25869], [25846, 25870, 25869], [25846, 25847, 25871], [25846, 25871, 25870], [25847, 25848, 25871], [25848, 25872, 25871], [25848, 25849, 25873], [25848, 25873, 25872], [25849, 25850, 25873], [25850, 25874, 25873], [25850, 25851, 25875], [25850, 25875, 25874], [1934, 25852, 25876], [1934, 25876, 2063], [25852, 25853, 25876], [25853, 25877, 25876], [25853, 25854, 25878], [25853, 25878, 25877], [25854, 25855, 25878], [25855, 25879, 25878], [25855, 25856, 25880], [25855, 25880, 25879], [25856, 25857, 25880], [25857, 25881, 25880], [25857, 25858, 25882], [25857, 25882, 25881], [25858, 25859, 25882], [25859, 25883, 25882], [25859, 25860, 25884], [25859, 25884, 25883], [25860, 25861, 25884], [25861, 25885, 25884], [25861, 25862, 25886], [25861, 25886, 25885], [25862, 25863, 25886], [25863, 25887, 25886], [25863, 25864, 25888], [25863, 25888, 25887], [25864, 25865, 25888], [25865, 25889, 25888], [25865, 25866, 25890], [25865, 25890, 25889], [25866, 25867, 25890], [25867, 25891, 25890], [25867, 25868, 25892], [25867, 25892, 25891], [25868, 25869, 25892], [25869, 25893, 25892], [25869, 25870, 25894], [25869, 25894, 25893], [25870, 25871, 25894], [25871, 25895, 25894], [25871, 25872, 25896], [25871, 25896, 25895], [25872, 25873, 25896], [25873, 25897, 25896], [25873, 25874, 25898], [25873, 25898, 25897], [25874, 25875, 25898], [25875, 25899, 25898], [2063, 25876, 2192], [25876, 25900, 2192], [25876, 25877, 25901], [25876, 25901, 25900], [25877, 25878, 25901], [25878, 25902, 25901], [25878, 25879, 25903], [25878, 25903, 25902], [25879, 25880, 25903], [25880, 25904, 25903], [25880, 25881, 25905], [25880, 25905, 25904], [25881, 25882, 25905], [25882, 25906, 25905], [25882, 25883, 25907], [25882, 25907, 25906], [25883, 25884, 25907], [25884, 25908, 25907], [25884, 25885, 25909], [25884, 25909, 25908], [25885, 25886, 25909], [25886, 25910, 25909], [25886, 25887, 25911], [25886, 25911, 25910], [25887, 25888, 25911], [25888, 25912, 25911], [25888, 25889, 25913], [25888, 25913, 25912], [25889, 25890, 25913], [25890, 25914, 25913], [25890, 25891, 25915], [25890, 25915, 25914], [25891, 25892, 25915], [25892, 25916, 25915], [25892, 25893, 25917], [25892, 25917, 25916], [25893, 25894, 25917], [25894, 25918, 25917], [25894, 25895, 25919], [25894, 25919, 25918], [25895, 25896, 25919], [25896, 25920, 25919], [25896, 25897, 25921], [25896, 25921, 25920], [25897, 25898, 25921], [25898, 25922, 25921], [25898, 25899, 25923], [25898, 25923, 25922], [2192, 25900, 25924], [2192, 25924, 2321], [25900, 25901, 25924], [25901, 25925, 25924], [25901, 25902, 25926], [25901, 25926, 25925], [25902, 25903, 25926], [25903, 25927, 25926], [25903, 25904, 25928], [25903, 25928, 25927], [25904, 25905, 25928], [25905, 25929, 25928], [25905, 25906, 25930], [25905, 25930, 25929], [25906, 25907, 25930], 
[25907, 25931, 25930], [25907, 25908, 25932], [25907, 25932, 25931], [25908, 25909, 25932], [25909, 25933, 25932], [25909, 25910, 25934], [25909, 25934, 25933], [25910, 25911, 25934], [25911, 25935, 25934], [25911, 25912, 25936], [25911, 25936, 25935], [25912, 25913, 25936], [25913, 25937, 25936], [25913, 25914, 25938], [25913, 25938, 25937], [25914, 25915, 25938], [25915, 25939, 25938], [25915, 25916, 25940], [25915, 25940, 25939], [25916, 25917, 25940], [25917, 25941, 25940], [25917, 25918, 25942], [25917, 25942, 25941], [25918, 25919, 25942], [25919, 25943, 25942], [25919, 25920, 25944], [25919, 25944, 25943], [25920, 25921, 25944], [25921, 25945, 25944], [25921, 25922, 25946], [25921, 25946, 25945], [25922, 25923, 25946], [25923, 25947, 25946], [2321, 25924, 2450], [25924, 25948, 2450], [25924, 25925, 25949], [25924, 25949, 25948], [25925, 25926, 25949], [25926, 25950, 25949], [25926, 25927, 25951], [25926, 25951, 25950], [25927, 25928, 25951], [25928, 25952, 25951], [25928, 25929, 25953], [25928, 25953, 25952], [25929, 25930, 25953], [25930, 25954, 25953], [25930, 25931, 25955], [25930, 25955, 25954], [25931, 25932, 25955], [25932, 25956, 25955], [25932, 25933, 25957], [25932, 25957, 25956], [25933, 25934, 25957], [25934, 25958, 25957], [25934, 25935, 25959], [25934, 25959, 25958], [25935, 25936, 25959], [25936, 25960, 25959], [25936, 25937, 25961], [25936, 25961, 25960], [25937, 25938, 25961], [25938, 25962, 25961], [25938, 25939, 25963], [25938, 25963, 25962], [25939, 25940, 25963], [25940, 25964, 25963], [25940, 25941, 25965], [25940, 25965, 25964], [25941, 25942, 25965], [25942, 25966, 25965], [25942, 25943, 25967], [25942, 25967, 25966], [25943, 25944, 25967], [25944, 25968, 25967], [25944, 25945, 25969], [25944, 25969, 25968], [25945, 25946, 25969], [25946, 25970, 25969], [25946, 25947, 25971], [25946, 25971, 25970], [2450, 25948, 25972], [2450, 25972, 2579], [25948, 25949, 25972], [25949, 25973, 25972], [25949, 25950, 25974], [25949, 25974, 25973], [25950, 25951, 25974], [25951, 25975, 25974], [25951, 25952, 25976], [25951, 25976, 25975], [25952, 25953, 25976], [25953, 25977, 25976], [25953, 25954, 25978], [25953, 25978, 25977], [25954, 25955, 25978], [25955, 25979, 25978], [25955, 25956, 25980], [25955, 25980, 25979], [25956, 25957, 25980], [25957, 25981, 25980], [25957, 25958, 25982], [25957, 25982, 25981], [25958, 25959, 25982], [25959, 25983, 25982], [25959, 25960, 25984], [25959, 25984, 25983], [25960, 25961, 25984], [25961, 25985, 25984], [25961, 25962, 25986], [25961, 25986, 25985], [25962, 25963, 25986], [25963, 25987, 25986], [25963, 25964, 25988], [25963, 25988, 25987], [25964, 25965, 25988], [25965, 25989, 25988], [25965, 25966, 25990], [25965, 25990, 25989], [25966, 25967, 25990], [25967, 25991, 25990], [25967, 25968, 25992], [25967, 25992, 25991], [25968, 25969, 25992], [25969, 25993, 25992], [25969, 25970, 25994], [25969, 25994, 25993], [25970, 25971, 25994], [25971, 25995, 25994], [2579, 25972, 2708], [25972, 25996, 2708], [25972, 25973, 25997], [25972, 25997, 25996], [25973, 25974, 25997], [25974, 25998, 25997], [25974, 25975, 25999], [25974, 25999, 25998], [25975, 25976, 25999], [25976, 26000, 25999], [25976, 25977, 26001], [25976, 26001, 26000], [25977, 25978, 26001], [25978, 26002, 26001], [25978, 25979, 26003], [25978, 26003, 26002], [25979, 25980, 26003], [25980, 26004, 26003], [25980, 25981, 26005], [25980, 26005, 26004], [25981, 25982, 26005], [25982, 26006, 26005], [25982, 25983, 26007], [25982, 26007, 26006], [25983, 25984, 26007], [25984, 26008, 
26007], [25984, 25985, 26009], [25984, 26009, 26008], [25985, 25986, 26009], [25986, 26010, 26009], [25986, 25987, 26011], [25986, 26011, 26010], [25987, 25988, 26011], [25988, 26012, 26011], [25988, 25989, 26013], [25988, 26013, 26012], [25989, 25990, 26013], [25990, 26014, 26013], [25990, 25991, 26015], [25990, 26015, 26014], [25991, 25992, 26015], [25992, 26016, 26015], [25992, 25993, 26017], [25992, 26017, 26016], [25993, 25994, 26017], [25994, 26018, 26017], [25994, 25995, 26019], [25994, 26019, 26018], [2708, 25996, 26020], [2708, 26020, 2837], [25996, 25997, 26020], [25997, 26021, 26020], [25997, 25998, 26022], [25997, 26022, 26021], [25998, 25999, 26022], [25999, 26023, 26022], [25999, 26000, 26024], [25999, 26024, 26023], [26000, 26001, 26024], [26001, 26025, 26024], [26001, 26002, 26026], [26001, 26026, 26025], [26002, 26003, 26026], [26003, 26027, 26026], [26003, 26004, 26028], [26003, 26028, 26027], [26004, 26005, 26028], [26005, 26029, 26028], [26005, 26006, 26030], [26005, 26030, 26029], [26006, 26007, 26030], [26007, 26031, 26030], [26007, 26008, 26032], [26007, 26032, 26031], [26008, 26009, 26032], [26009, 26033, 26032], [26009, 26010, 26034], [26009, 26034, 26033], [26010, 26011, 26034], [26011, 26035, 26034], [26011, 26012, 26036], [26011, 26036, 26035], [26012, 26013, 26036], [26013, 26037, 26036], [26013, 26014, 26038], [26013, 26038, 26037], [26014, 26015, 26038], [26015, 26039, 26038], [26015, 26016, 26040], [26015, 26040, 26039], [26016, 26017, 26040], [26017, 26041, 26040], [26017, 26018, 26042], [26017, 26042, 26041], [26018, 26019, 26042], [26019, 26043, 26042], [2837, 26020, 2966], [26020, 26044, 2966], [26020, 26021, 26045], [26020, 26045, 26044], [26021, 26022, 26045], [26022, 26046, 26045], [26022, 26023, 26047], [26022, 26047, 26046], [26023, 26024, 26047], [26024, 26048, 26047], [26024, 26025, 26049], [26024, 26049, 26048], [26025, 26026, 26049], [26026, 26050, 26049], [26026, 26027, 26051], [26026, 26051, 26050], [26027, 26028, 26051], [26028, 26052, 26051], [26028, 26029, 26053], [26028, 26053, 26052], [26029, 26030, 26053], [26030, 26054, 26053], [26030, 26031, 26055], [26030, 26055, 26054], [26031, 26032, 26055], [26032, 26056, 26055], [26032, 26033, 26057], [26032, 26057, 26056], [26033, 26034, 26057], [26034, 26058, 26057], [26034, 26035, 26059], [26034, 26059, 26058], [26035, 26036, 26059], [26036, 26060, 26059], [26036, 26037, 26061], [26036, 26061, 26060], [26037, 26038, 26061], [26038, 26062, 26061], [26038, 26039, 26063], [26038, 26063, 26062], [26039, 26040, 26063], [26040, 26064, 26063], [26040, 26041, 26065], [26040, 26065, 26064], [26041, 26042, 26065], [26042, 26066, 26065], [26042, 26043, 26067], [26042, 26067, 26066], [2966, 26044, 26068], [2966, 26068, 3095], [26044, 26045, 26068], [26045, 26069, 26068], [26045, 26046, 26070], [26045, 26070, 26069], [26046, 26047, 26070], [26047, 26071, 26070], [26047, 26048, 26072], [26047, 26072, 26071], [26048, 26049, 26072], [26049, 26073, 26072], [26049, 26050, 26074], [26049, 26074, 26073], [26050, 26051, 26074], [26051, 26075, 26074], [26051, 26052, 26076], [26051, 26076, 26075], [26052, 26053, 26076], [26053, 26077, 26076], [26053, 26054, 26078], [26053, 26078, 26077], [26054, 26055, 26078], [26055, 26079, 26078], [26055, 26056, 26080], [26055, 26080, 26079], [26056, 26057, 26080], [26057, 26081, 26080], [26057, 26058, 26082], [26057, 26082, 26081], [26058, 26059, 26082], [26059, 26083, 26082], [26059, 26060, 26084], [26059, 26084, 26083], [26060, 26061, 26084], [26061, 26085, 26084], [26061, 
26062, 26086], [26061, 26086, 26085], [26062, 26063, 26086], [26063, 26087, 26086], [26063, 26064, 26088], [26063, 26088, 26087], [26064, 26065, 26088], [26065, 26089, 26088], [26065, 26066, 26090], [26065, 26090, 26089], [26066, 26067, 26090], [26067, 26091, 26090], [3095, 26068, 3224], [26068, 26092, 3224], [26068, 26069, 26093], [26068, 26093, 26092], [26069, 26070, 26093], [26070, 26094, 26093], [26070, 26071, 26095], [26070, 26095, 26094], [26071, 26072, 26095], [26072, 26096, 26095], [26072, 26073, 26097], [26072, 26097, 26096], [26073, 26074, 26097], [26074, 26098, 26097], [26074, 26075, 26099], [26074, 26099, 26098], [26075, 26076, 26099], [26076, 26100, 26099], [26076, 26077, 26101], [26076, 26101, 26100], [26077, 26078, 26101], [26078, 26102, 26101], [26078, 26079, 26103], [26078, 26103, 26102], [26079, 26080, 26103], [26080, 26104, 26103], [26080, 26081, 26105], [26080, 26105, 26104], [26081, 26082, 26105], [26082, 26106, 26105], [26082, 26083, 26107], [26082, 26107, 26106], [26083, 26084, 26107], [26084, 26108, 26107], [26084, 26085, 26109], [26084, 26109, 26108], [26085, 26086, 26109], [26086, 26110, 26109], [26086, 26087, 26111], [26086, 26111, 26110], [26087, 26088, 26111], [26088, 26112, 26111], [26088, 26089, 26113], [26088, 26113, 26112], [26089, 26090, 26113], [26090, 26114, 26113], [26090, 26091, 26115], [26090, 26115, 26114], [3224, 26092, 26116], [3224, 26116, 3353], [26092, 26093, 26116], [26093, 26117, 26116], [26093, 26094, 26118], [26093, 26118, 26117], [26094, 26095, 26118], [26095, 26119, 26118], [26095, 26096, 26120], [26095, 26120, 26119], [26096, 26097, 26120], [26097, 26121, 26120], [26097, 26098, 26122], [26097, 26122, 26121], [26098, 26099, 26122], [26099, 26123, 26122], [26099, 26100, 26124], [26099, 26124, 26123], [26100, 26101, 26124], [26101, 26125, 26124], [26101, 26102, 26126], [26101, 26126, 26125], [26102, 26103, 26126], [26103, 26127, 26126], [26103, 26104, 26128], [26103, 26128, 26127], [26104, 26105, 26128], [26105, 26129, 26128], [26105, 26106, 26130], [26105, 26130, 26129], [26106, 26107, 26130], [26107, 26131, 26130], [26107, 26108, 26132], [26107, 26132, 26131], [26108, 26109, 26132], [26109, 26133, 26132], [26109, 26110, 26134], [26109, 26134, 26133], [26110, 26111, 26134], [26111, 26135, 26134], [26111, 26112, 26136], [26111, 26136, 26135], [26112, 26113, 26136], [26113, 26137, 26136], [26113, 26114, 26138], [26113, 26138, 26137], [26114, 26115, 26138], [26115, 26139, 26138], [3353, 26116, 3482], [26116, 26140, 3482], [26116, 26117, 26141], [26116, 26141, 26140], [26117, 26118, 26141], [26118, 26142, 26141], [26118, 26119, 26143], [26118, 26143, 26142], [26119, 26120, 26143], [26120, 26144, 26143], [26120, 26121, 26145], [26120, 26145, 26144], [26121, 26122, 26145], [26122, 26146, 26145], [26122, 26123, 26147], [26122, 26147, 26146], [26123, 26124, 26147], [26124, 26148, 26147], [26124, 26125, 26149], [26124, 26149, 26148], [26125, 26126, 26149], [26126, 26150, 26149], [26126, 26127, 26151], [26126, 26151, 26150], [26127, 26128, 26151], [26128, 26152, 26151], [26128, 26129, 26153], [26128, 26153, 26152], [26129, 26130, 26153], [26130, 26154, 26153], [26130, 26131, 26155], [26130, 26155, 26154], [26131, 26132, 26155], [26132, 26156, 26155], [26132, 26133, 26157], [26132, 26157, 26156], [26133, 26134, 26157], [26134, 26158, 26157], [26134, 26135, 26159], [26134, 26159, 26158], [26135, 26136, 26159], [26136, 26160, 26159], [26136, 26137, 26161], [26136, 26161, 26160], [26137, 26138, 26161], [26138, 26162, 26161], [26138, 26139, 26163], 
[26138, 26163, 26162], [3482, 26140, 26164], [3482, 26164, 3611], [26140, 26141, 26164], [26141, 26165, 26164], [26141, 26142, 26166], [26141, 26166, 26165], [26142, 26143, 26166], [26143, 26167, 26166], [26143, 26144, 26168], [26143, 26168, 26167], [26144, 26145, 26168], [26145, 26169, 26168], [26145, 26146, 26170], [26145, 26170, 26169], [26146, 26147, 26170], [26147, 26171, 26170], [26147, 26148, 26172], [26147, 26172, 26171], [26148, 26149, 26172], [26149, 26173, 26172], [26149, 26150, 26174], [26149, 26174, 26173], [26150, 26151, 26174], [26151, 26175, 26174], [26151, 26152, 26176], [26151, 26176, 26175], [26152, 26153, 26176], [26153, 26177, 26176], [26153, 26154, 26178], [26153, 26178, 26177], [26154, 26155, 26178], [26155, 26179, 26178], [26155, 26156, 26180], [26155, 26180, 26179], [26156, 26157, 26180], [26157, 26181, 26180], [26157, 26158, 26182], [26157, 26182, 26181], [26158, 26159, 26182], [26159, 26183, 26182], [26159, 26160, 26184], [26159, 26184, 26183], [26160, 26161, 26184], [26161, 26185, 26184], [26161, 26162, 26186], [26161, 26186, 26185], [26162, 26163, 26186], [26163, 26187, 26186], [3611, 26164, 3740], [26164, 26188, 3740], [26164, 26165, 26189], [26164, 26189, 26188], [26165, 26166, 26189], [26166, 26190, 26189], [26166, 26167, 26191], [26166, 26191, 26190], [26167, 26168, 26191], [26168, 26192, 26191], [26168, 26169, 26193], [26168, 26193, 26192], [26169, 26170, 26193], [26170, 26194, 26193], [26170, 26171, 26195], [26170, 26195, 26194], [26171, 26172, 26195], [26172, 26196, 26195], [26172, 26173, 26197], [26172, 26197, 26196], [26173, 26174, 26197], [26174, 26198, 26197], [26174, 26175, 26199], [26174, 26199, 26198], [26175, 26176, 26199], [26176, 26200, 26199], [26176, 26177, 26201], [26176, 26201, 26200], [26177, 26178, 26201], [26178, 26202, 26201], [26178, 26179, 26203], [26178, 26203, 26202], [26179, 26180, 26203], [26180, 26204, 26203], [26180, 26181, 26205], [26180, 26205, 26204], [26181, 26182, 26205], [26182, 26206, 26205], [26182, 26183, 26207], [26182, 26207, 26206], [26183, 26184, 26207], [26184, 26208, 26207], [26184, 26185, 26209], [26184, 26209, 26208], [26185, 26186, 26209], [26186, 26210, 26209], [26186, 26187, 26211], [26186, 26211, 26210], [3740, 26188, 26212], [3740, 26212, 3869], [26188, 26189, 26212], [26189, 26213, 26212], [26189, 26190, 26214], [26189, 26214, 26213], [26190, 26191, 26214], [26191, 26215, 26214], [26191, 26192, 26216], [26191, 26216, 26215], [26192, 26193, 26216], [26193, 26217, 26216], [26193, 26194, 26218], [26193, 26218, 26217], [26194, 26195, 26218], [26195, 26219, 26218], [26195, 26196, 26220], [26195, 26220, 26219], [26196, 26197, 26220], [26197, 26221, 26220], [26197, 26198, 26222], [26197, 26222, 26221], [26198, 26199, 26222], [26199, 26223, 26222], [26199, 26200, 26224], [26199, 26224, 26223], [26200, 26201, 26224], [26201, 26225, 26224], [26201, 26202, 26226], [26201, 26226, 26225], [26202, 26203, 26226], [26203, 26227, 26226], [26203, 26204, 26228], [26203, 26228, 26227], [26204, 26205, 26228], [26205, 26229, 26228], [26205, 26206, 26230], [26205, 26230, 26229], [26206, 26207, 26230], [26207, 26231, 26230], [26207, 26208, 26232], [26207, 26232, 26231], [26208, 26209, 26232], [26209, 26233, 26232], [26209, 26210, 26234], [26209, 26234, 26233], [26210, 26211, 26234], [26211, 26235, 26234], [3869, 26212, 3998], [26212, 26236, 3998], [26212, 26213, 26237], [26212, 26237, 26236], [26213, 26214, 26237], [26214, 26238, 26237], [26214, 26215, 26239], [26214, 26239, 26238], [26215, 26216, 26239], [26216, 26240, 26239], 
[26216, 26217, 26241], [26216, 26241, 26240], [26217, 26218, 26241], [26218, 26242, 26241], [26218, 26219, 26243], [26218, 26243, 26242], [26219, 26220, 26243], [26220, 26244, 26243], [26220, 26221, 26245], [26220, 26245, 26244], [26221, 26222, 26245], [26222, 26246, 26245], [26222, 26223, 26247], [26222, 26247, 26246], [26223, 26224, 26247], [26224, 26248, 26247], [26224, 26225, 26249], [26224, 26249, 26248], [26225, 26226, 26249], [26226, 26250, 26249], [26226, 26227, 26251], [26226, 26251, 26250], [26227, 26228, 26251], [26228, 26252, 26251], [26228, 26229, 26253], [26228, 26253, 26252], [26229, 26230, 26253], [26230, 26254, 26253], [26230, 26231, 26255], [26230, 26255, 26254], [26231, 26232, 26255], [26232, 26256, 26255], [26232, 26233, 26257], [26232, 26257, 26256], [26233, 26234, 26257], [26234, 26258, 26257], [26234, 26235, 26259], [26234, 26259, 26258], [3998, 26236, 26260], [3998, 26260, 4127], [26236, 26237, 26260], [26237, 26261, 26260], [26237, 26238, 26262], [26237, 26262, 26261], [26238, 26239, 26262], [26239, 26263, 26262], [26239, 26240, 26264], [26239, 26264, 26263], [26240, 26241, 26264], [26241, 26265, 26264], [26241, 26242, 26266], [26241, 26266, 26265], [26242, 26243, 26266], [26243, 26267, 26266], [26243, 26244, 26268], [26243, 26268, 26267], [26244, 26245, 26268], [26245, 26269, 26268], [26245, 26246, 26270], [26245, 26270, 26269], [26246, 26247, 26270], [26247, 26271, 26270], [26247, 26248, 26272], [26247, 26272, 26271], [26248, 26249, 26272], [26249, 26273, 26272], [26249, 26250, 26274], [26249, 26274, 26273], [26250, 26251, 26274], [26251, 26275, 26274], [26251, 26252, 26276], [26251, 26276, 26275], [26252, 26253, 26276], [26253, 26277, 26276], [26253, 26254, 26278], [26253, 26278, 26277], [26254, 26255, 26278], [26255, 26279, 26278], [26255, 26256, 26280], [26255, 26280, 26279], [26256, 26257, 26280], [26257, 26281, 26280], [26257, 26258, 26282], [26257, 26282, 26281], [26258, 26259, 26282], [26259, 26283, 26282], [4127, 26260, 4256], [26260, 26284, 4256], [26260, 26261, 26285], [26260, 26285, 26284], [26261, 26262, 26285], [26262, 26286, 26285], [26262, 26263, 26287], [26262, 26287, 26286], [26263, 26264, 26287], [26264, 26288, 26287], [26264, 26265, 26289], [26264, 26289, 26288], [26265, 26266, 26289], [26266, 26290, 26289], [26266, 26267, 26291], [26266, 26291, 26290], [26267, 26268, 26291], [26268, 26292, 26291], [26268, 26269, 26293], [26268, 26293, 26292], [26269, 26270, 26293], [26270, 26294, 26293], [26270, 26271, 26295], [26270, 26295, 26294], [26271, 26272, 26295], [26272, 26296, 26295], [26272, 26273, 26297], [26272, 26297, 26296], [26273, 26274, 26297], [26274, 26298, 26297], [26274, 26275, 26299], [26274, 26299, 26298], [26275, 26276, 26299], [26276, 26300, 26299], [26276, 26277, 26301], [26276, 26301, 26300], [26277, 26278, 26301], [26278, 26302, 26301], [26278, 26279, 26303], [26278, 26303, 26302], [26279, 26280, 26303], [26280, 26304, 26303], [26280, 26281, 26305], [26280, 26305, 26304], [26281, 26282, 26305], [26282, 26306, 26305], [26282, 26283, 26307], [26282, 26307, 26306], [4256, 26284, 26308], [4256, 26308, 4385], [26284, 26285, 26308], [26285, 26309, 26308], [26285, 26286, 26310], [26285, 26310, 26309], [26286, 26287, 26310], [26287, 26311, 26310], [26287, 26288, 26312], [26287, 26312, 26311], [26288, 26289, 26312], [26289, 26313, 26312], [26289, 26290, 26314], [26289, 26314, 26313], [26290, 26291, 26314], [26291, 26315, 26314], [26291, 26292, 26316], [26291, 26316, 26315], [26292, 26293, 26316], [26293, 26317, 26316], [26293, 26294, 
26318], [26293, 26318, 26317], [26294, 26295, 26318], [26295, 26319, 26318], [26295, 26296, 26320], [26295, 26320, 26319], [26296, 26297, 26320], [26297, 26321, 26320], [26297, 26298, 26322], [26297, 26322, 26321], [26298, 26299, 26322], [26299, 26323, 26322], [26299, 26300, 26324], [26299, 26324, 26323], [26300, 26301, 26324], [26301, 26325, 26324], [26301, 26302, 26326], [26301, 26326, 26325], [26302, 26303, 26326], [26303, 26327, 26326], [26303, 26304, 26328], [26303, 26328, 26327], [26304, 26305, 26328], [26305, 26329, 26328], [26305, 26306, 26330], [26305, 26330, 26329], [26306, 26307, 26330], [26307, 26331, 26330], [4385, 26308, 4514], [26308, 26332, 4514], [26308, 26309, 26333], [26308, 26333, 26332], [26309, 26310, 26333], [26310, 26334, 26333], [26310, 26311, 26335], [26310, 26335, 26334], [26311, 26312, 26335], [26312, 26336, 26335], [26312, 26313, 26337], [26312, 26337, 26336], [26313, 26314, 26337], [26314, 26338, 26337], [26314, 26315, 26339], [26314, 26339, 26338], [26315, 26316, 26339], [26316, 26340, 26339], [26316, 26317, 26341], [26316, 26341, 26340], [26317, 26318, 26341], [26318, 26342, 26341], [26318, 26319, 26343], [26318, 26343, 26342], [26319, 26320, 26343], [26320, 26344, 26343], [26320, 26321, 26345], [26320, 26345, 26344], [26321, 26322, 26345], [26322, 26346, 26345], [26322, 26323, 26347], [26322, 26347, 26346], [26323, 26324, 26347], [26324, 26348, 26347], [26324, 26325, 26349], [26324, 26349, 26348], [26325, 26326, 26349], [26326, 26350, 26349], [26326, 26327, 26351], [26326, 26351, 26350], [26327, 26328, 26351], [26328, 26352, 26351], [26328, 26329, 26353], [26328, 26353, 26352], [26329, 26330, 26353], [26330, 26354, 26353], [26330, 26331, 26355], [26330, 26355, 26354], [4514, 26332, 26356], [4514, 26356, 4643], [26332, 26333, 26356], [26333, 26357, 26356], [26333, 26334, 26358], [26333, 26358, 26357], [26334, 26335, 26358], [26335, 26359, 26358], [26335, 26336, 26360], [26335, 26360, 26359], [26336, 26337, 26360], [26337, 26361, 26360], [26337, 26338, 26362], [26337, 26362, 26361], [26338, 26339, 26362], [26339, 26363, 26362], [26339, 26340, 26364], [26339, 26364, 26363], [26340, 26341, 26364], [26341, 26365, 26364], [26341, 26342, 26366], [26341, 26366, 26365], [26342, 26343, 26366], [26343, 26367, 26366], [26343, 26344, 26368], [26343, 26368, 26367], [26344, 26345, 26368], [26345, 26369, 26368], [26345, 26346, 26370], [26345, 26370, 26369], [26346, 26347, 26370], [26347, 26371, 26370], [26347, 26348, 26372], [26347, 26372, 26371], [26348, 26349, 26372], [26349, 26373, 26372], [26349, 26350, 26374], [26349, 26374, 26373], [26350, 26351, 26374], [26351, 26375, 26374], [26351, 26352, 26376], [26351, 26376, 26375], [26352, 26353, 26376], [26353, 26377, 26376], [26353, 26354, 26378], [26353, 26378, 26377], [26354, 26355, 26378], [26355, 26379, 26378], [4643, 26356, 4772], [26356, 26380, 4772], [26356, 26357, 26381], [26356, 26381, 26380], [26357, 26358, 26381], [26358, 26382, 26381], [26358, 26359, 26383], [26358, 26383, 26382], [26359, 26360, 26383], [26360, 26384, 26383], [26360, 26361, 26385], [26360, 26385, 26384], [26361, 26362, 26385], [26362, 26386, 26385], [26362, 26363, 26387], [26362, 26387, 26386], [26363, 26364, 26387], [26364, 26388, 26387], [26364, 26365, 26389], [26364, 26389, 26388], [26365, 26366, 26389], [26366, 26390, 26389], [26366, 26367, 26391], [26366, 26391, 26390], [26367, 26368, 26391], [26368, 26392, 26391], [26368, 26369, 26393], [26368, 26393, 26392], [26369, 26370, 26393], [26370, 26394, 26393], [26370, 26371, 26395], [26370, 
26395, 26394], [26371, 26372, 26395], [26372, 26396, 26395], [26372, 26373, 26397], [26372, 26397, 26396], [26373, 26374, 26397], [26374, 26398, 26397], [26374, 26375, 26399], [26374, 26399, 26398], [26375, 26376, 26399], [26376, 26400, 26399], [26376, 26377, 26401], [26376, 26401, 26400], [26377, 26378, 26401], [26378, 26402, 26401], [26378, 26379, 26403], [26378, 26403, 26402], [4772, 26380, 26404], [4772, 26404, 4901], [26380, 26381, 26404], [26381, 26405, 26404], [26381, 26382, 26406], [26381, 26406, 26405], [26382, 26383, 26406], [26383, 26407, 26406], [26383, 26384, 26408], [26383, 26408, 26407], [26384, 26385, 26408], [26385, 26409, 26408], [26385, 26386, 26410], [26385, 26410, 26409], [26386, 26387, 26410], [26387, 26411, 26410], [26387, 26388, 26412], [26387, 26412, 26411], [26388, 26389, 26412], [26389, 26413, 26412], [26389, 26390, 26414], [26389, 26414, 26413], [26390, 26391, 26414], [26391, 26415, 26414], [26391, 26392, 26416], [26391, 26416, 26415], [26392, 26393, 26416], [26393, 26417, 26416], [26393, 26394, 26418], [26393, 26418, 26417], [26394, 26395, 26418], [26395, 26419, 26418], [26395, 26396, 26420], [26395, 26420, 26419], [26396, 26397, 26420], [26397, 26421, 26420], [26397, 26398, 26422], [26397, 26422, 26421], [26398, 26399, 26422], [26399, 26423, 26422], [26399, 26400, 26424], [26399, 26424, 26423], [26400, 26401, 26424], [26401, 26425, 26424], [26401, 26402, 26426], [26401, 26426, 26425], [26402, 26403, 26426], [26403, 26427, 26426], [4901, 26404, 5030], [26404, 26428, 5030], [26404, 26405, 26429], [26404, 26429, 26428], [26405, 26406, 26429], [26406, 26430, 26429], [26406, 26407, 26431], [26406, 26431, 26430], [26407, 26408, 26431], [26408, 26432, 26431], [26408, 26409, 26433], [26408, 26433, 26432], [26409, 26410, 26433], [26410, 26434, 26433], [26410, 26411, 26435], [26410, 26435, 26434], [26411, 26412, 26435], [26412, 26436, 26435], [26412, 26413, 26437], [26412, 26437, 26436], [26413, 26414, 26437], [26414, 26438, 26437], [26414, 26415, 26439], [26414, 26439, 26438], [26415, 26416, 26439], [26416, 26440, 26439], [26416, 26417, 26441], [26416, 26441, 26440], [26417, 26418, 26441], [26418, 26442, 26441], [26418, 26419, 26443], [26418, 26443, 26442], [26419, 26420, 26443], [26420, 26444, 26443], [26420, 26421, 26445], [26420, 26445, 26444], [26421, 26422, 26445], [26422, 26446, 26445], [26422, 26423, 26447], [26422, 26447, 26446], [26423, 26424, 26447], [26424, 26448, 26447], [26424, 26425, 26449], [26424, 26449, 26448], [26425, 26426, 26449], [26426, 26450, 26449], [26426, 26427, 26451], [26426, 26451, 26450], [5030, 26428, 26452], [5030, 26452, 5159], [26428, 26429, 26452], [26429, 26453, 26452], [26429, 26430, 26454], [26429, 26454, 26453], [26430, 26431, 26454], [26431, 26455, 26454], [26431, 26432, 26456], [26431, 26456, 26455], [26432, 26433, 26456], [26433, 26457, 26456], [26433, 26434, 26458], [26433, 26458, 26457], [26434, 26435, 26458], [26435, 26459, 26458], [26435, 26436, 26460], [26435, 26460, 26459], [26436, 26437, 26460], [26437, 26461, 26460], [26437, 26438, 26462], [26437, 26462, 26461], [26438, 26439, 26462], [26439, 26463, 26462], [26439, 26440, 26464], [26439, 26464, 26463], [26440, 26441, 26464], [26441, 26465, 26464], [26441, 26442, 26466], [26441, 26466, 26465], [26442, 26443, 26466], [26443, 26467, 26466], [26443, 26444, 26468], [26443, 26468, 26467], [26444, 26445, 26468], [26445, 26469, 26468], [26445, 26446, 26470], [26445, 26470, 26469], [26446, 26447, 26470], [26447, 26471, 26470], [26447, 26448, 26472], [26447, 26472, 26471], 
[26448, 26449, 26472], [26449, 26473, 26472], [26449, 26450, 26474], [26449, 26474, 26473], [26450, 26451, 26474], [26451, 26475, 26474], [5159, 26452, 5288], [26452, 26476, 5288], [26452, 26453, 26477], [26452, 26477, 26476], [26453, 26454, 26477], [26454, 26478, 26477], [26454, 26455, 26479], [26454, 26479, 26478], [26455, 26456, 26479], [26456, 26480, 26479], [26456, 26457, 26481], [26456, 26481, 26480], [26457, 26458, 26481], [26458, 26482, 26481], [26458, 26459, 26483], [26458, 26483, 26482], [26459, 26460, 26483], [26460, 26484, 26483], [26460, 26461, 26485], [26460, 26485, 26484], [26461, 26462, 26485], [26462, 26486, 26485], [26462, 26463, 26487], [26462, 26487, 26486], [26463, 26464, 26487], [26464, 26488, 26487], [26464, 26465, 26489], [26464, 26489, 26488], [26465, 26466, 26489], [26466, 26490, 26489], [26466, 26467, 26491], [26466, 26491, 26490], [26467, 26468, 26491], [26468, 26492, 26491], [26468, 26469, 26493], [26468, 26493, 26492], [26469, 26470, 26493], [26470, 26494, 26493], [26470, 26471, 26495], [26470, 26495, 26494], [26471, 26472, 26495], [26472, 26496, 26495], [26472, 26473, 26497], [26472, 26497, 26496], [26473, 26474, 26497], [26474, 26498, 26497], [26474, 26475, 26499], [26474, 26499, 26498], [5288, 26476, 26500], [5288, 26500, 5417], [26476, 26477, 26500], [26477, 26501, 26500], [26477, 26478, 26502], [26477, 26502, 26501], [26478, 26479, 26502], [26479, 26503, 26502], [26479, 26480, 26504], [26479, 26504, 26503], [26480, 26481, 26504], [26481, 26505, 26504], [26481, 26482, 26506], [26481, 26506, 26505], [26482, 26483, 26506], [26483, 26507, 26506], [26483, 26484, 26508], [26483, 26508, 26507], [26484, 26485, 26508], [26485, 26509, 26508], [26485, 26486, 26510], [26485, 26510, 26509], [26486, 26487, 26510], [26487, 26511, 26510], [26487, 26488, 26512], [26487, 26512, 26511], [26488, 26489, 26512], [26489, 26513, 26512], [26489, 26490, 26514], [26489, 26514, 26513], [26490, 26491, 26514], [26491, 26515, 26514], [26491, 26492, 26516], [26491, 26516, 26515], [26492, 26493, 26516], [26493, 26517, 26516], [26493, 26494, 26518], [26493, 26518, 26517], [26494, 26495, 26518], [26495, 26519, 26518], [26495, 26496, 26520], [26495, 26520, 26519], [26496, 26497, 26520], [26497, 26521, 26520], [26497, 26498, 26522], [26497, 26522, 26521], [26498, 26499, 26522], [26499, 26523, 26522], [5417, 26500, 5546], [26500, 26524, 5546], [26500, 26501, 26525], [26500, 26525, 26524], [26501, 26502, 26525], [26502, 26526, 26525], [26502, 26503, 26527], [26502, 26527, 26526], [26503, 26504, 26527], [26504, 26528, 26527], [26504, 26505, 26529], [26504, 26529, 26528], [26505, 26506, 26529], [26506, 26530, 26529], [26506, 26507, 26531], [26506, 26531, 26530], [26507, 26508, 26531], [26508, 26532, 26531], [26508, 26509, 26533], [26508, 26533, 26532], [26509, 26510, 26533], [26510, 26534, 26533], [26510, 26511, 26535], [26510, 26535, 26534], [26511, 26512, 26535], [26512, 26536, 26535], [26512, 26513, 26537], [26512, 26537, 26536], [26513, 26514, 26537], [26514, 26538, 26537], [26514, 26515, 26539], [26514, 26539, 26538], [26515, 26516, 26539], [26516, 26540, 26539], [26516, 26517, 26541], [26516, 26541, 26540], [26517, 26518, 26541], [26518, 26542, 26541], [26518, 26519, 26543], [26518, 26543, 26542], [26519, 26520, 26543], [26520, 26544, 26543], [26520, 26521, 26545], [26520, 26545, 26544], [26521, 26522, 26545], [26522, 26546, 26545], [26522, 26523, 26547], [26522, 26547, 26546], [5546, 26524, 26548], [5546, 26548, 5675], [26524, 26525, 26548], [26525, 26549, 26548], [26525, 26526, 26550], 
[26525, 26550, 26549], [26526, 26527, 26550], [26527, 26551, 26550], [26527, 26528, 26552], [26527, 26552, 26551], [26528, 26529, 26552], [26529, 26553, 26552], [26529, 26530, 26554], [26529, 26554, 26553], [26530, 26531, 26554], [26531, 26555, 26554], [26531, 26532, 26556], [26531, 26556, 26555], [26532, 26533, 26556], [26533, 26557, 26556], [26533, 26534, 26558], [26533, 26558, 26557], [26534, 26535, 26558], [26535, 26559, 26558], [26535, 26536, 26560], [26535, 26560, 26559], [26536, 26537, 26560], [26537, 26561, 26560], [26537, 26538, 26562], [26537, 26562, 26561], [26538, 26539, 26562], [26539, 26563, 26562], [26539, 26540, 26564], [26539, 26564, 26563], [26540, 26541, 26564], [26541, 26565, 26564], [26541, 26542, 26566], [26541, 26566, 26565], [26542, 26543, 26566], [26543, 26567, 26566], [26543, 26544, 26568], [26543, 26568, 26567], [26544, 26545, 26568], [26545, 26569, 26568], [26545, 26546, 26570], [26545, 26570, 26569], [26546, 26547, 26570], [26547, 26571, 26570], [5675, 26548, 5804], [26548, 26572, 5804], [26548, 26549, 26573], [26548, 26573, 26572], [26549, 26550, 26573], [26550, 26574, 26573], [26550, 26551, 26575], [26550, 26575, 26574], [26551, 26552, 26575], [26552, 26576, 26575], [26552, 26553, 26577], [26552, 26577, 26576], [26553, 26554, 26577], [26554, 26578, 26577], [26554, 26555, 26579], [26554, 26579, 26578], [26555, 26556, 26579], [26556, 26580, 26579], [26556, 26557, 26581], [26556, 26581, 26580], [26557, 26558, 26581], [26558, 26582, 26581], [26558, 26559, 26583], [26558, 26583, 26582], [26559, 26560, 26583], [26560, 26584, 26583], [26560, 26561, 26585], [26560, 26585, 26584], [26561, 26562, 26585], [26562, 26586, 26585], [26562, 26563, 26587], [26562, 26587, 26586], [26563, 26564, 26587], [26564, 26588, 26587], [26564, 26565, 26589], [26564, 26589, 26588], [26565, 26566, 26589], [26566, 26590, 26589], [26566, 26567, 26591], [26566, 26591, 26590], [26567, 26568, 26591], [26568, 26592, 26591], [26568, 26569, 26593], [26568, 26593, 26592], [26569, 26570, 26593], [26570, 26594, 26593], [26570, 26571, 26595], [26570, 26595, 26594], [5804, 26572, 26596], [5804, 26596, 5933], [26572, 26573, 26596], [26573, 26597, 26596], [26573, 26574, 26598], [26573, 26598, 26597], [26574, 26575, 26598], [26575, 26599, 26598], [26575, 26576, 26600], [26575, 26600, 26599], [26576, 26577, 26600], [26577, 26601, 26600], [26577, 26578, 26602], [26577, 26602, 26601], [26578, 26579, 26602], [26579, 26603, 26602], [26579, 26580, 26604], [26579, 26604, 26603], [26580, 26581, 26604], [26581, 26605, 26604], [26581, 26582, 26606], [26581, 26606, 26605], [26582, 26583, 26606], [26583, 26607, 26606], [26583, 26584, 26608], [26583, 26608, 26607], [26584, 26585, 26608], [26585, 26609, 26608], [26585, 26586, 26610], [26585, 26610, 26609], [26586, 26587, 26610], [26587, 26611, 26610], [26587, 26588, 26612], [26587, 26612, 26611], [26588, 26589, 26612], [26589, 26613, 26612], [26589, 26590, 26614], [26589, 26614, 26613], [26590, 26591, 26614], [26591, 26615, 26614], [26591, 26592, 26616], [26591, 26616, 26615], [26592, 26593, 26616], [26593, 26617, 26616], [26593, 26594, 26618], [26593, 26618, 26617], [26594, 26595, 26618], [26595, 26619, 26618], [5933, 26596, 6062], [26596, 26620, 6062], [26596, 26597, 26621], [26596, 26621, 26620], [26597, 26598, 26621], [26598, 26622, 26621], [26598, 26599, 26623], [26598, 26623, 26622], [26599, 26600, 26623], [26600, 26624, 26623], [26600, 26601, 26625], [26600, 26625, 26624], [26601, 26602, 26625], [26602, 26626, 26625], [26602, 26603, 26627], [26602, 26627, 
26626], [26603, 26604, 26627], [26604, 26628, 26627], [26604, 26605, 26629], [26604, 26629, 26628], [26605, 26606, 26629], [26606, 26630, 26629], [26606, 26607, 26631], [26606, 26631, 26630], [26607, 26608, 26631], [26608, 26632, 26631], [26608, 26609, 26633], [26608, 26633, 26632], [26609, 26610, 26633], [26610, 26634, 26633], [26610, 26611, 26635], [26610, 26635, 26634], [26611, 26612, 26635], [26612, 26636, 26635], [26612, 26613, 26637], [26612, 26637, 26636], [26613, 26614, 26637], [26614, 26638, 26637], [26614, 26615, 26639], [26614, 26639, 26638], [26615, 26616, 26639], [26616, 26640, 26639], [26616, 26617, 26641], [26616, 26641, 26640], [26617, 26618, 26641], [26618, 26642, 26641], [26618, 26619, 26643], [26618, 26643, 26642], [6062, 26620, 26644], [6062, 26644, 6189], [26620, 26621, 26644], [26621, 26645, 26644], [26621, 26622, 26646], [26621, 26646, 26645], [26622, 26623, 26646], [26623, 26647, 26646], [26623, 26624, 26648], [26623, 26648, 26647], [26624, 26625, 26648], [26625, 26649, 26648], [26625, 26626, 26650], [26625, 26650, 26649], [26626, 26627, 26650], [26627, 26651, 26650], [26627, 26628, 26652], [26627, 26652, 26651], [26628, 26629, 26652], [26629, 26653, 26652], [26629, 26630, 26654], [26629, 26654, 26653], [26630, 26631, 26654], [26631, 26655, 26654], [26631, 26632, 26656], [26631, 26656, 26655], [26632, 26633, 26656], [26633, 26657, 26656], [26633, 26634, 26658], [26633, 26658, 26657], [26634, 26635, 26658], [26635, 26659, 26658], [26635, 26636, 26660], [26635, 26660, 26659], [26636, 26637, 26660], [26637, 26661, 26660], [26637, 26638, 26662], [26637, 26662, 26661], [26638, 26639, 26662], [26639, 26663, 26662], [26639, 26640, 26664], [26639, 26664, 26663], [26640, 26641, 26664], [26641, 26665, 26664], [26641, 26642, 26666], [26641, 26666, 26665], [26642, 26643, 26666], [26643, 26667, 26666], [6189, 26644, 6314], [26644, 26668, 6314], [26644, 26645, 26669], [26644, 26669, 26668], [26645, 26646, 26669], [26646, 26670, 26669], [26646, 26647, 26671], [26646, 26671, 26670], [26647, 26648, 26671], [26648, 26672, 26671], [26648, 26649, 26673], [26648, 26673, 26672], [26649, 26650, 26673], [26650, 26674, 26673], [26650, 26651, 26675], [26650, 26675, 26674], [26651, 26652, 26675], [26652, 26676, 26675], [26652, 26653, 26677], [26652, 26677, 26676], [26653, 26654, 26677], [26654, 26678, 26677], [26654, 26655, 26679], [26654, 26679, 26678], [26655, 26656, 26679], [26656, 26680, 26679], [26656, 26657, 26681], [26656, 26681, 26680], [26657, 26658, 26681], [26658, 26682, 26681], [26658, 26659, 26683], [26658, 26683, 26682], [26659, 26660, 26683], [26660, 26684, 26683], [26660, 26661, 26685], [26660, 26685, 26684], [26661, 26662, 26685], [26662, 26686, 26685], [26662, 26663, 26687], [26662, 26687, 26686], [26663, 26664, 26687], [26664, 26688, 26687], [26664, 26665, 26689], [26664, 26689, 26688], [26665, 26666, 26689], [26666, 26690, 26689], [26666, 26667, 26691], [26666, 26691, 26690], [6314, 26668, 26692], [6314, 26692, 6437], [26668, 26669, 26692], [26669, 26693, 26692], [26669, 26670, 26694], [26669, 26694, 26693], [26670, 26671, 26694], [26671, 26695, 26694], [26671, 26672, 26696], [26671, 26696, 26695], [26672, 26673, 26696], [26673, 26697, 26696], [26673, 26674, 26698], [26673, 26698, 26697], [26674, 26675, 26698], [26675, 26699, 26698], [26675, 26676, 26700], [26675, 26700, 26699], [26676, 26677, 26700], [26677, 26701, 26700], [26677, 26678, 26702], [26677, 26702, 26701], [26678, 26679, 26702], [26679, 26703, 26702], [26679, 26680, 26704], [26679, 26704, 26703], [26680, 
26681, 26704], [26681, 26705, 26704], [26681, 26682, 26706], [26681, 26706, 26705], [26682, 26683, 26706], [26683, 26707, 26706], [26683, 26684, 26708], [26683, 26708, 26707], [26684, 26685, 26708], [26685, 26709, 26708], [26685, 26686, 26710], [26685, 26710, 26709], [26686, 26687, 26710], [26687, 26711, 26710], [26687, 26688, 26712], [26687, 26712, 26711], [26688, 26689, 26712], [26689, 26713, 26712], [26689, 26690, 26714], [26689, 26714, 26713], [26690, 26691, 26714], [26691, 26715, 26714], [6437, 26692, 6557], [26692, 26716, 6557], [26692, 26693, 26717], [26692, 26717, 26716], [26693, 26694, 26717], [26694, 26718, 26717], [26694, 26695, 26719], [26694, 26719, 26718], [26695, 26696, 26719], [26696, 26720, 26719], [26696, 26697, 26721], [26696, 26721, 26720], [26697, 26698, 26721], [26698, 26722, 26721], [26698, 26699, 26723], [26698, 26723, 26722], [26699, 26700, 26723], [26700, 26724, 26723], [26700, 26701, 26725], [26700, 26725, 26724], [26701, 26702, 26725], [26702, 26726, 26725], [26702, 26703, 26727], [26702, 26727, 26726], [26703, 26704, 26727], [26704, 26728, 26727], [26704, 26705, 26729], [26704, 26729, 26728], [26705, 26706, 26729], [26706, 26730, 26729], [26706, 26707, 26731], [26706, 26731, 26730], [26707, 26708, 26731], [26708, 26732, 26731], [26708, 26709, 26733], [26708, 26733, 26732], [26709, 26710, 26733], [26710, 26734, 26733], [26710, 26711, 26735], [26710, 26735, 26734], [26711, 26712, 26735], [26712, 26736, 26735], [26712, 26713, 26737], [26712, 26737, 26736], [26713, 26714, 26737], [26714, 26738, 26737], [26714, 26715, 26739], [26714, 26739, 26738], [6557, 26716, 26740], [6557, 26740, 6675], [26716, 26717, 26740], [26717, 26741, 26740], [26717, 26718, 26742], [26717, 26742, 26741], [26718, 26719, 26742], [26719, 26743, 26742], [26719, 26720, 26744], [26719, 26744, 26743], [26720, 26721, 26744], [26721, 26745, 26744], [26721, 26722, 26746], [26721, 26746, 26745], [26722, 26723, 26746], [26723, 26747, 26746], [26723, 26724, 26748], [26723, 26748, 26747], [26724, 26725, 26748], [26725, 26749, 26748], [26725, 26726, 26750], [26725, 26750, 26749], [26726, 26727, 26750], [26727, 26751, 26750], [26727, 26728, 26752], [26727, 26752, 26751], [26728, 26729, 26752], [26729, 26753, 26752], [26729, 26730, 26754], [26729, 26754, 26753], [26730, 26731, 26754], [26731, 26755, 26754], [26731, 26732, 26756], [26731, 26756, 26755], [26732, 26733, 26756], [26733, 26757, 26756], [26733, 26734, 26758], [26733, 26758, 26757], [26734, 26735, 26758], [26735, 26759, 26758], [26735, 26736, 26760], [26735, 26760, 26759], [26736, 26737, 26760], [26737, 26761, 26760], [26737, 26738, 26762], [26737, 26762, 26761], [26738, 26739, 26762], [26739, 26763, 26762], [6675, 26740, 6793], [26740, 26764, 6793], [26740, 26741, 26765], [26740, 26765, 26764], [26741, 26742, 26765], [26742, 26766, 26765], [26742, 26743, 26767], [26742, 26767, 26766], [26743, 26744, 26767], [26744, 26768, 26767], [26744, 26745, 26769], [26744, 26769, 26768], [26745, 26746, 26769], [26746, 26770, 26769], [26746, 26747, 26771], [26746, 26771, 26770], [26747, 26748, 26771], [26748, 26772, 26771], [26748, 26749, 26773], [26748, 26773, 26772], [26749, 26750, 26773], [26750, 26774, 26773], [26750, 26751, 26775], [26750, 26775, 26774], [26751, 26752, 26775], [26752, 26776, 26775], [26752, 26753, 26777], [26752, 26777, 26776], [26753, 26754, 26777], [26754, 26778, 26777], [26754, 26755, 26779], [26754, 26779, 26778], [26755, 26756, 26779], [26756, 26780, 26779], [26756, 26757, 26781], [26756, 26781, 26780], [26757, 26758, 26781], 
[26758, 26782, 26781], [26758, 26759, 26783], [26758, 26783, 26782], [26759, 26760, 26783], [26760, 26784, 26783], [26760, 26761, 26785], [26760, 26785, 26784], [26761, 26762, 26785], [26762, 26786, 26785], [26762, 26763, 26787], [26762, 26787, 26786], [6793, 26764, 26788], [6793, 26788, 6911], [26764, 26765, 26788], [26765, 26789, 26788], [26765, 26766, 26790], [26765, 26790, 26789], [26766, 26767, 26790], [26767, 26791, 26790], [26767, 26768, 26792], [26767, 26792, 26791], [26768, 26769, 26792], [26769, 26793, 26792], [26769, 26770, 26794], [26769, 26794, 26793], [26770, 26771, 26794], [26771, 26795, 26794], [26771, 26772, 26796], [26771, 26796, 26795], [26772, 26773, 26796], [26773, 26797, 26796], [26773, 26774, 26798], [26773, 26798, 26797], [26774, 26775, 26798], [26775, 26799, 26798], [26775, 26776, 26800], [26775, 26800, 26799], [26776, 26777, 26800], [26777, 26801, 26800], [26777, 26778, 26802], [26777, 26802, 26801], [26778, 26779, 26802], [26779, 26803, 26802], [26779, 26780, 26804], [26779, 26804, 26803], [26780, 26781, 26804], [26781, 26805, 26804], [26781, 26782, 26806], [26781, 26806, 26805], [26782, 26783, 26806], [26783, 26807, 26806], [26783, 26784, 26808], [26783, 26808, 26807], [26784, 26785, 26808], [26785, 26809, 26808], [26785, 26786, 26810], [26785, 26810, 26809], [26786, 26787, 26810], [26787, 26811, 26810], [6911, 26788, 7029], [26788, 26812, 7029], [26788, 26789, 26813], [26788, 26813, 26812], [26789, 26790, 26813], [26790, 26814, 26813], [26790, 26791, 26815], [26790, 26815, 26814], [26791, 26792, 26815], [26792, 26816, 26815], [26792, 26793, 26817], [26792, 26817, 26816], [26793, 26794, 26817], [26794, 26818, 26817], [26794, 26795, 26819], [26794, 26819, 26818], [26795, 26796, 26819], [26796, 26820, 26819], [26796, 26797, 26821], [26796, 26821, 26820], [26797, 26798, 26821], [26798, 26822, 26821], [26798, 26799, 26823], [26798, 26823, 26822], [26799, 26800, 26823], [26800, 26824, 26823], [26800, 26801, 26825], [26800, 26825, 26824], [26801, 26802, 26825], [26802, 26826, 26825], [26802, 26803, 26827], [26802, 26827, 26826], [26803, 26804, 26827], [26804, 26828, 26827], [26804, 26805, 26829], [26804, 26829, 26828], [26805, 26806, 26829], [26806, 26830, 26829], [26806, 26807, 26831], [26806, 26831, 26830], [26807, 26808, 26831], [26808, 26832, 26831], [26808, 26809, 26833], [26808, 26833, 26832], [26809, 26810, 26833], [26810, 26834, 26833], [26810, 26811, 26835], [26810, 26835, 26834], [7029, 26812, 26836], [7029, 26836, 7147], [26812, 26813, 26836], [26813, 26837, 26836], [26813, 26814, 26838], [26813, 26838, 26837], [26814, 26815, 26838], [26815, 26839, 26838], [26815, 26816, 26840], [26815, 26840, 26839], [26816, 26817, 26840], [26817, 26841, 26840], [26817, 26818, 26842], [26817, 26842, 26841], [26818, 26819, 26842], [26819, 26843, 26842], [26819, 26820, 26844], [26819, 26844, 26843], [26820, 26821, 26844], [26821, 26845, 26844], [26821, 26822, 26846], [26821, 26846, 26845], [26822, 26823, 26846], [26823, 26847, 26846], [26823, 26824, 26848], [26823, 26848, 26847], [26824, 26825, 26848], [26825, 26849, 26848], [26825, 26826, 26850], [26825, 26850, 26849], [26826, 26827, 26850], [26827, 26851, 26850], [26827, 26828, 26852], [26827, 26852, 26851], [26828, 26829, 26852], [26829, 26853, 26852], [26829, 26830, 26854], [26829, 26854, 26853], [26830, 26831, 26854], [26831, 26855, 26854], [26831, 26832, 26856], [26831, 26856, 26855], [26832, 26833, 26856], [26833, 26857, 26856], [26833, 26834, 26858], [26833, 26858, 26857], [26834, 26835, 26858], [26835, 26859, 
26858], [7147, 26836, 7265], [26836, 26860, 7265], [26836, 26837, 26861], [26836, 26861, 26860], [26837, 26838, 26861], [26838, 26862, 26861], [26838, 26839, 26863], [26838, 26863, 26862], [26839, 26840, 26863], [26840, 26864, 26863], [26840, 26841, 26865], [26840, 26865, 26864], [26841, 26842, 26865], [26842, 26866, 26865], [26842, 26843, 26867], [26842, 26867, 26866], [26843, 26844, 26867], [26844, 26868, 26867], [26844, 26845, 26869], [26844, 26869, 26868], [26845, 26846, 26869], [26846, 26870, 26869], [26846, 26847, 26871], [26846, 26871, 26870], [26847, 26848, 26871], [26848, 26872, 26871], [26848, 26849, 26873], [26848, 26873, 26872], [26849, 26850, 26873], [26850, 26874, 26873], [26850, 26851, 26875], [26850, 26875, 26874], [26851, 26852, 26875], [26852, 26876, 26875], [26852, 26853, 26877], [26852, 26877, 26876], [26853, 26854, 26877], [26854, 26878, 26877], [26854, 26855, 26879], [26854, 26879, 26878], [26855, 26856, 26879], [26856, 26880, 26879], [26856, 26857, 26881], [26856, 26881, 26880], [26857, 26858, 26881], [26858, 26882, 26881], [26858, 26859, 26883], [26858, 26883, 26882], [7265, 26860, 26884], [7265, 26884, 7383], [26860, 26861, 26884], [26861, 26885, 26884], [26861, 26862, 26886], [26861, 26886, 26885], [26862, 26863, 26886], [26863, 26887, 26886], [26863, 26864, 26888], [26863, 26888, 26887], [26864, 26865, 26888], [26865, 26889, 26888], [26865, 26866, 26890], [26865, 26890, 26889], [26866, 26867, 26890], [26867, 26891, 26890], [26867, 26868, 26892], [26867, 26892, 26891], [26868, 26869, 26892], [26869, 26893, 26892], [26869, 26870, 26894], [26869, 26894, 26893], [26870, 26871, 26894], [26871, 26895, 26894], [26871, 26872, 26896], [26871, 26896, 26895], [26872, 26873, 26896], [26873, 26897, 26896], [26873, 26874, 26898], [26873, 26898, 26897], [26874, 26875, 26898], [26875, 26899, 26898], [26875, 26876, 26900], [26875, 26900, 26899], [26876, 26877, 26900], [26877, 26901, 26900], [26877, 26878, 26902], [26877, 26902, 26901], [26878, 26879, 26902], [26879, 26903, 26902], [26879, 26880, 26904], [26879, 26904, 26903], [26880, 26881, 26904], [26881, 26905, 26904], [26881, 26882, 26906], [26881, 26906, 26905], [26882, 26883, 26906], [26883, 26907, 26906], [7383, 26884, 7501], [26884, 26908, 7501], [26884, 26885, 26909], [26884, 26909, 26908], [26885, 26886, 26909], [26886, 26910, 26909], [26886, 26887, 26911], [26886, 26911, 26910], [26887, 26888, 26911], [26888, 26912, 26911], [26888, 26889, 26913], [26888, 26913, 26912], [26889, 26890, 26913], [26890, 26914, 26913], [26890, 26891, 26915], [26890, 26915, 26914], [26891, 26892, 26915], [26892, 26916, 26915], [26892, 26893, 26917], [26892, 26917, 26916], [26893, 26894, 26917], [26894, 26918, 26917], [26894, 26895, 26919], [26894, 26919, 26918], [26895, 26896, 26919], [26896, 26920, 26919], [26896, 26897, 26921], [26896, 26921, 26920], [26897, 26898, 26921], [26898, 26922, 26921], [26898, 26899, 26923], [26898, 26923, 26922], [26899, 26900, 26923], [26900, 26924, 26923], [26900, 26901, 26925], [26900, 26925, 26924], [26901, 26902, 26925], [26902, 26926, 26925], [26902, 26903, 26927], [26902, 26927, 26926], [26903, 26904, 26927], [26904, 26928, 26927], [26904, 26905, 26929], [26904, 26929, 26928], [26905, 26906, 26929], [26906, 26930, 26929], [26906, 26907, 26931], [26906, 26931, 26930], [7501, 26908, 26932], [7501, 26932, 7619], [26908, 26909, 26932], [26909, 26933, 26932], [26909, 26910, 26934], [26909, 26934, 26933], [26910, 26911, 26934], [26911, 26935, 26934], [26911, 26912, 26936], [26911, 26936, 26935], [26912, 26913, 
26936], [26913, 26937, 26936], [26913, 26914, 26938], [26913, 26938, 26937], [26914, 26915, 26938], [26915, 26939, 26938], [26915, 26916, 26940], [26915, 26940, 26939], [26916, 26917, 26940], [26917, 26941, 26940], [26917, 26918, 26942], [26917, 26942, 26941], [26918, 26919, 26942], [26919, 26943, 26942], [26919, 26920, 26944], [26919, 26944, 26943], [26920, 26921, 26944], [26921, 26945, 26944], [26921, 26922, 26946], [26921, 26946, 26945], [26922, 26923, 26946], [26923, 26947, 26946], [26923, 26924, 26948], [26923, 26948, 26947], [26924, 26925, 26948], [26925, 26949, 26948], [26925, 26926, 26950], [26925, 26950, 26949], [26926, 26927, 26950], [26927, 26951, 26950], [26927, 26928, 26952], [26927, 26952, 26951], [26928, 26929, 26952], [26929, 26953, 26952], [26929, 26930, 26954], [26929, 26954, 26953], [26930, 26931, 26954], [26931, 26955, 26954], [7619, 26932, 7737], [26932, 26956, 7737], [26932, 26933, 26957], [26932, 26957, 26956], [26933, 26934, 26957], [26934, 26958, 26957], [26934, 26935, 26959], [26934, 26959, 26958], [26935, 26936, 26959], [26936, 26960, 26959], [26936, 26937, 26961], [26936, 26961, 26960], [26937, 26938, 26961], [26938, 26962, 26961], [26938, 26939, 26963], [26938, 26963, 26962], [26939, 26940, 26963], [26940, 26964, 26963], [26940, 26941, 26965], [26940, 26965, 26964], [26941, 26942, 26965], [26942, 26966, 26965], [26942, 26943, 26967], [26942, 26967, 26966], [26943, 26944, 26967], [26944, 26968, 26967], [26944, 26945, 26969], [26944, 26969, 26968], [26945, 26946, 26969], [26946, 26970, 26969], [26946, 26947, 26971], [26946, 26971, 26970], [26947, 26948, 26971], [26948, 26972, 26971], [26948, 26949, 26973], [26948, 26973, 26972], [26949, 26950, 26973], [26950, 26974, 26973], [26950, 26951, 26975], [26950, 26975, 26974], [26951, 26952, 26975], [26952, 26976, 26975], [26952, 26953, 26977], [26952, 26977, 26976], [26953, 26954, 26977], [26954, 26978, 26977], [26954, 26955, 26979], [26954, 26979, 26978], [7737, 26956, 26980], [7737, 26980, 7855], [26956, 26957, 26980], [26957, 26981, 26980], [26957, 26958, 26982], [26957, 26982, 26981], [26958, 26959, 26982], [26959, 26983, 26982], [26959, 26960, 26984], [26959, 26984, 26983], [26960, 26961, 26984], [26961, 26985, 26984], [26961, 26962, 26986], [26961, 26986, 26985], [26962, 26963, 26986], [26963, 26987, 26986], [26963, 26964, 26988], [26963, 26988, 26987], [26964, 26965, 26988], [26965, 26989, 26988], [26965, 26966, 26990], [26965, 26990, 26989], [26966, 26967, 26990], [26967, 26991, 26990], [26967, 26968, 26992], [26967, 26992, 26991], [26968, 26969, 26992], [26969, 26993, 26992], [26969, 26970, 26994], [26969, 26994, 26993], [26970, 26971, 26994], [26971, 26995, 26994], [26971, 26972, 26996], [26971, 26996, 26995], [26972, 26973, 26996], [26973, 26997, 26996], [26973, 26974, 26998], [26973, 26998, 26997], [26974, 26975, 26998], [26975, 26999, 26998], [26975, 26976, 27000], [26975, 27000, 26999], [26976, 26977, 27000], [26977, 27001, 27000], [26977, 26978, 27002], [26977, 27002, 27001], [26978, 26979, 27002], [26979, 27003, 27002], [7855, 26980, 7973], [26980, 27004, 7973], [26980, 26981, 27005], [26980, 27005, 27004], [26981, 26982, 27005], [26982, 27006, 27005], [26982, 26983, 27007], [26982, 27007, 27006], [26983, 26984, 27007], [26984, 27008, 27007], [26984, 26985, 27009], [26984, 27009, 27008], [26985, 26986, 27009], [26986, 27010, 27009], [26986, 26987, 27011], [26986, 27011, 27010], [26987, 26988, 27011], [26988, 27012, 27011], [26988, 26989, 27013], [26988, 27013, 27012], [26989, 26990, 27013], [26990, 
27014, 27013], [26990, 26991, 27015], [26990, 27015, 27014], [26991, 26992, 27015], [26992, 27016, 27015], [26992, 26993, 27017], [26992, 27017, 27016], [26993, 26994, 27017], [26994, 27018, 27017], [26994, 26995, 27019], [26994, 27019, 27018], [26995, 26996, 27019], [26996, 27020, 27019], [26996, 26997, 27021], [26996, 27021, 27020], [26997, 26998, 27021], [26998, 27022, 27021], [26998, 26999, 27023], [26998, 27023, 27022], [26999, 27000, 27023], [27000, 27024, 27023], [27000, 27001, 27025], [27000, 27025, 27024], [27001, 27002, 27025], [27002, 27026, 27025], [27002, 27003, 27027], [27002, 27027, 27026], [7973, 27004, 27028], [7973, 27028, 8091], [27004, 27005, 27028], [27005, 27029, 27028], [27005, 27006, 27030], [27005, 27030, 27029], [27006, 27007, 27030], [27007, 27031, 27030], [27007, 27008, 27032], [27007, 27032, 27031], [27008, 27009, 27032], [27009, 27033, 27032], [27009, 27010, 27034], [27009, 27034, 27033], [27010, 27011, 27034], [27011, 27035, 27034], [27011, 27012, 27036], [27011, 27036, 27035], [27012, 27013, 27036], [27013, 27037, 27036], [27013, 27014, 27038], [27013, 27038, 27037], [27014, 27015, 27038], [27015, 27039, 27038], [27015, 27016, 27040], [27015, 27040, 27039], [27016, 27017, 27040], [27017, 27041, 27040], [27017, 27018, 27042], [27017, 27042, 27041], [27018, 27019, 27042], [27019, 27043, 27042], [27019, 27020, 27044], [27019, 27044, 27043], [27020, 27021, 27044], [27021, 27045, 27044], [27021, 27022, 27046], [27021, 27046, 27045], [27022, 27023, 27046], [27023, 27047, 27046], [27023, 27024, 27048], [27023, 27048, 27047], [27024, 27025, 27048], [27025, 27049, 27048], [27025, 27026, 27050], [27025, 27050, 27049], [27026, 27027, 27050], [27027, 27051, 27050], [8091, 27028, 8209], [27028, 27052, 8209], [27028, 27029, 27053], [27028, 27053, 27052], [27029, 27030, 27053], [27030, 27054, 27053], [27030, 27031, 27055], [27030, 27055, 27054], [27031, 27032, 27055], [27032, 27056, 27055], [27032, 27033, 27057], [27032, 27057, 27056], [27033, 27034, 27057], [27034, 27058, 27057], [27034, 27035, 27059], [27034, 27059, 27058], [27035, 27036, 27059], [27036, 27060, 27059], [27036, 27037, 27061], [27036, 27061, 27060], [27037, 27038, 27061], [27038, 27062, 27061], [27038, 27039, 27063], [27038, 27063, 27062], [27039, 27040, 27063], [27040, 27064, 27063], [27040, 27041, 27065], [27040, 27065, 27064], [27041, 27042, 27065], [27042, 27066, 27065], [27042, 27043, 27067], [27042, 27067, 27066], [27043, 27044, 27067], [27044, 27068, 27067], [27044, 27045, 27069], [27044, 27069, 27068], [27045, 27046, 27069], [27046, 27070, 27069], [27046, 27047, 27071], [27046, 27071, 27070], [27047, 27048, 27071], [27048, 27072, 27071], [27048, 27049, 27073], [27048, 27073, 27072], [27049, 27050, 27073], [27050, 27074, 27073], [27050, 27051, 27075], [27050, 27075, 27074], [8209, 27052, 27076], [8209, 27076, 8327], [27052, 27053, 27076], [27053, 27077, 27076], [27053, 27054, 27078], [27053, 27078, 27077], [27054, 27055, 27078], [27055, 27079, 27078], [27055, 27056, 27080], [27055, 27080, 27079], [27056, 27057, 27080], [27057, 27081, 27080], [27057, 27058, 27082], [27057, 27082, 27081], [27058, 27059, 27082], [27059, 27083, 27082], [27059, 27060, 27084], [27059, 27084, 27083], [27060, 27061, 27084], [27061, 27085, 27084], [27061, 27062, 27086], [27061, 27086, 27085], [27062, 27063, 27086], [27063, 27087, 27086], [27063, 27064, 27088], [27063, 27088, 27087], [27064, 27065, 27088], [27065, 27089, 27088], [27065, 27066, 27090], [27065, 27090, 27089], [27066, 27067, 27090], [27067, 27091, 27090], 
[27067, 27068, 27092], [27067, 27092, 27091], [27068, 27069, 27092], [27069, 27093, 27092], [27069, 27070, 27094], [27069, 27094, 27093], [27070, 27071, 27094], [27071, 27095, 27094], [27071, 27072, 27096], [27071, 27096, 27095], [27072, 27073, 27096], [27073, 27097, 27096], [27073, 27074, 27098], [27073, 27098, 27097], [27074, 27075, 27098], [27075, 27099, 27098], [8327, 27076, 8445], [27076, 27100, 8445], [27076, 27077, 27101], [27076, 27101, 27100], [27077, 27078, 27101], [27078, 27102, 27101], [27078, 27079, 27103], [27078, 27103, 27102], [27079, 27080, 27103], [27080, 27104, 27103], [27080, 27081, 27105], [27080, 27105, 27104], [27081, 27082, 27105], [27082, 27106, 27105], [27082, 27083, 27107], [27082, 27107, 27106], [27083, 27084, 27107], [27084, 27108, 27107], [27084, 27085, 27109], [27084, 27109, 27108], [27085, 27086, 27109], [27086, 27110, 27109], [27086, 27087, 27111], [27086, 27111, 27110], [27087, 27088, 27111], [27088, 27112, 27111], [27088, 27089, 27113], [27088, 27113, 27112], [27089, 27090, 27113], [27090, 27114, 27113], [27090, 27091, 27115], [27090, 27115, 27114], [27091, 27092, 27115], [27092, 27116, 27115], [27092, 27093, 27117], [27092, 27117, 27116], [27093, 27094, 27117], [27094, 27118, 27117], [27094, 27095, 27119], [27094, 27119, 27118], [27095, 27096, 27119], [27096, 27120, 27119], [27096, 27097, 27121], [27096, 27121, 27120], [27097, 27098, 27121], [27098, 27122, 27121], [27098, 27099, 27123], [27098, 27123, 27122], [8445, 27100, 27124], [8445, 27124, 8563], [27100, 27101, 27124], [27101, 27125, 27124], [27101, 27102, 27126], [27101, 27126, 27125], [27102, 27103, 27126], [27103, 27127, 27126], [27103, 27104, 27128], [27103, 27128, 27127], [27104, 27105, 27128], [27105, 27129, 27128], [27105, 27106, 27130], [27105, 27130, 27129], [27106, 27107, 27130], [27107, 27131, 27130], [27107, 27108, 27132], [27107, 27132, 27131], [27108, 27109, 27132], [27109, 27133, 27132], [27109, 27110, 27134], [27109, 27134, 27133], [27110, 27111, 27134], [27111, 27135, 27134], [27111, 27112, 27136], [27111, 27136, 27135], [27112, 27113, 27136], [27113, 27137, 27136], [27113, 27114, 27138], [27113, 27138, 27137], [27114, 27115, 27138], [27115, 27139, 27138], [27115, 27116, 27140], [27115, 27140, 27139], [27116, 27117, 27140], [27117, 27141, 27140], [27117, 27118, 27142], [27117, 27142, 27141], [27118, 27119, 27142], [27119, 27143, 27142], [27119, 27120, 27144], [27119, 27144, 27143], [27120, 27121, 27144], [27121, 27145, 27144], [27121, 27122, 27146], [27121, 27146, 27145], [27122, 27123, 27146], [27123, 27147, 27146], [8563, 27124, 8681], [27124, 27148, 8681], [27124, 27125, 27149], [27124, 27149, 27148], [27125, 27126, 27149], [27126, 27150, 27149], [27126, 27127, 27151], [27126, 27151, 27150], [27127, 27128, 27151], [27128, 27152, 27151], [27128, 27129, 27153], [27128, 27153, 27152], [27129, 27130, 27153], [27130, 27154, 27153], [27130, 27131, 27155], [27130, 27155, 27154], [27131, 27132, 27155], [27132, 27156, 27155], [27132, 27133, 27157], [27132, 27157, 27156], [27133, 27134, 27157], [27134, 27158, 27157], [27134, 27135, 27159], [27134, 27159, 27158], [27135, 27136, 27159], [27136, 27160, 27159], [27136, 27137, 27161], [27136, 27161, 27160], [27137, 27138, 27161], [27138, 27162, 27161], [27138, 27139, 27163], [27138, 27163, 27162], [27139, 27140, 27163], [27140, 27164, 27163], [27140, 27141, 27165], [27140, 27165, 27164], [27141, 27142, 27165], [27142, 27166, 27165], [27142, 27143, 27167], [27142, 27167, 27166], [27143, 27144, 27167], [27144, 27168, 27167], [27144, 27145, 
27169], [27144, 27169, 27168], [27145, 27146, 27169], [27146, 27170, 27169], [27146, 27147, 27171], [27146, 27171, 27170], [8681, 27148, 27172], [8681, 27172, 8799], [27148, 27149, 27172], [27149, 27173, 27172], [27149, 27150, 27174], [27149, 27174, 27173], [27150, 27151, 27174], [27151, 27175, 27174], [27151, 27152, 27176], [27151, 27176, 27175], [27152, 27153, 27176], [27153, 27177, 27176], [27153, 27154, 27178], [27153, 27178, 27177], [27154, 27155, 27178], [27155, 27179, 27178], [27155, 27156, 27180], [27155, 27180, 27179], [27156, 27157, 27180], [27157, 27181, 27180], [27157, 27158, 27182], [27157, 27182, 27181], [27158, 27159, 27182], [27159, 27183, 27182], [27159, 27160, 27184], [27159, 27184, 27183], [27160, 27161, 27184], [27161, 27185, 27184], [27161, 27162, 27186], [27161, 27186, 27185], [27162, 27163, 27186], [27163, 27187, 27186], [27163, 27164, 27188], [27163, 27188, 27187], [27164, 27165, 27188], [27165, 27189, 27188], [27165, 27166, 27190], [27165, 27190, 27189], [27166, 27167, 27190], [27167, 27191, 27190], [27167, 27168, 27192], [27167, 27192, 27191], [27168, 27169, 27192], [27169, 27193, 27192], [27169, 27170, 27194], [27169, 27194, 27193], [27170, 27171, 27194], [27171, 27195, 27194], [8799, 27172, 8917], [27172, 27196, 8917], [27172, 27173, 27197], [27172, 27197, 27196], [27173, 27174, 27197], [27174, 27198, 27197], [27174, 27175, 27199], [27174, 27199, 27198], [27175, 27176, 27199], [27176, 27200, 27199], [27176, 27177, 27201], [27176, 27201, 27200], [27177, 27178, 27201], [27178, 27202, 27201], [27178, 27179, 27203], [27178, 27203, 27202], [27179, 27180, 27203], [27180, 27204, 27203], [27180, 27181, 27205], [27180, 27205, 27204], [27181, 27182, 27205], [27182, 27206, 27205], [27182, 27183, 27207], [27182, 27207, 27206], [27183, 27184, 27207], [27184, 27208, 27207], [27184, 27185, 27209], [27184, 27209, 27208], [27185, 27186, 27209], [27186, 27210, 27209], [27186, 27187, 27211], [27186, 27211, 27210], [27187, 27188, 27211], [27188, 27212, 27211], [27188, 27189, 27213], [27188, 27213, 27212], [27189, 27190, 27213], [27190, 27214, 27213], [27190, 27191, 27215], [27190, 27215, 27214], [27191, 27192, 27215], [27192, 27216, 27215], [27192, 27193, 27217], [27192, 27217, 27216], [27193, 27194, 27217], [27194, 27218, 27217], [27194, 27195, 27219], [27194, 27219, 27218], [8917, 27196, 27220], [8917, 27220, 9035], [27196, 27197, 27220], [27197, 27221, 27220], [27197, 27198, 27222], [27197, 27222, 27221], [27198, 27199, 27222], [27199, 27223, 27222], [27199, 27200, 27224], [27199, 27224, 27223], [27200, 27201, 27224], [27201, 27225, 27224], [27201, 27202, 27226], [27201, 27226, 27225], [27202, 27203, 27226], [27203, 27227, 27226], [27203, 27204, 27228], [27203, 27228, 27227], [27204, 27205, 27228], [27205, 27229, 27228], [27205, 27206, 27230], [27205, 27230, 27229], [27206, 27207, 27230], [27207, 27231, 27230], [27207, 27208, 27232], [27207, 27232, 27231], [27208, 27209, 27232], [27209, 27233, 27232], [27209, 27210, 27234], [27209, 27234, 27233], [27210, 27211, 27234], [27211, 27235, 27234], [27211, 27212, 27236], [27211, 27236, 27235], [27212, 27213, 27236], [27213, 27237, 27236], [27213, 27214, 27238], [27213, 27238, 27237], [27214, 27215, 27238], [27215, 27239, 27238], [27215, 27216, 27240], [27215, 27240, 27239], [27216, 27217, 27240], [27217, 27241, 27240], [27217, 27218, 27242], [27217, 27242, 27241], [27218, 27219, 27242], [27219, 27243, 27242], [9035, 27220, 9153], [27220, 27244, 9153], [27220, 27221, 27245], [27220, 27245, 27244], [27221, 27222, 27245], [27222, 27246, 
27245], [27222, 27223, 27247], [27222, 27247, 27246], [27223, 27224, 27247], [27224, 27248, 27247], [27224, 27225, 27249], [27224, 27249, 27248], [27225, 27226, 27249], [27226, 27250, 27249], [27226, 27227, 27251], [27226, 27251, 27250], [27227, 27228, 27251], [27228, 27252, 27251], [27228, 27229, 27253], [27228, 27253, 27252], [27229, 27230, 27253], [27230, 27254, 27253], [27230, 27231, 27255], [27230, 27255, 27254], [27231, 27232, 27255], [27232, 27256, 27255], [27232, 27233, 27257], [27232, 27257, 27256], [27233, 27234, 27257], [27234, 27258, 27257], [27234, 27235, 27259], [27234, 27259, 27258], [27235, 27236, 27259], [27236, 27260, 27259], [27236, 27237, 27261], [27236, 27261, 27260], [27237, 27238, 27261], [27238, 27262, 27261], [27238, 27239, 27263], [27238, 27263, 27262], [27239, 27240, 27263], [27240, 27264, 27263], [27240, 27241, 27265], [27240, 27265, 27264], [27241, 27242, 27265], [27242, 27266, 27265], [27242, 27243, 27267], [27242, 27267, 27266], [9153, 27244, 27268], [9153, 27268, 9271], [27244, 27245, 27268], [27245, 27269, 27268], [27245, 27246, 27270], [27245, 27270, 27269], [27246, 27247, 27270], [27247, 27271, 27270], [27247, 27248, 27272], [27247, 27272, 27271], [27248, 27249, 27272], [27249, 27273, 27272], [27249, 27250, 27274], [27249, 27274, 27273], [27250, 27251, 27274], [27251, 27275, 27274], [27251, 27252, 27276], [27251, 27276, 27275], [27252, 27253, 27276], [27253, 27277, 27276], [27253, 27254, 27278], [27253, 27278, 27277], [27254, 27255, 27278], [27255, 27279, 27278], [27255, 27256, 27280], [27255, 27280, 27279], [27256, 27257, 27280], [27257, 27281, 27280], [27257, 27258, 27282], [27257, 27282, 27281], [27258, 27259, 27282], [27259, 27283, 27282], [27259, 27260, 27284], [27259, 27284, 27283], [27260, 27261, 27284], [27261, 27285, 27284], [27261, 27262, 27286], [27261, 27286, 27285], [27262, 27263, 27286], [27263, 27287, 27286], [27263, 27264, 27288], [27263, 27288, 27287], [27264, 27265, 27288], [27265, 27289, 27288], [27265, 27266, 27290], [27265, 27290, 27289], [27266, 27267, 27290], [27267, 27291, 27290], [9271, 27268, 9389], [27268, 27292, 9389], [27268, 27269, 27293], [27268, 27293, 27292], [27269, 27270, 27293], [27270, 27294, 27293], [27270, 27271, 27295], [27270, 27295, 27294], [27271, 27272, 27295], [27272, 27296, 27295], [27272, 27273, 27297], [27272, 27297, 27296], [27273, 27274, 27297], [27274, 27298, 27297], [27274, 27275, 27299], [27274, 27299, 27298], [27275, 27276, 27299], [27276, 27300, 27299], [27276, 27277, 27301], [27276, 27301, 27300], [27277, 27278, 27301], [27278, 27302, 27301], [27278, 27279, 27303], [27278, 27303, 27302], [27279, 27280, 27303], [27280, 27304, 27303], [27280, 27281, 27305], [27280, 27305, 27304], [27281, 27282, 27305], [27282, 27306, 27305], [27282, 27283, 27307], [27282, 27307, 27306], [27283, 27284, 27307], [27284, 27308, 27307], [27284, 27285, 27309], [27284, 27309, 27308], [27285, 27286, 27309], [27286, 27310, 27309], [27286, 27287, 27311], [27286, 27311, 27310], [27287, 27288, 27311], [27288, 27312, 27311], [27288, 27289, 27313], [27288, 27313, 27312], [27289, 27290, 27313], [27290, 27314, 27313], [27290, 27291, 27315], [27290, 27315, 27314], [9389, 27292, 27316], [9389, 27316, 9507], [27292, 27293, 27316], [27293, 27317, 27316], [27293, 27294, 27318], [27293, 27318, 27317], [27294, 27295, 27318], [27295, 27319, 27318], [27295, 27296, 27320], [27295, 27320, 27319], [27296, 27297, 27320], [27297, 27321, 27320], [27297, 27298, 27322], [27297, 27322, 27321], [27298, 27299, 27322], [27299, 27323, 27322], [27299, 
27300, 27324], [27299, 27324, 27323], [27300, 27301, 27324], [27301, 27325, 27324], [27301, 27302, 27326], [27301, 27326, 27325], [27302, 27303, 27326], [27303, 27327, 27326], [27303, 27304, 27328], [27303, 27328, 27327], [27304, 27305, 27328], [27305, 27329, 27328], [27305, 27306, 27330], [27305, 27330, 27329], [27306, 27307, 27330], [27307, 27331, 27330], [27307, 27308, 27332], [27307, 27332, 27331], [27308, 27309, 27332], [27309, 27333, 27332], [27309, 27310, 27334], [27309, 27334, 27333], [27310, 27311, 27334], [27311, 27335, 27334], [27311, 27312, 27336], [27311, 27336, 27335], [27312, 27313, 27336], [27313, 27337, 27336], [27313, 27314, 27338], [27313, 27338, 27337], [27314, 27315, 27338], [27315, 27339, 27338], [9507, 27316, 9625], [27316, 27340, 9625], [27316, 27317, 27341], [27316, 27341, 27340], [27317, 27318, 27341], [27318, 27342, 27341], [27318, 27319, 27343], [27318, 27343, 27342], [27319, 27320, 27343], [27320, 27344, 27343], [27320, 27321, 27345], [27320, 27345, 27344], [27321, 27322, 27345], [27322, 27346, 27345], [27322, 27323, 27347], [27322, 27347, 27346], [27323, 27324, 27347], [27324, 27348, 27347], [27324, 27325, 27349], [27324, 27349, 27348], [27325, 27326, 27349], [27326, 27350, 27349], [27326, 27327, 27351], [27326, 27351, 27350], [27327, 27328, 27351], [27328, 27352, 27351], [27328, 27329, 27353], [27328, 27353, 27352], [27329, 27330, 27353], [27330, 27354, 27353], [27330, 27331, 27355], [27330, 27355, 27354], [27331, 27332, 27355], [27332, 27356, 27355], [27332, 27333, 27357], [27332, 27357, 27356], [27333, 27334, 27357], [27334, 27358, 27357], [27334, 27335, 27359], [27334, 27359, 27358], [27335, 27336, 27359], [27336, 27360, 27359], [27336, 27337, 27361], [27336, 27361, 27360], [27337, 27338, 27361], [27338, 27362, 27361], [27338, 27339, 27363], [27338, 27363, 27362], [9625, 27340, 27364], [9625, 27364, 9743], [27340, 27341, 27364], [27341, 27365, 27364], [27341, 27342, 27366], [27341, 27366, 27365], [27342, 27343, 27366], [27343, 27367, 27366], [27343, 27344, 27368], [27343, 27368, 27367], [27344, 27345, 27368], [27345, 27369, 27368], [27345, 27346, 27370], [27345, 27370, 27369], [27346, 27347, 27370], [27347, 27371, 27370], [27347, 27348, 27372], [27347, 27372, 27371], [27348, 27349, 27372], [27349, 27373, 27372], [27349, 27350, 27374], [27349, 27374, 27373], [27350, 27351, 27374], [27351, 27375, 27374], [27351, 27352, 27376], [27351, 27376, 27375], [27352, 27353, 27376], [27353, 27377, 27376], [27353, 27354, 27378], [27353, 27378, 27377], [27354, 27355, 27378], [27355, 27379, 27378], [27355, 27356, 27380], [27355, 27380, 27379], [27356, 27357, 27380], [27357, 27381, 27380], [27357, 27358, 27382], [27357, 27382, 27381], [27358, 27359, 27382], [27359, 27383, 27382], [27359, 27360, 27384], [27359, 27384, 27383], [27360, 27361, 27384], [27361, 27385, 27384], [27361, 27362, 27386], [27361, 27386, 27385], [27362, 27363, 27386], [27363, 27387, 27386], [9743, 27364, 9863], [27364, 27388, 9863], [27364, 27365, 27389], [27364, 27389, 27388], [27365, 27366, 27389], [27366, 27390, 27389], [27366, 27367, 27391], [27366, 27391, 27390], [27367, 27368, 27391], [27368, 27392, 27391], [27368, 27369, 27393], [27368, 27393, 27392], [27369, 27370, 27393], [27370, 27394, 27393], [27370, 27371, 27395], [27370, 27395, 27394], [27371, 27372, 27395], [27372, 27396, 27395], [27372, 27373, 27397], [27372, 27397, 27396], [27373, 27374, 27397], [27374, 27398, 27397], [27374, 27375, 27399], [27374, 27399, 27398], [27375, 27376, 27399], [27376, 27400, 27399], [27376, 27377, 27401], 
[27376, 27401, 27400], [27377, 27378, 27401], [27378, 27402, 27401], [27378, 27379, 27403], [27378, 27403, 27402], [27379, 27380, 27403], [27380, 27404, 27403], [27380, 27381, 27405], [27380, 27405, 27404], [27381, 27382, 27405], [27382, 27406, 27405], [27382, 27383, 27407], [27382, 27407, 27406], [27383, 27384, 27407], [27384, 27408, 27407], [27384, 27385, 27409], [27384, 27409, 27408], [27385, 27386, 27409], [27386, 27410, 27409], [27386, 27387, 27411], [27386, 27411, 27410], [9863, 27388, 27412], [9863, 27412, 9984], [27388, 27389, 27412], [27389, 27413, 27412], [27389, 27390, 27414], [27389, 27414, 27413], [27390, 27391, 27414], [27391, 27415, 27414], [27391, 27392, 27416], [27391, 27416, 27415], [27392, 27393, 27416], [27393, 27417, 27416], [27393, 27394, 27418], [27393, 27418, 27417], [27394, 27395, 27418], [27395, 27419, 27418], [27395, 27396, 27420], [27395, 27420, 27419], [27396, 27397, 27420], [27397, 27421, 27420], [27397, 27398, 27422], [27397, 27422, 27421], [27398, 27399, 27422], [27399, 27423, 27422], [27399, 27400, 27424], [27399, 27424, 27423], [27400, 27401, 27424], [27401, 27425, 27424], [27401, 27402, 27426], [27401, 27426, 27425], [27402, 27403, 27426], [27403, 27427, 27426], [27403, 27404, 27428], [27403, 27428, 27427], [27404, 27405, 27428], [27405, 27429, 27428], [27405, 27406, 27430], [27405, 27430, 27429], [27406, 27407, 27430], [27407, 27431, 27430], [27407, 27408, 27432], [27407, 27432, 27431], [27408, 27409, 27432], [27409, 27433, 27432], [27409, 27410, 27434], [27409, 27434, 27433], [27410, 27411, 27434], [27411, 27435, 27434], [9984, 27412, 10110], [27412, 27436, 10110], [27412, 27413, 27437], [27412, 27437, 27436], [27413, 27414, 27437], [27414, 27438, 27437], [27414, 27415, 27439], [27414, 27439, 27438], [27415, 27416, 27439], [27416, 27440, 27439], [27416, 27417, 27441], [27416, 27441, 27440], [27417, 27418, 27441], [27418, 27442, 27441], [27418, 27419, 27443], [27418, 27443, 27442], [27419, 27420, 27443], [27420, 27444, 27443], [27420, 27421, 27445], [27420, 27445, 27444], [27421, 27422, 27445], [27422, 27446, 27445], [27422, 27423, 27447], [27422, 27447, 27446], [27423, 27424, 27447], [27424, 27448, 27447], [27424, 27425, 27449], [27424, 27449, 27448], [27425, 27426, 27449], [27426, 27450, 27449], [27426, 27427, 27451], [27426, 27451, 27450], [27427, 27428, 27451], [27428, 27452, 27451], [27428, 27429, 27453], [27428, 27453, 27452], [27429, 27430, 27453], [27430, 27454, 27453], [27430, 27431, 27455], [27430, 27455, 27454], [27431, 27432, 27455], [27432, 27456, 27455], [27432, 27433, 27457], [27432, 27457, 27456], [27433, 27434, 27457], [27434, 27458, 27457], [27434, 27435, 27459], [27434, 27459, 27458], [10110, 27436, 27460], [10110, 27460, 10236], [27436, 27437, 27460], [27437, 27461, 27460], [27437, 27438, 27462], [27437, 27462, 27461], [27438, 27439, 27462], [27439, 27463, 27462], [27439, 27440, 27464], [27439, 27464, 27463], [27440, 27441, 27464], [27441, 27465, 27464], [27441, 27442, 27466], [27441, 27466, 27465], [27442, 27443, 27466], [27443, 27467, 27466], [27443, 27444, 27468], [27443, 27468, 27467], [27444, 27445, 27468], [27445, 27469, 27468], [27445, 27446, 27470], [27445, 27470, 27469], [27446, 27447, 27470], [27447, 27471, 27470], [27447, 27448, 27472], [27447, 27472, 27471], [27448, 27449, 27472], [27449, 27473, 27472], [27449, 27450, 27474], [27449, 27474, 27473], [27450, 27451, 27474], [27451, 27475, 27474], [27451, 27452, 27476], [27451, 27476, 27475], [27452, 27453, 27476], [27453, 27477, 27476], [27453, 27454, 27478], [27453, 27478, 
27477], [27454, 27455, 27478], [27455, 27479, 27478], [27455, 27456, 27480], [27455, 27480, 27479], [27456, 27457, 27480], [27457, 27481, 27480], [27457, 27458, 27482], [27457, 27482, 27481], [27458, 27459, 27482], [27459, 27483, 27482], [10236, 27460, 10365], [27460, 27484, 10365], [27460, 27461, 27485], [27460, 27485, 27484], [27461, 27462, 27485], [27462, 27486, 27485], [27462, 27463, 27487], [27462, 27487, 27486], [27463, 27464, 27487], [27464, 27488, 27487], [27464, 27465, 27489], [27464, 27489, 27488], [27465, 27466, 27489], [27466, 27490, 27489], [27466, 27467, 27491], [27466, 27491, 27490], [27467, 27468, 27491], [27468, 27492, 27491], [27468, 27469, 27493], [27468, 27493, 27492], [27469, 27470, 27493], [27470, 27494, 27493], [27470, 27471, 27495], [27470, 27495, 27494], [27471, 27472, 27495], [27472, 27496, 27495], [27472, 27473, 27497], [27472, 27497, 27496], [27473, 27474, 27497], [27474, 27498, 27497], [27474, 27475, 27499], [27474, 27499, 27498], [27475, 27476, 27499], [27476, 27500, 27499], [27476, 27477, 27501], [27476, 27501, 27500], [27477, 27478, 27501], [27478, 27502, 27501], [27478, 27479, 27503], [27478, 27503, 27502], [27479, 27480, 27503], [27480, 27504, 27503], [27480, 27481, 27505], [27480, 27505, 27504], [27481, 27482, 27505], [27482, 27506, 27505], [27482, 27483, 27507], [27482, 27507, 27506], [10365, 27484, 27508], [10365, 27508, 10494], [27484, 27485, 27508], [27485, 27509, 27508], [27485, 27486, 27510], [27485, 27510, 27509], [27486, 27487, 27510], [27487, 27511, 27510], [27487, 27488, 27512], [27487, 27512, 27511], [27488, 27489, 27512], [27489, 27513, 27512], [27489, 27490, 27514], [27489, 27514, 27513], [27490, 27491, 27514], [27491, 27515, 27514], [27491, 27492, 27516], [27491, 27516, 27515], [27492, 27493, 27516], [27493, 27517, 27516], [27493, 27494, 27518], [27493, 27518, 27517], [27494, 27495, 27518], [27495, 27519, 27518], [27495, 27496, 27520], [27495, 27520, 27519], [27496, 27497, 27520], [27497, 27521, 27520], [27497, 27498, 27522], [27497, 27522, 27521], [27498, 27499, 27522], [27499, 27523, 27522], [27499, 27500, 27524], [27499, 27524, 27523], [27500, 27501, 27524], [27501, 27525, 27524], [27501, 27502, 27526], [27501, 27526, 27525], [27502, 27503, 27526], [27503, 27527, 27526], [27503, 27504, 27528], [27503, 27528, 27527], [27504, 27505, 27528], [27505, 27529, 27528], [27505, 27506, 27530], [27505, 27530, 27529], [27506, 27507, 27530], [27507, 27531, 27530], [10494, 27508, 10623], [27508, 27532, 10623], [27508, 27509, 27533], [27508, 27533, 27532], [27509, 27510, 27533], [27510, 27534, 27533], [27510, 27511, 27535], [27510, 27535, 27534], [27511, 27512, 27535], [27512, 27536, 27535], [27512, 27513, 27537], [27512, 27537, 27536], [27513, 27514, 27537], [27514, 27538, 27537], [27514, 27515, 27539], [27514, 27539, 27538], [27515, 27516, 27539], [27516, 27540, 27539], [27516, 27517, 27541], [27516, 27541, 27540], [27517, 27518, 27541], [27518, 27542, 27541], [27518, 27519, 27543], [27518, 27543, 27542], [27519, 27520, 27543], [27520, 27544, 27543], [27520, 27521, 27545], [27520, 27545, 27544], [27521, 27522, 27545], [27522, 27546, 27545], [27522, 27523, 27547], [27522, 27547, 27546], [27523, 27524, 27547], [27524, 27548, 27547], [27524, 27525, 27549], [27524, 27549, 27548], [27525, 27526, 27549], [27526, 27550, 27549], [27526, 27527, 27551], [27526, 27551, 27550], [27527, 27528, 27551], [27528, 27552, 27551], [27528, 27529, 27553], [27528, 27553, 27552], [27529, 27530, 27553], [27530, 27554, 27553], [27530, 27531, 27555], [27530, 27555, 27554], 
[10623, 27532, 27556], [10623, 27556, 10752], [27532, 27533, 27556], [27533, 27557, 27556], [27533, 27534, 27558], [27533, 27558, 27557], [27534, 27535, 27558], [27535, 27559, 27558], [27535, 27536, 27560], [27535, 27560, 27559], [27536, 27537, 27560], [27537, 27561, 27560], [27537, 27538, 27562], [27537, 27562, 27561], [27538, 27539, 27562], [27539, 27563, 27562], [27539, 27540, 27564], [27539, 27564, 27563], [27540, 27541, 27564], [27541, 27565, 27564], [27541, 27542, 27566], [27541, 27566, 27565], [27542, 27543, 27566], [27543, 27567, 27566], [27543, 27544, 27568], [27543, 27568, 27567], [27544, 27545, 27568], [27545, 27569, 27568], [27545, 27546, 27570], [27545, 27570, 27569], [27546, 27547, 27570], [27547, 27571, 27570], [27547, 27548, 27572], [27547, 27572, 27571], [27548, 27549, 27572], [27549, 27573, 27572], [27549, 27550, 27574], [27549, 27574, 27573], [27550, 27551, 27574], [27551, 27575, 27574], [27551, 27552, 27576], [27551, 27576, 27575], [27552, 27553, 27576], [27553, 27577, 27576], [27553, 27554, 27578], [27553, 27578, 27577], [27554, 27555, 27578], [27555, 27579, 27578], [10752, 27556, 10881], [27556, 27580, 10881], [27556, 27557, 27581], [27556, 27581, 27580], [27557, 27558, 27581], [27558, 27582, 27581], [27558, 27559, 27583], [27558, 27583, 27582], [27559, 27560, 27583], [27560, 27584, 27583], [27560, 27561, 27585], [27560, 27585, 27584], [27561, 27562, 27585], [27562, 27586, 27585], [27562, 27563, 27587], [27562, 27587, 27586], [27563, 27564, 27587], [27564, 27588, 27587], [27564, 27565, 27589], [27564, 27589, 27588], [27565, 27566, 27589], [27566, 27590, 27589], [27566, 27567, 27591], [27566, 27591, 27590], [27567, 27568, 27591], [27568, 27592, 27591], [27568, 27569, 27593], [27568, 27593, 27592], [27569, 27570, 27593], [27570, 27594, 27593], [27570, 27571, 27595], [27570, 27595, 27594], [27571, 27572, 27595], [27572, 27596, 27595], [27572, 27573, 27597], [27572, 27597, 27596], [27573, 27574, 27597], [27574, 27598, 27597], [27574, 27575, 27599], [27574, 27599, 27598], [27575, 27576, 27599], [27576, 27600, 27599], [27576, 27577, 27601], [27576, 27601, 27600], [27577, 27578, 27601], [27578, 27602, 27601], [27578, 27579, 27603], [27578, 27603, 27602], [10881, 27580, 27604], [10881, 27604, 11010], [27580, 27581, 27604], [27581, 27605, 27604], [27581, 27582, 27606], [27581, 27606, 27605], [27582, 27583, 27606], [27583, 27607, 27606], [27583, 27584, 27608], [27583, 27608, 27607], [27584, 27585, 27608], [27585, 27609, 27608], [27585, 27586, 27610], [27585, 27610, 27609], [27586, 27587, 27610], [27587, 27611, 27610], [27587, 27588, 27612], [27587, 27612, 27611], [27588, 27589, 27612], [27589, 27613, 27612], [27589, 27590, 27614], [27589, 27614, 27613], [27590, 27591, 27614], [27591, 27615, 27614], [27591, 27592, 27616], [27591, 27616, 27615], [27592, 27593, 27616], [27593, 27617, 27616], [27593, 27594, 27618], [27593, 27618, 27617], [27594, 27595, 27618], [27595, 27619, 27618], [27595, 27596, 27620], [27595, 27620, 27619], [27596, 27597, 27620], [27597, 27621, 27620], [27597, 27598, 27622], [27597, 27622, 27621], [27598, 27599, 27622], [27599, 27623, 27622], [27599, 27600, 27624], [27599, 27624, 27623], [27600, 27601, 27624], [27601, 27625, 27624], [27601, 27602, 27626], [27601, 27626, 27625], [27602, 27603, 27626], [27603, 27627, 27626], [11010, 27604, 11139], [27604, 27628, 11139], [27604, 27605, 27629], [27604, 27629, 27628], [27605, 27606, 27629], [27606, 27630, 27629], [27606, 27607, 27631], [27606, 27631, 27630], [27607, 27608, 27631], [27608, 27632, 27631], [27608, 
27609, 27633], [27608, 27633, 27632], [27609, 27610, 27633], [27610, 27634, 27633], [27610, 27611, 27635], [27610, 27635, 27634], [27611, 27612, 27635], [27612, 27636, 27635], [27612, 27613, 27637], [27612, 27637, 27636], [27613, 27614, 27637], [27614, 27638, 27637], [27614, 27615, 27639], [27614, 27639, 27638], [27615, 27616, 27639], [27616, 27640, 27639], [27616, 27617, 27641], [27616, 27641, 27640], [27617, 27618, 27641], [27618, 27642, 27641], [27618, 27619, 27643], [27618, 27643, 27642], [27619, 27620, 27643], [27620, 27644, 27643], [27620, 27621, 27645], [27620, 27645, 27644], [27621, 27622, 27645], [27622, 27646, 27645], [27622, 27623, 27647], [27622, 27647, 27646], [27623, 27624, 27647], [27624, 27648, 27647], [27624, 27625, 27649], [27624, 27649, 27648], [27625, 27626, 27649], [27626, 27650, 27649], [27626, 27627, 27651], [27626, 27651, 27650], [11139, 27628, 27652], [11139, 27652, 11268], [27628, 27629, 27652], [27629, 27653, 27652], [27629, 27630, 27654], [27629, 27654, 27653], [27630, 27631, 27654], [27631, 27655, 27654], [27631, 27632, 27656], [27631, 27656, 27655], [27632, 27633, 27656], [27633, 27657, 27656], [27633, 27634, 27658], [27633, 27658, 27657], [27634, 27635, 27658], [27635, 27659, 27658], [27635, 27636, 27660], [27635, 27660, 27659], [27636, 27637, 27660], [27637, 27661, 27660], [27637, 27638, 27662], [27637, 27662, 27661], [27638, 27639, 27662], [27639, 27663, 27662], [27639, 27640, 27664], [27639, 27664, 27663], [27640, 27641, 27664], [27641, 27665, 27664], [27641, 27642, 27666], [27641, 27666, 27665], [27642, 27643, 27666], [27643, 27667, 27666], [27643, 27644, 27668], [27643, 27668, 27667], [27644, 27645, 27668], [27645, 27669, 27668], [27645, 27646, 27670], [27645, 27670, 27669], [27646, 27647, 27670], [27647, 27671, 27670], [27647, 27648, 27672], [27647, 27672, 27671], [27648, 27649, 27672], [27649, 27673, 27672], [27649, 27650, 27674], [27649, 27674, 27673], [27650, 27651, 27674], [27651, 27675, 27674], [11268, 27652, 11397], [27652, 27676, 11397], [27652, 27653, 27677], [27652, 27677, 27676], [27653, 27654, 27677], [27654, 27678, 27677], [27654, 27655, 27679], [27654, 27679, 27678], [27655, 27656, 27679], [27656, 27680, 27679], [27656, 27657, 27681], [27656, 27681, 27680], [27657, 27658, 27681], [27658, 27682, 27681], [27658, 27659, 27683], [27658, 27683, 27682], [27659, 27660, 27683], [27660, 27684, 27683], [27660, 27661, 27685], [27660, 27685, 27684], [27661, 27662, 27685], [27662, 27686, 27685], [27662, 27663, 27687], [27662, 27687, 27686], [27663, 27664, 27687], [27664, 27688, 27687], [27664, 27665, 27689], [27664, 27689, 27688], [27665, 27666, 27689], [27666, 27690, 27689], [27666, 27667, 27691], [27666, 27691, 27690], [27667, 27668, 27691], [27668, 27692, 27691], [27668, 27669, 27693], [27668, 27693, 27692], [27669, 27670, 27693], [27670, 27694, 27693], [27670, 27671, 27695], [27670, 27695, 27694], [27671, 27672, 27695], [27672, 27696, 27695], [27672, 27673, 27697], [27672, 27697, 27696], [27673, 27674, 27697], [27674, 27698, 27697], [27674, 27675, 27699], [27674, 27699, 27698], [11397, 27676, 27700], [11397, 27700, 11526], [27676, 27677, 27700], [27677, 27701, 27700], [27677, 27678, 27702], [27677, 27702, 27701], [27678, 27679, 27702], [27679, 27703, 27702], [27679, 27680, 27704], [27679, 27704, 27703], [27680, 27681, 27704], [27681, 27705, 27704], [27681, 27682, 27706], [27681, 27706, 27705], [27682, 27683, 27706], [27683, 27707, 27706], [27683, 27684, 27708], [27683, 27708, 27707], [27684, 27685, 27708], [27685, 27709, 27708], [27685, 27686, 
27710], [27685, 27710, 27709], [27686, 27687, 27710], [27687, 27711, 27710], [27687, 27688, 27712], [27687, 27712, 27711], [27688, 27689, 27712], [27689, 27713, 27712], [27689, 27690, 27714], [27689, 27714, 27713], [27690, 27691, 27714], [27691, 27715, 27714], [27691, 27692, 27716], [27691, 27716, 27715], [27692, 27693, 27716], [27693, 27717, 27716], [27693, 27694, 27718], [27693, 27718, 27717], [27694, 27695, 27718], [27695, 27719, 27718], [27695, 27696, 27720], [27695, 27720, 27719], [27696, 27697, 27720], [27697, 27721, 27720], [27697, 27698, 27722], [27697, 27722, 27721], [27698, 27699, 27722], [27699, 27723, 27722], [11526, 27700, 11655], [27700, 27724, 11655], [27700, 27701, 27725], [27700, 27725, 27724], [27701, 27702, 27725], [27702, 27726, 27725], [27702, 27703, 27727], [27702, 27727, 27726], [27703, 27704, 27727], [27704, 27728, 27727], [27704, 27705, 27729], [27704, 27729, 27728], [27705, 27706, 27729], [27706, 27730, 27729], [27706, 27707, 27731], [27706, 27731, 27730], [27707, 27708, 27731], [27708, 27732, 27731], [27708, 27709, 27733], [27708, 27733, 27732], [27709, 27710, 27733], [27710, 27734, 27733], [27710, 27711, 27735], [27710, 27735, 27734], [27711, 27712, 27735], [27712, 27736, 27735], [27712, 27713, 27737], [27712, 27737, 27736], [27713, 27714, 27737], [27714, 27738, 27737], [27714, 27715, 27739], [27714, 27739, 27738], [27715, 27716, 27739], [27716, 27740, 27739], [27716, 27717, 27741], [27716, 27741, 27740], [27717, 27718, 27741], [27718, 27742, 27741], [27718, 27719, 27743], [27718, 27743, 27742], [27719, 27720, 27743], [27720, 27744, 27743], [27720, 27721, 27745], [27720, 27745, 27744], [27721, 27722, 27745], [27722, 27746, 27745], [27722, 27723, 27747], [27722, 27747, 27746], [11655, 27724, 27748], [11655, 27748, 11784], [27724, 27725, 27748], [27725, 27749, 27748], [27725, 27726, 27750], [27725, 27750, 27749], [27726, 27727, 27750], [27727, 27751, 27750], [27727, 27728, 27752], [27727, 27752, 27751], [27728, 27729, 27752], [27729, 27753, 27752], [27729, 27730, 27754], [27729, 27754, 27753], [27730, 27731, 27754], [27731, 27755, 27754], [27731, 27732, 27756], [27731, 27756, 27755], [27732, 27733, 27756], [27733, 27757, 27756], [27733, 27734, 27758], [27733, 27758, 27757], [27734, 27735, 27758], [27735, 27759, 27758], [27735, 27736, 27760], [27735, 27760, 27759], [27736, 27737, 27760], [27737, 27761, 27760], [27737, 27738, 27762], [27737, 27762, 27761], [27738, 27739, 27762], [27739, 27763, 27762], [27739, 27740, 27764], [27739, 27764, 27763], [27740, 27741, 27764], [27741, 27765, 27764], [27741, 27742, 27766], [27741, 27766, 27765], [27742, 27743, 27766], [27743, 27767, 27766], [27743, 27744, 27768], [27743, 27768, 27767], [27744, 27745, 27768], [27745, 27769, 27768], [27745, 27746, 27770], [27745, 27770, 27769], [27746, 27747, 27770], [27747, 27771, 27770], [11784, 27748, 11913], [27748, 27772, 11913], [27748, 27749, 27773], [27748, 27773, 27772], [27749, 27750, 27773], [27750, 27774, 27773], [27750, 27751, 27775], [27750, 27775, 27774], [27751, 27752, 27775], [27752, 27776, 27775], [27752, 27753, 27777], [27752, 27777, 27776], [27753, 27754, 27777], [27754, 27778, 27777], [27754, 27755, 27779], [27754, 27779, 27778], [27755, 27756, 27779], [27756, 27780, 27779], [27756, 27757, 27781], [27756, 27781, 27780], [27757, 27758, 27781], [27758, 27782, 27781], [27758, 27759, 27783], [27758, 27783, 27782], [27759, 27760, 27783], [27760, 27784, 27783], [27760, 27761, 27785], [27760, 27785, 27784], [27761, 27762, 27785], [27762, 27786, 27785], [27762, 27763, 27787], 
[27762, 27787, 27786], [27763, 27764, 27787], [27764, 27788, 27787], [27764, 27765, 27789], [27764, 27789, 27788], [27765, 27766, 27789], [27766, 27790, 27789], [27766, 27767, 27791], [27766, 27791, 27790], [27767, 27768, 27791], [27768, 27792, 27791], [27768, 27769, 27793], [27768, 27793, 27792], [27769, 27770, 27793], [27770, 27794, 27793], [27770, 27771, 27795], [27770, 27795, 27794], [11913, 27772, 27796], [11913, 27796, 12042], [27772, 27773, 27796], [27773, 27797, 27796], [27773, 27774, 27798], [27773, 27798, 27797], [27774, 27775, 27798], [27775, 27799, 27798], [27775, 27776, 27800], [27775, 27800, 27799], [27776, 27777, 27800], [27777, 27801, 27800], [27777, 27778, 27802], [27777, 27802, 27801], [27778, 27779, 27802], [27779, 27803, 27802], [27779, 27780, 27804], [27779, 27804, 27803], [27780, 27781, 27804], [27781, 27805, 27804], [27781, 27782, 27806], [27781, 27806, 27805], [27782, 27783, 27806], [27783, 27807, 27806], [27783, 27784, 27808], [27783, 27808, 27807], [27784, 27785, 27808], [27785, 27809, 27808], [27785, 27786, 27810], [27785, 27810, 27809], [27786, 27787, 27810], [27787, 27811, 27810], [27787, 27788, 27812], [27787, 27812, 27811], [27788, 27789, 27812], [27789, 27813, 27812], [27789, 27790, 27814], [27789, 27814, 27813], [27790, 27791, 27814], [27791, 27815, 27814], [27791, 27792, 27816], [27791, 27816, 27815], [27792, 27793, 27816], [27793, 27817, 27816], [27793, 27794, 27818], [27793, 27818, 27817], [27794, 27795, 27818], [27795, 27819, 27818], [12042, 27796, 12171], [27796, 27820, 12171], [27796, 27797, 27821], [27796, 27821, 27820], [27797, 27798, 27821], [27798, 27822, 27821], [27798, 27799, 27823], [27798, 27823, 27822], [27799, 27800, 27823], [27800, 27824, 27823], [27800, 27801, 27825], [27800, 27825, 27824], [27801, 27802, 27825], [27802, 27826, 27825], [27802, 27803, 27827], [27802, 27827, 27826], [27803, 27804, 27827], [27804, 27828, 27827], [27804, 27805, 27829], [27804, 27829, 27828], [27805, 27806, 27829], [27806, 27830, 27829], [27806, 27807, 27831], [27806, 27831, 27830], [27807, 27808, 27831], [27808, 27832, 27831], [27808, 27809, 27833], [27808, 27833, 27832], [27809, 27810, 27833], [27810, 27834, 27833], [27810, 27811, 27835], [27810, 27835, 27834], [27811, 27812, 27835], [27812, 27836, 27835], [27812, 27813, 27837], [27812, 27837, 27836], [27813, 27814, 27837], [27814, 27838, 27837], [27814, 27815, 27839], [27814, 27839, 27838], [27815, 27816, 27839], [27816, 27840, 27839], [27816, 27817, 27841], [27816, 27841, 27840], [27817, 27818, 27841], [27818, 27842, 27841], [27818, 27819, 27843], [27818, 27843, 27842], [12171, 27820, 27844], [12171, 27844, 12300], [27820, 27821, 27844], [27821, 27845, 27844], [27821, 27822, 27846], [27821, 27846, 27845], [27822, 27823, 27846], [27823, 27847, 27846], [27823, 27824, 27848], [27823, 27848, 27847], [27824, 27825, 27848], [27825, 27849, 27848], [27825, 27826, 27850], [27825, 27850, 27849], [27826, 27827, 27850], [27827, 27851, 27850], [27827, 27828, 27852], [27827, 27852, 27851], [27828, 27829, 27852], [27829, 27853, 27852], [27829, 27830, 27854], [27829, 27854, 27853], [27830, 27831, 27854], [27831, 27855, 27854], [27831, 27832, 27856], [27831, 27856, 27855], [27832, 27833, 27856], [27833, 27857, 27856], [27833, 27834, 27858], [27833, 27858, 27857], [27834, 27835, 27858], [27835, 27859, 27858], [27835, 27836, 27860], [27835, 27860, 27859], [27836, 27837, 27860], [27837, 27861, 27860], [27837, 27838, 27862], [27837, 27862, 27861], [27838, 27839, 27862], [27839, 27863, 27862], [27839, 27840, 27864], [27839, 
27864, 27863], [27840, 27841, 27864], [27841, 27865, 27864], [27841, 27842, 27866], [27841, 27866, 27865], [27842, 27843, 27866], [27843, 27867, 27866], [12300, 27844, 12429], [27844, 27868, 12429], [27844, 27845, 27869], [27844, 27869, 27868], [27845, 27846, 27869], [27846, 27870, 27869], [27846, 27847, 27871], [27846, 27871, 27870], [27847, 27848, 27871], [27848, 27872, 27871], [27848, 27849, 27873], [27848, 27873, 27872], [27849, 27850, 27873], [27850, 27874, 27873], [27850, 27851, 27875], [27850, 27875, 27874], [27851, 27852, 27875], [27852, 27876, 27875], [27852, 27853, 27877], [27852, 27877, 27876], [27853, 27854, 27877], [27854, 27878, 27877], [27854, 27855, 27879], [27854, 27879, 27878], [27855, 27856, 27879], [27856, 27880, 27879], [27856, 27857, 27881], [27856, 27881, 27880], [27857, 27858, 27881], [27858, 27882, 27881], [27858, 27859, 27883], [27858, 27883, 27882], [27859, 27860, 27883], [27860, 27884, 27883], [27860, 27861, 27885], [27860, 27885, 27884], [27861, 27862, 27885], [27862, 27886, 27885], [27862, 27863, 27887], [27862, 27887, 27886], [27863, 27864, 27887], [27864, 27888, 27887], [27864, 27865, 27889], [27864, 27889, 27888], [27865, 27866, 27889], [27866, 27890, 27889], [27866, 27867, 27891], [27866, 27891, 27890], [12429, 27868, 27892], [12429, 27892, 12558], [27868, 27869, 27892], [27869, 27893, 27892], [27869, 27870, 27894], [27869, 27894, 27893], [27870, 27871, 27894], [27871, 27895, 27894], [27871, 27872, 27896], [27871, 27896, 27895], [27872, 27873, 27896], [27873, 27897, 27896], [27873, 27874, 27898], [27873, 27898, 27897], [27874, 27875, 27898], [27875, 27899, 27898], [27875, 27876, 27900], [27875, 27900, 27899], [27876, 27877, 27900], [27877, 27901, 27900], [27877, 27878, 27902], [27877, 27902, 27901], [27878, 27879, 27902], [27879, 27903, 27902], [27879, 27880, 27904], [27879, 27904, 27903], [27880, 27881, 27904], [27881, 27905, 27904], [27881, 27882, 27906], [27881, 27906, 27905], [27882, 27883, 27906], [27883, 27907, 27906], [27883, 27884, 27908], [27883, 27908, 27907], [27884, 27885, 27908], [27885, 27909, 27908], [27885, 27886, 27910], [27885, 27910, 27909], [27886, 27887, 27910], [27887, 27911, 27910], [27887, 27888, 27912], [27887, 27912, 27911], [27888, 27889, 27912], [27889, 27913, 27912], [27889, 27890, 27914], [27889, 27914, 27913], [27890, 27891, 27914], [27891, 27915, 27914], [12558, 27892, 12687], [27892, 27916, 12687], [27892, 27893, 27917], [27892, 27917, 27916], [27893, 27894, 27917], [27894, 27918, 27917], [27894, 27895, 27919], [27894, 27919, 27918], [27895, 27896, 27919], [27896, 27920, 27919], [27896, 27897, 27921], [27896, 27921, 27920], [27897, 27898, 27921], [27898, 27922, 27921], [27898, 27899, 27923], [27898, 27923, 27922], [27899, 27900, 27923], [27900, 27924, 27923], [27900, 27901, 27925], [27900, 27925, 27924], [27901, 27902, 27925], [27902, 27926, 27925], [27902, 27903, 27927], [27902, 27927, 27926], [27903, 27904, 27927], [27904, 27928, 27927], [27904, 27905, 27929], [27904, 27929, 27928], [27905, 27906, 27929], [27906, 27930, 27929], [27906, 27907, 27931], [27906, 27931, 27930], [27907, 27908, 27931], [27908, 27932, 27931], [27908, 27909, 27933], [27908, 27933, 27932], [27909, 27910, 27933], [27910, 27934, 27933], [27910, 27911, 27935], [27910, 27935, 27934], [27911, 27912, 27935], [27912, 27936, 27935], [27912, 27913, 27937], [27912, 27937, 27936], [27913, 27914, 27937], [27914, 27938, 27937], [27914, 27915, 27939], [27914, 27939, 27938], [12687, 27916, 27940], [12687, 27940, 12816], [27916, 27917, 27940], [27917, 27941, 
27940], [27917, 27918, 27942], [27917, 27942, 27941], [27918, 27919, 27942], [27919, 27943, 27942], [27919, 27920, 27944], [27919, 27944, 27943], [27920, 27921, 27944], [27921, 27945, 27944], [27921, 27922, 27946], [27921, 27946, 27945], [27922, 27923, 27946], [27923, 27947, 27946], [27923, 27924, 27948], [27923, 27948, 27947], [27924, 27925, 27948], [27925, 27949, 27948], [27925, 27926, 27950], [27925, 27950, 27949], [27926, 27927, 27950], [27927, 27951, 27950], [27927, 27928, 27952], [27927, 27952, 27951], [27928, 27929, 27952], [27929, 27953, 27952], [27929, 27930, 27954], [27929, 27954, 27953], [27930, 27931, 27954], [27931, 27955, 27954], [27931, 27932, 27956], [27931, 27956, 27955], [27932, 27933, 27956], [27933, 27957, 27956], [27933, 27934, 27958], [27933, 27958, 27957], [27934, 27935, 27958], [27935, 27959, 27958], [27935, 27936, 27960], [27935, 27960, 27959], [27936, 27937, 27960], [27937, 27961, 27960], [27937, 27938, 27962], [27937, 27962, 27961], [27938, 27939, 27962], [27939, 27963, 27962], [12816, 27940, 12945], [27940, 27964, 12945], [27940, 27941, 27965], [27940, 27965, 27964], [27941, 27942, 27965], [27942, 27966, 27965], [27942, 27943, 27967], [27942, 27967, 27966], [27943, 27944, 27967], [27944, 27968, 27967], [27944, 27945, 27969], [27944, 27969, 27968], [27945, 27946, 27969], [27946, 27970, 27969], [27946, 27947, 27971], [27946, 27971, 27970], [27947, 27948, 27971], [27948, 27972, 27971], [27948, 27949, 27973], [27948, 27973, 27972], [27949, 27950, 27973], [27950, 27974, 27973], [27950, 27951, 27975], [27950, 27975, 27974], [27951, 27952, 27975], [27952, 27976, 27975], [27952, 27953, 27977], [27952, 27977, 27976], [27953, 27954, 27977], [27954, 27978, 27977], [27954, 27955, 27979], [27954, 27979, 27978], [27955, 27956, 27979], [27956, 27980, 27979], [27956, 27957, 27981], [27956, 27981, 27980], [27957, 27958, 27981], [27958, 27982, 27981], [27958, 27959, 27983], [27958, 27983, 27982], [27959, 27960, 27983], [27960, 27984, 27983], [27960, 27961, 27985], [27960, 27985, 27984], [27961, 27962, 27985], [27962, 27986, 27985], [27962, 27963, 27987], [27962, 27987, 27986], [12945, 27964, 27988], [12945, 27988, 13074], [27964, 27965, 27988], [27965, 27989, 27988], [27965, 27966, 27990], [27965, 27990, 27989], [27966, 27967, 27990], [27967, 27991, 27990], [27967, 27968, 27992], [27967, 27992, 27991], [27968, 27969, 27992], [27969, 27993, 27992], [27969, 27970, 27994], [27969, 27994, 27993], [27970, 27971, 27994], [27971, 27995, 27994], [27971, 27972, 27996], [27971, 27996, 27995], [27972, 27973, 27996], [27973, 27997, 27996], [27973, 27974, 27998], [27973, 27998, 27997], [27974, 27975, 27998], [27975, 27999, 27998], [27975, 27976, 28000], [27975, 28000, 27999], [27976, 27977, 28000], [27977, 28001, 28000], [27977, 27978, 28002], [27977, 28002, 28001], [27978, 27979, 28002], [27979, 28003, 28002], [27979, 27980, 28004], [27979, 28004, 28003], [27980, 27981, 28004], [27981, 28005, 28004], [27981, 27982, 28006], [27981, 28006, 28005], [27982, 27983, 28006], [27983, 28007, 28006], [27983, 27984, 28008], [27983, 28008, 28007], [27984, 27985, 28008], [27985, 28009, 28008], [27985, 27986, 28010], [27985, 28010, 28009], [27986, 27987, 28010], [27987, 28011, 28010], [13074, 27988, 13203], [27988, 28012, 13203], [27988, 27989, 28013], [27988, 28013, 28012], [27989, 27990, 28013], [27990, 28014, 28013], [27990, 27991, 28015], [27990, 28015, 28014], [27991, 27992, 28015], [27992, 28016, 28015], [27992, 27993, 28017], [27992, 28017, 28016], [27993, 27994, 28017], [27994, 28018, 28017], 
[27994, 27995, 28019], [27994, 28019, 28018], [27995, 27996, 28019], [27996, 28020, 28019], [27996, 27997, 28021], [27996, 28021, 28020], [27997, 27998, 28021], [27998, 28022, 28021], [27998, 27999, 28023], [27998, 28023, 28022], [27999, 28000, 28023], [28000, 28024, 28023], [28000, 28001, 28025], [28000, 28025, 28024], [28001, 28002, 28025], [28002, 28026, 28025], [28002, 28003, 28027], [28002, 28027, 28026], [28003, 28004, 28027], [28004, 28028, 28027], [28004, 28005, 28029], [28004, 28029, 28028], [28005, 28006, 28029], [28006, 28030, 28029], [28006, 28007, 28031], [28006, 28031, 28030], [28007, 28008, 28031], [28008, 28032, 28031], [28008, 28009, 28033], [28008, 28033, 28032], [28009, 28010, 28033], [28010, 28034, 28033], [28010, 28011, 28035], [28010, 28035, 28034], [13203, 28012, 28036], [13203, 28036, 13332], [28012, 28013, 28036], [28013, 28037, 28036], [28013, 28014, 28038], [28013, 28038, 28037], [28014, 28015, 28038], [28015, 28039, 28038], [28015, 28016, 28040], [28015, 28040, 28039], [28016, 28017, 28040], [28017, 28041, 28040], [28017, 28018, 28042], [28017, 28042, 28041], [28018, 28019, 28042], [28019, 28043, 28042], [28019, 28020, 28044], [28019, 28044, 28043], [28020, 28021, 28044], [28021, 28045, 28044], [28021, 28022, 28046], [28021, 28046, 28045], [28022, 28023, 28046], [28023, 28047, 28046], [28023, 28024, 28048], [28023, 28048, 28047], [28024, 28025, 28048], [28025, 28049, 28048], [28025, 28026, 28050], [28025, 28050, 28049], [28026, 28027, 28050], [28027, 28051, 28050], [28027, 28028, 28052], [28027, 28052, 28051], [28028, 28029, 28052], [28029, 28053, 28052], [28029, 28030, 28054], [28029, 28054, 28053], [28030, 28031, 28054], [28031, 28055, 28054], [28031, 28032, 28056], [28031, 28056, 28055], [28032, 28033, 28056], [28033, 28057, 28056], [28033, 28034, 28058], [28033, 28058, 28057], [28034, 28035, 28058], [28035, 28059, 28058], [13332, 28036, 13461], [28036, 28060, 13461], [28036, 28037, 28061], [28036, 28061, 28060], [28037, 28038, 28061], [28038, 28062, 28061], [28038, 28039, 28063], [28038, 28063, 28062], [28039, 28040, 28063], [28040, 28064, 28063], [28040, 28041, 28065], [28040, 28065, 28064], [28041, 28042, 28065], [28042, 28066, 28065], [28042, 28043, 28067], [28042, 28067, 28066], [28043, 28044, 28067], [28044, 28068, 28067], [28044, 28045, 28069], [28044, 28069, 28068], [28045, 28046, 28069], [28046, 28070, 28069], [28046, 28047, 28071], [28046, 28071, 28070], [28047, 28048, 28071], [28048, 28072, 28071], [28048, 28049, 28073], [28048, 28073, 28072], [28049, 28050, 28073], [28050, 28074, 28073], [28050, 28051, 28075], [28050, 28075, 28074], [28051, 28052, 28075], [28052, 28076, 28075], [28052, 28053, 28077], [28052, 28077, 28076], [28053, 28054, 28077], [28054, 28078, 28077], [28054, 28055, 28079], [28054, 28079, 28078], [28055, 28056, 28079], [28056, 28080, 28079], [28056, 28057, 28081], [28056, 28081, 28080], [28057, 28058, 28081], [28058, 28082, 28081], [28058, 28059, 28083], [28058, 28083, 28082], [13461, 28060, 28084], [13461, 28084, 13590], [28060, 28061, 28084], [28061, 28085, 28084], [28061, 28062, 28086], [28061, 28086, 28085], [28062, 28063, 28086], [28063, 28087, 28086], [28063, 28064, 28088], [28063, 28088, 28087], [28064, 28065, 28088], [28065, 28089, 28088], [28065, 28066, 28090], [28065, 28090, 28089], [28066, 28067, 28090], [28067, 28091, 28090], [28067, 28068, 28092], [28067, 28092, 28091], [28068, 28069, 28092], [28069, 28093, 28092], [28069, 28070, 28094], [28069, 28094, 28093], [28070, 28071, 28094], [28071, 28095, 28094], [28071, 
28072, 28096], [28071, 28096, 28095], [28072, 28073, 28096], [28073, 28097, 28096], [28073, 28074, 28098], [28073, 28098, 28097], [28074, 28075, 28098], [28075, 28099, 28098], [28075, 28076, 28100], [28075, 28100, 28099], [28076, 28077, 28100], [28077, 28101, 28100], [28077, 28078, 28102], [28077, 28102, 28101], [28078, 28079, 28102], [28079, 28103, 28102], [28079, 28080, 28104], [28079, 28104, 28103], [28080, 28081, 28104], [28081, 28105, 28104], [28081, 28082, 28106], [28081, 28106, 28105], [28082, 28083, 28106], [28083, 28107, 28106], [13590, 28084, 13719], [28084, 28108, 13719], [28084, 28085, 28109], [28084, 28109, 28108], [28085, 28086, 28109], [28086, 28110, 28109], [28086, 28087, 28111], [28086, 28111, 28110], [28087, 28088, 28111], [28088, 28112, 28111], [28088, 28089, 28113], [28088, 28113, 28112], [28089, 28090, 28113], [28090, 28114, 28113], [28090, 28091, 28115], [28090, 28115, 28114], [28091, 28092, 28115], [28092, 28116, 28115], [28092, 28093, 28117], [28092, 28117, 28116], [28093, 28094, 28117], [28094, 28118, 28117], [28094, 28095, 28119], [28094, 28119, 28118], [28095, 28096, 28119], [28096, 28120, 28119], [28096, 28097, 28121], [28096, 28121, 28120], [28097, 28098, 28121], [28098, 28122, 28121], [28098, 28099, 28123], [28098, 28123, 28122], [28099, 28100, 28123], [28100, 28124, 28123], [28100, 28101, 28125], [28100, 28125, 28124], [28101, 28102, 28125], [28102, 28126, 28125], [28102, 28103, 28127], [28102, 28127, 28126], [28103, 28104, 28127], [28104, 28128, 28127], [28104, 28105, 28129], [28104, 28129, 28128], [28105, 28106, 28129], [28106, 28130, 28129], [28106, 28107, 28131], [28106, 28131, 28130], [13719, 28108, 28132], [13719, 28132, 13848], [28108, 28109, 28132], [28109, 28133, 28132], [28109, 28110, 28134], [28109, 28134, 28133], [28110, 28111, 28134], [28111, 28135, 28134], [28111, 28112, 28136], [28111, 28136, 28135], [28112, 28113, 28136], [28113, 28137, 28136], [28113, 28114, 28138], [28113, 28138, 28137], [28114, 28115, 28138], [28115, 28139, 28138], [28115, 28116, 28140], [28115, 28140, 28139], [28116, 28117, 28140], [28117, 28141, 28140], [28117, 28118, 28142], [28117, 28142, 28141], [28118, 28119, 28142], [28119, 28143, 28142], [28119, 28120, 28144], [28119, 28144, 28143], [28120, 28121, 28144], [28121, 28145, 28144], [28121, 28122, 28146], [28121, 28146, 28145], [28122, 28123, 28146], [28123, 28147, 28146], [28123, 28124, 28148], [28123, 28148, 28147], [28124, 28125, 28148], [28125, 28149, 28148], [28125, 28126, 28150], [28125, 28150, 28149], [28126, 28127, 28150], [28127, 28151, 28150], [28127, 28128, 28152], [28127, 28152, 28151], [28128, 28129, 28152], [28129, 28153, 28152], [28129, 28130, 28154], [28129, 28154, 28153], [28130, 28131, 28154], [28131, 28155, 28154], [13848, 28132, 13977], [28132, 28156, 13977], [28132, 28133, 28157], [28132, 28157, 28156], [28133, 28134, 28157], [28134, 28158, 28157], [28134, 28135, 28159], [28134, 28159, 28158], [28135, 28136, 28159], [28136, 28160, 28159], [28136, 28137, 28161], [28136, 28161, 28160], [28137, 28138, 28161], [28138, 28162, 28161], [28138, 28139, 28163], [28138, 28163, 28162], [28139, 28140, 28163], [28140, 28164, 28163], [28140, 28141, 28165], [28140, 28165, 28164], [28141, 28142, 28165], [28142, 28166, 28165], [28142, 28143, 28167], [28142, 28167, 28166], [28143, 28144, 28167], [28144, 28168, 28167], [28144, 28145, 28169], [28144, 28169, 28168], [28145, 28146, 28169], [28146, 28170, 28169], [28146, 28147, 28171], [28146, 28171, 28170], [28147, 28148, 28171], [28148, 28172, 28171], [28148, 28149, 
28173], [28148, 28173, 28172], [28149, 28150, 28173], [28150, 28174, 28173], [28150, 28151, 28175], [28150, 28175, 28174], [28151, 28152, 28175], [28152, 28176, 28175], [28152, 28153, 28177], [28152, 28177, 28176], [28153, 28154, 28177], [28154, 28178, 28177], [28154, 28155, 28179], [28154, 28179, 28178], [13977, 28156, 28180], [13977, 28180, 14106], [28156, 28157, 28180], [28157, 28181, 28180], [28157, 28158, 28182], [28157, 28182, 28181], [28158, 28159, 28182], [28159, 28183, 28182], [28159, 28160, 28184], [28159, 28184, 28183], [28160, 28161, 28184], [28161, 28185, 28184], [28161, 28162, 28186], [28161, 28186, 28185], [28162, 28163, 28186], [28163, 28187, 28186], [28163, 28164, 28188], [28163, 28188, 28187], [28164, 28165, 28188], [28165, 28189, 28188], [28165, 28166, 28190], [28165, 28190, 28189], [28166, 28167, 28190], [28167, 28191, 28190], [28167, 28168, 28192], [28167, 28192, 28191], [28168, 28169, 28192], [28169, 28193, 28192], [28169, 28170, 28194], [28169, 28194, 28193], [28170, 28171, 28194], [28171, 28195, 28194], [28171, 28172, 28196], [28171, 28196, 28195], [28172, 28173, 28196], [28173, 28197, 28196], [28173, 28174, 28198], [28173, 28198, 28197], [28174, 28175, 28198], [28175, 28199, 28198], [28175, 28176, 28200], [28175, 28200, 28199], [28176, 28177, 28200], [28177, 28201, 28200], [28177, 28178, 28202], [28177, 28202, 28201], [28178, 28179, 28202], [28179, 28203, 28202], [14106, 28180, 14235], [28180, 28204, 14235], [28180, 28181, 28205], [28180, 28205, 28204], [28181, 28182, 28205], [28182, 28206, 28205], [28182, 28183, 28207], [28182, 28207, 28206], [28183, 28184, 28207], [28184, 28208, 28207], [28184, 28185, 28209], [28184, 28209, 28208], [28185, 28186, 28209], [28186, 28210, 28209], [28186, 28187, 28211], [28186, 28211, 28210], [28187, 28188, 28211], [28188, 28212, 28211], [28188, 28189, 28213], [28188, 28213, 28212], [28189, 28190, 28213], [28190, 28214, 28213], [28190, 28191, 28215], [28190, 28215, 28214], [28191, 28192, 28215], [28192, 28216, 28215], [28192, 28193, 28217], [28192, 28217, 28216], [28193, 28194, 28217], [28194, 28218, 28217], [28194, 28195, 28219], [28194, 28219, 28218], [28195, 28196, 28219], [28196, 28220, 28219], [28196, 28197, 28221], [28196, 28221, 28220], [28197, 28198, 28221], [28198, 28222, 28221], [28198, 28199, 28223], [28198, 28223, 28222], [28199, 28200, 28223], [28200, 28224, 28223], [28200, 28201, 28225], [28200, 28225, 28224], [28201, 28202, 28225], [28202, 28226, 28225], [28202, 28203, 28227], [28202, 28227, 28226], [14235, 28204, 28228], [14235, 28228, 14364], [28204, 28205, 28228], [28205, 28229, 28228], [28205, 28206, 28230], [28205, 28230, 28229], [28206, 28207, 28230], [28207, 28231, 28230], [28207, 28208, 28232], [28207, 28232, 28231], [28208, 28209, 28232], [28209, 28233, 28232], [28209, 28210, 28234], [28209, 28234, 28233], [28210, 28211, 28234], [28211, 28235, 28234], [28211, 28212, 28236], [28211, 28236, 28235], [28212, 28213, 28236], [28213, 28237, 28236], [28213, 28214, 28238], [28213, 28238, 28237], [28214, 28215, 28238], [28215, 28239, 28238], [28215, 28216, 28240], [28215, 28240, 28239], [28216, 28217, 28240], [28217, 28241, 28240], [28217, 28218, 28242], [28217, 28242, 28241], [28218, 28219, 28242], [28219, 28243, 28242], [28219, 28220, 28244], [28219, 28244, 28243], [28220, 28221, 28244], [28221, 28245, 28244], [28221, 28222, 28246], [28221, 28246, 28245], [28222, 28223, 28246], [28223, 28247, 28246], [28223, 28224, 28248], [28223, 28248, 28247], [28224, 28225, 28248], [28225, 28249, 28248], [28225, 28226, 28250], 
[28225, 28250, 28249], [28226, 28227, 28250], [28227, 28251, 28250], [14364, 28228, 14493], [28228, 28252, 14493], [28228, 28229, 28253], [28228, 28253, 28252], [28229, 28230, 28253], [28230, 28254, 28253], [28230, 28231, 28255], [28230, 28255, 28254], [28231, 28232, 28255], [28232, 28256, 28255], [28232, 28233, 28257], [28232, 28257, 28256], [28233, 28234, 28257], [28234, 28258, 28257], [28234, 28235, 28259], [28234, 28259, 28258], [28235, 28236, 28259], [28236, 28260, 28259], [28236, 28237, 28261], [28236, 28261, 28260], [28237, 28238, 28261], [28238, 28262, 28261], [28238, 28239, 28263], [28238, 28263, 28262], [28239, 28240, 28263], [28240, 28264, 28263], [28240, 28241, 28265], [28240, 28265, 28264], [28241, 28242, 28265], [28242, 28266, 28265], [28242, 28243, 28267], [28242, 28267, 28266], [28243, 28244, 28267], [28244, 28268, 28267], [28244, 28245, 28269], [28244, 28269, 28268], [28245, 28246, 28269], [28246, 28270, 28269], [28246, 28247, 28271], [28246, 28271, 28270], [28247, 28248, 28271], [28248, 28272, 28271], [28248, 28249, 28273], [28248, 28273, 28272], [28249, 28250, 28273], [28250, 28274, 28273], [28250, 28251, 28275], [28250, 28275, 28274], [14493, 28252, 28276], [14493, 28276, 14622], [28252, 28253, 28276], [28253, 28277, 28276], [28253, 28254, 28278], [28253, 28278, 28277], [28254, 28255, 28278], [28255, 28279, 28278], [28255, 28256, 28280], [28255, 28280, 28279], [28256, 28257, 28280], [28257, 28281, 28280], [28257, 28258, 28282], [28257, 28282, 28281], [28258, 28259, 28282], [28259, 28283, 28282], [28259, 28260, 28284], [28259, 28284, 28283], [28260, 28261, 28284], [28261, 28285, 28284], [28261, 28262, 28286], [28261, 28286, 28285], [28262, 28263, 28286], [28263, 28287, 28286], [28263, 28264, 28288], [28263, 28288, 28287], [28264, 28265, 28288], [28265, 28289, 28288], [28265, 28266, 28290], [28265, 28290, 28289], [28266, 28267, 28290], [28267, 28291, 28290], [28267, 28268, 28292], [28267, 28292, 28291], [28268, 28269, 28292], [28269, 28293, 28292], [28269, 28270, 28294], [28269, 28294, 28293], [28270, 28271, 28294], [28271, 28295, 28294], [28271, 28272, 28296], [28271, 28296, 28295], [28272, 28273, 28296], [28273, 28297, 28296], [28273, 28274, 28298], [28273, 28298, 28297], [28274, 28275, 28298], [28275, 28299, 28298], [14622, 28276, 14751], [28276, 28300, 14751], [28276, 28277, 28301], [28276, 28301, 28300], [28277, 28278, 28301], [28278, 28302, 28301], [28278, 28279, 28303], [28278, 28303, 28302], [28279, 28280, 28303], [28280, 28304, 28303], [28280, 28281, 28305], [28280, 28305, 28304], [28281, 28282, 28305], [28282, 28306, 28305], [28282, 28283, 28307], [28282, 28307, 28306], [28283, 28284, 28307], [28284, 28308, 28307], [28284, 28285, 28309], [28284, 28309, 28308], [28285, 28286, 28309], [28286, 28310, 28309], [28286, 28287, 28311], [28286, 28311, 28310], [28287, 28288, 28311], [28288, 28312, 28311], [28288, 28289, 28313], [28288, 28313, 28312], [28289, 28290, 28313], [28290, 28314, 28313], [28290, 28291, 28315], [28290, 28315, 28314], [28291, 28292, 28315], [28292, 28316, 28315], [28292, 28293, 28317], [28292, 28317, 28316], [28293, 28294, 28317], [28294, 28318, 28317], [28294, 28295, 28319], [28294, 28319, 28318], [28295, 28296, 28319], [28296, 28320, 28319], [28296, 28297, 28321], [28296, 28321, 28320], [28297, 28298, 28321], [28298, 28322, 28321], [28298, 28299, 28323], [28298, 28323, 28322], [14751, 28300, 28324], [14751, 28324, 14880], [28300, 28301, 28324], [28301, 28325, 28324], [28301, 28302, 28326], [28301, 28326, 28325], [28302, 28303, 28326], [28303, 
28327, 28326], [28303, 28304, 28328], [28303, 28328, 28327], [28304, 28305, 28328], [28305, 28329, 28328], [28305, 28306, 28330], [28305, 28330, 28329], [28306, 28307, 28330], [28307, 28331, 28330], [28307, 28308, 28332], [28307, 28332, 28331], [28308, 28309, 28332], [28309, 28333, 28332], [28309, 28310, 28334], [28309, 28334, 28333], [28310, 28311, 28334], [28311, 28335, 28334], [28311, 28312, 28336], [28311, 28336, 28335], [28312, 28313, 28336], [28313, 28337, 28336], [28313, 28314, 28338], [28313, 28338, 28337], [28314, 28315, 28338], [28315, 28339, 28338], [28315, 28316, 28340], [28315, 28340, 28339], [28316, 28317, 28340], [28317, 28341, 28340], [28317, 28318, 28342], [28317, 28342, 28341], [28318, 28319, 28342], [28319, 28343, 28342], [28319, 28320, 28344], [28319, 28344, 28343], [28320, 28321, 28344], [28321, 28345, 28344], [28321, 28322, 28346], [28321, 28346, 28345], [28322, 28323, 28346], [28323, 28347, 28346], [14880, 28324, 15009], [28324, 28348, 15009], [28324, 28325, 28349], [28324, 28349, 28348], [28325, 28326, 28349], [28326, 28350, 28349], [28326, 28327, 28351], [28326, 28351, 28350], [28327, 28328, 28351], [28328, 28352, 28351], [28328, 28329, 28353], [28328, 28353, 28352], [28329, 28330, 28353], [28330, 28354, 28353], [28330, 28331, 28355], [28330, 28355, 28354], [28331, 28332, 28355], [28332, 28356, 28355], [28332, 28333, 28357], [28332, 28357, 28356], [28333, 28334, 28357], [28334, 28358, 28357], [28334, 28335, 28359], [28334, 28359, 28358], [28335, 28336, 28359], [28336, 28360, 28359], [28336, 28337, 28361], [28336, 28361, 28360], [28337, 28338, 28361], [28338, 28362, 28361], [28338, 28339, 28363], [28338, 28363, 28362], [28339, 28340, 28363], [28340, 28364, 28363], [28340, 28341, 28365], [28340, 28365, 28364], [28341, 28342, 28365], [28342, 28366, 28365], [28342, 28343, 28367], [28342, 28367, 28366], [28343, 28344, 28367], [28344, 28368, 28367], [28344, 28345, 28369], [28344, 28369, 28368], [28345, 28346, 28369], [28346, 28370, 28369], [28346, 28347, 28371], [28346, 28371, 28370], [15009, 28348, 28372], [15009, 28372, 15138], [28348, 28349, 28372], [28349, 28373, 28372], [28349, 28350, 28374], [28349, 28374, 28373], [28350, 28351, 28374], [28351, 28375, 28374], [28351, 28352, 28376], [28351, 28376, 28375], [28352, 28353, 28376], [28353, 28377, 28376], [28353, 28354, 28378], [28353, 28378, 28377], [28354, 28355, 28378], [28355, 28379, 28378], [28355, 28356, 28380], [28355, 28380, 28379], [28356, 28357, 28380], [28357, 28381, 28380], [28357, 28358, 28382], [28357, 28382, 28381], [28358, 28359, 28382], [28359, 28383, 28382], [28359, 28360, 28384], [28359, 28384, 28383], [28360, 28361, 28384], [28361, 28385, 28384], [28361, 28362, 28386], [28361, 28386, 28385], [28362, 28363, 28386], [28363, 28387, 28386], [28363, 28364, 28388], [28363, 28388, 28387], [28364, 28365, 28388], [28365, 28389, 28388], [28365, 28366, 28390], [28365, 28390, 28389], [28366, 28367, 28390], [28367, 28391, 28390], [28367, 28368, 28392], [28367, 28392, 28391], [28368, 28369, 28392], [28369, 28393, 28392], [28369, 28370, 28394], [28369, 28394, 28393], [28370, 28371, 28394], [28371, 28395, 28394], [15138, 28372, 15267], [28372, 28396, 15267], [28372, 28373, 28397], [28372, 28397, 28396], [28373, 28374, 28397], [28374, 28398, 28397], [28374, 28375, 28399], [28374, 28399, 28398], [28375, 28376, 28399], [28376, 28400, 28399], [28376, 28377, 28401], [28376, 28401, 28400], [28377, 28378, 28401], [28378, 28402, 28401], [28378, 28379, 28403], [28378, 28403, 28402], [28379, 28380, 28403], [28380, 28404, 
28403], [28380, 28381, 28405], [28380, 28405, 28404], [28381, 28382, 28405], [28382, 28406, 28405], [28382, 28383, 28407], [28382, 28407, 28406], [28383, 28384, 28407], [28384, 28408, 28407], [28384, 28385, 28409], [28384, 28409, 28408], [28385, 28386, 28409], [28386, 28410, 28409], [28386, 28387, 28411], [28386, 28411, 28410], [28387, 28388, 28411], [28388, 28412, 28411], [28388, 28389, 28413], [28388, 28413, 28412], [28389, 28390, 28413], [28390, 28414, 28413], [28390, 28391, 28415], [28390, 28415, 28414], [28391, 28392, 28415], [28392, 28416, 28415], [28392, 28393, 28417], [28392, 28417, 28416], [28393, 28394, 28417], [28394, 28418, 28417], [28394, 28395, 28419], [28394, 28419, 28418], [15267, 28396, 28420], [15267, 28420, 15396], [28396, 28397, 28420], [28397, 28421, 28420], [28397, 28398, 28422], [28397, 28422, 28421], [28398, 28399, 28422], [28399, 28423, 28422], [28399, 28400, 28424], [28399, 28424, 28423], [28400, 28401, 28424], [28401, 28425, 28424], [28401, 28402, 28426], [28401, 28426, 28425], [28402, 28403, 28426], [28403, 28427, 28426], [28403, 28404, 28428], [28403, 28428, 28427], [28404, 28405, 28428], [28405, 28429, 28428], [28405, 28406, 28430], [28405, 28430, 28429], [28406, 28407, 28430], [28407, 28431, 28430], [28407, 28408, 28432], [28407, 28432, 28431], [28408, 28409, 28432], [28409, 28433, 28432], [28409, 28410, 28434], [28409, 28434, 28433], [28410, 28411, 28434], [28411, 28435, 28434], [28411, 28412, 28436], [28411, 28436, 28435], [28412, 28413, 28436], [28413, 28437, 28436], [28413, 28414, 28438], [28413, 28438, 28437], [28414, 28415, 28438], [28415, 28439, 28438], [28415, 28416, 28440], [28415, 28440, 28439], [28416, 28417, 28440], [28417, 28441, 28440], [28417, 28418, 28442], [28417, 28442, 28441], [28418, 28419, 28442], [28419, 28443, 28442], [15396, 28420, 15525], [28420, 28444, 15525], [28420, 28421, 28445], [28420, 28445, 28444], [28421, 28422, 28445], [28422, 28446, 28445], [28422, 28423, 28447], [28422, 28447, 28446], [28423, 28424, 28447], [28424, 28448, 28447], [28424, 28425, 28449], [28424, 28449, 28448], [28425, 28426, 28449], [28426, 28450, 28449], [28426, 28427, 28451], [28426, 28451, 28450], [28427, 28428, 28451], [28428, 28452, 28451], [28428, 28429, 28453], [28428, 28453, 28452], [28429, 28430, 28453], [28430, 28454, 28453], [28430, 28431, 28455], [28430, 28455, 28454], [28431, 28432, 28455], [28432, 28456, 28455], [28432, 28433, 28457], [28432, 28457, 28456], [28433, 28434, 28457], [28434, 28458, 28457], [28434, 28435, 28459], [28434, 28459, 28458], [28435, 28436, 28459], [28436, 28460, 28459], [28436, 28437, 28461], [28436, 28461, 28460], [28437, 28438, 28461], [28438, 28462, 28461], [28438, 28439, 28463], [28438, 28463, 28462], [28439, 28440, 28463], [28440, 28464, 28463], [28440, 28441, 28465], [28440, 28465, 28464], [28441, 28442, 28465], [28442, 28466, 28465], [28442, 28443, 28467], [28442, 28467, 28466], [15525, 28444, 28468], [15525, 28468, 15654], [28444, 28445, 28468], [28445, 28469, 28468], [28445, 28446, 28470], [28445, 28470, 28469], [28446, 28447, 28470], [28447, 28471, 28470], [28447, 28448, 28472], [28447, 28472, 28471], [28448, 28449, 28472], [28449, 28473, 28472], [28449, 28450, 28474], [28449, 28474, 28473], [28450, 28451, 28474], [28451, 28475, 28474], [28451, 28452, 28476], [28451, 28476, 28475], [28452, 28453, 28476], [28453, 28477, 28476], [28453, 28454, 28478], [28453, 28478, 28477], [28454, 28455, 28478], [28455, 28479, 28478], [28455, 28456, 28480], [28455, 28480, 28479], [28456, 28457, 28480], [28457, 28481, 28480], 
[28457, 28458, 28482], [28457, 28482, 28481], [28458, 28459, 28482], [28459, 28483, 28482], [28459, 28460, 28484], [28459, 28484, 28483], [28460, 28461, 28484], [28461, 28485, 28484], [28461, 28462, 28486], [28461, 28486, 28485], [28462, 28463, 28486], [28463, 28487, 28486], [28463, 28464, 28488], [28463, 28488, 28487], [28464, 28465, 28488], [28465, 28489, 28488], [28465, 28466, 28490], [28465, 28490, 28489], [28466, 28467, 28490], [28467, 28491, 28490], [15654, 28468, 15783], [28468, 28492, 15783], [28468, 28469, 28493], [28468, 28493, 28492], [28469, 28470, 28493], [28470, 28494, 28493], [28470, 28471, 28495], [28470, 28495, 28494], [28471, 28472, 28495], [28472, 28496, 28495], [28472, 28473, 28497], [28472, 28497, 28496], [28473, 28474, 28497], [28474, 28498, 28497], [28474, 28475, 28499], [28474, 28499, 28498], [28475, 28476, 28499], [28476, 28500, 28499], [28476, 28477, 28501], [28476, 28501, 28500], [28477, 28478, 28501], [28478, 28502, 28501], [28478, 28479, 28503], [28478, 28503, 28502], [28479, 28480, 28503], [28480, 28504, 28503], [28480, 28481, 28505], [28480, 28505, 28504], [28481, 28482, 28505], [28482, 28506, 28505], [28482, 28483, 28507], [28482, 28507, 28506], [28483, 28484, 28507], [28484, 28508, 28507], [28484, 28485, 28509], [28484, 28509, 28508], [28485, 28486, 28509], [28486, 28510, 28509], [28486, 28487, 28511], [28486, 28511, 28510], [28487, 28488, 28511], [28488, 28512, 28511], [28488, 28489, 28513], [28488, 28513, 28512], [28489, 28490, 28513], [28490, 28514, 28513], [28490, 28491, 28515], [28490, 28515, 28514], [15783, 28492, 28516], [15783, 28516, 15912], [28492, 28493, 28516], [28493, 28517, 28516], [28493, 28494, 28518], [28493, 28518, 28517], [28494, 28495, 28518], [28495, 28519, 28518], [28495, 28496, 28520], [28495, 28520, 28519], [28496, 28497, 28520], [28497, 28521, 28520], [28497, 28498, 28522], [28497, 28522, 28521], [28498, 28499, 28522], [28499, 28523, 28522], [28499, 28500, 28524], [28499, 28524, 28523], [28500, 28501, 28524], [28501, 28525, 28524], [28501, 28502, 28526], [28501, 28526, 28525], [28502, 28503, 28526], [28503, 28527, 28526], [28503, 28504, 28528], [28503, 28528, 28527], [28504, 28505, 28528], [28505, 28529, 28528], [28505, 28506, 28530], [28505, 28530, 28529], [28506, 28507, 28530], [28507, 28531, 28530], [28507, 28508, 28532], [28507, 28532, 28531], [28508, 28509, 28532], [28509, 28533, 28532], [28509, 28510, 28534], [28509, 28534, 28533], [28510, 28511, 28534], [28511, 28535, 28534], [28511, 28512, 28536], [28511, 28536, 28535], [28512, 28513, 28536], [28513, 28537, 28536], [28513, 28514, 28538], [28513, 28538, 28537], [28514, 28515, 28538], [28515, 28539, 28538], [15912, 28516, 16041], [28516, 28540, 16041], [28516, 28517, 28541], [28516, 28541, 28540], [28517, 28518, 28541], [28518, 28542, 28541], [28518, 28519, 28543], [28518, 28543, 28542], [28519, 28520, 28543], [28520, 28544, 28543], [28520, 28521, 28545], [28520, 28545, 28544], [28521, 28522, 28545], [28522, 28546, 28545], [28522, 28523, 28547], [28522, 28547, 28546], [28523, 28524, 28547], [28524, 28548, 28547], [28524, 28525, 28549], [28524, 28549, 28548], [28525, 28526, 28549], [28526, 28550, 28549], [28526, 28527, 28551], [28526, 28551, 28550], [28527, 28528, 28551], [28528, 28552, 28551], [28528, 28529, 28553], [28528, 28553, 28552], [28529, 28530, 28553], [28530, 28554, 28553], [28530, 28531, 28555], [28530, 28555, 28554], [28531, 28532, 28555], [28532, 28556, 28555], [28532, 28533, 28557], [28532, 28557, 28556], [28533, 28534, 28557], [28534, 28558, 28557], [28534, 
28535, 28559], [28534, 28559, 28558], [28535, 28536, 28559], [28536, 28560, 28559], [28536, 28537, 28561], [28536, 28561, 28560], [28537, 28538, 28561], [28538, 28562, 28561], [28538, 28539, 28563], [28538, 28563, 28562], [16041, 28540, 28564], [16041, 28564, 16170], [28540, 28541, 28564], [28541, 28565, 28564], [28541, 28542, 28566], [28541, 28566, 28565], [28542, 28543, 28566], [28543, 28567, 28566], [28543, 28544, 28568], [28543, 28568, 28567], [28544, 28545, 28568], [28545, 28569, 28568], [28545, 28546, 28570], [28545, 28570, 28569], [28546, 28547, 28570], [28547, 28571, 28570], [28547, 28548, 28572], [28547, 28572, 28571], [28548, 28549, 28572], [28549, 28573, 28572], [28549, 28550, 28574], [28549, 28574, 28573], [28550, 28551, 28574], [28551, 28575, 28574], [28551, 28552, 28576], [28551, 28576, 28575], [28552, 28553, 28576], [28553, 28577, 28576], [28553, 28554, 28578], [28553, 28578, 28577], [28554, 28555, 28578], [28555, 28579, 28578], [28555, 28556, 28580], [28555, 28580, 28579], [28556, 28557, 28580], [28557, 28581, 28580], [28557, 28558, 28582], [28557, 28582, 28581], [28558, 28559, 28582], [28559, 28583, 28582], [28559, 28560, 28584], [28559, 28584, 28583], [28560, 28561, 28584], [28561, 28585, 28584], [28561, 28562, 28586], [28561, 28586, 28585], [28562, 28563, 28586], [28563, 28587, 28586], [16170, 28564, 19395], [28564, 19524, 19395], [28564, 28565, 19653], [28564, 19653, 19524], [28565, 28566, 19653], [28566, 19782, 19653], [28566, 28567, 19911], [28566, 19911, 19782], [28567, 28568, 19911], [28568, 20040, 19911], [28568, 28569, 20169], [28568, 20169, 20040], [28569, 28570, 20169], [28570, 20298, 20169], [28570, 28571, 20427], [28570, 20427, 20298], [28571, 28572, 20427], [28572, 20556, 20427], [28572, 28573, 20685], [28572, 20685, 20556], [28573, 28574, 20685], [28574, 20814, 20685], [28574, 28575, 20943], [28574, 20943, 20814], [28575, 28576, 20943], [28576, 21072, 20943], [28576, 28577, 21201], [28576, 21201, 21072], [28577, 28578, 21201], [28578, 21330, 21201], [28578, 28579, 21459], [28578, 21459, 21330], [28579, 28580, 21459], [28580, 21588, 21459], [28580, 28581, 21717], [28580, 21717, 21588], [28581, 28582, 21717], [28582, 21846, 21717], [28582, 28583, 21975], [28582, 21975, 21846], [28583, 28584, 21975], [28584, 22104, 21975], [28584, 28585, 22233], [28584, 22233, 22104], [28585, 28586, 22233], [28586, 22362, 22233], [28586, 28587, 22491], [28586, 22491, 22362]],
+	"textureMapping": {
+	"triangles": [[0, 1, 130], [0, 130, 129], [1, 2, 130], [2, 131, 130], [2, 3, 132], [2, 132, 131], [3, 4, 132], [4, 133, 132], [4, 5, 134], [4, 134, 133], [5, 6, 134], [6, 135, 134], [6, 7, 136], [6, 136, 135], [7, 8, 136], [8, 137, 136], [8, 9, 138], [8, 138, 137], [9, 10, 138], [10, 139, 138], [10, 11, 140], [10, 140, 139], [11, 12, 140], [12, 141, 140], [12, 13, 142], [12, 142, 141], [13, 14, 142], [14, 143, 142], [14, 15, 144], [14, 144, 143], [15, 16, 144], [16, 145, 144], [16, 17, 146], [16, 146, 145], [17, 18, 146], [18, 147, 146], [18, 19, 148], [18, 148, 147], [19, 20, 148], [20, 149, 148], [20, 21, 150], [20, 150, 149], [21, 22, 150], [22, 151, 150], [22, 23, 152], [22, 152, 151], [23, 24, 152], [24, 153, 152], [24, 25, 154], [24, 154, 153], [25, 26, 154], [26, 155, 154], [26, 27, 156], [26, 156, 155], [27, 28, 156], [28, 157, 156], [28, 29, 158], [28, 158, 157], [29, 30, 158], [30, 159, 158], [30, 31, 160], [30, 160, 159], [31, 32, 160], [32, 161, 160], [32, 33, 162], [32, 162, 161], [33, 34, 162], [34, 163, 162], [34, 35, 164], [34, 164, 163], [35, 36, 164], [36, 165,
164], [36, 37, 166], [36, 166, 165], [37, 38, 166], [38, 167, 166], [38, 39, 168], [38, 168, 167], [39, 40, 168], [40, 169, 168], [40, 41, 170], [40, 170, 169], [41, 42, 170], [42, 171, 170], [42, 43, 172], [42, 172, 171], [43, 44, 172], [44, 173, 172], [44, 45, 174], [44, 174, 173], [45, 46, 174], [46, 175, 174], [46, 47, 176], [46, 176, 175], [47, 48, 176], [48, 177, 176], [48, 49, 178], [48, 178, 177], [49, 50, 178], [50, 179, 178], [50, 51, 180], [50, 180, 179], [51, 52, 180], [52, 181, 180], [52, 53, 182], [52, 182, 181], [53, 54, 182], [54, 183, 182], [54, 55, 184], [54, 184, 183], [55, 56, 184], [56, 185, 184], [56, 57, 186], [56, 186, 185], [57, 58, 186], [58, 187, 186], [58, 59, 188], [58, 188, 187], [59, 60, 188], [60, 189, 188], [60, 61, 190], [60, 190, 189], [61, 62, 190], [62, 191, 190], [62, 63, 192], [62, 192, 191], [63, 64, 192], [64, 193, 192], [64, 65, 194], [64, 194, 193], [65, 66, 194], [66, 195, 194], [66, 67, 196], [66, 196, 195], [67, 68, 196], [68, 197, 196], [68, 69, 198], [68, 198, 197], [69, 70, 198], [70, 199, 198], [70, 71, 200], [70, 200, 199], [71, 72, 200], [72, 201, 200], [72, 73, 202], [72, 202, 201], [73, 74, 202], [74, 203, 202], [74, 75, 204], [74, 204, 203], [75, 76, 204], [76, 205, 204], [76, 77, 206], [76, 206, 205], [77, 78, 206], [78, 207, 206], [78, 79, 208], [78, 208, 207], [79, 80, 208], [80, 209, 208], [80, 81, 210], [80, 210, 209], [81, 82, 210], [82, 211, 210], [82, 83, 212], [82, 212, 211], [83, 84, 212], [84, 213, 212], [84, 85, 214], [84, 214, 213], [85, 86, 214], [86, 215, 214], [86, 87, 216], [86, 216, 215], [87, 88, 216], [88, 217, 216], [88, 89, 218], [88, 218, 217], [89, 90, 218], [90, 219, 218], [90, 91, 220], [90, 220, 219], [91, 92, 220], [92, 221, 220], [92, 93, 222], [92, 222, 221], [93, 94, 222], [94, 223, 222], [94, 95, 224], [94, 224, 223], [95, 96, 224], [96, 225, 224], [96, 97, 226], [96, 226, 225], [97, 98, 226], [98, 227, 226], [98, 99, 228], [98, 228, 227], [99, 100, 228], [100, 229, 228], [100, 101, 230], [100, 230, 229], [101, 102, 230], [102, 231, 230], [102, 103, 232], [102, 232, 231], [103, 104, 232], [104, 233, 232], [104, 105, 234], [104, 234, 233], [105, 106, 234], [106, 235, 234], [106, 107, 236], [106, 236, 235], [107, 108, 236], [108, 237, 236], [108, 109, 238], [108, 238, 237], [109, 110, 238], [110, 239, 238], [110, 111, 240], [110, 240, 239], [111, 112, 240], [112, 241, 240], [112, 113, 242], [112, 242, 241], [113, 114, 242], [114, 243, 242], [114, 115, 244], [114, 244, 243], [115, 116, 244], [116, 245, 244], [116, 117, 246], [116, 246, 245], [117, 118, 246], [118, 247, 246], [118, 119, 248], [118, 248, 247], [119, 120, 248], [120, 249, 248], [120, 121, 250], [120, 250, 249], [121, 122, 250], [122, 251, 250], [122, 123, 252], [122, 252, 251], [123, 124, 252], [124, 253, 252], [124, 125, 254], [124, 254, 253], [125, 126, 254], [126, 255, 254], [126, 127, 256], [126, 256, 255], [127, 128, 256], [128, 257, 256], [129, 130, 258], [130, 259, 258], [130, 131, 260], [130, 260, 259], [131, 132, 260], [132, 261, 260], [132, 133, 262], [132, 262, 261], [133, 134, 262], [134, 263, 262], [134, 135, 264], [134, 264, 263], [135, 136, 264], [136, 265, 264], [136, 137, 266], [136, 266, 265], [137, 138, 266], [138, 267, 266], [138, 139, 268], [138, 268, 267], [139, 140, 268], [140, 269, 268], [140, 141, 270], [140, 270, 269], [141, 142, 270], [142, 271, 270], [142, 143, 272], [142, 272, 271], [143, 144, 272], [144, 273, 272], [144, 145, 274], [144, 274, 273], [145, 146, 274], [146, 275, 274], [146, 147, 276], [146, 276, 
275], [147, 148, 276], [148, 277, 276], [148, 149, 278], [148, 278, 277], [149, 150, 278], [150, 279, 278], [150, 151, 280], [150, 280, 279], [151, 152, 280], [152, 281, 280], [152, 153, 282], [152, 282, 281], [153, 154, 282], [154, 283, 282], [154, 155, 284], [154, 284, 283], [155, 156, 284], [156, 285, 284], [156, 157, 286], [156, 286, 285], [157, 158, 286], [158, 287, 286], [158, 159, 288], [158, 288, 287], [159, 160, 288], [160, 289, 288], [160, 161, 290], [160, 290, 289], [161, 162, 290], [162, 291, 290], [162, 163, 292], [162, 292, 291], [163, 164, 292], [164, 293, 292], [164, 165, 294], [164, 294, 293], [165, 166, 294], [166, 295, 294], [166, 167, 296], [166, 296, 295], [167, 168, 296], [168, 297, 296], [168, 169, 298], [168, 298, 297], [169, 170, 298], [170, 299, 298], [170, 171, 300], [170, 300, 299], [171, 172, 300], [172, 301, 300], [172, 173, 302], [172, 302, 301], [173, 174, 302], [174, 303, 302], [174, 175, 304], [174, 304, 303], [175, 176, 304], [176, 305, 304], [176, 177, 306], [176, 306, 305], [177, 178, 306], [178, 307, 306], [178, 179, 308], [178, 308, 307], [179, 180, 308], [180, 309, 308], [180, 181, 310], [180, 310, 309], [181, 182, 310], [182, 311, 310], [182, 183, 312], [182, 312, 311], [183, 184, 312], [184, 313, 312], [184, 185, 314], [184, 314, 313], [185, 186, 314], [186, 315, 314], [186, 187, 316], [186, 316, 315], [187, 188, 316], [188, 317, 316], [188, 189, 318], [188, 318, 317], [189, 190, 318], [190, 319, 318], [190, 191, 320], [190, 320, 319], [191, 192, 320], [192, 321, 320], [192, 193, 322], [192, 322, 321], [193, 194, 322], [194, 323, 322], [194, 195, 324], [194, 324, 323], [195, 196, 324], [196, 325, 324], [196, 197, 326], [196, 326, 325], [197, 198, 326], [198, 327, 326], [198, 199, 328], [198, 328, 327], [199, 200, 328], [200, 329, 328], [200, 201, 330], [200, 330, 329], [201, 202, 330], [202, 331, 330], [202, 203, 332], [202, 332, 331], [203, 204, 332], [204, 333, 332], [204, 205, 334], [204, 334, 333], [205, 206, 334], [206, 335, 334], [206, 207, 336], [206, 336, 335], [207, 208, 336], [208, 337, 336], [208, 209, 338], [208, 338, 337], [209, 210, 338], [210, 339, 338], [210, 211, 340], [210, 340, 339], [211, 212, 340], [212, 341, 340], [212, 213, 342], [212, 342, 341], [213, 214, 342], [214, 343, 342], [214, 215, 344], [214, 344, 343], [215, 216, 344], [216, 345, 344], [216, 217, 346], [216, 346, 345], [217, 218, 346], [218, 347, 346], [218, 219, 348], [218, 348, 347], [219, 220, 348], [220, 349, 348], [220, 221, 350], [220, 350, 349], [221, 222, 350], [222, 351, 350], [222, 223, 352], [222, 352, 351], [223, 224, 352], [224, 353, 352], [224, 225, 354], [224, 354, 353], [225, 226, 354], [226, 355, 354], [226, 227, 356], [226, 356, 355], [227, 228, 356], [228, 357, 356], [228, 229, 358], [228, 358, 357], [229, 230, 358], [230, 359, 358], [230, 231, 360], [230, 360, 359], [231, 232, 360], [232, 361, 360], [232, 233, 362], [232, 362, 361], [233, 234, 362], [234, 363, 362], [234, 235, 364], [234, 364, 363], [235, 236, 364], [236, 365, 364], [236, 237, 366], [236, 366, 365], [237, 238, 366], [238, 367, 366], [238, 239, 368], [238, 368, 367], [239, 240, 368], [240, 369, 368], [240, 241, 370], [240, 370, 369], [241, 242, 370], [242, 371, 370], [242, 243, 372], [242, 372, 371], [243, 244, 372], [244, 373, 372], [244, 245, 374], [244, 374, 373], [245, 246, 374], [246, 375, 374], [246, 247, 376], [246, 376, 375], [247, 248, 376], [248, 377, 376], [248, 249, 378], [248, 378, 377], [249, 250, 378], [250, 379, 378], [250, 251, 380], [250, 380, 379], [251, 252, 
380], [252, 381, 380], [252, 253, 382], [252, 382, 381], [253, 254, 382], [254, 383, 382], [254, 255, 384], [254, 384, 383], [255, 256, 384], [256, 385, 384], [256, 257, 386], [256, 386, 385], [258, 259, 388], [258, 388, 387], [259, 260, 388], [260, 389, 388], [260, 261, 390], [260, 390, 389], [261, 262, 390], [262, 391, 390], [262, 263, 392], [262, 392, 391], [263, 264, 392], [264, 393, 392], [264, 265, 394], [264, 394, 393], [265, 266, 394], [266, 395, 394], [266, 267, 396], [266, 396, 395], [267, 268, 396], [268, 397, 396], [268, 269, 398], [268, 398, 397], [269, 270, 398], [270, 399, 398], [270, 271, 400], [270, 400, 399], [271, 272, 400], [272, 401, 400], [272, 273, 402], [272, 402, 401], [273, 274, 402], [274, 403, 402], [274, 275, 404], [274, 404, 403], [275, 276, 404], [276, 405, 404], [276, 277, 406], [276, 406, 405], [277, 278, 406], [278, 407, 406], [278, 279, 408], [278, 408, 407], [279, 280, 408], [280, 409, 408], [280, 281, 410], [280, 410, 409], [281, 282, 410], [282, 411, 410], [282, 283, 412], [282, 412, 411], [283, 284, 412], [284, 413, 412], [284, 285, 414], [284, 414, 413], [285, 286, 414], [286, 415, 414], [286, 287, 416], [286, 416, 415], [287, 288, 416], [288, 417, 416], [288, 289, 418], [288, 418, 417], [289, 290, 418], [290, 419, 418], [290, 291, 420], [290, 420, 419], [291, 292, 420], [292, 421, 420], [292, 293, 422], [292, 422, 421], [293, 294, 422], [294, 423, 422], [294, 295, 424], [294, 424, 423], [295, 296, 424], [296, 425, 424], [296, 297, 426], [296, 426, 425], [297, 298, 426], [298, 427, 426], [298, 299, 428], [298, 428, 427], [299, 300, 428], [300, 429, 428], [300, 301, 430], [300, 430, 429], [301, 302, 430], [302, 431, 430], [302, 303, 432], [302, 432, 431], [303, 304, 432], [304, 433, 432], [304, 305, 434], [304, 434, 433], [305, 306, 434], [306, 435, 434], [306, 307, 436], [306, 436, 435], [307, 308, 436], [308, 437, 436], [308, 309, 438], [308, 438, 437], [309, 310, 438], [310, 439, 438], [310, 311, 440], [310, 440, 439], [311, 312, 440], [312, 441, 440], [312, 313, 442], [312, 442, 441], [313, 314, 442], [314, 443, 442], [314, 315, 444], [314, 444, 443], [315, 316, 444], [316, 445, 444], [316, 317, 446], [316, 446, 445], [317, 318, 446], [318, 447, 446], [318, 319, 448], [318, 448, 447], [319, 320, 448], [320, 449, 448], [320, 321, 450], [320, 450, 449], [321, 322, 450], [322, 451, 450], [322, 323, 452], [322, 452, 451], [323, 324, 452], [324, 453, 452], [324, 325, 454], [324, 454, 453], [325, 326, 454], [326, 455, 454], [326, 327, 456], [326, 456, 455], [327, 328, 456], [328, 457, 456], [328, 329, 458], [328, 458, 457], [329, 330, 458], [330, 459, 458], [330, 331, 460], [330, 460, 459], [331, 332, 460], [332, 461, 460], [332, 333, 462], [332, 462, 461], [333, 334, 462], [334, 463, 462], [334, 335, 464], [334, 464, 463], [335, 336, 464], [336, 465, 464], [336, 337, 466], [336, 466, 465], [337, 338, 466], [338, 467, 466], [338, 339, 468], [338, 468, 467], [339, 340, 468], [340, 469, 468], [340, 341, 470], [340, 470, 469], [341, 342, 470], [342, 471, 470], [342, 343, 472], [342, 472, 471], [343, 344, 472], [344, 473, 472], [344, 345, 474], [344, 474, 473], [345, 346, 474], [346, 475, 474], [346, 347, 476], [346, 476, 475], [347, 348, 476], [348, 477, 476], [348, 349, 478], [348, 478, 477], [349, 350, 478], [350, 479, 478], [350, 351, 480], [350, 480, 479], [351, 352, 480], [352, 481, 480], [352, 353, 482], [352, 482, 481], [353, 354, 482], [354, 483, 482], [354, 355, 484], [354, 484, 483], [355, 356, 484], [356, 485, 484], [356, 357, 486], [356, 486, 
485], [357, 358, 486], [358, 487, 486], [358, 359, 488], [358, 488, 487], [359, 360, 488], [360, 489, 488], [360, 361, 490], [360, 490, 489], [361, 362, 490], [362, 491, 490], [362, 363, 492], [362, 492, 491], [363, 364, 492], [364, 493, 492], [364, 365, 494], [364, 494, 493], [365, 366, 494], [366, 495, 494], [366, 367, 496], [366, 496, 495], [367, 368, 496], [368, 497, 496], [368, 369, 498], [368, 498, 497], [369, 370, 498], [370, 499, 498], [370, 371, 500], [370, 500, 499], [371, 372, 500], [372, 501, 500], [372, 373, 502], [372, 502, 501], [373, 374, 502], [374, 503, 502], [374, 375, 504], [374, 504, 503], [375, 376, 504], [376, 505, 504], [376, 377, 506], [376, 506, 505], [377, 378, 506], [378, 507, 506], [378, 379, 508], [378, 508, 507], [379, 380, 508], [380, 509, 508], [380, 381, 510], [380, 510, 509], [381, 382, 510], [382, 511, 510], [382, 383, 512], [382, 512, 511], [383, 384, 512], [384, 513, 512], [384, 385, 514], [384, 514, 513], [385, 386, 514], [386, 515, 514], [387, 388, 516], [388, 517, 516], [388, 389, 518], [388, 518, 517], [389, 390, 518], [390, 519, 518], [390, 391, 520], [390, 520, 519], [391, 392, 520], [392, 521, 520], [392, 393, 522], [392, 522, 521], [393, 394, 522], [394, 523, 522], [394, 395, 524], [394, 524, 523], [395, 396, 524], [396, 525, 524], [396, 397, 526], [396, 526, 525], [397, 398, 526], [398, 527, 526], [398, 399, 528], [398, 528, 527], [399, 400, 528], [400, 529, 528], [400, 401, 530], [400, 530, 529], [401, 402, 530], [402, 531, 530], [402, 403, 532], [402, 532, 531], [403, 404, 532], [404, 533, 532], [404, 405, 534], [404, 534, 533], [405, 406, 534], [406, 535, 534], [406, 407, 536], [406, 536, 535], [407, 408, 536], [408, 537, 536], [408, 409, 538], [408, 538, 537], [409, 410, 538], [410, 539, 538], [410, 411, 540], [410, 540, 539], [411, 412, 540], [412, 541, 540], [412, 413, 542], [412, 542, 541], [413, 414, 542], [414, 543, 542], [414, 415, 544], [414, 544, 543], [415, 416, 544], [416, 545, 544], [416, 417, 546], [416, 546, 545], [417, 418, 546], [418, 547, 546], [418, 419, 548], [418, 548, 547], [419, 420, 548], [420, 549, 548], [420, 421, 550], [420, 550, 549], [421, 422, 550], [422, 551, 550], [422, 423, 552], [422, 552, 551], [423, 424, 552], [424, 553, 552], [424, 425, 554], [424, 554, 553], [425, 426, 554], [426, 555, 554], [426, 427, 556], [426, 556, 555], [427, 428, 556], [428, 557, 556], [428, 429, 558], [428, 558, 557], [429, 430, 558], [430, 559, 558], [430, 431, 560], [430, 560, 559], [431, 432, 560], [432, 561, 560], [432, 433, 562], [432, 562, 561], [433, 434, 562], [434, 563, 562], [434, 435, 564], [434, 564, 563], [435, 436, 564], [436, 565, 564], [436, 437, 566], [436, 566, 565], [437, 438, 566], [438, 567, 566], [438, 439, 568], [438, 568, 567], [439, 440, 568], [440, 569, 568], [440, 441, 570], [440, 570, 569], [441, 442, 570], [442, 571, 570], [442, 443, 572], [442, 572, 571], [443, 444, 572], [444, 573, 572], [444, 445, 574], [444, 574, 573], [445, 446, 574], [446, 575, 574], [446, 447, 576], [446, 576, 575], [447, 448, 576], [448, 577, 576], [448, 449, 578], [448, 578, 577], [449, 450, 578], [450, 579, 578], [450, 451, 580], [450, 580, 579], [451, 452, 580], [452, 581, 580], [452, 453, 582], [452, 582, 581], [453, 454, 582], [454, 583, 582], [454, 455, 584], [454, 584, 583], [455, 456, 584], [456, 585, 584], [456, 457, 586], [456, 586, 585], [457, 458, 586], [458, 587, 586], [458, 459, 588], [458, 588, 587], [459, 460, 588], [460, 589, 588], [460, 461, 590], [460, 590, 589], [461, 462, 590], [462, 591, 590], [462, 463, 
592], [462, 592, 591], [463, 464, 592], [464, 593, 592], [464, 465, 594], [464, 594, 593], [465, 466, 594], [466, 595, 594], [466, 467, 596], [466, 596, 595], [467, 468, 596], [468, 597, 596], [468, 469, 598], [468, 598, 597], [469, 470, 598], [470, 599, 598], [470, 471, 600], [470, 600, 599], [471, 472, 600], [472, 601, 600], [472, 473, 602], [472, 602, 601], [473, 474, 602], [474, 603, 602], [474, 475, 604], [474, 604, 603], [475, 476, 604], [476, 605, 604], [476, 477, 606], [476, 606, 605], [477, 478, 606], [478, 607, 606], [478, 479, 608], [478, 608, 607], [479, 480, 608], [480, 609, 608], [480, 481, 610], [480, 610, 609], [481, 482, 610], [482, 611, 610], [482, 483, 612], [482, 612, 611], [483, 484, 612], [484, 613, 612], [484, 485, 614], [484, 614, 613], [485, 486, 614], [486, 615, 614], [486, 487, 616], [486, 616, 615], [487, 488, 616], [488, 617, 616], [488, 489, 618], [488, 618, 617], [489, 490, 618], [490, 619, 618], [490, 491, 620], [490, 620, 619], [491, 492, 620], [492, 621, 620], [492, 493, 622], [492, 622, 621], [493, 494, 622], [494, 623, 622], [494, 495, 624], [494, 624, 623], [495, 496, 624], [496, 625, 624], [496, 497, 626], [496, 626, 625], [497, 498, 626], [498, 627, 626], [498, 499, 628], [498, 628, 627], [499, 500, 628], [500, 629, 628], [500, 501, 630], [500, 630, 629], [501, 502, 630], [502, 631, 630], [502, 503, 632], [502, 632, 631], [503, 504, 632], [504, 633, 632], [504, 505, 634], [504, 634, 633], [505, 506, 634], [506, 635, 634], [506, 507, 636], [506, 636, 635], [507, 508, 636], [508, 637, 636], [508, 509, 638], [508, 638, 637], [509, 510, 638], [510, 639, 638], [510, 511, 640], [510, 640, 639], [511, 512, 640], [512, 641, 640], [512, 513, 642], [512, 642, 641], [513, 514, 642], [514, 643, 642], [514, 515, 644], [514, 644, 643], [516, 517, 646], [516, 646, 645], [517, 518, 646], [518, 647, 646], [518, 519, 648], [518, 648, 647], [519, 520, 648], [520, 649, 648], [520, 521, 650], [520, 650, 649], [521, 522, 650], [522, 651, 650], [522, 523, 652], [522, 652, 651], [523, 524, 652], [524, 653, 652], [524, 525, 654], [524, 654, 653], [525, 526, 654], [526, 655, 654], [526, 527, 656], [526, 656, 655], [527, 528, 656], [528, 657, 656], [528, 529, 658], [528, 658, 657], [529, 530, 658], [530, 659, 658], [530, 531, 660], [530, 660, 659], [531, 532, 660], [532, 661, 660], [532, 533, 662], [532, 662, 661], [533, 534, 662], [534, 663, 662], [534, 535, 664], [534, 664, 663], [535, 536, 664], [536, 665, 664], [536, 537, 666], [536, 666, 665], [537, 538, 666], [538, 667, 666], [538, 539, 668], [538, 668, 667], [539, 540, 668], [540, 669, 668], [540, 541, 670], [540, 670, 669], [541, 542, 670], [542, 671, 670], [542, 543, 672], [542, 672, 671], [543, 544, 672], [544, 673, 672], [544, 545, 674], [544, 674, 673], [545, 546, 674], [546, 675, 674], [546, 547, 676], [546, 676, 675], [547, 548, 676], [548, 677, 676], [548, 549, 678], [548, 678, 677], [549, 550, 678], [550, 679, 678], [550, 551, 680], [550, 680, 679], [551, 552, 680], [552, 681, 680], [552, 553, 682], [552, 682, 681], [553, 554, 682], [554, 683, 682], [554, 555, 684], [554, 684, 683], [555, 556, 684], [556, 685, 684], [556, 557, 686], [556, 686, 685], [557, 558, 686], [558, 687, 686], [558, 559, 688], [558, 688, 687], [559, 560, 688], [560, 689, 688], [560, 561, 690], [560, 690, 689], [561, 562, 690], [562, 691, 690], [562, 563, 692], [562, 692, 691], [563, 564, 692], [564, 693, 692], [564, 565, 694], [564, 694, 693], [565, 566, 694], [566, 695, 694], [566, 567, 696], [566, 696, 695], [567, 568, 696], [568, 697, 
696], [568, 569, 698], [568, 698, 697], [569, 570, 698], [570, 699, 698], [570, 571, 700], [570, 700, 699], [571, 572, 700], [572, 701, 700], [572, 573, 702], [572, 702, 701], [573, 574, 702], [574, 703, 702], [574, 575, 704], [574, 704, 703], [575, 576, 704], [576, 705, 704], [576, 577, 706], [576, 706, 705], [577, 578, 706], [578, 707, 706], [578, 579, 708], [578, 708, 707], [579, 580, 708], [580, 709, 708], [580, 581, 710], [580, 710, 709], [581, 582, 710], [582, 711, 710], [582, 583, 712], [582, 712, 711], [583, 584, 712], [584, 713, 712], [584, 585, 714], [584, 714, 713], [585, 586, 714], [586, 715, 714], [586, 587, 716], [586, 716, 715], [587, 588, 716], [588, 717, 716], [588, 589, 718], [588, 718, 717], [589, 590, 718], [590, 719, 718], [590, 591, 720], [590, 720, 719], [591, 592, 720], [592, 721, 720], [592, 593, 722], [592, 722, 721], [593, 594, 722], [594, 723, 722], [594, 595, 724], [594, 724, 723], [595, 596, 724], [596, 725, 724], [596, 597, 726], [596, 726, 725], [597, 598, 726], [598, 727, 726], [598, 599, 728], [598, 728, 727], [599, 600, 728], [600, 729, 728], [600, 601, 730], [600, 730, 729], [601, 602, 730], [602, 731, 730], [602, 603, 732], [602, 732, 731], [603, 604, 732], [604, 733, 732], [604, 605, 734], [604, 734, 733], [605, 606, 734], [606, 735, 734], [606, 607, 736], [606, 736, 735], [607, 608, 736], [608, 737, 736], [608, 609, 738], [608, 738, 737], [609, 610, 738], [610, 739, 738], [610, 611, 740], [610, 740, 739], [611, 612, 740], [612, 741, 740], [612, 613, 742], [612, 742, 741], [613, 614, 742], [614, 743, 742], [614, 615, 744], [614, 744, 743], [615, 616, 744], [616, 745, 744], [616, 617, 746], [616, 746, 745], [617, 618, 746], [618, 747, 746], [618, 619, 748], [618, 748, 747], [619, 620, 748], [620, 749, 748], [620, 621, 750], [620, 750, 749], [621, 622, 750], [622, 751, 750], [622, 623, 752], [622, 752, 751], [623, 624, 752], [624, 753, 752], [624, 625, 754], [624, 754, 753], [625, 626, 754], [626, 755, 754], [626, 627, 756], [626, 756, 755], [627, 628, 756], [628, 757, 756], [628, 629, 758], [628, 758, 757], [629, 630, 758], [630, 759, 758], [630, 631, 760], [630, 760, 759], [631, 632, 760], [632, 761, 760], [632, 633, 762], [632, 762, 761], [633, 634, 762], [634, 763, 762], [634, 635, 764], [634, 764, 763], [635, 636, 764], [636, 765, 764], [636, 637, 766], [636, 766, 765], [637, 638, 766], [638, 767, 766], [638, 639, 768], [638, 768, 767], [639, 640, 768], [640, 769, 768], [640, 641, 770], [640, 770, 769], [641, 642, 770], [642, 771, 770], [642, 643, 772], [642, 772, 771], [643, 644, 772], [644, 773, 772], [645, 646, 774], [646, 775, 774], [646, 647, 776], [646, 776, 775], [647, 648, 776], [648, 777, 776], [648, 649, 778], [648, 778, 777], [649, 650, 778], [650, 779, 778], [650, 651, 780], [650, 780, 779], [651, 652, 780], [652, 781, 780], [652, 653, 782], [652, 782, 781], [653, 654, 782], [654, 783, 782], [654, 655, 784], [654, 784, 783], [655, 656, 784], [656, 785, 784], [656, 657, 786], [656, 786, 785], [657, 658, 786], [658, 787, 786], [658, 659, 788], [658, 788, 787], [659, 660, 788], [660, 789, 788], [660, 661, 790], [660, 790, 789], [661, 662, 790], [662, 791, 790], [662, 663, 792], [662, 792, 791], [663, 664, 792], [664, 793, 792], [664, 665, 794], [664, 794, 793], [665, 666, 794], [666, 795, 794], [666, 667, 796], [666, 796, 795], [667, 668, 796], [668, 797, 796], [668, 669, 798], [668, 798, 797], [669, 670, 798], [670, 799, 798], [670, 671, 800], [670, 800, 799], [671, 672, 800], [672, 801, 800], [672, 673, 802], [672, 802, 801], [673, 674, 
802], [674, 803, 802], [674, 675, 804], [674, 804, 803], [675, 676, 804], [676, 805, 804], [676, 677, 806], [676, 806, 805], [677, 678, 806], [678, 807, 806], [678, 679, 808], [678, 808, 807], [679, 680, 808], [680, 809, 808], [680, 681, 810], [680, 810, 809], [681, 682, 810], [682, 811, 810], [682, 683, 812], [682, 812, 811], [683, 684, 812], [684, 813, 812], [684, 685, 814], [684, 814, 813], [685, 686, 814], [686, 815, 814], [686, 687, 816], [686, 816, 815], [687, 688, 816], [688, 817, 816], [688, 689, 818], [688, 818, 817], [689, 690, 818], [690, 819, 818], [690, 691, 820], [690, 820, 819], [691, 692, 820], [692, 821, 820], [692, 693, 822], [692, 822, 821], [693, 694, 822], [694, 823, 822], [694, 695, 824], [694, 824, 823], [695, 696, 824], [696, 825, 824], [696, 697, 826], [696, 826, 825], [697, 698, 826], [698, 827, 826], [698, 699, 828], [698, 828, 827], [699, 700, 828], [700, 829, 828], [700, 701, 830], [700, 830, 829], [701, 702, 830], [702, 831, 830], [702, 703, 832], [702, 832, 831], [703, 704, 832], [704, 833, 832], [704, 705, 834], [704, 834, 833], [705, 706, 834], [706, 835, 834], [706, 707, 836], [706, 836, 835], [707, 708, 836], [708, 837, 836], [708, 709, 838], [708, 838, 837], [709, 710, 838], [710, 839, 838], [710, 711, 840], [710, 840, 839], [711, 712, 840], [712, 841, 840], [712, 713, 842], [712, 842, 841], [713, 714, 842], [714, 843, 842], [714, 715, 844], [714, 844, 843], [715, 716, 844], [716, 845, 844], [716, 717, 846], [716, 846, 845], [717, 718, 846], [718, 847, 846], [718, 719, 848], [718, 848, 847], [719, 720, 848], [720, 849, 848], [720, 721, 850], [720, 850, 849], [721, 722, 850], [722, 851, 850], [722, 723, 852], [722, 852, 851], [723, 724, 852], [724, 853, 852], [724, 725, 854], [724, 854, 853], [725, 726, 854], [726, 855, 854], [726, 727, 856], [726, 856, 855], [727, 728, 856], [728, 857, 856], [728, 729, 858], [728, 858, 857], [729, 730, 858], [730, 859, 858], [730, 731, 860], [730, 860, 859], [731, 732, 860], [732, 861, 860], [732, 733, 862], [732, 862, 861], [733, 734, 862], [734, 863, 862], [734, 735, 864], [734, 864, 863], [735, 736, 864], [736, 865, 864], [736, 737, 866], [736, 866, 865], [737, 738, 866], [738, 867, 866], [738, 739, 868], [738, 868, 867], [739, 740, 868], [740, 869, 868], [740, 741, 870], [740, 870, 869], [741, 742, 870], [742, 871, 870], [742, 743, 872], [742, 872, 871], [743, 744, 872], [744, 873, 872], [744, 745, 874], [744, 874, 873], [745, 746, 874], [746, 875, 874], [746, 747, 876], [746, 876, 875], [747, 748, 876], [748, 877, 876], [748, 749, 878], [748, 878, 877], [749, 750, 878], [750, 879, 878], [750, 751, 880], [750, 880, 879], [751, 752, 880], [752, 881, 880], [752, 753, 882], [752, 882, 881], [753, 754, 882], [754, 883, 882], [754, 755, 884], [754, 884, 883], [755, 756, 884], [756, 885, 884], [756, 757, 886], [756, 886, 885], [757, 758, 886], [758, 887, 886], [758, 759, 888], [758, 888, 887], [759, 760, 888], [760, 889, 888], [760, 761, 890], [760, 890, 889], [761, 762, 890], [762, 891, 890], [762, 763, 892], [762, 892, 891], [763, 764, 892], [764, 893, 892], [764, 765, 894], [764, 894, 893], [765, 766, 894], [766, 895, 894], [766, 767, 896], [766, 896, 895], [767, 768, 896], [768, 897, 896], [768, 769, 898], [768, 898, 897], [769, 770, 898], [770, 899, 898], [770, 771, 900], [770, 900, 899], [771, 772, 900], [772, 901, 900], [772, 773, 902], [772, 902, 901], [774, 775, 904], [774, 904, 903], [775, 776, 904], [776, 905, 904], [776, 777, 906], [776, 906, 905], [777, 778, 906], [778, 907, 906], [778, 779, 908], [778, 908, 
907], [779, 780, 908], [780, 909, 908], [780, 781, 910], [780, 910, 909], [781, 782, 910], [782, 911, 910], [782, 783, 912], [782, 912, 911], [783, 784, 912], [784, 913, 912], [784, 785, 914], [784, 914, 913], [785, 786, 914], [786, 915, 914], [786, 787, 916], [786, 916, 915], [787, 788, 916], [788, 917, 916], [788, 789, 918], [788, 918, 917], [789, 790, 918], [790, 919, 918], [790, 791, 920], [790, 920, 919], [791, 792, 920], [792, 921, 920], [792, 793, 922], [792, 922, 921], [793, 794, 922], [794, 923, 922], [794, 795, 924], [794, 924, 923], [795, 796, 924], [796, 925, 924], [796, 797, 926], [796, 926, 925], [797, 798, 926], [798, 927, 926], [798, 799, 928], [798, 928, 927], [799, 800, 928], [800, 929, 928], [800, 801, 930], [800, 930, 929], [801, 802, 930], [802, 931, 930], [802, 803, 932], [802, 932, 931], [803, 804, 932], [804, 933, 932], [804, 805, 934], [804, 934, 933], [805, 806, 934], [806, 935, 934], [806, 807, 936], [806, 936, 935], [807, 808, 936], [808, 937, 936], [808, 809, 938], [808, 938, 937], [809, 810, 938], [810, 939, 938], [810, 811, 940], [810, 940, 939], [811, 812, 940], [812, 941, 940], [812, 813, 942], [812, 942, 941], [813, 814, 942], [814, 943, 942], [814, 815, 944], [814, 944, 943], [815, 816, 944], [816, 945, 944], [816, 817, 946], [816, 946, 945], [817, 818, 946], [818, 947, 946], [818, 819, 948], [818, 948, 947], [819, 820, 948], [820, 949, 948], [820, 821, 950], [820, 950, 949], [821, 822, 950], [822, 951, 950], [822, 823, 952], [822, 952, 951], [823, 824, 952], [824, 953, 952], [824, 825, 954], [824, 954, 953], [825, 826, 954], [826, 955, 954], [826, 827, 956], [826, 956, 955], [827, 828, 956], [828, 957, 956], [828, 829, 958], [828, 958, 957], [829, 830, 958], [830, 959, 958], [830, 831, 960], [830, 960, 959], [831, 832, 960], [832, 961, 960], [832, 833, 962], [832, 962, 961], [833, 834, 962], [834, 963, 962], [834, 835, 964], [834, 964, 963], [835, 836, 964], [836, 965, 964], [836, 837, 966], [836, 966, 965], [837, 838, 966], [838, 967, 966], [838, 839, 968], [838, 968, 967], [839, 840, 968], [840, 969, 968], [840, 841, 970], [840, 970, 969], [841, 842, 970], [842, 971, 970], [842, 843, 972], [842, 972, 971], [843, 844, 972], [844, 973, 972], [844, 845, 974], [844, 974, 973], [845, 846, 974], [846, 975, 974], [846, 847, 976], [846, 976, 975], [847, 848, 976], [848, 977, 976], [848, 849, 978], [848, 978, 977], [849, 850, 978], [850, 979, 978], [850, 851, 980], [850, 980, 979], [851, 852, 980], [852, 981, 980], [852, 853, 982], [852, 982, 981], [853, 854, 982], [854, 983, 982], [854, 855, 984], [854, 984, 983], [855, 856, 984], [856, 985, 984], [856, 857, 986], [856, 986, 985], [857, 858, 986], [858, 987, 986], [858, 859, 988], [858, 988, 987], [859, 860, 988], [860, 989, 988], [860, 861, 990], [860, 990, 989], [861, 862, 990], [862, 991, 990], [862, 863, 992], [862, 992, 991], [863, 864, 992], [864, 993, 992], [864, 865, 994], [864, 994, 993], [865, 866, 994], [866, 995, 994], [866, 867, 996], [866, 996, 995], [867, 868, 996], [868, 997, 996], [868, 869, 998], [868, 998, 997], [869, 870, 998], [870, 999, 998], [870, 871, 1000], [870, 1000, 999], [871, 872, 1000], [872, 1001, 1000], [872, 873, 1002], [872, 1002, 1001], [873, 874, 1002], [874, 1003, 1002], [874, 875, 1004], [874, 1004, 1003], [875, 876, 1004], [876, 1005, 1004], [876, 877, 1006], [876, 1006, 1005], [877, 878, 1006], [878, 1007, 1006], [878, 879, 1008], [878, 1008, 1007], [879, 880, 1008], [880, 1009, 1008], [880, 881, 1010], [880, 1010, 1009], [881, 882, 1010], [882, 1011, 1010], [882, 883, 
1012], [882, 1012, 1011], [883, 884, 1012], [884, 1013, 1012], [884, 885, 1014], [884, 1014, 1013], [885, 886, 1014], [886, 1015, 1014], [886, 887, 1016], [886, 1016, 1015], [887, 888, 1016], [888, 1017, 1016], [888, 889, 1018], [888, 1018, 1017], [889, 890, 1018], [890, 1019, 1018], [890, 891, 1020], [890, 1020, 1019], [891, 892, 1020], [892, 1021, 1020], [892, 893, 1022], [892, 1022, 1021], [893, 894, 1022], [894, 1023, 1022], [894, 895, 1024], [894, 1024, 1023], [895, 896, 1024], [896, 1025, 1024], [896, 897, 1026], [896, 1026, 1025], [897, 898, 1026], [898, 1027, 1026], [898, 899, 1028], [898, 1028, 1027], [899, 900, 1028], [900, 1029, 1028], [900, 901, 1030], [900, 1030, 1029], [901, 902, 1030], [902, 1031, 1030], [903, 904, 1032], [904, 1033, 1032], [904, 905, 1034], [904, 1034, 1033], [905, 906, 1034], [906, 1035, 1034], [906, 907, 1036], [906, 1036, 1035], [907, 908, 1036], [908, 1037, 1036], [908, 909, 1038], [908, 1038, 1037], [909, 910, 1038], [910, 1039, 1038], [910, 911, 1040], [910, 1040, 1039], [911, 912, 1040], [912, 1041, 1040], [912, 913, 1042], [912, 1042, 1041], [913, 914, 1042], [914, 1043, 1042], [914, 915, 1044], [914, 1044, 1043], [915, 916, 1044], [916, 1045, 1044], [916, 917, 1046], [916, 1046, 1045], [917, 918, 1046], [918, 1047, 1046], [918, 919, 1048], [918, 1048, 1047], [919, 920, 1048], [920, 1049, 1048], [920, 921, 1050], [920, 1050, 1049], [921, 922, 1050], [922, 1051, 1050], [922, 923, 1052], [922, 1052, 1051], [923, 924, 1052], [924, 1053, 1052], [924, 925, 1054], [924, 1054, 1053], [925, 926, 1054], [926, 1055, 1054], [926, 927, 1056], [926, 1056, 1055], [927, 928, 1056], [928, 1057, 1056], [928, 929, 1058], [928, 1058, 1057], [929, 930, 1058], [930, 1059, 1058], [930, 931, 1060], [930, 1060, 1059], [931, 932, 1060], [932, 1061, 1060], [932, 933, 1062], [932, 1062, 1061], [933, 934, 1062], [934, 1063, 1062], [934, 935, 1064], [934, 1064, 1063], [935, 936, 1064], [936, 1065, 1064], [936, 937, 1066], [936, 1066, 1065], [937, 938, 1066], [938, 1067, 1066], [938, 939, 1068], [938, 1068, 1067], [939, 940, 1068], [940, 1069, 1068], [940, 941, 1070], [940, 1070, 1069], [941, 942, 1070], [942, 1071, 1070], [942, 943, 1072], [942, 1072, 1071], [943, 944, 1072], [944, 1073, 1072], [944, 945, 1074], [944, 1074, 1073], [945, 946, 1074], [946, 1075, 1074], [946, 947, 1076], [946, 1076, 1075], [947, 948, 1076], [948, 1077, 1076], [948, 949, 1078], [948, 1078, 1077], [949, 950, 1078], [950, 1079, 1078], [950, 951, 1080], [950, 1080, 1079], [951, 952, 1080], [952, 1081, 1080], [952, 953, 1082], [952, 1082, 1081], [953, 954, 1082], [954, 1083, 1082], [954, 955, 1084], [954, 1084, 1083], [955, 956, 1084], [956, 1085, 1084], [956, 957, 1086], [956, 1086, 1085], [957, 958, 1086], [958, 1087, 1086], [958, 959, 1088], [958, 1088, 1087], [959, 960, 1088], [960, 1089, 1088], [960, 961, 1090], [960, 1090, 1089], [961, 962, 1090], [962, 1091, 1090], [962, 963, 1092], [962, 1092, 1091], [963, 964, 1092], [964, 1093, 1092], [964, 965, 1094], [964, 1094, 1093], [965, 966, 1094], [966, 1095, 1094], [966, 967, 1096], [966, 1096, 1095], [967, 968, 1096], [968, 1097, 1096], [968, 969, 1098], [968, 1098, 1097], [969, 970, 1098], [970, 1099, 1098], [970, 971, 1100], [970, 1100, 1099], [971, 972, 1100], [972, 1101, 1100], [972, 973, 1102], [972, 1102, 1101], [973, 974, 1102], [974, 1103, 1102], [974, 975, 1104], [974, 1104, 1103], [975, 976, 1104], [976, 1105, 1104], [976, 977, 1106], [976, 1106, 1105], [977, 978, 1106], [978, 1107, 1106], [978, 979, 1108], [978, 1108, 1107], [979, 980, 
1108], [980, 1109, 1108], [980, 981, 1110], [980, 1110, 1109], [981, 982, 1110], [982, 1111, 1110], [982, 983, 1112], [982, 1112, 1111], [983, 984, 1112], [984, 1113, 1112], [984, 985, 1114], [984, 1114, 1113], [985, 986, 1114], [986, 1115, 1114], [986, 987, 1116], [986, 1116, 1115], [987, 988, 1116], [988, 1117, 1116], [988, 989, 1118], [988, 1118, 1117], [989, 990, 1118], [990, 1119, 1118], [990, 991, 1120], [990, 1120, 1119], [991, 992, 1120], [992, 1121, 1120], [992, 993, 1122], [992, 1122, 1121], [993, 994, 1122], [994, 1123, 1122], [994, 995, 1124], [994, 1124, 1123], [995, 996, 1124], [996, 1125, 1124], [996, 997, 1126], [996, 1126, 1125], [997, 998, 1126], [998, 1127, 1126], [998, 999, 1128], [998, 1128, 1127], [999, 1000, 1128], [1000, 1129, 1128], [1000, 1001, 1130], [1000, 1130, 1129], [1001, 1002, 1130], [1002, 1131, 1130], [1002, 1003, 1132], [1002, 1132, 1131], [1003, 1004, 1132], [1004, 1133, 1132], [1004, 1005, 1134], [1004, 1134, 1133], [1005, 1006, 1134], [1006, 1135, 1134], [1006, 1007, 1136], [1006, 1136, 1135], [1007, 1008, 1136], [1008, 1137, 1136], [1008, 1009, 1138], [1008, 1138, 1137], [1009, 1010, 1138], [1010, 1139, 1138], [1010, 1011, 1140], [1010, 1140, 1139], [1011, 1012, 1140], [1012, 1141, 1140], [1012, 1013, 1142], [1012, 1142, 1141], [1013, 1014, 1142], [1014, 1143, 1142], [1014, 1015, 1144], [1014, 1144, 1143], [1015, 1016, 1144], [1016, 1145, 1144], [1016, 1017, 1146], [1016, 1146, 1145], [1017, 1018, 1146], [1018, 1147, 1146], [1018, 1019, 1148], [1018, 1148, 1147], [1019, 1020, 1148], [1020, 1149, 1148], [1020, 1021, 1150], [1020, 1150, 1149], [1021, 1022, 1150], [1022, 1151, 1150], [1022, 1023, 1152], [1022, 1152, 1151], [1023, 1024, 1152], [1024, 1153, 1152], [1024, 1025, 1154], [1024, 1154, 1153], [1025, 1026, 1154], [1026, 1155, 1154], [1026, 1027, 1156], [1026, 1156, 1155], [1027, 1028, 1156], [1028, 1157, 1156], [1028, 1029, 1158], [1028, 1158, 1157], [1029, 1030, 1158], [1030, 1159, 1158], [1030, 1031, 1160], [1030, 1160, 1159], [1032, 1033, 1162], [1032, 1162, 1161], [1033, 1034, 1162], [1034, 1163, 1162], [1034, 1035, 1164], [1034, 1164, 1163], [1035, 1036, 1164], [1036, 1165, 1164], [1036, 1037, 1166], [1036, 1166, 1165], [1037, 1038, 1166], [1038, 1167, 1166], [1038, 1039, 1168], [1038, 1168, 1167], [1039, 1040, 1168], [1040, 1169, 1168], [1040, 1041, 1170], [1040, 1170, 1169], [1041, 1042, 1170], [1042, 1171, 1170], [1042, 1043, 1172], [1042, 1172, 1171], [1043, 1044, 1172], [1044, 1173, 1172], [1044, 1045, 1174], [1044, 1174, 1173], [1045, 1046, 1174], [1046, 1175, 1174], [1046, 1047, 1176], [1046, 1176, 1175], [1047, 1048, 1176], [1048, 1177, 1176], [1048, 1049, 1178], [1048, 1178, 1177], [1049, 1050, 1178], [1050, 1179, 1178], [1050, 1051, 1180], [1050, 1180, 1179], [1051, 1052, 1180], [1052, 1181, 1180], [1052, 1053, 1182], [1052, 1182, 1181], [1053, 1054, 1182], [1054, 1183, 1182], [1054, 1055, 1184], [1054, 1184, 1183], [1055, 1056, 1184], [1056, 1185, 1184], [1056, 1057, 1186], [1056, 1186, 1185], [1057, 1058, 1186], [1058, 1187, 1186], [1058, 1059, 1188], [1058, 1188, 1187], [1059, 1060, 1188], [1060, 1189, 1188], [1060, 1061, 1190], [1060, 1190, 1189], [1061, 1062, 1190], [1062, 1191, 1190], [1062, 1063, 1192], [1062, 1192, 1191], [1063, 1064, 1192], [1064, 1193, 1192], [1064, 1065, 1194], [1064, 1194, 1193], [1065, 1066, 1194], [1066, 1195, 1194], [1066, 1067, 1196], [1066, 1196, 1195], [1067, 1068, 1196], [1068, 1197, 1196], [1068, 1069, 1198], [1068, 1198, 1197], [1069, 1070, 1198], [1070, 1199, 1198], [1070, 1071, 1200], [1070, 
1200, 1199], [1071, 1072, 1200], [1072, 1201, 1200], [1072, 1073, 1202], [1072, 1202, 1201], [1073, 1074, 1202], [1074, 1203, 1202], [1074, 1075, 1204], [1074, 1204, 1203], [1075, 1076, 1204], [1076, 1205, 1204], [1076, 1077, 1206], [1076, 1206, 1205], [1077, 1078, 1206], [1078, 1207, 1206], [1078, 1079, 1208], [1078, 1208, 1207], [1079, 1080, 1208], [1080, 1209, 1208], [1080, 1081, 1210], [1080, 1210, 1209], [1081, 1082, 1210], [1082, 1211, 1210], [1082, 1083, 1212], [1082, 1212, 1211], [1083, 1084, 1212], [1084, 1213, 1212], [1084, 1085, 1214], [1084, 1214, 1213], [1085, 1086, 1214], [1086, 1215, 1214], [1086, 1087, 1216], [1086, 1216, 1215], [1087, 1088, 1216], [1088, 1217, 1216], [1088, 1089, 1218], [1088, 1218, 1217], [1089, 1090, 1218], [1090, 1219, 1218], [1090, 1091, 1220], [1090, 1220, 1219], [1091, 1092, 1220], [1092, 1221, 1220], [1092, 1093, 1222], [1092, 1222, 1221], [1093, 1094, 1222], [1094, 1223, 1222], [1094, 1095, 1224], [1094, 1224, 1223], [1095, 1096, 1224], [1096, 1225, 1224], [1096, 1097, 1226], [1096, 1226, 1225], [1097, 1098, 1226], [1098, 1227, 1226], [1098, 1099, 1228], [1098, 1228, 1227], [1099, 1100, 1228], [1100, 1229, 1228], [1100, 1101, 1230], [1100, 1230, 1229], [1101, 1102, 1230], [1102, 1231, 1230], [1102, 1103, 1232], [1102, 1232, 1231], [1103, 1104, 1232], [1104, 1233, 1232], [1104, 1105, 1234], [1104, 1234, 1233], [1105, 1106, 1234], [1106, 1235, 1234], [1106, 1107, 1236], [1106, 1236, 1235], [1107, 1108, 1236], [1108, 1237, 1236], [1108, 1109, 1238], [1108, 1238, 1237], [1109, 1110, 1238], [1110, 1239, 1238], [1110, 1111, 1240], [1110, 1240, 1239], [1111, 1112, 1240], [1112, 1241, 1240], [1112, 1113, 1242], [1112, 1242, 1241], [1113, 1114, 1242], [1114, 1243, 1242], [1114, 1115, 1244], [1114, 1244, 1243], [1115, 1116, 1244], [1116, 1245, 1244], [1116, 1117, 1246], [1116, 1246, 1245], [1117, 1118, 1246], [1118, 1247, 1246], [1118, 1119, 1248], [1118, 1248, 1247], [1119, 1120, 1248], [1120, 1249, 1248], [1120, 1121, 1250], [1120, 1250, 1249], [1121, 1122, 1250], [1122, 1251, 1250], [1122, 1123, 1252], [1122, 1252, 1251], [1123, 1124, 1252], [1124, 1253, 1252], [1124, 1125, 1254], [1124, 1254, 1253], [1125, 1126, 1254], [1126, 1255, 1254], [1126, 1127, 1256], [1126, 1256, 1255], [1127, 1128, 1256], [1128, 1257, 1256], [1128, 1129, 1258], [1128, 1258, 1257], [1129, 1130, 1258], [1130, 1259, 1258], [1130, 1131, 1260], [1130, 1260, 1259], [1131, 1132, 1260], [1132, 1261, 1260], [1132, 1133, 1262], [1132, 1262, 1261], [1133, 1134, 1262], [1134, 1263, 1262], [1134, 1135, 1264], [1134, 1264, 1263], [1135, 1136, 1264], [1136, 1265, 1264], [1136, 1137, 1266], [1136, 1266, 1265], [1137, 1138, 1266], [1138, 1267, 1266], [1138, 1139, 1268], [1138, 1268, 1267], [1139, 1140, 1268], [1140, 1269, 1268], [1140, 1141, 1270], [1140, 1270, 1269], [1141, 1142, 1270], [1142, 1271, 1270], [1142, 1143, 1272], [1142, 1272, 1271], [1143, 1144, 1272], [1144, 1273, 1272], [1144, 1145, 1274], [1144, 1274, 1273], [1145, 1146, 1274], [1146, 1275, 1274], [1146, 1147, 1276], [1146, 1276, 1275], [1147, 1148, 1276], [1148, 1277, 1276], [1148, 1149, 1278], [1148, 1278, 1277], [1149, 1150, 1278], [1150, 1279, 1278], [1150, 1151, 1280], [1150, 1280, 1279], [1151, 1152, 1280], [1152, 1281, 1280], [1152, 1153, 1282], [1152, 1282, 1281], [1153, 1154, 1282], [1154, 1283, 1282], [1154, 1155, 1284], [1154, 1284, 1283], [1155, 1156, 1284], [1156, 1285, 1284], [1156, 1157, 1286], [1156, 1286, 1285], [1157, 1158, 1286], [1158, 1287, 1286], [1158, 1159, 1288], [1158, 1288, 1287], [1159, 1160, 1288], 
[1160, 1289, 1288], [1161, 1162, 1290], [1162, 1291, 1290], [1162, 1163, 1292], [1162, 1292, 1291], [1163, 1164, 1292], [1164, 1293, 1292], [1164, 1165, 1294], [1164, 1294, 1293], [1165, 1166, 1294], [1166, 1295, 1294], [1166, 1167, 1296], [1166, 1296, 1295], [1167, 1168, 1296], [1168, 1297, 1296], [1168, 1169, 1298], [1168, 1298, 1297], [1169, 1170, 1298], [1170, 1299, 1298], [1170, 1171, 1300], [1170, 1300, 1299], [1171, 1172, 1300], [1172, 1301, 1300], [1172, 1173, 1302], [1172, 1302, 1301], [1173, 1174, 1302], [1174, 1303, 1302], [1174, 1175, 1304], [1174, 1304, 1303], [1175, 1176, 1304], [1176, 1305, 1304], [1176, 1177, 1306], [1176, 1306, 1305], [1177, 1178, 1306], [1178, 1307, 1306], [1178, 1179, 1308], [1178, 1308, 1307], [1179, 1180, 1308], [1180, 1309, 1308], [1180, 1181, 1310], [1180, 1310, 1309], [1181, 1182, 1310], [1182, 1311, 1310], [1182, 1183, 1312], [1182, 1312, 1311], [1183, 1184, 1312], [1184, 1313, 1312], [1184, 1185, 1314], [1184, 1314, 1313], [1185, 1186, 1314], [1186, 1315, 1314], [1186, 1187, 1316], [1186, 1316, 1315], [1187, 1188, 1316], [1188, 1317, 1316], [1188, 1189, 1318], [1188, 1318, 1317], [1189, 1190, 1318], [1190, 1319, 1318], [1190, 1191, 1320], [1190, 1320, 1319], [1191, 1192, 1320], [1192, 1321, 1320], [1192, 1193, 1322], [1192, 1322, 1321], [1193, 1194, 1322], [1194, 1323, 1322], [1194, 1195, 1324], [1194, 1324, 1323], [1195, 1196, 1324], [1196, 1325, 1324], [1196, 1197, 1326], [1196, 1326, 1325], [1197, 1198, 1326], [1198, 1327, 1326], [1198, 1199, 1328], [1198, 1328, 1327], [1199, 1200, 1328], [1200, 1329, 1328], [1200, 1201, 1330], [1200, 1330, 1329], [1201, 1202, 1330], [1202, 1331, 1330], [1202, 1203, 1332], [1202, 1332, 1331], [1203, 1204, 1332], [1204, 1333, 1332], [1204, 1205, 1334], [1204, 1334, 1333], [1205, 1206, 1334], [1206, 1335, 1334], [1206, 1207, 1336], [1206, 1336, 1335], [1207, 1208, 1336], [1208, 1337, 1336], [1208, 1209, 1338], [1208, 1338, 1337], [1209, 1210, 1338], [1210, 1339, 1338], [1210, 1211, 1340], [1210, 1340, 1339], [1211, 1212, 1340], [1212, 1341, 1340], [1212, 1213, 1342], [1212, 1342, 1341], [1213, 1214, 1342], [1214, 1343, 1342], [1214, 1215, 1344], [1214, 1344, 1343], [1215, 1216, 1344], [1216, 1345, 1344], [1216, 1217, 1346], [1216, 1346, 1345], [1217, 1218, 1346], [1218, 1347, 1346], [1218, 1219, 1348], [1218, 1348, 1347], [1219, 1220, 1348], [1220, 1349, 1348], [1220, 1221, 1350], [1220, 1350, 1349], [1221, 1222, 1350], [1222, 1351, 1350], [1222, 1223, 1352], [1222, 1352, 1351], [1223, 1224, 1352], [1224, 1353, 1352], [1224, 1225, 1354], [1224, 1354, 1353], [1225, 1226, 1354], [1226, 1355, 1354], [1226, 1227, 1356], [1226, 1356, 1355], [1227, 1228, 1356], [1228, 1357, 1356], [1228, 1229, 1358], [1228, 1358, 1357], [1229, 1230, 1358], [1230, 1359, 1358], [1230, 1231, 1360], [1230, 1360, 1359], [1231, 1232, 1360], [1232, 1361, 1360], [1232, 1233, 1362], [1232, 1362, 1361], [1233, 1234, 1362], [1234, 1363, 1362], [1234, 1235, 1364], [1234, 1364, 1363], [1235, 1236, 1364], [1236, 1365, 1364], [1236, 1237, 1366], [1236, 1366, 1365], [1237, 1238, 1366], [1238, 1367, 1366], [1238, 1239, 1368], [1238, 1368, 1367], [1239, 1240, 1368], [1240, 1369, 1368], [1240, 1241, 1370], [1240, 1370, 1369], [1241, 1242, 1370], [1242, 1371, 1370], [1242, 1243, 1372], [1242, 1372, 1371], [1243, 1244, 1372], [1244, 1373, 1372], [1244, 1245, 1374], [1244, 1374, 1373], [1245, 1246, 1374], [1246, 1375, 1374], [1246, 1247, 1376], [1246, 1376, 1375], [1247, 1248, 1376], [1248, 1377, 1376], [1248, 1249, 1378], [1248, 1378, 1377], [1249, 1250, 
1378], [1250, 1379, 1378], [1250, 1251, 1380], [1250, 1380, 1379], [1251, 1252, 1380], [1252, 1381, 1380], [1252, 1253, 1382], [1252, 1382, 1381], [1253, 1254, 1382], [1254, 1383, 1382], [1254, 1255, 1384], [1254, 1384, 1383], [1255, 1256, 1384], [1256, 1385, 1384], [1256, 1257, 1386], [1256, 1386, 1385], [1257, 1258, 1386], [1258, 1387, 1386], [1258, 1259, 1388], [1258, 1388, 1387], [1259, 1260, 1388], [1260, 1389, 1388], [1260, 1261, 1390], [1260, 1390, 1389], [1261, 1262, 1390], [1262, 1391, 1390], [1262, 1263, 1392], [1262, 1392, 1391], [1263, 1264, 1392], [1264, 1393, 1392], [1264, 1265, 1394], [1264, 1394, 1393], [1265, 1266, 1394], [1266, 1395, 1394], [1266, 1267, 1396], [1266, 1396, 1395], [1267, 1268, 1396], [1268, 1397, 1396], [1268, 1269, 1398], [1268, 1398, 1397], [1269, 1270, 1398], [1270, 1399, 1398], [1270, 1271, 1400], [1270, 1400, 1399], [1271, 1272, 1400], [1272, 1401, 1400], [1272, 1273, 1402], [1272, 1402, 1401], [1273, 1274, 1402], [1274, 1403, 1402], [1274, 1275, 1404], [1274, 1404, 1403], [1275, 1276, 1404], [1276, 1405, 1404], [1276, 1277, 1406], [1276, 1406, 1405], [1277, 1278, 1406], [1278, 1407, 1406], [1278, 1279, 1408], [1278, 1408, 1407], [1279, 1280, 1408], [1280, 1409, 1408], [1280, 1281, 1410], [1280, 1410, 1409], [1281, 1282, 1410], [1282, 1411, 1410], [1282, 1283, 1412], [1282, 1412, 1411], [1283, 1284, 1412], [1284, 1413, 1412], [1284, 1285, 1414], [1284, 1414, 1413], [1285, 1286, 1414], [1286, 1415, 1414], [1286, 1287, 1416], [1286, 1416, 1415], [1287, 1288, 1416], [1288, 1417, 1416], [1288, 1289, 1418], [1288, 1418, 1417], [1290, 1291, 1420], [1290, 1420, 1419], [1291, 1292, 1420], [1292, 1421, 1420], [1292, 1293, 1422], [1292, 1422, 1421], [1293, 1294, 1422], [1294, 1423, 1422], [1294, 1295, 1424], [1294, 1424, 1423], [1295, 1296, 1424], [1296, 1425, 1424], [1296, 1297, 1426], [1296, 1426, 1425], [1297, 1298, 1426], [1298, 1427, 1426], [1298, 1299, 1428], [1298, 1428, 1427], [1299, 1300, 1428], [1300, 1429, 1428], [1300, 1301, 1430], [1300, 1430, 1429], [1301, 1302, 1430], [1302, 1431, 1430], [1302, 1303, 1432], [1302, 1432, 1431], [1303, 1304, 1432], [1304, 1433, 1432], [1304, 1305, 1434], [1304, 1434, 1433], [1305, 1306, 1434], [1306, 1435, 1434], [1306, 1307, 1436], [1306, 1436, 1435], [1307, 1308, 1436], [1308, 1437, 1436], [1308, 1309, 1438], [1308, 1438, 1437], [1309, 1310, 1438], [1310, 1439, 1438], [1310, 1311, 1440], [1310, 1440, 1439], [1311, 1312, 1440], [1312, 1441, 1440], [1312, 1313, 1442], [1312, 1442, 1441], [1313, 1314, 1442], [1314, 1443, 1442], [1314, 1315, 1444], [1314, 1444, 1443], [1315, 1316, 1444], [1316, 1445, 1444], [1316, 1317, 1446], [1316, 1446, 1445], [1317, 1318, 1446], [1318, 1447, 1446], [1318, 1319, 1448], [1318, 1448, 1447], [1319, 1320, 1448], [1320, 1449, 1448], [1320, 1321, 1450], [1320, 1450, 1449], [1321, 1322, 1450], [1322, 1451, 1450], [1322, 1323, 1452], [1322, 1452, 1451], [1323, 1324, 1452], [1324, 1453, 1452], [1324, 1325, 1454], [1324, 1454, 1453], [1325, 1326, 1454], [1326, 1455, 1454], [1326, 1327, 1456], [1326, 1456, 1455], [1327, 1328, 1456], [1328, 1457, 1456], [1328, 1329, 1458], [1328, 1458, 1457], [1329, 1330, 1458], [1330, 1459, 1458], [1330, 1331, 1460], [1330, 1460, 1459], [1331, 1332, 1460], [1332, 1461, 1460], [1332, 1333, 1462], [1332, 1462, 1461], [1333, 1334, 1462], [1334, 1463, 1462], [1334, 1335, 1464], [1334, 1464, 1463], [1335, 1336, 1464], [1336, 1465, 1464], [1336, 1337, 1466], [1336, 1466, 1465], [1337, 1338, 1466], [1338, 1467, 1466], [1338, 1339, 1468], [1338, 1468, 1467], [1339, 
1340, 1468], [1340, 1469, 1468], [1340, 1341, 1470], [1340, 1470, 1469], [1341, 1342, 1470], [1342, 1471, 1470], [1342, 1343, 1472], [1342, 1472, 1471], [1343, 1344, 1472], [1344, 1473, 1472], [1344, 1345, 1474], [1344, 1474, 1473], [1345, 1346, 1474], [1346, 1475, 1474], [1346, 1347, 1476], [1346, 1476, 1475], [1347, 1348, 1476], [1348, 1477, 1476], [1348, 1349, 1478], [1348, 1478, 1477], [1349, 1350, 1478], [1350, 1479, 1478], [1350, 1351, 1480], [1350, 1480, 1479], [1351, 1352, 1480], [1352, 1481, 1480], [1352, 1353, 1482], [1352, 1482, 1481], [1353, 1354, 1482], [1354, 1483, 1482], [1354, 1355, 1484], [1354, 1484, 1483], [1355, 1356, 1484], [1356, 1485, 1484], [1356, 1357, 1486], [1356, 1486, 1485], [1357, 1358, 1486], [1358, 1487, 1486], [1358, 1359, 1488], [1358, 1488, 1487], [1359, 1360, 1488], [1360, 1489, 1488], [1360, 1361, 1490], [1360, 1490, 1489], [1361, 1362, 1490], [1362, 1491, 1490], [1362, 1363, 1492], [1362, 1492, 1491], [1363, 1364, 1492], [1364, 1493, 1492], [1364, 1365, 1494], [1364, 1494, 1493], [1365, 1366, 1494], [1366, 1495, 1494], [1366, 1367, 1496], [1366, 1496, 1495], [1367, 1368, 1496], [1368, 1497, 1496], [1368, 1369, 1498], [1368, 1498, 1497], [1369, 1370, 1498], [1370, 1499, 1498], [1370, 1371, 1500], [1370, 1500, 1499], [1371, 1372, 1500], [1372, 1501, 1500], [1372, 1373, 1502], [1372, 1502, 1501], [1373, 1374, 1502], [1374, 1503, 1502], [1374, 1375, 1504], [1374, 1504, 1503], [1375, 1376, 1504], [1376, 1505, 1504], [1376, 1377, 1506], [1376, 1506, 1505], [1377, 1378, 1506], [1378, 1507, 1506], [1378, 1379, 1508], [1378, 1508, 1507], [1379, 1380, 1508], [1380, 1509, 1508], [1380, 1381, 1510], [1380, 1510, 1509], [1381, 1382, 1510], [1382, 1511, 1510], [1382, 1383, 1512], [1382, 1512, 1511], [1383, 1384, 1512], [1384, 1513, 1512], [1384, 1385, 1514], [1384, 1514, 1513], [1385, 1386, 1514], [1386, 1515, 1514], [1386, 1387, 1516], [1386, 1516, 1515], [1387, 1388, 1516], [1388, 1517, 1516], [1388, 1389, 1518], [1388, 1518, 1517], [1389, 1390, 1518], [1390, 1519, 1518], [1390, 1391, 1520], [1390, 1520, 1519], [1391, 1392, 1520], [1392, 1521, 1520], [1392, 1393, 1522], [1392, 1522, 1521], [1393, 1394, 1522], [1394, 1523, 1522], [1394, 1395, 1524], [1394, 1524, 1523], [1395, 1396, 1524], [1396, 1525, 1524], [1396, 1397, 1526], [1396, 1526, 1525], [1397, 1398, 1526], [1398, 1527, 1526], [1398, 1399, 1528], [1398, 1528, 1527], [1399, 1400, 1528], [1400, 1529, 1528], [1400, 1401, 1530], [1400, 1530, 1529], [1401, 1402, 1530], [1402, 1531, 1530], [1402, 1403, 1532], [1402, 1532, 1531], [1403, 1404, 1532], [1404, 1533, 1532], [1404, 1405, 1534], [1404, 1534, 1533], [1405, 1406, 1534], [1406, 1535, 1534], [1406, 1407, 1536], [1406, 1536, 1535], [1407, 1408, 1536], [1408, 1537, 1536], [1408, 1409, 1538], [1408, 1538, 1537], [1409, 1410, 1538], [1410, 1539, 1538], [1410, 1411, 1540], [1410, 1540, 1539], [1411, 1412, 1540], [1412, 1541, 1540], [1412, 1413, 1542], [1412, 1542, 1541], [1413, 1414, 1542], [1414, 1543, 1542], [1414, 1415, 1544], [1414, 1544, 1543], [1415, 1416, 1544], [1416, 1545, 1544], [1416, 1417, 1546], [1416, 1546, 1545], [1417, 1418, 1546], [1418, 1547, 1546], [1419, 1420, 1548], [1420, 1549, 1548], [1420, 1421, 1550], [1420, 1550, 1549], [1421, 1422, 1550], [1422, 1551, 1550], [1422, 1423, 1552], [1422, 1552, 1551], [1423, 1424, 1552], [1424, 1553, 1552], [1424, 1425, 1554], [1424, 1554, 1553], [1425, 1426, 1554], [1426, 1555, 1554], [1426, 1427, 1556], [1426, 1556, 1555], [1427, 1428, 1556], [1428, 1557, 1556], [1428, 1429, 1558], [1428, 1558, 1557], 
[1429, 1430, 1558], [1430, 1559, 1558], [1430, 1431, 1560], [1430, 1560, 1559], [1431, 1432, 1560], [1432, 1561, 1560], [1432, 1433, 1562], [1432, 1562, 1561], [1433, 1434, 1562], [1434, 1563, 1562], [1434, 1435, 1564], [1434, 1564, 1563], [1435, 1436, 1564], [1436, 1565, 1564], [1436, 1437, 1566], [1436, 1566, 1565], [1437, 1438, 1566], [1438, 1567, 1566], [1438, 1439, 1568], [1438, 1568, 1567], [1439, 1440, 1568], [1440, 1569, 1568], [1440, 1441, 1570], [1440, 1570, 1569], [1441, 1442, 1570], [1442, 1571, 1570], [1442, 1443, 1572], [1442, 1572, 1571], [1443, 1444, 1572], [1444, 1573, 1572], [1444, 1445, 1574], [1444, 1574, 1573], [1445, 1446, 1574], [1446, 1575, 1574], [1446, 1447, 1576], [1446, 1576, 1575], [1447, 1448, 1576], [1448, 1577, 1576], [1448, 1449, 1578], [1448, 1578, 1577], [1449, 1450, 1578], [1450, 1579, 1578], [1450, 1451, 1580], [1450, 1580, 1579], [1451, 1452, 1580], [1452, 1581, 1580], [1452, 1453, 1582], [1452, 1582, 1581], [1453, 1454, 1582], [1454, 1583, 1582], [1454, 1455, 1584], [1454, 1584, 1583], [1455, 1456, 1584], [1456, 1585, 1584], [1456, 1457, 1586], [1456, 1586, 1585], [1457, 1458, 1586], [1458, 1587, 1586], [1458, 1459, 1588], [1458, 1588, 1587], [1459, 1460, 1588], [1460, 1589, 1588], [1460, 1461, 1590], [1460, 1590, 1589], [1461, 1462, 1590], [1462, 1591, 1590], [1462, 1463, 1592], [1462, 1592, 1591], [1463, 1464, 1592], [1464, 1593, 1592], [1464, 1465, 1594], [1464, 1594, 1593], [1465, 1466, 1594], [1466, 1595, 1594], [1466, 1467, 1596], [1466, 1596, 1595], [1467, 1468, 1596], [1468, 1597, 1596], [1468, 1469, 1598], [1468, 1598, 1597], [1469, 1470, 1598], [1470, 1599, 1598], [1470, 1471, 1600], [1470, 1600, 1599], [1471, 1472, 1600], [1472, 1601, 1600], [1472, 1473, 1602], [1472, 1602, 1601], [1473, 1474, 1602], [1474, 1603, 1602], [1474, 1475, 1604], [1474, 1604, 1603], [1475, 1476, 1604], [1476, 1605, 1604], [1476, 1477, 1606], [1476, 1606, 1605], [1477, 1478, 1606], [1478, 1607, 1606], [1478, 1479, 1608], [1478, 1608, 1607], [1479, 1480, 1608], [1480, 1609, 1608], [1480, 1481, 1610], [1480, 1610, 1609], [1481, 1482, 1610], [1482, 1611, 1610], [1482, 1483, 1612], [1482, 1612, 1611], [1483, 1484, 1612], [1484, 1613, 1612], [1484, 1485, 1614], [1484, 1614, 1613], [1485, 1486, 1614], [1486, 1615, 1614], [1486, 1487, 1616], [1486, 1616, 1615], [1487, 1488, 1616], [1488, 1617, 1616], [1488, 1489, 1618], [1488, 1618, 1617], [1489, 1490, 1618], [1490, 1619, 1618], [1490, 1491, 1620], [1490, 1620, 1619], [1491, 1492, 1620], [1492, 1621, 1620], [1492, 1493, 1622], [1492, 1622, 1621], [1493, 1494, 1622], [1494, 1623, 1622], [1494, 1495, 1624], [1494, 1624, 1623], [1495, 1496, 1624], [1496, 1625, 1624], [1496, 1497, 1626], [1496, 1626, 1625], [1497, 1498, 1626], [1498, 1627, 1626], [1498, 1499, 1628], [1498, 1628, 1627], [1499, 1500, 1628], [1500, 1629, 1628], [1500, 1501, 1630], [1500, 1630, 1629], [1501, 1502, 1630], [1502, 1631, 1630], [1502, 1503, 1632], [1502, 1632, 1631], [1503, 1504, 1632], [1504, 1633, 1632], [1504, 1505, 1634], [1504, 1634, 1633], [1505, 1506, 1634], [1506, 1635, 1634], [1506, 1507, 1636], [1506, 1636, 1635], [1507, 1508, 1636], [1508, 1637, 1636], [1508, 1509, 1638], [1508, 1638, 1637], [1509, 1510, 1638], [1510, 1639, 1638], [1510, 1511, 1640], [1510, 1640, 1639], [1511, 1512, 1640], [1512, 1641, 1640], [1512, 1513, 1642], [1512, 1642, 1641], [1513, 1514, 1642], [1514, 1643, 1642], [1514, 1515, 1644], [1514, 1644, 1643], [1515, 1516, 1644], [1516, 1645, 1644], [1516, 1517, 1646], [1516, 1646, 1645], [1517, 1518, 1646], [1518, 1647, 
1646], [1518, 1519, 1648], [1518, 1648, 1647], [1519, 1520, 1648], [1520, 1649, 1648], [1520, 1521, 1650], [1520, 1650, 1649], [1521, 1522, 1650], [1522, 1651, 1650], [1522, 1523, 1652], [1522, 1652, 1651], [1523, 1524, 1652], [1524, 1653, 1652], [1524, 1525, 1654], [1524, 1654, 1653], [1525, 1526, 1654], [1526, 1655, 1654], [1526, 1527, 1656], [1526, 1656, 1655], [1527, 1528, 1656], [1528, 1657, 1656], [1528, 1529, 1658], [1528, 1658, 1657], [1529, 1530, 1658], [1530, 1659, 1658], [1530, 1531, 1660], [1530, 1660, 1659], [1531, 1532, 1660], [1532, 1661, 1660], [1532, 1533, 1662], [1532, 1662, 1661], [1533, 1534, 1662], [1534, 1663, 1662], [1534, 1535, 1664], [1534, 1664, 1663], [1535, 1536, 1664], [1536, 1665, 1664], [1536, 1537, 1666], [1536, 1666, 1665], [1537, 1538, 1666], [1538, 1667, 1666], [1538, 1539, 1668], [1538, 1668, 1667], [1539, 1540, 1668], [1540, 1669, 1668], [1540, 1541, 1670], [1540, 1670, 1669], [1541, 1542, 1670], [1542, 1671, 1670], [1542, 1543, 1672], [1542, 1672, 1671], [1543, 1544, 1672], [1544, 1673, 1672], [1544, 1545, 1674], [1544, 1674, 1673], [1545, 1546, 1674], [1546, 1675, 1674], [1546, 1547, 1676], [1546, 1676, 1675], [1548, 1549, 1678], [1548, 1678, 1677], [1549, 1550, 1678], [1550, 1679, 1678], [1550, 1551, 1680], [1550, 1680, 1679], [1551, 1552, 1680], [1552, 1681, 1680], [1552, 1553, 1682], [1552, 1682, 1681], [1553, 1554, 1682], [1554, 1683, 1682], [1554, 1555, 1684], [1554, 1684, 1683], [1555, 1556, 1684], [1556, 1685, 1684], [1556, 1557, 1686], [1556, 1686, 1685], [1557, 1558, 1686], [1558, 1687, 1686], [1558, 1559, 1688], [1558, 1688, 1687], [1559, 1560, 1688], [1560, 1689, 1688], [1560, 1561, 1690], [1560, 1690, 1689], [1561, 1562, 1690], [1562, 1691, 1690], [1562, 1563, 1692], [1562, 1692, 1691], [1563, 1564, 1692], [1564, 1693, 1692], [1564, 1565, 1694], [1564, 1694, 1693], [1565, 1566, 1694], [1566, 1695, 1694], [1566, 1567, 1696], [1566, 1696, 1695], [1567, 1568, 1696], [1568, 1697, 1696], [1568, 1569, 1698], [1568, 1698, 1697], [1569, 1570, 1698], [1570, 1699, 1698], [1570, 1571, 1700], [1570, 1700, 1699], [1571, 1572, 1700], [1572, 1701, 1700], [1572, 1573, 1702], [1572, 1702, 1701], [1573, 1574, 1702], [1574, 1703, 1702], [1574, 1575, 1704], [1574, 1704, 1703], [1575, 1576, 1704], [1576, 1705, 1704], [1576, 1577, 1706], [1576, 1706, 1705], [1577, 1578, 1706], [1578, 1707, 1706], [1578, 1579, 1708], [1578, 1708, 1707], [1579, 1580, 1708], [1580, 1709, 1708], [1580, 1581, 1710], [1580, 1710, 1709], [1581, 1582, 1710], [1582, 1711, 1710], [1582, 1583, 1712], [1582, 1712, 1711], [1583, 1584, 1712], [1584, 1713, 1712], [1584, 1585, 1714], [1584, 1714, 1713], [1585, 1586, 1714], [1586, 1715, 1714], [1586, 1587, 1716], [1586, 1716, 1715], [1587, 1588, 1716], [1588, 1717, 1716], [1588, 1589, 1718], [1588, 1718, 1717], [1589, 1590, 1718], [1590, 1719, 1718], [1590, 1591, 1720], [1590, 1720, 1719], [1591, 1592, 1720], [1592, 1721, 1720], [1592, 1593, 1722], [1592, 1722, 1721], [1593, 1594, 1722], [1594, 1723, 1722], [1594, 1595, 1724], [1594, 1724, 1723], [1595, 1596, 1724], [1596, 1725, 1724], [1596, 1597, 1726], [1596, 1726, 1725], [1597, 1598, 1726], [1598, 1727, 1726], [1598, 1599, 1728], [1598, 1728, 1727], [1599, 1600, 1728], [1600, 1729, 1728], [1600, 1601, 1730], [1600, 1730, 1729], [1601, 1602, 1730], [1602, 1731, 1730], [1602, 1603, 1732], [1602, 1732, 1731], [1603, 1604, 1732], [1604, 1733, 1732], [1604, 1605, 1734], [1604, 1734, 1733], [1605, 1606, 1734], [1606, 1735, 1734], [1606, 1607, 1736], [1606, 1736, 1735], [1607, 1608, 1736], [1608, 
1737, 1736], [1608, 1609, 1738], [1608, 1738, 1737], [1609, 1610, 1738], [1610, 1739, 1738], [1610, 1611, 1740], [1610, 1740, 1739], [1611, 1612, 1740], [1612, 1741, 1740], [1612, 1613, 1742], [1612, 1742, 1741], [1613, 1614, 1742], [1614, 1743, 1742], [1614, 1615, 1744], [1614, 1744, 1743], [1615, 1616, 1744], [1616, 1745, 1744], [1616, 1617, 1746], [1616, 1746, 1745], [1617, 1618, 1746], [1618, 1747, 1746], [1618, 1619, 1748], [1618, 1748, 1747], [1619, 1620, 1748], [1620, 1749, 1748], [1620, 1621, 1750], [1620, 1750, 1749], [1621, 1622, 1750], [1622, 1751, 1750], [1622, 1623, 1752], [1622, 1752, 1751], [1623, 1624, 1752], [1624, 1753, 1752], [1624, 1625, 1754], [1624, 1754, 1753], [1625, 1626, 1754], [1626, 1755, 1754], [1626, 1627, 1756], [1626, 1756, 1755], [1627, 1628, 1756], [1628, 1757, 1756], [1628, 1629, 1758], [1628, 1758, 1757], [1629, 1630, 1758], [1630, 1759, 1758], [1630, 1631, 1760], [1630, 1760, 1759], [1631, 1632, 1760], [1632, 1761, 1760], [1632, 1633, 1762], [1632, 1762, 1761], [1633, 1634, 1762], [1634, 1763, 1762], [1634, 1635, 1764], [1634, 1764, 1763], [1635, 1636, 1764], [1636, 1765, 1764], [1636, 1637, 1766], [1636, 1766, 1765], [1637, 1638, 1766], [1638, 1767, 1766], [1638, 1639, 1768], [1638, 1768, 1767], [1639, 1640, 1768], [1640, 1769, 1768], [1640, 1641, 1770], [1640, 1770, 1769], [1641, 1642, 1770], [1642, 1771, 1770], [1642, 1643, 1772], [1642, 1772, 1771], [1643, 1644, 1772], [1644, 1773, 1772], [1644, 1645, 1774], [1644, 1774, 1773], [1645, 1646, 1774], [1646, 1775, 1774], [1646, 1647, 1776], [1646, 1776, 1775], [1647, 1648, 1776], [1648, 1777, 1776], [1648, 1649, 1778], [1648, 1778, 1777], [1649, 1650, 1778], [1650, 1779, 1778], [1650, 1651, 1780], [1650, 1780, 1779], [1651, 1652, 1780], [1652, 1781, 1780], [1652, 1653, 1782], [1652, 1782, 1781], [1653, 1654, 1782], [1654, 1783, 1782], [1654, 1655, 1784], [1654, 1784, 1783], [1655, 1656, 1784], [1656, 1785, 1784], [1656, 1657, 1786], [1656, 1786, 1785], [1657, 1658, 1786], [1658, 1787, 1786], [1658, 1659, 1788], [1658, 1788, 1787], [1659, 1660, 1788], [1660, 1789, 1788], [1660, 1661, 1790], [1660, 1790, 1789], [1661, 1662, 1790], [1662, 1791, 1790], [1662, 1663, 1792], [1662, 1792, 1791], [1663, 1664, 1792], [1664, 1793, 1792], [1664, 1665, 1794], [1664, 1794, 1793], [1665, 1666, 1794], [1666, 1795, 1794], [1666, 1667, 1796], [1666, 1796, 1795], [1667, 1668, 1796], [1668, 1797, 1796], [1668, 1669, 1798], [1668, 1798, 1797], [1669, 1670, 1798], [1670, 1799, 1798], [1670, 1671, 1800], [1670, 1800, 1799], [1671, 1672, 1800], [1672, 1801, 1800], [1672, 1673, 1802], [1672, 1802, 1801], [1673, 1674, 1802], [1674, 1803, 1802], [1674, 1675, 1804], [1674, 1804, 1803], [1675, 1676, 1804], [1676, 1805, 1804], [1677, 1678, 1806], [1678, 1807, 1806], [1678, 1679, 1808], [1678, 1808, 1807], [1679, 1680, 1808], [1680, 1809, 1808], [1680, 1681, 1810], [1680, 1810, 1809], [1681, 1682, 1810], [1682, 1811, 1810], [1682, 1683, 1812], [1682, 1812, 1811], [1683, 1684, 1812], [1684, 1813, 1812], [1684, 1685, 1814], [1684, 1814, 1813], [1685, 1686, 1814], [1686, 1815, 1814], [1686, 1687, 1816], [1686, 1816, 1815], [1687, 1688, 1816], [1688, 1817, 1816], [1688, 1689, 1818], [1688, 1818, 1817], [1689, 1690, 1818], [1690, 1819, 1818], [1690, 1691, 1820], [1690, 1820, 1819], [1691, 1692, 1820], [1692, 1821, 1820], [1692, 1693, 1822], [1692, 1822, 1821], [1693, 1694, 1822], [1694, 1823, 1822], [1694, 1695, 1824], [1694, 1824, 1823], [1695, 1696, 1824], [1696, 1825, 1824], [1696, 1697, 1826], [1696, 1826, 1825], [1697, 1698, 1826], 
[1698, 1827, 1826], [1698, 1699, 1828], [1698, 1828, 1827], [1699, 1700, 1828], [1700, 1829, 1828], [1700, 1701, 1830], [1700, 1830, 1829], [1701, 1702, 1830], [1702, 1831, 1830], [1702, 1703, 1832], [1702, 1832, 1831], [1703, 1704, 1832], [1704, 1833, 1832], [1704, 1705, 1834], [1704, 1834, 1833], [1705, 1706, 1834], [1706, 1835, 1834], [1706, 1707, 1836], [1706, 1836, 1835], [1707, 1708, 1836], [1708, 1837, 1836], [1708, 1709, 1838], [1708, 1838, 1837], [1709, 1710, 1838], [1710, 1839, 1838], [1710, 1711, 1840], [1710, 1840, 1839], [1711, 1712, 1840], [1712, 1841, 1840], [1712, 1713, 1842], [1712, 1842, 1841], [1713, 1714, 1842], [1714, 1843, 1842], [1714, 1715, 1844], [1714, 1844, 1843], [1715, 1716, 1844], [1716, 1845, 1844], [1716, 1717, 1846], [1716, 1846, 1845], [1717, 1718, 1846], [1718, 1847, 1846], [1718, 1719, 1848], [1718, 1848, 1847], [1719, 1720, 1848], [1720, 1849, 1848], [1720, 1721, 1850], [1720, 1850, 1849], [1721, 1722, 1850], [1722, 1851, 1850], [1722, 1723, 1852], [1722, 1852, 1851], [1723, 1724, 1852], [1724, 1853, 1852], [1724, 1725, 1854], [1724, 1854, 1853], [1725, 1726, 1854], [1726, 1855, 1854], [1726, 1727, 1856], [1726, 1856, 1855], [1727, 1728, 1856], [1728, 1857, 1856], [1728, 1729, 1858], [1728, 1858, 1857], [1729, 1730, 1858], [1730, 1859, 1858], [1730, 1731, 1860], [1730, 1860, 1859], [1731, 1732, 1860], [1732, 1861, 1860], [1732, 1733, 1862], [1732, 1862, 1861], [1733, 1734, 1862], [1734, 1863, 1862], [1734, 1735, 1864], [1734, 1864, 1863], [1735, 1736, 1864], [1736, 1865, 1864], [1736, 1737, 1866], [1736, 1866, 1865], [1737, 1738, 1866], [1738, 1867, 1866], [1738, 1739, 1868], [1738, 1868, 1867], [1739, 1740, 1868], [1740, 1869, 1868], [1740, 1741, 1870], [1740, 1870, 1869], [1741, 1742, 1870], [1742, 1871, 1870], [1742, 1743, 1872], [1742, 1872, 1871], [1743, 1744, 1872], [1744, 1873, 1872], [1744, 1745, 1874], [1744, 1874, 1873], [1745, 1746, 1874], [1746, 1875, 1874], [1746, 1747, 1876], [1746, 1876, 1875], [1747, 1748, 1876], [1748, 1877, 1876], [1748, 1749, 1878], [1748, 1878, 1877], [1749, 1750, 1878], [1750, 1879, 1878], [1750, 1751, 1880], [1750, 1880, 1879], [1751, 1752, 1880], [1752, 1881, 1880], [1752, 1753, 1882], [1752, 1882, 1881], [1753, 1754, 1882], [1754, 1883, 1882], [1754, 1755, 1884], [1754, 1884, 1883], [1755, 1756, 1884], [1756, 1885, 1884], [1756, 1757, 1886], [1756, 1886, 1885], [1757, 1758, 1886], [1758, 1887, 1886], [1758, 1759, 1888], [1758, 1888, 1887], [1759, 1760, 1888], [1760, 1889, 1888], [1760, 1761, 1890], [1760, 1890, 1889], [1761, 1762, 1890], [1762, 1891, 1890], [1762, 1763, 1892], [1762, 1892, 1891], [1763, 1764, 1892], [1764, 1893, 1892], [1764, 1765, 1894], [1764, 1894, 1893], [1765, 1766, 1894], [1766, 1895, 1894], [1766, 1767, 1896], [1766, 1896, 1895], [1767, 1768, 1896], [1768, 1897, 1896], [1768, 1769, 1898], [1768, 1898, 1897], [1769, 1770, 1898], [1770, 1899, 1898], [1770, 1771, 1900], [1770, 1900, 1899], [1771, 1772, 1900], [1772, 1901, 1900], [1772, 1773, 1902], [1772, 1902, 1901], [1773, 1774, 1902], [1774, 1903, 1902], [1774, 1775, 1904], [1774, 1904, 1903], [1775, 1776, 1904], [1776, 1905, 1904], [1776, 1777, 1906], [1776, 1906, 1905], [1777, 1778, 1906], [1778, 1907, 1906], [1778, 1779, 1908], [1778, 1908, 1907], [1779, 1780, 1908], [1780, 1909, 1908], [1780, 1781, 1910], [1780, 1910, 1909], [1781, 1782, 1910], [1782, 1911, 1910], [1782, 1783, 1912], [1782, 1912, 1911], [1783, 1784, 1912], [1784, 1913, 1912], [1784, 1785, 1914], [1784, 1914, 1913], [1785, 1786, 1914], [1786, 1915, 1914], [1786, 1787, 
1916], [1786, 1916, 1915], [1787, 1788, 1916], [1788, 1917, 1916], [1788, 1789, 1918], [1788, 1918, 1917], [1789, 1790, 1918], [1790, 1919, 1918], [1790, 1791, 1920], [1790, 1920, 1919], [1791, 1792, 1920], [1792, 1921, 1920], [1792, 1793, 1922], [1792, 1922, 1921], [1793, 1794, 1922], [1794, 1923, 1922], [1794, 1795, 1924], [1794, 1924, 1923], [1795, 1796, 1924], [1796, 1925, 1924], [1796, 1797, 1926], [1796, 1926, 1925], [1797, 1798, 1926], [1798, 1927, 1926], [1798, 1799, 1928], [1798, 1928, 1927], [1799, 1800, 1928], [1800, 1929, 1928], [1800, 1801, 1930], [1800, 1930, 1929], [1801, 1802, 1930], [1802, 1931, 1930], [1802, 1803, 1932], [1802, 1932, 1931], [1803, 1804, 1932], [1804, 1933, 1932], [1804, 1805, 1934], [1804, 1934, 1933], [1806, 1807, 1936], [1806, 1936, 1935], [1807, 1808, 1936], [1808, 1937, 1936], [1808, 1809, 1938], [1808, 1938, 1937], [1809, 1810, 1938], [1810, 1939, 1938], [1810, 1811, 1940], [1810, 1940, 1939], [1811, 1812, 1940], [1812, 1941, 1940], [1812, 1813, 1942], [1812, 1942, 1941], [1813, 1814, 1942], [1814, 1943, 1942], [1814, 1815, 1944], [1814, 1944, 1943], [1815, 1816, 1944], [1816, 1945, 1944], [1816, 1817, 1946], [1816, 1946, 1945], [1817, 1818, 1946], [1818, 1947, 1946], [1818, 1819, 1948], [1818, 1948, 1947], [1819, 1820, 1948], [1820, 1949, 1948], [1820, 1821, 1950], [1820, 1950, 1949], [1821, 1822, 1950], [1822, 1951, 1950], [1822, 1823, 1952], [1822, 1952, 1951], [1823, 1824, 1952], [1824, 1953, 1952], [1824, 1825, 1954], [1824, 1954, 1953], [1825, 1826, 1954], [1826, 1955, 1954], [1826, 1827, 1956], [1826, 1956, 1955], [1827, 1828, 1956], [1828, 1957, 1956], [1828, 1829, 1958], [1828, 1958, 1957], [1829, 1830, 1958], [1830, 1959, 1958], [1830, 1831, 1960], [1830, 1960, 1959], [1831, 1832, 1960], [1832, 1961, 1960], [1832, 1833, 1962], [1832, 1962, 1961], [1833, 1834, 1962], [1834, 1963, 1962], [1834, 1835, 1964], [1834, 1964, 1963], [1835, 1836, 1964], [1836, 1965, 1964], [1836, 1837, 1966], [1836, 1966, 1965], [1837, 1838, 1966], [1838, 1967, 1966], [1838, 1839, 1968], [1838, 1968, 1967], [1839, 1840, 1968], [1840, 1969, 1968], [1840, 1841, 1970], [1840, 1970, 1969], [1841, 1842, 1970], [1842, 1971, 1970], [1842, 1843, 1972], [1842, 1972, 1971], [1843, 1844, 1972], [1844, 1973, 1972], [1844, 1845, 1974], [1844, 1974, 1973], [1845, 1846, 1974], [1846, 1975, 1974], [1846, 1847, 1976], [1846, 1976, 1975], [1847, 1848, 1976], [1848, 1977, 1976], [1848, 1849, 1978], [1848, 1978, 1977], [1849, 1850, 1978], [1850, 1979, 1978], [1850, 1851, 1980], [1850, 1980, 1979], [1851, 1852, 1980], [1852, 1981, 1980], [1852, 1853, 1982], [1852, 1982, 1981], [1853, 1854, 1982], [1854, 1983, 1982], [1854, 1855, 1984], [1854, 1984, 1983], [1855, 1856, 1984], [1856, 1985, 1984], [1856, 1857, 1986], [1856, 1986, 1985], [1857, 1858, 1986], [1858, 1987, 1986], [1858, 1859, 1988], [1858, 1988, 1987], [1859, 1860, 1988], [1860, 1989, 1988], [1860, 1861, 1990], [1860, 1990, 1989], [1861, 1862, 1990], [1862, 1991, 1990], [1862, 1863, 1992], [1862, 1992, 1991], [1863, 1864, 1992], [1864, 1993, 1992], [1864, 1865, 1994], [1864, 1994, 1993], [1865, 1866, 1994], [1866, 1995, 1994], [1866, 1867, 1996], [1866, 1996, 1995], [1867, 1868, 1996], [1868, 1997, 1996], [1868, 1869, 1998], [1868, 1998, 1997], [1869, 1870, 1998], [1870, 1999, 1998], [1870, 1871, 2000], [1870, 2000, 1999], [1871, 1872, 2000], [1872, 2001, 2000], [1872, 1873, 2002], [1872, 2002, 2001], [1873, 1874, 2002], [1874, 2003, 2002], [1874, 1875, 2004], [1874, 2004, 2003], [1875, 1876, 2004], [1876, 2005, 2004], [1876, 
1877, 2006], [1876, 2006, 2005], [1877, 1878, 2006], [1878, 2007, 2006], [1878, 1879, 2008], [1878, 2008, 2007], [1879, 1880, 2008], [1880, 2009, 2008], [1880, 1881, 2010], [1880, 2010, 2009], [1881, 1882, 2010], [1882, 2011, 2010], [1882, 1883, 2012], [1882, 2012, 2011], [1883, 1884, 2012], [1884, 2013, 2012], [1884, 1885, 2014], [1884, 2014, 2013], [1885, 1886, 2014], [1886, 2015, 2014], [1886, 1887, 2016], [1886, 2016, 2015], [1887, 1888, 2016], [1888, 2017, 2016], [1888, 1889, 2018], [1888, 2018, 2017], [1889, 1890, 2018], [1890, 2019, 2018], [1890, 1891, 2020], [1890, 2020, 2019], [1891, 1892, 2020], [1892, 2021, 2020], [1892, 1893, 2022], [1892, 2022, 2021], [1893, 1894, 2022], [1894, 2023, 2022], [1894, 1895, 2024], [1894, 2024, 2023], [1895, 1896, 2024], [1896, 2025, 2024], [1896, 1897, 2026], [1896, 2026, 2025], [1897, 1898, 2026], [1898, 2027, 2026], [1898, 1899, 2028], [1898, 2028, 2027], [1899, 1900, 2028], [1900, 2029, 2028], [1900, 1901, 2030], [1900, 2030, 2029], [1901, 1902, 2030], [1902, 2031, 2030], [1902, 1903, 2032], [1902, 2032, 2031], [1903, 1904, 2032], [1904, 2033, 2032], [1904, 1905, 2034], [1904, 2034, 2033], [1905, 1906, 2034], [1906, 2035, 2034], [1906, 1907, 2036], [1906, 2036, 2035], [1907, 1908, 2036], [1908, 2037, 2036], [1908, 1909, 2038], [1908, 2038, 2037], [1909, 1910, 2038], [1910, 2039, 2038], [1910, 1911, 2040], [1910, 2040, 2039], [1911, 1912, 2040], [1912, 2041, 2040], [1912, 1913, 2042], [1912, 2042, 2041], [1913, 1914, 2042], [1914, 2043, 2042], [1914, 1915, 2044], [1914, 2044, 2043], [1915, 1916, 2044], [1916, 2045, 2044], [1916, 1917, 2046], [1916, 2046, 2045], [1917, 1918, 2046], [1918, 2047, 2046], [1918, 1919, 2048], [1918, 2048, 2047], [1919, 1920, 2048], [1920, 2049, 2048], [1920, 1921, 2050], [1920, 2050, 2049], [1921, 1922, 2050], [1922, 2051, 2050], [1922, 1923, 2052], [1922, 2052, 2051], [1923, 1924, 2052], [1924, 2053, 2052], [1924, 1925, 2054], [1924, 2054, 2053], [1925, 1926, 2054], [1926, 2055, 2054], [1926, 1927, 2056], [1926, 2056, 2055], [1927, 1928, 2056], [1928, 2057, 2056], [1928, 1929, 2058], [1928, 2058, 2057], [1929, 1930, 2058], [1930, 2059, 2058], [1930, 1931, 2060], [1930, 2060, 2059], [1931, 1932, 2060], [1932, 2061, 2060], [1932, 1933, 2062], [1932, 2062, 2061], [1933, 1934, 2062], [1934, 2063, 2062], [1935, 1936, 2064], [1936, 2065, 2064], [1936, 1937, 2066], [1936, 2066, 2065], [1937, 1938, 2066], [1938, 2067, 2066], [1938, 1939, 2068], [1938, 2068, 2067], [1939, 1940, 2068], [1940, 2069, 2068], [1940, 1941, 2070], [1940, 2070, 2069], [1941, 1942, 2070], [1942, 2071, 2070], [1942, 1943, 2072], [1942, 2072, 2071], [1943, 1944, 2072], [1944, 2073, 2072], [1944, 1945, 2074], [1944, 2074, 2073], [1945, 1946, 2074], [1946, 2075, 2074], [1946, 1947, 2076], [1946, 2076, 2075], [1947, 1948, 2076], [1948, 2077, 2076], [1948, 1949, 2078], [1948, 2078, 2077], [1949, 1950, 2078], [1950, 2079, 2078], [1950, 1951, 2080], [1950, 2080, 2079], [1951, 1952, 2080], [1952, 2081, 2080], [1952, 1953, 2082], [1952, 2082, 2081], [1953, 1954, 2082], [1954, 2083, 2082], [1954, 1955, 2084], [1954, 2084, 2083], [1955, 1956, 2084], [1956, 2085, 2084], [1956, 1957, 2086], [1956, 2086, 2085], [1957, 1958, 2086], [1958, 2087, 2086], [1958, 1959, 2088], [1958, 2088, 2087], [1959, 1960, 2088], [1960, 2089, 2088], [1960, 1961, 2090], [1960, 2090, 2089], [1961, 1962, 2090], [1962, 2091, 2090], [1962, 1963, 2092], [1962, 2092, 2091], [1963, 1964, 2092], [1964, 2093, 2092], [1964, 1965, 2094], [1964, 2094, 2093], [1965, 1966, 2094], [1966, 2095, 2094], 
[1966, 1967, 2096], [1966, 2096, 2095], [1967, 1968, 2096], [1968, 2097, 2096], [1968, 1969, 2098], [1968, 2098, 2097], [1969, 1970, 2098], [1970, 2099, 2098], [1970, 1971, 2100], [1970, 2100, 2099], [1971, 1972, 2100], [1972, 2101, 2100], [1972, 1973, 2102], [1972, 2102, 2101], [1973, 1974, 2102], [1974, 2103, 2102], [1974, 1975, 2104], [1974, 2104, 2103], [1975, 1976, 2104], [1976, 2105, 2104], [1976, 1977, 2106], [1976, 2106, 2105], [1977, 1978, 2106], [1978, 2107, 2106], [1978, 1979, 2108], [1978, 2108, 2107], [1979, 1980, 2108], [1980, 2109, 2108], [1980, 1981, 2110], [1980, 2110, 2109], [1981, 1982, 2110], [1982, 2111, 2110], [1982, 1983, 2112], [1982, 2112, 2111], [1983, 1984, 2112], [1984, 2113, 2112], [1984, 1985, 2114], [1984, 2114, 2113], [1985, 1986, 2114], [1986, 2115, 2114], [1986, 1987, 2116], [1986, 2116, 2115], [1987, 1988, 2116], [1988, 2117, 2116], [1988, 1989, 2118], [1988, 2118, 2117], [1989, 1990, 2118], [1990, 2119, 2118], [1990, 1991, 2120], [1990, 2120, 2119], [1991, 1992, 2120], [1992, 2121, 2120], [1992, 1993, 2122], [1992, 2122, 2121], [1993, 1994, 2122], [1994, 2123, 2122], [1994, 1995, 2124], [1994, 2124, 2123], [1995, 1996, 2124], [1996, 2125, 2124], [1996, 1997, 2126], [1996, 2126, 2125], [1997, 1998, 2126], [1998, 2127, 2126], [1998, 1999, 2128], [1998, 2128, 2127], [1999, 2000, 2128], [2000, 2129, 2128], [2000, 2001, 2130], [2000, 2130, 2129], [2001, 2002, 2130], [2002, 2131, 2130], [2002, 2003, 2132], [2002, 2132, 2131], [2003, 2004, 2132], [2004, 2133, 2132], [2004, 2005, 2134], [2004, 2134, 2133], [2005, 2006, 2134], [2006, 2135, 2134], [2006, 2007, 2136], [2006, 2136, 2135], [2007, 2008, 2136], [2008, 2137, 2136], [2008, 2009, 2138], [2008, 2138, 2137], [2009, 2010, 2138], [2010, 2139, 2138], [2010, 2011, 2140], [2010, 2140, 2139], [2011, 2012, 2140], [2012, 2141, 2140], [2012, 2013, 2142], [2012, 2142, 2141], [2013, 2014, 2142], [2014, 2143, 2142], [2014, 2015, 2144], [2014, 2144, 2143], [2015, 2016, 2144], [2016, 2145, 2144], [2016, 2017, 2146], [2016, 2146, 2145], [2017, 2018, 2146], [2018, 2147, 2146], [2018, 2019, 2148], [2018, 2148, 2147], [2019, 2020, 2148], [2020, 2149, 2148], [2020, 2021, 2150], [2020, 2150, 2149], [2021, 2022, 2150], [2022, 2151, 2150], [2022, 2023, 2152], [2022, 2152, 2151], [2023, 2024, 2152], [2024, 2153, 2152], [2024, 2025, 2154], [2024, 2154, 2153], [2025, 2026, 2154], [2026, 2155, 2154], [2026, 2027, 2156], [2026, 2156, 2155], [2027, 2028, 2156], [2028, 2157, 2156], [2028, 2029, 2158], [2028, 2158, 2157], [2029, 2030, 2158], [2030, 2159, 2158], [2030, 2031, 2160], [2030, 2160, 2159], [2031, 2032, 2160], [2032, 2161, 2160], [2032, 2033, 2162], [2032, 2162, 2161], [2033, 2034, 2162], [2034, 2163, 2162], [2034, 2035, 2164], [2034, 2164, 2163], [2035, 2036, 2164], [2036, 2165, 2164], [2036, 2037, 2166], [2036, 2166, 2165], [2037, 2038, 2166], [2038, 2167, 2166], [2038, 2039, 2168], [2038, 2168, 2167], [2039, 2040, 2168], [2040, 2169, 2168], [2040, 2041, 2170], [2040, 2170, 2169], [2041, 2042, 2170], [2042, 2171, 2170], [2042, 2043, 2172], [2042, 2172, 2171], [2043, 2044, 2172], [2044, 2173, 2172], [2044, 2045, 2174], [2044, 2174, 2173], [2045, 2046, 2174], [2046, 2175, 2174], [2046, 2047, 2176], [2046, 2176, 2175], [2047, 2048, 2176], [2048, 2177, 2176], [2048, 2049, 2178], [2048, 2178, 2177], [2049, 2050, 2178], [2050, 2179, 2178], [2050, 2051, 2180], [2050, 2180, 2179], [2051, 2052, 2180], [2052, 2181, 2180], [2052, 2053, 2182], [2052, 2182, 2181], [2053, 2054, 2182], [2054, 2183, 2182], [2054, 2055, 2184], [2054, 2184, 
2183], [2055, 2056, 2184], [2056, 2185, 2184], [2056, 2057, 2186], [2056, 2186, 2185], [2057, 2058, 2186], [2058, 2187, 2186], [2058, 2059, 2188], [2058, 2188, 2187], [2059, 2060, 2188], [2060, 2189, 2188], [2060, 2061, 2190], [2060, 2190, 2189], [2061, 2062, 2190], [2062, 2191, 2190], [2062, 2063, 2192], [2062, 2192, 2191], [2064, 2065, 2194], [2064, 2194, 2193], [2065, 2066, 2194], [2066, 2195, 2194], [2066, 2067, 2196], [2066, 2196, 2195], [2067, 2068, 2196], [2068, 2197, 2196], [2068, 2069, 2198], [2068, 2198, 2197], [2069, 2070, 2198], [2070, 2199, 2198], [2070, 2071, 2200], [2070, 2200, 2199], [2071, 2072, 2200], [2072, 2201, 2200], [2072, 2073, 2202], [2072, 2202, 2201], [2073, 2074, 2202], [2074, 2203, 2202], [2074, 2075, 2204], [2074, 2204, 2203], [2075, 2076, 2204], [2076, 2205, 2204], [2076, 2077, 2206], [2076, 2206, 2205], [2077, 2078, 2206], [2078, 2207, 2206], [2078, 2079, 2208], [2078, 2208, 2207], [2079, 2080, 2208], [2080, 2209, 2208], [2080, 2081, 2210], [2080, 2210, 2209], [2081, 2082, 2210], [2082, 2211, 2210], [2082, 2083, 2212], [2082, 2212, 2211], [2083, 2084, 2212], [2084, 2213, 2212], [2084, 2085, 2214], [2084, 2214, 2213], [2085, 2086, 2214], [2086, 2215, 2214], [2086, 2087, 2216], [2086, 2216, 2215], [2087, 2088, 2216], [2088, 2217, 2216], [2088, 2089, 2218], [2088, 2218, 2217], [2089, 2090, 2218], [2090, 2219, 2218], [2090, 2091, 2220], [2090, 2220, 2219], [2091, 2092, 2220], [2092, 2221, 2220], [2092, 2093, 2222], [2092, 2222, 2221], [2093, 2094, 2222], [2094, 2223, 2222], [2094, 2095, 2224], [2094, 2224, 2223], [2095, 2096, 2224], [2096, 2225, 2224], [2096, 2097, 2226], [2096, 2226, 2225], [2097, 2098, 2226], [2098, 2227, 2226], [2098, 2099, 2228], [2098, 2228, 2227], [2099, 2100, 2228], [2100, 2229, 2228], [2100, 2101, 2230], [2100, 2230, 2229], [2101, 2102, 2230], [2102, 2231, 2230], [2102, 2103, 2232], [2102, 2232, 2231], [2103, 2104, 2232], [2104, 2233, 2232], [2104, 2105, 2234], [2104, 2234, 2233], [2105, 2106, 2234], [2106, 2235, 2234], [2106, 2107, 2236], [2106, 2236, 2235], [2107, 2108, 2236], [2108, 2237, 2236], [2108, 2109, 2238], [2108, 2238, 2237], [2109, 2110, 2238], [2110, 2239, 2238], [2110, 2111, 2240], [2110, 2240, 2239], [2111, 2112, 2240], [2112, 2241, 2240], [2112, 2113, 2242], [2112, 2242, 2241], [2113, 2114, 2242], [2114, 2243, 2242], [2114, 2115, 2244], [2114, 2244, 2243], [2115, 2116, 2244], [2116, 2245, 2244], [2116, 2117, 2246], [2116, 2246, 2245], [2117, 2118, 2246], [2118, 2247, 2246], [2118, 2119, 2248], [2118, 2248, 2247], [2119, 2120, 2248], [2120, 2249, 2248], [2120, 2121, 2250], [2120, 2250, 2249], [2121, 2122, 2250], [2122, 2251, 2250], [2122, 2123, 2252], [2122, 2252, 2251], [2123, 2124, 2252], [2124, 2253, 2252], [2124, 2125, 2254], [2124, 2254, 2253], [2125, 2126, 2254], [2126, 2255, 2254], [2126, 2127, 2256], [2126, 2256, 2255], [2127, 2128, 2256], [2128, 2257, 2256], [2128, 2129, 2258], [2128, 2258, 2257], [2129, 2130, 2258], [2130, 2259, 2258], [2130, 2131, 2260], [2130, 2260, 2259], [2131, 2132, 2260], [2132, 2261, 2260], [2132, 2133, 2262], [2132, 2262, 2261], [2133, 2134, 2262], [2134, 2263, 2262], [2134, 2135, 2264], [2134, 2264, 2263], [2135, 2136, 2264], [2136, 2265, 2264], [2136, 2137, 2266], [2136, 2266, 2265], [2137, 2138, 2266], [2138, 2267, 2266], [2138, 2139, 2268], [2138, 2268, 2267], [2139, 2140, 2268], [2140, 2269, 2268], [2140, 2141, 2270], [2140, 2270, 2269], [2141, 2142, 2270], [2142, 2271, 2270], [2142, 2143, 2272], [2142, 2272, 2271], [2143, 2144, 2272], [2144, 2273, 2272], [2144, 2145, 2274], [2144, 
2274, 2273], [2145, 2146, 2274], [2146, 2275, 2274], [2146, 2147, 2276], [2146, 2276, 2275], [2147, 2148, 2276], [2148, 2277, 2276], [2148, 2149, 2278], [2148, 2278, 2277], [2149, 2150, 2278], [2150, 2279, 2278], [2150, 2151, 2280], [2150, 2280, 2279], [2151, 2152, 2280], [2152, 2281, 2280], [2152, 2153, 2282], [2152, 2282, 2281], [2153, 2154, 2282], [2154, 2283, 2282], [2154, 2155, 2284], [2154, 2284, 2283], [2155, 2156, 2284], [2156, 2285, 2284], [2156, 2157, 2286], [2156, 2286, 2285], [2157, 2158, 2286], [2158, 2287, 2286], [2158, 2159, 2288], [2158, 2288, 2287], [2159, 2160, 2288], [2160, 2289, 2288], [2160, 2161, 2290], [2160, 2290, 2289], [2161, 2162, 2290], [2162, 2291, 2290], [2162, 2163, 2292], [2162, 2292, 2291], [2163, 2164, 2292], [2164, 2293, 2292], [2164, 2165, 2294], [2164, 2294, 2293], [2165, 2166, 2294], [2166, 2295, 2294], [2166, 2167, 2296], [2166, 2296, 2295], [2167, 2168, 2296], [2168, 2297, 2296], [2168, 2169, 2298], [2168, 2298, 2297], [2169, 2170, 2298], [2170, 2299, 2298], [2170, 2171, 2300], [2170, 2300, 2299], [2171, 2172, 2300], [2172, 2301, 2300], [2172, 2173, 2302], [2172, 2302, 2301], [2173, 2174, 2302], [2174, 2303, 2302], [2174, 2175, 2304], [2174, 2304, 2303], [2175, 2176, 2304], [2176, 2305, 2304], [2176, 2177, 2306], [2176, 2306, 2305], [2177, 2178, 2306], [2178, 2307, 2306], [2178, 2179, 2308], [2178, 2308, 2307], [2179, 2180, 2308], [2180, 2309, 2308], [2180, 2181, 2310], [2180, 2310, 2309], [2181, 2182, 2310], [2182, 2311, 2310], [2182, 2183, 2312], [2182, 2312, 2311], [2183, 2184, 2312], [2184, 2313, 2312], [2184, 2185, 2314], [2184, 2314, 2313], [2185, 2186, 2314], [2186, 2315, 2314], [2186, 2187, 2316], [2186, 2316, 2315], [2187, 2188, 2316], [2188, 2317, 2316], [2188, 2189, 2318], [2188, 2318, 2317], [2189, 2190, 2318], [2190, 2319, 2318], [2190, 2191, 2320], [2190, 2320, 2319], [2191, 2192, 2320], [2192, 2321, 2320], [2193, 2194, 2322], [2194, 2323, 2322], [2194, 2195, 2324], [2194, 2324, 2323], [2195, 2196, 2324], [2196, 2325, 2324], [2196, 2197, 2326], [2196, 2326, 2325], [2197, 2198, 2326], [2198, 2327, 2326], [2198, 2199, 2328], [2198, 2328, 2327], [2199, 2200, 2328], [2200, 2329, 2328], [2200, 2201, 2330], [2200, 2330, 2329], [2201, 2202, 2330], [2202, 2331, 2330], [2202, 2203, 2332], [2202, 2332, 2331], [2203, 2204, 2332], [2204, 2333, 2332], [2204, 2205, 2334], [2204, 2334, 2333], [2205, 2206, 2334], [2206, 2335, 2334], [2206, 2207, 2336], [2206, 2336, 2335], [2207, 2208, 2336], [2208, 2337, 2336], [2208, 2209, 2338], [2208, 2338, 2337], [2209, 2210, 2338], [2210, 2339, 2338], [2210, 2211, 2340], [2210, 2340, 2339], [2211, 2212, 2340], [2212, 2341, 2340], [2212, 2213, 2342], [2212, 2342, 2341], [2213, 2214, 2342], [2214, 2343, 2342], [2214, 2215, 2344], [2214, 2344, 2343], [2215, 2216, 2344], [2216, 2345, 2344], [2216, 2217, 2346], [2216, 2346, 2345], [2217, 2218, 2346], [2218, 2347, 2346], [2218, 2219, 2348], [2218, 2348, 2347], [2219, 2220, 2348], [2220, 2349, 2348], [2220, 2221, 2350], [2220, 2350, 2349], [2221, 2222, 2350], [2222, 2351, 2350], [2222, 2223, 2352], [2222, 2352, 2351], [2223, 2224, 2352], [2224, 2353, 2352], [2224, 2225, 2354], [2224, 2354, 2353], [2225, 2226, 2354], [2226, 2355, 2354], [2226, 2227, 2356], [2226, 2356, 2355], [2227, 2228, 2356], [2228, 2357, 2356], [2228, 2229, 2358], [2228, 2358, 2357], [2229, 2230, 2358], [2230, 2359, 2358], [2230, 2231, 2360], [2230, 2360, 2359], [2231, 2232, 2360], [2232, 2361, 2360], [2232, 2233, 2362], [2232, 2362, 2361], [2233, 2234, 2362], [2234, 2363, 2362], [2234, 2235, 2364], 
[2234, 2364, 2363], [2235, 2236, 2364], [2236, 2365, 2364], [2236, 2237, 2366], [2236, 2366, 2365], [2237, 2238, 2366], [2238, 2367, 2366], [2238, 2239, 2368], [2238, 2368, 2367], [2239, 2240, 2368], [2240, 2369, 2368], [2240, 2241, 2370], [2240, 2370, 2369], [2241, 2242, 2370], [2242, 2371, 2370], [2242, 2243, 2372], [2242, 2372, 2371], [2243, 2244, 2372], [2244, 2373, 2372], [2244, 2245, 2374], [2244, 2374, 2373], [2245, 2246, 2374], [2246, 2375, 2374], [2246, 2247, 2376], [2246, 2376, 2375], [2247, 2248, 2376], [2248, 2377, 2376], [2248, 2249, 2378], [2248, 2378, 2377], [2249, 2250, 2378], [2250, 2379, 2378], [2250, 2251, 2380], [2250, 2380, 2379], [2251, 2252, 2380], [2252, 2381, 2380], [2252, 2253, 2382], [2252, 2382, 2381], [2253, 2254, 2382], [2254, 2383, 2382], [2254, 2255, 2384], [2254, 2384, 2383], [2255, 2256, 2384], [2256, 2385, 2384], [2256, 2257, 2386], [2256, 2386, 2385], [2257, 2258, 2386], [2258, 2387, 2386], [2258, 2259, 2388], [2258, 2388, 2387], [2259, 2260, 2388], [2260, 2389, 2388], [2260, 2261, 2390], [2260, 2390, 2389], [2261, 2262, 2390], [2262, 2391, 2390], [2262, 2263, 2392], [2262, 2392, 2391], [2263, 2264, 2392], [2264, 2393, 2392], [2264, 2265, 2394], [2264, 2394, 2393], [2265, 2266, 2394], [2266, 2395, 2394], [2266, 2267, 2396], [2266, 2396, 2395], [2267, 2268, 2396], [2268, 2397, 2396], [2268, 2269, 2398], [2268, 2398, 2397], [2269, 2270, 2398], [2270, 2399, 2398], [2270, 2271, 2400], [2270, 2400, 2399], [2271, 2272, 2400], [2272, 2401, 2400], [2272, 2273, 2402], [2272, 2402, 2401], [2273, 2274, 2402], [2274, 2403, 2402], [2274, 2275, 2404], [2274, 2404, 2403], [2275, 2276, 2404], [2276, 2405, 2404], [2276, 2277, 2406], [2276, 2406, 2405], [2277, 2278, 2406], [2278, 2407, 2406], [2278, 2279, 2408], [2278, 2408, 2407], [2279, 2280, 2408], [2280, 2409, 2408], [2280, 2281, 2410], [2280, 2410, 2409], [2281, 2282, 2410], [2282, 2411, 2410], [2282, 2283, 2412], [2282, 2412, 2411], [2283, 2284, 2412], [2284, 2413, 2412], [2284, 2285, 2414], [2284, 2414, 2413], [2285, 2286, 2414], [2286, 2415, 2414], [2286, 2287, 2416], [2286, 2416, 2415], [2287, 2288, 2416], [2288, 2417, 2416], [2288, 2289, 2418], [2288, 2418, 2417], [2289, 2290, 2418], [2290, 2419, 2418], [2290, 2291, 2420], [2290, 2420, 2419], [2291, 2292, 2420], [2292, 2421, 2420], [2292, 2293, 2422], [2292, 2422, 2421], [2293, 2294, 2422], [2294, 2423, 2422], [2294, 2295, 2424], [2294, 2424, 2423], [2295, 2296, 2424], [2296, 2425, 2424], [2296, 2297, 2426], [2296, 2426, 2425], [2297, 2298, 2426], [2298, 2427, 2426], [2298, 2299, 2428], [2298, 2428, 2427], [2299, 2300, 2428], [2300, 2429, 2428], [2300, 2301, 2430], [2300, 2430, 2429], [2301, 2302, 2430], [2302, 2431, 2430], [2302, 2303, 2432], [2302, 2432, 2431], [2303, 2304, 2432], [2304, 2433, 2432], [2304, 2305, 2434], [2304, 2434, 2433], [2305, 2306, 2434], [2306, 2435, 2434], [2306, 2307, 2436], [2306, 2436, 2435], [2307, 2308, 2436], [2308, 2437, 2436], [2308, 2309, 2438], [2308, 2438, 2437], [2309, 2310, 2438], [2310, 2439, 2438], [2310, 2311, 2440], [2310, 2440, 2439], [2311, 2312, 2440], [2312, 2441, 2440], [2312, 2313, 2442], [2312, 2442, 2441], [2313, 2314, 2442], [2314, 2443, 2442], [2314, 2315, 2444], [2314, 2444, 2443], [2315, 2316, 2444], [2316, 2445, 2444], [2316, 2317, 2446], [2316, 2446, 2445], [2317, 2318, 2446], [2318, 2447, 2446], [2318, 2319, 2448], [2318, 2448, 2447], [2319, 2320, 2448], [2320, 2449, 2448], [2320, 2321, 2450], [2320, 2450, 2449], [2322, 2323, 2452], [2322, 2452, 2451], [2323, 2324, 2452], [2324, 2453, 2452], [2324, 2325, 
2454], [2324, 2454, 2453], [2325, 2326, 2454], [2326, 2455, 2454], [2326, 2327, 2456], [2326, 2456, 2455], [2327, 2328, 2456], [2328, 2457, 2456], [2328, 2329, 2458], [2328, 2458, 2457], [2329, 2330, 2458], [2330, 2459, 2458], [2330, 2331, 2460], [2330, 2460, 2459], [2331, 2332, 2460], [2332, 2461, 2460], [2332, 2333, 2462], [2332, 2462, 2461], [2333, 2334, 2462], [2334, 2463, 2462], [2334, 2335, 2464], [2334, 2464, 2463], [2335, 2336, 2464], [2336, 2465, 2464], [2336, 2337, 2466], [2336, 2466, 2465], [2337, 2338, 2466], [2338, 2467, 2466], [2338, 2339, 2468], [2338, 2468, 2467], [2339, 2340, 2468], [2340, 2469, 2468], [2340, 2341, 2470], [2340, 2470, 2469], [2341, 2342, 2470], [2342, 2471, 2470], [2342, 2343, 2472], [2342, 2472, 2471], [2343, 2344, 2472], [2344, 2473, 2472], [2344, 2345, 2474], [2344, 2474, 2473], [2345, 2346, 2474], [2346, 2475, 2474], [2346, 2347, 2476], [2346, 2476, 2475], [2347, 2348, 2476], [2348, 2477, 2476], [2348, 2349, 2478], [2348, 2478, 2477], [2349, 2350, 2478], [2350, 2479, 2478], [2350, 2351, 2480], [2350, 2480, 2479], [2351, 2352, 2480], [2352, 2481, 2480], [2352, 2353, 2482], [2352, 2482, 2481], [2353, 2354, 2482], [2354, 2483, 2482], [2354, 2355, 2484], [2354, 2484, 2483], [2355, 2356, 2484], [2356, 2485, 2484], [2356, 2357, 2486], [2356, 2486, 2485], [2357, 2358, 2486], [2358, 2487, 2486], [2358, 2359, 2488], [2358, 2488, 2487], [2359, 2360, 2488], [2360, 2489, 2488], [2360, 2361, 2490], [2360, 2490, 2489], [2361, 2362, 2490], [2362, 2491, 2490], [2362, 2363, 2492], [2362, 2492, 2491], [2363, 2364, 2492], [2364, 2493, 2492], [2364, 2365, 2494], [2364, 2494, 2493], [2365, 2366, 2494], [2366, 2495, 2494], [2366, 2367, 2496], [2366, 2496, 2495], [2367, 2368, 2496], [2368, 2497, 2496], [2368, 2369, 2498], [2368, 2498, 2497], [2369, 2370, 2498], [2370, 2499, 2498], [2370, 2371, 2500], [2370, 2500, 2499], [2371, 2372, 2500], [2372, 2501, 2500], [2372, 2373, 2502], [2372, 2502, 2501], [2373, 2374, 2502], [2374, 2503, 2502], [2374, 2375, 2504], [2374, 2504, 2503], [2375, 2376, 2504], [2376, 2505, 2504], [2376, 2377, 2506], [2376, 2506, 2505], [2377, 2378, 2506], [2378, 2507, 2506], [2378, 2379, 2508], [2378, 2508, 2507], [2379, 2380, 2508], [2380, 2509, 2508], [2380, 2381, 2510], [2380, 2510, 2509], [2381, 2382, 2510], [2382, 2511, 2510], [2382, 2383, 2512], [2382, 2512, 2511], [2383, 2384, 2512], [2384, 2513, 2512], [2384, 2385, 2514], [2384, 2514, 2513], [2385, 2386, 2514], [2386, 2515, 2514], [2386, 2387, 2516], [2386, 2516, 2515], [2387, 2388, 2516], [2388, 2517, 2516], [2388, 2389, 2518], [2388, 2518, 2517], [2389, 2390, 2518], [2390, 2519, 2518], [2390, 2391, 2520], [2390, 2520, 2519], [2391, 2392, 2520], [2392, 2521, 2520], [2392, 2393, 2522], [2392, 2522, 2521], [2393, 2394, 2522], [2394, 2523, 2522], [2394, 2395, 2524], [2394, 2524, 2523], [2395, 2396, 2524], [2396, 2525, 2524], [2396, 2397, 2526], [2396, 2526, 2525], [2397, 2398, 2526], [2398, 2527, 2526], [2398, 2399, 2528], [2398, 2528, 2527], [2399, 2400, 2528], [2400, 2529, 2528], [2400, 2401, 2530], [2400, 2530, 2529], [2401, 2402, 2530], [2402, 2531, 2530], [2402, 2403, 2532], [2402, 2532, 2531], [2403, 2404, 2532], [2404, 2533, 2532], [2404, 2405, 2534], [2404, 2534, 2533], [2405, 2406, 2534], [2406, 2535, 2534], [2406, 2407, 2536], [2406, 2536, 2535], [2407, 2408, 2536], [2408, 2537, 2536], [2408, 2409, 2538], [2408, 2538, 2537], [2409, 2410, 2538], [2410, 2539, 2538], [2410, 2411, 2540], [2410, 2540, 2539], [2411, 2412, 2540], [2412, 2541, 2540], [2412, 2413, 2542], [2412, 2542, 2541], [2413, 
2414, 2542], [2414, 2543, 2542], [2414, 2415, 2544], [2414, 2544, 2543], [2415, 2416, 2544], [2416, 2545, 2544], [2416, 2417, 2546], [2416, 2546, 2545], [2417, 2418, 2546], [2418, 2547, 2546], [2418, 2419, 2548], [2418, 2548, 2547], [2419, 2420, 2548], [2420, 2549, 2548], [2420, 2421, 2550], [2420, 2550, 2549], [2421, 2422, 2550], [2422, 2551, 2550], [2422, 2423, 2552], [2422, 2552, 2551], [2423, 2424, 2552], [2424, 2553, 2552], [2424, 2425, 2554], [2424, 2554, 2553], [2425, 2426, 2554], [2426, 2555, 2554], [2426, 2427, 2556], [2426, 2556, 2555], [2427, 2428, 2556], [2428, 2557, 2556], [2428, 2429, 2558], [2428, 2558, 2557], [2429, 2430, 2558], [2430, 2559, 2558], [2430, 2431, 2560], [2430, 2560, 2559], [2431, 2432, 2560], [2432, 2561, 2560], [2432, 2433, 2562], [2432, 2562, 2561], [2433, 2434, 2562], [2434, 2563, 2562], [2434, 2435, 2564], [2434, 2564, 2563], [2435, 2436, 2564], [2436, 2565, 2564], [2436, 2437, 2566], [2436, 2566, 2565], [2437, 2438, 2566], [2438, 2567, 2566], [2438, 2439, 2568], [2438, 2568, 2567], [2439, 2440, 2568], [2440, 2569, 2568], [2440, 2441, 2570], [2440, 2570, 2569], [2441, 2442, 2570], [2442, 2571, 2570], [2442, 2443, 2572], [2442, 2572, 2571], [2443, 2444, 2572], [2444, 2573, 2572], [2444, 2445, 2574], [2444, 2574, 2573], [2445, 2446, 2574], [2446, 2575, 2574], [2446, 2447, 2576], [2446, 2576, 2575], [2447, 2448, 2576], [2448, 2577, 2576], [2448, 2449, 2578], [2448, 2578, 2577], [2449, 2450, 2578], [2450, 2579, 2578], [2451, 2452, 2580], [2452, 2581, 2580], [2452, 2453, 2582], [2452, 2582, 2581], [2453, 2454, 2582], [2454, 2583, 2582], [2454, 2455, 2584], [2454, 2584, 2583], [2455, 2456, 2584], [2456, 2585, 2584], [2456, 2457, 2586], [2456, 2586, 2585], [2457, 2458, 2586], [2458, 2587, 2586], [2458, 2459, 2588], [2458, 2588, 2587], [2459, 2460, 2588], [2460, 2589, 2588], [2460, 2461, 2590], [2460, 2590, 2589], [2461, 2462, 2590], [2462, 2591, 2590], [2462, 2463, 2592], [2462, 2592, 2591], [2463, 2464, 2592], [2464, 2593, 2592], [2464, 2465, 2594], [2464, 2594, 2593], [2465, 2466, 2594], [2466, 2595, 2594], [2466, 2467, 2596], [2466, 2596, 2595], [2467, 2468, 2596], [2468, 2597, 2596], [2468, 2469, 2598], [2468, 2598, 2597], [2469, 2470, 2598], [2470, 2599, 2598], [2470, 2471, 2600], [2470, 2600, 2599], [2471, 2472, 2600], [2472, 2601, 2600], [2472, 2473, 2602], [2472, 2602, 2601], [2473, 2474, 2602], [2474, 2603, 2602], [2474, 2475, 2604], [2474, 2604, 2603], [2475, 2476, 2604], [2476, 2605, 2604], [2476, 2477, 2606], [2476, 2606, 2605], [2477, 2478, 2606], [2478, 2607, 2606], [2478, 2479, 2608], [2478, 2608, 2607], [2479, 2480, 2608], [2480, 2609, 2608], [2480, 2481, 2610], [2480, 2610, 2609], [2481, 2482, 2610], [2482, 2611, 2610], [2482, 2483, 2612], [2482, 2612, 2611], [2483, 2484, 2612], [2484, 2613, 2612], [2484, 2485, 2614], [2484, 2614, 2613], [2485, 2486, 2614], [2486, 2615, 2614], [2486, 2487, 2616], [2486, 2616, 2615], [2487, 2488, 2616], [2488, 2617, 2616], [2488, 2489, 2618], [2488, 2618, 2617], [2489, 2490, 2618], [2490, 2619, 2618], [2490, 2491, 2620], [2490, 2620, 2619], [2491, 2492, 2620], [2492, 2621, 2620], [2492, 2493, 2622], [2492, 2622, 2621], [2493, 2494, 2622], [2494, 2623, 2622], [2494, 2495, 2624], [2494, 2624, 2623], [2495, 2496, 2624], [2496, 2625, 2624], [2496, 2497, 2626], [2496, 2626, 2625], [2497, 2498, 2626], [2498, 2627, 2626], [2498, 2499, 2628], [2498, 2628, 2627], [2499, 2500, 2628], [2500, 2629, 2628], [2500, 2501, 2630], [2500, 2630, 2629], [2501, 2502, 2630], [2502, 2631, 2630], [2502, 2503, 2632], [2502, 2632, 2631], 
[2503, 2504, 2632], [2504, 2633, 2632], [2504, 2505, 2634], [2504, 2634, 2633], [2505, 2506, 2634], [2506, 2635, 2634], [2506, 2507, 2636], [2506, 2636, 2635], [2507, 2508, 2636], [2508, 2637, 2636], [2508, 2509, 2638], [2508, 2638, 2637], [2509, 2510, 2638], [2510, 2639, 2638], [2510, 2511, 2640], [2510, 2640, 2639], [2511, 2512, 2640], [2512, 2641, 2640], [2512, 2513, 2642], [2512, 2642, 2641], [2513, 2514, 2642], [2514, 2643, 2642], [2514, 2515, 2644], [2514, 2644, 2643], [2515, 2516, 2644], [2516, 2645, 2644], [2516, 2517, 2646], [2516, 2646, 2645], [2517, 2518, 2646], [2518, 2647, 2646], [2518, 2519, 2648], [2518, 2648, 2647], [2519, 2520, 2648], [2520, 2649, 2648], [2520, 2521, 2650], [2520, 2650, 2649], [2521, 2522, 2650], [2522, 2651, 2650], [2522, 2523, 2652], [2522, 2652, 2651], [2523, 2524, 2652], [2524, 2653, 2652], [2524, 2525, 2654], [2524, 2654, 2653], [2525, 2526, 2654], [2526, 2655, 2654], [2526, 2527, 2656], [2526, 2656, 2655], [2527, 2528, 2656], [2528, 2657, 2656], [2528, 2529, 2658], [2528, 2658, 2657], [2529, 2530, 2658], [2530, 2659, 2658], [2530, 2531, 2660], [2530, 2660, 2659], [2531, 2532, 2660], [2532, 2661, 2660], [2532, 2533, 2662], [2532, 2662, 2661], [2533, 2534, 2662], [2534, 2663, 2662], [2534, 2535, 2664], [2534, 2664, 2663], [2535, 2536, 2664], [2536, 2665, 2664], [2536, 2537, 2666], [2536, 2666, 2665], [2537, 2538, 2666], [2538, 2667, 2666], [2538, 2539, 2668], [2538, 2668, 2667], [2539, 2540, 2668], [2540, 2669, 2668], [2540, 2541, 2670], [2540, 2670, 2669], [2541, 2542, 2670], [2542, 2671, 2670], [2542, 2543, 2672], [2542, 2672, 2671], [2543, 2544, 2672], [2544, 2673, 2672], [2544, 2545, 2674], [2544, 2674, 2673], [2545, 2546, 2674], [2546, 2675, 2674], [2546, 2547, 2676], [2546, 2676, 2675], [2547, 2548, 2676], [2548, 2677, 2676], [2548, 2549, 2678], [2548, 2678, 2677], [2549, 2550, 2678], [2550, 2679, 2678], [2550, 2551, 2680], [2550, 2680, 2679], [2551, 2552, 2680], [2552, 2681, 2680], [2552, 2553, 2682], [2552, 2682, 2681], [2553, 2554, 2682], [2554, 2683, 2682], [2554, 2555, 2684], [2554, 2684, 2683], [2555, 2556, 2684], [2556, 2685, 2684], [2556, 2557, 2686], [2556, 2686, 2685], [2557, 2558, 2686], [2558, 2687, 2686], [2558, 2559, 2688], [2558, 2688, 2687], [2559, 2560, 2688], [2560, 2689, 2688], [2560, 2561, 2690], [2560, 2690, 2689], [2561, 2562, 2690], [2562, 2691, 2690], [2562, 2563, 2692], [2562, 2692, 2691], [2563, 2564, 2692], [2564, 2693, 2692], [2564, 2565, 2694], [2564, 2694, 2693], [2565, 2566, 2694], [2566, 2695, 2694], [2566, 2567, 2696], [2566, 2696, 2695], [2567, 2568, 2696], [2568, 2697, 2696], [2568, 2569, 2698], [2568, 2698, 2697], [2569, 2570, 2698], [2570, 2699, 2698], [2570, 2571, 2700], [2570, 2700, 2699], [2571, 2572, 2700], [2572, 2701, 2700], [2572, 2573, 2702], [2572, 2702, 2701], [2573, 2574, 2702], [2574, 2703, 2702], [2574, 2575, 2704], [2574, 2704, 2703], [2575, 2576, 2704], [2576, 2705, 2704], [2576, 2577, 2706], [2576, 2706, 2705], [2577, 2578, 2706], [2578, 2707, 2706], [2578, 2579, 2708], [2578, 2708, 2707], [2580, 2581, 2710], [2580, 2710, 2709], [2581, 2582, 2710], [2582, 2711, 2710], [2582, 2583, 2712], [2582, 2712, 2711], [2583, 2584, 2712], [2584, 2713, 2712], [2584, 2585, 2714], [2584, 2714, 2713], [2585, 2586, 2714], [2586, 2715, 2714], [2586, 2587, 2716], [2586, 2716, 2715], [2587, 2588, 2716], [2588, 2717, 2716], [2588, 2589, 2718], [2588, 2718, 2717], [2589, 2590, 2718], [2590, 2719, 2718], [2590, 2591, 2720], [2590, 2720, 2719], [2591, 2592, 2720], [2592, 2721, 2720], [2592, 2593, 2722], [2592, 2722, 
2721], [2593, 2594, 2722], [2594, 2723, 2722], [2594, 2595, 2724], [2594, 2724, 2723], [2595, 2596, 2724], [2596, 2725, 2724], [2596, 2597, 2726], [2596, 2726, 2725], [2597, 2598, 2726], [2598, 2727, 2726], [2598, 2599, 2728], [2598, 2728, 2727], [2599, 2600, 2728], [2600, 2729, 2728], [2600, 2601, 2730], [2600, 2730, 2729], [2601, 2602, 2730], [2602, 2731, 2730], [2602, 2603, 2732], [2602, 2732, 2731], [2603, 2604, 2732], [2604, 2733, 2732], [2604, 2605, 2734], [2604, 2734, 2733], [2605, 2606, 2734], [2606, 2735, 2734], [2606, 2607, 2736], [2606, 2736, 2735], [2607, 2608, 2736], [2608, 2737, 2736], [2608, 2609, 2738], [2608, 2738, 2737], [2609, 2610, 2738], [2610, 2739, 2738], [2610, 2611, 2740], [2610, 2740, 2739], [2611, 2612, 2740], [2612, 2741, 2740], [2612, 2613, 2742], [2612, 2742, 2741], [2613, 2614, 2742], [2614, 2743, 2742], [2614, 2615, 2744], [2614, 2744, 2743], [2615, 2616, 2744], [2616, 2745, 2744], [2616, 2617, 2746], [2616, 2746, 2745], [2617, 2618, 2746], [2618, 2747, 2746], [2618, 2619, 2748], [2618, 2748, 2747], [2619, 2620, 2748], [2620, 2749, 2748], [2620, 2621, 2750], [2620, 2750, 2749], [2621, 2622, 2750], [2622, 2751, 2750], [2622, 2623, 2752], [2622, 2752, 2751], [2623, 2624, 2752], [2624, 2753, 2752], [2624, 2625, 2754], [2624, 2754, 2753], [2625, 2626, 2754], [2626, 2755, 2754], [2626, 2627, 2756], [2626, 2756, 2755], [2627, 2628, 2756], [2628, 2757, 2756], [2628, 2629, 2758], [2628, 2758, 2757], [2629, 2630, 2758], [2630, 2759, 2758], [2630, 2631, 2760], [2630, 2760, 2759], [2631, 2632, 2760], [2632, 2761, 2760], [2632, 2633, 2762], [2632, 2762, 2761], [2633, 2634, 2762], [2634, 2763, 2762], [2634, 2635, 2764], [2634, 2764, 2763], [2635, 2636, 2764], [2636, 2765, 2764], [2636, 2637, 2766], [2636, 2766, 2765], [2637, 2638, 2766], [2638, 2767, 2766], [2638, 2639, 2768], [2638, 2768, 2767], [2639, 2640, 2768], [2640, 2769, 2768], [2640, 2641, 2770], [2640, 2770, 2769], [2641, 2642, 2770], [2642, 2771, 2770], [2642, 2643, 2772], [2642, 2772, 2771], [2643, 2644, 2772], [2644, 2773, 2772], [2644, 2645, 2774], [2644, 2774, 2773], [2645, 2646, 2774], [2646, 2775, 2774], [2646, 2647, 2776], [2646, 2776, 2775], [2647, 2648, 2776], [2648, 2777, 2776], [2648, 2649, 2778], [2648, 2778, 2777], [2649, 2650, 2778], [2650, 2779, 2778], [2650, 2651, 2780], [2650, 2780, 2779], [2651, 2652, 2780], [2652, 2781, 2780], [2652, 2653, 2782], [2652, 2782, 2781], [2653, 2654, 2782], [2654, 2783, 2782], [2654, 2655, 2784], [2654, 2784, 2783], [2655, 2656, 2784], [2656, 2785, 2784], [2656, 2657, 2786], [2656, 2786, 2785], [2657, 2658, 2786], [2658, 2787, 2786], [2658, 2659, 2788], [2658, 2788, 2787], [2659, 2660, 2788], [2660, 2789, 2788], [2660, 2661, 2790], [2660, 2790, 2789], [2661, 2662, 2790], [2662, 2791, 2790], [2662, 2663, 2792], [2662, 2792, 2791], [2663, 2664, 2792], [2664, 2793, 2792], [2664, 2665, 2794], [2664, 2794, 2793], [2665, 2666, 2794], [2666, 2795, 2794], [2666, 2667, 2796], [2666, 2796, 2795], [2667, 2668, 2796], [2668, 2797, 2796], [2668, 2669, 2798], [2668, 2798, 2797], [2669, 2670, 2798], [2670, 2799, 2798], [2670, 2671, 2800], [2670, 2800, 2799], [2671, 2672, 2800], [2672, 2801, 2800], [2672, 2673, 2802], [2672, 2802, 2801], [2673, 2674, 2802], [2674, 2803, 2802], [2674, 2675, 2804], [2674, 2804, 2803], [2675, 2676, 2804], [2676, 2805, 2804], [2676, 2677, 2806], [2676, 2806, 2805], [2677, 2678, 2806], [2678, 2807, 2806], [2678, 2679, 2808], [2678, 2808, 2807], [2679, 2680, 2808], [2680, 2809, 2808], [2680, 2681, 2810], [2680, 2810, 2809], [2681, 2682, 2810], [2682, 
2811, 2810], [2682, 2683, 2812], [2682, 2812, 2811], [2683, 2684, 2812], [2684, 2813, 2812], [2684, 2685, 2814], [2684, 2814, 2813], [2685, 2686, 2814], [2686, 2815, 2814], [2686, 2687, 2816], [2686, 2816, 2815], [2687, 2688, 2816], [2688, 2817, 2816], [2688, 2689, 2818], [2688, 2818, 2817], [2689, 2690, 2818], [2690, 2819, 2818], [2690, 2691, 2820], [2690, 2820, 2819], [2691, 2692, 2820], [2692, 2821, 2820], [2692, 2693, 2822], [2692, 2822, 2821], [2693, 2694, 2822], [2694, 2823, 2822], [2694, 2695, 2824], [2694, 2824, 2823], [2695, 2696, 2824], [2696, 2825, 2824], [2696, 2697, 2826], [2696, 2826, 2825], [2697, 2698, 2826], [2698, 2827, 2826], [2698, 2699, 2828], [2698, 2828, 2827], [2699, 2700, 2828], [2700, 2829, 2828], [2700, 2701, 2830], [2700, 2830, 2829], [2701, 2702, 2830], [2702, 2831, 2830], [2702, 2703, 2832], [2702, 2832, 2831], [2703, 2704, 2832], [2704, 2833, 2832], [2704, 2705, 2834], [2704, 2834, 2833], [2705, 2706, 2834], [2706, 2835, 2834], [2706, 2707, 2836], [2706, 2836, 2835], [2707, 2708, 2836], [2708, 2837, 2836], [2709, 2710, 2838], [2710, 2839, 2838], [2710, 2711, 2840], [2710, 2840, 2839], [2711, 2712, 2840], [2712, 2841, 2840], [2712, 2713, 2842], [2712, 2842, 2841], [2713, 2714, 2842], [2714, 2843, 2842], [2714, 2715, 2844], [2714, 2844, 2843], [2715, 2716, 2844], [2716, 2845, 2844], [2716, 2717, 2846], [2716, 2846, 2845], [2717, 2718, 2846], [2718, 2847, 2846], [2718, 2719, 2848], [2718, 2848, 2847], [2719, 2720, 2848], [2720, 2849, 2848], [2720, 2721, 2850], [2720, 2850, 2849], [2721, 2722, 2850], [2722, 2851, 2850], [2722, 2723, 2852], [2722, 2852, 2851], [2723, 2724, 2852], [2724, 2853, 2852], [2724, 2725, 2854], [2724, 2854, 2853], [2725, 2726, 2854], [2726, 2855, 2854], [2726, 2727, 2856], [2726, 2856, 2855], [2727, 2728, 2856], [2728, 2857, 2856], [2728, 2729, 2858], [2728, 2858, 2857], [2729, 2730, 2858], [2730, 2859, 2858], [2730, 2731, 2860], [2730, 2860, 2859], [2731, 2732, 2860], [2732, 2861, 2860], [2732, 2733, 2862], [2732, 2862, 2861], [2733, 2734, 2862], [2734, 2863, 2862], [2734, 2735, 2864], [2734, 2864, 2863], [2735, 2736, 2864], [2736, 2865, 2864], [2736, 2737, 2866], [2736, 2866, 2865], [2737, 2738, 2866], [2738, 2867, 2866], [2738, 2739, 2868], [2738, 2868, 2867], [2739, 2740, 2868], [2740, 2869, 2868], [2740, 2741, 2870], [2740, 2870, 2869], [2741, 2742, 2870], [2742, 2871, 2870], [2742, 2743, 2872], [2742, 2872, 2871], [2743, 2744, 2872], [2744, 2873, 2872], [2744, 2745, 2874], [2744, 2874, 2873], [2745, 2746, 2874], [2746, 2875, 2874], [2746, 2747, 2876], [2746, 2876, 2875], [2747, 2748, 2876], [2748, 2877, 2876], [2748, 2749, 2878], [2748, 2878, 2877], [2749, 2750, 2878], [2750, 2879, 2878], [2750, 2751, 2880], [2750, 2880, 2879], [2751, 2752, 2880], [2752, 2881, 2880], [2752, 2753, 2882], [2752, 2882, 2881], [2753, 2754, 2882], [2754, 2883, 2882], [2754, 2755, 2884], [2754, 2884, 2883], [2755, 2756, 2884], [2756, 2885, 2884], [2756, 2757, 2886], [2756, 2886, 2885], [2757, 2758, 2886], [2758, 2887, 2886], [2758, 2759, 2888], [2758, 2888, 2887], [2759, 2760, 2888], [2760, 2889, 2888], [2760, 2761, 2890], [2760, 2890, 2889], [2761, 2762, 2890], [2762, 2891, 2890], [2762, 2763, 2892], [2762, 2892, 2891], [2763, 2764, 2892], [2764, 2893, 2892], [2764, 2765, 2894], [2764, 2894, 2893], [2765, 2766, 2894], [2766, 2895, 2894], [2766, 2767, 2896], [2766, 2896, 2895], [2767, 2768, 2896], [2768, 2897, 2896], [2768, 2769, 2898], [2768, 2898, 2897], [2769, 2770, 2898], [2770, 2899, 2898], [2770, 2771, 2900], [2770, 2900, 2899], [2771, 2772, 2900], 
[2772, 2901, 2900], [2772, 2773, 2902], [2772, 2902, 2901], [2773, 2774, 2902], [2774, 2903, 2902], [2774, 2775, 2904], [2774, 2904, 2903], [2775, 2776, 2904], [2776, 2905, 2904], [2776, 2777, 2906], [2776, 2906, 2905], [2777, 2778, 2906], [2778, 2907, 2906], [2778, 2779, 2908], [2778, 2908, 2907], [2779, 2780, 2908], [2780, 2909, 2908], [2780, 2781, 2910], [2780, 2910, 2909], [2781, 2782, 2910], [2782, 2911, 2910], [2782, 2783, 2912], [2782, 2912, 2911], [2783, 2784, 2912], [2784, 2913, 2912], [2784, 2785, 2914], [2784, 2914, 2913], [2785, 2786, 2914], [2786, 2915, 2914], [2786, 2787, 2916], [2786, 2916, 2915], [2787, 2788, 2916], [2788, 2917, 2916], [2788, 2789, 2918], [2788, 2918, 2917], [2789, 2790, 2918], [2790, 2919, 2918], [2790, 2791, 2920], [2790, 2920, 2919], [2791, 2792, 2920], [2792, 2921, 2920], [2792, 2793, 2922], [2792, 2922, 2921], [2793, 2794, 2922], [2794, 2923, 2922], [2794, 2795, 2924], [2794, 2924, 2923], [2795, 2796, 2924], [2796, 2925, 2924], [2796, 2797, 2926], [2796, 2926, 2925], [2797, 2798, 2926], [2798, 2927, 2926], [2798, 2799, 2928], [2798, 2928, 2927], [2799, 2800, 2928], [2800, 2929, 2928], [2800, 2801, 2930], [2800, 2930, 2929], [2801, 2802, 2930], [2802, 2931, 2930], [2802, 2803, 2932], [2802, 2932, 2931], [2803, 2804, 2932], [2804, 2933, 2932], [2804, 2805, 2934], [2804, 2934, 2933], [2805, 2806, 2934], [2806, 2935, 2934], [2806, 2807, 2936], [2806, 2936, 2935], [2807, 2808, 2936], [2808, 2937, 2936], [2808, 2809, 2938], [2808, 2938, 2937], [2809, 2810, 2938], [2810, 2939, 2938], [2810, 2811, 2940], [2810, 2940, 2939], [2811, 2812, 2940], [2812, 2941, 2940], [2812, 2813, 2942], [2812, 2942, 2941], [2813, 2814, 2942], [2814, 2943, 2942], [2814, 2815, 2944], [2814, 2944, 2943], [2815, 2816, 2944], [2816, 2945, 2944], [2816, 2817, 2946], [2816, 2946, 2945], [2817, 2818, 2946], [2818, 2947, 2946], [2818, 2819, 2948], [2818, 2948, 2947], [2819, 2820, 2948], [2820, 2949, 2948], [2820, 2821, 2950], [2820, 2950, 2949], [2821, 2822, 2950], [2822, 2951, 2950], [2822, 2823, 2952], [2822, 2952, 2951], [2823, 2824, 2952], [2824, 2953, 2952], [2824, 2825, 2954], [2824, 2954, 2953], [2825, 2826, 2954], [2826, 2955, 2954], [2826, 2827, 2956], [2826, 2956, 2955], [2827, 2828, 2956], [2828, 2957, 2956], [2828, 2829, 2958], [2828, 2958, 2957], [2829, 2830, 2958], [2830, 2959, 2958], [2830, 2831, 2960], [2830, 2960, 2959], [2831, 2832, 2960], [2832, 2961, 2960], [2832, 2833, 2962], [2832, 2962, 2961], [2833, 2834, 2962], [2834, 2963, 2962], [2834, 2835, 2964], [2834, 2964, 2963], [2835, 2836, 2964], [2836, 2965, 2964], [2836, 2837, 2966], [2836, 2966, 2965], [2838, 2839, 2968], [2838, 2968, 2967], [2839, 2840, 2968], [2840, 2969, 2968], [2840, 2841, 2970], [2840, 2970, 2969], [2841, 2842, 2970], [2842, 2971, 2970], [2842, 2843, 2972], [2842, 2972, 2971], [2843, 2844, 2972], [2844, 2973, 2972], [2844, 2845, 2974], [2844, 2974, 2973], [2845, 2846, 2974], [2846, 2975, 2974], [2846, 2847, 2976], [2846, 2976, 2975], [2847, 2848, 2976], [2848, 2977, 2976], [2848, 2849, 2978], [2848, 2978, 2977], [2849, 2850, 2978], [2850, 2979, 2978], [2850, 2851, 2980], [2850, 2980, 2979], [2851, 2852, 2980], [2852, 2981, 2980], [2852, 2853, 2982], [2852, 2982, 2981], [2853, 2854, 2982], [2854, 2983, 2982], [2854, 2855, 2984], [2854, 2984, 2983], [2855, 2856, 2984], [2856, 2985, 2984], [2856, 2857, 2986], [2856, 2986, 2985], [2857, 2858, 2986], [2858, 2987, 2986], [2858, 2859, 2988], [2858, 2988, 2987], [2859, 2860, 2988], [2860, 2989, 2988], [2860, 2861, 2990], [2860, 2990, 2989], [2861, 2862, 
2990], [2862, 2991, 2990], [2862, 2863, 2992], [2862, 2992, 2991], [2863, 2864, 2992], [2864, 2993, 2992], [2864, 2865, 2994], [2864, 2994, 2993], [2865, 2866, 2994], [2866, 2995, 2994], [2866, 2867, 2996], [2866, 2996, 2995], [2867, 2868, 2996], [2868, 2997, 2996], [2868, 2869, 2998], [2868, 2998, 2997], [2869, 2870, 2998], [2870, 2999, 2998], [2870, 2871, 3000], [2870, 3000, 2999], [2871, 2872, 3000], [2872, 3001, 3000], [2872, 2873, 3002], [2872, 3002, 3001], [2873, 2874, 3002], [2874, 3003, 3002], [2874, 2875, 3004], [2874, 3004, 3003], [2875, 2876, 3004], [2876, 3005, 3004], [2876, 2877, 3006], [2876, 3006, 3005], [2877, 2878, 3006], [2878, 3007, 3006], [2878, 2879, 3008], [2878, 3008, 3007], [2879, 2880, 3008], [2880, 3009, 3008], [2880, 2881, 3010], [2880, 3010, 3009], [2881, 2882, 3010], [2882, 3011, 3010], [2882, 2883, 3012], [2882, 3012, 3011], [2883, 2884, 3012], [2884, 3013, 3012], [2884, 2885, 3014], [2884, 3014, 3013], [2885, 2886, 3014], [2886, 3015, 3014], [2886, 2887, 3016], [2886, 3016, 3015], [2887, 2888, 3016], [2888, 3017, 3016], [2888, 2889, 3018], [2888, 3018, 3017], [2889, 2890, 3018], [2890, 3019, 3018], [2890, 2891, 3020], [2890, 3020, 3019], [2891, 2892, 3020], [2892, 3021, 3020], [2892, 2893, 3022], [2892, 3022, 3021], [2893, 2894, 3022], [2894, 3023, 3022], [2894, 2895, 3024], [2894, 3024, 3023], [2895, 2896, 3024], [2896, 3025, 3024], [2896, 2897, 3026], [2896, 3026, 3025], [2897, 2898, 3026], [2898, 3027, 3026], [2898, 2899, 3028], [2898, 3028, 3027], [2899, 2900, 3028], [2900, 3029, 3028], [2900, 2901, 3030], [2900, 3030, 3029], [2901, 2902, 3030], [2902, 3031, 3030], [2902, 2903, 3032], [2902, 3032, 3031], [2903, 2904, 3032], [2904, 3033, 3032], [2904, 2905, 3034], [2904, 3034, 3033], [2905, 2906, 3034], [2906, 3035, 3034], [2906, 2907, 3036], [2906, 3036, 3035], [2907, 2908, 3036], [2908, 3037, 3036], [2908, 2909, 3038], [2908, 3038, 3037], [2909, 2910, 3038], [2910, 3039, 3038], [2910, 2911, 3040], [2910, 3040, 3039], [2911, 2912, 3040], [2912, 3041, 3040], [2912, 2913, 3042], [2912, 3042, 3041], [2913, 2914, 3042], [2914, 3043, 3042], [2914, 2915, 3044], [2914, 3044, 3043], [2915, 2916, 3044], [2916, 3045, 3044], [2916, 2917, 3046], [2916, 3046, 3045], [2917, 2918, 3046], [2918, 3047, 3046], [2918, 2919, 3048], [2918, 3048, 3047], [2919, 2920, 3048], [2920, 3049, 3048], [2920, 2921, 3050], [2920, 3050, 3049], [2921, 2922, 3050], [2922, 3051, 3050], [2922, 2923, 3052], [2922, 3052, 3051], [2923, 2924, 3052], [2924, 3053, 3052], [2924, 2925, 3054], [2924, 3054, 3053], [2925, 2926, 3054], [2926, 3055, 3054], [2926, 2927, 3056], [2926, 3056, 3055], [2927, 2928, 3056], [2928, 3057, 3056], [2928, 2929, 3058], [2928, 3058, 3057], [2929, 2930, 3058], [2930, 3059, 3058], [2930, 2931, 3060], [2930, 3060, 3059], [2931, 2932, 3060], [2932, 3061, 3060], [2932, 2933, 3062], [2932, 3062, 3061], [2933, 2934, 3062], [2934, 3063, 3062], [2934, 2935, 3064], [2934, 3064, 3063], [2935, 2936, 3064], [2936, 3065, 3064], [2936, 2937, 3066], [2936, 3066, 3065], [2937, 2938, 3066], [2938, 3067, 3066], [2938, 2939, 3068], [2938, 3068, 3067], [2939, 2940, 3068], [2940, 3069, 3068], [2940, 2941, 3070], [2940, 3070, 3069], [2941, 2942, 3070], [2942, 3071, 3070], [2942, 2943, 3072], [2942, 3072, 3071], [2943, 2944, 3072], [2944, 3073, 3072], [2944, 2945, 3074], [2944, 3074, 3073], [2945, 2946, 3074], [2946, 3075, 3074], [2946, 2947, 3076], [2946, 3076, 3075], [2947, 2948, 3076], [2948, 3077, 3076], [2948, 2949, 3078], [2948, 3078, 3077], [2949, 2950, 3078], [2950, 3079, 3078], [2950, 
2951, 3080], [2950, 3080, 3079], [2951, 2952, 3080], [2952, 3081, 3080], [2952, 2953, 3082], [2952, 3082, 3081], [2953, 2954, 3082], [2954, 3083, 3082], [2954, 2955, 3084], [2954, 3084, 3083], [2955, 2956, 3084], [2956, 3085, 3084], [2956, 2957, 3086], [2956, 3086, 3085], [2957, 2958, 3086], [2958, 3087, 3086], [2958, 2959, 3088], [2958, 3088, 3087], [2959, 2960, 3088], [2960, 3089, 3088], [2960, 2961, 3090], [2960, 3090, 3089], [2961, 2962, 3090], [2962, 3091, 3090], [2962, 2963, 3092], [2962, 3092, 3091], [2963, 2964, 3092], [2964, 3093, 3092], [2964, 2965, 3094], [2964, 3094, 3093], [2965, 2966, 3094], [2966, 3095, 3094], [2967, 2968, 3096], [2968, 3097, 3096], [2968, 2969, 3098], [2968, 3098, 3097], [2969, 2970, 3098], [2970, 3099, 3098], [2970, 2971, 3100], [2970, 3100, 3099], [2971, 2972, 3100], [2972, 3101, 3100], [2972, 2973, 3102], [2972, 3102, 3101], [2973, 2974, 3102], [2974, 3103, 3102], [2974, 2975, 3104], [2974, 3104, 3103], [2975, 2976, 3104], [2976, 3105, 3104], [2976, 2977, 3106], [2976, 3106, 3105], [2977, 2978, 3106], [2978, 3107, 3106], [2978, 2979, 3108], [2978, 3108, 3107], [2979, 2980, 3108], [2980, 3109, 3108], [2980, 2981, 3110], [2980, 3110, 3109], [2981, 2982, 3110], [2982, 3111, 3110], [2982, 2983, 3112], [2982, 3112, 3111], [2983, 2984, 3112], [2984, 3113, 3112], [2984, 2985, 3114], [2984, 3114, 3113], [2985, 2986, 3114], [2986, 3115, 3114], [2986, 2987, 3116], [2986, 3116, 3115], [2987, 2988, 3116], [2988, 3117, 3116], [2988, 2989, 3118], [2988, 3118, 3117], [2989, 2990, 3118], [2990, 3119, 3118], [2990, 2991, 3120], [2990, 3120, 3119], [2991, 2992, 3120], [2992, 3121, 3120], [2992, 2993, 3122], [2992, 3122, 3121], [2993, 2994, 3122], [2994, 3123, 3122], [2994, 2995, 3124], [2994, 3124, 3123], [2995, 2996, 3124], [2996, 3125, 3124], [2996, 2997, 3126], [2996, 3126, 3125], [2997, 2998, 3126], [2998, 3127, 3126], [2998, 2999, 3128], [2998, 3128, 3127], [2999, 3000, 3128], [3000, 3129, 3128], [3000, 3001, 3130], [3000, 3130, 3129], [3001, 3002, 3130], [3002, 3131, 3130], [3002, 3003, 3132], [3002, 3132, 3131], [3003, 3004, 3132], [3004, 3133, 3132], [3004, 3005, 3134], [3004, 3134, 3133], [3005, 3006, 3134], [3006, 3135, 3134], [3006, 3007, 3136], [3006, 3136, 3135], [3007, 3008, 3136], [3008, 3137, 3136], [3008, 3009, 3138], [3008, 3138, 3137], [3009, 3010, 3138], [3010, 3139, 3138], [3010, 3011, 3140], [3010, 3140, 3139], [3011, 3012, 3140], [3012, 3141, 3140], [3012, 3013, 3142], [3012, 3142, 3141], [3013, 3014, 3142], [3014, 3143, 3142], [3014, 3015, 3144], [3014, 3144, 3143], [3015, 3016, 3144], [3016, 3145, 3144], [3016, 3017, 3146], [3016, 3146, 3145], [3017, 3018, 3146], [3018, 3147, 3146], [3018, 3019, 3148], [3018, 3148, 3147], [3019, 3020, 3148], [3020, 3149, 3148], [3020, 3021, 3150], [3020, 3150, 3149], [3021, 3022, 3150], [3022, 3151, 3150], [3022, 3023, 3152], [3022, 3152, 3151], [3023, 3024, 3152], [3024, 3153, 3152], [3024, 3025, 3154], [3024, 3154, 3153], [3025, 3026, 3154], [3026, 3155, 3154], [3026, 3027, 3156], [3026, 3156, 3155], [3027, 3028, 3156], [3028, 3157, 3156], [3028, 3029, 3158], [3028, 3158, 3157], [3029, 3030, 3158], [3030, 3159, 3158], [3030, 3031, 3160], [3030, 3160, 3159], [3031, 3032, 3160], [3032, 3161, 3160], [3032, 3033, 3162], [3032, 3162, 3161], [3033, 3034, 3162], [3034, 3163, 3162], [3034, 3035, 3164], [3034, 3164, 3163], [3035, 3036, 3164], [3036, 3165, 3164], [3036, 3037, 3166], [3036, 3166, 3165], [3037, 3038, 3166], [3038, 3167, 3166], [3038, 3039, 3168], [3038, 3168, 3167], [3039, 3040, 3168], [3040, 3169, 3168], 
[3040, 3041, 3170], [3040, 3170, 3169], [3041, 3042, 3170], [3042, 3171, 3170], [3042, 3043, 3172], [3042, 3172, 3171], [3043, 3044, 3172], [3044, 3173, 3172], [3044, 3045, 3174], [3044, 3174, 3173], [3045, 3046, 3174], [3046, 3175, 3174], [3046, 3047, 3176], [3046, 3176, 3175], [3047, 3048, 3176], [3048, 3177, 3176], [3048, 3049, 3178], [3048, 3178, 3177], [3049, 3050, 3178], [3050, 3179, 3178], [3050, 3051, 3180], [3050, 3180, 3179], [3051, 3052, 3180], [3052, 3181, 3180], [3052, 3053, 3182], [3052, 3182, 3181], [3053, 3054, 3182], [3054, 3183, 3182], [3054, 3055, 3184], [3054, 3184, 3183], [3055, 3056, 3184], [3056, 3185, 3184], [3056, 3057, 3186], [3056, 3186, 3185], [3057, 3058, 3186], [3058, 3187, 3186], [3058, 3059, 3188], [3058, 3188, 3187], [3059, 3060, 3188], [3060, 3189, 3188], [3060, 3061, 3190], [3060, 3190, 3189], [3061, 3062, 3190], [3062, 3191, 3190], [3062, 3063, 3192], [3062, 3192, 3191], [3063, 3064, 3192], [3064, 3193, 3192], [3064, 3065, 3194], [3064, 3194, 3193], [3065, 3066, 3194], [3066, 3195, 3194], [3066, 3067, 3196], [3066, 3196, 3195], [3067, 3068, 3196], [3068, 3197, 3196], [3068, 3069, 3198], [3068, 3198, 3197], [3069, 3070, 3198], [3070, 3199, 3198], [3070, 3071, 3200], [3070, 3200, 3199], [3071, 3072, 3200], [3072, 3201, 3200], [3072, 3073, 3202], [3072, 3202, 3201], [3073, 3074, 3202], [3074, 3203, 3202], [3074, 3075, 3204], [3074, 3204, 3203], [3075, 3076, 3204], [3076, 3205, 3204], [3076, 3077, 3206], [3076, 3206, 3205], [3077, 3078, 3206], [3078, 3207, 3206], [3078, 3079, 3208], [3078, 3208, 3207], [3079, 3080, 3208], [3080, 3209, 3208], [3080, 3081, 3210], [3080, 3210, 3209], [3081, 3082, 3210], [3082, 3211, 3210], [3082, 3083, 3212], [3082, 3212, 3211], [3083, 3084, 3212], [3084, 3213, 3212], [3084, 3085, 3214], [3084, 3214, 3213], [3085, 3086, 3214], [3086, 3215, 3214], [3086, 3087, 3216], [3086, 3216, 3215], [3087, 3088, 3216], [3088, 3217, 3216], [3088, 3089, 3218], [3088, 3218, 3217], [3089, 3090, 3218], [3090, 3219, 3218], [3090, 3091, 3220], [3090, 3220, 3219], [3091, 3092, 3220], [3092, 3221, 3220], [3092, 3093, 3222], [3092, 3222, 3221], [3093, 3094, 3222], [3094, 3223, 3222], [3094, 3095, 3224], [3094, 3224, 3223], [3096, 3097, 3226], [3096, 3226, 3225], [3097, 3098, 3226], [3098, 3227, 3226], [3098, 3099, 3228], [3098, 3228, 3227], [3099, 3100, 3228], [3100, 3229, 3228], [3100, 3101, 3230], [3100, 3230, 3229], [3101, 3102, 3230], [3102, 3231, 3230], [3102, 3103, 3232], [3102, 3232, 3231], [3103, 3104, 3232], [3104, 3233, 3232], [3104, 3105, 3234], [3104, 3234, 3233], [3105, 3106, 3234], [3106, 3235, 3234], [3106, 3107, 3236], [3106, 3236, 3235], [3107, 3108, 3236], [3108, 3237, 3236], [3108, 3109, 3238], [3108, 3238, 3237], [3109, 3110, 3238], [3110, 3239, 3238], [3110, 3111, 3240], [3110, 3240, 3239], [3111, 3112, 3240], [3112, 3241, 3240], [3112, 3113, 3242], [3112, 3242, 3241], [3113, 3114, 3242], [3114, 3243, 3242], [3114, 3115, 3244], [3114, 3244, 3243], [3115, 3116, 3244], [3116, 3245, 3244], [3116, 3117, 3246], [3116, 3246, 3245], [3117, 3118, 3246], [3118, 3247, 3246], [3118, 3119, 3248], [3118, 3248, 3247], [3119, 3120, 3248], [3120, 3249, 3248], [3120, 3121, 3250], [3120, 3250, 3249], [3121, 3122, 3250], [3122, 3251, 3250], [3122, 3123, 3252], [3122, 3252, 3251], [3123, 3124, 3252], [3124, 3253, 3252], [3124, 3125, 3254], [3124, 3254, 3253], [3125, 3126, 3254], [3126, 3255, 3254], [3126, 3127, 3256], [3126, 3256, 3255], [3127, 3128, 3256], [3128, 3257, 3256], [3128, 3129, 3258], [3128, 3258, 3257], [3129, 3130, 3258], [3130, 3259, 
3258], [3130, 3131, 3260], [3130, 3260, 3259], [3131, 3132, 3260], [3132, 3261, 3260], [3132, 3133, 3262], [3132, 3262, 3261], [3133, 3134, 3262], [3134, 3263, 3262], [3134, 3135, 3264], [3134, 3264, 3263], [3135, 3136, 3264], [3136, 3265, 3264], [3136, 3137, 3266], [3136, 3266, 3265], [3137, 3138, 3266], [3138, 3267, 3266], [3138, 3139, 3268], [3138, 3268, 3267], [3139, 3140, 3268], [3140, 3269, 3268], [3140, 3141, 3270], [3140, 3270, 3269], [3141, 3142, 3270], [3142, 3271, 3270], [3142, 3143, 3272], [3142, 3272, 3271], [3143, 3144, 3272], [3144, 3273, 3272], [3144, 3145, 3274], [3144, 3274, 3273], [3145, 3146, 3274], [3146, 3275, 3274], [3146, 3147, 3276], [3146, 3276, 3275], [3147, 3148, 3276], [3148, 3277, 3276], [3148, 3149, 3278], [3148, 3278, 3277], [3149, 3150, 3278], [3150, 3279, 3278], [3150, 3151, 3280], [3150, 3280, 3279], [3151, 3152, 3280], [3152, 3281, 3280], [3152, 3153, 3282], [3152, 3282, 3281], [3153, 3154, 3282], [3154, 3283, 3282], [3154, 3155, 3284], [3154, 3284, 3283], [3155, 3156, 3284], [3156, 3285, 3284], [3156, 3157, 3286], [3156, 3286, 3285], [3157, 3158, 3286], [3158, 3287, 3286], [3158, 3159, 3288], [3158, 3288, 3287], [3159, 3160, 3288], [3160, 3289, 3288], [3160, 3161, 3290], [3160, 3290, 3289], [3161, 3162, 3290], [3162, 3291, 3290], [3162, 3163, 3292], [3162, 3292, 3291], [3163, 3164, 3292], [3164, 3293, 3292], [3164, 3165, 3294], [3164, 3294, 3293], [3165, 3166, 3294], [3166, 3295, 3294], [3166, 3167, 3296], [3166, 3296, 3295], [3167, 3168, 3296], [3168, 3297, 3296], [3168, 3169, 3298], [3168, 3298, 3297], [3169, 3170, 3298], [3170, 3299, 3298], [3170, 3171, 3300], [3170, 3300, 3299], [3171, 3172, 3300], [3172, 3301, 3300], [3172, 3173, 3302], [3172, 3302, 3301], [3173, 3174, 3302], [3174, 3303, 3302], [3174, 3175, 3304], [3174, 3304, 3303], [3175, 3176, 3304], [3176, 3305, 3304], [3176, 3177, 3306], [3176, 3306, 3305], [3177, 3178, 3306], [3178, 3307, 3306], [3178, 3179, 3308], [3178, 3308, 3307], [3179, 3180, 3308], [3180, 3309, 3308], [3180, 3181, 3310], [3180, 3310, 3309], [3181, 3182, 3310], [3182, 3311, 3310], [3182, 3183, 3312], [3182, 3312, 3311], [3183, 3184, 3312], [3184, 3313, 3312], [3184, 3185, 3314], [3184, 3314, 3313], [3185, 3186, 3314], [3186, 3315, 3314], [3186, 3187, 3316], [3186, 3316, 3315], [3187, 3188, 3316], [3188, 3317, 3316], [3188, 3189, 3318], [3188, 3318, 3317], [3189, 3190, 3318], [3190, 3319, 3318], [3190, 3191, 3320], [3190, 3320, 3319], [3191, 3192, 3320], [3192, 3321, 3320], [3192, 3193, 3322], [3192, 3322, 3321], [3193, 3194, 3322], [3194, 3323, 3322], [3194, 3195, 3324], [3194, 3324, 3323], [3195, 3196, 3324], [3196, 3325, 3324], [3196, 3197, 3326], [3196, 3326, 3325], [3197, 3198, 3326], [3198, 3327, 3326], [3198, 3199, 3328], [3198, 3328, 3327], [3199, 3200, 3328], [3200, 3329, 3328], [3200, 3201, 3330], [3200, 3330, 3329], [3201, 3202, 3330], [3202, 3331, 3330], [3202, 3203, 3332], [3202, 3332, 3331], [3203, 3204, 3332], [3204, 3333, 3332], [3204, 3205, 3334], [3204, 3334, 3333], [3205, 3206, 3334], [3206, 3335, 3334], [3206, 3207, 3336], [3206, 3336, 3335], [3207, 3208, 3336], [3208, 3337, 3336], [3208, 3209, 3338], [3208, 3338, 3337], [3209, 3210, 3338], [3210, 3339, 3338], [3210, 3211, 3340], [3210, 3340, 3339], [3211, 3212, 3340], [3212, 3341, 3340], [3212, 3213, 3342], [3212, 3342, 3341], [3213, 3214, 3342], [3214, 3343, 3342], [3214, 3215, 3344], [3214, 3344, 3343], [3215, 3216, 3344], [3216, 3345, 3344], [3216, 3217, 3346], [3216, 3346, 3345], [3217, 3218, 3346], [3218, 3347, 3346], [3218, 3219, 3348], [3218, 
3348, 3347], [3219, 3220, 3348], [3220, 3349, 3348], [3220, 3221, 3350], [3220, 3350, 3349], [3221, 3222, 3350], [3222, 3351, 3350], [3222, 3223, 3352], [3222, 3352, 3351], [3223, 3224, 3352], [3224, 3353, 3352], [3225, 3226, 3354], [3226, 3355, 3354], [3226, 3227, 3356], [3226, 3356, 3355], [3227, 3228, 3356], [3228, 3357, 3356], [3228, 3229, 3358], [3228, 3358, 3357], [3229, 3230, 3358], [3230, 3359, 3358], [3230, 3231, 3360], [3230, 3360, 3359], [3231, 3232, 3360], [3232, 3361, 3360], [3232, 3233, 3362], [3232, 3362, 3361], [3233, 3234, 3362], [3234, 3363, 3362], [3234, 3235, 3364], [3234, 3364, 3363], [3235, 3236, 3364], [3236, 3365, 3364], [3236, 3237, 3366], [3236, 3366, 3365], [3237, 3238, 3366], [3238, 3367, 3366], [3238, 3239, 3368], [3238, 3368, 3367], [3239, 3240, 3368], [3240, 3369, 3368], [3240, 3241, 3370], [3240, 3370, 3369], [3241, 3242, 3370], [3242, 3371, 3370], [3242, 3243, 3372], [3242, 3372, 3371], [3243, 3244, 3372], [3244, 3373, 3372], [3244, 3245, 3374], [3244, 3374, 3373], [3245, 3246, 3374], [3246, 3375, 3374], [3246, 3247, 3376], [3246, 3376, 3375], [3247, 3248, 3376], [3248, 3377, 3376], [3248, 3249, 3378], [3248, 3378, 3377], [3249, 3250, 3378], [3250, 3379, 3378], [3250, 3251, 3380], [3250, 3380, 3379], [3251, 3252, 3380], [3252, 3381, 3380], [3252, 3253, 3382], [3252, 3382, 3381], [3253, 3254, 3382], [3254, 3383, 3382], [3254, 3255, 3384], [3254, 3384, 3383], [3255, 3256, 3384], [3256, 3385, 3384], [3256, 3257, 3386], [3256, 3386, 3385], [3257, 3258, 3386], [3258, 3387, 3386], [3258, 3259, 3388], [3258, 3388, 3387], [3259, 3260, 3388], [3260, 3389, 3388], [3260, 3261, 3390], [3260, 3390, 3389], [3261, 3262, 3390], [3262, 3391, 3390], [3262, 3263, 3392], [3262, 3392, 3391], [3263, 3264, 3392], [3264, 3393, 3392], [3264, 3265, 3394], [3264, 3394, 3393], [3265, 3266, 3394], [3266, 3395, 3394], [3266, 3267, 3396], [3266, 3396, 3395], [3267, 3268, 3396], [3268, 3397, 3396], [3268, 3269, 3398], [3268, 3398, 3397], [3269, 3270, 3398], [3270, 3399, 3398], [3270, 3271, 3400], [3270, 3400, 3399], [3271, 3272, 3400], [3272, 3401, 3400], [3272, 3273, 3402], [3272, 3402, 3401], [3273, 3274, 3402], [3274, 3403, 3402], [3274, 3275, 3404], [3274, 3404, 3403], [3275, 3276, 3404], [3276, 3405, 3404], [3276, 3277, 3406], [3276, 3406, 3405], [3277, 3278, 3406], [3278, 3407, 3406], [3278, 3279, 3408], [3278, 3408, 3407], [3279, 3280, 3408], [3280, 3409, 3408], [3280, 3281, 3410], [3280, 3410, 3409], [3281, 3282, 3410], [3282, 3411, 3410], [3282, 3283, 3412], [3282, 3412, 3411], [3283, 3284, 3412], [3284, 3413, 3412], [3284, 3285, 3414], [3284, 3414, 3413], [3285, 3286, 3414], [3286, 3415, 3414], [3286, 3287, 3416], [3286, 3416, 3415], [3287, 3288, 3416], [3288, 3417, 3416], [3288, 3289, 3418], [3288, 3418, 3417], [3289, 3290, 3418], [3290, 3419, 3418], [3290, 3291, 3420], [3290, 3420, 3419], [3291, 3292, 3420], [3292, 3421, 3420], [3292, 3293, 3422], [3292, 3422, 3421], [3293, 3294, 3422], [3294, 3423, 3422], [3294, 3295, 3424], [3294, 3424, 3423], [3295, 3296, 3424], [3296, 3425, 3424], [3296, 3297, 3426], [3296, 3426, 3425], [3297, 3298, 3426], [3298, 3427, 3426], [3298, 3299, 3428], [3298, 3428, 3427], [3299, 3300, 3428], [3300, 3429, 3428], [3300, 3301, 3430], [3300, 3430, 3429], [3301, 3302, 3430], [3302, 3431, 3430], [3302, 3303, 3432], [3302, 3432, 3431], [3303, 3304, 3432], [3304, 3433, 3432], [3304, 3305, 3434], [3304, 3434, 3433], [3305, 3306, 3434], [3306, 3435, 3434], [3306, 3307, 3436], [3306, 3436, 3435], [3307, 3308, 3436], [3308, 3437, 3436], [3308, 3309, 3438], 
[3308, 3438, 3437], [3309, 3310, 3438], [3310, 3439, 3438], [3310, 3311, 3440], [3310, 3440, 3439], [3311, 3312, 3440], [3312, 3441, 3440], [3312, 3313, 3442], [3312, 3442, 3441], [3313, 3314, 3442], [3314, 3443, 3442], [3314, 3315, 3444], [3314, 3444, 3443], [3315, 3316, 3444], [3316, 3445, 3444], [3316, 3317, 3446], [3316, 3446, 3445], [3317, 3318, 3446], [3318, 3447, 3446], [3318, 3319, 3448], [3318, 3448, 3447], [3319, 3320, 3448], [3320, 3449, 3448], [3320, 3321, 3450], [3320, 3450, 3449], [3321, 3322, 3450], [3322, 3451, 3450], [3322, 3323, 3452], [3322, 3452, 3451], [3323, 3324, 3452], [3324, 3453, 3452], [3324, 3325, 3454], [3324, 3454, 3453], [3325, 3326, 3454], [3326, 3455, 3454], [3326, 3327, 3456], [3326, 3456, 3455], [3327, 3328, 3456], [3328, 3457, 3456], [3328, 3329, 3458], [3328, 3458, 3457], [3329, 3330, 3458], [3330, 3459, 3458], [3330, 3331, 3460], [3330, 3460, 3459], [3331, 3332, 3460], [3332, 3461, 3460], [3332, 3333, 3462], [3332, 3462, 3461], [3333, 3334, 3462], [3334, 3463, 3462], [3334, 3335, 3464], [3334, 3464, 3463], [3335, 3336, 3464], [3336, 3465, 3464], [3336, 3337, 3466], [3336, 3466, 3465], [3337, 3338, 3466], [3338, 3467, 3466], [3338, 3339, 3468], [3338, 3468, 3467], [3339, 3340, 3468], [3340, 3469, 3468], [3340, 3341, 3470], [3340, 3470, 3469], [3341, 3342, 3470], [3342, 3471, 3470], [3342, 3343, 3472], [3342, 3472, 3471], [3343, 3344, 3472], [3344, 3473, 3472], [3344, 3345, 3474], [3344, 3474, 3473], [3345, 3346, 3474], [3346, 3475, 3474], [3346, 3347, 3476], [3346, 3476, 3475], [3347, 3348, 3476], [3348, 3477, 3476], [3348, 3349, 3478], [3348, 3478, 3477], [3349, 3350, 3478], [3350, 3479, 3478], [3350, 3351, 3480], [3350, 3480, 3479], [3351, 3352, 3480], [3352, 3481, 3480], [3352, 3353, 3482], [3352, 3482, 3481], [3354, 3355, 3484], [3354, 3484, 3483], [3355, 3356, 3484], [3356, 3485, 3484], [3356, 3357, 3486], [3356, 3486, 3485], [3357, 3358, 3486], [3358, 3487, 3486], [3358, 3359, 3488], [3358, 3488, 3487], [3359, 3360, 3488], [3360, 3489, 3488], [3360, 3361, 3490], [3360, 3490, 3489], [3361, 3362, 3490], [3362, 3491, 3490], [3362, 3363, 3492], [3362, 3492, 3491], [3363, 3364, 3492], [3364, 3493, 3492], [3364, 3365, 3494], [3364, 3494, 3493], [3365, 3366, 3494], [3366, 3495, 3494], [3366, 3367, 3496], [3366, 3496, 3495], [3367, 3368, 3496], [3368, 3497, 3496], [3368, 3369, 3498], [3368, 3498, 3497], [3369, 3370, 3498], [3370, 3499, 3498], [3370, 3371, 3500], [3370, 3500, 3499], [3371, 3372, 3500], [3372, 3501, 3500], [3372, 3373, 3502], [3372, 3502, 3501], [3373, 3374, 3502], [3374, 3503, 3502], [3374, 3375, 3504], [3374, 3504, 3503], [3375, 3376, 3504], [3376, 3505, 3504], [3376, 3377, 3506], [3376, 3506, 3505], [3377, 3378, 3506], [3378, 3507, 3506], [3378, 3379, 3508], [3378, 3508, 3507], [3379, 3380, 3508], [3380, 3509, 3508], [3380, 3381, 3510], [3380, 3510, 3509], [3381, 3382, 3510], [3382, 3511, 3510], [3382, 3383, 3512], [3382, 3512, 3511], [3383, 3384, 3512], [3384, 3513, 3512], [3384, 3385, 3514], [3384, 3514, 3513], [3385, 3386, 3514], [3386, 3515, 3514], [3386, 3387, 3516], [3386, 3516, 3515], [3387, 3388, 3516], [3388, 3517, 3516], [3388, 3389, 3518], [3388, 3518, 3517], [3389, 3390, 3518], [3390, 3519, 3518], [3390, 3391, 3520], [3390, 3520, 3519], [3391, 3392, 3520], [3392, 3521, 3520], [3392, 3393, 3522], [3392, 3522, 3521], [3393, 3394, 3522], [3394, 3523, 3522], [3394, 3395, 3524], [3394, 3524, 3523], [3395, 3396, 3524], [3396, 3525, 3524], [3396, 3397, 3526], [3396, 3526, 3525], [3397, 3398, 3526], [3398, 3527, 3526], [3398, 3399, 
3528], [3398, 3528, 3527], [3399, 3400, 3528], [3400, 3529, 3528], [3400, 3401, 3530], [3400, 3530, 3529], [3401, 3402, 3530], [3402, 3531, 3530], [3402, 3403, 3532], [3402, 3532, 3531], [3403, 3404, 3532], [3404, 3533, 3532], [3404, 3405, 3534], [3404, 3534, 3533], [3405, 3406, 3534], [3406, 3535, 3534], [3406, 3407, 3536], [3406, 3536, 3535], [3407, 3408, 3536], [3408, 3537, 3536], [3408, 3409, 3538], [3408, 3538, 3537], [3409, 3410, 3538], [3410, 3539, 3538], [3410, 3411, 3540], [3410, 3540, 3539], [3411, 3412, 3540], [3412, 3541, 3540], [3412, 3413, 3542], [3412, 3542, 3541], [3413, 3414, 3542], [3414, 3543, 3542], [3414, 3415, 3544], [3414, 3544, 3543], [3415, 3416, 3544], [3416, 3545, 3544], [3416, 3417, 3546], [3416, 3546, 3545], [3417, 3418, 3546], [3418, 3547, 3546], [3418, 3419, 3548], [3418, 3548, 3547], [3419, 3420, 3548], [3420, 3549, 3548], [3420, 3421, 3550], [3420, 3550, 3549], [3421, 3422, 3550], [3422, 3551, 3550], [3422, 3423, 3552], [3422, 3552, 3551], [3423, 3424, 3552], [3424, 3553, 3552], [3424, 3425, 3554], [3424, 3554, 3553], [3425, 3426, 3554], [3426, 3555, 3554], [3426, 3427, 3556], [3426, 3556, 3555], [3427, 3428, 3556], [3428, 3557, 3556], [3428, 3429, 3558], [3428, 3558, 3557], [3429, 3430, 3558], [3430, 3559, 3558], [3430, 3431, 3560], [3430, 3560, 3559], [3431, 3432, 3560], [3432, 3561, 3560], [3432, 3433, 3562], [3432, 3562, 3561], [3433, 3434, 3562], [3434, 3563, 3562], [3434, 3435, 3564], [3434, 3564, 3563], [3435, 3436, 3564], [3436, 3565, 3564], [3436, 3437, 3566], [3436, 3566, 3565], [3437, 3438, 3566], [3438, 3567, 3566], [3438, 3439, 3568], [3438, 3568, 3567], [3439, 3440, 3568], [3440, 3569, 3568], [3440, 3441, 3570], [3440, 3570, 3569], [3441, 3442, 3570], [3442, 3571, 3570], [3442, 3443, 3572], [3442, 3572, 3571], [3443, 3444, 3572], [3444, 3573, 3572], [3444, 3445, 3574], [3444, 3574, 3573], [3445, 3446, 3574], [3446, 3575, 3574], [3446, 3447, 3576], [3446, 3576, 3575], [3447, 3448, 3576], [3448, 3577, 3576], [3448, 3449, 3578], [3448, 3578, 3577], [3449, 3450, 3578], [3450, 3579, 3578], [3450, 3451, 3580], [3450, 3580, 3579], [3451, 3452, 3580], [3452, 3581, 3580], [3452, 3453, 3582], [3452, 3582, 3581], [3453, 3454, 3582], [3454, 3583, 3582], [3454, 3455, 3584], [3454, 3584, 3583], [3455, 3456, 3584], [3456, 3585, 3584], [3456, 3457, 3586], [3456, 3586, 3585], [3457, 3458, 3586], [3458, 3587, 3586], [3458, 3459, 3588], [3458, 3588, 3587], [3459, 3460, 3588], [3460, 3589, 3588], [3460, 3461, 3590], [3460, 3590, 3589], [3461, 3462, 3590], [3462, 3591, 3590], [3462, 3463, 3592], [3462, 3592, 3591], [3463, 3464, 3592], [3464, 3593, 3592], [3464, 3465, 3594], [3464, 3594, 3593], [3465, 3466, 3594], [3466, 3595, 3594], [3466, 3467, 3596], [3466, 3596, 3595], [3467, 3468, 3596], [3468, 3597, 3596], [3468, 3469, 3598], [3468, 3598, 3597], [3469, 3470, 3598], [3470, 3599, 3598], [3470, 3471, 3600], [3470, 3600, 3599], [3471, 3472, 3600], [3472, 3601, 3600], [3472, 3473, 3602], [3472, 3602, 3601], [3473, 3474, 3602], [3474, 3603, 3602], [3474, 3475, 3604], [3474, 3604, 3603], [3475, 3476, 3604], [3476, 3605, 3604], [3476, 3477, 3606], [3476, 3606, 3605], [3477, 3478, 3606], [3478, 3607, 3606], [3478, 3479, 3608], [3478, 3608, 3607], [3479, 3480, 3608], [3480, 3609, 3608], [3480, 3481, 3610], [3480, 3610, 3609], [3481, 3482, 3610], [3482, 3611, 3610], [3483, 3484, 3612], [3484, 3613, 3612], [3484, 3485, 3614], [3484, 3614, 3613], [3485, 3486, 3614], [3486, 3615, 3614], [3486, 3487, 3616], [3486, 3616, 3615], [3487, 3488, 3616], [3488, 3617, 3616], [3488, 
3489, 3618], [3488, 3618, 3617], [3489, 3490, 3618], [3490, 3619, 3618], [3490, 3491, 3620], [3490, 3620, 3619], [3491, 3492, 3620], [3492, 3621, 3620], [3492, 3493, 3622], [3492, 3622, 3621], [3493, 3494, 3622], [3494, 3623, 3622], [3494, 3495, 3624], [3494, 3624, 3623], [3495, 3496, 3624], [3496, 3625, 3624], [3496, 3497, 3626], [3496, 3626, 3625], [3497, 3498, 3626], [3498, 3627, 3626], [3498, 3499, 3628], [3498, 3628, 3627], [3499, 3500, 3628], [3500, 3629, 3628], [3500, 3501, 3630], [3500, 3630, 3629], [3501, 3502, 3630], [3502, 3631, 3630], [3502, 3503, 3632], [3502, 3632, 3631], [3503, 3504, 3632], [3504, 3633, 3632], [3504, 3505, 3634], [3504, 3634, 3633], [3505, 3506, 3634], [3506, 3635, 3634], [3506, 3507, 3636], [3506, 3636, 3635], [3507, 3508, 3636], [3508, 3637, 3636], [3508, 3509, 3638], [3508, 3638, 3637], [3509, 3510, 3638], [3510, 3639, 3638], [3510, 3511, 3640], [3510, 3640, 3639], [3511, 3512, 3640], [3512, 3641, 3640], [3512, 3513, 3642], [3512, 3642, 3641], [3513, 3514, 3642], [3514, 3643, 3642], [3514, 3515, 3644], [3514, 3644, 3643], [3515, 3516, 3644], [3516, 3645, 3644], [3516, 3517, 3646], [3516, 3646, 3645], [3517, 3518, 3646], [3518, 3647, 3646], [3518, 3519, 3648], [3518, 3648, 3647], [3519, 3520, 3648], [3520, 3649, 3648], [3520, 3521, 3650], [3520, 3650, 3649], [3521, 3522, 3650], [3522, 3651, 3650], [3522, 3523, 3652], [3522, 3652, 3651], [3523, 3524, 3652], [3524, 3653, 3652], [3524, 3525, 3654], [3524, 3654, 3653], [3525, 3526, 3654], [3526, 3655, 3654], [3526, 3527, 3656], [3526, 3656, 3655], [3527, 3528, 3656], [3528, 3657, 3656], [3528, 3529, 3658], [3528, 3658, 3657], [3529, 3530, 3658], [3530, 3659, 3658], [3530, 3531, 3660], [3530, 3660, 3659], [3531, 3532, 3660], [3532, 3661, 3660], [3532, 3533, 3662], [3532, 3662, 3661], [3533, 3534, 3662], [3534, 3663, 3662], [3534, 3535, 3664], [3534, 3664, 3663], [3535, 3536, 3664], [3536, 3665, 3664], [3536, 3537, 3666], [3536, 3666, 3665], [3537, 3538, 3666], [3538, 3667, 3666], [3538, 3539, 3668], [3538, 3668, 3667], [3539, 3540, 3668], [3540, 3669, 3668], [3540, 3541, 3670], [3540, 3670, 3669], [3541, 3542, 3670], [3542, 3671, 3670], [3542, 3543, 3672], [3542, 3672, 3671], [3543, 3544, 3672], [3544, 3673, 3672], [3544, 3545, 3674], [3544, 3674, 3673], [3545, 3546, 3674], [3546, 3675, 3674], [3546, 3547, 3676], [3546, 3676, 3675], [3547, 3548, 3676], [3548, 3677, 3676], [3548, 3549, 3678], [3548, 3678, 3677], [3549, 3550, 3678], [3550, 3679, 3678], [3550, 3551, 3680], [3550, 3680, 3679], [3551, 3552, 3680], [3552, 3681, 3680], [3552, 3553, 3682], [3552, 3682, 3681], [3553, 3554, 3682], [3554, 3683, 3682], [3554, 3555, 3684], [3554, 3684, 3683], [3555, 3556, 3684], [3556, 3685, 3684], [3556, 3557, 3686], [3556, 3686, 3685], [3557, 3558, 3686], [3558, 3687, 3686], [3558, 3559, 3688], [3558, 3688, 3687], [3559, 3560, 3688], [3560, 3689, 3688], [3560, 3561, 3690], [3560, 3690, 3689], [3561, 3562, 3690], [3562, 3691, 3690], [3562, 3563, 3692], [3562, 3692, 3691], [3563, 3564, 3692], [3564, 3693, 3692], [3564, 3565, 3694], [3564, 3694, 3693], [3565, 3566, 3694], [3566, 3695, 3694], [3566, 3567, 3696], [3566, 3696, 3695], [3567, 3568, 3696], [3568, 3697, 3696], [3568, 3569, 3698], [3568, 3698, 3697], [3569, 3570, 3698], [3570, 3699, 3698], [3570, 3571, 3700], [3570, 3700, 3699], [3571, 3572, 3700], [3572, 3701, 3700], [3572, 3573, 3702], [3572, 3702, 3701], [3573, 3574, 3702], [3574, 3703, 3702], [3574, 3575, 3704], [3574, 3704, 3703], [3575, 3576, 3704], [3576, 3705, 3704], [3576, 3577, 3706], [3576, 3706, 3705], 
[3577, 3578, 3706], [3578, 3707, 3706], [3578, 3579, 3708], [3578, 3708, 3707], [3579, 3580, 3708], [3580, 3709, 3708], [3580, 3581, 3710], [3580, 3710, 3709], [3581, 3582, 3710], [3582, 3711, 3710], [3582, 3583, 3712], [3582, 3712, 3711], [3583, 3584, 3712], [3584, 3713, 3712], [3584, 3585, 3714], [3584, 3714, 3713], [3585, 3586, 3714], [3586, 3715, 3714], [3586, 3587, 3716], [3586, 3716, 3715], [3587, 3588, 3716], [3588, 3717, 3716], [3588, 3589, 3718], [3588, 3718, 3717], [3589, 3590, 3718], [3590, 3719, 3718], [3590, 3591, 3720], [3590, 3720, 3719], [3591, 3592, 3720], [3592, 3721, 3720], [3592, 3593, 3722], [3592, 3722, 3721], [3593, 3594, 3722], [3594, 3723, 3722], [3594, 3595, 3724], [3594, 3724, 3723], [3595, 3596, 3724], [3596, 3725, 3724], [3596, 3597, 3726], [3596, 3726, 3725], [3597, 3598, 3726], [3598, 3727, 3726], [3598, 3599, 3728], [3598, 3728, 3727], [3599, 3600, 3728], [3600, 3729, 3728], [3600, 3601, 3730], [3600, 3730, 3729], [3601, 3602, 3730], [3602, 3731, 3730], [3602, 3603, 3732], [3602, 3732, 3731], [3603, 3604, 3732], [3604, 3733, 3732], [3604, 3605, 3734], [3604, 3734, 3733], [3605, 3606, 3734], [3606, 3735, 3734], [3606, 3607, 3736], [3606, 3736, 3735], [3607, 3608, 3736], [3608, 3737, 3736], [3608, 3609, 3738], [3608, 3738, 3737], [3609, 3610, 3738], [3610, 3739, 3738], [3610, 3611, 3740], [3610, 3740, 3739], [3612, 3613, 3742], [3612, 3742, 3741], [3613, 3614, 3742], [3614, 3743, 3742], [3614, 3615, 3744], [3614, 3744, 3743], [3615, 3616, 3744], [3616, 3745, 3744], [3616, 3617, 3746], [3616, 3746, 3745], [3617, 3618, 3746], [3618, 3747, 3746], [3618, 3619, 3748], [3618, 3748, 3747], [3619, 3620, 3748], [3620, 3749, 3748], [3620, 3621, 3750], [3620, 3750, 3749], [3621, 3622, 3750], [3622, 3751, 3750], [3622, 3623, 3752], [3622, 3752, 3751], [3623, 3624, 3752], [3624, 3753, 3752], [3624, 3625, 3754], [3624, 3754, 3753], [3625, 3626, 3754], [3626, 3755, 3754], [3626, 3627, 3756], [3626, 3756, 3755], [3627, 3628, 3756], [3628, 3757, 3756], [3628, 3629, 3758], [3628, 3758, 3757], [3629, 3630, 3758], [3630, 3759, 3758], [3630, 3631, 3760], [3630, 3760, 3759], [3631, 3632, 3760], [3632, 3761, 3760], [3632, 3633, 3762], [3632, 3762, 3761], [3633, 3634, 3762], [3634, 3763, 3762], [3634, 3635, 3764], [3634, 3764, 3763], [3635, 3636, 3764], [3636, 3765, 3764], [3636, 3637, 3766], [3636, 3766, 3765], [3637, 3638, 3766], [3638, 3767, 3766], [3638, 3639, 3768], [3638, 3768, 3767], [3639, 3640, 3768], [3640, 3769, 3768], [3640, 3641, 3770], [3640, 3770, 3769], [3641, 3642, 3770], [3642, 3771, 3770], [3642, 3643, 3772], [3642, 3772, 3771], [3643, 3644, 3772], [3644, 3773, 3772], [3644, 3645, 3774], [3644, 3774, 3773], [3645, 3646, 3774], [3646, 3775, 3774], [3646, 3647, 3776], [3646, 3776, 3775], [3647, 3648, 3776], [3648, 3777, 3776], [3648, 3649, 3778], [3648, 3778, 3777], [3649, 3650, 3778], [3650, 3779, 3778], [3650, 3651, 3780], [3650, 3780, 3779], [3651, 3652, 3780], [3652, 3781, 3780], [3652, 3653, 3782], [3652, 3782, 3781], [3653, 3654, 3782], [3654, 3783, 3782], [3654, 3655, 3784], [3654, 3784, 3783], [3655, 3656, 3784], [3656, 3785, 3784], [3656, 3657, 3786], [3656, 3786, 3785], [3657, 3658, 3786], [3658, 3787, 3786], [3658, 3659, 3788], [3658, 3788, 3787], [3659, 3660, 3788], [3660, 3789, 3788], [3660, 3661, 3790], [3660, 3790, 3789], [3661, 3662, 3790], [3662, 3791, 3790], [3662, 3663, 3792], [3662, 3792, 3791], [3663, 3664, 3792], [3664, 3793, 3792], [3664, 3665, 3794], [3664, 3794, 3793], [3665, 3666, 3794], [3666, 3795, 3794], [3666, 3667, 3796], [3666, 3796, 
3795], [3667, 3668, 3796], [3668, 3797, 3796], [3668, 3669, 3798], [3668, 3798, 3797], [3669, 3670, 3798], [3670, 3799, 3798], [3670, 3671, 3800], [3670, 3800, 3799], [3671, 3672, 3800], [3672, 3801, 3800], [3672, 3673, 3802], [3672, 3802, 3801], [3673, 3674, 3802], [3674, 3803, 3802], [3674, 3675, 3804], [3674, 3804, 3803], [3675, 3676, 3804], [3676, 3805, 3804], [3676, 3677, 3806], [3676, 3806, 3805], [3677, 3678, 3806], [3678, 3807, 3806], [3678, 3679, 3808], [3678, 3808, 3807], [3679, 3680, 3808], [3680, 3809, 3808], [3680, 3681, 3810], [3680, 3810, 3809], [3681, 3682, 3810], [3682, 3811, 3810], [3682, 3683, 3812], [3682, 3812, 3811], [3683, 3684, 3812], [3684, 3813, 3812], [3684, 3685, 3814], [3684, 3814, 3813], [3685, 3686, 3814], [3686, 3815, 3814], [3686, 3687, 3816], [3686, 3816, 3815], [3687, 3688, 3816], [3688, 3817, 3816], [3688, 3689, 3818], [3688, 3818, 3817], [3689, 3690, 3818], [3690, 3819, 3818], [3690, 3691, 3820], [3690, 3820, 3819], [3691, 3692, 3820], [3692, 3821, 3820], [3692, 3693, 3822], [3692, 3822, 3821], [3693, 3694, 3822], [3694, 3823, 3822], [3694, 3695, 3824], [3694, 3824, 3823], [3695, 3696, 3824], [3696, 3825, 3824], [3696, 3697, 3826], [3696, 3826, 3825], [3697, 3698, 3826], [3698, 3827, 3826], [3698, 3699, 3828], [3698, 3828, 3827], [3699, 3700, 3828], [3700, 3829, 3828], [3700, 3701, 3830], [3700, 3830, 3829], [3701, 3702, 3830], [3702, 3831, 3830], [3702, 3703, 3832], [3702, 3832, 3831], [3703, 3704, 3832], [3704, 3833, 3832], [3704, 3705, 3834], [3704, 3834, 3833], [3705, 3706, 3834], [3706, 3835, 3834], [3706, 3707, 3836], [3706, 3836, 3835], [3707, 3708, 3836], [3708, 3837, 3836], [3708, 3709, 3838], [3708, 3838, 3837], [3709, 3710, 3838], [3710, 3839, 3838], [3710, 3711, 3840], [3710, 3840, 3839], [3711, 3712, 3840], [3712, 3841, 3840], [3712, 3713, 3842], [3712, 3842, 3841], [3713, 3714, 3842], [3714, 3843, 3842], [3714, 3715, 3844], [3714, 3844, 3843], [3715, 3716, 3844], [3716, 3845, 3844], [3716, 3717, 3846], [3716, 3846, 3845], [3717, 3718, 3846], [3718, 3847, 3846], [3718, 3719, 3848], [3718, 3848, 3847], [3719, 3720, 3848], [3720, 3849, 3848], [3720, 3721, 3850], [3720, 3850, 3849], [3721, 3722, 3850], [3722, 3851, 3850], [3722, 3723, 3852], [3722, 3852, 3851], [3723, 3724, 3852], [3724, 3853, 3852], [3724, 3725, 3854], [3724, 3854, 3853], [3725, 3726, 3854], [3726, 3855, 3854], [3726, 3727, 3856], [3726, 3856, 3855], [3727, 3728, 3856], [3728, 3857, 3856], [3728, 3729, 3858], [3728, 3858, 3857], [3729, 3730, 3858], [3730, 3859, 3858], [3730, 3731, 3860], [3730, 3860, 3859], [3731, 3732, 3860], [3732, 3861, 3860], [3732, 3733, 3862], [3732, 3862, 3861], [3733, 3734, 3862], [3734, 3863, 3862], [3734, 3735, 3864], [3734, 3864, 3863], [3735, 3736, 3864], [3736, 3865, 3864], [3736, 3737, 3866], [3736, 3866, 3865], [3737, 3738, 3866], [3738, 3867, 3866], [3738, 3739, 3868], [3738, 3868, 3867], [3739, 3740, 3868], [3740, 3869, 3868], [3741, 3742, 3870], [3742, 3871, 3870], [3742, 3743, 3872], [3742, 3872, 3871], [3743, 3744, 3872], [3744, 3873, 3872], [3744, 3745, 3874], [3744, 3874, 3873], [3745, 3746, 3874], [3746, 3875, 3874], [3746, 3747, 3876], [3746, 3876, 3875], [3747, 3748, 3876], [3748, 3877, 3876], [3748, 3749, 3878], [3748, 3878, 3877], [3749, 3750, 3878], [3750, 3879, 3878], [3750, 3751, 3880], [3750, 3880, 3879], [3751, 3752, 3880], [3752, 3881, 3880], [3752, 3753, 3882], [3752, 3882, 3881], [3753, 3754, 3882], [3754, 3883, 3882], [3754, 3755, 3884], [3754, 3884, 3883], [3755, 3756, 3884], [3756, 3885, 3884], [3756, 3757, 3886], [3756, 
3886, 3885], [3757, 3758, 3886], [3758, 3887, 3886], [3758, 3759, 3888], [3758, 3888, 3887], [3759, 3760, 3888], [3760, 3889, 3888], [3760, 3761, 3890], [3760, 3890, 3889], [3761, 3762, 3890], [3762, 3891, 3890], [3762, 3763, 3892], [3762, 3892, 3891], [3763, 3764, 3892], [3764, 3893, 3892], [3764, 3765, 3894], [3764, 3894, 3893], [3765, 3766, 3894], [3766, 3895, 3894], [3766, 3767, 3896], [3766, 3896, 3895], [3767, 3768, 3896], [3768, 3897, 3896], [3768, 3769, 3898], [3768, 3898, 3897], [3769, 3770, 3898], [3770, 3899, 3898], [3770, 3771, 3900], [3770, 3900, 3899], [3771, 3772, 3900], [3772, 3901, 3900], [3772, 3773, 3902], [3772, 3902, 3901], [3773, 3774, 3902], [3774, 3903, 3902], [3774, 3775, 3904], [3774, 3904, 3903], [3775, 3776, 3904], [3776, 3905, 3904], [3776, 3777, 3906], [3776, 3906, 3905], [3777, 3778, 3906], [3778, 3907, 3906], [3778, 3779, 3908], [3778, 3908, 3907], [3779, 3780, 3908], [3780, 3909, 3908], [3780, 3781, 3910], [3780, 3910, 3909], [3781, 3782, 3910], [3782, 3911, 3910], [3782, 3783, 3912], [3782, 3912, 3911], [3783, 3784, 3912], [3784, 3913, 3912], [3784, 3785, 3914], [3784, 3914, 3913], [3785, 3786, 3914], [3786, 3915, 3914], [3786, 3787, 3916], [3786, 3916, 3915], [3787, 3788, 3916], [3788, 3917, 3916], [3788, 3789, 3918], [3788, 3918, 3917], [3789, 3790, 3918], [3790, 3919, 3918], [3790, 3791, 3920], [3790, 3920, 3919], [3791, 3792, 3920], [3792, 3921, 3920], [3792, 3793, 3922], [3792, 3922, 3921], [3793, 3794, 3922], [3794, 3923, 3922], [3794, 3795, 3924], [3794, 3924, 3923], [3795, 3796, 3924], [3796, 3925, 3924], [3796, 3797, 3926], [3796, 3926, 3925], [3797, 3798, 3926], [3798, 3927, 3926], [3798, 3799, 3928], [3798, 3928, 3927], [3799, 3800, 3928], [3800, 3929, 3928], [3800, 3801, 3930], [3800, 3930, 3929], [3801, 3802, 3930], [3802, 3931, 3930], [3802, 3803, 3932], [3802, 3932, 3931], [3803, 3804, 3932], [3804, 3933, 3932], [3804, 3805, 3934], [3804, 3934, 3933], [3805, 3806, 3934], [3806, 3935, 3934], [3806, 3807, 3936], [3806, 3936, 3935], [3807, 3808, 3936], [3808, 3937, 3936], [3808, 3809, 3938], [3808, 3938, 3937], [3809, 3810, 3938], [3810, 3939, 3938], [3810, 3811, 3940], [3810, 3940, 3939], [3811, 3812, 3940], [3812, 3941, 3940], [3812, 3813, 3942], [3812, 3942, 3941], [3813, 3814, 3942], [3814, 3943, 3942], [3814, 3815, 3944], [3814, 3944, 3943], [3815, 3816, 3944], [3816, 3945, 3944], [3816, 3817, 3946], [3816, 3946, 3945], [3817, 3818, 3946], [3818, 3947, 3946], [3818, 3819, 3948], [3818, 3948, 3947], [3819, 3820, 3948], [3820, 3949, 3948], [3820, 3821, 3950], [3820, 3950, 3949], [3821, 3822, 3950], [3822, 3951, 3950], [3822, 3823, 3952], [3822, 3952, 3951], [3823, 3824, 3952], [3824, 3953, 3952], [3824, 3825, 3954], [3824, 3954, 3953], [3825, 3826, 3954], [3826, 3955, 3954], [3826, 3827, 3956], [3826, 3956, 3955], [3827, 3828, 3956], [3828, 3957, 3956], [3828, 3829, 3958], [3828, 3958, 3957], [3829, 3830, 3958], [3830, 3959, 3958], [3830, 3831, 3960], [3830, 3960, 3959], [3831, 3832, 3960], [3832, 3961, 3960], [3832, 3833, 3962], [3832, 3962, 3961], [3833, 3834, 3962], [3834, 3963, 3962], [3834, 3835, 3964], [3834, 3964, 3963], [3835, 3836, 3964], [3836, 3965, 3964], [3836, 3837, 3966], [3836, 3966, 3965], [3837, 3838, 3966], [3838, 3967, 3966], [3838, 3839, 3968], [3838, 3968, 3967], [3839, 3840, 3968], [3840, 3969, 3968], [3840, 3841, 3970], [3840, 3970, 3969], [3841, 3842, 3970], [3842, 3971, 3970], [3842, 3843, 3972], [3842, 3972, 3971], [3843, 3844, 3972], [3844, 3973, 3972], [3844, 3845, 3974], [3844, 3974, 3973], [3845, 3846, 3974], 
[3846, 3975, 3974], [3846, 3847, 3976], [3846, 3976, 3975], [3847, 3848, 3976], [3848, 3977, 3976], [3848, 3849, 3978], [3848, 3978, 3977], [3849, 3850, 3978], [3850, 3979, 3978], [3850, 3851, 3980], [3850, 3980, 3979], [3851, 3852, 3980], [3852, 3981, 3980], [3852, 3853, 3982], [3852, 3982, 3981], [3853, 3854, 3982], [3854, 3983, 3982], [3854, 3855, 3984], [3854, 3984, 3983], [3855, 3856, 3984], [3856, 3985, 3984], [3856, 3857, 3986], [3856, 3986, 3985], [3857, 3858, 3986], [3858, 3987, 3986], [3858, 3859, 3988], [3858, 3988, 3987], [3859, 3860, 3988], [3860, 3989, 3988], [3860, 3861, 3990], [3860, 3990, 3989], [3861, 3862, 3990], [3862, 3991, 3990], [3862, 3863, 3992], [3862, 3992, 3991], [3863, 3864, 3992], [3864, 3993, 3992], [3864, 3865, 3994], [3864, 3994, 3993], [3865, 3866, 3994], [3866, 3995, 3994], [3866, 3867, 3996], [3866, 3996, 3995], [3867, 3868, 3996], [3868, 3997, 3996], [3868, 3869, 3998], [3868, 3998, 3997], [3870, 3871, 4000], [3870, 4000, 3999], [3871, 3872, 4000], [3872, 4001, 4000], [3872, 3873, 4002], [3872, 4002, 4001], [3873, 3874, 4002], [3874, 4003, 4002], [3874, 3875, 4004], [3874, 4004, 4003], [3875, 3876, 4004], [3876, 4005, 4004], [3876, 3877, 4006], [3876, 4006, 4005], [3877, 3878, 4006], [3878, 4007, 4006], [3878, 3879, 4008], [3878, 4008, 4007], [3879, 3880, 4008], [3880, 4009, 4008], [3880, 3881, 4010], [3880, 4010, 4009], [3881, 3882, 4010], [3882, 4011, 4010], [3882, 3883, 4012], [3882, 4012, 4011], [3883, 3884, 4012], [3884, 4013, 4012], [3884, 3885, 4014], [3884, 4014, 4013], [3885, 3886, 4014], [3886, 4015, 4014], [3886, 3887, 4016], [3886, 4016, 4015], [3887, 3888, 4016], [3888, 4017, 4016], [3888, 3889, 4018], [3888, 4018, 4017], [3889, 3890, 4018], [3890, 4019, 4018], [3890, 3891, 4020], [3890, 4020, 4019], [3891, 3892, 4020], [3892, 4021, 4020], [3892, 3893, 4022], [3892, 4022, 4021], [3893, 3894, 4022], [3894, 4023, 4022], [3894, 3895, 4024], [3894, 4024, 4023], [3895, 3896, 4024], [3896, 4025, 4024], [3896, 3897, 4026], [3896, 4026, 4025], [3897, 3898, 4026], [3898, 4027, 4026], [3898, 3899, 4028], [3898, 4028, 4027], [3899, 3900, 4028], [3900, 4029, 4028], [3900, 3901, 4030], [3900, 4030, 4029], [3901, 3902, 4030], [3902, 4031, 4030], [3902, 3903, 4032], [3902, 4032, 4031], [3903, 3904, 4032], [3904, 4033, 4032], [3904, 3905, 4034], [3904, 4034, 4033], [3905, 3906, 4034], [3906, 4035, 4034], [3906, 3907, 4036], [3906, 4036, 4035], [3907, 3908, 4036], [3908, 4037, 4036], [3908, 3909, 4038], [3908, 4038, 4037], [3909, 3910, 4038], [3910, 4039, 4038], [3910, 3911, 4040], [3910, 4040, 4039], [3911, 3912, 4040], [3912, 4041, 4040], [3912, 3913, 4042], [3912, 4042, 4041], [3913, 3914, 4042], [3914, 4043, 4042], [3914, 3915, 4044], [3914, 4044, 4043], [3915, 3916, 4044], [3916, 4045, 4044], [3916, 3917, 4046], [3916, 4046, 4045], [3917, 3918, 4046], [3918, 4047, 4046], [3918, 3919, 4048], [3918, 4048, 4047], [3919, 3920, 4048], [3920, 4049, 4048], [3920, 3921, 4050], [3920, 4050, 4049], [3921, 3922, 4050], [3922, 4051, 4050], [3922, 3923, 4052], [3922, 4052, 4051], [3923, 3924, 4052], [3924, 4053, 4052], [3924, 3925, 4054], [3924, 4054, 4053], [3925, 3926, 4054], [3926, 4055, 4054], [3926, 3927, 4056], [3926, 4056, 4055], [3927, 3928, 4056], [3928, 4057, 4056], [3928, 3929, 4058], [3928, 4058, 4057], [3929, 3930, 4058], [3930, 4059, 4058], [3930, 3931, 4060], [3930, 4060, 4059], [3931, 3932, 4060], [3932, 4061, 4060], [3932, 3933, 4062], [3932, 4062, 4061], [3933, 3934, 4062], [3934, 4063, 4062], [3934, 3935, 4064], [3934, 4064, 4063], [3935, 3936, 
4064], [3936, 4065, 4064], [3936, 3937, 4066], [3936, 4066, 4065], [3937, 3938, 4066], [3938, 4067, 4066], [3938, 3939, 4068], [3938, 4068, 4067], [3939, 3940, 4068], [3940, 4069, 4068], [3940, 3941, 4070], [3940, 4070, 4069], [3941, 3942, 4070], [3942, 4071, 4070], [3942, 3943, 4072], [3942, 4072, 4071], [3943, 3944, 4072], [3944, 4073, 4072], [3944, 3945, 4074], [3944, 4074, 4073], [3945, 3946, 4074], [3946, 4075, 4074], [3946, 3947, 4076], [3946, 4076, 4075], [3947, 3948, 4076], [3948, 4077, 4076], [3948, 3949, 4078], [3948, 4078, 4077], [3949, 3950, 4078], [3950, 4079, 4078], [3950, 3951, 4080], [3950, 4080, 4079], [3951, 3952, 4080], [3952, 4081, 4080], [3952, 3953, 4082], [3952, 4082, 4081], [3953, 3954, 4082], [3954, 4083, 4082], [3954, 3955, 4084], [3954, 4084, 4083], [3955, 3956, 4084], [3956, 4085, 4084], [3956, 3957, 4086], [3956, 4086, 4085], [3957, 3958, 4086], [3958, 4087, 4086], [3958, 3959, 4088], [3958, 4088, 4087], [3959, 3960, 4088], [3960, 4089, 4088], [3960, 3961, 4090], [3960, 4090, 4089], [3961, 3962, 4090], [3962, 4091, 4090], [3962, 3963, 4092], [3962, 4092, 4091], [3963, 3964, 4092], [3964, 4093, 4092], [3964, 3965, 4094], [3964, 4094, 4093], [3965, 3966, 4094], [3966, 4095, 4094], [3966, 3967, 4096], [3966, 4096, 4095], [3967, 3968, 4096], [3968, 4097, 4096], [3968, 3969, 4098], [3968, 4098, 4097], [3969, 3970, 4098], [3970, 4099, 4098], [3970, 3971, 4100], [3970, 4100, 4099], [3971, 3972, 4100], [3972, 4101, 4100], [3972, 3973, 4102], [3972, 4102, 4101], [3973, 3974, 4102], [3974, 4103, 4102], [3974, 3975, 4104], [3974, 4104, 4103], [3975, 3976, 4104], [3976, 4105, 4104], [3976, 3977, 4106], [3976, 4106, 4105], [3977, 3978, 4106], [3978, 4107, 4106], [3978, 3979, 4108], [3978, 4108, 4107], [3979, 3980, 4108], [3980, 4109, 4108], [3980, 3981, 4110], [3980, 4110, 4109], [3981, 3982, 4110], [3982, 4111, 4110], [3982, 3983, 4112], [3982, 4112, 4111], [3983, 3984, 4112], [3984, 4113, 4112], [3984, 3985, 4114], [3984, 4114, 4113], [3985, 3986, 4114], [3986, 4115, 4114], [3986, 3987, 4116], [3986, 4116, 4115], [3987, 3988, 4116], [3988, 4117, 4116], [3988, 3989, 4118], [3988, 4118, 4117], [3989, 3990, 4118], [3990, 4119, 4118], [3990, 3991, 4120], [3990, 4120, 4119], [3991, 3992, 4120], [3992, 4121, 4120], [3992, 3993, 4122], [3992, 4122, 4121], [3993, 3994, 4122], [3994, 4123, 4122], [3994, 3995, 4124], [3994, 4124, 4123], [3995, 3996, 4124], [3996, 4125, 4124], [3996, 3997, 4126], [3996, 4126, 4125], [3997, 3998, 4126], [3998, 4127, 4126], [3999, 4000, 4128], [4000, 4129, 4128], [4000, 4001, 4130], [4000, 4130, 4129], [4001, 4002, 4130], [4002, 4131, 4130], [4002, 4003, 4132], [4002, 4132, 4131], [4003, 4004, 4132], [4004, 4133, 4132], [4004, 4005, 4134], [4004, 4134, 4133], [4005, 4006, 4134], [4006, 4135, 4134], [4006, 4007, 4136], [4006, 4136, 4135], [4007, 4008, 4136], [4008, 4137, 4136], [4008, 4009, 4138], [4008, 4138, 4137], [4009, 4010, 4138], [4010, 4139, 4138], [4010, 4011, 4140], [4010, 4140, 4139], [4011, 4012, 4140], [4012, 4141, 4140], [4012, 4013, 4142], [4012, 4142, 4141], [4013, 4014, 4142], [4014, 4143, 4142], [4014, 4015, 4144], [4014, 4144, 4143], [4015, 4016, 4144], [4016, 4145, 4144], [4016, 4017, 4146], [4016, 4146, 4145], [4017, 4018, 4146], [4018, 4147, 4146], [4018, 4019, 4148], [4018, 4148, 4147], [4019, 4020, 4148], [4020, 4149, 4148], [4020, 4021, 4150], [4020, 4150, 4149], [4021, 4022, 4150], [4022, 4151, 4150], [4022, 4023, 4152], [4022, 4152, 4151], [4023, 4024, 4152], [4024, 4153, 4152], [4024, 4025, 4154], [4024, 4154, 4153], [4025, 
4026, 4154], [4026, 4155, 4154], [4026, 4027, 4156], [4026, 4156, 4155], [4027, 4028, 4156], [4028, 4157, 4156], [4028, 4029, 4158], [4028, 4158, 4157], [4029, 4030, 4158], [4030, 4159, 4158], [4030, 4031, 4160], [4030, 4160, 4159], [4031, 4032, 4160], [4032, 4161, 4160], [4032, 4033, 4162], [4032, 4162, 4161], [4033, 4034, 4162], [4034, 4163, 4162], [4034, 4035, 4164], [4034, 4164, 4163], [4035, 4036, 4164], [4036, 4165, 4164], [4036, 4037, 4166], [4036, 4166, 4165], [4037, 4038, 4166], [4038, 4167, 4166], [4038, 4039, 4168], [4038, 4168, 4167], [4039, 4040, 4168], [4040, 4169, 4168], [4040, 4041, 4170], [4040, 4170, 4169], [4041, 4042, 4170], [4042, 4171, 4170], [4042, 4043, 4172], [4042, 4172, 4171], [4043, 4044, 4172], [4044, 4173, 4172], [4044, 4045, 4174], [4044, 4174, 4173], [4045, 4046, 4174], [4046, 4175, 4174], [4046, 4047, 4176], [4046, 4176, 4175], [4047, 4048, 4176], [4048, 4177, 4176], [4048, 4049, 4178], [4048, 4178, 4177], [4049, 4050, 4178], [4050, 4179, 4178], [4050, 4051, 4180], [4050, 4180, 4179], [4051, 4052, 4180], [4052, 4181, 4180], [4052, 4053, 4182], [4052, 4182, 4181], [4053, 4054, 4182], [4054, 4183, 4182], [4054, 4055, 4184], [4054, 4184, 4183], [4055, 4056, 4184], [4056, 4185, 4184], [4056, 4057, 4186], [4056, 4186, 4185], [4057, 4058, 4186], [4058, 4187, 4186], [4058, 4059, 4188], [4058, 4188, 4187], [4059, 4060, 4188], [4060, 4189, 4188], [4060, 4061, 4190], [4060, 4190, 4189], [4061, 4062, 4190], [4062, 4191, 4190], [4062, 4063, 4192], [4062, 4192, 4191], [4063, 4064, 4192], [4064, 4193, 4192], [4064, 4065, 4194], [4064, 4194, 4193], [4065, 4066, 4194], [4066, 4195, 4194], [4066, 4067, 4196], [4066, 4196, 4195], [4067, 4068, 4196], [4068, 4197, 4196], [4068, 4069, 4198], [4068, 4198, 4197], [4069, 4070, 4198], [4070, 4199, 4198], [4070, 4071, 4200], [4070, 4200, 4199], [4071, 4072, 4200], [4072, 4201, 4200], [4072, 4073, 4202], [4072, 4202, 4201], [4073, 4074, 4202], [4074, 4203, 4202], [4074, 4075, 4204], [4074, 4204, 4203], [4075, 4076, 4204], [4076, 4205, 4204], [4076, 4077, 4206], [4076, 4206, 4205], [4077, 4078, 4206], [4078, 4207, 4206], [4078, 4079, 4208], [4078, 4208, 4207], [4079, 4080, 4208], [4080, 4209, 4208], [4080, 4081, 4210], [4080, 4210, 4209], [4081, 4082, 4210], [4082, 4211, 4210], [4082, 4083, 4212], [4082, 4212, 4211], [4083, 4084, 4212], [4084, 4213, 4212], [4084, 4085, 4214], [4084, 4214, 4213], [4085, 4086, 4214], [4086, 4215, 4214], [4086, 4087, 4216], [4086, 4216, 4215], [4087, 4088, 4216], [4088, 4217, 4216], [4088, 4089, 4218], [4088, 4218, 4217], [4089, 4090, 4218], [4090, 4219, 4218], [4090, 4091, 4220], [4090, 4220, 4219], [4091, 4092, 4220], [4092, 4221, 4220], [4092, 4093, 4222], [4092, 4222, 4221], [4093, 4094, 4222], [4094, 4223, 4222], [4094, 4095, 4224], [4094, 4224, 4223], [4095, 4096, 4224], [4096, 4225, 4224], [4096, 4097, 4226], [4096, 4226, 4225], [4097, 4098, 4226], [4098, 4227, 4226], [4098, 4099, 4228], [4098, 4228, 4227], [4099, 4100, 4228], [4100, 4229, 4228], [4100, 4101, 4230], [4100, 4230, 4229], [4101, 4102, 4230], [4102, 4231, 4230], [4102, 4103, 4232], [4102, 4232, 4231], [4103, 4104, 4232], [4104, 4233, 4232], [4104, 4105, 4234], [4104, 4234, 4233], [4105, 4106, 4234], [4106, 4235, 4234], [4106, 4107, 4236], [4106, 4236, 4235], [4107, 4108, 4236], [4108, 4237, 4236], [4108, 4109, 4238], [4108, 4238, 4237], [4109, 4110, 4238], [4110, 4239, 4238], [4110, 4111, 4240], [4110, 4240, 4239], [4111, 4112, 4240], [4112, 4241, 4240], [4112, 4113, 4242], [4112, 4242, 4241], [4113, 4114, 4242], [4114, 4243, 4242], 
[4114, 4115, 4244], [4114, 4244, 4243], [4115, 4116, 4244], [4116, 4245, 4244], [4116, 4117, 4246], [4116, 4246, 4245], [4117, 4118, 4246], [4118, 4247, 4246], [4118, 4119, 4248], [4118, 4248, 4247], [4119, 4120, 4248], [4120, 4249, 4248], [4120, 4121, 4250], [4120, 4250, 4249], [4121, 4122, 4250], [4122, 4251, 4250], [4122, 4123, 4252], [4122, 4252, 4251], [4123, 4124, 4252], [4124, 4253, 4252], [4124, 4125, 4254], [4124, 4254, 4253], [4125, 4126, 4254], [4126, 4255, 4254], [4126, 4127, 4256], [4126, 4256, 4255], [4128, 4129, 4258], [4128, 4258, 4257], [4129, 4130, 4258], [4130, 4259, 4258], [4130, 4131, 4260], [4130, 4260, 4259], [4131, 4132, 4260], [4132, 4261, 4260], [4132, 4133, 4262], [4132, 4262, 4261], [4133, 4134, 4262], [4134, 4263, 4262], [4134, 4135, 4264], [4134, 4264, 4263], [4135, 4136, 4264], [4136, 4265, 4264], [4136, 4137, 4266], [4136, 4266, 4265], [4137, 4138, 4266], [4138, 4267, 4266], [4138, 4139, 4268], [4138, 4268, 4267], [4139, 4140, 4268], [4140, 4269, 4268], [4140, 4141, 4270], [4140, 4270, 4269], [4141, 4142, 4270], [4142, 4271, 4270], [4142, 4143, 4272], [4142, 4272, 4271], [4143, 4144, 4272], [4144, 4273, 4272], [4144, 4145, 4274], [4144, 4274, 4273], [4145, 4146, 4274], [4146, 4275, 4274], [4146, 4147, 4276], [4146, 4276, 4275], [4147, 4148, 4276], [4148, 4277, 4276], [4148, 4149, 4278], [4148, 4278, 4277], [4149, 4150, 4278], [4150, 4279, 4278], [4150, 4151, 4280], [4150, 4280, 4279], [4151, 4152, 4280], [4152, 4281, 4280], [4152, 4153, 4282], [4152, 4282, 4281], [4153, 4154, 4282], [4154, 4283, 4282], [4154, 4155, 4284], [4154, 4284, 4283], [4155, 4156, 4284], [4156, 4285, 4284], [4156, 4157, 4286], [4156, 4286, 4285], [4157, 4158, 4286], [4158, 4287, 4286], [4158, 4159, 4288], [4158, 4288, 4287], [4159, 4160, 4288], [4160, 4289, 4288], [4160, 4161, 4290], [4160, 4290, 4289], [4161, 4162, 4290], [4162, 4291, 4290], [4162, 4163, 4292], [4162, 4292, 4291], [4163, 4164, 4292], [4164, 4293, 4292], [4164, 4165, 4294], [4164, 4294, 4293], [4165, 4166, 4294], [4166, 4295, 4294], [4166, 4167, 4296], [4166, 4296, 4295], [4167, 4168, 4296], [4168, 4297, 4296], [4168, 4169, 4298], [4168, 4298, 4297], [4169, 4170, 4298], [4170, 4299, 4298], [4170, 4171, 4300], [4170, 4300, 4299], [4171, 4172, 4300], [4172, 4301, 4300], [4172, 4173, 4302], [4172, 4302, 4301], [4173, 4174, 4302], [4174, 4303, 4302], [4174, 4175, 4304], [4174, 4304, 4303], [4175, 4176, 4304], [4176, 4305, 4304], [4176, 4177, 4306], [4176, 4306, 4305], [4177, 4178, 4306], [4178, 4307, 4306], [4178, 4179, 4308], [4178, 4308, 4307], [4179, 4180, 4308], [4180, 4309, 4308], [4180, 4181, 4310], [4180, 4310, 4309], [4181, 4182, 4310], [4182, 4311, 4310], [4182, 4183, 4312], [4182, 4312, 4311], [4183, 4184, 4312], [4184, 4313, 4312], [4184, 4185, 4314], [4184, 4314, 4313], [4185, 4186, 4314], [4186, 4315, 4314], [4186, 4187, 4316], [4186, 4316, 4315], [4187, 4188, 4316], [4188, 4317, 4316], [4188, 4189, 4318], [4188, 4318, 4317], [4189, 4190, 4318], [4190, 4319, 4318], [4190, 4191, 4320], [4190, 4320, 4319], [4191, 4192, 4320], [4192, 4321, 4320], [4192, 4193, 4322], [4192, 4322, 4321], [4193, 4194, 4322], [4194, 4323, 4322], [4194, 4195, 4324], [4194, 4324, 4323], [4195, 4196, 4324], [4196, 4325, 4324], [4196, 4197, 4326], [4196, 4326, 4325], [4197, 4198, 4326], [4198, 4327, 4326], [4198, 4199, 4328], [4198, 4328, 4327], [4199, 4200, 4328], [4200, 4329, 4328], [4200, 4201, 4330], [4200, 4330, 4329], [4201, 4202, 4330], [4202, 4331, 4330], [4202, 4203, 4332], [4202, 4332, 4331], [4203, 4204, 4332], [4204, 4333, 
4332], [4204, 4205, 4334], [4204, 4334, 4333], [4205, 4206, 4334], [4206, 4335, 4334], [4206, 4207, 4336], [4206, 4336, 4335], [4207, 4208, 4336], [4208, 4337, 4336], [4208, 4209, 4338], [4208, 4338, 4337], [4209, 4210, 4338], [4210, 4339, 4338], [4210, 4211, 4340], [4210, 4340, 4339], [4211, 4212, 4340], [4212, 4341, 4340], [4212, 4213, 4342], [4212, 4342, 4341], [4213, 4214, 4342], [4214, 4343, 4342], [4214, 4215, 4344], [4214, 4344, 4343], [4215, 4216, 4344], [4216, 4345, 4344], [4216, 4217, 4346], [4216, 4346, 4345], [4217, 4218, 4346], [4218, 4347, 4346], [4218, 4219, 4348], [4218, 4348, 4347], [4219, 4220, 4348], [4220, 4349, 4348], [4220, 4221, 4350], [4220, 4350, 4349], [4221, 4222, 4350], [4222, 4351, 4350], [4222, 4223, 4352], [4222, 4352, 4351], [4223, 4224, 4352], [4224, 4353, 4352], [4224, 4225, 4354], [4224, 4354, 4353], [4225, 4226, 4354], [4226, 4355, 4354], [4226, 4227, 4356], [4226, 4356, 4355], [4227, 4228, 4356], [4228, 4357, 4356], [4228, 4229, 4358], [4228, 4358, 4357], [4229, 4230, 4358], [4230, 4359, 4358], [4230, 4231, 4360], [4230, 4360, 4359], [4231, 4232, 4360], [4232, 4361, 4360], [4232, 4233, 4362], [4232, 4362, 4361], [4233, 4234, 4362], [4234, 4363, 4362], [4234, 4235, 4364], [4234, 4364, 4363], [4235, 4236, 4364], [4236, 4365, 4364], [4236, 4237, 4366], [4236, 4366, 4365], [4237, 4238, 4366], [4238, 4367, 4366], [4238, 4239, 4368], [4238, 4368, 4367], [4239, 4240, 4368], [4240, 4369, 4368], [4240, 4241, 4370], [4240, 4370, 4369], [4241, 4242, 4370], [4242, 4371, 4370], [4242, 4243, 4372], [4242, 4372, 4371], [4243, 4244, 4372], [4244, 4373, 4372], [4244, 4245, 4374], [4244, 4374, 4373], [4245, 4246, 4374], [4246, 4375, 4374], [4246, 4247, 4376], [4246, 4376, 4375], [4247, 4248, 4376], [4248, 4377, 4376], [4248, 4249, 4378], [4248, 4378, 4377], [4249, 4250, 4378], [4250, 4379, 4378], [4250, 4251, 4380], [4250, 4380, 4379], [4251, 4252, 4380], [4252, 4381, 4380], [4252, 4253, 4382], [4252, 4382, 4381], [4253, 4254, 4382], [4254, 4383, 4382], [4254, 4255, 4384], [4254, 4384, 4383], [4255, 4256, 4384], [4256, 4385, 4384], [4257, 4258, 4386], [4258, 4387, 4386], [4258, 4259, 4388], [4258, 4388, 4387], [4259, 4260, 4388], [4260, 4389, 4388], [4260, 4261, 4390], [4260, 4390, 4389], [4261, 4262, 4390], [4262, 4391, 4390], [4262, 4263, 4392], [4262, 4392, 4391], [4263, 4264, 4392], [4264, 4393, 4392], [4264, 4265, 4394], [4264, 4394, 4393], [4265, 4266, 4394], [4266, 4395, 4394], [4266, 4267, 4396], [4266, 4396, 4395], [4267, 4268, 4396], [4268, 4397, 4396], [4268, 4269, 4398], [4268, 4398, 4397], [4269, 4270, 4398], [4270, 4399, 4398], [4270, 4271, 4400], [4270, 4400, 4399], [4271, 4272, 4400], [4272, 4401, 4400], [4272, 4273, 4402], [4272, 4402, 4401], [4273, 4274, 4402], [4274, 4403, 4402], [4274, 4275, 4404], [4274, 4404, 4403], [4275, 4276, 4404], [4276, 4405, 4404], [4276, 4277, 4406], [4276, 4406, 4405], [4277, 4278, 4406], [4278, 4407, 4406], [4278, 4279, 4408], [4278, 4408, 4407], [4279, 4280, 4408], [4280, 4409, 4408], [4280, 4281, 4410], [4280, 4410, 4409], [4281, 4282, 4410], [4282, 4411, 4410], [4282, 4283, 4412], [4282, 4412, 4411], [4283, 4284, 4412], [4284, 4413, 4412], [4284, 4285, 4414], [4284, 4414, 4413], [4285, 4286, 4414], [4286, 4415, 4414], [4286, 4287, 4416], [4286, 4416, 4415], [4287, 4288, 4416], [4288, 4417, 4416], [4288, 4289, 4418], [4288, 4418, 4417], [4289, 4290, 4418], [4290, 4419, 4418], [4290, 4291, 4420], [4290, 4420, 4419], [4291, 4292, 4420], [4292, 4421, 4420], [4292, 4293, 4422], [4292, 4422, 4421], [4293, 4294, 4422], [4294, 
4423, 4422], [4294, 4295, 4424], [4294, 4424, 4423], [4295, 4296, 4424], [4296, 4425, 4424], [4296, 4297, 4426], [4296, 4426, 4425], [4297, 4298, 4426], [4298, 4427, 4426], [4298, 4299, 4428], [4298, 4428, 4427], [4299, 4300, 4428], [4300, 4429, 4428], [4300, 4301, 4430], [4300, 4430, 4429], [4301, 4302, 4430], [4302, 4431, 4430], [4302, 4303, 4432], [4302, 4432, 4431], [4303, 4304, 4432], [4304, 4433, 4432], [4304, 4305, 4434], [4304, 4434, 4433], [4305, 4306, 4434], [4306, 4435, 4434], [4306, 4307, 4436], [4306, 4436, 4435], [4307, 4308, 4436], [4308, 4437, 4436], [4308, 4309, 4438], [4308, 4438, 4437], [4309, 4310, 4438], [4310, 4439, 4438], [4310, 4311, 4440], [4310, 4440, 4439], [4311, 4312, 4440], [4312, 4441, 4440], [4312, 4313, 4442], [4312, 4442, 4441], [4313, 4314, 4442], [4314, 4443, 4442], [4314, 4315, 4444], [4314, 4444, 4443], [4315, 4316, 4444], [4316, 4445, 4444], [4316, 4317, 4446], [4316, 4446, 4445], [4317, 4318, 4446], [4318, 4447, 4446], [4318, 4319, 4448], [4318, 4448, 4447], [4319, 4320, 4448], [4320, 4449, 4448], [4320, 4321, 4450], [4320, 4450, 4449], [4321, 4322, 4450], [4322, 4451, 4450], [4322, 4323, 4452], [4322, 4452, 4451], [4323, 4324, 4452], [4324, 4453, 4452], [4324, 4325, 4454], [4324, 4454, 4453], [4325, 4326, 4454], [4326, 4455, 4454], [4326, 4327, 4456], [4326, 4456, 4455], [4327, 4328, 4456], [4328, 4457, 4456], [4328, 4329, 4458], [4328, 4458, 4457], [4329, 4330, 4458], [4330, 4459, 4458], [4330, 4331, 4460], [4330, 4460, 4459], [4331, 4332, 4460], [4332, 4461, 4460], [4332, 4333, 4462], [4332, 4462, 4461], [4333, 4334, 4462], [4334, 4463, 4462], [4334, 4335, 4464], [4334, 4464, 4463], [4335, 4336, 4464], [4336, 4465, 4464], [4336, 4337, 4466], [4336, 4466, 4465], [4337, 4338, 4466], [4338, 4467, 4466], [4338, 4339, 4468], [4338, 4468, 4467], [4339, 4340, 4468], [4340, 4469, 4468], [4340, 4341, 4470], [4340, 4470, 4469], [4341, 4342, 4470], [4342, 4471, 4470], [4342, 4343, 4472], [4342, 4472, 4471], [4343, 4344, 4472], [4344, 4473, 4472], [4344, 4345, 4474], [4344, 4474, 4473], [4345, 4346, 4474], [4346, 4475, 4474], [4346, 4347, 4476], [4346, 4476, 4475], [4347, 4348, 4476], [4348, 4477, 4476], [4348, 4349, 4478], [4348, 4478, 4477], [4349, 4350, 4478], [4350, 4479, 4478], [4350, 4351, 4480], [4350, 4480, 4479], [4351, 4352, 4480], [4352, 4481, 4480], [4352, 4353, 4482], [4352, 4482, 4481], [4353, 4354, 4482], [4354, 4483, 4482], [4354, 4355, 4484], [4354, 4484, 4483], [4355, 4356, 4484], [4356, 4485, 4484], [4356, 4357, 4486], [4356, 4486, 4485], [4357, 4358, 4486], [4358, 4487, 4486], [4358, 4359, 4488], [4358, 4488, 4487], [4359, 4360, 4488], [4360, 4489, 4488], [4360, 4361, 4490], [4360, 4490, 4489], [4361, 4362, 4490], [4362, 4491, 4490], [4362, 4363, 4492], [4362, 4492, 4491], [4363, 4364, 4492], [4364, 4493, 4492], [4364, 4365, 4494], [4364, 4494, 4493], [4365, 4366, 4494], [4366, 4495, 4494], [4366, 4367, 4496], [4366, 4496, 4495], [4367, 4368, 4496], [4368, 4497, 4496], [4368, 4369, 4498], [4368, 4498, 4497], [4369, 4370, 4498], [4370, 4499, 4498], [4370, 4371, 4500], [4370, 4500, 4499], [4371, 4372, 4500], [4372, 4501, 4500], [4372, 4373, 4502], [4372, 4502, 4501], [4373, 4374, 4502], [4374, 4503, 4502], [4374, 4375, 4504], [4374, 4504, 4503], [4375, 4376, 4504], [4376, 4505, 4504], [4376, 4377, 4506], [4376, 4506, 4505], [4377, 4378, 4506], [4378, 4507, 4506], [4378, 4379, 4508], [4378, 4508, 4507], [4379, 4380, 4508], [4380, 4509, 4508], [4380, 4381, 4510], [4380, 4510, 4509], [4381, 4382, 4510], [4382, 4511, 4510], [4382, 4383, 4512], 
[4382, 4512, 4511], [4383, 4384, 4512], [4384, 4513, 4512], [4384, 4385, 4514], [4384, 4514, 4513], [4386, 4387, 4516], [4386, 4516, 4515], [4387, 4388, 4516], [4388, 4517, 4516], [4388, 4389, 4518], [4388, 4518, 4517], [4389, 4390, 4518], [4390, 4519, 4518], [4390, 4391, 4520], [4390, 4520, 4519], [4391, 4392, 4520], [4392, 4521, 4520], [4392, 4393, 4522], [4392, 4522, 4521], [4393, 4394, 4522], [4394, 4523, 4522], [4394, 4395, 4524], [4394, 4524, 4523], [4395, 4396, 4524], [4396, 4525, 4524], [4396, 4397, 4526], [4396, 4526, 4525], [4397, 4398, 4526], [4398, 4527, 4526], [4398, 4399, 4528], [4398, 4528, 4527], [4399, 4400, 4528], [4400, 4529, 4528], [4400, 4401, 4530], [4400, 4530, 4529], [4401, 4402, 4530], [4402, 4531, 4530], [4402, 4403, 4532], [4402, 4532, 4531], [4403, 4404, 4532], [4404, 4533, 4532], [4404, 4405, 4534], [4404, 4534, 4533], [4405, 4406, 4534], [4406, 4535, 4534], [4406, 4407, 4536], [4406, 4536, 4535], [4407, 4408, 4536], [4408, 4537, 4536], [4408, 4409, 4538], [4408, 4538, 4537], [4409, 4410, 4538], [4410, 4539, 4538], [4410, 4411, 4540], [4410, 4540, 4539], [4411, 4412, 4540], [4412, 4541, 4540], [4412, 4413, 4542], [4412, 4542, 4541], [4413, 4414, 4542], [4414, 4543, 4542], [4414, 4415, 4544], [4414, 4544, 4543], [4415, 4416, 4544], [4416, 4545, 4544], [4416, 4417, 4546], [4416, 4546, 4545], [4417, 4418, 4546], [4418, 4547, 4546], [4418, 4419, 4548], [4418, 4548, 4547], [4419, 4420, 4548], [4420, 4549, 4548], [4420, 4421, 4550], [4420, 4550, 4549], [4421, 4422, 4550], [4422, 4551, 4550], [4422, 4423, 4552], [4422, 4552, 4551], [4423, 4424, 4552], [4424, 4553, 4552], [4424, 4425, 4554], [4424, 4554, 4553], [4425, 4426, 4554], [4426, 4555, 4554], [4426, 4427, 4556], [4426, 4556, 4555], [4427, 4428, 4556], [4428, 4557, 4556], [4428, 4429, 4558], [4428, 4558, 4557], [4429, 4430, 4558], [4430, 4559, 4558], [4430, 4431, 4560], [4430, 4560, 4559], [4431, 4432, 4560], [4432, 4561, 4560], [4432, 4433, 4562], [4432, 4562, 4561], [4433, 4434, 4562], [4434, 4563, 4562], [4434, 4435, 4564], [4434, 4564, 4563], [4435, 4436, 4564], [4436, 4565, 4564], [4436, 4437, 4566], [4436, 4566, 4565], [4437, 4438, 4566], [4438, 4567, 4566], [4438, 4439, 4568], [4438, 4568, 4567], [4439, 4440, 4568], [4440, 4569, 4568], [4440, 4441, 4570], [4440, 4570, 4569], [4441, 4442, 4570], [4442, 4571, 4570], [4442, 4443, 4572], [4442, 4572, 4571], [4443, 4444, 4572], [4444, 4573, 4572], [4444, 4445, 4574], [4444, 4574, 4573], [4445, 4446, 4574], [4446, 4575, 4574], [4446, 4447, 4576], [4446, 4576, 4575], [4447, 4448, 4576], [4448, 4577, 4576], [4448, 4449, 4578], [4448, 4578, 4577], [4449, 4450, 4578], [4450, 4579, 4578], [4450, 4451, 4580], [4450, 4580, 4579], [4451, 4452, 4580], [4452, 4581, 4580], [4452, 4453, 4582], [4452, 4582, 4581], [4453, 4454, 4582], [4454, 4583, 4582], [4454, 4455, 4584], [4454, 4584, 4583], [4455, 4456, 4584], [4456, 4585, 4584], [4456, 4457, 4586], [4456, 4586, 4585], [4457, 4458, 4586], [4458, 4587, 4586], [4458, 4459, 4588], [4458, 4588, 4587], [4459, 4460, 4588], [4460, 4589, 4588], [4460, 4461, 4590], [4460, 4590, 4589], [4461, 4462, 4590], [4462, 4591, 4590], [4462, 4463, 4592], [4462, 4592, 4591], [4463, 4464, 4592], [4464, 4593, 4592], [4464, 4465, 4594], [4464, 4594, 4593], [4465, 4466, 4594], [4466, 4595, 4594], [4466, 4467, 4596], [4466, 4596, 4595], [4467, 4468, 4596], [4468, 4597, 4596], [4468, 4469, 4598], [4468, 4598, 4597], [4469, 4470, 4598], [4470, 4599, 4598], [4470, 4471, 4600], [4470, 4600, 4599], [4471, 4472, 4600], [4472, 4601, 4600], [4472, 4473, 
4602], [4472, 4602, 4601], [4473, 4474, 4602], [4474, 4603, 4602], [4474, 4475, 4604], [4474, 4604, 4603], [4475, 4476, 4604], [4476, 4605, 4604], [4476, 4477, 4606], [4476, 4606, 4605], [4477, 4478, 4606], [4478, 4607, 4606], [4478, 4479, 4608], [4478, 4608, 4607], [4479, 4480, 4608], [4480, 4609, 4608], [4480, 4481, 4610], [4480, 4610, 4609], [4481, 4482, 4610], [4482, 4611, 4610], [4482, 4483, 4612], [4482, 4612, 4611], [4483, 4484, 4612], [4484, 4613, 4612], [4484, 4485, 4614], [4484, 4614, 4613], [4485, 4486, 4614], [4486, 4615, 4614], [4486, 4487, 4616], [4486, 4616, 4615], [4487, 4488, 4616], [4488, 4617, 4616], [4488, 4489, 4618], [4488, 4618, 4617], [4489, 4490, 4618], [4490, 4619, 4618], [4490, 4491, 4620], [4490, 4620, 4619], [4491, 4492, 4620], [4492, 4621, 4620], [4492, 4493, 4622], [4492, 4622, 4621], [4493, 4494, 4622], [4494, 4623, 4622], [4494, 4495, 4624], [4494, 4624, 4623], [4495, 4496, 4624], [4496, 4625, 4624], [4496, 4497, 4626], [4496, 4626, 4625], [4497, 4498, 4626], [4498, 4627, 4626], [4498, 4499, 4628], [4498, 4628, 4627], [4499, 4500, 4628], [4500, 4629, 4628], [4500, 4501, 4630], [4500, 4630, 4629], [4501, 4502, 4630], [4502, 4631, 4630], [4502, 4503, 4632], [4502, 4632, 4631], [4503, 4504, 4632], [4504, 4633, 4632], [4504, 4505, 4634], [4504, 4634, 4633], [4505, 4506, 4634], [4506, 4635, 4634], [4506, 4507, 4636], [4506, 4636, 4635], [4507, 4508, 4636], [4508, 4637, 4636], [4508, 4509, 4638], [4508, 4638, 4637], [4509, 4510, 4638], [4510, 4639, 4638], [4510, 4511, 4640], [4510, 4640, 4639], [4511, 4512, 4640], [4512, 4641, 4640], [4512, 4513, 4642], [4512, 4642, 4641], [4513, 4514, 4642], [4514, 4643, 4642], [4515, 4516, 4644], [4516, 4645, 4644], [4516, 4517, 4646], [4516, 4646, 4645], [4517, 4518, 4646], [4518, 4647, 4646], [4518, 4519, 4648], [4518, 4648, 4647], [4519, 4520, 4648], [4520, 4649, 4648], [4520, 4521, 4650], [4520, 4650, 4649], [4521, 4522, 4650], [4522, 4651, 4650], [4522, 4523, 4652], [4522, 4652, 4651], [4523, 4524, 4652], [4524, 4653, 4652], [4524, 4525, 4654], [4524, 4654, 4653], [4525, 4526, 4654], [4526, 4655, 4654], [4526, 4527, 4656], [4526, 4656, 4655], [4527, 4528, 4656], [4528, 4657, 4656], [4528, 4529, 4658], [4528, 4658, 4657], [4529, 4530, 4658], [4530, 4659, 4658], [4530, 4531, 4660], [4530, 4660, 4659], [4531, 4532, 4660], [4532, 4661, 4660], [4532, 4533, 4662], [4532, 4662, 4661], [4533, 4534, 4662], [4534, 4663, 4662], [4534, 4535, 4664], [4534, 4664, 4663], [4535, 4536, 4664], [4536, 4665, 4664], [4536, 4537, 4666], [4536, 4666, 4665], [4537, 4538, 4666], [4538, 4667, 4666], [4538, 4539, 4668], [4538, 4668, 4667], [4539, 4540, 4668], [4540, 4669, 4668], [4540, 4541, 4670], [4540, 4670, 4669], [4541, 4542, 4670], [4542, 4671, 4670], [4542, 4543, 4672], [4542, 4672, 4671], [4543, 4544, 4672], [4544, 4673, 4672], [4544, 4545, 4674], [4544, 4674, 4673], [4545, 4546, 4674], [4546, 4675, 4674], [4546, 4547, 4676], [4546, 4676, 4675], [4547, 4548, 4676], [4548, 4677, 4676], [4548, 4549, 4678], [4548, 4678, 4677], [4549, 4550, 4678], [4550, 4679, 4678], [4550, 4551, 4680], [4550, 4680, 4679], [4551, 4552, 4680], [4552, 4681, 4680], [4552, 4553, 4682], [4552, 4682, 4681], [4553, 4554, 4682], [4554, 4683, 4682], [4554, 4555, 4684], [4554, 4684, 4683], [4555, 4556, 4684], [4556, 4685, 4684], [4556, 4557, 4686], [4556, 4686, 4685], [4557, 4558, 4686], [4558, 4687, 4686], [4558, 4559, 4688], [4558, 4688, 4687], [4559, 4560, 4688], [4560, 4689, 4688], [4560, 4561, 4690], [4560, 4690, 4689], [4561, 4562, 4690], [4562, 4691, 4690], [4562, 
4563, 4692], [4562, 4692, 4691], [4563, 4564, 4692], [4564, 4693, 4692], [4564, 4565, 4694], [4564, 4694, 4693], [4565, 4566, 4694], [4566, 4695, 4694], [4566, 4567, 4696], [4566, 4696, 4695], [4567, 4568, 4696], [4568, 4697, 4696], [4568, 4569, 4698], [4568, 4698, 4697], [4569, 4570, 4698], [4570, 4699, 4698], [4570, 4571, 4700], [4570, 4700, 4699], [4571, 4572, 4700], [4572, 4701, 4700], [4572, 4573, 4702], [4572, 4702, 4701], [4573, 4574, 4702], [4574, 4703, 4702], [4574, 4575, 4704], [4574, 4704, 4703], [4575, 4576, 4704], [4576, 4705, 4704], [4576, 4577, 4706], [4576, 4706, 4705], [4577, 4578, 4706], [4578, 4707, 4706], [4578, 4579, 4708], [4578, 4708, 4707], [4579, 4580, 4708], [4580, 4709, 4708], [4580, 4581, 4710], [4580, 4710, 4709], [4581, 4582, 4710], [4582, 4711, 4710], [4582, 4583, 4712], [4582, 4712, 4711], [4583, 4584, 4712], [4584, 4713, 4712], [4584, 4585, 4714], [4584, 4714, 4713], [4585, 4586, 4714], [4586, 4715, 4714], [4586, 4587, 4716], [4586, 4716, 4715], [4587, 4588, 4716], [4588, 4717, 4716], [4588, 4589, 4718], [4588, 4718, 4717], [4589, 4590, 4718], [4590, 4719, 4718], [4590, 4591, 4720], [4590, 4720, 4719], [4591, 4592, 4720], [4592, 4721, 4720], [4592, 4593, 4722], [4592, 4722, 4721], [4593, 4594, 4722], [4594, 4723, 4722], [4594, 4595, 4724], [4594, 4724, 4723], [4595, 4596, 4724], [4596, 4725, 4724], [4596, 4597, 4726], [4596, 4726, 4725], [4597, 4598, 4726], [4598, 4727, 4726], [4598, 4599, 4728], [4598, 4728, 4727], [4599, 4600, 4728], [4600, 4729, 4728], [4600, 4601, 4730], [4600, 4730, 4729], [4601, 4602, 4730], [4602, 4731, 4730], [4602, 4603, 4732], [4602, 4732, 4731], [4603, 4604, 4732], [4604, 4733, 4732], [4604, 4605, 4734], [4604, 4734, 4733], [4605, 4606, 4734], [4606, 4735, 4734], [4606, 4607, 4736], [4606, 4736, 4735], [4607, 4608, 4736], [4608, 4737, 4736], [4608, 4609, 4738], [4608, 4738, 4737], [4609, 4610, 4738], [4610, 4739, 4738], [4610, 4611, 4740], [4610, 4740, 4739], [4611, 4612, 4740], [4612, 4741, 4740], [4612, 4613, 4742], [4612, 4742, 4741], [4613, 4614, 4742], [4614, 4743, 4742], [4614, 4615, 4744], [4614, 4744, 4743], [4615, 4616, 4744], [4616, 4745, 4744], [4616, 4617, 4746], [4616, 4746, 4745], [4617, 4618, 4746], [4618, 4747, 4746], [4618, 4619, 4748], [4618, 4748, 4747], [4619, 4620, 4748], [4620, 4749, 4748], [4620, 4621, 4750], [4620, 4750, 4749], [4621, 4622, 4750], [4622, 4751, 4750], [4622, 4623, 4752], [4622, 4752, 4751], [4623, 4624, 4752], [4624, 4753, 4752], [4624, 4625, 4754], [4624, 4754, 4753], [4625, 4626, 4754], [4626, 4755, 4754], [4626, 4627, 4756], [4626, 4756, 4755], [4627, 4628, 4756], [4628, 4757, 4756], [4628, 4629, 4758], [4628, 4758, 4757], [4629, 4630, 4758], [4630, 4759, 4758], [4630, 4631, 4760], [4630, 4760, 4759], [4631, 4632, 4760], [4632, 4761, 4760], [4632, 4633, 4762], [4632, 4762, 4761], [4633, 4634, 4762], [4634, 4763, 4762], [4634, 4635, 4764], [4634, 4764, 4763], [4635, 4636, 4764], [4636, 4765, 4764], [4636, 4637, 4766], [4636, 4766, 4765], [4637, 4638, 4766], [4638, 4767, 4766], [4638, 4639, 4768], [4638, 4768, 4767], [4639, 4640, 4768], [4640, 4769, 4768], [4640, 4641, 4770], [4640, 4770, 4769], [4641, 4642, 4770], [4642, 4771, 4770], [4642, 4643, 4772], [4642, 4772, 4771], [4644, 4645, 4774], [4644, 4774, 4773], [4645, 4646, 4774], [4646, 4775, 4774], [4646, 4647, 4776], [4646, 4776, 4775], [4647, 4648, 4776], [4648, 4777, 4776], [4648, 4649, 4778], [4648, 4778, 4777], [4649, 4650, 4778], [4650, 4779, 4778], [4650, 4651, 4780], [4650, 4780, 4779], [4651, 4652, 4780], [4652, 4781, 4780], 
[4652, 4653, 4782], [4652, 4782, 4781], [4653, 4654, 4782], [4654, 4783, 4782], [4654, 4655, 4784], [4654, 4784, 4783], [4655, 4656, 4784], [4656, 4785, 4784], [4656, 4657, 4786], [4656, 4786, 4785], [4657, 4658, 4786], [4658, 4787, 4786], [4658, 4659, 4788], [4658, 4788, 4787], [4659, 4660, 4788], [4660, 4789, 4788], [4660, 4661, 4790], [4660, 4790, 4789], [4661, 4662, 4790], [4662, 4791, 4790], [4662, 4663, 4792], [4662, 4792, 4791], [4663, 4664, 4792], [4664, 4793, 4792], [4664, 4665, 4794], [4664, 4794, 4793], [4665, 4666, 4794], [4666, 4795, 4794], [4666, 4667, 4796], [4666, 4796, 4795], [4667, 4668, 4796], [4668, 4797, 4796], [4668, 4669, 4798], [4668, 4798, 4797], [4669, 4670, 4798], [4670, 4799, 4798], [4670, 4671, 4800], [4670, 4800, 4799], [4671, 4672, 4800], [4672, 4801, 4800], [4672, 4673, 4802], [4672, 4802, 4801], [4673, 4674, 4802], [4674, 4803, 4802], [4674, 4675, 4804], [4674, 4804, 4803], [4675, 4676, 4804], [4676, 4805, 4804], [4676, 4677, 4806], [4676, 4806, 4805], [4677, 4678, 4806], [4678, 4807, 4806], [4678, 4679, 4808], [4678, 4808, 4807], [4679, 4680, 4808], [4680, 4809, 4808], [4680, 4681, 4810], [4680, 4810, 4809], [4681, 4682, 4810], [4682, 4811, 4810], [4682, 4683, 4812], [4682, 4812, 4811], [4683, 4684, 4812], [4684, 4813, 4812], [4684, 4685, 4814], [4684, 4814, 4813], [4685, 4686, 4814], [4686, 4815, 4814], [4686, 4687, 4816], [4686, 4816, 4815], [4687, 4688, 4816], [4688, 4817, 4816], [4688, 4689, 4818], [4688, 4818, 4817], [4689, 4690, 4818], [4690, 4819, 4818], [4690, 4691, 4820], [4690, 4820, 4819], [4691, 4692, 4820], [4692, 4821, 4820], [4692, 4693, 4822], [4692, 4822, 4821], [4693, 4694, 4822], [4694, 4823, 4822], [4694, 4695, 4824], [4694, 4824, 4823], [4695, 4696, 4824], [4696, 4825, 4824], [4696, 4697, 4826], [4696, 4826, 4825], [4697, 4698, 4826], [4698, 4827, 4826], [4698, 4699, 4828], [4698, 4828, 4827], [4699, 4700, 4828], [4700, 4829, 4828], [4700, 4701, 4830], [4700, 4830, 4829], [4701, 4702, 4830], [4702, 4831, 4830], [4702, 4703, 4832], [4702, 4832, 4831], [4703, 4704, 4832], [4704, 4833, 4832], [4704, 4705, 4834], [4704, 4834, 4833], [4705, 4706, 4834], [4706, 4835, 4834], [4706, 4707, 4836], [4706, 4836, 4835], [4707, 4708, 4836], [4708, 4837, 4836], [4708, 4709, 4838], [4708, 4838, 4837], [4709, 4710, 4838], [4710, 4839, 4838], [4710, 4711, 4840], [4710, 4840, 4839], [4711, 4712, 4840], [4712, 4841, 4840], [4712, 4713, 4842], [4712, 4842, 4841], [4713, 4714, 4842], [4714, 4843, 4842], [4714, 4715, 4844], [4714, 4844, 4843], [4715, 4716, 4844], [4716, 4845, 4844], [4716, 4717, 4846], [4716, 4846, 4845], [4717, 4718, 4846], [4718, 4847, 4846], [4718, 4719, 4848], [4718, 4848, 4847], [4719, 4720, 4848], [4720, 4849, 4848], [4720, 4721, 4850], [4720, 4850, 4849], [4721, 4722, 4850], [4722, 4851, 4850], [4722, 4723, 4852], [4722, 4852, 4851], [4723, 4724, 4852], [4724, 4853, 4852], [4724, 4725, 4854], [4724, 4854, 4853], [4725, 4726, 4854], [4726, 4855, 4854], [4726, 4727, 4856], [4726, 4856, 4855], [4727, 4728, 4856], [4728, 4857, 4856], [4728, 4729, 4858], [4728, 4858, 4857], [4729, 4730, 4858], [4730, 4859, 4858], [4730, 4731, 4860], [4730, 4860, 4859], [4731, 4732, 4860], [4732, 4861, 4860], [4732, 4733, 4862], [4732, 4862, 4861], [4733, 4734, 4862], [4734, 4863, 4862], [4734, 4735, 4864], [4734, 4864, 4863], [4735, 4736, 4864], [4736, 4865, 4864], [4736, 4737, 4866], [4736, 4866, 4865], [4737, 4738, 4866], [4738, 4867, 4866], [4738, 4739, 4868], [4738, 4868, 4867], [4739, 4740, 4868], [4740, 4869, 4868], [4740, 4741, 4870], [4740, 4870, 
4869], [4741, 4742, 4870], [4742, 4871, 4870], [4742, 4743, 4872], [4742, 4872, 4871], [4743, 4744, 4872], [4744, 4873, 4872], [4744, 4745, 4874], [4744, 4874, 4873], [4745, 4746, 4874], [4746, 4875, 4874], [4746, 4747, 4876], [4746, 4876, 4875], [4747, 4748, 4876], [4748, 4877, 4876], [4748, 4749, 4878], [4748, 4878, 4877], [4749, 4750, 4878], [4750, 4879, 4878], [4750, 4751, 4880], [4750, 4880, 4879], [4751, 4752, 4880], [4752, 4881, 4880], [4752, 4753, 4882], [4752, 4882, 4881], [4753, 4754, 4882], [4754, 4883, 4882], [4754, 4755, 4884], [4754, 4884, 4883], [4755, 4756, 4884], [4756, 4885, 4884], [4756, 4757, 4886], [4756, 4886, 4885], [4757, 4758, 4886], [4758, 4887, 4886], [4758, 4759, 4888], [4758, 4888, 4887], [4759, 4760, 4888], [4760, 4889, 4888], [4760, 4761, 4890], [4760, 4890, 4889], [4761, 4762, 4890], [4762, 4891, 4890], [4762, 4763, 4892], [4762, 4892, 4891], [4763, 4764, 4892], [4764, 4893, 4892], [4764, 4765, 4894], [4764, 4894, 4893], [4765, 4766, 4894], [4766, 4895, 4894], [4766, 4767, 4896], [4766, 4896, 4895], [4767, 4768, 4896], [4768, 4897, 4896], [4768, 4769, 4898], [4768, 4898, 4897], [4769, 4770, 4898], [4770, 4899, 4898], [4770, 4771, 4900], [4770, 4900, 4899], [4771, 4772, 4900], [4772, 4901, 4900], [4773, 4774, 4902], [4774, 4903, 4902], [4774, 4775, 4904], [4774, 4904, 4903], [4775, 4776, 4904], [4776, 4905, 4904], [4776, 4777, 4906], [4776, 4906, 4905], [4777, 4778, 4906], [4778, 4907, 4906], [4778, 4779, 4908], [4778, 4908, 4907], [4779, 4780, 4908], [4780, 4909, 4908], [4780, 4781, 4910], [4780, 4910, 4909], [4781, 4782, 4910], [4782, 4911, 4910], [4782, 4783, 4912], [4782, 4912, 4911], [4783, 4784, 4912], [4784, 4913, 4912], [4784, 4785, 4914], [4784, 4914, 4913], [4785, 4786, 4914], [4786, 4915, 4914], [4786, 4787, 4916], [4786, 4916, 4915], [4787, 4788, 4916], [4788, 4917, 4916], [4788, 4789, 4918], [4788, 4918, 4917], [4789, 4790, 4918], [4790, 4919, 4918], [4790, 4791, 4920], [4790, 4920, 4919], [4791, 4792, 4920], [4792, 4921, 4920], [4792, 4793, 4922], [4792, 4922, 4921], [4793, 4794, 4922], [4794, 4923, 4922], [4794, 4795, 4924], [4794, 4924, 4923], [4795, 4796, 4924], [4796, 4925, 4924], [4796, 4797, 4926], [4796, 4926, 4925], [4797, 4798, 4926], [4798, 4927, 4926], [4798, 4799, 4928], [4798, 4928, 4927], [4799, 4800, 4928], [4800, 4929, 4928], [4800, 4801, 4930], [4800, 4930, 4929], [4801, 4802, 4930], [4802, 4931, 4930], [4802, 4803, 4932], [4802, 4932, 4931], [4803, 4804, 4932], [4804, 4933, 4932], [4804, 4805, 4934], [4804, 4934, 4933], [4805, 4806, 4934], [4806, 4935, 4934], [4806, 4807, 4936], [4806, 4936, 4935], [4807, 4808, 4936], [4808, 4937, 4936], [4808, 4809, 4938], [4808, 4938, 4937], [4809, 4810, 4938], [4810, 4939, 4938], [4810, 4811, 4940], [4810, 4940, 4939], [4811, 4812, 4940], [4812, 4941, 4940], [4812, 4813, 4942], [4812, 4942, 4941], [4813, 4814, 4942], [4814, 4943, 4942], [4814, 4815, 4944], [4814, 4944, 4943], [4815, 4816, 4944], [4816, 4945, 4944], [4816, 4817, 4946], [4816, 4946, 4945], [4817, 4818, 4946], [4818, 4947, 4946], [4818, 4819, 4948], [4818, 4948, 4947], [4819, 4820, 4948], [4820, 4949, 4948], [4820, 4821, 4950], [4820, 4950, 4949], [4821, 4822, 4950], [4822, 4951, 4950], [4822, 4823, 4952], [4822, 4952, 4951], [4823, 4824, 4952], [4824, 4953, 4952], [4824, 4825, 4954], [4824, 4954, 4953], [4825, 4826, 4954], [4826, 4955, 4954], [4826, 4827, 4956], [4826, 4956, 4955], [4827, 4828, 4956], [4828, 4957, 4956], [4828, 4829, 4958], [4828, 4958, 4957], [4829, 4830, 4958], [4830, 4959, 4958], [4830, 4831, 4960], [4830, 
4960, 4959], [4831, 4832, 4960], [4832, 4961, 4960], [4832, 4833, 4962], [4832, 4962, 4961], [4833, 4834, 4962], [4834, 4963, 4962], [4834, 4835, 4964], [4834, 4964, 4963], [4835, 4836, 4964], [4836, 4965, 4964], [4836, 4837, 4966], [4836, 4966, 4965], [4837, 4838, 4966], [4838, 4967, 4966], [4838, 4839, 4968], [4838, 4968, 4967], [4839, 4840, 4968], [4840, 4969, 4968], [4840, 4841, 4970], [4840, 4970, 4969], [4841, 4842, 4970], [4842, 4971, 4970], [4842, 4843, 4972], [4842, 4972, 4971], [4843, 4844, 4972], [4844, 4973, 4972], [4844, 4845, 4974], [4844, 4974, 4973], [4845, 4846, 4974], [4846, 4975, 4974], [4846, 4847, 4976], [4846, 4976, 4975], [4847, 4848, 4976], [4848, 4977, 4976], [4848, 4849, 4978], [4848, 4978, 4977], [4849, 4850, 4978], [4850, 4979, 4978], [4850, 4851, 4980], [4850, 4980, 4979], [4851, 4852, 4980], [4852, 4981, 4980], [4852, 4853, 4982], [4852, 4982, 4981], [4853, 4854, 4982], [4854, 4983, 4982], [4854, 4855, 4984], [4854, 4984, 4983], [4855, 4856, 4984], [4856, 4985, 4984], [4856, 4857, 4986], [4856, 4986, 4985], [4857, 4858, 4986], [4858, 4987, 4986], [4858, 4859, 4988], [4858, 4988, 4987], [4859, 4860, 4988], [4860, 4989, 4988], [4860, 4861, 4990], [4860, 4990, 4989], [4861, 4862, 4990], [4862, 4991, 4990], [4862, 4863, 4992], [4862, 4992, 4991], [4863, 4864, 4992], [4864, 4993, 4992], [4864, 4865, 4994], [4864, 4994, 4993], [4865, 4866, 4994], [4866, 4995, 4994], [4866, 4867, 4996], [4866, 4996, 4995], [4867, 4868, 4996], [4868, 4997, 4996], [4868, 4869, 4998], [4868, 4998, 4997], [4869, 4870, 4998], [4870, 4999, 4998], [4870, 4871, 5000], [4870, 5000, 4999], [4871, 4872, 5000], [4872, 5001, 5000], [4872, 4873, 5002], [4872, 5002, 5001], [4873, 4874, 5002], [4874, 5003, 5002], [4874, 4875, 5004], [4874, 5004, 5003], [4875, 4876, 5004], [4876, 5005, 5004], [4876, 4877, 5006], [4876, 5006, 5005], [4877, 4878, 5006], [4878, 5007, 5006], [4878, 4879, 5008], [4878, 5008, 5007], [4879, 4880, 5008], [4880, 5009, 5008], [4880, 4881, 5010], [4880, 5010, 5009], [4881, 4882, 5010], [4882, 5011, 5010], [4882, 4883, 5012], [4882, 5012, 5011], [4883, 4884, 5012], [4884, 5013, 5012], [4884, 4885, 5014], [4884, 5014, 5013], [4885, 4886, 5014], [4886, 5015, 5014], [4886, 4887, 5016], [4886, 5016, 5015], [4887, 4888, 5016], [4888, 5017, 5016], [4888, 4889, 5018], [4888, 5018, 5017], [4889, 4890, 5018], [4890, 5019, 5018], [4890, 4891, 5020], [4890, 5020, 5019], [4891, 4892, 5020], [4892, 5021, 5020], [4892, 4893, 5022], [4892, 5022, 5021], [4893, 4894, 5022], [4894, 5023, 5022], [4894, 4895, 5024], [4894, 5024, 5023], [4895, 4896, 5024], [4896, 5025, 5024], [4896, 4897, 5026], [4896, 5026, 5025], [4897, 4898, 5026], [4898, 5027, 5026], [4898, 4899, 5028], [4898, 5028, 5027], [4899, 4900, 5028], [4900, 5029, 5028], [4900, 4901, 5030], [4900, 5030, 5029], [4902, 4903, 5032], [4902, 5032, 5031], [4903, 4904, 5032], [4904, 5033, 5032], [4904, 4905, 5034], [4904, 5034, 5033], [4905, 4906, 5034], [4906, 5035, 5034], [4906, 4907, 5036], [4906, 5036, 5035], [4907, 4908, 5036], [4908, 5037, 5036], [4908, 4909, 5038], [4908, 5038, 5037], [4909, 4910, 5038], [4910, 5039, 5038], [4910, 4911, 5040], [4910, 5040, 5039], [4911, 4912, 5040], [4912, 5041, 5040], [4912, 4913, 5042], [4912, 5042, 5041], [4913, 4914, 5042], [4914, 5043, 5042], [4914, 4915, 5044], [4914, 5044, 5043], [4915, 4916, 5044], [4916, 5045, 5044], [4916, 4917, 5046], [4916, 5046, 5045], [4917, 4918, 5046], [4918, 5047, 5046], [4918, 4919, 5048], [4918, 5048, 5047], [4919, 4920, 5048], [4920, 5049, 5048], [4920, 4921, 5050], 
[4920, 5050, 5049], [4921, 4922, 5050], [4922, 5051, 5050], [4922, 4923, 5052], [4922, 5052, 5051], [4923, 4924, 5052], [4924, 5053, 5052], [4924, 4925, 5054], [4924, 5054, 5053], [4925, 4926, 5054], [4926, 5055, 5054], [4926, 4927, 5056], [4926, 5056, 5055], [4927, 4928, 5056], [4928, 5057, 5056], [4928, 4929, 5058], [4928, 5058, 5057], [4929, 4930, 5058], [4930, 5059, 5058], [4930, 4931, 5060], [4930, 5060, 5059], [4931, 4932, 5060], [4932, 5061, 5060], [4932, 4933, 5062], [4932, 5062, 5061], [4933, 4934, 5062], [4934, 5063, 5062], [4934, 4935, 5064], [4934, 5064, 5063], [4935, 4936, 5064], [4936, 5065, 5064], [4936, 4937, 5066], [4936, 5066, 5065], [4937, 4938, 5066], [4938, 5067, 5066], [4938, 4939, 5068], [4938, 5068, 5067], [4939, 4940, 5068], [4940, 5069, 5068], [4940, 4941, 5070], [4940, 5070, 5069], [4941, 4942, 5070], [4942, 5071, 5070], [4942, 4943, 5072], [4942, 5072, 5071], [4943, 4944, 5072], [4944, 5073, 5072], [4944, 4945, 5074], [4944, 5074, 5073], [4945, 4946, 5074], [4946, 5075, 5074], [4946, 4947, 5076], [4946, 5076, 5075], [4947, 4948, 5076], [4948, 5077, 5076], [4948, 4949, 5078], [4948, 5078, 5077], [4949, 4950, 5078], [4950, 5079, 5078], [4950, 4951, 5080], [4950, 5080, 5079], [4951, 4952, 5080], [4952, 5081, 5080], [4952, 4953, 5082], [4952, 5082, 5081], [4953, 4954, 5082], [4954, 5083, 5082], [4954, 4955, 5084], [4954, 5084, 5083], [4955, 4956, 5084], [4956, 5085, 5084], [4956, 4957, 5086], [4956, 5086, 5085], [4957, 4958, 5086], [4958, 5087, 5086], [4958, 4959, 5088], [4958, 5088, 5087], [4959, 4960, 5088], [4960, 5089, 5088], [4960, 4961, 5090], [4960, 5090, 5089], [4961, 4962, 5090], [4962, 5091, 5090], [4962, 4963, 5092], [4962, 5092, 5091], [4963, 4964, 5092], [4964, 5093, 5092], [4964, 4965, 5094], [4964, 5094, 5093], [4965, 4966, 5094], [4966, 5095, 5094], [4966, 4967, 5096], [4966, 5096, 5095], [4967, 4968, 5096], [4968, 5097, 5096], [4968, 4969, 5098], [4968, 5098, 5097], [4969, 4970, 5098], [4970, 5099, 5098], [4970, 4971, 5100], [4970, 5100, 5099], [4971, 4972, 5100], [4972, 5101, 5100], [4972, 4973, 5102], [4972, 5102, 5101], [4973, 4974, 5102], [4974, 5103, 5102], [4974, 4975, 5104], [4974, 5104, 5103], [4975, 4976, 5104], [4976, 5105, 5104], [4976, 4977, 5106], [4976, 5106, 5105], [4977, 4978, 5106], [4978, 5107, 5106], [4978, 4979, 5108], [4978, 5108, 5107], [4979, 4980, 5108], [4980, 5109, 5108], [4980, 4981, 5110], [4980, 5110, 5109], [4981, 4982, 5110], [4982, 5111, 5110], [4982, 4983, 5112], [4982, 5112, 5111], [4983, 4984, 5112], [4984, 5113, 5112], [4984, 4985, 5114], [4984, 5114, 5113], [4985, 4986, 5114], [4986, 5115, 5114], [4986, 4987, 5116], [4986, 5116, 5115], [4987, 4988, 5116], [4988, 5117, 5116], [4988, 4989, 5118], [4988, 5118, 5117], [4989, 4990, 5118], [4990, 5119, 5118], [4990, 4991, 5120], [4990, 5120, 5119], [4991, 4992, 5120], [4992, 5121, 5120], [4992, 4993, 5122], [4992, 5122, 5121], [4993, 4994, 5122], [4994, 5123, 5122], [4994, 4995, 5124], [4994, 5124, 5123], [4995, 4996, 5124], [4996, 5125, 5124], [4996, 4997, 5126], [4996, 5126, 5125], [4997, 4998, 5126], [4998, 5127, 5126], [4998, 4999, 5128], [4998, 5128, 5127], [4999, 5000, 5128], [5000, 5129, 5128], [5000, 5001, 5130], [5000, 5130, 5129], [5001, 5002, 5130], [5002, 5131, 5130], [5002, 5003, 5132], [5002, 5132, 5131], [5003, 5004, 5132], [5004, 5133, 5132], [5004, 5005, 5134], [5004, 5134, 5133], [5005, 5006, 5134], [5006, 5135, 5134], [5006, 5007, 5136], [5006, 5136, 5135], [5007, 5008, 5136], [5008, 5137, 5136], [5008, 5009, 5138], [5008, 5138, 5137], [5009, 5010, 
5138], [5010, 5139, 5138], [5010, 5011, 5140], [5010, 5140, 5139], [5011, 5012, 5140], [5012, 5141, 5140], [5012, 5013, 5142], [5012, 5142, 5141], [5013, 5014, 5142], [5014, 5143, 5142], [5014, 5015, 5144], [5014, 5144, 5143], [5015, 5016, 5144], [5016, 5145, 5144], [5016, 5017, 5146], [5016, 5146, 5145], [5017, 5018, 5146], [5018, 5147, 5146], [5018, 5019, 5148], [5018, 5148, 5147], [5019, 5020, 5148], [5020, 5149, 5148], [5020, 5021, 5150], [5020, 5150, 5149], [5021, 5022, 5150], [5022, 5151, 5150], [5022, 5023, 5152], [5022, 5152, 5151], [5023, 5024, 5152], [5024, 5153, 5152], [5024, 5025, 5154], [5024, 5154, 5153], [5025, 5026, 5154], [5026, 5155, 5154], [5026, 5027, 5156], [5026, 5156, 5155], [5027, 5028, 5156], [5028, 5157, 5156], [5028, 5029, 5158], [5028, 5158, 5157], [5029, 5030, 5158], [5030, 5159, 5158], [5031, 5032, 5160], [5032, 5161, 5160], [5032, 5033, 5162], [5032, 5162, 5161], [5033, 5034, 5162], [5034, 5163, 5162], [5034, 5035, 5164], [5034, 5164, 5163], [5035, 5036, 5164], [5036, 5165, 5164], [5036, 5037, 5166], [5036, 5166, 5165], [5037, 5038, 5166], [5038, 5167, 5166], [5038, 5039, 5168], [5038, 5168, 5167], [5039, 5040, 5168], [5040, 5169, 5168], [5040, 5041, 5170], [5040, 5170, 5169], [5041, 5042, 5170], [5042, 5171, 5170], [5042, 5043, 5172], [5042, 5172, 5171], [5043, 5044, 5172], [5044, 5173, 5172], [5044, 5045, 5174], [5044, 5174, 5173], [5045, 5046, 5174], [5046, 5175, 5174], [5046, 5047, 5176], [5046, 5176, 5175], [5047, 5048, 5176], [5048, 5177, 5176], [5048, 5049, 5178], [5048, 5178, 5177], [5049, 5050, 5178], [5050, 5179, 5178], [5050, 5051, 5180], [5050, 5180, 5179], [5051, 5052, 5180], [5052, 5181, 5180], [5052, 5053, 5182], [5052, 5182, 5181], [5053, 5054, 5182], [5054, 5183, 5182], [5054, 5055, 5184], [5054, 5184, 5183], [5055, 5056, 5184], [5056, 5185, 5184], [5056, 5057, 5186], [5056, 5186, 5185], [5057, 5058, 5186], [5058, 5187, 5186], [5058, 5059, 5188], [5058, 5188, 5187], [5059, 5060, 5188], [5060, 5189, 5188], [5060, 5061, 5190], [5060, 5190, 5189], [5061, 5062, 5190], [5062, 5191, 5190], [5062, 5063, 5192], [5062, 5192, 5191], [5063, 5064, 5192], [5064, 5193, 5192], [5064, 5065, 5194], [5064, 5194, 5193], [5065, 5066, 5194], [5066, 5195, 5194], [5066, 5067, 5196], [5066, 5196, 5195], [5067, 5068, 5196], [5068, 5197, 5196], [5068, 5069, 5198], [5068, 5198, 5197], [5069, 5070, 5198], [5070, 5199, 5198], [5070, 5071, 5200], [5070, 5200, 5199], [5071, 5072, 5200], [5072, 5201, 5200], [5072, 5073, 5202], [5072, 5202, 5201], [5073, 5074, 5202], [5074, 5203, 5202], [5074, 5075, 5204], [5074, 5204, 5203], [5075, 5076, 5204], [5076, 5205, 5204], [5076, 5077, 5206], [5076, 5206, 5205], [5077, 5078, 5206], [5078, 5207, 5206], [5078, 5079, 5208], [5078, 5208, 5207], [5079, 5080, 5208], [5080, 5209, 5208], [5080, 5081, 5210], [5080, 5210, 5209], [5081, 5082, 5210], [5082, 5211, 5210], [5082, 5083, 5212], [5082, 5212, 5211], [5083, 5084, 5212], [5084, 5213, 5212], [5084, 5085, 5214], [5084, 5214, 5213], [5085, 5086, 5214], [5086, 5215, 5214], [5086, 5087, 5216], [5086, 5216, 5215], [5087, 5088, 5216], [5088, 5217, 5216], [5088, 5089, 5218], [5088, 5218, 5217], [5089, 5090, 5218], [5090, 5219, 5218], [5090, 5091, 5220], [5090, 5220, 5219], [5091, 5092, 5220], [5092, 5221, 5220], [5092, 5093, 5222], [5092, 5222, 5221], [5093, 5094, 5222], [5094, 5223, 5222], [5094, 5095, 5224], [5094, 5224, 5223], [5095, 5096, 5224], [5096, 5225, 5224], [5096, 5097, 5226], [5096, 5226, 5225], [5097, 5098, 5226], [5098, 5227, 5226], [5098, 5099, 5228], [5098, 5228, 5227], [5099, 
5100, 5228], [5100, 5229, 5228], [5100, 5101, 5230], [5100, 5230, 5229], [5101, 5102, 5230], [5102, 5231, 5230], [5102, 5103, 5232], [5102, 5232, 5231], [5103, 5104, 5232], [5104, 5233, 5232], [5104, 5105, 5234], [5104, 5234, 5233], [5105, 5106, 5234], [5106, 5235, 5234], [5106, 5107, 5236], [5106, 5236, 5235], [5107, 5108, 5236], [5108, 5237, 5236], [5108, 5109, 5238], [5108, 5238, 5237], [5109, 5110, 5238], [5110, 5239, 5238], [5110, 5111, 5240], [5110, 5240, 5239], [5111, 5112, 5240], [5112, 5241, 5240], [5112, 5113, 5242], [5112, 5242, 5241], [5113, 5114, 5242], [5114, 5243, 5242], [5114, 5115, 5244], [5114, 5244, 5243], [5115, 5116, 5244], [5116, 5245, 5244], [5116, 5117, 5246], [5116, 5246, 5245], [5117, 5118, 5246], [5118, 5247, 5246], [5118, 5119, 5248], [5118, 5248, 5247], [5119, 5120, 5248], [5120, 5249, 5248], [5120, 5121, 5250], [5120, 5250, 5249], [5121, 5122, 5250], [5122, 5251, 5250], [5122, 5123, 5252], [5122, 5252, 5251], [5123, 5124, 5252], [5124, 5253, 5252], [5124, 5125, 5254], [5124, 5254, 5253], [5125, 5126, 5254], [5126, 5255, 5254], [5126, 5127, 5256], [5126, 5256, 5255], [5127, 5128, 5256], [5128, 5257, 5256], [5128, 5129, 5258], [5128, 5258, 5257], [5129, 5130, 5258], [5130, 5259, 5258], [5130, 5131, 5260], [5130, 5260, 5259], [5131, 5132, 5260], [5132, 5261, 5260], [5132, 5133, 5262], [5132, 5262, 5261], [5133, 5134, 5262], [5134, 5263, 5262], [5134, 5135, 5264], [5134, 5264, 5263], [5135, 5136, 5264], [5136, 5265, 5264], [5136, 5137, 5266], [5136, 5266, 5265], [5137, 5138, 5266], [5138, 5267, 5266], [5138, 5139, 5268], [5138, 5268, 5267], [5139, 5140, 5268], [5140, 5269, 5268], [5140, 5141, 5270], [5140, 5270, 5269], [5141, 5142, 5270], [5142, 5271, 5270], [5142, 5143, 5272], [5142, 5272, 5271], [5143, 5144, 5272], [5144, 5273, 5272], [5144, 5145, 5274], [5144, 5274, 5273], [5145, 5146, 5274], [5146, 5275, 5274], [5146, 5147, 5276], [5146, 5276, 5275], [5147, 5148, 5276], [5148, 5277, 5276], [5148, 5149, 5278], [5148, 5278, 5277], [5149, 5150, 5278], [5150, 5279, 5278], [5150, 5151, 5280], [5150, 5280, 5279], [5151, 5152, 5280], [5152, 5281, 5280], [5152, 5153, 5282], [5152, 5282, 5281], [5153, 5154, 5282], [5154, 5283, 5282], [5154, 5155, 5284], [5154, 5284, 5283], [5155, 5156, 5284], [5156, 5285, 5284], [5156, 5157, 5286], [5156, 5286, 5285], [5157, 5158, 5286], [5158, 5287, 5286], [5158, 5159, 5288], [5158, 5288, 5287], [5160, 5161, 5290], [5160, 5290, 5289], [5161, 5162, 5290], [5162, 5291, 5290], [5162, 5163, 5292], [5162, 5292, 5291], [5163, 5164, 5292], [5164, 5293, 5292], [5164, 5165, 5294], [5164, 5294, 5293], [5165, 5166, 5294], [5166, 5295, 5294], [5166, 5167, 5296], [5166, 5296, 5295], [5167, 5168, 5296], [5168, 5297, 5296], [5168, 5169, 5298], [5168, 5298, 5297], [5169, 5170, 5298], [5170, 5299, 5298], [5170, 5171, 5300], [5170, 5300, 5299], [5171, 5172, 5300], [5172, 5301, 5300], [5172, 5173, 5302], [5172, 5302, 5301], [5173, 5174, 5302], [5174, 5303, 5302], [5174, 5175, 5304], [5174, 5304, 5303], [5175, 5176, 5304], [5176, 5305, 5304], [5176, 5177, 5306], [5176, 5306, 5305], [5177, 5178, 5306], [5178, 5307, 5306], [5178, 5179, 5308], [5178, 5308, 5307], [5179, 5180, 5308], [5180, 5309, 5308], [5180, 5181, 5310], [5180, 5310, 5309], [5181, 5182, 5310], [5182, 5311, 5310], [5182, 5183, 5312], [5182, 5312, 5311], [5183, 5184, 5312], [5184, 5313, 5312], [5184, 5185, 5314], [5184, 5314, 5313], [5185, 5186, 5314], [5186, 5315, 5314], [5186, 5187, 5316], [5186, 5316, 5315], [5187, 5188, 5316], [5188, 5317, 5316], [5188, 5189, 5318], [5188, 5318, 5317], 
[5189, 5190, 5318], [5190, 5319, 5318], [5190, 5191, 5320], [5190, 5320, 5319], [5191, 5192, 5320], [5192, 5321, 5320], [5192, 5193, 5322], [5192, 5322, 5321], [5193, 5194, 5322], [5194, 5323, 5322], [5194, 5195, 5324], [5194, 5324, 5323], [5195, 5196, 5324], [5196, 5325, 5324], [5196, 5197, 5326], [5196, 5326, 5325], [5197, 5198, 5326], [5198, 5327, 5326], [5198, 5199, 5328], [5198, 5328, 5327], [5199, 5200, 5328], [5200, 5329, 5328], [5200, 5201, 5330], [5200, 5330, 5329], [5201, 5202, 5330], [5202, 5331, 5330], [5202, 5203, 5332], [5202, 5332, 5331], [5203, 5204, 5332], [5204, 5333, 5332], [5204, 5205, 5334], [5204, 5334, 5333], [5205, 5206, 5334], [5206, 5335, 5334], [5206, 5207, 5336], [5206, 5336, 5335], [5207, 5208, 5336], [5208, 5337, 5336], [5208, 5209, 5338], [5208, 5338, 5337], [5209, 5210, 5338], [5210, 5339, 5338], [5210, 5211, 5340], [5210, 5340, 5339], [5211, 5212, 5340], [5212, 5341, 5340], [5212, 5213, 5342], [5212, 5342, 5341], [5213, 5214, 5342], [5214, 5343, 5342], [5214, 5215, 5344], [5214, 5344, 5343], [5215, 5216, 5344], [5216, 5345, 5344], [5216, 5217, 5346], [5216, 5346, 5345], [5217, 5218, 5346], [5218, 5347, 5346], [5218, 5219, 5348], [5218, 5348, 5347], [5219, 5220, 5348], [5220, 5349, 5348], [5220, 5221, 5350], [5220, 5350, 5349], [5221, 5222, 5350], [5222, 5351, 5350], [5222, 5223, 5352], [5222, 5352, 5351], [5223, 5224, 5352], [5224, 5353, 5352], [5224, 5225, 5354], [5224, 5354, 5353], [5225, 5226, 5354], [5226, 5355, 5354], [5226, 5227, 5356], [5226, 5356, 5355], [5227, 5228, 5356], [5228, 5357, 5356], [5228, 5229, 5358], [5228, 5358, 5357], [5229, 5230, 5358], [5230, 5359, 5358], [5230, 5231, 5360], [5230, 5360, 5359], [5231, 5232, 5360], [5232, 5361, 5360], [5232, 5233, 5362], [5232, 5362, 5361], [5233, 5234, 5362], [5234, 5363, 5362], [5234, 5235, 5364], [5234, 5364, 5363], [5235, 5236, 5364], [5236, 5365, 5364], [5236, 5237, 5366], [5236, 5366, 5365], [5237, 5238, 5366], [5238, 5367, 5366], [5238, 5239, 5368], [5238, 5368, 5367], [5239, 5240, 5368], [5240, 5369, 5368], [5240, 5241, 5370], [5240, 5370, 5369], [5241, 5242, 5370], [5242, 5371, 5370], [5242, 5243, 5372], [5242, 5372, 5371], [5243, 5244, 5372], [5244, 5373, 5372], [5244, 5245, 5374], [5244, 5374, 5373], [5245, 5246, 5374], [5246, 5375, 5374], [5246, 5247, 5376], [5246, 5376, 5375], [5247, 5248, 5376], [5248, 5377, 5376], [5248, 5249, 5378], [5248, 5378, 5377], [5249, 5250, 5378], [5250, 5379, 5378], [5250, 5251, 5380], [5250, 5380, 5379], [5251, 5252, 5380], [5252, 5381, 5380], [5252, 5253, 5382], [5252, 5382, 5381], [5253, 5254, 5382], [5254, 5383, 5382], [5254, 5255, 5384], [5254, 5384, 5383], [5255, 5256, 5384], [5256, 5385, 5384], [5256, 5257, 5386], [5256, 5386, 5385], [5257, 5258, 5386], [5258, 5387, 5386], [5258, 5259, 5388], [5258, 5388, 5387], [5259, 5260, 5388], [5260, 5389, 5388], [5260, 5261, 5390], [5260, 5390, 5389], [5261, 5262, 5390], [5262, 5391, 5390], [5262, 5263, 5392], [5262, 5392, 5391], [5263, 5264, 5392], [5264, 5393, 5392], [5264, 5265, 5394], [5264, 5394, 5393], [5265, 5266, 5394], [5266, 5395, 5394], [5266, 5267, 5396], [5266, 5396, 5395], [5267, 5268, 5396], [5268, 5397, 5396], [5268, 5269, 5398], [5268, 5398, 5397], [5269, 5270, 5398], [5270, 5399, 5398], [5270, 5271, 5400], [5270, 5400, 5399], [5271, 5272, 5400], [5272, 5401, 5400], [5272, 5273, 5402], [5272, 5402, 5401], [5273, 5274, 5402], [5274, 5403, 5402], [5274, 5275, 5404], [5274, 5404, 5403], [5275, 5276, 5404], [5276, 5405, 5404], [5276, 5277, 5406], [5276, 5406, 5405], [5277, 5278, 5406], [5278, 5407, 
5406], [5278, 5279, 5408], [5278, 5408, 5407], [5279, 5280, 5408], [5280, 5409, 5408], [5280, 5281, 5410], [5280, 5410, 5409], [5281, 5282, 5410], [5282, 5411, 5410], [5282, 5283, 5412], [5282, 5412, 5411], [5283, 5284, 5412], [5284, 5413, 5412], [5284, 5285, 5414], [5284, 5414, 5413], [5285, 5286, 5414], [5286, 5415, 5414], [5286, 5287, 5416], [5286, 5416, 5415], [5287, 5288, 5416], [5288, 5417, 5416], [5289, 5290, 5418], [5290, 5419, 5418], [5290, 5291, 5420], [5290, 5420, 5419], [5291, 5292, 5420], [5292, 5421, 5420], [5292, 5293, 5422], [5292, 5422, 5421], [5293, 5294, 5422], [5294, 5423, 5422], [5294, 5295, 5424], [5294, 5424, 5423], [5295, 5296, 5424], [5296, 5425, 5424], [5296, 5297, 5426], [5296, 5426, 5425], [5297, 5298, 5426], [5298, 5427, 5426], [5298, 5299, 5428], [5298, 5428, 5427], [5299, 5300, 5428], [5300, 5429, 5428], [5300, 5301, 5430], [5300, 5430, 5429], [5301, 5302, 5430], [5302, 5431, 5430], [5302, 5303, 5432], [5302, 5432, 5431], [5303, 5304, 5432], [5304, 5433, 5432], [5304, 5305, 5434], [5304, 5434, 5433], [5305, 5306, 5434], [5306, 5435, 5434], [5306, 5307, 5436], [5306, 5436, 5435], [5307, 5308, 5436], [5308, 5437, 5436], [5308, 5309, 5438], [5308, 5438, 5437], [5309, 5310, 5438], [5310, 5439, 5438], [5310, 5311, 5440], [5310, 5440, 5439], [5311, 5312, 5440], [5312, 5441, 5440], [5312, 5313, 5442], [5312, 5442, 5441], [5313, 5314, 5442], [5314, 5443, 5442], [5314, 5315, 5444], [5314, 5444, 5443], [5315, 5316, 5444], [5316, 5445, 5444], [5316, 5317, 5446], [5316, 5446, 5445], [5317, 5318, 5446], [5318, 5447, 5446], [5318, 5319, 5448], [5318, 5448, 5447], [5319, 5320, 5448], [5320, 5449, 5448], [5320, 5321, 5450], [5320, 5450, 5449], [5321, 5322, 5450], [5322, 5451, 5450], [5322, 5323, 5452], [5322, 5452, 5451], [5323, 5324, 5452], [5324, 5453, 5452], [5324, 5325, 5454], [5324, 5454, 5453], [5325, 5326, 5454], [5326, 5455, 5454], [5326, 5327, 5456], [5326, 5456, 5455], [5327, 5328, 5456], [5328, 5457, 5456], [5328, 5329, 5458], [5328, 5458, 5457], [5329, 5330, 5458], [5330, 5459, 5458], [5330, 5331, 5460], [5330, 5460, 5459], [5331, 5332, 5460], [5332, 5461, 5460], [5332, 5333, 5462], [5332, 5462, 5461], [5333, 5334, 5462], [5334, 5463, 5462], [5334, 5335, 5464], [5334, 5464, 5463], [5335, 5336, 5464], [5336, 5465, 5464], [5336, 5337, 5466], [5336, 5466, 5465], [5337, 5338, 5466], [5338, 5467, 5466], [5338, 5339, 5468], [5338, 5468, 5467], [5339, 5340, 5468], [5340, 5469, 5468], [5340, 5341, 5470], [5340, 5470, 5469], [5341, 5342, 5470], [5342, 5471, 5470], [5342, 5343, 5472], [5342, 5472, 5471], [5343, 5344, 5472], [5344, 5473, 5472], [5344, 5345, 5474], [5344, 5474, 5473], [5345, 5346, 5474], [5346, 5475, 5474], [5346, 5347, 5476], [5346, 5476, 5475], [5347, 5348, 5476], [5348, 5477, 5476], [5348, 5349, 5478], [5348, 5478, 5477], [5349, 5350, 5478], [5350, 5479, 5478], [5350, 5351, 5480], [5350, 5480, 5479], [5351, 5352, 5480], [5352, 5481, 5480], [5352, 5353, 5482], [5352, 5482, 5481], [5353, 5354, 5482], [5354, 5483, 5482], [5354, 5355, 5484], [5354, 5484, 5483], [5355, 5356, 5484], [5356, 5485, 5484], [5356, 5357, 5486], [5356, 5486, 5485], [5357, 5358, 5486], [5358, 5487, 5486], [5358, 5359, 5488], [5358, 5488, 5487], [5359, 5360, 5488], [5360, 5489, 5488], [5360, 5361, 5490], [5360, 5490, 5489], [5361, 5362, 5490], [5362, 5491, 5490], [5362, 5363, 5492], [5362, 5492, 5491], [5363, 5364, 5492], [5364, 5493, 5492], [5364, 5365, 5494], [5364, 5494, 5493], [5365, 5366, 5494], [5366, 5495, 5494], [5366, 5367, 5496], [5366, 5496, 5495], [5367, 5368, 5496], [5368, 
5497, 5496], [5368, 5369, 5498], [5368, 5498, 5497], [5369, 5370, 5498], [5370, 5499, 5498], [5370, 5371, 5500], [5370, 5500, 5499], [5371, 5372, 5500], [5372, 5501, 5500], [5372, 5373, 5502], [5372, 5502, 5501], [5373, 5374, 5502], [5374, 5503, 5502], [5374, 5375, 5504], [5374, 5504, 5503], [5375, 5376, 5504], [5376, 5505, 5504], [5376, 5377, 5506], [5376, 5506, 5505], [5377, 5378, 5506], [5378, 5507, 5506], [5378, 5379, 5508], [5378, 5508, 5507], [5379, 5380, 5508], [5380, 5509, 5508], [5380, 5381, 5510], [5380, 5510, 5509], [5381, 5382, 5510], [5382, 5511, 5510], [5382, 5383, 5512], [5382, 5512, 5511], [5383, 5384, 5512], [5384, 5513, 5512], [5384, 5385, 5514], [5384, 5514, 5513], [5385, 5386, 5514], [5386, 5515, 5514], [5386, 5387, 5516], [5386, 5516, 5515], [5387, 5388, 5516], [5388, 5517, 5516], [5388, 5389, 5518], [5388, 5518, 5517], [5389, 5390, 5518], [5390, 5519, 5518], [5390, 5391, 5520], [5390, 5520, 5519], [5391, 5392, 5520], [5392, 5521, 5520], [5392, 5393, 5522], [5392, 5522, 5521], [5393, 5394, 5522], [5394, 5523, 5522], [5394, 5395, 5524], [5394, 5524, 5523], [5395, 5396, 5524], [5396, 5525, 5524], [5396, 5397, 5526], [5396, 5526, 5525], [5397, 5398, 5526], [5398, 5527, 5526], [5398, 5399, 5528], [5398, 5528, 5527], [5399, 5400, 5528], [5400, 5529, 5528], [5400, 5401, 5530], [5400, 5530, 5529], [5401, 5402, 5530], [5402, 5531, 5530], [5402, 5403, 5532], [5402, 5532, 5531], [5403, 5404, 5532], [5404, 5533, 5532], [5404, 5405, 5534], [5404, 5534, 5533], [5405, 5406, 5534], [5406, 5535, 5534], [5406, 5407, 5536], [5406, 5536, 5535], [5407, 5408, 5536], [5408, 5537, 5536], [5408, 5409, 5538], [5408, 5538, 5537], [5409, 5410, 5538], [5410, 5539, 5538], [5410, 5411, 5540], [5410, 5540, 5539], [5411, 5412, 5540], [5412, 5541, 5540], [5412, 5413, 5542], [5412, 5542, 5541], [5413, 5414, 5542], [5414, 5543, 5542], [5414, 5415, 5544], [5414, 5544, 5543], [5415, 5416, 5544], [5416, 5545, 5544], [5416, 5417, 5546], [5416, 5546, 5545], [5418, 5419, 5548], [5418, 5548, 5547], [5419, 5420, 5548], [5420, 5549, 5548], [5420, 5421, 5550], [5420, 5550, 5549], [5421, 5422, 5550], [5422, 5551, 5550], [5422, 5423, 5552], [5422, 5552, 5551], [5423, 5424, 5552], [5424, 5553, 5552], [5424, 5425, 5554], [5424, 5554, 5553], [5425, 5426, 5554], [5426, 5555, 5554], [5426, 5427, 5556], [5426, 5556, 5555], [5427, 5428, 5556], [5428, 5557, 5556], [5428, 5429, 5558], [5428, 5558, 5557], [5429, 5430, 5558], [5430, 5559, 5558], [5430, 5431, 5560], [5430, 5560, 5559], [5431, 5432, 5560], [5432, 5561, 5560], [5432, 5433, 5562], [5432, 5562, 5561], [5433, 5434, 5562], [5434, 5563, 5562], [5434, 5435, 5564], [5434, 5564, 5563], [5435, 5436, 5564], [5436, 5565, 5564], [5436, 5437, 5566], [5436, 5566, 5565], [5437, 5438, 5566], [5438, 5567, 5566], [5438, 5439, 5568], [5438, 5568, 5567], [5439, 5440, 5568], [5440, 5569, 5568], [5440, 5441, 5570], [5440, 5570, 5569], [5441, 5442, 5570], [5442, 5571, 5570], [5442, 5443, 5572], [5442, 5572, 5571], [5443, 5444, 5572], [5444, 5573, 5572], [5444, 5445, 5574], [5444, 5574, 5573], [5445, 5446, 5574], [5446, 5575, 5574], [5446, 5447, 5576], [5446, 5576, 5575], [5447, 5448, 5576], [5448, 5577, 5576], [5448, 5449, 5578], [5448, 5578, 5577], [5449, 5450, 5578], [5450, 5579, 5578], [5450, 5451, 5580], [5450, 5580, 5579], [5451, 5452, 5580], [5452, 5581, 5580], [5452, 5453, 5582], [5452, 5582, 5581], [5453, 5454, 5582], [5454, 5583, 5582], [5454, 5455, 5584], [5454, 5584, 5583], [5455, 5456, 5584], [5456, 5585, 5584], [5456, 5457, 5586], [5456, 5586, 5585], [5457, 5458, 5586], 
[5458, 5587, 5586], [5458, 5459, 5588], [5458, 5588, 5587], [5459, 5460, 5588], [5460, 5589, 5588], [5460, 5461, 5590], [5460, 5590, 5589], [5461, 5462, 5590], [5462, 5591, 5590], [5462, 5463, 5592], [5462, 5592, 5591], [5463, 5464, 5592], [5464, 5593, 5592], [5464, 5465, 5594], [5464, 5594, 5593], [5465, 5466, 5594], [5466, 5595, 5594], [5466, 5467, 5596], [5466, 5596, 5595], [5467, 5468, 5596], [5468, 5597, 5596], [5468, 5469, 5598], [5468, 5598, 5597], [5469, 5470, 5598], [5470, 5599, 5598], [5470, 5471, 5600], [5470, 5600, 5599], [5471, 5472, 5600], [5472, 5601, 5600], [5472, 5473, 5602], [5472, 5602, 5601], [5473, 5474, 5602], [5474, 5603, 5602], [5474, 5475, 5604], [5474, 5604, 5603], [5475, 5476, 5604], [5476, 5605, 5604], [5476, 5477, 5606], [5476, 5606, 5605], [5477, 5478, 5606], [5478, 5607, 5606], [5478, 5479, 5608], [5478, 5608, 5607], [5479, 5480, 5608], [5480, 5609, 5608], [5480, 5481, 5610], [5480, 5610, 5609], [5481, 5482, 5610], [5482, 5611, 5610], [5482, 5483, 5612], [5482, 5612, 5611], [5483, 5484, 5612], [5484, 5613, 5612], [5484, 5485, 5614], [5484, 5614, 5613], [5485, 5486, 5614], [5486, 5615, 5614], [5486, 5487, 5616], [5486, 5616, 5615], [5487, 5488, 5616], [5488, 5617, 5616], [5488, 5489, 5618], [5488, 5618, 5617], [5489, 5490, 5618], [5490, 5619, 5618], [5490, 5491, 5620], [5490, 5620, 5619], [5491, 5492, 5620], [5492, 5621, 5620], [5492, 5493, 5622], [5492, 5622, 5621], [5493, 5494, 5622], [5494, 5623, 5622], [5494, 5495, 5624], [5494, 5624, 5623], [5495, 5496, 5624], [5496, 5625, 5624], [5496, 5497, 5626], [5496, 5626, 5625], [5497, 5498, 5626], [5498, 5627, 5626], [5498, 5499, 5628], [5498, 5628, 5627], [5499, 5500, 5628], [5500, 5629, 5628], [5500, 5501, 5630], [5500, 5630, 5629], [5501, 5502, 5630], [5502, 5631, 5630], [5502, 5503, 5632], [5502, 5632, 5631], [5503, 5504, 5632], [5504, 5633, 5632], [5504, 5505, 5634], [5504, 5634, 5633], [5505, 5506, 5634], [5506, 5635, 5634], [5506, 5507, 5636], [5506, 5636, 5635], [5507, 5508, 5636], [5508, 5637, 5636], [5508, 5509, 5638], [5508, 5638, 5637], [5509, 5510, 5638], [5510, 5639, 5638], [5510, 5511, 5640], [5510, 5640, 5639], [5511, 5512, 5640], [5512, 5641, 5640], [5512, 5513, 5642], [5512, 5642, 5641], [5513, 5514, 5642], [5514, 5643, 5642], [5514, 5515, 5644], [5514, 5644, 5643], [5515, 5516, 5644], [5516, 5645, 5644], [5516, 5517, 5646], [5516, 5646, 5645], [5517, 5518, 5646], [5518, 5647, 5646], [5518, 5519, 5648], [5518, 5648, 5647], [5519, 5520, 5648], [5520, 5649, 5648], [5520, 5521, 5650], [5520, 5650, 5649], [5521, 5522, 5650], [5522, 5651, 5650], [5522, 5523, 5652], [5522, 5652, 5651], [5523, 5524, 5652], [5524, 5653, 5652], [5524, 5525, 5654], [5524, 5654, 5653], [5525, 5526, 5654], [5526, 5655, 5654], [5526, 5527, 5656], [5526, 5656, 5655], [5527, 5528, 5656], [5528, 5657, 5656], [5528, 5529, 5658], [5528, 5658, 5657], [5529, 5530, 5658], [5530, 5659, 5658], [5530, 5531, 5660], [5530, 5660, 5659], [5531, 5532, 5660], [5532, 5661, 5660], [5532, 5533, 5662], [5532, 5662, 5661], [5533, 5534, 5662], [5534, 5663, 5662], [5534, 5535, 5664], [5534, 5664, 5663], [5535, 5536, 5664], [5536, 5665, 5664], [5536, 5537, 5666], [5536, 5666, 5665], [5537, 5538, 5666], [5538, 5667, 5666], [5538, 5539, 5668], [5538, 5668, 5667], [5539, 5540, 5668], [5540, 5669, 5668], [5540, 5541, 5670], [5540, 5670, 5669], [5541, 5542, 5670], [5542, 5671, 5670], [5542, 5543, 5672], [5542, 5672, 5671], [5543, 5544, 5672], [5544, 5673, 5672], [5544, 5545, 5674], [5544, 5674, 5673], [5545, 5546, 5674], [5546, 5675, 5674], [5547, 5548, 
5676], [5548, 5677, 5676], [5548, 5549, 5678], [5548, 5678, 5677], [5549, 5550, 5678], [5550, 5679, 5678], [5550, 5551, 5680], [5550, 5680, 5679], [5551, 5552, 5680], [5552, 5681, 5680], [5552, 5553, 5682], [5552, 5682, 5681], [5553, 5554, 5682], [5554, 5683, 5682], [5554, 5555, 5684], [5554, 5684, 5683], [5555, 5556, 5684], [5556, 5685, 5684], [5556, 5557, 5686], [5556, 5686, 5685], [5557, 5558, 5686], [5558, 5687, 5686], [5558, 5559, 5688], [5558, 5688, 5687], [5559, 5560, 5688], [5560, 5689, 5688], [5560, 5561, 5690], [5560, 5690, 5689], [5561, 5562, 5690], [5562, 5691, 5690], [5562, 5563, 5692], [5562, 5692, 5691], [5563, 5564, 5692], [5564, 5693, 5692], [5564, 5565, 5694], [5564, 5694, 5693], [5565, 5566, 5694], [5566, 5695, 5694], [5566, 5567, 5696], [5566, 5696, 5695], [5567, 5568, 5696], [5568, 5697, 5696], [5568, 5569, 5698], [5568, 5698, 5697], [5569, 5570, 5698], [5570, 5699, 5698], [5570, 5571, 5700], [5570, 5700, 5699], [5571, 5572, 5700], [5572, 5701, 5700], [5572, 5573, 5702], [5572, 5702, 5701], [5573, 5574, 5702], [5574, 5703, 5702], [5574, 5575, 5704], [5574, 5704, 5703], [5575, 5576, 5704], [5576, 5705, 5704], [5576, 5577, 5706], [5576, 5706, 5705], [5577, 5578, 5706], [5578, 5707, 5706], [5578, 5579, 5708], [5578, 5708, 5707], [5579, 5580, 5708], [5580, 5709, 5708], [5580, 5581, 5710], [5580, 5710, 5709], [5581, 5582, 5710], [5582, 5711, 5710], [5582, 5583, 5712], [5582, 5712, 5711], [5583, 5584, 5712], [5584, 5713, 5712], [5584, 5585, 5714], [5584, 5714, 5713], [5585, 5586, 5714], [5586, 5715, 5714], [5586, 5587, 5716], [5586, 5716, 5715], [5587, 5588, 5716], [5588, 5717, 5716], [5588, 5589, 5718], [5588, 5718, 5717], [5589, 5590, 5718], [5590, 5719, 5718], [5590, 5591, 5720], [5590, 5720, 5719], [5591, 5592, 5720], [5592, 5721, 5720], [5592, 5593, 5722], [5592, 5722, 5721], [5593, 5594, 5722], [5594, 5723, 5722], [5594, 5595, 5724], [5594, 5724, 5723], [5595, 5596, 5724], [5596, 5725, 5724], [5596, 5597, 5726], [5596, 5726, 5725], [5597, 5598, 5726], [5598, 5727, 5726], [5598, 5599, 5728], [5598, 5728, 5727], [5599, 5600, 5728], [5600, 5729, 5728], [5600, 5601, 5730], [5600, 5730, 5729], [5601, 5602, 5730], [5602, 5731, 5730], [5602, 5603, 5732], [5602, 5732, 5731], [5603, 5604, 5732], [5604, 5733, 5732], [5604, 5605, 5734], [5604, 5734, 5733], [5605, 5606, 5734], [5606, 5735, 5734], [5606, 5607, 5736], [5606, 5736, 5735], [5607, 5608, 5736], [5608, 5737, 5736], [5608, 5609, 5738], [5608, 5738, 5737], [5609, 5610, 5738], [5610, 5739, 5738], [5610, 5611, 5740], [5610, 5740, 5739], [5611, 5612, 5740], [5612, 5741, 5740], [5612, 5613, 5742], [5612, 5742, 5741], [5613, 5614, 5742], [5614, 5743, 5742], [5614, 5615, 5744], [5614, 5744, 5743], [5615, 5616, 5744], [5616, 5745, 5744], [5616, 5617, 5746], [5616, 5746, 5745], [5617, 5618, 5746], [5618, 5747, 5746], [5618, 5619, 5748], [5618, 5748, 5747], [5619, 5620, 5748], [5620, 5749, 5748], [5620, 5621, 5750], [5620, 5750, 5749], [5621, 5622, 5750], [5622, 5751, 5750], [5622, 5623, 5752], [5622, 5752, 5751], [5623, 5624, 5752], [5624, 5753, 5752], [5624, 5625, 5754], [5624, 5754, 5753], [5625, 5626, 5754], [5626, 5755, 5754], [5626, 5627, 5756], [5626, 5756, 5755], [5627, 5628, 5756], [5628, 5757, 5756], [5628, 5629, 5758], [5628, 5758, 5757], [5629, 5630, 5758], [5630, 5759, 5758], [5630, 5631, 5760], [5630, 5760, 5759], [5631, 5632, 5760], [5632, 5761, 5760], [5632, 5633, 5762], [5632, 5762, 5761], [5633, 5634, 5762], [5634, 5763, 5762], [5634, 5635, 5764], [5634, 5764, 5763], [5635, 5636, 5764], [5636, 5765, 5764], [5636, 
5637, 5766], [5636, 5766, 5765], [5637, 5638, 5766], [5638, 5767, 5766], [5638, 5639, 5768], [5638, 5768, 5767], [5639, 5640, 5768], [5640, 5769, 5768], [5640, 5641, 5770], [5640, 5770, 5769], [5641, 5642, 5770], [5642, 5771, 5770], [5642, 5643, 5772], [5642, 5772, 5771], [5643, 5644, 5772], [5644, 5773, 5772], [5644, 5645, 5774], [5644, 5774, 5773], [5645, 5646, 5774], [5646, 5775, 5774], [5646, 5647, 5776], [5646, 5776, 5775], [5647, 5648, 5776], [5648, 5777, 5776], [5648, 5649, 5778], [5648, 5778, 5777], [5649, 5650, 5778], [5650, 5779, 5778], [5650, 5651, 5780], [5650, 5780, 5779], [5651, 5652, 5780], [5652, 5781, 5780], [5652, 5653, 5782], [5652, 5782, 5781], [5653, 5654, 5782], [5654, 5783, 5782], [5654, 5655, 5784], [5654, 5784, 5783], [5655, 5656, 5784], [5656, 5785, 5784], [5656, 5657, 5786], [5656, 5786, 5785], [5657, 5658, 5786], [5658, 5787, 5786], [5658, 5659, 5788], [5658, 5788, 5787], [5659, 5660, 5788], [5660, 5789, 5788], [5660, 5661, 5790], [5660, 5790, 5789], [5661, 5662, 5790], [5662, 5791, 5790], [5662, 5663, 5792], [5662, 5792, 5791], [5663, 5664, 5792], [5664, 5793, 5792], [5664, 5665, 5794], [5664, 5794, 5793], [5665, 5666, 5794], [5666, 5795, 5794], [5666, 5667, 5796], [5666, 5796, 5795], [5667, 5668, 5796], [5668, 5797, 5796], [5668, 5669, 5798], [5668, 5798, 5797], [5669, 5670, 5798], [5670, 5799, 5798], [5670, 5671, 5800], [5670, 5800, 5799], [5671, 5672, 5800], [5672, 5801, 5800], [5672, 5673, 5802], [5672, 5802, 5801], [5673, 5674, 5802], [5674, 5803, 5802], [5674, 5675, 5804], [5674, 5804, 5803], [5676, 5677, 5806], [5676, 5806, 5805], [5677, 5678, 5806], [5678, 5807, 5806], [5678, 5679, 5808], [5678, 5808, 5807], [5679, 5680, 5808], [5680, 5809, 5808], [5680, 5681, 5810], [5680, 5810, 5809], [5681, 5682, 5810], [5682, 5811, 5810], [5682, 5683, 5812], [5682, 5812, 5811], [5683, 5684, 5812], [5684, 5813, 5812], [5684, 5685, 5814], [5684, 5814, 5813], [5685, 5686, 5814], [5686, 5815, 5814], [5686, 5687, 5816], [5686, 5816, 5815], [5687, 5688, 5816], [5688, 5817, 5816], [5688, 5689, 5818], [5688, 5818, 5817], [5689, 5690, 5818], [5690, 5819, 5818], [5690, 5691, 5820], [5690, 5820, 5819], [5691, 5692, 5820], [5692, 5821, 5820], [5692, 5693, 5822], [5692, 5822, 5821], [5693, 5694, 5822], [5694, 5823, 5822], [5694, 5695, 5824], [5694, 5824, 5823], [5695, 5696, 5824], [5696, 5825, 5824], [5696, 5697, 5826], [5696, 5826, 5825], [5697, 5698, 5826], [5698, 5827, 5826], [5698, 5699, 5828], [5698, 5828, 5827], [5699, 5700, 5828], [5700, 5829, 5828], [5700, 5701, 5830], [5700, 5830, 5829], [5701, 5702, 5830], [5702, 5831, 5830], [5702, 5703, 5832], [5702, 5832, 5831], [5703, 5704, 5832], [5704, 5833, 5832], [5704, 5705, 5834], [5704, 5834, 5833], [5705, 5706, 5834], [5706, 5835, 5834], [5706, 5707, 5836], [5706, 5836, 5835], [5707, 5708, 5836], [5708, 5837, 5836], [5708, 5709, 5838], [5708, 5838, 5837], [5709, 5710, 5838], [5710, 5839, 5838], [5710, 5711, 5840], [5710, 5840, 5839], [5711, 5712, 5840], [5712, 5841, 5840], [5712, 5713, 5842], [5712, 5842, 5841], [5713, 5714, 5842], [5714, 5843, 5842], [5714, 5715, 5844], [5714, 5844, 5843], [5715, 5716, 5844], [5716, 5845, 5844], [5716, 5717, 5846], [5716, 5846, 5845], [5717, 5718, 5846], [5718, 5847, 5846], [5718, 5719, 5848], [5718, 5848, 5847], [5719, 5720, 5848], [5720, 5849, 5848], [5720, 5721, 5850], [5720, 5850, 5849], [5721, 5722, 5850], [5722, 5851, 5850], [5722, 5723, 5852], [5722, 5852, 5851], [5723, 5724, 5852], [5724, 5853, 5852], [5724, 5725, 5854], [5724, 5854, 5853], [5725, 5726, 5854], [5726, 5855, 5854], 
[5726, 5727, 5856], [5726, 5856, 5855], [5727, 5728, 5856], [5728, 5857, 5856], [5728, 5729, 5858], [5728, 5858, 5857], [5729, 5730, 5858], [5730, 5859, 5858], [5730, 5731, 5860], [5730, 5860, 5859], [5731, 5732, 5860], [5732, 5861, 5860], [5732, 5733, 5862], [5732, 5862, 5861], [5733, 5734, 5862], [5734, 5863, 5862], [5734, 5735, 5864], [5734, 5864, 5863], [5735, 5736, 5864], [5736, 5865, 5864], [5736, 5737, 5866], [5736, 5866, 5865], [5737, 5738, 5866], [5738, 5867, 5866], [5738, 5739, 5868], [5738, 5868, 5867], [5739, 5740, 5868], [5740, 5869, 5868], [5740, 5741, 5870], [5740, 5870, 5869], [5741, 5742, 5870], [5742, 5871, 5870], [5742, 5743, 5872], [5742, 5872, 5871], [5743, 5744, 5872], [5744, 5873, 5872], [5744, 5745, 5874], [5744, 5874, 5873], [5745, 5746, 5874], [5746, 5875, 5874], [5746, 5747, 5876], [5746, 5876, 5875], [5747, 5748, 5876], [5748, 5877, 5876], [5748, 5749, 5878], [5748, 5878, 5877], [5749, 5750, 5878], [5750, 5879, 5878], [5750, 5751, 5880], [5750, 5880, 5879], [5751, 5752, 5880], [5752, 5881, 5880], [5752, 5753, 5882], [5752, 5882, 5881], [5753, 5754, 5882], [5754, 5883, 5882], [5754, 5755, 5884], [5754, 5884, 5883], [5755, 5756, 5884], [5756, 5885, 5884], [5756, 5757, 5886], [5756, 5886, 5885], [5757, 5758, 5886], [5758, 5887, 5886], [5758, 5759, 5888], [5758, 5888, 5887], [5759, 5760, 5888], [5760, 5889, 5888], [5760, 5761, 5890], [5760, 5890, 5889], [5761, 5762, 5890], [5762, 5891, 5890], [5762, 5763, 5892], [5762, 5892, 5891], [5763, 5764, 5892], [5764, 5893, 5892], [5764, 5765, 5894], [5764, 5894, 5893], [5765, 5766, 5894], [5766, 5895, 5894], [5766, 5767, 5896], [5766, 5896, 5895], [5767, 5768, 5896], [5768, 5897, 5896], [5768, 5769, 5898], [5768, 5898, 5897], [5769, 5770, 5898], [5770, 5899, 5898], [5770, 5771, 5900], [5770, 5900, 5899], [5771, 5772, 5900], [5772, 5901, 5900], [5772, 5773, 5902], [5772, 5902, 5901], [5773, 5774, 5902], [5774, 5903, 5902], [5774, 5775, 5904], [5774, 5904, 5903], [5775, 5776, 5904], [5776, 5905, 5904], [5776, 5777, 5906], [5776, 5906, 5905], [5777, 5778, 5906], [5778, 5907, 5906], [5778, 5779, 5908], [5778, 5908, 5907], [5779, 5780, 5908], [5780, 5909, 5908], [5780, 5781, 5910], [5780, 5910, 5909], [5781, 5782, 5910], [5782, 5911, 5910], [5782, 5783, 5912], [5782, 5912, 5911], [5783, 5784, 5912], [5784, 5913, 5912], [5784, 5785, 5914], [5784, 5914, 5913], [5785, 5786, 5914], [5786, 5915, 5914], [5786, 5787, 5916], [5786, 5916, 5915], [5787, 5788, 5916], [5788, 5917, 5916], [5788, 5789, 5918], [5788, 5918, 5917], [5789, 5790, 5918], [5790, 5919, 5918], [5790, 5791, 5920], [5790, 5920, 5919], [5791, 5792, 5920], [5792, 5921, 5920], [5792, 5793, 5922], [5792, 5922, 5921], [5793, 5794, 5922], [5794, 5923, 5922], [5794, 5795, 5924], [5794, 5924, 5923], [5795, 5796, 5924], [5796, 5925, 5924], [5796, 5797, 5926], [5796, 5926, 5925], [5797, 5798, 5926], [5798, 5927, 5926], [5798, 5799, 5928], [5798, 5928, 5927], [5799, 5800, 5928], [5800, 5929, 5928], [5800, 5801, 5930], [5800, 5930, 5929], [5801, 5802, 5930], [5802, 5931, 5930], [5802, 5803, 5932], [5802, 5932, 5931], [5803, 5804, 5932], [5804, 5933, 5932], [5805, 5806, 5934], [5806, 5935, 5934], [5806, 5807, 5936], [5806, 5936, 5935], [5807, 5808, 5936], [5808, 5937, 5936], [5808, 5809, 5938], [5808, 5938, 5937], [5809, 5810, 5938], [5810, 5939, 5938], [5810, 5811, 5940], [5810, 5940, 5939], [5811, 5812, 5940], [5812, 5941, 5940], [5812, 5813, 5942], [5812, 5942, 5941], [5813, 5814, 5942], [5814, 5943, 5942], [5814, 5815, 5944], [5814, 5944, 5943], [5815, 5816, 5944], [5816, 5945, 
5944], [5816, 5817, 5946], [5816, 5946, 5945], [5817, 5818, 5946], [5818, 5947, 5946], [5818, 5819, 5948], [5818, 5948, 5947], [5819, 5820, 5948], [5820, 5949, 5948], [5820, 5821, 5950], [5820, 5950, 5949], [5821, 5822, 5950], [5822, 5951, 5950], [5822, 5823, 5952], [5822, 5952, 5951], [5823, 5824, 5952], [5824, 5953, 5952], [5824, 5825, 5954], [5824, 5954, 5953], [5825, 5826, 5954], [5826, 5955, 5954], [5826, 5827, 5956], [5826, 5956, 5955], [5827, 5828, 5956], [5828, 5957, 5956], [5828, 5829, 5958], [5828, 5958, 5957], [5829, 5830, 5958], [5830, 5959, 5958], [5830, 5831, 5960], [5830, 5960, 5959], [5831, 5832, 5960], [5832, 5961, 5960], [5832, 5833, 5962], [5832, 5962, 5961], [5833, 5834, 5962], [5834, 5963, 5962], [5834, 5835, 5964], [5834, 5964, 5963], [5835, 5836, 5964], [5836, 5965, 5964], [5836, 5837, 5966], [5836, 5966, 5965], [5837, 5838, 5966], [5838, 5967, 5966], [5838, 5839, 5968], [5838, 5968, 5967], [5839, 5840, 5968], [5840, 5969, 5968], [5840, 5841, 5970], [5840, 5970, 5969], [5841, 5842, 5970], [5842, 5971, 5970], [5842, 5843, 5972], [5842, 5972, 5971], [5843, 5844, 5972], [5844, 5973, 5972], [5844, 5845, 5974], [5844, 5974, 5973], [5845, 5846, 5974], [5846, 5975, 5974], [5846, 5847, 5976], [5846, 5976, 5975], [5847, 5848, 5976], [5848, 5977, 5976], [5848, 5849, 5978], [5848, 5978, 5977], [5849, 5850, 5978], [5850, 5979, 5978], [5850, 5851, 5980], [5850, 5980, 5979], [5851, 5852, 5980], [5852, 5981, 5980], [5852, 5853, 5982], [5852, 5982, 5981], [5853, 5854, 5982], [5854, 5983, 5982], [5854, 5855, 5984], [5854, 5984, 5983], [5855, 5856, 5984], [5856, 5985, 5984], [5856, 5857, 5986], [5856, 5986, 5985], [5857, 5858, 5986], [5858, 5987, 5986], [5858, 5859, 5988], [5858, 5988, 5987], [5859, 5860, 5988], [5860, 5989, 5988], [5860, 5861, 5990], [5860, 5990, 5989], [5861, 5862, 5990], [5862, 5991, 5990], [5862, 5863, 5992], [5862, 5992, 5991], [5863, 5864, 5992], [5864, 5993, 5992], [5864, 5865, 5994], [5864, 5994, 5993], [5865, 5866, 5994], [5866, 5995, 5994], [5866, 5867, 5996], [5866, 5996, 5995], [5867, 5868, 5996], [5868, 5997, 5996], [5868, 5869, 5998], [5868, 5998, 5997], [5869, 5870, 5998], [5870, 5999, 5998], [5870, 5871, 6000], [5870, 6000, 5999], [5871, 5872, 6000], [5872, 6001, 6000], [5872, 5873, 6002], [5872, 6002, 6001], [5873, 5874, 6002], [5874, 6003, 6002], [5874, 5875, 6004], [5874, 6004, 6003], [5875, 5876, 6004], [5876, 6005, 6004], [5876, 5877, 6006], [5876, 6006, 6005], [5877, 5878, 6006], [5878, 6007, 6006], [5878, 5879, 6008], [5878, 6008, 6007], [5879, 5880, 6008], [5880, 6009, 6008], [5880, 5881, 6010], [5880, 6010, 6009], [5881, 5882, 6010], [5882, 6011, 6010], [5882, 5883, 6012], [5882, 6012, 6011], [5883, 5884, 6012], [5884, 6013, 6012], [5884, 5885, 6014], [5884, 6014, 6013], [5885, 5886, 6014], [5886, 6015, 6014], [5886, 5887, 6016], [5886, 6016, 6015], [5887, 5888, 6016], [5888, 6017, 6016], [5888, 5889, 6018], [5888, 6018, 6017], [5889, 5890, 6018], [5890, 6019, 6018], [5890, 5891, 6020], [5890, 6020, 6019], [5891, 5892, 6020], [5892, 6021, 6020], [5892, 5893, 6022], [5892, 6022, 6021], [5893, 5894, 6022], [5894, 6023, 6022], [5894, 5895, 6024], [5894, 6024, 6023], [5895, 5896, 6024], [5896, 6025, 6024], [5896, 5897, 6026], [5896, 6026, 6025], [5897, 5898, 6026], [5898, 6027, 6026], [5898, 5899, 6028], [5898, 6028, 6027], [5899, 5900, 6028], [5900, 6029, 6028], [5900, 5901, 6030], [5900, 6030, 6029], [5901, 5902, 6030], [5902, 6031, 6030], [5902, 5903, 6032], [5902, 6032, 6031], [5903, 5904, 6032], [5904, 6033, 6032], [5904, 5905, 6034], [5904, 
6034, 6033], [5905, 5906, 6034], [5906, 6035, 6034], [5906, 5907, 6036], [5906, 6036, 6035], [5907, 5908, 6036], [5908, 6037, 6036], [5908, 5909, 6038], [5908, 6038, 6037], [5909, 5910, 6038], [5910, 6039, 6038], [5910, 5911, 6040], [5910, 6040, 6039], [5911, 5912, 6040], [5912, 6041, 6040], [5912, 5913, 6042], [5912, 6042, 6041], [5913, 5914, 6042], [5914, 6043, 6042], [5914, 5915, 6044], [5914, 6044, 6043], [5915, 5916, 6044], [5916, 6045, 6044], [5916, 5917, 6046], [5916, 6046, 6045], [5917, 5918, 6046], [5918, 6047, 6046], [5918, 5919, 6048], [5918, 6048, 6047], [5919, 5920, 6048], [5920, 6049, 6048], [5920, 5921, 6050], [5920, 6050, 6049], [5921, 5922, 6050], [5922, 6051, 6050], [5922, 5923, 6052], [5922, 6052, 6051], [5923, 5924, 6052], [5924, 6053, 6052], [5924, 5925, 6054], [5924, 6054, 6053], [5925, 5926, 6054], [5926, 6055, 6054], [5926, 5927, 6056], [5926, 6056, 6055], [5927, 5928, 6056], [5928, 6057, 6056], [5928, 5929, 6058], [5928, 6058, 6057], [5929, 5930, 6058], [5930, 6059, 6058], [5930, 5931, 6060], [5930, 6060, 6059], [5931, 5932, 6060], [5932, 6061, 6060], [5932, 5933, 6062], [5932, 6062, 6061], [5934, 5935, 6064], [5934, 6064, 6063], [5935, 5936, 6064], [5936, 6065, 6064], [5936, 5937, 6066], [5936, 6066, 6065], [5937, 5938, 6066], [5938, 6067, 6066], [5938, 5939, 6068], [5938, 6068, 6067], [5939, 5940, 6068], [5940, 6069, 6068], [5940, 5941, 6070], [5940, 6070, 6069], [5941, 5942, 6070], [5942, 6071, 6070], [5942, 5943, 6072], [5942, 6072, 6071], [5943, 5944, 6072], [5944, 6073, 6072], [5944, 5945, 6074], [5944, 6074, 6073], [5945, 5946, 6074], [5946, 6075, 6074], [5946, 5947, 6076], [5946, 6076, 6075], [5947, 5948, 6076], [5948, 6077, 6076], [5948, 5949, 6078], [5948, 6078, 6077], [5949, 5950, 6078], [5950, 6079, 6078], [5950, 5951, 6080], [5950, 6080, 6079], [5951, 5952, 6080], [5952, 6081, 6080], [5952, 5953, 6082], [5952, 6082, 6081], [5953, 5954, 6082], [5954, 6083, 6082], [5954, 5955, 6084], [5954, 6084, 6083], [5955, 5956, 6084], [5956, 6085, 6084], [5956, 5957, 6086], [5956, 6086, 6085], [5957, 5958, 6086], [5958, 6087, 6086], [5958, 5959, 6088], [5958, 6088, 6087], [5959, 5960, 6088], [5960, 6089, 6088], [5960, 5961, 6090], [5960, 6090, 6089], [5961, 5962, 6090], [5962, 6091, 6090], [5962, 5963, 6092], [5962, 6092, 6091], [5963, 5964, 6092], [5964, 6093, 6092], [5964, 5965, 6094], [5964, 6094, 6093], [5965, 5966, 6094], [5966, 6095, 6094], [5966, 5967, 6096], [5966, 6096, 6095], [5967, 5968, 6096], [5968, 6097, 6096], [5968, 5969, 6098], [5968, 6098, 6097], [5969, 5970, 6098], [5970, 6099, 6098], [5970, 5971, 6100], [5970, 6100, 6099], [5971, 5972, 6100], [5972, 6101, 6100], [5972, 5973, 6102], [5972, 6102, 6101], [5973, 5974, 6102], [5974, 6103, 6102], [5974, 5975, 6104], [5974, 6104, 6103], [5975, 5976, 6104], [5976, 6105, 6104], [5976, 5977, 6106], [5976, 6106, 6105], [5977, 5978, 6106], [5978, 6107, 6106], [5978, 5979, 6108], [5978, 6108, 6107], [5979, 5980, 6108], [5980, 6109, 6108], [5980, 5981, 6110], [5980, 6110, 6109], [5981, 5982, 6110], [5982, 6111, 6110], [5982, 5983, 6112], [5982, 6112, 6111], [5983, 5984, 6112], [5984, 6113, 6112], [5984, 5985, 6114], [5984, 6114, 6113], [5985, 5986, 6114], [5986, 6115, 6114], [5986, 5987, 6116], [5986, 6116, 6115], [5987, 5988, 6116], [5988, 6117, 6116], [5988, 5989, 6118], [5988, 6118, 6117], [5989, 5990, 6118], [5990, 6119, 6118], [5990, 5991, 6120], [5990, 6120, 6119], [5991, 5992, 6120], [5992, 6121, 6120], [5992, 5993, 6122], [5992, 6122, 6121], [5993, 5994, 6122], [5994, 6123, 6122], [5994, 5995, 6124], 
[5994, 6124, 6123], [5995, 5996, 6124], [5996, 6125, 6124], [5996, 5997, 6126], [5996, 6126, 6125], [5997, 5998, 6126], [5998, 6127, 6126], [5998, 5999, 6128], [5998, 6128, 6127], [5999, 6000, 6128], [6000, 6129, 6128], [6000, 6001, 6130], [6000, 6130, 6129], [6001, 6002, 6130], [6002, 6131, 6130], [6002, 6003, 6132], [6002, 6132, 6131], [6003, 6004, 6132], [6004, 6133, 6132], [6004, 6005, 6134], [6004, 6134, 6133], [6005, 6006, 6134], [6006, 6135, 6134], [6006, 6007, 6136], [6006, 6136, 6135], [6007, 6008, 6136], [6008, 6137, 6136], [6008, 6009, 6138], [6008, 6138, 6137], [6009, 6010, 6138], [6010, 6139, 6138], [6010, 6011, 6140], [6010, 6140, 6139], [6011, 6012, 6140], [6012, 6141, 6140], [6012, 6013, 6142], [6012, 6142, 6141], [6013, 6014, 6142], [6014, 6143, 6142], [6014, 6015, 6144], [6014, 6144, 6143], [6015, 6016, 6144], [6016, 6145, 6144], [6016, 6017, 6146], [6016, 6146, 6145], [6017, 6018, 6146], [6018, 6147, 6146], [6018, 6019, 6148], [6018, 6148, 6147], [6019, 6020, 6148], [6020, 6149, 6148], [6020, 6021, 6150], [6020, 6150, 6149], [6021, 6022, 6150], [6022, 6151, 6150], [6022, 6023, 6152], [6022, 6152, 6151], [6023, 6024, 6152], [6024, 6153, 6152], [6024, 6025, 6154], [6024, 6154, 6153], [6025, 6026, 6154], [6026, 6155, 6154], [6026, 6027, 6156], [6026, 6156, 6155], [6027, 6028, 6156], [6028, 6157, 6156], [6028, 6029, 6158], [6028, 6158, 6157], [6029, 6030, 6158], [6030, 6159, 6158], [6030, 6031, 6160], [6030, 6160, 6159], [6031, 6032, 6160], [6032, 6161, 6160], [6032, 6033, 6162], [6032, 6162, 6161], [6033, 6034, 6162], [6034, 6163, 6162], [6036, 6037, 6164], [6037, 6038, 6164], [6038, 6165, 6164], [6038, 6039, 6166], [6038, 6166, 6165], [6039, 6040, 6166], [6040, 6167, 6166], [6040, 6041, 6168], [6040, 6168, 6167], [6041, 6042, 6168], [6042, 6169, 6168], [6042, 6043, 6170], [6042, 6170, 6169], [6043, 6044, 6170], [6044, 6171, 6170], [6044, 6045, 6172], [6044, 6172, 6171], [6045, 6046, 6172], [6046, 6173, 6172], [6046, 6047, 6174], [6046, 6174, 6173], [6047, 6048, 6174], [6048, 6175, 6174], [6048, 6049, 6176], [6048, 6176, 6175], [6049, 6050, 6176], [6050, 6177, 6176], [6050, 6051, 6178], [6050, 6178, 6177], [6051, 6052, 6178], [6052, 6179, 6178], [6052, 6053, 6180], [6052, 6180, 6179], [6053, 6054, 6180], [6054, 6181, 6180], [6054, 6055, 6182], [6054, 6182, 6181], [6055, 6056, 6182], [6056, 6183, 6182], [6056, 6057, 6184], [6056, 6184, 6183], [6057, 6058, 6184], [6058, 6185, 6184], [6058, 6059, 6186], [6058, 6186, 6185], [6059, 6060, 6186], [6060, 6187, 6186], [6060, 6061, 6188], [6060, 6188, 6187], [6061, 6062, 6188], [6062, 6189, 6188], [6063, 6064, 6190], [6064, 6191, 6190], [6064, 6065, 6192], [6064, 6192, 6191], [6065, 6066, 6192], [6066, 6193, 6192], [6066, 6067, 6194], [6066, 6194, 6193], [6067, 6068, 6194], [6068, 6195, 6194], [6068, 6069, 6196], [6068, 6196, 6195], [6069, 6070, 6196], [6070, 6197, 6196], [6070, 6071, 6198], [6070, 6198, 6197], [6071, 6072, 6198], [6072, 6199, 6198], [6072, 6073, 6200], [6072, 6200, 6199], [6073, 6074, 6200], [6074, 6201, 6200], [6074, 6075, 6202], [6074, 6202, 6201], [6075, 6076, 6202], [6076, 6203, 6202], [6076, 6077, 6204], [6076, 6204, 6203], [6077, 6078, 6204], [6078, 6205, 6204], [6078, 6079, 6206], [6078, 6206, 6205], [6079, 6080, 6206], [6080, 6207, 6206], [6080, 6081, 6208], [6080, 6208, 6207], [6081, 6082, 6208], [6082, 6209, 6208], [6082, 6083, 6210], [6082, 6210, 6209], [6083, 6084, 6210], [6084, 6211, 6210], [6084, 6085, 6212], [6084, 6212, 6211], [6085, 6086, 6212], [6086, 6213, 6212], [6086, 6087, 6214], [6086, 6214, 
6213], [6087, 6088, 6214], [6088, 6215, 6214], [6088, 6089, 6216], [6088, 6216, 6215], [6089, 6090, 6216], [6090, 6217, 6216], [6090, 6091, 6218], [6090, 6218, 6217], [6091, 6092, 6218], [6092, 6219, 6218], [6092, 6093, 6220], [6092, 6220, 6219], [6093, 6094, 6220], [6094, 6221, 6220], [6094, 6095, 6222], [6094, 6222, 6221], [6095, 6096, 6222], [6096, 6223, 6222], [6096, 6097, 6224], [6096, 6224, 6223], [6097, 6098, 6224], [6098, 6225, 6224], [6098, 6099, 6226], [6098, 6226, 6225], [6099, 6100, 6226], [6100, 6227, 6226], [6100, 6101, 6228], [6100, 6228, 6227], [6101, 6102, 6228], [6102, 6229, 6228], [6102, 6103, 6230], [6102, 6230, 6229], [6103, 6104, 6230], [6104, 6231, 6230], [6104, 6105, 6232], [6104, 6232, 6231], [6105, 6106, 6232], [6106, 6233, 6232], [6106, 6107, 6234], [6106, 6234, 6233], [6107, 6108, 6234], [6108, 6235, 6234], [6108, 6109, 6236], [6108, 6236, 6235], [6109, 6110, 6236], [6110, 6237, 6236], [6110, 6111, 6238], [6110, 6238, 6237], [6111, 6112, 6238], [6112, 6239, 6238], [6112, 6113, 6240], [6112, 6240, 6239], [6113, 6114, 6240], [6114, 6241, 6240], [6114, 6115, 6242], [6114, 6242, 6241], [6115, 6116, 6242], [6116, 6243, 6242], [6116, 6117, 6244], [6116, 6244, 6243], [6117, 6118, 6244], [6118, 6245, 6244], [6118, 6119, 6246], [6118, 6246, 6245], [6119, 6120, 6246], [6120, 6247, 6246], [6120, 6121, 6248], [6120, 6248, 6247], [6121, 6122, 6248], [6122, 6249, 6248], [6122, 6123, 6250], [6122, 6250, 6249], [6123, 6124, 6250], [6124, 6251, 6250], [6124, 6125, 6252], [6124, 6252, 6251], [6125, 6126, 6252], [6126, 6253, 6252], [6126, 6127, 6254], [6126, 6254, 6253], [6127, 6128, 6254], [6128, 6255, 6254], [6128, 6129, 6256], [6128, 6256, 6255], [6129, 6130, 6256], [6130, 6257, 6256], [6130, 6131, 6258], [6130, 6258, 6257], [6131, 6132, 6258], [6132, 6259, 6258], [6132, 6133, 6260], [6132, 6260, 6259], [6133, 6134, 6260], [6134, 6261, 6260], [6134, 6135, 6262], [6134, 6262, 6261], [6135, 6136, 6262], [6136, 6263, 6262], [6136, 6137, 6264], [6136, 6264, 6263], [6137, 6138, 6264], [6138, 6265, 6264], [6138, 6139, 6266], [6138, 6266, 6265], [6139, 6140, 6266], [6140, 6267, 6266], [6140, 6141, 6268], [6140, 6268, 6267], [6141, 6142, 6268], [6142, 6269, 6268], [6142, 6143, 6270], [6142, 6270, 6269], [6143, 6144, 6270], [6144, 6271, 6270], [6144, 6145, 6272], [6144, 6272, 6271], [6145, 6146, 6272], [6146, 6273, 6272], [6146, 6147, 6274], [6146, 6274, 6273], [6147, 6148, 6274], [6148, 6275, 6274], [6148, 6149, 6276], [6148, 6276, 6275], [6149, 6150, 6276], [6150, 6277, 6276], [6150, 6151, 6278], [6150, 6278, 6277], [6151, 6152, 6278], [6152, 6279, 6278], [6152, 6153, 6280], [6152, 6280, 6279], [6153, 6154, 6280], [6154, 6281, 6280], [6154, 6155, 6282], [6154, 6282, 6281], [6155, 6156, 6282], [6156, 6283, 6282], [6156, 6157, 6284], [6156, 6284, 6283], [6157, 6158, 6284], [6158, 6285, 6284], [6158, 6159, 6286], [6158, 6286, 6285], [6159, 6160, 6286], [6160, 6287, 6286], [6160, 6161, 6288], [6160, 6288, 6287], [6161, 6162, 6288], [6162, 6289, 6288], [6162, 6163, 6290], [6162, 6290, 6289], [6166, 6167, 6292], [6166, 6292, 6291], [6167, 6168, 6292], [6168, 6293, 6292], [6168, 6169, 6294], [6168, 6294, 6293], [6169, 6170, 6294], [6170, 6295, 6294], [6170, 6171, 6296], [6170, 6296, 6295], [6171, 6172, 6296], [6172, 6297, 6296], [6172, 6173, 6298], [6172, 6298, 6297], [6173, 6174, 6298], [6174, 6299, 6298], [6174, 6175, 6300], [6174, 6300, 6299], [6175, 6176, 6300], [6176, 6301, 6300], [6176, 6177, 6302], [6176, 6302, 6301], [6177, 6178, 6302], [6178, 6303, 6302], [6178, 6179, 6304], [6178, 
6304, 6303], [6179, 6180, 6304], [6180, 6305, 6304], [6180, 6181, 6306], [6180, 6306, 6305], [6181, 6182, 6306], [6182, 6307, 6306], [6182, 6183, 6308], [6182, 6308, 6307], [6183, 6184, 6308], [6184, 6309, 6308], [6184, 6185, 6310], [6184, 6310, 6309], [6185, 6186, 6310], [6186, 6311, 6310], [6186, 6187, 6312], [6186, 6312, 6311], [6187, 6188, 6312], [6188, 6313, 6312], [6188, 6189, 6314], [6188, 6314, 6313], [6190, 6191, 6316], [6190, 6316, 6315], [6191, 6192, 6316], [6192, 6317, 6316], [6192, 6193, 6318], [6192, 6318, 6317], [6193, 6194, 6318], [6194, 6319, 6318], [6194, 6195, 6320], [6194, 6320, 6319], [6195, 6196, 6320], [6196, 6321, 6320], [6196, 6197, 6322], [6196, 6322, 6321], [6197, 6198, 6322], [6198, 6323, 6322], [6198, 6199, 6324], [6198, 6324, 6323], [6199, 6200, 6324], [6200, 6325, 6324], [6200, 6201, 6326], [6200, 6326, 6325], [6201, 6202, 6326], [6202, 6327, 6326], [6202, 6203, 6328], [6202, 6328, 6327], [6203, 6204, 6328], [6204, 6329, 6328], [6204, 6205, 6330], [6204, 6330, 6329], [6205, 6206, 6330], [6206, 6331, 6330], [6206, 6207, 6332], [6206, 6332, 6331], [6207, 6208, 6332], [6208, 6333, 6332], [6208, 6209, 6334], [6208, 6334, 6333], [6209, 6210, 6334], [6210, 6335, 6334], [6210, 6211, 6336], [6210, 6336, 6335], [6211, 6212, 6336], [6212, 6337, 6336], [6212, 6213, 6338], [6212, 6338, 6337], [6213, 6214, 6338], [6214, 6339, 6338], [6214, 6215, 6340], [6214, 6340, 6339], [6215, 6216, 6340], [6216, 6341, 6340], [6216, 6217, 6342], [6216, 6342, 6341], [6217, 6218, 6342], [6218, 6343, 6342], [6218, 6219, 6344], [6218, 6344, 6343], [6219, 6220, 6344], [6220, 6345, 6344], [6220, 6221, 6346], [6220, 6346, 6345], [6221, 6222, 6346], [6222, 6347, 6346], [6222, 6223, 6348], [6222, 6348, 6347], [6223, 6224, 6348], [6224, 6349, 6348], [6224, 6225, 6350], [6224, 6350, 6349], [6225, 6226, 6350], [6226, 6351, 6350], [6226, 6227, 6352], [6226, 6352, 6351], [6227, 6228, 6352], [6228, 6353, 6352], [6228, 6229, 6354], [6228, 6354, 6353], [6229, 6230, 6354], [6230, 6355, 6354], [6230, 6231, 6356], [6230, 6356, 6355], [6231, 6232, 6356], [6232, 6357, 6356], [6232, 6233, 6358], [6232, 6358, 6357], [6233, 6234, 6358], [6234, 6359, 6358], [6234, 6235, 6360], [6234, 6360, 6359], [6235, 6236, 6360], [6236, 6361, 6360], [6236, 6237, 6362], [6236, 6362, 6361], [6237, 6238, 6362], [6238, 6363, 6362], [6238, 6239, 6364], [6238, 6364, 6363], [6239, 6240, 6364], [6240, 6365, 6364], [6240, 6241, 6366], [6240, 6366, 6365], [6241, 6242, 6366], [6242, 6367, 6366], [6242, 6243, 6368], [6242, 6368, 6367], [6243, 6244, 6368], [6244, 6369, 6368], [6244, 6245, 6370], [6244, 6370, 6369], [6245, 6246, 6370], [6246, 6371, 6370], [6246, 6247, 6372], [6246, 6372, 6371], [6247, 6248, 6372], [6248, 6373, 6372], [6248, 6249, 6374], [6248, 6374, 6373], [6249, 6250, 6374], [6250, 6375, 6374], [6250, 6251, 6376], [6250, 6376, 6375], [6251, 6252, 6376], [6252, 6377, 6376], [6252, 6253, 6378], [6252, 6378, 6377], [6253, 6254, 6378], [6254, 6379, 6378], [6254, 6255, 6380], [6254, 6380, 6379], [6255, 6256, 6380], [6256, 6381, 6380], [6256, 6257, 6382], [6256, 6382, 6381], [6257, 6258, 6382], [6258, 6383, 6382], [6258, 6259, 6384], [6258, 6384, 6383], [6259, 6260, 6384], [6260, 6385, 6384], [6260, 6261, 6386], [6260, 6386, 6385], [6261, 6262, 6386], [6262, 6387, 6386], [6262, 6263, 6388], [6262, 6388, 6387], [6263, 6264, 6388], [6264, 6389, 6388], [6264, 6265, 6390], [6264, 6390, 6389], [6265, 6266, 6390], [6266, 6391, 6390], [6266, 6267, 6392], [6266, 6392, 6391], [6267, 6268, 6392], [6268, 6393, 6392], [6268, 6269, 6394], 
[6268, 6394, 6393], [6269, 6270, 6394], [6270, 6395, 6394], [6270, 6271, 6396], [6270, 6396, 6395], [6271, 6272, 6396], [6272, 6397, 6396], [6272, 6273, 6398], [6272, 6398, 6397], [6273, 6274, 6398], [6274, 6399, 6398], [6274, 6275, 6400], [6274, 6400, 6399], [6275, 6276, 6400], [6276, 6401, 6400], [6276, 6277, 6402], [6276, 6402, 6401], [6277, 6278, 6402], [6278, 6403, 6402], [6278, 6279, 6404], [6278, 6404, 6403], [6279, 6280, 6404], [6280, 6405, 6404], [6280, 6281, 6406], [6280, 6406, 6405], [6281, 6282, 6406], [6282, 6407, 6406], [6282, 6283, 6408], [6282, 6408, 6407], [6283, 6284, 6408], [6284, 6409, 6408], [6284, 6285, 6410], [6284, 6410, 6409], [6285, 6286, 6410], [6286, 6411, 6410], [6286, 6287, 6412], [6286, 6412, 6411], [6287, 6288, 6412], [6288, 6413, 6412], [6291, 6292, 6414], [6292, 6415, 6414], [6292, 6293, 6416], [6292, 6416, 6415], [6293, 6294, 6416], [6294, 6417, 6416], [6294, 6295, 6418], [6294, 6418, 6417], [6295, 6296, 6418], [6296, 6419, 6418], [6296, 6297, 6420], [6296, 6420, 6419], [6297, 6298, 6420], [6298, 6421, 6420], [6298, 6299, 6422], [6298, 6422, 6421], [6299, 6300, 6422], [6300, 6423, 6422], [6300, 6301, 6424], [6300, 6424, 6423], [6301, 6302, 6424], [6302, 6425, 6424], [6302, 6303, 6426], [6302, 6426, 6425], [6303, 6304, 6426], [6304, 6427, 6426], [6304, 6305, 6428], [6304, 6428, 6427], [6305, 6306, 6428], [6306, 6429, 6428], [6306, 6307, 6430], [6306, 6430, 6429], [6307, 6308, 6430], [6308, 6431, 6430], [6308, 6309, 6432], [6308, 6432, 6431], [6309, 6310, 6432], [6310, 6433, 6432], [6310, 6311, 6434], [6310, 6434, 6433], [6311, 6312, 6434], [6312, 6435, 6434], [6312, 6313, 6436], [6312, 6436, 6435], [6313, 6314, 6436], [6314, 6437, 6436], [6315, 6316, 6438], [6316, 6439, 6438], [6316, 6317, 6440], [6316, 6440, 6439], [6317, 6318, 6440], [6318, 6441, 6440], [6318, 6319, 6442], [6318, 6442, 6441], [6319, 6320, 6442], [6320, 6443, 6442], [6320, 6321, 6444], [6320, 6444, 6443], [6321, 6322, 6444], [6322, 6445, 6444], [6322, 6323, 6446], [6322, 6446, 6445], [6323, 6324, 6446], [6324, 6447, 6446], [6324, 6325, 6448], [6324, 6448, 6447], [6325, 6326, 6448], [6326, 6449, 6448], [6326, 6327, 6450], [6326, 6450, 6449], [6327, 6328, 6450], [6328, 6451, 6450], [6328, 6329, 6452], [6328, 6452, 6451], [6329, 6330, 6452], [6330, 6453, 6452], [6330, 6331, 6454], [6330, 6454, 6453], [6331, 6332, 6454], [6332, 6455, 6454], [6332, 6333, 6456], [6332, 6456, 6455], [6333, 6334, 6456], [6334, 6457, 6456], [6334, 6335, 6458], [6334, 6458, 6457], [6335, 6336, 6458], [6336, 6459, 6458], [6336, 6337, 6460], [6336, 6460, 6459], [6337, 6338, 6460], [6338, 6461, 6460], [6338, 6339, 6462], [6338, 6462, 6461], [6339, 6340, 6462], [6340, 6463, 6462], [6340, 6341, 6464], [6340, 6464, 6463], [6341, 6342, 6464], [6342, 6465, 6464], [6342, 6343, 6466], [6342, 6466, 6465], [6343, 6344, 6466], [6344, 6467, 6466], [6344, 6345, 6468], [6344, 6468, 6467], [6345, 6346, 6468], [6346, 6469, 6468], [6346, 6347, 6470], [6346, 6470, 6469], [6347, 6348, 6470], [6348, 6471, 6470], [6348, 6349, 6472], [6348, 6472, 6471], [6349, 6350, 6472], [6350, 6473, 6472], [6350, 6351, 6474], [6350, 6474, 6473], [6351, 6352, 6474], [6352, 6475, 6474], [6352, 6353, 6476], [6352, 6476, 6475], [6353, 6354, 6476], [6354, 6477, 6476], [6354, 6355, 6478], [6354, 6478, 6477], [6355, 6356, 6478], [6356, 6479, 6478], [6356, 6357, 6480], [6356, 6480, 6479], [6357, 6358, 6480], [6358, 6481, 6480], [6358, 6359, 6482], [6358, 6482, 6481], [6359, 6360, 6482], [6360, 6483, 6482], [6360, 6361, 6484], [6360, 6484, 6483], [6361, 6362, 
6484], [6362, 6485, 6484], [6362, 6363, 6486], [6362, 6486, 6485], [6363, 6364, 6486], [6364, 6487, 6486], [6364, 6365, 6488], [6364, 6488, 6487], [6365, 6366, 6488], [6366, 6489, 6488], [6366, 6367, 6490], [6366, 6490, 6489], [6367, 6368, 6490], [6368, 6491, 6490], [6368, 6369, 6492], [6368, 6492, 6491], [6369, 6370, 6492], [6370, 6493, 6492], [6370, 6371, 6494], [6370, 6494, 6493], [6371, 6372, 6494], [6372, 6495, 6494], [6372, 6373, 6496], [6372, 6496, 6495], [6373, 6374, 6496], [6374, 6497, 6496], [6374, 6375, 6498], [6374, 6498, 6497], [6375, 6376, 6498], [6376, 6499, 6498], [6376, 6377, 6500], [6376, 6500, 6499], [6377, 6378, 6500], [6378, 6501, 6500], [6378, 6379, 6502], [6378, 6502, 6501], [6379, 6380, 6502], [6380, 6503, 6502], [6380, 6381, 6504], [6380, 6504, 6503], [6381, 6382, 6504], [6382, 6505, 6504], [6382, 6383, 6506], [6382, 6506, 6505], [6383, 6384, 6506], [6384, 6507, 6506], [6384, 6385, 6508], [6384, 6508, 6507], [6385, 6386, 6508], [6386, 6509, 6508], [6386, 6387, 6510], [6386, 6510, 6509], [6387, 6388, 6510], [6388, 6511, 6510], [6388, 6389, 6512], [6388, 6512, 6511], [6389, 6390, 6512], [6390, 6513, 6512], [6390, 6391, 6514], [6390, 6514, 6513], [6391, 6392, 6514], [6392, 6515, 6514], [6392, 6393, 6516], [6392, 6516, 6515], [6393, 6394, 6516], [6394, 6517, 6516], [6394, 6395, 6518], [6394, 6518, 6517], [6395, 6396, 6518], [6396, 6519, 6518], [6396, 6397, 6520], [6396, 6520, 6519], [6397, 6398, 6520], [6398, 6521, 6520], [6398, 6399, 6522], [6398, 6522, 6521], [6399, 6400, 6522], [6400, 6523, 6522], [6400, 6401, 6524], [6400, 6524, 6523], [6401, 6402, 6524], [6402, 6525, 6524], [6402, 6403, 6526], [6402, 6526, 6525], [6403, 6404, 6526], [6404, 6527, 6526], [6404, 6405, 6528], [6404, 6528, 6527], [6405, 6406, 6528], [6406, 6529, 6528], [6406, 6407, 6530], [6406, 6530, 6529], [6407, 6408, 6530], [6408, 6531, 6530], [6408, 6409, 6532], [6408, 6532, 6531], [6409, 6410, 6532], [6410, 6533, 6532], [6410, 6411, 6534], [6410, 6534, 6533], [6411, 6412, 6534], [6412, 6535, 6534], [6412, 6413, 6536], [6412, 6536, 6535], [6416, 6417, 6537], [6417, 6418, 6537], [6418, 6538, 6537], [6418, 6419, 6539], [6418, 6539, 6538], [6419, 6420, 6539], [6420, 6540, 6539], [6420, 6421, 6541], [6420, 6541, 6540], [6421, 6422, 6541], [6422, 6542, 6541], [6422, 6423, 6543], [6422, 6543, 6542], [6423, 6424, 6543], [6424, 6544, 6543], [6424, 6425, 6545], [6424, 6545, 6544], [6425, 6426, 6545], [6426, 6546, 6545], [6426, 6427, 6547], [6426, 6547, 6546], [6427, 6428, 6547], [6428, 6548, 6547], [6428, 6429, 6549], [6428, 6549, 6548], [6429, 6430, 6549], [6430, 6550, 6549], [6430, 6431, 6551], [6430, 6551, 6550], [6431, 6432, 6551], [6432, 6552, 6551], [6432, 6433, 6553], [6432, 6553, 6552], [6433, 6434, 6553], [6434, 6554, 6553], [6434, 6435, 6555], [6434, 6555, 6554], [6435, 6436, 6555], [6436, 6556, 6555], [6436, 6437, 6557], [6436, 6557, 6556], [6438, 6439, 6559], [6438, 6559, 6558], [6439, 6440, 6559], [6440, 6560, 6559], [6440, 6441, 6561], [6440, 6561, 6560], [6441, 6442, 6561], [6442, 6562, 6561], [6442, 6443, 6563], [6442, 6563, 6562], [6443, 6444, 6563], [6444, 6564, 6563], [6444, 6445, 6565], [6444, 6565, 6564], [6445, 6446, 6565], [6446, 6566, 6565], [6446, 6447, 6567], [6446, 6567, 6566], [6447, 6448, 6567], [6448, 6568, 6567], [6448, 6449, 6569], [6448, 6569, 6568], [6449, 6450, 6569], [6450, 6570, 6569], [6450, 6451, 6571], [6450, 6571, 6570], [6451, 6452, 6571], [6452, 6572, 6571], [6452, 6453, 6573], [6452, 6573, 6572], [6453, 6454, 6573], [6454, 6574, 6573], [6454, 6455, 6575], [6454, 
6575, 6574], [6455, 6456, 6575], [6456, 6576, 6575], [6456, 6457, 6577], [6456, 6577, 6576], [6457, 6458, 6577], [6458, 6578, 6577], [6458, 6459, 6579], [6458, 6579, 6578], [6459, 6460, 6579], [6460, 6580, 6579], [6460, 6461, 6581], [6460, 6581, 6580], [6461, 6462, 6581], [6462, 6582, 6581], [6462, 6463, 6583], [6462, 6583, 6582], [6463, 6464, 6583], [6464, 6584, 6583], [6464, 6465, 6585], [6464, 6585, 6584], [6465, 6466, 6585], [6466, 6586, 6585], [6466, 6467, 6587], [6466, 6587, 6586], [6467, 6468, 6587], [6468, 6588, 6587], [6468, 6469, 6589], [6468, 6589, 6588], [6469, 6470, 6589], [6470, 6590, 6589], [6470, 6471, 6591], [6470, 6591, 6590], [6471, 6472, 6591], [6472, 6592, 6591], [6472, 6473, 6593], [6472, 6593, 6592], [6473, 6474, 6593], [6474, 6594, 6593], [6474, 6475, 6595], [6474, 6595, 6594], [6475, 6476, 6595], [6476, 6596, 6595], [6476, 6477, 6597], [6476, 6597, 6596], [6477, 6478, 6597], [6478, 6598, 6597], [6478, 6479, 6599], [6478, 6599, 6598], [6479, 6480, 6599], [6480, 6600, 6599], [6480, 6481, 6601], [6480, 6601, 6600], [6481, 6482, 6601], [6482, 6602, 6601], [6482, 6483, 6603], [6482, 6603, 6602], [6483, 6484, 6603], [6484, 6604, 6603], [6484, 6485, 6605], [6484, 6605, 6604], [6485, 6486, 6605], [6486, 6606, 6605], [6486, 6487, 6607], [6486, 6607, 6606], [6487, 6488, 6607], [6488, 6608, 6607], [6488, 6489, 6609], [6488, 6609, 6608], [6489, 6490, 6609], [6490, 6610, 6609], [6490, 6491, 6611], [6490, 6611, 6610], [6491, 6492, 6611], [6492, 6612, 6611], [6492, 6493, 6613], [6492, 6613, 6612], [6493, 6494, 6613], [6494, 6614, 6613], [6494, 6495, 6615], [6494, 6615, 6614], [6495, 6496, 6615], [6496, 6616, 6615], [6496, 6497, 6617], [6496, 6617, 6616], [6497, 6498, 6617], [6498, 6618, 6617], [6498, 6499, 6619], [6498, 6619, 6618], [6499, 6500, 6619], [6500, 6620, 6619], [6500, 6501, 6621], [6500, 6621, 6620], [6501, 6502, 6621], [6502, 6622, 6621], [6502, 6503, 6623], [6502, 6623, 6622], [6503, 6504, 6623], [6504, 6624, 6623], [6504, 6505, 6625], [6504, 6625, 6624], [6505, 6506, 6625], [6506, 6626, 6625], [6506, 6507, 6627], [6506, 6627, 6626], [6507, 6508, 6627], [6508, 6628, 6627], [6508, 6509, 6629], [6508, 6629, 6628], [6509, 6510, 6629], [6510, 6630, 6629], [6510, 6511, 6631], [6510, 6631, 6630], [6511, 6512, 6631], [6512, 6632, 6631], [6512, 6513, 6633], [6512, 6633, 6632], [6513, 6514, 6633], [6514, 6634, 6633], [6514, 6515, 6635], [6514, 6635, 6634], [6515, 6516, 6635], [6516, 6636, 6635], [6516, 6517, 6637], [6516, 6637, 6636], [6517, 6518, 6637], [6518, 6638, 6637], [6518, 6519, 6639], [6518, 6639, 6638], [6519, 6520, 6639], [6520, 6640, 6639], [6520, 6521, 6641], [6520, 6641, 6640], [6521, 6522, 6641], [6522, 6642, 6641], [6522, 6523, 6643], [6522, 6643, 6642], [6523, 6524, 6643], [6524, 6644, 6643], [6524, 6525, 6645], [6524, 6645, 6644], [6525, 6526, 6645], [6526, 6646, 6645], [6526, 6527, 6647], [6526, 6647, 6646], [6527, 6528, 6647], [6528, 6648, 6647], [6528, 6529, 6649], [6528, 6649, 6648], [6529, 6530, 6649], [6530, 6650, 6649], [6530, 6531, 6651], [6530, 6651, 6650], [6531, 6532, 6651], [6532, 6652, 6651], [6532, 6533, 6653], [6532, 6653, 6652], [6533, 6534, 6653], [6534, 6654, 6653], [6534, 6535, 6655], [6534, 6655, 6654], [6535, 6536, 6655], [6536, 6656, 6655], [6539, 6540, 6658], [6539, 6658, 6657], [6540, 6541, 6658], [6541, 6659, 6658], [6541, 6542, 6660], [6541, 6660, 6659], [6542, 6543, 6660], [6543, 6661, 6660], [6543, 6544, 6662], [6543, 6662, 6661], [6544, 6545, 6662], [6545, 6663, 6662], [6545, 6546, 6664], [6545, 6664, 6663], [6546, 6547, 6664], 
[6547, 6665, 6664], [6547, 6548, 6666], [6547, 6666, 6665], [6548, 6549, 6666], [6549, 6667, 6666], [6549, 6550, 6668], [6549, 6668, 6667], [6550, 6551, 6668], [6551, 6669, 6668], [6551, 6552, 6670], [6551, 6670, 6669], [6552, 6553, 6670], [6553, 6671, 6670], [6553, 6554, 6672], [6553, 6672, 6671], [6554, 6555, 6672], [6555, 6673, 6672], [6555, 6556, 6674], [6555, 6674, 6673], [6556, 6557, 6674], [6557, 6675, 6674], [6558, 6559, 6676], [6559, 6677, 6676], [6559, 6560, 6678], [6559, 6678, 6677], [6560, 6561, 6678], [6561, 6679, 6678], [6561, 6562, 6680], [6561, 6680, 6679], [6562, 6563, 6680], [6563, 6681, 6680], [6563, 6564, 6682], [6563, 6682, 6681], [6564, 6565, 6682], [6565, 6683, 6682], [6565, 6566, 6684], [6565, 6684, 6683], [6566, 6567, 6684], [6567, 6685, 6684], [6567, 6568, 6686], [6567, 6686, 6685], [6568, 6569, 6686], [6569, 6687, 6686], [6569, 6570, 6688], [6569, 6688, 6687], [6570, 6571, 6688], [6571, 6689, 6688], [6571, 6572, 6690], [6571, 6690, 6689], [6572, 6573, 6690], [6573, 6691, 6690], [6573, 6574, 6692], [6573, 6692, 6691], [6574, 6575, 6692], [6575, 6693, 6692], [6575, 6576, 6694], [6575, 6694, 6693], [6576, 6577, 6694], [6577, 6695, 6694], [6577, 6578, 6696], [6577, 6696, 6695], [6578, 6579, 6696], [6579, 6697, 6696], [6579, 6580, 6698], [6579, 6698, 6697], [6580, 6581, 6698], [6581, 6699, 6698], [6581, 6582, 6700], [6581, 6700, 6699], [6582, 6583, 6700], [6583, 6701, 6700], [6583, 6584, 6702], [6583, 6702, 6701], [6584, 6585, 6702], [6585, 6703, 6702], [6585, 6586, 6704], [6585, 6704, 6703], [6586, 6587, 6704], [6587, 6705, 6704], [6587, 6588, 6706], [6587, 6706, 6705], [6588, 6589, 6706], [6589, 6707, 6706], [6589, 6590, 6708], [6589, 6708, 6707], [6590, 6591, 6708], [6591, 6709, 6708], [6591, 6592, 6710], [6591, 6710, 6709], [6592, 6593, 6710], [6593, 6711, 6710], [6593, 6594, 6712], [6593, 6712, 6711], [6594, 6595, 6712], [6595, 6713, 6712], [6595, 6596, 6714], [6595, 6714, 6713], [6596, 6597, 6714], [6597, 6715, 6714], [6597, 6598, 6716], [6597, 6716, 6715], [6598, 6599, 6716], [6599, 6717, 6716], [6599, 6600, 6718], [6599, 6718, 6717], [6600, 6601, 6718], [6601, 6719, 6718], [6601, 6602, 6720], [6601, 6720, 6719], [6602, 6603, 6720], [6603, 6721, 6720], [6603, 6604, 6722], [6603, 6722, 6721], [6604, 6605, 6722], [6605, 6723, 6722], [6605, 6606, 6724], [6605, 6724, 6723], [6606, 6607, 6724], [6607, 6725, 6724], [6607, 6608, 6726], [6607, 6726, 6725], [6608, 6609, 6726], [6609, 6727, 6726], [6609, 6610, 6728], [6609, 6728, 6727], [6610, 6611, 6728], [6611, 6729, 6728], [6611, 6612, 6730], [6611, 6730, 6729], [6612, 6613, 6730], [6613, 6731, 6730], [6613, 6614, 6732], [6613, 6732, 6731], [6614, 6615, 6732], [6615, 6733, 6732], [6615, 6616, 6734], [6615, 6734, 6733], [6616, 6617, 6734], [6617, 6735, 6734], [6617, 6618, 6736], [6617, 6736, 6735], [6618, 6619, 6736], [6619, 6737, 6736], [6619, 6620, 6738], [6619, 6738, 6737], [6620, 6621, 6738], [6621, 6739, 6738], [6621, 6622, 6740], [6621, 6740, 6739], [6622, 6623, 6740], [6623, 6741, 6740], [6623, 6624, 6742], [6623, 6742, 6741], [6624, 6625, 6742], [6625, 6743, 6742], [6625, 6626, 6744], [6625, 6744, 6743], [6626, 6627, 6744], [6627, 6745, 6744], [6627, 6628, 6746], [6627, 6746, 6745], [6628, 6629, 6746], [6629, 6747, 6746], [6629, 6630, 6748], [6629, 6748, 6747], [6630, 6631, 6748], [6631, 6749, 6748], [6631, 6632, 6750], [6631, 6750, 6749], [6632, 6633, 6750], [6633, 6751, 6750], [6633, 6634, 6752], [6633, 6752, 6751], [6634, 6635, 6752], [6635, 6753, 6752], [6635, 6636, 6754], [6635, 6754, 6753], [6636, 6637, 
6754], [6637, 6755, 6754], [6637, 6638, 6756], [6637, 6756, 6755], [6638, 6639, 6756], [6639, 6757, 6756], [6639, 6640, 6758], [6639, 6758, 6757], [6640, 6641, 6758], [6641, 6759, 6758], [6641, 6642, 6760], [6641, 6760, 6759], [6642, 6643, 6760], [6643, 6761, 6760], [6643, 6644, 6762], [6643, 6762, 6761], [6644, 6645, 6762], [6645, 6763, 6762], [6645, 6646, 6764], [6645, 6764, 6763], [6646, 6647, 6764], [6647, 6765, 6764], [6647, 6648, 6766], [6647, 6766, 6765], [6648, 6649, 6766], [6649, 6767, 6766], [6649, 6650, 6768], [6649, 6768, 6767], [6650, 6651, 6768], [6651, 6769, 6768], [6651, 6652, 6770], [6651, 6770, 6769], [6652, 6653, 6770], [6653, 6771, 6770], [6653, 6654, 6772], [6653, 6772, 6771], [6654, 6655, 6772], [6655, 6773, 6772], [6655, 6656, 6774], [6655, 6774, 6773], [6657, 6658, 6775], [6658, 6776, 6775], [6658, 6659, 6777], [6658, 6777, 6776], [6659, 6660, 6777], [6660, 6778, 6777], [6660, 6661, 6779], [6660, 6779, 6778], [6661, 6662, 6779], [6662, 6780, 6779], [6662, 6663, 6781], [6662, 6781, 6780], [6663, 6664, 6781], [6664, 6782, 6781], [6664, 6665, 6783], [6664, 6783, 6782], [6665, 6666, 6783], [6666, 6784, 6783], [6666, 6667, 6785], [6666, 6785, 6784], [6667, 6668, 6785], [6668, 6786, 6785], [6668, 6669, 6787], [6668, 6787, 6786], [6669, 6670, 6787], [6670, 6788, 6787], [6670, 6671, 6789], [6670, 6789, 6788], [6671, 6672, 6789], [6672, 6790, 6789], [6672, 6673, 6791], [6672, 6791, 6790], [6673, 6674, 6791], [6674, 6792, 6791], [6674, 6675, 6793], [6674, 6793, 6792], [6676, 6677, 6795], [6676, 6795, 6794], [6677, 6678, 6795], [6678, 6796, 6795], [6678, 6679, 6797], [6678, 6797, 6796], [6679, 6680, 6797], [6680, 6798, 6797], [6680, 6681, 6799], [6680, 6799, 6798], [6681, 6682, 6799], [6682, 6800, 6799], [6682, 6683, 6801], [6682, 6801, 6800], [6683, 6684, 6801], [6684, 6802, 6801], [6684, 6685, 6803], [6684, 6803, 6802], [6685, 6686, 6803], [6686, 6804, 6803], [6686, 6687, 6805], [6686, 6805, 6804], [6687, 6688, 6805], [6688, 6806, 6805], [6688, 6689, 6807], [6688, 6807, 6806], [6689, 6690, 6807], [6690, 6808, 6807], [6690, 6691, 6809], [6690, 6809, 6808], [6691, 6692, 6809], [6692, 6810, 6809], [6692, 6693, 6811], [6692, 6811, 6810], [6693, 6694, 6811], [6694, 6812, 6811], [6694, 6695, 6813], [6694, 6813, 6812], [6695, 6696, 6813], [6696, 6814, 6813], [6696, 6697, 6815], [6696, 6815, 6814], [6697, 6698, 6815], [6698, 6816, 6815], [6698, 6699, 6817], [6698, 6817, 6816], [6699, 6700, 6817], [6700, 6818, 6817], [6700, 6701, 6819], [6700, 6819, 6818], [6701, 6702, 6819], [6702, 6820, 6819], [6702, 6703, 6821], [6702, 6821, 6820], [6703, 6704, 6821], [6704, 6822, 6821], [6704, 6705, 6823], [6704, 6823, 6822], [6705, 6706, 6823], [6706, 6824, 6823], [6706, 6707, 6825], [6706, 6825, 6824], [6707, 6708, 6825], [6708, 6826, 6825], [6708, 6709, 6827], [6708, 6827, 6826], [6709, 6710, 6827], [6710, 6828, 6827], [6710, 6711, 6829], [6710, 6829, 6828], [6711, 6712, 6829], [6712, 6830, 6829], [6712, 6713, 6831], [6712, 6831, 6830], [6713, 6714, 6831], [6714, 6832, 6831], [6714, 6715, 6833], [6714, 6833, 6832], [6715, 6716, 6833], [6716, 6834, 6833], [6716, 6717, 6835], [6716, 6835, 6834], [6717, 6718, 6835], [6718, 6836, 6835], [6718, 6719, 6837], [6718, 6837, 6836], [6719, 6720, 6837], [6720, 6838, 6837], [6720, 6721, 6839], [6720, 6839, 6838], [6721, 6722, 6839], [6722, 6840, 6839], [6722, 6723, 6841], [6722, 6841, 6840], [6723, 6724, 6841], [6724, 6842, 6841], [6724, 6725, 6843], [6724, 6843, 6842], [6725, 6726, 6843], [6726, 6844, 6843], [6726, 6727, 6845], [6726, 6845, 6844], [6727, 
6728, 6845], [6728, 6846, 6845], [6728, 6729, 6847], [6728, 6847, 6846], [6729, 6730, 6847], [6730, 6848, 6847], [6730, 6731, 6849], [6730, 6849, 6848], [6731, 6732, 6849], [6732, 6850, 6849], [6732, 6733, 6851], [6732, 6851, 6850], [6733, 6734, 6851], [6734, 6852, 6851], [6734, 6735, 6853], [6734, 6853, 6852], [6735, 6736, 6853], [6736, 6854, 6853], [6736, 6737, 6855], [6736, 6855, 6854], [6737, 6738, 6855], [6738, 6856, 6855], [6738, 6739, 6857], [6738, 6857, 6856], [6739, 6740, 6857], [6740, 6858, 6857], [6740, 6741, 6859], [6740, 6859, 6858], [6741, 6742, 6859], [6742, 6860, 6859], [6742, 6743, 6861], [6742, 6861, 6860], [6743, 6744, 6861], [6744, 6862, 6861], [6744, 6745, 6863], [6744, 6863, 6862], [6745, 6746, 6863], [6746, 6864, 6863], [6746, 6747, 6865], [6746, 6865, 6864], [6747, 6748, 6865], [6748, 6866, 6865], [6748, 6749, 6867], [6748, 6867, 6866], [6749, 6750, 6867], [6750, 6868, 6867], [6750, 6751, 6869], [6750, 6869, 6868], [6751, 6752, 6869], [6752, 6870, 6869], [6752, 6753, 6871], [6752, 6871, 6870], [6753, 6754, 6871], [6754, 6872, 6871], [6754, 6755, 6873], [6754, 6873, 6872], [6755, 6756, 6873], [6756, 6874, 6873], [6756, 6757, 6875], [6756, 6875, 6874], [6757, 6758, 6875], [6758, 6876, 6875], [6758, 6759, 6877], [6758, 6877, 6876], [6759, 6760, 6877], [6760, 6878, 6877], [6760, 6761, 6879], [6760, 6879, 6878], [6761, 6762, 6879], [6762, 6880, 6879], [6762, 6763, 6881], [6762, 6881, 6880], [6763, 6764, 6881], [6764, 6882, 6881], [6764, 6765, 6883], [6764, 6883, 6882], [6765, 6766, 6883], [6766, 6884, 6883], [6766, 6767, 6885], [6766, 6885, 6884], [6767, 6768, 6885], [6768, 6886, 6885], [6768, 6769, 6887], [6768, 6887, 6886], [6769, 6770, 6887], [6770, 6888, 6887], [6770, 6771, 6889], [6770, 6889, 6888], [6771, 6772, 6889], [6772, 6890, 6889], [6772, 6773, 6891], [6772, 6891, 6890], [6773, 6774, 6891], [6774, 6892, 6891], [6775, 6776, 6894], [6775, 6894, 6893], [6776, 6777, 6894], [6777, 6895, 6894], [6777, 6778, 6896], [6777, 6896, 6895], [6778, 6779, 6896], [6779, 6897, 6896], [6779, 6780, 6898], [6779, 6898, 6897], [6780, 6781, 6898], [6781, 6899, 6898], [6781, 6782, 6900], [6781, 6900, 6899], [6782, 6783, 6900], [6783, 6901, 6900], [6783, 6784, 6902], [6783, 6902, 6901], [6784, 6785, 6902], [6785, 6903, 6902], [6785, 6786, 6904], [6785, 6904, 6903], [6786, 6787, 6904], [6787, 6905, 6904], [6787, 6788, 6906], [6787, 6906, 6905], [6788, 6789, 6906], [6789, 6907, 6906], [6789, 6790, 6908], [6789, 6908, 6907], [6790, 6791, 6908], [6791, 6909, 6908], [6791, 6792, 6910], [6791, 6910, 6909], [6792, 6793, 6910], [6793, 6911, 6910], [6794, 6795, 6912], [6795, 6913, 6912], [6795, 6796, 6914], [6795, 6914, 6913], [6796, 6797, 6914], [6797, 6915, 6914], [6797, 6798, 6916], [6797, 6916, 6915], [6798, 6799, 6916], [6799, 6917, 6916], [6799, 6800, 6918], [6799, 6918, 6917], [6800, 6801, 6918], [6801, 6919, 6918], [6801, 6802, 6920], [6801, 6920, 6919], [6802, 6803, 6920], [6803, 6921, 6920], [6803, 6804, 6922], [6803, 6922, 6921], [6804, 6805, 6922], [6805, 6923, 6922], [6805, 6806, 6924], [6805, 6924, 6923], [6806, 6807, 6924], [6807, 6925, 6924], [6807, 6808, 6926], [6807, 6926, 6925], [6808, 6809, 6926], [6809, 6927, 6926], [6809, 6810, 6928], [6809, 6928, 6927], [6810, 6811, 6928], [6811, 6929, 6928], [6811, 6812, 6930], [6811, 6930, 6929], [6812, 6813, 6930], [6813, 6931, 6930], [6813, 6814, 6932], [6813, 6932, 6931], [6814, 6815, 6932], [6815, 6933, 6932], [6815, 6816, 6934], [6815, 6934, 6933], [6816, 6817, 6934], [6817, 6935, 6934], [6817, 6818, 6936], [6817, 6936, 6935], 
[6818, 6819, 6936], [6819, 6937, 6936], [6819, 6820, 6938], [6819, 6938, 6937], [6820, 6821, 6938], [6821, 6939, 6938], [6821, 6822, 6940], [6821, 6940, 6939], [6822, 6823, 6940], [6823, 6941, 6940], [6823, 6824, 6942], [6823, 6942, 6941], [6824, 6825, 6942], [6825, 6943, 6942], [6825, 6826, 6944], [6825, 6944, 6943], [6826, 6827, 6944], [6827, 6945, 6944], [6827, 6828, 6946], [6827, 6946, 6945], [6828, 6829, 6946], [6829, 6947, 6946], [6829, 6830, 6948], [6829, 6948, 6947], [6830, 6831, 6948], [6831, 6949, 6948], [6831, 6832, 6950], [6831, 6950, 6949], [6832, 6833, 6950], [6833, 6951, 6950], [6833, 6834, 6952], [6833, 6952, 6951], [6834, 6835, 6952], [6835, 6953, 6952], [6835, 6836, 6954], [6835, 6954, 6953], [6836, 6837, 6954], [6837, 6955, 6954], [6837, 6838, 6956], [6837, 6956, 6955], [6838, 6839, 6956], [6839, 6957, 6956], [6839, 6840, 6958], [6839, 6958, 6957], [6840, 6841, 6958], [6841, 6959, 6958], [6841, 6842, 6960], [6841, 6960, 6959], [6842, 6843, 6960], [6843, 6961, 6960], [6843, 6844, 6962], [6843, 6962, 6961], [6844, 6845, 6962], [6845, 6963, 6962], [6845, 6846, 6964], [6845, 6964, 6963], [6846, 6847, 6964], [6847, 6965, 6964], [6847, 6848, 6966], [6847, 6966, 6965], [6848, 6849, 6966], [6849, 6967, 6966], [6849, 6850, 6968], [6849, 6968, 6967], [6850, 6851, 6968], [6851, 6969, 6968], [6851, 6852, 6970], [6851, 6970, 6969], [6852, 6853, 6970], [6853, 6971, 6970], [6853, 6854, 6972], [6853, 6972, 6971], [6854, 6855, 6972], [6855, 6973, 6972], [6855, 6856, 6974], [6855, 6974, 6973], [6856, 6857, 6974], [6857, 6975, 6974], [6857, 6858, 6976], [6857, 6976, 6975], [6858, 6859, 6976], [6859, 6977, 6976], [6859, 6860, 6978], [6859, 6978, 6977], [6860, 6861, 6978], [6861, 6979, 6978], [6861, 6862, 6980], [6861, 6980, 6979], [6862, 6863, 6980], [6863, 6981, 6980], [6863, 6864, 6982], [6863, 6982, 6981], [6864, 6865, 6982], [6865, 6983, 6982], [6865, 6866, 6984], [6865, 6984, 6983], [6866, 6867, 6984], [6867, 6985, 6984], [6867, 6868, 6986], [6867, 6986, 6985], [6868, 6869, 6986], [6869, 6987, 6986], [6869, 6870, 6988], [6869, 6988, 6987], [6870, 6871, 6988], [6871, 6989, 6988], [6871, 6872, 6990], [6871, 6990, 6989], [6872, 6873, 6990], [6873, 6991, 6990], [6873, 6874, 6992], [6873, 6992, 6991], [6874, 6875, 6992], [6875, 6993, 6992], [6875, 6876, 6994], [6875, 6994, 6993], [6876, 6877, 6994], [6877, 6995, 6994], [6877, 6878, 6996], [6877, 6996, 6995], [6878, 6879, 6996], [6879, 6997, 6996], [6879, 6880, 6998], [6879, 6998, 6997], [6880, 6881, 6998], [6881, 6999, 6998], [6881, 6882, 7000], [6881, 7000, 6999], [6882, 6883, 7000], [6883, 7001, 7000], [6883, 6884, 7002], [6883, 7002, 7001], [6884, 6885, 7002], [6885, 7003, 7002], [6885, 6886, 7004], [6885, 7004, 7003], [6886, 6887, 7004], [6887, 7005, 7004], [6887, 6888, 7006], [6887, 7006, 7005], [6888, 6889, 7006], [6889, 7007, 7006], [6889, 6890, 7008], [6889, 7008, 7007], [6890, 6891, 7008], [6891, 7009, 7008], [6891, 6892, 7010], [6891, 7010, 7009], [6893, 6894, 7011], [6894, 7012, 7011], [6894, 6895, 7013], [6894, 7013, 7012], [6895, 6896, 7013], [6896, 7014, 7013], [6896, 6897, 7015], [6896, 7015, 7014], [6897, 6898, 7015], [6898, 7016, 7015], [6898, 6899, 7017], [6898, 7017, 7016], [6899, 6900, 7017], [6900, 7018, 7017], [6900, 6901, 7019], [6900, 7019, 7018], [6901, 6902, 7019], [6902, 7020, 7019], [6902, 6903, 7021], [6902, 7021, 7020], [6903, 6904, 7021], [6904, 7022, 7021], [6904, 6905, 7023], [6904, 7023, 7022], [6905, 6906, 7023], [6906, 7024, 7023], [6906, 6907, 7025], [6906, 7025, 7024], [6907, 6908, 7025], [6908, 7026, 
7025], [6908, 6909, 7027], [6908, 7027, 7026], [6909, 6910, 7027], [6910, 7028, 7027], [6910, 6911, 7029], [6910, 7029, 7028], [6912, 6913, 7031], [6912, 7031, 7030], [6913, 6914, 7031], [6914, 7032, 7031], [6914, 6915, 7033], [6914, 7033, 7032], [6915, 6916, 7033], [6916, 7034, 7033], [6916, 6917, 7035], [6916, 7035, 7034], [6917, 6918, 7035], [6918, 7036, 7035], [6918, 6919, 7037], [6918, 7037, 7036], [6919, 6920, 7037], [6920, 7038, 7037], [6920, 6921, 7039], [6920, 7039, 7038], [6921, 6922, 7039], [6922, 7040, 7039], [6922, 6923, 7041], [6922, 7041, 7040], [6923, 6924, 7041], [6924, 7042, 7041], [6924, 6925, 7043], [6924, 7043, 7042], [6925, 6926, 7043], [6926, 7044, 7043], [6926, 6927, 7045], [6926, 7045, 7044], [6927, 6928, 7045], [6928, 7046, 7045], [6928, 6929, 7047], [6928, 7047, 7046], [6929, 6930, 7047], [6930, 7048, 7047], [6930, 6931, 7049], [6930, 7049, 7048], [6931, 6932, 7049], [6932, 7050, 7049], [6932, 6933, 7051], [6932, 7051, 7050], [6933, 6934, 7051], [6934, 7052, 7051], [6934, 6935, 7053], [6934, 7053, 7052], [6935, 6936, 7053], [6936, 7054, 7053], [6936, 6937, 7055], [6936, 7055, 7054], [6937, 6938, 7055], [6938, 7056, 7055], [6938, 6939, 7057], [6938, 7057, 7056], [6939, 6940, 7057], [6940, 7058, 7057], [6940, 6941, 7059], [6940, 7059, 7058], [6941, 6942, 7059], [6942, 7060, 7059], [6942, 6943, 7061], [6942, 7061, 7060], [6943, 6944, 7061], [6944, 7062, 7061], [6944, 6945, 7063], [6944, 7063, 7062], [6945, 6946, 7063], [6946, 7064, 7063], [6946, 6947, 7065], [6946, 7065, 7064], [6947, 6948, 7065], [6948, 7066, 7065], [6948, 6949, 7067], [6948, 7067, 7066], [6949, 6950, 7067], [6950, 7068, 7067], [6950, 6951, 7069], [6950, 7069, 7068], [6951, 6952, 7069], [6952, 7070, 7069], [6952, 6953, 7071], [6952, 7071, 7070], [6953, 6954, 7071], [6954, 7072, 7071], [6954, 6955, 7073], [6954, 7073, 7072], [6955, 6956, 7073], [6956, 7074, 7073], [6956, 6957, 7075], [6956, 7075, 7074], [6957, 6958, 7075], [6958, 7076, 7075], [6958, 6959, 7077], [6958, 7077, 7076], [6959, 6960, 7077], [6960, 7078, 7077], [6960, 6961, 7079], [6960, 7079, 7078], [6961, 6962, 7079], [6962, 7080, 7079], [6962, 6963, 7081], [6962, 7081, 7080], [6963, 6964, 7081], [6964, 7082, 7081], [6964, 6965, 7083], [6964, 7083, 7082], [6965, 6966, 7083], [6966, 7084, 7083], [6966, 6967, 7085], [6966, 7085, 7084], [6967, 6968, 7085], [6968, 7086, 7085], [6968, 6969, 7087], [6968, 7087, 7086], [6969, 6970, 7087], [6970, 7088, 7087], [6970, 6971, 7089], [6970, 7089, 7088], [6971, 6972, 7089], [6972, 7090, 7089], [6972, 6973, 7091], [6972, 7091, 7090], [6973, 6974, 7091], [6974, 7092, 7091], [6974, 6975, 7093], [6974, 7093, 7092], [6975, 6976, 7093], [6976, 7094, 7093], [6976, 6977, 7095], [6976, 7095, 7094], [6977, 6978, 7095], [6978, 7096, 7095], [6978, 6979, 7097], [6978, 7097, 7096], [6979, 6980, 7097], [6980, 7098, 7097], [6980, 6981, 7099], [6980, 7099, 7098], [6981, 6982, 7099], [6982, 7100, 7099], [6982, 6983, 7101], [6982, 7101, 7100], [6983, 6984, 7101], [6984, 7102, 7101], [6984, 6985, 7103], [6984, 7103, 7102], [6985, 6986, 7103], [6986, 7104, 7103], [6986, 6987, 7105], [6986, 7105, 7104], [6987, 6988, 7105], [6988, 7106, 7105], [6988, 6989, 7107], [6988, 7107, 7106], [6989, 6990, 7107], [6990, 7108, 7107], [6990, 6991, 7109], [6990, 7109, 7108], [6991, 6992, 7109], [6992, 7110, 7109], [6992, 6993, 7111], [6992, 7111, 7110], [6993, 6994, 7111], [6994, 7112, 7111], [6994, 6995, 7113], [6994, 7113, 7112], [6995, 6996, 7113], [6996, 7114, 7113], [6996, 6997, 7115], [6996, 7115, 7114], [6997, 6998, 7115], [6998, 
7116, 7115], [6998, 6999, 7117], [6998, 7117, 7116], [6999, 7000, 7117], [7000, 7118, 7117], [7000, 7001, 7119], [7000, 7119, 7118], [7001, 7002, 7119], [7002, 7120, 7119], [7002, 7003, 7121], [7002, 7121, 7120], [7003, 7004, 7121], [7004, 7122, 7121], [7004, 7005, 7123], [7004, 7123, 7122], [7005, 7006, 7123], [7006, 7124, 7123], [7006, 7007, 7125], [7006, 7125, 7124], [7007, 7008, 7125], [7008, 7126, 7125], [7008, 7009, 7127], [7008, 7127, 7126], [7009, 7010, 7127], [7010, 7128, 7127], [7011, 7012, 7130], [7011, 7130, 7129], [7012, 7013, 7130], [7013, 7131, 7130], [7013, 7014, 7132], [7013, 7132, 7131], [7014, 7015, 7132], [7015, 7133, 7132], [7015, 7016, 7134], [7015, 7134, 7133], [7016, 7017, 7134], [7017, 7135, 7134], [7017, 7018, 7136], [7017, 7136, 7135], [7018, 7019, 7136], [7019, 7137, 7136], [7019, 7020, 7138], [7019, 7138, 7137], [7020, 7021, 7138], [7021, 7139, 7138], [7021, 7022, 7140], [7021, 7140, 7139], [7022, 7023, 7140], [7023, 7141, 7140], [7023, 7024, 7142], [7023, 7142, 7141], [7024, 7025, 7142], [7025, 7143, 7142], [7025, 7026, 7144], [7025, 7144, 7143], [7026, 7027, 7144], [7027, 7145, 7144], [7027, 7028, 7146], [7027, 7146, 7145], [7028, 7029, 7146], [7029, 7147, 7146], [7030, 7031, 7148], [7031, 7149, 7148], [7031, 7032, 7150], [7031, 7150, 7149], [7032, 7033, 7150], [7033, 7151, 7150], [7033, 7034, 7152], [7033, 7152, 7151], [7034, 7035, 7152], [7035, 7153, 7152], [7035, 7036, 7154], [7035, 7154, 7153], [7036, 7037, 7154], [7037, 7155, 7154], [7037, 7038, 7156], [7037, 7156, 7155], [7038, 7039, 7156], [7039, 7157, 7156], [7039, 7040, 7158], [7039, 7158, 7157], [7040, 7041, 7158], [7041, 7159, 7158], [7041, 7042, 7160], [7041, 7160, 7159], [7042, 7043, 7160], [7043, 7161, 7160], [7043, 7044, 7162], [7043, 7162, 7161], [7044, 7045, 7162], [7045, 7163, 7162], [7045, 7046, 7164], [7045, 7164, 7163], [7046, 7047, 7164], [7047, 7165, 7164], [7047, 7048, 7166], [7047, 7166, 7165], [7048, 7049, 7166], [7049, 7167, 7166], [7049, 7050, 7168], [7049, 7168, 7167], [7050, 7051, 7168], [7051, 7169, 7168], [7051, 7052, 7170], [7051, 7170, 7169], [7052, 7053, 7170], [7053, 7171, 7170], [7053, 7054, 7172], [7053, 7172, 7171], [7054, 7055, 7172], [7055, 7173, 7172], [7055, 7056, 7174], [7055, 7174, 7173], [7056, 7057, 7174], [7057, 7175, 7174], [7057, 7058, 7176], [7057, 7176, 7175], [7058, 7059, 7176], [7059, 7177, 7176], [7059, 7060, 7178], [7059, 7178, 7177], [7060, 7061, 7178], [7061, 7179, 7178], [7061, 7062, 7180], [7061, 7180, 7179], [7062, 7063, 7180], [7063, 7181, 7180], [7063, 7064, 7182], [7063, 7182, 7181], [7064, 7065, 7182], [7065, 7183, 7182], [7065, 7066, 7184], [7065, 7184, 7183], [7066, 7067, 7184], [7067, 7185, 7184], [7067, 7068, 7186], [7067, 7186, 7185], [7068, 7069, 7186], [7069, 7187, 7186], [7069, 7070, 7188], [7069, 7188, 7187], [7070, 7071, 7188], [7071, 7189, 7188], [7071, 7072, 7190], [7071, 7190, 7189], [7072, 7073, 7190], [7073, 7191, 7190], [7073, 7074, 7192], [7073, 7192, 7191], [7074, 7075, 7192], [7075, 7193, 7192], [7075, 7076, 7194], [7075, 7194, 7193], [7076, 7077, 7194], [7077, 7195, 7194], [7077, 7078, 7196], [7077, 7196, 7195], [7078, 7079, 7196], [7079, 7197, 7196], [7079, 7080, 7198], [7079, 7198, 7197], [7080, 7081, 7198], [7081, 7199, 7198], [7081, 7082, 7200], [7081, 7200, 7199], [7082, 7083, 7200], [7083, 7201, 7200], [7083, 7084, 7202], [7083, 7202, 7201], [7084, 7085, 7202], [7085, 7203, 7202], [7085, 7086, 7204], [7085, 7204, 7203], [7086, 7087, 7204], [7087, 7205, 7204], [7087, 7088, 7206], [7087, 7206, 7205], [7088, 7089, 7206], 
[7089, 7207, 7206], [7089, 7090, 7208], [7089, 7208, 7207], [7090, 7091, 7208], [7091, 7209, 7208], [7091, 7092, 7210], [7091, 7210, 7209], [7092, 7093, 7210], [7093, 7211, 7210], [7093, 7094, 7212], [7093, 7212, 7211], [7094, 7095, 7212], [7095, 7213, 7212], [7095, 7096, 7214], [7095, 7214, 7213], [7096, 7097, 7214], [7097, 7215, 7214], [7097, 7098, 7216], [7097, 7216, 7215], [7098, 7099, 7216], [7099, 7217, 7216], [7099, 7100, 7218], [7099, 7218, 7217], [7100, 7101, 7218], [7101, 7219, 7218], [7101, 7102, 7220], [7101, 7220, 7219], [7102, 7103, 7220], [7103, 7221, 7220], [7103, 7104, 7222], [7103, 7222, 7221], [7104, 7105, 7222], [7105, 7223, 7222], [7105, 7106, 7224], [7105, 7224, 7223], [7106, 7107, 7224], [7107, 7225, 7224], [7107, 7108, 7226], [7107, 7226, 7225], [7108, 7109, 7226], [7109, 7227, 7226], [7109, 7110, 7228], [7109, 7228, 7227], [7110, 7111, 7228], [7111, 7229, 7228], [7111, 7112, 7230], [7111, 7230, 7229], [7112, 7113, 7230], [7113, 7231, 7230], [7113, 7114, 7232], [7113, 7232, 7231], [7114, 7115, 7232], [7115, 7233, 7232], [7115, 7116, 7234], [7115, 7234, 7233], [7116, 7117, 7234], [7117, 7235, 7234], [7117, 7118, 7236], [7117, 7236, 7235], [7118, 7119, 7236], [7119, 7237, 7236], [7119, 7120, 7238], [7119, 7238, 7237], [7120, 7121, 7238], [7121, 7239, 7238], [7121, 7122, 7240], [7121, 7240, 7239], [7122, 7123, 7240], [7123, 7241, 7240], [7123, 7124, 7242], [7123, 7242, 7241], [7124, 7125, 7242], [7125, 7243, 7242], [7125, 7126, 7244], [7125, 7244, 7243], [7126, 7127, 7244], [7127, 7245, 7244], [7127, 7128, 7246], [7127, 7246, 7245], [7129, 7130, 7247], [7130, 7248, 7247], [7130, 7131, 7249], [7130, 7249, 7248], [7131, 7132, 7249], [7132, 7250, 7249], [7132, 7133, 7251], [7132, 7251, 7250], [7133, 7134, 7251], [7134, 7252, 7251], [7134, 7135, 7253], [7134, 7253, 7252], [7135, 7136, 7253], [7136, 7254, 7253], [7136, 7137, 7255], [7136, 7255, 7254], [7137, 7138, 7255], [7138, 7256, 7255], [7138, 7139, 7257], [7138, 7257, 7256], [7139, 7140, 7257], [7140, 7258, 7257], [7140, 7141, 7259], [7140, 7259, 7258], [7141, 7142, 7259], [7142, 7260, 7259], [7142, 7143, 7261], [7142, 7261, 7260], [7143, 7144, 7261], [7144, 7262, 7261], [7144, 7145, 7263], [7144, 7263, 7262], [7145, 7146, 7263], [7146, 7264, 7263], [7146, 7147, 7265], [7146, 7265, 7264], [7148, 7149, 7267], [7148, 7267, 7266], [7149, 7150, 7267], [7150, 7268, 7267], [7150, 7151, 7269], [7150, 7269, 7268], [7151, 7152, 7269], [7152, 7270, 7269], [7152, 7153, 7271], [7152, 7271, 7270], [7153, 7154, 7271], [7154, 7272, 7271], [7154, 7155, 7273], [7154, 7273, 7272], [7155, 7156, 7273], [7156, 7274, 7273], [7156, 7157, 7275], [7156, 7275, 7274], [7157, 7158, 7275], [7158, 7276, 7275], [7158, 7159, 7277], [7158, 7277, 7276], [7159, 7160, 7277], [7160, 7278, 7277], [7160, 7161, 7279], [7160, 7279, 7278], [7161, 7162, 7279], [7162, 7280, 7279], [7162, 7163, 7281], [7162, 7281, 7280], [7163, 7164, 7281], [7164, 7282, 7281], [7164, 7165, 7283], [7164, 7283, 7282], [7165, 7166, 7283], [7166, 7284, 7283], [7166, 7167, 7285], [7166, 7285, 7284], [7167, 7168, 7285], [7168, 7286, 7285], [7168, 7169, 7287], [7168, 7287, 7286], [7169, 7170, 7287], [7170, 7288, 7287], [7170, 7171, 7289], [7170, 7289, 7288], [7171, 7172, 7289], [7172, 7290, 7289], [7172, 7173, 7291], [7172, 7291, 7290], [7173, 7174, 7291], [7174, 7292, 7291], [7174, 7175, 7293], [7174, 7293, 7292], [7175, 7176, 7293], [7176, 7294, 7293], [7176, 7177, 7295], [7176, 7295, 7294], [7177, 7178, 7295], [7178, 7296, 7295], [7178, 7179, 7297], [7178, 7297, 7296], [7179, 7180, 
7297], [7180, 7298, 7297], [7180, 7181, 7299], [7180, 7299, 7298], [7181, 7182, 7299], [7182, 7300, 7299], [7182, 7183, 7301], [7182, 7301, 7300], [7183, 7184, 7301], [7184, 7302, 7301], [7184, 7185, 7303], [7184, 7303, 7302], [7185, 7186, 7303], [7186, 7304, 7303], [7186, 7187, 7305], [7186, 7305, 7304], [7187, 7188, 7305], [7188, 7306, 7305], [7188, 7189, 7307], [7188, 7307, 7306], [7189, 7190, 7307], [7190, 7308, 7307], [7190, 7191, 7309], [7190, 7309, 7308], [7191, 7192, 7309], [7192, 7310, 7309], [7192, 7193, 7311], [7192, 7311, 7310], [7193, 7194, 7311], [7194, 7312, 7311], [7194, 7195, 7313], [7194, 7313, 7312], [7195, 7196, 7313], [7196, 7314, 7313], [7196, 7197, 7315], [7196, 7315, 7314], [7197, 7198, 7315], [7198, 7316, 7315], [7198, 7199, 7317], [7198, 7317, 7316], [7199, 7200, 7317], [7200, 7318, 7317], [7200, 7201, 7319], [7200, 7319, 7318], [7201, 7202, 7319], [7202, 7320, 7319], [7202, 7203, 7321], [7202, 7321, 7320], [7203, 7204, 7321], [7204, 7322, 7321], [7204, 7205, 7323], [7204, 7323, 7322], [7205, 7206, 7323], [7206, 7324, 7323], [7206, 7207, 7325], [7206, 7325, 7324], [7207, 7208, 7325], [7208, 7326, 7325], [7208, 7209, 7327], [7208, 7327, 7326], [7209, 7210, 7327], [7210, 7328, 7327], [7210, 7211, 7329], [7210, 7329, 7328], [7211, 7212, 7329], [7212, 7330, 7329], [7212, 7213, 7331], [7212, 7331, 7330], [7213, 7214, 7331], [7214, 7332, 7331], [7214, 7215, 7333], [7214, 7333, 7332], [7215, 7216, 7333], [7216, 7334, 7333], [7216, 7217, 7335], [7216, 7335, 7334], [7217, 7218, 7335], [7218, 7336, 7335], [7218, 7219, 7337], [7218, 7337, 7336], [7219, 7220, 7337], [7220, 7338, 7337], [7220, 7221, 7339], [7220, 7339, 7338], [7221, 7222, 7339], [7222, 7340, 7339], [7222, 7223, 7341], [7222, 7341, 7340], [7223, 7224, 7341], [7224, 7342, 7341], [7224, 7225, 7343], [7224, 7343, 7342], [7225, 7226, 7343], [7226, 7344, 7343], [7226, 7227, 7345], [7226, 7345, 7344], [7227, 7228, 7345], [7228, 7346, 7345], [7228, 7229, 7347], [7228, 7347, 7346], [7229, 7230, 7347], [7230, 7348, 7347], [7230, 7231, 7349], [7230, 7349, 7348], [7231, 7232, 7349], [7232, 7350, 7349], [7232, 7233, 7351], [7232, 7351, 7350], [7233, 7234, 7351], [7234, 7352, 7351], [7234, 7235, 7353], [7234, 7353, 7352], [7235, 7236, 7353], [7236, 7354, 7353], [7236, 7237, 7355], [7236, 7355, 7354], [7237, 7238, 7355], [7238, 7356, 7355], [7238, 7239, 7357], [7238, 7357, 7356], [7239, 7240, 7357], [7240, 7358, 7357], [7240, 7241, 7359], [7240, 7359, 7358], [7241, 7242, 7359], [7242, 7360, 7359], [7242, 7243, 7361], [7242, 7361, 7360], [7243, 7244, 7361], [7244, 7362, 7361], [7244, 7245, 7363], [7244, 7363, 7362], [7245, 7246, 7363], [7246, 7364, 7363], [7247, 7248, 7366], [7247, 7366, 7365], [7248, 7249, 7366], [7249, 7367, 7366], [7249, 7250, 7368], [7249, 7368, 7367], [7250, 7251, 7368], [7251, 7369, 7368], [7251, 7252, 7370], [7251, 7370, 7369], [7252, 7253, 7370], [7253, 7371, 7370], [7253, 7254, 7372], [7253, 7372, 7371], [7254, 7255, 7372], [7255, 7373, 7372], [7255, 7256, 7374], [7255, 7374, 7373], [7256, 7257, 7374], [7257, 7375, 7374], [7257, 7258, 7376], [7257, 7376, 7375], [7258, 7259, 7376], [7259, 7377, 7376], [7259, 7260, 7378], [7259, 7378, 7377], [7260, 7261, 7378], [7261, 7379, 7378], [7261, 7262, 7380], [7261, 7380, 7379], [7262, 7263, 7380], [7263, 7381, 7380], [7263, 7264, 7382], [7263, 7382, 7381], [7264, 7265, 7382], [7265, 7383, 7382], [7266, 7267, 7384], [7267, 7385, 7384], [7267, 7268, 7386], [7267, 7386, 7385], [7268, 7269, 7386], [7269, 7387, 7386], [7269, 7270, 7388], [7269, 7388, 7387], [7270, 
7271, 7388], [7271, 7389, 7388], [7271, 7272, 7390], [7271, 7390, 7389], [7272, 7273, 7390], [7273, 7391, 7390], [7273, 7274, 7392], [7273, 7392, 7391], [7274, 7275, 7392], [7275, 7393, 7392], [7275, 7276, 7394], [7275, 7394, 7393], [7276, 7277, 7394], [7277, 7395, 7394], [7277, 7278, 7396], [7277, 7396, 7395], [7278, 7279, 7396], [7279, 7397, 7396], [7279, 7280, 7398], [7279, 7398, 7397], [7280, 7281, 7398], [7281, 7399, 7398], [7281, 7282, 7400], [7281, 7400, 7399], [7282, 7283, 7400], [7283, 7401, 7400], [7283, 7284, 7402], [7283, 7402, 7401], [7284, 7285, 7402], [7285, 7403, 7402], [7285, 7286, 7404], [7285, 7404, 7403], [7286, 7287, 7404], [7287, 7405, 7404], [7287, 7288, 7406], [7287, 7406, 7405], [7288, 7289, 7406], [7289, 7407, 7406], [7289, 7290, 7408], [7289, 7408, 7407], [7290, 7291, 7408], [7291, 7409, 7408], [7291, 7292, 7410], [7291, 7410, 7409], [7292, 7293, 7410], [7293, 7411, 7410], [7293, 7294, 7412], [7293, 7412, 7411], [7294, 7295, 7412], [7295, 7413, 7412], [7295, 7296, 7414], [7295, 7414, 7413], [7296, 7297, 7414], [7297, 7415, 7414], [7297, 7298, 7416], [7297, 7416, 7415], [7298, 7299, 7416], [7299, 7417, 7416], [7299, 7300, 7418], [7299, 7418, 7417], [7300, 7301, 7418], [7301, 7419, 7418], [7301, 7302, 7420], [7301, 7420, 7419], [7302, 7303, 7420], [7303, 7421, 7420], [7303, 7304, 7422], [7303, 7422, 7421], [7304, 7305, 7422], [7305, 7423, 7422], [7305, 7306, 7424], [7305, 7424, 7423], [7306, 7307, 7424], [7307, 7425, 7424], [7307, 7308, 7426], [7307, 7426, 7425], [7308, 7309, 7426], [7309, 7427, 7426], [7309, 7310, 7428], [7309, 7428, 7427], [7310, 7311, 7428], [7311, 7429, 7428], [7311, 7312, 7430], [7311, 7430, 7429], [7312, 7313, 7430], [7313, 7431, 7430], [7313, 7314, 7432], [7313, 7432, 7431], [7314, 7315, 7432], [7315, 7433, 7432], [7315, 7316, 7434], [7315, 7434, 7433], [7316, 7317, 7434], [7317, 7435, 7434], [7317, 7318, 7436], [7317, 7436, 7435], [7318, 7319, 7436], [7319, 7437, 7436], [7319, 7320, 7438], [7319, 7438, 7437], [7320, 7321, 7438], [7321, 7439, 7438], [7321, 7322, 7440], [7321, 7440, 7439], [7322, 7323, 7440], [7323, 7441, 7440], [7323, 7324, 7442], [7323, 7442, 7441], [7324, 7325, 7442], [7325, 7443, 7442], [7325, 7326, 7444], [7325, 7444, 7443], [7326, 7327, 7444], [7327, 7445, 7444], [7327, 7328, 7446], [7327, 7446, 7445], [7328, 7329, 7446], [7329, 7447, 7446], [7329, 7330, 7448], [7329, 7448, 7447], [7330, 7331, 7448], [7331, 7449, 7448], [7331, 7332, 7450], [7331, 7450, 7449], [7332, 7333, 7450], [7333, 7451, 7450], [7333, 7334, 7452], [7333, 7452, 7451], [7334, 7335, 7452], [7335, 7453, 7452], [7335, 7336, 7454], [7335, 7454, 7453], [7336, 7337, 7454], [7337, 7455, 7454], [7337, 7338, 7456], [7337, 7456, 7455], [7338, 7339, 7456], [7339, 7457, 7456], [7339, 7340, 7458], [7339, 7458, 7457], [7340, 7341, 7458], [7341, 7459, 7458], [7341, 7342, 7460], [7341, 7460, 7459], [7342, 7343, 7460], [7343, 7461, 7460], [7343, 7344, 7462], [7343, 7462, 7461], [7344, 7345, 7462], [7345, 7463, 7462], [7345, 7346, 7464], [7345, 7464, 7463], [7346, 7347, 7464], [7347, 7465, 7464], [7347, 7348, 7466], [7347, 7466, 7465], [7348, 7349, 7466], [7349, 7467, 7466], [7349, 7350, 7468], [7349, 7468, 7467], [7350, 7351, 7468], [7351, 7469, 7468], [7351, 7352, 7470], [7351, 7470, 7469], [7352, 7353, 7470], [7353, 7471, 7470], [7353, 7354, 7472], [7353, 7472, 7471], [7354, 7355, 7472], [7355, 7473, 7472], [7355, 7356, 7474], [7355, 7474, 7473], [7356, 7357, 7474], [7357, 7475, 7474], [7357, 7358, 7476], [7357, 7476, 7475], [7358, 7359, 7476], [7359, 7477, 7476], 
[7359, 7360, 7478], [7359, 7478, 7477], [7360, 7361, 7478], [7361, 7479, 7478], [7361, 7362, 7480], [7361, 7480, 7479], [7362, 7363, 7480], [7363, 7481, 7480], [7363, 7364, 7482], [7363, 7482, 7481], [7365, 7366, 7483], [7366, 7484, 7483], [7366, 7367, 7485], [7366, 7485, 7484], [7367, 7368, 7485], [7368, 7486, 7485], [7368, 7369, 7487], [7368, 7487, 7486], [7369, 7370, 7487], [7370, 7488, 7487], [7370, 7371, 7489], [7370, 7489, 7488], [7371, 7372, 7489], [7372, 7490, 7489], [7372, 7373, 7491], [7372, 7491, 7490], [7373, 7374, 7491], [7374, 7492, 7491], [7374, 7375, 7493], [7374, 7493, 7492], [7375, 7376, 7493], [7376, 7494, 7493], [7376, 7377, 7495], [7376, 7495, 7494], [7377, 7378, 7495], [7378, 7496, 7495], [7378, 7379, 7497], [7378, 7497, 7496], [7379, 7380, 7497], [7380, 7498, 7497], [7380, 7381, 7499], [7380, 7499, 7498], [7381, 7382, 7499], [7382, 7500, 7499], [7382, 7383, 7501], [7382, 7501, 7500], [7384, 7385, 7503], [7384, 7503, 7502], [7385, 7386, 7503], [7386, 7504, 7503], [7386, 7387, 7505], [7386, 7505, 7504], [7387, 7388, 7505], [7388, 7506, 7505], [7388, 7389, 7507], [7388, 7507, 7506], [7389, 7390, 7507], [7390, 7508, 7507], [7390, 7391, 7509], [7390, 7509, 7508], [7391, 7392, 7509], [7392, 7510, 7509], [7392, 7393, 7511], [7392, 7511, 7510], [7393, 7394, 7511], [7394, 7512, 7511], [7394, 7395, 7513], [7394, 7513, 7512], [7395, 7396, 7513], [7396, 7514, 7513], [7396, 7397, 7515], [7396, 7515, 7514], [7397, 7398, 7515], [7398, 7516, 7515], [7398, 7399, 7517], [7398, 7517, 7516], [7399, 7400, 7517], [7400, 7518, 7517], [7400, 7401, 7519], [7400, 7519, 7518], [7401, 7402, 7519], [7402, 7520, 7519], [7402, 7403, 7521], [7402, 7521, 7520], [7403, 7404, 7521], [7404, 7522, 7521], [7404, 7405, 7523], [7404, 7523, 7522], [7405, 7406, 7523], [7406, 7524, 7523], [7406, 7407, 7525], [7406, 7525, 7524], [7407, 7408, 7525], [7408, 7526, 7525], [7408, 7409, 7527], [7408, 7527, 7526], [7409, 7410, 7527], [7410, 7528, 7527], [7410, 7411, 7529], [7410, 7529, 7528], [7411, 7412, 7529], [7412, 7530, 7529], [7412, 7413, 7531], [7412, 7531, 7530], [7413, 7414, 7531], [7414, 7532, 7531], [7414, 7415, 7533], [7414, 7533, 7532], [7415, 7416, 7533], [7416, 7534, 7533], [7416, 7417, 7535], [7416, 7535, 7534], [7417, 7418, 7535], [7418, 7536, 7535], [7418, 7419, 7537], [7418, 7537, 7536], [7419, 7420, 7537], [7420, 7538, 7537], [7420, 7421, 7539], [7420, 7539, 7538], [7421, 7422, 7539], [7422, 7540, 7539], [7422, 7423, 7541], [7422, 7541, 7540], [7423, 7424, 7541], [7424, 7542, 7541], [7424, 7425, 7543], [7424, 7543, 7542], [7425, 7426, 7543], [7426, 7544, 7543], [7426, 7427, 7545], [7426, 7545, 7544], [7427, 7428, 7545], [7428, 7546, 7545], [7428, 7429, 7547], [7428, 7547, 7546], [7429, 7430, 7547], [7430, 7548, 7547], [7430, 7431, 7549], [7430, 7549, 7548], [7431, 7432, 7549], [7432, 7550, 7549], [7432, 7433, 7551], [7432, 7551, 7550], [7433, 7434, 7551], [7434, 7552, 7551], [7434, 7435, 7553], [7434, 7553, 7552], [7435, 7436, 7553], [7436, 7554, 7553], [7436, 7437, 7555], [7436, 7555, 7554], [7437, 7438, 7555], [7438, 7556, 7555], [7438, 7439, 7557], [7438, 7557, 7556], [7439, 7440, 7557], [7440, 7558, 7557], [7440, 7441, 7559], [7440, 7559, 7558], [7441, 7442, 7559], [7442, 7560, 7559], [7442, 7443, 7561], [7442, 7561, 7560], [7443, 7444, 7561], [7444, 7562, 7561], [7444, 7445, 7563], [7444, 7563, 7562], [7445, 7446, 7563], [7446, 7564, 7563], [7446, 7447, 7565], [7446, 7565, 7564], [7447, 7448, 7565], [7448, 7566, 7565], [7448, 7449, 7567], [7448, 7567, 7566], [7449, 7450, 7567], [7450, 7568, 
7567], [7450, 7451, 7569], [7450, 7569, 7568], [7451, 7452, 7569], [7452, 7570, 7569], [7452, 7453, 7571], [7452, 7571, 7570], [7453, 7454, 7571], [7454, 7572, 7571], [7454, 7455, 7573], [7454, 7573, 7572], [7455, 7456, 7573], [7456, 7574, 7573], [7456, 7457, 7575], [7456, 7575, 7574], [7457, 7458, 7575], [7458, 7576, 7575], [7458, 7459, 7577], [7458, 7577, 7576], [7459, 7460, 7577], [7460, 7578, 7577], [7460, 7461, 7579], [7460, 7579, 7578], [7461, 7462, 7579], [7462, 7580, 7579], [7462, 7463, 7581], [7462, 7581, 7580], [7463, 7464, 7581], [7464, 7582, 7581], [7464, 7465, 7583], [7464, 7583, 7582], [7465, 7466, 7583], [7466, 7584, 7583], [7466, 7467, 7585], [7466, 7585, 7584], [7467, 7468, 7585], [7468, 7586, 7585], [7468, 7469, 7587], [7468, 7587, 7586], [7469, 7470, 7587], [7470, 7588, 7587], [7470, 7471, 7589], [7470, 7589, 7588], [7471, 7472, 7589], [7472, 7590, 7589], [7472, 7473, 7591], [7472, 7591, 7590], [7473, 7474, 7591], [7474, 7592, 7591], [7474, 7475, 7593], [7474, 7593, 7592], [7475, 7476, 7593], [7476, 7594, 7593], [7476, 7477, 7595], [7476, 7595, 7594], [7477, 7478, 7595], [7478, 7596, 7595], [7478, 7479, 7597], [7478, 7597, 7596], [7479, 7480, 7597], [7480, 7598, 7597], [7480, 7481, 7599], [7480, 7599, 7598], [7481, 7482, 7599], [7482, 7600, 7599], [7483, 7484, 7602], [7483, 7602, 7601], [7484, 7485, 7602], [7485, 7603, 7602], [7485, 7486, 7604], [7485, 7604, 7603], [7486, 7487, 7604], [7487, 7605, 7604], [7487, 7488, 7606], [7487, 7606, 7605], [7488, 7489, 7606], [7489, 7607, 7606], [7489, 7490, 7608], [7489, 7608, 7607], [7490, 7491, 7608], [7491, 7609, 7608], [7491, 7492, 7610], [7491, 7610, 7609], [7492, 7493, 7610], [7493, 7611, 7610], [7493, 7494, 7612], [7493, 7612, 7611], [7494, 7495, 7612], [7495, 7613, 7612], [7495, 7496, 7614], [7495, 7614, 7613], [7496, 7497, 7614], [7497, 7615, 7614], [7497, 7498, 7616], [7497, 7616, 7615], [7498, 7499, 7616], [7499, 7617, 7616], [7499, 7500, 7618], [7499, 7618, 7617], [7500, 7501, 7618], [7501, 7619, 7618], [7502, 7503, 7620], [7503, 7621, 7620], [7503, 7504, 7622], [7503, 7622, 7621], [7504, 7505, 7622], [7505, 7623, 7622], [7505, 7506, 7624], [7505, 7624, 7623], [7506, 7507, 7624], [7507, 7625, 7624], [7507, 7508, 7626], [7507, 7626, 7625], [7508, 7509, 7626], [7509, 7627, 7626], [7509, 7510, 7628], [7509, 7628, 7627], [7510, 7511, 7628], [7511, 7629, 7628], [7511, 7512, 7630], [7511, 7630, 7629], [7512, 7513, 7630], [7513, 7631, 7630], [7513, 7514, 7632], [7513, 7632, 7631], [7514, 7515, 7632], [7515, 7633, 7632], [7515, 7516, 7634], [7515, 7634, 7633], [7516, 7517, 7634], [7517, 7635, 7634], [7517, 7518, 7636], [7517, 7636, 7635], [7518, 7519, 7636], [7519, 7637, 7636], [7519, 7520, 7638], [7519, 7638, 7637], [7520, 7521, 7638], [7521, 7639, 7638], [7521, 7522, 7640], [7521, 7640, 7639], [7522, 7523, 7640], [7523, 7641, 7640], [7523, 7524, 7642], [7523, 7642, 7641], [7524, 7525, 7642], [7525, 7643, 7642], [7525, 7526, 7644], [7525, 7644, 7643], [7526, 7527, 7644], [7527, 7645, 7644], [7527, 7528, 7646], [7527, 7646, 7645], [7528, 7529, 7646], [7529, 7647, 7646], [7529, 7530, 7648], [7529, 7648, 7647], [7530, 7531, 7648], [7531, 7649, 7648], [7531, 7532, 7650], [7531, 7650, 7649], [7532, 7533, 7650], [7533, 7651, 7650], [7533, 7534, 7652], [7533, 7652, 7651], [7534, 7535, 7652], [7535, 7653, 7652], [7535, 7536, 7654], [7535, 7654, 7653], [7536, 7537, 7654], [7537, 7655, 7654], [7537, 7538, 7656], [7537, 7656, 7655], [7538, 7539, 7656], [7539, 7657, 7656], [7539, 7540, 7658], [7539, 7658, 7657], [7540, 7541, 7658], [7541, 
7659, 7658], [7541, 7542, 7660], [7541, 7660, 7659], [7542, 7543, 7660], [7543, 7661, 7660], [7543, 7544, 7662], [7543, 7662, 7661], [7544, 7545, 7662], [7545, 7663, 7662], [7545, 7546, 7664], [7545, 7664, 7663], [7546, 7547, 7664], [7547, 7665, 7664], [7547, 7548, 7666], [7547, 7666, 7665], [7548, 7549, 7666], [7549, 7667, 7666], [7549, 7550, 7668], [7549, 7668, 7667], [7550, 7551, 7668], [7551, 7669, 7668], [7551, 7552, 7670], [7551, 7670, 7669], [7552, 7553, 7670], [7553, 7671, 7670], [7553, 7554, 7672], [7553, 7672, 7671], [7554, 7555, 7672], [7555, 7673, 7672], [7555, 7556, 7674], [7555, 7674, 7673], [7556, 7557, 7674], [7557, 7675, 7674], [7557, 7558, 7676], [7557, 7676, 7675], [7558, 7559, 7676], [7559, 7677, 7676], [7559, 7560, 7678], [7559, 7678, 7677], [7560, 7561, 7678], [7561, 7679, 7678], [7561, 7562, 7680], [7561, 7680, 7679], [7562, 7563, 7680], [7563, 7681, 7680], [7563, 7564, 7682], [7563, 7682, 7681], [7564, 7565, 7682], [7565, 7683, 7682], [7565, 7566, 7684], [7565, 7684, 7683], [7566, 7567, 7684], [7567, 7685, 7684], [7567, 7568, 7686], [7567, 7686, 7685], [7568, 7569, 7686], [7569, 7687, 7686], [7569, 7570, 7688], [7569, 7688, 7687], [7570, 7571, 7688], [7571, 7689, 7688], [7571, 7572, 7690], [7571, 7690, 7689], [7572, 7573, 7690], [7573, 7691, 7690], [7573, 7574, 7692], [7573, 7692, 7691], [7574, 7575, 7692], [7575, 7693, 7692], [7575, 7576, 7694], [7575, 7694, 7693], [7576, 7577, 7694], [7577, 7695, 7694], [7577, 7578, 7696], [7577, 7696, 7695], [7578, 7579, 7696], [7579, 7697, 7696], [7579, 7580, 7698], [7579, 7698, 7697], [7580, 7581, 7698], [7581, 7699, 7698], [7581, 7582, 7700], [7581, 7700, 7699], [7582, 7583, 7700], [7583, 7701, 7700], [7583, 7584, 7702], [7583, 7702, 7701], [7584, 7585, 7702], [7585, 7703, 7702], [7585, 7586, 7704], [7585, 7704, 7703], [7586, 7587, 7704], [7587, 7705, 7704], [7587, 7588, 7706], [7587, 7706, 7705], [7588, 7589, 7706], [7589, 7707, 7706], [7589, 7590, 7708], [7589, 7708, 7707], [7590, 7591, 7708], [7591, 7709, 7708], [7591, 7592, 7710], [7591, 7710, 7709], [7592, 7593, 7710], [7593, 7711, 7710], [7593, 7594, 7712], [7593, 7712, 7711], [7594, 7595, 7712], [7595, 7713, 7712], [7595, 7596, 7714], [7595, 7714, 7713], [7596, 7597, 7714], [7597, 7715, 7714], [7597, 7598, 7716], [7597, 7716, 7715], [7598, 7599, 7716], [7599, 7717, 7716], [7599, 7600, 7718], [7599, 7718, 7717], [7601, 7602, 7719], [7602, 7720, 7719], [7602, 7603, 7721], [7602, 7721, 7720], [7603, 7604, 7721], [7604, 7722, 7721], [7604, 7605, 7723], [7604, 7723, 7722], [7605, 7606, 7723], [7606, 7724, 7723], [7606, 7607, 7725], [7606, 7725, 7724], [7607, 7608, 7725], [7608, 7726, 7725], [7608, 7609, 7727], [7608, 7727, 7726], [7609, 7610, 7727], [7610, 7728, 7727], [7610, 7611, 7729], [7610, 7729, 7728], [7611, 7612, 7729], [7612, 7730, 7729], [7612, 7613, 7731], [7612, 7731, 7730], [7613, 7614, 7731], [7614, 7732, 7731], [7614, 7615, 7733], [7614, 7733, 7732], [7615, 7616, 7733], [7616, 7734, 7733], [7616, 7617, 7735], [7616, 7735, 7734], [7617, 7618, 7735], [7618, 7736, 7735], [7618, 7619, 7737], [7618, 7737, 7736], [7620, 7621, 7739], [7620, 7739, 7738], [7621, 7622, 7739], [7622, 7740, 7739], [7622, 7623, 7741], [7622, 7741, 7740], [7623, 7624, 7741], [7624, 7742, 7741], [7624, 7625, 7743], [7624, 7743, 7742], [7625, 7626, 7743], [7626, 7744, 7743], [7626, 7627, 7745], [7626, 7745, 7744], [7627, 7628, 7745], [7628, 7746, 7745], [7628, 7629, 7747], [7628, 7747, 7746], [7629, 7630, 7747], [7630, 7748, 7747], [7630, 7631, 7749], [7630, 7749, 7748], [7631, 7632, 7749], 
[7632, 7750, 7749], [7632, 7633, 7751], [7632, 7751, 7750], [7633, 7634, 7751], [7634, 7752, 7751], [7634, 7635, 7753], [7634, 7753, 7752], [7635, 7636, 7753], [7636, 7754, 7753], [7636, 7637, 7755], [7636, 7755, 7754], [7637, 7638, 7755], [7638, 7756, 7755], [7638, 7639, 7757], [7638, 7757, 7756], [7639, 7640, 7757], [7640, 7758, 7757], [7640, 7641, 7759], [7640, 7759, 7758], [7641, 7642, 7759], [7642, 7760, 7759], [7642, 7643, 7761], [7642, 7761, 7760], [7643, 7644, 7761], [7644, 7762, 7761], [7644, 7645, 7763], [7644, 7763, 7762], [7645, 7646, 7763], [7646, 7764, 7763], [7646, 7647, 7765], [7646, 7765, 7764], [7647, 7648, 7765], [7648, 7766, 7765], [7648, 7649, 7767], [7648, 7767, 7766], [7649, 7650, 7767], [7650, 7768, 7767], [7650, 7651, 7769], [7650, 7769, 7768], [7651, 7652, 7769], [7652, 7770, 7769], [7652, 7653, 7771], [7652, 7771, 7770], [7653, 7654, 7771], [7654, 7772, 7771], [7654, 7655, 7773], [7654, 7773, 7772], [7655, 7656, 7773], [7656, 7774, 7773], [7656, 7657, 7775], [7656, 7775, 7774], [7657, 7658, 7775], [7658, 7776, 7775], [7658, 7659, 7777], [7658, 7777, 7776], [7659, 7660, 7777], [7660, 7778, 7777], [7660, 7661, 7779], [7660, 7779, 7778], [7661, 7662, 7779], [7662, 7780, 7779], [7662, 7663, 7781], [7662, 7781, 7780], [7663, 7664, 7781], [7664, 7782, 7781], [7664, 7665, 7783], [7664, 7783, 7782], [7665, 7666, 7783], [7666, 7784, 7783], [7666, 7667, 7785], [7666, 7785, 7784], [7667, 7668, 7785], [7668, 7786, 7785], [7668, 7669, 7787], [7668, 7787, 7786], [7669, 7670, 7787], [7670, 7788, 7787], [7670, 7671, 7789], [7670, 7789, 7788], [7671, 7672, 7789], [7672, 7790, 7789], [7672, 7673, 7791], [7672, 7791, 7790], [7673, 7674, 7791], [7674, 7792, 7791], [7674, 7675, 7793], [7674, 7793, 7792], [7675, 7676, 7793], [7676, 7794, 7793], [7676, 7677, 7795], [7676, 7795, 7794], [7677, 7678, 7795], [7678, 7796, 7795], [7678, 7679, 7797], [7678, 7797, 7796], [7679, 7680, 7797], [7680, 7798, 7797], [7680, 7681, 7799], [7680, 7799, 7798], [7681, 7682, 7799], [7682, 7800, 7799], [7682, 7683, 7801], [7682, 7801, 7800], [7683, 7684, 7801], [7684, 7802, 7801], [7684, 7685, 7803], [7684, 7803, 7802], [7685, 7686, 7803], [7686, 7804, 7803], [7686, 7687, 7805], [7686, 7805, 7804], [7687, 7688, 7805], [7688, 7806, 7805], [7688, 7689, 7807], [7688, 7807, 7806], [7689, 7690, 7807], [7690, 7808, 7807], [7690, 7691, 7809], [7690, 7809, 7808], [7691, 7692, 7809], [7692, 7810, 7809], [7692, 7693, 7811], [7692, 7811, 7810], [7693, 7694, 7811], [7694, 7812, 7811], [7694, 7695, 7813], [7694, 7813, 7812], [7695, 7696, 7813], [7696, 7814, 7813], [7696, 7697, 7815], [7696, 7815, 7814], [7697, 7698, 7815], [7698, 7816, 7815], [7698, 7699, 7817], [7698, 7817, 7816], [7699, 7700, 7817], [7700, 7818, 7817], [7700, 7701, 7819], [7700, 7819, 7818], [7701, 7702, 7819], [7702, 7820, 7819], [7702, 7703, 7821], [7702, 7821, 7820], [7703, 7704, 7821], [7704, 7822, 7821], [7704, 7705, 7823], [7704, 7823, 7822], [7705, 7706, 7823], [7706, 7824, 7823], [7706, 7707, 7825], [7706, 7825, 7824], [7707, 7708, 7825], [7708, 7826, 7825], [7708, 7709, 7827], [7708, 7827, 7826], [7709, 7710, 7827], [7710, 7828, 7827], [7710, 7711, 7829], [7710, 7829, 7828], [7711, 7712, 7829], [7712, 7830, 7829], [7712, 7713, 7831], [7712, 7831, 7830], [7713, 7714, 7831], [7714, 7832, 7831], [7714, 7715, 7833], [7714, 7833, 7832], [7715, 7716, 7833], [7716, 7834, 7833], [7716, 7717, 7835], [7716, 7835, 7834], [7717, 7718, 7835], [7718, 7836, 7835], [7719, 7720, 7838], [7719, 7838, 7837], [7720, 7721, 7838], [7721, 7839, 7838], [7721, 7722, 
7840], [7721, 7840, 7839], [7722, 7723, 7840], [7723, 7841, 7840], [7723, 7724, 7842], [7723, 7842, 7841], [7724, 7725, 7842], [7725, 7843, 7842], [7725, 7726, 7844], [7725, 7844, 7843], [7726, 7727, 7844], [7727, 7845, 7844], [7727, 7728, 7846], [7727, 7846, 7845], [7728, 7729, 7846], [7729, 7847, 7846], [7729, 7730, 7848], [7729, 7848, 7847], [7730, 7731, 7848], [7731, 7849, 7848], [7731, 7732, 7850], [7731, 7850, 7849], [7732, 7733, 7850], [7733, 7851, 7850], [7733, 7734, 7852], [7733, 7852, 7851], [7734, 7735, 7852], [7735, 7853, 7852], [7735, 7736, 7854], [7735, 7854, 7853], [7736, 7737, 7854], [7737, 7855, 7854], [7738, 7739, 7856], [7739, 7857, 7856], [7739, 7740, 7858], [7739, 7858, 7857], [7740, 7741, 7858], [7741, 7859, 7858], [7741, 7742, 7860], [7741, 7860, 7859], [7742, 7743, 7860], [7743, 7861, 7860], [7743, 7744, 7862], [7743, 7862, 7861], [7744, 7745, 7862], [7745, 7863, 7862], [7745, 7746, 7864], [7745, 7864, 7863], [7746, 7747, 7864], [7747, 7865, 7864], [7747, 7748, 7866], [7747, 7866, 7865], [7748, 7749, 7866], [7749, 7867, 7866], [7749, 7750, 7868], [7749, 7868, 7867], [7750, 7751, 7868], [7751, 7869, 7868], [7751, 7752, 7870], [7751, 7870, 7869], [7752, 7753, 7870], [7753, 7871, 7870], [7753, 7754, 7872], [7753, 7872, 7871], [7754, 7755, 7872], [7755, 7873, 7872], [7755, 7756, 7874], [7755, 7874, 7873], [7756, 7757, 7874], [7757, 7875, 7874], [7757, 7758, 7876], [7757, 7876, 7875], [7758, 7759, 7876], [7759, 7877, 7876], [7759, 7760, 7878], [7759, 7878, 7877], [7760, 7761, 7878], [7761, 7879, 7878], [7761, 7762, 7880], [7761, 7880, 7879], [7762, 7763, 7880], [7763, 7881, 7880], [7763, 7764, 7882], [7763, 7882, 7881], [7764, 7765, 7882], [7765, 7883, 7882], [7765, 7766, 7884], [7765, 7884, 7883], [7766, 7767, 7884], [7767, 7885, 7884], [7767, 7768, 7886], [7767, 7886, 7885], [7768, 7769, 7886], [7769, 7887, 7886], [7769, 7770, 7888], [7769, 7888, 7887], [7770, 7771, 7888], [7771, 7889, 7888], [7771, 7772, 7890], [7771, 7890, 7889], [7772, 7773, 7890], [7773, 7891, 7890], [7773, 7774, 7892], [7773, 7892, 7891], [7774, 7775, 7892], [7775, 7893, 7892], [7775, 7776, 7894], [7775, 7894, 7893], [7776, 7777, 7894], [7777, 7895, 7894], [7777, 7778, 7896], [7777, 7896, 7895], [7778, 7779, 7896], [7779, 7897, 7896], [7779, 7780, 7898], [7779, 7898, 7897], [7780, 7781, 7898], [7781, 7899, 7898], [7781, 7782, 7900], [7781, 7900, 7899], [7782, 7783, 7900], [7783, 7901, 7900], [7783, 7784, 7902], [7783, 7902, 7901], [7784, 7785, 7902], [7785, 7903, 7902], [7785, 7786, 7904], [7785, 7904, 7903], [7786, 7787, 7904], [7787, 7905, 7904], [7787, 7788, 7906], [7787, 7906, 7905], [7788, 7789, 7906], [7789, 7907, 7906], [7789, 7790, 7908], [7789, 7908, 7907], [7790, 7791, 7908], [7791, 7909, 7908], [7791, 7792, 7910], [7791, 7910, 7909], [7792, 7793, 7910], [7793, 7911, 7910], [7793, 7794, 7912], [7793, 7912, 7911], [7794, 7795, 7912], [7795, 7913, 7912], [7795, 7796, 7914], [7795, 7914, 7913], [7796, 7797, 7914], [7797, 7915, 7914], [7797, 7798, 7916], [7797, 7916, 7915], [7798, 7799, 7916], [7799, 7917, 7916], [7799, 7800, 7918], [7799, 7918, 7917], [7800, 7801, 7918], [7801, 7919, 7918], [7801, 7802, 7920], [7801, 7920, 7919], [7802, 7803, 7920], [7803, 7921, 7920], [7803, 7804, 7922], [7803, 7922, 7921], [7804, 7805, 7922], [7805, 7923, 7922], [7805, 7806, 7924], [7805, 7924, 7923], [7806, 7807, 7924], [7807, 7925, 7924], [7807, 7808, 7926], [7807, 7926, 7925], [7808, 7809, 7926], [7809, 7927, 7926], [7809, 7810, 7928], [7809, 7928, 7927], [7810, 7811, 7928], [7811, 7929, 7928], [7811, 
7812, 7930], [7811, 7930, 7929], [7812, 7813, 7930], [7813, 7931, 7930], [7813, 7814, 7932], [7813, 7932, 7931], [7814, 7815, 7932], [7815, 7933, 7932], [7815, 7816, 7934], [7815, 7934, 7933], [7816, 7817, 7934], [7817, 7935, 7934], [7817, 7818, 7936], [7817, 7936, 7935], [7818, 7819, 7936], [7819, 7937, 7936], [7819, 7820, 7938], [7819, 7938, 7937], [7820, 7821, 7938], [7821, 7939, 7938], [7821, 7822, 7940], [7821, 7940, 7939], [7822, 7823, 7940], [7823, 7941, 7940], [7823, 7824, 7942], [7823, 7942, 7941], [7824, 7825, 7942], [7825, 7943, 7942], [7825, 7826, 7944], [7825, 7944, 7943], [7826, 7827, 7944], [7827, 7945, 7944], [7827, 7828, 7946], [7827, 7946, 7945], [7828, 7829, 7946], [7829, 7947, 7946], [7829, 7830, 7948], [7829, 7948, 7947], [7830, 7831, 7948], [7831, 7949, 7948], [7831, 7832, 7950], [7831, 7950, 7949], [7832, 7833, 7950], [7833, 7951, 7950], [7833, 7834, 7952], [7833, 7952, 7951], [7834, 7835, 7952], [7835, 7953, 7952], [7835, 7836, 7954], [7835, 7954, 7953], [7837, 7838, 7955], [7838, 7956, 7955], [7838, 7839, 7957], [7838, 7957, 7956], [7839, 7840, 7957], [7840, 7958, 7957], [7840, 7841, 7959], [7840, 7959, 7958], [7841, 7842, 7959], [7842, 7960, 7959], [7842, 7843, 7961], [7842, 7961, 7960], [7843, 7844, 7961], [7844, 7962, 7961], [7844, 7845, 7963], [7844, 7963, 7962], [7845, 7846, 7963], [7846, 7964, 7963], [7846, 7847, 7965], [7846, 7965, 7964], [7847, 7848, 7965], [7848, 7966, 7965], [7848, 7849, 7967], [7848, 7967, 7966], [7849, 7850, 7967], [7850, 7968, 7967], [7850, 7851, 7969], [7850, 7969, 7968], [7851, 7852, 7969], [7852, 7970, 7969], [7852, 7853, 7971], [7852, 7971, 7970], [7853, 7854, 7971], [7854, 7972, 7971], [7854, 7855, 7973], [7854, 7973, 7972], [7856, 7857, 7975], [7856, 7975, 7974], [7857, 7858, 7975], [7858, 7976, 7975], [7858, 7859, 7977], [7858, 7977, 7976], [7859, 7860, 7977], [7860, 7978, 7977], [7860, 7861, 7979], [7860, 7979, 7978], [7861, 7862, 7979], [7862, 7980, 7979], [7862, 7863, 7981], [7862, 7981, 7980], [7863, 7864, 7981], [7864, 7982, 7981], [7864, 7865, 7983], [7864, 7983, 7982], [7865, 7866, 7983], [7866, 7984, 7983], [7866, 7867, 7985], [7866, 7985, 7984], [7867, 7868, 7985], [7868, 7986, 7985], [7868, 7869, 7987], [7868, 7987, 7986], [7869, 7870, 7987], [7870, 7988, 7987], [7870, 7871, 7989], [7870, 7989, 7988], [7871, 7872, 7989], [7872, 7990, 7989], [7872, 7873, 7991], [7872, 7991, 7990], [7873, 7874, 7991], [7874, 7992, 7991], [7874, 7875, 7993], [7874, 7993, 7992], [7875, 7876, 7993], [7876, 7994, 7993], [7876, 7877, 7995], [7876, 7995, 7994], [7877, 7878, 7995], [7878, 7996, 7995], [7878, 7879, 7997], [7878, 7997, 7996], [7879, 7880, 7997], [7880, 7998, 7997], [7880, 7881, 7999], [7880, 7999, 7998], [7881, 7882, 7999], [7882, 8000, 7999], [7882, 7883, 8001], [7882, 8001, 8000], [7883, 7884, 8001], [7884, 8002, 8001], [7884, 7885, 8003], [7884, 8003, 8002], [7885, 7886, 8003], [7886, 8004, 8003], [7886, 7887, 8005], [7886, 8005, 8004], [7887, 7888, 8005], [7888, 8006, 8005], [7888, 7889, 8007], [7888, 8007, 8006], [7889, 7890, 8007], [7890, 8008, 8007], [7890, 7891, 8009], [7890, 8009, 8008], [7891, 7892, 8009], [7892, 8010, 8009], [7892, 7893, 8011], [7892, 8011, 8010], [7893, 7894, 8011], [7894, 8012, 8011], [7894, 7895, 8013], [7894, 8013, 8012], [7895, 7896, 8013], [7896, 8014, 8013], [7896, 7897, 8015], [7896, 8015, 8014], [7897, 7898, 8015], [7898, 8016, 8015], [7898, 7899, 8017], [7898, 8017, 8016], [7899, 7900, 8017], [7900, 8018, 8017], [7900, 7901, 8019], [7900, 8019, 8018], [7901, 7902, 8019], [7902, 8020, 8019], 
[7902, 7903, 8021], [7902, 8021, 8020], [7903, 7904, 8021], [7904, 8022, 8021], [7904, 7905, 8023], [7904, 8023, 8022], [7905, 7906, 8023], [7906, 8024, 8023], [7906, 7907, 8025], [7906, 8025, 8024], [7907, 7908, 8025], [7908, 8026, 8025], [7908, 7909, 8027], [7908, 8027, 8026], [7909, 7910, 8027], [7910, 8028, 8027], [7910, 7911, 8029], [7910, 8029, 8028], [7911, 7912, 8029], [7912, 8030, 8029], [7912, 7913, 8031], [7912, 8031, 8030], [7913, 7914, 8031], [7914, 8032, 8031], [7914, 7915, 8033], [7914, 8033, 8032], [7915, 7916, 8033], [7916, 8034, 8033], [7916, 7917, 8035], [7916, 8035, 8034], [7917, 7918, 8035], [7918, 8036, 8035], [7918, 7919, 8037], [7918, 8037, 8036], [7919, 7920, 8037], [7920, 8038, 8037], [7920, 7921, 8039], [7920, 8039, 8038], [7921, 7922, 8039], [7922, 8040, 8039], [7922, 7923, 8041], [7922, 8041, 8040], [7923, 7924, 8041], [7924, 8042, 8041], [7924, 7925, 8043], [7924, 8043, 8042], [7925, 7926, 8043], [7926, 8044, 8043], [7926, 7927, 8045], [7926, 8045, 8044], [7927, 7928, 8045], [7928, 8046, 8045], [7928, 7929, 8047], [7928, 8047, 8046], [7929, 7930, 8047], [7930, 8048, 8047], [7930, 7931, 8049], [7930, 8049, 8048], [7931, 7932, 8049], [7932, 8050, 8049], [7932, 7933, 8051], [7932, 8051, 8050], [7933, 7934, 8051], [7934, 8052, 8051], [7934, 7935, 8053], [7934, 8053, 8052], [7935, 7936, 8053], [7936, 8054, 8053], [7936, 7937, 8055], [7936, 8055, 8054], [7937, 7938, 8055], [7938, 8056, 8055], [7938, 7939, 8057], [7938, 8057, 8056], [7939, 7940, 8057], [7940, 8058, 8057], [7940, 7941, 8059], [7940, 8059, 8058], [7941, 7942, 8059], [7942, 8060, 8059], [7942, 7943, 8061], [7942, 8061, 8060], [7943, 7944, 8061], [7944, 8062, 8061], [7944, 7945, 8063], [7944, 8063, 8062], [7945, 7946, 8063], [7946, 8064, 8063], [7946, 7947, 8065], [7946, 8065, 8064], [7947, 7948, 8065], [7948, 8066, 8065], [7948, 7949, 8067], [7948, 8067, 8066], [7949, 7950, 8067], [7950, 8068, 8067], [7950, 7951, 8069], [7950, 8069, 8068], [7951, 7952, 8069], [7952, 8070, 8069], [7952, 7953, 8071], [7952, 8071, 8070], [7953, 7954, 8071], [7954, 8072, 8071], [7955, 7956, 8074], [7955, 8074, 8073], [7956, 7957, 8074], [7957, 8075, 8074], [7957, 7958, 8076], [7957, 8076, 8075], [7958, 7959, 8076], [7959, 8077, 8076], [7959, 7960, 8078], [7959, 8078, 8077], [7960, 7961, 8078], [7961, 8079, 8078], [7961, 7962, 8080], [7961, 8080, 8079], [7962, 7963, 8080], [7963, 8081, 8080], [7963, 7964, 8082], [7963, 8082, 8081], [7964, 7965, 8082], [7965, 8083, 8082], [7965, 7966, 8084], [7965, 8084, 8083], [7966, 7967, 8084], [7967, 8085, 8084], [7967, 7968, 8086], [7967, 8086, 8085], [7968, 7969, 8086], [7969, 8087, 8086], [7969, 7970, 8088], [7969, 8088, 8087], [7970, 7971, 8088], [7971, 8089, 8088], [7971, 7972, 8090], [7971, 8090, 8089], [7972, 7973, 8090], [7973, 8091, 8090], [7974, 7975, 8092], [7975, 8093, 8092], [7975, 7976, 8094], [7975, 8094, 8093], [7976, 7977, 8094], [7977, 8095, 8094], [7977, 7978, 8096], [7977, 8096, 8095], [7978, 7979, 8096], [7979, 8097, 8096], [7979, 7980, 8098], [7979, 8098, 8097], [7980, 7981, 8098], [7981, 8099, 8098], [7981, 7982, 8100], [7981, 8100, 8099], [7982, 7983, 8100], [7983, 8101, 8100], [7983, 7984, 8102], [7983, 8102, 8101], [7984, 7985, 8102], [7985, 8103, 8102], [7985, 7986, 8104], [7985, 8104, 8103], [7986, 7987, 8104], [7987, 8105, 8104], [7987, 7988, 8106], [7987, 8106, 8105], [7988, 7989, 8106], [7989, 8107, 8106], [7989, 7990, 8108], [7989, 8108, 8107], [7990, 7991, 8108], [7991, 8109, 8108], [7991, 7992, 8110], [7991, 8110, 8109], [7992, 7993, 8110], [7993, 8111, 
8110], [7993, 7994, 8112], [7993, 8112, 8111], [7994, 7995, 8112], [7995, 8113, 8112], [7995, 7996, 8114], [7995, 8114, 8113], [7996, 7997, 8114], [7997, 8115, 8114], [7997, 7998, 8116], [7997, 8116, 8115], [7998, 7999, 8116], [7999, 8117, 8116], [7999, 8000, 8118], [7999, 8118, 8117], [8000, 8001, 8118], [8001, 8119, 8118], [8001, 8002, 8120], [8001, 8120, 8119], [8002, 8003, 8120], [8003, 8121, 8120], [8003, 8004, 8122], [8003, 8122, 8121], [8004, 8005, 8122], [8005, 8123, 8122], [8005, 8006, 8124], [8005, 8124, 8123], [8006, 8007, 8124], [8007, 8125, 8124], [8007, 8008, 8126], [8007, 8126, 8125], [8008, 8009, 8126], [8009, 8127, 8126], [8009, 8010, 8128], [8009, 8128, 8127], [8010, 8011, 8128], [8011, 8129, 8128], [8011, 8012, 8130], [8011, 8130, 8129], [8012, 8013, 8130], [8013, 8131, 8130], [8013, 8014, 8132], [8013, 8132, 8131], [8014, 8015, 8132], [8015, 8133, 8132], [8015, 8016, 8134], [8015, 8134, 8133], [8016, 8017, 8134], [8017, 8135, 8134], [8017, 8018, 8136], [8017, 8136, 8135], [8018, 8019, 8136], [8019, 8137, 8136], [8019, 8020, 8138], [8019, 8138, 8137], [8020, 8021, 8138], [8021, 8139, 8138], [8021, 8022, 8140], [8021, 8140, 8139], [8022, 8023, 8140], [8023, 8141, 8140], [8023, 8024, 8142], [8023, 8142, 8141], [8024, 8025, 8142], [8025, 8143, 8142], [8025, 8026, 8144], [8025, 8144, 8143], [8026, 8027, 8144], [8027, 8145, 8144], [8027, 8028, 8146], [8027, 8146, 8145], [8028, 8029, 8146], [8029, 8147, 8146], [8029, 8030, 8148], [8029, 8148, 8147], [8030, 8031, 8148], [8031, 8149, 8148], [8031, 8032, 8150], [8031, 8150, 8149], [8032, 8033, 8150], [8033, 8151, 8150], [8033, 8034, 8152], [8033, 8152, 8151], [8034, 8035, 8152], [8035, 8153, 8152], [8035, 8036, 8154], [8035, 8154, 8153], [8036, 8037, 8154], [8037, 8155, 8154], [8037, 8038, 8156], [8037, 8156, 8155], [8038, 8039, 8156], [8039, 8157, 8156], [8039, 8040, 8158], [8039, 8158, 8157], [8040, 8041, 8158], [8041, 8159, 8158], [8041, 8042, 8160], [8041, 8160, 8159], [8042, 8043, 8160], [8043, 8161, 8160], [8043, 8044, 8162], [8043, 8162, 8161], [8044, 8045, 8162], [8045, 8163, 8162], [8045, 8046, 8164], [8045, 8164, 8163], [8046, 8047, 8164], [8047, 8165, 8164], [8047, 8048, 8166], [8047, 8166, 8165], [8048, 8049, 8166], [8049, 8167, 8166], [8049, 8050, 8168], [8049, 8168, 8167], [8050, 8051, 8168], [8051, 8169, 8168], [8051, 8052, 8170], [8051, 8170, 8169], [8052, 8053, 8170], [8053, 8171, 8170], [8053, 8054, 8172], [8053, 8172, 8171], [8054, 8055, 8172], [8055, 8173, 8172], [8055, 8056, 8174], [8055, 8174, 8173], [8056, 8057, 8174], [8057, 8175, 8174], [8057, 8058, 8176], [8057, 8176, 8175], [8058, 8059, 8176], [8059, 8177, 8176], [8059, 8060, 8178], [8059, 8178, 8177], [8060, 8061, 8178], [8061, 8179, 8178], [8061, 8062, 8180], [8061, 8180, 8179], [8062, 8063, 8180], [8063, 8181, 8180], [8063, 8064, 8182], [8063, 8182, 8181], [8064, 8065, 8182], [8065, 8183, 8182], [8065, 8066, 8184], [8065, 8184, 8183], [8066, 8067, 8184], [8067, 8185, 8184], [8067, 8068, 8186], [8067, 8186, 8185], [8068, 8069, 8186], [8069, 8187, 8186], [8069, 8070, 8188], [8069, 8188, 8187], [8070, 8071, 8188], [8071, 8189, 8188], [8071, 8072, 8190], [8071, 8190, 8189], [8073, 8074, 8191], [8074, 8192, 8191], [8074, 8075, 8193], [8074, 8193, 8192], [8075, 8076, 8193], [8076, 8194, 8193], [8076, 8077, 8195], [8076, 8195, 8194], [8077, 8078, 8195], [8078, 8196, 8195], [8078, 8079, 8197], [8078, 8197, 8196], [8079, 8080, 8197], [8080, 8198, 8197], [8080, 8081, 8199], [8080, 8199, 8198], [8081, 8082, 8199], [8082, 8200, 8199], [8082, 8083, 8201], [8082, 
8201, 8200], [8083, 8084, 8201], [8084, 8202, 8201], [8084, 8085, 8203], [8084, 8203, 8202], [8085, 8086, 8203], [8086, 8204, 8203], [8086, 8087, 8205], [8086, 8205, 8204], [8087, 8088, 8205], [8088, 8206, 8205], [8088, 8089, 8207], [8088, 8207, 8206], [8089, 8090, 8207], [8090, 8208, 8207], [8090, 8091, 8209], [8090, 8209, 8208], [8092, 8093, 8211], [8092, 8211, 8210], [8093, 8094, 8211], [8094, 8212, 8211], [8094, 8095, 8213], [8094, 8213, 8212], [8095, 8096, 8213], [8096, 8214, 8213], [8096, 8097, 8215], [8096, 8215, 8214], [8097, 8098, 8215], [8098, 8216, 8215], [8098, 8099, 8217], [8098, 8217, 8216], [8099, 8100, 8217], [8100, 8218, 8217], [8100, 8101, 8219], [8100, 8219, 8218], [8101, 8102, 8219], [8102, 8220, 8219], [8102, 8103, 8221], [8102, 8221, 8220], [8103, 8104, 8221], [8104, 8222, 8221], [8104, 8105, 8223], [8104, 8223, 8222], [8105, 8106, 8223], [8106, 8224, 8223], [8106, 8107, 8225], [8106, 8225, 8224], [8107, 8108, 8225], [8108, 8226, 8225], [8108, 8109, 8227], [8108, 8227, 8226], [8109, 8110, 8227], [8110, 8228, 8227], [8110, 8111, 8229], [8110, 8229, 8228], [8111, 8112, 8229], [8112, 8230, 8229], [8112, 8113, 8231], [8112, 8231, 8230], [8113, 8114, 8231], [8114, 8232, 8231], [8114, 8115, 8233], [8114, 8233, 8232], [8115, 8116, 8233], [8116, 8234, 8233], [8116, 8117, 8235], [8116, 8235, 8234], [8117, 8118, 8235], [8118, 8236, 8235], [8118, 8119, 8237], [8118, 8237, 8236], [8119, 8120, 8237], [8120, 8238, 8237], [8120, 8121, 8239], [8120, 8239, 8238], [8121, 8122, 8239], [8122, 8240, 8239], [8122, 8123, 8241], [8122, 8241, 8240], [8123, 8124, 8241], [8124, 8242, 8241], [8124, 8125, 8243], [8124, 8243, 8242], [8125, 8126, 8243], [8126, 8244, 8243], [8126, 8127, 8245], [8126, 8245, 8244], [8127, 8128, 8245], [8128, 8246, 8245], [8128, 8129, 8247], [8128, 8247, 8246], [8129, 8130, 8247], [8130, 8248, 8247], [8130, 8131, 8249], [8130, 8249, 8248], [8131, 8132, 8249], [8132, 8250, 8249], [8132, 8133, 8251], [8132, 8251, 8250], [8133, 8134, 8251], [8134, 8252, 8251], [8134, 8135, 8253], [8134, 8253, 8252], [8135, 8136, 8253], [8136, 8254, 8253], [8136, 8137, 8255], [8136, 8255, 8254], [8137, 8138, 8255], [8138, 8256, 8255], [8138, 8139, 8257], [8138, 8257, 8256], [8139, 8140, 8257], [8140, 8258, 8257], [8140, 8141, 8259], [8140, 8259, 8258], [8141, 8142, 8259], [8142, 8260, 8259], [8142, 8143, 8261], [8142, 8261, 8260], [8143, 8144, 8261], [8144, 8262, 8261], [8144, 8145, 8263], [8144, 8263, 8262], [8145, 8146, 8263], [8146, 8264, 8263], [8146, 8147, 8265], [8146, 8265, 8264], [8147, 8148, 8265], [8148, 8266, 8265], [8148, 8149, 8267], [8148, 8267, 8266], [8149, 8150, 8267], [8150, 8268, 8267], [8150, 8151, 8269], [8150, 8269, 8268], [8151, 8152, 8269], [8152, 8270, 8269], [8152, 8153, 8271], [8152, 8271, 8270], [8153, 8154, 8271], [8154, 8272, 8271], [8154, 8155, 8273], [8154, 8273, 8272], [8155, 8156, 8273], [8156, 8274, 8273], [8156, 8157, 8275], [8156, 8275, 8274], [8157, 8158, 8275], [8158, 8276, 8275], [8158, 8159, 8277], [8158, 8277, 8276], [8159, 8160, 8277], [8160, 8278, 8277], [8160, 8161, 8279], [8160, 8279, 8278], [8161, 8162, 8279], [8162, 8280, 8279], [8162, 8163, 8281], [8162, 8281, 8280], [8163, 8164, 8281], [8164, 8282, 8281], [8164, 8165, 8283], [8164, 8283, 8282], [8165, 8166, 8283], [8166, 8284, 8283], [8166, 8167, 8285], [8166, 8285, 8284], [8167, 8168, 8285], [8168, 8286, 8285], [8168, 8169, 8287], [8168, 8287, 8286], [8169, 8170, 8287], [8170, 8288, 8287], [8170, 8171, 8289], [8170, 8289, 8288], [8171, 8172, 8289], [8172, 8290, 8289], [8172, 8173, 8291], 
[8172, 8291, 8290], [8173, 8174, 8291], [8174, 8292, 8291], [8174, 8175, 8293], [8174, 8293, 8292], [8175, 8176, 8293], [8176, 8294, 8293], [8176, 8177, 8295], [8176, 8295, 8294], [8177, 8178, 8295], [8178, 8296, 8295], [8178, 8179, 8297], [8178, 8297, 8296], [8179, 8180, 8297], [8180, 8298, 8297], [8180, 8181, 8299], [8180, 8299, 8298], [8181, 8182, 8299], [8182, 8300, 8299], [8182, 8183, 8301], [8182, 8301, 8300], [8183, 8184, 8301], [8184, 8302, 8301], [8184, 8185, 8303], [8184, 8303, 8302], [8185, 8186, 8303], [8186, 8304, 8303], [8186, 8187, 8305], [8186, 8305, 8304], [8187, 8188, 8305], [8188, 8306, 8305], [8188, 8189, 8307], [8188, 8307, 8306], [8189, 8190, 8307], [8190, 8308, 8307], [8191, 8192, 8310], [8191, 8310, 8309], [8192, 8193, 8310], [8193, 8311, 8310], [8193, 8194, 8312], [8193, 8312, 8311], [8194, 8195, 8312], [8195, 8313, 8312], [8195, 8196, 8314], [8195, 8314, 8313], [8196, 8197, 8314], [8197, 8315, 8314], [8197, 8198, 8316], [8197, 8316, 8315], [8198, 8199, 8316], [8199, 8317, 8316], [8199, 8200, 8318], [8199, 8318, 8317], [8200, 8201, 8318], [8201, 8319, 8318], [8201, 8202, 8320], [8201, 8320, 8319], [8202, 8203, 8320], [8203, 8321, 8320], [8203, 8204, 8322], [8203, 8322, 8321], [8204, 8205, 8322], [8205, 8323, 8322], [8205, 8206, 8324], [8205, 8324, 8323], [8206, 8207, 8324], [8207, 8325, 8324], [8207, 8208, 8326], [8207, 8326, 8325], [8208, 8209, 8326], [8209, 8327, 8326], [8210, 8211, 8328], [8211, 8329, 8328], [8211, 8212, 8330], [8211, 8330, 8329], [8212, 8213, 8330], [8213, 8331, 8330], [8213, 8214, 8332], [8213, 8332, 8331], [8214, 8215, 8332], [8215, 8333, 8332], [8215, 8216, 8334], [8215, 8334, 8333], [8216, 8217, 8334], [8217, 8335, 8334], [8217, 8218, 8336], [8217, 8336, 8335], [8218, 8219, 8336], [8219, 8337, 8336], [8219, 8220, 8338], [8219, 8338, 8337], [8220, 8221, 8338], [8221, 8339, 8338], [8221, 8222, 8340], [8221, 8340, 8339], [8222, 8223, 8340], [8223, 8341, 8340], [8223, 8224, 8342], [8223, 8342, 8341], [8224, 8225, 8342], [8225, 8343, 8342], [8225, 8226, 8344], [8225, 8344, 8343], [8226, 8227, 8344], [8227, 8345, 8344], [8227, 8228, 8346], [8227, 8346, 8345], [8228, 8229, 8346], [8229, 8347, 8346], [8229, 8230, 8348], [8229, 8348, 8347], [8230, 8231, 8348], [8231, 8349, 8348], [8231, 8232, 8350], [8231, 8350, 8349], [8232, 8233, 8350], [8233, 8351, 8350], [8233, 8234, 8352], [8233, 8352, 8351], [8234, 8235, 8352], [8235, 8353, 8352], [8235, 8236, 8354], [8235, 8354, 8353], [8236, 8237, 8354], [8237, 8355, 8354], [8237, 8238, 8356], [8237, 8356, 8355], [8238, 8239, 8356], [8239, 8357, 8356], [8239, 8240, 8358], [8239, 8358, 8357], [8240, 8241, 8358], [8241, 8359, 8358], [8241, 8242, 8360], [8241, 8360, 8359], [8242, 8243, 8360], [8243, 8361, 8360], [8243, 8244, 8362], [8243, 8362, 8361], [8244, 8245, 8362], [8245, 8363, 8362], [8245, 8246, 8364], [8245, 8364, 8363], [8246, 8247, 8364], [8247, 8365, 8364], [8247, 8248, 8366], [8247, 8366, 8365], [8248, 8249, 8366], [8249, 8367, 8366], [8249, 8250, 8368], [8249, 8368, 8367], [8250, 8251, 8368], [8251, 8369, 8368], [8251, 8252, 8370], [8251, 8370, 8369], [8252, 8253, 8370], [8253, 8371, 8370], [8253, 8254, 8372], [8253, 8372, 8371], [8254, 8255, 8372], [8255, 8373, 8372], [8255, 8256, 8374], [8255, 8374, 8373], [8256, 8257, 8374], [8257, 8375, 8374], [8257, 8258, 8376], [8257, 8376, 8375], [8258, 8259, 8376], [8259, 8377, 8376], [8259, 8260, 8378], [8259, 8378, 8377], [8260, 8261, 8378], [8261, 8379, 8378], [8261, 8262, 8380], [8261, 8380, 8379], [8262, 8263, 8380], [8263, 8381, 8380], [8263, 8264, 
8382], [8263, 8382, 8381], [8264, 8265, 8382], [8265, 8383, 8382], [8265, 8266, 8384], [8265, 8384, 8383], [8266, 8267, 8384], [8267, 8385, 8384], [8267, 8268, 8386], [8267, 8386, 8385], [8268, 8269, 8386], [8269, 8387, 8386], [8269, 8270, 8388], [8269, 8388, 8387], [8270, 8271, 8388], [8271, 8389, 8388], [8271, 8272, 8390], [8271, 8390, 8389], [8272, 8273, 8390], [8273, 8391, 8390], [8273, 8274, 8392], [8273, 8392, 8391], [8274, 8275, 8392], [8275, 8393, 8392], [8275, 8276, 8394], [8275, 8394, 8393], [8276, 8277, 8394], [8277, 8395, 8394], [8277, 8278, 8396], [8277, 8396, 8395], [8278, 8279, 8396], [8279, 8397, 8396], [8279, 8280, 8398], [8279, 8398, 8397], [8280, 8281, 8398], [8281, 8399, 8398], [8281, 8282, 8400], [8281, 8400, 8399], [8282, 8283, 8400], [8283, 8401, 8400], [8283, 8284, 8402], [8283, 8402, 8401], [8284, 8285, 8402], [8285, 8403, 8402], [8285, 8286, 8404], [8285, 8404, 8403], [8286, 8287, 8404], [8287, 8405, 8404], [8287, 8288, 8406], [8287, 8406, 8405], [8288, 8289, 8406], [8289, 8407, 8406], [8289, 8290, 8408], [8289, 8408, 8407], [8290, 8291, 8408], [8291, 8409, 8408], [8291, 8292, 8410], [8291, 8410, 8409], [8292, 8293, 8410], [8293, 8411, 8410], [8293, 8294, 8412], [8293, 8412, 8411], [8294, 8295, 8412], [8295, 8413, 8412], [8295, 8296, 8414], [8295, 8414, 8413], [8296, 8297, 8414], [8297, 8415, 8414], [8297, 8298, 8416], [8297, 8416, 8415], [8298, 8299, 8416], [8299, 8417, 8416], [8299, 8300, 8418], [8299, 8418, 8417], [8300, 8301, 8418], [8301, 8419, 8418], [8301, 8302, 8420], [8301, 8420, 8419], [8302, 8303, 8420], [8303, 8421, 8420], [8303, 8304, 8422], [8303, 8422, 8421], [8304, 8305, 8422], [8305, 8423, 8422], [8305, 8306, 8424], [8305, 8424, 8423], [8306, 8307, 8424], [8307, 8425, 8424], [8307, 8308, 8426], [8307, 8426, 8425], [8309, 8310, 8427], [8310, 8428, 8427], [8310, 8311, 8429], [8310, 8429, 8428], [8311, 8312, 8429], [8312, 8430, 8429], [8312, 8313, 8431], [8312, 8431, 8430], [8313, 8314, 8431], [8314, 8432, 8431], [8314, 8315, 8433], [8314, 8433, 8432], [8315, 8316, 8433], [8316, 8434, 8433], [8316, 8317, 8435], [8316, 8435, 8434], [8317, 8318, 8435], [8318, 8436, 8435], [8318, 8319, 8437], [8318, 8437, 8436], [8319, 8320, 8437], [8320, 8438, 8437], [8320, 8321, 8439], [8320, 8439, 8438], [8321, 8322, 8439], [8322, 8440, 8439], [8322, 8323, 8441], [8322, 8441, 8440], [8323, 8324, 8441], [8324, 8442, 8441], [8324, 8325, 8443], [8324, 8443, 8442], [8325, 8326, 8443], [8326, 8444, 8443], [8326, 8327, 8445], [8326, 8445, 8444], [8328, 8329, 8447], [8328, 8447, 8446], [8329, 8330, 8447], [8330, 8448, 8447], [8330, 8331, 8449], [8330, 8449, 8448], [8331, 8332, 8449], [8332, 8450, 8449], [8332, 8333, 8451], [8332, 8451, 8450], [8333, 8334, 8451], [8334, 8452, 8451], [8334, 8335, 8453], [8334, 8453, 8452], [8335, 8336, 8453], [8336, 8454, 8453], [8336, 8337, 8455], [8336, 8455, 8454], [8337, 8338, 8455], [8338, 8456, 8455], [8338, 8339, 8457], [8338, 8457, 8456], [8339, 8340, 8457], [8340, 8458, 8457], [8340, 8341, 8459], [8340, 8459, 8458], [8341, 8342, 8459], [8342, 8460, 8459], [8342, 8343, 8461], [8342, 8461, 8460], [8343, 8344, 8461], [8344, 8462, 8461], [8344, 8345, 8463], [8344, 8463, 8462], [8345, 8346, 8463], [8346, 8464, 8463], [8346, 8347, 8465], [8346, 8465, 8464], [8347, 8348, 8465], [8348, 8466, 8465], [8348, 8349, 8467], [8348, 8467, 8466], [8349, 8350, 8467], [8350, 8468, 8467], [8350, 8351, 8469], [8350, 8469, 8468], [8351, 8352, 8469], [8352, 8470, 8469], [8352, 8353, 8471], [8352, 8471, 8470], [8353, 8354, 8471], [8354, 8472, 8471], [8354, 
8355, 8473], [8354, 8473, 8472], [8355, 8356, 8473], [8356, 8474, 8473], [8356, 8357, 8475], [8356, 8475, 8474], [8357, 8358, 8475], [8358, 8476, 8475], [8358, 8359, 8477], [8358, 8477, 8476], [8359, 8360, 8477], [8360, 8478, 8477], [8360, 8361, 8479], [8360, 8479, 8478], [8361, 8362, 8479], [8362, 8480, 8479], [8362, 8363, 8481], [8362, 8481, 8480], [8363, 8364, 8481], [8364, 8482, 8481], [8364, 8365, 8483], [8364, 8483, 8482], [8365, 8366, 8483], [8366, 8484, 8483], [8366, 8367, 8485], [8366, 8485, 8484], [8367, 8368, 8485], [8368, 8486, 8485], [8368, 8369, 8487], [8368, 8487, 8486], [8369, 8370, 8487], [8370, 8488, 8487], [8370, 8371, 8489], [8370, 8489, 8488], [8371, 8372, 8489], [8372, 8490, 8489], [8372, 8373, 8491], [8372, 8491, 8490], [8373, 8374, 8491], [8374, 8492, 8491], [8374, 8375, 8493], [8374, 8493, 8492], [8375, 8376, 8493], [8376, 8494, 8493], [8376, 8377, 8495], [8376, 8495, 8494], [8377, 8378, 8495], [8378, 8496, 8495], [8378, 8379, 8497], [8378, 8497, 8496], [8379, 8380, 8497], [8380, 8498, 8497], [8380, 8381, 8499], [8380, 8499, 8498], [8381, 8382, 8499], [8382, 8500, 8499], [8382, 8383, 8501], [8382, 8501, 8500], [8383, 8384, 8501], [8384, 8502, 8501], [8384, 8385, 8503], [8384, 8503, 8502], [8385, 8386, 8503], [8386, 8504, 8503], [8386, 8387, 8505], [8386, 8505, 8504], [8387, 8388, 8505], [8388, 8506, 8505], [8388, 8389, 8507], [8388, 8507, 8506], [8389, 8390, 8507], [8390, 8508, 8507], [8390, 8391, 8509], [8390, 8509, 8508], [8391, 8392, 8509], [8392, 8510, 8509], [8392, 8393, 8511], [8392, 8511, 8510], [8393, 8394, 8511], [8394, 8512, 8511], [8394, 8395, 8513], [8394, 8513, 8512], [8395, 8396, 8513], [8396, 8514, 8513], [8396, 8397, 8515], [8396, 8515, 8514], [8397, 8398, 8515], [8398, 8516, 8515], [8398, 8399, 8517], [8398, 8517, 8516], [8399, 8400, 8517], [8400, 8518, 8517], [8400, 8401, 8519], [8400, 8519, 8518], [8401, 8402, 8519], [8402, 8520, 8519], [8402, 8403, 8521], [8402, 8521, 8520], [8403, 8404, 8521], [8404, 8522, 8521], [8404, 8405, 8523], [8404, 8523, 8522], [8405, 8406, 8523], [8406, 8524, 8523], [8406, 8407, 8525], [8406, 8525, 8524], [8407, 8408, 8525], [8408, 8526, 8525], [8408, 8409, 8527], [8408, 8527, 8526], [8409, 8410, 8527], [8410, 8528, 8527], [8410, 8411, 8529], [8410, 8529, 8528], [8411, 8412, 8529], [8412, 8530, 8529], [8412, 8413, 8531], [8412, 8531, 8530], [8413, 8414, 8531], [8414, 8532, 8531], [8414, 8415, 8533], [8414, 8533, 8532], [8415, 8416, 8533], [8416, 8534, 8533], [8416, 8417, 8535], [8416, 8535, 8534], [8417, 8418, 8535], [8418, 8536, 8535], [8418, 8419, 8537], [8418, 8537, 8536], [8419, 8420, 8537], [8420, 8538, 8537], [8420, 8421, 8539], [8420, 8539, 8538], [8421, 8422, 8539], [8422, 8540, 8539], [8422, 8423, 8541], [8422, 8541, 8540], [8423, 8424, 8541], [8424, 8542, 8541], [8424, 8425, 8543], [8424, 8543, 8542], [8425, 8426, 8543], [8426, 8544, 8543], [8427, 8428, 8546], [8427, 8546, 8545], [8428, 8429, 8546], [8429, 8547, 8546], [8429, 8430, 8548], [8429, 8548, 8547], [8430, 8431, 8548], [8431, 8549, 8548], [8431, 8432, 8550], [8431, 8550, 8549], [8432, 8433, 8550], [8433, 8551, 8550], [8433, 8434, 8552], [8433, 8552, 8551], [8434, 8435, 8552], [8435, 8553, 8552], [8435, 8436, 8554], [8435, 8554, 8553], [8436, 8437, 8554], [8437, 8555, 8554], [8437, 8438, 8556], [8437, 8556, 8555], [8438, 8439, 8556], [8439, 8557, 8556], [8439, 8440, 8558], [8439, 8558, 8557], [8440, 8441, 8558], [8441, 8559, 8558], [8441, 8442, 8560], [8441, 8560, 8559], [8442, 8443, 8560], [8443, 8561, 8560], [8443, 8444, 8562], [8443, 8562, 8561], 
[8444, 8445, 8562], [8445, 8563, 8562], [8446, 8447, 8564], [8447, 8565, 8564], [8447, 8448, 8566], [8447, 8566, 8565], [8448, 8449, 8566], [8449, 8567, 8566], [8449, 8450, 8568], [8449, 8568, 8567], [8450, 8451, 8568], [8451, 8569, 8568], [8451, 8452, 8570], [8451, 8570, 8569], [8452, 8453, 8570], [8453, 8571, 8570], [8453, 8454, 8572], [8453, 8572, 8571], [8454, 8455, 8572], [8455, 8573, 8572], [8455, 8456, 8574], [8455, 8574, 8573], [8456, 8457, 8574], [8457, 8575, 8574], [8457, 8458, 8576], [8457, 8576, 8575], [8458, 8459, 8576], [8459, 8577, 8576], [8459, 8460, 8578], [8459, 8578, 8577], [8460, 8461, 8578], [8461, 8579, 8578], [8461, 8462, 8580], [8461, 8580, 8579], [8462, 8463, 8580], [8463, 8581, 8580], [8463, 8464, 8582], [8463, 8582, 8581], [8464, 8465, 8582], [8465, 8583, 8582], [8465, 8466, 8584], [8465, 8584, 8583], [8466, 8467, 8584], [8467, 8585, 8584], [8467, 8468, 8586], [8467, 8586, 8585], [8468, 8469, 8586], [8469, 8587, 8586], [8469, 8470, 8588], [8469, 8588, 8587], [8470, 8471, 8588], [8471, 8589, 8588], [8471, 8472, 8590], [8471, 8590, 8589], [8472, 8473, 8590], [8473, 8591, 8590], [8473, 8474, 8592], [8473, 8592, 8591], [8474, 8475, 8592], [8475, 8593, 8592], [8475, 8476, 8594], [8475, 8594, 8593], [8476, 8477, 8594], [8477, 8595, 8594], [8477, 8478, 8596], [8477, 8596, 8595], [8478, 8479, 8596], [8479, 8597, 8596], [8479, 8480, 8598], [8479, 8598, 8597], [8480, 8481, 8598], [8481, 8599, 8598], [8481, 8482, 8600], [8481, 8600, 8599], [8482, 8483, 8600], [8483, 8601, 8600], [8483, 8484, 8602], [8483, 8602, 8601], [8484, 8485, 8602], [8485, 8603, 8602], [8485, 8486, 8604], [8485, 8604, 8603], [8486, 8487, 8604], [8487, 8605, 8604], [8487, 8488, 8606], [8487, 8606, 8605], [8488, 8489, 8606], [8489, 8607, 8606], [8489, 8490, 8608], [8489, 8608, 8607], [8490, 8491, 8608], [8491, 8609, 8608], [8491, 8492, 8610], [8491, 8610, 8609], [8492, 8493, 8610], [8493, 8611, 8610], [8493, 8494, 8612], [8493, 8612, 8611], [8494, 8495, 8612], [8495, 8613, 8612], [8495, 8496, 8614], [8495, 8614, 8613], [8496, 8497, 8614], [8497, 8615, 8614], [8497, 8498, 8616], [8497, 8616, 8615], [8498, 8499, 8616], [8499, 8617, 8616], [8499, 8500, 8618], [8499, 8618, 8617], [8500, 8501, 8618], [8501, 8619, 8618], [8501, 8502, 8620], [8501, 8620, 8619], [8502, 8503, 8620], [8503, 8621, 8620], [8503, 8504, 8622], [8503, 8622, 8621], [8504, 8505, 8622], [8505, 8623, 8622], [8505, 8506, 8624], [8505, 8624, 8623], [8506, 8507, 8624], [8507, 8625, 8624], [8507, 8508, 8626], [8507, 8626, 8625], [8508, 8509, 8626], [8509, 8627, 8626], [8509, 8510, 8628], [8509, 8628, 8627], [8510, 8511, 8628], [8511, 8629, 8628], [8511, 8512, 8630], [8511, 8630, 8629], [8512, 8513, 8630], [8513, 8631, 8630], [8513, 8514, 8632], [8513, 8632, 8631], [8514, 8515, 8632], [8515, 8633, 8632], [8515, 8516, 8634], [8515, 8634, 8633], [8516, 8517, 8634], [8517, 8635, 8634], [8517, 8518, 8636], [8517, 8636, 8635], [8518, 8519, 8636], [8519, 8637, 8636], [8519, 8520, 8638], [8519, 8638, 8637], [8520, 8521, 8638], [8521, 8639, 8638], [8521, 8522, 8640], [8521, 8640, 8639], [8522, 8523, 8640], [8523, 8641, 8640], [8523, 8524, 8642], [8523, 8642, 8641], [8524, 8525, 8642], [8525, 8643, 8642], [8525, 8526, 8644], [8525, 8644, 8643], [8526, 8527, 8644], [8527, 8645, 8644], [8527, 8528, 8646], [8527, 8646, 8645], [8528, 8529, 8646], [8529, 8647, 8646], [8529, 8530, 8648], [8529, 8648, 8647], [8530, 8531, 8648], [8531, 8649, 8648], [8531, 8532, 8650], [8531, 8650, 8649], [8532, 8533, 8650], [8533, 8651, 8650], [8533, 8534, 8652], [8533, 8652, 
8651], [8534, 8535, 8652], [8535, 8653, 8652], [8535, 8536, 8654], [8535, 8654, 8653], [8536, 8537, 8654], [8537, 8655, 8654], [8537, 8538, 8656], [8537, 8656, 8655], [8538, 8539, 8656], [8539, 8657, 8656], [8539, 8540, 8658], [8539, 8658, 8657], [8540, 8541, 8658], [8541, 8659, 8658], [8541, 8542, 8660], [8541, 8660, 8659], [8542, 8543, 8660], [8543, 8661, 8660], [8543, 8544, 8662], [8543, 8662, 8661], [8545, 8546, 8663], [8546, 8664, 8663], [8546, 8547, 8665], [8546, 8665, 8664], [8547, 8548, 8665], [8548, 8666, 8665], [8548, 8549, 8667], [8548, 8667, 8666], [8549, 8550, 8667], [8550, 8668, 8667], [8550, 8551, 8669], [8550, 8669, 8668], [8551, 8552, 8669], [8552, 8670, 8669], [8552, 8553, 8671], [8552, 8671, 8670], [8553, 8554, 8671], [8554, 8672, 8671], [8554, 8555, 8673], [8554, 8673, 8672], [8555, 8556, 8673], [8556, 8674, 8673], [8556, 8557, 8675], [8556, 8675, 8674], [8557, 8558, 8675], [8558, 8676, 8675], [8558, 8559, 8677], [8558, 8677, 8676], [8559, 8560, 8677], [8560, 8678, 8677], [8560, 8561, 8679], [8560, 8679, 8678], [8561, 8562, 8679], [8562, 8680, 8679], [8562, 8563, 8681], [8562, 8681, 8680], [8564, 8565, 8683], [8564, 8683, 8682], [8565, 8566, 8683], [8566, 8684, 8683], [8566, 8567, 8685], [8566, 8685, 8684], [8567, 8568, 8685], [8568, 8686, 8685], [8568, 8569, 8687], [8568, 8687, 8686], [8569, 8570, 8687], [8570, 8688, 8687], [8570, 8571, 8689], [8570, 8689, 8688], [8571, 8572, 8689], [8572, 8690, 8689], [8572, 8573, 8691], [8572, 8691, 8690], [8573, 8574, 8691], [8574, 8692, 8691], [8574, 8575, 8693], [8574, 8693, 8692], [8575, 8576, 8693], [8576, 8694, 8693], [8576, 8577, 8695], [8576, 8695, 8694], [8577, 8578, 8695], [8578, 8696, 8695], [8578, 8579, 8697], [8578, 8697, 8696], [8579, 8580, 8697], [8580, 8698, 8697], [8580, 8581, 8699], [8580, 8699, 8698], [8581, 8582, 8699], [8582, 8700, 8699], [8582, 8583, 8701], [8582, 8701, 8700], [8583, 8584, 8701], [8584, 8702, 8701], [8584, 8585, 8703], [8584, 8703, 8702], [8585, 8586, 8703], [8586, 8704, 8703], [8586, 8587, 8705], [8586, 8705, 8704], [8587, 8588, 8705], [8588, 8706, 8705], [8588, 8589, 8707], [8588, 8707, 8706], [8589, 8590, 8707], [8590, 8708, 8707], [8590, 8591, 8709], [8590, 8709, 8708], [8591, 8592, 8709], [8592, 8710, 8709], [8592, 8593, 8711], [8592, 8711, 8710], [8593, 8594, 8711], [8594, 8712, 8711], [8594, 8595, 8713], [8594, 8713, 8712], [8595, 8596, 8713], [8596, 8714, 8713], [8596, 8597, 8715], [8596, 8715, 8714], [8597, 8598, 8715], [8598, 8716, 8715], [8598, 8599, 8717], [8598, 8717, 8716], [8599, 8600, 8717], [8600, 8718, 8717], [8600, 8601, 8719], [8600, 8719, 8718], [8601, 8602, 8719], [8602, 8720, 8719], [8602, 8603, 8721], [8602, 8721, 8720], [8603, 8604, 8721], [8604, 8722, 8721], [8604, 8605, 8723], [8604, 8723, 8722], [8605, 8606, 8723], [8606, 8724, 8723], [8606, 8607, 8725], [8606, 8725, 8724], [8607, 8608, 8725], [8608, 8726, 8725], [8608, 8609, 8727], [8608, 8727, 8726], [8609, 8610, 8727], [8610, 8728, 8727], [8610, 8611, 8729], [8610, 8729, 8728], [8611, 8612, 8729], [8612, 8730, 8729], [8612, 8613, 8731], [8612, 8731, 8730], [8613, 8614, 8731], [8614, 8732, 8731], [8614, 8615, 8733], [8614, 8733, 8732], [8615, 8616, 8733], [8616, 8734, 8733], [8616, 8617, 8735], [8616, 8735, 8734], [8617, 8618, 8735], [8618, 8736, 8735], [8618, 8619, 8737], [8618, 8737, 8736], [8619, 8620, 8737], [8620, 8738, 8737], [8620, 8621, 8739], [8620, 8739, 8738], [8621, 8622, 8739], [8622, 8740, 8739], [8622, 8623, 8741], [8622, 8741, 8740], [8623, 8624, 8741], [8624, 8742, 8741], [8624, 8625, 8743], [8624, 
8743, 8742], [8625, 8626, 8743], [8626, 8744, 8743], [8626, 8627, 8745], [8626, 8745, 8744], [8627, 8628, 8745], [8628, 8746, 8745], [8628, 8629, 8747], [8628, 8747, 8746], [8629, 8630, 8747], [8630, 8748, 8747], [8630, 8631, 8749], [8630, 8749, 8748], [8631, 8632, 8749], [8632, 8750, 8749], [8632, 8633, 8751], [8632, 8751, 8750], [8633, 8634, 8751], [8634, 8752, 8751], [8634, 8635, 8753], [8634, 8753, 8752], [8635, 8636, 8753], [8636, 8754, 8753], [8636, 8637, 8755], [8636, 8755, 8754], [8637, 8638, 8755], [8638, 8756, 8755], [8638, 8639, 8757], [8638, 8757, 8756], [8639, 8640, 8757], [8640, 8758, 8757], [8640, 8641, 8759], [8640, 8759, 8758], [8641, 8642, 8759], [8642, 8760, 8759], [8642, 8643, 8761], [8642, 8761, 8760], [8643, 8644, 8761], [8644, 8762, 8761], [8644, 8645, 8763], [8644, 8763, 8762], [8645, 8646, 8763], [8646, 8764, 8763], [8646, 8647, 8765], [8646, 8765, 8764], [8647, 8648, 8765], [8648, 8766, 8765], [8648, 8649, 8767], [8648, 8767, 8766], [8649, 8650, 8767], [8650, 8768, 8767], [8650, 8651, 8769], [8650, 8769, 8768], [8651, 8652, 8769], [8652, 8770, 8769], [8652, 8653, 8771], [8652, 8771, 8770], [8653, 8654, 8771], [8654, 8772, 8771], [8654, 8655, 8773], [8654, 8773, 8772], [8655, 8656, 8773], [8656, 8774, 8773], [8656, 8657, 8775], [8656, 8775, 8774], [8657, 8658, 8775], [8658, 8776, 8775], [8658, 8659, 8777], [8658, 8777, 8776], [8659, 8660, 8777], [8660, 8778, 8777], [8660, 8661, 8779], [8660, 8779, 8778], [8661, 8662, 8779], [8662, 8780, 8779], [8663, 8664, 8782], [8663, 8782, 8781], [8664, 8665, 8782], [8665, 8783, 8782], [8665, 8666, 8784], [8665, 8784, 8783], [8666, 8667, 8784], [8667, 8785, 8784], [8667, 8668, 8786], [8667, 8786, 8785], [8668, 8669, 8786], [8669, 8787, 8786], [8669, 8670, 8788], [8669, 8788, 8787], [8670, 8671, 8788], [8671, 8789, 8788], [8671, 8672, 8790], [8671, 8790, 8789], [8672, 8673, 8790], [8673, 8791, 8790], [8673, 8674, 8792], [8673, 8792, 8791], [8674, 8675, 8792], [8675, 8793, 8792], [8675, 8676, 8794], [8675, 8794, 8793], [8676, 8677, 8794], [8677, 8795, 8794], [8677, 8678, 8796], [8677, 8796, 8795], [8678, 8679, 8796], [8679, 8797, 8796], [8679, 8680, 8798], [8679, 8798, 8797], [8680, 8681, 8798], [8681, 8799, 8798], [8682, 8683, 8800], [8683, 8801, 8800], [8683, 8684, 8802], [8683, 8802, 8801], [8684, 8685, 8802], [8685, 8803, 8802], [8685, 8686, 8804], [8685, 8804, 8803], [8686, 8687, 8804], [8687, 8805, 8804], [8687, 8688, 8806], [8687, 8806, 8805], [8688, 8689, 8806], [8689, 8807, 8806], [8689, 8690, 8808], [8689, 8808, 8807], [8690, 8691, 8808], [8691, 8809, 8808], [8691, 8692, 8810], [8691, 8810, 8809], [8692, 8693, 8810], [8693, 8811, 8810], [8693, 8694, 8812], [8693, 8812, 8811], [8694, 8695, 8812], [8695, 8813, 8812], [8695, 8696, 8814], [8695, 8814, 8813], [8696, 8697, 8814], [8697, 8815, 8814], [8697, 8698, 8816], [8697, 8816, 8815], [8698, 8699, 8816], [8699, 8817, 8816], [8699, 8700, 8818], [8699, 8818, 8817], [8700, 8701, 8818], [8701, 8819, 8818], [8701, 8702, 8820], [8701, 8820, 8819], [8702, 8703, 8820], [8703, 8821, 8820], [8703, 8704, 8822], [8703, 8822, 8821], [8704, 8705, 8822], [8705, 8823, 8822], [8705, 8706, 8824], [8705, 8824, 8823], [8706, 8707, 8824], [8707, 8825, 8824], [8707, 8708, 8826], [8707, 8826, 8825], [8708, 8709, 8826], [8709, 8827, 8826], [8709, 8710, 8828], [8709, 8828, 8827], [8710, 8711, 8828], [8711, 8829, 8828], [8711, 8712, 8830], [8711, 8830, 8829], [8712, 8713, 8830], [8713, 8831, 8830], [8713, 8714, 8832], [8713, 8832, 8831], [8714, 8715, 8832], [8715, 8833, 8832], [8715, 8716, 8834], 
[8715, 8834, 8833], [8716, 8717, 8834], [8717, 8835, 8834], [8717, 8718, 8836], [8717, 8836, 8835], [8718, 8719, 8836], [8719, 8837, 8836], [8719, 8720, 8838], [8719, 8838, 8837], [8720, 8721, 8838], [8721, 8839, 8838], [8721, 8722, 8840], [8721, 8840, 8839], [8722, 8723, 8840], [8723, 8841, 8840], [8723, 8724, 8842], [8723, 8842, 8841], [8724, 8725, 8842], [8725, 8843, 8842], [8725, 8726, 8844], [8725, 8844, 8843], [8726, 8727, 8844], [8727, 8845, 8844], [8727, 8728, 8846], [8727, 8846, 8845], [8728, 8729, 8846], [8729, 8847, 8846], [8729, 8730, 8848], [8729, 8848, 8847], [8730, 8731, 8848], [8731, 8849, 8848], [8731, 8732, 8850], [8731, 8850, 8849], [8732, 8733, 8850], [8733, 8851, 8850], [8733, 8734, 8852], [8733, 8852, 8851], [8734, 8735, 8852], [8735, 8853, 8852], [8735, 8736, 8854], [8735, 8854, 8853], [8736, 8737, 8854], [8737, 8855, 8854], [8737, 8738, 8856], [8737, 8856, 8855], [8738, 8739, 8856], [8739, 8857, 8856], [8739, 8740, 8858], [8739, 8858, 8857], [8740, 8741, 8858], [8741, 8859, 8858], [8741, 8742, 8860], [8741, 8860, 8859], [8742, 8743, 8860], [8743, 8861, 8860], [8743, 8744, 8862], [8743, 8862, 8861], [8744, 8745, 8862], [8745, 8863, 8862], [8745, 8746, 8864], [8745, 8864, 8863], [8746, 8747, 8864], [8747, 8865, 8864], [8747, 8748, 8866], [8747, 8866, 8865], [8748, 8749, 8866], [8749, 8867, 8866], [8749, 8750, 8868], [8749, 8868, 8867], [8750, 8751, 8868], [8751, 8869, 8868], [8751, 8752, 8870], [8751, 8870, 8869], [8752, 8753, 8870], [8753, 8871, 8870], [8753, 8754, 8872], [8753, 8872, 8871], [8754, 8755, 8872], [8755, 8873, 8872], [8755, 8756, 8874], [8755, 8874, 8873], [8756, 8757, 8874], [8757, 8875, 8874], [8757, 8758, 8876], [8757, 8876, 8875], [8758, 8759, 8876], [8759, 8877, 8876], [8759, 8760, 8878], [8759, 8878, 8877], [8760, 8761, 8878], [8761, 8879, 8878], [8761, 8762, 8880], [8761, 8880, 8879], [8762, 8763, 8880], [8763, 8881, 8880], [8763, 8764, 8882], [8763, 8882, 8881], [8764, 8765, 8882], [8765, 8883, 8882], [8765, 8766, 8884], [8765, 8884, 8883], [8766, 8767, 8884], [8767, 8885, 8884], [8767, 8768, 8886], [8767, 8886, 8885], [8768, 8769, 8886], [8769, 8887, 8886], [8769, 8770, 8888], [8769, 8888, 8887], [8770, 8771, 8888], [8771, 8889, 8888], [8771, 8772, 8890], [8771, 8890, 8889], [8772, 8773, 8890], [8773, 8891, 8890], [8773, 8774, 8892], [8773, 8892, 8891], [8774, 8775, 8892], [8775, 8893, 8892], [8775, 8776, 8894], [8775, 8894, 8893], [8776, 8777, 8894], [8777, 8895, 8894], [8777, 8778, 8896], [8777, 8896, 8895], [8778, 8779, 8896], [8779, 8897, 8896], [8779, 8780, 8898], [8779, 8898, 8897], [8781, 8782, 8899], [8782, 8900, 8899], [8782, 8783, 8901], [8782, 8901, 8900], [8783, 8784, 8901], [8784, 8902, 8901], [8784, 8785, 8903], [8784, 8903, 8902], [8785, 8786, 8903], [8786, 8904, 8903], [8786, 8787, 8905], [8786, 8905, 8904], [8787, 8788, 8905], [8788, 8906, 8905], [8788, 8789, 8907], [8788, 8907, 8906], [8789, 8790, 8907], [8790, 8908, 8907], [8790, 8791, 8909], [8790, 8909, 8908], [8791, 8792, 8909], [8792, 8910, 8909], [8792, 8793, 8911], [8792, 8911, 8910], [8793, 8794, 8911], [8794, 8912, 8911], [8794, 8795, 8913], [8794, 8913, 8912], [8795, 8796, 8913], [8796, 8914, 8913], [8796, 8797, 8915], [8796, 8915, 8914], [8797, 8798, 8915], [8798, 8916, 8915], [8798, 8799, 8917], [8798, 8917, 8916], [8800, 8801, 8919], [8800, 8919, 8918], [8801, 8802, 8919], [8802, 8920, 8919], [8802, 8803, 8921], [8802, 8921, 8920], [8803, 8804, 8921], [8804, 8922, 8921], [8804, 8805, 8923], [8804, 8923, 8922], [8805, 8806, 8923], [8806, 8924, 8923], [8806, 8807, 
8925], [8806, 8925, 8924], [8807, 8808, 8925], [8808, 8926, 8925], [8808, 8809, 8927], [8808, 8927, 8926], [8809, 8810, 8927], [8810, 8928, 8927], [8810, 8811, 8929], [8810, 8929, 8928], [8811, 8812, 8929], [8812, 8930, 8929], [8812, 8813, 8931], [8812, 8931, 8930], [8813, 8814, 8931], [8814, 8932, 8931], [8814, 8815, 8933], [8814, 8933, 8932], [8815, 8816, 8933], [8816, 8934, 8933], [8816, 8817, 8935], [8816, 8935, 8934], [8817, 8818, 8935], [8818, 8936, 8935], [8818, 8819, 8937], [8818, 8937, 8936], [8819, 8820, 8937], [8820, 8938, 8937], [8820, 8821, 8939], [8820, 8939, 8938], [8821, 8822, 8939], [8822, 8940, 8939], [8822, 8823, 8941], [8822, 8941, 8940], [8823, 8824, 8941], [8824, 8942, 8941], [8824, 8825, 8943], [8824, 8943, 8942], [8825, 8826, 8943], [8826, 8944, 8943], [8826, 8827, 8945], [8826, 8945, 8944], [8827, 8828, 8945], [8828, 8946, 8945], [8828, 8829, 8947], [8828, 8947, 8946], [8829, 8830, 8947], [8830, 8948, 8947], [8830, 8831, 8949], [8830, 8949, 8948], [8831, 8832, 8949], [8832, 8950, 8949], [8832, 8833, 8951], [8832, 8951, 8950], [8833, 8834, 8951], [8834, 8952, 8951], [8834, 8835, 8953], [8834, 8953, 8952], [8835, 8836, 8953], [8836, 8954, 8953], [8836, 8837, 8955], [8836, 8955, 8954], [8837, 8838, 8955], [8838, 8956, 8955], [8838, 8839, 8957], [8838, 8957, 8956], [8839, 8840, 8957], [8840, 8958, 8957], [8840, 8841, 8959], [8840, 8959, 8958], [8841, 8842, 8959], [8842, 8960, 8959], [8842, 8843, 8961], [8842, 8961, 8960], [8843, 8844, 8961], [8844, 8962, 8961], [8844, 8845, 8963], [8844, 8963, 8962], [8845, 8846, 8963], [8846, 8964, 8963], [8846, 8847, 8965], [8846, 8965, 8964], [8847, 8848, 8965], [8848, 8966, 8965], [8848, 8849, 8967], [8848, 8967, 8966], [8849, 8850, 8967], [8850, 8968, 8967], [8850, 8851, 8969], [8850, 8969, 8968], [8851, 8852, 8969], [8852, 8970, 8969], [8852, 8853, 8971], [8852, 8971, 8970], [8853, 8854, 8971], [8854, 8972, 8971], [8854, 8855, 8973], [8854, 8973, 8972], [8855, 8856, 8973], [8856, 8974, 8973], [8856, 8857, 8975], [8856, 8975, 8974], [8857, 8858, 8975], [8858, 8976, 8975], [8858, 8859, 8977], [8858, 8977, 8976], [8859, 8860, 8977], [8860, 8978, 8977], [8860, 8861, 8979], [8860, 8979, 8978], [8861, 8862, 8979], [8862, 8980, 8979], [8862, 8863, 8981], [8862, 8981, 8980], [8863, 8864, 8981], [8864, 8982, 8981], [8864, 8865, 8983], [8864, 8983, 8982], [8865, 8866, 8983], [8866, 8984, 8983], [8866, 8867, 8985], [8866, 8985, 8984], [8867, 8868, 8985], [8868, 8986, 8985], [8868, 8869, 8987], [8868, 8987, 8986], [8869, 8870, 8987], [8870, 8988, 8987], [8870, 8871, 8989], [8870, 8989, 8988], [8871, 8872, 8989], [8872, 8990, 8989], [8872, 8873, 8991], [8872, 8991, 8990], [8873, 8874, 8991], [8874, 8992, 8991], [8874, 8875, 8993], [8874, 8993, 8992], [8875, 8876, 8993], [8876, 8994, 8993], [8876, 8877, 8995], [8876, 8995, 8994], [8877, 8878, 8995], [8878, 8996, 8995], [8878, 8879, 8997], [8878, 8997, 8996], [8879, 8880, 8997], [8880, 8998, 8997], [8880, 8881, 8999], [8880, 8999, 8998], [8881, 8882, 8999], [8882, 9000, 8999], [8882, 8883, 9001], [8882, 9001, 9000], [8883, 8884, 9001], [8884, 9002, 9001], [8884, 8885, 9003], [8884, 9003, 9002], [8885, 8886, 9003], [8886, 9004, 9003], [8886, 8887, 9005], [8886, 9005, 9004], [8887, 8888, 9005], [8888, 9006, 9005], [8888, 8889, 9007], [8888, 9007, 9006], [8889, 8890, 9007], [8890, 9008, 9007], [8890, 8891, 9009], [8890, 9009, 9008], [8891, 8892, 9009], [8892, 9010, 9009], [8892, 8893, 9011], [8892, 9011, 9010], [8893, 8894, 9011], [8894, 9012, 9011], [8894, 8895, 9013], [8894, 9013, 9012], [8895, 
8896, 9013], [8896, 9014, 9013], [8896, 8897, 9015], [8896, 9015, 9014], [8897, 8898, 9015], [8898, 9016, 9015], [8899, 8900, 9018], [8899, 9018, 9017], [8900, 8901, 9018], [8901, 9019, 9018], [8901, 8902, 9020], [8901, 9020, 9019], [8902, 8903, 9020], [8903, 9021, 9020], [8903, 8904, 9022], [8903, 9022, 9021], [8904, 8905, 9022], [8905, 9023, 9022], [8905, 8906, 9024], [8905, 9024, 9023], [8906, 8907, 9024], [8907, 9025, 9024], [8907, 8908, 9026], [8907, 9026, 9025], [8908, 8909, 9026], [8909, 9027, 9026], [8909, 8910, 9028], [8909, 9028, 9027], [8910, 8911, 9028], [8911, 9029, 9028], [8911, 8912, 9030], [8911, 9030, 9029], [8912, 8913, 9030], [8913, 9031, 9030], [8913, 8914, 9032], [8913, 9032, 9031], [8914, 8915, 9032], [8915, 9033, 9032], [8915, 8916, 9034], [8915, 9034, 9033], [8916, 8917, 9034], [8917, 9035, 9034], [8918, 8919, 9036], [8919, 9037, 9036], [8919, 8920, 9038], [8919, 9038, 9037], [8920, 8921, 9038], [8921, 9039, 9038], [8921, 8922, 9040], [8921, 9040, 9039], [8922, 8923, 9040], [8923, 9041, 9040], [8923, 8924, 9042], [8923, 9042, 9041], [8924, 8925, 9042], [8925, 9043, 9042], [8925, 8926, 9044], [8925, 9044, 9043], [8926, 8927, 9044], [8927, 9045, 9044], [8927, 8928, 9046], [8927, 9046, 9045], [8928, 8929, 9046], [8929, 9047, 9046], [8929, 8930, 9048], [8929, 9048, 9047], [8930, 8931, 9048], [8931, 9049, 9048], [8931, 8932, 9050], [8931, 9050, 9049], [8932, 8933, 9050], [8933, 9051, 9050], [8933, 8934, 9052], [8933, 9052, 9051], [8934, 8935, 9052], [8935, 9053, 9052], [8935, 8936, 9054], [8935, 9054, 9053], [8936, 8937, 9054], [8937, 9055, 9054], [8937, 8938, 9056], [8937, 9056, 9055], [8938, 8939, 9056], [8939, 9057, 9056], [8939, 8940, 9058], [8939, 9058, 9057], [8940, 8941, 9058], [8941, 9059, 9058], [8941, 8942, 9060], [8941, 9060, 9059], [8942, 8943, 9060], [8943, 9061, 9060], [8943, 8944, 9062], [8943, 9062, 9061], [8944, 8945, 9062], [8945, 9063, 9062], [8945, 8946, 9064], [8945, 9064, 9063], [8946, 8947, 9064], [8947, 9065, 9064], [8947, 8948, 9066], [8947, 9066, 9065], [8948, 8949, 9066], [8949, 9067, 9066], [8949, 8950, 9068], [8949, 9068, 9067], [8950, 8951, 9068], [8951, 9069, 9068], [8951, 8952, 9070], [8951, 9070, 9069], [8952, 8953, 9070], [8953, 9071, 9070], [8953, 8954, 9072], [8953, 9072, 9071], [8954, 8955, 9072], [8955, 9073, 9072], [8955, 8956, 9074], [8955, 9074, 9073], [8956, 8957, 9074], [8957, 9075, 9074], [8957, 8958, 9076], [8957, 9076, 9075], [8958, 8959, 9076], [8959, 9077, 9076], [8959, 8960, 9078], [8959, 9078, 9077], [8960, 8961, 9078], [8961, 9079, 9078], [8961, 8962, 9080], [8961, 9080, 9079], [8962, 8963, 9080], [8963, 9081, 9080], [8963, 8964, 9082], [8963, 9082, 9081], [8964, 8965, 9082], [8965, 9083, 9082], [8965, 8966, 9084], [8965, 9084, 9083], [8966, 8967, 9084], [8967, 9085, 9084], [8967, 8968, 9086], [8967, 9086, 9085], [8968, 8969, 9086], [8969, 9087, 9086], [8969, 8970, 9088], [8969, 9088, 9087], [8970, 8971, 9088], [8971, 9089, 9088], [8971, 8972, 9090], [8971, 9090, 9089], [8972, 8973, 9090], [8973, 9091, 9090], [8973, 8974, 9092], [8973, 9092, 9091], [8974, 8975, 9092], [8975, 9093, 9092], [8975, 8976, 9094], [8975, 9094, 9093], [8976, 8977, 9094], [8977, 9095, 9094], [8977, 8978, 9096], [8977, 9096, 9095], [8978, 8979, 9096], [8979, 9097, 9096], [8979, 8980, 9098], [8979, 9098, 9097], [8980, 8981, 9098], [8981, 9099, 9098], [8981, 8982, 9100], [8981, 9100, 9099], [8982, 8983, 9100], [8983, 9101, 9100], [8983, 8984, 9102], [8983, 9102, 9101], [8984, 8985, 9102], [8985, 9103, 9102], [8985, 8986, 9104], [8985, 9104, 9103], 
[8986, 8987, 9104], [8987, 9105, 9104], [8987, 8988, 9106], [8987, 9106, 9105], [8988, 8989, 9106], [8989, 9107, 9106], [8989, 8990, 9108], [8989, 9108, 9107], [8990, 8991, 9108], [8991, 9109, 9108], [8991, 8992, 9110], [8991, 9110, 9109], [8992, 8993, 9110], [8993, 9111, 9110], [8993, 8994, 9112], [8993, 9112, 9111], [8994, 8995, 9112], [8995, 9113, 9112], [8995, 8996, 9114], [8995, 9114, 9113], [8996, 8997, 9114], [8997, 9115, 9114], [8997, 8998, 9116], [8997, 9116, 9115], [8998, 8999, 9116], [8999, 9117, 9116], [8999, 9000, 9118], [8999, 9118, 9117], [9000, 9001, 9118], [9001, 9119, 9118], [9001, 9002, 9120], [9001, 9120, 9119], [9002, 9003, 9120], [9003, 9121, 9120], [9003, 9004, 9122], [9003, 9122, 9121], [9004, 9005, 9122], [9005, 9123, 9122], [9005, 9006, 9124], [9005, 9124, 9123], [9006, 9007, 9124], [9007, 9125, 9124], [9007, 9008, 9126], [9007, 9126, 9125], [9008, 9009, 9126], [9009, 9127, 9126], [9009, 9010, 9128], [9009, 9128, 9127], [9010, 9011, 9128], [9011, 9129, 9128], [9011, 9012, 9130], [9011, 9130, 9129], [9012, 9013, 9130], [9013, 9131, 9130], [9013, 9014, 9132], [9013, 9132, 9131], [9014, 9015, 9132], [9015, 9133, 9132], [9015, 9016, 9134], [9015, 9134, 9133], [9017, 9018, 9135], [9018, 9136, 9135], [9018, 9019, 9137], [9018, 9137, 9136], [9019, 9020, 9137], [9020, 9138, 9137], [9020, 9021, 9139], [9020, 9139, 9138], [9021, 9022, 9139], [9022, 9140, 9139], [9022, 9023, 9141], [9022, 9141, 9140], [9023, 9024, 9141], [9024, 9142, 9141], [9024, 9025, 9143], [9024, 9143, 9142], [9025, 9026, 9143], [9026, 9144, 9143], [9026, 9027, 9145], [9026, 9145, 9144], [9027, 9028, 9145], [9028, 9146, 9145], [9028, 9029, 9147], [9028, 9147, 9146], [9029, 9030, 9147], [9030, 9148, 9147], [9030, 9031, 9149], [9030, 9149, 9148], [9031, 9032, 9149], [9032, 9150, 9149], [9032, 9033, 9151], [9032, 9151, 9150], [9033, 9034, 9151], [9034, 9152, 9151], [9034, 9035, 9153], [9034, 9153, 9152], [9036, 9037, 9155], [9036, 9155, 9154], [9037, 9038, 9155], [9038, 9156, 9155], [9038, 9039, 9157], [9038, 9157, 9156], [9039, 9040, 9157], [9040, 9158, 9157], [9040, 9041, 9159], [9040, 9159, 9158], [9041, 9042, 9159], [9042, 9160, 9159], [9042, 9043, 9161], [9042, 9161, 9160], [9043, 9044, 9161], [9044, 9162, 9161], [9044, 9045, 9163], [9044, 9163, 9162], [9045, 9046, 9163], [9046, 9164, 9163], [9046, 9047, 9165], [9046, 9165, 9164], [9047, 9048, 9165], [9048, 9166, 9165], [9048, 9049, 9167], [9048, 9167, 9166], [9049, 9050, 9167], [9050, 9168, 9167], [9050, 9051, 9169], [9050, 9169, 9168], [9051, 9052, 9169], [9052, 9170, 9169], [9052, 9053, 9171], [9052, 9171, 9170], [9053, 9054, 9171], [9054, 9172, 9171], [9054, 9055, 9173], [9054, 9173, 9172], [9055, 9056, 9173], [9056, 9174, 9173], [9056, 9057, 9175], [9056, 9175, 9174], [9057, 9058, 9175], [9058, 9176, 9175], [9058, 9059, 9177], [9058, 9177, 9176], [9059, 9060, 9177], [9060, 9178, 9177], [9060, 9061, 9179], [9060, 9179, 9178], [9061, 9062, 9179], [9062, 9180, 9179], [9062, 9063, 9181], [9062, 9181, 9180], [9063, 9064, 9181], [9064, 9182, 9181], [9064, 9065, 9183], [9064, 9183, 9182], [9065, 9066, 9183], [9066, 9184, 9183], [9066, 9067, 9185], [9066, 9185, 9184], [9067, 9068, 9185], [9068, 9186, 9185], [9068, 9069, 9187], [9068, 9187, 9186], [9069, 9070, 9187], [9070, 9188, 9187], [9070, 9071, 9189], [9070, 9189, 9188], [9071, 9072, 9189], [9072, 9190, 9189], [9072, 9073, 9191], [9072, 9191, 9190], [9073, 9074, 9191], [9074, 9192, 9191], [9074, 9075, 9193], [9074, 9193, 9192], [9075, 9076, 9193], [9076, 9194, 9193], [9076, 9077, 9195], [9076, 9195, 
9194], [9077, 9078, 9195], [9078, 9196, 9195], [9078, 9079, 9197], [9078, 9197, 9196], [9079, 9080, 9197], [9080, 9198, 9197], [9080, 9081, 9199], [9080, 9199, 9198], [9081, 9082, 9199], [9082, 9200, 9199], [9082, 9083, 9201], [9082, 9201, 9200], [9083, 9084, 9201], [9084, 9202, 9201], [9084, 9085, 9203], [9084, 9203, 9202], [9085, 9086, 9203], [9086, 9204, 9203], [9086, 9087, 9205], [9086, 9205, 9204], [9087, 9088, 9205], [9088, 9206, 9205], [9088, 9089, 9207], [9088, 9207, 9206], [9089, 9090, 9207], [9090, 9208, 9207], [9090, 9091, 9209], [9090, 9209, 9208], [9091, 9092, 9209], [9092, 9210, 9209], [9092, 9093, 9211], [9092, 9211, 9210], [9093, 9094, 9211], [9094, 9212, 9211], [9094, 9095, 9213], [9094, 9213, 9212], [9095, 9096, 9213], [9096, 9214, 9213], [9096, 9097, 9215], [9096, 9215, 9214], [9097, 9098, 9215], [9098, 9216, 9215], [9098, 9099, 9217], [9098, 9217, 9216], [9099, 9100, 9217], [9100, 9218, 9217], [9100, 9101, 9219], [9100, 9219, 9218], [9101, 9102, 9219], [9102, 9220, 9219], [9102, 9103, 9221], [9102, 9221, 9220], [9103, 9104, 9221], [9104, 9222, 9221], [9104, 9105, 9223], [9104, 9223, 9222], [9105, 9106, 9223], [9106, 9224, 9223], [9106, 9107, 9225], [9106, 9225, 9224], [9107, 9108, 9225], [9108, 9226, 9225], [9108, 9109, 9227], [9108, 9227, 9226], [9109, 9110, 9227], [9110, 9228, 9227], [9110, 9111, 9229], [9110, 9229, 9228], [9111, 9112, 9229], [9112, 9230, 9229], [9112, 9113, 9231], [9112, 9231, 9230], [9113, 9114, 9231], [9114, 9232, 9231], [9114, 9115, 9233], [9114, 9233, 9232], [9115, 9116, 9233], [9116, 9234, 9233], [9116, 9117, 9235], [9116, 9235, 9234], [9117, 9118, 9235], [9118, 9236, 9235], [9118, 9119, 9237], [9118, 9237, 9236], [9119, 9120, 9237], [9120, 9238, 9237], [9120, 9121, 9239], [9120, 9239, 9238], [9121, 9122, 9239], [9122, 9240, 9239], [9122, 9123, 9241], [9122, 9241, 9240], [9123, 9124, 9241], [9124, 9242, 9241], [9124, 9125, 9243], [9124, 9243, 9242], [9125, 9126, 9243], [9126, 9244, 9243], [9126, 9127, 9245], [9126, 9245, 9244], [9127, 9128, 9245], [9128, 9246, 9245], [9128, 9129, 9247], [9128, 9247, 9246], [9129, 9130, 9247], [9130, 9248, 9247], [9130, 9131, 9249], [9130, 9249, 9248], [9131, 9132, 9249], [9132, 9250, 9249], [9132, 9133, 9251], [9132, 9251, 9250], [9133, 9134, 9251], [9134, 9252, 9251], [9135, 9136, 9254], [9135, 9254, 9253], [9136, 9137, 9254], [9137, 9255, 9254], [9137, 9138, 9256], [9137, 9256, 9255], [9138, 9139, 9256], [9139, 9257, 9256], [9139, 9140, 9258], [9139, 9258, 9257], [9140, 9141, 9258], [9141, 9259, 9258], [9141, 9142, 9260], [9141, 9260, 9259], [9142, 9143, 9260], [9143, 9261, 9260], [9143, 9144, 9262], [9143, 9262, 9261], [9144, 9145, 9262], [9145, 9263, 9262], [9145, 9146, 9264], [9145, 9264, 9263], [9146, 9147, 9264], [9147, 9265, 9264], [9147, 9148, 9266], [9147, 9266, 9265], [9148, 9149, 9266], [9149, 9267, 9266], [9149, 9150, 9268], [9149, 9268, 9267], [9150, 9151, 9268], [9151, 9269, 9268], [9151, 9152, 9270], [9151, 9270, 9269], [9152, 9153, 9270], [9153, 9271, 9270], [9154, 9155, 9272], [9155, 9273, 9272], [9155, 9156, 9274], [9155, 9274, 9273], [9156, 9157, 9274], [9157, 9275, 9274], [9157, 9158, 9276], [9157, 9276, 9275], [9158, 9159, 9276], [9159, 9277, 9276], [9159, 9160, 9278], [9159, 9278, 9277], [9160, 9161, 9278], [9161, 9279, 9278], [9161, 9162, 9280], [9161, 9280, 9279], [9162, 9163, 9280], [9163, 9281, 9280], [9163, 9164, 9282], [9163, 9282, 9281], [9164, 9165, 9282], [9165, 9283, 9282], [9165, 9166, 9284], [9165, 9284, 9283], [9166, 9167, 9284], [9167, 9285, 9284], [9167, 9168, 9286], [9167, 
9286, 9285], [9168, 9169, 9286], [9169, 9287, 9286], [9169, 9170, 9288], [9169, 9288, 9287], [9170, 9171, 9288], [9171, 9289, 9288], [9171, 9172, 9290], [9171, 9290, 9289], [9172, 9173, 9290], [9173, 9291, 9290], [9173, 9174, 9292], [9173, 9292, 9291], [9174, 9175, 9292], [9175, 9293, 9292], [9175, 9176, 9294], [9175, 9294, 9293], [9176, 9177, 9294], [9177, 9295, 9294], [9177, 9178, 9296], [9177, 9296, 9295], [9178, 9179, 9296], [9179, 9297, 9296], [9179, 9180, 9298], [9179, 9298, 9297], [9180, 9181, 9298], [9181, 9299, 9298], [9181, 9182, 9300], [9181, 9300, 9299], [9182, 9183, 9300], [9183, 9301, 9300], [9183, 9184, 9302], [9183, 9302, 9301], [9184, 9185, 9302], [9185, 9303, 9302], [9185, 9186, 9304], [9185, 9304, 9303], [9186, 9187, 9304], [9187, 9305, 9304], [9187, 9188, 9306], [9187, 9306, 9305], [9188, 9189, 9306], [9189, 9307, 9306], [9189, 9190, 9308], [9189, 9308, 9307], [9190, 9191, 9308], [9191, 9309, 9308], [9191, 9192, 9310], [9191, 9310, 9309], [9192, 9193, 9310], [9193, 9311, 9310], [9193, 9194, 9312], [9193, 9312, 9311], [9194, 9195, 9312], [9195, 9313, 9312], [9195, 9196, 9314], [9195, 9314, 9313], [9196, 9197, 9314], [9197, 9315, 9314], [9197, 9198, 9316], [9197, 9316, 9315], [9198, 9199, 9316], [9199, 9317, 9316], [9199, 9200, 9318], [9199, 9318, 9317], [9200, 9201, 9318], [9201, 9319, 9318], [9201, 9202, 9320], [9201, 9320, 9319], [9202, 9203, 9320], [9203, 9321, 9320], [9203, 9204, 9322], [9203, 9322, 9321], [9204, 9205, 9322], [9205, 9323, 9322], [9205, 9206, 9324], [9205, 9324, 9323], [9206, 9207, 9324], [9207, 9325, 9324], [9207, 9208, 9326], [9207, 9326, 9325], [9208, 9209, 9326], [9209, 9327, 9326], [9209, 9210, 9328], [9209, 9328, 9327], [9210, 9211, 9328], [9211, 9329, 9328], [9211, 9212, 9330], [9211, 9330, 9329], [9212, 9213, 9330], [9213, 9331, 9330], [9213, 9214, 9332], [9213, 9332, 9331], [9214, 9215, 9332], [9215, 9333, 9332], [9215, 9216, 9334], [9215, 9334, 9333], [9216, 9217, 9334], [9217, 9335, 9334], [9217, 9218, 9336], [9217, 9336, 9335], [9218, 9219, 9336], [9219, 9337, 9336], [9219, 9220, 9338], [9219, 9338, 9337], [9220, 9221, 9338], [9221, 9339, 9338], [9221, 9222, 9340], [9221, 9340, 9339], [9222, 9223, 9340], [9223, 9341, 9340], [9223, 9224, 9342], [9223, 9342, 9341], [9224, 9225, 9342], [9225, 9343, 9342], [9225, 9226, 9344], [9225, 9344, 9343], [9226, 9227, 9344], [9227, 9345, 9344], [9227, 9228, 9346], [9227, 9346, 9345], [9228, 9229, 9346], [9229, 9347, 9346], [9229, 9230, 9348], [9229, 9348, 9347], [9230, 9231, 9348], [9231, 9349, 9348], [9231, 9232, 9350], [9231, 9350, 9349], [9232, 9233, 9350], [9233, 9351, 9350], [9233, 9234, 9352], [9233, 9352, 9351], [9234, 9235, 9352], [9235, 9353, 9352], [9235, 9236, 9354], [9235, 9354, 9353], [9236, 9237, 9354], [9237, 9355, 9354], [9237, 9238, 9356], [9237, 9356, 9355], [9238, 9239, 9356], [9239, 9357, 9356], [9239, 9240, 9358], [9239, 9358, 9357], [9240, 9241, 9358], [9241, 9359, 9358], [9241, 9242, 9360], [9241, 9360, 9359], [9242, 9243, 9360], [9243, 9361, 9360], [9243, 9244, 9362], [9243, 9362, 9361], [9244, 9245, 9362], [9245, 9363, 9362], [9245, 9246, 9364], [9245, 9364, 9363], [9246, 9247, 9364], [9247, 9365, 9364], [9247, 9248, 9366], [9247, 9366, 9365], [9248, 9249, 9366], [9249, 9367, 9366], [9249, 9250, 9368], [9249, 9368, 9367], [9250, 9251, 9368], [9251, 9369, 9368], [9251, 9252, 9370], [9251, 9370, 9369], [9253, 9254, 9371], [9254, 9372, 9371], [9254, 9255, 9373], [9254, 9373, 9372], [9255, 9256, 9373], [9256, 9374, 9373], [9256, 9257, 9375], [9256, 9375, 9374], [9257, 9258, 9375], 
[9258, 9376, 9375], [9258, 9259, 9377], [9258, 9377, 9376], [9259, 9260, 9377], [9260, 9378, 9377], [9260, 9261, 9379], [9260, 9379, 9378], [9261, 9262, 9379], [9262, 9380, 9379], [9262, 9263, 9381], [9262, 9381, 9380], [9263, 9264, 9381], [9264, 9382, 9381], [9264, 9265, 9383], [9264, 9383, 9382], [9265, 9266, 9383], [9266, 9384, 9383], [9266, 9267, 9385], [9266, 9385, 9384], [9267, 9268, 9385], [9268, 9386, 9385], [9268, 9269, 9387], [9268, 9387, 9386], [9269, 9270, 9387], [9270, 9388, 9387], [9270, 9271, 9389], [9270, 9389, 9388], [9272, 9273, 9391], [9272, 9391, 9390], [9273, 9274, 9391], [9274, 9392, 9391], [9274, 9275, 9393], [9274, 9393, 9392], [9275, 9276, 9393], [9276, 9394, 9393], [9276, 9277, 9395], [9276, 9395, 9394], [9277, 9278, 9395], [9278, 9396, 9395], [9278, 9279, 9397], [9278, 9397, 9396], [9279, 9280, 9397], [9280, 9398, 9397], [9280, 9281, 9399], [9280, 9399, 9398], [9281, 9282, 9399], [9282, 9400, 9399], [9282, 9283, 9401], [9282, 9401, 9400], [9283, 9284, 9401], [9284, 9402, 9401], [9284, 9285, 9403], [9284, 9403, 9402], [9285, 9286, 9403], [9286, 9404, 9403], [9286, 9287, 9405], [9286, 9405, 9404], [9287, 9288, 9405], [9288, 9406, 9405], [9288, 9289, 9407], [9288, 9407, 9406], [9289, 9290, 9407], [9290, 9408, 9407], [9290, 9291, 9409], [9290, 9409, 9408], [9291, 9292, 9409], [9292, 9410, 9409], [9292, 9293, 9411], [9292, 9411, 9410], [9293, 9294, 9411], [9294, 9412, 9411], [9294, 9295, 9413], [9294, 9413, 9412], [9295, 9296, 9413], [9296, 9414, 9413], [9296, 9297, 9415], [9296, 9415, 9414], [9297, 9298, 9415], [9298, 9416, 9415], [9298, 9299, 9417], [9298, 9417, 9416], [9299, 9300, 9417], [9300, 9418, 9417], [9300, 9301, 9419], [9300, 9419, 9418], [9301, 9302, 9419], [9302, 9420, 9419], [9302, 9303, 9421], [9302, 9421, 9420], [9303, 9304, 9421], [9304, 9422, 9421], [9304, 9305, 9423], [9304, 9423, 9422], [9305, 9306, 9423], [9306, 9424, 9423], [9306, 9307, 9425], [9306, 9425, 9424], [9307, 9308, 9425], [9308, 9426, 9425], [9308, 9309, 9427], [9308, 9427, 9426], [9309, 9310, 9427], [9310, 9428, 9427], [9310, 9311, 9429], [9310, 9429, 9428], [9311, 9312, 9429], [9312, 9430, 9429], [9312, 9313, 9431], [9312, 9431, 9430], [9313, 9314, 9431], [9314, 9432, 9431], [9314, 9315, 9433], [9314, 9433, 9432], [9315, 9316, 9433], [9316, 9434, 9433], [9316, 9317, 9435], [9316, 9435, 9434], [9317, 9318, 9435], [9318, 9436, 9435], [9318, 9319, 9437], [9318, 9437, 9436], [9319, 9320, 9437], [9320, 9438, 9437], [9320, 9321, 9439], [9320, 9439, 9438], [9321, 9322, 9439], [9322, 9440, 9439], [9322, 9323, 9441], [9322, 9441, 9440], [9323, 9324, 9441], [9324, 9442, 9441], [9324, 9325, 9443], [9324, 9443, 9442], [9325, 9326, 9443], [9326, 9444, 9443], [9326, 9327, 9445], [9326, 9445, 9444], [9327, 9328, 9445], [9328, 9446, 9445], [9328, 9329, 9447], [9328, 9447, 9446], [9329, 9330, 9447], [9330, 9448, 9447], [9330, 9331, 9449], [9330, 9449, 9448], [9331, 9332, 9449], [9332, 9450, 9449], [9332, 9333, 9451], [9332, 9451, 9450], [9333, 9334, 9451], [9334, 9452, 9451], [9334, 9335, 9453], [9334, 9453, 9452], [9335, 9336, 9453], [9336, 9454, 9453], [9336, 9337, 9455], [9336, 9455, 9454], [9337, 9338, 9455], [9338, 9456, 9455], [9338, 9339, 9457], [9338, 9457, 9456], [9339, 9340, 9457], [9340, 9458, 9457], [9340, 9341, 9459], [9340, 9459, 9458], [9341, 9342, 9459], [9342, 9460, 9459], [9342, 9343, 9461], [9342, 9461, 9460], [9343, 9344, 9461], [9344, 9462, 9461], [9344, 9345, 9463], [9344, 9463, 9462], [9345, 9346, 9463], [9346, 9464, 9463], [9346, 9347, 9465], [9346, 9465, 9464], [9347, 9348, 
9465], [9348, 9466, 9465], [9348, 9349, 9467], [9348, 9467, 9466], [9349, 9350, 9467], [9350, 9468, 9467], [9350, 9351, 9469], [9350, 9469, 9468], [9351, 9352, 9469], [9352, 9470, 9469], [9352, 9353, 9471], [9352, 9471, 9470], [9353, 9354, 9471], [9354, 9472, 9471], [9354, 9355, 9473], [9354, 9473, 9472], [9355, 9356, 9473], [9356, 9474, 9473], [9356, 9357, 9475], [9356, 9475, 9474], [9357, 9358, 9475], [9358, 9476, 9475], [9358, 9359, 9477], [9358, 9477, 9476], [9359, 9360, 9477], [9360, 9478, 9477], [9360, 9361, 9479], [9360, 9479, 9478], [9361, 9362, 9479], [9362, 9480, 9479], [9362, 9363, 9481], [9362, 9481, 9480], [9363, 9364, 9481], [9364, 9482, 9481], [9364, 9365, 9483], [9364, 9483, 9482], [9365, 9366, 9483], [9366, 9484, 9483], [9366, 9367, 9485], [9366, 9485, 9484], [9367, 9368, 9485], [9368, 9486, 9485], [9368, 9369, 9487], [9368, 9487, 9486], [9369, 9370, 9487], [9370, 9488, 9487], [9371, 9372, 9490], [9371, 9490, 9489], [9372, 9373, 9490], [9373, 9491, 9490], [9373, 9374, 9492], [9373, 9492, 9491], [9374, 9375, 9492], [9375, 9493, 9492], [9375, 9376, 9494], [9375, 9494, 9493], [9376, 9377, 9494], [9377, 9495, 9494], [9377, 9378, 9496], [9377, 9496, 9495], [9378, 9379, 9496], [9379, 9497, 9496], [9379, 9380, 9498], [9379, 9498, 9497], [9380, 9381, 9498], [9381, 9499, 9498], [9381, 9382, 9500], [9381, 9500, 9499], [9382, 9383, 9500], [9383, 9501, 9500], [9383, 9384, 9502], [9383, 9502, 9501], [9384, 9385, 9502], [9385, 9503, 9502], [9385, 9386, 9504], [9385, 9504, 9503], [9386, 9387, 9504], [9387, 9505, 9504], [9387, 9388, 9506], [9387, 9506, 9505], [9388, 9389, 9506], [9389, 9507, 9506], [9390, 9391, 9508], [9391, 9509, 9508], [9391, 9392, 9510], [9391, 9510, 9509], [9392, 9393, 9510], [9393, 9511, 9510], [9393, 9394, 9512], [9393, 9512, 9511], [9394, 9395, 9512], [9395, 9513, 9512], [9395, 9396, 9514], [9395, 9514, 9513], [9396, 9397, 9514], [9397, 9515, 9514], [9397, 9398, 9516], [9397, 9516, 9515], [9398, 9399, 9516], [9399, 9517, 9516], [9399, 9400, 9518], [9399, 9518, 9517], [9400, 9401, 9518], [9401, 9519, 9518], [9401, 9402, 9520], [9401, 9520, 9519], [9402, 9403, 9520], [9403, 9521, 9520], [9403, 9404, 9522], [9403, 9522, 9521], [9404, 9405, 9522], [9405, 9523, 9522], [9405, 9406, 9524], [9405, 9524, 9523], [9406, 9407, 9524], [9407, 9525, 9524], [9407, 9408, 9526], [9407, 9526, 9525], [9408, 9409, 9526], [9409, 9527, 9526], [9409, 9410, 9528], [9409, 9528, 9527], [9410, 9411, 9528], [9411, 9529, 9528], [9411, 9412, 9530], [9411, 9530, 9529], [9412, 9413, 9530], [9413, 9531, 9530], [9413, 9414, 9532], [9413, 9532, 9531], [9414, 9415, 9532], [9415, 9533, 9532], [9415, 9416, 9534], [9415, 9534, 9533], [9416, 9417, 9534], [9417, 9535, 9534], [9417, 9418, 9536], [9417, 9536, 9535], [9418, 9419, 9536], [9419, 9537, 9536], [9419, 9420, 9538], [9419, 9538, 9537], [9420, 9421, 9538], [9421, 9539, 9538], [9421, 9422, 9540], [9421, 9540, 9539], [9422, 9423, 9540], [9423, 9541, 9540], [9423, 9424, 9542], [9423, 9542, 9541], [9424, 9425, 9542], [9425, 9543, 9542], [9425, 9426, 9544], [9425, 9544, 9543], [9426, 9427, 9544], [9427, 9545, 9544], [9427, 9428, 9546], [9427, 9546, 9545], [9428, 9429, 9546], [9429, 9547, 9546], [9429, 9430, 9548], [9429, 9548, 9547], [9430, 9431, 9548], [9431, 9549, 9548], [9431, 9432, 9550], [9431, 9550, 9549], [9432, 9433, 9550], [9433, 9551, 9550], [9433, 9434, 9552], [9433, 9552, 9551], [9434, 9435, 9552], [9435, 9553, 9552], [9435, 9436, 9554], [9435, 9554, 9553], [9436, 9437, 9554], [9437, 9555, 9554], [9437, 9438, 9556], [9437, 9556, 9555], [9438, 
9439, 9556], [9439, 9557, 9556], [9439, 9440, 9558], [9439, 9558, 9557], [9440, 9441, 9558], [9441, 9559, 9558], [9441, 9442, 9560], [9441, 9560, 9559], [9442, 9443, 9560], [9443, 9561, 9560], [9443, 9444, 9562], [9443, 9562, 9561], [9444, 9445, 9562], [9445, 9563, 9562], [9445, 9446, 9564], [9445, 9564, 9563], [9446, 9447, 9564], [9447, 9565, 9564], [9447, 9448, 9566], [9447, 9566, 9565], [9448, 9449, 9566], [9449, 9567, 9566], [9449, 9450, 9568], [9449, 9568, 9567], [9450, 9451, 9568], [9451, 9569, 9568], [9451, 9452, 9570], [9451, 9570, 9569], [9452, 9453, 9570], [9453, 9571, 9570], [9453, 9454, 9572], [9453, 9572, 9571], [9454, 9455, 9572], [9455, 9573, 9572], [9455, 9456, 9574], [9455, 9574, 9573], [9456, 9457, 9574], [9457, 9575, 9574], [9457, 9458, 9576], [9457, 9576, 9575], [9458, 9459, 9576], [9459, 9577, 9576], [9459, 9460, 9578], [9459, 9578, 9577], [9460, 9461, 9578], [9461, 9579, 9578], [9461, 9462, 9580], [9461, 9580, 9579], [9462, 9463, 9580], [9463, 9581, 9580], [9463, 9464, 9582], [9463, 9582, 9581], [9464, 9465, 9582], [9465, 9583, 9582], [9465, 9466, 9584], [9465, 9584, 9583], [9466, 9467, 9584], [9467, 9585, 9584], [9467, 9468, 9586], [9467, 9586, 9585], [9468, 9469, 9586], [9469, 9587, 9586], [9469, 9470, 9588], [9469, 9588, 9587], [9470, 9471, 9588], [9471, 9589, 9588], [9471, 9472, 9590], [9471, 9590, 9589], [9472, 9473, 9590], [9473, 9591, 9590], [9473, 9474, 9592], [9473, 9592, 9591], [9474, 9475, 9592], [9475, 9593, 9592], [9475, 9476, 9594], [9475, 9594, 9593], [9476, 9477, 9594], [9477, 9595, 9594], [9477, 9478, 9596], [9477, 9596, 9595], [9478, 9479, 9596], [9479, 9597, 9596], [9479, 9480, 9598], [9479, 9598, 9597], [9480, 9481, 9598], [9481, 9599, 9598], [9481, 9482, 9600], [9481, 9600, 9599], [9482, 9483, 9600], [9483, 9601, 9600], [9483, 9484, 9602], [9483, 9602, 9601], [9484, 9485, 9602], [9485, 9603, 9602], [9485, 9486, 9604], [9485, 9604, 9603], [9486, 9487, 9604], [9487, 9605, 9604], [9487, 9488, 9606], [9487, 9606, 9605], [9489, 9490, 9607], [9490, 9608, 9607], [9490, 9491, 9609], [9490, 9609, 9608], [9491, 9492, 9609], [9492, 9610, 9609], [9492, 9493, 9611], [9492, 9611, 9610], [9493, 9494, 9611], [9494, 9612, 9611], [9494, 9495, 9613], [9494, 9613, 9612], [9495, 9496, 9613], [9496, 9614, 9613], [9496, 9497, 9615], [9496, 9615, 9614], [9497, 9498, 9615], [9498, 9616, 9615], [9498, 9499, 9617], [9498, 9617, 9616], [9499, 9500, 9617], [9500, 9618, 9617], [9500, 9501, 9619], [9500, 9619, 9618], [9501, 9502, 9619], [9502, 9620, 9619], [9502, 9503, 9621], [9502, 9621, 9620], [9503, 9504, 9621], [9504, 9622, 9621], [9504, 9505, 9623], [9504, 9623, 9622], [9505, 9506, 9623], [9506, 9624, 9623], [9506, 9507, 9625], [9506, 9625, 9624], [9508, 9509, 9627], [9508, 9627, 9626], [9509, 9510, 9627], [9510, 9628, 9627], [9510, 9511, 9629], [9510, 9629, 9628], [9511, 9512, 9629], [9512, 9630, 9629], [9512, 9513, 9631], [9512, 9631, 9630], [9513, 9514, 9631], [9514, 9632, 9631], [9514, 9515, 9633], [9514, 9633, 9632], [9515, 9516, 9633], [9516, 9634, 9633], [9516, 9517, 9635], [9516, 9635, 9634], [9517, 9518, 9635], [9518, 9636, 9635], [9518, 9519, 9637], [9518, 9637, 9636], [9519, 9520, 9637], [9520, 9638, 9637], [9520, 9521, 9639], [9520, 9639, 9638], [9521, 9522, 9639], [9522, 9640, 9639], [9522, 9523, 9641], [9522, 9641, 9640], [9523, 9524, 9641], [9524, 9642, 9641], [9524, 9525, 9643], [9524, 9643, 9642], [9525, 9526, 9643], [9526, 9644, 9643], [9526, 9527, 9645], [9526, 9645, 9644], [9527, 9528, 9645], [9528, 9646, 9645], [9528, 9529, 9647], [9528, 9647, 9646], 
[9529, 9530, 9647], [9530, 9648, 9647], [9530, 9531, 9649], [9530, 9649, 9648], [9531, 9532, 9649], [9532, 9650, 9649], [9532, 9533, 9651], [9532, 9651, 9650], [9533, 9534, 9651], [9534, 9652, 9651], [9534, 9535, 9653], [9534, 9653, 9652], [9535, 9536, 9653], [9536, 9654, 9653], [9536, 9537, 9655], [9536, 9655, 9654], [9537, 9538, 9655], [9538, 9656, 9655], [9538, 9539, 9657], [9538, 9657, 9656], [9539, 9540, 9657], [9540, 9658, 9657], [9540, 9541, 9659], [9540, 9659, 9658], [9541, 9542, 9659], [9542, 9660, 9659], [9542, 9543, 9661], [9542, 9661, 9660], [9543, 9544, 9661], [9544, 9662, 9661], [9544, 9545, 9663], [9544, 9663, 9662], [9545, 9546, 9663], [9546, 9664, 9663], [9546, 9547, 9665], [9546, 9665, 9664], [9547, 9548, 9665], [9548, 9666, 9665], [9548, 9549, 9667], [9548, 9667, 9666], [9549, 9550, 9667], [9550, 9668, 9667], [9550, 9551, 9669], [9550, 9669, 9668], [9551, 9552, 9669], [9552, 9670, 9669], [9552, 9553, 9671], [9552, 9671, 9670], [9553, 9554, 9671], [9554, 9672, 9671], [9554, 9555, 9673], [9554, 9673, 9672], [9555, 9556, 9673], [9556, 9674, 9673], [9556, 9557, 9675], [9556, 9675, 9674], [9557, 9558, 9675], [9558, 9676, 9675], [9558, 9559, 9677], [9558, 9677, 9676], [9559, 9560, 9677], [9560, 9678, 9677], [9560, 9561, 9679], [9560, 9679, 9678], [9561, 9562, 9679], [9562, 9680, 9679], [9562, 9563, 9681], [9562, 9681, 9680], [9563, 9564, 9681], [9564, 9682, 9681], [9564, 9565, 9683], [9564, 9683, 9682], [9565, 9566, 9683], [9566, 9684, 9683], [9566, 9567, 9685], [9566, 9685, 9684], [9567, 9568, 9685], [9568, 9686, 9685], [9568, 9569, 9687], [9568, 9687, 9686], [9569, 9570, 9687], [9570, 9688, 9687], [9570, 9571, 9689], [9570, 9689, 9688], [9571, 9572, 9689], [9572, 9690, 9689], [9572, 9573, 9691], [9572, 9691, 9690], [9573, 9574, 9691], [9574, 9692, 9691], [9574, 9575, 9693], [9574, 9693, 9692], [9575, 9576, 9693], [9576, 9694, 9693], [9576, 9577, 9695], [9576, 9695, 9694], [9577, 9578, 9695], [9578, 9696, 9695], [9578, 9579, 9697], [9578, 9697, 9696], [9579, 9580, 9697], [9580, 9698, 9697], [9580, 9581, 9699], [9580, 9699, 9698], [9581, 9582, 9699], [9582, 9700, 9699], [9582, 9583, 9701], [9582, 9701, 9700], [9583, 9584, 9701], [9584, 9702, 9701], [9584, 9585, 9703], [9584, 9703, 9702], [9585, 9586, 9703], [9586, 9704, 9703], [9586, 9587, 9705], [9586, 9705, 9704], [9587, 9588, 9705], [9588, 9706, 9705], [9588, 9589, 9707], [9588, 9707, 9706], [9589, 9590, 9707], [9590, 9708, 9707], [9590, 9591, 9709], [9590, 9709, 9708], [9591, 9592, 9709], [9592, 9710, 9709], [9592, 9593, 9711], [9592, 9711, 9710], [9593, 9594, 9711], [9594, 9712, 9711], [9594, 9595, 9713], [9594, 9713, 9712], [9595, 9596, 9713], [9596, 9714, 9713], [9596, 9597, 9715], [9596, 9715, 9714], [9597, 9598, 9715], [9598, 9716, 9715], [9598, 9599, 9717], [9598, 9717, 9716], [9599, 9600, 9717], [9600, 9718, 9717], [9600, 9601, 9719], [9600, 9719, 9718], [9601, 9602, 9719], [9602, 9720, 9719], [9602, 9603, 9721], [9602, 9721, 9720], [9603, 9604, 9721], [9604, 9722, 9721], [9604, 9605, 9723], [9604, 9723, 9722], [9605, 9606, 9723], [9606, 9724, 9723], [9607, 9608, 9726], [9607, 9726, 9725], [9608, 9609, 9726], [9609, 9727, 9726], [9609, 9610, 9728], [9609, 9728, 9727], [9610, 9611, 9728], [9611, 9729, 9728], [9611, 9612, 9730], [9611, 9730, 9729], [9612, 9613, 9730], [9613, 9731, 9730], [9613, 9614, 9732], [9613, 9732, 9731], [9614, 9615, 9732], [9615, 9733, 9732], [9615, 9616, 9734], [9615, 9734, 9733], [9616, 9617, 9734], [9617, 9735, 9734], [9617, 9618, 9736], [9617, 9736, 9735], [9618, 9619, 9736], [9619, 9737, 
9736], [9619, 9620, 9738], [9619, 9738, 9737], [9620, 9621, 9738], [9621, 9739, 9738], [9621, 9622, 9740], [9621, 9740, 9739], [9622, 9623, 9740], [9623, 9741, 9740], [9623, 9624, 9742], [9623, 9742, 9741], [9624, 9625, 9742], [9625, 9743, 9742], [9626, 9627, 9744], [9627, 9745, 9744], [9627, 9628, 9746], [9627, 9746, 9745], [9628, 9629, 9746], [9629, 9747, 9746], [9629, 9630, 9748], [9629, 9748, 9747], [9630, 9631, 9748], [9631, 9749, 9748], [9631, 9632, 9750], [9631, 9750, 9749], [9632, 9633, 9750], [9633, 9751, 9750], [9633, 9634, 9752], [9633, 9752, 9751], [9634, 9635, 9752], [9635, 9753, 9752], [9635, 9636, 9754], [9635, 9754, 9753], [9636, 9637, 9754], [9637, 9755, 9754], [9637, 9638, 9756], [9637, 9756, 9755], [9638, 9639, 9756], [9639, 9757, 9756], [9639, 9640, 9758], [9639, 9758, 9757], [9640, 9641, 9758], [9641, 9759, 9758], [9641, 9642, 9760], [9641, 9760, 9759], [9642, 9643, 9760], [9643, 9761, 9760], [9643, 9644, 9762], [9643, 9762, 9761], [9644, 9645, 9762], [9645, 9763, 9762], [9645, 9646, 9764], [9645, 9764, 9763], [9646, 9647, 9764], [9647, 9765, 9764], [9647, 9648, 9766], [9647, 9766, 9765], [9648, 9649, 9766], [9649, 9767, 9766], [9649, 9650, 9768], [9649, 9768, 9767], [9650, 9651, 9768], [9651, 9769, 9768], [9651, 9652, 9770], [9651, 9770, 9769], [9652, 9653, 9770], [9653, 9771, 9770], [9653, 9654, 9772], [9653, 9772, 9771], [9654, 9655, 9772], [9655, 9773, 9772], [9655, 9656, 9774], [9655, 9774, 9773], [9656, 9657, 9774], [9657, 9775, 9774], [9657, 9658, 9776], [9657, 9776, 9775], [9658, 9659, 9776], [9659, 9777, 9776], [9659, 9660, 9778], [9659, 9778, 9777], [9660, 9661, 9778], [9661, 9779, 9778], [9661, 9662, 9780], [9661, 9780, 9779], [9662, 9663, 9780], [9663, 9781, 9780], [9663, 9664, 9782], [9663, 9782, 9781], [9664, 9665, 9782], [9665, 9783, 9782], [9665, 9666, 9784], [9665, 9784, 9783], [9666, 9667, 9784], [9667, 9785, 9784], [9667, 9668, 9786], [9667, 9786, 9785], [9668, 9669, 9786], [9669, 9787, 9786], [9669, 9670, 9788], [9669, 9788, 9787], [9670, 9671, 9788], [9671, 9789, 9788], [9671, 9672, 9790], [9671, 9790, 9789], [9672, 9673, 9790], [9673, 9791, 9790], [9673, 9674, 9792], [9673, 9792, 9791], [9674, 9675, 9792], [9675, 9793, 9792], [9675, 9676, 9794], [9675, 9794, 9793], [9676, 9677, 9794], [9677, 9795, 9794], [9677, 9678, 9796], [9677, 9796, 9795], [9678, 9679, 9796], [9679, 9797, 9796], [9679, 9680, 9798], [9679, 9798, 9797], [9680, 9681, 9798], [9681, 9799, 9798], [9681, 9682, 9800], [9681, 9800, 9799], [9682, 9683, 9800], [9683, 9801, 9800], [9683, 9684, 9802], [9683, 9802, 9801], [9684, 9685, 9802], [9685, 9803, 9802], [9685, 9686, 9804], [9685, 9804, 9803], [9686, 9687, 9804], [9687, 9805, 9804], [9687, 9688, 9806], [9687, 9806, 9805], [9688, 9689, 9806], [9689, 9807, 9806], [9689, 9690, 9808], [9689, 9808, 9807], [9690, 9691, 9808], [9691, 9809, 9808], [9691, 9692, 9810], [9691, 9810, 9809], [9692, 9693, 9810], [9693, 9811, 9810], [9693, 9694, 9812], [9693, 9812, 9811], [9694, 9695, 9812], [9695, 9813, 9812], [9695, 9696, 9814], [9695, 9814, 9813], [9696, 9697, 9814], [9697, 9815, 9814], [9697, 9698, 9816], [9697, 9816, 9815], [9698, 9699, 9816], [9699, 9817, 9816], [9699, 9700, 9818], [9699, 9818, 9817], [9700, 9701, 9818], [9701, 9819, 9818], [9701, 9702, 9820], [9701, 9820, 9819], [9702, 9703, 9820], [9703, 9821, 9820], [9703, 9704, 9822], [9703, 9822, 9821], [9704, 9705, 9822], [9705, 9823, 9822], [9705, 9706, 9824], [9705, 9824, 9823], [9706, 9707, 9824], [9707, 9825, 9824], [9707, 9708, 9826], [9707, 9826, 9825], [9708, 9709, 9826], [9709, 
9827, 9826], [9709, 9710, 9828], [9709, 9828, 9827], [9710, 9711, 9828], [9711, 9829, 9828], [9711, 9712, 9830], [9711, 9830, 9829], [9712, 9713, 9830], [9713, 9831, 9830], [9713, 9714, 9832], [9713, 9832, 9831], [9714, 9715, 9832], [9715, 9833, 9832], [9715, 9716, 9834], [9715, 9834, 9833], [9716, 9717, 9834], [9717, 9835, 9834], [9717, 9718, 9836], [9717, 9836, 9835], [9718, 9719, 9836], [9719, 9837, 9836], [9719, 9720, 9838], [9719, 9838, 9837], [9720, 9721, 9838], [9721, 9839, 9838], [9721, 9722, 9840], [9721, 9840, 9839], [9722, 9723, 9840], [9723, 9841, 9840], [9723, 9724, 9842], [9723, 9842, 9841], [9725, 9726, 9845], [9726, 9846, 9845], [9726, 9727, 9847], [9726, 9847, 9846], [9727, 9728, 9847], [9728, 9848, 9847], [9728, 9729, 9849], [9728, 9849, 9848], [9729, 9730, 9849], [9730, 9850, 9849], [9730, 9731, 9851], [9730, 9851, 9850], [9731, 9732, 9851], [9732, 9852, 9851], [9732, 9733, 9853], [9732, 9853, 9852], [9733, 9734, 9853], [9734, 9854, 9853], [9734, 9735, 9855], [9734, 9855, 9854], [9735, 9736, 9855], [9736, 9856, 9855], [9736, 9737, 9857], [9736, 9857, 9856], [9737, 9738, 9857], [9738, 9858, 9857], [9738, 9739, 9859], [9738, 9859, 9858], [9739, 9740, 9859], [9740, 9860, 9859], [9740, 9741, 9861], [9740, 9861, 9860], [9741, 9742, 9861], [9742, 9862, 9861], [9742, 9743, 9863], [9742, 9863, 9862], [9744, 9745, 9865], [9744, 9865, 9864], [9745, 9746, 9865], [9746, 9866, 9865], [9746, 9747, 9867], [9746, 9867, 9866], [9747, 9748, 9867], [9748, 9868, 9867], [9748, 9749, 9869], [9748, 9869, 9868], [9749, 9750, 9869], [9750, 9870, 9869], [9750, 9751, 9871], [9750, 9871, 9870], [9751, 9752, 9871], [9752, 9872, 9871], [9752, 9753, 9873], [9752, 9873, 9872], [9753, 9754, 9873], [9754, 9874, 9873], [9754, 9755, 9875], [9754, 9875, 9874], [9755, 9756, 9875], [9756, 9876, 9875], [9756, 9757, 9877], [9756, 9877, 9876], [9757, 9758, 9877], [9758, 9878, 9877], [9758, 9759, 9879], [9758, 9879, 9878], [9759, 9760, 9879], [9760, 9880, 9879], [9760, 9761, 9881], [9760, 9881, 9880], [9761, 9762, 9881], [9762, 9882, 9881], [9762, 9763, 9883], [9762, 9883, 9882], [9763, 9764, 9883], [9764, 9884, 9883], [9764, 9765, 9885], [9764, 9885, 9884], [9765, 9766, 9885], [9766, 9886, 9885], [9766, 9767, 9887], [9766, 9887, 9886], [9767, 9768, 9887], [9768, 9888, 9887], [9768, 9769, 9889], [9768, 9889, 9888], [9769, 9770, 9889], [9770, 9890, 9889], [9770, 9771, 9891], [9770, 9891, 9890], [9771, 9772, 9891], [9772, 9892, 9891], [9772, 9773, 9893], [9772, 9893, 9892], [9773, 9774, 9893], [9774, 9894, 9893], [9774, 9775, 9895], [9774, 9895, 9894], [9775, 9776, 9895], [9776, 9896, 9895], [9776, 9777, 9897], [9776, 9897, 9896], [9777, 9778, 9897], [9778, 9898, 9897], [9778, 9779, 9899], [9778, 9899, 9898], [9779, 9780, 9899], [9780, 9900, 9899], [9780, 9781, 9901], [9780, 9901, 9900], [9781, 9782, 9901], [9782, 9902, 9901], [9782, 9783, 9903], [9782, 9903, 9902], [9783, 9784, 9903], [9784, 9904, 9903], [9784, 9785, 9905], [9784, 9905, 9904], [9785, 9786, 9905], [9786, 9906, 9905], [9786, 9787, 9907], [9786, 9907, 9906], [9787, 9788, 9907], [9788, 9908, 9907], [9788, 9789, 9909], [9788, 9909, 9908], [9789, 9790, 9909], [9790, 9910, 9909], [9790, 9791, 9911], [9790, 9911, 9910], [9791, 9792, 9911], [9792, 9912, 9911], [9792, 9793, 9913], [9792, 9913, 9912], [9793, 9794, 9913], [9794, 9914, 9913], [9794, 9795, 9915], [9794, 9915, 9914], [9795, 9796, 9915], [9796, 9916, 9915], [9796, 9797, 9917], [9796, 9917, 9916], [9797, 9798, 9917], [9798, 9918, 9917], [9798, 9799, 9919], [9798, 9919, 9918], [9799, 9800, 9919], 
[9800, 9920, 9919], [9800, 9801, 9921], [9800, 9921, 9920], [9801, 9802, 9921], [9802, 9922, 9921], [9802, 9803, 9923], [9802, 9923, 9922], [9803, 9804, 9923], [9804, 9924, 9923], [9804, 9805, 9925], [9804, 9925, 9924], [9805, 9806, 9925], [9806, 9926, 9925], [9806, 9807, 9927], [9806, 9927, 9926], [9807, 9808, 9927], [9808, 9928, 9927], [9808, 9809, 9929], [9808, 9929, 9928], [9809, 9810, 9929], [9810, 9930, 9929], [9810, 9811, 9931], [9810, 9931, 9930], [9811, 9812, 9931], [9812, 9932, 9931], [9812, 9813, 9933], [9812, 9933, 9932], [9813, 9814, 9933], [9814, 9934, 9933], [9814, 9815, 9935], [9814, 9935, 9934], [9815, 9816, 9935], [9816, 9936, 9935], [9816, 9817, 9937], [9816, 9937, 9936], [9817, 9818, 9937], [9818, 9938, 9937], [9818, 9819, 9939], [9818, 9939, 9938], [9819, 9820, 9939], [9820, 9940, 9939], [9820, 9821, 9941], [9820, 9941, 9940], [9821, 9822, 9941], [9822, 9942, 9941], [9822, 9823, 9943], [9822, 9943, 9942], [9823, 9824, 9943], [9824, 9944, 9943], [9824, 9825, 9945], [9824, 9945, 9944], [9825, 9826, 9945], [9826, 9946, 9945], [9826, 9827, 9947], [9826, 9947, 9946], [9827, 9828, 9947], [9828, 9948, 9947], [9828, 9829, 9949], [9828, 9949, 9948], [9829, 9830, 9949], [9830, 9950, 9949], [9830, 9831, 9951], [9830, 9951, 9950], [9831, 9832, 9951], [9832, 9952, 9951], [9832, 9833, 9953], [9832, 9953, 9952], [9833, 9834, 9953], [9834, 9954, 9953], [9834, 9835, 9955], [9834, 9955, 9954], [9835, 9836, 9955], [9836, 9956, 9955], [9836, 9837, 9957], [9836, 9957, 9956], [9837, 9838, 9957], [9838, 9958, 9957], [9838, 9839, 9959], [9838, 9959, 9958], [9839, 9840, 9959], [9840, 9960, 9959], [9840, 9841, 9961], [9840, 9961, 9960], [9841, 9842, 9961], [9842, 9962, 9961], [9843, 9964, 9963], [9843, 9844, 9965], [9843, 9965, 9964], [9844, 9845, 9965], [9845, 9966, 9965], [9845, 9846, 9967], [9845, 9967, 9966], [9846, 9847, 9967], [9847, 9968, 9967], [9847, 9848, 9969], [9847, 9969, 9968], [9848, 9849, 9969], [9849, 9970, 9969], [9849, 9850, 9971], [9849, 9971, 9970], [9850, 9851, 9971], [9851, 9972, 9971], [9851, 9852, 9973], [9851, 9973, 9972], [9852, 9853, 9973], [9853, 9974, 9973], [9853, 9854, 9975], [9853, 9975, 9974], [9854, 9855, 9975], [9855, 9976, 9975], [9855, 9856, 9977], [9855, 9977, 9976], [9856, 9857, 9977], [9857, 9978, 9977], [9857, 9858, 9979], [9857, 9979, 9978], [9858, 9859, 9979], [9859, 9980, 9979], [9859, 9860, 9981], [9859, 9981, 9980], [9860, 9861, 9981], [9861, 9982, 9981], [9861, 9862, 9983], [9861, 9983, 9982], [9862, 9863, 9983], [9863, 9984, 9983], [9864, 9865, 9985], [9865, 9986, 9985], [9865, 9866, 9987], [9865, 9987, 9986], [9866, 9867, 9987], [9867, 9988, 9987], [9867, 9868, 9989], [9867, 9989, 9988], [9868, 9869, 9989], [9869, 9990, 9989], [9869, 9870, 9991], [9869, 9991, 9990], [9870, 9871, 9991], [9871, 9992, 9991], [9871, 9872, 9993], [9871, 9993, 9992], [9872, 9873, 9993], [9873, 9994, 9993], [9873, 9874, 9995], [9873, 9995, 9994], [9874, 9875, 9995], [9875, 9996, 9995], [9875, 9876, 9997], [9875, 9997, 9996], [9876, 9877, 9997], [9877, 9998, 9997], [9877, 9878, 9999], [9877, 9999, 9998], [9878, 9879, 9999], [9879, 10000, 9999], [9879, 9880, 10001], [9879, 10001, 10000], [9880, 9881, 10001], [9881, 10002, 10001], [9881, 9882, 10003], [9881, 10003, 10002], [9882, 9883, 10003], [9883, 10004, 10003], [9883, 9884, 10005], [9883, 10005, 10004], [9884, 9885, 10005], [9885, 10006, 10005], [9885, 9886, 10007], [9885, 10007, 10006], [9886, 9887, 10007], [9887, 10008, 10007], [9887, 9888, 10009], [9887, 10009, 10008], [9888, 9889, 10009], [9889, 10010, 10009], 
[9889, 9890, 10011], [9889, 10011, 10010], [9890, 9891, 10011], [9891, 10012, 10011], [9891, 9892, 10013], [9891, 10013, 10012], [9892, 9893, 10013], [9893, 10014, 10013], [9893, 9894, 10015], [9893, 10015, 10014], [9894, 9895, 10015], [9895, 10016, 10015], [9895, 9896, 10017], [9895, 10017, 10016], [9896, 9897, 10017], [9897, 10018, 10017], [9897, 9898, 10019], [9897, 10019, 10018], [9898, 9899, 10019], [9899, 10020, 10019], [9899, 9900, 10021], [9899, 10021, 10020], [9900, 9901, 10021], [9901, 10022, 10021], [9901, 9902, 10023], [9901, 10023, 10022], [9902, 9903, 10023], [9903, 10024, 10023], [9903, 9904, 10025], [9903, 10025, 10024], [9904, 9905, 10025], [9905, 10026, 10025], [9905, 9906, 10027], [9905, 10027, 10026], [9906, 9907, 10027], [9907, 10028, 10027], [9907, 9908, 10029], [9907, 10029, 10028], [9908, 9909, 10029], [9909, 10030, 10029], [9909, 9910, 10031], [9909, 10031, 10030], [9910, 9911, 10031], [9911, 10032, 10031], [9911, 9912, 10033], [9911, 10033, 10032], [9912, 9913, 10033], [9913, 10034, 10033], [9913, 9914, 10035], [9913, 10035, 10034], [9914, 9915, 10035], [9915, 10036, 10035], [9915, 9916, 10037], [9915, 10037, 10036], [9916, 9917, 10037], [9917, 10038, 10037], [9917, 9918, 10039], [9917, 10039, 10038], [9918, 9919, 10039], [9919, 10040, 10039], [9919, 9920, 10041], [9919, 10041, 10040], [9920, 9921, 10041], [9921, 10042, 10041], [9921, 9922, 10043], [9921, 10043, 10042], [9922, 9923, 10043], [9923, 10044, 10043], [9923, 9924, 10045], [9923, 10045, 10044], [9924, 9925, 10045], [9925, 10046, 10045], [9925, 9926, 10047], [9925, 10047, 10046], [9926, 9927, 10047], [9927, 10048, 10047], [9927, 9928, 10049], [9927, 10049, 10048], [9928, 9929, 10049], [9929, 10050, 10049], [9929, 9930, 10051], [9929, 10051, 10050], [9930, 9931, 10051], [9931, 10052, 10051], [9931, 9932, 10053], [9931, 10053, 10052], [9932, 9933, 10053], [9933, 10054, 10053], [9933, 9934, 10055], [9933, 10055, 10054], [9934, 9935, 10055], [9935, 10056, 10055], [9935, 9936, 10057], [9935, 10057, 10056], [9936, 9937, 10057], [9937, 10058, 10057], [9937, 9938, 10059], [9937, 10059, 10058], [9938, 9939, 10059], [9939, 10060, 10059], [9939, 9940, 10061], [9939, 10061, 10060], [9940, 9941, 10061], [9941, 10062, 10061], [9941, 9942, 10063], [9941, 10063, 10062], [9942, 9943, 10063], [9943, 10064, 10063], [9943, 9944, 10065], [9943, 10065, 10064], [9944, 9945, 10065], [9945, 10066, 10065], [9945, 9946, 10067], [9945, 10067, 10066], [9946, 9947, 10067], [9947, 10068, 10067], [9947, 9948, 10069], [9947, 10069, 10068], [9948, 9949, 10069], [9949, 10070, 10069], [9949, 9950, 10071], [9949, 10071, 10070], [9950, 9951, 10071], [9951, 10072, 10071], [9951, 9952, 10073], [9951, 10073, 10072], [9952, 9953, 10073], [9953, 10074, 10073], [9953, 9954, 10075], [9953, 10075, 10074], [9954, 9955, 10075], [9955, 10076, 10075], [9955, 9956, 10077], [9955, 10077, 10076], [9956, 9957, 10077], [9957, 10078, 10077], [9957, 9958, 10079], [9957, 10079, 10078], [9958, 9959, 10079], [9959, 10080, 10079], [9959, 9960, 10081], [9959, 10081, 10080], [9960, 9961, 10081], [9961, 10082, 10081], [9961, 9962, 10083], [9961, 10083, 10082], [9963, 10089, 10088], [9963, 9964, 10090], [9963, 10090, 10089], [9964, 9965, 10090], [9965, 10091, 10090], [9965, 9966, 10092], [9965, 10092, 10091], [9966, 9967, 10092], [9967, 10093, 10092], [9967, 9968, 10094], [9967, 10094, 10093], [9968, 9969, 10094], [9969, 10095, 10094], [9969, 9970, 10096], [9969, 10096, 10095], [9970, 9971, 10096], [9971, 10097, 10096], [9971, 9972, 10098], [9971, 10098, 10097], [9972, 
9973, 10098], [9973, 10099, 10098], [9973, 9974, 10100], [9973, 10100, 10099], [9974, 9975, 10100], [9975, 10101, 10100], [9975, 9976, 10102], [9975, 10102, 10101], [9976, 9977, 10102], [9977, 10103, 10102], [9977, 9978, 10104], [9977, 10104, 10103], [9978, 9979, 10104], [9979, 10105, 10104], [9979, 9980, 10106], [9979, 10106, 10105], [9980, 9981, 10106], [9981, 10107, 10106], [9981, 9982, 10108], [9981, 10108, 10107], [9982, 9983, 10108], [9983, 10109, 10108], [9983, 9984, 10110], [9983, 10110, 10109], [9985, 9986, 10112], [9985, 10112, 10111], [9986, 9987, 10112], [9987, 10113, 10112], [9987, 9988, 10114], [9987, 10114, 10113], [9988, 9989, 10114], [9989, 10115, 10114], [9989, 9990, 10116], [9989, 10116, 10115], [9990, 9991, 10116], [9991, 10117, 10116], [9991, 9992, 10118], [9991, 10118, 10117], [9992, 9993, 10118], [9993, 10119, 10118], [9993, 9994, 10120], [9993, 10120, 10119], [9994, 9995, 10120], [9995, 10121, 10120], [9995, 9996, 10122], [9995, 10122, 10121], [9996, 9997, 10122], [9997, 10123, 10122], [9997, 9998, 10124], [9997, 10124, 10123], [9998, 9999, 10124], [9999, 10125, 10124], [9999, 10000, 10126], [9999, 10126, 10125], [10000, 10001, 10126], [10001, 10127, 10126], [10001, 10002, 10128], [10001, 10128, 10127], [10002, 10003, 10128], [10003, 10129, 10128], [10003, 10004, 10130], [10003, 10130, 10129], [10004, 10005, 10130], [10005, 10131, 10130], [10005, 10006, 10132], [10005, 10132, 10131], [10006, 10007, 10132], [10007, 10133, 10132], [10007, 10008, 10134], [10007, 10134, 10133], [10008, 10009, 10134], [10009, 10135, 10134], [10009, 10010, 10136], [10009, 10136, 10135], [10010, 10011, 10136], [10011, 10137, 10136], [10011, 10012, 10138], [10011, 10138, 10137], [10012, 10013, 10138], [10013, 10139, 10138], [10013, 10014, 10140], [10013, 10140, 10139], [10014, 10015, 10140], [10015, 10141, 10140], [10015, 10016, 10142], [10015, 10142, 10141], [10016, 10017, 10142], [10017, 10143, 10142], [10017, 10018, 10144], [10017, 10144, 10143], [10018, 10019, 10144], [10019, 10145, 10144], [10019, 10020, 10146], [10019, 10146, 10145], [10020, 10021, 10146], [10021, 10147, 10146], [10021, 10022, 10148], [10021, 10148, 10147], [10022, 10023, 10148], [10023, 10149, 10148], [10023, 10024, 10150], [10023, 10150, 10149], [10024, 10025, 10150], [10025, 10151, 10150], [10025, 10026, 10152], [10025, 10152, 10151], [10026, 10027, 10152], [10027, 10153, 10152], [10027, 10028, 10154], [10027, 10154, 10153], [10028, 10029, 10154], [10029, 10155, 10154], [10029, 10030, 10156], [10029, 10156, 10155], [10030, 10031, 10156], [10031, 10157, 10156], [10031, 10032, 10158], [10031, 10158, 10157], [10032, 10033, 10158], [10033, 10159, 10158], [10033, 10034, 10160], [10033, 10160, 10159], [10034, 10035, 10160], [10035, 10161, 10160], [10035, 10036, 10162], [10035, 10162, 10161], [10036, 10037, 10162], [10037, 10163, 10162], [10037, 10038, 10164], [10037, 10164, 10163], [10038, 10039, 10164], [10039, 10165, 10164], [10039, 10040, 10166], [10039, 10166, 10165], [10040, 10041, 10166], [10041, 10167, 10166], [10041, 10042, 10168], [10041, 10168, 10167], [10042, 10043, 10168], [10043, 10169, 10168], [10043, 10044, 10170], [10043, 10170, 10169], [10044, 10045, 10170], [10045, 10171, 10170], [10045, 10046, 10172], [10045, 10172, 10171], [10046, 10047, 10172], [10047, 10173, 10172], [10047, 10048, 10174], [10047, 10174, 10173], [10048, 10049, 10174], [10049, 10175, 10174], [10049, 10050, 10176], [10049, 10176, 10175], [10050, 10051, 10176], [10051, 10177, 10176], [10051, 10052, 10178], [10051, 10178, 10177], [10052, 
10053, 10178], [10053, 10179, 10178], [10053, 10054, 10180], [10053, 10180, 10179], [10054, 10055, 10180], [10055, 10181, 10180], [10055, 10056, 10182], [10055, 10182, 10181], [10056, 10057, 10182], [10057, 10183, 10182], [10057, 10058, 10184], [10057, 10184, 10183], [10058, 10059, 10184], [10059, 10185, 10184], [10059, 10060, 10186], [10059, 10186, 10185], [10060, 10061, 10186], [10061, 10187, 10186], [10061, 10062, 10188], [10061, 10188, 10187], [10062, 10063, 10188], [10063, 10189, 10188], [10063, 10064, 10190], [10063, 10190, 10189], [10064, 10065, 10190], [10065, 10191, 10190], [10065, 10066, 10192], [10065, 10192, 10191], [10066, 10067, 10192], [10067, 10193, 10192], [10067, 10068, 10194], [10067, 10194, 10193], [10068, 10069, 10194], [10069, 10195, 10194], [10069, 10070, 10196], [10069, 10196, 10195], [10070, 10071, 10196], [10071, 10197, 10196], [10071, 10072, 10198], [10071, 10198, 10197], [10072, 10073, 10198], [10073, 10199, 10198], [10073, 10074, 10200], [10073, 10200, 10199], [10074, 10075, 10200], [10075, 10201, 10200], [10075, 10076, 10202], [10075, 10202, 10201], [10076, 10077, 10202], [10077, 10203, 10202], [10077, 10078, 10204], [10077, 10204, 10203], [10078, 10079, 10204], [10079, 10205, 10204], [10079, 10080, 10206], [10079, 10206, 10205], [10080, 10081, 10206], [10081, 10207, 10206], [10081, 10082, 10208], [10081, 10208, 10207], [10082, 10083, 10208], [10083, 10209, 10208], [10083, 10084, 10210], [10083, 10210, 10209], [10084, 10085, 10210], [10085, 10211, 10210], [10086, 10087, 10213], [10086, 10213, 10212], [10087, 10088, 10213], [10088, 10214, 10213], [10088, 10089, 10215], [10088, 10215, 10214], [10089, 10090, 10215], [10090, 10216, 10215], [10090, 10091, 10217], [10090, 10217, 10216], [10091, 10092, 10217], [10092, 10218, 10217], [10092, 10093, 10219], [10092, 10219, 10218], [10093, 10094, 10219], [10094, 10220, 10219], [10094, 10095, 10221], [10094, 10221, 10220], [10095, 10096, 10221], [10096, 10222, 10221], [10096, 10097, 10223], [10096, 10223, 10222], [10097, 10098, 10223], [10098, 10224, 10223], [10098, 10099, 10225], [10098, 10225, 10224], [10099, 10100, 10225], [10100, 10226, 10225], [10100, 10101, 10227], [10100, 10227, 10226], [10101, 10102, 10227], [10102, 10228, 10227], [10102, 10103, 10229], [10102, 10229, 10228], [10103, 10104, 10229], [10104, 10230, 10229], [10104, 10105, 10231], [10104, 10231, 10230], [10105, 10106, 10231], [10106, 10232, 10231], [10106, 10107, 10233], [10106, 10233, 10232], [10107, 10108, 10233], [10108, 10234, 10233], [10108, 10109, 10235], [10108, 10235, 10234], [10109, 10110, 10235], [10110, 10236, 10235], [10111, 10112, 10237], [10112, 10238, 10237], [10112, 10113, 10239], [10112, 10239, 10238], [10113, 10114, 10239], [10114, 10240, 10239], [10114, 10115, 10241], [10114, 10241, 10240], [10115, 10116, 10241], [10116, 10242, 10241], [10116, 10117, 10243], [10116, 10243, 10242], [10117, 10118, 10243], [10118, 10244, 10243], [10118, 10119, 10245], [10118, 10245, 10244], [10119, 10120, 10245], [10120, 10246, 10245], [10120, 10121, 10247], [10120, 10247, 10246], [10121, 10122, 10247], [10122, 10248, 10247], [10122, 10123, 10249], [10122, 10249, 10248], [10123, 10124, 10249], [10124, 10250, 10249], [10124, 10125, 10251], [10124, 10251, 10250], [10125, 10126, 10251], [10126, 10252, 10251], [10126, 10127, 10253], [10126, 10253, 10252], [10127, 10128, 10253], [10128, 10254, 10253], [10128, 10129, 10255], [10128, 10255, 10254], [10129, 10130, 10255], [10130, 10256, 10255], [10130, 10131, 10257], [10130, 10257, 10256], [10131, 10132, 
10257], [10132, 10258, 10257], [10132, 10133, 10259], [10132, 10259, 10258], [10133, 10134, 10259], [10134, 10260, 10259], [10134, 10135, 10261], [10134, 10261, 10260], [10135, 10136, 10261], [10136, 10262, 10261], [10136, 10137, 10263], [10136, 10263, 10262], [10137, 10138, 10263], [10138, 10264, 10263], [10138, 10139, 10265], [10138, 10265, 10264], [10139, 10140, 10265], [10140, 10266, 10265], [10140, 10141, 10267], [10140, 10267, 10266], [10141, 10142, 10267], [10142, 10268, 10267], [10142, 10143, 10269], [10142, 10269, 10268], [10143, 10144, 10269], [10144, 10270, 10269], [10144, 10145, 10271], [10144, 10271, 10270], [10145, 10146, 10271], [10146, 10272, 10271], [10146, 10147, 10273], [10146, 10273, 10272], [10147, 10148, 10273], [10148, 10274, 10273], [10148, 10149, 10275], [10148, 10275, 10274], [10149, 10150, 10275], [10150, 10276, 10275], [10150, 10151, 10277], [10150, 10277, 10276], [10151, 10152, 10277], [10152, 10278, 10277], [10152, 10153, 10279], [10152, 10279, 10278], [10153, 10154, 10279], [10154, 10280, 10279], [10154, 10155, 10281], [10154, 10281, 10280], [10155, 10156, 10281], [10156, 10282, 10281], [10156, 10157, 10283], [10156, 10283, 10282], [10157, 10158, 10283], [10158, 10284, 10283], [10158, 10159, 10285], [10158, 10285, 10284], [10159, 10160, 10285], [10160, 10286, 10285], [10160, 10161, 10287], [10160, 10287, 10286], [10161, 10162, 10287], [10162, 10288, 10287], [10162, 10163, 10289], [10162, 10289, 10288], [10163, 10164, 10289], [10164, 10290, 10289], [10164, 10165, 10291], [10164, 10291, 10290], [10165, 10166, 10291], [10166, 10292, 10291], [10166, 10167, 10293], [10166, 10293, 10292], [10167, 10168, 10293], [10168, 10294, 10293], [10168, 10169, 10295], [10168, 10295, 10294], [10169, 10170, 10295], [10170, 10296, 10295], [10170, 10171, 10297], [10170, 10297, 10296], [10171, 10172, 10297], [10172, 10298, 10297], [10172, 10173, 10299], [10172, 10299, 10298], [10173, 10174, 10299], [10174, 10300, 10299], [10174, 10175, 10301], [10174, 10301, 10300], [10175, 10176, 10301], [10176, 10302, 10301], [10176, 10177, 10303], [10176, 10303, 10302], [10177, 10178, 10303], [10178, 10304, 10303], [10178, 10179, 10305], [10178, 10305, 10304], [10179, 10180, 10305], [10180, 10306, 10305], [10180, 10181, 10307], [10180, 10307, 10306], [10181, 10182, 10307], [10182, 10308, 10307], [10182, 10183, 10309], [10182, 10309, 10308], [10183, 10184, 10309], [10184, 10310, 10309], [10184, 10185, 10311], [10184, 10311, 10310], [10185, 10186, 10311], [10186, 10312, 10311], [10186, 10187, 10313], [10186, 10313, 10312], [10187, 10188, 10313], [10188, 10314, 10313], [10188, 10189, 10315], [10188, 10315, 10314], [10189, 10190, 10315], [10190, 10316, 10315], [10190, 10191, 10317], [10190, 10317, 10316], [10191, 10192, 10317], [10192, 10318, 10317], [10192, 10193, 10319], [10192, 10319, 10318], [10193, 10194, 10319], [10194, 10320, 10319], [10194, 10195, 10321], [10194, 10321, 10320], [10195, 10196, 10321], [10196, 10322, 10321], [10196, 10197, 10323], [10196, 10323, 10322], [10197, 10198, 10323], [10198, 10324, 10323], [10198, 10199, 10325], [10198, 10325, 10324], [10199, 10200, 10325], [10200, 10326, 10325], [10200, 10201, 10327], [10200, 10327, 10326], [10201, 10202, 10327], [10202, 10328, 10327], [10202, 10203, 10329], [10202, 10329, 10328], [10203, 10204, 10329], [10204, 10330, 10329], [10204, 10205, 10331], [10204, 10331, 10330], [10205, 10206, 10331], [10206, 10332, 10331], [10206, 10207, 10333], [10206, 10333, 10332], [10207, 10208, 10333], [10208, 10334, 10333], [10208, 10209, 10335], 
[10208, 10335, 10334], [10209, 10210, 10335], [10210, 10336, 10335], [10210, 10211, 10337], [10210, 10337, 10336], [10212, 10213, 10341], [10213, 10342, 10341], [10213, 10214, 10343], [10213, 10343, 10342], [10214, 10215, 10343], [10215, 10344, 10343], [10215, 10216, 10345], [10215, 10345, 10344], [10216, 10217, 10345], [10217, 10346, 10345], [10217, 10218, 10347], [10217, 10347, 10346], [10218, 10219, 10347], [10219, 10348, 10347], [10219, 10220, 10349], [10219, 10349, 10348], [10220, 10221, 10349], [10221, 10350, 10349], [10221, 10222, 10351], [10221, 10351, 10350], [10222, 10223, 10351], [10223, 10352, 10351], [10223, 10224, 10353], [10223, 10353, 10352], [10224, 10225, 10353], [10225, 10354, 10353], [10225, 10226, 10355], [10225, 10355, 10354], [10226, 10227, 10355], [10227, 10356, 10355], [10227, 10228, 10357], [10227, 10357, 10356], [10228, 10229, 10357], [10229, 10358, 10357], [10229, 10230, 10359], [10229, 10359, 10358], [10230, 10231, 10359], [10231, 10360, 10359], [10231, 10232, 10361], [10231, 10361, 10360], [10232, 10233, 10361], [10233, 10362, 10361], [10233, 10234, 10363], [10233, 10363, 10362], [10234, 10235, 10363], [10235, 10364, 10363], [10235, 10236, 10365], [10235, 10365, 10364], [10237, 10238, 10367], [10237, 10367, 10366], [10238, 10239, 10367], [10239, 10368, 10367], [10239, 10240, 10369], [10239, 10369, 10368], [10240, 10241, 10369], [10241, 10370, 10369], [10241, 10242, 10371], [10241, 10371, 10370], [10242, 10243, 10371], [10243, 10372, 10371], [10243, 10244, 10373], [10243, 10373, 10372], [10244, 10245, 10373], [10245, 10374, 10373], [10245, 10246, 10375], [10245, 10375, 10374], [10246, 10247, 10375], [10247, 10376, 10375], [10247, 10248, 10377], [10247, 10377, 10376], [10248, 10249, 10377], [10249, 10378, 10377], [10249, 10250, 10379], [10249, 10379, 10378], [10250, 10251, 10379], [10251, 10380, 10379], [10251, 10252, 10381], [10251, 10381, 10380], [10252, 10253, 10381], [10253, 10382, 10381], [10253, 10254, 10383], [10253, 10383, 10382], [10254, 10255, 10383], [10255, 10384, 10383], [10255, 10256, 10385], [10255, 10385, 10384], [10256, 10257, 10385], [10257, 10386, 10385], [10257, 10258, 10387], [10257, 10387, 10386], [10258, 10259, 10387], [10259, 10388, 10387], [10259, 10260, 10389], [10259, 10389, 10388], [10260, 10261, 10389], [10261, 10390, 10389], [10261, 10262, 10391], [10261, 10391, 10390], [10262, 10263, 10391], [10263, 10392, 10391], [10263, 10264, 10393], [10263, 10393, 10392], [10264, 10265, 10393], [10265, 10394, 10393], [10265, 10266, 10395], [10265, 10395, 10394], [10266, 10267, 10395], [10267, 10396, 10395], [10267, 10268, 10397], [10267, 10397, 10396], [10268, 10269, 10397], [10269, 10398, 10397], [10269, 10270, 10399], [10269, 10399, 10398], [10270, 10271, 10399], [10271, 10400, 10399], [10271, 10272, 10401], [10271, 10401, 10400], [10272, 10273, 10401], [10273, 10402, 10401], [10273, 10274, 10403], [10273, 10403, 10402], [10274, 10275, 10403], [10275, 10404, 10403], [10275, 10276, 10405], [10275, 10405, 10404], [10276, 10277, 10405], [10277, 10406, 10405], [10277, 10278, 10407], [10277, 10407, 10406], [10278, 10279, 10407], [10279, 10408, 10407], [10279, 10280, 10409], [10279, 10409, 10408], [10280, 10281, 10409], [10281, 10410, 10409], [10281, 10282, 10411], [10281, 10411, 10410], [10282, 10283, 10411], [10283, 10412, 10411], [10283, 10284, 10413], [10283, 10413, 10412], [10284, 10285, 10413], [10285, 10414, 10413], [10285, 10286, 10415], [10285, 10415, 10414], [10286, 10287, 10415], [10287, 10416, 10415], [10287, 10288, 10417], [10287, 
10417, 10416], [10288, 10289, 10417], [10289, 10418, 10417], [10289, 10290, 10419], [10289, 10419, 10418], [10290, 10291, 10419], [10291, 10420, 10419], [10291, 10292, 10421], [10291, 10421, 10420], [10292, 10293, 10421], [10293, 10422, 10421], [10293, 10294, 10423], [10293, 10423, 10422], [10294, 10295, 10423], [10295, 10424, 10423], [10295, 10296, 10425], [10295, 10425, 10424], [10296, 10297, 10425], [10297, 10426, 10425], [10297, 10298, 10427], [10297, 10427, 10426], [10298, 10299, 10427], [10299, 10428, 10427], [10299, 10300, 10429], [10299, 10429, 10428], [10300, 10301, 10429], [10301, 10430, 10429], [10301, 10302, 10431], [10301, 10431, 10430], [10302, 10303, 10431], [10303, 10432, 10431], [10303, 10304, 10433], [10303, 10433, 10432], [10304, 10305, 10433], [10305, 10434, 10433], [10305, 10306, 10435], [10305, 10435, 10434], [10306, 10307, 10435], [10307, 10436, 10435], [10307, 10308, 10437], [10307, 10437, 10436], [10308, 10309, 10437], [10309, 10438, 10437], [10309, 10310, 10439], [10309, 10439, 10438], [10310, 10311, 10439], [10311, 10440, 10439], [10311, 10312, 10441], [10311, 10441, 10440], [10312, 10313, 10441], [10313, 10442, 10441], [10313, 10314, 10443], [10313, 10443, 10442], [10314, 10315, 10443], [10315, 10444, 10443], [10315, 10316, 10445], [10315, 10445, 10444], [10316, 10317, 10445], [10317, 10446, 10445], [10317, 10318, 10447], [10317, 10447, 10446], [10318, 10319, 10447], [10319, 10448, 10447], [10319, 10320, 10449], [10319, 10449, 10448], [10320, 10321, 10449], [10321, 10450, 10449], [10321, 10322, 10451], [10321, 10451, 10450], [10322, 10323, 10451], [10323, 10452, 10451], [10323, 10324, 10453], [10323, 10453, 10452], [10324, 10325, 10453], [10325, 10454, 10453], [10325, 10326, 10455], [10325, 10455, 10454], [10326, 10327, 10455], [10327, 10456, 10455], [10327, 10328, 10457], [10327, 10457, 10456], [10328, 10329, 10457], [10329, 10458, 10457], [10329, 10330, 10459], [10329, 10459, 10458], [10330, 10331, 10459], [10331, 10460, 10459], [10331, 10332, 10461], [10331, 10461, 10460], [10332, 10333, 10461], [10333, 10462, 10461], [10333, 10334, 10463], [10333, 10463, 10462], [10334, 10335, 10463], [10335, 10464, 10463], [10335, 10336, 10465], [10335, 10465, 10464], [10336, 10337, 10465], [10337, 10466, 10465], [10337, 10338, 10467], [10337, 10467, 10466], [10338, 10339, 10467], [10339, 10468, 10467], [10339, 10340, 10469], [10339, 10469, 10468], [10340, 10341, 10469], [10341, 10470, 10469], [10341, 10342, 10471], [10341, 10471, 10470], [10342, 10343, 10471], [10343, 10472, 10471], [10343, 10344, 10473], [10343, 10473, 10472], [10344, 10345, 10473], [10345, 10474, 10473], [10345, 10346, 10475], [10345, 10475, 10474], [10346, 10347, 10475], [10347, 10476, 10475], [10347, 10348, 10477], [10347, 10477, 10476], [10348, 10349, 10477], [10349, 10478, 10477], [10349, 10350, 10479], [10349, 10479, 10478], [10350, 10351, 10479], [10351, 10480, 10479], [10351, 10352, 10481], [10351, 10481, 10480], [10352, 10353, 10481], [10353, 10482, 10481], [10353, 10354, 10483], [10353, 10483, 10482], [10354, 10355, 10483], [10355, 10484, 10483], [10355, 10356, 10485], [10355, 10485, 10484], [10356, 10357, 10485], [10357, 10486, 10485], [10357, 10358, 10487], [10357, 10487, 10486], [10358, 10359, 10487], [10359, 10488, 10487], [10359, 10360, 10489], [10359, 10489, 10488], [10360, 10361, 10489], [10361, 10490, 10489], [10361, 10362, 10491], [10361, 10491, 10490], [10362, 10363, 10491], [10363, 10492, 10491], [10363, 10364, 10493], [10363, 10493, 10492], [10364, 10365, 10493], [10365, 10494, 
10493], [10366, 10367, 10495], [10367, 10496, 10495], [10367, 10368, 10497], [10367, 10497, 10496], [10368, 10369, 10497], [10369, 10498, 10497], [10369, 10370, 10499], [10369, 10499, 10498], [10370, 10371, 10499], [10371, 10500, 10499], [10371, 10372, 10501], [10371, 10501, 10500], [10372, 10373, 10501], [10373, 10502, 10501], [10373, 10374, 10503], [10373, 10503, 10502], [10374, 10375, 10503], [10375, 10504, 10503], [10375, 10376, 10505], [10375, 10505, 10504], [10376, 10377, 10505], [10377, 10506, 10505], [10377, 10378, 10507], [10377, 10507, 10506], [10378, 10379, 10507], [10379, 10508, 10507], [10379, 10380, 10509], [10379, 10509, 10508], [10380, 10381, 10509], [10381, 10510, 10509], [10381, 10382, 10511], [10381, 10511, 10510], [10382, 10383, 10511], [10383, 10512, 10511], [10383, 10384, 10513], [10383, 10513, 10512], [10384, 10385, 10513], [10385, 10514, 10513], [10385, 10386, 10515], [10385, 10515, 10514], [10386, 10387, 10515], [10387, 10516, 10515], [10387, 10388, 10517], [10387, 10517, 10516], [10388, 10389, 10517], [10389, 10518, 10517], [10389, 10390, 10519], [10389, 10519, 10518], [10390, 10391, 10519], [10391, 10520, 10519], [10391, 10392, 10521], [10391, 10521, 10520], [10392, 10393, 10521], [10393, 10522, 10521], [10393, 10394, 10523], [10393, 10523, 10522], [10394, 10395, 10523], [10395, 10524, 10523], [10395, 10396, 10525], [10395, 10525, 10524], [10396, 10397, 10525], [10397, 10526, 10525], [10397, 10398, 10527], [10397, 10527, 10526], [10398, 10399, 10527], [10399, 10528, 10527], [10399, 10400, 10529], [10399, 10529, 10528], [10400, 10401, 10529], [10401, 10530, 10529], [10401, 10402, 10531], [10401, 10531, 10530], [10402, 10403, 10531], [10403, 10532, 10531], [10403, 10404, 10533], [10403, 10533, 10532], [10404, 10405, 10533], [10405, 10534, 10533], [10405, 10406, 10535], [10405, 10535, 10534], [10406, 10407, 10535], [10407, 10536, 10535], [10407, 10408, 10537], [10407, 10537, 10536], [10408, 10409, 10537], [10409, 10538, 10537], [10409, 10410, 10539], [10409, 10539, 10538], [10410, 10411, 10539], [10411, 10540, 10539], [10411, 10412, 10541], [10411, 10541, 10540], [10412, 10413, 10541], [10413, 10542, 10541], [10413, 10414, 10543], [10413, 10543, 10542], [10414, 10415, 10543], [10415, 10544, 10543], [10415, 10416, 10545], [10415, 10545, 10544], [10416, 10417, 10545], [10417, 10546, 10545], [10417, 10418, 10547], [10417, 10547, 10546], [10418, 10419, 10547], [10419, 10548, 10547], [10419, 10420, 10549], [10419, 10549, 10548], [10420, 10421, 10549], [10421, 10550, 10549], [10421, 10422, 10551], [10421, 10551, 10550], [10422, 10423, 10551], [10423, 10552, 10551], [10423, 10424, 10553], [10423, 10553, 10552], [10424, 10425, 10553], [10425, 10554, 10553], [10425, 10426, 10555], [10425, 10555, 10554], [10426, 10427, 10555], [10427, 10556, 10555], [10427, 10428, 10557], [10427, 10557, 10556], [10428, 10429, 10557], [10429, 10558, 10557], [10429, 10430, 10559], [10429, 10559, 10558], [10430, 10431, 10559], [10431, 10560, 10559], [10431, 10432, 10561], [10431, 10561, 10560], [10432, 10433, 10561], [10433, 10562, 10561], [10433, 10434, 10563], [10433, 10563, 10562], [10434, 10435, 10563], [10435, 10564, 10563], [10435, 10436, 10565], [10435, 10565, 10564], [10436, 10437, 10565], [10437, 10566, 10565], [10437, 10438, 10567], [10437, 10567, 10566], [10438, 10439, 10567], [10439, 10568, 10567], [10439, 10440, 10569], [10439, 10569, 10568], [10440, 10441, 10569], [10441, 10570, 10569], [10441, 10442, 10571], [10441, 10571, 10570], [10442, 10443, 10571], [10443, 10572, 10571], 
[10443, 10444, 10573], [10443, 10573, 10572], [10444, 10445, 10573], [10445, 10574, 10573], [10445, 10446, 10575], [10445, 10575, 10574], [10446, 10447, 10575], [10447, 10576, 10575], [10447, 10448, 10577], [10447, 10577, 10576], [10448, 10449, 10577], [10449, 10578, 10577], [10449, 10450, 10579], [10449, 10579, 10578], [10450, 10451, 10579], [10451, 10580, 10579], [10451, 10452, 10581], [10451, 10581, 10580], [10452, 10453, 10581], [10453, 10582, 10581], [10453, 10454, 10583], [10453, 10583, 10582], [10454, 10455, 10583], [10455, 10584, 10583], [10455, 10456, 10585], [10455, 10585, 10584], [10456, 10457, 10585], [10457, 10586, 10585], [10457, 10458, 10587], [10457, 10587, 10586], [10458, 10459, 10587], [10459, 10588, 10587], [10459, 10460, 10589], [10459, 10589, 10588], [10460, 10461, 10589], [10461, 10590, 10589], [10461, 10462, 10591], [10461, 10591, 10590], [10462, 10463, 10591], [10463, 10592, 10591], [10463, 10464, 10593], [10463, 10593, 10592], [10464, 10465, 10593], [10465, 10594, 10593], [10465, 10466, 10595], [10465, 10595, 10594], [10466, 10467, 10595], [10467, 10596, 10595], [10467, 10468, 10597], [10467, 10597, 10596], [10468, 10469, 10597], [10469, 10598, 10597], [10469, 10470, 10599], [10469, 10599, 10598], [10470, 10471, 10599], [10471, 10600, 10599], [10471, 10472, 10601], [10471, 10601, 10600], [10472, 10473, 10601], [10473, 10602, 10601], [10473, 10474, 10603], [10473, 10603, 10602], [10474, 10475, 10603], [10475, 10604, 10603], [10475, 10476, 10605], [10475, 10605, 10604], [10476, 10477, 10605], [10477, 10606, 10605], [10477, 10478, 10607], [10477, 10607, 10606], [10478, 10479, 10607], [10479, 10608, 10607], [10479, 10480, 10609], [10479, 10609, 10608], [10480, 10481, 10609], [10481, 10610, 10609], [10481, 10482, 10611], [10481, 10611, 10610], [10482, 10483, 10611], [10483, 10612, 10611], [10483, 10484, 10613], [10483, 10613, 10612], [10484, 10485, 10613], [10485, 10614, 10613], [10485, 10486, 10615], [10485, 10615, 10614], [10486, 10487, 10615], [10487, 10616, 10615], [10487, 10488, 10617], [10487, 10617, 10616], [10488, 10489, 10617], [10489, 10618, 10617], [10489, 10490, 10619], [10489, 10619, 10618], [10490, 10491, 10619], [10491, 10620, 10619], [10491, 10492, 10621], [10491, 10621, 10620], [10492, 10493, 10621], [10493, 10622, 10621], [10493, 10494, 10623], [10493, 10623, 10622], [10495, 10496, 10625], [10495, 10625, 10624], [10496, 10497, 10625], [10497, 10626, 10625], [10497, 10498, 10627], [10497, 10627, 10626], [10498, 10499, 10627], [10499, 10628, 10627], [10499, 10500, 10629], [10499, 10629, 10628], [10500, 10501, 10629], [10501, 10630, 10629], [10501, 10502, 10631], [10501, 10631, 10630], [10502, 10503, 10631], [10503, 10632, 10631], [10503, 10504, 10633], [10503, 10633, 10632], [10504, 10505, 10633], [10505, 10634, 10633], [10505, 10506, 10635], [10505, 10635, 10634], [10506, 10507, 10635], [10507, 10636, 10635], [10507, 10508, 10637], [10507, 10637, 10636], [10508, 10509, 10637], [10509, 10638, 10637], [10509, 10510, 10639], [10509, 10639, 10638], [10510, 10511, 10639], [10511, 10640, 10639], [10511, 10512, 10641], [10511, 10641, 10640], [10512, 10513, 10641], [10513, 10642, 10641], [10513, 10514, 10643], [10513, 10643, 10642], [10514, 10515, 10643], [10515, 10644, 10643], [10515, 10516, 10645], [10515, 10645, 10644], [10516, 10517, 10645], [10517, 10646, 10645], [10517, 10518, 10647], [10517, 10647, 10646], [10518, 10519, 10647], [10519, 10648, 10647], [10519, 10520, 10649], [10519, 10649, 10648], [10520, 10521, 10649], [10521, 10650, 10649], [10521, 
10522, 10651], [10521, 10651, 10650], [10522, 10523, 10651], [10523, 10652, 10651], [10523, 10524, 10653], [10523, 10653, 10652], [10524, 10525, 10653], [10525, 10654, 10653], [10525, 10526, 10655], [10525, 10655, 10654], [10526, 10527, 10655], [10527, 10656, 10655], [10527, 10528, 10657], [10527, 10657, 10656], [10528, 10529, 10657], [10529, 10658, 10657], [10529, 10530, 10659], [10529, 10659, 10658], [10530, 10531, 10659], [10531, 10660, 10659], [10531, 10532, 10661], [10531, 10661, 10660], [10532, 10533, 10661], [10533, 10662, 10661], [10533, 10534, 10663], [10533, 10663, 10662], [10534, 10535, 10663], [10535, 10664, 10663], [10535, 10536, 10665], [10535, 10665, 10664], [10536, 10537, 10665], [10537, 10666, 10665], [10537, 10538, 10667], [10537, 10667, 10666], [10538, 10539, 10667], [10539, 10668, 10667], [10539, 10540, 10669], [10539, 10669, 10668], [10540, 10541, 10669], [10541, 10670, 10669], [10541, 10542, 10671], [10541, 10671, 10670], [10542, 10543, 10671], [10543, 10672, 10671], [10543, 10544, 10673], [10543, 10673, 10672], [10544, 10545, 10673], [10545, 10674, 10673], [10545, 10546, 10675], [10545, 10675, 10674], [10546, 10547, 10675], [10547, 10676, 10675], [10547, 10548, 10677], [10547, 10677, 10676], [10548, 10549, 10677], [10549, 10678, 10677], [10549, 10550, 10679], [10549, 10679, 10678], [10550, 10551, 10679], [10551, 10680, 10679], [10551, 10552, 10681], [10551, 10681, 10680], [10552, 10553, 10681], [10553, 10682, 10681], [10553, 10554, 10683], [10553, 10683, 10682], [10554, 10555, 10683], [10555, 10684, 10683], [10555, 10556, 10685], [10555, 10685, 10684], [10556, 10557, 10685], [10557, 10686, 10685], [10557, 10558, 10687], [10557, 10687, 10686], [10558, 10559, 10687], [10559, 10688, 10687], [10559, 10560, 10689], [10559, 10689, 10688], [10560, 10561, 10689], [10561, 10690, 10689], [10561, 10562, 10691], [10561, 10691, 10690], [10562, 10563, 10691], [10563, 10692, 10691], [10563, 10564, 10693], [10563, 10693, 10692], [10564, 10565, 10693], [10565, 10694, 10693], [10565, 10566, 10695], [10565, 10695, 10694], [10566, 10567, 10695], [10567, 10696, 10695], [10567, 10568, 10697], [10567, 10697, 10696], [10568, 10569, 10697], [10569, 10698, 10697], [10569, 10570, 10699], [10569, 10699, 10698], [10570, 10571, 10699], [10571, 10700, 10699], [10571, 10572, 10701], [10571, 10701, 10700], [10572, 10573, 10701], [10573, 10702, 10701], [10573, 10574, 10703], [10573, 10703, 10702], [10574, 10575, 10703], [10575, 10704, 10703], [10575, 10576, 10705], [10575, 10705, 10704], [10576, 10577, 10705], [10577, 10706, 10705], [10577, 10578, 10707], [10577, 10707, 10706], [10578, 10579, 10707], [10579, 10708, 10707], [10579, 10580, 10709], [10579, 10709, 10708], [10580, 10581, 10709], [10581, 10710, 10709], [10581, 10582, 10711], [10581, 10711, 10710], [10582, 10583, 10711], [10583, 10712, 10711], [10583, 10584, 10713], [10583, 10713, 10712], [10584, 10585, 10713], [10585, 10714, 10713], [10585, 10586, 10715], [10585, 10715, 10714], [10586, 10587, 10715], [10587, 10716, 10715], [10587, 10588, 10717], [10587, 10717, 10716], [10588, 10589, 10717], [10589, 10718, 10717], [10589, 10590, 10719], [10589, 10719, 10718], [10590, 10591, 10719], [10591, 10720, 10719], [10591, 10592, 10721], [10591, 10721, 10720], [10592, 10593, 10721], [10593, 10722, 10721], [10593, 10594, 10723], [10593, 10723, 10722], [10594, 10595, 10723], [10595, 10724, 10723], [10595, 10596, 10725], [10595, 10725, 10724], [10596, 10597, 10725], [10597, 10726, 10725], [10597, 10598, 10727], [10597, 10727, 10726], [10598, 10599, 
10727], [10599, 10728, 10727], [10599, 10600, 10729], [10599, 10729, 10728], [10600, 10601, 10729], [10601, 10730, 10729], [10601, 10602, 10731], [10601, 10731, 10730], [10602, 10603, 10731], [10603, 10732, 10731], [10603, 10604, 10733], [10603, 10733, 10732], [10604, 10605, 10733], [10605, 10734, 10733], [10605, 10606, 10735], [10605, 10735, 10734], [10606, 10607, 10735], [10607, 10736, 10735], [10607, 10608, 10737], [10607, 10737, 10736], [10608, 10609, 10737], [10609, 10738, 10737], [10609, 10610, 10739], [10609, 10739, 10738], [10610, 10611, 10739], [10611, 10740, 10739], [10611, 10612, 10741], [10611, 10741, 10740], [10612, 10613, 10741], [10613, 10742, 10741], [10613, 10614, 10743], [10613, 10743, 10742], [10614, 10615, 10743], [10615, 10744, 10743], [10615, 10616, 10745], [10615, 10745, 10744], [10616, 10617, 10745], [10617, 10746, 10745], [10617, 10618, 10747], [10617, 10747, 10746], [10618, 10619, 10747], [10619, 10748, 10747], [10619, 10620, 10749], [10619, 10749, 10748], [10620, 10621, 10749], [10621, 10750, 10749], [10621, 10622, 10751], [10621, 10751, 10750], [10622, 10623, 10751], [10623, 10752, 10751], [10624, 10625, 10753], [10625, 10754, 10753], [10625, 10626, 10755], [10625, 10755, 10754], [10626, 10627, 10755], [10627, 10756, 10755], [10627, 10628, 10757], [10627, 10757, 10756], [10628, 10629, 10757], [10629, 10758, 10757], [10629, 10630, 10759], [10629, 10759, 10758], [10630, 10631, 10759], [10631, 10760, 10759], [10631, 10632, 10761], [10631, 10761, 10760], [10632, 10633, 10761], [10633, 10762, 10761], [10633, 10634, 10763], [10633, 10763, 10762], [10634, 10635, 10763], [10635, 10764, 10763], [10635, 10636, 10765], [10635, 10765, 10764], [10636, 10637, 10765], [10637, 10766, 10765], [10637, 10638, 10767], [10637, 10767, 10766], [10638, 10639, 10767], [10639, 10768, 10767], [10639, 10640, 10769], [10639, 10769, 10768], [10640, 10641, 10769], [10641, 10770, 10769], [10641, 10642, 10771], [10641, 10771, 10770], [10642, 10643, 10771], [10643, 10772, 10771], [10643, 10644, 10773], [10643, 10773, 10772], [10644, 10645, 10773], [10645, 10774, 10773], [10645, 10646, 10775], [10645, 10775, 10774], [10646, 10647, 10775], [10647, 10776, 10775], [10647, 10648, 10777], [10647, 10777, 10776], [10648, 10649, 10777], [10649, 10778, 10777], [10649, 10650, 10779], [10649, 10779, 10778], [10650, 10651, 10779], [10651, 10780, 10779], [10651, 10652, 10781], [10651, 10781, 10780], [10652, 10653, 10781], [10653, 10782, 10781], [10653, 10654, 10783], [10653, 10783, 10782], [10654, 10655, 10783], [10655, 10784, 10783], [10655, 10656, 10785], [10655, 10785, 10784], [10656, 10657, 10785], [10657, 10786, 10785], [10657, 10658, 10787], [10657, 10787, 10786], [10658, 10659, 10787], [10659, 10788, 10787], [10659, 10660, 10789], [10659, 10789, 10788], [10660, 10661, 10789], [10661, 10790, 10789], [10661, 10662, 10791], [10661, 10791, 10790], [10662, 10663, 10791], [10663, 10792, 10791], [10663, 10664, 10793], [10663, 10793, 10792], [10664, 10665, 10793], [10665, 10794, 10793], [10665, 10666, 10795], [10665, 10795, 10794], [10666, 10667, 10795], [10667, 10796, 10795], [10667, 10668, 10797], [10667, 10797, 10796], [10668, 10669, 10797], [10669, 10798, 10797], [10669, 10670, 10799], [10669, 10799, 10798], [10670, 10671, 10799], [10671, 10800, 10799], [10671, 10672, 10801], [10671, 10801, 10800], [10672, 10673, 10801], [10673, 10802, 10801], [10673, 10674, 10803], [10673, 10803, 10802], [10674, 10675, 10803], [10675, 10804, 10803], [10675, 10676, 10805], [10675, 10805, 10804], [10676, 10677, 10805], 
[10677, 10806, 10805], [10677, 10678, 10807], [10677, 10807, 10806], [10678, 10679, 10807], [10679, 10808, 10807], [10679, 10680, 10809], [10679, 10809, 10808], [10680, 10681, 10809], [10681, 10810, 10809], [10681, 10682, 10811], [10681, 10811, 10810], [10682, 10683, 10811], [10683, 10812, 10811], [10683, 10684, 10813], [10683, 10813, 10812], [10684, 10685, 10813], [10685, 10814, 10813], [10685, 10686, 10815], [10685, 10815, 10814], [10686, 10687, 10815], [10687, 10816, 10815], [10687, 10688, 10817], [10687, 10817, 10816], [10688, 10689, 10817], [10689, 10818, 10817], [10689, 10690, 10819], [10689, 10819, 10818], [10690, 10691, 10819], [10691, 10820, 10819], [10691, 10692, 10821], [10691, 10821, 10820], [10692, 10693, 10821], [10693, 10822, 10821], [10693, 10694, 10823], [10693, 10823, 10822], [10694, 10695, 10823], [10695, 10824, 10823], [10695, 10696, 10825], [10695, 10825, 10824], [10696, 10697, 10825], [10697, 10826, 10825], [10697, 10698, 10827], [10697, 10827, 10826], [10698, 10699, 10827], [10699, 10828, 10827], [10699, 10700, 10829], [10699, 10829, 10828], [10700, 10701, 10829], [10701, 10830, 10829], [10701, 10702, 10831], [10701, 10831, 10830], [10702, 10703, 10831], [10703, 10832, 10831], [10703, 10704, 10833], [10703, 10833, 10832], [10704, 10705, 10833], [10705, 10834, 10833], [10705, 10706, 10835], [10705, 10835, 10834], [10706, 10707, 10835], [10707, 10836, 10835], [10707, 10708, 10837], [10707, 10837, 10836], [10708, 10709, 10837], [10709, 10838, 10837], [10709, 10710, 10839], [10709, 10839, 10838], [10710, 10711, 10839], [10711, 10840, 10839], [10711, 10712, 10841], [10711, 10841, 10840], [10712, 10713, 10841], [10713, 10842, 10841], [10713, 10714, 10843], [10713, 10843, 10842], [10714, 10715, 10843], [10715, 10844, 10843], [10715, 10716, 10845], [10715, 10845, 10844], [10716, 10717, 10845], [10717, 10846, 10845], [10717, 10718, 10847], [10717, 10847, 10846], [10718, 10719, 10847], [10719, 10848, 10847], [10719, 10720, 10849], [10719, 10849, 10848], [10720, 10721, 10849], [10721, 10850, 10849], [10721, 10722, 10851], [10721, 10851, 10850], [10722, 10723, 10851], [10723, 10852, 10851], [10723, 10724, 10853], [10723, 10853, 10852], [10724, 10725, 10853], [10725, 10854, 10853], [10725, 10726, 10855], [10725, 10855, 10854], [10726, 10727, 10855], [10727, 10856, 10855], [10727, 10728, 10857], [10727, 10857, 10856], [10728, 10729, 10857], [10729, 10858, 10857], [10729, 10730, 10859], [10729, 10859, 10858], [10730, 10731, 10859], [10731, 10860, 10859], [10731, 10732, 10861], [10731, 10861, 10860], [10732, 10733, 10861], [10733, 10862, 10861], [10733, 10734, 10863], [10733, 10863, 10862], [10734, 10735, 10863], [10735, 10864, 10863], [10735, 10736, 10865], [10735, 10865, 10864], [10736, 10737, 10865], [10737, 10866, 10865], [10737, 10738, 10867], [10737, 10867, 10866], [10738, 10739, 10867], [10739, 10868, 10867], [10739, 10740, 10869], [10739, 10869, 10868], [10740, 10741, 10869], [10741, 10870, 10869], [10741, 10742, 10871], [10741, 10871, 10870], [10742, 10743, 10871], [10743, 10872, 10871], [10743, 10744, 10873], [10743, 10873, 10872], [10744, 10745, 10873], [10745, 10874, 10873], [10745, 10746, 10875], [10745, 10875, 10874], [10746, 10747, 10875], [10747, 10876, 10875], [10747, 10748, 10877], [10747, 10877, 10876], [10748, 10749, 10877], [10749, 10878, 10877], [10749, 10750, 10879], [10749, 10879, 10878], [10750, 10751, 10879], [10751, 10880, 10879], [10751, 10752, 10881], [10751, 10881, 10880], [10753, 10754, 10883], [10753, 10883, 10882], [10754, 10755, 10883], [10755, 
10884, 10883], [10755, 10756, 10885], [10755, 10885, 10884], [10756, 10757, 10885], [10757, 10886, 10885], [10757, 10758, 10887], [10757, 10887, 10886], [10758, 10759, 10887], [10759, 10888, 10887], [10759, 10760, 10889], [10759, 10889, 10888], [10760, 10761, 10889], [10761, 10890, 10889], [10761, 10762, 10891], [10761, 10891, 10890], [10762, 10763, 10891], [10763, 10892, 10891], [10763, 10764, 10893], [10763, 10893, 10892], [10764, 10765, 10893], [10765, 10894, 10893], [10765, 10766, 10895], [10765, 10895, 10894], [10766, 10767, 10895], [10767, 10896, 10895], [10767, 10768, 10897], [10767, 10897, 10896], [10768, 10769, 10897], [10769, 10898, 10897], [10769, 10770, 10899], [10769, 10899, 10898], [10770, 10771, 10899], [10771, 10900, 10899], [10771, 10772, 10901], [10771, 10901, 10900], [10772, 10773, 10901], [10773, 10902, 10901], [10773, 10774, 10903], [10773, 10903, 10902], [10774, 10775, 10903], [10775, 10904, 10903], [10775, 10776, 10905], [10775, 10905, 10904], [10776, 10777, 10905], [10777, 10906, 10905], [10777, 10778, 10907], [10777, 10907, 10906], [10778, 10779, 10907], [10779, 10908, 10907], [10779, 10780, 10909], [10779, 10909, 10908], [10780, 10781, 10909], [10781, 10910, 10909], [10781, 10782, 10911], [10781, 10911, 10910], [10782, 10783, 10911], [10783, 10912, 10911], [10783, 10784, 10913], [10783, 10913, 10912], [10784, 10785, 10913], [10785, 10914, 10913], [10785, 10786, 10915], [10785, 10915, 10914], [10786, 10787, 10915], [10787, 10916, 10915], [10787, 10788, 10917], [10787, 10917, 10916], [10788, 10789, 10917], [10789, 10918, 10917], [10789, 10790, 10919], [10789, 10919, 10918], [10790, 10791, 10919], [10791, 10920, 10919], [10791, 10792, 10921], [10791, 10921, 10920], [10792, 10793, 10921], [10793, 10922, 10921], [10793, 10794, 10923], [10793, 10923, 10922], [10794, 10795, 10923], [10795, 10924, 10923], [10795, 10796, 10925], [10795, 10925, 10924], [10796, 10797, 10925], [10797, 10926, 10925], [10797, 10798, 10927], [10797, 10927, 10926], [10798, 10799, 10927], [10799, 10928, 10927], [10799, 10800, 10929], [10799, 10929, 10928], [10800, 10801, 10929], [10801, 10930, 10929], [10801, 10802, 10931], [10801, 10931, 10930], [10802, 10803, 10931], [10803, 10932, 10931], [10803, 10804, 10933], [10803, 10933, 10932], [10804, 10805, 10933], [10805, 10934, 10933], [10805, 10806, 10935], [10805, 10935, 10934], [10806, 10807, 10935], [10807, 10936, 10935], [10807, 10808, 10937], [10807, 10937, 10936], [10808, 10809, 10937], [10809, 10938, 10937], [10809, 10810, 10939], [10809, 10939, 10938], [10810, 10811, 10939], [10811, 10940, 10939], [10811, 10812, 10941], [10811, 10941, 10940], [10812, 10813, 10941], [10813, 10942, 10941], [10813, 10814, 10943], [10813, 10943, 10942], [10814, 10815, 10943], [10815, 10944, 10943], [10815, 10816, 10945], [10815, 10945, 10944], [10816, 10817, 10945], [10817, 10946, 10945], [10817, 10818, 10947], [10817, 10947, 10946], [10818, 10819, 10947], [10819, 10948, 10947], [10819, 10820, 10949], [10819, 10949, 10948], [10820, 10821, 10949], [10821, 10950, 10949], [10821, 10822, 10951], [10821, 10951, 10950], [10822, 10823, 10951], [10823, 10952, 10951], [10823, 10824, 10953], [10823, 10953, 10952], [10824, 10825, 10953], [10825, 10954, 10953], [10825, 10826, 10955], [10825, 10955, 10954], [10826, 10827, 10955], [10827, 10956, 10955], [10827, 10828, 10957], [10827, 10957, 10956], [10828, 10829, 10957], [10829, 10958, 10957], [10829, 10830, 10959], [10829, 10959, 10958], [10830, 10831, 10959], [10831, 10960, 10959], [10831, 10832, 10961], [10831, 10961, 
10960], [10832, 10833, 10961], [10833, 10962, 10961], [10833, 10834, 10963], [10833, 10963, 10962], [10834, 10835, 10963], [10835, 10964, 10963], [10835, 10836, 10965], [10835, 10965, 10964], [10836, 10837, 10965], [10837, 10966, 10965], [10837, 10838, 10967], [10837, 10967, 10966], [10838, 10839, 10967], [10839, 10968, 10967], [10839, 10840, 10969], [10839, 10969, 10968], [10840, 10841, 10969], [10841, 10970, 10969], [10841, 10842, 10971], [10841, 10971, 10970], [10842, 10843, 10971], [10843, 10972, 10971], [10843, 10844, 10973], [10843, 10973, 10972], [10844, 10845, 10973], [10845, 10974, 10973], [10845, 10846, 10975], [10845, 10975, 10974], [10846, 10847, 10975], [10847, 10976, 10975], [10847, 10848, 10977], [10847, 10977, 10976], [10848, 10849, 10977], [10849, 10978, 10977], [10849, 10850, 10979], [10849, 10979, 10978], [10850, 10851, 10979], [10851, 10980, 10979], [10851, 10852, 10981], [10851, 10981, 10980], [10852, 10853, 10981], [10853, 10982, 10981], [10853, 10854, 10983], [10853, 10983, 10982], [10854, 10855, 10983], [10855, 10984, 10983], [10855, 10856, 10985], [10855, 10985, 10984], [10856, 10857, 10985], [10857, 10986, 10985], [10857, 10858, 10987], [10857, 10987, 10986], [10858, 10859, 10987], [10859, 10988, 10987], [10859, 10860, 10989], [10859, 10989, 10988], [10860, 10861, 10989], [10861, 10990, 10989], [10861, 10862, 10991], [10861, 10991, 10990], [10862, 10863, 10991], [10863, 10992, 10991], [10863, 10864, 10993], [10863, 10993, 10992], [10864, 10865, 10993], [10865, 10994, 10993], [10865, 10866, 10995], [10865, 10995, 10994], [10866, 10867, 10995], [10867, 10996, 10995], [10867, 10868, 10997], [10867, 10997, 10996], [10868, 10869, 10997], [10869, 10998, 10997], [10869, 10870, 10999], [10869, 10999, 10998], [10870, 10871, 10999], [10871, 11000, 10999], [10871, 10872, 11001], [10871, 11001, 11000], [10872, 10873, 11001], [10873, 11002, 11001], [10873, 10874, 11003], [10873, 11003, 11002], [10874, 10875, 11003], [10875, 11004, 11003], [10875, 10876, 11005], [10875, 11005, 11004], [10876, 10877, 11005], [10877, 11006, 11005], [10877, 10878, 11007], [10877, 11007, 11006], [10878, 10879, 11007], [10879, 11008, 11007], [10879, 10880, 11009], [10879, 11009, 11008], [10880, 10881, 11009], [10881, 11010, 11009], [10882, 10883, 11011], [10883, 11012, 11011], [10883, 10884, 11013], [10883, 11013, 11012], [10884, 10885, 11013], [10885, 11014, 11013], [10885, 10886, 11015], [10885, 11015, 11014], [10886, 10887, 11015], [10887, 11016, 11015], [10887, 10888, 11017], [10887, 11017, 11016], [10888, 10889, 11017], [10889, 11018, 11017], [10889, 10890, 11019], [10889, 11019, 11018], [10890, 10891, 11019], [10891, 11020, 11019], [10891, 10892, 11021], [10891, 11021, 11020], [10892, 10893, 11021], [10893, 11022, 11021], [10893, 10894, 11023], [10893, 11023, 11022], [10894, 10895, 11023], [10895, 11024, 11023], [10895, 10896, 11025], [10895, 11025, 11024], [10896, 10897, 11025], [10897, 11026, 11025], [10897, 10898, 11027], [10897, 11027, 11026], [10898, 10899, 11027], [10899, 11028, 11027], [10899, 10900, 11029], [10899, 11029, 11028], [10900, 10901, 11029], [10901, 11030, 11029], [10901, 10902, 11031], [10901, 11031, 11030], [10902, 10903, 11031], [10903, 11032, 11031], [10903, 10904, 11033], [10903, 11033, 11032], [10904, 10905, 11033], [10905, 11034, 11033], [10905, 10906, 11035], [10905, 11035, 11034], [10906, 10907, 11035], [10907, 11036, 11035], [10907, 10908, 11037], [10907, 11037, 11036], [10908, 10909, 11037], [10909, 11038, 11037], [10909, 10910, 11039], [10909, 11039, 11038], 
[10910, 10911, 11039], [10911, 11040, 11039], [10911, 10912, 11041], [10911, 11041, 11040], [10912, 10913, 11041], [10913, 11042, 11041], [10913, 10914, 11043], [10913, 11043, 11042], [10914, 10915, 11043], [10915, 11044, 11043], [10915, 10916, 11045], [10915, 11045, 11044], [10916, 10917, 11045], [10917, 11046, 11045], [10917, 10918, 11047], [10917, 11047, 11046], [10918, 10919, 11047], [10919, 11048, 11047], [10919, 10920, 11049], [10919, 11049, 11048], [10920, 10921, 11049], [10921, 11050, 11049], [10921, 10922, 11051], [10921, 11051, 11050], [10922, 10923, 11051], [10923, 11052, 11051], [10923, 10924, 11053], [10923, 11053, 11052], [10924, 10925, 11053], [10925, 11054, 11053], [10925, 10926, 11055], [10925, 11055, 11054], [10926, 10927, 11055], [10927, 11056, 11055], [10927, 10928, 11057], [10927, 11057, 11056], [10928, 10929, 11057], [10929, 11058, 11057], [10929, 10930, 11059], [10929, 11059, 11058], [10930, 10931, 11059], [10931, 11060, 11059], [10931, 10932, 11061], [10931, 11061, 11060], [10932, 10933, 11061], [10933, 11062, 11061], [10933, 10934, 11063], [10933, 11063, 11062], [10934, 10935, 11063], [10935, 11064, 11063], [10935, 10936, 11065], [10935, 11065, 11064], [10936, 10937, 11065], [10937, 11066, 11065], [10937, 10938, 11067], [10937, 11067, 11066], [10938, 10939, 11067], [10939, 11068, 11067], [10939, 10940, 11069], [10939, 11069, 11068], [10940, 10941, 11069], [10941, 11070, 11069], [10941, 10942, 11071], [10941, 11071, 11070], [10942, 10943, 11071], [10943, 11072, 11071], [10943, 10944, 11073], [10943, 11073, 11072], [10944, 10945, 11073], [10945, 11074, 11073], [10945, 10946, 11075], [10945, 11075, 11074], [10946, 10947, 11075], [10947, 11076, 11075], [10947, 10948, 11077], [10947, 11077, 11076], [10948, 10949, 11077], [10949, 11078, 11077], [10949, 10950, 11079], [10949, 11079, 11078], [10950, 10951, 11079], [10951, 11080, 11079], [10951, 10952, 11081], [10951, 11081, 11080], [10952, 10953, 11081], [10953, 11082, 11081], [10953, 10954, 11083], [10953, 11083, 11082], [10954, 10955, 11083], [10955, 11084, 11083], [10955, 10956, 11085], [10955, 11085, 11084], [10956, 10957, 11085], [10957, 11086, 11085], [10957, 10958, 11087], [10957, 11087, 11086], [10958, 10959, 11087], [10959, 11088, 11087], [10959, 10960, 11089], [10959, 11089, 11088], [10960, 10961, 11089], [10961, 11090, 11089], [10961, 10962, 11091], [10961, 11091, 11090], [10962, 10963, 11091], [10963, 11092, 11091], [10963, 10964, 11093], [10963, 11093, 11092], [10964, 10965, 11093], [10965, 11094, 11093], [10965, 10966, 11095], [10965, 11095, 11094], [10966, 10967, 11095], [10967, 11096, 11095], [10967, 10968, 11097], [10967, 11097, 11096], [10968, 10969, 11097], [10969, 11098, 11097], [10969, 10970, 11099], [10969, 11099, 11098], [10970, 10971, 11099], [10971, 11100, 11099], [10971, 10972, 11101], [10971, 11101, 11100], [10972, 10973, 11101], [10973, 11102, 11101], [10973, 10974, 11103], [10973, 11103, 11102], [10974, 10975, 11103], [10975, 11104, 11103], [10975, 10976, 11105], [10975, 11105, 11104], [10976, 10977, 11105], [10977, 11106, 11105], [10977, 10978, 11107], [10977, 11107, 11106], [10978, 10979, 11107], [10979, 11108, 11107], [10979, 10980, 11109], [10979, 11109, 11108], [10980, 10981, 11109], [10981, 11110, 11109], [10981, 10982, 11111], [10981, 11111, 11110], [10982, 10983, 11111], [10983, 11112, 11111], [10983, 10984, 11113], [10983, 11113, 11112], [10984, 10985, 11113], [10985, 11114, 11113], [10985, 10986, 11115], [10985, 11115, 11114], [10986, 10987, 11115], [10987, 11116, 11115], [10987, 
10988, 11117], [10987, 11117, 11116], [10988, 10989, 11117], [10989, 11118, 11117], [10989, 10990, 11119], [10989, 11119, 11118], [10990, 10991, 11119], [10991, 11120, 11119], [10991, 10992, 11121], [10991, 11121, 11120], [10992, 10993, 11121], [10993, 11122, 11121], [10993, 10994, 11123], [10993, 11123, 11122], [10994, 10995, 11123], [10995, 11124, 11123], [10995, 10996, 11125], [10995, 11125, 11124], [10996, 10997, 11125], [10997, 11126, 11125], [10997, 10998, 11127], [10997, 11127, 11126], [10998, 10999, 11127], [10999, 11128, 11127], [10999, 11000, 11129], [10999, 11129, 11128], [11000, 11001, 11129], [11001, 11130, 11129], [11001, 11002, 11131], [11001, 11131, 11130], [11002, 11003, 11131], [11003, 11132, 11131], [11003, 11004, 11133], [11003, 11133, 11132], [11004, 11005, 11133], [11005, 11134, 11133], [11005, 11006, 11135], [11005, 11135, 11134], [11006, 11007, 11135], [11007, 11136, 11135], [11007, 11008, 11137], [11007, 11137, 11136], [11008, 11009, 11137], [11009, 11138, 11137], [11009, 11010, 11139], [11009, 11139, 11138], [11011, 11012, 11141], [11011, 11141, 11140], [11012, 11013, 11141], [11013, 11142, 11141], [11013, 11014, 11143], [11013, 11143, 11142], [11014, 11015, 11143], [11015, 11144, 11143], [11015, 11016, 11145], [11015, 11145, 11144], [11016, 11017, 11145], [11017, 11146, 11145], [11017, 11018, 11147], [11017, 11147, 11146], [11018, 11019, 11147], [11019, 11148, 11147], [11019, 11020, 11149], [11019, 11149, 11148], [11020, 11021, 11149], [11021, 11150, 11149], [11021, 11022, 11151], [11021, 11151, 11150], [11022, 11023, 11151], [11023, 11152, 11151], [11023, 11024, 11153], [11023, 11153, 11152], [11024, 11025, 11153], [11025, 11154, 11153], [11025, 11026, 11155], [11025, 11155, 11154], [11026, 11027, 11155], [11027, 11156, 11155], [11027, 11028, 11157], [11027, 11157, 11156], [11028, 11029, 11157], [11029, 11158, 11157], [11029, 11030, 11159], [11029, 11159, 11158], [11030, 11031, 11159], [11031, 11160, 11159], [11031, 11032, 11161], [11031, 11161, 11160], [11032, 11033, 11161], [11033, 11162, 11161], [11033, 11034, 11163], [11033, 11163, 11162], [11034, 11035, 11163], [11035, 11164, 11163], [11035, 11036, 11165], [11035, 11165, 11164], [11036, 11037, 11165], [11037, 11166, 11165], [11037, 11038, 11167], [11037, 11167, 11166], [11038, 11039, 11167], [11039, 11168, 11167], [11039, 11040, 11169], [11039, 11169, 11168], [11040, 11041, 11169], [11041, 11170, 11169], [11041, 11042, 11171], [11041, 11171, 11170], [11042, 11043, 11171], [11043, 11172, 11171], [11043, 11044, 11173], [11043, 11173, 11172], [11044, 11045, 11173], [11045, 11174, 11173], [11045, 11046, 11175], [11045, 11175, 11174], [11046, 11047, 11175], [11047, 11176, 11175], [11047, 11048, 11177], [11047, 11177, 11176], [11048, 11049, 11177], [11049, 11178, 11177], [11049, 11050, 11179], [11049, 11179, 11178], [11050, 11051, 11179], [11051, 11180, 11179], [11051, 11052, 11181], [11051, 11181, 11180], [11052, 11053, 11181], [11053, 11182, 11181], [11053, 11054, 11183], [11053, 11183, 11182], [11054, 11055, 11183], [11055, 11184, 11183], [11055, 11056, 11185], [11055, 11185, 11184], [11056, 11057, 11185], [11057, 11186, 11185], [11057, 11058, 11187], [11057, 11187, 11186], [11058, 11059, 11187], [11059, 11188, 11187], [11059, 11060, 11189], [11059, 11189, 11188], [11060, 11061, 11189], [11061, 11190, 11189], [11061, 11062, 11191], [11061, 11191, 11190], [11062, 11063, 11191], [11063, 11192, 11191], [11063, 11064, 11193], [11063, 11193, 11192], [11064, 11065, 11193], [11065, 11194, 11193], [11065, 11066, 
11195], [11065, 11195, 11194], [11066, 11067, 11195], [11067, 11196, 11195], [11067, 11068, 11197], [11067, 11197, 11196], [11068, 11069, 11197], [11069, 11198, 11197], [11069, 11070, 11199], [11069, 11199, 11198], [11070, 11071, 11199], [11071, 11200, 11199], [11071, 11072, 11201], [11071, 11201, 11200], [11072, 11073, 11201], [11073, 11202, 11201], [11073, 11074, 11203], [11073, 11203, 11202], [11074, 11075, 11203], [11075, 11204, 11203], [11075, 11076, 11205], [11075, 11205, 11204], [11076, 11077, 11205], [11077, 11206, 11205], [11077, 11078, 11207], [11077, 11207, 11206], [11078, 11079, 11207], [11079, 11208, 11207], [11079, 11080, 11209], [11079, 11209, 11208], [11080, 11081, 11209], [11081, 11210, 11209], [11081, 11082, 11211], [11081, 11211, 11210], [11082, 11083, 11211], [11083, 11212, 11211], [11083, 11084, 11213], [11083, 11213, 11212], [11084, 11085, 11213], [11085, 11214, 11213], [11085, 11086, 11215], [11085, 11215, 11214], [11086, 11087, 11215], [11087, 11216, 11215], [11087, 11088, 11217], [11087, 11217, 11216], [11088, 11089, 11217], [11089, 11218, 11217], [11089, 11090, 11219], [11089, 11219, 11218], [11090, 11091, 11219], [11091, 11220, 11219], [11091, 11092, 11221], [11091, 11221, 11220], [11092, 11093, 11221], [11093, 11222, 11221], [11093, 11094, 11223], [11093, 11223, 11222], [11094, 11095, 11223], [11095, 11224, 11223], [11095, 11096, 11225], [11095, 11225, 11224], [11096, 11097, 11225], [11097, 11226, 11225], [11097, 11098, 11227], [11097, 11227, 11226], [11098, 11099, 11227], [11099, 11228, 11227], [11099, 11100, 11229], [11099, 11229, 11228], [11100, 11101, 11229], [11101, 11230, 11229], [11101, 11102, 11231], [11101, 11231, 11230], [11102, 11103, 11231], [11103, 11232, 11231], [11103, 11104, 11233], [11103, 11233, 11232], [11104, 11105, 11233], [11105, 11234, 11233], [11105, 11106, 11235], [11105, 11235, 11234], [11106, 11107, 11235], [11107, 11236, 11235], [11107, 11108, 11237], [11107, 11237, 11236], [11108, 11109, 11237], [11109, 11238, 11237], [11109, 11110, 11239], [11109, 11239, 11238], [11110, 11111, 11239], [11111, 11240, 11239], [11111, 11112, 11241], [11111, 11241, 11240], [11112, 11113, 11241], [11113, 11242, 11241], [11113, 11114, 11243], [11113, 11243, 11242], [11114, 11115, 11243], [11115, 11244, 11243], [11115, 11116, 11245], [11115, 11245, 11244], [11116, 11117, 11245], [11117, 11246, 11245], [11117, 11118, 11247], [11117, 11247, 11246], [11118, 11119, 11247], [11119, 11248, 11247], [11119, 11120, 11249], [11119, 11249, 11248], [11120, 11121, 11249], [11121, 11250, 11249], [11121, 11122, 11251], [11121, 11251, 11250], [11122, 11123, 11251], [11123, 11252, 11251], [11123, 11124, 11253], [11123, 11253, 11252], [11124, 11125, 11253], [11125, 11254, 11253], [11125, 11126, 11255], [11125, 11255, 11254], [11126, 11127, 11255], [11127, 11256, 11255], [11127, 11128, 11257], [11127, 11257, 11256], [11128, 11129, 11257], [11129, 11258, 11257], [11129, 11130, 11259], [11129, 11259, 11258], [11130, 11131, 11259], [11131, 11260, 11259], [11131, 11132, 11261], [11131, 11261, 11260], [11132, 11133, 11261], [11133, 11262, 11261], [11133, 11134, 11263], [11133, 11263, 11262], [11134, 11135, 11263], [11135, 11264, 11263], [11135, 11136, 11265], [11135, 11265, 11264], [11136, 11137, 11265], [11137, 11266, 11265], [11137, 11138, 11267], [11137, 11267, 11266], [11138, 11139, 11267], [11139, 11268, 11267], [11140, 11141, 11269], [11141, 11270, 11269], [11141, 11142, 11271], [11141, 11271, 11270], [11142, 11143, 11271], [11143, 11272, 11271], [11143, 11144, 11273], 
[11143, 11273, 11272], [11144, 11145, 11273], [11145, 11274, 11273], [11145, 11146, 11275], [11145, 11275, 11274], [11146, 11147, 11275], [11147, 11276, 11275], [11147, 11148, 11277], [11147, 11277, 11276], [11148, 11149, 11277], [11149, 11278, 11277], [11149, 11150, 11279], [11149, 11279, 11278], [11150, 11151, 11279], [11151, 11280, 11279], [11151, 11152, 11281], [11151, 11281, 11280], [11152, 11153, 11281], [11153, 11282, 11281], [11153, 11154, 11283], [11153, 11283, 11282], [11154, 11155, 11283], [11155, 11284, 11283], [11155, 11156, 11285], [11155, 11285, 11284], [11156, 11157, 11285], [11157, 11286, 11285], [11157, 11158, 11287], [11157, 11287, 11286], [11158, 11159, 11287], [11159, 11288, 11287], [11159, 11160, 11289], [11159, 11289, 11288], [11160, 11161, 11289], [11161, 11290, 11289], [11161, 11162, 11291], [11161, 11291, 11290], [11162, 11163, 11291], [11163, 11292, 11291], [11163, 11164, 11293], [11163, 11293, 11292], [11164, 11165, 11293], [11165, 11294, 11293], [11165, 11166, 11295], [11165, 11295, 11294], [11166, 11167, 11295], [11167, 11296, 11295], [11167, 11168, 11297], [11167, 11297, 11296], [11168, 11169, 11297], [11169, 11298, 11297], [11169, 11170, 11299], [11169, 11299, 11298], [11170, 11171, 11299], [11171, 11300, 11299], [11171, 11172, 11301], [11171, 11301, 11300], [11172, 11173, 11301], [11173, 11302, 11301], [11173, 11174, 11303], [11173, 11303, 11302], [11174, 11175, 11303], [11175, 11304, 11303], [11175, 11176, 11305], [11175, 11305, 11304], [11176, 11177, 11305], [11177, 11306, 11305], [11177, 11178, 11307], [11177, 11307, 11306], [11178, 11179, 11307], [11179, 11308, 11307], [11179, 11180, 11309], [11179, 11309, 11308], [11180, 11181, 11309], [11181, 11310, 11309], [11181, 11182, 11311], [11181, 11311, 11310], [11182, 11183, 11311], [11183, 11312, 11311], [11183, 11184, 11313], [11183, 11313, 11312], [11184, 11185, 11313], [11185, 11314, 11313], [11185, 11186, 11315], [11185, 11315, 11314], [11186, 11187, 11315], [11187, 11316, 11315], [11187, 11188, 11317], [11187, 11317, 11316], [11188, 11189, 11317], [11189, 11318, 11317], [11189, 11190, 11319], [11189, 11319, 11318], [11190, 11191, 11319], [11191, 11320, 11319], [11191, 11192, 11321], [11191, 11321, 11320], [11192, 11193, 11321], [11193, 11322, 11321], [11193, 11194, 11323], [11193, 11323, 11322], [11194, 11195, 11323], [11195, 11324, 11323], [11195, 11196, 11325], [11195, 11325, 11324], [11196, 11197, 11325], [11197, 11326, 11325], [11197, 11198, 11327], [11197, 11327, 11326], [11198, 11199, 11327], [11199, 11328, 11327], [11199, 11200, 11329], [11199, 11329, 11328], [11200, 11201, 11329], [11201, 11330, 11329], [11201, 11202, 11331], [11201, 11331, 11330], [11202, 11203, 11331], [11203, 11332, 11331], [11203, 11204, 11333], [11203, 11333, 11332], [11204, 11205, 11333], [11205, 11334, 11333], [11205, 11206, 11335], [11205, 11335, 11334], [11206, 11207, 11335], [11207, 11336, 11335], [11207, 11208, 11337], [11207, 11337, 11336], [11208, 11209, 11337], [11209, 11338, 11337], [11209, 11210, 11339], [11209, 11339, 11338], [11210, 11211, 11339], [11211, 11340, 11339], [11211, 11212, 11341], [11211, 11341, 11340], [11212, 11213, 11341], [11213, 11342, 11341], [11213, 11214, 11343], [11213, 11343, 11342], [11214, 11215, 11343], [11215, 11344, 11343], [11215, 11216, 11345], [11215, 11345, 11344], [11216, 11217, 11345], [11217, 11346, 11345], [11217, 11218, 11347], [11217, 11347, 11346], [11218, 11219, 11347], [11219, 11348, 11347], [11219, 11220, 11349], [11219, 11349, 11348], [11220, 11221, 11349], [11221, 
11350, 11349], [11221, 11222, 11351], [11221, 11351, 11350], [11222, 11223, 11351], [11223, 11352, 11351], [11223, 11224, 11353], [11223, 11353, 11352], [11224, 11225, 11353], [11225, 11354, 11353], [11225, 11226, 11355], [11225, 11355, 11354], [11226, 11227, 11355], [11227, 11356, 11355], [11227, 11228, 11357], [11227, 11357, 11356], [11228, 11229, 11357], [11229, 11358, 11357], [11229, 11230, 11359], [11229, 11359, 11358], [11230, 11231, 11359], [11231, 11360, 11359], [11231, 11232, 11361], [11231, 11361, 11360], [11232, 11233, 11361], [11233, 11362, 11361], [11233, 11234, 11363], [11233, 11363, 11362], [11234, 11235, 11363], [11235, 11364, 11363], [11235, 11236, 11365], [11235, 11365, 11364], [11236, 11237, 11365], [11237, 11366, 11365], [11237, 11238, 11367], [11237, 11367, 11366], [11238, 11239, 11367], [11239, 11368, 11367], [11239, 11240, 11369], [11239, 11369, 11368], [11240, 11241, 11369], [11241, 11370, 11369], [11241, 11242, 11371], [11241, 11371, 11370], [11242, 11243, 11371], [11243, 11372, 11371], [11243, 11244, 11373], [11243, 11373, 11372], [11244, 11245, 11373], [11245, 11374, 11373], [11245, 11246, 11375], [11245, 11375, 11374], [11246, 11247, 11375], [11247, 11376, 11375], [11247, 11248, 11377], [11247, 11377, 11376], [11248, 11249, 11377], [11249, 11378, 11377], [11249, 11250, 11379], [11249, 11379, 11378], [11250, 11251, 11379], [11251, 11380, 11379], [11251, 11252, 11381], [11251, 11381, 11380], [11252, 11253, 11381], [11253, 11382, 11381], [11253, 11254, 11383], [11253, 11383, 11382], [11254, 11255, 11383], [11255, 11384, 11383], [11255, 11256, 11385], [11255, 11385, 11384], [11256, 11257, 11385], [11257, 11386, 11385], [11257, 11258, 11387], [11257, 11387, 11386], [11258, 11259, 11387], [11259, 11388, 11387], [11259, 11260, 11389], [11259, 11389, 11388], [11260, 11261, 11389], [11261, 11390, 11389], [11261, 11262, 11391], [11261, 11391, 11390], [11262, 11263, 11391], [11263, 11392, 11391], [11263, 11264, 11393], [11263, 11393, 11392], [11264, 11265, 11393], [11265, 11394, 11393], [11265, 11266, 11395], [11265, 11395, 11394], [11266, 11267, 11395], [11267, 11396, 11395], [11267, 11268, 11397], [11267, 11397, 11396], [11269, 11270, 11399], [11269, 11399, 11398], [11270, 11271, 11399], [11271, 11400, 11399], [11271, 11272, 11401], [11271, 11401, 11400], [11272, 11273, 11401], [11273, 11402, 11401], [11273, 11274, 11403], [11273, 11403, 11402], [11274, 11275, 11403], [11275, 11404, 11403], [11275, 11276, 11405], [11275, 11405, 11404], [11276, 11277, 11405], [11277, 11406, 11405], [11277, 11278, 11407], [11277, 11407, 11406], [11278, 11279, 11407], [11279, 11408, 11407], [11279, 11280, 11409], [11279, 11409, 11408], [11280, 11281, 11409], [11281, 11410, 11409], [11281, 11282, 11411], [11281, 11411, 11410], [11282, 11283, 11411], [11283, 11412, 11411], [11283, 11284, 11413], [11283, 11413, 11412], [11284, 11285, 11413], [11285, 11414, 11413], [11285, 11286, 11415], [11285, 11415, 11414], [11286, 11287, 11415], [11287, 11416, 11415], [11287, 11288, 11417], [11287, 11417, 11416], [11288, 11289, 11417], [11289, 11418, 11417], [11289, 11290, 11419], [11289, 11419, 11418], [11290, 11291, 11419], [11291, 11420, 11419], [11291, 11292, 11421], [11291, 11421, 11420], [11292, 11293, 11421], [11293, 11422, 11421], [11293, 11294, 11423], [11293, 11423, 11422], [11294, 11295, 11423], [11295, 11424, 11423], [11295, 11296, 11425], [11295, 11425, 11424], [11296, 11297, 11425], [11297, 11426, 11425], [11297, 11298, 11427], [11297, 11427, 11426], [11298, 11299, 11427], [11299, 11428, 
11427], [11299, 11300, 11429], [11299, 11429, 11428], [11300, 11301, 11429], [11301, 11430, 11429], [11301, 11302, 11431], [11301, 11431, 11430], [11302, 11303, 11431], [11303, 11432, 11431], [11303, 11304, 11433], [11303, 11433, 11432], [11304, 11305, 11433], [11305, 11434, 11433], [11305, 11306, 11435], [11305, 11435, 11434], [11306, 11307, 11435], [11307, 11436, 11435], [11307, 11308, 11437], [11307, 11437, 11436], [11308, 11309, 11437], [11309, 11438, 11437], [11309, 11310, 11439], [11309, 11439, 11438], [11310, 11311, 11439], [11311, 11440, 11439], [11311, 11312, 11441], [11311, 11441, 11440], [11312, 11313, 11441], [11313, 11442, 11441], [11313, 11314, 11443], [11313, 11443, 11442], [11314, 11315, 11443], [11315, 11444, 11443], [11315, 11316, 11445], [11315, 11445, 11444], [11316, 11317, 11445], [11317, 11446, 11445], [11317, 11318, 11447], [11317, 11447, 11446], [11318, 11319, 11447], [11319, 11448, 11447], [11319, 11320, 11449], [11319, 11449, 11448], [11320, 11321, 11449], [11321, 11450, 11449], [11321, 11322, 11451], [11321, 11451, 11450], [11322, 11323, 11451], [11323, 11452, 11451], [11323, 11324, 11453], [11323, 11453, 11452], [11324, 11325, 11453], [11325, 11454, 11453], [11325, 11326, 11455], [11325, 11455, 11454], [11326, 11327, 11455], [11327, 11456, 11455], [11327, 11328, 11457], [11327, 11457, 11456], [11328, 11329, 11457], [11329, 11458, 11457], [11329, 11330, 11459], [11329, 11459, 11458], [11330, 11331, 11459], [11331, 11460, 11459], [11331, 11332, 11461], [11331, 11461, 11460], [11332, 11333, 11461], [11333, 11462, 11461], [11333, 11334, 11463], [11333, 11463, 11462], [11334, 11335, 11463], [11335, 11464, 11463], [11335, 11336, 11465], [11335, 11465, 11464], [11336, 11337, 11465], [11337, 11466, 11465], [11337, 11338, 11467], [11337, 11467, 11466], [11338, 11339, 11467], [11339, 11468, 11467], [11339, 11340, 11469], [11339, 11469, 11468], [11340, 11341, 11469], [11341, 11470, 11469], [11341, 11342, 11471], [11341, 11471, 11470], [11342, 11343, 11471], [11343, 11472, 11471], [11343, 11344, 11473], [11343, 11473, 11472], [11344, 11345, 11473], [11345, 11474, 11473], [11345, 11346, 11475], [11345, 11475, 11474], [11346, 11347, 11475], [11347, 11476, 11475], [11347, 11348, 11477], [11347, 11477, 11476], [11348, 11349, 11477], [11349, 11478, 11477], [11349, 11350, 11479], [11349, 11479, 11478], [11350, 11351, 11479], [11351, 11480, 11479], [11351, 11352, 11481], [11351, 11481, 11480], [11352, 11353, 11481], [11353, 11482, 11481], [11353, 11354, 11483], [11353, 11483, 11482], [11354, 11355, 11483], [11355, 11484, 11483], [11355, 11356, 11485], [11355, 11485, 11484], [11356, 11357, 11485], [11357, 11486, 11485], [11357, 11358, 11487], [11357, 11487, 11486], [11358, 11359, 11487], [11359, 11488, 11487], [11359, 11360, 11489], [11359, 11489, 11488], [11360, 11361, 11489], [11361, 11490, 11489], [11361, 11362, 11491], [11361, 11491, 11490], [11362, 11363, 11491], [11363, 11492, 11491], [11363, 11364, 11493], [11363, 11493, 11492], [11364, 11365, 11493], [11365, 11494, 11493], [11365, 11366, 11495], [11365, 11495, 11494], [11366, 11367, 11495], [11367, 11496, 11495], [11367, 11368, 11497], [11367, 11497, 11496], [11368, 11369, 11497], [11369, 11498, 11497], [11369, 11370, 11499], [11369, 11499, 11498], [11370, 11371, 11499], [11371, 11500, 11499], [11371, 11372, 11501], [11371, 11501, 11500], [11372, 11373, 11501], [11373, 11502, 11501], [11373, 11374, 11503], [11373, 11503, 11502], [11374, 11375, 11503], [11375, 11504, 11503], [11375, 11376, 11505], [11375, 11505, 11504], 
[11376, 11377, 11505], [11377, 11506, 11505], [11377, 11378, 11507], [11377, 11507, 11506], [11378, 11379, 11507], [11379, 11508, 11507], [11379, 11380, 11509], [11379, 11509, 11508], [11380, 11381, 11509], [11381, 11510, 11509], [11381, 11382, 11511], [11381, 11511, 11510], [11382, 11383, 11511], [11383, 11512, 11511], [11383, 11384, 11513], [11383, 11513, 11512], [11384, 11385, 11513], [11385, 11514, 11513], [11385, 11386, 11515], [11385, 11515, 11514], [11386, 11387, 11515], [11387, 11516, 11515], [11387, 11388, 11517], [11387, 11517, 11516], [11388, 11389, 11517], [11389, 11518, 11517], [11389, 11390, 11519], [11389, 11519, 11518], [11390, 11391, 11519], [11391, 11520, 11519], [11391, 11392, 11521], [11391, 11521, 11520], [11392, 11393, 11521], [11393, 11522, 11521], [11393, 11394, 11523], [11393, 11523, 11522], [11394, 11395, 11523], [11395, 11524, 11523], [11395, 11396, 11525], [11395, 11525, 11524], [11396, 11397, 11525], [11397, 11526, 11525], [11398, 11399, 11527], [11399, 11528, 11527], [11399, 11400, 11529], [11399, 11529, 11528], [11400, 11401, 11529], [11401, 11530, 11529], [11401, 11402, 11531], [11401, 11531, 11530], [11402, 11403, 11531], [11403, 11532, 11531], [11403, 11404, 11533], [11403, 11533, 11532], [11404, 11405, 11533], [11405, 11534, 11533], [11405, 11406, 11535], [11405, 11535, 11534], [11406, 11407, 11535], [11407, 11536, 11535], [11407, 11408, 11537], [11407, 11537, 11536], [11408, 11409, 11537], [11409, 11538, 11537], [11409, 11410, 11539], [11409, 11539, 11538], [11410, 11411, 11539], [11411, 11540, 11539], [11411, 11412, 11541], [11411, 11541, 11540], [11412, 11413, 11541], [11413, 11542, 11541], [11413, 11414, 11543], [11413, 11543, 11542], [11414, 11415, 11543], [11415, 11544, 11543], [11415, 11416, 11545], [11415, 11545, 11544], [11416, 11417, 11545], [11417, 11546, 11545], [11417, 11418, 11547], [11417, 11547, 11546], [11418, 11419, 11547], [11419, 11548, 11547], [11419, 11420, 11549], [11419, 11549, 11548], [11420, 11421, 11549], [11421, 11550, 11549], [11421, 11422, 11551], [11421, 11551, 11550], [11422, 11423, 11551], [11423, 11552, 11551], [11423, 11424, 11553], [11423, 11553, 11552], [11424, 11425, 11553], [11425, 11554, 11553], [11425, 11426, 11555], [11425, 11555, 11554], [11426, 11427, 11555], [11427, 11556, 11555], [11427, 11428, 11557], [11427, 11557, 11556], [11428, 11429, 11557], [11429, 11558, 11557], [11429, 11430, 11559], [11429, 11559, 11558], [11430, 11431, 11559], [11431, 11560, 11559], [11431, 11432, 11561], [11431, 11561, 11560], [11432, 11433, 11561], [11433, 11562, 11561], [11433, 11434, 11563], [11433, 11563, 11562], [11434, 11435, 11563], [11435, 11564, 11563], [11435, 11436, 11565], [11435, 11565, 11564], [11436, 11437, 11565], [11437, 11566, 11565], [11437, 11438, 11567], [11437, 11567, 11566], [11438, 11439, 11567], [11439, 11568, 11567], [11439, 11440, 11569], [11439, 11569, 11568], [11440, 11441, 11569], [11441, 11570, 11569], [11441, 11442, 11571], [11441, 11571, 11570], [11442, 11443, 11571], [11443, 11572, 11571], [11443, 11444, 11573], [11443, 11573, 11572], [11444, 11445, 11573], [11445, 11574, 11573], [11445, 11446, 11575], [11445, 11575, 11574], [11446, 11447, 11575], [11447, 11576, 11575], [11447, 11448, 11577], [11447, 11577, 11576], [11448, 11449, 11577], [11449, 11578, 11577], [11449, 11450, 11579], [11449, 11579, 11578], [11450, 11451, 11579], [11451, 11580, 11579], [11451, 11452, 11581], [11451, 11581, 11580], [11452, 11453, 11581], [11453, 11582, 11581], [11453, 11454, 11583], [11453, 11583, 11582], [11454, 
11455, 11583], [11455, 11584, 11583], [11455, 11456, 11585], [11455, 11585, 11584], [11456, 11457, 11585], [11457, 11586, 11585], [11457, 11458, 11587], [11457, 11587, 11586], [11458, 11459, 11587], [11459, 11588, 11587], [11459, 11460, 11589], [11459, 11589, 11588], [11460, 11461, 11589], [11461, 11590, 11589], [11461, 11462, 11591], [11461, 11591, 11590], [11462, 11463, 11591], [11463, 11592, 11591], [11463, 11464, 11593], [11463, 11593, 11592], [11464, 11465, 11593], [11465, 11594, 11593], [11465, 11466, 11595], [11465, 11595, 11594], [11466, 11467, 11595], [11467, 11596, 11595], [11467, 11468, 11597], [11467, 11597, 11596], [11468, 11469, 11597], [11469, 11598, 11597], [11469, 11470, 11599], [11469, 11599, 11598], [11470, 11471, 11599], [11471, 11600, 11599], [11471, 11472, 11601], [11471, 11601, 11600], [11472, 11473, 11601], [11473, 11602, 11601], [11473, 11474, 11603], [11473, 11603, 11602], [11474, 11475, 11603], [11475, 11604, 11603], [11475, 11476, 11605], [11475, 11605, 11604], [11476, 11477, 11605], [11477, 11606, 11605], [11477, 11478, 11607], [11477, 11607, 11606], [11478, 11479, 11607], [11479, 11608, 11607], [11479, 11480, 11609], [11479, 11609, 11608], [11480, 11481, 11609], [11481, 11610, 11609], [11481, 11482, 11611], [11481, 11611, 11610], [11482, 11483, 11611], [11483, 11612, 11611], [11483, 11484, 11613], [11483, 11613, 11612], [11484, 11485, 11613], [11485, 11614, 11613], [11485, 11486, 11615], [11485, 11615, 11614], [11486, 11487, 11615], [11487, 11616, 11615], [11487, 11488, 11617], [11487, 11617, 11616], [11488, 11489, 11617], [11489, 11618, 11617], [11489, 11490, 11619], [11489, 11619, 11618], [11490, 11491, 11619], [11491, 11620, 11619], [11491, 11492, 11621], [11491, 11621, 11620], [11492, 11493, 11621], [11493, 11622, 11621], [11493, 11494, 11623], [11493, 11623, 11622], [11494, 11495, 11623], [11495, 11624, 11623], [11495, 11496, 11625], [11495, 11625, 11624], [11496, 11497, 11625], [11497, 11626, 11625], [11497, 11498, 11627], [11497, 11627, 11626], [11498, 11499, 11627], [11499, 11628, 11627], [11499, 11500, 11629], [11499, 11629, 11628], [11500, 11501, 11629], [11501, 11630, 11629], [11501, 11502, 11631], [11501, 11631, 11630], [11502, 11503, 11631], [11503, 11632, 11631], [11503, 11504, 11633], [11503, 11633, 11632], [11504, 11505, 11633], [11505, 11634, 11633], [11505, 11506, 11635], [11505, 11635, 11634], [11506, 11507, 11635], [11507, 11636, 11635], [11507, 11508, 11637], [11507, 11637, 11636], [11508, 11509, 11637], [11509, 11638, 11637], [11509, 11510, 11639], [11509, 11639, 11638], [11510, 11511, 11639], [11511, 11640, 11639], [11511, 11512, 11641], [11511, 11641, 11640], [11512, 11513, 11641], [11513, 11642, 11641], [11513, 11514, 11643], [11513, 11643, 11642], [11514, 11515, 11643], [11515, 11644, 11643], [11515, 11516, 11645], [11515, 11645, 11644], [11516, 11517, 11645], [11517, 11646, 11645], [11517, 11518, 11647], [11517, 11647, 11646], [11518, 11519, 11647], [11519, 11648, 11647], [11519, 11520, 11649], [11519, 11649, 11648], [11520, 11521, 11649], [11521, 11650, 11649], [11521, 11522, 11651], [11521, 11651, 11650], [11522, 11523, 11651], [11523, 11652, 11651], [11523, 11524, 11653], [11523, 11653, 11652], [11524, 11525, 11653], [11525, 11654, 11653], [11525, 11526, 11655], [11525, 11655, 11654], [11527, 11528, 11657], [11527, 11657, 11656], [11528, 11529, 11657], [11529, 11658, 11657], [11529, 11530, 11659], [11529, 11659, 11658], [11530, 11531, 11659], [11531, 11660, 11659], [11531, 11532, 11661], [11531, 11661, 11660], [11532, 11533, 
11661], [11533, 11662, 11661], [11533, 11534, 11663], [11533, 11663, 11662], [11534, 11535, 11663], [11535, 11664, 11663], [11535, 11536, 11665], [11535, 11665, 11664], [11536, 11537, 11665], [11537, 11666, 11665], [11537, 11538, 11667], [11537, 11667, 11666], [11538, 11539, 11667], [11539, 11668, 11667], [11539, 11540, 11669], [11539, 11669, 11668], [11540, 11541, 11669], [11541, 11670, 11669], [11541, 11542, 11671], [11541, 11671, 11670], [11542, 11543, 11671], [11543, 11672, 11671], [11543, 11544, 11673], [11543, 11673, 11672], [11544, 11545, 11673], [11545, 11674, 11673], [11545, 11546, 11675], [11545, 11675, 11674], [11546, 11547, 11675], [11547, 11676, 11675], [11547, 11548, 11677], [11547, 11677, 11676], [11548, 11549, 11677], [11549, 11678, 11677], [11549, 11550, 11679], [11549, 11679, 11678], [11550, 11551, 11679], [11551, 11680, 11679], [11551, 11552, 11681], [11551, 11681, 11680], [11552, 11553, 11681], [11553, 11682, 11681], [11553, 11554, 11683], [11553, 11683, 11682], [11554, 11555, 11683], [11555, 11684, 11683], [11555, 11556, 11685], [11555, 11685, 11684], [11556, 11557, 11685], [11557, 11686, 11685], [11557, 11558, 11687], [11557, 11687, 11686], [11558, 11559, 11687], [11559, 11688, 11687], [11559, 11560, 11689], [11559, 11689, 11688], [11560, 11561, 11689], [11561, 11690, 11689], [11561, 11562, 11691], [11561, 11691, 11690], [11562, 11563, 11691], [11563, 11692, 11691], [11563, 11564, 11693], [11563, 11693, 11692], [11564, 11565, 11693], [11565, 11694, 11693], [11565, 11566, 11695], [11565, 11695, 11694], [11566, 11567, 11695], [11567, 11696, 11695], [11567, 11568, 11697], [11567, 11697, 11696], [11568, 11569, 11697], [11569, 11698, 11697], [11569, 11570, 11699], [11569, 11699, 11698], [11570, 11571, 11699], [11571, 11700, 11699], [11571, 11572, 11701], [11571, 11701, 11700], [11572, 11573, 11701], [11573, 11702, 11701], [11573, 11574, 11703], [11573, 11703, 11702], [11574, 11575, 11703], [11575, 11704, 11703], [11575, 11576, 11705], [11575, 11705, 11704], [11576, 11577, 11705], [11577, 11706, 11705], [11577, 11578, 11707], [11577, 11707, 11706], [11578, 11579, 11707], [11579, 11708, 11707], [11579, 11580, 11709], [11579, 11709, 11708], [11580, 11581, 11709], [11581, 11710, 11709], [11581, 11582, 11711], [11581, 11711, 11710], [11582, 11583, 11711], [11583, 11712, 11711], [11583, 11584, 11713], [11583, 11713, 11712], [11584, 11585, 11713], [11585, 11714, 11713], [11585, 11586, 11715], [11585, 11715, 11714], [11586, 11587, 11715], [11587, 11716, 11715], [11587, 11588, 11717], [11587, 11717, 11716], [11588, 11589, 11717], [11589, 11718, 11717], [11589, 11590, 11719], [11589, 11719, 11718], [11590, 11591, 11719], [11591, 11720, 11719], [11591, 11592, 11721], [11591, 11721, 11720], [11592, 11593, 11721], [11593, 11722, 11721], [11593, 11594, 11723], [11593, 11723, 11722], [11594, 11595, 11723], [11595, 11724, 11723], [11595, 11596, 11725], [11595, 11725, 11724], [11596, 11597, 11725], [11597, 11726, 11725], [11597, 11598, 11727], [11597, 11727, 11726], [11598, 11599, 11727], [11599, 11728, 11727], [11599, 11600, 11729], [11599, 11729, 11728], [11600, 11601, 11729], [11601, 11730, 11729], [11601, 11602, 11731], [11601, 11731, 11730], [11602, 11603, 11731], [11603, 11732, 11731], [11603, 11604, 11733], [11603, 11733, 11732], [11604, 11605, 11733], [11605, 11734, 11733], [11605, 11606, 11735], [11605, 11735, 11734], [11606, 11607, 11735], [11607, 11736, 11735], [11607, 11608, 11737], [11607, 11737, 11736], [11608, 11609, 11737], [11609, 11738, 11737], [11609, 11610, 11739], 
[11609, 11739, 11738], [11610, 11611, 11739], [11611, 11740, 11739], [11611, 11612, 11741], [11611, 11741, 11740], [11612, 11613, 11741], [11613, 11742, 11741], [11613, 11614, 11743], [11613, 11743, 11742], [11614, 11615, 11743], [11615, 11744, 11743], [11615, 11616, 11745], [11615, 11745, 11744], [11616, 11617, 11745], [11617, 11746, 11745], [11617, 11618, 11747], [11617, 11747, 11746], [11618, 11619, 11747], [11619, 11748, 11747], [11619, 11620, 11749], [11619, 11749, 11748], [11620, 11621, 11749], [11621, 11750, 11749], [11621, 11622, 11751], [11621, 11751, 11750], [11622, 11623, 11751], [11623, 11752, 11751], [11623, 11624, 11753], [11623, 11753, 11752], [11624, 11625, 11753], [11625, 11754, 11753], [11625, 11626, 11755], [11625, 11755, 11754], [11626, 11627, 11755], [11627, 11756, 11755], [11627, 11628, 11757], [11627, 11757, 11756], [11628, 11629, 11757], [11629, 11758, 11757], [11629, 11630, 11759], [11629, 11759, 11758], [11630, 11631, 11759], [11631, 11760, 11759], [11631, 11632, 11761], [11631, 11761, 11760], [11632, 11633, 11761], [11633, 11762, 11761], [11633, 11634, 11763], [11633, 11763, 11762], [11634, 11635, 11763], [11635, 11764, 11763], [11635, 11636, 11765], [11635, 11765, 11764], [11636, 11637, 11765], [11637, 11766, 11765], [11637, 11638, 11767], [11637, 11767, 11766], [11638, 11639, 11767], [11639, 11768, 11767], [11639, 11640, 11769], [11639, 11769, 11768], [11640, 11641, 11769], [11641, 11770, 11769], [11641, 11642, 11771], [11641, 11771, 11770], [11642, 11643, 11771], [11643, 11772, 11771], [11643, 11644, 11773], [11643, 11773, 11772], [11644, 11645, 11773], [11645, 11774, 11773], [11645, 11646, 11775], [11645, 11775, 11774], [11646, 11647, 11775], [11647, 11776, 11775], [11647, 11648, 11777], [11647, 11777, 11776], [11648, 11649, 11777], [11649, 11778, 11777], [11649, 11650, 11779], [11649, 11779, 11778], [11650, 11651, 11779], [11651, 11780, 11779], [11651, 11652, 11781], [11651, 11781, 11780], [11652, 11653, 11781], [11653, 11782, 11781], [11653, 11654, 11783], [11653, 11783, 11782], [11654, 11655, 11783], [11655, 11784, 11783], [11656, 11657, 11785], [11657, 11786, 11785], [11657, 11658, 11787], [11657, 11787, 11786], [11658, 11659, 11787], [11659, 11788, 11787], [11659, 11660, 11789], [11659, 11789, 11788], [11660, 11661, 11789], [11661, 11790, 11789], [11661, 11662, 11791], [11661, 11791, 11790], [11662, 11663, 11791], [11663, 11792, 11791], [11663, 11664, 11793], [11663, 11793, 11792], [11664, 11665, 11793], [11665, 11794, 11793], [11665, 11666, 11795], [11665, 11795, 11794], [11666, 11667, 11795], [11667, 11796, 11795], [11667, 11668, 11797], [11667, 11797, 11796], [11668, 11669, 11797], [11669, 11798, 11797], [11669, 11670, 11799], [11669, 11799, 11798], [11670, 11671, 11799], [11671, 11800, 11799], [11671, 11672, 11801], [11671, 11801, 11800], [11672, 11673, 11801], [11673, 11802, 11801], [11673, 11674, 11803], [11673, 11803, 11802], [11674, 11675, 11803], [11675, 11804, 11803], [11675, 11676, 11805], [11675, 11805, 11804], [11676, 11677, 11805], [11677, 11806, 11805], [11677, 11678, 11807], [11677, 11807, 11806], [11678, 11679, 11807], [11679, 11808, 11807], [11679, 11680, 11809], [11679, 11809, 11808], [11680, 11681, 11809], [11681, 11810, 11809], [11681, 11682, 11811], [11681, 11811, 11810], [11682, 11683, 11811], [11683, 11812, 11811], [11683, 11684, 11813], [11683, 11813, 11812], [11684, 11685, 11813], [11685, 11814, 11813], [11685, 11686, 11815], [11685, 11815, 11814], [11686, 11687, 11815], [11687, 11816, 11815], [11687, 11688, 11817], [11687, 
11817, 11816], [11688, 11689, 11817], [11689, 11818, 11817], [11689, 11690, 11819], [11689, 11819, 11818], [11690, 11691, 11819], [11691, 11820, 11819], [11691, 11692, 11821], [11691, 11821, 11820], [11692, 11693, 11821], [11693, 11822, 11821], [11693, 11694, 11823], [11693, 11823, 11822], [11694, 11695, 11823], [11695, 11824, 11823], [11695, 11696, 11825], [11695, 11825, 11824], [11696, 11697, 11825], [11697, 11826, 11825], [11697, 11698, 11827], [11697, 11827, 11826], [11698, 11699, 11827], [11699, 11828, 11827], [11699, 11700, 11829], [11699, 11829, 11828], [11700, 11701, 11829], [11701, 11830, 11829], [11701, 11702, 11831], [11701, 11831, 11830], [11702, 11703, 11831], [11703, 11832, 11831], [11703, 11704, 11833], [11703, 11833, 11832], [11704, 11705, 11833], [11705, 11834, 11833], [11705, 11706, 11835], [11705, 11835, 11834], [11706, 11707, 11835], [11707, 11836, 11835], [11707, 11708, 11837], [11707, 11837, 11836], [11708, 11709, 11837], [11709, 11838, 11837], [11709, 11710, 11839], [11709, 11839, 11838], [11710, 11711, 11839], [11711, 11840, 11839], [11711, 11712, 11841], [11711, 11841, 11840], [11712, 11713, 11841], [11713, 11842, 11841], [11713, 11714, 11843], [11713, 11843, 11842], [11714, 11715, 11843], [11715, 11844, 11843], [11715, 11716, 11845], [11715, 11845, 11844], [11716, 11717, 11845], [11717, 11846, 11845], [11717, 11718, 11847], [11717, 11847, 11846], [11718, 11719, 11847], [11719, 11848, 11847], [11719, 11720, 11849], [11719, 11849, 11848], [11720, 11721, 11849], [11721, 11850, 11849], [11721, 11722, 11851], [11721, 11851, 11850], [11722, 11723, 11851], [11723, 11852, 11851], [11723, 11724, 11853], [11723, 11853, 11852], [11724, 11725, 11853], [11725, 11854, 11853], [11725, 11726, 11855], [11725, 11855, 11854], [11726, 11727, 11855], [11727, 11856, 11855], [11727, 11728, 11857], [11727, 11857, 11856], [11728, 11729, 11857], [11729, 11858, 11857], [11729, 11730, 11859], [11729, 11859, 11858], [11730, 11731, 11859], [11731, 11860, 11859], [11731, 11732, 11861], [11731, 11861, 11860], [11732, 11733, 11861], [11733, 11862, 11861], [11733, 11734, 11863], [11733, 11863, 11862], [11734, 11735, 11863], [11735, 11864, 11863], [11735, 11736, 11865], [11735, 11865, 11864], [11736, 11737, 11865], [11737, 11866, 11865], [11737, 11738, 11867], [11737, 11867, 11866], [11738, 11739, 11867], [11739, 11868, 11867], [11739, 11740, 11869], [11739, 11869, 11868], [11740, 11741, 11869], [11741, 11870, 11869], [11741, 11742, 11871], [11741, 11871, 11870], [11742, 11743, 11871], [11743, 11872, 11871], [11743, 11744, 11873], [11743, 11873, 11872], [11744, 11745, 11873], [11745, 11874, 11873], [11745, 11746, 11875], [11745, 11875, 11874], [11746, 11747, 11875], [11747, 11876, 11875], [11747, 11748, 11877], [11747, 11877, 11876], [11748, 11749, 11877], [11749, 11878, 11877], [11749, 11750, 11879], [11749, 11879, 11878], [11750, 11751, 11879], [11751, 11880, 11879], [11751, 11752, 11881], [11751, 11881, 11880], [11752, 11753, 11881], [11753, 11882, 11881], [11753, 11754, 11883], [11753, 11883, 11882], [11754, 11755, 11883], [11755, 11884, 11883], [11755, 11756, 11885], [11755, 11885, 11884], [11756, 11757, 11885], [11757, 11886, 11885], [11757, 11758, 11887], [11757, 11887, 11886], [11758, 11759, 11887], [11759, 11888, 11887], [11759, 11760, 11889], [11759, 11889, 11888], [11760, 11761, 11889], [11761, 11890, 11889], [11761, 11762, 11891], [11761, 11891, 11890], [11762, 11763, 11891], [11763, 11892, 11891], [11763, 11764, 11893], [11763, 11893, 11892], [11764, 11765, 11893], [11765, 11894, 
11893], [11765, 11766, 11895], [11765, 11895, 11894], [11766, 11767, 11895], [11767, 11896, 11895], [11767, 11768, 11897], [11767, 11897, 11896], [11768, 11769, 11897], [11769, 11898, 11897], [11769, 11770, 11899], [11769, 11899, 11898], [11770, 11771, 11899], [11771, 11900, 11899], [11771, 11772, 11901], [11771, 11901, 11900], [11772, 11773, 11901], [11773, 11902, 11901], [11773, 11774, 11903], [11773, 11903, 11902], [11774, 11775, 11903], [11775, 11904, 11903], [11775, 11776, 11905], [11775, 11905, 11904], [11776, 11777, 11905], [11777, 11906, 11905], [11777, 11778, 11907], [11777, 11907, 11906], [11778, 11779, 11907], [11779, 11908, 11907], [11779, 11780, 11909], [11779, 11909, 11908], [11780, 11781, 11909], [11781, 11910, 11909], [11781, 11782, 11911], [11781, 11911, 11910], [11782, 11783, 11911], [11783, 11912, 11911], [11783, 11784, 11913], [11783, 11913, 11912], [11785, 11786, 11915], [11785, 11915, 11914], [11786, 11787, 11915], [11787, 11916, 11915], [11787, 11788, 11917], [11787, 11917, 11916], [11788, 11789, 11917], [11789, 11918, 11917], [11789, 11790, 11919], [11789, 11919, 11918], [11790, 11791, 11919], [11791, 11920, 11919], [11791, 11792, 11921], [11791, 11921, 11920], [11792, 11793, 11921], [11793, 11922, 11921], [11793, 11794, 11923], [11793, 11923, 11922], [11794, 11795, 11923], [11795, 11924, 11923], [11795, 11796, 11925], [11795, 11925, 11924], [11796, 11797, 11925], [11797, 11926, 11925], [11797, 11798, 11927], [11797, 11927, 11926], [11798, 11799, 11927], [11799, 11928, 11927], [11799, 11800, 11929], [11799, 11929, 11928], [11800, 11801, 11929], [11801, 11930, 11929], [11801, 11802, 11931], [11801, 11931, 11930], [11802, 11803, 11931], [11803, 11932, 11931], [11803, 11804, 11933], [11803, 11933, 11932], [11804, 11805, 11933], [11805, 11934, 11933], [11805, 11806, 11935], [11805, 11935, 11934], [11806, 11807, 11935], [11807, 11936, 11935], [11807, 11808, 11937], [11807, 11937, 11936], [11808, 11809, 11937], [11809, 11938, 11937], [11809, 11810, 11939], [11809, 11939, 11938], [11810, 11811, 11939], [11811, 11940, 11939], [11811, 11812, 11941], [11811, 11941, 11940], [11812, 11813, 11941], [11813, 11942, 11941], [11813, 11814, 11943], [11813, 11943, 11942], [11814, 11815, 11943], [11815, 11944, 11943], [11815, 11816, 11945], [11815, 11945, 11944], [11816, 11817, 11945], [11817, 11946, 11945], [11817, 11818, 11947], [11817, 11947, 11946], [11818, 11819, 11947], [11819, 11948, 11947], [11819, 11820, 11949], [11819, 11949, 11948], [11820, 11821, 11949], [11821, 11950, 11949], [11821, 11822, 11951], [11821, 11951, 11950], [11822, 11823, 11951], [11823, 11952, 11951], [11823, 11824, 11953], [11823, 11953, 11952], [11824, 11825, 11953], [11825, 11954, 11953], [11825, 11826, 11955], [11825, 11955, 11954], [11826, 11827, 11955], [11827, 11956, 11955], [11827, 11828, 11957], [11827, 11957, 11956], [11828, 11829, 11957], [11829, 11958, 11957], [11829, 11830, 11959], [11829, 11959, 11958], [11830, 11831, 11959], [11831, 11960, 11959], [11831, 11832, 11961], [11831, 11961, 11960], [11832, 11833, 11961], [11833, 11962, 11961], [11833, 11834, 11963], [11833, 11963, 11962], [11834, 11835, 11963], [11835, 11964, 11963], [11835, 11836, 11965], [11835, 11965, 11964], [11836, 11837, 11965], [11837, 11966, 11965], [11837, 11838, 11967], [11837, 11967, 11966], [11838, 11839, 11967], [11839, 11968, 11967], [11839, 11840, 11969], [11839, 11969, 11968], [11840, 11841, 11969], [11841, 11970, 11969], [11841, 11842, 11971], [11841, 11971, 11970], [11842, 11843, 11971], [11843, 11972, 11971], 
[11843, 11844, 11973], [11843, 11973, 11972], [11844, 11845, 11973], [11845, 11974, 11973], [11845, 11846, 11975], [11845, 11975, 11974], [11846, 11847, 11975], [11847, 11976, 11975], [11847, 11848, 11977], [11847, 11977, 11976], [11848, 11849, 11977], [11849, 11978, 11977], [11849, 11850, 11979], [11849, 11979, 11978], [11850, 11851, 11979], [11851, 11980, 11979], [11851, 11852, 11981], [11851, 11981, 11980], [11852, 11853, 11981], [11853, 11982, 11981], [11853, 11854, 11983], [11853, 11983, 11982], [11854, 11855, 11983], [11855, 11984, 11983], [11855, 11856, 11985], [11855, 11985, 11984], [11856, 11857, 11985], [11857, 11986, 11985], [11857, 11858, 11987], [11857, 11987, 11986], [11858, 11859, 11987], [11859, 11988, 11987], [11859, 11860, 11989], [11859, 11989, 11988], [11860, 11861, 11989], [11861, 11990, 11989], [11861, 11862, 11991], [11861, 11991, 11990], [11862, 11863, 11991], [11863, 11992, 11991], [11863, 11864, 11993], [11863, 11993, 11992], [11864, 11865, 11993], [11865, 11994, 11993], [11865, 11866, 11995], [11865, 11995, 11994], [11866, 11867, 11995], [11867, 11996, 11995], [11867, 11868, 11997], [11867, 11997, 11996], [11868, 11869, 11997], [11869, 11998, 11997], [11869, 11870, 11999], [11869, 11999, 11998], [11870, 11871, 11999], [11871, 12000, 11999], [11871, 11872, 12001], [11871, 12001, 12000], [11872, 11873, 12001], [11873, 12002, 12001], [11873, 11874, 12003], [11873, 12003, 12002], [11874, 11875, 12003], [11875, 12004, 12003], [11875, 11876, 12005], [11875, 12005, 12004], [11876, 11877, 12005], [11877, 12006, 12005], [11877, 11878, 12007], [11877, 12007, 12006], [11878, 11879, 12007], [11879, 12008, 12007], [11879, 11880, 12009], [11879, 12009, 12008], [11880, 11881, 12009], [11881, 12010, 12009], [11881, 11882, 12011], [11881, 12011, 12010], [11882, 11883, 12011], [11883, 12012, 12011], [11883, 11884, 12013], [11883, 12013, 12012], [11884, 11885, 12013], [11885, 12014, 12013], [11885, 11886, 12015], [11885, 12015, 12014], [11886, 11887, 12015], [11887, 12016, 12015], [11887, 11888, 12017], [11887, 12017, 12016], [11888, 11889, 12017], [11889, 12018, 12017], [11889, 11890, 12019], [11889, 12019, 12018], [11890, 11891, 12019], [11891, 12020, 12019], [11891, 11892, 12021], [11891, 12021, 12020], [11892, 11893, 12021], [11893, 12022, 12021], [11893, 11894, 12023], [11893, 12023, 12022], [11894, 11895, 12023], [11895, 12024, 12023], [11895, 11896, 12025], [11895, 12025, 12024], [11896, 11897, 12025], [11897, 12026, 12025], [11897, 11898, 12027], [11897, 12027, 12026], [11898, 11899, 12027], [11899, 12028, 12027], [11899, 11900, 12029], [11899, 12029, 12028], [11900, 11901, 12029], [11901, 12030, 12029], [11901, 11902, 12031], [11901, 12031, 12030], [11902, 11903, 12031], [11903, 12032, 12031], [11903, 11904, 12033], [11903, 12033, 12032], [11904, 11905, 12033], [11905, 12034, 12033], [11905, 11906, 12035], [11905, 12035, 12034], [11906, 11907, 12035], [11907, 12036, 12035], [11907, 11908, 12037], [11907, 12037, 12036], [11908, 11909, 12037], [11909, 12038, 12037], [11909, 11910, 12039], [11909, 12039, 12038], [11910, 11911, 12039], [11911, 12040, 12039], [11911, 11912, 12041], [11911, 12041, 12040], [11912, 11913, 12041], [11913, 12042, 12041], [11914, 11915, 12043], [11915, 12044, 12043], [11915, 11916, 12045], [11915, 12045, 12044], [11916, 11917, 12045], [11917, 12046, 12045], [11917, 11918, 12047], [11917, 12047, 12046], [11918, 11919, 12047], [11919, 12048, 12047], [11919, 11920, 12049], [11919, 12049, 12048], [11920, 11921, 12049], [11921, 12050, 12049], [11921, 
11922, 12051], [11921, 12051, 12050], [11922, 11923, 12051], [11923, 12052, 12051], [11923, 11924, 12053], [11923, 12053, 12052], [11924, 11925, 12053], [11925, 12054, 12053], [11925, 11926, 12055], [11925, 12055, 12054], [11926, 11927, 12055], [11927, 12056, 12055], [11927, 11928, 12057], [11927, 12057, 12056], [11928, 11929, 12057], [11929, 12058, 12057], [11929, 11930, 12059], [11929, 12059, 12058], [11930, 11931, 12059], [11931, 12060, 12059], [11931, 11932, 12061], [11931, 12061, 12060], [11932, 11933, 12061], [11933, 12062, 12061], [11933, 11934, 12063], [11933, 12063, 12062], [11934, 11935, 12063], [11935, 12064, 12063], [11935, 11936, 12065], [11935, 12065, 12064], [11936, 11937, 12065], [11937, 12066, 12065], [11937, 11938, 12067], [11937, 12067, 12066], [11938, 11939, 12067], [11939, 12068, 12067], [11939, 11940, 12069], [11939, 12069, 12068], [11940, 11941, 12069], [11941, 12070, 12069], [11941, 11942, 12071], [11941, 12071, 12070], [11942, 11943, 12071], [11943, 12072, 12071], [11943, 11944, 12073], [11943, 12073, 12072], [11944, 11945, 12073], [11945, 12074, 12073], [11945, 11946, 12075], [11945, 12075, 12074], [11946, 11947, 12075], [11947, 12076, 12075], [11947, 11948, 12077], [11947, 12077, 12076], [11948, 11949, 12077], [11949, 12078, 12077], [11949, 11950, 12079], [11949, 12079, 12078], [11950, 11951, 12079], [11951, 12080, 12079], [11951, 11952, 12081], [11951, 12081, 12080], [11952, 11953, 12081], [11953, 12082, 12081], [11953, 11954, 12083], [11953, 12083, 12082], [11954, 11955, 12083], [11955, 12084, 12083], [11955, 11956, 12085], [11955, 12085, 12084], [11956, 11957, 12085], [11957, 12086, 12085], [11957, 11958, 12087], [11957, 12087, 12086], [11958, 11959, 12087], [11959, 12088, 12087], [11959, 11960, 12089], [11959, 12089, 12088], [11960, 11961, 12089], [11961, 12090, 12089], [11961, 11962, 12091], [11961, 12091, 12090], [11962, 11963, 12091], [11963, 12092, 12091], [11963, 11964, 12093], [11963, 12093, 12092], [11964, 11965, 12093], [11965, 12094, 12093], [11965, 11966, 12095], [11965, 12095, 12094], [11966, 11967, 12095], [11967, 12096, 12095], [11967, 11968, 12097], [11967, 12097, 12096], [11968, 11969, 12097], [11969, 12098, 12097], [11969, 11970, 12099], [11969, 12099, 12098], [11970, 11971, 12099], [11971, 12100, 12099], [11971, 11972, 12101], [11971, 12101, 12100], [11972, 11973, 12101], [11973, 12102, 12101], [11973, 11974, 12103], [11973, 12103, 12102], [11974, 11975, 12103], [11975, 12104, 12103], [11975, 11976, 12105], [11975, 12105, 12104], [11976, 11977, 12105], [11977, 12106, 12105], [11977, 11978, 12107], [11977, 12107, 12106], [11978, 11979, 12107], [11979, 12108, 12107], [11979, 11980, 12109], [11979, 12109, 12108], [11980, 11981, 12109], [11981, 12110, 12109], [11981, 11982, 12111], [11981, 12111, 12110], [11982, 11983, 12111], [11983, 12112, 12111], [11983, 11984, 12113], [11983, 12113, 12112], [11984, 11985, 12113], [11985, 12114, 12113], [11985, 11986, 12115], [11985, 12115, 12114], [11986, 11987, 12115], [11987, 12116, 12115], [11987, 11988, 12117], [11987, 12117, 12116], [11988, 11989, 12117], [11989, 12118, 12117], [11989, 11990, 12119], [11989, 12119, 12118], [11990, 11991, 12119], [11991, 12120, 12119], [11991, 11992, 12121], [11991, 12121, 12120], [11992, 11993, 12121], [11993, 12122, 12121], [11993, 11994, 12123], [11993, 12123, 12122], [11994, 11995, 12123], [11995, 12124, 12123], [11995, 11996, 12125], [11995, 12125, 12124], [11996, 11997, 12125], [11997, 12126, 12125], [11997, 11998, 12127], [11997, 12127, 12126], [11998, 11999, 
12127], [11999, 12128, 12127], [11999, 12000, 12129], [11999, 12129, 12128], [12000, 12001, 12129], [12001, 12130, 12129], [12001, 12002, 12131], [12001, 12131, 12130], [12002, 12003, 12131], [12003, 12132, 12131], [12003, 12004, 12133], [12003, 12133, 12132], [12004, 12005, 12133], [12005, 12134, 12133], [12005, 12006, 12135], [12005, 12135, 12134], [12006, 12007, 12135], [12007, 12136, 12135], [12007, 12008, 12137], [12007, 12137, 12136], [12008, 12009, 12137], [12009, 12138, 12137], [12009, 12010, 12139], [12009, 12139, 12138], [12010, 12011, 12139], [12011, 12140, 12139], [12011, 12012, 12141], [12011, 12141, 12140], [12012, 12013, 12141], [12013, 12142, 12141], [12013, 12014, 12143], [12013, 12143, 12142], [12014, 12015, 12143], [12015, 12144, 12143], [12015, 12016, 12145], [12015, 12145, 12144], [12016, 12017, 12145], [12017, 12146, 12145], [12017, 12018, 12147], [12017, 12147, 12146], [12018, 12019, 12147], [12019, 12148, 12147], [12019, 12020, 12149], [12019, 12149, 12148], [12020, 12021, 12149], [12021, 12150, 12149], [12021, 12022, 12151], [12021, 12151, 12150], [12022, 12023, 12151], [12023, 12152, 12151], [12023, 12024, 12153], [12023, 12153, 12152], [12024, 12025, 12153], [12025, 12154, 12153], [12025, 12026, 12155], [12025, 12155, 12154], [12026, 12027, 12155], [12027, 12156, 12155], [12027, 12028, 12157], [12027, 12157, 12156], [12028, 12029, 12157], [12029, 12158, 12157], [12029, 12030, 12159], [12029, 12159, 12158], [12030, 12031, 12159], [12031, 12160, 12159], [12031, 12032, 12161], [12031, 12161, 12160], [12032, 12033, 12161], [12033, 12162, 12161], [12033, 12034, 12163], [12033, 12163, 12162], [12034, 12035, 12163], [12035, 12164, 12163], [12035, 12036, 12165], [12035, 12165, 12164], [12036, 12037, 12165], [12037, 12166, 12165], [12037, 12038, 12167], [12037, 12167, 12166], [12038, 12039, 12167], [12039, 12168, 12167], [12039, 12040, 12169], [12039, 12169, 12168], [12040, 12041, 12169], [12041, 12170, 12169], [12041, 12042, 12171], [12041, 12171, 12170], [12043, 12044, 12173], [12043, 12173, 12172], [12044, 12045, 12173], [12045, 12174, 12173], [12045, 12046, 12175], [12045, 12175, 12174], [12046, 12047, 12175], [12047, 12176, 12175], [12047, 12048, 12177], [12047, 12177, 12176], [12048, 12049, 12177], [12049, 12178, 12177], [12049, 12050, 12179], [12049, 12179, 12178], [12050, 12051, 12179], [12051, 12180, 12179], [12051, 12052, 12181], [12051, 12181, 12180], [12052, 12053, 12181], [12053, 12182, 12181], [12053, 12054, 12183], [12053, 12183, 12182], [12054, 12055, 12183], [12055, 12184, 12183], [12055, 12056, 12185], [12055, 12185, 12184], [12056, 12057, 12185], [12057, 12186, 12185], [12057, 12058, 12187], [12057, 12187, 12186], [12058, 12059, 12187], [12059, 12188, 12187], [12059, 12060, 12189], [12059, 12189, 12188], [12060, 12061, 12189], [12061, 12190, 12189], [12061, 12062, 12191], [12061, 12191, 12190], [12062, 12063, 12191], [12063, 12192, 12191], [12063, 12064, 12193], [12063, 12193, 12192], [12064, 12065, 12193], [12065, 12194, 12193], [12065, 12066, 12195], [12065, 12195, 12194], [12066, 12067, 12195], [12067, 12196, 12195], [12067, 12068, 12197], [12067, 12197, 12196], [12068, 12069, 12197], [12069, 12198, 12197], [12069, 12070, 12199], [12069, 12199, 12198], [12070, 12071, 12199], [12071, 12200, 12199], [12071, 12072, 12201], [12071, 12201, 12200], [12072, 12073, 12201], [12073, 12202, 12201], [12073, 12074, 12203], [12073, 12203, 12202], [12074, 12075, 12203], [12075, 12204, 12203], [12075, 12076, 12205], [12075, 12205, 12204], [12076, 12077, 12205], 
[12077, 12206, 12205], [12077, 12078, 12207], [12077, 12207, 12206], [12078, 12079, 12207], [12079, 12208, 12207], [12079, 12080, 12209], [12079, 12209, 12208], [12080, 12081, 12209], [12081, 12210, 12209], [12081, 12082, 12211], [12081, 12211, 12210], [12082, 12083, 12211], [12083, 12212, 12211], [12083, 12084, 12213], [12083, 12213, 12212], [12084, 12085, 12213], [12085, 12214, 12213], [12085, 12086, 12215], [12085, 12215, 12214], [12086, 12087, 12215], [12087, 12216, 12215], [12087, 12088, 12217], [12087, 12217, 12216], [12088, 12089, 12217], [12089, 12218, 12217], [12089, 12090, 12219], [12089, 12219, 12218], [12090, 12091, 12219], [12091, 12220, 12219], [12091, 12092, 12221], [12091, 12221, 12220], [12092, 12093, 12221], [12093, 12222, 12221], [12093, 12094, 12223], [12093, 12223, 12222], [12094, 12095, 12223], [12095, 12224, 12223], [12095, 12096, 12225], [12095, 12225, 12224], [12096, 12097, 12225], [12097, 12226, 12225], [12097, 12098, 12227], [12097, 12227, 12226], [12098, 12099, 12227], [12099, 12228, 12227], [12099, 12100, 12229], [12099, 12229, 12228], [12100, 12101, 12229], [12101, 12230, 12229], [12101, 12102, 12231], [12101, 12231, 12230], [12102, 12103, 12231], [12103, 12232, 12231], [12103, 12104, 12233], [12103, 12233, 12232], [12104, 12105, 12233], [12105, 12234, 12233], [12105, 12106, 12235], [12105, 12235, 12234], [12106, 12107, 12235], [12107, 12236, 12235], [12107, 12108, 12237], [12107, 12237, 12236], [12108, 12109, 12237], [12109, 12238, 12237], [12109, 12110, 12239], [12109, 12239, 12238], [12110, 12111, 12239], [12111, 12240, 12239], [12111, 12112, 12241], [12111, 12241, 12240], [12112, 12113, 12241], [12113, 12242, 12241], [12113, 12114, 12243], [12113, 12243, 12242], [12114, 12115, 12243], [12115, 12244, 12243], [12115, 12116, 12245], [12115, 12245, 12244], [12116, 12117, 12245], [12117, 12246, 12245], [12117, 12118, 12247], [12117, 12247, 12246], [12118, 12119, 12247], [12119, 12248, 12247], [12119, 12120, 12249], [12119, 12249, 12248], [12120, 12121, 12249], [12121, 12250, 12249], [12121, 12122, 12251], [12121, 12251, 12250], [12122, 12123, 12251], [12123, 12252, 12251], [12123, 12124, 12253], [12123, 12253, 12252], [12124, 12125, 12253], [12125, 12254, 12253], [12125, 12126, 12255], [12125, 12255, 12254], [12126, 12127, 12255], [12127, 12256, 12255], [12127, 12128, 12257], [12127, 12257, 12256], [12128, 12129, 12257], [12129, 12258, 12257], [12129, 12130, 12259], [12129, 12259, 12258], [12130, 12131, 12259], [12131, 12260, 12259], [12131, 12132, 12261], [12131, 12261, 12260], [12132, 12133, 12261], [12133, 12262, 12261], [12133, 12134, 12263], [12133, 12263, 12262], [12134, 12135, 12263], [12135, 12264, 12263], [12135, 12136, 12265], [12135, 12265, 12264], [12136, 12137, 12265], [12137, 12266, 12265], [12137, 12138, 12267], [12137, 12267, 12266], [12138, 12139, 12267], [12139, 12268, 12267], [12139, 12140, 12269], [12139, 12269, 12268], [12140, 12141, 12269], [12141, 12270, 12269], [12141, 12142, 12271], [12141, 12271, 12270], [12142, 12143, 12271], [12143, 12272, 12271], [12143, 12144, 12273], [12143, 12273, 12272], [12144, 12145, 12273], [12145, 12274, 12273], [12145, 12146, 12275], [12145, 12275, 12274], [12146, 12147, 12275], [12147, 12276, 12275], [12147, 12148, 12277], [12147, 12277, 12276], [12148, 12149, 12277], [12149, 12278, 12277], [12149, 12150, 12279], [12149, 12279, 12278], [12150, 12151, 12279], [12151, 12280, 12279], [12151, 12152, 12281], [12151, 12281, 12280], [12152, 12153, 12281], [12153, 12282, 12281], [12153, 12154, 12283], [12153, 
12283, 12282], [12154, 12155, 12283], [12155, 12284, 12283], [12155, 12156, 12285], [12155, 12285, 12284], [12156, 12157, 12285], [12157, 12286, 12285], [12157, 12158, 12287], [12157, 12287, 12286], [12158, 12159, 12287], [12159, 12288, 12287], [12159, 12160, 12289], [12159, 12289, 12288], [12160, 12161, 12289], [12161, 12290, 12289], [12161, 12162, 12291], [12161, 12291, 12290], [12162, 12163, 12291], [12163, 12292, 12291], [12163, 12164, 12293], [12163, 12293, 12292], [12164, 12165, 12293], [12165, 12294, 12293], [12165, 12166, 12295], [12165, 12295, 12294], [12166, 12167, 12295], [12167, 12296, 12295], [12167, 12168, 12297], [12167, 12297, 12296], [12168, 12169, 12297], [12169, 12298, 12297], [12169, 12170, 12299], [12169, 12299, 12298], [12170, 12171, 12299], [12171, 12300, 12299], [12172, 12173, 12301], [12173, 12302, 12301], [12173, 12174, 12303], [12173, 12303, 12302], [12174, 12175, 12303], [12175, 12304, 12303], [12175, 12176, 12305], [12175, 12305, 12304], [12176, 12177, 12305], [12177, 12306, 12305], [12177, 12178, 12307], [12177, 12307, 12306], [12178, 12179, 12307], [12179, 12308, 12307], [12179, 12180, 12309], [12179, 12309, 12308], [12180, 12181, 12309], [12181, 12310, 12309], [12181, 12182, 12311], [12181, 12311, 12310], [12182, 12183, 12311], [12183, 12312, 12311], [12183, 12184, 12313], [12183, 12313, 12312], [12184, 12185, 12313], [12185, 12314, 12313], [12185, 12186, 12315], [12185, 12315, 12314], [12186, 12187, 12315], [12187, 12316, 12315], [12187, 12188, 12317], [12187, 12317, 12316], [12188, 12189, 12317], [12189, 12318, 12317], [12189, 12190, 12319], [12189, 12319, 12318], [12190, 12191, 12319], [12191, 12320, 12319], [12191, 12192, 12321], [12191, 12321, 12320], [12192, 12193, 12321], [12193, 12322, 12321], [12193, 12194, 12323], [12193, 12323, 12322], [12194, 12195, 12323], [12195, 12324, 12323], [12195, 12196, 12325], [12195, 12325, 12324], [12196, 12197, 12325], [12197, 12326, 12325], [12197, 12198, 12327], [12197, 12327, 12326], [12198, 12199, 12327], [12199, 12328, 12327], [12199, 12200, 12329], [12199, 12329, 12328], [12200, 12201, 12329], [12201, 12330, 12329], [12201, 12202, 12331], [12201, 12331, 12330], [12202, 12203, 12331], [12203, 12332, 12331], [12203, 12204, 12333], [12203, 12333, 12332], [12204, 12205, 12333], [12205, 12334, 12333], [12205, 12206, 12335], [12205, 12335, 12334], [12206, 12207, 12335], [12207, 12336, 12335], [12207, 12208, 12337], [12207, 12337, 12336], [12208, 12209, 12337], [12209, 12338, 12337], [12209, 12210, 12339], [12209, 12339, 12338], [12210, 12211, 12339], [12211, 12340, 12339], [12211, 12212, 12341], [12211, 12341, 12340], [12212, 12213, 12341], [12213, 12342, 12341], [12213, 12214, 12343], [12213, 12343, 12342], [12214, 12215, 12343], [12215, 12344, 12343], [12215, 12216, 12345], [12215, 12345, 12344], [12216, 12217, 12345], [12217, 12346, 12345], [12217, 12218, 12347], [12217, 12347, 12346], [12218, 12219, 12347], [12219, 12348, 12347], [12219, 12220, 12349], [12219, 12349, 12348], [12220, 12221, 12349], [12221, 12350, 12349], [12221, 12222, 12351], [12221, 12351, 12350], [12222, 12223, 12351], [12223, 12352, 12351], [12223, 12224, 12353], [12223, 12353, 12352], [12224, 12225, 12353], [12225, 12354, 12353], [12225, 12226, 12355], [12225, 12355, 12354], [12226, 12227, 12355], [12227, 12356, 12355], [12227, 12228, 12357], [12227, 12357, 12356], [12228, 12229, 12357], [12229, 12358, 12357], [12229, 12230, 12359], [12229, 12359, 12358], [12230, 12231, 12359], [12231, 12360, 12359], [12231, 12232, 12361], [12231, 12361, 
12360], [12232, 12233, 12361], [12233, 12362, 12361], [12233, 12234, 12363], [12233, 12363, 12362], [12234, 12235, 12363], [12235, 12364, 12363], [12235, 12236, 12365], [12235, 12365, 12364], [12236, 12237, 12365], [12237, 12366, 12365], [12237, 12238, 12367], [12237, 12367, 12366], [12238, 12239, 12367], [12239, 12368, 12367], [12239, 12240, 12369], [12239, 12369, 12368], [12240, 12241, 12369], [12241, 12370, 12369], [12241, 12242, 12371], [12241, 12371, 12370], [12242, 12243, 12371], [12243, 12372, 12371], [12243, 12244, 12373], [12243, 12373, 12372], [12244, 12245, 12373], [12245, 12374, 12373], [12245, 12246, 12375], [12245, 12375, 12374], [12246, 12247, 12375], [12247, 12376, 12375], [12247, 12248, 12377], [12247, 12377, 12376], [12248, 12249, 12377], [12249, 12378, 12377], [12249, 12250, 12379], [12249, 12379, 12378], [12250, 12251, 12379], [12251, 12380, 12379], [12251, 12252, 12381], [12251, 12381, 12380], [12252, 12253, 12381], [12253, 12382, 12381], [12253, 12254, 12383], [12253, 12383, 12382], [12254, 12255, 12383], [12255, 12384, 12383], [12255, 12256, 12385], [12255, 12385, 12384], [12256, 12257, 12385], [12257, 12386, 12385], [12257, 12258, 12387], [12257, 12387, 12386], [12258, 12259, 12387], [12259, 12388, 12387], [12259, 12260, 12389], [12259, 12389, 12388], [12260, 12261, 12389], [12261, 12390, 12389], [12261, 12262, 12391], [12261, 12391, 12390], [12262, 12263, 12391], [12263, 12392, 12391], [12263, 12264, 12393], [12263, 12393, 12392], [12264, 12265, 12393], [12265, 12394, 12393], [12265, 12266, 12395], [12265, 12395, 12394], [12266, 12267, 12395], [12267, 12396, 12395], [12267, 12268, 12397], [12267, 12397, 12396], [12268, 12269, 12397], [12269, 12398, 12397], [12269, 12270, 12399], [12269, 12399, 12398], [12270, 12271, 12399], [12271, 12400, 12399], [12271, 12272, 12401], [12271, 12401, 12400], [12272, 12273, 12401], [12273, 12402, 12401], [12273, 12274, 12403], [12273, 12403, 12402], [12274, 12275, 12403], [12275, 12404, 12403], [12275, 12276, 12405], [12275, 12405, 12404], [12276, 12277, 12405], [12277, 12406, 12405], [12277, 12278, 12407], [12277, 12407, 12406], [12278, 12279, 12407], [12279, 12408, 12407], [12279, 12280, 12409], [12279, 12409, 12408], [12280, 12281, 12409], [12281, 12410, 12409], [12281, 12282, 12411], [12281, 12411, 12410], [12282, 12283, 12411], [12283, 12412, 12411], [12283, 12284, 12413], [12283, 12413, 12412], [12284, 12285, 12413], [12285, 12414, 12413], [12285, 12286, 12415], [12285, 12415, 12414], [12286, 12287, 12415], [12287, 12416, 12415], [12287, 12288, 12417], [12287, 12417, 12416], [12288, 12289, 12417], [12289, 12418, 12417], [12289, 12290, 12419], [12289, 12419, 12418], [12290, 12291, 12419], [12291, 12420, 12419], [12291, 12292, 12421], [12291, 12421, 12420], [12292, 12293, 12421], [12293, 12422, 12421], [12293, 12294, 12423], [12293, 12423, 12422], [12294, 12295, 12423], [12295, 12424, 12423], [12295, 12296, 12425], [12295, 12425, 12424], [12296, 12297, 12425], [12297, 12426, 12425], [12297, 12298, 12427], [12297, 12427, 12426], [12298, 12299, 12427], [12299, 12428, 12427], [12299, 12300, 12429], [12299, 12429, 12428], [12301, 12302, 12431], [12301, 12431, 12430], [12302, 12303, 12431], [12303, 12432, 12431], [12303, 12304, 12433], [12303, 12433, 12432], [12304, 12305, 12433], [12305, 12434, 12433], [12305, 12306, 12435], [12305, 12435, 12434], [12306, 12307, 12435], [12307, 12436, 12435], [12307, 12308, 12437], [12307, 12437, 12436], [12308, 12309, 12437], [12309, 12438, 12437], [12309, 12310, 12439], [12309, 12439, 12438], 
[12310, 12311, 12439], [12311, 12440, 12439], [12311, 12312, 12441], [12311, 12441, 12440], [12312, 12313, 12441], [12313, 12442, 12441], [12313, 12314, 12443], [12313, 12443, 12442], [12314, 12315, 12443], [12315, 12444, 12443], [12315, 12316, 12445], [12315, 12445, 12444], [12316, 12317, 12445], [12317, 12446, 12445], [12317, 12318, 12447], [12317, 12447, 12446], [12318, 12319, 12447], [12319, 12448, 12447], [12319, 12320, 12449], [12319, 12449, 12448], [12320, 12321, 12449], [12321, 12450, 12449], [12321, 12322, 12451], [12321, 12451, 12450], [12322, 12323, 12451], [12323, 12452, 12451], [12323, 12324, 12453], [12323, 12453, 12452], [12324, 12325, 12453], [12325, 12454, 12453], [12325, 12326, 12455], [12325, 12455, 12454], [12326, 12327, 12455], [12327, 12456, 12455], [12327, 12328, 12457], [12327, 12457, 12456], [12328, 12329, 12457], [12329, 12458, 12457], [12329, 12330, 12459], [12329, 12459, 12458], [12330, 12331, 12459], [12331, 12460, 12459], [12331, 12332, 12461], [12331, 12461, 12460], [12332, 12333, 12461], [12333, 12462, 12461], [12333, 12334, 12463], [12333, 12463, 12462], [12334, 12335, 12463], [12335, 12464, 12463], [12335, 12336, 12465], [12335, 12465, 12464], [12336, 12337, 12465], [12337, 12466, 12465], [12337, 12338, 12467], [12337, 12467, 12466], [12338, 12339, 12467], [12339, 12468, 12467], [12339, 12340, 12469], [12339, 12469, 12468], [12340, 12341, 12469], [12341, 12470, 12469], [12341, 12342, 12471], [12341, 12471, 12470], [12342, 12343, 12471], [12343, 12472, 12471], [12343, 12344, 12473], [12343, 12473, 12472], [12344, 12345, 12473], [12345, 12474, 12473], [12345, 12346, 12475], [12345, 12475, 12474], [12346, 12347, 12475], [12347, 12476, 12475], [12347, 12348, 12477], [12347, 12477, 12476], [12348, 12349, 12477], [12349, 12478, 12477], [12349, 12350, 12479], [12349, 12479, 12478], [12350, 12351, 12479], [12351, 12480, 12479], [12351, 12352, 12481], [12351, 12481, 12480], [12352, 12353, 12481], [12353, 12482, 12481], [12353, 12354, 12483], [12353, 12483, 12482], [12354, 12355, 12483], [12355, 12484, 12483], [12355, 12356, 12485], [12355, 12485, 12484], [12356, 12357, 12485], [12357, 12486, 12485], [12357, 12358, 12487], [12357, 12487, 12486], [12358, 12359, 12487], [12359, 12488, 12487], [12359, 12360, 12489], [12359, 12489, 12488], [12360, 12361, 12489], [12361, 12490, 12489], [12361, 12362, 12491], [12361, 12491, 12490], [12362, 12363, 12491], [12363, 12492, 12491], [12363, 12364, 12493], [12363, 12493, 12492], [12364, 12365, 12493], [12365, 12494, 12493], [12365, 12366, 12495], [12365, 12495, 12494], [12366, 12367, 12495], [12367, 12496, 12495], [12367, 12368, 12497], [12367, 12497, 12496], [12368, 12369, 12497], [12369, 12498, 12497], [12369, 12370, 12499], [12369, 12499, 12498], [12370, 12371, 12499], [12371, 12500, 12499], [12371, 12372, 12501], [12371, 12501, 12500], [12372, 12373, 12501], [12373, 12502, 12501], [12373, 12374, 12503], [12373, 12503, 12502], [12374, 12375, 12503], [12375, 12504, 12503], [12375, 12376, 12505], [12375, 12505, 12504], [12376, 12377, 12505], [12377, 12506, 12505], [12377, 12378, 12507], [12377, 12507, 12506], [12378, 12379, 12507], [12379, 12508, 12507], [12379, 12380, 12509], [12379, 12509, 12508], [12380, 12381, 12509], [12381, 12510, 12509], [12381, 12382, 12511], [12381, 12511, 12510], [12382, 12383, 12511], [12383, 12512, 12511], [12383, 12384, 12513], [12383, 12513, 12512], [12384, 12385, 12513], [12385, 12514, 12513], [12385, 12386, 12515], [12385, 12515, 12514], [12386, 12387, 12515], [12387, 12516, 12515], [12387, 
12388, 12517], [12387, 12517, 12516], [12388, 12389, 12517], [12389, 12518, 12517], [12389, 12390, 12519], [12389, 12519, 12518], [12390, 12391, 12519], [12391, 12520, 12519], [12391, 12392, 12521], [12391, 12521, 12520], [12392, 12393, 12521], [12393, 12522, 12521], [12393, 12394, 12523], [12393, 12523, 12522], [12394, 12395, 12523], [12395, 12524, 12523], [12395, 12396, 12525], [12395, 12525, 12524], [12396, 12397, 12525], [12397, 12526, 12525], [12397, 12398, 12527], [12397, 12527, 12526], [12398, 12399, 12527], [12399, 12528, 12527], [12399, 12400, 12529], [12399, 12529, 12528], [12400, 12401, 12529], [12401, 12530, 12529], [12401, 12402, 12531], [12401, 12531, 12530], [12402, 12403, 12531], [12403, 12532, 12531], [12403, 12404, 12533], [12403, 12533, 12532], [12404, 12405, 12533], [12405, 12534, 12533], [12405, 12406, 12535], [12405, 12535, 12534], [12406, 12407, 12535], [12407, 12536, 12535], [12407, 12408, 12537], [12407, 12537, 12536], [12408, 12409, 12537], [12409, 12538, 12537], [12409, 12410, 12539], [12409, 12539, 12538], [12410, 12411, 12539], [12411, 12540, 12539], [12411, 12412, 12541], [12411, 12541, 12540], [12412, 12413, 12541], [12413, 12542, 12541], [12413, 12414, 12543], [12413, 12543, 12542], [12414, 12415, 12543], [12415, 12544, 12543], [12415, 12416, 12545], [12415, 12545, 12544], [12416, 12417, 12545], [12417, 12546, 12545], [12417, 12418, 12547], [12417, 12547, 12546], [12418, 12419, 12547], [12419, 12548, 12547], [12419, 12420, 12549], [12419, 12549, 12548], [12420, 12421, 12549], [12421, 12550, 12549], [12421, 12422, 12551], [12421, 12551, 12550], [12422, 12423, 12551], [12423, 12552, 12551], [12423, 12424, 12553], [12423, 12553, 12552], [12424, 12425, 12553], [12425, 12554, 12553], [12425, 12426, 12555], [12425, 12555, 12554], [12426, 12427, 12555], [12427, 12556, 12555], [12427, 12428, 12557], [12427, 12557, 12556], [12428, 12429, 12557], [12429, 12558, 12557], [12430, 12431, 12559], [12431, 12560, 12559], [12431, 12432, 12561], [12431, 12561, 12560], [12432, 12433, 12561], [12433, 12562, 12561], [12433, 12434, 12563], [12433, 12563, 12562], [12434, 12435, 12563], [12435, 12564, 12563], [12435, 12436, 12565], [12435, 12565, 12564], [12436, 12437, 12565], [12437, 12566, 12565], [12437, 12438, 12567], [12437, 12567, 12566], [12438, 12439, 12567], [12439, 12568, 12567], [12439, 12440, 12569], [12439, 12569, 12568], [12440, 12441, 12569], [12441, 12570, 12569], [12441, 12442, 12571], [12441, 12571, 12570], [12442, 12443, 12571], [12443, 12572, 12571], [12443, 12444, 12573], [12443, 12573, 12572], [12444, 12445, 12573], [12445, 12574, 12573], [12445, 12446, 12575], [12445, 12575, 12574], [12446, 12447, 12575], [12447, 12576, 12575], [12447, 12448, 12577], [12447, 12577, 12576], [12448, 12449, 12577], [12449, 12578, 12577], [12449, 12450, 12579], [12449, 12579, 12578], [12450, 12451, 12579], [12451, 12580, 12579], [12451, 12452, 12581], [12451, 12581, 12580], [12452, 12453, 12581], [12453, 12582, 12581], [12453, 12454, 12583], [12453, 12583, 12582], [12454, 12455, 12583], [12455, 12584, 12583], [12455, 12456, 12585], [12455, 12585, 12584], [12456, 12457, 12585], [12457, 12586, 12585], [12457, 12458, 12587], [12457, 12587, 12586], [12458, 12459, 12587], [12459, 12588, 12587], [12459, 12460, 12589], [12459, 12589, 12588], [12460, 12461, 12589], [12461, 12590, 12589], [12461, 12462, 12591], [12461, 12591, 12590], [12462, 12463, 12591], [12463, 12592, 12591], [12463, 12464, 12593], [12463, 12593, 12592], [12464, 12465, 12593], [12465, 12594, 12593], [12465, 12466, 
12595], [12465, 12595, 12594], [12466, 12467, 12595], [12467, 12596, 12595], [12467, 12468, 12597], [12467, 12597, 12596], [12468, 12469, 12597], [12469, 12598, 12597], [12469, 12470, 12599], [12469, 12599, 12598], [12470, 12471, 12599], [12471, 12600, 12599], [12471, 12472, 12601], [12471, 12601, 12600], [12472, 12473, 12601], [12473, 12602, 12601], [12473, 12474, 12603], [12473, 12603, 12602], [12474, 12475, 12603], [12475, 12604, 12603], [12475, 12476, 12605], [12475, 12605, 12604], [12476, 12477, 12605], [12477, 12606, 12605], [12477, 12478, 12607], [12477, 12607, 12606], [12478, 12479, 12607], [12479, 12608, 12607], [12479, 12480, 12609], [12479, 12609, 12608], [12480, 12481, 12609], [12481, 12610, 12609], [12481, 12482, 12611], [12481, 12611, 12610], [12482, 12483, 12611], [12483, 12612, 12611], [12483, 12484, 12613], [12483, 12613, 12612], [12484, 12485, 12613], [12485, 12614, 12613], [12485, 12486, 12615], [12485, 12615, 12614], [12486, 12487, 12615], [12487, 12616, 12615], [12487, 12488, 12617], [12487, 12617, 12616], [12488, 12489, 12617], [12489, 12618, 12617], [12489, 12490, 12619], [12489, 12619, 12618], [12490, 12491, 12619], [12491, 12620, 12619], [12491, 12492, 12621], [12491, 12621, 12620], [12492, 12493, 12621], [12493, 12622, 12621], [12493, 12494, 12623], [12493, 12623, 12622], [12494, 12495, 12623], [12495, 12624, 12623], [12495, 12496, 12625], [12495, 12625, 12624], [12496, 12497, 12625], [12497, 12626, 12625], [12497, 12498, 12627], [12497, 12627, 12626], [12498, 12499, 12627], [12499, 12628, 12627], [12499, 12500, 12629], [12499, 12629, 12628], [12500, 12501, 12629], [12501, 12630, 12629], [12501, 12502, 12631], [12501, 12631, 12630], [12502, 12503, 12631], [12503, 12632, 12631], [12503, 12504, 12633], [12503, 12633, 12632], [12504, 12505, 12633], [12505, 12634, 12633], [12505, 12506, 12635], [12505, 12635, 12634], [12506, 12507, 12635], [12507, 12636, 12635], [12507, 12508, 12637], [12507, 12637, 12636], [12508, 12509, 12637], [12509, 12638, 12637], [12509, 12510, 12639], [12509, 12639, 12638], [12510, 12511, 12639], [12511, 12640, 12639], [12511, 12512, 12641], [12511, 12641, 12640], [12512, 12513, 12641], [12513, 12642, 12641], [12513, 12514, 12643], [12513, 12643, 12642], [12514, 12515, 12643], [12515, 12644, 12643], [12515, 12516, 12645], [12515, 12645, 12644], [12516, 12517, 12645], [12517, 12646, 12645], [12517, 12518, 12647], [12517, 12647, 12646], [12518, 12519, 12647], [12519, 12648, 12647], [12519, 12520, 12649], [12519, 12649, 12648], [12520, 12521, 12649], [12521, 12650, 12649], [12521, 12522, 12651], [12521, 12651, 12650], [12522, 12523, 12651], [12523, 12652, 12651], [12523, 12524, 12653], [12523, 12653, 12652], [12524, 12525, 12653], [12525, 12654, 12653], [12525, 12526, 12655], [12525, 12655, 12654], [12526, 12527, 12655], [12527, 12656, 12655], [12527, 12528, 12657], [12527, 12657, 12656], [12528, 12529, 12657], [12529, 12658, 12657], [12529, 12530, 12659], [12529, 12659, 12658], [12530, 12531, 12659], [12531, 12660, 12659], [12531, 12532, 12661], [12531, 12661, 12660], [12532, 12533, 12661], [12533, 12662, 12661], [12533, 12534, 12663], [12533, 12663, 12662], [12534, 12535, 12663], [12535, 12664, 12663], [12535, 12536, 12665], [12535, 12665, 12664], [12536, 12537, 12665], [12537, 12666, 12665], [12537, 12538, 12667], [12537, 12667, 12666], [12538, 12539, 12667], [12539, 12668, 12667], [12539, 12540, 12669], [12539, 12669, 12668], [12540, 12541, 12669], [12541, 12670, 12669], [12541, 12542, 12671], [12541, 12671, 12670], [12542, 12543, 12671], 
[12543, 12672, 12671], [12543, 12544, 12673], [12543, 12673, 12672], [12544, 12545, 12673], [12545, 12674, 12673], [12545, 12546, 12675], [12545, 12675, 12674], [12546, 12547, 12675], [12547, 12676, 12675], [12547, 12548, 12677], [12547, 12677, 12676], [12548, 12549, 12677], [12549, 12678, 12677], [12549, 12550, 12679], [12549, 12679, 12678], [12550, 12551, 12679], [12551, 12680, 12679], [12551, 12552, 12681], [12551, 12681, 12680], [12552, 12553, 12681], [12553, 12682, 12681], [12553, 12554, 12683], [12553, 12683, 12682], [12554, 12555, 12683], [12555, 12684, 12683], [12555, 12556, 12685], [12555, 12685, 12684], [12556, 12557, 12685], [12557, 12686, 12685], [12557, 12558, 12687], [12557, 12687, 12686], [12559, 12560, 12689], [12559, 12689, 12688], [12560, 12561, 12689], [12561, 12690, 12689], [12561, 12562, 12691], [12561, 12691, 12690], [12562, 12563, 12691], [12563, 12692, 12691], [12563, 12564, 12693], [12563, 12693, 12692], [12564, 12565, 12693], [12565, 12694, 12693], [12565, 12566, 12695], [12565, 12695, 12694], [12566, 12567, 12695], [12567, 12696, 12695], [12567, 12568, 12697], [12567, 12697, 12696], [12568, 12569, 12697], [12569, 12698, 12697], [12569, 12570, 12699], [12569, 12699, 12698], [12570, 12571, 12699], [12571, 12700, 12699], [12571, 12572, 12701], [12571, 12701, 12700], [12572, 12573, 12701], [12573, 12702, 12701], [12573, 12574, 12703], [12573, 12703, 12702], [12574, 12575, 12703], [12575, 12704, 12703], [12575, 12576, 12705], [12575, 12705, 12704], [12576, 12577, 12705], [12577, 12706, 12705], [12577, 12578, 12707], [12577, 12707, 12706], [12578, 12579, 12707], [12579, 12708, 12707], [12579, 12580, 12709], [12579, 12709, 12708], [12580, 12581, 12709], [12581, 12710, 12709], [12581, 12582, 12711], [12581, 12711, 12710], [12582, 12583, 12711], [12583, 12712, 12711], [12583, 12584, 12713], [12583, 12713, 12712], [12584, 12585, 12713], [12585, 12714, 12713], [12585, 12586, 12715], [12585, 12715, 12714], [12586, 12587, 12715], [12587, 12716, 12715], [12587, 12588, 12717], [12587, 12717, 12716], [12588, 12589, 12717], [12589, 12718, 12717], [12589, 12590, 12719], [12589, 12719, 12718], [12590, 12591, 12719], [12591, 12720, 12719], [12591, 12592, 12721], [12591, 12721, 12720], [12592, 12593, 12721], [12593, 12722, 12721], [12593, 12594, 12723], [12593, 12723, 12722], [12594, 12595, 12723], [12595, 12724, 12723], [12595, 12596, 12725], [12595, 12725, 12724], [12596, 12597, 12725], [12597, 12726, 12725], [12597, 12598, 12727], [12597, 12727, 12726], [12598, 12599, 12727], [12599, 12728, 12727], [12599, 12600, 12729], [12599, 12729, 12728], [12600, 12601, 12729], [12601, 12730, 12729], [12601, 12602, 12731], [12601, 12731, 12730], [12602, 12603, 12731], [12603, 12732, 12731], [12603, 12604, 12733], [12603, 12733, 12732], [12604, 12605, 12733], [12605, 12734, 12733], [12605, 12606, 12735], [12605, 12735, 12734], [12606, 12607, 12735], [12607, 12736, 12735], [12607, 12608, 12737], [12607, 12737, 12736], [12608, 12609, 12737], [12609, 12738, 12737], [12609, 12610, 12739], [12609, 12739, 12738], [12610, 12611, 12739], [12611, 12740, 12739], [12611, 12612, 12741], [12611, 12741, 12740], [12612, 12613, 12741], [12613, 12742, 12741], [12613, 12614, 12743], [12613, 12743, 12742], [12614, 12615, 12743], [12615, 12744, 12743], [12615, 12616, 12745], [12615, 12745, 12744], [12616, 12617, 12745], [12617, 12746, 12745], [12617, 12618, 12747], [12617, 12747, 12746], [12618, 12619, 12747], [12619, 12748, 12747], [12619, 12620, 12749], [12619, 12749, 12748], [12620, 12621, 12749], [12621, 
12750, 12749], [12621, 12622, 12751], [12621, 12751, 12750], [12622, 12623, 12751], [12623, 12752, 12751], [12623, 12624, 12753], [12623, 12753, 12752], [12624, 12625, 12753], [12625, 12754, 12753], [12625, 12626, 12755], [12625, 12755, 12754], [12626, 12627, 12755], [12627, 12756, 12755], [12627, 12628, 12757], [12627, 12757, 12756], [12628, 12629, 12757], [12629, 12758, 12757], [12629, 12630, 12759], [12629, 12759, 12758], [12630, 12631, 12759], [12631, 12760, 12759], [12631, 12632, 12761], [12631, 12761, 12760], [12632, 12633, 12761], [12633, 12762, 12761], [12633, 12634, 12763], [12633, 12763, 12762], [12634, 12635, 12763], [12635, 12764, 12763], [12635, 12636, 12765], [12635, 12765, 12764], [12636, 12637, 12765], [12637, 12766, 12765], [12637, 12638, 12767], [12637, 12767, 12766], [12638, 12639, 12767], [12639, 12768, 12767], [12639, 12640, 12769], [12639, 12769, 12768], [12640, 12641, 12769], [12641, 12770, 12769], [12641, 12642, 12771], [12641, 12771, 12770], [12642, 12643, 12771], [12643, 12772, 12771], [12643, 12644, 12773], [12643, 12773, 12772], [12644, 12645, 12773], [12645, 12774, 12773], [12645, 12646, 12775], [12645, 12775, 12774], [12646, 12647, 12775], [12647, 12776, 12775], [12647, 12648, 12777], [12647, 12777, 12776], [12648, 12649, 12777], [12649, 12778, 12777], [12649, 12650, 12779], [12649, 12779, 12778], [12650, 12651, 12779], [12651, 12780, 12779], [12651, 12652, 12781], [12651, 12781, 12780], [12652, 12653, 12781], [12653, 12782, 12781], [12653, 12654, 12783], [12653, 12783, 12782], [12654, 12655, 12783], [12655, 12784, 12783], [12655, 12656, 12785], [12655, 12785, 12784], [12656, 12657, 12785], [12657, 12786, 12785], [12657, 12658, 12787], [12657, 12787, 12786], [12658, 12659, 12787], [12659, 12788, 12787], [12659, 12660, 12789], [12659, 12789, 12788], [12660, 12661, 12789], [12661, 12790, 12789], [12661, 12662, 12791], [12661, 12791, 12790], [12662, 12663, 12791], [12663, 12792, 12791], [12663, 12664, 12793], [12663, 12793, 12792], [12664, 12665, 12793], [12665, 12794, 12793], [12665, 12666, 12795], [12665, 12795, 12794], [12666, 12667, 12795], [12667, 12796, 12795], [12667, 12668, 12797], [12667, 12797, 12796], [12668, 12669, 12797], [12669, 12798, 12797], [12669, 12670, 12799], [12669, 12799, 12798], [12670, 12671, 12799], [12671, 12800, 12799], [12671, 12672, 12801], [12671, 12801, 12800], [12672, 12673, 12801], [12673, 12802, 12801], [12673, 12674, 12803], [12673, 12803, 12802], [12674, 12675, 12803], [12675, 12804, 12803], [12675, 12676, 12805], [12675, 12805, 12804], [12676, 12677, 12805], [12677, 12806, 12805], [12677, 12678, 12807], [12677, 12807, 12806], [12678, 12679, 12807], [12679, 12808, 12807], [12679, 12680, 12809], [12679, 12809, 12808], [12680, 12681, 12809], [12681, 12810, 12809], [12681, 12682, 12811], [12681, 12811, 12810], [12682, 12683, 12811], [12683, 12812, 12811], [12683, 12684, 12813], [12683, 12813, 12812], [12684, 12685, 12813], [12685, 12814, 12813], [12685, 12686, 12815], [12685, 12815, 12814], [12686, 12687, 12815], [12687, 12816, 12815], [12688, 12689, 12817], [12689, 12818, 12817], [12689, 12690, 12819], [12689, 12819, 12818], [12690, 12691, 12819], [12691, 12820, 12819], [12691, 12692, 12821], [12691, 12821, 12820], [12692, 12693, 12821], [12693, 12822, 12821], [12693, 12694, 12823], [12693, 12823, 12822], [12694, 12695, 12823], [12695, 12824, 12823], [12695, 12696, 12825], [12695, 12825, 12824], [12696, 12697, 12825], [12697, 12826, 12825], [12697, 12698, 12827], [12697, 12827, 12826], [12698, 12699, 12827], [12699, 12828, 
12827], [12699, 12700, 12829], [12699, 12829, 12828], [12700, 12701, 12829], [12701, 12830, 12829], [12701, 12702, 12831], [12701, 12831, 12830], [12702, 12703, 12831], [12703, 12832, 12831], [12703, 12704, 12833], [12703, 12833, 12832], [12704, 12705, 12833], [12705, 12834, 12833], [12705, 12706, 12835], [12705, 12835, 12834], [12706, 12707, 12835], [12707, 12836, 12835], [12707, 12708, 12837], [12707, 12837, 12836], [12708, 12709, 12837], [12709, 12838, 12837], [12709, 12710, 12839], [12709, 12839, 12838], [12710, 12711, 12839], [12711, 12840, 12839], [12711, 12712, 12841], [12711, 12841, 12840], [12712, 12713, 12841], [12713, 12842, 12841], [12713, 12714, 12843], [12713, 12843, 12842], [12714, 12715, 12843], [12715, 12844, 12843], [12715, 12716, 12845], [12715, 12845, 12844], [12716, 12717, 12845], [12717, 12846, 12845], [12717, 12718, 12847], [12717, 12847, 12846], [12718, 12719, 12847], [12719, 12848, 12847], [12719, 12720, 12849], [12719, 12849, 12848], [12720, 12721, 12849], [12721, 12850, 12849], [12721, 12722, 12851], [12721, 12851, 12850], [12722, 12723, 12851], [12723, 12852, 12851], [12723, 12724, 12853], [12723, 12853, 12852], [12724, 12725, 12853], [12725, 12854, 12853], [12725, 12726, 12855], [12725, 12855, 12854], [12726, 12727, 12855], [12727, 12856, 12855], [12727, 12728, 12857], [12727, 12857, 12856], [12728, 12729, 12857], [12729, 12858, 12857], [12729, 12730, 12859], [12729, 12859, 12858], [12730, 12731, 12859], [12731, 12860, 12859], [12731, 12732, 12861], [12731, 12861, 12860], [12732, 12733, 12861], [12733, 12862, 12861], [12733, 12734, 12863], [12733, 12863, 12862], [12734, 12735, 12863], [12735, 12864, 12863], [12735, 12736, 12865], [12735, 12865, 12864], [12736, 12737, 12865], [12737, 12866, 12865], [12737, 12738, 12867], [12737, 12867, 12866], [12738, 12739, 12867], [12739, 12868, 12867], [12739, 12740, 12869], [12739, 12869, 12868], [12740, 12741, 12869], [12741, 12870, 12869], [12741, 12742, 12871], [12741, 12871, 12870], [12742, 12743, 12871], [12743, 12872, 12871], [12743, 12744, 12873], [12743, 12873, 12872], [12744, 12745, 12873], [12745, 12874, 12873], [12745, 12746, 12875], [12745, 12875, 12874], [12746, 12747, 12875], [12747, 12876, 12875], [12747, 12748, 12877], [12747, 12877, 12876], [12748, 12749, 12877], [12749, 12878, 12877], [12749, 12750, 12879], [12749, 12879, 12878], [12750, 12751, 12879], [12751, 12880, 12879], [12751, 12752, 12881], [12751, 12881, 12880], [12752, 12753, 12881], [12753, 12882, 12881], [12753, 12754, 12883], [12753, 12883, 12882], [12754, 12755, 12883], [12755, 12884, 12883], [12755, 12756, 12885], [12755, 12885, 12884], [12756, 12757, 12885], [12757, 12886, 12885], [12757, 12758, 12887], [12757, 12887, 12886], [12758, 12759, 12887], [12759, 12888, 12887], [12759, 12760, 12889], [12759, 12889, 12888], [12760, 12761, 12889], [12761, 12890, 12889], [12761, 12762, 12891], [12761, 12891, 12890], [12762, 12763, 12891], [12763, 12892, 12891], [12763, 12764, 12893], [12763, 12893, 12892], [12764, 12765, 12893], [12765, 12894, 12893], [12765, 12766, 12895], [12765, 12895, 12894], [12766, 12767, 12895], [12767, 12896, 12895], [12767, 12768, 12897], [12767, 12897, 12896], [12768, 12769, 12897], [12769, 12898, 12897], [12769, 12770, 12899], [12769, 12899, 12898], [12770, 12771, 12899], [12771, 12900, 12899], [12771, 12772, 12901], [12771, 12901, 12900], [12772, 12773, 12901], [12773, 12902, 12901], [12773, 12774, 12903], [12773, 12903, 12902], [12774, 12775, 12903], [12775, 12904, 12903], [12775, 12776, 12905], [12775, 12905, 12904], 
[12776, 12777, 12905], [12777, 12906, 12905], [12777, 12778, 12907], [12777, 12907, 12906], [12778, 12779, 12907], [12779, 12908, 12907], [12779, 12780, 12909], [12779, 12909, 12908], [12780, 12781, 12909], [12781, 12910, 12909], [12781, 12782, 12911], [12781, 12911, 12910], [12782, 12783, 12911], [12783, 12912, 12911], [12783, 12784, 12913], [12783, 12913, 12912], [12784, 12785, 12913], [12785, 12914, 12913], [12785, 12786, 12915], [12785, 12915, 12914], [12786, 12787, 12915], [12787, 12916, 12915], [12787, 12788, 12917], [12787, 12917, 12916], [12788, 12789, 12917], [12789, 12918, 12917], [12789, 12790, 12919], [12789, 12919, 12918], [12790, 12791, 12919], [12791, 12920, 12919], [12791, 12792, 12921], [12791, 12921, 12920], [12792, 12793, 12921], [12793, 12922, 12921], [12793, 12794, 12923], [12793, 12923, 12922], [12794, 12795, 12923], [12795, 12924, 12923], [12795, 12796, 12925], [12795, 12925, 12924], [12796, 12797, 12925], [12797, 12926, 12925], [12797, 12798, 12927], [12797, 12927, 12926], [12798, 12799, 12927], [12799, 12928, 12927], [12799, 12800, 12929], [12799, 12929, 12928], [12800, 12801, 12929], [12801, 12930, 12929], [12801, 12802, 12931], [12801, 12931, 12930], [12802, 12803, 12931], [12803, 12932, 12931], [12803, 12804, 12933], [12803, 12933, 12932], [12804, 12805, 12933], [12805, 12934, 12933], [12805, 12806, 12935], [12805, 12935, 12934], [12806, 12807, 12935], [12807, 12936, 12935], [12807, 12808, 12937], [12807, 12937, 12936], [12808, 12809, 12937], [12809, 12938, 12937], [12809, 12810, 12939], [12809, 12939, 12938], [12810, 12811, 12939], [12811, 12940, 12939], [12811, 12812, 12941], [12811, 12941, 12940], [12812, 12813, 12941], [12813, 12942, 12941], [12813, 12814, 12943], [12813, 12943, 12942], [12814, 12815, 12943], [12815, 12944, 12943], [12815, 12816, 12945], [12815, 12945, 12944], [12817, 12818, 12947], [12817, 12947, 12946], [12818, 12819, 12947], [12819, 12948, 12947], [12819, 12820, 12949], [12819, 12949, 12948], [12820, 12821, 12949], [12821, 12950, 12949], [12821, 12822, 12951], [12821, 12951, 12950], [12822, 12823, 12951], [12823, 12952, 12951], [12823, 12824, 12953], [12823, 12953, 12952], [12824, 12825, 12953], [12825, 12954, 12953], [12825, 12826, 12955], [12825, 12955, 12954], [12826, 12827, 12955], [12827, 12956, 12955], [12827, 12828, 12957], [12827, 12957, 12956], [12828, 12829, 12957], [12829, 12958, 12957], [12829, 12830, 12959], [12829, 12959, 12958], [12830, 12831, 12959], [12831, 12960, 12959], [12831, 12832, 12961], [12831, 12961, 12960], [12832, 12833, 12961], [12833, 12962, 12961], [12833, 12834, 12963], [12833, 12963, 12962], [12834, 12835, 12963], [12835, 12964, 12963], [12835, 12836, 12965], [12835, 12965, 12964], [12836, 12837, 12965], [12837, 12966, 12965], [12837, 12838, 12967], [12837, 12967, 12966], [12838, 12839, 12967], [12839, 12968, 12967], [12839, 12840, 12969], [12839, 12969, 12968], [12840, 12841, 12969], [12841, 12970, 12969], [12841, 12842, 12971], [12841, 12971, 12970], [12842, 12843, 12971], [12843, 12972, 12971], [12843, 12844, 12973], [12843, 12973, 12972], [12844, 12845, 12973], [12845, 12974, 12973], [12845, 12846, 12975], [12845, 12975, 12974], [12846, 12847, 12975], [12847, 12976, 12975], [12847, 12848, 12977], [12847, 12977, 12976], [12848, 12849, 12977], [12849, 12978, 12977], [12849, 12850, 12979], [12849, 12979, 12978], [12850, 12851, 12979], [12851, 12980, 12979], [12851, 12852, 12981], [12851, 12981, 12980], [12852, 12853, 12981], [12853, 12982, 12981], [12853, 12854, 12983], [12853, 12983, 12982], [12854, 
12855, 12983], [12855, 12984, 12983], [12855, 12856, 12985], [12855, 12985, 12984], [12856, 12857, 12985], [12857, 12986, 12985], [12857, 12858, 12987], [12857, 12987, 12986], [12858, 12859, 12987], [12859, 12988, 12987], [12859, 12860, 12989], [12859, 12989, 12988], [12860, 12861, 12989], [12861, 12990, 12989], [12861, 12862, 12991], [12861, 12991, 12990], [12862, 12863, 12991], [12863, 12992, 12991], [12863, 12864, 12993], [12863, 12993, 12992], [12864, 12865, 12993], [12865, 12994, 12993], [12865, 12866, 12995], [12865, 12995, 12994], [12866, 12867, 12995], [12867, 12996, 12995], [12867, 12868, 12997], [12867, 12997, 12996], [12868, 12869, 12997], [12869, 12998, 12997], [12869, 12870, 12999], [12869, 12999, 12998], [12870, 12871, 12999], [12871, 13000, 12999], [12871, 12872, 13001], [12871, 13001, 13000], [12872, 12873, 13001], [12873, 13002, 13001], [12873, 12874, 13003], [12873, 13003, 13002], [12874, 12875, 13003], [12875, 13004, 13003], [12875, 12876, 13005], [12875, 13005, 13004], [12876, 12877, 13005], [12877, 13006, 13005], [12877, 12878, 13007], [12877, 13007, 13006], [12878, 12879, 13007], [12879, 13008, 13007], [12879, 12880, 13009], [12879, 13009, 13008], [12880, 12881, 13009], [12881, 13010, 13009], [12881, 12882, 13011], [12881, 13011, 13010], [12882, 12883, 13011], [12883, 13012, 13011], [12883, 12884, 13013], [12883, 13013, 13012], [12884, 12885, 13013], [12885, 13014, 13013], [12885, 12886, 13015], [12885, 13015, 13014], [12886, 12887, 13015], [12887, 13016, 13015], [12887, 12888, 13017], [12887, 13017, 13016], [12888, 12889, 13017], [12889, 13018, 13017], [12889, 12890, 13019], [12889, 13019, 13018], [12890, 12891, 13019], [12891, 13020, 13019], [12891, 12892, 13021], [12891, 13021, 13020], [12892, 12893, 13021], [12893, 13022, 13021], [12893, 12894, 13023], [12893, 13023, 13022], [12894, 12895, 13023], [12895, 13024, 13023], [12895, 12896, 13025], [12895, 13025, 13024], [12896, 12897, 13025], [12897, 13026, 13025], [12897, 12898, 13027], [12897, 13027, 13026], [12898, 12899, 13027], [12899, 13028, 13027], [12899, 12900, 13029], [12899, 13029, 13028], [12900, 12901, 13029], [12901, 13030, 13029], [12901, 12902, 13031], [12901, 13031, 13030], [12902, 12903, 13031], [12903, 13032, 13031], [12903, 12904, 13033], [12903, 13033, 13032], [12904, 12905, 13033], [12905, 13034, 13033], [12905, 12906, 13035], [12905, 13035, 13034], [12906, 12907, 13035], [12907, 13036, 13035], [12907, 12908, 13037], [12907, 13037, 13036], [12908, 12909, 13037], [12909, 13038, 13037], [12909, 12910, 13039], [12909, 13039, 13038], [12910, 12911, 13039], [12911, 13040, 13039], [12911, 12912, 13041], [12911, 13041, 13040], [12912, 12913, 13041], [12913, 13042, 13041], [12913, 12914, 13043], [12913, 13043, 13042], [12914, 12915, 13043], [12915, 13044, 13043], [12915, 12916, 13045], [12915, 13045, 13044], [12916, 12917, 13045], [12917, 13046, 13045], [12917, 12918, 13047], [12917, 13047, 13046], [12918, 12919, 13047], [12919, 13048, 13047], [12919, 12920, 13049], [12919, 13049, 13048], [12920, 12921, 13049], [12921, 13050, 13049], [12921, 12922, 13051], [12921, 13051, 13050], [12922, 12923, 13051], [12923, 13052, 13051], [12923, 12924, 13053], [12923, 13053, 13052], [12924, 12925, 13053], [12925, 13054, 13053], [12925, 12926, 13055], [12925, 13055, 13054], [12926, 12927, 13055], [12927, 13056, 13055], [12927, 12928, 13057], [12927, 13057, 13056], [12928, 12929, 13057], [12929, 13058, 13057], [12929, 12930, 13059], [12929, 13059, 13058], [12930, 12931, 13059], [12931, 13060, 13059], [12931, 12932, 
13061], [12931, 13061, 13060], [12932, 12933, 13061], [12933, 13062, 13061], [12933, 12934, 13063], [12933, 13063, 13062], [12934, 12935, 13063], [12935, 13064, 13063], [12935, 12936, 13065], [12935, 13065, 13064], [12936, 12937, 13065], [12937, 13066, 13065], [12937, 12938, 13067], [12937, 13067, 13066], [12938, 12939, 13067], [12939, 13068, 13067], [12939, 12940, 13069], [12939, 13069, 13068], [12940, 12941, 13069], [12941, 13070, 13069], [12941, 12942, 13071], [12941, 13071, 13070], [12942, 12943, 13071], [12943, 13072, 13071], [12943, 12944, 13073], [12943, 13073, 13072], [12944, 12945, 13073], [12945, 13074, 13073], [12946, 12947, 13075], [12947, 13076, 13075], [12947, 12948, 13077], [12947, 13077, 13076], [12948, 12949, 13077], [12949, 13078, 13077], [12949, 12950, 13079], [12949, 13079, 13078], [12950, 12951, 13079], [12951, 13080, 13079], [12951, 12952, 13081], [12951, 13081, 13080], [12952, 12953, 13081], [12953, 13082, 13081], [12953, 12954, 13083], [12953, 13083, 13082], [12954, 12955, 13083], [12955, 13084, 13083], [12955, 12956, 13085], [12955, 13085, 13084], [12956, 12957, 13085], [12957, 13086, 13085], [12957, 12958, 13087], [12957, 13087, 13086], [12958, 12959, 13087], [12959, 13088, 13087], [12959, 12960, 13089], [12959, 13089, 13088], [12960, 12961, 13089], [12961, 13090, 13089], [12961, 12962, 13091], [12961, 13091, 13090], [12962, 12963, 13091], [12963, 13092, 13091], [12963, 12964, 13093], [12963, 13093, 13092], [12964, 12965, 13093], [12965, 13094, 13093], [12965, 12966, 13095], [12965, 13095, 13094], [12966, 12967, 13095], [12967, 13096, 13095], [12967, 12968, 13097], [12967, 13097, 13096], [12968, 12969, 13097], [12969, 13098, 13097], [12969, 12970, 13099], [12969, 13099, 13098], [12970, 12971, 13099], [12971, 13100, 13099], [12971, 12972, 13101], [12971, 13101, 13100], [12972, 12973, 13101], [12973, 13102, 13101], [12973, 12974, 13103], [12973, 13103, 13102], [12974, 12975, 13103], [12975, 13104, 13103], [12975, 12976, 13105], [12975, 13105, 13104], [12976, 12977, 13105], [12977, 13106, 13105], [12977, 12978, 13107], [12977, 13107, 13106], [12978, 12979, 13107], [12979, 13108, 13107], [12979, 12980, 13109], [12979, 13109, 13108], [12980, 12981, 13109], [12981, 13110, 13109], [12981, 12982, 13111], [12981, 13111, 13110], [12982, 12983, 13111], [12983, 13112, 13111], [12983, 12984, 13113], [12983, 13113, 13112], [12984, 12985, 13113], [12985, 13114, 13113], [12985, 12986, 13115], [12985, 13115, 13114], [12986, 12987, 13115], [12987, 13116, 13115], [12987, 12988, 13117], [12987, 13117, 13116], [12988, 12989, 13117], [12989, 13118, 13117], [12989, 12990, 13119], [12989, 13119, 13118], [12990, 12991, 13119], [12991, 13120, 13119], [12991, 12992, 13121], [12991, 13121, 13120], [12992, 12993, 13121], [12993, 13122, 13121], [12993, 12994, 13123], [12993, 13123, 13122], [12994, 12995, 13123], [12995, 13124, 13123], [12995, 12996, 13125], [12995, 13125, 13124], [12996, 12997, 13125], [12997, 13126, 13125], [12997, 12998, 13127], [12997, 13127, 13126], [12998, 12999, 13127], [12999, 13128, 13127], [12999, 13000, 13129], [12999, 13129, 13128], [13000, 13001, 13129], [13001, 13130, 13129], [13001, 13002, 13131], [13001, 13131, 13130], [13002, 13003, 13131], [13003, 13132, 13131], [13003, 13004, 13133], [13003, 13133, 13132], [13004, 13005, 13133], [13005, 13134, 13133], [13005, 13006, 13135], [13005, 13135, 13134], [13006, 13007, 13135], [13007, 13136, 13135], [13007, 13008, 13137], [13007, 13137, 13136], [13008, 13009, 13137], [13009, 13138, 13137], [13009, 13010, 13139], 
[13009, 13139, 13138], [13010, 13011, 13139], [13011, 13140, 13139], [13011, 13012, 13141], [13011, 13141, 13140], [13012, 13013, 13141], [13013, 13142, 13141], [13013, 13014, 13143], [13013, 13143, 13142], [13014, 13015, 13143], [13015, 13144, 13143], [13015, 13016, 13145], [13015, 13145, 13144], [13016, 13017, 13145], [13017, 13146, 13145], [13017, 13018, 13147], [13017, 13147, 13146], [13018, 13019, 13147], [13019, 13148, 13147], [13019, 13020, 13149], [13019, 13149, 13148], [13020, 13021, 13149], [13021, 13150, 13149], [13021, 13022, 13151], [13021, 13151, 13150], [13022, 13023, 13151], [13023, 13152, 13151], [13023, 13024, 13153], [13023, 13153, 13152], [13024, 13025, 13153], [13025, 13154, 13153], [13025, 13026, 13155], [13025, 13155, 13154], [13026, 13027, 13155], [13027, 13156, 13155], [13027, 13028, 13157], [13027, 13157, 13156], [13028, 13029, 13157], [13029, 13158, 13157], [13029, 13030, 13159], [13029, 13159, 13158], [13030, 13031, 13159], [13031, 13160, 13159], [13031, 13032, 13161], [13031, 13161, 13160], [13032, 13033, 13161], [13033, 13162, 13161], [13033, 13034, 13163], [13033, 13163, 13162], [13034, 13035, 13163], [13035, 13164, 13163], [13035, 13036, 13165], [13035, 13165, 13164], [13036, 13037, 13165], [13037, 13166, 13165], [13037, 13038, 13167], [13037, 13167, 13166], [13038, 13039, 13167], [13039, 13168, 13167], [13039, 13040, 13169], [13039, 13169, 13168], [13040, 13041, 13169], [13041, 13170, 13169], [13041, 13042, 13171], [13041, 13171, 13170], [13042, 13043, 13171], [13043, 13172, 13171], [13043, 13044, 13173], [13043, 13173, 13172], [13044, 13045, 13173], [13045, 13174, 13173], [13045, 13046, 13175], [13045, 13175, 13174], [13046, 13047, 13175], [13047, 13176, 13175], [13047, 13048, 13177], [13047, 13177, 13176], [13048, 13049, 13177], [13049, 13178, 13177], [13049, 13050, 13179], [13049, 13179, 13178], [13050, 13051, 13179], [13051, 13180, 13179], [13051, 13052, 13181], [13051, 13181, 13180], [13052, 13053, 13181], [13053, 13182, 13181], [13053, 13054, 13183], [13053, 13183, 13182], [13054, 13055, 13183], [13055, 13184, 13183], [13055, 13056, 13185], [13055, 13185, 13184], [13056, 13057, 13185], [13057, 13186, 13185], [13057, 13058, 13187], [13057, 13187, 13186], [13058, 13059, 13187], [13059, 13188, 13187], [13059, 13060, 13189], [13059, 13189, 13188], [13060, 13061, 13189], [13061, 13190, 13189], [13061, 13062, 13191], [13061, 13191, 13190], [13062, 13063, 13191], [13063, 13192, 13191], [13063, 13064, 13193], [13063, 13193, 13192], [13064, 13065, 13193], [13065, 13194, 13193], [13065, 13066, 13195], [13065, 13195, 13194], [13066, 13067, 13195], [13067, 13196, 13195], [13067, 13068, 13197], [13067, 13197, 13196], [13068, 13069, 13197], [13069, 13198, 13197], [13069, 13070, 13199], [13069, 13199, 13198], [13070, 13071, 13199], [13071, 13200, 13199], [13071, 13072, 13201], [13071, 13201, 13200], [13072, 13073, 13201], [13073, 13202, 13201], [13073, 13074, 13203], [13073, 13203, 13202], [13075, 13076, 13205], [13075, 13205, 13204], [13076, 13077, 13205], [13077, 13206, 13205], [13077, 13078, 13207], [13077, 13207, 13206], [13078, 13079, 13207], [13079, 13208, 13207], [13079, 13080, 13209], [13079, 13209, 13208], [13080, 13081, 13209], [13081, 13210, 13209], [13081, 13082, 13211], [13081, 13211, 13210], [13082, 13083, 13211], [13083, 13212, 13211], [13083, 13084, 13213], [13083, 13213, 13212], [13084, 13085, 13213], [13085, 13214, 13213], [13085, 13086, 13215], [13085, 13215, 13214], [13086, 13087, 13215], [13087, 13216, 13215], [13087, 13088, 13217], [13087, 
13217, 13216], [13088, 13089, 13217], [13089, 13218, 13217], [13089, 13090, 13219], [13089, 13219, 13218], [13090, 13091, 13219], [13091, 13220, 13219], [13091, 13092, 13221], [13091, 13221, 13220], [13092, 13093, 13221], [13093, 13222, 13221], [13093, 13094, 13223], [13093, 13223, 13222], [13094, 13095, 13223], [13095, 13224, 13223], [13095, 13096, 13225], [13095, 13225, 13224], [13096, 13097, 13225], [13097, 13226, 13225], [13097, 13098, 13227], [13097, 13227, 13226], [13098, 13099, 13227], [13099, 13228, 13227], [13099, 13100, 13229], [13099, 13229, 13228], [13100, 13101, 13229], [13101, 13230, 13229], [13101, 13102, 13231], [13101, 13231, 13230], [13102, 13103, 13231], [13103, 13232, 13231], [13103, 13104, 13233], [13103, 13233, 13232], [13104, 13105, 13233], [13105, 13234, 13233], [13105, 13106, 13235], [13105, 13235, 13234], [13106, 13107, 13235], [13107, 13236, 13235], [13107, 13108, 13237], [13107, 13237, 13236], [13108, 13109, 13237], [13109, 13238, 13237], [13109, 13110, 13239], [13109, 13239, 13238], [13110, 13111, 13239], [13111, 13240, 13239], [13111, 13112, 13241], [13111, 13241, 13240], [13112, 13113, 13241], [13113, 13242, 13241], [13113, 13114, 13243], [13113, 13243, 13242], [13114, 13115, 13243], [13115, 13244, 13243], [13115, 13116, 13245], [13115, 13245, 13244], [13116, 13117, 13245], [13117, 13246, 13245], [13117, 13118, 13247], [13117, 13247, 13246], [13118, 13119, 13247], [13119, 13248, 13247], [13119, 13120, 13249], [13119, 13249, 13248], [13120, 13121, 13249], [13121, 13250, 13249], [13121, 13122, 13251], [13121, 13251, 13250], [13122, 13123, 13251], [13123, 13252, 13251], [13123, 13124, 13253], [13123, 13253, 13252], [13124, 13125, 13253], [13125, 13254, 13253], [13125, 13126, 13255], [13125, 13255, 13254], [13126, 13127, 13255], [13127, 13256, 13255], [13127, 13128, 13257], [13127, 13257, 13256], [13128, 13129, 13257], [13129, 13258, 13257], [13129, 13130, 13259], [13129, 13259, 13258], [13130, 13131, 13259], [13131, 13260, 13259], [13131, 13132, 13261], [13131, 13261, 13260], [13132, 13133, 13261], [13133, 13262, 13261], [13133, 13134, 13263], [13133, 13263, 13262], [13134, 13135, 13263], [13135, 13264, 13263], [13135, 13136, 13265], [13135, 13265, 13264], [13136, 13137, 13265], [13137, 13266, 13265], [13137, 13138, 13267], [13137, 13267, 13266], [13138, 13139, 13267], [13139, 13268, 13267], [13139, 13140, 13269], [13139, 13269, 13268], [13140, 13141, 13269], [13141, 13270, 13269], [13141, 13142, 13271], [13141, 13271, 13270], [13142, 13143, 13271], [13143, 13272, 13271], [13143, 13144, 13273], [13143, 13273, 13272], [13144, 13145, 13273], [13145, 13274, 13273], [13145, 13146, 13275], [13145, 13275, 13274], [13146, 13147, 13275], [13147, 13276, 13275], [13147, 13148, 13277], [13147, 13277, 13276], [13148, 13149, 13277], [13149, 13278, 13277], [13149, 13150, 13279], [13149, 13279, 13278], [13150, 13151, 13279], [13151, 13280, 13279], [13151, 13152, 13281], [13151, 13281, 13280], [13152, 13153, 13281], [13153, 13282, 13281], [13153, 13154, 13283], [13153, 13283, 13282], [13154, 13155, 13283], [13155, 13284, 13283], [13155, 13156, 13285], [13155, 13285, 13284], [13156, 13157, 13285], [13157, 13286, 13285], [13157, 13158, 13287], [13157, 13287, 13286], [13158, 13159, 13287], [13159, 13288, 13287], [13159, 13160, 13289], [13159, 13289, 13288], [13160, 13161, 13289], [13161, 13290, 13289], [13161, 13162, 13291], [13161, 13291, 13290], [13162, 13163, 13291], [13163, 13292, 13291], [13163, 13164, 13293], [13163, 13293, 13292], [13164, 13165, 13293], [13165, 13294, 
13293], [13165, 13166, 13295], [13165, 13295, 13294], [13166, 13167, 13295], [13167, 13296, 13295], [13167, 13168, 13297], [13167, 13297, 13296], [13168, 13169, 13297], [13169, 13298, 13297], [13169, 13170, 13299], [13169, 13299, 13298], [13170, 13171, 13299], [13171, 13300, 13299], [13171, 13172, 13301], [13171, 13301, 13300], [13172, 13173, 13301], [13173, 13302, 13301], [13173, 13174, 13303], [13173, 13303, 13302], [13174, 13175, 13303], [13175, 13304, 13303], [13175, 13176, 13305], [13175, 13305, 13304], [13176, 13177, 13305], [13177, 13306, 13305], [13177, 13178, 13307], [13177, 13307, 13306], [13178, 13179, 13307], [13179, 13308, 13307], [13179, 13180, 13309], [13179, 13309, 13308], [13180, 13181, 13309], [13181, 13310, 13309], [13181, 13182, 13311], [13181, 13311, 13310], [13182, 13183, 13311], [13183, 13312, 13311], [13183, 13184, 13313], [13183, 13313, 13312], [13184, 13185, 13313], [13185, 13314, 13313], [13185, 13186, 13315], [13185, 13315, 13314], [13186, 13187, 13315], [13187, 13316, 13315], [13187, 13188, 13317], [13187, 13317, 13316], [13188, 13189, 13317], [13189, 13318, 13317], [13189, 13190, 13319], [13189, 13319, 13318], [13190, 13191, 13319], [13191, 13320, 13319], [13191, 13192, 13321], [13191, 13321, 13320], [13192, 13193, 13321], [13193, 13322, 13321], [13193, 13194, 13323], [13193, 13323, 13322], [13194, 13195, 13323], [13195, 13324, 13323], [13195, 13196, 13325], [13195, 13325, 13324], [13196, 13197, 13325], [13197, 13326, 13325], [13197, 13198, 13327], [13197, 13327, 13326], [13198, 13199, 13327], [13199, 13328, 13327], [13199, 13200, 13329], [13199, 13329, 13328], [13200, 13201, 13329], [13201, 13330, 13329], [13201, 13202, 13331], [13201, 13331, 13330], [13202, 13203, 13331], [13203, 13332, 13331], [13204, 13205, 13333], [13205, 13334, 13333], [13205, 13206, 13335], [13205, 13335, 13334], [13206, 13207, 13335], [13207, 13336, 13335], [13207, 13208, 13337], [13207, 13337, 13336], [13208, 13209, 13337], [13209, 13338, 13337], [13209, 13210, 13339], [13209, 13339, 13338], [13210, 13211, 13339], [13211, 13340, 13339], [13211, 13212, 13341], [13211, 13341, 13340], [13212, 13213, 13341], [13213, 13342, 13341], [13213, 13214, 13343], [13213, 13343, 13342], [13214, 13215, 13343], [13215, 13344, 13343], [13215, 13216, 13345], [13215, 13345, 13344], [13216, 13217, 13345], [13217, 13346, 13345], [13217, 13218, 13347], [13217, 13347, 13346], [13218, 13219, 13347], [13219, 13348, 13347], [13219, 13220, 13349], [13219, 13349, 13348], [13220, 13221, 13349], [13221, 13350, 13349], [13221, 13222, 13351], [13221, 13351, 13350], [13222, 13223, 13351], [13223, 13352, 13351], [13223, 13224, 13353], [13223, 13353, 13352], [13224, 13225, 13353], [13225, 13354, 13353], [13225, 13226, 13355], [13225, 13355, 13354], [13226, 13227, 13355], [13227, 13356, 13355], [13227, 13228, 13357], [13227, 13357, 13356], [13228, 13229, 13357], [13229, 13358, 13357], [13229, 13230, 13359], [13229, 13359, 13358], [13230, 13231, 13359], [13231, 13360, 13359], [13231, 13232, 13361], [13231, 13361, 13360], [13232, 13233, 13361], [13233, 13362, 13361], [13233, 13234, 13363], [13233, 13363, 13362], [13234, 13235, 13363], [13235, 13364, 13363], [13235, 13236, 13365], [13235, 13365, 13364], [13236, 13237, 13365], [13237, 13366, 13365], [13237, 13238, 13367], [13237, 13367, 13366], [13238, 13239, 13367], [13239, 13368, 13367], [13239, 13240, 13369], [13239, 13369, 13368], [13240, 13241, 13369], [13241, 13370, 13369], [13241, 13242, 13371], [13241, 13371, 13370], [13242, 13243, 13371], [13243, 13372, 13371], 
[13243, 13244, 13373], [13243, 13373, 13372], [13244, 13245, 13373], [13245, 13374, 13373], [13245, 13246, 13375], [13245, 13375, 13374], [13246, 13247, 13375], [13247, 13376, 13375], [13247, 13248, 13377], [13247, 13377, 13376], [13248, 13249, 13377], [13249, 13378, 13377], [13249, 13250, 13379], [13249, 13379, 13378], [13250, 13251, 13379], [13251, 13380, 13379], [13251, 13252, 13381], [13251, 13381, 13380], [13252, 13253, 13381], [13253, 13382, 13381], [13253, 13254, 13383], [13253, 13383, 13382], [13254, 13255, 13383], [13255, 13384, 13383], [13255, 13256, 13385], [13255, 13385, 13384], [13256, 13257, 13385], [13257, 13386, 13385], [13257, 13258, 13387], [13257, 13387, 13386], [13258, 13259, 13387], [13259, 13388, 13387], [13259, 13260, 13389], [13259, 13389, 13388], [13260, 13261, 13389], [13261, 13390, 13389], [13261, 13262, 13391], [13261, 13391, 13390], [13262, 13263, 13391], [13263, 13392, 13391], [13263, 13264, 13393], [13263, 13393, 13392], [13264, 13265, 13393], [13265, 13394, 13393], [13265, 13266, 13395], [13265, 13395, 13394], [13266, 13267, 13395], [13267, 13396, 13395], [13267, 13268, 13397], [13267, 13397, 13396], [13268, 13269, 13397], [13269, 13398, 13397], [13269, 13270, 13399], [13269, 13399, 13398], [13270, 13271, 13399], [13271, 13400, 13399], [13271, 13272, 13401], [13271, 13401, 13400], [13272, 13273, 13401], [13273, 13402, 13401], [13273, 13274, 13403], [13273, 13403, 13402], [13274, 13275, 13403], [13275, 13404, 13403], [13275, 13276, 13405], [13275, 13405, 13404], [13276, 13277, 13405], [13277, 13406, 13405], [13277, 13278, 13407], [13277, 13407, 13406], [13278, 13279, 13407], [13279, 13408, 13407], [13279, 13280, 13409], [13279, 13409, 13408], [13280, 13281, 13409], [13281, 13410, 13409], [13281, 13282, 13411], [13281, 13411, 13410], [13282, 13283, 13411], [13283, 13412, 13411], [13283, 13284, 13413], [13283, 13413, 13412], [13284, 13285, 13413], [13285, 13414, 13413], [13285, 13286, 13415], [13285, 13415, 13414], [13286, 13287, 13415], [13287, 13416, 13415], [13287, 13288, 13417], [13287, 13417, 13416], [13288, 13289, 13417], [13289, 13418, 13417], [13289, 13290, 13419], [13289, 13419, 13418], [13290, 13291, 13419], [13291, 13420, 13419], [13291, 13292, 13421], [13291, 13421, 13420], [13292, 13293, 13421], [13293, 13422, 13421], [13293, 13294, 13423], [13293, 13423, 13422], [13294, 13295, 13423], [13295, 13424, 13423], [13295, 13296, 13425], [13295, 13425, 13424], [13296, 13297, 13425], [13297, 13426, 13425], [13297, 13298, 13427], [13297, 13427, 13426], [13298, 13299, 13427], [13299, 13428, 13427], [13299, 13300, 13429], [13299, 13429, 13428], [13300, 13301, 13429], [13301, 13430, 13429], [13301, 13302, 13431], [13301, 13431, 13430], [13302, 13303, 13431], [13303, 13432, 13431], [13303, 13304, 13433], [13303, 13433, 13432], [13304, 13305, 13433], [13305, 13434, 13433], [13305, 13306, 13435], [13305, 13435, 13434], [13306, 13307, 13435], [13307, 13436, 13435], [13307, 13308, 13437], [13307, 13437, 13436], [13308, 13309, 13437], [13309, 13438, 13437], [13309, 13310, 13439], [13309, 13439, 13438], [13310, 13311, 13439], [13311, 13440, 13439], [13311, 13312, 13441], [13311, 13441, 13440], [13312, 13313, 13441], [13313, 13442, 13441], [13313, 13314, 13443], [13313, 13443, 13442], [13314, 13315, 13443], [13315, 13444, 13443], [13315, 13316, 13445], [13315, 13445, 13444], [13316, 13317, 13445], [13317, 13446, 13445], [13317, 13318, 13447], [13317, 13447, 13446], [13318, 13319, 13447], [13319, 13448, 13447], [13319, 13320, 13449], [13319, 13449, 13448], [13320, 
13321, 13449], [13321, 13450, 13449], [13321, 13322, 13451], [13321, 13451, 13450], [13322, 13323, 13451], [13323, 13452, 13451], [13323, 13324, 13453], [13323, 13453, 13452], [13324, 13325, 13453], [13325, 13454, 13453], [13325, 13326, 13455], [13325, 13455, 13454], [13326, 13327, 13455], [13327, 13456, 13455], [13327, 13328, 13457], [13327, 13457, 13456], [13328, 13329, 13457], [13329, 13458, 13457], [13329, 13330, 13459], [13329, 13459, 13458], [13330, 13331, 13459], [13331, 13460, 13459], [13331, 13332, 13461], [13331, 13461, 13460], [13333, 13334, 13463], [13333, 13463, 13462], [13334, 13335, 13463], [13335, 13464, 13463], [13335, 13336, 13465], [13335, 13465, 13464], [13336, 13337, 13465], [13337, 13466, 13465], [13337, 13338, 13467], [13337, 13467, 13466], [13338, 13339, 13467], [13339, 13468, 13467], [13339, 13340, 13469], [13339, 13469, 13468], [13340, 13341, 13469], [13341, 13470, 13469], [13341, 13342, 13471], [13341, 13471, 13470], [13342, 13343, 13471], [13343, 13472, 13471], [13343, 13344, 13473], [13343, 13473, 13472], [13344, 13345, 13473], [13345, 13474, 13473], [13345, 13346, 13475], [13345, 13475, 13474], [13346, 13347, 13475], [13347, 13476, 13475], [13347, 13348, 13477], [13347, 13477, 13476], [13348, 13349, 13477], [13349, 13478, 13477], [13349, 13350, 13479], [13349, 13479, 13478], [13350, 13351, 13479], [13351, 13480, 13479], [13351, 13352, 13481], [13351, 13481, 13480], [13352, 13353, 13481], [13353, 13482, 13481], [13353, 13354, 13483], [13353, 13483, 13482], [13354, 13355, 13483], [13355, 13484, 13483], [13355, 13356, 13485], [13355, 13485, 13484], [13356, 13357, 13485], [13357, 13486, 13485], [13357, 13358, 13487], [13357, 13487, 13486], [13358, 13359, 13487], [13359, 13488, 13487], [13359, 13360, 13489], [13359, 13489, 13488], [13360, 13361, 13489], [13361, 13490, 13489], [13361, 13362, 13491], [13361, 13491, 13490], [13362, 13363, 13491], [13363, 13492, 13491], [13363, 13364, 13493], [13363, 13493, 13492], [13364, 13365, 13493], [13365, 13494, 13493], [13365, 13366, 13495], [13365, 13495, 13494], [13366, 13367, 13495], [13367, 13496, 13495], [13367, 13368, 13497], [13367, 13497, 13496], [13368, 13369, 13497], [13369, 13498, 13497], [13369, 13370, 13499], [13369, 13499, 13498], [13370, 13371, 13499], [13371, 13500, 13499], [13371, 13372, 13501], [13371, 13501, 13500], [13372, 13373, 13501], [13373, 13502, 13501], [13373, 13374, 13503], [13373, 13503, 13502], [13374, 13375, 13503], [13375, 13504, 13503], [13375, 13376, 13505], [13375, 13505, 13504], [13376, 13377, 13505], [13377, 13506, 13505], [13377, 13378, 13507], [13377, 13507, 13506], [13378, 13379, 13507], [13379, 13508, 13507], [13379, 13380, 13509], [13379, 13509, 13508], [13380, 13381, 13509], [13381, 13510, 13509], [13381, 13382, 13511], [13381, 13511, 13510], [13382, 13383, 13511], [13383, 13512, 13511], [13383, 13384, 13513], [13383, 13513, 13512], [13384, 13385, 13513], [13385, 13514, 13513], [13385, 13386, 13515], [13385, 13515, 13514], [13386, 13387, 13515], [13387, 13516, 13515], [13387, 13388, 13517], [13387, 13517, 13516], [13388, 13389, 13517], [13389, 13518, 13517], [13389, 13390, 13519], [13389, 13519, 13518], [13390, 13391, 13519], [13391, 13520, 13519], [13391, 13392, 13521], [13391, 13521, 13520], [13392, 13393, 13521], [13393, 13522, 13521], [13393, 13394, 13523], [13393, 13523, 13522], [13394, 13395, 13523], [13395, 13524, 13523], [13395, 13396, 13525], [13395, 13525, 13524], [13396, 13397, 13525], [13397, 13526, 13525], [13397, 13398, 13527], [13397, 13527, 13526], [13398, 13399, 
13527], [13399, 13528, 13527], [13399, 13400, 13529], [13399, 13529, 13528], [13400, 13401, 13529], [13401, 13530, 13529], [13401, 13402, 13531], [13401, 13531, 13530], [13402, 13403, 13531], [13403, 13532, 13531], [13403, 13404, 13533], [13403, 13533, 13532], [13404, 13405, 13533], [13405, 13534, 13533], [13405, 13406, 13535], [13405, 13535, 13534], [13406, 13407, 13535], [13407, 13536, 13535], [13407, 13408, 13537], [13407, 13537, 13536], [13408, 13409, 13537], [13409, 13538, 13537], [13409, 13410, 13539], [13409, 13539, 13538], [13410, 13411, 13539], [13411, 13540, 13539], [13411, 13412, 13541], [13411, 13541, 13540], [13412, 13413, 13541], [13413, 13542, 13541], [13413, 13414, 13543], [13413, 13543, 13542], [13414, 13415, 13543], [13415, 13544, 13543], [13415, 13416, 13545], [13415, 13545, 13544], [13416, 13417, 13545], [13417, 13546, 13545], [13417, 13418, 13547], [13417, 13547, 13546], [13418, 13419, 13547], [13419, 13548, 13547], [13419, 13420, 13549], [13419, 13549, 13548], [13420, 13421, 13549], [13421, 13550, 13549], [13421, 13422, 13551], [13421, 13551, 13550], [13422, 13423, 13551], [13423, 13552, 13551], [13423, 13424, 13553], [13423, 13553, 13552], [13424, 13425, 13553], [13425, 13554, 13553], [13425, 13426, 13555], [13425, 13555, 13554], [13426, 13427, 13555], [13427, 13556, 13555], [13427, 13428, 13557], [13427, 13557, 13556], [13428, 13429, 13557], [13429, 13558, 13557], [13429, 13430, 13559], [13429, 13559, 13558], [13430, 13431, 13559], [13431, 13560, 13559], [13431, 13432, 13561], [13431, 13561, 13560], [13432, 13433, 13561], [13433, 13562, 13561], [13433, 13434, 13563], [13433, 13563, 13562], [13434, 13435, 13563], [13435, 13564, 13563], [13435, 13436, 13565], [13435, 13565, 13564], [13436, 13437, 13565], [13437, 13566, 13565], [13437, 13438, 13567], [13437, 13567, 13566], [13438, 13439, 13567], [13439, 13568, 13567], [13439, 13440, 13569], [13439, 13569, 13568], [13440, 13441, 13569], [13441, 13570, 13569], [13441, 13442, 13571], [13441, 13571, 13570], [13442, 13443, 13571], [13443, 13572, 13571], [13443, 13444, 13573], [13443, 13573, 13572], [13444, 13445, 13573], [13445, 13574, 13573], [13445, 13446, 13575], [13445, 13575, 13574], [13446, 13447, 13575], [13447, 13576, 13575], [13447, 13448, 13577], [13447, 13577, 13576], [13448, 13449, 13577], [13449, 13578, 13577], [13449, 13450, 13579], [13449, 13579, 13578], [13450, 13451, 13579], [13451, 13580, 13579], [13451, 13452, 13581], [13451, 13581, 13580], [13452, 13453, 13581], [13453, 13582, 13581], [13453, 13454, 13583], [13453, 13583, 13582], [13454, 13455, 13583], [13455, 13584, 13583], [13455, 13456, 13585], [13455, 13585, 13584], [13456, 13457, 13585], [13457, 13586, 13585], [13457, 13458, 13587], [13457, 13587, 13586], [13458, 13459, 13587], [13459, 13588, 13587], [13459, 13460, 13589], [13459, 13589, 13588], [13460, 13461, 13589], [13461, 13590, 13589], [13462, 13463, 13591], [13463, 13592, 13591], [13463, 13464, 13593], [13463, 13593, 13592], [13464, 13465, 13593], [13465, 13594, 13593], [13465, 13466, 13595], [13465, 13595, 13594], [13466, 13467, 13595], [13467, 13596, 13595], [13467, 13468, 13597], [13467, 13597, 13596], [13468, 13469, 13597], [13469, 13598, 13597], [13469, 13470, 13599], [13469, 13599, 13598], [13470, 13471, 13599], [13471, 13600, 13599], [13471, 13472, 13601], [13471, 13601, 13600], [13472, 13473, 13601], [13473, 13602, 13601], [13473, 13474, 13603], [13473, 13603, 13602], [13474, 13475, 13603], [13475, 13604, 13603], [13475, 13476, 13605], [13475, 13605, 13604], [13476, 13477, 13605], 
[13477, 13606, 13605], [13477, 13478, 13607], [13477, 13607, 13606], [13478, 13479, 13607], [13479, 13608, 13607], [13479, 13480, 13609], [13479, 13609, 13608], [13480, 13481, 13609], [13481, 13610, 13609], [13481, 13482, 13611], [13481, 13611, 13610], [13482, 13483, 13611], [13483, 13612, 13611], [13483, 13484, 13613], [13483, 13613, 13612], [13484, 13485, 13613], [13485, 13614, 13613], [13485, 13486, 13615], [13485, 13615, 13614], [13486, 13487, 13615], [13487, 13616, 13615], [13487, 13488, 13617], [13487, 13617, 13616], [13488, 13489, 13617], [13489, 13618, 13617], [13489, 13490, 13619], [13489, 13619, 13618], [13490, 13491, 13619], [13491, 13620, 13619], [13491, 13492, 13621], [13491, 13621, 13620], [13492, 13493, 13621], [13493, 13622, 13621], [13493, 13494, 13623], [13493, 13623, 13622], [13494, 13495, 13623], [13495, 13624, 13623], [13495, 13496, 13625], [13495, 13625, 13624], [13496, 13497, 13625], [13497, 13626, 13625], [13497, 13498, 13627], [13497, 13627, 13626], [13498, 13499, 13627], [13499, 13628, 13627], [13499, 13500, 13629], [13499, 13629, 13628], [13500, 13501, 13629], [13501, 13630, 13629], [13501, 13502, 13631], [13501, 13631, 13630], [13502, 13503, 13631], [13503, 13632, 13631], [13503, 13504, 13633], [13503, 13633, 13632], [13504, 13505, 13633], [13505, 13634, 13633], [13505, 13506, 13635], [13505, 13635, 13634], [13506, 13507, 13635], [13507, 13636, 13635], [13507, 13508, 13637], [13507, 13637, 13636], [13508, 13509, 13637], [13509, 13638, 13637], [13509, 13510, 13639], [13509, 13639, 13638], [13510, 13511, 13639], [13511, 13640, 13639], [13511, 13512, 13641], [13511, 13641, 13640], [13512, 13513, 13641], [13513, 13642, 13641], [13513, 13514, 13643], [13513, 13643, 13642], [13514, 13515, 13643], [13515, 13644, 13643], [13515, 13516, 13645], [13515, 13645, 13644], [13516, 13517, 13645], [13517, 13646, 13645], [13517, 13518, 13647], [13517, 13647, 13646], [13518, 13519, 13647], [13519, 13648, 13647], [13519, 13520, 13649], [13519, 13649, 13648], [13520, 13521, 13649], [13521, 13650, 13649], [13521, 13522, 13651], [13521, 13651, 13650], [13522, 13523, 13651], [13523, 13652, 13651], [13523, 13524, 13653], [13523, 13653, 13652], [13524, 13525, 13653], [13525, 13654, 13653], [13525, 13526, 13655], [13525, 13655, 13654], [13526, 13527, 13655], [13527, 13656, 13655], [13527, 13528, 13657], [13527, 13657, 13656], [13528, 13529, 13657], [13529, 13658, 13657], [13529, 13530, 13659], [13529, 13659, 13658], [13530, 13531, 13659], [13531, 13660, 13659], [13531, 13532, 13661], [13531, 13661, 13660], [13532, 13533, 13661], [13533, 13662, 13661], [13533, 13534, 13663], [13533, 13663, 13662], [13534, 13535, 13663], [13535, 13664, 13663], [13535, 13536, 13665], [13535, 13665, 13664], [13536, 13537, 13665], [13537, 13666, 13665], [13537, 13538, 13667], [13537, 13667, 13666], [13538, 13539, 13667], [13539, 13668, 13667], [13539, 13540, 13669], [13539, 13669, 13668], [13540, 13541, 13669], [13541, 13670, 13669], [13541, 13542, 13671], [13541, 13671, 13670], [13542, 13543, 13671], [13543, 13672, 13671], [13543, 13544, 13673], [13543, 13673, 13672], [13544, 13545, 13673], [13545, 13674, 13673], [13545, 13546, 13675], [13545, 13675, 13674], [13546, 13547, 13675], [13547, 13676, 13675], [13547, 13548, 13677], [13547, 13677, 13676], [13548, 13549, 13677], [13549, 13678, 13677], [13549, 13550, 13679], [13549, 13679, 13678], [13550, 13551, 13679], [13551, 13680, 13679], [13551, 13552, 13681], [13551, 13681, 13680], [13552, 13553, 13681], [13553, 13682, 13681], [13553, 13554, 13683], [13553, 
13683, 13682], [13554, 13555, 13683], [13555, 13684, 13683], [13555, 13556, 13685], [13555, 13685, 13684], [13556, 13557, 13685], [13557, 13686, 13685], [13557, 13558, 13687], [13557, 13687, 13686], [13558, 13559, 13687], [13559, 13688, 13687], [13559, 13560, 13689], [13559, 13689, 13688], [13560, 13561, 13689], [13561, 13690, 13689], [13561, 13562, 13691], [13561, 13691, 13690], [13562, 13563, 13691], [13563, 13692, 13691], [13563, 13564, 13693], [13563, 13693, 13692], [13564, 13565, 13693], [13565, 13694, 13693], [13565, 13566, 13695], [13565, 13695, 13694], [13566, 13567, 13695], [13567, 13696, 13695], [13567, 13568, 13697], [13567, 13697, 13696], [13568, 13569, 13697], [13569, 13698, 13697], [13569, 13570, 13699], [13569, 13699, 13698], [13570, 13571, 13699], [13571, 13700, 13699], [13571, 13572, 13701], [13571, 13701, 13700], [13572, 13573, 13701], [13573, 13702, 13701], [13573, 13574, 13703], [13573, 13703, 13702], [13574, 13575, 13703], [13575, 13704, 13703], [13575, 13576, 13705], [13575, 13705, 13704], [13576, 13577, 13705], [13577, 13706, 13705], [13577, 13578, 13707], [13577, 13707, 13706], [13578, 13579, 13707], [13579, 13708, 13707], [13579, 13580, 13709], [13579, 13709, 13708], [13580, 13581, 13709], [13581, 13710, 13709], [13581, 13582, 13711], [13581, 13711, 13710], [13582, 13583, 13711], [13583, 13712, 13711], [13583, 13584, 13713], [13583, 13713, 13712], [13584, 13585, 13713], [13585, 13714, 13713], [13585, 13586, 13715], [13585, 13715, 13714], [13586, 13587, 13715], [13587, 13716, 13715], [13587, 13588, 13717], [13587, 13717, 13716], [13588, 13589, 13717], [13589, 13718, 13717], [13589, 13590, 13719], [13589, 13719, 13718], [13591, 13592, 13721], [13591, 13721, 13720], [13592, 13593, 13721], [13593, 13722, 13721], [13593, 13594, 13723], [13593, 13723, 13722], [13594, 13595, 13723], [13595, 13724, 13723], [13595, 13596, 13725], [13595, 13725, 13724], [13596, 13597, 13725], [13597, 13726, 13725], [13597, 13598, 13727], [13597, 13727, 13726], [13598, 13599, 13727], [13599, 13728, 13727], [13599, 13600, 13729], [13599, 13729, 13728], [13600, 13601, 13729], [13601, 13730, 13729], [13601, 13602, 13731], [13601, 13731, 13730], [13602, 13603, 13731], [13603, 13732, 13731], [13603, 13604, 13733], [13603, 13733, 13732], [13604, 13605, 13733], [13605, 13734, 13733], [13605, 13606, 13735], [13605, 13735, 13734], [13606, 13607, 13735], [13607, 13736, 13735], [13607, 13608, 13737], [13607, 13737, 13736], [13608, 13609, 13737], [13609, 13738, 13737], [13609, 13610, 13739], [13609, 13739, 13738], [13610, 13611, 13739], [13611, 13740, 13739], [13611, 13612, 13741], [13611, 13741, 13740], [13612, 13613, 13741], [13613, 13742, 13741], [13613, 13614, 13743], [13613, 13743, 13742], [13614, 13615, 13743], [13615, 13744, 13743], [13615, 13616, 13745], [13615, 13745, 13744], [13616, 13617, 13745], [13617, 13746, 13745], [13617, 13618, 13747], [13617, 13747, 13746], [13618, 13619, 13747], [13619, 13748, 13747], [13619, 13620, 13749], [13619, 13749, 13748], [13620, 13621, 13749], [13621, 13750, 13749], [13621, 13622, 13751], [13621, 13751, 13750], [13622, 13623, 13751], [13623, 13752, 13751], [13623, 13624, 13753], [13623, 13753, 13752], [13624, 13625, 13753], [13625, 13754, 13753], [13625, 13626, 13755], [13625, 13755, 13754], [13626, 13627, 13755], [13627, 13756, 13755], [13627, 13628, 13757], [13627, 13757, 13756], [13628, 13629, 13757], [13629, 13758, 13757], [13629, 13630, 13759], [13629, 13759, 13758], [13630, 13631, 13759], [13631, 13760, 13759], [13631, 13632, 13761], [13631, 13761, 
13760], [13632, 13633, 13761], [13633, 13762, 13761], [13633, 13634, 13763], [13633, 13763, 13762], [13634, 13635, 13763], [13635, 13764, 13763], [13635, 13636, 13765], [13635, 13765, 13764], [13636, 13637, 13765], [13637, 13766, 13765], [13637, 13638, 13767], [13637, 13767, 13766], [13638, 13639, 13767], [13639, 13768, 13767], [13639, 13640, 13769], [13639, 13769, 13768], [13640, 13641, 13769], [13641, 13770, 13769], [13641, 13642, 13771], [13641, 13771, 13770], [13642, 13643, 13771], [13643, 13772, 13771], [13643, 13644, 13773], [13643, 13773, 13772], [13644, 13645, 13773], [13645, 13774, 13773], [13645, 13646, 13775], [13645, 13775, 13774], [13646, 13647, 13775], [13647, 13776, 13775], [13647, 13648, 13777], [13647, 13777, 13776], [13648, 13649, 13777], [13649, 13778, 13777], [13649, 13650, 13779], [13649, 13779, 13778], [13650, 13651, 13779], [13651, 13780, 13779], [13651, 13652, 13781], [13651, 13781, 13780], [13652, 13653, 13781], [13653, 13782, 13781], [13653, 13654, 13783], [13653, 13783, 13782], [13654, 13655, 13783], [13655, 13784, 13783], [13655, 13656, 13785], [13655, 13785, 13784], [13656, 13657, 13785], [13657, 13786, 13785], [13657, 13658, 13787], [13657, 13787, 13786], [13658, 13659, 13787], [13659, 13788, 13787], [13659, 13660, 13789], [13659, 13789, 13788], [13660, 13661, 13789], [13661, 13790, 13789], [13661, 13662, 13791], [13661, 13791, 13790], [13662, 13663, 13791], [13663, 13792, 13791], [13663, 13664, 13793], [13663, 13793, 13792], [13664, 13665, 13793], [13665, 13794, 13793], [13665, 13666, 13795], [13665, 13795, 13794], [13666, 13667, 13795], [13667, 13796, 13795], [13667, 13668, 13797], [13667, 13797, 13796], [13668, 13669, 13797], [13669, 13798, 13797], [13669, 13670, 13799], [13669, 13799, 13798], [13670, 13671, 13799], [13671, 13800, 13799], [13671, 13672, 13801], [13671, 13801, 13800], [13672, 13673, 13801], [13673, 13802, 13801], [13673, 13674, 13803], [13673, 13803, 13802], [13674, 13675, 13803], [13675, 13804, 13803], [13675, 13676, 13805], [13675, 13805, 13804], [13676, 13677, 13805], [13677, 13806, 13805], [13677, 13678, 13807], [13677, 13807, 13806], [13678, 13679, 13807], [13679, 13808, 13807], [13679, 13680, 13809], [13679, 13809, 13808], [13680, 13681, 13809], [13681, 13810, 13809], [13681, 13682, 13811], [13681, 13811, 13810], [13682, 13683, 13811], [13683, 13812, 13811], [13683, 13684, 13813], [13683, 13813, 13812], [13684, 13685, 13813], [13685, 13814, 13813], [13685, 13686, 13815], [13685, 13815, 13814], [13686, 13687, 13815], [13687, 13816, 13815], [13687, 13688, 13817], [13687, 13817, 13816], [13688, 13689, 13817], [13689, 13818, 13817], [13689, 13690, 13819], [13689, 13819, 13818], [13690, 13691, 13819], [13691, 13820, 13819], [13691, 13692, 13821], [13691, 13821, 13820], [13692, 13693, 13821], [13693, 13822, 13821], [13693, 13694, 13823], [13693, 13823, 13822], [13694, 13695, 13823], [13695, 13824, 13823], [13695, 13696, 13825], [13695, 13825, 13824], [13696, 13697, 13825], [13697, 13826, 13825], [13697, 13698, 13827], [13697, 13827, 13826], [13698, 13699, 13827], [13699, 13828, 13827], [13699, 13700, 13829], [13699, 13829, 13828], [13700, 13701, 13829], [13701, 13830, 13829], [13701, 13702, 13831], [13701, 13831, 13830], [13702, 13703, 13831], [13703, 13832, 13831], [13703, 13704, 13833], [13703, 13833, 13832], [13704, 13705, 13833], [13705, 13834, 13833], [13705, 13706, 13835], [13705, 13835, 13834], [13706, 13707, 13835], [13707, 13836, 13835], [13707, 13708, 13837], [13707, 13837, 13836], [13708, 13709, 13837], [13709, 13838, 13837], 
[13709, 13710, 13839], [13709, 13839, 13838], [13710, 13711, 13839], [13711, 13840, 13839], [13711, 13712, 13841], [13711, 13841, 13840], [13712, 13713, 13841], [13713, 13842, 13841], [13713, 13714, 13843], [13713, 13843, 13842], [13714, 13715, 13843], [13715, 13844, 13843], [13715, 13716, 13845], [13715, 13845, 13844], [13716, 13717, 13845], [13717, 13846, 13845], [13717, 13718, 13847], [13717, 13847, 13846], [13718, 13719, 13847], [13719, 13848, 13847], [13720, 13721, 13849], [13721, 13850, 13849], [13721, 13722, 13851], [13721, 13851, 13850], [13722, 13723, 13851], [13723, 13852, 13851], [13723, 13724, 13853], [13723, 13853, 13852], [13724, 13725, 13853], [13725, 13854, 13853], [13725, 13726, 13855], [13725, 13855, 13854], [13726, 13727, 13855], [13727, 13856, 13855], [13727, 13728, 13857], [13727, 13857, 13856], [13728, 13729, 13857], [13729, 13858, 13857], [13729, 13730, 13859], [13729, 13859, 13858], [13730, 13731, 13859], [13731, 13860, 13859], [13731, 13732, 13861], [13731, 13861, 13860], [13732, 13733, 13861], [13733, 13862, 13861], [13733, 13734, 13863], [13733, 13863, 13862], [13734, 13735, 13863], [13735, 13864, 13863], [13735, 13736, 13865], [13735, 13865, 13864], [13736, 13737, 13865], [13737, 13866, 13865], [13737, 13738, 13867], [13737, 13867, 13866], [13738, 13739, 13867], [13739, 13868, 13867], [13739, 13740, 13869], [13739, 13869, 13868], [13740, 13741, 13869], [13741, 13870, 13869], [13741, 13742, 13871], [13741, 13871, 13870], [13742, 13743, 13871], [13743, 13872, 13871], [13743, 13744, 13873], [13743, 13873, 13872], [13744, 13745, 13873], [13745, 13874, 13873], [13745, 13746, 13875], [13745, 13875, 13874], [13746, 13747, 13875], [13747, 13876, 13875], [13747, 13748, 13877], [13747, 13877, 13876], [13748, 13749, 13877], [13749, 13878, 13877], [13749, 13750, 13879], [13749, 13879, 13878], [13750, 13751, 13879], [13751, 13880, 13879], [13751, 13752, 13881], [13751, 13881, 13880], [13752, 13753, 13881], [13753, 13882, 13881], [13753, 13754, 13883], [13753, 13883, 13882], [13754, 13755, 13883], [13755, 13884, 13883], [13755, 13756, 13885], [13755, 13885, 13884], [13756, 13757, 13885], [13757, 13886, 13885], [13757, 13758, 13887], [13757, 13887, 13886], [13758, 13759, 13887], [13759, 13888, 13887], [13759, 13760, 13889], [13759, 13889, 13888], [13760, 13761, 13889], [13761, 13890, 13889], [13761, 13762, 13891], [13761, 13891, 13890], [13762, 13763, 13891], [13763, 13892, 13891], [13763, 13764, 13893], [13763, 13893, 13892], [13764, 13765, 13893], [13765, 13894, 13893], [13765, 13766, 13895], [13765, 13895, 13894], [13766, 13767, 13895], [13767, 13896, 13895], [13767, 13768, 13897], [13767, 13897, 13896], [13768, 13769, 13897], [13769, 13898, 13897], [13769, 13770, 13899], [13769, 13899, 13898], [13770, 13771, 13899], [13771, 13900, 13899], [13771, 13772, 13901], [13771, 13901, 13900], [13772, 13773, 13901], [13773, 13902, 13901], [13773, 13774, 13903], [13773, 13903, 13902], [13774, 13775, 13903], [13775, 13904, 13903], [13775, 13776, 13905], [13775, 13905, 13904], [13776, 13777, 13905], [13777, 13906, 13905], [13777, 13778, 13907], [13777, 13907, 13906], [13778, 13779, 13907], [13779, 13908, 13907], [13779, 13780, 13909], [13779, 13909, 13908], [13780, 13781, 13909], [13781, 13910, 13909], [13781, 13782, 13911], [13781, 13911, 13910], [13782, 13783, 13911], [13783, 13912, 13911], [13783, 13784, 13913], [13783, 13913, 13912], [13784, 13785, 13913], [13785, 13914, 13913], [13785, 13786, 13915], [13785, 13915, 13914], [13786, 13787, 13915], [13787, 13916, 13915], [13787, 
13788, 13917], [13787, 13917, 13916], [13788, 13789, 13917], [13789, 13918, 13917], [13789, 13790, 13919], [13789, 13919, 13918], [13790, 13791, 13919], [13791, 13920, 13919], [13791, 13792, 13921], [13791, 13921, 13920], [13792, 13793, 13921], [13793, 13922, 13921], [13793, 13794, 13923], [13793, 13923, 13922], [13794, 13795, 13923], [13795, 13924, 13923], [13795, 13796, 13925], [13795, 13925, 13924], [13796, 13797, 13925], [13797, 13926, 13925], [13797, 13798, 13927], [13797, 13927, 13926], [13798, 13799, 13927], [13799, 13928, 13927], [13799, 13800, 13929], [13799, 13929, 13928], [13800, 13801, 13929], [13801, 13930, 13929], [13801, 13802, 13931], [13801, 13931, 13930], [13802, 13803, 13931], [13803, 13932, 13931], [13803, 13804, 13933], [13803, 13933, 13932], [13804, 13805, 13933], [13805, 13934, 13933], [13805, 13806, 13935], [13805, 13935, 13934], [13806, 13807, 13935], [13807, 13936, 13935], [13807, 13808, 13937], [13807, 13937, 13936], [13808, 13809, 13937], [13809, 13938, 13937], [13809, 13810, 13939], [13809, 13939, 13938], [13810, 13811, 13939], [13811, 13940, 13939], [13811, 13812, 13941], [13811, 13941, 13940], [13812, 13813, 13941], [13813, 13942, 13941], [13813, 13814, 13943], [13813, 13943, 13942], [13814, 13815, 13943], [13815, 13944, 13943], [13815, 13816, 13945], [13815, 13945, 13944], [13816, 13817, 13945], [13817, 13946, 13945], [13817, 13818, 13947], [13817, 13947, 13946], [13818, 13819, 13947], [13819, 13948, 13947], [13819, 13820, 13949], [13819, 13949, 13948], [13820, 13821, 13949], [13821, 13950, 13949], [13821, 13822, 13951], [13821, 13951, 13950], [13822, 13823, 13951], [13823, 13952, 13951], [13823, 13824, 13953], [13823, 13953, 13952], [13824, 13825, 13953], [13825, 13954, 13953], [13825, 13826, 13955], [13825, 13955, 13954], [13826, 13827, 13955], [13827, 13956, 13955], [13827, 13828, 13957], [13827, 13957, 13956], [13828, 13829, 13957], [13829, 13958, 13957], [13829, 13830, 13959], [13829, 13959, 13958], [13830, 13831, 13959], [13831, 13960, 13959], [13831, 13832, 13961], [13831, 13961, 13960], [13832, 13833, 13961], [13833, 13962, 13961], [13833, 13834, 13963], [13833, 13963, 13962], [13834, 13835, 13963], [13835, 13964, 13963], [13835, 13836, 13965], [13835, 13965, 13964], [13836, 13837, 13965], [13837, 13966, 13965], [13837, 13838, 13967], [13837, 13967, 13966], [13838, 13839, 13967], [13839, 13968, 13967], [13839, 13840, 13969], [13839, 13969, 13968], [13840, 13841, 13969], [13841, 13970, 13969], [13841, 13842, 13971], [13841, 13971, 13970], [13842, 13843, 13971], [13843, 13972, 13971], [13843, 13844, 13973], [13843, 13973, 13972], [13844, 13845, 13973], [13845, 13974, 13973], [13845, 13846, 13975], [13845, 13975, 13974], [13846, 13847, 13975], [13847, 13976, 13975], [13847, 13848, 13977], [13847, 13977, 13976], [13849, 13850, 13979], [13849, 13979, 13978], [13850, 13851, 13979], [13851, 13980, 13979], [13851, 13852, 13981], [13851, 13981, 13980], [13852, 13853, 13981], [13853, 13982, 13981], [13853, 13854, 13983], [13853, 13983, 13982], [13854, 13855, 13983], [13855, 13984, 13983], [13855, 13856, 13985], [13855, 13985, 13984], [13856, 13857, 13985], [13857, 13986, 13985], [13857, 13858, 13987], [13857, 13987, 13986], [13858, 13859, 13987], [13859, 13988, 13987], [13859, 13860, 13989], [13859, 13989, 13988], [13860, 13861, 13989], [13861, 13990, 13989], [13861, 13862, 13991], [13861, 13991, 13990], [13862, 13863, 13991], [13863, 13992, 13991], [13863, 13864, 13993], [13863, 13993, 13992], [13864, 13865, 13993], [13865, 13994, 13993], [13865, 13866, 
13995], [13865, 13995, 13994], [13866, 13867, 13995], [13867, 13996, 13995], [13867, 13868, 13997], [13867, 13997, 13996], [13868, 13869, 13997], [13869, 13998, 13997], [13869, 13870, 13999], [13869, 13999, 13998], [13870, 13871, 13999], [13871, 14000, 13999], [13871, 13872, 14001], [13871, 14001, 14000], [13872, 13873, 14001], [13873, 14002, 14001], [13873, 13874, 14003], [13873, 14003, 14002], [13874, 13875, 14003], [13875, 14004, 14003], [13875, 13876, 14005], [13875, 14005, 14004], [13876, 13877, 14005], [13877, 14006, 14005], [13877, 13878, 14007], [13877, 14007, 14006], [13878, 13879, 14007], [13879, 14008, 14007], [13879, 13880, 14009], [13879, 14009, 14008], [13880, 13881, 14009], [13881, 14010, 14009], [13881, 13882, 14011], [13881, 14011, 14010], [13882, 13883, 14011], [13883, 14012, 14011], [13883, 13884, 14013], [13883, 14013, 14012], [13884, 13885, 14013], [13885, 14014, 14013], [13885, 13886, 14015], [13885, 14015, 14014], [13886, 13887, 14015], [13887, 14016, 14015], [13887, 13888, 14017], [13887, 14017, 14016], [13888, 13889, 14017], [13889, 14018, 14017], [13889, 13890, 14019], [13889, 14019, 14018], [13890, 13891, 14019], [13891, 14020, 14019], [13891, 13892, 14021], [13891, 14021, 14020], [13892, 13893, 14021], [13893, 14022, 14021], [13893, 13894, 14023], [13893, 14023, 14022], [13894, 13895, 14023], [13895, 14024, 14023], [13895, 13896, 14025], [13895, 14025, 14024], [13896, 13897, 14025], [13897, 14026, 14025], [13897, 13898, 14027], [13897, 14027, 14026], [13898, 13899, 14027], [13899, 14028, 14027], [13899, 13900, 14029], [13899, 14029, 14028], [13900, 13901, 14029], [13901, 14030, 14029], [13901, 13902, 14031], [13901, 14031, 14030], [13902, 13903, 14031], [13903, 14032, 14031], [13903, 13904, 14033], [13903, 14033, 14032], [13904, 13905, 14033], [13905, 14034, 14033], [13905, 13906, 14035], [13905, 14035, 14034], [13906, 13907, 14035], [13907, 14036, 14035], [13907, 13908, 14037], [13907, 14037, 14036], [13908, 13909, 14037], [13909, 14038, 14037], [13909, 13910, 14039], [13909, 14039, 14038], [13910, 13911, 14039], [13911, 14040, 14039], [13911, 13912, 14041], [13911, 14041, 14040], [13912, 13913, 14041], [13913, 14042, 14041], [13913, 13914, 14043], [13913, 14043, 14042], [13914, 13915, 14043], [13915, 14044, 14043], [13915, 13916, 14045], [13915, 14045, 14044], [13916, 13917, 14045], [13917, 14046, 14045], [13917, 13918, 14047], [13917, 14047, 14046], [13918, 13919, 14047], [13919, 14048, 14047], [13919, 13920, 14049], [13919, 14049, 14048], [13920, 13921, 14049], [13921, 14050, 14049], [13921, 13922, 14051], [13921, 14051, 14050], [13922, 13923, 14051], [13923, 14052, 14051], [13923, 13924, 14053], [13923, 14053, 14052], [13924, 13925, 14053], [13925, 14054, 14053], [13925, 13926, 14055], [13925, 14055, 14054], [13926, 13927, 14055], [13927, 14056, 14055], [13927, 13928, 14057], [13927, 14057, 14056], [13928, 13929, 14057], [13929, 14058, 14057], [13929, 13930, 14059], [13929, 14059, 14058], [13930, 13931, 14059], [13931, 14060, 14059], [13931, 13932, 14061], [13931, 14061, 14060], [13932, 13933, 14061], [13933, 14062, 14061], [13933, 13934, 14063], [13933, 14063, 14062], [13934, 13935, 14063], [13935, 14064, 14063], [13935, 13936, 14065], [13935, 14065, 14064], [13936, 13937, 14065], [13937, 14066, 14065], [13937, 13938, 14067], [13937, 14067, 14066], [13938, 13939, 14067], [13939, 14068, 14067], [13939, 13940, 14069], [13939, 14069, 14068], [13940, 13941, 14069], [13941, 14070, 14069], [13941, 13942, 14071], [13941, 14071, 14070], [13942, 13943, 14071], 
[13943, 14072, 14071], [13943, 13944, 14073], [13943, 14073, 14072], [13944, 13945, 14073], [13945, 14074, 14073], [13945, 13946, 14075], [13945, 14075, 14074], [13946, 13947, 14075], [13947, 14076, 14075], [13947, 13948, 14077], [13947, 14077, 14076], [13948, 13949, 14077], [13949, 14078, 14077], [13949, 13950, 14079], [13949, 14079, 14078], [13950, 13951, 14079], [13951, 14080, 14079], [13951, 13952, 14081], [13951, 14081, 14080], [13952, 13953, 14081], [13953, 14082, 14081], [13953, 13954, 14083], [13953, 14083, 14082], [13954, 13955, 14083], [13955, 14084, 14083], [13955, 13956, 14085], [13955, 14085, 14084], [13956, 13957, 14085], [13957, 14086, 14085], [13957, 13958, 14087], [13957, 14087, 14086], [13958, 13959, 14087], [13959, 14088, 14087], [13959, 13960, 14089], [13959, 14089, 14088], [13960, 13961, 14089], [13961, 14090, 14089], [13961, 13962, 14091], [13961, 14091, 14090], [13962, 13963, 14091], [13963, 14092, 14091], [13963, 13964, 14093], [13963, 14093, 14092], [13964, 13965, 14093], [13965, 14094, 14093], [13965, 13966, 14095], [13965, 14095, 14094], [13966, 13967, 14095], [13967, 14096, 14095], [13967, 13968, 14097], [13967, 14097, 14096], [13968, 13969, 14097], [13969, 14098, 14097], [13969, 13970, 14099], [13969, 14099, 14098], [13970, 13971, 14099], [13971, 14100, 14099], [13971, 13972, 14101], [13971, 14101, 14100], [13972, 13973, 14101], [13973, 14102, 14101], [13973, 13974, 14103], [13973, 14103, 14102], [13974, 13975, 14103], [13975, 14104, 14103], [13975, 13976, 14105], [13975, 14105, 14104], [13976, 13977, 14105], [13977, 14106, 14105], [13978, 13979, 14107], [13979, 14108, 14107], [13979, 13980, 14109], [13979, 14109, 14108], [13980, 13981, 14109], [13981, 14110, 14109], [13981, 13982, 14111], [13981, 14111, 14110], [13982, 13983, 14111], [13983, 14112, 14111], [13983, 13984, 14113], [13983, 14113, 14112], [13984, 13985, 14113], [13985, 14114, 14113], [13985, 13986, 14115], [13985, 14115, 14114], [13986, 13987, 14115], [13987, 14116, 14115], [13987, 13988, 14117], [13987, 14117, 14116], [13988, 13989, 14117], [13989, 14118, 14117], [13989, 13990, 14119], [13989, 14119, 14118], [13990, 13991, 14119], [13991, 14120, 14119], [13991, 13992, 14121], [13991, 14121, 14120], [13992, 13993, 14121], [13993, 14122, 14121], [13993, 13994, 14123], [13993, 14123, 14122], [13994, 13995, 14123], [13995, 14124, 14123], [13995, 13996, 14125], [13995, 14125, 14124], [13996, 13997, 14125], [13997, 14126, 14125], [13997, 13998, 14127], [13997, 14127, 14126], [13998, 13999, 14127], [13999, 14128, 14127], [13999, 14000, 14129], [13999, 14129, 14128], [14000, 14001, 14129], [14001, 14130, 14129], [14001, 14002, 14131], [14001, 14131, 14130], [14002, 14003, 14131], [14003, 14132, 14131], [14003, 14004, 14133], [14003, 14133, 14132], [14004, 14005, 14133], [14005, 14134, 14133], [14005, 14006, 14135], [14005, 14135, 14134], [14006, 14007, 14135], [14007, 14136, 14135], [14007, 14008, 14137], [14007, 14137, 14136], [14008, 14009, 14137], [14009, 14138, 14137], [14009, 14010, 14139], [14009, 14139, 14138], [14010, 14011, 14139], [14011, 14140, 14139], [14011, 14012, 14141], [14011, 14141, 14140], [14012, 14013, 14141], [14013, 14142, 14141], [14013, 14014, 14143], [14013, 14143, 14142], [14014, 14015, 14143], [14015, 14144, 14143], [14015, 14016, 14145], [14015, 14145, 14144], [14016, 14017, 14145], [14017, 14146, 14145], [14017, 14018, 14147], [14017, 14147, 14146], [14018, 14019, 14147], [14019, 14148, 14147], [14019, 14020, 14149], [14019, 14149, 14148], [14020, 14021, 14149], [14021, 
14150, 14149], [14021, 14022, 14151], [14021, 14151, 14150], [14022, 14023, 14151], [14023, 14152, 14151], [14023, 14024, 14153], [14023, 14153, 14152], [14024, 14025, 14153], [14025, 14154, 14153], [14025, 14026, 14155], [14025, 14155, 14154], [14026, 14027, 14155], [14027, 14156, 14155], [14027, 14028, 14157], [14027, 14157, 14156], [14028, 14029, 14157], [14029, 14158, 14157], [14029, 14030, 14159], [14029, 14159, 14158], [14030, 14031, 14159], [14031, 14160, 14159], [14031, 14032, 14161], [14031, 14161, 14160], [14032, 14033, 14161], [14033, 14162, 14161], [14033, 14034, 14163], [14033, 14163, 14162], [14034, 14035, 14163], [14035, 14164, 14163], [14035, 14036, 14165], [14035, 14165, 14164], [14036, 14037, 14165], [14037, 14166, 14165], [14037, 14038, 14167], [14037, 14167, 14166], [14038, 14039, 14167], [14039, 14168, 14167], [14039, 14040, 14169], [14039, 14169, 14168], [14040, 14041, 14169], [14041, 14170, 14169], [14041, 14042, 14171], [14041, 14171, 14170], [14042, 14043, 14171], [14043, 14172, 14171], [14043, 14044, 14173], [14043, 14173, 14172], [14044, 14045, 14173], [14045, 14174, 14173], [14045, 14046, 14175], [14045, 14175, 14174], [14046, 14047, 14175], [14047, 14176, 14175], [14047, 14048, 14177], [14047, 14177, 14176], [14048, 14049, 14177], [14049, 14178, 14177], [14049, 14050, 14179], [14049, 14179, 14178], [14050, 14051, 14179], [14051, 14180, 14179], [14051, 14052, 14181], [14051, 14181, 14180], [14052, 14053, 14181], [14053, 14182, 14181], [14053, 14054, 14183], [14053, 14183, 14182], [14054, 14055, 14183], [14055, 14184, 14183], [14055, 14056, 14185], [14055, 14185, 14184], [14056, 14057, 14185], [14057, 14186, 14185], [14057, 14058, 14187], [14057, 14187, 14186], [14058, 14059, 14187], [14059, 14188, 14187], [14059, 14060, 14189], [14059, 14189, 14188], [14060, 14061, 14189], [14061, 14190, 14189], [14061, 14062, 14191], [14061, 14191, 14190], [14062, 14063, 14191], [14063, 14192, 14191], [14063, 14064, 14193], [14063, 14193, 14192], [14064, 14065, 14193], [14065, 14194, 14193], [14065, 14066, 14195], [14065, 14195, 14194], [14066, 14067, 14195], [14067, 14196, 14195], [14067, 14068, 14197], [14067, 14197, 14196], [14068, 14069, 14197], [14069, 14198, 14197], [14069, 14070, 14199], [14069, 14199, 14198], [14070, 14071, 14199], [14071, 14200, 14199], [14071, 14072, 14201], [14071, 14201, 14200], [14072, 14073, 14201], [14073, 14202, 14201], [14073, 14074, 14203], [14073, 14203, 14202], [14074, 14075, 14203], [14075, 14204, 14203], [14075, 14076, 14205], [14075, 14205, 14204], [14076, 14077, 14205], [14077, 14206, 14205], [14077, 14078, 14207], [14077, 14207, 14206], [14078, 14079, 14207], [14079, 14208, 14207], [14079, 14080, 14209], [14079, 14209, 14208], [14080, 14081, 14209], [14081, 14210, 14209], [14081, 14082, 14211], [14081, 14211, 14210], [14082, 14083, 14211], [14083, 14212, 14211], [14083, 14084, 14213], [14083, 14213, 14212], [14084, 14085, 14213], [14085, 14214, 14213], [14085, 14086, 14215], [14085, 14215, 14214], [14086, 14087, 14215], [14087, 14216, 14215], [14087, 14088, 14217], [14087, 14217, 14216], [14088, 14089, 14217], [14089, 14218, 14217], [14089, 14090, 14219], [14089, 14219, 14218], [14090, 14091, 14219], [14091, 14220, 14219], [14091, 14092, 14221], [14091, 14221, 14220], [14092, 14093, 14221], [14093, 14222, 14221], [14093, 14094, 14223], [14093, 14223, 14222], [14094, 14095, 14223], [14095, 14224, 14223], [14095, 14096, 14225], [14095, 14225, 14224], [14096, 14097, 14225], [14097, 14226, 14225], [14097, 14098, 14227], [14097, 14227, 
14226], [14098, 14099, 14227], [14099, 14228, 14227], [14099, 14100, 14229], [14099, 14229, 14228], [14100, 14101, 14229], [14101, 14230, 14229], [14101, 14102, 14231], [14101, 14231, 14230], [14102, 14103, 14231], [14103, 14232, 14231], [14103, 14104, 14233], [14103, 14233, 14232], [14104, 14105, 14233], [14105, 14234, 14233], [14105, 14106, 14235], [14105, 14235, 14234], [14107, 14108, 14237], [14107, 14237, 14236], [14108, 14109, 14237], [14109, 14238, 14237], [14109, 14110, 14239], [14109, 14239, 14238], [14110, 14111, 14239], [14111, 14240, 14239], [14111, 14112, 14241], [14111, 14241, 14240], [14112, 14113, 14241], [14113, 14242, 14241], [14113, 14114, 14243], [14113, 14243, 14242], [14114, 14115, 14243], [14115, 14244, 14243], [14115, 14116, 14245], [14115, 14245, 14244], [14116, 14117, 14245], [14117, 14246, 14245], [14117, 14118, 14247], [14117, 14247, 14246], [14118, 14119, 14247], [14119, 14248, 14247], [14119, 14120, 14249], [14119, 14249, 14248], [14120, 14121, 14249], [14121, 14250, 14249], [14121, 14122, 14251], [14121, 14251, 14250], [14122, 14123, 14251], [14123, 14252, 14251], [14123, 14124, 14253], [14123, 14253, 14252], [14124, 14125, 14253], [14125, 14254, 14253], [14125, 14126, 14255], [14125, 14255, 14254], [14126, 14127, 14255], [14127, 14256, 14255], [14127, 14128, 14257], [14127, 14257, 14256], [14128, 14129, 14257], [14129, 14258, 14257], [14129, 14130, 14259], [14129, 14259, 14258], [14130, 14131, 14259], [14131, 14260, 14259], [14131, 14132, 14261], [14131, 14261, 14260], [14132, 14133, 14261], [14133, 14262, 14261], [14133, 14134, 14263], [14133, 14263, 14262], [14134, 14135, 14263], [14135, 14264, 14263], [14135, 14136, 14265], [14135, 14265, 14264], [14136, 14137, 14265], [14137, 14266, 14265], [14137, 14138, 14267], [14137, 14267, 14266], [14138, 14139, 14267], [14139, 14268, 14267], [14139, 14140, 14269], [14139, 14269, 14268], [14140, 14141, 14269], [14141, 14270, 14269], [14141, 14142, 14271], [14141, 14271, 14270], [14142, 14143, 14271], [14143, 14272, 14271], [14143, 14144, 14273], [14143, 14273, 14272], [14144, 14145, 14273], [14145, 14274, 14273], [14145, 14146, 14275], [14145, 14275, 14274], [14146, 14147, 14275], [14147, 14276, 14275], [14147, 14148, 14277], [14147, 14277, 14276], [14148, 14149, 14277], [14149, 14278, 14277], [14149, 14150, 14279], [14149, 14279, 14278], [14150, 14151, 14279], [14151, 14280, 14279], [14151, 14152, 14281], [14151, 14281, 14280], [14152, 14153, 14281], [14153, 14282, 14281], [14153, 14154, 14283], [14153, 14283, 14282], [14154, 14155, 14283], [14155, 14284, 14283], [14155, 14156, 14285], [14155, 14285, 14284], [14156, 14157, 14285], [14157, 14286, 14285], [14157, 14158, 14287], [14157, 14287, 14286], [14158, 14159, 14287], [14159, 14288, 14287], [14159, 14160, 14289], [14159, 14289, 14288], [14160, 14161, 14289], [14161, 14290, 14289], [14161, 14162, 14291], [14161, 14291, 14290], [14162, 14163, 14291], [14163, 14292, 14291], [14163, 14164, 14293], [14163, 14293, 14292], [14164, 14165, 14293], [14165, 14294, 14293], [14165, 14166, 14295], [14165, 14295, 14294], [14166, 14167, 14295], [14167, 14296, 14295], [14167, 14168, 14297], [14167, 14297, 14296], [14168, 14169, 14297], [14169, 14298, 14297], [14169, 14170, 14299], [14169, 14299, 14298], [14170, 14171, 14299], [14171, 14300, 14299], [14171, 14172, 14301], [14171, 14301, 14300], [14172, 14173, 14301], [14173, 14302, 14301], [14173, 14174, 14303], [14173, 14303, 14302], [14174, 14175, 14303], [14175, 14304, 14303], [14175, 14176, 14305], [14175, 14305, 14304], 
[14176, 14177, 14305], [14177, 14306, 14305], [14177, 14178, 14307], [14177, 14307, 14306], [14178, 14179, 14307], [14179, 14308, 14307], [14179, 14180, 14309], [14179, 14309, 14308], [14180, 14181, 14309], [14181, 14310, 14309], [14181, 14182, 14311], [14181, 14311, 14310], [14182, 14183, 14311], [14183, 14312, 14311], [14183, 14184, 14313], [14183, 14313, 14312], [14184, 14185, 14313], [14185, 14314, 14313], [14185, 14186, 14315], [14185, 14315, 14314], [14186, 14187, 14315], [14187, 14316, 14315], [14187, 14188, 14317], [14187, 14317, 14316], [14188, 14189, 14317], [14189, 14318, 14317], [14189, 14190, 14319], [14189, 14319, 14318], [14190, 14191, 14319], [14191, 14320, 14319], [14191, 14192, 14321], [14191, 14321, 14320], [14192, 14193, 14321], [14193, 14322, 14321], [14193, 14194, 14323], [14193, 14323, 14322], [14194, 14195, 14323], [14195, 14324, 14323], [14195, 14196, 14325], [14195, 14325, 14324], [14196, 14197, 14325], [14197, 14326, 14325], [14197, 14198, 14327], [14197, 14327, 14326], [14198, 14199, 14327], [14199, 14328, 14327], [14199, 14200, 14329], [14199, 14329, 14328], [14200, 14201, 14329], [14201, 14330, 14329], [14201, 14202, 14331], [14201, 14331, 14330], [14202, 14203, 14331], [14203, 14332, 14331], [14203, 14204, 14333], [14203, 14333, 14332], [14204, 14205, 14333], [14205, 14334, 14333], [14205, 14206, 14335], [14205, 14335, 14334], [14206, 14207, 14335], [14207, 14336, 14335], [14207, 14208, 14337], [14207, 14337, 14336], [14208, 14209, 14337], [14209, 14338, 14337], [14209, 14210, 14339], [14209, 14339, 14338], [14210, 14211, 14339], [14211, 14340, 14339], [14211, 14212, 14341], [14211, 14341, 14340], [14212, 14213, 14341], [14213, 14342, 14341], [14213, 14214, 14343], [14213, 14343, 14342], [14214, 14215, 14343], [14215, 14344, 14343], [14215, 14216, 14345], [14215, 14345, 14344], [14216, 14217, 14345], [14217, 14346, 14345], [14217, 14218, 14347], [14217, 14347, 14346], [14218, 14219, 14347], [14219, 14348, 14347], [14219, 14220, 14349], [14219, 14349, 14348], [14220, 14221, 14349], [14221, 14350, 14349], [14221, 14222, 14351], [14221, 14351, 14350], [14222, 14223, 14351], [14223, 14352, 14351], [14223, 14224, 14353], [14223, 14353, 14352], [14224, 14225, 14353], [14225, 14354, 14353], [14225, 14226, 14355], [14225, 14355, 14354], [14226, 14227, 14355], [14227, 14356, 14355], [14227, 14228, 14357], [14227, 14357, 14356], [14228, 14229, 14357], [14229, 14358, 14357], [14229, 14230, 14359], [14229, 14359, 14358], [14230, 14231, 14359], [14231, 14360, 14359], [14231, 14232, 14361], [14231, 14361, 14360], [14232, 14233, 14361], [14233, 14362, 14361], [14233, 14234, 14363], [14233, 14363, 14362], [14234, 14235, 14363], [14235, 14364, 14363], [14236, 14237, 14365], [14237, 14366, 14365], [14237, 14238, 14367], [14237, 14367, 14366], [14238, 14239, 14367], [14239, 14368, 14367], [14239, 14240, 14369], [14239, 14369, 14368], [14240, 14241, 14369], [14241, 14370, 14369], [14241, 14242, 14371], [14241, 14371, 14370], [14242, 14243, 14371], [14243, 14372, 14371], [14243, 14244, 14373], [14243, 14373, 14372], [14244, 14245, 14373], [14245, 14374, 14373], [14245, 14246, 14375], [14245, 14375, 14374], [14246, 14247, 14375], [14247, 14376, 14375], [14247, 14248, 14377], [14247, 14377, 14376], [14248, 14249, 14377], [14249, 14378, 14377], [14249, 14250, 14379], [14249, 14379, 14378], [14250, 14251, 14379], [14251, 14380, 14379], [14251, 14252, 14381], [14251, 14381, 14380], [14252, 14253, 14381], [14253, 14382, 14381], [14253, 14254, 14383], [14253, 14383, 14382], [14254, 
14255, 14383], [14255, 14384, 14383], [14255, 14256, 14385], [14255, 14385, 14384], [14256, 14257, 14385], [14257, 14386, 14385], [14257, 14258, 14387], [14257, 14387, 14386], [14258, 14259, 14387], [14259, 14388, 14387], [14259, 14260, 14389], [14259, 14389, 14388], [14260, 14261, 14389], [14261, 14390, 14389], [14261, 14262, 14391], [14261, 14391, 14390], [14262, 14263, 14391], [14263, 14392, 14391], [14263, 14264, 14393], [14263, 14393, 14392], [14264, 14265, 14393], [14265, 14394, 14393], [14265, 14266, 14395], [14265, 14395, 14394], [14266, 14267, 14395], [14267, 14396, 14395], [14267, 14268, 14397], [14267, 14397, 14396], [14268, 14269, 14397], [14269, 14398, 14397], [14269, 14270, 14399], [14269, 14399, 14398], [14270, 14271, 14399], [14271, 14400, 14399], [14271, 14272, 14401], [14271, 14401, 14400], [14272, 14273, 14401], [14273, 14402, 14401], [14273, 14274, 14403], [14273, 14403, 14402], [14274, 14275, 14403], [14275, 14404, 14403], [14275, 14276, 14405], [14275, 14405, 14404], [14276, 14277, 14405], [14277, 14406, 14405], [14277, 14278, 14407], [14277, 14407, 14406], [14278, 14279, 14407], [14279, 14408, 14407], [14279, 14280, 14409], [14279, 14409, 14408], [14280, 14281, 14409], [14281, 14410, 14409], [14281, 14282, 14411], [14281, 14411, 14410], [14282, 14283, 14411], [14283, 14412, 14411], [14283, 14284, 14413], [14283, 14413, 14412], [14284, 14285, 14413], [14285, 14414, 14413], [14285, 14286, 14415], [14285, 14415, 14414], [14286, 14287, 14415], [14287, 14416, 14415], [14287, 14288, 14417], [14287, 14417, 14416], [14288, 14289, 14417], [14289, 14418, 14417], [14289, 14290, 14419], [14289, 14419, 14418], [14290, 14291, 14419], [14291, 14420, 14419], [14291, 14292, 14421], [14291, 14421, 14420], [14292, 14293, 14421], [14293, 14422, 14421], [14293, 14294, 14423], [14293, 14423, 14422], [14294, 14295, 14423], [14295, 14424, 14423], [14295, 14296, 14425], [14295, 14425, 14424], [14296, 14297, 14425], [14297, 14426, 14425], [14297, 14298, 14427], [14297, 14427, 14426], [14298, 14299, 14427], [14299, 14428, 14427], [14299, 14300, 14429], [14299, 14429, 14428], [14300, 14301, 14429], [14301, 14430, 14429], [14301, 14302, 14431], [14301, 14431, 14430], [14302, 14303, 14431], [14303, 14432, 14431], [14303, 14304, 14433], [14303, 14433, 14432], [14304, 14305, 14433], [14305, 14434, 14433], [14305, 14306, 14435], [14305, 14435, 14434], [14306, 14307, 14435], [14307, 14436, 14435], [14307, 14308, 14437], [14307, 14437, 14436], [14308, 14309, 14437], [14309, 14438, 14437], [14309, 14310, 14439], [14309, 14439, 14438], [14310, 14311, 14439], [14311, 14440, 14439], [14311, 14312, 14441], [14311, 14441, 14440], [14312, 14313, 14441], [14313, 14442, 14441], [14313, 14314, 14443], [14313, 14443, 14442], [14314, 14315, 14443], [14315, 14444, 14443], [14315, 14316, 14445], [14315, 14445, 14444], [14316, 14317, 14445], [14317, 14446, 14445], [14317, 14318, 14447], [14317, 14447, 14446], [14318, 14319, 14447], [14319, 14448, 14447], [14319, 14320, 14449], [14319, 14449, 14448], [14320, 14321, 14449], [14321, 14450, 14449], [14321, 14322, 14451], [14321, 14451, 14450], [14322, 14323, 14451], [14323, 14452, 14451], [14323, 14324, 14453], [14323, 14453, 14452], [14324, 14325, 14453], [14325, 14454, 14453], [14325, 14326, 14455], [14325, 14455, 14454], [14326, 14327, 14455], [14327, 14456, 14455], [14327, 14328, 14457], [14327, 14457, 14456], [14328, 14329, 14457], [14329, 14458, 14457], [14329, 14330, 14459], [14329, 14459, 14458], [14330, 14331, 14459], [14331, 14460, 14459], [14331, 14332, 
14461], [14331, 14461, 14460], [14332, 14333, 14461], [14333, 14462, 14461], [14333, 14334, 14463], [14333, 14463, 14462], [14334, 14335, 14463], [14335, 14464, 14463], [14335, 14336, 14465], [14335, 14465, 14464], [14336, 14337, 14465], [14337, 14466, 14465], [14337, 14338, 14467], [14337, 14467, 14466], [14338, 14339, 14467], [14339, 14468, 14467], [14339, 14340, 14469], [14339, 14469, 14468], [14340, 14341, 14469], [14341, 14470, 14469], [14341, 14342, 14471], [14341, 14471, 14470], [14342, 14343, 14471], [14343, 14472, 14471], [14343, 14344, 14473], [14343, 14473, 14472], [14344, 14345, 14473], [14345, 14474, 14473], [14345, 14346, 14475], [14345, 14475, 14474], [14346, 14347, 14475], [14347, 14476, 14475], [14347, 14348, 14477], [14347, 14477, 14476], [14348, 14349, 14477], [14349, 14478, 14477], [14349, 14350, 14479], [14349, 14479, 14478], [14350, 14351, 14479], [14351, 14480, 14479], [14351, 14352, 14481], [14351, 14481, 14480], [14352, 14353, 14481], [14353, 14482, 14481], [14353, 14354, 14483], [14353, 14483, 14482], [14354, 14355, 14483], [14355, 14484, 14483], [14355, 14356, 14485], [14355, 14485, 14484], [14356, 14357, 14485], [14357, 14486, 14485], [14357, 14358, 14487], [14357, 14487, 14486], [14358, 14359, 14487], [14359, 14488, 14487], [14359, 14360, 14489], [14359, 14489, 14488], [14360, 14361, 14489], [14361, 14490, 14489], [14361, 14362, 14491], [14361, 14491, 14490], [14362, 14363, 14491], [14363, 14492, 14491], [14363, 14364, 14493], [14363, 14493, 14492], [14365, 14366, 14495], [14365, 14495, 14494], [14366, 14367, 14495], [14367, 14496, 14495], [14367, 14368, 14497], [14367, 14497, 14496], [14368, 14369, 14497], [14369, 14498, 14497], [14369, 14370, 14499], [14369, 14499, 14498], [14370, 14371, 14499], [14371, 14500, 14499], [14371, 14372, 14501], [14371, 14501, 14500], [14372, 14373, 14501], [14373, 14502, 14501], [14373, 14374, 14503], [14373, 14503, 14502], [14374, 14375, 14503], [14375, 14504, 14503], [14375, 14376, 14505], [14375, 14505, 14504], [14376, 14377, 14505], [14377, 14506, 14505], [14377, 14378, 14507], [14377, 14507, 14506], [14378, 14379, 14507], [14379, 14508, 14507], [14379, 14380, 14509], [14379, 14509, 14508], [14380, 14381, 14509], [14381, 14510, 14509], [14381, 14382, 14511], [14381, 14511, 14510], [14382, 14383, 14511], [14383, 14512, 14511], [14383, 14384, 14513], [14383, 14513, 14512], [14384, 14385, 14513], [14385, 14514, 14513], [14385, 14386, 14515], [14385, 14515, 14514], [14386, 14387, 14515], [14387, 14516, 14515], [14387, 14388, 14517], [14387, 14517, 14516], [14388, 14389, 14517], [14389, 14518, 14517], [14389, 14390, 14519], [14389, 14519, 14518], [14390, 14391, 14519], [14391, 14520, 14519], [14391, 14392, 14521], [14391, 14521, 14520], [14392, 14393, 14521], [14393, 14522, 14521], [14393, 14394, 14523], [14393, 14523, 14522], [14394, 14395, 14523], [14395, 14524, 14523], [14395, 14396, 14525], [14395, 14525, 14524], [14396, 14397, 14525], [14397, 14526, 14525], [14397, 14398, 14527], [14397, 14527, 14526], [14398, 14399, 14527], [14399, 14528, 14527], [14399, 14400, 14529], [14399, 14529, 14528], [14400, 14401, 14529], [14401, 14530, 14529], [14401, 14402, 14531], [14401, 14531, 14530], [14402, 14403, 14531], [14403, 14532, 14531], [14403, 14404, 14533], [14403, 14533, 14532], [14404, 14405, 14533], [14405, 14534, 14533], [14405, 14406, 14535], [14405, 14535, 14534], [14406, 14407, 14535], [14407, 14536, 14535], [14407, 14408, 14537], [14407, 14537, 14536], [14408, 14409, 14537], [14409, 14538, 14537], [14409, 14410, 14539], 
[14409, 14539, 14538], [14410, 14411, 14539], [14411, 14540, 14539], [14411, 14412, 14541], [14411, 14541, 14540], [14412, 14413, 14541], [14413, 14542, 14541], [14413, 14414, 14543], [14413, 14543, 14542], [14414, 14415, 14543], [14415, 14544, 14543], [14415, 14416, 14545], [14415, 14545, 14544], [14416, 14417, 14545], [14417, 14546, 14545], [14417, 14418, 14547], [14417, 14547, 14546], [14418, 14419, 14547], [14419, 14548, 14547], [14419, 14420, 14549], [14419, 14549, 14548], [14420, 14421, 14549], [14421, 14550, 14549], [14421, 14422, 14551], [14421, 14551, 14550], [14422, 14423, 14551], [14423, 14552, 14551], [14423, 14424, 14553], [14423, 14553, 14552], [14424, 14425, 14553], [14425, 14554, 14553], [14425, 14426, 14555], [14425, 14555, 14554], [14426, 14427, 14555], [14427, 14556, 14555], [14427, 14428, 14557], [14427, 14557, 14556], [14428, 14429, 14557], [14429, 14558, 14557], [14429, 14430, 14559], [14429, 14559, 14558], [14430, 14431, 14559], [14431, 14560, 14559], [14431, 14432, 14561], [14431, 14561, 14560], [14432, 14433, 14561], [14433, 14562, 14561], [14433, 14434, 14563], [14433, 14563, 14562], [14434, 14435, 14563], [14435, 14564, 14563], [14435, 14436, 14565], [14435, 14565, 14564], [14436, 14437, 14565], [14437, 14566, 14565], [14437, 14438, 14567], [14437, 14567, 14566], [14438, 14439, 14567], [14439, 14568, 14567], [14439, 14440, 14569], [14439, 14569, 14568], [14440, 14441, 14569], [14441, 14570, 14569], [14441, 14442, 14571], [14441, 14571, 14570], [14442, 14443, 14571], [14443, 14572, 14571], [14443, 14444, 14573], [14443, 14573, 14572], [14444, 14445, 14573], [14445, 14574, 14573], [14445, 14446, 14575], [14445, 14575, 14574], [14446, 14447, 14575], [14447, 14576, 14575], [14447, 14448, 14577], [14447, 14577, 14576], [14448, 14449, 14577], [14449, 14578, 14577], [14449, 14450, 14579], [14449, 14579, 14578], [14450, 14451, 14579], [14451, 14580, 14579], [14451, 14452, 14581], [14451, 14581, 14580], [14452, 14453, 14581], [14453, 14582, 14581], [14453, 14454, 14583], [14453, 14583, 14582], [14454, 14455, 14583], [14455, 14584, 14583], [14455, 14456, 14585], [14455, 14585, 14584], [14456, 14457, 14585], [14457, 14586, 14585], [14457, 14458, 14587], [14457, 14587, 14586], [14458, 14459, 14587], [14459, 14588, 14587], [14459, 14460, 14589], [14459, 14589, 14588], [14460, 14461, 14589], [14461, 14590, 14589], [14461, 14462, 14591], [14461, 14591, 14590], [14462, 14463, 14591], [14463, 14592, 14591], [14463, 14464, 14593], [14463, 14593, 14592], [14464, 14465, 14593], [14465, 14594, 14593], [14465, 14466, 14595], [14465, 14595, 14594], [14466, 14467, 14595], [14467, 14596, 14595], [14467, 14468, 14597], [14467, 14597, 14596], [14468, 14469, 14597], [14469, 14598, 14597], [14469, 14470, 14599], [14469, 14599, 14598], [14470, 14471, 14599], [14471, 14600, 14599], [14471, 14472, 14601], [14471, 14601, 14600], [14472, 14473, 14601], [14473, 14602, 14601], [14473, 14474, 14603], [14473, 14603, 14602], [14474, 14475, 14603], [14475, 14604, 14603], [14475, 14476, 14605], [14475, 14605, 14604], [14476, 14477, 14605], [14477, 14606, 14605], [14477, 14478, 14607], [14477, 14607, 14606], [14478, 14479, 14607], [14479, 14608, 14607], [14479, 14480, 14609], [14479, 14609, 14608], [14480, 14481, 14609], [14481, 14610, 14609], [14481, 14482, 14611], [14481, 14611, 14610], [14482, 14483, 14611], [14483, 14612, 14611], [14483, 14484, 14613], [14483, 14613, 14612], [14484, 14485, 14613], [14485, 14614, 14613], [14485, 14486, 14615], [14485, 14615, 14614], [14486, 14487, 14615], [14487, 
14616, 14615], [14487, 14488, 14617], [14487, 14617, 14616], [14488, 14489, 14617], [14489, 14618, 14617], [14489, 14490, 14619], [14489, 14619, 14618], [14490, 14491, 14619], [14491, 14620, 14619], [14491, 14492, 14621], [14491, 14621, 14620], [14492, 14493, 14621], [14493, 14622, 14621], [14494, 14495, 14623], [14495, 14624, 14623], [14495, 14496, 14625], [14495, 14625, 14624], [14496, 14497, 14625], [14497, 14626, 14625], [14497, 14498, 14627], [14497, 14627, 14626], [14498, 14499, 14627], [14499, 14628, 14627], [14499, 14500, 14629], [14499, 14629, 14628], [14500, 14501, 14629], [14501, 14630, 14629], [14501, 14502, 14631], [14501, 14631, 14630], [14502, 14503, 14631], [14503, 14632, 14631], [14503, 14504, 14633], [14503, 14633, 14632], [14504, 14505, 14633], [14505, 14634, 14633], [14505, 14506, 14635], [14505, 14635, 14634], [14506, 14507, 14635], [14507, 14636, 14635], [14507, 14508, 14637], [14507, 14637, 14636], [14508, 14509, 14637], [14509, 14638, 14637], [14509, 14510, 14639], [14509, 14639, 14638], [14510, 14511, 14639], [14511, 14640, 14639], [14511, 14512, 14641], [14511, 14641, 14640], [14512, 14513, 14641], [14513, 14642, 14641], [14513, 14514, 14643], [14513, 14643, 14642], [14514, 14515, 14643], [14515, 14644, 14643], [14515, 14516, 14645], [14515, 14645, 14644], [14516, 14517, 14645], [14517, 14646, 14645], [14517, 14518, 14647], [14517, 14647, 14646], [14518, 14519, 14647], [14519, 14648, 14647], [14519, 14520, 14649], [14519, 14649, 14648], [14520, 14521, 14649], [14521, 14650, 14649], [14521, 14522, 14651], [14521, 14651, 14650], [14522, 14523, 14651], [14523, 14652, 14651], [14523, 14524, 14653], [14523, 14653, 14652], [14524, 14525, 14653], [14525, 14654, 14653], [14525, 14526, 14655], [14525, 14655, 14654], [14526, 14527, 14655], [14527, 14656, 14655], [14527, 14528, 14657], [14527, 14657, 14656], [14528, 14529, 14657], [14529, 14658, 14657], [14529, 14530, 14659], [14529, 14659, 14658], [14530, 14531, 14659], [14531, 14660, 14659], [14531, 14532, 14661], [14531, 14661, 14660], [14532, 14533, 14661], [14533, 14662, 14661], [14533, 14534, 14663], [14533, 14663, 14662], [14534, 14535, 14663], [14535, 14664, 14663], [14535, 14536, 14665], [14535, 14665, 14664], [14536, 14537, 14665], [14537, 14666, 14665], [14537, 14538, 14667], [14537, 14667, 14666], [14538, 14539, 14667], [14539, 14668, 14667], [14539, 14540, 14669], [14539, 14669, 14668], [14540, 14541, 14669], [14541, 14670, 14669], [14541, 14542, 14671], [14541, 14671, 14670], [14542, 14543, 14671], [14543, 14672, 14671], [14543, 14544, 14673], [14543, 14673, 14672], [14544, 14545, 14673], [14545, 14674, 14673], [14545, 14546, 14675], [14545, 14675, 14674], [14546, 14547, 14675], [14547, 14676, 14675], [14547, 14548, 14677], [14547, 14677, 14676], [14548, 14549, 14677], [14549, 14678, 14677], [14549, 14550, 14679], [14549, 14679, 14678], [14550, 14551, 14679], [14551, 14680, 14679], [14551, 14552, 14681], [14551, 14681, 14680], [14552, 14553, 14681], [14553, 14682, 14681], [14553, 14554, 14683], [14553, 14683, 14682], [14554, 14555, 14683], [14555, 14684, 14683], [14555, 14556, 14685], [14555, 14685, 14684], [14556, 14557, 14685], [14557, 14686, 14685], [14557, 14558, 14687], [14557, 14687, 14686], [14558, 14559, 14687], [14559, 14688, 14687], [14559, 14560, 14689], [14559, 14689, 14688], [14560, 14561, 14689], [14561, 14690, 14689], [14561, 14562, 14691], [14561, 14691, 14690], [14562, 14563, 14691], [14563, 14692, 14691], [14563, 14564, 14693], [14563, 14693, 14692], [14564, 14565, 14693], [14565, 14694, 
14693], [14565, 14566, 14695], [14565, 14695, 14694], [14566, 14567, 14695], [14567, 14696, 14695], [14567, 14568, 14697], [14567, 14697, 14696], [14568, 14569, 14697], [14569, 14698, 14697], [14569, 14570, 14699], [14569, 14699, 14698], [14570, 14571, 14699], [14571, 14700, 14699], [14571, 14572, 14701], [14571, 14701, 14700], [14572, 14573, 14701], [14573, 14702, 14701], [14573, 14574, 14703], [14573, 14703, 14702], [14574, 14575, 14703], [14575, 14704, 14703], [14575, 14576, 14705], [14575, 14705, 14704], [14576, 14577, 14705], [14577, 14706, 14705], [14577, 14578, 14707], [14577, 14707, 14706], [14578, 14579, 14707], [14579, 14708, 14707], [14579, 14580, 14709], [14579, 14709, 14708], [14580, 14581, 14709], [14581, 14710, 14709], [14581, 14582, 14711], [14581, 14711, 14710], [14582, 14583, 14711], [14583, 14712, 14711], [14583, 14584, 14713], [14583, 14713, 14712], [14584, 14585, 14713], [14585, 14714, 14713], [14585, 14586, 14715], [14585, 14715, 14714], [14586, 14587, 14715], [14587, 14716, 14715], [14587, 14588, 14717], [14587, 14717, 14716], [14588, 14589, 14717], [14589, 14718, 14717], [14589, 14590, 14719], [14589, 14719, 14718], [14590, 14591, 14719], [14591, 14720, 14719], [14591, 14592, 14721], [14591, 14721, 14720], [14592, 14593, 14721], [14593, 14722, 14721], [14593, 14594, 14723], [14593, 14723, 14722], [14594, 14595, 14723], [14595, 14724, 14723], [14595, 14596, 14725], [14595, 14725, 14724], [14596, 14597, 14725], [14597, 14726, 14725], [14597, 14598, 14727], [14597, 14727, 14726], [14598, 14599, 14727], [14599, 14728, 14727], [14599, 14600, 14729], [14599, 14729, 14728], [14600, 14601, 14729], [14601, 14730, 14729], [14601, 14602, 14731], [14601, 14731, 14730], [14602, 14603, 14731], [14603, 14732, 14731], [14603, 14604, 14733], [14603, 14733, 14732], [14604, 14605, 14733], [14605, 14734, 14733], [14605, 14606, 14735], [14605, 14735, 14734], [14606, 14607, 14735], [14607, 14736, 14735], [14607, 14608, 14737], [14607, 14737, 14736], [14608, 14609, 14737], [14609, 14738, 14737], [14609, 14610, 14739], [14609, 14739, 14738], [14610, 14611, 14739], [14611, 14740, 14739], [14611, 14612, 14741], [14611, 14741, 14740], [14612, 14613, 14741], [14613, 14742, 14741], [14613, 14614, 14743], [14613, 14743, 14742], [14614, 14615, 14743], [14615, 14744, 14743], [14615, 14616, 14745], [14615, 14745, 14744], [14616, 14617, 14745], [14617, 14746, 14745], [14617, 14618, 14747], [14617, 14747, 14746], [14618, 14619, 14747], [14619, 14748, 14747], [14619, 14620, 14749], [14619, 14749, 14748], [14620, 14621, 14749], [14621, 14750, 14749], [14621, 14622, 14751], [14621, 14751, 14750], [14623, 14624, 14753], [14623, 14753, 14752], [14624, 14625, 14753], [14625, 14754, 14753], [14625, 14626, 14755], [14625, 14755, 14754], [14626, 14627, 14755], [14627, 14756, 14755], [14627, 14628, 14757], [14627, 14757, 14756], [14628, 14629, 14757], [14629, 14758, 14757], [14629, 14630, 14759], [14629, 14759, 14758], [14630, 14631, 14759], [14631, 14760, 14759], [14631, 14632, 14761], [14631, 14761, 14760], [14632, 14633, 14761], [14633, 14762, 14761], [14633, 14634, 14763], [14633, 14763, 14762], [14634, 14635, 14763], [14635, 14764, 14763], [14635, 14636, 14765], [14635, 14765, 14764], [14636, 14637, 14765], [14637, 14766, 14765], [14637, 14638, 14767], [14637, 14767, 14766], [14638, 14639, 14767], [14639, 14768, 14767], [14639, 14640, 14769], [14639, 14769, 14768], [14640, 14641, 14769], [14641, 14770, 14769], [14641, 14642, 14771], [14641, 14771, 14770], [14642, 14643, 14771], [14643, 14772, 14771], 
[14643, 14644, 14773], [14643, 14773, 14772], [14644, 14645, 14773], [14645, 14774, 14773], [14645, 14646, 14775], [14645, 14775, 14774], [14646, 14647, 14775], [14647, 14776, 14775], [14647, 14648, 14777], [14647, 14777, 14776], [14648, 14649, 14777], [14649, 14778, 14777], [14649, 14650, 14779], [14649, 14779, 14778], [14650, 14651, 14779], [14651, 14780, 14779], [14651, 14652, 14781], [14651, 14781, 14780], [14652, 14653, 14781], [14653, 14782, 14781], [14653, 14654, 14783], [14653, 14783, 14782], [14654, 14655, 14783], [14655, 14784, 14783], [14655, 14656, 14785], [14655, 14785, 14784], [14656, 14657, 14785], [14657, 14786, 14785], [14657, 14658, 14787], [14657, 14787, 14786], [14658, 14659, 14787], [14659, 14788, 14787], [14659, 14660, 14789], [14659, 14789, 14788], [14660, 14661, 14789], [14661, 14790, 14789], [14661, 14662, 14791], [14661, 14791, 14790], [14662, 14663, 14791], [14663, 14792, 14791], [14663, 14664, 14793], [14663, 14793, 14792], [14664, 14665, 14793], [14665, 14794, 14793], [14665, 14666, 14795], [14665, 14795, 14794], [14666, 14667, 14795], [14667, 14796, 14795], [14667, 14668, 14797], [14667, 14797, 14796], [14668, 14669, 14797], [14669, 14798, 14797], [14669, 14670, 14799], [14669, 14799, 14798], [14670, 14671, 14799], [14671, 14800, 14799], [14671, 14672, 14801], [14671, 14801, 14800], [14672, 14673, 14801], [14673, 14802, 14801], [14673, 14674, 14803], [14673, 14803, 14802], [14674, 14675, 14803], [14675, 14804, 14803], [14675, 14676, 14805], [14675, 14805, 14804], [14676, 14677, 14805], [14677, 14806, 14805], [14677, 14678, 14807], [14677, 14807, 14806], [14678, 14679, 14807], [14679, 14808, 14807], [14679, 14680, 14809], [14679, 14809, 14808], [14680, 14681, 14809], [14681, 14810, 14809], [14681, 14682, 14811], [14681, 14811, 14810], [14682, 14683, 14811], [14683, 14812, 14811], [14683, 14684, 14813], [14683, 14813, 14812], [14684, 14685, 14813], [14685, 14814, 14813], [14685, 14686, 14815], [14685, 14815, 14814], [14686, 14687, 14815], [14687, 14816, 14815], [14687, 14688, 14817], [14687, 14817, 14816], [14688, 14689, 14817], [14689, 14818, 14817], [14689, 14690, 14819], [14689, 14819, 14818], [14690, 14691, 14819], [14691, 14820, 14819], [14691, 14692, 14821], [14691, 14821, 14820], [14692, 14693, 14821], [14693, 14822, 14821], [14693, 14694, 14823], [14693, 14823, 14822], [14694, 14695, 14823], [14695, 14824, 14823], [14695, 14696, 14825], [14695, 14825, 14824], [14696, 14697, 14825], [14697, 14826, 14825], [14697, 14698, 14827], [14697, 14827, 14826], [14698, 14699, 14827], [14699, 14828, 14827], [14699, 14700, 14829], [14699, 14829, 14828], [14700, 14701, 14829], [14701, 14830, 14829], [14701, 14702, 14831], [14701, 14831, 14830], [14702, 14703, 14831], [14703, 14832, 14831], [14703, 14704, 14833], [14703, 14833, 14832], [14704, 14705, 14833], [14705, 14834, 14833], [14705, 14706, 14835], [14705, 14835, 14834], [14706, 14707, 14835], [14707, 14836, 14835], [14707, 14708, 14837], [14707, 14837, 14836], [14708, 14709, 14837], [14709, 14838, 14837], [14709, 14710, 14839], [14709, 14839, 14838], [14710, 14711, 14839], [14711, 14840, 14839], [14711, 14712, 14841], [14711, 14841, 14840], [14712, 14713, 14841], [14713, 14842, 14841], [14713, 14714, 14843], [14713, 14843, 14842], [14714, 14715, 14843], [14715, 14844, 14843], [14715, 14716, 14845], [14715, 14845, 14844], [14716, 14717, 14845], [14717, 14846, 14845], [14717, 14718, 14847], [14717, 14847, 14846], [14718, 14719, 14847], [14719, 14848, 14847], [14719, 14720, 14849], [14719, 14849, 14848], [14720, 
14721, 14849], [14721, 14850, 14849], [14721, 14722, 14851], [14721, 14851, 14850], [14722, 14723, 14851], [14723, 14852, 14851], [14723, 14724, 14853], [14723, 14853, 14852], [14724, 14725, 14853], [14725, 14854, 14853], [14725, 14726, 14855], [14725, 14855, 14854], [14726, 14727, 14855], [14727, 14856, 14855], [14727, 14728, 14857], [14727, 14857, 14856], [14728, 14729, 14857], [14729, 14858, 14857], [14729, 14730, 14859], [14729, 14859, 14858], [14730, 14731, 14859], [14731, 14860, 14859], [14731, 14732, 14861], [14731, 14861, 14860], [14732, 14733, 14861], [14733, 14862, 14861], [14733, 14734, 14863], [14733, 14863, 14862], [14734, 14735, 14863], [14735, 14864, 14863], [14735, 14736, 14865], [14735, 14865, 14864], [14736, 14737, 14865], [14737, 14866, 14865], [14737, 14738, 14867], [14737, 14867, 14866], [14738, 14739, 14867], [14739, 14868, 14867], [14739, 14740, 14869], [14739, 14869, 14868], [14740, 14741, 14869], [14741, 14870, 14869], [14741, 14742, 14871], [14741, 14871, 14870], [14742, 14743, 14871], [14743, 14872, 14871], [14743, 14744, 14873], [14743, 14873, 14872], [14744, 14745, 14873], [14745, 14874, 14873], [14745, 14746, 14875], [14745, 14875, 14874], [14746, 14747, 14875], [14747, 14876, 14875], [14747, 14748, 14877], [14747, 14877, 14876], [14748, 14749, 14877], [14749, 14878, 14877], [14749, 14750, 14879], [14749, 14879, 14878], [14750, 14751, 14879], [14751, 14880, 14879], [14752, 14753, 14881], [14753, 14882, 14881], [14753, 14754, 14883], [14753, 14883, 14882], [14754, 14755, 14883], [14755, 14884, 14883], [14755, 14756, 14885], [14755, 14885, 14884], [14756, 14757, 14885], [14757, 14886, 14885], [14757, 14758, 14887], [14757, 14887, 14886], [14758, 14759, 14887], [14759, 14888, 14887], [14759, 14760, 14889], [14759, 14889, 14888], [14760, 14761, 14889], [14761, 14890, 14889], [14761, 14762, 14891], [14761, 14891, 14890], [14762, 14763, 14891], [14763, 14892, 14891], [14763, 14764, 14893], [14763, 14893, 14892], [14764, 14765, 14893], [14765, 14894, 14893], [14765, 14766, 14895], [14765, 14895, 14894], [14766, 14767, 14895], [14767, 14896, 14895], [14767, 14768, 14897], [14767, 14897, 14896], [14768, 14769, 14897], [14769, 14898, 14897], [14769, 14770, 14899], [14769, 14899, 14898], [14770, 14771, 14899], [14771, 14900, 14899], [14771, 14772, 14901], [14771, 14901, 14900], [14772, 14773, 14901], [14773, 14902, 14901], [14773, 14774, 14903], [14773, 14903, 14902], [14774, 14775, 14903], [14775, 14904, 14903], [14775, 14776, 14905], [14775, 14905, 14904], [14776, 14777, 14905], [14777, 14906, 14905], [14777, 14778, 14907], [14777, 14907, 14906], [14778, 14779, 14907], [14779, 14908, 14907], [14779, 14780, 14909], [14779, 14909, 14908], [14780, 14781, 14909], [14781, 14910, 14909], [14781, 14782, 14911], [14781, 14911, 14910], [14782, 14783, 14911], [14783, 14912, 14911], [14783, 14784, 14913], [14783, 14913, 14912], [14784, 14785, 14913], [14785, 14914, 14913], [14785, 14786, 14915], [14785, 14915, 14914], [14786, 14787, 14915], [14787, 14916, 14915], [14787, 14788, 14917], [14787, 14917, 14916], [14788, 14789, 14917], [14789, 14918, 14917], [14789, 14790, 14919], [14789, 14919, 14918], [14790, 14791, 14919], [14791, 14920, 14919], [14791, 14792, 14921], [14791, 14921, 14920], [14792, 14793, 14921], [14793, 14922, 14921], [14793, 14794, 14923], [14793, 14923, 14922], [14794, 14795, 14923], [14795, 14924, 14923], [14795, 14796, 14925], [14795, 14925, 14924], [14796, 14797, 14925], [14797, 14926, 14925], [14797, 14798, 14927], [14797, 14927, 14926], [14798, 14799, 
14927], [14799, 14928, 14927], [14799, 14800, 14929], [14799, 14929, 14928], [14800, 14801, 14929], [14801, 14930, 14929], [14801, 14802, 14931], [14801, 14931, 14930], [14802, 14803, 14931], [14803, 14932, 14931], [14803, 14804, 14933], [14803, 14933, 14932], [14804, 14805, 14933], [14805, 14934, 14933], [14805, 14806, 14935], [14805, 14935, 14934], [14806, 14807, 14935], [14807, 14936, 14935], [14807, 14808, 14937], [14807, 14937, 14936], [14808, 14809, 14937], [14809, 14938, 14937], [14809, 14810, 14939], [14809, 14939, 14938], [14810, 14811, 14939], [14811, 14940, 14939], [14811, 14812, 14941], [14811, 14941, 14940], [14812, 14813, 14941], [14813, 14942, 14941], [14813, 14814, 14943], [14813, 14943, 14942], [14814, 14815, 14943], [14815, 14944, 14943], [14815, 14816, 14945], [14815, 14945, 14944], [14816, 14817, 14945], [14817, 14946, 14945], [14817, 14818, 14947], [14817, 14947, 14946], [14818, 14819, 14947], [14819, 14948, 14947], [14819, 14820, 14949], [14819, 14949, 14948], [14820, 14821, 14949], [14821, 14950, 14949], [14821, 14822, 14951], [14821, 14951, 14950], [14822, 14823, 14951], [14823, 14952, 14951], [14823, 14824, 14953], [14823, 14953, 14952], [14824, 14825, 14953], [14825, 14954, 14953], [14825, 14826, 14955], [14825, 14955, 14954], [14826, 14827, 14955], [14827, 14956, 14955], [14827, 14828, 14957], [14827, 14957, 14956], [14828, 14829, 14957], [14829, 14958, 14957], [14829, 14830, 14959], [14829, 14959, 14958], [14830, 14831, 14959], [14831, 14960, 14959], [14831, 14832, 14961], [14831, 14961, 14960], [14832, 14833, 14961], [14833, 14962, 14961], [14833, 14834, 14963], [14833, 14963, 14962], [14834, 14835, 14963], [14835, 14964, 14963], [14835, 14836, 14965], [14835, 14965, 14964], [14836, 14837, 14965], [14837, 14966, 14965], [14837, 14838, 14967], [14837, 14967, 14966], [14838, 14839, 14967], [14839, 14968, 14967], [14839, 14840, 14969], [14839, 14969, 14968], [14840, 14841, 14969], [14841, 14970, 14969], [14841, 14842, 14971], [14841, 14971, 14970], [14842, 14843, 14971], [14843, 14972, 14971], [14843, 14844, 14973], [14843, 14973, 14972], [14844, 14845, 14973], [14845, 14974, 14973], [14845, 14846, 14975], [14845, 14975, 14974], [14846, 14847, 14975], [14847, 14976, 14975], [14847, 14848, 14977], [14847, 14977, 14976], [14848, 14849, 14977], [14849, 14978, 14977], [14849, 14850, 14979], [14849, 14979, 14978], [14850, 14851, 14979], [14851, 14980, 14979], [14851, 14852, 14981], [14851, 14981, 14980], [14852, 14853, 14981], [14853, 14982, 14981], [14853, 14854, 14983], [14853, 14983, 14982], [14854, 14855, 14983], [14855, 14984, 14983], [14855, 14856, 14985], [14855, 14985, 14984], [14856, 14857, 14985], [14857, 14986, 14985], [14857, 14858, 14987], [14857, 14987, 14986], [14858, 14859, 14987], [14859, 14988, 14987], [14859, 14860, 14989], [14859, 14989, 14988], [14860, 14861, 14989], [14861, 14990, 14989], [14861, 14862, 14991], [14861, 14991, 14990], [14862, 14863, 14991], [14863, 14992, 14991], [14863, 14864, 14993], [14863, 14993, 14992], [14864, 14865, 14993], [14865, 14994, 14993], [14865, 14866, 14995], [14865, 14995, 14994], [14866, 14867, 14995], [14867, 14996, 14995], [14867, 14868, 14997], [14867, 14997, 14996], [14868, 14869, 14997], [14869, 14998, 14997], [14869, 14870, 14999], [14869, 14999, 14998], [14870, 14871, 14999], [14871, 15000, 14999], [14871, 14872, 15001], [14871, 15001, 15000], [14872, 14873, 15001], [14873, 15002, 15001], [14873, 14874, 15003], [14873, 15003, 15002], [14874, 14875, 15003], [14875, 15004, 15003], [14875, 14876, 15005], 
[14875, 15005, 15004], [14876, 14877, 15005], [14877, 15006, 15005], [14877, 14878, 15007], [14877, 15007, 15006], [14878, 14879, 15007], [14879, 15008, 15007], [14879, 14880, 15009], [14879, 15009, 15008], [14881, 14882, 15011], [14881, 15011, 15010], [14882, 14883, 15011], [14883, 15012, 15011], [14883, 14884, 15013], [14883, 15013, 15012], [14884, 14885, 15013], [14885, 15014, 15013], [14885, 14886, 15015], [14885, 15015, 15014], [14886, 14887, 15015], [14887, 15016, 15015], [14887, 14888, 15017], [14887, 15017, 15016], [14888, 14889, 15017], [14889, 15018, 15017], [14889, 14890, 15019], [14889, 15019, 15018], [14890, 14891, 15019], [14891, 15020, 15019], [14891, 14892, 15021], [14891, 15021, 15020], [14892, 14893, 15021], [14893, 15022, 15021], [14893, 14894, 15023], [14893, 15023, 15022], [14894, 14895, 15023], [14895, 15024, 15023], [14895, 14896, 15025], [14895, 15025, 15024], [14896, 14897, 15025], [14897, 15026, 15025], [14897, 14898, 15027], [14897, 15027, 15026], [14898, 14899, 15027], [14899, 15028, 15027], [14899, 14900, 15029], [14899, 15029, 15028], [14900, 14901, 15029], [14901, 15030, 15029], [14901, 14902, 15031], [14901, 15031, 15030], [14902, 14903, 15031], [14903, 15032, 15031], [14903, 14904, 15033], [14903, 15033, 15032], [14904, 14905, 15033], [14905, 15034, 15033], [14905, 14906, 15035], [14905, 15035, 15034], [14906, 14907, 15035], [14907, 15036, 15035], [14907, 14908, 15037], [14907, 15037, 15036], [14908, 14909, 15037], [14909, 15038, 15037], [14909, 14910, 15039], [14909, 15039, 15038], [14910, 14911, 15039], [14911, 15040, 15039], [14911, 14912, 15041], [14911, 15041, 15040], [14912, 14913, 15041], [14913, 15042, 15041], [14913, 14914, 15043], [14913, 15043, 15042], [14914, 14915, 15043], [14915, 15044, 15043], [14915, 14916, 15045], [14915, 15045, 15044], [14916, 14917, 15045], [14917, 15046, 15045], [14917, 14918, 15047], [14917, 15047, 15046], [14918, 14919, 15047], [14919, 15048, 15047], [14919, 14920, 15049], [14919, 15049, 15048], [14920, 14921, 15049], [14921, 15050, 15049], [14921, 14922, 15051], [14921, 15051, 15050], [14922, 14923, 15051], [14923, 15052, 15051], [14923, 14924, 15053], [14923, 15053, 15052], [14924, 14925, 15053], [14925, 15054, 15053], [14925, 14926, 15055], [14925, 15055, 15054], [14926, 14927, 15055], [14927, 15056, 15055], [14927, 14928, 15057], [14927, 15057, 15056], [14928, 14929, 15057], [14929, 15058, 15057], [14929, 14930, 15059], [14929, 15059, 15058], [14930, 14931, 15059], [14931, 15060, 15059], [14931, 14932, 15061], [14931, 15061, 15060], [14932, 14933, 15061], [14933, 15062, 15061], [14933, 14934, 15063], [14933, 15063, 15062], [14934, 14935, 15063], [14935, 15064, 15063], [14935, 14936, 15065], [14935, 15065, 15064], [14936, 14937, 15065], [14937, 15066, 15065], [14937, 14938, 15067], [14937, 15067, 15066], [14938, 14939, 15067], [14939, 15068, 15067], [14939, 14940, 15069], [14939, 15069, 15068], [14940, 14941, 15069], [14941, 15070, 15069], [14941, 14942, 15071], [14941, 15071, 15070], [14942, 14943, 15071], [14943, 15072, 15071], [14943, 14944, 15073], [14943, 15073, 15072], [14944, 14945, 15073], [14945, 15074, 15073], [14945, 14946, 15075], [14945, 15075, 15074], [14946, 14947, 15075], [14947, 15076, 15075], [14947, 14948, 15077], [14947, 15077, 15076], [14948, 14949, 15077], [14949, 15078, 15077], [14949, 14950, 15079], [14949, 15079, 15078], [14950, 14951, 15079], [14951, 15080, 15079], [14951, 14952, 15081], [14951, 15081, 15080], [14952, 14953, 15081], [14953, 15082, 15081], [14953, 14954, 15083], [14953, 
15083, 15082], [14954, 14955, 15083], [14955, 15084, 15083], [14955, 14956, 15085], [14955, 15085, 15084], [14956, 14957, 15085], [14957, 15086, 15085], [14957, 14958, 15087], [14957, 15087, 15086], [14958, 14959, 15087], [14959, 15088, 15087], [14959, 14960, 15089], [14959, 15089, 15088], [14960, 14961, 15089], [14961, 15090, 15089], [14961, 14962, 15091], [14961, 15091, 15090], [14962, 14963, 15091], [14963, 15092, 15091], [14963, 14964, 15093], [14963, 15093, 15092], [14964, 14965, 15093], [14965, 15094, 15093], [14965, 14966, 15095], [14965, 15095, 15094], [14966, 14967, 15095], [14967, 15096, 15095], [14967, 14968, 15097], [14967, 15097, 15096], [14968, 14969, 15097], [14969, 15098, 15097], [14969, 14970, 15099], [14969, 15099, 15098], [14970, 14971, 15099], [14971, 15100, 15099], [14971, 14972, 15101], [14971, 15101, 15100], [14972, 14973, 15101], [14973, 15102, 15101], [14973, 14974, 15103], [14973, 15103, 15102], [14974, 14975, 15103], [14975, 15104, 15103], [14975, 14976, 15105], [14975, 15105, 15104], [14976, 14977, 15105], [14977, 15106, 15105], [14977, 14978, 15107], [14977, 15107, 15106], [14978, 14979, 15107], [14979, 15108, 15107], [14979, 14980, 15109], [14979, 15109, 15108], [14980, 14981, 15109], [14981, 15110, 15109], [14981, 14982, 15111], [14981, 15111, 15110], [14982, 14983, 15111], [14983, 15112, 15111], [14983, 14984, 15113], [14983, 15113, 15112], [14984, 14985, 15113], [14985, 15114, 15113], [14985, 14986, 15115], [14985, 15115, 15114], [14986, 14987, 15115], [14987, 15116, 15115], [14987, 14988, 15117], [14987, 15117, 15116], [14988, 14989, 15117], [14989, 15118, 15117], [14989, 14990, 15119], [14989, 15119, 15118], [14990, 14991, 15119], [14991, 15120, 15119], [14991, 14992, 15121], [14991, 15121, 15120], [14992, 14993, 15121], [14993, 15122, 15121], [14993, 14994, 15123], [14993, 15123, 15122], [14994, 14995, 15123], [14995, 15124, 15123], [14995, 14996, 15125], [14995, 15125, 15124], [14996, 14997, 15125], [14997, 15126, 15125], [14997, 14998, 15127], [14997, 15127, 15126], [14998, 14999, 15127], [14999, 15128, 15127], [14999, 15000, 15129], [14999, 15129, 15128], [15000, 15001, 15129], [15001, 15130, 15129], [15001, 15002, 15131], [15001, 15131, 15130], [15002, 15003, 15131], [15003, 15132, 15131], [15003, 15004, 15133], [15003, 15133, 15132], [15004, 15005, 15133], [15005, 15134, 15133], [15005, 15006, 15135], [15005, 15135, 15134], [15006, 15007, 15135], [15007, 15136, 15135], [15007, 15008, 15137], [15007, 15137, 15136], [15008, 15009, 15137], [15009, 15138, 15137], [15010, 15011, 15139], [15011, 15140, 15139], [15011, 15012, 15141], [15011, 15141, 15140], [15012, 15013, 15141], [15013, 15142, 15141], [15013, 15014, 15143], [15013, 15143, 15142], [15014, 15015, 15143], [15015, 15144, 15143], [15015, 15016, 15145], [15015, 15145, 15144], [15016, 15017, 15145], [15017, 15146, 15145], [15017, 15018, 15147], [15017, 15147, 15146], [15018, 15019, 15147], [15019, 15148, 15147], [15019, 15020, 15149], [15019, 15149, 15148], [15020, 15021, 15149], [15021, 15150, 15149], [15021, 15022, 15151], [15021, 15151, 15150], [15022, 15023, 15151], [15023, 15152, 15151], [15023, 15024, 15153], [15023, 15153, 15152], [15024, 15025, 15153], [15025, 15154, 15153], [15025, 15026, 15155], [15025, 15155, 15154], [15026, 15027, 15155], [15027, 15156, 15155], [15027, 15028, 15157], [15027, 15157, 15156], [15028, 15029, 15157], [15029, 15158, 15157], [15029, 15030, 15159], [15029, 15159, 15158], [15030, 15031, 15159], [15031, 15160, 15159], [15031, 15032, 15161], [15031, 15161, 
15160], [15032, 15033, 15161], [15033, 15162, 15161], [15033, 15034, 15163], [15033, 15163, 15162], [15034, 15035, 15163], [15035, 15164, 15163], [15035, 15036, 15165], [15035, 15165, 15164], [15036, 15037, 15165], [15037, 15166, 15165], [15037, 15038, 15167], [15037, 15167, 15166], [15038, 15039, 15167], [15039, 15168, 15167], [15039, 15040, 15169], [15039, 15169, 15168], [15040, 15041, 15169], [15041, 15170, 15169], [15041, 15042, 15171], [15041, 15171, 15170], [15042, 15043, 15171], [15043, 15172, 15171], [15043, 15044, 15173], [15043, 15173, 15172], [15044, 15045, 15173], [15045, 15174, 15173], [15045, 15046, 15175], [15045, 15175, 15174], [15046, 15047, 15175], [15047, 15176, 15175], [15047, 15048, 15177], [15047, 15177, 15176], [15048, 15049, 15177], [15049, 15178, 15177], [15049, 15050, 15179], [15049, 15179, 15178], [15050, 15051, 15179], [15051, 15180, 15179], [15051, 15052, 15181], [15051, 15181, 15180], [15052, 15053, 15181], [15053, 15182, 15181], [15053, 15054, 15183], [15053, 15183, 15182], [15054, 15055, 15183], [15055, 15184, 15183], [15055, 15056, 15185], [15055, 15185, 15184], [15056, 15057, 15185], [15057, 15186, 15185], [15057, 15058, 15187], [15057, 15187, 15186], [15058, 15059, 15187], [15059, 15188, 15187], [15059, 15060, 15189], [15059, 15189, 15188], [15060, 15061, 15189], [15061, 15190, 15189], [15061, 15062, 15191], [15061, 15191, 15190], [15062, 15063, 15191], [15063, 15192, 15191], [15063, 15064, 15193], [15063, 15193, 15192], [15064, 15065, 15193], [15065, 15194, 15193], [15065, 15066, 15195], [15065, 15195, 15194], [15066, 15067, 15195], [15067, 15196, 15195], [15067, 15068, 15197], [15067, 15197, 15196], [15068, 15069, 15197], [15069, 15198, 15197], [15069, 15070, 15199], [15069, 15199, 15198], [15070, 15071, 15199], [15071, 15200, 15199], [15071, 15072, 15201], [15071, 15201, 15200], [15072, 15073, 15201], [15073, 15202, 15201], [15073, 15074, 15203], [15073, 15203, 15202], [15074, 15075, 15203], [15075, 15204, 15203], [15075, 15076, 15205], [15075, 15205, 15204], [15076, 15077, 15205], [15077, 15206, 15205], [15077, 15078, 15207], [15077, 15207, 15206], [15078, 15079, 15207], [15079, 15208, 15207], [15079, 15080, 15209], [15079, 15209, 15208], [15080, 15081, 15209], [15081, 15210, 15209], [15081, 15082, 15211], [15081, 15211, 15210], [15082, 15083, 15211], [15083, 15212, 15211], [15083, 15084, 15213], [15083, 15213, 15212], [15084, 15085, 15213], [15085, 15214, 15213], [15085, 15086, 15215], [15085, 15215, 15214], [15086, 15087, 15215], [15087, 15216, 15215], [15087, 15088, 15217], [15087, 15217, 15216], [15088, 15089, 15217], [15089, 15218, 15217], [15089, 15090, 15219], [15089, 15219, 15218], [15090, 15091, 15219], [15091, 15220, 15219], [15091, 15092, 15221], [15091, 15221, 15220], [15092, 15093, 15221], [15093, 15222, 15221], [15093, 15094, 15223], [15093, 15223, 15222], [15094, 15095, 15223], [15095, 15224, 15223], [15095, 15096, 15225], [15095, 15225, 15224], [15096, 15097, 15225], [15097, 15226, 15225], [15097, 15098, 15227], [15097, 15227, 15226], [15098, 15099, 15227], [15099, 15228, 15227], [15099, 15100, 15229], [15099, 15229, 15228], [15100, 15101, 15229], [15101, 15230, 15229], [15101, 15102, 15231], [15101, 15231, 15230], [15102, 15103, 15231], [15103, 15232, 15231], [15103, 15104, 15233], [15103, 15233, 15232], [15104, 15105, 15233], [15105, 15234, 15233], [15105, 15106, 15235], [15105, 15235, 15234], [15106, 15107, 15235], [15107, 15236, 15235], [15107, 15108, 15237], [15107, 15237, 15236], [15108, 15109, 15237], [15109, 15238, 15237], 
[15109, 15110, 15239], [15109, 15239, 15238], [15110, 15111, 15239], [15111, 15240, 15239], [15111, 15112, 15241], [15111, 15241, 15240], [15112, 15113, 15241], [15113, 15242, 15241], [15113, 15114, 15243], [15113, 15243, 15242], [15114, 15115, 15243], [15115, 15244, 15243], [15115, 15116, 15245], [15115, 15245, 15244], [15116, 15117, 15245], [15117, 15246, 15245], [15117, 15118, 15247], [15117, 15247, 15246], [15118, 15119, 15247], [15119, 15248, 15247], [15119, 15120, 15249], [15119, 15249, 15248], [15120, 15121, 15249], [15121, 15250, 15249], [15121, 15122, 15251], [15121, 15251, 15250], [15122, 15123, 15251], [15123, 15252, 15251], [15123, 15124, 15253], [15123, 15253, 15252], [15124, 15125, 15253], [15125, 15254, 15253], [15125, 15126, 15255], [15125, 15255, 15254], [15126, 15127, 15255], [15127, 15256, 15255], [15127, 15128, 15257], [15127, 15257, 15256], [15128, 15129, 15257], [15129, 15258, 15257], [15129, 15130, 15259], [15129, 15259, 15258], [15130, 15131, 15259], [15131, 15260, 15259], [15131, 15132, 15261], [15131, 15261, 15260], [15132, 15133, 15261], [15133, 15262, 15261], [15133, 15134, 15263], [15133, 15263, 15262], [15134, 15135, 15263], [15135, 15264, 15263], [15135, 15136, 15265], [15135, 15265, 15264], [15136, 15137, 15265], [15137, 15266, 15265], [15137, 15138, 15267], [15137, 15267, 15266], [15139, 15140, 15269], [15139, 15269, 15268], [15140, 15141, 15269], [15141, 15270, 15269], [15141, 15142, 15271], [15141, 15271, 15270], [15142, 15143, 15271], [15143, 15272, 15271], [15143, 15144, 15273], [15143, 15273, 15272], [15144, 15145, 15273], [15145, 15274, 15273], [15145, 15146, 15275], [15145, 15275, 15274], [15146, 15147, 15275], [15147, 15276, 15275], [15147, 15148, 15277], [15147, 15277, 15276], [15148, 15149, 15277], [15149, 15278, 15277], [15149, 15150, 15279], [15149, 15279, 15278], [15150, 15151, 15279], [15151, 15280, 15279], [15151, 15152, 15281], [15151, 15281, 15280], [15152, 15153, 15281], [15153, 15282, 15281], [15153, 15154, 15283], [15153, 15283, 15282], [15154, 15155, 15283], [15155, 15284, 15283], [15155, 15156, 15285], [15155, 15285, 15284], [15156, 15157, 15285], [15157, 15286, 15285], [15157, 15158, 15287], [15157, 15287, 15286], [15158, 15159, 15287], [15159, 15288, 15287], [15159, 15160, 15289], [15159, 15289, 15288], [15160, 15161, 15289], [15161, 15290, 15289], [15161, 15162, 15291], [15161, 15291, 15290], [15162, 15163, 15291], [15163, 15292, 15291], [15163, 15164, 15293], [15163, 15293, 15292], [15164, 15165, 15293], [15165, 15294, 15293], [15165, 15166, 15295], [15165, 15295, 15294], [15166, 15167, 15295], [15167, 15296, 15295], [15167, 15168, 15297], [15167, 15297, 15296], [15168, 15169, 15297], [15169, 15298, 15297], [15169, 15170, 15299], [15169, 15299, 15298], [15170, 15171, 15299], [15171, 15300, 15299], [15171, 15172, 15301], [15171, 15301, 15300], [15172, 15173, 15301], [15173, 15302, 15301], [15173, 15174, 15303], [15173, 15303, 15302], [15174, 15175, 15303], [15175, 15304, 15303], [15175, 15176, 15305], [15175, 15305, 15304], [15176, 15177, 15305], [15177, 15306, 15305], [15177, 15178, 15307], [15177, 15307, 15306], [15178, 15179, 15307], [15179, 15308, 15307], [15179, 15180, 15309], [15179, 15309, 15308], [15180, 15181, 15309], [15181, 15310, 15309], [15181, 15182, 15311], [15181, 15311, 15310], [15182, 15183, 15311], [15183, 15312, 15311], [15183, 15184, 15313], [15183, 15313, 15312], [15184, 15185, 15313], [15185, 15314, 15313], [15185, 15186, 15315], [15185, 15315, 15314], [15186, 15187, 15315], [15187, 15316, 15315], [15187, 
15188, 15317], [15187, 15317, 15316], [15188, 15189, 15317], [15189, 15318, 15317], [15189, 15190, 15319], [15189, 15319, 15318], [15190, 15191, 15319], [15191, 15320, 15319], [15191, 15192, 15321], [15191, 15321, 15320], [15192, 15193, 15321], [15193, 15322, 15321], [15193, 15194, 15323], [15193, 15323, 15322], [15194, 15195, 15323], [15195, 15324, 15323], [15195, 15196, 15325], [15195, 15325, 15324], [15196, 15197, 15325], [15197, 15326, 15325], [15197, 15198, 15327], [15197, 15327, 15326], [15198, 15199, 15327], [15199, 15328, 15327], [15199, 15200, 15329], [15199, 15329, 15328], [15200, 15201, 15329], [15201, 15330, 15329], [15201, 15202, 15331], [15201, 15331, 15330], [15202, 15203, 15331], [15203, 15332, 15331], [15203, 15204, 15333], [15203, 15333, 15332], [15204, 15205, 15333], [15205, 15334, 15333], [15205, 15206, 15335], [15205, 15335, 15334], [15206, 15207, 15335], [15207, 15336, 15335], [15207, 15208, 15337], [15207, 15337, 15336], [15208, 15209, 15337], [15209, 15338, 15337], [15209, 15210, 15339], [15209, 15339, 15338], [15210, 15211, 15339], [15211, 15340, 15339], [15211, 15212, 15341], [15211, 15341, 15340], [15212, 15213, 15341], [15213, 15342, 15341], [15213, 15214, 15343], [15213, 15343, 15342], [15214, 15215, 15343], [15215, 15344, 15343], [15215, 15216, 15345], [15215, 15345, 15344], [15216, 15217, 15345], [15217, 15346, 15345], [15217, 15218, 15347], [15217, 15347, 15346], [15218, 15219, 15347], [15219, 15348, 15347], [15219, 15220, 15349], [15219, 15349, 15348], [15220, 15221, 15349], [15221, 15350, 15349], [15221, 15222, 15351], [15221, 15351, 15350], [15222, 15223, 15351], [15223, 15352, 15351], [15223, 15224, 15353], [15223, 15353, 15352], [15224, 15225, 15353], [15225, 15354, 15353], [15225, 15226, 15355], [15225, 15355, 15354], [15226, 15227, 15355], [15227, 15356, 15355], [15227, 15228, 15357], [15227, 15357, 15356], [15228, 15229, 15357], [15229, 15358, 15357], [15229, 15230, 15359], [15229, 15359, 15358], [15230, 15231, 15359], [15231, 15360, 15359], [15231, 15232, 15361], [15231, 15361, 15360], [15232, 15233, 15361], [15233, 15362, 15361], [15233, 15234, 15363], [15233, 15363, 15362], [15234, 15235, 15363], [15235, 15364, 15363], [15235, 15236, 15365], [15235, 15365, 15364], [15236, 15237, 15365], [15237, 15366, 15365], [15237, 15238, 15367], [15237, 15367, 15366], [15238, 15239, 15367], [15239, 15368, 15367], [15239, 15240, 15369], [15239, 15369, 15368], [15240, 15241, 15369], [15241, 15370, 15369], [15241, 15242, 15371], [15241, 15371, 15370], [15242, 15243, 15371], [15243, 15372, 15371], [15243, 15244, 15373], [15243, 15373, 15372], [15244, 15245, 15373], [15245, 15374, 15373], [15245, 15246, 15375], [15245, 15375, 15374], [15246, 15247, 15375], [15247, 15376, 15375], [15247, 15248, 15377], [15247, 15377, 15376], [15248, 15249, 15377], [15249, 15378, 15377], [15249, 15250, 15379], [15249, 15379, 15378], [15250, 15251, 15379], [15251, 15380, 15379], [15251, 15252, 15381], [15251, 15381, 15380], [15252, 15253, 15381], [15253, 15382, 15381], [15253, 15254, 15383], [15253, 15383, 15382], [15254, 15255, 15383], [15255, 15384, 15383], [15255, 15256, 15385], [15255, 15385, 15384], [15256, 15257, 15385], [15257, 15386, 15385], [15257, 15258, 15387], [15257, 15387, 15386], [15258, 15259, 15387], [15259, 15388, 15387], [15259, 15260, 15389], [15259, 15389, 15388], [15260, 15261, 15389], [15261, 15390, 15389], [15261, 15262, 15391], [15261, 15391, 15390], [15262, 15263, 15391], [15263, 15392, 15391], [15263, 15264, 15393], [15263, 15393, 15392], [15264, 15265, 
15393], [15265, 15394, 15393], [15265, 15266, 15395], [15265, 15395, 15394], [15266, 15267, 15395], [15267, 15396, 15395], [15268, 15269, 15397], [15269, 15398, 15397], [15269, 15270, 15399], [15269, 15399, 15398], [15270, 15271, 15399], [15271, 15400, 15399], [15271, 15272, 15401], [15271, 15401, 15400], [15272, 15273, 15401], [15273, 15402, 15401], [15273, 15274, 15403], [15273, 15403, 15402], [15274, 15275, 15403], [15275, 15404, 15403], [15275, 15276, 15405], [15275, 15405, 15404], [15276, 15277, 15405], [15277, 15406, 15405], [15277, 15278, 15407], [15277, 15407, 15406], [15278, 15279, 15407], [15279, 15408, 15407], [15279, 15280, 15409], [15279, 15409, 15408], [15280, 15281, 15409], [15281, 15410, 15409], [15281, 15282, 15411], [15281, 15411, 15410], [15282, 15283, 15411], [15283, 15412, 15411], [15283, 15284, 15413], [15283, 15413, 15412], [15284, 15285, 15413], [15285, 15414, 15413], [15285, 15286, 15415], [15285, 15415, 15414], [15286, 15287, 15415], [15287, 15416, 15415], [15287, 15288, 15417], [15287, 15417, 15416], [15288, 15289, 15417], [15289, 15418, 15417], [15289, 15290, 15419], [15289, 15419, 15418], [15290, 15291, 15419], [15291, 15420, 15419], [15291, 15292, 15421], [15291, 15421, 15420], [15292, 15293, 15421], [15293, 15422, 15421], [15293, 15294, 15423], [15293, 15423, 15422], [15294, 15295, 15423], [15295, 15424, 15423], [15295, 15296, 15425], [15295, 15425, 15424], [15296, 15297, 15425], [15297, 15426, 15425], [15297, 15298, 15427], [15297, 15427, 15426], [15298, 15299, 15427], [15299, 15428, 15427], [15299, 15300, 15429], [15299, 15429, 15428], [15300, 15301, 15429], [15301, 15430, 15429], [15301, 15302, 15431], [15301, 15431, 15430], [15302, 15303, 15431], [15303, 15432, 15431], [15303, 15304, 15433], [15303, 15433, 15432], [15304, 15305, 15433], [15305, 15434, 15433], [15305, 15306, 15435], [15305, 15435, 15434], [15306, 15307, 15435], [15307, 15436, 15435], [15307, 15308, 15437], [15307, 15437, 15436], [15308, 15309, 15437], [15309, 15438, 15437], [15309, 15310, 15439], [15309, 15439, 15438], [15310, 15311, 15439], [15311, 15440, 15439], [15311, 15312, 15441], [15311, 15441, 15440], [15312, 15313, 15441], [15313, 15442, 15441], [15313, 15314, 15443], [15313, 15443, 15442], [15314, 15315, 15443], [15315, 15444, 15443], [15315, 15316, 15445], [15315, 15445, 15444], [15316, 15317, 15445], [15317, 15446, 15445], [15317, 15318, 15447], [15317, 15447, 15446], [15318, 15319, 15447], [15319, 15448, 15447], [15319, 15320, 15449], [15319, 15449, 15448], [15320, 15321, 15449], [15321, 15450, 15449], [15321, 15322, 15451], [15321, 15451, 15450], [15322, 15323, 15451], [15323, 15452, 15451], [15323, 15324, 15453], [15323, 15453, 15452], [15324, 15325, 15453], [15325, 15454, 15453], [15325, 15326, 15455], [15325, 15455, 15454], [15326, 15327, 15455], [15327, 15456, 15455], [15327, 15328, 15457], [15327, 15457, 15456], [15328, 15329, 15457], [15329, 15458, 15457], [15329, 15330, 15459], [15329, 15459, 15458], [15330, 15331, 15459], [15331, 15460, 15459], [15331, 15332, 15461], [15331, 15461, 15460], [15332, 15333, 15461], [15333, 15462, 15461], [15333, 15334, 15463], [15333, 15463, 15462], [15334, 15335, 15463], [15335, 15464, 15463], [15335, 15336, 15465], [15335, 15465, 15464], [15336, 15337, 15465], [15337, 15466, 15465], [15337, 15338, 15467], [15337, 15467, 15466], [15338, 15339, 15467], [15339, 15468, 15467], [15339, 15340, 15469], [15339, 15469, 15468], [15340, 15341, 15469], [15341, 15470, 15469], [15341, 15342, 15471], [15341, 15471, 15470], [15342, 15343, 15471], 
[15343, 15472, 15471], [15343, 15344, 15473], [15343, 15473, 15472], [15344, 15345, 15473], [15345, 15474, 15473], [15345, 15346, 15475], [15345, 15475, 15474], [15346, 15347, 15475], [15347, 15476, 15475], [15347, 15348, 15477], [15347, 15477, 15476], [15348, 15349, 15477], [15349, 15478, 15477], [15349, 15350, 15479], [15349, 15479, 15478], [15350, 15351, 15479], [15351, 15480, 15479], [15351, 15352, 15481], [15351, 15481, 15480], [15352, 15353, 15481], [15353, 15482, 15481], [15353, 15354, 15483], [15353, 15483, 15482], [15354, 15355, 15483], [15355, 15484, 15483], [15355, 15356, 15485], [15355, 15485, 15484], [15356, 15357, 15485], [15357, 15486, 15485], [15357, 15358, 15487], [15357, 15487, 15486], [15358, 15359, 15487], [15359, 15488, 15487], [15359, 15360, 15489], [15359, 15489, 15488], [15360, 15361, 15489], [15361, 15490, 15489], [15361, 15362, 15491], [15361, 15491, 15490], [15362, 15363, 15491], [15363, 15492, 15491], [15363, 15364, 15493], [15363, 15493, 15492], [15364, 15365, 15493], [15365, 15494, 15493], [15365, 15366, 15495], [15365, 15495, 15494], [15366, 15367, 15495], [15367, 15496, 15495], [15367, 15368, 15497], [15367, 15497, 15496], [15368, 15369, 15497], [15369, 15498, 15497], [15369, 15370, 15499], [15369, 15499, 15498], [15370, 15371, 15499], [15371, 15500, 15499], [15371, 15372, 15501], [15371, 15501, 15500], [15372, 15373, 15501], [15373, 15502, 15501], [15373, 15374, 15503], [15373, 15503, 15502], [15374, 15375, 15503], [15375, 15504, 15503], [15375, 15376, 15505], [15375, 15505, 15504], [15376, 15377, 15505], [15377, 15506, 15505], [15377, 15378, 15507], [15377, 15507, 15506], [15378, 15379, 15507], [15379, 15508, 15507], [15379, 15380, 15509], [15379, 15509, 15508], [15380, 15381, 15509], [15381, 15510, 15509], [15381, 15382, 15511], [15381, 15511, 15510], [15382, 15383, 15511], [15383, 15512, 15511], [15383, 15384, 15513], [15383, 15513, 15512], [15384, 15385, 15513], [15385, 15514, 15513], [15385, 15386, 15515], [15385, 15515, 15514], [15386, 15387, 15515], [15387, 15516, 15515], [15387, 15388, 15517], [15387, 15517, 15516], [15388, 15389, 15517], [15389, 15518, 15517], [15389, 15390, 15519], [15389, 15519, 15518], [15390, 15391, 15519], [15391, 15520, 15519], [15391, 15392, 15521], [15391, 15521, 15520], [15392, 15393, 15521], [15393, 15522, 15521], [15393, 15394, 15523], [15393, 15523, 15522], [15394, 15395, 15523], [15395, 15524, 15523], [15395, 15396, 15525], [15395, 15525, 15524], [15397, 15398, 15527], [15397, 15527, 15526], [15398, 15399, 15527], [15399, 15528, 15527], [15399, 15400, 15529], [15399, 15529, 15528], [15400, 15401, 15529], [15401, 15530, 15529], [15401, 15402, 15531], [15401, 15531, 15530], [15402, 15403, 15531], [15403, 15532, 15531], [15403, 15404, 15533], [15403, 15533, 15532], [15404, 15405, 15533], [15405, 15534, 15533], [15405, 15406, 15535], [15405, 15535, 15534], [15406, 15407, 15535], [15407, 15536, 15535], [15407, 15408, 15537], [15407, 15537, 15536], [15408, 15409, 15537], [15409, 15538, 15537], [15409, 15410, 15539], [15409, 15539, 15538], [15410, 15411, 15539], [15411, 15540, 15539], [15411, 15412, 15541], [15411, 15541, 15540], [15412, 15413, 15541], [15413, 15542, 15541], [15413, 15414, 15543], [15413, 15543, 15542], [15414, 15415, 15543], [15415, 15544, 15543], [15415, 15416, 15545], [15415, 15545, 15544], [15416, 15417, 15545], [15417, 15546, 15545], [15417, 15418, 15547], [15417, 15547, 15546], [15418, 15419, 15547], [15419, 15548, 15547], [15419, 15420, 15549], [15419, 15549, 15548], [15420, 15421, 15549], [15421, 
15550, 15549], [15421, 15422, 15551], [15421, 15551, 15550], [15422, 15423, 15551], [15423, 15552, 15551], [15423, 15424, 15553], [15423, 15553, 15552], [15424, 15425, 15553], [15425, 15554, 15553], [15425, 15426, 15555], [15425, 15555, 15554], [15426, 15427, 15555], [15427, 15556, 15555], [15427, 15428, 15557], [15427, 15557, 15556], [15428, 15429, 15557], [15429, 15558, 15557], [15429, 15430, 15559], [15429, 15559, 15558], [15430, 15431, 15559], [15431, 15560, 15559], [15431, 15432, 15561], [15431, 15561, 15560], [15432, 15433, 15561], [15433, 15562, 15561], [15433, 15434, 15563], [15433, 15563, 15562], [15434, 15435, 15563], [15435, 15564, 15563], [15435, 15436, 15565], [15435, 15565, 15564], [15436, 15437, 15565], [15437, 15566, 15565], [15437, 15438, 15567], [15437, 15567, 15566], [15438, 15439, 15567], [15439, 15568, 15567], [15439, 15440, 15569], [15439, 15569, 15568], [15440, 15441, 15569], [15441, 15570, 15569], [15441, 15442, 15571], [15441, 15571, 15570], [15442, 15443, 15571], [15443, 15572, 15571], [15443, 15444, 15573], [15443, 15573, 15572], [15444, 15445, 15573], [15445, 15574, 15573], [15445, 15446, 15575], [15445, 15575, 15574], [15446, 15447, 15575], [15447, 15576, 15575], [15447, 15448, 15577], [15447, 15577, 15576], [15448, 15449, 15577], [15449, 15578, 15577], [15449, 15450, 15579], [15449, 15579, 15578], [15450, 15451, 15579], [15451, 15580, 15579], [15451, 15452, 15581], [15451, 15581, 15580], [15452, 15453, 15581], [15453, 15582, 15581], [15453, 15454, 15583], [15453, 15583, 15582], [15454, 15455, 15583], [15455, 15584, 15583], [15455, 15456, 15585], [15455, 15585, 15584], [15456, 15457, 15585], [15457, 15586, 15585], [15457, 15458, 15587], [15457, 15587, 15586], [15458, 15459, 15587], [15459, 15588, 15587], [15459, 15460, 15589], [15459, 15589, 15588], [15460, 15461, 15589], [15461, 15590, 15589], [15461, 15462, 15591], [15461, 15591, 15590], [15462, 15463, 15591], [15463, 15592, 15591], [15463, 15464, 15593], [15463, 15593, 15592], [15464, 15465, 15593], [15465, 15594, 15593], [15465, 15466, 15595], [15465, 15595, 15594], [15466, 15467, 15595], [15467, 15596, 15595], [15467, 15468, 15597], [15467, 15597, 15596], [15468, 15469, 15597], [15469, 15598, 15597], [15469, 15470, 15599], [15469, 15599, 15598], [15470, 15471, 15599], [15471, 15600, 15599], [15471, 15472, 15601], [15471, 15601, 15600], [15472, 15473, 15601], [15473, 15602, 15601], [15473, 15474, 15603], [15473, 15603, 15602], [15474, 15475, 15603], [15475, 15604, 15603], [15475, 15476, 15605], [15475, 15605, 15604], [15476, 15477, 15605], [15477, 15606, 15605], [15477, 15478, 15607], [15477, 15607, 15606], [15478, 15479, 15607], [15479, 15608, 15607], [15479, 15480, 15609], [15479, 15609, 15608], [15480, 15481, 15609], [15481, 15610, 15609], [15481, 15482, 15611], [15481, 15611, 15610], [15482, 15483, 15611], [15483, 15612, 15611], [15483, 15484, 15613], [15483, 15613, 15612], [15484, 15485, 15613], [15485, 15614, 15613], [15485, 15486, 15615], [15485, 15615, 15614], [15486, 15487, 15615], [15487, 15616, 15615], [15487, 15488, 15617], [15487, 15617, 15616], [15488, 15489, 15617], [15489, 15618, 15617], [15489, 15490, 15619], [15489, 15619, 15618], [15490, 15491, 15619], [15491, 15620, 15619], [15491, 15492, 15621], [15491, 15621, 15620], [15492, 15493, 15621], [15493, 15622, 15621], [15493, 15494, 15623], [15493, 15623, 15622], [15494, 15495, 15623], [15495, 15624, 15623], [15495, 15496, 15625], [15495, 15625, 15624], [15496, 15497, 15625], [15497, 15626, 15625], [15497, 15498, 15627], [15497, 15627, 
15626], [15498, 15499, 15627], [15499, 15628, 15627], [15499, 15500, 15629], [15499, 15629, 15628], [15500, 15501, 15629], [15501, 15630, 15629], [15501, 15502, 15631], [15501, 15631, 15630], [15502, 15503, 15631], [15503, 15632, 15631], [15503, 15504, 15633], [15503, 15633, 15632], [15504, 15505, 15633], [15505, 15634, 15633], [15505, 15506, 15635], [15505, 15635, 15634], [15506, 15507, 15635], [15507, 15636, 15635], [15507, 15508, 15637], [15507, 15637, 15636], [15508, 15509, 15637], [15509, 15638, 15637], [15509, 15510, 15639], [15509, 15639, 15638], [15510, 15511, 15639], [15511, 15640, 15639], [15511, 15512, 15641], [15511, 15641, 15640], [15512, 15513, 15641], [15513, 15642, 15641], [15513, 15514, 15643], [15513, 15643, 15642], [15514, 15515, 15643], [15515, 15644, 15643], [15515, 15516, 15645], [15515, 15645, 15644], [15516, 15517, 15645], [15517, 15646, 15645], [15517, 15518, 15647], [15517, 15647, 15646], [15518, 15519, 15647], [15519, 15648, 15647], [15519, 15520, 15649], [15519, 15649, 15648], [15520, 15521, 15649], [15521, 15650, 15649], [15521, 15522, 15651], [15521, 15651, 15650], [15522, 15523, 15651], [15523, 15652, 15651], [15523, 15524, 15653], [15523, 15653, 15652], [15524, 15525, 15653], [15525, 15654, 15653], [15526, 15527, 15655], [15527, 15656, 15655], [15527, 15528, 15657], [15527, 15657, 15656], [15528, 15529, 15657], [15529, 15658, 15657], [15529, 15530, 15659], [15529, 15659, 15658], [15530, 15531, 15659], [15531, 15660, 15659], [15531, 15532, 15661], [15531, 15661, 15660], [15532, 15533, 15661], [15533, 15662, 15661], [15533, 15534, 15663], [15533, 15663, 15662], [15534, 15535, 15663], [15535, 15664, 15663], [15535, 15536, 15665], [15535, 15665, 15664], [15536, 15537, 15665], [15537, 15666, 15665], [15537, 15538, 15667], [15537, 15667, 15666], [15538, 15539, 15667], [15539, 15668, 15667], [15539, 15540, 15669], [15539, 15669, 15668], [15540, 15541, 15669], [15541, 15670, 15669], [15541, 15542, 15671], [15541, 15671, 15670], [15542, 15543, 15671], [15543, 15672, 15671], [15543, 15544, 15673], [15543, 15673, 15672], [15544, 15545, 15673], [15545, 15674, 15673], [15545, 15546, 15675], [15545, 15675, 15674], [15546, 15547, 15675], [15547, 15676, 15675], [15547, 15548, 15677], [15547, 15677, 15676], [15548, 15549, 15677], [15549, 15678, 15677], [15549, 15550, 15679], [15549, 15679, 15678], [15550, 15551, 15679], [15551, 15680, 15679], [15551, 15552, 15681], [15551, 15681, 15680], [15552, 15553, 15681], [15553, 15682, 15681], [15553, 15554, 15683], [15553, 15683, 15682], [15554, 15555, 15683], [15555, 15684, 15683], [15555, 15556, 15685], [15555, 15685, 15684], [15556, 15557, 15685], [15557, 15686, 15685], [15557, 15558, 15687], [15557, 15687, 15686], [15558, 15559, 15687], [15559, 15688, 15687], [15559, 15560, 15689], [15559, 15689, 15688], [15560, 15561, 15689], [15561, 15690, 15689], [15561, 15562, 15691], [15561, 15691, 15690], [15562, 15563, 15691], [15563, 15692, 15691], [15563, 15564, 15693], [15563, 15693, 15692], [15564, 15565, 15693], [15565, 15694, 15693], [15565, 15566, 15695], [15565, 15695, 15694], [15566, 15567, 15695], [15567, 15696, 15695], [15567, 15568, 15697], [15567, 15697, 15696], [15568, 15569, 15697], [15569, 15698, 15697], [15569, 15570, 15699], [15569, 15699, 15698], [15570, 15571, 15699], [15571, 15700, 15699], [15571, 15572, 15701], [15571, 15701, 15700], [15572, 15573, 15701], [15573, 15702, 15701], [15573, 15574, 15703], [15573, 15703, 15702], [15574, 15575, 15703], [15575, 15704, 15703], [15575, 15576, 15705], [15575, 15705, 15704], 
[15576, 15577, 15705], [15577, 15706, 15705], [15577, 15578, 15707], [15577, 15707, 15706], [15578, 15579, 15707], [15579, 15708, 15707], [15579, 15580, 15709], [15579, 15709, 15708], [15580, 15581, 15709], [15581, 15710, 15709], [15581, 15582, 15711], [15581, 15711, 15710], [15582, 15583, 15711], [15583, 15712, 15711], [15583, 15584, 15713], [15583, 15713, 15712], [15584, 15585, 15713], [15585, 15714, 15713], [15585, 15586, 15715], [15585, 15715, 15714], [15586, 15587, 15715], [15587, 15716, 15715], [15587, 15588, 15717], [15587, 15717, 15716], [15588, 15589, 15717], [15589, 15718, 15717], [15589, 15590, 15719], [15589, 15719, 15718], [15590, 15591, 15719], [15591, 15720, 15719], [15591, 15592, 15721], [15591, 15721, 15720], [15592, 15593, 15721], [15593, 15722, 15721], [15593, 15594, 15723], [15593, 15723, 15722], [15594, 15595, 15723], [15595, 15724, 15723], [15595, 15596, 15725], [15595, 15725, 15724], [15596, 15597, 15725], [15597, 15726, 15725], [15597, 15598, 15727], [15597, 15727, 15726], [15598, 15599, 15727], [15599, 15728, 15727], [15599, 15600, 15729], [15599, 15729, 15728], [15600, 15601, 15729], [15601, 15730, 15729], [15601, 15602, 15731], [15601, 15731, 15730], [15602, 15603, 15731], [15603, 15732, 15731], [15603, 15604, 15733], [15603, 15733, 15732], [15604, 15605, 15733], [15605, 15734, 15733], [15605, 15606, 15735], [15605, 15735, 15734], [15606, 15607, 15735], [15607, 15736, 15735], [15607, 15608, 15737], [15607, 15737, 15736], [15608, 15609, 15737], [15609, 15738, 15737], [15609, 15610, 15739], [15609, 15739, 15738], [15610, 15611, 15739], [15611, 15740, 15739], [15611, 15612, 15741], [15611, 15741, 15740], [15612, 15613, 15741], [15613, 15742, 15741], [15613, 15614, 15743], [15613, 15743, 15742], [15614, 15615, 15743], [15615, 15744, 15743], [15615, 15616, 15745], [15615, 15745, 15744], [15616, 15617, 15745], [15617, 15746, 15745], [15617, 15618, 15747], [15617, 15747, 15746], [15618, 15619, 15747], [15619, 15748, 15747], [15619, 15620, 15749], [15619, 15749, 15748], [15620, 15621, 15749], [15621, 15750, 15749], [15621, 15622, 15751], [15621, 15751, 15750], [15622, 15623, 15751], [15623, 15752, 15751], [15623, 15624, 15753], [15623, 15753, 15752], [15624, 15625, 15753], [15625, 15754, 15753], [15625, 15626, 15755], [15625, 15755, 15754], [15626, 15627, 15755], [15627, 15756, 15755], [15627, 15628, 15757], [15627, 15757, 15756], [15628, 15629, 15757], [15629, 15758, 15757], [15629, 15630, 15759], [15629, 15759, 15758], [15630, 15631, 15759], [15631, 15760, 15759], [15631, 15632, 15761], [15631, 15761, 15760], [15632, 15633, 15761], [15633, 15762, 15761], [15633, 15634, 15763], [15633, 15763, 15762], [15634, 15635, 15763], [15635, 15764, 15763], [15635, 15636, 15765], [15635, 15765, 15764], [15636, 15637, 15765], [15637, 15766, 15765], [15637, 15638, 15767], [15637, 15767, 15766], [15638, 15639, 15767], [15639, 15768, 15767], [15639, 15640, 15769], [15639, 15769, 15768], [15640, 15641, 15769], [15641, 15770, 15769], [15641, 15642, 15771], [15641, 15771, 15770], [15642, 15643, 15771], [15643, 15772, 15771], [15643, 15644, 15773], [15643, 15773, 15772], [15644, 15645, 15773], [15645, 15774, 15773], [15645, 15646, 15775], [15645, 15775, 15774], [15646, 15647, 15775], [15647, 15776, 15775], [15647, 15648, 15777], [15647, 15777, 15776], [15648, 15649, 15777], [15649, 15778, 15777], [15649, 15650, 15779], [15649, 15779, 15778], [15650, 15651, 15779], [15651, 15780, 15779], [15651, 15652, 15781], [15651, 15781, 15780], [15652, 15653, 15781], [15653, 15782, 15781], [15653, 
15654, 15783], [15653, 15783, 15782], [15655, 15656, 15785], [15655, 15785, 15784], [15656, 15657, 15785], [15657, 15786, 15785], [15657, 15658, 15787], [15657, 15787, 15786], [15658, 15659, 15787], [15659, 15788, 15787], [15659, 15660, 15789], [15659, 15789, 15788], [15660, 15661, 15789], [15661, 15790, 15789], [15661, 15662, 15791], [15661, 15791, 15790], [15662, 15663, 15791], [15663, 15792, 15791], [15663, 15664, 15793], [15663, 15793, 15792], [15664, 15665, 15793], [15665, 15794, 15793], [15665, 15666, 15795], [15665, 15795, 15794], [15666, 15667, 15795], [15667, 15796, 15795], [15667, 15668, 15797], [15667, 15797, 15796], [15668, 15669, 15797], [15669, 15798, 15797], [15669, 15670, 15799], [15669, 15799, 15798], [15670, 15671, 15799], [15671, 15800, 15799], [15671, 15672, 15801], [15671, 15801, 15800], [15672, 15673, 15801], [15673, 15802, 15801], [15673, 15674, 15803], [15673, 15803, 15802], [15674, 15675, 15803], [15675, 15804, 15803], [15675, 15676, 15805], [15675, 15805, 15804], [15676, 15677, 15805], [15677, 15806, 15805], [15677, 15678, 15807], [15677, 15807, 15806], [15678, 15679, 15807], [15679, 15808, 15807], [15679, 15680, 15809], [15679, 15809, 15808], [15680, 15681, 15809], [15681, 15810, 15809], [15681, 15682, 15811], [15681, 15811, 15810], [15682, 15683, 15811], [15683, 15812, 15811], [15683, 15684, 15813], [15683, 15813, 15812], [15684, 15685, 15813], [15685, 15814, 15813], [15685, 15686, 15815], [15685, 15815, 15814], [15686, 15687, 15815], [15687, 15816, 15815], [15687, 15688, 15817], [15687, 15817, 15816], [15688, 15689, 15817], [15689, 15818, 15817], [15689, 15690, 15819], [15689, 15819, 15818], [15690, 15691, 15819], [15691, 15820, 15819], [15691, 15692, 15821], [15691, 15821, 15820], [15692, 15693, 15821], [15693, 15822, 15821], [15693, 15694, 15823], [15693, 15823, 15822], [15694, 15695, 15823], [15695, 15824, 15823], [15695, 15696, 15825], [15695, 15825, 15824], [15696, 15697, 15825], [15697, 15826, 15825], [15697, 15698, 15827], [15697, 15827, 15826], [15698, 15699, 15827], [15699, 15828, 15827], [15699, 15700, 15829], [15699, 15829, 15828], [15700, 15701, 15829], [15701, 15830, 15829], [15701, 15702, 15831], [15701, 15831, 15830], [15702, 15703, 15831], [15703, 15832, 15831], [15703, 15704, 15833], [15703, 15833, 15832], [15704, 15705, 15833], [15705, 15834, 15833], [15705, 15706, 15835], [15705, 15835, 15834], [15706, 15707, 15835], [15707, 15836, 15835], [15707, 15708, 15837], [15707, 15837, 15836], [15708, 15709, 15837], [15709, 15838, 15837], [15709, 15710, 15839], [15709, 15839, 15838], [15710, 15711, 15839], [15711, 15840, 15839], [15711, 15712, 15841], [15711, 15841, 15840], [15712, 15713, 15841], [15713, 15842, 15841], [15713, 15714, 15843], [15713, 15843, 15842], [15714, 15715, 15843], [15715, 15844, 15843], [15715, 15716, 15845], [15715, 15845, 15844], [15716, 15717, 15845], [15717, 15846, 15845], [15717, 15718, 15847], [15717, 15847, 15846], [15718, 15719, 15847], [15719, 15848, 15847], [15719, 15720, 15849], [15719, 15849, 15848], [15720, 15721, 15849], [15721, 15850, 15849], [15721, 15722, 15851], [15721, 15851, 15850], [15722, 15723, 15851], [15723, 15852, 15851], [15723, 15724, 15853], [15723, 15853, 15852], [15724, 15725, 15853], [15725, 15854, 15853], [15725, 15726, 15855], [15725, 15855, 15854], [15726, 15727, 15855], [15727, 15856, 15855], [15727, 15728, 15857], [15727, 15857, 15856], [15728, 15729, 15857], [15729, 15858, 15857], [15729, 15730, 15859], [15729, 15859, 15858], [15730, 15731, 15859], [15731, 15860, 15859], [15731, 15732, 
15861], [15731, 15861, 15860], [15732, 15733, 15861], [15733, 15862, 15861], [15733, 15734, 15863], [15733, 15863, 15862], [15734, 15735, 15863], [15735, 15864, 15863], [15735, 15736, 15865], [15735, 15865, 15864], [15736, 15737, 15865], [15737, 15866, 15865], [15737, 15738, 15867], [15737, 15867, 15866], [15738, 15739, 15867], [15739, 15868, 15867], [15739, 15740, 15869], [15739, 15869, 15868], [15740, 15741, 15869], [15741, 15870, 15869], [15741, 15742, 15871], [15741, 15871, 15870], [15742, 15743, 15871], [15743, 15872, 15871], [15743, 15744, 15873], [15743, 15873, 15872], [15744, 15745, 15873], [15745, 15874, 15873], [15745, 15746, 15875], [15745, 15875, 15874], [15746, 15747, 15875], [15747, 15876, 15875], [15747, 15748, 15877], [15747, 15877, 15876], [15748, 15749, 15877], [15749, 15878, 15877], [15749, 15750, 15879], [15749, 15879, 15878], [15750, 15751, 15879], [15751, 15880, 15879], [15751, 15752, 15881], [15751, 15881, 15880], [15752, 15753, 15881], [15753, 15882, 15881], [15753, 15754, 15883], [15753, 15883, 15882], [15754, 15755, 15883], [15755, 15884, 15883], [15755, 15756, 15885], [15755, 15885, 15884], [15756, 15757, 15885], [15757, 15886, 15885], [15757, 15758, 15887], [15757, 15887, 15886], [15758, 15759, 15887], [15759, 15888, 15887], [15759, 15760, 15889], [15759, 15889, 15888], [15760, 15761, 15889], [15761, 15890, 15889], [15761, 15762, 15891], [15761, 15891, 15890], [15762, 15763, 15891], [15763, 15892, 15891], [15763, 15764, 15893], [15763, 15893, 15892], [15764, 15765, 15893], [15765, 15894, 15893], [15765, 15766, 15895], [15765, 15895, 15894], [15766, 15767, 15895], [15767, 15896, 15895], [15767, 15768, 15897], [15767, 15897, 15896], [15768, 15769, 15897], [15769, 15898, 15897], [15769, 15770, 15899], [15769, 15899, 15898], [15770, 15771, 15899], [15771, 15900, 15899], [15771, 15772, 15901], [15771, 15901, 15900], [15772, 15773, 15901], [15773, 15902, 15901], [15773, 15774, 15903], [15773, 15903, 15902], [15774, 15775, 15903], [15775, 15904, 15903], [15775, 15776, 15905], [15775, 15905, 15904], [15776, 15777, 15905], [15777, 15906, 15905], [15777, 15778, 15907], [15777, 15907, 15906], [15778, 15779, 15907], [15779, 15908, 15907], [15779, 15780, 15909], [15779, 15909, 15908], [15780, 15781, 15909], [15781, 15910, 15909], [15781, 15782, 15911], [15781, 15911, 15910], [15782, 15783, 15911], [15783, 15912, 15911], [15784, 15785, 15913], [15785, 15914, 15913], [15785, 15786, 15915], [15785, 15915, 15914], [15786, 15787, 15915], [15787, 15916, 15915], [15787, 15788, 15917], [15787, 15917, 15916], [15788, 15789, 15917], [15789, 15918, 15917], [15789, 15790, 15919], [15789, 15919, 15918], [15790, 15791, 15919], [15791, 15920, 15919], [15791, 15792, 15921], [15791, 15921, 15920], [15792, 15793, 15921], [15793, 15922, 15921], [15793, 15794, 15923], [15793, 15923, 15922], [15794, 15795, 15923], [15795, 15924, 15923], [15795, 15796, 15925], [15795, 15925, 15924], [15796, 15797, 15925], [15797, 15926, 15925], [15797, 15798, 15927], [15797, 15927, 15926], [15798, 15799, 15927], [15799, 15928, 15927], [15799, 15800, 15929], [15799, 15929, 15928], [15800, 15801, 15929], [15801, 15930, 15929], [15801, 15802, 15931], [15801, 15931, 15930], [15802, 15803, 15931], [15803, 15932, 15931], [15803, 15804, 15933], [15803, 15933, 15932], [15804, 15805, 15933], [15805, 15934, 15933], [15805, 15806, 15935], [15805, 15935, 15934], [15806, 15807, 15935], [15807, 15936, 15935], [15807, 15808, 15937], [15807, 15937, 15936], [15808, 15809, 15937], [15809, 15938, 15937], [15809, 15810, 15939], 
[15809, 15939, 15938], [15810, 15811, 15939], [15811, 15940, 15939], [15811, 15812, 15941], [15811, 15941, 15940], [15812, 15813, 15941], [15813, 15942, 15941], [15813, 15814, 15943], [15813, 15943, 15942], [15814, 15815, 15943], [15815, 15944, 15943], [15815, 15816, 15945], [15815, 15945, 15944], [15816, 15817, 15945], [15817, 15946, 15945], [15817, 15818, 15947], [15817, 15947, 15946], [15818, 15819, 15947], [15819, 15948, 15947], [15819, 15820, 15949], [15819, 15949, 15948], [15820, 15821, 15949], [15821, 15950, 15949], [15821, 15822, 15951], [15821, 15951, 15950], [15822, 15823, 15951], [15823, 15952, 15951], [15823, 15824, 15953], [15823, 15953, 15952], [15824, 15825, 15953], [15825, 15954, 15953], [15825, 15826, 15955], [15825, 15955, 15954], [15826, 15827, 15955], [15827, 15956, 15955], [15827, 15828, 15957], [15827, 15957, 15956], [15828, 15829, 15957], [15829, 15958, 15957], [15829, 15830, 15959], [15829, 15959, 15958], [15830, 15831, 15959], [15831, 15960, 15959], [15831, 15832, 15961], [15831, 15961, 15960], [15832, 15833, 15961], [15833, 15962, 15961], [15833, 15834, 15963], [15833, 15963, 15962], [15834, 15835, 15963], [15835, 15964, 15963], [15835, 15836, 15965], [15835, 15965, 15964], [15836, 15837, 15965], [15837, 15966, 15965], [15837, 15838, 15967], [15837, 15967, 15966], [15838, 15839, 15967], [15839, 15968, 15967], [15839, 15840, 15969], [15839, 15969, 15968], [15840, 15841, 15969], [15841, 15970, 15969], [15841, 15842, 15971], [15841, 15971, 15970], [15842, 15843, 15971], [15843, 15972, 15971], [15843, 15844, 15973], [15843, 15973, 15972], [15844, 15845, 15973], [15845, 15974, 15973], [15845, 15846, 15975], [15845, 15975, 15974], [15846, 15847, 15975], [15847, 15976, 15975], [15847, 15848, 15977], [15847, 15977, 15976], [15848, 15849, 15977], [15849, 15978, 15977], [15849, 15850, 15979], [15849, 15979, 15978], [15850, 15851, 15979], [15851, 15980, 15979], [15851, 15852, 15981], [15851, 15981, 15980], [15852, 15853, 15981], [15853, 15982, 15981], [15853, 15854, 15983], [15853, 15983, 15982], [15854, 15855, 15983], [15855, 15984, 15983], [15855, 15856, 15985], [15855, 15985, 15984], [15856, 15857, 15985], [15857, 15986, 15985], [15857, 15858, 15987], [15857, 15987, 15986], [15858, 15859, 15987], [15859, 15988, 15987], [15859, 15860, 15989], [15859, 15989, 15988], [15860, 15861, 15989], [15861, 15990, 15989], [15861, 15862, 15991], [15861, 15991, 15990], [15862, 15863, 15991], [15863, 15992, 15991], [15863, 15864, 15993], [15863, 15993, 15992], [15864, 15865, 15993], [15865, 15994, 15993], [15865, 15866, 15995], [15865, 15995, 15994], [15866, 15867, 15995], [15867, 15996, 15995], [15867, 15868, 15997], [15867, 15997, 15996], [15868, 15869, 15997], [15869, 15998, 15997], [15869, 15870, 15999], [15869, 15999, 15998], [15870, 15871, 15999], [15871, 16000, 15999], [15871, 15872, 16001], [15871, 16001, 16000], [15872, 15873, 16001], [15873, 16002, 16001], [15873, 15874, 16003], [15873, 16003, 16002], [15874, 15875, 16003], [15875, 16004, 16003], [15875, 15876, 16005], [15875, 16005, 16004], [15876, 15877, 16005], [15877, 16006, 16005], [15877, 15878, 16007], [15877, 16007, 16006], [15878, 15879, 16007], [15879, 16008, 16007], [15879, 15880, 16009], [15879, 16009, 16008], [15880, 15881, 16009], [15881, 16010, 16009], [15881, 15882, 16011], [15881, 16011, 16010], [15882, 15883, 16011], [15883, 16012, 16011], [15883, 15884, 16013], [15883, 16013, 16012], [15884, 15885, 16013], [15885, 16014, 16013], [15885, 15886, 16015], [15885, 16015, 16014], [15886, 15887, 16015], [15887, 
16016, 16015], [15887, 15888, 16017], [15887, 16017, 16016], [15888, 15889, 16017], [15889, 16018, 16017], [15889, 15890, 16019], [15889, 16019, 16018], [15890, 15891, 16019], [15891, 16020, 16019], [15891, 15892, 16021], [15891, 16021, 16020], [15892, 15893, 16021], [15893, 16022, 16021], [15893, 15894, 16023], [15893, 16023, 16022], [15894, 15895, 16023], [15895, 16024, 16023], [15895, 15896, 16025], [15895, 16025, 16024], [15896, 15897, 16025], [15897, 16026, 16025], [15897, 15898, 16027], [15897, 16027, 16026], [15898, 15899, 16027], [15899, 16028, 16027], [15899, 15900, 16029], [15899, 16029, 16028], [15900, 15901, 16029], [15901, 16030, 16029], [15901, 15902, 16031], [15901, 16031, 16030], [15902, 15903, 16031], [15903, 16032, 16031], [15903, 15904, 16033], [15903, 16033, 16032], [15904, 15905, 16033], [15905, 16034, 16033], [15905, 15906, 16035], [15905, 16035, 16034], [15906, 15907, 16035], [15907, 16036, 16035], [15907, 15908, 16037], [15907, 16037, 16036], [15908, 15909, 16037], [15909, 16038, 16037], [15909, 15910, 16039], [15909, 16039, 16038], [15910, 15911, 16039], [15911, 16040, 16039], [15911, 15912, 16041], [15911, 16041, 16040], [15913, 15914, 16043], [15913, 16043, 16042], [15914, 15915, 16043], [15915, 16044, 16043], [15915, 15916, 16045], [15915, 16045, 16044], [15916, 15917, 16045], [15917, 16046, 16045], [15917, 15918, 16047], [15917, 16047, 16046], [15918, 15919, 16047], [15919, 16048, 16047], [15919, 15920, 16049], [15919, 16049, 16048], [15920, 15921, 16049], [15921, 16050, 16049], [15921, 15922, 16051], [15921, 16051, 16050], [15922, 15923, 16051], [15923, 16052, 16051], [15923, 15924, 16053], [15923, 16053, 16052], [15924, 15925, 16053], [15925, 16054, 16053], [15925, 15926, 16055], [15925, 16055, 16054], [15926, 15927, 16055], [15927, 16056, 16055], [15927, 15928, 16057], [15927, 16057, 16056], [15928, 15929, 16057], [15929, 16058, 16057], [15929, 15930, 16059], [15929, 16059, 16058], [15930, 15931, 16059], [15931, 16060, 16059], [15931, 15932, 16061], [15931, 16061, 16060], [15932, 15933, 16061], [15933, 16062, 16061], [15933, 15934, 16063], [15933, 16063, 16062], [15934, 15935, 16063], [15935, 16064, 16063], [15935, 15936, 16065], [15935, 16065, 16064], [15936, 15937, 16065], [15937, 16066, 16065], [15937, 15938, 16067], [15937, 16067, 16066], [15938, 15939, 16067], [15939, 16068, 16067], [15939, 15940, 16069], [15939, 16069, 16068], [15940, 15941, 16069], [15941, 16070, 16069], [15941, 15942, 16071], [15941, 16071, 16070], [15942, 15943, 16071], [15943, 16072, 16071], [15943, 15944, 16073], [15943, 16073, 16072], [15944, 15945, 16073], [15945, 16074, 16073], [15945, 15946, 16075], [15945, 16075, 16074], [15946, 15947, 16075], [15947, 16076, 16075], [15947, 15948, 16077], [15947, 16077, 16076], [15948, 15949, 16077], [15949, 16078, 16077], [15949, 15950, 16079], [15949, 16079, 16078], [15950, 15951, 16079], [15951, 16080, 16079], [15951, 15952, 16081], [15951, 16081, 16080], [15952, 15953, 16081], [15953, 16082, 16081], [15953, 15954, 16083], [15953, 16083, 16082], [15954, 15955, 16083], [15955, 16084, 16083], [15955, 15956, 16085], [15955, 16085, 16084], [15956, 15957, 16085], [15957, 16086, 16085], [15957, 15958, 16087], [15957, 16087, 16086], [15958, 15959, 16087], [15959, 16088, 16087], [15959, 15960, 16089], [15959, 16089, 16088], [15960, 15961, 16089], [15961, 16090, 16089], [15961, 15962, 16091], [15961, 16091, 16090], [15962, 15963, 16091], [15963, 16092, 16091], [15963, 15964, 16093], [15963, 16093, 16092], [15964, 15965, 16093], [15965, 16094, 
16093], [15965, 15966, 16095], [15965, 16095, 16094], [15966, 15967, 16095], [15967, 16096, 16095], [15967, 15968, 16097], [15967, 16097, 16096], [15968, 15969, 16097], [15969, 16098, 16097], [15969, 15970, 16099], [15969, 16099, 16098], [15970, 15971, 16099], [15971, 16100, 16099], [15971, 15972, 16101], [15971, 16101, 16100], [15972, 15973, 16101], [15973, 16102, 16101], [15973, 15974, 16103], [15973, 16103, 16102], [15974, 15975, 16103], [15975, 16104, 16103], [15975, 15976, 16105], [15975, 16105, 16104], [15976, 15977, 16105], [15977, 16106, 16105], [15977, 15978, 16107], [15977, 16107, 16106], [15978, 15979, 16107], [15979, 16108, 16107], [15979, 15980, 16109], [15979, 16109, 16108], [15980, 15981, 16109], [15981, 16110, 16109], [15981, 15982, 16111], [15981, 16111, 16110], [15982, 15983, 16111], [15983, 16112, 16111], [15983, 15984, 16113], [15983, 16113, 16112], [15984, 15985, 16113], [15985, 16114, 16113], [15985, 15986, 16115], [15985, 16115, 16114], [15986, 15987, 16115], [15987, 16116, 16115], [15987, 15988, 16117], [15987, 16117, 16116], [15988, 15989, 16117], [15989, 16118, 16117], [15989, 15990, 16119], [15989, 16119, 16118], [15990, 15991, 16119], [15991, 16120, 16119], [15991, 15992, 16121], [15991, 16121, 16120], [15992, 15993, 16121], [15993, 16122, 16121], [15993, 15994, 16123], [15993, 16123, 16122], [15994, 15995, 16123], [15995, 16124, 16123], [15995, 15996, 16125], [15995, 16125, 16124], [15996, 15997, 16125], [15997, 16126, 16125], [15997, 15998, 16127], [15997, 16127, 16126], [15998, 15999, 16127], [15999, 16128, 16127], [15999, 16000, 16129], [15999, 16129, 16128], [16000, 16001, 16129], [16001, 16130, 16129], [16001, 16002, 16131], [16001, 16131, 16130], [16002, 16003, 16131], [16003, 16132, 16131], [16003, 16004, 16133], [16003, 16133, 16132], [16004, 16005, 16133], [16005, 16134, 16133], [16005, 16006, 16135], [16005, 16135, 16134], [16006, 16007, 16135], [16007, 16136, 16135], [16007, 16008, 16137], [16007, 16137, 16136], [16008, 16009, 16137], [16009, 16138, 16137], [16009, 16010, 16139], [16009, 16139, 16138], [16010, 16011, 16139], [16011, 16140, 16139], [16011, 16012, 16141], [16011, 16141, 16140], [16012, 16013, 16141], [16013, 16142, 16141], [16013, 16014, 16143], [16013, 16143, 16142], [16014, 16015, 16143], [16015, 16144, 16143], [16015, 16016, 16145], [16015, 16145, 16144], [16016, 16017, 16145], [16017, 16146, 16145], [16017, 16018, 16147], [16017, 16147, 16146], [16018, 16019, 16147], [16019, 16148, 16147], [16019, 16020, 16149], [16019, 16149, 16148], [16020, 16021, 16149], [16021, 16150, 16149], [16021, 16022, 16151], [16021, 16151, 16150], [16022, 16023, 16151], [16023, 16152, 16151], [16023, 16024, 16153], [16023, 16153, 16152], [16024, 16025, 16153], [16025, 16154, 16153], [16025, 16026, 16155], [16025, 16155, 16154], [16026, 16027, 16155], [16027, 16156, 16155], [16027, 16028, 16157], [16027, 16157, 16156], [16028, 16029, 16157], [16029, 16158, 16157], [16029, 16030, 16159], [16029, 16159, 16158], [16030, 16031, 16159], [16031, 16160, 16159], [16031, 16032, 16161], [16031, 16161, 16160], [16032, 16033, 16161], [16033, 16162, 16161], [16033, 16034, 16163], [16033, 16163, 16162], [16034, 16035, 16163], [16035, 16164, 16163], [16035, 16036, 16165], [16035, 16165, 16164], [16036, 16037, 16165], [16037, 16166, 16165], [16037, 16038, 16167], [16037, 16167, 16166], [16038, 16039, 16167], [16039, 16168, 16167], [16039, 16040, 16169], [16039, 16169, 16168], [16040, 16041, 16169], [16041, 16170, 16169], [16042, 16043, 19267], [16043, 19268, 19267], 
[16043, 16044, 19269], [16043, 19269, 19268], [16044, 16045, 19269], [16045, 19270, 19269], [16045, 16046, 19271], [16045, 19271, 19270], [16046, 16047, 19271], [16047, 19272, 19271], [16047, 16048, 19273], [16047, 19273, 19272], [16048, 16049, 19273], [16049, 19274, 19273], [16049, 16050, 19275], [16049, 19275, 19274], [16050, 16051, 19275], [16051, 19276, 19275], [16051, 16052, 19277], [16051, 19277, 19276], [16052, 16053, 19277], [16053, 19278, 19277], [16053, 16054, 19279], [16053, 19279, 19278], [16054, 16055, 19279], [16055, 19280, 19279], [16055, 16056, 19281], [16055, 19281, 19280], [16056, 16057, 19281], [16057, 19282, 19281], [16057, 16058, 19283], [16057, 19283, 19282], [16058, 16059, 19283], [16059, 19284, 19283], [16059, 16060, 19285], [16059, 19285, 19284], [16060, 16061, 19285], [16061, 19286, 19285], [16061, 16062, 19287], [16061, 19287, 19286], [16062, 16063, 19287], [16063, 19288, 19287], [16063, 16064, 19289], [16063, 19289, 19288], [16064, 16065, 19289], [16065, 19290, 19289], [16065, 16066, 19291], [16065, 19291, 19290], [16066, 16067, 19291], [16067, 19292, 19291], [16067, 16068, 19293], [16067, 19293, 19292], [16068, 16069, 19293], [16069, 19294, 19293], [16069, 16070, 19295], [16069, 19295, 19294], [16070, 16071, 19295], [16071, 19296, 19295], [16071, 16072, 19297], [16071, 19297, 19296], [16072, 16073, 19297], [16073, 19298, 19297], [16073, 16074, 19299], [16073, 19299, 19298], [16074, 16075, 19299], [16075, 19300, 19299], [16075, 16076, 19301], [16075, 19301, 19300], [16076, 16077, 19301], [16077, 19302, 19301], [16077, 16078, 19303], [16077, 19303, 19302], [16078, 16079, 19303], [16079, 19304, 19303], [16079, 16080, 19305], [16079, 19305, 19304], [16080, 16081, 19305], [16081, 19306, 19305], [16081, 16082, 19307], [16081, 19307, 19306], [16082, 16083, 19307], [16083, 19308, 19307], [16083, 16084, 19309], [16083, 19309, 19308], [16084, 16085, 19309], [16085, 19310, 19309], [16085, 16086, 19311], [16085, 19311, 19310], [16086, 16087, 19311], [16087, 19312, 19311], [16087, 16088, 19313], [16087, 19313, 19312], [16088, 16089, 19313], [16089, 19314, 19313], [16089, 16090, 19315], [16089, 19315, 19314], [16090, 16091, 19315], [16091, 19316, 19315], [16091, 16092, 19317], [16091, 19317, 19316], [16092, 16093, 19317], [16093, 19318, 19317], [16093, 16094, 19319], [16093, 19319, 19318], [16094, 16095, 19319], [16095, 19320, 19319], [16095, 16096, 19321], [16095, 19321, 19320], [16096, 16097, 19321], [16097, 19322, 19321], [16097, 16098, 19323], [16097, 19323, 19322], [16098, 16099, 19323], [16099, 19324, 19323], [16099, 16100, 19325], [16099, 19325, 19324], [16100, 16101, 19325], [16101, 19326, 19325], [16101, 16102, 19327], [16101, 19327, 19326], [16102, 16103, 19327], [16103, 19328, 19327], [16103, 16104, 19329], [16103, 19329, 19328], [16104, 16105, 19329], [16105, 19330, 19329], [16105, 16106, 19331], [16105, 19331, 19330], [16106, 16107, 19331], [16107, 19332, 19331], [16107, 16108, 19333], [16107, 19333, 19332], [16108, 16109, 19333], [16109, 19334, 19333], [16109, 16110, 19335], [16109, 19335, 19334], [16110, 16111, 19335], [16111, 19336, 19335], [16111, 16112, 19337], [16111, 19337, 19336], [16112, 16113, 19337], [16113, 19338, 19337], [16113, 16114, 19339], [16113, 19339, 19338], [16114, 16115, 19339], [16115, 19340, 19339], [16115, 16116, 19341], [16115, 19341, 19340], [16116, 16117, 19341], [16117, 19342, 19341], [16117, 16118, 19343], [16117, 19343, 19342], [16118, 16119, 19343], [16119, 19344, 19343], [16119, 16120, 19345], [16119, 19345, 19344], [16120, 
16121, 19345], [16121, 19346, 19345], [16121, 16122, 19347], [16121, 19347, 19346], [16122, 16123, 19347], [16123, 19348, 19347], [16123, 16124, 19349], [16123, 19349, 19348], [16124, 16125, 19349], [16125, 19350, 19349], [16125, 16126, 19351], [16125, 19351, 19350], [16126, 16127, 19351], [16127, 19352, 19351], [16127, 16128, 19353], [16127, 19353, 19352], [16128, 16129, 19353], [16129, 19354, 19353], [16129, 16130, 19355], [16129, 19355, 19354], [16130, 16131, 19355], [16131, 19356, 19355], [16131, 16132, 19357], [16131, 19357, 19356], [16132, 16133, 19357], [16133, 19358, 19357], [16133, 16134, 19359], [16133, 19359, 19358], [16134, 16135, 19359], [16135, 19360, 19359], [16135, 16136, 19361], [16135, 19361, 19360], [16136, 16137, 19361], [16137, 19362, 19361], [16137, 16138, 19363], [16137, 19363, 19362], [16138, 16139, 19363], [16139, 19364, 19363], [16139, 16140, 19365], [16139, 19365, 19364], [16140, 16141, 19365], [16141, 19366, 19365], [16141, 16142, 19367], [16141, 19367, 19366], [16142, 16143, 19367], [16143, 19368, 19367], [16143, 16144, 19369], [16143, 19369, 19368], [16144, 16145, 19369], [16145, 19370, 19369], [16145, 16146, 19371], [16145, 19371, 19370], [16146, 16147, 19371], [16147, 19372, 19371], [16147, 16148, 19373], [16147, 19373, 19372], [16148, 16149, 19373], [16149, 19374, 19373], [16149, 16150, 19375], [16149, 19375, 19374], [16150, 16151, 19375], [16151, 19376, 19375], [16151, 16152, 19377], [16151, 19377, 19376], [16152, 16153, 19377], [16153, 19378, 19377], [16153, 16154, 19379], [16153, 19379, 19378], [16154, 16155, 19379], [16155, 19380, 19379], [16155, 16156, 19381], [16155, 19381, 19380], [16156, 16157, 19381], [16157, 19382, 19381], [16157, 16158, 19383], [16157, 19383, 19382], [16158, 16159, 19383], [16159, 19384, 19383], [16159, 16160, 19385], [16159, 19385, 19384], [16160, 16161, 19385], [16161, 19386, 19385], [16161, 16162, 19387], [16161, 19387, 19386], [16162, 16163, 19387], [16163, 19388, 19387], [16163, 16164, 19389], [16163, 19389, 19388], [16164, 16165, 19389], [16165, 19390, 19389], [16165, 16166, 19391], [16165, 19391, 19390], [16166, 16167, 19391], [16167, 19392, 19391], [16167, 16168, 19393], [16167, 19393, 19392], [16168, 16169, 19393], [16169, 19394, 19393], [16169, 16170, 19395], [16169, 19395, 19394], [16171, 16172, 16301], [16171, 16301, 16300], [16172, 16173, 16301], [16173, 16302, 16301], [16173, 16174, 16303], [16173, 16303, 16302], [16174, 16175, 16303], [16175, 16304, 16303], [16175, 16176, 16305], [16175, 16305, 16304], [16176, 16177, 16305], [16177, 16306, 16305], [16177, 16178, 16307], [16177, 16307, 16306], [16178, 16179, 16307], [16179, 16308, 16307], [16179, 16180, 16309], [16179, 16309, 16308], [16180, 16181, 16309], [16181, 16310, 16309], [16181, 16182, 16311], [16181, 16311, 16310], [16182, 16183, 16311], [16183, 16312, 16311], [16183, 16184, 16313], [16183, 16313, 16312], [16184, 16185, 16313], [16185, 16314, 16313], [16185, 16186, 16315], [16185, 16315, 16314], [16186, 16187, 16315], [16187, 16316, 16315], [16187, 16188, 16317], [16187, 16317, 16316], [16188, 16189, 16317], [16189, 16318, 16317], [16189, 16190, 16319], [16189, 16319, 16318], [16190, 16191, 16319], [16191, 16320, 16319], [16191, 16192, 16321], [16191, 16321, 16320], [16192, 16193, 16321], [16193, 16322, 16321], [16193, 16194, 16323], [16193, 16323, 16322], [16194, 16195, 16323], [16195, 16324, 16323], [16195, 16196, 16325], [16195, 16325, 16324], [16196, 16197, 16325], [16197, 16326, 16325], [16197, 16198, 16327], [16197, 16327, 16326], [16198, 16199, 
16327], [16199, 16328, 16327], [16199, 16200, 16329], [16199, 16329, 16328], [16200, 16201, 16329], [16201, 16330, 16329], [16201, 16202, 16331], [16201, 16331, 16330], [16202, 16203, 16331], [16203, 16332, 16331], [16203, 16204, 16333], [16203, 16333, 16332], [16204, 16205, 16333], [16205, 16334, 16333], [16205, 16206, 16335], [16205, 16335, 16334], [16206, 16207, 16335], [16207, 16336, 16335], [16207, 16208, 16337], [16207, 16337, 16336], [16208, 16209, 16337], [16209, 16338, 16337], [16209, 16210, 16339], [16209, 16339, 16338], [16210, 16211, 16339], [16211, 16340, 16339], [16211, 16212, 16341], [16211, 16341, 16340], [16212, 16213, 16341], [16213, 16342, 16341], [16213, 16214, 16343], [16213, 16343, 16342], [16214, 16215, 16343], [16215, 16344, 16343], [16215, 16216, 16345], [16215, 16345, 16344], [16216, 16217, 16345], [16217, 16346, 16345], [16217, 16218, 16347], [16217, 16347, 16346], [16218, 16219, 16347], [16219, 16348, 16347], [16219, 16220, 16349], [16219, 16349, 16348], [16220, 16221, 16349], [16221, 16350, 16349], [16221, 16222, 16351], [16221, 16351, 16350], [16222, 16223, 16351], [16223, 16352, 16351], [16223, 16224, 16353], [16223, 16353, 16352], [16224, 16225, 16353], [16225, 16354, 16353], [16225, 16226, 16355], [16225, 16355, 16354], [16226, 16227, 16355], [16227, 16356, 16355], [16227, 16228, 16357], [16227, 16357, 16356], [16228, 16229, 16357], [16229, 16358, 16357], [16229, 16230, 16359], [16229, 16359, 16358], [16230, 16231, 16359], [16231, 16360, 16359], [16231, 16232, 16361], [16231, 16361, 16360], [16232, 16233, 16361], [16233, 16362, 16361], [16233, 16234, 16363], [16233, 16363, 16362], [16234, 16235, 16363], [16235, 16364, 16363], [16235, 16236, 16365], [16235, 16365, 16364], [16236, 16237, 16365], [16237, 16366, 16365], [16237, 16238, 16367], [16237, 16367, 16366], [16238, 16239, 16367], [16239, 16368, 16367], [16239, 16240, 16369], [16239, 16369, 16368], [16240, 16241, 16369], [16241, 16370, 16369], [16241, 16242, 16371], [16241, 16371, 16370], [16242, 16243, 16371], [16243, 16372, 16371], [16243, 16244, 16373], [16243, 16373, 16372], [16244, 16245, 16373], [16245, 16374, 16373], [16245, 16246, 16375], [16245, 16375, 16374], [16246, 16247, 16375], [16247, 16376, 16375], [16247, 16248, 16377], [16247, 16377, 16376], [16248, 16249, 16377], [16249, 16378, 16377], [16249, 16250, 16379], [16249, 16379, 16378], [16250, 16251, 16379], [16251, 16380, 16379], [16251, 16252, 16381], [16251, 16381, 16380], [16252, 16253, 16381], [16253, 16382, 16381], [16253, 16254, 16383], [16253, 16383, 16382], [16254, 16255, 16383], [16255, 16384, 16383], [16255, 16256, 16385], [16255, 16385, 16384], [16256, 16257, 16385], [16257, 16386, 16385], [16257, 16258, 16387], [16257, 16387, 16386], [16258, 16259, 16387], [16259, 16388, 16387], [16259, 16260, 16389], [16259, 16389, 16388], [16260, 16261, 16389], [16261, 16390, 16389], [16261, 16262, 16391], [16261, 16391, 16390], [16262, 16263, 16391], [16263, 16392, 16391], [16263, 16264, 16393], [16263, 16393, 16392], [16264, 16265, 16393], [16265, 16394, 16393], [16265, 16266, 16395], [16265, 16395, 16394], [16266, 16267, 16395], [16267, 16396, 16395], [16267, 16268, 16397], [16267, 16397, 16396], [16268, 16269, 16397], [16269, 16398, 16397], [16269, 16270, 16399], [16269, 16399, 16398], [16270, 16271, 16399], [16271, 16400, 16399], [16271, 16272, 16401], [16271, 16401, 16400], [16272, 16273, 16401], [16273, 16402, 16401], [16273, 16274, 16403], [16273, 16403, 16402], [16274, 16275, 16403], [16275, 16404, 16403], [16275, 16276, 16405], 
[16275, 16405, 16404], [16276, 16277, 16405], [16277, 16406, 16405], [16277, 16278, 16407], [16277, 16407, 16406], [16278, 16279, 16407], [16279, 16408, 16407], [16279, 16280, 16409], [16279, 16409, 16408], [16280, 16281, 16409], [16281, 16410, 16409], [16281, 16282, 16411], [16281, 16411, 16410], [16282, 16283, 16411], [16283, 16412, 16411], [16283, 16284, 16413], [16283, 16413, 16412], [16284, 16285, 16413], [16285, 16414, 16413], [16285, 16286, 16415], [16285, 16415, 16414], [16286, 16287, 16415], [16287, 16416, 16415], [16287, 16288, 16417], [16287, 16417, 16416], [16288, 16289, 16417], [16289, 16418, 16417], [16289, 16290, 16419], [16289, 16419, 16418], [16290, 16291, 16419], [16291, 16420, 16419], [16291, 16292, 16421], [16291, 16421, 16420], [16292, 16293, 16421], [16293, 16422, 16421], [16293, 16294, 16423], [16293, 16423, 16422], [16294, 16295, 16423], [16295, 16424, 16423], [16295, 16296, 16425], [16295, 16425, 16424], [16296, 16297, 16425], [16297, 16426, 16425], [16297, 16298, 16427], [16297, 16427, 16426], [16298, 16299, 16427], [16299, 16428, 16427], [16300, 16301, 16429], [16301, 16430, 16429], [16301, 16302, 16431], [16301, 16431, 16430], [16302, 16303, 16431], [16303, 16432, 16431], [16303, 16304, 16433], [16303, 16433, 16432], [16304, 16305, 16433], [16305, 16434, 16433], [16305, 16306, 16435], [16305, 16435, 16434], [16306, 16307, 16435], [16307, 16436, 16435], [16307, 16308, 16437], [16307, 16437, 16436], [16308, 16309, 16437], [16309, 16438, 16437], [16309, 16310, 16439], [16309, 16439, 16438], [16310, 16311, 16439], [16311, 16440, 16439], [16311, 16312, 16441], [16311, 16441, 16440], [16312, 16313, 16441], [16313, 16442, 16441], [16313, 16314, 16443], [16313, 16443, 16442], [16314, 16315, 16443], [16315, 16444, 16443], [16315, 16316, 16445], [16315, 16445, 16444], [16316, 16317, 16445], [16317, 16446, 16445], [16317, 16318, 16447], [16317, 16447, 16446], [16318, 16319, 16447], [16319, 16448, 16447], [16319, 16320, 16449], [16319, 16449, 16448], [16320, 16321, 16449], [16321, 16450, 16449], [16321, 16322, 16451], [16321, 16451, 16450], [16322, 16323, 16451], [16323, 16452, 16451], [16323, 16324, 16453], [16323, 16453, 16452], [16324, 16325, 16453], [16325, 16454, 16453], [16325, 16326, 16455], [16325, 16455, 16454], [16326, 16327, 16455], [16327, 16456, 16455], [16327, 16328, 16457], [16327, 16457, 16456], [16328, 16329, 16457], [16329, 16458, 16457], [16329, 16330, 16459], [16329, 16459, 16458], [16330, 16331, 16459], [16331, 16460, 16459], [16331, 16332, 16461], [16331, 16461, 16460], [16332, 16333, 16461], [16333, 16462, 16461], [16333, 16334, 16463], [16333, 16463, 16462], [16334, 16335, 16463], [16335, 16464, 16463], [16335, 16336, 16465], [16335, 16465, 16464], [16336, 16337, 16465], [16337, 16466, 16465], [16337, 16338, 16467], [16337, 16467, 16466], [16338, 16339, 16467], [16339, 16468, 16467], [16339, 16340, 16469], [16339, 16469, 16468], [16340, 16341, 16469], [16341, 16470, 16469], [16341, 16342, 16471], [16341, 16471, 16470], [16342, 16343, 16471], [16343, 16472, 16471], [16343, 16344, 16473], [16343, 16473, 16472], [16344, 16345, 16473], [16345, 16474, 16473], [16345, 16346, 16475], [16345, 16475, 16474], [16346, 16347, 16475], [16347, 16476, 16475], [16347, 16348, 16477], [16347, 16477, 16476], [16348, 16349, 16477], [16349, 16478, 16477], [16349, 16350, 16479], [16349, 16479, 16478], [16350, 16351, 16479], [16351, 16480, 16479], [16351, 16352, 16481], [16351, 16481, 16480], [16352, 16353, 16481], [16353, 16482, 16481], [16353, 16354, 16483], [16353, 
16483, 16482], [16354, 16355, 16483], [16355, 16484, 16483], [16355, 16356, 16485], [16355, 16485, 16484], [16356, 16357, 16485], [16357, 16486, 16485], [16357, 16358, 16487], [16357, 16487, 16486], [16358, 16359, 16487], [16359, 16488, 16487], [16359, 16360, 16489], [16359, 16489, 16488], [16360, 16361, 16489], [16361, 16490, 16489], [16361, 16362, 16491], [16361, 16491, 16490], [16362, 16363, 16491], [16363, 16492, 16491], [16363, 16364, 16493], [16363, 16493, 16492], [16364, 16365, 16493], [16365, 16494, 16493], [16365, 16366, 16495], [16365, 16495, 16494], [16366, 16367, 16495], [16367, 16496, 16495], [16367, 16368, 16497], [16367, 16497, 16496], [16368, 16369, 16497], [16369, 16498, 16497], [16369, 16370, 16499], [16369, 16499, 16498], [16370, 16371, 16499], [16371, 16500, 16499], [16371, 16372, 16501], [16371, 16501, 16500], [16372, 16373, 16501], [16373, 16502, 16501], [16373, 16374, 16503], [16373, 16503, 16502], [16374, 16375, 16503], [16375, 16504, 16503], [16375, 16376, 16505], [16375, 16505, 16504], [16376, 16377, 16505], [16377, 16506, 16505], [16377, 16378, 16507], [16377, 16507, 16506], [16378, 16379, 16507], [16379, 16508, 16507], [16379, 16380, 16509], [16379, 16509, 16508], [16380, 16381, 16509], [16381, 16510, 16509], [16381, 16382, 16511], [16381, 16511, 16510], [16382, 16383, 16511], [16383, 16512, 16511], [16383, 16384, 16513], [16383, 16513, 16512], [16384, 16385, 16513], [16385, 16514, 16513], [16385, 16386, 16515], [16385, 16515, 16514], [16386, 16387, 16515], [16387, 16516, 16515], [16387, 16388, 16517], [16387, 16517, 16516], [16388, 16389, 16517], [16389, 16518, 16517], [16389, 16390, 16519], [16389, 16519, 16518], [16390, 16391, 16519], [16391, 16520, 16519], [16391, 16392, 16521], [16391, 16521, 16520], [16392, 16393, 16521], [16393, 16522, 16521], [16393, 16394, 16523], [16393, 16523, 16522], [16394, 16395, 16523], [16395, 16524, 16523], [16395, 16396, 16525], [16395, 16525, 16524], [16396, 16397, 16525], [16397, 16526, 16525], [16397, 16398, 16527], [16397, 16527, 16526], [16398, 16399, 16527], [16399, 16528, 16527], [16399, 16400, 16529], [16399, 16529, 16528], [16400, 16401, 16529], [16401, 16530, 16529], [16401, 16402, 16531], [16401, 16531, 16530], [16402, 16403, 16531], [16403, 16532, 16531], [16403, 16404, 16533], [16403, 16533, 16532], [16404, 16405, 16533], [16405, 16534, 16533], [16405, 16406, 16535], [16405, 16535, 16534], [16406, 16407, 16535], [16407, 16536, 16535], [16407, 16408, 16537], [16407, 16537, 16536], [16408, 16409, 16537], [16409, 16538, 16537], [16409, 16410, 16539], [16409, 16539, 16538], [16410, 16411, 16539], [16411, 16540, 16539], [16411, 16412, 16541], [16411, 16541, 16540], [16412, 16413, 16541], [16413, 16542, 16541], [16413, 16414, 16543], [16413, 16543, 16542], [16414, 16415, 16543], [16415, 16544, 16543], [16415, 16416, 16545], [16415, 16545, 16544], [16416, 16417, 16545], [16417, 16546, 16545], [16417, 16418, 16547], [16417, 16547, 16546], [16418, 16419, 16547], [16419, 16548, 16547], [16419, 16420, 16549], [16419, 16549, 16548], [16420, 16421, 16549], [16421, 16550, 16549], [16421, 16422, 16551], [16421, 16551, 16550], [16422, 16423, 16551], [16423, 16552, 16551], [16423, 16424, 16553], [16423, 16553, 16552], [16424, 16425, 16553], [16425, 16554, 16553], [16425, 16426, 16555], [16425, 16555, 16554], [16426, 16427, 16555], [16427, 16556, 16555], [16427, 16428, 16557], [16427, 16557, 16556], [16429, 16430, 16559], [16429, 16559, 16558], [16430, 16431, 16559], [16431, 16560, 16559], [16431, 16432, 16561], [16431, 16561, 
16560], [16432, 16433, 16561], [16433, 16562, 16561], [16433, 16434, 16563], [16433, 16563, 16562], [16434, 16435, 16563], [16435, 16564, 16563], [16435, 16436, 16565], [16435, 16565, 16564], [16436, 16437, 16565], [16437, 16566, 16565], [16437, 16438, 16567], [16437, 16567, 16566], [16438, 16439, 16567], [16439, 16568, 16567], [16439, 16440, 16569], [16439, 16569, 16568], [16440, 16441, 16569], [16441, 16570, 16569], [16441, 16442, 16571], [16441, 16571, 16570], [16442, 16443, 16571], [16443, 16572, 16571], [16443, 16444, 16573], [16443, 16573, 16572], [16444, 16445, 16573], [16445, 16574, 16573], [16445, 16446, 16575], [16445, 16575, 16574], [16446, 16447, 16575], [16447, 16576, 16575], [16447, 16448, 16577], [16447, 16577, 16576], [16448, 16449, 16577], [16449, 16578, 16577], [16449, 16450, 16579], [16449, 16579, 16578], [16450, 16451, 16579], [16451, 16580, 16579], [16451, 16452, 16581], [16451, 16581, 16580], [16452, 16453, 16581], [16453, 16582, 16581], [16453, 16454, 16583], [16453, 16583, 16582], [16454, 16455, 16583], [16455, 16584, 16583], [16455, 16456, 16585], [16455, 16585, 16584], [16456, 16457, 16585], [16457, 16586, 16585], [16457, 16458, 16587], [16457, 16587, 16586], [16458, 16459, 16587], [16459, 16588, 16587], [16459, 16460, 16589], [16459, 16589, 16588], [16460, 16461, 16589], [16461, 16590, 16589], [16461, 16462, 16591], [16461, 16591, 16590], [16462, 16463, 16591], [16463, 16592, 16591], [16463, 16464, 16593], [16463, 16593, 16592], [16464, 16465, 16593], [16465, 16594, 16593], [16465, 16466, 16595], [16465, 16595, 16594], [16466, 16467, 16595], [16467, 16596, 16595], [16467, 16468, 16597], [16467, 16597, 16596], [16468, 16469, 16597], [16469, 16598, 16597], [16469, 16470, 16599], [16469, 16599, 16598], [16470, 16471, 16599], [16471, 16600, 16599], [16471, 16472, 16601], [16471, 16601, 16600], [16472, 16473, 16601], [16473, 16602, 16601], [16473, 16474, 16603], [16473, 16603, 16602], [16474, 16475, 16603], [16475, 16604, 16603], [16475, 16476, 16605], [16475, 16605, 16604], [16476, 16477, 16605], [16477, 16606, 16605], [16477, 16478, 16607], [16477, 16607, 16606], [16478, 16479, 16607], [16479, 16608, 16607], [16479, 16480, 16609], [16479, 16609, 16608], [16480, 16481, 16609], [16481, 16610, 16609], [16481, 16482, 16611], [16481, 16611, 16610], [16482, 16483, 16611], [16483, 16612, 16611], [16483, 16484, 16613], [16483, 16613, 16612], [16484, 16485, 16613], [16485, 16614, 16613], [16485, 16486, 16615], [16485, 16615, 16614], [16486, 16487, 16615], [16487, 16616, 16615], [16487, 16488, 16617], [16487, 16617, 16616], [16488, 16489, 16617], [16489, 16618, 16617], [16489, 16490, 16619], [16489, 16619, 16618], [16490, 16491, 16619], [16491, 16620, 16619], [16491, 16492, 16621], [16491, 16621, 16620], [16492, 16493, 16621], [16493, 16622, 16621], [16493, 16494, 16623], [16493, 16623, 16622], [16494, 16495, 16623], [16495, 16624, 16623], [16495, 16496, 16625], [16495, 16625, 16624], [16496, 16497, 16625], [16497, 16626, 16625], [16497, 16498, 16627], [16497, 16627, 16626], [16498, 16499, 16627], [16499, 16628, 16627], [16499, 16500, 16629], [16499, 16629, 16628], [16500, 16501, 16629], [16501, 16630, 16629], [16501, 16502, 16631], [16501, 16631, 16630], [16502, 16503, 16631], [16503, 16632, 16631], [16503, 16504, 16633], [16503, 16633, 16632], [16504, 16505, 16633], [16505, 16634, 16633], [16505, 16506, 16635], [16505, 16635, 16634], [16506, 16507, 16635], [16507, 16636, 16635], [16507, 16508, 16637], [16507, 16637, 16636], [16508, 16509, 16637], [16509, 16638, 16637], 
[16509, 16510, 16639], [16509, 16639, 16638], [16510, 16511, 16639], [16511, 16640, 16639], [16511, 16512, 16641], [16511, 16641, 16640], [16512, 16513, 16641], [16513, 16642, 16641], [16513, 16514, 16643], [16513, 16643, 16642], [16514, 16515, 16643], [16515, 16644, 16643], [16515, 16516, 16645], [16515, 16645, 16644], [16516, 16517, 16645], [16517, 16646, 16645], [16517, 16518, 16647], [16517, 16647, 16646], [16518, 16519, 16647], [16519, 16648, 16647], [16519, 16520, 16649], [16519, 16649, 16648], [16520, 16521, 16649], [16521, 16650, 16649], [16521, 16522, 16651], [16521, 16651, 16650], [16522, 16523, 16651], [16523, 16652, 16651], [16523, 16524, 16653], [16523, 16653, 16652], [16524, 16525, 16653], [16525, 16654, 16653], [16525, 16526, 16655], [16525, 16655, 16654], [16526, 16527, 16655], [16527, 16656, 16655], [16527, 16528, 16657], [16527, 16657, 16656], [16528, 16529, 16657], [16529, 16658, 16657], [16529, 16530, 16659], [16529, 16659, 16658], [16530, 16531, 16659], [16531, 16660, 16659], [16531, 16532, 16661], [16531, 16661, 16660], [16532, 16533, 16661], [16533, 16662, 16661], [16533, 16534, 16663], [16533, 16663, 16662], [16534, 16535, 16663], [16535, 16664, 16663], [16535, 16536, 16665], [16535, 16665, 16664], [16536, 16537, 16665], [16537, 16666, 16665], [16537, 16538, 16667], [16537, 16667, 16666], [16538, 16539, 16667], [16539, 16668, 16667], [16539, 16540, 16669], [16539, 16669, 16668], [16540, 16541, 16669], [16541, 16670, 16669], [16541, 16542, 16671], [16541, 16671, 16670], [16542, 16543, 16671], [16543, 16672, 16671], [16543, 16544, 16673], [16543, 16673, 16672], [16544, 16545, 16673], [16545, 16674, 16673], [16545, 16546, 16675], [16545, 16675, 16674], [16546, 16547, 16675], [16547, 16676, 16675], [16547, 16548, 16677], [16547, 16677, 16676], [16548, 16549, 16677], [16549, 16678, 16677], [16549, 16550, 16679], [16549, 16679, 16678], [16550, 16551, 16679], [16551, 16680, 16679], [16551, 16552, 16681], [16551, 16681, 16680], [16552, 16553, 16681], [16553, 16682, 16681], [16553, 16554, 16683], [16553, 16683, 16682], [16554, 16555, 16683], [16555, 16684, 16683], [16555, 16556, 16685], [16555, 16685, 16684], [16556, 16557, 16685], [16557, 16686, 16685], [16558, 16559, 16687], [16559, 16688, 16687], [16559, 16560, 16689], [16559, 16689, 16688], [16560, 16561, 16689], [16561, 16690, 16689], [16561, 16562, 16691], [16561, 16691, 16690], [16562, 16563, 16691], [16563, 16692, 16691], [16563, 16564, 16693], [16563, 16693, 16692], [16564, 16565, 16693], [16565, 16694, 16693], [16565, 16566, 16695], [16565, 16695, 16694], [16566, 16567, 16695], [16567, 16696, 16695], [16567, 16568, 16697], [16567, 16697, 16696], [16568, 16569, 16697], [16569, 16698, 16697], [16569, 16570, 16699], [16569, 16699, 16698], [16570, 16571, 16699], [16571, 16700, 16699], [16571, 16572, 16701], [16571, 16701, 16700], [16572, 16573, 16701], [16573, 16702, 16701], [16573, 16574, 16703], [16573, 16703, 16702], [16574, 16575, 16703], [16575, 16704, 16703], [16575, 16576, 16705], [16575, 16705, 16704], [16576, 16577, 16705], [16577, 16706, 16705], [16577, 16578, 16707], [16577, 16707, 16706], [16578, 16579, 16707], [16579, 16708, 16707], [16579, 16580, 16709], [16579, 16709, 16708], [16580, 16581, 16709], [16581, 16710, 16709], [16581, 16582, 16711], [16581, 16711, 16710], [16582, 16583, 16711], [16583, 16712, 16711], [16583, 16584, 16713], [16583, 16713, 16712], [16584, 16585, 16713], [16585, 16714, 16713], [16585, 16586, 16715], [16585, 16715, 16714], [16586, 16587, 16715], [16587, 16716, 16715], [16587, 
16588, 16717], [16587, 16717, 16716], [16588, 16589, 16717], [16589, 16718, 16717], [16589, 16590, 16719], [16589, 16719, 16718], [16590, 16591, 16719], [16591, 16720, 16719], [16591, 16592, 16721], [16591, 16721, 16720], [16592, 16593, 16721], [16593, 16722, 16721], [16593, 16594, 16723], [16593, 16723, 16722], [16594, 16595, 16723], [16595, 16724, 16723], [16595, 16596, 16725], [16595, 16725, 16724], [16596, 16597, 16725], [16597, 16726, 16725], [16597, 16598, 16727], [16597, 16727, 16726], [16598, 16599, 16727], [16599, 16728, 16727], [16599, 16600, 16729], [16599, 16729, 16728], [16600, 16601, 16729], [16601, 16730, 16729], [16601, 16602, 16731], [16601, 16731, 16730], [16602, 16603, 16731], [16603, 16732, 16731], [16603, 16604, 16733], [16603, 16733, 16732], [16604, 16605, 16733], [16605, 16734, 16733], [16605, 16606, 16735], [16605, 16735, 16734], [16606, 16607, 16735], [16607, 16736, 16735], [16607, 16608, 16737], [16607, 16737, 16736], [16608, 16609, 16737], [16609, 16738, 16737], [16609, 16610, 16739], [16609, 16739, 16738], [16610, 16611, 16739], [16611, 16740, 16739], [16611, 16612, 16741], [16611, 16741, 16740], [16612, 16613, 16741], [16613, 16742, 16741], [16613, 16614, 16743], [16613, 16743, 16742], [16614, 16615, 16743], [16615, 16744, 16743], [16615, 16616, 16745], [16615, 16745, 16744], [16616, 16617, 16745], [16617, 16746, 16745], [16617, 16618, 16747], [16617, 16747, 16746], [16618, 16619, 16747], [16619, 16748, 16747], [16619, 16620, 16749], [16619, 16749, 16748], [16620, 16621, 16749], [16621, 16750, 16749], [16621, 16622, 16751], [16621, 16751, 16750], [16622, 16623, 16751], [16623, 16752, 16751], [16623, 16624, 16753], [16623, 16753, 16752], [16624, 16625, 16753], [16625, 16754, 16753], [16625, 16626, 16755], [16625, 16755, 16754], [16626, 16627, 16755], [16627, 16756, 16755], [16627, 16628, 16757], [16627, 16757, 16756], [16628, 16629, 16757], [16629, 16758, 16757], [16629, 16630, 16759], [16629, 16759, 16758], [16630, 16631, 16759], [16631, 16760, 16759], [16631, 16632, 16761], [16631, 16761, 16760], [16632, 16633, 16761], [16633, 16762, 16761], [16633, 16634, 16763], [16633, 16763, 16762], [16634, 16635, 16763], [16635, 16764, 16763], [16635, 16636, 16765], [16635, 16765, 16764], [16636, 16637, 16765], [16637, 16766, 16765], [16637, 16638, 16767], [16637, 16767, 16766], [16638, 16639, 16767], [16639, 16768, 16767], [16639, 16640, 16769], [16639, 16769, 16768], [16640, 16641, 16769], [16641, 16770, 16769], [16641, 16642, 16771], [16641, 16771, 16770], [16642, 16643, 16771], [16643, 16772, 16771], [16643, 16644, 16773], [16643, 16773, 16772], [16644, 16645, 16773], [16645, 16774, 16773], [16645, 16646, 16775], [16645, 16775, 16774], [16646, 16647, 16775], [16647, 16776, 16775], [16647, 16648, 16777], [16647, 16777, 16776], [16648, 16649, 16777], [16649, 16778, 16777], [16649, 16650, 16779], [16649, 16779, 16778], [16650, 16651, 16779], [16651, 16780, 16779], [16651, 16652, 16781], [16651, 16781, 16780], [16652, 16653, 16781], [16653, 16782, 16781], [16653, 16654, 16783], [16653, 16783, 16782], [16654, 16655, 16783], [16655, 16784, 16783], [16655, 16656, 16785], [16655, 16785, 16784], [16656, 16657, 16785], [16657, 16786, 16785], [16657, 16658, 16787], [16657, 16787, 16786], [16658, 16659, 16787], [16659, 16788, 16787], [16659, 16660, 16789], [16659, 16789, 16788], [16660, 16661, 16789], [16661, 16790, 16789], [16661, 16662, 16791], [16661, 16791, 16790], [16662, 16663, 16791], [16663, 16792, 16791], [16663, 16664, 16793], [16663, 16793, 16792], [16664, 16665, 
16793], [16665, 16794, 16793], [16665, 16666, 16795], [16665, 16795, 16794], [16666, 16667, 16795], [16667, 16796, 16795], [16667, 16668, 16797], [16667, 16797, 16796], [16668, 16669, 16797], [16669, 16798, 16797], [16669, 16670, 16799], [16669, 16799, 16798], [16670, 16671, 16799], [16671, 16800, 16799], [16671, 16672, 16801], [16671, 16801, 16800], [16672, 16673, 16801], [16673, 16802, 16801], [16673, 16674, 16803], [16673, 16803, 16802], [16674, 16675, 16803], [16675, 16804, 16803], [16675, 16676, 16805], [16675, 16805, 16804], [16676, 16677, 16805], [16677, 16806, 16805], [16677, 16678, 16807], [16677, 16807, 16806], [16678, 16679, 16807], [16679, 16808, 16807], [16679, 16680, 16809], [16679, 16809, 16808], [16680, 16681, 16809], [16681, 16810, 16809], [16681, 16682, 16811], [16681, 16811, 16810], [16682, 16683, 16811], [16683, 16812, 16811], [16683, 16684, 16813], [16683, 16813, 16812], [16684, 16685, 16813], [16685, 16814, 16813], [16685, 16686, 16815], [16685, 16815, 16814], [16687, 16688, 16817], [16687, 16817, 16816], [16688, 16689, 16817], [16689, 16818, 16817], [16689, 16690, 16819], [16689, 16819, 16818], [16690, 16691, 16819], [16691, 16820, 16819], [16691, 16692, 16821], [16691, 16821, 16820], [16692, 16693, 16821], [16693, 16822, 16821], [16693, 16694, 16823], [16693, 16823, 16822], [16694, 16695, 16823], [16695, 16824, 16823], [16695, 16696, 16825], [16695, 16825, 16824], [16696, 16697, 16825], [16697, 16826, 16825], [16697, 16698, 16827], [16697, 16827, 16826], [16698, 16699, 16827], [16699, 16828, 16827], [16699, 16700, 16829], [16699, 16829, 16828], [16700, 16701, 16829], [16701, 16830, 16829], [16701, 16702, 16831], [16701, 16831, 16830], [16702, 16703, 16831], [16703, 16832, 16831], [16703, 16704, 16833], [16703, 16833, 16832], [16704, 16705, 16833], [16705, 16834, 16833], [16705, 16706, 16835], [16705, 16835, 16834], [16706, 16707, 16835], [16707, 16836, 16835], [16707, 16708, 16837], [16707, 16837, 16836], [16708, 16709, 16837], [16709, 16838, 16837], [16709, 16710, 16839], [16709, 16839, 16838], [16710, 16711, 16839], [16711, 16840, 16839], [16711, 16712, 16841], [16711, 16841, 16840], [16712, 16713, 16841], [16713, 16842, 16841], [16713, 16714, 16843], [16713, 16843, 16842], [16714, 16715, 16843], [16715, 16844, 16843], [16715, 16716, 16845], [16715, 16845, 16844], [16716, 16717, 16845], [16717, 16846, 16845], [16717, 16718, 16847], [16717, 16847, 16846], [16718, 16719, 16847], [16719, 16848, 16847], [16719, 16720, 16849], [16719, 16849, 16848], [16720, 16721, 16849], [16721, 16850, 16849], [16721, 16722, 16851], [16721, 16851, 16850], [16722, 16723, 16851], [16723, 16852, 16851], [16723, 16724, 16853], [16723, 16853, 16852], [16724, 16725, 16853], [16725, 16854, 16853], [16725, 16726, 16855], [16725, 16855, 16854], [16726, 16727, 16855], [16727, 16856, 16855], [16727, 16728, 16857], [16727, 16857, 16856], [16728, 16729, 16857], [16729, 16858, 16857], [16729, 16730, 16859], [16729, 16859, 16858], [16730, 16731, 16859], [16731, 16860, 16859], [16731, 16732, 16861], [16731, 16861, 16860], [16732, 16733, 16861], [16733, 16862, 16861], [16733, 16734, 16863], [16733, 16863, 16862], [16734, 16735, 16863], [16735, 16864, 16863], [16735, 16736, 16865], [16735, 16865, 16864], [16736, 16737, 16865], [16737, 16866, 16865], [16737, 16738, 16867], [16737, 16867, 16866], [16738, 16739, 16867], [16739, 16868, 16867], [16739, 16740, 16869], [16739, 16869, 16868], [16740, 16741, 16869], [16741, 16870, 16869], [16741, 16742, 16871], [16741, 16871, 16870], [16742, 16743, 16871], 
[16743, 16872, 16871], [16743, 16744, 16873], [16743, 16873, 16872], [16744, 16745, 16873], [16745, 16874, 16873], [16745, 16746, 16875], [16745, 16875, 16874], [16746, 16747, 16875], [16747, 16876, 16875], [16747, 16748, 16877], [16747, 16877, 16876], [16748, 16749, 16877], [16749, 16878, 16877], [16749, 16750, 16879], [16749, 16879, 16878], [16750, 16751, 16879], [16751, 16880, 16879], [16751, 16752, 16881], [16751, 16881, 16880], [16752, 16753, 16881], [16753, 16882, 16881], [16753, 16754, 16883], [16753, 16883, 16882], [16754, 16755, 16883], [16755, 16884, 16883], [16755, 16756, 16885], [16755, 16885, 16884], [16756, 16757, 16885], [16757, 16886, 16885], [16757, 16758, 16887], [16757, 16887, 16886], [16758, 16759, 16887], [16759, 16888, 16887], [16759, 16760, 16889], [16759, 16889, 16888], [16760, 16761, 16889], [16761, 16890, 16889], [16761, 16762, 16891], [16761, 16891, 16890], [16762, 16763, 16891], [16763, 16892, 16891], [16763, 16764, 16893], [16763, 16893, 16892], [16764, 16765, 16893], [16765, 16894, 16893], [16765, 16766, 16895], [16765, 16895, 16894], [16766, 16767, 16895], [16767, 16896, 16895], [16767, 16768, 16897], [16767, 16897, 16896], [16768, 16769, 16897], [16769, 16898, 16897], [16769, 16770, 16899], [16769, 16899, 16898], [16770, 16771, 16899], [16771, 16900, 16899], [16771, 16772, 16901], [16771, 16901, 16900], [16772, 16773, 16901], [16773, 16902, 16901], [16773, 16774, 16903], [16773, 16903, 16902], [16774, 16775, 16903], [16775, 16904, 16903], [16775, 16776, 16905], [16775, 16905, 16904], [16776, 16777, 16905], [16777, 16906, 16905], [16777, 16778, 16907], [16777, 16907, 16906], [16778, 16779, 16907], [16779, 16908, 16907], [16779, 16780, 16909], [16779, 16909, 16908], [16780, 16781, 16909], [16781, 16910, 16909], [16781, 16782, 16911], [16781, 16911, 16910], [16782, 16783, 16911], [16783, 16912, 16911], [16783, 16784, 16913], [16783, 16913, 16912], [16784, 16785, 16913], [16785, 16914, 16913], [16785, 16786, 16915], [16785, 16915, 16914], [16786, 16787, 16915], [16787, 16916, 16915], [16787, 16788, 16917], [16787, 16917, 16916], [16788, 16789, 16917], [16789, 16918, 16917], [16789, 16790, 16919], [16789, 16919, 16918], [16790, 16791, 16919], [16791, 16920, 16919], [16791, 16792, 16921], [16791, 16921, 16920], [16792, 16793, 16921], [16793, 16922, 16921], [16793, 16794, 16923], [16793, 16923, 16922], [16794, 16795, 16923], [16795, 16924, 16923], [16795, 16796, 16925], [16795, 16925, 16924], [16796, 16797, 16925], [16797, 16926, 16925], [16797, 16798, 16927], [16797, 16927, 16926], [16798, 16799, 16927], [16799, 16928, 16927], [16799, 16800, 16929], [16799, 16929, 16928], [16800, 16801, 16929], [16801, 16930, 16929], [16801, 16802, 16931], [16801, 16931, 16930], [16802, 16803, 16931], [16803, 16932, 16931], [16803, 16804, 16933], [16803, 16933, 16932], [16804, 16805, 16933], [16805, 16934, 16933], [16805, 16806, 16935], [16805, 16935, 16934], [16806, 16807, 16935], [16807, 16936, 16935], [16807, 16808, 16937], [16807, 16937, 16936], [16808, 16809, 16937], [16809, 16938, 16937], [16809, 16810, 16939], [16809, 16939, 16938], [16810, 16811, 16939], [16811, 16940, 16939], [16811, 16812, 16941], [16811, 16941, 16940], [16812, 16813, 16941], [16813, 16942, 16941], [16813, 16814, 16943], [16813, 16943, 16942], [16814, 16815, 16943], [16815, 16944, 16943], [16816, 16817, 16945], [16817, 16946, 16945], [16817, 16818, 16947], [16817, 16947, 16946], [16818, 16819, 16947], [16819, 16948, 16947], [16819, 16820, 16949], [16819, 16949, 16948], [16820, 16821, 16949], [16821, 
16950, 16949], [16821, 16822, 16951], [16821, 16951, 16950], [16822, 16823, 16951], [16823, 16952, 16951], [16823, 16824, 16953], [16823, 16953, 16952], [16824, 16825, 16953], [16825, 16954, 16953], [16825, 16826, 16955], [16825, 16955, 16954], [16826, 16827, 16955], [16827, 16956, 16955], [16827, 16828, 16957], [16827, 16957, 16956], [16828, 16829, 16957], [16829, 16958, 16957], [16829, 16830, 16959], [16829, 16959, 16958], [16830, 16831, 16959], [16831, 16960, 16959], [16831, 16832, 16961], [16831, 16961, 16960], [16832, 16833, 16961], [16833, 16962, 16961], [16833, 16834, 16963], [16833, 16963, 16962], [16834, 16835, 16963], [16835, 16964, 16963], [16835, 16836, 16965], [16835, 16965, 16964], [16836, 16837, 16965], [16837, 16966, 16965], [16837, 16838, 16967], [16837, 16967, 16966], [16838, 16839, 16967], [16839, 16968, 16967], [16839, 16840, 16969], [16839, 16969, 16968], [16840, 16841, 16969], [16841, 16970, 16969], [16841, 16842, 16971], [16841, 16971, 16970], [16842, 16843, 16971], [16843, 16972, 16971], [16843, 16844, 16973], [16843, 16973, 16972], [16844, 16845, 16973], [16845, 16974, 16973], [16845, 16846, 16975], [16845, 16975, 16974], [16846, 16847, 16975], [16847, 16976, 16975], [16847, 16848, 16977], [16847, 16977, 16976], [16848, 16849, 16977], [16849, 16978, 16977], [16849, 16850, 16979], [16849, 16979, 16978], [16850, 16851, 16979], [16851, 16980, 16979], [16851, 16852, 16981], [16851, 16981, 16980], [16852, 16853, 16981], [16853, 16982, 16981], [16853, 16854, 16983], [16853, 16983, 16982], [16854, 16855, 16983], [16855, 16984, 16983], [16855, 16856, 16985], [16855, 16985, 16984], [16856, 16857, 16985], [16857, 16986, 16985], [16857, 16858, 16987], [16857, 16987, 16986], [16858, 16859, 16987], [16859, 16988, 16987], [16859, 16860, 16989], [16859, 16989, 16988], [16860, 16861, 16989], [16861, 16990, 16989], [16861, 16862, 16991], [16861, 16991, 16990], [16862, 16863, 16991], [16863, 16992, 16991], [16863, 16864, 16993], [16863, 16993, 16992], [16864, 16865, 16993], [16865, 16994, 16993], [16865, 16866, 16995], [16865, 16995, 16994], [16866, 16867, 16995], [16867, 16996, 16995], [16867, 16868, 16997], [16867, 16997, 16996], [16868, 16869, 16997], [16869, 16998, 16997], [16869, 16870, 16999], [16869, 16999, 16998], [16870, 16871, 16999], [16871, 17000, 16999], [16871, 16872, 17001], [16871, 17001, 17000], [16872, 16873, 17001], [16873, 17002, 17001], [16873, 16874, 17003], [16873, 17003, 17002], [16874, 16875, 17003], [16875, 17004, 17003], [16875, 16876, 17005], [16875, 17005, 17004], [16876, 16877, 17005], [16877, 17006, 17005], [16877, 16878, 17007], [16877, 17007, 17006], [16878, 16879, 17007], [16879, 17008, 17007], [16879, 16880, 17009], [16879, 17009, 17008], [16880, 16881, 17009], [16881, 17010, 17009], [16881, 16882, 17011], [16881, 17011, 17010], [16882, 16883, 17011], [16883, 17012, 17011], [16883, 16884, 17013], [16883, 17013, 17012], [16884, 16885, 17013], [16885, 17014, 17013], [16885, 16886, 17015], [16885, 17015, 17014], [16886, 16887, 17015], [16887, 17016, 17015], [16887, 16888, 17017], [16887, 17017, 17016], [16888, 16889, 17017], [16889, 17018, 17017], [16889, 16890, 17019], [16889, 17019, 17018], [16890, 16891, 17019], [16891, 17020, 17019], [16891, 16892, 17021], [16891, 17021, 17020], [16892, 16893, 17021], [16893, 17022, 17021], [16893, 16894, 17023], [16893, 17023, 17022], [16894, 16895, 17023], [16895, 17024, 17023], [16895, 16896, 17025], [16895, 17025, 17024], [16896, 16897, 17025], [16897, 17026, 17025], [16897, 16898, 17027], [16897, 17027, 
17026], [16898, 16899, 17027], [16899, 17028, 17027], [16899, 16900, 17029], [16899, 17029, 17028], [16900, 16901, 17029], [16901, 17030, 17029], [16901, 16902, 17031], [16901, 17031, 17030], [16902, 16903, 17031], [16903, 17032, 17031], [16903, 16904, 17033], [16903, 17033, 17032], [16904, 16905, 17033], [16905, 17034, 17033], [16905, 16906, 17035], [16905, 17035, 17034], [16906, 16907, 17035], [16907, 17036, 17035], [16907, 16908, 17037], [16907, 17037, 17036], [16908, 16909, 17037], [16909, 17038, 17037], [16909, 16910, 17039], [16909, 17039, 17038], [16910, 16911, 17039], [16911, 17040, 17039], [16911, 16912, 17041], [16911, 17041, 17040], [16912, 16913, 17041], [16913, 17042, 17041], [16913, 16914, 17043], [16913, 17043, 17042], [16914, 16915, 17043], [16915, 17044, 17043], [16915, 16916, 17045], [16915, 17045, 17044], [16916, 16917, 17045], [16917, 17046, 17045], [16917, 16918, 17047], [16917, 17047, 17046], [16918, 16919, 17047], [16919, 17048, 17047], [16919, 16920, 17049], [16919, 17049, 17048], [16920, 16921, 17049], [16921, 17050, 17049], [16921, 16922, 17051], [16921, 17051, 17050], [16922, 16923, 17051], [16923, 17052, 17051], [16923, 16924, 17053], [16923, 17053, 17052], [16924, 16925, 17053], [16925, 17054, 17053], [16925, 16926, 17055], [16925, 17055, 17054], [16926, 16927, 17055], [16927, 17056, 17055], [16927, 16928, 17057], [16927, 17057, 17056], [16928, 16929, 17057], [16929, 17058, 17057], [16929, 16930, 17059], [16929, 17059, 17058], [16930, 16931, 17059], [16931, 17060, 17059], [16931, 16932, 17061], [16931, 17061, 17060], [16932, 16933, 17061], [16933, 17062, 17061], [16933, 16934, 17063], [16933, 17063, 17062], [16934, 16935, 17063], [16935, 17064, 17063], [16935, 16936, 17065], [16935, 17065, 17064], [16936, 16937, 17065], [16937, 17066, 17065], [16937, 16938, 17067], [16937, 17067, 17066], [16938, 16939, 17067], [16939, 17068, 17067], [16939, 16940, 17069], [16939, 17069, 17068], [16940, 16941, 17069], [16941, 17070, 17069], [16941, 16942, 17071], [16941, 17071, 17070], [16942, 16943, 17071], [16943, 17072, 17071], [16943, 16944, 17073], [16943, 17073, 17072], [16945, 16946, 17075], [16945, 17075, 17074], [16946, 16947, 17075], [16947, 17076, 17075], [16947, 16948, 17077], [16947, 17077, 17076], [16948, 16949, 17077], [16949, 17078, 17077], [16949, 16950, 17079], [16949, 17079, 17078], [16950, 16951, 17079], [16951, 17080, 17079], [16951, 16952, 17081], [16951, 17081, 17080], [16952, 16953, 17081], [16953, 17082, 17081], [16953, 16954, 17083], [16953, 17083, 17082], [16954, 16955, 17083], [16955, 17084, 17083], [16955, 16956, 17085], [16955, 17085, 17084], [16956, 16957, 17085], [16957, 17086, 17085], [16957, 16958, 17087], [16957, 17087, 17086], [16958, 16959, 17087], [16959, 17088, 17087], [16959, 16960, 17089], [16959, 17089, 17088], [16960, 16961, 17089], [16961, 17090, 17089], [16961, 16962, 17091], [16961, 17091, 17090], [16962, 16963, 17091], [16963, 17092, 17091], [16963, 16964, 17093], [16963, 17093, 17092], [16964, 16965, 17093], [16965, 17094, 17093], [16965, 16966, 17095], [16965, 17095, 17094], [16966, 16967, 17095], [16967, 17096, 17095], [16967, 16968, 17097], [16967, 17097, 17096], [16968, 16969, 17097], [16969, 17098, 17097], [16969, 16970, 17099], [16969, 17099, 17098], [16970, 16971, 17099], [16971, 17100, 17099], [16971, 16972, 17101], [16971, 17101, 17100], [16972, 16973, 17101], [16973, 17102, 17101], [16973, 16974, 17103], [16973, 17103, 17102], [16974, 16975, 17103], [16975, 17104, 17103], [16975, 16976, 17105], [16975, 17105, 17104], 
[16976, 16977, 17105], [16977, 17106, 17105], [16977, 16978, 17107], [16977, 17107, 17106], [16978, 16979, 17107], [16979, 17108, 17107], [16979, 16980, 17109], [16979, 17109, 17108], [16980, 16981, 17109], [16981, 17110, 17109], [16981, 16982, 17111], [16981, 17111, 17110], [16982, 16983, 17111], [16983, 17112, 17111], [16983, 16984, 17113], [16983, 17113, 17112], [16984, 16985, 17113], [16985, 17114, 17113], [16985, 16986, 17115], [16985, 17115, 17114], [16986, 16987, 17115], [16987, 17116, 17115], [16987, 16988, 17117], [16987, 17117, 17116], [16988, 16989, 17117], [16989, 17118, 17117], [16989, 16990, 17119], [16989, 17119, 17118], [16990, 16991, 17119], [16991, 17120, 17119], [16991, 16992, 17121], [16991, 17121, 17120], [16992, 16993, 17121], [16993, 17122, 17121], [16993, 16994, 17123], [16993, 17123, 17122], [16994, 16995, 17123], [16995, 17124, 17123], [16995, 16996, 17125], [16995, 17125, 17124], [16996, 16997, 17125], [16997, 17126, 17125], [16997, 16998, 17127], [16997, 17127, 17126], [16998, 16999, 17127], [16999, 17128, 17127], [16999, 17000, 17129], [16999, 17129, 17128], [17000, 17001, 17129], [17001, 17130, 17129], [17001, 17002, 17131], [17001, 17131, 17130], [17002, 17003, 17131], [17003, 17132, 17131], [17003, 17004, 17133], [17003, 17133, 17132], [17004, 17005, 17133], [17005, 17134, 17133], [17005, 17006, 17135], [17005, 17135, 17134], [17006, 17007, 17135], [17007, 17136, 17135], [17007, 17008, 17137], [17007, 17137, 17136], [17008, 17009, 17137], [17009, 17138, 17137], [17009, 17010, 17139], [17009, 17139, 17138], [17010, 17011, 17139], [17011, 17140, 17139], [17011, 17012, 17141], [17011, 17141, 17140], [17012, 17013, 17141], [17013, 17142, 17141], [17013, 17014, 17143], [17013, 17143, 17142], [17014, 17015, 17143], [17015, 17144, 17143], [17015, 17016, 17145], [17015, 17145, 17144], [17016, 17017, 17145], [17017, 17146, 17145], [17017, 17018, 17147], [17017, 17147, 17146], [17018, 17019, 17147], [17019, 17148, 17147], [17019, 17020, 17149], [17019, 17149, 17148], [17020, 17021, 17149], [17021, 17150, 17149], [17021, 17022, 17151], [17021, 17151, 17150], [17022, 17023, 17151], [17023, 17152, 17151], [17023, 17024, 17153], [17023, 17153, 17152], [17024, 17025, 17153], [17025, 17154, 17153], [17025, 17026, 17155], [17025, 17155, 17154], [17026, 17027, 17155], [17027, 17156, 17155], [17027, 17028, 17157], [17027, 17157, 17156], [17028, 17029, 17157], [17029, 17158, 17157], [17029, 17030, 17159], [17029, 17159, 17158], [17030, 17031, 17159], [17031, 17160, 17159], [17031, 17032, 17161], [17031, 17161, 17160], [17032, 17033, 17161], [17033, 17162, 17161], [17033, 17034, 17163], [17033, 17163, 17162], [17034, 17035, 17163], [17035, 17164, 17163], [17035, 17036, 17165], [17035, 17165, 17164], [17036, 17037, 17165], [17037, 17166, 17165], [17037, 17038, 17167], [17037, 17167, 17166], [17038, 17039, 17167], [17039, 17168, 17167], [17039, 17040, 17169], [17039, 17169, 17168], [17040, 17041, 17169], [17041, 17170, 17169], [17041, 17042, 17171], [17041, 17171, 17170], [17042, 17043, 17171], [17043, 17172, 17171], [17043, 17044, 17173], [17043, 17173, 17172], [17044, 17045, 17173], [17045, 17174, 17173], [17045, 17046, 17175], [17045, 17175, 17174], [17046, 17047, 17175], [17047, 17176, 17175], [17047, 17048, 17177], [17047, 17177, 17176], [17048, 17049, 17177], [17049, 17178, 17177], [17049, 17050, 17179], [17049, 17179, 17178], [17050, 17051, 17179], [17051, 17180, 17179], [17051, 17052, 17181], [17051, 17181, 17180], [17052, 17053, 17181], [17053, 17182, 17181], [17053, 
17054, 17183], [17053, 17183, 17182], [17054, 17055, 17183], [17055, 17184, 17183], [17055, 17056, 17185], [17055, 17185, 17184], [17056, 17057, 17185], [17057, 17186, 17185], [17057, 17058, 17187], [17057, 17187, 17186], [17058, 17059, 17187], [17059, 17188, 17187], [17059, 17060, 17189], [17059, 17189, 17188], [17060, 17061, 17189], [17061, 17190, 17189], [17061, 17062, 17191], [17061, 17191, 17190], [17062, 17063, 17191], [17063, 17192, 17191], [17063, 17064, 17193], [17063, 17193, 17192], [17064, 17065, 17193], [17065, 17194, 17193], [17065, 17066, 17195], [17065, 17195, 17194], [17066, 17067, 17195], [17067, 17196, 17195], [17067, 17068, 17197], [17067, 17197, 17196], [17068, 17069, 17197], [17069, 17198, 17197], [17069, 17070, 17199], [17069, 17199, 17198], [17070, 17071, 17199], [17071, 17200, 17199], [17071, 17072, 17201], [17071, 17201, 17200], [17072, 17073, 17201], [17073, 17202, 17201], [17074, 17075, 17203], [17075, 17204, 17203], [17075, 17076, 17205], [17075, 17205, 17204], [17076, 17077, 17205], [17077, 17206, 17205], [17077, 17078, 17207], [17077, 17207, 17206], [17078, 17079, 17207], [17079, 17208, 17207], [17079, 17080, 17209], [17079, 17209, 17208], [17080, 17081, 17209], [17081, 17210, 17209], [17081, 17082, 17211], [17081, 17211, 17210], [17082, 17083, 17211], [17083, 17212, 17211], [17083, 17084, 17213], [17083, 17213, 17212], [17084, 17085, 17213], [17085, 17214, 17213], [17085, 17086, 17215], [17085, 17215, 17214], [17086, 17087, 17215], [17087, 17216, 17215], [17087, 17088, 17217], [17087, 17217, 17216], [17088, 17089, 17217], [17089, 17218, 17217], [17089, 17090, 17219], [17089, 17219, 17218], [17090, 17091, 17219], [17091, 17220, 17219], [17091, 17092, 17221], [17091, 17221, 17220], [17092, 17093, 17221], [17093, 17222, 17221], [17093, 17094, 17223], [17093, 17223, 17222], [17094, 17095, 17223], [17095, 17224, 17223], [17095, 17096, 17225], [17095, 17225, 17224], [17096, 17097, 17225], [17097, 17226, 17225], [17097, 17098, 17227], [17097, 17227, 17226], [17098, 17099, 17227], [17099, 17228, 17227], [17099, 17100, 17229], [17099, 17229, 17228], [17100, 17101, 17229], [17101, 17230, 17229], [17101, 17102, 17231], [17101, 17231, 17230], [17102, 17103, 17231], [17103, 17232, 17231], [17103, 17104, 17233], [17103, 17233, 17232], [17104, 17105, 17233], [17105, 17234, 17233], [17105, 17106, 17235], [17105, 17235, 17234], [17106, 17107, 17235], [17107, 17236, 17235], [17107, 17108, 17237], [17107, 17237, 17236], [17108, 17109, 17237], [17109, 17238, 17237], [17109, 17110, 17239], [17109, 17239, 17238], [17110, 17111, 17239], [17111, 17240, 17239], [17111, 17112, 17241], [17111, 17241, 17240], [17112, 17113, 17241], [17113, 17242, 17241], [17113, 17114, 17243], [17113, 17243, 17242], [17114, 17115, 17243], [17115, 17244, 17243], [17115, 17116, 17245], [17115, 17245, 17244], [17116, 17117, 17245], [17117, 17246, 17245], [17117, 17118, 17247], [17117, 17247, 17246], [17118, 17119, 17247], [17119, 17248, 17247], [17119, 17120, 17249], [17119, 17249, 17248], [17120, 17121, 17249], [17121, 17250, 17249], [17121, 17122, 17251], [17121, 17251, 17250], [17122, 17123, 17251], [17123, 17252, 17251], [17123, 17124, 17253], [17123, 17253, 17252], [17124, 17125, 17253], [17125, 17254, 17253], [17125, 17126, 17255], [17125, 17255, 17254], [17126, 17127, 17255], [17127, 17256, 17255], [17127, 17128, 17257], [17127, 17257, 17256], [17128, 17129, 17257], [17129, 17258, 17257], [17129, 17130, 17259], [17129, 17259, 17258], [17130, 17131, 17259], [17131, 17260, 17259], [17131, 17132, 
17261], [17131, 17261, 17260], [17132, 17133, 17261], [17133, 17262, 17261], [17133, 17134, 17263], [17133, 17263, 17262], [17134, 17135, 17263], [17135, 17264, 17263], [17135, 17136, 17265], [17135, 17265, 17264], [17136, 17137, 17265], [17137, 17266, 17265], [17137, 17138, 17267], [17137, 17267, 17266], [17138, 17139, 17267], [17139, 17268, 17267], [17139, 17140, 17269], [17139, 17269, 17268], [17140, 17141, 17269], [17141, 17270, 17269], [17141, 17142, 17271], [17141, 17271, 17270], [17142, 17143, 17271], [17143, 17272, 17271], [17143, 17144, 17273], [17143, 17273, 17272], [17144, 17145, 17273], [17145, 17274, 17273], [17145, 17146, 17275], [17145, 17275, 17274], [17146, 17147, 17275], [17147, 17276, 17275], [17147, 17148, 17277], [17147, 17277, 17276], [17148, 17149, 17277], [17149, 17278, 17277], [17149, 17150, 17279], [17149, 17279, 17278], [17150, 17151, 17279], [17151, 17280, 17279], [17151, 17152, 17281], [17151, 17281, 17280], [17152, 17153, 17281], [17153, 17282, 17281], [17153, 17154, 17283], [17153, 17283, 17282], [17154, 17155, 17283], [17155, 17284, 17283], [17155, 17156, 17285], [17155, 17285, 17284], [17156, 17157, 17285], [17157, 17286, 17285], [17157, 17158, 17287], [17157, 17287, 17286], [17158, 17159, 17287], [17159, 17288, 17287], [17159, 17160, 17289], [17159, 17289, 17288], [17160, 17161, 17289], [17161, 17290, 17289], [17161, 17162, 17291], [17161, 17291, 17290], [17162, 17163, 17291], [17163, 17292, 17291], [17163, 17164, 17293], [17163, 17293, 17292], [17164, 17165, 17293], [17165, 17294, 17293], [17165, 17166, 17295], [17165, 17295, 17294], [17166, 17167, 17295], [17167, 17296, 17295], [17167, 17168, 17297], [17167, 17297, 17296], [17168, 17169, 17297], [17169, 17298, 17297], [17169, 17170, 17299], [17169, 17299, 17298], [17170, 17171, 17299], [17171, 17300, 17299], [17171, 17172, 17301], [17171, 17301, 17300], [17172, 17173, 17301], [17173, 17302, 17301], [17173, 17174, 17303], [17173, 17303, 17302], [17174, 17175, 17303], [17175, 17304, 17303], [17175, 17176, 17305], [17175, 17305, 17304], [17176, 17177, 17305], [17177, 17306, 17305], [17177, 17178, 17307], [17177, 17307, 17306], [17178, 17179, 17307], [17179, 17308, 17307], [17179, 17180, 17309], [17179, 17309, 17308], [17180, 17181, 17309], [17181, 17310, 17309], [17181, 17182, 17311], [17181, 17311, 17310], [17182, 17183, 17311], [17183, 17312, 17311], [17183, 17184, 17313], [17183, 17313, 17312], [17184, 17185, 17313], [17185, 17314, 17313], [17185, 17186, 17315], [17185, 17315, 17314], [17186, 17187, 17315], [17187, 17316, 17315], [17187, 17188, 17317], [17187, 17317, 17316], [17188, 17189, 17317], [17189, 17318, 17317], [17189, 17190, 17319], [17189, 17319, 17318], [17190, 17191, 17319], [17191, 17320, 17319], [17191, 17192, 17321], [17191, 17321, 17320], [17192, 17193, 17321], [17193, 17322, 17321], [17193, 17194, 17323], [17193, 17323, 17322], [17194, 17195, 17323], [17195, 17324, 17323], [17195, 17196, 17325], [17195, 17325, 17324], [17196, 17197, 17325], [17197, 17326, 17325], [17197, 17198, 17327], [17197, 17327, 17326], [17198, 17199, 17327], [17199, 17328, 17327], [17199, 17200, 17329], [17199, 17329, 17328], [17200, 17201, 17329], [17201, 17330, 17329], [17201, 17202, 17331], [17201, 17331, 17330], [17203, 17204, 17333], [17203, 17333, 17332], [17204, 17205, 17333], [17205, 17334, 17333], [17205, 17206, 17335], [17205, 17335, 17334], [17206, 17207, 17335], [17207, 17336, 17335], [17207, 17208, 17337], [17207, 17337, 17336], [17208, 17209, 17337], [17209, 17338, 17337], [17209, 17210, 17339], 
[17209, 17339, 17338], [17210, 17211, 17339], [17211, 17340, 17339], [17211, 17212, 17341], [17211, 17341, 17340], [17212, 17213, 17341], [17213, 17342, 17341], [17213, 17214, 17343], [17213, 17343, 17342], [17214, 17215, 17343], [17215, 17344, 17343], [17215, 17216, 17345], [17215, 17345, 17344], [17216, 17217, 17345], [17217, 17346, 17345], [17217, 17218, 17347], [17217, 17347, 17346], [17218, 17219, 17347], [17219, 17348, 17347], [17219, 17220, 17349], [17219, 17349, 17348], [17220, 17221, 17349], [17221, 17350, 17349], [17221, 17222, 17351], [17221, 17351, 17350], [17222, 17223, 17351], [17223, 17352, 17351], [17223, 17224, 17353], [17223, 17353, 17352], [17224, 17225, 17353], [17225, 17354, 17353], [17225, 17226, 17355], [17225, 17355, 17354], [17226, 17227, 17355], [17227, 17356, 17355], [17227, 17228, 17357], [17227, 17357, 17356], [17228, 17229, 17357], [17229, 17358, 17357], [17229, 17230, 17359], [17229, 17359, 17358], [17230, 17231, 17359], [17231, 17360, 17359], [17231, 17232, 17361], [17231, 17361, 17360], [17232, 17233, 17361], [17233, 17362, 17361], [17233, 17234, 17363], [17233, 17363, 17362], [17234, 17235, 17363], [17235, 17364, 17363], [17235, 17236, 17365], [17235, 17365, 17364], [17236, 17237, 17365], [17237, 17366, 17365], [17237, 17238, 17367], [17237, 17367, 17366], [17238, 17239, 17367], [17239, 17368, 17367], [17239, 17240, 17369], [17239, 17369, 17368], [17240, 17241, 17369], [17241, 17370, 17369], [17241, 17242, 17371], [17241, 17371, 17370], [17242, 17243, 17371], [17243, 17372, 17371], [17243, 17244, 17373], [17243, 17373, 17372], [17244, 17245, 17373], [17245, 17374, 17373], [17245, 17246, 17375], [17245, 17375, 17374], [17246, 17247, 17375], [17247, 17376, 17375], [17247, 17248, 17377], [17247, 17377, 17376], [17248, 17249, 17377], [17249, 17378, 17377], [17249, 17250, 17379], [17249, 17379, 17378], [17250, 17251, 17379], [17251, 17380, 17379], [17251, 17252, 17381], [17251, 17381, 17380], [17252, 17253, 17381], [17253, 17382, 17381], [17253, 17254, 17383], [17253, 17383, 17382], [17254, 17255, 17383], [17255, 17384, 17383], [17255, 17256, 17385], [17255, 17385, 17384], [17256, 17257, 17385], [17257, 17386, 17385], [17257, 17258, 17387], [17257, 17387, 17386], [17258, 17259, 17387], [17259, 17388, 17387], [17259, 17260, 17389], [17259, 17389, 17388], [17260, 17261, 17389], [17261, 17390, 17389], [17261, 17262, 17391], [17261, 17391, 17390], [17262, 17263, 17391], [17263, 17392, 17391], [17263, 17264, 17393], [17263, 17393, 17392], [17264, 17265, 17393], [17265, 17394, 17393], [17265, 17266, 17395], [17265, 17395, 17394], [17266, 17267, 17395], [17267, 17396, 17395], [17267, 17268, 17397], [17267, 17397, 17396], [17268, 17269, 17397], [17269, 17398, 17397], [17269, 17270, 17399], [17269, 17399, 17398], [17270, 17271, 17399], [17271, 17400, 17399], [17271, 17272, 17401], [17271, 17401, 17400], [17272, 17273, 17401], [17273, 17402, 17401], [17273, 17274, 17403], [17273, 17403, 17402], [17274, 17275, 17403], [17275, 17404, 17403], [17275, 17276, 17405], [17275, 17405, 17404], [17276, 17277, 17405], [17277, 17406, 17405], [17277, 17278, 17407], [17277, 17407, 17406], [17278, 17279, 17407], [17279, 17408, 17407], [17279, 17280, 17409], [17279, 17409, 17408], [17280, 17281, 17409], [17281, 17410, 17409], [17281, 17282, 17411], [17281, 17411, 17410], [17282, 17283, 17411], [17283, 17412, 17411], [17283, 17284, 17413], [17283, 17413, 17412], [17284, 17285, 17413], [17285, 17414, 17413], [17285, 17286, 17415], [17285, 17415, 17414], [17286, 17287, 17415], [17287, 
17416, 17415], [17287, 17288, 17417], [17287, 17417, 17416], [17288, 17289, 17417], [17289, 17418, 17417], [17289, 17290, 17419], [17289, 17419, 17418], [17290, 17291, 17419], [17291, 17420, 17419], [17291, 17292, 17421], [17291, 17421, 17420], [17292, 17293, 17421], [17293, 17422, 17421], [17293, 17294, 17423], [17293, 17423, 17422], [17294, 17295, 17423], [17295, 17424, 17423], [17295, 17296, 17425], [17295, 17425, 17424], [17296, 17297, 17425], [17297, 17426, 17425], [17297, 17298, 17427], [17297, 17427, 17426], [17298, 17299, 17427], [17299, 17428, 17427], [17299, 17300, 17429], [17299, 17429, 17428], [17300, 17301, 17429], [17301, 17430, 17429], [17301, 17302, 17431], [17301, 17431, 17430], [17302, 17303, 17431], [17303, 17432, 17431], [17303, 17304, 17433], [17303, 17433, 17432], [17304, 17305, 17433], [17305, 17434, 17433], [17305, 17306, 17435], [17305, 17435, 17434], [17306, 17307, 17435], [17307, 17436, 17435], [17307, 17308, 17437], [17307, 17437, 17436], [17308, 17309, 17437], [17309, 17438, 17437], [17309, 17310, 17439], [17309, 17439, 17438], [17310, 17311, 17439], [17311, 17440, 17439], [17311, 17312, 17441], [17311, 17441, 17440], [17312, 17313, 17441], [17313, 17442, 17441], [17313, 17314, 17443], [17313, 17443, 17442], [17314, 17315, 17443], [17315, 17444, 17443], [17315, 17316, 17445], [17315, 17445, 17444], [17316, 17317, 17445], [17317, 17446, 17445], [17317, 17318, 17447], [17317, 17447, 17446], [17318, 17319, 17447], [17319, 17448, 17447], [17319, 17320, 17449], [17319, 17449, 17448], [17320, 17321, 17449], [17321, 17450, 17449], [17321, 17322, 17451], [17321, 17451, 17450], [17322, 17323, 17451], [17323, 17452, 17451], [17323, 17324, 17453], [17323, 17453, 17452], [17324, 17325, 17453], [17325, 17454, 17453], [17325, 17326, 17455], [17325, 17455, 17454], [17326, 17327, 17455], [17327, 17456, 17455], [17327, 17328, 17457], [17327, 17457, 17456], [17328, 17329, 17457], [17329, 17458, 17457], [17329, 17330, 17459], [17329, 17459, 17458], [17330, 17331, 17459], [17331, 17460, 17459], [17332, 17333, 17461], [17333, 17462, 17461], [17333, 17334, 17463], [17333, 17463, 17462], [17334, 17335, 17463], [17335, 17464, 17463], [17335, 17336, 17465], [17335, 17465, 17464], [17336, 17337, 17465], [17337, 17466, 17465], [17337, 17338, 17467], [17337, 17467, 17466], [17338, 17339, 17467], [17339, 17468, 17467], [17339, 17340, 17469], [17339, 17469, 17468], [17340, 17341, 17469], [17341, 17470, 17469], [17341, 17342, 17471], [17341, 17471, 17470], [17342, 17343, 17471], [17343, 17472, 17471], [17343, 17344, 17473], [17343, 17473, 17472], [17344, 17345, 17473], [17345, 17474, 17473], [17345, 17346, 17475], [17345, 17475, 17474], [17346, 17347, 17475], [17347, 17476, 17475], [17347, 17348, 17477], [17347, 17477, 17476], [17348, 17349, 17477], [17349, 17478, 17477], [17349, 17350, 17479], [17349, 17479, 17478], [17350, 17351, 17479], [17351, 17480, 17479], [17351, 17352, 17481], [17351, 17481, 17480], [17352, 17353, 17481], [17353, 17482, 17481], [17353, 17354, 17483], [17353, 17483, 17482], [17354, 17355, 17483], [17355, 17484, 17483], [17355, 17356, 17485], [17355, 17485, 17484], [17356, 17357, 17485], [17357, 17486, 17485], [17357, 17358, 17487], [17357, 17487, 17486], [17358, 17359, 17487], [17359, 17488, 17487], [17359, 17360, 17489], [17359, 17489, 17488], [17360, 17361, 17489], [17361, 17490, 17489], [17361, 17362, 17491], [17361, 17491, 17490], [17362, 17363, 17491], [17363, 17492, 17491], [17363, 17364, 17493], [17363, 17493, 17492], [17364, 17365, 17493], [17365, 17494, 
17493], [17365, 17366, 17495], [17365, 17495, 17494], [17366, 17367, 17495], [17367, 17496, 17495], [17367, 17368, 17497], [17367, 17497, 17496], [17368, 17369, 17497], [17369, 17498, 17497], [17369, 17370, 17499], [17369, 17499, 17498], [17370, 17371, 17499], [17371, 17500, 17499], [17371, 17372, 17501], [17371, 17501, 17500], [17372, 17373, 17501], [17373, 17502, 17501], [17373, 17374, 17503], [17373, 17503, 17502], [17374, 17375, 17503], [17375, 17504, 17503], [17375, 17376, 17505], [17375, 17505, 17504], [17376, 17377, 17505], [17377, 17506, 17505], [17377, 17378, 17507], [17377, 17507, 17506], [17378, 17379, 17507], [17379, 17508, 17507], [17379, 17380, 17509], [17379, 17509, 17508], [17380, 17381, 17509], [17381, 17510, 17509], [17381, 17382, 17511], [17381, 17511, 17510], [17382, 17383, 17511], [17383, 17512, 17511], [17383, 17384, 17513], [17383, 17513, 17512], [17384, 17385, 17513], [17385, 17514, 17513], [17385, 17386, 17515], [17385, 17515, 17514], [17386, 17387, 17515], [17387, 17516, 17515], [17387, 17388, 17517], [17387, 17517, 17516], [17388, 17389, 17517], [17389, 17518, 17517], [17389, 17390, 17519], [17389, 17519, 17518], [17390, 17391, 17519], [17391, 17520, 17519], [17391, 17392, 17521], [17391, 17521, 17520], [17392, 17393, 17521], [17393, 17522, 17521], [17393, 17394, 17523], [17393, 17523, 17522], [17394, 17395, 17523], [17395, 17524, 17523], [17395, 17396, 17525], [17395, 17525, 17524], [17396, 17397, 17525], [17397, 17526, 17525], [17397, 17398, 17527], [17397, 17527, 17526], [17398, 17399, 17527], [17399, 17528, 17527], [17399, 17400, 17529], [17399, 17529, 17528], [17400, 17401, 17529], [17401, 17530, 17529], [17401, 17402, 17531], [17401, 17531, 17530], [17402, 17403, 17531], [17403, 17532, 17531], [17403, 17404, 17533], [17403, 17533, 17532], [17404, 17405, 17533], [17405, 17534, 17533], [17405, 17406, 17535], [17405, 17535, 17534], [17406, 17407, 17535], [17407, 17536, 17535], [17407, 17408, 17537], [17407, 17537, 17536], [17408, 17409, 17537], [17409, 17538, 17537], [17409, 17410, 17539], [17409, 17539, 17538], [17410, 17411, 17539], [17411, 17540, 17539], [17411, 17412, 17541], [17411, 17541, 17540], [17412, 17413, 17541], [17413, 17542, 17541], [17413, 17414, 17543], [17413, 17543, 17542], [17414, 17415, 17543], [17415, 17544, 17543], [17415, 17416, 17545], [17415, 17545, 17544], [17416, 17417, 17545], [17417, 17546, 17545], [17417, 17418, 17547], [17417, 17547, 17546], [17418, 17419, 17547], [17419, 17548, 17547], [17419, 17420, 17549], [17419, 17549, 17548], [17420, 17421, 17549], [17421, 17550, 17549], [17421, 17422, 17551], [17421, 17551, 17550], [17422, 17423, 17551], [17423, 17552, 17551], [17423, 17424, 17553], [17423, 17553, 17552], [17424, 17425, 17553], [17425, 17554, 17553], [17425, 17426, 17555], [17425, 17555, 17554], [17426, 17427, 17555], [17427, 17556, 17555], [17427, 17428, 17557], [17427, 17557, 17556], [17428, 17429, 17557], [17429, 17558, 17557], [17429, 17430, 17559], [17429, 17559, 17558], [17430, 17431, 17559], [17431, 17560, 17559], [17431, 17432, 17561], [17431, 17561, 17560], [17432, 17433, 17561], [17433, 17562, 17561], [17433, 17434, 17563], [17433, 17563, 17562], [17434, 17435, 17563], [17435, 17564, 17563], [17435, 17436, 17565], [17435, 17565, 17564], [17436, 17437, 17565], [17437, 17566, 17565], [17437, 17438, 17567], [17437, 17567, 17566], [17438, 17439, 17567], [17439, 17568, 17567], [17439, 17440, 17569], [17439, 17569, 17568], [17440, 17441, 17569], [17441, 17570, 17569], [17441, 17442, 17571], [17441, 17571, 17570], 
[17442, 17443, 17571], [17443, 17572, 17571], [17443, 17444, 17573], [17443, 17573, 17572], [17444, 17445, 17573], [17445, 17574, 17573], [17445, 17446, 17575], [17445, 17575, 17574], [17446, 17447, 17575], [17447, 17576, 17575], [17447, 17448, 17577], [17447, 17577, 17576], [17448, 17449, 17577], [17449, 17578, 17577], [17449, 17450, 17579], [17449, 17579, 17578], [17450, 17451, 17579], [17451, 17580, 17579], [17451, 17452, 17581], [17451, 17581, 17580], [17452, 17453, 17581], [17453, 17582, 17581], [17453, 17454, 17583], [17453, 17583, 17582], [17454, 17455, 17583], [17455, 17584, 17583], [17455, 17456, 17585], [17455, 17585, 17584], [17456, 17457, 17585], [17457, 17586, 17585], [17457, 17458, 17587], [17457, 17587, 17586], [17458, 17459, 17587], [17459, 17588, 17587], [17459, 17460, 17589], [17459, 17589, 17588], [17461, 17462, 17591], [17461, 17591, 17590], [17462, 17463, 17591], [17463, 17592, 17591], [17463, 17464, 17593], [17463, 17593, 17592], [17464, 17465, 17593], [17465, 17594, 17593], [17465, 17466, 17595], [17465, 17595, 17594], [17466, 17467, 17595], [17467, 17596, 17595], [17467, 17468, 17597], [17467, 17597, 17596], [17468, 17469, 17597], [17469, 17598, 17597], [17469, 17470, 17599], [17469, 17599, 17598], [17470, 17471, 17599], [17471, 17600, 17599], [17471, 17472, 17601], [17471, 17601, 17600], [17472, 17473, 17601], [17473, 17602, 17601], [17473, 17474, 17603], [17473, 17603, 17602], [17474, 17475, 17603], [17475, 17604, 17603], [17475, 17476, 17605], [17475, 17605, 17604], [17476, 17477, 17605], [17477, 17606, 17605], [17477, 17478, 17607], [17477, 17607, 17606], [17478, 17479, 17607], [17479, 17608, 17607], [17479, 17480, 17609], [17479, 17609, 17608], [17480, 17481, 17609], [17481, 17610, 17609], [17481, 17482, 17611], [17481, 17611, 17610], [17482, 17483, 17611], [17483, 17612, 17611], [17483, 17484, 17613], [17483, 17613, 17612], [17484, 17485, 17613], [17485, 17614, 17613], [17485, 17486, 17615], [17485, 17615, 17614], [17486, 17487, 17615], [17487, 17616, 17615], [17487, 17488, 17617], [17487, 17617, 17616], [17488, 17489, 17617], [17489, 17618, 17617], [17489, 17490, 17619], [17489, 17619, 17618], [17490, 17491, 17619], [17491, 17620, 17619], [17491, 17492, 17621], [17491, 17621, 17620], [17492, 17493, 17621], [17493, 17622, 17621], [17493, 17494, 17623], [17493, 17623, 17622], [17494, 17495, 17623], [17495, 17624, 17623], [17495, 17496, 17625], [17495, 17625, 17624], [17496, 17497, 17625], [17497, 17626, 17625], [17497, 17498, 17627], [17497, 17627, 17626], [17498, 17499, 17627], [17499, 17628, 17627], [17499, 17500, 17629], [17499, 17629, 17628], [17500, 17501, 17629], [17501, 17630, 17629], [17501, 17502, 17631], [17501, 17631, 17630], [17502, 17503, 17631], [17503, 17632, 17631], [17503, 17504, 17633], [17503, 17633, 17632], [17504, 17505, 17633], [17505, 17634, 17633], [17505, 17506, 17635], [17505, 17635, 17634], [17506, 17507, 17635], [17507, 17636, 17635], [17507, 17508, 17637], [17507, 17637, 17636], [17508, 17509, 17637], [17509, 17638, 17637], [17509, 17510, 17639], [17509, 17639, 17638], [17510, 17511, 17639], [17511, 17640, 17639], [17511, 17512, 17641], [17511, 17641, 17640], [17512, 17513, 17641], [17513, 17642, 17641], [17513, 17514, 17643], [17513, 17643, 17642], [17514, 17515, 17643], [17515, 17644, 17643], [17515, 17516, 17645], [17515, 17645, 17644], [17516, 17517, 17645], [17517, 17646, 17645], [17517, 17518, 17647], [17517, 17647, 17646], [17518, 17519, 17647], [17519, 17648, 17647], [17519, 17520, 17649], [17519, 17649, 17648], [17520, 
17521, 17649], [17521, 17650, 17649], [17521, 17522, 17651], [17521, 17651, 17650], [17522, 17523, 17651], [17523, 17652, 17651], [17523, 17524, 17653], [17523, 17653, 17652], [17524, 17525, 17653], [17525, 17654, 17653], [17525, 17526, 17655], [17525, 17655, 17654], [17526, 17527, 17655], [17527, 17656, 17655], [17527, 17528, 17657], [17527, 17657, 17656], [17528, 17529, 17657], [17529, 17658, 17657], [17529, 17530, 17659], [17529, 17659, 17658], [17530, 17531, 17659], [17531, 17660, 17659], [17531, 17532, 17661], [17531, 17661, 17660], [17532, 17533, 17661], [17533, 17662, 17661], [17533, 17534, 17663], [17533, 17663, 17662], [17534, 17535, 17663], [17535, 17664, 17663], [17535, 17536, 17665], [17535, 17665, 17664], [17536, 17537, 17665], [17537, 17666, 17665], [17537, 17538, 17667], [17537, 17667, 17666], [17538, 17539, 17667], [17539, 17668, 17667], [17539, 17540, 17669], [17539, 17669, 17668], [17540, 17541, 17669], [17541, 17670, 17669], [17541, 17542, 17671], [17541, 17671, 17670], [17542, 17543, 17671], [17543, 17672, 17671], [17543, 17544, 17673], [17543, 17673, 17672], [17544, 17545, 17673], [17545, 17674, 17673], [17545, 17546, 17675], [17545, 17675, 17674], [17546, 17547, 17675], [17547, 17676, 17675], [17547, 17548, 17677], [17547, 17677, 17676], [17548, 17549, 17677], [17549, 17678, 17677], [17549, 17550, 17679], [17549, 17679, 17678], [17550, 17551, 17679], [17551, 17680, 17679], [17551, 17552, 17681], [17551, 17681, 17680], [17552, 17553, 17681], [17553, 17682, 17681], [17553, 17554, 17683], [17553, 17683, 17682], [17554, 17555, 17683], [17555, 17684, 17683], [17555, 17556, 17685], [17555, 17685, 17684], [17556, 17557, 17685], [17557, 17686, 17685], [17557, 17558, 17687], [17557, 17687, 17686], [17558, 17559, 17687], [17559, 17688, 17687], [17559, 17560, 17689], [17559, 17689, 17688], [17560, 17561, 17689], [17561, 17690, 17689], [17561, 17562, 17691], [17561, 17691, 17690], [17562, 17563, 17691], [17563, 17692, 17691], [17563, 17564, 17693], [17563, 17693, 17692], [17564, 17565, 17693], [17565, 17694, 17693], [17565, 17566, 17695], [17565, 17695, 17694], [17566, 17567, 17695], [17567, 17696, 17695], [17567, 17568, 17697], [17567, 17697, 17696], [17568, 17569, 17697], [17569, 17698, 17697], [17569, 17570, 17699], [17569, 17699, 17698], [17570, 17571, 17699], [17571, 17700, 17699], [17571, 17572, 17701], [17571, 17701, 17700], [17572, 17573, 17701], [17573, 17702, 17701], [17573, 17574, 17703], [17573, 17703, 17702], [17574, 17575, 17703], [17575, 17704, 17703], [17575, 17576, 17705], [17575, 17705, 17704], [17576, 17577, 17705], [17577, 17706, 17705], [17577, 17578, 17707], [17577, 17707, 17706], [17578, 17579, 17707], [17579, 17708, 17707], [17579, 17580, 17709], [17579, 17709, 17708], [17580, 17581, 17709], [17581, 17710, 17709], [17581, 17582, 17711], [17581, 17711, 17710], [17582, 17583, 17711], [17583, 17712, 17711], [17583, 17584, 17713], [17583, 17713, 17712], [17584, 17585, 17713], [17585, 17714, 17713], [17585, 17586, 17715], [17585, 17715, 17714], [17586, 17587, 17715], [17587, 17716, 17715], [17587, 17588, 17717], [17587, 17717, 17716], [17588, 17589, 17717], [17589, 17718, 17717], [17590, 17591, 17719], [17591, 17720, 17719], [17591, 17592, 17721], [17591, 17721, 17720], [17592, 17593, 17721], [17593, 17722, 17721], [17593, 17594, 17723], [17593, 17723, 17722], [17594, 17595, 17723], [17595, 17724, 17723], [17595, 17596, 17725], [17595, 17725, 17724], [17596, 17597, 17725], [17597, 17726, 17725], [17597, 17598, 17727], [17597, 17727, 17726], [17598, 17599, 
17727], [17599, 17728, 17727], [17599, 17600, 17729], [17599, 17729, 17728], [17600, 17601, 17729], [17601, 17730, 17729], [17601, 17602, 17731], [17601, 17731, 17730], [17602, 17603, 17731], [17603, 17732, 17731], [17603, 17604, 17733], [17603, 17733, 17732], [17604, 17605, 17733], [17605, 17734, 17733], [17605, 17606, 17735], [17605, 17735, 17734], [17606, 17607, 17735], [17607, 17736, 17735], [17607, 17608, 17737], [17607, 17737, 17736], [17608, 17609, 17737], [17609, 17738, 17737], [17609, 17610, 17739], [17609, 17739, 17738], [17610, 17611, 17739], [17611, 17740, 17739], [17611, 17612, 17741], [17611, 17741, 17740], [17612, 17613, 17741], [17613, 17742, 17741], [17613, 17614, 17743], [17613, 17743, 17742], [17614, 17615, 17743], [17615, 17744, 17743], [17615, 17616, 17745], [17615, 17745, 17744], [17616, 17617, 17745], [17617, 17746, 17745], [17617, 17618, 17747], [17617, 17747, 17746], [17618, 17619, 17747], [17619, 17748, 17747], [17619, 17620, 17749], [17619, 17749, 17748], [17620, 17621, 17749], [17621, 17750, 17749], [17621, 17622, 17751], [17621, 17751, 17750], [17622, 17623, 17751], [17623, 17752, 17751], [17623, 17624, 17753], [17623, 17753, 17752], [17624, 17625, 17753], [17625, 17754, 17753], [17625, 17626, 17755], [17625, 17755, 17754], [17626, 17627, 17755], [17627, 17756, 17755], [17627, 17628, 17757], [17627, 17757, 17756], [17628, 17629, 17757], [17629, 17758, 17757], [17629, 17630, 17759], [17629, 17759, 17758], [17630, 17631, 17759], [17631, 17760, 17759], [17631, 17632, 17761], [17631, 17761, 17760], [17632, 17633, 17761], [17633, 17762, 17761], [17633, 17634, 17763], [17633, 17763, 17762], [17634, 17635, 17763], [17635, 17764, 17763], [17635, 17636, 17765], [17635, 17765, 17764], [17636, 17637, 17765], [17637, 17766, 17765], [17637, 17638, 17767], [17637, 17767, 17766], [17638, 17639, 17767], [17639, 17768, 17767], [17639, 17640, 17769], [17639, 17769, 17768], [17640, 17641, 17769], [17641, 17770, 17769], [17641, 17642, 17771], [17641, 17771, 17770], [17642, 17643, 17771], [17643, 17772, 17771], [17643, 17644, 17773], [17643, 17773, 17772], [17644, 17645, 17773], [17645, 17774, 17773], [17645, 17646, 17775], [17645, 17775, 17774], [17646, 17647, 17775], [17647, 17776, 17775], [17647, 17648, 17777], [17647, 17777, 17776], [17648, 17649, 17777], [17649, 17778, 17777], [17649, 17650, 17779], [17649, 17779, 17778], [17650, 17651, 17779], [17651, 17780, 17779], [17651, 17652, 17781], [17651, 17781, 17780], [17652, 17653, 17781], [17653, 17782, 17781], [17653, 17654, 17783], [17653, 17783, 17782], [17654, 17655, 17783], [17655, 17784, 17783], [17655, 17656, 17785], [17655, 17785, 17784], [17656, 17657, 17785], [17657, 17786, 17785], [17657, 17658, 17787], [17657, 17787, 17786], [17658, 17659, 17787], [17659, 17788, 17787], [17659, 17660, 17789], [17659, 17789, 17788], [17660, 17661, 17789], [17661, 17790, 17789], [17661, 17662, 17791], [17661, 17791, 17790], [17662, 17663, 17791], [17663, 17792, 17791], [17663, 17664, 17793], [17663, 17793, 17792], [17664, 17665, 17793], [17665, 17794, 17793], [17665, 17666, 17795], [17665, 17795, 17794], [17666, 17667, 17795], [17667, 17796, 17795], [17667, 17668, 17797], [17667, 17797, 17796], [17668, 17669, 17797], [17669, 17798, 17797], [17669, 17670, 17799], [17669, 17799, 17798], [17670, 17671, 17799], [17671, 17800, 17799], [17671, 17672, 17801], [17671, 17801, 17800], [17672, 17673, 17801], [17673, 17802, 17801], [17673, 17674, 17803], [17673, 17803, 17802], [17674, 17675, 17803], [17675, 17804, 17803], [17675, 17676, 17805], 
[17675, 17805, 17804], [17676, 17677, 17805], [17677, 17806, 17805], [17677, 17678, 17807], [17677, 17807, 17806], [17678, 17679, 17807], [17679, 17808, 17807], [17679, 17680, 17809], [17679, 17809, 17808], [17680, 17681, 17809], [17681, 17810, 17809], [17681, 17682, 17811], [17681, 17811, 17810], [17682, 17683, 17811], [17683, 17812, 17811], [17683, 17684, 17813], [17683, 17813, 17812], [17684, 17685, 17813], [17685, 17814, 17813], [17685, 17686, 17815], [17685, 17815, 17814], [17686, 17687, 17815], [17687, 17816, 17815], [17687, 17688, 17817], [17687, 17817, 17816], [17688, 17689, 17817], [17689, 17818, 17817], [17689, 17690, 17819], [17689, 17819, 17818], [17690, 17691, 17819], [17691, 17820, 17819], [17691, 17692, 17821], [17691, 17821, 17820], [17692, 17693, 17821], [17693, 17822, 17821], [17693, 17694, 17823], [17693, 17823, 17822], [17694, 17695, 17823], [17695, 17824, 17823], [17695, 17696, 17825], [17695, 17825, 17824], [17696, 17697, 17825], [17697, 17826, 17825], [17697, 17698, 17827], [17697, 17827, 17826], [17698, 17699, 17827], [17699, 17828, 17827], [17699, 17700, 17829], [17699, 17829, 17828], [17700, 17701, 17829], [17701, 17830, 17829], [17701, 17702, 17831], [17701, 17831, 17830], [17702, 17703, 17831], [17703, 17832, 17831], [17703, 17704, 17833], [17703, 17833, 17832], [17704, 17705, 17833], [17705, 17834, 17833], [17705, 17706, 17835], [17705, 17835, 17834], [17706, 17707, 17835], [17707, 17836, 17835], [17707, 17708, 17837], [17707, 17837, 17836], [17708, 17709, 17837], [17709, 17838, 17837], [17709, 17710, 17839], [17709, 17839, 17838], [17710, 17711, 17839], [17711, 17840, 17839], [17711, 17712, 17841], [17711, 17841, 17840], [17712, 17713, 17841], [17713, 17842, 17841], [17713, 17714, 17843], [17713, 17843, 17842], [17714, 17715, 17843], [17715, 17844, 17843], [17715, 17716, 17845], [17715, 17845, 17844], [17716, 17717, 17845], [17717, 17846, 17845], [17717, 17718, 17847], [17717, 17847, 17846], [17719, 17720, 17849], [17719, 17849, 17848], [17720, 17721, 17849], [17721, 17850, 17849], [17721, 17722, 17851], [17721, 17851, 17850], [17722, 17723, 17851], [17723, 17852, 17851], [17723, 17724, 17853], [17723, 17853, 17852], [17724, 17725, 17853], [17725, 17854, 17853], [17725, 17726, 17855], [17725, 17855, 17854], [17726, 17727, 17855], [17727, 17856, 17855], [17727, 17728, 17857], [17727, 17857, 17856], [17728, 17729, 17857], [17729, 17858, 17857], [17729, 17730, 17859], [17729, 17859, 17858], [17730, 17731, 17859], [17731, 17860, 17859], [17731, 17732, 17861], [17731, 17861, 17860], [17732, 17733, 17861], [17733, 17862, 17861], [17733, 17734, 17863], [17733, 17863, 17862], [17734, 17735, 17863], [17735, 17864, 17863], [17735, 17736, 17865], [17735, 17865, 17864], [17736, 17737, 17865], [17737, 17866, 17865], [17737, 17738, 17867], [17737, 17867, 17866], [17738, 17739, 17867], [17739, 17868, 17867], [17739, 17740, 17869], [17739, 17869, 17868], [17740, 17741, 17869], [17741, 17870, 17869], [17741, 17742, 17871], [17741, 17871, 17870], [17742, 17743, 17871], [17743, 17872, 17871], [17743, 17744, 17873], [17743, 17873, 17872], [17744, 17745, 17873], [17745, 17874, 17873], [17745, 17746, 17875], [17745, 17875, 17874], [17746, 17747, 17875], [17747, 17876, 17875], [17747, 17748, 17877], [17747, 17877, 17876], [17748, 17749, 17877], [17749, 17878, 17877], [17749, 17750, 17879], [17749, 17879, 17878], [17750, 17751, 17879], [17751, 17880, 17879], [17751, 17752, 17881], [17751, 17881, 17880], [17752, 17753, 17881], [17753, 17882, 17881], [17753, 17754, 17883], [17753, 
17883, 17882], [17754, 17755, 17883], [17755, 17884, 17883], [17755, 17756, 17885], [17755, 17885, 17884], [17756, 17757, 17885], [17757, 17886, 17885], [17757, 17758, 17887], [17757, 17887, 17886], [17758, 17759, 17887], [17759, 17888, 17887], [17759, 17760, 17889], [17759, 17889, 17888], [17760, 17761, 17889], [17761, 17890, 17889], [17761, 17762, 17891], [17761, 17891, 17890], [17762, 17763, 17891], [17763, 17892, 17891], [17763, 17764, 17893], [17763, 17893, 17892], [17764, 17765, 17893], [17765, 17894, 17893], [17765, 17766, 17895], [17765, 17895, 17894], [17766, 17767, 17895], [17767, 17896, 17895], [17767, 17768, 17897], [17767, 17897, 17896], [17768, 17769, 17897], [17769, 17898, 17897], [17769, 17770, 17899], [17769, 17899, 17898], [17770, 17771, 17899], [17771, 17900, 17899], [17771, 17772, 17901], [17771, 17901, 17900], [17772, 17773, 17901], [17773, 17902, 17901], [17773, 17774, 17903], [17773, 17903, 17902], [17774, 17775, 17903], [17775, 17904, 17903], [17775, 17776, 17905], [17775, 17905, 17904], [17776, 17777, 17905], [17777, 17906, 17905], [17777, 17778, 17907], [17777, 17907, 17906], [17778, 17779, 17907], [17779, 17908, 17907], [17779, 17780, 17909], [17779, 17909, 17908], [17780, 17781, 17909], [17781, 17910, 17909], [17781, 17782, 17911], [17781, 17911, 17910], [17782, 17783, 17911], [17783, 17912, 17911], [17783, 17784, 17913], [17783, 17913, 17912], [17784, 17785, 17913], [17785, 17914, 17913], [17785, 17786, 17915], [17785, 17915, 17914], [17786, 17787, 17915], [17787, 17916, 17915], [17787, 17788, 17917], [17787, 17917, 17916], [17788, 17789, 17917], [17789, 17918, 17917], [17789, 17790, 17919], [17789, 17919, 17918], [17790, 17791, 17919], [17791, 17920, 17919], [17791, 17792, 17921], [17791, 17921, 17920], [17792, 17793, 17921], [17793, 17922, 17921], [17793, 17794, 17923], [17793, 17923, 17922], [17794, 17795, 17923], [17795, 17924, 17923], [17795, 17796, 17925], [17795, 17925, 17924], [17796, 17797, 17925], [17797, 17926, 17925], [17797, 17798, 17927], [17797, 17927, 17926], [17798, 17799, 17927], [17799, 17928, 17927], [17799, 17800, 17929], [17799, 17929, 17928], [17800, 17801, 17929], [17801, 17930, 17929], [17801, 17802, 17931], [17801, 17931, 17930], [17802, 17803, 17931], [17803, 17932, 17931], [17803, 17804, 17933], [17803, 17933, 17932], [17804, 17805, 17933], [17805, 17934, 17933], [17805, 17806, 17935], [17805, 17935, 17934], [17806, 17807, 17935], [17807, 17936, 17935], [17807, 17808, 17937], [17807, 17937, 17936], [17808, 17809, 17937], [17809, 17938, 17937], [17809, 17810, 17939], [17809, 17939, 17938], [17810, 17811, 17939], [17811, 17940, 17939], [17811, 17812, 17941], [17811, 17941, 17940], [17812, 17813, 17941], [17813, 17942, 17941], [17813, 17814, 17943], [17813, 17943, 17942], [17814, 17815, 17943], [17815, 17944, 17943], [17815, 17816, 17945], [17815, 17945, 17944], [17816, 17817, 17945], [17817, 17946, 17945], [17817, 17818, 17947], [17817, 17947, 17946], [17818, 17819, 17947], [17819, 17948, 17947], [17819, 17820, 17949], [17819, 17949, 17948], [17820, 17821, 17949], [17821, 17950, 17949], [17821, 17822, 17951], [17821, 17951, 17950], [17822, 17823, 17951], [17823, 17952, 17951], [17823, 17824, 17953], [17823, 17953, 17952], [17824, 17825, 17953], [17825, 17954, 17953], [17825, 17826, 17955], [17825, 17955, 17954], [17826, 17827, 17955], [17827, 17956, 17955], [17827, 17828, 17957], [17827, 17957, 17956], [17828, 17829, 17957], [17829, 17958, 17957], [17829, 17830, 17959], [17829, 17959, 17958], [17830, 17831, 17959], [17831, 17960, 
17959], [17831, 17832, 17961], [17831, 17961, 17960], [17832, 17833, 17961], [17833, 17962, 17961], [17833, 17834, 17963], [17833, 17963, 17962], [17834, 17835, 17963], [17835, 17964, 17963], [17835, 17836, 17965], [17835, 17965, 17964], [17836, 17837, 17965], [17837, 17966, 17965], [17837, 17838, 17967], [17837, 17967, 17966], [17838, 17839, 17967], [17839, 17968, 17967], [17839, 17840, 17969], [17839, 17969, 17968], [17840, 17841, 17969], [17841, 17970, 17969], [17841, 17842, 17971], [17841, 17971, 17970], [17842, 17843, 17971], [17843, 17972, 17971], [17843, 17844, 17973], [17843, 17973, 17972], [17844, 17845, 17973], [17845, 17974, 17973], [17845, 17846, 17975], [17845, 17975, 17974], [17846, 17847, 17975], [17847, 17976, 17975], [17848, 17849, 17977], [17849, 17978, 17977], [17849, 17850, 17979], [17849, 17979, 17978], [17850, 17851, 17979], [17851, 17980, 17979], [17851, 17852, 17981], [17851, 17981, 17980], [17852, 17853, 17981], [17853, 17982, 17981], [17853, 17854, 17983], [17853, 17983, 17982], [17854, 17855, 17983], [17855, 17984, 17983], [17855, 17856, 17985], [17855, 17985, 17984], [17856, 17857, 17985], [17857, 17986, 17985], [17857, 17858, 17987], [17857, 17987, 17986], [17858, 17859, 17987], [17859, 17988, 17987], [17859, 17860, 17989], [17859, 17989, 17988], [17860, 17861, 17989], [17861, 17990, 17989], [17861, 17862, 17991], [17861, 17991, 17990], [17862, 17863, 17991], [17863, 17992, 17991], [17863, 17864, 17993], [17863, 17993, 17992], [17864, 17865, 17993], [17865, 17994, 17993], [17865, 17866, 17995], [17865, 17995, 17994], [17866, 17867, 17995], [17867, 17996, 17995], [17867, 17868, 17997], [17867, 17997, 17996], [17868, 17869, 17997], [17869, 17998, 17997], [17869, 17870, 17999], [17869, 17999, 17998], [17870, 17871, 17999], [17871, 18000, 17999], [17871, 17872, 18001], [17871, 18001, 18000], [17872, 17873, 18001], [17873, 18002, 18001], [17873, 17874, 18003], [17873, 18003, 18002], [17874, 17875, 18003], [17875, 18004, 18003], [17875, 17876, 18005], [17875, 18005, 18004], [17876, 17877, 18005], [17877, 18006, 18005], [17877, 17878, 18007], [17877, 18007, 18006], [17878, 17879, 18007], [17879, 18008, 18007], [17879, 17880, 18009], [17879, 18009, 18008], [17880, 17881, 18009], [17881, 18010, 18009], [17881, 17882, 18011], [17881, 18011, 18010], [17882, 17883, 18011], [17883, 18012, 18011], [17883, 17884, 18013], [17883, 18013, 18012], [17884, 17885, 18013], [17885, 18014, 18013], [17885, 17886, 18015], [17885, 18015, 18014], [17886, 17887, 18015], [17887, 18016, 18015], [17887, 17888, 18017], [17887, 18017, 18016], [17888, 17889, 18017], [17889, 18018, 18017], [17889, 17890, 18019], [17889, 18019, 18018], [17890, 17891, 18019], [17891, 18020, 18019], [17891, 17892, 18021], [17891, 18021, 18020], [17892, 17893, 18021], [17893, 18022, 18021], [17893, 17894, 18023], [17893, 18023, 18022], [17894, 17895, 18023], [17895, 18024, 18023], [17895, 17896, 18025], [17895, 18025, 18024], [17896, 17897, 18025], [17897, 18026, 18025], [17897, 17898, 18027], [17897, 18027, 18026], [17898, 17899, 18027], [17899, 18028, 18027], [17899, 17900, 18029], [17899, 18029, 18028], [17900, 17901, 18029], [17901, 18030, 18029], [17901, 17902, 18031], [17901, 18031, 18030], [17902, 17903, 18031], [17903, 18032, 18031], [17903, 17904, 18033], [17903, 18033, 18032], [17904, 17905, 18033], [17905, 18034, 18033], [17905, 17906, 18035], [17905, 18035, 18034], [17906, 17907, 18035], [17907, 18036, 18035], [17907, 17908, 18037], [17907, 18037, 18036], [17908, 17909, 18037], [17909, 18038, 18037], 
[17909, 17910, 18039], [17909, 18039, 18038], [17910, 17911, 18039], [17911, 18040, 18039], [17911, 17912, 18041], [17911, 18041, 18040], [17912, 17913, 18041], [17913, 18042, 18041], [17913, 17914, 18043], [17913, 18043, 18042], [17914, 17915, 18043], [17915, 18044, 18043], [17915, 17916, 18045], [17915, 18045, 18044], [17916, 17917, 18045], [17917, 18046, 18045], [17917, 17918, 18047], [17917, 18047, 18046], [17918, 17919, 18047], [17919, 18048, 18047], [17919, 17920, 18049], [17919, 18049, 18048], [17920, 17921, 18049], [17921, 18050, 18049], [17921, 17922, 18051], [17921, 18051, 18050], [17922, 17923, 18051], [17923, 18052, 18051], [17923, 17924, 18053], [17923, 18053, 18052], [17924, 17925, 18053], [17925, 18054, 18053], [17925, 17926, 18055], [17925, 18055, 18054], [17926, 17927, 18055], [17927, 18056, 18055], [17927, 17928, 18057], [17927, 18057, 18056], [17928, 17929, 18057], [17929, 18058, 18057], [17929, 17930, 18059], [17929, 18059, 18058], [17930, 17931, 18059], [17931, 18060, 18059], [17931, 17932, 18061], [17931, 18061, 18060], [17932, 17933, 18061], [17933, 18062, 18061], [17933, 17934, 18063], [17933, 18063, 18062], [17934, 17935, 18063], [17935, 18064, 18063], [17935, 17936, 18065], [17935, 18065, 18064], [17936, 17937, 18065], [17937, 18066, 18065], [17937, 17938, 18067], [17937, 18067, 18066], [17938, 17939, 18067], [17939, 18068, 18067], [17939, 17940, 18069], [17939, 18069, 18068], [17940, 17941, 18069], [17941, 18070, 18069], [17941, 17942, 18071], [17941, 18071, 18070], [17942, 17943, 18071], [17943, 18072, 18071], [17943, 17944, 18073], [17943, 18073, 18072], [17944, 17945, 18073], [17945, 18074, 18073], [17945, 17946, 18075], [17945, 18075, 18074], [17946, 17947, 18075], [17947, 18076, 18075], [17947, 17948, 18077], [17947, 18077, 18076], [17948, 17949, 18077], [17949, 18078, 18077], [17949, 17950, 18079], [17949, 18079, 18078], [17950, 17951, 18079], [17951, 18080, 18079], [17951, 17952, 18081], [17951, 18081, 18080], [17952, 17953, 18081], [17953, 18082, 18081], [17953, 17954, 18083], [17953, 18083, 18082], [17954, 17955, 18083], [17955, 18084, 18083], [17955, 17956, 18085], [17955, 18085, 18084], [17956, 17957, 18085], [17957, 18086, 18085], [17957, 17958, 18087], [17957, 18087, 18086], [17958, 17959, 18087], [17959, 18088, 18087], [17959, 17960, 18089], [17959, 18089, 18088], [17960, 17961, 18089], [17961, 18090, 18089], [17961, 17962, 18091], [17961, 18091, 18090], [17962, 17963, 18091], [17963, 18092, 18091], [17963, 17964, 18093], [17963, 18093, 18092], [17964, 17965, 18093], [17965, 18094, 18093], [17965, 17966, 18095], [17965, 18095, 18094], [17966, 17967, 18095], [17967, 18096, 18095], [17967, 17968, 18097], [17967, 18097, 18096], [17968, 17969, 18097], [17969, 18098, 18097], [17969, 17970, 18099], [17969, 18099, 18098], [17970, 17971, 18099], [17971, 18100, 18099], [17971, 17972, 18101], [17971, 18101, 18100], [17972, 17973, 18101], [17973, 18102, 18101], [17973, 17974, 18103], [17973, 18103, 18102], [17974, 17975, 18103], [17975, 18104, 18103], [17975, 17976, 18105], [17975, 18105, 18104], [17977, 17978, 18107], [17977, 18107, 18106], [17978, 17979, 18107], [17979, 18108, 18107], [17979, 17980, 18109], [17979, 18109, 18108], [17980, 17981, 18109], [17981, 18110, 18109], [17981, 17982, 18111], [17981, 18111, 18110], [17982, 17983, 18111], [17983, 18112, 18111], [17983, 17984, 18113], [17983, 18113, 18112], [17984, 17985, 18113], [17985, 18114, 18113], [17985, 17986, 18115], [17985, 18115, 18114], [17986, 17987, 18115], [17987, 18116, 18115], [17987, 
17988, 18117], [17987, 18117, 18116], [17988, 17989, 18117], [17989, 18118, 18117], [17989, 17990, 18119], [17989, 18119, 18118], [17990, 17991, 18119], [17991, 18120, 18119], [17991, 17992, 18121], [17991, 18121, 18120], [17992, 17993, 18121], [17993, 18122, 18121], [17993, 17994, 18123], [17993, 18123, 18122], [17994, 17995, 18123], [17995, 18124, 18123], [17995, 17996, 18125], [17995, 18125, 18124], [17996, 17997, 18125], [17997, 18126, 18125], [17997, 17998, 18127], [17997, 18127, 18126], [17998, 17999, 18127], [17999, 18128, 18127], [17999, 18000, 18129], [17999, 18129, 18128], [18000, 18001, 18129], [18001, 18130, 18129], [18001, 18002, 18131], [18001, 18131, 18130], [18002, 18003, 18131], [18003, 18132, 18131], [18003, 18004, 18133], [18003, 18133, 18132], [18004, 18005, 18133], [18005, 18134, 18133], [18005, 18006, 18135], [18005, 18135, 18134], [18006, 18007, 18135], [18007, 18136, 18135], [18007, 18008, 18137], [18007, 18137, 18136], [18008, 18009, 18137], [18009, 18138, 18137], [18009, 18010, 18139], [18009, 18139, 18138], [18010, 18011, 18139], [18011, 18140, 18139], [18011, 18012, 18141], [18011, 18141, 18140], [18012, 18013, 18141], [18013, 18142, 18141], [18013, 18014, 18143], [18013, 18143, 18142], [18014, 18015, 18143], [18015, 18144, 18143], [18015, 18016, 18145], [18015, 18145, 18144], [18016, 18017, 18145], [18017, 18146, 18145], [18017, 18018, 18147], [18017, 18147, 18146], [18018, 18019, 18147], [18019, 18148, 18147], [18019, 18020, 18149], [18019, 18149, 18148], [18020, 18021, 18149], [18021, 18150, 18149], [18021, 18022, 18151], [18021, 18151, 18150], [18022, 18023, 18151], [18023, 18152, 18151], [18023, 18024, 18153], [18023, 18153, 18152], [18024, 18025, 18153], [18025, 18154, 18153], [18025, 18026, 18155], [18025, 18155, 18154], [18026, 18027, 18155], [18027, 18156, 18155], [18027, 18028, 18157], [18027, 18157, 18156], [18028, 18029, 18157], [18029, 18158, 18157], [18029, 18030, 18159], [18029, 18159, 18158], [18030, 18031, 18159], [18031, 18160, 18159], [18031, 18032, 18161], [18031, 18161, 18160], [18032, 18033, 18161], [18033, 18162, 18161], [18033, 18034, 18163], [18033, 18163, 18162], [18034, 18035, 18163], [18035, 18164, 18163], [18035, 18036, 18165], [18035, 18165, 18164], [18036, 18037, 18165], [18037, 18166, 18165], [18037, 18038, 18167], [18037, 18167, 18166], [18038, 18039, 18167], [18039, 18168, 18167], [18039, 18040, 18169], [18039, 18169, 18168], [18040, 18041, 18169], [18041, 18170, 18169], [18041, 18042, 18171], [18041, 18171, 18170], [18042, 18043, 18171], [18043, 18172, 18171], [18043, 18044, 18173], [18043, 18173, 18172], [18044, 18045, 18173], [18045, 18174, 18173], [18045, 18046, 18175], [18045, 18175, 18174], [18046, 18047, 18175], [18047, 18176, 18175], [18047, 18048, 18177], [18047, 18177, 18176], [18048, 18049, 18177], [18049, 18178, 18177], [18049, 18050, 18179], [18049, 18179, 18178], [18050, 18051, 18179], [18051, 18180, 18179], [18051, 18052, 18181], [18051, 18181, 18180], [18052, 18053, 18181], [18053, 18182, 18181], [18053, 18054, 18183], [18053, 18183, 18182], [18054, 18055, 18183], [18055, 18184, 18183], [18055, 18056, 18185], [18055, 18185, 18184], [18056, 18057, 18185], [18057, 18186, 18185], [18057, 18058, 18187], [18057, 18187, 18186], [18058, 18059, 18187], [18059, 18188, 18187], [18059, 18060, 18189], [18059, 18189, 18188], [18060, 18061, 18189], [18061, 18190, 18189], [18061, 18062, 18191], [18061, 18191, 18190], [18062, 18063, 18191], [18063, 18192, 18191], [18063, 18064, 18193], [18063, 18193, 18192], [18064, 18065, 
18193], [18065, 18194, 18193], [18065, 18066, 18195], [18065, 18195, 18194], [18066, 18067, 18195], [18067, 18196, 18195], [18067, 18068, 18197], [18067, 18197, 18196], [18068, 18069, 18197], [18069, 18198, 18197], [18069, 18070, 18199], [18069, 18199, 18198], [18070, 18071, 18199], [18071, 18200, 18199], [18071, 18072, 18201], [18071, 18201, 18200], [18072, 18073, 18201], [18073, 18202, 18201], [18073, 18074, 18203], [18073, 18203, 18202], [18074, 18075, 18203], [18075, 18204, 18203], [18075, 18076, 18205], [18075, 18205, 18204], [18076, 18077, 18205], [18077, 18206, 18205], [18077, 18078, 18207], [18077, 18207, 18206], [18078, 18079, 18207], [18079, 18208, 18207], [18079, 18080, 18209], [18079, 18209, 18208], [18080, 18081, 18209], [18081, 18210, 18209], [18081, 18082, 18211], [18081, 18211, 18210], [18082, 18083, 18211], [18083, 18212, 18211], [18083, 18084, 18213], [18083, 18213, 18212], [18084, 18085, 18213], [18085, 18214, 18213], [18085, 18086, 18215], [18085, 18215, 18214], [18086, 18087, 18215], [18087, 18216, 18215], [18087, 18088, 18217], [18087, 18217, 18216], [18088, 18089, 18217], [18089, 18218, 18217], [18089, 18090, 18219], [18089, 18219, 18218], [18090, 18091, 18219], [18091, 18220, 18219], [18091, 18092, 18221], [18091, 18221, 18220], [18092, 18093, 18221], [18093, 18222, 18221], [18093, 18094, 18223], [18093, 18223, 18222], [18094, 18095, 18223], [18095, 18224, 18223], [18095, 18096, 18225], [18095, 18225, 18224], [18096, 18097, 18225], [18097, 18226, 18225], [18097, 18098, 18227], [18097, 18227, 18226], [18098, 18099, 18227], [18099, 18228, 18227], [18099, 18100, 18229], [18099, 18229, 18228], [18100, 18101, 18229], [18101, 18230, 18229], [18101, 18102, 18231], [18101, 18231, 18230], [18102, 18103, 18231], [18103, 18232, 18231], [18103, 18104, 18233], [18103, 18233, 18232], [18104, 18105, 18233], [18105, 18234, 18233], [18106, 18107, 18235], [18107, 18236, 18235], [18107, 18108, 18237], [18107, 18237, 18236], [18108, 18109, 18237], [18109, 18238, 18237], [18109, 18110, 18239], [18109, 18239, 18238], [18110, 18111, 18239], [18111, 18240, 18239], [18111, 18112, 18241], [18111, 18241, 18240], [18112, 18113, 18241], [18113, 18242, 18241], [18113, 18114, 18243], [18113, 18243, 18242], [18114, 18115, 18243], [18115, 18244, 18243], [18115, 18116, 18245], [18115, 18245, 18244], [18116, 18117, 18245], [18117, 18246, 18245], [18117, 18118, 18247], [18117, 18247, 18246], [18118, 18119, 18247], [18119, 18248, 18247], [18119, 18120, 18249], [18119, 18249, 18248], [18120, 18121, 18249], [18121, 18250, 18249], [18121, 18122, 18251], [18121, 18251, 18250], [18122, 18123, 18251], [18123, 18252, 18251], [18123, 18124, 18253], [18123, 18253, 18252], [18124, 18125, 18253], [18125, 18254, 18253], [18125, 18126, 18255], [18125, 18255, 18254], [18126, 18127, 18255], [18127, 18256, 18255], [18127, 18128, 18257], [18127, 18257, 18256], [18128, 18129, 18257], [18129, 18258, 18257], [18129, 18130, 18259], [18129, 18259, 18258], [18130, 18131, 18259], [18131, 18260, 18259], [18131, 18132, 18261], [18131, 18261, 18260], [18132, 18133, 18261], [18133, 18262, 18261], [18133, 18134, 18263], [18133, 18263, 18262], [18134, 18135, 18263], [18135, 18264, 18263], [18135, 18136, 18265], [18135, 18265, 18264], [18136, 18137, 18265], [18137, 18266, 18265], [18137, 18138, 18267], [18137, 18267, 18266], [18138, 18139, 18267], [18139, 18268, 18267], [18139, 18140, 18269], [18139, 18269, 18268], [18140, 18141, 18269], [18141, 18270, 18269], [18141, 18142, 18271], [18141, 18271, 18270], [18142, 18143, 18271], 
[18143, 18272, 18271], [18143, 18144, 18273], [18143, 18273, 18272], [18144, 18145, 18273], [18145, 18274, 18273], [18145, 18146, 18275], [18145, 18275, 18274], [18146, 18147, 18275], [18147, 18276, 18275], [18147, 18148, 18277], [18147, 18277, 18276], [18148, 18149, 18277], [18149, 18278, 18277], [18149, 18150, 18279], [18149, 18279, 18278], [18150, 18151, 18279], [18151, 18280, 18279], [18151, 18152, 18281], [18151, 18281, 18280], [18152, 18153, 18281], [18153, 18282, 18281], [18153, 18154, 18283], [18153, 18283, 18282], [18154, 18155, 18283], [18155, 18284, 18283], [18155, 18156, 18285], [18155, 18285, 18284], [18156, 18157, 18285], [18157, 18286, 18285], [18157, 18158, 18287], [18157, 18287, 18286], [18158, 18159, 18287], [18159, 18288, 18287], [18159, 18160, 18289], [18159, 18289, 18288], [18160, 18161, 18289], [18161, 18290, 18289], [18161, 18162, 18291], [18161, 18291, 18290], [18162, 18163, 18291], [18163, 18292, 18291], [18163, 18164, 18293], [18163, 18293, 18292], [18164, 18165, 18293], [18165, 18294, 18293], [18165, 18166, 18295], [18165, 18295, 18294], [18166, 18167, 18295], [18167, 18296, 18295], [18167, 18168, 18297], [18167, 18297, 18296], [18168, 18169, 18297], [18169, 18298, 18297], [18169, 18170, 18299], [18169, 18299, 18298], [18170, 18171, 18299], [18171, 18300, 18299], [18171, 18172, 18301], [18171, 18301, 18300], [18172, 18173, 18301], [18173, 18302, 18301], [18173, 18174, 18303], [18173, 18303, 18302], [18174, 18175, 18303], [18175, 18304, 18303], [18175, 18176, 18305], [18175, 18305, 18304], [18176, 18177, 18305], [18177, 18306, 18305], [18177, 18178, 18307], [18177, 18307, 18306], [18178, 18179, 18307], [18179, 18308, 18307], [18179, 18180, 18309], [18179, 18309, 18308], [18180, 18181, 18309], [18181, 18310, 18309], [18181, 18182, 18311], [18181, 18311, 18310], [18182, 18183, 18311], [18183, 18312, 18311], [18183, 18184, 18313], [18183, 18313, 18312], [18184, 18185, 18313], [18185, 18314, 18313], [18185, 18186, 18315], [18185, 18315, 18314], [18186, 18187, 18315], [18187, 18316, 18315], [18187, 18188, 18317], [18187, 18317, 18316], [18188, 18189, 18317], [18189, 18318, 18317], [18189, 18190, 18319], [18189, 18319, 18318], [18190, 18191, 18319], [18191, 18320, 18319], [18191, 18192, 18321], [18191, 18321, 18320], [18192, 18193, 18321], [18193, 18322, 18321], [18193, 18194, 18323], [18193, 18323, 18322], [18194, 18195, 18323], [18195, 18324, 18323], [18195, 18196, 18325], [18195, 18325, 18324], [18196, 18197, 18325], [18197, 18326, 18325], [18197, 18198, 18327], [18197, 18327, 18326], [18198, 18199, 18327], [18199, 18328, 18327], [18199, 18200, 18329], [18199, 18329, 18328], [18200, 18201, 18329], [18201, 18330, 18329], [18201, 18202, 18331], [18201, 18331, 18330], [18202, 18203, 18331], [18203, 18332, 18331], [18203, 18204, 18333], [18203, 18333, 18332], [18204, 18205, 18333], [18205, 18334, 18333], [18205, 18206, 18335], [18205, 18335, 18334], [18206, 18207, 18335], [18207, 18336, 18335], [18207, 18208, 18337], [18207, 18337, 18336], [18208, 18209, 18337], [18209, 18338, 18337], [18209, 18210, 18339], [18209, 18339, 18338], [18210, 18211, 18339], [18211, 18340, 18339], [18211, 18212, 18341], [18211, 18341, 18340], [18212, 18213, 18341], [18213, 18342, 18341], [18213, 18214, 18343], [18213, 18343, 18342], [18214, 18215, 18343], [18215, 18344, 18343], [18215, 18216, 18345], [18215, 18345, 18344], [18216, 18217, 18345], [18217, 18346, 18345], [18217, 18218, 18347], [18217, 18347, 18346], [18218, 18219, 18347], [18219, 18348, 18347], [18219, 18220, 18349], [18219, 
18349, 18348], [18220, 18221, 18349], [18221, 18350, 18349], [18221, 18222, 18351], [18221, 18351, 18350], [18222, 18223, 18351], [18223, 18352, 18351], [18223, 18224, 18353], [18223, 18353, 18352], [18224, 18225, 18353], [18225, 18354, 18353], [18225, 18226, 18355], [18225, 18355, 18354], [18226, 18227, 18355], [18227, 18356, 18355], [18227, 18228, 18357], [18227, 18357, 18356], [18228, 18229, 18357], [18229, 18358, 18357], [18229, 18230, 18359], [18229, 18359, 18358], [18230, 18231, 18359], [18231, 18360, 18359], [18231, 18232, 18361], [18231, 18361, 18360], [18232, 18233, 18361], [18233, 18362, 18361], [18233, 18234, 18363], [18233, 18363, 18362], [18235, 18236, 18365], [18235, 18365, 18364], [18236, 18237, 18365], [18237, 18366, 18365], [18237, 18238, 18367], [18237, 18367, 18366], [18238, 18239, 18367], [18239, 18368, 18367], [18239, 18240, 18369], [18239, 18369, 18368], [18240, 18241, 18369], [18241, 18370, 18369], [18241, 18242, 18371], [18241, 18371, 18370], [18242, 18243, 18371], [18243, 18372, 18371], [18243, 18244, 18373], [18243, 18373, 18372], [18244, 18245, 18373], [18245, 18374, 18373], [18245, 18246, 18375], [18245, 18375, 18374], [18246, 18247, 18375], [18247, 18376, 18375], [18247, 18248, 18377], [18247, 18377, 18376], [18248, 18249, 18377], [18249, 18378, 18377], [18249, 18250, 18379], [18249, 18379, 18378], [18250, 18251, 18379], [18251, 18380, 18379], [18251, 18252, 18381], [18251, 18381, 18380], [18252, 18253, 18381], [18253, 18382, 18381], [18253, 18254, 18383], [18253, 18383, 18382], [18254, 18255, 18383], [18255, 18384, 18383], [18255, 18256, 18385], [18255, 18385, 18384], [18256, 18257, 18385], [18257, 18386, 18385], [18257, 18258, 18387], [18257, 18387, 18386], [18258, 18259, 18387], [18259, 18388, 18387], [18259, 18260, 18389], [18259, 18389, 18388], [18260, 18261, 18389], [18261, 18390, 18389], [18261, 18262, 18391], [18261, 18391, 18390], [18262, 18263, 18391], [18263, 18392, 18391], [18263, 18264, 18393], [18263, 18393, 18392], [18264, 18265, 18393], [18265, 18394, 18393], [18265, 18266, 18395], [18265, 18395, 18394], [18266, 18267, 18395], [18267, 18396, 18395], [18267, 18268, 18397], [18267, 18397, 18396], [18268, 18269, 18397], [18269, 18398, 18397], [18269, 18270, 18399], [18269, 18399, 18398], [18270, 18271, 18399], [18271, 18400, 18399], [18271, 18272, 18401], [18271, 18401, 18400], [18272, 18273, 18401], [18273, 18402, 18401], [18273, 18274, 18403], [18273, 18403, 18402], [18274, 18275, 18403], [18275, 18404, 18403], [18275, 18276, 18405], [18275, 18405, 18404], [18276, 18277, 18405], [18277, 18406, 18405], [18277, 18278, 18407], [18277, 18407, 18406], [18278, 18279, 18407], [18279, 18408, 18407], [18279, 18280, 18409], [18279, 18409, 18408], [18280, 18281, 18409], [18281, 18410, 18409], [18281, 18282, 18411], [18281, 18411, 18410], [18282, 18283, 18411], [18283, 18412, 18411], [18283, 18284, 18413], [18283, 18413, 18412], [18284, 18285, 18413], [18285, 18414, 18413], [18285, 18286, 18415], [18285, 18415, 18414], [18286, 18287, 18415], [18287, 18416, 18415], [18287, 18288, 18417], [18287, 18417, 18416], [18288, 18289, 18417], [18289, 18418, 18417], [18289, 18290, 18419], [18289, 18419, 18418], [18290, 18291, 18419], [18291, 18420, 18419], [18291, 18292, 18421], [18291, 18421, 18420], [18292, 18293, 18421], [18293, 18422, 18421], [18293, 18294, 18423], [18293, 18423, 18422], [18294, 18295, 18423], [18295, 18424, 18423], [18295, 18296, 18425], [18295, 18425, 18424], [18296, 18297, 18425], [18297, 18426, 18425], [18297, 18298, 18427], [18297, 18427, 
18426], [18298, 18299, 18427], [18299, 18428, 18427], [18299, 18300, 18429], [18299, 18429, 18428], [18300, 18301, 18429], [18301, 18430, 18429], [18301, 18302, 18431], [18301, 18431, 18430], [18302, 18303, 18431], [18303, 18432, 18431], [18303, 18304, 18433], [18303, 18433, 18432], [18304, 18305, 18433], [18305, 18434, 18433], [18305, 18306, 18435], [18305, 18435, 18434], [18306, 18307, 18435], [18307, 18436, 18435], [18307, 18308, 18437], [18307, 18437, 18436], [18308, 18309, 18437], [18309, 18438, 18437], [18309, 18310, 18439], [18309, 18439, 18438], [18310, 18311, 18439], [18311, 18440, 18439], [18311, 18312, 18441], [18311, 18441, 18440], [18312, 18313, 18441], [18313, 18442, 18441], [18313, 18314, 18443], [18313, 18443, 18442], [18314, 18315, 18443], [18315, 18444, 18443], [18315, 18316, 18445], [18315, 18445, 18444], [18316, 18317, 18445], [18317, 18446, 18445], [18317, 18318, 18447], [18317, 18447, 18446], [18318, 18319, 18447], [18319, 18448, 18447], [18319, 18320, 18449], [18319, 18449, 18448], [18320, 18321, 18449], [18321, 18450, 18449], [18321, 18322, 18451], [18321, 18451, 18450], [18322, 18323, 18451], [18323, 18452, 18451], [18323, 18324, 18453], [18323, 18453, 18452], [18324, 18325, 18453], [18325, 18454, 18453], [18325, 18326, 18455], [18325, 18455, 18454], [18326, 18327, 18455], [18327, 18456, 18455], [18327, 18328, 18457], [18327, 18457, 18456], [18328, 18329, 18457], [18329, 18458, 18457], [18329, 18330, 18459], [18329, 18459, 18458], [18330, 18331, 18459], [18331, 18460, 18459], [18331, 18332, 18461], [18331, 18461, 18460], [18332, 18333, 18461], [18333, 18462, 18461], [18333, 18334, 18463], [18333, 18463, 18462], [18334, 18335, 18463], [18335, 18464, 18463], [18335, 18336, 18465], [18335, 18465, 18464], [18336, 18337, 18465], [18337, 18466, 18465], [18337, 18338, 18467], [18337, 18467, 18466], [18338, 18339, 18467], [18339, 18468, 18467], [18339, 18340, 18469], [18339, 18469, 18468], [18340, 18341, 18469], [18341, 18470, 18469], [18341, 18342, 18471], [18341, 18471, 18470], [18342, 18343, 18471], [18343, 18472, 18471], [18343, 18344, 18473], [18343, 18473, 18472], [18344, 18345, 18473], [18345, 18474, 18473], [18345, 18346, 18475], [18345, 18475, 18474], [18346, 18347, 18475], [18347, 18476, 18475], [18347, 18348, 18477], [18347, 18477, 18476], [18348, 18349, 18477], [18349, 18478, 18477], [18349, 18350, 18479], [18349, 18479, 18478], [18350, 18351, 18479], [18351, 18480, 18479], [18351, 18352, 18481], [18351, 18481, 18480], [18352, 18353, 18481], [18353, 18482, 18481], [18353, 18354, 18483], [18353, 18483, 18482], [18354, 18355, 18483], [18355, 18484, 18483], [18355, 18356, 18485], [18355, 18485, 18484], [18356, 18357, 18485], [18357, 18486, 18485], [18357, 18358, 18487], [18357, 18487, 18486], [18358, 18359, 18487], [18359, 18488, 18487], [18359, 18360, 18489], [18359, 18489, 18488], [18360, 18361, 18489], [18361, 18490, 18489], [18361, 18362, 18491], [18361, 18491, 18490], [18362, 18363, 18491], [18363, 18492, 18491], [18364, 18365, 18493], [18365, 18494, 18493], [18365, 18366, 18495], [18365, 18495, 18494], [18366, 18367, 18495], [18367, 18496, 18495], [18367, 18368, 18497], [18367, 18497, 18496], [18368, 18369, 18497], [18369, 18498, 18497], [18369, 18370, 18499], [18369, 18499, 18498], [18370, 18371, 18499], [18371, 18500, 18499], [18371, 18372, 18501], [18371, 18501, 18500], [18372, 18373, 18501], [18373, 18502, 18501], [18373, 18374, 18503], [18373, 18503, 18502], [18374, 18375, 18503], [18375, 18504, 18503], [18375, 18376, 18505], [18375, 18505, 18504], 
[18376, 18377, 18505], [18377, 18506, 18505], [18377, 18378, 18507], [18377, 18507, 18506], [18378, 18379, 18507], [18379, 18508, 18507], [18379, 18380, 18509], [18379, 18509, 18508], [18380, 18381, 18509], [18381, 18510, 18509], [18381, 18382, 18511], [18381, 18511, 18510], [18382, 18383, 18511], [18383, 18512, 18511], [18383, 18384, 18513], [18383, 18513, 18512], [18384, 18385, 18513], [18385, 18514, 18513], [18385, 18386, 18515], [18385, 18515, 18514], [18386, 18387, 18515], [18387, 18516, 18515], [18387, 18388, 18517], [18387, 18517, 18516], [18388, 18389, 18517], [18389, 18518, 18517], [18389, 18390, 18519], [18389, 18519, 18518], [18390, 18391, 18519], [18391, 18520, 18519], [18391, 18392, 18521], [18391, 18521, 18520], [18392, 18393, 18521], [18393, 18522, 18521], [18393, 18394, 18523], [18393, 18523, 18522], [18394, 18395, 18523], [18395, 18524, 18523], [18395, 18396, 18525], [18395, 18525, 18524], [18396, 18397, 18525], [18397, 18526, 18525], [18397, 18398, 18527], [18397, 18527, 18526], [18398, 18399, 18527], [18399, 18528, 18527], [18399, 18400, 18529], [18399, 18529, 18528], [18400, 18401, 18529], [18401, 18530, 18529], [18401, 18402, 18531], [18401, 18531, 18530], [18402, 18403, 18531], [18403, 18532, 18531], [18403, 18404, 18533], [18403, 18533, 18532], [18404, 18405, 18533], [18405, 18534, 18533], [18405, 18406, 18535], [18405, 18535, 18534], [18406, 18407, 18535], [18407, 18536, 18535], [18407, 18408, 18537], [18407, 18537, 18536], [18408, 18409, 18537], [18409, 18538, 18537], [18409, 18410, 18539], [18409, 18539, 18538], [18410, 18411, 18539], [18411, 18540, 18539], [18411, 18412, 18541], [18411, 18541, 18540], [18412, 18413, 18541], [18413, 18542, 18541], [18413, 18414, 18543], [18413, 18543, 18542], [18414, 18415, 18543], [18415, 18544, 18543], [18415, 18416, 18545], [18415, 18545, 18544], [18416, 18417, 18545], [18417, 18546, 18545], [18417, 18418, 18547], [18417, 18547, 18546], [18418, 18419, 18547], [18419, 18548, 18547], [18419, 18420, 18549], [18419, 18549, 18548], [18420, 18421, 18549], [18421, 18550, 18549], [18421, 18422, 18551], [18421, 18551, 18550], [18422, 18423, 18551], [18423, 18552, 18551], [18423, 18424, 18553], [18423, 18553, 18552], [18424, 18425, 18553], [18425, 18554, 18553], [18425, 18426, 18555], [18425, 18555, 18554], [18426, 18427, 18555], [18427, 18556, 18555], [18427, 18428, 18557], [18427, 18557, 18556], [18428, 18429, 18557], [18429, 18558, 18557], [18429, 18430, 18559], [18429, 18559, 18558], [18430, 18431, 18559], [18431, 18560, 18559], [18431, 18432, 18561], [18431, 18561, 18560], [18432, 18433, 18561], [18433, 18562, 18561], [18433, 18434, 18563], [18433, 18563, 18562], [18434, 18435, 18563], [18435, 18564, 18563], [18435, 18436, 18565], [18435, 18565, 18564], [18436, 18437, 18565], [18437, 18566, 18565], [18437, 18438, 18567], [18437, 18567, 18566], [18438, 18439, 18567], [18439, 18568, 18567], [18439, 18440, 18569], [18439, 18569, 18568], [18440, 18441, 18569], [18441, 18570, 18569], [18441, 18442, 18571], [18441, 18571, 18570], [18442, 18443, 18571], [18443, 18572, 18571], [18443, 18444, 18573], [18443, 18573, 18572], [18444, 18445, 18573], [18445, 18574, 18573], [18445, 18446, 18575], [18445, 18575, 18574], [18446, 18447, 18575], [18447, 18576, 18575], [18447, 18448, 18577], [18447, 18577, 18576], [18448, 18449, 18577], [18449, 18578, 18577], [18449, 18450, 18579], [18449, 18579, 18578], [18450, 18451, 18579], [18451, 18580, 18579], [18451, 18452, 18581], [18451, 18581, 18580], [18452, 18453, 18581], [18453, 18582, 18581], [18453, 
18454, 18583], [18453, 18583, 18582], [18454, 18455, 18583], [18455, 18584, 18583], [18455, 18456, 18585], [18455, 18585, 18584], [18456, 18457, 18585], [18457, 18586, 18585], [18457, 18458, 18587], [18457, 18587, 18586], [18458, 18459, 18587], [18459, 18588, 18587], [18459, 18460, 18589], [18459, 18589, 18588], [18460, 18461, 18589], [18461, 18590, 18589], [18461, 18462, 18591], [18461, 18591, 18590], [18462, 18463, 18591], [18463, 18592, 18591], [18463, 18464, 18593], [18463, 18593, 18592], [18464, 18465, 18593], [18465, 18594, 18593], [18465, 18466, 18595], [18465, 18595, 18594], [18466, 18467, 18595], [18467, 18596, 18595], [18467, 18468, 18597], [18467, 18597, 18596], [18468, 18469, 18597], [18469, 18598, 18597], [18469, 18470, 18599], [18469, 18599, 18598], [18470, 18471, 18599], [18471, 18600, 18599], [18471, 18472, 18601], [18471, 18601, 18600], [18472, 18473, 18601], [18473, 18602, 18601], [18473, 18474, 18603], [18473, 18603, 18602], [18474, 18475, 18603], [18475, 18604, 18603], [18475, 18476, 18605], [18475, 18605, 18604], [18476, 18477, 18605], [18477, 18606, 18605], [18477, 18478, 18607], [18477, 18607, 18606], [18478, 18479, 18607], [18479, 18608, 18607], [18479, 18480, 18609], [18479, 18609, 18608], [18480, 18481, 18609], [18481, 18610, 18609], [18481, 18482, 18611], [18481, 18611, 18610], [18482, 18483, 18611], [18483, 18612, 18611], [18483, 18484, 18613], [18483, 18613, 18612], [18484, 18485, 18613], [18485, 18614, 18613], [18485, 18486, 18615], [18485, 18615, 18614], [18486, 18487, 18615], [18487, 18616, 18615], [18487, 18488, 18617], [18487, 18617, 18616], [18488, 18489, 18617], [18489, 18618, 18617], [18489, 18490, 18619], [18489, 18619, 18618], [18490, 18491, 18619], [18491, 18620, 18619], [18491, 18492, 18621], [18491, 18621, 18620], [18493, 18494, 18623], [18493, 18623, 18622], [18494, 18495, 18623], [18495, 18624, 18623], [18495, 18496, 18625], [18495, 18625, 18624], [18496, 18497, 18625], [18497, 18626, 18625], [18497, 18498, 18627], [18497, 18627, 18626], [18498, 18499, 18627], [18499, 18628, 18627], [18499, 18500, 18629], [18499, 18629, 18628], [18500, 18501, 18629], [18501, 18630, 18629], [18501, 18502, 18631], [18501, 18631, 18630], [18502, 18503, 18631], [18503, 18632, 18631], [18503, 18504, 18633], [18503, 18633, 18632], [18504, 18505, 18633], [18505, 18634, 18633], [18505, 18506, 18635], [18505, 18635, 18634], [18506, 18507, 18635], [18507, 18636, 18635], [18507, 18508, 18637], [18507, 18637, 18636], [18508, 18509, 18637], [18509, 18638, 18637], [18509, 18510, 18639], [18509, 18639, 18638], [18510, 18511, 18639], [18511, 18640, 18639], [18511, 18512, 18641], [18511, 18641, 18640], [18512, 18513, 18641], [18513, 18642, 18641], [18513, 18514, 18643], [18513, 18643, 18642], [18514, 18515, 18643], [18515, 18644, 18643], [18515, 18516, 18645], [18515, 18645, 18644], [18516, 18517, 18645], [18517, 18646, 18645], [18517, 18518, 18647], [18517, 18647, 18646], [18518, 18519, 18647], [18519, 18648, 18647], [18519, 18520, 18649], [18519, 18649, 18648], [18520, 18521, 18649], [18521, 18650, 18649], [18521, 18522, 18651], [18521, 18651, 18650], [18522, 18523, 18651], [18523, 18652, 18651], [18523, 18524, 18653], [18523, 18653, 18652], [18524, 18525, 18653], [18525, 18654, 18653], [18525, 18526, 18655], [18525, 18655, 18654], [18526, 18527, 18655], [18527, 18656, 18655], [18527, 18528, 18657], [18527, 18657, 18656], [18528, 18529, 18657], [18529, 18658, 18657], [18529, 18530, 18659], [18529, 18659, 18658], [18530, 18531, 18659], [18531, 18660, 18659], [18531, 18532, 
18661], [18531, 18661, 18660], [18532, 18533, 18661], [18533, 18662, 18661], [18533, 18534, 18663], [18533, 18663, 18662], [18534, 18535, 18663], [18535, 18664, 18663], [18535, 18536, 18665], [18535, 18665, 18664], [18536, 18537, 18665], [18537, 18666, 18665], [18537, 18538, 18667], [18537, 18667, 18666], [18538, 18539, 18667], [18539, 18668, 18667], [18539, 18540, 18669], [18539, 18669, 18668], [18540, 18541, 18669], [18541, 18670, 18669], [18541, 18542, 18671], [18541, 18671, 18670], [18542, 18543, 18671], [18543, 18672, 18671], [18543, 18544, 18673], [18543, 18673, 18672], [18544, 18545, 18673], [18545, 18674, 18673], [18545, 18546, 18675], [18545, 18675, 18674], [18546, 18547, 18675], [18547, 18676, 18675], [18547, 18548, 18677], [18547, 18677, 18676], [18548, 18549, 18677], [18549, 18678, 18677], [18549, 18550, 18679], [18549, 18679, 18678], [18550, 18551, 18679], [18551, 18680, 18679], [18551, 18552, 18681], [18551, 18681, 18680], [18552, 18553, 18681], [18553, 18682, 18681], [18553, 18554, 18683], [18553, 18683, 18682], [18554, 18555, 18683], [18555, 18684, 18683], [18555, 18556, 18685], [18555, 18685, 18684], [18556, 18557, 18685], [18557, 18686, 18685], [18557, 18558, 18687], [18557, 18687, 18686], [18558, 18559, 18687], [18559, 18688, 18687], [18559, 18560, 18689], [18559, 18689, 18688], [18560, 18561, 18689], [18561, 18690, 18689], [18561, 18562, 18691], [18561, 18691, 18690], [18562, 18563, 18691], [18563, 18692, 18691], [18563, 18564, 18693], [18563, 18693, 18692], [18564, 18565, 18693], [18565, 18694, 18693], [18565, 18566, 18695], [18565, 18695, 18694], [18566, 18567, 18695], [18567, 18696, 18695], [18567, 18568, 18697], [18567, 18697, 18696], [18568, 18569, 18697], [18569, 18698, 18697], [18569, 18570, 18699], [18569, 18699, 18698], [18570, 18571, 18699], [18571, 18700, 18699], [18571, 18572, 18701], [18571, 18701, 18700], [18572, 18573, 18701], [18573, 18702, 18701], [18573, 18574, 18703], [18573, 18703, 18702], [18574, 18575, 18703], [18575, 18704, 18703], [18575, 18576, 18705], [18575, 18705, 18704], [18576, 18577, 18705], [18577, 18706, 18705], [18577, 18578, 18707], [18577, 18707, 18706], [18578, 18579, 18707], [18579, 18708, 18707], [18579, 18580, 18709], [18579, 18709, 18708], [18580, 18581, 18709], [18581, 18710, 18709], [18581, 18582, 18711], [18581, 18711, 18710], [18582, 18583, 18711], [18583, 18712, 18711], [18583, 18584, 18713], [18583, 18713, 18712], [18584, 18585, 18713], [18585, 18714, 18713], [18585, 18586, 18715], [18585, 18715, 18714], [18586, 18587, 18715], [18587, 18716, 18715], [18587, 18588, 18717], [18587, 18717, 18716], [18588, 18589, 18717], [18589, 18718, 18717], [18589, 18590, 18719], [18589, 18719, 18718], [18590, 18591, 18719], [18591, 18720, 18719], [18591, 18592, 18721], [18591, 18721, 18720], [18592, 18593, 18721], [18593, 18722, 18721], [18593, 18594, 18723], [18593, 18723, 18722], [18594, 18595, 18723], [18595, 18724, 18723], [18595, 18596, 18725], [18595, 18725, 18724], [18596, 18597, 18725], [18597, 18726, 18725], [18597, 18598, 18727], [18597, 18727, 18726], [18598, 18599, 18727], [18599, 18728, 18727], [18599, 18600, 18729], [18599, 18729, 18728], [18600, 18601, 18729], [18601, 18730, 18729], [18601, 18602, 18731], [18601, 18731, 18730], [18602, 18603, 18731], [18603, 18732, 18731], [18603, 18604, 18733], [18603, 18733, 18732], [18604, 18605, 18733], [18605, 18734, 18733], [18605, 18606, 18735], [18605, 18735, 18734], [18606, 18607, 18735], [18607, 18736, 18735], [18607, 18608, 18737], [18607, 18737, 18736], [18608, 18609, 18737], 
[18609, 18738, 18737], [18609, 18610, 18739], [18609, 18739, 18738], [18610, 18611, 18739], [18611, 18740, 18739], [18611, 18612, 18741], [18611, 18741, 18740], [18612, 18613, 18741], [18613, 18742, 18741], [18613, 18614, 18743], [18613, 18743, 18742], [18614, 18615, 18743], [18615, 18744, 18743], [18615, 18616, 18745], [18615, 18745, 18744], [18616, 18617, 18745], [18617, 18746, 18745], [18617, 18618, 18747], [18617, 18747, 18746], [18618, 18619, 18747], [18619, 18748, 18747], [18619, 18620, 18749], [18619, 18749, 18748], [18620, 18621, 18749], [18621, 18750, 18749], [18622, 18623, 18751], [18623, 18752, 18751], [18623, 18624, 18753], [18623, 18753, 18752], [18624, 18625, 18753], [18625, 18754, 18753], [18625, 18626, 18755], [18625, 18755, 18754], [18626, 18627, 18755], [18627, 18756, 18755], [18627, 18628, 18757], [18627, 18757, 18756], [18628, 18629, 18757], [18629, 18758, 18757], [18629, 18630, 18759], [18629, 18759, 18758], [18630, 18631, 18759], [18631, 18760, 18759], [18631, 18632, 18761], [18631, 18761, 18760], [18632, 18633, 18761], [18633, 18762, 18761], [18633, 18634, 18763], [18633, 18763, 18762], [18634, 18635, 18763], [18635, 18764, 18763], [18635, 18636, 18765], [18635, 18765, 18764], [18636, 18637, 18765], [18637, 18766, 18765], [18637, 18638, 18767], [18637, 18767, 18766], [18638, 18639, 18767], [18639, 18768, 18767], [18639, 18640, 18769], [18639, 18769, 18768], [18640, 18641, 18769], [18641, 18770, 18769], [18641, 18642, 18771], [18641, 18771, 18770], [18642, 18643, 18771], [18643, 18772, 18771], [18643, 18644, 18773], [18643, 18773, 18772], [18644, 18645, 18773], [18645, 18774, 18773], [18645, 18646, 18775], [18645, 18775, 18774], [18646, 18647, 18775], [18647, 18776, 18775], [18647, 18648, 18777], [18647, 18777, 18776], [18648, 18649, 18777], [18649, 18778, 18777], [18649, 18650, 18779], [18649, 18779, 18778], [18650, 18651, 18779], [18651, 18780, 18779], [18651, 18652, 18781], [18651, 18781, 18780], [18652, 18653, 18781], [18653, 18782, 18781], [18653, 18654, 18783], [18653, 18783, 18782], [18654, 18655, 18783], [18655, 18784, 18783], [18655, 18656, 18785], [18655, 18785, 18784], [18656, 18657, 18785], [18657, 18786, 18785], [18657, 18658, 18787], [18657, 18787, 18786], [18658, 18659, 18787], [18659, 18788, 18787], [18659, 18660, 18789], [18659, 18789, 18788], [18660, 18661, 18789], [18661, 18790, 18789], [18661, 18662, 18791], [18661, 18791, 18790], [18662, 18663, 18791], [18663, 18792, 18791], [18663, 18664, 18793], [18663, 18793, 18792], [18664, 18665, 18793], [18665, 18794, 18793], [18665, 18666, 18795], [18665, 18795, 18794], [18666, 18667, 18795], [18667, 18796, 18795], [18667, 18668, 18797], [18667, 18797, 18796], [18668, 18669, 18797], [18669, 18798, 18797], [18669, 18670, 18799], [18669, 18799, 18798], [18670, 18671, 18799], [18671, 18800, 18799], [18671, 18672, 18801], [18671, 18801, 18800], [18672, 18673, 18801], [18673, 18802, 18801], [18673, 18674, 18803], [18673, 18803, 18802], [18674, 18675, 18803], [18675, 18804, 18803], [18675, 18676, 18805], [18675, 18805, 18804], [18676, 18677, 18805], [18677, 18806, 18805], [18677, 18678, 18807], [18677, 18807, 18806], [18678, 18679, 18807], [18679, 18808, 18807], [18679, 18680, 18809], [18679, 18809, 18808], [18680, 18681, 18809], [18681, 18810, 18809], [18681, 18682, 18811], [18681, 18811, 18810], [18682, 18683, 18811], [18683, 18812, 18811], [18683, 18684, 18813], [18683, 18813, 18812], [18684, 18685, 18813], [18685, 18814, 18813], [18685, 18686, 18815], [18685, 18815, 18814], [18686, 18687, 18815], [18687, 
18816, 18815], [18687, 18688, 18817], [18687, 18817, 18816], [18688, 18689, 18817], [18689, 18818, 18817], [18689, 18690, 18819], [18689, 18819, 18818], [18690, 18691, 18819], [18691, 18820, 18819], [18691, 18692, 18821], [18691, 18821, 18820], [18692, 18693, 18821], [18693, 18822, 18821], [18693, 18694, 18823], [18693, 18823, 18822], [18694, 18695, 18823], [18695, 18824, 18823], [18695, 18696, 18825], [18695, 18825, 18824], [18696, 18697, 18825], [18697, 18826, 18825], [18697, 18698, 18827], [18697, 18827, 18826], [18698, 18699, 18827], [18699, 18828, 18827], [18699, 18700, 18829], [18699, 18829, 18828], [18700, 18701, 18829], [18701, 18830, 18829], [18701, 18702, 18831], [18701, 18831, 18830], [18702, 18703, 18831], [18703, 18832, 18831], [18703, 18704, 18833], [18703, 18833, 18832], [18704, 18705, 18833], [18705, 18834, 18833], [18705, 18706, 18835], [18705, 18835, 18834], [18706, 18707, 18835], [18707, 18836, 18835], [18707, 18708, 18837], [18707, 18837, 18836], [18708, 18709, 18837], [18709, 18838, 18837], [18709, 18710, 18839], [18709, 18839, 18838], [18710, 18711, 18839], [18711, 18840, 18839], [18711, 18712, 18841], [18711, 18841, 18840], [18712, 18713, 18841], [18713, 18842, 18841], [18713, 18714, 18843], [18713, 18843, 18842], [18714, 18715, 18843], [18715, 18844, 18843], [18715, 18716, 18845], [18715, 18845, 18844], [18716, 18717, 18845], [18717, 18846, 18845], [18717, 18718, 18847], [18717, 18847, 18846], [18718, 18719, 18847], [18719, 18848, 18847], [18719, 18720, 18849], [18719, 18849, 18848], [18720, 18721, 18849], [18721, 18850, 18849], [18721, 18722, 18851], [18721, 18851, 18850], [18722, 18723, 18851], [18723, 18852, 18851], [18723, 18724, 18853], [18723, 18853, 18852], [18724, 18725, 18853], [18725, 18854, 18853], [18725, 18726, 18855], [18725, 18855, 18854], [18726, 18727, 18855], [18727, 18856, 18855], [18727, 18728, 18857], [18727, 18857, 18856], [18728, 18729, 18857], [18729, 18858, 18857], [18729, 18730, 18859], [18729, 18859, 18858], [18730, 18731, 18859], [18731, 18860, 18859], [18731, 18732, 18861], [18731, 18861, 18860], [18732, 18733, 18861], [18733, 18862, 18861], [18733, 18734, 18863], [18733, 18863, 18862], [18734, 18735, 18863], [18735, 18864, 18863], [18735, 18736, 18865], [18735, 18865, 18864], [18736, 18737, 18865], [18737, 18866, 18865], [18737, 18738, 18867], [18737, 18867, 18866], [18738, 18739, 18867], [18739, 18868, 18867], [18739, 18740, 18869], [18739, 18869, 18868], [18740, 18741, 18869], [18741, 18870, 18869], [18741, 18742, 18871], [18741, 18871, 18870], [18742, 18743, 18871], [18743, 18872, 18871], [18743, 18744, 18873], [18743, 18873, 18872], [18744, 18745, 18873], [18745, 18874, 18873], [18745, 18746, 18875], [18745, 18875, 18874], [18746, 18747, 18875], [18747, 18876, 18875], [18747, 18748, 18877], [18747, 18877, 18876], [18748, 18749, 18877], [18749, 18878, 18877], [18749, 18750, 18879], [18749, 18879, 18878], [18751, 18752, 18881], [18751, 18881, 18880], [18752, 18753, 18881], [18753, 18882, 18881], [18753, 18754, 18883], [18753, 18883, 18882], [18754, 18755, 18883], [18755, 18884, 18883], [18755, 18756, 18885], [18755, 18885, 18884], [18756, 18757, 18885], [18757, 18886, 18885], [18757, 18758, 18887], [18757, 18887, 18886], [18758, 18759, 18887], [18759, 18888, 18887], [18759, 18760, 18889], [18759, 18889, 18888], [18760, 18761, 18889], [18761, 18890, 18889], [18761, 18762, 18891], [18761, 18891, 18890], [18762, 18763, 18891], [18763, 18892, 18891], [18763, 18764, 18893], [18763, 18893, 18892], [18764, 18765, 18893], [18765, 18894, 
18893], [18765, 18766, 18895], [18765, 18895, 18894], [18766, 18767, 18895], [18767, 18896, 18895], [18767, 18768, 18897], [18767, 18897, 18896], [18768, 18769, 18897], [18769, 18898, 18897], [18769, 18770, 18899], [18769, 18899, 18898], [18770, 18771, 18899], [18771, 18900, 18899], [18771, 18772, 18901], [18771, 18901, 18900], [18772, 18773, 18901], [18773, 18902, 18901], [18773, 18774, 18903], [18773, 18903, 18902], [18774, 18775, 18903], [18775, 18904, 18903], [18775, 18776, 18905], [18775, 18905, 18904], [18776, 18777, 18905], [18777, 18906, 18905], [18777, 18778, 18907], [18777, 18907, 18906], [18778, 18779, 18907], [18779, 18908, 18907], [18779, 18780, 18909], [18779, 18909, 18908], [18780, 18781, 18909], [18781, 18910, 18909], [18781, 18782, 18911], [18781, 18911, 18910], [18782, 18783, 18911], [18783, 18912, 18911], [18783, 18784, 18913], [18783, 18913, 18912], [18784, 18785, 18913], [18785, 18914, 18913], [18785, 18786, 18915], [18785, 18915, 18914], [18786, 18787, 18915], [18787, 18916, 18915], [18787, 18788, 18917], [18787, 18917, 18916], [18788, 18789, 18917], [18789, 18918, 18917], [18789, 18790, 18919], [18789, 18919, 18918], [18790, 18791, 18919], [18791, 18920, 18919], [18791, 18792, 18921], [18791, 18921, 18920], [18792, 18793, 18921], [18793, 18922, 18921], [18793, 18794, 18923], [18793, 18923, 18922], [18794, 18795, 18923], [18795, 18924, 18923], [18795, 18796, 18925], [18795, 18925, 18924], [18796, 18797, 18925], [18797, 18926, 18925], [18797, 18798, 18927], [18797, 18927, 18926], [18798, 18799, 18927], [18799, 18928, 18927], [18799, 18800, 18929], [18799, 18929, 18928], [18800, 18801, 18929], [18801, 18930, 18929], [18801, 18802, 18931], [18801, 18931, 18930], [18802, 18803, 18931], [18803, 18932, 18931], [18803, 18804, 18933], [18803, 18933, 18932], [18804, 18805, 18933], [18805, 18934, 18933], [18805, 18806, 18935], [18805, 18935, 18934], [18806, 18807, 18935], [18807, 18936, 18935], [18807, 18808, 18937], [18807, 18937, 18936], [18808, 18809, 18937], [18809, 18938, 18937], [18809, 18810, 18939], [18809, 18939, 18938], [18810, 18811, 18939], [18811, 18940, 18939], [18811, 18812, 18941], [18811, 18941, 18940], [18812, 18813, 18941], [18813, 18942, 18941], [18813, 18814, 18943], [18813, 18943, 18942], [18814, 18815, 18943], [18815, 18944, 18943], [18815, 18816, 18945], [18815, 18945, 18944], [18816, 18817, 18945], [18817, 18946, 18945], [18817, 18818, 18947], [18817, 18947, 18946], [18818, 18819, 18947], [18819, 18948, 18947], [18819, 18820, 18949], [18819, 18949, 18948], [18820, 18821, 18949], [18821, 18950, 18949], [18821, 18822, 18951], [18821, 18951, 18950], [18822, 18823, 18951], [18823, 18952, 18951], [18823, 18824, 18953], [18823, 18953, 18952], [18824, 18825, 18953], [18825, 18954, 18953], [18825, 18826, 18955], [18825, 18955, 18954], [18826, 18827, 18955], [18827, 18956, 18955], [18827, 18828, 18957], [18827, 18957, 18956], [18828, 18829, 18957], [18829, 18958, 18957], [18829, 18830, 18959], [18829, 18959, 18958], [18830, 18831, 18959], [18831, 18960, 18959], [18831, 18832, 18961], [18831, 18961, 18960], [18832, 18833, 18961], [18833, 18962, 18961], [18833, 18834, 18963], [18833, 18963, 18962], [18834, 18835, 18963], [18835, 18964, 18963], [18835, 18836, 18965], [18835, 18965, 18964], [18836, 18837, 18965], [18837, 18966, 18965], [18837, 18838, 18967], [18837, 18967, 18966], [18838, 18839, 18967], [18839, 18968, 18967], [18839, 18840, 18969], [18839, 18969, 18968], [18840, 18841, 18969], [18841, 18970, 18969], [18841, 18842, 18971], [18841, 18971, 18970], 
[18842, 18843, 18971], [18843, 18972, 18971], [18843, 18844, 18973], [18843, 18973, 18972], [18844, 18845, 18973], [18845, 18974, 18973], [18845, 18846, 18975], [18845, 18975, 18974], [18846, 18847, 18975], [18847, 18976, 18975], [18847, 18848, 18977], [18847, 18977, 18976], [18848, 18849, 18977], [18849, 18978, 18977], [18849, 18850, 18979], [18849, 18979, 18978], [18850, 18851, 18979], [18851, 18980, 18979], [18851, 18852, 18981], [18851, 18981, 18980], [18852, 18853, 18981], [18853, 18982, 18981], [18853, 18854, 18983], [18853, 18983, 18982], [18854, 18855, 18983], [18855, 18984, 18983], [18855, 18856, 18985], [18855, 18985, 18984], [18856, 18857, 18985], [18857, 18986, 18985], [18857, 18858, 18987], [18857, 18987, 18986], [18858, 18859, 18987], [18859, 18988, 18987], [18859, 18860, 18989], [18859, 18989, 18988], [18860, 18861, 18989], [18861, 18990, 18989], [18861, 18862, 18991], [18861, 18991, 18990], [18862, 18863, 18991], [18863, 18992, 18991], [18863, 18864, 18993], [18863, 18993, 18992], [18864, 18865, 18993], [18865, 18994, 18993], [18865, 18866, 18995], [18865, 18995, 18994], [18866, 18867, 18995], [18867, 18996, 18995], [18867, 18868, 18997], [18867, 18997, 18996], [18868, 18869, 18997], [18869, 18998, 18997], [18869, 18870, 18999], [18869, 18999, 18998], [18870, 18871, 18999], [18871, 19000, 18999], [18871, 18872, 19001], [18871, 19001, 19000], [18872, 18873, 19001], [18873, 19002, 19001], [18873, 18874, 19003], [18873, 19003, 19002], [18874, 18875, 19003], [18875, 19004, 19003], [18875, 18876, 19005], [18875, 19005, 19004], [18876, 18877, 19005], [18877, 19006, 19005], [18877, 18878, 19007], [18877, 19007, 19006], [18878, 18879, 19007], [18879, 19008, 19007], [18880, 18881, 19009], [18881, 19010, 19009], [18881, 18882, 19011], [18881, 19011, 19010], [18882, 18883, 19011], [18883, 19012, 19011], [18883, 18884, 19013], [18883, 19013, 19012], [18884, 18885, 19013], [18885, 19014, 19013], [18885, 18886, 19015], [18885, 19015, 19014], [18886, 18887, 19015], [18887, 19016, 19015], [18887, 18888, 19017], [18887, 19017, 19016], [18888, 18889, 19017], [18889, 19018, 19017], [18889, 18890, 19019], [18889, 19019, 19018], [18890, 18891, 19019], [18891, 19020, 19019], [18891, 18892, 19021], [18891, 19021, 19020], [18892, 18893, 19021], [18893, 19022, 19021], [18893, 18894, 19023], [18893, 19023, 19022], [18894, 18895, 19023], [18895, 19024, 19023], [18895, 18896, 19025], [18895, 19025, 19024], [18896, 18897, 19025], [18897, 19026, 19025], [18897, 18898, 19027], [18897, 19027, 19026], [18898, 18899, 19027], [18899, 19028, 19027], [18899, 18900, 19029], [18899, 19029, 19028], [18900, 18901, 19029], [18901, 19030, 19029], [18901, 18902, 19031], [18901, 19031, 19030], [18902, 18903, 19031], [18903, 19032, 19031], [18903, 18904, 19033], [18903, 19033, 19032], [18904, 18905, 19033], [18905, 19034, 19033], [18905, 18906, 19035], [18905, 19035, 19034], [18906, 18907, 19035], [18907, 19036, 19035], [18907, 18908, 19037], [18907, 19037, 19036], [18908, 18909, 19037], [18909, 19038, 19037], [18909, 18910, 19039], [18909, 19039, 19038], [18910, 18911, 19039], [18911, 19040, 19039], [18911, 18912, 19041], [18911, 19041, 19040], [18912, 18913, 19041], [18913, 19042, 19041], [18913, 18914, 19043], [18913, 19043, 19042], [18914, 18915, 19043], [18915, 19044, 19043], [18915, 18916, 19045], [18915, 19045, 19044], [18916, 18917, 19045], [18917, 19046, 19045], [18917, 18918, 19047], [18917, 19047, 19046], [18918, 18919, 19047], [18919, 19048, 19047], [18919, 18920, 19049], [18919, 19049, 19048], [18920, 
18921, 19049], [18921, 19050, 19049], [18921, 18922, 19051], [18921, 19051, 19050], [18922, 18923, 19051], [18923, 19052, 19051], [18923, 18924, 19053], [18923, 19053, 19052], [18924, 18925, 19053], [18925, 19054, 19053], [18925, 18926, 19055], [18925, 19055, 19054], [18926, 18927, 19055], [18927, 19056, 19055], [18927, 18928, 19057], [18927, 19057, 19056], [18928, 18929, 19057], [18929, 19058, 19057], [18929, 18930, 19059], [18929, 19059, 19058], [18930, 18931, 19059], [18931, 19060, 19059], [18931, 18932, 19061], [18931, 19061, 19060], [18932, 18933, 19061], [18933, 19062, 19061], [18933, 18934, 19063], [18933, 19063, 19062], [18934, 18935, 19063], [18935, 19064, 19063], [18935, 18936, 19065], [18935, 19065, 19064], [18936, 18937, 19065], [18937, 19066, 19065], [18937, 18938, 19067], [18937, 19067, 19066], [18938, 18939, 19067], [18939, 19068, 19067], [18939, 18940, 19069], [18939, 19069, 19068], [18940, 18941, 19069], [18941, 19070, 19069], [18941, 18942, 19071], [18941, 19071, 19070], [18942, 18943, 19071], [18943, 19072, 19071], [18943, 18944, 19073], [18943, 19073, 19072], [18944, 18945, 19073], [18945, 19074, 19073], [18945, 18946, 19075], [18945, 19075, 19074], [18946, 18947, 19075], [18947, 19076, 19075], [18947, 18948, 19077], [18947, 19077, 19076], [18948, 18949, 19077], [18949, 19078, 19077], [18949, 18950, 19079], [18949, 19079, 19078], [18950, 18951, 19079], [18951, 19080, 19079], [18951, 18952, 19081], [18951, 19081, 19080], [18952, 18953, 19081], [18953, 19082, 19081], [18953, 18954, 19083], [18953, 19083, 19082], [18954, 18955, 19083], [18955, 19084, 19083], [18955, 18956, 19085], [18955, 19085, 19084], [18956, 18957, 19085], [18957, 19086, 19085], [18957, 18958, 19087], [18957, 19087, 19086], [18958, 18959, 19087], [18959, 19088, 19087], [18959, 18960, 19089], [18959, 19089, 19088], [18960, 18961, 19089], [18961, 19090, 19089], [18961, 18962, 19091], [18961, 19091, 19090], [18962, 18963, 19091], [18963, 19092, 19091], [18963, 18964, 19093], [18963, 19093, 19092], [18964, 18965, 19093], [18965, 19094, 19093], [18965, 18966, 19095], [18965, 19095, 19094], [18966, 18967, 19095], [18967, 19096, 19095], [18967, 18968, 19097], [18967, 19097, 19096], [18968, 18969, 19097], [18969, 19098, 19097], [18969, 18970, 19099], [18969, 19099, 19098], [18970, 18971, 19099], [18971, 19100, 19099], [18971, 18972, 19101], [18971, 19101, 19100], [18972, 18973, 19101], [18973, 19102, 19101], [18973, 18974, 19103], [18973, 19103, 19102], [18974, 18975, 19103], [18975, 19104, 19103], [18975, 18976, 19105], [18975, 19105, 19104], [18976, 18977, 19105], [18977, 19106, 19105], [18977, 18978, 19107], [18977, 19107, 19106], [18978, 18979, 19107], [18979, 19108, 19107], [18979, 18980, 19109], [18979, 19109, 19108], [18980, 18981, 19109], [18981, 19110, 19109], [18981, 18982, 19111], [18981, 19111, 19110], [18982, 18983, 19111], [18983, 19112, 19111], [18983, 18984, 19113], [18983, 19113, 19112], [18984, 18985, 19113], [18985, 19114, 19113], [18985, 18986, 19115], [18985, 19115, 19114], [18986, 18987, 19115], [18987, 19116, 19115], [18987, 18988, 19117], [18987, 19117, 19116], [18988, 18989, 19117], [18989, 19118, 19117], [18989, 18990, 19119], [18989, 19119, 19118], [18990, 18991, 19119], [18991, 19120, 19119], [18991, 18992, 19121], [18991, 19121, 19120], [18992, 18993, 19121], [18993, 19122, 19121], [18993, 18994, 19123], [18993, 19123, 19122], [18994, 18995, 19123], [18995, 19124, 19123], [18995, 18996, 19125], [18995, 19125, 19124], [18996, 18997, 19125], [18997, 19126, 19125], [18997, 18998, 
19127], [18997, 19127, 19126], [18998, 18999, 19127], [18999, 19128, 19127], [18999, 19000, 19129], [18999, 19129, 19128], [19000, 19001, 19129], [19001, 19130, 19129], [19001, 19002, 19131], [19001, 19131, 19130], [19002, 19003, 19131], [19003, 19132, 19131], [19003, 19004, 19133], [19003, 19133, 19132], [19004, 19005, 19133], [19005, 19134, 19133], [19005, 19006, 19135], [19005, 19135, 19134], [19006, 19007, 19135], [19007, 19136, 19135], [19007, 19008, 19137], [19007, 19137, 19136], [19009, 19010, 19139], [19009, 19139, 19138], [19010, 19011, 19139], [19011, 19140, 19139], [19011, 19012, 19141], [19011, 19141, 19140], [19012, 19013, 19141], [19013, 19142, 19141], [19013, 19014, 19143], [19013, 19143, 19142], [19014, 19015, 19143], [19015, 19144, 19143], [19015, 19016, 19145], [19015, 19145, 19144], [19016, 19017, 19145], [19017, 19146, 19145], [19017, 19018, 19147], [19017, 19147, 19146], [19018, 19019, 19147], [19019, 19148, 19147], [19019, 19020, 19149], [19019, 19149, 19148], [19020, 19021, 19149], [19021, 19150, 19149], [19021, 19022, 19151], [19021, 19151, 19150], [19022, 19023, 19151], [19023, 19152, 19151], [19023, 19024, 19153], [19023, 19153, 19152], [19024, 19025, 19153], [19025, 19154, 19153], [19025, 19026, 19155], [19025, 19155, 19154], [19026, 19027, 19155], [19027, 19156, 19155], [19027, 19028, 19157], [19027, 19157, 19156], [19028, 19029, 19157], [19029, 19158, 19157], [19029, 19030, 19159], [19029, 19159, 19158], [19030, 19031, 19159], [19031, 19160, 19159], [19031, 19032, 19161], [19031, 19161, 19160], [19032, 19033, 19161], [19033, 19162, 19161], [19033, 19034, 19163], [19033, 19163, 19162], [19034, 19035, 19163], [19035, 19164, 19163], [19035, 19036, 19165], [19035, 19165, 19164], [19036, 19037, 19165], [19037, 19166, 19165], [19037, 19038, 19167], [19037, 19167, 19166], [19038, 19039, 19167], [19039, 19168, 19167], [19039, 19040, 19169], [19039, 19169, 19168], [19040, 19041, 19169], [19041, 19170, 19169], [19041, 19042, 19171], [19041, 19171, 19170], [19042, 19043, 19171], [19043, 19172, 19171], [19043, 19044, 19173], [19043, 19173, 19172], [19044, 19045, 19173], [19045, 19174, 19173], [19045, 19046, 19175], [19045, 19175, 19174], [19046, 19047, 19175], [19047, 19176, 19175], [19047, 19048, 19177], [19047, 19177, 19176], [19048, 19049, 19177], [19049, 19178, 19177], [19049, 19050, 19179], [19049, 19179, 19178], [19050, 19051, 19179], [19051, 19180, 19179], [19051, 19052, 19181], [19051, 19181, 19180], [19052, 19053, 19181], [19053, 19182, 19181], [19053, 19054, 19183], [19053, 19183, 19182], [19054, 19055, 19183], [19055, 19184, 19183], [19055, 19056, 19185], [19055, 19185, 19184], [19056, 19057, 19185], [19057, 19186, 19185], [19057, 19058, 19187], [19057, 19187, 19186], [19058, 19059, 19187], [19059, 19188, 19187], [19059, 19060, 19189], [19059, 19189, 19188], [19060, 19061, 19189], [19061, 19190, 19189], [19061, 19062, 19191], [19061, 19191, 19190], [19062, 19063, 19191], [19063, 19192, 19191], [19063, 19064, 19193], [19063, 19193, 19192], [19064, 19065, 19193], [19065, 19194, 19193], [19065, 19066, 19195], [19065, 19195, 19194], [19066, 19067, 19195], [19067, 19196, 19195], [19067, 19068, 19197], [19067, 19197, 19196], [19068, 19069, 19197], [19069, 19198, 19197], [19069, 19070, 19199], [19069, 19199, 19198], [19070, 19071, 19199], [19071, 19200, 19199], [19071, 19072, 19201], [19071, 19201, 19200], [19072, 19073, 19201], [19073, 19202, 19201], [19073, 19074, 19203], [19073, 19203, 19202], [19074, 19075, 19203], [19075, 19204, 19203], [19075, 19076, 19205], 
[19075, 19205, 19204], [19076, 19077, 19205], [19077, 19206, 19205], [19077, 19078, 19207], [19077, 19207, 19206], [19078, 19079, 19207], [19079, 19208, 19207], [19079, 19080, 19209], [19079, 19209, 19208], [19080, 19081, 19209], [19081, 19210, 19209], [19081, 19082, 19211], [19081, 19211, 19210], [19082, 19083, 19211], [19083, 19212, 19211], [19083, 19084, 19213], [19083, 19213, 19212], [19084, 19085, 19213], [19085, 19214, 19213], [19085, 19086, 19215], [19085, 19215, 19214], [19086, 19087, 19215], [19087, 19216, 19215], [19087, 19088, 19217], [19087, 19217, 19216], [19088, 19089, 19217], [19089, 19218, 19217], [19089, 19090, 19219], [19089, 19219, 19218], [19090, 19091, 19219], [19091, 19220, 19219], [19091, 19092, 19221], [19091, 19221, 19220], [19092, 19093, 19221], [19093, 19222, 19221], [19093, 19094, 19223], [19093, 19223, 19222], [19094, 19095, 19223], [19095, 19224, 19223], [19095, 19096, 19225], [19095, 19225, 19224], [19096, 19097, 19225], [19097, 19226, 19225], [19097, 19098, 19227], [19097, 19227, 19226], [19098, 19099, 19227], [19099, 19228, 19227], [19099, 19100, 19229], [19099, 19229, 19228], [19100, 19101, 19229], [19101, 19230, 19229], [19101, 19102, 19231], [19101, 19231, 19230], [19102, 19103, 19231], [19103, 19232, 19231], [19103, 19104, 19233], [19103, 19233, 19232], [19104, 19105, 19233], [19105, 19234, 19233], [19105, 19106, 19235], [19105, 19235, 19234], [19106, 19107, 19235], [19107, 19236, 19235], [19107, 19108, 19237], [19107, 19237, 19236], [19108, 19109, 19237], [19109, 19238, 19237], [19109, 19110, 19239], [19109, 19239, 19238], [19110, 19111, 19239], [19111, 19240, 19239], [19111, 19112, 19241], [19111, 19241, 19240], [19112, 19113, 19241], [19113, 19242, 19241], [19113, 19114, 19243], [19113, 19243, 19242], [19114, 19115, 19243], [19115, 19244, 19243], [19115, 19116, 19245], [19115, 19245, 19244], [19116, 19117, 19245], [19117, 19246, 19245], [19117, 19118, 19247], [19117, 19247, 19246], [19118, 19119, 19247], [19119, 19248, 19247], [19119, 19120, 19249], [19119, 19249, 19248], [19120, 19121, 19249], [19121, 19250, 19249], [19121, 19122, 19251], [19121, 19251, 19250], [19122, 19123, 19251], [19123, 19252, 19251], [19123, 19124, 19253], [19123, 19253, 19252], [19124, 19125, 19253], [19125, 19254, 19253], [19125, 19126, 19255], [19125, 19255, 19254], [19126, 19127, 19255], [19127, 19256, 19255], [19127, 19128, 19257], [19127, 19257, 19256], [19128, 19129, 19257], [19129, 19258, 19257], [19129, 19130, 19259], [19129, 19259, 19258], [19130, 19131, 19259], [19131, 19260, 19259], [19131, 19132, 19261], [19131, 19261, 19260], [19132, 19133, 19261], [19133, 19262, 19261], [19133, 19134, 19263], [19133, 19263, 19262], [19134, 19135, 19263], [19135, 19264, 19263], [19135, 19136, 19265], [19135, 19265, 19264], [19136, 19137, 19265], [19137, 19266, 19265], [19138, 19139, 0], [19139, 1, 0], [19139, 19140, 2], [19139, 2, 1], [19140, 19141, 2], [19141, 3, 2], [19141, 19142, 4], [19141, 4, 3], [19142, 19143, 4], [19143, 5, 4], [19143, 19144, 6], [19143, 6, 5], [19144, 19145, 6], [19145, 7, 6], [19145, 19146, 8], [19145, 8, 7], [19146, 19147, 8], [19147, 9, 8], [19147, 19148, 10], [19147, 10, 9], [19148, 19149, 10], [19149, 11, 10], [19149, 19150, 12], [19149, 12, 11], [19150, 19151, 12], [19151, 13, 12], [19151, 19152, 14], [19151, 14, 13], [19152, 19153, 14], [19153, 15, 14], [19153, 19154, 16], [19153, 16, 15], [19154, 19155, 16], [19155, 17, 16], [19155, 19156, 18], [19155, 18, 17], [19156, 19157, 18], [19157, 19, 18], [19157, 19158, 20], [19157, 20, 19], [19158, 
19159, 20], [19159, 21, 20], [19159, 19160, 22], [19159, 22, 21], [19160, 19161, 22], [19161, 23, 22], [19161, 19162, 24], [19161, 24, 23], [19162, 19163, 24], [19163, 25, 24], [19163, 19164, 26], [19163, 26, 25], [19164, 19165, 26], [19165, 27, 26], [19165, 19166, 28], [19165, 28, 27], [19166, 19167, 28], [19167, 29, 28], [19167, 19168, 30], [19167, 30, 29], [19168, 19169, 30], [19169, 31, 30], [19169, 19170, 32], [19169, 32, 31], [19170, 19171, 32], [19171, 33, 32], [19171, 19172, 34], [19171, 34, 33], [19172, 19173, 34], [19173, 35, 34], [19173, 19174, 36], [19173, 36, 35], [19174, 19175, 36], [19175, 37, 36], [19175, 19176, 38], [19175, 38, 37], [19176, 19177, 38], [19177, 39, 38], [19177, 19178, 40], [19177, 40, 39], [19178, 19179, 40], [19179, 41, 40], [19179, 19180, 42], [19179, 42, 41], [19180, 19181, 42], [19181, 43, 42], [19181, 19182, 44], [19181, 44, 43], [19182, 19183, 44], [19183, 45, 44], [19183, 19184, 46], [19183, 46, 45], [19184, 19185, 46], [19185, 47, 46], [19185, 19186, 48], [19185, 48, 47], [19186, 19187, 48], [19187, 49, 48], [19187, 19188, 50], [19187, 50, 49], [19188, 19189, 50], [19189, 51, 50], [19189, 19190, 52], [19189, 52, 51], [19190, 19191, 52], [19191, 53, 52], [19191, 19192, 54], [19191, 54, 53], [19192, 19193, 54], [19193, 55, 54], [19193, 19194, 56], [19193, 56, 55], [19194, 19195, 56], [19195, 57, 56], [19195, 19196, 58], [19195, 58, 57], [19196, 19197, 58], [19197, 59, 58], [19197, 19198, 60], [19197, 60, 59], [19198, 19199, 60], [19199, 61, 60], [19199, 19200, 62], [19199, 62, 61], [19200, 19201, 62], [19201, 63, 62], [19201, 19202, 64], [19201, 64, 63], [19202, 19203, 64], [19203, 65, 64], [19203, 19204, 66], [19203, 66, 65], [19204, 19205, 66], [19205, 67, 66], [19205, 19206, 68], [19205, 68, 67], [19206, 19207, 68], [19207, 69, 68], [19207, 19208, 70], [19207, 70, 69], [19208, 19209, 70], [19209, 71, 70], [19209, 19210, 72], [19209, 72, 71], [19210, 19211, 72], [19211, 73, 72], [19211, 19212, 74], [19211, 74, 73], [19212, 19213, 74], [19213, 75, 74], [19213, 19214, 76], [19213, 76, 75], [19214, 19215, 76], [19215, 77, 76], [19215, 19216, 78], [19215, 78, 77], [19216, 19217, 78], [19217, 79, 78], [19217, 19218, 80], [19217, 80, 79], [19218, 19219, 80], [19219, 81, 80], [19219, 19220, 82], [19219, 82, 81], [19220, 19221, 82], [19221, 83, 82], [19221, 19222, 84], [19221, 84, 83], [19222, 19223, 84], [19223, 85, 84], [19223, 19224, 86], [19223, 86, 85], [19224, 19225, 86], [19225, 87, 86], [19225, 19226, 88], [19225, 88, 87], [19226, 19227, 88], [19227, 89, 88], [19227, 19228, 90], [19227, 90, 89], [19228, 19229, 90], [19229, 91, 90], [19229, 19230, 92], [19229, 92, 91], [19230, 19231, 92], [19231, 93, 92], [19231, 19232, 94], [19231, 94, 93], [19232, 19233, 94], [19233, 95, 94], [19233, 19234, 96], [19233, 96, 95], [19234, 19235, 96], [19235, 97, 96], [19235, 19236, 98], [19235, 98, 97], [19236, 19237, 98], [19237, 99, 98], [19237, 19238, 100], [19237, 100, 99], [19238, 19239, 100], [19239, 101, 100], [19239, 19240, 102], [19239, 102, 101], [19240, 19241, 102], [19241, 103, 102], [19241, 19242, 104], [19241, 104, 103], [19242, 19243, 104], [19243, 105, 104], [19243, 19244, 106], [19243, 106, 105], [19244, 19245, 106], [19245, 107, 106], [19245, 19246, 108], [19245, 108, 107], [19246, 19247, 108], [19247, 109, 108], [19247, 19248, 110], [19247, 110, 109], [19248, 19249, 110], [19249, 111, 110], [19249, 19250, 112], [19249, 112, 111], [19250, 19251, 112], [19251, 113, 112], [19251, 19252, 114], [19251, 114, 113], [19252, 19253, 114], [19253, 115, 114], 
[19253, 19254, 116], [19253, 116, 115], [19254, 19255, 116], [19255, 117, 116], [19255, 19256, 118], [19255, 118, 117], [19256, 19257, 118], [19257, 119, 118], [19257, 19258, 120], [19257, 120, 119], [19258, 19259, 120], [19259, 121, 120], [19259, 19260, 122], [19259, 122, 121], [19260, 19261, 122], [19261, 123, 122], [19261, 19262, 124], [19261, 124, 123], [19262, 19263, 124], [19263, 125, 124], [19263, 19264, 126], [19263, 126, 125], [19264, 19265, 126], [19265, 127, 126], [19265, 19266, 128], [19265, 128, 127], [19267, 19268, 19397], [19267, 19397, 19396], [19268, 19269, 19397], [19269, 19398, 19397], [19269, 19270, 19399], [19269, 19399, 19398], [19270, 19271, 19399], [19271, 19400, 19399], [19271, 19272, 19401], [19271, 19401, 19400], [19272, 19273, 19401], [19273, 19402, 19401], [19273, 19274, 19403], [19273, 19403, 19402], [19274, 19275, 19403], [19275, 19404, 19403], [19275, 19276, 19405], [19275, 19405, 19404], [19276, 19277, 19405], [19277, 19406, 19405], [19277, 19278, 19407], [19277, 19407, 19406], [19278, 19279, 19407], [19279, 19408, 19407], [19279, 19280, 19409], [19279, 19409, 19408], [19280, 19281, 19409], [19281, 19410, 19409], [19281, 19282, 19411], [19281, 19411, 19410], [19282, 19283, 19411], [19283, 19412, 19411], [19283, 19284, 19413], [19283, 19413, 19412], [19284, 19285, 19413], [19285, 19414, 19413], [19285, 19286, 19415], [19285, 19415, 19414], [19286, 19287, 19415], [19287, 19416, 19415], [19287, 19288, 19417], [19287, 19417, 19416], [19288, 19289, 19417], [19289, 19418, 19417], [19289, 19290, 19419], [19289, 19419, 19418], [19290, 19291, 19419], [19291, 19420, 19419], [19291, 19292, 19421], [19291, 19421, 19420], [19292, 19293, 19421], [19293, 19422, 19421], [19293, 19294, 19423], [19293, 19423, 19422], [19294, 19295, 19423], [19295, 19424, 19423], [19295, 19296, 19425], [19295, 19425, 19424], [19296, 19297, 19425], [19297, 19426, 19425], [19297, 19298, 19427], [19297, 19427, 19426], [19298, 19299, 19427], [19299, 19428, 19427], [19299, 19300, 19429], [19299, 19429, 19428], [19300, 19301, 19429], [19301, 19430, 19429], [19301, 19302, 19431], [19301, 19431, 19430], [19302, 19303, 19431], [19303, 19432, 19431], [19303, 19304, 19433], [19303, 19433, 19432], [19304, 19305, 19433], [19305, 19434, 19433], [19305, 19306, 19435], [19305, 19435, 19434], [19306, 19307, 19435], [19307, 19436, 19435], [19307, 19308, 19437], [19307, 19437, 19436], [19308, 19309, 19437], [19309, 19438, 19437], [19309, 19310, 19439], [19309, 19439, 19438], [19310, 19311, 19439], [19311, 19440, 19439], [19311, 19312, 19441], [19311, 19441, 19440], [19312, 19313, 19441], [19313, 19442, 19441], [19313, 19314, 19443], [19313, 19443, 19442], [19314, 19315, 19443], [19315, 19444, 19443], [19315, 19316, 19445], [19315, 19445, 19444], [19316, 19317, 19445], [19317, 19446, 19445], [19317, 19318, 19447], [19317, 19447, 19446], [19318, 19319, 19447], [19319, 19448, 19447], [19319, 19320, 19449], [19319, 19449, 19448], [19320, 19321, 19449], [19321, 19450, 19449], [19321, 19322, 19451], [19321, 19451, 19450], [19322, 19323, 19451], [19323, 19452, 19451], [19323, 19324, 19453], [19323, 19453, 19452], [19324, 19325, 19453], [19325, 19454, 19453], [19325, 19326, 19455], [19325, 19455, 19454], [19326, 19327, 19455], [19327, 19456, 19455], [19327, 19328, 19457], [19327, 19457, 19456], [19328, 19329, 19457], [19329, 19458, 19457], [19329, 19330, 19459], [19329, 19459, 19458], [19330, 19331, 19459], [19331, 19460, 19459], [19331, 19332, 19461], [19331, 19461, 19460], [19332, 19333, 19461], [19333, 19462, 
19461], [19333, 19334, 19463], [19333, 19463, 19462], [19334, 19335, 19463], [19335, 19464, 19463], [19335, 19336, 19465], [19335, 19465, 19464], [19336, 19337, 19465], [19337, 19466, 19465], [19337, 19338, 19467], [19337, 19467, 19466], [19338, 19339, 19467], [19339, 19468, 19467], [19339, 19340, 19469], [19339, 19469, 19468], [19340, 19341, 19469], [19341, 19470, 19469], [19341, 19342, 19471], [19341, 19471, 19470], [19342, 19343, 19471], [19343, 19472, 19471], [19343, 19344, 19473], [19343, 19473, 19472], [19344, 19345, 19473], [19345, 19474, 19473], [19345, 19346, 19475], [19345, 19475, 19474], [19346, 19347, 19475], [19347, 19476, 19475], [19347, 19348, 19477], [19347, 19477, 19476], [19348, 19349, 19477], [19349, 19478, 19477], [19349, 19350, 19479], [19349, 19479, 19478], [19350, 19351, 19479], [19351, 19480, 19479], [19351, 19352, 19481], [19351, 19481, 19480], [19352, 19353, 19481], [19353, 19482, 19481], [19353, 19354, 19483], [19353, 19483, 19482], [19354, 19355, 19483], [19355, 19484, 19483], [19355, 19356, 19485], [19355, 19485, 19484], [19356, 19357, 19485], [19357, 19486, 19485], [19357, 19358, 19487], [19357, 19487, 19486], [19358, 19359, 19487], [19359, 19488, 19487], [19359, 19360, 19489], [19359, 19489, 19488], [19360, 19361, 19489], [19361, 19490, 19489], [19361, 19362, 19491], [19361, 19491, 19490], [19362, 19363, 19491], [19363, 19492, 19491], [19363, 19364, 19493], [19363, 19493, 19492], [19364, 19365, 19493], [19365, 19494, 19493], [19365, 19366, 19495], [19365, 19495, 19494], [19366, 19367, 19495], [19367, 19496, 19495], [19367, 19368, 19497], [19367, 19497, 19496], [19368, 19369, 19497], [19369, 19498, 19497], [19369, 19370, 19499], [19369, 19499, 19498], [19370, 19371, 19499], [19371, 19500, 19499], [19371, 19372, 19501], [19371, 19501, 19500], [19372, 19373, 19501], [19373, 19502, 19501], [19373, 19374, 19503], [19373, 19503, 19502], [19374, 19375, 19503], [19375, 19504, 19503], [19375, 19376, 19505], [19375, 19505, 19504], [19376, 19377, 19505], [19377, 19506, 19505], [19377, 19378, 19507], [19377, 19507, 19506], [19378, 19379, 19507], [19379, 19508, 19507], [19379, 19380, 19509], [19379, 19509, 19508], [19380, 19381, 19509], [19381, 19510, 19509], [19381, 19382, 19511], [19381, 19511, 19510], [19382, 19383, 19511], [19383, 19512, 19511], [19383, 19384, 19513], [19383, 19513, 19512], [19384, 19385, 19513], [19385, 19514, 19513], [19385, 19386, 19515], [19385, 19515, 19514], [19386, 19387, 19515], [19387, 19516, 19515], [19387, 19388, 19517], [19387, 19517, 19516], [19388, 19389, 19517], [19389, 19518, 19517], [19389, 19390, 19519], [19389, 19519, 19518], [19390, 19391, 19519], [19391, 19520, 19519], [19391, 19392, 19521], [19391, 19521, 19520], [19392, 19393, 19521], [19393, 19522, 19521], [19393, 19394, 19523], [19393, 19523, 19522], [19394, 19395, 19523], [19395, 19524, 19523], [19396, 19397, 19525], [19397, 19526, 19525], [19397, 19398, 19527], [19397, 19527, 19526], [19398, 19399, 19527], [19399, 19528, 19527], [19399, 19400, 19529], [19399, 19529, 19528], [19400, 19401, 19529], [19401, 19530, 19529], [19401, 19402, 19531], [19401, 19531, 19530], [19402, 19403, 19531], [19403, 19532, 19531], [19403, 19404, 19533], [19403, 19533, 19532], [19404, 19405, 19533], [19405, 19534, 19533], [19405, 19406, 19535], [19405, 19535, 19534], [19406, 19407, 19535], [19407, 19536, 19535], [19407, 19408, 19537], [19407, 19537, 19536], [19408, 19409, 19537], [19409, 19538, 19537], [19409, 19410, 19539], [19409, 19539, 19538], [19410, 19411, 19539], [19411, 19540, 19539], 
[19411, 19412, 19541], [19411, 19541, 19540], [19412, 19413, 19541], [19413, 19542, 19541], [19413, 19414, 19543], [19413, 19543, 19542], [19414, 19415, 19543], [19415, 19544, 19543], [19415, 19416, 19545], [19415, 19545, 19544], [19416, 19417, 19545], [19417, 19546, 19545], [19417, 19418, 19547], [19417, 19547, 19546], [19418, 19419, 19547], [19419, 19548, 19547], [19419, 19420, 19549], [19419, 19549, 19548], [19420, 19421, 19549], [19421, 19550, 19549], [19421, 19422, 19551], [19421, 19551, 19550], [19422, 19423, 19551], [19423, 19552, 19551], [19423, 19424, 19553], [19423, 19553, 19552], [19424, 19425, 19553], [19425, 19554, 19553], [19425, 19426, 19555], [19425, 19555, 19554], [19426, 19427, 19555], [19427, 19556, 19555], [19427, 19428, 19557], [19427, 19557, 19556], [19428, 19429, 19557], [19429, 19558, 19557], [19429, 19430, 19559], [19429, 19559, 19558], [19430, 19431, 19559], [19431, 19560, 19559], [19431, 19432, 19561], [19431, 19561, 19560], [19432, 19433, 19561], [19433, 19562, 19561], [19433, 19434, 19563], [19433, 19563, 19562], [19434, 19435, 19563], [19435, 19564, 19563], [19435, 19436, 19565], [19435, 19565, 19564], [19436, 19437, 19565], [19437, 19566, 19565], [19437, 19438, 19567], [19437, 19567, 19566], [19438, 19439, 19567], [19439, 19568, 19567], [19439, 19440, 19569], [19439, 19569, 19568], [19440, 19441, 19569], [19441, 19570, 19569], [19441, 19442, 19571], [19441, 19571, 19570], [19442, 19443, 19571], [19443, 19572, 19571], [19443, 19444, 19573], [19443, 19573, 19572], [19444, 19445, 19573], [19445, 19574, 19573], [19445, 19446, 19575], [19445, 19575, 19574], [19446, 19447, 19575], [19447, 19576, 19575], [19447, 19448, 19577], [19447, 19577, 19576], [19448, 19449, 19577], [19449, 19578, 19577], [19449, 19450, 19579], [19449, 19579, 19578], [19450, 19451, 19579], [19451, 19580, 19579], [19451, 19452, 19581], [19451, 19581, 19580], [19452, 19453, 19581], [19453, 19582, 19581], [19453, 19454, 19583], [19453, 19583, 19582], [19454, 19455, 19583], [19455, 19584, 19583], [19455, 19456, 19585], [19455, 19585, 19584], [19456, 19457, 19585], [19457, 19586, 19585], [19457, 19458, 19587], [19457, 19587, 19586], [19458, 19459, 19587], [19459, 19588, 19587], [19459, 19460, 19589], [19459, 19589, 19588], [19460, 19461, 19589], [19461, 19590, 19589], [19461, 19462, 19591], [19461, 19591, 19590], [19462, 19463, 19591], [19463, 19592, 19591], [19463, 19464, 19593], [19463, 19593, 19592], [19464, 19465, 19593], [19465, 19594, 19593], [19465, 19466, 19595], [19465, 19595, 19594], [19466, 19467, 19595], [19467, 19596, 19595], [19467, 19468, 19597], [19467, 19597, 19596], [19468, 19469, 19597], [19469, 19598, 19597], [19469, 19470, 19599], [19469, 19599, 19598], [19470, 19471, 19599], [19471, 19600, 19599], [19471, 19472, 19601], [19471, 19601, 19600], [19472, 19473, 19601], [19473, 19602, 19601], [19473, 19474, 19603], [19473, 19603, 19602], [19474, 19475, 19603], [19475, 19604, 19603], [19475, 19476, 19605], [19475, 19605, 19604], [19476, 19477, 19605], [19477, 19606, 19605], [19477, 19478, 19607], [19477, 19607, 19606], [19478, 19479, 19607], [19479, 19608, 19607], [19479, 19480, 19609], [19479, 19609, 19608], [19480, 19481, 19609], [19481, 19610, 19609], [19481, 19482, 19611], [19481, 19611, 19610], [19482, 19483, 19611], [19483, 19612, 19611], [19483, 19484, 19613], [19483, 19613, 19612], [19484, 19485, 19613], [19485, 19614, 19613], [19485, 19486, 19615], [19485, 19615, 19614], [19486, 19487, 19615], [19487, 19616, 19615], [19487, 19488, 19617], [19487, 19617, 19616], [19488, 
19489, 19617], [19489, 19618, 19617], [19489, 19490, 19619], [19489, 19619, 19618], [19490, 19491, 19619], [19491, 19620, 19619], [19491, 19492, 19621], [19491, 19621, 19620], [19492, 19493, 19621], [19493, 19622, 19621], [19493, 19494, 19623], [19493, 19623, 19622], [19494, 19495, 19623], [19495, 19624, 19623], [19495, 19496, 19625], [19495, 19625, 19624], [19496, 19497, 19625], [19497, 19626, 19625], [19497, 19498, 19627], [19497, 19627, 19626], [19498, 19499, 19627], [19499, 19628, 19627], [19499, 19500, 19629], [19499, 19629, 19628], [19500, 19501, 19629], [19501, 19630, 19629], [19501, 19502, 19631], [19501, 19631, 19630], [19502, 19503, 19631], [19503, 19632, 19631], [19503, 19504, 19633], [19503, 19633, 19632], [19504, 19505, 19633], [19505, 19634, 19633], [19505, 19506, 19635], [19505, 19635, 19634], [19506, 19507, 19635], [19507, 19636, 19635], [19507, 19508, 19637], [19507, 19637, 19636], [19508, 19509, 19637], [19509, 19638, 19637], [19509, 19510, 19639], [19509, 19639, 19638], [19510, 19511, 19639], [19511, 19640, 19639], [19511, 19512, 19641], [19511, 19641, 19640], [19512, 19513, 19641], [19513, 19642, 19641], [19513, 19514, 19643], [19513, 19643, 19642], [19514, 19515, 19643], [19515, 19644, 19643], [19515, 19516, 19645], [19515, 19645, 19644], [19516, 19517, 19645], [19517, 19646, 19645], [19517, 19518, 19647], [19517, 19647, 19646], [19518, 19519, 19647], [19519, 19648, 19647], [19519, 19520, 19649], [19519, 19649, 19648], [19520, 19521, 19649], [19521, 19650, 19649], [19521, 19522, 19651], [19521, 19651, 19650], [19522, 19523, 19651], [19523, 19652, 19651], [19523, 19524, 19653], [19523, 19653, 19652], [19525, 19526, 19655], [19525, 19655, 19654], [19526, 19527, 19655], [19527, 19656, 19655], [19527, 19528, 19657], [19527, 19657, 19656], [19528, 19529, 19657], [19529, 19658, 19657], [19529, 19530, 19659], [19529, 19659, 19658], [19530, 19531, 19659], [19531, 19660, 19659], [19531, 19532, 19661], [19531, 19661, 19660], [19532, 19533, 19661], [19533, 19662, 19661], [19533, 19534, 19663], [19533, 19663, 19662], [19534, 19535, 19663], [19535, 19664, 19663], [19535, 19536, 19665], [19535, 19665, 19664], [19536, 19537, 19665], [19537, 19666, 19665], [19537, 19538, 19667], [19537, 19667, 19666], [19538, 19539, 19667], [19539, 19668, 19667], [19539, 19540, 19669], [19539, 19669, 19668], [19540, 19541, 19669], [19541, 19670, 19669], [19541, 19542, 19671], [19541, 19671, 19670], [19542, 19543, 19671], [19543, 19672, 19671], [19543, 19544, 19673], [19543, 19673, 19672], [19544, 19545, 19673], [19545, 19674, 19673], [19545, 19546, 19675], [19545, 19675, 19674], [19546, 19547, 19675], [19547, 19676, 19675], [19547, 19548, 19677], [19547, 19677, 19676], [19548, 19549, 19677], [19549, 19678, 19677], [19549, 19550, 19679], [19549, 19679, 19678], [19550, 19551, 19679], [19551, 19680, 19679], [19551, 19552, 19681], [19551, 19681, 19680], [19552, 19553, 19681], [19553, 19682, 19681], [19553, 19554, 19683], [19553, 19683, 19682], [19554, 19555, 19683], [19555, 19684, 19683], [19555, 19556, 19685], [19555, 19685, 19684], [19556, 19557, 19685], [19557, 19686, 19685], [19557, 19558, 19687], [19557, 19687, 19686], [19558, 19559, 19687], [19559, 19688, 19687], [19559, 19560, 19689], [19559, 19689, 19688], [19560, 19561, 19689], [19561, 19690, 19689], [19561, 19562, 19691], [19561, 19691, 19690], [19562, 19563, 19691], [19563, 19692, 19691], [19563, 19564, 19693], [19563, 19693, 19692], [19564, 19565, 19693], [19565, 19694, 19693], [19565, 19566, 19695], [19565, 19695, 19694], [19566, 19567, 
19695], [19567, 19696, 19695], [19567, 19568, 19697], [19567, 19697, 19696], [19568, 19569, 19697], [19569, 19698, 19697], [19569, 19570, 19699], [19569, 19699, 19698], [19570, 19571, 19699], [19571, 19700, 19699], [19571, 19572, 19701], [19571, 19701, 19700], [19572, 19573, 19701], [19573, 19702, 19701], [19573, 19574, 19703], [19573, 19703, 19702], [19574, 19575, 19703], [19575, 19704, 19703], [19575, 19576, 19705], [19575, 19705, 19704], [19576, 19577, 19705], [19577, 19706, 19705], [19577, 19578, 19707], [19577, 19707, 19706], [19578, 19579, 19707], [19579, 19708, 19707], [19579, 19580, 19709], [19579, 19709, 19708], [19580, 19581, 19709], [19581, 19710, 19709], [19581, 19582, 19711], [19581, 19711, 19710], [19582, 19583, 19711], [19583, 19712, 19711], [19583, 19584, 19713], [19583, 19713, 19712], [19584, 19585, 19713], [19585, 19714, 19713], [19585, 19586, 19715], [19585, 19715, 19714], [19586, 19587, 19715], [19587, 19716, 19715], [19587, 19588, 19717], [19587, 19717, 19716], [19588, 19589, 19717], [19589, 19718, 19717], [19589, 19590, 19719], [19589, 19719, 19718], [19590, 19591, 19719], [19591, 19720, 19719], [19591, 19592, 19721], [19591, 19721, 19720], [19592, 19593, 19721], [19593, 19722, 19721], [19593, 19594, 19723], [19593, 19723, 19722], [19594, 19595, 19723], [19595, 19724, 19723], [19595, 19596, 19725], [19595, 19725, 19724], [19596, 19597, 19725], [19597, 19726, 19725], [19597, 19598, 19727], [19597, 19727, 19726], [19598, 19599, 19727], [19599, 19728, 19727], [19599, 19600, 19729], [19599, 19729, 19728], [19600, 19601, 19729], [19601, 19730, 19729], [19601, 19602, 19731], [19601, 19731, 19730], [19602, 19603, 19731], [19603, 19732, 19731], [19603, 19604, 19733], [19603, 19733, 19732], [19604, 19605, 19733], [19605, 19734, 19733], [19605, 19606, 19735], [19605, 19735, 19734], [19606, 19607, 19735], [19607, 19736, 19735], [19607, 19608, 19737], [19607, 19737, 19736], [19608, 19609, 19737], [19609, 19738, 19737], [19609, 19610, 19739], [19609, 19739, 19738], [19610, 19611, 19739], [19611, 19740, 19739], [19611, 19612, 19741], [19611, 19741, 19740], [19612, 19613, 19741], [19613, 19742, 19741], [19613, 19614, 19743], [19613, 19743, 19742], [19614, 19615, 19743], [19615, 19744, 19743], [19615, 19616, 19745], [19615, 19745, 19744], [19616, 19617, 19745], [19617, 19746, 19745], [19617, 19618, 19747], [19617, 19747, 19746], [19618, 19619, 19747], [19619, 19748, 19747], [19619, 19620, 19749], [19619, 19749, 19748], [19620, 19621, 19749], [19621, 19750, 19749], [19621, 19622, 19751], [19621, 19751, 19750], [19622, 19623, 19751], [19623, 19752, 19751], [19623, 19624, 19753], [19623, 19753, 19752], [19624, 19625, 19753], [19625, 19754, 19753], [19625, 19626, 19755], [19625, 19755, 19754], [19626, 19627, 19755], [19627, 19756, 19755], [19627, 19628, 19757], [19627, 19757, 19756], [19628, 19629, 19757], [19629, 19758, 19757], [19629, 19630, 19759], [19629, 19759, 19758], [19630, 19631, 19759], [19631, 19760, 19759], [19631, 19632, 19761], [19631, 19761, 19760], [19632, 19633, 19761], [19633, 19762, 19761], [19633, 19634, 19763], [19633, 19763, 19762], [19634, 19635, 19763], [19635, 19764, 19763], [19635, 19636, 19765], [19635, 19765, 19764], [19636, 19637, 19765], [19637, 19766, 19765], [19637, 19638, 19767], [19637, 19767, 19766], [19638, 19639, 19767], [19639, 19768, 19767], [19639, 19640, 19769], [19639, 19769, 19768], [19640, 19641, 19769], [19641, 19770, 19769], [19641, 19642, 19771], [19641, 19771, 19770], [19642, 19643, 19771], [19643, 19772, 19771], [19643, 19644, 19773], 
[19643, 19773, 19772], [19644, 19645, 19773], [19645, 19774, 19773], [19645, 19646, 19775], [19645, 19775, 19774], [19646, 19647, 19775], [19647, 19776, 19775], [19647, 19648, 19777], [19647, 19777, 19776], [19648, 19649, 19777], [19649, 19778, 19777], [19649, 19650, 19779], [19649, 19779, 19778], [19650, 19651, 19779], [19651, 19780, 19779], [19651, 19652, 19781], [19651, 19781, 19780], [19652, 19653, 19781], [19653, 19782, 19781], [19654, 19655, 19783], [19655, 19784, 19783], [19655, 19656, 19785], [19655, 19785, 19784], [19656, 19657, 19785], [19657, 19786, 19785], [19657, 19658, 19787], [19657, 19787, 19786], [19658, 19659, 19787], [19659, 19788, 19787], [19659, 19660, 19789], [19659, 19789, 19788], [19660, 19661, 19789], [19661, 19790, 19789], [19661, 19662, 19791], [19661, 19791, 19790], [19662, 19663, 19791], [19663, 19792, 19791], [19663, 19664, 19793], [19663, 19793, 19792], [19664, 19665, 19793], [19665, 19794, 19793], [19665, 19666, 19795], [19665, 19795, 19794], [19666, 19667, 19795], [19667, 19796, 19795], [19667, 19668, 19797], [19667, 19797, 19796], [19668, 19669, 19797], [19669, 19798, 19797], [19669, 19670, 19799], [19669, 19799, 19798], [19670, 19671, 19799], [19671, 19800, 19799], [19671, 19672, 19801], [19671, 19801, 19800], [19672, 19673, 19801], [19673, 19802, 19801], [19673, 19674, 19803], [19673, 19803, 19802], [19674, 19675, 19803], [19675, 19804, 19803], [19675, 19676, 19805], [19675, 19805, 19804], [19676, 19677, 19805], [19677, 19806, 19805], [19677, 19678, 19807], [19677, 19807, 19806], [19678, 19679, 19807], [19679, 19808, 19807], [19679, 19680, 19809], [19679, 19809, 19808], [19680, 19681, 19809], [19681, 19810, 19809], [19681, 19682, 19811], [19681, 19811, 19810], [19682, 19683, 19811], [19683, 19812, 19811], [19683, 19684, 19813], [19683, 19813, 19812], [19684, 19685, 19813], [19685, 19814, 19813], [19685, 19686, 19815], [19685, 19815, 19814], [19686, 19687, 19815], [19687, 19816, 19815], [19687, 19688, 19817], [19687, 19817, 19816], [19688, 19689, 19817], [19689, 19818, 19817], [19689, 19690, 19819], [19689, 19819, 19818], [19690, 19691, 19819], [19691, 19820, 19819], [19691, 19692, 19821], [19691, 19821, 19820], [19692, 19693, 19821], [19693, 19822, 19821], [19693, 19694, 19823], [19693, 19823, 19822], [19694, 19695, 19823], [19695, 19824, 19823], [19695, 19696, 19825], [19695, 19825, 19824], [19696, 19697, 19825], [19697, 19826, 19825], [19697, 19698, 19827], [19697, 19827, 19826], [19698, 19699, 19827], [19699, 19828, 19827], [19699, 19700, 19829], [19699, 19829, 19828], [19700, 19701, 19829], [19701, 19830, 19829], [19701, 19702, 19831], [19701, 19831, 19830], [19702, 19703, 19831], [19703, 19832, 19831], [19703, 19704, 19833], [19703, 19833, 19832], [19704, 19705, 19833], [19705, 19834, 19833], [19705, 19706, 19835], [19705, 19835, 19834], [19706, 19707, 19835], [19707, 19836, 19835], [19707, 19708, 19837], [19707, 19837, 19836], [19708, 19709, 19837], [19709, 19838, 19837], [19709, 19710, 19839], [19709, 19839, 19838], [19710, 19711, 19839], [19711, 19840, 19839], [19711, 19712, 19841], [19711, 19841, 19840], [19712, 19713, 19841], [19713, 19842, 19841], [19713, 19714, 19843], [19713, 19843, 19842], [19714, 19715, 19843], [19715, 19844, 19843], [19715, 19716, 19845], [19715, 19845, 19844], [19716, 19717, 19845], [19717, 19846, 19845], [19717, 19718, 19847], [19717, 19847, 19846], [19718, 19719, 19847], [19719, 19848, 19847], [19719, 19720, 19849], [19719, 19849, 19848], [19720, 19721, 19849], [19721, 19850, 19849], [19721, 19722, 19851], [19721, 
19851, 19850], [19722, 19723, 19851], [19723, 19852, 19851], [19723, 19724, 19853], [19723, 19853, 19852], [19724, 19725, 19853], [19725, 19854, 19853], [19725, 19726, 19855], [19725, 19855, 19854], [19726, 19727, 19855], [19727, 19856, 19855], [19727, 19728, 19857], [19727, 19857, 19856], [19728, 19729, 19857], [19729, 19858, 19857], [19729, 19730, 19859], [19729, 19859, 19858], [19730, 19731, 19859], [19731, 19860, 19859], [19731, 19732, 19861], [19731, 19861, 19860], [19732, 19733, 19861], [19733, 19862, 19861], [19733, 19734, 19863], [19733, 19863, 19862], [19734, 19735, 19863], [19735, 19864, 19863], [19735, 19736, 19865], [19735, 19865, 19864], [19736, 19737, 19865], [19737, 19866, 19865], [19737, 19738, 19867], [19737, 19867, 19866], [19738, 19739, 19867], [19739, 19868, 19867], [19739, 19740, 19869], [19739, 19869, 19868], [19740, 19741, 19869], [19741, 19870, 19869], [19741, 19742, 19871], [19741, 19871, 19870], [19742, 19743, 19871], [19743, 19872, 19871], [19743, 19744, 19873], [19743, 19873, 19872], [19744, 19745, 19873], [19745, 19874, 19873], [19745, 19746, 19875], [19745, 19875, 19874], [19746, 19747, 19875], [19747, 19876, 19875], [19747, 19748, 19877], [19747, 19877, 19876], [19748, 19749, 19877], [19749, 19878, 19877], [19749, 19750, 19879], [19749, 19879, 19878], [19750, 19751, 19879], [19751, 19880, 19879], [19751, 19752, 19881], [19751, 19881, 19880], [19752, 19753, 19881], [19753, 19882, 19881], [19753, 19754, 19883], [19753, 19883, 19882], [19754, 19755, 19883], [19755, 19884, 19883], [19755, 19756, 19885], [19755, 19885, 19884], [19756, 19757, 19885], [19757, 19886, 19885], [19757, 19758, 19887], [19757, 19887, 19886], [19758, 19759, 19887], [19759, 19888, 19887], [19759, 19760, 19889], [19759, 19889, 19888], [19760, 19761, 19889], [19761, 19890, 19889], [19761, 19762, 19891], [19761, 19891, 19890], [19762, 19763, 19891], [19763, 19892, 19891], [19763, 19764, 19893], [19763, 19893, 19892], [19764, 19765, 19893], [19765, 19894, 19893], [19765, 19766, 19895], [19765, 19895, 19894], [19766, 19767, 19895], [19767, 19896, 19895], [19767, 19768, 19897], [19767, 19897, 19896], [19768, 19769, 19897], [19769, 19898, 19897], [19769, 19770, 19899], [19769, 19899, 19898], [19770, 19771, 19899], [19771, 19900, 19899], [19771, 19772, 19901], [19771, 19901, 19900], [19772, 19773, 19901], [19773, 19902, 19901], [19773, 19774, 19903], [19773, 19903, 19902], [19774, 19775, 19903], [19775, 19904, 19903], [19775, 19776, 19905], [19775, 19905, 19904], [19776, 19777, 19905], [19777, 19906, 19905], [19777, 19778, 19907], [19777, 19907, 19906], [19778, 19779, 19907], [19779, 19908, 19907], [19779, 19780, 19909], [19779, 19909, 19908], [19780, 19781, 19909], [19781, 19910, 19909], [19781, 19782, 19911], [19781, 19911, 19910], [19783, 19784, 19913], [19783, 19913, 19912], [19784, 19785, 19913], [19785, 19914, 19913], [19785, 19786, 19915], [19785, 19915, 19914], [19786, 19787, 19915], [19787, 19916, 19915], [19787, 19788, 19917], [19787, 19917, 19916], [19788, 19789, 19917], [19789, 19918, 19917], [19789, 19790, 19919], [19789, 19919, 19918], [19790, 19791, 19919], [19791, 19920, 19919], [19791, 19792, 19921], [19791, 19921, 19920], [19792, 19793, 19921], [19793, 19922, 19921], [19793, 19794, 19923], [19793, 19923, 19922], [19794, 19795, 19923], [19795, 19924, 19923], [19795, 19796, 19925], [19795, 19925, 19924], [19796, 19797, 19925], [19797, 19926, 19925], [19797, 19798, 19927], [19797, 19927, 19926], [19798, 19799, 19927], [19799, 19928, 19927], [19799, 19800, 19929], [19799, 19929, 
19928], [19800, 19801, 19929], [19801, 19930, 19929], [19801, 19802, 19931], [19801, 19931, 19930], [19802, 19803, 19931], [19803, 19932, 19931], [19803, 19804, 19933], [19803, 19933, 19932], [19804, 19805, 19933], [19805, 19934, 19933], [19805, 19806, 19935], [19805, 19935, 19934], [19806, 19807, 19935], [19807, 19936, 19935], [19807, 19808, 19937], [19807, 19937, 19936], [19808, 19809, 19937], [19809, 19938, 19937], [19809, 19810, 19939], [19809, 19939, 19938], [19810, 19811, 19939], [19811, 19940, 19939], [19811, 19812, 19941], [19811, 19941, 19940], [19812, 19813, 19941], [19813, 19942, 19941], [19813, 19814, 19943], [19813, 19943, 19942], [19814, 19815, 19943], [19815, 19944, 19943], [19815, 19816, 19945], [19815, 19945, 19944], [19816, 19817, 19945], [19817, 19946, 19945], [19817, 19818, 19947], [19817, 19947, 19946], [19818, 19819, 19947], [19819, 19948, 19947], [19819, 19820, 19949], [19819, 19949, 19948], [19820, 19821, 19949], [19821, 19950, 19949], [19821, 19822, 19951], [19821, 19951, 19950], [19822, 19823, 19951], [19823, 19952, 19951], [19823, 19824, 19953], [19823, 19953, 19952], [19824, 19825, 19953], [19825, 19954, 19953], [19825, 19826, 19955], [19825, 19955, 19954], [19826, 19827, 19955], [19827, 19956, 19955], [19827, 19828, 19957], [19827, 19957, 19956], [19828, 19829, 19957], [19829, 19958, 19957], [19829, 19830, 19959], [19829, 19959, 19958], [19830, 19831, 19959], [19831, 19960, 19959], [19831, 19832, 19961], [19831, 19961, 19960], [19832, 19833, 19961], [19833, 19962, 19961], [19833, 19834, 19963], [19833, 19963, 19962], [19834, 19835, 19963], [19835, 19964, 19963], [19835, 19836, 19965], [19835, 19965, 19964], [19836, 19837, 19965], [19837, 19966, 19965], [19837, 19838, 19967], [19837, 19967, 19966], [19838, 19839, 19967], [19839, 19968, 19967], [19839, 19840, 19969], [19839, 19969, 19968], [19840, 19841, 19969], [19841, 19970, 19969], [19841, 19842, 19971], [19841, 19971, 19970], [19842, 19843, 19971], [19843, 19972, 19971], [19843, 19844, 19973], [19843, 19973, 19972], [19844, 19845, 19973], [19845, 19974, 19973], [19845, 19846, 19975], [19845, 19975, 19974], [19846, 19847, 19975], [19847, 19976, 19975], [19847, 19848, 19977], [19847, 19977, 19976], [19848, 19849, 19977], [19849, 19978, 19977], [19849, 19850, 19979], [19849, 19979, 19978], [19850, 19851, 19979], [19851, 19980, 19979], [19851, 19852, 19981], [19851, 19981, 19980], [19852, 19853, 19981], [19853, 19982, 19981], [19853, 19854, 19983], [19853, 19983, 19982], [19854, 19855, 19983], [19855, 19984, 19983], [19855, 19856, 19985], [19855, 19985, 19984], [19856, 19857, 19985], [19857, 19986, 19985], [19857, 19858, 19987], [19857, 19987, 19986], [19858, 19859, 19987], [19859, 19988, 19987], [19859, 19860, 19989], [19859, 19989, 19988], [19860, 19861, 19989], [19861, 19990, 19989], [19861, 19862, 19991], [19861, 19991, 19990], [19862, 19863, 19991], [19863, 19992, 19991], [19863, 19864, 19993], [19863, 19993, 19992], [19864, 19865, 19993], [19865, 19994, 19993], [19865, 19866, 19995], [19865, 19995, 19994], [19866, 19867, 19995], [19867, 19996, 19995], [19867, 19868, 19997], [19867, 19997, 19996], [19868, 19869, 19997], [19869, 19998, 19997], [19869, 19870, 19999], [19869, 19999, 19998], [19870, 19871, 19999], [19871, 20000, 19999], [19871, 19872, 20001], [19871, 20001, 20000], [19872, 19873, 20001], [19873, 20002, 20001], [19873, 19874, 20003], [19873, 20003, 20002], [19874, 19875, 20003], [19875, 20004, 20003], [19875, 19876, 20005], [19875, 20005, 20004], [19876, 19877, 20005], [19877, 20006, 20005], 
[19877, 19878, 20007], [19877, 20007, 20006], [19878, 19879, 20007], [19879, 20008, 20007], [19879, 19880, 20009], [19879, 20009, 20008], [19880, 19881, 20009], [19881, 20010, 20009], [19881, 19882, 20011], [19881, 20011, 20010], [19882, 19883, 20011], [19883, 20012, 20011], [19883, 19884, 20013], [19883, 20013, 20012], [19884, 19885, 20013], [19885, 20014, 20013], [19885, 19886, 20015], [19885, 20015, 20014], [19886, 19887, 20015], [19887, 20016, 20015], [19887, 19888, 20017], [19887, 20017, 20016], [19888, 19889, 20017], [19889, 20018, 20017], [19889, 19890, 20019], [19889, 20019, 20018], [19890, 19891, 20019], [19891, 20020, 20019], [19891, 19892, 20021], [19891, 20021, 20020], [19892, 19893, 20021], [19893, 20022, 20021], [19893, 19894, 20023], [19893, 20023, 20022], [19894, 19895, 20023], [19895, 20024, 20023], [19895, 19896, 20025], [19895, 20025, 20024], [19896, 19897, 20025], [19897, 20026, 20025], [19897, 19898, 20027], [19897, 20027, 20026], [19898, 19899, 20027], [19899, 20028, 20027], [19899, 19900, 20029], [19899, 20029, 20028], [19900, 19901, 20029], [19901, 20030, 20029], [19901, 19902, 20031], [19901, 20031, 20030], [19902, 19903, 20031], [19903, 20032, 20031], [19903, 19904, 20033], [19903, 20033, 20032], [19904, 19905, 20033], [19905, 20034, 20033], [19905, 19906, 20035], [19905, 20035, 20034], [19906, 19907, 20035], [19907, 20036, 20035], [19907, 19908, 20037], [19907, 20037, 20036], [19908, 19909, 20037], [19909, 20038, 20037], [19909, 19910, 20039], [19909, 20039, 20038], [19910, 19911, 20039], [19911, 20040, 20039], [19912, 19913, 20041], [19913, 20042, 20041], [19913, 19914, 20043], [19913, 20043, 20042], [19914, 19915, 20043], [19915, 20044, 20043], [19915, 19916, 20045], [19915, 20045, 20044], [19916, 19917, 20045], [19917, 20046, 20045], [19917, 19918, 20047], [19917, 20047, 20046], [19918, 19919, 20047], [19919, 20048, 20047], [19919, 19920, 20049], [19919, 20049, 20048], [19920, 19921, 20049], [19921, 20050, 20049], [19921, 19922, 20051], [19921, 20051, 20050], [19922, 19923, 20051], [19923, 20052, 20051], [19923, 19924, 20053], [19923, 20053, 20052], [19924, 19925, 20053], [19925, 20054, 20053], [19925, 19926, 20055], [19925, 20055, 20054], [19926, 19927, 20055], [19927, 20056, 20055], [19927, 19928, 20057], [19927, 20057, 20056], [19928, 19929, 20057], [19929, 20058, 20057], [19929, 19930, 20059], [19929, 20059, 20058], [19930, 19931, 20059], [19931, 20060, 20059], [19931, 19932, 20061], [19931, 20061, 20060], [19932, 19933, 20061], [19933, 20062, 20061], [19933, 19934, 20063], [19933, 20063, 20062], [19934, 19935, 20063], [19935, 20064, 20063], [19935, 19936, 20065], [19935, 20065, 20064], [19936, 19937, 20065], [19937, 20066, 20065], [19937, 19938, 20067], [19937, 20067, 20066], [19938, 19939, 20067], [19939, 20068, 20067], [19939, 19940, 20069], [19939, 20069, 20068], [19940, 19941, 20069], [19941, 20070, 20069], [19941, 19942, 20071], [19941, 20071, 20070], [19942, 19943, 20071], [19943, 20072, 20071], [19943, 19944, 20073], [19943, 20073, 20072], [19944, 19945, 20073], [19945, 20074, 20073], [19945, 19946, 20075], [19945, 20075, 20074], [19946, 19947, 20075], [19947, 20076, 20075], [19947, 19948, 20077], [19947, 20077, 20076], [19948, 19949, 20077], [19949, 20078, 20077], [19949, 19950, 20079], [19949, 20079, 20078], [19950, 19951, 20079], [19951, 20080, 20079], [19951, 19952, 20081], [19951, 20081, 20080], [19952, 19953, 20081], [19953, 20082, 20081], [19953, 19954, 20083], [19953, 20083, 20082], [19954, 19955, 20083], [19955, 20084, 20083], [19955, 
19956, 20085], [19955, 20085, 20084], [19956, 19957, 20085], [19957, 20086, 20085], [19957, 19958, 20087], [19957, 20087, 20086], [19958, 19959, 20087], [19959, 20088, 20087], [19959, 19960, 20089], [19959, 20089, 20088], [19960, 19961, 20089], [19961, 20090, 20089], [19961, 19962, 20091], [19961, 20091, 20090], [19962, 19963, 20091], [19963, 20092, 20091], [19963, 19964, 20093], [19963, 20093, 20092], [19964, 19965, 20093], [19965, 20094, 20093], [19965, 19966, 20095], [19965, 20095, 20094], [19966, 19967, 20095], [19967, 20096, 20095], [19967, 19968, 20097], [19967, 20097, 20096], [19968, 19969, 20097], [19969, 20098, 20097], [19969, 19970, 20099], [19969, 20099, 20098], [19970, 19971, 20099], [19971, 20100, 20099], [19971, 19972, 20101], [19971, 20101, 20100], [19972, 19973, 20101], [19973, 20102, 20101], [19973, 19974, 20103], [19973, 20103, 20102], [19974, 19975, 20103], [19975, 20104, 20103], [19975, 19976, 20105], [19975, 20105, 20104], [19976, 19977, 20105], [19977, 20106, 20105], [19977, 19978, 20107], [19977, 20107, 20106], [19978, 19979, 20107], [19979, 20108, 20107], [19979, 19980, 20109], [19979, 20109, 20108], [19980, 19981, 20109], [19981, 20110, 20109], [19981, 19982, 20111], [19981, 20111, 20110], [19982, 19983, 20111], [19983, 20112, 20111], [19983, 19984, 20113], [19983, 20113, 20112], [19984, 19985, 20113], [19985, 20114, 20113], [19985, 19986, 20115], [19985, 20115, 20114], [19986, 19987, 20115], [19987, 20116, 20115], [19987, 19988, 20117], [19987, 20117, 20116], [19988, 19989, 20117], [19989, 20118, 20117], [19989, 19990, 20119], [19989, 20119, 20118], [19990, 19991, 20119], [19991, 20120, 20119], [19991, 19992, 20121], [19991, 20121, 20120], [19992, 19993, 20121], [19993, 20122, 20121], [19993, 19994, 20123], [19993, 20123, 20122], [19994, 19995, 20123], [19995, 20124, 20123], [19995, 19996, 20125], [19995, 20125, 20124], [19996, 19997, 20125], [19997, 20126, 20125], [19997, 19998, 20127], [19997, 20127, 20126], [19998, 19999, 20127], [19999, 20128, 20127], [19999, 20000, 20129], [19999, 20129, 20128], [20000, 20001, 20129], [20001, 20130, 20129], [20001, 20002, 20131], [20001, 20131, 20130], [20002, 20003, 20131], [20003, 20132, 20131], [20003, 20004, 20133], [20003, 20133, 20132], [20004, 20005, 20133], [20005, 20134, 20133], [20005, 20006, 20135], [20005, 20135, 20134], [20006, 20007, 20135], [20007, 20136, 20135], [20007, 20008, 20137], [20007, 20137, 20136], [20008, 20009, 20137], [20009, 20138, 20137], [20009, 20010, 20139], [20009, 20139, 20138], [20010, 20011, 20139], [20011, 20140, 20139], [20011, 20012, 20141], [20011, 20141, 20140], [20012, 20013, 20141], [20013, 20142, 20141], [20013, 20014, 20143], [20013, 20143, 20142], [20014, 20015, 20143], [20015, 20144, 20143], [20015, 20016, 20145], [20015, 20145, 20144], [20016, 20017, 20145], [20017, 20146, 20145], [20017, 20018, 20147], [20017, 20147, 20146], [20018, 20019, 20147], [20019, 20148, 20147], [20019, 20020, 20149], [20019, 20149, 20148], [20020, 20021, 20149], [20021, 20150, 20149], [20021, 20022, 20151], [20021, 20151, 20150], [20022, 20023, 20151], [20023, 20152, 20151], [20023, 20024, 20153], [20023, 20153, 20152], [20024, 20025, 20153], [20025, 20154, 20153], [20025, 20026, 20155], [20025, 20155, 20154], [20026, 20027, 20155], [20027, 20156, 20155], [20027, 20028, 20157], [20027, 20157, 20156], [20028, 20029, 20157], [20029, 20158, 20157], [20029, 20030, 20159], [20029, 20159, 20158], [20030, 20031, 20159], [20031, 20160, 20159], [20031, 20032, 20161], [20031, 20161, 20160], [20032, 20033, 
20161], [20033, 20162, 20161], [20033, 20034, 20163], [20033, 20163, 20162], [20034, 20035, 20163], [20035, 20164, 20163], [20035, 20036, 20165], [20035, 20165, 20164], [20036, 20037, 20165], [20037, 20166, 20165], [20037, 20038, 20167], [20037, 20167, 20166], [20038, 20039, 20167], [20039, 20168, 20167], [20039, 20040, 20169], [20039, 20169, 20168], [20041, 20042, 20171], [20041, 20171, 20170], [20042, 20043, 20171], [20043, 20172, 20171], [20043, 20044, 20173], [20043, 20173, 20172], [20044, 20045, 20173], [20045, 20174, 20173], [20045, 20046, 20175], [20045, 20175, 20174], [20046, 20047, 20175], [20047, 20176, 20175], [20047, 20048, 20177], [20047, 20177, 20176], [20048, 20049, 20177], [20049, 20178, 20177], [20049, 20050, 20179], [20049, 20179, 20178], [20050, 20051, 20179], [20051, 20180, 20179], [20051, 20052, 20181], [20051, 20181, 20180], [20052, 20053, 20181], [20053, 20182, 20181], [20053, 20054, 20183], [20053, 20183, 20182], [20054, 20055, 20183], [20055, 20184, 20183], [20055, 20056, 20185], [20055, 20185, 20184], [20056, 20057, 20185], [20057, 20186, 20185], [20057, 20058, 20187], [20057, 20187, 20186], [20058, 20059, 20187], [20059, 20188, 20187], [20059, 20060, 20189], [20059, 20189, 20188], [20060, 20061, 20189], [20061, 20190, 20189], [20061, 20062, 20191], [20061, 20191, 20190], [20062, 20063, 20191], [20063, 20192, 20191], [20063, 20064, 20193], [20063, 20193, 20192], [20064, 20065, 20193], [20065, 20194, 20193], [20065, 20066, 20195], [20065, 20195, 20194], [20066, 20067, 20195], [20067, 20196, 20195], [20067, 20068, 20197], [20067, 20197, 20196], [20068, 20069, 20197], [20069, 20198, 20197], [20069, 20070, 20199], [20069, 20199, 20198], [20070, 20071, 20199], [20071, 20200, 20199], [20071, 20072, 20201], [20071, 20201, 20200], [20072, 20073, 20201], [20073, 20202, 20201], [20073, 20074, 20203], [20073, 20203, 20202], [20074, 20075, 20203], [20075, 20204, 20203], [20075, 20076, 20205], [20075, 20205, 20204], [20076, 20077, 20205], [20077, 20206, 20205], [20077, 20078, 20207], [20077, 20207, 20206], [20078, 20079, 20207], [20079, 20208, 20207], [20079, 20080, 20209], [20079, 20209, 20208], [20080, 20081, 20209], [20081, 20210, 20209], [20081, 20082, 20211], [20081, 20211, 20210], [20082, 20083, 20211], [20083, 20212, 20211], [20083, 20084, 20213], [20083, 20213, 20212], [20084, 20085, 20213], [20085, 20214, 20213], [20085, 20086, 20215], [20085, 20215, 20214], [20086, 20087, 20215], [20087, 20216, 20215], [20087, 20088, 20217], [20087, 20217, 20216], [20088, 20089, 20217], [20089, 20218, 20217], [20089, 20090, 20219], [20089, 20219, 20218], [20090, 20091, 20219], [20091, 20220, 20219], [20091, 20092, 20221], [20091, 20221, 20220], [20092, 20093, 20221], [20093, 20222, 20221], [20093, 20094, 20223], [20093, 20223, 20222], [20094, 20095, 20223], [20095, 20224, 20223], [20095, 20096, 20225], [20095, 20225, 20224], [20096, 20097, 20225], [20097, 20226, 20225], [20097, 20098, 20227], [20097, 20227, 20226], [20098, 20099, 20227], [20099, 20228, 20227], [20099, 20100, 20229], [20099, 20229, 20228], [20100, 20101, 20229], [20101, 20230, 20229], [20101, 20102, 20231], [20101, 20231, 20230], [20102, 20103, 20231], [20103, 20232, 20231], [20103, 20104, 20233], [20103, 20233, 20232], [20104, 20105, 20233], [20105, 20234, 20233], [20105, 20106, 20235], [20105, 20235, 20234], [20106, 20107, 20235], [20107, 20236, 20235], [20107, 20108, 20237], [20107, 20237, 20236], [20108, 20109, 20237], [20109, 20238, 20237], [20109, 20110, 20239], [20109, 20239, 20238], [20110, 20111, 20239], 
[20111, 20240, 20239], [20111, 20112, 20241], [20111, 20241, 20240], [20112, 20113, 20241], [20113, 20242, 20241], [20113, 20114, 20243], [20113, 20243, 20242], [20114, 20115, 20243], [20115, 20244, 20243], [20115, 20116, 20245], [20115, 20245, 20244], [20116, 20117, 20245], [20117, 20246, 20245], [20117, 20118, 20247], [20117, 20247, 20246], [20118, 20119, 20247], [20119, 20248, 20247], [20119, 20120, 20249], [20119, 20249, 20248], [20120, 20121, 20249], [20121, 20250, 20249], [20121, 20122, 20251], [20121, 20251, 20250], [20122, 20123, 20251], [20123, 20252, 20251], [20123, 20124, 20253], [20123, 20253, 20252], [20124, 20125, 20253], [20125, 20254, 20253], [20125, 20126, 20255], [20125, 20255, 20254], [20126, 20127, 20255], [20127, 20256, 20255], [20127, 20128, 20257], [20127, 20257, 20256], [20128, 20129, 20257], [20129, 20258, 20257], [20129, 20130, 20259], [20129, 20259, 20258], [20130, 20131, 20259], [20131, 20260, 20259], [20131, 20132, 20261], [20131, 20261, 20260], [20132, 20133, 20261], [20133, 20262, 20261], [20133, 20134, 20263], [20133, 20263, 20262], [20134, 20135, 20263], [20135, 20264, 20263], [20135, 20136, 20265], [20135, 20265, 20264], [20136, 20137, 20265], [20137, 20266, 20265], [20137, 20138, 20267], [20137, 20267, 20266], [20138, 20139, 20267], [20139, 20268, 20267], [20139, 20140, 20269], [20139, 20269, 20268], [20140, 20141, 20269], [20141, 20270, 20269], [20141, 20142, 20271], [20141, 20271, 20270], [20142, 20143, 20271], [20143, 20272, 20271], [20143, 20144, 20273], [20143, 20273, 20272], [20144, 20145, 20273], [20145, 20274, 20273], [20145, 20146, 20275], [20145, 20275, 20274], [20146, 20147, 20275], [20147, 20276, 20275], [20147, 20148, 20277], [20147, 20277, 20276], [20148, 20149, 20277], [20149, 20278, 20277], [20149, 20150, 20279], [20149, 20279, 20278], [20150, 20151, 20279], [20151, 20280, 20279], [20151, 20152, 20281], [20151, 20281, 20280], [20152, 20153, 20281], [20153, 20282, 20281], [20153, 20154, 20283], [20153, 20283, 20282], [20154, 20155, 20283], [20155, 20284, 20283], [20155, 20156, 20285], [20155, 20285, 20284], [20156, 20157, 20285], [20157, 20286, 20285], [20157, 20158, 20287], [20157, 20287, 20286], [20158, 20159, 20287], [20159, 20288, 20287], [20159, 20160, 20289], [20159, 20289, 20288], [20160, 20161, 20289], [20161, 20290, 20289], [20161, 20162, 20291], [20161, 20291, 20290], [20162, 20163, 20291], [20163, 20292, 20291], [20163, 20164, 20293], [20163, 20293, 20292], [20164, 20165, 20293], [20165, 20294, 20293], [20165, 20166, 20295], [20165, 20295, 20294], [20166, 20167, 20295], [20167, 20296, 20295], [20167, 20168, 20297], [20167, 20297, 20296], [20168, 20169, 20297], [20169, 20298, 20297], [20170, 20171, 20299], [20171, 20300, 20299], [20171, 20172, 20301], [20171, 20301, 20300], [20172, 20173, 20301], [20173, 20302, 20301], [20173, 20174, 20303], [20173, 20303, 20302], [20174, 20175, 20303], [20175, 20304, 20303], [20175, 20176, 20305], [20175, 20305, 20304], [20176, 20177, 20305], [20177, 20306, 20305], [20177, 20178, 20307], [20177, 20307, 20306], [20178, 20179, 20307], [20179, 20308, 20307], [20179, 20180, 20309], [20179, 20309, 20308], [20180, 20181, 20309], [20181, 20310, 20309], [20181, 20182, 20311], [20181, 20311, 20310], [20182, 20183, 20311], [20183, 20312, 20311], [20183, 20184, 20313], [20183, 20313, 20312], [20184, 20185, 20313], [20185, 20314, 20313], [20185, 20186, 20315], [20185, 20315, 20314], [20186, 20187, 20315], [20187, 20316, 20315], [20187, 20188, 20317], [20187, 20317, 20316], [20188, 20189, 20317], [20189, 
20318, 20317], [20189, 20190, 20319], [20189, 20319, 20318], [20190, 20191, 20319], [20191, 20320, 20319], [20191, 20192, 20321], [20191, 20321, 20320], [20192, 20193, 20321], [20193, 20322, 20321], [20193, 20194, 20323], [20193, 20323, 20322], [20194, 20195, 20323], [20195, 20324, 20323], [20195, 20196, 20325], [20195, 20325, 20324], [20196, 20197, 20325], [20197, 20326, 20325], [20197, 20198, 20327], [20197, 20327, 20326], [20198, 20199, 20327], [20199, 20328, 20327], [20199, 20200, 20329], [20199, 20329, 20328], [20200, 20201, 20329], [20201, 20330, 20329], [20201, 20202, 20331], [20201, 20331, 20330], [20202, 20203, 20331], [20203, 20332, 20331], [20203, 20204, 20333], [20203, 20333, 20332], [20204, 20205, 20333], [20205, 20334, 20333], [20205, 20206, 20335], [20205, 20335, 20334], [20206, 20207, 20335], [20207, 20336, 20335], [20207, 20208, 20337], [20207, 20337, 20336], [20208, 20209, 20337], [20209, 20338, 20337], [20209, 20210, 20339], [20209, 20339, 20338], [20210, 20211, 20339], [20211, 20340, 20339], [20211, 20212, 20341], [20211, 20341, 20340], [20212, 20213, 20341], [20213, 20342, 20341], [20213, 20214, 20343], [20213, 20343, 20342], [20214, 20215, 20343], [20215, 20344, 20343], [20215, 20216, 20345], [20215, 20345, 20344], [20216, 20217, 20345], [20217, 20346, 20345], [20217, 20218, 20347], [20217, 20347, 20346], [20218, 20219, 20347], [20219, 20348, 20347], [20219, 20220, 20349], [20219, 20349, 20348], [20220, 20221, 20349], [20221, 20350, 20349], [20221, 20222, 20351], [20221, 20351, 20350], [20222, 20223, 20351], [20223, 20352, 20351], [20223, 20224, 20353], [20223, 20353, 20352], [20224, 20225, 20353], [20225, 20354, 20353], [20225, 20226, 20355], [20225, 20355, 20354], [20226, 20227, 20355], [20227, 20356, 20355], [20227, 20228, 20357], [20227, 20357, 20356], [20228, 20229, 20357], [20229, 20358, 20357], [20229, 20230, 20359], [20229, 20359, 20358], [20230, 20231, 20359], [20231, 20360, 20359], [20231, 20232, 20361], [20231, 20361, 20360], [20232, 20233, 20361], [20233, 20362, 20361], [20233, 20234, 20363], [20233, 20363, 20362], [20234, 20235, 20363], [20235, 20364, 20363], [20235, 20236, 20365], [20235, 20365, 20364], [20236, 20237, 20365], [20237, 20366, 20365], [20237, 20238, 20367], [20237, 20367, 20366], [20238, 20239, 20367], [20239, 20368, 20367], [20239, 20240, 20369], [20239, 20369, 20368], [20240, 20241, 20369], [20241, 20370, 20369], [20241, 20242, 20371], [20241, 20371, 20370], [20242, 20243, 20371], [20243, 20372, 20371], [20243, 20244, 20373], [20243, 20373, 20372], [20244, 20245, 20373], [20245, 20374, 20373], [20245, 20246, 20375], [20245, 20375, 20374], [20246, 20247, 20375], [20247, 20376, 20375], [20247, 20248, 20377], [20247, 20377, 20376], [20248, 20249, 20377], [20249, 20378, 20377], [20249, 20250, 20379], [20249, 20379, 20378], [20250, 20251, 20379], [20251, 20380, 20379], [20251, 20252, 20381], [20251, 20381, 20380], [20252, 20253, 20381], [20253, 20382, 20381], [20253, 20254, 20383], [20253, 20383, 20382], [20254, 20255, 20383], [20255, 20384, 20383], [20255, 20256, 20385], [20255, 20385, 20384], [20256, 20257, 20385], [20257, 20386, 20385], [20257, 20258, 20387], [20257, 20387, 20386], [20258, 20259, 20387], [20259, 20388, 20387], [20259, 20260, 20389], [20259, 20389, 20388], [20260, 20261, 20389], [20261, 20390, 20389], [20261, 20262, 20391], [20261, 20391, 20390], [20262, 20263, 20391], [20263, 20392, 20391], [20263, 20264, 20393], [20263, 20393, 20392], [20264, 20265, 20393], [20265, 20394, 20393], [20265, 20266, 20395], [20265, 20395, 
20394], [20266, 20267, 20395], [20267, 20396, 20395], [20267, 20268, 20397], [20267, 20397, 20396], [20268, 20269, 20397], [20269, 20398, 20397], [20269, 20270, 20399], [20269, 20399, 20398], [20270, 20271, 20399], [20271, 20400, 20399], [20271, 20272, 20401], [20271, 20401, 20400], [20272, 20273, 20401], [20273, 20402, 20401], [20273, 20274, 20403], [20273, 20403, 20402], [20274, 20275, 20403], [20275, 20404, 20403], [20275, 20276, 20405], [20275, 20405, 20404], [20276, 20277, 20405], [20277, 20406, 20405], [20277, 20278, 20407], [20277, 20407, 20406], [20278, 20279, 20407], [20279, 20408, 20407], [20279, 20280, 20409], [20279, 20409, 20408], [20280, 20281, 20409], [20281, 20410, 20409], [20281, 20282, 20411], [20281, 20411, 20410], [20282, 20283, 20411], [20283, 20412, 20411], [20283, 20284, 20413], [20283, 20413, 20412], [20284, 20285, 20413], [20285, 20414, 20413], [20285, 20286, 20415], [20285, 20415, 20414], [20286, 20287, 20415], [20287, 20416, 20415], [20287, 20288, 20417], [20287, 20417, 20416], [20288, 20289, 20417], [20289, 20418, 20417], [20289, 20290, 20419], [20289, 20419, 20418], [20290, 20291, 20419], [20291, 20420, 20419], [20291, 20292, 20421], [20291, 20421, 20420], [20292, 20293, 20421], [20293, 20422, 20421], [20293, 20294, 20423], [20293, 20423, 20422], [20294, 20295, 20423], [20295, 20424, 20423], [20295, 20296, 20425], [20295, 20425, 20424], [20296, 20297, 20425], [20297, 20426, 20425], [20297, 20298, 20427], [20297, 20427, 20426], [20299, 20300, 20429], [20299, 20429, 20428], [20300, 20301, 20429], [20301, 20430, 20429], [20301, 20302, 20431], [20301, 20431, 20430], [20302, 20303, 20431], [20303, 20432, 20431], [20303, 20304, 20433], [20303, 20433, 20432], [20304, 20305, 20433], [20305, 20434, 20433], [20305, 20306, 20435], [20305, 20435, 20434], [20306, 20307, 20435], [20307, 20436, 20435], [20307, 20308, 20437], [20307, 20437, 20436], [20308, 20309, 20437], [20309, 20438, 20437], [20309, 20310, 20439], [20309, 20439, 20438], [20310, 20311, 20439], [20311, 20440, 20439], [20311, 20312, 20441], [20311, 20441, 20440], [20312, 20313, 20441], [20313, 20442, 20441], [20313, 20314, 20443], [20313, 20443, 20442], [20314, 20315, 20443], [20315, 20444, 20443], [20315, 20316, 20445], [20315, 20445, 20444], [20316, 20317, 20445], [20317, 20446, 20445], [20317, 20318, 20447], [20317, 20447, 20446], [20318, 20319, 20447], [20319, 20448, 20447], [20319, 20320, 20449], [20319, 20449, 20448], [20320, 20321, 20449], [20321, 20450, 20449], [20321, 20322, 20451], [20321, 20451, 20450], [20322, 20323, 20451], [20323, 20452, 20451], [20323, 20324, 20453], [20323, 20453, 20452], [20324, 20325, 20453], [20325, 20454, 20453], [20325, 20326, 20455], [20325, 20455, 20454], [20326, 20327, 20455], [20327, 20456, 20455], [20327, 20328, 20457], [20327, 20457, 20456], [20328, 20329, 20457], [20329, 20458, 20457], [20329, 20330, 20459], [20329, 20459, 20458], [20330, 20331, 20459], [20331, 20460, 20459], [20331, 20332, 20461], [20331, 20461, 20460], [20332, 20333, 20461], [20333, 20462, 20461], [20333, 20334, 20463], [20333, 20463, 20462], [20334, 20335, 20463], [20335, 20464, 20463], [20335, 20336, 20465], [20335, 20465, 20464], [20336, 20337, 20465], [20337, 20466, 20465], [20337, 20338, 20467], [20337, 20467, 20466], [20338, 20339, 20467], [20339, 20468, 20467], [20339, 20340, 20469], [20339, 20469, 20468], [20340, 20341, 20469], [20341, 20470, 20469], [20341, 20342, 20471], [20341, 20471, 20470], [20342, 20343, 20471], [20343, 20472, 20471], [20343, 20344, 20473], [20343, 20473, 20472], 
[20344, 20345, 20473], [20345, 20474, 20473], [20345, 20346, 20475], [20345, 20475, 20474], [20346, 20347, 20475], [20347, 20476, 20475], [20347, 20348, 20477], [20347, 20477, 20476], [20348, 20349, 20477], [20349, 20478, 20477], [20349, 20350, 20479], [20349, 20479, 20478], [20350, 20351, 20479], [20351, 20480, 20479], [20351, 20352, 20481], [20351, 20481, 20480], [20352, 20353, 20481], [20353, 20482, 20481], [20353, 20354, 20483], [20353, 20483, 20482], [20354, 20355, 20483], [20355, 20484, 20483], [20355, 20356, 20485], [20355, 20485, 20484], [20356, 20357, 20485], [20357, 20486, 20485], [20357, 20358, 20487], [20357, 20487, 20486], [20358, 20359, 20487], [20359, 20488, 20487], [20359, 20360, 20489], [20359, 20489, 20488], [20360, 20361, 20489], [20361, 20490, 20489], [20361, 20362, 20491], [20361, 20491, 20490], [20362, 20363, 20491], [20363, 20492, 20491], [20363, 20364, 20493], [20363, 20493, 20492], [20364, 20365, 20493], [20365, 20494, 20493], [20365, 20366, 20495], [20365, 20495, 20494], [20366, 20367, 20495], [20367, 20496, 20495], [20367, 20368, 20497], [20367, 20497, 20496], [20368, 20369, 20497], [20369, 20498, 20497], [20369, 20370, 20499], [20369, 20499, 20498], [20370, 20371, 20499], [20371, 20500, 20499], [20371, 20372, 20501], [20371, 20501, 20500], [20372, 20373, 20501], [20373, 20502, 20501], [20373, 20374, 20503], [20373, 20503, 20502], [20374, 20375, 20503], [20375, 20504, 20503], [20375, 20376, 20505], [20375, 20505, 20504], [20376, 20377, 20505], [20377, 20506, 20505], [20377, 20378, 20507], [20377, 20507, 20506], [20378, 20379, 20507], [20379, 20508, 20507], [20379, 20380, 20509], [20379, 20509, 20508], [20380, 20381, 20509], [20381, 20510, 20509], [20381, 20382, 20511], [20381, 20511, 20510], [20382, 20383, 20511], [20383, 20512, 20511], [20383, 20384, 20513], [20383, 20513, 20512], [20384, 20385, 20513], [20385, 20514, 20513], [20385, 20386, 20515], [20385, 20515, 20514], [20386, 20387, 20515], [20387, 20516, 20515], [20387, 20388, 20517], [20387, 20517, 20516], [20388, 20389, 20517], [20389, 20518, 20517], [20389, 20390, 20519], [20389, 20519, 20518], [20390, 20391, 20519], [20391, 20520, 20519], [20391, 20392, 20521], [20391, 20521, 20520], [20392, 20393, 20521], [20393, 20522, 20521], [20393, 20394, 20523], [20393, 20523, 20522], [20394, 20395, 20523], [20395, 20524, 20523], [20395, 20396, 20525], [20395, 20525, 20524], [20396, 20397, 20525], [20397, 20526, 20525], [20397, 20398, 20527], [20397, 20527, 20526], [20398, 20399, 20527], [20399, 20528, 20527], [20399, 20400, 20529], [20399, 20529, 20528], [20400, 20401, 20529], [20401, 20530, 20529], [20401, 20402, 20531], [20401, 20531, 20530], [20402, 20403, 20531], [20403, 20532, 20531], [20403, 20404, 20533], [20403, 20533, 20532], [20404, 20405, 20533], [20405, 20534, 20533], [20405, 20406, 20535], [20405, 20535, 20534], [20406, 20407, 20535], [20407, 20536, 20535], [20407, 20408, 20537], [20407, 20537, 20536], [20408, 20409, 20537], [20409, 20538, 20537], [20409, 20410, 20539], [20409, 20539, 20538], [20410, 20411, 20539], [20411, 20540, 20539], [20411, 20412, 20541], [20411, 20541, 20540], [20412, 20413, 20541], [20413, 20542, 20541], [20413, 20414, 20543], [20413, 20543, 20542], [20414, 20415, 20543], [20415, 20544, 20543], [20415, 20416, 20545], [20415, 20545, 20544], [20416, 20417, 20545], [20417, 20546, 20545], [20417, 20418, 20547], [20417, 20547, 20546], [20418, 20419, 20547], [20419, 20548, 20547], [20419, 20420, 20549], [20419, 20549, 20548], [20420, 20421, 20549], [20421, 20550, 20549], [20421, 
20422, 20551], [20421, 20551, 20550], [20422, 20423, 20551], [20423, 20552, 20551], [20423, 20424, 20553], [20423, 20553, 20552], [20424, 20425, 20553], [20425, 20554, 20553], [20425, 20426, 20555], [20425, 20555, 20554], [20426, 20427, 20555], [20427, 20556, 20555], [20428, 20429, 20557], [20429, 20558, 20557], [20429, 20430, 20559], [20429, 20559, 20558], [20430, 20431, 20559], [20431, 20560, 20559], [20431, 20432, 20561], [20431, 20561, 20560], [20432, 20433, 20561], [20433, 20562, 20561], [20433, 20434, 20563], [20433, 20563, 20562], [20434, 20435, 20563], [20435, 20564, 20563], [20435, 20436, 20565], [20435, 20565, 20564], [20436, 20437, 20565], [20437, 20566, 20565], [20437, 20438, 20567], [20437, 20567, 20566], [20438, 20439, 20567], [20439, 20568, 20567], [20439, 20440, 20569], [20439, 20569, 20568], [20440, 20441, 20569], [20441, 20570, 20569], [20441, 20442, 20571], [20441, 20571, 20570], [20442, 20443, 20571], [20443, 20572, 20571], [20443, 20444, 20573], [20443, 20573, 20572], [20444, 20445, 20573], [20445, 20574, 20573], [20445, 20446, 20575], [20445, 20575, 20574], [20446, 20447, 20575], [20447, 20576, 20575], [20447, 20448, 20577], [20447, 20577, 20576], [20448, 20449, 20577], [20449, 20578, 20577], [20449, 20450, 20579], [20449, 20579, 20578], [20450, 20451, 20579], [20451, 20580, 20579], [20451, 20452, 20581], [20451, 20581, 20580], [20452, 20453, 20581], [20453, 20582, 20581], [20453, 20454, 20583], [20453, 20583, 20582], [20454, 20455, 20583], [20455, 20584, 20583], [20455, 20456, 20585], [20455, 20585, 20584], [20456, 20457, 20585], [20457, 20586, 20585], [20457, 20458, 20587], [20457, 20587, 20586], [20458, 20459, 20587], [20459, 20588, 20587], [20459, 20460, 20589], [20459, 20589, 20588], [20460, 20461, 20589], [20461, 20590, 20589], [20461, 20462, 20591], [20461, 20591, 20590], [20462, 20463, 20591], [20463, 20592, 20591], [20463, 20464, 20593], [20463, 20593, 20592], [20464, 20465, 20593], [20465, 20594, 20593], [20465, 20466, 20595], [20465, 20595, 20594], [20466, 20467, 20595], [20467, 20596, 20595], [20467, 20468, 20597], [20467, 20597, 20596], [20468, 20469, 20597], [20469, 20598, 20597], [20469, 20470, 20599], [20469, 20599, 20598], [20470, 20471, 20599], [20471, 20600, 20599], [20471, 20472, 20601], [20471, 20601, 20600], [20472, 20473, 20601], [20473, 20602, 20601], [20473, 20474, 20603], [20473, 20603, 20602], [20474, 20475, 20603], [20475, 20604, 20603], [20475, 20476, 20605], [20475, 20605, 20604], [20476, 20477, 20605], [20477, 20606, 20605], [20477, 20478, 20607], [20477, 20607, 20606], [20478, 20479, 20607], [20479, 20608, 20607], [20479, 20480, 20609], [20479, 20609, 20608], [20480, 20481, 20609], [20481, 20610, 20609], [20481, 20482, 20611], [20481, 20611, 20610], [20482, 20483, 20611], [20483, 20612, 20611], [20483, 20484, 20613], [20483, 20613, 20612], [20484, 20485, 20613], [20485, 20614, 20613], [20485, 20486, 20615], [20485, 20615, 20614], [20486, 20487, 20615], [20487, 20616, 20615], [20487, 20488, 20617], [20487, 20617, 20616], [20488, 20489, 20617], [20489, 20618, 20617], [20489, 20490, 20619], [20489, 20619, 20618], [20490, 20491, 20619], [20491, 20620, 20619], [20491, 20492, 20621], [20491, 20621, 20620], [20492, 20493, 20621], [20493, 20622, 20621], [20493, 20494, 20623], [20493, 20623, 20622], [20494, 20495, 20623], [20495, 20624, 20623], [20495, 20496, 20625], [20495, 20625, 20624], [20496, 20497, 20625], [20497, 20626, 20625], [20497, 20498, 20627], [20497, 20627, 20626], [20498, 20499, 20627], [20499, 20628, 20627], [20499, 20500, 
20629], [20499, 20629, 20628], [20500, 20501, 20629], [20501, 20630, 20629], [20501, 20502, 20631], [20501, 20631, 20630], [20502, 20503, 20631], [20503, 20632, 20631], [20503, 20504, 20633], [20503, 20633, 20632], [20504, 20505, 20633], [20505, 20634, 20633], [20505, 20506, 20635], [20505, 20635, 20634], [20506, 20507, 20635], [20507, 20636, 20635], [20507, 20508, 20637], [20507, 20637, 20636], [20508, 20509, 20637], [20509, 20638, 20637], [20509, 20510, 20639], [20509, 20639, 20638], [20510, 20511, 20639], [20511, 20640, 20639], [20511, 20512, 20641], [20511, 20641, 20640], [20512, 20513, 20641], [20513, 20642, 20641], [20513, 20514, 20643], [20513, 20643, 20642], [20514, 20515, 20643], [20515, 20644, 20643], [20515, 20516, 20645], [20515, 20645, 20644], [20516, 20517, 20645], [20517, 20646, 20645], [20517, 20518, 20647], [20517, 20647, 20646], [20518, 20519, 20647], [20519, 20648, 20647], [20519, 20520, 20649], [20519, 20649, 20648], [20520, 20521, 20649], [20521, 20650, 20649], [20521, 20522, 20651], [20521, 20651, 20650], [20522, 20523, 20651], [20523, 20652, 20651], [20523, 20524, 20653], [20523, 20653, 20652], [20524, 20525, 20653], [20525, 20654, 20653], [20525, 20526, 20655], [20525, 20655, 20654], [20526, 20527, 20655], [20527, 20656, 20655], [20527, 20528, 20657], [20527, 20657, 20656], [20528, 20529, 20657], [20529, 20658, 20657], [20529, 20530, 20659], [20529, 20659, 20658], [20530, 20531, 20659], [20531, 20660, 20659], [20531, 20532, 20661], [20531, 20661, 20660], [20532, 20533, 20661], [20533, 20662, 20661], [20533, 20534, 20663], [20533, 20663, 20662], [20534, 20535, 20663], [20535, 20664, 20663], [20535, 20536, 20665], [20535, 20665, 20664], [20536, 20537, 20665], [20537, 20666, 20665], [20537, 20538, 20667], [20537, 20667, 20666], [20538, 20539, 20667], [20539, 20668, 20667], [20539, 20540, 20669], [20539, 20669, 20668], [20540, 20541, 20669], [20541, 20670, 20669], [20541, 20542, 20671], [20541, 20671, 20670], [20542, 20543, 20671], [20543, 20672, 20671], [20543, 20544, 20673], [20543, 20673, 20672], [20544, 20545, 20673], [20545, 20674, 20673], [20545, 20546, 20675], [20545, 20675, 20674], [20546, 20547, 20675], [20547, 20676, 20675], [20547, 20548, 20677], [20547, 20677, 20676], [20548, 20549, 20677], [20549, 20678, 20677], [20549, 20550, 20679], [20549, 20679, 20678], [20550, 20551, 20679], [20551, 20680, 20679], [20551, 20552, 20681], [20551, 20681, 20680], [20552, 20553, 20681], [20553, 20682, 20681], [20553, 20554, 20683], [20553, 20683, 20682], [20554, 20555, 20683], [20555, 20684, 20683], [20555, 20556, 20685], [20555, 20685, 20684], [20557, 20558, 20687], [20557, 20687, 20686], [20558, 20559, 20687], [20559, 20688, 20687], [20559, 20560, 20689], [20559, 20689, 20688], [20560, 20561, 20689], [20561, 20690, 20689], [20561, 20562, 20691], [20561, 20691, 20690], [20562, 20563, 20691], [20563, 20692, 20691], [20563, 20564, 20693], [20563, 20693, 20692], [20564, 20565, 20693], [20565, 20694, 20693], [20565, 20566, 20695], [20565, 20695, 20694], [20566, 20567, 20695], [20567, 20696, 20695], [20567, 20568, 20697], [20567, 20697, 20696], [20568, 20569, 20697], [20569, 20698, 20697], [20569, 20570, 20699], [20569, 20699, 20698], [20570, 20571, 20699], [20571, 20700, 20699], [20571, 20572, 20701], [20571, 20701, 20700], [20572, 20573, 20701], [20573, 20702, 20701], [20573, 20574, 20703], [20573, 20703, 20702], [20574, 20575, 20703], [20575, 20704, 20703], [20575, 20576, 20705], [20575, 20705, 20704], [20576, 20577, 20705], [20577, 20706, 20705], [20577, 20578, 20707], 
[20577, 20707, 20706], [20578, 20579, 20707], [20579, 20708, 20707], [20579, 20580, 20709], [20579, 20709, 20708], [20580, 20581, 20709], [20581, 20710, 20709], [20581, 20582, 20711], [20581, 20711, 20710], [20582, 20583, 20711], [20583, 20712, 20711], [20583, 20584, 20713], [20583, 20713, 20712], [20584, 20585, 20713], [20585, 20714, 20713], [20585, 20586, 20715], [20585, 20715, 20714], [20586, 20587, 20715], [20587, 20716, 20715], [20587, 20588, 20717], [20587, 20717, 20716], [20588, 20589, 20717], [20589, 20718, 20717], [20589, 20590, 20719], [20589, 20719, 20718], [20590, 20591, 20719], [20591, 20720, 20719], [20591, 20592, 20721], [20591, 20721, 20720], [20592, 20593, 20721], [20593, 20722, 20721], [20593, 20594, 20723], [20593, 20723, 20722], [20594, 20595, 20723], [20595, 20724, 20723], [20595, 20596, 20725], [20595, 20725, 20724], [20596, 20597, 20725], [20597, 20726, 20725], [20597, 20598, 20727], [20597, 20727, 20726], [20598, 20599, 20727], [20599, 20728, 20727], [20599, 20600, 20729], [20599, 20729, 20728], [20600, 20601, 20729], [20601, 20730, 20729], [20601, 20602, 20731], [20601, 20731, 20730], [20602, 20603, 20731], [20603, 20732, 20731], [20603, 20604, 20733], [20603, 20733, 20732], [20604, 20605, 20733], [20605, 20734, 20733], [20605, 20606, 20735], [20605, 20735, 20734], [20606, 20607, 20735], [20607, 20736, 20735], [20607, 20608, 20737], [20607, 20737, 20736], [20608, 20609, 20737], [20609, 20738, 20737], [20609, 20610, 20739], [20609, 20739, 20738], [20610, 20611, 20739], [20611, 20740, 20739], [20611, 20612, 20741], [20611, 20741, 20740], [20612, 20613, 20741], [20613, 20742, 20741], [20613, 20614, 20743], [20613, 20743, 20742], [20614, 20615, 20743], [20615, 20744, 20743], [20615, 20616, 20745], [20615, 20745, 20744], [20616, 20617, 20745], [20617, 20746, 20745], [20617, 20618, 20747], [20617, 20747, 20746], [20618, 20619, 20747], [20619, 20748, 20747], [20619, 20620, 20749], [20619, 20749, 20748], [20620, 20621, 20749], [20621, 20750, 20749], [20621, 20622, 20751], [20621, 20751, 20750], [20622, 20623, 20751], [20623, 20752, 20751], [20623, 20624, 20753], [20623, 20753, 20752], [20624, 20625, 20753], [20625, 20754, 20753], [20625, 20626, 20755], [20625, 20755, 20754], [20626, 20627, 20755], [20627, 20756, 20755], [20627, 20628, 20757], [20627, 20757, 20756], [20628, 20629, 20757], [20629, 20758, 20757], [20629, 20630, 20759], [20629, 20759, 20758], [20630, 20631, 20759], [20631, 20760, 20759], [20631, 20632, 20761], [20631, 20761, 20760], [20632, 20633, 20761], [20633, 20762, 20761], [20633, 20634, 20763], [20633, 20763, 20762], [20634, 20635, 20763], [20635, 20764, 20763], [20635, 20636, 20765], [20635, 20765, 20764], [20636, 20637, 20765], [20637, 20766, 20765], [20637, 20638, 20767], [20637, 20767, 20766], [20638, 20639, 20767], [20639, 20768, 20767], [20639, 20640, 20769], [20639, 20769, 20768], [20640, 20641, 20769], [20641, 20770, 20769], [20641, 20642, 20771], [20641, 20771, 20770], [20642, 20643, 20771], [20643, 20772, 20771], [20643, 20644, 20773], [20643, 20773, 20772], [20644, 20645, 20773], [20645, 20774, 20773], [20645, 20646, 20775], [20645, 20775, 20774], [20646, 20647, 20775], [20647, 20776, 20775], [20647, 20648, 20777], [20647, 20777, 20776], [20648, 20649, 20777], [20649, 20778, 20777], [20649, 20650, 20779], [20649, 20779, 20778], [20650, 20651, 20779], [20651, 20780, 20779], [20651, 20652, 20781], [20651, 20781, 20780], [20652, 20653, 20781], [20653, 20782, 20781], [20653, 20654, 20783], [20653, 20783, 20782], [20654, 20655, 20783], [20655, 
20784, 20783], [20655, 20656, 20785], [20655, 20785, 20784], [20656, 20657, 20785], [20657, 20786, 20785], [20657, 20658, 20787], [20657, 20787, 20786], [20658, 20659, 20787], [20659, 20788, 20787], [20659, 20660, 20789], [20659, 20789, 20788], [20660, 20661, 20789], [20661, 20790, 20789], [20661, 20662, 20791], [20661, 20791, 20790], [20662, 20663, 20791], [20663, 20792, 20791], [20663, 20664, 20793], [20663, 20793, 20792], [20664, 20665, 20793], [20665, 20794, 20793], [20665, 20666, 20795], [20665, 20795, 20794], [20666, 20667, 20795], [20667, 20796, 20795], [20667, 20668, 20797], [20667, 20797, 20796], [20668, 20669, 20797], [20669, 20798, 20797], [20669, 20670, 20799], [20669, 20799, 20798], [20670, 20671, 20799], [20671, 20800, 20799], [20671, 20672, 20801], [20671, 20801, 20800], [20672, 20673, 20801], [20673, 20802, 20801], [20673, 20674, 20803], [20673, 20803, 20802], [20674, 20675, 20803], [20675, 20804, 20803], [20675, 20676, 20805], [20675, 20805, 20804], [20676, 20677, 20805], [20677, 20806, 20805], [20677, 20678, 20807], [20677, 20807, 20806], [20678, 20679, 20807], [20679, 20808, 20807], [20679, 20680, 20809], [20679, 20809, 20808], [20680, 20681, 20809], [20681, 20810, 20809], [20681, 20682, 20811], [20681, 20811, 20810], [20682, 20683, 20811], [20683, 20812, 20811], [20683, 20684, 20813], [20683, 20813, 20812], [20684, 20685, 20813], [20685, 20814, 20813], [20686, 20687, 20815], [20687, 20816, 20815], [20687, 20688, 20817], [20687, 20817, 20816], [20688, 20689, 20817], [20689, 20818, 20817], [20689, 20690, 20819], [20689, 20819, 20818], [20690, 20691, 20819], [20691, 20820, 20819], [20691, 20692, 20821], [20691, 20821, 20820], [20692, 20693, 20821], [20693, 20822, 20821], [20693, 20694, 20823], [20693, 20823, 20822], [20694, 20695, 20823], [20695, 20824, 20823], [20695, 20696, 20825], [20695, 20825, 20824], [20696, 20697, 20825], [20697, 20826, 20825], [20697, 20698, 20827], [20697, 20827, 20826], [20698, 20699, 20827], [20699, 20828, 20827], [20699, 20700, 20829], [20699, 20829, 20828], [20700, 20701, 20829], [20701, 20830, 20829], [20701, 20702, 20831], [20701, 20831, 20830], [20702, 20703, 20831], [20703, 20832, 20831], [20703, 20704, 20833], [20703, 20833, 20832], [20704, 20705, 20833], [20705, 20834, 20833], [20705, 20706, 20835], [20705, 20835, 20834], [20706, 20707, 20835], [20707, 20836, 20835], [20707, 20708, 20837], [20707, 20837, 20836], [20708, 20709, 20837], [20709, 20838, 20837], [20709, 20710, 20839], [20709, 20839, 20838], [20710, 20711, 20839], [20711, 20840, 20839], [20711, 20712, 20841], [20711, 20841, 20840], [20712, 20713, 20841], [20713, 20842, 20841], [20713, 20714, 20843], [20713, 20843, 20842], [20714, 20715, 20843], [20715, 20844, 20843], [20715, 20716, 20845], [20715, 20845, 20844], [20716, 20717, 20845], [20717, 20846, 20845], [20717, 20718, 20847], [20717, 20847, 20846], [20718, 20719, 20847], [20719, 20848, 20847], [20719, 20720, 20849], [20719, 20849, 20848], [20720, 20721, 20849], [20721, 20850, 20849], [20721, 20722, 20851], [20721, 20851, 20850], [20722, 20723, 20851], [20723, 20852, 20851], [20723, 20724, 20853], [20723, 20853, 20852], [20724, 20725, 20853], [20725, 20854, 20853], [20725, 20726, 20855], [20725, 20855, 20854], [20726, 20727, 20855], [20727, 20856, 20855], [20727, 20728, 20857], [20727, 20857, 20856], [20728, 20729, 20857], [20729, 20858, 20857], [20729, 20730, 20859], [20729, 20859, 20858], [20730, 20731, 20859], [20731, 20860, 20859], [20731, 20732, 20861], [20731, 20861, 20860], [20732, 20733, 20861], [20733, 20862, 
20861], [20733, 20734, 20863], [20733, 20863, 20862], [20734, 20735, 20863], [20735, 20864, 20863], [20735, 20736, 20865], [20735, 20865, 20864], [20736, 20737, 20865], [20737, 20866, 20865], [20737, 20738, 20867], [20737, 20867, 20866], [20738, 20739, 20867], [20739, 20868, 20867], [20739, 20740, 20869], [20739, 20869, 20868], [20740, 20741, 20869], [20741, 20870, 20869], [20741, 20742, 20871], [20741, 20871, 20870], [20742, 20743, 20871], [20743, 20872, 20871], [20743, 20744, 20873], [20743, 20873, 20872], [20744, 20745, 20873], [20745, 20874, 20873], [20745, 20746, 20875], [20745, 20875, 20874], [20746, 20747, 20875], [20747, 20876, 20875], [20747, 20748, 20877], [20747, 20877, 20876], [20748, 20749, 20877], [20749, 20878, 20877], [20749, 20750, 20879], [20749, 20879, 20878], [20750, 20751, 20879], [20751, 20880, 20879], [20751, 20752, 20881], [20751, 20881, 20880], [20752, 20753, 20881], [20753, 20882, 20881], [20753, 20754, 20883], [20753, 20883, 20882], [20754, 20755, 20883], [20755, 20884, 20883], [20755, 20756, 20885], [20755, 20885, 20884], [20756, 20757, 20885], [20757, 20886, 20885], [20757, 20758, 20887], [20757, 20887, 20886], [20758, 20759, 20887], [20759, 20888, 20887], [20759, 20760, 20889], [20759, 20889, 20888], [20760, 20761, 20889], [20761, 20890, 20889], [20761, 20762, 20891], [20761, 20891, 20890], [20762, 20763, 20891], [20763, 20892, 20891], [20763, 20764, 20893], [20763, 20893, 20892], [20764, 20765, 20893], [20765, 20894, 20893], [20765, 20766, 20895], [20765, 20895, 20894], [20766, 20767, 20895], [20767, 20896, 20895], [20767, 20768, 20897], [20767, 20897, 20896], [20768, 20769, 20897], [20769, 20898, 20897], [20769, 20770, 20899], [20769, 20899, 20898], [20770, 20771, 20899], [20771, 20900, 20899], [20771, 20772, 20901], [20771, 20901, 20900], [20772, 20773, 20901], [20773, 20902, 20901], [20773, 20774, 20903], [20773, 20903, 20902], [20774, 20775, 20903], [20775, 20904, 20903], [20775, 20776, 20905], [20775, 20905, 20904], [20776, 20777, 20905], [20777, 20906, 20905], [20777, 20778, 20907], [20777, 20907, 20906], [20778, 20779, 20907], [20779, 20908, 20907], [20779, 20780, 20909], [20779, 20909, 20908], [20780, 20781, 20909], [20781, 20910, 20909], [20781, 20782, 20911], [20781, 20911, 20910], [20782, 20783, 20911], [20783, 20912, 20911], [20783, 20784, 20913], [20783, 20913, 20912], [20784, 20785, 20913], [20785, 20914, 20913], [20785, 20786, 20915], [20785, 20915, 20914], [20786, 20787, 20915], [20787, 20916, 20915], [20787, 20788, 20917], [20787, 20917, 20916], [20788, 20789, 20917], [20789, 20918, 20917], [20789, 20790, 20919], [20789, 20919, 20918], [20790, 20791, 20919], [20791, 20920, 20919], [20791, 20792, 20921], [20791, 20921, 20920], [20792, 20793, 20921], [20793, 20922, 20921], [20793, 20794, 20923], [20793, 20923, 20922], [20794, 20795, 20923], [20795, 20924, 20923], [20795, 20796, 20925], [20795, 20925, 20924], [20796, 20797, 20925], [20797, 20926, 20925], [20797, 20798, 20927], [20797, 20927, 20926], [20798, 20799, 20927], [20799, 20928, 20927], [20799, 20800, 20929], [20799, 20929, 20928], [20800, 20801, 20929], [20801, 20930, 20929], [20801, 20802, 20931], [20801, 20931, 20930], [20802, 20803, 20931], [20803, 20932, 20931], [20803, 20804, 20933], [20803, 20933, 20932], [20804, 20805, 20933], [20805, 20934, 20933], [20805, 20806, 20935], [20805, 20935, 20934], [20806, 20807, 20935], [20807, 20936, 20935], [20807, 20808, 20937], [20807, 20937, 20936], [20808, 20809, 20937], [20809, 20938, 20937], [20809, 20810, 20939], [20809, 20939, 20938], 
[20810, 20811, 20939], [20811, 20940, 20939], [20811, 20812, 20941], [20811, 20941, 20940], [20812, 20813, 20941], [20813, 20942, 20941], [20813, 20814, 20943], [20813, 20943, 20942], [20815, 20816, 20945], [20815, 20945, 20944], [20816, 20817, 20945], [20817, 20946, 20945], [20817, 20818, 20947], [20817, 20947, 20946], [20818, 20819, 20947], [20819, 20948, 20947], [20819, 20820, 20949], [20819, 20949, 20948], [20820, 20821, 20949], [20821, 20950, 20949], [20821, 20822, 20951], [20821, 20951, 20950], [20822, 20823, 20951], [20823, 20952, 20951], [20823, 20824, 20953], [20823, 20953, 20952], [20824, 20825, 20953], [20825, 20954, 20953], [20825, 20826, 20955], [20825, 20955, 20954], [20826, 20827, 20955], [20827, 20956, 20955], [20827, 20828, 20957], [20827, 20957, 20956], [20828, 20829, 20957], [20829, 20958, 20957], [20829, 20830, 20959], [20829, 20959, 20958], [20830, 20831, 20959], [20831, 20960, 20959], [20831, 20832, 20961], [20831, 20961, 20960], [20832, 20833, 20961], [20833, 20962, 20961], [20833, 20834, 20963], [20833, 20963, 20962], [20834, 20835, 20963], [20835, 20964, 20963], [20835, 20836, 20965], [20835, 20965, 20964], [20836, 20837, 20965], [20837, 20966, 20965], [20837, 20838, 20967], [20837, 20967, 20966], [20838, 20839, 20967], [20839, 20968, 20967], [20839, 20840, 20969], [20839, 20969, 20968], [20840, 20841, 20969], [20841, 20970, 20969], [20841, 20842, 20971], [20841, 20971, 20970], [20842, 20843, 20971], [20843, 20972, 20971], [20843, 20844, 20973], [20843, 20973, 20972], [20844, 20845, 20973], [20845, 20974, 20973], [20845, 20846, 20975], [20845, 20975, 20974], [20846, 20847, 20975], [20847, 20976, 20975], [20847, 20848, 20977], [20847, 20977, 20976], [20848, 20849, 20977], [20849, 20978, 20977], [20849, 20850, 20979], [20849, 20979, 20978], [20850, 20851, 20979], [20851, 20980, 20979], [20851, 20852, 20981], [20851, 20981, 20980], [20852, 20853, 20981], [20853, 20982, 20981], [20853, 20854, 20983], [20853, 20983, 20982], [20854, 20855, 20983], [20855, 20984, 20983], [20855, 20856, 20985], [20855, 20985, 20984], [20856, 20857, 20985], [20857, 20986, 20985], [20857, 20858, 20987], [20857, 20987, 20986], [20858, 20859, 20987], [20859, 20988, 20987], [20859, 20860, 20989], [20859, 20989, 20988], [20860, 20861, 20989], [20861, 20990, 20989], [20861, 20862, 20991], [20861, 20991, 20990], [20862, 20863, 20991], [20863, 20992, 20991], [20863, 20864, 20993], [20863, 20993, 20992], [20864, 20865, 20993], [20865, 20994, 20993], [20865, 20866, 20995], [20865, 20995, 20994], [20866, 20867, 20995], [20867, 20996, 20995], [20867, 20868, 20997], [20867, 20997, 20996], [20868, 20869, 20997], [20869, 20998, 20997], [20869, 20870, 20999], [20869, 20999, 20998], [20870, 20871, 20999], [20871, 21000, 20999], [20871, 20872, 21001], [20871, 21001, 21000], [20872, 20873, 21001], [20873, 21002, 21001], [20873, 20874, 21003], [20873, 21003, 21002], [20874, 20875, 21003], [20875, 21004, 21003], [20875, 20876, 21005], [20875, 21005, 21004], [20876, 20877, 21005], [20877, 21006, 21005], [20877, 20878, 21007], [20877, 21007, 21006], [20878, 20879, 21007], [20879, 21008, 21007], [20879, 20880, 21009], [20879, 21009, 21008], [20880, 20881, 21009], [20881, 21010, 21009], [20881, 20882, 21011], [20881, 21011, 21010], [20882, 20883, 21011], [20883, 21012, 21011], [20883, 20884, 21013], [20883, 21013, 21012], [20884, 20885, 21013], [20885, 21014, 21013], [20885, 20886, 21015], [20885, 21015, 21014], [20886, 20887, 21015], [20887, 21016, 21015], [20887, 20888, 21017], [20887, 21017, 21016], [20888, 
20889, 21017], [20889, 21018, 21017], [20889, 20890, 21019], [20889, 21019, 21018], [20890, 20891, 21019], [20891, 21020, 21019], [20891, 20892, 21021], [20891, 21021, 21020], [20892, 20893, 21021], [20893, 21022, 21021], [20893, 20894, 21023], [20893, 21023, 21022], [20894, 20895, 21023], [20895, 21024, 21023], [20895, 20896, 21025], [20895, 21025, 21024], [20896, 20897, 21025], [20897, 21026, 21025], [20897, 20898, 21027], [20897, 21027, 21026], [20898, 20899, 21027], [20899, 21028, 21027], [20899, 20900, 21029], [20899, 21029, 21028], [20900, 20901, 21029], [20901, 21030, 21029], [20901, 20902, 21031], [20901, 21031, 21030], [20902, 20903, 21031], [20903, 21032, 21031], [20903, 20904, 21033], [20903, 21033, 21032], [20904, 20905, 21033], [20905, 21034, 21033], [20905, 20906, 21035], [20905, 21035, 21034], [20906, 20907, 21035], [20907, 21036, 21035], [20907, 20908, 21037], [20907, 21037, 21036], [20908, 20909, 21037], [20909, 21038, 21037], [20909, 20910, 21039], [20909, 21039, 21038], [20910, 20911, 21039], [20911, 21040, 21039], [20911, 20912, 21041], [20911, 21041, 21040], [20912, 20913, 21041], [20913, 21042, 21041], [20913, 20914, 21043], [20913, 21043, 21042], [20914, 20915, 21043], [20915, 21044, 21043], [20915, 20916, 21045], [20915, 21045, 21044], [20916, 20917, 21045], [20917, 21046, 21045], [20917, 20918, 21047], [20917, 21047, 21046], [20918, 20919, 21047], [20919, 21048, 21047], [20919, 20920, 21049], [20919, 21049, 21048], [20920, 20921, 21049], [20921, 21050, 21049], [20921, 20922, 21051], [20921, 21051, 21050], [20922, 20923, 21051], [20923, 21052, 21051], [20923, 20924, 21053], [20923, 21053, 21052], [20924, 20925, 21053], [20925, 21054, 21053], [20925, 20926, 21055], [20925, 21055, 21054], [20926, 20927, 21055], [20927, 21056, 21055], [20927, 20928, 21057], [20927, 21057, 21056], [20928, 20929, 21057], [20929, 21058, 21057], [20929, 20930, 21059], [20929, 21059, 21058], [20930, 20931, 21059], [20931, 21060, 21059], [20931, 20932, 21061], [20931, 21061, 21060], [20932, 20933, 21061], [20933, 21062, 21061], [20933, 20934, 21063], [20933, 21063, 21062], [20934, 20935, 21063], [20935, 21064, 21063], [20935, 20936, 21065], [20935, 21065, 21064], [20936, 20937, 21065], [20937, 21066, 21065], [20937, 20938, 21067], [20937, 21067, 21066], [20938, 20939, 21067], [20939, 21068, 21067], [20939, 20940, 21069], [20939, 21069, 21068], [20940, 20941, 21069], [20941, 21070, 21069], [20941, 20942, 21071], [20941, 21071, 21070], [20942, 20943, 21071], [20943, 21072, 21071], [20944, 20945, 21073], [20945, 21074, 21073], [20945, 20946, 21075], [20945, 21075, 21074], [20946, 20947, 21075], [20947, 21076, 21075], [20947, 20948, 21077], [20947, 21077, 21076], [20948, 20949, 21077], [20949, 21078, 21077], [20949, 20950, 21079], [20949, 21079, 21078], [20950, 20951, 21079], [20951, 21080, 21079], [20951, 20952, 21081], [20951, 21081, 21080], [20952, 20953, 21081], [20953, 21082, 21081], [20953, 20954, 21083], [20953, 21083, 21082], [20954, 20955, 21083], [20955, 21084, 21083], [20955, 20956, 21085], [20955, 21085, 21084], [20956, 20957, 21085], [20957, 21086, 21085], [20957, 20958, 21087], [20957, 21087, 21086], [20958, 20959, 21087], [20959, 21088, 21087], [20959, 20960, 21089], [20959, 21089, 21088], [20960, 20961, 21089], [20961, 21090, 21089], [20961, 20962, 21091], [20961, 21091, 21090], [20962, 20963, 21091], [20963, 21092, 21091], [20963, 20964, 21093], [20963, 21093, 21092], [20964, 20965, 21093], [20965, 21094, 21093], [20965, 20966, 21095], [20965, 21095, 21094], [20966, 20967, 
21095], [20967, 21096, 21095], [20967, 20968, 21097], [20967, 21097, 21096], [20968, 20969, 21097], [20969, 21098, 21097], [20969, 20970, 21099], [20969, 21099, 21098], [20970, 20971, 21099], [20971, 21100, 21099], [20971, 20972, 21101], [20971, 21101, 21100], [20972, 20973, 21101], [20973, 21102, 21101], [20973, 20974, 21103], [20973, 21103, 21102], [20974, 20975, 21103], [20975, 21104, 21103], [20975, 20976, 21105], [20975, 21105, 21104], [20976, 20977, 21105], [20977, 21106, 21105], [20977, 20978, 21107], [20977, 21107, 21106], [20978, 20979, 21107], [20979, 21108, 21107], [20979, 20980, 21109], [20979, 21109, 21108], [20980, 20981, 21109], [20981, 21110, 21109], [20981, 20982, 21111], [20981, 21111, 21110], [20982, 20983, 21111], [20983, 21112, 21111], [20983, 20984, 21113], [20983, 21113, 21112], [20984, 20985, 21113], [20985, 21114, 21113], [20985, 20986, 21115], [20985, 21115, 21114], [20986, 20987, 21115], [20987, 21116, 21115], [20987, 20988, 21117], [20987, 21117, 21116], [20988, 20989, 21117], [20989, 21118, 21117], [20989, 20990, 21119], [20989, 21119, 21118], [20990, 20991, 21119], [20991, 21120, 21119], [20991, 20992, 21121], [20991, 21121, 21120], [20992, 20993, 21121], [20993, 21122, 21121], [20993, 20994, 21123], [20993, 21123, 21122], [20994, 20995, 21123], [20995, 21124, 21123], [20995, 20996, 21125], [20995, 21125, 21124], [20996, 20997, 21125], [20997, 21126, 21125], [20997, 20998, 21127], [20997, 21127, 21126], [20998, 20999, 21127], [20999, 21128, 21127], [20999, 21000, 21129], [20999, 21129, 21128], [21000, 21001, 21129], [21001, 21130, 21129], [21001, 21002, 21131], [21001, 21131, 21130], [21002, 21003, 21131], [21003, 21132, 21131], [21003, 21004, 21133], [21003, 21133, 21132], [21004, 21005, 21133], [21005, 21134, 21133], [21005, 21006, 21135], [21005, 21135, 21134], [21006, 21007, 21135], [21007, 21136, 21135], [21007, 21008, 21137], [21007, 21137, 21136], [21008, 21009, 21137], [21009, 21138, 21137], [21009, 21010, 21139], [21009, 21139, 21138], [21010, 21011, 21139], [21011, 21140, 21139], [21011, 21012, 21141], [21011, 21141, 21140], [21012, 21013, 21141], [21013, 21142, 21141], [21013, 21014, 21143], [21013, 21143, 21142], [21014, 21015, 21143], [21015, 21144, 21143], [21015, 21016, 21145], [21015, 21145, 21144], [21016, 21017, 21145], [21017, 21146, 21145], [21017, 21018, 21147], [21017, 21147, 21146], [21018, 21019, 21147], [21019, 21148, 21147], [21019, 21020, 21149], [21019, 21149, 21148], [21020, 21021, 21149], [21021, 21150, 21149], [21021, 21022, 21151], [21021, 21151, 21150], [21022, 21023, 21151], [21023, 21152, 21151], [21023, 21024, 21153], [21023, 21153, 21152], [21024, 21025, 21153], [21025, 21154, 21153], [21025, 21026, 21155], [21025, 21155, 21154], [21026, 21027, 21155], [21027, 21156, 21155], [21027, 21028, 21157], [21027, 21157, 21156], [21028, 21029, 21157], [21029, 21158, 21157], [21029, 21030, 21159], [21029, 21159, 21158], [21030, 21031, 21159], [21031, 21160, 21159], [21031, 21032, 21161], [21031, 21161, 21160], [21032, 21033, 21161], [21033, 21162, 21161], [21033, 21034, 21163], [21033, 21163, 21162], [21034, 21035, 21163], [21035, 21164, 21163], [21035, 21036, 21165], [21035, 21165, 21164], [21036, 21037, 21165], [21037, 21166, 21165], [21037, 21038, 21167], [21037, 21167, 21166], [21038, 21039, 21167], [21039, 21168, 21167], [21039, 21040, 21169], [21039, 21169, 21168], [21040, 21041, 21169], [21041, 21170, 21169], [21041, 21042, 21171], [21041, 21171, 21170], [21042, 21043, 21171], [21043, 21172, 21171], [21043, 21044, 21173], 
[21043, 21173, 21172], [21044, 21045, 21173], [21045, 21174, 21173], [21045, 21046, 21175], [21045, 21175, 21174], [21046, 21047, 21175], [21047, 21176, 21175], [21047, 21048, 21177], [21047, 21177, 21176], [21048, 21049, 21177], [21049, 21178, 21177], [21049, 21050, 21179], [21049, 21179, 21178], [21050, 21051, 21179], [21051, 21180, 21179], [21051, 21052, 21181], [21051, 21181, 21180], [21052, 21053, 21181], [21053, 21182, 21181], [21053, 21054, 21183], [21053, 21183, 21182], [21054, 21055, 21183], [21055, 21184, 21183], [21055, 21056, 21185], [21055, 21185, 21184], [21056, 21057, 21185], [21057, 21186, 21185], [21057, 21058, 21187], [21057, 21187, 21186], [21058, 21059, 21187], [21059, 21188, 21187], [21059, 21060, 21189], [21059, 21189, 21188], [21060, 21061, 21189], [21061, 21190, 21189], [21061, 21062, 21191], [21061, 21191, 21190], [21062, 21063, 21191], [21063, 21192, 21191], [21063, 21064, 21193], [21063, 21193, 21192], [21064, 21065, 21193], [21065, 21194, 21193], [21065, 21066, 21195], [21065, 21195, 21194], [21066, 21067, 21195], [21067, 21196, 21195], [21067, 21068, 21197], [21067, 21197, 21196], [21068, 21069, 21197], [21069, 21198, 21197], [21069, 21070, 21199], [21069, 21199, 21198], [21070, 21071, 21199], [21071, 21200, 21199], [21071, 21072, 21201], [21071, 21201, 21200], [21073, 21074, 21203], [21073, 21203, 21202], [21074, 21075, 21203], [21075, 21204, 21203], [21075, 21076, 21205], [21075, 21205, 21204], [21076, 21077, 21205], [21077, 21206, 21205], [21077, 21078, 21207], [21077, 21207, 21206], [21078, 21079, 21207], [21079, 21208, 21207], [21079, 21080, 21209], [21079, 21209, 21208], [21080, 21081, 21209], [21081, 21210, 21209], [21081, 21082, 21211], [21081, 21211, 21210], [21082, 21083, 21211], [21083, 21212, 21211], [21083, 21084, 21213], [21083, 21213, 21212], [21084, 21085, 21213], [21085, 21214, 21213], [21085, 21086, 21215], [21085, 21215, 21214], [21086, 21087, 21215], [21087, 21216, 21215], [21087, 21088, 21217], [21087, 21217, 21216], [21088, 21089, 21217], [21089, 21218, 21217], [21089, 21090, 21219], [21089, 21219, 21218], [21090, 21091, 21219], [21091, 21220, 21219], [21091, 21092, 21221], [21091, 21221, 21220], [21092, 21093, 21221], [21093, 21222, 21221], [21093, 21094, 21223], [21093, 21223, 21222], [21094, 21095, 21223], [21095, 21224, 21223], [21095, 21096, 21225], [21095, 21225, 21224], [21096, 21097, 21225], [21097, 21226, 21225], [21097, 21098, 21227], [21097, 21227, 21226], [21098, 21099, 21227], [21099, 21228, 21227], [21099, 21100, 21229], [21099, 21229, 21228], [21100, 21101, 21229], [21101, 21230, 21229], [21101, 21102, 21231], [21101, 21231, 21230], [21102, 21103, 21231], [21103, 21232, 21231], [21103, 21104, 21233], [21103, 21233, 21232], [21104, 21105, 21233], [21105, 21234, 21233], [21105, 21106, 21235], [21105, 21235, 21234], [21106, 21107, 21235], [21107, 21236, 21235], [21107, 21108, 21237], [21107, 21237, 21236], [21108, 21109, 21237], [21109, 21238, 21237], [21109, 21110, 21239], [21109, 21239, 21238], [21110, 21111, 21239], [21111, 21240, 21239], [21111, 21112, 21241], [21111, 21241, 21240], [21112, 21113, 21241], [21113, 21242, 21241], [21113, 21114, 21243], [21113, 21243, 21242], [21114, 21115, 21243], [21115, 21244, 21243], [21115, 21116, 21245], [21115, 21245, 21244], [21116, 21117, 21245], [21117, 21246, 21245], [21117, 21118, 21247], [21117, 21247, 21246], [21118, 21119, 21247], [21119, 21248, 21247], [21119, 21120, 21249], [21119, 21249, 21248], [21120, 21121, 21249], [21121, 21250, 21249], [21121, 21122, 21251], [21121, 
21251, 21250], [21122, 21123, 21251], [21123, 21252, 21251], [21123, 21124, 21253], [21123, 21253, 21252], [21124, 21125, 21253], [21125, 21254, 21253], [21125, 21126, 21255], [21125, 21255, 21254], [21126, 21127, 21255], [21127, 21256, 21255], [21127, 21128, 21257], [21127, 21257, 21256], [21128, 21129, 21257], [21129, 21258, 21257], [21129, 21130, 21259], [21129, 21259, 21258], [21130, 21131, 21259], [21131, 21260, 21259], [21131, 21132, 21261], [21131, 21261, 21260], [21132, 21133, 21261], [21133, 21262, 21261], [21133, 21134, 21263], [21133, 21263, 21262], [21134, 21135, 21263], [21135, 21264, 21263], [21135, 21136, 21265], [21135, 21265, 21264], [21136, 21137, 21265], [21137, 21266, 21265], [21137, 21138, 21267], [21137, 21267, 21266], [21138, 21139, 21267], [21139, 21268, 21267], [21139, 21140, 21269], [21139, 21269, 21268], [21140, 21141, 21269], [21141, 21270, 21269], [21141, 21142, 21271], [21141, 21271, 21270], [21142, 21143, 21271], [21143, 21272, 21271], [21143, 21144, 21273], [21143, 21273, 21272], [21144, 21145, 21273], [21145, 21274, 21273], [21145, 21146, 21275], [21145, 21275, 21274], [21146, 21147, 21275], [21147, 21276, 21275], [21147, 21148, 21277], [21147, 21277, 21276], [21148, 21149, 21277], [21149, 21278, 21277], [21149, 21150, 21279], [21149, 21279, 21278], [21150, 21151, 21279], [21151, 21280, 21279], [21151, 21152, 21281], [21151, 21281, 21280], [21152, 21153, 21281], [21153, 21282, 21281], [21153, 21154, 21283], [21153, 21283, 21282], [21154, 21155, 21283], [21155, 21284, 21283], [21155, 21156, 21285], [21155, 21285, 21284], [21156, 21157, 21285], [21157, 21286, 21285], [21157, 21158, 21287], [21157, 21287, 21286], [21158, 21159, 21287], [21159, 21288, 21287], [21159, 21160, 21289], [21159, 21289, 21288], [21160, 21161, 21289], [21161, 21290, 21289], [21161, 21162, 21291], [21161, 21291, 21290], [21162, 21163, 21291], [21163, 21292, 21291], [21163, 21164, 21293], [21163, 21293, 21292], [21164, 21165, 21293], [21165, 21294, 21293], [21165, 21166, 21295], [21165, 21295, 21294], [21166, 21167, 21295], [21167, 21296, 21295], [21167, 21168, 21297], [21167, 21297, 21296], [21168, 21169, 21297], [21169, 21298, 21297], [21169, 21170, 21299], [21169, 21299, 21298], [21170, 21171, 21299], [21171, 21300, 21299], [21171, 21172, 21301], [21171, 21301, 21300], [21172, 21173, 21301], [21173, 21302, 21301], [21173, 21174, 21303], [21173, 21303, 21302], [21174, 21175, 21303], [21175, 21304, 21303], [21175, 21176, 21305], [21175, 21305, 21304], [21176, 21177, 21305], [21177, 21306, 21305], [21177, 21178, 21307], [21177, 21307, 21306], [21178, 21179, 21307], [21179, 21308, 21307], [21179, 21180, 21309], [21179, 21309, 21308], [21180, 21181, 21309], [21181, 21310, 21309], [21181, 21182, 21311], [21181, 21311, 21310], [21182, 21183, 21311], [21183, 21312, 21311], [21183, 21184, 21313], [21183, 21313, 21312], [21184, 21185, 21313], [21185, 21314, 21313], [21185, 21186, 21315], [21185, 21315, 21314], [21186, 21187, 21315], [21187, 21316, 21315], [21187, 21188, 21317], [21187, 21317, 21316], [21188, 21189, 21317], [21189, 21318, 21317], [21189, 21190, 21319], [21189, 21319, 21318], [21190, 21191, 21319], [21191, 21320, 21319], [21191, 21192, 21321], [21191, 21321, 21320], [21192, 21193, 21321], [21193, 21322, 21321], [21193, 21194, 21323], [21193, 21323, 21322], [21194, 21195, 21323], [21195, 21324, 21323], [21195, 21196, 21325], [21195, 21325, 21324], [21196, 21197, 21325], [21197, 21326, 21325], [21197, 21198, 21327], [21197, 21327, 21326], [21198, 21199, 21327], [21199, 21328, 
21327], [21199, 21200, 21329], [21199, 21329, 21328], [21200, 21201, 21329], [21201, 21330, 21329], [21202, 21203, 21331], [21203, 21332, 21331], [21203, 21204, 21333], [21203, 21333, 21332], [21204, 21205, 21333], [21205, 21334, 21333], [21205, 21206, 21335], [21205, 21335, 21334], [21206, 21207, 21335], [21207, 21336, 21335], [21207, 21208, 21337], [21207, 21337, 21336], [21208, 21209, 21337], [21209, 21338, 21337], [21209, 21210, 21339], [21209, 21339, 21338], [21210, 21211, 21339], [21211, 21340, 21339], [21211, 21212, 21341], [21211, 21341, 21340], [21212, 21213, 21341], [21213, 21342, 21341], [21213, 21214, 21343], [21213, 21343, 21342], [21214, 21215, 21343], [21215, 21344, 21343], [21215, 21216, 21345], [21215, 21345, 21344], [21216, 21217, 21345], [21217, 21346, 21345], [21217, 21218, 21347], [21217, 21347, 21346], [21218, 21219, 21347], [21219, 21348, 21347], [21219, 21220, 21349], [21219, 21349, 21348], [21220, 21221, 21349], [21221, 21350, 21349], [21221, 21222, 21351], [21221, 21351, 21350], [21222, 21223, 21351], [21223, 21352, 21351], [21223, 21224, 21353], [21223, 21353, 21352], [21224, 21225, 21353], [21225, 21354, 21353], [21225, 21226, 21355], [21225, 21355, 21354], [21226, 21227, 21355], [21227, 21356, 21355], [21227, 21228, 21357], [21227, 21357, 21356], [21228, 21229, 21357], [21229, 21358, 21357], [21229, 21230, 21359], [21229, 21359, 21358], [21230, 21231, 21359], [21231, 21360, 21359], [21231, 21232, 21361], [21231, 21361, 21360], [21232, 21233, 21361], [21233, 21362, 21361], [21233, 21234, 21363], [21233, 21363, 21362], [21234, 21235, 21363], [21235, 21364, 21363], [21235, 21236, 21365], [21235, 21365, 21364], [21236, 21237, 21365], [21237, 21366, 21365], [21237, 21238, 21367], [21237, 21367, 21366], [21238, 21239, 21367], [21239, 21368, 21367], [21239, 21240, 21369], [21239, 21369, 21368], [21240, 21241, 21369], [21241, 21370, 21369], [21241, 21242, 21371], [21241, 21371, 21370], [21242, 21243, 21371], [21243, 21372, 21371], [21243, 21244, 21373], [21243, 21373, 21372], [21244, 21245, 21373], [21245, 21374, 21373], [21245, 21246, 21375], [21245, 21375, 21374], [21246, 21247, 21375], [21247, 21376, 21375], [21247, 21248, 21377], [21247, 21377, 21376], [21248, 21249, 21377], [21249, 21378, 21377], [21249, 21250, 21379], [21249, 21379, 21378], [21250, 21251, 21379], [21251, 21380, 21379], [21251, 21252, 21381], [21251, 21381, 21380], [21252, 21253, 21381], [21253, 21382, 21381], [21253, 21254, 21383], [21253, 21383, 21382], [21254, 21255, 21383], [21255, 21384, 21383], [21255, 21256, 21385], [21255, 21385, 21384], [21256, 21257, 21385], [21257, 21386, 21385], [21257, 21258, 21387], [21257, 21387, 21386], [21258, 21259, 21387], [21259, 21388, 21387], [21259, 21260, 21389], [21259, 21389, 21388], [21260, 21261, 21389], [21261, 21390, 21389], [21261, 21262, 21391], [21261, 21391, 21390], [21262, 21263, 21391], [21263, 21392, 21391], [21263, 21264, 21393], [21263, 21393, 21392], [21264, 21265, 21393], [21265, 21394, 21393], [21265, 21266, 21395], [21265, 21395, 21394], [21266, 21267, 21395], [21267, 21396, 21395], [21267, 21268, 21397], [21267, 21397, 21396], [21268, 21269, 21397], [21269, 21398, 21397], [21269, 21270, 21399], [21269, 21399, 21398], [21270, 21271, 21399], [21271, 21400, 21399], [21271, 21272, 21401], [21271, 21401, 21400], [21272, 21273, 21401], [21273, 21402, 21401], [21273, 21274, 21403], [21273, 21403, 21402], [21274, 21275, 21403], [21275, 21404, 21403], [21275, 21276, 21405], [21275, 21405, 21404], [21276, 21277, 21405], [21277, 21406, 21405], 
[21277, 21278, 21407], [21277, 21407, 21406], [21278, 21279, 21407], [21279, 21408, 21407], [21279, 21280, 21409], [21279, 21409, 21408], [21280, 21281, 21409], [21281, 21410, 21409], [21281, 21282, 21411], [21281, 21411, 21410], [21282, 21283, 21411], [21283, 21412, 21411], [21283, 21284, 21413], [21283, 21413, 21412], [21284, 21285, 21413], [21285, 21414, 21413], [21285, 21286, 21415], [21285, 21415, 21414], [21286, 21287, 21415], [21287, 21416, 21415], [21287, 21288, 21417], [21287, 21417, 21416], [21288, 21289, 21417], [21289, 21418, 21417], [21289, 21290, 21419], [21289, 21419, 21418], [21290, 21291, 21419], [21291, 21420, 21419], [21291, 21292, 21421], [21291, 21421, 21420], [21292, 21293, 21421], [21293, 21422, 21421], [21293, 21294, 21423], [21293, 21423, 21422], [21294, 21295, 21423], [21295, 21424, 21423], [21295, 21296, 21425], [21295, 21425, 21424], [21296, 21297, 21425], [21297, 21426, 21425], [21297, 21298, 21427], [21297, 21427, 21426], [21298, 21299, 21427], [21299, 21428, 21427], [21299, 21300, 21429], [21299, 21429, 21428], [21300, 21301, 21429], [21301, 21430, 21429], [21301, 21302, 21431], [21301, 21431, 21430], [21302, 21303, 21431], [21303, 21432, 21431], [21303, 21304, 21433], [21303, 21433, 21432], [21304, 21305, 21433], [21305, 21434, 21433], [21305, 21306, 21435], [21305, 21435, 21434], [21306, 21307, 21435], [21307, 21436, 21435], [21307, 21308, 21437], [21307, 21437, 21436], [21308, 21309, 21437], [21309, 21438, 21437], [21309, 21310, 21439], [21309, 21439, 21438], [21310, 21311, 21439], [21311, 21440, 21439], [21311, 21312, 21441], [21311, 21441, 21440], [21312, 21313, 21441], [21313, 21442, 21441], [21313, 21314, 21443], [21313, 21443, 21442], [21314, 21315, 21443], [21315, 21444, 21443], [21315, 21316, 21445], [21315, 21445, 21444], [21316, 21317, 21445], [21317, 21446, 21445], [21317, 21318, 21447], [21317, 21447, 21446], [21318, 21319, 21447], [21319, 21448, 21447], [21319, 21320, 21449], [21319, 21449, 21448], [21320, 21321, 21449], [21321, 21450, 21449], [21321, 21322, 21451], [21321, 21451, 21450], [21322, 21323, 21451], [21323, 21452, 21451], [21323, 21324, 21453], [21323, 21453, 21452], [21324, 21325, 21453], [21325, 21454, 21453], [21325, 21326, 21455], [21325, 21455, 21454], [21326, 21327, 21455], [21327, 21456, 21455], [21327, 21328, 21457], [21327, 21457, 21456], [21328, 21329, 21457], [21329, 21458, 21457], [21329, 21330, 21459], [21329, 21459, 21458], [21331, 21332, 21461], [21331, 21461, 21460], [21332, 21333, 21461], [21333, 21462, 21461], [21333, 21334, 21463], [21333, 21463, 21462], [21334, 21335, 21463], [21335, 21464, 21463], [21335, 21336, 21465], [21335, 21465, 21464], [21336, 21337, 21465], [21337, 21466, 21465], [21337, 21338, 21467], [21337, 21467, 21466], [21338, 21339, 21467], [21339, 21468, 21467], [21339, 21340, 21469], [21339, 21469, 21468], [21340, 21341, 21469], [21341, 21470, 21469], [21341, 21342, 21471], [21341, 21471, 21470], [21342, 21343, 21471], [21343, 21472, 21471], [21343, 21344, 21473], [21343, 21473, 21472], [21344, 21345, 21473], [21345, 21474, 21473], [21345, 21346, 21475], [21345, 21475, 21474], [21346, 21347, 21475], [21347, 21476, 21475], [21347, 21348, 21477], [21347, 21477, 21476], [21348, 21349, 21477], [21349, 21478, 21477], [21349, 21350, 21479], [21349, 21479, 21478], [21350, 21351, 21479], [21351, 21480, 21479], [21351, 21352, 21481], [21351, 21481, 21480], [21352, 21353, 21481], [21353, 21482, 21481], [21353, 21354, 21483], [21353, 21483, 21482], [21354, 21355, 21483], [21355, 21484, 21483], [21355, 
21356, 21485], [21355, 21485, 21484], [21356, 21357, 21485], [21357, 21486, 21485], [21357, 21358, 21487], [21357, 21487, 21486], [21358, 21359, 21487], [21359, 21488, 21487], [21359, 21360, 21489], [21359, 21489, 21488], [21360, 21361, 21489], [21361, 21490, 21489], [21361, 21362, 21491], [21361, 21491, 21490], [21362, 21363, 21491], [21363, 21492, 21491], [21363, 21364, 21493], [21363, 21493, 21492], [21364, 21365, 21493], [21365, 21494, 21493], [21365, 21366, 21495], [21365, 21495, 21494], [21366, 21367, 21495], [21367, 21496, 21495], [21367, 21368, 21497], [21367, 21497, 21496], [21368, 21369, 21497], [21369, 21498, 21497], [21369, 21370, 21499], [21369, 21499, 21498], [21370, 21371, 21499], [21371, 21500, 21499], [21371, 21372, 21501], [21371, 21501, 21500], [21372, 21373, 21501], [21373, 21502, 21501], [21373, 21374, 21503], [21373, 21503, 21502], [21374, 21375, 21503], [21375, 21504, 21503], [21375, 21376, 21505], [21375, 21505, 21504], [21376, 21377, 21505], [21377, 21506, 21505], [21377, 21378, 21507], [21377, 21507, 21506], [21378, 21379, 21507], [21379, 21508, 21507], [21379, 21380, 21509], [21379, 21509, 21508], [21380, 21381, 21509], [21381, 21510, 21509], [21381, 21382, 21511], [21381, 21511, 21510], [21382, 21383, 21511], [21383, 21512, 21511], [21383, 21384, 21513], [21383, 21513, 21512], [21384, 21385, 21513], [21385, 21514, 21513], [21385, 21386, 21515], [21385, 21515, 21514], [21386, 21387, 21515], [21387, 21516, 21515], [21387, 21388, 21517], [21387, 21517, 21516], [21388, 21389, 21517], [21389, 21518, 21517], [21389, 21390, 21519], [21389, 21519, 21518], [21390, 21391, 21519], [21391, 21520, 21519], [21391, 21392, 21521], [21391, 21521, 21520], [21392, 21393, 21521], [21393, 21522, 21521], [21393, 21394, 21523], [21393, 21523, 21522], [21394, 21395, 21523], [21395, 21524, 21523], [21395, 21396, 21525], [21395, 21525, 21524], [21396, 21397, 21525], [21397, 21526, 21525], [21397, 21398, 21527], [21397, 21527, 21526], [21398, 21399, 21527], [21399, 21528, 21527], [21399, 21400, 21529], [21399, 21529, 21528], [21400, 21401, 21529], [21401, 21530, 21529], [21401, 21402, 21531], [21401, 21531, 21530], [21402, 21403, 21531], [21403, 21532, 21531], [21403, 21404, 21533], [21403, 21533, 21532], [21404, 21405, 21533], [21405, 21534, 21533], [21405, 21406, 21535], [21405, 21535, 21534], [21406, 21407, 21535], [21407, 21536, 21535], [21407, 21408, 21537], [21407, 21537, 21536], [21408, 21409, 21537], [21409, 21538, 21537], [21409, 21410, 21539], [21409, 21539, 21538], [21410, 21411, 21539], [21411, 21540, 21539], [21411, 21412, 21541], [21411, 21541, 21540], [21412, 21413, 21541], [21413, 21542, 21541], [21413, 21414, 21543], [21413, 21543, 21542], [21414, 21415, 21543], [21415, 21544, 21543], [21415, 21416, 21545], [21415, 21545, 21544], [21416, 21417, 21545], [21417, 21546, 21545], [21417, 21418, 21547], [21417, 21547, 21546], [21418, 21419, 21547], [21419, 21548, 21547], [21419, 21420, 21549], [21419, 21549, 21548], [21420, 21421, 21549], [21421, 21550, 21549], [21421, 21422, 21551], [21421, 21551, 21550], [21422, 21423, 21551], [21423, 21552, 21551], [21423, 21424, 21553], [21423, 21553, 21552], [21424, 21425, 21553], [21425, 21554, 21553], [21425, 21426, 21555], [21425, 21555, 21554], [21426, 21427, 21555], [21427, 21556, 21555], [21427, 21428, 21557], [21427, 21557, 21556], [21428, 21429, 21557], [21429, 21558, 21557], [21429, 21430, 21559], [21429, 21559, 21558], [21430, 21431, 21559], [21431, 21560, 21559], [21431, 21432, 21561], [21431, 21561, 21560], [21432, 21433, 
21561], [21433, 21562, 21561], [21433, 21434, 21563], [21433, 21563, 21562], [21434, 21435, 21563], [21435, 21564, 21563], [21435, 21436, 21565], [21435, 21565, 21564], [21436, 21437, 21565], [21437, 21566, 21565], [21437, 21438, 21567], [21437, 21567, 21566], [21438, 21439, 21567], [21439, 21568, 21567], [21439, 21440, 21569], [21439, 21569, 21568], [21440, 21441, 21569], [21441, 21570, 21569], [21441, 21442, 21571], [21441, 21571, 21570], [21442, 21443, 21571], [21443, 21572, 21571], [21443, 21444, 21573], [21443, 21573, 21572], [21444, 21445, 21573], [21445, 21574, 21573], [21445, 21446, 21575], [21445, 21575, 21574], [21446, 21447, 21575], [21447, 21576, 21575], [21447, 21448, 21577], [21447, 21577, 21576], [21448, 21449, 21577], [21449, 21578, 21577], [21449, 21450, 21579], [21449, 21579, 21578], [21450, 21451, 21579], [21451, 21580, 21579], [21451, 21452, 21581], [21451, 21581, 21580], [21452, 21453, 21581], [21453, 21582, 21581], [21453, 21454, 21583], [21453, 21583, 21582], [21454, 21455, 21583], [21455, 21584, 21583], [21455, 21456, 21585], [21455, 21585, 21584], [21456, 21457, 21585], [21457, 21586, 21585], [21457, 21458, 21587], [21457, 21587, 21586], [21458, 21459, 21587], [21459, 21588, 21587], [21460, 21461, 21589], [21461, 21590, 21589], [21461, 21462, 21591], [21461, 21591, 21590], [21462, 21463, 21591], [21463, 21592, 21591], [21463, 21464, 21593], [21463, 21593, 21592], [21464, 21465, 21593], [21465, 21594, 21593], [21465, 21466, 21595], [21465, 21595, 21594], [21466, 21467, 21595], [21467, 21596, 21595], [21467, 21468, 21597], [21467, 21597, 21596], [21468, 21469, 21597], [21469, 21598, 21597], [21469, 21470, 21599], [21469, 21599, 21598], [21470, 21471, 21599], [21471, 21600, 21599], [21471, 21472, 21601], [21471, 21601, 21600], [21472, 21473, 21601], [21473, 21602, 21601], [21473, 21474, 21603], [21473, 21603, 21602], [21474, 21475, 21603], [21475, 21604, 21603], [21475, 21476, 21605], [21475, 21605, 21604], [21476, 21477, 21605], [21477, 21606, 21605], [21477, 21478, 21607], [21477, 21607, 21606], [21478, 21479, 21607], [21479, 21608, 21607], [21479, 21480, 21609], [21479, 21609, 21608], [21480, 21481, 21609], [21481, 21610, 21609], [21481, 21482, 21611], [21481, 21611, 21610], [21482, 21483, 21611], [21483, 21612, 21611], [21483, 21484, 21613], [21483, 21613, 21612], [21484, 21485, 21613], [21485, 21614, 21613], [21485, 21486, 21615], [21485, 21615, 21614], [21486, 21487, 21615], [21487, 21616, 21615], [21487, 21488, 21617], [21487, 21617, 21616], [21488, 21489, 21617], [21489, 21618, 21617], [21489, 21490, 21619], [21489, 21619, 21618], [21490, 21491, 21619], [21491, 21620, 21619], [21491, 21492, 21621], [21491, 21621, 21620], [21492, 21493, 21621], [21493, 21622, 21621], [21493, 21494, 21623], [21493, 21623, 21622], [21494, 21495, 21623], [21495, 21624, 21623], [21495, 21496, 21625], [21495, 21625, 21624], [21496, 21497, 21625], [21497, 21626, 21625], [21497, 21498, 21627], [21497, 21627, 21626], [21498, 21499, 21627], [21499, 21628, 21627], [21499, 21500, 21629], [21499, 21629, 21628], [21500, 21501, 21629], [21501, 21630, 21629], [21501, 21502, 21631], [21501, 21631, 21630], [21502, 21503, 21631], [21503, 21632, 21631], [21503, 21504, 21633], [21503, 21633, 21632], [21504, 21505, 21633], [21505, 21634, 21633], [21505, 21506, 21635], [21505, 21635, 21634], [21506, 21507, 21635], [21507, 21636, 21635], [21507, 21508, 21637], [21507, 21637, 21636], [21508, 21509, 21637], [21509, 21638, 21637], [21509, 21510, 21639], [21509, 21639, 21638], [21510, 21511, 21639], 
[21511, 21640, 21639], [21511, 21512, 21641], [21511, 21641, 21640], [21512, 21513, 21641], [21513, 21642, 21641], [21513, 21514, 21643], [21513, 21643, 21642], [21514, 21515, 21643], [21515, 21644, 21643], [21515, 21516, 21645], [21515, 21645, 21644], [21516, 21517, 21645], [21517, 21646, 21645], [21517, 21518, 21647], [21517, 21647, 21646], [21518, 21519, 21647], [21519, 21648, 21647], [21519, 21520, 21649], [21519, 21649, 21648], [21520, 21521, 21649], [21521, 21650, 21649], [21521, 21522, 21651], [21521, 21651, 21650], [21522, 21523, 21651], [21523, 21652, 21651], [21523, 21524, 21653], [21523, 21653, 21652], [21524, 21525, 21653], [21525, 21654, 21653], [21525, 21526, 21655], [21525, 21655, 21654], [21526, 21527, 21655], [21527, 21656, 21655], [21527, 21528, 21657], [21527, 21657, 21656], [21528, 21529, 21657], [21529, 21658, 21657], [21529, 21530, 21659], [21529, 21659, 21658], [21530, 21531, 21659], [21531, 21660, 21659], [21531, 21532, 21661], [21531, 21661, 21660], [21532, 21533, 21661], [21533, 21662, 21661], [21533, 21534, 21663], [21533, 21663, 21662], [21534, 21535, 21663], [21535, 21664, 21663], [21535, 21536, 21665], [21535, 21665, 21664], [21536, 21537, 21665], [21537, 21666, 21665], [21537, 21538, 21667], [21537, 21667, 21666], [21538, 21539, 21667], [21539, 21668, 21667], [21539, 21540, 21669], [21539, 21669, 21668], [21540, 21541, 21669], [21541, 21670, 21669], [21541, 21542, 21671], [21541, 21671, 21670], [21542, 21543, 21671], [21543, 21672, 21671], [21543, 21544, 21673], [21543, 21673, 21672], [21544, 21545, 21673], [21545, 21674, 21673], [21545, 21546, 21675], [21545, 21675, 21674], [21546, 21547, 21675], [21547, 21676, 21675], [21547, 21548, 21677], [21547, 21677, 21676], [21548, 21549, 21677], [21549, 21678, 21677], [21549, 21550, 21679], [21549, 21679, 21678], [21550, 21551, 21679], [21551, 21680, 21679], [21551, 21552, 21681], [21551, 21681, 21680], [21552, 21553, 21681], [21553, 21682, 21681], [21553, 21554, 21683], [21553, 21683, 21682], [21554, 21555, 21683], [21555, 21684, 21683], [21555, 21556, 21685], [21555, 21685, 21684], [21556, 21557, 21685], [21557, 21686, 21685], [21557, 21558, 21687], [21557, 21687, 21686], [21558, 21559, 21687], [21559, 21688, 21687], [21559, 21560, 21689], [21559, 21689, 21688], [21560, 21561, 21689], [21561, 21690, 21689], [21561, 21562, 21691], [21561, 21691, 21690], [21562, 21563, 21691], [21563, 21692, 21691], [21563, 21564, 21693], [21563, 21693, 21692], [21564, 21565, 21693], [21565, 21694, 21693], [21565, 21566, 21695], [21565, 21695, 21694], [21566, 21567, 21695], [21567, 21696, 21695], [21567, 21568, 21697], [21567, 21697, 21696], [21568, 21569, 21697], [21569, 21698, 21697], [21569, 21570, 21699], [21569, 21699, 21698], [21570, 21571, 21699], [21571, 21700, 21699], [21571, 21572, 21701], [21571, 21701, 21700], [21572, 21573, 21701], [21573, 21702, 21701], [21573, 21574, 21703], [21573, 21703, 21702], [21574, 21575, 21703], [21575, 21704, 21703], [21575, 21576, 21705], [21575, 21705, 21704], [21576, 21577, 21705], [21577, 21706, 21705], [21577, 21578, 21707], [21577, 21707, 21706], [21578, 21579, 21707], [21579, 21708, 21707], [21579, 21580, 21709], [21579, 21709, 21708], [21580, 21581, 21709], [21581, 21710, 21709], [21581, 21582, 21711], [21581, 21711, 21710], [21582, 21583, 21711], [21583, 21712, 21711], [21583, 21584, 21713], [21583, 21713, 21712], [21584, 21585, 21713], [21585, 21714, 21713], [21585, 21586, 21715], [21585, 21715, 21714], [21586, 21587, 21715], [21587, 21716, 21715], [21587, 21588, 21717], [21587, 
21717, 21716], [21589, 21590, 21719], [21589, 21719, 21718], [21590, 21591, 21719], [21591, 21720, 21719], [21591, 21592, 21721], [21591, 21721, 21720], [21592, 21593, 21721], [21593, 21722, 21721], [21593, 21594, 21723], [21593, 21723, 21722], [21594, 21595, 21723], [21595, 21724, 21723], [21595, 21596, 21725], [21595, 21725, 21724], [21596, 21597, 21725], [21597, 21726, 21725], [21597, 21598, 21727], [21597, 21727, 21726], [21598, 21599, 21727], [21599, 21728, 21727], [21599, 21600, 21729], [21599, 21729, 21728], [21600, 21601, 21729], [21601, 21730, 21729], [21601, 21602, 21731], [21601, 21731, 21730], [21602, 21603, 21731], [21603, 21732, 21731], [21603, 21604, 21733], [21603, 21733, 21732], [21604, 21605, 21733], [21605, 21734, 21733], [21605, 21606, 21735], [21605, 21735, 21734], [21606, 21607, 21735], [21607, 21736, 21735], [21607, 21608, 21737], [21607, 21737, 21736], [21608, 21609, 21737], [21609, 21738, 21737], [21609, 21610, 21739], [21609, 21739, 21738], [21610, 21611, 21739], [21611, 21740, 21739], [21611, 21612, 21741], [21611, 21741, 21740], [21612, 21613, 21741], [21613, 21742, 21741], [21613, 21614, 21743], [21613, 21743, 21742], [21614, 21615, 21743], [21615, 21744, 21743], [21615, 21616, 21745], [21615, 21745, 21744], [21616, 21617, 21745], [21617, 21746, 21745], [21617, 21618, 21747], [21617, 21747, 21746], [21618, 21619, 21747], [21619, 21748, 21747], [21619, 21620, 21749], [21619, 21749, 21748], [21620, 21621, 21749], [21621, 21750, 21749], [21621, 21622, 21751], [21621, 21751, 21750], [21622, 21623, 21751], [21623, 21752, 21751], [21623, 21624, 21753], [21623, 21753, 21752], [21624, 21625, 21753], [21625, 21754, 21753], [21625, 21626, 21755], [21625, 21755, 21754], [21626, 21627, 21755], [21627, 21756, 21755], [21627, 21628, 21757], [21627, 21757, 21756], [21628, 21629, 21757], [21629, 21758, 21757], [21629, 21630, 21759], [21629, 21759, 21758], [21630, 21631, 21759], [21631, 21760, 21759], [21631, 21632, 21761], [21631, 21761, 21760], [21632, 21633, 21761], [21633, 21762, 21761], [21633, 21634, 21763], [21633, 21763, 21762], [21634, 21635, 21763], [21635, 21764, 21763], [21635, 21636, 21765], [21635, 21765, 21764], [21636, 21637, 21765], [21637, 21766, 21765], [21637, 21638, 21767], [21637, 21767, 21766], [21638, 21639, 21767], [21639, 21768, 21767], [21639, 21640, 21769], [21639, 21769, 21768], [21640, 21641, 21769], [21641, 21770, 21769], [21641, 21642, 21771], [21641, 21771, 21770], [21642, 21643, 21771], [21643, 21772, 21771], [21643, 21644, 21773], [21643, 21773, 21772], [21644, 21645, 21773], [21645, 21774, 21773], [21645, 21646, 21775], [21645, 21775, 21774], [21646, 21647, 21775], [21647, 21776, 21775], [21647, 21648, 21777], [21647, 21777, 21776], [21648, 21649, 21777], [21649, 21778, 21777], [21649, 21650, 21779], [21649, 21779, 21778], [21650, 21651, 21779], [21651, 21780, 21779], [21651, 21652, 21781], [21651, 21781, 21780], [21652, 21653, 21781], [21653, 21782, 21781], [21653, 21654, 21783], [21653, 21783, 21782], [21654, 21655, 21783], [21655, 21784, 21783], [21655, 21656, 21785], [21655, 21785, 21784], [21656, 21657, 21785], [21657, 21786, 21785], [21657, 21658, 21787], [21657, 21787, 21786], [21658, 21659, 21787], [21659, 21788, 21787], [21659, 21660, 21789], [21659, 21789, 21788], [21660, 21661, 21789], [21661, 21790, 21789], [21661, 21662, 21791], [21661, 21791, 21790], [21662, 21663, 21791], [21663, 21792, 21791], [21663, 21664, 21793], [21663, 21793, 21792], [21664, 21665, 21793], [21665, 21794, 21793], [21665, 21666, 21795], [21665, 21795, 
21794], [21666, 21667, 21795], [21667, 21796, 21795], [21667, 21668, 21797], [21667, 21797, 21796], [21668, 21669, 21797], [21669, 21798, 21797], [21669, 21670, 21799], [21669, 21799, 21798], [21670, 21671, 21799], [21671, 21800, 21799], [21671, 21672, 21801], [21671, 21801, 21800], [21672, 21673, 21801], [21673, 21802, 21801], [21673, 21674, 21803], [21673, 21803, 21802], [21674, 21675, 21803], [21675, 21804, 21803], [21675, 21676, 21805], [21675, 21805, 21804], [21676, 21677, 21805], [21677, 21806, 21805], [21677, 21678, 21807], [21677, 21807, 21806], [21678, 21679, 21807], [21679, 21808, 21807], [21679, 21680, 21809], [21679, 21809, 21808], [21680, 21681, 21809], [21681, 21810, 21809], [21681, 21682, 21811], [21681, 21811, 21810], [21682, 21683, 21811], [21683, 21812, 21811], [21683, 21684, 21813], [21683, 21813, 21812], [21684, 21685, 21813], [21685, 21814, 21813], [21685, 21686, 21815], [21685, 21815, 21814], [21686, 21687, 21815], [21687, 21816, 21815], [21687, 21688, 21817], [21687, 21817, 21816], [21688, 21689, 21817], [21689, 21818, 21817], [21689, 21690, 21819], [21689, 21819, 21818], [21690, 21691, 21819], [21691, 21820, 21819], [21691, 21692, 21821], [21691, 21821, 21820], [21692, 21693, 21821], [21693, 21822, 21821], [21693, 21694, 21823], [21693, 21823, 21822], [21694, 21695, 21823], [21695, 21824, 21823], [21695, 21696, 21825], [21695, 21825, 21824], [21696, 21697, 21825], [21697, 21826, 21825], [21697, 21698, 21827], [21697, 21827, 21826], [21698, 21699, 21827], [21699, 21828, 21827], [21699, 21700, 21829], [21699, 21829, 21828], [21700, 21701, 21829], [21701, 21830, 21829], [21701, 21702, 21831], [21701, 21831, 21830], [21702, 21703, 21831], [21703, 21832, 21831], [21703, 21704, 21833], [21703, 21833, 21832], [21704, 21705, 21833], [21705, 21834, 21833], [21705, 21706, 21835], [21705, 21835, 21834], [21706, 21707, 21835], [21707, 21836, 21835], [21707, 21708, 21837], [21707, 21837, 21836], [21708, 21709, 21837], [21709, 21838, 21837], [21709, 21710, 21839], [21709, 21839, 21838], [21710, 21711, 21839], [21711, 21840, 21839], [21711, 21712, 21841], [21711, 21841, 21840], [21712, 21713, 21841], [21713, 21842, 21841], [21713, 21714, 21843], [21713, 21843, 21842], [21714, 21715, 21843], [21715, 21844, 21843], [21715, 21716, 21845], [21715, 21845, 21844], [21716, 21717, 21845], [21717, 21846, 21845], [21718, 21719, 21847], [21719, 21848, 21847], [21719, 21720, 21849], [21719, 21849, 21848], [21720, 21721, 21849], [21721, 21850, 21849], [21721, 21722, 21851], [21721, 21851, 21850], [21722, 21723, 21851], [21723, 21852, 21851], [21723, 21724, 21853], [21723, 21853, 21852], [21724, 21725, 21853], [21725, 21854, 21853], [21725, 21726, 21855], [21725, 21855, 21854], [21726, 21727, 21855], [21727, 21856, 21855], [21727, 21728, 21857], [21727, 21857, 21856], [21728, 21729, 21857], [21729, 21858, 21857], [21729, 21730, 21859], [21729, 21859, 21858], [21730, 21731, 21859], [21731, 21860, 21859], [21731, 21732, 21861], [21731, 21861, 21860], [21732, 21733, 21861], [21733, 21862, 21861], [21733, 21734, 21863], [21733, 21863, 21862], [21734, 21735, 21863], [21735, 21864, 21863], [21735, 21736, 21865], [21735, 21865, 21864], [21736, 21737, 21865], [21737, 21866, 21865], [21737, 21738, 21867], [21737, 21867, 21866], [21738, 21739, 21867], [21739, 21868, 21867], [21739, 21740, 21869], [21739, 21869, 21868], [21740, 21741, 21869], [21741, 21870, 21869], [21741, 21742, 21871], [21741, 21871, 21870], [21742, 21743, 21871], [21743, 21872, 21871], [21743, 21744, 21873], [21743, 21873, 21872], 
[21744, 21745, 21873], [21745, 21874, 21873], [21745, 21746, 21875], [21745, 21875, 21874], [21746, 21747, 21875], [21747, 21876, 21875], [21747, 21748, 21877], [21747, 21877, 21876], [21748, 21749, 21877], [21749, 21878, 21877], [21749, 21750, 21879], [21749, 21879, 21878], [21750, 21751, 21879], [21751, 21880, 21879], [21751, 21752, 21881], [21751, 21881, 21880], [21752, 21753, 21881], [21753, 21882, 21881], [21753, 21754, 21883], [21753, 21883, 21882], [21754, 21755, 21883], [21755, 21884, 21883], [21755, 21756, 21885], [21755, 21885, 21884], [21756, 21757, 21885], [21757, 21886, 21885], [21757, 21758, 21887], [21757, 21887, 21886], [21758, 21759, 21887], [21759, 21888, 21887], [21759, 21760, 21889], [21759, 21889, 21888], [21760, 21761, 21889], [21761, 21890, 21889], [21761, 21762, 21891], [21761, 21891, 21890], [21762, 21763, 21891], [21763, 21892, 21891], [21763, 21764, 21893], [21763, 21893, 21892], [21764, 21765, 21893], [21765, 21894, 21893], [21765, 21766, 21895], [21765, 21895, 21894], [21766, 21767, 21895], [21767, 21896, 21895], [21767, 21768, 21897], [21767, 21897, 21896], [21768, 21769, 21897], [21769, 21898, 21897], [21769, 21770, 21899], [21769, 21899, 21898], [21770, 21771, 21899], [21771, 21900, 21899], [21771, 21772, 21901], [21771, 21901, 21900], [21772, 21773, 21901], [21773, 21902, 21901], [21773, 21774, 21903], [21773, 21903, 21902], [21774, 21775, 21903], [21775, 21904, 21903], [21775, 21776, 21905], [21775, 21905, 21904], [21776, 21777, 21905], [21777, 21906, 21905], [21777, 21778, 21907], [21777, 21907, 21906], [21778, 21779, 21907], [21779, 21908, 21907], [21779, 21780, 21909], [21779, 21909, 21908], [21780, 21781, 21909], [21781, 21910, 21909], [21781, 21782, 21911], [21781, 21911, 21910], [21782, 21783, 21911], [21783, 21912, 21911], [21783, 21784, 21913], [21783, 21913, 21912], [21784, 21785, 21913], [21785, 21914, 21913], [21785, 21786, 21915], [21785, 21915, 21914], [21786, 21787, 21915], [21787, 21916, 21915], [21787, 21788, 21917], [21787, 21917, 21916], [21788, 21789, 21917], [21789, 21918, 21917], [21789, 21790, 21919], [21789, 21919, 21918], [21790, 21791, 21919], [21791, 21920, 21919], [21791, 21792, 21921], [21791, 21921, 21920], [21792, 21793, 21921], [21793, 21922, 21921], [21793, 21794, 21923], [21793, 21923, 21922], [21794, 21795, 21923], [21795, 21924, 21923], [21795, 21796, 21925], [21795, 21925, 21924], [21796, 21797, 21925], [21797, 21926, 21925], [21797, 21798, 21927], [21797, 21927, 21926], [21798, 21799, 21927], [21799, 21928, 21927], [21799, 21800, 21929], [21799, 21929, 21928], [21800, 21801, 21929], [21801, 21930, 21929], [21801, 21802, 21931], [21801, 21931, 21930], [21802, 21803, 21931], [21803, 21932, 21931], [21803, 21804, 21933], [21803, 21933, 21932], [21804, 21805, 21933], [21805, 21934, 21933], [21805, 21806, 21935], [21805, 21935, 21934], [21806, 21807, 21935], [21807, 21936, 21935], [21807, 21808, 21937], [21807, 21937, 21936], [21808, 21809, 21937], [21809, 21938, 21937], [21809, 21810, 21939], [21809, 21939, 21938], [21810, 21811, 21939], [21811, 21940, 21939], [21811, 21812, 21941], [21811, 21941, 21940], [21812, 21813, 21941], [21813, 21942, 21941], [21813, 21814, 21943], [21813, 21943, 21942], [21814, 21815, 21943], [21815, 21944, 21943], [21815, 21816, 21945], [21815, 21945, 21944], [21816, 21817, 21945], [21817, 21946, 21945], [21817, 21818, 21947], [21817, 21947, 21946], [21818, 21819, 21947], [21819, 21948, 21947], [21819, 21820, 21949], [21819, 21949, 21948], [21820, 21821, 21949], [21821, 21950, 21949], [21821, 
21822, 21951], [21821, 21951, 21950], [21822, 21823, 21951], [21823, 21952, 21951], [21823, 21824, 21953], [21823, 21953, 21952], [21824, 21825, 21953], [21825, 21954, 21953], [21825, 21826, 21955], [21825, 21955, 21954], [21826, 21827, 21955], [21827, 21956, 21955], [21827, 21828, 21957], [21827, 21957, 21956], [21828, 21829, 21957], [21829, 21958, 21957], [21829, 21830, 21959], [21829, 21959, 21958], [21830, 21831, 21959], [21831, 21960, 21959], [21831, 21832, 21961], [21831, 21961, 21960], [21832, 21833, 21961], [21833, 21962, 21961], [21833, 21834, 21963], [21833, 21963, 21962], [21834, 21835, 21963], [21835, 21964, 21963], [21835, 21836, 21965], [21835, 21965, 21964], [21836, 21837, 21965], [21837, 21966, 21965], [21837, 21838, 21967], [21837, 21967, 21966], [21838, 21839, 21967], [21839, 21968, 21967], [21839, 21840, 21969], [21839, 21969, 21968], [21840, 21841, 21969], [21841, 21970, 21969], [21841, 21842, 21971], [21841, 21971, 21970], [21842, 21843, 21971], [21843, 21972, 21971], [21843, 21844, 21973], [21843, 21973, 21972], [21844, 21845, 21973], [21845, 21974, 21973], [21845, 21846, 21975], [21845, 21975, 21974], [21847, 21848, 21977], [21847, 21977, 21976], [21848, 21849, 21977], [21849, 21978, 21977], [21849, 21850, 21979], [21849, 21979, 21978], [21850, 21851, 21979], [21851, 21980, 21979], [21851, 21852, 21981], [21851, 21981, 21980], [21852, 21853, 21981], [21853, 21982, 21981], [21853, 21854, 21983], [21853, 21983, 21982], [21854, 21855, 21983], [21855, 21984, 21983], [21855, 21856, 21985], [21855, 21985, 21984], [21856, 21857, 21985], [21857, 21986, 21985], [21857, 21858, 21987], [21857, 21987, 21986], [21858, 21859, 21987], [21859, 21988, 21987], [21859, 21860, 21989], [21859, 21989, 21988], [21860, 21861, 21989], [21861, 21990, 21989], [21861, 21862, 21991], [21861, 21991, 21990], [21862, 21863, 21991], [21863, 21992, 21991], [21863, 21864, 21993], [21863, 21993, 21992], [21864, 21865, 21993], [21865, 21994, 21993], [21865, 21866, 21995], [21865, 21995, 21994], [21866, 21867, 21995], [21867, 21996, 21995], [21867, 21868, 21997], [21867, 21997, 21996], [21868, 21869, 21997], [21869, 21998, 21997], [21869, 21870, 21999], [21869, 21999, 21998], [21870, 21871, 21999], [21871, 22000, 21999], [21871, 21872, 22001], [21871, 22001, 22000], [21872, 21873, 22001], [21873, 22002, 22001], [21873, 21874, 22003], [21873, 22003, 22002], [21874, 21875, 22003], [21875, 22004, 22003], [21875, 21876, 22005], [21875, 22005, 22004], [21876, 21877, 22005], [21877, 22006, 22005], [21877, 21878, 22007], [21877, 22007, 22006], [21878, 21879, 22007], [21879, 22008, 22007], [21879, 21880, 22009], [21879, 22009, 22008], [21880, 21881, 22009], [21881, 22010, 22009], [21881, 21882, 22011], [21881, 22011, 22010], [21882, 21883, 22011], [21883, 22012, 22011], [21883, 21884, 22013], [21883, 22013, 22012], [21884, 21885, 22013], [21885, 22014, 22013], [21885, 21886, 22015], [21885, 22015, 22014], [21886, 21887, 22015], [21887, 22016, 22015], [21887, 21888, 22017], [21887, 22017, 22016], [21888, 21889, 22017], [21889, 22018, 22017], [21889, 21890, 22019], [21889, 22019, 22018], [21890, 21891, 22019], [21891, 22020, 22019], [21891, 21892, 22021], [21891, 22021, 22020], [21892, 21893, 22021], [21893, 22022, 22021], [21893, 21894, 22023], [21893, 22023, 22022], [21894, 21895, 22023], [21895, 22024, 22023], [21895, 21896, 22025], [21895, 22025, 22024], [21896, 21897, 22025], [21897, 22026, 22025], [21897, 21898, 22027], [21897, 22027, 22026], [21898, 21899, 22027], [21899, 22028, 22027], [21899, 21900, 
22029], [21899, 22029, 22028], [21900, 21901, 22029], [21901, 22030, 22029], [21901, 21902, 22031], [21901, 22031, 22030], [21902, 21903, 22031], [21903, 22032, 22031], [21903, 21904, 22033], [21903, 22033, 22032], [21904, 21905, 22033], [21905, 22034, 22033], [21905, 21906, 22035], [21905, 22035, 22034], [21906, 21907, 22035], [21907, 22036, 22035], [21907, 21908, 22037], [21907, 22037, 22036], [21908, 21909, 22037], [21909, 22038, 22037], [21909, 21910, 22039], [21909, 22039, 22038], [21910, 21911, 22039], [21911, 22040, 22039], [21911, 21912, 22041], [21911, 22041, 22040], [21912, 21913, 22041], [21913, 22042, 22041], [21913, 21914, 22043], [21913, 22043, 22042], [21914, 21915, 22043], [21915, 22044, 22043], [21915, 21916, 22045], [21915, 22045, 22044], [21916, 21917, 22045], [21917, 22046, 22045], [21917, 21918, 22047], [21917, 22047, 22046], [21918, 21919, 22047], [21919, 22048, 22047], [21919, 21920, 22049], [21919, 22049, 22048], [21920, 21921, 22049], [21921, 22050, 22049], [21921, 21922, 22051], [21921, 22051, 22050], [21922, 21923, 22051], [21923, 22052, 22051], [21923, 21924, 22053], [21923, 22053, 22052], [21924, 21925, 22053], [21925, 22054, 22053], [21925, 21926, 22055], [21925, 22055, 22054], [21926, 21927, 22055], [21927, 22056, 22055], [21927, 21928, 22057], [21927, 22057, 22056], [21928, 21929, 22057], [21929, 22058, 22057], [21929, 21930, 22059], [21929, 22059, 22058], [21930, 21931, 22059], [21931, 22060, 22059], [21931, 21932, 22061], [21931, 22061, 22060], [21932, 21933, 22061], [21933, 22062, 22061], [21933, 21934, 22063], [21933, 22063, 22062], [21934, 21935, 22063], [21935, 22064, 22063], [21935, 21936, 22065], [21935, 22065, 22064], [21936, 21937, 22065], [21937, 22066, 22065], [21937, 21938, 22067], [21937, 22067, 22066], [21938, 21939, 22067], [21939, 22068, 22067], [21939, 21940, 22069], [21939, 22069, 22068], [21940, 21941, 22069], [21941, 22070, 22069], [21941, 21942, 22071], [21941, 22071, 22070], [21942, 21943, 22071], [21943, 22072, 22071], [21943, 21944, 22073], [21943, 22073, 22072], [21944, 21945, 22073], [21945, 22074, 22073], [21945, 21946, 22075], [21945, 22075, 22074], [21946, 21947, 22075], [21947, 22076, 22075], [21947, 21948, 22077], [21947, 22077, 22076], [21948, 21949, 22077], [21949, 22078, 22077], [21949, 21950, 22079], [21949, 22079, 22078], [21950, 21951, 22079], [21951, 22080, 22079], [21951, 21952, 22081], [21951, 22081, 22080], [21952, 21953, 22081], [21953, 22082, 22081], [21953, 21954, 22083], [21953, 22083, 22082], [21954, 21955, 22083], [21955, 22084, 22083], [21955, 21956, 22085], [21955, 22085, 22084], [21956, 21957, 22085], [21957, 22086, 22085], [21957, 21958, 22087], [21957, 22087, 22086], [21958, 21959, 22087], [21959, 22088, 22087], [21959, 21960, 22089], [21959, 22089, 22088], [21960, 21961, 22089], [21961, 22090, 22089], [21961, 21962, 22091], [21961, 22091, 22090], [21962, 21963, 22091], [21963, 22092, 22091], [21963, 21964, 22093], [21963, 22093, 22092], [21964, 21965, 22093], [21965, 22094, 22093], [21965, 21966, 22095], [21965, 22095, 22094], [21966, 21967, 22095], [21967, 22096, 22095], [21967, 21968, 22097], [21967, 22097, 22096], [21968, 21969, 22097], [21969, 22098, 22097], [21969, 21970, 22099], [21969, 22099, 22098], [21970, 21971, 22099], [21971, 22100, 22099], [21971, 21972, 22101], [21971, 22101, 22100], [21972, 21973, 22101], [21973, 22102, 22101], [21973, 21974, 22103], [21973, 22103, 22102], [21974, 21975, 22103], [21975, 22104, 22103], [21976, 21977, 22105], [21977, 22106, 22105], [21977, 21978, 22107], 
[21977, 22107, 22106], [21978, 21979, 22107], [21979, 22108, 22107], [21979, 21980, 22109], [21979, 22109, 22108], [21980, 21981, 22109], [21981, 22110, 22109], [21981, 21982, 22111], [21981, 22111, 22110], [21982, 21983, 22111], [21983, 22112, 22111], [21983, 21984, 22113], [21983, 22113, 22112], [21984, 21985, 22113], [21985, 22114, 22113], [21985, 21986, 22115], [21985, 22115, 22114], [21986, 21987, 22115], [21987, 22116, 22115], [21987, 21988, 22117], [21987, 22117, 22116], [21988, 21989, 22117], [21989, 22118, 22117], [21989, 21990, 22119], [21989, 22119, 22118], [21990, 21991, 22119], [21991, 22120, 22119], [21991, 21992, 22121], [21991, 22121, 22120], [21992, 21993, 22121], [21993, 22122, 22121], [21993, 21994, 22123], [21993, 22123, 22122], [21994, 21995, 22123], [21995, 22124, 22123], [21995, 21996, 22125], [21995, 22125, 22124], [21996, 21997, 22125], [21997, 22126, 22125], [21997, 21998, 22127], [21997, 22127, 22126], [21998, 21999, 22127], [21999, 22128, 22127], [21999, 22000, 22129], [21999, 22129, 22128], [22000, 22001, 22129], [22001, 22130, 22129], [22001, 22002, 22131], [22001, 22131, 22130], [22002, 22003, 22131], [22003, 22132, 22131], [22003, 22004, 22133], [22003, 22133, 22132], [22004, 22005, 22133], [22005, 22134, 22133], [22005, 22006, 22135], [22005, 22135, 22134], [22006, 22007, 22135], [22007, 22136, 22135], [22007, 22008, 22137], [22007, 22137, 22136], [22008, 22009, 22137], [22009, 22138, 22137], [22009, 22010, 22139], [22009, 22139, 22138], [22010, 22011, 22139], [22011, 22140, 22139], [22011, 22012, 22141], [22011, 22141, 22140], [22012, 22013, 22141], [22013, 22142, 22141], [22013, 22014, 22143], [22013, 22143, 22142], [22014, 22015, 22143], [22015, 22144, 22143], [22015, 22016, 22145], [22015, 22145, 22144], [22016, 22017, 22145], [22017, 22146, 22145], [22017, 22018, 22147], [22017, 22147, 22146], [22018, 22019, 22147], [22019, 22148, 22147], [22019, 22020, 22149], [22019, 22149, 22148], [22020, 22021, 22149], [22021, 22150, 22149], [22021, 22022, 22151], [22021, 22151, 22150], [22022, 22023, 22151], [22023, 22152, 22151], [22023, 22024, 22153], [22023, 22153, 22152], [22024, 22025, 22153], [22025, 22154, 22153], [22025, 22026, 22155], [22025, 22155, 22154], [22026, 22027, 22155], [22027, 22156, 22155], [22027, 22028, 22157], [22027, 22157, 22156], [22028, 22029, 22157], [22029, 22158, 22157], [22029, 22030, 22159], [22029, 22159, 22158], [22030, 22031, 22159], [22031, 22160, 22159], [22031, 22032, 22161], [22031, 22161, 22160], [22032, 22033, 22161], [22033, 22162, 22161], [22033, 22034, 22163], [22033, 22163, 22162], [22034, 22035, 22163], [22035, 22164, 22163], [22035, 22036, 22165], [22035, 22165, 22164], [22036, 22037, 22165], [22037, 22166, 22165], [22037, 22038, 22167], [22037, 22167, 22166], [22038, 22039, 22167], [22039, 22168, 22167], [22039, 22040, 22169], [22039, 22169, 22168], [22040, 22041, 22169], [22041, 22170, 22169], [22041, 22042, 22171], [22041, 22171, 22170], [22042, 22043, 22171], [22043, 22172, 22171], [22043, 22044, 22173], [22043, 22173, 22172], [22044, 22045, 22173], [22045, 22174, 22173], [22045, 22046, 22175], [22045, 22175, 22174], [22046, 22047, 22175], [22047, 22176, 22175], [22047, 22048, 22177], [22047, 22177, 22176], [22048, 22049, 22177], [22049, 22178, 22177], [22049, 22050, 22179], [22049, 22179, 22178], [22050, 22051, 22179], [22051, 22180, 22179], [22051, 22052, 22181], [22051, 22181, 22180], [22052, 22053, 22181], [22053, 22182, 22181], [22053, 22054, 22183], [22053, 22183, 22182], [22054, 22055, 22183], [22055, 
22184, 22183], [22055, 22056, 22185], [22055, 22185, 22184], [22056, 22057, 22185], [22057, 22186, 22185], [22057, 22058, 22187], [22057, 22187, 22186], [22058, 22059, 22187], [22059, 22188, 22187], [22059, 22060, 22189], [22059, 22189, 22188], [22060, 22061, 22189], [22061, 22190, 22189], [22061, 22062, 22191], [22061, 22191, 22190], [22062, 22063, 22191], [22063, 22192, 22191], [22063, 22064, 22193], [22063, 22193, 22192], [22064, 22065, 22193], [22065, 22194, 22193], [22065, 22066, 22195], [22065, 22195, 22194], [22066, 22067, 22195], [22067, 22196, 22195], [22067, 22068, 22197], [22067, 22197, 22196], [22068, 22069, 22197], [22069, 22198, 22197], [22069, 22070, 22199], [22069, 22199, 22198], [22070, 22071, 22199], [22071, 22200, 22199], [22071, 22072, 22201], [22071, 22201, 22200], [22072, 22073, 22201], [22073, 22202, 22201], [22073, 22074, 22203], [22073, 22203, 22202], [22074, 22075, 22203], [22075, 22204, 22203], [22075, 22076, 22205], [22075, 22205, 22204], [22076, 22077, 22205], [22077, 22206, 22205], [22077, 22078, 22207], [22077, 22207, 22206], [22078, 22079, 22207], [22079, 22208, 22207], [22079, 22080, 22209], [22079, 22209, 22208], [22080, 22081, 22209], [22081, 22210, 22209], [22081, 22082, 22211], [22081, 22211, 22210], [22082, 22083, 22211], [22083, 22212, 22211], [22083, 22084, 22213], [22083, 22213, 22212], [22084, 22085, 22213], [22085, 22214, 22213], [22085, 22086, 22215], [22085, 22215, 22214], [22086, 22087, 22215], [22087, 22216, 22215], [22087, 22088, 22217], [22087, 22217, 22216], [22088, 22089, 22217], [22089, 22218, 22217], [22089, 22090, 22219], [22089, 22219, 22218], [22090, 22091, 22219], [22091, 22220, 22219], [22091, 22092, 22221], [22091, 22221, 22220], [22092, 22093, 22221], [22093, 22222, 22221], [22093, 22094, 22223], [22093, 22223, 22222], [22094, 22095, 22223], [22095, 22224, 22223], [22095, 22096, 22225], [22095, 22225, 22224], [22096, 22097, 22225], [22097, 22226, 22225], [22097, 22098, 22227], [22097, 22227, 22226], [22098, 22099, 22227], [22099, 22228, 22227], [22099, 22100, 22229], [22099, 22229, 22228], [22100, 22101, 22229], [22101, 22230, 22229], [22101, 22102, 22231], [22101, 22231, 22230], [22102, 22103, 22231], [22103, 22232, 22231], [22103, 22104, 22233], [22103, 22233, 22232], [22105, 22106, 22235], [22105, 22235, 22234], [22106, 22107, 22235], [22107, 22236, 22235], [22107, 22108, 22237], [22107, 22237, 22236], [22108, 22109, 22237], [22109, 22238, 22237], [22109, 22110, 22239], [22109, 22239, 22238], [22110, 22111, 22239], [22111, 22240, 22239], [22111, 22112, 22241], [22111, 22241, 22240], [22112, 22113, 22241], [22113, 22242, 22241], [22113, 22114, 22243], [22113, 22243, 22242], [22114, 22115, 22243], [22115, 22244, 22243], [22115, 22116, 22245], [22115, 22245, 22244], [22116, 22117, 22245], [22117, 22246, 22245], [22117, 22118, 22247], [22117, 22247, 22246], [22118, 22119, 22247], [22119, 22248, 22247], [22119, 22120, 22249], [22119, 22249, 22248], [22120, 22121, 22249], [22121, 22250, 22249], [22121, 22122, 22251], [22121, 22251, 22250], [22122, 22123, 22251], [22123, 22252, 22251], [22123, 22124, 22253], [22123, 22253, 22252], [22124, 22125, 22253], [22125, 22254, 22253], [22125, 22126, 22255], [22125, 22255, 22254], [22126, 22127, 22255], [22127, 22256, 22255], [22127, 22128, 22257], [22127, 22257, 22256], [22128, 22129, 22257], [22129, 22258, 22257], [22129, 22130, 22259], [22129, 22259, 22258], [22130, 22131, 22259], [22131, 22260, 22259], [22131, 22132, 22261], [22131, 22261, 22260], [22132, 22133, 22261], [22133, 22262, 
22261], [22133, 22134, 22263], [22133, 22263, 22262], [22134, 22135, 22263], [22135, 22264, 22263], [22135, 22136, 22265], [22135, 22265, 22264], [22136, 22137, 22265], [22137, 22266, 22265], [22137, 22138, 22267], [22137, 22267, 22266], [22138, 22139, 22267], [22139, 22268, 22267], [22139, 22140, 22269], [22139, 22269, 22268], [22140, 22141, 22269], [22141, 22270, 22269], [22141, 22142, 22271], [22141, 22271, 22270], [22142, 22143, 22271], [22143, 22272, 22271], [22143, 22144, 22273], [22143, 22273, 22272], [22144, 22145, 22273], [22145, 22274, 22273], [22145, 22146, 22275], [22145, 22275, 22274], [22146, 22147, 22275], [22147, 22276, 22275], [22147, 22148, 22277], [22147, 22277, 22276], [22148, 22149, 22277], [22149, 22278, 22277], [22149, 22150, 22279], [22149, 22279, 22278], [22150, 22151, 22279], [22151, 22280, 22279], [22151, 22152, 22281], [22151, 22281, 22280], [22152, 22153, 22281], [22153, 22282, 22281], [22153, 22154, 22283], [22153, 22283, 22282], [22154, 22155, 22283], [22155, 22284, 22283], [22155, 22156, 22285], [22155, 22285, 22284], [22156, 22157, 22285], [22157, 22286, 22285], [22157, 22158, 22287], [22157, 22287, 22286], [22158, 22159, 22287], [22159, 22288, 22287], [22159, 22160, 22289], [22159, 22289, 22288], [22160, 22161, 22289], [22161, 22290, 22289], [22161, 22162, 22291], [22161, 22291, 22290], [22162, 22163, 22291], [22163, 22292, 22291], [22163, 22164, 22293], [22163, 22293, 22292], [22164, 22165, 22293], [22165, 22294, 22293], [22165, 22166, 22295], [22165, 22295, 22294], [22166, 22167, 22295], [22167, 22296, 22295], [22167, 22168, 22297], [22167, 22297, 22296], [22168, 22169, 22297], [22169, 22298, 22297], [22169, 22170, 22299], [22169, 22299, 22298], [22170, 22171, 22299], [22171, 22300, 22299], [22171, 22172, 22301], [22171, 22301, 22300], [22172, 22173, 22301], [22173, 22302, 22301], [22173, 22174, 22303], [22173, 22303, 22302], [22174, 22175, 22303], [22175, 22304, 22303], [22175, 22176, 22305], [22175, 22305, 22304], [22176, 22177, 22305], [22177, 22306, 22305], [22177, 22178, 22307], [22177, 22307, 22306], [22178, 22179, 22307], [22179, 22308, 22307], [22179, 22180, 22309], [22179, 22309, 22308], [22180, 22181, 22309], [22181, 22310, 22309], [22181, 22182, 22311], [22181, 22311, 22310], [22182, 22183, 22311], [22183, 22312, 22311], [22183, 22184, 22313], [22183, 22313, 22312], [22184, 22185, 22313], [22185, 22314, 22313], [22185, 22186, 22315], [22185, 22315, 22314], [22186, 22187, 22315], [22187, 22316, 22315], [22187, 22188, 22317], [22187, 22317, 22316], [22188, 22189, 22317], [22189, 22318, 22317], [22189, 22190, 22319], [22189, 22319, 22318], [22190, 22191, 22319], [22191, 22320, 22319], [22191, 22192, 22321], [22191, 22321, 22320], [22192, 22193, 22321], [22193, 22322, 22321], [22193, 22194, 22323], [22193, 22323, 22322], [22194, 22195, 22323], [22195, 22324, 22323], [22195, 22196, 22325], [22195, 22325, 22324], [22196, 22197, 22325], [22197, 22326, 22325], [22197, 22198, 22327], [22197, 22327, 22326], [22198, 22199, 22327], [22199, 22328, 22327], [22199, 22200, 22329], [22199, 22329, 22328], [22200, 22201, 22329], [22201, 22330, 22329], [22201, 22202, 22331], [22201, 22331, 22330], [22202, 22203, 22331], [22203, 22332, 22331], [22203, 22204, 22333], [22203, 22333, 22332], [22204, 22205, 22333], [22205, 22334, 22333], [22205, 22206, 22335], [22205, 22335, 22334], [22206, 22207, 22335], [22207, 22336, 22335], [22207, 22208, 22337], [22207, 22337, 22336], [22208, 22209, 22337], [22209, 22338, 22337], [22209, 22210, 22339], [22209, 22339, 22338], 
[22210, 22211, 22339], [22211, 22340, 22339], [22211, 22212, 22341], [22211, 22341, 22340], [22212, 22213, 22341], [22213, 22342, 22341], [22213, 22214, 22343], [22213, 22343, 22342], [22214, 22215, 22343], [22215, 22344, 22343], [22215, 22216, 22345], [22215, 22345, 22344], [22216, 22217, 22345], [22217, 22346, 22345], [22217, 22218, 22347], [22217, 22347, 22346], [22218, 22219, 22347], [22219, 22348, 22347], [22219, 22220, 22349], [22219, 22349, 22348], [22220, 22221, 22349], [22221, 22350, 22349], [22221, 22222, 22351], [22221, 22351, 22350], [22222, 22223, 22351], [22223, 22352, 22351], [22223, 22224, 22353], [22223, 22353, 22352], [22224, 22225, 22353], [22225, 22354, 22353], [22225, 22226, 22355], [22225, 22355, 22354], [22226, 22227, 22355], [22227, 22356, 22355], [22227, 22228, 22357], [22227, 22357, 22356], [22228, 22229, 22357], [22229, 22358, 22357], [22229, 22230, 22359], [22229, 22359, 22358], [22230, 22231, 22359], [22231, 22360, 22359], [22231, 22232, 22361], [22231, 22361, 22360], [22232, 22233, 22361], [22233, 22362, 22361], [22234, 22235, 22363], [22235, 22364, 22363], [22235, 22236, 22365], [22235, 22365, 22364], [22236, 22237, 22365], [22237, 22366, 22365], [22237, 22238, 22367], [22237, 22367, 22366], [22238, 22239, 22367], [22239, 22368, 22367], [22239, 22240, 22369], [22239, 22369, 22368], [22240, 22241, 22369], [22241, 22370, 22369], [22241, 22242, 22371], [22241, 22371, 22370], [22242, 22243, 22371], [22243, 22372, 22371], [22243, 22244, 22373], [22243, 22373, 22372], [22244, 22245, 22373], [22245, 22374, 22373], [22245, 22246, 22375], [22245, 22375, 22374], [22246, 22247, 22375], [22247, 22376, 22375], [22247, 22248, 22377], [22247, 22377, 22376], [22248, 22249, 22377], [22249, 22378, 22377], [22249, 22250, 22379], [22249, 22379, 22378], [22250, 22251, 22379], [22251, 22380, 22379], [22251, 22252, 22381], [22251, 22381, 22380], [22252, 22253, 22381], [22253, 22382, 22381], [22253, 22254, 22383], [22253, 22383, 22382], [22254, 22255, 22383], [22255, 22384, 22383], [22255, 22256, 22385], [22255, 22385, 22384], [22256, 22257, 22385], [22257, 22386, 22385], [22257, 22258, 22387], [22257, 22387, 22386], [22258, 22259, 22387], [22259, 22388, 22387], [22259, 22260, 22389], [22259, 22389, 22388], [22260, 22261, 22389], [22261, 22390, 22389], [22261, 22262, 22391], [22261, 22391, 22390], [22262, 22263, 22391], [22263, 22392, 22391], [22263, 22264, 22393], [22263, 22393, 22392], [22264, 22265, 22393], [22265, 22394, 22393], [22265, 22266, 22395], [22265, 22395, 22394], [22266, 22267, 22395], [22267, 22396, 22395], [22267, 22268, 22397], [22267, 22397, 22396], [22268, 22269, 22397], [22269, 22398, 22397], [22269, 22270, 22399], [22269, 22399, 22398], [22270, 22271, 22399], [22271, 22400, 22399], [22271, 22272, 22401], [22271, 22401, 22400], [22272, 22273, 22401], [22273, 22402, 22401], [22273, 22274, 22403], [22273, 22403, 22402], [22274, 22275, 22403], [22275, 22404, 22403], [22275, 22276, 22405], [22275, 22405, 22404], [22276, 22277, 22405], [22277, 22406, 22405], [22277, 22278, 22407], [22277, 22407, 22406], [22278, 22279, 22407], [22279, 22408, 22407], [22279, 22280, 22409], [22279, 22409, 22408], [22280, 22281, 22409], [22281, 22410, 22409], [22281, 22282, 22411], [22281, 22411, 22410], [22282, 22283, 22411], [22283, 22412, 22411], [22283, 22284, 22413], [22283, 22413, 22412], [22284, 22285, 22413], [22285, 22414, 22413], [22285, 22286, 22415], [22285, 22415, 22414], [22286, 22287, 22415], [22287, 22416, 22415], [22287, 22288, 22417], [22287, 22417, 22416], [22288, 
22289, 22417], [22289, 22418, 22417], [22289, 22290, 22419], [22289, 22419, 22418], [22290, 22291, 22419], [22291, 22420, 22419], [22291, 22292, 22421], [22291, 22421, 22420], [22292, 22293, 22421], [22293, 22422, 22421], [22293, 22294, 22423], [22293, 22423, 22422], [22294, 22295, 22423], [22295, 22424, 22423], [22295, 22296, 22425], [22295, 22425, 22424], [22296, 22297, 22425], [22297, 22426, 22425], [22297, 22298, 22427], [22297, 22427, 22426], [22298, 22299, 22427], [22299, 22428, 22427], [22299, 22300, 22429], [22299, 22429, 22428], [22300, 22301, 22429], [22301, 22430, 22429], [22301, 22302, 22431], [22301, 22431, 22430], [22302, 22303, 22431], [22303, 22432, 22431], [22303, 22304, 22433], [22303, 22433, 22432], [22304, 22305, 22433], [22305, 22434, 22433], [22305, 22306, 22435], [22305, 22435, 22434], [22306, 22307, 22435], [22307, 22436, 22435], [22307, 22308, 22437], [22307, 22437, 22436], [22308, 22309, 22437], [22309, 22438, 22437], [22309, 22310, 22439], [22309, 22439, 22438], [22310, 22311, 22439], [22311, 22440, 22439], [22311, 22312, 22441], [22311, 22441, 22440], [22312, 22313, 22441], [22313, 22442, 22441], [22313, 22314, 22443], [22313, 22443, 22442], [22314, 22315, 22443], [22315, 22444, 22443], [22315, 22316, 22445], [22315, 22445, 22444], [22316, 22317, 22445], [22317, 22446, 22445], [22317, 22318, 22447], [22317, 22447, 22446], [22318, 22319, 22447], [22319, 22448, 22447], [22319, 22320, 22449], [22319, 22449, 22448], [22320, 22321, 22449], [22321, 22450, 22449], [22321, 22322, 22451], [22321, 22451, 22450], [22322, 22323, 22451], [22323, 22452, 22451], [22323, 22324, 22453], [22323, 22453, 22452], [22324, 22325, 22453], [22325, 22454, 22453], [22325, 22326, 22455], [22325, 22455, 22454], [22326, 22327, 22455], [22327, 22456, 22455], [22327, 22328, 22457], [22327, 22457, 22456], [22328, 22329, 22457], [22329, 22458, 22457], [22329, 22330, 22459], [22329, 22459, 22458], [22330, 22331, 22459], [22331, 22460, 22459], [22331, 22332, 22461], [22331, 22461, 22460], [22332, 22333, 22461], [22333, 22462, 22461], [22333, 22334, 22463], [22333, 22463, 22462], [22334, 22335, 22463], [22335, 22464, 22463], [22335, 22336, 22465], [22335, 22465, 22464], [22336, 22337, 22465], [22337, 22466, 22465], [22337, 22338, 22467], [22337, 22467, 22466], [22338, 22339, 22467], [22339, 22468, 22467], [22339, 22340, 22469], [22339, 22469, 22468], [22340, 22341, 22469], [22341, 22470, 22469], [22341, 22342, 22471], [22341, 22471, 22470], [22342, 22343, 22471], [22343, 22472, 22471], [22343, 22344, 22473], [22343, 22473, 22472], [22344, 22345, 22473], [22345, 22474, 22473], [22345, 22346, 22475], [22345, 22475, 22474], [22346, 22347, 22475], [22347, 22476, 22475], [22347, 22348, 22477], [22347, 22477, 22476], [22348, 22349, 22477], [22349, 22478, 22477], [22349, 22350, 22479], [22349, 22479, 22478], [22350, 22351, 22479], [22351, 22480, 22479], [22351, 22352, 22481], [22351, 22481, 22480], [22352, 22353, 22481], [22353, 22482, 22481], [22353, 22354, 22483], [22353, 22483, 22482], [22354, 22355, 22483], [22355, 22484, 22483], [22355, 22356, 22485], [22355, 22485, 22484], [22356, 22357, 22485], [22357, 22486, 22485], [22357, 22358, 22487], [22357, 22487, 22486], [22358, 22359, 22487], [22359, 22488, 22487], [22359, 22360, 22489], [22359, 22489, 22488], [22360, 22361, 22489], [22361, 22490, 22489], [22361, 22362, 22491], [22361, 22491, 22490], [16171, 16300, 22493], [16171, 22493, 22492], [16300, 16429, 22493], [16429, 22494, 22493], [16429, 16558, 22495], [16429, 22495, 22494], [16558, 16687, 
22495], [16687, 22496, 22495], [16687, 16816, 22497], [16687, 22497, 22496], [16816, 16945, 22497], [16945, 22498, 22497], [16945, 17074, 22499], [16945, 22499, 22498], [17074, 17203, 22499], [17203, 22500, 22499], [17203, 17332, 22501], [17203, 22501, 22500], [17332, 17461, 22501], [17461, 22502, 22501], [17461, 17590, 22503], [17461, 22503, 22502], [17590, 17719, 22503], [17719, 22504, 22503], [17719, 17848, 22505], [17719, 22505, 22504], [17848, 17977, 22505], [17977, 22506, 22505], [17977, 18106, 22507], [17977, 22507, 22506], [18106, 18235, 22507], [18235, 22508, 22507], [18235, 18364, 22509], [18235, 22509, 22508], [18364, 18493, 22509], [18493, 22510, 22509], [18493, 18622, 22511], [18493, 22511, 22510], [18622, 18751, 22511], [18751, 22512, 22511], [18751, 18880, 22513], [18751, 22513, 22512], [18880, 19009, 22513], [19009, 22514, 22513], [19009, 19138, 22515], [19009, 22515, 22514], [19138, 0, 22515], [0, 129, 22515], [22492, 22493, 22516], [22493, 22517, 22516], [22493, 22494, 22518], [22493, 22518, 22517], [22494, 22495, 22518], [22495, 22519, 22518], [22495, 22496, 22520], [22495, 22520, 22519], [22496, 22497, 22520], [22497, 22521, 22520], [22497, 22498, 22522], [22497, 22522, 22521], [22498, 22499, 22522], [22499, 22523, 22522], [22499, 22500, 22524], [22499, 22524, 22523], [22500, 22501, 22524], [22501, 22525, 22524], [22501, 22502, 22526], [22501, 22526, 22525], [22502, 22503, 22526], [22503, 22527, 22526], [22503, 22504, 22528], [22503, 22528, 22527], [22504, 22505, 22528], [22505, 22529, 22528], [22505, 22506, 22530], [22505, 22530, 22529], [22506, 22507, 22530], [22507, 22531, 22530], [22507, 22508, 22532], [22507, 22532, 22531], [22508, 22509, 22532], [22509, 22533, 22532], [22509, 22510, 22534], [22509, 22534, 22533], [22510, 22511, 22534], [22511, 22535, 22534], [22511, 22512, 22536], [22511, 22536, 22535], [22512, 22513, 22536], [22513, 22537, 22536], [22513, 22514, 22538], [22513, 22538, 22537], [22514, 22515, 22538], [22515, 22539, 22538], [22515, 129, 258], [22515, 258, 22539], [22516, 22517, 22541], [22516, 22541, 22540], [22517, 22518, 22541], [22518, 22542, 22541], [22518, 22519, 22543], [22518, 22543, 22542], [22519, 22520, 22543], [22520, 22544, 22543], [22520, 22521, 22545], [22520, 22545, 22544], [22521, 22522, 22545], [22522, 22546, 22545], [22522, 22523, 22547], [22522, 22547, 22546], [22523, 22524, 22547], [22524, 22548, 22547], [22524, 22525, 22549], [22524, 22549, 22548], [22525, 22526, 22549], [22526, 22550, 22549], [22526, 22527, 22551], [22526, 22551, 22550], [22527, 22528, 22551], [22528, 22552, 22551], [22528, 22529, 22553], [22528, 22553, 22552], [22529, 22530, 22553], [22530, 22554, 22553], [22530, 22531, 22555], [22530, 22555, 22554], [22531, 22532, 22555], [22532, 22556, 22555], [22532, 22533, 22557], [22532, 22557, 22556], [22533, 22534, 22557], [22534, 22558, 22557], [22534, 22535, 22559], [22534, 22559, 22558], [22535, 22536, 22559], [22536, 22560, 22559], [22536, 22537, 22561], [22536, 22561, 22560], [22537, 22538, 22561], [22538, 22562, 22561], [22538, 22539, 22563], [22538, 22563, 22562], [22539, 258, 22563], [258, 387, 22563], [22540, 22541, 22564], [22541, 22565, 22564], [22541, 22542, 22566], [22541, 22566, 22565], [22542, 22543, 22566], [22543, 22567, 22566], [22543, 22544, 22568], [22543, 22568, 22567], [22544, 22545, 22568], [22545, 22569, 22568], [22545, 22546, 22570], [22545, 22570, 22569], [22546, 22547, 22570], [22547, 22571, 22570], [22547, 22548, 22572], [22547, 22572, 22571], [22548, 22549, 22572], [22549, 22573, 22572], 
[22549, 22550, 22574], [22549, 22574, 22573], [22550, 22551, 22574], [22551, 22575, 22574], [22551, 22552, 22576], [22551, 22576, 22575], [22552, 22553, 22576], [22553, 22577, 22576], [22553, 22554, 22578], [22553, 22578, 22577], [22554, 22555, 22578], [22555, 22579, 22578], [22555, 22556, 22580], [22555, 22580, 22579], [22556, 22557, 22580], [22557, 22581, 22580], [22557, 22558, 22582], [22557, 22582, 22581], [22558, 22559, 22582], [22559, 22583, 22582], [22559, 22560, 22584], [22559, 22584, 22583], [22560, 22561, 22584], [22561, 22585, 22584], [22561, 22562, 22586], [22561, 22586, 22585], [22562, 22563, 22586], [22563, 22587, 22586], [22563, 387, 516], [22563, 516, 22587], [22564, 22565, 22589], [22564, 22589, 22588], [22565, 22566, 22589], [22566, 22590, 22589], [22566, 22567, 22591], [22566, 22591, 22590], [22567, 22568, 22591], [22568, 22592, 22591], [22568, 22569, 22593], [22568, 22593, 22592], [22569, 22570, 22593], [22570, 22594, 22593], [22570, 22571, 22595], [22570, 22595, 22594], [22571, 22572, 22595], [22572, 22596, 22595], [22572, 22573, 22597], [22572, 22597, 22596], [22573, 22574, 22597], [22574, 22598, 22597], [22574, 22575, 22599], [22574, 22599, 22598], [22575, 22576, 22599], [22576, 22600, 22599], [22576, 22577, 22601], [22576, 22601, 22600], [22577, 22578, 22601], [22578, 22602, 22601], [22578, 22579, 22603], [22578, 22603, 22602], [22579, 22580, 22603], [22580, 22604, 22603], [22580, 22581, 22605], [22580, 22605, 22604], [22581, 22582, 22605], [22582, 22606, 22605], [22582, 22583, 22607], [22582, 22607, 22606], [22583, 22584, 22607], [22584, 22608, 22607], [22584, 22585, 22609], [22584, 22609, 22608], [22585, 22586, 22609], [22586, 22610, 22609], [22586, 22587, 22611], [22586, 22611, 22610], [22587, 516, 22611], [516, 645, 22611], [22588, 22589, 22612], [22589, 22613, 22612], [22589, 22590, 22614], [22589, 22614, 22613], [22590, 22591, 22614], [22591, 22615, 22614], [22591, 22592, 22616], [22591, 22616, 22615], [22592, 22593, 22616], [22593, 22617, 22616], [22593, 22594, 22618], [22593, 22618, 22617], [22594, 22595, 22618], [22595, 22619, 22618], [22595, 22596, 22620], [22595, 22620, 22619], [22596, 22597, 22620], [22597, 22621, 22620], [22597, 22598, 22622], [22597, 22622, 22621], [22598, 22599, 22622], [22599, 22623, 22622], [22599, 22600, 22624], [22599, 22624, 22623], [22600, 22601, 22624], [22601, 22625, 22624], [22601, 22602, 22626], [22601, 22626, 22625], [22602, 22603, 22626], [22603, 22627, 22626], [22603, 22604, 22628], [22603, 22628, 22627], [22604, 22605, 22628], [22605, 22629, 22628], [22605, 22606, 22630], [22605, 22630, 22629], [22606, 22607, 22630], [22607, 22631, 22630], [22607, 22608, 22632], [22607, 22632, 22631], [22608, 22609, 22632], [22609, 22633, 22632], [22609, 22610, 22634], [22609, 22634, 22633], [22610, 22611, 22634], [22611, 22635, 22634], [22611, 645, 774], [22611, 774, 22635], [22612, 22613, 22637], [22612, 22637, 22636], [22613, 22614, 22637], [22614, 22638, 22637], [22614, 22615, 22639], [22614, 22639, 22638], [22615, 22616, 22639], [22616, 22640, 22639], [22616, 22617, 22641], [22616, 22641, 22640], [22617, 22618, 22641], [22618, 22642, 22641], [22618, 22619, 22643], [22618, 22643, 22642], [22619, 22620, 22643], [22620, 22644, 22643], [22620, 22621, 22645], [22620, 22645, 22644], [22621, 22622, 22645], [22622, 22646, 22645], [22622, 22623, 22647], [22622, 22647, 22646], [22623, 22624, 22647], [22624, 22648, 22647], [22624, 22625, 22649], [22624, 22649, 22648], [22625, 22626, 22649], [22626, 22650, 22649], [22626, 22627, 22651], [22626, 
22651, 22650], [22627, 22628, 22651], [22628, 22652, 22651], [22628, 22629, 22653], [22628, 22653, 22652], [22629, 22630, 22653], [22630, 22654, 22653], [22630, 22631, 22655], [22630, 22655, 22654], [22631, 22632, 22655], [22632, 22656, 22655], [22632, 22633, 22657], [22632, 22657, 22656], [22633, 22634, 22657], [22634, 22658, 22657], [22634, 22635, 22659], [22634, 22659, 22658], [22635, 774, 22659], [774, 903, 22659], [22636, 22637, 22660], [22637, 22661, 22660], [22637, 22638, 22662], [22637, 22662, 22661], [22638, 22639, 22662], [22639, 22663, 22662], [22639, 22640, 22664], [22639, 22664, 22663], [22640, 22641, 22664], [22641, 22665, 22664], [22641, 22642, 22666], [22641, 22666, 22665], [22642, 22643, 22666], [22643, 22667, 22666], [22643, 22644, 22668], [22643, 22668, 22667], [22644, 22645, 22668], [22645, 22669, 22668], [22645, 22646, 22670], [22645, 22670, 22669], [22646, 22647, 22670], [22647, 22671, 22670], [22647, 22648, 22672], [22647, 22672, 22671], [22648, 22649, 22672], [22649, 22673, 22672], [22649, 22650, 22674], [22649, 22674, 22673], [22650, 22651, 22674], [22651, 22675, 22674], [22651, 22652, 22676], [22651, 22676, 22675], [22652, 22653, 22676], [22653, 22677, 22676], [22653, 22654, 22678], [22653, 22678, 22677], [22654, 22655, 22678], [22655, 22679, 22678], [22655, 22656, 22680], [22655, 22680, 22679], [22656, 22657, 22680], [22657, 22681, 22680], [22657, 22658, 22682], [22657, 22682, 22681], [22658, 22659, 22682], [22659, 22683, 22682], [22659, 903, 1032], [22659, 1032, 22683], [22660, 22661, 22685], [22660, 22685, 22684], [22661, 22662, 22685], [22662, 22686, 22685], [22662, 22663, 22687], [22662, 22687, 22686], [22663, 22664, 22687], [22664, 22688, 22687], [22664, 22665, 22689], [22664, 22689, 22688], [22665, 22666, 22689], [22666, 22690, 22689], [22666, 22667, 22691], [22666, 22691, 22690], [22667, 22668, 22691], [22668, 22692, 22691], [22668, 22669, 22693], [22668, 22693, 22692], [22669, 22670, 22693], [22670, 22694, 22693], [22670, 22671, 22695], [22670, 22695, 22694], [22671, 22672, 22695], [22672, 22696, 22695], [22672, 22673, 22697], [22672, 22697, 22696], [22673, 22674, 22697], [22674, 22698, 22697], [22674, 22675, 22699], [22674, 22699, 22698], [22675, 22676, 22699], [22676, 22700, 22699], [22676, 22677, 22701], [22676, 22701, 22700], [22677, 22678, 22701], [22678, 22702, 22701], [22678, 22679, 22703], [22678, 22703, 22702], [22679, 22680, 22703], [22680, 22704, 22703], [22680, 22681, 22705], [22680, 22705, 22704], [22681, 22682, 22705], [22682, 22706, 22705], [22682, 22683, 22707], [22682, 22707, 22706], [22683, 1032, 22707], [1032, 1161, 22707], [22684, 22685, 22708], [22685, 22709, 22708], [22685, 22686, 22710], [22685, 22710, 22709], [22686, 22687, 22710], [22687, 22711, 22710], [22687, 22688, 22712], [22687, 22712, 22711], [22688, 22689, 22712], [22689, 22713, 22712], [22689, 22690, 22714], [22689, 22714, 22713], [22690, 22691, 22714], [22691, 22715, 22714], [22691, 22692, 22716], [22691, 22716, 22715], [22692, 22693, 22716], [22693, 22717, 22716], [22693, 22694, 22718], [22693, 22718, 22717], [22694, 22695, 22718], [22695, 22719, 22718], [22695, 22696, 22720], [22695, 22720, 22719], [22696, 22697, 22720], [22697, 22721, 22720], [22697, 22698, 22722], [22697, 22722, 22721], [22698, 22699, 22722], [22699, 22723, 22722], [22699, 22700, 22724], [22699, 22724, 22723], [22700, 22701, 22724], [22701, 22725, 22724], [22701, 22702, 22726], [22701, 22726, 22725], [22702, 22703, 22726], [22703, 22727, 22726], [22703, 22704, 22728], [22703, 22728, 22727], [22704, 
22705, 22728], [22705, 22729, 22728], [22705, 22706, 22730], [22705, 22730, 22729], [22706, 22707, 22730], [22707, 22731, 22730], [22707, 1161, 1290], [22707, 1290, 22731], [22708, 22709, 22733], [22708, 22733, 22732], [22709, 22710, 22733], [22710, 22734, 22733], [22710, 22711, 22735], [22710, 22735, 22734], [22711, 22712, 22735], [22712, 22736, 22735], [22712, 22713, 22737], [22712, 22737, 22736], [22713, 22714, 22737], [22714, 22738, 22737], [22714, 22715, 22739], [22714, 22739, 22738], [22715, 22716, 22739], [22716, 22740, 22739], [22716, 22717, 22741], [22716, 22741, 22740], [22717, 22718, 22741], [22718, 22742, 22741], [22718, 22719, 22743], [22718, 22743, 22742], [22719, 22720, 22743], [22720, 22744, 22743], [22720, 22721, 22745], [22720, 22745, 22744], [22721, 22722, 22745], [22722, 22746, 22745], [22722, 22723, 22747], [22722, 22747, 22746], [22723, 22724, 22747], [22724, 22748, 22747], [22724, 22725, 22749], [22724, 22749, 22748], [22725, 22726, 22749], [22726, 22750, 22749], [22726, 22727, 22751], [22726, 22751, 22750], [22727, 22728, 22751], [22728, 22752, 22751], [22728, 22729, 22753], [22728, 22753, 22752], [22729, 22730, 22753], [22730, 22754, 22753], [22730, 22731, 22755], [22730, 22755, 22754], [22731, 1290, 22755], [1290, 1419, 22755], [22732, 22733, 22756], [22733, 22757, 22756], [22733, 22734, 22758], [22733, 22758, 22757], [22734, 22735, 22758], [22735, 22759, 22758], [22735, 22736, 22760], [22735, 22760, 22759], [22736, 22737, 22760], [22737, 22761, 22760], [22737, 22738, 22762], [22737, 22762, 22761], [22738, 22739, 22762], [22739, 22763, 22762], [22739, 22740, 22764], [22739, 22764, 22763], [22740, 22741, 22764], [22741, 22765, 22764], [22741, 22742, 22766], [22741, 22766, 22765], [22742, 22743, 22766], [22743, 22767, 22766], [22743, 22744, 22768], [22743, 22768, 22767], [22744, 22745, 22768], [22745, 22769, 22768], [22745, 22746, 22770], [22745, 22770, 22769], [22746, 22747, 22770], [22747, 22771, 22770], [22747, 22748, 22772], [22747, 22772, 22771], [22748, 22749, 22772], [22749, 22773, 22772], [22749, 22750, 22774], [22749, 22774, 22773], [22750, 22751, 22774], [22751, 22775, 22774], [22751, 22752, 22776], [22751, 22776, 22775], [22752, 22753, 22776], [22753, 22777, 22776], [22753, 22754, 22778], [22753, 22778, 22777], [22754, 22755, 22778], [22755, 22779, 22778], [22755, 1419, 1548], [22755, 1548, 22779], [22756, 22757, 22781], [22756, 22781, 22780], [22757, 22758, 22781], [22758, 22782, 22781], [22758, 22759, 22783], [22758, 22783, 22782], [22759, 22760, 22783], [22760, 22784, 22783], [22760, 22761, 22785], [22760, 22785, 22784], [22761, 22762, 22785], [22762, 22786, 22785], [22762, 22763, 22787], [22762, 22787, 22786], [22763, 22764, 22787], [22764, 22788, 22787], [22764, 22765, 22789], [22764, 22789, 22788], [22765, 22766, 22789], [22766, 22790, 22789], [22766, 22767, 22791], [22766, 22791, 22790], [22767, 22768, 22791], [22768, 22792, 22791], [22768, 22769, 22793], [22768, 22793, 22792], [22769, 22770, 22793], [22770, 22794, 22793], [22770, 22771, 22795], [22770, 22795, 22794], [22771, 22772, 22795], [22772, 22796, 22795], [22772, 22773, 22797], [22772, 22797, 22796], [22773, 22774, 22797], [22774, 22798, 22797], [22774, 22775, 22799], [22774, 22799, 22798], [22775, 22776, 22799], [22776, 22800, 22799], [22776, 22777, 22801], [22776, 22801, 22800], [22777, 22778, 22801], [22778, 22802, 22801], [22778, 22779, 22803], [22778, 22803, 22802], [22779, 1548, 22803], [1548, 1677, 22803], [22780, 22781, 22804], [22781, 22805, 22804], [22781, 22782, 22806], [22781, 
22806, 22805], [22782, 22783, 22806], [22783, 22807, 22806], [22783, 22784, 22808], [22783, 22808, 22807], [22784, 22785, 22808], [22785, 22809, 22808], [22785, 22786, 22810], [22785, 22810, 22809], [22786, 22787, 22810], [22787, 22811, 22810], [22787, 22788, 22812], [22787, 22812, 22811], [22788, 22789, 22812], [22789, 22813, 22812], [22789, 22790, 22814], [22789, 22814, 22813], [22790, 22791, 22814], [22791, 22815, 22814], [22791, 22792, 22816], [22791, 22816, 22815], [22792, 22793, 22816], [22793, 22817, 22816], [22793, 22794, 22818], [22793, 22818, 22817], [22794, 22795, 22818], [22795, 22819, 22818], [22795, 22796, 22820], [22795, 22820, 22819], [22796, 22797, 22820], [22797, 22821, 22820], [22797, 22798, 22822], [22797, 22822, 22821], [22798, 22799, 22822], [22799, 22823, 22822], [22799, 22800, 22824], [22799, 22824, 22823], [22800, 22801, 22824], [22801, 22825, 22824], [22801, 22802, 22826], [22801, 22826, 22825], [22802, 22803, 22826], [22803, 22827, 22826], [22803, 1677, 1806], [22803, 1806, 22827], [22804, 22805, 22829], [22804, 22829, 22828], [22805, 22806, 22829], [22806, 22830, 22829], [22806, 22807, 22831], [22806, 22831, 22830], [22807, 22808, 22831], [22808, 22832, 22831], [22808, 22809, 22833], [22808, 22833, 22832], [22809, 22810, 22833], [22810, 22834, 22833], [22810, 22811, 22835], [22810, 22835, 22834], [22811, 22812, 22835], [22812, 22836, 22835], [22812, 22813, 22837], [22812, 22837, 22836], [22813, 22814, 22837], [22814, 22838, 22837], [22814, 22815, 22839], [22814, 22839, 22838], [22815, 22816, 22839], [22816, 22840, 22839], [22816, 22817, 22841], [22816, 22841, 22840], [22817, 22818, 22841], [22818, 22842, 22841], [22818, 22819, 22843], [22818, 22843, 22842], [22819, 22820, 22843], [22820, 22844, 22843], [22820, 22821, 22845], [22820, 22845, 22844], [22821, 22822, 22845], [22822, 22846, 22845], [22822, 22823, 22847], [22822, 22847, 22846], [22823, 22824, 22847], [22824, 22848, 22847], [22824, 22825, 22849], [22824, 22849, 22848], [22825, 22826, 22849], [22826, 22850, 22849], [22826, 22827, 22851], [22826, 22851, 22850], [22827, 1806, 22851], [1806, 1935, 22851], [22828, 22829, 22852], [22829, 22853, 22852], [22829, 22830, 22854], [22829, 22854, 22853], [22830, 22831, 22854], [22831, 22855, 22854], [22831, 22832, 22856], [22831, 22856, 22855], [22832, 22833, 22856], [22833, 22857, 22856], [22833, 22834, 22858], [22833, 22858, 22857], [22834, 22835, 22858], [22835, 22859, 22858], [22835, 22836, 22860], [22835, 22860, 22859], [22836, 22837, 22860], [22837, 22861, 22860], [22837, 22838, 22862], [22837, 22862, 22861], [22838, 22839, 22862], [22839, 22863, 22862], [22839, 22840, 22864], [22839, 22864, 22863], [22840, 22841, 22864], [22841, 22865, 22864], [22841, 22842, 22866], [22841, 22866, 22865], [22842, 22843, 22866], [22843, 22867, 22866], [22843, 22844, 22868], [22843, 22868, 22867], [22844, 22845, 22868], [22845, 22869, 22868], [22845, 22846, 22870], [22845, 22870, 22869], [22846, 22847, 22870], [22847, 22871, 22870], [22847, 22848, 22872], [22847, 22872, 22871], [22848, 22849, 22872], [22849, 22873, 22872], [22849, 22850, 22874], [22849, 22874, 22873], [22850, 22851, 22874], [22851, 22875, 22874], [22851, 1935, 2064], [22851, 2064, 22875], [22852, 22853, 22877], [22852, 22877, 22876], [22853, 22854, 22877], [22854, 22878, 22877], [22854, 22855, 22879], [22854, 22879, 22878], [22855, 22856, 22879], [22856, 22880, 22879], [22856, 22857, 22881], [22856, 22881, 22880], [22857, 22858, 22881], [22858, 22882, 22881], [22858, 22859, 22883], [22858, 22883, 22882], 
[22859, 22860, 22883], [22860, 22884, 22883], [22860, 22861, 22885], [22860, 22885, 22884], [22861, 22862, 22885], [22862, 22886, 22885], [22862, 22863, 22887], [22862, 22887, 22886], [22863, 22864, 22887], [22864, 22888, 22887], [22864, 22865, 22889], [22864, 22889, 22888], [22865, 22866, 22889], [22866, 22890, 22889], [22866, 22867, 22891], [22866, 22891, 22890], [22867, 22868, 22891], [22868, 22892, 22891], [22868, 22869, 22893], [22868, 22893, 22892], [22869, 22870, 22893], [22870, 22894, 22893], [22870, 22871, 22895], [22870, 22895, 22894], [22871, 22872, 22895], [22872, 22896, 22895], [22872, 22873, 22897], [22872, 22897, 22896], [22873, 22874, 22897], [22874, 22898, 22897], [22874, 22875, 22899], [22874, 22899, 22898], [22875, 2064, 22899], [2064, 2193, 22899], [22876, 22877, 22900], [22877, 22901, 22900], [22877, 22878, 22902], [22877, 22902, 22901], [22878, 22879, 22902], [22879, 22903, 22902], [22879, 22880, 22904], [22879, 22904, 22903], [22880, 22881, 22904], [22881, 22905, 22904], [22881, 22882, 22906], [22881, 22906, 22905], [22882, 22883, 22906], [22883, 22907, 22906], [22883, 22884, 22908], [22883, 22908, 22907], [22884, 22885, 22908], [22885, 22909, 22908], [22885, 22886, 22910], [22885, 22910, 22909], [22886, 22887, 22910], [22887, 22911, 22910], [22887, 22888, 22912], [22887, 22912, 22911], [22888, 22889, 22912], [22889, 22913, 22912], [22889, 22890, 22914], [22889, 22914, 22913], [22890, 22891, 22914], [22891, 22915, 22914], [22891, 22892, 22916], [22891, 22916, 22915], [22892, 22893, 22916], [22893, 22917, 22916], [22893, 22894, 22918], [22893, 22918, 22917], [22894, 22895, 22918], [22895, 22919, 22918], [22895, 22896, 22920], [22895, 22920, 22919], [22896, 22897, 22920], [22897, 22921, 22920], [22897, 22898, 22922], [22897, 22922, 22921], [22898, 22899, 22922], [22899, 22923, 22922], [22899, 2193, 2322], [22899, 2322, 22923], [22900, 22901, 22925], [22900, 22925, 22924], [22901, 22902, 22925], [22902, 22926, 22925], [22902, 22903, 22927], [22902, 22927, 22926], [22903, 22904, 22927], [22904, 22928, 22927], [22904, 22905, 22929], [22904, 22929, 22928], [22905, 22906, 22929], [22906, 22930, 22929], [22906, 22907, 22931], [22906, 22931, 22930], [22907, 22908, 22931], [22908, 22932, 22931], [22908, 22909, 22933], [22908, 22933, 22932], [22909, 22910, 22933], [22910, 22934, 22933], [22910, 22911, 22935], [22910, 22935, 22934], [22911, 22912, 22935], [22912, 22936, 22935], [22912, 22913, 22937], [22912, 22937, 22936], [22913, 22914, 22937], [22914, 22938, 22937], [22914, 22915, 22939], [22914, 22939, 22938], [22915, 22916, 22939], [22916, 22940, 22939], [22916, 22917, 22941], [22916, 22941, 22940], [22917, 22918, 22941], [22918, 22942, 22941], [22918, 22919, 22943], [22918, 22943, 22942], [22919, 22920, 22943], [22920, 22944, 22943], [22920, 22921, 22945], [22920, 22945, 22944], [22921, 22922, 22945], [22922, 22946, 22945], [22922, 22923, 22947], [22922, 22947, 22946], [22923, 2322, 22947], [2322, 2451, 22947], [22924, 22925, 22948], [22925, 22949, 22948], [22925, 22926, 22950], [22925, 22950, 22949], [22926, 22927, 22950], [22927, 22951, 22950], [22927, 22928, 22952], [22927, 22952, 22951], [22928, 22929, 22952], [22929, 22953, 22952], [22929, 22930, 22954], [22929, 22954, 22953], [22930, 22931, 22954], [22931, 22955, 22954], [22931, 22932, 22956], [22931, 22956, 22955], [22932, 22933, 22956], [22933, 22957, 22956], [22933, 22934, 22958], [22933, 22958, 22957], [22934, 22935, 22958], [22935, 22959, 22958], [22935, 22936, 22960], [22935, 22960, 22959], [22936, 22937, 
22960], [22937, 22961, 22960], [22937, 22938, 22962], [22937, 22962, 22961], [22938, 22939, 22962], [22939, 22963, 22962], [22939, 22940, 22964], [22939, 22964, 22963], [22940, 22941, 22964], [22941, 22965, 22964], [22941, 22942, 22966], [22941, 22966, 22965], [22942, 22943, 22966], [22943, 22967, 22966], [22943, 22944, 22968], [22943, 22968, 22967], [22944, 22945, 22968], [22945, 22969, 22968], [22945, 22946, 22970], [22945, 22970, 22969], [22946, 22947, 22970], [22947, 22971, 22970], [22947, 2451, 2580], [22947, 2580, 22971], [22948, 22949, 22973], [22948, 22973, 22972], [22949, 22950, 22973], [22950, 22974, 22973], [22950, 22951, 22975], [22950, 22975, 22974], [22951, 22952, 22975], [22952, 22976, 22975], [22952, 22953, 22977], [22952, 22977, 22976], [22953, 22954, 22977], [22954, 22978, 22977], [22954, 22955, 22979], [22954, 22979, 22978], [22955, 22956, 22979], [22956, 22980, 22979], [22956, 22957, 22981], [22956, 22981, 22980], [22957, 22958, 22981], [22958, 22982, 22981], [22958, 22959, 22983], [22958, 22983, 22982], [22959, 22960, 22983], [22960, 22984, 22983], [22960, 22961, 22985], [22960, 22985, 22984], [22961, 22962, 22985], [22962, 22986, 22985], [22962, 22963, 22987], [22962, 22987, 22986], [22963, 22964, 22987], [22964, 22988, 22987], [22964, 22965, 22989], [22964, 22989, 22988], [22965, 22966, 22989], [22966, 22990, 22989], [22966, 22967, 22991], [22966, 22991, 22990], [22967, 22968, 22991], [22968, 22992, 22991], [22968, 22969, 22993], [22968, 22993, 22992], [22969, 22970, 22993], [22970, 22994, 22993], [22970, 22971, 22995], [22970, 22995, 22994], [22971, 2580, 22995], [2580, 2709, 22995], [22972, 22973, 22996], [22973, 22997, 22996], [22973, 22974, 22998], [22973, 22998, 22997], [22974, 22975, 22998], [22975, 22999, 22998], [22975, 22976, 23000], [22975, 23000, 22999], [22976, 22977, 23000], [22977, 23001, 23000], [22977, 22978, 23002], [22977, 23002, 23001], [22978, 22979, 23002], [22979, 23003, 23002], [22979, 22980, 23004], [22979, 23004, 23003], [22980, 22981, 23004], [22981, 23005, 23004], [22981, 22982, 23006], [22981, 23006, 23005], [22982, 22983, 23006], [22983, 23007, 23006], [22983, 22984, 23008], [22983, 23008, 23007], [22984, 22985, 23008], [22985, 23009, 23008], [22985, 22986, 23010], [22985, 23010, 23009], [22986, 22987, 23010], [22987, 23011, 23010], [22987, 22988, 23012], [22987, 23012, 23011], [22988, 22989, 23012], [22989, 23013, 23012], [22989, 22990, 23014], [22989, 23014, 23013], [22990, 22991, 23014], [22991, 23015, 23014], [22991, 22992, 23016], [22991, 23016, 23015], [22992, 22993, 23016], [22993, 23017, 23016], [22993, 22994, 23018], [22993, 23018, 23017], [22994, 22995, 23018], [22995, 23019, 23018], [22995, 2709, 2838], [22995, 2838, 23019], [22996, 22997, 23021], [22996, 23021, 23020], [22997, 22998, 23021], [22998, 23022, 23021], [22998, 22999, 23023], [22998, 23023, 23022], [22999, 23000, 23023], [23000, 23024, 23023], [23000, 23001, 23025], [23000, 23025, 23024], [23001, 23002, 23025], [23002, 23026, 23025], [23002, 23003, 23027], [23002, 23027, 23026], [23003, 23004, 23027], [23004, 23028, 23027], [23004, 23005, 23029], [23004, 23029, 23028], [23005, 23006, 23029], [23006, 23030, 23029], [23006, 23007, 23031], [23006, 23031, 23030], [23007, 23008, 23031], [23008, 23032, 23031], [23008, 23009, 23033], [23008, 23033, 23032], [23009, 23010, 23033], [23010, 23034, 23033], [23010, 23011, 23035], [23010, 23035, 23034], [23011, 23012, 23035], [23012, 23036, 23035], [23012, 23013, 23037], [23012, 23037, 23036], [23013, 23014, 23037], [23014, 
23038, 23037], [23014, 23015, 23039], [23014, 23039, 23038], [23015, 23016, 23039], [23016, 23040, 23039], [23016, 23017, 23041], [23016, 23041, 23040], [23017, 23018, 23041], [23018, 23042, 23041], [23018, 23019, 23043], [23018, 23043, 23042], [23019, 2838, 23043], [2838, 2967, 23043], [23020, 23021, 23044], [23021, 23045, 23044], [23021, 23022, 23046], [23021, 23046, 23045], [23022, 23023, 23046], [23023, 23047, 23046], [23023, 23024, 23048], [23023, 23048, 23047], [23024, 23025, 23048], [23025, 23049, 23048], [23025, 23026, 23050], [23025, 23050, 23049], [23026, 23027, 23050], [23027, 23051, 23050], [23027, 23028, 23052], [23027, 23052, 23051], [23028, 23029, 23052], [23029, 23053, 23052], [23029, 23030, 23054], [23029, 23054, 23053], [23030, 23031, 23054], [23031, 23055, 23054], [23031, 23032, 23056], [23031, 23056, 23055], [23032, 23033, 23056], [23033, 23057, 23056], [23033, 23034, 23058], [23033, 23058, 23057], [23034, 23035, 23058], [23035, 23059, 23058], [23035, 23036, 23060], [23035, 23060, 23059], [23036, 23037, 23060], [23037, 23061, 23060], [23037, 23038, 23062], [23037, 23062, 23061], [23038, 23039, 23062], [23039, 23063, 23062], [23039, 23040, 23064], [23039, 23064, 23063], [23040, 23041, 23064], [23041, 23065, 23064], [23041, 23042, 23066], [23041, 23066, 23065], [23042, 23043, 23066], [23043, 23067, 23066], [23043, 2967, 3096], [23043, 3096, 23067], [23044, 23045, 23069], [23044, 23069, 23068], [23045, 23046, 23069], [23046, 23070, 23069], [23046, 23047, 23071], [23046, 23071, 23070], [23047, 23048, 23071], [23048, 23072, 23071], [23048, 23049, 23073], [23048, 23073, 23072], [23049, 23050, 23073], [23050, 23074, 23073], [23050, 23051, 23075], [23050, 23075, 23074], [23051, 23052, 23075], [23052, 23076, 23075], [23052, 23053, 23077], [23052, 23077, 23076], [23053, 23054, 23077], [23054, 23078, 23077], [23054, 23055, 23079], [23054, 23079, 23078], [23055, 23056, 23079], [23056, 23080, 23079], [23056, 23057, 23081], [23056, 23081, 23080], [23057, 23058, 23081], [23058, 23082, 23081], [23058, 23059, 23083], [23058, 23083, 23082], [23059, 23060, 23083], [23060, 23084, 23083], [23060, 23061, 23085], [23060, 23085, 23084], [23061, 23062, 23085], [23062, 23086, 23085], [23062, 23063, 23087], [23062, 23087, 23086], [23063, 23064, 23087], [23064, 23088, 23087], [23064, 23065, 23089], [23064, 23089, 23088], [23065, 23066, 23089], [23066, 23090, 23089], [23066, 23067, 23091], [23066, 23091, 23090], [23067, 3096, 23091], [3096, 3225, 23091], [23068, 23069, 23092], [23069, 23093, 23092], [23069, 23070, 23094], [23069, 23094, 23093], [23070, 23071, 23094], [23071, 23095, 23094], [23071, 23072, 23096], [23071, 23096, 23095], [23072, 23073, 23096], [23073, 23097, 23096], [23073, 23074, 23098], [23073, 23098, 23097], [23074, 23075, 23098], [23075, 23099, 23098], [23075, 23076, 23100], [23075, 23100, 23099], [23076, 23077, 23100], [23077, 23101, 23100], [23077, 23078, 23102], [23077, 23102, 23101], [23078, 23079, 23102], [23079, 23103, 23102], [23079, 23080, 23104], [23079, 23104, 23103], [23080, 23081, 23104], [23081, 23105, 23104], [23081, 23082, 23106], [23081, 23106, 23105], [23082, 23083, 23106], [23083, 23107, 23106], [23083, 23084, 23108], [23083, 23108, 23107], [23084, 23085, 23108], [23085, 23109, 23108], [23085, 23086, 23110], [23085, 23110, 23109], [23086, 23087, 23110], [23087, 23111, 23110], [23087, 23088, 23112], [23087, 23112, 23111], [23088, 23089, 23112], [23089, 23113, 23112], [23089, 23090, 23114], [23089, 23114, 23113], [23090, 23091, 23114], [23091, 23115, 23114], 
[23091, 3225, 3354], [23091, 3354, 23115], [23092, 23093, 23117], [23092, 23117, 23116], [23093, 23094, 23117], [23094, 23118, 23117], [23094, 23095, 23119], [23094, 23119, 23118], [23095, 23096, 23119], [23096, 23120, 23119], [23096, 23097, 23121], [23096, 23121, 23120], [23097, 23098, 23121], [23098, 23122, 23121], [23098, 23099, 23123], [23098, 23123, 23122], [23099, 23100, 23123], [23100, 23124, 23123], [23100, 23101, 23125], [23100, 23125, 23124], [23101, 23102, 23125], [23102, 23126, 23125], [23102, 23103, 23127], [23102, 23127, 23126], [23103, 23104, 23127], [23104, 23128, 23127], [23104, 23105, 23129], [23104, 23129, 23128], [23105, 23106, 23129], [23106, 23130, 23129], [23106, 23107, 23131], [23106, 23131, 23130], [23107, 23108, 23131], [23108, 23132, 23131], [23108, 23109, 23133], [23108, 23133, 23132], [23109, 23110, 23133], [23110, 23134, 23133], [23110, 23111, 23135], [23110, 23135, 23134], [23111, 23112, 23135], [23112, 23136, 23135], [23112, 23113, 23137], [23112, 23137, 23136], [23113, 23114, 23137], [23114, 23138, 23137], [23114, 23115, 23139], [23114, 23139, 23138], [23115, 3354, 23139], [3354, 3483, 23139], [23116, 23117, 23140], [23117, 23141, 23140], [23117, 23118, 23142], [23117, 23142, 23141], [23118, 23119, 23142], [23119, 23143, 23142], [23119, 23120, 23144], [23119, 23144, 23143], [23120, 23121, 23144], [23121, 23145, 23144], [23121, 23122, 23146], [23121, 23146, 23145], [23122, 23123, 23146], [23123, 23147, 23146], [23123, 23124, 23148], [23123, 23148, 23147], [23124, 23125, 23148], [23125, 23149, 23148], [23125, 23126, 23150], [23125, 23150, 23149], [23126, 23127, 23150], [23127, 23151, 23150], [23127, 23128, 23152], [23127, 23152, 23151], [23128, 23129, 23152], [23129, 23153, 23152], [23129, 23130, 23154], [23129, 23154, 23153], [23130, 23131, 23154], [23131, 23155, 23154], [23131, 23132, 23156], [23131, 23156, 23155], [23132, 23133, 23156], [23133, 23157, 23156], [23133, 23134, 23158], [23133, 23158, 23157], [23134, 23135, 23158], [23135, 23159, 23158], [23135, 23136, 23160], [23135, 23160, 23159], [23136, 23137, 23160], [23137, 23161, 23160], [23137, 23138, 23162], [23137, 23162, 23161], [23138, 23139, 23162], [23139, 23163, 23162], [23139, 3483, 3612], [23139, 3612, 23163], [23140, 23141, 23165], [23140, 23165, 23164], [23141, 23142, 23165], [23142, 23166, 23165], [23142, 23143, 23167], [23142, 23167, 23166], [23143, 23144, 23167], [23144, 23168, 23167], [23144, 23145, 23169], [23144, 23169, 23168], [23145, 23146, 23169], [23146, 23170, 23169], [23146, 23147, 23171], [23146, 23171, 23170], [23147, 23148, 23171], [23148, 23172, 23171], [23148, 23149, 23173], [23148, 23173, 23172], [23149, 23150, 23173], [23150, 23174, 23173], [23150, 23151, 23175], [23150, 23175, 23174], [23151, 23152, 23175], [23152, 23176, 23175], [23152, 23153, 23177], [23152, 23177, 23176], [23153, 23154, 23177], [23154, 23178, 23177], [23154, 23155, 23179], [23154, 23179, 23178], [23155, 23156, 23179], [23156, 23180, 23179], [23156, 23157, 23181], [23156, 23181, 23180], [23157, 23158, 23181], [23158, 23182, 23181], [23158, 23159, 23183], [23158, 23183, 23182], [23159, 23160, 23183], [23160, 23184, 23183], [23160, 23161, 23185], [23160, 23185, 23184], [23161, 23162, 23185], [23162, 23186, 23185], [23162, 23163, 23187], [23162, 23187, 23186], [23163, 3612, 23187], [3612, 3741, 23187], [23164, 23165, 23188], [23165, 23189, 23188], [23165, 23166, 23190], [23165, 23190, 23189], [23166, 23167, 23190], [23167, 23191, 23190], [23167, 23168, 23192], [23167, 23192, 23191], [23168, 23169, 23192], 
[23169, 23193, 23192], [23169, 23170, 23194], [23169, 23194, 23193], [23170, 23171, 23194], [23171, 23195, 23194], [23171, 23172, 23196], [23171, 23196, 23195], [23172, 23173, 23196], [23173, 23197, 23196], [23173, 23174, 23198], [23173, 23198, 23197], [23174, 23175, 23198], [23175, 23199, 23198], [23175, 23176, 23200], [23175, 23200, 23199], [23176, 23177, 23200], [23177, 23201, 23200], [23177, 23178, 23202], [23177, 23202, 23201], [23178, 23179, 23202], [23179, 23203, 23202], [23179, 23180, 23204], [23179, 23204, 23203], [23180, 23181, 23204], [23181, 23205, 23204], [23181, 23182, 23206], [23181, 23206, 23205], [23182, 23183, 23206], [23183, 23207, 23206], [23183, 23184, 23208], [23183, 23208, 23207], [23184, 23185, 23208], [23185, 23209, 23208], [23185, 23186, 23210], [23185, 23210, 23209], [23186, 23187, 23210], [23187, 23211, 23210], [23187, 3741, 3870], [23187, 3870, 23211], [23188, 23189, 23213], [23188, 23213, 23212], [23189, 23190, 23213], [23190, 23214, 23213], [23190, 23191, 23215], [23190, 23215, 23214], [23191, 23192, 23215], [23192, 23216, 23215], [23192, 23193, 23217], [23192, 23217, 23216], [23193, 23194, 23217], [23194, 23218, 23217], [23194, 23195, 23219], [23194, 23219, 23218], [23195, 23196, 23219], [23196, 23220, 23219], [23196, 23197, 23221], [23196, 23221, 23220], [23197, 23198, 23221], [23198, 23222, 23221], [23198, 23199, 23223], [23198, 23223, 23222], [23199, 23200, 23223], [23200, 23224, 23223], [23200, 23201, 23225], [23200, 23225, 23224], [23201, 23202, 23225], [23202, 23226, 23225], [23202, 23203, 23227], [23202, 23227, 23226], [23203, 23204, 23227], [23204, 23228, 23227], [23204, 23205, 23229], [23204, 23229, 23228], [23205, 23206, 23229], [23206, 23230, 23229], [23206, 23207, 23231], [23206, 23231, 23230], [23207, 23208, 23231], [23208, 23232, 23231], [23208, 23209, 23233], [23208, 23233, 23232], [23209, 23210, 23233], [23210, 23234, 23233], [23210, 23211, 23235], [23210, 23235, 23234], [23211, 3870, 23235], [3870, 3999, 23235], [23212, 23213, 23236], [23213, 23237, 23236], [23213, 23214, 23238], [23213, 23238, 23237], [23214, 23215, 23238], [23215, 23239, 23238], [23215, 23216, 23240], [23215, 23240, 23239], [23216, 23217, 23240], [23217, 23241, 23240], [23217, 23218, 23242], [23217, 23242, 23241], [23218, 23219, 23242], [23219, 23243, 23242], [23219, 23220, 23244], [23219, 23244, 23243], [23220, 23221, 23244], [23221, 23245, 23244], [23221, 23222, 23246], [23221, 23246, 23245], [23222, 23223, 23246], [23223, 23247, 23246], [23223, 23224, 23248], [23223, 23248, 23247], [23224, 23225, 23248], [23225, 23249, 23248], [23225, 23226, 23250], [23225, 23250, 23249], [23226, 23227, 23250], [23227, 23251, 23250], [23227, 23228, 23252], [23227, 23252, 23251], [23228, 23229, 23252], [23229, 23253, 23252], [23229, 23230, 23254], [23229, 23254, 23253], [23230, 23231, 23254], [23231, 23255, 23254], [23231, 23232, 23256], [23231, 23256, 23255], [23232, 23233, 23256], [23233, 23257, 23256], [23233, 23234, 23258], [23233, 23258, 23257], [23234, 23235, 23258], [23235, 23259, 23258], [23235, 3999, 4128], [23235, 4128, 23259], [23236, 23237, 23261], [23236, 23261, 23260], [23237, 23238, 23261], [23238, 23262, 23261], [23238, 23239, 23263], [23238, 23263, 23262], [23239, 23240, 23263], [23240, 23264, 23263], [23240, 23241, 23265], [23240, 23265, 23264], [23241, 23242, 23265], [23242, 23266, 23265], [23242, 23243, 23267], [23242, 23267, 23266], [23243, 23244, 23267], [23244, 23268, 23267], [23244, 23245, 23269], [23244, 23269, 23268], [23245, 23246, 23269], [23246, 23270, 
23269], [23246, 23247, 23271], [23246, 23271, 23270], [23247, 23248, 23271], [23248, 23272, 23271], [23248, 23249, 23273], [23248, 23273, 23272], [23249, 23250, 23273], [23250, 23274, 23273], [23250, 23251, 23275], [23250, 23275, 23274], [23251, 23252, 23275], [23252, 23276, 23275], [23252, 23253, 23277], [23252, 23277, 23276], [23253, 23254, 23277], [23254, 23278, 23277], [23254, 23255, 23279], [23254, 23279, 23278], [23255, 23256, 23279], [23256, 23280, 23279], [23256, 23257, 23281], [23256, 23281, 23280], [23257, 23258, 23281], [23258, 23282, 23281], [23258, 23259, 23283], [23258, 23283, 23282], [23259, 4128, 23283], [4128, 4257, 23283], [23260, 23261, 23284], [23261, 23285, 23284], [23261, 23262, 23286], [23261, 23286, 23285], [23262, 23263, 23286], [23263, 23287, 23286], [23263, 23264, 23288], [23263, 23288, 23287], [23264, 23265, 23288], [23265, 23289, 23288], [23265, 23266, 23290], [23265, 23290, 23289], [23266, 23267, 23290], [23267, 23291, 23290], [23267, 23268, 23292], [23267, 23292, 23291], [23268, 23269, 23292], [23269, 23293, 23292], [23269, 23270, 23294], [23269, 23294, 23293], [23270, 23271, 23294], [23271, 23295, 23294], [23271, 23272, 23296], [23271, 23296, 23295], [23272, 23273, 23296], [23273, 23297, 23296], [23273, 23274, 23298], [23273, 23298, 23297], [23274, 23275, 23298], [23275, 23299, 23298], [23275, 23276, 23300], [23275, 23300, 23299], [23276, 23277, 23300], [23277, 23301, 23300], [23277, 23278, 23302], [23277, 23302, 23301], [23278, 23279, 23302], [23279, 23303, 23302], [23279, 23280, 23304], [23279, 23304, 23303], [23280, 23281, 23304], [23281, 23305, 23304], [23281, 23282, 23306], [23281, 23306, 23305], [23282, 23283, 23306], [23283, 23307, 23306], [23283, 4257, 4386], [23283, 4386, 23307], [23284, 23285, 23309], [23284, 23309, 23308], [23285, 23286, 23309], [23286, 23310, 23309], [23286, 23287, 23311], [23286, 23311, 23310], [23287, 23288, 23311], [23288, 23312, 23311], [23288, 23289, 23313], [23288, 23313, 23312], [23289, 23290, 23313], [23290, 23314, 23313], [23290, 23291, 23315], [23290, 23315, 23314], [23291, 23292, 23315], [23292, 23316, 23315], [23292, 23293, 23317], [23292, 23317, 23316], [23293, 23294, 23317], [23294, 23318, 23317], [23294, 23295, 23319], [23294, 23319, 23318], [23295, 23296, 23319], [23296, 23320, 23319], [23296, 23297, 23321], [23296, 23321, 23320], [23297, 23298, 23321], [23298, 23322, 23321], [23298, 23299, 23323], [23298, 23323, 23322], [23299, 23300, 23323], [23300, 23324, 23323], [23300, 23301, 23325], [23300, 23325, 23324], [23301, 23302, 23325], [23302, 23326, 23325], [23302, 23303, 23327], [23302, 23327, 23326], [23303, 23304, 23327], [23304, 23328, 23327], [23304, 23305, 23329], [23304, 23329, 23328], [23305, 23306, 23329], [23306, 23330, 23329], [23306, 23307, 23331], [23306, 23331, 23330], [23307, 4386, 23331], [4386, 4515, 23331], [23308, 23309, 23332], [23309, 23333, 23332], [23309, 23310, 23334], [23309, 23334, 23333], [23310, 23311, 23334], [23311, 23335, 23334], [23311, 23312, 23336], [23311, 23336, 23335], [23312, 23313, 23336], [23313, 23337, 23336], [23313, 23314, 23338], [23313, 23338, 23337], [23314, 23315, 23338], [23315, 23339, 23338], [23315, 23316, 23340], [23315, 23340, 23339], [23316, 23317, 23340], [23317, 23341, 23340], [23317, 23318, 23342], [23317, 23342, 23341], [23318, 23319, 23342], [23319, 23343, 23342], [23319, 23320, 23344], [23319, 23344, 23343], [23320, 23321, 23344], [23321, 23345, 23344], [23321, 23322, 23346], [23321, 23346, 23345], [23322, 23323, 23346], [23323, 23347, 23346], [23323, 
23324, 23348], [23323, 23348, 23347], [23324, 23325, 23348], [23325, 23349, 23348], [23325, 23326, 23350], [23325, 23350, 23349], [23326, 23327, 23350], [23327, 23351, 23350], [23327, 23328, 23352], [23327, 23352, 23351], [23328, 23329, 23352], [23329, 23353, 23352], [23329, 23330, 23354], [23329, 23354, 23353], [23330, 23331, 23354], [23331, 23355, 23354], [23331, 4515, 4644], [23331, 4644, 23355], [23332, 23333, 23357], [23332, 23357, 23356], [23333, 23334, 23357], [23334, 23358, 23357], [23334, 23335, 23359], [23334, 23359, 23358], [23335, 23336, 23359], [23336, 23360, 23359], [23336, 23337, 23361], [23336, 23361, 23360], [23337, 23338, 23361], [23338, 23362, 23361], [23338, 23339, 23363], [23338, 23363, 23362], [23339, 23340, 23363], [23340, 23364, 23363], [23340, 23341, 23365], [23340, 23365, 23364], [23341, 23342, 23365], [23342, 23366, 23365], [23342, 23343, 23367], [23342, 23367, 23366], [23343, 23344, 23367], [23344, 23368, 23367], [23344, 23345, 23369], [23344, 23369, 23368], [23345, 23346, 23369], [23346, 23370, 23369], [23346, 23347, 23371], [23346, 23371, 23370], [23347, 23348, 23371], [23348, 23372, 23371], [23348, 23349, 23373], [23348, 23373, 23372], [23349, 23350, 23373], [23350, 23374, 23373], [23350, 23351, 23375], [23350, 23375, 23374], [23351, 23352, 23375], [23352, 23376, 23375], [23352, 23353, 23377], [23352, 23377, 23376], [23353, 23354, 23377], [23354, 23378, 23377], [23354, 23355, 23379], [23354, 23379, 23378], [23355, 4644, 23379], [4644, 4773, 23379], [23356, 23357, 23380], [23357, 23381, 23380], [23357, 23358, 23382], [23357, 23382, 23381], [23358, 23359, 23382], [23359, 23383, 23382], [23359, 23360, 23384], [23359, 23384, 23383], [23360, 23361, 23384], [23361, 23385, 23384], [23361, 23362, 23386], [23361, 23386, 23385], [23362, 23363, 23386], [23363, 23387, 23386], [23363, 23364, 23388], [23363, 23388, 23387], [23364, 23365, 23388], [23365, 23389, 23388], [23365, 23366, 23390], [23365, 23390, 23389], [23366, 23367, 23390], [23367, 23391, 23390], [23367, 23368, 23392], [23367, 23392, 23391], [23368, 23369, 23392], [23369, 23393, 23392], [23369, 23370, 23394], [23369, 23394, 23393], [23370, 23371, 23394], [23371, 23395, 23394], [23371, 23372, 23396], [23371, 23396, 23395], [23372, 23373, 23396], [23373, 23397, 23396], [23373, 23374, 23398], [23373, 23398, 23397], [23374, 23375, 23398], [23375, 23399, 23398], [23375, 23376, 23400], [23375, 23400, 23399], [23376, 23377, 23400], [23377, 23401, 23400], [23377, 23378, 23402], [23377, 23402, 23401], [23378, 23379, 23402], [23379, 23403, 23402], [23379, 4773, 4902], [23379, 4902, 23403], [23380, 23381, 23405], [23380, 23405, 23404], [23381, 23382, 23405], [23382, 23406, 23405], [23382, 23383, 23407], [23382, 23407, 23406], [23383, 23384, 23407], [23384, 23408, 23407], [23384, 23385, 23409], [23384, 23409, 23408], [23385, 23386, 23409], [23386, 23410, 23409], [23386, 23387, 23411], [23386, 23411, 23410], [23387, 23388, 23411], [23388, 23412, 23411], [23388, 23389, 23413], [23388, 23413, 23412], [23389, 23390, 23413], [23390, 23414, 23413], [23390, 23391, 23415], [23390, 23415, 23414], [23391, 23392, 23415], [23392, 23416, 23415], [23392, 23393, 23417], [23392, 23417, 23416], [23393, 23394, 23417], [23394, 23418, 23417], [23394, 23395, 23419], [23394, 23419, 23418], [23395, 23396, 23419], [23396, 23420, 23419], [23396, 23397, 23421], [23396, 23421, 23420], [23397, 23398, 23421], [23398, 23422, 23421], [23398, 23399, 23423], [23398, 23423, 23422], [23399, 23400, 23423], [23400, 23424, 23423], [23400, 23401, 23425], 
[23400, 23425, 23424], [23401, 23402, 23425], [23402, 23426, 23425], [23402, 23403, 23427], [23402, 23427, 23426], [23403, 4902, 23427], [4902, 5031, 23427], [23404, 23405, 23428], [23405, 23429, 23428], [23405, 23406, 23430], [23405, 23430, 23429], [23406, 23407, 23430], [23407, 23431, 23430], [23407, 23408, 23432], [23407, 23432, 23431], [23408, 23409, 23432], [23409, 23433, 23432], [23409, 23410, 23434], [23409, 23434, 23433], [23410, 23411, 23434], [23411, 23435, 23434], [23411, 23412, 23436], [23411, 23436, 23435], [23412, 23413, 23436], [23413, 23437, 23436], [23413, 23414, 23438], [23413, 23438, 23437], [23414, 23415, 23438], [23415, 23439, 23438], [23415, 23416, 23440], [23415, 23440, 23439], [23416, 23417, 23440], [23417, 23441, 23440], [23417, 23418, 23442], [23417, 23442, 23441], [23418, 23419, 23442], [23419, 23443, 23442], [23419, 23420, 23444], [23419, 23444, 23443], [23420, 23421, 23444], [23421, 23445, 23444], [23421, 23422, 23446], [23421, 23446, 23445], [23422, 23423, 23446], [23423, 23447, 23446], [23423, 23424, 23448], [23423, 23448, 23447], [23424, 23425, 23448], [23425, 23449, 23448], [23425, 23426, 23450], [23425, 23450, 23449], [23426, 23427, 23450], [23427, 23451, 23450], [23427, 5031, 5160], [23427, 5160, 23451], [23428, 23429, 23453], [23428, 23453, 23452], [23429, 23430, 23453], [23430, 23454, 23453], [23430, 23431, 23455], [23430, 23455, 23454], [23431, 23432, 23455], [23432, 23456, 23455], [23432, 23433, 23457], [23432, 23457, 23456], [23433, 23434, 23457], [23434, 23458, 23457], [23434, 23435, 23459], [23434, 23459, 23458], [23435, 23436, 23459], [23436, 23460, 23459], [23436, 23437, 23461], [23436, 23461, 23460], [23437, 23438, 23461], [23438, 23462, 23461], [23438, 23439, 23463], [23438, 23463, 23462], [23439, 23440, 23463], [23440, 23464, 23463], [23440, 23441, 23465], [23440, 23465, 23464], [23441, 23442, 23465], [23442, 23466, 23465], [23442, 23443, 23467], [23442, 23467, 23466], [23443, 23444, 23467], [23444, 23468, 23467], [23444, 23445, 23469], [23444, 23469, 23468], [23445, 23446, 23469], [23446, 23470, 23469], [23446, 23447, 23471], [23446, 23471, 23470], [23447, 23448, 23471], [23448, 23472, 23471], [23448, 23449, 23473], [23448, 23473, 23472], [23449, 23450, 23473], [23450, 23474, 23473], [23450, 23451, 23475], [23450, 23475, 23474], [23451, 5160, 23475], [5160, 5289, 23475], [23452, 23453, 23476], [23453, 23477, 23476], [23453, 23454, 23478], [23453, 23478, 23477], [23454, 23455, 23478], [23455, 23479, 23478], [23455, 23456, 23480], [23455, 23480, 23479], [23456, 23457, 23480], [23457, 23481, 23480], [23457, 23458, 23482], [23457, 23482, 23481], [23458, 23459, 23482], [23459, 23483, 23482], [23459, 23460, 23484], [23459, 23484, 23483], [23460, 23461, 23484], [23461, 23485, 23484], [23461, 23462, 23486], [23461, 23486, 23485], [23462, 23463, 23486], [23463, 23487, 23486], [23463, 23464, 23488], [23463, 23488, 23487], [23464, 23465, 23488], [23465, 23489, 23488], [23465, 23466, 23490], [23465, 23490, 23489], [23466, 23467, 23490], [23467, 23491, 23490], [23467, 23468, 23492], [23467, 23492, 23491], [23468, 23469, 23492], [23469, 23493, 23492], [23469, 23470, 23494], [23469, 23494, 23493], [23470, 23471, 23494], [23471, 23495, 23494], [23471, 23472, 23496], [23471, 23496, 23495], [23472, 23473, 23496], [23473, 23497, 23496], [23473, 23474, 23498], [23473, 23498, 23497], [23474, 23475, 23498], [23475, 23499, 23498], [23475, 5289, 5418], [23475, 5418, 23499], [23476, 23477, 23501], [23476, 23501, 23500], [23477, 23478, 23501], [23478, 23502, 23501], 
[23478, 23479, 23503], [23478, 23503, 23502], [23479, 23480, 23503], [23480, 23504, 23503], [23480, 23481, 23505], [23480, 23505, 23504], [23481, 23482, 23505], [23482, 23506, 23505], [23482, 23483, 23507], [23482, 23507, 23506], [23483, 23484, 23507], [23484, 23508, 23507], [23484, 23485, 23509], [23484, 23509, 23508], [23485, 23486, 23509], [23486, 23510, 23509], [23486, 23487, 23511], [23486, 23511, 23510], [23487, 23488, 23511], [23488, 23512, 23511], [23488, 23489, 23513], [23488, 23513, 23512], [23489, 23490, 23513], [23490, 23514, 23513], [23490, 23491, 23515], [23490, 23515, 23514], [23491, 23492, 23515], [23492, 23516, 23515], [23492, 23493, 23517], [23492, 23517, 23516], [23493, 23494, 23517], [23494, 23518, 23517], [23494, 23495, 23519], [23494, 23519, 23518], [23495, 23496, 23519], [23496, 23520, 23519], [23496, 23497, 23521], [23496, 23521, 23520], [23497, 23498, 23521], [23498, 23522, 23521], [23498, 23499, 23523], [23498, 23523, 23522], [23499, 5418, 23523], [5418, 5547, 23523], [23500, 23501, 23524], [23501, 23525, 23524], [23501, 23502, 23526], [23501, 23526, 23525], [23502, 23503, 23526], [23503, 23527, 23526], [23503, 23504, 23528], [23503, 23528, 23527], [23504, 23505, 23528], [23505, 23529, 23528], [23505, 23506, 23530], [23505, 23530, 23529], [23506, 23507, 23530], [23507, 23531, 23530], [23507, 23508, 23532], [23507, 23532, 23531], [23508, 23509, 23532], [23509, 23533, 23532], [23509, 23510, 23534], [23509, 23534, 23533], [23510, 23511, 23534], [23511, 23535, 23534], [23511, 23512, 23536], [23511, 23536, 23535], [23512, 23513, 23536], [23513, 23537, 23536], [23513, 23514, 23538], [23513, 23538, 23537], [23514, 23515, 23538], [23515, 23539, 23538], [23515, 23516, 23540], [23515, 23540, 23539], [23516, 23517, 23540], [23517, 23541, 23540], [23517, 23518, 23542], [23517, 23542, 23541], [23518, 23519, 23542], [23519, 23543, 23542], [23519, 23520, 23544], [23519, 23544, 23543], [23520, 23521, 23544], [23521, 23545, 23544], [23521, 23522, 23546], [23521, 23546, 23545], [23522, 23523, 23546], [23523, 23547, 23546], [23523, 5547, 5676], [23523, 5676, 23547], [23524, 23525, 23549], [23524, 23549, 23548], [23525, 23526, 23549], [23526, 23550, 23549], [23526, 23527, 23551], [23526, 23551, 23550], [23527, 23528, 23551], [23528, 23552, 23551], [23528, 23529, 23553], [23528, 23553, 23552], [23529, 23530, 23553], [23530, 23554, 23553], [23530, 23531, 23555], [23530, 23555, 23554], [23531, 23532, 23555], [23532, 23556, 23555], [23532, 23533, 23557], [23532, 23557, 23556], [23533, 23534, 23557], [23534, 23558, 23557], [23534, 23535, 23559], [23534, 23559, 23558], [23535, 23536, 23559], [23536, 23560, 23559], [23536, 23537, 23561], [23536, 23561, 23560], [23537, 23538, 23561], [23538, 23562, 23561], [23538, 23539, 23563], [23538, 23563, 23562], [23539, 23540, 23563], [23540, 23564, 23563], [23540, 23541, 23565], [23540, 23565, 23564], [23541, 23542, 23565], [23542, 23566, 23565], [23542, 23543, 23567], [23542, 23567, 23566], [23543, 23544, 23567], [23544, 23568, 23567], [23544, 23545, 23569], [23544, 23569, 23568], [23545, 23546, 23569], [23546, 23570, 23569], [23546, 23547, 23571], [23546, 23571, 23570], [23547, 5676, 23571], [5676, 5805, 23571], [23548, 23549, 23572], [23549, 23573, 23572], [23549, 23550, 23574], [23549, 23574, 23573], [23550, 23551, 23574], [23551, 23575, 23574], [23551, 23552, 23576], [23551, 23576, 23575], [23552, 23553, 23576], [23553, 23577, 23576], [23553, 23554, 23578], [23553, 23578, 23577], [23554, 23555, 23578], [23555, 23579, 23578], [23555, 23556, 
23580], [23555, 23580, 23579], [23556, 23557, 23580], [23557, 23581, 23580], [23557, 23558, 23582], [23557, 23582, 23581], [23558, 23559, 23582], [23559, 23583, 23582], [23559, 23560, 23584], [23559, 23584, 23583], [23560, 23561, 23584], [23561, 23585, 23584], [23561, 23562, 23586], [23561, 23586, 23585], [23562, 23563, 23586], [23563, 23587, 23586], [23563, 23564, 23588], [23563, 23588, 23587], [23564, 23565, 23588], [23565, 23589, 23588], [23565, 23566, 23590], [23565, 23590, 23589], [23566, 23567, 23590], [23567, 23591, 23590], [23567, 23568, 23592], [23567, 23592, 23591], [23568, 23569, 23592], [23569, 23593, 23592], [23569, 23570, 23594], [23569, 23594, 23593], [23570, 23571, 23594], [23571, 23595, 23594], [23571, 5805, 5934], [23571, 5934, 23595], [23572, 23573, 23597], [23572, 23597, 23596], [23573, 23574, 23597], [23574, 23598, 23597], [23574, 23575, 23599], [23574, 23599, 23598], [23575, 23576, 23599], [23576, 23600, 23599], [23576, 23577, 23601], [23576, 23601, 23600], [23577, 23578, 23601], [23578, 23602, 23601], [23578, 23579, 23603], [23578, 23603, 23602], [23579, 23580, 23603], [23580, 23604, 23603], [23580, 23581, 23605], [23580, 23605, 23604], [23581, 23582, 23605], [23582, 23606, 23605], [23582, 23583, 23607], [23582, 23607, 23606], [23583, 23584, 23607], [23584, 23608, 23607], [23584, 23585, 23609], [23584, 23609, 23608], [23585, 23586, 23609], [23586, 23610, 23609], [23586, 23587, 23611], [23586, 23611, 23610], [23587, 23588, 23611], [23588, 23612, 23611], [23588, 23589, 23613], [23588, 23613, 23612], [23589, 23590, 23613], [23590, 23614, 23613], [23590, 23591, 23615], [23590, 23615, 23614], [23591, 23592, 23615], [23592, 23616, 23615], [23592, 23593, 23617], [23592, 23617, 23616], [23593, 23594, 23617], [23594, 23618, 23617], [23594, 23595, 23619], [23594, 23619, 23618], [23595, 5934, 23619], [5934, 6063, 23619], [23596, 23597, 23620], [23597, 23621, 23620], [23597, 23598, 23622], [23597, 23622, 23621], [23598, 23599, 23622], [23599, 23623, 23622], [23599, 23600, 23624], [23599, 23624, 23623], [23600, 23601, 23624], [23601, 23625, 23624], [23601, 23602, 23626], [23601, 23626, 23625], [23602, 23603, 23626], [23603, 23627, 23626], [23603, 23604, 23628], [23603, 23628, 23627], [23604, 23605, 23628], [23605, 23629, 23628], [23605, 23606, 23630], [23605, 23630, 23629], [23606, 23607, 23630], [23607, 23631, 23630], [23607, 23608, 23632], [23607, 23632, 23631], [23608, 23609, 23632], [23609, 23633, 23632], [23609, 23610, 23634], [23609, 23634, 23633], [23610, 23611, 23634], [23611, 23635, 23634], [23611, 23612, 23636], [23611, 23636, 23635], [23612, 23613, 23636], [23613, 23637, 23636], [23613, 23614, 23638], [23613, 23638, 23637], [23614, 23615, 23638], [23615, 23639, 23638], [23615, 23616, 23640], [23615, 23640, 23639], [23616, 23617, 23640], [23617, 23641, 23640], [23617, 23618, 23642], [23617, 23642, 23641], [23618, 23619, 23642], [23619, 23643, 23642], [23619, 6063, 6190], [23619, 6190, 23643], [23620, 23621, 23645], [23620, 23645, 23644], [23621, 23622, 23645], [23622, 23646, 23645], [23622, 23623, 23647], [23622, 23647, 23646], [23623, 23624, 23647], [23624, 23648, 23647], [23624, 23625, 23649], [23624, 23649, 23648], [23625, 23626, 23649], [23626, 23650, 23649], [23626, 23627, 23651], [23626, 23651, 23650], [23627, 23628, 23651], [23628, 23652, 23651], [23628, 23629, 23653], [23628, 23653, 23652], [23629, 23630, 23653], [23630, 23654, 23653], [23630, 23631, 23655], [23630, 23655, 23654], [23631, 23632, 23655], [23632, 23656, 23655], [23632, 23633, 23657], [23632, 
23657, 23656], [23633, 23634, 23657], [23634, 23658, 23657], [23634, 23635, 23659], [23634, 23659, 23658], [23635, 23636, 23659], [23636, 23660, 23659], [23636, 23637, 23661], [23636, 23661, 23660], [23637, 23638, 23661], [23638, 23662, 23661], [23638, 23639, 23663], [23638, 23663, 23662], [23639, 23640, 23663], [23640, 23664, 23663], [23640, 23641, 23665], [23640, 23665, 23664], [23641, 23642, 23665], [23642, 23666, 23665], [23642, 23643, 23667], [23642, 23667, 23666], [23643, 6190, 23667], [6190, 6315, 23667], [23644, 23645, 23668], [23645, 23669, 23668], [23645, 23646, 23670], [23645, 23670, 23669], [23646, 23647, 23670], [23647, 23671, 23670], [23647, 23648, 23672], [23647, 23672, 23671], [23648, 23649, 23672], [23649, 23673, 23672], [23649, 23650, 23674], [23649, 23674, 23673], [23650, 23651, 23674], [23651, 23675, 23674], [23651, 23652, 23676], [23651, 23676, 23675], [23652, 23653, 23676], [23653, 23677, 23676], [23653, 23654, 23678], [23653, 23678, 23677], [23654, 23655, 23678], [23655, 23679, 23678], [23655, 23656, 23680], [23655, 23680, 23679], [23656, 23657, 23680], [23657, 23681, 23680], [23657, 23658, 23682], [23657, 23682, 23681], [23658, 23659, 23682], [23659, 23683, 23682], [23659, 23660, 23684], [23659, 23684, 23683], [23660, 23661, 23684], [23661, 23685, 23684], [23661, 23662, 23686], [23661, 23686, 23685], [23662, 23663, 23686], [23663, 23687, 23686], [23663, 23664, 23688], [23663, 23688, 23687], [23664, 23665, 23688], [23665, 23689, 23688], [23665, 23666, 23690], [23665, 23690, 23689], [23666, 23667, 23690], [23667, 23691, 23690], [23667, 6315, 6438], [23667, 6438, 23691], [23668, 23669, 23693], [23668, 23693, 23692], [23669, 23670, 23693], [23670, 23694, 23693], [23670, 23671, 23695], [23670, 23695, 23694], [23671, 23672, 23695], [23672, 23696, 23695], [23672, 23673, 23697], [23672, 23697, 23696], [23673, 23674, 23697], [23674, 23698, 23697], [23674, 23675, 23699], [23674, 23699, 23698], [23675, 23676, 23699], [23676, 23700, 23699], [23676, 23677, 23701], [23676, 23701, 23700], [23677, 23678, 23701], [23678, 23702, 23701], [23678, 23679, 23703], [23678, 23703, 23702], [23679, 23680, 23703], [23680, 23704, 23703], [23680, 23681, 23705], [23680, 23705, 23704], [23681, 23682, 23705], [23682, 23706, 23705], [23682, 23683, 23707], [23682, 23707, 23706], [23683, 23684, 23707], [23684, 23708, 23707], [23684, 23685, 23709], [23684, 23709, 23708], [23685, 23686, 23709], [23686, 23710, 23709], [23686, 23687, 23711], [23686, 23711, 23710], [23687, 23688, 23711], [23688, 23712, 23711], [23688, 23689, 23713], [23688, 23713, 23712], [23689, 23690, 23713], [23690, 23714, 23713], [23690, 23691, 23715], [23690, 23715, 23714], [23691, 6438, 23715], [6438, 6558, 23715], [23692, 23693, 23716], [23693, 23717, 23716], [23693, 23694, 23718], [23693, 23718, 23717], [23694, 23695, 23718], [23695, 23719, 23718], [23695, 23696, 23720], [23695, 23720, 23719], [23696, 23697, 23720], [23697, 23721, 23720], [23697, 23698, 23722], [23697, 23722, 23721], [23698, 23699, 23722], [23699, 23723, 23722], [23699, 23700, 23724], [23699, 23724, 23723], [23700, 23701, 23724], [23701, 23725, 23724], [23701, 23702, 23726], [23701, 23726, 23725], [23702, 23703, 23726], [23703, 23727, 23726], [23703, 23704, 23728], [23703, 23728, 23727], [23704, 23705, 23728], [23705, 23729, 23728], [23705, 23706, 23730], [23705, 23730, 23729], [23706, 23707, 23730], [23707, 23731, 23730], [23707, 23708, 23732], [23707, 23732, 23731], [23708, 23709, 23732], [23709, 23733, 23732], [23709, 23710, 23734], [23709, 23734, 23733], 
[23710, 23711, 23734], [23711, 23735, 23734], [23711, 23712, 23736], [23711, 23736, 23735], [23712, 23713, 23736], [23713, 23737, 23736], [23713, 23714, 23738], [23713, 23738, 23737], [23714, 23715, 23738], [23715, 23739, 23738], [23715, 6558, 6676], [23715, 6676, 23739], [23716, 23717, 23741], [23716, 23741, 23740], [23717, 23718, 23741], [23718, 23742, 23741], [23718, 23719, 23743], [23718, 23743, 23742], [23719, 23720, 23743], [23720, 23744, 23743], [23720, 23721, 23745], [23720, 23745, 23744], [23721, 23722, 23745], [23722, 23746, 23745], [23722, 23723, 23747], [23722, 23747, 23746], [23723, 23724, 23747], [23724, 23748, 23747], [23724, 23725, 23749], [23724, 23749, 23748], [23725, 23726, 23749], [23726, 23750, 23749], [23726, 23727, 23751], [23726, 23751, 23750], [23727, 23728, 23751], [23728, 23752, 23751], [23728, 23729, 23753], [23728, 23753, 23752], [23729, 23730, 23753], [23730, 23754, 23753], [23730, 23731, 23755], [23730, 23755, 23754], [23731, 23732, 23755], [23732, 23756, 23755], [23732, 23733, 23757], [23732, 23757, 23756], [23733, 23734, 23757], [23734, 23758, 23757], [23734, 23735, 23759], [23734, 23759, 23758], [23735, 23736, 23759], [23736, 23760, 23759], [23736, 23737, 23761], [23736, 23761, 23760], [23737, 23738, 23761], [23738, 23762, 23761], [23738, 23739, 23763], [23738, 23763, 23762], [23739, 6676, 23763], [6676, 6794, 23763], [23740, 23741, 23764], [23741, 23765, 23764], [23741, 23742, 23766], [23741, 23766, 23765], [23742, 23743, 23766], [23743, 23767, 23766], [23743, 23744, 23768], [23743, 23768, 23767], [23744, 23745, 23768], [23745, 23769, 23768], [23745, 23746, 23770], [23745, 23770, 23769], [23746, 23747, 23770], [23747, 23771, 23770], [23747, 23748, 23772], [23747, 23772, 23771], [23748, 23749, 23772], [23749, 23773, 23772], [23749, 23750, 23774], [23749, 23774, 23773], [23750, 23751, 23774], [23751, 23775, 23774], [23751, 23752, 23776], [23751, 23776, 23775], [23752, 23753, 23776], [23753, 23777, 23776], [23753, 23754, 23778], [23753, 23778, 23777], [23754, 23755, 23778], [23755, 23779, 23778], [23755, 23756, 23780], [23755, 23780, 23779], [23756, 23757, 23780], [23757, 23781, 23780], [23757, 23758, 23782], [23757, 23782, 23781], [23758, 23759, 23782], [23759, 23783, 23782], [23759, 23760, 23784], [23759, 23784, 23783], [23760, 23761, 23784], [23761, 23785, 23784], [23761, 23762, 23786], [23761, 23786, 23785], [23762, 23763, 23786], [23763, 23787, 23786], [23763, 6794, 6912], [23763, 6912, 23787], [23764, 23765, 23789], [23764, 23789, 23788], [23765, 23766, 23789], [23766, 23790, 23789], [23766, 23767, 23791], [23766, 23791, 23790], [23767, 23768, 23791], [23768, 23792, 23791], [23768, 23769, 23793], [23768, 23793, 23792], [23769, 23770, 23793], [23770, 23794, 23793], [23770, 23771, 23795], [23770, 23795, 23794], [23771, 23772, 23795], [23772, 23796, 23795], [23772, 23773, 23797], [23772, 23797, 23796], [23773, 23774, 23797], [23774, 23798, 23797], [23774, 23775, 23799], [23774, 23799, 23798], [23775, 23776, 23799], [23776, 23800, 23799], [23776, 23777, 23801], [23776, 23801, 23800], [23777, 23778, 23801], [23778, 23802, 23801], [23778, 23779, 23803], [23778, 23803, 23802], [23779, 23780, 23803], [23780, 23804, 23803], [23780, 23781, 23805], [23780, 23805, 23804], [23781, 23782, 23805], [23782, 23806, 23805], [23782, 23783, 23807], [23782, 23807, 23806], [23783, 23784, 23807], [23784, 23808, 23807], [23784, 23785, 23809], [23784, 23809, 23808], [23785, 23786, 23809], [23786, 23810, 23809], [23786, 23787, 23811], [23786, 23811, 23810], [23787, 6912, 23811], 
[face-index data, continued: this span of the added asset is a flat list of vertex-index triples, one triple per triangle. The interior triangles tile a regular grid over vertex indices roughly 23788-25434, and at the seam they are interleaved with triangles that fan out to a sparser boundary ring of vertices (6912, 7030, 7148, ..., 15397). The excerpt begins at [6912, 7030, 23811] and cuts off mid-triple at [25410, 25434, ...]]
25433], [25410, 25411, 25435], [25410, 25435, 25434], [25411, 25412, 25435], [25412, 25436, 25435], [25412, 25413, 25437], [25412, 25437, 25436], [25413, 25414, 25437], [25414, 25438, 25437], [25414, 25415, 25439], [25414, 25439, 25438], [25415, 25416, 25439], [25416, 25440, 25439], [25416, 25417, 25441], [25416, 25441, 25440], [25417, 25418, 25441], [25418, 25442, 25441], [25418, 25419, 25443], [25418, 25443, 25442], [25419, 15397, 25443], [15397, 15526, 25443], [25420, 25421, 25444], [25421, 25445, 25444], [25421, 25422, 25446], [25421, 25446, 25445], [25422, 25423, 25446], [25423, 25447, 25446], [25423, 25424, 25448], [25423, 25448, 25447], [25424, 25425, 25448], [25425, 25449, 25448], [25425, 25426, 25450], [25425, 25450, 25449], [25426, 25427, 25450], [25427, 25451, 25450], [25427, 25428, 25452], [25427, 25452, 25451], [25428, 25429, 25452], [25429, 25453, 25452], [25429, 25430, 25454], [25429, 25454, 25453], [25430, 25431, 25454], [25431, 25455, 25454], [25431, 25432, 25456], [25431, 25456, 25455], [25432, 25433, 25456], [25433, 25457, 25456], [25433, 25434, 25458], [25433, 25458, 25457], [25434, 25435, 25458], [25435, 25459, 25458], [25435, 25436, 25460], [25435, 25460, 25459], [25436, 25437, 25460], [25437, 25461, 25460], [25437, 25438, 25462], [25437, 25462, 25461], [25438, 25439, 25462], [25439, 25463, 25462], [25439, 25440, 25464], [25439, 25464, 25463], [25440, 25441, 25464], [25441, 25465, 25464], [25441, 25442, 25466], [25441, 25466, 25465], [25442, 25443, 25466], [25443, 25467, 25466], [25443, 15526, 15655], [25443, 15655, 25467], [25444, 25445, 25469], [25444, 25469, 25468], [25445, 25446, 25469], [25446, 25470, 25469], [25446, 25447, 25471], [25446, 25471, 25470], [25447, 25448, 25471], [25448, 25472, 25471], [25448, 25449, 25473], [25448, 25473, 25472], [25449, 25450, 25473], [25450, 25474, 25473], [25450, 25451, 25475], [25450, 25475, 25474], [25451, 25452, 25475], [25452, 25476, 25475], [25452, 25453, 25477], [25452, 25477, 25476], [25453, 25454, 25477], [25454, 25478, 25477], [25454, 25455, 25479], [25454, 25479, 25478], [25455, 25456, 25479], [25456, 25480, 25479], [25456, 25457, 25481], [25456, 25481, 25480], [25457, 25458, 25481], [25458, 25482, 25481], [25458, 25459, 25483], [25458, 25483, 25482], [25459, 25460, 25483], [25460, 25484, 25483], [25460, 25461, 25485], [25460, 25485, 25484], [25461, 25462, 25485], [25462, 25486, 25485], [25462, 25463, 25487], [25462, 25487, 25486], [25463, 25464, 25487], [25464, 25488, 25487], [25464, 25465, 25489], [25464, 25489, 25488], [25465, 25466, 25489], [25466, 25490, 25489], [25466, 25467, 25491], [25466, 25491, 25490], [25467, 15655, 25491], [15655, 15784, 25491], [25468, 25469, 25492], [25469, 25493, 25492], [25469, 25470, 25494], [25469, 25494, 25493], [25470, 25471, 25494], [25471, 25495, 25494], [25471, 25472, 25496], [25471, 25496, 25495], [25472, 25473, 25496], [25473, 25497, 25496], [25473, 25474, 25498], [25473, 25498, 25497], [25474, 25475, 25498], [25475, 25499, 25498], [25475, 25476, 25500], [25475, 25500, 25499], [25476, 25477, 25500], [25477, 25501, 25500], [25477, 25478, 25502], [25477, 25502, 25501], [25478, 25479, 25502], [25479, 25503, 25502], [25479, 25480, 25504], [25479, 25504, 25503], [25480, 25481, 25504], [25481, 25505, 25504], [25481, 25482, 25506], [25481, 25506, 25505], [25482, 25483, 25506], [25483, 25507, 25506], [25483, 25484, 25508], [25483, 25508, 25507], [25484, 25485, 25508], [25485, 25509, 25508], [25485, 25486, 25510], [25485, 25510, 25509], [25486, 25487, 25510], [25487, 25511, 25510], 
[25487, 25488, 25512], [25487, 25512, 25511], [25488, 25489, 25512], [25489, 25513, 25512], [25489, 25490, 25514], [25489, 25514, 25513], [25490, 25491, 25514], [25491, 25515, 25514], [25491, 15784, 15913], [25491, 15913, 25515], [25492, 25493, 25517], [25492, 25517, 25516], [25493, 25494, 25517], [25494, 25518, 25517], [25494, 25495, 25519], [25494, 25519, 25518], [25495, 25496, 25519], [25496, 25520, 25519], [25496, 25497, 25521], [25496, 25521, 25520], [25497, 25498, 25521], [25498, 25522, 25521], [25498, 25499, 25523], [25498, 25523, 25522], [25499, 25500, 25523], [25500, 25524, 25523], [25500, 25501, 25525], [25500, 25525, 25524], [25501, 25502, 25525], [25502, 25526, 25525], [25502, 25503, 25527], [25502, 25527, 25526], [25503, 25504, 25527], [25504, 25528, 25527], [25504, 25505, 25529], [25504, 25529, 25528], [25505, 25506, 25529], [25506, 25530, 25529], [25506, 25507, 25531], [25506, 25531, 25530], [25507, 25508, 25531], [25508, 25532, 25531], [25508, 25509, 25533], [25508, 25533, 25532], [25509, 25510, 25533], [25510, 25534, 25533], [25510, 25511, 25535], [25510, 25535, 25534], [25511, 25512, 25535], [25512, 25536, 25535], [25512, 25513, 25537], [25512, 25537, 25536], [25513, 25514, 25537], [25514, 25538, 25537], [25514, 25515, 25539], [25514, 25539, 25538], [25515, 15913, 25539], [15913, 16042, 25539], [25516, 25517, 22363], [25517, 22234, 22363], [25517, 25518, 22105], [25517, 22105, 22234], [25518, 25519, 22105], [25519, 21976, 22105], [25519, 25520, 21847], [25519, 21847, 21976], [25520, 25521, 21847], [25521, 21718, 21847], [25521, 25522, 21589], [25521, 21589, 21718], [25522, 25523, 21589], [25523, 21460, 21589], [25523, 25524, 21331], [25523, 21331, 21460], [25524, 25525, 21331], [25525, 21202, 21331], [25525, 25526, 21073], [25525, 21073, 21202], [25526, 25527, 21073], [25527, 20944, 21073], [25527, 25528, 20815], [25527, 20815, 20944], [25528, 25529, 20815], [25529, 20686, 20815], [25529, 25530, 20557], [25529, 20557, 20686], [25530, 25531, 20557], [25531, 20428, 20557], [25531, 25532, 20299], [25531, 20299, 20428], [25532, 25533, 20299], [25533, 20170, 20299], [25533, 25534, 20041], [25533, 20041, 20170], [25534, 25535, 20041], [25535, 19912, 20041], [25535, 25536, 19783], [25535, 19783, 19912], [25536, 25537, 19783], [25537, 19654, 19783], [25537, 25538, 19525], [25537, 19525, 19654], [25538, 25539, 19525], [25539, 19396, 19525], [25539, 16042, 19267], [25539, 19267, 19396], [128, 19266, 25540], [128, 25540, 257], [19266, 19137, 25540], [19137, 25541, 25540], [19137, 19008, 25542], [19137, 25542, 25541], [19008, 18879, 25542], [18879, 25543, 25542], [18879, 18750, 25544], [18879, 25544, 25543], [18750, 18621, 25544], [18621, 25545, 25544], [18621, 18492, 25546], [18621, 25546, 25545], [18492, 18363, 25546], [18363, 25547, 25546], [18363, 18234, 25548], [18363, 25548, 25547], [18234, 18105, 25548], [18105, 25549, 25548], [18105, 17976, 25550], [18105, 25550, 25549], [17976, 17847, 25550], [17847, 25551, 25550], [17847, 17718, 25552], [17847, 25552, 25551], [17718, 17589, 25552], [17589, 25553, 25552], [17589, 17460, 25554], [17589, 25554, 25553], [17460, 17331, 25554], [17331, 25555, 25554], [17331, 17202, 25556], [17331, 25556, 25555], [17202, 17073, 25556], [17073, 25557, 25556], [17073, 16944, 25558], [17073, 25558, 25557], [16944, 16815, 25558], [16815, 25559, 25558], [16815, 16686, 25560], [16815, 25560, 25559], [16686, 16557, 25560], [16557, 25561, 25560], [16557, 16428, 25562], [16557, 25562, 25561], [16428, 16299, 25562], [16299, 25563, 25562], [257, 25540, 386], 
[25540, 25564, 386], [25540, 25541, 25565], [25540, 25565, 25564], [25541, 25542, 25565], [25542, 25566, 25565], [25542, 25543, 25567], [25542, 25567, 25566], [25543, 25544, 25567], [25544, 25568, 25567], [25544, 25545, 25569], [25544, 25569, 25568], [25545, 25546, 25569], [25546, 25570, 25569], [25546, 25547, 25571], [25546, 25571, 25570], [25547, 25548, 25571], [25548, 25572, 25571], [25548, 25549, 25573], [25548, 25573, 25572], [25549, 25550, 25573], [25550, 25574, 25573], [25550, 25551, 25575], [25550, 25575, 25574], [25551, 25552, 25575], [25552, 25576, 25575], [25552, 25553, 25577], [25552, 25577, 25576], [25553, 25554, 25577], [25554, 25578, 25577], [25554, 25555, 25579], [25554, 25579, 25578], [25555, 25556, 25579], [25556, 25580, 25579], [25556, 25557, 25581], [25556, 25581, 25580], [25557, 25558, 25581], [25558, 25582, 25581], [25558, 25559, 25583], [25558, 25583, 25582], [25559, 25560, 25583], [25560, 25584, 25583], [25560, 25561, 25585], [25560, 25585, 25584], [25561, 25562, 25585], [25562, 25586, 25585], [25562, 25563, 25587], [25562, 25587, 25586], [386, 25564, 25588], [386, 25588, 515], [25564, 25565, 25588], [25565, 25589, 25588], [25565, 25566, 25590], [25565, 25590, 25589], [25566, 25567, 25590], [25567, 25591, 25590], [25567, 25568, 25592], [25567, 25592, 25591], [25568, 25569, 25592], [25569, 25593, 25592], [25569, 25570, 25594], [25569, 25594, 25593], [25570, 25571, 25594], [25571, 25595, 25594], [25571, 25572, 25596], [25571, 25596, 25595], [25572, 25573, 25596], [25573, 25597, 25596], [25573, 25574, 25598], [25573, 25598, 25597], [25574, 25575, 25598], [25575, 25599, 25598], [25575, 25576, 25600], [25575, 25600, 25599], [25576, 25577, 25600], [25577, 25601, 25600], [25577, 25578, 25602], [25577, 25602, 25601], [25578, 25579, 25602], [25579, 25603, 25602], [25579, 25580, 25604], [25579, 25604, 25603], [25580, 25581, 25604], [25581, 25605, 25604], [25581, 25582, 25606], [25581, 25606, 25605], [25582, 25583, 25606], [25583, 25607, 25606], [25583, 25584, 25608], [25583, 25608, 25607], [25584, 25585, 25608], [25585, 25609, 25608], [25585, 25586, 25610], [25585, 25610, 25609], [25586, 25587, 25610], [25587, 25611, 25610], [515, 25588, 644], [25588, 25612, 644], [25588, 25589, 25613], [25588, 25613, 25612], [25589, 25590, 25613], [25590, 25614, 25613], [25590, 25591, 25615], [25590, 25615, 25614], [25591, 25592, 25615], [25592, 25616, 25615], [25592, 25593, 25617], [25592, 25617, 25616], [25593, 25594, 25617], [25594, 25618, 25617], [25594, 25595, 25619], [25594, 25619, 25618], [25595, 25596, 25619], [25596, 25620, 25619], [25596, 25597, 25621], [25596, 25621, 25620], [25597, 25598, 25621], [25598, 25622, 25621], [25598, 25599, 25623], [25598, 25623, 25622], [25599, 25600, 25623], [25600, 25624, 25623], [25600, 25601, 25625], [25600, 25625, 25624], [25601, 25602, 25625], [25602, 25626, 25625], [25602, 25603, 25627], [25602, 25627, 25626], [25603, 25604, 25627], [25604, 25628, 25627], [25604, 25605, 25629], [25604, 25629, 25628], [25605, 25606, 25629], [25606, 25630, 25629], [25606, 25607, 25631], [25606, 25631, 25630], [25607, 25608, 25631], [25608, 25632, 25631], [25608, 25609, 25633], [25608, 25633, 25632], [25609, 25610, 25633], [25610, 25634, 25633], [25610, 25611, 25635], [25610, 25635, 25634], [644, 25612, 25636], [644, 25636, 773], [25612, 25613, 25636], [25613, 25637, 25636], [25613, 25614, 25638], [25613, 25638, 25637], [25614, 25615, 25638], [25615, 25639, 25638], [25615, 25616, 25640], [25615, 25640, 25639], [25616, 25617, 25640], [25617, 25641, 25640], [25617, 
25618, 25642], [25617, 25642, 25641], [25618, 25619, 25642], [25619, 25643, 25642], [25619, 25620, 25644], [25619, 25644, 25643], [25620, 25621, 25644], [25621, 25645, 25644], [25621, 25622, 25646], [25621, 25646, 25645], [25622, 25623, 25646], [25623, 25647, 25646], [25623, 25624, 25648], [25623, 25648, 25647], [25624, 25625, 25648], [25625, 25649, 25648], [25625, 25626, 25650], [25625, 25650, 25649], [25626, 25627, 25650], [25627, 25651, 25650], [25627, 25628, 25652], [25627, 25652, 25651], [25628, 25629, 25652], [25629, 25653, 25652], [25629, 25630, 25654], [25629, 25654, 25653], [25630, 25631, 25654], [25631, 25655, 25654], [25631, 25632, 25656], [25631, 25656, 25655], [25632, 25633, 25656], [25633, 25657, 25656], [25633, 25634, 25658], [25633, 25658, 25657], [25634, 25635, 25658], [25635, 25659, 25658], [773, 25636, 902], [25636, 25660, 902], [25636, 25637, 25661], [25636, 25661, 25660], [25637, 25638, 25661], [25638, 25662, 25661], [25638, 25639, 25663], [25638, 25663, 25662], [25639, 25640, 25663], [25640, 25664, 25663], [25640, 25641, 25665], [25640, 25665, 25664], [25641, 25642, 25665], [25642, 25666, 25665], [25642, 25643, 25667], [25642, 25667, 25666], [25643, 25644, 25667], [25644, 25668, 25667], [25644, 25645, 25669], [25644, 25669, 25668], [25645, 25646, 25669], [25646, 25670, 25669], [25646, 25647, 25671], [25646, 25671, 25670], [25647, 25648, 25671], [25648, 25672, 25671], [25648, 25649, 25673], [25648, 25673, 25672], [25649, 25650, 25673], [25650, 25674, 25673], [25650, 25651, 25675], [25650, 25675, 25674], [25651, 25652, 25675], [25652, 25676, 25675], [25652, 25653, 25677], [25652, 25677, 25676], [25653, 25654, 25677], [25654, 25678, 25677], [25654, 25655, 25679], [25654, 25679, 25678], [25655, 25656, 25679], [25656, 25680, 25679], [25656, 25657, 25681], [25656, 25681, 25680], [25657, 25658, 25681], [25658, 25682, 25681], [25658, 25659, 25683], [25658, 25683, 25682], [902, 25660, 25684], [902, 25684, 1031], [25660, 25661, 25684], [25661, 25685, 25684], [25661, 25662, 25686], [25661, 25686, 25685], [25662, 25663, 25686], [25663, 25687, 25686], [25663, 25664, 25688], [25663, 25688, 25687], [25664, 25665, 25688], [25665, 25689, 25688], [25665, 25666, 25690], [25665, 25690, 25689], [25666, 25667, 25690], [25667, 25691, 25690], [25667, 25668, 25692], [25667, 25692, 25691], [25668, 25669, 25692], [25669, 25693, 25692], [25669, 25670, 25694], [25669, 25694, 25693], [25670, 25671, 25694], [25671, 25695, 25694], [25671, 25672, 25696], [25671, 25696, 25695], [25672, 25673, 25696], [25673, 25697, 25696], [25673, 25674, 25698], [25673, 25698, 25697], [25674, 25675, 25698], [25675, 25699, 25698], [25675, 25676, 25700], [25675, 25700, 25699], [25676, 25677, 25700], [25677, 25701, 25700], [25677, 25678, 25702], [25677, 25702, 25701], [25678, 25679, 25702], [25679, 25703, 25702], [25679, 25680, 25704], [25679, 25704, 25703], [25680, 25681, 25704], [25681, 25705, 25704], [25681, 25682, 25706], [25681, 25706, 25705], [25682, 25683, 25706], [25683, 25707, 25706], [1031, 25684, 1160], [25684, 25708, 1160], [25684, 25685, 25709], [25684, 25709, 25708], [25685, 25686, 25709], [25686, 25710, 25709], [25686, 25687, 25711], [25686, 25711, 25710], [25687, 25688, 25711], [25688, 25712, 25711], [25688, 25689, 25713], [25688, 25713, 25712], [25689, 25690, 25713], [25690, 25714, 25713], [25690, 25691, 25715], [25690, 25715, 25714], [25691, 25692, 25715], [25692, 25716, 25715], [25692, 25693, 25717], [25692, 25717, 25716], [25693, 25694, 25717], [25694, 25718, 25717], [25694, 25695, 25719], [25694, 
25719, 25718], [25695, 25696, 25719], [25696, 25720, 25719], [25696, 25697, 25721], [25696, 25721, 25720], [25697, 25698, 25721], [25698, 25722, 25721], [25698, 25699, 25723], [25698, 25723, 25722], [25699, 25700, 25723], [25700, 25724, 25723], [25700, 25701, 25725], [25700, 25725, 25724], [25701, 25702, 25725], [25702, 25726, 25725], [25702, 25703, 25727], [25702, 25727, 25726], [25703, 25704, 25727], [25704, 25728, 25727], [25704, 25705, 25729], [25704, 25729, 25728], [25705, 25706, 25729], [25706, 25730, 25729], [25706, 25707, 25731], [25706, 25731, 25730], [1160, 25708, 25732], [1160, 25732, 1289], [25708, 25709, 25732], [25709, 25733, 25732], [25709, 25710, 25734], [25709, 25734, 25733], [25710, 25711, 25734], [25711, 25735, 25734], [25711, 25712, 25736], [25711, 25736, 25735], [25712, 25713, 25736], [25713, 25737, 25736], [25713, 25714, 25738], [25713, 25738, 25737], [25714, 25715, 25738], [25715, 25739, 25738], [25715, 25716, 25740], [25715, 25740, 25739], [25716, 25717, 25740], [25717, 25741, 25740], [25717, 25718, 25742], [25717, 25742, 25741], [25718, 25719, 25742], [25719, 25743, 25742], [25719, 25720, 25744], [25719, 25744, 25743], [25720, 25721, 25744], [25721, 25745, 25744], [25721, 25722, 25746], [25721, 25746, 25745], [25722, 25723, 25746], [25723, 25747, 25746], [25723, 25724, 25748], [25723, 25748, 25747], [25724, 25725, 25748], [25725, 25749, 25748], [25725, 25726, 25750], [25725, 25750, 25749], [25726, 25727, 25750], [25727, 25751, 25750], [25727, 25728, 25752], [25727, 25752, 25751], [25728, 25729, 25752], [25729, 25753, 25752], [25729, 25730, 25754], [25729, 25754, 25753], [25730, 25731, 25754], [25731, 25755, 25754], [1289, 25732, 1418], [25732, 25756, 1418], [25732, 25733, 25757], [25732, 25757, 25756], [25733, 25734, 25757], [25734, 25758, 25757], [25734, 25735, 25759], [25734, 25759, 25758], [25735, 25736, 25759], [25736, 25760, 25759], [25736, 25737, 25761], [25736, 25761, 25760], [25737, 25738, 25761], [25738, 25762, 25761], [25738, 25739, 25763], [25738, 25763, 25762], [25739, 25740, 25763], [25740, 25764, 25763], [25740, 25741, 25765], [25740, 25765, 25764], [25741, 25742, 25765], [25742, 25766, 25765], [25742, 25743, 25767], [25742, 25767, 25766], [25743, 25744, 25767], [25744, 25768, 25767], [25744, 25745, 25769], [25744, 25769, 25768], [25745, 25746, 25769], [25746, 25770, 25769], [25746, 25747, 25771], [25746, 25771, 25770], [25747, 25748, 25771], [25748, 25772, 25771], [25748, 25749, 25773], [25748, 25773, 25772], [25749, 25750, 25773], [25750, 25774, 25773], [25750, 25751, 25775], [25750, 25775, 25774], [25751, 25752, 25775], [25752, 25776, 25775], [25752, 25753, 25777], [25752, 25777, 25776], [25753, 25754, 25777], [25754, 25778, 25777], [25754, 25755, 25779], [25754, 25779, 25778], [1418, 25756, 25780], [1418, 25780, 1547], [25756, 25757, 25780], [25757, 25781, 25780], [25757, 25758, 25782], [25757, 25782, 25781], [25758, 25759, 25782], [25759, 25783, 25782], [25759, 25760, 25784], [25759, 25784, 25783], [25760, 25761, 25784], [25761, 25785, 25784], [25761, 25762, 25786], [25761, 25786, 25785], [25762, 25763, 25786], [25763, 25787, 25786], [25763, 25764, 25788], [25763, 25788, 25787], [25764, 25765, 25788], [25765, 25789, 25788], [25765, 25766, 25790], [25765, 25790, 25789], [25766, 25767, 25790], [25767, 25791, 25790], [25767, 25768, 25792], [25767, 25792, 25791], [25768, 25769, 25792], [25769, 25793, 25792], [25769, 25770, 25794], [25769, 25794, 25793], [25770, 25771, 25794], [25771, 25795, 25794], [25771, 25772, 25796], [25771, 25796, 25795], 
[25772, 25773, 25796], [25773, 25797, 25796], [25773, 25774, 25798], [25773, 25798, 25797], [25774, 25775, 25798], [25775, 25799, 25798], [25775, 25776, 25800], [25775, 25800, 25799], [25776, 25777, 25800], [25777, 25801, 25800], [25777, 25778, 25802], [25777, 25802, 25801], [25778, 25779, 25802], [25779, 25803, 25802], [1547, 25780, 1676], [25780, 25804, 1676], [25780, 25781, 25805], [25780, 25805, 25804], [25781, 25782, 25805], [25782, 25806, 25805], [25782, 25783, 25807], [25782, 25807, 25806], [25783, 25784, 25807], [25784, 25808, 25807], [25784, 25785, 25809], [25784, 25809, 25808], [25785, 25786, 25809], [25786, 25810, 25809], [25786, 25787, 25811], [25786, 25811, 25810], [25787, 25788, 25811], [25788, 25812, 25811], [25788, 25789, 25813], [25788, 25813, 25812], [25789, 25790, 25813], [25790, 25814, 25813], [25790, 25791, 25815], [25790, 25815, 25814], [25791, 25792, 25815], [25792, 25816, 25815], [25792, 25793, 25817], [25792, 25817, 25816], [25793, 25794, 25817], [25794, 25818, 25817], [25794, 25795, 25819], [25794, 25819, 25818], [25795, 25796, 25819], [25796, 25820, 25819], [25796, 25797, 25821], [25796, 25821, 25820], [25797, 25798, 25821], [25798, 25822, 25821], [25798, 25799, 25823], [25798, 25823, 25822], [25799, 25800, 25823], [25800, 25824, 25823], [25800, 25801, 25825], [25800, 25825, 25824], [25801, 25802, 25825], [25802, 25826, 25825], [25802, 25803, 25827], [25802, 25827, 25826], [1676, 25804, 25828], [1676, 25828, 1805], [25804, 25805, 25828], [25805, 25829, 25828], [25805, 25806, 25830], [25805, 25830, 25829], [25806, 25807, 25830], [25807, 25831, 25830], [25807, 25808, 25832], [25807, 25832, 25831], [25808, 25809, 25832], [25809, 25833, 25832], [25809, 25810, 25834], [25809, 25834, 25833], [25810, 25811, 25834], [25811, 25835, 25834], [25811, 25812, 25836], [25811, 25836, 25835], [25812, 25813, 25836], [25813, 25837, 25836], [25813, 25814, 25838], [25813, 25838, 25837], [25814, 25815, 25838], [25815, 25839, 25838], [25815, 25816, 25840], [25815, 25840, 25839], [25816, 25817, 25840], [25817, 25841, 25840], [25817, 25818, 25842], [25817, 25842, 25841], [25818, 25819, 25842], [25819, 25843, 25842], [25819, 25820, 25844], [25819, 25844, 25843], [25820, 25821, 25844], [25821, 25845, 25844], [25821, 25822, 25846], [25821, 25846, 25845], [25822, 25823, 25846], [25823, 25847, 25846], [25823, 25824, 25848], [25823, 25848, 25847], [25824, 25825, 25848], [25825, 25849, 25848], [25825, 25826, 25850], [25825, 25850, 25849], [25826, 25827, 25850], [25827, 25851, 25850], [1805, 25828, 1934], [25828, 25852, 1934], [25828, 25829, 25853], [25828, 25853, 25852], [25829, 25830, 25853], [25830, 25854, 25853], [25830, 25831, 25855], [25830, 25855, 25854], [25831, 25832, 25855], [25832, 25856, 25855], [25832, 25833, 25857], [25832, 25857, 25856], [25833, 25834, 25857], [25834, 25858, 25857], [25834, 25835, 25859], [25834, 25859, 25858], [25835, 25836, 25859], [25836, 25860, 25859], [25836, 25837, 25861], [25836, 25861, 25860], [25837, 25838, 25861], [25838, 25862, 25861], [25838, 25839, 25863], [25838, 25863, 25862], [25839, 25840, 25863], [25840, 25864, 25863], [25840, 25841, 25865], [25840, 25865, 25864], [25841, 25842, 25865], [25842, 25866, 25865], [25842, 25843, 25867], [25842, 25867, 25866], [25843, 25844, 25867], [25844, 25868, 25867], [25844, 25845, 25869], [25844, 25869, 25868], [25845, 25846, 25869], [25846, 25870, 25869], [25846, 25847, 25871], [25846, 25871, 25870], [25847, 25848, 25871], [25848, 25872, 25871], [25848, 25849, 25873], [25848, 25873, 25872], [25849, 25850, 
25873], [25850, 25874, 25873], [25850, 25851, 25875], [25850, 25875, 25874], [1934, 25852, 25876], [1934, 25876, 2063], [25852, 25853, 25876], [25853, 25877, 25876], [25853, 25854, 25878], [25853, 25878, 25877], [25854, 25855, 25878], [25855, 25879, 25878], [25855, 25856, 25880], [25855, 25880, 25879], [25856, 25857, 25880], [25857, 25881, 25880], [25857, 25858, 25882], [25857, 25882, 25881], [25858, 25859, 25882], [25859, 25883, 25882], [25859, 25860, 25884], [25859, 25884, 25883], [25860, 25861, 25884], [25861, 25885, 25884], [25861, 25862, 25886], [25861, 25886, 25885], [25862, 25863, 25886], [25863, 25887, 25886], [25863, 25864, 25888], [25863, 25888, 25887], [25864, 25865, 25888], [25865, 25889, 25888], [25865, 25866, 25890], [25865, 25890, 25889], [25866, 25867, 25890], [25867, 25891, 25890], [25867, 25868, 25892], [25867, 25892, 25891], [25868, 25869, 25892], [25869, 25893, 25892], [25869, 25870, 25894], [25869, 25894, 25893], [25870, 25871, 25894], [25871, 25895, 25894], [25871, 25872, 25896], [25871, 25896, 25895], [25872, 25873, 25896], [25873, 25897, 25896], [25873, 25874, 25898], [25873, 25898, 25897], [25874, 25875, 25898], [25875, 25899, 25898], [2063, 25876, 2192], [25876, 25900, 2192], [25876, 25877, 25901], [25876, 25901, 25900], [25877, 25878, 25901], [25878, 25902, 25901], [25878, 25879, 25903], [25878, 25903, 25902], [25879, 25880, 25903], [25880, 25904, 25903], [25880, 25881, 25905], [25880, 25905, 25904], [25881, 25882, 25905], [25882, 25906, 25905], [25882, 25883, 25907], [25882, 25907, 25906], [25883, 25884, 25907], [25884, 25908, 25907], [25884, 25885, 25909], [25884, 25909, 25908], [25885, 25886, 25909], [25886, 25910, 25909], [25886, 25887, 25911], [25886, 25911, 25910], [25887, 25888, 25911], [25888, 25912, 25911], [25888, 25889, 25913], [25888, 25913, 25912], [25889, 25890, 25913], [25890, 25914, 25913], [25890, 25891, 25915], [25890, 25915, 25914], [25891, 25892, 25915], [25892, 25916, 25915], [25892, 25893, 25917], [25892, 25917, 25916], [25893, 25894, 25917], [25894, 25918, 25917], [25894, 25895, 25919], [25894, 25919, 25918], [25895, 25896, 25919], [25896, 25920, 25919], [25896, 25897, 25921], [25896, 25921, 25920], [25897, 25898, 25921], [25898, 25922, 25921], [25898, 25899, 25923], [25898, 25923, 25922], [2192, 25900, 25924], [2192, 25924, 2321], [25900, 25901, 25924], [25901, 25925, 25924], [25901, 25902, 25926], [25901, 25926, 25925], [25902, 25903, 25926], [25903, 25927, 25926], [25903, 25904, 25928], [25903, 25928, 25927], [25904, 25905, 25928], [25905, 25929, 25928], [25905, 25906, 25930], [25905, 25930, 25929], [25906, 25907, 25930], [25907, 25931, 25930], [25907, 25908, 25932], [25907, 25932, 25931], [25908, 25909, 25932], [25909, 25933, 25932], [25909, 25910, 25934], [25909, 25934, 25933], [25910, 25911, 25934], [25911, 25935, 25934], [25911, 25912, 25936], [25911, 25936, 25935], [25912, 25913, 25936], [25913, 25937, 25936], [25913, 25914, 25938], [25913, 25938, 25937], [25914, 25915, 25938], [25915, 25939, 25938], [25915, 25916, 25940], [25915, 25940, 25939], [25916, 25917, 25940], [25917, 25941, 25940], [25917, 25918, 25942], [25917, 25942, 25941], [25918, 25919, 25942], [25919, 25943, 25942], [25919, 25920, 25944], [25919, 25944, 25943], [25920, 25921, 25944], [25921, 25945, 25944], [25921, 25922, 25946], [25921, 25946, 25945], [25922, 25923, 25946], [25923, 25947, 25946], [2321, 25924, 2450], [25924, 25948, 2450], [25924, 25925, 25949], [25924, 25949, 25948], [25925, 25926, 25949], [25926, 25950, 25949], [25926, 25927, 25951], [25926, 25951, 
25950], [25927, 25928, 25951], [25928, 25952, 25951], [25928, 25929, 25953], [25928, 25953, 25952], [25929, 25930, 25953], [25930, 25954, 25953], [25930, 25931, 25955], [25930, 25955, 25954], [25931, 25932, 25955], [25932, 25956, 25955], [25932, 25933, 25957], [25932, 25957, 25956], [25933, 25934, 25957], [25934, 25958, 25957], [25934, 25935, 25959], [25934, 25959, 25958], [25935, 25936, 25959], [25936, 25960, 25959], [25936, 25937, 25961], [25936, 25961, 25960], [25937, 25938, 25961], [25938, 25962, 25961], [25938, 25939, 25963], [25938, 25963, 25962], [25939, 25940, 25963], [25940, 25964, 25963], [25940, 25941, 25965], [25940, 25965, 25964], [25941, 25942, 25965], [25942, 25966, 25965], [25942, 25943, 25967], [25942, 25967, 25966], [25943, 25944, 25967], [25944, 25968, 25967], [25944, 25945, 25969], [25944, 25969, 25968], [25945, 25946, 25969], [25946, 25970, 25969], [25946, 25947, 25971], [25946, 25971, 25970], [2450, 25948, 25972], [2450, 25972, 2579], [25948, 25949, 25972], [25949, 25973, 25972], [25949, 25950, 25974], [25949, 25974, 25973], [25950, 25951, 25974], [25951, 25975, 25974], [25951, 25952, 25976], [25951, 25976, 25975], [25952, 25953, 25976], [25953, 25977, 25976], [25953, 25954, 25978], [25953, 25978, 25977], [25954, 25955, 25978], [25955, 25979, 25978], [25955, 25956, 25980], [25955, 25980, 25979], [25956, 25957, 25980], [25957, 25981, 25980], [25957, 25958, 25982], [25957, 25982, 25981], [25958, 25959, 25982], [25959, 25983, 25982], [25959, 25960, 25984], [25959, 25984, 25983], [25960, 25961, 25984], [25961, 25985, 25984], [25961, 25962, 25986], [25961, 25986, 25985], [25962, 25963, 25986], [25963, 25987, 25986], [25963, 25964, 25988], [25963, 25988, 25987], [25964, 25965, 25988], [25965, 25989, 25988], [25965, 25966, 25990], [25965, 25990, 25989], [25966, 25967, 25990], [25967, 25991, 25990], [25967, 25968, 25992], [25967, 25992, 25991], [25968, 25969, 25992], [25969, 25993, 25992], [25969, 25970, 25994], [25969, 25994, 25993], [25970, 25971, 25994], [25971, 25995, 25994], [2579, 25972, 2708], [25972, 25996, 2708], [25972, 25973, 25997], [25972, 25997, 25996], [25973, 25974, 25997], [25974, 25998, 25997], [25974, 25975, 25999], [25974, 25999, 25998], [25975, 25976, 25999], [25976, 26000, 25999], [25976, 25977, 26001], [25976, 26001, 26000], [25977, 25978, 26001], [25978, 26002, 26001], [25978, 25979, 26003], [25978, 26003, 26002], [25979, 25980, 26003], [25980, 26004, 26003], [25980, 25981, 26005], [25980, 26005, 26004], [25981, 25982, 26005], [25982, 26006, 26005], [25982, 25983, 26007], [25982, 26007, 26006], [25983, 25984, 26007], [25984, 26008, 26007], [25984, 25985, 26009], [25984, 26009, 26008], [25985, 25986, 26009], [25986, 26010, 26009], [25986, 25987, 26011], [25986, 26011, 26010], [25987, 25988, 26011], [25988, 26012, 26011], [25988, 25989, 26013], [25988, 26013, 26012], [25989, 25990, 26013], [25990, 26014, 26013], [25990, 25991, 26015], [25990, 26015, 26014], [25991, 25992, 26015], [25992, 26016, 26015], [25992, 25993, 26017], [25992, 26017, 26016], [25993, 25994, 26017], [25994, 26018, 26017], [25994, 25995, 26019], [25994, 26019, 26018], [2708, 25996, 26020], [2708, 26020, 2837], [25996, 25997, 26020], [25997, 26021, 26020], [25997, 25998, 26022], [25997, 26022, 26021], [25998, 25999, 26022], [25999, 26023, 26022], [25999, 26000, 26024], [25999, 26024, 26023], [26000, 26001, 26024], [26001, 26025, 26024], [26001, 26002, 26026], [26001, 26026, 26025], [26002, 26003, 26026], [26003, 26027, 26026], [26003, 26004, 26028], [26003, 26028, 26027], [26004, 
26005, 26028], [26005, 26029, 26028], [26005, 26006, 26030], [26005, 26030, 26029], [26006, 26007, 26030], [26007, 26031, 26030], [26007, 26008, 26032], [26007, 26032, 26031], [26008, 26009, 26032], [26009, 26033, 26032], [26009, 26010, 26034], [26009, 26034, 26033], [26010, 26011, 26034], [26011, 26035, 26034], [26011, 26012, 26036], [26011, 26036, 26035], [26012, 26013, 26036], [26013, 26037, 26036], [26013, 26014, 26038], [26013, 26038, 26037], [26014, 26015, 26038], [26015, 26039, 26038], [26015, 26016, 26040], [26015, 26040, 26039], [26016, 26017, 26040], [26017, 26041, 26040], [26017, 26018, 26042], [26017, 26042, 26041], [26018, 26019, 26042], [26019, 26043, 26042], [2837, 26020, 2966], [26020, 26044, 2966], [26020, 26021, 26045], [26020, 26045, 26044], [26021, 26022, 26045], [26022, 26046, 26045], [26022, 26023, 26047], [26022, 26047, 26046], [26023, 26024, 26047], [26024, 26048, 26047], [26024, 26025, 26049], [26024, 26049, 26048], [26025, 26026, 26049], [26026, 26050, 26049], [26026, 26027, 26051], [26026, 26051, 26050], [26027, 26028, 26051], [26028, 26052, 26051], [26028, 26029, 26053], [26028, 26053, 26052], [26029, 26030, 26053], [26030, 26054, 26053], [26030, 26031, 26055], [26030, 26055, 26054], [26031, 26032, 26055], [26032, 26056, 26055], [26032, 26033, 26057], [26032, 26057, 26056], [26033, 26034, 26057], [26034, 26058, 26057], [26034, 26035, 26059], [26034, 26059, 26058], [26035, 26036, 26059], [26036, 26060, 26059], [26036, 26037, 26061], [26036, 26061, 26060], [26037, 26038, 26061], [26038, 26062, 26061], [26038, 26039, 26063], [26038, 26063, 26062], [26039, 26040, 26063], [26040, 26064, 26063], [26040, 26041, 26065], [26040, 26065, 26064], [26041, 26042, 26065], [26042, 26066, 26065], [26042, 26043, 26067], [26042, 26067, 26066], [2966, 26044, 26068], [2966, 26068, 3095], [26044, 26045, 26068], [26045, 26069, 26068], [26045, 26046, 26070], [26045, 26070, 26069], [26046, 26047, 26070], [26047, 26071, 26070], [26047, 26048, 26072], [26047, 26072, 26071], [26048, 26049, 26072], [26049, 26073, 26072], [26049, 26050, 26074], [26049, 26074, 26073], [26050, 26051, 26074], [26051, 26075, 26074], [26051, 26052, 26076], [26051, 26076, 26075], [26052, 26053, 26076], [26053, 26077, 26076], [26053, 26054, 26078], [26053, 26078, 26077], [26054, 26055, 26078], [26055, 26079, 26078], [26055, 26056, 26080], [26055, 26080, 26079], [26056, 26057, 26080], [26057, 26081, 26080], [26057, 26058, 26082], [26057, 26082, 26081], [26058, 26059, 26082], [26059, 26083, 26082], [26059, 26060, 26084], [26059, 26084, 26083], [26060, 26061, 26084], [26061, 26085, 26084], [26061, 26062, 26086], [26061, 26086, 26085], [26062, 26063, 26086], [26063, 26087, 26086], [26063, 26064, 26088], [26063, 26088, 26087], [26064, 26065, 26088], [26065, 26089, 26088], [26065, 26066, 26090], [26065, 26090, 26089], [26066, 26067, 26090], [26067, 26091, 26090], [3095, 26068, 3224], [26068, 26092, 3224], [26068, 26069, 26093], [26068, 26093, 26092], [26069, 26070, 26093], [26070, 26094, 26093], [26070, 26071, 26095], [26070, 26095, 26094], [26071, 26072, 26095], [26072, 26096, 26095], [26072, 26073, 26097], [26072, 26097, 26096], [26073, 26074, 26097], [26074, 26098, 26097], [26074, 26075, 26099], [26074, 26099, 26098], [26075, 26076, 26099], [26076, 26100, 26099], [26076, 26077, 26101], [26076, 26101, 26100], [26077, 26078, 26101], [26078, 26102, 26101], [26078, 26079, 26103], [26078, 26103, 26102], [26079, 26080, 26103], [26080, 26104, 26103], [26080, 26081, 26105], [26080, 26105, 26104], [26081, 26082, 26105], 
[26082, 26106, 26105], [26082, 26083, 26107], [26082, 26107, 26106], [26083, 26084, 26107], [26084, 26108, 26107], [26084, 26085, 26109], [26084, 26109, 26108], [26085, 26086, 26109], [26086, 26110, 26109], [26086, 26087, 26111], [26086, 26111, 26110], [26087, 26088, 26111], [26088, 26112, 26111], [26088, 26089, 26113], [26088, 26113, 26112], [26089, 26090, 26113], [26090, 26114, 26113], [26090, 26091, 26115], [26090, 26115, 26114], [3224, 26092, 26116], [3224, 26116, 3353], [26092, 26093, 26116], [26093, 26117, 26116], [26093, 26094, 26118], [26093, 26118, 26117], [26094, 26095, 26118], [26095, 26119, 26118], [26095, 26096, 26120], [26095, 26120, 26119], [26096, 26097, 26120], [26097, 26121, 26120], [26097, 26098, 26122], [26097, 26122, 26121], [26098, 26099, 26122], [26099, 26123, 26122], [26099, 26100, 26124], [26099, 26124, 26123], [26100, 26101, 26124], [26101, 26125, 26124], [26101, 26102, 26126], [26101, 26126, 26125], [26102, 26103, 26126], [26103, 26127, 26126], [26103, 26104, 26128], [26103, 26128, 26127], [26104, 26105, 26128], [26105, 26129, 26128], [26105, 26106, 26130], [26105, 26130, 26129], [26106, 26107, 26130], [26107, 26131, 26130], [26107, 26108, 26132], [26107, 26132, 26131], [26108, 26109, 26132], [26109, 26133, 26132], [26109, 26110, 26134], [26109, 26134, 26133], [26110, 26111, 26134], [26111, 26135, 26134], [26111, 26112, 26136], [26111, 26136, 26135], [26112, 26113, 26136], [26113, 26137, 26136], [26113, 26114, 26138], [26113, 26138, 26137], [26114, 26115, 26138], [26115, 26139, 26138], [3353, 26116, 3482], [26116, 26140, 3482], [26116, 26117, 26141], [26116, 26141, 26140], [26117, 26118, 26141], [26118, 26142, 26141], [26118, 26119, 26143], [26118, 26143, 26142], [26119, 26120, 26143], [26120, 26144, 26143], [26120, 26121, 26145], [26120, 26145, 26144], [26121, 26122, 26145], [26122, 26146, 26145], [26122, 26123, 26147], [26122, 26147, 26146], [26123, 26124, 26147], [26124, 26148, 26147], [26124, 26125, 26149], [26124, 26149, 26148], [26125, 26126, 26149], [26126, 26150, 26149], [26126, 26127, 26151], [26126, 26151, 26150], [26127, 26128, 26151], [26128, 26152, 26151], [26128, 26129, 26153], [26128, 26153, 26152], [26129, 26130, 26153], [26130, 26154, 26153], [26130, 26131, 26155], [26130, 26155, 26154], [26131, 26132, 26155], [26132, 26156, 26155], [26132, 26133, 26157], [26132, 26157, 26156], [26133, 26134, 26157], [26134, 26158, 26157], [26134, 26135, 26159], [26134, 26159, 26158], [26135, 26136, 26159], [26136, 26160, 26159], [26136, 26137, 26161], [26136, 26161, 26160], [26137, 26138, 26161], [26138, 26162, 26161], [26138, 26139, 26163], [26138, 26163, 26162], [3482, 26140, 26164], [3482, 26164, 3611], [26140, 26141, 26164], [26141, 26165, 26164], [26141, 26142, 26166], [26141, 26166, 26165], [26142, 26143, 26166], [26143, 26167, 26166], [26143, 26144, 26168], [26143, 26168, 26167], [26144, 26145, 26168], [26145, 26169, 26168], [26145, 26146, 26170], [26145, 26170, 26169], [26146, 26147, 26170], [26147, 26171, 26170], [26147, 26148, 26172], [26147, 26172, 26171], [26148, 26149, 26172], [26149, 26173, 26172], [26149, 26150, 26174], [26149, 26174, 26173], [26150, 26151, 26174], [26151, 26175, 26174], [26151, 26152, 26176], [26151, 26176, 26175], [26152, 26153, 26176], [26153, 26177, 26176], [26153, 26154, 26178], [26153, 26178, 26177], [26154, 26155, 26178], [26155, 26179, 26178], [26155, 26156, 26180], [26155, 26180, 26179], [26156, 26157, 26180], [26157, 26181, 26180], [26157, 26158, 26182], [26157, 26182, 26181], [26158, 26159, 26182], [26159, 26183, 
26182], [26159, 26160, 26184], [26159, 26184, 26183], [26160, 26161, 26184], [26161, 26185, 26184], [26161, 26162, 26186], [26161, 26186, 26185], [26162, 26163, 26186], [26163, 26187, 26186], [3611, 26164, 3740], [26164, 26188, 3740], [26164, 26165, 26189], [26164, 26189, 26188], [26165, 26166, 26189], [26166, 26190, 26189], [26166, 26167, 26191], [26166, 26191, 26190], [26167, 26168, 26191], [26168, 26192, 26191], [26168, 26169, 26193], [26168, 26193, 26192], [26169, 26170, 26193], [26170, 26194, 26193], [26170, 26171, 26195], [26170, 26195, 26194], [26171, 26172, 26195], [26172, 26196, 26195], [26172, 26173, 26197], [26172, 26197, 26196], [26173, 26174, 26197], [26174, 26198, 26197], [26174, 26175, 26199], [26174, 26199, 26198], [26175, 26176, 26199], [26176, 26200, 26199], [26176, 26177, 26201], [26176, 26201, 26200], [26177, 26178, 26201], [26178, 26202, 26201], [26178, 26179, 26203], [26178, 26203, 26202], [26179, 26180, 26203], [26180, 26204, 26203], [26180, 26181, 26205], [26180, 26205, 26204], [26181, 26182, 26205], [26182, 26206, 26205], [26182, 26183, 26207], [26182, 26207, 26206], [26183, 26184, 26207], [26184, 26208, 26207], [26184, 26185, 26209], [26184, 26209, 26208], [26185, 26186, 26209], [26186, 26210, 26209], [26186, 26187, 26211], [26186, 26211, 26210], [3740, 26188, 26212], [3740, 26212, 3869], [26188, 26189, 26212], [26189, 26213, 26212], [26189, 26190, 26214], [26189, 26214, 26213], [26190, 26191, 26214], [26191, 26215, 26214], [26191, 26192, 26216], [26191, 26216, 26215], [26192, 26193, 26216], [26193, 26217, 26216], [26193, 26194, 26218], [26193, 26218, 26217], [26194, 26195, 26218], [26195, 26219, 26218], [26195, 26196, 26220], [26195, 26220, 26219], [26196, 26197, 26220], [26197, 26221, 26220], [26197, 26198, 26222], [26197, 26222, 26221], [26198, 26199, 26222], [26199, 26223, 26222], [26199, 26200, 26224], [26199, 26224, 26223], [26200, 26201, 26224], [26201, 26225, 26224], [26201, 26202, 26226], [26201, 26226, 26225], [26202, 26203, 26226], [26203, 26227, 26226], [26203, 26204, 26228], [26203, 26228, 26227], [26204, 26205, 26228], [26205, 26229, 26228], [26205, 26206, 26230], [26205, 26230, 26229], [26206, 26207, 26230], [26207, 26231, 26230], [26207, 26208, 26232], [26207, 26232, 26231], [26208, 26209, 26232], [26209, 26233, 26232], [26209, 26210, 26234], [26209, 26234, 26233], [26210, 26211, 26234], [26211, 26235, 26234], [3869, 26212, 3998], [26212, 26236, 3998], [26212, 26213, 26237], [26212, 26237, 26236], [26213, 26214, 26237], [26214, 26238, 26237], [26214, 26215, 26239], [26214, 26239, 26238], [26215, 26216, 26239], [26216, 26240, 26239], [26216, 26217, 26241], [26216, 26241, 26240], [26217, 26218, 26241], [26218, 26242, 26241], [26218, 26219, 26243], [26218, 26243, 26242], [26219, 26220, 26243], [26220, 26244, 26243], [26220, 26221, 26245], [26220, 26245, 26244], [26221, 26222, 26245], [26222, 26246, 26245], [26222, 26223, 26247], [26222, 26247, 26246], [26223, 26224, 26247], [26224, 26248, 26247], [26224, 26225, 26249], [26224, 26249, 26248], [26225, 26226, 26249], [26226, 26250, 26249], [26226, 26227, 26251], [26226, 26251, 26250], [26227, 26228, 26251], [26228, 26252, 26251], [26228, 26229, 26253], [26228, 26253, 26252], [26229, 26230, 26253], [26230, 26254, 26253], [26230, 26231, 26255], [26230, 26255, 26254], [26231, 26232, 26255], [26232, 26256, 26255], [26232, 26233, 26257], [26232, 26257, 26256], [26233, 26234, 26257], [26234, 26258, 26257], [26234, 26235, 26259], [26234, 26259, 26258], [3998, 26236, 26260], [3998, 26260, 4127], [26236, 26237, 
26260], [26237, 26261, 26260], [26237, 26238, 26262], [26237, 26262, 26261], [26238, 26239, 26262], [26239, 26263, 26262], [26239, 26240, 26264], [26239, 26264, 26263], [26240, 26241, 26264], [26241, 26265, 26264], [26241, 26242, 26266], [26241, 26266, 26265], [26242, 26243, 26266], [26243, 26267, 26266], [26243, 26244, 26268], [26243, 26268, 26267], [26244, 26245, 26268], [26245, 26269, 26268], [26245, 26246, 26270], [26245, 26270, 26269], [26246, 26247, 26270], [26247, 26271, 26270], [26247, 26248, 26272], [26247, 26272, 26271], [26248, 26249, 26272], [26249, 26273, 26272], [26249, 26250, 26274], [26249, 26274, 26273], [26250, 26251, 26274], [26251, 26275, 26274], [26251, 26252, 26276], [26251, 26276, 26275], [26252, 26253, 26276], [26253, 26277, 26276], [26253, 26254, 26278], [26253, 26278, 26277], [26254, 26255, 26278], [26255, 26279, 26278], [26255, 26256, 26280], [26255, 26280, 26279], [26256, 26257, 26280], [26257, 26281, 26280], [26257, 26258, 26282], [26257, 26282, 26281], [26258, 26259, 26282], [26259, 26283, 26282], [4127, 26260, 4256], [26260, 26284, 4256], [26260, 26261, 26285], [26260, 26285, 26284], [26261, 26262, 26285], [26262, 26286, 26285], [26262, 26263, 26287], [26262, 26287, 26286], [26263, 26264, 26287], [26264, 26288, 26287], [26264, 26265, 26289], [26264, 26289, 26288], [26265, 26266, 26289], [26266, 26290, 26289], [26266, 26267, 26291], [26266, 26291, 26290], [26267, 26268, 26291], [26268, 26292, 26291], [26268, 26269, 26293], [26268, 26293, 26292], [26269, 26270, 26293], [26270, 26294, 26293], [26270, 26271, 26295], [26270, 26295, 26294], [26271, 26272, 26295], [26272, 26296, 26295], [26272, 26273, 26297], [26272, 26297, 26296], [26273, 26274, 26297], [26274, 26298, 26297], [26274, 26275, 26299], [26274, 26299, 26298], [26275, 26276, 26299], [26276, 26300, 26299], [26276, 26277, 26301], [26276, 26301, 26300], [26277, 26278, 26301], [26278, 26302, 26301], [26278, 26279, 26303], [26278, 26303, 26302], [26279, 26280, 26303], [26280, 26304, 26303], [26280, 26281, 26305], [26280, 26305, 26304], [26281, 26282, 26305], [26282, 26306, 26305], [26282, 26283, 26307], [26282, 26307, 26306], [4256, 26284, 26308], [4256, 26308, 4385], [26284, 26285, 26308], [26285, 26309, 26308], [26285, 26286, 26310], [26285, 26310, 26309], [26286, 26287, 26310], [26287, 26311, 26310], [26287, 26288, 26312], [26287, 26312, 26311], [26288, 26289, 26312], [26289, 26313, 26312], [26289, 26290, 26314], [26289, 26314, 26313], [26290, 26291, 26314], [26291, 26315, 26314], [26291, 26292, 26316], [26291, 26316, 26315], [26292, 26293, 26316], [26293, 26317, 26316], [26293, 26294, 26318], [26293, 26318, 26317], [26294, 26295, 26318], [26295, 26319, 26318], [26295, 26296, 26320], [26295, 26320, 26319], [26296, 26297, 26320], [26297, 26321, 26320], [26297, 26298, 26322], [26297, 26322, 26321], [26298, 26299, 26322], [26299, 26323, 26322], [26299, 26300, 26324], [26299, 26324, 26323], [26300, 26301, 26324], [26301, 26325, 26324], [26301, 26302, 26326], [26301, 26326, 26325], [26302, 26303, 26326], [26303, 26327, 26326], [26303, 26304, 26328], [26303, 26328, 26327], [26304, 26305, 26328], [26305, 26329, 26328], [26305, 26306, 26330], [26305, 26330, 26329], [26306, 26307, 26330], [26307, 26331, 26330], [4385, 26308, 4514], [26308, 26332, 4514], [26308, 26309, 26333], [26308, 26333, 26332], [26309, 26310, 26333], [26310, 26334, 26333], [26310, 26311, 26335], [26310, 26335, 26334], [26311, 26312, 26335], [26312, 26336, 26335], [26312, 26313, 26337], [26312, 26337, 26336], [26313, 26314, 26337], [26314, 
26338, 26337], [26314, 26315, 26339], [26314, 26339, 26338], [26315, 26316, 26339], [26316, 26340, 26339], [26316, 26317, 26341], [26316, 26341, 26340], [26317, 26318, 26341], [26318, 26342, 26341], [26318, 26319, 26343], [26318, 26343, 26342], [26319, 26320, 26343], [26320, 26344, 26343], [26320, 26321, 26345], [26320, 26345, 26344], [26321, 26322, 26345], [26322, 26346, 26345], [26322, 26323, 26347], [26322, 26347, 26346], [26323, 26324, 26347], [26324, 26348, 26347], [26324, 26325, 26349], [26324, 26349, 26348], [26325, 26326, 26349], [26326, 26350, 26349], [26326, 26327, 26351], [26326, 26351, 26350], [26327, 26328, 26351], [26328, 26352, 26351], [26328, 26329, 26353], [26328, 26353, 26352], [26329, 26330, 26353], [26330, 26354, 26353], [26330, 26331, 26355], [26330, 26355, 26354], [4514, 26332, 26356], [4514, 26356, 4643], [26332, 26333, 26356], [26333, 26357, 26356], [26333, 26334, 26358], [26333, 26358, 26357], [26334, 26335, 26358], [26335, 26359, 26358], [26335, 26336, 26360], [26335, 26360, 26359], [26336, 26337, 26360], [26337, 26361, 26360], [26337, 26338, 26362], [26337, 26362, 26361], [26338, 26339, 26362], [26339, 26363, 26362], [26339, 26340, 26364], [26339, 26364, 26363], [26340, 26341, 26364], [26341, 26365, 26364], [26341, 26342, 26366], [26341, 26366, 26365], [26342, 26343, 26366], [26343, 26367, 26366], [26343, 26344, 26368], [26343, 26368, 26367], [26344, 26345, 26368], [26345, 26369, 26368], [26345, 26346, 26370], [26345, 26370, 26369], [26346, 26347, 26370], [26347, 26371, 26370], [26347, 26348, 26372], [26347, 26372, 26371], [26348, 26349, 26372], [26349, 26373, 26372], [26349, 26350, 26374], [26349, 26374, 26373], [26350, 26351, 26374], [26351, 26375, 26374], [26351, 26352, 26376], [26351, 26376, 26375], [26352, 26353, 26376], [26353, 26377, 26376], [26353, 26354, 26378], [26353, 26378, 26377], [26354, 26355, 26378], [26355, 26379, 26378], [4643, 26356, 4772], [26356, 26380, 4772], [26356, 26357, 26381], [26356, 26381, 26380], [26357, 26358, 26381], [26358, 26382, 26381], [26358, 26359, 26383], [26358, 26383, 26382], [26359, 26360, 26383], [26360, 26384, 26383], [26360, 26361, 26385], [26360, 26385, 26384], [26361, 26362, 26385], [26362, 26386, 26385], [26362, 26363, 26387], [26362, 26387, 26386], [26363, 26364, 26387], [26364, 26388, 26387], [26364, 26365, 26389], [26364, 26389, 26388], [26365, 26366, 26389], [26366, 26390, 26389], [26366, 26367, 26391], [26366, 26391, 26390], [26367, 26368, 26391], [26368, 26392, 26391], [26368, 26369, 26393], [26368, 26393, 26392], [26369, 26370, 26393], [26370, 26394, 26393], [26370, 26371, 26395], [26370, 26395, 26394], [26371, 26372, 26395], [26372, 26396, 26395], [26372, 26373, 26397], [26372, 26397, 26396], [26373, 26374, 26397], [26374, 26398, 26397], [26374, 26375, 26399], [26374, 26399, 26398], [26375, 26376, 26399], [26376, 26400, 26399], [26376, 26377, 26401], [26376, 26401, 26400], [26377, 26378, 26401], [26378, 26402, 26401], [26378, 26379, 26403], [26378, 26403, 26402], [4772, 26380, 26404], [4772, 26404, 4901], [26380, 26381, 26404], [26381, 26405, 26404], [26381, 26382, 26406], [26381, 26406, 26405], [26382, 26383, 26406], [26383, 26407, 26406], [26383, 26384, 26408], [26383, 26408, 26407], [26384, 26385, 26408], [26385, 26409, 26408], [26385, 26386, 26410], [26385, 26410, 26409], [26386, 26387, 26410], [26387, 26411, 26410], [26387, 26388, 26412], [26387, 26412, 26411], [26388, 26389, 26412], [26389, 26413, 26412], [26389, 26390, 26414], [26389, 26414, 26413], [26390, 26391, 26414], [26391, 26415, 26414], 
[26391, 26392, 26416], [26391, 26416, 26415], [26392, 26393, 26416], [26393, 26417, 26416], [26393, 26394, 26418], [26393, 26418, 26417], [26394, 26395, 26418], [26395, 26419, 26418], [26395, 26396, 26420], [26395, 26420, 26419], [26396, 26397, 26420], [26397, 26421, 26420], [26397, 26398, 26422], [26397, 26422, 26421], [26398, 26399, 26422], [26399, 26423, 26422], [26399, 26400, 26424], [26399, 26424, 26423], [26400, 26401, 26424], [26401, 26425, 26424], [26401, 26402, 26426], [26401, 26426, 26425], [26402, 26403, 26426], [26403, 26427, 26426], [4901, 26404, 5030], [26404, 26428, 5030], [26404, 26405, 26429], [26404, 26429, 26428], [26405, 26406, 26429], [26406, 26430, 26429], [26406, 26407, 26431], [26406, 26431, 26430], [26407, 26408, 26431], [26408, 26432, 26431], [26408, 26409, 26433], [26408, 26433, 26432], [26409, 26410, 26433], [26410, 26434, 26433], [26410, 26411, 26435], [26410, 26435, 26434], [26411, 26412, 26435], [26412, 26436, 26435], [26412, 26413, 26437], [26412, 26437, 26436], [26413, 26414, 26437], [26414, 26438, 26437], [26414, 26415, 26439], [26414, 26439, 26438], [26415, 26416, 26439], [26416, 26440, 26439], [26416, 26417, 26441], [26416, 26441, 26440], [26417, 26418, 26441], [26418, 26442, 26441], [26418, 26419, 26443], [26418, 26443, 26442], [26419, 26420, 26443], [26420, 26444, 26443], [26420, 26421, 26445], [26420, 26445, 26444], [26421, 26422, 26445], [26422, 26446, 26445], [26422, 26423, 26447], [26422, 26447, 26446], [26423, 26424, 26447], [26424, 26448, 26447], [26424, 26425, 26449], [26424, 26449, 26448], [26425, 26426, 26449], [26426, 26450, 26449], [26426, 26427, 26451], [26426, 26451, 26450], [5030, 26428, 26452], [5030, 26452, 5159], [26428, 26429, 26452], [26429, 26453, 26452], [26429, 26430, 26454], [26429, 26454, 26453], [26430, 26431, 26454], [26431, 26455, 26454], [26431, 26432, 26456], [26431, 26456, 26455], [26432, 26433, 26456], [26433, 26457, 26456], [26433, 26434, 26458], [26433, 26458, 26457], [26434, 26435, 26458], [26435, 26459, 26458], [26435, 26436, 26460], [26435, 26460, 26459], [26436, 26437, 26460], [26437, 26461, 26460], [26437, 26438, 26462], [26437, 26462, 26461], [26438, 26439, 26462], [26439, 26463, 26462], [26439, 26440, 26464], [26439, 26464, 26463], [26440, 26441, 26464], [26441, 26465, 26464], [26441, 26442, 26466], [26441, 26466, 26465], [26442, 26443, 26466], [26443, 26467, 26466], [26443, 26444, 26468], [26443, 26468, 26467], [26444, 26445, 26468], [26445, 26469, 26468], [26445, 26446, 26470], [26445, 26470, 26469], [26446, 26447, 26470], [26447, 26471, 26470], [26447, 26448, 26472], [26447, 26472, 26471], [26448, 26449, 26472], [26449, 26473, 26472], [26449, 26450, 26474], [26449, 26474, 26473], [26450, 26451, 26474], [26451, 26475, 26474], [5159, 26452, 5288], [26452, 26476, 5288], [26452, 26453, 26477], [26452, 26477, 26476], [26453, 26454, 26477], [26454, 26478, 26477], [26454, 26455, 26479], [26454, 26479, 26478], [26455, 26456, 26479], [26456, 26480, 26479], [26456, 26457, 26481], [26456, 26481, 26480], [26457, 26458, 26481], [26458, 26482, 26481], [26458, 26459, 26483], [26458, 26483, 26482], [26459, 26460, 26483], [26460, 26484, 26483], [26460, 26461, 26485], [26460, 26485, 26484], [26461, 26462, 26485], [26462, 26486, 26485], [26462, 26463, 26487], [26462, 26487, 26486], [26463, 26464, 26487], [26464, 26488, 26487], [26464, 26465, 26489], [26464, 26489, 26488], [26465, 26466, 26489], [26466, 26490, 26489], [26466, 26467, 26491], [26466, 26491, 26490], [26467, 26468, 26491], [26468, 26492, 26491], [26468, 26469, 
26493], [26468, 26493, 26492], [26469, 26470, 26493], [26470, 26494, 26493], [26470, 26471, 26495], [26470, 26495, 26494], [26471, 26472, 26495], [26472, 26496, 26495], [26472, 26473, 26497], [26472, 26497, 26496], [26473, 26474, 26497], [26474, 26498, 26497], [26474, 26475, 26499], [26474, 26499, 26498], [5288, 26476, 26500], [5288, 26500, 5417], [26476, 26477, 26500], [26477, 26501, 26500], [26477, 26478, 26502], [26477, 26502, 26501], [26478, 26479, 26502], [26479, 26503, 26502], [26479, 26480, 26504], [26479, 26504, 26503], [26480, 26481, 26504], [26481, 26505, 26504], [26481, 26482, 26506], [26481, 26506, 26505], [26482, 26483, 26506], [26483, 26507, 26506], [26483, 26484, 26508], [26483, 26508, 26507], [26484, 26485, 26508], [26485, 26509, 26508], [26485, 26486, 26510], [26485, 26510, 26509], [26486, 26487, 26510], [26487, 26511, 26510], [26487, 26488, 26512], [26487, 26512, 26511], [26488, 26489, 26512], [26489, 26513, 26512], [26489, 26490, 26514], [26489, 26514, 26513], [26490, 26491, 26514], [26491, 26515, 26514], [26491, 26492, 26516], [26491, 26516, 26515], [26492, 26493, 26516], [26493, 26517, 26516], [26493, 26494, 26518], [26493, 26518, 26517], [26494, 26495, 26518], [26495, 26519, 26518], [26495, 26496, 26520], [26495, 26520, 26519], [26496, 26497, 26520], [26497, 26521, 26520], [26497, 26498, 26522], [26497, 26522, 26521], [26498, 26499, 26522], [26499, 26523, 26522], [5417, 26500, 5546], [26500, 26524, 5546], [26500, 26501, 26525], [26500, 26525, 26524], [26501, 26502, 26525], [26502, 26526, 26525], [26502, 26503, 26527], [26502, 26527, 26526], [26503, 26504, 26527], [26504, 26528, 26527], [26504, 26505, 26529], [26504, 26529, 26528], [26505, 26506, 26529], [26506, 26530, 26529], [26506, 26507, 26531], [26506, 26531, 26530], [26507, 26508, 26531], [26508, 26532, 26531], [26508, 26509, 26533], [26508, 26533, 26532], [26509, 26510, 26533], [26510, 26534, 26533], [26510, 26511, 26535], [26510, 26535, 26534], [26511, 26512, 26535], [26512, 26536, 26535], [26512, 26513, 26537], [26512, 26537, 26536], [26513, 26514, 26537], [26514, 26538, 26537], [26514, 26515, 26539], [26514, 26539, 26538], [26515, 26516, 26539], [26516, 26540, 26539], [26516, 26517, 26541], [26516, 26541, 26540], [26517, 26518, 26541], [26518, 26542, 26541], [26518, 26519, 26543], [26518, 26543, 26542], [26519, 26520, 26543], [26520, 26544, 26543], [26520, 26521, 26545], [26520, 26545, 26544], [26521, 26522, 26545], [26522, 26546, 26545], [26522, 26523, 26547], [26522, 26547, 26546], [5546, 26524, 26548], [5546, 26548, 5675], [26524, 26525, 26548], [26525, 26549, 26548], [26525, 26526, 26550], [26525, 26550, 26549], [26526, 26527, 26550], [26527, 26551, 26550], [26527, 26528, 26552], [26527, 26552, 26551], [26528, 26529, 26552], [26529, 26553, 26552], [26529, 26530, 26554], [26529, 26554, 26553], [26530, 26531, 26554], [26531, 26555, 26554], [26531, 26532, 26556], [26531, 26556, 26555], [26532, 26533, 26556], [26533, 26557, 26556], [26533, 26534, 26558], [26533, 26558, 26557], [26534, 26535, 26558], [26535, 26559, 26558], [26535, 26536, 26560], [26535, 26560, 26559], [26536, 26537, 26560], [26537, 26561, 26560], [26537, 26538, 26562], [26537, 26562, 26561], [26538, 26539, 26562], [26539, 26563, 26562], [26539, 26540, 26564], [26539, 26564, 26563], [26540, 26541, 26564], [26541, 26565, 26564], [26541, 26542, 26566], [26541, 26566, 26565], [26542, 26543, 26566], [26543, 26567, 26566], [26543, 26544, 26568], [26543, 26568, 26567], [26544, 26545, 26568], [26545, 26569, 26568], [26545, 26546, 26570], [26545, 
26570, 26569], [26546, 26547, 26570], [26547, 26571, 26570], [5675, 26548, 5804], [26548, 26572, 5804], [26548, 26549, 26573], [26548, 26573, 26572], [26549, 26550, 26573], [26550, 26574, 26573], [26550, 26551, 26575], [26550, 26575, 26574], [26551, 26552, 26575], [26552, 26576, 26575], [26552, 26553, 26577], [26552, 26577, 26576], [26553, 26554, 26577], [26554, 26578, 26577], [26554, 26555, 26579], [26554, 26579, 26578], [26555, 26556, 26579], [26556, 26580, 26579], [26556, 26557, 26581], [26556, 26581, 26580], [26557, 26558, 26581], [26558, 26582, 26581], [26558, 26559, 26583], [26558, 26583, 26582], [26559, 26560, 26583], [26560, 26584, 26583], [26560, 26561, 26585], [26560, 26585, 26584], [26561, 26562, 26585], [26562, 26586, 26585], [26562, 26563, 26587], [26562, 26587, 26586], [26563, 26564, 26587], [26564, 26588, 26587], [26564, 26565, 26589], [26564, 26589, 26588], [26565, 26566, 26589], [26566, 26590, 26589], [26566, 26567, 26591], [26566, 26591, 26590], [26567, 26568, 26591], [26568, 26592, 26591], [26568, 26569, 26593], [26568, 26593, 26592], [26569, 26570, 26593], [26570, 26594, 26593], [26570, 26571, 26595], [26570, 26595, 26594], [5804, 26572, 26596], [5804, 26596, 5933], [26572, 26573, 26596], [26573, 26597, 26596], [26573, 26574, 26598], [26573, 26598, 26597], [26574, 26575, 26598], [26575, 26599, 26598], [26575, 26576, 26600], [26575, 26600, 26599], [26576, 26577, 26600], [26577, 26601, 26600], [26577, 26578, 26602], [26577, 26602, 26601], [26578, 26579, 26602], [26579, 26603, 26602], [26579, 26580, 26604], [26579, 26604, 26603], [26580, 26581, 26604], [26581, 26605, 26604], [26581, 26582, 26606], [26581, 26606, 26605], [26582, 26583, 26606], [26583, 26607, 26606], [26583, 26584, 26608], [26583, 26608, 26607], [26584, 26585, 26608], [26585, 26609, 26608], [26585, 26586, 26610], [26585, 26610, 26609], [26586, 26587, 26610], [26587, 26611, 26610], [26587, 26588, 26612], [26587, 26612, 26611], [26588, 26589, 26612], [26589, 26613, 26612], [26589, 26590, 26614], [26589, 26614, 26613], [26590, 26591, 26614], [26591, 26615, 26614], [26591, 26592, 26616], [26591, 26616, 26615], [26592, 26593, 26616], [26593, 26617, 26616], [26593, 26594, 26618], [26593, 26618, 26617], [26594, 26595, 26618], [26595, 26619, 26618], [5933, 26596, 6062], [26596, 26620, 6062], [26596, 26597, 26621], [26596, 26621, 26620], [26597, 26598, 26621], [26598, 26622, 26621], [26598, 26599, 26623], [26598, 26623, 26622], [26599, 26600, 26623], [26600, 26624, 26623], [26600, 26601, 26625], [26600, 26625, 26624], [26601, 26602, 26625], [26602, 26626, 26625], [26602, 26603, 26627], [26602, 26627, 26626], [26603, 26604, 26627], [26604, 26628, 26627], [26604, 26605, 26629], [26604, 26629, 26628], [26605, 26606, 26629], [26606, 26630, 26629], [26606, 26607, 26631], [26606, 26631, 26630], [26607, 26608, 26631], [26608, 26632, 26631], [26608, 26609, 26633], [26608, 26633, 26632], [26609, 26610, 26633], [26610, 26634, 26633], [26610, 26611, 26635], [26610, 26635, 26634], [26611, 26612, 26635], [26612, 26636, 26635], [26612, 26613, 26637], [26612, 26637, 26636], [26613, 26614, 26637], [26614, 26638, 26637], [26614, 26615, 26639], [26614, 26639, 26638], [26615, 26616, 26639], [26616, 26640, 26639], [26616, 26617, 26641], [26616, 26641, 26640], [26617, 26618, 26641], [26618, 26642, 26641], [26618, 26619, 26643], [26618, 26643, 26642], [6062, 26620, 26644], [6062, 26644, 6189], [26620, 26621, 26644], [26621, 26645, 26644], [26621, 26622, 26646], [26621, 26646, 26645], [26622, 26623, 26646], [26623, 26647, 26646], [26623, 
26624, 26648], [26623, 26648, 26647], [26624, 26625, 26648], [26625, 26649, 26648], [26625, 26626, 26650], [26625, 26650, 26649], [26626, 26627, 26650], [26627, 26651, 26650], [26627, 26628, 26652], [26627, 26652, 26651], [26628, 26629, 26652], [26629, 26653, 26652], [26629, 26630, 26654], [26629, 26654, 26653], [26630, 26631, 26654], [26631, 26655, 26654], [26631, 26632, 26656], [26631, 26656, 26655], [26632, 26633, 26656], [26633, 26657, 26656], [26633, 26634, 26658], [26633, 26658, 26657], [26634, 26635, 26658], [26635, 26659, 26658], [26635, 26636, 26660], [26635, 26660, 26659], [26636, 26637, 26660], [26637, 26661, 26660], [26637, 26638, 26662], [26637, 26662, 26661], [26638, 26639, 26662], [26639, 26663, 26662], [26639, 26640, 26664], [26639, 26664, 26663], [26640, 26641, 26664], [26641, 26665, 26664], [26641, 26642, 26666], [26641, 26666, 26665], [26642, 26643, 26666], [26643, 26667, 26666], [6189, 26644, 6314], [26644, 26668, 6314], [26644, 26645, 26669], [26644, 26669, 26668], [26645, 26646, 26669], [26646, 26670, 26669], [26646, 26647, 26671], [26646, 26671, 26670], [26647, 26648, 26671], [26648, 26672, 26671], [26648, 26649, 26673], [26648, 26673, 26672], [26649, 26650, 26673], [26650, 26674, 26673], [26650, 26651, 26675], [26650, 26675, 26674], [26651, 26652, 26675], [26652, 26676, 26675], [26652, 26653, 26677], [26652, 26677, 26676], [26653, 26654, 26677], [26654, 26678, 26677], [26654, 26655, 26679], [26654, 26679, 26678], [26655, 26656, 26679], [26656, 26680, 26679], [26656, 26657, 26681], [26656, 26681, 26680], [26657, 26658, 26681], [26658, 26682, 26681], [26658, 26659, 26683], [26658, 26683, 26682], [26659, 26660, 26683], [26660, 26684, 26683], [26660, 26661, 26685], [26660, 26685, 26684], [26661, 26662, 26685], [26662, 26686, 26685], [26662, 26663, 26687], [26662, 26687, 26686], [26663, 26664, 26687], [26664, 26688, 26687], [26664, 26665, 26689], [26664, 26689, 26688], [26665, 26666, 26689], [26666, 26690, 26689], [26666, 26667, 26691], [26666, 26691, 26690], [6314, 26668, 26692], [6314, 26692, 6437], [26668, 26669, 26692], [26669, 26693, 26692], [26669, 26670, 26694], [26669, 26694, 26693], [26670, 26671, 26694], [26671, 26695, 26694], [26671, 26672, 26696], [26671, 26696, 26695], [26672, 26673, 26696], [26673, 26697, 26696], [26673, 26674, 26698], [26673, 26698, 26697], [26674, 26675, 26698], [26675, 26699, 26698], [26675, 26676, 26700], [26675, 26700, 26699], [26676, 26677, 26700], [26677, 26701, 26700], [26677, 26678, 26702], [26677, 26702, 26701], [26678, 26679, 26702], [26679, 26703, 26702], [26679, 26680, 26704], [26679, 26704, 26703], [26680, 26681, 26704], [26681, 26705, 26704], [26681, 26682, 26706], [26681, 26706, 26705], [26682, 26683, 26706], [26683, 26707, 26706], [26683, 26684, 26708], [26683, 26708, 26707], [26684, 26685, 26708], [26685, 26709, 26708], [26685, 26686, 26710], [26685, 26710, 26709], [26686, 26687, 26710], [26687, 26711, 26710], [26687, 26688, 26712], [26687, 26712, 26711], [26688, 26689, 26712], [26689, 26713, 26712], [26689, 26690, 26714], [26689, 26714, 26713], [26690, 26691, 26714], [26691, 26715, 26714], [6437, 26692, 6557], [26692, 26716, 6557], [26692, 26693, 26717], [26692, 26717, 26716], [26693, 26694, 26717], [26694, 26718, 26717], [26694, 26695, 26719], [26694, 26719, 26718], [26695, 26696, 26719], [26696, 26720, 26719], [26696, 26697, 26721], [26696, 26721, 26720], [26697, 26698, 26721], [26698, 26722, 26721], [26698, 26699, 26723], [26698, 26723, 26722], [26699, 26700, 26723], [26700, 26724, 26723], [26700, 26701, 26725], 
[26700, 26725, 26724], [26701, 26702, 26725], [26702, 26726, 26725], [26702, 26703, 26727], [26702, 26727, 26726], [26703, 26704, 26727], [26704, 26728, 26727], [26704, 26705, 26729], [26704, 26729, 26728], [26705, 26706, 26729], [26706, 26730, 26729], [26706, 26707, 26731], [26706, 26731, 26730], [26707, 26708, 26731], [26708, 26732, 26731], [26708, 26709, 26733], [26708, 26733, 26732], [26709, 26710, 26733], [26710, 26734, 26733], [26710, 26711, 26735], [26710, 26735, 26734], [26711, 26712, 26735], [26712, 26736, 26735], [26712, 26713, 26737], [26712, 26737, 26736], [26713, 26714, 26737], [26714, 26738, 26737], [26714, 26715, 26739], [26714, 26739, 26738], [6557, 26716, 26740], [6557, 26740, 6675], [26716, 26717, 26740], [26717, 26741, 26740], [26717, 26718, 26742], [26717, 26742, 26741], [26718, 26719, 26742], [26719, 26743, 26742], [26719, 26720, 26744], [26719, 26744, 26743], [26720, 26721, 26744], [26721, 26745, 26744], [26721, 26722, 26746], [26721, 26746, 26745], [26722, 26723, 26746], [26723, 26747, 26746], [26723, 26724, 26748], [26723, 26748, 26747], [26724, 26725, 26748], [26725, 26749, 26748], [26725, 26726, 26750], [26725, 26750, 26749], [26726, 26727, 26750], [26727, 26751, 26750], [26727, 26728, 26752], [26727, 26752, 26751], [26728, 26729, 26752], [26729, 26753, 26752], [26729, 26730, 26754], [26729, 26754, 26753], [26730, 26731, 26754], [26731, 26755, 26754], [26731, 26732, 26756], [26731, 26756, 26755], [26732, 26733, 26756], [26733, 26757, 26756], [26733, 26734, 26758], [26733, 26758, 26757], [26734, 26735, 26758], [26735, 26759, 26758], [26735, 26736, 26760], [26735, 26760, 26759], [26736, 26737, 26760], [26737, 26761, 26760], [26737, 26738, 26762], [26737, 26762, 26761], [26738, 26739, 26762], [26739, 26763, 26762], [6675, 26740, 6793], [26740, 26764, 6793], [26740, 26741, 26765], [26740, 26765, 26764], [26741, 26742, 26765], [26742, 26766, 26765], [26742, 26743, 26767], [26742, 26767, 26766], [26743, 26744, 26767], [26744, 26768, 26767], [26744, 26745, 26769], [26744, 26769, 26768], [26745, 26746, 26769], [26746, 26770, 26769], [26746, 26747, 26771], [26746, 26771, 26770], [26747, 26748, 26771], [26748, 26772, 26771], [26748, 26749, 26773], [26748, 26773, 26772], [26749, 26750, 26773], [26750, 26774, 26773], [26750, 26751, 26775], [26750, 26775, 26774], [26751, 26752, 26775], [26752, 26776, 26775], [26752, 26753, 26777], [26752, 26777, 26776], [26753, 26754, 26777], [26754, 26778, 26777], [26754, 26755, 26779], [26754, 26779, 26778], [26755, 26756, 26779], [26756, 26780, 26779], [26756, 26757, 26781], [26756, 26781, 26780], [26757, 26758, 26781], [26758, 26782, 26781], [26758, 26759, 26783], [26758, 26783, 26782], [26759, 26760, 26783], [26760, 26784, 26783], [26760, 26761, 26785], [26760, 26785, 26784], [26761, 26762, 26785], [26762, 26786, 26785], [26762, 26763, 26787], [26762, 26787, 26786], [6793, 26764, 26788], [6793, 26788, 6911], [26764, 26765, 26788], [26765, 26789, 26788], [26765, 26766, 26790], [26765, 26790, 26789], [26766, 26767, 26790], [26767, 26791, 26790], [26767, 26768, 26792], [26767, 26792, 26791], [26768, 26769, 26792], [26769, 26793, 26792], [26769, 26770, 26794], [26769, 26794, 26793], [26770, 26771, 26794], [26771, 26795, 26794], [26771, 26772, 26796], [26771, 26796, 26795], [26772, 26773, 26796], [26773, 26797, 26796], [26773, 26774, 26798], [26773, 26798, 26797], [26774, 26775, 26798], [26775, 26799, 26798], [26775, 26776, 26800], [26775, 26800, 26799], [26776, 26777, 26800], [26777, 26801, 26800], [26777, 26778, 26802], [26777, 26802, 
26801], [26778, 26779, 26802], [26779, 26803, 26802], [26779, 26780, 26804], [26779, 26804, 26803], [26780, 26781, 26804], [26781, 26805, 26804], [26781, 26782, 26806], [26781, 26806, 26805], [26782, 26783, 26806], [26783, 26807, 26806], [26783, 26784, 26808], [26783, 26808, 26807], [26784, 26785, 26808], [26785, 26809, 26808], [26785, 26786, 26810], [26785, 26810, 26809], [26786, 26787, 26810], [26787, 26811, 26810], [6911, 26788, 7029], [26788, 26812, 7029], [26788, 26789, 26813], [26788, 26813, 26812], [26789, 26790, 26813], [26790, 26814, 26813], [26790, 26791, 26815], [26790, 26815, 26814], [26791, 26792, 26815], [26792, 26816, 26815], [26792, 26793, 26817], [26792, 26817, 26816], [26793, 26794, 26817], [26794, 26818, 26817], [26794, 26795, 26819], [26794, 26819, 26818], [26795, 26796, 26819], [26796, 26820, 26819], [26796, 26797, 26821], [26796, 26821, 26820], [26797, 26798, 26821], [26798, 26822, 26821], [26798, 26799, 26823], [26798, 26823, 26822], [26799, 26800, 26823], [26800, 26824, 26823], [26800, 26801, 26825], [26800, 26825, 26824], [26801, 26802, 26825], [26802, 26826, 26825], [26802, 26803, 26827], [26802, 26827, 26826], [26803, 26804, 26827], [26804, 26828, 26827], [26804, 26805, 26829], [26804, 26829, 26828], [26805, 26806, 26829], [26806, 26830, 26829], [26806, 26807, 26831], [26806, 26831, 26830], [26807, 26808, 26831], [26808, 26832, 26831], [26808, 26809, 26833], [26808, 26833, 26832], [26809, 26810, 26833], [26810, 26834, 26833], [26810, 26811, 26835], [26810, 26835, 26834], [7029, 26812, 26836], [7029, 26836, 7147], [26812, 26813, 26836], [26813, 26837, 26836], [26813, 26814, 26838], [26813, 26838, 26837], [26814, 26815, 26838], [26815, 26839, 26838], [26815, 26816, 26840], [26815, 26840, 26839], [26816, 26817, 26840], [26817, 26841, 26840], [26817, 26818, 26842], [26817, 26842, 26841], [26818, 26819, 26842], [26819, 26843, 26842], [26819, 26820, 26844], [26819, 26844, 26843], [26820, 26821, 26844], [26821, 26845, 26844], [26821, 26822, 26846], [26821, 26846, 26845], [26822, 26823, 26846], [26823, 26847, 26846], [26823, 26824, 26848], [26823, 26848, 26847], [26824, 26825, 26848], [26825, 26849, 26848], [26825, 26826, 26850], [26825, 26850, 26849], [26826, 26827, 26850], [26827, 26851, 26850], [26827, 26828, 26852], [26827, 26852, 26851], [26828, 26829, 26852], [26829, 26853, 26852], [26829, 26830, 26854], [26829, 26854, 26853], [26830, 26831, 26854], [26831, 26855, 26854], [26831, 26832, 26856], [26831, 26856, 26855], [26832, 26833, 26856], [26833, 26857, 26856], [26833, 26834, 26858], [26833, 26858, 26857], [26834, 26835, 26858], [26835, 26859, 26858], [7147, 26836, 7265], [26836, 26860, 7265], [26836, 26837, 26861], [26836, 26861, 26860], [26837, 26838, 26861], [26838, 26862, 26861], [26838, 26839, 26863], [26838, 26863, 26862], [26839, 26840, 26863], [26840, 26864, 26863], [26840, 26841, 26865], [26840, 26865, 26864], [26841, 26842, 26865], [26842, 26866, 26865], [26842, 26843, 26867], [26842, 26867, 26866], [26843, 26844, 26867], [26844, 26868, 26867], [26844, 26845, 26869], [26844, 26869, 26868], [26845, 26846, 26869], [26846, 26870, 26869], [26846, 26847, 26871], [26846, 26871, 26870], [26847, 26848, 26871], [26848, 26872, 26871], [26848, 26849, 26873], [26848, 26873, 26872], [26849, 26850, 26873], [26850, 26874, 26873], [26850, 26851, 26875], [26850, 26875, 26874], [26851, 26852, 26875], [26852, 26876, 26875], [26852, 26853, 26877], [26852, 26877, 26876], [26853, 26854, 26877], [26854, 26878, 26877], [26854, 26855, 26879], [26854, 26879, 26878], [26855, 
26856, 26879], [26856, 26880, 26879], [26856, 26857, 26881], [26856, 26881, 26880], [26857, 26858, 26881], [26858, 26882, 26881], [26858, 26859, 26883], [26858, 26883, 26882], [7265, 26860, 26884], [7265, 26884, 7383], [26860, 26861, 26884], [26861, 26885, 26884], [26861, 26862, 26886], [26861, 26886, 26885], [26862, 26863, 26886], [26863, 26887, 26886], [26863, 26864, 26888], [26863, 26888, 26887], [26864, 26865, 26888], [26865, 26889, 26888], [26865, 26866, 26890], [26865, 26890, 26889], [26866, 26867, 26890], [26867, 26891, 26890], [26867, 26868, 26892], [26867, 26892, 26891], [26868, 26869, 26892], [26869, 26893, 26892], [26869, 26870, 26894], [26869, 26894, 26893], [26870, 26871, 26894], [26871, 26895, 26894], [26871, 26872, 26896], [26871, 26896, 26895], [26872, 26873, 26896], [26873, 26897, 26896], [26873, 26874, 26898], [26873, 26898, 26897], [26874, 26875, 26898], [26875, 26899, 26898], [26875, 26876, 26900], [26875, 26900, 26899], [26876, 26877, 26900], [26877, 26901, 26900], [26877, 26878, 26902], [26877, 26902, 26901], [26878, 26879, 26902], [26879, 26903, 26902], [26879, 26880, 26904], [26879, 26904, 26903], [26880, 26881, 26904], [26881, 26905, 26904], [26881, 26882, 26906], [26881, 26906, 26905], [26882, 26883, 26906], [26883, 26907, 26906], [7383, 26884, 7501], [26884, 26908, 7501], [26884, 26885, 26909], [26884, 26909, 26908], [26885, 26886, 26909], [26886, 26910, 26909], [26886, 26887, 26911], [26886, 26911, 26910], [26887, 26888, 26911], [26888, 26912, 26911], [26888, 26889, 26913], [26888, 26913, 26912], [26889, 26890, 26913], [26890, 26914, 26913], [26890, 26891, 26915], [26890, 26915, 26914], [26891, 26892, 26915], [26892, 26916, 26915], [26892, 26893, 26917], [26892, 26917, 26916], [26893, 26894, 26917], [26894, 26918, 26917], [26894, 26895, 26919], [26894, 26919, 26918], [26895, 26896, 26919], [26896, 26920, 26919], [26896, 26897, 26921], [26896, 26921, 26920], [26897, 26898, 26921], [26898, 26922, 26921], [26898, 26899, 26923], [26898, 26923, 26922], [26899, 26900, 26923], [26900, 26924, 26923], [26900, 26901, 26925], [26900, 26925, 26924], [26901, 26902, 26925], [26902, 26926, 26925], [26902, 26903, 26927], [26902, 26927, 26926], [26903, 26904, 26927], [26904, 26928, 26927], [26904, 26905, 26929], [26904, 26929, 26928], [26905, 26906, 26929], [26906, 26930, 26929], [26906, 26907, 26931], [26906, 26931, 26930], [7501, 26908, 26932], [7501, 26932, 7619], [26908, 26909, 26932], [26909, 26933, 26932], [26909, 26910, 26934], [26909, 26934, 26933], [26910, 26911, 26934], [26911, 26935, 26934], [26911, 26912, 26936], [26911, 26936, 26935], [26912, 26913, 26936], [26913, 26937, 26936], [26913, 26914, 26938], [26913, 26938, 26937], [26914, 26915, 26938], [26915, 26939, 26938], [26915, 26916, 26940], [26915, 26940, 26939], [26916, 26917, 26940], [26917, 26941, 26940], [26917, 26918, 26942], [26917, 26942, 26941], [26918, 26919, 26942], [26919, 26943, 26942], [26919, 26920, 26944], [26919, 26944, 26943], [26920, 26921, 26944], [26921, 26945, 26944], [26921, 26922, 26946], [26921, 26946, 26945], [26922, 26923, 26946], [26923, 26947, 26946], [26923, 26924, 26948], [26923, 26948, 26947], [26924, 26925, 26948], [26925, 26949, 26948], [26925, 26926, 26950], [26925, 26950, 26949], [26926, 26927, 26950], [26927, 26951, 26950], [26927, 26928, 26952], [26927, 26952, 26951], [26928, 26929, 26952], [26929, 26953, 26952], [26929, 26930, 26954], [26929, 26954, 26953], [26930, 26931, 26954], [26931, 26955, 26954], [7619, 26932, 7737], [26932, 26956, 7737], [26932, 26933, 26957], [26932, 
26957, 26956], [26933, 26934, 26957], [26934, 26958, 26957], [26934, 26935, 26959], [26934, 26959, 26958], [26935, 26936, 26959], [26936, 26960, 26959], [26936, 26937, 26961], [26936, 26961, 26960], [26937, 26938, 26961], [26938, 26962, 26961], [26938, 26939, 26963], [26938, 26963, 26962], [26939, 26940, 26963], [26940, 26964, 26963], [26940, 26941, 26965], [26940, 26965, 26964], [26941, 26942, 26965], [26942, 26966, 26965], [26942, 26943, 26967], [26942, 26967, 26966], [26943, 26944, 26967], [26944, 26968, 26967], [26944, 26945, 26969], [26944, 26969, 26968], [26945, 26946, 26969], [26946, 26970, 26969], [26946, 26947, 26971], [26946, 26971, 26970], [26947, 26948, 26971], [26948, 26972, 26971], [26948, 26949, 26973], [26948, 26973, 26972], [26949, 26950, 26973], [26950, 26974, 26973], [26950, 26951, 26975], [26950, 26975, 26974], [26951, 26952, 26975], [26952, 26976, 26975], [26952, 26953, 26977], [26952, 26977, 26976], [26953, 26954, 26977], [26954, 26978, 26977], [26954, 26955, 26979], [26954, 26979, 26978], [7737, 26956, 26980], [7737, 26980, 7855], [26956, 26957, 26980], [26957, 26981, 26980], [26957, 26958, 26982], [26957, 26982, 26981], [26958, 26959, 26982], [26959, 26983, 26982], [26959, 26960, 26984], [26959, 26984, 26983], [26960, 26961, 26984], [26961, 26985, 26984], [26961, 26962, 26986], [26961, 26986, 26985], [26962, 26963, 26986], [26963, 26987, 26986], [26963, 26964, 26988], [26963, 26988, 26987], [26964, 26965, 26988], [26965, 26989, 26988], [26965, 26966, 26990], [26965, 26990, 26989], [26966, 26967, 26990], [26967, 26991, 26990], [26967, 26968, 26992], [26967, 26992, 26991], [26968, 26969, 26992], [26969, 26993, 26992], [26969, 26970, 26994], [26969, 26994, 26993], [26970, 26971, 26994], [26971, 26995, 26994], [26971, 26972, 26996], [26971, 26996, 26995], [26972, 26973, 26996], [26973, 26997, 26996], [26973, 26974, 26998], [26973, 26998, 26997], [26974, 26975, 26998], [26975, 26999, 26998], [26975, 26976, 27000], [26975, 27000, 26999], [26976, 26977, 27000], [26977, 27001, 27000], [26977, 26978, 27002], [26977, 27002, 27001], [26978, 26979, 27002], [26979, 27003, 27002], [7855, 26980, 7973], [26980, 27004, 7973], [26980, 26981, 27005], [26980, 27005, 27004], [26981, 26982, 27005], [26982, 27006, 27005], [26982, 26983, 27007], [26982, 27007, 27006], [26983, 26984, 27007], [26984, 27008, 27007], [26984, 26985, 27009], [26984, 27009, 27008], [26985, 26986, 27009], [26986, 27010, 27009], [26986, 26987, 27011], [26986, 27011, 27010], [26987, 26988, 27011], [26988, 27012, 27011], [26988, 26989, 27013], [26988, 27013, 27012], [26989, 26990, 27013], [26990, 27014, 27013], [26990, 26991, 27015], [26990, 27015, 27014], [26991, 26992, 27015], [26992, 27016, 27015], [26992, 26993, 27017], [26992, 27017, 27016], [26993, 26994, 27017], [26994, 27018, 27017], [26994, 26995, 27019], [26994, 27019, 27018], [26995, 26996, 27019], [26996, 27020, 27019], [26996, 26997, 27021], [26996, 27021, 27020], [26997, 26998, 27021], [26998, 27022, 27021], [26998, 26999, 27023], [26998, 27023, 27022], [26999, 27000, 27023], [27000, 27024, 27023], [27000, 27001, 27025], [27000, 27025, 27024], [27001, 27002, 27025], [27002, 27026, 27025], [27002, 27003, 27027], [27002, 27027, 27026], [7973, 27004, 27028], [7973, 27028, 8091], [27004, 27005, 27028], [27005, 27029, 27028], [27005, 27006, 27030], [27005, 27030, 27029], [27006, 27007, 27030], [27007, 27031, 27030], [27007, 27008, 27032], [27007, 27032, 27031], [27008, 27009, 27032], [27009, 27033, 27032], [27009, 27010, 27034], [27009, 27034, 27033], 
[27010, 27011, 27034], [27011, 27035, 27034], [27011, 27012, 27036], [27011, 27036, 27035], [27012, 27013, 27036], [27013, 27037, 27036], [27013, 27014, 27038], [27013, 27038, 27037], [27014, 27015, 27038], [27015, 27039, 27038], [27015, 27016, 27040], [27015, 27040, 27039], [27016, 27017, 27040], [27017, 27041, 27040], [27017, 27018, 27042], [27017, 27042, 27041], [27018, 27019, 27042], [27019, 27043, 27042], [27019, 27020, 27044], [27019, 27044, 27043], [27020, 27021, 27044], [27021, 27045, 27044], [27021, 27022, 27046], [27021, 27046, 27045], [27022, 27023, 27046], [27023, 27047, 27046], [27023, 27024, 27048], [27023, 27048, 27047], [27024, 27025, 27048], [27025, 27049, 27048], [27025, 27026, 27050], [27025, 27050, 27049], [27026, 27027, 27050], [27027, 27051, 27050], [8091, 27028, 8209], [27028, 27052, 8209], [27028, 27029, 27053], [27028, 27053, 27052], [27029, 27030, 27053], [27030, 27054, 27053], [27030, 27031, 27055], [27030, 27055, 27054], [27031, 27032, 27055], [27032, 27056, 27055], [27032, 27033, 27057], [27032, 27057, 27056], [27033, 27034, 27057], [27034, 27058, 27057], [27034, 27035, 27059], [27034, 27059, 27058], [27035, 27036, 27059], [27036, 27060, 27059], [27036, 27037, 27061], [27036, 27061, 27060], [27037, 27038, 27061], [27038, 27062, 27061], [27038, 27039, 27063], [27038, 27063, 27062], [27039, 27040, 27063], [27040, 27064, 27063], [27040, 27041, 27065], [27040, 27065, 27064], [27041, 27042, 27065], [27042, 27066, 27065], [27042, 27043, 27067], [27042, 27067, 27066], [27043, 27044, 27067], [27044, 27068, 27067], [27044, 27045, 27069], [27044, 27069, 27068], [27045, 27046, 27069], [27046, 27070, 27069], [27046, 27047, 27071], [27046, 27071, 27070], [27047, 27048, 27071], [27048, 27072, 27071], [27048, 27049, 27073], [27048, 27073, 27072], [27049, 27050, 27073], [27050, 27074, 27073], [27050, 27051, 27075], [27050, 27075, 27074], [8209, 27052, 27076], [8209, 27076, 8327], [27052, 27053, 27076], [27053, 27077, 27076], [27053, 27054, 27078], [27053, 27078, 27077], [27054, 27055, 27078], [27055, 27079, 27078], [27055, 27056, 27080], [27055, 27080, 27079], [27056, 27057, 27080], [27057, 27081, 27080], [27057, 27058, 27082], [27057, 27082, 27081], [27058, 27059, 27082], [27059, 27083, 27082], [27059, 27060, 27084], [27059, 27084, 27083], [27060, 27061, 27084], [27061, 27085, 27084], [27061, 27062, 27086], [27061, 27086, 27085], [27062, 27063, 27086], [27063, 27087, 27086], [27063, 27064, 27088], [27063, 27088, 27087], [27064, 27065, 27088], [27065, 27089, 27088], [27065, 27066, 27090], [27065, 27090, 27089], [27066, 27067, 27090], [27067, 27091, 27090], [27067, 27068, 27092], [27067, 27092, 27091], [27068, 27069, 27092], [27069, 27093, 27092], [27069, 27070, 27094], [27069, 27094, 27093], [27070, 27071, 27094], [27071, 27095, 27094], [27071, 27072, 27096], [27071, 27096, 27095], [27072, 27073, 27096], [27073, 27097, 27096], [27073, 27074, 27098], [27073, 27098, 27097], [27074, 27075, 27098], [27075, 27099, 27098], [8327, 27076, 8445], [27076, 27100, 8445], [27076, 27077, 27101], [27076, 27101, 27100], [27077, 27078, 27101], [27078, 27102, 27101], [27078, 27079, 27103], [27078, 27103, 27102], [27079, 27080, 27103], [27080, 27104, 27103], [27080, 27081, 27105], [27080, 27105, 27104], [27081, 27082, 27105], [27082, 27106, 27105], [27082, 27083, 27107], [27082, 27107, 27106], [27083, 27084, 27107], [27084, 27108, 27107], [27084, 27085, 27109], [27084, 27109, 27108], [27085, 27086, 27109], [27086, 27110, 27109], [27086, 27087, 27111], [27086, 27111, 27110], [27087, 27088, 
27111], [27088, 27112, 27111], [27088, 27089, 27113], [27088, 27113, 27112], [27089, 27090, 27113], [27090, 27114, 27113], [27090, 27091, 27115], [27090, 27115, 27114], [27091, 27092, 27115], [27092, 27116, 27115], [27092, 27093, 27117], [27092, 27117, 27116], [27093, 27094, 27117], [27094, 27118, 27117], [27094, 27095, 27119], [27094, 27119, 27118], [27095, 27096, 27119], [27096, 27120, 27119], [27096, 27097, 27121], [27096, 27121, 27120], [27097, 27098, 27121], [27098, 27122, 27121], [27098, 27099, 27123], [27098, 27123, 27122], [8445, 27100, 27124], [8445, 27124, 8563], [27100, 27101, 27124], [27101, 27125, 27124], [27101, 27102, 27126], [27101, 27126, 27125], [27102, 27103, 27126], [27103, 27127, 27126], [27103, 27104, 27128], [27103, 27128, 27127], [27104, 27105, 27128], [27105, 27129, 27128], [27105, 27106, 27130], [27105, 27130, 27129], [27106, 27107, 27130], [27107, 27131, 27130], [27107, 27108, 27132], [27107, 27132, 27131], [27108, 27109, 27132], [27109, 27133, 27132], [27109, 27110, 27134], [27109, 27134, 27133], [27110, 27111, 27134], [27111, 27135, 27134], [27111, 27112, 27136], [27111, 27136, 27135], [27112, 27113, 27136], [27113, 27137, 27136], [27113, 27114, 27138], [27113, 27138, 27137], [27114, 27115, 27138], [27115, 27139, 27138], [27115, 27116, 27140], [27115, 27140, 27139], [27116, 27117, 27140], [27117, 27141, 27140], [27117, 27118, 27142], [27117, 27142, 27141], [27118, 27119, 27142], [27119, 27143, 27142], [27119, 27120, 27144], [27119, 27144, 27143], [27120, 27121, 27144], [27121, 27145, 27144], [27121, 27122, 27146], [27121, 27146, 27145], [27122, 27123, 27146], [27123, 27147, 27146], [8563, 27124, 8681], [27124, 27148, 8681], [27124, 27125, 27149], [27124, 27149, 27148], [27125, 27126, 27149], [27126, 27150, 27149], [27126, 27127, 27151], [27126, 27151, 27150], [27127, 27128, 27151], [27128, 27152, 27151], [27128, 27129, 27153], [27128, 27153, 27152], [27129, 27130, 27153], [27130, 27154, 27153], [27130, 27131, 27155], [27130, 27155, 27154], [27131, 27132, 27155], [27132, 27156, 27155], [27132, 27133, 27157], [27132, 27157, 27156], [27133, 27134, 27157], [27134, 27158, 27157], [27134, 27135, 27159], [27134, 27159, 27158], [27135, 27136, 27159], [27136, 27160, 27159], [27136, 27137, 27161], [27136, 27161, 27160], [27137, 27138, 27161], [27138, 27162, 27161], [27138, 27139, 27163], [27138, 27163, 27162], [27139, 27140, 27163], [27140, 27164, 27163], [27140, 27141, 27165], [27140, 27165, 27164], [27141, 27142, 27165], [27142, 27166, 27165], [27142, 27143, 27167], [27142, 27167, 27166], [27143, 27144, 27167], [27144, 27168, 27167], [27144, 27145, 27169], [27144, 27169, 27168], [27145, 27146, 27169], [27146, 27170, 27169], [27146, 27147, 27171], [27146, 27171, 27170], [8681, 27148, 27172], [8681, 27172, 8799], [27148, 27149, 27172], [27149, 27173, 27172], [27149, 27150, 27174], [27149, 27174, 27173], [27150, 27151, 27174], [27151, 27175, 27174], [27151, 27152, 27176], [27151, 27176, 27175], [27152, 27153, 27176], [27153, 27177, 27176], [27153, 27154, 27178], [27153, 27178, 27177], [27154, 27155, 27178], [27155, 27179, 27178], [27155, 27156, 27180], [27155, 27180, 27179], [27156, 27157, 27180], [27157, 27181, 27180], [27157, 27158, 27182], [27157, 27182, 27181], [27158, 27159, 27182], [27159, 27183, 27182], [27159, 27160, 27184], [27159, 27184, 27183], [27160, 27161, 27184], [27161, 27185, 27184], [27161, 27162, 27186], [27161, 27186, 27185], [27162, 27163, 27186], [27163, 27187, 27186], [27163, 27164, 27188], [27163, 27188, 27187], [27164, 27165, 27188], [27165, 
27189, 27188], [27165, 27166, 27190], [27165, 27190, 27189], [27166, 27167, 27190], [27167, 27191, 27190], [27167, 27168, 27192], [27167, 27192, 27191], [27168, 27169, 27192], [27169, 27193, 27192], [27169, 27170, 27194], [27169, 27194, 27193], [27170, 27171, 27194], [27171, 27195, 27194], [8799, 27172, 8917], [27172, 27196, 8917], [27172, 27173, 27197], [27172, 27197, 27196], [27173, 27174, 27197], [27174, 27198, 27197], [27174, 27175, 27199], [27174, 27199, 27198], [27175, 27176, 27199], [27176, 27200, 27199], [27176, 27177, 27201], [27176, 27201, 27200], [27177, 27178, 27201], [27178, 27202, 27201], [27178, 27179, 27203], [27178, 27203, 27202], [27179, 27180, 27203], [27180, 27204, 27203], [27180, 27181, 27205], [27180, 27205, 27204], [27181, 27182, 27205], [27182, 27206, 27205], [27182, 27183, 27207], [27182, 27207, 27206], [27183, 27184, 27207], [27184, 27208, 27207], [27184, 27185, 27209], [27184, 27209, 27208], [27185, 27186, 27209], [27186, 27210, 27209], [27186, 27187, 27211], [27186, 27211, 27210], [27187, 27188, 27211], [27188, 27212, 27211], [27188, 27189, 27213], [27188, 27213, 27212], [27189, 27190, 27213], [27190, 27214, 27213], [27190, 27191, 27215], [27190, 27215, 27214], [27191, 27192, 27215], [27192, 27216, 27215], [27192, 27193, 27217], [27192, 27217, 27216], [27193, 27194, 27217], [27194, 27218, 27217], [27194, 27195, 27219], [27194, 27219, 27218], [8917, 27196, 27220], [8917, 27220, 9035], [27196, 27197, 27220], [27197, 27221, 27220], [27197, 27198, 27222], [27197, 27222, 27221], [27198, 27199, 27222], [27199, 27223, 27222], [27199, 27200, 27224], [27199, 27224, 27223], [27200, 27201, 27224], [27201, 27225, 27224], [27201, 27202, 27226], [27201, 27226, 27225], [27202, 27203, 27226], [27203, 27227, 27226], [27203, 27204, 27228], [27203, 27228, 27227], [27204, 27205, 27228], [27205, 27229, 27228], [27205, 27206, 27230], [27205, 27230, 27229], [27206, 27207, 27230], [27207, 27231, 27230], [27207, 27208, 27232], [27207, 27232, 27231], [27208, 27209, 27232], [27209, 27233, 27232], [27209, 27210, 27234], [27209, 27234, 27233], [27210, 27211, 27234], [27211, 27235, 27234], [27211, 27212, 27236], [27211, 27236, 27235], [27212, 27213, 27236], [27213, 27237, 27236], [27213, 27214, 27238], [27213, 27238, 27237], [27214, 27215, 27238], [27215, 27239, 27238], [27215, 27216, 27240], [27215, 27240, 27239], [27216, 27217, 27240], [27217, 27241, 27240], [27217, 27218, 27242], [27217, 27242, 27241], [27218, 27219, 27242], [27219, 27243, 27242], [9035, 27220, 9153], [27220, 27244, 9153], [27220, 27221, 27245], [27220, 27245, 27244], [27221, 27222, 27245], [27222, 27246, 27245], [27222, 27223, 27247], [27222, 27247, 27246], [27223, 27224, 27247], [27224, 27248, 27247], [27224, 27225, 27249], [27224, 27249, 27248], [27225, 27226, 27249], [27226, 27250, 27249], [27226, 27227, 27251], [27226, 27251, 27250], [27227, 27228, 27251], [27228, 27252, 27251], [27228, 27229, 27253], [27228, 27253, 27252], [27229, 27230, 27253], [27230, 27254, 27253], [27230, 27231, 27255], [27230, 27255, 27254], [27231, 27232, 27255], [27232, 27256, 27255], [27232, 27233, 27257], [27232, 27257, 27256], [27233, 27234, 27257], [27234, 27258, 27257], [27234, 27235, 27259], [27234, 27259, 27258], [27235, 27236, 27259], [27236, 27260, 27259], [27236, 27237, 27261], [27236, 27261, 27260], [27237, 27238, 27261], [27238, 27262, 27261], [27238, 27239, 27263], [27238, 27263, 27262], [27239, 27240, 27263], [27240, 27264, 27263], [27240, 27241, 27265], [27240, 27265, 27264], [27241, 27242, 27265], [27242, 27266, 27265], 
[27242, 27243, 27267], [27242, 27267, 27266], [9153, 27244, 27268], [9153, 27268, 9271], [27244, 27245, 27268], [27245, 27269, 27268], [27245, 27246, 27270], [27245, 27270, 27269], [27246, 27247, 27270], [27247, 27271, 27270], [27247, 27248, 27272], [27247, 27272, 27271], [27248, 27249, 27272], [27249, 27273, 27272], [27249, 27250, 27274], [27249, 27274, 27273], [27250, 27251, 27274], [27251, 27275, 27274], [27251, 27252, 27276], [27251, 27276, 27275], [27252, 27253, 27276], [27253, 27277, 27276], [27253, 27254, 27278], [27253, 27278, 27277], [27254, 27255, 27278], [27255, 27279, 27278], [27255, 27256, 27280], [27255, 27280, 27279], [27256, 27257, 27280], [27257, 27281, 27280], [27257, 27258, 27282], [27257, 27282, 27281], [27258, 27259, 27282], [27259, 27283, 27282], [27259, 27260, 27284], [27259, 27284, 27283], [27260, 27261, 27284], [27261, 27285, 27284], [27261, 27262, 27286], [27261, 27286, 27285], [27262, 27263, 27286], [27263, 27287, 27286], [27263, 27264, 27288], [27263, 27288, 27287], [27264, 27265, 27288], [27265, 27289, 27288], [27265, 27266, 27290], [27265, 27290, 27289], [27266, 27267, 27290], [27267, 27291, 27290], [9271, 27268, 9389], [27268, 27292, 9389], [27268, 27269, 27293], [27268, 27293, 27292], [27269, 27270, 27293], [27270, 27294, 27293], [27270, 27271, 27295], [27270, 27295, 27294], [27271, 27272, 27295], [27272, 27296, 27295], [27272, 27273, 27297], [27272, 27297, 27296], [27273, 27274, 27297], [27274, 27298, 27297], [27274, 27275, 27299], [27274, 27299, 27298], [27275, 27276, 27299], [27276, 27300, 27299], [27276, 27277, 27301], [27276, 27301, 27300], [27277, 27278, 27301], [27278, 27302, 27301], [27278, 27279, 27303], [27278, 27303, 27302], [27279, 27280, 27303], [27280, 27304, 27303], [27280, 27281, 27305], [27280, 27305, 27304], [27281, 27282, 27305], [27282, 27306, 27305], [27282, 27283, 27307], [27282, 27307, 27306], [27283, 27284, 27307], [27284, 27308, 27307], [27284, 27285, 27309], [27284, 27309, 27308], [27285, 27286, 27309], [27286, 27310, 27309], [27286, 27287, 27311], [27286, 27311, 27310], [27287, 27288, 27311], [27288, 27312, 27311], [27288, 27289, 27313], [27288, 27313, 27312], [27289, 27290, 27313], [27290, 27314, 27313], [27290, 27291, 27315], [27290, 27315, 27314], [9389, 27292, 27316], [9389, 27316, 9507], [27292, 27293, 27316], [27293, 27317, 27316], [27293, 27294, 27318], [27293, 27318, 27317], [27294, 27295, 27318], [27295, 27319, 27318], [27295, 27296, 27320], [27295, 27320, 27319], [27296, 27297, 27320], [27297, 27321, 27320], [27297, 27298, 27322], [27297, 27322, 27321], [27298, 27299, 27322], [27299, 27323, 27322], [27299, 27300, 27324], [27299, 27324, 27323], [27300, 27301, 27324], [27301, 27325, 27324], [27301, 27302, 27326], [27301, 27326, 27325], [27302, 27303, 27326], [27303, 27327, 27326], [27303, 27304, 27328], [27303, 27328, 27327], [27304, 27305, 27328], [27305, 27329, 27328], [27305, 27306, 27330], [27305, 27330, 27329], [27306, 27307, 27330], [27307, 27331, 27330], [27307, 27308, 27332], [27307, 27332, 27331], [27308, 27309, 27332], [27309, 27333, 27332], [27309, 27310, 27334], [27309, 27334, 27333], [27310, 27311, 27334], [27311, 27335, 27334], [27311, 27312, 27336], [27311, 27336, 27335], [27312, 27313, 27336], [27313, 27337, 27336], [27313, 27314, 27338], [27313, 27338, 27337], [27314, 27315, 27338], [27315, 27339, 27338], [9507, 27316, 9625], [27316, 27340, 9625], [27316, 27317, 27341], [27316, 27341, 27340], [27317, 27318, 27341], [27318, 27342, 27341], [27318, 27319, 27343], [27318, 27343, 27342], [27319, 27320, 27343], 
[27320, 27344, 27343], [27320, 27321, 27345], [27320, 27345, 27344], [27321, 27322, 27345], [27322, 27346, 27345], [27322, 27323, 27347], [27322, 27347, 27346], [27323, 27324, 27347], [27324, 27348, 27347], [27324, 27325, 27349], [27324, 27349, 27348], [27325, 27326, 27349], [27326, 27350, 27349], [27326, 27327, 27351], [27326, 27351, 27350], [27327, 27328, 27351], [27328, 27352, 27351], [27328, 27329, 27353], [27328, 27353, 27352], [27329, 27330, 27353], [27330, 27354, 27353], [27330, 27331, 27355], [27330, 27355, 27354], [27331, 27332, 27355], [27332, 27356, 27355], [27332, 27333, 27357], [27332, 27357, 27356], [27333, 27334, 27357], [27334, 27358, 27357], [27334, 27335, 27359], [27334, 27359, 27358], [27335, 27336, 27359], [27336, 27360, 27359], [27336, 27337, 27361], [27336, 27361, 27360], [27337, 27338, 27361], [27338, 27362, 27361], [27338, 27339, 27363], [27338, 27363, 27362], [9625, 27340, 27364], [9625, 27364, 9743], [27340, 27341, 27364], [27341, 27365, 27364], [27341, 27342, 27366], [27341, 27366, 27365], [27342, 27343, 27366], [27343, 27367, 27366], [27343, 27344, 27368], [27343, 27368, 27367], [27344, 27345, 27368], [27345, 27369, 27368], [27345, 27346, 27370], [27345, 27370, 27369], [27346, 27347, 27370], [27347, 27371, 27370], [27347, 27348, 27372], [27347, 27372, 27371], [27348, 27349, 27372], [27349, 27373, 27372], [27349, 27350, 27374], [27349, 27374, 27373], [27350, 27351, 27374], [27351, 27375, 27374], [27351, 27352, 27376], [27351, 27376, 27375], [27352, 27353, 27376], [27353, 27377, 27376], [27353, 27354, 27378], [27353, 27378, 27377], [27354, 27355, 27378], [27355, 27379, 27378], [27355, 27356, 27380], [27355, 27380, 27379], [27356, 27357, 27380], [27357, 27381, 27380], [27357, 27358, 27382], [27357, 27382, 27381], [27358, 27359, 27382], [27359, 27383, 27382], [27359, 27360, 27384], [27359, 27384, 27383], [27360, 27361, 27384], [27361, 27385, 27384], [27361, 27362, 27386], [27361, 27386, 27385], [27362, 27363, 27386], [27363, 27387, 27386], [9743, 27364, 9863], [27364, 27388, 9863], [27364, 27365, 27389], [27364, 27389, 27388], [27365, 27366, 27389], [27366, 27390, 27389], [27366, 27367, 27391], [27366, 27391, 27390], [27367, 27368, 27391], [27368, 27392, 27391], [27368, 27369, 27393], [27368, 27393, 27392], [27369, 27370, 27393], [27370, 27394, 27393], [27370, 27371, 27395], [27370, 27395, 27394], [27371, 27372, 27395], [27372, 27396, 27395], [27372, 27373, 27397], [27372, 27397, 27396], [27373, 27374, 27397], [27374, 27398, 27397], [27374, 27375, 27399], [27374, 27399, 27398], [27375, 27376, 27399], [27376, 27400, 27399], [27376, 27377, 27401], [27376, 27401, 27400], [27377, 27378, 27401], [27378, 27402, 27401], [27378, 27379, 27403], [27378, 27403, 27402], [27379, 27380, 27403], [27380, 27404, 27403], [27380, 27381, 27405], [27380, 27405, 27404], [27381, 27382, 27405], [27382, 27406, 27405], [27382, 27383, 27407], [27382, 27407, 27406], [27383, 27384, 27407], [27384, 27408, 27407], [27384, 27385, 27409], [27384, 27409, 27408], [27385, 27386, 27409], [27386, 27410, 27409], [27386, 27387, 27411], [27386, 27411, 27410], [9863, 27388, 27412], [9863, 27412, 9984], [27388, 27389, 27412], [27389, 27413, 27412], [27389, 27390, 27414], [27389, 27414, 27413], [27390, 27391, 27414], [27391, 27415, 27414], [27391, 27392, 27416], [27391, 27416, 27415], [27392, 27393, 27416], [27393, 27417, 27416], [27393, 27394, 27418], [27393, 27418, 27417], [27394, 27395, 27418], [27395, 27419, 27418], [27395, 27396, 27420], [27395, 27420, 27419], [27396, 27397, 27420], [27397, 27421, 
27420], [27397, 27398, 27422], [27397, 27422, 27421], [27398, 27399, 27422], [27399, 27423, 27422], [27399, 27400, 27424], [27399, 27424, 27423], [27400, 27401, 27424], [27401, 27425, 27424], [27401, 27402, 27426], [27401, 27426, 27425], [27402, 27403, 27426], [27403, 27427, 27426], [27403, 27404, 27428], [27403, 27428, 27427], [27404, 27405, 27428], [27405, 27429, 27428], [27405, 27406, 27430], [27405, 27430, 27429], [27406, 27407, 27430], [27407, 27431, 27430], [27407, 27408, 27432], [27407, 27432, 27431], [27408, 27409, 27432], [27409, 27433, 27432], [27409, 27410, 27434], [27409, 27434, 27433], [27410, 27411, 27434], [27411, 27435, 27434], [9984, 27412, 10110], [27412, 27436, 10110], [27412, 27413, 27437], [27412, 27437, 27436], [27413, 27414, 27437], [27414, 27438, 27437], [27414, 27415, 27439], [27414, 27439, 27438], [27415, 27416, 27439], [27416, 27440, 27439], [27416, 27417, 27441], [27416, 27441, 27440], [27417, 27418, 27441], [27418, 27442, 27441], [27418, 27419, 27443], [27418, 27443, 27442], [27419, 27420, 27443], [27420, 27444, 27443], [27420, 27421, 27445], [27420, 27445, 27444], [27421, 27422, 27445], [27422, 27446, 27445], [27422, 27423, 27447], [27422, 27447, 27446], [27423, 27424, 27447], [27424, 27448, 27447], [27424, 27425, 27449], [27424, 27449, 27448], [27425, 27426, 27449], [27426, 27450, 27449], [27426, 27427, 27451], [27426, 27451, 27450], [27427, 27428, 27451], [27428, 27452, 27451], [27428, 27429, 27453], [27428, 27453, 27452], [27429, 27430, 27453], [27430, 27454, 27453], [27430, 27431, 27455], [27430, 27455, 27454], [27431, 27432, 27455], [27432, 27456, 27455], [27432, 27433, 27457], [27432, 27457, 27456], [27433, 27434, 27457], [27434, 27458, 27457], [27434, 27435, 27459], [27434, 27459, 27458], [10110, 27436, 27460], [10110, 27460, 10236], [27436, 27437, 27460], [27437, 27461, 27460], [27437, 27438, 27462], [27437, 27462, 27461], [27438, 27439, 27462], [27439, 27463, 27462], [27439, 27440, 27464], [27439, 27464, 27463], [27440, 27441, 27464], [27441, 27465, 27464], [27441, 27442, 27466], [27441, 27466, 27465], [27442, 27443, 27466], [27443, 27467, 27466], [27443, 27444, 27468], [27443, 27468, 27467], [27444, 27445, 27468], [27445, 27469, 27468], [27445, 27446, 27470], [27445, 27470, 27469], [27446, 27447, 27470], [27447, 27471, 27470], [27447, 27448, 27472], [27447, 27472, 27471], [27448, 27449, 27472], [27449, 27473, 27472], [27449, 27450, 27474], [27449, 27474, 27473], [27450, 27451, 27474], [27451, 27475, 27474], [27451, 27452, 27476], [27451, 27476, 27475], [27452, 27453, 27476], [27453, 27477, 27476], [27453, 27454, 27478], [27453, 27478, 27477], [27454, 27455, 27478], [27455, 27479, 27478], [27455, 27456, 27480], [27455, 27480, 27479], [27456, 27457, 27480], [27457, 27481, 27480], [27457, 27458, 27482], [27457, 27482, 27481], [27458, 27459, 27482], [27459, 27483, 27482], [10236, 27460, 10365], [27460, 27484, 10365], [27460, 27461, 27485], [27460, 27485, 27484], [27461, 27462, 27485], [27462, 27486, 27485], [27462, 27463, 27487], [27462, 27487, 27486], [27463, 27464, 27487], [27464, 27488, 27487], [27464, 27465, 27489], [27464, 27489, 27488], [27465, 27466, 27489], [27466, 27490, 27489], [27466, 27467, 27491], [27466, 27491, 27490], [27467, 27468, 27491], [27468, 27492, 27491], [27468, 27469, 27493], [27468, 27493, 27492], [27469, 27470, 27493], [27470, 27494, 27493], [27470, 27471, 27495], [27470, 27495, 27494], [27471, 27472, 27495], [27472, 27496, 27495], [27472, 27473, 27497], [27472, 27497, 27496], [27473, 27474, 27497], [27474, 27498, 27497], 
[27474, 27475, 27499], [27474, 27499, 27498], [27475, 27476, 27499], [27476, 27500, 27499], [27476, 27477, 27501], [27476, 27501, 27500], [27477, 27478, 27501], [27478, 27502, 27501], [27478, 27479, 27503], [27478, 27503, 27502], [27479, 27480, 27503], [27480, 27504, 27503], [27480, 27481, 27505], [27480, 27505, 27504], [27481, 27482, 27505], [27482, 27506, 27505], [27482, 27483, 27507], [27482, 27507, 27506], [10365, 27484, 27508], [10365, 27508, 10494], [27484, 27485, 27508], [27485, 27509, 27508], [27485, 27486, 27510], [27485, 27510, 27509], [27486, 27487, 27510], [27487, 27511, 27510], [27487, 27488, 27512], [27487, 27512, 27511], [27488, 27489, 27512], [27489, 27513, 27512], [27489, 27490, 27514], [27489, 27514, 27513], [27490, 27491, 27514], [27491, 27515, 27514], [27491, 27492, 27516], [27491, 27516, 27515], [27492, 27493, 27516], [27493, 27517, 27516], [27493, 27494, 27518], [27493, 27518, 27517], [27494, 27495, 27518], [27495, 27519, 27518], [27495, 27496, 27520], [27495, 27520, 27519], [27496, 27497, 27520], [27497, 27521, 27520], [27497, 27498, 27522], [27497, 27522, 27521], [27498, 27499, 27522], [27499, 27523, 27522], [27499, 27500, 27524], [27499, 27524, 27523], [27500, 27501, 27524], [27501, 27525, 27524], [27501, 27502, 27526], [27501, 27526, 27525], [27502, 27503, 27526], [27503, 27527, 27526], [27503, 27504, 27528], [27503, 27528, 27527], [27504, 27505, 27528], [27505, 27529, 27528], [27505, 27506, 27530], [27505, 27530, 27529], [27506, 27507, 27530], [27507, 27531, 27530], [10494, 27508, 10623], [27508, 27532, 10623], [27508, 27509, 27533], [27508, 27533, 27532], [27509, 27510, 27533], [27510, 27534, 27533], [27510, 27511, 27535], [27510, 27535, 27534], [27511, 27512, 27535], [27512, 27536, 27535], [27512, 27513, 27537], [27512, 27537, 27536], [27513, 27514, 27537], [27514, 27538, 27537], [27514, 27515, 27539], [27514, 27539, 27538], [27515, 27516, 27539], [27516, 27540, 27539], [27516, 27517, 27541], [27516, 27541, 27540], [27517, 27518, 27541], [27518, 27542, 27541], [27518, 27519, 27543], [27518, 27543, 27542], [27519, 27520, 27543], [27520, 27544, 27543], [27520, 27521, 27545], [27520, 27545, 27544], [27521, 27522, 27545], [27522, 27546, 27545], [27522, 27523, 27547], [27522, 27547, 27546], [27523, 27524, 27547], [27524, 27548, 27547], [27524, 27525, 27549], [27524, 27549, 27548], [27525, 27526, 27549], [27526, 27550, 27549], [27526, 27527, 27551], [27526, 27551, 27550], [27527, 27528, 27551], [27528, 27552, 27551], [27528, 27529, 27553], [27528, 27553, 27552], [27529, 27530, 27553], [27530, 27554, 27553], [27530, 27531, 27555], [27530, 27555, 27554], [10623, 27532, 27556], [10623, 27556, 10752], [27532, 27533, 27556], [27533, 27557, 27556], [27533, 27534, 27558], [27533, 27558, 27557], [27534, 27535, 27558], [27535, 27559, 27558], [27535, 27536, 27560], [27535, 27560, 27559], [27536, 27537, 27560], [27537, 27561, 27560], [27537, 27538, 27562], [27537, 27562, 27561], [27538, 27539, 27562], [27539, 27563, 27562], [27539, 27540, 27564], [27539, 27564, 27563], [27540, 27541, 27564], [27541, 27565, 27564], [27541, 27542, 27566], [27541, 27566, 27565], [27542, 27543, 27566], [27543, 27567, 27566], [27543, 27544, 27568], [27543, 27568, 27567], [27544, 27545, 27568], [27545, 27569, 27568], [27545, 27546, 27570], [27545, 27570, 27569], [27546, 27547, 27570], [27547, 27571, 27570], [27547, 27548, 27572], [27547, 27572, 27571], [27548, 27549, 27572], [27549, 27573, 27572], [27549, 27550, 27574], [27549, 27574, 27573], [27550, 27551, 27574], [27551, 27575, 27574], [27551, 
27552, 27576], [27551, 27576, 27575], [27552, 27553, 27576], [27553, 27577, 27576], [27553, 27554, 27578], [27553, 27578, 27577], [27554, 27555, 27578], [27555, 27579, 27578], [10752, 27556, 10881], [27556, 27580, 10881], [27556, 27557, 27581], [27556, 27581, 27580], [27557, 27558, 27581], [27558, 27582, 27581], [27558, 27559, 27583], [27558, 27583, 27582], [27559, 27560, 27583], [27560, 27584, 27583], [27560, 27561, 27585], [27560, 27585, 27584], [27561, 27562, 27585], [27562, 27586, 27585], [27562, 27563, 27587], [27562, 27587, 27586], [27563, 27564, 27587], [27564, 27588, 27587], [27564, 27565, 27589], [27564, 27589, 27588], [27565, 27566, 27589], [27566, 27590, 27589], [27566, 27567, 27591], [27566, 27591, 27590], [27567, 27568, 27591], [27568, 27592, 27591], [27568, 27569, 27593], [27568, 27593, 27592], [27569, 27570, 27593], [27570, 27594, 27593], [27570, 27571, 27595], [27570, 27595, 27594], [27571, 27572, 27595], [27572, 27596, 27595], [27572, 27573, 27597], [27572, 27597, 27596], [27573, 27574, 27597], [27574, 27598, 27597], [27574, 27575, 27599], [27574, 27599, 27598], [27575, 27576, 27599], [27576, 27600, 27599], [27576, 27577, 27601], [27576, 27601, 27600], [27577, 27578, 27601], [27578, 27602, 27601], [27578, 27579, 27603], [27578, 27603, 27602], [10881, 27580, 27604], [10881, 27604, 11010], [27580, 27581, 27604], [27581, 27605, 27604], [27581, 27582, 27606], [27581, 27606, 27605], [27582, 27583, 27606], [27583, 27607, 27606], [27583, 27584, 27608], [27583, 27608, 27607], [27584, 27585, 27608], [27585, 27609, 27608], [27585, 27586, 27610], [27585, 27610, 27609], [27586, 27587, 27610], [27587, 27611, 27610], [27587, 27588, 27612], [27587, 27612, 27611], [27588, 27589, 27612], [27589, 27613, 27612], [27589, 27590, 27614], [27589, 27614, 27613], [27590, 27591, 27614], [27591, 27615, 27614], [27591, 27592, 27616], [27591, 27616, 27615], [27592, 27593, 27616], [27593, 27617, 27616], [27593, 27594, 27618], [27593, 27618, 27617], [27594, 27595, 27618], [27595, 27619, 27618], [27595, 27596, 27620], [27595, 27620, 27619], [27596, 27597, 27620], [27597, 27621, 27620], [27597, 27598, 27622], [27597, 27622, 27621], [27598, 27599, 27622], [27599, 27623, 27622], [27599, 27600, 27624], [27599, 27624, 27623], [27600, 27601, 27624], [27601, 27625, 27624], [27601, 27602, 27626], [27601, 27626, 27625], [27602, 27603, 27626], [27603, 27627, 27626], [11010, 27604, 11139], [27604, 27628, 11139], [27604, 27605, 27629], [27604, 27629, 27628], [27605, 27606, 27629], [27606, 27630, 27629], [27606, 27607, 27631], [27606, 27631, 27630], [27607, 27608, 27631], [27608, 27632, 27631], [27608, 27609, 27633], [27608, 27633, 27632], [27609, 27610, 27633], [27610, 27634, 27633], [27610, 27611, 27635], [27610, 27635, 27634], [27611, 27612, 27635], [27612, 27636, 27635], [27612, 27613, 27637], [27612, 27637, 27636], [27613, 27614, 27637], [27614, 27638, 27637], [27614, 27615, 27639], [27614, 27639, 27638], [27615, 27616, 27639], [27616, 27640, 27639], [27616, 27617, 27641], [27616, 27641, 27640], [27617, 27618, 27641], [27618, 27642, 27641], [27618, 27619, 27643], [27618, 27643, 27642], [27619, 27620, 27643], [27620, 27644, 27643], [27620, 27621, 27645], [27620, 27645, 27644], [27621, 27622, 27645], [27622, 27646, 27645], [27622, 27623, 27647], [27622, 27647, 27646], [27623, 27624, 27647], [27624, 27648, 27647], [27624, 27625, 27649], [27624, 27649, 27648], [27625, 27626, 27649], [27626, 27650, 27649], [27626, 27627, 27651], [27626, 27651, 27650], [11139, 27628, 27652], [11139, 27652, 11268], [27628, 27629, 
27652], [27629, 27653, 27652], [27629, 27630, 27654], [27629, 27654, 27653], [27630, 27631, 27654], [27631, 27655, 27654], [27631, 27632, 27656], [27631, 27656, 27655], [27632, 27633, 27656], [27633, 27657, 27656], [27633, 27634, 27658], [27633, 27658, 27657], [27634, 27635, 27658], [27635, 27659, 27658], [27635, 27636, 27660], [27635, 27660, 27659], [27636, 27637, 27660], [27637, 27661, 27660], [27637, 27638, 27662], [27637, 27662, 27661], [27638, 27639, 27662], [27639, 27663, 27662], [27639, 27640, 27664], [27639, 27664, 27663], [27640, 27641, 27664], [27641, 27665, 27664], [27641, 27642, 27666], [27641, 27666, 27665], [27642, 27643, 27666], [27643, 27667, 27666], [27643, 27644, 27668], [27643, 27668, 27667], [27644, 27645, 27668], [27645, 27669, 27668], [27645, 27646, 27670], [27645, 27670, 27669], [27646, 27647, 27670], [27647, 27671, 27670], [27647, 27648, 27672], [27647, 27672, 27671], [27648, 27649, 27672], [27649, 27673, 27672], [27649, 27650, 27674], [27649, 27674, 27673], [27650, 27651, 27674], [27651, 27675, 27674], [11268, 27652, 11397], [27652, 27676, 11397], [27652, 27653, 27677], [27652, 27677, 27676], [27653, 27654, 27677], [27654, 27678, 27677], [27654, 27655, 27679], [27654, 27679, 27678], [27655, 27656, 27679], [27656, 27680, 27679], [27656, 27657, 27681], [27656, 27681, 27680], [27657, 27658, 27681], [27658, 27682, 27681], [27658, 27659, 27683], [27658, 27683, 27682], [27659, 27660, 27683], [27660, 27684, 27683], [27660, 27661, 27685], [27660, 27685, 27684], [27661, 27662, 27685], [27662, 27686, 27685], [27662, 27663, 27687], [27662, 27687, 27686], [27663, 27664, 27687], [27664, 27688, 27687], [27664, 27665, 27689], [27664, 27689, 27688], [27665, 27666, 27689], [27666, 27690, 27689], [27666, 27667, 27691], [27666, 27691, 27690], [27667, 27668, 27691], [27668, 27692, 27691], [27668, 27669, 27693], [27668, 27693, 27692], [27669, 27670, 27693], [27670, 27694, 27693], [27670, 27671, 27695], [27670, 27695, 27694], [27671, 27672, 27695], [27672, 27696, 27695], [27672, 27673, 27697], [27672, 27697, 27696], [27673, 27674, 27697], [27674, 27698, 27697], [27674, 27675, 27699], [27674, 27699, 27698], [11397, 27676, 27700], [11397, 27700, 11526], [27676, 27677, 27700], [27677, 27701, 27700], [27677, 27678, 27702], [27677, 27702, 27701], [27678, 27679, 27702], [27679, 27703, 27702], [27679, 27680, 27704], [27679, 27704, 27703], [27680, 27681, 27704], [27681, 27705, 27704], [27681, 27682, 27706], [27681, 27706, 27705], [27682, 27683, 27706], [27683, 27707, 27706], [27683, 27684, 27708], [27683, 27708, 27707], [27684, 27685, 27708], [27685, 27709, 27708], [27685, 27686, 27710], [27685, 27710, 27709], [27686, 27687, 27710], [27687, 27711, 27710], [27687, 27688, 27712], [27687, 27712, 27711], [27688, 27689, 27712], [27689, 27713, 27712], [27689, 27690, 27714], [27689, 27714, 27713], [27690, 27691, 27714], [27691, 27715, 27714], [27691, 27692, 27716], [27691, 27716, 27715], [27692, 27693, 27716], [27693, 27717, 27716], [27693, 27694, 27718], [27693, 27718, 27717], [27694, 27695, 27718], [27695, 27719, 27718], [27695, 27696, 27720], [27695, 27720, 27719], [27696, 27697, 27720], [27697, 27721, 27720], [27697, 27698, 27722], [27697, 27722, 27721], [27698, 27699, 27722], [27699, 27723, 27722], [11526, 27700, 11655], [27700, 27724, 11655], [27700, 27701, 27725], [27700, 27725, 27724], [27701, 27702, 27725], [27702, 27726, 27725], [27702, 27703, 27727], [27702, 27727, 27726], [27703, 27704, 27727], [27704, 27728, 27727], [27704, 27705, 27729], [27704, 27729, 27728], [27705, 27706, 27729], 
[27706, 27730, 27729], [27706, 27707, 27731], [27706, 27731, 27730], [27707, 27708, 27731], [27708, 27732, 27731], [27708, 27709, 27733], [27708, 27733, 27732], [27709, 27710, 27733], [27710, 27734, 27733], [27710, 27711, 27735], [27710, 27735, 27734], [27711, 27712, 27735], [27712, 27736, 27735], [27712, 27713, 27737], [27712, 27737, 27736], [27713, 27714, 27737], [27714, 27738, 27737], [27714, 27715, 27739], [27714, 27739, 27738], [27715, 27716, 27739], [27716, 27740, 27739], [27716, 27717, 27741], [27716, 27741, 27740], [27717, 27718, 27741], [27718, 27742, 27741], [27718, 27719, 27743], [27718, 27743, 27742], [27719, 27720, 27743], [27720, 27744, 27743], [27720, 27721, 27745], [27720, 27745, 27744], [27721, 27722, 27745], [27722, 27746, 27745], [27722, 27723, 27747], [27722, 27747, 27746], [11655, 27724, 27748], [11655, 27748, 11784], [27724, 27725, 27748], [27725, 27749, 27748], [27725, 27726, 27750], [27725, 27750, 27749], [27726, 27727, 27750], [27727, 27751, 27750], [27727, 27728, 27752], [27727, 27752, 27751], [27728, 27729, 27752], [27729, 27753, 27752], [27729, 27730, 27754], [27729, 27754, 27753], [27730, 27731, 27754], [27731, 27755, 27754], [27731, 27732, 27756], [27731, 27756, 27755], [27732, 27733, 27756], [27733, 27757, 27756], [27733, 27734, 27758], [27733, 27758, 27757], [27734, 27735, 27758], [27735, 27759, 27758], [27735, 27736, 27760], [27735, 27760, 27759], [27736, 27737, 27760], [27737, 27761, 27760], [27737, 27738, 27762], [27737, 27762, 27761], [27738, 27739, 27762], [27739, 27763, 27762], [27739, 27740, 27764], [27739, 27764, 27763], [27740, 27741, 27764], [27741, 27765, 27764], [27741, 27742, 27766], [27741, 27766, 27765], [27742, 27743, 27766], [27743, 27767, 27766], [27743, 27744, 27768], [27743, 27768, 27767], [27744, 27745, 27768], [27745, 27769, 27768], [27745, 27746, 27770], [27745, 27770, 27769], [27746, 27747, 27770], [27747, 27771, 27770], [11784, 27748, 11913], [27748, 27772, 11913], [27748, 27749, 27773], [27748, 27773, 27772], [27749, 27750, 27773], [27750, 27774, 27773], [27750, 27751, 27775], [27750, 27775, 27774], [27751, 27752, 27775], [27752, 27776, 27775], [27752, 27753, 27777], [27752, 27777, 27776], [27753, 27754, 27777], [27754, 27778, 27777], [27754, 27755, 27779], [27754, 27779, 27778], [27755, 27756, 27779], [27756, 27780, 27779], [27756, 27757, 27781], [27756, 27781, 27780], [27757, 27758, 27781], [27758, 27782, 27781], [27758, 27759, 27783], [27758, 27783, 27782], [27759, 27760, 27783], [27760, 27784, 27783], [27760, 27761, 27785], [27760, 27785, 27784], [27761, 27762, 27785], [27762, 27786, 27785], [27762, 27763, 27787], [27762, 27787, 27786], [27763, 27764, 27787], [27764, 27788, 27787], [27764, 27765, 27789], [27764, 27789, 27788], [27765, 27766, 27789], [27766, 27790, 27789], [27766, 27767, 27791], [27766, 27791, 27790], [27767, 27768, 27791], [27768, 27792, 27791], [27768, 27769, 27793], [27768, 27793, 27792], [27769, 27770, 27793], [27770, 27794, 27793], [27770, 27771, 27795], [27770, 27795, 27794], [11913, 27772, 27796], [11913, 27796, 12042], [27772, 27773, 27796], [27773, 27797, 27796], [27773, 27774, 27798], [27773, 27798, 27797], [27774, 27775, 27798], [27775, 27799, 27798], [27775, 27776, 27800], [27775, 27800, 27799], [27776, 27777, 27800], [27777, 27801, 27800], [27777, 27778, 27802], [27777, 27802, 27801], [27778, 27779, 27802], [27779, 27803, 27802], [27779, 27780, 27804], [27779, 27804, 27803], [27780, 27781, 27804], [27781, 27805, 27804], [27781, 27782, 27806], [27781, 27806, 27805], [27782, 27783, 27806], [27783, 
27807, 27806], [27783, 27784, 27808], [27783, 27808, 27807], [27784, 27785, 27808], [27785, 27809, 27808], [27785, 27786, 27810], [27785, 27810, 27809], [27786, 27787, 27810], [27787, 27811, 27810], [27787, 27788, 27812], [27787, 27812, 27811], [27788, 27789, 27812], [27789, 27813, 27812], [27789, 27790, 27814], [27789, 27814, 27813], [27790, 27791, 27814], [27791, 27815, 27814], [27791, 27792, 27816], [27791, 27816, 27815], [27792, 27793, 27816], [27793, 27817, 27816], [27793, 27794, 27818], [27793, 27818, 27817], [27794, 27795, 27818], [27795, 27819, 27818], [12042, 27796, 12171], [27796, 27820, 12171], [27796, 27797, 27821], [27796, 27821, 27820], [27797, 27798, 27821], [27798, 27822, 27821], [27798, 27799, 27823], [27798, 27823, 27822], [27799, 27800, 27823], [27800, 27824, 27823], [27800, 27801, 27825], [27800, 27825, 27824], [27801, 27802, 27825], [27802, 27826, 27825], [27802, 27803, 27827], [27802, 27827, 27826], [27803, 27804, 27827], [27804, 27828, 27827], [27804, 27805, 27829], [27804, 27829, 27828], [27805, 27806, 27829], [27806, 27830, 27829], [27806, 27807, 27831], [27806, 27831, 27830], [27807, 27808, 27831], [27808, 27832, 27831], [27808, 27809, 27833], [27808, 27833, 27832], [27809, 27810, 27833], [27810, 27834, 27833], [27810, 27811, 27835], [27810, 27835, 27834], [27811, 27812, 27835], [27812, 27836, 27835], [27812, 27813, 27837], [27812, 27837, 27836], [27813, 27814, 27837], [27814, 27838, 27837], [27814, 27815, 27839], [27814, 27839, 27838], [27815, 27816, 27839], [27816, 27840, 27839], [27816, 27817, 27841], [27816, 27841, 27840], [27817, 27818, 27841], [27818, 27842, 27841], [27818, 27819, 27843], [27818, 27843, 27842], [12171, 27820, 27844], [12171, 27844, 12300], [27820, 27821, 27844], [27821, 27845, 27844], [27821, 27822, 27846], [27821, 27846, 27845], [27822, 27823, 27846], [27823, 27847, 27846], [27823, 27824, 27848], [27823, 27848, 27847], [27824, 27825, 27848], [27825, 27849, 27848], [27825, 27826, 27850], [27825, 27850, 27849], [27826, 27827, 27850], [27827, 27851, 27850], [27827, 27828, 27852], [27827, 27852, 27851], [27828, 27829, 27852], [27829, 27853, 27852], [27829, 27830, 27854], [27829, 27854, 27853], [27830, 27831, 27854], [27831, 27855, 27854], [27831, 27832, 27856], [27831, 27856, 27855], [27832, 27833, 27856], [27833, 27857, 27856], [27833, 27834, 27858], [27833, 27858, 27857], [27834, 27835, 27858], [27835, 27859, 27858], [27835, 27836, 27860], [27835, 27860, 27859], [27836, 27837, 27860], [27837, 27861, 27860], [27837, 27838, 27862], [27837, 27862, 27861], [27838, 27839, 27862], [27839, 27863, 27862], [27839, 27840, 27864], [27839, 27864, 27863], [27840, 27841, 27864], [27841, 27865, 27864], [27841, 27842, 27866], [27841, 27866, 27865], [27842, 27843, 27866], [27843, 27867, 27866], [12300, 27844, 12429], [27844, 27868, 12429], [27844, 27845, 27869], [27844, 27869, 27868], [27845, 27846, 27869], [27846, 27870, 27869], [27846, 27847, 27871], [27846, 27871, 27870], [27847, 27848, 27871], [27848, 27872, 27871], [27848, 27849, 27873], [27848, 27873, 27872], [27849, 27850, 27873], [27850, 27874, 27873], [27850, 27851, 27875], [27850, 27875, 27874], [27851, 27852, 27875], [27852, 27876, 27875], [27852, 27853, 27877], [27852, 27877, 27876], [27853, 27854, 27877], [27854, 27878, 27877], [27854, 27855, 27879], [27854, 27879, 27878], [27855, 27856, 27879], [27856, 27880, 27879], [27856, 27857, 27881], [27856, 27881, 27880], [27857, 27858, 27881], [27858, 27882, 27881], [27858, 27859, 27883], [27858, 27883, 27882], [27859, 27860, 27883], [27860, 27884, 
27883], [27860, 27861, 27885], [27860, 27885, 27884], [27861, 27862, 27885], [27862, 27886, 27885], [27862, 27863, 27887], [27862, 27887, 27886], [27863, 27864, 27887], [27864, 27888, 27887], [27864, 27865, 27889], [27864, 27889, 27888], [27865, 27866, 27889], [27866, 27890, 27889], [27866, 27867, 27891], [27866, 27891, 27890], [12429, 27868, 27892], [12429, 27892, 12558], [27868, 27869, 27892], [27869, 27893, 27892], [27869, 27870, 27894], [27869, 27894, 27893], [27870, 27871, 27894], [27871, 27895, 27894], [27871, 27872, 27896], [27871, 27896, 27895], [27872, 27873, 27896], [27873, 27897, 27896], [27873, 27874, 27898], [27873, 27898, 27897], [27874, 27875, 27898], [27875, 27899, 27898], [27875, 27876, 27900], [27875, 27900, 27899], [27876, 27877, 27900], [27877, 27901, 27900], [27877, 27878, 27902], [27877, 27902, 27901], [27878, 27879, 27902], [27879, 27903, 27902], [27879, 27880, 27904], [27879, 27904, 27903], [27880, 27881, 27904], [27881, 27905, 27904], [27881, 27882, 27906], [27881, 27906, 27905], [27882, 27883, 27906], [27883, 27907, 27906], [27883, 27884, 27908], [27883, 27908, 27907], [27884, 27885, 27908], [27885, 27909, 27908], [27885, 27886, 27910], [27885, 27910, 27909], [27886, 27887, 27910], [27887, 27911, 27910], [27887, 27888, 27912], [27887, 27912, 27911], [27888, 27889, 27912], [27889, 27913, 27912], [27889, 27890, 27914], [27889, 27914, 27913], [27890, 27891, 27914], [27891, 27915, 27914], [12558, 27892, 12687], [27892, 27916, 12687], [27892, 27893, 27917], [27892, 27917, 27916], [27893, 27894, 27917], [27894, 27918, 27917], [27894, 27895, 27919], [27894, 27919, 27918], [27895, 27896, 27919], [27896, 27920, 27919], [27896, 27897, 27921], [27896, 27921, 27920], [27897, 27898, 27921], [27898, 27922, 27921], [27898, 27899, 27923], [27898, 27923, 27922], [27899, 27900, 27923], [27900, 27924, 27923], [27900, 27901, 27925], [27900, 27925, 27924], [27901, 27902, 27925], [27902, 27926, 27925], [27902, 27903, 27927], [27902, 27927, 27926], [27903, 27904, 27927], [27904, 27928, 27927], [27904, 27905, 27929], [27904, 27929, 27928], [27905, 27906, 27929], [27906, 27930, 27929], [27906, 27907, 27931], [27906, 27931, 27930], [27907, 27908, 27931], [27908, 27932, 27931], [27908, 27909, 27933], [27908, 27933, 27932], [27909, 27910, 27933], [27910, 27934, 27933], [27910, 27911, 27935], [27910, 27935, 27934], [27911, 27912, 27935], [27912, 27936, 27935], [27912, 27913, 27937], [27912, 27937, 27936], [27913, 27914, 27937], [27914, 27938, 27937], [27914, 27915, 27939], [27914, 27939, 27938], [12687, 27916, 27940], [12687, 27940, 12816], [27916, 27917, 27940], [27917, 27941, 27940], [27917, 27918, 27942], [27917, 27942, 27941], [27918, 27919, 27942], [27919, 27943, 27942], [27919, 27920, 27944], [27919, 27944, 27943], [27920, 27921, 27944], [27921, 27945, 27944], [27921, 27922, 27946], [27921, 27946, 27945], [27922, 27923, 27946], [27923, 27947, 27946], [27923, 27924, 27948], [27923, 27948, 27947], [27924, 27925, 27948], [27925, 27949, 27948], [27925, 27926, 27950], [27925, 27950, 27949], [27926, 27927, 27950], [27927, 27951, 27950], [27927, 27928, 27952], [27927, 27952, 27951], [27928, 27929, 27952], [27929, 27953, 27952], [27929, 27930, 27954], [27929, 27954, 27953], [27930, 27931, 27954], [27931, 27955, 27954], [27931, 27932, 27956], [27931, 27956, 27955], [27932, 27933, 27956], [27933, 27957, 27956], [27933, 27934, 27958], [27933, 27958, 27957], [27934, 27935, 27958], [27935, 27959, 27958], [27935, 27936, 27960], [27935, 27960, 27959], [27936, 27937, 27960], [27937, 27961, 27960], 
[27937, 27938, 27962], [27937, 27962, 27961], [27938, 27939, 27962], [27939, 27963, 27962], [12816, 27940, 12945], [27940, 27964, 12945], [27940, 27941, 27965], [27940, 27965, 27964], [27941, 27942, 27965], [27942, 27966, 27965], [27942, 27943, 27967], [27942, 27967, 27966], [27943, 27944, 27967], [27944, 27968, 27967], [27944, 27945, 27969], [27944, 27969, 27968], [27945, 27946, 27969], [27946, 27970, 27969], [27946, 27947, 27971], [27946, 27971, 27970], [27947, 27948, 27971], [27948, 27972, 27971], [27948, 27949, 27973], [27948, 27973, 27972], [27949, 27950, 27973], [27950, 27974, 27973], [27950, 27951, 27975], [27950, 27975, 27974], [27951, 27952, 27975], [27952, 27976, 27975], [27952, 27953, 27977], [27952, 27977, 27976], [27953, 27954, 27977], [27954, 27978, 27977], [27954, 27955, 27979], [27954, 27979, 27978], [27955, 27956, 27979], [27956, 27980, 27979], [27956, 27957, 27981], [27956, 27981, 27980], [27957, 27958, 27981], [27958, 27982, 27981], [27958, 27959, 27983], [27958, 27983, 27982], [27959, 27960, 27983], [27960, 27984, 27983], [27960, 27961, 27985], [27960, 27985, 27984], [27961, 27962, 27985], [27962, 27986, 27985], [27962, 27963, 27987], [27962, 27987, 27986], [12945, 27964, 27988], [12945, 27988, 13074], [27964, 27965, 27988], [27965, 27989, 27988], [27965, 27966, 27990], [27965, 27990, 27989], [27966, 27967, 27990], [27967, 27991, 27990], [27967, 27968, 27992], [27967, 27992, 27991], [27968, 27969, 27992], [27969, 27993, 27992], [27969, 27970, 27994], [27969, 27994, 27993], [27970, 27971, 27994], [27971, 27995, 27994], [27971, 27972, 27996], [27971, 27996, 27995], [27972, 27973, 27996], [27973, 27997, 27996], [27973, 27974, 27998], [27973, 27998, 27997], [27974, 27975, 27998], [27975, 27999, 27998], [27975, 27976, 28000], [27975, 28000, 27999], [27976, 27977, 28000], [27977, 28001, 28000], [27977, 27978, 28002], [27977, 28002, 28001], [27978, 27979, 28002], [27979, 28003, 28002], [27979, 27980, 28004], [27979, 28004, 28003], [27980, 27981, 28004], [27981, 28005, 28004], [27981, 27982, 28006], [27981, 28006, 28005], [27982, 27983, 28006], [27983, 28007, 28006], [27983, 27984, 28008], [27983, 28008, 28007], [27984, 27985, 28008], [27985, 28009, 28008], [27985, 27986, 28010], [27985, 28010, 28009], [27986, 27987, 28010], [27987, 28011, 28010], [13074, 27988, 13203], [27988, 28012, 13203], [27988, 27989, 28013], [27988, 28013, 28012], [27989, 27990, 28013], [27990, 28014, 28013], [27990, 27991, 28015], [27990, 28015, 28014], [27991, 27992, 28015], [27992, 28016, 28015], [27992, 27993, 28017], [27992, 28017, 28016], [27993, 27994, 28017], [27994, 28018, 28017], [27994, 27995, 28019], [27994, 28019, 28018], [27995, 27996, 28019], [27996, 28020, 28019], [27996, 27997, 28021], [27996, 28021, 28020], [27997, 27998, 28021], [27998, 28022, 28021], [27998, 27999, 28023], [27998, 28023, 28022], [27999, 28000, 28023], [28000, 28024, 28023], [28000, 28001, 28025], [28000, 28025, 28024], [28001, 28002, 28025], [28002, 28026, 28025], [28002, 28003, 28027], [28002, 28027, 28026], [28003, 28004, 28027], [28004, 28028, 28027], [28004, 28005, 28029], [28004, 28029, 28028], [28005, 28006, 28029], [28006, 28030, 28029], [28006, 28007, 28031], [28006, 28031, 28030], [28007, 28008, 28031], [28008, 28032, 28031], [28008, 28009, 28033], [28008, 28033, 28032], [28009, 28010, 28033], [28010, 28034, 28033], [28010, 28011, 28035], [28010, 28035, 28034], [13203, 28012, 28036], [13203, 28036, 13332], [28012, 28013, 28036], [28013, 28037, 28036], [28013, 28014, 28038], [28013, 28038, 28037], [28014, 
28015, 28038], [28015, 28039, 28038], [28015, 28016, 28040], [28015, 28040, 28039], [28016, 28017, 28040], [28017, 28041, 28040], [28017, 28018, 28042], [28017, 28042, 28041], [28018, 28019, 28042], [28019, 28043, 28042], [28019, 28020, 28044], [28019, 28044, 28043], [28020, 28021, 28044], [28021, 28045, 28044], [28021, 28022, 28046], [28021, 28046, 28045], [28022, 28023, 28046], [28023, 28047, 28046], [28023, 28024, 28048], [28023, 28048, 28047], [28024, 28025, 28048], [28025, 28049, 28048], [28025, 28026, 28050], [28025, 28050, 28049], [28026, 28027, 28050], [28027, 28051, 28050], [28027, 28028, 28052], [28027, 28052, 28051], [28028, 28029, 28052], [28029, 28053, 28052], [28029, 28030, 28054], [28029, 28054, 28053], [28030, 28031, 28054], [28031, 28055, 28054], [28031, 28032, 28056], [28031, 28056, 28055], [28032, 28033, 28056], [28033, 28057, 28056], [28033, 28034, 28058], [28033, 28058, 28057], [28034, 28035, 28058], [28035, 28059, 28058], [13332, 28036, 13461], [28036, 28060, 13461], [28036, 28037, 28061], [28036, 28061, 28060], [28037, 28038, 28061], [28038, 28062, 28061], [28038, 28039, 28063], [28038, 28063, 28062], [28039, 28040, 28063], [28040, 28064, 28063], [28040, 28041, 28065], [28040, 28065, 28064], [28041, 28042, 28065], [28042, 28066, 28065], [28042, 28043, 28067], [28042, 28067, 28066], [28043, 28044, 28067], [28044, 28068, 28067], [28044, 28045, 28069], [28044, 28069, 28068], [28045, 28046, 28069], [28046, 28070, 28069], [28046, 28047, 28071], [28046, 28071, 28070], [28047, 28048, 28071], [28048, 28072, 28071], [28048, 28049, 28073], [28048, 28073, 28072], [28049, 28050, 28073], [28050, 28074, 28073], [28050, 28051, 28075], [28050, 28075, 28074], [28051, 28052, 28075], [28052, 28076, 28075], [28052, 28053, 28077], [28052, 28077, 28076], [28053, 28054, 28077], [28054, 28078, 28077], [28054, 28055, 28079], [28054, 28079, 28078], [28055, 28056, 28079], [28056, 28080, 28079], [28056, 28057, 28081], [28056, 28081, 28080], [28057, 28058, 28081], [28058, 28082, 28081], [28058, 28059, 28083], [28058, 28083, 28082], [13461, 28060, 28084], [13461, 28084, 13590], [28060, 28061, 28084], [28061, 28085, 28084], [28061, 28062, 28086], [28061, 28086, 28085], [28062, 28063, 28086], [28063, 28087, 28086], [28063, 28064, 28088], [28063, 28088, 28087], [28064, 28065, 28088], [28065, 28089, 28088], [28065, 28066, 28090], [28065, 28090, 28089], [28066, 28067, 28090], [28067, 28091, 28090], [28067, 28068, 28092], [28067, 28092, 28091], [28068, 28069, 28092], [28069, 28093, 28092], [28069, 28070, 28094], [28069, 28094, 28093], [28070, 28071, 28094], [28071, 28095, 28094], [28071, 28072, 28096], [28071, 28096, 28095], [28072, 28073, 28096], [28073, 28097, 28096], [28073, 28074, 28098], [28073, 28098, 28097], [28074, 28075, 28098], [28075, 28099, 28098], [28075, 28076, 28100], [28075, 28100, 28099], [28076, 28077, 28100], [28077, 28101, 28100], [28077, 28078, 28102], [28077, 28102, 28101], [28078, 28079, 28102], [28079, 28103, 28102], [28079, 28080, 28104], [28079, 28104, 28103], [28080, 28081, 28104], [28081, 28105, 28104], [28081, 28082, 28106], [28081, 28106, 28105], [28082, 28083, 28106], [28083, 28107, 28106], [13590, 28084, 13719], [28084, 28108, 13719], [28084, 28085, 28109], [28084, 28109, 28108], [28085, 28086, 28109], [28086, 28110, 28109], [28086, 28087, 28111], [28086, 28111, 28110], [28087, 28088, 28111], [28088, 28112, 28111], [28088, 28089, 28113], [28088, 28113, 28112], [28089, 28090, 28113], [28090, 28114, 28113], [28090, 28091, 28115], [28090, 28115, 28114], [28091, 28092, 
28115], [28092, 28116, 28115], [28092, 28093, 28117], [28092, 28117, 28116], [28093, 28094, 28117], [28094, 28118, 28117], [28094, 28095, 28119], [28094, 28119, 28118], [28095, 28096, 28119], [28096, 28120, 28119], [28096, 28097, 28121], [28096, 28121, 28120], [28097, 28098, 28121], [28098, 28122, 28121], [28098, 28099, 28123], [28098, 28123, 28122], [28099, 28100, 28123], [28100, 28124, 28123], [28100, 28101, 28125], [28100, 28125, 28124], [28101, 28102, 28125], [28102, 28126, 28125], [28102, 28103, 28127], [28102, 28127, 28126], [28103, 28104, 28127], [28104, 28128, 28127], [28104, 28105, 28129], [28104, 28129, 28128], [28105, 28106, 28129], [28106, 28130, 28129], [28106, 28107, 28131], [28106, 28131, 28130], [13719, 28108, 28132], [13719, 28132, 13848], [28108, 28109, 28132], [28109, 28133, 28132], [28109, 28110, 28134], [28109, 28134, 28133], [28110, 28111, 28134], [28111, 28135, 28134], [28111, 28112, 28136], [28111, 28136, 28135], [28112, 28113, 28136], [28113, 28137, 28136], [28113, 28114, 28138], [28113, 28138, 28137], [28114, 28115, 28138], [28115, 28139, 28138], [28115, 28116, 28140], [28115, 28140, 28139], [28116, 28117, 28140], [28117, 28141, 28140], [28117, 28118, 28142], [28117, 28142, 28141], [28118, 28119, 28142], [28119, 28143, 28142], [28119, 28120, 28144], [28119, 28144, 28143], [28120, 28121, 28144], [28121, 28145, 28144], [28121, 28122, 28146], [28121, 28146, 28145], [28122, 28123, 28146], [28123, 28147, 28146], [28123, 28124, 28148], [28123, 28148, 28147], [28124, 28125, 28148], [28125, 28149, 28148], [28125, 28126, 28150], [28125, 28150, 28149], [28126, 28127, 28150], [28127, 28151, 28150], [28127, 28128, 28152], [28127, 28152, 28151], [28128, 28129, 28152], [28129, 28153, 28152], [28129, 28130, 28154], [28129, 28154, 28153], [28130, 28131, 28154], [28131, 28155, 28154], [13848, 28132, 13977], [28132, 28156, 13977], [28132, 28133, 28157], [28132, 28157, 28156], [28133, 28134, 28157], [28134, 28158, 28157], [28134, 28135, 28159], [28134, 28159, 28158], [28135, 28136, 28159], [28136, 28160, 28159], [28136, 28137, 28161], [28136, 28161, 28160], [28137, 28138, 28161], [28138, 28162, 28161], [28138, 28139, 28163], [28138, 28163, 28162], [28139, 28140, 28163], [28140, 28164, 28163], [28140, 28141, 28165], [28140, 28165, 28164], [28141, 28142, 28165], [28142, 28166, 28165], [28142, 28143, 28167], [28142, 28167, 28166], [28143, 28144, 28167], [28144, 28168, 28167], [28144, 28145, 28169], [28144, 28169, 28168], [28145, 28146, 28169], [28146, 28170, 28169], [28146, 28147, 28171], [28146, 28171, 28170], [28147, 28148, 28171], [28148, 28172, 28171], [28148, 28149, 28173], [28148, 28173, 28172], [28149, 28150, 28173], [28150, 28174, 28173], [28150, 28151, 28175], [28150, 28175, 28174], [28151, 28152, 28175], [28152, 28176, 28175], [28152, 28153, 28177], [28152, 28177, 28176], [28153, 28154, 28177], [28154, 28178, 28177], [28154, 28155, 28179], [28154, 28179, 28178], [13977, 28156, 28180], [13977, 28180, 14106], [28156, 28157, 28180], [28157, 28181, 28180], [28157, 28158, 28182], [28157, 28182, 28181], [28158, 28159, 28182], [28159, 28183, 28182], [28159, 28160, 28184], [28159, 28184, 28183], [28160, 28161, 28184], [28161, 28185, 28184], [28161, 28162, 28186], [28161, 28186, 28185], [28162, 28163, 28186], [28163, 28187, 28186], [28163, 28164, 28188], [28163, 28188, 28187], [28164, 28165, 28188], [28165, 28189, 28188], [28165, 28166, 28190], [28165, 28190, 28189], [28166, 28167, 28190], [28167, 28191, 28190], [28167, 28168, 28192], [28167, 28192, 28191], [28168, 28169, 28192], 
[28169, 28193, 28192], [28169, 28170, 28194], [28169, 28194, 28193], [28170, 28171, 28194], [28171, 28195, 28194], [28171, 28172, 28196], [28171, 28196, 28195], [28172, 28173, 28196], [28173, 28197, 28196], [28173, 28174, 28198], [28173, 28198, 28197], [28174, 28175, 28198], [28175, 28199, 28198], [28175, 28176, 28200], [28175, 28200, 28199], [28176, 28177, 28200], [28177, 28201, 28200], [28177, 28178, 28202], [28177, 28202, 28201], [28178, 28179, 28202], [28179, 28203, 28202], [14106, 28180, 14235], [28180, 28204, 14235], [28180, 28181, 28205], [28180, 28205, 28204], [28181, 28182, 28205], [28182, 28206, 28205], [28182, 28183, 28207], [28182, 28207, 28206], [28183, 28184, 28207], [28184, 28208, 28207], [28184, 28185, 28209], [28184, 28209, 28208], [28185, 28186, 28209], [28186, 28210, 28209], [28186, 28187, 28211], [28186, 28211, 28210], [28187, 28188, 28211], [28188, 28212, 28211], [28188, 28189, 28213], [28188, 28213, 28212], [28189, 28190, 28213], [28190, 28214, 28213], [28190, 28191, 28215], [28190, 28215, 28214], [28191, 28192, 28215], [28192, 28216, 28215], [28192, 28193, 28217], [28192, 28217, 28216], [28193, 28194, 28217], [28194, 28218, 28217], [28194, 28195, 28219], [28194, 28219, 28218], [28195, 28196, 28219], [28196, 28220, 28219], [28196, 28197, 28221], [28196, 28221, 28220], [28197, 28198, 28221], [28198, 28222, 28221], [28198, 28199, 28223], [28198, 28223, 28222], [28199, 28200, 28223], [28200, 28224, 28223], [28200, 28201, 28225], [28200, 28225, 28224], [28201, 28202, 28225], [28202, 28226, 28225], [28202, 28203, 28227], [28202, 28227, 28226], [14235, 28204, 28228], [14235, 28228, 14364], [28204, 28205, 28228], [28205, 28229, 28228], [28205, 28206, 28230], [28205, 28230, 28229], [28206, 28207, 28230], [28207, 28231, 28230], [28207, 28208, 28232], [28207, 28232, 28231], [28208, 28209, 28232], [28209, 28233, 28232], [28209, 28210, 28234], [28209, 28234, 28233], [28210, 28211, 28234], [28211, 28235, 28234], [28211, 28212, 28236], [28211, 28236, 28235], [28212, 28213, 28236], [28213, 28237, 28236], [28213, 28214, 28238], [28213, 28238, 28237], [28214, 28215, 28238], [28215, 28239, 28238], [28215, 28216, 28240], [28215, 28240, 28239], [28216, 28217, 28240], [28217, 28241, 28240], [28217, 28218, 28242], [28217, 28242, 28241], [28218, 28219, 28242], [28219, 28243, 28242], [28219, 28220, 28244], [28219, 28244, 28243], [28220, 28221, 28244], [28221, 28245, 28244], [28221, 28222, 28246], [28221, 28246, 28245], [28222, 28223, 28246], [28223, 28247, 28246], [28223, 28224, 28248], [28223, 28248, 28247], [28224, 28225, 28248], [28225, 28249, 28248], [28225, 28226, 28250], [28225, 28250, 28249], [28226, 28227, 28250], [28227, 28251, 28250], [14364, 28228, 14493], [28228, 28252, 14493], [28228, 28229, 28253], [28228, 28253, 28252], [28229, 28230, 28253], [28230, 28254, 28253], [28230, 28231, 28255], [28230, 28255, 28254], [28231, 28232, 28255], [28232, 28256, 28255], [28232, 28233, 28257], [28232, 28257, 28256], [28233, 28234, 28257], [28234, 28258, 28257], [28234, 28235, 28259], [28234, 28259, 28258], [28235, 28236, 28259], [28236, 28260, 28259], [28236, 28237, 28261], [28236, 28261, 28260], [28237, 28238, 28261], [28238, 28262, 28261], [28238, 28239, 28263], [28238, 28263, 28262], [28239, 28240, 28263], [28240, 28264, 28263], [28240, 28241, 28265], [28240, 28265, 28264], [28241, 28242, 28265], [28242, 28266, 28265], [28242, 28243, 28267], [28242, 28267, 28266], [28243, 28244, 28267], [28244, 28268, 28267], [28244, 28245, 28269], [28244, 28269, 28268], [28245, 28246, 28269], [28246, 
28270, 28269], [28246, 28247, 28271], [28246, 28271, 28270], [28247, 28248, 28271], [28248, 28272, 28271], [28248, 28249, 28273], [28248, 28273, 28272], [28249, 28250, 28273], [28250, 28274, 28273], [28250, 28251, 28275], [28250, 28275, 28274], [14493, 28252, 28276], [14493, 28276, 14622], [28252, 28253, 28276], [28253, 28277, 28276], [28253, 28254, 28278], [28253, 28278, 28277], [28254, 28255, 28278], [28255, 28279, 28278], [28255, 28256, 28280], [28255, 28280, 28279], [28256, 28257, 28280], [28257, 28281, 28280], [28257, 28258, 28282], [28257, 28282, 28281], [28258, 28259, 28282], [28259, 28283, 28282], [28259, 28260, 28284], [28259, 28284, 28283], [28260, 28261, 28284], [28261, 28285, 28284], [28261, 28262, 28286], [28261, 28286, 28285], [28262, 28263, 28286], [28263, 28287, 28286], [28263, 28264, 28288], [28263, 28288, 28287], [28264, 28265, 28288], [28265, 28289, 28288], [28265, 28266, 28290], [28265, 28290, 28289], [28266, 28267, 28290], [28267, 28291, 28290], [28267, 28268, 28292], [28267, 28292, 28291], [28268, 28269, 28292], [28269, 28293, 28292], [28269, 28270, 28294], [28269, 28294, 28293], [28270, 28271, 28294], [28271, 28295, 28294], [28271, 28272, 28296], [28271, 28296, 28295], [28272, 28273, 28296], [28273, 28297, 28296], [28273, 28274, 28298], [28273, 28298, 28297], [28274, 28275, 28298], [28275, 28299, 28298], [14622, 28276, 14751], [28276, 28300, 14751], [28276, 28277, 28301], [28276, 28301, 28300], [28277, 28278, 28301], [28278, 28302, 28301], [28278, 28279, 28303], [28278, 28303, 28302], [28279, 28280, 28303], [28280, 28304, 28303], [28280, 28281, 28305], [28280, 28305, 28304], [28281, 28282, 28305], [28282, 28306, 28305], [28282, 28283, 28307], [28282, 28307, 28306], [28283, 28284, 28307], [28284, 28308, 28307], [28284, 28285, 28309], [28284, 28309, 28308], [28285, 28286, 28309], [28286, 28310, 28309], [28286, 28287, 28311], [28286, 28311, 28310], [28287, 28288, 28311], [28288, 28312, 28311], [28288, 28289, 28313], [28288, 28313, 28312], [28289, 28290, 28313], [28290, 28314, 28313], [28290, 28291, 28315], [28290, 28315, 28314], [28291, 28292, 28315], [28292, 28316, 28315], [28292, 28293, 28317], [28292, 28317, 28316], [28293, 28294, 28317], [28294, 28318, 28317], [28294, 28295, 28319], [28294, 28319, 28318], [28295, 28296, 28319], [28296, 28320, 28319], [28296, 28297, 28321], [28296, 28321, 28320], [28297, 28298, 28321], [28298, 28322, 28321], [28298, 28299, 28323], [28298, 28323, 28322], [14751, 28300, 28324], [14751, 28324, 14880], [28300, 28301, 28324], [28301, 28325, 28324], [28301, 28302, 28326], [28301, 28326, 28325], [28302, 28303, 28326], [28303, 28327, 28326], [28303, 28304, 28328], [28303, 28328, 28327], [28304, 28305, 28328], [28305, 28329, 28328], [28305, 28306, 28330], [28305, 28330, 28329], [28306, 28307, 28330], [28307, 28331, 28330], [28307, 28308, 28332], [28307, 28332, 28331], [28308, 28309, 28332], [28309, 28333, 28332], [28309, 28310, 28334], [28309, 28334, 28333], [28310, 28311, 28334], [28311, 28335, 28334], [28311, 28312, 28336], [28311, 28336, 28335], [28312, 28313, 28336], [28313, 28337, 28336], [28313, 28314, 28338], [28313, 28338, 28337], [28314, 28315, 28338], [28315, 28339, 28338], [28315, 28316, 28340], [28315, 28340, 28339], [28316, 28317, 28340], [28317, 28341, 28340], [28317, 28318, 28342], [28317, 28342, 28341], [28318, 28319, 28342], [28319, 28343, 28342], [28319, 28320, 28344], [28319, 28344, 28343], [28320, 28321, 28344], [28321, 28345, 28344], [28321, 28322, 28346], [28321, 28346, 28345], [28322, 28323, 28346], [28323, 28347, 
28346], [14880, 28324, 15009], [28324, 28348, 15009], [28324, 28325, 28349], [28324, 28349, 28348], [28325, 28326, 28349], [28326, 28350, 28349], [28326, 28327, 28351], [28326, 28351, 28350], [28327, 28328, 28351], [28328, 28352, 28351], [28328, 28329, 28353], [28328, 28353, 28352], [28329, 28330, 28353], [28330, 28354, 28353], [28330, 28331, 28355], [28330, 28355, 28354], [28331, 28332, 28355], [28332, 28356, 28355], [28332, 28333, 28357], [28332, 28357, 28356], [28333, 28334, 28357], [28334, 28358, 28357], [28334, 28335, 28359], [28334, 28359, 28358], [28335, 28336, 28359], [28336, 28360, 28359], [28336, 28337, 28361], [28336, 28361, 28360], [28337, 28338, 28361], [28338, 28362, 28361], [28338, 28339, 28363], [28338, 28363, 28362], [28339, 28340, 28363], [28340, 28364, 28363], [28340, 28341, 28365], [28340, 28365, 28364], [28341, 28342, 28365], [28342, 28366, 28365], [28342, 28343, 28367], [28342, 28367, 28366], [28343, 28344, 28367], [28344, 28368, 28367], [28344, 28345, 28369], [28344, 28369, 28368], [28345, 28346, 28369], [28346, 28370, 28369], [28346, 28347, 28371], [28346, 28371, 28370], [15009, 28348, 28372], [15009, 28372, 15138], [28348, 28349, 28372], [28349, 28373, 28372], [28349, 28350, 28374], [28349, 28374, 28373], [28350, 28351, 28374], [28351, 28375, 28374], [28351, 28352, 28376], [28351, 28376, 28375], [28352, 28353, 28376], [28353, 28377, 28376], [28353, 28354, 28378], [28353, 28378, 28377], [28354, 28355, 28378], [28355, 28379, 28378], [28355, 28356, 28380], [28355, 28380, 28379], [28356, 28357, 28380], [28357, 28381, 28380], [28357, 28358, 28382], [28357, 28382, 28381], [28358, 28359, 28382], [28359, 28383, 28382], [28359, 28360, 28384], [28359, 28384, 28383], [28360, 28361, 28384], [28361, 28385, 28384], [28361, 28362, 28386], [28361, 28386, 28385], [28362, 28363, 28386], [28363, 28387, 28386], [28363, 28364, 28388], [28363, 28388, 28387], [28364, 28365, 28388], [28365, 28389, 28388], [28365, 28366, 28390], [28365, 28390, 28389], [28366, 28367, 28390], [28367, 28391, 28390], [28367, 28368, 28392], [28367, 28392, 28391], [28368, 28369, 28392], [28369, 28393, 28392], [28369, 28370, 28394], [28369, 28394, 28393], [28370, 28371, 28394], [28371, 28395, 28394], [15138, 28372, 15267], [28372, 28396, 15267], [28372, 28373, 28397], [28372, 28397, 28396], [28373, 28374, 28397], [28374, 28398, 28397], [28374, 28375, 28399], [28374, 28399, 28398], [28375, 28376, 28399], [28376, 28400, 28399], [28376, 28377, 28401], [28376, 28401, 28400], [28377, 28378, 28401], [28378, 28402, 28401], [28378, 28379, 28403], [28378, 28403, 28402], [28379, 28380, 28403], [28380, 28404, 28403], [28380, 28381, 28405], [28380, 28405, 28404], [28381, 28382, 28405], [28382, 28406, 28405], [28382, 28383, 28407], [28382, 28407, 28406], [28383, 28384, 28407], [28384, 28408, 28407], [28384, 28385, 28409], [28384, 28409, 28408], [28385, 28386, 28409], [28386, 28410, 28409], [28386, 28387, 28411], [28386, 28411, 28410], [28387, 28388, 28411], [28388, 28412, 28411], [28388, 28389, 28413], [28388, 28413, 28412], [28389, 28390, 28413], [28390, 28414, 28413], [28390, 28391, 28415], [28390, 28415, 28414], [28391, 28392, 28415], [28392, 28416, 28415], [28392, 28393, 28417], [28392, 28417, 28416], [28393, 28394, 28417], [28394, 28418, 28417], [28394, 28395, 28419], [28394, 28419, 28418], [15267, 28396, 28420], [15267, 28420, 15396], [28396, 28397, 28420], [28397, 28421, 28420], [28397, 28398, 28422], [28397, 28422, 28421], [28398, 28399, 28422], [28399, 28423, 28422], [28399, 28400, 28424], [28399, 28424, 28423], 
[28400, 28401, 28424], [28401, 28425, 28424], [28401, 28402, 28426], [28401, 28426, 28425], [28402, 28403, 28426], [28403, 28427, 28426], [28403, 28404, 28428], [28403, 28428, 28427], [28404, 28405, 28428], [28405, 28429, 28428], [28405, 28406, 28430], [28405, 28430, 28429], [28406, 28407, 28430], [28407, 28431, 28430], [28407, 28408, 28432], [28407, 28432, 28431], [28408, 28409, 28432], [28409, 28433, 28432], [28409, 28410, 28434], [28409, 28434, 28433], [28410, 28411, 28434], [28411, 28435, 28434], [28411, 28412, 28436], [28411, 28436, 28435], [28412, 28413, 28436], [28413, 28437, 28436], [28413, 28414, 28438], [28413, 28438, 28437], [28414, 28415, 28438], [28415, 28439, 28438], [28415, 28416, 28440], [28415, 28440, 28439], [28416, 28417, 28440], [28417, 28441, 28440], [28417, 28418, 28442], [28417, 28442, 28441], [28418, 28419, 28442], [28419, 28443, 28442], [15396, 28420, 15525], [28420, 28444, 15525], [28420, 28421, 28445], [28420, 28445, 28444], [28421, 28422, 28445], [28422, 28446, 28445], [28422, 28423, 28447], [28422, 28447, 28446], [28423, 28424, 28447], [28424, 28448, 28447], [28424, 28425, 28449], [28424, 28449, 28448], [28425, 28426, 28449], [28426, 28450, 28449], [28426, 28427, 28451], [28426, 28451, 28450], [28427, 28428, 28451], [28428, 28452, 28451], [28428, 28429, 28453], [28428, 28453, 28452], [28429, 28430, 28453], [28430, 28454, 28453], [28430, 28431, 28455], [28430, 28455, 28454], [28431, 28432, 28455], [28432, 28456, 28455], [28432, 28433, 28457], [28432, 28457, 28456], [28433, 28434, 28457], [28434, 28458, 28457], [28434, 28435, 28459], [28434, 28459, 28458], [28435, 28436, 28459], [28436, 28460, 28459], [28436, 28437, 28461], [28436, 28461, 28460], [28437, 28438, 28461], [28438, 28462, 28461], [28438, 28439, 28463], [28438, 28463, 28462], [28439, 28440, 28463], [28440, 28464, 28463], [28440, 28441, 28465], [28440, 28465, 28464], [28441, 28442, 28465], [28442, 28466, 28465], [28442, 28443, 28467], [28442, 28467, 28466], [15525, 28444, 28468], [15525, 28468, 15654], [28444, 28445, 28468], [28445, 28469, 28468], [28445, 28446, 28470], [28445, 28470, 28469], [28446, 28447, 28470], [28447, 28471, 28470], [28447, 28448, 28472], [28447, 28472, 28471], [28448, 28449, 28472], [28449, 28473, 28472], [28449, 28450, 28474], [28449, 28474, 28473], [28450, 28451, 28474], [28451, 28475, 28474], [28451, 28452, 28476], [28451, 28476, 28475], [28452, 28453, 28476], [28453, 28477, 28476], [28453, 28454, 28478], [28453, 28478, 28477], [28454, 28455, 28478], [28455, 28479, 28478], [28455, 28456, 28480], [28455, 28480, 28479], [28456, 28457, 28480], [28457, 28481, 28480], [28457, 28458, 28482], [28457, 28482, 28481], [28458, 28459, 28482], [28459, 28483, 28482], [28459, 28460, 28484], [28459, 28484, 28483], [28460, 28461, 28484], [28461, 28485, 28484], [28461, 28462, 28486], [28461, 28486, 28485], [28462, 28463, 28486], [28463, 28487, 28486], [28463, 28464, 28488], [28463, 28488, 28487], [28464, 28465, 28488], [28465, 28489, 28488], [28465, 28466, 28490], [28465, 28490, 28489], [28466, 28467, 28490], [28467, 28491, 28490], [15654, 28468, 15783], [28468, 28492, 15783], [28468, 28469, 28493], [28468, 28493, 28492], [28469, 28470, 28493], [28470, 28494, 28493], [28470, 28471, 28495], [28470, 28495, 28494], [28471, 28472, 28495], [28472, 28496, 28495], [28472, 28473, 28497], [28472, 28497, 28496], [28473, 28474, 28497], [28474, 28498, 28497], [28474, 28475, 28499], [28474, 28499, 28498], [28475, 28476, 28499], [28476, 28500, 28499], [28476, 28477, 28501], [28476, 28501, 28500], [28477, 
28478, 28501], [28478, 28502, 28501], [28478, 28479, 28503], [28478, 28503, 28502], [28479, 28480, 28503], [28480, 28504, 28503], [28480, 28481, 28505], [28480, 28505, 28504], [28481, 28482, 28505], [28482, 28506, 28505], [28482, 28483, 28507], [28482, 28507, 28506], [28483, 28484, 28507], [28484, 28508, 28507], [28484, 28485, 28509], [28484, 28509, 28508], [28485, 28486, 28509], [28486, 28510, 28509], [28486, 28487, 28511], [28486, 28511, 28510], [28487, 28488, 28511], [28488, 28512, 28511], [28488, 28489, 28513], [28488, 28513, 28512], [28489, 28490, 28513], [28490, 28514, 28513], [28490, 28491, 28515], [28490, 28515, 28514], [15783, 28492, 28516], [15783, 28516, 15912], [28492, 28493, 28516], [28493, 28517, 28516], [28493, 28494, 28518], [28493, 28518, 28517], [28494, 28495, 28518], [28495, 28519, 28518], [28495, 28496, 28520], [28495, 28520, 28519], [28496, 28497, 28520], [28497, 28521, 28520], [28497, 28498, 28522], [28497, 28522, 28521], [28498, 28499, 28522], [28499, 28523, 28522], [28499, 28500, 28524], [28499, 28524, 28523], [28500, 28501, 28524], [28501, 28525, 28524], [28501, 28502, 28526], [28501, 28526, 28525], [28502, 28503, 28526], [28503, 28527, 28526], [28503, 28504, 28528], [28503, 28528, 28527], [28504, 28505, 28528], [28505, 28529, 28528], [28505, 28506, 28530], [28505, 28530, 28529], [28506, 28507, 28530], [28507, 28531, 28530], [28507, 28508, 28532], [28507, 28532, 28531], [28508, 28509, 28532], [28509, 28533, 28532], [28509, 28510, 28534], [28509, 28534, 28533], [28510, 28511, 28534], [28511, 28535, 28534], [28511, 28512, 28536], [28511, 28536, 28535], [28512, 28513, 28536], [28513, 28537, 28536], [28513, 28514, 28538], [28513, 28538, 28537], [28514, 28515, 28538], [28515, 28539, 28538], [15912, 28516, 16041], [28516, 28540, 16041], [28516, 28517, 28541], [28516, 28541, 28540], [28517, 28518, 28541], [28518, 28542, 28541], [28518, 28519, 28543], [28518, 28543, 28542], [28519, 28520, 28543], [28520, 28544, 28543], [28520, 28521, 28545], [28520, 28545, 28544], [28521, 28522, 28545], [28522, 28546, 28545], [28522, 28523, 28547], [28522, 28547, 28546], [28523, 28524, 28547], [28524, 28548, 28547], [28524, 28525, 28549], [28524, 28549, 28548], [28525, 28526, 28549], [28526, 28550, 28549], [28526, 28527, 28551], [28526, 28551, 28550], [28527, 28528, 28551], [28528, 28552, 28551], [28528, 28529, 28553], [28528, 28553, 28552], [28529, 28530, 28553], [28530, 28554, 28553], [28530, 28531, 28555], [28530, 28555, 28554], [28531, 28532, 28555], [28532, 28556, 28555], [28532, 28533, 28557], [28532, 28557, 28556], [28533, 28534, 28557], [28534, 28558, 28557], [28534, 28535, 28559], [28534, 28559, 28558], [28535, 28536, 28559], [28536, 28560, 28559], [28536, 28537, 28561], [28536, 28561, 28560], [28537, 28538, 28561], [28538, 28562, 28561], [28538, 28539, 28563], [28538, 28563, 28562], [16041, 28540, 28564], [16041, 28564, 16170], [28540, 28541, 28564], [28541, 28565, 28564], [28541, 28542, 28566], [28541, 28566, 28565], [28542, 28543, 28566], [28543, 28567, 28566], [28543, 28544, 28568], [28543, 28568, 28567], [28544, 28545, 28568], [28545, 28569, 28568], [28545, 28546, 28570], [28545, 28570, 28569], [28546, 28547, 28570], [28547, 28571, 28570], [28547, 28548, 28572], [28547, 28572, 28571], [28548, 28549, 28572], [28549, 28573, 28572], [28549, 28550, 28574], [28549, 28574, 28573], [28550, 28551, 28574], [28551, 28575, 28574], [28551, 28552, 28576], [28551, 28576, 28575], [28552, 28553, 28576], [28553, 28577, 28576], [28553, 28554, 28578], [28553, 28578, 28577], [28554, 28555, 
28578], [28555, 28579, 28578], [28555, 28556, 28580], [28555, 28580, 28579], [28556, 28557, 28580], [28557, 28581, 28580], [28557, 28558, 28582], [28557, 28582, 28581], [28558, 28559, 28582], [28559, 28583, 28582], [28559, 28560, 28584], [28559, 28584, 28583], [28560, 28561, 28584], [28561, 28585, 28584], [28561, 28562, 28586], [28561, 28586, 28585], [28562, 28563, 28586], [28563, 28587, 28586], [16170, 28564, 19395], [28564, 19524, 19395], [28564, 28565, 19653], [28564, 19653, 19524], [28565, 28566, 19653], [28566, 19782, 19653], [28566, 28567, 19911], [28566, 19911, 19782], [28567, 28568, 19911], [28568, 20040, 19911], [28568, 28569, 20169], [28568, 20169, 20040], [28569, 28570, 20169], [28570, 20298, 20169], [28570, 28571, 20427], [28570, 20427, 20298], [28571, 28572, 20427], [28572, 20556, 20427], [28572, 28573, 20685], [28572, 20685, 20556], [28573, 28574, 20685], [28574, 20814, 20685], [28574, 28575, 20943], [28574, 20943, 20814], [28575, 28576, 20943], [28576, 21072, 20943], [28576, 28577, 21201], [28576, 21201, 21072], [28577, 28578, 21201], [28578, 21330, 21201], [28578, 28579, 21459], [28578, 21459, 21330], [28579, 28580, 21459], [28580, 21588, 21459], [28580, 28581, 21717], [28580, 21717, 21588], [28581, 28582, 21717], [28582, 21846, 21717], [28582, 28583, 21975], [28582, 21975, 21846], [28583, 28584, 21975], [28584, 22104, 21975], [28584, 28585, 22233], [28584, 22233, 22104], [28585, 28586, 22233], [28586, 22362, 22233], [28586, 28587, 22491], [28586, 22491, 22362]], + "pointData": [[0.29343634843826294, 0.7976875305175781], [0.29338526725769043, 0.7971418499946594], [0.29320669174194336, 0.7957028150558472], [0.2930476665496826, 0.7944576740264893], [0.2929192781448364, 0.7928590774536133], [0.2928246259689331, 0.7913617491722107], [0.2927718162536621, 0.7896589040756226], [0.2926095724105835, 0.7881133556365967], [0.2924612760543823, 0.7862762212753296], [0.2922869920730591, 0.784586489200592], [0.2921554446220398, 0.7826892733573914], [0.2919267416000366, 0.7809277772903442], [0.291748046875, 0.7788910865783691], [0.2915372848510742, 0.77702397108078], [0.2913898229598999, 0.7749483585357666], [0.2911500930786133, 0.7730616331100464], [0.29097503423690796, 0.7709346413612366], [0.2907612919807434, 0.769012451171875], [0.29062777757644653, 0.7669004201889038], [0.29041290283203125, 0.7650294303894043], [0.2902955412864685, 0.7629247903823853], [0.2901356816291809, 0.7610551118850708], [0.2900538444519043, 0.7589761018753052], [0.2899131774902344, 0.7571933269500732], [0.2898009419441223, 0.7552094459533691], [0.28970205783843994, 0.7533215284347534], [0.28966522216796875, 0.7512550354003906], [0.28953003883361816, 0.7492280006408691], [0.28950321674346924, 0.7469640970230103], [0.2893903851509094, 0.7447917461395264], [0.28939712047576904, 0.7424519062042236], [0.28925538063049316, 0.7401570081710815], [0.2892436981201172, 0.7376655340194702], [0.2891309857368469, 0.7352476119995117], [0.2891639471054077, 0.7326480150222778], [0.28904998302459717, 0.7301967144012451], [0.28906673192977905, 0.7275114059448242], [0.28896456956863403, 0.72499680519104], [0.28899550437927246, 0.7222703099250793], [0.2888606786727905, 0.7196807861328125], [0.2888525128364563, 0.7168835401535034], [0.2887251377105713, 0.7142506837844849], [0.28873658180236816, 0.7114353179931641], [0.2885938882827759, 0.7088006734848022], [0.2885764241218567, 0.7059746980667114], [0.28842341899871826, 0.7032947540283203], [0.28840959072113037, 0.7004411220550537], [0.28825074434280396, 0.6977530717849731], 
[0.2882283926010132, 0.6948826909065247], [0.2880735397338867, 0.6921799182891846], [0.2880605459213257, 0.6892998218536377], [0.28790485858917236, 0.6866021156311035], [0.2878881096839905, 0.6837241053581238], [0.28774261474609375, 0.6810285449028015], [0.28773534297943115, 0.6781659126281738], [0.28758084774017334, 0.6754797697067261], [0.2875595688819885, 0.672613263130188], [0.2874143123626709, 0.6699272394180298], [0.2874103784561157, 0.6670792102813721], [0.28727108240127563, 0.6644014120101929], [0.28726840019226074, 0.6615577936172485], [0.28714519739151, 0.6589137315750122], [0.2871614694595337, 0.6561227440834045], [0.2870403528213501, 0.6535015106201172], [0.2870563268661499, 0.6507292985916138], [0.28694599866867065, 0.6481199264526367], [0.286970853805542, 0.6453746557235718], [0.2868644595146179, 0.6427729725837708], [0.28689539432525635, 0.6400290131568909], [0.2868040204048157, 0.6374356746673584], [0.28684961795806885, 0.6347053050994873], [0.28676581382751465, 0.6321284770965576], [0.2868145704269409, 0.6294214725494385], [0.2867427468299866, 0.6268541812896729], [0.2868008017539978, 0.6241665482521057], [0.286734402179718, 0.6216248273849487], [0.28679585456848145, 0.6189582943916321], [0.2867332696914673, 0.616430401802063], [0.28679394721984863, 0.6137978434562683], [0.2867380380630493, 0.6112817525863647], [0.2868075370788574, 0.6086664795875549], [0.28676754236221313, 0.606157660484314], [0.28684866428375244, 0.6035569906234741], [0.28681594133377075, 0.6010756492614746], [0.28690022230148315, 0.5985126495361328], [0.28687047958374023, 0.5960705280303955], [0.2869579792022705, 0.5935555696487427], [0.28693830966949463, 0.5911550521850586], [0.2870330214500427, 0.5886911153793335], [0.28701937198638916, 0.5863031148910522], [0.28711235523223877, 0.5838683843612671], [0.28710442781448364, 0.5815030336380005], [0.28721117973327637, 0.579094409942627], [0.28722161054611206, 0.5767409801483154], [0.2873384952545166, 0.5743693113327026], [0.2873573303222656, 0.5720498561859131], [0.287481427192688, 0.5697125792503357], [0.28751325607299805, 0.5674558877944946], [0.2876487970352173, 0.5651900768280029], [0.2876901626586914, 0.5629881024360657], [0.2878279685974121, 0.5607855916023254], [0.2878735661506653, 0.5586409568786621], [0.2880159616470337, 0.5564972162246704], [0.28807133436203003, 0.5543954968452454], [0.28821879625320435, 0.5523018836975098], [0.28828537464141846, 0.5502187013626099], [0.2884374260902405, 0.548155665397644], [0.2885095477104187, 0.5461002588272095], [0.28865981101989746, 0.5440818667411804], [0.28874027729034424, 0.5420589447021484], [0.28889936208724976, 0.5400779843330383], [0.28899550437927246, 0.538068413734436], [0.28916430473327637, 0.5361102819442749], [0.2892639636993408, 0.5341825485229492], [0.28943437337875366, 0.5323144197463989], [0.2895476818084717, 0.5304393768310547], [0.28972673416137695, 0.5286456346511841], [0.2898445725440979, 0.5268715620040894], [0.2900168299674988, 0.5251768827438354], [0.2901585102081299, 0.5234948396682739], [0.290338933467865, 0.5219536423683167], [0.29043519496917725, 0.520442008972168], [0.2905568480491638, 0.519006609916687], [0.29064691066741943, 0.5175271034240723], [0.2907676696777344, 0.516176700592041], [0.29088711738586426, 0.5148888230323792], [0.29099804162979126, 0.5138027667999268], [0.291082501411438, 0.5127396583557129], [0.2911323308944702, 0.5121843814849854], [0.29381799697875977, 0.798005223274231], [0.2944900393486023, 0.7970086336135864], [0.294609010219574, 0.7958320379257202], 
[0.29477012157440186, 0.794247031211853], [0.29476499557495117, 0.7928916215896606], [0.29478341341018677, 0.7912038564682007], [0.2947465181350708, 0.7896822094917297], [0.2947498559951782, 0.7878193855285645], [0.29465681314468384, 0.7862014770507812], [0.29459714889526367, 0.7842873334884644], [0.29446321725845337, 0.7825700044631958], [0.29437243938446045, 0.7805364727973938], [0.29421138763427734, 0.7787441611289978], [0.2940943241119385, 0.7766534090042114], [0.29390949010849, 0.7747827768325806], [0.29378408193588257, 0.7726374864578247], [0.29358625411987305, 0.7707527279853821], [0.2934536933898926, 0.7686301469802856], [0.29326051473617554, 0.7667393088340759], [0.29314810037612915, 0.7646340131759644], [0.2929670810699463, 0.7628010511398315], [0.2928723096847534, 0.7607421875], [0.29272031784057617, 0.7588832378387451], [0.29264503717422485, 0.7569007873535156], [0.2925589084625244, 0.7551462054252625], [0.29248934984207153, 0.7530917525291443], [0.2924036979675293, 0.7512608766555786], [0.2924247980117798, 0.749021053314209], [0.2923407554626465, 0.7469776272773743], [0.29236626625061035, 0.7446292638778687], [0.29226696491241455, 0.7424564957618713], [0.2922888398170471, 0.7399565577507019], [0.29220008850097656, 0.737696647644043], [0.29223960638046265, 0.735101580619812], [0.2921626567840576, 0.7327138781547546], [0.2922177314758301, 0.7300289869308472], [0.2921379804611206, 0.7275701761245728], [0.2921792268753052, 0.7248400449752808], [0.29207515716552734, 0.7223038673400879], [0.2921024560928345, 0.7194786667823792], [0.2919897437095642, 0.7168949842453003], [0.29200279712677, 0.714064359664917], [0.29188072681427, 0.7114472389221191], [0.2918902635574341, 0.708586573600769], [0.29175329208374023, 0.7059512734413147], [0.29174280166625977, 0.7030752897262573], [0.29160308837890625, 0.7004216909408569], [0.2915966510772705, 0.6975260376930237], [0.29145127534866333, 0.6948527097702026], [0.2914379835128784, 0.6919597387313843], [0.2912946939468384, 0.6892788410186768], [0.29128921031951904, 0.6863815784454346], [0.29114264249801636, 0.6836932301521301], [0.2911298871040344, 0.6808227300643921], [0.2909901738166809, 0.6781413555145264], [0.29098474979400635, 0.6752621531486511], [0.29084157943725586, 0.6725775003433228], [0.29083383083343506, 0.6697303056716919], [0.2906992435455322, 0.6670559048652649], [0.290701687335968, 0.6642133593559265], [0.2905789613723755, 0.6615464687347412], [0.2905915379524231, 0.6587509512901306], [0.29047536849975586, 0.6561216711997986], [0.2904970645904541, 0.6533451080322266], [0.2903905510902405, 0.6507335901260376], [0.29041779041290283, 0.6479827165603638], [0.29031550884246826, 0.6453807353973389], [0.2903447151184082, 0.6426424980163574], [0.2902514934539795, 0.6400481462478638], [0.29029011726379395, 0.6373292207717896], [0.29020386934280396, 0.6347350478172302], [0.29025089740753174, 0.6320352554321289], [0.2901759147644043, 0.6294623613357544], [0.2902272939682007, 0.626781702041626], [0.2901597023010254, 0.6242125034332275], [0.2902127504348755, 0.6215572953224182], [0.2901456952095032, 0.6190041899681091], [0.2902013063430786, 0.6163743734359741], [0.29014384746551514, 0.6138484477996826], [0.2902067303657532, 0.6112416982650757], [0.29015570878982544, 0.6087269186973572], [0.29022157192230225, 0.6061413884162903], [0.2901812791824341, 0.6036326885223389], [0.29025816917419434, 0.6010738611221313], [0.2902234196662903, 0.598587155342102], [0.2903035879135132, 0.5960817337036133], [0.29027992486953735, 0.5936436057090759], 
[0.2903671860694885, 0.5911835432052612], [0.29034507274627686, 0.5887770652770996], [0.29042816162109375, 0.586337685585022], [0.29040807485580444, 0.5839507579803467], [0.2904927730560303, 0.5815556645393372], [0.29048073291778564, 0.5791977643966675], [0.29057776927948, 0.5768235921859741], [0.2905777096748352, 0.5744825601577759], [0.29068106412887573, 0.572149932384491], [0.290690541267395, 0.5698386430740356], [0.2907991409301758, 0.5675793290138245], [0.29082030057907104, 0.5653368234634399], [0.2909371256828308, 0.563125729560852], [0.2909574508666992, 0.5609259605407715], [0.29106783866882324, 0.5587887167930603], [0.2910955548286438, 0.5566504001617432], [0.29121118783950806, 0.5545578002929688], [0.29124218225479126, 0.5524587631225586], [0.29135608673095703, 0.5503918528556824], [0.2913839817047119, 0.548302173614502], [0.2914917469024658, 0.5462769865989685], [0.2915302515029907, 0.544235348701477], [0.29164302349090576, 0.5422501564025879], [0.29168224334716797, 0.5402316451072693], [0.2917931079864502, 0.5382746458053589], [0.2918415069580078, 0.5362718105316162], [0.29195642471313477, 0.5343970060348511], [0.2920032739639282, 0.5324701070785522], [0.29211461544036865, 0.5306706428527832], [0.29216861724853516, 0.5288094282150269], [0.29227781295776367, 0.5270992517471313], [0.2923288345336914, 0.5253278017044067], [0.2924168109893799, 0.523735761642456], [0.2924419641494751, 0.5220689177513123], [0.2925119400024414, 0.5206151604652405], [0.29251599311828613, 0.5190527439117432], [0.2925504446029663, 0.517685055732727], [0.29254722595214844, 0.5162175893783569], [0.2925514578819275, 0.5150486826896667], [0.2924690246582031, 0.5137323141098022], [0.29238420724868774, 0.512787938117981], [0.2919166088104248, 0.5117992162704468], [0.2945878505706787, 0.7987865805625916], [0.2953340411186218, 0.7974721193313599], [0.29598867893218994, 0.7960034608840942], [0.2962515354156494, 0.7945817708969116], [0.2965582013130188, 0.792873740196228], [0.2966402769088745, 0.7913708686828613], [0.296755313873291, 0.7896035313606262], [0.2967771887779236, 0.7879343032836914], [0.2968580722808838, 0.7860372066497803], [0.2968214154243469, 0.7843095660209656], [0.29682642221450806, 0.7823308110237122], [0.2967374324798584, 0.7805032730102539], [0.2966947555541992, 0.7784451246261597], [0.29656076431274414, 0.776562511920929], [0.2964819669723511, 0.7744500637054443], [0.2963252067565918, 0.7725273370742798], [0.2962328791618347, 0.7703907489776611], [0.2960658073425293, 0.7684988975524902], [0.2959723472595215, 0.766392707824707], [0.29581063985824585, 0.7645143270492554], [0.29570943117141724, 0.7624309062957764], [0.29550617933273315, 0.760587751865387], [0.295365571975708, 0.7585514783859253], [0.29520928859710693, 0.7568180561065674], [0.2951306104660034, 0.7549452781677246], [0.29512912034988403, 0.7531846165657043], [0.29522180557250977, 0.7511608004570007], [0.2952052354812622, 0.7491228580474854], [0.2952882647514343, 0.7468562722206116], [0.2952308654785156, 0.7446919679641724], [0.29528576135635376, 0.7423101663589478], [0.2952369451522827, 0.7400500774383545], [0.29531359672546387, 0.7375715970993042], [0.2952624559402466, 0.735195517539978], [0.29534047842025757, 0.7326107025146484], [0.2952876687049866, 0.7301249504089355], [0.2953556180000305, 0.7274436950683594], [0.2952708601951599, 0.7248929142951965], [0.29530978202819824, 0.7221462726593018], [0.2952178120613098, 0.7195295095443726], [0.2952544093132019, 0.7167257070541382], [0.29514771699905396, 0.7140854597091675], 
[0.2951662540435791, 0.7112683057785034], [0.29504913091659546, 0.7085980176925659], [0.29505985975265503, 0.7057455778121948], [0.29493415355682373, 0.7030704021453857], [0.29493778944015503, 0.7002204656600952], [0.2948065400123596, 0.6975176334381104], [0.2948070764541626, 0.6946398019790649], [0.2946698069572449, 0.6919403076171875], [0.29466432332992554, 0.6890761256217957], [0.29452377557754517, 0.6863596439361572], [0.29451608657836914, 0.6834825873374939], [0.29437875747680664, 0.6807991862297058], [0.29437434673309326, 0.6779457926750183], [0.29424065351486206, 0.6752431392669678], [0.2942396402359009, 0.6723776459693909], [0.29410505294799805, 0.6697063446044922], [0.29410409927368164, 0.6668738126754761], [0.29398465156555176, 0.6642084717750549], [0.29400140047073364, 0.6613852977752686], [0.29389023780822754, 0.6587496995925903], [0.29391223192214966, 0.6559733152389526], [0.2938128113746643, 0.6533583402633667], [0.29384517669677734, 0.6506009697914124], [0.2937498688697815, 0.6479970216751099], [0.2937818765640259, 0.6452528834342957], [0.2936914563179016, 0.6426613330841064], [0.2937270402908325, 0.6399343013763428], [0.293637216091156, 0.6373531818389893], [0.29367852210998535, 0.6346385478973389], [0.2936035990715027, 0.6320761442184448], [0.2936559319496155, 0.6293838620185852], [0.2935868501663208, 0.6268230080604553], [0.29363536834716797, 0.6241374015808105], [0.2935619354248047, 0.6215903759002686], [0.2936074733734131, 0.6189340353012085], [0.293542742729187, 0.6164197325706482], [0.29360026121139526, 0.613803505897522], [0.29354578256607056, 0.6112929582595825], [0.29360467195510864, 0.6086868643760681], [0.29355257749557495, 0.6061934232711792], [0.2936151623725891, 0.6036168336868286], [0.29357051849365234, 0.6011356711387634], [0.2936432957649231, 0.598588228225708], [0.2936124801635742, 0.596156656742096], [0.2936927080154419, 0.5936636924743652], [0.29366451501846313, 0.5912579298019409], [0.2937427759170532, 0.5887963175773621], [0.29371845722198486, 0.5864130854606628], [0.2937944531440735, 0.5839720368385315], [0.29376548528671265, 0.5816231966018677], [0.29383862018585205, 0.5792375802993774], [0.29381680488586426, 0.5769067406654358], [0.29390180110931396, 0.574548065662384], [0.2938951253890991, 0.5722458362579346], [0.2939877510070801, 0.5699219107627869], [0.293987512588501, 0.5676827430725098], [0.29408228397369385, 0.5654412508010864], [0.2940855622291565, 0.5632318258285522], [0.29417967796325684, 0.5610252022743225], [0.2941815257072449, 0.5588908195495605], [0.29427021741867065, 0.5567628145217896], [0.29427194595336914, 0.5546561479568481], [0.2943522334098816, 0.552564799785614], [0.2943437099456787, 0.5504732131958008], [0.2944156527519226, 0.5484021902084351], [0.294413685798645, 0.5463644862174988], [0.29448652267456055, 0.5443507432937622], [0.2944842576980591, 0.5423297882080078], [0.29455268383026123, 0.5403461456298828], [0.2945505380630493, 0.5383501052856445], [0.2946177124977112, 0.5363976955413818], [0.2946122884750366, 0.5344635248184204], [0.2946709394454956, 0.5325890779495239], [0.29465967416763306, 0.5307270288467407], [0.29471009969711304, 0.5289350748062134], [0.294694721698761, 0.5271415710449219], [0.29472821950912476, 0.5254309177398682], [0.29469364881515503, 0.5237323045730591], [0.29470115900039673, 0.5221301317214966], [0.2946227192878723, 0.5205507278442383], [0.2945888042449951, 0.5190671682357788], [0.2944827079772949, 0.5175635814666748], [0.2943927049636841, 0.5161827802658081], [0.29418426752090454, 
0.5147855281829834], [0.2939859628677368, 0.5135617256164551], [0.29355520009994507, 0.5122998952865601], [0.29312342405319214, 0.5110776424407959], [0.2953648567199707, 0.7996161580085754], [0.29647278785705566, 0.7981488704681396], [0.29714643955230713, 0.7965460419654846], [0.29775047302246094, 0.7948946952819824], [0.298089861869812, 0.793286144733429], [0.2984691262245178, 0.7914729714393616], [0.29861366748809814, 0.789848804473877], [0.2988046407699585, 0.7879523038864136], [0.29887741804122925, 0.7862257957458496], [0.2989999055862427, 0.7842159271240234], [0.29899752140045166, 0.7824239730834961], [0.2990441918373108, 0.7803322076797485], [0.29898297786712646, 0.7784601449966431], [0.29897236824035645, 0.7763254642486572], [0.2988707423210144, 0.7744170427322388], [0.29882681369781494, 0.7722510099411011], [0.29869306087493896, 0.7703210115432739], [0.298622190952301, 0.7681931853294373], [0.2984709143638611, 0.7663073539733887], [0.2983893156051636, 0.7642076015472412], [0.29822808504104614, 0.7623149156570435], [0.2981796860694885, 0.7602752447128296], [0.2980892062187195, 0.7584686279296875], [0.29809749126434326, 0.7566084265708923], [0.2980928421020508, 0.7549923658370972], [0.29804813861846924, 0.7530568242073059], [0.297971248626709, 0.7512837052345276], [0.29801762104034424, 0.7490443587303162], [0.297981858253479, 0.7469899654388428], [0.2980842590332031, 0.7446106672286987], [0.29807138442993164, 0.7424450516700745], [0.29817765951156616, 0.7399866580963135], [0.2981586456298828, 0.7377073764801025], [0.2982606291770935, 0.7351197004318237], [0.2982301712036133, 0.7327419519424438], [0.2983244061470032, 0.7300398349761963], [0.29827553033828735, 0.7275415658950806], [0.2983473539352417, 0.724765419960022], [0.2982773780822754, 0.7222162485122681], [0.2983320951461792, 0.7193976640701294], [0.2982470393180847, 0.7167773246765137], [0.29828405380249023, 0.7139282822608948], [0.298190176486969, 0.7113100290298462], [0.2982243299484253, 0.7084334492683411], [0.2981235980987549, 0.7057734727859497], [0.29814785718917847, 0.7028930187225342], [0.2980329394340515, 0.700230062007904], [0.2980438470840454, 0.6973351240158081], [0.29792284965515137, 0.6946427822113037], [0.297929584980011, 0.6917474269866943], [0.2978001832962036, 0.6890645623207092], [0.2978006601333618, 0.6861677169799805], [0.2976759076118469, 0.6834750175476074], [0.2976803183555603, 0.6806084513664246], [0.2975534200668335, 0.6779349446296692], [0.2975583076477051, 0.6750632524490356], [0.2974362373352051, 0.6723688840866089], [0.2974468469619751, 0.669527530670166], [0.2973330020904541, 0.6668704748153687], [0.297349214553833, 0.664054811000824], [0.297244131565094, 0.6613964438438416], [0.2972714304924011, 0.65860915184021], [0.2971763610839844, 0.6559916734695435], [0.2972075343132019, 0.6532320976257324], [0.2971174120903015, 0.6506273746490479], [0.297155499458313, 0.6478787064552307], [0.29707014560699463, 0.6452797651290894], [0.29711103439331055, 0.6425521373748779], [0.29702985286712646, 0.6399606466293335], [0.29707396030426025, 0.6372513175010681], [0.29699820280075073, 0.6346760988235474], [0.29704606533050537, 0.6319905519485474], [0.2969707250595093, 0.6294217109680176], [0.29701781272888184, 0.6267461776733398], [0.296950101852417, 0.6241744756698608], [0.29700207710266113, 0.6215159893035889], [0.29693716764450073, 0.6189687252044678], [0.2969897985458374, 0.6163638234138489], [0.2969319820404053, 0.6138486862182617], [0.2969883680343628, 0.6112492680549622], [0.296935498714447, 
[... data continues: several thousand machine-generated, normalized [x, y] coordinate pairs (x roughly 0.29-0.34, y roughly 0.49-0.81), apparently face-tracking/landmark output; elided here as it is not human-reviewable ...]
[0.3372514247894287, 0.7296292781829834], [0.3376308083534241, 0.7268010377883911], [0.33787184953689575, 0.7241626977920532], [0.3382101058959961, 0.721235990524292], [0.3384133577346802, 0.718490481376648], [0.33871257305145264, 0.7155261039733887], [0.33888036012649536, 0.7127677202224731], [0.33914971351623535, 0.709753155708313], [0.3392866849899292, 0.7069301605224609], [0.3395261764526367, 0.7039226293563843], [0.3396648168563843, 0.701117992401123], [0.3399069309234619, 0.6981692314147949], [0.3400222063064575, 0.6954036951065063], [0.3402431607246399, 0.6924620270729065], [0.3403560519218445, 0.6897118091583252], [0.3405756950378418, 0.6868144273757935], [0.34069156646728516, 0.6840857267379761], [0.34091514348983765, 0.6812347173690796], [0.3410228490829468, 0.6785626411437988], [0.3412301540374756, 0.6757432222366333], [0.3413357138633728, 0.6730897426605225], [0.34154176712036133, 0.670299768447876], [0.34163886308670044, 0.6676627397537231], [0.34183454513549805, 0.6648712158203125], [0.3419283628463745, 0.6622314453125], [0.3421194553375244, 0.6594866514205933], [0.3422025442123413, 0.6568845510482788], [0.342382550239563, 0.6541094779968262], [0.34246134757995605, 0.6514669060707092], [0.3426319360733032, 0.6487125158309937], [0.34269851446151733, 0.6460819244384766], [0.3428645730018616, 0.6433138847351074], [0.34292638301849365, 0.640678346157074], [0.34307801723480225, 0.6379417181015015], [0.34313511848449707, 0.6353150606155396], [0.3432798385620117, 0.6325778961181641], [0.3433164358139038, 0.6299484968185425], [0.3434363603591919, 0.6272159814834595], [0.3434579372406006, 0.624587893486023], [0.3435633182525635, 0.6218882203102112], [0.34356003999710083, 0.6192682981491089], [0.3436400294303894, 0.6165478229522705], [0.343624472618103, 0.6139261722564697], [0.34368783235549927, 0.6112470626831055], [0.34365546703338623, 0.6086315512657166], [0.3436979055404663, 0.6059602499008179], [0.3436378836631775, 0.6033527851104736], [0.34364575147628784, 0.6007105112075806], [0.34355175495147705, 0.598126232624054], [0.3435279130935669, 0.5954969525337219], [0.3434075117111206, 0.5929184556007385], [0.34335261583328247, 0.5903033018112183], [0.34320366382598877, 0.5877349972724915], [0.34312117099761963, 0.5851613879203796], [0.34295153617858887, 0.582621693611145], [0.34284329414367676, 0.5800857543945312], [0.3426409959793091, 0.5775797367095947], [0.34250128269195557, 0.5750939846038818], [0.34227925539016724, 0.572628915309906], [0.34212416410446167, 0.5701701641082764], [0.341880202293396, 0.5677229762077332], [0.34169960021972656, 0.5653166770935059], [0.3414369821548462, 0.5629048347473145], [0.341235876083374, 0.5605311393737793], [0.34095048904418945, 0.558157205581665], [0.3407265543937683, 0.5558161735534668], [0.3404214382171631, 0.5534563660621643], [0.3401709794998169, 0.5511194467544556], [0.3398311138153076, 0.5487570762634277], [0.3395541310310364, 0.5464437007904053], [0.3391895294189453, 0.5440906286239624], [0.3388817310333252, 0.5417819023132324], [0.33848607540130615, 0.5394490957260132], [0.33814525604248047, 0.5371596813201904], [0.33770179748535156, 0.5348072052001953], [0.3373175859451294, 0.5325303077697754], [0.3368365168571472, 0.5302015542984009], [0.33640778064727783, 0.5279096364974976], [0.33586323261260986, 0.5255557894706726], [0.33537495136260986, 0.523255467414856], [0.33477580547332764, 0.5208849906921387], [0.3342229127883911, 0.5185580253601074], [0.3335267901420593, 0.5161443948745728], [0.3328828811645508, 0.513797402381897], 
[0.33209943771362305, 0.5113656520843506], [0.3313577175140381, 0.508958101272583], [0.3304572105407715, 0.5064719915390015], [0.3296229839324951, 0.5040550231933594], [0.3286026120185852, 0.5015552043914795], [0.32762062549591064, 0.4990726113319397], [0.3264743685722351, 0.49651214480400085], [0.3253493905067444, 0.4939161539077759], [0.3240293264389038, 0.4912526607513428], [0.3215150833129883, 0.8181251287460327], [0.3226391077041626, 0.815424919128418], [0.3238832950592041, 0.8125184178352356], [0.32494020462036133, 0.8098841905593872], [0.3260936141014099, 0.8070195913314819], [0.3270415663719177, 0.8043510317802429], [0.3280429244041443, 0.8014318346977234], [0.328843355178833, 0.7986426949501038], [0.3296765089035034, 0.79564368724823], [0.330361545085907, 0.7926648259162903], [0.33112001419067383, 0.7894973754882812], [0.33168256282806396, 0.7869172096252441], [0.33233189582824707, 0.7841552495956421], [0.33278995752334595, 0.7816932201385498], [0.3333169221878052, 0.7791904211044312], [0.33385443687438965, 0.7766292095184326], [0.3344056010246277, 0.7740468978881836], [0.33513569831848145, 0.7710252404212952], [0.3360579013824463, 0.7674355506896973], [0.3359411358833313, 0.7663534879684448], [0.3354695439338684, 0.7639981508255005], [0.33530062437057495, 0.7620797157287598], [0.33532679080963135, 0.7604039907455444], [0.3355039358139038, 0.7585742473602295], [0.3357633352279663, 0.7569797039031982], [0.3357827663421631, 0.75583815574646], [0.3357961177825928, 0.7543802261352539], [0.3356149196624756, 0.7526863217353821], [0.3354147672653198, 0.7511202096939087], [0.33563047647476196, 0.7481545209884644], [0.3364943265914917, 0.7454833388328552], [0.33716726303100586, 0.7432392239570618], [0.3378947377204895, 0.7406988739967346], [0.33846670389175415, 0.7381649613380432], [0.3390464186668396, 0.7354207634925842], [0.3394628167152405, 0.7328071594238281], [0.3399396538734436, 0.7299668788909912], [0.340259313583374, 0.7272837162017822], [0.3406444787979126, 0.7244254350662231], [0.340890109539032, 0.7216496467590332], [0.34122323989868164, 0.7187056541442871], [0.34143519401550293, 0.715906023979187], [0.3417363166809082, 0.7129413485527039], [0.3419094681739807, 0.7100902795791626], [0.34218132495880127, 0.7070710062980652], [0.3423401713371277, 0.7042500972747803], [0.34260231256484985, 0.7012752294540405], [0.3427497148513794, 0.6984847784042358], [0.343003511428833, 0.6955438852310181], [0.3431509733200073, 0.6927804946899414], [0.3434029817581177, 0.6898567080497742], [0.3435364365577698, 0.6871239542961121], [0.3437756299972534, 0.684242844581604], [0.343910276889801, 0.6815493106842041], [0.3441463112831116, 0.6787097454071045], [0.344274640083313, 0.6760518550872803], [0.344502329826355, 0.6732418537139893], [0.34462153911590576, 0.6705974340438843], [0.3448374271392822, 0.6678053140640259], [0.3449506163597107, 0.665158748626709], [0.3451603651046753, 0.6623769402503967], [0.3452637195587158, 0.6597611904144287], [0.34545832872390747, 0.6570146083831787], [0.3455526828765869, 0.6543712615966797], [0.3457408547401428, 0.6515911817550659], [0.34581732749938965, 0.6489444971084595], [0.34598565101623535, 0.6461937427520752], [0.34606367349624634, 0.6435534954071045], [0.3462408781051636, 0.6407891511917114], [0.34631240367889404, 0.63815838098526], [0.3464667797088623, 0.6354140043258667], [0.34651994705200195, 0.6327670812606812], [0.34665679931640625, 0.6300190091133118], [0.34669291973114014, 0.627373218536377], [0.34681034088134766, 0.6246389150619507], 
[0.3468218445777893, 0.6220096945762634], [0.34691548347473145, 0.6192808747291565], [0.3469165563583374, 0.6166512966156006], [0.3470001816749573, 0.6139272451400757], [0.3469822406768799, 0.6113108992576599], [0.34703880548477173, 0.6086053848266602], [0.3469988703727722, 0.6059837341308594], [0.3470280170440674, 0.6032832860946655], [0.3469581604003906, 0.6006766557693481], [0.3469501733779907, 0.5980000495910645], [0.3468359112739563, 0.5954011678695679], [0.3467838168144226, 0.5927435159683228], [0.3466418385505676, 0.5901620388031006], [0.34656602144241333, 0.5875256061553955], [0.3463994264602661, 0.5849710702896118], [0.34629249572753906, 0.582375705242157], [0.3460950255393982, 0.5798412561416626], [0.3459511995315552, 0.5772790312767029], [0.3457221984863281, 0.5747967958450317], [0.34555530548095703, 0.5723031759262085], [0.3453105092048645, 0.5698468685150146], [0.34512776136398315, 0.5673650503158569], [0.344867467880249, 0.5649505257606506], [0.3446638584136963, 0.562518835067749], [0.34438490867614746, 0.5601259469985962], [0.34416019916534424, 0.5577311515808105], [0.34385132789611816, 0.5553616285324097], [0.34358906745910645, 0.5529883503913879], [0.34324729442596436, 0.5506225824356079], [0.342960000038147, 0.5482484698295593], [0.34259653091430664, 0.5459092855453491], [0.3422902822494507, 0.5435543060302734], [0.3419010043144226, 0.5411964654922485], [0.3415601849555969, 0.5388602018356323], [0.34112322330474854, 0.5365058183670044], [0.34073591232299805, 0.5341613292694092], [0.3402668833732605, 0.5318194627761841], [0.3398463726043701, 0.5294994115829468], [0.33931875228881836, 0.5271258354187012], [0.33884382247924805, 0.5247786045074463], [0.3382660150527954, 0.5223947763442993], [0.3377354145050049, 0.5200366973876953], [0.33707118034362793, 0.5175922513008118], [0.33645355701446533, 0.515184760093689], [0.33570730686187744, 0.5127196311950684], [0.3350093960762024, 0.5103009343147278], [0.33414721488952637, 0.507760763168335], [0.3333418369293213, 0.5052849650382996], [0.3323701024055481, 0.5027344226837158], [0.33145415782928467, 0.5002259016036987], [0.33036649227142334, 0.4976140558719635], [0.329326331615448, 0.495048463344574], [0.3280460834503174, 0.4923134744167328], [0.32683271169662476, 0.4896526634693146], [0.32403361797332764, 0.8195821046829224], [0.32527410984039307, 0.8166016936302185], [0.32634711265563965, 0.8139229416847229], [0.3275347352027893, 0.81097811460495], [0.3285139799118042, 0.808265209197998], [0.3295722007751465, 0.805255651473999], [0.33041656017303467, 0.8024564385414124], [0.33133161067962646, 0.7993557453155518], [0.33203721046447754, 0.7964808344841003], [0.33285534381866455, 0.7932686805725098], [0.3335634469985962, 0.7903499603271484], [0.33426177501678467, 0.7874870300292969], [0.3347057104110718, 0.7847898602485657], [0.33516496419906616, 0.7821017503738403], [0.33548325300216675, 0.7797303199768066], [0.33586621284484863, 0.7769922614097595], [0.33618199825286865, 0.7744576930999756], [0.33665066957473755, 0.7714098691940308], [0.337444007396698, 0.7679051160812378], [0.33790117502212524, 0.7660189270973206], [0.3379117250442505, 0.7638341784477234], [0.33795368671417236, 0.762050986289978], [0.3380451202392578, 0.760711669921875], [0.3382548689842224, 0.7590059041976929], [0.3383997678756714, 0.7573733329772949], [0.3384931683540344, 0.755678653717041], [0.3383643627166748, 0.7542712092399597], [0.3383921980857849, 0.7524954676628113], [0.3383718729019165, 0.7508139610290527], [0.33835339546203613, 0.748658299446106], 
[0.3387678861618042, 0.7465105652809143], [0.3395463228225708, 0.7439308166503906], [0.34017300605773926, 0.7415801286697388], [0.34095871448516846, 0.7387449741363525], [0.3415157198905945, 0.7361290454864502], [0.34211868047714233, 0.733254075050354], [0.34251606464385986, 0.7305211424827576], [0.3429619073867798, 0.7276082038879395], [0.3432486057281494, 0.7248769998550415], [0.3436228036880493, 0.721898078918457], [0.34387391805648804, 0.719125509262085], [0.3442258834838867, 0.7161272168159485], [0.3444398045539856, 0.713312029838562], [0.34475189447402954, 0.7102731466293335], [0.3449515104293823, 0.7074360847473145], [0.345254123210907, 0.7044293880462646], [0.3454267978668213, 0.7016154527664185], [0.3457006812095642, 0.6986587047576904], [0.3458719849586487, 0.6958931684494019], [0.3461484909057617, 0.692961573600769], [0.34631067514419556, 0.6901929378509521], [0.3465738892555237, 0.6872948408126831], [0.34673070907592773, 0.684584379196167], [0.34699195623397827, 0.6817318201065063], [0.3471451997756958, 0.679044246673584], [0.34739428758621216, 0.6762309074401855], [0.3475397229194641, 0.6735693216323853], [0.347781777381897, 0.6707701086997986], [0.34792160987854004, 0.6681255102157593], [0.3481559753417969, 0.6653282642364502], [0.3482828140258789, 0.6626812219619751], [0.3484992980957031, 0.6599173545837402], [0.34861183166503906, 0.6572986841201782], [0.348815381526947, 0.6545184254646301], [0.34892046451568604, 0.6518621444702148], [0.34912002086639404, 0.6490740776062012], [0.34920734167099, 0.646436333656311], [0.3493843078613281, 0.643689751625061], [0.349473237991333, 0.6410391330718994], [0.3496490716934204, 0.6382738351821899], [0.34971654415130615, 0.6356233358383179], [0.34987014532089233, 0.6328650116920471], [0.3499274253845215, 0.6302053928375244], [0.35006827116012573, 0.6274528503417969], [0.35010790824890137, 0.624791145324707], [0.35022491216659546, 0.6220535635948181], [0.35024237632751465, 0.6194021701812744], [0.35033637285232544, 0.6166764497756958], [0.350334107875824, 0.6140204668045044], [0.3504049777984619, 0.6113000512123108], [0.350375771522522, 0.6086539030075073], [0.350416898727417, 0.6059370040893555], [0.3503572940826416, 0.6032780408859253], [0.3503607511520386, 0.6005803346633911], [0.3502677083015442, 0.5979344248771667], [0.3502393364906311, 0.5952445268630981], [0.35010820627212524, 0.5926101803779602], [0.3500378727912903, 0.5899628400802612], [0.34987854957580566, 0.5873523950576782], [0.34977924823760986, 0.5847249031066895], [0.34958112239837646, 0.5821362733840942], [0.349442720413208, 0.5795489549636841], [0.3492162227630615, 0.5769836902618408], [0.34905099868774414, 0.5744591951370239], [0.34880292415618896, 0.5719669461250305], [0.34861159324645996, 0.5694800615310669], [0.34834539890289307, 0.567004919052124], [0.3481379747390747, 0.5645503997802734], [0.34784388542175293, 0.5621081590652466], [0.34761106967926025, 0.5597028732299805], [0.3473002314567566, 0.5572843551635742], [0.34704411029815674, 0.5548912286758423], [0.34670406579971313, 0.552480936050415], [0.34641706943511963, 0.5501118302345276], [0.34605705738067627, 0.5477142333984375], [0.3457561135292053, 0.5453623533248901], [0.3453579545021057, 0.5429686307907104], [0.3450188636779785, 0.5406134128570557], [0.34458327293395996, 0.5382107496261597], [0.34420502185821533, 0.535862922668457], [0.3437424898147583, 0.5334654450416565], [0.34333908557891846, 0.5311297178268433], [0.3428235650062561, 0.5287203788757324], [0.342362642288208, 0.5263645648956299], 
[0.34180498123168945, 0.5239438414573669], [0.34130382537841797, 0.5215672254562378], [0.340671181678772, 0.5190994739532471], [0.34008848667144775, 0.5166686773300171], [0.3393837809562683, 0.5141509175300598], [0.338725209236145, 0.5116819739341736], [0.3379126787185669, 0.5091329216957092], [0.3371388912200928, 0.5066022872924805], [0.3361993432044983, 0.5039880275726318], [0.3353292942047119, 0.5014549493789673], [0.3343125581741333, 0.49882376194000244], [0.33333003520965576, 0.4962002635002136], [0.33214062452316284, 0.49347859621047974], [0.3309760093688965, 0.49074211716651917], [0.3296332359313965, 0.4879378080368042], [0.32674258947372437, 0.8207290172576904], [0.3277795910835266, 0.8179782629013062], [0.3289433717727661, 0.8150397539138794], [0.3299957513809204, 0.8122931718826294], [0.3311258554458618, 0.8092161417007446], [0.33201438188552856, 0.8063451051712036], [0.3329542875289917, 0.8032046556472778], [0.333715558052063, 0.8002738952636719], [0.33455759286880493, 0.7970944046974182], [0.33526456356048584, 0.7941408753395081], [0.3360787630081177, 0.7909913063049316], [0.33665013313293457, 0.7881843447685242], [0.3371737599372864, 0.7850860953330994], [0.3374394178390503, 0.7826026082038879], [0.33774667978286743, 0.7799294590950012], [0.3379315733909607, 0.7773442268371582], [0.33810192346572876, 0.7745705246925354], [0.33822768926620483, 0.7716977596282959], [0.33897584676742554, 0.7684983015060425], [0.33954256772994995, 0.7664216756820679], [0.34012579917907715, 0.7639374136924744], [0.34035128355026245, 0.7624503374099731], [0.3405753970146179, 0.7609760761260986], [0.3407197594642639, 0.759391725063324], [0.34088873863220215, 0.7575386762619019], [0.3409922122955322, 0.7558704614639282], [0.34116262197494507, 0.7540692090988159], [0.34115922451019287, 0.7527350187301636], [0.3411049246788025, 0.7504605054855347], [0.3411623239517212, 0.7491406202316284], [0.3414689302444458, 0.7472178936004639], [0.3419914245605469, 0.7448580265045166], [0.3427044153213501, 0.7423243522644043], [0.34338855743408203, 0.739602267742157], [0.34413111209869385, 0.7367226481437683], [0.3446711301803589, 0.7339356541633606], [0.3452383875846863, 0.7309108972549438], [0.34557074308395386, 0.7280992865562439], [0.34595155715942383, 0.7251499891281128], [0.3462420701980591, 0.7223691940307617], [0.3466455340385437, 0.7194016575813293], [0.346909761428833, 0.716546893119812], [0.3472551107406616, 0.7135329842567444], [0.34748607873916626, 0.7106711864471436], [0.34782034158706665, 0.7076600193977356], [0.3480384349822998, 0.7048145532608032], [0.3483541011810303, 0.7018108367919922], [0.34855180978775024, 0.6990387439727783], [0.3488548994064331, 0.696102499961853], [0.3490411043167114, 0.6933282613754272], [0.3493286967277527, 0.6903937458992004], [0.34951210021972656, 0.6876668930053711], [0.34980082511901855, 0.6847952604293823], [0.3499773144721985, 0.682099461555481], [0.35025298595428467, 0.6792523264884949], [0.3504204750061035, 0.6765874624252319], [0.3506852984428406, 0.6737764477729797], [0.35084712505340576, 0.6711199879646301], [0.35110461711883545, 0.6683305501937866], [0.3512599468231201, 0.6656656265258789], [0.35150545835494995, 0.6628689765930176], [0.351642370223999, 0.660231351852417], [0.3518626093864441, 0.6574642658233643], [0.35197967290878296, 0.654807984828949], [0.3521915078163147, 0.6520315408706665], [0.35231584310531616, 0.6493667960166931], [0.35252702236175537, 0.6465798020362854], [0.35262012481689453, 0.6439430117607117], [0.3527953624725342, 0.64117830991745], 
[0.3528776168823242, 0.6385087966918945], [0.3530477285385132, 0.6357404589653015], [0.35311996936798096, 0.6330792903900146], [0.353277325630188, 0.6303120851516724], [0.3533417582511902, 0.6276462078094482], [0.3534879684448242, 0.6248770952224731], [0.35353338718414307, 0.622207522392273], [0.35364723205566406, 0.6194465160369873], [0.3536568284034729, 0.6167834401130676], [0.35373789072036743, 0.6140345335006714], [0.35372644662857056, 0.6113717555999756], [0.353782594203949, 0.6086233854293823], [0.353738009929657, 0.605955958366394], [0.35375869274139404, 0.6031996011734009], [0.35368049144744873, 0.6005396246910095], [0.3536660671234131, 0.5978114008903503], [0.3535577654838562, 0.5951525568962097], [0.3535098433494568, 0.5924272537231445], [0.353360652923584, 0.5897985696792603], [0.3532641530036926, 0.5871208906173706], [0.35307198762893677, 0.5844991207122803], [0.35293352603912354, 0.5818405151367188], [0.3527081608772278, 0.5792680382728577], [0.35254353284835815, 0.5766550302505493], [0.35230547189712524, 0.5741381645202637], [0.35211676359176636, 0.571591317653656], [0.351839542388916, 0.5690983533859253], [0.35161471366882324, 0.5666028261184692], [0.351314902305603, 0.5641360282897949], [0.3510738015174866, 0.5616686940193176], [0.3507564067840576, 0.5592492818832397], [0.35049188137054443, 0.5568186044692993], [0.3501572012901306, 0.5544048547744751], [0.349875807762146, 0.5519793629646301], [0.3495204448699951, 0.5495765805244446], [0.3492128849029541, 0.5471830368041992], [0.3488173484802246, 0.5447757244110107], [0.3484727144241333, 0.5423839092254639], [0.34804487228393555, 0.539983332157135], [0.3476691246032715, 0.5375788807868958], [0.3472168445587158, 0.5351834893226624], [0.34681785106658936, 0.5328032970428467], [0.34632498025894165, 0.530389666557312], [0.3458751440048218, 0.5279817581176758], [0.34533369541168213, 0.5255552530288696], [0.34484559297561646, 0.5231576561927795], [0.344241738319397, 0.5206849575042725], [0.34368669986724854, 0.5182265043258667], [0.3430204391479492, 0.5157018899917603], [0.34240442514419556, 0.5132014751434326], [0.3416416645050049, 0.5105948448181152], [0.3409145474433899, 0.508036732673645], [0.3400185704231262, 0.505379319190979], [0.3391786813735962, 0.5027689933776855], [0.33820879459381104, 0.5001214742660522], [0.3372887372970581, 0.49750950932502747], [0.33616340160369873, 0.49473443627357483], [0.33508598804473877, 0.49200305342674255], [0.3338109850883484, 0.4891324043273926], [0.332594633102417, 0.4863172173500061], [0.329412579536438, 0.8221789598464966], [0.3306199312210083, 0.8191480040550232], [0.33163678646087646, 0.816522479057312], [0.33281272649765015, 0.8133729696273804], [0.333746075630188, 0.810397744178772], [0.3347178101539612, 0.8071584701538086], [0.335499107837677, 0.8041927814483643], [0.3363564610481262, 0.8009556531906128], [0.3370612859725952, 0.7979894876480103], [0.3378564715385437, 0.7947676181793213], [0.338497519493103, 0.7918165922164917], [0.33916544914245605, 0.7886054515838623], [0.3395732641220093, 0.7855750322341919], [0.33994221687316895, 0.7828396558761597], [0.34015953540802, 0.7802908420562744], [0.34036123752593994, 0.7774051427841187], [0.34033817052841187, 0.7746564149856567], [0.3405492901802063, 0.7719594240188599], [0.34104007482528687, 0.7693489789962769], [0.3417593240737915, 0.7671359777450562], [0.3424832224845886, 0.7647168636322021], [0.34297996759414673, 0.763003408908844], [0.34325075149536133, 0.7614972591400146], [0.3434979319572449, 0.7596110701560974], 
[0.3436262607574463, 0.7578757405281067], [0.3436664938926697, 0.7558126449584961], [0.34335029125213623, 0.7539219260215759], [0.3432955741882324, 0.7527529001235962], [0.34354549646377563, 0.7507370710372925], [0.3436996340751648, 0.7493301630020142], [0.3439443111419678, 0.7481834292411804], [0.34475821256637573, 0.745603621006012], [0.345334529876709, 0.7433494329452515], [0.34618085622787476, 0.740348219871521], [0.34675365686416626, 0.7374900579452515], [0.3473323583602905, 0.734410285949707], [0.3477393388748169, 0.7314591407775879], [0.3482084274291992, 0.7284045219421387], [0.34853971004486084, 0.725635290145874], [0.34896987676620483, 0.7226998805999756], [0.3492915630340576, 0.7198915481567383], [0.34971386194229126, 0.7168213129043579], [0.3499937057495117, 0.713977575302124], [0.3503696918487549, 0.710929811000824], [0.35061901807785034, 0.7080898284912109], [0.3509708046913147, 0.705062985420227], [0.3512062430381775, 0.702223539352417], [0.35153651237487793, 0.6992827653884888], [0.35175764560699463, 0.6965147256851196], [0.3520829677581787, 0.6935622692108154], [0.3522995114326477, 0.6908043622970581], [0.3526155352592468, 0.6879150867462158], [0.3528158664703369, 0.685195803642273], [0.3531149625778198, 0.6823388934135437], [0.3533053994178772, 0.6796433925628662], [0.35358965396881104, 0.6768195629119873], [0.3537690043449402, 0.6741584539413452], [0.3540428876876831, 0.6713505983352661], [0.3542160391807556, 0.6687044501304626], [0.35448282957077026, 0.6658880710601807], [0.35464203357696533, 0.6632171273231506], [0.3548884391784668, 0.660433292388916], [0.3550311326980591, 0.6577811241149902], [0.35526078939437866, 0.6549902558326721], [0.3553956151008606, 0.6523498296737671], [0.35562098026275635, 0.6495590209960938], [0.3557499051094055, 0.6468782424926758], [0.3559635281562805, 0.6440954208374023], [0.35606592893600464, 0.6414353847503662], [0.35625386238098145, 0.6386532783508301], [0.3563493490219116, 0.6359873414039612], [0.3565279245376587, 0.6332083940505981], [0.3566039204597473, 0.6305313110351562], [0.356758713722229, 0.6277594566345215], [0.3568266034126282, 0.6250817775726318], [0.3569718599319458, 0.6222913861274719], [0.3570074439048767, 0.6195859909057617], [0.35711222887039185, 0.616818368434906], [0.3571189045906067, 0.614128589630127], [0.3571892976760864, 0.6113676428794861], [0.3571631908416748, 0.6086685061454773], [0.35720163583755493, 0.6059026718139648], [0.3571473956108093, 0.6031956076622009], [0.35715460777282715, 0.6004345417022705], [0.35705530643463135, 0.5977356433868408], [0.3570162057876587, 0.5949999094009399], [0.35688233375549316, 0.5922946333885193], [0.35680222511291504, 0.5895748138427734], [0.3566180467605591, 0.5869022607803345], [0.3564888834953308, 0.5842171907424927], [0.3562718629837036, 0.5815632343292236], [0.35611361265182495, 0.5789369344711304], [0.3558754324913025, 0.5763445496559143], [0.35569483041763306, 0.573772668838501], [0.3554201126098633, 0.5712112188339233], [0.35520100593566895, 0.5686749219894409], [0.3548944592475891, 0.5661592483520508], [0.35464799404144287, 0.5636866092681885], [0.354333758354187, 0.5612068176269531], [0.3540753722190857, 0.5587599277496338], [0.3537362813949585, 0.5563111305236816], [0.3534511923789978, 0.5538885593414307], [0.35308629274368286, 0.5514355301856995], [0.35277700424194336, 0.5490281581878662], [0.3523830771446228, 0.5465880036354065], [0.35204559564590454, 0.5441795587539673], [0.35162830352783203, 0.5417430400848389], [0.35126614570617676, 0.5393426418304443], 
[0.3508092164993286, 0.536882758140564], [0.35041457414627075, 0.5345056056976318], [0.34992915391921997, 0.5320628881454468], [0.34949791431427, 0.5296574234962463], [0.3489682674407959, 0.5271747708320618], [0.3485018014907837, 0.5247664451599121], [0.3479248881340027, 0.5222771763801575], [0.3473989963531494, 0.5198178291320801], [0.34676051139831543, 0.5172721147537231], [0.3461688160896301, 0.514756977558136], [0.34543710947036743, 0.5121484398841858], [0.34474503993988037, 0.5095503926277161], [0.3438948392868042, 0.5068469047546387], [0.3431051969528198, 0.5042072534561157], [0.3421935439109802, 0.5014829635620117], [0.3413386940956116, 0.49882370233535767], [0.3402971029281616, 0.4960613250732422], [0.3392782211303711, 0.49328672885894775], [0.33808255195617676, 0.4904126524925232], [0.33692389726638794, 0.4875490069389343], [0.33556365966796875, 0.4845800995826721], [0.33227258920669556, 0.8233621120452881], [0.33333778381347656, 0.8206824660301208], [0.3345286250114441, 0.8176913857460022], [0.33553677797317505, 0.8146054148674011], [0.3365193009376526, 0.8112049698829651], [0.33730483055114746, 0.8081796765327454], [0.3381732702255249, 0.8048722743988037], [0.3388817310333252, 0.8018641471862793], [0.33967703580856323, 0.7986011505126953], [0.3403247594833374, 0.7955881953239441], [0.3410406708717346, 0.7923434376716614], [0.3415839672088623, 0.7892383337020874], [0.3421365022659302, 0.7858471274375916], [0.34242069721221924, 0.7833279967308044], [0.34277576208114624, 0.7804490327835083], [0.3429109454154968, 0.7776146531105042], [0.34291839599609375, 0.7744278907775879], [0.34283536672592163, 0.7722717523574829], [0.3431411385536194, 0.7698773145675659], [0.34361720085144043, 0.7679901123046875], [0.34454911947250366, 0.7654622197151184], [0.3450666666030884, 0.7637737393379211], [0.3455538749694824, 0.7618973255157471], [0.3458758592605591, 0.7600992918014526], [0.34614139795303345, 0.7579795718193054], [0.34629422426223755, 0.7560153007507324], [0.34664350748062134, 0.753574788570404], [0.34661179780960083, 0.752749502658844], [0.34657174348831177, 0.7511125206947327], [0.34673362970352173, 0.7498259544372559], [0.3468549847602844, 0.7489924430847168], [0.34738391637802124, 0.7465943098068237], [0.3481290340423584, 0.7442762851715088], [0.34892356395721436, 0.7412732839584351], [0.3495764136314392, 0.7379165291786194], [0.3498741388320923, 0.7349178194999695], [0.35027480125427246, 0.731779932975769], [0.3506621718406677, 0.7289696335792542], [0.3511973023414612, 0.7260047197341919], [0.35158753395080566, 0.723259687423706], [0.35207825899124146, 0.7202476263046265], [0.35242074728012085, 0.7173165082931519], [0.3528290390968323, 0.7142722606658936], [0.3531234860420227, 0.7114026546478271], [0.3535194396972656, 0.7083717584609985], [0.3537970185279846, 0.7055258750915527], [0.35417693853378296, 0.7024998664855957], [0.3544248342514038, 0.6997243165969849], [0.3547717332839966, 0.6967881917953491], [0.35502153635025024, 0.6940138936042786], [0.3553762435913086, 0.6910889744758606], [0.3556126356124878, 0.6883527040481567], [0.35594213008880615, 0.685464084148407], [0.35615867376327515, 0.6827632784843445], [0.3564680814743042, 0.6799004077911377], [0.35666757822036743, 0.6772274971008301], [0.356961190700531, 0.6744092702865601], [0.3571511507034302, 0.6717482209205627], [0.35743218660354614, 0.6689496636390686], [0.3576115369796753, 0.666266679763794], [0.35788190364837646, 0.663446843624115], [0.35804569721221924, 0.6607881784439087], [0.3582969307899475, 
0.6579941511154175], [0.3584543466567993, 0.6553328037261963], [0.3586961030960083, 0.6525591611862183], [0.3588374853134155, 0.649878740310669], [0.35906660556793213, 0.6470816135406494], [0.35920220613479614, 0.644397497177124], [0.3594200015068054, 0.6416066884994507], [0.3595384359359741, 0.6389318704605103], [0.35973644256591797, 0.6361494064331055], [0.3598349094390869, 0.6334540843963623], [0.3600071668624878, 0.6306573152542114], [0.36008429527282715, 0.6279791593551636], [0.3602403402328491, 0.6252028942108154], [0.36030495166778564, 0.6224771738052368], [0.36044204235076904, 0.6196578145027161], [0.3604762554168701, 0.6169517040252686], [0.3605715036392212, 0.6141510009765625], [0.3605659008026123, 0.6114400029182434], [0.3606213331222534, 0.6086410284042358], [0.360581636428833, 0.6059225797653198], [0.3606032133102417, 0.6031262874603271], [0.360528826713562, 0.6003966927528381], [0.3605107069015503, 0.5975958108901978], [0.3603910207748413, 0.5948894023895264], [0.3603242039680481, 0.5921002626419067], [0.36016345024108887, 0.5893957018852234], [0.36005210876464844, 0.5866302251815796], [0.3598458170890808, 0.5839637517929077], [0.35969090461730957, 0.5812448263168335], [0.359453022480011, 0.5786253213882446], [0.3592677712440491, 0.5759901404380798], [0.3589925765991211, 0.5733994245529175], [0.35876619815826416, 0.5707957148551941], [0.3584643006324768, 0.5682564973831177], [0.35821598768234253, 0.565698504447937], [0.35790300369262695, 0.563230574131012], [0.3576403260231018, 0.5607343912124634], [0.3573077321052551, 0.5582652688026428], [0.3570175766944885, 0.5557897090911865], [0.3566492795944214, 0.5533430576324463], [0.3563293218612671, 0.5508840084075928], [0.3559473156929016, 0.5484540462493896], [0.3556120991706848, 0.5459978580474854], [0.35520219802856445, 0.543559193611145], [0.35484224557876587, 0.5411211252212524], [0.35439741611480713, 0.5386707186698914], [0.35400015115737915, 0.5362117290496826], [0.35352790355682373, 0.533780574798584], [0.3531021475791931, 0.5313514471054077], [0.35259079933166504, 0.5288817882537842], [0.3521322011947632, 0.5264207124710083], [0.3515833616256714, 0.5239312648773193], [0.35108238458633423, 0.521456778049469], [0.35047411918640137, 0.5189068913459778], [0.3499141335487366, 0.516383171081543], [0.34921538829803467, 0.5137443542480469], [0.34855496883392334, 0.5111443996429443], [0.3477482795715332, 0.5084357261657715], [0.3469952940940857, 0.5057439804077148], [0.3461330533027649, 0.5030025243759155], [0.34533584117889404, 0.5003082156181335], [0.344366192817688, 0.4974815845489502], [0.3434365391731262, 0.49471497535705566], [0.34229928255081177, 0.4917806088924408], [0.3412143588066101, 0.48892509937286377], [0.3399151563644409, 0.48590150475502014], [0.3386845588684082, 0.4829533100128174], [0.33493244647979736, 0.8247722387313843], [0.3360138535499573, 0.8217536211013794], [0.3369871973991394, 0.8189553022384644], [0.3381801247596741, 0.8154179453849792], [0.3391032814979553, 0.8122502565383911], [0.3400469422340393, 0.808899998664856], [0.34080708026885986, 0.805828332901001], [0.3416341543197632, 0.8025070428848267], [0.34229522943496704, 0.7994704246520996], [0.3430328965187073, 0.7961467504501343], [0.3436012268066406, 0.7930900454521179], [0.34423595666885376, 0.7896384000778198], [0.34469980001449585, 0.7864227890968323], [0.34514182806015015, 0.7836250066757202], [0.3454933762550354, 0.7810104489326477], [0.34594374895095825, 0.7776749134063721], [0.3459625840187073, 0.7743955254554749], [0.3460874557495117, 
0.7724077701568604], [0.346302330493927, 0.7707484364509583], [0.34673768281936646, 0.7691335678100586], [0.3474515676498413, 0.7667942643165588], [0.3481079339981079, 0.7647106647491455], [0.3485608696937561, 0.7627946734428406], [0.34895801544189453, 0.7605284452438354], [0.34922248125076294, 0.7585165500640869], [0.3494918942451477, 0.7562307119369507], [0.34970927238464355, 0.7543402910232544], [0.3500819802284241, 0.7526600956916809], [0.35014891624450684, 0.7513152956962585], [0.35017287731170654, 0.7502573728561401], [0.3502070903778076, 0.7493102550506592], [0.3502519130706787, 0.7471675872802734], [0.3506566882133484, 0.7455912828445435], [0.3518977165222168, 0.7419734001159668], [0.35242795944213867, 0.7384467720985413], [0.3528522253036499, 0.7352113127708435], [0.353163480758667, 0.7323259115219116], [0.35356223583221436, 0.7293984889984131], [0.3538956642150879, 0.726594090461731], [0.35433775186538696, 0.7236500978469849], [0.35470759868621826, 0.7208203077316284], [0.3552159070968628, 0.7176629900932312], [0.35557353496551514, 0.7148025035858154], [0.3560364246368408, 0.7117508053779602], [0.3563637137413025, 0.7088844776153564], [0.3567812442779541, 0.7058520317077637], [0.35707879066467285, 0.7029979228973389], [0.35747039318084717, 0.7000316977500916], [0.35773396492004395, 0.6972627639770508], [0.3580969572067261, 0.694329023361206], [0.3583527207374573, 0.691562294960022], [0.358707070350647, 0.6886536478996277], [0.35895538330078125, 0.6859257817268372], [0.3592984676361084, 0.6830527782440186], [0.35953032970428467, 0.6803412437438965], [0.35985255241394043, 0.6775076389312744], [0.3600695729255676, 0.6748380064964294], [0.36037778854370117, 0.6720224618911743], [0.36057937145233154, 0.6693570613861084], [0.36087048053741455, 0.6665282249450684], [0.36106622219085693, 0.6638481616973877], [0.3613501787185669, 0.6610347032546997], [0.3615257740020752, 0.6583676934242249], [0.3617854118347168, 0.6555707454681396], [0.3619450330734253, 0.6529073119163513], [0.3621906042098999, 0.650100827217102], [0.36234670877456665, 0.6474257707595825], [0.36258864402770996, 0.6446117758750916], [0.3627285957336426, 0.6419214010238647], [0.3629501461982727, 0.6391258239746094], [0.3630760908126831, 0.6364391446113586], [0.3632813096046448, 0.6336207389831543], [0.3633911609649658, 0.6309130191802979], [0.3635726571083069, 0.628120481967926], [0.36365336179733276, 0.6254209280014038], [0.36380475759506226, 0.6225907802581787], [0.3638668656349182, 0.6198430061340332], [0.3639943599700928, 0.6170151233673096], [0.3640177249908447, 0.6142632961273193], [0.3640982508659363, 0.6114442348480225], [0.3640822172164917, 0.6086935997009277], [0.3641226887702942, 0.6058728694915771], [0.36406177282333374, 0.6031110882759094], [0.3640590310096741, 0.6002925038337708], [0.3639630079269409, 0.5975210666656494], [0.3639180064201355, 0.5947096347808838], [0.3637688159942627, 0.591942310333252], [0.3636730909347534, 0.5891560316085815], [0.36348098516464233, 0.5863993167877197], [0.3633435368537903, 0.5836555361747742], [0.3631094694137573, 0.5809417963027954], [0.36293184757232666, 0.5782672166824341], [0.3626614809036255, 0.5756183862686157], [0.36243873834609985, 0.5729763507843018], [0.3621327877044678, 0.570361852645874], [0.36188334226608276, 0.5677945613861084], [0.36156439781188965, 0.5652366876602173], [0.36129868030548096, 0.5627329349517822], [0.36095404624938965, 0.5602291822433472], [0.36066150665283203, 0.5577430725097656], [0.3602985143661499, 0.555241048336029], [0.3599904775619507, 
0.552776575088501], [0.35961079597473145, 0.5502961874008179], [0.35927581787109375, 0.5478532910346985], [0.3588517904281616, 0.545362114906311], [0.35848599672317505, 0.5429262518882751], [0.35803985595703125, 0.5404481887817383], [0.35764986276626587, 0.5380010604858398], [0.3571798801422119, 0.5355008840560913], [0.3567744493484497, 0.5330725908279419], [0.3562789559364319, 0.5305829048156738], [0.35583752393722534, 0.5281317234039307], [0.35530030727386475, 0.5256022214889526], [0.35482656955718994, 0.5231322646141052], [0.35424602031707764, 0.5205680727958679], [0.35371482372283936, 0.5180381536483765], [0.35306107997894287, 0.5154072046279907], [0.35245126485824585, 0.512780487537384], [0.3517010807991028, 0.5100418329238892], [0.3510042428970337, 0.5073568820953369], [0.3501901626586914, 0.5045605301856995], [0.34944164752960205, 0.5018411874771118], [0.34852778911590576, 0.49900591373443604], [0.34764915704727173, 0.49618902802467346], [0.3465909957885742, 0.49325627088546753], [0.3455604314804077, 0.4903334081172943], [0.3443472385406494, 0.4873107969760895], [0.3431698679924011, 0.4843045473098755], [0.34181445837020874, 0.4812042713165283], [0.337793231010437, 0.8257648944854736], [0.33864855766296387, 0.8229959607124329], [0.33964598178863525, 0.8198925256729126], [0.34071624279022217, 0.8165782690048218], [0.34182727336883545, 0.8130385279655457], [0.34266847372055054, 0.8099122047424316], [0.3435644507408142, 0.8064975738525391], [0.3442705273628235, 0.8034098148345947], [0.34503793716430664, 0.8000366687774658], [0.34563982486724854, 0.7969366908073425], [0.3463062047958374, 0.7935471534729004], [0.3468344807624817, 0.7903034090995789], [0.3474152088165283, 0.7867618799209595], [0.3477436304092407, 0.7842155694961548], [0.34819185733795166, 0.7813680171966553], [0.3486611843109131, 0.7782662510871887], [0.34936511516571045, 0.774453341960907], [0.3495450019836426, 0.773253321647644], [0.34994959831237793, 0.7714235782623291], [0.35005486011505127, 0.7698721289634705], [0.3503853678703308, 0.7678163051605225], [0.3509371876716614, 0.7657299041748047], [0.35142529010772705, 0.763314962387085], [0.35180556774139404, 0.7612707614898682], [0.35224562883377075, 0.7589147686958313], [0.35260850191116333, 0.7569894790649414], [0.3530579209327698, 0.7548712491989136], [0.35321521759033203, 0.753429651260376], [0.35353660583496094, 0.7513406276702881], [0.3535885810852051, 0.7506266236305237], [0.35365575551986694, 0.7493113279342651], [0.35384315252304077, 0.7478071451187134], [0.35401856899261475, 0.7467244267463684], [0.35462749004364014, 0.7426880598068237], [0.3552151918411255, 0.7387503385543823], [0.3556133508682251, 0.7358139753341675], [0.3561093807220459, 0.7326412200927734], [0.3563849925994873, 0.729915976524353], [0.3567350506782532, 0.7268946170806885], [0.35698843002319336, 0.7241564989089966], [0.3574056625366211, 0.7212088108062744], [0.3578411340713501, 0.7182797193527222], [0.358420729637146, 0.7152396440505981], [0.3588440418243408, 0.712340235710144], [0.3593341112136841, 0.7092610597610474], [0.35966914892196655, 0.706383466720581], [0.3600929379463196, 0.7033505439758301], [0.3604009747505188, 0.7005550861358643], [0.3608049154281616, 0.6975911855697632], [0.3610800504684448, 0.6948220729827881], [0.3614516854286194, 0.691886305809021], [0.36171936988830566, 0.6891466379165649], [0.362088143825531, 0.6862567067146301], [0.3623476028442383, 0.6835297346115112], [0.3626999258995056, 0.6806588768959045], [0.3629470467567444, 0.6779765486717224], 
[0.3632870316505432, 0.6751493215560913], [0.36351656913757324, 0.6724664568901062], [0.36382877826690674, 0.6696419715881348], [0.36404240131378174, 0.6669543981552124], [0.36434781551361084, 0.6641396284103394], [0.3645556569099426, 0.6614466905593872], [0.36484295129776, 0.6586289405822754], [0.3650243282318115, 0.6559498310089111], [0.36528754234313965, 0.653148889541626], [0.36545878648757935, 0.6504669785499573], [0.3657148480415344, 0.6476731300354004], [0.36588233709335327, 0.6449636220932007], [0.3661279082298279, 0.6421437859535217], [0.3662703037261963, 0.6394422054290771], [0.3664918541908264, 0.6366434097290039], [0.3666297197341919, 0.6339240670204163], [0.3668471574783325, 0.6311022043228149], [0.36696571111679077, 0.6283842325210571], [0.36714625358581543, 0.6255544424057007], [0.36722350120544434, 0.6227988004684448], [0.36736637353897095, 0.6199452877044678], [0.36741673946380615, 0.6171767711639404], [0.3675307631492615, 0.6143074631690979], [0.36754393577575684, 0.6115366220474243], [0.3676089644432068, 0.6086761951446533], [0.367575466632843, 0.6058956980705261], [0.36759382486343384, 0.6030251979827881], [0.3675157427787781, 0.6002485752105713], [0.36748993396759033, 0.597381591796875], [0.36736488342285156, 0.5945860147476196], [0.36728501319885254, 0.5917196273803711], [0.36710846424102783, 0.588956356048584], [0.36698073148727417, 0.5861160755157471], [0.3667619228363037, 0.5833823680877686], [0.36659133434295654, 0.5805947780609131], [0.3663330078125, 0.5779226422309875], [0.3661169409751892, 0.5752039551734924], [0.3658180236816406, 0.5725616216659546], [0.3655635118484497, 0.569893479347229], [0.365234375, 0.5673240423202515], [0.3649507761001587, 0.5647422671318054], [0.36460185050964355, 0.5622222423553467], [0.36429738998413086, 0.5596921443939209], [0.3639305830001831, 0.5572021007537842], [0.3636164665222168, 0.5546948909759521], [0.3632446527481079, 0.5522135496139526], [0.3629199266433716, 0.5497218370437622], [0.3625181317329407, 0.5472264289855957], [0.3621436357498169, 0.5447134971618652], [0.36168885231018066, 0.5422413349151611], [0.36128556728363037, 0.539778470993042], [0.36081862449645996, 0.5373000502586365], [0.36041259765625, 0.534824013710022], [0.359938383102417, 0.5323419570922852], [0.3595120310783386, 0.5298618078231812], [0.3589975833892822, 0.5273432731628418], [0.3585338592529297, 0.5248385667800903], [0.35798490047454834, 0.5222971439361572], [0.35748112201690674, 0.5197467803955078], [0.35686832666397095, 0.5171205997467041], [0.35630035400390625, 0.5145048499107361], [0.3555968999862671, 0.5117669701576233], [0.35494399070739746, 0.5090469717979431], [0.3541886806488037, 0.5062583684921265], [0.35349375009536743, 0.5035040378570557], [0.3526478409767151, 0.5006250143051147], [0.35183942317962646, 0.49779805541038513], [0.35083794593811035, 0.49481433629989624], [0.3498803377151489, 0.4918982982635498], [0.34872376918792725, 0.4888123571872711], [0.3476296067237854, 0.4858105182647705], [0.3463318347930908, 0.48263803124427795], [0.34509438276290894, 0.4795633554458618], [0.3406141400337219, 0.8270276784896851], [0.3415830135345459, 0.8239282369613647], [0.34243953227996826, 0.8211753368377686], [0.3435693383216858, 0.8174780607223511], [0.344504177570343, 0.8141430616378784], [0.34547024965286255, 0.8106406927108765], [0.346243679523468, 0.8074593544006348], [0.3470693826675415, 0.8040233850479126], [0.34772318601608276, 0.8008819818496704], [0.34844356775283813, 0.7974527478218079], [0.3490159511566162, 0.7943040728569031], 
[0.34967315196990967, 0.7907232046127319], [0.3501587510108948, 0.7873953580856323], [0.3506283164024353, 0.7846039533615112], [0.3510204553604126, 0.7820671796798706], [0.351689875125885, 0.778857946395874], [0.3526707887649536, 0.7759112119674683], [0.3533817529678345, 0.77406907081604], [0.3536275625228882, 0.7722365856170654], [0.35387128591537476, 0.7698347568511963], [0.35395175218582153, 0.7689931392669678], [0.35436248779296875, 0.7663757801055908], [0.3547585606575012, 0.7642855644226074], [0.3552272319793701, 0.7618787288665771], [0.3555988073348999, 0.759833574295044], [0.35601478815078735, 0.757644534111023], [0.35632699728012085, 0.7557504773139954], [0.3567028045654297, 0.7537841796875], [0.35694169998168945, 0.7523199319839478], [0.35725998878479004, 0.7509446144104004], [0.3574204444885254, 0.7501600980758667], [0.357724666595459, 0.7483163475990295], [0.35772526264190674, 0.7469639778137207], [0.35756123065948486, 0.7428945899009705], [0.3579108715057373, 0.7393842935562134], [0.3584550619125366, 0.7362201809883118], [0.3588665723800659, 0.7332407832145691], [0.35936158895492554, 0.7302473783493042], [0.35971951484680176, 0.7274507284164429], [0.36016547679901123, 0.7245205640792847], [0.3604899048805237, 0.7218472957611084], [0.3609742522239685, 0.7188201546669006], [0.3613870143890381, 0.7159401178359985], [0.36191290616989136, 0.7127988934516907], [0.36229556798934937, 0.7098670601844788], [0.3627658486366272, 0.7067734599113464], [0.363095760345459, 0.7039085626602173], [0.36351239681243896, 0.7009342312812805], [0.36382347345352173, 0.698135256767273], [0.3642312288284302, 0.6951755285263062], [0.36452388763427734, 0.6924152970314026], [0.3649147152900696, 0.6895116567611694], [0.36519521474838257, 0.6867765784263611], [0.36557042598724365, 0.6838816404342651], [0.3658391833305359, 0.6811646819114685], [0.36619603633880615, 0.6783238649368286], [0.366449236869812, 0.6756399869918823], [0.36679309606552124, 0.6727896332740784], [0.3670288324356079, 0.6701014041900635], [0.3673500418663025, 0.667265772819519], [0.3675711154937744, 0.6645863056182861], [0.3678799867630005, 0.6617462635040283], [0.3680904507637024, 0.6590506434440613], [0.36838436126708984, 0.6562267541885376], [0.3685784339904785, 0.6535454988479614], [0.3688543438911438, 0.6507340669631958], [0.36903196573257446, 0.6480515003204346], [0.3692931532859802, 0.6452223062515259], [0.36946719884872437, 0.6425058841705322], [0.36972367763519287, 0.6396793127059937], [0.3698798418045044, 0.6369768381118774], [0.3701108694076538, 0.634150505065918], [0.3702528476715088, 0.631415605545044], [0.3704646825790405, 0.6285715103149414], [0.37058401107788086, 0.6258162260055542], [0.3707713484764099, 0.6229360103607178], [0.37084996700286865, 0.6201426982879639], [0.3709888458251953, 0.6172707080841064], [0.3710402250289917, 0.6144620180130005], [0.3711477518081665, 0.6115646362304688], [0.37114691734313965, 0.6087411046028137], [0.37119340896606445, 0.605850100517273], [0.3711404800415039, 0.6030139923095703], [0.3711376190185547, 0.6001354455947876], [0.3710362911224365, 0.5972950458526611], [0.37098026275634766, 0.5943942666053772], [0.37082093954086304, 0.5915395617485046], [0.37070798873901367, 0.5886865258216858], [0.3704984188079834, 0.5858625769615173], [0.37034082412719727, 0.5830469131469727], [0.37008655071258545, 0.580265462398529], [0.3698831796646118, 0.5775169134140015], [0.3695847988128662, 0.5747915506362915], [0.3693352937698364, 0.5720964670181274], [0.36900198459625244, 0.5694181323051453], 
[... long run of numeric data elided: the hunk continues with thousands of [x, y] coordinate pairs, all lying in the unit square and apparently per-frame facial-landmark tracks shipped as part of the avatar sample data ...]
0.7034785747528076], [0.41242289543151855, 0.7007461786270142], [0.4129636287689209, 0.6978316307067871], [0.41342079639434814, 0.6951048970222473], [0.4139823913574219, 0.6921850442886353], [0.41445088386535645, 0.6894452571868896], [0.4150170683860779, 0.6865575909614563], [0.4154977798461914, 0.6838420629501343], [0.4160805940628052, 0.680956244468689], [0.4165804982185364, 0.6782231330871582], [0.4171733856201172, 0.6753140687942505], [0.41767561435699463, 0.6725420355796814], [0.41826331615448, 0.669622004032135], [0.4187423586845398, 0.6668586730957031], [0.4192727208137512, 0.6639902591705322], [0.4197271466255188, 0.661300539970398], [0.42023879289627075, 0.6584901809692383], [0.420701265335083, 0.65583336353302], [0.42123210430145264, 0.6529088020324707], [0.42169153690338135, 0.6500988006591797], [0.42221903800964355, 0.6469665765762329], [0.42262476682662964, 0.6438838243484497], [0.42302626371383667, 0.6407625675201416], [0.42333441972732544, 0.6378490328788757], [0.42368263006210327, 0.6348415613174438], [0.4239993095397949, 0.6319654583930969], [0.42438220977783203, 0.6287915110588074], [0.4246867299079895, 0.625645637512207], [0.4250587224960327, 0.6221610307693481], [0.42529237270355225, 0.6186796426773071], [0.4256013035774231, 0.6149904727935791], [0.42580175399780273, 0.6113767623901367], [0.4260287880897522, 0.6076605319976807], [0.42613422870635986, 0.6040365099906921], [0.4262767434120178, 0.6003538370132446], [0.4263080954551697, 0.5967546701431274], [0.4263840913772583, 0.5931469202041626], [0.4263920783996582, 0.5896432399749756], [0.4264637231826782, 0.5861876606941223], [0.426472544670105, 0.5828047394752502], [0.4265558123588562, 0.5795193910598755], [0.4266003966331482, 0.5762495994567871], [0.4267103672027588, 0.5729967951774597], [0.4267982840538025, 0.5698082447052002], [0.4269702434539795, 0.5666269063949585], [0.4271438717842102, 0.5633136630058289], [0.42732417583465576, 0.5600405931472778], [0.4273167848587036, 0.5569598078727722], [0.42691802978515625, 0.5540565252304077], [0.42616772651672363, 0.5505883693695068], [0.42537105083465576, 0.5448470115661621], [0.42501211166381836, 0.5414087176322937], [0.42426764965057373, 0.5384300351142883], [0.42336326837539673, 0.5353434681892395], [0.4225056767463684, 0.5321462154388428], [0.4214431047439575, 0.528830885887146], [0.4205629825592041, 0.5261709690093994], [0.41984260082244873, 0.5234686732292175], [0.4192596673965454, 0.5215600728988647], [0.418670117855072, 0.5196361541748047], [0.4181128740310669, 0.517619252204895], [0.41754937171936035, 0.5156152248382568], [0.41705071926116943, 0.5133302211761475], [0.41649889945983887, 0.5110020637512207], [0.4160400629043579, 0.5084093809127808], [0.4154505729675293, 0.5056570172309875], [0.4149240255355835, 0.5027377605438232], [0.4142106771469116, 0.4996493458747864], [0.4135565757751465, 0.49637091159820557], [0.41272950172424316, 0.4929179549217224], [0.4119666814804077, 0.4893747866153717], [0.4110453724861145, 0.4856683313846588], [0.410178005695343, 0.4819300174713135], [0.409115195274353, 0.4779716730117798], [0.4080994725227356, 0.4740716516971588], [0.4068554639816284, 0.46995270252227783], [0.4056687355041504, 0.4659613370895386], [0.40429043769836426, 0.4617767632007599], [0.4029783606529236, 0.4577370285987854], [0.40150660276412964, 0.4535188376903534], [0.39034390449523926, 0.8384695649147034], [0.39046376943588257, 0.836121678352356], [0.3906117081642151, 0.8333359956741333], [0.39085841178894043, 0.830176591873169], [0.39132845401763916, 
0.8267227411270142], [0.3918076157569885, 0.8238332271575928], [0.39246314764022827, 0.8205612897872925], [0.39305585622787476, 0.8178129196166992], [0.3938025236129761, 0.8146688938140869], [0.39446383714675903, 0.8119488954544067], [0.3952714800834656, 0.8088428974151611], [0.3959841728210449, 0.8060523271560669], [0.39680224657058716, 0.8028718829154968], [0.39747321605682373, 0.7999148368835449], [0.3981499671936035, 0.7965710759162903], [0.39859330654144287, 0.7933000326156616], [0.3992772698402405, 0.7895466685295105], [0.4001005291938782, 0.786347508430481], [0.4014025926589966, 0.7835949659347534], [0.4017524719238281, 0.7814533710479736], [0.4018581509590149, 0.7799699306488037], [0.4025983214378357, 0.7770951986312866], [0.40297502279281616, 0.7742821574211121], [0.4034118056297302, 0.7720670104026794], [0.40387195348739624, 0.7693691253662109], [0.4041255712509155, 0.7672811150550842], [0.4044666886329651, 0.7651110887527466], [0.404768705368042, 0.7634047865867615], [0.4051404595375061, 0.7615655660629272], [0.4054826498031616, 0.7601557970046997], [0.40588700771331787, 0.7584127187728882], [0.4062504768371582, 0.7575129270553589], [0.40697962045669556, 0.7559213638305664], [0.4079933762550354, 0.7523288726806641], [0.40882623195648193, 0.748451828956604], [0.40926581621170044, 0.7452824711799622], [0.4097564220428467, 0.7419238090515137], [0.4101144075393677, 0.738906741142273], [0.4105914831161499, 0.7356699109077454], [0.4109645485877991, 0.7328481674194336], [0.41144973039627075, 0.7298036217689514], [0.41183966398239136, 0.7270168662071228], [0.41233229637145996, 0.7239810228347778], [0.41271477937698364, 0.7212392687797546], [0.41320228576660156, 0.7182563543319702], [0.413593590259552, 0.7155500650405884], [0.41409265995025635, 0.7126287221908569], [0.4145117402076721, 0.7099301218986511], [0.415041446685791, 0.707014799118042], [0.41548556089401245, 0.7043343782424927], [0.41604191064834595, 0.7014281749725342], [0.41651612520217896, 0.698733925819397], [0.4170985221862793, 0.6958386898040771], [0.41759467124938965, 0.693122148513794], [0.41819870471954346, 0.6902197599411011], [0.41870391368865967, 0.6875284910202026], [0.41931164264678955, 0.6846602559089661], [0.41983091831207275, 0.6819628477096558], [0.42046642303466797, 0.6791209578514099], [0.42105746269226074, 0.6764308214187622], [0.4217700958251953, 0.6734986305236816], [0.4223414659500122, 0.670684814453125], [0.4229525327682495, 0.6677314639091492], [0.42343854904174805, 0.6649616956710815], [0.42394983768463135, 0.6621328592300415], [0.42440932989120483, 0.659477710723877], [0.42492955923080444, 0.6567164063453674], [0.4254700541496277, 0.6539945006370544], [0.4260849356651306, 0.6509466171264648], [0.4265742301940918, 0.6478668451309204], [0.4270756244659424, 0.6445040702819824], [0.42739325761795044, 0.6413812637329102], [0.4277087450027466, 0.638360321521759], [0.4279807209968567, 0.6354366540908813], [0.4282923936843872, 0.6325169801712036], [0.4286426305770874, 0.6294697523117065], [0.4290553331375122, 0.6261371374130249], [0.4293561577796936, 0.622698187828064], [0.4296833276748657, 0.6189709901809692], [0.4298171401023865, 0.6153297424316406], [0.4300292134284973, 0.6116252541542053], [0.4301750063896179, 0.6079226732254028], [0.4303629398345947, 0.6041847467422485], [0.4304426312446594, 0.6005104780197144], [0.43056297302246094, 0.5968138575553894], [0.43058741092681885, 0.593226969242096], [0.4306652545928955, 0.5896929502487183], [0.4306696653366089, 0.5862460136413574], [0.4307432174682617, 
0.582876443862915], [0.4307628870010376, 0.5796051025390625], [0.4308738112449646, 0.5764217376708984], [0.43102651834487915, 0.5732535719871521], [0.4312567710876465, 0.570098340511322], [0.4314500689506531, 0.5669646263122559], [0.4317147135734558, 0.5637266039848328], [0.43207281827926636, 0.5605913400650024], [0.43228763341903687, 0.556713342666626], [0.43182373046875, 0.5527510046958923], [0.43099892139434814, 0.5492154359817505], [0.4302749037742615, 0.5444694757461548], [0.4298185110092163, 0.5404903888702393], [0.4291837811470032, 0.5368112921714783], [0.4283711910247803, 0.5336309671401978], [0.42717230319976807, 0.5302020311355591], [0.4259014129638672, 0.5268649458885193], [0.4251708984375, 0.5245447158813477], [0.4243810772895813, 0.521918773651123], [0.4238940477371216, 0.5199781060218811], [0.42336010932922363, 0.5179600715637207], [0.4228094816207886, 0.5160460472106934], [0.4222724437713623, 0.5140314102172852], [0.4217526912689209, 0.51209557056427], [0.42130112648010254, 0.5097435116767883], [0.4207066297531128, 0.5072506666183472], [0.42017364501953125, 0.5044811964035034], [0.4194566011428833, 0.501517653465271], [0.41881680488586426, 0.49842292070388794], [0.4179607629776001, 0.49508053064346313], [0.4172009229660034, 0.49168992042541504], [0.41627079248428345, 0.48806387186050415], [0.4154406785964966, 0.48443880677223206], [0.41440171003341675, 0.48052701354026794], [0.41342777013778687, 0.4766579270362854], [0.4122242331504822, 0.47252851724624634], [0.4110645651817322, 0.46849334239959717], [0.409717321395874, 0.4642917215824127], [0.4084380865097046, 0.46021848917007446], [0.40699535608291626, 0.45596814155578613], [0.40564483404159546, 0.4518930912017822], [0.39467060565948486, 0.8390508890151978], [0.39479124546051025, 0.8360286951065063], [0.39479881525039673, 0.8337097764015198], [0.3949586749076843, 0.8304550647735596], [0.39525163173675537, 0.8276064991950989], [0.39570295810699463, 0.8244329690933228], [0.3961818814277649, 0.82170569896698], [0.39681148529052734, 0.8185912370681763], [0.3973851203918457, 0.8159114122390747], [0.3981051445007324, 0.81281977891922], [0.39874207973480225, 0.8101285696029663], [0.3995342254638672, 0.8069379329681396], [0.40021443367004395, 0.804099440574646], [0.40104418992996216, 0.8007071614265442], [0.40169429779052734, 0.7975599765777588], [0.40247631072998047, 0.7938820719718933], [0.4031063914299011, 0.7907931804656982], [0.40407776832580566, 0.7880169749259949], [0.40489304065704346, 0.785571813583374], [0.40621668100357056, 0.7818710207939148], [0.40635740756988525, 0.7805055379867554], [0.40612781047821045, 0.7774431705474854], [0.40655142068862915, 0.7754645943641663], [0.40702134370803833, 0.7724556922912598], [0.40717822313308716, 0.7699520587921143], [0.4074622392654419, 0.7677130699157715], [0.4077163338661194, 0.7658621668815613], [0.408048152923584, 0.7641342878341675], [0.4084181785583496, 0.7626489400863647], [0.40881800651550293, 0.7611197233200073], [0.40923595428466797, 0.7601368427276611], [0.4096609354019165, 0.758660078048706], [0.40994709730148315, 0.7573574781417847], [0.41119611263275146, 0.7530595064163208], [0.41183656454086304, 0.7491600513458252], [0.412372350692749, 0.7456711530685425], [0.4127730131149292, 0.7425611019134521], [0.4132883548736572, 0.7393210530281067], [0.41370153427124023, 0.7363556623458862], [0.4142056703567505, 0.7333240509033203], [0.4146202802658081, 0.730533242225647], [0.41513729095458984, 0.7275227904319763], [0.41555386781692505, 0.7247104644775391], 
[0.4160614013671875, 0.7217657566070557], [0.4164873957633972, 0.7190208435058594], [0.41701042652130127, 0.7161203622817993], [0.4174419045448303, 0.713426947593689], [0.4179738163948059, 0.7105549573898315], [0.4184349775314331, 0.7078707218170166], [0.41899633407592773, 0.7050122618675232], [0.4194835424423218, 0.7023383378982544], [0.4200776219367981, 0.6994696855545044], [0.4205852150917053, 0.6967832446098328], [0.4212031960487366, 0.6939084529876709], [0.42173874378204346, 0.6912102699279785], [0.42237722873687744, 0.6883595585823059], [0.42292320728302, 0.6856787204742432], [0.4235708713531494, 0.6828420162200928], [0.4241291284561157, 0.6802154779434204], [0.42480146884918213, 0.6774352788925171], [0.42546629905700684, 0.6747151613235474], [0.4263201951980591, 0.6717897057533264], [0.4270806312561035, 0.6690112352371216], [0.42790108919143677, 0.6660581827163696], [0.42852258682250977, 0.6632639169692993], [0.42909854650497437, 0.6605249047279358], [0.42962074279785156, 0.6578845977783203], [0.4301031827926636, 0.6548891067504883], [0.4305441379547119, 0.6520282030105591], [0.43110716342926025, 0.6485582590103149], [0.4314601421356201, 0.6451963186264038], [0.43190997838974, 0.6420196890830994], [0.4322846531867981, 0.6389869451522827], [0.43263864517211914, 0.6360179781913757], [0.43299221992492676, 0.6332885026931763], [0.433335542678833, 0.6299610137939453], [0.4335450530052185, 0.6266945600509644], [0.4337958097457886, 0.6229971051216125], [0.43385452032089233, 0.6192116737365723], [0.4340120553970337, 0.6154994964599609], [0.43413281440734863, 0.6119115352630615], [0.4343376159667969, 0.6081116199493408], [0.4344584345817566, 0.6044114828109741], [0.4346376657485962, 0.6006399393081665], [0.43471992015838623, 0.596960186958313], [0.4348520040512085, 0.5933060646057129], [0.4348851442337036, 0.5897723436355591], [0.4349808692932129, 0.5863236784934998], [0.43502718210220337, 0.5829836130142212], [0.43515467643737793, 0.5797423124313354], [0.4352366328239441, 0.5766066908836365], [0.4353811740875244, 0.5735857486724854], [0.43557578325271606, 0.5704821944236755], [0.4358106851577759, 0.5673047304153442], [0.43602871894836426, 0.564190149307251], [0.4361104965209961, 0.5608444213867188], [0.4360215663909912, 0.5566307902336121], [0.4359791874885559, 0.5522037744522095], [0.4356311559677124, 0.5480287671089172], [0.43509721755981445, 0.5440158843994141], [0.4346144199371338, 0.5396157503128052], [0.4339883327484131, 0.5357264876365662], [0.4332178831100464, 0.5319762229919434], [0.4323059916496277, 0.5282365679740906], [0.43147140741348267, 0.5247691869735718], [0.4306930899620056, 0.522331714630127], [0.4300042390823364, 0.5200268030166626], [0.4294276833534241, 0.5181069374084473], [0.42871397733688354, 0.5159029960632324], [0.42821550369262695, 0.5142369270324707], [0.4276692867279053, 0.512249231338501], [0.42714881896972656, 0.5103216171264648], [0.4264918565750122, 0.508231520652771], [0.4258958101272583, 0.505732536315918], [0.42509734630584717, 0.5029990077018738], [0.4244101643562317, 0.5001027584075928], [0.4235001802444458, 0.49692681431770325], [0.422707200050354, 0.49368762969970703], [0.42174649238586426, 0.4902241826057434], [0.4208962321281433, 0.4867278039455414], [0.4198717474937439, 0.4829845726490021], [0.4189246892929077, 0.4791674017906189], [0.41776055097579956, 0.47509896755218506], [0.41665470600128174, 0.47105932235717773], [0.4153209924697876, 0.46677231788635254], [0.41406822204589844, 0.46270179748535156], [0.41264045238494873, 0.45841261744499207], 
[0.4113096594810486, 0.45431268215179443], [0.4098138213157654, 0.45001164078712463], [0.39900439977645874, 0.8389341235160828], [0.39905035495758057, 0.8364449739456177], [0.39912158250808716, 0.8334357738494873], [0.39911097288131714, 0.8309450745582581], [0.3992806673049927, 0.8279184103012085], [0.39958107471466064, 0.825371265411377], [0.40004289150238037, 0.8223246335983276], [0.4005032777786255, 0.8197177052497864], [0.4011077880859375, 0.8166619539260864], [0.40165430307388306, 0.8140228986740112], [0.402341365814209, 0.8109476566314697], [0.4029766917228699, 0.8081689476966858], [0.4037679433822632, 0.804956316947937], [0.4045022130012512, 0.8019458055496216], [0.4054645299911499, 0.7984404563903809], [0.40633732080459595, 0.7953051924705505], [0.4073583483695984, 0.7918260097503662], [0.4076974391937256, 0.7892436981201172], [0.4080491065979004, 0.7862297296524048], [0.40856969356536865, 0.7828482389450073], [0.40906280279159546, 0.7804423570632935], [0.40967291593551636, 0.778638482093811], [0.4100341796875, 0.775759220123291], [0.4101678133010864, 0.7729716300964355], [0.41054773330688477, 0.7702354788780212], [0.41074514389038086, 0.768372654914856], [0.41107630729675293, 0.7664748430252075], [0.4113013744354248, 0.7648582458496094], [0.41151487827301025, 0.7635054588317871], [0.41179412603378296, 0.7628685235977173], [0.41235172748565674, 0.7614235877990723], [0.41290491819381714, 0.7600060701370239], [0.41348445415496826, 0.7584599256515503], [0.4143107533454895, 0.7538779973983765], [0.41489797830581665, 0.7494821548461914], [0.4152652621269226, 0.7463229894638062], [0.41582024097442627, 0.7429805994033813], [0.41625726222991943, 0.7400292754173279], [0.4168384075164795, 0.7368514537811279], [0.417283296585083, 0.7340660691261292], [0.4178274869918823, 0.7310613989830017], [0.41827166080474854, 0.7282777428627014], [0.418825626373291, 0.7252568602561951], [0.4192696213722229, 0.7225511074066162], [0.4198251962661743, 0.7196190357208252], [0.4202859401702881, 0.7169477939605713], [0.4208507537841797, 0.714056670665741], [0.42131632566452026, 0.7114167809486389], [0.4218876361846924, 0.7085585594177246], [0.42238086462020874, 0.7059288620948792], [0.42298412322998047, 0.7030884027481079], [0.4235078692436218, 0.7004325985908508], [0.4241408109664917, 0.6975816488265991], [0.4246865510940552, 0.6949161291122437], [0.4253442883491516, 0.6920714378356934], [0.4259161949157715, 0.6894190311431885], [0.4265977144241333, 0.6865844130516052], [0.4271801710128784, 0.683931291103363], [0.4278287887573242, 0.6811098456382751], [0.4283524751663208, 0.6784982085227966], [0.429047167301178, 0.675889253616333], [0.4299013614654541, 0.6734102368354797], [0.4310845136642456, 0.6705942153930664], [0.43216395378112793, 0.667902410030365], [0.4332352876663208, 0.6642270088195801], [0.4336487650871277, 0.6616625785827637], [0.43416476249694824, 0.6588937640190125], [0.43475162982940674, 0.6558778285980225], [0.4352145195007324, 0.6527789831161499], [0.435583233833313, 0.649306058883667], [0.4360637664794922, 0.6458007097244263], [0.43639278411865234, 0.6428080201148987], [0.43685901165008545, 0.6397267580032349], [0.43733835220336914, 0.6369228363037109], [0.43772125244140625, 0.6338592767715454], [0.4378476142883301, 0.6301721334457397], [0.4379177689552307, 0.626956582069397], [0.43796688318252563, 0.6231558918952942], [0.43805134296417236, 0.6192479133605957], [0.4380735754966736, 0.6157030463218689], [0.4382237195968628, 0.6120889782905579], [0.43834131956100464, 0.608351469039917], 
[0.4385376572608948, 0.6045945882797241], [0.43866729736328125, 0.6008618474006653], [0.43885642290115356, 0.5971052646636963], [0.4389413595199585, 0.5934569239616394], [0.4390600919723511, 0.5898394584655762], [0.4390783905982971, 0.5864191055297852], [0.43917250633239746, 0.583107590675354], [0.4392617344856262, 0.5799550414085388], [0.43940436840057373, 0.5767573118209839], [0.4395267963409424, 0.5738921165466309], [0.43973875045776367, 0.5708544254302979], [0.4400864839553833, 0.5679280757904053], [0.440440833568573, 0.5644896030426025], [0.44030630588531494, 0.5605087280273438], [0.4400695562362671, 0.5565569400787354], [0.43996506929397583, 0.5521095395088196], [0.43992918729782104, 0.5476535558700562], [0.4397934079170227, 0.5434674024581909], [0.4395256042480469, 0.5391577482223511], [0.43928468227386475, 0.5348116159439087], [0.43885648250579834, 0.5308037400245667], [0.43812716007232666, 0.5264930725097656], [0.4372427463531494, 0.5230072736740112], [0.4365183115005493, 0.5200640559196472], [0.4357609152793884, 0.5178014636039734], [0.435017466545105, 0.5157390832901001], [0.43436968326568604, 0.5137739181518555], [0.43363136053085327, 0.5118004083633423], [0.43306028842926025, 0.5103201866149902], [0.43227189779281616, 0.5081652402877808], [0.4316244125366211, 0.5062683820724487], [0.4307713508605957, 0.5039687752723694], [0.4300151467323303, 0.5013085603713989], [0.42907267808914185, 0.49846139550209045], [0.4282093644142151, 0.4953399896621704], [0.427217960357666, 0.49214065074920654], [0.42633986473083496, 0.48877477645874023], [0.42531806230545044, 0.48523569107055664], [0.42438656091690063, 0.48158329725265503], [0.4232330322265625, 0.47764158248901367], [0.4221547842025757, 0.4736601710319519], [0.42085790634155273, 0.4694369435310364], [0.4196048974990845, 0.4652317762374878], [0.41820502281188965, 0.46095988154411316], [0.41686534881591797, 0.45680367946624756], [0.4153904914855957, 0.4525136649608612], [0.4140225648880005, 0.44837242364883423], [0.40343618392944336, 0.8393737077713013], [0.4034273624420166, 0.836101233959198], [0.4032648801803589, 0.8335525393486023], [0.4032939076423645, 0.8308663368225098], [0.4034360647201538, 0.8286013603210449], [0.4036780595779419, 0.8257195949554443], [0.40397316217422485, 0.8233140707015991], [0.4044290781021118, 0.8203201293945312], [0.404865562915802, 0.8177955150604248], [0.40545547008514404, 0.8147546648979187], [0.405988872051239, 0.8121682405471802], [0.40670788288116455, 0.8090215921401978], [0.40736162662506104, 0.8062714338302612], [0.40825146436691284, 0.8029274940490723], [0.40908730030059814, 0.7999781370162964], [0.4101784825325012, 0.7964580059051514], [0.4110422730445862, 0.7932687401771545], [0.4120258688926697, 0.7897530198097229], [0.41238880157470703, 0.7870509028434753], [0.41269028186798096, 0.7832520008087158], [0.41263842582702637, 0.7808785438537598], [0.41253721714019775, 0.7792224884033203], [0.4129948616027832, 0.7761490345001221], [0.4134799838066101, 0.7734625935554504], [0.41390007734298706, 0.7710115909576416], [0.41440558433532715, 0.7693467140197754], [0.41491079330444336, 0.767722487449646], [0.41551709175109863, 0.7660444378852844], [0.41591525077819824, 0.7647162675857544], [0.4158715009689331, 0.7636212110519409], [0.41593772172927856, 0.7627438902854919], [0.4161285161972046, 0.760695219039917], [0.4162377119064331, 0.7593808770179749], [0.417026162147522, 0.7540828585624695], [0.41748398542404175, 0.7500734329223633], [0.41811782121658325, 0.7467466592788696], [0.4186049699783325, 
0.7436739206314087], [0.41921937465667725, 0.7405474185943604], [0.41973811388015747, 0.7376234531402588], [0.4203501343727112, 0.7346212863922119], [0.42083609104156494, 0.7318233251571655], [0.4214150905609131, 0.7288523316383362], [0.4219161868095398, 0.7260669469833374], [0.42250633239746094, 0.7231656312942505], [0.42299890518188477, 0.7204599380493164], [0.4235835075378418, 0.7176037430763245], [0.42408454418182373, 0.7149279117584229], [0.42467397451400757, 0.7121015787124634], [0.4251805543899536, 0.7094689607620239], [0.42577892541885376, 0.7066650390625], [0.4263007640838623, 0.7040392160415649], [0.4269294738769531, 0.7012189626693726], [0.4274832010269165, 0.6985810995101929], [0.4281497001647949, 0.6957567930221558], [0.42872774600982666, 0.6931139230728149], [0.4294092655181885, 0.6903127431869507], [0.43001145124435425, 0.6876674294471741], [0.43072426319122314, 0.684874951839447], [0.43136459589004517, 0.6821986436843872], [0.4320855736732483, 0.6794867515563965], [0.4326478838920593, 0.6771951913833618], [0.4332452416419983, 0.6745196580886841], [0.43382203578948975, 0.6717774868011475], [0.4344531297683716, 0.6685808300971985], [0.43513762950897217, 0.664753794670105], [0.4356916546821594, 0.6620087027549744], [0.4363081455230713, 0.6597403287887573], [0.4373554587364197, 0.6565076112747192], [0.43796300888061523, 0.6532418727874756], [0.43853479623794556, 0.6497962474822998], [0.4391127824783325, 0.6464493274688721], [0.43970203399658203, 0.6433172225952148], [0.4401283264160156, 0.6404154300689697], [0.44049960374832153, 0.6374020576477051], [0.4406866431236267, 0.6340650320053101], [0.44090163707733154, 0.6303062438964844], [0.4411526918411255, 0.6272183656692505], [0.4414726495742798, 0.6232813596725464], [0.44156116247177124, 0.6193077564239502], [0.4416617751121521, 0.6157993078231812], [0.44175004959106445, 0.6122791767120361], [0.4419422745704651, 0.608528196811676], [0.4421256184577942, 0.6048626899719238], [0.4423931837081909, 0.6010688543319702], [0.442588210105896, 0.597365140914917], [0.44282835721969604, 0.5936053991317749], [0.44292962551116943, 0.5899750590324402], [0.44306641817092896, 0.586501955986023], [0.4430999755859375, 0.5832071304321289], [0.44319528341293335, 0.5801471471786499], [0.44335174560546875, 0.5770199298858643], [0.4435306787490845, 0.5741477012634277], [0.44376564025878906, 0.5713530778884888], [0.44393229484558105, 0.5682702660560608], [0.4439082145690918, 0.5645854473114014], [0.4440643787384033, 0.5604279637336731], [0.444078266620636, 0.5563600063323975], [0.4441258907318115, 0.5522563457489014], [0.44432735443115234, 0.5476872324943542], [0.44452500343322754, 0.5435246229171753], [0.4446214437484741, 0.5388296246528625], [0.4445134401321411, 0.534633994102478], [0.44411563873291016, 0.5297573208808899], [0.4433121681213379, 0.5253081321716309], [0.4425477981567383, 0.5213173627853394], [0.44155317544937134, 0.5181939601898193], [0.4407658576965332, 0.5156770944595337], [0.4401620030403137, 0.513717770576477], [0.4394272565841675, 0.5115771293640137], [0.43890440464019775, 0.5097925662994385], [0.43816936016082764, 0.507663369178772], [0.4374709725379944, 0.5059453248977661], [0.43677347898483276, 0.5040178298950195], [0.4360748529434204, 0.5018194913864136], [0.4351006746292114, 0.4992417097091675], [0.4342232942581177, 0.49647194147109985], [0.4331103563308716, 0.4933972954750061], [0.43219494819641113, 0.4903773069381714], [0.4311189651489258, 0.48704493045806885], [0.4301927089691162, 0.4836503863334656], [0.42903947830200195, 
0.4798869490623474], [0.4280017614364624, 0.4760795831680298], [0.4267345666885376, 0.4719218611717224], [0.425531804561615, 0.46778929233551025], [0.4240838885307312, 0.46336761116981506], [0.4227515459060669, 0.4592317044734955], [0.42121225595474243, 0.4548562169075012], [0.41982805728912354, 0.4507846236228943], [0.41833555698394775, 0.44647207856178284], [0.4078829884529114, 0.8390386700630188], [0.40781235694885254, 0.8362733721733093], [0.4077473282814026, 0.8329241275787354], [0.40759843587875366, 0.8310905694961548], [0.40755361318588257, 0.8286166191101074], [0.40770959854125977, 0.8264792561531067], [0.40799635648727417, 0.8236712217330933], [0.40826892852783203, 0.8212202191352844], [0.4086553454399109, 0.818341076374054], [0.40907561779022217, 0.8158446550369263], [0.4096606373786926, 0.8128906488418579], [0.4102284908294678, 0.8102477788925171], [0.4109814167022705, 0.8071527481079102], [0.4117136597633362, 0.8043162226676941], [0.41267216205596924, 0.8011060953140259], [0.4136093258857727, 0.7979221940040588], [0.4147505760192871, 0.7944008111953735], [0.4156830310821533, 0.7911942601203918], [0.41659390926361084, 0.7873445749282837], [0.4169697165489197, 0.7839463949203491], [0.41706228256225586, 0.7801811695098877], [0.4167134761810303, 0.7785060405731201], [0.41647815704345703, 0.7767980098724365], [0.41692066192626953, 0.7743407487869263], [0.41756248474121094, 0.7721723318099976], [0.4179574251174927, 0.7707476615905762], [0.418485164642334, 0.7690191268920898], [0.41883915662765503, 0.7682671546936035], [0.4199565052986145, 0.7663243412971497], [0.4208611249923706, 0.7631574273109436], [0.42048394680023193, 0.7619308233261108], [0.41978001594543457, 0.7601128816604614], [0.41947609186172485, 0.7590878009796143], [0.419317364692688, 0.7546095252037048], [0.42019903659820557, 0.75055330991745], [0.42075079679489136, 0.747474193572998], [0.4214705228805542, 0.7442194223403931], [0.42201387882232666, 0.7413328886032104], [0.4226975440979004, 0.7382420301437378], [0.42325639724731445, 0.7354522943496704], [0.4239192008972168, 0.7324330806732178], [0.424446165561676, 0.7296891212463379], [0.4250810742378235, 0.7267274856567383], [0.42561089992523193, 0.7240407466888428], [0.426242470741272, 0.72114098072052], [0.42676109075546265, 0.7184958457946777], [0.4273796081542969, 0.7156386971473694], [0.42790645360946655, 0.7130287885665894], [0.4285286068916321, 0.710209846496582], [0.4290546774864197, 0.7076057195663452], [0.4296751618385315, 0.7048178315162659], [0.43022215366363525, 0.7022004127502441], [0.4308750629425049, 0.6994261741638184], [0.4314613938331604, 0.6967912912368774], [0.43214941024780273, 0.6940016746520996], [0.43275195360183716, 0.6913833618164062], [0.4334595203399658, 0.688607394695282], [0.43409085273742676, 0.6860131025314331], [0.43487250804901123, 0.6832712888717651], [0.43559229373931885, 0.6808156371116638], [0.436262845993042, 0.6779736280441284], [0.43655627965927124, 0.6751218438148499], [0.4366588592529297, 0.6721082925796509], [0.43673408031463623, 0.6689249277114868], [0.43720144033432007, 0.6652169227600098], [0.4378265142440796, 0.6627097725868225], [0.43855738639831543, 0.660822868347168], [0.43974071741104126, 0.6572926640510559], [0.4405924081802368, 0.6537591218948364], [0.4412599802017212, 0.6504940986633301], [0.442047119140625, 0.6471724510192871], [0.4427976608276367, 0.6442331075668335], [0.4434492588043213, 0.6408183574676514], [0.4435749053955078, 0.6376368999481201], [0.44373035430908203, 0.6342141032218933], [0.44415879249572754, 
0.6308867931365967], [0.4446604251861572, 0.6276025176048279], [0.44484764337539673, 0.6234298944473267], [0.44496166706085205, 0.6193718910217285], [0.4450620412826538, 0.6159740686416626], [0.4452340602874756, 0.6124173402786255], [0.4454101324081421, 0.6088123321533203], [0.44570356607437134, 0.6051427125930786], [0.4459747076034546, 0.6014129519462585], [0.446308970451355, 0.597657322883606], [0.4465429186820984, 0.5938817262649536], [0.4467843770980835, 0.5901287794113159], [0.4468904733657837, 0.5866730213165283], [0.44698846340179443, 0.5832204818725586], [0.44700050354003906, 0.5802918672561646], [0.4471248984336853, 0.5773051381111145], [0.447435200214386, 0.5747207403182983], [0.44778573513031006, 0.5717673301696777], [0.44778650999069214, 0.5681239366531372], [0.44767051935195923, 0.564551830291748], [0.4475914239883423, 0.5605340003967285], [0.44774389266967773, 0.5564358830451965], [0.4479603171348572, 0.5524866580963135], [0.44837063550949097, 0.5481351613998413], [0.4488093852996826, 0.5438323020935059], [0.4492548704147339, 0.5391613245010376], [0.4496234655380249, 0.5346074104309082], [0.44980359077453613, 0.5294691920280457], [0.449609637260437, 0.5239540338516235], [0.448561429977417, 0.5194746255874634], [0.4476819634437561, 0.5158506631851196], [0.446877121925354, 0.5133068561553955], [0.44610595703125, 0.5111571550369263], [0.4455360770225525, 0.5093194842338562], [0.44460099935531616, 0.5070149898529053], [0.44394445419311523, 0.5053113698959351], [0.4429158568382263, 0.5027405023574829], [0.44243139028549194, 0.5016745328903198], [0.44106990098953247, 0.49892330169677734], [0.4400160312652588, 0.4968174993991852], [0.4388601779937744, 0.4943311810493469], [0.43783485889434814, 0.49143511056900024], [0.4367738962173462, 0.48852574825286865], [0.4357861280441284, 0.4853237271308899], [0.4346603751182556, 0.48196572065353394], [0.4336480498313904, 0.4783269166946411], [0.4324798583984375, 0.47444355487823486], [0.4313424825668335, 0.47032833099365234], [0.4299080967903137, 0.4659680724143982], [0.42850470542907715, 0.4616689682006836], [0.4269486665725708, 0.45733433961868286], [0.42548978328704834, 0.4531295895576477], [0.42399704456329346, 0.44891276955604553], [0.42262983322143555, 0.4448227882385254], [0.41236329078674316, 0.8392413854598999], [0.4121864438056946, 0.8356749415397644], [0.4118725061416626, 0.8328282833099365], [0.4118090867996216, 0.8306226134300232], [0.4118204712867737, 0.8290294408798218], [0.4118335247039795, 0.826479434967041], [0.4119300842285156, 0.8243860006332397], [0.41222113370895386, 0.8215433359146118], [0.4125173091888428, 0.819281816482544], [0.4129508137702942, 0.8164286613464355], [0.41336894035339355, 0.8140283823013306], [0.41396570205688477, 0.8109964728355408], [0.41450852155685425, 0.8084112405776978], [0.4152751564979553, 0.8052346110343933], [0.4159694314002991, 0.8024916648864746], [0.4169785976409912, 0.7988923788070679], [0.41783368587493896, 0.795783281326294], [0.41887784004211426, 0.7920019030570984], [0.4198354482650757, 0.7885785698890686], [0.42070531845092773, 0.7839301824569702], [0.4205639362335205, 0.7801381945610046], [0.4208884835243225, 0.7785303592681885], [0.4211238622665405, 0.7767246961593628], [0.4210554361343384, 0.7751047611236572], [0.4213199019432068, 0.7737630605697632], [0.42180705070495605, 0.7714446783065796], [0.4219738841056824, 0.770331621170044], [0.4219266176223755, 0.7686868906021118], [0.42198240756988525, 0.7675435543060303], [0.4220505952835083, 0.7629258036613464], [0.42223626375198364, 
0.7621089220046997], [0.42306292057037354, 0.7595616579055786], [0.4229494333267212, 0.7580931782722473], [0.42257624864578247, 0.7549183368682861], [0.42292606830596924, 0.7513497471809387], [0.4236729145050049, 0.748087465763092], [0.4242668151855469, 0.7450626492500305], [0.4249953031539917, 0.7419940829277039], [0.42559611797332764, 0.7391293048858643], [0.4262934923171997, 0.7361431121826172], [0.4268876314163208, 0.7333213090896606], [0.4275662899017334, 0.7303769588470459], [0.4281347990036011, 0.7276301383972168], [0.42878806591033936, 0.7247617244720459], [0.4293549060821533, 0.722065806388855], [0.43000686168670654, 0.7192322611808777], [0.43056923151016235, 0.7165931463241577], [0.43121588230133057, 0.713802695274353], [0.4317793846130371, 0.7111846208572388], [0.43242937326431274, 0.7084003686904907], [0.4329873323440552, 0.7058166265487671], [0.4336429238319397, 0.7030400037765503], [0.43420445919036865, 0.7004529237747192], [0.43486475944519043, 0.6976630091667175], [0.43544626235961914, 0.6950510740280151], [0.436116099357605, 0.6922556757926941], [0.4366919994354248, 0.6896531581878662], [0.4373443126678467, 0.6868723034858704], [0.43789100646972656, 0.6843171119689941], [0.43845224380493164, 0.681570827960968], [0.4389757513999939, 0.6786293983459473], [0.43951529264450073, 0.6753909587860107], [0.43991124629974365, 0.672485888004303], [0.44041311740875244, 0.6693190336227417], [0.4410700798034668, 0.6665210723876953], [0.44183266162872314, 0.664199709892273], [0.4422526955604553, 0.6620609164237976], [0.4428642988204956, 0.6581122875213623], [0.4435783624649048, 0.6545989513397217], [0.44430017471313477, 0.6511969566345215], [0.44490206241607666, 0.6479969620704651], [0.4454326629638672, 0.6448418498039246], [0.4458792209625244, 0.6411867141723633], [0.4463139772415161, 0.6377869844436646], [0.4467071294784546, 0.6346763968467712], [0.44711631536483765, 0.6313443183898926], [0.4473347067832947, 0.627892255783081], [0.44764548540115356, 0.6235277652740479], [0.4479207396507263, 0.619623064994812], [0.44820481538772583, 0.6161386966705322], [0.44842207431793213, 0.6126834154129028], [0.4487264156341553, 0.6090825796127319], [0.4490247368812561, 0.6055387258529663], [0.44942158460617065, 0.6017563343048096], [0.4497624635696411, 0.5980802774429321], [0.4501500129699707, 0.5941545963287354], [0.4503490924835205, 0.5903671979904175], [0.4505319595336914, 0.5868001580238342], [0.4505886435508728, 0.5832886695861816], [0.4507030248641968, 0.5804262161254883], [0.4508906602859497, 0.5776753425598145], [0.45096564292907715, 0.5750188827514648], [0.45095646381378174, 0.5719428062438965], [0.45111143589019775, 0.5681084990501404], [0.451160192489624, 0.5644832253456116], [0.4512823820114136, 0.5606642961502075], [0.4515262246131897, 0.5567605495452881], [0.45195841789245605, 0.5529757142066956], [0.45257091522216797, 0.5488665103912354], [0.4533957839012146, 0.5447269678115845], [0.4543324112892151, 0.5399819612503052], [0.4551756978034973, 0.5355576276779175], [0.4557458162307739, 0.5293769240379333], [0.45585596561431885, 0.5239438414573669], [0.4542313814163208, 0.5161964893341064], [0.4524812698364258, 0.5135394334793091], [0.45153266191482544, 0.5113142728805542], [0.45090770721435547, 0.5092889070510864], [0.45012080669403076, 0.5072200894355774], [0.44948744773864746, 0.5050710439682007], [0.44859665632247925, 0.5027496218681335], [0.44788551330566406, 0.5008015632629395], [0.44683313369750977, 0.49843499064445496], [0.44562309980392456, 0.4963156580924988], 
[0.4448060989379883, 0.49427682161331177], [0.4439733028411865, 0.49192655086517334], [0.4428948163986206, 0.4891792833805084], [0.4419463872909546, 0.4863780736923218], [0.44074052572250366, 0.4832006096839905], [0.43974727392196655, 0.48007732629776], [0.43864870071411133, 0.4765567183494568], [0.43764978647232056, 0.4727221727371216], [0.43619048595428467, 0.46832168102264404], [0.43478482961654663, 0.4640547037124634], [0.433063268661499, 0.4595089256763458], [0.43155574798583984, 0.45541417598724365], [0.42996007204055786, 0.45106959342956543], [0.42857789993286133, 0.4470871388912201], [0.42706453800201416, 0.4428328275680542], [0.4168127179145813, 0.8386638760566711], [0.41657060384750366, 0.835553765296936], [0.416279137134552, 0.8319190740585327], [0.41612082719802856, 0.8306753635406494], [0.4159175157546997, 0.8285436630249023], [0.41586804389953613, 0.8268330097198486], [0.4159197211265564, 0.8243684768676758], [0.4160239100456238, 0.8222711682319641], [0.41630446910858154, 0.819670557975769], [0.4166296720504761, 0.817379355430603], [0.4170607924461365, 0.8145642280578613], [0.4174734354019165, 0.8120393753051758], [0.41801929473876953, 0.8090704083442688], [0.41854041814804077, 0.8063608407974243], [0.4192391633987427, 0.8033010959625244], [0.41998493671417236, 0.8000203371047974], [0.42079800367355347, 0.7965240478515625], [0.42157554626464844, 0.7929617166519165], [0.42240703105926514, 0.7892575263977051], [0.4237020015716553, 0.7849134206771851], [0.42528092861175537, 0.7803245782852173], [0.4252910017967224, 0.7795025110244751], [0.425309419631958, 0.776512622833252], [0.425315797328949, 0.7751189470291138], [0.424985408782959, 0.77364182472229], [0.4250475764274597, 0.7726653814315796], [0.425367534160614, 0.7702739834785461], [0.4251999855041504, 0.7682846188545227], [0.4249963164329529, 0.7673253417015076], [0.42526161670684814, 0.7634890079498291], [0.42531949281692505, 0.7626990079879761], [0.42530685663223267, 0.7599822282791138], [0.42555707693099976, 0.7577142715454102], [0.42566514015197754, 0.7550733685493469], [0.4259911775588989, 0.7519655227661133], [0.4265005588531494, 0.7488831281661987], [0.42720943689346313, 0.7457677125930786], [0.42783522605895996, 0.7428980469703674], [0.4285879135131836, 0.7398595213890076], [0.42921197414398193, 0.7370636463165283], [0.429939866065979, 0.7340579032897949], [0.43055295944213867, 0.731317400932312], [0.4312670826911926, 0.7283766269683838], [0.43185293674468994, 0.725711464881897], [0.4325411319732666, 0.7228404879570007], [0.43313300609588623, 0.7202168703079224], [0.4338204860687256, 0.7173944115638733], [0.43440163135528564, 0.7147929668426514], [0.43507182598114014, 0.7120052576065063], [0.43565917015075684, 0.7094162702560425], [0.43633294105529785, 0.7066569328308105], [0.4369131922721863, 0.7040578722953796], [0.43756985664367676, 0.7012825608253479], [0.4381369352340698, 0.6986759901046753], [0.43878453969955444, 0.6958832144737244], [0.4393507242202759, 0.6932497620582581], [0.43998080492019653, 0.6904464364051819], [0.440513014793396, 0.6877961158752441], [0.44104260206222534, 0.6849499940872192], [0.4414220452308655, 0.6822201013565063], [0.4418957233428955, 0.6791910529136658], [0.44249558448791504, 0.6761428713798523], [0.44320571422576904, 0.6731699705123901], [0.4438973665237427, 0.6702949404716492], [0.4445669651031494, 0.6676287055015564], [0.44497203826904297, 0.6650583744049072], [0.4452775716781616, 0.6623921394348145], [0.4457956552505493, 0.6588802337646484], [0.4465116262435913, 
0.6552813053131104], [0.4471479654312134, 0.6519362926483154], [0.4477459192276001, 0.6485309600830078], [0.4481413960456848, 0.6452927589416504], [0.4485989809036255, 0.6415515542030334], [0.44909507036209106, 0.6383808851242065], [0.449668288230896, 0.6351486444473267], [0.4500216245651245, 0.6316782236099243], [0.4502866268157959, 0.6280504465103149], [0.4505794644355774, 0.623876690864563], [0.4510037899017334, 0.6199322938919067], [0.4512990117073059, 0.6164560317993164], [0.45162129402160645, 0.6129672527313232], [0.4519274830818176, 0.6094688177108765], [0.4523196220397949, 0.6059333086013794], [0.4527146816253662, 0.6022107601165771], [0.4531528353691101, 0.5984922647476196], [0.45356690883636475, 0.5945942401885986], [0.4539908766746521, 0.5906121134757996], [0.4541093707084656, 0.5869215726852417], [0.45415031909942627, 0.5832903981208801], [0.45425915718078613, 0.5807864665985107], [0.4544948935508728, 0.5779872536659241], [0.45457369089126587, 0.5749416351318359], [0.4544999599456787, 0.5719025135040283], [0.4544360637664795, 0.5682365298271179], [0.4545881152153015, 0.5645862817764282], [0.45480096340179443, 0.5609449744224548], [0.4551575183868408, 0.5571766495704651], [0.45553773641586304, 0.5535194873809814], [0.4562082290649414, 0.5497861504554749], [0.4572269320487976, 0.5459303259849548], [0.4587327241897583, 0.541479229927063], [0.45974069833755493, 0.5361422300338745], [0.4607229232788086, 0.5303403735160828], [0.4607892632484436, 0.5231660008430481], [0.45812320709228516, 0.5143327713012695], [0.4578491449356079, 0.5115047693252563], [0.45736080408096313, 0.5090967416763306], [0.4563649892807007, 0.5066121816635132], [0.45578569173812866, 0.5049457550048828], [0.45461177825927734, 0.5023969411849976], [0.45372945070266724, 0.5003319382667542], [0.4525109529495239, 0.49797987937927246], [0.4515543580055237, 0.4959770441055298], [0.45060670375823975, 0.4933047294616699], [0.4502127766609192, 0.4922856390476227], [0.44886475801467896, 0.48905789852142334], [0.4478398561477661, 0.486866295337677], [0.44652366638183594, 0.4840274751186371], [0.44537585973739624, 0.48115548491477966], [0.4443485140800476, 0.4782881736755371], [0.4435509443283081, 0.475090891122818], [0.44232428073883057, 0.4709296226501465], [0.44095849990844727, 0.4664400815963745], [0.4391218423843384, 0.46186932921409607], [0.4374236464500427, 0.45757216215133667], [0.4357680082321167, 0.4533519148826599], [0.4342859983444214, 0.4493008255958557], [0.43279242515563965, 0.44514986872673035], [0.43144339323043823, 0.4411334991455078], [0.42131710052490234, 0.8387318849563599], [0.42109012603759766, 0.8347373008728027], [0.42073291540145874, 0.8316459655761719], [0.4205384850502014, 0.8297111988067627], [0.42044180631637573, 0.828534722328186], [0.4202750325202942, 0.8262858390808105], [0.4202048182487488, 0.8247633576393127], [0.42029255628585815, 0.8223332166671753], [0.4204064607620239, 0.8204587697982788], [0.420604944229126, 0.8176593780517578], [0.42081254720687866, 0.8154340386390686], [0.4211564064025879, 0.8124507665634155], [0.4214581251144409, 0.8099757432937622], [0.4219033718109131, 0.8068650960922241], [0.42228662967681885, 0.8042446970939636], [0.42292046546936035, 0.8005545139312744], [0.42343223094940186, 0.7973018884658813], [0.424080491065979, 0.7934093475341797], [0.42475318908691406, 0.7900664806365967], [0.425351083278656, 0.7851947546005249], [0.4258854389190674, 0.7805590629577637], [0.42612600326538086, 0.779416024684906], [0.4263145923614502, 0.7767107486724854], 
[0.4262206554412842, 0.7748874425888062], [0.42628276348114014, 0.7737009525299072], [0.4261174201965332, 0.7725186347961426], [0.426521360874176, 0.7711018323898315], [0.42785441875457764, 0.7674216032028198], [0.4277423620223999, 0.7668429017066956], [0.42771798372268677, 0.7641114592552185], [0.4278789162635803, 0.7626261711120605], [0.42808783054351807, 0.7603647708892822], [0.42833411693573, 0.7580699920654297], [0.4286748170852661, 0.7552945613861084], [0.42901086807250977, 0.7525922060012817], [0.4295468330383301, 0.7495238184928894], [0.4301115870475769, 0.7466701865196228], [0.430838942527771, 0.7436614632606506], [0.43150001764297485, 0.7408382892608643], [0.4322720766067505, 0.7378460168838501], [0.43293386697769165, 0.7350478172302246], [0.4336824417114258, 0.7321302890777588], [0.43433451652526855, 0.72938472032547], [0.43506354093551636, 0.7265334129333496], [0.4356963038444519, 0.7238811254501343], [0.4364132881164551, 0.721074640750885], [0.4370361566543579, 0.7184456586837769], [0.437738835811615, 0.7156496047973633], [0.43834149837493896, 0.7130603194236755], [0.4390234351158142, 0.710286021232605], [0.43961066007614136, 0.7077007293701172], [0.4402810335159302, 0.7049025297164917], [0.44085633754730225, 0.7022994756698608], [0.4415067434310913, 0.6994959115982056], [0.442066490650177, 0.6968741416931152], [0.4426969289779663, 0.6940394639968872], [0.44323575496673584, 0.6913809776306152], [0.4438251852989197, 0.6884961724281311], [0.44432079792022705, 0.6857186555862427], [0.4448663592338562, 0.682790994644165], [0.4453624486923218, 0.6800475120544434], [0.44597291946411133, 0.6769734621047974], [0.44652509689331055, 0.6741374731063843], [0.4471537470817566, 0.6711499094963074], [0.44763386249542236, 0.6684240102767944], [0.4481402635574341, 0.6655375957489014], [0.4485158920288086, 0.6629228591918945], [0.448968768119812, 0.6594430804252625], [0.44946956634521484, 0.6560026407241821], [0.45004862546920776, 0.6524962186813354], [0.4505084753036499, 0.6490404009819031], [0.4509543180465698, 0.6456522941589355], [0.45141881704330444, 0.6420573592185974], [0.4518892765045166, 0.6389353275299072], [0.4523226022720337, 0.6356214880943298], [0.4527592658996582, 0.631934404373169], [0.4531087875366211, 0.6283750534057617], [0.4535457491874695, 0.6241728663444519], [0.4538787007331848, 0.620326042175293], [0.4542670249938965, 0.6167584657669067], [0.45459645986557007, 0.6133502721786499], [0.4549940228462219, 0.6098310947418213], [0.45537352561950684, 0.606397807598114], [0.4558364152908325, 0.6026124954223633], [0.45624029636383057, 0.5989542007446289], [0.4566919803619385, 0.5949713587760925], [0.457144558429718, 0.5910898447036743], [0.45752400159835815, 0.5869244337081909], [0.4573761224746704, 0.5831724405288696], [0.4573003053665161, 0.5809366703033447], [0.45742082595825195, 0.5783055424690247], [0.4576253890991211, 0.5749409794807434], [0.4576817750930786, 0.5718786716461182], [0.4578234553337097, 0.5683612823486328], [0.45802634954452515, 0.5648775100708008], [0.4583779573440552, 0.5613361597061157], [0.4588521122932434, 0.5578053593635559], [0.4595034122467041, 0.5542385578155518], [0.4601781964302063, 0.5509348511695862], [0.46120524406433105, 0.5475709438323975], [0.46250343322753906, 0.5427209138870239], [0.46571141481399536, 0.5235980749130249], [0.46511054039001465, 0.5105642080307007], [0.4636151194572449, 0.5087868571281433], [0.46211671829223633, 0.5065299272537231], [0.46156400442123413, 0.5048059821128845], [0.4606516361236572, 0.5023301243782043], 
[... several thousand normalized [x, y] coordinate pairs (machine-generated 2D landmark/tracking data) elided ...]
[0.4937610626220703, 0.6438888311386108], [0.49354565143585205, 0.6399814486503601], [0.49314987659454346, 0.6366308927536011], [0.4931250810623169, 0.6323833465576172], [0.4937959909439087, 0.6283969879150391], [0.4942667484283447, 0.6246709823608398], [0.4945222735404968, 0.6211566925048828], [0.49472326040267944, 0.6176403164863586], [0.4948650598526001, 0.6139001846313477], [0.4949856400489807, 0.610187292098999], [0.49509984254837036, 0.6062606573104858], [0.49520230293273926, 0.6024059057235718], [0.49531859159469604, 0.5981509685516357], [0.4954105615615845, 0.5939881801605225], [0.4955211877822876, 0.5894609093666077], [0.49544811248779297, 0.5851247310638428], [0.49526524543762207, 0.5824518203735352], [0.4951959252357483, 0.5797625184059143], [0.495241641998291, 0.5770286917686462], [0.4953479766845703, 0.5744005441665649], [0.49545925855636597, 0.5719059109687805], [0.4955640435218811, 0.5695153474807739], [0.4956706166267395, 0.5672997236251831], [0.49577367305755615, 0.5652042031288147], [0.49586737155914307, 0.5631927251815796], [0.4959262013435364, 0.5613078474998474], [0.4963427186012268, 0.476441890001297], [0.4961881637573242, 0.47498250007629395], [0.4960365891456604, 0.47347187995910645], [0.49587905406951904, 0.4717182219028473], [0.49572432041168213, 0.46996960043907166], [0.4955454468727112, 0.46796971559524536], [0.49536454677581787, 0.465981662273407], [0.4951564073562622, 0.46369433403015137], [0.4949502944946289, 0.4614400863647461], [0.4947032928466797, 0.45849740505218506], [0.4944852590560913, 0.45541268587112427], [0.4942745566368103, 0.452070415019989], [0.4940897822380066, 0.44869375228881836], [0.4939171075820923, 0.4450046420097351], [0.4937892556190491, 0.44138967990875244], [0.49365025758743286, 0.4373091161251068], [0.49348700046539307, 0.43326592445373535], [0.49328792095184326, 0.4291607141494751], [0.49311256408691406, 0.4252336025238037], [0.4945129156112671, 0.837953507900238], [0.49452531337738037, 0.83421790599823], [0.49452513456344604, 0.8312522172927856], [0.4945262670516968, 0.8277304172515869], [0.49452805519104004, 0.8249648809432983], [0.49452370405197144, 0.8216708302497864], [0.49451208114624023, 0.8191171884536743], [0.49448806047439575, 0.8160456418991089], [0.4944741129875183, 0.8136881589889526], [0.49445080757141113, 0.8108589053153992], [0.4944354295730591, 0.8087167739868164], [0.49441206455230713, 0.8061261177062988], [0.49439722299575806, 0.8041810989379883], [0.49437880516052246, 0.8017779588699341], [0.4943743944168091, 0.7999945282936096], [0.4943718910217285, 0.797757625579834], [0.49437153339385986, 0.7961037158966064], [0.49437278509140015, 0.7940621972084045], [0.4943838119506836, 0.7925653457641602], [0.4943996071815491, 0.7907705307006836], [0.49440622329711914, 0.7894835472106934], [0.4944216012954712, 0.7878941893577576], [0.49443739652633667, 0.7868040800094604], [0.4944736957550049, 0.7853215336799622], [0.494493305683136, 0.7843673229217529], [0.4945366382598877, 0.7828450202941895], [0.4945583939552307, 0.781891942024231], [0.4946112036705017, 0.7802424430847168], [0.4946408271789551, 0.779180645942688], [0.49469953775405884, 0.7773175239562988], [0.49473947286605835, 0.7760304808616638], [0.49480563402175903, 0.7738897800445557], [0.4948556423187256, 0.7722755670547485], [0.4949249029159546, 0.769802451133728], [0.49498915672302246, 0.7677925229072571], [0.4950716495513916, 0.7649948596954346], [0.49514931440353394, 0.7626186609268188], [0.49524617195129395, 0.7595525979995728], [0.4953368902206421, 
0.7568846940994263], [0.4954451322555542, 0.7536334991455078], [0.49553966522216797, 0.7507661581039429], [0.495649516582489, 0.7474083304405212], [0.4957458972930908, 0.7444151043891907], [0.4958527684211731, 0.7410245537757874], [0.4959413409233093, 0.7379834651947021], [0.49603867530822754, 0.7345753908157349], [0.49611860513687134, 0.7314990758895874], [0.4962043762207031, 0.7280929088592529], [0.49627500772476196, 0.7249884605407715], [0.4963475465774536, 0.7215997576713562], [0.496407151222229, 0.718495786190033], [0.4964674115180969, 0.7151021957397461], [0.4965135455131531, 0.7119893431663513], [0.49655890464782715, 0.7085959911346436], [0.496590793132782, 0.7054888010025024], [0.49662327766418457, 0.7020829319953918], [0.49665117263793945, 0.6990018486976624], [0.49668365716934204, 0.6956592798233032], [0.49670571088790894, 0.6926262378692627], [0.4967273473739624, 0.6891529560089111], [0.4967321753501892, 0.6860303282737732], [0.4967437982559204, 0.6826775074005127], [0.496803879737854, 0.679726779460907], [0.49689072370529175, 0.6767672300338745], [0.4969666004180908, 0.6740075349807739], [0.49707889556884766, 0.6708363890647888], [0.497189998626709, 0.6677010655403137], [0.4972827434539795, 0.6644459962844849], [0.49728673696517944, 0.6611045598983765], [0.4972500801086426, 0.6578249335289001], [0.4972448945045471, 0.654172420501709], [0.4971717596054077, 0.6509722471237183], [0.4970477819442749, 0.647242546081543], [0.49693983793258667, 0.6436668038368225], [0.49667519330978394, 0.6397271156311035], [0.49655890464782715, 0.6362781524658203], [0.49666285514831543, 0.6328529119491577], [0.4968634843826294, 0.6288093328475952], [0.4970959424972534, 0.6249322891235352], [0.49725860357284546, 0.6213018298149109], [0.4973525404930115, 0.6177683472633362], [0.4974343776702881, 0.6139644980430603], [0.49749696254730225, 0.6102732419967651], [0.49755728244781494, 0.6063140630722046], [0.4976074695587158, 0.6024684906005859], [0.4976608157157898, 0.5982076525688171], [0.49774253368377686, 0.5940501689910889], [0.49774712324142456, 0.5894744396209717], [0.4976545572280884, 0.5850057005882263], [0.4976266622543335, 0.5823521614074707], [0.4976035952568054, 0.579757809638977], [0.49761122465133667, 0.5770940184593201], [0.49766552448272705, 0.5745084285736084], [0.49773257970809937, 0.5720016360282898], [0.4977837800979614, 0.5695978999137878], [0.4978303909301758, 0.5674046277999878], [0.49788331985473633, 0.565290093421936], [0.49792593717575073, 0.5632818937301636], [0.497944176197052, 0.5613207221031189], [0.4981074929237366, 0.4763174057006836], [0.49808138608932495, 0.4748312830924988], [0.49801433086395264, 0.4732813835144043], [0.4979364275932312, 0.47157660126686096], [0.49785685539245605, 0.46976882219314575], [0.4977729916572571, 0.46782010793685913], [0.49767976999282837, 0.4657464027404785], [0.4975786805152893, 0.46353453397750854], [0.4974689483642578, 0.4611772894859314], [0.4973433017730713, 0.4583604335784912], [0.49723416566848755, 0.4551987946033478], [0.49714016914367676, 0.45197391510009766], [0.49704277515411377, 0.44849157333374023], [0.49696648120880127, 0.44496095180511475], [0.4968903064727783, 0.441219300031662], [0.4968240261077881, 0.43728742003440857], [0.49674808979034424, 0.4330519437789917], [0.49666351079940796, 0.4290979504585266], [0.4965711832046509, 0.42500945925712585], [0.5, 0.8375762701034546], [0.5, 0.8346104621887207], [0.5, 0.8308656215667725], [0.5, 0.8281131386756897], [0.5, 0.8245925903320312], [0.5, 0.8220284581184387], [0.5, 
0.8187342882156372], [0.5, 0.816371500492096], [0.5, 0.813327431678772], [0.5, 0.8111585974693298], [0.5, 0.8083727955818176], [0.5, 0.806403398513794], [0.5, 0.803856372833252], [0.5, 0.802044689655304], [0.5, 0.7997166514396667], [0.5, 0.7980217337608337], [0.5, 0.7958446741104126], [0.5, 0.7943187952041626], [0.5, 0.7923669815063477], [0.5, 0.7910227179527283], [0.5, 0.7892726063728333], [0.5, 0.7881647944450378], [0.5, 0.7866391539573669], [0.5, 0.7856452465057373], [0.5, 0.7841900587081909], [0.5, 0.7832058668136597], [0.5, 0.7816920280456543], [0.5, 0.7806459665298462], [0.5, 0.7789693474769592], [0.5, 0.7777257561683655], [0.5, 0.775831937789917], [0.5, 0.7742772698402405], [0.5, 0.7720930576324463], [0.5, 0.7701512575149536], [0.5, 0.7676498889923096], [0.5, 0.7653085589408875], [0.5, 0.762509286403656], [0.5, 0.7598389387130737], [0.5, 0.7568031549453735], [0.5, 0.7538943886756897], [0.5, 0.7506963014602661], [0.5, 0.7476466298103333], [0.5, 0.7443546056747437], [0.5, 0.7412393093109131], [0.5, 0.7379226684570312], [0.5, 0.7347692847251892], [0.5, 0.7314370274543762], [0.5, 0.7282666563987732], [0.5, 0.7249259352684021], [0.5, 0.7217530012130737], [0.5, 0.7184339761734009], [0.5, 0.7152358293533325], [0.5, 0.7119244933128357], [0.5, 0.708712637424469], [0.5, 0.7054200768470764], [0.5, 0.7021865844726562], [0.5, 0.6989396214485168], [0.5, 0.6957566142082214], [0.5, 0.6925625205039978], [0.5, 0.689237654209137], [0.5, 0.6859557032585144], [0.5, 0.6827622652053833], [0.5, 0.6797268986701965], [0.5, 0.6768754720687866], [0.5, 0.6740075349807739], [0.5, 0.6709601283073425], [0.5, 0.6677070260047913], [0.5, 0.6645321249961853], [0.5, 0.6610256433486938], [0.5, 0.6578639149665833], [0.5, 0.6541314125061035], [0.5, 0.6509321928024292], [0.5, 0.647167444229126], [0.5, 0.643627941608429], [0.5, 0.6394992470741272], [0.5, 0.6363675594329834], [0.5, 0.6328938007354736], [0.5, 0.6289581060409546], [0.5, 0.6250113844871521], [0.5, 0.6213738322257996], [0.5, 0.6177881956100464], [0.5, 0.6140115261077881], [0.5, 0.6102820038795471], [0.5, 0.606346607208252], [0.5, 0.6024779677391052], [0.5, 0.5982329249382019], [0.5, 0.5940824151039124], [0.5, 0.5894423127174377], [0.5, 0.5849822163581848], [0.5, 0.582343339920044], [0.5, 0.5797429084777832], [0.5, 0.5771143436431885], [0.5, 0.5745508074760437], [0.5, 0.5720304846763611], [0.5, 0.5696310997009277], [0.5, 0.5674276947975159], [0.5, 0.5653355121612549], [0.5, 0.5632899403572083], [0.5, 0.561339259147644], [0.5, 0.4763392210006714], [0.5, 0.4747655987739563], [0.5, 0.47324657440185547], [0.5, 0.4714938700199127], [0.5, 0.46974194049835205], [0.5, 0.4677276313304901], [0.5, 0.46571633219718933], [0.5, 0.4634280502796173], [0.5, 0.461147278547287], [0.5, 0.4582425653934479], [0.5, 0.4552074074745178], [0.5, 0.4518583416938782], [0.5, 0.44851166009902954], [0.5, 0.44485682249069214], [0.5, 0.44125115871429443], [0.5, 0.4371848404407501], [0.5, 0.4330880045890808], [0.5, 0.42896705865859985], [0.5, 0.4250517189502716], [0.5054870843887329, 0.837953507900238], [0.5054746866226196, 0.83421790599823], [0.505474865436554, 0.8312522172927856], [0.5054737329483032, 0.8277304172515869], [0.50547194480896, 0.8249648809432983], [0.5054762959480286, 0.8216708302497864], [0.5054879188537598, 0.8191171884536743], [0.5055119395256042, 0.8160456418991089], [0.5055258870124817, 0.8136881589889526], [0.5055491924285889, 0.8108589053153992], [0.5055645704269409, 0.8087167739868164], [0.5055879354476929, 0.8061261177062988], [0.5056027770042419, 0.8041810989379883], 
[0.5056211948394775, 0.8017779588699341], [0.5056256055831909, 0.7999945282936096], [0.5056281089782715, 0.797757625579834], [0.5056284666061401, 0.7961037158966064], [0.5056272149085999, 0.7940621972084045], [0.5056161880493164, 0.7925653457641602], [0.5056003928184509, 0.7907705307006836], [0.5055937767028809, 0.7894835472106934], [0.5055783987045288, 0.7878941893577576], [0.5055626034736633, 0.7868040800094604], [0.5055263042449951, 0.7853215336799622], [0.505506694316864, 0.7843673229217529], [0.5054633617401123, 0.7828450202941895], [0.5054416060447693, 0.781891942024231], [0.5053887963294983, 0.7802424430847168], [0.5053591728210449, 0.779180645942688], [0.5053004622459412, 0.7773175239562988], [0.5052605271339417, 0.7760304808616638], [0.505194365978241, 0.7738897800445557], [0.5051443576812744, 0.7722755670547485], [0.5050750970840454, 0.769802451133728], [0.5050108432769775, 0.7677925229072571], [0.5049283504486084, 0.7649948596954346], [0.5048506855964661, 0.7626186609268188], [0.504753828048706, 0.7595525979995728], [0.5046631097793579, 0.7568846940994263], [0.5045548677444458, 0.7536334991455078], [0.504460334777832, 0.7507661581039429], [0.504350483417511, 0.7474083304405212], [0.5042541027069092, 0.7444151043891907], [0.5041472315788269, 0.7410245537757874], [0.5040586590766907, 0.7379834651947021], [0.5039613246917725, 0.7345753908157349], [0.5038813948631287, 0.7314990758895874], [0.5037956237792969, 0.7280929088592529], [0.503724992275238, 0.7249884605407715], [0.5036524534225464, 0.7215997576713562], [0.503592848777771, 0.718495786190033], [0.5035325884819031, 0.7151021957397461], [0.5034864544868469, 0.7119893431663513], [0.5034410953521729, 0.7085959911346436], [0.503409206867218, 0.7054888010025024], [0.5033767223358154, 0.7020829319953918], [0.5033488273620605, 0.6990018486976624], [0.503316342830658, 0.6956592798233032], [0.5032942891120911, 0.6926262378692627], [0.5032726526260376, 0.6891529560089111], [0.5032678246498108, 0.6860303282737732], [0.5032562017440796, 0.6826775074005127], [0.503196120262146, 0.679726779460907], [0.5031092762947083, 0.6767672300338745], [0.5030333995819092, 0.6740075349807739], [0.5029211044311523, 0.6708363890647888], [0.502810001373291, 0.6677010655403137], [0.5027172565460205, 0.6644459962844849], [0.5027132630348206, 0.6611045598983765], [0.5027499198913574, 0.6578249335289001], [0.5027551054954529, 0.654172420501709], [0.5028282403945923, 0.6509722471237183], [0.5029522180557251, 0.647242546081543], [0.5030601620674133, 0.6436668038368225], [0.5033248066902161, 0.6397271156311035], [0.5034410953521729, 0.6362781524658203], [0.5033371448516846, 0.6328529119491577], [0.5031365156173706, 0.6288093328475952], [0.5029040575027466, 0.6249322891235352], [0.5027413964271545, 0.6213018298149109], [0.5026474595069885, 0.6177683472633362], [0.5025656223297119, 0.6139644980430603], [0.5025030374526978, 0.6102732419967651], [0.5024427175521851, 0.6063140630722046], [0.5023925304412842, 0.6024684906005859], [0.5023391842842102, 0.5982076525688171], [0.5022574663162231, 0.5940501689910889], [0.5022528767585754, 0.5894744396209717], [0.5023454427719116, 0.5850057005882263], [0.5023733377456665, 0.5823521614074707], [0.5023964047431946, 0.579757809638977], [0.5023887753486633, 0.5770940184593201], [0.502334475517273, 0.5745084285736084], [0.5022674202919006, 0.5720016360282898], [0.5022162199020386, 0.5695978999137878], [0.5021696090698242, 0.5674046277999878], [0.5021166801452637, 0.565290093421936], [0.5020740628242493, 0.5632818937301636], 
[0.502055823802948, 0.5613207221031189], [0.5018925070762634, 0.4763174057006836], [0.501918613910675, 0.4748312830924988], [0.5019856691360474, 0.4732813835144043], [0.5020635724067688, 0.47157660126686096], [0.502143144607544, 0.46976882219314575], [0.5022270083427429, 0.46782010793685913], [0.5023202300071716, 0.4657464027404785], [0.5024213194847107, 0.46353453397750854], [0.5025310516357422, 0.4611772894859314], [0.5026566982269287, 0.4583604335784912], [0.5027658343315125, 0.4551987946033478], [0.5028598308563232, 0.45197391510009766], [0.5029572248458862, 0.44849157333374023], [0.5030335187911987, 0.44496095180511475], [0.5031096935272217, 0.441219300031662], [0.5031759738922119, 0.43728742003440857], [0.5032519102096558, 0.4330519437789917], [0.503336489200592, 0.4290979504585266], [0.5034288167953491, 0.42500945925712585], [0.5109663009643555, 0.8375468850135803], [0.5109561681747437, 0.83457350730896], [0.5109410285949707, 0.8308762907981873], [0.5109358429908752, 0.8280855417251587], [0.5109360218048096, 0.8246119022369385], [0.510944664478302, 0.8220492601394653], [0.5109752416610718, 0.8188387155532837], [0.5110020637512207, 0.8164466023445129], [0.5110530853271484, 0.8134464025497437], [0.5110911726951599, 0.8112705945968628], [0.5111509561538696, 0.8085057139396667], [0.5111825466156006, 0.8065011501312256], [0.5112229585647583, 0.803973913192749], [0.5112366676330566, 0.8020817041397095], [0.5112461447715759, 0.7997421622276306], [0.5112351775169373, 0.7979682683944702], [0.5112113952636719, 0.7958252429962158], [0.5111753940582275, 0.7941797971725464], [0.5111210346221924, 0.7922176122665405], [0.5110745429992676, 0.7908269166946411], [0.5110052227973938, 0.7891291379928589], [0.5109571814537048, 0.7879149913787842], [0.5108779072761536, 0.7863620519638062], [0.5108337998390198, 0.785321056842804], [0.5107553005218506, 0.7838882207870483], [0.5107161998748779, 0.7828840017318726], [0.5106441974639893, 0.781402051448822], [0.5106091499328613, 0.7803167104721069], [0.5105419158935547, 0.7786732912063599], [0.5104957818984985, 0.7773898839950562], [0.5104174017906189, 0.7755041718482971], [0.510346531867981, 0.7739289999008179], [0.5102471113204956, 0.7717607617378235], [0.5101362466812134, 0.7697844505310059], [0.5099987983703613, 0.7672832012176514], [0.5098505020141602, 0.7649320363998413], [0.5096794366836548, 0.762130081653595], [0.5095009803771973, 0.759454607963562], [0.5093014240264893, 0.7564183473587036], [0.5091058611869812, 0.7535229921340942], [0.5088928937911987, 0.7503362894058228], [0.5086956024169922, 0.7473034262657166], [0.5084860324859619, 0.7440230846405029], [0.5082998275756836, 0.7409367561340332], [0.5081045627593994, 0.7376354932785034], [0.5079329013824463, 0.7345061302185059], [0.5077559351921082, 0.7311936616897583], [0.5076019167900085, 0.728042483329773], [0.5074458718299866, 0.7247235774993896], [0.5073133707046509, 0.7215713262557983], [0.5071837902069092, 0.7182697057723999], [0.5070774555206299, 0.7150984406471252], [0.506976842880249, 0.7118028402328491], [0.5068958401679993, 0.7086126804351807], [0.5068235397338867, 0.705340564250946], [0.5067732930183411, 0.7021218538284302], [0.5067232251167297, 0.6988659501075745], [0.506672739982605, 0.6956853866577148], [0.5066174268722534, 0.6925036311149597], [0.5065739750862122, 0.6891930103302002], [0.5065496563911438, 0.6859575510025024], [0.5065412521362305, 0.682722270488739], [0.5064924359321594, 0.6795410513877869], [0.5063857436180115, 0.6766805052757263], [0.506208062171936, 
0.6737518310546875], [0.5059663653373718, 0.6706546545028687], [0.5056815147399902, 0.6674392819404602], [0.5054816603660583, 0.6643773317337036], [0.5054402351379395, 0.6611288785934448], [0.5054460167884827, 0.6578407287597656], [0.5054886937141418, 0.6542034149169922], [0.5056830048561096, 0.6512470245361328], [0.5060051679611206, 0.6473889350891113], [0.5062389373779297, 0.6438888311386108], [0.506454348564148, 0.6399814486503601], [0.5068501234054565, 0.6366308927536011], [0.5068749189376831, 0.6323833465576172], [0.5062040090560913, 0.6283969879150391], [0.5057332515716553, 0.6246709823608398], [0.5054777264595032, 0.6211566925048828], [0.5052767395973206, 0.6176403164863586], [0.5051349401473999, 0.6139001846313477], [0.5050143599510193, 0.610187292098999], [0.5049001574516296, 0.6062606573104858], [0.5047976970672607, 0.6024059057235718], [0.504681408405304, 0.5981509685516357], [0.5045894384384155, 0.5939881801605225], [0.5044788122177124, 0.5894609093666077], [0.504551887512207, 0.5851247310638428], [0.5047347545623779, 0.5824518203735352], [0.5048040747642517, 0.5797625184059143], [0.504758358001709, 0.5770286917686462], [0.5046520233154297, 0.5744005441665649], [0.504540741443634, 0.5719059109687805], [0.5044359564781189, 0.5695153474807739], [0.5043293833732605, 0.5672997236251831], [0.5042263269424438, 0.5652042031288147], [0.5041326284408569, 0.5631927251815796], [0.5040737986564636, 0.5613078474998474], [0.5036572813987732, 0.476441890001297], [0.5038118362426758, 0.47498250007629395], [0.5039634108543396, 0.47347187995910645], [0.504120945930481, 0.4717182219028473], [0.5042756795883179, 0.46996960043907166], [0.5044545531272888, 0.46796971559524536], [0.5046354532241821, 0.465981662273407], [0.5048435926437378, 0.46369433403015137], [0.5050497055053711, 0.4614400863647461], [0.5052967071533203, 0.45849740505218506], [0.5055147409439087, 0.45541268587112427], [0.5057254433631897, 0.452070415019989], [0.5059102177619934, 0.44869375228881836], [0.5060828924179077, 0.4450046420097351], [0.5062107443809509, 0.44138967990875244], [0.5063497424125671, 0.4373091161251068], [0.5065129995346069, 0.43326592445373535], [0.5067120790481567, 0.4291607141494751], [0.5068874359130859, 0.4252336025238037], [0.5162432789802551, 0.8378835916519165], [0.5162206292152405, 0.8341816663742065], [0.516210675239563, 0.8312140703201294], [0.5162093043327332, 0.8277390003204346], [0.516226053237915, 0.8249939680099487], [0.5162656307220459, 0.8217732906341553], [0.5162971019744873, 0.8192530870437622], [0.5163578987121582, 0.8162421584129333], [0.5164154767990112, 0.813920795917511], [0.5165027976036072, 0.811121940612793], [0.5165674686431885, 0.8089948892593384], [0.5166608691215515, 0.8063567280769348], [0.516708254814148, 0.8043821454048157], [0.5167666077613831, 0.8018842935562134], [0.5167827606201172, 0.8000199198722839], [0.5167953968048096, 0.7976902723312378], [0.5167807936668396, 0.7959638237953186], [0.5167621970176697, 0.7938363552093506], [0.516739547252655, 0.7922667264938354], [0.51670241355896, 0.7904148101806641], [0.5166696906089783, 0.7890828847885132], [0.5166057348251343, 0.7874248623847961], [0.5165687203407288, 0.7862697243690491], [0.5164821147918701, 0.7847336530685425], [0.5164273381233215, 0.7837156057357788], [0.5163008570671082, 0.7822154760360718], [0.5162351727485657, 0.7812592387199402], [0.5160804986953735, 0.779615044593811], [0.5159913301467896, 0.7785377502441406], [0.5158144235610962, 0.7766736745834351], [0.5156978368759155, 0.7753633856773376], 
[0.5155074596405029, 0.7732226848602295], [0.5153578519821167, 0.771591067314148], [0.5151517391204834, 0.7691011428833008], [0.5149639248847961, 0.7670583724975586], [0.5147189497947693, 0.7642581462860107], [0.514488697052002, 0.7618625164031982], [0.514204740524292, 0.7588010430335999], [0.5139422416687012, 0.7561248540878296], [0.5136284232139587, 0.7528995275497437], [0.5133513808250427, 0.7500488758087158], [0.5130317807197571, 0.7467296719551086], [0.51276034116745, 0.743767499923706], [0.5124562382698059, 0.7404194474220276], [0.5121960639953613, 0.737409234046936], [0.5119107961654663, 0.73404860496521], [0.5116738677024841, 0.7310087084770203], [0.511418342590332, 0.7276425957679749], [0.5112087726593018, 0.7245805263519287], [0.510989248752594, 0.7212302088737488], [0.5108111500740051, 0.7181686162948608], [0.5106256008148193, 0.7148146629333496], [0.5104761123657227, 0.711739182472229], [0.5103229880332947, 0.7083804607391357], [0.5102008581161499, 0.7053093910217285], [0.510073184967041, 0.7019396424293518], [0.5099750757217407, 0.698853611946106], [0.5098665952682495, 0.6955114603042603], [0.5097745656967163, 0.6924868226051331], [0.509671151638031, 0.6890487670898438], [0.509588360786438, 0.6859673261642456], [0.5094809532165527, 0.6825921535491943], [0.5093662738800049, 0.679480791091919], [0.5091987252235413, 0.676424503326416], [0.5089759826660156, 0.6735332012176514], [0.5086982250213623, 0.6703221797943115], [0.5083917379379272, 0.6671504974365234], [0.5081356167793274, 0.6641709208488464], [0.5081784725189209, 0.6613469123840332], [0.5083151459693909, 0.657755970954895], [0.5086843371391296, 0.654839813709259], [0.5091492533683777, 0.6514203548431396], [0.5096760988235474, 0.6483594179153442], [0.5106399655342102, 0.6443943977355957], [0.5112199783325195, 0.6406528949737549], [0.5114187598228455, 0.6369551420211792], [0.5099703669548035, 0.6312068104743958], [0.5088655948638916, 0.6278066635131836], [0.5083709359169006, 0.6243768930435181], [0.5080159902572632, 0.6208988428115845], [0.5077680349349976, 0.6174939870834351], [0.5075592398643494, 0.6137655973434448], [0.5074065923690796, 0.6100984811782837], [0.5072760581970215, 0.6061694025993347], [0.5071561932563782, 0.6023283004760742], [0.5070637464523315, 0.5980886220932007], [0.5069608092308044, 0.5939134359359741], [0.506990909576416, 0.5894895792007446], [0.5072432160377502, 0.5853473544120789], [0.5073198676109314, 0.5825439095497131], [0.5072634220123291, 0.5796904563903809], [0.5071196556091309, 0.5769022703170776], [0.5069593191146851, 0.5742371678352356], [0.5067881345748901, 0.5717542171478271], [0.5066209435462952, 0.5693581104278564], [0.5064685940742493, 0.5671497583389282], [0.5063142776489258, 0.5650354027748108], [0.5061745643615723, 0.5630666017532349], [0.5060387849807739, 0.5611780881881714], [0.5055079460144043, 0.4767819046974182], [0.5057673454284668, 0.475294828414917], [0.5059753060340881, 0.473720908164978], [0.5061920881271362, 0.47201502323150635], [0.5064195394515991, 0.47022542357444763], [0.5066671967506409, 0.4683000445365906], [0.506932258605957, 0.466256707906723], [0.5072229504585266, 0.4640647768974304], [0.5075308084487915, 0.46173107624053955], [0.5078880190849304, 0.45886048674583435], [0.5082513093948364, 0.4556448459625244], [0.5085691213607788, 0.4523935914039612], [0.5088725686073303, 0.4488658905029297], [0.5091322064399719, 0.44527578353881836], [0.5093519687652588, 0.4414654076099396], [0.5095576047897339, 0.4375772476196289], [0.5098424553871155, 0.43342918157577515], 
[0.5101255774497986, 0.4295027256011963], [0.5103974938392639, 0.42535293102264404], [0.5214966535568237, 0.8374704718589783], [0.5214706659317017, 0.8344929218292236], [0.5214519500732422, 0.8308541774749756], [0.5214583277702332, 0.8281062245368958], [0.5214933156967163, 0.8247383832931519], [0.5215319395065308, 0.8222015500068665], [0.5216070413589478, 0.8190649151802063], [0.521675705909729, 0.8167507648468018], [0.5217888355255127, 0.8138327598571777], [0.5218788385391235, 0.811687707901001], [0.5220128893852234, 0.808961033821106], [0.5220994353294373, 0.8069020509719849], [0.5222089886665344, 0.8042819499969482], [0.5222493410110474, 0.802270233631134], [0.5222794413566589, 0.7997794151306152], [0.5222573280334473, 0.797881007194519], [0.5222135782241821, 0.7955961227416992], [0.5221443176269531, 0.7938709259033203], [0.5220434069633484, 0.7918239831924438], [0.5219518542289734, 0.7903248071670532], [0.5218123197555542, 0.7885309457778931], [0.5217175483703613, 0.7872582077980042], [0.5215663909912109, 0.7856616377830505], [0.5214826464653015, 0.7845349907875061], [0.521331787109375, 0.7829991579055786], [0.5212662220001221, 0.7820154428482056], [0.5211330652236938, 0.7805138826370239], [0.5210669040679932, 0.779421329498291], [0.5209348201751709, 0.7777515053749084], [0.5208468437194824, 0.7764598727226257], [0.5206964015960693, 0.7745506763458252], [0.5205571055412292, 0.7729467153549194], [0.5203625559806824, 0.770758867263794], [0.5201435089111328, 0.7687502503395081], [0.5198754072189331, 0.7662156820297241], [0.5195860862731934, 0.7638378739356995], [0.5192503929138184, 0.7610202431678772], [0.5189045667648315, 0.7583465576171875], [0.5185191631317139, 0.755308985710144], [0.5181492567062378, 0.7524440884590149], [0.5177448987960815, 0.7492824792861938], [0.5173730850219727, 0.7463040351867676], [0.516975998878479, 0.7430751919746399], [0.5166215300559998, 0.7400379776954651], [0.5162447094917297, 0.7367807626724243], [0.5159143209457397, 0.7337185740470886], [0.5155695676803589, 0.7304568290710449], [0.5152713656425476, 0.7273684740066528], [0.5149654150009155, 0.7241047620773315], [0.5147024393081665, 0.7210105061531067], [0.5144368410110474, 0.7177648544311523], [0.5142101049423218, 0.7146467566490173], [0.5139855146408081, 0.7114001512527466], [0.5137951970100403, 0.7082595229148865], [0.5136053562164307, 0.7050206661224365], [0.5134423971176147, 0.7018526792526245], [0.513282060623169, 0.6986131072044373], [0.5131453275680542, 0.6954421401023865], [0.5130059719085693, 0.6922643780708313], [0.5128879547119141, 0.6890053749084473], [0.5127648115158081, 0.6857611536979675], [0.5126364231109619, 0.6825175285339355], [0.51247638463974, 0.6792315244674683], [0.5122936367988586, 0.6762300729751587], [0.512046754360199, 0.6731384992599487], [0.511825442314148, 0.6701087951660156], [0.5116325616836548, 0.666762113571167], [0.5116623640060425, 0.664268970489502], [0.5118300914764404, 0.6615984439849854], [0.5122700929641724, 0.658311128616333], [0.5126633644104004, 0.655616283416748], [0.5121270418167114, 0.6509164571762085], [0.5120015740394592, 0.6490614414215088], [0.5133559703826904, 0.6457347869873047], [0.5150310397148132, 0.6412714719772339], [0.5143718123435974, 0.6361902952194214], [0.5122777223587036, 0.6301100254058838], [0.5115481615066528, 0.6271747946739197], [0.5109500885009766, 0.6239440441131592], [0.5105353593826294, 0.6206172108650208], [0.5102179050445557, 0.617266058921814], [0.5099920034408569, 0.6136358976364136], [0.5098145008087158, 0.6099651455879211], 
[0.5096851587295532, 0.6061035990715027], [0.5095843076705933, 0.6022273302078247], [0.5094828605651855, 0.5980510711669922], [0.5094519257545471, 0.5938581824302673], [0.5095326900482178, 0.5896225571632385], [0.5097274780273438, 0.5855512022972107], [0.5098215937614441, 0.5825141668319702], [0.5096657276153564, 0.5795087218284607], [0.5094614028930664, 0.5767173767089844], [0.509235143661499, 0.5740163326263428], [0.5090162754058838, 0.5715398788452148], [0.5088015198707581, 0.569144606590271], [0.508600652217865, 0.566936194896698], [0.5084152221679688, 0.5648305416107178], [0.508220374584198, 0.562840461730957], [0.5080081820487976, 0.5609536170959473], [0.5074629187583923, 0.47729116678237915], [0.5077623128890991, 0.4756450057029724], [0.5080075263977051, 0.474088579416275], [0.5082759261131287, 0.4723549485206604], [0.5085558891296387, 0.4706398546695709], [0.5088845491409302, 0.46867063641548157], [0.5092175006866455, 0.4667183756828308], [0.5096083879470825, 0.46447908878326416], [0.5100079774856567, 0.4622456133365631], [0.5105170607566833, 0.4592602252960205], [0.5109922885894775, 0.4561179578304291], [0.5114485025405884, 0.4527119994163513], [0.5118332505226135, 0.44925981760025024], [0.5121914744377136, 0.44549351930618286], [0.512493371963501, 0.44178032875061035], [0.512831449508667, 0.4377976655960083], [0.5131857395172119, 0.43385910987854004], [0.5135677456855774, 0.4297522306442261], [0.513944685459137, 0.4257979393005371], [0.5264647006988525, 0.8377546668052673], [0.5264333486557007, 0.8341087102890015], [0.5264211297035217, 0.8311763405799866], [0.526430606842041, 0.8278205990791321], [0.5264567136764526, 0.8251410722732544], [0.5265236496925354, 0.8220276236534119], [0.5266057252883911, 0.8195877075195312], [0.5267366170883179, 0.8166927099227905], [0.5268509387969971, 0.8144438862800598], [0.5270143151283264, 0.8117159605026245], [0.5271421670913696, 0.8096226453781128], [0.5273252129554749, 0.8069428205490112], [0.5274470448493958, 0.8048856258392334], [0.5276114344596863, 0.8021933436393738], [0.5276774168014526, 0.8001439571380615], [0.527736246585846, 0.7975935935974121], [0.527722954750061, 0.7957022190093994], [0.5276957750320435, 0.7934143543243408], [0.5276497006416321, 0.7917498350143433], [0.5275751352310181, 0.7897297143936157], [0.5275130271911621, 0.7882840633392334], [0.5274002552032471, 0.7865387797355652], [0.5273320078849792, 0.7853198647499084], [0.5271849036216736, 0.7837173938751221], [0.5271086692810059, 0.782628059387207], [0.5269284844398499, 0.7811024188995361], [0.5268248319625854, 0.7800894379615784], [0.5265875458717346, 0.7784498929977417], [0.5264487266540527, 0.7773362398147583], [0.5261783599853516, 0.7754731178283691], [0.5259915590286255, 0.7741167545318604], [0.5256973505020142, 0.7719631791114807], [0.5254504680633545, 0.7702641487121582], [0.5251138210296631, 0.7677541971206665], [0.5248008966445923, 0.7656562924385071], [0.5244001150131226, 0.7628316283226013], [0.5240166783332825, 0.7603942155838013], [0.5235481858253479, 0.757347822189331], [0.5231218338012695, 0.7546757459640503], [0.5226197242736816, 0.7514908313751221], [0.522174060344696, 0.7486680746078491], [0.5216667056083679, 0.7454235553741455], [0.5212350487709045, 0.7425230741500854], [0.5207532048225403, 0.7392423152923584], [0.5203449726104736, 0.7362881302833557], [0.5198977589607239, 0.7330081462860107], [0.5195227861404419, 0.7300330400466919], [0.5191174745559692, 0.7267475128173828], [0.5187827944755554, 0.7237541079521179], [0.5184245705604553, 
0.7204703092575073], [0.5181266665458679, 0.7174752950668335], [0.5178105235099792, 0.714187502861023], [0.5175513029098511, 0.711172342300415], [0.5172733068466187, 0.7078683972358704], [0.5170417428016663, 0.7048344016075134], [0.5167952179908752, 0.7015206813812256], [0.5165989398956299, 0.6984741687774658], [0.5163876414299011, 0.6951569318771362], [0.51621413230896, 0.6921435594558716], [0.5160152316093445, 0.6887620687484741], [0.5158563852310181, 0.6856601238250732], [0.5156773328781128, 0.6822754144668579], [0.5155245661735535, 0.6791083216667175], [0.5153378248214722, 0.6759108304977417], [0.5151171684265137, 0.6728819608688354], [0.5148731470108032, 0.6697980165481567], [0.5147032737731934, 0.6666513085365295], [0.5146329402923584, 0.6643136143684387], [0.5147042274475098, 0.6618766784667969], [0.5151673555374146, 0.6591105461120605], [0.5160714983940125, 0.6572381258010864], [0.5188989639282227, 0.6499910354614258], [0.5188205242156982, 0.648845911026001], [0.518528938293457, 0.6472314596176147], [0.5167942047119141, 0.640956699848175], [0.5155646800994873, 0.6358628869056702], [0.5146253705024719, 0.6293601989746094], [0.5139792561531067, 0.6265305280685425], [0.5134328603744507, 0.6234878301620483], [0.5129700303077698, 0.6202657222747803], [0.5126687288284302, 0.6170508861541748], [0.5124056339263916, 0.6134494543075562], [0.512208104133606, 0.6098440885543823], [0.5120396614074707, 0.6059998273849487], [0.5119102001190186, 0.6021549701690674], [0.5118288993835449, 0.5980002880096436], [0.5118098258972168, 0.5938720107078552], [0.51189124584198, 0.5897209644317627], [0.5120677947998047, 0.585711658000946], [0.512064516544342, 0.5824464559555054], [0.5119190216064453, 0.5793042182922363], [0.5116742849349976, 0.5764880776405334], [0.5114345550537109, 0.5737771391868591], [0.5111967921257019, 0.5712888836860657], [0.510948896408081, 0.5688818693161011], [0.5107234716415405, 0.5666935443878174], [0.5104984045028687, 0.5645708441734314], [0.5102787613868713, 0.5625758171081543], [0.5100326538085938, 0.560620129108429], [0.5094451904296875, 0.4776868224143982], [0.5097066760063171, 0.4760798215866089], [0.510002613067627, 0.4744776487350464], [0.5103225708007812, 0.47283482551574707], [0.5106781721115112, 0.4710979461669922], [0.5110782384872437, 0.4692215323448181], [0.5115107297897339, 0.4672289192676544], [0.5119829177856445, 0.4651108384132385], [0.5125041007995605, 0.462821364402771], [0.513130247592926, 0.4598919749259949], [0.5137530565261841, 0.4565986692905426], [0.5142989158630371, 0.4532559812068939], [0.5148205757141113, 0.44962066411972046], [0.5152750611305237, 0.44595327973365784], [0.5157243013381958, 0.442076712846756], [0.516128420829773, 0.4382712244987488], [0.5165748596191406, 0.43419766426086426], [0.5170229077339172, 0.43029263615608215], [0.5175497531890869, 0.4262242317199707], [0.5313810706138611, 0.837360143661499], [0.5313532948493958, 0.8343889713287354], [0.5313392281532288, 0.8308526277542114], [0.5313421487808228, 0.8281618356704712], [0.5313841104507446, 0.8249419927597046], [0.5314515829086304, 0.8225204944610596], [0.53158038854599, 0.8195682764053345], [0.5317025184631348, 0.8173134326934814], [0.5318820476531982, 0.8145350813865662], [0.5320327281951904, 0.812430739402771], [0.5322466492652893, 0.8097929954528809], [0.5324272513389587, 0.8077499270439148], [0.5326886773109436, 0.8050737380981445], [0.5328421592712402, 0.8028317093849182], [0.5329800844192505, 0.8000342845916748], [0.5329979062080383, 0.7979000806808472], [0.532979428768158, 
0.7953100204467773], [0.5328973531723022, 0.7934402823448181], [0.5327566266059875, 0.7911679744720459], [0.5326213240623474, 0.7895523309707642], [0.5324076414108276, 0.7875595092773438], [0.5322689414024353, 0.7862393260002136], [0.5320339202880859, 0.7845086455345154], [0.5319170951843262, 0.7833880186080933], [0.5316991209983826, 0.7817901372909546], [0.531606137752533, 0.7807556390762329], [0.5314124822616577, 0.7791681289672852], [0.5313271880149841, 0.7780928015708923], [0.5311459898948669, 0.7763680219650269], [0.5310320258140564, 0.7750767469406128], [0.5308313965797424, 0.7730949521064758], [0.5306376814842224, 0.7714544534683228], [0.5303605198860168, 0.7691667079925537], [0.53004390001297, 0.7671072483062744], [0.5296455025672913, 0.7644999027252197], [0.5292078852653503, 0.7620731592178345], [0.5286957621574402, 0.7592065930366516], [0.5281854867935181, 0.7565509080886841], [0.5276145339012146, 0.7535308599472046], [0.527077317237854, 0.7507154941558838], [0.5264900922775269, 0.7475917339324951], [0.5259653329849243, 0.7447061538696289], [0.5254001021385193, 0.7415478229522705], [0.5249006152153015, 0.7385988235473633], [0.524371325969696, 0.7354140281677246], [0.5239116549491882, 0.732441782951355], [0.5234264135360718, 0.7292556166648865], [0.5230088829994202, 0.7262625694274902], [0.5225744247436523, 0.7230767607688904], [0.5221983194351196, 0.7200582027435303], [0.5218098163604736, 0.7168799638748169], [0.5214722156524658, 0.7138433456420898], [0.5211286544799805, 0.7106614708900452], [0.5208324193954468, 0.7075847387313843], [0.5205215215682983, 0.7043764591217041], [0.5202412605285645, 0.701274037361145], [0.5199655294418335, 0.6980947256088257], [0.5197444558143616, 0.6949770450592041], [0.5195197463035583, 0.6918038129806519], [0.5193110108375549, 0.6885961890220642], [0.5190935730934143, 0.6853569746017456], [0.5189037322998047, 0.6821315288543701], [0.5187218189239502, 0.6788479685783386], [0.5185831189155579, 0.6757602691650391], [0.5183955430984497, 0.6725122928619385], [0.5181792974472046, 0.6695502996444702], [0.5178360342979431, 0.6662791967391968], [0.5175855755805969, 0.6641309261322021], [0.5174444317817688, 0.6619316339492798], [0.51759934425354, 0.6597803831100464], [0.5178869962692261, 0.6582029461860657], [0.520041286945343, 0.6503182649612427], [0.5202086567878723, 0.648491382598877], [0.5199645757675171, 0.6469911336898804], [0.5184139609336853, 0.6402109265327454], [0.5173641443252563, 0.6356290578842163], [0.5164507627487183, 0.6289675235748291], [0.516173243522644, 0.6261254549026489], [0.5157411694526672, 0.623016357421875], [0.5153778195381165, 0.6199768781661987], [0.5150700807571411, 0.6167834997177124], [0.5148128271102905, 0.6132817268371582], [0.5145907402038574, 0.6096738576889038], [0.5144168138504028, 0.6059053540229797], [0.5142742991447449, 0.6020471453666687], [0.5141838192939758, 0.5979838371276855], [0.514178454875946, 0.5938735008239746], [0.5142837762832642, 0.5898520350456238], [0.5143673419952393, 0.585771918296814], [0.5143303871154785, 0.5823533535003662], [0.5140820741653442, 0.5790706276893616], [0.5138559341430664, 0.5762722492218018], [0.513600766658783, 0.5735143423080444], [0.5133529901504517, 0.5710326433181763], [0.5131029486656189, 0.5685969591140747], [0.5128489136695862, 0.5664033889770508], [0.5125994682312012, 0.5642842054367065], [0.5123324990272522, 0.5622460842132568], [0.5120508670806885, 0.5602802038192749], [0.5112920999526978, 0.4780961573123932], [0.5116245746612549, 0.47653597593307495], [0.5119754672050476, 
0.4750182628631592], [0.5123781561851501, 0.47337600588798523], [0.5127992630004883, 0.47174203395843506], [0.5133035182952881, 0.46985113620758057], [0.5138190984725952, 0.46796801686286926], [0.5144156217575073, 0.46582359075546265], [0.5150241851806641, 0.4636520743370056], [0.5158227682113647, 0.46056067943573], [0.5165371894836426, 0.457305908203125], [0.5172226428985596, 0.4537957012653351], [0.5178300738334656, 0.4502241015434265], [0.5184321999549866, 0.44640636444091797], [0.5189666152000427, 0.4426395297050476], [0.5194898843765259, 0.43866556882858276], [0.5199904441833496, 0.43480217456817627], [0.5205639004707336, 0.4307798743247986], [0.5211359262466431, 0.42696234583854675], [0.5360476970672607, 0.8375742435455322], [0.5359787344932556, 0.834018349647522], [0.5359476804733276, 0.831133246421814], [0.5359681248664856, 0.8279275894165039], [0.536026120185852, 0.825376570224762], [0.5361432433128357, 0.8224799633026123], [0.5362745523452759, 0.820186972618103], [0.5364711880683899, 0.8174245357513428], [0.5366442799568176, 0.815267026424408], [0.5368731021881104, 0.8126360774040222], [0.5370582938194275, 0.8106024861335754], [0.537306010723114, 0.8080452680587769], [0.5375415682792664, 0.8060714602470398], [0.5379140377044678, 0.8029743432998657], [0.5380670428276062, 0.8005950450897217], [0.5382121801376343, 0.7977076768875122], [0.5382379293441772, 0.7955283522605896], [0.5382325649261475, 0.7929227352142334], [0.5381613373756409, 0.7910270690917969], [0.5380480289459229, 0.7888015508651733], [0.5379531383514404, 0.7871902585029602], [0.5377899408340454, 0.7853113412857056], [0.5376920700073242, 0.7839949131011963], [0.537492573261261, 0.7823535799980164], [0.5373867750167847, 0.7812306880950928], [0.5371570587158203, 0.7796846032142639], [0.5370299816131592, 0.7785963416099548], [0.5367521047592163, 0.776980996131897], [0.5365829467773438, 0.7758098244667053], [0.5362556576728821, 0.7739349603652954], [0.5360196828842163, 0.7725182771682739], [0.5356680750846863, 0.7703062295913696], [0.535336434841156, 0.7684606909751892], [0.5349019765853882, 0.7658791542053223], [0.5344555377960205, 0.7636350393295288], [0.5339018106460571, 0.7607690691947937], [0.5333640575408936, 0.758251428604126], [0.5327284336090088, 0.7552374601364136], [0.5321454405784607, 0.7525697946548462], [0.531473696231842, 0.7494587898254395], [0.5308871865272522, 0.7466869354248047], [0.5302298069000244, 0.7435431480407715], [0.5296635627746582, 0.7407194972038269], [0.5290356278419495, 0.7375446557998657], [0.5285052061080933, 0.7346803545951843], [0.5279226303100586, 0.7314971685409546], [0.5274330377578735, 0.728611409664154], [0.526896595954895, 0.7254241704940796], [0.5264447927474976, 0.7225201725959778], [0.5259544849395752, 0.7193225622177124], [0.5255438089370728, 0.7164021134376526], [0.525097131729126, 0.7131974697113037], [0.5247246026992798, 0.7102614641189575], [0.5243176221847534, 0.7070271968841553], [0.5239710807800293, 0.7040258049964905], [0.5236104726791382, 0.7007869482040405], [0.5233278870582581, 0.6978253722190857], [0.5230269432067871, 0.6945842504501343], [0.5227860808372498, 0.6915854215621948], [0.5225178599357605, 0.6882427930831909], [0.5222986936569214, 0.6851629614830017], [0.5220653414726257, 0.6818268299102783], [0.5218875408172607, 0.6787125468254089], [0.5216876864433289, 0.6754894852638245], [0.5215340256690979, 0.672370433807373], [0.5213367342948914, 0.669174313545227], [0.5210596919059753, 0.6659095883369446], [0.5208039283752441, 0.6636052131652832], 
[0.5205243825912476, 0.661649763584137], [0.520378828048706, 0.6599854230880737], [0.5203818082809448, 0.6585753560066223], [0.5208066701889038, 0.650506854057312], [0.5208449363708496, 0.6484216451644897], [0.5206434726715088, 0.6468249559402466], [0.5196589827537537, 0.6399475932121277], [0.5192862749099731, 0.6353131532669067], [0.518711507320404, 0.6287153959274292], [0.5183594226837158, 0.6257878541946411], [0.5180646181106567, 0.6227056980133057], [0.5177706480026245, 0.6196656227111816], [0.517515242099762, 0.6165624856948853], [0.5172560811042786, 0.6130632758140564], [0.5170467495918274, 0.6095279455184937], [0.5168417692184448, 0.6057547926902771], [0.5166623592376709, 0.6019478440284729], [0.5165148973464966, 0.5979145765304565], [0.5164461135864258, 0.5938881635665894], [0.5164263248443604, 0.5898975133895874], [0.5164657831192017, 0.5858038663864136], [0.5163122415542603, 0.5821939706802368], [0.516149640083313, 0.5789156556129456], [0.5159523487091064, 0.5760464668273926], [0.5157117247581482, 0.5732765197753906], [0.5154739618301392, 0.5707669258117676], [0.5152101516723633, 0.5683153867721558], [0.5149579644203186, 0.566093921661377], [0.5146744251251221, 0.5639503598213196], [0.5143582820892334, 0.5618706941604614], [0.5139999389648438, 0.5598689317703247], [0.5130128860473633, 0.47863367199897766], [0.5134845972061157, 0.47717970609664917], [0.5139641165733337, 0.47565531730651855], [0.5144402384757996, 0.47412341833114624], [0.5149585008621216, 0.47246798872947693], [0.5155184268951416, 0.4706854224205017], [0.5161294937133789, 0.46877220273017883], [0.5167844891548157, 0.4667499363422394], [0.5175160765647888, 0.46454331278800964], [0.5184736847877502, 0.4614654779434204], [0.5193958878517151, 0.4580402970314026], [0.5201890468597412, 0.4545837640762329], [0.5209599733352661, 0.45083338022232056], [0.5216354727745056, 0.44712090492248535], [0.5223038196563721, 0.4431632161140442], [0.5229071378707886, 0.43934929370880127], [0.5235411524772644, 0.43533840775489807], [0.5241324305534363, 0.43155449628829956], [0.524793267250061, 0.4275822043418884], [0.5406534671783447, 0.837083101272583], [0.5405482053756714, 0.8341870307922363], [0.5405035614967346, 0.8308355212211609], [0.540532648563385, 0.8283218145370483], [0.5406443476676941, 0.825323224067688], [0.5407658219337463, 0.8230643272399902], [0.5409516096115112, 0.8203189373016357], [0.5411286950111389, 0.8181674480438232], [0.5413753390312195, 0.8155266046524048], [0.5415863990783691, 0.8134715557098389], [0.5418551564216614, 0.8109135627746582], [0.5420627593994141, 0.8089011907577515], [0.542312741279602, 0.8065365552902222], [0.5426073670387268, 0.8038325309753418], [0.5429393649101257, 0.8007394075393677], [0.543114423751831, 0.7982892990112305], [0.5432383418083191, 0.795265793800354], [0.5431951284408569, 0.7930465936660767], [0.5430467128753662, 0.7903528213500977], [0.5428509712219238, 0.7885274887084961], [0.5425552129745483, 0.7863553762435913], [0.5423834323883057, 0.784956693649292], [0.5420975685119629, 0.7831032872200012], [0.5419650673866272, 0.7819520235061646], [0.5417033433914185, 0.7802897095680237], [0.541569709777832, 0.7791821956634521], [0.541321337223053, 0.7776299715042114], [0.5412120819091797, 0.7765060663223267], [0.5410036444664001, 0.7748267650604248], [0.5409040451049805, 0.7735296487808228], [0.5407260060310364, 0.7714716196060181], [0.5404902696609497, 0.7696613073348999], [0.5401597023010254, 0.767237663269043], [0.5397181510925293, 0.7649564146995544], [0.5391785502433777, 
[... omitted: an extended run of normalized 2D landmark coordinates, stored as [x, y] pairs with values in (0, 1); the raw numeric payload of this data file is elided here for readability ...]
0.5565569400787354], [0.5600349307060242, 0.5521095395088196], [0.560070812702179, 0.5476535558700562], [0.5602065920829773, 0.5434674024581909], [0.5604743957519531, 0.5391577482223511], [0.5607153177261353, 0.5348116159439087], [0.5611435174942017, 0.5308037400245667], [0.5618728399276733, 0.5264930725097656], [0.5627572536468506, 0.5230072736740112], [0.5634816884994507, 0.5200640559196472], [0.5642390847206116, 0.5178014636039734], [0.564982533454895, 0.5157390832901001], [0.565630316734314, 0.5137739181518555], [0.5663686394691467, 0.5118004083633423], [0.5669397115707397, 0.5103201866149902], [0.5677281022071838, 0.5081652402877808], [0.5683755874633789, 0.5062683820724487], [0.5692286491394043, 0.5039687752723694], [0.5699848532676697, 0.5013085603713989], [0.5709273219108582, 0.49846139550209045], [0.5717906355857849, 0.4953399896621704], [0.572782039642334, 0.49214065074920654], [0.573660135269165, 0.48877477645874023], [0.5746819376945496, 0.48523569107055664], [0.5756134390830994, 0.48158329725265503], [0.5767669677734375, 0.47764158248901367], [0.5778452157974243, 0.4736601710319519], [0.5791420936584473, 0.4694369435310364], [0.5803951025009155, 0.4652317762374878], [0.5817949771881104, 0.46095988154411316], [0.583134651184082, 0.45680367946624756], [0.5846095085144043, 0.4525136649608612], [0.5859774351119995, 0.44837242364883423], [0.6053293943405151, 0.8390508890151978], [0.6052087545394897, 0.8360286951065063], [0.6052011847496033, 0.8337097764015198], [0.6050413250923157, 0.8304550647735596], [0.6047483682632446, 0.8276064991950989], [0.6042970418930054, 0.8244329690933228], [0.6038181185722351, 0.82170569896698], [0.6031885147094727, 0.8185912370681763], [0.6026148796081543, 0.8159114122390747], [0.6018948554992676, 0.81281977891922], [0.6012579202651978, 0.8101285696029663], [0.6004657745361328, 0.8069379329681396], [0.599785566329956, 0.804099440574646], [0.5989558100700378, 0.8007071614265442], [0.5983057022094727, 0.7975599765777588], [0.5975236892700195, 0.7938820719718933], [0.5968936085700989, 0.7907931804656982], [0.5959222316741943, 0.7880169749259949], [0.5951069593429565, 0.785571813583374], [0.5937833189964294, 0.7818710207939148], [0.5936425924301147, 0.7805055379867554], [0.5938721895217896, 0.7774431705474854], [0.5934485793113708, 0.7754645943641663], [0.5929786562919617, 0.7724556922912598], [0.5928217768669128, 0.7699520587921143], [0.5925377607345581, 0.7677130699157715], [0.5922836661338806, 0.7658621668815613], [0.591951847076416, 0.7641342878341675], [0.5915818214416504, 0.7626489400863647], [0.5911819934844971, 0.7611197233200073], [0.590764045715332, 0.7601368427276611], [0.5903390645980835, 0.758660078048706], [0.5900529026985168, 0.7573574781417847], [0.5888038873672485, 0.7530595064163208], [0.588163435459137, 0.7491600513458252], [0.587627649307251, 0.7456711530685425], [0.5872269868850708, 0.7425611019134521], [0.5867116451263428, 0.7393210530281067], [0.5862984657287598, 0.7363556623458862], [0.5857943296432495, 0.7333240509033203], [0.5853797197341919, 0.730533242225647], [0.5848627090454102, 0.7275227904319763], [0.584446132183075, 0.7247104644775391], [0.5839385986328125, 0.7217657566070557], [0.5835126042366028, 0.7190208435058594], [0.5829895734786987, 0.7161203622817993], [0.5825580954551697, 0.713426947593689], [0.5820261836051941, 0.7105549573898315], [0.5815650224685669, 0.7078707218170166], [0.5810036659240723, 0.7050122618675232], [0.5805164575576782, 0.7023383378982544], [0.5799223780632019, 0.6994696855545044], 
[0.5794147849082947, 0.6967832446098328], [0.5787968039512634, 0.6939084529876709], [0.5782612562179565, 0.6912102699279785], [0.5776227712631226, 0.6883595585823059], [0.57707679271698, 0.6856787204742432], [0.5764291286468506, 0.6828420162200928], [0.5758708715438843, 0.6802154779434204], [0.5751985311508179, 0.6774352788925171], [0.5745337009429932, 0.6747151613235474], [0.5736798048019409, 0.6717897057533264], [0.5729193687438965, 0.6690112352371216], [0.5720989108085632, 0.6660581827163696], [0.5714774131774902, 0.6632639169692993], [0.5709014534950256, 0.6605249047279358], [0.5703792572021484, 0.6578845977783203], [0.5698968172073364, 0.6548891067504883], [0.5694558620452881, 0.6520282030105591], [0.5688928365707397, 0.6485582590103149], [0.5685398578643799, 0.6451963186264038], [0.56809002161026, 0.6420196890830994], [0.5677153468132019, 0.6389869451522827], [0.5673613548278809, 0.6360179781913757], [0.5670077800750732, 0.6332885026931763], [0.566664457321167, 0.6299610137939453], [0.5664549469947815, 0.6266945600509644], [0.5662041902542114, 0.6229971051216125], [0.5661454796791077, 0.6192116737365723], [0.5659879446029663, 0.6154994964599609], [0.5658671855926514, 0.6119115352630615], [0.5656623840332031, 0.6081116199493408], [0.5655415654182434, 0.6044114828109741], [0.5653623342514038, 0.6006399393081665], [0.5652800798416138, 0.596960186958313], [0.5651479959487915, 0.5933060646057129], [0.5651148557662964, 0.5897723436355591], [0.5650191307067871, 0.5863236784934998], [0.5649728178977966, 0.5829836130142212], [0.5648453235626221, 0.5797423124313354], [0.5647633671760559, 0.5766066908836365], [0.5646188259124756, 0.5735857486724854], [0.5644242167472839, 0.5704821944236755], [0.5641893148422241, 0.5673047304153442], [0.5639712810516357, 0.564190149307251], [0.5638895034790039, 0.5608444213867188], [0.5639784336090088, 0.5566307902336121], [0.5640208125114441, 0.5522037744522095], [0.5643688440322876, 0.5480287671089172], [0.5649027824401855, 0.5440158843994141], [0.5653855800628662, 0.5396157503128052], [0.5660116672515869, 0.5357264876365662], [0.5667821168899536, 0.5319762229919434], [0.5676940083503723, 0.5282365679740906], [0.5685285925865173, 0.5247691869735718], [0.5693069100379944, 0.522331714630127], [0.5699957609176636, 0.5200268030166626], [0.5705723166465759, 0.5181069374084473], [0.5712860226631165, 0.5159029960632324], [0.571784496307373, 0.5142369270324707], [0.5723307132720947, 0.512249231338501], [0.5728511810302734, 0.5103216171264648], [0.5735081434249878, 0.508231520652771], [0.5741041898727417, 0.505732536315918], [0.5749026536941528, 0.5029990077018738], [0.5755898356437683, 0.5001027584075928], [0.5764998197555542, 0.49692681431770325], [0.577292799949646, 0.49368762969970703], [0.5782535076141357, 0.4902241826057434], [0.5791037678718567, 0.4867278039455414], [0.5801282525062561, 0.4829845726490021], [0.5810753107070923, 0.4791674017906189], [0.5822394490242004, 0.47509896755218506], [0.5833452939987183, 0.47105932235717773], [0.5846790075302124, 0.46677231788635254], [0.5859317779541016, 0.46270179748535156], [0.5873595476150513, 0.45841261744499207], [0.5886903405189514, 0.45431268215179443], [0.5901861786842346, 0.45001164078712463], [0.6096560955047607, 0.8384695649147034], [0.6095362305641174, 0.836121678352356], [0.6093882918357849, 0.8333359956741333], [0.6091415882110596, 0.830176591873169], [0.6086715459823608, 0.8267227411270142], [0.6081923842430115, 0.8238332271575928], [0.6075368523597717, 0.8205612897872925], [0.6069441437721252, 
0.8178129196166992], [0.6061974763870239, 0.8146688938140869], [0.605536162853241, 0.8119488954544067], [0.6047285199165344, 0.8088428974151611], [0.6040158271789551, 0.8060523271560669], [0.6031977534294128, 0.8028718829154968], [0.6025267839431763, 0.7999148368835449], [0.6018500328063965, 0.7965710759162903], [0.6014066934585571, 0.7933000326156616], [0.6007227301597595, 0.7895466685295105], [0.5998994708061218, 0.786347508430481], [0.5985974073410034, 0.7835949659347534], [0.5982475280761719, 0.7814533710479736], [0.5981418490409851, 0.7799699306488037], [0.5974016785621643, 0.7770951986312866], [0.5970249772071838, 0.7742821574211121], [0.5965881943702698, 0.7720670104026794], [0.5961280465126038, 0.7693691253662109], [0.5958744287490845, 0.7672811150550842], [0.5955333113670349, 0.7651110887527466], [0.595231294631958, 0.7634047865867615], [0.5948595404624939, 0.7615655660629272], [0.5945173501968384, 0.7601557970046997], [0.5941129922866821, 0.7584127187728882], [0.5937495231628418, 0.7575129270553589], [0.5930203795433044, 0.7559213638305664], [0.5920066237449646, 0.7523288726806641], [0.5911737680435181, 0.748451828956604], [0.5907341837882996, 0.7452824711799622], [0.5902435779571533, 0.7419238090515137], [0.5898855924606323, 0.738906741142273], [0.5894085168838501, 0.7356699109077454], [0.5890354514122009, 0.7328481674194336], [0.5885502696037292, 0.7298036217689514], [0.5881603360176086, 0.7270168662071228], [0.58766770362854, 0.7239810228347778], [0.5872852206230164, 0.7212392687797546], [0.5867977142333984, 0.7182563543319702], [0.586406409740448, 0.7155500650405884], [0.5859073400497437, 0.7126287221908569], [0.5854882597923279, 0.7099301218986511], [0.584958553314209, 0.707014799118042], [0.5845144391059875, 0.7043343782424927], [0.583958089351654, 0.7014281749725342], [0.583483874797821, 0.698733925819397], [0.5829014778137207, 0.6958386898040771], [0.5824053287506104, 0.693122148513794], [0.5818012952804565, 0.6902197599411011], [0.5812960863113403, 0.6875284910202026], [0.5806883573532104, 0.6846602559089661], [0.5801690816879272, 0.6819628477096558], [0.579533576965332, 0.6791209578514099], [0.5789425373077393, 0.6764308214187622], [0.5782299041748047, 0.6734986305236816], [0.5776585340499878, 0.670684814453125], [0.5770474672317505, 0.6677314639091492], [0.576561450958252, 0.6649616956710815], [0.5760501623153687, 0.6621328592300415], [0.5755906701087952, 0.659477710723877], [0.5750704407691956, 0.6567164063453674], [0.5745299458503723, 0.6539945006370544], [0.5739150643348694, 0.6509466171264648], [0.5734257698059082, 0.6478668451309204], [0.5729243755340576, 0.6445040702819824], [0.5726067423820496, 0.6413812637329102], [0.5722912549972534, 0.638360321521759], [0.5720192790031433, 0.6354366540908813], [0.5717076063156128, 0.6325169801712036], [0.5713573694229126, 0.6294697523117065], [0.5709446668624878, 0.6261371374130249], [0.5706438422203064, 0.622698187828064], [0.5703166723251343, 0.6189709901809692], [0.5701828598976135, 0.6153297424316406], [0.5699707865715027, 0.6116252541542053], [0.5698249936103821, 0.6079226732254028], [0.5696370601654053, 0.6041847467422485], [0.5695573687553406, 0.6005104780197144], [0.5694370269775391, 0.5968138575553894], [0.5694125890731812, 0.593226969242096], [0.5693347454071045, 0.5896929502487183], [0.5693303346633911, 0.5862460136413574], [0.5692567825317383, 0.582876443862915], [0.5692371129989624, 0.5796051025390625], [0.5691261887550354, 0.5764217376708984], [0.5689734816551208, 0.5732535719871521], [0.5687432289123535, 
0.570098340511322], [0.5685499310493469, 0.5669646263122559], [0.5682852864265442, 0.5637266039848328], [0.5679271817207336, 0.5605913400650024], [0.5677123665809631, 0.556713342666626], [0.56817626953125, 0.5527510046958923], [0.5690010786056519, 0.5492154359817505], [0.5697250962257385, 0.5444694757461548], [0.5701814889907837, 0.5404903888702393], [0.5708162188529968, 0.5368112921714783], [0.5716288089752197, 0.5336309671401978], [0.5728276968002319, 0.5302020311355591], [0.5740985870361328, 0.5268649458885193], [0.5748291015625, 0.5245447158813477], [0.5756189227104187, 0.521918773651123], [0.5761059522628784, 0.5199781060218811], [0.5766398906707764, 0.5179600715637207], [0.5771905183792114, 0.5160460472106934], [0.5777275562286377, 0.5140314102172852], [0.5782473087310791, 0.51209557056427], [0.5786988735198975, 0.5097435116767883], [0.5792933702468872, 0.5072506666183472], [0.5798263549804688, 0.5044811964035034], [0.5805433988571167, 0.501517653465271], [0.5811831951141357, 0.49842292070388794], [0.5820392370223999, 0.49508053064346313], [0.5827990770339966, 0.49168992042541504], [0.5837292075157166, 0.48806387186050415], [0.5845593214035034, 0.48443880677223206], [0.5855982899665833, 0.48052701354026794], [0.5865722298622131, 0.4766579270362854], [0.5877757668495178, 0.47252851724624634], [0.5889354348182678, 0.46849334239959717], [0.590282678604126, 0.4642917215824127], [0.5915619134902954, 0.46021848917007446], [0.5930046439170837, 0.45596814155578613], [0.5943551659584045, 0.4518930912017822], [0.6136959791183472, 0.8383669853210449], [0.6134192943572998, 0.8355162739753723], [0.6132135391235352, 0.8332507610321045], [0.6128294467926025, 0.829447865486145], [0.6123737096786499, 0.8261951208114624], [0.6117339134216309, 0.8227988481521606], [0.6111087203025818, 0.8198068737983704], [0.6103430986404419, 0.8166261911392212], [0.6096588969230652, 0.8138242959976196], [0.6088284254074097, 0.8106951713562012], [0.6080807447433472, 0.8079303503036499], [0.6071885228157043, 0.8048096895217896], [0.6064152717590332, 0.8020156025886536], [0.6055279970169067, 0.7988505363464355], [0.6047831773757935, 0.7959511280059814], [0.6039404273033142, 0.7926331758499146], [0.6030279397964478, 0.7889394164085388], [0.6021653413772583, 0.785527229309082], [0.6019459962844849, 0.7826423645019531], [0.6015572547912598, 0.7808526754379272], [0.6010103225708008, 0.7791367173194885], [0.6005216240882874, 0.7763515710830688], [0.6001198291778564, 0.7738869786262512], [0.5997694730758667, 0.7711963653564453], [0.5994325876235962, 0.7689616680145264], [0.5990487933158875, 0.7665755748748779], [0.5987143516540527, 0.7645554542541504], [0.5983102321624756, 0.7625238299369812], [0.5979148149490356, 0.7608131170272827], [0.5973935723304749, 0.7589936256408691], [0.596854031085968, 0.7575167417526245], [0.5964879393577576, 0.7561534643173218], [0.5961629152297974, 0.754806399345398], [0.5951252579689026, 0.7512457966804504], [0.5944908857345581, 0.74793541431427], [0.5939228534698486, 0.7445781826972961], [0.5935391187667847, 0.7415441274642944], [0.5930600166320801, 0.738247275352478], [0.592681348323822, 0.735247015953064], [0.5922272205352783, 0.7321681976318359], [0.591873824596405, 0.7293471097946167], [0.5914247632026672, 0.7263147234916687], [0.5910704731941223, 0.723504900932312], [0.5906206369400024, 0.7205217480659485], [0.5902603268623352, 0.7177505493164062], [0.5898003578186035, 0.7147988677024841], [0.5894191265106201, 0.7120649814605713], [0.5889293551445007, 0.7091264724731445], 
[0.5885259509086609, 0.7064014673233032], [0.5880159139633179, 0.7034785747528076], [0.5875771045684814, 0.7007461786270142], [0.5870363712310791, 0.6978316307067871], [0.5865792036056519, 0.6951048970222473], [0.5860176086425781, 0.6921850442886353], [0.5855491161346436, 0.6894452571868896], [0.5849829316139221, 0.6865575909614563], [0.5845022201538086, 0.6838420629501343], [0.5839194059371948, 0.680956244468689], [0.5834195017814636, 0.6782231330871582], [0.5828266143798828, 0.6753140687942505], [0.5823243856430054, 0.6725420355796814], [0.58173668384552, 0.669622004032135], [0.5812576413154602, 0.6668586730957031], [0.5807272791862488, 0.6639902591705322], [0.5802728533744812, 0.661300539970398], [0.5797612071037292, 0.6584901809692383], [0.579298734664917, 0.65583336353302], [0.5787678956985474, 0.6529088020324707], [0.5783084630966187, 0.6500988006591797], [0.5777809619903564, 0.6469665765762329], [0.5773752331733704, 0.6438838243484497], [0.5769737362861633, 0.6407625675201416], [0.5766655802726746, 0.6378490328788757], [0.5763173699378967, 0.6348415613174438], [0.5760006904602051, 0.6319654583930969], [0.575617790222168, 0.6287915110588074], [0.5753132700920105, 0.625645637512207], [0.5749412775039673, 0.6221610307693481], [0.5747076272964478, 0.6186796426773071], [0.5743986964225769, 0.6149904727935791], [0.5741982460021973, 0.6113767623901367], [0.5739712119102478, 0.6076605319976807], [0.5738657712936401, 0.6040365099906921], [0.5737232565879822, 0.6003538370132446], [0.5736919045448303, 0.5967546701431274], [0.5736159086227417, 0.5931469202041626], [0.5736079216003418, 0.5896432399749756], [0.5735362768173218, 0.5861876606941223], [0.573527455329895, 0.5828047394752502], [0.5734441876411438, 0.5795193910598755], [0.5733996033668518, 0.5762495994567871], [0.5732896327972412, 0.5729967951774597], [0.5732017159461975, 0.5698082447052002], [0.5730297565460205, 0.5666269063949585], [0.5728561282157898, 0.5633136630058289], [0.5726758241653442, 0.5600405931472778], [0.5726832151412964, 0.5569598078727722], [0.5730819702148438, 0.5540565252304077], [0.5738322734832764, 0.5505883693695068], [0.5746289491653442, 0.5448470115661621], [0.5749878883361816, 0.5414087176322937], [0.5757323503494263, 0.5384300351142883], [0.5766367316246033, 0.5353434681892395], [0.5774943232536316, 0.5321462154388428], [0.5785568952560425, 0.528830885887146], [0.5794370174407959, 0.5261709690093994], [0.5801573991775513, 0.5234686732292175], [0.5807403326034546, 0.5215600728988647], [0.581329882144928, 0.5196361541748047], [0.5818871259689331, 0.517619252204895], [0.5824506282806396, 0.5156152248382568], [0.5829492807388306, 0.5133302211761475], [0.5835011005401611, 0.5110020637512207], [0.5839599370956421, 0.5084093809127808], [0.5845494270324707, 0.5056570172309875], [0.5850759744644165, 0.5027377605438232], [0.5857893228530884, 0.4996493458747864], [0.5864434242248535, 0.49637091159820557], [0.5872704982757568, 0.4929179549217224], [0.5880333185195923, 0.4893747866153717], [0.5889546275138855, 0.4856683313846588], [0.589821994304657, 0.4819300174713135], [0.590884804725647, 0.4779716730117798], [0.5919005274772644, 0.4740716516971588], [0.5931445360183716, 0.46995270252227783], [0.5943312644958496, 0.4659613370895386], [0.5957095623016357, 0.4617767632007599], [0.5970216393470764, 0.4577370285987854], [0.5984933972358704, 0.4535188376903534], [0.6176861524581909, 0.8376069664955139], [0.6174445152282715, 0.8352822065353394], [0.6171548366546631, 0.8325351476669312], [0.6166212558746338, 
0.8289666175842285], [0.6159205436706543, 0.8252201080322266], [0.6152957081794739, 0.8220804929733276], [0.6144863367080688, 0.8186442852020264], [0.6138035655021667, 0.8157773613929749], [0.6129526495933533, 0.81257164478302], [0.6121855974197388, 0.8097514510154724], [0.6112549901008606, 0.8066128492355347], [0.6104273796081543, 0.8038297295570374], [0.6094337701797485, 0.8007200956344604], [0.6085798740386963, 0.7979483604431152], [0.6075407862663269, 0.7947872877120972], [0.6066010594367981, 0.7918688654899597], [0.6055071353912354, 0.7880347967147827], [0.6050227880477905, 0.7850933074951172], [0.6046159267425537, 0.7823221683502197], [0.6045182347297668, 0.7802517414093018], [0.6043248176574707, 0.778163492679596], [0.6038520336151123, 0.7757911682128906], [0.6034241914749146, 0.7731332778930664], [0.6030338406562805, 0.7707569003105164], [0.6026126146316528, 0.7681680917739868], [0.6022447347640991, 0.7660549879074097], [0.6017783880233765, 0.7636913657188416], [0.6013834476470947, 0.7618346214294434], [0.6008630990982056, 0.7597386837005615], [0.6004399061203003, 0.7580775618553162], [0.5999559164047241, 0.7561217546463013], [0.5995832681655884, 0.7551168203353882], [0.5990197062492371, 0.7536149024963379], [0.598343551158905, 0.7505509853363037], [0.5977094173431396, 0.7471431493759155], [0.5972961187362671, 0.744133710861206], [0.5968034863471985, 0.7408671379089355], [0.5964038968086243, 0.7378052473068237], [0.5959067344665527, 0.7345693111419678], [0.5955764055252075, 0.7317754030227661], [0.5951738357543945, 0.7287001609802246], [0.5948591232299805, 0.7258885502815247], [0.5944345593452454, 0.7228319644927979], [0.594110906124115, 0.7200687527656555], [0.5936817526817322, 0.7170544266700745], [0.5933374166488647, 0.714296817779541], [0.5928824543952942, 0.7113181352615356], [0.5925121307373047, 0.7085698843002319], [0.5920357704162598, 0.7056182622909546], [0.5916354656219482, 0.7028625011444092], [0.5911226868629456, 0.699901282787323], [0.5906984210014343, 0.6971693634986877], [0.590173065662384, 0.6942310929298401], [0.5897411108016968, 0.6914877891540527], [0.589207649230957, 0.6885522603988647], [0.5887722373008728, 0.685825765132904], [0.5882380604743958, 0.6829158067703247], [0.5877940654754639, 0.6801820993423462], [0.5872602462768555, 0.6772684454917908], [0.5868196487426758, 0.6745178699493408], [0.5862962007522583, 0.6715869903564453], [0.5858790278434753, 0.6688247919082642], [0.5853924751281738, 0.6659274101257324], [0.5849841237068176, 0.6631984114646912], [0.5845066905021667, 0.660345196723938], [0.5840845704078674, 0.6576628684997559], [0.5835812091827393, 0.6548154354095459], [0.583116888999939, 0.6520670652389526], [0.5825697779655457, 0.6491085290908813], [0.5821151733398438, 0.6462323665618896], [0.5816610455513, 0.6431479454040527], [0.5813567638397217, 0.6402541399002075], [0.5810494422912598, 0.6372445821762085], [0.5807873010635376, 0.6343262195587158], [0.580437183380127, 0.6312606334686279], [0.5801036953926086, 0.6282298564910889], [0.5796804428100586, 0.6249809265136719], [0.5793299674987793, 0.621679425239563], [0.5789216756820679, 0.6181964874267578], [0.5786744356155396, 0.6146706342697144], [0.5784045457839966, 0.6110272407531738], [0.5782470703125, 0.6074597835540771], [0.5780704021453857, 0.603837251663208], [0.5780109763145447, 0.6002603769302368], [0.5779213905334473, 0.5966759920120239], [0.5779299736022949, 0.593122124671936], [0.5778810381889343, 0.5895863175392151], [0.5778999328613281, 0.5861595869064331], [0.5778499245643616, 
0.5827450752258301], [0.5778561234474182, 0.5794479846954346], [0.5778177976608276, 0.5761876702308655], [0.5778415203094482, 0.572907030582428], [0.5777987837791443, 0.569673478603363], [0.5777745246887207, 0.566405177116394], [0.5776917934417725, 0.5630863308906555], [0.5776918530464172, 0.5599296689033508], [0.577601432800293, 0.5569100379943848], [0.5772883296012878, 0.5542051792144775], [0.5769609212875366, 0.5509149432182312], [0.5777923464775085, 0.5456725358963013], [0.5790004730224609, 0.5431102514266968], [0.5802256464958191, 0.5401533842086792], [0.5812150835990906, 0.5369963645935059], [0.5822353959083557, 0.5339004397392273], [0.5832100510597229, 0.5306536555290222], [0.5840356349945068, 0.5279567241668701], [0.5849723219871521, 0.5251332521438599], [0.5854811668395996, 0.5233478546142578], [0.5860592126846313, 0.5211760401725769], [0.5865815877914429, 0.5191845893859863], [0.5870747566223145, 0.5168868899345398], [0.5875993371009827, 0.5145970582962036], [0.5880489349365234, 0.5120885372161865], [0.5885859727859497, 0.5094659328460693], [0.5890423059463501, 0.5067317485809326], [0.5896375775337219, 0.5037798881530762], [0.5901528596878052, 0.5008252859115601], [0.5908967852592468, 0.497464120388031], [0.591548502445221, 0.49416738748550415], [0.5924206972122192, 0.4905405044555664], [0.5932044386863708, 0.48698967695236206], [0.5941787958145142, 0.48317015171051025], [0.5950913429260254, 0.4794206917285919], [0.5962083339691162, 0.47544240951538086], [0.5972753763198853, 0.4715713858604431], [0.5985601544380188, 0.46746259927749634], [0.5997787714004517, 0.46353042125701904], [0.6012083292007446, 0.4593525528907776], [0.6025490760803223, 0.4553719758987427], [0.6213759183883667, 0.8372606039047241], [0.6209279894828796, 0.8344777822494507], [0.6205325126647949, 0.8321661949157715], [0.6199100017547607, 0.8281041383743286], [0.6192914247512817, 0.8245960474014282], [0.6185095310211182, 0.8210256695747375], [0.6178180575370789, 0.8178734183311462], [0.6169857382774353, 0.8145915269851685], [0.6162132024765015, 0.8116625547409058], [0.615276575088501, 0.8084689974784851], [0.6144244074821472, 0.8056095838546753], [0.6133999228477478, 0.802487313747406], [0.6124749183654785, 0.7996686697006226], [0.6114346385002136, 0.7966504096984863], [0.6105142831802368, 0.793769359588623], [0.6096222400665283, 0.7906537055969238], [0.6088925004005432, 0.7872961759567261], [0.6082090735435486, 0.7842689752578735], [0.6080605387687683, 0.7822661995887756], [0.6079167127609253, 0.7798632383346558], [0.6075599193572998, 0.777550458908081], [0.6071106791496277, 0.7749300003051758], [0.6066856980323792, 0.7726117968559265], [0.6062284111976624, 0.7699520587921143], [0.6058335304260254, 0.7676529288291931], [0.6053938865661621, 0.7651957273483276], [0.604997992515564, 0.763055682182312], [0.6045321226119995, 0.7608270049095154], [0.6040874123573303, 0.7588809728622437], [0.603531002998352, 0.7568790912628174], [0.6029893159866333, 0.7551848888397217], [0.6027269959449768, 0.7538942098617554], [0.6025153994560242, 0.7526528239250183], [0.601758599281311, 0.7495497465133667], [0.6012489795684814, 0.7466107606887817], [0.6007029414176941, 0.743371307849884], [0.6003206372261047, 0.7404359579086304], [0.5998278260231018, 0.737065315246582], [0.5994288921356201, 0.7340837121009827], [0.598974883556366, 0.7311170101165771], [0.5986596941947937, 0.7283275127410889], [0.5982552170753479, 0.7252421379089355], [0.5979599952697754, 0.7224419116973877], [0.5975714325904846, 0.7194128036499023], 
[0.59725421667099, 0.7166074514389038], [0.5968281030654907, 0.7135963439941406], [0.596479058265686, 0.7108092904090881], [0.5960233211517334, 0.7078306674957275], [0.5956597328186035, 0.7050735950469971], [0.5951865911483765, 0.7020789384841919], [0.5947959423065186, 0.6992951035499573], [0.5943096876144409, 0.6963498592376709], [0.5939162969589233, 0.6935991644859314], [0.5934239029884338, 0.6906473636627197], [0.593019962310791, 0.6878849864006042], [0.5925207138061523, 0.6849640607833862], [0.5921139717102051, 0.6822230815887451], [0.5916131734848022, 0.6793041229248047], [0.5912019610404968, 0.6765596270561218], [0.590703010559082, 0.6736444234848022], [0.5903013944625854, 0.6708908081054688], [0.5898202657699585, 0.6679842472076416], [0.5894278287887573, 0.6652442812919617], [0.588962972164154, 0.662360429763794], [0.5885684490203857, 0.6596396565437317], [0.5880993604660034, 0.6567822694778442], [0.5876867771148682, 0.6540595889091492], [0.5872008204460144, 0.6511424779891968], [0.5867903828620911, 0.6483426094055176], [0.5863194465637207, 0.6453689336776733], [0.5859538316726685, 0.6425625085830688], [0.5855757594108582, 0.639625072479248], [0.5852668881416321, 0.6367787718772888], [0.5848898887634277, 0.6337138414382935], [0.5845756530761719, 0.630753219127655], [0.5841916799545288, 0.6275783777236938], [0.583884596824646, 0.6244668364524841], [0.5835074186325073, 0.6210817098617554], [0.5832396745681763, 0.6177600622177124], [0.5828989744186401, 0.614237904548645], [0.5826963782310486, 0.61076420545578], [0.5824876427650452, 0.6071913242340088], [0.5823848247528076, 0.6036930680274963], [0.5822503566741943, 0.6001246571540833], [0.5822249054908752, 0.5966383218765259], [0.5821661949157715, 0.5930911302566528], [0.5822062492370605, 0.5896017551422119], [0.5821943283081055, 0.5861600041389465], [0.5822598934173584, 0.5827639102935791], [0.5822672843933105, 0.5794549584388733], [0.5823200345039368, 0.576200008392334], [0.5823315978050232, 0.572958767414093], [0.5824034214019775, 0.5696624517440796], [0.5824085474014282, 0.5663901567459106], [0.5824885368347168, 0.5630911588668823], [0.5825204849243164, 0.5599921345710754], [0.5825151205062866, 0.5566498041152954], [0.5825063586235046, 0.553728461265564], [0.5825676918029785, 0.550882875919342], [0.5831307172775269, 0.5482833385467529], [0.5839972496032715, 0.5452796220779419], [0.5848396420478821, 0.5418754816055298], [0.5857715010643005, 0.5386556386947632], [0.5865815877914429, 0.5354043245315552], [0.5874230861663818, 0.5320266485214233], [0.5881024599075317, 0.5295100808143616], [0.5889847278594971, 0.5268746614456177], [0.5897021889686584, 0.5247640609741211], [0.5903170704841614, 0.5226452350616455], [0.590872049331665, 0.5203808546066284], [0.5914294123649597, 0.5181351900100708], [0.5919320583343506, 0.5156572461128235], [0.5924794673919678, 0.5131554007530212], [0.5929586291313171, 0.5104764103889465], [0.5935152769088745, 0.507708728313446], [0.5940090417861938, 0.5048328638076782], [0.5946273803710938, 0.5018261671066284], [0.5951956510543823, 0.4986001253128052], [0.5959240198135376, 0.4952208399772644], [0.5966001749038696, 0.4917770028114319], [0.5974736213684082, 0.48813164234161377], [0.5982983112335205, 0.48451417684555054], [0.5993262529373169, 0.48069727420806885], [0.6003104448318481, 0.47694242000579834], [0.6014919877052307, 0.47299498319625854], [0.6026328802108765, 0.46913063526153564], [0.603961706161499, 0.4650840163230896], [0.6052334308624268, 0.4611489772796631], [0.606694757938385, 
0.45703303813934326], [0.6249090433120728, 0.8363593816757202], [0.6245394349098206, 0.8339875936508179], [0.6240916848182678, 0.8312009572982788], [0.6233487725257874, 0.827531099319458], [0.6225050091743469, 0.8235902786254883], [0.621826171875, 0.8203209638595581], [0.6209756135940552, 0.8167670965194702], [0.6202444434165955, 0.8137472867965698], [0.6193186640739441, 0.8104032278060913], [0.6184922456741333, 0.8074892163276672], [0.6174650192260742, 0.8042405247688293], [0.6165061593055725, 0.8013612031936646], [0.615300178527832, 0.7982215881347656], [0.6143434047698975, 0.7955973148345947], [0.6132633686065674, 0.7925096154212952], [0.6125620603561401, 0.7899566292762756], [0.6118404865264893, 0.7864649295806885], [0.61170893907547, 0.7840477228164673], [0.6115646362304688, 0.7818865776062012], [0.6111999154090881, 0.7793874740600586], [0.6107408404350281, 0.7767360806465149], [0.6103185415267944, 0.7743697166442871], [0.6098556518554688, 0.771763801574707], [0.609434962272644, 0.7694212794303894], [0.6089696884155273, 0.7668237686157227], [0.608579695224762, 0.7646369338035583], [0.6081076264381409, 0.7621675133705139], [0.6077242493629456, 0.7601655721664429], [0.6072400808334351, 0.7578438520431519], [0.6068743467330933, 0.7560166120529175], [0.6064692735671997, 0.7539077997207642], [0.6062454581260681, 0.7531021237373352], [0.605786144733429, 0.75153648853302], [0.6052798628807068, 0.748868465423584], [0.6046726703643799, 0.7457348108291626], [0.6042340993881226, 0.7428736686706543], [0.6037381887435913, 0.739716649055481], [0.6033844947814941, 0.736664891242981], [0.6029294729232788, 0.7333210706710815], [0.6025800704956055, 0.7306432127952576], [0.6021045446395874, 0.7276383638381958], [0.6018041372299194, 0.7248983383178711], [0.6014575362205505, 0.7218449115753174], [0.601189136505127, 0.7190337181091309], [0.6007920503616333, 0.7159345149993896], [0.6004665493965149, 0.7131338119506836], [0.6000258922576904, 0.7100887894630432], [0.5996761322021484, 0.7073177099227905], [0.5992237329483032, 0.7043373584747314], [0.5988595485687256, 0.7015359401702881], [0.5984017848968506, 0.6985359787940979], [0.5980390310287476, 0.695775032043457], [0.5975788831710815, 0.6928240656852722], [0.5972048044204712, 0.6900449395179749], [0.5967381000518799, 0.6870948076248169], [0.5963646173477173, 0.6843335032463074], [0.5958976149559021, 0.6814171671867371], [0.5955163836479187, 0.6786516904830933], [0.5950461626052856, 0.6757400035858154], [0.594663143157959, 0.672980546951294], [0.594202995300293, 0.6700881123542786], [0.5938318967819214, 0.667329728603363], [0.5933948159217834, 0.6644580364227295], [0.5930337905883789, 0.6617029309272766], [0.5926019549369812, 0.6588339805603027], [0.5922300219535828, 0.6560866832733154], [0.591775119304657, 0.6532031893730164], [0.5913791060447693, 0.6504273414611816], [0.5909334421157837, 0.6475305557250977], [0.5905971527099609, 0.6447409391403198], [0.5902163982391357, 0.6418287754058838], [0.5898778438568115, 0.6390405893325806], [0.5894681215286255, 0.6360816955566406], [0.5891234874725342, 0.6331601142883301], [0.5887311100959778, 0.6301062107086182], [0.5884076356887817, 0.6270571947097778], [0.5880325436592102, 0.623882532119751], [0.5877485275268555, 0.6206386685371399], [0.5874072909355164, 0.6172707080841064], [0.587189793586731, 0.6138901710510254], [0.5869320034980774, 0.6104234457015991], [0.5867787599563599, 0.6069907546043396], [0.586609423160553, 0.6034929752349854], [0.5865620374679565, 0.6000503301620483], [0.5864821672439575, 
0.5965578556060791], [0.586502194404602, 0.5931121110916138], [0.5864953994750977, 0.5896362066268921], [0.5865889191627502, 0.586249828338623], [0.5866393446922302, 0.5828632116317749], [0.5867640972137451, 0.5795682668685913], [0.5868276357650757, 0.5762885808944702], [0.5869419574737549, 0.5730834007263184], [0.5870229601860046, 0.5698071718215942], [0.5872209072113037, 0.5666095614433289], [0.5874096155166626, 0.5633680820465088], [0.5876428484916687, 0.5602285861968994], [0.5879566669464111, 0.5570508241653442], [0.5883464217185974, 0.5542688965797424], [0.5887529253959656, 0.5516524314880371], [0.5888471603393555, 0.5493580102920532], [0.5889924764633179, 0.5463781952857971], [0.58952796459198, 0.5432429909706116], [0.5901815891265869, 0.5400354862213135], [0.5910424590110779, 0.536838173866272], [0.5919560194015503, 0.5334652066230774], [0.5926083922386169, 0.5310748219490051], [0.5932914018630981, 0.5286152362823486], [0.593948245048523, 0.526268720626831], [0.5945222973823547, 0.5239086747169495], [0.5950716733932495, 0.5216053128242493], [0.5955613255500793, 0.5192453265190125], [0.5961320400238037, 0.5167420506477356], [0.5966300964355469, 0.514196515083313], [0.5972265601158142, 0.5114890336990356], [0.5977399349212646, 0.5087094902992249], [0.5983337759971619, 0.505790114402771], [0.5988357067108154, 0.502837061882019], [0.5995093584060669, 0.49958354234695435], [0.6001124978065491, 0.49633270502090454], [0.600895881652832, 0.49284911155700684], [0.6016281843185425, 0.48940181732177734], [0.602561891078949, 0.48572254180908203], [0.6034443378448486, 0.4821377992630005], [0.6045418977737427, 0.47830313444137573], [0.6055819988250732, 0.474595844745636], [0.6068401336669922, 0.4706418514251709], [0.6080325841903687, 0.4668276309967041], [0.60942542552948, 0.46278250217437744], [0.6107361912727356, 0.4588991701602936], [0.628302812576294, 0.8358815908432007], [0.6277538537979126, 0.8330575227737427], [0.6272416114807129, 0.830659806728363], [0.6264581680297852, 0.8265631198883057], [0.6257372498512268, 0.8229498267173767], [0.6248956918716431, 0.8192825317382812], [0.6241560578346252, 0.8160185813903809], [0.6232748627662659, 0.8125993609428406], [0.6224740147590637, 0.8095157146453857], [0.6215079426765442, 0.806206226348877], [0.6206142902374268, 0.8032025098800659], [0.6195076704025269, 0.7998871803283691], [0.6184157729148865, 0.7968875169754028], [0.6173133254051208, 0.7941014766693115], [0.616564929485321, 0.7915472984313965], [0.6158341765403748, 0.7887580394744873], [0.6153594851493835, 0.7860755920410156], [0.6151136755943298, 0.783757746219635], [0.6148196458816528, 0.7813973426818848], [0.6143587827682495, 0.7786484956741333], [0.6139609813690186, 0.7761682271957397], [0.6134852170944214, 0.7735353708267212], [0.6130577921867371, 0.7711882591247559], [0.6125761270523071, 0.7685834765434265], [0.6121741533279419, 0.7662820219993591], [0.6117290258407593, 0.763791561126709], [0.6113491058349609, 0.7615870237350464], [0.610917866230011, 0.7592620849609375], [0.6105494499206543, 0.7572023272514343], [0.6100898385047913, 0.7550203800201416], [0.6096237897872925, 0.7531664371490479], [0.6094333529472351, 0.7520840167999268], [0.6092946529388428, 0.7508975267410278], [0.6087210774421692, 0.7478904724121094], [0.6082596182823181, 0.7451522350311279], [0.6077218055725098, 0.7420466542243958], [0.6073142290115356, 0.739234209060669], [0.6068256497383118, 0.7360051870346069], [0.6065553426742554, 0.7330049276351929], [0.6062116026878357, 0.7298887968063354], [0.6058911085128784, 
0.7271430492401123], [0.6054621934890747, 0.7242286801338196], [0.6051679849624634, 0.7215096950531006], [0.6047616004943848, 0.7183732986450195], [0.604422926902771, 0.7155048847198486], [0.6039820909500122, 0.7124329805374146], [0.6036275625228882, 0.7096085548400879], [0.6031727194786072, 0.7065983414649963], [0.6028053760528564, 0.7038215398788452], [0.6023393273353577, 0.7008225321769714], [0.6019843220710754, 0.6980350613594055], [0.6015393733978271, 0.6950616836547852], [0.6011757850646973, 0.6922869682312012], [0.6007174253463745, 0.6893267631530762], [0.6003590822219849, 0.686553955078125], [0.5999054908752441, 0.6836100816726685], [0.5995416641235352, 0.680852472782135], [0.5990868210792542, 0.6779255867004395], [0.5987278819084167, 0.6751662492752075], [0.598281741142273, 0.6722524166107178], [0.5979294776916504, 0.6695168614387512], [0.5974975228309631, 0.6666215658187866], [0.5971486568450928, 0.6638891696929932], [0.5967248678207397, 0.6610070466995239], [0.5963848233222961, 0.6582682132720947], [0.5959726572036743, 0.6553746461868286], [0.5956282615661621, 0.6526006460189819], [0.5952279567718506, 0.6497064828872681], [0.5949169397354126, 0.6469666957855225], [0.5945330262184143, 0.6440783739089966], [0.5942146182060242, 0.641295313835144], [0.5938360095024109, 0.6383579969406128], [0.5935079455375671, 0.6355165839195251], [0.5931177139282227, 0.632509708404541], [0.5928102135658264, 0.6295962929725647], [0.59244304895401, 0.6264616250991821], [0.5921515822410583, 0.6234071850776672], [0.5917973518371582, 0.6201379299163818], [0.5915709137916565, 0.6169075965881348], [0.5912761688232422, 0.6134943962097168], [0.5910946726799011, 0.6101642847061157], [0.5908997058868408, 0.6067392826080322], [0.5908211469650269, 0.6033735275268555], [0.5907062888145447, 0.5999315977096558], [0.5906925201416016, 0.5965365767478943], [0.5906526446342468, 0.5931093692779541], [0.5907301902770996, 0.5897436738014221], [0.5907740592956543, 0.5863553285598755], [0.5908961296081543, 0.5830204486846924], [0.5909727215766907, 0.5797344446182251], [0.5911330580711365, 0.5764772891998291], [0.5912461280822754, 0.5732743144035339], [0.5914456844329834, 0.5700807571411133], [0.5916062593460083, 0.5669524669647217], [0.5918569564819336, 0.5637601017951965], [0.5921227931976318, 0.5607292652130127], [0.5924685001373291, 0.557725191116333], [0.5928035378456116, 0.5550597906112671], [0.5931122899055481, 0.5523017644882202], [0.5934644341468811, 0.5496575832366943], [0.5937833189964294, 0.5470755100250244], [0.5941494703292847, 0.5442891716957092], [0.5947452783584595, 0.5412250757217407], [0.5953167676925659, 0.5380910634994507], [0.5961205959320068, 0.5349223017692566], [0.5968130826950073, 0.5324726700782776], [0.5974715352058411, 0.5300197601318359], [0.5980782508850098, 0.527547299861908], [0.5986818671226501, 0.5251317024230957], [0.5992152094841003, 0.5227325558662415], [0.5997588634490967, 0.520331859588623], [0.6002464294433594, 0.5178045630455017], [0.6008058786392212, 0.5152308940887451], [0.6013050079345703, 0.5124952793121338], [0.6018882393836975, 0.5096697807312012], [0.6024112701416016, 0.5067669153213501], [0.6030246019363403, 0.5037461519241333], [0.6035774946212769, 0.5006214380264282], [0.6042943000793457, 0.49732834100723267], [0.604966938495636, 0.49402445554733276], [0.6058171987533569, 0.49053052067756653], [0.6066293120384216, 0.4870665967464447], [0.6076226830482483, 0.4834059476852417], [0.6085729002952576, 0.4798045754432678], [0.6097266674041748, 0.47602379322052], [0.61083984375, 
0.47229892015457153], [0.612147867679596, 0.4683809280395508], [0.6133989691734314, 0.4645651578903198], [0.6148381233215332, 0.46055418252944946], [0.63154536485672, 0.8349639177322388], [0.6311036348342896, 0.832472026348114], [0.6305593848228455, 0.8295959830284119], [0.629723846912384, 0.8259246945381165], [0.6288068890571594, 0.8219318389892578], [0.628079891204834, 0.8185856342315674], [0.6271898150444031, 0.81493079662323], [0.6264230012893677, 0.8117859959602356], [0.6254811882972717, 0.8083274364471436], [0.6246438026428223, 0.8052650690078735], [0.6236122846603394, 0.8018835783004761], [0.6226104497909546, 0.798757791519165], [0.6212749481201172, 0.7952890396118164], [0.620492696762085, 0.7929872274398804], [0.6195237040519714, 0.7902431488037109], [0.6189688444137573, 0.7881088256835938], [0.618497371673584, 0.7853833436965942], [0.6182781457901001, 0.7835677862167358], [0.6179227828979492, 0.7806448936462402], [0.6175159215927124, 0.7781322002410889], [0.6170204877853394, 0.7753894329071045], [0.6166175007820129, 0.7729894518852234], [0.6161401867866516, 0.7703533172607422], [0.6157431602478027, 0.7680476307868958], [0.6152878403663635, 0.7654694318771362], [0.6149309277534485, 0.7632793188095093], [0.6145164966583252, 0.7607811093330383], [0.6142120957374573, 0.7587423324584961], [0.6138488054275513, 0.7563791275024414], [0.6135647296905518, 0.7543807029724121], [0.6131924986839294, 0.7520325183868408], [0.6130733489990234, 0.7514806985855103], [0.6127015352249146, 0.7499598860740662], [0.6122477054595947, 0.7472931146621704], [0.611717164516449, 0.7442792654037476], [0.6113176345825195, 0.7415407299995422], [0.6108345985412598, 0.7384346127510071], [0.6104946732521057, 0.7356090545654297], [0.6101337671279907, 0.7324919104576111], [0.6099154949188232, 0.7295749187469482], [0.6095647215843201, 0.7264281511306763], [0.6092740297317505, 0.7237669229507446], [0.6088330149650574, 0.7208024263381958], [0.6084564328193665, 0.7179051637649536], [0.6079831123352051, 0.7147819995880127], [0.6076074838638306, 0.7119385004043579], [0.6071351170539856, 0.7088847756385803], [0.6067683696746826, 0.7060906291007996], [0.6062995791435242, 0.7030831575393677], [0.605930745601654, 0.7003076076507568], [0.6054790616035461, 0.6973425149917603], [0.6051299571990967, 0.694557249546051], [0.604683518409729, 0.6915757656097412], [0.6043316721916199, 0.6888062357902527], [0.6038882732391357, 0.6858554482460022], [0.6035371422767639, 0.6830770969390869], [0.6030951738357544, 0.6801443099975586], [0.6027438640594482, 0.6773823499679565], [0.6023095846176147, 0.6744744777679443], [0.6019749641418457, 0.6717193722724915], [0.601563036441803, 0.6688359975814819], [0.6012307405471802, 0.6660823822021484], [0.600821852684021, 0.6632086038589478], [0.6004987955093384, 0.6604758501052856], [0.6001102924346924, 0.657609224319458], [0.5998001098632812, 0.6548520922660828], [0.5994393825531006, 0.651962399482727], [0.5991674065589905, 0.6492152214050293], [0.5988303422927856, 0.6463501453399658], [0.5985464453697205, 0.6435515880584717], [0.5981804132461548, 0.6406499147415161], [0.5978693962097168, 0.637826144695282], [0.5974926948547363, 0.6348618865013123], [0.5971971750259399, 0.6320087909698486], [0.5968407988548279, 0.6289913058280945], [0.5965481996536255, 0.6259925365447998], [0.5961911678314209, 0.6228391528129578], [0.5959270000457764, 0.6197347640991211], [0.595611572265625, 0.6164792776107788], [0.5954259037971497, 0.6132007241249084], [0.5952084064483643, 0.6098589897155762], [0.5950964689254761, 
0.6065779328346252], [0.594953715801239, 0.6032043695449829], [0.5949122905731201, 0.599863588809967], [0.5948350429534912, 0.596473753452301], [0.5948767066001892, 0.5931720733642578], [0.5948947072029114, 0.5898301601409912], [0.5950180292129517, 0.5865233540534973], [0.5951055288314819, 0.5831824541091919], [0.5952775478363037, 0.5799681544303894], [0.595406174659729, 0.5767152309417725], [0.5956188440322876, 0.5735774040222168], [0.5957905054092407, 0.5704058408737183], [0.5960513353347778, 0.5673614740371704], [0.5963295698165894, 0.5642787218093872], [0.5966867804527283, 0.5613741278648376], [0.5970749855041504, 0.5584875345230103], [0.5974563956260681, 0.5558167099952698], [0.5978572368621826, 0.5530210733413696], [0.5981961488723755, 0.55048006772995], [0.5985358953475952, 0.5476837158203125], [0.5988295674324036, 0.5450891256332397], [0.5991570949554443, 0.5421240329742432], [0.5997177958488464, 0.539211630821228], [0.600356936454773, 0.5363175272941589], [0.6009729504585266, 0.5337649583816528], [0.6015505194664001, 0.5312447547912598], [0.6021566390991211, 0.5287683010101318], [0.6027183532714844, 0.5262871980667114], [0.6032893061637878, 0.5238358378410339], [0.6037864089012146, 0.5213512182235718], [0.6043268442153931, 0.5187855958938599], [0.6047885417938232, 0.5162129402160645], [0.6053639054298401, 0.513427734375], [0.6058661937713623, 0.5106532573699951], [0.6064823269844055, 0.5076954364776611], [0.6070258617401123, 0.5047317743301392], [0.6077014207839966, 0.5015780925750732], [0.6083194017410278, 0.49845027923583984], [0.6091156005859375, 0.49511075019836426], [0.6098649501800537, 0.4918157160282135], [0.610798180103302, 0.48828476667404175], [0.6116666793823242, 0.4848196506500244], [0.6127258539199829, 0.481167197227478], [0.6137377023696899, 0.4776144027709961], [0.614971399307251, 0.4738121032714844], [0.6161466836929321, 0.4701225161552429], [0.6175327301025391, 0.46622434258461], [0.6188516616821289, 0.46242547035217285], [0.634810209274292, 0.834423840045929], [0.6341856718063354, 0.8315032720565796], [0.6336036324501038, 0.8289793133735657], [0.632735013961792, 0.824905514717102], [0.6319513916969299, 0.8212710618972778], [0.6310679912567139, 0.8175469636917114], [0.6302930116653442, 0.8141940236091614], [0.6293932199478149, 0.810670018196106], [0.6285954117774963, 0.8074911832809448], [0.6276445388793945, 0.8040469884872437], [0.6267606616020203, 0.8008946776390076], [0.6256790161132812, 0.7973591089248657], [0.6247224807739258, 0.7940216064453125], [0.6238367557525635, 0.7914241552352905], [0.6231905221939087, 0.7891031503677368], [0.6224563121795654, 0.7867501974105835], [0.6220579743385315, 0.7849953770637512], [0.6218245029449463, 0.7829397916793823], [0.6213139295578003, 0.7800130844116211], [0.620721161365509, 0.7772523164749146], [0.6202647686004639, 0.7748111486434937], [0.619741678237915, 0.7721525430679321], [0.6193267107009888, 0.7698030471801758], [0.6188583970069885, 0.7672335505485535], [0.6184793710708618, 0.7649558186531067], [0.6180635690689087, 0.7625070214271545], [0.6177470684051514, 0.7603363990783691], [0.6174007654190063, 0.7580161094665527], [0.6171280145645142, 0.7559667229652405], [0.6168025135993958, 0.7536637187004089], [0.6165435910224915, 0.7516018152236938], [0.6163955330848694, 0.7504866123199463], [0.616214394569397, 0.749298095703125], [0.6156523823738098, 0.7464137077331543], [0.6152541041374207, 0.7437690496444702], [0.6147788763046265, 0.7407543659210205], [0.6144201755523682, 0.7379890084266663], [0.6139974594116211, 
0.7349361181259155], [0.6136934161186218, 0.7321822047233582], [0.61330246925354, 0.729045033454895], [0.6130667924880981, 0.7261438369750977], [0.6127089858055115, 0.7230933904647827], [0.612377405166626, 0.7203256487846375], [0.6119123697280884, 0.7171931266784668], [0.6115390062332153, 0.7142887115478516], [0.6110692024230957, 0.7112127542495728], [0.6106968522071838, 0.7083779573440552], [0.6102259159088135, 0.7053673267364502], [0.609855592250824, 0.70256507396698], [0.6093922853469849, 0.6995905041694641], [0.6090351939201355, 0.6968421936035156], [0.608582079410553, 0.6938692331314087], [0.6082295775413513, 0.6910791397094727], [0.607788622379303, 0.6881231069564819], [0.6074455976486206, 0.685355544090271], [0.6070132851600647, 0.6824055910110474], [0.6066782474517822, 0.6796387434005737], [0.606255292892456, 0.6767114400863647], [0.6059286594390869, 0.6739693880081177], [0.6055150032043457, 0.6710705161094666], [0.6051924824714661, 0.6683355569839478], [0.6047869920730591, 0.6654396653175354], [0.604472279548645, 0.662710428237915], [0.6040798425674438, 0.6598466634750366], [0.6037754416465759, 0.6571347713470459], [0.6034026145935059, 0.6542549133300781], [0.603114664554596, 0.6515294313430786], [0.6027663946151733, 0.6486697196960449], [0.6024824380874634, 0.6459109783172607], [0.6021163463592529, 0.6429920196533203], [0.6018298268318176, 0.6401968002319336], [0.6014723777770996, 0.6372551918029785], [0.601193368434906, 0.6344223618507385], [0.600848913192749, 0.6314613819122314], [0.600566029548645, 0.6285605430603027], [0.6002215147018433, 0.6254783868789673], [0.5999760627746582, 0.6224602460861206], [0.599677324295044, 0.6192904710769653], [0.5994809865951538, 0.6161751747131348], [0.5992270708084106, 0.6128803491592407], [0.5990909934043884, 0.6096707582473755], [0.59893399477005, 0.6063815355300903], [0.5988807678222656, 0.603118360042572], [0.5987948179244995, 0.5997718572616577], [0.5988240242004395, 0.5964963436126709], [0.5988205671310425, 0.5932092666625977], [0.5989183187484741, 0.5899636149406433], [0.5989822745323181, 0.5866718292236328], [0.5991557836532593, 0.5834212303161621], [0.5992944240570068, 0.5802134275436401], [0.599524736404419, 0.5770248174667358], [0.5997133255004883, 0.5739181637763977], [0.600004255771637, 0.5708138942718506], [0.6002466678619385, 0.5678157806396484], [0.6005676984786987, 0.5648453235626221], [0.6008857488632202, 0.56203293800354], [0.6012616753578186, 0.5591968297958374], [0.6016103625297546, 0.5565049648284912], [0.6019980907440186, 0.5537793040275574], [0.6023706197738647, 0.551121711730957], [0.6027626395225525, 0.5484075546264648], [0.6031482815742493, 0.5457104444503784], [0.6035952568054199, 0.5429666638374329], [0.6040295362472534, 0.5402642488479614], [0.6045979261398315, 0.5374956130981445], [0.6051408648490906, 0.5349175930023193], [0.6057004928588867, 0.5323804616928101], [0.6062095165252686, 0.5298924446105957], [0.6067690849304199, 0.527395486831665], [0.6072768568992615, 0.5248718857765198], [0.6078137159347534, 0.5223333239555359], [0.6082946062088013, 0.5197575092315674], [0.6088343262672424, 0.5171212553977966], [0.6093150973320007, 0.5143988132476807], [0.6098999977111816, 0.5115768909454346], [0.610431432723999, 0.5086978077888489], [0.6110811233520508, 0.505675733089447], [0.6116722822189331, 0.5026512742042542], [0.612395703792572, 0.4994674324989319], [0.6130675673484802, 0.4963204860687256], [0.613928496837616, 0.49298542737960815], [0.614751398563385, 0.48961585760116577], [0.6157336831092834, 
0.48607972264289856], [0.6166570782661438, 0.4826517105102539], [0.6177804470062256, 0.4790291488170624], [0.6188604235649109, 0.4754410982131958], [0.6201439499855042, 0.47167426347732544], [0.6213734149932861, 0.46798327565193176], [0.6228194236755371, 0.464105486869812], [0.6379181742668152, 0.8334560990333557], [0.6373981833457947, 0.8308624625205994], [0.6367691159248352, 0.8278906345367432], [0.6358991265296936, 0.824234127998352], [0.6349488496780396, 0.8202380537986755], [0.6341760158538818, 0.8168389797210693], [0.6332606077194214, 0.8131289482116699], [0.6325002908706665, 0.809906005859375], [0.6315925717353821, 0.8063621520996094], [0.6307923197746277, 0.8031864166259766], [0.6298102140426636, 0.7996410727500916], [0.6289162039756775, 0.7963780760765076], [0.6278026700019836, 0.7927220463752747], [0.62724369764328, 0.7904199361801147], [0.6265641450881958, 0.7878519296646118], [0.62620609998703, 0.7858725786209106], [0.6258535385131836, 0.7839683890342712], [0.6253734827041626, 0.7822191715240479], [0.6244978904724121, 0.7789387702941895], [0.6239676475524902, 0.7765860557556152], [0.6233880519866943, 0.7739076614379883], [0.6229438185691833, 0.7715590000152588], [0.6224396228790283, 0.7689653038978577], [0.6220574378967285, 0.7667074203491211], [0.6216185688972473, 0.7641685009002686], [0.6213087439537048, 0.7620550394058228], [0.6209476590156555, 0.7596292495727539], [0.6206919550895691, 0.7576208710670471], [0.6203838586807251, 0.7552984356880188], [0.6201475858688354, 0.7532843947410583], [0.6198602914810181, 0.7509658336639404], [0.6197495460510254, 0.7500704526901245], [0.6194893717765808, 0.7484483122825623], [0.6191685199737549, 0.7459303140640259], [0.6187375783920288, 0.7429821491241455], [0.6183820366859436, 0.7402834892272949], [0.6179404854774475, 0.737261176109314], [0.6176143884658813, 0.7345334887504578], [0.6171994209289551, 0.7315106987953186], [0.6168793439865112, 0.7286819219589233], [0.6164902448654175, 0.7256207466125488], [0.6162234544754028, 0.7227240800857544], [0.6158313155174255, 0.7196441888809204], [0.6154766082763672, 0.716714084148407], [0.6150012612342834, 0.7135694622993469], [0.614631175994873, 0.7107155323028564], [0.614158034324646, 0.707650899887085], [0.6137861609458923, 0.7048488855361938], [0.6133149266242981, 0.7018454074859619], [0.6129541397094727, 0.6990774273872375], [0.6124945878982544, 0.6961299777030945], [0.6121233701705933, 0.6933525204658508], [0.6116675138473511, 0.6904011964797974], [0.6113314628601074, 0.6876389980316162], [0.6109085083007812, 0.6846957206726074], [0.6105763912200928, 0.6819188594818115], [0.6101622581481934, 0.6789968013763428], [0.6098484992980957, 0.6762315034866333], [0.609447717666626, 0.673333466053009], [0.6091338396072388, 0.6705814599990845], [0.6087364554405212, 0.6677056550979614], [0.6084302663803101, 0.6649596691131592], [0.60804682970047, 0.6620925664901733], [0.6077476739883423, 0.6593672037124634], [0.6073741316795349, 0.6565322875976562], [0.6070775389671326, 0.6537935137748718], [0.6067160367965698, 0.6509560346603394], [0.6064244508743286, 0.6482215523719788], [0.6060559153556824, 0.6453413367271423], [0.6057754755020142, 0.6425576210021973], [0.6054332852363586, 0.6396548748016357], [0.605170726776123, 0.6368325352668762], [0.6048389673233032, 0.6338969469070435], [0.6045706272125244, 0.6310329437255859], [0.604230523109436, 0.6280393600463867], [0.6039807200431824, 0.6250994205474854], [0.6036837100982666, 0.6220290660858154], [0.6034815907478333, 0.6189804077148438], [0.603227972984314, 
0.6158262491226196], [0.6030895709991455, 0.6126627326011658], [0.6029149293899536, 0.6094367504119873], [0.6028329133987427, 0.6062648296356201], [0.6027277112007141, 0.6030091643333435], [0.6027477383613586, 0.5997862815856934], [0.6027384400367737, 0.5965148210525513], [0.6028267741203308, 0.5933157205581665], [0.6028736233711243, 0.5900801420211792], [0.6030247211456299, 0.5868798494338989], [0.6031492948532104, 0.5836726427078247], [0.6033797264099121, 0.5805370807647705], [0.603581964969635, 0.5773848295211792], [0.6038764119148254, 0.5743468403816223], [0.604139506816864, 0.5713073015213013], [0.604499101638794, 0.5683614611625671], [0.6048296093940735, 0.5654633045196533], [0.6051944494247437, 0.5626959204673767], [0.6055389642715454, 0.5598821043968201], [0.6059098839759827, 0.5572322607040405], [0.606273889541626, 0.5545076131820679], [0.6066555976867676, 0.551889181137085], [0.6070416569709778, 0.5491411089897156], [0.6074419021606445, 0.5465301871299744], [0.607843279838562, 0.5437649488449097], [0.6082807779312134, 0.5411769151687622], [0.6087318062782288, 0.5385109186172485], [0.6092384457588196, 0.5359653234481812], [0.6097317337989807, 0.5333926677703857], [0.6102427840232849, 0.5308997631072998], [0.6107065677642822, 0.5283908247947693], [0.6112301349639893, 0.52583909034729], [0.6117078065872192, 0.523286759853363], [0.6122424602508545, 0.5206859111785889], [0.6127213835716248, 0.5180544853210449], [0.6132925748825073, 0.5153151750564575], [0.6138019561767578, 0.5125621557235718], [0.6144283413887024, 0.5096393823623657], [0.614995539188385, 0.5067245960235596], [0.6156995296478271, 0.5036709308624268], [0.616352379322052, 0.5006062984466553], [0.617127537727356, 0.4974174499511719], [0.6178467273712158, 0.49427053332328796], [0.6187898516654968, 0.4908704161643982], [0.6196916103363037, 0.4875035881996155], [0.6207311153411865, 0.4840146005153656], [0.6217080950737, 0.48058459162712097], [0.6228959560394287, 0.4769405126571655], [0.6240326166152954, 0.47338446974754333], [0.6253862977027893, 0.4696224629878998], [0.6266764402389526, 0.46597525477409363], [0.6411339640617371, 0.8328353762626648], [0.6404280662536621, 0.8298289179801941], [0.6397807598114014, 0.8272141814231873], [0.6388381719589233, 0.8231909275054932], [0.6380032300949097, 0.8195613622665405], [0.6370958685874939, 0.8158055543899536], [0.6363238096237183, 0.8124210238456726], [0.6354512572288513, 0.8088482618331909], [0.634697437286377, 0.8056075572967529], [0.6338119506835938, 0.8020595908164978], [0.6330214738845825, 0.7987908124923706], [0.6320991516113281, 0.7951580286026001], [0.6313333511352539, 0.791771650314331], [0.6306341290473938, 0.7891971468925476], [0.6300947666168213, 0.7869384288787842], [0.6295185089111328, 0.7848726511001587], [0.6290769577026367, 0.783177375793457], [0.628595769405365, 0.7810366153717041], [0.6278491020202637, 0.7780406475067139], [0.6271646022796631, 0.7755054831504822], [0.6266273856163025, 0.7732052803039551], [0.6260446310043335, 0.7706484794616699], [0.6256154775619507, 0.7683930397033691], [0.6251413822174072, 0.7659045457839966], [0.6248014569282532, 0.7637110948562622], [0.6244314312934875, 0.7613441944122314], [0.6241570711135864, 0.7592521905899048], [0.6238566040992737, 0.7569866180419922], [0.6236426830291748, 0.7549890279769897], [0.6234209537506104, 0.7527328729629517], [0.6232870817184448, 0.750735878944397], [0.6231476068496704, 0.7493612766265869], [0.6230149865150452, 0.7480495572090149], [0.6226189136505127, 0.745195746421814], [0.6222764849662781, 
0.7425367832183838], [0.621837854385376, 0.7395416498184204], [0.6215006709098816, 0.7368371486663818], [0.6210798621177673, 0.7338434457778931], [0.6207549571990967, 0.7310985326766968], [0.6203390955924988, 0.7280310392379761], [0.6200371980667114, 0.7252564430236816], [0.6196014881134033, 0.7221179008483887], [0.6192716956138611, 0.7192367315292358], [0.6188160181045532, 0.7160337567329407], [0.618443489074707, 0.7130935788154602], [0.617973804473877, 0.7100232839584351], [0.6176066994667053, 0.7071630954742432], [0.6171388030052185, 0.7041443586349487], [0.6167764067649841, 0.7013508081436157], [0.6163182854652405, 0.6983851194381714], [0.6159535646438599, 0.6956186294555664], [0.6154934763908386, 0.6926606893539429], [0.6151533126831055, 0.6899216175079346], [0.6147200465202332, 0.6869888305664062], [0.6143796443939209, 0.6842225790023804], [0.613955020904541, 0.6812882423400879], [0.6136406064033508, 0.6785465478897095], [0.6132382750511169, 0.6756291389465332], [0.6129317879676819, 0.6728805303573608], [0.6125357151031494, 0.6699819564819336], [0.6122369766235352, 0.6672554016113281], [0.6118565201759338, 0.6643794775009155], [0.6115720868110657, 0.6616567373275757], [0.6112062931060791, 0.658793032169342], [0.610919177532196, 0.6560820937156677], [0.6105571389198303, 0.6532349586486816], [0.6102869510650635, 0.6505304574966431], [0.6099390387535095, 0.6476608514785767], [0.6096685528755188, 0.6449116468429565], [0.6093227863311768, 0.6420232057571411], [0.6090582609176636, 0.639248251914978], [0.608727216720581, 0.6363338232040405], [0.6084813475608826, 0.633510410785675], [0.6081651449203491, 0.630536675453186], [0.6079274415969849, 0.6276584267616272], [0.6076329350471497, 0.6246587038040161], [0.607429027557373, 0.6217159032821655], [0.6071714162826538, 0.6186169385910034], [0.6070170402526855, 0.615585207939148], [0.6068118810653687, 0.6124088764190674], [0.6067159175872803, 0.6092953085899353], [0.6065900325775146, 0.6061092615127563], [0.6065770387649536, 0.6029924750328064], [0.6065337657928467, 0.5997724533081055], [0.606593906879425, 0.5966007709503174], [0.6066229343414307, 0.5934063196182251], [0.6067603826522827, 0.5902596116065979], [0.6068671345710754, 0.5870876312255859], [0.6070784330368042, 0.5839682817459106], [0.6072562336921692, 0.5808723568916321], [0.6075378656387329, 0.5777920484542847], [0.6077747941017151, 0.5747925639152527], [0.608106791973114, 0.5718122124671936], [0.6083884835243225, 0.5689160227775574], [0.608748197555542, 0.5660531520843506], [0.609070897102356, 0.5633009672164917], [0.6094607710838318, 0.5605324506759644], [0.609805166721344, 0.5578784942626953], [0.6102002859115601, 0.5552061796188354], [0.610572099685669, 0.5525754690170288], [0.6109974980354309, 0.5498936772346497], [0.6114000678062439, 0.5472674369812012], [0.6118696928024292, 0.5445923209190369], [0.612301230430603, 0.5420125722885132], [0.6127759218215942, 0.5394089818000793], [0.6132110357284546, 0.5369019508361816], [0.6137148141860962, 0.5343478918075562], [0.6141737699508667, 0.5318261384963989], [0.6146646738052368, 0.5292893648147583], [0.6151074171066284, 0.5267753005027771], [0.6156257390975952, 0.5242000818252563], [0.6160945892333984, 0.5216224193572998], [0.6166419386863708, 0.5189636945724487], [0.6171367764472961, 0.5162789821624756], [0.6177269220352173, 0.5134783983230591], [0.6182652711868286, 0.5106549859046936], [0.6189281940460205, 0.5076967477798462], [0.6195296049118042, 0.5047791004180908], [0.6202923655509949, 0.5016796588897705], [0.6209976673126221, 
0.4986100196838379], [0.621819019317627, 0.49540406465530396], [0.6225903034210205, 0.49221014976501465], [0.6235842704772949, 0.4887934923171997], [0.624523401260376, 0.4854596257209778], [0.6256227493286133, 0.4819691777229309], [0.6266710758209229, 0.4785325229167938], [0.6279224157333374, 0.47491177916526794], [0.6291227340698242, 0.47136083245277405], [0.6305269598960876, 0.4676249027252197], [0.6442015767097473, 0.8317822217941284], [0.643584132194519, 0.8291041851043701], [0.6428461670875549, 0.8260684013366699], [0.6419166922569275, 0.8224718570709229], [0.6409204006195068, 0.8185182213783264], [0.6401394605636597, 0.8151242136955261], [0.6392543315887451, 0.8114013671875], [0.6385334134101868, 0.8081498146057129], [0.6376892924308777, 0.804566502571106], [0.6369609832763672, 0.8013172745704651], [0.6361083984375, 0.7977092266082764], [0.6353719234466553, 0.7943888306617737], [0.6344780921936035, 0.7907023429870605], [0.6340079307556152, 0.7883786559104919], [0.633421778678894, 0.7858326435089111], [0.6329971551895142, 0.7838985323905945], [0.6325455904006958, 0.78202223777771], [0.6320085525512695, 0.7800752520561218], [0.6310443878173828, 0.7768092155456543], [0.6304448843002319, 0.7745901942253113], [0.6297743916511536, 0.772132396697998], [0.6292902827262878, 0.7699536681175232], [0.6287621259689331, 0.7675027847290039], [0.628404974937439, 0.7653826475143433], [0.6279979944229126, 0.7629591822624207], [0.62772536277771, 0.760936975479126], [0.6274113059043884, 0.7585974931716919], [0.6272081732749939, 0.756674587726593], [0.6269729137420654, 0.7544369101524353], [0.6267945766448975, 0.752500057220459], [0.6265872716903687, 0.7503114938735962], [0.6265588402748108, 0.7492589354515076], [0.6264365911483765, 0.7474460601806641], [0.6261741518974304, 0.7447837591171265], [0.6257643699645996, 0.7418041825294495], [0.6254312992095947, 0.739102303981781], [0.6249998211860657, 0.736113429069519], [0.6246448755264282, 0.7333911657333374], [0.6242053508758545, 0.7304139137268066], [0.6238857507705688, 0.7276389598846436], [0.6234642267227173, 0.7245984077453613], [0.6231076717376709, 0.7216668128967285], [0.6226354241371155, 0.7185713052749634], [0.622256875038147, 0.7155763506889343], [0.6217820644378662, 0.7124082446098328], [0.621421217918396, 0.7095457315444946], [0.6209487318992615, 0.7064726948738098], [0.6205880641937256, 0.7036564350128174], [0.6201257109642029, 0.7006604671478271], [0.6197645664215088, 0.697887659072876], [0.6193084716796875, 0.6949298977851868], [0.6189701557159424, 0.6921815872192383], [0.6185375452041626, 0.6892595291137695], [0.6181893348693848, 0.6864970326423645], [0.6177453994750977, 0.6835706233978271], [0.6174207329750061, 0.6808350086212158], [0.6170188784599304, 0.6779416799545288], [0.6167112588882446, 0.6751848459243774], [0.6163167953491211, 0.672288179397583], [0.6160257458686829, 0.6695501208305359], [0.615655243396759, 0.666680097579956], [0.6153728365898132, 0.6639513373374939], [0.6150127053260803, 0.6611051559448242], [0.6147415637969971, 0.6583706140518188], [0.6143954992294312, 0.6555308103561401], [0.6141372919082642, 0.6528223156929016], [0.6138041019439697, 0.6499987244606018], [0.6135398745536804, 0.6472433805465698], [0.6131991147994995, 0.644383430480957], [0.6129412651062012, 0.6416168212890625], [0.6126136183738708, 0.6387397646903992], [0.612368106842041, 0.635949969291687], [0.6120588183403015, 0.6330422163009644], [0.6118364334106445, 0.6301833987236023], [0.6115509271621704, 0.6272166967391968], [0.6113492250442505, 
0.6243399381637573], [0.6110871434211731, 0.6213316321372986], [0.6109204888343811, 0.6183667182922363], [0.6107071042060852, 0.6152900457382202], [0.610598623752594, 0.6122426986694336], [0.6104546189308167, 0.6091113090515137], [0.6104162335395813, 0.6060491800308228], [0.6103410720825195, 0.6029242277145386], [0.6103711724281311, 0.5998201370239258], [0.6103726625442505, 0.5966484546661377], [0.6104856133460999, 0.5935690999031067], [0.6105756163597107, 0.5904419422149658], [0.6107802987098694, 0.5873782634735107], [0.6109538078308105, 0.5842699408531189], [0.6112138628959656, 0.5812559127807617], [0.6114345192909241, 0.5781998634338379], [0.6117448210716248, 0.575272798538208], [0.612019956111908, 0.572324275970459], [0.6123827695846558, 0.5694918036460876], [0.6127098202705383, 0.5666577219963074], [0.6130839586257935, 0.5639376640319824], [0.6134238243103027, 0.56119304895401], [0.6138102412223816, 0.5585637092590332], [0.6141717433929443, 0.5559155941009521], [0.61457359790802, 0.5533052682876587], [0.6149590611457825, 0.5506519079208374], [0.6153888702392578, 0.548060953617096], [0.6158097982406616, 0.5454379320144653], [0.6162806153297424, 0.5428649187088013], [0.6167157292366028, 0.5402554869651794], [0.6171650290489197, 0.5377658605575562], [0.6175868511199951, 0.5352350473403931], [0.618069052696228, 0.5327095985412598], [0.6185140609741211, 0.5301586389541626], [0.6189947724342346, 0.527641773223877], [0.6194326281547546, 0.5250992774963379], [0.6199527978897095, 0.52252197265625], [0.62043696641922, 0.5199019908905029], [0.6210136413574219, 0.5172107219696045], [0.6215454339981079, 0.5144524574279785], [0.6221773028373718, 0.5116428136825562], [0.6227718591690063, 0.5087553262710571], [0.6234651207923889, 0.5058236122131348], [0.6241167783737183, 0.5028535723686218], [0.6249207258224487, 0.4997554421424866], [0.6256729364395142, 0.49663254618644714], [0.6265389919281006, 0.4934215545654297], [0.6273790001869202, 0.49019867181777954], [0.6284191608428955, 0.48683086037635803], [0.6294175386428833, 0.4835019111633301], [0.6305850744247437, 0.4800183176994324], [0.6316971778869629, 0.4765831232070923], [0.6330070495605469, 0.4729750156402588], [0.6342678070068359, 0.4694373309612274], [0.6473543643951416, 0.831061840057373], [0.6465544700622559, 0.8279919624328613], [0.6458389163017273, 0.8253087401390076], [0.6448315978050232, 0.82137131690979], [0.6439659595489502, 0.8178090453147888], [0.6430456042289734, 0.8140972852706909], [0.6422845721244812, 0.8107388615608215], [0.6414458155632019, 0.8071575164794922], [0.6407361626625061, 0.8039019107818604], [0.6399259567260742, 0.8003249168395996], [0.639247715473175, 0.7970446348190308], [0.6384668350219727, 0.7933914661407471], [0.637826681137085, 0.7900032997131348], [0.6372349858283997, 0.7873653173446655], [0.6367357969284058, 0.7850552797317505], [0.6361965537071228, 0.7828341126441956], [0.6356337070465088, 0.7809840440750122], [0.6349954605102539, 0.7789329290390015], [0.634098470211029, 0.7757962942123413], [0.6333581209182739, 0.7734628915786743], [0.6328380107879639, 0.7713565826416016], [0.6322823762893677, 0.7690239548683167], [0.6319129467010498, 0.7669522762298584], [0.6314817667007446, 0.7645955085754395], [0.631178081035614, 0.7625352740287781], [0.6308286190032959, 0.7602563500404358], [0.6305849552154541, 0.7582833766937256], [0.6303330063819885, 0.7561315298080444], [0.6301758289337158, 0.7542486190795898], [0.6300711631774902, 0.7521277666091919], [0.6300898790359497, 0.7503445148468018], [0.6300058364868164, 
0.7488764524459839], [0.6298651099205017, 0.7471874952316284], [0.6295121312141418, 0.7441569566726685], [0.6292160153388977, 0.7414095401763916], [0.628801703453064, 0.7384095191955566], [0.6284510493278503, 0.7356659173965454], [0.627993106842041, 0.7326655387878418], [0.6276321411132812, 0.7299683094024658], [0.6271626353263855, 0.7269753217697144], [0.6267916560173035, 0.7241634130477905], [0.6262975931167603, 0.721000075340271], [0.6259284615516663, 0.7181206941604614], [0.6254475116729736, 0.7149309515953064], [0.6250960826873779, 0.7119805216789246], [0.6246443390846252, 0.7088877558708191], [0.6242828369140625, 0.7060074806213379], [0.6238211393356323, 0.7029942274093628], [0.6234601736068726, 0.7001816034317017], [0.6230039596557617, 0.6972258687019348], [0.6226644515991211, 0.6944699883460999], [0.6222306489944458, 0.6915465593338013], [0.6218961477279663, 0.6887980699539185], [0.6214607954025269, 0.6858525276184082], [0.6211326122283936, 0.6831068992614746], [0.6207229495048523, 0.6802319288253784], [0.6204187273979187, 0.677513837814331], [0.6200276017189026, 0.6746038198471069], [0.6197381019592285, 0.6718692779541016], [0.6193649172782898, 0.668996274471283], [0.6190909147262573, 0.6662762761116028], [0.6187350153923035, 0.6634125709533691], [0.6184716820716858, 0.6607062816619873], [0.618126392364502, 0.6578495502471924], [0.6178732514381409, 0.655144214630127], [0.6175411939620972, 0.652316153049469], [0.617286741733551, 0.6496070623397827], [0.6169522404670715, 0.6467416286468506], [0.6167097687721252, 0.6440008878707886], [0.6163924932479858, 0.6411343812942505], [0.6161652207374573, 0.6383762359619141], [0.6158668398857117, 0.6354910135269165], [0.615650475025177, 0.6327002048492432], [0.6153671741485596, 0.6297688484191895], [0.615179717540741, 0.6269195079803467], [0.6149304509162903, 0.6239591836929321], [0.6147710084915161, 0.6210752725601196], [0.6145569086074829, 0.618057131767273], [0.6144387125968933, 0.615103006362915], [0.6142730712890625, 0.6120290756225586], [0.6142175197601318, 0.609027624130249], [0.6141263246536255, 0.605944037437439], [0.6141273379325867, 0.6029164791107178], [0.6140937805175781, 0.5998270511627197], [0.6141899228096008, 0.596785843372345], [0.6142545938491821, 0.5937035083770752], [0.6144113540649414, 0.5906789898872375], [0.6145331263542175, 0.587643027305603], [0.6147664785385132, 0.5846188068389893], [0.6149561405181885, 0.5816032886505127], [0.6152443289756775, 0.5786341428756714], [0.6154893040657043, 0.5757232904434204], [0.615824282169342, 0.5728393793106079], [0.616108775138855, 0.5700422525405884], [0.6164833903312683, 0.5672503709793091], [0.6168093085289001, 0.5645326375961304], [0.6171972751617432, 0.5618208646774292], [0.6175378561019897, 0.5592198371887207], [0.6179447174072266, 0.556597113609314], [0.6183164715766907, 0.5539971590042114], [0.6187405586242676, 0.5513713359832764], [0.6191309690475464, 0.5488160848617554], [0.6195861101150513, 0.5462307929992676], [0.620002806186676, 0.543662965297699], [0.6204779148101807, 0.5410505533218384], [0.6208897829055786, 0.5385630130767822], [0.6213544607162476, 0.5360478162765503], [0.6217768788337708, 0.5335494875907898], [0.6222677230834961, 0.5309913158416748], [0.622704267501831, 0.5284830331802368], [0.6231932640075684, 0.5259144306182861], [0.6236186623573303, 0.523399829864502], [0.6241560578346252, 0.520784854888916], [0.6246421933174133, 0.5181195735931396], [0.6252246499061584, 0.5153471827507019], [0.6257538199424744, 0.5126110315322876], [0.6264246702194214, 
0.5097270011901855], [0.6270427107810974, 0.5068612098693848], [0.6277778148651123, 0.5038765072822571], [0.6284641623497009, 0.5008963346481323], [0.6293153762817383, 0.4977310299873352], [0.6301047801971436, 0.4946499466896057], [0.6310539245605469, 0.49142688512802124], [0.631963849067688, 0.48823094367980957], [0.633065938949585, 0.4848584532737732], [0.6341171264648438, 0.4815295338630676], [0.6353479027748108, 0.47804564237594604], [0.6365259885787964, 0.47462719678878784], [0.6379114985466003, 0.4710422158241272], [0.6503562331199646, 0.8299252986907959], [0.6496566534042358, 0.8271923065185547], [0.6488379836082458, 0.824118971824646], [0.647882342338562, 0.820597767829895], [0.6468636989593506, 0.8167403936386108], [0.6460643410682678, 0.8133968114852905], [0.6451785564422607, 0.8097457885742188], [0.6444626450538635, 0.8065023422241211], [0.6436550617218018, 0.80294269323349], [0.6429954767227173, 0.7997066378593445], [0.6422556638717651, 0.7961258292198181], [0.6416275501251221, 0.792791485786438], [0.6408784985542297, 0.7891138792037964], [0.6404627561569214, 0.7867237329483032], [0.6399424076080322, 0.7840722799301147], [0.6394566297531128, 0.7819466590881348], [0.638800859451294, 0.7795799374580383], [0.6382707357406616, 0.7778749465942383], [0.6370938420295715, 0.7744497060775757], [0.6365060806274414, 0.7724863886833191], [0.6359223127365112, 0.7703515291213989], [0.6355686783790588, 0.7683981657028198], [0.6351292729377747, 0.7661078572273254], [0.6348312497138977, 0.7641043663024902], [0.6344450116157532, 0.7617655992507935], [0.634178876876831, 0.7598398923873901], [0.6338592767715454, 0.7576196789741516], [0.6336628198623657, 0.7558650970458984], [0.6334810256958008, 0.7538179755210876], [0.6333932280540466, 0.7521404027938843], [0.6332741975784302, 0.7501420974731445], [0.6332775950431824, 0.7488648891448975], [0.633201539516449, 0.746715784072876], [0.6330083608627319, 0.7438576221466064], [0.632649838924408, 0.7407636642456055], [0.6323049664497375, 0.7379451990127563], [0.631823718547821, 0.7349277138710022], [0.6314432621002197, 0.7321829795837402], [0.630942702293396, 0.7292072772979736], [0.6305027008056641, 0.7264437675476074], [0.6299610137939453, 0.7234537601470947], [0.6295773983001709, 0.7205659747123718], [0.6291214823722839, 0.71746826171875], [0.6287679076194763, 0.7145044803619385], [0.6283137202262878, 0.7113581895828247], [0.6279627680778503, 0.708441972732544], [0.627495288848877, 0.7053512334823608], [0.6271377205848694, 0.702530026435852], [0.6266812086105347, 0.6995283365249634], [0.6263470649719238, 0.6967704892158508], [0.6259136199951172, 0.6938363909721375], [0.6255787014961243, 0.6910849213600159], [0.6251463294029236, 0.6881722211837769], [0.6248241066932678, 0.6854061484336853], [0.6244144439697266, 0.6824973821640015], [0.6241068840026855, 0.6797938346862793], [0.6237139105796814, 0.6769360303878784], [0.6234259605407715, 0.6741971969604492], [0.6230565905570984, 0.6713173389434814], [0.6227874159812927, 0.6685997843742371], [0.6224362850189209, 0.6657475233078003], [0.6221801042556763, 0.663030207157135], [0.621844470500946, 0.6601927280426025], [0.6216006875038147, 0.6574758887290955], [0.6212738752365112, 0.6546427011489868], [0.6210260391235352, 0.6519342660903931], [0.6206938028335571, 0.649100661277771], [0.6204534769058228, 0.6463781595230103], [0.6201464533805847, 0.6435340046882629], [0.6199361681938171, 0.6407977938652039], [0.6196516156196594, 0.6379339694976807], [0.6194478273391724, 0.6351639032363892], [0.6191702485084534, 
0.632277250289917], [0.618984580039978, 0.6294772028923035], [0.6187390089035034, 0.6265558004379272], [0.6185886859893799, 0.6237127780914307], [0.6183744668960571, 0.620758056640625], [0.6182489395141602, 0.617860734462738], [0.6180745363235474, 0.6148632168769836], [0.6180095672607422, 0.611922025680542], [0.6179022789001465, 0.6088931560516357], [0.6178938150405884, 0.6059280633926392], [0.6178469061851501, 0.6028685569763184], [0.6179096698760986, 0.5999225378036499], [0.6179448366165161, 0.5968947410583496], [0.6180867552757263, 0.5939096212387085], [0.6181824207305908, 0.5908666849136353], [0.6183640956878662, 0.5879271030426025], [0.6185088157653809, 0.5849197506904602], [0.6187706589698792, 0.5820075273513794], [0.6190068125724792, 0.5790563821792603], [0.6193288564682007, 0.5762159824371338], [0.6196058988571167, 0.5733430981636047], [0.6199522018432617, 0.5706011056900024], [0.620259165763855, 0.5678402185440063], [0.6206403970718384, 0.5651627779006958], [0.6209880709648132, 0.5624641180038452], [0.6213768720626831, 0.5598903894424438], [0.6217285394668579, 0.5572875738143921], [0.6221371293067932, 0.5547067523002625], [0.622517466545105, 0.5520964860916138], [0.6229377388954163, 0.5495715141296387], [0.623328447341919, 0.546994686126709], [0.6237673759460449, 0.5444265604019165], [0.6241719722747803, 0.5418359041213989], [0.6246252655982971, 0.5393632650375366], [0.625047504901886, 0.536861002445221], [0.6255214214324951, 0.534369945526123], [0.6259580254554749, 0.5318349599838257], [0.6264495849609375, 0.5293161869049072], [0.6268923282623291, 0.5267370939254761], [0.6273750066757202, 0.5242249965667725], [0.6278086304664612, 0.5216629505157471], [0.6283526420593262, 0.5189690589904785], [0.6288468837738037, 0.516266942024231], [0.6294340491294861, 0.513503909111023], [0.6299821138381958, 0.510749340057373], [0.6306817531585693, 0.5078504085540771], [0.6313316822052002, 0.5049700736999512], [0.6321136951446533, 0.5019626617431641], [0.6328617334365845, 0.49895042181015015], [0.633764386177063, 0.4958391785621643], [0.6346139907836914, 0.4927677810192108], [0.635626494884491, 0.48952990770339966], [0.6365936994552612, 0.4863359034061432], [0.6377662420272827, 0.48296475410461426], [0.6388837099075317, 0.47965818643569946], [0.6401846408843994, 0.476173996925354], [0.6414175033569336, 0.4727957248687744], [0.6534497737884521, 0.8291199803352356], [0.6525675654411316, 0.8260177373886108], [0.6517890691757202, 0.823299765586853], [0.6507267355918884, 0.8194725513458252], [0.6498308181762695, 0.8160039186477661], [0.6488903760910034, 0.812372624874115], [0.648128092288971, 0.8090879917144775], [0.6472995281219482, 0.8055515289306641], [0.6466227173805237, 0.8023368716239929], [0.6458694934844971, 0.7988210916519165], [0.6452698707580566, 0.7955952286720276], [0.6445740461349487, 0.7919408082962036], [0.6439996957778931, 0.7885768413543701], [0.6434626579284668, 0.7858700752258301], [0.6429879069328308, 0.7834634184837341], [0.6423603892326355, 0.7808526754379272], [0.6416956186294556, 0.7786247730255127], [0.6411213278770447, 0.7766917943954468], [0.6399173736572266, 0.7732261419296265], [0.6393275260925293, 0.77149498462677], [0.6390606760978699, 0.7697927951812744], [0.6386604309082031, 0.7675784826278687], [0.6383981704711914, 0.7656526565551758], [0.6380001306533813, 0.7633081674575806], [0.6376940011978149, 0.7613078355789185], [0.6373105049133301, 0.7590879201889038], [0.6370289325714111, 0.7572259902954102], [0.6367201805114746, 0.7552454471588135], [0.636521577835083, 
0.7536365389823914], [0.6363916993141174, 0.7518608570098877], [0.636454701423645, 0.7503663301467896], [0.6364482045173645, 0.7486409544944763], [0.6363574266433716, 0.7466249465942383], [0.6361401677131653, 0.7434285283088684], [0.6359052658081055, 0.7404296398162842], [0.6354670524597168, 0.7372605800628662], [0.6350789070129395, 0.7344421148300171], [0.6345683336257935, 0.7314633727073669], [0.6341392397880554, 0.7286759614944458], [0.6336114406585693, 0.7256835699081421], [0.6332250833511353, 0.7229551076889038], [0.6327269673347473, 0.7199071049690247], [0.6323624849319458, 0.7170507907867432], [0.6318888664245605, 0.7138803601264954], [0.631521999835968, 0.7109380960464478], [0.6310518980026245, 0.7078213691711426], [0.6306931972503662, 0.7049124240875244], [0.6302422285079956, 0.7019070982933044], [0.6299152374267578, 0.6991071701049805], [0.6294922232627869, 0.6961621046066284], [0.6291660070419312, 0.6934007406234741], [0.6287417411804199, 0.6904770135879517], [0.6284241676330566, 0.6877357959747314], [0.6280113458633423, 0.6848247647285461], [0.6277110576629639, 0.6820828914642334], [0.6273267865180969, 0.6792217493057251], [0.6270464062690735, 0.6765355467796326], [0.626676619052887, 0.6736609935760498], [0.6264132261276245, 0.6709414720535278], [0.6260664463043213, 0.668083906173706], [0.6258187890052795, 0.6653856039047241], [0.6254878640174866, 0.662535548210144], [0.6252502202987671, 0.6598367691040039], [0.624927282333374, 0.6569963693618774], [0.6246994137763977, 0.6542942523956299], [0.6243892908096313, 0.6514534950256348], [0.6241714954376221, 0.6487497091293335], [0.6238747835159302, 0.6459187269210815], [0.6236710548400879, 0.6432101726531982], [0.6233861446380615, 0.6403709650039673], [0.6231902837753296, 0.6376270055770874], [0.6229222416877747, 0.6347585916519165], [0.622747540473938, 0.6319937705993652], [0.6225069761276245, 0.6291135549545288], [0.6223629117012024, 0.6263230443000793], [0.622153103351593, 0.6233999133110046], [0.6220245361328125, 0.6205489635467529], [0.6218414306640625, 0.6176056861877441], [0.6217658519744873, 0.614739716053009], [0.6216397285461426, 0.6117607951164246], [0.6216119527816772, 0.6088488698005676], [0.6215422749519348, 0.6058591604232788], [0.6215828061103821, 0.602928638458252], [0.6215844750404358, 0.5999653935432434], [0.6216828227043152, 0.5970542430877686], [0.621743381023407, 0.5940622091293335], [0.6219063997268677, 0.5911034941673279], [0.6220236420631409, 0.5881528854370117], [0.6222447752952576, 0.5852560997009277], [0.6224254369735718, 0.5823653340339661], [0.6227072477340698, 0.5794969201087952], [0.6229430437088013, 0.5766593217849731], [0.6232735514640808, 0.5738540887832642], [0.6235525608062744, 0.5711111426353455], [0.6239027380943298, 0.5683929920196533], [0.624199390411377, 0.5657351016998291], [0.6245808601379395, 0.5630846619606018], [0.6249157190322876, 0.5605010390281677], [0.6253037452697754, 0.5579168796539307], [0.6256517767906189, 0.5553592443466187], [0.6260749101638794, 0.5527827739715576], [0.6264567375183105, 0.5502685308456421], [0.6269044280052185, 0.547714114189148], [0.6273126602172852, 0.54515540599823], [0.6277753114700317, 0.5425733327865601], [0.6281648874282837, 0.5401298999786377], [0.6286220550537109, 0.5376201272010803], [0.6290258169174194, 0.5351495742797852], [0.6295050382614136, 0.5326099395751953], [0.6299268007278442, 0.5301129817962646], [0.6304366588592529, 0.5275235176086426], [0.6308748722076416, 0.5250303745269775], [0.6313858032226562, 0.5224449038505554], [0.6318474411964417, 
0.519831120967865], [0.6324223875999451, 0.5171072483062744], [0.6329433917999268, 0.5144393444061279], [0.6335731744766235, 0.5116595029830933], [0.6341511011123657, 0.5088877081871033], [0.6348766684532166, 0.5059688687324524], [0.635553240776062, 0.503084659576416], [0.6363906264305115, 0.5000705718994141], [0.6371840238571167, 0.49709367752075195], [0.6381305456161499, 0.49397414922714233], [0.6390309929847717, 0.4908941984176636], [0.6401097774505615, 0.4876716434955597], [0.6411483287811279, 0.4844757318496704], [0.6423864364624023, 0.48113197088241577], [0.6435889005661011, 0.4778246283531189], [0.6449600458145142, 0.47439301013946533], [0.6563893556594849, 0.8279134631156921], [0.6556098461151123, 0.8251461982727051], [0.6547067761421204, 0.8220601081848145], [0.6537012457847595, 0.8186404705047607], [0.6526404023170471, 0.8149175643920898], [0.6518203020095825, 0.8116588592529297], [0.6509342193603516, 0.8081141114234924], [0.6502295732498169, 0.8049237728118896], [0.6494536995887756, 0.8014394640922546], [0.6488355398178101, 0.7982650995254517], [0.6481483578681946, 0.7947835326194763], [0.6475800275802612, 0.791451096534729], [0.6469200849533081, 0.787796139717102], [0.6465358138084412, 0.7853461503982544], [0.6460071206092834, 0.7825472354888916], [0.6454002857208252, 0.7799907922744751], [0.6444294452667236, 0.7772119045257568], [0.6437568664550781, 0.7756774425506592], [0.642778217792511, 0.7721432447433472], [0.6426286101341248, 0.7708899974822998], [0.6423265933990479, 0.769094705581665], [0.6421966552734375, 0.7672433257102966], [0.6418542265892029, 0.764805793762207], [0.6415393352508545, 0.7627640962600708], [0.641106367111206, 0.7604187726974487], [0.640772819519043, 0.7585098743438721], [0.6403398513793945, 0.7563525438308716], [0.6400310397148132, 0.7547149658203125], [0.6396585702896118, 0.7528870105743408], [0.6395219564437866, 0.751823902130127], [0.639430820941925, 0.7503361701965332], [0.6394457221031189, 0.7486890554428101], [0.6395045518875122, 0.7465087175369263], [0.6394869089126587, 0.7432478070259094], [0.6390818357467651, 0.7398338317871094], [0.6387079954147339, 0.7368221879005432], [0.6381971836090088, 0.733710765838623], [0.6377592086791992, 0.7309221029281616], [0.6372001767158508, 0.7279203534126282], [0.6367868185043335, 0.7251870632171631], [0.6363049745559692, 0.7222793102264404], [0.6359571814537048, 0.7194732427597046], [0.6354953050613403, 0.7164062261581421], [0.6351099014282227, 0.7134361863136292], [0.6346174478530884, 0.7102985382080078], [0.6342415809631348, 0.7073824405670166], [0.63377445936203, 0.7043014764785767], [0.633453369140625, 0.7014989852905273], [0.6330363154411316, 0.6985223293304443], [0.6327171325683594, 0.6957409381866455], [0.6322939991950989, 0.6928060054779053], [0.6319860816001892, 0.6900657415390015], [0.6315839886665344, 0.6871556043624878], [0.6312903761863708, 0.6844222545623779], [0.6309080123901367, 0.6815317869186401], [0.6306391954421997, 0.6788384914398193], [0.6302805542945862, 0.6759982109069824], [0.6300172805786133, 0.6732878088951111], [0.6296712160110474, 0.6704362630844116], [0.6294309496879578, 0.6677340269088745], [0.6291104555130005, 0.6649056673049927], [0.628883957862854, 0.6621951460838318], [0.6285706162452698, 0.6593631505966187], [0.6283493041992188, 0.6566615104675293], [0.628048837184906, 0.653842568397522], [0.6278474926948547, 0.6511379480361938], [0.6275727152824402, 0.6483215093612671], [0.6273860931396484, 0.645611047744751], [0.6271120309829712, 0.6427867412567139], [0.6269199252128601, 
0.6400622129440308], [0.6266531944274902, 0.6372306942939758], [0.6264877319335938, 0.6344903707504272], [0.6262567043304443, 0.6316408514976501], [0.6261152029037476, 0.6288755536079407], [0.6259031891822815, 0.6260141730308533], [0.6257792115211487, 0.6231980323791504], [0.6255935430526733, 0.6202768087387085], [0.6255105137825012, 0.6174687147140503], [0.6253732442855835, 0.6145522594451904], [0.6253328919410706, 0.6116930842399597], [0.6252453327178955, 0.6087443828582764], [0.6252631545066833, 0.6058844327926636], [0.6252394914627075, 0.6029340624809265], [0.6253181099891663, 0.6000844240188599], [0.6253510117530823, 0.5971444845199585], [0.6254791021347046, 0.5942634344100952], [0.6255697011947632, 0.5912952423095703], [0.6257662773132324, 0.5884513854980469], [0.6259190440177917, 0.5855480432510376], [0.6261593699455261, 0.5827416181564331], [0.6263566613197327, 0.5798801779747009], [0.6266477704048157, 0.5771253108978271], [0.6269018650054932, 0.5743300914764404], [0.6272327303886414, 0.5716365575790405], [0.6275166869163513, 0.5689119100570679], [0.627863347530365, 0.5663016438484192], [0.6281680464744568, 0.5636789202690125], [0.6285369396209717, 0.5611106753349304], [0.6288669109344482, 0.5585387945175171], [0.6292591094970703, 0.5560154914855957], [0.6296199560165405, 0.5534720420837402], [0.6300411820411682, 0.5509777069091797], [0.6304328441619873, 0.5484481453895569], [0.6308966279029846, 0.5459092855453491], [0.6313178539276123, 0.5433146953582764], [0.631761908531189, 0.5408797264099121], [0.6321607828140259, 0.5383651256561279], [0.63260817527771, 0.5359088778495789], [0.6330192685127258, 0.5333832502365112], [0.6334936022758484, 0.530896782875061], [0.6339325904846191, 0.5283389687538147], [0.6344280242919922, 0.5258196592330933], [0.6348748207092285, 0.5232688188552856], [0.6354148983955383, 0.5206650495529175], [0.6359235048294067, 0.5180249214172363], [0.6365284323692322, 0.5153453350067139], [0.6370826959609985, 0.5126343965530396], [0.6377407312393188, 0.509838342666626], [0.6383481621742249, 0.5070348381996155], [0.6390958428382874, 0.5041325092315674], [0.6398101449012756, 0.501278281211853], [0.6406987309455872, 0.4982678294181824], [0.6415421962738037, 0.4952965974807739], [0.6425509452819824, 0.49218472838401794], [0.6435039043426514, 0.489122211933136], [0.6446483135223389, 0.4858846068382263], [0.6457421183586121, 0.48274439573287964], [0.6470767259597778, 0.4794034957885742], [0.6483501195907593, 0.47615665197372437], [0.6593858599662781, 0.8270276784896851], [0.6584169864654541, 0.8239282369613647], [0.6575604677200317, 0.8211753368377686], [0.6564306616783142, 0.8174780607223511], [0.655495822429657, 0.8141430616378784], [0.6545297503471375, 0.8106406927108765], [0.653756320476532, 0.8074593544006348], [0.6529306173324585, 0.8040233850479126], [0.6522768139839172, 0.8008819818496704], [0.6515564322471619, 0.7974527478218079], [0.6509840488433838, 0.7943040728569031], [0.6503268480300903, 0.7907232046127319], [0.6498412489891052, 0.7873953580856323], [0.6493716835975647, 0.7846039533615112], [0.6489795446395874, 0.7820671796798706], [0.648310124874115, 0.778857946395874], [0.6473292112350464, 0.7759112119674683], [0.6466182470321655, 0.77406907081604], [0.6463724374771118, 0.7722365856170654], [0.6461287140846252, 0.7698347568511963], [0.6460482478141785, 0.7689931392669678], [0.6456375122070312, 0.7663757801055908], [0.6452414393424988, 0.7642855644226074], [0.6447727680206299, 0.7618787288665771], [0.6444011926651001, 0.759833574295044], 
[0.6439852118492126, 0.757644534111023], [0.6436730027198792, 0.7557504773139954], [0.6432971954345703, 0.7537841796875], [0.6430583000183105, 0.7523199319839478], [0.64274001121521, 0.7509446144104004], [0.6425795555114746, 0.7501600980758667], [0.642275333404541, 0.7483163475990295], [0.6422747373580933, 0.7469639778137207], [0.6424387693405151, 0.7428945899009705], [0.6420891284942627, 0.7393842935562134], [0.6415449380874634, 0.7362201809883118], [0.6411334276199341, 0.7332407832145691], [0.6406384110450745, 0.7302473783493042], [0.6402804851531982, 0.7274507284164429], [0.6398345232009888, 0.7245205640792847], [0.6395100951194763, 0.7218472957611084], [0.6390257477760315, 0.7188201546669006], [0.6386129856109619, 0.7159401178359985], [0.6380870938301086, 0.7127988934516907], [0.6377044320106506, 0.7098670601844788], [0.6372341513633728, 0.7067734599113464], [0.636904239654541, 0.7039085626602173], [0.636487603187561, 0.7009342312812805], [0.6361765265464783, 0.698135256767273], [0.6357687711715698, 0.6951755285263062], [0.6354761123657227, 0.6924152970314026], [0.6350852847099304, 0.6895116567611694], [0.6348047852516174, 0.6867765784263611], [0.6344295740127563, 0.6838816404342651], [0.6341608166694641, 0.6811646819114685], [0.6338039636611938, 0.6783238649368286], [0.633550763130188, 0.6756399869918823], [0.6332069039344788, 0.6727896332740784], [0.6329711675643921, 0.6701014041900635], [0.6326499581336975, 0.667265772819519], [0.6324288845062256, 0.6645863056182861], [0.6321200132369995, 0.6617462635040283], [0.6319095492362976, 0.6590506434440613], [0.6316156387329102, 0.6562267541885376], [0.6314215660095215, 0.6535454988479614], [0.6311456561088562, 0.6507340669631958], [0.6309680342674255, 0.6480515003204346], [0.6307068467140198, 0.6452223062515259], [0.6305328011512756, 0.6425058841705322], [0.6302763223648071, 0.6396793127059937], [0.6301201581954956, 0.6369768381118774], [0.6298891305923462, 0.634150505065918], [0.6297471523284912, 0.631415605545044], [0.6295353174209595, 0.6285715103149414], [0.6294159889221191, 0.6258162260055542], [0.6292286515235901, 0.6229360103607178], [0.6291500329971313, 0.6201426982879639], [0.6290111541748047, 0.6172707080841064], [0.6289597749710083, 0.6144620180130005], [0.6288522481918335, 0.6115646362304688], [0.6288530826568604, 0.6087411046028137], [0.6288065910339355, 0.605850100517273], [0.6288595199584961, 0.6030139923095703], [0.6288623809814453, 0.6001354455947876], [0.6289637088775635, 0.5972950458526611], [0.6290197372436523, 0.5943942666053772], [0.629179060459137, 0.5915395617485046], [0.6292920112609863, 0.5886865258216858], [0.6295015811920166, 0.5858625769615173], [0.6296591758728027, 0.5830469131469727], [0.6299134492874146, 0.580265462398529], [0.6301168203353882, 0.5775169134140015], [0.6304152011871338, 0.5747915506362915], [0.6306647062301636, 0.5720964670181274], [0.6309980154037476, 0.5694181323051453], [0.6312749981880188, 0.5668092966079712], [0.631621241569519, 0.5642191171646118], [0.6319201588630676, 0.5616664886474609], [0.632291316986084, 0.5591214299201965], [0.6326130032539368, 0.5566154718399048], [0.6330057382583618, 0.5540996789932251], [0.6333498358726501, 0.5516141653060913], [0.6337611079216003, 0.5491048693656921], [0.6341330409049988, 0.5465860366821289], [0.6346023082733154, 0.5440120697021484], [0.6349968314170837, 0.541558027267456], [0.6354584693908691, 0.5390563011169434], [0.6358579397201538, 0.5366081595420837], [0.6363313794136047, 0.534092366695404], [0.6367436647415161, 0.5316321849822998], 
[0.6372463703155518, 0.5290836095809937], [0.6376911401748657, 0.5265884399414062], [0.6382111310958862, 0.5240260362625122], [0.6386775970458984, 0.5214968919754028], [0.6392582654953003, 0.5188562870025635], [0.6397870779037476, 0.5162500143051147], [0.6404396295547485, 0.5135244131088257], [0.6410385966300964, 0.5108118057250977], [0.6417527794837952, 0.50798499584198], [0.6424100399017334, 0.5052306056022644], [0.643212080001831, 0.5023516416549683], [0.6439734697341919, 0.49949127435684204], [0.6449099779129028, 0.4964974522590637], [0.6458080410957336, 0.49353402853012085], [0.6468802094459534, 0.4904448688030243], [0.6479173302650452, 0.4873853325843811], [0.6491267681121826, 0.48421499133110046], [0.6503034234046936, 0.4810678958892822], [0.651703953742981, 0.47779154777526855], [0.662206768989563, 0.8257648944854736], [0.6613514423370361, 0.8229959607124329], [0.6603540182113647, 0.8198925256729126], [0.6592837572097778, 0.8165782690048218], [0.6581727266311646, 0.8130385279655457], [0.6573315262794495, 0.8099122047424316], [0.6564355492591858, 0.8064975738525391], [0.6557294726371765, 0.8034098148345947], [0.6549620628356934, 0.8000366687774658], [0.6543601751327515, 0.7969366908073425], [0.6536937952041626, 0.7935471534729004], [0.6531655192375183, 0.7903034090995789], [0.6525847911834717, 0.7867618799209595], [0.6522563695907593, 0.7842155694961548], [0.6518081426620483, 0.7813680171966553], [0.6513388156890869, 0.7782662510871887], [0.6506348848342896, 0.774453341960907], [0.6504549980163574, 0.773253321647644], [0.6500504016876221, 0.7714235782623291], [0.6499451398849487, 0.7698721289634705], [0.6496146321296692, 0.7678163051605225], [0.6490628123283386, 0.7657299041748047], [0.648574709892273, 0.763314962387085], [0.648194432258606, 0.7612707614898682], [0.6477543711662292, 0.7589147686958313], [0.6473914980888367, 0.7569894790649414], [0.6469420790672302, 0.7548712491989136], [0.646784782409668, 0.753429651260376], [0.6464633941650391, 0.7513406276702881], [0.6464114189147949, 0.7506266236305237], [0.6463442444801331, 0.7493113279342651], [0.6461568474769592, 0.7478071451187134], [0.6459814310073853, 0.7467244267463684], [0.6453725099563599, 0.7426880598068237], [0.6447848081588745, 0.7387503385543823], [0.6443866491317749, 0.7358139753341675], [0.6438906192779541, 0.7326412200927734], [0.6436150074005127, 0.729915976524353], [0.6432649493217468, 0.7268946170806885], [0.6430115699768066, 0.7241564989089966], [0.6425943374633789, 0.7212088108062744], [0.6421588659286499, 0.7182797193527222], [0.641579270362854, 0.7152396440505981], [0.6411559581756592, 0.712340235710144], [0.6406658887863159, 0.7092610597610474], [0.6403308510780334, 0.706383466720581], [0.6399070620536804, 0.7033505439758301], [0.6395990252494812, 0.7005550861358643], [0.6391950845718384, 0.6975911855697632], [0.6389199495315552, 0.6948220729827881], [0.6385483145713806, 0.691886305809021], [0.6382806301116943, 0.6891466379165649], [0.637911856174469, 0.6862567067146301], [0.6376523971557617, 0.6835297346115112], [0.6373000741004944, 0.6806588768959045], [0.6370529532432556, 0.6779765486717224], [0.6367129683494568, 0.6751493215560913], [0.6364834308624268, 0.6724664568901062], [0.6361712217330933, 0.6696419715881348], [0.6359575986862183, 0.6669543981552124], [0.6356521844863892, 0.6641396284103394], [0.6354443430900574, 0.6614466905593872], [0.63515704870224, 0.6586289405822754], [0.6349756717681885, 0.6559498310089111], [0.6347124576568604, 0.653148889541626], [0.6345412135124207, 0.6504669785499573], 
[0.6342851519584656, 0.6476731300354004], [0.6341176629066467, 0.6449636220932007], [0.6338720917701721, 0.6421437859535217], [0.6337296962738037, 0.6394422054290771], [0.6335081458091736, 0.6366434097290039], [0.6333702802658081, 0.6339240670204163], [0.6331528425216675, 0.6311022043228149], [0.6330342888832092, 0.6283842325210571], [0.6328537464141846, 0.6255544424057007], [0.6327764987945557, 0.6227988004684448], [0.632633626461029, 0.6199452877044678], [0.6325832605361938, 0.6171767711639404], [0.6324692368507385, 0.6143074631690979], [0.6324560642242432, 0.6115366220474243], [0.6323910355567932, 0.6086761951446533], [0.632424533367157, 0.6058956980705261], [0.6324061751365662, 0.6030251979827881], [0.6324842572212219, 0.6002485752105713], [0.6325100660324097, 0.597381591796875], [0.6326351165771484, 0.5945860147476196], [0.6327149868011475, 0.5917196273803711], [0.6328915357589722, 0.588956356048584], [0.6330192685127258, 0.5861160755157471], [0.6332380771636963, 0.5833823680877686], [0.6334086656570435, 0.5805947780609131], [0.6336669921875, 0.5779226422309875], [0.6338830590248108, 0.5752039551734924], [0.6341819763183594, 0.5725616216659546], [0.6344364881515503, 0.569893479347229], [0.634765625, 0.5673240423202515], [0.6350492238998413, 0.5647422671318054], [0.6353981494903564, 0.5622222423553467], [0.6357026100158691, 0.5596921443939209], [0.6360694169998169, 0.5572021007537842], [0.6363835334777832, 0.5546948909759521], [0.6367553472518921, 0.5522135496139526], [0.6370800733566284, 0.5497218370437622], [0.6374818682670593, 0.5472264289855957], [0.6378563642501831, 0.5447134971618652], [0.6383111476898193, 0.5422413349151611], [0.6387144327163696, 0.539778470993042], [0.63918137550354, 0.5373000502586365], [0.63958740234375, 0.534824013710022], [0.640061616897583, 0.5323419570922852], [0.6404879689216614, 0.5298618078231812], [0.6410024166107178, 0.5273432731628418], [0.6414661407470703, 0.5248385667800903], [0.6420150995254517, 0.5222971439361572], [0.6425188779830933, 0.5197467803955078], [0.643131673336029, 0.5171205997467041], [0.6436996459960938, 0.5145048499107361], [0.6444031000137329, 0.5117669701576233], [0.6450560092926025, 0.5090469717979431], [0.6458113193511963, 0.5062583684921265], [0.6465062499046326, 0.5035040378570557], [0.6473521590232849, 0.5006250143051147], [0.6481605768203735, 0.49779805541038513], [0.6491620540618896, 0.49481433629989624], [0.6501196622848511, 0.4918982982635498], [0.6512762308120728, 0.4888123571872711], [0.6523703932762146, 0.4858105182647705], [0.6536681652069092, 0.48263803124427795], [0.6549056172370911, 0.4795633554458618], [0.6650675535202026, 0.8247722387313843], [0.6639861464500427, 0.8217536211013794], [0.6630128026008606, 0.8189553022384644], [0.6618198752403259, 0.8154179453849792], [0.6608967185020447, 0.8122502565383911], [0.6599530577659607, 0.808899998664856], [0.6591929197311401, 0.805828332901001], [0.6583658456802368, 0.8025070428848267], [0.657704770565033, 0.7994704246520996], [0.6569671034812927, 0.7961467504501343], [0.6563987731933594, 0.7930900454521179], [0.6557640433311462, 0.7896384000778198], [0.6553001999855042, 0.7864227890968323], [0.6548581719398499, 0.7836250066757202], [0.6545066237449646, 0.7810104489326477], [0.6540562510490417, 0.7776749134063721], [0.6540374159812927, 0.7743955254554749], [0.6539125442504883, 0.7724077701568604], [0.653697669506073, 0.7707484364509583], [0.6532623171806335, 0.7691335678100586], [0.6525484323501587, 0.7667942643165588], [0.6518920660018921, 0.7647106647491455], 
[0.6514391303062439, 0.7627946734428406], [0.6510419845581055, 0.7605284452438354], [0.6507775187492371, 0.7585165500640869], [0.6505081057548523, 0.7562307119369507], [0.6502907276153564, 0.7543402910232544], [0.6499180197715759, 0.7526600956916809], [0.6498510837554932, 0.7513152956962585], [0.6498271226882935, 0.7502573728561401], [0.6497929096221924, 0.7493102550506592], [0.6497480869293213, 0.7471675872802734], [0.6493433117866516, 0.7455912828445435], [0.6481022834777832, 0.7419734001159668], [0.6475720405578613, 0.7384467720985413], [0.6471477746963501, 0.7352113127708435], [0.646836519241333, 0.7323259115219116], [0.6464377641677856, 0.7293984889984131], [0.6461043357849121, 0.726594090461731], [0.645662248134613, 0.7236500978469849], [0.6452924013137817, 0.7208203077316284], [0.6447840929031372, 0.7176629900932312], [0.6444264650344849, 0.7148025035858154], [0.6439635753631592, 0.7117508053779602], [0.6436362862586975, 0.7088844776153564], [0.6432187557220459, 0.7058520317077637], [0.6429212093353271, 0.7029979228973389], [0.6425296068191528, 0.7000316977500916], [0.642266035079956, 0.6972627639770508], [0.6419030427932739, 0.694329023361206], [0.6416472792625427, 0.691562294960022], [0.641292929649353, 0.6886536478996277], [0.6410446166992188, 0.6859257817268372], [0.6407015323638916, 0.6830527782440186], [0.6404696702957153, 0.6803412437438965], [0.6401474475860596, 0.6775076389312744], [0.6399304270744324, 0.6748380064964294], [0.6396222114562988, 0.6720224618911743], [0.6394206285476685, 0.6693570613861084], [0.6391295194625854, 0.6665282249450684], [0.6389337778091431, 0.6638481616973877], [0.6386498212814331, 0.6610347032546997], [0.6384742259979248, 0.6583676934242249], [0.6382145881652832, 0.6555707454681396], [0.6380549669265747, 0.6529073119163513], [0.6378093957901001, 0.650100827217102], [0.6376532912254333, 0.6474257707595825], [0.63741135597229, 0.6446117758750916], [0.6372714042663574, 0.6419214010238647], [0.6370498538017273, 0.6391258239746094], [0.6369239091873169, 0.6364391446113586], [0.6367186903953552, 0.6336207389831543], [0.6366088390350342, 0.6309130191802979], [0.6364273428916931, 0.628120481967926], [0.6363466382026672, 0.6254209280014038], [0.6361952424049377, 0.6225907802581787], [0.6361331343650818, 0.6198430061340332], [0.6360056400299072, 0.6170151233673096], [0.6359822750091553, 0.6142632961273193], [0.6359017491340637, 0.6114442348480225], [0.6359177827835083, 0.6086935997009277], [0.6358773112297058, 0.6058728694915771], [0.6359382271766663, 0.6031110882759094], [0.6359409689903259, 0.6002925038337708], [0.6360369920730591, 0.5975210666656494], [0.6360819935798645, 0.5947096347808838], [0.6362311840057373, 0.591942310333252], [0.6363269090652466, 0.5891560316085815], [0.6365190148353577, 0.5863993167877197], [0.6366564631462097, 0.5836555361747742], [0.6368905305862427, 0.5809417963027954], [0.6370681524276733, 0.5782672166824341], [0.6373385190963745, 0.5756183862686157], [0.6375612616539001, 0.5729763507843018], [0.6378672122955322, 0.570361852645874], [0.6381166577339172, 0.5677945613861084], [0.6384356021881104, 0.5652366876602173], [0.638701319694519, 0.5627329349517822], [0.6390459537506104, 0.5602291822433472], [0.639338493347168, 0.5577430725097656], [0.6397014856338501, 0.555241048336029], [0.6400095224380493, 0.552776575088501], [0.6403892040252686, 0.5502961874008179], [0.6407241821289062, 0.5478532910346985], [0.6411482095718384, 0.545362114906311], [0.641514003276825, 0.5429262518882751], [0.6419601440429688, 0.5404481887817383], 
[0.6423501372337341, 0.5380010604858398], [0.6428201198577881, 0.5355008840560913], [0.6432255506515503, 0.5330725908279419], [0.6437210440635681, 0.5305829048156738], [0.6441624760627747, 0.5281317234039307], [0.6446996927261353, 0.5256022214889526], [0.6451734304428101, 0.5231322646141052], [0.6457539796829224, 0.5205680727958679], [0.6462851762771606, 0.5180381536483765], [0.6469389200210571, 0.5154072046279907], [0.6475487351417542, 0.512780487537384], [0.6482989192008972, 0.5100418329238892], [0.6489957571029663, 0.5073568820953369], [0.6498098373413086, 0.5045605301856995], [0.650558352470398, 0.5018411874771118], [0.6514722108840942, 0.49900591373443604], [0.6523508429527283, 0.49618902802467346], [0.6534090042114258, 0.49325627088546753], [0.6544395685195923, 0.4903334081172943], [0.6556527614593506, 0.4873107969760895], [0.6568301320075989, 0.4843045473098755], [0.6581855416297913, 0.4812042713165283], [0.6677274107933044, 0.8233621120452881], [0.6666622161865234, 0.8206824660301208], [0.6654713749885559, 0.8176913857460022], [0.664463222026825, 0.8146054148674011], [0.6634806990623474, 0.8112049698829651], [0.6626951694488525, 0.8081796765327454], [0.6618267297744751, 0.8048722743988037], [0.6611182689666748, 0.8018641471862793], [0.6603229641914368, 0.7986011505126953], [0.6596752405166626, 0.7955881953239441], [0.6589593291282654, 0.7923434376716614], [0.6584160327911377, 0.7892383337020874], [0.6578634977340698, 0.7858471274375916], [0.6575793027877808, 0.7833279967308044], [0.6572242379188538, 0.7804490327835083], [0.6570890545845032, 0.7776146531105042], [0.6570816040039062, 0.7744278907775879], [0.6571646332740784, 0.7722717523574829], [0.6568588614463806, 0.7698773145675659], [0.6563827991485596, 0.7679901123046875], [0.6554508805274963, 0.7654622197151184], [0.6549333333969116, 0.7637737393379211], [0.6544461250305176, 0.7618973255157471], [0.6541241407394409, 0.7600992918014526], [0.6538586020469666, 0.7579795718193054], [0.6537057757377625, 0.7560153007507324], [0.6533564925193787, 0.753574788570404], [0.6533882021903992, 0.752749502658844], [0.6534282565116882, 0.7511125206947327], [0.6532663702964783, 0.7498259544372559], [0.6531450152397156, 0.7489924430847168], [0.6526160836219788, 0.7465943098068237], [0.6518709659576416, 0.7442762851715088], [0.6510764360427856, 0.7412732839584351], [0.6504235863685608, 0.7379165291786194], [0.6501258611679077, 0.7349178194999695], [0.6497251987457275, 0.731779932975769], [0.6493378281593323, 0.7289696335792542], [0.6488026976585388, 0.7260047197341919], [0.6484124660491943, 0.723259687423706], [0.6479217410087585, 0.7202476263046265], [0.6475792527198792, 0.7173165082931519], [0.6471709609031677, 0.7142722606658936], [0.6468765139579773, 0.7114026546478271], [0.6464805603027344, 0.7083717584609985], [0.6462029814720154, 0.7055258750915527], [0.645823061466217, 0.7024998664855957], [0.6455751657485962, 0.6997243165969849], [0.6452282667160034, 0.6967881917953491], [0.6449784636497498, 0.6940138936042786], [0.6446237564086914, 0.6910889744758606], [0.6443873643875122, 0.6883527040481567], [0.6440578699111938, 0.685464084148407], [0.6438413262367249, 0.6827632784843445], [0.6435319185256958, 0.6799004077911377], [0.6433324217796326, 0.6772274971008301], [0.643038809299469, 0.6744092702865601], [0.6428488492965698, 0.6717482209205627], [0.6425678133964539, 0.6689496636390686], [0.6423884630203247, 0.666266679763794], [0.6421180963516235, 0.663446843624115], [0.6419543027877808, 0.6607881784439087], [0.6417030692100525, 
[... array continues with several hundred more normalized [x, y] coordinate pairs (values roughly 0.48-0.82), apparently per-frame tracked landmark data from the preprocessing pipeline; the raw numeric payload is omitted here for readability and carries no further human-readable information ...]
0.7167737483978271], [0.6868431568145752, 0.7140802145004272], [0.686730146408081, 0.7112252116203308], [0.6867466568946838, 0.7085198163986206], [0.686648964881897, 0.7056503891944885], [0.6866856217384338, 0.7029092311859131], [0.6866064667701721, 0.7000269889831543], [0.6866481900215149, 0.6972672343254089], [0.6865671277046204, 0.6943633556365967], [0.6866194605827332, 0.6916173696517944], [0.6865571737289429, 0.6887221336364746], [0.6866133809089661, 0.6860061883926392], [0.6865511536598206, 0.6831444501876831], [0.6866145730018616, 0.6804284453392029], [0.6865566968917847, 0.6775722503662109], [0.6866092085838318, 0.6748979687690735], [0.6865434646606445, 0.672079861164093], [0.6866004467010498, 0.669425368309021], [0.6865386962890625, 0.6666244268417358], [0.6865829229354858, 0.6640049815177917], [0.6865072846412659, 0.6612434387207031], [0.6865454912185669, 0.6586108207702637], [0.6864662766456604, 0.6558367013931274], [0.6865021586418152, 0.6532272100448608], [0.6864274144172668, 0.6504837274551392], [0.6864686608314514, 0.6478949785232544], [0.6863996386528015, 0.6451641321182251], [0.6864483952522278, 0.6425720453262329], [0.6863809823989868, 0.6398601531982422], [0.6864190101623535, 0.6372723579406738], [0.6863397359848022, 0.6345796585083008], [0.6863742470741272, 0.6320021152496338], [0.686303973197937, 0.6293109655380249], [0.6863523721694946, 0.6267555952072144], [0.6862971782684326, 0.6240906119346619], [0.6863455772399902, 0.6215583086013794], [0.6862835884094238, 0.6189277172088623], [0.6863300800323486, 0.6164103150367737], [0.6862748861312866, 0.6137863993644714], [0.6863274574279785, 0.611273467540741], [0.6862858533859253, 0.6086680293083191], [0.6863460540771484, 0.6061843037605286], [0.6863070726394653, 0.6036297082901001], [0.68636554479599, 0.601158857345581], [0.686328113079071, 0.5986294746398926], [0.6863933801651001, 0.5961697101593018], [0.6863672733306885, 0.5936537981033325], [0.6864339113235474, 0.5912197232246399], [0.6864104866981506, 0.5887348651885986], [0.6864831447601318, 0.5863523483276367], [0.6864663362503052, 0.5839328765869141], [0.6865413188934326, 0.5815517902374268], [0.6865360736846924, 0.5791457891464233], [0.6866284012794495, 0.5768053531646729], [0.6866431832313538, 0.5744385123252869], [0.6867457032203674, 0.5721400380134583], [0.6867673397064209, 0.5698214769363403], [0.6868787407875061, 0.5675537586212158], [0.6869178414344788, 0.5652830600738525], [0.6870446801185608, 0.5630474090576172], [0.6870946884155273, 0.5608193278312683], [0.6872285604476929, 0.5586133003234863], [0.6872921586036682, 0.556412398815155], [0.6874439716339111, 0.5542424917221069], [0.6875300407409668, 0.5520802736282349], [0.6876985430717468, 0.5499371290206909], [0.6878026127815247, 0.5478092432022095], [0.6879869699478149, 0.5456967353820801], [0.6881072521209717, 0.5436127185821533], [0.68831467628479, 0.5415281057357788], [0.6884655952453613, 0.539479672908783], [0.6887079477310181, 0.537408709526062], [0.6888918876647949, 0.5353691577911377], [0.6891582608222961, 0.5333447456359863], [0.6893675327301025, 0.5313549041748047], [0.6896673440933228, 0.5293694734573364], [0.6899150609970093, 0.527434766292572], [0.6902637481689453, 0.5254670977592468], [0.690565824508667, 0.523553729057312], [0.6909765005111694, 0.5216408967971802], [0.691346287727356, 0.5197669863700867], [0.6918402910232544, 0.5178824663162231], [0.6922998428344727, 0.5160465240478516], [0.69293212890625, 0.514172375202179], [0.6935356855392456, 0.5123407244682312], [0.6943082213401794, 
0.5104442238807678], [0.6950280666351318, 0.5086025595664978], [0.6960172057151794, 0.5066097378730774], [0.6969596743583679, 0.5046523213386536], [0.7003350853919983, 0.8037513494491577], [0.6988880038261414, 0.801741361618042], [0.6977352499961853, 0.7997194528579712], [0.6966131329536438, 0.7976886034011841], [0.6957213878631592, 0.7956675291061401], [0.694841206073761, 0.7936140298843384], [0.6942644119262695, 0.7916483879089355], [0.6936340928077698, 0.7894898653030396], [0.693204402923584, 0.7874659299850464], [0.6927201747894287, 0.7852600812911987], [0.6924334168434143, 0.7832164764404297], [0.6920921206474304, 0.7809345722198486], [0.6919265389442444, 0.7788625955581665], [0.6917049884796143, 0.7765648365020752], [0.6916244029998779, 0.7744736671447754], [0.691481351852417, 0.7721757888793945], [0.6914662718772888, 0.7701119780540466], [0.6913796663284302, 0.7678504586219788], [0.6914221048355103, 0.7658529281616211], [0.6913840174674988, 0.7637441754341125], [0.691448450088501, 0.7619137763977051], [0.69141685962677, 0.7599644660949707], [0.6914492845535278, 0.7582281827926636], [0.691373348236084, 0.7565214037895203], [0.6912810802459717, 0.7551221251487732], [0.691268265247345, 0.7534064650535583], [0.6912719011306763, 0.7517615556716919], [0.6911110877990723, 0.7495379447937012], [0.6910250782966614, 0.7474995851516724], [0.6908103227615356, 0.7451903223991394], [0.690718412399292, 0.743033230304718], [0.6905044317245483, 0.740541934967041], [0.6904408931732178, 0.7382063865661621], [0.6902611255645752, 0.7355362772941589], [0.690222442150116, 0.7330699563026428], [0.6900651454925537, 0.7302954196929932], [0.6900513768196106, 0.7277297973632812], [0.6899253129959106, 0.7249034643173218], [0.689929723739624, 0.7222949862480164], [0.6898162364959717, 0.7194372415542603], [0.6898302435874939, 0.7167965769767761], [0.689726710319519, 0.7139317989349365], [0.6897553205490112, 0.7112720608711243], [0.689666748046875, 0.7083888053894043], [0.6897059679031372, 0.705710768699646], [0.6896262168884277, 0.702802300453186], [0.6896751523017883, 0.7001013159751892], [0.6896045207977295, 0.69716477394104], [0.6896644234657288, 0.6944458484649658], [0.6896069049835205, 0.6915373802185059], [0.6896771788597107, 0.6888187527656555], [0.6896288394927979, 0.6859308481216431], [0.6897040605545044, 0.6832433938980103], [0.6896582245826721, 0.6803640723228455], [0.6897338032722473, 0.6776667833328247], [0.6896874904632568, 0.6748237609863281], [0.6897579431533813, 0.672166645526886], [0.6897056102752686, 0.669356107711792], [0.6897715330123901, 0.6667028665542603], [0.6897141933441162, 0.6639181971549988], [0.6897653937339783, 0.6612975001335144], [0.6896946430206299, 0.6585197448730469], [0.6897438764572144, 0.655888557434082], [0.6896777749061584, 0.6531391143798828], [0.6897280812263489, 0.6505398750305176], [0.689665675163269, 0.6478183269500732], [0.6897203922271729, 0.6452239751815796], [0.6896602511405945, 0.642504096031189], [0.6897106766700745, 0.639905571937561], [0.6896461248397827, 0.6371945738792419], [0.6896941065788269, 0.634613037109375], [0.6896265745162964, 0.6319284439086914], [0.6896747946739197, 0.6293548941612244], [0.6896157264709473, 0.626701831817627], [0.6896705627441406, 0.6241464614868164], [0.6896176934242249, 0.6215041875839233], [0.6896697878837585, 0.6189679503440857], [0.6896095275878906, 0.6163560152053833], [0.6896591186523438, 0.6138313412666321], [0.6896084547042847, 0.6112379431724548], [0.6896639466285706, 0.6087245941162109], [0.689619243144989, 
0.60616135597229], [0.6896727085113525, 0.6036763787269592], [0.6896268129348755, 0.601140022277832], [0.6896803379058838, 0.5986728668212891], [0.6896358728408813, 0.5961654186248779], [0.6896954774856567, 0.5937107801437378], [0.6896654367446899, 0.5912286639213562], [0.6897356510162354, 0.5888029932975769], [0.6897139549255371, 0.5863747596740723], [0.689777135848999, 0.5839847922325134], [0.6897512674331665, 0.5815855264663696], [0.6898266673088074, 0.5792276859283447], [0.6898188591003418, 0.5768704414367676], [0.6899049878120422, 0.5745418071746826], [0.6899095177650452, 0.5722240209579468], [0.6900006532669067, 0.5699266195297241], [0.6900098919868469, 0.567657470703125], [0.6901049017906189, 0.5654036402702332], [0.6901219487190247, 0.563176155090332], [0.6902285218238831, 0.5609543919563293], [0.6902609467506409, 0.55876624584198], [0.6903849840164185, 0.5565784573554993], [0.6904372572898865, 0.5544304847717285], [0.6905727386474609, 0.5522656440734863], [0.6906371712684631, 0.5501549243927002], [0.690786600112915, 0.5480185747146606], [0.6908707618713379, 0.5459511280059814], [0.6910412311553955, 0.5438514351844788], [0.6911440491676331, 0.5418196320533752], [0.6913293600082397, 0.5397543907165527], [0.6914534568786621, 0.5377490520477295], [0.691673994064331, 0.5356943607330322], [0.6918326020240784, 0.5337297916412354], [0.6920779347419739, 0.5317173600196838], [0.6922625303268433, 0.529813826084137], [0.6925405263900757, 0.5278511643409729], [0.6927632093429565, 0.5259836912155151], [0.6930981874465942, 0.5240594148635864], [0.693376898765564, 0.5222494602203369], [0.6937751770019531, 0.5203649997711182], [0.6941194534301758, 0.5186017751693726], [0.6946054697036743, 0.5167783498764038], [0.695046067237854, 0.515032172203064], [0.6956638097763062, 0.5132242441177368], [0.6962714791297913, 0.5114281177520752], [0.6970476508140564, 0.5096167325973511], [0.6978327631950378, 0.5077377557754517], [0.6988623142242432, 0.5058822631835938], [0.7015148401260376, 0.802585244178772], [0.7002768516540527, 0.8007293939590454], [0.6990143060684204, 0.7987564206123352], [0.6980957984924316, 0.7968907952308655], [0.6971518397331238, 0.7948528528213501], [0.6965876221656799, 0.7930199503898621], [0.6959777474403381, 0.7909857034683228], [0.695561408996582, 0.7890354990959167], [0.6950942277908325, 0.7869117259979248], [0.6948283910751343, 0.7849560379981995], [0.6945115327835083, 0.7827879190444946], [0.6943567991256714, 0.7807695269584656], [0.6941455602645874, 0.7785506248474121], [0.6940690279006958, 0.7765074968338013], [0.6939361095428467, 0.7742550373077393], [0.6939371824264526, 0.7722183465957642], [0.6938751935958862, 0.7699695825576782], [0.6939263343811035, 0.7679599523544312], [0.6939042210578918, 0.7657830715179443], [0.6939815878868103, 0.763888955116272], [0.6939675807952881, 0.7618666887283325], [0.6940536499023438, 0.7601151466369629], [0.6941028237342834, 0.7582259774208069], [0.6942206621170044, 0.7566211223602295], [0.6942778825759888, 0.7549031972885132], [0.6941506862640381, 0.7533050179481506], [0.6939090490341187, 0.7514467239379883], [0.6937993168830872, 0.7494372725486755], [0.6936183571815491, 0.74720698595047], [0.6935667395591736, 0.7451021671295166], [0.6933927536010742, 0.7427386045455933], [0.6933316588401794, 0.7404592037200928], [0.6931505799293518, 0.737959623336792], [0.6931127309799194, 0.7355013489723206], [0.692958652973175, 0.7328591346740723], [0.6929436922073364, 0.7302943468093872], [0.692815899848938, 0.7275593280792236], [0.6928303241729736, 
0.7249388694763184], [0.6927343606948853, 0.722152054309845], [0.6927692294120789, 0.7194900512695312], [0.6926834583282471, 0.7166611552238464], [0.6927262544631958, 0.7139958739280701], [0.6926511526107788, 0.7111575603485107], [0.6927036046981812, 0.7084652185440063], [0.6926319003105164, 0.7056021094322205], [0.6926882266998291, 0.7028865814208984], [0.6926218867301941, 0.7000084519386292], [0.6926945447921753, 0.6972662806510925], [0.6926466822624207, 0.6943735480308533], [0.6927264928817749, 0.6916465759277344], [0.6926872134208679, 0.688761830329895], [0.6927788257598877, 0.6860536336898804], [0.692746639251709, 0.6831915378570557], [0.6928362250328064, 0.6804819107055664], [0.692805826663971, 0.6776218414306641], [0.692899227142334, 0.6749398708343506], [0.6928658485412598, 0.6721124649047852], [0.6929453611373901, 0.6694537401199341], [0.6928987503051758, 0.6666430234909058], [0.6929693222045898, 0.6640000939369202], [0.6929107904434204, 0.661217451095581], [0.6929721236228943, 0.6585918068885803], [0.6929150223731995, 0.6558166742324829], [0.6929795742034912, 0.6532124280929565], [0.692924976348877, 0.650471031665802], [0.6929827928543091, 0.647884726524353], [0.692922830581665, 0.6451594829559326], [0.6929813623428345, 0.6425702571868896], [0.6929286122322083, 0.6398442983627319], [0.6929954290390015, 0.6372618675231934], [0.6929455995559692, 0.6345534920692444], [0.6930087804794312, 0.6319827437400818], [0.6929516792297363, 0.6292954683303833], [0.6930058002471924, 0.6267541646957397], [0.6929491758346558, 0.6240991950035095], [0.6930063962936401, 0.621559202671051], [0.6929526925086975, 0.618919312953949], [0.6930065751075745, 0.6163948774337769], [0.692949116230011, 0.6137877702713013], [0.6929992437362671, 0.6112809777259827], [0.6929458379745483, 0.6086922287940979], [0.6929957270622253, 0.6062031388282776], [0.6929444074630737, 0.603645920753479], [0.6929956674575806, 0.6011791229248047], [0.6929458975791931, 0.5986447930335999], [0.6929953694343567, 0.5962027311325073], [0.692949652671814, 0.5937017798423767], [0.69300776720047, 0.5912830829620361], [0.6929769515991211, 0.5888200402259827], [0.6930406093597412, 0.5864243507385254], [0.6930056810379028, 0.5839881896972656], [0.6930669546127319, 0.5816408395767212], [0.6930405497550964, 0.5792609453201294], [0.6931097507476807, 0.5769370794296265], [0.6930919885635376, 0.5745969414710999], [0.6931655406951904, 0.5722974538803101], [0.6931505799293518, 0.5699869394302368], [0.6932234168052673, 0.5677335262298584], [0.6932108402252197, 0.5654747486114502], [0.6932868361473083, 0.5632642507553101], [0.6932879686355591, 0.5610546469688416], [0.6933814883232117, 0.5588811039924622], [0.6934019327163696, 0.5567140579223633], [0.6935064196586609, 0.5545618534088135], [0.6935375928878784, 0.5524226427078247], [0.69365394115448, 0.5503098368644714], [0.6937052011489868, 0.5482078790664673], [0.6938353180885315, 0.5461318492889404], [0.6939002275466919, 0.5440744161605835], [0.6940451860427856, 0.5420215725898743], [0.6941253542900085, 0.5400059223175049], [0.6942867040634155, 0.5379825830459595], [0.6943891048431396, 0.5360032320022583], [0.6945807337760925, 0.5340104699134827], [0.6947153210639954, 0.532068133354187], [0.6949318647384644, 0.5301403999328613], [0.6950964331626892, 0.528258740901947], [0.6953529119491577, 0.5263798236846924], [0.6955627202987671, 0.5245572328567505], [0.6958698630332947, 0.5227254033088684], [0.6961308717727661, 0.5209543704986572], [0.6965056657791138, 0.5191966891288757], [0.6968401670455933, 
0.5174900889396667], [0.6973075270652771, 0.5157564878463745], [0.6977412700653076, 0.5140989422798157], [0.6983742713928223, 0.5123376846313477], [0.6989786624908447, 0.5106427669525146], [0.699825644493103, 0.5088475942611694], [0.7006279230117798, 0.5070894360542297], [0.7026921510696411, 0.8015504479408264], [0.7013580799102783, 0.7997937798500061], [0.7003309726715088, 0.797947883605957], [0.6993799805641174, 0.7960960865020752], [0.6987931728363037, 0.7942739725112915], [0.6981980800628662, 0.7923658490180969], [0.6977904438972473, 0.7905493974685669], [0.6973342299461365, 0.7885123491287231], [0.6971018314361572, 0.7866508960723877], [0.6968162059783936, 0.7845536470413208], [0.6966851949691772, 0.7826515436172485], [0.6964973211288452, 0.7804775238037109], [0.6964424848556519, 0.7785134315490723], [0.6963298320770264, 0.7763020396232605], [0.6963464617729187, 0.7743219137191772], [0.6963056921958923, 0.7721035480499268], [0.6963726282119751, 0.7701045274734497], [0.6963754296302795, 0.7679123878479004], [0.6964825391769409, 0.7659725546836853], [0.6965121030807495, 0.7638757228851318], [0.6966160535812378, 0.762039303779602], [0.6966081857681274, 0.7600927352905273], [0.6966590881347656, 0.7584218978881836], [0.6966434121131897, 0.7566295862197876], [0.6966317892074585, 0.7549704909324646], [0.6966329216957092, 0.7529960870742798], [0.6966170072555542, 0.751319408416748], [0.6964808106422424, 0.7491778135299683], [0.6964649558067322, 0.7471673488616943], [0.6963167786598206, 0.7448455095291138], [0.696278989315033, 0.742682933807373], [0.6961168646812439, 0.7402005195617676], [0.6960809230804443, 0.7379112243652344], [0.6959232091903687, 0.7352888584136963], [0.6959161162376404, 0.7328591346740723], [0.6957902908325195, 0.730116605758667], [0.6958061456680298, 0.7275949716567993], [0.6957078576087952, 0.7248024344444275], [0.6957473158836365, 0.7222191691398621], [0.6956658363342285, 0.7193723320960999], [0.695717453956604, 0.7167409062385559], [0.6956483125686646, 0.7138936519622803], [0.6957135200500488, 0.7112555503845215], [0.6956534385681152, 0.7083719968795776], [0.6957224607467651, 0.7056996822357178], [0.6956701278686523, 0.7028026580810547], [0.6957492232322693, 0.7001173496246338], [0.6957063674926758, 0.6972075700759888], [0.6957999467849731, 0.6945011615753174], [0.6957715749740601, 0.6915980577468872], [0.6958716511726379, 0.6889024376869202], [0.6958497762680054, 0.6860188245773315], [0.6959511041641235, 0.6833279132843018], [0.6959279179573059, 0.680449366569519], [0.6960357427597046, 0.6777675151824951], [0.6960166096687317, 0.6749097108840942], [0.6961132287979126, 0.6722358465194702], [0.6960804462432861, 0.6694097518920898], [0.6961686611175537, 0.6667529344558716], [0.6961290836334229, 0.6639457941055298], [0.6962094306945801, 0.6613075733184814], [0.6961615085601807, 0.658532977104187], [0.6962364315986633, 0.6559096574783325], [0.6961926221847534, 0.6531597375869751], [0.6962664723396301, 0.650556206703186], [0.6962182521820068, 0.647823691368103], [0.6962843537330627, 0.6452338695526123], [0.696230411529541, 0.6425123810768127], [0.6962945461273193, 0.6399216651916504], [0.696242094039917, 0.63721764087677], [0.6963109970092773, 0.6346303224563599], [0.6962617635726929, 0.6319370269775391], [0.6963295340538025, 0.6293617486953735], [0.6962800621986389, 0.6267048120498657], [0.6963412165641785, 0.6241585612297058], [0.6962884664535522, 0.6215167045593262], [0.6963425874710083, 0.6189621090888977], [0.6962823271751404, 0.6163501739501953], [0.6963375806808472, 
0.6138318777084351], [0.6962848901748657, 0.6112403869628906], [0.696337878704071, 0.6087324023246765], [0.6962843537330627, 0.606165885925293], [0.6963348984718323, 0.6036797761917114], [0.6962782144546509, 0.6011438965797424], [0.6963261961936951, 0.5986753702163696], [0.696272611618042, 0.5961734056472778], [0.6963216662406921, 0.5937356352806091], [0.6962757110595703, 0.5912768840789795], [0.6963318586349487, 0.5888655185699463], [0.6962896585464478, 0.5864200592041016], [0.696344792842865, 0.5840262174606323], [0.696306049823761, 0.5816479921340942], [0.6963599324226379, 0.5792986154556274], [0.6963193416595459, 0.5769544839859009], [0.696374237537384, 0.5746403932571411], [0.69633948802948, 0.572324275970459], [0.696395754814148, 0.5700297355651855], [0.6963642239570618, 0.5677653551101685], [0.6964197158813477, 0.5655145645141602], [0.6963909864425659, 0.5633107423782349], [0.6964541077613831, 0.5611171722412109], [0.6964390277862549, 0.5589591264724731], [0.6965099573135376, 0.5567934513092041], [0.6965059638023376, 0.5546615123748779], [0.6965892314910889, 0.5525174140930176], [0.6965982913970947, 0.5504355430603027], [0.6966952085494995, 0.5483345985412598], [0.6967257857322693, 0.5462866425514221], [0.6968318819999695, 0.5442094802856445], [0.6968702673912048, 0.5422048568725586], [0.6969919204711914, 0.5401665568351746], [0.6970481872558594, 0.5381990671157837], [0.6971842646598816, 0.5361906290054321], [0.6972547173500061, 0.5342699885368347], [0.6974124312400818, 0.5322977304458618], [0.6975095272064209, 0.5304441452026367], [0.6976930499076843, 0.5285345911979675], [0.6978208422660828, 0.5267489552497864], [0.6980432271957397, 0.5248980522155762], [0.6982122659683228, 0.5231675505638123], [0.6984855532646179, 0.5213726162910461], [0.6987037658691406, 0.5197417736053467], [0.6990550756454468, 0.5180255770683289], [0.699359655380249, 0.5164164304733276], [0.6997963190078735, 0.5147572755813599], [0.7002186179161072, 0.5131632089614868], [0.7008579969406128, 0.5115147829055786], [0.7015042304992676, 0.5098341703414917], [0.7023853063583374, 0.5081887245178223], [0.7036460638046265, 0.8005430698394775], [0.7025636434555054, 0.7989255785942078], [0.7015171647071838, 0.797141969203949], [0.7009214162826538, 0.7954819798469543], [0.7003092169761658, 0.7936580181121826], [0.6999338865280151, 0.7919384241104126], [0.6995041370391846, 0.7900289297103882], [0.6992972493171692, 0.7882786393165588], [0.6990532875061035, 0.7862924933433533], [0.6989507675170898, 0.7844409942626953], [0.6987938284873962, 0.7823889255523682], [0.6987615823745728, 0.7804709076881409], [0.6986758708953857, 0.7783360481262207], [0.6987072229385376, 0.7763866186141968], [0.698681116104126, 0.7742299437522888], [0.6987568140029907, 0.7722572088241577], [0.6987736821174622, 0.7700766921043396], [0.6988954544067383, 0.768136739730835], [0.6989612579345703, 0.7660177946090698], [0.6991114020347595, 0.7641239166259766], [0.6991639137268066, 0.7620531320571899], [0.6992735862731934, 0.7602618932723999], [0.6992640495300293, 0.7583406567573547], [0.6992806196212769, 0.7567130327224731], [0.6992843151092529, 0.7548728585243225], [0.6992877721786499, 0.7530975341796875], [0.699259877204895, 0.7511442303657532], [0.6992897987365723, 0.7491960525512695], [0.6991745233535767, 0.7469544410705566], [0.6991565823554993, 0.7448140382766724], [0.6990031599998474, 0.7424429655075073], [0.6989761590957642, 0.7401779890060425], [0.6988332271575928, 0.7376902103424072], [0.6988233327865601, 0.7352897524833679], [0.6987032294273376, 
0.7326931953430176], [0.6987337470054626, 0.7301662564277649], [0.6986472606658936, 0.7274612188339233], [0.6986903548240662, 0.7248724699020386], [0.6986129283905029, 0.7221102714538574], [0.698680579662323, 0.7194740772247314], [0.6986280083656311, 0.7166476249694824], [0.698698878288269, 0.7139970064163208], [0.6986445188522339, 0.7111780643463135], [0.6987265348434448, 0.7084865570068359], [0.6986833214759827, 0.7056320905685425], [0.6987743377685547, 0.7029300928115845], [0.698743462562561, 0.7000658512115479], [0.6988504528999329, 0.6973536014556885], [0.6988340616226196, 0.6944706439971924], [0.6989498734474182, 0.6917541027069092], [0.6989395022392273, 0.6888822317123413], [0.699056088924408, 0.6861737966537476], [0.6990424394607544, 0.683305025100708], [0.6991578340530396, 0.680608868598938], [0.6991504430770874, 0.6777596473693848], [0.6992701292037964, 0.675064206123352], [0.699255108833313, 0.672208845615387], [0.6993570923805237, 0.6695424914360046], [0.6993306279182434, 0.6667208671569824], [0.6994310617446899, 0.6640704870223999], [0.6994013786315918, 0.66126549243927], [0.6994878649711609, 0.6586376428604126], [0.6994498372077942, 0.6558712720870972], [0.6995350122451782, 0.6532624363899231], [0.6994959115982056, 0.6505163908004761], [0.6995768547058105, 0.6479153633117676], [0.6995280385017395, 0.6451817154884338], [0.6995983719825745, 0.6425920724868774], [0.6995466947555542, 0.639869749546051], [0.6996132135391235, 0.6372959613800049], [0.6995636224746704, 0.6345897912979126], [0.699639618396759, 0.6320209503173828], [0.699600875377655, 0.6293317079544067], [0.6996779441833496, 0.6267784237861633], [0.6996285915374756, 0.6241114139556885], [0.6996827125549316, 0.6215660572052002], [0.6996214389801025, 0.6189101338386536], [0.699680745601654, 0.6164029836654663], [0.6996309161186218, 0.6137959957122803], [0.6996901631355286, 0.6112875938415527], [0.6996381878852844, 0.6086939573287964], [0.6996899247169495, 0.6062031984329224], [0.6996309161186218, 0.6036349534988403], [0.6996749043464661, 0.6011688113212585], [0.6996152400970459, 0.5986360907554626], [0.6996622681617737, 0.5961999893188477], [0.6996077299118042, 0.5937067270278931], [0.6996530890464783, 0.5913031101226807], [0.6995993256568909, 0.5888485908508301], [0.6996503472328186, 0.5864498615264893], [0.6996018290519714, 0.584014892578125], [0.6996468901634216, 0.5816658735275269], [0.6995911598205566, 0.5792832374572754], [0.699631929397583, 0.5769676566123962], [0.699577808380127, 0.574633002281189], [0.6996151208877563, 0.5723345279693604], [0.6995599269866943, 0.5700246095657349], [0.6995965838432312, 0.5677751302719116], [0.6995469927787781, 0.5655192732810974], [0.6995868682861328, 0.5633231401443481], [0.6995384693145752, 0.561132550239563], [0.69957435131073, 0.5589774250984192], [0.6995338201522827, 0.5568298697471619], [0.6995853781700134, 0.5547038316726685], [0.6995620727539062, 0.5525820255279541], [0.6996232271194458, 0.550493597984314], [0.6996109485626221, 0.5484262704849243], [0.6996802091598511, 0.5463653802871704], [0.6996839046478271, 0.5443224906921387], [0.6997674703598022, 0.5423020720481873], [0.6997833847999573, 0.5403091907501221], [0.6998754739761353, 0.5383096933364868], [0.6998987793922424, 0.5363507270812988], [0.6999961137771606, 0.5343977212905884], [0.7000316381454468, 0.5324946641921997], [0.7001480460166931, 0.5306121110916138], [0.7002114057540894, 0.5287810564041138], [0.7003536224365234, 0.5269622802734375], [0.7004464864730835, 0.5252088308334351], [0.7006258964538574, 
0.5234425663948059], [0.7007594704627991, 0.521761417388916], [0.7009885311126709, 0.5201113820075989], [0.7011858820915222, 0.5185476541519165], [0.7015184760093689, 0.5169332027435303], [0.7018164396286011, 0.5154145359992981], [0.7022509574890137, 0.5138518810272217], [0.702660083770752, 0.5123757123947144], [0.7033439874649048, 0.5107959508895874], [0.7040021419525146, 0.5092546939849854], [0.7046351432800293, 0.7996161580085754], [0.7035272121429443, 0.7981488704681396], [0.7028535604476929, 0.7965460419654846], [0.7022495269775391, 0.7948946952819824], [0.701910138130188, 0.793286144733429], [0.7015308737754822, 0.7914729714393616], [0.7013863325119019, 0.789848804473877], [0.7011953592300415, 0.7879523038864136], [0.7011225819587708, 0.7862257957458496], [0.7010000944137573, 0.7842159271240234], [0.7010024785995483, 0.7824239730834961], [0.7009558081626892, 0.7803322076797485], [0.7010170221328735, 0.7784601449966431], [0.7010276317596436, 0.7763254642486572], [0.7011292576789856, 0.7744170427322388], [0.7011731863021851, 0.7722510099411011], [0.701306939125061, 0.7703210115432739], [0.701377809047699, 0.7681931853294373], [0.7015290856361389, 0.7663073539733887], [0.7016106843948364, 0.7642076015472412], [0.7017719149589539, 0.7623149156570435], [0.7018203139305115, 0.7602752447128296], [0.7019107937812805, 0.7584686279296875], [0.7019025087356567, 0.7566084265708923], [0.7019071578979492, 0.7549923658370972], [0.7019518613815308, 0.7530568242073059], [0.702028751373291, 0.7512837052345276], [0.7019823789596558, 0.7490443587303162], [0.702018141746521, 0.7469899654388428], [0.7019157409667969, 0.7446106672286987], [0.7019286155700684, 0.7424450516700745], [0.7018223404884338, 0.7399866580963135], [0.7018413543701172, 0.7377073764801025], [0.7017393708229065, 0.7351197004318237], [0.7017698287963867, 0.7327419519424438], [0.7016755938529968, 0.7300398349761963], [0.7017244696617126, 0.7275415658950806], [0.7016526460647583, 0.724765419960022], [0.7017226219177246, 0.7222162485122681], [0.7016679048538208, 0.7193976640701294], [0.7017529606819153, 0.7167773246765137], [0.7017159461975098, 0.7139282822608948], [0.701809823513031, 0.7113100290298462], [0.7017756700515747, 0.7084334492683411], [0.7018764019012451, 0.7057734727859497], [0.7018521428108215, 0.7028930187225342], [0.7019670605659485, 0.700230062007904], [0.7019561529159546, 0.6973351240158081], [0.7020771503448486, 0.6946427822113037], [0.702070415019989, 0.6917474269866943], [0.7021998167037964, 0.6890645623207092], [0.7021993398666382, 0.6861677169799805], [0.7023240923881531, 0.6834750175476074], [0.7023196816444397, 0.6806084513664246], [0.7024465799331665, 0.6779349446296692], [0.7024416923522949, 0.6750632524490356], [0.7025637626647949, 0.6723688840866089], [0.7025531530380249, 0.669527530670166], [0.7026669979095459, 0.6668704748153687], [0.702650785446167, 0.664054811000824], [0.702755868434906, 0.6613964438438416], [0.7027285695075989, 0.65860915184021], [0.7028236389160156, 0.6559916734695435], [0.7027924656867981, 0.6532320976257324], [0.7028825879096985, 0.6506273746490479], [0.702844500541687, 0.6478787064552307], [0.7029298543930054, 0.6452797651290894], [0.7028889656066895, 0.6425521373748779], [0.7029701471328735, 0.6399606466293335], [0.7029260396957397, 0.6372513175010681], [0.7030017971992493, 0.6346760988235474], [0.7029539346694946, 0.6319905519485474], [0.7030292749404907, 0.6294217109680176], [0.7029821872711182, 0.6267461776733398], [0.703049898147583, 0.6241744756698608], [0.7029979228973389, 
0.6215159893035889], [0.7030628323554993, 0.6189687252044678], [0.7030102014541626, 0.6163638234138489], [0.7030680179595947, 0.6138486862182617], [0.7030116319656372, 0.6112492680549622], [0.703064501285553, 0.608733594417572], [0.7030045986175537, 0.6061595678329468], [0.7030550837516785, 0.60366290807724], [0.70299232006073, 0.6011196374893188], [0.7030320763587952, 0.598651647567749], [0.7029634714126587, 0.5961574912071228], [0.7030028700828552, 0.593723475933075], [0.7029385566711426, 0.5912604331970215], [0.7029734253883362, 0.5888582468032837], [0.702907919883728, 0.5864214897155762], [0.7029443979263306, 0.5840233564376831], [0.7028815746307373, 0.5816327333450317], [0.7029191255569458, 0.5792840719223022], [0.7028565406799316, 0.5769338607788086], [0.7028823494911194, 0.5746157169342041], [0.7028095126152039, 0.5722946524620056], [0.702826738357544, 0.5699963569641113], [0.7027528285980225, 0.5677424073219299], [0.7027756571769714, 0.5655025243759155], [0.702708899974823, 0.563296914100647], [0.7027236223220825, 0.5610952973365784], [0.7026523947715759, 0.5589542388916016], [0.7026740312576294, 0.5568151473999023], [0.7026171684265137, 0.5547114610671997], [0.7026506662368774, 0.5525906085968018], [0.7026059627532959, 0.5505170822143555], [0.7026370763778687, 0.5484302639961243], [0.7025955319404602, 0.5464099049568176], [0.7026384472846985, 0.5443572998046875], [0.7026116847991943, 0.5423687696456909], [0.7026574015617371, 0.5403460264205933], [0.7026339769363403, 0.5383908748626709], [0.7026824951171875, 0.5363893508911133], [0.7026652693748474, 0.534496545791626], [0.7027255296707153, 0.532555341720581], [0.7027233242988586, 0.5307477116584778], [0.7027969360351562, 0.5288773775100708], [0.7028166651725769, 0.5271420478820801], [0.7029070854187012, 0.5253384113311768], [0.7029520869255066, 0.5236894488334656], [0.7030885815620422, 0.5219660997390747], [0.7031711339950562, 0.5204377174377441], [0.7033423185348511, 0.5188403129577637], [0.7034754753112793, 0.5173835158348083], [0.7037333846092224, 0.515819787979126], [0.7039665579795837, 0.5144287943840027], [0.7043783664703369, 0.5129795670509338], [0.7047932744026184, 0.5115927457809448], [0.7055290937423706, 0.5101975202560425], [0.7054121494293213, 0.7987865805625916], [0.7046659588813782, 0.7974721193313599], [0.7040113210678101, 0.7960034608840942], [0.7037484645843506, 0.7945817708969116], [0.7034417986869812, 0.792873740196228], [0.7033597230911255, 0.7913708686828613], [0.703244686126709, 0.7896035313606262], [0.7032228112220764, 0.7879343032836914], [0.7031419277191162, 0.7860372066497803], [0.7031785845756531, 0.7843095660209656], [0.7031735777854919, 0.7823308110237122], [0.7032625675201416, 0.7805032730102539], [0.7033052444458008, 0.7784451246261597], [0.7034392356872559, 0.776562511920929], [0.7035180330276489, 0.7744500637054443], [0.7036747932434082, 0.7725273370742798], [0.7037671208381653, 0.7703907489776611], [0.7039341926574707, 0.7684988975524902], [0.7040276527404785, 0.766392707824707], [0.7041893601417542, 0.7645143270492554], [0.7042905688285828, 0.7624309062957764], [0.7044938206672668, 0.760587751865387], [0.704634428024292, 0.7585514783859253], [0.7047907114028931, 0.7568180561065674], [0.7048693895339966, 0.7549452781677246], [0.704870879650116, 0.7531846165657043], [0.7047781944274902, 0.7511608004570007], [0.7047947645187378, 0.7491228580474854], [0.7047117352485657, 0.7468562722206116], [0.7047691345214844, 0.7446919679641724], [0.7047142386436462, 0.7423101663589478], [0.7047630548477173, 
0.7400500774383545], [0.7046864032745361, 0.7375715970993042], [0.7047375440597534, 0.735195517539978], [0.7046595215797424, 0.7326107025146484], [0.7047123312950134, 0.7301249504089355], [0.7046443819999695, 0.7274436950683594], [0.7047291398048401, 0.7248929142951965], [0.7046902179718018, 0.7221462726593018], [0.7047821879386902, 0.7195295095443726], [0.7047455906867981, 0.7167257070541382], [0.704852283000946, 0.7140854597091675], [0.7048337459564209, 0.7112683057785034], [0.7049508690834045, 0.7085980176925659], [0.704940140247345, 0.7057455778121948], [0.7050658464431763, 0.7030704021453857], [0.705062210559845, 0.7002204656600952], [0.7051934599876404, 0.6975176334381104], [0.7051929235458374, 0.6946398019790649], [0.7053301930427551, 0.6919403076171875], [0.7053356766700745, 0.6890761256217957], [0.7054762244224548, 0.6863596439361572], [0.7054839134216309, 0.6834825873374939], [0.7056212425231934, 0.6807991862297058], [0.7056256532669067, 0.6779457926750183], [0.7057593464851379, 0.6752431392669678], [0.7057603597640991, 0.6723776459693909], [0.705894947052002, 0.6697063446044922], [0.7058959007263184, 0.6668738126754761], [0.7060153484344482, 0.6642084717750549], [0.7059985995292664, 0.6613852977752686], [0.7061097621917725, 0.6587496995925903], [0.7060877680778503, 0.6559733152389526], [0.7061871886253357, 0.6533583402633667], [0.7061548233032227, 0.6506009697914124], [0.7062501311302185, 0.6479970216751099], [0.7062181234359741, 0.6452528834342957], [0.7063085436820984, 0.6426613330841064], [0.7062729597091675, 0.6399343013763428], [0.706362783908844, 0.6373531818389893], [0.7063214778900146, 0.6346385478973389], [0.7063964009284973, 0.6320761442184448], [0.7063440680503845, 0.6293838620185852], [0.7064131498336792, 0.6268230080604553], [0.706364631652832, 0.6241374015808105], [0.7064380645751953, 0.6215903759002686], [0.7063925266265869, 0.6189340353012085], [0.706457257270813, 0.6164197325706482], [0.7063997387886047, 0.613803505897522], [0.7064542174339294, 0.6112929582595825], [0.7063953280448914, 0.6086868643760681], [0.706447422504425, 0.6061934232711792], [0.7063848376274109, 0.6036168336868286], [0.7064294815063477, 0.6011356711387634], [0.7063567042350769, 0.598588228225708], [0.7063875198364258, 0.596156656742096], [0.7063072919845581, 0.5936636924743652], [0.7063354849815369, 0.5912579298019409], [0.7062572240829468, 0.5887963175773621], [0.7062815427780151, 0.5864130854606628], [0.7062055468559265, 0.5839720368385315], [0.7062345147132874, 0.5816231966018677], [0.706161379814148, 0.5792375802993774], [0.7061831951141357, 0.5769067406654358], [0.706098198890686, 0.574548065662384], [0.7061048746109009, 0.5722458362579346], [0.7060122489929199, 0.5699219107627869], [0.706012487411499, 0.5676827430725098], [0.7059177160263062, 0.5654412508010864], [0.7059144377708435, 0.5632318258285522], [0.7058203220367432, 0.5610252022743225], [0.7058184742927551, 0.5588908195495605], [0.7057297825813293, 0.5567628145217896], [0.7057280540466309, 0.5546561479568481], [0.7056477665901184, 0.552564799785614], [0.7056562900543213, 0.5504732131958008], [0.7055843472480774, 0.5484021902084351], [0.705586314201355, 0.5463644862174988], [0.7055134773254395, 0.5443507432937622], [0.7055157423019409, 0.5423297882080078], [0.7054473161697388, 0.5403461456298828], [0.7054494619369507, 0.5383501052856445], [0.7053822875022888, 0.5363976955413818], [0.7053877115249634, 0.5344635248184204], [0.7053290605545044, 0.5325890779495239], [0.7053403258323669, 0.5307270288467407], [0.705289900302887, 
0.5289350748062134], [0.705305278301239, 0.5271415710449219], [0.7052717804908752, 0.5254309177398682], [0.705306351184845, 0.5237323045730591], [0.7052988409996033, 0.5221301317214966], [0.7053772807121277, 0.5205507278442383], [0.7054111957550049, 0.5190671682357788], [0.7055172920227051, 0.5175635814666748], [0.7056072950363159, 0.5161827802658081], [0.7058157324790955, 0.5147855281829834], [0.7060140371322632, 0.5135617256164551], [0.7064447999000549, 0.5122998952865601], [0.7068765759468079, 0.5110776424407959], [0.7061820030212402, 0.798005223274231], [0.7055099606513977, 0.7970086336135864], [0.705390989780426, 0.7958320379257202], [0.7052298784255981, 0.794247031211853], [0.7052350044250488, 0.7928916215896606], [0.7052165865898132, 0.7912038564682007], [0.7052534818649292, 0.7896822094917297], [0.7052501440048218, 0.7878193855285645], [0.7053431868553162, 0.7862014770507812], [0.7054028511047363, 0.7842873334884644], [0.7055367827415466, 0.7825700044631958], [0.7056275606155396, 0.7805364727973938], [0.7057886123657227, 0.7787441611289978], [0.7059056758880615, 0.7766534090042114], [0.70609050989151, 0.7747827768325806], [0.7062159180641174, 0.7726374864578247], [0.706413745880127, 0.7707527279853821], [0.7065463066101074, 0.7686301469802856], [0.7067394852638245, 0.7667393088340759], [0.7068518996238708, 0.7646340131759644], [0.7070329189300537, 0.7628010511398315], [0.7071276903152466, 0.7607421875], [0.7072796821594238, 0.7588832378387451], [0.7073549628257751, 0.7569007873535156], [0.7074410915374756, 0.7551462054252625], [0.7075106501579285, 0.7530917525291443], [0.7075963020324707, 0.7512608766555786], [0.7075752019882202, 0.749021053314209], [0.7076592445373535, 0.7469776272773743], [0.7076337337493896, 0.7446292638778687], [0.7077330350875854, 0.7424564957618713], [0.7077111601829529, 0.7399565577507019], [0.7077999114990234, 0.737696647644043], [0.7077603936195374, 0.735101580619812], [0.7078373432159424, 0.7327138781547546], [0.7077822685241699, 0.7300289869308472], [0.7078620195388794, 0.7275701761245728], [0.7078207731246948, 0.7248400449752808], [0.7079248428344727, 0.7223038673400879], [0.7078975439071655, 0.7194786667823792], [0.7080102562904358, 0.7168949842453003], [0.70799720287323, 0.714064359664917], [0.70811927318573, 0.7114472389221191], [0.7081097364425659, 0.708586573600769], [0.7082467079162598, 0.7059512734413147], [0.7082571983337402, 0.7030752897262573], [0.7083969116210938, 0.7004216909408569], [0.7084033489227295, 0.6975260376930237], [0.7085487246513367, 0.6948527097702026], [0.7085620164871216, 0.6919597387313843], [0.7087053060531616, 0.6892788410186768], [0.708710789680481, 0.6863815784454346], [0.7088573575019836, 0.6836932301521301], [0.7088701128959656, 0.6808227300643921], [0.7090098261833191, 0.6781413555145264], [0.7090152502059937, 0.6752621531486511], [0.7091584205627441, 0.6725775003433228], [0.7091661691665649, 0.6697303056716919], [0.7093007564544678, 0.6670559048652649], [0.709298312664032, 0.6642133593559265], [0.7094210386276245, 0.6615464687347412], [0.7094084620475769, 0.6587509512901306], [0.7095246315002441, 0.6561216711997986], [0.7095029354095459, 0.6533451080322266], [0.7096094489097595, 0.6507335901260376], [0.7095822095870972, 0.6479827165603638], [0.7096844911575317, 0.6453807353973389], [0.7096552848815918, 0.6426424980163574], [0.7097485065460205, 0.6400481462478638], [0.709709882736206, 0.6373292207717896], [0.709796130657196, 0.6347350478172302], [0.7097491025924683, 0.6320352554321289], [0.7098240852355957, 
0.6294623613357544], [0.7097727060317993, 0.626781702041626], [0.7098402976989746, 0.6242125034332275], [0.7097872495651245, 0.6215572953224182], [0.7098543047904968, 0.6190041899681091], [0.7097986936569214, 0.6163743734359741], [0.7098561525344849, 0.6138484477996826], [0.7097932696342468, 0.6112416982650757], [0.7098442912101746, 0.6087269186973572], [0.7097784280776978, 0.6061413884162903], [0.7098187208175659, 0.6036326885223389], [0.7097418308258057, 0.6010738611221313], [0.7097765803337097, 0.598587155342102], [0.7096964120864868, 0.5960817337036133], [0.7097200751304626, 0.5936436057090759], [0.7096328139305115, 0.5911835432052612], [0.7096549272537231, 0.5887770652770996], [0.7095718383789062, 0.586337685585022], [0.7095919251441956, 0.5839507579803467], [0.7095072269439697, 0.5815556645393372], [0.7095192670822144, 0.5791977643966675], [0.70942223072052, 0.5768235921859741], [0.7094222903251648, 0.5744825601577759], [0.7093189358711243, 0.572149932384491], [0.709309458732605, 0.5698386430740356], [0.7092008590698242, 0.5675793290138245], [0.709179699420929, 0.5653368234634399], [0.7090628743171692, 0.563125729560852], [0.7090425491333008, 0.5609259605407715], [0.7089321613311768, 0.5587887167930603], [0.7089044451713562, 0.5566504001617432], [0.7087888121604919, 0.5545578002929688], [0.7087578177452087, 0.5524587631225586], [0.708643913269043, 0.5503918528556824], [0.7086160182952881, 0.548302173614502], [0.7085082530975342, 0.5462769865989685], [0.7084697484970093, 0.544235348701477], [0.7083569765090942, 0.5422501564025879], [0.708317756652832, 0.5402316451072693], [0.7082068920135498, 0.5382746458053589], [0.7081584930419922, 0.5362718105316162], [0.7080435752868652, 0.5343970060348511], [0.7079967260360718, 0.5324701070785522], [0.7078853845596313, 0.5306706428527832], [0.7078313827514648, 0.5288094282150269], [0.7077221870422363, 0.5270992517471313], [0.7076711654663086, 0.5253278017044067], [0.7075831890106201, 0.523735761642456], [0.7075580358505249, 0.5220689177513123], [0.7074880599975586, 0.5206151604652405], [0.7074840068817139, 0.5190527439117432], [0.7074495553970337, 0.517685055732727], [0.7074527740478516, 0.5162175893783569], [0.7074485421180725, 0.5150486826896667], [0.7075309753417969, 0.5137323141098022], [0.7076157927513123, 0.512787938117981], [0.7080833911895752, 0.5117992162704468], [0.21261924505233765, 0.8557150363922119], [0.21035635471343994, 0.8522934913635254], [0.20834624767303467, 0.8483004570007324], [0.2062978744506836, 0.8448495864868164], [0.2045096755027771, 0.8408530950546265], [0.2026779055595398, 0.8373973369598389], [0.20110714435577393, 0.8334146738052368], [0.19947195053100586, 0.8299638032913208], [0.19810426235198975, 0.8260142803192139], [0.19665098190307617, 0.8225694894790649], [0.19547110795974731, 0.8186517953872681], [0.19418251514434814, 0.8152319192886353], [0.19316953420639038, 0.8113480806350708], [0.19203388690948486, 0.8079491853713989], [0.19117558002471924, 0.8041121959686279], [0.1901705265045166, 0.8007447719573975], [0.18944650888442993, 0.7969507575035095], [0.1885676383972168, 0.7936158776283264], [0.18796980381011963, 0.7898668050765991], [0.18719565868377686, 0.7865562438964844], [0.1867108941078186, 0.782842755317688], [0.18604952096939087, 0.7795656323432922], [0.1856810450553894, 0.7758962512016296], [0.1851176619529724, 0.7726616859436035], [0.18485254049301147, 0.7690443992614746], [0.184390127658844, 0.7658510208129883], [0.18422234058380127, 0.7622935771942139], [0.18384313583374023, 0.7591612339019775], 
[0.18375933170318604, 0.7556819319725037], [0.18346035480499268, 0.7526078224182129], [0.18345248699188232, 0.7491961717605591], [0.18321621417999268, 0.7461848258972168], [0.18327325582504272, 0.742843747138977], [0.18309813737869263, 0.7398887872695923], [0.1832122802734375, 0.7366119623184204], [0.1830843687057495, 0.7337002754211426], [0.18324029445648193, 0.7304843664169312], [0.18315428495407104, 0.7276135087013245], [0.1833498477935791, 0.7244411706924438], [0.1832868456840515, 0.7216098308563232], [0.18349909782409668, 0.7184727787971497], [0.18346184492111206, 0.715644896030426], [0.183699369430542, 0.7125180959701538], [0.18368369340896606, 0.7096859216690063], [0.18394213914871216, 0.7065634727478027], [0.18394851684570312, 0.7037240266799927], [0.1842266321182251, 0.7005990743637085], [0.1842491626739502, 0.6977548599243164], [0.18454432487487793, 0.6946282386779785], [0.1845862865447998, 0.6917687654495239], [0.18489831686019897, 0.6886336803436279], [0.18495690822601318, 0.685761570930481], [0.18528544902801514, 0.682624101638794], [0.18536561727523804, 0.6797369122505188], [0.18572497367858887, 0.676578164100647], [0.1858377456665039, 0.6736428737640381], [0.1862189769744873, 0.670416533946991], [0.18631505966186523, 0.6674346923828125], [0.18668442964553833, 0.6642341613769531], [0.18681132793426514, 0.661273717880249], [0.18721675872802734, 0.6580891013145447], [0.18738198280334473, 0.6551262140274048], [0.18782567977905273, 0.6519429683685303], [0.18802917003631592, 0.6489765644073486], [0.18850362300872803, 0.6458020806312561], [0.18872851133346558, 0.6428366899490356], [0.18921995162963867, 0.6397019028663635], [0.18949031829833984, 0.6367613077163696], [0.190027117729187, 0.6336365342140198], [0.19033241271972656, 0.6306979656219482], [0.19089818000793457, 0.6275931000709534], [0.191234290599823, 0.6246688365936279], [0.1918271780014038, 0.6215991973876953], [0.19218993186950684, 0.6186978816986084], [0.19280380010604858, 0.6156703233718872], [0.193190336227417, 0.6128139495849609], [0.19382041692733765, 0.6098408102989197], [0.19422847032546997, 0.6070146560668945], [0.19487160444259644, 0.604093074798584], [0.19529759883880615, 0.6013056635856628], [0.19594955444335938, 0.5984336137771606], [0.19638848304748535, 0.5956777334213257], [0.1970497965812683, 0.5928528308868408], [0.19751036167144775, 0.5901234149932861], [0.1981900930404663, 0.5873143672943115], [0.1986548900604248, 0.5846047401428223], [0.19933152198791504, 0.5818339586257935], [0.1998077630996704, 0.5791667699813843], [0.2004948854446411, 0.5764400362968445], [0.20098286867141724, 0.573823869228363], [0.20167696475982666, 0.5711402893066406], [0.20217818021774292, 0.5685557126998901], [0.20287954807281494, 0.5659141540527344], [0.20339488983154297, 0.5633512735366821], [0.20410394668579102, 0.5607436895370483], [0.20463991165161133, 0.5581845045089722], [0.20536088943481445, 0.5555915236473083], [0.20591169595718384, 0.5530358552932739], [0.2066444754600525, 0.5504531264305115], [0.20721107721328735, 0.5478980541229248], [0.20795631408691406, 0.5453233122825623], [0.20854151248931885, 0.5427621006965637], [0.20930010080337524, 0.5401936769485474], [0.20990800857543945, 0.5376230478286743], [0.21068668365478516, 0.5350531339645386], [0.21131795644760132, 0.5324724912643433], [0.21211957931518555, 0.5299012660980225], [0.21277862787246704, 0.5273063778877258], [0.2136082649230957, 0.5247265100479126], [0.21430432796478271, 0.522112250328064], [0.21516835689544678, 0.5195188522338867], [0.2159026861190796, 
[... data continues: a long sequence of normalized [x, y] coordinate pairs (all values in [0, 1]), apparently per-frame face/landmark tracking output checked into this diff as a data asset; the full numeric listing is elided here, and a loading sketch follows below ...]
0.68479323387146], [0.24831759929656982, 0.682077944278717], [0.24840760231018066, 0.6791160106658936], [0.24832451343536377, 0.6764051914215088], [0.2484263777732849, 0.673448920249939], [0.24835407733917236, 0.6707383394241333], [0.24846351146697998, 0.6677960157394409], [0.24840521812438965, 0.6650886535644531], [0.24852824211120605, 0.66215980052948], [0.24848198890686035, 0.6594597101211548], [0.2486191987991333, 0.656542956829071], [0.248587965965271, 0.6538533568382263], [0.24873816967010498, 0.6509539484977722], [0.24872052669525146, 0.6482614278793335], [0.24888157844543457, 0.6453762054443359], [0.24887710809707642, 0.642690896987915], [0.2490515112876892, 0.6398221850395203], [0.24906259775161743, 0.6371383666992188], [0.24924969673156738, 0.6342766284942627], [0.24927186965942383, 0.6316132545471191], [0.2494727373123169, 0.628791868686676], [0.24951040744781494, 0.6261513233184814], [0.24972134828567505, 0.6233606338500977], [0.24977660179138184, 0.6207338571548462], [0.2500009536743164, 0.6179677248001099], [0.25007307529449463, 0.6153558492660522], [0.25030970573425293, 0.6126173138618469], [0.2503979206085205, 0.6100153923034668], [0.25064969062805176, 0.6072978973388672], [0.2507554292678833, 0.6047122478485107], [0.25102198123931885, 0.6020230054855347], [0.251145601272583, 0.5994528532028198], [0.251426100730896, 0.5967838764190674], [0.2515658736228943, 0.5942416787147522], [0.2518593668937683, 0.5916138887405396], [0.2520187497138977, 0.5890895128250122], [0.25233006477355957, 0.586496889591217], [0.2525067925453186, 0.5840046405792236], [0.25283193588256836, 0.5814489126205444], [0.25302600860595703, 0.5789847373962402], [0.25336241722106934, 0.5764716267585754], [0.25357121229171753, 0.5740334987640381], [0.2539200186729431, 0.5715569257736206], [0.2541462182998657, 0.56914222240448], [0.2545057535171509, 0.5667019486427307], [0.25474846363067627, 0.5643147230148315], [0.25512343645095825, 0.5619087219238281], [0.2553842067718506, 0.5595479011535645], [0.2557758092880249, 0.5571794509887695], [0.2560651898384094, 0.554836630821228], [0.2564848065376282, 0.5524907112121582], [0.25679850578308105, 0.5501731634140015], [0.25723791122436523, 0.5478622913360596], [0.2575860619544983, 0.5455633401870728], [0.2580533027648926, 0.5432873368263245], [0.25843238830566406, 0.5410064458847046], [0.25892913341522217, 0.5387611389160156], [0.2593459486961365, 0.5365180969238281], [0.2598760724067688, 0.5343179702758789], [0.26033639907836914, 0.5320982336997986], [0.26090550422668457, 0.5299307107925415], [0.2614155411720276, 0.5277518630027771], [0.2620276212692261, 0.5256322622299194], [0.26259011030197144, 0.5234897136688232], [0.2632462978363037, 0.5214163064956665], [0.26386892795562744, 0.5193127393722534], [0.26458126306533813, 0.5172860622406006], [0.2652716040611267, 0.515232503414154], [0.26604098081588745, 0.5132648944854736], [0.26681047677993774, 0.5112584829330444], [0.2676495313644409, 0.5093419551849365], [0.2684975862503052, 0.5073965787887573], [0.26939964294433594, 0.5055431127548218], [0.2703673243522644, 0.5035816431045532], [0.27140164375305176, 0.5016770362854004], [0.2724992036819458, 0.49974676966667175], [0.27367687225341797, 0.4978311061859131], [0.269956111907959, 0.8120568990707397], [0.2687941789627075, 0.8099457025527954], [0.26770180463790894, 0.8079994916915894], [0.26668059825897217, 0.8058617115020752], [0.26573145389556885, 0.8039278388023376], [0.26483047008514404, 0.8017482757568359], [0.2639959454536438, 0.7998072504997253], 
[0.26320767402648926, 0.7975764870643616], [0.2624637484550476, 0.7956152558326721], [0.2617645859718323, 0.7933284044265747], [0.2610951066017151, 0.791336715221405], [0.26047348976135254, 0.7889929413795471], [0.25986093282699585, 0.7869522571563721], [0.25930601358413696, 0.784542441368103], [0.2587447166442871, 0.7824487686157227], [0.258259654045105, 0.7799882888793945], [0.25774699449539185, 0.7778544425964355], [0.25731754302978516, 0.7753516435623169], [0.2568507194519043, 0.773185133934021], [0.2564815878868103, 0.7706521153450012], [0.25606900453567505, 0.7684537768363953], [0.2557700276374817, 0.7658957242965698], [0.2554187774658203, 0.7636698484420776], [0.25518953800201416, 0.7611304521560669], [0.2548995018005371, 0.7589285373687744], [0.2547008991241455, 0.7564056515693665], [0.25443702936172485, 0.7542193531990051], [0.2543036937713623, 0.7516209483146667], [0.25407207012176514, 0.7493559122085571], [0.2539914846420288, 0.7467001676559448], [0.2537882328033447, 0.7443736791610718], [0.2537420988082886, 0.7416800260543823], [0.253564715385437, 0.7393056154251099], [0.2535433769226074, 0.7365797758102417], [0.2533798813819885, 0.7341735363006592], [0.25338298082351685, 0.7313951253890991], [0.2532306909561157, 0.7289167642593384], [0.25324302911758423, 0.7261145114898682], [0.2530990242958069, 0.7236193418502808], [0.2531239986419678, 0.7207894325256348], [0.2529863715171814, 0.7182521820068359], [0.25301915407180786, 0.7153894901275635], [0.252888560295105, 0.7128264904022217], [0.2529289722442627, 0.7099442481994629], [0.2528020143508911, 0.7073485851287842], [0.2528502941131592, 0.7044339179992676], [0.252732515335083, 0.7018060684204102], [0.25279080867767334, 0.6988697052001953], [0.2526761293411255, 0.6961972713470459], [0.2527374029159546, 0.6932384371757507], [0.25262534618377686, 0.6905500888824463], [0.2526911497116089, 0.6875935196876526], [0.2525845766067505, 0.6848907470703125], [0.2526564598083496, 0.6819292306900024], [0.2525578737258911, 0.6792237758636475], [0.25263679027557373, 0.6762786507606506], [0.252546489238739, 0.6735687255859375], [0.25263214111328125, 0.6706323027610779], [0.25255268812179565, 0.6679304838180542], [0.2526478171348572, 0.665012776851654], [0.25258004665374756, 0.6623135805130005], [0.2526892423629761, 0.6594109535217285], [0.2526349425315857, 0.656718373298645], [0.2527589797973633, 0.6538339853286743], [0.2527168393135071, 0.6511452198028564], [0.2528519034385681, 0.6482698917388916], [0.25282609462738037, 0.645587682723999], [0.25297510623931885, 0.6427278518676758], [0.2529600262641907, 0.640048623085022], [0.25311774015426636, 0.6372024416923523], [0.25311529636383057, 0.6345162391662598], [0.2532879114151001, 0.6317116618156433], [0.2533026337623596, 0.6290557384490967], [0.25348854064941406, 0.6262779235839844], [0.2535187005996704, 0.623640775680542], [0.25371289253234863, 0.6208897829055786], [0.2537580132484436, 0.6182681322097778], [0.25396549701690674, 0.6155414581298828], [0.25402653217315674, 0.6129355430603027], [0.25424492359161377, 0.610231876373291], [0.2543240785598755, 0.6076397895812988], [0.2545589208602905, 0.6049617528915405], [0.25465160608291626, 0.6023800373077393], [0.2548949718475342, 0.5997300148010254], [0.2550058364868164, 0.5971664190292358], [0.25526654720306396, 0.5945498943328857], [0.25539273023605347, 0.5920149683952332], [0.25566595792770386, 0.5894352197647095], [0.2558116912841797, 0.5869227647781372], [0.2561004161834717, 0.5843819975852966], [0.2562648057937622, 0.5818984508514404], 
[0.25656938552856445, 0.5793927311897278], [0.25674909353256226, 0.5769373178482056], [0.2570635676383972, 0.5744699239730835], [0.25725722312927246, 0.5720382928848267], [0.25758278369903564, 0.569605827331543], [0.25779271125793457, 0.5672012567520142], [0.25812971591949463, 0.5648085474967957], [0.2583577632904053, 0.5624302625656128], [0.2587113380432129, 0.5600771903991699], [0.25896215438842773, 0.5577313303947449], [0.2593355178833008, 0.5554136633872986], [0.25961679220199585, 0.553084671497345], [0.260018527507782, 0.550792932510376], [0.2603262662887573, 0.5484902858734131], [0.2607494592666626, 0.5462378263473511], [0.26109063625335693, 0.5439567565917969], [0.26154398918151855, 0.5417343974113464], [0.2619209289550781, 0.5394821166992188], [0.2624020576477051, 0.5373126268386841], [0.2628192901611328, 0.5350996255874634], [0.2633373737335205, 0.532960057258606], [0.2637990713119507, 0.5307759046554565], [0.2643517255783081, 0.5286943912506104], [0.26486313343048096, 0.5265532732009888], [0.2654598355293274, 0.5245122909545898], [0.2660234570503235, 0.5224128365516663], [0.2666623592376709, 0.5204377174377441], [0.26729637384414673, 0.5183876752853394], [0.2679938077926636, 0.5164676904678345], [0.2686976194381714, 0.514480710029602], [0.2694563865661621, 0.5126239061355591], [0.27024537324905396, 0.5106920003890991], [0.2710738182067871, 0.5088924169540405], [0.271941602230072, 0.5070392489433289], [0.2728809714317322, 0.5052204728126526], [0.2738892436027527, 0.5033518075942993], [0.27493685483932495, 0.501535177230835], [0.27607524394989014, 0.4996734857559204], [0.27318835258483887, 0.8096442222595215], [0.2721076011657715, 0.8077960014343262], [0.27108073234558105, 0.8057729005813599], [0.2701493501663208, 0.8039414882659912], [0.2692576050758362, 0.8018876314163208], [0.26844584941864014, 0.8000466823577881], [0.26766735315322876, 0.7979415655136108], [0.2669457793235779, 0.7960730791091919], [0.2662634253501892, 0.7939121723175049], [0.26561659574508667, 0.7920017242431641], [0.26501381397247314, 0.7897759675979614], [0.2644147276878357, 0.7878044247627258], [0.2638740539550781, 0.7855257987976074], [0.2633243203163147, 0.7834938168525696], [0.26284539699554443, 0.7811496257781982], [0.26233673095703125, 0.7790665626525879], [0.2619117498397827, 0.7766704559326172], [0.2614482641220093, 0.7745473384857178], [0.26107871532440186, 0.772110104560852], [0.26065945625305176, 0.7699621915817261], [0.26034772396087646, 0.7675107717514038], [0.2599787712097168, 0.7653404474258423], [0.2597283124923706, 0.762870192527771], [0.2594165802001953, 0.7606995105743408], [0.25920069217681885, 0.7582164406776428], [0.2589534521102905, 0.7560890316963196], [0.25880908966064453, 0.7535930275917053], [0.25857532024383545, 0.7514016032218933], [0.25848114490509033, 0.7488336563110352], [0.25827980041503906, 0.7465727925300598], [0.2582307457923889, 0.7439357042312622], [0.2580547332763672, 0.7416068315505981], [0.2580287456512451, 0.7389243841171265], [0.2578698396682739, 0.7365491390228271], [0.25786781311035156, 0.7338309288024902], [0.25770795345306396, 0.7313820123672485], [0.25771111249923706, 0.728604257106781], [0.2575650215148926, 0.7261245846748352], [0.25758272409439087, 0.7233312726020813], [0.25743788480758667, 0.7208056449890137], [0.25745707750320435, 0.7179811596870422], [0.2573200464248657, 0.715422511100769], [0.257348895072937, 0.7125734090805054], [0.25721418857574463, 0.7099827527999878], [0.2572476863861084, 0.7071103453636169], [0.2571181058883667, 0.7044864892959595], 
[0.25716090202331543, 0.701591432094574], [0.2570335268974304, 0.6989246606826782], [0.2570754289627075, 0.6959922909736633], [0.2569539546966553, 0.6933035850524902], [0.25700414180755615, 0.6903562545776367], [0.2568817734718323, 0.6876564025878906], [0.2569316029548645, 0.6847089529037476], [0.2568166255950928, 0.6820056438446045], [0.2568751573562622, 0.679062008857727], [0.25676506757736206, 0.676358699798584], [0.2568250894546509, 0.673424482345581], [0.2567271590232849, 0.6707314252853394], [0.2568008303642273, 0.6678097248077393], [0.25671231746673584, 0.6651227474212646], [0.2567927837371826, 0.6622203588485718], [0.2567187547683716, 0.6595419645309448], [0.25681573152542114, 0.6566569805145264], [0.25675296783447266, 0.6539837121963501], [0.25686168670654297, 0.6511077880859375], [0.25681328773498535, 0.6484402418136597], [0.2569340467453003, 0.6455798745155334], [0.25689536333084106, 0.6429111361503601], [0.2570251226425171, 0.640063464641571], [0.25699830055236816, 0.6373999118804932], [0.2571449279785156, 0.6345650553703308], [0.25713592767715454, 0.6319347620010376], [0.2572953701019287, 0.6291340589523315], [0.25729644298553467, 0.6265130639076233], [0.2574613690376282, 0.6237449645996094], [0.2574799656867981, 0.621143102645874], [0.25765883922576904, 0.6184051036834717], [0.25769317150115967, 0.6158145666122437], [0.25788432359695435, 0.6130976676940918], [0.2579341530799866, 0.6105251312255859], [0.2581369876861572, 0.6078351140022278], [0.258201003074646, 0.6052749156951904], [0.2584162950515747, 0.6025999784469604], [0.2584949731826782, 0.6000586748123169], [0.2587210536003113, 0.597419261932373], [0.2588156461715698, 0.5948988199234009], [0.25905662775039673, 0.5922980904579163], [0.2591698169708252, 0.5898092985153198], [0.2594273090362549, 0.5872430801391602], [0.2595571279525757, 0.584776759147644], [0.2598283290863037, 0.5822525024414062], [0.2599754333496094, 0.5798088908195496], [0.26025962829589844, 0.5773186683654785], [0.26042258739471436, 0.5749033093452454], [0.26071882247924805, 0.5724459290504456], [0.26089656352996826, 0.5700556039810181], [0.26120293140411377, 0.5676355361938477], [0.2613949775695801, 0.5652778148651123], [0.2617148756980896, 0.5629012584686279], [0.2619289755821228, 0.5605783462524414], [0.26226913928985596, 0.5582422018051147], [0.262504518032074, 0.5559431910514832], [0.26286423206329346, 0.5536491870880127], [0.26313531398773193, 0.551372230052948], [0.26352834701538086, 0.5491037368774414], [0.26382577419281006, 0.5468488335609436], [0.2642349600791931, 0.5446223020553589], [0.2645660638809204, 0.5423951148986816], [0.26500916481018066, 0.5402119159698486], [0.26537781953811646, 0.5380301475524902], [0.2658511996269226, 0.5358973741531372], [0.26626455783843994, 0.5337448120117188], [0.26677829027175903, 0.5316473841667175], [0.26723039150238037, 0.5295408964157104], [0.26777154207229614, 0.5275009274482727], [0.26827383041381836, 0.5254402160644531], [0.2688671946525574, 0.5234564542770386], [0.2694251537322998, 0.5214624404907227], [0.27006471157073975, 0.5195463299751282], [0.27069318294525146, 0.5176128149032593], [0.271395206451416, 0.5157726407051086], [0.27210062742233276, 0.5139037370681763], [0.2728666663169861, 0.5121217966079712], [0.2736528515815735, 0.510313868522644], [0.2744792103767395, 0.5086090564727783], [0.2753903865814209, 0.5068377256393433], [0.2763652205467224, 0.5050661563873291], [0.2773839831352234, 0.5033189058303833], [0.27847820520401, 0.5015320777893066], [0.2762071490287781, 0.807582676410675], 
[0.27520960569381714, 0.8056910037994385], [0.2742961645126343, 0.8039158582687378], [0.27343106269836426, 0.8019900321960449], [0.2726562023162842, 0.8002413511276245], [0.2718992233276367, 0.7982509136199951], [0.27121877670288086, 0.7964877486228943], [0.2705581784248352, 0.7944283485412598], [0.26995527744293213, 0.7926323413848877], [0.2693682312965393, 0.7904914617538452], [0.2688080668449402, 0.7886320352554321], [0.2682805061340332, 0.7864090204238892], [0.2677607536315918, 0.784497857093811], [0.2672833800315857, 0.7822164297103882], [0.2668013572692871, 0.7802454233169556], [0.2663787603378296, 0.7778950929641724], [0.26593315601348877, 0.77586829662323], [0.26555705070495605, 0.7734811305999756], [0.26514893770217896, 0.7714139223098755], [0.26482993364334106, 0.7690064311027527], [0.2644692659378052, 0.7669268846511841], [0.2642055153846741, 0.7645014524459839], [0.2638893127441406, 0.7624039649963379], [0.2636786103248596, 0.7599774599075317], [0.2634357810020447, 0.7578765153884888], [0.2632548213005066, 0.7554377317428589], [0.2630138397216797, 0.7533348798751831], [0.2628900408744812, 0.75084388256073], [0.26267898082733154, 0.748681902885437], [0.2626075744628906, 0.7461154460906982], [0.26242560148239136, 0.7438634634017944], [0.26239389181137085, 0.7412196397781372], [0.2622344493865967, 0.7388916015625], [0.26222091913223267, 0.7362021803855896], [0.2620624899864197, 0.7338140606880188], [0.2620633840560913, 0.7310560941696167], [0.2619146704673767, 0.7286093235015869], [0.261924147605896, 0.7258280515670776], [0.2617756724357605, 0.7233399152755737], [0.26178592443466187, 0.7205162048339844], [0.2616422176361084, 0.7179970741271973], [0.2616598606109619, 0.7151565551757812], [0.26152122020721436, 0.712598979473114], [0.2615431547164917, 0.7097254991531372], [0.2614046335220337, 0.7071387767791748], [0.2614302635192871, 0.7042477130889893], [0.26129579544067383, 0.7016304135322571], [0.26132845878601074, 0.698692798614502], [0.2611958384513855, 0.6960341930389404], [0.26122981309890747, 0.6930898427963257], [0.26109999418258667, 0.6903992891311646], [0.26113688945770264, 0.6874451637268066], [0.26100796461105347, 0.6847530007362366], [0.26104652881622314, 0.6818121671676636], [0.26092302799224854, 0.6791118383407593], [0.26096630096435547, 0.6761767268180847], [0.2608506679534912, 0.6734838485717773], [0.26089954376220703, 0.670574963092804], [0.2607928514480591, 0.6678856611251831], [0.26085513830184937, 0.664987325668335], [0.26075971126556396, 0.6623096466064453], [0.2608304023742676, 0.659436821937561], [0.2607470750808716, 0.6567666530609131], [0.26083076000213623, 0.6539045572280884], [0.2607630491256714, 0.6512377262115479], [0.26086097955703735, 0.6483895182609558], [0.26080358028411865, 0.6457208395004272], [0.2609100937843323, 0.6428845524787903], [0.26086390018463135, 0.6402186751365662], [0.26098084449768066, 0.6373990774154663], [0.2609502077102661, 0.6347479820251465], [0.26108384132385254, 0.6319646835327148], [0.2610616683959961, 0.6293237209320068], [0.26120179891586304, 0.6265678405761719], [0.26119911670684814, 0.6239566802978516], [0.2613537311553955, 0.6212300062179565], [0.26136207580566406, 0.6186307072639465], [0.2615256905555725, 0.6159310340881348], [0.261552631855011, 0.6133464574813843], [0.26172828674316406, 0.6106677055358887], [0.26176679134368896, 0.6080968379974365], [0.26195043325424194, 0.6054428815841675], [0.262004017829895, 0.6028816103935242], [0.26220154762268066, 0.6002544164657593], [0.26226717233657837, 0.5977119207382202], 
[0.26247233152389526, 0.5951259136199951], [0.2625551223754883, 0.5926158428192139], [0.2627753019332886, 0.590067446231842], [0.2628747224807739, 0.5875831246376038], [0.26311004161834717, 0.5850679874420166], [0.2632245421409607, 0.5826101303100586], [0.26347386837005615, 0.5801334381103516], [0.26360803842544556, 0.57769775390625], [0.26387012004852295, 0.5752542614936829], [0.2640175223350525, 0.572842001914978], [0.26429039239883423, 0.5704320669174194], [0.26445454359054565, 0.568048357963562], [0.2647389769554138, 0.5656838417053223], [0.26492005586624146, 0.563338577747345], [0.26521778106689453, 0.5610227584838867], [0.26541799306869507, 0.5587068796157837], [0.26573657989501953, 0.5564293265342712], [0.2659648060798645, 0.5541545152664185], [0.26630699634552, 0.5519158840179443], [0.2665674090385437, 0.5496521592140198], [0.26694053411483765, 0.5474364161491394], [0.26722973585128784, 0.5452054142951965], [0.2676246762275696, 0.5430465936660767], [0.26795369386672974, 0.5408562421798706], [0.2683812379837036, 0.5387412309646606], [0.2687416076660156, 0.5365900993347168], [0.26919448375701904, 0.5345242619514465], [0.26960068941116333, 0.5324006080627441], [0.2700895071029663, 0.5303797721862793], [0.2705312967300415, 0.5283055305480957], [0.2710450291633606, 0.5263644456863403], [0.2715423107147217, 0.5243510603904724], [0.2721068263053894, 0.5224794149398804], [0.27266454696655273, 0.5205349922180176], [0.273276686668396, 0.5187480449676514], [0.27390843629837036, 0.5168821811676025], [0.2745856046676636, 0.515160083770752], [0.27530205249786377, 0.5133552551269531], [0.2760450839996338, 0.5116982460021973], [0.27683937549591064, 0.5099849700927734], [0.27765947580337524, 0.5083354711532593], [0.27860909700393677, 0.5066016912460327], [0.2795688509941101, 0.5049285292625427], [0.2806400656700134, 0.5032371282577515], [0.2793229818344116, 0.8054046034812927], [0.27842217683792114, 0.8037310838699341], [0.27755922079086304, 0.8019347786903381], [0.2768157720565796, 0.8002899885177612], [0.27607983350753784, 0.7984397411346436], [0.27544260025024414, 0.7967804670333862], [0.27481067180633545, 0.7948470115661621], [0.2742452025413513, 0.7931463122367859], [0.27369195222854614, 0.7911357283592224], [0.2731579542160034, 0.7893447875976562], [0.27265632152557373, 0.7872495651245117], [0.27215540409088135, 0.785387396812439], [0.27169811725616455, 0.7832154035568237], [0.27122747898101807, 0.781293511390686], [0.27081602811813354, 0.7790745496749878], [0.2703784704208374, 0.7770920991897583], [0.2700137495994568, 0.7747963070869446], [0.26961249113082886, 0.772771954536438], [0.2692958116531372, 0.770445704460144], [0.2689312696456909, 0.7684003114700317], [0.2686566114425659, 0.7660584449768066], [0.26832032203674316, 0.7639830112457275], [0.26807117462158203, 0.7616233229637146], [0.26777857542037964, 0.7595792412757874], [0.26757359504699707, 0.7572499513626099], [0.2673628330230713, 0.7551705837249756], [0.2672535181045532, 0.752780020236969], [0.2670642137527466, 0.7506706118583679], [0.2669939398765564, 0.7482010126113892], [0.26681631803512573, 0.7460159659385681], [0.26678013801574707, 0.7434629201889038], [0.26662200689315796, 0.7411843538284302], [0.2666110396385193, 0.7385448217391968], [0.2664611339569092, 0.736175537109375], [0.26645904779434204, 0.7334879636764526], [0.2663065195083618, 0.7310431003570557], [0.2663055658340454, 0.728301465511322], [0.2661486268043518, 0.725810170173645], [0.2661440372467041, 0.7230370044708252], [0.2659934163093567, 0.7205145359039307], 
[0.2659999132156372, 0.7177124619483948], [0.26585185527801514, 0.7151594758033752], [0.26586151123046875, 0.7123323678970337], [0.26572126150131226, 0.7097400426864624], [0.265738844871521, 0.7068785429000854], [0.26559221744537354, 0.7042554616928101], [0.2656071186065674, 0.7013816237449646], [0.2654639482498169, 0.6987119913101196], [0.2654847502708435, 0.6957941055297852], [0.26534056663513184, 0.6931030750274658], [0.2653573751449585, 0.690170168876648], [0.2652197480201721, 0.6874668598175049], [0.26524364948272705, 0.6845302581787109], [0.2651045322418213, 0.6818292140960693], [0.26512575149536133, 0.6789017915725708], [0.26499342918395996, 0.6762056350708008], [0.2650240659713745, 0.6732907295227051], [0.26489710807800293, 0.6706072688102722], [0.264931857585907, 0.6677156686782837], [0.2648196220397949, 0.6650440096855164], [0.26486873626708984, 0.6621651649475098], [0.2647692561149597, 0.6595077514648438], [0.26482975482940674, 0.656646728515625], [0.26474058628082275, 0.6539947986602783], [0.26481425762176514, 0.6511502265930176], [0.26473772525787354, 0.6484949588775635], [0.2648215889930725, 0.6456552743911743], [0.2647566795349121, 0.6430041790008545], [0.26485151052474976, 0.6401786208152771], [0.26480042934417725, 0.6375350952148438], [0.2649049162864685, 0.6347379684448242], [0.26485955715179443, 0.6321123838424683], [0.2649746537208557, 0.6293329000473022], [0.2649487257003784, 0.6267422437667847], [0.26507818698883057, 0.6239998936653137], [0.2650626301765442, 0.6214143633842468], [0.26519882678985596, 0.6186964511871338], [0.2651980519294739, 0.6161354780197144], [0.26534605026245117, 0.6134456396102905], [0.265361487865448, 0.6108866333961487], [0.26551902294158936, 0.6082155704498291], [0.26554542779922485, 0.6056753396987915], [0.2657134532928467, 0.6030300259590149], [0.2657524347305298, 0.6005035638809204], [0.2659326195716858, 0.5978820323944092], [0.2659858465194702, 0.595392107963562], [0.266174852848053, 0.5928178429603577], [0.2662404775619507, 0.5903511047363281], [0.26644277572631836, 0.5878174304962158], [0.26652681827545166, 0.5853755474090576], [0.2667454481124878, 0.5828769207000732], [0.2668456435203552, 0.5804601907730103], [0.26707547903060913, 0.5779954195022583], [0.2671922445297241, 0.5755993127822876], [0.26743412017822266, 0.5731634497642517], [0.2675650119781494, 0.5707962512969971], [0.2678210139274597, 0.5684019327163696], [0.26797032356262207, 0.5660718679428101], [0.26823729276657104, 0.5637203454971313], [0.26840072870254517, 0.5614315271377563], [0.2686837911605835, 0.5591327548027039], [0.2688746452331543, 0.5568792819976807], [0.26917850971221924, 0.5546245574951172], [0.26939070224761963, 0.5523984432220459], [0.26971983909606934, 0.5501807332038879], [0.2699669599533081, 0.5479623079299927], [0.27032458782196045, 0.5457854270935059], [0.2706027626991272, 0.5436211228370667], [0.2709851861000061, 0.541504979133606], [0.2713050842285156, 0.5393717885017395], [0.27172040939331055, 0.5372913479804993], [0.2720673680305481, 0.5351994037628174], [0.27250421047210693, 0.533169150352478], [0.2728930115699768, 0.531116247177124], [0.2733670473098755, 0.5291379690170288], [0.27378714084625244, 0.5271628499031067], [0.27428746223449707, 0.5252809524536133], [0.27476656436920166, 0.5233764052391052], [0.2753182053565979, 0.5215768218040466], [0.2758610248565674, 0.5197637677192688], [0.2764655351638794, 0.5180509090423584], [0.27708494663238525, 0.5163089036941528], [0.2777596116065979, 0.5146620273590088], [0.27846604585647583, 
0.5130008459091187], [0.2792035937309265, 0.5113862752914429], [0.2799937129020691, 0.5097702741622925], [0.2808345556259155, 0.5081487894058228], [0.2817801833152771, 0.5065864324569702], [0.2827998399734497, 0.504956841468811], [0.282153844833374, 0.8036133050918579], [0.2813422679901123, 0.8019319772720337], [0.2806345224380493, 0.8003373742103577], [0.2799391746520996, 0.798628032207489], [0.2793540954589844, 0.79706871509552], [0.27874428033828735, 0.7952513098716736], [0.2782320976257324, 0.793681263923645], [0.27771568298339844, 0.7917705774307251], [0.27723902463912964, 0.7900980710983276], [0.2767593264579773, 0.7880522012710571], [0.276305615901947, 0.7863112688064575], [0.2758800983428955, 0.7842005491256714], [0.27545058727264404, 0.7823635339736938], [0.27505481243133545, 0.7801706194877625], [0.2746478319168091, 0.7782995700836182], [0.27429163455963135, 0.7760640382766724], [0.2739095091819763, 0.7741162776947021], [0.273586630821228, 0.7718183994293213], [0.273232102394104, 0.7698557376861572], [0.272963285446167, 0.7675526142120361], [0.2726449966430664, 0.7655434012413025], [0.27241992950439453, 0.7632174491882324], [0.2721611261367798, 0.7612054347991943], [0.27199846506118774, 0.7589097023010254], [0.27179133892059326, 0.7569221258163452], [0.2716127634048462, 0.7545795440673828], [0.27139389514923096, 0.7525675296783447], [0.27127087116241455, 0.7501647472381592], [0.27107882499694824, 0.7480810880661011], [0.2710114121437073, 0.7455862760543823], [0.2708514332771301, 0.7434329390525818], [0.27083730697631836, 0.7408335208892822], [0.27068352699279785, 0.7385250329971313], [0.27066725492477417, 0.7358424067497253], [0.27050793170928955, 0.7334668636322021], [0.2705003023147583, 0.7307305932044983], [0.2703467607498169, 0.7282829284667969], [0.27034109830856323, 0.7254998683929443], [0.2701892852783203, 0.7230210304260254], [0.2701868414878845, 0.7202243804931641], [0.27003562450408936, 0.7177003622055054], [0.2700335383415222, 0.7148749828338623], [0.2698875665664673, 0.7123379707336426], [0.2698964476585388, 0.709477424621582], [0.269747257232666, 0.706874668598175], [0.26975107192993164, 0.7039879560470581], [0.2696017026901245, 0.7013761401176453], [0.2696077823638916, 0.6984598636627197], [0.26945823431015015, 0.695793628692627], [0.2694655656814575, 0.6928495168685913], [0.26931607723236084, 0.6901666522026062], [0.26932287216186523, 0.687231183052063], [0.26917821168899536, 0.6845309138298035], [0.26918983459472656, 0.6815937757492065], [0.2690458297729492, 0.6789023876190186], [0.2690603733062744, 0.6759905219078064], [0.2689245939254761, 0.6732997894287109], [0.26894479990005493, 0.6704043745994568], [0.26881688833236694, 0.6677346229553223], [0.26884597539901733, 0.6648685336112976], [0.26873087882995605, 0.6622046232223511], [0.2687675356864929, 0.6593532562255859], [0.2686615586280823, 0.6567010283470154], [0.26871371269226074, 0.6538692712783813], [0.2686161994934082, 0.6512177586555481], [0.2686769962310791, 0.6483928561210632], [0.268593430519104, 0.6457399129867554], [0.2686675786972046, 0.6429282426834106], [0.2685965299606323, 0.6402785778045654], [0.26867955923080444, 0.637485146522522], [0.26862043142318726, 0.6348485946655273], [0.26871365308761597, 0.6320812106132507], [0.2686690092086792, 0.6294668912887573], [0.2687755227088928, 0.6267449855804443], [0.26873892545700073, 0.6241428852081299], [0.26885098218917847, 0.6214381456375122], [0.2688331604003906, 0.6188606023788452], [0.2689604163169861, 0.6161859035491943], [0.26895201206207275, 
0.6136223077774048], [0.26908570528030396, 0.6109633445739746], [0.2690922021865845, 0.6084080338478088], [0.26923656463623047, 0.6057744026184082], [0.2692524790763855, 0.6032348275184631], [0.26940417289733887, 0.600627064704895], [0.2694332003593445, 0.5981036424636841], [0.269592821598053, 0.595540463924408], [0.2696327567100525, 0.5930497646331787], [0.2698040008544922, 0.5905289649963379], [0.26986008882522583, 0.5880738496780396], [0.27004778385162354, 0.5855876207351685], [0.2701205015182495, 0.5831563472747803], [0.2703200578689575, 0.5806986689567566], [0.2704066038131714, 0.5782896280288696], [0.2706160545349121, 0.5758667588233948], [0.27071672677993774, 0.5734744071960449], [0.2709360122680664, 0.5710886120796204], [0.2710513472557068, 0.568734884262085], [0.27128326892852783, 0.5663959383964539], [0.2714148759841919, 0.5640698075294495], [0.2716594934463501, 0.56178879737854], [0.2718116044998169, 0.5595167875289917], [0.27207350730895996, 0.5572806596755981], [0.272247314453125, 0.555034875869751], [0.27252817153930664, 0.5528403520584106], [0.27272701263427734, 0.5506337881088257], [0.27304166555404663, 0.548470139503479], [0.27328312397003174, 0.5462901592254639], [0.2736252546310425, 0.5441898107528687], [0.27389973402023315, 0.5420568585395813], [0.2742617130279541, 0.5399976372718811], [0.27456748485565186, 0.5378892421722412], [0.2749575972557068, 0.5358659625053406], [0.2752886414527893, 0.5338038802146912], [0.27569568157196045, 0.5318443775177002], [0.2760618329048157, 0.5298219919204712], [0.2764921188354492, 0.5279490947723389], [0.27688872814178467, 0.5260247588157654], [0.2773459553718567, 0.5242562294006348], [0.2778036594390869, 0.5224151611328125], [0.2783060073852539, 0.520737886428833], [0.2788245677947998, 0.5189815163612366], [0.2793801426887512, 0.5173866748809814], [0.27998030185699463, 0.5156978368759155], [0.2805929183959961, 0.5141641497612], [0.2813025116920471, 0.512523889541626], [0.28199660778045654, 0.5110296607017517], [0.28283077478408813, 0.5094674825668335], [0.2836400270462036, 0.5080087184906006], [0.28465986251831055, 0.5064823627471924], [0.2851696014404297, 0.8017503023147583], [0.28446531295776367, 0.8002318143844604], [0.28374505043029785, 0.7986279129981995], [0.28317421674728394, 0.7971989512443542], [0.28259938955307007, 0.7955896854400635], [0.28217458724975586, 0.7941508293151855], [0.2817171812057495, 0.7923344969749451], [0.2812696695327759, 0.7907374501228333], [0.28082525730133057, 0.7888543605804443], [0.28042370080947876, 0.7871813774108887], [0.28004342317581177, 0.7851542234420776], [0.27962177991867065, 0.7833513617515564], [0.279241681098938, 0.781286895275116], [0.2788684368133545, 0.7794523239135742], [0.2785451412200928, 0.7772871255874634], [0.2781651020050049, 0.775367021560669], [0.2778451442718506, 0.773178219795227], [0.27751076221466064, 0.771240234375], [0.277249813079834, 0.7689918279647827], [0.27691972255706787, 0.767020046710968], [0.2766803503036499, 0.7647768259048462], [0.2764071822166443, 0.7628179788589478], [0.27622878551483154, 0.7605792880058289], [0.2759971022605896, 0.7585973739624023], [0.27582597732543945, 0.756346583366394], [0.27565091848373413, 0.7543405294418335], [0.275539755821228, 0.7520214319229126], [0.2753475308418274, 0.7499713897705078], [0.27526605129241943, 0.7476334571838379], [0.2751181721687317, 0.7455318570137024], [0.27510637044906616, 0.7430819869041443], [0.274943470954895, 0.7407824993133545], [0.2749173641204834, 0.738186240196228], [0.2747672200202942, 
0.7358173727989197], [0.27475959062576294, 0.7331314086914062], [0.2745858430862427, 0.7306796312332153], [0.2745628356933594, 0.7279709577560425], [0.27440953254699707, 0.7254798412322998], [0.274402916431427, 0.7227185964584351], [0.2742390036582947, 0.720185399055481], [0.274219810962677, 0.7174034118652344], [0.27407610416412354, 0.7148724794387817], [0.274081826210022, 0.7120600938796997], [0.2739217281341553, 0.7094531655311584], [0.27391231060028076, 0.7065939903259277], [0.2737613916397095, 0.7039749026298523], [0.2737603783607483, 0.7011023759841919], [0.2735961079597473, 0.6984285712242126], [0.27358537912368774, 0.6955280303955078], [0.2734295129776001, 0.6928304433822632], [0.2734273672103882, 0.6899045705795288], [0.27326667308807373, 0.6872028708457947], [0.2732592225074768, 0.6842842102050781], [0.27310967445373535, 0.6815806031227112], [0.2731139063835144, 0.6786611080169678], [0.2729606628417969, 0.6759666204452515], [0.2729595899581909, 0.6730763912200928], [0.2728206515312195, 0.670403003692627], [0.27283334732055664, 0.6675260066986084], [0.27269792556762695, 0.6648717522621155], [0.272712767124176, 0.6620204448699951], [0.27259767055511475, 0.6593813896179199], [0.27263450622558594, 0.6565422415733337], [0.2725200653076172, 0.6539020538330078], [0.27255839109420776, 0.6510787010192871], [0.2724575996398926, 0.6484430432319641], [0.2725091576576233, 0.6456301212310791], [0.2724177837371826, 0.6429895758628845], [0.272479772567749, 0.6401938796043396], [0.2724040746688843, 0.6375681161880493], [0.27247893810272217, 0.6347864866256714], [0.2724132537841797, 0.6321768760681152], [0.2724952697753906, 0.6294305920600891], [0.27243858575820923, 0.6268531084060669], [0.27253007888793945, 0.6241267919540405], [0.2724900245666504, 0.6215671300888062], [0.2725934386253357, 0.61887526512146], [0.27256572246551514, 0.6163288354873657], [0.2726764678955078, 0.6136553287506104], [0.27265989780426025, 0.6111178398132324], [0.2727782726287842, 0.6084669828414917], [0.27277421951293945, 0.6059442758560181], [0.27290457487106323, 0.6033123731613159], [0.2729097008705139, 0.6008075475692749], [0.2730468511581421, 0.5982029438018799], [0.273060142993927, 0.5957307815551758], [0.27320581674575806, 0.5931767225265503], [0.27323460578918457, 0.5907383561134338], [0.2733914256095886, 0.5882337093353271], [0.27343589067459106, 0.5858196020126343], [0.27360719442367554, 0.5833473205566406], [0.27366530895233154, 0.5809440612792969], [0.2738445997238159, 0.5785022974014282], [0.2739145755767822, 0.5761266946792603], [0.2741047143936157, 0.5737125873565674], [0.27419018745422363, 0.5713682174682617], [0.2743898630142212, 0.5689982771873474], [0.2744874954223633, 0.5666943192481995], [0.27470195293426514, 0.5643674731254578], [0.27481967210769653, 0.5621159076690674], [0.27504974603652954, 0.5598530769348145], [0.27518224716186523, 0.5576275587081909], [0.27542227506637573, 0.5554041862487793], [0.27557581663131714, 0.553221583366394], [0.2758427858352661, 0.5510658025741577], [0.2760345935821533, 0.5489104986190796], [0.2763369083404541, 0.5467941164970398], [0.2765675187110901, 0.5446826219558716], [0.27689486742019653, 0.5426108837127686], [0.27715635299682617, 0.5405256748199463], [0.27750247716903687, 0.5384927988052368], [0.2777891159057617, 0.5364329218864441], [0.2781558036804199, 0.5344445705413818], [0.278462290763855, 0.53244549036026], [0.278847336769104, 0.5305200815200806], [0.2791772484779358, 0.5285906791687012], [0.2795678973197937, 0.5267688035964966], [0.27991533279418945, 
0.5249565243721008], [0.28033435344696045, 0.5232748985290527], [0.28077346086502075, 0.521589994430542], [0.28127795457839966, 0.5199551582336426], [0.28175675868988037, 0.5182893872261047], [0.2822606563568115, 0.5167388916015625], [0.28280121088027954, 0.5151638388633728], [0.2833788990974426, 0.5136908292770386], [0.2840648889541626, 0.512251615524292], [0.28479182720184326, 0.5108158588409424], [0.28561484813690186, 0.5094442367553711], [0.2864835262298584, 0.5080060958862305], [0.287692129611969, 0.8004546165466309], [0.28711485862731934, 0.7988958954811096], [0.2866484522819519, 0.7973934412002563], [0.2861868143081665, 0.795884370803833], [0.28584277629852295, 0.794553279876709], [0.28539109230041504, 0.7929253578186035], [0.28501784801483154, 0.791499137878418], [0.28463685512542725, 0.7896084189414978], [0.2842864394187927, 0.7880496978759766], [0.2839112877845764, 0.7861326336860657], [0.2835437059402466, 0.7844558358192444], [0.2832184433937073, 0.7823583483695984], [0.2828793525695801, 0.7806028723716736], [0.2825462818145752, 0.7785123586654663], [0.2821936011314392, 0.7766824960708618], [0.2818952202796936, 0.7744662761688232], [0.2815675139427185, 0.7726000547409058], [0.2812741994857788, 0.7704133987426758], [0.2809497117996216, 0.7685039043426514], [0.28071218729019165, 0.7662584781646729], [0.28043508529663086, 0.7643582224845886], [0.2802357077598572, 0.7621704936027527], [0.2799977660179138, 0.7602643966674805], [0.27984464168548584, 0.7580450773239136], [0.27967727184295654, 0.7561248540878296], [0.2795332074165344, 0.753851056098938], [0.27935177087783813, 0.7518503665924072], [0.27927279472351074, 0.749537467956543], [0.27913129329681396, 0.7475525140762329], [0.2790796160697937, 0.7451735734939575], [0.27892011404037476, 0.7430505752563477], [0.2789064049720764, 0.7404475212097168], [0.27876758575439453, 0.7381711602210999], [0.27875781059265137, 0.7355091571807861], [0.27859580516815186, 0.7330960631370544], [0.2785911560058594, 0.7303674817085266], [0.27845191955566406, 0.7279524207115173], [0.27844899892807007, 0.7251960635185242], [0.2783016562461853, 0.7226929664611816], [0.278298556804657, 0.7198861837387085], [0.27815914154052734, 0.7173870801925659], [0.2781442403793335, 0.714582085609436], [0.2779759168624878, 0.712028980255127], [0.27795517444610596, 0.7091507911682129], [0.2777954339981079, 0.7065696716308594], [0.2777779698371887, 0.7036888599395752], [0.27760475873947144, 0.7010616660118103], [0.27758073806762695, 0.6981356143951416], [0.27741098403930664, 0.695486307144165], [0.27738869190216064, 0.692558765411377], [0.27722132205963135, 0.6898696422576904], [0.2772088646888733, 0.6869341135025024], [0.27705055475234985, 0.6842511892318726], [0.27704131603240967, 0.6813379526138306], [0.2768896222114563, 0.6786367297172546], [0.2768932580947876, 0.6757287383079529], [0.2767546772956848, 0.673058032989502], [0.2767599821090698, 0.670186460018158], [0.27662330865859985, 0.6675134897232056], [0.27662980556488037, 0.6646546125411987], [0.2764958143234253, 0.6620140671730042], [0.2764993906021118, 0.6591907739639282], [0.2763650417327881, 0.6565465927124023], [0.27638089656829834, 0.6537246704101562], [0.2762601971626282, 0.6510961055755615], [0.27628815174102783, 0.648297905921936], [0.27617931365966797, 0.645663857460022], [0.2762225270271301, 0.6428713798522949], [0.27613168954849243, 0.6402466297149658], [0.27618736028671265, 0.637478232383728], [0.2761101722717285, 0.6348555088043213], [0.2761797308921814, 0.6321126222610474], [0.2761131525039673, 
0.6295071840286255], [0.2761869430541992, 0.6268008947372437], [0.27613186836242676, 0.624225914478302], [0.27621960639953613, 0.6215459108352661], [0.2761734127998352, 0.6189807653427124], [0.2762622833251953, 0.6163269281387329], [0.27623122930526733, 0.6137813329696655], [0.27633553743362427, 0.611137866973877], [0.2763100266456604, 0.6085942387580872], [0.276411771774292, 0.605984091758728], [0.2763969302177429, 0.6034587025642395], [0.2765120267868042, 0.6008656024932861], [0.27650558948516846, 0.5983527302742004], [0.27662575244903564, 0.5958136320114136], [0.2766340970993042, 0.5933470726013184], [0.27676820755004883, 0.5908505320549011], [0.2767866849899292, 0.5884135961532593], [0.27692627906799316, 0.5859593152999878], [0.276964008808136, 0.5835574865341187], [0.27712148427963257, 0.5811067819595337], [0.2771632671356201, 0.578710675239563], [0.27731776237487793, 0.5763102173805237], [0.2773764729499817, 0.5739455819129944], [0.2775457501411438, 0.571575939655304], [0.2776097059249878, 0.5692346096038818], [0.2777824401855469, 0.5669288039207458], [0.2778654098510742, 0.564636766910553], [0.2780557870864868, 0.5623894333839417], [0.2781577706336975, 0.5601460933685303], [0.27836722135543823, 0.5579347014427185], [0.2784930467605591, 0.5557237863540649], [0.2787233591079712, 0.553581953048706], [0.27887284755706787, 0.5514338612556458], [0.27912819385528564, 0.5493357181549072], [0.2793079614639282, 0.5472111105918884], [0.27958202362060547, 0.5451594591140747], [0.27979445457458496, 0.5430736541748047], [0.2800934910774231, 0.5410401821136475], [0.2803255319595337, 0.538972020149231], [0.2806375026702881, 0.5369892120361328], [0.2808966636657715, 0.5349537134170532], [0.28122591972351074, 0.5330373644828796], [0.28150618076324463, 0.5310625433921814], [0.28184962272644043, 0.5292216539382935], [0.2821388244628906, 0.5273098945617676], [0.28246891498565674, 0.5256440043449402], [0.28279244899749756, 0.5239270925521851], [0.2831341624259949, 0.5223497152328491], [0.28349244594573975, 0.5206446647644043], [0.2839092016220093, 0.519084095954895], [0.28433334827423096, 0.517459511756897], [0.28476548194885254, 0.516049861907959], [0.2852809429168701, 0.5145372748374939], [0.2857557535171509, 0.5132686495780945], [0.28645414113998413, 0.511846125125885], [0.2871129512786865, 0.5105793476104736], [0.28801578283309937, 0.5092928409576416], [0.29042351245880127, 0.7991322875022888], [0.29007434844970703, 0.79792720079422], [0.28963565826416016, 0.7962682247161865], [0.2894374132156372, 0.7951666116714478], [0.2890537977218628, 0.7933927178382874], [0.28876060247421265, 0.7920740842819214], [0.2884182333946228, 0.7904093265533447], [0.2880990505218506, 0.7889366149902344], [0.2877678871154785, 0.7870263457298279], [0.28744274377822876, 0.7854309678077698], [0.28713953495025635, 0.7835112810134888], [0.2868216633796692, 0.7817963361740112], [0.2865256071090698, 0.7796833515167236], [0.28619837760925293, 0.7778943777084351], [0.2859047055244446, 0.7757993340492249], [0.2855836749076843, 0.7739439010620117], [0.28530776500701904, 0.7717516422271729], [0.2849898934364319, 0.769870936870575], [0.28473639488220215, 0.7677210569381714], [0.28443723917007446, 0.7658216953277588], [0.2842100262641907, 0.7636444568634033], [0.28391140699386597, 0.761746346950531], [0.28368067741394043, 0.7596325278282166], [0.2834444046020508, 0.7577885389328003], [0.28329336643218994, 0.75568026304245], [0.28317582607269287, 0.7537373304367065], [0.283164381980896, 0.75152587890625], [0.28308141231536865, 
0.7495071887969971], [0.28308820724487305, 0.7472586631774902], [0.282955527305603, 0.7450646162033081], [0.28292882442474365, 0.7427398562431335], [0.2827775478363037, 0.7404096722602844], [0.28277432918548584, 0.7379111051559448], [0.2826204299926758, 0.7354444265365601], [0.2826130986213684, 0.7328221797943115], [0.28248000144958496, 0.7303627729415894], [0.2824973464012146, 0.7277069091796875], [0.2823587656021118, 0.7251642942428589], [0.2823635935783386, 0.7224451303482056], [0.2822383642196655, 0.7198804616928101], [0.28224170207977295, 0.7171287536621094], [0.2820786237716675, 0.7145006656646729], [0.28203821182250977, 0.711719810962677], [0.281859815120697, 0.7090855836868286], [0.28182899951934814, 0.7062790393829346], [0.281643271446228, 0.7036046385765076], [0.281599223613739, 0.7007559537887573], [0.2814143896102905, 0.6980631947517395], [0.28137803077697754, 0.6951912641525269], [0.2811935544013977, 0.6924843788146973], [0.2811588644981384, 0.6895940899848938], [0.2809903621673584, 0.6868851184844971], [0.2809736728668213, 0.6839945912361145], [0.2808119058609009, 0.6812904477119446], [0.2808040976524353, 0.6784087419509888], [0.28066718578338623, 0.6757161617279053], [0.2806791067123413, 0.6728459596633911], [0.28054124116897583, 0.670157790184021], [0.2805405259132385, 0.6673007011413574], [0.28040170669555664, 0.664627194404602], [0.2803950309753418, 0.6617923974990845], [0.2802426815032959, 0.659141480922699], [0.2802276015281677, 0.6563342213630676], [0.28008562326431274, 0.6537045240402222], [0.2800898551940918, 0.6509130001068115], [0.27995991706848145, 0.6482911705970764], [0.2799786925315857, 0.6455137133598328], [0.2798713445663452, 0.6428967714309692], [0.2799113988876343, 0.6401288509368896], [0.27981650829315186, 0.6375184059143066], [0.27986812591552734, 0.6347655057907104], [0.2797914743423462, 0.6321759819984436], [0.27985668182373047, 0.6294375658035278], [0.27979224920272827, 0.6268723011016846], [0.279860258102417, 0.6241755485534668], [0.2797977924346924, 0.6216264963150024], [0.2798696756362915, 0.6189383864402771], [0.2798182964324951, 0.6164209842681885], [0.2798994183540344, 0.6137701272964478], [0.2798568606376648, 0.611242413520813], [0.27994346618652344, 0.6085913181304932], [0.27990788221359253, 0.6060888171195984], [0.27999722957611084, 0.6034778356552124], [0.27996915578842163, 0.6009833812713623], [0.28006911277770996, 0.5983911752700806], [0.28005534410476685, 0.5959488153457642], [0.28016626834869385, 0.5934157371520996], [0.28016412258148193, 0.5910014510154724], [0.28028351068496704, 0.5884999632835388], [0.2802940011024475, 0.5861256122589111], [0.28042131662368774, 0.5836785435676575], [0.28044456243515015, 0.5812883377075195], [0.2805827856063843, 0.5788347125053406], [0.2806112766265869, 0.5764971971511841], [0.280750036239624, 0.5741010904312134], [0.280788779258728, 0.5717731714248657], [0.280936062335968, 0.5694010257720947], [0.28098225593566895, 0.5671373605728149], [0.28113770484924316, 0.564842700958252], [0.2811993360519409, 0.5626212358474731], [0.2813723087310791, 0.5603965520858765], [0.2814602851867676, 0.5582024455070496], [0.2816619873046875, 0.5560355186462402], [0.2817840576171875, 0.5538949966430664], [0.2820062041282654, 0.5517901182174683], [0.2821444272994995, 0.5496808290481567], [0.2823755741119385, 0.5475995540618896], [0.2825304865837097, 0.5455350875854492], [0.28277587890625, 0.5435124635696411], [0.282961368560791, 0.5414479970932007], [0.2832314372062683, 0.5394405126571655], [0.28343701362609863, 
[... large JSON array of normalized [x, y] coordinate pairs (floats in roughly 0.28-0.80), apparently 2D landmark/tracking data committed as part of this diff; the remainder of the array is elided here ...]
[0.7438566088676453, 0.8124837279319763], [0.7450193762779236, 0.810007631778717], [0.746128499507904, 0.8078126907348633], [0.7471566200256348, 0.8053123354911804], [0.7481462955474854, 0.803107738494873], [0.7490545511245728, 0.8005883693695068], [0.7499426603317261, 0.7983626127243042], [0.7507390379905701, 0.7958107590675354], [0.7515407204627991, 0.7935485243797302], [0.7522457838058472, 0.7909570336341858], [0.7529697418212891, 0.7886589765548706], [0.7535855770111084, 0.7860270738601685], [0.7542418837547302, 0.7836874723434448], [0.7547792196273804, 0.7810155153274536], [0.755364716053009, 0.7786503434181213], [0.7558249235153198, 0.7759507894515991], [0.7563489675521851, 0.7735580205917358], [0.7567377090454102, 0.7708362340927124], [0.7572032809257507, 0.7684211134910583], [0.75752854347229, 0.7656707763671875], [0.7579326629638672, 0.7632640600204468], [0.7581892609596252, 0.7604933381080627], [0.7585129141807556, 0.7581551671028137], [0.758683443069458, 0.755428671836853], [0.7589601874351501, 0.7530515193939209], [0.7590769529342651, 0.7502951622009277], [0.7593202590942383, 0.7478818893432617], [0.7593958973884583, 0.7450909614562988], [0.7596089839935303, 0.7426551580429077], [0.759652316570282, 0.7398495674133301], [0.7598380446434021, 0.7373866438865662], [0.759846568107605, 0.7345703840255737], [0.7600162625312805, 0.7320722341537476], [0.7600052356719971, 0.7292299270629883], [0.760156512260437, 0.7267061471939087], [0.7601261734962463, 0.723849892616272], [0.7602652311325073, 0.7212929725646973], [0.7602183818817139, 0.7184152603149414], [0.7603459358215332, 0.7158276438713074], [0.7602840662002563, 0.7129309177398682], [0.7604012489318848, 0.7103115320205688], [0.7603266835212708, 0.7073925733566284], [0.760430097579956, 0.704742968082428], [0.7603424191474915, 0.701800525188446], [0.7604385614395142, 0.6991195678710938], [0.7603433132171631, 0.6961557865142822], [0.7604290843009949, 0.6934525966644287], [0.7603208422660828, 0.6904761791229248], [0.760400116443634, 0.6877589821815491], [0.7602861523628235, 0.6847760677337646], [0.7603540420532227, 0.6820433139801025], [0.7602255940437317, 0.6790609359741211], [0.7602814435958862, 0.6763292551040649], [0.7601441740989685, 0.6733546257019043], [0.7601876258850098, 0.6706219911575317], [0.7600349187850952, 0.6676545143127441], [0.7600641250610352, 0.6649255752563477], [0.7599015831947327, 0.6619707345962524], [0.7599173784255981, 0.6592479944229126], [0.7597381472587585, 0.6563069224357605], [0.759739875793457, 0.653586745262146], [0.7595471143722534, 0.6506623029708862], [0.75953209400177, 0.6479461789131165], [0.7593245506286621, 0.6450338959693909], [0.7592924237251282, 0.6423234939575195], [0.7590702772140503, 0.6394309997558594], [0.7590224742889404, 0.6367283463478088], [0.7587894201278687, 0.6338468790054321], [0.758730411529541, 0.6311560869216919], [0.7584850788116455, 0.6283082365989685], [0.7584081888198853, 0.6256345510482788], [0.758145809173584, 0.6228147149085999], [0.7580491304397583, 0.6201639771461487], [0.7577747702598572, 0.6173762083053589], [0.7576596140861511, 0.6147339344024658], [0.7573680877685547, 0.6119687557220459], [0.757235050201416, 0.6093419194221497], [0.7569311857223511, 0.6066001653671265], [0.7567814588546753, 0.6039891242980957], [0.7564602494239807, 0.601266086101532], [0.756291925907135, 0.598673403263092], [0.7559584379196167, 0.5959832072257996], [0.7557693719863892, 0.5934061408042908], [0.7554185390472412, 0.5907459855079651], [0.7552093863487244, 0.588196873664856], 
[0.7548437118530273, 0.5855790376663208], [0.7546203136444092, 0.5830556750297546], [0.7542411088943481, 0.5804755687713623], [0.7540000677108765, 0.5779891014099121], [0.7536100745201111, 0.575453519821167], [0.7533552646636963, 0.5729925632476807], [0.7529534101486206, 0.5704914331436157], [0.7526837587356567, 0.568057119846344], [0.7522711157798767, 0.5655906796455383], [0.751984715461731, 0.5631797313690186], [0.7515569925308228, 0.5607433319091797], [0.7512497901916504, 0.558350145816803], [0.7508047223091125, 0.5559431910514832], [0.750473141670227, 0.5535637140274048], [0.7500084042549133, 0.551180899143219], [0.749652087688446, 0.5488173961639404], [0.7491608262062073, 0.5464615225791931], [0.748771071434021, 0.5441144108772278], [0.7482534646987915, 0.541782021522522], [0.7478280663490295, 0.5394508838653564], [0.7472783327102661, 0.5371511578559875], [0.7468149662017822, 0.5348472595214844], [0.7462329864501953, 0.532583475112915], [0.7457240223884583, 0.5302944183349609], [0.745099663734436, 0.5280565023422241], [0.7445434927940369, 0.5257947444915771], [0.7438772320747375, 0.523593544960022], [0.74326092004776, 0.5213623046875], [0.7425403594970703, 0.5191936492919922], [0.7418649196624756, 0.5169943571090698], [0.7410905957221985, 0.514867901802063], [0.7403447031974792, 0.5127016305923462], [0.7395099401473999, 0.5106127262115479], [0.7386943697929382, 0.5084851980209351], [0.7377974987030029, 0.506441056728363], [0.736899733543396, 0.5043578147888184], [0.7359206676483154, 0.50235515832901], [0.7348977327346802, 0.5002369284629822], [0.7337814569473267, 0.4981771409511566], [0.7326260209083557, 0.49607571959495544], [0.7313772439956665, 0.49400997161865234], [0.7434101104736328, 0.8221230506896973], [0.7448537349700928, 0.8195775747299194], [0.7462610602378845, 0.8172774314880371], [0.7475498914718628, 0.8147106170654297], [0.7488000392913818, 0.8124169111251831], [0.7499451637268066, 0.8098288178443909], [0.7510597705841064, 0.807528555393219], [0.7520742416381836, 0.8049194812774658], [0.7530651092529297, 0.8026191592216492], [0.7539627552032471, 0.7999778389930725], [0.7548509836196899, 0.7976589202880859], [0.7556437849998474, 0.7949811220169067], [0.7564418911933899, 0.7926396131515503], [0.7571402788162231, 0.7899155616760254], [0.7578573226928711, 0.7875444889068604], [0.7584642171859741, 0.7847822904586792], [0.7591103315353394, 0.782377302646637], [0.7596307396888733, 0.7795898914337158], [0.7602037191390991, 0.7771610021591187], [0.7606419324874878, 0.7743537425994873], [0.7611494064331055, 0.7719039916992188], [0.7615106105804443, 0.7690807580947876], [0.761958122253418, 0.7666107416152954], [0.7622520923614502, 0.7637946009635925], [0.7626456022262573, 0.7613157033920288], [0.762876033782959, 0.7585388422012329], [0.7632013559341431, 0.7561107873916626], [0.763357937335968, 0.7533260583877563], [0.7636359333992004, 0.750878095626831], [0.7637408375740051, 0.7480698227882385], [0.7639833092689514, 0.7455971837043762], [0.764049768447876, 0.7427762150764465], [0.7642619609832764, 0.7402833700180054], [0.7642916440963745, 0.7374436855316162], [0.7644743919372559, 0.7349379062652588], [0.7644726037979126, 0.7320888638496399], [0.7646368741989136, 0.72955322265625], [0.7646130323410034, 0.7266840934753418], [0.7647589445114136, 0.7241299152374268], [0.764714241027832, 0.7212432026863098], [0.764845609664917, 0.7186609506607056], [0.7647853493690491, 0.7157502770423889], [0.7649027705192566, 0.7131409645080566], [0.764824628829956, 0.7102122902870178], 
[0.7649297118186951, 0.7075743675231934], [0.7648369073867798, 0.7046180963516235], [0.7649309635162354, 0.7019530534744263], [0.7648255825042725, 0.6989820003509521], [0.7649096250534058, 0.6962860822677612], [0.7647923231124878, 0.6932958364486694], [0.7648669481277466, 0.6905852556228638], [0.7647407054901123, 0.6875889897346497], [0.7648011445999146, 0.6848587989807129], [0.7646617889404297, 0.6818556189537048], [0.7647136449813843, 0.6791176795959473], [0.7645636796951294, 0.6761257648468018], [0.7645982503890991, 0.6733800172805786], [0.7644331455230713, 0.6703972816467285], [0.7644566297531128, 0.6676520705223083], [0.7642809152603149, 0.6646817922592163], [0.7642886638641357, 0.6619385480880737], [0.7640975117683411, 0.6589815616607666], [0.7640897631645203, 0.6562404632568359], [0.7638832330703735, 0.6532990336418152], [0.7638595104217529, 0.6505612134933472], [0.7636363506317139, 0.647636353969574], [0.7635956406593323, 0.6449024081230164], [0.7633605599403381, 0.6419901847839355], [0.763301432132721, 0.6392626166343689], [0.7630479335784912, 0.6363743543624878], [0.7629735469818115, 0.6336554288864136], [0.7627072334289551, 0.6307846307754517], [0.7626138925552368, 0.6280797719955444], [0.7623331546783447, 0.6252396106719971], [0.7622213363647461, 0.6225502490997314], [0.7619243860244751, 0.6197465658187866], [0.7617948055267334, 0.6170791387557983], [0.7614843845367432, 0.6142947673797607], [0.7613368034362793, 0.6116381287574768], [0.7610125541687012, 0.6088821887969971], [0.7608462572097778, 0.6062381267547607], [0.7605053186416626, 0.6035081148147583], [0.7603223323822021, 0.6008725762367249], [0.7599684000015259, 0.5981717109680176], [0.7597628235816956, 0.5955523252487183], [0.7593915462493896, 0.5928794145584106], [0.7591683864593506, 0.5902825593948364], [0.7587848901748657, 0.5876473784446716], [0.7585440874099731, 0.5850827097892761], [0.7581449747085571, 0.5824865102767944], [0.7578875422477722, 0.5799499750137329], [0.7574794292449951, 0.5774002075195312], [0.7572070360183716, 0.5748993754386902], [0.7567884922027588, 0.5723906755447388], [0.7565045952796936, 0.5699124336242676], [0.7560769319534302, 0.567436933517456], [0.7557774186134338, 0.5649833679199219], [0.7553364038467407, 0.5625379085540771], [0.7550172209739685, 0.5601024031639099], [0.7545585632324219, 0.5576809644699097], [0.7542174458503723, 0.5552648901939392], [0.7537417411804199, 0.5528665781021118], [0.7533739805221558, 0.5504600405693054], [0.752873420715332, 0.5480839014053345], [0.7524767518043518, 0.5456914901733398], [0.7519509792327881, 0.5433390140533447], [0.7515190243721008, 0.5409592390060425], [0.7509632110595703, 0.5386275053024292], [0.7504963874816895, 0.5362697243690491], [0.7499107718467712, 0.5339696407318115], [0.7493994235992432, 0.5316309928894043], [0.7487709522247314, 0.5293541550636292], [0.7482132911682129, 0.5270333886146545], [0.7475453615188599, 0.524786114692688], [0.746933102607727, 0.522484302520752], [0.7462162375450134, 0.5202683210372925], [0.7455462217330933, 0.5179928541183472], [0.7447763085365295, 0.5158084630966187], [0.7440401315689087, 0.5135645866394043], [0.7432116270065308, 0.511410117149353], [0.7424042820930481, 0.5091989040374756], [0.7415112853050232, 0.507084310054779], [0.7406248450279236, 0.5049107074737549], [0.7396559119224548, 0.5028257966041565], [0.738693356513977, 0.5006943941116333], [0.7375898957252502, 0.49855682253837585], [0.7364672422409058, 0.4963715970516205], [0.735244631767273, 0.49423748254776], [0.7339988350868225, 
0.49206095933914185], [0.7468714714050293, 0.8246658444404602], [0.7484245300292969, 0.8223010897636414], [0.7498667240142822, 0.8196132183074951], [0.7512565851211548, 0.8172438144683838], [0.752538800239563, 0.8145489692687988], [0.7537809610366821, 0.8121708631515503], [0.7549186944961548, 0.8094595074653625], [0.756029486656189, 0.8070752620697021], [0.7570318579673767, 0.8043522834777832], [0.7580269575119019, 0.8019500970840454], [0.758909285068512, 0.7992038726806641], [0.7598034143447876, 0.7967774271965027], [0.7605764865875244, 0.7940036058425903], [0.7613823413848877, 0.7915443778038025], [0.7620564699172974, 0.7887357473373413], [0.7627785205841064, 0.7862454652786255], [0.7633602619171143, 0.7834094762802124], [0.7640045881271362, 0.7808980941772461], [0.7645009756088257, 0.7780381441116333], [0.7650710344314575, 0.7755133509635925], [0.7654848098754883, 0.7726395130157471], [0.7659871578216553, 0.7701078653335571], [0.7663301229476929, 0.767220139503479], [0.7667676210403442, 0.7646952867507935], [0.7670411467552185, 0.7618124485015869], [0.7674166560173035, 0.7593223452568054], [0.7676187753677368, 0.7564620971679688], [0.7679329514503479, 0.7539823055267334], [0.7680703997612, 0.7511245012283325], [0.7683411836624146, 0.7486315369606018], [0.7684279680252075, 0.745760977268219], [0.7686591744422913, 0.7432533502578735], [0.7687049508094788, 0.7403779625892639], [0.7689080238342285, 0.737852931022644], [0.7689205408096313, 0.7349740862846375], [0.7690955400466919, 0.7324326038360596], [0.7690788507461548, 0.7295464277267456], [0.769235372543335, 0.7269837260246277], [0.7691958546638489, 0.7240849137306213], [0.7693328857421875, 0.7214967012405396], [0.7692714929580688, 0.7185851335525513], [0.7693935632705688, 0.7159683704376221], [0.7693156003952026, 0.713036298751831], [0.7694240808486938, 0.7103958129882812], [0.7693283557891846, 0.7074437141418457], [0.769424319267273, 0.7047752141952515], [0.7693168520927429, 0.7018035650253296], [0.7693992853164673, 0.6991056799888611], [0.769275426864624, 0.6961154341697693], [0.7693465352058411, 0.6933975219726562], [0.7692124247550964, 0.6903986930847168], [0.7692692279815674, 0.6876593828201294], [0.7691200971603394, 0.6846519112586975], [0.7691643238067627, 0.6818997263908386], [0.7690026164054871, 0.6788932681083679], [0.769031286239624, 0.6761358976364136], [0.7688544392585754, 0.6731337308883667], [0.7688724994659424, 0.6703816652297974], [0.7686820030212402, 0.6673879623413086], [0.7686837911605835, 0.6646342277526855], [0.7684792280197144, 0.6616528034210205], [0.7684668898582458, 0.6589018106460571], [0.7682480812072754, 0.6559308171272278], [0.7682144045829773, 0.653181254863739], [0.7679752111434937, 0.6502252817153931], [0.7679272294044495, 0.6474883556365967], [0.7676790952682495, 0.644548773765564], [0.7676146030426025, 0.6418089866638184], [0.7673459053039551, 0.638879656791687], [0.7672592401504517, 0.6361509561538696], [0.7669711112976074, 0.6332502365112305], [0.7668623328208923, 0.6305292844772339], [0.7665602564811707, 0.6276514530181885], [0.7664353847503662, 0.6249492168426514], [0.7661175727844238, 0.6220995187759399], [0.7659749388694763, 0.6194210052490234], [0.7656423449516296, 0.6166060566902161], [0.7654801607131958, 0.6139382719993591], [0.7651349306106567, 0.611145555973053], [0.7649543285369873, 0.6084933280944824], [0.7645940184593201, 0.6057248115539551], [0.7643958330154419, 0.603085994720459], [0.7640188336372375, 0.600339412689209], [0.7638011574745178, 0.5977160930633545], [0.7634091377258301, 
0.5949934720993042], [0.763171911239624, 0.5923909544944763], [0.7627652287483215, 0.5897048711776733], [0.7625108957290649, 0.5871292352676392], [0.762087345123291, 0.584479570388794], [0.7618159651756287, 0.581936240196228], [0.761383593082428, 0.5793336629867554], [0.7610999941825867, 0.5768240094184875], [0.7606600522994995, 0.5742655396461487], [0.760362446308136, 0.5717867016792297], [0.7599113583564758, 0.5692662000656128], [0.7596015334129333, 0.566810667514801], [0.7591409683227539, 0.5643198490142822], [0.7588142156600952, 0.5618809461593628], [0.7583400011062622, 0.5594152212142944], [0.7579928636550903, 0.5569902062416077], [0.7575002908706665, 0.5545494556427002], [0.757124662399292, 0.5521352887153625], [0.7566084861755371, 0.5497127771377563], [0.7562086582183838, 0.5473088026046753], [0.7556688785552979, 0.5449084639549255], [0.755236029624939, 0.5425139665603638], [0.754669189453125, 0.5401335954666138], [0.7542031407356262, 0.5377500057220459], [0.7536059617996216, 0.5353958606719971], [0.7530981302261353, 0.5330274105072021], [0.7524614334106445, 0.5306960344314575], [0.7519090175628662, 0.5283430814743042], [0.7512340545654297, 0.526038646697998], [0.7506308555603027, 0.5236976742744446], [0.7499072551727295, 0.5214130282402039], [0.7492480874061584, 0.5190972089767456], [0.7484738826751709, 0.5168399810791016], [0.7477537393569946, 0.5145456194877625], [0.7469252943992615, 0.5123167037963867], [0.746134340763092, 0.5100452899932861], [0.7452417612075806, 0.5078451633453369], [0.7443762421607971, 0.505607008934021], [0.7434096336364746, 0.5034382343292236], [0.7424614429473877, 0.5012284517288208], [0.7414132952690125, 0.4991001486778259], [0.7403271198272705, 0.49682414531707764], [0.7391318678855896, 0.4946153461933136], [0.737923264503479, 0.4923591613769531], [0.7366013526916504, 0.49015164375305176], [0.7505390644073486, 0.8276348114013672], [0.7521140575408936, 0.8248616456985474], [0.7536678314208984, 0.822376012802124], [0.7550769448280334, 0.8195883631706238], [0.7564640641212463, 0.8171047568321228], [0.7577198147773743, 0.814298152923584], [0.7589588165283203, 0.8118139505386353], [0.7600722312927246, 0.8089925050735474], [0.761176586151123, 0.8065043091773987], [0.7621580958366394, 0.8036612272262573], [0.7631462812423706, 0.8011653423309326], [0.7640124559402466, 0.7982927560806274], [0.7648962736129761, 0.7957772612571716], [0.7656500339508057, 0.7928733825683594], [0.7664428949356079, 0.7903296947479248], [0.7670981884002686, 0.7873996496200562], [0.7678045034408569, 0.7848360538482666], [0.7683628797531128, 0.7818870544433594], [0.7689894437789917, 0.7793042659759521], [0.7694576978683472, 0.7763453125953674], [0.7700088024139404, 0.7737569808959961], [0.7703947424888611, 0.7708019018173218], [0.7708815932273865, 0.7682046890258789], [0.7711950540542603, 0.7652621865272522], [0.7716204524040222, 0.7626805305480957], [0.7718653678894043, 0.759765625], [0.7722305059432983, 0.7572019696235657], [0.772408127784729, 0.7543006539344788], [0.772715151309967, 0.751748263835907], [0.7728323936462402, 0.7488517165184021], [0.7730967998504639, 0.746292233467102], [0.773167610168457, 0.743389368057251], [0.7733935713768005, 0.7408268451690674], [0.7734247446060181, 0.737928032875061], [0.7736197113990784, 0.7353540658950806], [0.7736161351203918, 0.7324472069740295], [0.7737838625907898, 0.7298691272735596], [0.7737513780593872, 0.7269533276557922], [0.7738965153694153, 0.7243571281433105], [0.7738393545150757, 0.7214256525039673], [0.773966908454895, 
0.7188129425048828], [0.773888111114502, 0.7158670425415039], [0.7739996314048767, 0.7132290601730347], [0.7739033102989197, 0.7102625370025635], [0.7740004062652588, 0.7076021432876587], [0.773887574672699, 0.7046197056770325], [0.7739714980125427, 0.7019327878952026], [0.7738445997238159, 0.6989234089851379], [0.7739145755767822, 0.6962119340896606], [0.7737723588943481, 0.6931990385055542], [0.773827850818634, 0.6904623508453369], [0.7736712694168091, 0.6874359846115112], [0.7737106084823608, 0.684683084487915], [0.7735388875007629, 0.6816579103469849], [0.7735658884048462, 0.6788933277130127], [0.7733786106109619, 0.6758685111999512], [0.773388683795929, 0.6731036305427551], [0.773186445236206, 0.6700924634933472], [0.7731801867485046, 0.6673241853713989], [0.7729620933532715, 0.664320707321167], [0.7729413509368896, 0.6615583896636963], [0.7727086544036865, 0.6585687398910522], [0.7726725935935974, 0.6558017730712891], [0.7724242210388184, 0.652817964553833], [0.7723678350448608, 0.6500539779663086], [0.7720972299575806, 0.6471002697944641], [0.7720181941986084, 0.6443454027175903], [0.7717324495315552, 0.6413992643356323], [0.771636962890625, 0.638636589050293], [0.7713326811790466, 0.6357094049453735], [0.771212637424469, 0.6329606771469116], [0.7708910703659058, 0.630064845085144], [0.7707510590553284, 0.6273294687271118], [0.7704129219055176, 0.6244609355926514], [0.7702537775039673, 0.6217413544654846], [0.7698991894721985, 0.6189075708389282], [0.7697201371192932, 0.616213321685791], [0.7693507671356201, 0.6134042143821716], [0.7691527605056763, 0.610721230506897], [0.7687703967094421, 0.607939600944519], [0.7685548663139343, 0.6052703857421875], [0.7681578397750854, 0.6025111675262451], [0.7679229974746704, 0.5998526811599731], [0.7675100564956665, 0.5971209406852722], [0.7672538757324219, 0.5944717526435852], [0.7668273448944092, 0.5917752981185913], [0.7665549516677856, 0.5891504287719727], [0.7661125659942627, 0.5864918231964111], [0.7658239603042603, 0.5838947296142578], [0.765374481678009, 0.5812817811965942], [0.7650724649429321, 0.5787232518196106], [0.7646149396896362, 0.5761555433273315], [0.7643020153045654, 0.573632538318634], [0.7638364434242249, 0.5711013078689575], [0.7635111212730408, 0.5686094760894775], [0.7630373239517212, 0.5661104917526245], [0.7626969814300537, 0.5636365413665771], [0.7622091770172119, 0.5611591935157776], [0.7618519067764282, 0.5587003231048584], [0.761347234249115, 0.5562459230422974], [0.7609643936157227, 0.5537937879562378], [0.7604374289512634, 0.5513557195663452], [0.7600283622741699, 0.5489164590835571], [0.759479284286499, 0.5464982986450195], [0.7590404748916626, 0.5440658330917358], [0.7584658861160278, 0.5416632890701294], [0.7579927444458008, 0.5392400622367859], [0.7573897838592529, 0.5368587374687195], [0.7568782567977905, 0.5344423055648804], [0.7562395334243774, 0.532079815864563], [0.7556881904602051, 0.5296776294708252], [0.7550110816955566, 0.5273349285125732], [0.7544098496437073, 0.5249470472335815], [0.7536870837211609, 0.5226222276687622], [0.7530335187911987, 0.5202438235282898], [0.7522631883621216, 0.5179423093795776], [0.7515491247177124, 0.5155813694000244], [0.7507233619689941, 0.5133037567138672], [0.7499438524246216, 0.5109660625457764], [0.749055802822113, 0.508704423904419], [0.7482015490531921, 0.5063879489898682], [0.7472426295280457, 0.5041550397872925], [0.7463134527206421, 0.5018707513809204], [0.74527907371521, 0.49965623021125793], [0.7442649602890015, 0.4973970949649811], [0.7430814504623413, 
0.4951128363609314], [0.7419066429138184, 0.49278080463409424], [0.7406120300292969, 0.4905014634132385], [0.7393159866333008, 0.4881761074066162], [0.7541592121124268, 0.8303009271621704], [0.7558578848838806, 0.8277499675750732], [0.7574218511581421, 0.824832558631897], [0.7589483261108398, 0.8222705125808716], [0.7603412866592407, 0.8193448781967163], [0.7617118954658508, 0.8167760968208313], [0.7629464268684387, 0.8138468265533447], [0.7641729116439819, 0.8112690448760986], [0.7652646899223328, 0.8083291053771973], [0.7663630247116089, 0.8057452440261841], [0.7673218250274658, 0.8027938604354858], [0.7683079242706299, 0.8001861572265625], [0.7691477537155151, 0.7972118854522705], [0.7700290679931641, 0.7945823669433594], [0.7707551717758179, 0.7915825843811035], [0.7715418934822083, 0.7889339923858643], [0.7721660733222961, 0.7859153151512146], [0.7728627920150757, 0.7832558751106262], [0.7733910083770752, 0.7802222967147827], [0.7740064859390259, 0.7775609493255615], [0.7744476795196533, 0.7745290994644165], [0.7749865055084229, 0.7718751430511475], [0.7753487229347229, 0.7688471674919128], [0.7758214473724365, 0.7662088871002197], [0.7761083841323853, 0.7632009983062744], [0.7765154838562012, 0.7605857849121094], [0.7767311334609985, 0.7575969696044922], [0.7770783305168152, 0.754999041557312], [0.7772305607795715, 0.7520279884338379], [0.7775223851203918, 0.74943608045578], [0.7776163816452026, 0.7464815378189087], [0.7778676748275757, 0.7438849210739136], [0.7779168486595154, 0.7409348487854004], [0.7781276702880859, 0.7383396625518799], [0.7781332731246948, 0.7353960275650024], [0.7783166766166687, 0.7327973246574402], [0.778292715549469, 0.7298535704612732], [0.778447151184082, 0.7272393107414246], [0.77839195728302, 0.7242941856384277], [0.778526246547699, 0.7216674089431763], [0.7784501910209656, 0.7187108993530273], [0.7785643339157104, 0.716063380241394], [0.7784665822982788, 0.7130991220474243], [0.7785687446594238, 0.7104268074035645], [0.778455376625061, 0.7074414491653442], [0.7785401344299316, 0.7047444581985474], [0.7784067988395691, 0.7017452716827393], [0.7784781455993652, 0.699021577835083], [0.7783342003822327, 0.6960046291351318], [0.778389573097229, 0.6932588815689087], [0.7782266139984131, 0.6902302503585815], [0.7782678604125977, 0.6874628067016602], [0.7780890464782715, 0.6844248175621033], [0.7781099677085876, 0.6816473007202148], [0.7779121398925781, 0.6786098480224609], [0.7779161930084229, 0.6758249998092651], [0.7777023315429688, 0.6727949976921082], [0.7776883244514465, 0.6700102686882019], [0.7774550914764404, 0.6669912338256836], [0.7774288654327393, 0.6642141938209534], [0.7771863341331482, 0.6612052917480469], [0.777143120765686, 0.6584281921386719], [0.7768861055374146, 0.6554276347160339], [0.7768285274505615, 0.652643084526062], [0.7765486240386963, 0.6496414542198181], [0.7764657139778137, 0.6468762159347534], [0.776162326335907, 0.6439007520675659], [0.7760504484176636, 0.6411314010620117], [0.7757254242897034, 0.6381710171699524], [0.7755966186523438, 0.6354023218154907], [0.7752564549446106, 0.6324663162231445], [0.7751045227050781, 0.6297157406806946], [0.7747446298599243, 0.6268095970153809], [0.7745728492736816, 0.6240743398666382], [0.7741930484771729, 0.6211955547332764], [0.7740020751953125, 0.6184861660003662], [0.7736105918884277, 0.6156436800956726], [0.7733962535858154, 0.6129477024078369], [0.7729874849319458, 0.6101317405700684], [0.7727562189102173, 0.6074541807174683], [0.772333025932312, 0.6046603322029114], [0.7720821499824524, 
0.6019943952560425], [0.771644115447998, 0.5992246866226196], [0.7713765501976013, 0.5965731143951416], [0.7709257006645203, 0.5938242673873901], [0.7706382870674133, 0.5911964178085327], [0.7701727151870728, 0.5884836316108704], [0.7698734402656555, 0.5858820676803589], [0.7693968415260315, 0.5832110047340393], [0.7690819501876831, 0.5806493759155273], [0.7685996294021606, 0.5780250430107117], [0.7682758569717407, 0.5755006074905396], [0.7677872180938721, 0.5729215145111084], [0.7674518823623657, 0.5704243779182434], [0.7669577598571777, 0.5678852796554565], [0.766608715057373, 0.5654107928276062], [0.76610267162323, 0.5628958344459534], [0.7657374739646912, 0.5604305267333984], [0.7652173042297363, 0.5579383373260498], [0.7648288607597351, 0.5554805994033813], [0.7642874717712402, 0.5530022382736206], [0.7638742327690125, 0.550552248954773], [0.7633130550384521, 0.5480942130088806], [0.7628712058067322, 0.5456497073173523], [0.7622839212417603, 0.5432085990905762], [0.7618101835250854, 0.5407665967941284], [0.7611925601959229, 0.5383414626121521], [0.7606825232505798, 0.5359107255935669], [0.7600322365760803, 0.5334975719451904], [0.7594854235649109, 0.5310748815536499], [0.7588030695915222, 0.528681755065918], [0.7582099437713623, 0.526260495185852], [0.757483720779419, 0.523885190486908], [0.7568394541740417, 0.5214744806289673], [0.7560644149780273, 0.5191125273704529], [0.7553615570068359, 0.5167121887207031], [0.7545363903045654, 0.5143719911575317], [0.7537727355957031, 0.5119847059249878], [0.7528877258300781, 0.5096641182899475], [0.752051591873169, 0.5072894096374512], [0.7510961294174194, 0.5049834847450256], [0.7501850128173828, 0.5026329159736633], [0.7491586804389954, 0.5003571510314941], [0.7481599450111389, 0.49802303314208984], [0.7470439672470093, 0.49576467275619507], [0.7459093332290649, 0.49334716796875], [0.7446441650390625, 0.4909980893135071], [0.7433827519416809, 0.4885903596878052], [0.7419885993003845, 0.48624831438064575], [0.7580122947692871, 0.8334360122680664], [0.7597096562385559, 0.8304274082183838], [0.7614010572433472, 0.8277466893196106], [0.7629236578941345, 0.8247225880622864], [0.7644423842430115, 0.8220417499542236], [0.76579749584198, 0.8190057277679443], [0.7671544551849365, 0.8163219690322876], [0.768354594707489, 0.8132786750793457], [0.7695682048797607, 0.810596227645874], [0.7706283926963806, 0.8075383901596069], [0.7717097997665405, 0.804853081703186], [0.7726402878761292, 0.8017740249633789], [0.773607611656189, 0.7990683913230896], [0.7744157314300537, 0.7959671020507812], [0.7752779126167297, 0.7932489514350891], [0.7759730815887451, 0.7901335954666138], [0.7767396569252014, 0.7874020338058472], [0.7773293256759644, 0.7842814326286316], [0.7780073285102844, 0.7815417051315308], [0.7785032391548157, 0.7784328460693359], [0.7791013121604919, 0.7756993174552917], [0.7795091867446899, 0.7726013660430908], [0.7800329327583313, 0.7698787450790405], [0.780360996723175, 0.7668064832687378], [0.7808184027671814, 0.764102578163147], [0.7810724973678589, 0.7610528469085693], [0.7814639806747437, 0.7583706378936768], [0.7816486954689026, 0.7553508281707764], [0.7819815874099731, 0.7526808977127075], [0.7821066975593567, 0.7496760487556458], [0.7823853492736816, 0.747029185295105], [0.7824558019638062, 0.7440389394760132], [0.78269362449646, 0.7413922548294067], [0.7827181816101074, 0.7384090423583984], [0.7829179763793945, 0.7357752323150635], [0.7829025983810425, 0.7328057289123535], [0.783070981502533, 0.7301636934280396], [0.7830248475074768, 
0.7271904349327087], [0.7831695079803467, 0.7245500087738037], [0.7830953001976013, 0.7215747833251953], [0.7832189798355103, 0.7189193964004517], [0.7831217050552368, 0.7159309387207031], [0.7832241058349609, 0.713265061378479], [0.7831061482429504, 0.7102651596069336], [0.7831924557685852, 0.7075706124305725], [0.7830561399459839, 0.7045519948005676], [0.7831272482872009, 0.7018380165100098], [0.7829763889312744, 0.6988087892532349], [0.7830328941345215, 0.6960668563842773], [0.7828658223152161, 0.6930147409439087], [0.7829055786132812, 0.6902540922164917], [0.7827191948890686, 0.6871962547302246], [0.782741904258728, 0.684410572052002], [0.7825380563735962, 0.6813464164733887], [0.7825424075126648, 0.6785541772842407], [0.7823169231414795, 0.6754964590072632], [0.782300591468811, 0.6726981997489929], [0.7820553779602051, 0.6696518659591675], [0.7820229530334473, 0.6668609380722046], [0.781762957572937, 0.6638363003730774], [0.7817118167877197, 0.6610465049743652], [0.7814379334449768, 0.6580265760421753], [0.7813703417778015, 0.655237078666687], [0.7810788154602051, 0.6522191762924194], [0.7809927463531494, 0.6494088172912598], [0.7806788682937622, 0.6464072465896606], [0.7805625200271606, 0.6436104774475098], [0.7802260518074036, 0.6406289339065552], [0.780083954334259, 0.6378355026245117], [0.7797262072563171, 0.6348768472671509], [0.7795632481575012, 0.6320918798446655], [0.7791853547096252, 0.6291567087173462], [0.7789984941482544, 0.6263894438743591], [0.7785992622375488, 0.6234862804412842], [0.7783910036087036, 0.6207383871078491], [0.7779748439788818, 0.617872953414917], [0.7777429819107056, 0.6151478290557861], [0.7773112058639526, 0.6123102903366089], [0.7770614624023438, 0.6096042990684509], [0.7766152620315552, 0.6067987680435181], [0.7763471603393555, 0.6041029691696167], [0.7758868932723999, 0.6013192534446716], [0.775597870349884, 0.5986365675926208], [0.7751235961914062, 0.5958804488182068], [0.7748151421546936, 0.5932087898254395], [0.7743301391601562, 0.5904852151870728], [0.7740098237991333, 0.5878392457962036], [0.7735146284103394, 0.5851597785949707], [0.7731813192367554, 0.5825405716896057], [0.7726829051971436, 0.5799134969711304], [0.7723402976989746, 0.5773336291313171], [0.7718349695205688, 0.5747507810592651], [0.7714830636978149, 0.5722113847732544], [0.7709739208221436, 0.569664478302002], [0.7706112861633301, 0.5671521425247192], [0.7700906991958618, 0.5646328330039978], [0.7697118520736694, 0.5621358156204224], [0.7691779136657715, 0.5596355199813843], [0.76877760887146, 0.5571460127830505], [0.7682241201400757, 0.554658055305481], [0.7678029537200928, 0.5521757006645203], [0.7672303915023804, 0.5497023463249207], [0.7667788863182068, 0.5472256541252136], [0.7661805152893066, 0.5447655320167542], [0.76570063829422, 0.5422952175140381], [0.7650768756866455, 0.5398462414741516], [0.7645636796951294, 0.5373779535293579], [0.7639098167419434, 0.5349457859992981], [0.7633609175682068, 0.5324814319610596], [0.7626742124557495, 0.5300610661506653], [0.7620800137519836, 0.5276037454605103], [0.7613518834114075, 0.5251919031143188], [0.7607112526893616, 0.5227386951446533], [0.7599382400512695, 0.5203391313552856], [0.7592422366142273, 0.5178895592689514], [0.7584196329116821, 0.515504002571106], [0.7576674222946167, 0.5130663514137268], [0.7567857503890991, 0.5106938481330872], [0.7559628486633301, 0.5082641243934631], [0.7550168633460999, 0.5059043169021606], [0.7541220188140869, 0.5034852623939514], [0.7531049251556396, 0.501142144203186], [0.7521268129348755, 
0.4987429976463318], [0.7510260343551636, 0.4964185357093811], [0.7499673962593079, 0.4940360188484192], [0.748717725276947, 0.49161696434020996], [0.7474930882453918, 0.48914164304733276], [0.7461233735084534, 0.4867189824581146], [0.7447777390480042, 0.48425063490867615], [0.7618122100830078, 0.8362232446670532], [0.7636502981185913, 0.83347487449646], [0.7653294801712036, 0.8303150534629822], [0.7669897079467773, 0.8275513648986816], [0.7684856057167053, 0.8243845701217651], [0.7699759006500244, 0.8216160535812378], [0.7713068723678589, 0.8184438943862915], [0.7726386189460754, 0.8156718015670776], [0.7738069891929626, 0.8125019669532776], [0.775001049041748, 0.8097211122512817], [0.77602618932724, 0.806546688079834], [0.7770941257476807, 0.8037481904029846], [0.7779869437217712, 0.8005586862564087], [0.7789419293403625, 0.7977505326271057], [0.7797139883041382, 0.7945445775985718], [0.7805600762367249, 0.7917273044586182], [0.7812191247940063, 0.7885221242904663], [0.7819700241088867, 0.7857069969177246], [0.7825314998626709, 0.7825014591217041], [0.7831911444664001, 0.7796989679336548], [0.783655047416687, 0.7765082716941833], [0.7842345833778381, 0.7737228870391846], [0.7846156358718872, 0.7705504298210144], [0.7851176857948303, 0.7677870988845825], [0.7854137420654297, 0.7646431922912598], [0.7858492136001587, 0.7619014978408813], [0.7860719561576843, 0.7587841749191284], [0.7864385843276978, 0.7560653686523438], [0.7865893840789795, 0.7529789805412292], [0.7869002819061279, 0.7502807378768921], [0.7869938611984253, 0.7472211122512817], [0.7872515320777893, 0.7445348501205444], [0.7872904539108276, 0.7414988875389099], [0.7875100374221802, 0.7388241291046143], [0.7875071167945862, 0.7358056902885437], [0.7876874208450317, 0.7331365346908569], [0.7876454591751099, 0.7301326990127563], [0.7877999544143677, 0.7274645566940308], [0.7877296805381775, 0.7244675159454346], [0.7878581881523132, 0.7217909693717957], [0.7877601385116577, 0.7187937498092651], [0.7878682017326355, 0.7161048650741577], [0.7877506017684937, 0.713099479675293], [0.7878385782241821, 0.7103917598724365], [0.7876985669136047, 0.7073749303817749], [0.787772536277771, 0.7046465277671814], [0.7876169681549072, 0.7016156315803528], [0.7876715064048767, 0.6988629102706909], [0.7874969244003296, 0.6958185434341431], [0.7875386476516724, 0.6930404901504517], [0.7873517274856567, 0.6899774074554443], [0.7873729467391968, 0.687179446220398], [0.7871638536453247, 0.6841079592704773], [0.7871691584587097, 0.6812925338745117], [0.7869400978088379, 0.6782158613204956], [0.7869201302528381, 0.6753903031349182], [0.7866619825363159, 0.6723212003707886], [0.7866168022155762, 0.6695027351379395], [0.7863398790359497, 0.6664626598358154], [0.7862847447395325, 0.6636655330657959], [0.786003828048706, 0.6606292724609375], [0.7859338521957397, 0.6578224897384644], [0.785631537437439, 0.654788613319397], [0.7855373620986938, 0.6519702672958374], [0.785207986831665, 0.6489317417144775], [0.7850915193557739, 0.6461114883422852], [0.7847462296485901, 0.6430948972702026], [0.7846015095710754, 0.6402807235717773], [0.7842283844947815, 0.6372824907302856], [0.7840571999549866, 0.6344807147979736], [0.7836604118347168, 0.6315074563026428], [0.7834615707397461, 0.6287167072296143], [0.7830417156219482, 0.6257699728012085], [0.7828195691108704, 0.6230020523071289], [0.7823814749717712, 0.6200925707817078], [0.782136082649231, 0.617345929145813], [0.7816786766052246, 0.6144741773605347], [0.7814137935638428, 0.6117510795593262], 
[0.7809401154518127, 0.6089099049568176], [0.7806527614593506, 0.6062048673629761], [0.7801628708839417, 0.6033918261528015], [0.7798581123352051, 0.600698709487915], [0.7793545722961426, 0.5979047417640686], [0.7790327072143555, 0.5952339768409729], [0.7785195112228394, 0.5924630165100098], [0.778184175491333, 0.589818000793457], [0.7776613235473633, 0.5870806574821472], [0.7773148417472839, 0.5844687223434448], [0.7767870426177979, 0.5817739963531494], [0.7764328718185425, 0.5792025327682495], [0.7759019136428833, 0.5765592455863953], [0.7755431532859802, 0.5740216970443726], [0.7750107049942017, 0.5714244246482849], [0.7746404409408569, 0.5689154863357544], [0.774100661277771, 0.5663529634475708], [0.7737163305282593, 0.5638562440872192], [0.7731626629829407, 0.5613163113594055], [0.7727558612823486, 0.5588278770446777], [0.7721869945526123, 0.5563043355941772], [0.771757960319519, 0.5538181662559509], [0.7711700201034546, 0.5513105392456055], [0.7707149982452393, 0.5488243103027344], [0.7701038718223572, 0.546328067779541], [0.769620418548584, 0.5438438653945923], [0.7689864635467529, 0.5413658618927002], [0.7684688568115234, 0.5388789176940918], [0.7678042054176331, 0.5364111661911011], [0.7672553658485413, 0.5339301824569702], [0.766559898853302, 0.531474232673645], [0.7659708261489868, 0.5289894938468933], [0.7652367353439331, 0.5265407562255859], [0.764602541923523, 0.5240600109100342], [0.7638269662857056, 0.521619439125061], [0.7631420493125916, 0.5191366076469421], [0.7623192071914673, 0.5167036056518555], [0.7615793943405151, 0.5142266750335693], [0.7607026100158691, 0.5118029713630676], [0.759895384311676, 0.5093273520469666], [0.7589560747146606, 0.5069139003753662], [0.7580796480178833, 0.504443347454071], [0.757070779800415, 0.502042293548584], [0.7561142444610596, 0.4995788633823395], [0.7550255060195923, 0.4971870183944702], [0.7539842128753662, 0.4947417974472046], [0.7528083324432373, 0.4923752546310425], [0.7516169548034668, 0.48981600999832153], [0.7502833604812622, 0.48734328150749207], [0.7489709258079529, 0.48478594422340393], [0.7475101947784424, 0.4823088049888611], [0.7658735513687134, 0.8395204544067383], [0.7676849961280823, 0.8362635374069214], [0.7695127725601196, 0.8333802223205566], [0.7711423635482788, 0.830103874206543], [0.7727862596511841, 0.8272121548652649], [0.7742319107055664, 0.823933482170105], [0.7757044434547424, 0.8210397958755493], [0.7769860029220581, 0.8177566528320312], [0.7782999277114868, 0.8148674964904785], [0.7794239521026611, 0.8115819692611694], [0.7805951833724976, 0.8086884617805481], [0.7815791964530945, 0.8053889274597168], [0.7826224565505981, 0.8024922013282776], [0.7834748029708862, 0.7991839647293091], [0.7844057083129883, 0.7962768077850342], [0.7851390838623047, 0.7929728031158447], [0.7859633564949036, 0.7900704145431519], [0.7865835428237915, 0.7867816686630249], [0.7873139381408691, 0.7838830947875977], [0.7878347635269165, 0.7806078195571899], [0.7884739637374878, 0.7777293920516968], [0.7888984680175781, 0.7744859457015991], [0.7894585132598877, 0.7716253995895386], [0.7897995114326477, 0.7684031128883362], [0.7902809977531433, 0.7655717134475708], [0.790538489818573, 0.7623875737190247], [0.7909497022628784, 0.7595759630203247], [0.7911326289176941, 0.7564195394515991], [0.7914775609970093, 0.7536435127258301], [0.7915933132171631, 0.7505266070365906], [0.7918823957443237, 0.7477712631225586], [0.7919418811798096, 0.7446752786636353], [0.7921816110610962, 0.7419487237930298], [0.7921900749206543, 0.7388856410980225], 
[0.792389988899231, 0.736170768737793], [0.7923586368560791, 0.7331205606460571], [0.7925227880477905, 0.7304232120513916], [0.7924545407295227, 0.7273944020271301], [0.7925892472267151, 0.7246960401535034], [0.7924911975860596, 0.7216646671295166], [0.7926026582717896, 0.7189697027206421], [0.7924805283546448, 0.7159422039985657], [0.7925716638565063, 0.7132304906845093], [0.7924271821975708, 0.7101866006851196], [0.7924990653991699, 0.7074666023254395], [0.7923351526260376, 0.7044166922569275], [0.792390763759613, 0.7016695737838745], [0.7922107577323914, 0.6986015439033508], [0.7922505140304565, 0.6958361864089966], [0.792054295539856, 0.6927582621574402], [0.7920774221420288, 0.6899622082710266], [0.7918660640716553, 0.6868659257888794], [0.7918729782104492, 0.6840540170669556], [0.7916407585144043, 0.6809481382369995], [0.791624903678894, 0.6781120300292969], [0.7913717031478882, 0.6750093102455139], [0.7913368344306946, 0.6721659898757935], [0.7910621166229248, 0.6690831184387207], [0.7910040616989136, 0.6662614345550537], [0.7906994819641113, 0.6632167100906372], [0.7906180620193481, 0.6604040861129761], [0.7902970314025879, 0.6573457717895508], [0.7901901602745056, 0.6545161008834839], [0.7898480296134949, 0.6514625549316406], [0.7897242307662964, 0.6486194133758545], [0.7893635034561157, 0.6455769538879395], [0.7892146706581116, 0.6427347660064697], [0.7888327836990356, 0.6397066116333008], [0.7886555194854736, 0.636866569519043], [0.7882444858551025, 0.6338651776313782], [0.7880372405052185, 0.631037712097168], [0.7876003980636597, 0.6280614733695984], [0.7873656153678894, 0.6252507567405701], [0.7869065999984741, 0.6223134994506836], [0.7866460084915161, 0.6195287108421326], [0.786166787147522, 0.616627037525177], [0.7858824729919434, 0.6138696670532227], [0.7853851318359375, 0.6110074520111084], [0.7850790023803711, 0.6082710027694702], [0.7845660448074341, 0.6054369807243347], [0.7842427492141724, 0.6027213335037231], [0.7837146520614624, 0.5999122858047485], [0.7833696603775024, 0.5972063541412354], [0.7828325629234314, 0.5944349765777588], [0.7824734449386597, 0.5917468667030334], [0.7819303870201111, 0.5890077352523804], [0.781561553478241, 0.5863436460494995], [0.781015932559967, 0.583652138710022], [0.7806398272514343, 0.5810202360153198], [0.7800943851470947, 0.5783793926239014], [0.7797120809555054, 0.5757861137390137], [0.7791603803634644, 0.5731903314590454], [0.7787705659866333, 0.5706350803375244], [0.7782151103019714, 0.5680735111236572], [0.7778128385543823, 0.5655422806739807], [0.7772436141967773, 0.5629980564117432], [0.7768224477767944, 0.5604777336120605], [0.7762376070022583, 0.5579482316970825], [0.7757946252822876, 0.5554349422454834], [0.7751922607421875, 0.552918553352356], [0.7747244834899902, 0.5504050850868225], [0.7741016745567322, 0.5479000806808472], [0.7736071348190308, 0.5453867316246033], [0.7729623913764954, 0.5428919792175293], [0.7724385857582092, 0.540381908416748], [0.7717654705047607, 0.5378909111022949], [0.7712141275405884, 0.535383939743042], [0.7705106735229492, 0.5329024195671082], [0.7699202299118042, 0.5303954482078552], [0.7691825032234192, 0.5279179215431213], [0.7685518264770508, 0.525406002998352], [0.7677755355834961, 0.5229349732398987], [0.7670974731445312, 0.5204203128814697], [0.7662761211395264, 0.5179513692855835], [0.765546441078186, 0.5154356956481934], [0.764671802520752, 0.5129712820053101], [0.7638782262802124, 0.5104532241821289], [0.7629461288452148, 0.5079928636550903], [0.7620846033096313, 0.505474865436554], 
[0.7610841393470764, 0.5030210018157959], [0.7601487040519714, 0.5005069971084595], [0.7590706944465637, 0.4980562627315521], [0.7580528259277344, 0.4955475926399231], [0.7568947076797485, 0.49310824275016785], [0.7557893395423889, 0.490612268447876], [0.7544693350791931, 0.4880663752555847], [0.7532014846801758, 0.48546016216278076], [0.7517673373222351, 0.48290902376174927], [0.7503718733787537, 0.48028936982154846], [0.7698763608932495, 0.84242844581604], [0.7718526721000671, 0.8394757509231567], [0.7736406326293945, 0.8360552787780762], [0.7754274010658264, 0.8330771923065186], [0.7770252227783203, 0.8296475410461426], [0.7786263823509216, 0.8266646862030029], [0.7800365686416626, 0.8232450485229492], [0.7814754247665405, 0.8202567100524902], [0.7827169895172119, 0.8168419599533081], [0.7839988470077515, 0.8138489723205566], [0.785079836845398, 0.8104392886161804], [0.786225438117981, 0.8074443340301514], [0.787166953086853, 0.8040337562561035], [0.7881894111633301, 0.8010340332984924], [0.7890001535415649, 0.7976253032684326], [0.7899085283279419, 0.7946375608444214], [0.7906050086021423, 0.7912395000457764], [0.7914043068885803, 0.7882633209228516], [0.7919875383377075, 0.7848856449127197], [0.7926957607269287, 0.7819302082061768], [0.7931807637214661, 0.7785699367523193], [0.7937931418418884, 0.7756450772285461], [0.7941794991493225, 0.7723188996315002], [0.7947125434875488, 0.769415020942688], [0.7950127720832825, 0.7661190629005432], [0.7954635620117188, 0.7632431983947754], [0.7956774234771729, 0.7599905729293823], [0.7960597276687622, 0.7571437358856201], [0.7962043285369873, 0.75393146276474], [0.7965186238288879, 0.7511184811592102], [0.7965930104255676, 0.747951328754425], [0.7968561053276062, 0.7451654076576233], [0.7968788146972656, 0.7420333027839661], [0.7970929741859436, 0.7392755150794983], [0.7970659732818604, 0.7361757755279541], [0.7972419261932373, 0.7334324717521667], [0.7971782684326172, 0.7303576469421387], [0.7973183393478394, 0.7276293039321899], [0.7972157597541809, 0.7245738506317139], [0.7973328232765198, 0.721848726272583], [0.7972066402435303, 0.7188023328781128], [0.7972965240478516, 0.7160687446594238], [0.797145664691925, 0.7130239009857178], [0.7972185611724854, 0.7102742195129395], [0.7970497012138367, 0.7072274684906006], [0.7971023321151733, 0.7044612169265747], [0.7969130277633667, 0.7014023065567017], [0.796951413154602, 0.6986154317855835], [0.7967506051063538, 0.6955448985099792], [0.7967731356620789, 0.692735493183136], [0.7965545058250427, 0.6896463632583618], [0.7965632677078247, 0.6868161559104919], [0.7963316440582275, 0.683713436126709], [0.7963235378265381, 0.6808562278747559], [0.7960680723190308, 0.6777286529541016], [0.7960186004638672, 0.6748431921005249], [0.795727014541626, 0.6717537641525269], [0.7956818342208862, 0.6689155101776123], [0.7953894734382629, 0.6658092737197876], [0.7953194379806519, 0.6629812717437744], [0.7949866056442261, 0.659900426864624], [0.7948721647262573, 0.6570524573326111], [0.7945089936256409, 0.6539748907089233], [0.7943670749664307, 0.6511276960372925], [0.7939882278442383, 0.6480658054351807], [0.7938432097434998, 0.6452228426933289], [0.7934600114822388, 0.6421527862548828], [0.7932819724082947, 0.6392872333526611], [0.7928603887557983, 0.636232316493988], [0.7926468849182129, 0.6333833932876587], [0.7921909689903259, 0.6303572654724121], [0.7919471859931946, 0.6275253891944885], [0.7914692163467407, 0.6245373487472534], [0.7911974787712097, 0.621726393699646], [0.7906941175460815, 0.6187781095504761], 
[0.7903956174850464, 0.6159945726394653], [0.7898750305175781, 0.6130926609039307], [0.7895538806915283, 0.6103317737579346], [0.7890146970748901, 0.6074673533439636], [0.788673460483551, 0.6047308444976807], [0.7881186008453369, 0.6018978953361511], [0.7877550721168518, 0.5991740822792053], [0.7871841788291931, 0.5963640809059143], [0.7868005633354187, 0.5936719179153442], [0.7862190008163452, 0.5908989310264587], [0.7858352661132812, 0.5882439613342285], [0.7852597236633301, 0.5854949951171875], [0.7848737239837646, 0.5828773975372314], [0.7842995524406433, 0.5801679491996765], [0.7839075326919556, 0.5775872468948364], [0.7833293676376343, 0.5749276876449585], [0.7829307317733765, 0.5723788738250732], [0.7823500037193298, 0.5697647929191589], [0.7819375991821289, 0.5672414302825928], [0.7813469171524048, 0.5646588206291199], [0.780917763710022, 0.5621374845504761], [0.7803113460540771, 0.5595759153366089], [0.7798610925674438, 0.5570578575134277], [0.7792403101921082, 0.5545132160186768], [0.778764009475708, 0.5519945025444031], [0.7781226634979248, 0.5494608283042908], [0.7776221632957458, 0.5469433069229126], [0.7769614458084106, 0.5444179773330688], [0.7764355540275574, 0.5418999195098877], [0.7757489681243896, 0.5393812656402588], [0.7751938104629517, 0.5368621349334717], [0.7744808197021484, 0.5343523621559143], [0.7738927602767944, 0.531826913356781], [0.7731491327285767, 0.5293210744857788], [0.7725216150283813, 0.5267916321754456], [0.7717422246932983, 0.5242880582809448], [0.7710703611373901, 0.5217498540878296], [0.7702504396438599, 0.5192493200302124], [0.7695286870002747, 0.5167020559310913], [0.7686593532562256, 0.5142029523849487], [0.7678791880607605, 0.5116448402404785], [0.7669507265090942, 0.5091429948806763], [0.7661061882972717, 0.5065881013870239], [0.7651175260543823, 0.5040862560272217], [0.7642002105712891, 0.501521110534668], [0.7631382942199707, 0.4990265369415283], [0.7621411681175232, 0.49645864963531494], [0.7609972953796387, 0.4939645528793335], [0.7599120140075684, 0.4914032220840454], [0.7586743235588074, 0.4889206886291504], [0.7574392557144165, 0.4862399101257324], [0.756044864654541, 0.4836454391479492], [0.7546855211257935, 0.4809533357620239], [0.7531613111495972, 0.4783453345298767], [0.7741636037826538, 0.8459003567695618], [0.7760961055755615, 0.8423728346824646], [0.7780563831329346, 0.8392680883407593], [0.77978515625, 0.8357263803482056], [0.7815529108047485, 0.8326077461242676], [0.7830896973609924, 0.8290669322013855], [0.784666121006012, 0.8259549140930176], [0.7860207557678223, 0.8224163055419922], [0.7874300479888916, 0.8193070292472839], [0.7886179685592651, 0.815768837928772], [0.7898694276809692, 0.8126686215400696], [0.7908996939659119, 0.8091397285461426], [0.7920148968696594, 0.8060427904129028], [0.792907178401947, 0.8025190830230713], [0.7938976287841797, 0.7994298934936523], [0.794660210609436, 0.7959327697753906], [0.7955400943756104, 0.7928527593612671], [0.796188235282898, 0.7893732786178589], [0.7969620227813721, 0.7863197922706604], [0.7974971532821655, 0.7828742265701294], [0.7981765270233154, 0.7798380851745605], [0.7986143827438354, 0.7764201164245605], [0.7991999983787537, 0.7734192609786987], [0.79953932762146, 0.7700420618057251], [0.8000404834747314, 0.7670610547065735], [0.8002920150756836, 0.7637172937393188], [0.8007144927978516, 0.7607800960540771], [0.8008823394775391, 0.757490873336792], [0.8012325167655945, 0.7545841932296753], [0.8013321161270142, 0.7513375282287598], [0.801617443561554, 0.7484804391860962], 
[0.8016526699066162, 0.7452841997146606], [0.801886796951294, 0.7424582839012146], [0.8018692135810852, 0.7392953038215637], [0.8020561337471008, 0.7365058660507202], [0.8019937872886658, 0.7333818674087524], [0.8021445274353027, 0.730608344078064], [0.8020442724227905, 0.7275052070617676], [0.8021637797355652, 0.7247580885887146], [0.8020312190055847, 0.7216783761978149], [0.8021239638328552, 0.7189306616783142], [0.8019667863845825, 0.7158496379852295], [0.8020390272140503, 0.7131038904190063], [0.801859974861145, 0.7100256681442261], [0.8019117116928101, 0.7072653770446777], [0.801712155342102, 0.7041747570037842], [0.8017466068267822, 0.7014027833938599], [0.8015300631523132, 0.6983073353767395], [0.8015480041503906, 0.6955147981643677], [0.8013173341751099, 0.6924005746841431], [0.8013194799423218, 0.6895848512649536], [0.8010764122009277, 0.6864666938781738], [0.8010690212249756, 0.6836268901824951], [0.8008153438568115, 0.6804841160774231], [0.8007979393005371, 0.6776037812232971], [0.8005396127700806, 0.6744232773780823], [0.8004911541938782, 0.671544075012207], [0.8001703023910522, 0.6684591770172119], [0.8000952005386353, 0.6655882596969604], [0.7997558116912842, 0.6624674797058105], [0.7996426820755005, 0.6595936417579651], [0.7992717027664185, 0.6564863324165344], [0.799126148223877, 0.6536121368408203], [0.7987282872200012, 0.6505213379859924], [0.7985526919364929, 0.6476589441299438], [0.7981176972389221, 0.644599199295044], [0.7979130744934082, 0.6417273283004761], [0.7974674701690674, 0.6386353969573975], [0.7972389459609985, 0.6357437968254089], [0.7967684268951416, 0.6326906681060791], [0.7965129017829895, 0.6298123002052307], [0.7960204482078552, 0.6267977356910706], [0.7957372665405273, 0.6239392757415771], [0.7952170372009277, 0.620959997177124], [0.7949079275131226, 0.6181308031082153], [0.7943711280822754, 0.615196943283081], [0.7940380573272705, 0.6124002933502197], [0.7934820652008057, 0.6095056533813477], [0.7931293249130249, 0.6067363023757935], [0.7925565838813782, 0.6038786172866821], [0.7921843528747559, 0.6011344194412231], [0.7916041612625122, 0.598305344581604], [0.7912207841873169, 0.5955747961997986], [0.7906383872032166, 0.5927777886390686], [0.7902421355247498, 0.5900781154632568], [0.7896509766578674, 0.5873415470123291], [0.7892429232597351, 0.5846678018569946], [0.7886466979980469, 0.5819669365882874], [0.7882269620895386, 0.5793260335922241], [0.7876288890838623, 0.5766690373420715], [0.7872040271759033, 0.5740694999694824], [0.7866009473800659, 0.5714566111564636], [0.7861672639846802, 0.5688903331756592], [0.785556972026825, 0.5663046836853027], [0.7851033210754395, 0.5637558102607727], [0.7844773530960083, 0.5611903667449951], [0.7840089201927185, 0.5586502552032471], [0.7833700180053711, 0.5560949444770813], [0.7828786969184875, 0.5535570979118347], [0.7822225093841553, 0.5510126352310181], [0.781710147857666, 0.5484752058982849], [0.7810342311859131, 0.5459409952163696], [0.7804956436157227, 0.5434005260467529], [0.7797971367835999, 0.5408748388290405], [0.7792336940765381, 0.5383313894271851], [0.7785134315490723, 0.5358080863952637], [0.7779182195663452, 0.5332624316215515], [0.7771687507629395, 0.5307417511940002], [0.776542067527771, 0.528186559677124], [0.775760293006897, 0.5256643295288086], [0.7750905752182007, 0.523101270198822], [0.7742698788642883, 0.520576000213623], [0.7735558152198792, 0.5180047750473022], [0.7726885676383972, 0.5154734253883362], [0.7719190716743469, 0.5128870010375977], [0.7709962129592896, 0.5103521347045898], 
[0.7701675295829773, 0.5077527761459351], [0.7691863179206848, 0.5052176713943481], [0.7682900428771973, 0.5026167631149292], [0.7672368884086609, 0.5000665783882141], [0.7662655115127563, 0.49746042490005493], [0.7651345729827881, 0.49491119384765625], [0.7640792727470398, 0.4922962188720703], [0.7628625631332397, 0.489747017621994], [0.7617158889770508, 0.4871333837509155], [0.7603406310081482, 0.4844733774662018], [0.7590314149856567, 0.481740266084671], [0.7575358152389526, 0.479061484336853], [0.7561076283454895, 0.4763120412826538], [0.7783841490745544, 0.8489307165145874], [0.7805057168006897, 0.8457463979721069], [0.782405436038971, 0.8420460820198059], [0.7843185663223267, 0.8388398885726929], [0.7860082387924194, 0.8351387977600098], [0.7877278923988342, 0.8319261074066162], [0.7892192602157593, 0.8282352089881897], [0.7907532453536987, 0.8250210285186768], [0.7920548915863037, 0.821344256401062], [0.7934266328811646, 0.8181321620941162], [0.7945630550384521, 0.8144732713699341], [0.7957797050476074, 0.8112713098526001], [0.7967591285705566, 0.8076339364051819], [0.7978440523147583, 0.8044429421424866], [0.7986887693405151, 0.8008219599723816], [0.7996452450752258, 0.797652006149292], [0.8003613352775574, 0.7940614223480225], [0.8012109994888306, 0.7909132242202759], [0.8018133640289307, 0.787347674369812], [0.802554190158844, 0.7842285633087158], [0.8030416369438171, 0.7807005643844604], [0.8036875128746033, 0.7776052951812744], [0.8040754199028015, 0.7741152048110962], [0.8046250343322754, 0.7710517644882202], [0.8049128651618958, 0.7676031589508057], [0.8053805828094482, 0.7645769715309143], [0.80558180809021, 0.7611726522445679], [0.8059628009796143, 0.7581893801689148], [0.8060805201530457, 0.7548478841781616], [0.8063954710960388, 0.7519108057022095], [0.8064464330673218, 0.7486226558685303], [0.8066954612731934, 0.7457299828529358], [0.8066803812980652, 0.7424986958503723], [0.8068827390670776, 0.7396447658538818], [0.8068220615386963, 0.736460268497467], [0.8069761991500854, 0.7336361408233643], [0.8068702220916748, 0.7304905652999878], [0.8069953918457031, 0.727694571018219], [0.8068587779998779, 0.7245820760726929], [0.8069499731063843, 0.7217986583709717], [0.80678391456604, 0.7187076807022095], [0.8068541288375854, 0.7159249186515808], [0.8066682815551758, 0.7128420472145081], [0.8067165613174438, 0.7100527286529541], [0.8065052032470703, 0.7069659233093262], [0.8065345883369446, 0.7041654586791992], [0.8063079118728638, 0.7010742425918579], [0.8063168525695801, 0.6982580423355103], [0.8060703277587891, 0.6951608657836914], [0.8060673475265503, 0.6923273801803589], [0.8058123588562012, 0.6892139911651611], [0.8057971000671387, 0.6863689422607422], [0.8055329322814941, 0.6832467913627625], [0.8055146932601929, 0.6803759336471558], [0.8052640557289124, 0.6772413849830627], [0.805248498916626, 0.6743018627166748], [0.8049529194831848, 0.6710717678070068], [0.8048862218856812, 0.6682166457176208], [0.8045288324356079, 0.6650623083114624], [0.8044137954711914, 0.6621682643890381], [0.8040363788604736, 0.659022331237793], [0.8038939237594604, 0.6561283469200134], [0.8034878373146057, 0.6529936790466309], [0.8033084869384766, 0.6500965356826782], [0.8028521537780762, 0.646967887878418], [0.8026236295700073, 0.6440962553024292], [0.8021383285522461, 0.6410185098648071], [0.8018796443939209, 0.6381101608276367], [0.8013817071914673, 0.6350246667861938], [0.8011124730110168, 0.6321251392364502], [0.8006002902984619, 0.6290664672851562], [0.8003069758415222, 0.6261839270591736], 
[0.7997691631317139, 0.6231557130813599], [0.7994510531425476, 0.6202982068061829], [0.7988953590393066, 0.6173142194747925], [0.798552930355072, 0.6144887208938599], [0.7979782223701477, 0.611552357673645], [0.7976140975952148, 0.608757495880127], [0.7970216274261475, 0.6058639287948608], [0.7966406941413879, 0.60309898853302], [0.7960416078567505, 0.6002447605133057], [0.7956497669219971, 0.5975061655044556], [0.7950523495674133, 0.5946860313415527], [0.7946556210517883, 0.5919630527496338], [0.7940521240234375, 0.5891653299331665], [0.7936396598815918, 0.5864843130111694], [0.7930145263671875, 0.5837286710739136], [0.7925831079483032, 0.5810915231704712], [0.7919542789459229, 0.5783766508102417], [0.7915157079696655, 0.5757788419723511], [0.7908813953399658, 0.5731121301651001], [0.7904326915740967, 0.5705451369285583], [0.7897914052009583, 0.5679227113723755], [0.7893244028091431, 0.5653712749481201], [0.7886755466461182, 0.5627741813659668], [0.788193941116333, 0.5602337121963501], [0.7875320315361023, 0.5576584339141846], [0.7870270013809204, 0.5551127195358276], [0.7863481044769287, 0.552548885345459], [0.7858235836029053, 0.5500057935714722], [0.7851295471191406, 0.5474522709846497], [0.7845820188522339, 0.5449062585830688], [0.7838690280914307, 0.5423574447631836], [0.7832989692687988, 0.5398085117340088], [0.7825639247894287, 0.5372663140296936], [0.7819676399230957, 0.5347107648849487], [0.7812084555625916, 0.5321689248085022], [0.7805807590484619, 0.5296052098274231], [0.779793381690979, 0.5270626544952393], [0.7791280746459961, 0.5244824886322021], [0.778303325176239, 0.5219334363937378], [0.7775952816009521, 0.5193434953689575], [0.7767300605773926, 0.5167897939682007], [0.7759721279144287, 0.5141787528991699], [0.7750553488731384, 0.511611819267273], [0.7742424011230469, 0.5089878439903259], [0.773269772529602, 0.5064136981964111], [0.7723915576934814, 0.5037721395492554], [0.7713525295257568, 0.5011958479881287], [0.7704025506973267, 0.49854186177253723], [0.769292950630188, 0.4959515333175659], [0.7682627439498901, 0.4932851493358612], [0.7670692205429077, 0.49069076776504517], [0.7659456133842468, 0.48801177740097046], [0.7646580934524536, 0.48542091250419617], [0.7633817195892334, 0.48261627554893494], [0.7619302272796631, 0.47990918159484863], [0.7605336904525757, 0.47708404064178467], [0.7589602470397949, 0.4743630886077881], [0.782922625541687, 0.8525691628456116], [0.7849714159965515, 0.8487541675567627], [0.7870715856552124, 0.845412015914917], [0.7889059782028198, 0.8415849208831787], [0.7907913327217102, 0.8382315635681152], [0.7924115061759949, 0.8344124555587769], [0.7941009402275085, 0.8310546875], [0.795523464679718, 0.8272453546524048], [0.797024130821228, 0.823906421661377], [0.7982641458511353, 0.8201155066490173], [0.7995981574058533, 0.8167862296104431], [0.8006715774536133, 0.8130175471305847], [0.8018519282341003, 0.8097133636474609], [0.8027702569961548, 0.8059736490249634], [0.8038167953491211, 0.8026852607727051], [0.804601788520813, 0.7989775538444519], [0.8055230379104614, 0.7957234382629395], [0.8061819076538086, 0.792053759098053], [0.8069949746131897, 0.7888228893280029], [0.8075414896011353, 0.7851810455322266], [0.8082457184791565, 0.7819852232933044], [0.8086777329444885, 0.7783883810043335], [0.8092824816703796, 0.7752184867858887], [0.80961012840271, 0.7716610431671143], [0.8101202249526978, 0.7685354948043823], [0.8103505373001099, 0.7650334239006042], [0.8107752799987793, 0.7619367837905884], [0.8109189867973328, 0.75849449634552], 
[0.8112643957138062, 0.7554693222045898], [0.811330258846283, 0.7520914077758789], [0.8116047382354736, 0.749108076095581], [0.8116032481193542, 0.7457886934280396], [0.8118202686309814, 0.7428682446479797], [0.8117600083351135, 0.7396136522293091], [0.811927318572998, 0.7367292642593384], [0.8118209838867188, 0.7335156202316284], [0.8119461536407471, 0.730678915977478], [0.8118008375167847, 0.7275133728981018], [0.8118933439254761, 0.7246980667114258], [0.8117150068283081, 0.7215561866760254], [0.8117817044258118, 0.7187718152999878], [0.8115807771682739, 0.7156459093093872], [0.8116234540939331, 0.7128585577011108], [0.8114010095596313, 0.7097306847572327], [0.8114244341850281, 0.7069361209869385], [0.8111810088157654, 0.7038134932518005], [0.8111839294433594, 0.7010052800178528], [0.8109217286109924, 0.6978755593299866], [0.8109089136123657, 0.6950592994689941], [0.8106307983398438, 0.6919246912002563], [0.8106011152267456, 0.6890875101089478], [0.8103083372116089, 0.6859475374221802], [0.81026291847229, 0.6831017732620239], [0.8099578619003296, 0.6799525022506714], [0.8098947405815125, 0.6770963668823242], [0.8095729351043701, 0.6738827228546143], [0.8095188140869141, 0.6708979606628418], [0.8091771602630615, 0.6677000522613525], [0.8090699911117554, 0.6647695302963257], [0.8086887001991272, 0.6616100072860718], [0.8085470199584961, 0.6586781144142151], [0.8081380128860474, 0.6555250883102417], [0.8079639673233032, 0.6525883674621582], [0.8075220584869385, 0.6494399309158325], [0.8073176145553589, 0.6464800834655762], [0.8068417310714722, 0.6433641314506531], [0.8065875768661499, 0.6404509544372559], [0.8060837984085083, 0.6373542547225952], [0.8058086037635803, 0.6344301700592041], [0.8052805662155151, 0.6313437223434448], [0.8049777746200562, 0.6284282207489014], [0.8044250011444092, 0.625367283821106], [0.8040974140167236, 0.622467041015625], [0.8035222887992859, 0.6194461584091187], [0.8031651973724365, 0.6165781021118164], [0.8025708198547363, 0.6136062145233154], [0.8021938800811768, 0.6107747554779053], [0.8015842437744141, 0.6078477501869202], [0.8011894226074219, 0.6050496697425842], [0.8005682826042175, 0.6021706461906433], [0.8001571893692017, 0.5994062423706055], [0.7995283603668213, 0.5965688824653625], [0.7991054058074951, 0.5938394665718079], [0.7984740138053894, 0.5910330414772034], [0.7980417013168335, 0.5883029699325562], [0.7973968982696533, 0.5855239033699036], [0.7969487905502319, 0.5828322172164917], [0.7963005304336548, 0.5801142454147339], [0.7958362102508545, 0.5774592161178589], [0.7951831221580505, 0.5747911930084229], [0.7947073578834534, 0.5721719264984131], [0.7940455079078674, 0.5695475339889526], [0.7935560941696167, 0.5669586062431335], [0.7928834557533264, 0.5643624067306519], [0.7923815250396729, 0.5617969036102295], [0.7916995882987976, 0.5592144727706909], [0.7911787033081055, 0.5566543340682983], [0.7904813289642334, 0.5540832877159119], [0.7899426221847534, 0.5515222549438477], [0.7892327308654785, 0.5489647388458252], [0.7886744737625122, 0.5464025735855103], [0.7879467010498047, 0.5438482165336609], [0.7873677015304565, 0.5412855744361877], [0.7866242527961731, 0.538733184337616], [0.786021888256073, 0.5361620187759399], [0.7852541208267212, 0.5336135625839233], [0.7846225500106812, 0.531033456325531], [0.7838287353515625, 0.5284788608551025], [0.7831664085388184, 0.5258888006210327], [0.7823410034179688, 0.5233243703842163], [0.7816391587257385, 0.5207129716873169], [0.7807759046554565, 0.5181378126144409], [0.7800297737121582, 
0.5155150890350342], [0.7791191339492798, 0.5129249095916748], [0.7783216238021851, 0.5102757811546326], [0.7773595452308655, 0.5076730847358704], [0.7765015363693237, 0.5050041675567627], [0.7754769921302795, 0.502386212348938], [0.7745575904846191, 0.49970296025276184], [0.7734633684158325, 0.4970703423023224], [0.7724626064300537, 0.4943693280220032], [0.7712880373001099, 0.49172189831733704], [0.7701992988586426, 0.48900318145751953], [0.7689316868782043, 0.48634645342826843], [0.7677527070045471, 0.4836185574531555], [0.7663205862045288, 0.4808388948440552], [0.7649756073951721, 0.4779874086380005], [0.7634260058403015, 0.47519469261169434], [0.7619633674621582, 0.4723208546638489], [0.7873807549476624, 0.8557150363922119], [0.7896436452865601, 0.8522934913635254], [0.7916537523269653, 0.8483004570007324], [0.7937021255493164, 0.8448495864868164], [0.7954903244972229, 0.8408530950546265], [0.7973220944404602, 0.8373973369598389], [0.7988928556442261, 0.8334146738052368], [0.8005280494689941, 0.8299638032913208], [0.8018957376480103, 0.8260142803192139], [0.8033490180969238, 0.8225694894790649], [0.8045288920402527, 0.8186517953872681], [0.8058174848556519, 0.8152319192886353], [0.8068304657936096, 0.8113480806350708], [0.8079661130905151, 0.8079491853713989], [0.8088244199752808, 0.8041121959686279], [0.8098294734954834, 0.8007447719573975], [0.8105534911155701, 0.7969507575035095], [0.8114323616027832, 0.7936158776283264], [0.8120301961898804, 0.7898668050765991], [0.8128043413162231, 0.7865562438964844], [0.8132891058921814, 0.782842755317688], [0.8139504790306091, 0.7795656323432922], [0.8143189549446106, 0.7758962512016296], [0.8148823380470276, 0.7726616859436035], [0.8151474595069885, 0.7690443992614746], [0.815609872341156, 0.7658510208129883], [0.8157776594161987, 0.7622935771942139], [0.8161568641662598, 0.7591612339019775], [0.816240668296814, 0.7556819319725037], [0.8165396451950073, 0.7526078224182129], [0.8165475130081177, 0.7491961717605591], [0.8167837858200073, 0.7461848258972168], [0.8167267441749573, 0.742843747138977], [0.8169018626213074, 0.7398887872695923], [0.8167877197265625, 0.7366119623184204], [0.8169156312942505, 0.7337002754211426], [0.8167597055435181, 0.7304843664169312], [0.816845715045929, 0.7276135087013245], [0.8166501522064209, 0.7244411706924438], [0.8167131543159485, 0.7216098308563232], [0.8165009021759033, 0.7184727787971497], [0.8165381550788879, 0.715644896030426], [0.816300630569458, 0.7125180959701538], [0.8163163065910339, 0.7096859216690063], [0.8160578608512878, 0.7065634727478027], [0.8160514831542969, 0.7037240266799927], [0.8157733678817749, 0.7005990743637085], [0.8157508373260498, 0.6977548599243164], [0.8154556751251221, 0.6946282386779785], [0.8154137134552002, 0.6917687654495239], [0.815101683139801, 0.6886336803436279], [0.8150430917739868, 0.685761570930481], [0.8147145509719849, 0.682624101638794], [0.814634382724762, 0.6797369122505188], [0.8142750263214111, 0.676578164100647], [0.8141622543334961, 0.6736428737640381], [0.8137810230255127, 0.670416533946991], [0.8136849403381348, 0.6674346923828125], [0.8133155703544617, 0.6642341613769531], [0.8131886720657349, 0.661273717880249], [0.8127832412719727, 0.6580891013145447], [0.8126180171966553, 0.6551262140274048], [0.8121743202209473, 0.6519429683685303], [0.8119708299636841, 0.6489765644073486], [0.811496376991272, 0.6458020806312561], [0.8112714886665344, 0.6428366899490356], [0.8107800483703613, 0.6397019028663635], [0.8105096817016602, 0.6367613077163696], 
[0.809972882270813, 0.6336365342140198], [0.8096675872802734, 0.6306979656219482], [0.8091018199920654, 0.6275931000709534], [0.808765709400177, 0.6246688365936279], [0.8081728219985962, 0.6215991973876953], [0.8078100681304932, 0.6186978816986084], [0.8071961998939514, 0.6156703233718872], [0.806809663772583, 0.6128139495849609], [0.8061795830726624, 0.6098408102989197], [0.80577152967453, 0.6070146560668945], [0.8051283955574036, 0.604093074798584], [0.8047024011611938, 0.6013056635856628], [0.8040504455566406, 0.5984336137771606], [0.8036115169525146, 0.5956777334213257], [0.8029502034187317, 0.5928528308868408], [0.8024896383285522, 0.5901234149932861], [0.8018099069595337, 0.5873143672943115], [0.8013451099395752, 0.5846047401428223], [0.800668478012085, 0.5818339586257935], [0.8001922369003296, 0.5791667699813843], [0.7995051145553589, 0.5764400362968445], [0.7990171313285828, 0.573823869228363], [0.7983230352401733, 0.5711402893066406], [0.7978218197822571, 0.5685557126998901], [0.7971204519271851, 0.5659141540527344], [0.796605110168457, 0.5633512735366821], [0.795896053314209, 0.5607436895370483], [0.7953600883483887, 0.5581845045089722], [0.7946391105651855, 0.5555915236473083], [0.7940883040428162, 0.5530358552932739], [0.7933555245399475, 0.5504531264305115], [0.7927889227867126, 0.5478980541229248], [0.7920436859130859, 0.5453233122825623], [0.7914584875106812, 0.5427621006965637], [0.7906998991966248, 0.5401936769485474], [0.7900919914245605, 0.5376230478286743], [0.7893133163452148, 0.5350531339645386], [0.7886820435523987, 0.5324724912643433], [0.7878804206848145, 0.5299012660980225], [0.787221372127533, 0.5273063778877258], [0.7863917350769043, 0.5247265100479126], [0.7856956720352173, 0.522112250328064], [0.7848316431045532, 0.5195188522338867], [0.7840973138809204, 0.516879677772522], [0.7831955552101135, 0.5142737627029419], [0.78240966796875, 0.5116068124771118], [0.781457245349884, 0.5089842081069946], [0.7806187868118286, 0.5062921047210693], [0.7796134948730469, 0.5036495327949524], [0.7787166833877563, 0.5009298324584961], [0.7776461243629456, 0.4982745945453644], [0.7766692638397217, 0.49552276730537415], [0.7755206823348999, 0.4928514063358307], [0.774459958076477, 0.4900803565979004], [0.7732240557670593, 0.48738789558410645], [0.7720739841461182, 0.48459744453430176], [0.7707427740097046, 0.4818975627422333], [0.7694236040115356, 0.4789579510688782], [0.7679183483123779, 0.47614574432373047], [0.7664803266525269, 0.4731985330581665], [0.764854907989502, 0.4703752100467682], [0.2148609161376953, 0.8596943616867065], [0.2193925380706787, 0.8558772802352905], [0.22372746467590332, 0.852615237236023], [0.22800827026367188, 0.8489820957183838], [0.2321164608001709, 0.845841109752655], [0.23615145683288574, 0.8423913717269897], [0.24005341529846191, 0.8393763303756714], [0.2438698410987854, 0.8361071348190308], [0.24758291244506836, 0.8332139849662781], [0.2511981129646301, 0.8301181793212891], [0.2547433376312256, 0.8273442387580872], [0.2581758499145508, 0.8244278430938721], [0.2615770101547241, 0.821776807308197], [0.26484215259552, 0.8190493583679199], [0.26811206340789795, 0.8165323734283447], [0.2712162733078003, 0.8140164613723755], [0.27435702085494995, 0.8116517066955566], [0.27729129791259766, 0.8093796372413635], [0.2803184390068054, 0.8071956634521484], [0.2830389738082886, 0.8052566051483154], [0.28596532344818115, 0.803339958190918], [0.2882981300354004, 0.8019066452980042], [0.2909775972366333, 0.8005291223526001], [0.29250073432922363, 
0.7997169494628906], [0.21734952926635742, 0.8630684614181519], [0.2216615080833435, 0.8596657514572144], [0.22605490684509277, 0.8557555079460144], [0.23014116287231445, 0.8524894118309021], [0.23429888486862183, 0.8487684726715088], [0.23815923929214478, 0.845637321472168], [0.24208605289459229, 0.8421059846878052], [0.24574792385101318, 0.8391054272651672], [0.24947035312652588, 0.835753321647644], [0.2529482841491699, 0.832883894443512], [0.25648462772369385, 0.8297056555747986], [0.2597886919975281, 0.8269620537757874], [0.26315760612487793, 0.8239623308181763], [0.26630914211273193, 0.8213573694229126], [0.2695273756980896, 0.8185491561889648], [0.2725255489349365, 0.8161056041717529], [0.27560609579086304, 0.8135047554969788], [0.2784329056739807, 0.8112447261810303], [0.281380295753479, 0.8088982105255127], [0.2839955687522888, 0.8068900108337402], [0.28678786754608154, 0.804879903793335], [0.2890430688858032, 0.8032363653182983], [0.2914304733276367, 0.8017852306365967], [0.29302310943603516, 0.8003250956535339], [0.21981358528137207, 0.8670086860656738], [0.22419941425323486, 0.8629214763641357], [0.22838330268859863, 0.8594162464141846], [0.23252153396606445, 0.8555290102958679], [0.23648977279663086, 0.8521533608436584], [0.24038541316986084, 0.8484745621681213], [0.24414288997650146, 0.8452380299568176], [0.24782323837280273, 0.8417472839355469], [0.2513895034790039, 0.8386437892913818], [0.2548694610595703, 0.8353432416915894], [0.25826776027679443, 0.8323601484298706], [0.2615617513656616, 0.8292503356933594], [0.26479947566986084, 0.8263989090919495], [0.2679232358932495, 0.8234843015670776], [0.271018385887146, 0.8207745552062988], [0.27397632598876953, 0.8180768489837646], [0.27693378925323486, 0.8155069947242737], [0.2797127962112427, 0.8130639791488647], [0.28252196311950684, 0.8106869459152222], [0.2850976586341858, 0.808563768863678], [0.2877103090286255, 0.8064252138137817], [0.2899574041366577, 0.8047494888305664], [0.2920825481414795, 0.8028691411018372], [0.2938937544822693, 0.8013700246810913], [0.22251468896865845, 0.8703094720840454], [0.22667944431304932, 0.8666670322418213], [0.23092329502105713, 0.8624958992004395], [0.2348589301109314, 0.8589993715286255], [0.23887377977371216, 0.8550353646278381], [0.24258702993392944, 0.851685643196106], [0.24636977910995483, 0.8479222059249878], [0.2498844861984253, 0.8447133302688599], [0.25346267223358154, 0.8411487340927124], [0.2567936182022095, 0.8380722999572754], [0.26018691062927246, 0.8346928358078003], [0.2633429169654846, 0.8317587375640869], [0.2665671110153198, 0.8285682201385498], [0.26955705881118774, 0.8257782459259033], [0.27262139320373535, 0.8227850794792175], [0.2754480838775635, 0.8201480507850647], [0.2783699631690979, 0.8173654079437256], [0.28101325035095215, 0.814919114112854], [0.2837730646133423, 0.8123937845230103], [0.28619998693466187, 0.8101696968078613], [0.288760244846344, 0.8079701662063599], [0.2908360958099365, 0.806026816368103], [0.2930143475532532, 0.8041088581085205], [0.29466068744659424, 0.8023420572280884], [0.22520774602890015, 0.8742044568061829], [0.22943371534347534, 0.8698575496673584], [0.2334681749343872, 0.8661108613014221], [0.237449049949646, 0.861987829208374], [0.24126845598220825, 0.8583851456642151], [0.24502015113830566, 0.8544732332229614], [0.24862313270568848, 0.8510257005691528], [0.2521620988845825, 0.8473237752914429], [0.2555752396583557, 0.8440134525299072], [0.258916437625885, 0.8405064344406128], [0.26215505599975586, 0.8373333215713501], 
[0.2653105854988098, 0.8340294361114502], [0.2683870792388916, 0.830990731716156], [0.271368145942688, 0.8278906345367432], [0.27428579330444336, 0.8249918222427368], [0.27709776163101196, 0.82210373878479], [0.27986061573028564, 0.8193584084510803], [0.28249621391296387, 0.8167270421981812], [0.28507936000823975, 0.8141717910766602], [0.28751933574676514, 0.8118283152580261], [0.2898576259613037, 0.8094978332519531], [0.292009174823761, 0.80745929479599], [0.29397261142730713, 0.8053534030914307], [0.29576849937438965, 0.8035087585449219], [0.22812455892562866, 0.8774241209030151], [0.2321317195892334, 0.873553454875946], [0.23622190952301025, 0.8691275715827942], [0.23999619483947754, 0.8654072284698486], [0.24385148286819458, 0.8611985445022583], [0.24741458892822266, 0.857643187046051], [0.2510513663291931, 0.853663980960846], [0.25441378355026245, 0.8502558469772339], [0.2578481435775757, 0.8464747667312622], [0.2610236406326294, 0.8432126045227051], [0.26427239179611206, 0.8396312594413757], [0.26726841926574707, 0.8365110158920288], [0.2703424096107483, 0.833128809928894], [0.27317148447036743, 0.8301577568054199], [0.27608227729797363, 0.8269717693328857], [0.2787383794784546, 0.824150562286377], [0.28148525953292847, 0.8211884498596191], [0.2839583158493042, 0.8185620903968811], [0.2865375280380249, 0.8158525824546814], [0.28879106044769287, 0.813433051109314], [0.2911476492881775, 0.8109897375106812], [0.2931077480316162, 0.8088165521621704], [0.29516464471817017, 0.8066259622573853], [0.2967836856842041, 0.8046382665634155], [0.23103082180023193, 0.8812494874000549], [0.2350931167602539, 0.8766577243804932], [0.23897385597229004, 0.8726808428764343], [0.24280011653900146, 0.8683173656463623], [0.24645471572875977, 0.8645036220550537], [0.2500506043434143, 0.8603748083114624], [0.253497838973999, 0.8567224740982056], [0.2568895220756531, 0.852807879447937], [0.2601475715637207, 0.8493028283119202], [0.263342022895813, 0.8455986976623535], [0.2664220333099365, 0.8422398567199707], [0.26943838596343994, 0.8387390375137329], [0.27234089374542236, 0.8355244398117065], [0.2751803398132324, 0.8322282433509827], [0.2779223322868347, 0.8291534185409546], [0.280592679977417, 0.8260735273361206], [0.2831576466560364, 0.8231699466705322], [0.2856454849243164, 0.8203362226486206], [0.28802430629730225, 0.8176242113113403], [0.29032665491104126, 0.8150478601455688], [0.29247093200683594, 0.8125571012496948], [0.2945190668106079, 0.8102568984031677], [0.2963714599609375, 0.8079818487167358], [0.298134446144104, 0.8058677911758423], [0.23414981365203857, 0.8843668699264526], [0.23799365758895874, 0.8802863955497742], [0.2419334053993225, 0.8756140470504761], [0.2455524206161499, 0.8716837167739868], [0.24924588203430176, 0.8672493100166321], [0.2526441216468811, 0.8634927272796631], [0.25612103939056396, 0.859287440776825], [0.2593247890472412, 0.8556925058364868], [0.262612521648407, 0.8517023921012878], [0.26562976837158203, 0.8482639789581299], [0.2687303423881531, 0.8444830179214478], [0.27157366275787354, 0.8411921262741089], [0.2744985818862915, 0.8376089334487915], [0.27715760469436646, 0.8344626426696777], [0.2799130082130432, 0.8310838341712952], [0.2824026942253113, 0.8280985355377197], [0.2849828600883484, 0.8249502182006836], [0.28728222846984863, 0.822152853012085], [0.2896808385848999, 0.819256067276001], [0.29178738594055176, 0.8166673183441162], [0.2939843535423279, 0.8140363097190857], [0.29583895206451416, 0.8116750121116638], [0.2977766990661621, 0.8092782497406006], 
[0.29937469959259033, 0.8071045875549316], [0.23727178573608398, 0.8881264925003052], [0.24116671085357666, 0.8833083510398865], [0.24489915370941162, 0.8791266679763794], [0.24858862161636353, 0.8745434880256653], [0.252088725566864, 0.8705555200576782], [0.2555558681488037, 0.8662147521972656], [0.2588447332382202, 0.8623969554901123], [0.2621058225631714, 0.8582812547683716], [0.2652076482772827, 0.8546142578125], [0.26827454566955566, 0.8507130146026611], [0.271192729473114, 0.8472050428390503], [0.27408522367477417, 0.8435039520263672], [0.2768288850784302, 0.8401329517364502], [0.27954280376434326, 0.8366402387619019], [0.2821052074432373, 0.8334217071533203], [0.2846450209617615, 0.8301429748535156], [0.2870246171951294, 0.8271054625511169], [0.2893860340118408, 0.824070930480957], [0.29156792163848877, 0.8212336897850037], [0.29374003410339355, 0.8184455037117004], [0.29571235179901123, 0.8158196210861206], [0.2976647615432739, 0.8132860064506531], [0.29939526319503784, 0.8108782768249512], [0.30111682415008545, 0.8085378408432007], [0.24059319496154785, 0.8911375999450684], [0.24426448345184326, 0.8868603706359863], [0.24804365634918213, 0.8819787502288818], [0.2515343427658081, 0.8778891563415527], [0.25510716438293457, 0.8732725381851196], [0.25838130712509155, 0.8693623542785645], [0.2617388963699341, 0.8649759292602539], [0.26480698585510254, 0.8612264394760132], [0.26796644926071167, 0.8570523858070374], [0.27084529399871826, 0.8534626960754395], [0.27381432056427, 0.8495068550109863], [0.27652454376220703, 0.846062958240509], [0.27932673692703247, 0.8423073291778564], [0.28185391426086426, 0.8390105962753296], [0.28447121381759644, 0.8354599475860596], [0.2868133783340454, 0.8323317766189575], [0.28924959897994995, 0.8290141820907593], [0.2914005517959595, 0.8260805606842041], [0.2936525344848633, 0.8230074048042297], [0.29560577869415283, 0.8202770948410034], [0.29765909910202026, 0.8174437880516052], [0.29938995838165283, 0.8149353265762329], [0.30122458934783936, 0.8123383522033691], [0.30274468660354614, 0.8100011944770813], [0.24389761686325073, 0.8947932720184326], [0.24762535095214844, 0.8897749185562134], [0.25119102001190186, 0.8854457139968872], [0.2547423243522644, 0.8806703090667725], [0.2581050395965576, 0.876518964767456], [0.2614448666572571, 0.8719924688339233], [0.264603853225708, 0.868010401725769], [0.26774001121520996, 0.8637173771858215], [0.27070850133895874, 0.8599027395248413], [0.27365952730178833, 0.8558274507522583], [0.2764362096786499, 0.8521688580513], [0.27920329570770264, 0.8482951521873474], [0.2818112373352051, 0.844778835773468], [0.28441011905670166, 0.8411085605621338], [0.2868322730064392, 0.8377524018287659], [0.2892445921897888, 0.8343019485473633], [0.29147207736968994, 0.8311327695846558], [0.2936975359916687, 0.8279212713241577], [0.29572951793670654, 0.8249558210372925], [0.2977718114852905, 0.8219826221466064], [0.2996019124984741, 0.8192158937454224], [0.30143094062805176, 0.816493809223175], [0.30305105447769165, 0.8139510750770569], [0.30470049381256104, 0.8114001154899597], [0.24738788604736328, 0.8976770043373108], [0.2508898377418518, 0.8932565450668335], [0.2545052766799927, 0.8882002830505371], [0.2578504681587219, 0.8839420676231384], [0.2612762451171875, 0.8791322708129883], [0.26441484689712524, 0.8750625252723694], [0.2676454782485962, 0.8704910278320312], [0.2705839276313782, 0.8665946125984192], [0.27361810207366943, 0.8622543811798096], [0.27637678384780884, 0.8585197925567627], [0.27922898530960083, 
0.8543893694877625], [0.28180205821990967, 0.8508021831512451], [0.2844747304916382, 0.8468725681304932], [0.2868766784667969, 0.8434286117553711], [0.2893659472465515, 0.8397055864334106], [0.29157471656799316, 0.8364487886428833], [0.293881893157959, 0.8329626321792603], [0.29589593410491943, 0.8298925757408142], [0.29800164699554443, 0.8266510963439941], [0.2998238801956177, 0.8237801194190979], [0.30174577236175537, 0.8207707405090332], [0.30335891246795654, 0.8181307315826416], [0.30507487058639526, 0.8153674602508545], [0.3065260052680969, 0.8128684759140015], [0.2508745789527893, 0.9012503623962402], [0.25442034006118774, 0.89605712890625], [0.25781315565109253, 0.8915756940841675], [0.26121413707733154, 0.8866040706634521], [0.26442766189575195, 0.8823004364967346], [0.26763641834259033, 0.8775970935821533], [0.2706650495529175, 0.8734630346298218], [0.2736783027648926, 0.8689974546432495], [0.2765113115310669, 0.8650470972061157], [0.27934616804122925, 0.8607975244522095], [0.2819984555244446, 0.8569871783256531], [0.28464770317077637, 0.8529448509216309], [0.2871209383010864, 0.8492968082427979], [0.28960686922073364, 0.8454447984695435], [0.2918952703475952, 0.841951847076416], [0.2941799759864807, 0.8383381366729736], [0.29627543687820435, 0.8350346088409424], [0.29837870597839355, 0.8316463232040405], [0.30026674270629883, 0.8285553455352783], [0.3021811246871948, 0.8254006505012512], [0.30387943983078003, 0.8225168585777283], [0.3055996894836426, 0.8196240663528442], [0.30710136890411377, 0.8169633150100708], [0.3086669445037842, 0.8142198920249939], [0.25453734397888184, 0.903999388217926], [0.2578458786010742, 0.8994247913360596], [0.2612708806991577, 0.8941953182220459], [0.26446813344955444, 0.8897899389266968], [0.2677517533302307, 0.8848100900650024], [0.27075278759002686, 0.8805915713310242], [0.2738523483276367, 0.8758442997932434], [0.2766669988632202, 0.8718166351318359], [0.2795807123184204, 0.8673142790794373], [0.28221702575683594, 0.863426685333252], [0.28494590520858765, 0.8591160774230957], [0.2873927354812622, 0.8553899526596069], [0.28993773460388184, 0.851298451423645], [0.2922271490097046, 0.8477201461791992], [0.2946070432662964, 0.8438273072242737], [0.29668503999710083, 0.8404330015182495], [0.29885339736938477, 0.8367741107940674], [0.30074501037597656, 0.8335695266723633], [0.30272239446640015, 0.8301620483398438], [0.30442237854003906, 0.8271623253822327], [0.30621159076690674, 0.8240007162094116], [0.3077164888381958, 0.8212311267852783], [0.309317946434021, 0.8183202147483826], [0.3106845021247864, 0.815673828125], [0.2582096457481384, 0.9074636697769165], [0.26156580448150635, 0.9021034836769104], [0.26476943492889404, 0.8975009322166443], [0.2680038809776306, 0.8923530578613281], [0.27106308937072754, 0.8879044055938721], [0.2741347551345825, 0.883021354675293], [0.27702128887176514, 0.8787524700164795], [0.27990633249282837, 0.8741259574890137], [0.28261131048202515, 0.8700300455093384], [0.2853337526321411, 0.8656017780303955], [0.2878655791282654, 0.8616544008255005], [0.29040294885635376, 0.8574459552764893], [0.292746901512146, 0.8536728620529175], [0.2951163053512573, 0.8496535420417786], [0.2972908616065979, 0.8460195064544678], [0.2994678020477295, 0.8422279357910156], [0.3014295697212219, 0.8387978076934814], [0.30340927839279175, 0.8352371454238892], [0.3051692843437195, 0.8320148587226868], [0.30696070194244385, 0.8287030458450317], [0.30853569507598877, 0.8257011771202087], [0.3101440668106079, 0.8226592540740967], 
[0.3115438222885132, 0.8198994398117065], [0.313035249710083, 0.8169831037521362], [0.26203811168670654, 0.9100770950317383], [0.265167236328125, 0.9053895473480225], [0.26840728521347046, 0.9000064134597778], [0.2714332938194275, 0.8954563140869141], [0.27454710006713867, 0.8903021216392517], [0.2774035334587097, 0.8859436511993408], [0.28036177158355713, 0.8810358643531799], [0.28304094076156616, 0.8768678903579712], [0.28582167625427246, 0.8721901178359985], [0.28833919763565063, 0.8681646585464478], [0.2909556031227112, 0.8636921644210815], [0.2932940721511841, 0.8598397970199585], [0.29572534561157227, 0.855587363243103], [0.297892689704895, 0.8518713116645813], [0.3001541495323181, 0.8478075265884399], [0.3021320700645447, 0.8442790508270264], [0.30419111251831055, 0.8404500484466553], [0.30595850944519043, 0.8371081352233887], [0.30780524015426636, 0.8335292339324951], [0.30937671661376953, 0.8304141163825989], [0.31104612350463867, 0.8271045684814453], [0.3124430179595947, 0.824243426322937], [0.31394487619400024, 0.82120680809021], [0.3152461647987366, 0.8184438943862915], [0.2658497095108032, 0.9134274125099182], [0.2690271735191345, 0.9079265594482422], [0.2720586657524109, 0.9032053351402283], [0.27513474225997925, 0.8978912830352783], [0.27802950143814087, 0.8933165073394775], [0.28095048666000366, 0.8882738947868347], [0.2836930751800537, 0.8838762044906616], [0.28645020723342896, 0.8790816068649292], [0.2890247106552124, 0.8748531341552734], [0.29162347316741943, 0.8702574372291565], [0.2940319776535034, 0.8661813139915466], [0.2964550852775574, 0.8618065714836121], [0.29868191480636597, 0.8578970432281494], [0.30094122886657715, 0.8537100553512573], [0.3030000329017639, 0.8499535918235779], [0.305072546005249, 0.8459876775741577], [0.306926965713501, 0.8424052000045776], [0.30878835916519165, 0.838666558265686], [0.31042397022247314, 0.8353145718574524], [0.31208813190460205, 0.8318616151809692], [0.3135482668876648, 0.828769326210022], [0.31505608558654785, 0.8256120085716248], [0.3163630962371826, 0.8227736949920654], [0.31778043508529663, 0.8197217583656311], [0.26980626583099365, 0.9158927798271179], [0.27276134490966797, 0.9110945463180542], [0.2758268117904663, 0.9055750966072083], [0.2787052392959595, 0.900908887386322], [0.2816748023033142, 0.895600438117981], [0.2843811511993408, 0.8911315202713013], [0.28720080852508545, 0.8860619068145752], [0.28974586725234985, 0.8817649483680725], [0.292397677898407, 0.8769187927246094], [0.2947903871536255, 0.8727656006813049], [0.29728198051452637, 0.8681273460388184], [0.2994997501373291, 0.8641403913497925], [0.3018149733543396, 0.8597171902656555], [0.30386972427368164, 0.8558765649795532], [0.3060187101364136, 0.8516546487808228], [0.3078949451446533, 0.8479788303375244], [0.3098471164703369, 0.843956708908081], [0.31150615215301514, 0.8404743671417236], [0.31322747468948364, 0.8367209434509277], [0.314666748046875, 0.8335183262825012], [0.31622326374053955, 0.830109179019928], [0.3175314664840698, 0.8271822929382324], [0.31895023584365845, 0.8240380883216858], [0.32017868757247925, 0.8211847543716431], [0.273759126663208, 0.9191145896911621], [0.27674371004104614, 0.9134763479232788], [0.2796018123626709, 0.9086726903915405], [0.28253257274627686, 0.9032201170921326], [0.2852928638458252, 0.8985362648963928], [0.28808343410491943, 0.8933559656143188], [0.2906908392906189, 0.8888317346572876], [0.2933158874511719, 0.8838721513748169], [0.2957572937011719, 0.8795138597488403], [0.2982279062271118, 0.8747518062591553], 
[0.3005062937736511, 0.8705416321754456], [0.3028072118759155, 0.8659999370574951], [0.30491650104522705, 0.8619627952575684], [0.3070630431175232, 0.8576177358627319], [0.30900800228118896, 0.8537350296974182], [0.3109824061393738, 0.8495862483978271], [0.31274181604385376, 0.8458507061004639], [0.3144954442977905, 0.8419167995452881], [0.316001296043396, 0.8384019732475281], [0.31751251220703125, 0.8348637223243713], [0.31886041164398193, 0.8317381143569946], [0.3202893137931824, 0.8284941911697388], [0.3215276598930359, 0.8255873918533325], [0.32288241386413574, 0.8224175572395325], [0.27785325050354004, 0.9214061498641968], [0.2806016206741333, 0.9165062308311462], [0.2834553122520447, 0.9108904600143433], [0.28619617223739624, 0.9061571359634399], [0.28907233476638794, 0.900733470916748], [0.2916666865348816, 0.8961503505706787], [0.29435843229293823, 0.8909099698066711], [0.296769380569458, 0.886480450630188], [0.2992820143699646, 0.8814688920974731], [0.30154335498809814, 0.8771846294403076], [0.3039051294326782, 0.872377336025238], [0.30599796772003174, 0.8682581186294556], [0.3081836700439453, 0.8636784553527832], [0.3101254105567932, 0.8597092628479004], [0.312156617641449, 0.8553300499916077], [0.3139362335205078, 0.8515064716339111], [0.31579577922821045, 0.8473047614097595], [0.3173736333847046, 0.8436448574066162], [0.3189721703529358, 0.839661717414856], [0.3202331066131592, 0.8364521265029907], [0.32166415452957153, 0.8330318927764893], [0.3229132294654846, 0.8300578594207764], [0.3242757320404053, 0.826816737651825], [0.3254591226577759, 0.8239003419876099], [0.2821359634399414, 0.9246479272842407], [0.2849631905555725, 0.9188574552536011], [0.2875875234603882, 0.9140121936798096], [0.29029881954193115, 0.9084506034851074], [0.2928979992866516, 0.9036431312561035], [0.2955445647239685, 0.8982654809951782], [0.29800695180892944, 0.8935950398445129], [0.3004876971244812, 0.8884577751159668], [0.3027815818786621, 0.8839685320854187], [0.3051118850708008, 0.8790422677993774], [0.3072608709335327, 0.8747028112411499], [0.30943840742111206, 0.8699949383735657], [0.3114153742790222, 0.8658324480056763], [0.31343090534210205, 0.8613248467445374], [0.31525588035583496, 0.8573200702667236], [0.3171265721321106, 0.8529881834983826], [0.31879597902297974, 0.8491438031196594], [0.32049697637557983, 0.8449634313583374], [0.3218950629234314, 0.8411954045295715], [0.3232138752937317, 0.8377180695533752], [0.32444506883621216, 0.8346567153930664], [0.3258060812950134, 0.8313199281692505], [0.32698512077331543, 0.8283543586730957], [0.32828694581985474, 0.8251057863235474], [0.2865203022956848, 0.9269123673439026], [0.2891501188278198, 0.9219567775726318], [0.2918582558631897, 0.9161884784698486], [0.29432564973831177, 0.9113508462905884], [0.2969205379486084, 0.905712366104126], [0.29931968450546265, 0.9009650945663452], [0.30183178186416626, 0.8955349922180176], [0.30409038066864014, 0.8909653425216675], [0.30644744634628296, 0.8857910633087158], [0.3085660934448242, 0.8813790082931519], [0.31078284978866577, 0.8764142394065857], [0.31275320053100586, 0.8721604347229004], [0.3148082494735718, 0.8674167394638062], [0.316617488861084, 0.8633155822753906], [0.31850558519363403, 0.8587848544120789], [0.3201777935028076, 0.8548073172569275], [0.32190030813217163, 0.8504794239997864], [0.32343465089797974, 0.8465903997421265], [0.3250391483306885, 0.8423700332641602], [0.32615387439727783, 0.8393068313598633], [0.32743263244628906, 0.8359032869338989], [0.3285936117172241, 0.8328602313995361], 
[0.3298715353012085, 0.8295427560806274], [0.33099424839019775, 0.8265877962112427], [0.2908146381378174, 0.9299982786178589], [0.29347729682922363, 0.9240965843200684], [0.2959829568862915, 0.9190701246261597], [0.298517644405365, 0.9133649468421936], [0.30090731382369995, 0.9084445238113403], [0.30334651470184326, 0.9029078483581543], [0.3056381940841675, 0.8981038331985474], [0.3079562783241272, 0.8928022384643555], [0.310097336769104, 0.888190746307373], [0.3122780919075012, 0.883100152015686], [0.31427425146102905, 0.8786330223083496], [0.31630438566207886, 0.8737579584121704], [0.31815093755722046, 0.8694591522216797], [0.32003486156463623, 0.8647916316986084], [0.3217257857322693, 0.8606683611869812], [0.3234717845916748, 0.8561638593673706], [0.3249979019165039, 0.8521721363067627], [0.32659274339675903, 0.8478299379348755], [0.32803452014923096, 0.8439671993255615], [0.32933491468429565, 0.8405148983001709], [0.3304268717765808, 0.8374670147895813], [0.3316478729248047, 0.8340409994125366], [0.3327171206474304, 0.8310303688049316], [0.33390527963638306, 0.8277113437652588], [0.2952284812927246, 0.9320684671401978], [0.2976916432380676, 0.9269849061965942], [0.30024588108062744, 0.9210386276245117], [0.3025863766670227, 0.9160832166671753], [0.3050262928009033, 0.9103192090988159], [0.307256817817688, 0.9054679274559021], [0.3095937967300415, 0.8998912572860718], [0.31170475482940674, 0.8952088952064514], [0.31390833854675293, 0.8898719549179077], [0.3158750534057617, 0.8853223323822021], [0.3179219961166382, 0.8801867961883545], [0.31973862648010254, 0.8758066892623901], [0.3216480016708374, 0.8709084987640381], [0.3233373761177063, 0.8666869401931763], [0.32509660720825195, 0.8620012998580933], [0.32664817571640015, 0.8578975200653076], [0.3282662630081177, 0.8533766269683838], [0.32967859506607056, 0.8494546413421631], [0.33123189210891724, 0.8451910018920898], [0.33234214782714844, 0.8420682549476624], [0.3335537314414978, 0.8385714292526245], [0.33458447456359863, 0.8355004787445068], [0.3357234597206116, 0.832088828086853], [0.33670639991760254, 0.8290780186653137], [0.29959821701049805, 0.9349828958511353], [0.30209678411483765, 0.9289335608482361], [0.30445951223373413, 0.9237943887710571], [0.30685579776763916, 0.9179432392120361], [0.30910205841064453, 0.9129139184951782], [0.3113880157470703, 0.9072402715682983], [0.31352120637893677, 0.9023404121398926], [0.31568241119384766, 0.8968920707702637], [0.317679226398468, 0.8921471834182739], [0.3197091817855835, 0.8868812918663025], [0.3215523958206177, 0.8822835683822632], [0.3234269618988037, 0.877265214920044], [0.32513415813446045, 0.8728666305541992], [0.3268889784812927, 0.8680408000946045], [0.32845181226730347, 0.8637869358062744], [0.33006608486175537, 0.8591369390487671], [0.3315057158470154, 0.8550164699554443], [0.33299267292022705, 0.8506331443786621], [0.3343120813369751, 0.8467665910720825], [0.3355395793914795, 0.8431848287582397], [0.3365829586982727, 0.840032696723938], [0.3377125859260559, 0.8365305066108704], [0.33866703510284424, 0.833441436290741], [0.339712917804718, 0.830045759677887], [0.3040763735771179, 0.9368672370910645], [0.3063913583755493, 0.9316816329956055], [0.30879586935043335, 0.9256060719490051], [0.31100594997406006, 0.9205389618873596], [0.31330692768096924, 0.9146308302879333], [0.3153975009918213, 0.9096825122833252], [0.31758320331573486, 0.9039746522903442], [0.31954801082611084, 0.8991740942001343], [0.32159364223480225, 0.893668532371521], [0.32342445850372314, 
0.8889919519424438], [0.32533693313598633, 0.8836979866027832], [0.32701021432876587, 0.8792188763618469], [0.32876336574554443, 0.8741860389709473], [0.3303215503692627, 0.869843065738678], [0.3319551944732666, 0.8649946451187134], [0.3333841562271118, 0.8607984185218811], [0.3349020481109619, 0.8561465740203857], [0.3361983299255371, 0.8521987199783325], [0.3376104235649109, 0.8478710055351257], [0.3386269211769104, 0.8446457386016846], [0.33974701166152954, 0.8410387635231018], [0.3407058119773865, 0.8378787636756897], [0.3417498469352722, 0.8343552947044373], [0.34262120723724365, 0.8313034772872925], [0.30853909254074097, 0.9396498799324036], [0.3108893036842346, 0.9334787726402283], [0.3131082057952881, 0.9282426834106445], [0.31536054611206055, 0.9222478866577148], [0.3174762725830078, 0.9171149730682373], [0.31962549686431885, 0.9113059639930725], [0.3216139078140259, 0.9063029289245605], [0.3236299753189087, 0.9006956815719604], [0.32548296451568604, 0.8958159685134888], [0.3273620009422302, 0.8904122114181519], [0.3290831446647644, 0.88569575548172], [0.33082616329193115, 0.880537748336792], [0.3323870301246643, 0.8760303258895874], [0.3339923620223999, 0.8710659146308899], [0.33543944358825684, 0.8666936159133911], [0.33693450689315796, 0.8619334697723389], [0.3382672071456909, 0.8577327728271484], [0.3396359086036682, 0.8532716035842896], [0.3408207893371582, 0.8493454456329346], [0.34193915128707886, 0.8456451892852783], [0.3428823947906494, 0.8424123525619507], [0.34390777349472046, 0.8387947082519531], [0.3447822332382202, 0.8356136083602905], [0.3457346558570862, 0.832166314125061], [0.3130916953086853, 0.9413806200027466], [0.3152722716331482, 0.9361116886138916], [0.31753426790237427, 0.9298988580703735], [0.3196101188659668, 0.9247341156005859], [0.3217700719833374, 0.9186895489692688], [0.3237309455871582, 0.913637638092041], [0.32577770948410034, 0.9077842235565186], [0.3276025056838989, 0.9028521776199341], [0.3294993042945862, 0.8971823453903198], [0.33118534088134766, 0.8924160003662109], [0.3329678177833557, 0.8869867920875549], [0.3345276713371277, 0.8823909759521484], [0.33614569902420044, 0.8772039413452148], [0.3375577926635742, 0.8727664947509766], [0.3390556573867798, 0.867790699005127], [0.340370237827301, 0.8635202050209045], [0.34177684783935547, 0.8587693572044373], [0.3429691791534424, 0.8547643423080444], [0.3442589044570923, 0.8503177762031555], [0.34516119956970215, 0.8470079898834229], [0.34615838527679443, 0.8432888984680176], [0.3470103144645691, 0.840045690536499], [0.3479512929916382, 0.8364255428314209], [0.34873372316360474, 0.833355724811554], [0.31762373447418213, 0.9440615177154541], [0.31985968351364136, 0.9377645254135132], [0.3219556212425232, 0.9324359893798828], [0.3240780830383301, 0.926306962966919], [0.3260563611984253, 0.9210647940635681], [0.3280560374259949, 0.9151169061660767], [0.32991087436676025, 0.9099968671798706], [0.33179622888565063, 0.9042186737060547], [0.33350545167922974, 0.8992106914520264], [0.33522433042526245, 0.893690288066864], [0.3368055820465088, 0.8888802528381348], [0.33841609954833984, 0.8835576772689819], [0.3398450016975403, 0.8789138793945312], [0.341305673122406, 0.8738381862640381], [0.3426194190979004, 0.8693841099739075], [0.3439849615097046, 0.8645318746566772], [0.3452029824256897, 0.8602570295333862], [0.3464459776878357, 0.8557026386260986], [0.3475193977355957, 0.8516637682914734], [0.34852975606918335, 0.8478550314903259], [0.34937506914138794, 0.8445515036582947], [0.35030102729797363, 
0.8408315181732178], [0.3510725498199463, 0.8375868201255798], [0.35191261768341064, 0.8341221809387207], [0.32224392890930176, 0.9456701874732971], [0.32432734966278076, 0.9403114318847656], [0.3264796733856201, 0.9339596033096313], [0.32842856645584106, 0.928684413433075], [0.33045029640197754, 0.9224908351898193], [0.3322739601135254, 0.9173353314399719], [0.3341739773750305, 0.9113353490829468], [0.33587706089019775, 0.9062677621841431], [0.33764374256134033, 0.9004307389259338], [0.3391716480255127, 0.895565927028656], [0.34076541662216187, 0.8900108337402344], [0.3421887159347534, 0.8852826356887817], [0.34367799758911133, 0.8799405097961426], [0.3449523448944092, 0.8754169940948486], [0.3463016152381897, 0.8703416585922241], [0.34748613834381104, 0.8660115003585815], [0.34876394271850586, 0.8611642718315125], [0.3498445153236389, 0.8570836782455444], [0.35101574659347534, 0.8524929881095886], [0.3518122434616089, 0.8490889072418213], [0.3526947498321533, 0.8453054428100586], [0.3534683585166931, 0.8419877290725708], [0.3543235659599304, 0.8382803201675415], [0.3549976944923401, 0.8352144956588745], [0.3268511891365051, 0.9482591152191162], [0.3289673924446106, 0.9418308734893799], [0.33095842599868774, 0.936385989189148], [0.33297330141067505, 0.9301084280014038], [0.33483487367630005, 0.92476487159729], [0.3367137312889099, 0.9186705350875854], [0.3384251594543457, 0.9134246110916138], [0.3401588201522827, 0.9074891805648804], [0.3417496681213379, 0.9023585319519043], [0.3433490991592407, 0.8966819047927856], [0.344765305519104, 0.8917741179466248], [0.3462073802947998, 0.8863160014152527], [0.3475050926208496, 0.881550669670105], [0.3488244414329529, 0.8763484954833984], [0.34998583793640137, 0.8718204498291016], [0.3511854410171509, 0.8668835163116455], [0.3522595167160034, 0.862534761428833], [0.3533594012260437, 0.8578808307647705], [0.35431647300720215, 0.8537077903747559], [0.3552217483520508, 0.8497921228408813], [0.3559642434120178, 0.8464647531509399], [0.35679328441619873, 0.842659592628479], [0.3574819564819336, 0.8393392562866211], [0.35823071002960205, 0.8358651399612427], [0.33153361082077026, 0.9497218132019043], [0.33349573612213135, 0.9442720413208008], [0.33554065227508545, 0.9377809762954712], [0.3374013900756836, 0.9324073791503906], [0.3393353223800659, 0.9260742664337158], [0.34104567766189575, 0.920784056186676], [0.3427996039390564, 0.9145941138267517], [0.3443453311920166, 0.909418523311615], [0.3459674119949341, 0.9034510850906372], [0.34739434719085693, 0.8984612226486206], [0.34887218475341797, 0.892749547958374], [0.3501321077346802, 0.8879209756851196], [0.35145050287246704, 0.882436990737915], [0.3525901436805725, 0.8778202533721924], [0.3537883162498474, 0.8726229071617126], [0.35479772090911865, 0.8682184815406799], [0.3558803200721741, 0.8632825613021851], [0.35680848360061646, 0.8591130375862122], [0.35783851146698, 0.8543967008590698], [0.3585423231124878, 0.8509082794189453], [0.3593099117279053, 0.8470920324325562], [0.359991192817688, 0.8437039852142334], [0.36076152324676514, 0.839934229850769], [0.3613612651824951, 0.8368826508522034], [0.33621811866760254, 0.9522179365158081], [0.33823084831237793, 0.9456652402877808], [0.34011054039001465, 0.9401295185089111], [0.3420190215110779, 0.9337193369865417], [0.34378135204315186, 0.928254246711731], [0.34556806087493896, 0.9219720363616943], [0.34717726707458496, 0.9165819883346558], [0.3487972021102905, 0.9105114936828613], [0.35024547576904297, 0.9052958488464355], [0.3517075181007385, 
0.8994641304016113], [0.3530076742172241, 0.8944140076637268], [0.3543180823326111, 0.8887957334518433], [0.35545241832733154, 0.8839330673217773], [0.35659658908843994, 0.8786095380783081], [0.35759949684143066, 0.8739645481109619], [0.358628511428833, 0.8689190149307251], [0.3595300316810608, 0.8645192384719849], [0.3604710102081299, 0.8597743511199951], [0.36130213737487793, 0.8555333614349365], [0.3621026873588562, 0.8515002727508545], [0.3627144694328308, 0.8481220006942749], [0.36339545249938965, 0.8442592620849609], [0.3639845848083496, 0.8409155607223511], [0.3646393418312073, 0.8374422788619995], [0.3409852981567383, 0.9535648822784424], [0.3428459167480469, 0.948032021522522], [0.34478747844696045, 0.9413984417915344], [0.3465266227722168, 0.9359188675880432], [0.3483394980430603, 0.9294305443763733], [0.34996312856674194, 0.9240003824234009], [0.35165953636169434, 0.9176552891731262], [0.35312509536743164, 0.9123809337615967], [0.35464268922805786, 0.9062596559524536], [0.3559286594390869, 0.9011490345001221], [0.3572598695755005, 0.895262598991394], [0.3583933115005493, 0.8902982473373413], [0.3595600724220276, 0.8846539258956909], [0.3605165481567383, 0.879932165145874], [0.3615145683288574, 0.8745930194854736], [0.36236101388931274, 0.8701235055923462], [0.36328184604644775, 0.8651105165481567], [0.36406058073043823, 0.8608977794647217], [0.36494356393814087, 0.8561033606529236], [0.3655446767807007, 0.8525066375732422], [0.366177499294281, 0.8485735654830933], [0.36670517921447754, 0.8451743125915527], [0.3673252463340759, 0.8413931131362915], [0.3678211569786072, 0.8383617401123047], [0.34573155641555786, 0.9559942483901978], [0.34766554832458496, 0.9493058919906616], [0.3494495153427124, 0.9436736106872559], [0.3512740135192871, 0.9371070861816406], [0.3529226779937744, 0.9315353035926819], [0.3546145558357239, 0.9251011610031128], [0.3561413884162903, 0.9196078777313232], [0.3576902151107788, 0.9133690595626831], [0.3590391278266907, 0.908013105392456], [0.36039191484451294, 0.902004599571228], [0.3615570068359375, 0.8968237638473511], [0.3627277612686157, 0.8910312652587891], [0.3637312650680542, 0.8860392570495605], [0.36473584175109863, 0.8805437088012695], [0.36556321382522583, 0.8758057951927185], [0.3663954734802246, 0.8706661462783813], [0.36710435152053833, 0.8662046194076538], [0.3678455352783203, 0.8613981008529663], [0.3685150742530823, 0.8571269512176514], [0.3691900968551636, 0.8529417514801025], [0.3696941137313843, 0.8494745492935181], [0.37025612592697144, 0.8455774784088135], [0.3707083463668823, 0.842254638671875], [0.37123703956604004, 0.8388316631317139], [0.3505765199661255, 0.9572346210479736], [0.3523516058921814, 0.9516083598136902], [0.3542182445526123, 0.9448375701904297], [0.35587549209594727, 0.9392486810684204], [0.3576207160949707, 0.9326033592224121], [0.35914283990859985, 0.9270801544189453], [0.36074280738830566, 0.9205899238586426], [0.3621227741241455, 0.915169358253479], [0.3635566830635071, 0.9088606238365173], [0.3647395372390747, 0.9035965204238892], [0.3659543991088867, 0.897529125213623], [0.36696648597717285, 0.8924427032470703], [0.36801379919052124, 0.8866362571716309], [0.36885499954223633, 0.8817685842514038], [0.3697056174278259, 0.8762608766555786], [0.37035584449768066, 0.8717129230499268], [0.37102919816970825, 0.8665750622749329], [0.3715558648109436, 0.8623137474060059], [0.37218308448791504, 0.8575249910354614], [0.37267887592315674, 0.8538292646408081], [0.3732520341873169, 0.8498415946960449], [0.3737044334411621, 
0.8463853001594543], [0.3742058277130127, 0.8425939083099365], [0.3745728135108948, 0.8396514654159546], [0.35541629791259766, 0.9596300721168518], [0.35729777812957764, 0.9527885317802429], [0.35898852348327637, 0.9470662474632263], [0.3607438802719116, 0.9403446912765503], [0.3623104691505432, 0.9346634149551392], [0.363929808139801, 0.9280751943588257], [0.3653443455696106, 0.9224613308906555], [0.36679309606552124, 0.9160506725311279], [0.3680565357208252, 0.9105420112609863], [0.3693392276763916, 0.9043383002281189], [0.3704146146774292, 0.8990170955657959], [0.3714951276779175, 0.8930642604827881], [0.3723849654197693, 0.8879371285438538], [0.37326860427856445, 0.8822471499443054], [0.3739748001098633, 0.8773665428161621], [0.3746687173843384, 0.8720570206642151], [0.3751985430717468, 0.8675098419189453], [0.3757462501525879, 0.8625919818878174], [0.3761783242225647, 0.8584069013595581], [0.3766832947731018, 0.8542026281356812], [0.3770933151245117, 0.8507122993469238], [0.3775593042373657, 0.8466854095458984], [0.37786561250686646, 0.8433082103729248], [0.37825101613998413, 0.8400321006774902], [0.36037784814834595, 0.9607824087142944], [0.36207664012908936, 0.9550588130950928], [0.36389070749282837, 0.9481347799301147], [0.36545634269714355, 0.9424400329589844], [0.3671276569366455, 0.9356409311294556], [0.36855703592300415, 0.9299817085266113], [0.3700677156448364, 0.9233201742172241], [0.37133467197418213, 0.9177684783935547], [0.37267935276031494, 0.9112998247146606], [0.3738030791282654, 0.9058829545974731], [0.3749746084213257, 0.899642825126648], [0.37590885162353516, 0.8944145441055298], [0.37686145305633545, 0.8884127736091614], [0.3775975704193115, 0.8833938837051392], [0.3783450126647949, 0.8777025938034058], [0.3788825273513794, 0.8729996681213379], [0.37942105531692505, 0.867708146572113], [0.37980538606643677, 0.8633978962898254], [0.38025999069213867, 0.8585952520370483], [0.3805540204048157, 0.8549799919128418], [0.38093674182891846, 0.850950300693512], [0.38128989934921265, 0.8474436402320862], [0.38173580169677734, 0.8434982299804688], [0.3819751739501953, 0.8407896757125854], [0.3653045892715454, 0.9631434679031372], [0.3671322464942932, 0.9561436176300049], [0.3687329888343811, 0.9503053426742554], [0.37042802572250366, 0.9434324502944946], [0.3718998432159424, 0.9376300573348999], [0.37345635890960693, 0.9308778047561646], [0.374794602394104, 0.9251345992088318], [0.37619519233703613, 0.9185624122619629], [0.37737762928009033, 0.9129546880722046], [0.3786044120788574, 0.9065685272216797], [0.379615843296051, 0.9011117219924927], [0.38064849376678467, 0.8949475288391113], [0.3814626932144165, 0.8896634578704834], [0.38227367401123047, 0.883769154548645], [0.382884681224823, 0.8787461519241333], [0.3834792375564575, 0.873207688331604], [0.38388508558273315, 0.8685382008552551], [0.38428163528442383, 0.8634964227676392], [0.38454103469848633, 0.8593232035636902], [0.38489222526550293, 0.8551301956176758], [0.3851342797279358, 0.851649284362793], [0.38543540239334106, 0.8476547002792358], [0.3857148289680481, 0.8442460894584656], [0.38606005907058716, 0.8410061597824097], [0.3704017996788025, 0.9641926288604736], [0.371989369392395, 0.9583472013473511], [0.3737286329269409, 0.9512708187103271], [0.37519729137420654, 0.9454538822174072], [0.37680959701538086, 0.9385141134262085], [0.37816959619522095, 0.9327414035797119], [0.37965619564056396, 0.9259370565414429], [0.3808841109275818, 0.9202741384506226], [0.38220369815826416, 0.9136468172073364], 
[0.3832598328590393, 0.9080924987792969], [0.38437336683273315, 0.9016695022583008], [0.3852452039718628, 0.8962662220001221], [0.38615214824676514, 0.8900647163391113], [0.38683176040649414, 0.884884238243103], [0.3875120282173157, 0.8789721131324768], [0.3879588842391968, 0.8740811347961426], [0.388391375541687, 0.8685859441757202], [0.38864779472351074, 0.8641796112060547], [0.38894832134246826, 0.859310507774353], [0.38909852504730225, 0.8557941317558289], [0.3893536329269409, 0.8516705632209778], [0.38957124948501587, 0.8483469486236572], [0.3899143934249878, 0.8444229364395142], [0.3901106119155884, 0.8417262434959412], [0.3754584789276123, 0.966502845287323], [0.3772238492965698, 0.9593299627304077], [0.37870609760284424, 0.9533827900886536], [0.38033318519592285, 0.9463648796081543], [0.3817107677459717, 0.9404628276824951], [0.383212685585022, 0.9335766434669495], [0.3844848871231079, 0.9277348518371582], [0.3858511447906494, 0.921013355255127], [0.38697999715805054, 0.9152618646621704], [0.38817286491394043, 0.9086976051330566], [0.38912343978881836, 0.9030881524085999], [0.39011359214782715, 0.8967270851135254], [0.3908841609954834, 0.8912909030914307], [0.39166271686553955, 0.885182797908783], [0.3922187089920044, 0.8799589276313782], [0.3927571773529053, 0.8741724491119385], [0.39308375120162964, 0.8693356513977051], [0.3933885097503662, 0.8641078472137451], [0.3934882879257202, 0.8598501086235046], [0.39367997646331787, 0.8557597398757935], [0.3938583731651306, 0.8523309826850891], [0.3941020965576172, 0.8483983278274536], [0.39429813623428345, 0.8451690673828125], [0.3945438861846924, 0.841769814491272], [0.38074350357055664, 0.9674409627914429], [0.3822029232978821, 0.9614717960357666], [0.3838762044906616, 0.9542545080184937], [0.38523560762405396, 0.9483349323272705], [0.3867717981338501, 0.9412636756896973], [0.38804012537002563, 0.9354019165039062], [0.38947010040283203, 0.9284741878509521], [0.39062196016311646, 0.9226675033569336], [0.3918847441673279, 0.9158888459205627], [0.39289653301239014, 0.9101821184158325], [0.39399152994155884, 0.9035840034484863], [0.39482808113098145, 0.8980206251144409], [0.39570748805999756, 0.8916391134262085], [0.3963424563407898, 0.8862555027008057], [0.3969962000846863, 0.880125880241394], [0.39743369817733765, 0.8750345706939697], [0.3978539705276489, 0.8692912459373474], [0.3980216979980469, 0.8646865487098694], [0.39821726083755493, 0.8596270084381104], [0.39827680587768555, 0.8563403487205505], [0.39842891693115234, 0.8522605895996094], [0.39855486154556274, 0.8490155339241028], [0.398750364780426, 0.8451663255691528], [0.3988611698150635, 0.8423608541488647], [0.3859533667564392, 0.9696493148803711], [0.38760828971862793, 0.9623504877090454], [0.3889658451080322, 0.9562983512878418], [0.39051365852355957, 0.9491415023803711], [0.3917747735977173, 0.9431393146514893], [0.3931903839111328, 0.9361478686332703], [0.3943657875061035, 0.9302060604095459], [0.3956791162490845, 0.923319935798645], [0.39673054218292236, 0.9174448847770691], [0.3978769779205322, 0.9107329845428467], [0.39879417419433594, 0.9049739837646484], [0.39977723360061646, 0.8984338045120239], [0.4005163908004761, 0.892831563949585], [0.40129202604293823, 0.8865059614181519], [0.40184080600738525, 0.881118655204773], [0.4023871421813965, 0.8751062154769897], [0.40270137786865234, 0.8700211048126221], [0.40298646688461304, 0.8644845485687256], [0.40302783250808716, 0.8601021766662598], [0.4031140208244324, 0.8561050891876221], [0.4031500816345215, 
0.8527637720108032], [0.40322911739349365, 0.8488640785217285], [0.40330129861831665, 0.8457217216491699], [0.40340566635131836, 0.8421998023986816], [0.39140069484710693, 0.9704549312591553], [0.3927114009857178, 0.9643961191177368], [0.3942752480506897, 0.9570603966712952], [0.3955169916152954, 0.9510362148284912], [0.3969707489013672, 0.9438323974609375], [0.3981066346168518, 0.937880277633667], [0.3994330167770386, 0.9308508634567261], [0.4004977345466614, 0.9249048829078674], [0.4017093777656555, 0.9179959297180176], [0.4026542901992798, 0.9121595621109009], [0.40371090173721313, 0.9054340124130249], [0.4045306444168091, 0.8997060060501099], [0.4054245352745056, 0.8931518793106079], [0.40607625246047974, 0.8876040577888489], [0.4067636728286743, 0.8812739849090576], [0.4071962833404541, 0.8759706020355225], [0.4076235294342041, 0.869951069355011], [0.40782666206359863, 0.865104079246521], [0.40797072649002075, 0.8596951365470886], [0.40790605545043945, 0.8565008640289307], [0.40787553787231445, 0.8523969054222107], [0.4078679084777832, 0.8493205308914185], [0.40791070461273193, 0.8454675078392029], [0.40789735317230225, 0.8425792455673218], [0.3966658115386963, 0.9725399017333984], [0.3982117176055908, 0.9651278853416443], [0.3994274139404297, 0.9589974880218506], [0.4008769392967224, 0.9517238140106201], [0.40201497077941895, 0.9455997943878174], [0.4033430814743042, 0.9385042786598206], [0.4043903350830078, 0.9324756860733032], [0.40560728311538696, 0.9254679679870605], [0.40657752752304077, 0.9194821119308472], [0.4076777696609497, 0.9126362800598145], [0.4085289239883423, 0.9067672491073608], [0.40947020053863525, 0.9000766277313232], [0.4101821780204773, 0.8943132162094116], [0.41094058752059937, 0.8878316283226013], [0.41148459911346436, 0.8822451233863831], [0.41203153133392334, 0.8759905099868774], [0.41233545541763306, 0.870669960975647], [0.41255831718444824, 0.8648557662963867], [0.41248995065689087, 0.8599969148635864], [0.41246962547302246, 0.8560571670532227], [0.41245609521865845, 0.8527736663818359], [0.4124884605407715, 0.8489794135093689], [0.41247063875198364, 0.845829963684082], [0.41245412826538086, 0.8421703577041626], [0.40221714973449707, 0.9732054471969604], [0.40338802337646484, 0.967065691947937], [0.4048416018486023, 0.9596229791641235], [0.40592825412750244, 0.9534896612167358], [0.4072798490524292, 0.9461838006973267], [0.40830039978027344, 0.9401313066482544], [0.40953224897384644, 0.9329994916915894], [0.4104810953140259, 0.9269589185714722], [0.411604106426239, 0.9199539422988892], [0.4124687910079956, 0.9139907360076904], [0.4134610891342163, 0.9071477651596069], [0.4142078161239624, 0.9012717604637146], [0.41503196954727173, 0.8945741057395935], [0.415627121925354, 0.8888517618179321], [0.41627007722854614, 0.8823710680007935], [0.4167114496231079, 0.8768247961997986], [0.4171189069747925, 0.8705453872680664], [0.4172753095626831, 0.865371584892273], [0.41731584072113037, 0.8594304323196411], [0.4171640872955322, 0.856293797492981], [0.41709578037261963, 0.852421760559082], [0.4170567989349365, 0.8492988348007202], [0.41702765226364136, 0.8454416990280151], [0.41694921255111694, 0.8423824310302734], [0.4074872136116028, 0.9752019643783569], [0.4089760184288025, 0.9676526784896851], [0.4100204110145569, 0.9613999128341675], [0.4113525152206421, 0.9540450572967529], [0.4123579263687134, 0.9478408098220825], [0.41359400749206543, 0.9406439065933228], [0.41452157497406006, 0.9345090389251709], [0.4156365990638733, 0.9274091720581055], 
[0.4164769649505615, 0.9213178157806396], [0.41746050119400024, 0.9143667817115784], [0.4182100296020508, 0.9083658456802368], [0.41905879974365234, 0.9015406370162964], [0.41968899965286255, 0.8956298232078552], [0.42037278413772583, 0.8889948129653931], [0.4208502769470215, 0.8832558989524841], [0.4213144779205322, 0.8768027424812317], [0.42155879735946655, 0.8711525797843933], [0.4216412901878357, 0.8650143146514893], [0.4214205741882324, 0.859591007232666], [0.4214099645614624, 0.8558660745620728], [0.42140620946884155, 0.8527517318725586], [0.4214470386505127, 0.8489599227905273], [0.42145323753356934, 0.8457474708557129], [0.4214329719543457, 0.8419076204299927], [0.41318267583847046, 0.9757261276245117], [0.4141501188278198, 0.9694198369979858], [0.4155048131942749, 0.9618946313858032], [0.41645264625549316, 0.9556888341903687], [0.41769909858703613, 0.9483184814453125], [0.41859114170074463, 0.9421464204788208], [0.4197092056274414, 0.9349191188812256], [0.42052578926086426, 0.9287670850753784], [0.4215196967124939, 0.9216544032096863], [0.4222429394721985, 0.9155797958374023], [0.4230998158454895, 0.9086324572563171], [0.42372703552246094, 0.9025945067405701], [0.4244290590286255, 0.8957949876785278], [0.4249359369277954, 0.8898931741714478], [0.4254600405693054, 0.8832787275314331], [0.42579150199890137, 0.8774878978729248], [0.4260685443878174, 0.8709431886672974], [0.42611533403396606, 0.8653382062911987], [0.42601680755615234, 0.8589452505111694], [0.42582255601882935, 0.8560953140258789], [0.4258398413658142, 0.852547287940979], [0.4259207248687744, 0.8493581414222717], [0.42596960067749023, 0.8454302549362183], [0.4259280562400818, 0.8421194553375244], [0.41844236850738525, 0.9774341583251953], [0.41975295543670654, 0.9698784351348877], [0.42066025733947754, 0.9635300636291504], [0.4218933582305908, 0.956123411655426], [0.42275500297546387, 0.9498199224472046], [0.4238671064376831, 0.9425244331359863], [0.42465412616729736, 0.9362730979919434], [0.4256417155265808, 0.929082453250885], [0.42634904384613037, 0.9228613972663879], [0.4272024631500244, 0.915818452835083], [0.42781388759613037, 0.9096980690956116], [0.4285241365432739, 0.9027353525161743], [0.42900550365448, 0.8966870307922363], [0.4295070767402649, 0.889909029006958], [0.4298214912414551, 0.8839677572250366], [0.4300522804260254, 0.877302885055542], [0.43019336462020874, 0.8714559674263], [0.4302296042442322, 0.8649020195007324], [0.42979925870895386, 0.858935534954071], [0.4298442006111145, 0.8558462858200073], [0.430022656917572, 0.8531708717346191], [0.430228590965271, 0.8491264581680298], [0.4302576184272766, 0.8457661867141724], [0.43018752336502075, 0.8416361808776855], [0.424108624458313, 0.9778263568878174], [0.424932599067688, 0.9714572429656982], [0.4261462688446045, 0.9639310836791992], [0.4269672632217407, 0.9576081037521362], [0.42808347940444946, 0.950157642364502], [0.428835391998291, 0.9438763856887817], [0.42983806133270264, 0.9365564584732056], [0.4305136203765869, 0.9302847385406494], [0.43137258291244507, 0.9230736494064331], [0.43195170164108276, 0.9168701171875], [0.43265587091445923, 0.9098276495933533], [0.4331432580947876, 0.9036364555358887], [0.43368959426879883, 0.8966943025588989], [0.4339962601661682, 0.8905829191207886], [0.43424296379089355, 0.8837961554527283], [0.43427538871765137, 0.877669632434845], [0.43418002128601074, 0.8711609840393066], [0.4341394901275635, 0.8651816844940186], [0.4345804452896118, 0.8585669994354248], [0.4346727728843689, 0.8565791845321655], 
[0.43478620052337646, 0.8530338406562805], [0.43468165397644043, 0.8493723273277283], [0.43453335762023926, 0.8453829884529114], [0.43442368507385254, 0.8417656421661377], [0.42928117513656616, 0.9792840480804443], [0.43039941787719727, 0.9718114137649536], [0.4312170743942261, 0.9654110670089722], [0.43234115839004517, 0.9579097032546997], [0.4330676198005676, 0.9514927864074707], [0.43406105041503906, 0.9441298246383667], [0.4347098469734192, 0.9377501010894775], [0.435573935508728, 0.9304662942886353], [0.43613582849502563, 0.9241166114807129], [0.43684542179107666, 0.9169675707817078], [0.43729037046432495, 0.9107085466384888], [0.43782126903533936, 0.9036421775817871], [0.43814414739608765, 0.8973996043205261], [0.4384692907333374, 0.8904150724411011], [0.4385541081428528, 0.8842189311981201], [0.43853330612182617, 0.8772695660591125], [0.4381325840950012, 0.8711410760879517], [0.43805527687072754, 0.865111231803894], [0.43872761726379395, 0.8596100211143494], [0.4390993118286133, 0.856360137462616], [0.4390973448753357, 0.8534084558486938], [0.4391351342201233, 0.8488786220550537], [0.4389973282814026, 0.8454476594924927], [0.4386014938354492, 0.8410862684249878], [0.4346308708190918, 0.9796076416969299], [0.4355000853538513, 0.9733160734176636], [0.4366486668586731, 0.9656902551651001], [0.43735724687576294, 0.9592607021331787], [0.4383864402770996, 0.951711118221283], [0.4389840364456177, 0.9453110694885254], [0.43984371423721313, 0.9379069209098816], [0.4403703212738037, 0.9315030574798584], [0.4410979747772217, 0.9242041110992432], [0.44153499603271484, 0.9178532361984253], [0.4420812129974365, 0.910682201385498], [0.4423799514770508, 0.9043488502502441], [0.44272714853286743, 0.8972630500793457], [0.44286584854125977, 0.8909111022949219], [0.44295549392700195, 0.8838696479797363], [0.44283705949783325, 0.8774919509887695], [0.44251275062561035, 0.8703190684318542], [0.4422494173049927, 0.8655374050140381], [0.44259828329086304, 0.8601057529449463], [0.44287949800491333, 0.8567953109741211], [0.44304144382476807, 0.853173017501831], [0.44318580627441406, 0.8493461012840271], [0.44326943159103394, 0.8448446989059448], [0.4427286982536316, 0.8405658602714539], [0.4397454857826233, 0.9812871217727661], [0.4409865140914917, 0.9735625982284546], [0.44168734550476074, 0.9670725464820862], [0.44274526834487915, 0.9594508409500122], [0.4433210492134094, 0.952897846698761], [0.4442078471183777, 0.945431113243103], [0.44469118118286133, 0.938928484916687], [0.44540905952453613, 0.9315646886825562], [0.4458199143409729, 0.9250914454460144], [0.44638973474502563, 0.9178285598754883], [0.4466884732246399, 0.9113949537277222], [0.4470590353012085, 0.9041999578475952], [0.4471970200538635, 0.89778733253479], [0.44731754064559937, 0.8906066417694092], [0.4472247362136841, 0.8841489553451538], [0.4470120668411255, 0.8769760131835938], [0.4465745687484741, 0.8702659606933594], [0.4464876055717468, 0.8652207851409912], [0.44660675525665283, 0.8608570098876953], [0.4467884302139282, 0.8566884398460388], [0.44670701026916504, 0.8533601760864258], [0.44651705026626587, 0.8491498231887817], [0.4462697505950928, 0.8448740243911743], [0.44613099098205566, 0.8398892879486084], [0.4453642964363098, 0.9814983606338501], [0.4460519552230835, 0.975045919418335], [0.4472046494483948, 0.9672122001647949], [0.44771134853363037, 0.9606432914733887], [0.4486408233642578, 0.9530074596405029], [0.4490898847579956, 0.9464529752731323], [0.44982749223709106, 0.9389564990997314], [0.4501909613609314, 
0.9324376583099365], [0.4507666230201721, 0.9250596761703491], [0.451058030128479, 0.9185630083084106], [0.45147329568862915, 0.9112581014633179], [0.451632022857666, 0.9047533273696899], [0.4517977237701416, 0.8974865674972534], [0.451752245426178, 0.8909462690353394], [0.4516338109970093, 0.8836497664451599], [0.45128965377807617, 0.8770249485969543], [0.45091480016708374, 0.8696016669273376], [0.45069897174835205, 0.8654962182044983], [0.45072442293167114, 0.8607577085494995], [0.4507463574409485, 0.85701984167099], [0.4505975842475891, 0.8527524471282959], [0.450265109539032, 0.8491029739379883], [0.44996964931488037, 0.8443766832351685], [0.45010656118392944, 0.8404538631439209], [0.450406551361084, 0.9831827282905579], [0.451793909072876, 0.9751353859901428], [0.45219749212265015, 0.9684077501296997], [0.4531644582748413, 0.9607130289077759], [0.4535484313964844, 0.9540239572525024], [0.45431584119796753, 0.9464741349220276], [0.45464277267456055, 0.9398074150085449], [0.45522046089172363, 0.9323844909667969], [0.4554826617240906, 0.9257887601852417], [0.4559069871902466, 0.9184319972991943], [0.45608752965927124, 0.9118565320968628], [0.4563401937484741, 0.9044884443283081], [0.4563448429107666, 0.8978691697120667], [0.4563342332839966, 0.8905179500579834], [0.45613187551498413, 0.8838080167770386], [0.45584458112716675, 0.8763607740402222], [0.45537376403808594, 0.8695885539054871], [0.45518505573272705, 0.8649978041648865], [0.4550231099128723, 0.8610421419143677], [0.4548872113227844, 0.8566234707832336], [0.45475953817367554, 0.8527971506118774], [0.45465636253356934, 0.8484402894973755], [0.4546082019805908, 0.8446406126022339], [0.45466554164886475, 0.8404715061187744], [0.4565649628639221, 0.9832282066345215], [0.4567580819129944, 0.9763190746307373], [0.4578348994255066, 0.9684408903121948], [0.45810699462890625, 0.9616885185241699], [0.45889246463775635, 0.9540296196937561], [0.459161639213562, 0.9473103284835815], [0.45976126194000244, 0.9397411346435547], [0.45999670028686523, 0.9331079125404358], [0.4604378938674927, 0.9256483316421509], [0.46059828996658325, 0.9190322160720825], [0.4608738422393799, 0.9116314649581909], [0.46092844009399414, 0.9049393534660339], [0.46101129055023193, 0.8974852561950684], [0.46089673042297363, 0.8907850980758667], [0.4607512354850769, 0.8832615613937378], [0.46047544479370117, 0.8764529824256897], [0.4600834846496582, 0.8687978982925415], [0.45973145961761475, 0.8650060892105103], [0.45945096015930176, 0.8604785203933716], [0.45928674936294556, 0.8567714691162109], [0.4591728448867798, 0.8523330092430115], [0.4591141939163208, 0.8486859202384949], [0.45912373065948486, 0.844338595867157], [0.45920050144195557, 0.8409744501113892], [0.46150344610214233, 0.9842674732208252], [0.46266698837280273, 0.9763538837432861], [0.4627990126609802, 0.9693537354469299], [0.4636094570159912, 0.9616719484329224], [0.4638018012046814, 0.9548279047012329], [0.46440768241882324, 0.9472391605377197], [0.4646098017692566, 0.9404528737068176], [0.4650668501853943, 0.9329724907875061], [0.46521127223968506, 0.926243782043457], [0.4655005931854248, 0.9188077449798584], [0.46555399894714355, 0.9121005535125732], [0.46567970514297485, 0.9046136140823364], [0.46563857793807983, 0.8978266716003418], [0.4655975103378296, 0.8903121948242188], [0.46538859605789185, 0.8834383487701416], [0.4650684595108032, 0.8758156299591064], [0.46448153257369995, 0.868681788444519], [0.4642297625541687, 0.8642985224723816], [0.46405184268951416, 0.8605740666389465], 
[0.4639173746109009, 0.8562507629394531], [0.463819682598114, 0.8525522351264954], [0.46377551555633545, 0.8483193516731262], [0.46380186080932617, 0.8447567224502563], [0.46388256549835205, 0.8408103585243225], [0.46775323152542114, 0.9843435287475586], [0.46766752004623413, 0.9771127700805664], [0.4684544801712036, 0.9693371057510376], [0.4685519337654114, 0.9624071717262268], [0.4691575765609741, 0.9547455310821533], [0.4693019390106201, 0.947918176651001], [0.4697679877281189, 0.9403218030929565], [0.4698892831802368, 0.9335577487945557], [0.4701823592185974, 0.9260170459747314], [0.47023534774780273, 0.9192906022071838], [0.4703865051269531, 0.9117805361747742], [0.470363974571228, 0.9049967527389526], [0.4703858494758606, 0.8974254131317139], [0.47024786472320557, 0.8905532360076904], [0.47005951404571533, 0.8828849196434021], [0.4696913957595825, 0.8758231401443481], [0.46923017501831055, 0.8678585886955261], [0.4689669609069824, 0.8643409013748169], [0.4687761068344116, 0.8600040078163147], [0.46865010261535645, 0.8564295768737793], [0.4685518741607666, 0.852092981338501], [0.4685128331184387, 0.8486464619636536], [0.46853381395339966, 0.8445077538490295], [0.4685680866241455, 0.8412300944328308], [0.472827672958374, 0.9848152995109558], [0.47349095344543457, 0.9771068096160889], [0.4734804630279541, 0.969950795173645], [0.47403883934020996, 0.9623322486877441], [0.4741137623786926, 0.9553554058074951], [0.47454404830932617, 0.947780430316925], [0.47463709115982056, 0.9408934116363525], [0.4749516248703003, 0.9333411455154419], [0.47500813007354736, 0.9264988899230957], [0.4751865267753601, 0.9189943671226501], [0.4751894474029541, 0.9121841192245483], [0.47525161504745483, 0.9046208262443542], [0.47515642642974854, 0.8977232575416565], [0.47506117820739746, 0.8900650143623352], [0.4748321771621704, 0.883028507232666], [0.4745292663574219, 0.8751425743103027], [0.47407835721969604, 0.8678222894668579], [0.4738897681236267, 0.8636560440063477], [0.47373461723327637, 0.8601484298706055], [0.47360509634017944, 0.8558902740478516], [0.47353118658065796, 0.8523617386817932], [0.4735003709793091, 0.8482671976089478], [0.4734851121902466, 0.8448823094367981], [0.4735076427459717, 0.8409603238105774], [0.4786533713340759, 0.9848268032073975], [0.4785803556442261, 0.9775637984275818], [0.4790691137313843, 0.9698814153671265], [0.4790758490562439, 0.9628483057022095], [0.4794466495513916, 0.9552052617073059], [0.47948288917541504, 0.9482901096343994], [0.4797595143318176, 0.9406863451004028], [0.4798067808151245, 0.9338211417198181], [0.48000335693359375, 0.9262208938598633], [0.48001694679260254, 0.9194107055664062], [0.48010051250457764, 0.9118350744247437], [0.48004382848739624, 0.9049559831619263], [0.480016827583313, 0.8972849249839783], [0.47987639904022217, 0.8903008699417114], [0.4797002077102661, 0.8824652433395386], [0.4794226884841919, 0.8752403855323792], [0.47909724712371826, 0.8670992851257324], [0.47890597581863403, 0.8637410402297974], [0.4787259101867676, 0.8595342636108398], [0.4786107540130615, 0.8561055660247803], [0.4785328507423401, 0.8519091010093689], [0.47848933935165405, 0.8485863208770752], [0.47847455739974976, 0.8445186614990234], [0.4784733057022095, 0.8413496017456055], [0.4838275909423828, 0.985095739364624], [0.48416954278945923, 0.9775189757347107], [0.48414039611816406, 0.9702891707420349], [0.48445838689804077, 0.9627044200897217], [0.4844707250595093, 0.9556448459625244], [0.4847095012664795, 0.9480741024017334], [0.4847338795661926, 0.9411300420761108], 
[0.48490703105926514, 0.9335587620735168], [0.48493170738220215, 0.9266476631164551], [0.4850430488586426, 0.9190841913223267], [0.48501354455947876, 0.9121894836425781], [0.4850224256515503, 0.9045590162277222], [0.4849514961242676, 0.897589921951294], [0.4848836064338684, 0.8898190259933472], [0.4847038984298706, 0.8826486468315125], [0.4844837188720703, 0.8746494054794312], [0.4842138886451721, 0.8672109842300415], [0.4840952754020691, 0.8630853891372681], [0.48397696018218994, 0.8596898317337036], [0.4838712215423584, 0.8555722236633301], [0.48380768299102783, 0.8521952033042908], [0.48376643657684326, 0.8481547832489014], [0.483742356300354, 0.8448723554611206], [0.48374515771865845, 0.8410034775733948], [0.4894034266471863, 0.9850403070449829], [0.48933982849121094, 0.9777953624725342], [0.48955249786376953, 0.9701644778251648], [0.48953986167907715, 0.9630661010742188], [0.48971956968307495, 0.9554404616355896], [0.4897322654724121, 0.9484689831733704], [0.4898672103881836, 0.9408667087554932], [0.48988068103790283, 0.9339606761932373], [0.4899836778640747, 0.9263511896133423], [0.48998332023620605, 0.9194577932357788], [0.4900134801864624, 0.9118149280548096], [0.4899775981903076, 0.904900074005127], [0.4899650812149048, 0.8971775770187378], [0.4898703694343567, 0.8900882005691528], [0.4897419810295105, 0.8821265697479248], [0.4895815849304199, 0.8748604655265808], [0.4894087314605713, 0.8666350841522217], [0.48931455612182617, 0.8632919788360596], [0.48922234773635864, 0.8591199517250061], [0.48915600776672363, 0.8558416366577148], [0.489102303981781, 0.8517236709594727], [0.4890672564506531, 0.848483681678772], [0.48904329538345337, 0.8444628715515137], [0.4890286922454834, 0.8413795232772827], [0.49462956190109253, 0.985194742679596], [0.494728684425354, 0.9776721000671387], [0.4947161078453064, 0.9704408645629883], [0.4948195815086365, 0.9628667831420898], [0.4948214292526245, 0.9557769894599915], [0.4949043393135071, 0.9482145309448242], [0.4949081540107727, 0.9412230849266052], [0.4949629306793213, 0.9336644411087036], [0.49496781826019287, 0.9267235994338989], [0.49500298500061035, 0.9191164970397949], [0.4949949383735657, 0.9121654033660889], [0.4949997663497925, 0.904524028301239], [0.49496883153915405, 0.8975094556808472], [0.4949344992637634, 0.8896471261978149], [0.4948706030845642, 0.8824102878570557], [0.49479418992996216, 0.8743754029273987], [0.49468129873275757, 0.8668856620788574], [0.4946308135986328, 0.8627808094024658], [0.4946027994155884, 0.859429121017456], [0.49458038806915283, 0.8553640842437744], [0.4945593476295471, 0.8520668148994446], [0.4945449233055115, 0.8480480313301086], [0.4945211410522461, 0.844802975654602], [0.4945071339607239, 0.8409883975982666], [0.5, 0.9850869178771973], [0.5, 0.977856457233429], [0.5, 0.970248818397522], [0.5, 0.9631357789039612], [0.5, 0.955515444278717], [0.5, 0.948534369468689], [0.5, 0.9409127235412598], [0.5, 0.9340108633041382], [0.5, 0.9263874292373657], [0.5, 0.9194731712341309], [0.5, 0.911806583404541], [0.5, 0.904880702495575], [0.5, 0.8971289992332458], [0.5, 0.8899973034858704], [0.5, 0.8820056915283203], [0.5, 0.8747190237045288], [0.5, 0.8664416670799255], [0.5, 0.8631423711776733], [0.5, 0.8590160012245178], [0.5, 0.8557363152503967], [0.5, 0.8516563177108765], [0.5, 0.8484296798706055], [0.5, 0.8443767428398132], [0.5, 0.8413850665092468], [0.5053704380989075, 0.985194742679596], [0.505271315574646, 0.9776721000671387], [0.5052838921546936, 0.9704408645629883], [0.5051804184913635, 0.9628667831420898], 
[0.5051785707473755, 0.9557769894599915], [0.5050956606864929, 0.9482145309448242], [0.5050918459892273, 0.9412230849266052], [0.5050370693206787, 0.9336644411087036], [0.5050321817398071, 0.9267235994338989], [0.5049970149993896, 0.9191164970397949], [0.5050050616264343, 0.9121654033660889], [0.5050002336502075, 0.904524028301239], [0.505031168460846, 0.8975094556808472], [0.5050655007362366, 0.8896471261978149], [0.5051293969154358, 0.8824102878570557], [0.5052058100700378, 0.8743754029273987], [0.5053187012672424, 0.8668856620788574], [0.5053691864013672, 0.8627808094024658], [0.5053972005844116, 0.859429121017456], [0.5054196119308472, 0.8553640842437744], [0.5054406523704529, 0.8520668148994446], [0.5054550766944885, 0.8480480313301086], [0.5054788589477539, 0.844802975654602], [0.5054928660392761, 0.8409883975982666], [0.5105965733528137, 0.9850403070449829], [0.5106601715087891, 0.9777953624725342], [0.5104475021362305, 0.9701644778251648], [0.5104601383209229, 0.9630661010742188], [0.510280430316925, 0.9554404616355896], [0.5102677345275879, 0.9484689831733704], [0.5101327896118164, 0.9408667087554932], [0.5101193189620972, 0.9339606761932373], [0.5100163221359253, 0.9263511896133423], [0.510016679763794, 0.9194577932357788], [0.5099865198135376, 0.9118149280548096], [0.5100224018096924, 0.904900074005127], [0.5100349187850952, 0.8971775770187378], [0.5101296305656433, 0.8900882005691528], [0.5102580189704895, 0.8821265697479248], [0.5104184150695801, 0.8748604655265808], [0.5105912685394287, 0.8666350841522217], [0.5106854438781738, 0.8632919788360596], [0.5107776522636414, 0.8591199517250061], [0.5108439922332764, 0.8558416366577148], [0.510897696018219, 0.8517236709594727], [0.5109327435493469, 0.848483681678772], [0.5109567046165466, 0.8444628715515137], [0.5109713077545166, 0.8413795232772827], [0.5161724090576172, 0.985095739364624], [0.5158304572105408, 0.9775189757347107], [0.5158596038818359, 0.9702891707420349], [0.5155416131019592, 0.9627044200897217], [0.5155292749404907, 0.9556448459625244], [0.5152904987335205, 0.9480741024017334], [0.5152661204338074, 0.9411300420761108], [0.5150929689407349, 0.9335587620735168], [0.5150682926177979, 0.9266476631164551], [0.5149569511413574, 0.9190841913223267], [0.5149864554405212, 0.9121894836425781], [0.5149775743484497, 0.9045590162277222], [0.5150485038757324, 0.897589921951294], [0.5151163935661316, 0.8898190259933472], [0.5152961015701294, 0.8826486468315125], [0.5155162811279297, 0.8746494054794312], [0.5157861113548279, 0.8672109842300415], [0.5159047245979309, 0.8630853891372681], [0.5160230398178101, 0.8596898317337036], [0.5161287784576416, 0.8555722236633301], [0.5161923170089722, 0.8521952033042908], [0.5162335634231567, 0.8481547832489014], [0.516257643699646, 0.8448723554611206], [0.5162548422813416, 0.8410034775733948], [0.5213466286659241, 0.9848268032073975], [0.5214196443557739, 0.9775637984275818], [0.5209308862686157, 0.9698814153671265], [0.5209241509437561, 0.9628483057022095], [0.5205533504486084, 0.9552052617073059], [0.520517110824585, 0.9482901096343994], [0.5202404856681824, 0.9406863451004028], [0.5201932191848755, 0.9338211417198181], [0.5199966430664062, 0.9262208938598633], [0.5199830532073975, 0.9194107055664062], [0.5198994874954224, 0.9118350744247437], [0.5199561715126038, 0.9049559831619263], [0.519983172416687, 0.8972849249839783], [0.5201236009597778, 0.8903008699417114], [0.5202997922897339, 0.8824652433395386], [0.5205773115158081, 0.8752403855323792], [0.5209027528762817, 
0.8670992851257324], [0.521094024181366, 0.8637410402297974], [0.5212740898132324, 0.8595342636108398], [0.5213892459869385, 0.8561055660247803], [0.5214671492576599, 0.8519091010093689], [0.521510660648346, 0.8485863208770752], [0.5215254426002502, 0.8445186614990234], [0.5215266942977905, 0.8413496017456055], [0.527172327041626, 0.9848152995109558], [0.5265090465545654, 0.9771068096160889], [0.5265195369720459, 0.969950795173645], [0.52596116065979, 0.9623322486877441], [0.5258862376213074, 0.9553554058074951], [0.5254559516906738, 0.947780430316925], [0.5253629088401794, 0.9408934116363525], [0.5250483751296997, 0.9333411455154419], [0.5249918699264526, 0.9264988899230957], [0.5248134732246399, 0.9189943671226501], [0.5248105525970459, 0.9121841192245483], [0.5247483849525452, 0.9046208262443542], [0.5248435735702515, 0.8977232575416565], [0.5249388217926025, 0.8900650143623352], [0.5251678228378296, 0.883028507232666], [0.5254707336425781, 0.8751425743103027], [0.525921642780304, 0.8678222894668579], [0.5261102318763733, 0.8636560440063477], [0.5262653827667236, 0.8601484298706055], [0.5263949036598206, 0.8558902740478516], [0.526468813419342, 0.8523617386817932], [0.5264996290206909, 0.8482671976089478], [0.5265148878097534, 0.8448823094367981], [0.5264923572540283, 0.8409603238105774], [0.5322467684745789, 0.9843435287475586], [0.5323324799537659, 0.9771127700805664], [0.5315455198287964, 0.9693371057510376], [0.5314480662345886, 0.9624071717262268], [0.5308424234390259, 0.9547455310821533], [0.5306980609893799, 0.947918176651001], [0.5302320122718811, 0.9403218030929565], [0.5301107168197632, 0.9335577487945557], [0.5298176407814026, 0.9260170459747314], [0.5297646522521973, 0.9192906022071838], [0.5296134948730469, 0.9117805361747742], [0.529636025428772, 0.9049967527389526], [0.5296141505241394, 0.8974254131317139], [0.5297521352767944, 0.8905532360076904], [0.5299404859542847, 0.8828849196434021], [0.5303086042404175, 0.8758231401443481], [0.5307698249816895, 0.8678585886955261], [0.5310330390930176, 0.8643409013748169], [0.5312238931655884, 0.8600040078163147], [0.5313498973846436, 0.8564295768737793], [0.5314481258392334, 0.852092981338501], [0.5314871668815613, 0.8486464619636536], [0.5314661860466003, 0.8445077538490295], [0.5314319133758545, 0.8412300944328308], [0.5384965538978577, 0.9842674732208252], [0.5373330116271973, 0.9763538837432861], [0.5372009873390198, 0.9693537354469299], [0.5363905429840088, 0.9616719484329224], [0.5361981987953186, 0.9548279047012329], [0.5355923175811768, 0.9472391605377197], [0.5353901982307434, 0.9404528737068176], [0.5349331498146057, 0.9329724907875061], [0.5347887277603149, 0.926243782043457], [0.5344994068145752, 0.9188077449798584], [0.5344460010528564, 0.9121005535125732], [0.5343202948570251, 0.9046136140823364], [0.5343614220619202, 0.8978266716003418], [0.5344024896621704, 0.8903121948242188], [0.5346114039421082, 0.8834383487701416], [0.5349315404891968, 0.8758156299591064], [0.5355184674263, 0.868681788444519], [0.5357702374458313, 0.8642985224723816], [0.5359481573104858, 0.8605740666389465], [0.5360826253890991, 0.8562507629394531], [0.536180317401886, 0.8525522351264954], [0.5362244844436646, 0.8483193516731262], [0.5361981391906738, 0.8447567224502563], [0.536117434501648, 0.8408103585243225], [0.5434350371360779, 0.9832282066345215], [0.5432419180870056, 0.9763190746307373], [0.5421651005744934, 0.9684408903121948], [0.5418930053710938, 0.9616885185241699], [0.5411075353622437, 0.9540296196937561], [0.540838360786438, 
0.9473103284835815], [0.5402387380599976, 0.9397411346435547], [0.5400032997131348, 0.9331079125404358], [0.5395621061325073, 0.9256483316421509], [0.5394017100334167, 0.9190322160720825], [0.5391261577606201, 0.9116314649581909], [0.5390715599060059, 0.9049393534660339], [0.5389887094497681, 0.8974852561950684], [0.5391032695770264, 0.8907850980758667], [0.5392487645149231, 0.8832615613937378], [0.5395245552062988, 0.8764529824256897], [0.5399165153503418, 0.8687978982925415], [0.5402685403823853, 0.8650060892105103], [0.5405490398406982, 0.8604785203933716], [0.5407132506370544, 0.8567714691162109], [0.5408271551132202, 0.8523330092430115], [0.5408858060836792, 0.8486859202384949], [0.5408762693405151, 0.844338595867157], [0.5407994985580444, 0.8409744501113892], [0.549593448638916, 0.9831827282905579], [0.548206090927124, 0.9751353859901428], [0.5478025078773499, 0.9684077501296997], [0.5468355417251587, 0.9607130289077759], [0.5464515686035156, 0.9540239572525024], [0.5456841588020325, 0.9464741349220276], [0.5453572273254395, 0.9398074150085449], [0.5447795391082764, 0.9323844909667969], [0.5445173382759094, 0.9257887601852417], [0.5440930128097534, 0.9184319972991943], [0.5439124703407288, 0.9118565320968628], [0.5436598062515259, 0.9044884443283081], [0.5436551570892334, 0.8978691697120667], [0.5436657667160034, 0.8905179500579834], [0.5438681244850159, 0.8838080167770386], [0.5441554188728333, 0.8763607740402222], [0.5446262359619141, 0.8695885539054871], [0.544814944267273, 0.8649978041648865], [0.5449768900871277, 0.8610421419143677], [0.5451127886772156, 0.8566234707832336], [0.5452404618263245, 0.8527971506118774], [0.5453436374664307, 0.8484402894973755], [0.5453917980194092, 0.8446406126022339], [0.5453344583511353, 0.8404715061187744], [0.5546357035636902, 0.9814983606338501], [0.5539480447769165, 0.975045919418335], [0.5527953505516052, 0.9672122001647949], [0.5522886514663696, 0.9606432914733887], [0.5513591766357422, 0.9530074596405029], [0.5509101152420044, 0.9464529752731323], [0.5501725077629089, 0.9389564990997314], [0.5498090386390686, 0.9324376583099365], [0.5492333769798279, 0.9250596761703491], [0.548941969871521, 0.9185630083084106], [0.5485267043113708, 0.9112581014633179], [0.548367977142334, 0.9047533273696899], [0.5482022762298584, 0.8974865674972534], [0.548247754573822, 0.8909462690353394], [0.5483661890029907, 0.8836497664451599], [0.5487103462219238, 0.8770249485969543], [0.5490851998329163, 0.8696016669273376], [0.549301028251648, 0.8654962182044983], [0.5492755770683289, 0.8607577085494995], [0.5492536425590515, 0.85701984167099], [0.5494024157524109, 0.8527524471282959], [0.549734890460968, 0.8491029739379883], [0.5500303506851196, 0.8443766832351685], [0.5498934388160706, 0.8404538631439209], [0.5602545142173767, 0.9812871217727661], [0.5590134859085083, 0.9735625982284546], [0.5583126544952393, 0.9670725464820862], [0.5572547316551208, 0.9594508409500122], [0.5566789507865906, 0.952897846698761], [0.5557921528816223, 0.945431113243103], [0.5553088188171387, 0.938928484916687], [0.5545909404754639, 0.9315646886825562], [0.5541800856590271, 0.9250914454460144], [0.5536102652549744, 0.9178285598754883], [0.5533115267753601, 0.9113949537277222], [0.5529409646987915, 0.9041999578475952], [0.5528029799461365, 0.89778733253479], [0.5526824593544006, 0.8906066417694092], [0.5527752637863159, 0.8841489553451538], [0.5529879331588745, 0.8769760131835938], [0.5534254312515259, 0.8702659606933594], [0.5535123944282532, 0.8652207851409912], [0.5533932447433472, 
0.8608570098876953], [0.5532115697860718, 0.8566884398460388], [0.553292989730835, 0.8533601760864258], [0.5534829497337341, 0.8491498231887817], [0.5537302494049072, 0.8448740243911743], [0.5538690090179443, 0.8398892879486084], [0.5653691291809082, 0.9796076416969299], [0.5644999146461487, 0.9733160734176636], [0.5633513331413269, 0.9656902551651001], [0.5626427531242371, 0.9592607021331787], [0.5616135597229004, 0.951711118221283], [0.5610159635543823, 0.9453110694885254], [0.5601562857627869, 0.9379069209098816], [0.5596296787261963, 0.9315030574798584], [0.5589020252227783, 0.9242041110992432], [0.5584650039672852, 0.9178532361984253], [0.5579187870025635, 0.910682201385498], [0.5576200485229492, 0.9043488502502441], [0.5572728514671326, 0.8972630500793457], [0.5571341514587402, 0.8909111022949219], [0.557044506072998, 0.8838696479797363], [0.5571629405021667, 0.8774919509887695], [0.5574872493743896, 0.8703190684318542], [0.5577505826950073, 0.8655374050140381], [0.557401716709137, 0.8601057529449463], [0.5571205019950867, 0.8567953109741211], [0.5569585561752319, 0.853173017501831], [0.5568141937255859, 0.8493461012840271], [0.5567305684089661, 0.8448446989059448], [0.5572713017463684, 0.8405658602714539], [0.5707188248634338, 0.9792840480804443], [0.5696005821228027, 0.9718114137649536], [0.5687829256057739, 0.9654110670089722], [0.5676588416099548, 0.9579097032546997], [0.5669323801994324, 0.9514927864074707], [0.5659389495849609, 0.9441298246383667], [0.5652901530265808, 0.9377501010894775], [0.564426064491272, 0.9304662942886353], [0.5638641715049744, 0.9241166114807129], [0.5631545782089233, 0.9169675707817078], [0.562709629535675, 0.9107085466384888], [0.5621787309646606, 0.9036421775817871], [0.5618558526039124, 0.8973996043205261], [0.5615307092666626, 0.8904150724411011], [0.5614458918571472, 0.8842189311981201], [0.5614666938781738, 0.8772695660591125], [0.5618674159049988, 0.8711410760879517], [0.5619447231292725, 0.865111231803894], [0.561272382736206, 0.8596100211143494], [0.5609006881713867, 0.856360137462616], [0.5609026551246643, 0.8534084558486938], [0.5608648657798767, 0.8488786220550537], [0.5610026717185974, 0.8454476594924927], [0.5613985061645508, 0.8410862684249878], [0.575891375541687, 0.9778263568878174], [0.575067400932312, 0.9714572429656982], [0.5738537311553955, 0.9639310836791992], [0.5730327367782593, 0.9576081037521362], [0.5719165205955505, 0.950157642364502], [0.571164608001709, 0.9438763856887817], [0.5701619386672974, 0.9365564584732056], [0.5694863796234131, 0.9302847385406494], [0.5686274170875549, 0.9230736494064331], [0.5680482983589172, 0.9168701171875], [0.5673441290855408, 0.9098276495933533], [0.5668567419052124, 0.9036364555358887], [0.5663104057312012, 0.8966943025588989], [0.5660037398338318, 0.8905829191207886], [0.5657570362091064, 0.8837961554527283], [0.5657246112823486, 0.877669632434845], [0.5658199787139893, 0.8711609840393066], [0.5658605098724365, 0.8651816844940186], [0.5654195547103882, 0.8585669994354248], [0.5653272271156311, 0.8565791845321655], [0.5652137994766235, 0.8530338406562805], [0.5653183460235596, 0.8493723273277283], [0.5654666423797607, 0.8453829884529114], [0.5655763149261475, 0.8417656421661377], [0.5815576314926147, 0.9774341583251953], [0.5802470445632935, 0.9698784351348877], [0.5793397426605225, 0.9635300636291504], [0.5781066417694092, 0.956123411655426], [0.5772449970245361, 0.9498199224472046], [0.5761328935623169, 0.9425244331359863], [0.5753458738327026, 0.9362730979919434], [0.5743582844734192, 
0.929082453250885], [0.5736509561538696, 0.9228613972663879], [0.5727975368499756, 0.915818452835083], [0.5721861124038696, 0.9096980690956116], [0.5714758634567261, 0.9027353525161743], [0.57099449634552, 0.8966870307922363], [0.5704929232597351, 0.889909029006958], [0.5701785087585449, 0.8839677572250366], [0.5699477195739746, 0.877302885055542], [0.5698066353797913, 0.8714559674263], [0.5697703957557678, 0.8649020195007324], [0.5702007412910461, 0.858935534954071], [0.5701557993888855, 0.8558462858200073], [0.569977343082428, 0.8531708717346191], [0.569771409034729, 0.8491264581680298], [0.5697423815727234, 0.8457661867141724], [0.5698124766349792, 0.8416361808776855], [0.5868173241615295, 0.9757261276245117], [0.5858498811721802, 0.9694198369979858], [0.5844951868057251, 0.9618946313858032], [0.5835473537445068, 0.9556888341903687], [0.5823009014129639, 0.9483184814453125], [0.5814088582992554, 0.9421464204788208], [0.5802907943725586, 0.9349191188812256], [0.5794742107391357, 0.9287670850753784], [0.5784803032875061, 0.9216544032096863], [0.5777570605278015, 0.9155797958374023], [0.5769001841545105, 0.9086324572563171], [0.5762729644775391, 0.9025945067405701], [0.5755709409713745, 0.8957949876785278], [0.5750640630722046, 0.8898931741714478], [0.5745399594306946, 0.8832787275314331], [0.5742084980010986, 0.8774878978729248], [0.5739314556121826, 0.8709431886672974], [0.5738846659660339, 0.8653382062911987], [0.5739831924438477, 0.8589452505111694], [0.5741774439811707, 0.8560953140258789], [0.5741601586341858, 0.852547287940979], [0.5740792751312256, 0.8493581414222717], [0.5740303993225098, 0.8454302549362183], [0.5740719437599182, 0.8421194553375244], [0.5925127863883972, 0.9752019643783569], [0.5910239815711975, 0.9676526784896851], [0.5899795889854431, 0.9613999128341675], [0.5886474847793579, 0.9540450572967529], [0.5876420736312866, 0.9478408098220825], [0.5864059925079346, 0.9406439065933228], [0.5854784250259399, 0.9345090389251709], [0.5843634009361267, 0.9274091720581055], [0.5835230350494385, 0.9213178157806396], [0.5825394988059998, 0.9143667817115784], [0.5817899703979492, 0.9083658456802368], [0.5809412002563477, 0.9015406370162964], [0.5803110003471375, 0.8956298232078552], [0.5796272158622742, 0.8889948129653931], [0.5791497230529785, 0.8832558989524841], [0.5786855220794678, 0.8768027424812317], [0.5784412026405334, 0.8711525797843933], [0.5783587098121643, 0.8650143146514893], [0.5785794258117676, 0.859591007232666], [0.5785900354385376, 0.8558660745620728], [0.5785937905311584, 0.8527517318725586], [0.5785529613494873, 0.8489599227905273], [0.5785467624664307, 0.8457474708557129], [0.5785670280456543, 0.8419076204299927], [0.5977828502655029, 0.9732054471969604], [0.5966119766235352, 0.967065691947937], [0.5951583981513977, 0.9596229791641235], [0.5940717458724976, 0.9534896612167358], [0.5927201509475708, 0.9461838006973267], [0.5916996002197266, 0.9401313066482544], [0.5904677510261536, 0.9329994916915894], [0.5895189046859741, 0.9269589185714722], [0.588395893573761, 0.9199539422988892], [0.5875312089920044, 0.9139907360076904], [0.5865389108657837, 0.9071477651596069], [0.5857921838760376, 0.9012717604637146], [0.5849680304527283, 0.8945741057395935], [0.584372878074646, 0.8888517618179321], [0.5837299227714539, 0.8823710680007935], [0.5832885503768921, 0.8768247961997986], [0.5828810930252075, 0.8705453872680664], [0.5827246904373169, 0.865371584892273], [0.5826841592788696, 0.8594304323196411], [0.5828359127044678, 0.856293797492981], [0.5829042196273804, 
0.852421760559082], [0.5829432010650635, 0.8492988348007202], [0.5829723477363586, 0.8454416990280151], [0.5830507874488831, 0.8423824310302734], [0.6033341884613037, 0.9725399017333984], [0.6017882823944092, 0.9651278853416443], [0.6005725860595703, 0.9589974880218506], [0.5991230607032776, 0.9517238140106201], [0.597985029220581, 0.9455997943878174], [0.5966569185256958, 0.9385042786598206], [0.5956096649169922, 0.9324756860733032], [0.594392716884613, 0.9254679679870605], [0.5934224724769592, 0.9194821119308472], [0.5923222303390503, 0.9126362800598145], [0.5914710760116577, 0.9067672491073608], [0.5905297994613647, 0.9000766277313232], [0.5898178219795227, 0.8943132162094116], [0.5890594124794006, 0.8878316283226013], [0.5885154008865356, 0.8822451233863831], [0.5879684686660767, 0.8759905099868774], [0.5876645445823669, 0.870669960975647], [0.5874416828155518, 0.8648557662963867], [0.5875100493431091, 0.8599969148635864], [0.5875303745269775, 0.8560571670532227], [0.5875439047813416, 0.8527736663818359], [0.5875115394592285, 0.8489794135093689], [0.5875293612480164, 0.845829963684082], [0.5875458717346191, 0.8421703577041626], [0.6085993051528931, 0.9704549312591553], [0.6072885990142822, 0.9643961191177368], [0.6057247519493103, 0.9570603966712952], [0.6044830083847046, 0.9510362148284912], [0.6030292510986328, 0.9438323974609375], [0.6018933653831482, 0.937880277633667], [0.6005669832229614, 0.9308508634567261], [0.5995022654533386, 0.9249048829078674], [0.5982906222343445, 0.9179959297180176], [0.5973457098007202, 0.9121595621109009], [0.5962890982627869, 0.9054340124130249], [0.5954693555831909, 0.8997060060501099], [0.5945754647254944, 0.8931518793106079], [0.5939237475395203, 0.8876040577888489], [0.5932363271713257, 0.8812739849090576], [0.5928037166595459, 0.8759706020355225], [0.5923764705657959, 0.869951069355011], [0.5921733379364014, 0.865104079246521], [0.5920292735099792, 0.8596951365470886], [0.5920939445495605, 0.8565008640289307], [0.5921244621276855, 0.8523969054222107], [0.5921320915222168, 0.8493205308914185], [0.5920892953872681, 0.8454675078392029], [0.5921026468276978, 0.8425792455673218], [0.6140466332435608, 0.9696493148803711], [0.6123917102813721, 0.9623504877090454], [0.6110341548919678, 0.9562983512878418], [0.6094863414764404, 0.9491415023803711], [0.6082252264022827, 0.9431393146514893], [0.6068096160888672, 0.9361478686332703], [0.6056342124938965, 0.9302060604095459], [0.6043208837509155, 0.923319935798645], [0.6032694578170776, 0.9174448847770691], [0.6021230220794678, 0.9107329845428467], [0.6012058258056641, 0.9049739837646484], [0.6002227663993835, 0.8984338045120239], [0.5994836091995239, 0.892831563949585], [0.5987079739570618, 0.8865059614181519], [0.5981591939926147, 0.881118655204773], [0.5976128578186035, 0.8751062154769897], [0.5972986221313477, 0.8700211048126221], [0.597013533115387, 0.8644845485687256], [0.5969721674919128, 0.8601021766662598], [0.5968859791755676, 0.8561050891876221], [0.5968499183654785, 0.8527637720108032], [0.5967708826065063, 0.8488640785217285], [0.5966987013816833, 0.8457217216491699], [0.5965943336486816, 0.8421998023986816], [0.6192564964294434, 0.9674409627914429], [0.6177970767021179, 0.9614717960357666], [0.6161237955093384, 0.9542545080184937], [0.614764392375946, 0.9483349323272705], [0.6132282018661499, 0.9412636756896973], [0.6119598746299744, 0.9354019165039062], [0.610529899597168, 0.9284741878509521], [0.6093780398368835, 0.9226675033569336], [0.6081152558326721, 0.9158888459205627], 
[0.6071034669876099, 0.9101821184158325], [0.6060084700584412, 0.9035840034484863], [0.6051719188690186, 0.8980206251144409], [0.6042925119400024, 0.8916391134262085], [0.6036575436592102, 0.8862555027008057], [0.6030037999153137, 0.880125880241394], [0.6025663018226624, 0.8750345706939697], [0.6021460294723511, 0.8692912459373474], [0.6019783020019531, 0.8646865487098694], [0.6017827391624451, 0.8596270084381104], [0.6017231941223145, 0.8563403487205505], [0.6015710830688477, 0.8522605895996094], [0.6014451384544373, 0.8490155339241028], [0.601249635219574, 0.8451663255691528], [0.6011388301849365, 0.8423608541488647], [0.6245415210723877, 0.966502845287323], [0.6227761507034302, 0.9593299627304077], [0.6212939023971558, 0.9533827900886536], [0.6196668148040771, 0.9463648796081543], [0.6182892322540283, 0.9404628276824951], [0.616787314414978, 0.9335766434669495], [0.6155151128768921, 0.9277348518371582], [0.6141488552093506, 0.921013355255127], [0.6130200028419495, 0.9152618646621704], [0.6118271350860596, 0.9086976051330566], [0.6108765602111816, 0.9030881524085999], [0.6098864078521729, 0.8967270851135254], [0.6091158390045166, 0.8912909030914307], [0.6083372831344604, 0.885182797908783], [0.6077812910079956, 0.8799589276313782], [0.6072428226470947, 0.8741724491119385], [0.6069162487983704, 0.8693356513977051], [0.6066114902496338, 0.8641078472137451], [0.6065117120742798, 0.8598501086235046], [0.6063200235366821, 0.8557597398757935], [0.6061416268348694, 0.8523309826850891], [0.6058979034423828, 0.8483983278274536], [0.6057018637657166, 0.8451690673828125], [0.6054561138153076, 0.841769814491272], [0.6295982003211975, 0.9641926288604736], [0.628010630607605, 0.9583472013473511], [0.6262713670730591, 0.9512708187103271], [0.6248027086257935, 0.9454538822174072], [0.6231904029846191, 0.9385141134262085], [0.621830403804779, 0.9327414035797119], [0.620343804359436, 0.9259370565414429], [0.6191158890724182, 0.9202741384506226], [0.6177963018417358, 0.9136468172073364], [0.6167401671409607, 0.9080924987792969], [0.6156266331672668, 0.9016695022583008], [0.6147547960281372, 0.8962662220001221], [0.6138478517532349, 0.8900647163391113], [0.6131682395935059, 0.884884238243103], [0.6124879717826843, 0.8789721131324768], [0.6120411157608032, 0.8740811347961426], [0.611608624458313, 0.8685859441757202], [0.6113522052764893, 0.8641796112060547], [0.6110516786575317, 0.859310507774353], [0.6109014749526978, 0.8557941317558289], [0.6106463670730591, 0.8516705632209778], [0.6104287505149841, 0.8483469486236572], [0.6100856065750122, 0.8444229364395142], [0.6098893880844116, 0.8417262434959412], [0.6346954107284546, 0.9631434679031372], [0.6328677535057068, 0.9561436176300049], [0.6312670111656189, 0.9503053426742554], [0.6295719742774963, 0.9434324502944946], [0.6281001567840576, 0.9376300573348999], [0.6265436410903931, 0.9308778047561646], [0.625205397605896, 0.9251345992088318], [0.6238048076629639, 0.9185624122619629], [0.6226223707199097, 0.9129546880722046], [0.6213955879211426, 0.9065685272216797], [0.620384156703949, 0.9011117219924927], [0.6193515062332153, 0.8949475288391113], [0.6185373067855835, 0.8896634578704834], [0.6177263259887695, 0.883769154548645], [0.617115318775177, 0.8787461519241333], [0.6165207624435425, 0.873207688331604], [0.6161149144172668, 0.8685382008552551], [0.6157183647155762, 0.8634964227676392], [0.6154589653015137, 0.8593232035636902], [0.6151077747344971, 0.8551301956176758], [0.6148657202720642, 0.851649284362793], [0.6145645976066589, 0.8476547002792358], 
[0.6142851710319519, 0.8442460894584656], [0.6139399409294128, 0.8410061597824097], [0.639622151851654, 0.9607824087142944], [0.6379233598709106, 0.9550588130950928], [0.6361092925071716, 0.9481347799301147], [0.6345436573028564, 0.9424400329589844], [0.6328723430633545, 0.9356409311294556], [0.6314429640769958, 0.9299817085266113], [0.6299322843551636, 0.9233201742172241], [0.6286653280258179, 0.9177684783935547], [0.6273206472396851, 0.9112998247146606], [0.6261969208717346, 0.9058829545974731], [0.6250253915786743, 0.899642825126648], [0.6240911483764648, 0.8944145441055298], [0.6231385469436646, 0.8884127736091614], [0.6224024295806885, 0.8833938837051392], [0.6216549873352051, 0.8777025938034058], [0.6211174726486206, 0.8729996681213379], [0.620578944683075, 0.867708146572113], [0.6201946139335632, 0.8633978962898254], [0.6197400093078613, 0.8585952520370483], [0.6194459795951843, 0.8549799919128418], [0.6190632581710815, 0.850950300693512], [0.6187101006507874, 0.8474436402320862], [0.6182641983032227, 0.8434982299804688], [0.6180248260498047, 0.8407896757125854], [0.6445837020874023, 0.9596300721168518], [0.6427022218704224, 0.9527885317802429], [0.6410114765167236, 0.9470662474632263], [0.6392561197280884, 0.9403446912765503], [0.6376895308494568, 0.9346634149551392], [0.636070191860199, 0.9280751943588257], [0.6346556544303894, 0.9224613308906555], [0.6332069039344788, 0.9160506725311279], [0.6319434642791748, 0.9105420112609863], [0.6306607723236084, 0.9043383002281189], [0.6295853853225708, 0.8990170955657959], [0.6285048723220825, 0.8930642604827881], [0.6276150345802307, 0.8879371285438538], [0.6267313957214355, 0.8822471499443054], [0.6260251998901367, 0.8773665428161621], [0.6253312826156616, 0.8720570206642151], [0.6248014569282532, 0.8675098419189453], [0.6242537498474121, 0.8625919818878174], [0.6238216757774353, 0.8584069013595581], [0.6233167052268982, 0.8542026281356812], [0.6229066848754883, 0.8507122993469238], [0.6224406957626343, 0.8466854095458984], [0.6221343874931335, 0.8433082103729248], [0.6217489838600159, 0.8400321006774902], [0.6494234800338745, 0.9572346210479736], [0.6476483941078186, 0.9516083598136902], [0.6457817554473877, 0.9448375701904297], [0.6441245079040527, 0.9392486810684204], [0.6423792839050293, 0.9326033592224121], [0.6408571600914001, 0.9270801544189453], [0.6392571926116943, 0.9205899238586426], [0.6378772258758545, 0.915169358253479], [0.6364433169364929, 0.9088606238365173], [0.6352604627609253, 0.9035965204238892], [0.6340456008911133, 0.897529125213623], [0.6330335140228271, 0.8924427032470703], [0.6319862008094788, 0.8866362571716309], [0.6311450004577637, 0.8817685842514038], [0.6302943825721741, 0.8762608766555786], [0.6296441555023193, 0.8717129230499268], [0.6289708018302917, 0.8665750622749329], [0.6284441351890564, 0.8623137474060059], [0.627816915512085, 0.8575249910354614], [0.6273211240768433, 0.8538292646408081], [0.6267479658126831, 0.8498415946960449], [0.6262955665588379, 0.8463853001594543], [0.6257941722869873, 0.8425939083099365], [0.6254271864891052, 0.8396514654159546], [0.6542684435844421, 0.9559942483901978], [0.652334451675415, 0.9493058919906616], [0.6505504846572876, 0.9436736106872559], [0.6487259864807129, 0.9371070861816406], [0.6470773220062256, 0.9315353035926819], [0.6453854441642761, 0.9251011610031128], [0.6438586115837097, 0.9196078777313232], [0.6423097848892212, 0.9133690595626831], [0.6409608721733093, 0.908013105392456], [0.6396080851554871, 0.902004599571228], [0.6384429931640625, 
0.8968237638473511], [0.6372722387313843, 0.8910312652587891], [0.6362687349319458, 0.8860392570495605], [0.6352641582489014, 0.8805437088012695], [0.6344367861747742, 0.8758057951927185], [0.6336045265197754, 0.8706661462783813], [0.6328956484794617, 0.8662046194076538], [0.6321544647216797, 0.8613981008529663], [0.6314849257469177, 0.8571269512176514], [0.6308099031448364, 0.8529417514801025], [0.6303058862686157, 0.8494745492935181], [0.6297438740730286, 0.8455774784088135], [0.6292916536331177, 0.842254638671875], [0.62876296043396, 0.8388316631317139], [0.6590147018432617, 0.9535648822784424], [0.6571540832519531, 0.948032021522522], [0.6552125215530396, 0.9413984417915344], [0.6534733772277832, 0.9359188675880432], [0.6516605019569397, 0.9294305443763733], [0.6500368714332581, 0.9240003824234009], [0.6483404636383057, 0.9176552891731262], [0.6468749046325684, 0.9123809337615967], [0.6453573107719421, 0.9062596559524536], [0.6440713405609131, 0.9011490345001221], [0.6427401304244995, 0.895262598991394], [0.6416066884994507, 0.8902982473373413], [0.6404399275779724, 0.8846539258956909], [0.6394834518432617, 0.879932165145874], [0.6384854316711426, 0.8745930194854736], [0.6376389861106873, 0.8701235055923462], [0.6367181539535522, 0.8651105165481567], [0.6359394192695618, 0.8608977794647217], [0.6350564360618591, 0.8561033606529236], [0.6344553232192993, 0.8525066375732422], [0.633822500705719, 0.8485735654830933], [0.6332948207855225, 0.8451743125915527], [0.6326747536659241, 0.8413931131362915], [0.6321788430213928, 0.8383617401123047], [0.6637818813323975, 0.9522179365158081], [0.6617691516876221, 0.9456652402877808], [0.6598894596099854, 0.9401295185089111], [0.6579809784889221, 0.9337193369865417], [0.6562186479568481, 0.928254246711731], [0.654431939125061, 0.9219720363616943], [0.652822732925415, 0.9165819883346558], [0.6512027978897095, 0.9105114936828613], [0.649754524230957, 0.9052958488464355], [0.6482924818992615, 0.8994641304016113], [0.6469923257827759, 0.8944140076637268], [0.6456819176673889, 0.8887957334518433], [0.6445475816726685, 0.8839330673217773], [0.6434034109115601, 0.8786095380783081], [0.6424005031585693, 0.8739645481109619], [0.641371488571167, 0.8689190149307251], [0.6404699683189392, 0.8645192384719849], [0.6395289897918701, 0.8597743511199951], [0.6386978626251221, 0.8555333614349365], [0.6378973126411438, 0.8515002727508545], [0.6372855305671692, 0.8481220006942749], [0.6366045475006104, 0.8442592620849609], [0.6360154151916504, 0.8409155607223511], [0.6353606581687927, 0.8374422788619995], [0.6684663891792297, 0.9497218132019043], [0.6665042638778687, 0.9442720413208008], [0.6644593477249146, 0.9377809762954712], [0.6625986099243164, 0.9324073791503906], [0.6606646776199341, 0.9260742664337158], [0.6589543223381042, 0.920784056186676], [0.6572003960609436, 0.9145941138267517], [0.6556546688079834, 0.909418523311615], [0.6540325880050659, 0.9034510850906372], [0.6526056528091431, 0.8984612226486206], [0.651127815246582, 0.892749547958374], [0.6498678922653198, 0.8879209756851196], [0.648549497127533, 0.882436990737915], [0.6474098563194275, 0.8778202533721924], [0.6462116837501526, 0.8726229071617126], [0.6452022790908813, 0.8682184815406799], [0.6441196799278259, 0.8632825613021851], [0.6431915163993835, 0.8591130375862122], [0.64216148853302, 0.8543967008590698], [0.6414576768875122, 0.8509082794189453], [0.6406900882720947, 0.8470920324325562], [0.640008807182312, 0.8437039852142334], [0.6392384767532349, 0.839934229850769], [0.6386387348175049, 
0.8368826508522034], [0.6731488108634949, 0.9482591152191162], [0.6710326075553894, 0.9418308734893799], [0.6690415740013123, 0.936385989189148], [0.667026698589325, 0.9301084280014038], [0.6651651263237, 0.92476487159729], [0.6632862687110901, 0.9186705350875854], [0.6615748405456543, 0.9134246110916138], [0.6598411798477173, 0.9074891805648804], [0.6582503318786621, 0.9023585319519043], [0.6566509008407593, 0.8966819047927856], [0.655234694480896, 0.8917741179466248], [0.6537926197052002, 0.8863160014152527], [0.6524949073791504, 0.881550669670105], [0.6511755585670471, 0.8763484954833984], [0.6500141620635986, 0.8718204498291016], [0.6488145589828491, 0.8668835163116455], [0.6477404832839966, 0.862534761428833], [0.6466405987739563, 0.8578808307647705], [0.6456835269927979, 0.8537077903747559], [0.6447782516479492, 0.8497921228408813], [0.6440357565879822, 0.8464647531509399], [0.6432067155838013, 0.842659592628479], [0.6425180435180664, 0.8393392562866211], [0.641769289970398, 0.8358651399612427], [0.6777560710906982, 0.9456701874732971], [0.6756726503372192, 0.9403114318847656], [0.6735203266143799, 0.9339596033096313], [0.6715714335441589, 0.928684413433075], [0.6695497035980225, 0.9224908351898193], [0.6677260398864746, 0.9173353314399719], [0.6658260226249695, 0.9113353490829468], [0.6641229391098022, 0.9062677621841431], [0.6623562574386597, 0.9004307389259338], [0.6608283519744873, 0.895565927028656], [0.6592345833778381, 0.8900108337402344], [0.6578112840652466, 0.8852826356887817], [0.6563220024108887, 0.8799405097961426], [0.6550476551055908, 0.8754169940948486], [0.6536983847618103, 0.8703416585922241], [0.652513861656189, 0.8660115003585815], [0.6512360572814941, 0.8611642718315125], [0.6501554846763611, 0.8570836782455444], [0.6489842534065247, 0.8524929881095886], [0.6481877565383911, 0.8490889072418213], [0.6473052501678467, 0.8453054428100586], [0.6465316414833069, 0.8419877290725708], [0.6456764340400696, 0.8382803201675415], [0.6450023055076599, 0.8352144956588745], [0.6823762655258179, 0.9440615177154541], [0.6801403164863586, 0.9377645254135132], [0.6780443787574768, 0.9324359893798828], [0.6759219169616699, 0.926306962966919], [0.6739436388015747, 0.9210647940635681], [0.6719439625740051, 0.9151169061660767], [0.6700891256332397, 0.9099968671798706], [0.6682037711143494, 0.9042186737060547], [0.6664945483207703, 0.8992106914520264], [0.6647756695747375, 0.893690288066864], [0.6631944179534912, 0.8888802528381348], [0.6615839004516602, 0.8835576772689819], [0.6601549983024597, 0.8789138793945312], [0.658694326877594, 0.8738381862640381], [0.6573805809020996, 0.8693841099739075], [0.6560150384902954, 0.8645318746566772], [0.6547970175743103, 0.8602570295333862], [0.6535540223121643, 0.8557026386260986], [0.6524806022644043, 0.8516637682914734], [0.6514702439308167, 0.8478550314903259], [0.6506249308586121, 0.8445515036582947], [0.6496989727020264, 0.8408315181732178], [0.6489274501800537, 0.8375868201255798], [0.6480873823165894, 0.8341221809387207], [0.6869083046913147, 0.9413806200027466], [0.6847277283668518, 0.9361116886138916], [0.6824657320976257, 0.9298988580703735], [0.6803898811340332, 0.9247341156005859], [0.6782299280166626, 0.9186895489692688], [0.6762690544128418, 0.913637638092041], [0.6742222905158997, 0.9077842235565186], [0.6723974943161011, 0.9028521776199341], [0.6705006957054138, 0.8971823453903198], [0.6688146591186523, 0.8924160003662109], [0.6670321822166443, 0.8869867920875549], [0.6654723286628723, 0.8823909759521484], [0.6638543009757996, 
[... data file body elided: several thousand normalized (x, y) coordinate pairs (x roughly 0.24 to 0.76, y roughly 0.33 to 0.95), evidently dense per-frame facial-landmark tracking output bundled with this diff ...]
0.356894314289093], [0.400235652923584, 0.3517358899116516], [0.3983539342880249, 0.3468944728374481], [0.3962467908859253, 0.3416316509246826], [0.3942826986312866, 0.33673614263534546], [0.392093300819397, 0.3314109146595001], [0.3900824785232544, 0.3264698386192322], [0.43449312448501587, 0.43512919545173645], [0.43306994438171387, 0.4307337999343872], [0.4317395091056824, 0.4264284372329712], [0.43028879165649414, 0.4218292832374573], [0.42893385887145996, 0.417421817779541], [0.4274200201034546, 0.4126662313938141], [0.42601001262664795, 0.40818288922309875], [0.4244469404220581, 0.4033718407154083], [0.4229798913002014, 0.3988172113895416], [0.42132270336151123, 0.3938538432121277], [0.4197957515716553, 0.3892463743686676], [0.4180558919906616, 0.38420408964157104], [0.41645604372024536, 0.3795393705368042], [0.41464245319366455, 0.3743894100189209], [0.41297125816345215, 0.369625449180603], [0.4110514521598816, 0.3643694519996643], [0.40930384397506714, 0.359585165977478], [0.40733885765075684, 0.3542829751968384], [0.40556973218917847, 0.3494988679885864], [0.40352463722229004, 0.34415483474731445], [0.4016709327697754, 0.3393019139766693], [0.3995567560195923, 0.3338719606399536], [0.39764297008514404, 0.328968346118927], [0.3954499363899231, 0.32349804043769836], [0.4387224316596985, 0.4333520531654358], [0.43747079372406006, 0.4292065501213074], [0.43608176708221436, 0.4246559143066406], [0.4347935914993286, 0.4202979803085327], [0.43334174156188965, 0.4155857264995575], [0.4319790005683899, 0.41109251976013184], [0.4304956793785095, 0.4062929153442383], [0.42910027503967285, 0.40173426270484924], [0.4275336265563965, 0.396828830242157], [0.42604875564575195, 0.3921487331390381], [0.42442989349365234, 0.3871755599975586], [0.422885000705719, 0.38240522146224976], [0.4211884140968323, 0.37735122442245483], [0.4195699691772461, 0.3725194036960602], [0.4177854657173157, 0.36733612418174744], [0.41609513759613037, 0.36239635944366455], [0.414264976978302, 0.3571639657020569], [0.4125404953956604, 0.3522298336029053], [0.41062796115875244, 0.34694191813468933], [0.4088289737701416, 0.3419853448867798], [0.4068441390991211, 0.3366278409957886], [0.40500903129577637, 0.3316313624382019], [0.40295809507369995, 0.3261515498161316], [0.40101295709609985, 0.3210906982421875], [0.4430714249610901, 0.43188461661338806], [0.44179868698120117, 0.42751288414001465], [0.44058287143707275, 0.4231855571269989], [0.4392356872558594, 0.41854918003082275], [0.4379534125328064, 0.4140513837337494], [0.436535120010376, 0.40925443172454834], [0.4352046251296997, 0.40471673011779785], [0.43371647596359253, 0.39981573820114136], [0.4323223829269409, 0.3951798975467682], [0.43076449632644653, 0.39013946056365967], [0.42932915687561035, 0.38546422123908997], [0.427707314491272, 0.3803042769432068], [0.4262169599533081, 0.3755452036857605], [0.4245028495788574, 0.37028229236602783], [0.4229361414909363, 0.3654508590698242], [0.4211620092391968, 0.36006665229797363], [0.41956061124801636, 0.3551945388317108], [0.4177156686782837, 0.3497665226459503], [0.4160516858100891, 0.3448628783226013], [0.41415250301361084, 0.3393808603286743], [0.41243600845336914, 0.33444133400917053], [0.4104645848274231, 0.3289058804512024], [0.40867096185684204, 0.32388055324554443], [0.406535804271698, 0.31814223527908325], [0.44721877574920654, 0.43032339215278625], [0.44611912965774536, 0.4261903762817383], [0.4448438882827759, 0.42157137393951416], [0.44364845752716064, 0.4171810448169708], [0.44229912757873535, 
0.41237521171569824], [0.4410301446914673, 0.40782445669174194], [0.4396260380744934, 0.40296071767807007], [0.4382948875427246, 0.39832156896591187], [0.43683314323425293, 0.3933534026145935], [0.4354515075683594, 0.38859620690345764], [0.4339498281478882, 0.3835518956184387], [0.4325180649757385, 0.37870192527770996], [0.43095535039901733, 0.37353700399398804], [0.4294583201408386, 0.36857396364212036], [0.4278332591056824, 0.36332279443740845], [0.4262855052947998, 0.35829126834869385], [0.42459434270858765, 0.3529440760612488], [0.42299187183380127, 0.34787997603416443], [0.4212387204170227, 0.3424805998802185], [0.4196010231971741, 0.3374112844467163], [0.41779035329818726, 0.33191752433776855], [0.41607171297073364, 0.32679492235183716], [0.4140974283218384, 0.32118678092956543], [0.4123062491416931, 0.31595057249069214], [0.45136022567749023, 0.4291267991065979], [0.4502323865890503, 0.4247363209724426], [0.4491202235221863, 0.4203181266784668], [0.44789910316467285, 0.4156397581100464], [0.446713924407959, 0.41105929017066956], [0.44541919231414795, 0.40619200468063354], [0.444192111492157, 0.4015626907348633], [0.44283026456832886, 0.39658433198928833], [0.44155430793762207, 0.3918892443180084], [0.4401358366012573, 0.38677704334259033], [0.4388289451599121, 0.38202399015426636], [0.43734216690063477, 0.3767794668674469], [0.4359825849533081, 0.37194329500198364], [0.4344383478164673, 0.36655229330062866], [0.43303680419921875, 0.3616417348384857], [0.43141472339630127, 0.35614609718322754], [0.4299473166465759, 0.35117337107658386], [0.42826712131500244, 0.34561043977737427], [0.4267570972442627, 0.3406144380569458], [0.42503345012664795, 0.3350263833999634], [0.42348045110702515, 0.32997050881385803], [0.42165815830230713, 0.32422053813934326], [0.4200540781021118, 0.3190808892250061], [0.41830605268478394, 0.3133143186569214], [0.45533353090286255, 0.4278125464916229], [0.4543558359146118, 0.423624724149704], [0.4532109498977661, 0.4189281463623047], [0.4521293640136719, 0.414468914270401], [0.4509207606315613, 0.409601628780365], [0.4497835636138916, 0.40497082471847534], [0.44853150844573975, 0.4000129699707031], [0.44733506441116333, 0.3952862024307251], [0.4460188150405884, 0.39024853706359863], [0.44477593898773193, 0.38543570041656494], [0.44343113899230957, 0.38031262159347534], [0.44214481115341187, 0.37536656856536865], [0.4407513737678528, 0.37013256549835205], [0.43941813707351685, 0.3650834560394287], [0.43798327445983887, 0.35973167419433594], [0.4365973472595215, 0.3545692563056946], [0.43506717681884766, 0.34910666942596436], [0.43360990285873413, 0.34393975138664246], [0.4320356249809265, 0.3384424149990082], [0.4305574297904968, 0.3332756757736206], [0.42891091108322144, 0.3276745080947876], [0.42740291357040405, 0.32240286469459534], [0.42584872245788574, 0.3167358338832855], [0.42447662353515625, 0.3115251958370209], [0.45926809310913086, 0.4268324077129364], [0.45829296112060547, 0.42240697145462036], [0.4573129415512085, 0.41788381338119507], [0.45624321699142456, 0.413147896528244], [0.45519423484802246, 0.40849509835243225], [0.454043984413147, 0.40356552600860596], [0.4529486894607544, 0.39884746074676514], [0.45174145698547363, 0.3937848210334778], [0.4506167769432068, 0.3890169858932495], [0.4493774175643921, 0.3838490843772888], [0.44823014736175537, 0.37902572751045227], [0.44693952798843384, 0.3736920952796936], [0.44575101137161255, 0.3687692880630493], [0.4443759322166443, 0.36329296231269836], [0.44312310218811035, 0.3583133816719055], 
[0.44167637825012207, 0.35268616676330566], [0.4403762221336365, 0.34759193658828735], [0.4389077425003052, 0.341924786567688], [0.43760740756988525, 0.33685559034347534], [0.4361227750778198, 0.3311261236667633], [0.4348064661026001, 0.3259808421134949], [0.4333432912826538, 0.32017940282821655], [0.4320838451385498, 0.31508293747901917], [0.43060892820358276, 0.3092525899410248], [0.46309274435043335, 0.42572253942489624], [0.4622412323951721, 0.4215092062950134], [0.46125704050064087, 0.4167312979698181], [0.4603397250175476, 0.41219669580459595], [0.459287166595459, 0.40724149346351624], [0.4582711458206177, 0.4025349020957947], [0.45715630054473877, 0.3975212872028351], [0.4561113715171814, 0.3927217721939087], [0.45501232147216797, 0.3876422047615051], [0.453971803188324, 0.3827419877052307], [0.45282435417175293, 0.37754303216934204], [0.45171964168548584, 0.37252819538116455], [0.45052671432495117, 0.3671978712081909], [0.44934993982315063, 0.36201733350753784], [0.44802457094192505, 0.3565829396247864], [0.4467499852180481, 0.35136058926582336], [0.4454473853111267, 0.3458499014377594], [0.44424867630004883, 0.340575635433197], [0.4429740905761719, 0.3350062072277069], [0.44178855419158936, 0.32972097396850586], [0.4405267834663391, 0.32403701543807983], [0.43937504291534424, 0.3187524378299713], [0.43808263540267944, 0.3130131959915161], [0.4368804693222046, 0.30772286653518677], [0.4669686555862427, 0.4248952269554138], [0.46611541509628296, 0.42045754194259644], [0.46525895595550537, 0.41589224338531494], [0.4643576741218567, 0.4111095368862152], [0.46347033977508545, 0.40633535385131836], [0.46248340606689453, 0.40131038427352905], [0.4615440368652344, 0.39654281735420227], [0.4605240225791931, 0.3914383053779602], [0.45957934856414795, 0.3866555094718933], [0.4585338830947876, 0.38141578435897827], [0.4575727581977844, 0.3765037953853607], [0.45647960901260376, 0.37109601497650146], [0.4554712772369385, 0.36610764265060425], [0.4543216824531555, 0.36047470569610596], [0.4532673954963684, 0.35532939434051514], [0.45207953453063965, 0.34966224431991577], [0.4510326385498047, 0.3445979356765747], [0.44984591007232666, 0.33884525299072266], [0.4488084316253662, 0.3337271213531494], [0.4476228952407837, 0.32792389392852783], [0.4465857744216919, 0.32276201248168945], [0.445392370223999, 0.3168973922729492], [0.44433915615081787, 0.3116706609725952], [0.4430953860282898, 0.305716872215271], [0.47071731090545654, 0.4239485263824463], [0.46997755765914917, 0.4196999669075012], [0.46912091970443726, 0.41490837931632996], [0.46834492683410645, 0.41037172079086304], [0.4674908518791199, 0.40532147884368896], [0.466678261756897, 0.4004965126514435], [0.4657824635505676, 0.39541855454444885], [0.46490800380706787, 0.3905637264251709], [0.4639583230018616, 0.38546285033226013], [0.46306657791137695, 0.3805369734764099], [0.4621487259864807, 0.3753007650375366], [0.461270809173584, 0.3701690435409546], [0.46026837825775146, 0.364751398563385], [0.45929861068725586, 0.359510600566864], [0.4583216905593872, 0.3539430499076843], [0.4574097990989685, 0.3486362099647522], [0.4564172625541687, 0.3430789113044739], [0.45546644926071167, 0.33777520060539246], [0.4544405937194824, 0.33213359117507935], [0.45348286628723145, 0.32682162523269653], [0.45243871212005615, 0.32109957933425903], [0.4514780044555664, 0.3157655596733093], [0.45040810108184814, 0.3099246025085449], [0.44942307472229004, 0.30451035499572754], [0.4745051860809326, 0.4232743978500366], [0.4737887382507324, 0.418811559677124], 
[0.47308409214019775, 0.4142228066921234], [0.47235095500946045, 0.4094621241092682], [0.47162991762161255, 0.40464580059051514], [0.4708647131919861, 0.39954131841659546], [0.4701351523399353, 0.3946740925312042], [0.46930038928985596, 0.38947224617004395], [0.46852242946624756, 0.38465553522109985], [0.4676569104194641, 0.37939658761024475], [0.4668771028518677, 0.3745325207710266], [0.46604156494140625, 0.3690441846847534], [0.4652843475341797, 0.3638942539691925], [0.46441709995269775, 0.3582521378993988], [0.46363013982772827, 0.353111207485199], [0.4627411961555481, 0.34733372926712036], [0.4619516134262085, 0.34218376874923706], [0.4610391855239868, 0.3363717794418335], [0.46022695302963257, 0.33119189739227295], [0.45929282903671265, 0.3253359794616699], [0.4584686756134033, 0.32012563943862915], [0.4575190544128418, 0.31420737504959106], [0.45667529106140137, 0.30891236662864685], [0.455696702003479, 0.3028649687767029], [0.4781982898712158, 0.4224843382835388], [0.4776197075843811, 0.41823893785476685], [0.47696298360824585, 0.4134192168712616], [0.47636377811431885, 0.40888920426368713], [0.47568029165267944, 0.4038068652153015], [0.47503894567489624, 0.39896512031555176], [0.474346399307251, 0.39378613233566284], [0.4736773371696472, 0.38881558179855347], [0.47295546531677246, 0.38367176055908203], [0.4722393751144409, 0.37868666648864746], [0.471473753452301, 0.3735048174858093], [0.4707978367805481, 0.3684442639350891], [0.4701310992240906, 0.3629087209701538], [0.4694936275482178, 0.35758543014526367], [0.4687838554382324, 0.3520090579986572], [0.4680945873260498, 0.34663158655166626], [0.46735072135925293, 0.3409930169582367], [0.46663498878479004, 0.33561772108078003], [0.4658623933792114, 0.3299185633659363], [0.4651377201080322, 0.3245493471622467], [0.46434664726257324, 0.3187790513038635], [0.4636141061782837, 0.31339186429977417], [0.4627952575683594, 0.30748701095581055], [0.46204376220703125, 0.3020173907279968], [0.48190373182296753, 0.42198923230171204], [0.4813726544380188, 0.41755515336990356], [0.4808504581451416, 0.41294124722480774], [0.4803192615509033, 0.40818798542022705], [0.47978758811950684, 0.4033118486404419], [0.4791998863220215, 0.3981984853744507], [0.47864091396331787, 0.39327922463417053], [0.47803372144699097, 0.38799262046813965], [0.4774820804595947, 0.38312435150146484], [0.4768572449684143, 0.3777654767036438], [0.4762994050979614, 0.3729155957698822], [0.47572141885757446, 0.3675782084465027], [0.47519177198410034, 0.36241987347602844], [0.47457659244537354, 0.3566586375236511], [0.47402423620224, 0.35146844387054443], [0.47339749336242676, 0.34563755989074707], [0.4728339910507202, 0.3404121994972229], [0.4721813201904297, 0.3345370888710022], [0.47159892320632935, 0.3293067216873169], [0.4709314703941345, 0.32339733839035034], [0.4703395366668701, 0.31813913583755493], [0.46965640783309937, 0.3121703863143921], [0.46904754638671875, 0.30681896209716797], [0.46833324432373047, 0.3007162809371948], [0.48558372259140015, 0.42135894298553467], [0.48514801263809204, 0.41716936230659485], [0.48469680547714233, 0.41235339641571045], [0.4842974543571472, 0.40782463550567627], [0.48381274938583374, 0.4026711881160736], [0.4833364486694336, 0.39778417348861694], [0.48283231258392334, 0.39260876178741455], [0.48238956928253174, 0.38760584592819214], [0.48193299770355225, 0.38241344690322876], [0.481486976146698, 0.37734776735305786], [0.4810370206832886, 0.37216413021087646], [0.48061513900756836, 0.3671993017196655], [0.4801340103149414, 
0.3616456091403961], [0.4796712398529053, 0.35625600814819336], [0.4791833162307739, 0.3506474196910858], [0.478718101978302, 0.34522706270217896], [0.47822141647338867, 0.3395311236381531], [0.4777485132217407, 0.3340979814529419], [0.4772367477416992, 0.32835662364959717], [0.4767533540725708, 0.322940468788147], [0.47622525691986084, 0.31712374091148376], [0.4757333993911743, 0.31169408559799194], [0.47518348693847656, 0.3057386875152588], [0.474675714969635, 0.300213098526001], [0.4893200993537903, 0.4210454225540161], [0.48898202180862427, 0.4166373014450073], [0.48862773180007935, 0.4120767116546631], [0.48824894428253174, 0.4072888493537903], [0.48785877227783203, 0.4023643732070923], [0.48743516206741333, 0.3971880376338959], [0.4870542883872986, 0.3923172950744629], [0.48671579360961914, 0.38708674907684326], [0.4864288568496704, 0.3821522891521454], [0.48610013723373413, 0.3767666220664978], [0.48580509424209595, 0.3719065189361572], [0.4854522943496704, 0.36655962467193604], [0.4851096272468567, 0.36137568950653076], [0.48473823070526123, 0.3555925190448761], [0.4844105839729309, 0.3503763675689697], [0.4840289354324341, 0.3445125222206116], [0.48368632793426514, 0.3392527997493744], [0.48329824209213257, 0.3333333134651184], [0.4829528331756592, 0.3280636668205261], [0.4825509786605835, 0.32211506366729736], [0.4821934700012207, 0.3168187737464905], [0.4817805290222168, 0.3108145296573639], [0.4814104437828064, 0.30542027950286865], [0.48098158836364746, 0.2992761433124542], [0.4929419755935669, 0.4207039773464203], [0.4927555322647095, 0.4164399206638336], [0.4924736022949219, 0.4116307497024536], [0.4921831488609314, 0.40705984830856323], [0.49188172817230225, 0.4019118845462799], [0.49161505699157715, 0.3969655930995941], [0.4913520812988281, 0.39185768365859985], [0.4911198616027832, 0.3869553804397583], [0.49088555574417114, 0.3817175030708313], [0.4907150864601135, 0.3766854405403137], [0.4905385971069336, 0.37143659591674805], [0.4903179407119751, 0.3663942515850067], [0.49005603790283203, 0.3608500361442566], [0.4898252487182617, 0.35547077655792236], [0.4895840883255005, 0.34982573986053467], [0.4893519878387451, 0.3443792462348938], [0.489102840423584, 0.3386614918708801], [0.48886632919311523, 0.3332033157348633], [0.48861074447631836, 0.32742685079574585], [0.48836928606033325, 0.32197827100753784], [0.488103985786438, 0.316131591796875], [0.4878547191619873, 0.3106732964515686], [0.48757457733154297, 0.3046826124191284], [0.48731833696365356, 0.2991379499435425], [0.4964700937271118, 0.4206721782684326], [0.4963606595993042, 0.4161683917045593], [0.4962362051010132, 0.4115258455276489], [0.4960889220237732, 0.4067142605781555], [0.49593842029571533, 0.4018321633338928], [0.4958034157752991, 0.3966350555419922], [0.495691180229187, 0.39180731773376465], [0.4955161213874817, 0.3865658938884735], [0.49537062644958496, 0.3817105293273926], [0.49533241987228394, 0.37645477056503296], [0.49529892206192017, 0.3714432120323181], [0.49516773223876953, 0.3659796118736267], [0.49503493309020996, 0.36083489656448364], [0.4949079751968384, 0.35507476329803467], [0.49479806423187256, 0.3498309552669525], [0.49467307329177856, 0.3439483344554901], [0.49456095695495605, 0.3386748433113098], [0.49442994594573975, 0.3327368497848511], [0.4943135976791382, 0.32744699716567993], [0.4941810369491577, 0.32147639989852905], [0.494062602519989, 0.3161580562591553], [0.49392223358154297, 0.3101283311843872], [0.4937962293624878, 0.30471593141555786], [0.4936547875404358, 
0.2985583245754242], [0.5, 0.4205237627029419], [0.5, 0.41621729731559753], [0.5, 0.411348432302475], [0.5, 0.40675634145736694], [0.5, 0.4016290009021759], [0.5, 0.3967132270336151], [0.5, 0.39159807562828064], [0.5, 0.38660475611686707], [0.5, 0.3814938962459564], [0.5, 0.37663522362709045], [0.5, 0.3712173402309418], [0.5, 0.36607545614242554], [0.5, 0.36057406663894653], [0.5, 0.355209618806839], [0.5, 0.34955310821533203], [0.5, 0.3441009819507599], [0.5, 0.33837297558784485], [0.5, 0.3329021632671356], [0.5, 0.32711946964263916], [0.5, 0.3216616213321686], [0.5, 0.3157995939254761], [0.5, 0.3103272020816803], [0.5, 0.30432990193367004], [0.5, 0.2987808585166931], [0.5035299062728882, 0.4206721782684326], [0.5036393404006958, 0.4161683917045593], [0.5037637948989868, 0.4115258455276489], [0.5039110779762268, 0.4067142605781555], [0.5040615797042847, 0.4018321633338928], [0.5041965842247009, 0.3966350555419922], [0.504308819770813, 0.39180731773376465], [0.5044838786125183, 0.3865658938884735], [0.504629373550415, 0.3817105293273926], [0.5046675801277161, 0.37645477056503296], [0.5047010779380798, 0.3714432120323181], [0.5048322677612305, 0.3659796118736267], [0.50496506690979, 0.36083489656448364], [0.5050920248031616, 0.35507476329803467], [0.5052019357681274, 0.3498309552669525], [0.5053269267082214, 0.3439483344554901], [0.505439043045044, 0.3386748433113098], [0.5055700540542603, 0.3327368497848511], [0.5056864023208618, 0.32744699716567993], [0.5058189630508423, 0.32147639989852905], [0.505937397480011, 0.3161580562591553], [0.506077766418457, 0.3101283311843872], [0.5062037706375122, 0.30471593141555786], [0.5063452124595642, 0.2985583245754242], [0.5070580244064331, 0.4207039773464203], [0.5072444677352905, 0.4164399206638336], [0.5075263977050781, 0.4116307497024536], [0.5078168511390686, 0.40705984830856323], [0.5081182718276978, 0.4019118845462799], [0.5083849430084229, 0.3969655930995941], [0.5086479187011719, 0.39185768365859985], [0.5088801383972168, 0.3869553804397583], [0.5091144442558289, 0.3817175030708313], [0.5092849135398865, 0.3766854405403137], [0.5094614028930664, 0.37143659591674805], [0.5096820592880249, 0.3663942515850067], [0.509943962097168, 0.3608500361442566], [0.5101747512817383, 0.35547077655792236], [0.5104159116744995, 0.34982573986053467], [0.5106480121612549, 0.3443792462348938], [0.510897159576416, 0.3386614918708801], [0.5111336708068848, 0.3332033157348633], [0.5113892555236816, 0.32742685079574585], [0.5116307139396667, 0.32197827100753784], [0.511896014213562, 0.316131591796875], [0.5121452808380127, 0.3106732964515686], [0.512425422668457, 0.3046826124191284], [0.5126816630363464, 0.2991379499435425], [0.5106799006462097, 0.4210454225540161], [0.5110179781913757, 0.4166373014450073], [0.5113722681999207, 0.4120767116546631], [0.5117510557174683, 0.4072888493537903], [0.512141227722168, 0.4023643732070923], [0.5125648379325867, 0.3971880376338959], [0.5129457116127014, 0.3923172950744629], [0.5132842063903809, 0.38708674907684326], [0.5135711431503296, 0.3821522891521454], [0.5138998627662659, 0.3767666220664978], [0.514194905757904, 0.3719065189361572], [0.5145477056503296, 0.36655962467193604], [0.5148903727531433, 0.36137568950653076], [0.5152617692947388, 0.3555925190448761], [0.5155894160270691, 0.3503763675689697], [0.5159710645675659, 0.3445125222206116], [0.5163136720657349, 0.3392527997493744], [0.5167017579078674, 0.3333333134651184], [0.5170471668243408, 0.3280636668205261], [0.5174490213394165, 0.32211506366729736], 
[0.5178065299987793, 0.3168187737464905], [0.5182194709777832, 0.3108145296573639], [0.5185895562171936, 0.30542027950286865], [0.5190184116363525, 0.2992761433124542], [0.5144162774085999, 0.42135894298553467], [0.514851987361908, 0.41716936230659485], [0.5153031945228577, 0.41235339641571045], [0.5157025456428528, 0.40782463550567627], [0.5161872506141663, 0.4026711881160736], [0.5166635513305664, 0.39778417348861694], [0.5171676874160767, 0.39260876178741455], [0.5176104307174683, 0.38760584592819214], [0.5180670022964478, 0.38241344690322876], [0.518513023853302, 0.37734776735305786], [0.5189629793167114, 0.37216413021087646], [0.5193848609924316, 0.3671993017196655], [0.5198659896850586, 0.3616456091403961], [0.5203287601470947, 0.35625600814819336], [0.5208166837692261, 0.3506474196910858], [0.521281898021698, 0.34522706270217896], [0.5217785835266113, 0.3395311236381531], [0.5222514867782593, 0.3340979814529419], [0.5227632522583008, 0.32835662364959717], [0.5232466459274292, 0.322940468788147], [0.5237747430801392, 0.31712374091148376], [0.5242666006088257, 0.31169408559799194], [0.5248165130615234, 0.3057386875152588], [0.525324285030365, 0.300213098526001], [0.5180962681770325, 0.42198923230171204], [0.5186273455619812, 0.41755515336990356], [0.5191495418548584, 0.41294124722480774], [0.5196807384490967, 0.40818798542022705], [0.5202124118804932, 0.4033118486404419], [0.5208001136779785, 0.3981984853744507], [0.5213590860366821, 0.39327922463417053], [0.521966278553009, 0.38799262046813965], [0.5225179195404053, 0.38312435150146484], [0.5231427550315857, 0.3777654767036438], [0.5237005949020386, 0.3729155957698822], [0.5242785811424255, 0.3675782084465027], [0.5248082280158997, 0.36241987347602844], [0.5254234075546265, 0.3566586375236511], [0.52597576379776, 0.35146844387054443], [0.5266025066375732, 0.34563755989074707], [0.5271660089492798, 0.3404121994972229], [0.5278186798095703, 0.3345370888710022], [0.5284010767936707, 0.3293067216873169], [0.5290685296058655, 0.32339733839035034], [0.5296604633331299, 0.31813913583755493], [0.5303435921669006, 0.3121703863143921], [0.5309524536132812, 0.30681896209716797], [0.5316667556762695, 0.3007162809371948], [0.5218017101287842, 0.4224843382835388], [0.5223802924156189, 0.41823893785476685], [0.5230370163917542, 0.4134192168712616], [0.5236362218856812, 0.40888920426368713], [0.5243197083473206, 0.4038068652153015], [0.5249610543251038, 0.39896512031555176], [0.525653600692749, 0.39378613233566284], [0.5263226628303528, 0.38881558179855347], [0.5270445346832275, 0.38367176055908203], [0.5277606248855591, 0.37868666648864746], [0.528526246547699, 0.3735048174858093], [0.5292021632194519, 0.3684442639350891], [0.5298689007759094, 0.3629087209701538], [0.5305063724517822, 0.35758543014526367], [0.5312161445617676, 0.3520090579986572], [0.5319054126739502, 0.34663158655166626], [0.5326492786407471, 0.3409930169582367], [0.53336501121521, 0.33561772108078003], [0.5341376066207886, 0.3299185633659363], [0.5348622798919678, 0.3245493471622467], [0.5356533527374268, 0.3187790513038635], [0.5363858938217163, 0.31339186429977417], [0.5372047424316406, 0.30748701095581055], [0.5379562377929688, 0.3020173907279968], [0.5254948139190674, 0.4232743978500366], [0.5262112617492676, 0.418811559677124], [0.5269159078598022, 0.4142228066921234], [0.5276490449905396, 0.4094621241092682], [0.5283700823783875, 0.40464580059051514], [0.5291352868080139, 0.39954131841659546], [0.5298648476600647, 0.3946740925312042], [0.530699610710144, 
0.38947224617004395], [0.5314775705337524, 0.38465553522109985], [0.5323430895805359, 0.37939658761024475], [0.5331228971481323, 0.3745325207710266], [0.5339584350585938, 0.3690441846847534], [0.5347156524658203, 0.3638942539691925], [0.5355829000473022, 0.3582521378993988], [0.5363698601722717, 0.353111207485199], [0.5372588038444519, 0.34733372926712036], [0.5380483865737915, 0.34218376874923706], [0.5389608144760132, 0.3363717794418335], [0.5397730469703674, 0.33119189739227295], [0.5407071709632874, 0.3253359794616699], [0.5415313243865967, 0.32012563943862915], [0.5424809455871582, 0.31420737504959106], [0.5433247089385986, 0.30891236662864685], [0.544303297996521, 0.3028649687767029], [0.5292826890945435, 0.4239485263824463], [0.5300224423408508, 0.4196999669075012], [0.5308790802955627, 0.41490837931632996], [0.5316550731658936, 0.41037172079086304], [0.5325091481208801, 0.40532147884368896], [0.533321738243103, 0.4004965126514435], [0.5342175364494324, 0.39541855454444885], [0.5350919961929321, 0.3905637264251709], [0.5360416769981384, 0.38546285033226013], [0.536933422088623, 0.3805369734764099], [0.5378512740135193, 0.3753007650375366], [0.538729190826416, 0.3701690435409546], [0.5397316217422485, 0.364751398563385], [0.5407013893127441, 0.359510600566864], [0.5416783094406128, 0.3539430499076843], [0.5425902009010315, 0.3486362099647522], [0.5435827374458313, 0.3430789113044739], [0.5445335507392883, 0.33777520060539246], [0.5455594062805176, 0.33213359117507935], [0.5465171337127686, 0.32682162523269653], [0.5475612878799438, 0.32109957933425903], [0.5485219955444336, 0.3157655596733093], [0.5495918989181519, 0.3099246025085449], [0.55057692527771, 0.30451035499572754], [0.5330313444137573, 0.4248952269554138], [0.533884584903717, 0.42045754194259644], [0.5347410440444946, 0.41589224338531494], [0.5356423258781433, 0.4111095368862152], [0.5365296602249146, 0.40633535385131836], [0.5375165939331055, 0.40131038427352905], [0.5384559631347656, 0.39654281735420227], [0.5394759774208069, 0.3914383053779602], [0.540420651435852, 0.3866555094718933], [0.5414661169052124, 0.38141578435897827], [0.5424272418022156, 0.3765037953853607], [0.5435203909873962, 0.37109601497650146], [0.5445287227630615, 0.36610764265060425], [0.5456783175468445, 0.36047470569610596], [0.5467326045036316, 0.35532939434051514], [0.5479204654693604, 0.34966224431991577], [0.5489673614501953, 0.3445979356765747], [0.5501540899276733, 0.33884525299072266], [0.5511915683746338, 0.3337271213531494], [0.5523771047592163, 0.32792389392852783], [0.5534142255783081, 0.32276201248168945], [0.554607629776001, 0.3168973922729492], [0.5556608438491821, 0.3116706609725952], [0.5569046139717102, 0.305716872215271], [0.5369072556495667, 0.42572253942489624], [0.5377587676048279, 0.4215092062950134], [0.5387429594993591, 0.4167312979698181], [0.5396602749824524, 0.41219669580459595], [0.540712833404541, 0.40724149346351624], [0.5417288541793823, 0.4025349020957947], [0.5428436994552612, 0.3975212872028351], [0.5438886284828186, 0.3927217721939087], [0.544987678527832, 0.3876422047615051], [0.546028196811676, 0.3827419877052307], [0.5471756458282471, 0.37754303216934204], [0.5482803583145142, 0.37252819538116455], [0.5494732856750488, 0.3671978712081909], [0.5506500601768494, 0.36201733350753784], [0.551975429058075, 0.3565829396247864], [0.5532500147819519, 0.35136058926582336], [0.5545526146888733, 0.3458499014377594], [0.5557513236999512, 0.340575635433197], [0.5570259094238281, 0.3350062072277069], [0.5582114458084106, 
0.32972097396850586], [0.5594732165336609, 0.32403701543807983], [0.5606249570846558, 0.3187524378299713], [0.5619173645973206, 0.3130131959915161], [0.5631195306777954, 0.30772286653518677], [0.5407319068908691, 0.4268324077129364], [0.5417070388793945, 0.42240697145462036], [0.5426870584487915, 0.41788381338119507], [0.5437567830085754, 0.413147896528244], [0.5448057651519775, 0.40849509835243225], [0.545956015586853, 0.40356552600860596], [0.5470513105392456, 0.39884746074676514], [0.5482585430145264, 0.3937848210334778], [0.5493832230567932, 0.3890169858932495], [0.5506225824356079, 0.3838490843772888], [0.5517698526382446, 0.37902572751045227], [0.5530604720115662, 0.3736920952796936], [0.5542489886283875, 0.3687692880630493], [0.5556240677833557, 0.36329296231269836], [0.5568768978118896, 0.3583133816719055], [0.5583236217498779, 0.35268616676330566], [0.5596237778663635, 0.34759193658828735], [0.5610922574996948, 0.341924786567688], [0.5623925924301147, 0.33685559034347534], [0.5638772249221802, 0.3311261236667633], [0.5651935338973999, 0.3259808421134949], [0.5666567087173462, 0.32017940282821655], [0.5679161548614502, 0.31508293747901917], [0.5693910717964172, 0.3092525899410248], [0.5446664690971375, 0.4278125464916229], [0.5456441640853882, 0.423624724149704], [0.5467890501022339, 0.4189281463623047], [0.5478706359863281, 0.414468914270401], [0.5490792393684387, 0.409601628780365], [0.5502164363861084, 0.40497082471847534], [0.5514684915542603, 0.4000129699707031], [0.5526649355888367, 0.3952862024307251], [0.5539811849594116, 0.39024853706359863], [0.5552240610122681, 0.38543570041656494], [0.5565688610076904, 0.38031262159347534], [0.5578551888465881, 0.37536656856536865], [0.5592486262321472, 0.37013256549835205], [0.5605818629264832, 0.3650834560394287], [0.5620167255401611, 0.35973167419433594], [0.5634026527404785, 0.3545692563056946], [0.5649328231811523, 0.34910666942596436], [0.5663900971412659, 0.34393975138664246], [0.5679643750190735, 0.3384424149990082], [0.5694425702095032, 0.3332756757736206], [0.5710890889167786, 0.3276745080947876], [0.572597086429596, 0.32240286469459534], [0.5741512775421143, 0.3167358338832855], [0.5755233764648438, 0.3115251958370209], [0.5486397743225098, 0.4291267991065979], [0.5497676134109497, 0.4247363209724426], [0.5508797764778137, 0.4203181266784668], [0.5521008968353271, 0.4156397581100464], [0.553286075592041, 0.41105929017066956], [0.554580807685852, 0.40619200468063354], [0.555807888507843, 0.4015626907348633], [0.5571697354316711, 0.39658433198928833], [0.5584456920623779, 0.3918892443180084], [0.5598641633987427, 0.38677704334259033], [0.5611710548400879, 0.38202399015426636], [0.5626578330993652, 0.3767794668674469], [0.5640174150466919, 0.37194329500198364], [0.5655616521835327, 0.36655229330062866], [0.5669631958007812, 0.3616417348384857], [0.5685852766036987, 0.35614609718322754], [0.5700526833534241, 0.35117337107658386], [0.5717328786849976, 0.34561043977737427], [0.5732429027557373, 0.3406144380569458], [0.574966549873352, 0.3350263833999634], [0.5765195488929749, 0.32997050881385803], [0.5783418416976929, 0.32422053813934326], [0.5799459218978882, 0.3190808892250061], [0.5816939473152161, 0.3133143186569214], [0.5527812242507935, 0.43032339215278625], [0.5538808703422546, 0.4261903762817383], [0.5551561117172241, 0.42157137393951416], [0.5563515424728394, 0.4171810448169708], [0.5577008724212646, 0.41237521171569824], [0.5589698553085327, 0.40782445669174194], [0.5603739619255066, 0.40296071767807007], 
[0.5617051124572754, 0.39832156896591187], [0.5631668567657471, 0.3933534026145935], [0.5645484924316406, 0.38859620690345764], [0.5660501718521118, 0.3835518956184387], [0.5674819350242615, 0.37870192527770996], [0.5690446496009827, 0.37353700399398804], [0.5705416798591614, 0.36857396364212036], [0.5721667408943176, 0.36332279443740845], [0.5737144947052002, 0.35829126834869385], [0.5754056572914124, 0.3529440760612488], [0.5770081281661987, 0.34787997603416443], [0.5787612795829773, 0.3424805998802185], [0.5803989768028259, 0.3374112844467163], [0.5822096467018127, 0.33191752433776855], [0.5839282870292664, 0.32679492235183716], [0.5859025716781616, 0.32118678092956543], [0.5876937508583069, 0.31595057249069214], [0.5569285750389099, 0.43188461661338806], [0.5582013130187988, 0.42751288414001465], [0.5594171285629272, 0.4231855571269989], [0.5607643127441406, 0.41854918003082275], [0.5620465874671936, 0.4140513837337494], [0.563464879989624, 0.40925443172454834], [0.5647953748703003, 0.40471673011779785], [0.5662835240364075, 0.39981573820114136], [0.5676776170730591, 0.3951798975467682], [0.5692355036735535, 0.39013946056365967], [0.5706708431243896, 0.38546422123908997], [0.572292685508728, 0.3803042769432068], [0.5737830400466919, 0.3755452036857605], [0.5754971504211426, 0.37028229236602783], [0.5770638585090637, 0.3654508590698242], [0.5788379907608032, 0.36006665229797363], [0.5804393887519836, 0.3551945388317108], [0.5822843313217163, 0.3497665226459503], [0.5839483141899109, 0.3448628783226013], [0.5858474969863892, 0.3393808603286743], [0.5875639915466309, 0.33444133400917053], [0.5895354151725769, 0.3289058804512024], [0.591329038143158, 0.32388055324554443], [0.593464195728302, 0.31814223527908325], [0.5612775683403015, 0.4333520531654358], [0.5625292062759399, 0.4292065501213074], [0.5639182329177856, 0.4246559143066406], [0.5652064085006714, 0.4202979803085327], [0.5666582584381104, 0.4155857264995575], [0.5680209994316101, 0.41109251976013184], [0.5695043206214905, 0.4062929153442383], [0.5708997249603271, 0.40173426270484924], [0.5724663734436035, 0.396828830242157], [0.573951244354248, 0.3921487331390381], [0.5755701065063477, 0.3871755599975586], [0.577114999294281, 0.38240522146224976], [0.5788115859031677, 0.37735122442245483], [0.5804300308227539, 0.3725194036960602], [0.5822145342826843, 0.36733612418174744], [0.5839048624038696, 0.36239635944366455], [0.585735023021698, 0.3571639657020569], [0.5874595046043396, 0.3522298336029053], [0.5893720388412476, 0.34694191813468933], [0.5911710262298584, 0.3419853448867798], [0.5931558609008789, 0.3366278409957886], [0.5949909687042236, 0.3316313624382019], [0.5970419049263, 0.3261515498161316], [0.5989870429039001, 0.3210906982421875], [0.5655068755149841, 0.43512919545173645], [0.5669300556182861, 0.4307337999343872], [0.5682604908943176, 0.4264284372329712], [0.5697112083435059, 0.4218292832374573], [0.57106614112854, 0.417421817779541], [0.5725799798965454, 0.4126662313938141], [0.573989987373352, 0.40818288922309875], [0.5755530595779419, 0.4033718407154083], [0.5770201086997986, 0.3988172113895416], [0.5786772966384888, 0.3938538432121277], [0.5802042484283447, 0.3892463743686676], [0.5819441080093384, 0.38420408964157104], [0.5835439562797546, 0.3795393705368042], [0.5853575468063354, 0.3743894100189209], [0.5870287418365479, 0.369625449180603], [0.5889485478401184, 0.3643694519996643], [0.5906961560249329, 0.359585165977478], [0.5926611423492432, 0.3542829751968384], [0.5944302678108215, 0.3494988679885864], 
[0.59647536277771, 0.34415483474731445], [0.5983290672302246, 0.3393019139766693], [0.6004432439804077, 0.3338719606399536], [0.602357029914856, 0.328968346118927], [0.6045500636100769, 0.32349804043769836], [0.5700309872627258, 0.43679380416870117], [0.5713706016540527, 0.43261605501174927], [0.5728660225868225, 0.42807912826538086], [0.5742476582527161, 0.4237361252307892], [0.5757627487182617, 0.41909289360046387], [0.5771876573562622, 0.4146456718444824], [0.5787633061408997, 0.4099189043045044], [0.5802398920059204, 0.405434250831604], [0.5818775296211243, 0.40062078833580017], [0.5834327936172485, 0.3960185647010803], [0.5851486325263977, 0.39112982153892517], [0.5867644548416138, 0.3864665925502777], [0.5885441899299622, 0.381484717130661], [0.5902334451675415, 0.37673962116241455], [0.5921102166175842, 0.3716656565666199], [0.5938947200775146, 0.3668721616268158], [0.5958788394927979, 0.3617447018623352], [0.5977492928504944, 0.356894314289093], [0.599764347076416, 0.3517358899116516], [0.6016460657119751, 0.3468944728374481], [0.6037532091140747, 0.3416316509246826], [0.6057173013687134, 0.33673614263534546], [0.607906699180603, 0.3314109146595001], [0.6099175214767456, 0.3264698386192322], [0.5743212699890137, 0.43867602944374084], [0.5758129358291626, 0.4342436194419861], [0.5771929621696472, 0.42995890974998474], [0.5787301063537598, 0.42538052797317505], [0.5801612138748169, 0.42101502418518066], [0.5817474126815796, 0.4163373112678528], [0.5832222700119019, 0.41192516684532166], [0.5848636627197266, 0.4071793854236603], [0.5863933563232422, 0.40270012617111206], [0.5881258249282837, 0.39785659313201904], [0.5897363424301147, 0.3933109641075134], [0.5915340185165405, 0.3883432149887085], [0.5931872129440308, 0.3837546110153198], [0.5950872898101807, 0.37873560190200806], [0.5968494415283203, 0.3740807771682739], [0.598839521408081, 0.3689681589603424], [0.60066157579422, 0.3642820417881012], [0.6027616858482361, 0.35908931493759155], [0.6046639084815979, 0.35437583923339844], [0.6068000793457031, 0.3491666316986084], [0.6087495684623718, 0.3444095551967621], [0.6109774112701416, 0.3391181528568268], [0.6129977703094482, 0.33431845903396606], [0.6153112649917603, 0.3289209008216858], [0.5788888931274414, 0.440427303314209], [0.5802860260009766, 0.4361942410469055], [0.5818203687667847, 0.431701123714447], [0.5832505226135254, 0.4273832440376282], [0.5848307609558105, 0.4227876663208008], [0.5863142013549805, 0.41840922832489014], [0.5879573225975037, 0.4137696623802185], [0.5895050764083862, 0.40933412313461304], [0.5912066102027893, 0.40460020303726196], [0.5928009152412415, 0.400107204914093], [0.5945817232131958, 0.3952672481536865], [0.5962648987770081, 0.39066463708877563], [0.5981131196022034, 0.3858112394809723], [0.5998703837394714, 0.3812105059623718], [0.6018425226211548, 0.3762412667274475], [0.6036970615386963, 0.3715507984161377], [0.6057313680648804, 0.36651867628097534], [0.6076421141624451, 0.3617933988571167], [0.6097788214683533, 0.3567075729370117], [0.6117687225341797, 0.35197877883911133], [0.6139820218086243, 0.3468436002731323], [0.6160387992858887, 0.3420693576335907], [0.6183359622955322, 0.336838960647583], [0.6204667687416077, 0.332000732421875], [0.5830476880073547, 0.44228237867355347], [0.5845928192138672, 0.43785709142684937], [0.5860366225242615, 0.43359947204589844], [0.5876249074935913, 0.4290674328804016], [0.5891057848930359, 0.4247550964355469], [0.5907577872276306, 0.42016977071762085], [0.5922965407371521, 0.41583335399627686], 
[0.5940234661102295, 0.41116511821746826], [0.5956339836120605, 0.40674787759780884], [0.5974137187004089, 0.4019893705844879], [0.5990722179412842, 0.3975095748901367], [0.600957453250885, 0.3926575183868408], [0.6026865839958191, 0.3881847560405731], [0.6046364903450012, 0.3832995593547821], [0.6064562797546387, 0.37873417139053345], [0.6085278987884521, 0.37373751401901245], [0.6104393005371094, 0.36913183331489563], [0.6125916242599487, 0.36409157514572144], [0.6145658493041992, 0.35948002338409424], [0.6168121099472046, 0.3543926477432251], [0.6188632845878601, 0.349748432636261], [0.6211971640586853, 0.344583123922348], [0.623325765132904, 0.3398914933204651], [0.625766932964325, 0.3346553444862366], [0.5874894261360168, 0.44400548934936523], [0.5889011025428772, 0.43982458114624023], [0.5904936790466309, 0.4353637099266052], [0.5919877886772156, 0.43109434843063354], [0.5936396718025208, 0.42659494280815125], [0.5951899290084839, 0.42229753732681274], [0.5968966484069824, 0.4177336096763611], [0.5985015034675598, 0.41338208317756653], [0.600297212600708, 0.40872445702552795], [0.6019823551177979, 0.4042847752571106], [0.6038335561752319, 0.39956796169281006], [0.6055821180343628, 0.39508718252182007], [0.6075065732002258, 0.39032286405563354], [0.6093090772628784, 0.3858179450035095], [0.6113201379776001, 0.3809559941291809], [0.6132200956344604, 0.37638959288597107], [0.6153574585914612, 0.37147825956344604], [0.6173635721206665, 0.3668895363807678], [0.6195888519287109, 0.36194002628326416], [0.6216598749160767, 0.3573310077190399], [0.6239750385284424, 0.3523105978965759], [0.6261270642280579, 0.34766095876693726], [0.6285432577133179, 0.3425866961479187], [0.6307879090309143, 0.33790868520736694], [0.5915905237197876, 0.44585710763931274], [0.5931313037872314, 0.44149404764175415], [0.5945866107940674, 0.43728065490722656], [0.5962210893630981, 0.4328312277793884], [0.5977546572685242, 0.4285944104194641], [0.5994613170623779, 0.4240965247154236], [0.6010656356811523, 0.41981106996536255], [0.6028433442115784, 0.4152553081512451], [0.604518711566925, 0.410902202129364], [0.606395423412323, 0.40623602271080017], [0.6081446409225464, 0.401857852935791], [0.6100991368293762, 0.3971434235572815], [0.6119122505187988, 0.39273685216903687], [0.6139430403709412, 0.38796958327293396], [0.6158357262611389, 0.3835170269012451], [0.6179679036140442, 0.37868669629096985], [0.6199471950531006, 0.3742125332355499], [0.6221930384635925, 0.3693189322948456], [0.6242636442184448, 0.3648207485675812], [0.6265990734100342, 0.359866201877594], [0.628738284111023, 0.3553374409675598], [0.6311726570129395, 0.35032951831817627], [0.6333944797515869, 0.34578412771224976], [0.6359397768974304, 0.34072819352149963], [0.5958878397941589, 0.44756019115448], [0.5973144769668579, 0.4434243440628052], [0.5989144444465637, 0.4390560984611511], [0.6004246473312378, 0.43486812710762024], [0.6021082997322083, 0.4304444193840027], [0.6036942005157471, 0.42623063921928406], [0.6054672002792358, 0.421739786863327], [0.6071247458457947, 0.417481005191803], [0.608982264995575, 0.41290736198425293], [0.6107276678085327, 0.40857917070388794], [0.6126722097396851, 0.40396827459335327], [0.6145049929618835, 0.3996153771877289], [0.6165426969528198, 0.3949539065361023], [0.6184433102607727, 0.39057236909866333], [0.6205657124519348, 0.38585513830184937], [0.6225491166114807, 0.3814396262168884], [0.6247624754905701, 0.37667036056518555], [0.6268300414085388, 0.3722296953201294], [0.629155695438385, 0.3674032688140869], 
[0.631320595741272, 0.3629089593887329], [0.6337298154830933, 0.3580412268638611], [0.6359688639640808, 0.35353684425354004], [0.6384814977645874, 0.34862470626831055], [0.6408063173294067, 0.34410667419433594], [0.5998958945274353, 0.4494101107120514], [0.601463794708252, 0.4451161026954651], [0.6029387712478638, 0.4409973621368408], [0.6045967936515808, 0.43664371967315674], [0.6061592698097229, 0.43248945474624634], [0.6079078316688538, 0.42810118198394775], [0.6095670461654663, 0.42389750480651855], [0.6114081740379333, 0.41943830251693726], [0.6131492853164673, 0.4151783883571625], [0.6150887608528137, 0.41064125299453735], [0.6169041991233826, 0.4063759446144104], [0.6189430356025696, 0.40180522203445435], [0.6208542585372925, 0.3974955677986145], [0.6229768991470337, 0.3928564190864563], [0.6249494552612305, 0.3885318636894226], [0.6271655559539795, 0.3838370442390442], [0.6292200684547424, 0.37948891520500183], [0.631533145904541, 0.374763548374176], [0.6336827278137207, 0.3703767955303192], [0.6361151933670044, 0.36556363105773926], [0.6383410692214966, 0.3611702620983124], [0.6408642530441284, 0.3563287854194641], [0.643171489238739, 0.3519250154495239], [0.6457971930503845, 0.34703582525253296], [0.6041008234024048, 0.4511319696903229], [0.605553388595581, 0.4470764994621277], [0.6071768999099731, 0.44279760122299194], [0.6087063550949097, 0.4387129843235016], [0.6104292273521423, 0.43438971042633057], [0.6120461225509644, 0.4302927255630493], [0.6138784885406494, 0.4259040057659149], [0.6155984997749329, 0.4217451214790344], [0.61753249168396, 0.41729211807250977], [0.6193464398384094, 0.4130789637565613], [0.6213580965995789, 0.408601313829422], [0.6232467889785767, 0.40438610315322876], [0.625375509262085, 0.3998293876647949], [0.6273564100265503, 0.39555656909942627], [0.629554033279419, 0.3909762501716614], [0.631611704826355, 0.38667672872543335], [0.6339016556739807, 0.3820575475692749], [0.6360363364219666, 0.3777517080307007], [0.6384344696998596, 0.3730619251728058], [0.6406731009483337, 0.3687018156051636], [0.6431719064712524, 0.36399316787719727], [0.645495593547821, 0.35963335633277893], [0.648093581199646, 0.3548809587955475], [0.6504994630813599, 0.35049790143966675], [0.6080873012542725, 0.45303547382354736], [0.6096738576889038, 0.44881194829940796], [0.6111629605293274, 0.44479212164878845], [0.6128488779067993, 0.44056379795074463], [0.6144466400146484, 0.4365106523036957], [0.6162276864051819, 0.4322468638420105], [0.6179226040840149, 0.42814311385154724], [0.6198165416717529, 0.4238032400608063], [0.6216137409210205, 0.41965097188949585], [0.6236155033111572, 0.4152340292930603], [0.6254857182502747, 0.4110841453075409], [0.627569854259491, 0.4066507816314697], [0.6295415163040161, 0.4024354815483093], [0.6317394375801086, 0.39792221784591675], [0.6337831020355225, 0.3937108516693115], [0.6360751390457153, 0.38916683197021484], [0.6382017135620117, 0.3849419355392456], [0.6405811905860901, 0.38035649061203003], [0.6427955031394958, 0.37609434127807617], [0.6452957391738892, 0.3714478015899658], [0.6475939750671387, 0.3671893775463104], [0.6501924991607666, 0.362504243850708], [0.6525745391845703, 0.35822999477386475], [0.6552792191505432, 0.3535032570362091], [0.6122616529464722, 0.45478421449661255], [0.6137200593948364, 0.450810045003891], [0.6153620481491089, 0.4466540813446045], [0.6169056296348572, 0.44269680976867676], [0.6186643838882446, 0.43849021196365356], [0.6203120946884155, 0.43450045585632324], [0.6221784949302673, 0.43022704124450684], 
[0.6239281892776489, 0.4261818528175354], [0.6259060502052307, 0.4218445420265198], [0.627767026424408, 0.417738676071167], [0.629827618598938, 0.4133768379688263], [0.631750226020813, 0.4092681109905243], [0.6339222192764282, 0.4048393964767456], [0.6359621286392212, 0.40068185329437256], [0.6382322311401367, 0.39622950553894043], [0.6403461694717407, 0.39206168055534363], [0.6427091956138611, 0.38758355379104614], [0.6449177861213684, 0.3833991289138794], [0.6473861336708069, 0.37886226177215576], [0.649682879447937, 0.37464576959609985], [0.6522457599639893, 0.37008199095726013], [0.6546303629875183, 0.36585286259651184], [0.6572951674461365, 0.3612479567527771], [0.6597646474838257, 0.3570122718811035], [0.6161993741989136, 0.4566993713378906], [0.6178001761436462, 0.4526224136352539], [0.61931973695755, 0.4487072825431824], [0.6210100054740906, 0.4445984661579132], [0.6226072907447815, 0.4406532645225525], [0.6244080066680908, 0.4364975094795227], [0.6261210441589355, 0.432494193315506], [0.6280332803726196, 0.4282744824886322], [0.6298536062240601, 0.42423269152641296], [0.6318981051445007, 0.4199540615081787], [0.6338286399841309, 0.41589218378067017], [0.6359555721282959, 0.41157639026641846], [0.6379576325416565, 0.4074894189834595], [0.640205442905426, 0.40312471985816956], [0.6423147916793823, 0.399016797542572], [0.6446549296379089, 0.39460518956184387], [0.6468257308006287, 0.3905066251754761], [0.6492716073989868, 0.3860660791397095], [0.6515558362007141, 0.38192808628082275], [0.6541090607643127, 0.3774338960647583], [0.6564651727676392, 0.37329941987991333], [0.6591247320175171, 0.3687741160392761], [0.6615736484527588, 0.36462700366973877], [0.6643332839012146, 0.3600614666938782], [0.6203438639640808, 0.4584786295890808], [0.621772825717926, 0.4546578526496887], [0.6234322190284729, 0.4506263732910156], [0.6250082850456238, 0.4467516243457794], [0.6267624497413635, 0.44265592098236084], [0.6283960342407227, 0.4387498199939728], [0.6302555203437805, 0.43458858132362366], [0.6320098638534546, 0.4306548833847046], [0.6340029835700989, 0.4264470338821411], [0.6358786821365356, 0.4224861264228821], [0.6380110383033752, 0.4182426929473877], [0.6400073766708374, 0.4142296612262726], [0.6422141194343567, 0.40994518995285034], [0.6442756652832031, 0.4059164226055145], [0.6465975642204285, 0.40158823132514954], [0.6487714648246765, 0.3975279629230499], [0.6511696577072144, 0.39318543672561646], [0.6534094214439392, 0.3891269266605377], [0.6559327244758606, 0.3847368359565735], [0.658294141292572, 0.38064801692962646], [0.6609153747558594, 0.3762371242046356], [0.6633557081222534, 0.37214359641075134], [0.6660831570625305, 0.3676977753639221], [0.6686196327209473, 0.3635859191417694], [0.6241896748542786, 0.4603702425956726], [0.6257397532463074, 0.4564416706562042], [0.6272164583206177, 0.45265185832977295], [0.6289277076721191, 0.4486669898033142], [0.6305752992630005, 0.4447857141494751], [0.6323766708374023, 0.44073569774627686], [0.6340867280960083, 0.4368598163127899], [0.6360120177268982, 0.43278101086616516], [0.6378490924835205, 0.42886048555374146], [0.6398947238922119, 0.424738347530365], [0.641842782497406, 0.4207780361175537], [0.6440234780311584, 0.4165714383125305], [0.6460782289505005, 0.4125869870185852], [0.648341953754425, 0.4083567261695862], [0.6504722833633423, 0.4043644070625305], [0.6528664231300354, 0.400097131729126], [0.6551041603088379, 0.3961024284362793], [0.6575807332992554, 0.3918117880821228], [0.6598944664001465, 0.3878079056739807], 
[0.6625031232833862, 0.38347136974334717], [0.6649258732795715, 0.3794569969177246], [0.6676326394081116, 0.37508636713027954], [0.6701311469078064, 0.3710692524909973], [0.672953188419342, 0.36665740609169006], [0.6281777620315552, 0.46213746070861816], [0.6295995712280273, 0.45841461420059204], [0.6311969757080078, 0.45451635122299194], [0.6327162981033325, 0.4507932662963867], [0.6345158815383911, 0.4467920660972595], [0.6362149715423584, 0.44298672676086426], [0.638097882270813, 0.43898868560791016], [0.639868438243866, 0.4351717233657837], [0.6418737769126892, 0.4311111569404602], [0.6437585353851318, 0.4272567629814148], [0.6458808183670044, 0.42312300205230713], [0.6478784680366516, 0.4192110002040863], [0.6501225829124451, 0.4150417447090149], [0.6522268652915955, 0.41111475229263306], [0.6545617580413818, 0.40693849325180054], [0.6567593812942505, 0.4030150771141052], [0.6592268347740173, 0.3988049030303955], [0.661530077457428, 0.39486628770828247], [0.6640818119049072, 0.3906360864639282], [0.6664725542068481, 0.386685848236084], [0.6691515445709229, 0.3824153542518616], [0.671650767326355, 0.37844419479370117], [0.6744182109832764, 0.374148428440094], [0.6769987940788269, 0.37017107009887695], [0.6318651437759399, 0.4640020728111267], [0.6334242820739746, 0.46018505096435547], [0.6349092721939087, 0.4565122127532959], [0.6365664005279541, 0.45269525051116943], [0.638180136680603, 0.44894203543663025], [0.6400184631347656, 0.44501984119415283], [0.6417698860168457, 0.44124287366867065], [0.6436985731124878, 0.43728697299957275], [0.6455328464508057, 0.4334770441055298], [0.6475823521614075, 0.42947036027908325], [0.6495466232299805, 0.425607830286026], [0.651729166507721, 0.4215444326400757], [0.6538037061691284, 0.417672723531723], [0.6561135053634644, 0.41358065605163574], [0.6582889556884766, 0.40971818566322327], [0.6606931686401367, 0.4055958688259125], [0.6629531383514404, 0.4017147123813629], [0.6654787063598633, 0.39755499362945557], [0.6678380370140076, 0.39367517828941345], [0.6704649925231934, 0.3895009756088257], [0.6729281544685364, 0.3856040835380554], [0.6756834983825684, 0.3813791871070862], [0.6782398819923401, 0.37748563289642334], [0.6810967922210693, 0.3732404112815857], [0.6357250809669495, 0.4657391309738159], [0.6371244788169861, 0.4621541500091553], [0.6387414336204529, 0.45838063955307007], [0.6402596831321716, 0.4547805190086365], [0.6420136094093323, 0.4509442448616028], [0.6436841487884521, 0.4472653567790985], [0.6455888152122498, 0.4433487057685852], [0.647372305393219, 0.4396206736564636], [0.6493637561798096, 0.43567055463790894], [0.6512393951416016, 0.4319283366203308], [0.653377890586853, 0.42793235182762146], [0.6553975939750671, 0.4241555333137512], [0.657669723033905, 0.4201393127441406], [0.6598079800605774, 0.4163540303707123], [0.662194013595581, 0.41230201721191406], [0.6644227504730225, 0.40847986936569214], [0.6668902039527893, 0.4044051170349121], [0.6692124605178833, 0.40058207511901855], [0.6718001961708069, 0.39649176597595215], [0.6742222309112549, 0.3926713764667511], [0.6769262552261353, 0.38853752613067627], [0.6794552803039551, 0.38469740748405457], [0.6822735071182251, 0.3805501461029053], [0.6849029660224915, 0.3767104744911194], [0.6392456293106079, 0.4675447940826416], [0.6407747864723206, 0.4639061689376831], [0.6422588229179382, 0.46034467220306396], [0.6439215540885925, 0.456645667552948], [0.6455217599868774, 0.45304030179977417], [0.6473356485366821, 0.44926148653030396], [0.6490880250930786, 0.44556885957717896], 
[0.6510404348373413, 0.4417269825935364], [0.6529061794281006, 0.43802833557128906], [0.6549615859985352, 0.43416231870651245], [0.6569259166717529, 0.4304390549659729], [0.6591202020645142, 0.42651742696762085], [0.6612046957015991, 0.42278072237968445], [0.6635273098945618, 0.4188319742679596], [0.6657369136810303, 0.4150548577308655], [0.668175220489502, 0.41104745864868164], [0.6704597473144531, 0.4072904586791992], [0.672999918460846, 0.4032847285270691], [0.6753873229026794, 0.3995260000228882], [0.6780409812927246, 0.39549124240875244], [0.6805357933044434, 0.391715943813324], [0.68331378698349, 0.3876510262489319], [0.6859058141708374, 0.38388437032699585], [0.6888006925582886, 0.37978917360305786], [0.6428704261779785, 0.4692364037036896], [0.6442604064941406, 0.46582651138305664], [0.6458935737609863, 0.4621996581554413], [0.6474372148513794, 0.4587007761001587], [0.6491789817810059, 0.4549962282180786], [0.6508268713951111, 0.45144468545913696], [0.6527354717254639, 0.4476631283760071], [0.6545423269271851, 0.44406992197036743], [0.6565868854522705, 0.44025856256484985], [0.6585021615028381, 0.4366268515586853], [0.6606369614601135, 0.4327758848667145], [0.662654459476471, 0.42912518978118896], [0.6649246215820312, 0.4252488911151886], [0.66706383228302, 0.4215697944164276], [0.6694657206535339, 0.41762417554855347], [0.671722948551178, 0.4139080047607422], [0.6742143034934998, 0.40997958183288574], [0.6765573620796204, 0.40629470348358154], [0.6791712045669556, 0.40233951807022095], [0.6816200017929077, 0.3986397981643677], [0.6843494176864624, 0.3946607708930969], [0.6869089007377625, 0.3909595310688019], [0.6897575855255127, 0.3869571387767792], [0.6924182176589966, 0.3832402229309082], [0.646272599697113, 0.47105103731155396], [0.6477842330932617, 0.4675871729850769], [0.6492563486099243, 0.4641474485397339], [0.6509296894073486, 0.4605606198310852], [0.652546763420105, 0.45705968141555786], [0.6543524265289307, 0.45344266295433044], [0.6561062932014465, 0.4498904347419739], [0.6580623984336853, 0.4461958408355713], [0.6599428653717041, 0.4426020383834839], [0.6620303392410278, 0.4388458728790283], [0.6640166640281677, 0.4352424740791321], [0.6662119626998901, 0.43148189783096313], [0.6683114171028137, 0.4278550148010254], [0.6706357002258301, 0.4240272641181946], [0.6728578209877014, 0.4203535318374634], [0.6753122210502625, 0.4164853096008301], [0.6776201128959656, 0.41285645961761475], [0.6801761388778687, 0.40900248289108276], [0.6825953125953674, 0.40535640716552734], [0.6852692365646362, 0.4014585316181183], [0.6877799034118652, 0.39782094955444336], [0.690576434135437, 0.39391449093818665], [0.6932086944580078, 0.3902634084224701], [0.6961299180984497, 0.3863234519958496], [0.6498016715049744, 0.47276097536087036], [0.6511598229408264, 0.4694775640964508], [0.6527517437934875, 0.46595990657806396], [0.6542655825614929, 0.4625823199748993], [0.6560245752334595, 0.4590177536010742], [0.6576888561248779, 0.4556160271167755], [0.6596028208732605, 0.45198890566825867], [0.661413848400116, 0.44851192831993103], [0.6634612083435059, 0.4448140263557434], [0.6653899550437927, 0.44128990173339844], [0.6675496697425842, 0.43758225440979004], [0.6695840954780579, 0.43406790494918823], [0.671870231628418, 0.4302942752838135], [0.674011766910553, 0.4267176687717438], [0.6764161586761475, 0.4229118227958679], [0.6786891222000122, 0.419333815574646], [0.6812047958374023, 0.4155482053756714], [0.6835655570030212, 0.4119887351989746], [0.6862025856971741, 0.40816664695739746], 
[0.6886814832687378, 0.40458863973617554], [0.6914222836494446, 0.40075618028640747], [0.6939865350723267, 0.39718925952911377], [0.6968640089035034, 0.3933296799659729], [0.6995561718940735, 0.3897569477558136], [0.6530575156211853, 0.4745553731918335], [0.6545678973197937, 0.47120606899261475], [0.656029462814331, 0.4679020643234253], [0.6576956510543823, 0.46447890996932983], [0.6593029499053955, 0.4611246585845947], [0.6611077785491943, 0.45763787627220154], [0.6628561019897461, 0.45421624183654785], [0.6648210287094116, 0.4506534934043884], [0.6667213439941406, 0.4471641778945923], [0.6688135266304016, 0.44353580474853516], [0.6707998514175415, 0.44006019830703735], [0.6730022430419922, 0.43641963601112366], [0.6751251220703125, 0.43288177251815796], [0.6774661540985107, 0.4291924834251404], [0.6796993017196655, 0.42566490173339844], [0.6821637749671936, 0.42195528745651245], [0.6844963431358337, 0.4184415340423584], [0.6870642304420471, 0.4147094488143921], [0.689488410949707, 0.41118544340133667], [0.6921845078468323, 0.40744054317474365], [0.6947313547134399, 0.40391969680786133], [0.6975295543670654, 0.4001595973968506], [0.7001673579216003, 0.39664024114608765], [0.7031034827232361, 0.3928649425506592], [0.6563974618911743, 0.47627341747283936], [0.6578059792518616, 0.47310274839401245], [0.6594249606132507, 0.4697689414024353], [0.6609580516815186, 0.4665377140045166], [0.6627144813537598, 0.46311789751052856], [0.6643694043159485, 0.4598148465156555], [0.6662551760673523, 0.45631712675094604], [0.6680448651313782, 0.45296287536621094], [0.6701076030731201, 0.44939684867858887], [0.67206209897995, 0.44597992300987244], [0.6742138862609863, 0.4423915147781372], [0.6762374639511108, 0.4389716386795044], [0.6785353422164917, 0.43534988164901733], [0.6807172894477844, 0.43191856145858765], [0.6831518411636353, 0.42827287316322327], [0.685436487197876, 0.4248211979866028], [0.6879626512527466, 0.4211519956588745], [0.6903420090675354, 0.41768679022789], [0.6929712891578674, 0.4140143394470215], [0.6954491138458252, 0.4105684161186218], [0.6982149481773376, 0.406868040561676], [0.7008076906204224, 0.40342265367507935], [0.703680694103241, 0.3997231125831604], [0.7063758969306946, 0.39629170298576355], [0.6595171093940735, 0.4780893921852112], [0.6610500812530518, 0.4748716354370117], [0.6625267267227173, 0.47170478105545044], [0.6642016172409058, 0.46842673420906067], [0.6658262014389038, 0.4651801586151123], [0.6676409244537354, 0.4618188738822937], [0.6693899631500244, 0.45851725339889526], [0.6713379621505737, 0.45510417222976685], [0.67322838306427, 0.45174509286880493], [0.6753462553024292, 0.44824421405792236], [0.677370548248291, 0.4448598623275757], [0.6795758605003357, 0.4413522779941559], [0.6816823482513428, 0.4379715919494629], [0.6840285658836365, 0.4344269633293152], [0.6862818598747253, 0.43100500106811523], [0.6887506246566772, 0.4274120330810547], [0.6910938024520874, 0.4239972233772278], [0.6936774253845215, 0.42040932178497314], [0.6961252093315125, 0.4170146584510803], [0.6988149285316467, 0.41341912746429443], [0.7013704180717468, 0.4100182056427002], [0.7041893601417542, 0.4063962697982788], [0.7068407535552979, 0.4030165672302246], [0.7097703218460083, 0.3993896543979645], [0.6627663969993591, 0.4798094630241394], [0.6641422510147095, 0.47675877809524536], [0.6657495498657227, 0.473563015460968], [0.6672853827476501, 0.47046300768852234], [0.6690630912780762, 0.4671661853790283], [0.6707425117492676, 0.46398410201072693], [0.6726517677307129, 0.460632860660553], 
[0.674453616142273, 0.45741015672683716], [0.6765046715736389, 0.45398253202438354], [0.6784521341323853, 0.4506855607032776], [0.680645227432251, 0.4472331404685974], [0.6827183961868286, 0.44392329454421997], [0.684993326663971, 0.44044432044029236], [0.6871469020843506, 0.4371204078197479], [0.6895725727081299, 0.43358445167541504], [0.6918697357177734, 0.43022990226745605], [0.6944003701210022, 0.426695853471756], [0.6967853307723999, 0.42337697744369507], [0.6994429230690002, 0.4198376536369324], [0.701935350894928, 0.4165192246437073], [0.7047052383422852, 0.4129660725593567], [0.7073123455047607, 0.40964269638061523], [0.7101923227310181, 0.40608417987823486], [0.7128922343254089, 0.40277907252311707], [0.6657682061195374, 0.4815901517868042], [0.6672790050506592, 0.47852033376693726], [0.6687341332435608, 0.47549861669540405], [0.6704041957855225, 0.4723714590072632], [0.6720443964004517, 0.4692316949367523], [0.6738691329956055, 0.4660002887248993], [0.6756280064582825, 0.4628182053565979], [0.6775827407836914, 0.4595394730567932], [0.6794831156730652, 0.45628678798675537], [0.681582510471344, 0.4529260993003845], [0.6835960149765015, 0.4496609568595886], [0.6858341693878174, 0.44627511501312256], [0.6879826784133911, 0.4429996907711029], [0.6903194189071655, 0.43959665298461914], [0.6925734281539917, 0.43629300594329834], [0.6950571537017822, 0.432833194732666], [0.6974152326583862, 0.42954808473587036], [0.6999901533126831, 0.4261111617088318], [0.7024527192115784, 0.42282605171203613], [0.7051509022712708, 0.4193626642227173], [0.7077125310897827, 0.41609081625938416], [0.7105380296707153, 0.41261225938796997], [0.7132074236869812, 0.40935125946998596], [0.7161394357681274, 0.4058741331100464], [0.6688699722290039, 0.48332393169403076], [0.6702627539634705, 0.4804011583328247], [0.6718486547470093, 0.4773567020893097], [0.6733556985855103, 0.47438788414001465], [0.6751328706741333, 0.4712212383747101], [0.6768253445625305, 0.4681452810764313], [0.6787360906600952, 0.46492427587509155], [0.6805416345596313, 0.46180206537246704], [0.68259197473526, 0.45850032567977905], [0.6845406293869019, 0.455313116312027], [0.6867093443870544, 0.451985627412796], [0.688767671585083, 0.4488033056259155], [0.6910864114761353, 0.44544970989227295], [0.6932799816131592, 0.4422600567340851], [0.695721447467804, 0.4388725459575653], [0.6980327367782593, 0.4356396496295929], [0.7005735039710999, 0.4322522282600403], [0.7029711008071899, 0.4290507435798645], [0.7056251764297485, 0.4256435036659241], [0.7081322073936462, 0.42244040966033936], [0.7108981609344482, 0.4190289378166199], [0.7135096788406372, 0.4158298969268799], [0.7163981795310974, 0.4124135375022888], [0.7191123366355896, 0.40924155712127686], [0.6716921329498291, 0.48509061336517334], [0.6732154488563538, 0.48215237259864807], [0.6746813654899597, 0.4792470932006836], [0.6763313412666321, 0.476263165473938], [0.6779505014419556, 0.4732479751110077], [0.6797818541526794, 0.4701533913612366], [0.6815632581710815, 0.46707525849342346], [0.6835262775421143, 0.46391820907592773], [0.6854363679885864, 0.46078500151634216], [0.6875494122505188, 0.45756733417510986], [0.6895859241485596, 0.4544232487678528], [0.6918036937713623, 0.45117881894111633], [0.6939413547515869, 0.4480234384536743], [0.6963006854057312, 0.44474703073501587], [0.6985768675804138, 0.44155097007751465], [0.7010610103607178, 0.4382355809211731], [0.7034314274787903, 0.4350687265396118], [0.7060152888298035, 0.4317564070224762], [0.7084857821464539, 0.42859309911727905], 
[0.7111899256706238, 0.4252725839614868], [0.7137675881385803, 0.42212367057800293], [0.7165893316268921, 0.418786883354187], [0.7192628979682922, 0.4156531095504761], [0.7221955060958862, 0.412322074174881], [0.6746276617050171, 0.48681193590164185], [0.6760228872299194, 0.4840126633644104], [0.6776269674301147, 0.4811016917228699], [0.6791474223136902, 0.4782406687736511], [0.6809017658233643, 0.4752137064933777], [0.6825703382492065, 0.4722640812397003], [0.6844960451126099, 0.46916940808296204], [0.6863309144973755, 0.4661571681499481], [0.6883960962295532, 0.46301019191741943], [0.6903667449951172, 0.4599611759185791], [0.692566990852356, 0.4567805528640747], [0.6946589946746826, 0.45370402932167053], [0.6969505548477173, 0.4504813253879547], [0.6991274952888489, 0.44738051295280457], [0.7015693187713623, 0.44411030411720276], [0.7038822174072266, 0.4410035014152527], [0.7064269781112671, 0.4377438426017761], [0.7088393568992615, 0.43465927243232727], [0.7115010023117065, 0.43139761686325073], [0.7140169143676758, 0.4283236563205719], [0.7167970538139343, 0.42504483461380005], [0.7194169759750366, 0.4219675064086914], [0.722299337387085, 0.4186971187591553], [0.7250175476074219, 0.4156430959701538], [0.6772749423980713, 0.48853129148483276], [0.678788423538208, 0.48573946952819824], [0.680260419845581, 0.4829425811767578], [0.6819241642951965, 0.4800872504711151], [0.6835637092590332, 0.47718942165374756], [0.6853782534599304, 0.47424638271331787], [0.6871548295021057, 0.47129008173942566], [0.6891376972198486, 0.4682645797729492], [0.6910645365715027, 0.46526041626930237], [0.6931791305541992, 0.46219462156295776], [0.6952385306358337, 0.45916038751602173], [0.6974878311157227, 0.4560391902923584], [0.6996627449989319, 0.45298150181770325], [0.7020089626312256, 0.44984501600265503], [0.7042933702468872, 0.44676673412323], [0.7067733407020569, 0.4435862600803375], [0.7091507911682129, 0.44053834676742554], [0.711739182472229, 0.4373663663864136], [0.7142232656478882, 0.4343271851539612], [0.7169263362884521, 0.431143194437027], [0.7195185422897339, 0.42810049653053284], [0.7223394513130188, 0.42490845918655396], [0.7250220775604248, 0.42189979553222656], [0.7279554605484009, 0.4187103509902954], [0.6800560355186462, 0.4902094006538391], [0.6814116835594177, 0.48754289746284485], [0.6830103397369385, 0.4847545623779297], [0.6845332384109497, 0.48201924562454224], [0.6863152384757996, 0.4791385531425476], [0.6880106329917908, 0.47631406784057617], [0.6899314522743225, 0.4733639061450958], [0.6917683482170105, 0.47046786546707153], [0.6938415765762329, 0.4674609899520874], [0.6958209276199341, 0.46453630924224854], [0.6980302333831787, 0.4614700973033905], [0.7001351118087769, 0.4585020840167999], [0.7024685740470886, 0.455412358045578], [0.7046846747398376, 0.4524409770965576], [0.7071315050125122, 0.44931262731552124], [0.7094595432281494, 0.4463220238685608], [0.7120010256767273, 0.44320541620254517], [0.7144214510917664, 0.44023722410202026], [0.7170803546905518, 0.4371088147163391], [0.7196080684661865, 0.43414193391799927], [0.7223873138427734, 0.4310007095336914], [0.7250176668167114, 0.4280585050582886], [0.7279058694839478, 0.42492881417274475], [0.7306338548660278, 0.42199909687042236], [0.6825465559959412, 0.49186551570892334], [0.6840304136276245, 0.48924344778060913], [0.685498833656311, 0.48656517267227173], [0.6871523857116699, 0.48385292291641235], [0.688788652420044, 0.4810810387134552], [0.6906227469444275, 0.47826993465423584], [0.6924223303794861, 
0.47543099522590637], [0.6944063901901245, 0.47255373001098633], [0.6963455677032471, 0.4696708917617798], [0.6984682679176331, 0.46674293279647827], [0.7005550861358643, 0.46380698680877686], [0.7028085589408875, 0.4608232378959656], [0.7050000429153442, 0.4578806161880493], [0.7073724269866943, 0.45487135648727417], [0.7096843123435974, 0.4519135653972626], [0.7121762633323669, 0.44887465238571167], [0.714569628238678, 0.4459494650363922], [0.7171589732170105, 0.44291579723358154], [0.7196582555770874, 0.43998968601226807], [0.7223656177520752, 0.4369373321533203], [0.7249643206596375, 0.4340275824069977], [0.7277836203575134, 0.43098193407058716], [0.7304819822311401, 0.4280940890312195], [0.7334164381027222, 0.42503684759140015], [0.6851533651351929, 0.4935142993927002], [0.6864892840385437, 0.4910121262073517], [0.6880922913551331, 0.48835980892181396], [0.689609706401825, 0.4857422411441803], [0.6913750767707825, 0.4830010235309601], [0.69306880235672, 0.48029589653015137], [0.6950109004974365, 0.4774842858314514], [0.6968685984611511, 0.47471922636032104], [0.6989535689353943, 0.4718366265296936], [0.7009345293045044, 0.4690242111682892], [0.7031623721122742, 0.46608126163482666], [0.7052881717681885, 0.46323397755622864], [0.7076314091682434, 0.46027272939682007], [0.7098641991615295, 0.45741385221481323], [0.7123317122459412, 0.4544307589530945], [0.7146924138069153, 0.4515681564807892], [0.7172514200210571, 0.44858819246292114], [0.7196877002716064, 0.44574111700057983], [0.7223554253578186, 0.44274067878723145], [0.7248949408531189, 0.439900279045105], [0.7276743054389954, 0.43690651655197144], [0.7303118109703064, 0.43408650159835815], [0.7332034707069397, 0.43108779191970825], [0.7359439730644226, 0.4282764792442322], [0.6874523758888245, 0.49511152505874634], [0.6889032125473022, 0.49266231060028076], [0.6903707385063171, 0.49008917808532715], [0.6920220255851746, 0.4875166714191437], [0.6936628818511963, 0.4848718047142029], [0.6954923868179321, 0.48221635818481445], [0.6973081827163696, 0.4794982075691223], [0.6993053555488586, 0.4767691195011139], [0.701286792755127, 0.47398656606674194], [0.7034161686897278, 0.4712013900279999], [0.7055264711380005, 0.4683855175971985], [0.7077986001968384, 0.46554136276245117], [0.7100232243537903, 0.4627143144607544], [0.7124090790748596, 0.45983394980430603], [0.7147279977798462, 0.45700588822364807], [0.7172383069992065, 0.45410963892936707], [0.7196692824363708, 0.4512990713119507], [0.7222727537155151, 0.44839736819267273], [0.7247902154922485, 0.44559246301651], [0.7275024056434631, 0.44268327951431274], [0.7301118969917297, 0.4398999810218811], [0.7329330444335938, 0.4369804263114929], [0.7356346845626831, 0.4342097043991089], [0.7385714054107666, 0.4313015937805176], [0.6898787021636963, 0.49669477343559265], [0.6911594867706299, 0.49433162808418274], [0.6927433609962463, 0.4918213486671448], [0.6942662000656128, 0.4893401265144348], [0.6960471272468567, 0.48675787448883057], [0.6977546811103821, 0.4841805398464203], [0.6996983289718628, 0.48149576783180237], [0.7015544772148132, 0.4788583517074585], [0.7036767601966858, 0.4761163890361786], [0.7057037949562073, 0.47343724966049194], [0.7079595327377319, 0.47064775228500366], [0.710121214389801, 0.4679231643676758], [0.7124918699264526, 0.4650980532169342], [0.7147643566131592, 0.46234479546546936], [0.7172261476516724, 0.45950090885162354], [0.7195897698402405, 0.45676177740097046], [0.7221733331680298, 0.45390617847442627], [0.7246416807174683, 0.45117828249931335], 
[0.7273242473602295, 0.44832420349121094], [0.7298840284347534, 0.44561144709587097], [0.7326704263687134, 0.44275587797164917], [0.7353295087814331, 0.44004520773887634], [0.7382121682167053, 0.4371868371963501], [0.740951418876648, 0.43450701236724854], [0.6920034885406494, 0.4982028603553772], [0.6934062242507935, 0.4959072470664978], [0.6948305368423462, 0.49348437786102295], [0.6964710354804993, 0.49107980728149414], [0.6981291770935059, 0.4885600805282593], [0.6999727487564087, 0.4860607981681824], [0.7018321752548218, 0.4834558963775635], [0.7038275003433228, 0.480890691280365], [0.7058312296867371, 0.4782368540763855], [0.7079901099205017, 0.4755813777446747], [0.7101303935050964, 0.4728855490684509], [0.7124247550964355, 0.47018367052078247], [0.7146933078765869, 0.46746736764907837], [0.717110276222229, 0.46472299098968506], [0.7194694876670837, 0.4620216488838196], [0.7219780683517456, 0.45926839113235474], [0.7244303822517395, 0.45657220482826233], [0.7270528078079224, 0.45380645990371704], [0.729596734046936, 0.4511271119117737], [0.7323180437088013, 0.44835519790649414], [0.734948456287384, 0.44569090008735657], [0.737786054611206, 0.4429076611995697], [0.7405049204826355, 0.44026902318000793], [0.7434324026107788, 0.43750348687171936], [0.6942218542098999, 0.49972930550575256], [0.6954939365386963, 0.4975014328956604], [0.6970348358154297, 0.4951634407043457], [0.6985155940055847, 0.4928252100944519], [0.7002927660942078, 0.49038103222846985], [0.7020052075386047, 0.48795461654663086], [0.7040027379989624, 0.4854317307472229], [0.7059082984924316, 0.48293018341064453], [0.7080410718917847, 0.48033469915390015], [0.7101010680198669, 0.47775566577911377], [0.7123708128929138, 0.47510379552841187], [0.7145581245422363, 0.47249361872673035], [0.7169567346572876, 0.4697955846786499], [0.7192674279212952, 0.4671732187271118], [0.7217662930488586, 0.46447664499282837], [0.7241731882095337, 0.46186310052871704], [0.7267667651176453, 0.4591522216796875], [0.729263186454773, 0.45654308795928955], [0.7319598197937012, 0.45382606983184814], [0.7345459461212158, 0.4512307643890381], [0.737339437007904, 0.44850122928619385], [0.7400119304656982, 0.4459177553653717], [0.7429088950157166, 0.4432143568992615], [0.745670735836029, 0.4406663179397583], [0.6961359977722168, 0.5011443495750427], [0.6975117921829224, 0.49900686740875244], [0.6989262104034424, 0.49672067165374756], [0.700526237487793, 0.4944998323917389], [0.7021862268447876, 0.4921064078807831], [0.7040112018585205, 0.48979708552360535], [0.7059035301208496, 0.48731327056884766], [0.7079325914382935, 0.484882116317749], [0.7099813222885132, 0.4823455810546875], [0.7121676802635193, 0.4798447787761688], [0.7143635153770447, 0.4772706627845764], [0.7166788578033447, 0.4747062921524048], [0.7189867496490479, 0.47211313247680664], [0.7214242815971375, 0.46951818466186523], [0.7238254547119141, 0.4669439196586609], [0.7263692617416382, 0.46432530879974365], [0.7288569808006287, 0.4617587924003601], [0.731499969959259, 0.4591273069381714], [0.7340701818466187, 0.45657703280448914], [0.736815869808197, 0.4539380669593811], [0.7394781112670898, 0.4513997435569763], [0.7423187494277954, 0.4487646818161011], [0.7450463771820068, 0.44625943899154663], [0.7479894757270813, 0.44362956285476685], [0.6981952786445618, 0.5025745630264282], [0.6993821859359741, 0.5004981756210327], [0.7008985280990601, 0.49831894040107727], [0.7023574709892273, 0.4961479604244232], [0.7041362524032593, 0.4938652515411377], [0.7058336734771729, 
0.4915887415409088], [0.7078393697738647, 0.48920923471450806], [0.7097866535186768, 0.48682838678359985], [0.7119631171226501, 0.4843815863132477], [0.714065432548523, 0.4819430112838745], [0.7163839936256409, 0.4794216752052307], [0.7186200022697449, 0.47694337368011475], [0.7210507392883301, 0.47439783811569214], [0.7234032154083252, 0.47191447019577026], [0.7259359359741211, 0.46935129165649414], [0.7283830642700195, 0.4668600559234619], [0.7310049533843994, 0.4642884433269501], [0.7335396409034729, 0.46181100606918335], [0.7362591028213501, 0.4592369794845581], [0.7388803958892822, 0.45676949620246887], [0.7416998744010925, 0.4541952610015869], [0.7444058060646057, 0.45174455642700195], [0.7473039627075195, 0.4491724371910095], [0.7500768899917603, 0.446740061044693], [0.6999399065971375, 0.5038682222366333], [0.7012324929237366, 0.5019287467002869], [0.7025977969169617, 0.4997928738594055], [0.7041620016098022, 0.4977796673774719], [0.7058448791503906, 0.49551165103912354], [0.7076504230499268, 0.4933643341064453], [0.7095545530319214, 0.4910137355327606], [0.7116071581840515, 0.4887664318084717], [0.7137390375137329, 0.486364483833313], [0.7159585356712341, 0.48401159048080444], [0.7182163596153259, 0.4815633296966553], [0.7205711603164673, 0.4791405200958252], [0.7229215502738953, 0.47668805718421936], [0.7253892421722412, 0.4742327332496643], [0.727840006351471, 0.47177746891975403], [0.7304126620292664, 0.4692966938018799], [0.7329407334327698, 0.4668612480163574], [0.735608696937561, 0.4643744230270386], [0.7382193803787231, 0.4619537591934204], [0.7409878969192505, 0.4594570994377136], [0.7436753511428833, 0.4570532441139221], [0.7465443015098572, 0.45455121994018555], [0.7493027448654175, 0.45217207074165344], [0.7522538900375366, 0.4496764838695526], [0.7017879486083984, 0.5052014589309692], [0.702910304069519, 0.5033167600631714], [0.7043846249580383, 0.5013308525085449], [0.7057873010635376, 0.49932146072387695], [0.7075639367103577, 0.4972081184387207], [0.709294855594635, 0.49507975578308105], [0.7113057971000671, 0.49287909269332886], [0.7132659554481506, 0.4906642436981201], [0.7155044078826904, 0.4883597195148468], [0.7176804542541504, 0.4860658645629883], [0.7200527191162109, 0.4837011694908142], [0.7223607897758484, 0.4813472628593445], [0.7248157262802124, 0.47894155979156494], [0.7272090911865234, 0.4765775799751282], [0.7297739386558533, 0.47415128350257874], [0.7322679758071899, 0.47179362177848816], [0.7349216938018799, 0.46936583518981934], [0.7374947667121887, 0.4670228958129883], [0.7402435541152954, 0.464585542678833], [0.7428991198539734, 0.46224790811538696], [0.7457367181777954, 0.4598120152950287], [0.748473584651947, 0.4574894905090332], [0.7514005899429321, 0.45505762100219727], [0.7542048096656799, 0.45275431871414185], [0.7033222317695618, 0.5063742399215698], [0.704535722732544, 0.5046900510787964], [0.7058944702148438, 0.5026903748512268], [0.7073974609375, 0.5008716583251953], [0.7090616226196289, 0.49875861406326294], [0.7108745574951172, 0.49682486057281494], [0.7128607034683228, 0.49463891983032227], [0.7149071097373962, 0.4925641417503357], [0.7170917391777039, 0.4902889132499695], [0.7193576097488403, 0.4880925416946411], [0.7216963768005371, 0.48577362298965454], [0.7241035103797913, 0.4834790825843811], [0.7265139222145081, 0.4811636805534363], [0.7290177941322327, 0.47884872555732727], [0.7315142154693604, 0.47653675079345703], [0.7341264486312866, 0.47419875860214233], [0.7366999983787537, 0.4719066023826599], [0.7394043207168579, 
0.4695558547973633], [0.7420523166656494, 0.4672682285308838], [0.7448503971099854, 0.4649096727371216], [0.7475694417953491, 0.4626380205154419], [0.750459611415863, 0.4602741003036499], [0.7532494068145752, 0.45801782608032227], [0.7562230825424194, 0.45565706491470337], [0.7050172686576843, 0.5075969696044922], [0.7059725522994995, 0.505929708480835], [0.7074151039123535, 0.504136323928833], [0.7088217735290527, 0.5023001432418823], [0.710573673248291, 0.5004103183746338], [0.7123029828071594, 0.4984748065471649], [0.7143661975860596, 0.49644121527671814], [0.7163957357406616, 0.49439093470573425], [0.7186678647994995, 0.4922407865524292], [0.7208951711654663, 0.4900777339935303], [0.7233210802078247, 0.48783284425735474], [0.7256998419761658, 0.485615074634552], [0.7282069325447083, 0.48336148262023926], [0.7306663393974304, 0.4811471402645111], [0.7332707047462463, 0.47887495160102844], [0.7358172535896301, 0.47666335105895996], [0.7385126352310181, 0.47438082098960876], [0.7411378622055054, 0.4721689820289612], [0.7439178824424744, 0.46988028287887573], [0.7466185092926025, 0.4676719009876251], [0.7494804263114929, 0.46537092328071594], [0.7522464990615845, 0.4631713628768921], [0.7551934719085693, 0.4608702063560486], [0.7580262422561646, 0.45868945121765137], [0.7063401937484741, 0.5085989236831665], [0.7074182033538818, 0.5071959495544434], [0.7086914777755737, 0.5053620338439941], [0.7101576328277588, 0.5038303136825562], [0.7119166851043701, 0.5019164681434631], [0.7136957049369812, 0.500174880027771], [0.7157257795333862, 0.49813807010650635], [0.7178101539611816, 0.4962320923805237], [0.7200899124145508, 0.4941082000732422], [0.7223933935165405, 0.49203914403915405], [0.7247986793518066, 0.48985928297042847], [0.7272601127624512, 0.48771601915359497], [0.729742169380188, 0.48556065559387207], [0.7323010563850403, 0.48339253664016724], [0.7348554730415344, 0.4812344014644623], [0.7375106811523438, 0.47904059290885925], [0.7401350736618042, 0.47688379883766174], [0.7428834438323975, 0.4746783375740051], [0.7455776929855347, 0.4725315570831299], [0.7484168410301208, 0.47030341625213623], [0.7511731386184692, 0.46816110610961914], [0.7540901303291321, 0.46592918038368225], [0.7569080591201782, 0.4637991189956665], [0.7599043250083923, 0.4615676999092102], [0.707777202129364, 0.5096890926361084], [0.7086035013198853, 0.5082237720489502], [0.7099524736404419, 0.5067874193191528], [0.7113652229309082, 0.5052001476287842], [0.7131539583206177, 0.50349360704422], [0.714956521987915, 0.5017503499984741], [0.7170355319976807, 0.4998818635940552], [0.7191082239151001, 0.49799656867980957], [0.7214504480361938, 0.49597179889678955], [0.7237601280212402, 0.493958055973053], [0.7262365818023682, 0.4918801486492157], [0.728691816329956, 0.48983269929885864], [0.7312607765197754, 0.4877328872680664], [0.7337936162948608, 0.48566991090774536], [0.7364465594291687, 0.48354828357696533], [0.739054262638092, 0.48147547245025635], [0.7417912483215332, 0.4793391227722168], [0.7444656491279602, 0.4772665798664093], [0.7472877502441406, 0.47510987520217896], [0.7500351071357727, 0.4730311334133148], [0.7529346346855164, 0.4708678424358368], [0.7557401061058044, 0.46879664063453674], [0.7587120532989502, 0.46662870049476624], [0.7615758180618286, 0.46456462144851685], [0.7086998820304871, 0.5103377103805542], [0.7095887660980225, 0.509530782699585], [0.7111019492149353, 0.5080063343048096], [0.7124883532524109, 0.5066820383071899], [0.7143114805221558, 0.5049625635147095], [0.7161222696304321, 
0.5034022331237793], [0.7182590961456299, 0.5015659928321838], [0.7203673124313354, 0.49979984760284424], [0.7227255702018738, 0.49782276153564453], [0.7250928282737732, 0.49590200185775757], [0.7275668978691101, 0.49390649795532227], [0.7300874590873718, 0.4919097423553467], [0.7326391935348511, 0.4899106025695801], [0.7352609634399414, 0.48789262771606445], [0.7378835678100586, 0.4858860671520233], [0.7405937314033508, 0.48383647203445435], [0.7432736754417419, 0.4818234443664551], [0.7460634708404541, 0.47975069284439087], [0.7488076686859131, 0.4777318239212036], [0.7516849637031555, 0.47564697265625], [0.7544909119606018, 0.47363173961639404], [0.7574414014816284, 0.4715268909931183], [0.7602922916412354, 0.4695170223712921], [0.7633149027824402, 0.4674147367477417]], + "triangleIndex": [[0, 1, 130], [0, 130, 129], [1, 2, 130], [2, 131, 130], [2, 3, 132], [2, 132, 131], [3, 4, 132], [4, 133, 132], [4, 5, 134], [4, 134, 133], [5, 6, 134], [6, 135, 134], [6, 7, 136], [6, 136, 135], [7, 8, 136], [8, 137, 136], [8, 9, 138], [8, 138, 137], [9, 10, 138], [10, 139, 138], [10, 11, 140], [10, 140, 139], [11, 12, 140], [12, 141, 140], [12, 13, 142], [12, 142, 141], [13, 14, 142], [14, 143, 142], [14, 15, 144], [14, 144, 143], [15, 16, 144], [16, 145, 144], [16, 17, 146], [16, 146, 145], [17, 18, 146], [18, 147, 146], [18, 19, 148], [18, 148, 147], [19, 20, 148], [20, 149, 148], [20, 21, 150], [20, 150, 149], [21, 22, 150], [22, 151, 150], [22, 23, 152], [22, 152, 151], [23, 24, 152], [24, 153, 152], [24, 25, 154], [24, 154, 153], [25, 26, 154], [26, 155, 154], [26, 27, 156], [26, 156, 155], [27, 28, 156], [28, 157, 156], [28, 29, 158], [28, 158, 157], [29, 30, 158], [30, 159, 158], [30, 31, 160], [30, 160, 159], [31, 32, 160], [32, 161, 160], [32, 33, 162], [32, 162, 161], [33, 34, 162], [34, 163, 162], [34, 35, 164], [34, 164, 163], [35, 36, 164], [36, 165, 164], [36, 37, 166], [36, 166, 165], [37, 38, 166], [38, 167, 166], [38, 39, 168], [38, 168, 167], [39, 40, 168], [40, 169, 168], [40, 41, 170], [40, 170, 169], [41, 42, 170], [42, 171, 170], [42, 43, 172], [42, 172, 171], [43, 44, 172], [44, 173, 172], [44, 45, 174], [44, 174, 173], [45, 46, 174], [46, 175, 174], [46, 47, 176], [46, 176, 175], [47, 48, 176], [48, 177, 176], [48, 49, 178], [48, 178, 177], [49, 50, 178], [50, 179, 178], [50, 51, 180], [50, 180, 179], [51, 52, 180], [52, 181, 180], [52, 53, 182], [52, 182, 181], [53, 54, 182], [54, 183, 182], [54, 55, 184], [54, 184, 183], [55, 56, 184], [56, 185, 184], [56, 57, 186], [56, 186, 185], [57, 58, 186], [58, 187, 186], [58, 59, 188], [58, 188, 187], [59, 60, 188], [60, 189, 188], [60, 61, 190], [60, 190, 189], [61, 62, 190], [62, 191, 190], [62, 63, 192], [62, 192, 191], [63, 64, 192], [64, 193, 192], [64, 65, 194], [64, 194, 193], [65, 66, 194], [66, 195, 194], [66, 67, 196], [66, 196, 195], [67, 68, 196], [68, 197, 196], [68, 69, 198], [68, 198, 197], [69, 70, 198], [70, 199, 198], [70, 71, 200], [70, 200, 199], [71, 72, 200], [72, 201, 200], [72, 73, 202], [72, 202, 201], [73, 74, 202], [74, 203, 202], [74, 75, 204], [74, 204, 203], [75, 76, 204], [76, 205, 204], [76, 77, 206], [76, 206, 205], [77, 78, 206], [78, 207, 206], [78, 79, 208], [78, 208, 207], [79, 80, 208], [80, 209, 208], [80, 81, 210], [80, 210, 209], [81, 82, 210], [82, 211, 210], [82, 83, 212], [82, 212, 211], [83, 84, 212], [84, 213, 212], [84, 85, 214], [84, 214, 213], [85, 86, 214], [86, 215, 214], [86, 87, 216], [86, 216, 215], [87, 88, 216], [88, 217, 216], [88, 89, 218], [88, 218, 217], [89, 90, 218], [90, 
219, 218], [90, 91, 220], [90, 220, 219], [91, 92, 220], [92, 221, 220], [92, 93, 222], [92, 222, 221], [93, 94, 222], [94, 223, 222], [94, 95, 224], [94, 224, 223], [95, 96, 224], [96, 225, 224], [96, 97, 226], [96, 226, 225], [97, 98, 226], [98, 227, 226], [98, 99, 228], [98, 228, 227], [99, 100, 228], [100, 229, 228], [100, 101, 230], [100, 230, 229], [101, 102, 230], [102, 231, 230], [102, 103, 232], [102, 232, 231], [103, 104, 232], [104, 233, 232], [104, 105, 234], [104, 234, 233], [105, 106, 234], [106, 235, 234], [106, 107, 236], [106, 236, 235], [107, 108, 236], [108, 237, 236], [108, 109, 238], [108, 238, 237], [109, 110, 238], [110, 239, 238], [110, 111, 240], [110, 240, 239], [111, 112, 240], [112, 241, 240], [112, 113, 242], [112, 242, 241], [113, 114, 242], [114, 243, 242], [114, 115, 244], [114, 244, 243], [115, 116, 244], [116, 245, 244], [116, 117, 246], [116, 246, 245], [117, 118, 246], [118, 247, 246], [118, 119, 248], [118, 248, 247], [119, 120, 248], [120, 249, 248], [120, 121, 250], [120, 250, 249], [121, 122, 250], [122, 251, 250], [122, 123, 252], [122, 252, 251], [123, 124, 252], [124, 253, 252], [124, 125, 254], [124, 254, 253], [125, 126, 254], [126, 255, 254], [126, 127, 256], [126, 256, 255], [127, 128, 256], [128, 257, 256], [129, 130, 258], [130, 259, 258], [130, 131, 260], [130, 260, 259], [131, 132, 260], [132, 261, 260], [132, 133, 262], [132, 262, 261], [133, 134, 262], [134, 263, 262], [134, 135, 264], [134, 264, 263], [135, 136, 264], [136, 265, 264], [136, 137, 266], [136, 266, 265], [137, 138, 266], [138, 267, 266], [138, 139, 268], [138, 268, 267], [139, 140, 268], [140, 269, 268], [140, 141, 270], [140, 270, 269], [141, 142, 270], [142, 271, 270], [142, 143, 272], [142, 272, 271], [143, 144, 272], [144, 273, 272], [144, 145, 274], [144, 274, 273], [145, 146, 274], [146, 275, 274], [146, 147, 276], [146, 276, 275], [147, 148, 276], [148, 277, 276], [148, 149, 278], [148, 278, 277], [149, 150, 278], [150, 279, 278], [150, 151, 280], [150, 280, 279], [151, 152, 280], [152, 281, 280], [152, 153, 282], [152, 282, 281], [153, 154, 282], [154, 283, 282], [154, 155, 284], [154, 284, 283], [155, 156, 284], [156, 285, 284], [156, 157, 286], [156, 286, 285], [157, 158, 286], [158, 287, 286], [158, 159, 288], [158, 288, 287], [159, 160, 288], [160, 289, 288], [160, 161, 290], [160, 290, 289], [161, 162, 290], [162, 291, 290], [162, 163, 292], [162, 292, 291], [163, 164, 292], [164, 293, 292], [164, 165, 294], [164, 294, 293], [165, 166, 294], [166, 295, 294], [166, 167, 296], [166, 296, 295], [167, 168, 296], [168, 297, 296], [168, 169, 298], [168, 298, 297], [169, 170, 298], [170, 299, 298], [170, 171, 300], [170, 300, 299], [171, 172, 300], [172, 301, 300], [172, 173, 302], [172, 302, 301], [173, 174, 302], [174, 303, 302], [174, 175, 304], [174, 304, 303], [175, 176, 304], [176, 305, 304], [176, 177, 306], [176, 306, 305], [177, 178, 306], [178, 307, 306], [178, 179, 308], [178, 308, 307], [179, 180, 308], [180, 309, 308], [180, 181, 310], [180, 310, 309], [181, 182, 310], [182, 311, 310], [182, 183, 312], [182, 312, 311], [183, 184, 312], [184, 313, 312], [184, 185, 314], [184, 314, 313], [185, 186, 314], [186, 315, 314], [186, 187, 316], [186, 316, 315], [187, 188, 316], [188, 317, 316], [188, 189, 318], [188, 318, 317], [189, 190, 318], [190, 319, 318], [190, 191, 320], [190, 320, 319], [191, 192, 320], [192, 321, 320], [192, 193, 322], [192, 322, 321], [193, 194, 322], [194, 323, 322], [194, 195, 324], [194, 324, 323], [195, 196, 324], [196, 325, 324], 
[196, 197, 326], [196, 326, 325], [197, 198, 326], [198, 327, 326], [198, 199, 328], [198, 328, 327], [199, 200, 328], [200, 329, 328], [200, 201, 330], [200, 330, 329], [201, 202, 330], [202, 331, 330], [202, 203, 332], [202, 332, 331], [203, 204, 332], [204, 333, 332], [204, 205, 334], [204, 334, 333], [205, 206, 334], [206, 335, 334], [206, 207, 336], [206, 336, 335], [207, 208, 336], [208, 337, 336], [208, 209, 338], [208, 338, 337], [209, 210, 338], [210, 339, 338], [210, 211, 340], [210, 340, 339], [211, 212, 340], [212, 341, 340], [212, 213, 342], [212, 342, 341], [213, 214, 342], [214, 343, 342], [214, 215, 344], [214, 344, 343], [215, 216, 344], [216, 345, 344], [216, 217, 346], [216, 346, 345], [217, 218, 346], [218, 347, 346], [218, 219, 348], [218, 348, 347], [219, 220, 348], [220, 349, 348], [220, 221, 350], [220, 350, 349], [221, 222, 350], [222, 351, 350], [222, 223, 352], [222, 352, 351], [223, 224, 352], [224, 353, 352], [224, 225, 354], [224, 354, 353], [225, 226, 354], [226, 355, 354], [226, 227, 356], [226, 356, 355], [227, 228, 356], [228, 357, 356], [228, 229, 358], [228, 358, 357], [229, 230, 358], [230, 359, 358], [230, 231, 360], [230, 360, 359], [231, 232, 360], [232, 361, 360], [232, 233, 362], [232, 362, 361], [233, 234, 362], [234, 363, 362], [234, 235, 364], [234, 364, 363], [235, 236, 364], [236, 365, 364], [236, 237, 366], [236, 366, 365], [237, 238, 366], [238, 367, 366], [238, 239, 368], [238, 368, 367], [239, 240, 368], [240, 369, 368], [240, 241, 370], [240, 370, 369], [241, 242, 370], [242, 371, 370], [242, 243, 372], [242, 372, 371], [243, 244, 372], [244, 373, 372], [244, 245, 374], [244, 374, 373], [245, 246, 374], [246, 375, 374], [246, 247, 376], [246, 376, 375], [247, 248, 376], [248, 377, 376], [248, 249, 378], [248, 378, 377], [249, 250, 378], [250, 379, 378], [250, 251, 380], [250, 380, 379], [251, 252, 380], [252, 381, 380], [252, 253, 382], [252, 382, 381], [253, 254, 382], [254, 383, 382], [254, 255, 384], [254, 384, 383], [255, 256, 384], [256, 385, 384], [256, 257, 386], [256, 386, 385], [258, 259, 388], [258, 388, 387], [259, 260, 388], [260, 389, 388], [260, 261, 390], [260, 390, 389], [261, 262, 390], [262, 391, 390], [262, 263, 392], [262, 392, 391], [263, 264, 392], [264, 393, 392], [264, 265, 394], [264, 394, 393], [265, 266, 394], [266, 395, 394], [266, 267, 396], [266, 396, 395], [267, 268, 396], [268, 397, 396], [268, 269, 398], [268, 398, 397], [269, 270, 398], [270, 399, 398], [270, 271, 400], [270, 400, 399], [271, 272, 400], [272, 401, 400], [272, 273, 402], [272, 402, 401], [273, 274, 402], [274, 403, 402], [274, 275, 404], [274, 404, 403], [275, 276, 404], [276, 405, 404], [276, 277, 406], [276, 406, 405], [277, 278, 406], [278, 407, 406], [278, 279, 408], [278, 408, 407], [279, 280, 408], [280, 409, 408], [280, 281, 410], [280, 410, 409], [281, 282, 410], [282, 411, 410], [282, 283, 412], [282, 412, 411], [283, 284, 412], [284, 413, 412], [284, 285, 414], [284, 414, 413], [285, 286, 414], [286, 415, 414], [286, 287, 416], [286, 416, 415], [287, 288, 416], [288, 417, 416], [288, 289, 418], [288, 418, 417], [289, 290, 418], [290, 419, 418], [290, 291, 420], [290, 420, 419], [291, 292, 420], [292, 421, 420], [292, 293, 422], [292, 422, 421], [293, 294, 422], [294, 423, 422], [294, 295, 424], [294, 424, 423], [295, 296, 424], [296, 425, 424], [296, 297, 426], [296, 426, 425], [297, 298, 426], [298, 427, 426], [298, 299, 428], [298, 428, 427], [299, 300, 428], [300, 429, 428], [300, 301, 430], [300, 430, 429], [301, 302, 430], 
[302, 431, 430], [302, 303, 432], [302, 432, 431], [303, 304, 432], [304, 433, 432], [304, 305, 434], [304, 434, 433], [305, 306, 434], [306, 435, 434], [306, 307, 436], [306, 436, 435], [307, 308, 436], [308, 437, 436], [308, 309, 438], [308, 438, 437], [309, 310, 438], [310, 439, 438], [310, 311, 440], [310, 440, 439], [311, 312, 440], [312, 441, 440], [312, 313, 442], [312, 442, 441], [313, 314, 442], [314, 443, 442], [314, 315, 444], [314, 444, 443], [315, 316, 444], [316, 445, 444], [316, 317, 446], [316, 446, 445], [317, 318, 446], [318, 447, 446], [318, 319, 448], [318, 448, 447], [319, 320, 448], [320, 449, 448], [320, 321, 450], [320, 450, 449], [321, 322, 450], [322, 451, 450], [322, 323, 452], [322, 452, 451], [323, 324, 452], [324, 453, 452], [324, 325, 454], [324, 454, 453], [325, 326, 454], [326, 455, 454], [326, 327, 456], [326, 456, 455], [327, 328, 456], [328, 457, 456], [328, 329, 458], [328, 458, 457], [329, 330, 458], [330, 459, 458], [330, 331, 460], [330, 460, 459], [331, 332, 460], [332, 461, 460], [332, 333, 462], [332, 462, 461], [333, 334, 462], [334, 463, 462], [334, 335, 464], [334, 464, 463], [335, 336, 464], [336, 465, 464], [336, 337, 466], [336, 466, 465], [337, 338, 466], [338, 467, 466], [338, 339, 468], [338, 468, 467], [339, 340, 468], [340, 469, 468], [340, 341, 470], [340, 470, 469], [341, 342, 470], [342, 471, 470], [342, 343, 472], [342, 472, 471], [343, 344, 472], [344, 473, 472], [344, 345, 474], [344, 474, 473], [345, 346, 474], [346, 475, 474], [346, 347, 476], [346, 476, 475], [347, 348, 476], [348, 477, 476], [348, 349, 478], [348, 478, 477], [349, 350, 478], [350, 479, 478], [350, 351, 480], [350, 480, 479], [351, 352, 480], [352, 481, 480], [352, 353, 482], [352, 482, 481], [353, 354, 482], [354, 483, 482], [354, 355, 484], [354, 484, 483], [355, 356, 484], [356, 485, 484], [356, 357, 486], [356, 486, 485], [357, 358, 486], [358, 487, 486], [358, 359, 488], [358, 488, 487], [359, 360, 488], [360, 489, 488], [360, 361, 490], [360, 490, 489], [361, 362, 490], [362, 491, 490], [362, 363, 492], [362, 492, 491], [363, 364, 492], [364, 493, 492], [364, 365, 494], [364, 494, 493], [365, 366, 494], [366, 495, 494], [366, 367, 496], [366, 496, 495], [367, 368, 496], [368, 497, 496], [368, 369, 498], [368, 498, 497], [369, 370, 498], [370, 499, 498], [370, 371, 500], [370, 500, 499], [371, 372, 500], [372, 501, 500], [372, 373, 502], [372, 502, 501], [373, 374, 502], [374, 503, 502], [374, 375, 504], [374, 504, 503], [375, 376, 504], [376, 505, 504], [376, 377, 506], [376, 506, 505], [377, 378, 506], [378, 507, 506], [378, 379, 508], [378, 508, 507], [379, 380, 508], [380, 509, 508], [380, 381, 510], [380, 510, 509], [381, 382, 510], [382, 511, 510], [382, 383, 512], [382, 512, 511], [383, 384, 512], [384, 513, 512], [384, 385, 514], [384, 514, 513], [385, 386, 514], [386, 515, 514], [387, 388, 516], [388, 517, 516], [388, 389, 518], [388, 518, 517], [389, 390, 518], [390, 519, 518], [390, 391, 520], [390, 520, 519], [391, 392, 520], [392, 521, 520], [392, 393, 522], [392, 522, 521], [393, 394, 522], [394, 523, 522], [394, 395, 524], [394, 524, 523], [395, 396, 524], [396, 525, 524], [396, 397, 526], [396, 526, 525], [397, 398, 526], [398, 527, 526], [398, 399, 528], [398, 528, 527], [399, 400, 528], [400, 529, 528], [400, 401, 530], [400, 530, 529], [401, 402, 530], [402, 531, 530], [402, 403, 532], [402, 532, 531], [403, 404, 532], [404, 533, 532], [404, 405, 534], [404, 534, 533], [405, 406, 534], [406, 535, 534], [406, 407, 536], [406, 536, 535], 
[407, 408, 536], [408, 537, 536], [408, 409, 538], [408, 538, 537], [409, 410, 538], [410, 539, 538], [410, 411, 540], [410, 540, 539], [411, 412, 540], [412, 541, 540], [412, 413, 542], [412, 542, 541], [413, 414, 542], [414, 543, 542], [414, 415, 544], [414, 544, 543], [415, 416, 544], [416, 545, 544], [416, 417, 546], [416, 546, 545], [417, 418, 546], [418, 547, 546], [418, 419, 548], [418, 548, 547], [419, 420, 548], [420, 549, 548], [420, 421, 550], [420, 550, 549], [421, 422, 550], [422, 551, 550], [422, 423, 552], [422, 552, 551], [423, 424, 552], [424, 553, 552], [424, 425, 554], [424, 554, 553], [425, 426, 554], [426, 555, 554], [426, 427, 556], [426, 556, 555], [427, 428, 556], [428, 557, 556], [428, 429, 558], [428, 558, 557], [429, 430, 558], [430, 559, 558], [430, 431, 560], [430, 560, 559], [431, 432, 560], [432, 561, 560], [432, 433, 562], [432, 562, 561], [433, 434, 562], [434, 563, 562], [434, 435, 564], [434, 564, 563], [435, 436, 564], [436, 565, 564], [436, 437, 566], [436, 566, 565], [437, 438, 566], [438, 567, 566], [438, 439, 568], [438, 568, 567], [439, 440, 568], [440, 569, 568], [440, 441, 570], [440, 570, 569], [441, 442, 570], [442, 571, 570], [442, 443, 572], [442, 572, 571], [443, 444, 572], [444, 573, 572], [444, 445, 574], [444, 574, 573], [445, 446, 574], [446, 575, 574], [446, 447, 576], [446, 576, 575], [447, 448, 576], [448, 577, 576], [448, 449, 578], [448, 578, 577], [449, 450, 578], [450, 579, 578], [450, 451, 580], [450, 580, 579], [451, 452, 580], [452, 581, 580], [452, 453, 582], [452, 582, 581], [453, 454, 582], [454, 583, 582], [454, 455, 584], [454, 584, 583], [455, 456, 584], [456, 585, 584], [456, 457, 586], [456, 586, 585], [457, 458, 586], [458, 587, 586], [458, 459, 588], [458, 588, 587], [459, 460, 588], [460, 589, 588], [460, 461, 590], [460, 590, 589], [461, 462, 590], [462, 591, 590], [462, 463, 592], [462, 592, 591], [463, 464, 592], [464, 593, 592], [464, 465, 594], [464, 594, 593], [465, 466, 594], [466, 595, 594], [466, 467, 596], [466, 596, 595], [467, 468, 596], [468, 597, 596], [468, 469, 598], [468, 598, 597], [469, 470, 598], [470, 599, 598], [470, 471, 600], [470, 600, 599], [471, 472, 600], [472, 601, 600], [472, 473, 602], [472, 602, 601], [473, 474, 602], [474, 603, 602], [474, 475, 604], [474, 604, 603], [475, 476, 604], [476, 605, 604], [476, 477, 606], [476, 606, 605], [477, 478, 606], [478, 607, 606], [478, 479, 608], [478, 608, 607], [479, 480, 608], [480, 609, 608], [480, 481, 610], [480, 610, 609], [481, 482, 610], [482, 611, 610], [482, 483, 612], [482, 612, 611], [483, 484, 612], [484, 613, 612], [484, 485, 614], [484, 614, 613], [485, 486, 614], [486, 615, 614], [486, 487, 616], [486, 616, 615], [487, 488, 616], [488, 617, 616], [488, 489, 618], [488, 618, 617], [489, 490, 618], [490, 619, 618], [490, 491, 620], [490, 620, 619], [491, 492, 620], [492, 621, 620], [492, 493, 622], [492, 622, 621], [493, 494, 622], [494, 623, 622], [494, 495, 624], [494, 624, 623], [495, 496, 624], [496, 625, 624], [496, 497, 626], [496, 626, 625], [497, 498, 626], [498, 627, 626], [498, 499, 628], [498, 628, 627], [499, 500, 628], [500, 629, 628], [500, 501, 630], [500, 630, 629], [501, 502, 630], [502, 631, 630], [502, 503, 632], [502, 632, 631], [503, 504, 632], [504, 633, 632], [504, 505, 634], [504, 634, 633], [505, 506, 634], [506, 635, 634], [506, 507, 636], [506, 636, 635], [507, 508, 636], [508, 637, 636], [508, 509, 638], [508, 638, 637], [509, 510, 638], [510, 639, 638], [510, 511, 640], [510, 640, 639], [511, 512, 640], 
[512, 641, 640], [512, 513, 642], [512, 642, 641], [513, 514, 642], [514, 643, 642], [514, 515, 644], [514, 644, 643], [516, 517, 646], [516, 646, 645], [517, 518, 646], [518, 647, 646], [518, 519, 648], [518, 648, 647], [519, 520, 648], [520, 649, 648], [520, 521, 650], [520, 650, 649], [521, 522, 650], [522, 651, 650], [522, 523, 652], [522, 652, 651], [523, 524, 652], [524, 653, 652], [524, 525, 654], [524, 654, 653], [525, 526, 654], [526, 655, 654], [526, 527, 656], [526, 656, 655], [527, 528, 656], [528, 657, 656], [528, 529, 658], [528, 658, 657], [529, 530, 658], [530, 659, 658], [530, 531, 660], [530, 660, 659], [531, 532, 660], [532, 661, 660], [532, 533, 662], [532, 662, 661], [533, 534, 662], [534, 663, 662], [534, 535, 664], [534, 664, 663], [535, 536, 664], [536, 665, 664], [536, 537, 666], [536, 666, 665], [537, 538, 666], [538, 667, 666], [538, 539, 668], [538, 668, 667], [539, 540, 668], [540, 669, 668], [540, 541, 670], [540, 670, 669], [541, 542, 670], [542, 671, 670], [542, 543, 672], [542, 672, 671], [543, 544, 672], [544, 673, 672], [544, 545, 674], [544, 674, 673], [545, 546, 674], [546, 675, 674], [546, 547, 676], [546, 676, 675], [547, 548, 676], [548, 677, 676], [548, 549, 678], [548, 678, 677], [549, 550, 678], [550, 679, 678], [550, 551, 680], [550, 680, 679], [551, 552, 680], [552, 681, 680], [552, 553, 682], [552, 682, 681], [553, 554, 682], [554, 683, 682], [554, 555, 684], [554, 684, 683], [555, 556, 684], [556, 685, 684], [556, 557, 686], [556, 686, 685], [557, 558, 686], [558, 687, 686], [558, 559, 688], [558, 688, 687], [559, 560, 688], [560, 689, 688], [560, 561, 690], [560, 690, 689], [561, 562, 690], [562, 691, 690], [562, 563, 692], [562, 692, 691], [563, 564, 692], [564, 693, 692], [564, 565, 694], [564, 694, 693], [565, 566, 694], [566, 695, 694], [566, 567, 696], [566, 696, 695], [567, 568, 696], [568, 697, 696], [568, 569, 698], [568, 698, 697], [569, 570, 698], [570, 699, 698], [570, 571, 700], [570, 700, 699], [571, 572, 700], [572, 701, 700], [572, 573, 702], [572, 702, 701], [573, 574, 702], [574, 703, 702], [574, 575, 704], [574, 704, 703], [575, 576, 704], [576, 705, 704], [576, 577, 706], [576, 706, 705], [577, 578, 706], [578, 707, 706], [578, 579, 708], [578, 708, 707], [579, 580, 708], [580, 709, 708], [580, 581, 710], [580, 710, 709], [581, 582, 710], [582, 711, 710], [582, 583, 712], [582, 712, 711], [583, 584, 712], [584, 713, 712], [584, 585, 714], [584, 714, 713], [585, 586, 714], [586, 715, 714], [586, 587, 716], [586, 716, 715], [587, 588, 716], [588, 717, 716], [588, 589, 718], [588, 718, 717], [589, 590, 718], [590, 719, 718], [590, 591, 720], [590, 720, 719], [591, 592, 720], [592, 721, 720], [592, 593, 722], [592, 722, 721], [593, 594, 722], [594, 723, 722], [594, 595, 724], [594, 724, 723], [595, 596, 724], [596, 725, 724], [596, 597, 726], [596, 726, 725], [597, 598, 726], [598, 727, 726], [598, 599, 728], [598, 728, 727], [599, 600, 728], [600, 729, 728], [600, 601, 730], [600, 730, 729], [601, 602, 730], [602, 731, 730], [602, 603, 732], [602, 732, 731], [603, 604, 732], [604, 733, 732], [604, 605, 734], [604, 734, 733], [605, 606, 734], [606, 735, 734], [606, 607, 736], [606, 736, 735], [607, 608, 736], [608, 737, 736], [608, 609, 738], [608, 738, 737], [609, 610, 738], [610, 739, 738], [610, 611, 740], [610, 740, 739], [611, 612, 740], [612, 741, 740], [612, 613, 742], [612, 742, 741], [613, 614, 742], [614, 743, 742], [614, 615, 744], [614, 744, 743], [615, 616, 744], [616, 745, 744], [616, 617, 746], [616, 746, 745], 
[617, 618, 746], [618, 747, 746], [618, 619, 748], [618, 748, 747], [619, 620, 748], [620, 749, 748], [620, 621, 750], [620, 750, 749], [621, 622, 750], [622, 751, 750], [622, 623, 752], [622, 752, 751], [623, 624, 752], [624, 753, 752], [624, 625, 754], [624, 754, 753], [625, 626, 754], [626, 755, 754], [626, 627, 756], [626, 756, 755], [627, 628, 756], [628, 757, 756], [628, 629, 758], [628, 758, 757], [629, 630, 758], [630, 759, 758], [630, 631, 760], [630, 760, 759], [631, 632, 760], [632, 761, 760], [632, 633, 762], [632, 762, 761], [633, 634, 762], [634, 763, 762], [634, 635, 764], [634, 764, 763], [635, 636, 764], [636, 765, 764], [636, 637, 766], [636, 766, 765], [637, 638, 766], [638, 767, 766], [638, 639, 768], [638, 768, 767], [639, 640, 768], [640, 769, 768], [640, 641, 770], [640, 770, 769], [641, 642, 770], [642, 771, 770], [642, 643, 772], [642, 772, 771], [643, 644, 772], [644, 773, 772], [645, 646, 774], [646, 775, 774], [646, 647, 776], [646, 776, 775], [647, 648, 776], [648, 777, 776], [648, 649, 778], [648, 778, 777], [649, 650, 778], [650, 779, 778], [650, 651, 780], [650, 780, 779], [651, 652, 780], [652, 781, 780], [652, 653, 782], [652, 782, 781], [653, 654, 782], [654, 783, 782], [654, 655, 784], [654, 784, 783], [655, 656, 784], [656, 785, 784], [656, 657, 786], [656, 786, 785], [657, 658, 786], [658, 787, 786], [658, 659, 788], [658, 788, 787], [659, 660, 788], [660, 789, 788], [660, 661, 790], [660, 790, 789], [661, 662, 790], [662, 791, 790], [662, 663, 792], [662, 792, 791], [663, 664, 792], [664, 793, 792], [664, 665, 794], [664, 794, 793], [665, 666, 794], [666, 795, 794], [666, 667, 796], [666, 796, 795], [667, 668, 796], [668, 797, 796], [668, 669, 798], [668, 798, 797], [669, 670, 798], [670, 799, 798], [670, 671, 800], [670, 800, 799], [671, 672, 800], [672, 801, 800], [672, 673, 802], [672, 802, 801], [673, 674, 802], [674, 803, 802], [674, 675, 804], [674, 804, 803], [675, 676, 804], [676, 805, 804], [676, 677, 806], [676, 806, 805], [677, 678, 806], [678, 807, 806], [678, 679, 808], [678, 808, 807], [679, 680, 808], [680, 809, 808], [680, 681, 810], [680, 810, 809], [681, 682, 810], [682, 811, 810], [682, 683, 812], [682, 812, 811], [683, 684, 812], [684, 813, 812], [684, 685, 814], [684, 814, 813], [685, 686, 814], [686, 815, 814], [686, 687, 816], [686, 816, 815], [687, 688, 816], [688, 817, 816], [688, 689, 818], [688, 818, 817], [689, 690, 818], [690, 819, 818], [690, 691, 820], [690, 820, 819], [691, 692, 820], [692, 821, 820], [692, 693, 822], [692, 822, 821], [693, 694, 822], [694, 823, 822], [694, 695, 824], [694, 824, 823], [695, 696, 824], [696, 825, 824], [696, 697, 826], [696, 826, 825], [697, 698, 826], [698, 827, 826], [698, 699, 828], [698, 828, 827], [699, 700, 828], [700, 829, 828], [700, 701, 830], [700, 830, 829], [701, 702, 830], [702, 831, 830], [702, 703, 832], [702, 832, 831], [703, 704, 832], [704, 833, 832], [704, 705, 834], [704, 834, 833], [705, 706, 834], [706, 835, 834], [706, 707, 836], [706, 836, 835], [707, 708, 836], [708, 837, 836], [708, 709, 838], [708, 838, 837], [709, 710, 838], [710, 839, 838], [710, 711, 840], [710, 840, 839], [711, 712, 840], [712, 841, 840], [712, 713, 842], [712, 842, 841], [713, 714, 842], [714, 843, 842], [714, 715, 844], [714, 844, 843], [715, 716, 844], [716, 845, 844], [716, 717, 846], [716, 846, 845], [717, 718, 846], [718, 847, 846], [718, 719, 848], [718, 848, 847], [719, 720, 848], [720, 849, 848], [720, 721, 850], [720, 850, 849], [721, 722, 850], [722, 851, 850], [722, 723, 852], 
[722, 852, 851], [723, 724, 852], [724, 853, 852], [724, 725, 854], [724, 854, 853], [725, 726, 854], [726, 855, 854], [726, 727, 856], [726, 856, 855], [727, 728, 856], [728, 857, 856], [728, 729, 858], [728, 858, 857], [729, 730, 858], [730, 859, 858], [730, 731, 860], [730, 860, 859], [731, 732, 860], [732, 861, 860], [732, 733, 862], [732, 862, 861], [733, 734, 862], [734, 863, 862], [734, 735, 864], [734, 864, 863], [735, 736, 864], [736, 865, 864], [736, 737, 866], [736, 866, 865], [737, 738, 866], [738, 867, 866], [738, 739, 868], [738, 868, 867], [739, 740, 868], [740, 869, 868], [740, 741, 870], [740, 870, 869], [741, 742, 870], [742, 871, 870], [742, 743, 872], [742, 872, 871], [743, 744, 872], [744, 873, 872], [744, 745, 874], [744, 874, 873], [745, 746, 874], [746, 875, 874], [746, 747, 876], [746, 876, 875], [747, 748, 876], [748, 877, 876], [748, 749, 878], [748, 878, 877], [749, 750, 878], [750, 879, 878], [750, 751, 880], [750, 880, 879], [751, 752, 880], [752, 881, 880], [752, 753, 882], [752, 882, 881], [753, 754, 882], [754, 883, 882], [754, 755, 884], [754, 884, 883], [755, 756, 884], [756, 885, 884], [756, 757, 886], [756, 886, 885], [757, 758, 886], [758, 887, 886], [758, 759, 888], [758, 888, 887], [759, 760, 888], [760, 889, 888], [760, 761, 890], [760, 890, 889], [761, 762, 890], [762, 891, 890], [762, 763, 892], [762, 892, 891], [763, 764, 892], [764, 893, 892], [764, 765, 894], [764, 894, 893], [765, 766, 894], [766, 895, 894], [766, 767, 896], [766, 896, 895], [767, 768, 896], [768, 897, 896], [768, 769, 898], [768, 898, 897], [769, 770, 898], [770, 899, 898], [770, 771, 900], [770, 900, 899], [771, 772, 900], [772, 901, 900], [772, 773, 902], [772, 902, 901], [774, 775, 904], [774, 904, 903], [775, 776, 904], [776, 905, 904], [776, 777, 906], [776, 906, 905], [777, 778, 906], [778, 907, 906], [778, 779, 908], [778, 908, 907], [779, 780, 908], [780, 909, 908], [780, 781, 910], [780, 910, 909], [781, 782, 910], [782, 911, 910], [782, 783, 912], [782, 912, 911], [783, 784, 912], [784, 913, 912], [784, 785, 914], [784, 914, 913], [785, 786, 914], [786, 915, 914], [786, 787, 916], [786, 916, 915], [787, 788, 916], [788, 917, 916], [788, 789, 918], [788, 918, 917], [789, 790, 918], [790, 919, 918], [790, 791, 920], [790, 920, 919], [791, 792, 920], [792, 921, 920], [792, 793, 922], [792, 922, 921], [793, 794, 922], [794, 923, 922], [794, 795, 924], [794, 924, 923], [795, 796, 924], [796, 925, 924], [796, 797, 926], [796, 926, 925], [797, 798, 926], [798, 927, 926], [798, 799, 928], [798, 928, 927], [799, 800, 928], [800, 929, 928], [800, 801, 930], [800, 930, 929], [801, 802, 930], [802, 931, 930], [802, 803, 932], [802, 932, 931], [803, 804, 932], [804, 933, 932], [804, 805, 934], [804, 934, 933], [805, 806, 934], [806, 935, 934], [806, 807, 936], [806, 936, 935], [807, 808, 936], [808, 937, 936], [808, 809, 938], [808, 938, 937], [809, 810, 938], [810, 939, 938], [810, 811, 940], [810, 940, 939], [811, 812, 940], [812, 941, 940], [812, 813, 942], [812, 942, 941], [813, 814, 942], [814, 943, 942], [814, 815, 944], [814, 944, 943], [815, 816, 944], [816, 945, 944], [816, 817, 946], [816, 946, 945], [817, 818, 946], [818, 947, 946], [818, 819, 948], [818, 948, 947], [819, 820, 948], [820, 949, 948], [820, 821, 950], [820, 950, 949], [821, 822, 950], [822, 951, 950], [822, 823, 952], [822, 952, 951], [823, 824, 952], [824, 953, 952], [824, 825, 954], [824, 954, 953], [825, 826, 954], [826, 955, 954], [826, 827, 956], [826, 956, 955], [827, 828, 956], [828, 957, 956], 
[828, 829, 958], [828, 958, 957], [829, 830, 958], [830, 959, 958], [830, 831, 960], [830, 960, 959], [831, 832, 960], [832, 961, 960], [832, 833, 962], [832, 962, 961], [833, 834, 962], [834, 963, 962], [834, 835, 964], [834, 964, 963], [835, 836, 964], [836, 965, 964], [836, 837, 966], [836, 966, 965], [837, 838, 966], [838, 967, 966], [838, 839, 968], [838, 968, 967], [839, 840, 968], [840, 969, 968], [840, 841, 970], [840, 970, 969], [841, 842, 970], [842, 971, 970], [842, 843, 972], [842, 972, 971], [843, 844, 972], [844, 973, 972], [844, 845, 974], [844, 974, 973], [845, 846, 974], [846, 975, 974], [846, 847, 976], [846, 976, 975], [847, 848, 976], [848, 977, 976], [848, 849, 978], [848, 978, 977], [849, 850, 978], [850, 979, 978], [850, 851, 980], [850, 980, 979], [851, 852, 980], [852, 981, 980], [852, 853, 982], [852, 982, 981], [853, 854, 982], [854, 983, 982], [854, 855, 984], [854, 984, 983], [855, 856, 984], [856, 985, 984], [856, 857, 986], [856, 986, 985], [857, 858, 986], [858, 987, 986], [858, 859, 988], [858, 988, 987], [859, 860, 988], [860, 989, 988], [860, 861, 990], [860, 990, 989], [861, 862, 990], [862, 991, 990], [862, 863, 992], [862, 992, 991], [863, 864, 992], [864, 993, 992], [864, 865, 994], [864, 994, 993], [865, 866, 994], [866, 995, 994], [866, 867, 996], [866, 996, 995], [867, 868, 996], [868, 997, 996], [868, 869, 998], [868, 998, 997], [869, 870, 998], [870, 999, 998], [870, 871, 1000], [870, 1000, 999], [871, 872, 1000], [872, 1001, 1000], [872, 873, 1002], [872, 1002, 1001], [873, 874, 1002], [874, 1003, 1002], [874, 875, 1004], [874, 1004, 1003], [875, 876, 1004], [876, 1005, 1004], [876, 877, 1006], [876, 1006, 1005], [877, 878, 1006], [878, 1007, 1006], [878, 879, 1008], [878, 1008, 1007], [879, 880, 1008], [880, 1009, 1008], [880, 881, 1010], [880, 1010, 1009], [881, 882, 1010], [882, 1011, 1010], [882, 883, 1012], [882, 1012, 1011], [883, 884, 1012], [884, 1013, 1012], [884, 885, 1014], [884, 1014, 1013], [885, 886, 1014], [886, 1015, 1014], [886, 887, 1016], [886, 1016, 1015], [887, 888, 1016], [888, 1017, 1016], [888, 889, 1018], [888, 1018, 1017], [889, 890, 1018], [890, 1019, 1018], [890, 891, 1020], [890, 1020, 1019], [891, 892, 1020], [892, 1021, 1020], [892, 893, 1022], [892, 1022, 1021], [893, 894, 1022], [894, 1023, 1022], [894, 895, 1024], [894, 1024, 1023], [895, 896, 1024], [896, 1025, 1024], [896, 897, 1026], [896, 1026, 1025], [897, 898, 1026], [898, 1027, 1026], [898, 899, 1028], [898, 1028, 1027], [899, 900, 1028], [900, 1029, 1028], [900, 901, 1030], [900, 1030, 1029], [901, 902, 1030], [902, 1031, 1030], [903, 904, 1032], [904, 1033, 1032], [904, 905, 1034], [904, 1034, 1033], [905, 906, 1034], [906, 1035, 1034], [906, 907, 1036], [906, 1036, 1035], [907, 908, 1036], [908, 1037, 1036], [908, 909, 1038], [908, 1038, 1037], [909, 910, 1038], [910, 1039, 1038], [910, 911, 1040], [910, 1040, 1039], [911, 912, 1040], [912, 1041, 1040], [912, 913, 1042], [912, 1042, 1041], [913, 914, 1042], [914, 1043, 1042], [914, 915, 1044], [914, 1044, 1043], [915, 916, 1044], [916, 1045, 1044], [916, 917, 1046], [916, 1046, 1045], [917, 918, 1046], [918, 1047, 1046], [918, 919, 1048], [918, 1048, 1047], [919, 920, 1048], [920, 1049, 1048], [920, 921, 1050], [920, 1050, 1049], [921, 922, 1050], [922, 1051, 1050], [922, 923, 1052], [922, 1052, 1051], [923, 924, 1052], [924, 1053, 1052], [924, 925, 1054], [924, 1054, 1053], [925, 926, 1054], [926, 1055, 1054], [926, 927, 1056], [926, 1056, 1055], [927, 928, 1056], [928, 1057, 1056], [928, 929, 1058], 
[928, 1058, 1057], [929, 930, 1058], [930, 1059, 1058], [930, 931, 1060], [930, 1060, 1059], [931, 932, 1060], [932, 1061, 1060], [932, 933, 1062], [932, 1062, 1061], [933, 934, 1062], [934, 1063, 1062], [934, 935, 1064], [934, 1064, 1063], [935, 936, 1064], [936, 1065, 1064], [936, 937, 1066], [936, 1066, 1065], [937, 938, 1066], [938, 1067, 1066], [938, 939, 1068], [938, 1068, 1067], [939, 940, 1068], [940, 1069, 1068], [940, 941, 1070], [940, 1070, 1069], [941, 942, 1070], [942, 1071, 1070], [942, 943, 1072], [942, 1072, 1071], [943, 944, 1072], [944, 1073, 1072], [944, 945, 1074], [944, 1074, 1073], [945, 946, 1074], [946, 1075, 1074], [946, 947, 1076], [946, 1076, 1075], [947, 948, 1076], [948, 1077, 1076], [948, 949, 1078], [948, 1078, 1077], [949, 950, 1078], [950, 1079, 1078], [950, 951, 1080], [950, 1080, 1079], [951, 952, 1080], [952, 1081, 1080], [952, 953, 1082], [952, 1082, 1081], [953, 954, 1082], [954, 1083, 1082], [954, 955, 1084], [954, 1084, 1083], [955, 956, 1084], [956, 1085, 1084], [956, 957, 1086], [956, 1086, 1085], [957, 958, 1086], [958, 1087, 1086], [958, 959, 1088], [958, 1088, 1087], [959, 960, 1088], [960, 1089, 1088], [960, 961, 1090], [960, 1090, 1089], [961, 962, 1090], [962, 1091, 1090], [962, 963, 1092], [962, 1092, 1091], [963, 964, 1092], [964, 1093, 1092], [964, 965, 1094], [964, 1094, 1093], [965, 966, 1094], [966, 1095, 1094], [966, 967, 1096], [966, 1096, 1095], [967, 968, 1096], [968, 1097, 1096], [968, 969, 1098], [968, 1098, 1097], [969, 970, 1098], [970, 1099, 1098], [970, 971, 1100], [970, 1100, 1099], [971, 972, 1100], [972, 1101, 1100], [972, 973, 1102], [972, 1102, 1101], [973, 974, 1102], [974, 1103, 1102], [974, 975, 1104], [974, 1104, 1103], [975, 976, 1104], [976, 1105, 1104], [976, 977, 1106], [976, 1106, 1105], [977, 978, 1106], [978, 1107, 1106], [978, 979, 1108], [978, 1108, 1107], [979, 980, 1108], [980, 1109, 1108], [980, 981, 1110], [980, 1110, 1109], [981, 982, 1110], [982, 1111, 1110], [982, 983, 1112], [982, 1112, 1111], [983, 984, 1112], [984, 1113, 1112], [984, 985, 1114], [984, 1114, 1113], [985, 986, 1114], [986, 1115, 1114], [986, 987, 1116], [986, 1116, 1115], [987, 988, 1116], [988, 1117, 1116], [988, 989, 1118], [988, 1118, 1117], [989, 990, 1118], [990, 1119, 1118], [990, 991, 1120], [990, 1120, 1119], [991, 992, 1120], [992, 1121, 1120], [992, 993, 1122], [992, 1122, 1121], [993, 994, 1122], [994, 1123, 1122], [994, 995, 1124], [994, 1124, 1123], [995, 996, 1124], [996, 1125, 1124], [996, 997, 1126], [996, 1126, 1125], [997, 998, 1126], [998, 1127, 1126], [998, 999, 1128], [998, 1128, 1127], [999, 1000, 1128], [1000, 1129, 1128], [1000, 1001, 1130], [1000, 1130, 1129], [1001, 1002, 1130], [1002, 1131, 1130], [1002, 1003, 1132], [1002, 1132, 1131], [1003, 1004, 1132], [1004, 1133, 1132], [1004, 1005, 1134], [1004, 1134, 1133], [1005, 1006, 1134], [1006, 1135, 1134], [1006, 1007, 1136], [1006, 1136, 1135], [1007, 1008, 1136], [1008, 1137, 1136], [1008, 1009, 1138], [1008, 1138, 1137], [1009, 1010, 1138], [1010, 1139, 1138], [1010, 1011, 1140], [1010, 1140, 1139], [1011, 1012, 1140], [1012, 1141, 1140], [1012, 1013, 1142], [1012, 1142, 1141], [1013, 1014, 1142], [1014, 1143, 1142], [1014, 1015, 1144], [1014, 1144, 1143], [1015, 1016, 1144], [1016, 1145, 1144], [1016, 1017, 1146], [1016, 1146, 1145], [1017, 1018, 1146], [1018, 1147, 1146], [1018, 1019, 1148], [1018, 1148, 1147], [1019, 1020, 1148], [1020, 1149, 1148], [1020, 1021, 1150], [1020, 1150, 1149], [1021, 1022, 1150], [1022, 1151, 1150], [1022, 1023, 1152], [1022, 
1152, 1151], [1023, 1024, 1152], [1024, 1153, 1152], [1024, 1025, 1154], [1024, 1154, 1153], [1025, 1026, 1154], [1026, 1155, 1154], [1026, 1027, 1156], [1026, 1156, 1155], [1027, 1028, 1156], [1028, 1157, 1156], [1028, 1029, 1158], [1028, 1158, 1157], [1029, 1030, 1158], [1030, 1159, 1158], [1030, 1031, 1160], [1030, 1160, 1159], [1032, 1033, 1162], [1032, 1162, 1161], [1033, 1034, 1162], [1034, 1163, 1162], [1034, 1035, 1164], [1034, 1164, 1163], [1035, 1036, 1164], [1036, 1165, 1164], [1036, 1037, 1166], [1036, 1166, 1165], [1037, 1038, 1166], [1038, 1167, 1166], [1038, 1039, 1168], [1038, 1168, 1167], [1039, 1040, 1168], [1040, 1169, 1168], [1040, 1041, 1170], [1040, 1170, 1169], [1041, 1042, 1170], [1042, 1171, 1170], [1042, 1043, 1172], [1042, 1172, 1171], [1043, 1044, 1172], [1044, 1173, 1172], [1044, 1045, 1174], [1044, 1174, 1173], [1045, 1046, 1174], [1046, 1175, 1174], [1046, 1047, 1176], [1046, 1176, 1175], [1047, 1048, 1176], [1048, 1177, 1176], [1048, 1049, 1178], [1048, 1178, 1177], [1049, 1050, 1178], [1050, 1179, 1178], [1050, 1051, 1180], [1050, 1180, 1179], [1051, 1052, 1180], [1052, 1181, 1180], [1052, 1053, 1182], [1052, 1182, 1181], [1053, 1054, 1182], [1054, 1183, 1182], [1054, 1055, 1184], [1054, 1184, 1183], [1055, 1056, 1184], [1056, 1185, 1184], [1056, 1057, 1186], [1056, 1186, 1185], [1057, 1058, 1186], [1058, 1187, 1186], [1058, 1059, 1188], [1058, 1188, 1187], [1059, 1060, 1188], [1060, 1189, 1188], [1060, 1061, 1190], [1060, 1190, 1189], [1061, 1062, 1190], [1062, 1191, 1190], [1062, 1063, 1192], [1062, 1192, 1191], [1063, 1064, 1192], [1064, 1193, 1192], [1064, 1065, 1194], [1064, 1194, 1193], [1065, 1066, 1194], [1066, 1195, 1194], [1066, 1067, 1196], [1066, 1196, 1195], [1067, 1068, 1196], [1068, 1197, 1196], [1068, 1069, 1198], [1068, 1198, 1197], [1069, 1070, 1198], [1070, 1199, 1198], [1070, 1071, 1200], [1070, 1200, 1199], [1071, 1072, 1200], [1072, 1201, 1200], [1072, 1073, 1202], [1072, 1202, 1201], [1073, 1074, 1202], [1074, 1203, 1202], [1074, 1075, 1204], [1074, 1204, 1203], [1075, 1076, 1204], [1076, 1205, 1204], [1076, 1077, 1206], [1076, 1206, 1205], [1077, 1078, 1206], [1078, 1207, 1206], [1078, 1079, 1208], [1078, 1208, 1207], [1079, 1080, 1208], [1080, 1209, 1208], [1080, 1081, 1210], [1080, 1210, 1209], [1081, 1082, 1210], [1082, 1211, 1210], [1082, 1083, 1212], [1082, 1212, 1211], [1083, 1084, 1212], [1084, 1213, 1212], [1084, 1085, 1214], [1084, 1214, 1213], [1085, 1086, 1214], [1086, 1215, 1214], [1086, 1087, 1216], [1086, 1216, 1215], [1087, 1088, 1216], [1088, 1217, 1216], [1088, 1089, 1218], [1088, 1218, 1217], [1089, 1090, 1218], [1090, 1219, 1218], [1090, 1091, 1220], [1090, 1220, 1219], [1091, 1092, 1220], [1092, 1221, 1220], [1092, 1093, 1222], [1092, 1222, 1221], [1093, 1094, 1222], [1094, 1223, 1222], [1094, 1095, 1224], [1094, 1224, 1223], [1095, 1096, 1224], [1096, 1225, 1224], [1096, 1097, 1226], [1096, 1226, 1225], [1097, 1098, 1226], [1098, 1227, 1226], [1098, 1099, 1228], [1098, 1228, 1227], [1099, 1100, 1228], [1100, 1229, 1228], [1100, 1101, 1230], [1100, 1230, 1229], [1101, 1102, 1230], [1102, 1231, 1230], [1102, 1103, 1232], [1102, 1232, 1231], [1103, 1104, 1232], [1104, 1233, 1232], [1104, 1105, 1234], [1104, 1234, 1233], [1105, 1106, 1234], [1106, 1235, 1234], [1106, 1107, 1236], [1106, 1236, 1235], [1107, 1108, 1236], [1108, 1237, 1236], [1108, 1109, 1238], [1108, 1238, 1237], [1109, 1110, 1238], [1110, 1239, 1238], [1110, 1111, 1240], [1110, 1240, 1239], [1111, 1112, 1240], [1112, 1241, 1240], [1112, 1113, 1242], 
[1112, 1242, 1241], [1113, 1114, 1242], [1114, 1243, 1242], [1114, 1115, 1244], [1114, 1244, 1243], [1115, 1116, 1244], [1116, 1245, 1244], [1116, 1117, 1246], [1116, 1246, 1245], [1117, 1118, 1246], [1118, 1247, 1246], [1118, 1119, 1248], [1118, 1248, 1247], [1119, 1120, 1248], [1120, 1249, 1248], [1120, 1121, 1250], [1120, 1250, 1249], [1121, 1122, 1250], [1122, 1251, 1250], [1122, 1123, 1252], [1122, 1252, 1251], [1123, 1124, 1252], [1124, 1253, 1252], [1124, 1125, 1254], [1124, 1254, 1253], [1125, 1126, 1254], [1126, 1255, 1254], [1126, 1127, 1256], [1126, 1256, 1255], [1127, 1128, 1256], [1128, 1257, 1256], [1128, 1129, 1258], [1128, 1258, 1257], [1129, 1130, 1258], [1130, 1259, 1258], [1130, 1131, 1260], [1130, 1260, 1259], [1131, 1132, 1260], [1132, 1261, 1260], [1132, 1133, 1262], [1132, 1262, 1261], [1133, 1134, 1262], [1134, 1263, 1262], [1134, 1135, 1264], [1134, 1264, 1263], [1135, 1136, 1264], [1136, 1265, 1264], [1136, 1137, 1266], [1136, 1266, 1265], [1137, 1138, 1266], [1138, 1267, 1266], [1138, 1139, 1268], [1138, 1268, 1267], [1139, 1140, 1268], [1140, 1269, 1268], [1140, 1141, 1270], [1140, 1270, 1269], [1141, 1142, 1270], [1142, 1271, 1270], [1142, 1143, 1272], [1142, 1272, 1271], [1143, 1144, 1272], [1144, 1273, 1272], [1144, 1145, 1274], [1144, 1274, 1273], [1145, 1146, 1274], [1146, 1275, 1274], [1146, 1147, 1276], [1146, 1276, 1275], [1147, 1148, 1276], [1148, 1277, 1276], [1148, 1149, 1278], [1148, 1278, 1277], [1149, 1150, 1278], [1150, 1279, 1278], [1150, 1151, 1280], [1150, 1280, 1279], [1151, 1152, 1280], [1152, 1281, 1280], [1152, 1153, 1282], [1152, 1282, 1281], [1153, 1154, 1282], [1154, 1283, 1282], [1154, 1155, 1284], [1154, 1284, 1283], [1155, 1156, 1284], [1156, 1285, 1284], [1156, 1157, 1286], [1156, 1286, 1285], [1157, 1158, 1286], [1158, 1287, 1286], [1158, 1159, 1288], [1158, 1288, 1287], [1159, 1160, 1288], [1160, 1289, 1288], [1161, 1162, 1290], [1162, 1291, 1290], [1162, 1163, 1292], [1162, 1292, 1291], [1163, 1164, 1292], [1164, 1293, 1292], [1164, 1165, 1294], [1164, 1294, 1293], [1165, 1166, 1294], [1166, 1295, 1294], [1166, 1167, 1296], [1166, 1296, 1295], [1167, 1168, 1296], [1168, 1297, 1296], [1168, 1169, 1298], [1168, 1298, 1297], [1169, 1170, 1298], [1170, 1299, 1298], [1170, 1171, 1300], [1170, 1300, 1299], [1171, 1172, 1300], [1172, 1301, 1300], [1172, 1173, 1302], [1172, 1302, 1301], [1173, 1174, 1302], [1174, 1303, 1302], [1174, 1175, 1304], [1174, 1304, 1303], [1175, 1176, 1304], [1176, 1305, 1304], [1176, 1177, 1306], [1176, 1306, 1305], [1177, 1178, 1306], [1178, 1307, 1306], [1178, 1179, 1308], [1178, 1308, 1307], [1179, 1180, 1308], [1180, 1309, 1308], [1180, 1181, 1310], [1180, 1310, 1309], [1181, 1182, 1310], [1182, 1311, 1310], [1182, 1183, 1312], [1182, 1312, 1311], [1183, 1184, 1312], [1184, 1313, 1312], [1184, 1185, 1314], [1184, 1314, 1313], [1185, 1186, 1314], [1186, 1315, 1314], [1186, 1187, 1316], [1186, 1316, 1315], [1187, 1188, 1316], [1188, 1317, 1316], [1188, 1189, 1318], [1188, 1318, 1317], [1189, 1190, 1318], [1190, 1319, 1318], [1190, 1191, 1320], [1190, 1320, 1319], [1191, 1192, 1320], [1192, 1321, 1320], [1192, 1193, 1322], [1192, 1322, 1321], [1193, 1194, 1322], [1194, 1323, 1322], [1194, 1195, 1324], [1194, 1324, 1323], [1195, 1196, 1324], [1196, 1325, 1324], [1196, 1197, 1326], [1196, 1326, 1325], [1197, 1198, 1326], [1198, 1327, 1326], [1198, 1199, 1328], [1198, 1328, 1327], [1199, 1200, 1328], [1200, 1329, 1328], [1200, 1201, 1330], [1200, 1330, 1329], [1201, 1202, 1330], [1202, 1331, 1330], [1202, 1203, 
1332], [1202, 1332, 1331], [1203, 1204, 1332], [1204, 1333, 1332], [1204, 1205, 1334], [1204, 1334, 1333], [1205, 1206, 1334], [1206, 1335, 1334], [1206, 1207, 1336], [1206, 1336, 1335], [1207, 1208, 1336], [1208, 1337, 1336], [1208, 1209, 1338], [1208, 1338, 1337], [1209, 1210, 1338], [1210, 1339, 1338], [1210, 1211, 1340], [1210, 1340, 1339], [1211, 1212, 1340], [1212, 1341, 1340], [1212, 1213, 1342], [1212, 1342, 1341], [1213, 1214, 1342], [1214, 1343, 1342], [1214, 1215, 1344], [1214, 1344, 1343], [1215, 1216, 1344], [1216, 1345, 1344], [1216, 1217, 1346], [1216, 1346, 1345], [1217, 1218, 1346], [1218, 1347, 1346], [1218, 1219, 1348], [1218, 1348, 1347], [1219, 1220, 1348], [1220, 1349, 1348], [1220, 1221, 1350], [1220, 1350, 1349], [1221, 1222, 1350], [1222, 1351, 1350], [1222, 1223, 1352], [1222, 1352, 1351], [1223, 1224, 1352], [1224, 1353, 1352], [1224, 1225, 1354], [1224, 1354, 1353], [1225, 1226, 1354], [1226, 1355, 1354], [1226, 1227, 1356], [1226, 1356, 1355], [1227, 1228, 1356], [1228, 1357, 1356], [1228, 1229, 1358], [1228, 1358, 1357], [1229, 1230, 1358], [1230, 1359, 1358], [1230, 1231, 1360], [1230, 1360, 1359], [1231, 1232, 1360], [1232, 1361, 1360], [1232, 1233, 1362], [1232, 1362, 1361], [1233, 1234, 1362], [1234, 1363, 1362], [1234, 1235, 1364], [1234, 1364, 1363], [1235, 1236, 1364], [1236, 1365, 1364], [1236, 1237, 1366], [1236, 1366, 1365], [1237, 1238, 1366], [1238, 1367, 1366], [1238, 1239, 1368], [1238, 1368, 1367], [1239, 1240, 1368], [1240, 1369, 1368], [1240, 1241, 1370], [1240, 1370, 1369], [1241, 1242, 1370], [1242, 1371, 1370], [1242, 1243, 1372], [1242, 1372, 1371], [1243, 1244, 1372], [1244, 1373, 1372], [1244, 1245, 1374], [1244, 1374, 1373], [1245, 1246, 1374], [1246, 1375, 1374], [1246, 1247, 1376], [1246, 1376, 1375], [1247, 1248, 1376], [1248, 1377, 1376], [1248, 1249, 1378], [1248, 1378, 1377], [1249, 1250, 1378], [1250, 1379, 1378], [1250, 1251, 1380], [1250, 1380, 1379], [1251, 1252, 1380], [1252, 1381, 1380], [1252, 1253, 1382], [1252, 1382, 1381], [1253, 1254, 1382], [1254, 1383, 1382], [1254, 1255, 1384], [1254, 1384, 1383], [1255, 1256, 1384], [1256, 1385, 1384], [1256, 1257, 1386], [1256, 1386, 1385], [1257, 1258, 1386], [1258, 1387, 1386], [1258, 1259, 1388], [1258, 1388, 1387], [1259, 1260, 1388], [1260, 1389, 1388], [1260, 1261, 1390], [1260, 1390, 1389], [1261, 1262, 1390], [1262, 1391, 1390], [1262, 1263, 1392], [1262, 1392, 1391], [1263, 1264, 1392], [1264, 1393, 1392], [1264, 1265, 1394], [1264, 1394, 1393], [1265, 1266, 1394], [1266, 1395, 1394], [1266, 1267, 1396], [1266, 1396, 1395], [1267, 1268, 1396], [1268, 1397, 1396], [1268, 1269, 1398], [1268, 1398, 1397], [1269, 1270, 1398], [1270, 1399, 1398], [1270, 1271, 1400], [1270, 1400, 1399], [1271, 1272, 1400], [1272, 1401, 1400], [1272, 1273, 1402], [1272, 1402, 1401], [1273, 1274, 1402], [1274, 1403, 1402], [1274, 1275, 1404], [1274, 1404, 1403], [1275, 1276, 1404], [1276, 1405, 1404], [1276, 1277, 1406], [1276, 1406, 1405], [1277, 1278, 1406], [1278, 1407, 1406], [1278, 1279, 1408], [1278, 1408, 1407], [1279, 1280, 1408], [1280, 1409, 1408], [1280, 1281, 1410], [1280, 1410, 1409], [1281, 1282, 1410], [1282, 1411, 1410], [1282, 1283, 1412], [1282, 1412, 1411], [1283, 1284, 1412], [1284, 1413, 1412], [1284, 1285, 1414], [1284, 1414, 1413], [1285, 1286, 1414], [1286, 1415, 1414], [1286, 1287, 1416], [1286, 1416, 1415], [1287, 1288, 1416], [1288, 1417, 1416], [1288, 1289, 1418], [1288, 1418, 1417], [1290, 1291, 1420], [1290, 1420, 1419], [1291, 1292, 1420], [1292, 1421, 1420], [1292, 
1293, 1422], [1292, 1422, 1421], [1293, 1294, 1422], [1294, 1423, 1422], [1294, 1295, 1424], [1294, 1424, 1423], [1295, 1296, 1424], [1296, 1425, 1424], [1296, 1297, 1426], [1296, 1426, 1425], [1297, 1298, 1426], [1298, 1427, 1426], [1298, 1299, 1428], [1298, 1428, 1427], [1299, 1300, 1428], [1300, 1429, 1428], [1300, 1301, 1430], [1300, 1430, 1429], [1301, 1302, 1430], [1302, 1431, 1430], [1302, 1303, 1432], [1302, 1432, 1431], [1303, 1304, 1432], [1304, 1433, 1432], [1304, 1305, 1434], [1304, 1434, 1433], [1305, 1306, 1434], [1306, 1435, 1434], [1306, 1307, 1436], [1306, 1436, 1435], [1307, 1308, 1436], [1308, 1437, 1436], [1308, 1309, 1438], [1308, 1438, 1437], [1309, 1310, 1438], [1310, 1439, 1438], [1310, 1311, 1440], [1310, 1440, 1439], [1311, 1312, 1440], [1312, 1441, 1440], [1312, 1313, 1442], [1312, 1442, 1441], [1313, 1314, 1442], [1314, 1443, 1442], [1314, 1315, 1444], [1314, 1444, 1443], [1315, 1316, 1444], [1316, 1445, 1444], [1316, 1317, 1446], [1316, 1446, 1445], [1317, 1318, 1446], [1318, 1447, 1446], [1318, 1319, 1448], [1318, 1448, 1447], [1319, 1320, 1448], [1320, 1449, 1448], [1320, 1321, 1450], [1320, 1450, 1449], [1321, 1322, 1450], [1322, 1451, 1450], [1322, 1323, 1452], [1322, 1452, 1451], [1323, 1324, 1452], [1324, 1453, 1452], [1324, 1325, 1454], [1324, 1454, 1453], [1325, 1326, 1454], [1326, 1455, 1454], [1326, 1327, 1456], [1326, 1456, 1455], [1327, 1328, 1456], [1328, 1457, 1456], [1328, 1329, 1458], [1328, 1458, 1457], [1329, 1330, 1458], [1330, 1459, 1458], [1330, 1331, 1460], [1330, 1460, 1459], [1331, 1332, 1460], [1332, 1461, 1460], [1332, 1333, 1462], [1332, 1462, 1461], [1333, 1334, 1462], [1334, 1463, 1462], [1334, 1335, 1464], [1334, 1464, 1463], [1335, 1336, 1464], [1336, 1465, 1464], [1336, 1337, 1466], [1336, 1466, 1465], [1337, 1338, 1466], [1338, 1467, 1466], [1338, 1339, 1468], [1338, 1468, 1467], [1339, 1340, 1468], [1340, 1469, 1468], [1340, 1341, 1470], [1340, 1470, 1469], [1341, 1342, 1470], [1342, 1471, 1470], [1342, 1343, 1472], [1342, 1472, 1471], [1343, 1344, 1472], [1344, 1473, 1472], [1344, 1345, 1474], [1344, 1474, 1473], [1345, 1346, 1474], [1346, 1475, 1474], [1346, 1347, 1476], [1346, 1476, 1475], [1347, 1348, 1476], [1348, 1477, 1476], [1348, 1349, 1478], [1348, 1478, 1477], [1349, 1350, 1478], [1350, 1479, 1478], [1350, 1351, 1480], [1350, 1480, 1479], [1351, 1352, 1480], [1352, 1481, 1480], [1352, 1353, 1482], [1352, 1482, 1481], [1353, 1354, 1482], [1354, 1483, 1482], [1354, 1355, 1484], [1354, 1484, 1483], [1355, 1356, 1484], [1356, 1485, 1484], [1356, 1357, 1486], [1356, 1486, 1485], [1357, 1358, 1486], [1358, 1487, 1486], [1358, 1359, 1488], [1358, 1488, 1487], [1359, 1360, 1488], [1360, 1489, 1488], [1360, 1361, 1490], [1360, 1490, 1489], [1361, 1362, 1490], [1362, 1491, 1490], [1362, 1363, 1492], [1362, 1492, 1491], [1363, 1364, 1492], [1364, 1493, 1492], [1364, 1365, 1494], [1364, 1494, 1493], [1365, 1366, 1494], [1366, 1495, 1494], [1366, 1367, 1496], [1366, 1496, 1495], [1367, 1368, 1496], [1368, 1497, 1496], [1368, 1369, 1498], [1368, 1498, 1497], [1369, 1370, 1498], [1370, 1499, 1498], [1370, 1371, 1500], [1370, 1500, 1499], [1371, 1372, 1500], [1372, 1501, 1500], [1372, 1373, 1502], [1372, 1502, 1501], [1373, 1374, 1502], [1374, 1503, 1502], [1374, 1375, 1504], [1374, 1504, 1503], [1375, 1376, 1504], [1376, 1505, 1504], [1376, 1377, 1506], [1376, 1506, 1505], [1377, 1378, 1506], [1378, 1507, 1506], [1378, 1379, 1508], [1378, 1508, 1507], [1379, 1380, 1508], [1380, 1509, 1508], [1380, 1381, 1510], [1380, 1510, 1509], 
[1381, 1382, 1510], [1382, 1511, 1510], [1382, 1383, 1512], [1382, 1512, 1511], [1383, 1384, 1512], [1384, 1513, 1512], [1384, 1385, 1514], [1384, 1514, 1513], [1385, 1386, 1514], [1386, 1515, 1514], [1386, 1387, 1516], [1386, 1516, 1515], [1387, 1388, 1516], [1388, 1517, 1516], [1388, 1389, 1518], [1388, 1518, 1517], [1389, 1390, 1518], [1390, 1519, 1518], [1390, 1391, 1520], [1390, 1520, 1519], [1391, 1392, 1520], [1392, 1521, 1520], [1392, 1393, 1522], [1392, 1522, 1521], [1393, 1394, 1522], [1394, 1523, 1522], [1394, 1395, 1524], [1394, 1524, 1523], [1395, 1396, 1524], [1396, 1525, 1524], [1396, 1397, 1526], [1396, 1526, 1525], [1397, 1398, 1526], [1398, 1527, 1526], [1398, 1399, 1528], [1398, 1528, 1527], [1399, 1400, 1528], [1400, 1529, 1528], [1400, 1401, 1530], [1400, 1530, 1529], [1401, 1402, 1530], [1402, 1531, 1530], [1402, 1403, 1532], [1402, 1532, 1531], [1403, 1404, 1532], [1404, 1533, 1532], [1404, 1405, 1534], [1404, 1534, 1533], [1405, 1406, 1534], [1406, 1535, 1534], [1406, 1407, 1536], [1406, 1536, 1535], [1407, 1408, 1536], [1408, 1537, 1536], [1408, 1409, 1538], [1408, 1538, 1537], [1409, 1410, 1538], [1410, 1539, 1538], [1410, 1411, 1540], [1410, 1540, 1539], [1411, 1412, 1540], [1412, 1541, 1540], [1412, 1413, 1542], [1412, 1542, 1541], [1413, 1414, 1542], [1414, 1543, 1542], [1414, 1415, 1544], [1414, 1544, 1543], [1415, 1416, 1544], [1416, 1545, 1544], [1416, 1417, 1546], [1416, 1546, 1545], [1417, 1418, 1546], [1418, 1547, 1546], [1419, 1420, 1548], [1420, 1549, 1548], [1420, 1421, 1550], [1420, 1550, 1549], [1421, 1422, 1550], [1422, 1551, 1550], [1422, 1423, 1552], [1422, 1552, 1551], [1423, 1424, 1552], [1424, 1553, 1552], [1424, 1425, 1554], [1424, 1554, 1553], [1425, 1426, 1554], [1426, 1555, 1554], [1426, 1427, 1556], [1426, 1556, 1555], [1427, 1428, 1556], [1428, 1557, 1556], [1428, 1429, 1558], [1428, 1558, 1557], [1429, 1430, 1558], [1430, 1559, 1558], [1430, 1431, 1560], [1430, 1560, 1559], [1431, 1432, 1560], [1432, 1561, 1560], [1432, 1433, 1562], [1432, 1562, 1561], [1433, 1434, 1562], [1434, 1563, 1562], [1434, 1435, 1564], [1434, 1564, 1563], [1435, 1436, 1564], [1436, 1565, 1564], [1436, 1437, 1566], [1436, 1566, 1565], [1437, 1438, 1566], [1438, 1567, 1566], [1438, 1439, 1568], [1438, 1568, 1567], [1439, 1440, 1568], [1440, 1569, 1568], [1440, 1441, 1570], [1440, 1570, 1569], [1441, 1442, 1570], [1442, 1571, 1570], [1442, 1443, 1572], [1442, 1572, 1571], [1443, 1444, 1572], [1444, 1573, 1572], [1444, 1445, 1574], [1444, 1574, 1573], [1445, 1446, 1574], [1446, 1575, 1574], [1446, 1447, 1576], [1446, 1576, 1575], [1447, 1448, 1576], [1448, 1577, 1576], [1448, 1449, 1578], [1448, 1578, 1577], [1449, 1450, 1578], [1450, 1579, 1578], [1450, 1451, 1580], [1450, 1580, 1579], [1451, 1452, 1580], [1452, 1581, 1580], [1452, 1453, 1582], [1452, 1582, 1581], [1453, 1454, 1582], [1454, 1583, 1582], [1454, 1455, 1584], [1454, 1584, 1583], [1455, 1456, 1584], [1456, 1585, 1584], [1456, 1457, 1586], [1456, 1586, 1585], [1457, 1458, 1586], [1458, 1587, 1586], [1458, 1459, 1588], [1458, 1588, 1587], [1459, 1460, 1588], [1460, 1589, 1588], [1460, 1461, 1590], [1460, 1590, 1589], [1461, 1462, 1590], [1462, 1591, 1590], [1462, 1463, 1592], [1462, 1592, 1591], [1463, 1464, 1592], [1464, 1593, 1592], [1464, 1465, 1594], [1464, 1594, 1593], [1465, 1466, 1594], [1466, 1595, 1594], [1466, 1467, 1596], [1466, 1596, 1595], [1467, 1468, 1596], [1468, 1597, 1596], [1468, 1469, 1598], [1468, 1598, 1597], [1469, 1470, 1598], [1470, 1599, 1598], [1470, 1471, 1600], [1470, 1600, 
1599], [1471, 1472, 1600], [1472, 1601, 1600], [1472, 1473, 1602], [1472, 1602, 1601], [1473, 1474, 1602], [1474, 1603, 1602], [1474, 1475, 1604], [1474, 1604, 1603], [1475, 1476, 1604], [1476, 1605, 1604], [1476, 1477, 1606], [1476, 1606, 1605], [1477, 1478, 1606], [1478, 1607, 1606], [1478, 1479, 1608], [1478, 1608, 1607], [1479, 1480, 1608], [1480, 1609, 1608], [1480, 1481, 1610], [1480, 1610, 1609], [1481, 1482, 1610], [1482, 1611, 1610], [1482, 1483, 1612], [1482, 1612, 1611], [1483, 1484, 1612], [1484, 1613, 1612], [1484, 1485, 1614], [1484, 1614, 1613], [1485, 1486, 1614], [1486, 1615, 1614], [1486, 1487, 1616], [1486, 1616, 1615], [1487, 1488, 1616], [1488, 1617, 1616], [1488, 1489, 1618], [1488, 1618, 1617], [1489, 1490, 1618], [1490, 1619, 1618], [1490, 1491, 1620], [1490, 1620, 1619], [1491, 1492, 1620], [1492, 1621, 1620], [1492, 1493, 1622], [1492, 1622, 1621], [1493, 1494, 1622], [1494, 1623, 1622], [1494, 1495, 1624], [1494, 1624, 1623], [1495, 1496, 1624], [1496, 1625, 1624], [1496, 1497, 1626], [1496, 1626, 1625], [1497, 1498, 1626], [1498, 1627, 1626], [1498, 1499, 1628], [1498, 1628, 1627], [1499, 1500, 1628], [1500, 1629, 1628], [1500, 1501, 1630], [1500, 1630, 1629], [1501, 1502, 1630], [1502, 1631, 1630], [1502, 1503, 1632], [1502, 1632, 1631], [1503, 1504, 1632], [1504, 1633, 1632], [1504, 1505, 1634], [1504, 1634, 1633], [1505, 1506, 1634], [1506, 1635, 1634], [1506, 1507, 1636], [1506, 1636, 1635], [1507, 1508, 1636], [1508, 1637, 1636], [1508, 1509, 1638], [1508, 1638, 1637], [1509, 1510, 1638], [1510, 1639, 1638], [1510, 1511, 1640], [1510, 1640, 1639], [1511, 1512, 1640], [1512, 1641, 1640], [1512, 1513, 1642], [1512, 1642, 1641], [1513, 1514, 1642], [1514, 1643, 1642], [1514, 1515, 1644], [1514, 1644, 1643], [1515, 1516, 1644], [1516, 1645, 1644], [1516, 1517, 1646], [1516, 1646, 1645], [1517, 1518, 1646], [1518, 1647, 1646], [1518, 1519, 1648], [1518, 1648, 1647], [1519, 1520, 1648], [1520, 1649, 1648], [1520, 1521, 1650], [1520, 1650, 1649], [1521, 1522, 1650], [1522, 1651, 1650], [1522, 1523, 1652], [1522, 1652, 1651], [1523, 1524, 1652], [1524, 1653, 1652], [1524, 1525, 1654], [1524, 1654, 1653], [1525, 1526, 1654], [1526, 1655, 1654], [1526, 1527, 1656], [1526, 1656, 1655], [1527, 1528, 1656], [1528, 1657, 1656], [1528, 1529, 1658], [1528, 1658, 1657], [1529, 1530, 1658], [1530, 1659, 1658], [1530, 1531, 1660], [1530, 1660, 1659], [1531, 1532, 1660], [1532, 1661, 1660], [1532, 1533, 1662], [1532, 1662, 1661], [1533, 1534, 1662], [1534, 1663, 1662], [1534, 1535, 1664], [1534, 1664, 1663], [1535, 1536, 1664], [1536, 1665, 1664], [1536, 1537, 1666], [1536, 1666, 1665], [1537, 1538, 1666], [1538, 1667, 1666], [1538, 1539, 1668], [1538, 1668, 1667], [1539, 1540, 1668], [1540, 1669, 1668], [1540, 1541, 1670], [1540, 1670, 1669], [1541, 1542, 1670], [1542, 1671, 1670], [1542, 1543, 1672], [1542, 1672, 1671], [1543, 1544, 1672], [1544, 1673, 1672], [1544, 1545, 1674], [1544, 1674, 1673], [1545, 1546, 1674], [1546, 1675, 1674], [1546, 1547, 1676], [1546, 1676, 1675], [1548, 1549, 1678], [1548, 1678, 1677], [1549, 1550, 1678], [1550, 1679, 1678], [1550, 1551, 1680], [1550, 1680, 1679], [1551, 1552, 1680], [1552, 1681, 1680], [1552, 1553, 1682], [1552, 1682, 1681], [1553, 1554, 1682], [1554, 1683, 1682], [1554, 1555, 1684], [1554, 1684, 1683], [1555, 1556, 1684], [1556, 1685, 1684], [1556, 1557, 1686], [1556, 1686, 1685], [1557, 1558, 1686], [1558, 1687, 1686], [1558, 1559, 1688], [1558, 1688, 1687], [1559, 1560, 1688], [1560, 1689, 1688], [1560, 1561, 1690], [1560, 
1690, 1689], [1561, 1562, 1690], [1562, 1691, 1690], [1562, 1563, 1692], [1562, 1692, 1691], [1563, 1564, 1692], [1564, 1693, 1692], [1564, 1565, 1694], [1564, 1694, 1693], [1565, 1566, 1694], [1566, 1695, 1694], [1566, 1567, 1696], [1566, 1696, 1695], [1567, 1568, 1696], [1568, 1697, 1696], [1568, 1569, 1698], [1568, 1698, 1697], [1569, 1570, 1698], [1570, 1699, 1698], [1570, 1571, 1700], [1570, 1700, 1699], [1571, 1572, 1700], [1572, 1701, 1700], [1572, 1573, 1702], [1572, 1702, 1701], [1573, 1574, 1702], [1574, 1703, 1702], [1574, 1575, 1704], [1574, 1704, 1703], [1575, 1576, 1704], [1576, 1705, 1704], [1576, 1577, 1706], [1576, 1706, 1705], [1577, 1578, 1706], [1578, 1707, 1706], [1578, 1579, 1708], [1578, 1708, 1707], [1579, 1580, 1708], [1580, 1709, 1708], [1580, 1581, 1710], [1580, 1710, 1709], [1581, 1582, 1710], [1582, 1711, 1710], [1582, 1583, 1712], [1582, 1712, 1711], [1583, 1584, 1712], [1584, 1713, 1712], [1584, 1585, 1714], [1584, 1714, 1713], [1585, 1586, 1714], [1586, 1715, 1714], [1586, 1587, 1716], [1586, 1716, 1715], [1587, 1588, 1716], [1588, 1717, 1716], [1588, 1589, 1718], [1588, 1718, 1717], [1589, 1590, 1718], [1590, 1719, 1718], [1590, 1591, 1720], [1590, 1720, 1719], [1591, 1592, 1720], [1592, 1721, 1720], [1592, 1593, 1722], [1592, 1722, 1721], [1593, 1594, 1722], [1594, 1723, 1722], [1594, 1595, 1724], [1594, 1724, 1723], [1595, 1596, 1724], [1596, 1725, 1724], [1596, 1597, 1726], [1596, 1726, 1725], [1597, 1598, 1726], [1598, 1727, 1726], [1598, 1599, 1728], [1598, 1728, 1727], [1599, 1600, 1728], [1600, 1729, 1728], [1600, 1601, 1730], [1600, 1730, 1729], [1601, 1602, 1730], [1602, 1731, 1730], [1602, 1603, 1732], [1602, 1732, 1731], [1603, 1604, 1732], [1604, 1733, 1732], [1604, 1605, 1734], [1604, 1734, 1733], [1605, 1606, 1734], [1606, 1735, 1734], [1606, 1607, 1736], [1606, 1736, 1735], [1607, 1608, 1736], [1608, 1737, 1736], [1608, 1609, 1738], [1608, 1738, 1737], [1609, 1610, 1738], [1610, 1739, 1738], [1610, 1611, 1740], [1610, 1740, 1739], [1611, 1612, 1740], [1612, 1741, 1740], [1612, 1613, 1742], [1612, 1742, 1741], [1613, 1614, 1742], [1614, 1743, 1742], [1614, 1615, 1744], [1614, 1744, 1743], [1615, 1616, 1744], [1616, 1745, 1744], [1616, 1617, 1746], [1616, 1746, 1745], [1617, 1618, 1746], [1618, 1747, 1746], [1618, 1619, 1748], [1618, 1748, 1747], [1619, 1620, 1748], [1620, 1749, 1748], [1620, 1621, 1750], [1620, 1750, 1749], [1621, 1622, 1750], [1622, 1751, 1750], [1622, 1623, 1752], [1622, 1752, 1751], [1623, 1624, 1752], [1624, 1753, 1752], [1624, 1625, 1754], [1624, 1754, 1753], [1625, 1626, 1754], [1626, 1755, 1754], [1626, 1627, 1756], [1626, 1756, 1755], [1627, 1628, 1756], [1628, 1757, 1756], [1628, 1629, 1758], [1628, 1758, 1757], [1629, 1630, 1758], [1630, 1759, 1758], [1630, 1631, 1760], [1630, 1760, 1759], [1631, 1632, 1760], [1632, 1761, 1760], [1632, 1633, 1762], [1632, 1762, 1761], [1633, 1634, 1762], [1634, 1763, 1762], [1634, 1635, 1764], [1634, 1764, 1763], [1635, 1636, 1764], [1636, 1765, 1764], [1636, 1637, 1766], [1636, 1766, 1765], [1637, 1638, 1766], [1638, 1767, 1766], [1638, 1639, 1768], [1638, 1768, 1767], [1639, 1640, 1768], [1640, 1769, 1768], [1640, 1641, 1770], [1640, 1770, 1769], [1641, 1642, 1770], [1642, 1771, 1770], [1642, 1643, 1772], [1642, 1772, 1771], [1643, 1644, 1772], [1644, 1773, 1772], [1644, 1645, 1774], [1644, 1774, 1773], [1645, 1646, 1774], [1646, 1775, 1774], [1646, 1647, 1776], [1646, 1776, 1775], [1647, 1648, 1776], [1648, 1777, 1776], [1648, 1649, 1778], [1648, 1778, 1777], [1649, 1650, 1778], 
[1650, 1779, 1778], [1650, 1651, 1780], [1650, 1780, 1779], [1651, 1652, 1780], [1652, 1781, 1780], [1652, 1653, 1782], [1652, 1782, 1781], [1653, 1654, 1782], [1654, 1783, 1782], [1654, 1655, 1784], [1654, 1784, 1783], [1655, 1656, 1784], [1656, 1785, 1784], [1656, 1657, 1786], [1656, 1786, 1785], [1657, 1658, 1786], [1658, 1787, 1786], [1658, 1659, 1788], [1658, 1788, 1787], [1659, 1660, 1788], [1660, 1789, 1788], [1660, 1661, 1790], [1660, 1790, 1789], [1661, 1662, 1790], [1662, 1791, 1790], [1662, 1663, 1792], [1662, 1792, 1791], [1663, 1664, 1792], [1664, 1793, 1792], [1664, 1665, 1794], [1664, 1794, 1793], [1665, 1666, 1794], [1666, 1795, 1794], [1666, 1667, 1796], [1666, 1796, 1795], [1667, 1668, 1796], [1668, 1797, 1796], [1668, 1669, 1798], [1668, 1798, 1797], [1669, 1670, 1798], [1670, 1799, 1798], [1670, 1671, 1800], [1670, 1800, 1799], [1671, 1672, 1800], [1672, 1801, 1800], [1672, 1673, 1802], [1672, 1802, 1801], [1673, 1674, 1802], [1674, 1803, 1802], [1674, 1675, 1804], [1674, 1804, 1803], [1675, 1676, 1804], [1676, 1805, 1804], [1677, 1678, 1806], [1678, 1807, 1806], [1678, 1679, 1808], [1678, 1808, 1807], [1679, 1680, 1808], [1680, 1809, 1808], [1680, 1681, 1810], [1680, 1810, 1809], [1681, 1682, 1810], [1682, 1811, 1810], [1682, 1683, 1812], [1682, 1812, 1811], [1683, 1684, 1812], [1684, 1813, 1812], [1684, 1685, 1814], [1684, 1814, 1813], [1685, 1686, 1814], [1686, 1815, 1814], [1686, 1687, 1816], [1686, 1816, 1815], [1687, 1688, 1816], [1688, 1817, 1816], [1688, 1689, 1818], [1688, 1818, 1817], [1689, 1690, 1818], [1690, 1819, 1818], [1690, 1691, 1820], [1690, 1820, 1819], [1691, 1692, 1820], [1692, 1821, 1820], [1692, 1693, 1822], [1692, 1822, 1821], [1693, 1694, 1822], [1694, 1823, 1822], [1694, 1695, 1824], [1694, 1824, 1823], [1695, 1696, 1824], [1696, 1825, 1824], [1696, 1697, 1826], [1696, 1826, 1825], [1697, 1698, 1826], [1698, 1827, 1826], [1698, 1699, 1828], [1698, 1828, 1827], [1699, 1700, 1828], [1700, 1829, 1828], [1700, 1701, 1830], [1700, 1830, 1829], [1701, 1702, 1830], [1702, 1831, 1830], [1702, 1703, 1832], [1702, 1832, 1831], [1703, 1704, 1832], [1704, 1833, 1832], [1704, 1705, 1834], [1704, 1834, 1833], [1705, 1706, 1834], [1706, 1835, 1834], [1706, 1707, 1836], [1706, 1836, 1835], [1707, 1708, 1836], [1708, 1837, 1836], [1708, 1709, 1838], [1708, 1838, 1837], [1709, 1710, 1838], [1710, 1839, 1838], [1710, 1711, 1840], [1710, 1840, 1839], [1711, 1712, 1840], [1712, 1841, 1840], [1712, 1713, 1842], [1712, 1842, 1841], [1713, 1714, 1842], [1714, 1843, 1842], [1714, 1715, 1844], [1714, 1844, 1843], [1715, 1716, 1844], [1716, 1845, 1844], [1716, 1717, 1846], [1716, 1846, 1845], [1717, 1718, 1846], [1718, 1847, 1846], [1718, 1719, 1848], [1718, 1848, 1847], [1719, 1720, 1848], [1720, 1849, 1848], [1720, 1721, 1850], [1720, 1850, 1849], [1721, 1722, 1850], [1722, 1851, 1850], [1722, 1723, 1852], [1722, 1852, 1851], [1723, 1724, 1852], [1724, 1853, 1852], [1724, 1725, 1854], [1724, 1854, 1853], [1725, 1726, 1854], [1726, 1855, 1854], [1726, 1727, 1856], [1726, 1856, 1855], [1727, 1728, 1856], [1728, 1857, 1856], [1728, 1729, 1858], [1728, 1858, 1857], [1729, 1730, 1858], [1730, 1859, 1858], [1730, 1731, 1860], [1730, 1860, 1859], [1731, 1732, 1860], [1732, 1861, 1860], [1732, 1733, 1862], [1732, 1862, 1861], [1733, 1734, 1862], [1734, 1863, 1862], [1734, 1735, 1864], [1734, 1864, 1863], [1735, 1736, 1864], [1736, 1865, 1864], [1736, 1737, 1866], [1736, 1866, 1865], [1737, 1738, 1866], [1738, 1867, 1866], [1738, 1739, 1868], [1738, 1868, 1867], [1739, 1740, 
1868], [1740, 1869, 1868], [1740, 1741, 1870], [1740, 1870, 1869], [1741, 1742, 1870], [1742, 1871, 1870], [1742, 1743, 1872], [1742, 1872, 1871], [1743, 1744, 1872], [1744, 1873, 1872], [1744, 1745, 1874], [1744, 1874, 1873], [1745, 1746, 1874], [1746, 1875, 1874], [1746, 1747, 1876], [1746, 1876, 1875], [1747, 1748, 1876], [1748, 1877, 1876], [1748, 1749, 1878], [1748, 1878, 1877], [1749, 1750, 1878], [1750, 1879, 1878], [1750, 1751, 1880], [1750, 1880, 1879], [1751, 1752, 1880], [1752, 1881, 1880], [1752, 1753, 1882], [1752, 1882, 1881], [1753, 1754, 1882], [1754, 1883, 1882], [1754, 1755, 1884], [1754, 1884, 1883], [1755, 1756, 1884], [1756, 1885, 1884], [1756, 1757, 1886], [1756, 1886, 1885], [1757, 1758, 1886], [1758, 1887, 1886], [1758, 1759, 1888], [1758, 1888, 1887], [1759, 1760, 1888], [1760, 1889, 1888], [1760, 1761, 1890], [1760, 1890, 1889], [1761, 1762, 1890], [1762, 1891, 1890], [1762, 1763, 1892], [1762, 1892, 1891], [1763, 1764, 1892], [1764, 1893, 1892], [1764, 1765, 1894], [1764, 1894, 1893], [1765, 1766, 1894], [1766, 1895, 1894], [1766, 1767, 1896], [1766, 1896, 1895], [1767, 1768, 1896], [1768, 1897, 1896], [1768, 1769, 1898], [1768, 1898, 1897], [1769, 1770, 1898], [1770, 1899, 1898], [1770, 1771, 1900], [1770, 1900, 1899], [1771, 1772, 1900], [1772, 1901, 1900], [1772, 1773, 1902], [1772, 1902, 1901], [1773, 1774, 1902], [1774, 1903, 1902], [1774, 1775, 1904], [1774, 1904, 1903], [1775, 1776, 1904], [1776, 1905, 1904], [1776, 1777, 1906], [1776, 1906, 1905], [1777, 1778, 1906], [1778, 1907, 1906], [1778, 1779, 1908], [1778, 1908, 1907], [1779, 1780, 1908], [1780, 1909, 1908], [1780, 1781, 1910], [1780, 1910, 1909], [1781, 1782, 1910], [1782, 1911, 1910], [1782, 1783, 1912], [1782, 1912, 1911], [1783, 1784, 1912], [1784, 1913, 1912], [1784, 1785, 1914], [1784, 1914, 1913], [1785, 1786, 1914], [1786, 1915, 1914], [1786, 1787, 1916], [1786, 1916, 1915], [1787, 1788, 1916], [1788, 1917, 1916], [1788, 1789, 1918], [1788, 1918, 1917], [1789, 1790, 1918], [1790, 1919, 1918], [1790, 1791, 1920], [1790, 1920, 1919], [1791, 1792, 1920], [1792, 1921, 1920], [1792, 1793, 1922], [1792, 1922, 1921], [1793, 1794, 1922], [1794, 1923, 1922], [1794, 1795, 1924], [1794, 1924, 1923], [1795, 1796, 1924], [1796, 1925, 1924], [1796, 1797, 1926], [1796, 1926, 1925], [1797, 1798, 1926], [1798, 1927, 1926], [1798, 1799, 1928], [1798, 1928, 1927], [1799, 1800, 1928], [1800, 1929, 1928], [1800, 1801, 1930], [1800, 1930, 1929], [1801, 1802, 1930], [1802, 1931, 1930], [1802, 1803, 1932], [1802, 1932, 1931], [1803, 1804, 1932], [1804, 1933, 1932], [1804, 1805, 1934], [1804, 1934, 1933], [1806, 1807, 1936], [1806, 1936, 1935], [1807, 1808, 1936], [1808, 1937, 1936], [1808, 1809, 1938], [1808, 1938, 1937], [1809, 1810, 1938], [1810, 1939, 1938], [1810, 1811, 1940], [1810, 1940, 1939], [1811, 1812, 1940], [1812, 1941, 1940], [1812, 1813, 1942], [1812, 1942, 1941], [1813, 1814, 1942], [1814, 1943, 1942], [1814, 1815, 1944], [1814, 1944, 1943], [1815, 1816, 1944], [1816, 1945, 1944], [1816, 1817, 1946], [1816, 1946, 1945], [1817, 1818, 1946], [1818, 1947, 1946], [1818, 1819, 1948], [1818, 1948, 1947], [1819, 1820, 1948], [1820, 1949, 1948], [1820, 1821, 1950], [1820, 1950, 1949], [1821, 1822, 1950], [1822, 1951, 1950], [1822, 1823, 1952], [1822, 1952, 1951], [1823, 1824, 1952], [1824, 1953, 1952], [1824, 1825, 1954], [1824, 1954, 1953], [1825, 1826, 1954], [1826, 1955, 1954], [1826, 1827, 1956], [1826, 1956, 1955], [1827, 1828, 1956], [1828, 1957, 1956], [1828, 1829, 1958], [1828, 1958, 1957], [1829, 
1830, 1958], [1830, 1959, 1958], [1830, 1831, 1960], [1830, 1960, 1959], [1831, 1832, 1960], [1832, 1961, 1960], [1832, 1833, 1962], [1832, 1962, 1961], [1833, 1834, 1962], [1834, 1963, 1962], [1834, 1835, 1964], [1834, 1964, 1963], [1835, 1836, 1964], [1836, 1965, 1964], [1836, 1837, 1966], [1836, 1966, 1965], [1837, 1838, 1966], [1838, 1967, 1966], [1838, 1839, 1968], [1838, 1968, 1967], [1839, 1840, 1968], [1840, 1969, 1968], [1840, 1841, 1970], [1840, 1970, 1969], [1841, 1842, 1970], [1842, 1971, 1970], [1842, 1843, 1972], [1842, 1972, 1971], [1843, 1844, 1972], [1844, 1973, 1972], [1844, 1845, 1974], [1844, 1974, 1973], [1845, 1846, 1974], [1846, 1975, 1974], [1846, 1847, 1976], [1846, 1976, 1975], [1847, 1848, 1976], [1848, 1977, 1976], [1848, 1849, 1978], [1848, 1978, 1977], [1849, 1850, 1978], [1850, 1979, 1978], [1850, 1851, 1980], [1850, 1980, 1979], [1851, 1852, 1980], [1852, 1981, 1980], [1852, 1853, 1982], [1852, 1982, 1981], [1853, 1854, 1982], [1854, 1983, 1982], [1854, 1855, 1984], [1854, 1984, 1983], [1855, 1856, 1984], [1856, 1985, 1984], [1856, 1857, 1986], [1856, 1986, 1985], [1857, 1858, 1986], [1858, 1987, 1986], [1858, 1859, 1988], [1858, 1988, 1987], [1859, 1860, 1988], [1860, 1989, 1988], [1860, 1861, 1990], [1860, 1990, 1989], [1861, 1862, 1990], [1862, 1991, 1990], [1862, 1863, 1992], [1862, 1992, 1991], [1863, 1864, 1992], [1864, 1993, 1992], [1864, 1865, 1994], [1864, 1994, 1993], [1865, 1866, 1994], [1866, 1995, 1994], [1866, 1867, 1996], [1866, 1996, 1995], [1867, 1868, 1996], [1868, 1997, 1996], [1868, 1869, 1998], [1868, 1998, 1997], [1869, 1870, 1998], [1870, 1999, 1998], [1870, 1871, 2000], [1870, 2000, 1999], [1871, 1872, 2000], [1872, 2001, 2000], [1872, 1873, 2002], [1872, 2002, 2001], [1873, 1874, 2002], [1874, 2003, 2002], [1874, 1875, 2004], [1874, 2004, 2003], [1875, 1876, 2004], [1876, 2005, 2004], [1876, 1877, 2006], [1876, 2006, 2005], [1877, 1878, 2006], [1878, 2007, 2006], [1878, 1879, 2008], [1878, 2008, 2007], [1879, 1880, 2008], [1880, 2009, 2008], [1880, 1881, 2010], [1880, 2010, 2009], [1881, 1882, 2010], [1882, 2011, 2010], [1882, 1883, 2012], [1882, 2012, 2011], [1883, 1884, 2012], [1884, 2013, 2012], [1884, 1885, 2014], [1884, 2014, 2013], [1885, 1886, 2014], [1886, 2015, 2014], [1886, 1887, 2016], [1886, 2016, 2015], [1887, 1888, 2016], [1888, 2017, 2016], [1888, 1889, 2018], [1888, 2018, 2017], [1889, 1890, 2018], [1890, 2019, 2018], [1890, 1891, 2020], [1890, 2020, 2019], [1891, 1892, 2020], [1892, 2021, 2020], [1892, 1893, 2022], [1892, 2022, 2021], [1893, 1894, 2022], [1894, 2023, 2022], [1894, 1895, 2024], [1894, 2024, 2023], [1895, 1896, 2024], [1896, 2025, 2024], [1896, 1897, 2026], [1896, 2026, 2025], [1897, 1898, 2026], [1898, 2027, 2026], [1898, 1899, 2028], [1898, 2028, 2027], [1899, 1900, 2028], [1900, 2029, 2028], [1900, 1901, 2030], [1900, 2030, 2029], [1901, 1902, 2030], [1902, 2031, 2030], [1902, 1903, 2032], [1902, 2032, 2031], [1903, 1904, 2032], [1904, 2033, 2032], [1904, 1905, 2034], [1904, 2034, 2033], [1905, 1906, 2034], [1906, 2035, 2034], [1906, 1907, 2036], [1906, 2036, 2035], [1907, 1908, 2036], [1908, 2037, 2036], [1908, 1909, 2038], [1908, 2038, 2037], [1909, 1910, 2038], [1910, 2039, 2038], [1910, 1911, 2040], [1910, 2040, 2039], [1911, 1912, 2040], [1912, 2041, 2040], [1912, 1913, 2042], [1912, 2042, 2041], [1913, 1914, 2042], [1914, 2043, 2042], [1914, 1915, 2044], [1914, 2044, 2043], [1915, 1916, 2044], [1916, 2045, 2044], [1916, 1917, 2046], [1916, 2046, 2045], [1917, 1918, 2046], [1918, 2047, 2046], 
[1918, 1919, 2048], [1918, 2048, 2047], [1919, 1920, 2048], [1920, 2049, 2048], [1920, 1921, 2050], [1920, 2050, 2049], [1921, 1922, 2050], [1922, 2051, 2050], [1922, 1923, 2052], [1922, 2052, 2051], [1923, 1924, 2052], [1924, 2053, 2052], [1924, 1925, 2054], [1924, 2054, 2053], [1925, 1926, 2054], [1926, 2055, 2054], [1926, 1927, 2056], [1926, 2056, 2055], [1927, 1928, 2056], [1928, 2057, 2056], [1928, 1929, 2058], [1928, 2058, 2057], [1929, 1930, 2058], [1930, 2059, 2058], [1930, 1931, 2060], [1930, 2060, 2059], [1931, 1932, 2060], [1932, 2061, 2060], [1932, 1933, 2062], [1932, 2062, 2061], [1933, 1934, 2062], [1934, 2063, 2062], [1935, 1936, 2064], [1936, 2065, 2064], [1936, 1937, 2066], [1936, 2066, 2065], [1937, 1938, 2066], [1938, 2067, 2066], [1938, 1939, 2068], [1938, 2068, 2067], [1939, 1940, 2068], [1940, 2069, 2068], [1940, 1941, 2070], [1940, 2070, 2069], [1941, 1942, 2070], [1942, 2071, 2070], [1942, 1943, 2072], [1942, 2072, 2071], [1943, 1944, 2072], [1944, 2073, 2072], [1944, 1945, 2074], [1944, 2074, 2073], [1945, 1946, 2074], [1946, 2075, 2074], [1946, 1947, 2076], [1946, 2076, 2075], [1947, 1948, 2076], [1948, 2077, 2076], [1948, 1949, 2078], [1948, 2078, 2077], [1949, 1950, 2078], [1950, 2079, 2078], [1950, 1951, 2080], [1950, 2080, 2079], [1951, 1952, 2080], [1952, 2081, 2080], [1952, 1953, 2082], [1952, 2082, 2081], [1953, 1954, 2082], [1954, 2083, 2082], [1954, 1955, 2084], [1954, 2084, 2083], [1955, 1956, 2084], [1956, 2085, 2084], [1956, 1957, 2086], [1956, 2086, 2085], [1957, 1958, 2086], [1958, 2087, 2086], [1958, 1959, 2088], [1958, 2088, 2087], [1959, 1960, 2088], [1960, 2089, 2088], [1960, 1961, 2090], [1960, 2090, 2089], [1961, 1962, 2090], [1962, 2091, 2090], [1962, 1963, 2092], [1962, 2092, 2091], [1963, 1964, 2092], [1964, 2093, 2092], [1964, 1965, 2094], [1964, 2094, 2093], [1965, 1966, 2094], [1966, 2095, 2094], [1966, 1967, 2096], [1966, 2096, 2095], [1967, 1968, 2096], [1968, 2097, 2096], [1968, 1969, 2098], [1968, 2098, 2097], [1969, 1970, 2098], [1970, 2099, 2098], [1970, 1971, 2100], [1970, 2100, 2099], [1971, 1972, 2100], [1972, 2101, 2100], [1972, 1973, 2102], [1972, 2102, 2101], [1973, 1974, 2102], [1974, 2103, 2102], [1974, 1975, 2104], [1974, 2104, 2103], [1975, 1976, 2104], [1976, 2105, 2104], [1976, 1977, 2106], [1976, 2106, 2105], [1977, 1978, 2106], [1978, 2107, 2106], [1978, 1979, 2108], [1978, 2108, 2107], [1979, 1980, 2108], [1980, 2109, 2108], [1980, 1981, 2110], [1980, 2110, 2109], [1981, 1982, 2110], [1982, 2111, 2110], [1982, 1983, 2112], [1982, 2112, 2111], [1983, 1984, 2112], [1984, 2113, 2112], [1984, 1985, 2114], [1984, 2114, 2113], [1985, 1986, 2114], [1986, 2115, 2114], [1986, 1987, 2116], [1986, 2116, 2115], [1987, 1988, 2116], [1988, 2117, 2116], [1988, 1989, 2118], [1988, 2118, 2117], [1989, 1990, 2118], [1990, 2119, 2118], [1990, 1991, 2120], [1990, 2120, 2119], [1991, 1992, 2120], [1992, 2121, 2120], [1992, 1993, 2122], [1992, 2122, 2121], [1993, 1994, 2122], [1994, 2123, 2122], [1994, 1995, 2124], [1994, 2124, 2123], [1995, 1996, 2124], [1996, 2125, 2124], [1996, 1997, 2126], [1996, 2126, 2125], [1997, 1998, 2126], [1998, 2127, 2126], [1998, 1999, 2128], [1998, 2128, 2127], [1999, 2000, 2128], [2000, 2129, 2128], [2000, 2001, 2130], [2000, 2130, 2129], [2001, 2002, 2130], [2002, 2131, 2130], [2002, 2003, 2132], [2002, 2132, 2131], [2003, 2004, 2132], [2004, 2133, 2132], [2004, 2005, 2134], [2004, 2134, 2133], [2005, 2006, 2134], [2006, 2135, 2134], [2006, 2007, 2136], [2006, 2136, 2135], [2007, 2008, 2136], [2008, 2137, 
2136], [2008, 2009, 2138], [2008, 2138, 2137], [2009, 2010, 2138], [2010, 2139, 2138], [2010, 2011, 2140], [2010, 2140, 2139], [2011, 2012, 2140], [2012, 2141, 2140], [2012, 2013, 2142], [2012, 2142, 2141], [2013, 2014, 2142], [2014, 2143, 2142], [2014, 2015, 2144], [2014, 2144, 2143], [2015, 2016, 2144], [2016, 2145, 2144], [2016, 2017, 2146], [2016, 2146, 2145], [2017, 2018, 2146], [2018, 2147, 2146], [2018, 2019, 2148], [2018, 2148, 2147], [2019, 2020, 2148], [2020, 2149, 2148], [2020, 2021, 2150], [2020, 2150, 2149], [2021, 2022, 2150], [2022, 2151, 2150], [2022, 2023, 2152], [2022, 2152, 2151], [2023, 2024, 2152], [2024, 2153, 2152], [2024, 2025, 2154], [2024, 2154, 2153], [2025, 2026, 2154], [2026, 2155, 2154], [2026, 2027, 2156], [2026, 2156, 2155], [2027, 2028, 2156], [2028, 2157, 2156], [2028, 2029, 2158], [2028, 2158, 2157], [2029, 2030, 2158], [2030, 2159, 2158], [2030, 2031, 2160], [2030, 2160, 2159], [2031, 2032, 2160], [2032, 2161, 2160], [2032, 2033, 2162], [2032, 2162, 2161], [2033, 2034, 2162], [2034, 2163, 2162], [2034, 2035, 2164], [2034, 2164, 2163], [2035, 2036, 2164], [2036, 2165, 2164], [2036, 2037, 2166], [2036, 2166, 2165], [2037, 2038, 2166], [2038, 2167, 2166], [2038, 2039, 2168], [2038, 2168, 2167], [2039, 2040, 2168], [2040, 2169, 2168], [2040, 2041, 2170], [2040, 2170, 2169], [2041, 2042, 2170], [2042, 2171, 2170], [2042, 2043, 2172], [2042, 2172, 2171], [2043, 2044, 2172], [2044, 2173, 2172], [2044, 2045, 2174], [2044, 2174, 2173], [2045, 2046, 2174], [2046, 2175, 2174], [2046, 2047, 2176], [2046, 2176, 2175], [2047, 2048, 2176], [2048, 2177, 2176], [2048, 2049, 2178], [2048, 2178, 2177], [2049, 2050, 2178], [2050, 2179, 2178], [2050, 2051, 2180], [2050, 2180, 2179], [2051, 2052, 2180], [2052, 2181, 2180], [2052, 2053, 2182], [2052, 2182, 2181], [2053, 2054, 2182], [2054, 2183, 2182], [2054, 2055, 2184], [2054, 2184, 2183], [2055, 2056, 2184], [2056, 2185, 2184], [2056, 2057, 2186], [2056, 2186, 2185], [2057, 2058, 2186], [2058, 2187, 2186], [2058, 2059, 2188], [2058, 2188, 2187], [2059, 2060, 2188], [2060, 2189, 2188], [2060, 2061, 2190], [2060, 2190, 2189], [2061, 2062, 2190], [2062, 2191, 2190], [2062, 2063, 2192], [2062, 2192, 2191], [2064, 2065, 2194], [2064, 2194, 2193], [2065, 2066, 2194], [2066, 2195, 2194], [2066, 2067, 2196], [2066, 2196, 2195], [2067, 2068, 2196], [2068, 2197, 2196], [2068, 2069, 2198], [2068, 2198, 2197], [2069, 2070, 2198], [2070, 2199, 2198], [2070, 2071, 2200], [2070, 2200, 2199], [2071, 2072, 2200], [2072, 2201, 2200], [2072, 2073, 2202], [2072, 2202, 2201], [2073, 2074, 2202], [2074, 2203, 2202], [2074, 2075, 2204], [2074, 2204, 2203], [2075, 2076, 2204], [2076, 2205, 2204], [2076, 2077, 2206], [2076, 2206, 2205], [2077, 2078, 2206], [2078, 2207, 2206], [2078, 2079, 2208], [2078, 2208, 2207], [2079, 2080, 2208], [2080, 2209, 2208], [2080, 2081, 2210], [2080, 2210, 2209], [2081, 2082, 2210], [2082, 2211, 2210], [2082, 2083, 2212], [2082, 2212, 2211], [2083, 2084, 2212], [2084, 2213, 2212], [2084, 2085, 2214], [2084, 2214, 2213], [2085, 2086, 2214], [2086, 2215, 2214], [2086, 2087, 2216], [2086, 2216, 2215], [2087, 2088, 2216], [2088, 2217, 2216], [2088, 2089, 2218], [2088, 2218, 2217], [2089, 2090, 2218], [2090, 2219, 2218], [2090, 2091, 2220], [2090, 2220, 2219], [2091, 2092, 2220], [2092, 2221, 2220], [2092, 2093, 2222], [2092, 2222, 2221], [2093, 2094, 2222], [2094, 2223, 2222], [2094, 2095, 2224], [2094, 2224, 2223], [2095, 2096, 2224], [2096, 2225, 2224], [2096, 2097, 2226], [2096, 2226, 2225], [2097, 2098, 2226], [2098, 
2227, 2226], [2098, 2099, 2228], [2098, 2228, 2227], [2099, 2100, 2228], [2100, 2229, 2228], [2100, 2101, 2230], [2100, 2230, 2229], [2101, 2102, 2230], [2102, 2231, 2230], [2102, 2103, 2232], [2102, 2232, 2231], [2103, 2104, 2232], [2104, 2233, 2232], [2104, 2105, 2234], [2104, 2234, 2233], [2105, 2106, 2234], [2106, 2235, 2234], [2106, 2107, 2236], [2106, 2236, 2235], [2107, 2108, 2236], [2108, 2237, 2236], [2108, 2109, 2238], [2108, 2238, 2237], [2109, 2110, 2238], [2110, 2239, 2238], [2110, 2111, 2240], [2110, 2240, 2239], [2111, 2112, 2240], [2112, 2241, 2240], [2112, 2113, 2242], [2112, 2242, 2241], [2113, 2114, 2242], [2114, 2243, 2242], [2114, 2115, 2244], [2114, 2244, 2243], [2115, 2116, 2244], [2116, 2245, 2244], [2116, 2117, 2246], [2116, 2246, 2245], [2117, 2118, 2246], [2118, 2247, 2246], [2118, 2119, 2248], [2118, 2248, 2247], [2119, 2120, 2248], [2120, 2249, 2248], [2120, 2121, 2250], [2120, 2250, 2249], [2121, 2122, 2250], [2122, 2251, 2250], [2122, 2123, 2252], [2122, 2252, 2251], [2123, 2124, 2252], [2124, 2253, 2252], [2124, 2125, 2254], [2124, 2254, 2253], [2125, 2126, 2254], [2126, 2255, 2254], [2126, 2127, 2256], [2126, 2256, 2255], [2127, 2128, 2256], [2128, 2257, 2256], [2128, 2129, 2258], [2128, 2258, 2257], [2129, 2130, 2258], [2130, 2259, 2258], [2130, 2131, 2260], [2130, 2260, 2259], [2131, 2132, 2260], [2132, 2261, 2260], [2132, 2133, 2262], [2132, 2262, 2261], [2133, 2134, 2262], [2134, 2263, 2262], [2134, 2135, 2264], [2134, 2264, 2263], [2135, 2136, 2264], [2136, 2265, 2264], [2136, 2137, 2266], [2136, 2266, 2265], [2137, 2138, 2266], [2138, 2267, 2266], [2138, 2139, 2268], [2138, 2268, 2267], [2139, 2140, 2268], [2140, 2269, 2268], [2140, 2141, 2270], [2140, 2270, 2269], [2141, 2142, 2270], [2142, 2271, 2270], [2142, 2143, 2272], [2142, 2272, 2271], [2143, 2144, 2272], [2144, 2273, 2272], [2144, 2145, 2274], [2144, 2274, 2273], [2145, 2146, 2274], [2146, 2275, 2274], [2146, 2147, 2276], [2146, 2276, 2275], [2147, 2148, 2276], [2148, 2277, 2276], [2148, 2149, 2278], [2148, 2278, 2277], [2149, 2150, 2278], [2150, 2279, 2278], [2150, 2151, 2280], [2150, 2280, 2279], [2151, 2152, 2280], [2152, 2281, 2280], [2152, 2153, 2282], [2152, 2282, 2281], [2153, 2154, 2282], [2154, 2283, 2282], [2154, 2155, 2284], [2154, 2284, 2283], [2155, 2156, 2284], [2156, 2285, 2284], [2156, 2157, 2286], [2156, 2286, 2285], [2157, 2158, 2286], [2158, 2287, 2286], [2158, 2159, 2288], [2158, 2288, 2287], [2159, 2160, 2288], [2160, 2289, 2288], [2160, 2161, 2290], [2160, 2290, 2289], [2161, 2162, 2290], [2162, 2291, 2290], [2162, 2163, 2292], [2162, 2292, 2291], [2163, 2164, 2292], [2164, 2293, 2292], [2164, 2165, 2294], [2164, 2294, 2293], [2165, 2166, 2294], [2166, 2295, 2294], [2166, 2167, 2296], [2166, 2296, 2295], [2167, 2168, 2296], [2168, 2297, 2296], [2168, 2169, 2298], [2168, 2298, 2297], [2169, 2170, 2298], [2170, 2299, 2298], [2170, 2171, 2300], [2170, 2300, 2299], [2171, 2172, 2300], [2172, 2301, 2300], [2172, 2173, 2302], [2172, 2302, 2301], [2173, 2174, 2302], [2174, 2303, 2302], [2174, 2175, 2304], [2174, 2304, 2303], [2175, 2176, 2304], [2176, 2305, 2304], [2176, 2177, 2306], [2176, 2306, 2305], [2177, 2178, 2306], [2178, 2307, 2306], [2178, 2179, 2308], [2178, 2308, 2307], [2179, 2180, 2308], [2180, 2309, 2308], [2180, 2181, 2310], [2180, 2310, 2309], [2181, 2182, 2310], [2182, 2311, 2310], [2182, 2183, 2312], [2182, 2312, 2311], [2183, 2184, 2312], [2184, 2313, 2312], [2184, 2185, 2314], [2184, 2314, 2313], [2185, 2186, 2314], [2186, 2315, 2314], [2186, 2187, 2316], 
[2186, 2316, 2315], [2187, 2188, 2316], [2188, 2317, 2316], [2188, 2189, 2318], [2188, 2318, 2317], [2189, 2190, 2318], [2190, 2319, 2318], [2190, 2191, 2320], [2190, 2320, 2319], [2191, 2192, 2320], [2192, 2321, 2320], [2193, 2194, 2322], [2194, 2323, 2322], [2194, 2195, 2324], [2194, 2324, 2323], [2195, 2196, 2324], [2196, 2325, 2324], [2196, 2197, 2326], [2196, 2326, 2325], [2197, 2198, 2326], [2198, 2327, 2326], [2198, 2199, 2328], [2198, 2328, 2327], [2199, 2200, 2328], [2200, 2329, 2328], [2200, 2201, 2330], [2200, 2330, 2329], [2201, 2202, 2330], [2202, 2331, 2330], [2202, 2203, 2332], [2202, 2332, 2331], [2203, 2204, 2332], [2204, 2333, 2332], [2204, 2205, 2334], [2204, 2334, 2333], [2205, 2206, 2334], [2206, 2335, 2334], [2206, 2207, 2336], [2206, 2336, 2335], [2207, 2208, 2336], [2208, 2337, 2336], [2208, 2209, 2338], [2208, 2338, 2337], [2209, 2210, 2338], [2210, 2339, 2338], [2210, 2211, 2340], [2210, 2340, 2339], [2211, 2212, 2340], [2212, 2341, 2340], [2212, 2213, 2342], [2212, 2342, 2341], [2213, 2214, 2342], [2214, 2343, 2342], [2214, 2215, 2344], [2214, 2344, 2343], [2215, 2216, 2344], [2216, 2345, 2344], [2216, 2217, 2346], [2216, 2346, 2345], [2217, 2218, 2346], [2218, 2347, 2346], [2218, 2219, 2348], [2218, 2348, 2347], [2219, 2220, 2348], [2220, 2349, 2348], [2220, 2221, 2350], [2220, 2350, 2349], [2221, 2222, 2350], [2222, 2351, 2350], [2222, 2223, 2352], [2222, 2352, 2351], [2223, 2224, 2352], [2224, 2353, 2352], [2224, 2225, 2354], [2224, 2354, 2353], [2225, 2226, 2354], [2226, 2355, 2354], [2226, 2227, 2356], [2226, 2356, 2355], [2227, 2228, 2356], [2228, 2357, 2356], [2228, 2229, 2358], [2228, 2358, 2357], [2229, 2230, 2358], [2230, 2359, 2358], [2230, 2231, 2360], [2230, 2360, 2359], [2231, 2232, 2360], [2232, 2361, 2360], [2232, 2233, 2362], [2232, 2362, 2361], [2233, 2234, 2362], [2234, 2363, 2362], [2234, 2235, 2364], [2234, 2364, 2363], [2235, 2236, 2364], [2236, 2365, 2364], [2236, 2237, 2366], [2236, 2366, 2365], [2237, 2238, 2366], [2238, 2367, 2366], [2238, 2239, 2368], [2238, 2368, 2367], [2239, 2240, 2368], [2240, 2369, 2368], [2240, 2241, 2370], [2240, 2370, 2369], [2241, 2242, 2370], [2242, 2371, 2370], [2242, 2243, 2372], [2242, 2372, 2371], [2243, 2244, 2372], [2244, 2373, 2372], [2244, 2245, 2374], [2244, 2374, 2373], [2245, 2246, 2374], [2246, 2375, 2374], [2246, 2247, 2376], [2246, 2376, 2375], [2247, 2248, 2376], [2248, 2377, 2376], [2248, 2249, 2378], [2248, 2378, 2377], [2249, 2250, 2378], [2250, 2379, 2378], [2250, 2251, 2380], [2250, 2380, 2379], [2251, 2252, 2380], [2252, 2381, 2380], [2252, 2253, 2382], [2252, 2382, 2381], [2253, 2254, 2382], [2254, 2383, 2382], [2254, 2255, 2384], [2254, 2384, 2383], [2255, 2256, 2384], [2256, 2385, 2384], [2256, 2257, 2386], [2256, 2386, 2385], [2257, 2258, 2386], [2258, 2387, 2386], [2258, 2259, 2388], [2258, 2388, 2387], [2259, 2260, 2388], [2260, 2389, 2388], [2260, 2261, 2390], [2260, 2390, 2389], [2261, 2262, 2390], [2262, 2391, 2390], [2262, 2263, 2392], [2262, 2392, 2391], [2263, 2264, 2392], [2264, 2393, 2392], [2264, 2265, 2394], [2264, 2394, 2393], [2265, 2266, 2394], [2266, 2395, 2394], [2266, 2267, 2396], [2266, 2396, 2395], [2267, 2268, 2396], [2268, 2397, 2396], [2268, 2269, 2398], [2268, 2398, 2397], [2269, 2270, 2398], [2270, 2399, 2398], [2270, 2271, 2400], [2270, 2400, 2399], [2271, 2272, 2400], [2272, 2401, 2400], [2272, 2273, 2402], [2272, 2402, 2401], [2273, 2274, 2402], [2274, 2403, 2402], [2274, 2275, 2404], [2274, 2404, 2403], [2275, 2276, 2404], [2276, 2405, 2404], [2276, 2277, 
2406], [2276, 2406, 2405], [2277, 2278, 2406], [2278, 2407, 2406], [2278, 2279, 2408], [2278, 2408, 2407], [2279, 2280, 2408], [2280, 2409, 2408], [2280, 2281, 2410], [2280, 2410, 2409], [2281, 2282, 2410], [2282, 2411, 2410], [2282, 2283, 2412], [2282, 2412, 2411], [2283, 2284, 2412], [2284, 2413, 2412], [2284, 2285, 2414], [2284, 2414, 2413], [2285, 2286, 2414], [2286, 2415, 2414], [2286, 2287, 2416], [2286, 2416, 2415], [2287, 2288, 2416], [2288, 2417, 2416], [2288, 2289, 2418], [2288, 2418, 2417], [2289, 2290, 2418], [2290, 2419, 2418], [2290, 2291, 2420], [2290, 2420, 2419], [2291, 2292, 2420], [2292, 2421, 2420], [2292, 2293, 2422], [2292, 2422, 2421], [2293, 2294, 2422], [2294, 2423, 2422], [2294, 2295, 2424], [2294, 2424, 2423], [2295, 2296, 2424], [2296, 2425, 2424], [2296, 2297, 2426], [2296, 2426, 2425], [2297, 2298, 2426], [2298, 2427, 2426], [2298, 2299, 2428], [2298, 2428, 2427], [2299, 2300, 2428], [2300, 2429, 2428], [2300, 2301, 2430], [2300, 2430, 2429], [2301, 2302, 2430], [2302, 2431, 2430], [2302, 2303, 2432], [2302, 2432, 2431], [2303, 2304, 2432], [2304, 2433, 2432], [2304, 2305, 2434], [2304, 2434, 2433], [2305, 2306, 2434], [2306, 2435, 2434], [2306, 2307, 2436], [2306, 2436, 2435], [2307, 2308, 2436], [2308, 2437, 2436], [2308, 2309, 2438], [2308, 2438, 2437], [2309, 2310, 2438], [2310, 2439, 2438], [2310, 2311, 2440], [2310, 2440, 2439], [2311, 2312, 2440], [2312, 2441, 2440], [2312, 2313, 2442], [2312, 2442, 2441], [2313, 2314, 2442], [2314, 2443, 2442], [2314, 2315, 2444], [2314, 2444, 2443], [2315, 2316, 2444], [2316, 2445, 2444], [2316, 2317, 2446], [2316, 2446, 2445], [2317, 2318, 2446], [2318, 2447, 2446], [2318, 2319, 2448], [2318, 2448, 2447], [2319, 2320, 2448], [2320, 2449, 2448], [2320, 2321, 2450], [2320, 2450, 2449], [2322, 2323, 2452], [2322, 2452, 2451], [2323, 2324, 2452], [2324, 2453, 2452], [2324, 2325, 2454], [2324, 2454, 2453], [2325, 2326, 2454], [2326, 2455, 2454], [2326, 2327, 2456], [2326, 2456, 2455], [2327, 2328, 2456], [2328, 2457, 2456], [2328, 2329, 2458], [2328, 2458, 2457], [2329, 2330, 2458], [2330, 2459, 2458], [2330, 2331, 2460], [2330, 2460, 2459], [2331, 2332, 2460], [2332, 2461, 2460], [2332, 2333, 2462], [2332, 2462, 2461], [2333, 2334, 2462], [2334, 2463, 2462], [2334, 2335, 2464], [2334, 2464, 2463], [2335, 2336, 2464], [2336, 2465, 2464], [2336, 2337, 2466], [2336, 2466, 2465], [2337, 2338, 2466], [2338, 2467, 2466], [2338, 2339, 2468], [2338, 2468, 2467], [2339, 2340, 2468], [2340, 2469, 2468], [2340, 2341, 2470], [2340, 2470, 2469], [2341, 2342, 2470], [2342, 2471, 2470], [2342, 2343, 2472], [2342, 2472, 2471], [2343, 2344, 2472], [2344, 2473, 2472], [2344, 2345, 2474], [2344, 2474, 2473], [2345, 2346, 2474], [2346, 2475, 2474], [2346, 2347, 2476], [2346, 2476, 2475], [2347, 2348, 2476], [2348, 2477, 2476], [2348, 2349, 2478], [2348, 2478, 2477], [2349, 2350, 2478], [2350, 2479, 2478], [2350, 2351, 2480], [2350, 2480, 2479], [2351, 2352, 2480], [2352, 2481, 2480], [2352, 2353, 2482], [2352, 2482, 2481], [2353, 2354, 2482], [2354, 2483, 2482], [2354, 2355, 2484], [2354, 2484, 2483], [2355, 2356, 2484], [2356, 2485, 2484], [2356, 2357, 2486], [2356, 2486, 2485], [2357, 2358, 2486], [2358, 2487, 2486], [2358, 2359, 2488], [2358, 2488, 2487], [2359, 2360, 2488], [2360, 2489, 2488], [2360, 2361, 2490], [2360, 2490, 2489], [2361, 2362, 2490], [2362, 2491, 2490], [2362, 2363, 2492], [2362, 2492, 2491], [2363, 2364, 2492], [2364, 2493, 2492], [2364, 2365, 2494], [2364, 2494, 2493], [2365, 2366, 2494], [2366, 2495, 2494], [2366, 
2367, 2496], [2366, 2496, 2495], [2367, 2368, 2496], [2368, 2497, 2496], [2368, 2369, 2498], [2368, 2498, 2497], [2369, 2370, 2498], [2370, 2499, 2498], [2370, 2371, 2500], [2370, 2500, 2499], [2371, 2372, 2500], [2372, 2501, 2500], [2372, 2373, 2502], [2372, 2502, 2501], [2373, 2374, 2502], [2374, 2503, 2502], [2374, 2375, 2504], [2374, 2504, 2503], [2375, 2376, 2504], [2376, 2505, 2504], [2376, 2377, 2506], [2376, 2506, 2505], [2377, 2378, 2506], [2378, 2507, 2506], [2378, 2379, 2508], [2378, 2508, 2507], [2379, 2380, 2508], [2380, 2509, 2508], [2380, 2381, 2510], [2380, 2510, 2509], [2381, 2382, 2510], [2382, 2511, 2510], [2382, 2383, 2512], [2382, 2512, 2511], [2383, 2384, 2512], [2384, 2513, 2512], [2384, 2385, 2514], [2384, 2514, 2513], [2385, 2386, 2514], [2386, 2515, 2514], [2386, 2387, 2516], [2386, 2516, 2515], [2387, 2388, 2516], [2388, 2517, 2516], [2388, 2389, 2518], [2388, 2518, 2517], [2389, 2390, 2518], [2390, 2519, 2518], [2390, 2391, 2520], [2390, 2520, 2519], [2391, 2392, 2520], [2392, 2521, 2520], [2392, 2393, 2522], [2392, 2522, 2521], [2393, 2394, 2522], [2394, 2523, 2522], [2394, 2395, 2524], [2394, 2524, 2523], [2395, 2396, 2524], [2396, 2525, 2524], [2396, 2397, 2526], [2396, 2526, 2525], [2397, 2398, 2526], [2398, 2527, 2526], [2398, 2399, 2528], [2398, 2528, 2527], [2399, 2400, 2528], [2400, 2529, 2528], [2400, 2401, 2530], [2400, 2530, 2529], [2401, 2402, 2530], [2402, 2531, 2530], [2402, 2403, 2532], [2402, 2532, 2531], [2403, 2404, 2532], [2404, 2533, 2532], [2404, 2405, 2534], [2404, 2534, 2533], [2405, 2406, 2534], [2406, 2535, 2534], [2406, 2407, 2536], [2406, 2536, 2535], [2407, 2408, 2536], [2408, 2537, 2536], [2408, 2409, 2538], [2408, 2538, 2537], [2409, 2410, 2538], [2410, 2539, 2538], [2410, 2411, 2540], [2410, 2540, 2539], [2411, 2412, 2540], [2412, 2541, 2540], [2412, 2413, 2542], [2412, 2542, 2541], [2413, 2414, 2542], [2414, 2543, 2542], [2414, 2415, 2544], [2414, 2544, 2543], [2415, 2416, 2544], [2416, 2545, 2544], [2416, 2417, 2546], [2416, 2546, 2545], [2417, 2418, 2546], [2418, 2547, 2546], [2418, 2419, 2548], [2418, 2548, 2547], [2419, 2420, 2548], [2420, 2549, 2548], [2420, 2421, 2550], [2420, 2550, 2549], [2421, 2422, 2550], [2422, 2551, 2550], [2422, 2423, 2552], [2422, 2552, 2551], [2423, 2424, 2552], [2424, 2553, 2552], [2424, 2425, 2554], [2424, 2554, 2553], [2425, 2426, 2554], [2426, 2555, 2554], [2426, 2427, 2556], [2426, 2556, 2555], [2427, 2428, 2556], [2428, 2557, 2556], [2428, 2429, 2558], [2428, 2558, 2557], [2429, 2430, 2558], [2430, 2559, 2558], [2430, 2431, 2560], [2430, 2560, 2559], [2431, 2432, 2560], [2432, 2561, 2560], [2432, 2433, 2562], [2432, 2562, 2561], [2433, 2434, 2562], [2434, 2563, 2562], [2434, 2435, 2564], [2434, 2564, 2563], [2435, 2436, 2564], [2436, 2565, 2564], [2436, 2437, 2566], [2436, 2566, 2565], [2437, 2438, 2566], [2438, 2567, 2566], [2438, 2439, 2568], [2438, 2568, 2567], [2439, 2440, 2568], [2440, 2569, 2568], [2440, 2441, 2570], [2440, 2570, 2569], [2441, 2442, 2570], [2442, 2571, 2570], [2442, 2443, 2572], [2442, 2572, 2571], [2443, 2444, 2572], [2444, 2573, 2572], [2444, 2445, 2574], [2444, 2574, 2573], [2445, 2446, 2574], [2446, 2575, 2574], [2446, 2447, 2576], [2446, 2576, 2575], [2447, 2448, 2576], [2448, 2577, 2576], [2448, 2449, 2578], [2448, 2578, 2577], [2449, 2450, 2578], [2450, 2579, 2578], [2451, 2452, 2580], [2452, 2581, 2580], [2452, 2453, 2582], [2452, 2582, 2581], [2453, 2454, 2582], [2454, 2583, 2582], [2454, 2455, 2584], [2454, 2584, 2583], [2455, 2456, 2584], [2456, 2585, 2584], 
[2456, 2457, 2586], [2456, 2586, 2585], [2457, 2458, 2586], [2458, 2587, 2586], [2458, 2459, 2588], [2458, 2588, 2587], [2459, 2460, 2588], [2460, 2589, 2588], [2460, 2461, 2590], [2460, 2590, 2589], [2461, 2462, 2590], [2462, 2591, 2590], [2462, 2463, 2592], [2462, 2592, 2591], [2463, 2464, 2592], [2464, 2593, 2592], [2464, 2465, 2594], [2464, 2594, 2593], [2465, 2466, 2594], [2466, 2595, 2594], [2466, 2467, 2596], [2466, 2596, 2595], [2467, 2468, 2596], [2468, 2597, 2596], [2468, 2469, 2598], [2468, 2598, 2597], [2469, 2470, 2598], [2470, 2599, 2598], [2470, 2471, 2600], [2470, 2600, 2599], [2471, 2472, 2600], [2472, 2601, 2600], [2472, 2473, 2602], [2472, 2602, 2601], [2473, 2474, 2602], [2474, 2603, 2602], [2474, 2475, 2604], [2474, 2604, 2603], [2475, 2476, 2604], [2476, 2605, 2604], [2476, 2477, 2606], [2476, 2606, 2605], [2477, 2478, 2606], [2478, 2607, 2606], [2478, 2479, 2608], [2478, 2608, 2607], [2479, 2480, 2608], [2480, 2609, 2608], [2480, 2481, 2610], [2480, 2610, 2609], [2481, 2482, 2610], [2482, 2611, 2610], [2482, 2483, 2612], [2482, 2612, 2611], [2483, 2484, 2612], [2484, 2613, 2612], [2484, 2485, 2614], [2484, 2614, 2613], [2485, 2486, 2614], [2486, 2615, 2614], [2486, 2487, 2616], [2486, 2616, 2615], [2487, 2488, 2616], [2488, 2617, 2616], [2488, 2489, 2618], [2488, 2618, 2617], [2489, 2490, 2618], [2490, 2619, 2618], [2490, 2491, 2620], [2490, 2620, 2619], [2491, 2492, 2620], [2492, 2621, 2620], [2492, 2493, 2622], [2492, 2622, 2621], [2493, 2494, 2622], [2494, 2623, 2622], [2494, 2495, 2624], [2494, 2624, 2623], [2495, 2496, 2624], [2496, 2625, 2624], [2496, 2497, 2626], [2496, 2626, 2625], [2497, 2498, 2626], [2498, 2627, 2626], [2498, 2499, 2628], [2498, 2628, 2627], [2499, 2500, 2628], [2500, 2629, 2628], [2500, 2501, 2630], [2500, 2630, 2629], [2501, 2502, 2630], [2502, 2631, 2630], [2502, 2503, 2632], [2502, 2632, 2631], [2503, 2504, 2632], [2504, 2633, 2632], [2504, 2505, 2634], [2504, 2634, 2633], [2505, 2506, 2634], [2506, 2635, 2634], [2506, 2507, 2636], [2506, 2636, 2635], [2507, 2508, 2636], [2508, 2637, 2636], [2508, 2509, 2638], [2508, 2638, 2637], [2509, 2510, 2638], [2510, 2639, 2638], [2510, 2511, 2640], [2510, 2640, 2639], [2511, 2512, 2640], [2512, 2641, 2640], [2512, 2513, 2642], [2512, 2642, 2641], [2513, 2514, 2642], [2514, 2643, 2642], [2514, 2515, 2644], [2514, 2644, 2643], [2515, 2516, 2644], [2516, 2645, 2644], [2516, 2517, 2646], [2516, 2646, 2645], [2517, 2518, 2646], [2518, 2647, 2646], [2518, 2519, 2648], [2518, 2648, 2647], [2519, 2520, 2648], [2520, 2649, 2648], [2520, 2521, 2650], [2520, 2650, 2649], [2521, 2522, 2650], [2522, 2651, 2650], [2522, 2523, 2652], [2522, 2652, 2651], [2523, 2524, 2652], [2524, 2653, 2652], [2524, 2525, 2654], [2524, 2654, 2653], [2525, 2526, 2654], [2526, 2655, 2654], [2526, 2527, 2656], [2526, 2656, 2655], [2527, 2528, 2656], [2528, 2657, 2656], [2528, 2529, 2658], [2528, 2658, 2657], [2529, 2530, 2658], [2530, 2659, 2658], [2530, 2531, 2660], [2530, 2660, 2659], [2531, 2532, 2660], [2532, 2661, 2660], [2532, 2533, 2662], [2532, 2662, 2661], [2533, 2534, 2662], [2534, 2663, 2662], [2534, 2535, 2664], [2534, 2664, 2663], [2535, 2536, 2664], [2536, 2665, 2664], [2536, 2537, 2666], [2536, 2666, 2665], [2537, 2538, 2666], [2538, 2667, 2666], [2538, 2539, 2668], [2538, 2668, 2667], [2539, 2540, 2668], [2540, 2669, 2668], [2540, 2541, 2670], [2540, 2670, 2669], [2541, 2542, 2670], [2542, 2671, 2670], [2542, 2543, 2672], [2542, 2672, 2671], [2543, 2544, 2672], [2544, 2673, 2672], [2544, 2545, 2674], [2544, 2674, 
2673], [2545, 2546, 2674], [2546, 2675, 2674], [2546, 2547, 2676], [2546, 2676, 2675], [2547, 2548, 2676], [2548, 2677, 2676], [2548, 2549, 2678], [2548, 2678, 2677], [2549, 2550, 2678], [2550, 2679, 2678], [2550, 2551, 2680], [2550, 2680, 2679], [2551, 2552, 2680], [2552, 2681, 2680], [2552, 2553, 2682], [2552, 2682, 2681], [2553, 2554, 2682], [2554, 2683, 2682], [2554, 2555, 2684], [2554, 2684, 2683], [2555, 2556, 2684], [2556, 2685, 2684], [2556, 2557, 2686], [2556, 2686, 2685], [2557, 2558, 2686], [2558, 2687, 2686], [2558, 2559, 2688], [2558, 2688, 2687], [2559, 2560, 2688], [2560, 2689, 2688], [2560, 2561, 2690], [2560, 2690, 2689], [2561, 2562, 2690], [2562, 2691, 2690], [2562, 2563, 2692], [2562, 2692, 2691], [2563, 2564, 2692], [2564, 2693, 2692], [2564, 2565, 2694], [2564, 2694, 2693], [2565, 2566, 2694], [2566, 2695, 2694], [2566, 2567, 2696], [2566, 2696, 2695], [2567, 2568, 2696], [2568, 2697, 2696], [2568, 2569, 2698], [2568, 2698, 2697], [2569, 2570, 2698], [2570, 2699, 2698], [2570, 2571, 2700], [2570, 2700, 2699], [2571, 2572, 2700], [2572, 2701, 2700], [2572, 2573, 2702], [2572, 2702, 2701], [2573, 2574, 2702], [2574, 2703, 2702], [2574, 2575, 2704], [2574, 2704, 2703], [2575, 2576, 2704], [2576, 2705, 2704], [2576, 2577, 2706], [2576, 2706, 2705], [2577, 2578, 2706], [2578, 2707, 2706], [2578, 2579, 2708], [2578, 2708, 2707], [2580, 2581, 2710], [2580, 2710, 2709], [2581, 2582, 2710], [2582, 2711, 2710], [2582, 2583, 2712], [2582, 2712, 2711], [2583, 2584, 2712], [2584, 2713, 2712], [2584, 2585, 2714], [2584, 2714, 2713], [2585, 2586, 2714], [2586, 2715, 2714], [2586, 2587, 2716], [2586, 2716, 2715], [2587, 2588, 2716], [2588, 2717, 2716], [2588, 2589, 2718], [2588, 2718, 2717], [2589, 2590, 2718], [2590, 2719, 2718], [2590, 2591, 2720], [2590, 2720, 2719], [2591, 2592, 2720], [2592, 2721, 2720], [2592, 2593, 2722], [2592, 2722, 2721], [2593, 2594, 2722], [2594, 2723, 2722], [2594, 2595, 2724], [2594, 2724, 2723], [2595, 2596, 2724], [2596, 2725, 2724], [2596, 2597, 2726], [2596, 2726, 2725], [2597, 2598, 2726], [2598, 2727, 2726], [2598, 2599, 2728], [2598, 2728, 2727], [2599, 2600, 2728], [2600, 2729, 2728], [2600, 2601, 2730], [2600, 2730, 2729], [2601, 2602, 2730], [2602, 2731, 2730], [2602, 2603, 2732], [2602, 2732, 2731], [2603, 2604, 2732], [2604, 2733, 2732], [2604, 2605, 2734], [2604, 2734, 2733], [2605, 2606, 2734], [2606, 2735, 2734], [2606, 2607, 2736], [2606, 2736, 2735], [2607, 2608, 2736], [2608, 2737, 2736], [2608, 2609, 2738], [2608, 2738, 2737], [2609, 2610, 2738], [2610, 2739, 2738], [2610, 2611, 2740], [2610, 2740, 2739], [2611, 2612, 2740], [2612, 2741, 2740], [2612, 2613, 2742], [2612, 2742, 2741], [2613, 2614, 2742], [2614, 2743, 2742], [2614, 2615, 2744], [2614, 2744, 2743], [2615, 2616, 2744], [2616, 2745, 2744], [2616, 2617, 2746], [2616, 2746, 2745], [2617, 2618, 2746], [2618, 2747, 2746], [2618, 2619, 2748], [2618, 2748, 2747], [2619, 2620, 2748], [2620, 2749, 2748], [2620, 2621, 2750], [2620, 2750, 2749], [2621, 2622, 2750], [2622, 2751, 2750], [2622, 2623, 2752], [2622, 2752, 2751], [2623, 2624, 2752], [2624, 2753, 2752], [2624, 2625, 2754], [2624, 2754, 2753], [2625, 2626, 2754], [2626, 2755, 2754], [2626, 2627, 2756], [2626, 2756, 2755], [2627, 2628, 2756], [2628, 2757, 2756], [2628, 2629, 2758], [2628, 2758, 2757], [2629, 2630, 2758], [2630, 2759, 2758], [2630, 2631, 2760], [2630, 2760, 2759], [2631, 2632, 2760], [2632, 2761, 2760], [2632, 2633, 2762], [2632, 2762, 2761], [2633, 2634, 2762], [2634, 2763, 2762], [2634, 2635, 2764], [2634, 
2764, 2763], [2635, 2636, 2764], [2636, 2765, 2764], [2636, 2637, 2766], [2636, 2766, 2765], [2637, 2638, 2766], [2638, 2767, 2766], [2638, 2639, 2768], [2638, 2768, 2767], [2639, 2640, 2768], [2640, 2769, 2768], [2640, 2641, 2770], [2640, 2770, 2769], [2641, 2642, 2770], [2642, 2771, 2770], [2642, 2643, 2772], [2642, 2772, 2771], [2643, 2644, 2772], [2644, 2773, 2772], [2644, 2645, 2774], [2644, 2774, 2773], [2645, 2646, 2774], [2646, 2775, 2774], [2646, 2647, 2776], [2646, 2776, 2775], [2647, 2648, 2776], [2648, 2777, 2776], [2648, 2649, 2778], [2648, 2778, 2777], [2649, 2650, 2778], [2650, 2779, 2778], [2650, 2651, 2780], [2650, 2780, 2779], [2651, 2652, 2780], [2652, 2781, 2780], [2652, 2653, 2782], [2652, 2782, 2781], [2653, 2654, 2782], [2654, 2783, 2782], [2654, 2655, 2784], [2654, 2784, 2783], [2655, 2656, 2784], [2656, 2785, 2784], [2656, 2657, 2786], [2656, 2786, 2785], [2657, 2658, 2786], [2658, 2787, 2786], [2658, 2659, 2788], [2658, 2788, 2787], [2659, 2660, 2788], [2660, 2789, 2788], [2660, 2661, 2790], [2660, 2790, 2789], [2661, 2662, 2790], [2662, 2791, 2790], [2662, 2663, 2792], [2662, 2792, 2791], [2663, 2664, 2792], [2664, 2793, 2792], [2664, 2665, 2794], [2664, 2794, 2793], [2665, 2666, 2794], [2666, 2795, 2794], [2666, 2667, 2796], [2666, 2796, 2795], [2667, 2668, 2796], [2668, 2797, 2796], [2668, 2669, 2798], [2668, 2798, 2797], [2669, 2670, 2798], [2670, 2799, 2798], [2670, 2671, 2800], [2670, 2800, 2799], [2671, 2672, 2800], [2672, 2801, 2800], [2672, 2673, 2802], [2672, 2802, 2801], [2673, 2674, 2802], [2674, 2803, 2802], [2674, 2675, 2804], [2674, 2804, 2803], [2675, 2676, 2804], [2676, 2805, 2804], [2676, 2677, 2806], [2676, 2806, 2805], [2677, 2678, 2806], [2678, 2807, 2806], [2678, 2679, 2808], [2678, 2808, 2807], [2679, 2680, 2808], [2680, 2809, 2808], [2680, 2681, 2810], [2680, 2810, 2809], [2681, 2682, 2810], [2682, 2811, 2810], [2682, 2683, 2812], [2682, 2812, 2811], [2683, 2684, 2812], [2684, 2813, 2812], [2684, 2685, 2814], [2684, 2814, 2813], [2685, 2686, 2814], [2686, 2815, 2814], [2686, 2687, 2816], [2686, 2816, 2815], [2687, 2688, 2816], [2688, 2817, 2816], [2688, 2689, 2818], [2688, 2818, 2817], [2689, 2690, 2818], [2690, 2819, 2818], [2690, 2691, 2820], [2690, 2820, 2819], [2691, 2692, 2820], [2692, 2821, 2820], [2692, 2693, 2822], [2692, 2822, 2821], [2693, 2694, 2822], [2694, 2823, 2822], [2694, 2695, 2824], [2694, 2824, 2823], [2695, 2696, 2824], [2696, 2825, 2824], [2696, 2697, 2826], [2696, 2826, 2825], [2697, 2698, 2826], [2698, 2827, 2826], [2698, 2699, 2828], [2698, 2828, 2827], [2699, 2700, 2828], [2700, 2829, 2828], [2700, 2701, 2830], [2700, 2830, 2829], [2701, 2702, 2830], [2702, 2831, 2830], [2702, 2703, 2832], [2702, 2832, 2831], [2703, 2704, 2832], [2704, 2833, 2832], [2704, 2705, 2834], [2704, 2834, 2833], [2705, 2706, 2834], [2706, 2835, 2834], [2706, 2707, 2836], [2706, 2836, 2835], [2707, 2708, 2836], [2708, 2837, 2836], [2709, 2710, 2838], [2710, 2839, 2838], [2710, 2711, 2840], [2710, 2840, 2839], [2711, 2712, 2840], [2712, 2841, 2840], [2712, 2713, 2842], [2712, 2842, 2841], [2713, 2714, 2842], [2714, 2843, 2842], [2714, 2715, 2844], [2714, 2844, 2843], [2715, 2716, 2844], [2716, 2845, 2844], [2716, 2717, 2846], [2716, 2846, 2845], [2717, 2718, 2846], [2718, 2847, 2846], [2718, 2719, 2848], [2718, 2848, 2847], [2719, 2720, 2848], [2720, 2849, 2848], [2720, 2721, 2850], [2720, 2850, 2849], [2721, 2722, 2850], [2722, 2851, 2850], [2722, 2723, 2852], [2722, 2852, 2851], [2723, 2724, 2852], [2724, 2853, 2852], [2724, 2725, 2854], 
[2724, 2854, 2853], [2725, 2726, 2854], [2726, 2855, 2854], [2726, 2727, 2856], [2726, 2856, 2855], [2727, 2728, 2856], [2728, 2857, 2856], [2728, 2729, 2858], [2728, 2858, 2857], [2729, 2730, 2858], [2730, 2859, 2858], [2730, 2731, 2860], [2730, 2860, 2859], [2731, 2732, 2860], [2732, 2861, 2860], [2732, 2733, 2862], [2732, 2862, 2861], [2733, 2734, 2862], [2734, 2863, 2862], [2734, 2735, 2864], [2734, 2864, 2863], [2735, 2736, 2864], [2736, 2865, 2864], [2736, 2737, 2866], [2736, 2866, 2865], [2737, 2738, 2866], [2738, 2867, 2866], [2738, 2739, 2868], [2738, 2868, 2867], [2739, 2740, 2868], [2740, 2869, 2868], [2740, 2741, 2870], [2740, 2870, 2869], [2741, 2742, 2870], [2742, 2871, 2870], [2742, 2743, 2872], [2742, 2872, 2871], [2743, 2744, 2872], [2744, 2873, 2872], [2744, 2745, 2874], [2744, 2874, 2873], [2745, 2746, 2874], [2746, 2875, 2874], [2746, 2747, 2876], [2746, 2876, 2875], [2747, 2748, 2876], [2748, 2877, 2876], [2748, 2749, 2878], [2748, 2878, 2877], [2749, 2750, 2878], [2750, 2879, 2878], [2750, 2751, 2880], [2750, 2880, 2879], [2751, 2752, 2880], [2752, 2881, 2880], [2752, 2753, 2882], [2752, 2882, 2881], [2753, 2754, 2882], [2754, 2883, 2882], [2754, 2755, 2884], [2754, 2884, 2883], [2755, 2756, 2884], [2756, 2885, 2884], [2756, 2757, 2886], [2756, 2886, 2885], [2757, 2758, 2886], [2758, 2887, 2886], [2758, 2759, 2888], [2758, 2888, 2887], [2759, 2760, 2888], [2760, 2889, 2888], [2760, 2761, 2890], [2760, 2890, 2889], [2761, 2762, 2890], [2762, 2891, 2890], [2762, 2763, 2892], [2762, 2892, 2891], [2763, 2764, 2892], [2764, 2893, 2892], [2764, 2765, 2894], [2764, 2894, 2893], [2765, 2766, 2894], [2766, 2895, 2894], [2766, 2767, 2896], [2766, 2896, 2895], [2767, 2768, 2896], [2768, 2897, 2896], [2768, 2769, 2898], [2768, 2898, 2897], [2769, 2770, 2898], [2770, 2899, 2898], [2770, 2771, 2900], [2770, 2900, 2899], [2771, 2772, 2900], [2772, 2901, 2900], [2772, 2773, 2902], [2772, 2902, 2901], [2773, 2774, 2902], [2774, 2903, 2902], [2774, 2775, 2904], [2774, 2904, 2903], [2775, 2776, 2904], [2776, 2905, 2904], [2776, 2777, 2906], [2776, 2906, 2905], [2777, 2778, 2906], [2778, 2907, 2906], [2778, 2779, 2908], [2778, 2908, 2907], [2779, 2780, 2908], [2780, 2909, 2908], [2780, 2781, 2910], [2780, 2910, 2909], [2781, 2782, 2910], [2782, 2911, 2910], [2782, 2783, 2912], [2782, 2912, 2911], [2783, 2784, 2912], [2784, 2913, 2912], [2784, 2785, 2914], [2784, 2914, 2913], [2785, 2786, 2914], [2786, 2915, 2914], [2786, 2787, 2916], [2786, 2916, 2915], [2787, 2788, 2916], [2788, 2917, 2916], [2788, 2789, 2918], [2788, 2918, 2917], [2789, 2790, 2918], [2790, 2919, 2918], [2790, 2791, 2920], [2790, 2920, 2919], [2791, 2792, 2920], [2792, 2921, 2920], [2792, 2793, 2922], [2792, 2922, 2921], [2793, 2794, 2922], [2794, 2923, 2922], [2794, 2795, 2924], [2794, 2924, 2923], [2795, 2796, 2924], [2796, 2925, 2924], [2796, 2797, 2926], [2796, 2926, 2925], [2797, 2798, 2926], [2798, 2927, 2926], [2798, 2799, 2928], [2798, 2928, 2927], [2799, 2800, 2928], [2800, 2929, 2928], [2800, 2801, 2930], [2800, 2930, 2929], [2801, 2802, 2930], [2802, 2931, 2930], [2802, 2803, 2932], [2802, 2932, 2931], [2803, 2804, 2932], [2804, 2933, 2932], [2804, 2805, 2934], [2804, 2934, 2933], [2805, 2806, 2934], [2806, 2935, 2934], [2806, 2807, 2936], [2806, 2936, 2935], [2807, 2808, 2936], [2808, 2937, 2936], [2808, 2809, 2938], [2808, 2938, 2937], [2809, 2810, 2938], [2810, 2939, 2938], [2810, 2811, 2940], [2810, 2940, 2939], [2811, 2812, 2940], [2812, 2941, 2940], [2812, 2813, 2942], [2812, 2942, 2941], [2813, 2814, 
2942], [2814, 2943, 2942], [2814, 2815, 2944], [2814, 2944, 2943], [2815, 2816, 2944], [2816, 2945, 2944], [2816, 2817, 2946], [2816, 2946, 2945], [2817, 2818, 2946], [2818, 2947, 2946], [2818, 2819, 2948], [2818, 2948, 2947], [2819, 2820, 2948], [2820, 2949, 2948], [2820, 2821, 2950], [2820, 2950, 2949], [2821, 2822, 2950], [2822, 2951, 2950], [2822, 2823, 2952], [2822, 2952, 2951], [2823, 2824, 2952], [2824, 2953, 2952], [2824, 2825, 2954], [2824, 2954, 2953], [2825, 2826, 2954], [2826, 2955, 2954], [2826, 2827, 2956], [2826, 2956, 2955], [2827, 2828, 2956], [2828, 2957, 2956], [2828, 2829, 2958], [2828, 2958, 2957], [2829, 2830, 2958], [2830, 2959, 2958], [2830, 2831, 2960], [2830, 2960, 2959], [2831, 2832, 2960], [2832, 2961, 2960], [2832, 2833, 2962], [2832, 2962, 2961], [2833, 2834, 2962], [2834, 2963, 2962], [2834, 2835, 2964], [2834, 2964, 2963], [2835, 2836, 2964], [2836, 2965, 2964], [2836, 2837, 2966], [2836, 2966, 2965], [2838, 2839, 2968], [2838, 2968, 2967], [2839, 2840, 2968], [2840, 2969, 2968], [2840, 2841, 2970], [2840, 2970, 2969], [2841, 2842, 2970], [2842, 2971, 2970], [2842, 2843, 2972], [2842, 2972, 2971], [2843, 2844, 2972], [2844, 2973, 2972], [2844, 2845, 2974], [2844, 2974, 2973], [2845, 2846, 2974], [2846, 2975, 2974], [2846, 2847, 2976], [2846, 2976, 2975], [2847, 2848, 2976], [2848, 2977, 2976], [2848, 2849, 2978], [2848, 2978, 2977], [2849, 2850, 2978], [2850, 2979, 2978], [2850, 2851, 2980], [2850, 2980, 2979], [2851, 2852, 2980], [2852, 2981, 2980], [2852, 2853, 2982], [2852, 2982, 2981], [2853, 2854, 2982], [2854, 2983, 2982], [2854, 2855, 2984], [2854, 2984, 2983], [2855, 2856, 2984], [2856, 2985, 2984], [2856, 2857, 2986], [2856, 2986, 2985], [2857, 2858, 2986], [2858, 2987, 2986], [2858, 2859, 2988], [2858, 2988, 2987], [2859, 2860, 2988], [2860, 2989, 2988], [2860, 2861, 2990], [2860, 2990, 2989], [2861, 2862, 2990], [2862, 2991, 2990], [2862, 2863, 2992], [2862, 2992, 2991], [2863, 2864, 2992], [2864, 2993, 2992], [2864, 2865, 2994], [2864, 2994, 2993], [2865, 2866, 2994], [2866, 2995, 2994], [2866, 2867, 2996], [2866, 2996, 2995], [2867, 2868, 2996], [2868, 2997, 2996], [2868, 2869, 2998], [2868, 2998, 2997], [2869, 2870, 2998], [2870, 2999, 2998], [2870, 2871, 3000], [2870, 3000, 2999], [2871, 2872, 3000], [2872, 3001, 3000], [2872, 2873, 3002], [2872, 3002, 3001], [2873, 2874, 3002], [2874, 3003, 3002], [2874, 2875, 3004], [2874, 3004, 3003], [2875, 2876, 3004], [2876, 3005, 3004], [2876, 2877, 3006], [2876, 3006, 3005], [2877, 2878, 3006], [2878, 3007, 3006], [2878, 2879, 3008], [2878, 3008, 3007], [2879, 2880, 3008], [2880, 3009, 3008], [2880, 2881, 3010], [2880, 3010, 3009], [2881, 2882, 3010], [2882, 3011, 3010], [2882, 2883, 3012], [2882, 3012, 3011], [2883, 2884, 3012], [2884, 3013, 3012], [2884, 2885, 3014], [2884, 3014, 3013], [2885, 2886, 3014], [2886, 3015, 3014], [2886, 2887, 3016], [2886, 3016, 3015], [2887, 2888, 3016], [2888, 3017, 3016], [2888, 2889, 3018], [2888, 3018, 3017], [2889, 2890, 3018], [2890, 3019, 3018], [2890, 2891, 3020], [2890, 3020, 3019], [2891, 2892, 3020], [2892, 3021, 3020], [2892, 2893, 3022], [2892, 3022, 3021], [2893, 2894, 3022], [2894, 3023, 3022], [2894, 2895, 3024], [2894, 3024, 3023], [2895, 2896, 3024], [2896, 3025, 3024], [2896, 2897, 3026], [2896, 3026, 3025], [2897, 2898, 3026], [2898, 3027, 3026], [2898, 2899, 3028], [2898, 3028, 3027], [2899, 2900, 3028], [2900, 3029, 3028], [2900, 2901, 3030], [2900, 3030, 3029], [2901, 2902, 3030], [2902, 3031, 3030], [2902, 2903, 3032], [2902, 3032, 3031], [2903, 
2904, 3032], [2904, 3033, 3032], [2904, 2905, 3034], [2904, 3034, 3033], [2905, 2906, 3034], [2906, 3035, 3034], [2906, 2907, 3036], [2906, 3036, 3035], [2907, 2908, 3036], [2908, 3037, 3036], [2908, 2909, 3038], [2908, 3038, 3037], [2909, 2910, 3038], [2910, 3039, 3038], [2910, 2911, 3040], [2910, 3040, 3039], [2911, 2912, 3040], [2912, 3041, 3040], [2912, 2913, 3042], [2912, 3042, 3041], [2913, 2914, 3042], [2914, 3043, 3042], [2914, 2915, 3044], [2914, 3044, 3043], [2915, 2916, 3044], [2916, 3045, 3044], [2916, 2917, 3046], [2916, 3046, 3045], [2917, 2918, 3046], [2918, 3047, 3046], [2918, 2919, 3048], [2918, 3048, 3047], [2919, 2920, 3048], [2920, 3049, 3048], [2920, 2921, 3050], [2920, 3050, 3049], [2921, 2922, 3050], [2922, 3051, 3050], [2922, 2923, 3052], [2922, 3052, 3051], [2923, 2924, 3052], [2924, 3053, 3052], [2924, 2925, 3054], [2924, 3054, 3053], [2925, 2926, 3054], [2926, 3055, 3054], [2926, 2927, 3056], [2926, 3056, 3055], [2927, 2928, 3056], [2928, 3057, 3056], [2928, 2929, 3058], [2928, 3058, 3057], [2929, 2930, 3058], [2930, 3059, 3058], [2930, 2931, 3060], [2930, 3060, 3059], [2931, 2932, 3060], [2932, 3061, 3060], [2932, 2933, 3062], [2932, 3062, 3061], [2933, 2934, 3062], [2934, 3063, 3062], [2934, 2935, 3064], [2934, 3064, 3063], [2935, 2936, 3064], [2936, 3065, 3064], [2936, 2937, 3066], [2936, 3066, 3065], [2937, 2938, 3066], [2938, 3067, 3066], [2938, 2939, 3068], [2938, 3068, 3067], [2939, 2940, 3068], [2940, 3069, 3068], [2940, 2941, 3070], [2940, 3070, 3069], [2941, 2942, 3070], [2942, 3071, 3070], [2942, 2943, 3072], [2942, 3072, 3071], [2943, 2944, 3072], [2944, 3073, 3072], [2944, 2945, 3074], [2944, 3074, 3073], [2945, 2946, 3074], [2946, 3075, 3074], [2946, 2947, 3076], [2946, 3076, 3075], [2947, 2948, 3076], [2948, 3077, 3076], [2948, 2949, 3078], [2948, 3078, 3077], [2949, 2950, 3078], [2950, 3079, 3078], [2950, 2951, 3080], [2950, 3080, 3079], [2951, 2952, 3080], [2952, 3081, 3080], [2952, 2953, 3082], [2952, 3082, 3081], [2953, 2954, 3082], [2954, 3083, 3082], [2954, 2955, 3084], [2954, 3084, 3083], [2955, 2956, 3084], [2956, 3085, 3084], [2956, 2957, 3086], [2956, 3086, 3085], [2957, 2958, 3086], [2958, 3087, 3086], [2958, 2959, 3088], [2958, 3088, 3087], [2959, 2960, 3088], [2960, 3089, 3088], [2960, 2961, 3090], [2960, 3090, 3089], [2961, 2962, 3090], [2962, 3091, 3090], [2962, 2963, 3092], [2962, 3092, 3091], [2963, 2964, 3092], [2964, 3093, 3092], [2964, 2965, 3094], [2964, 3094, 3093], [2965, 2966, 3094], [2966, 3095, 3094], [2967, 2968, 3096], [2968, 3097, 3096], [2968, 2969, 3098], [2968, 3098, 3097], [2969, 2970, 3098], [2970, 3099, 3098], [2970, 2971, 3100], [2970, 3100, 3099], [2971, 2972, 3100], [2972, 3101, 3100], [2972, 2973, 3102], [2972, 3102, 3101], [2973, 2974, 3102], [2974, 3103, 3102], [2974, 2975, 3104], [2974, 3104, 3103], [2975, 2976, 3104], [2976, 3105, 3104], [2976, 2977, 3106], [2976, 3106, 3105], [2977, 2978, 3106], [2978, 3107, 3106], [2978, 2979, 3108], [2978, 3108, 3107], [2979, 2980, 3108], [2980, 3109, 3108], [2980, 2981, 3110], [2980, 3110, 3109], [2981, 2982, 3110], [2982, 3111, 3110], [2982, 2983, 3112], [2982, 3112, 3111], [2983, 2984, 3112], [2984, 3113, 3112], [2984, 2985, 3114], [2984, 3114, 3113], [2985, 2986, 3114], [2986, 3115, 3114], [2986, 2987, 3116], [2986, 3116, 3115], [2987, 2988, 3116], [2988, 3117, 3116], [2988, 2989, 3118], [2988, 3118, 3117], [2989, 2990, 3118], [2990, 3119, 3118], [2990, 2991, 3120], [2990, 3120, 3119], [2991, 2992, 3120], [2992, 3121, 3120], [2992, 2993, 3122], [2992, 3122, 3121], 
[2993, 2994, 3122], [2994, 3123, 3122], [2994, 2995, 3124], [2994, 3124, 3123], [2995, 2996, 3124], [2996, 3125, 3124], [2996, 2997, 3126], [2996, 3126, 3125], [2997, 2998, 3126], [2998, 3127, 3126], [2998, 2999, 3128], [2998, 3128, 3127], [2999, 3000, 3128], [3000, 3129, 3128], [3000, 3001, 3130], [3000, 3130, 3129], [3001, 3002, 3130], [3002, 3131, 3130], [3002, 3003, 3132], [3002, 3132, 3131], [3003, 3004, 3132], [3004, 3133, 3132], [3004, 3005, 3134], [3004, 3134, 3133], [3005, 3006, 3134], [3006, 3135, 3134], [3006, 3007, 3136], [3006, 3136, 3135], [3007, 3008, 3136], [3008, 3137, 3136], [3008, 3009, 3138], [3008, 3138, 3137], [3009, 3010, 3138], [3010, 3139, 3138], [3010, 3011, 3140], [3010, 3140, 3139], [3011, 3012, 3140], [3012, 3141, 3140], [3012, 3013, 3142], [3012, 3142, 3141], [3013, 3014, 3142], [3014, 3143, 3142], [3014, 3015, 3144], [3014, 3144, 3143], [3015, 3016, 3144], [3016, 3145, 3144], [3016, 3017, 3146], [3016, 3146, 3145], [3017, 3018, 3146], [3018, 3147, 3146], [3018, 3019, 3148], [3018, 3148, 3147], [3019, 3020, 3148], [3020, 3149, 3148], [3020, 3021, 3150], [3020, 3150, 3149], [3021, 3022, 3150], [3022, 3151, 3150], [3022, 3023, 3152], [3022, 3152, 3151], [3023, 3024, 3152], [3024, 3153, 3152], [3024, 3025, 3154], [3024, 3154, 3153], [3025, 3026, 3154], [3026, 3155, 3154], [3026, 3027, 3156], [3026, 3156, 3155], [3027, 3028, 3156], [3028, 3157, 3156], [3028, 3029, 3158], [3028, 3158, 3157], [3029, 3030, 3158], [3030, 3159, 3158], [3030, 3031, 3160], [3030, 3160, 3159], [3031, 3032, 3160], [3032, 3161, 3160], [3032, 3033, 3162], [3032, 3162, 3161], [3033, 3034, 3162], [3034, 3163, 3162], [3034, 3035, 3164], [3034, 3164, 3163], [3035, 3036, 3164], [3036, 3165, 3164], [3036, 3037, 3166], [3036, 3166, 3165], [3037, 3038, 3166], [3038, 3167, 3166], [3038, 3039, 3168], [3038, 3168, 3167], [3039, 3040, 3168], [3040, 3169, 3168], [3040, 3041, 3170], [3040, 3170, 3169], [3041, 3042, 3170], [3042, 3171, 3170], [3042, 3043, 3172], [3042, 3172, 3171], [3043, 3044, 3172], [3044, 3173, 3172], [3044, 3045, 3174], [3044, 3174, 3173], [3045, 3046, 3174], [3046, 3175, 3174], [3046, 3047, 3176], [3046, 3176, 3175], [3047, 3048, 3176], [3048, 3177, 3176], [3048, 3049, 3178], [3048, 3178, 3177], [3049, 3050, 3178], [3050, 3179, 3178], [3050, 3051, 3180], [3050, 3180, 3179], [3051, 3052, 3180], [3052, 3181, 3180], [3052, 3053, 3182], [3052, 3182, 3181], [3053, 3054, 3182], [3054, 3183, 3182], [3054, 3055, 3184], [3054, 3184, 3183], [3055, 3056, 3184], [3056, 3185, 3184], [3056, 3057, 3186], [3056, 3186, 3185], [3057, 3058, 3186], [3058, 3187, 3186], [3058, 3059, 3188], [3058, 3188, 3187], [3059, 3060, 3188], [3060, 3189, 3188], [3060, 3061, 3190], [3060, 3190, 3189], [3061, 3062, 3190], [3062, 3191, 3190], [3062, 3063, 3192], [3062, 3192, 3191], [3063, 3064, 3192], [3064, 3193, 3192], [3064, 3065, 3194], [3064, 3194, 3193], [3065, 3066, 3194], [3066, 3195, 3194], [3066, 3067, 3196], [3066, 3196, 3195], [3067, 3068, 3196], [3068, 3197, 3196], [3068, 3069, 3198], [3068, 3198, 3197], [3069, 3070, 3198], [3070, 3199, 3198], [3070, 3071, 3200], [3070, 3200, 3199], [3071, 3072, 3200], [3072, 3201, 3200], [3072, 3073, 3202], [3072, 3202, 3201], [3073, 3074, 3202], [3074, 3203, 3202], [3074, 3075, 3204], [3074, 3204, 3203], [3075, 3076, 3204], [3076, 3205, 3204], [3076, 3077, 3206], [3076, 3206, 3205], [3077, 3078, 3206], [3078, 3207, 3206], [3078, 3079, 3208], [3078, 3208, 3207], [3079, 3080, 3208], [3080, 3209, 3208], [3080, 3081, 3210], [3080, 3210, 3209], [3081, 3082, 3210], [3082, 3211, 
3210], [3082, 3083, 3212], [3082, 3212, 3211], [3083, 3084, 3212], [3084, 3213, 3212], [3084, 3085, 3214], [3084, 3214, 3213], [3085, 3086, 3214], [3086, 3215, 3214], [3086, 3087, 3216], [3086, 3216, 3215], [3087, 3088, 3216], [3088, 3217, 3216], [3088, 3089, 3218], [3088, 3218, 3217], [3089, 3090, 3218], [3090, 3219, 3218], [3090, 3091, 3220], [3090, 3220, 3219], [3091, 3092, 3220], [3092, 3221, 3220], [3092, 3093, 3222], [3092, 3222, 3221], [3093, 3094, 3222], [3094, 3223, 3222], [3094, 3095, 3224], [3094, 3224, 3223], [3096, 3097, 3226], [3096, 3226, 3225], [3097, 3098, 3226], [3098, 3227, 3226], [3098, 3099, 3228], [3098, 3228, 3227], [3099, 3100, 3228], [3100, 3229, 3228], [3100, 3101, 3230], [3100, 3230, 3229], [3101, 3102, 3230], [3102, 3231, 3230], [3102, 3103, 3232], [3102, 3232, 3231], [3103, 3104, 3232], [3104, 3233, 3232], [3104, 3105, 3234], [3104, 3234, 3233], [3105, 3106, 3234], [3106, 3235, 3234], [3106, 3107, 3236], [3106, 3236, 3235], [3107, 3108, 3236], [3108, 3237, 3236], [3108, 3109, 3238], [3108, 3238, 3237], [3109, 3110, 3238], [3110, 3239, 3238], [3110, 3111, 3240], [3110, 3240, 3239], [3111, 3112, 3240], [3112, 3241, 3240], [3112, 3113, 3242], [3112, 3242, 3241], [3113, 3114, 3242], [3114, 3243, 3242], [3114, 3115, 3244], [3114, 3244, 3243], [3115, 3116, 3244], [3116, 3245, 3244], [3116, 3117, 3246], [3116, 3246, 3245], [3117, 3118, 3246], [3118, 3247, 3246], [3118, 3119, 3248], [3118, 3248, 3247], [3119, 3120, 3248], [3120, 3249, 3248], [3120, 3121, 3250], [3120, 3250, 3249], [3121, 3122, 3250], [3122, 3251, 3250], [3122, 3123, 3252], [3122, 3252, 3251], [3123, 3124, 3252], [3124, 3253, 3252], [3124, 3125, 3254], [3124, 3254, 3253], [3125, 3126, 3254], [3126, 3255, 3254], [3126, 3127, 3256], [3126, 3256, 3255], [3127, 3128, 3256], [3128, 3257, 3256], [3128, 3129, 3258], [3128, 3258, 3257], [3129, 3130, 3258], [3130, 3259, 3258], [3130, 3131, 3260], [3130, 3260, 3259], [3131, 3132, 3260], [3132, 3261, 3260], [3132, 3133, 3262], [3132, 3262, 3261], [3133, 3134, 3262], [3134, 3263, 3262], [3134, 3135, 3264], [3134, 3264, 3263], [3135, 3136, 3264], [3136, 3265, 3264], [3136, 3137, 3266], [3136, 3266, 3265], [3137, 3138, 3266], [3138, 3267, 3266], [3138, 3139, 3268], [3138, 3268, 3267], [3139, 3140, 3268], [3140, 3269, 3268], [3140, 3141, 3270], [3140, 3270, 3269], [3141, 3142, 3270], [3142, 3271, 3270], [3142, 3143, 3272], [3142, 3272, 3271], [3143, 3144, 3272], [3144, 3273, 3272], [3144, 3145, 3274], [3144, 3274, 3273], [3145, 3146, 3274], [3146, 3275, 3274], [3146, 3147, 3276], [3146, 3276, 3275], [3147, 3148, 3276], [3148, 3277, 3276], [3148, 3149, 3278], [3148, 3278, 3277], [3149, 3150, 3278], [3150, 3279, 3278], [3150, 3151, 3280], [3150, 3280, 3279], [3151, 3152, 3280], [3152, 3281, 3280], [3152, 3153, 3282], [3152, 3282, 3281], [3153, 3154, 3282], [3154, 3283, 3282], [3154, 3155, 3284], [3154, 3284, 3283], [3155, 3156, 3284], [3156, 3285, 3284], [3156, 3157, 3286], [3156, 3286, 3285], [3157, 3158, 3286], [3158, 3287, 3286], [3158, 3159, 3288], [3158, 3288, 3287], [3159, 3160, 3288], [3160, 3289, 3288], [3160, 3161, 3290], [3160, 3290, 3289], [3161, 3162, 3290], [3162, 3291, 3290], [3162, 3163, 3292], [3162, 3292, 3291], [3163, 3164, 3292], [3164, 3293, 3292], [3164, 3165, 3294], [3164, 3294, 3293], [3165, 3166, 3294], [3166, 3295, 3294], [3166, 3167, 3296], [3166, 3296, 3295], [3167, 3168, 3296], [3168, 3297, 3296], [3168, 3169, 3298], [3168, 3298, 3297], [3169, 3170, 3298], [3170, 3299, 3298], [3170, 3171, 3300], [3170, 3300, 3299], [3171, 3172, 3300], [3172, 
3301, 3300], [3172, 3173, 3302], [3172, 3302, 3301], [3173, 3174, 3302], [3174, 3303, 3302], [3174, 3175, 3304], [3174, 3304, 3303], [3175, 3176, 3304], [3176, 3305, 3304], [3176, 3177, 3306], [3176, 3306, 3305], [3177, 3178, 3306], [3178, 3307, 3306], [3178, 3179, 3308], [3178, 3308, 3307], [3179, 3180, 3308], [3180, 3309, 3308], [3180, 3181, 3310], [3180, 3310, 3309], [3181, 3182, 3310], [3182, 3311, 3310], [3182, 3183, 3312], [3182, 3312, 3311], [3183, 3184, 3312], [3184, 3313, 3312], [3184, 3185, 3314], [3184, 3314, 3313], [3185, 3186, 3314], [3186, 3315, 3314], [3186, 3187, 3316], [3186, 3316, 3315], [3187, 3188, 3316], [3188, 3317, 3316], [3188, 3189, 3318], [3188, 3318, 3317], [3189, 3190, 3318], [3190, 3319, 3318], [3190, 3191, 3320], [3190, 3320, 3319], [3191, 3192, 3320], [3192, 3321, 3320], [3192, 3193, 3322], [3192, 3322, 3321], [3193, 3194, 3322], [3194, 3323, 3322], [3194, 3195, 3324], [3194, 3324, 3323], [3195, 3196, 3324], [3196, 3325, 3324], [3196, 3197, 3326], [3196, 3326, 3325], [3197, 3198, 3326], [3198, 3327, 3326], [3198, 3199, 3328], [3198, 3328, 3327], [3199, 3200, 3328], [3200, 3329, 3328], [3200, 3201, 3330], [3200, 3330, 3329], [3201, 3202, 3330], [3202, 3331, 3330], [3202, 3203, 3332], [3202, 3332, 3331], [3203, 3204, 3332], [3204, 3333, 3332], [3204, 3205, 3334], [3204, 3334, 3333], [3205, 3206, 3334], [3206, 3335, 3334], [3206, 3207, 3336], [3206, 3336, 3335], [3207, 3208, 3336], [3208, 3337, 3336], [3208, 3209, 3338], [3208, 3338, 3337], [3209, 3210, 3338], [3210, 3339, 3338], [3210, 3211, 3340], [3210, 3340, 3339], [3211, 3212, 3340], [3212, 3341, 3340], [3212, 3213, 3342], [3212, 3342, 3341], [3213, 3214, 3342], [3214, 3343, 3342], [3214, 3215, 3344], [3214, 3344, 3343], [3215, 3216, 3344], [3216, 3345, 3344], [3216, 3217, 3346], [3216, 3346, 3345], [3217, 3218, 3346], [3218, 3347, 3346], [3218, 3219, 3348], [3218, 3348, 3347], [3219, 3220, 3348], [3220, 3349, 3348], [3220, 3221, 3350], [3220, 3350, 3349], [3221, 3222, 3350], [3222, 3351, 3350], [3222, 3223, 3352], [3222, 3352, 3351], [3223, 3224, 3352], [3224, 3353, 3352], [3225, 3226, 3354], [3226, 3355, 3354], [3226, 3227, 3356], [3226, 3356, 3355], [3227, 3228, 3356], [3228, 3357, 3356], [3228, 3229, 3358], [3228, 3358, 3357], [3229, 3230, 3358], [3230, 3359, 3358], [3230, 3231, 3360], [3230, 3360, 3359], [3231, 3232, 3360], [3232, 3361, 3360], [3232, 3233, 3362], [3232, 3362, 3361], [3233, 3234, 3362], [3234, 3363, 3362], [3234, 3235, 3364], [3234, 3364, 3363], [3235, 3236, 3364], [3236, 3365, 3364], [3236, 3237, 3366], [3236, 3366, 3365], [3237, 3238, 3366], [3238, 3367, 3366], [3238, 3239, 3368], [3238, 3368, 3367], [3239, 3240, 3368], [3240, 3369, 3368], [3240, 3241, 3370], [3240, 3370, 3369], [3241, 3242, 3370], [3242, 3371, 3370], [3242, 3243, 3372], [3242, 3372, 3371], [3243, 3244, 3372], [3244, 3373, 3372], [3244, 3245, 3374], [3244, 3374, 3373], [3245, 3246, 3374], [3246, 3375, 3374], [3246, 3247, 3376], [3246, 3376, 3375], [3247, 3248, 3376], [3248, 3377, 3376], [3248, 3249, 3378], [3248, 3378, 3377], [3249, 3250, 3378], [3250, 3379, 3378], [3250, 3251, 3380], [3250, 3380, 3379], [3251, 3252, 3380], [3252, 3381, 3380], [3252, 3253, 3382], [3252, 3382, 3381], [3253, 3254, 3382], [3254, 3383, 3382], [3254, 3255, 3384], [3254, 3384, 3383], [3255, 3256, 3384], [3256, 3385, 3384], [3256, 3257, 3386], [3256, 3386, 3385], [3257, 3258, 3386], [3258, 3387, 3386], [3258, 3259, 3388], [3258, 3388, 3387], [3259, 3260, 3388], [3260, 3389, 3388], [3260, 3261, 3390], [3260, 3390, 3389], [3261, 3262, 3390], 
[3262, 3391, 3390], [3262, 3263, 3392], [3262, 3392, 3391], [3263, 3264, 3392], [3264, 3393, 3392], [3264, 3265, 3394], [3264, 3394, 3393], [3265, 3266, 3394], [3266, 3395, 3394], [3266, 3267, 3396], [3266, 3396, 3395], [3267, 3268, 3396], [3268, 3397, 3396], [3268, 3269, 3398], [3268, 3398, 3397], [3269, 3270, 3398], [3270, 3399, 3398], [3270, 3271, 3400], [3270, 3400, 3399], [3271, 3272, 3400], [3272, 3401, 3400], [3272, 3273, 3402], [3272, 3402, 3401], [3273, 3274, 3402], [3274, 3403, 3402], [3274, 3275, 3404], [3274, 3404, 3403], [3275, 3276, 3404], [3276, 3405, 3404], [3276, 3277, 3406], [3276, 3406, 3405], [3277, 3278, 3406], [3278, 3407, 3406], [3278, 3279, 3408], [3278, 3408, 3407], [3279, 3280, 3408], [3280, 3409, 3408], [3280, 3281, 3410], [3280, 3410, 3409], [3281, 3282, 3410], [3282, 3411, 3410], [3282, 3283, 3412], [3282, 3412, 3411], [3283, 3284, 3412], [3284, 3413, 3412], [3284, 3285, 3414], [3284, 3414, 3413], [3285, 3286, 3414], [3286, 3415, 3414], [3286, 3287, 3416], [3286, 3416, 3415], [3287, 3288, 3416], [3288, 3417, 3416], [3288, 3289, 3418], [3288, 3418, 3417], [3289, 3290, 3418], [3290, 3419, 3418], [3290, 3291, 3420], [3290, 3420, 3419], [3291, 3292, 3420], [3292, 3421, 3420], [3292, 3293, 3422], [3292, 3422, 3421], [3293, 3294, 3422], [3294, 3423, 3422], [3294, 3295, 3424], [3294, 3424, 3423], [3295, 3296, 3424], [3296, 3425, 3424], [3296, 3297, 3426], [3296, 3426, 3425], [3297, 3298, 3426], [3298, 3427, 3426], [3298, 3299, 3428], [3298, 3428, 3427], [3299, 3300, 3428], [3300, 3429, 3428], [3300, 3301, 3430], [3300, 3430, 3429], [3301, 3302, 3430], [3302, 3431, 3430], [3302, 3303, 3432], [3302, 3432, 3431], [3303, 3304, 3432], [3304, 3433, 3432], [3304, 3305, 3434], [3304, 3434, 3433], [3305, 3306, 3434], [3306, 3435, 3434], [3306, 3307, 3436], [3306, 3436, 3435], [3307, 3308, 3436], [3308, 3437, 3436], [3308, 3309, 3438], [3308, 3438, 3437], [3309, 3310, 3438], [3310, 3439, 3438], [3310, 3311, 3440], [3310, 3440, 3439], [3311, 3312, 3440], [3312, 3441, 3440], [3312, 3313, 3442], [3312, 3442, 3441], [3313, 3314, 3442], [3314, 3443, 3442], [3314, 3315, 3444], [3314, 3444, 3443], [3315, 3316, 3444], [3316, 3445, 3444], [3316, 3317, 3446], [3316, 3446, 3445], [3317, 3318, 3446], [3318, 3447, 3446], [3318, 3319, 3448], [3318, 3448, 3447], [3319, 3320, 3448], [3320, 3449, 3448], [3320, 3321, 3450], [3320, 3450, 3449], [3321, 3322, 3450], [3322, 3451, 3450], [3322, 3323, 3452], [3322, 3452, 3451], [3323, 3324, 3452], [3324, 3453, 3452], [3324, 3325, 3454], [3324, 3454, 3453], [3325, 3326, 3454], [3326, 3455, 3454], [3326, 3327, 3456], [3326, 3456, 3455], [3327, 3328, 3456], [3328, 3457, 3456], [3328, 3329, 3458], [3328, 3458, 3457], [3329, 3330, 3458], [3330, 3459, 3458], [3330, 3331, 3460], [3330, 3460, 3459], [3331, 3332, 3460], [3332, 3461, 3460], [3332, 3333, 3462], [3332, 3462, 3461], [3333, 3334, 3462], [3334, 3463, 3462], [3334, 3335, 3464], [3334, 3464, 3463], [3335, 3336, 3464], [3336, 3465, 3464], [3336, 3337, 3466], [3336, 3466, 3465], [3337, 3338, 3466], [3338, 3467, 3466], [3338, 3339, 3468], [3338, 3468, 3467], [3339, 3340, 3468], [3340, 3469, 3468], [3340, 3341, 3470], [3340, 3470, 3469], [3341, 3342, 3470], [3342, 3471, 3470], [3342, 3343, 3472], [3342, 3472, 3471], [3343, 3344, 3472], [3344, 3473, 3472], [3344, 3345, 3474], [3344, 3474, 3473], [3345, 3346, 3474], [3346, 3475, 3474], [3346, 3347, 3476], [3346, 3476, 3475], [3347, 3348, 3476], [3348, 3477, 3476], [3348, 3349, 3478], [3348, 3478, 3477], [3349, 3350, 3478], [3350, 3479, 3478], [3350, 3351, 
3480], [3350, 3480, 3479], [3351, 3352, 3480], [3352, 3481, 3480], [3352, 3353, 3482], [3352, 3482, 3481], [3354, 3355, 3484], [3354, 3484, 3483], [3355, 3356, 3484], [3356, 3485, 3484], [3356, 3357, 3486], [3356, 3486, 3485], [3357, 3358, 3486], [3358, 3487, 3486], [3358, 3359, 3488], [3358, 3488, 3487], [3359, 3360, 3488], [3360, 3489, 3488], [3360, 3361, 3490], [3360, 3490, 3489], [3361, 3362, 3490], [3362, 3491, 3490], [3362, 3363, 3492], [3362, 3492, 3491], [3363, 3364, 3492], [3364, 3493, 3492], [3364, 3365, 3494], [3364, 3494, 3493], [3365, 3366, 3494], [3366, 3495, 3494], [3366, 3367, 3496], [3366, 3496, 3495], [3367, 3368, 3496], [3368, 3497, 3496], [3368, 3369, 3498], [3368, 3498, 3497], [3369, 3370, 3498], [3370, 3499, 3498], [3370, 3371, 3500], [3370, 3500, 3499], [3371, 3372, 3500], [3372, 3501, 3500], [3372, 3373, 3502], [3372, 3502, 3501], [3373, 3374, 3502], [3374, 3503, 3502], [3374, 3375, 3504], [3374, 3504, 3503], [3375, 3376, 3504], [3376, 3505, 3504], [3376, 3377, 3506], [3376, 3506, 3505], [3377, 3378, 3506], [3378, 3507, 3506], [3378, 3379, 3508], [3378, 3508, 3507], [3379, 3380, 3508], [3380, 3509, 3508], [3380, 3381, 3510], [3380, 3510, 3509], [3381, 3382, 3510], [3382, 3511, 3510], [3382, 3383, 3512], [3382, 3512, 3511], [3383, 3384, 3512], [3384, 3513, 3512], [3384, 3385, 3514], [3384, 3514, 3513], [3385, 3386, 3514], [3386, 3515, 3514], [3386, 3387, 3516], [3386, 3516, 3515], [3387, 3388, 3516], [3388, 3517, 3516], [3388, 3389, 3518], [3388, 3518, 3517], [3389, 3390, 3518], [3390, 3519, 3518], [3390, 3391, 3520], [3390, 3520, 3519], [3391, 3392, 3520], [3392, 3521, 3520], [3392, 3393, 3522], [3392, 3522, 3521], [3393, 3394, 3522], [3394, 3523, 3522], [3394, 3395, 3524], [3394, 3524, 3523], [3395, 3396, 3524], [3396, 3525, 3524], [3396, 3397, 3526], [3396, 3526, 3525], [3397, 3398, 3526], [3398, 3527, 3526], [3398, 3399, 3528], [3398, 3528, 3527], [3399, 3400, 3528], [3400, 3529, 3528], [3400, 3401, 3530], [3400, 3530, 3529], [3401, 3402, 3530], [3402, 3531, 3530], [3402, 3403, 3532], [3402, 3532, 3531], [3403, 3404, 3532], [3404, 3533, 3532], [3404, 3405, 3534], [3404, 3534, 3533], [3405, 3406, 3534], [3406, 3535, 3534], [3406, 3407, 3536], [3406, 3536, 3535], [3407, 3408, 3536], [3408, 3537, 3536], [3408, 3409, 3538], [3408, 3538, 3537], [3409, 3410, 3538], [3410, 3539, 3538], [3410, 3411, 3540], [3410, 3540, 3539], [3411, 3412, 3540], [3412, 3541, 3540], [3412, 3413, 3542], [3412, 3542, 3541], [3413, 3414, 3542], [3414, 3543, 3542], [3414, 3415, 3544], [3414, 3544, 3543], [3415, 3416, 3544], [3416, 3545, 3544], [3416, 3417, 3546], [3416, 3546, 3545], [3417, 3418, 3546], [3418, 3547, 3546], [3418, 3419, 3548], [3418, 3548, 3547], [3419, 3420, 3548], [3420, 3549, 3548], [3420, 3421, 3550], [3420, 3550, 3549], [3421, 3422, 3550], [3422, 3551, 3550], [3422, 3423, 3552], [3422, 3552, 3551], [3423, 3424, 3552], [3424, 3553, 3552], [3424, 3425, 3554], [3424, 3554, 3553], [3425, 3426, 3554], [3426, 3555, 3554], [3426, 3427, 3556], [3426, 3556, 3555], [3427, 3428, 3556], [3428, 3557, 3556], [3428, 3429, 3558], [3428, 3558, 3557], [3429, 3430, 3558], [3430, 3559, 3558], [3430, 3431, 3560], [3430, 3560, 3559], [3431, 3432, 3560], [3432, 3561, 3560], [3432, 3433, 3562], [3432, 3562, 3561], [3433, 3434, 3562], [3434, 3563, 3562], [3434, 3435, 3564], [3434, 3564, 3563], [3435, 3436, 3564], [3436, 3565, 3564], [3436, 3437, 3566], [3436, 3566, 3565], [3437, 3438, 3566], [3438, 3567, 3566], [3438, 3439, 3568], [3438, 3568, 3567], [3439, 3440, 3568], [3440, 3569, 3568], [3440, 
3441, 3570], [3440, 3570, 3569], [3441, 3442, 3570], [3442, 3571, 3570], [3442, 3443, 3572], [3442, 3572, 3571], [3443, 3444, 3572], [3444, 3573, 3572], [3444, 3445, 3574], [3444, 3574, 3573], [3445, 3446, 3574], [3446, 3575, 3574], [3446, 3447, 3576], [3446, 3576, 3575], [3447, 3448, 3576], [3448, 3577, 3576], [3448, 3449, 3578], [3448, 3578, 3577], [3449, 3450, 3578], [3450, 3579, 3578], [3450, 3451, 3580], [3450, 3580, 3579], [3451, 3452, 3580], [3452, 3581, 3580], [3452, 3453, 3582], [3452, 3582, 3581], [3453, 3454, 3582], [3454, 3583, 3582], [3454, 3455, 3584], [3454, 3584, 3583], [3455, 3456, 3584], [3456, 3585, 3584], [3456, 3457, 3586], [3456, 3586, 3585], [3457, 3458, 3586], [3458, 3587, 3586], [3458, 3459, 3588], [3458, 3588, 3587], [3459, 3460, 3588], [3460, 3589, 3588], [3460, 3461, 3590], [3460, 3590, 3589], [3461, 3462, 3590], [3462, 3591, 3590], [3462, 3463, 3592], [3462, 3592, 3591], [3463, 3464, 3592], [3464, 3593, 3592], [3464, 3465, 3594], [3464, 3594, 3593], [3465, 3466, 3594], [3466, 3595, 3594], [3466, 3467, 3596], [3466, 3596, 3595], [3467, 3468, 3596], [3468, 3597, 3596], [3468, 3469, 3598], [3468, 3598, 3597], [3469, 3470, 3598], [3470, 3599, 3598], [3470, 3471, 3600], [3470, 3600, 3599], [3471, 3472, 3600], [3472, 3601, 3600], [3472, 3473, 3602], [3472, 3602, 3601], [3473, 3474, 3602], [3474, 3603, 3602], [3474, 3475, 3604], [3474, 3604, 3603], [3475, 3476, 3604], [3476, 3605, 3604], [3476, 3477, 3606], [3476, 3606, 3605], [3477, 3478, 3606], [3478, 3607, 3606], [3478, 3479, 3608], [3478, 3608, 3607], [3479, 3480, 3608], [3480, 3609, 3608], [3480, 3481, 3610], [3480, 3610, 3609], [3481, 3482, 3610], [3482, 3611, 3610], [3483, 3484, 3612], [3484, 3613, 3612], [3484, 3485, 3614], [3484, 3614, 3613], [3485, 3486, 3614], [3486, 3615, 3614], [3486, 3487, 3616], [3486, 3616, 3615], [3487, 3488, 3616], [3488, 3617, 3616], [3488, 3489, 3618], [3488, 3618, 3617], [3489, 3490, 3618], [3490, 3619, 3618], [3490, 3491, 3620], [3490, 3620, 3619], [3491, 3492, 3620], [3492, 3621, 3620], [3492, 3493, 3622], [3492, 3622, 3621], [3493, 3494, 3622], [3494, 3623, 3622], [3494, 3495, 3624], [3494, 3624, 3623], [3495, 3496, 3624], [3496, 3625, 3624], [3496, 3497, 3626], [3496, 3626, 3625], [3497, 3498, 3626], [3498, 3627, 3626], [3498, 3499, 3628], [3498, 3628, 3627], [3499, 3500, 3628], [3500, 3629, 3628], [3500, 3501, 3630], [3500, 3630, 3629], [3501, 3502, 3630], [3502, 3631, 3630], [3502, 3503, 3632], [3502, 3632, 3631], [3503, 3504, 3632], [3504, 3633, 3632], [3504, 3505, 3634], [3504, 3634, 3633], [3505, 3506, 3634], [3506, 3635, 3634], [3506, 3507, 3636], [3506, 3636, 3635], [3507, 3508, 3636], [3508, 3637, 3636], [3508, 3509, 3638], [3508, 3638, 3637], [3509, 3510, 3638], [3510, 3639, 3638], [3510, 3511, 3640], [3510, 3640, 3639], [3511, 3512, 3640], [3512, 3641, 3640], [3512, 3513, 3642], [3512, 3642, 3641], [3513, 3514, 3642], [3514, 3643, 3642], [3514, 3515, 3644], [3514, 3644, 3643], [3515, 3516, 3644], [3516, 3645, 3644], [3516, 3517, 3646], [3516, 3646, 3645], [3517, 3518, 3646], [3518, 3647, 3646], [3518, 3519, 3648], [3518, 3648, 3647], [3519, 3520, 3648], [3520, 3649, 3648], [3520, 3521, 3650], [3520, 3650, 3649], [3521, 3522, 3650], [3522, 3651, 3650], [3522, 3523, 3652], [3522, 3652, 3651], [3523, 3524, 3652], [3524, 3653, 3652], [3524, 3525, 3654], [3524, 3654, 3653], [3525, 3526, 3654], [3526, 3655, 3654], [3526, 3527, 3656], [3526, 3656, 3655], [3527, 3528, 3656], [3528, 3657, 3656], [3528, 3529, 3658], [3528, 3658, 3657], [3529, 3530, 3658], [3530, 3659, 3658], 
[3530, 3531, 3660], [3530, 3660, 3659], [3531, 3532, 3660], [3532, 3661, 3660], [3532, 3533, 3662], [3532, 3662, 3661], [3533, 3534, 3662], [3534, 3663, 3662], [3534, 3535, 3664], [3534, 3664, 3663], [3535, 3536, 3664], [3536, 3665, 3664], [3536, 3537, 3666], [3536, 3666, 3665], [3537, 3538, 3666], [3538, 3667, 3666], [3538, 3539, 3668], [3538, 3668, 3667], [3539, 3540, 3668], [3540, 3669, 3668], [3540, 3541, 3670], [3540, 3670, 3669], [3541, 3542, 3670], [3542, 3671, 3670], [3542, 3543, 3672], [3542, 3672, 3671], [3543, 3544, 3672], [3544, 3673, 3672], [3544, 3545, 3674], [3544, 3674, 3673], [3545, 3546, 3674], [3546, 3675, 3674], [3546, 3547, 3676], [3546, 3676, 3675], [3547, 3548, 3676], [3548, 3677, 3676], [3548, 3549, 3678], [3548, 3678, 3677], [3549, 3550, 3678], [3550, 3679, 3678], [3550, 3551, 3680], [3550, 3680, 3679], [3551, 3552, 3680], [3552, 3681, 3680], [3552, 3553, 3682], [3552, 3682, 3681], [3553, 3554, 3682], [3554, 3683, 3682], [3554, 3555, 3684], [3554, 3684, 3683], [3555, 3556, 3684], [3556, 3685, 3684], [3556, 3557, 3686], [3556, 3686, 3685], [3557, 3558, 3686], [3558, 3687, 3686], [3558, 3559, 3688], [3558, 3688, 3687], [3559, 3560, 3688], [3560, 3689, 3688], [3560, 3561, 3690], [3560, 3690, 3689], [3561, 3562, 3690], [3562, 3691, 3690], [3562, 3563, 3692], [3562, 3692, 3691], [3563, 3564, 3692], [3564, 3693, 3692], [3564, 3565, 3694], [3564, 3694, 3693], [3565, 3566, 3694], [3566, 3695, 3694], [3566, 3567, 3696], [3566, 3696, 3695], [3567, 3568, 3696], [3568, 3697, 3696], [3568, 3569, 3698], [3568, 3698, 3697], [3569, 3570, 3698], [3570, 3699, 3698], [3570, 3571, 3700], [3570, 3700, 3699], [3571, 3572, 3700], [3572, 3701, 3700], [3572, 3573, 3702], [3572, 3702, 3701], [3573, 3574, 3702], [3574, 3703, 3702], [3574, 3575, 3704], [3574, 3704, 3703], [3575, 3576, 3704], [3576, 3705, 3704], [3576, 3577, 3706], [3576, 3706, 3705], [3577, 3578, 3706], [3578, 3707, 3706], [3578, 3579, 3708], [3578, 3708, 3707], [3579, 3580, 3708], [3580, 3709, 3708], [3580, 3581, 3710], [3580, 3710, 3709], [3581, 3582, 3710], [3582, 3711, 3710], [3582, 3583, 3712], [3582, 3712, 3711], [3583, 3584, 3712], [3584, 3713, 3712], [3584, 3585, 3714], [3584, 3714, 3713], [3585, 3586, 3714], [3586, 3715, 3714], [3586, 3587, 3716], [3586, 3716, 3715], [3587, 3588, 3716], [3588, 3717, 3716], [3588, 3589, 3718], [3588, 3718, 3717], [3589, 3590, 3718], [3590, 3719, 3718], [3590, 3591, 3720], [3590, 3720, 3719], [3591, 3592, 3720], [3592, 3721, 3720], [3592, 3593, 3722], [3592, 3722, 3721], [3593, 3594, 3722], [3594, 3723, 3722], [3594, 3595, 3724], [3594, 3724, 3723], [3595, 3596, 3724], [3596, 3725, 3724], [3596, 3597, 3726], [3596, 3726, 3725], [3597, 3598, 3726], [3598, 3727, 3726], [3598, 3599, 3728], [3598, 3728, 3727], [3599, 3600, 3728], [3600, 3729, 3728], [3600, 3601, 3730], [3600, 3730, 3729], [3601, 3602, 3730], [3602, 3731, 3730], [3602, 3603, 3732], [3602, 3732, 3731], [3603, 3604, 3732], [3604, 3733, 3732], [3604, 3605, 3734], [3604, 3734, 3733], [3605, 3606, 3734], [3606, 3735, 3734], [3606, 3607, 3736], [3606, 3736, 3735], [3607, 3608, 3736], [3608, 3737, 3736], [3608, 3609, 3738], [3608, 3738, 3737], [3609, 3610, 3738], [3610, 3739, 3738], [3610, 3611, 3740], [3610, 3740, 3739], [3612, 3613, 3742], [3612, 3742, 3741], [3613, 3614, 3742], [3614, 3743, 3742], [3614, 3615, 3744], [3614, 3744, 3743], [3615, 3616, 3744], [3616, 3745, 3744], [3616, 3617, 3746], [3616, 3746, 3745], [3617, 3618, 3746], [3618, 3747, 3746], [3618, 3619, 3748], [3618, 3748, 3747], [3619, 3620, 3748], [3620, 3749, 
3748], [3620, 3621, 3750], [3620, 3750, 3749], [3621, 3622, 3750], [3622, 3751, 3750], [3622, 3623, 3752], [3622, 3752, 3751], [3623, 3624, 3752], [3624, 3753, 3752], [3624, 3625, 3754], [3624, 3754, 3753], [3625, 3626, 3754], [3626, 3755, 3754], [3626, 3627, 3756], [3626, 3756, 3755], [3627, 3628, 3756], [3628, 3757, 3756], [3628, 3629, 3758], [3628, 3758, 3757], [3629, 3630, 3758], [3630, 3759, 3758], [3630, 3631, 3760], [3630, 3760, 3759], [3631, 3632, 3760], [3632, 3761, 3760], [3632, 3633, 3762], [3632, 3762, 3761], [3633, 3634, 3762], [3634, 3763, 3762], [3634, 3635, 3764], [3634, 3764, 3763], [3635, 3636, 3764], [3636, 3765, 3764], [3636, 3637, 3766], [3636, 3766, 3765], [3637, 3638, 3766], [3638, 3767, 3766], [3638, 3639, 3768], [3638, 3768, 3767], [3639, 3640, 3768], [3640, 3769, 3768], [3640, 3641, 3770], [3640, 3770, 3769], [3641, 3642, 3770], [3642, 3771, 3770], [3642, 3643, 3772], [3642, 3772, 3771], [3643, 3644, 3772], [3644, 3773, 3772], [3644, 3645, 3774], [3644, 3774, 3773], [3645, 3646, 3774], [3646, 3775, 3774], [3646, 3647, 3776], [3646, 3776, 3775], [3647, 3648, 3776], [3648, 3777, 3776], [3648, 3649, 3778], [3648, 3778, 3777], [3649, 3650, 3778], [3650, 3779, 3778], [3650, 3651, 3780], [3650, 3780, 3779], [3651, 3652, 3780], [3652, 3781, 3780], [3652, 3653, 3782], [3652, 3782, 3781], [3653, 3654, 3782], [3654, 3783, 3782], [3654, 3655, 3784], [3654, 3784, 3783], [3655, 3656, 3784], [3656, 3785, 3784], [3656, 3657, 3786], [3656, 3786, 3785], [3657, 3658, 3786], [3658, 3787, 3786], [3658, 3659, 3788], [3658, 3788, 3787], [3659, 3660, 3788], [3660, 3789, 3788], [3660, 3661, 3790], [3660, 3790, 3789], [3661, 3662, 3790], [3662, 3791, 3790], [3662, 3663, 3792], [3662, 3792, 3791], [3663, 3664, 3792], [3664, 3793, 3792], [3664, 3665, 3794], [3664, 3794, 3793], [3665, 3666, 3794], [3666, 3795, 3794], [3666, 3667, 3796], [3666, 3796, 3795], [3667, 3668, 3796], [3668, 3797, 3796], [3668, 3669, 3798], [3668, 3798, 3797], [3669, 3670, 3798], [3670, 3799, 3798], [3670, 3671, 3800], [3670, 3800, 3799], [3671, 3672, 3800], [3672, 3801, 3800], [3672, 3673, 3802], [3672, 3802, 3801], [3673, 3674, 3802], [3674, 3803, 3802], [3674, 3675, 3804], [3674, 3804, 3803], [3675, 3676, 3804], [3676, 3805, 3804], [3676, 3677, 3806], [3676, 3806, 3805], [3677, 3678, 3806], [3678, 3807, 3806], [3678, 3679, 3808], [3678, 3808, 3807], [3679, 3680, 3808], [3680, 3809, 3808], [3680, 3681, 3810], [3680, 3810, 3809], [3681, 3682, 3810], [3682, 3811, 3810], [3682, 3683, 3812], [3682, 3812, 3811], [3683, 3684, 3812], [3684, 3813, 3812], [3684, 3685, 3814], [3684, 3814, 3813], [3685, 3686, 3814], [3686, 3815, 3814], [3686, 3687, 3816], [3686, 3816, 3815], [3687, 3688, 3816], [3688, 3817, 3816], [3688, 3689, 3818], [3688, 3818, 3817], [3689, 3690, 3818], [3690, 3819, 3818], [3690, 3691, 3820], [3690, 3820, 3819], [3691, 3692, 3820], [3692, 3821, 3820], [3692, 3693, 3822], [3692, 3822, 3821], [3693, 3694, 3822], [3694, 3823, 3822], [3694, 3695, 3824], [3694, 3824, 3823], [3695, 3696, 3824], [3696, 3825, 3824], [3696, 3697, 3826], [3696, 3826, 3825], [3697, 3698, 3826], [3698, 3827, 3826], [3698, 3699, 3828], [3698, 3828, 3827], [3699, 3700, 3828], [3700, 3829, 3828], [3700, 3701, 3830], [3700, 3830, 3829], [3701, 3702, 3830], [3702, 3831, 3830], [3702, 3703, 3832], [3702, 3832, 3831], [3703, 3704, 3832], [3704, 3833, 3832], [3704, 3705, 3834], [3704, 3834, 3833], [3705, 3706, 3834], [3706, 3835, 3834], [3706, 3707, 3836], [3706, 3836, 3835], [3707, 3708, 3836], [3708, 3837, 3836], [3708, 3709, 3838], [3708, 
3838, 3837], [3709, 3710, 3838], [3710, 3839, 3838], [3710, 3711, 3840], [3710, 3840, 3839], [3711, 3712, 3840], [3712, 3841, 3840], [3712, 3713, 3842], [3712, 3842, 3841], [3713, 3714, 3842], [3714, 3843, 3842], [3714, 3715, 3844], [3714, 3844, 3843], [3715, 3716, 3844], [3716, 3845, 3844], [3716, 3717, 3846], [3716, 3846, 3845], [3717, 3718, 3846], [3718, 3847, 3846], [3718, 3719, 3848], [3718, 3848, 3847], [3719, 3720, 3848], [3720, 3849, 3848], [3720, 3721, 3850], [3720, 3850, 3849], [3721, 3722, 3850], [3722, 3851, 3850], [3722, 3723, 3852], [3722, 3852, 3851], [3723, 3724, 3852], [3724, 3853, 3852], [3724, 3725, 3854], [3724, 3854, 3853], [3725, 3726, 3854], [3726, 3855, 3854], [3726, 3727, 3856], [3726, 3856, 3855], [3727, 3728, 3856], [3728, 3857, 3856], [3728, 3729, 3858], [3728, 3858, 3857], [3729, 3730, 3858], [3730, 3859, 3858], [3730, 3731, 3860], [3730, 3860, 3859], [3731, 3732, 3860], [3732, 3861, 3860], [3732, 3733, 3862], [3732, 3862, 3861], [3733, 3734, 3862], [3734, 3863, 3862], [3734, 3735, 3864], [3734, 3864, 3863], [3735, 3736, 3864], [3736, 3865, 3864], [3736, 3737, 3866], [3736, 3866, 3865], [3737, 3738, 3866], [3738, 3867, 3866], [3738, 3739, 3868], [3738, 3868, 3867], [3739, 3740, 3868], [3740, 3869, 3868], [3741, 3742, 3870], [3742, 3871, 3870], [3742, 3743, 3872], [3742, 3872, 3871], [3743, 3744, 3872], [3744, 3873, 3872], [3744, 3745, 3874], [3744, 3874, 3873], [3745, 3746, 3874], [3746, 3875, 3874], [3746, 3747, 3876], [3746, 3876, 3875], [3747, 3748, 3876], [3748, 3877, 3876], [3748, 3749, 3878], [3748, 3878, 3877], [3749, 3750, 3878], [3750, 3879, 3878], [3750, 3751, 3880], [3750, 3880, 3879], [3751, 3752, 3880], [3752, 3881, 3880], [3752, 3753, 3882], [3752, 3882, 3881], [3753, 3754, 3882], [3754, 3883, 3882], [3754, 3755, 3884], [3754, 3884, 3883], [3755, 3756, 3884], [3756, 3885, 3884], [3756, 3757, 3886], [3756, 3886, 3885], [3757, 3758, 3886], [3758, 3887, 3886], [3758, 3759, 3888], [3758, 3888, 3887], [3759, 3760, 3888], [3760, 3889, 3888], [3760, 3761, 3890], [3760, 3890, 3889], [3761, 3762, 3890], [3762, 3891, 3890], [3762, 3763, 3892], [3762, 3892, 3891], [3763, 3764, 3892], [3764, 3893, 3892], [3764, 3765, 3894], [3764, 3894, 3893], [3765, 3766, 3894], [3766, 3895, 3894], [3766, 3767, 3896], [3766, 3896, 3895], [3767, 3768, 3896], [3768, 3897, 3896], [3768, 3769, 3898], [3768, 3898, 3897], [3769, 3770, 3898], [3770, 3899, 3898], [3770, 3771, 3900], [3770, 3900, 3899], [3771, 3772, 3900], [3772, 3901, 3900], [3772, 3773, 3902], [3772, 3902, 3901], [3773, 3774, 3902], [3774, 3903, 3902], [3774, 3775, 3904], [3774, 3904, 3903], [3775, 3776, 3904], [3776, 3905, 3904], [3776, 3777, 3906], [3776, 3906, 3905], [3777, 3778, 3906], [3778, 3907, 3906], [3778, 3779, 3908], [3778, 3908, 3907], [3779, 3780, 3908], [3780, 3909, 3908], [3780, 3781, 3910], [3780, 3910, 3909], [3781, 3782, 3910], [3782, 3911, 3910], [3782, 3783, 3912], [3782, 3912, 3911], [3783, 3784, 3912], [3784, 3913, 3912], [3784, 3785, 3914], [3784, 3914, 3913], [3785, 3786, 3914], [3786, 3915, 3914], [3786, 3787, 3916], [3786, 3916, 3915], [3787, 3788, 3916], [3788, 3917, 3916], [3788, 3789, 3918], [3788, 3918, 3917], [3789, 3790, 3918], [3790, 3919, 3918], [3790, 3791, 3920], [3790, 3920, 3919], [3791, 3792, 3920], [3792, 3921, 3920], [3792, 3793, 3922], [3792, 3922, 3921], [3793, 3794, 3922], [3794, 3923, 3922], [3794, 3795, 3924], [3794, 3924, 3923], [3795, 3796, 3924], [3796, 3925, 3924], [3796, 3797, 3926], [3796, 3926, 3925], [3797, 3798, 3926], [3798, 3927, 3926], [3798, 3799, 3928], 
[3798, 3928, 3927], [3799, 3800, 3928], [3800, 3929, 3928], [3800, 3801, 3930], [3800, 3930, 3929], [3801, 3802, 3930], [3802, 3931, 3930], [3802, 3803, 3932], [3802, 3932, 3931], [3803, 3804, 3932], [3804, 3933, 3932], [3804, 3805, 3934], [3804, 3934, 3933], [3805, 3806, 3934], [3806, 3935, 3934], [3806, 3807, 3936], [3806, 3936, 3935], [3807, 3808, 3936], [3808, 3937, 3936], [3808, 3809, 3938], [3808, 3938, 3937], [3809, 3810, 3938], [3810, 3939, 3938], [3810, 3811, 3940], [3810, 3940, 3939], [3811, 3812, 3940], [3812, 3941, 3940], [3812, 3813, 3942], [3812, 3942, 3941], [3813, 3814, 3942], [3814, 3943, 3942], [3814, 3815, 3944], [3814, 3944, 3943], [3815, 3816, 3944], [3816, 3945, 3944], [3816, 3817, 3946], [3816, 3946, 3945], [3817, 3818, 3946], [3818, 3947, 3946], [3818, 3819, 3948], [3818, 3948, 3947], [3819, 3820, 3948], [3820, 3949, 3948], [3820, 3821, 3950], [3820, 3950, 3949], [3821, 3822, 3950], [3822, 3951, 3950], [3822, 3823, 3952], [3822, 3952, 3951], [3823, 3824, 3952], [3824, 3953, 3952], [3824, 3825, 3954], [3824, 3954, 3953], [3825, 3826, 3954], [3826, 3955, 3954], [3826, 3827, 3956], [3826, 3956, 3955], [3827, 3828, 3956], [3828, 3957, 3956], [3828, 3829, 3958], [3828, 3958, 3957], [3829, 3830, 3958], [3830, 3959, 3958], [3830, 3831, 3960], [3830, 3960, 3959], [3831, 3832, 3960], [3832, 3961, 3960], [3832, 3833, 3962], [3832, 3962, 3961], [3833, 3834, 3962], [3834, 3963, 3962], [3834, 3835, 3964], [3834, 3964, 3963], [3835, 3836, 3964], [3836, 3965, 3964], [3836, 3837, 3966], [3836, 3966, 3965], [3837, 3838, 3966], [3838, 3967, 3966], [3838, 3839, 3968], [3838, 3968, 3967], [3839, 3840, 3968], [3840, 3969, 3968], [3840, 3841, 3970], [3840, 3970, 3969], [3841, 3842, 3970], [3842, 3971, 3970], [3842, 3843, 3972], [3842, 3972, 3971], [3843, 3844, 3972], [3844, 3973, 3972], [3844, 3845, 3974], [3844, 3974, 3973], [3845, 3846, 3974], [3846, 3975, 3974], [3846, 3847, 3976], [3846, 3976, 3975], [3847, 3848, 3976], [3848, 3977, 3976], [3848, 3849, 3978], [3848, 3978, 3977], [3849, 3850, 3978], [3850, 3979, 3978], [3850, 3851, 3980], [3850, 3980, 3979], [3851, 3852, 3980], [3852, 3981, 3980], [3852, 3853, 3982], [3852, 3982, 3981], [3853, 3854, 3982], [3854, 3983, 3982], [3854, 3855, 3984], [3854, 3984, 3983], [3855, 3856, 3984], [3856, 3985, 3984], [3856, 3857, 3986], [3856, 3986, 3985], [3857, 3858, 3986], [3858, 3987, 3986], [3858, 3859, 3988], [3858, 3988, 3987], [3859, 3860, 3988], [3860, 3989, 3988], [3860, 3861, 3990], [3860, 3990, 3989], [3861, 3862, 3990], [3862, 3991, 3990], [3862, 3863, 3992], [3862, 3992, 3991], [3863, 3864, 3992], [3864, 3993, 3992], [3864, 3865, 3994], [3864, 3994, 3993], [3865, 3866, 3994], [3866, 3995, 3994], [3866, 3867, 3996], [3866, 3996, 3995], [3867, 3868, 3996], [3868, 3997, 3996], [3868, 3869, 3998], [3868, 3998, 3997], [3870, 3871, 4000], [3870, 4000, 3999], [3871, 3872, 4000], [3872, 4001, 4000], [3872, 3873, 4002], [3872, 4002, 4001], [3873, 3874, 4002], [3874, 4003, 4002], [3874, 3875, 4004], [3874, 4004, 4003], [3875, 3876, 4004], [3876, 4005, 4004], [3876, 3877, 4006], [3876, 4006, 4005], [3877, 3878, 4006], [3878, 4007, 4006], [3878, 3879, 4008], [3878, 4008, 4007], [3879, 3880, 4008], [3880, 4009, 4008], [3880, 3881, 4010], [3880, 4010, 4009], [3881, 3882, 4010], [3882, 4011, 4010], [3882, 3883, 4012], [3882, 4012, 4011], [3883, 3884, 4012], [3884, 4013, 4012], [3884, 3885, 4014], [3884, 4014, 4013], [3885, 3886, 4014], [3886, 4015, 4014], [3886, 3887, 4016], [3886, 4016, 4015], [3887, 3888, 4016], [3888, 4017, 4016], [3888, 3889, 
4018], [3888, 4018, 4017], [3889, 3890, 4018], [3890, 4019, 4018], [3890, 3891, 4020], [3890, 4020, 4019], [3891, 3892, 4020], [3892, 4021, 4020], [3892, 3893, 4022], [3892, 4022, 4021], [3893, 3894, 4022], [3894, 4023, 4022], [3894, 3895, 4024], [3894, 4024, 4023], [3895, 3896, 4024], [3896, 4025, 4024], [3896, 3897, 4026], [3896, 4026, 4025], [3897, 3898, 4026], [3898, 4027, 4026], [3898, 3899, 4028], [3898, 4028, 4027], [3899, 3900, 4028], [3900, 4029, 4028], [3900, 3901, 4030], [3900, 4030, 4029], [3901, 3902, 4030], [3902, 4031, 4030], [3902, 3903, 4032], [3902, 4032, 4031], [3903, 3904, 4032], [3904, 4033, 4032], [3904, 3905, 4034], [3904, 4034, 4033], [3905, 3906, 4034], [3906, 4035, 4034], [3906, 3907, 4036], [3906, 4036, 4035], [3907, 3908, 4036], [3908, 4037, 4036], [3908, 3909, 4038], [3908, 4038, 4037], [3909, 3910, 4038], [3910, 4039, 4038], [3910, 3911, 4040], [3910, 4040, 4039], [3911, 3912, 4040], [3912, 4041, 4040], [3912, 3913, 4042], [3912, 4042, 4041], [3913, 3914, 4042], [3914, 4043, 4042], [3914, 3915, 4044], [3914, 4044, 4043], [3915, 3916, 4044], [3916, 4045, 4044], [3916, 3917, 4046], [3916, 4046, 4045], [3917, 3918, 4046], [3918, 4047, 4046], [3918, 3919, 4048], [3918, 4048, 4047], [3919, 3920, 4048], [3920, 4049, 4048], [3920, 3921, 4050], [3920, 4050, 4049], [3921, 3922, 4050], [3922, 4051, 4050], [3922, 3923, 4052], [3922, 4052, 4051], [3923, 3924, 4052], [3924, 4053, 4052], [3924, 3925, 4054], [3924, 4054, 4053], [3925, 3926, 4054], [3926, 4055, 4054], [3926, 3927, 4056], [3926, 4056, 4055], [3927, 3928, 4056], [3928, 4057, 4056], [3928, 3929, 4058], [3928, 4058, 4057], [3929, 3930, 4058], [3930, 4059, 4058], [3930, 3931, 4060], [3930, 4060, 4059], [3931, 3932, 4060], [3932, 4061, 4060], [3932, 3933, 4062], [3932, 4062, 4061], [3933, 3934, 4062], [3934, 4063, 4062], [3934, 3935, 4064], [3934, 4064, 4063], [3935, 3936, 4064], [3936, 4065, 4064], [3936, 3937, 4066], [3936, 4066, 4065], [3937, 3938, 4066], [3938, 4067, 4066], [3938, 3939, 4068], [3938, 4068, 4067], [3939, 3940, 4068], [3940, 4069, 4068], [3940, 3941, 4070], [3940, 4070, 4069], [3941, 3942, 4070], [3942, 4071, 4070], [3942, 3943, 4072], [3942, 4072, 4071], [3943, 3944, 4072], [3944, 4073, 4072], [3944, 3945, 4074], [3944, 4074, 4073], [3945, 3946, 4074], [3946, 4075, 4074], [3946, 3947, 4076], [3946, 4076, 4075], [3947, 3948, 4076], [3948, 4077, 4076], [3948, 3949, 4078], [3948, 4078, 4077], [3949, 3950, 4078], [3950, 4079, 4078], [3950, 3951, 4080], [3950, 4080, 4079], [3951, 3952, 4080], [3952, 4081, 4080], [3952, 3953, 4082], [3952, 4082, 4081], [3953, 3954, 4082], [3954, 4083, 4082], [3954, 3955, 4084], [3954, 4084, 4083], [3955, 3956, 4084], [3956, 4085, 4084], [3956, 3957, 4086], [3956, 4086, 4085], [3957, 3958, 4086], [3958, 4087, 4086], [3958, 3959, 4088], [3958, 4088, 4087], [3959, 3960, 4088], [3960, 4089, 4088], [3960, 3961, 4090], [3960, 4090, 4089], [3961, 3962, 4090], [3962, 4091, 4090], [3962, 3963, 4092], [3962, 4092, 4091], [3963, 3964, 4092], [3964, 4093, 4092], [3964, 3965, 4094], [3964, 4094, 4093], [3965, 3966, 4094], [3966, 4095, 4094], [3966, 3967, 4096], [3966, 4096, 4095], [3967, 3968, 4096], [3968, 4097, 4096], [3968, 3969, 4098], [3968, 4098, 4097], [3969, 3970, 4098], [3970, 4099, 4098], [3970, 3971, 4100], [3970, 4100, 4099], [3971, 3972, 4100], [3972, 4101, 4100], [3972, 3973, 4102], [3972, 4102, 4101], [3973, 3974, 4102], [3974, 4103, 4102], [3974, 3975, 4104], [3974, 4104, 4103], [3975, 3976, 4104], [3976, 4105, 4104], [3976, 3977, 4106], [3976, 4106, 4105], [3977, 
3978, 4106], [3978, 4107, 4106], [3978, 3979, 4108], [3978, 4108, 4107], [3979, 3980, 4108], [3980, 4109, 4108], [3980, 3981, 4110], [3980, 4110, 4109], [3981, 3982, 4110], [3982, 4111, 4110], [3982, 3983, 4112], [3982, 4112, 4111], [3983, 3984, 4112], [3984, 4113, 4112], [3984, 3985, 4114], [3984, 4114, 4113], [3985, 3986, 4114], [3986, 4115, 4114], [3986, 3987, 4116], [3986, 4116, 4115], [3987, 3988, 4116], [3988, 4117, 4116], [3988, 3989, 4118], [3988, 4118, 4117], [3989, 3990, 4118], [3990, 4119, 4118], [3990, 3991, 4120], [3990, 4120, 4119], [3991, 3992, 4120], [3992, 4121, 4120], [3992, 3993, 4122], [3992, 4122, 4121], [3993, 3994, 4122], [3994, 4123, 4122], [3994, 3995, 4124], [3994, 4124, 4123], [3995, 3996, 4124], [3996, 4125, 4124], [3996, 3997, 4126], [3996, 4126, 4125], [3997, 3998, 4126], [3998, 4127, 4126], [3999, 4000, 4128], [4000, 4129, 4128], [4000, 4001, 4130], [4000, 4130, 4129], [4001, 4002, 4130], [4002, 4131, 4130], [4002, 4003, 4132], [4002, 4132, 4131], [4003, 4004, 4132], [4004, 4133, 4132], [4004, 4005, 4134], [4004, 4134, 4133], [4005, 4006, 4134], [4006, 4135, 4134], [4006, 4007, 4136], [4006, 4136, 4135], [4007, 4008, 4136], [4008, 4137, 4136], [4008, 4009, 4138], [4008, 4138, 4137], [4009, 4010, 4138], [4010, 4139, 4138], [4010, 4011, 4140], [4010, 4140, 4139], [4011, 4012, 4140], [4012, 4141, 4140], [4012, 4013, 4142], [4012, 4142, 4141], [4013, 4014, 4142], [4014, 4143, 4142], [4014, 4015, 4144], [4014, 4144, 4143], [4015, 4016, 4144], [4016, 4145, 4144], [4016, 4017, 4146], [4016, 4146, 4145], [4017, 4018, 4146], [4018, 4147, 4146], [4018, 4019, 4148], [4018, 4148, 4147], [4019, 4020, 4148], [4020, 4149, 4148], [4020, 4021, 4150], [4020, 4150, 4149], [4021, 4022, 4150], [4022, 4151, 4150], [4022, 4023, 4152], [4022, 4152, 4151], [4023, 4024, 4152], [4024, 4153, 4152], [4024, 4025, 4154], [4024, 4154, 4153], [4025, 4026, 4154], [4026, 4155, 4154], [4026, 4027, 4156], [4026, 4156, 4155], [4027, 4028, 4156], [4028, 4157, 4156], [4028, 4029, 4158], [4028, 4158, 4157], [4029, 4030, 4158], [4030, 4159, 4158], [4030, 4031, 4160], [4030, 4160, 4159], [4031, 4032, 4160], [4032, 4161, 4160], [4032, 4033, 4162], [4032, 4162, 4161], [4033, 4034, 4162], [4034, 4163, 4162], [4034, 4035, 4164], [4034, 4164, 4163], [4035, 4036, 4164], [4036, 4165, 4164], [4036, 4037, 4166], [4036, 4166, 4165], [4037, 4038, 4166], [4038, 4167, 4166], [4038, 4039, 4168], [4038, 4168, 4167], [4039, 4040, 4168], [4040, 4169, 4168], [4040, 4041, 4170], [4040, 4170, 4169], [4041, 4042, 4170], [4042, 4171, 4170], [4042, 4043, 4172], [4042, 4172, 4171], [4043, 4044, 4172], [4044, 4173, 4172], [4044, 4045, 4174], [4044, 4174, 4173], [4045, 4046, 4174], [4046, 4175, 4174], [4046, 4047, 4176], [4046, 4176, 4175], [4047, 4048, 4176], [4048, 4177, 4176], [4048, 4049, 4178], [4048, 4178, 4177], [4049, 4050, 4178], [4050, 4179, 4178], [4050, 4051, 4180], [4050, 4180, 4179], [4051, 4052, 4180], [4052, 4181, 4180], [4052, 4053, 4182], [4052, 4182, 4181], [4053, 4054, 4182], [4054, 4183, 4182], [4054, 4055, 4184], [4054, 4184, 4183], [4055, 4056, 4184], [4056, 4185, 4184], [4056, 4057, 4186], [4056, 4186, 4185], [4057, 4058, 4186], [4058, 4187, 4186], [4058, 4059, 4188], [4058, 4188, 4187], [4059, 4060, 4188], [4060, 4189, 4188], [4060, 4061, 4190], [4060, 4190, 4189], [4061, 4062, 4190], [4062, 4191, 4190], [4062, 4063, 4192], [4062, 4192, 4191], [4063, 4064, 4192], [4064, 4193, 4192], [4064, 4065, 4194], [4064, 4194, 4193], [4065, 4066, 4194], [4066, 4195, 4194], [4066, 4067, 4196], [4066, 4196, 4195], 
[4067, 4068, 4196], [4068, 4197, 4196], [4068, 4069, 4198], [4068, 4198, 4197], [4069, 4070, 4198], [4070, 4199, 4198], [4070, 4071, 4200], [4070, 4200, 4199], [4071, 4072, 4200], [4072, 4201, 4200], [4072, 4073, 4202], [4072, 4202, 4201], [4073, 4074, 4202], [4074, 4203, 4202], [4074, 4075, 4204], [4074, 4204, 4203], [4075, 4076, 4204], [4076, 4205, 4204], [4076, 4077, 4206], [4076, 4206, 4205], [4077, 4078, 4206], [4078, 4207, 4206], [4078, 4079, 4208], [4078, 4208, 4207], [4079, 4080, 4208], [4080, 4209, 4208], [4080, 4081, 4210], [4080, 4210, 4209], [4081, 4082, 4210], [4082, 4211, 4210], [4082, 4083, 4212], [4082, 4212, 4211], [4083, 4084, 4212], [4084, 4213, 4212], [4084, 4085, 4214], [4084, 4214, 4213], [4085, 4086, 4214], [4086, 4215, 4214], [4086, 4087, 4216], [4086, 4216, 4215], [4087, 4088, 4216], [4088, 4217, 4216], [4088, 4089, 4218], [4088, 4218, 4217], [4089, 4090, 4218], [4090, 4219, 4218], [4090, 4091, 4220], [4090, 4220, 4219], [4091, 4092, 4220], [4092, 4221, 4220], [4092, 4093, 4222], [4092, 4222, 4221], [4093, 4094, 4222], [4094, 4223, 4222], [4094, 4095, 4224], [4094, 4224, 4223], [4095, 4096, 4224], [4096, 4225, 4224], [4096, 4097, 4226], [4096, 4226, 4225], [4097, 4098, 4226], [4098, 4227, 4226], [4098, 4099, 4228], [4098, 4228, 4227], [4099, 4100, 4228], [4100, 4229, 4228], [4100, 4101, 4230], [4100, 4230, 4229], [4101, 4102, 4230], [4102, 4231, 4230], [4102, 4103, 4232], [4102, 4232, 4231], [4103, 4104, 4232], [4104, 4233, 4232], [4104, 4105, 4234], [4104, 4234, 4233], [4105, 4106, 4234], [4106, 4235, 4234], [4106, 4107, 4236], [4106, 4236, 4235], [4107, 4108, 4236], [4108, 4237, 4236], [4108, 4109, 4238], [4108, 4238, 4237], [4109, 4110, 4238], [4110, 4239, 4238], [4110, 4111, 4240], [4110, 4240, 4239], [4111, 4112, 4240], [4112, 4241, 4240], [4112, 4113, 4242], [4112, 4242, 4241], [4113, 4114, 4242], [4114, 4243, 4242], [4114, 4115, 4244], [4114, 4244, 4243], [4115, 4116, 4244], [4116, 4245, 4244], [4116, 4117, 4246], [4116, 4246, 4245], [4117, 4118, 4246], [4118, 4247, 4246], [4118, 4119, 4248], [4118, 4248, 4247], [4119, 4120, 4248], [4120, 4249, 4248], [4120, 4121, 4250], [4120, 4250, 4249], [4121, 4122, 4250], [4122, 4251, 4250], [4122, 4123, 4252], [4122, 4252, 4251], [4123, 4124, 4252], [4124, 4253, 4252], [4124, 4125, 4254], [4124, 4254, 4253], [4125, 4126, 4254], [4126, 4255, 4254], [4126, 4127, 4256], [4126, 4256, 4255], [4128, 4129, 4258], [4128, 4258, 4257], [4129, 4130, 4258], [4130, 4259, 4258], [4130, 4131, 4260], [4130, 4260, 4259], [4131, 4132, 4260], [4132, 4261, 4260], [4132, 4133, 4262], [4132, 4262, 4261], [4133, 4134, 4262], [4134, 4263, 4262], [4134, 4135, 4264], [4134, 4264, 4263], [4135, 4136, 4264], [4136, 4265, 4264], [4136, 4137, 4266], [4136, 4266, 4265], [4137, 4138, 4266], [4138, 4267, 4266], [4138, 4139, 4268], [4138, 4268, 4267], [4139, 4140, 4268], [4140, 4269, 4268], [4140, 4141, 4270], [4140, 4270, 4269], [4141, 4142, 4270], [4142, 4271, 4270], [4142, 4143, 4272], [4142, 4272, 4271], [4143, 4144, 4272], [4144, 4273, 4272], [4144, 4145, 4274], [4144, 4274, 4273], [4145, 4146, 4274], [4146, 4275, 4274], [4146, 4147, 4276], [4146, 4276, 4275], [4147, 4148, 4276], [4148, 4277, 4276], [4148, 4149, 4278], [4148, 4278, 4277], [4149, 4150, 4278], [4150, 4279, 4278], [4150, 4151, 4280], [4150, 4280, 4279], [4151, 4152, 4280], [4152, 4281, 4280], [4152, 4153, 4282], [4152, 4282, 4281], [4153, 4154, 4282], [4154, 4283, 4282], [4154, 4155, 4284], [4154, 4284, 4283], [4155, 4156, 4284], [4156, 4285, 4284], [4156, 4157, 4286], [4156, 4286, 
4285], [4157, 4158, 4286], [4158, 4287, 4286], [4158, 4159, 4288], [4158, 4288, 4287], [4159, 4160, 4288], [4160, 4289, 4288], [4160, 4161, 4290], [4160, 4290, 4289], [4161, 4162, 4290], [4162, 4291, 4290], [4162, 4163, 4292], [4162, 4292, 4291], [4163, 4164, 4292], [4164, 4293, 4292], [4164, 4165, 4294], [4164, 4294, 4293], [4165, 4166, 4294], [4166, 4295, 4294], [4166, 4167, 4296], [4166, 4296, 4295], [4167, 4168, 4296], [4168, 4297, 4296], [4168, 4169, 4298], [4168, 4298, 4297], [4169, 4170, 4298], [4170, 4299, 4298], [4170, 4171, 4300], [4170, 4300, 4299], [4171, 4172, 4300], [4172, 4301, 4300], [4172, 4173, 4302], [4172, 4302, 4301], [4173, 4174, 4302], [4174, 4303, 4302], [4174, 4175, 4304], [4174, 4304, 4303], [4175, 4176, 4304], [4176, 4305, 4304], [4176, 4177, 4306], [4176, 4306, 4305], [4177, 4178, 4306], [4178, 4307, 4306], [4178, 4179, 4308], [4178, 4308, 4307], [4179, 4180, 4308], [4180, 4309, 4308], [4180, 4181, 4310], [4180, 4310, 4309], [4181, 4182, 4310], [4182, 4311, 4310], [4182, 4183, 4312], [4182, 4312, 4311], [4183, 4184, 4312], [4184, 4313, 4312], [4184, 4185, 4314], [4184, 4314, 4313], [4185, 4186, 4314], [4186, 4315, 4314], [4186, 4187, 4316], [4186, 4316, 4315], [4187, 4188, 4316], [4188, 4317, 4316], [4188, 4189, 4318], [4188, 4318, 4317], [4189, 4190, 4318], [4190, 4319, 4318], [4190, 4191, 4320], [4190, 4320, 4319], [4191, 4192, 4320], [4192, 4321, 4320], [4192, 4193, 4322], [4192, 4322, 4321], [4193, 4194, 4322], [4194, 4323, 4322], [4194, 4195, 4324], [4194, 4324, 4323], [4195, 4196, 4324], [4196, 4325, 4324], [4196, 4197, 4326], [4196, 4326, 4325], [4197, 4198, 4326], [4198, 4327, 4326], [4198, 4199, 4328], [4198, 4328, 4327], [4199, 4200, 4328], [4200, 4329, 4328], [4200, 4201, 4330], [4200, 4330, 4329], [4201, 4202, 4330], [4202, 4331, 4330], [4202, 4203, 4332], [4202, 4332, 4331], [4203, 4204, 4332], [4204, 4333, 4332], [4204, 4205, 4334], [4204, 4334, 4333], [4205, 4206, 4334], [4206, 4335, 4334], [4206, 4207, 4336], [4206, 4336, 4335], [4207, 4208, 4336], [4208, 4337, 4336], [4208, 4209, 4338], [4208, 4338, 4337], [4209, 4210, 4338], [4210, 4339, 4338], [4210, 4211, 4340], [4210, 4340, 4339], [4211, 4212, 4340], [4212, 4341, 4340], [4212, 4213, 4342], [4212, 4342, 4341], [4213, 4214, 4342], [4214, 4343, 4342], [4214, 4215, 4344], [4214, 4344, 4343], [4215, 4216, 4344], [4216, 4345, 4344], [4216, 4217, 4346], [4216, 4346, 4345], [4217, 4218, 4346], [4218, 4347, 4346], [4218, 4219, 4348], [4218, 4348, 4347], [4219, 4220, 4348], [4220, 4349, 4348], [4220, 4221, 4350], [4220, 4350, 4349], [4221, 4222, 4350], [4222, 4351, 4350], [4222, 4223, 4352], [4222, 4352, 4351], [4223, 4224, 4352], [4224, 4353, 4352], [4224, 4225, 4354], [4224, 4354, 4353], [4225, 4226, 4354], [4226, 4355, 4354], [4226, 4227, 4356], [4226, 4356, 4355], [4227, 4228, 4356], [4228, 4357, 4356], [4228, 4229, 4358], [4228, 4358, 4357], [4229, 4230, 4358], [4230, 4359, 4358], [4230, 4231, 4360], [4230, 4360, 4359], [4231, 4232, 4360], [4232, 4361, 4360], [4232, 4233, 4362], [4232, 4362, 4361], [4233, 4234, 4362], [4234, 4363, 4362], [4234, 4235, 4364], [4234, 4364, 4363], [4235, 4236, 4364], [4236, 4365, 4364], [4236, 4237, 4366], [4236, 4366, 4365], [4237, 4238, 4366], [4238, 4367, 4366], [4238, 4239, 4368], [4238, 4368, 4367], [4239, 4240, 4368], [4240, 4369, 4368], [4240, 4241, 4370], [4240, 4370, 4369], [4241, 4242, 4370], [4242, 4371, 4370], [4242, 4243, 4372], [4242, 4372, 4371], [4243, 4244, 4372], [4244, 4373, 4372], [4244, 4245, 4374], [4244, 4374, 4373], [4245, 4246, 4374], [4246, 
4375, 4374], [4246, 4247, 4376], [4246, 4376, 4375], [4247, 4248, 4376], [4248, 4377, 4376], [4248, 4249, 4378], [4248, 4378, 4377], [4249, 4250, 4378], [4250, 4379, 4378], [4250, 4251, 4380], [4250, 4380, 4379], [4251, 4252, 4380], [4252, 4381, 4380], [4252, 4253, 4382], [4252, 4382, 4381], [4253, 4254, 4382], [4254, 4383, 4382], [4254, 4255, 4384], [4254, 4384, 4383], [4255, 4256, 4384], [4256, 4385, 4384], [4257, 4258, 4386], [4258, 4387, 4386], [4258, 4259, 4388], [4258, 4388, 4387], [4259, 4260, 4388], [4260, 4389, 4388], [4260, 4261, 4390], [4260, 4390, 4389], [4261, 4262, 4390], [4262, 4391, 4390], [4262, 4263, 4392], [4262, 4392, 4391], [4263, 4264, 4392], [4264, 4393, 4392], [4264, 4265, 4394], [4264, 4394, 4393], [4265, 4266, 4394], [4266, 4395, 4394], [4266, 4267, 4396], [4266, 4396, 4395], [4267, 4268, 4396], [4268, 4397, 4396], [4268, 4269, 4398], [4268, 4398, 4397], [4269, 4270, 4398], [4270, 4399, 4398], [4270, 4271, 4400], [4270, 4400, 4399], [4271, 4272, 4400], [4272, 4401, 4400], [4272, 4273, 4402], [4272, 4402, 4401], [4273, 4274, 4402], [4274, 4403, 4402], [4274, 4275, 4404], [4274, 4404, 4403], [4275, 4276, 4404], [4276, 4405, 4404], [4276, 4277, 4406], [4276, 4406, 4405], [4277, 4278, 4406], [4278, 4407, 4406], [4278, 4279, 4408], [4278, 4408, 4407], [4279, 4280, 4408], [4280, 4409, 4408], [4280, 4281, 4410], [4280, 4410, 4409], [4281, 4282, 4410], [4282, 4411, 4410], [4282, 4283, 4412], [4282, 4412, 4411], [4283, 4284, 4412], [4284, 4413, 4412], [4284, 4285, 4414], [4284, 4414, 4413], [4285, 4286, 4414], [4286, 4415, 4414], [4286, 4287, 4416], [4286, 4416, 4415], [4287, 4288, 4416], [4288, 4417, 4416], [4288, 4289, 4418], [4288, 4418, 4417], [4289, 4290, 4418], [4290, 4419, 4418], [4290, 4291, 4420], [4290, 4420, 4419], [4291, 4292, 4420], [4292, 4421, 4420], [4292, 4293, 4422], [4292, 4422, 4421], [4293, 4294, 4422], [4294, 4423, 4422], [4294, 4295, 4424], [4294, 4424, 4423], [4295, 4296, 4424], [4296, 4425, 4424], [4296, 4297, 4426], [4296, 4426, 4425], [4297, 4298, 4426], [4298, 4427, 4426], [4298, 4299, 4428], [4298, 4428, 4427], [4299, 4300, 4428], [4300, 4429, 4428], [4300, 4301, 4430], [4300, 4430, 4429], [4301, 4302, 4430], [4302, 4431, 4430], [4302, 4303, 4432], [4302, 4432, 4431], [4303, 4304, 4432], [4304, 4433, 4432], [4304, 4305, 4434], [4304, 4434, 4433], [4305, 4306, 4434], [4306, 4435, 4434], [4306, 4307, 4436], [4306, 4436, 4435], [4307, 4308, 4436], [4308, 4437, 4436], [4308, 4309, 4438], [4308, 4438, 4437], [4309, 4310, 4438], [4310, 4439, 4438], [4310, 4311, 4440], [4310, 4440, 4439], [4311, 4312, 4440], [4312, 4441, 4440], [4312, 4313, 4442], [4312, 4442, 4441], [4313, 4314, 4442], [4314, 4443, 4442], [4314, 4315, 4444], [4314, 4444, 4443], [4315, 4316, 4444], [4316, 4445, 4444], [4316, 4317, 4446], [4316, 4446, 4445], [4317, 4318, 4446], [4318, 4447, 4446], [4318, 4319, 4448], [4318, 4448, 4447], [4319, 4320, 4448], [4320, 4449, 4448], [4320, 4321, 4450], [4320, 4450, 4449], [4321, 4322, 4450], [4322, 4451, 4450], [4322, 4323, 4452], [4322, 4452, 4451], [4323, 4324, 4452], [4324, 4453, 4452], [4324, 4325, 4454], [4324, 4454, 4453], [4325, 4326, 4454], [4326, 4455, 4454], [4326, 4327, 4456], [4326, 4456, 4455], [4327, 4328, 4456], [4328, 4457, 4456], [4328, 4329, 4458], [4328, 4458, 4457], [4329, 4330, 4458], [4330, 4459, 4458], [4330, 4331, 4460], [4330, 4460, 4459], [4331, 4332, 4460], [4332, 4461, 4460], [4332, 4333, 4462], [4332, 4462, 4461], [4333, 4334, 4462], [4334, 4463, 4462], [4334, 4335, 4464], [4334, 4464, 4463], [4335, 4336, 4464], 
[4336, 4465, 4464], [4336, 4337, 4466], [4336, 4466, 4465], [4337, 4338, 4466], [4338, 4467, 4466], [4338, 4339, 4468], [4338, 4468, 4467], [4339, 4340, 4468], [4340, 4469, 4468], [4340, 4341, 4470], [4340, 4470, 4469], [4341, 4342, 4470], [4342, 4471, 4470], [4342, 4343, 4472], [4342, 4472, 4471], [4343, 4344, 4472], [4344, 4473, 4472], [4344, 4345, 4474], [4344, 4474, 4473], [4345, 4346, 4474], [4346, 4475, 4474], [4346, 4347, 4476], [4346, 4476, 4475], [4347, 4348, 4476], [4348, 4477, 4476], [4348, 4349, 4478], [4348, 4478, 4477], [4349, 4350, 4478], [4350, 4479, 4478], [4350, 4351, 4480], [4350, 4480, 4479], [4351, 4352, 4480], [4352, 4481, 4480], [4352, 4353, 4482], [4352, 4482, 4481], [4353, 4354, 4482], [4354, 4483, 4482], [4354, 4355, 4484], [4354, 4484, 4483], [4355, 4356, 4484], [4356, 4485, 4484], [4356, 4357, 4486], [4356, 4486, 4485], [4357, 4358, 4486], [4358, 4487, 4486], [4358, 4359, 4488], [4358, 4488, 4487], [4359, 4360, 4488], [4360, 4489, 4488], [4360, 4361, 4490], [4360, 4490, 4489], [4361, 4362, 4490], [4362, 4491, 4490], [4362, 4363, 4492], [4362, 4492, 4491], [4363, 4364, 4492], [4364, 4493, 4492], [4364, 4365, 4494], [4364, 4494, 4493], [4365, 4366, 4494], [4366, 4495, 4494], [4366, 4367, 4496], [4366, 4496, 4495], [4367, 4368, 4496], [4368, 4497, 4496], [4368, 4369, 4498], [4368, 4498, 4497], [4369, 4370, 4498], [4370, 4499, 4498], [4370, 4371, 4500], [4370, 4500, 4499], [4371, 4372, 4500], [4372, 4501, 4500], [4372, 4373, 4502], [4372, 4502, 4501], [4373, 4374, 4502], [4374, 4503, 4502], [4374, 4375, 4504], [4374, 4504, 4503], [4375, 4376, 4504], [4376, 4505, 4504], [4376, 4377, 4506], [4376, 4506, 4505], [4377, 4378, 4506], [4378, 4507, 4506], [4378, 4379, 4508], [4378, 4508, 4507], [4379, 4380, 4508], [4380, 4509, 4508], [4380, 4381, 4510], [4380, 4510, 4509], [4381, 4382, 4510], [4382, 4511, 4510], [4382, 4383, 4512], [4382, 4512, 4511], [4383, 4384, 4512], [4384, 4513, 4512], [4384, 4385, 4514], [4384, 4514, 4513], [4386, 4387, 4516], [4386, 4516, 4515], [4387, 4388, 4516], [4388, 4517, 4516], [4388, 4389, 4518], [4388, 4518, 4517], [4389, 4390, 4518], [4390, 4519, 4518], [4390, 4391, 4520], [4390, 4520, 4519], [4391, 4392, 4520], [4392, 4521, 4520], [4392, 4393, 4522], [4392, 4522, 4521], [4393, 4394, 4522], [4394, 4523, 4522], [4394, 4395, 4524], [4394, 4524, 4523], [4395, 4396, 4524], [4396, 4525, 4524], [4396, 4397, 4526], [4396, 4526, 4525], [4397, 4398, 4526], [4398, 4527, 4526], [4398, 4399, 4528], [4398, 4528, 4527], [4399, 4400, 4528], [4400, 4529, 4528], [4400, 4401, 4530], [4400, 4530, 4529], [4401, 4402, 4530], [4402, 4531, 4530], [4402, 4403, 4532], [4402, 4532, 4531], [4403, 4404, 4532], [4404, 4533, 4532], [4404, 4405, 4534], [4404, 4534, 4533], [4405, 4406, 4534], [4406, 4535, 4534], [4406, 4407, 4536], [4406, 4536, 4535], [4407, 4408, 4536], [4408, 4537, 4536], [4408, 4409, 4538], [4408, 4538, 4537], [4409, 4410, 4538], [4410, 4539, 4538], [4410, 4411, 4540], [4410, 4540, 4539], [4411, 4412, 4540], [4412, 4541, 4540], [4412, 4413, 4542], [4412, 4542, 4541], [4413, 4414, 4542], [4414, 4543, 4542], [4414, 4415, 4544], [4414, 4544, 4543], [4415, 4416, 4544], [4416, 4545, 4544], [4416, 4417, 4546], [4416, 4546, 4545], [4417, 4418, 4546], [4418, 4547, 4546], [4418, 4419, 4548], [4418, 4548, 4547], [4419, 4420, 4548], [4420, 4549, 4548], [4420, 4421, 4550], [4420, 4550, 4549], [4421, 4422, 4550], [4422, 4551, 4550], [4422, 4423, 4552], [4422, 4552, 4551], [4423, 4424, 4552], [4424, 4553, 4552], [4424, 4425, 4554], [4424, 4554, 4553], [4425, 4426, 
4554], [4426, 4555, 4554], [4426, 4427, 4556], [4426, 4556, 4555], [4427, 4428, 4556], [4428, 4557, 4556], [4428, 4429, 4558], [4428, 4558, 4557], [4429, 4430, 4558], [4430, 4559, 4558], [4430, 4431, 4560], [4430, 4560, 4559], [4431, 4432, 4560], [4432, 4561, 4560], [4432, 4433, 4562], [4432, 4562, 4561], [4433, 4434, 4562], [4434, 4563, 4562], [4434, 4435, 4564], [4434, 4564, 4563], [4435, 4436, 4564], [4436, 4565, 4564], [4436, 4437, 4566], [4436, 4566, 4565], [4437, 4438, 4566], [4438, 4567, 4566], [4438, 4439, 4568], [4438, 4568, 4567], [4439, 4440, 4568], [4440, 4569, 4568], [4440, 4441, 4570], [4440, 4570, 4569], [4441, 4442, 4570], [4442, 4571, 4570], [4442, 4443, 4572], [4442, 4572, 4571], [4443, 4444, 4572], [4444, 4573, 4572], [4444, 4445, 4574], [4444, 4574, 4573], [4445, 4446, 4574], [4446, 4575, 4574], [4446, 4447, 4576], [4446, 4576, 4575], [4447, 4448, 4576], [4448, 4577, 4576], [4448, 4449, 4578], [4448, 4578, 4577], [4449, 4450, 4578], [4450, 4579, 4578], [4450, 4451, 4580], [4450, 4580, 4579], [4451, 4452, 4580], [4452, 4581, 4580], [4452, 4453, 4582], [4452, 4582, 4581], [4453, 4454, 4582], [4454, 4583, 4582], [4454, 4455, 4584], [4454, 4584, 4583], [4455, 4456, 4584], [4456, 4585, 4584], [4456, 4457, 4586], [4456, 4586, 4585], [4457, 4458, 4586], [4458, 4587, 4586], [4458, 4459, 4588], [4458, 4588, 4587], [4459, 4460, 4588], [4460, 4589, 4588], [4460, 4461, 4590], [4460, 4590, 4589], [4461, 4462, 4590], [4462, 4591, 4590], [4462, 4463, 4592], [4462, 4592, 4591], [4463, 4464, 4592], [4464, 4593, 4592], [4464, 4465, 4594], [4464, 4594, 4593], [4465, 4466, 4594], [4466, 4595, 4594], [4466, 4467, 4596], [4466, 4596, 4595], [4467, 4468, 4596], [4468, 4597, 4596], [4468, 4469, 4598], [4468, 4598, 4597], [4469, 4470, 4598], [4470, 4599, 4598], [4470, 4471, 4600], [4470, 4600, 4599], [4471, 4472, 4600], [4472, 4601, 4600], [4472, 4473, 4602], [4472, 4602, 4601], [4473, 4474, 4602], [4474, 4603, 4602], [4474, 4475, 4604], [4474, 4604, 4603], [4475, 4476, 4604], [4476, 4605, 4604], [4476, 4477, 4606], [4476, 4606, 4605], [4477, 4478, 4606], [4478, 4607, 4606], [4478, 4479, 4608], [4478, 4608, 4607], [4479, 4480, 4608], [4480, 4609, 4608], [4480, 4481, 4610], [4480, 4610, 4609], [4481, 4482, 4610], [4482, 4611, 4610], [4482, 4483, 4612], [4482, 4612, 4611], [4483, 4484, 4612], [4484, 4613, 4612], [4484, 4485, 4614], [4484, 4614, 4613], [4485, 4486, 4614], [4486, 4615, 4614], [4486, 4487, 4616], [4486, 4616, 4615], [4487, 4488, 4616], [4488, 4617, 4616], [4488, 4489, 4618], [4488, 4618, 4617], [4489, 4490, 4618], [4490, 4619, 4618], [4490, 4491, 4620], [4490, 4620, 4619], [4491, 4492, 4620], [4492, 4621, 4620], [4492, 4493, 4622], [4492, 4622, 4621], [4493, 4494, 4622], [4494, 4623, 4622], [4494, 4495, 4624], [4494, 4624, 4623], [4495, 4496, 4624], [4496, 4625, 4624], [4496, 4497, 4626], [4496, 4626, 4625], [4497, 4498, 4626], [4498, 4627, 4626], [4498, 4499, 4628], [4498, 4628, 4627], [4499, 4500, 4628], [4500, 4629, 4628], [4500, 4501, 4630], [4500, 4630, 4629], [4501, 4502, 4630], [4502, 4631, 4630], [4502, 4503, 4632], [4502, 4632, 4631], [4503, 4504, 4632], [4504, 4633, 4632], [4504, 4505, 4634], [4504, 4634, 4633], [4505, 4506, 4634], [4506, 4635, 4634], [4506, 4507, 4636], [4506, 4636, 4635], [4507, 4508, 4636], [4508, 4637, 4636], [4508, 4509, 4638], [4508, 4638, 4637], [4509, 4510, 4638], [4510, 4639, 4638], [4510, 4511, 4640], [4510, 4640, 4639], [4511, 4512, 4640], [4512, 4641, 4640], [4512, 4513, 4642], [4512, 4642, 4641], [4513, 4514, 4642], [4514, 4643, 4642], [4515, 
4516, 4644], [4516, 4645, 4644], [4516, 4517, 4646], [4516, 4646, 4645], [4517, 4518, 4646], [4518, 4647, 4646], [4518, 4519, 4648], [4518, 4648, 4647], [4519, 4520, 4648], [4520, 4649, 4648], [4520, 4521, 4650], [4520, 4650, 4649], [4521, 4522, 4650], [4522, 4651, 4650], [4522, 4523, 4652], [4522, 4652, 4651], [4523, 4524, 4652], [4524, 4653, 4652], [4524, 4525, 4654], [4524, 4654, 4653], [4525, 4526, 4654], [4526, 4655, 4654], [4526, 4527, 4656], [4526, 4656, 4655], [4527, 4528, 4656], [4528, 4657, 4656], [4528, 4529, 4658], [4528, 4658, 4657], [4529, 4530, 4658], [4530, 4659, 4658], [4530, 4531, 4660], [4530, 4660, 4659], [4531, 4532, 4660], [4532, 4661, 4660], [4532, 4533, 4662], [4532, 4662, 4661], [4533, 4534, 4662], [4534, 4663, 4662], [4534, 4535, 4664], [4534, 4664, 4663], [4535, 4536, 4664], [4536, 4665, 4664], [4536, 4537, 4666], [4536, 4666, 4665], [4537, 4538, 4666], [4538, 4667, 4666], [4538, 4539, 4668], [4538, 4668, 4667], [4539, 4540, 4668], [4540, 4669, 4668], [4540, 4541, 4670], [4540, 4670, 4669], [4541, 4542, 4670], [4542, 4671, 4670], [4542, 4543, 4672], [4542, 4672, 4671], [4543, 4544, 4672], [4544, 4673, 4672], [4544, 4545, 4674], [4544, 4674, 4673], [4545, 4546, 4674], [4546, 4675, 4674], [4546, 4547, 4676], [4546, 4676, 4675], [4547, 4548, 4676], [4548, 4677, 4676], [4548, 4549, 4678], [4548, 4678, 4677], [4549, 4550, 4678], [4550, 4679, 4678], [4550, 4551, 4680], [4550, 4680, 4679], [4551, 4552, 4680], [4552, 4681, 4680], [4552, 4553, 4682], [4552, 4682, 4681], [4553, 4554, 4682], [4554, 4683, 4682], [4554, 4555, 4684], [4554, 4684, 4683], [4555, 4556, 4684], [4556, 4685, 4684], [4556, 4557, 4686], [4556, 4686, 4685], [4557, 4558, 4686], [4558, 4687, 4686], [4558, 4559, 4688], [4558, 4688, 4687], [4559, 4560, 4688], [4560, 4689, 4688], [4560, 4561, 4690], [4560, 4690, 4689], [4561, 4562, 4690], [4562, 4691, 4690], [4562, 4563, 4692], [4562, 4692, 4691], [4563, 4564, 4692], [4564, 4693, 4692], [4564, 4565, 4694], [4564, 4694, 4693], [4565, 4566, 4694], [4566, 4695, 4694], [4566, 4567, 4696], [4566, 4696, 4695], [4567, 4568, 4696], [4568, 4697, 4696], [4568, 4569, 4698], [4568, 4698, 4697], [4569, 4570, 4698], [4570, 4699, 4698], [4570, 4571, 4700], [4570, 4700, 4699], [4571, 4572, 4700], [4572, 4701, 4700], [4572, 4573, 4702], [4572, 4702, 4701], [4573, 4574, 4702], [4574, 4703, 4702], [4574, 4575, 4704], [4574, 4704, 4703], [4575, 4576, 4704], [4576, 4705, 4704], [4576, 4577, 4706], [4576, 4706, 4705], [4577, 4578, 4706], [4578, 4707, 4706], [4578, 4579, 4708], [4578, 4708, 4707], [4579, 4580, 4708], [4580, 4709, 4708], [4580, 4581, 4710], [4580, 4710, 4709], [4581, 4582, 4710], [4582, 4711, 4710], [4582, 4583, 4712], [4582, 4712, 4711], [4583, 4584, 4712], [4584, 4713, 4712], [4584, 4585, 4714], [4584, 4714, 4713], [4585, 4586, 4714], [4586, 4715, 4714], [4586, 4587, 4716], [4586, 4716, 4715], [4587, 4588, 4716], [4588, 4717, 4716], [4588, 4589, 4718], [4588, 4718, 4717], [4589, 4590, 4718], [4590, 4719, 4718], [4590, 4591, 4720], [4590, 4720, 4719], [4591, 4592, 4720], [4592, 4721, 4720], [4592, 4593, 4722], [4592, 4722, 4721], [4593, 4594, 4722], [4594, 4723, 4722], [4594, 4595, 4724], [4594, 4724, 4723], [4595, 4596, 4724], [4596, 4725, 4724], [4596, 4597, 4726], [4596, 4726, 4725], [4597, 4598, 4726], [4598, 4727, 4726], [4598, 4599, 4728], [4598, 4728, 4727], [4599, 4600, 4728], [4600, 4729, 4728], [4600, 4601, 4730], [4600, 4730, 4729], [4601, 4602, 4730], [4602, 4731, 4730], [4602, 4603, 4732], [4602, 4732, 4731], [4603, 4604, 4732], [4604, 4733, 4732], 
[4604, 4605, 4734], [4604, 4734, 4733], [4605, 4606, 4734], [4606, 4735, 4734], [4606, 4607, 4736], [4606, 4736, 4735], [4607, 4608, 4736], [4608, 4737, 4736], [4608, 4609, 4738], [4608, 4738, 4737], [4609, 4610, 4738], [4610, 4739, 4738], [4610, 4611, 4740], [4610, 4740, 4739], [4611, 4612, 4740], [4612, 4741, 4740], [4612, 4613, 4742], [4612, 4742, 4741], [4613, 4614, 4742], [4614, 4743, 4742], [4614, 4615, 4744], [4614, 4744, 4743], [4615, 4616, 4744], [4616, 4745, 4744], [4616, 4617, 4746], [4616, 4746, 4745], [4617, 4618, 4746], [4618, 4747, 4746], [4618, 4619, 4748], [4618, 4748, 4747], [4619, 4620, 4748], [4620, 4749, 4748], [4620, 4621, 4750], [4620, 4750, 4749], [4621, 4622, 4750], [4622, 4751, 4750], [4622, 4623, 4752], [4622, 4752, 4751], [4623, 4624, 4752], [4624, 4753, 4752], [4624, 4625, 4754], [4624, 4754, 4753], [4625, 4626, 4754], [4626, 4755, 4754], [4626, 4627, 4756], [4626, 4756, 4755], [4627, 4628, 4756], [4628, 4757, 4756], [4628, 4629, 4758], [4628, 4758, 4757], [4629, 4630, 4758], [4630, 4759, 4758], [4630, 4631, 4760], [4630, 4760, 4759], [4631, 4632, 4760], [4632, 4761, 4760], [4632, 4633, 4762], [4632, 4762, 4761], [4633, 4634, 4762], [4634, 4763, 4762], [4634, 4635, 4764], [4634, 4764, 4763], [4635, 4636, 4764], [4636, 4765, 4764], [4636, 4637, 4766], [4636, 4766, 4765], [4637, 4638, 4766], [4638, 4767, 4766], [4638, 4639, 4768], [4638, 4768, 4767], [4639, 4640, 4768], [4640, 4769, 4768], [4640, 4641, 4770], [4640, 4770, 4769], [4641, 4642, 4770], [4642, 4771, 4770], [4642, 4643, 4772], [4642, 4772, 4771], [4644, 4645, 4774], [4644, 4774, 4773], [4645, 4646, 4774], [4646, 4775, 4774], [4646, 4647, 4776], [4646, 4776, 4775], [4647, 4648, 4776], [4648, 4777, 4776], [4648, 4649, 4778], [4648, 4778, 4777], [4649, 4650, 4778], [4650, 4779, 4778], [4650, 4651, 4780], [4650, 4780, 4779], [4651, 4652, 4780], [4652, 4781, 4780], [4652, 4653, 4782], [4652, 4782, 4781], [4653, 4654, 4782], [4654, 4783, 4782], [4654, 4655, 4784], [4654, 4784, 4783], [4655, 4656, 4784], [4656, 4785, 4784], [4656, 4657, 4786], [4656, 4786, 4785], [4657, 4658, 4786], [4658, 4787, 4786], [4658, 4659, 4788], [4658, 4788, 4787], [4659, 4660, 4788], [4660, 4789, 4788], [4660, 4661, 4790], [4660, 4790, 4789], [4661, 4662, 4790], [4662, 4791, 4790], [4662, 4663, 4792], [4662, 4792, 4791], [4663, 4664, 4792], [4664, 4793, 4792], [4664, 4665, 4794], [4664, 4794, 4793], [4665, 4666, 4794], [4666, 4795, 4794], [4666, 4667, 4796], [4666, 4796, 4795], [4667, 4668, 4796], [4668, 4797, 4796], [4668, 4669, 4798], [4668, 4798, 4797], [4669, 4670, 4798], [4670, 4799, 4798], [4670, 4671, 4800], [4670, 4800, 4799], [4671, 4672, 4800], [4672, 4801, 4800], [4672, 4673, 4802], [4672, 4802, 4801], [4673, 4674, 4802], [4674, 4803, 4802], [4674, 4675, 4804], [4674, 4804, 4803], [4675, 4676, 4804], [4676, 4805, 4804], [4676, 4677, 4806], [4676, 4806, 4805], [4677, 4678, 4806], [4678, 4807, 4806], [4678, 4679, 4808], [4678, 4808, 4807], [4679, 4680, 4808], [4680, 4809, 4808], [4680, 4681, 4810], [4680, 4810, 4809], [4681, 4682, 4810], [4682, 4811, 4810], [4682, 4683, 4812], [4682, 4812, 4811], [4683, 4684, 4812], [4684, 4813, 4812], [4684, 4685, 4814], [4684, 4814, 4813], [4685, 4686, 4814], [4686, 4815, 4814], [4686, 4687, 4816], [4686, 4816, 4815], [4687, 4688, 4816], [4688, 4817, 4816], [4688, 4689, 4818], [4688, 4818, 4817], [4689, 4690, 4818], [4690, 4819, 4818], [4690, 4691, 4820], [4690, 4820, 4819], [4691, 4692, 4820], [4692, 4821, 4820], [4692, 4693, 4822], [4692, 4822, 4821], [4693, 4694, 4822], [4694, 4823, 
4822], [4694, 4695, 4824], [4694, 4824, 4823], [4695, 4696, 4824], [4696, 4825, 4824], [4696, 4697, 4826], [4696, 4826, 4825], [4697, 4698, 4826], [4698, 4827, 4826], [4698, 4699, 4828], [4698, 4828, 4827], [4699, 4700, 4828], [4700, 4829, 4828], [4700, 4701, 4830], [4700, 4830, 4829], [4701, 4702, 4830], [4702, 4831, 4830], [4702, 4703, 4832], [4702, 4832, 4831], [4703, 4704, 4832], [4704, 4833, 4832], [4704, 4705, 4834], [4704, 4834, 4833], [4705, 4706, 4834], [4706, 4835, 4834], [4706, 4707, 4836], [4706, 4836, 4835], [4707, 4708, 4836], [4708, 4837, 4836], [4708, 4709, 4838], [4708, 4838, 4837], [4709, 4710, 4838], [4710, 4839, 4838], [4710, 4711, 4840], [4710, 4840, 4839], [4711, 4712, 4840], [4712, 4841, 4840], [4712, 4713, 4842], [4712, 4842, 4841], [4713, 4714, 4842], [4714, 4843, 4842], [4714, 4715, 4844], [4714, 4844, 4843], [4715, 4716, 4844], [4716, 4845, 4844], [4716, 4717, 4846], [4716, 4846, 4845], [4717, 4718, 4846], [4718, 4847, 4846], [4718, 4719, 4848], [4718, 4848, 4847], [4719, 4720, 4848], [4720, 4849, 4848], [4720, 4721, 4850], [4720, 4850, 4849], [4721, 4722, 4850], [4722, 4851, 4850], [4722, 4723, 4852], [4722, 4852, 4851], [4723, 4724, 4852], [4724, 4853, 4852], [4724, 4725, 4854], [4724, 4854, 4853], [4725, 4726, 4854], [4726, 4855, 4854], [4726, 4727, 4856], [4726, 4856, 4855], [4727, 4728, 4856], [4728, 4857, 4856], [4728, 4729, 4858], [4728, 4858, 4857], [4729, 4730, 4858], [4730, 4859, 4858], [4730, 4731, 4860], [4730, 4860, 4859], [4731, 4732, 4860], [4732, 4861, 4860], [4732, 4733, 4862], [4732, 4862, 4861], [4733, 4734, 4862], [4734, 4863, 4862], [4734, 4735, 4864], [4734, 4864, 4863], [4735, 4736, 4864], [4736, 4865, 4864], [4736, 4737, 4866], [4736, 4866, 4865], [4737, 4738, 4866], [4738, 4867, 4866], [4738, 4739, 4868], [4738, 4868, 4867], [4739, 4740, 4868], [4740, 4869, 4868], [4740, 4741, 4870], [4740, 4870, 4869], [4741, 4742, 4870], [4742, 4871, 4870], [4742, 4743, 4872], [4742, 4872, 4871], [4743, 4744, 4872], [4744, 4873, 4872], [4744, 4745, 4874], [4744, 4874, 4873], [4745, 4746, 4874], [4746, 4875, 4874], [4746, 4747, 4876], [4746, 4876, 4875], [4747, 4748, 4876], [4748, 4877, 4876], [4748, 4749, 4878], [4748, 4878, 4877], [4749, 4750, 4878], [4750, 4879, 4878], [4750, 4751, 4880], [4750, 4880, 4879], [4751, 4752, 4880], [4752, 4881, 4880], [4752, 4753, 4882], [4752, 4882, 4881], [4753, 4754, 4882], [4754, 4883, 4882], [4754, 4755, 4884], [4754, 4884, 4883], [4755, 4756, 4884], [4756, 4885, 4884], [4756, 4757, 4886], [4756, 4886, 4885], [4757, 4758, 4886], [4758, 4887, 4886], [4758, 4759, 4888], [4758, 4888, 4887], [4759, 4760, 4888], [4760, 4889, 4888], [4760, 4761, 4890], [4760, 4890, 4889], [4761, 4762, 4890], [4762, 4891, 4890], [4762, 4763, 4892], [4762, 4892, 4891], [4763, 4764, 4892], [4764, 4893, 4892], [4764, 4765, 4894], [4764, 4894, 4893], [4765, 4766, 4894], [4766, 4895, 4894], [4766, 4767, 4896], [4766, 4896, 4895], [4767, 4768, 4896], [4768, 4897, 4896], [4768, 4769, 4898], [4768, 4898, 4897], [4769, 4770, 4898], [4770, 4899, 4898], [4770, 4771, 4900], [4770, 4900, 4899], [4771, 4772, 4900], [4772, 4901, 4900], [4773, 4774, 4902], [4774, 4903, 4902], [4774, 4775, 4904], [4774, 4904, 4903], [4775, 4776, 4904], [4776, 4905, 4904], [4776, 4777, 4906], [4776, 4906, 4905], [4777, 4778, 4906], [4778, 4907, 4906], [4778, 4779, 4908], [4778, 4908, 4907], [4779, 4780, 4908], [4780, 4909, 4908], [4780, 4781, 4910], [4780, 4910, 4909], [4781, 4782, 4910], [4782, 4911, 4910], [4782, 4783, 4912], [4782, 4912, 4911], [4783, 4784, 4912], [4784, 
4913, 4912], [4784, 4785, 4914], [4784, 4914, 4913], [4785, 4786, 4914], [4786, 4915, 4914], [4786, 4787, 4916], [4786, 4916, 4915], [4787, 4788, 4916], [4788, 4917, 4916], [4788, 4789, 4918], [4788, 4918, 4917], [4789, 4790, 4918], [4790, 4919, 4918], [4790, 4791, 4920], [4790, 4920, 4919], [4791, 4792, 4920], [4792, 4921, 4920], [4792, 4793, 4922], [4792, 4922, 4921], [4793, 4794, 4922], [4794, 4923, 4922], [4794, 4795, 4924], [4794, 4924, 4923], [4795, 4796, 4924], [4796, 4925, 4924], [4796, 4797, 4926], [4796, 4926, 4925], [4797, 4798, 4926], [4798, 4927, 4926], [4798, 4799, 4928], [4798, 4928, 4927], [4799, 4800, 4928], [4800, 4929, 4928], [4800, 4801, 4930], [4800, 4930, 4929], [4801, 4802, 4930], [4802, 4931, 4930], [4802, 4803, 4932], [4802, 4932, 4931], [4803, 4804, 4932], [4804, 4933, 4932], [4804, 4805, 4934], [4804, 4934, 4933], [4805, 4806, 4934], [4806, 4935, 4934], [4806, 4807, 4936], [4806, 4936, 4935], [4807, 4808, 4936], [4808, 4937, 4936], [4808, 4809, 4938], [4808, 4938, 4937], [4809, 4810, 4938], [4810, 4939, 4938], [4810, 4811, 4940], [4810, 4940, 4939], [4811, 4812, 4940], [4812, 4941, 4940], [4812, 4813, 4942], [4812, 4942, 4941], [4813, 4814, 4942], [4814, 4943, 4942], [4814, 4815, 4944], [4814, 4944, 4943], [4815, 4816, 4944], [4816, 4945, 4944], [4816, 4817, 4946], [4816, 4946, 4945], [4817, 4818, 4946], [4818, 4947, 4946], [4818, 4819, 4948], [4818, 4948, 4947], [4819, 4820, 4948], [4820, 4949, 4948], [4820, 4821, 4950], [4820, 4950, 4949], [4821, 4822, 4950], [4822, 4951, 4950], [4822, 4823, 4952], [4822, 4952, 4951], [4823, 4824, 4952], [4824, 4953, 4952], [4824, 4825, 4954], [4824, 4954, 4953], [4825, 4826, 4954], [4826, 4955, 4954], [4826, 4827, 4956], [4826, 4956, 4955], [4827, 4828, 4956], [4828, 4957, 4956], [4828, 4829, 4958], [4828, 4958, 4957], [4829, 4830, 4958], [4830, 4959, 4958], [4830, 4831, 4960], [4830, 4960, 4959], [4831, 4832, 4960], [4832, 4961, 4960], [4832, 4833, 4962], [4832, 4962, 4961], [4833, 4834, 4962], [4834, 4963, 4962], [4834, 4835, 4964], [4834, 4964, 4963], [4835, 4836, 4964], [4836, 4965, 4964], [4836, 4837, 4966], [4836, 4966, 4965], [4837, 4838, 4966], [4838, 4967, 4966], [4838, 4839, 4968], [4838, 4968, 4967], [4839, 4840, 4968], [4840, 4969, 4968], [4840, 4841, 4970], [4840, 4970, 4969], [4841, 4842, 4970], [4842, 4971, 4970], [4842, 4843, 4972], [4842, 4972, 4971], [4843, 4844, 4972], [4844, 4973, 4972], [4844, 4845, 4974], [4844, 4974, 4973], [4845, 4846, 4974], [4846, 4975, 4974], [4846, 4847, 4976], [4846, 4976, 4975], [4847, 4848, 4976], [4848, 4977, 4976], [4848, 4849, 4978], [4848, 4978, 4977], [4849, 4850, 4978], [4850, 4979, 4978], [4850, 4851, 4980], [4850, 4980, 4979], [4851, 4852, 4980], [4852, 4981, 4980], [4852, 4853, 4982], [4852, 4982, 4981], [4853, 4854, 4982], [4854, 4983, 4982], [4854, 4855, 4984], [4854, 4984, 4983], [4855, 4856, 4984], [4856, 4985, 4984], [4856, 4857, 4986], [4856, 4986, 4985], [4857, 4858, 4986], [4858, 4987, 4986], [4858, 4859, 4988], [4858, 4988, 4987], [4859, 4860, 4988], [4860, 4989, 4988], [4860, 4861, 4990], [4860, 4990, 4989], [4861, 4862, 4990], [4862, 4991, 4990], [4862, 4863, 4992], [4862, 4992, 4991], [4863, 4864, 4992], [4864, 4993, 4992], [4864, 4865, 4994], [4864, 4994, 4993], [4865, 4866, 4994], [4866, 4995, 4994], [4866, 4867, 4996], [4866, 4996, 4995], [4867, 4868, 4996], [4868, 4997, 4996], [4868, 4869, 4998], [4868, 4998, 4997], [4869, 4870, 4998], [4870, 4999, 4998], [4870, 4871, 5000], [4870, 5000, 4999], [4871, 4872, 5000], [4872, 5001, 5000], [4872, 4873, 5002], 
[4872, 5002, 5001], [4873, 4874, 5002], [4874, 5003, 5002], [4874, 4875, 5004], [4874, 5004, 5003], [4875, 4876, 5004], [4876, 5005, 5004], [4876, 4877, 5006], [4876, 5006, 5005], [4877, 4878, 5006], [4878, 5007, 5006], [4878, 4879, 5008], [4878, 5008, 5007], [4879, 4880, 5008], [4880, 5009, 5008], [4880, 4881, 5010], [4880, 5010, 5009], [4881, 4882, 5010], [4882, 5011, 5010], [4882, 4883, 5012], [4882, 5012, 5011], [4883, 4884, 5012], [4884, 5013, 5012], [4884, 4885, 5014], [4884, 5014, 5013], [4885, 4886, 5014], [4886, 5015, 5014], [4886, 4887, 5016], [4886, 5016, 5015], [4887, 4888, 5016], [4888, 5017, 5016], [4888, 4889, 5018], [4888, 5018, 5017], [4889, 4890, 5018], [4890, 5019, 5018], [4890, 4891, 5020], [4890, 5020, 5019], [4891, 4892, 5020], [4892, 5021, 5020], [4892, 4893, 5022], [4892, 5022, 5021], [4893, 4894, 5022], [4894, 5023, 5022], [4894, 4895, 5024], [4894, 5024, 5023], [4895, 4896, 5024], [4896, 5025, 5024], [4896, 4897, 5026], [4896, 5026, 5025], [4897, 4898, 5026], [4898, 5027, 5026], [4898, 4899, 5028], [4898, 5028, 5027], [4899, 4900, 5028], [4900, 5029, 5028], [4900, 4901, 5030], [4900, 5030, 5029], [4902, 4903, 5032], [4902, 5032, 5031], [4903, 4904, 5032], [4904, 5033, 5032], [4904, 4905, 5034], [4904, 5034, 5033], [4905, 4906, 5034], [4906, 5035, 5034], [4906, 4907, 5036], [4906, 5036, 5035], [4907, 4908, 5036], [4908, 5037, 5036], [4908, 4909, 5038], [4908, 5038, 5037], [4909, 4910, 5038], [4910, 5039, 5038], [4910, 4911, 5040], [4910, 5040, 5039], [4911, 4912, 5040], [4912, 5041, 5040], [4912, 4913, 5042], [4912, 5042, 5041], [4913, 4914, 5042], [4914, 5043, 5042], [4914, 4915, 5044], [4914, 5044, 5043], [4915, 4916, 5044], [4916, 5045, 5044], [4916, 4917, 5046], [4916, 5046, 5045], [4917, 4918, 5046], [4918, 5047, 5046], [4918, 4919, 5048], [4918, 5048, 5047], [4919, 4920, 5048], [4920, 5049, 5048], [4920, 4921, 5050], [4920, 5050, 5049], [4921, 4922, 5050], [4922, 5051, 5050], [4922, 4923, 5052], [4922, 5052, 5051], [4923, 4924, 5052], [4924, 5053, 5052], [4924, 4925, 5054], [4924, 5054, 5053], [4925, 4926, 5054], [4926, 5055, 5054], [4926, 4927, 5056], [4926, 5056, 5055], [4927, 4928, 5056], [4928, 5057, 5056], [4928, 4929, 5058], [4928, 5058, 5057], [4929, 4930, 5058], [4930, 5059, 5058], [4930, 4931, 5060], [4930, 5060, 5059], [4931, 4932, 5060], [4932, 5061, 5060], [4932, 4933, 5062], [4932, 5062, 5061], [4933, 4934, 5062], [4934, 5063, 5062], [4934, 4935, 5064], [4934, 5064, 5063], [4935, 4936, 5064], [4936, 5065, 5064], [4936, 4937, 5066], [4936, 5066, 5065], [4937, 4938, 5066], [4938, 5067, 5066], [4938, 4939, 5068], [4938, 5068, 5067], [4939, 4940, 5068], [4940, 5069, 5068], [4940, 4941, 5070], [4940, 5070, 5069], [4941, 4942, 5070], [4942, 5071, 5070], [4942, 4943, 5072], [4942, 5072, 5071], [4943, 4944, 5072], [4944, 5073, 5072], [4944, 4945, 5074], [4944, 5074, 5073], [4945, 4946, 5074], [4946, 5075, 5074], [4946, 4947, 5076], [4946, 5076, 5075], [4947, 4948, 5076], [4948, 5077, 5076], [4948, 4949, 5078], [4948, 5078, 5077], [4949, 4950, 5078], [4950, 5079, 5078], [4950, 4951, 5080], [4950, 5080, 5079], [4951, 4952, 5080], [4952, 5081, 5080], [4952, 4953, 5082], [4952, 5082, 5081], [4953, 4954, 5082], [4954, 5083, 5082], [4954, 4955, 5084], [4954, 5084, 5083], [4955, 4956, 5084], [4956, 5085, 5084], [4956, 4957, 5086], [4956, 5086, 5085], [4957, 4958, 5086], [4958, 5087, 5086], [4958, 4959, 5088], [4958, 5088, 5087], [4959, 4960, 5088], [4960, 5089, 5088], [4960, 4961, 5090], [4960, 5090, 5089], [4961, 4962, 5090], [4962, 5091, 5090], [4962, 4963, 
5092], [4962, 5092, 5091], [4963, 4964, 5092], [4964, 5093, 5092], [4964, 4965, 5094], [4964, 5094, 5093], [4965, 4966, 5094], [4966, 5095, 5094], [4966, 4967, 5096], [4966, 5096, 5095], [4967, 4968, 5096], [4968, 5097, 5096], [4968, 4969, 5098], [4968, 5098, 5097], [4969, 4970, 5098], [4970, 5099, 5098], [4970, 4971, 5100], [4970, 5100, 5099], [4971, 4972, 5100], [4972, 5101, 5100], [4972, 4973, 5102], [4972, 5102, 5101], [4973, 4974, 5102], [4974, 5103, 5102], [4974, 4975, 5104], [4974, 5104, 5103], [4975, 4976, 5104], [4976, 5105, 5104], [4976, 4977, 5106], [4976, 5106, 5105], [4977, 4978, 5106], [4978, 5107, 5106], [4978, 4979, 5108], [4978, 5108, 5107], [4979, 4980, 5108], [4980, 5109, 5108], [4980, 4981, 5110], [4980, 5110, 5109], [4981, 4982, 5110], [4982, 5111, 5110], [4982, 4983, 5112], [4982, 5112, 5111], [4983, 4984, 5112], [4984, 5113, 5112], [4984, 4985, 5114], [4984, 5114, 5113], [4985, 4986, 5114], [4986, 5115, 5114], [4986, 4987, 5116], [4986, 5116, 5115], [4987, 4988, 5116], [4988, 5117, 5116], [4988, 4989, 5118], [4988, 5118, 5117], [4989, 4990, 5118], [4990, 5119, 5118], [4990, 4991, 5120], [4990, 5120, 5119], [4991, 4992, 5120], [4992, 5121, 5120], [4992, 4993, 5122], [4992, 5122, 5121], [4993, 4994, 5122], [4994, 5123, 5122], [4994, 4995, 5124], [4994, 5124, 5123], [4995, 4996, 5124], [4996, 5125, 5124], [4996, 4997, 5126], [4996, 5126, 5125], [4997, 4998, 5126], [4998, 5127, 5126], [4998, 4999, 5128], [4998, 5128, 5127], [4999, 5000, 5128], [5000, 5129, 5128], [5000, 5001, 5130], [5000, 5130, 5129], [5001, 5002, 5130], [5002, 5131, 5130], [5002, 5003, 5132], [5002, 5132, 5131], [5003, 5004, 5132], [5004, 5133, 5132], [5004, 5005, 5134], [5004, 5134, 5133], [5005, 5006, 5134], [5006, 5135, 5134], [5006, 5007, 5136], [5006, 5136, 5135], [5007, 5008, 5136], [5008, 5137, 5136], [5008, 5009, 5138], [5008, 5138, 5137], [5009, 5010, 5138], [5010, 5139, 5138], [5010, 5011, 5140], [5010, 5140, 5139], [5011, 5012, 5140], [5012, 5141, 5140], [5012, 5013, 5142], [5012, 5142, 5141], [5013, 5014, 5142], [5014, 5143, 5142], [5014, 5015, 5144], [5014, 5144, 5143], [5015, 5016, 5144], [5016, 5145, 5144], [5016, 5017, 5146], [5016, 5146, 5145], [5017, 5018, 5146], [5018, 5147, 5146], [5018, 5019, 5148], [5018, 5148, 5147], [5019, 5020, 5148], [5020, 5149, 5148], [5020, 5021, 5150], [5020, 5150, 5149], [5021, 5022, 5150], [5022, 5151, 5150], [5022, 5023, 5152], [5022, 5152, 5151], [5023, 5024, 5152], [5024, 5153, 5152], [5024, 5025, 5154], [5024, 5154, 5153], [5025, 5026, 5154], [5026, 5155, 5154], [5026, 5027, 5156], [5026, 5156, 5155], [5027, 5028, 5156], [5028, 5157, 5156], [5028, 5029, 5158], [5028, 5158, 5157], [5029, 5030, 5158], [5030, 5159, 5158], [5031, 5032, 5160], [5032, 5161, 5160], [5032, 5033, 5162], [5032, 5162, 5161], [5033, 5034, 5162], [5034, 5163, 5162], [5034, 5035, 5164], [5034, 5164, 5163], [5035, 5036, 5164], [5036, 5165, 5164], [5036, 5037, 5166], [5036, 5166, 5165], [5037, 5038, 5166], [5038, 5167, 5166], [5038, 5039, 5168], [5038, 5168, 5167], [5039, 5040, 5168], [5040, 5169, 5168], [5040, 5041, 5170], [5040, 5170, 5169], [5041, 5042, 5170], [5042, 5171, 5170], [5042, 5043, 5172], [5042, 5172, 5171], [5043, 5044, 5172], [5044, 5173, 5172], [5044, 5045, 5174], [5044, 5174, 5173], [5045, 5046, 5174], [5046, 5175, 5174], [5046, 5047, 5176], [5046, 5176, 5175], [5047, 5048, 5176], [5048, 5177, 5176], [5048, 5049, 5178], [5048, 5178, 5177], [5049, 5050, 5178], [5050, 5179, 5178], [5050, 5051, 5180], [5050, 5180, 5179], [5051, 5052, 5180], [5052, 5181, 5180], [5052, 
5053, 5182], [5052, 5182, 5181], [5053, 5054, 5182], [5054, 5183, 5182], [5054, 5055, 5184], [5054, 5184, 5183], [5055, 5056, 5184], [5056, 5185, 5184], [5056, 5057, 5186], [5056, 5186, 5185], [5057, 5058, 5186], [5058, 5187, 5186], [5058, 5059, 5188], [5058, 5188, 5187], [5059, 5060, 5188], [5060, 5189, 5188], [5060, 5061, 5190], [5060, 5190, 5189], [5061, 5062, 5190], [5062, 5191, 5190], [5062, 5063, 5192], [5062, 5192, 5191], [5063, 5064, 5192], [5064, 5193, 5192], [5064, 5065, 5194], [5064, 5194, 5193], [5065, 5066, 5194], [5066, 5195, 5194], [5066, 5067, 5196], [5066, 5196, 5195], [5067, 5068, 5196], [5068, 5197, 5196], [5068, 5069, 5198], [5068, 5198, 5197], [5069, 5070, 5198], [5070, 5199, 5198], [5070, 5071, 5200], [5070, 5200, 5199], [5071, 5072, 5200], [5072, 5201, 5200], [5072, 5073, 5202], [5072, 5202, 5201], [5073, 5074, 5202], [5074, 5203, 5202], [5074, 5075, 5204], [5074, 5204, 5203], [5075, 5076, 5204], [5076, 5205, 5204], [5076, 5077, 5206], [5076, 5206, 5205], [5077, 5078, 5206], [5078, 5207, 5206], [5078, 5079, 5208], [5078, 5208, 5207], [5079, 5080, 5208], [5080, 5209, 5208], [5080, 5081, 5210], [5080, 5210, 5209], [5081, 5082, 5210], [5082, 5211, 5210], [5082, 5083, 5212], [5082, 5212, 5211], [5083, 5084, 5212], [5084, 5213, 5212], [5084, 5085, 5214], [5084, 5214, 5213], [5085, 5086, 5214], [5086, 5215, 5214], [5086, 5087, 5216], [5086, 5216, 5215], [5087, 5088, 5216], [5088, 5217, 5216], [5088, 5089, 5218], [5088, 5218, 5217], [5089, 5090, 5218], [5090, 5219, 5218], [5090, 5091, 5220], [5090, 5220, 5219], [5091, 5092, 5220], [5092, 5221, 5220], [5092, 5093, 5222], [5092, 5222, 5221], [5093, 5094, 5222], [5094, 5223, 5222], [5094, 5095, 5224], [5094, 5224, 5223], [5095, 5096, 5224], [5096, 5225, 5224], [5096, 5097, 5226], [5096, 5226, 5225], [5097, 5098, 5226], [5098, 5227, 5226], [5098, 5099, 5228], [5098, 5228, 5227], [5099, 5100, 5228], [5100, 5229, 5228], [5100, 5101, 5230], [5100, 5230, 5229], [5101, 5102, 5230], [5102, 5231, 5230], [5102, 5103, 5232], [5102, 5232, 5231], [5103, 5104, 5232], [5104, 5233, 5232], [5104, 5105, 5234], [5104, 5234, 5233], [5105, 5106, 5234], [5106, 5235, 5234], [5106, 5107, 5236], [5106, 5236, 5235], [5107, 5108, 5236], [5108, 5237, 5236], [5108, 5109, 5238], [5108, 5238, 5237], [5109, 5110, 5238], [5110, 5239, 5238], [5110, 5111, 5240], [5110, 5240, 5239], [5111, 5112, 5240], [5112, 5241, 5240], [5112, 5113, 5242], [5112, 5242, 5241], [5113, 5114, 5242], [5114, 5243, 5242], [5114, 5115, 5244], [5114, 5244, 5243], [5115, 5116, 5244], [5116, 5245, 5244], [5116, 5117, 5246], [5116, 5246, 5245], [5117, 5118, 5246], [5118, 5247, 5246], [5118, 5119, 5248], [5118, 5248, 5247], [5119, 5120, 5248], [5120, 5249, 5248], [5120, 5121, 5250], [5120, 5250, 5249], [5121, 5122, 5250], [5122, 5251, 5250], [5122, 5123, 5252], [5122, 5252, 5251], [5123, 5124, 5252], [5124, 5253, 5252], [5124, 5125, 5254], [5124, 5254, 5253], [5125, 5126, 5254], [5126, 5255, 5254], [5126, 5127, 5256], [5126, 5256, 5255], [5127, 5128, 5256], [5128, 5257, 5256], [5128, 5129, 5258], [5128, 5258, 5257], [5129, 5130, 5258], [5130, 5259, 5258], [5130, 5131, 5260], [5130, 5260, 5259], [5131, 5132, 5260], [5132, 5261, 5260], [5132, 5133, 5262], [5132, 5262, 5261], [5133, 5134, 5262], [5134, 5263, 5262], [5134, 5135, 5264], [5134, 5264, 5263], [5135, 5136, 5264], [5136, 5265, 5264], [5136, 5137, 5266], [5136, 5266, 5265], [5137, 5138, 5266], [5138, 5267, 5266], [5138, 5139, 5268], [5138, 5268, 5267], [5139, 5140, 5268], [5140, 5269, 5268], [5140, 5141, 5270], [5140, 5270, 5269], 
[5141, 5142, 5270], [5142, 5271, 5270], [5142, 5143, 5272], [5142, 5272, 5271], [5143, 5144, 5272], [5144, 5273, 5272], [5144, 5145, 5274], [5144, 5274, 5273], [5145, 5146, 5274], [5146, 5275, 5274], [5146, 5147, 5276], [5146, 5276, 5275], [5147, 5148, 5276], [5148, 5277, 5276], [5148, 5149, 5278], [5148, 5278, 5277], [5149, 5150, 5278], [5150, 5279, 5278], [5150, 5151, 5280], [5150, 5280, 5279], [5151, 5152, 5280], [5152, 5281, 5280], [5152, 5153, 5282], [5152, 5282, 5281], [5153, 5154, 5282], [5154, 5283, 5282], [5154, 5155, 5284], [5154, 5284, 5283], [5155, 5156, 5284], [5156, 5285, 5284], [5156, 5157, 5286], [5156, 5286, 5285], [5157, 5158, 5286], [5158, 5287, 5286], [5158, 5159, 5288], [5158, 5288, 5287], [5160, 5161, 5290], [5160, 5290, 5289], [5161, 5162, 5290], [5162, 5291, 5290], [5162, 5163, 5292], [5162, 5292, 5291], [5163, 5164, 5292], [5164, 5293, 5292], [5164, 5165, 5294], [5164, 5294, 5293], [5165, 5166, 5294], [5166, 5295, 5294], [5166, 5167, 5296], [5166, 5296, 5295], [5167, 5168, 5296], [5168, 5297, 5296], [5168, 5169, 5298], [5168, 5298, 5297], [5169, 5170, 5298], [5170, 5299, 5298], [5170, 5171, 5300], [5170, 5300, 5299], [5171, 5172, 5300], [5172, 5301, 5300], [5172, 5173, 5302], [5172, 5302, 5301], [5173, 5174, 5302], [5174, 5303, 5302], [5174, 5175, 5304], [5174, 5304, 5303], [5175, 5176, 5304], [5176, 5305, 5304], [5176, 5177, 5306], [5176, 5306, 5305], [5177, 5178, 5306], [5178, 5307, 5306], [5178, 5179, 5308], [5178, 5308, 5307], [5179, 5180, 5308], [5180, 5309, 5308], [5180, 5181, 5310], [5180, 5310, 5309], [5181, 5182, 5310], [5182, 5311, 5310], [5182, 5183, 5312], [5182, 5312, 5311], [5183, 5184, 5312], [5184, 5313, 5312], [5184, 5185, 5314], [5184, 5314, 5313], [5185, 5186, 5314], [5186, 5315, 5314], [5186, 5187, 5316], [5186, 5316, 5315], [5187, 5188, 5316], [5188, 5317, 5316], [5188, 5189, 5318], [5188, 5318, 5317], [5189, 5190, 5318], [5190, 5319, 5318], [5190, 5191, 5320], [5190, 5320, 5319], [5191, 5192, 5320], [5192, 5321, 5320], [5192, 5193, 5322], [5192, 5322, 5321], [5193, 5194, 5322], [5194, 5323, 5322], [5194, 5195, 5324], [5194, 5324, 5323], [5195, 5196, 5324], [5196, 5325, 5324], [5196, 5197, 5326], [5196, 5326, 5325], [5197, 5198, 5326], [5198, 5327, 5326], [5198, 5199, 5328], [5198, 5328, 5327], [5199, 5200, 5328], [5200, 5329, 5328], [5200, 5201, 5330], [5200, 5330, 5329], [5201, 5202, 5330], [5202, 5331, 5330], [5202, 5203, 5332], [5202, 5332, 5331], [5203, 5204, 5332], [5204, 5333, 5332], [5204, 5205, 5334], [5204, 5334, 5333], [5205, 5206, 5334], [5206, 5335, 5334], [5206, 5207, 5336], [5206, 5336, 5335], [5207, 5208, 5336], [5208, 5337, 5336], [5208, 5209, 5338], [5208, 5338, 5337], [5209, 5210, 5338], [5210, 5339, 5338], [5210, 5211, 5340], [5210, 5340, 5339], [5211, 5212, 5340], [5212, 5341, 5340], [5212, 5213, 5342], [5212, 5342, 5341], [5213, 5214, 5342], [5214, 5343, 5342], [5214, 5215, 5344], [5214, 5344, 5343], [5215, 5216, 5344], [5216, 5345, 5344], [5216, 5217, 5346], [5216, 5346, 5345], [5217, 5218, 5346], [5218, 5347, 5346], [5218, 5219, 5348], [5218, 5348, 5347], [5219, 5220, 5348], [5220, 5349, 5348], [5220, 5221, 5350], [5220, 5350, 5349], [5221, 5222, 5350], [5222, 5351, 5350], [5222, 5223, 5352], [5222, 5352, 5351], [5223, 5224, 5352], [5224, 5353, 5352], [5224, 5225, 5354], [5224, 5354, 5353], [5225, 5226, 5354], [5226, 5355, 5354], [5226, 5227, 5356], [5226, 5356, 5355], [5227, 5228, 5356], [5228, 5357, 5356], [5228, 5229, 5358], [5228, 5358, 5357], [5229, 5230, 5358], [5230, 5359, 5358], [5230, 5231, 5360], [5230, 5360, 
5359], [5231, 5232, 5360], [5232, 5361, 5360], [5232, 5233, 5362], [5232, 5362, 5361], [5233, 5234, 5362], [5234, 5363, 5362], [5234, 5235, 5364], [5234, 5364, 5363], [5235, 5236, 5364], [5236, 5365, 5364], [5236, 5237, 5366], [5236, 5366, 5365], [5237, 5238, 5366], [5238, 5367, 5366], [5238, 5239, 5368], [5238, 5368, 5367], [5239, 5240, 5368], [5240, 5369, 5368], [5240, 5241, 5370], [5240, 5370, 5369], [5241, 5242, 5370], [5242, 5371, 5370], [5242, 5243, 5372], [5242, 5372, 5371], [5243, 5244, 5372], [5244, 5373, 5372], [5244, 5245, 5374], [5244, 5374, 5373], [5245, 5246, 5374], [5246, 5375, 5374], [5246, 5247, 5376], [5246, 5376, 5375], [5247, 5248, 5376], [5248, 5377, 5376], [5248, 5249, 5378], [5248, 5378, 5377], [5249, 5250, 5378], [5250, 5379, 5378], [5250, 5251, 5380], [5250, 5380, 5379], [5251, 5252, 5380], [5252, 5381, 5380], [5252, 5253, 5382], [5252, 5382, 5381], [5253, 5254, 5382], [5254, 5383, 5382], [5254, 5255, 5384], [5254, 5384, 5383], [5255, 5256, 5384], [5256, 5385, 5384], [5256, 5257, 5386], [5256, 5386, 5385], [5257, 5258, 5386], [5258, 5387, 5386], [5258, 5259, 5388], [5258, 5388, 5387], [5259, 5260, 5388], [5260, 5389, 5388], [5260, 5261, 5390], [5260, 5390, 5389], [5261, 5262, 5390], [5262, 5391, 5390], [5262, 5263, 5392], [5262, 5392, 5391], [5263, 5264, 5392], [5264, 5393, 5392], [5264, 5265, 5394], [5264, 5394, 5393], [5265, 5266, 5394], [5266, 5395, 5394], [5266, 5267, 5396], [5266, 5396, 5395], [5267, 5268, 5396], [5268, 5397, 5396], [5268, 5269, 5398], [5268, 5398, 5397], [5269, 5270, 5398], [5270, 5399, 5398], [5270, 5271, 5400], [5270, 5400, 5399], [5271, 5272, 5400], [5272, 5401, 5400], [5272, 5273, 5402], [5272, 5402, 5401], [5273, 5274, 5402], [5274, 5403, 5402], [5274, 5275, 5404], [5274, 5404, 5403], [5275, 5276, 5404], [5276, 5405, 5404], [5276, 5277, 5406], [5276, 5406, 5405], [5277, 5278, 5406], [5278, 5407, 5406], [5278, 5279, 5408], [5278, 5408, 5407], [5279, 5280, 5408], [5280, 5409, 5408], [5280, 5281, 5410], [5280, 5410, 5409], [5281, 5282, 5410], [5282, 5411, 5410], [5282, 5283, 5412], [5282, 5412, 5411], [5283, 5284, 5412], [5284, 5413, 5412], [5284, 5285, 5414], [5284, 5414, 5413], [5285, 5286, 5414], [5286, 5415, 5414], [5286, 5287, 5416], [5286, 5416, 5415], [5287, 5288, 5416], [5288, 5417, 5416], [5289, 5290, 5418], [5290, 5419, 5418], [5290, 5291, 5420], [5290, 5420, 5419], [5291, 5292, 5420], [5292, 5421, 5420], [5292, 5293, 5422], [5292, 5422, 5421], [5293, 5294, 5422], [5294, 5423, 5422], [5294, 5295, 5424], [5294, 5424, 5423], [5295, 5296, 5424], [5296, 5425, 5424], [5296, 5297, 5426], [5296, 5426, 5425], [5297, 5298, 5426], [5298, 5427, 5426], [5298, 5299, 5428], [5298, 5428, 5427], [5299, 5300, 5428], [5300, 5429, 5428], [5300, 5301, 5430], [5300, 5430, 5429], [5301, 5302, 5430], [5302, 5431, 5430], [5302, 5303, 5432], [5302, 5432, 5431], [5303, 5304, 5432], [5304, 5433, 5432], [5304, 5305, 5434], [5304, 5434, 5433], [5305, 5306, 5434], [5306, 5435, 5434], [5306, 5307, 5436], [5306, 5436, 5435], [5307, 5308, 5436], [5308, 5437, 5436], [5308, 5309, 5438], [5308, 5438, 5437], [5309, 5310, 5438], [5310, 5439, 5438], [5310, 5311, 5440], [5310, 5440, 5439], [5311, 5312, 5440], [5312, 5441, 5440], [5312, 5313, 5442], [5312, 5442, 5441], [5313, 5314, 5442], [5314, 5443, 5442], [5314, 5315, 5444], [5314, 5444, 5443], [5315, 5316, 5444], [5316, 5445, 5444], [5316, 5317, 5446], [5316, 5446, 5445], [5317, 5318, 5446], [5318, 5447, 5446], [5318, 5319, 5448], [5318, 5448, 5447], [5319, 5320, 5448], [5320, 5449, 5448], [5320, 5321, 5450], [5320, 
5450, 5449], [5321, 5322, 5450], [5322, 5451, 5450], [5322, 5323, 5452], [5322, 5452, 5451], [5323, 5324, 5452], [5324, 5453, 5452], [5324, 5325, 5454], [5324, 5454, 5453], [5325, 5326, 5454], [5326, 5455, 5454], [5326, 5327, 5456], [5326, 5456, 5455], [5327, 5328, 5456], [5328, 5457, 5456], [5328, 5329, 5458], [5328, 5458, 5457], [5329, 5330, 5458], [5330, 5459, 5458], [5330, 5331, 5460], [5330, 5460, 5459], [5331, 5332, 5460], [5332, 5461, 5460], [5332, 5333, 5462], [5332, 5462, 5461], [5333, 5334, 5462], [5334, 5463, 5462], [5334, 5335, 5464], [5334, 5464, 5463], [5335, 5336, 5464], [5336, 5465, 5464], [5336, 5337, 5466], [5336, 5466, 5465], [5337, 5338, 5466], [5338, 5467, 5466], [5338, 5339, 5468], [5338, 5468, 5467], [5339, 5340, 5468], [5340, 5469, 5468], [5340, 5341, 5470], [5340, 5470, 5469], [5341, 5342, 5470], [5342, 5471, 5470], [5342, 5343, 5472], [5342, 5472, 5471], [5343, 5344, 5472], [5344, 5473, 5472], [5344, 5345, 5474], [5344, 5474, 5473], [5345, 5346, 5474], [5346, 5475, 5474], [5346, 5347, 5476], [5346, 5476, 5475], [5347, 5348, 5476], [5348, 5477, 5476], [5348, 5349, 5478], [5348, 5478, 5477], [5349, 5350, 5478], [5350, 5479, 5478], [5350, 5351, 5480], [5350, 5480, 5479], [5351, 5352, 5480], [5352, 5481, 5480], [5352, 5353, 5482], [5352, 5482, 5481], [5353, 5354, 5482], [5354, 5483, 5482], [5354, 5355, 5484], [5354, 5484, 5483], [5355, 5356, 5484], [5356, 5485, 5484], [5356, 5357, 5486], [5356, 5486, 5485], [5357, 5358, 5486], [5358, 5487, 5486], [5358, 5359, 5488], [5358, 5488, 5487], [5359, 5360, 5488], [5360, 5489, 5488], [5360, 5361, 5490], [5360, 5490, 5489], [5361, 5362, 5490], [5362, 5491, 5490], [5362, 5363, 5492], [5362, 5492, 5491], [5363, 5364, 5492], [5364, 5493, 5492], [5364, 5365, 5494], [5364, 5494, 5493], [5365, 5366, 5494], [5366, 5495, 5494], [5366, 5367, 5496], [5366, 5496, 5495], [5367, 5368, 5496], [5368, 5497, 5496], [5368, 5369, 5498], [5368, 5498, 5497], [5369, 5370, 5498], [5370, 5499, 5498], [5370, 5371, 5500], [5370, 5500, 5499], [5371, 5372, 5500], [5372, 5501, 5500], [5372, 5373, 5502], [5372, 5502, 5501], [5373, 5374, 5502], [5374, 5503, 5502], [5374, 5375, 5504], [5374, 5504, 5503], [5375, 5376, 5504], [5376, 5505, 5504], [5376, 5377, 5506], [5376, 5506, 5505], [5377, 5378, 5506], [5378, 5507, 5506], [5378, 5379, 5508], [5378, 5508, 5507], [5379, 5380, 5508], [5380, 5509, 5508], [5380, 5381, 5510], [5380, 5510, 5509], [5381, 5382, 5510], [5382, 5511, 5510], [5382, 5383, 5512], [5382, 5512, 5511], [5383, 5384, 5512], [5384, 5513, 5512], [5384, 5385, 5514], [5384, 5514, 5513], [5385, 5386, 5514], [5386, 5515, 5514], [5386, 5387, 5516], [5386, 5516, 5515], [5387, 5388, 5516], [5388, 5517, 5516], [5388, 5389, 5518], [5388, 5518, 5517], [5389, 5390, 5518], [5390, 5519, 5518], [5390, 5391, 5520], [5390, 5520, 5519], [5391, 5392, 5520], [5392, 5521, 5520], [5392, 5393, 5522], [5392, 5522, 5521], [5393, 5394, 5522], [5394, 5523, 5522], [5394, 5395, 5524], [5394, 5524, 5523], [5395, 5396, 5524], [5396, 5525, 5524], [5396, 5397, 5526], [5396, 5526, 5525], [5397, 5398, 5526], [5398, 5527, 5526], [5398, 5399, 5528], [5398, 5528, 5527], [5399, 5400, 5528], [5400, 5529, 5528], [5400, 5401, 5530], [5400, 5530, 5529], [5401, 5402, 5530], [5402, 5531, 5530], [5402, 5403, 5532], [5402, 5532, 5531], [5403, 5404, 5532], [5404, 5533, 5532], [5404, 5405, 5534], [5404, 5534, 5533], [5405, 5406, 5534], [5406, 5535, 5534], [5406, 5407, 5536], [5406, 5536, 5535], [5407, 5408, 5536], [5408, 5537, 5536], [5408, 5409, 5538], [5408, 5538, 5537], [5409, 5410, 5538], 
[5410, 5539, 5538], [5410, 5411, 5540], [5410, 5540, 5539], [5411, 5412, 5540], [5412, 5541, 5540], [5412, 5413, 5542], [5412, 5542, 5541], [5413, 5414, 5542], [5414, 5543, 5542], [5414, 5415, 5544], [5414, 5544, 5543], [5415, 5416, 5544], [5416, 5545, 5544], [5416, 5417, 5546], [5416, 5546, 5545], [5418, 5419, 5548], [5418, 5548, 5547], [5419, 5420, 5548], [5420, 5549, 5548], [5420, 5421, 5550], [5420, 5550, 5549], [5421, 5422, 5550], [5422, 5551, 5550], [5422, 5423, 5552], [5422, 5552, 5551], [5423, 5424, 5552], [5424, 5553, 5552], [5424, 5425, 5554], [5424, 5554, 5553], [5425, 5426, 5554], [5426, 5555, 5554], [5426, 5427, 5556], [5426, 5556, 5555], [5427, 5428, 5556], [5428, 5557, 5556], [5428, 5429, 5558], [5428, 5558, 5557], [5429, 5430, 5558], [5430, 5559, 5558], [5430, 5431, 5560], [5430, 5560, 5559], [5431, 5432, 5560], [5432, 5561, 5560], [5432, 5433, 5562], [5432, 5562, 5561], [5433, 5434, 5562], [5434, 5563, 5562], [5434, 5435, 5564], [5434, 5564, 5563], [5435, 5436, 5564], [5436, 5565, 5564], [5436, 5437, 5566], [5436, 5566, 5565], [5437, 5438, 5566], [5438, 5567, 5566], [5438, 5439, 5568], [5438, 5568, 5567], [5439, 5440, 5568], [5440, 5569, 5568], [5440, 5441, 5570], [5440, 5570, 5569], [5441, 5442, 5570], [5442, 5571, 5570], [5442, 5443, 5572], [5442, 5572, 5571], [5443, 5444, 5572], [5444, 5573, 5572], [5444, 5445, 5574], [5444, 5574, 5573], [5445, 5446, 5574], [5446, 5575, 5574], [5446, 5447, 5576], [5446, 5576, 5575], [5447, 5448, 5576], [5448, 5577, 5576], [5448, 5449, 5578], [5448, 5578, 5577], [5449, 5450, 5578], [5450, 5579, 5578], [5450, 5451, 5580], [5450, 5580, 5579], [5451, 5452, 5580], [5452, 5581, 5580], [5452, 5453, 5582], [5452, 5582, 5581], [5453, 5454, 5582], [5454, 5583, 5582], [5454, 5455, 5584], [5454, 5584, 5583], [5455, 5456, 5584], [5456, 5585, 5584], [5456, 5457, 5586], [5456, 5586, 5585], [5457, 5458, 5586], [5458, 5587, 5586], [5458, 5459, 5588], [5458, 5588, 5587], [5459, 5460, 5588], [5460, 5589, 5588], [5460, 5461, 5590], [5460, 5590, 5589], [5461, 5462, 5590], [5462, 5591, 5590], [5462, 5463, 5592], [5462, 5592, 5591], [5463, 5464, 5592], [5464, 5593, 5592], [5464, 5465, 5594], [5464, 5594, 5593], [5465, 5466, 5594], [5466, 5595, 5594], [5466, 5467, 5596], [5466, 5596, 5595], [5467, 5468, 5596], [5468, 5597, 5596], [5468, 5469, 5598], [5468, 5598, 5597], [5469, 5470, 5598], [5470, 5599, 5598], [5470, 5471, 5600], [5470, 5600, 5599], [5471, 5472, 5600], [5472, 5601, 5600], [5472, 5473, 5602], [5472, 5602, 5601], [5473, 5474, 5602], [5474, 5603, 5602], [5474, 5475, 5604], [5474, 5604, 5603], [5475, 5476, 5604], [5476, 5605, 5604], [5476, 5477, 5606], [5476, 5606, 5605], [5477, 5478, 5606], [5478, 5607, 5606], [5478, 5479, 5608], [5478, 5608, 5607], [5479, 5480, 5608], [5480, 5609, 5608], [5480, 5481, 5610], [5480, 5610, 5609], [5481, 5482, 5610], [5482, 5611, 5610], [5482, 5483, 5612], [5482, 5612, 5611], [5483, 5484, 5612], [5484, 5613, 5612], [5484, 5485, 5614], [5484, 5614, 5613], [5485, 5486, 5614], [5486, 5615, 5614], [5486, 5487, 5616], [5486, 5616, 5615], [5487, 5488, 5616], [5488, 5617, 5616], [5488, 5489, 5618], [5488, 5618, 5617], [5489, 5490, 5618], [5490, 5619, 5618], [5490, 5491, 5620], [5490, 5620, 5619], [5491, 5492, 5620], [5492, 5621, 5620], [5492, 5493, 5622], [5492, 5622, 5621], [5493, 5494, 5622], [5494, 5623, 5622], [5494, 5495, 5624], [5494, 5624, 5623], [5495, 5496, 5624], [5496, 5625, 5624], [5496, 5497, 5626], [5496, 5626, 5625], [5497, 5498, 5626], [5498, 5627, 5626], [5498, 5499, 5628], [5498, 5628, 5627], [5499, 5500, 
5628], [5500, 5629, 5628], [5500, 5501, 5630], [5500, 5630, 5629], [5501, 5502, 5630], [5502, 5631, 5630], [5502, 5503, 5632], [5502, 5632, 5631], [5503, 5504, 5632], [5504, 5633, 5632], [5504, 5505, 5634], [5504, 5634, 5633], [5505, 5506, 5634], [5506, 5635, 5634], [5506, 5507, 5636], [5506, 5636, 5635], [5507, 5508, 5636], [5508, 5637, 5636], [5508, 5509, 5638], [5508, 5638, 5637], [5509, 5510, 5638], [5510, 5639, 5638], [5510, 5511, 5640], [5510, 5640, 5639], [5511, 5512, 5640], [5512, 5641, 5640], [5512, 5513, 5642], [5512, 5642, 5641], [5513, 5514, 5642], [5514, 5643, 5642], [5514, 5515, 5644], [5514, 5644, 5643], [5515, 5516, 5644], [5516, 5645, 5644], [5516, 5517, 5646], [5516, 5646, 5645], [5517, 5518, 5646], [5518, 5647, 5646], [5518, 5519, 5648], [5518, 5648, 5647], [5519, 5520, 5648], [5520, 5649, 5648], [5520, 5521, 5650], [5520, 5650, 5649], [5521, 5522, 5650], [5522, 5651, 5650], [5522, 5523, 5652], [5522, 5652, 5651], [5523, 5524, 5652], [5524, 5653, 5652], [5524, 5525, 5654], [5524, 5654, 5653], [5525, 5526, 5654], [5526, 5655, 5654], [5526, 5527, 5656], [5526, 5656, 5655], [5527, 5528, 5656], [5528, 5657, 5656], [5528, 5529, 5658], [5528, 5658, 5657], [5529, 5530, 5658], [5530, 5659, 5658], [5530, 5531, 5660], [5530, 5660, 5659], [5531, 5532, 5660], [5532, 5661, 5660], [5532, 5533, 5662], [5532, 5662, 5661], [5533, 5534, 5662], [5534, 5663, 5662], [5534, 5535, 5664], [5534, 5664, 5663], [5535, 5536, 5664], [5536, 5665, 5664], [5536, 5537, 5666], [5536, 5666, 5665], [5537, 5538, 5666], [5538, 5667, 5666], [5538, 5539, 5668], [5538, 5668, 5667], [5539, 5540, 5668], [5540, 5669, 5668], [5540, 5541, 5670], [5540, 5670, 5669], [5541, 5542, 5670], [5542, 5671, 5670], [5542, 5543, 5672], [5542, 5672, 5671], [5543, 5544, 5672], [5544, 5673, 5672], [5544, 5545, 5674], [5544, 5674, 5673], [5545, 5546, 5674], [5546, 5675, 5674], [5547, 5548, 5676], [5548, 5677, 5676], [5548, 5549, 5678], [5548, 5678, 5677], [5549, 5550, 5678], [5550, 5679, 5678], [5550, 5551, 5680], [5550, 5680, 5679], [5551, 5552, 5680], [5552, 5681, 5680], [5552, 5553, 5682], [5552, 5682, 5681], [5553, 5554, 5682], [5554, 5683, 5682], [5554, 5555, 5684], [5554, 5684, 5683], [5555, 5556, 5684], [5556, 5685, 5684], [5556, 5557, 5686], [5556, 5686, 5685], [5557, 5558, 5686], [5558, 5687, 5686], [5558, 5559, 5688], [5558, 5688, 5687], [5559, 5560, 5688], [5560, 5689, 5688], [5560, 5561, 5690], [5560, 5690, 5689], [5561, 5562, 5690], [5562, 5691, 5690], [5562, 5563, 5692], [5562, 5692, 5691], [5563, 5564, 5692], [5564, 5693, 5692], [5564, 5565, 5694], [5564, 5694, 5693], [5565, 5566, 5694], [5566, 5695, 5694], [5566, 5567, 5696], [5566, 5696, 5695], [5567, 5568, 5696], [5568, 5697, 5696], [5568, 5569, 5698], [5568, 5698, 5697], [5569, 5570, 5698], [5570, 5699, 5698], [5570, 5571, 5700], [5570, 5700, 5699], [5571, 5572, 5700], [5572, 5701, 5700], [5572, 5573, 5702], [5572, 5702, 5701], [5573, 5574, 5702], [5574, 5703, 5702], [5574, 5575, 5704], [5574, 5704, 5703], [5575, 5576, 5704], [5576, 5705, 5704], [5576, 5577, 5706], [5576, 5706, 5705], [5577, 5578, 5706], [5578, 5707, 5706], [5578, 5579, 5708], [5578, 5708, 5707], [5579, 5580, 5708], [5580, 5709, 5708], [5580, 5581, 5710], [5580, 5710, 5709], [5581, 5582, 5710], [5582, 5711, 5710], [5582, 5583, 5712], [5582, 5712, 5711], [5583, 5584, 5712], [5584, 5713, 5712], [5584, 5585, 5714], [5584, 5714, 5713], [5585, 5586, 5714], [5586, 5715, 5714], [5586, 5587, 5716], [5586, 5716, 5715], [5587, 5588, 5716], [5588, 5717, 5716], [5588, 5589, 5718], [5588, 5718, 5717], [5589, 
5590, 5718], [5590, 5719, 5718], [5590, 5591, 5720], [5590, 5720, 5719], [5591, 5592, 5720], [5592, 5721, 5720], [5592, 5593, 5722], [5592, 5722, 5721], [5593, 5594, 5722], [5594, 5723, 5722], [5594, 5595, 5724], [5594, 5724, 5723], [5595, 5596, 5724], [5596, 5725, 5724], [5596, 5597, 5726], [5596, 5726, 5725], [5597, 5598, 5726], [5598, 5727, 5726], [5598, 5599, 5728], [5598, 5728, 5727], [5599, 5600, 5728], [5600, 5729, 5728], [5600, 5601, 5730], [5600, 5730, 5729], [5601, 5602, 5730], [5602, 5731, 5730], [5602, 5603, 5732], [5602, 5732, 5731], [5603, 5604, 5732], [5604, 5733, 5732], [5604, 5605, 5734], [5604, 5734, 5733], [5605, 5606, 5734], [5606, 5735, 5734], [5606, 5607, 5736], [5606, 5736, 5735], [5607, 5608, 5736], [5608, 5737, 5736], [5608, 5609, 5738], [5608, 5738, 5737], [5609, 5610, 5738], [5610, 5739, 5738], [5610, 5611, 5740], [5610, 5740, 5739], [5611, 5612, 5740], [5612, 5741, 5740], [5612, 5613, 5742], [5612, 5742, 5741], [5613, 5614, 5742], [5614, 5743, 5742], [5614, 5615, 5744], [5614, 5744, 5743], [5615, 5616, 5744], [5616, 5745, 5744], [5616, 5617, 5746], [5616, 5746, 5745], [5617, 5618, 5746], [5618, 5747, 5746], [5618, 5619, 5748], [5618, 5748, 5747], [5619, 5620, 5748], [5620, 5749, 5748], [5620, 5621, 5750], [5620, 5750, 5749], [5621, 5622, 5750], [5622, 5751, 5750], [5622, 5623, 5752], [5622, 5752, 5751], [5623, 5624, 5752], [5624, 5753, 5752], [5624, 5625, 5754], [5624, 5754, 5753], [5625, 5626, 5754], [5626, 5755, 5754], [5626, 5627, 5756], [5626, 5756, 5755], [5627, 5628, 5756], [5628, 5757, 5756], [5628, 5629, 5758], [5628, 5758, 5757], [5629, 5630, 5758], [5630, 5759, 5758], [5630, 5631, 5760], [5630, 5760, 5759], [5631, 5632, 5760], [5632, 5761, 5760], [5632, 5633, 5762], [5632, 5762, 5761], [5633, 5634, 5762], [5634, 5763, 5762], [5634, 5635, 5764], [5634, 5764, 5763], [5635, 5636, 5764], [5636, 5765, 5764], [5636, 5637, 5766], [5636, 5766, 5765], [5637, 5638, 5766], [5638, 5767, 5766], [5638, 5639, 5768], [5638, 5768, 5767], [5639, 5640, 5768], [5640, 5769, 5768], [5640, 5641, 5770], [5640, 5770, 5769], [5641, 5642, 5770], [5642, 5771, 5770], [5642, 5643, 5772], [5642, 5772, 5771], [5643, 5644, 5772], [5644, 5773, 5772], [5644, 5645, 5774], [5644, 5774, 5773], [5645, 5646, 5774], [5646, 5775, 5774], [5646, 5647, 5776], [5646, 5776, 5775], [5647, 5648, 5776], [5648, 5777, 5776], [5648, 5649, 5778], [5648, 5778, 5777], [5649, 5650, 5778], [5650, 5779, 5778], [5650, 5651, 5780], [5650, 5780, 5779], [5651, 5652, 5780], [5652, 5781, 5780], [5652, 5653, 5782], [5652, 5782, 5781], [5653, 5654, 5782], [5654, 5783, 5782], [5654, 5655, 5784], [5654, 5784, 5783], [5655, 5656, 5784], [5656, 5785, 5784], [5656, 5657, 5786], [5656, 5786, 5785], [5657, 5658, 5786], [5658, 5787, 5786], [5658, 5659, 5788], [5658, 5788, 5787], [5659, 5660, 5788], [5660, 5789, 5788], [5660, 5661, 5790], [5660, 5790, 5789], [5661, 5662, 5790], [5662, 5791, 5790], [5662, 5663, 5792], [5662, 5792, 5791], [5663, 5664, 5792], [5664, 5793, 5792], [5664, 5665, 5794], [5664, 5794, 5793], [5665, 5666, 5794], [5666, 5795, 5794], [5666, 5667, 5796], [5666, 5796, 5795], [5667, 5668, 5796], [5668, 5797, 5796], [5668, 5669, 5798], [5668, 5798, 5797], [5669, 5670, 5798], [5670, 5799, 5798], [5670, 5671, 5800], [5670, 5800, 5799], [5671, 5672, 5800], [5672, 5801, 5800], [5672, 5673, 5802], [5672, 5802, 5801], [5673, 5674, 5802], [5674, 5803, 5802], [5674, 5675, 5804], [5674, 5804, 5803], [5676, 5677, 5806], [5676, 5806, 5805], [5677, 5678, 5806], [5678, 5807, 5806], [5678, 5679, 5808], [5678, 5808, 5807], 
[5679, 5680, 5808], [5680, 5809, 5808], [5680, 5681, 5810], [5680, 5810, 5809], [5681, 5682, 5810], [5682, 5811, 5810], [5682, 5683, 5812], [5682, 5812, 5811], [5683, 5684, 5812], [5684, 5813, 5812], [5684, 5685, 5814], [5684, 5814, 5813], [5685, 5686, 5814], [5686, 5815, 5814], [5686, 5687, 5816], [5686, 5816, 5815], [5687, 5688, 5816], [5688, 5817, 5816], [5688, 5689, 5818], [5688, 5818, 5817], [5689, 5690, 5818], [5690, 5819, 5818], [5690, 5691, 5820], [5690, 5820, 5819], [5691, 5692, 5820], [5692, 5821, 5820], [5692, 5693, 5822], [5692, 5822, 5821], [5693, 5694, 5822], [5694, 5823, 5822], [5694, 5695, 5824], [5694, 5824, 5823], [5695, 5696, 5824], [5696, 5825, 5824], [5696, 5697, 5826], [5696, 5826, 5825], [5697, 5698, 5826], [5698, 5827, 5826], [5698, 5699, 5828], [5698, 5828, 5827], [5699, 5700, 5828], [5700, 5829, 5828], [5700, 5701, 5830], [5700, 5830, 5829], [5701, 5702, 5830], [5702, 5831, 5830], [5702, 5703, 5832], [5702, 5832, 5831], [5703, 5704, 5832], [5704, 5833, 5832], [5704, 5705, 5834], [5704, 5834, 5833], [5705, 5706, 5834], [5706, 5835, 5834], [5706, 5707, 5836], [5706, 5836, 5835], [5707, 5708, 5836], [5708, 5837, 5836], [5708, 5709, 5838], [5708, 5838, 5837], [5709, 5710, 5838], [5710, 5839, 5838], [5710, 5711, 5840], [5710, 5840, 5839], [5711, 5712, 5840], [5712, 5841, 5840], [5712, 5713, 5842], [5712, 5842, 5841], [5713, 5714, 5842], [5714, 5843, 5842], [5714, 5715, 5844], [5714, 5844, 5843], [5715, 5716, 5844], [5716, 5845, 5844], [5716, 5717, 5846], [5716, 5846, 5845], [5717, 5718, 5846], [5718, 5847, 5846], [5718, 5719, 5848], [5718, 5848, 5847], [5719, 5720, 5848], [5720, 5849, 5848], [5720, 5721, 5850], [5720, 5850, 5849], [5721, 5722, 5850], [5722, 5851, 5850], [5722, 5723, 5852], [5722, 5852, 5851], [5723, 5724, 5852], [5724, 5853, 5852], [5724, 5725, 5854], [5724, 5854, 5853], [5725, 5726, 5854], [5726, 5855, 5854], [5726, 5727, 5856], [5726, 5856, 5855], [5727, 5728, 5856], [5728, 5857, 5856], [5728, 5729, 5858], [5728, 5858, 5857], [5729, 5730, 5858], [5730, 5859, 5858], [5730, 5731, 5860], [5730, 5860, 5859], [5731, 5732, 5860], [5732, 5861, 5860], [5732, 5733, 5862], [5732, 5862, 5861], [5733, 5734, 5862], [5734, 5863, 5862], [5734, 5735, 5864], [5734, 5864, 5863], [5735, 5736, 5864], [5736, 5865, 5864], [5736, 5737, 5866], [5736, 5866, 5865], [5737, 5738, 5866], [5738, 5867, 5866], [5738, 5739, 5868], [5738, 5868, 5867], [5739, 5740, 5868], [5740, 5869, 5868], [5740, 5741, 5870], [5740, 5870, 5869], [5741, 5742, 5870], [5742, 5871, 5870], [5742, 5743, 5872], [5742, 5872, 5871], [5743, 5744, 5872], [5744, 5873, 5872], [5744, 5745, 5874], [5744, 5874, 5873], [5745, 5746, 5874], [5746, 5875, 5874], [5746, 5747, 5876], [5746, 5876, 5875], [5747, 5748, 5876], [5748, 5877, 5876], [5748, 5749, 5878], [5748, 5878, 5877], [5749, 5750, 5878], [5750, 5879, 5878], [5750, 5751, 5880], [5750, 5880, 5879], [5751, 5752, 5880], [5752, 5881, 5880], [5752, 5753, 5882], [5752, 5882, 5881], [5753, 5754, 5882], [5754, 5883, 5882], [5754, 5755, 5884], [5754, 5884, 5883], [5755, 5756, 5884], [5756, 5885, 5884], [5756, 5757, 5886], [5756, 5886, 5885], [5757, 5758, 5886], [5758, 5887, 5886], [5758, 5759, 5888], [5758, 5888, 5887], [5759, 5760, 5888], [5760, 5889, 5888], [5760, 5761, 5890], [5760, 5890, 5889], [5761, 5762, 5890], [5762, 5891, 5890], [5762, 5763, 5892], [5762, 5892, 5891], [5763, 5764, 5892], [5764, 5893, 5892], [5764, 5765, 5894], [5764, 5894, 5893], [5765, 5766, 5894], [5766, 5895, 5894], [5766, 5767, 5896], [5766, 5896, 5895], [5767, 5768, 5896], [5768, 5897, 
5896], [5768, 5769, 5898], [5768, 5898, 5897], [5769, 5770, 5898], [5770, 5899, 5898], [5770, 5771, 5900], [5770, 5900, 5899], [5771, 5772, 5900], [5772, 5901, 5900], [5772, 5773, 5902], [5772, 5902, 5901], [5773, 5774, 5902], [5774, 5903, 5902], [5774, 5775, 5904], [5774, 5904, 5903], [5775, 5776, 5904], [5776, 5905, 5904], [5776, 5777, 5906], [5776, 5906, 5905], [5777, 5778, 5906], [5778, 5907, 5906], [5778, 5779, 5908], [5778, 5908, 5907], [5779, 5780, 5908], [5780, 5909, 5908], [5780, 5781, 5910], [5780, 5910, 5909], [5781, 5782, 5910], [5782, 5911, 5910], [5782, 5783, 5912], [5782, 5912, 5911], [5783, 5784, 5912], [5784, 5913, 5912], [5784, 5785, 5914], [5784, 5914, 5913], [5785, 5786, 5914], [5786, 5915, 5914], [5786, 5787, 5916], [5786, 5916, 5915], [5787, 5788, 5916], [5788, 5917, 5916], [5788, 5789, 5918], [5788, 5918, 5917], [5789, 5790, 5918], [5790, 5919, 5918], [5790, 5791, 5920], [5790, 5920, 5919], [5791, 5792, 5920], [5792, 5921, 5920], [5792, 5793, 5922], [5792, 5922, 5921], [5793, 5794, 5922], [5794, 5923, 5922], [5794, 5795, 5924], [5794, 5924, 5923], [5795, 5796, 5924], [5796, 5925, 5924], [5796, 5797, 5926], [5796, 5926, 5925], [5797, 5798, 5926], [5798, 5927, 5926], [5798, 5799, 5928], [5798, 5928, 5927], [5799, 5800, 5928], [5800, 5929, 5928], [5800, 5801, 5930], [5800, 5930, 5929], [5801, 5802, 5930], [5802, 5931, 5930], [5802, 5803, 5932], [5802, 5932, 5931], [5803, 5804, 5932], [5804, 5933, 5932], [5805, 5806, 5934], [5806, 5935, 5934], [5806, 5807, 5936], [5806, 5936, 5935], [5807, 5808, 5936], [5808, 5937, 5936], [5808, 5809, 5938], [5808, 5938, 5937], [5809, 5810, 5938], [5810, 5939, 5938], [5810, 5811, 5940], [5810, 5940, 5939], [5811, 5812, 5940], [5812, 5941, 5940], [5812, 5813, 5942], [5812, 5942, 5941], [5813, 5814, 5942], [5814, 5943, 5942], [5814, 5815, 5944], [5814, 5944, 5943], [5815, 5816, 5944], [5816, 5945, 5944], [5816, 5817, 5946], [5816, 5946, 5945], [5817, 5818, 5946], [5818, 5947, 5946], [5818, 5819, 5948], [5818, 5948, 5947], [5819, 5820, 5948], [5820, 5949, 5948], [5820, 5821, 5950], [5820, 5950, 5949], [5821, 5822, 5950], [5822, 5951, 5950], [5822, 5823, 5952], [5822, 5952, 5951], [5823, 5824, 5952], [5824, 5953, 5952], [5824, 5825, 5954], [5824, 5954, 5953], [5825, 5826, 5954], [5826, 5955, 5954], [5826, 5827, 5956], [5826, 5956, 5955], [5827, 5828, 5956], [5828, 5957, 5956], [5828, 5829, 5958], [5828, 5958, 5957], [5829, 5830, 5958], [5830, 5959, 5958], [5830, 5831, 5960], [5830, 5960, 5959], [5831, 5832, 5960], [5832, 5961, 5960], [5832, 5833, 5962], [5832, 5962, 5961], [5833, 5834, 5962], [5834, 5963, 5962], [5834, 5835, 5964], [5834, 5964, 5963], [5835, 5836, 5964], [5836, 5965, 5964], [5836, 5837, 5966], [5836, 5966, 5965], [5837, 5838, 5966], [5838, 5967, 5966], [5838, 5839, 5968], [5838, 5968, 5967], [5839, 5840, 5968], [5840, 5969, 5968], [5840, 5841, 5970], [5840, 5970, 5969], [5841, 5842, 5970], [5842, 5971, 5970], [5842, 5843, 5972], [5842, 5972, 5971], [5843, 5844, 5972], [5844, 5973, 5972], [5844, 5845, 5974], [5844, 5974, 5973], [5845, 5846, 5974], [5846, 5975, 5974], [5846, 5847, 5976], [5846, 5976, 5975], [5847, 5848, 5976], [5848, 5977, 5976], [5848, 5849, 5978], [5848, 5978, 5977], [5849, 5850, 5978], [5850, 5979, 5978], [5850, 5851, 5980], [5850, 5980, 5979], [5851, 5852, 5980], [5852, 5981, 5980], [5852, 5853, 5982], [5852, 5982, 5981], [5853, 5854, 5982], [5854, 5983, 5982], [5854, 5855, 5984], [5854, 5984, 5983], [5855, 5856, 5984], [5856, 5985, 5984], [5856, 5857, 5986], [5856, 5986, 5985], [5857, 5858, 5986], [5858, 
5987, 5986], [5858, 5859, 5988], [5858, 5988, 5987], [5859, 5860, 5988], [5860, 5989, 5988], [5860, 5861, 5990], [5860, 5990, 5989], [5861, 5862, 5990], [5862, 5991, 5990], [5862, 5863, 5992], [5862, 5992, 5991], [5863, 5864, 5992], [5864, 5993, 5992], [5864, 5865, 5994], [5864, 5994, 5993], [5865, 5866, 5994], [5866, 5995, 5994], [5866, 5867, 5996], [5866, 5996, 5995], [5867, 5868, 5996], [5868, 5997, 5996], [5868, 5869, 5998], [5868, 5998, 5997], [5869, 5870, 5998], [5870, 5999, 5998], [5870, 5871, 6000], [5870, 6000, 5999], [5871, 5872, 6000], [5872, 6001, 6000], [5872, 5873, 6002], [5872, 6002, 6001], [5873, 5874, 6002], [5874, 6003, 6002], [5874, 5875, 6004], [5874, 6004, 6003], [5875, 5876, 6004], [5876, 6005, 6004], [5876, 5877, 6006], [5876, 6006, 6005], [5877, 5878, 6006], [5878, 6007, 6006], [5878, 5879, 6008], [5878, 6008, 6007], [5879, 5880, 6008], [5880, 6009, 6008], [5880, 5881, 6010], [5880, 6010, 6009], [5881, 5882, 6010], [5882, 6011, 6010], [5882, 5883, 6012], [5882, 6012, 6011], [5883, 5884, 6012], [5884, 6013, 6012], [5884, 5885, 6014], [5884, 6014, 6013], [5885, 5886, 6014], [5886, 6015, 6014], [5886, 5887, 6016], [5886, 6016, 6015], [5887, 5888, 6016], [5888, 6017, 6016], [5888, 5889, 6018], [5888, 6018, 6017], [5889, 5890, 6018], [5890, 6019, 6018], [5890, 5891, 6020], [5890, 6020, 6019], [5891, 5892, 6020], [5892, 6021, 6020], [5892, 5893, 6022], [5892, 6022, 6021], [5893, 5894, 6022], [5894, 6023, 6022], [5894, 5895, 6024], [5894, 6024, 6023], [5895, 5896, 6024], [5896, 6025, 6024], [5896, 5897, 6026], [5896, 6026, 6025], [5897, 5898, 6026], [5898, 6027, 6026], [5898, 5899, 6028], [5898, 6028, 6027], [5899, 5900, 6028], [5900, 6029, 6028], [5900, 5901, 6030], [5900, 6030, 6029], [5901, 5902, 6030], [5902, 6031, 6030], [5902, 5903, 6032], [5902, 6032, 6031], [5903, 5904, 6032], [5904, 6033, 6032], [5904, 5905, 6034], [5904, 6034, 6033], [5905, 5906, 6034], [5906, 6035, 6034], [5906, 5907, 6036], [5906, 6036, 6035], [5907, 5908, 6036], [5908, 6037, 6036], [5908, 5909, 6038], [5908, 6038, 6037], [5909, 5910, 6038], [5910, 6039, 6038], [5910, 5911, 6040], [5910, 6040, 6039], [5911, 5912, 6040], [5912, 6041, 6040], [5912, 5913, 6042], [5912, 6042, 6041], [5913, 5914, 6042], [5914, 6043, 6042], [5914, 5915, 6044], [5914, 6044, 6043], [5915, 5916, 6044], [5916, 6045, 6044], [5916, 5917, 6046], [5916, 6046, 6045], [5917, 5918, 6046], [5918, 6047, 6046], [5918, 5919, 6048], [5918, 6048, 6047], [5919, 5920, 6048], [5920, 6049, 6048], [5920, 5921, 6050], [5920, 6050, 6049], [5921, 5922, 6050], [5922, 6051, 6050], [5922, 5923, 6052], [5922, 6052, 6051], [5923, 5924, 6052], [5924, 6053, 6052], [5924, 5925, 6054], [5924, 6054, 6053], [5925, 5926, 6054], [5926, 6055, 6054], [5926, 5927, 6056], [5926, 6056, 6055], [5927, 5928, 6056], [5928, 6057, 6056], [5928, 5929, 6058], [5928, 6058, 6057], [5929, 5930, 6058], [5930, 6059, 6058], [5930, 5931, 6060], [5930, 6060, 6059], [5931, 5932, 6060], [5932, 6061, 6060], [5932, 5933, 6062], [5932, 6062, 6061], [5934, 5935, 6064], [5934, 6064, 6063], [5935, 5936, 6064], [5936, 6065, 6064], [5936, 5937, 6066], [5936, 6066, 6065], [5937, 5938, 6066], [5938, 6067, 6066], [5938, 5939, 6068], [5938, 6068, 6067], [5939, 5940, 6068], [5940, 6069, 6068], [5940, 5941, 6070], [5940, 6070, 6069], [5941, 5942, 6070], [5942, 6071, 6070], [5942, 5943, 6072], [5942, 6072, 6071], [5943, 5944, 6072], [5944, 6073, 6072], [5944, 5945, 6074], [5944, 6074, 6073], [5945, 5946, 6074], [5946, 6075, 6074], [5946, 5947, 6076], [5946, 6076, 6075], [5947, 5948, 6076], 
[5948, 6077, 6076], [5948, 5949, 6078], [5948, 6078, 6077], [5949, 5950, 6078], [5950, 6079, 6078], [5950, 5951, 6080], [5950, 6080, 6079], [5951, 5952, 6080], [5952, 6081, 6080], [5952, 5953, 6082], [5952, 6082, 6081], [5953, 5954, 6082], [5954, 6083, 6082], [5954, 5955, 6084], [5954, 6084, 6083], [5955, 5956, 6084], [5956, 6085, 6084], [5956, 5957, 6086], [5956, 6086, 6085], [5957, 5958, 6086], [5958, 6087, 6086], [5958, 5959, 6088], [5958, 6088, 6087], [5959, 5960, 6088], [5960, 6089, 6088], [5960, 5961, 6090], [5960, 6090, 6089], [5961, 5962, 6090], [5962, 6091, 6090], [5962, 5963, 6092], [5962, 6092, 6091], [5963, 5964, 6092], [5964, 6093, 6092], [5964, 5965, 6094], [5964, 6094, 6093], [5965, 5966, 6094], [5966, 6095, 6094], [5966, 5967, 6096], [5966, 6096, 6095], [5967, 5968, 6096], [5968, 6097, 6096], [5968, 5969, 6098], [5968, 6098, 6097], [5969, 5970, 6098], [5970, 6099, 6098], [5970, 5971, 6100], [5970, 6100, 6099], [5971, 5972, 6100], [5972, 6101, 6100], [5972, 5973, 6102], [5972, 6102, 6101], [5973, 5974, 6102], [5974, 6103, 6102], [5974, 5975, 6104], [5974, 6104, 6103], [5975, 5976, 6104], [5976, 6105, 6104], [5976, 5977, 6106], [5976, 6106, 6105], [5977, 5978, 6106], [5978, 6107, 6106], [5978, 5979, 6108], [5978, 6108, 6107], [5979, 5980, 6108], [5980, 6109, 6108], [5980, 5981, 6110], [5980, 6110, 6109], [5981, 5982, 6110], [5982, 6111, 6110], [5982, 5983, 6112], [5982, 6112, 6111], [5983, 5984, 6112], [5984, 6113, 6112], [5984, 5985, 6114], [5984, 6114, 6113], [5985, 5986, 6114], [5986, 6115, 6114], [5986, 5987, 6116], [5986, 6116, 6115], [5987, 5988, 6116], [5988, 6117, 6116], [5988, 5989, 6118], [5988, 6118, 6117], [5989, 5990, 6118], [5990, 6119, 6118], [5990, 5991, 6120], [5990, 6120, 6119], [5991, 5992, 6120], [5992, 6121, 6120], [5992, 5993, 6122], [5992, 6122, 6121], [5993, 5994, 6122], [5994, 6123, 6122], [5994, 5995, 6124], [5994, 6124, 6123], [5995, 5996, 6124], [5996, 6125, 6124], [5996, 5997, 6126], [5996, 6126, 6125], [5997, 5998, 6126], [5998, 6127, 6126], [5998, 5999, 6128], [5998, 6128, 6127], [5999, 6000, 6128], [6000, 6129, 6128], [6000, 6001, 6130], [6000, 6130, 6129], [6001, 6002, 6130], [6002, 6131, 6130], [6002, 6003, 6132], [6002, 6132, 6131], [6003, 6004, 6132], [6004, 6133, 6132], [6004, 6005, 6134], [6004, 6134, 6133], [6005, 6006, 6134], [6006, 6135, 6134], [6006, 6007, 6136], [6006, 6136, 6135], [6007, 6008, 6136], [6008, 6137, 6136], [6008, 6009, 6138], [6008, 6138, 6137], [6009, 6010, 6138], [6010, 6139, 6138], [6010, 6011, 6140], [6010, 6140, 6139], [6011, 6012, 6140], [6012, 6141, 6140], [6012, 6013, 6142], [6012, 6142, 6141], [6013, 6014, 6142], [6014, 6143, 6142], [6014, 6015, 6144], [6014, 6144, 6143], [6015, 6016, 6144], [6016, 6145, 6144], [6016, 6017, 6146], [6016, 6146, 6145], [6017, 6018, 6146], [6018, 6147, 6146], [6018, 6019, 6148], [6018, 6148, 6147], [6019, 6020, 6148], [6020, 6149, 6148], [6020, 6021, 6150], [6020, 6150, 6149], [6021, 6022, 6150], [6022, 6151, 6150], [6022, 6023, 6152], [6022, 6152, 6151], [6023, 6024, 6152], [6024, 6153, 6152], [6024, 6025, 6154], [6024, 6154, 6153], [6025, 6026, 6154], [6026, 6155, 6154], [6026, 6027, 6156], [6026, 6156, 6155], [6027, 6028, 6156], [6028, 6157, 6156], [6028, 6029, 6158], [6028, 6158, 6157], [6029, 6030, 6158], [6030, 6159, 6158], [6030, 6031, 6160], [6030, 6160, 6159], [6031, 6032, 6160], [6032, 6161, 6160], [6032, 6033, 6162], [6032, 6162, 6161], [6033, 6034, 6162], [6034, 6163, 6162], [6036, 6037, 6164], [6037, 6038, 6164], [6038, 6165, 6164], [6038, 6039, 6166], [6038, 6166, 
6165], [6039, 6040, 6166], [6040, 6167, 6166], [6040, 6041, 6168], [6040, 6168, 6167], [6041, 6042, 6168], [6042, 6169, 6168], [6042, 6043, 6170], [6042, 6170, 6169], [6043, 6044, 6170], [6044, 6171, 6170], [6044, 6045, 6172], [6044, 6172, 6171], [6045, 6046, 6172], [6046, 6173, 6172], [6046, 6047, 6174], [6046, 6174, 6173], [6047, 6048, 6174], [6048, 6175, 6174], [6048, 6049, 6176], [6048, 6176, 6175], [6049, 6050, 6176], [6050, 6177, 6176], [6050, 6051, 6178], [6050, 6178, 6177], [6051, 6052, 6178], [6052, 6179, 6178], [6052, 6053, 6180], [6052, 6180, 6179], [6053, 6054, 6180], [6054, 6181, 6180], [6054, 6055, 6182], [6054, 6182, 6181], [6055, 6056, 6182], [6056, 6183, 6182], [6056, 6057, 6184], [6056, 6184, 6183], [6057, 6058, 6184], [6058, 6185, 6184], [6058, 6059, 6186], [6058, 6186, 6185], [6059, 6060, 6186], [6060, 6187, 6186], [6060, 6061, 6188], [6060, 6188, 6187], [6061, 6062, 6188], [6062, 6189, 6188], [6063, 6064, 6190], [6064, 6191, 6190], [6064, 6065, 6192], [6064, 6192, 6191], [6065, 6066, 6192], [6066, 6193, 6192], [6066, 6067, 6194], [6066, 6194, 6193], [6067, 6068, 6194], [6068, 6195, 6194], [6068, 6069, 6196], [6068, 6196, 6195], [6069, 6070, 6196], [6070, 6197, 6196], [6070, 6071, 6198], [6070, 6198, 6197], [6071, 6072, 6198], [6072, 6199, 6198], [6072, 6073, 6200], [6072, 6200, 6199], [6073, 6074, 6200], [6074, 6201, 6200], [6074, 6075, 6202], [6074, 6202, 6201], [6075, 6076, 6202], [6076, 6203, 6202], [6076, 6077, 6204], [6076, 6204, 6203], [6077, 6078, 6204], [6078, 6205, 6204], [6078, 6079, 6206], [6078, 6206, 6205], [6079, 6080, 6206], [6080, 6207, 6206], [6080, 6081, 6208], [6080, 6208, 6207], [6081, 6082, 6208], [6082, 6209, 6208], [6082, 6083, 6210], [6082, 6210, 6209], [6083, 6084, 6210], [6084, 6211, 6210], [6084, 6085, 6212], [6084, 6212, 6211], [6085, 6086, 6212], [6086, 6213, 6212], [6086, 6087, 6214], [6086, 6214, 6213], [6087, 6088, 6214], [6088, 6215, 6214], [6088, 6089, 6216], [6088, 6216, 6215], [6089, 6090, 6216], [6090, 6217, 6216], [6090, 6091, 6218], [6090, 6218, 6217], [6091, 6092, 6218], [6092, 6219, 6218], [6092, 6093, 6220], [6092, 6220, 6219], [6093, 6094, 6220], [6094, 6221, 6220], [6094, 6095, 6222], [6094, 6222, 6221], [6095, 6096, 6222], [6096, 6223, 6222], [6096, 6097, 6224], [6096, 6224, 6223], [6097, 6098, 6224], [6098, 6225, 6224], [6098, 6099, 6226], [6098, 6226, 6225], [6099, 6100, 6226], [6100, 6227, 6226], [6100, 6101, 6228], [6100, 6228, 6227], [6101, 6102, 6228], [6102, 6229, 6228], [6102, 6103, 6230], [6102, 6230, 6229], [6103, 6104, 6230], [6104, 6231, 6230], [6104, 6105, 6232], [6104, 6232, 6231], [6105, 6106, 6232], [6106, 6233, 6232], [6106, 6107, 6234], [6106, 6234, 6233], [6107, 6108, 6234], [6108, 6235, 6234], [6108, 6109, 6236], [6108, 6236, 6235], [6109, 6110, 6236], [6110, 6237, 6236], [6110, 6111, 6238], [6110, 6238, 6237], [6111, 6112, 6238], [6112, 6239, 6238], [6112, 6113, 6240], [6112, 6240, 6239], [6113, 6114, 6240], [6114, 6241, 6240], [6114, 6115, 6242], [6114, 6242, 6241], [6115, 6116, 6242], [6116, 6243, 6242], [6116, 6117, 6244], [6116, 6244, 6243], [6117, 6118, 6244], [6118, 6245, 6244], [6118, 6119, 6246], [6118, 6246, 6245], [6119, 6120, 6246], [6120, 6247, 6246], [6120, 6121, 6248], [6120, 6248, 6247], [6121, 6122, 6248], [6122, 6249, 6248], [6122, 6123, 6250], [6122, 6250, 6249], [6123, 6124, 6250], [6124, 6251, 6250], [6124, 6125, 6252], [6124, 6252, 6251], [6125, 6126, 6252], [6126, 6253, 6252], [6126, 6127, 6254], [6126, 6254, 6253], [6127, 6128, 6254], [6128, 6255, 6254], [6128, 6129, 6256], [6128, 
6256, 6255], [6129, 6130, 6256], [6130, 6257, 6256], [6130, 6131, 6258], [6130, 6258, 6257], [6131, 6132, 6258], [6132, 6259, 6258], [6132, 6133, 6260], [6132, 6260, 6259], [6133, 6134, 6260], [6134, 6261, 6260], [6134, 6135, 6262], [6134, 6262, 6261], [6135, 6136, 6262], [6136, 6263, 6262], [6136, 6137, 6264], [6136, 6264, 6263], [6137, 6138, 6264], [6138, 6265, 6264], [6138, 6139, 6266], [6138, 6266, 6265], [6139, 6140, 6266], [6140, 6267, 6266], [6140, 6141, 6268], [6140, 6268, 6267], [6141, 6142, 6268], [6142, 6269, 6268], [6142, 6143, 6270], [6142, 6270, 6269], [6143, 6144, 6270], [6144, 6271, 6270], [6144, 6145, 6272], [6144, 6272, 6271], [6145, 6146, 6272], [6146, 6273, 6272], [6146, 6147, 6274], [6146, 6274, 6273], [6147, 6148, 6274], [6148, 6275, 6274], [6148, 6149, 6276], [6148, 6276, 6275], [6149, 6150, 6276], [6150, 6277, 6276], [6150, 6151, 6278], [6150, 6278, 6277], [6151, 6152, 6278], [6152, 6279, 6278], [6152, 6153, 6280], [6152, 6280, 6279], [6153, 6154, 6280], [6154, 6281, 6280], [6154, 6155, 6282], [6154, 6282, 6281], [6155, 6156, 6282], [6156, 6283, 6282], [6156, 6157, 6284], [6156, 6284, 6283], [6157, 6158, 6284], [6158, 6285, 6284], [6158, 6159, 6286], [6158, 6286, 6285], [6159, 6160, 6286], [6160, 6287, 6286], [6160, 6161, 6288], [6160, 6288, 6287], [6161, 6162, 6288], [6162, 6289, 6288], [6162, 6163, 6290], [6162, 6290, 6289], [6166, 6167, 6292], [6166, 6292, 6291], [6167, 6168, 6292], [6168, 6293, 6292], [6168, 6169, 6294], [6168, 6294, 6293], [6169, 6170, 6294], [6170, 6295, 6294], [6170, 6171, 6296], [6170, 6296, 6295], [6171, 6172, 6296], [6172, 6297, 6296], [6172, 6173, 6298], [6172, 6298, 6297], [6173, 6174, 6298], [6174, 6299, 6298], [6174, 6175, 6300], [6174, 6300, 6299], [6175, 6176, 6300], [6176, 6301, 6300], [6176, 6177, 6302], [6176, 6302, 6301], [6177, 6178, 6302], [6178, 6303, 6302], [6178, 6179, 6304], [6178, 6304, 6303], [6179, 6180, 6304], [6180, 6305, 6304], [6180, 6181, 6306], [6180, 6306, 6305], [6181, 6182, 6306], [6182, 6307, 6306], [6182, 6183, 6308], [6182, 6308, 6307], [6183, 6184, 6308], [6184, 6309, 6308], [6184, 6185, 6310], [6184, 6310, 6309], [6185, 6186, 6310], [6186, 6311, 6310], [6186, 6187, 6312], [6186, 6312, 6311], [6187, 6188, 6312], [6188, 6313, 6312], [6188, 6189, 6314], [6188, 6314, 6313], [6190, 6191, 6316], [6190, 6316, 6315], [6191, 6192, 6316], [6192, 6317, 6316], [6192, 6193, 6318], [6192, 6318, 6317], [6193, 6194, 6318], [6194, 6319, 6318], [6194, 6195, 6320], [6194, 6320, 6319], [6195, 6196, 6320], [6196, 6321, 6320], [6196, 6197, 6322], [6196, 6322, 6321], [6197, 6198, 6322], [6198, 6323, 6322], [6198, 6199, 6324], [6198, 6324, 6323], [6199, 6200, 6324], [6200, 6325, 6324], [6200, 6201, 6326], [6200, 6326, 6325], [6201, 6202, 6326], [6202, 6327, 6326], [6202, 6203, 6328], [6202, 6328, 6327], [6203, 6204, 6328], [6204, 6329, 6328], [6204, 6205, 6330], [6204, 6330, 6329], [6205, 6206, 6330], [6206, 6331, 6330], [6206, 6207, 6332], [6206, 6332, 6331], [6207, 6208, 6332], [6208, 6333, 6332], [6208, 6209, 6334], [6208, 6334, 6333], [6209, 6210, 6334], [6210, 6335, 6334], [6210, 6211, 6336], [6210, 6336, 6335], [6211, 6212, 6336], [6212, 6337, 6336], [6212, 6213, 6338], [6212, 6338, 6337], [6213, 6214, 6338], [6214, 6339, 6338], [6214, 6215, 6340], [6214, 6340, 6339], [6215, 6216, 6340], [6216, 6341, 6340], [6216, 6217, 6342], [6216, 6342, 6341], [6217, 6218, 6342], [6218, 6343, 6342], [6218, 6219, 6344], [6218, 6344, 6343], [6219, 6220, 6344], [6220, 6345, 6344], [6220, 6221, 6346], [6220, 6346, 6345], [6221, 6222, 6346], 
[6222, 6347, 6346], [6222, 6223, 6348], [6222, 6348, 6347], [6223, 6224, 6348], [6224, 6349, 6348], [6224, 6225, 6350], [6224, 6350, 6349], [6225, 6226, 6350], [6226, 6351, 6350], [6226, 6227, 6352], [6226, 6352, 6351], [6227, 6228, 6352], [6228, 6353, 6352], [6228, 6229, 6354], [6228, 6354, 6353], [6229, 6230, 6354], [6230, 6355, 6354], [6230, 6231, 6356], [6230, 6356, 6355], [6231, 6232, 6356], [6232, 6357, 6356], [6232, 6233, 6358], [6232, 6358, 6357], [6233, 6234, 6358], [6234, 6359, 6358], [6234, 6235, 6360], [6234, 6360, 6359], [6235, 6236, 6360], [6236, 6361, 6360], [6236, 6237, 6362], [6236, 6362, 6361], [6237, 6238, 6362], [6238, 6363, 6362], [6238, 6239, 6364], [6238, 6364, 6363], [6239, 6240, 6364], [6240, 6365, 6364], [6240, 6241, 6366], [6240, 6366, 6365], [6241, 6242, 6366], [6242, 6367, 6366], [6242, 6243, 6368], [6242, 6368, 6367], [6243, 6244, 6368], [6244, 6369, 6368], [6244, 6245, 6370], [6244, 6370, 6369], [6245, 6246, 6370], [6246, 6371, 6370], [6246, 6247, 6372], [6246, 6372, 6371], [6247, 6248, 6372], [6248, 6373, 6372], [6248, 6249, 6374], [6248, 6374, 6373], [6249, 6250, 6374], [6250, 6375, 6374], [6250, 6251, 6376], [6250, 6376, 6375], [6251, 6252, 6376], [6252, 6377, 6376], [6252, 6253, 6378], [6252, 6378, 6377], [6253, 6254, 6378], [6254, 6379, 6378], [6254, 6255, 6380], [6254, 6380, 6379], [6255, 6256, 6380], [6256, 6381, 6380], [6256, 6257, 6382], [6256, 6382, 6381], [6257, 6258, 6382], [6258, 6383, 6382], [6258, 6259, 6384], [6258, 6384, 6383], [6259, 6260, 6384], [6260, 6385, 6384], [6260, 6261, 6386], [6260, 6386, 6385], [6261, 6262, 6386], [6262, 6387, 6386], [6262, 6263, 6388], [6262, 6388, 6387], [6263, 6264, 6388], [6264, 6389, 6388], [6264, 6265, 6390], [6264, 6390, 6389], [6265, 6266, 6390], [6266, 6391, 6390], [6266, 6267, 6392], [6266, 6392, 6391], [6267, 6268, 6392], [6268, 6393, 6392], [6268, 6269, 6394], [6268, 6394, 6393], [6269, 6270, 6394], [6270, 6395, 6394], [6270, 6271, 6396], [6270, 6396, 6395], [6271, 6272, 6396], [6272, 6397, 6396], [6272, 6273, 6398], [6272, 6398, 6397], [6273, 6274, 6398], [6274, 6399, 6398], [6274, 6275, 6400], [6274, 6400, 6399], [6275, 6276, 6400], [6276, 6401, 6400], [6276, 6277, 6402], [6276, 6402, 6401], [6277, 6278, 6402], [6278, 6403, 6402], [6278, 6279, 6404], [6278, 6404, 6403], [6279, 6280, 6404], [6280, 6405, 6404], [6280, 6281, 6406], [6280, 6406, 6405], [6281, 6282, 6406], [6282, 6407, 6406], [6282, 6283, 6408], [6282, 6408, 6407], [6283, 6284, 6408], [6284, 6409, 6408], [6284, 6285, 6410], [6284, 6410, 6409], [6285, 6286, 6410], [6286, 6411, 6410], [6286, 6287, 6412], [6286, 6412, 6411], [6287, 6288, 6412], [6288, 6413, 6412], [6291, 6292, 6414], [6292, 6415, 6414], [6292, 6293, 6416], [6292, 6416, 6415], [6293, 6294, 6416], [6294, 6417, 6416], [6294, 6295, 6418], [6294, 6418, 6417], [6295, 6296, 6418], [6296, 6419, 6418], [6296, 6297, 6420], [6296, 6420, 6419], [6297, 6298, 6420], [6298, 6421, 6420], [6298, 6299, 6422], [6298, 6422, 6421], [6299, 6300, 6422], [6300, 6423, 6422], [6300, 6301, 6424], [6300, 6424, 6423], [6301, 6302, 6424], [6302, 6425, 6424], [6302, 6303, 6426], [6302, 6426, 6425], [6303, 6304, 6426], [6304, 6427, 6426], [6304, 6305, 6428], [6304, 6428, 6427], [6305, 6306, 6428], [6306, 6429, 6428], [6306, 6307, 6430], [6306, 6430, 6429], [6307, 6308, 6430], [6308, 6431, 6430], [6308, 6309, 6432], [6308, 6432, 6431], [6309, 6310, 6432], [6310, 6433, 6432], [6310, 6311, 6434], [6310, 6434, 6433], [6311, 6312, 6434], [6312, 6435, 6434], [6312, 6313, 6436], [6312, 6436, 6435], [6313, 6314, 
6436], [6314, 6437, 6436], [6315, 6316, 6438], [6316, 6439, 6438], [6316, 6317, 6440], [6316, 6440, 6439], [6317, 6318, 6440], [6318, 6441, 6440], [6318, 6319, 6442], [6318, 6442, 6441], [6319, 6320, 6442], [6320, 6443, 6442], [6320, 6321, 6444], [6320, 6444, 6443], [6321, 6322, 6444], [6322, 6445, 6444], [6322, 6323, 6446], [6322, 6446, 6445], [6323, 6324, 6446], [6324, 6447, 6446], [6324, 6325, 6448], [6324, 6448, 6447], [6325, 6326, 6448], [6326, 6449, 6448], [6326, 6327, 6450], [6326, 6450, 6449], [6327, 6328, 6450], [6328, 6451, 6450], [6328, 6329, 6452], [6328, 6452, 6451], [6329, 6330, 6452], [6330, 6453, 6452], [6330, 6331, 6454], [6330, 6454, 6453], [6331, 6332, 6454], [6332, 6455, 6454], [6332, 6333, 6456], [6332, 6456, 6455], [6333, 6334, 6456], [6334, 6457, 6456], [6334, 6335, 6458], [6334, 6458, 6457], [6335, 6336, 6458], [6336, 6459, 6458], [6336, 6337, 6460], [6336, 6460, 6459], [6337, 6338, 6460], [6338, 6461, 6460], [6338, 6339, 6462], [6338, 6462, 6461], [6339, 6340, 6462], [6340, 6463, 6462], [6340, 6341, 6464], [6340, 6464, 6463], [6341, 6342, 6464], [6342, 6465, 6464], [6342, 6343, 6466], [6342, 6466, 6465], [6343, 6344, 6466], [6344, 6467, 6466], [6344, 6345, 6468], [6344, 6468, 6467], [6345, 6346, 6468], [6346, 6469, 6468], [6346, 6347, 6470], [6346, 6470, 6469], [6347, 6348, 6470], [6348, 6471, 6470], [6348, 6349, 6472], [6348, 6472, 6471], [6349, 6350, 6472], [6350, 6473, 6472], [6350, 6351, 6474], [6350, 6474, 6473], [6351, 6352, 6474], [6352, 6475, 6474], [6352, 6353, 6476], [6352, 6476, 6475], [6353, 6354, 6476], [6354, 6477, 6476], [6354, 6355, 6478], [6354, 6478, 6477], [6355, 6356, 6478], [6356, 6479, 6478], [6356, 6357, 6480], [6356, 6480, 6479], [6357, 6358, 6480], [6358, 6481, 6480], [6358, 6359, 6482], [6358, 6482, 6481], [6359, 6360, 6482], [6360, 6483, 6482], [6360, 6361, 6484], [6360, 6484, 6483], [6361, 6362, 6484], [6362, 6485, 6484], [6362, 6363, 6486], [6362, 6486, 6485], [6363, 6364, 6486], [6364, 6487, 6486], [6364, 6365, 6488], [6364, 6488, 6487], [6365, 6366, 6488], [6366, 6489, 6488], [6366, 6367, 6490], [6366, 6490, 6489], [6367, 6368, 6490], [6368, 6491, 6490], [6368, 6369, 6492], [6368, 6492, 6491], [6369, 6370, 6492], [6370, 6493, 6492], [6370, 6371, 6494], [6370, 6494, 6493], [6371, 6372, 6494], [6372, 6495, 6494], [6372, 6373, 6496], [6372, 6496, 6495], [6373, 6374, 6496], [6374, 6497, 6496], [6374, 6375, 6498], [6374, 6498, 6497], [6375, 6376, 6498], [6376, 6499, 6498], [6376, 6377, 6500], [6376, 6500, 6499], [6377, 6378, 6500], [6378, 6501, 6500], [6378, 6379, 6502], [6378, 6502, 6501], [6379, 6380, 6502], [6380, 6503, 6502], [6380, 6381, 6504], [6380, 6504, 6503], [6381, 6382, 6504], [6382, 6505, 6504], [6382, 6383, 6506], [6382, 6506, 6505], [6383, 6384, 6506], [6384, 6507, 6506], [6384, 6385, 6508], [6384, 6508, 6507], [6385, 6386, 6508], [6386, 6509, 6508], [6386, 6387, 6510], [6386, 6510, 6509], [6387, 6388, 6510], [6388, 6511, 6510], [6388, 6389, 6512], [6388, 6512, 6511], [6389, 6390, 6512], [6390, 6513, 6512], [6390, 6391, 6514], [6390, 6514, 6513], [6391, 6392, 6514], [6392, 6515, 6514], [6392, 6393, 6516], [6392, 6516, 6515], [6393, 6394, 6516], [6394, 6517, 6516], [6394, 6395, 6518], [6394, 6518, 6517], [6395, 6396, 6518], [6396, 6519, 6518], [6396, 6397, 6520], [6396, 6520, 6519], [6397, 6398, 6520], [6398, 6521, 6520], [6398, 6399, 6522], [6398, 6522, 6521], [6399, 6400, 6522], [6400, 6523, 6522], [6400, 6401, 6524], [6400, 6524, 6523], [6401, 6402, 6524], [6402, 6525, 6524], [6402, 6403, 6526], [6402, 6526, 6525], [6403, 
6404, 6526], [6404, 6527, 6526], [6404, 6405, 6528], [6404, 6528, 6527], [6405, 6406, 6528], [6406, 6529, 6528], [6406, 6407, 6530], [6406, 6530, 6529], [6407, 6408, 6530], [6408, 6531, 6530], [6408, 6409, 6532], [6408, 6532, 6531], [6409, 6410, 6532], [6410, 6533, 6532], [6410, 6411, 6534], [6410, 6534, 6533], [6411, 6412, 6534], [6412, 6535, 6534], [6412, 6413, 6536], [6412, 6536, 6535], [6416, 6417, 6537], [6417, 6418, 6537], [6418, 6538, 6537], [6418, 6419, 6539], [6418, 6539, 6538], [6419, 6420, 6539], [6420, 6540, 6539], [6420, 6421, 6541], [6420, 6541, 6540], [6421, 6422, 6541], [6422, 6542, 6541], [6422, 6423, 6543], [6422, 6543, 6542], [6423, 6424, 6543], [6424, 6544, 6543], [6424, 6425, 6545], [6424, 6545, 6544], [6425, 6426, 6545], [6426, 6546, 6545], [6426, 6427, 6547], [6426, 6547, 6546], [6427, 6428, 6547], [6428, 6548, 6547], [6428, 6429, 6549], [6428, 6549, 6548], [6429, 6430, 6549], [6430, 6550, 6549], [6430, 6431, 6551], [6430, 6551, 6550], [6431, 6432, 6551], [6432, 6552, 6551], [6432, 6433, 6553], [6432, 6553, 6552], [6433, 6434, 6553], [6434, 6554, 6553], [6434, 6435, 6555], [6434, 6555, 6554], [6435, 6436, 6555], [6436, 6556, 6555], [6436, 6437, 6557], [6436, 6557, 6556], [6438, 6439, 6559], [6438, 6559, 6558], [6439, 6440, 6559], [6440, 6560, 6559], [6440, 6441, 6561], [6440, 6561, 6560], [6441, 6442, 6561], [6442, 6562, 6561], [6442, 6443, 6563], [6442, 6563, 6562], [6443, 6444, 6563], [6444, 6564, 6563], [6444, 6445, 6565], [6444, 6565, 6564], [6445, 6446, 6565], [6446, 6566, 6565], [6446, 6447, 6567], [6446, 6567, 6566], [6447, 6448, 6567], [6448, 6568, 6567], [6448, 6449, 6569], [6448, 6569, 6568], [6449, 6450, 6569], [6450, 6570, 6569], [6450, 6451, 6571], [6450, 6571, 6570], [6451, 6452, 6571], [6452, 6572, 6571], [6452, 6453, 6573], [6452, 6573, 6572], [6453, 6454, 6573], [6454, 6574, 6573], [6454, 6455, 6575], [6454, 6575, 6574], [6455, 6456, 6575], [6456, 6576, 6575], [6456, 6457, 6577], [6456, 6577, 6576], [6457, 6458, 6577], [6458, 6578, 6577], [6458, 6459, 6579], [6458, 6579, 6578], [6459, 6460, 6579], [6460, 6580, 6579], [6460, 6461, 6581], [6460, 6581, 6580], [6461, 6462, 6581], [6462, 6582, 6581], [6462, 6463, 6583], [6462, 6583, 6582], [6463, 6464, 6583], [6464, 6584, 6583], [6464, 6465, 6585], [6464, 6585, 6584], [6465, 6466, 6585], [6466, 6586, 6585], [6466, 6467, 6587], [6466, 6587, 6586], [6467, 6468, 6587], [6468, 6588, 6587], [6468, 6469, 6589], [6468, 6589, 6588], [6469, 6470, 6589], [6470, 6590, 6589], [6470, 6471, 6591], [6470, 6591, 6590], [6471, 6472, 6591], [6472, 6592, 6591], [6472, 6473, 6593], [6472, 6593, 6592], [6473, 6474, 6593], [6474, 6594, 6593], [6474, 6475, 6595], [6474, 6595, 6594], [6475, 6476, 6595], [6476, 6596, 6595], [6476, 6477, 6597], [6476, 6597, 6596], [6477, 6478, 6597], [6478, 6598, 6597], [6478, 6479, 6599], [6478, 6599, 6598], [6479, 6480, 6599], [6480, 6600, 6599], [6480, 6481, 6601], [6480, 6601, 6600], [6481, 6482, 6601], [6482, 6602, 6601], [6482, 6483, 6603], [6482, 6603, 6602], [6483, 6484, 6603], [6484, 6604, 6603], [6484, 6485, 6605], [6484, 6605, 6604], [6485, 6486, 6605], [6486, 6606, 6605], [6486, 6487, 6607], [6486, 6607, 6606], [6487, 6488, 6607], [6488, 6608, 6607], [6488, 6489, 6609], [6488, 6609, 6608], [6489, 6490, 6609], [6490, 6610, 6609], [6490, 6491, 6611], [6490, 6611, 6610], [6491, 6492, 6611], [6492, 6612, 6611], [6492, 6493, 6613], [6492, 6613, 6612], [6493, 6494, 6613], [6494, 6614, 6613], [6494, 6495, 6615], [6494, 6615, 6614], [6495, 6496, 6615], [6496, 6616, 6615], [6496, 6497, 6617], 
[6496, 6617, 6616], [6497, 6498, 6617], [6498, 6618, 6617], [6498, 6499, 6619], [6498, 6619, 6618], [6499, 6500, 6619], [6500, 6620, 6619], [6500, 6501, 6621], [6500, 6621, 6620], [6501, 6502, 6621], [6502, 6622, 6621], [6502, 6503, 6623], [6502, 6623, 6622], [6503, 6504, 6623], [6504, 6624, 6623], [6504, 6505, 6625], [6504, 6625, 6624], [6505, 6506, 6625], [6506, 6626, 6625], [6506, 6507, 6627], [6506, 6627, 6626], [6507, 6508, 6627], [6508, 6628, 6627], [6508, 6509, 6629], [6508, 6629, 6628], [6509, 6510, 6629], [6510, 6630, 6629], [6510, 6511, 6631], [6510, 6631, 6630], [6511, 6512, 6631], [6512, 6632, 6631], [6512, 6513, 6633], [6512, 6633, 6632], [6513, 6514, 6633], [6514, 6634, 6633], [6514, 6515, 6635], [6514, 6635, 6634], [6515, 6516, 6635], [6516, 6636, 6635], [6516, 6517, 6637], [6516, 6637, 6636], [6517, 6518, 6637], [6518, 6638, 6637], [6518, 6519, 6639], [6518, 6639, 6638], [6519, 6520, 6639], [6520, 6640, 6639], [6520, 6521, 6641], [6520, 6641, 6640], [6521, 6522, 6641], [6522, 6642, 6641], [6522, 6523, 6643], [6522, 6643, 6642], [6523, 6524, 6643], [6524, 6644, 6643], [6524, 6525, 6645], [6524, 6645, 6644], [6525, 6526, 6645], [6526, 6646, 6645], [6526, 6527, 6647], [6526, 6647, 6646], [6527, 6528, 6647], [6528, 6648, 6647], [6528, 6529, 6649], [6528, 6649, 6648], [6529, 6530, 6649], [6530, 6650, 6649], [6530, 6531, 6651], [6530, 6651, 6650], [6531, 6532, 6651], [6532, 6652, 6651], [6532, 6533, 6653], [6532, 6653, 6652], [6533, 6534, 6653], [6534, 6654, 6653], [6534, 6535, 6655], [6534, 6655, 6654], [6535, 6536, 6655], [6536, 6656, 6655], [6539, 6540, 6658], [6539, 6658, 6657], [6540, 6541, 6658], [6541, 6659, 6658], [6541, 6542, 6660], [6541, 6660, 6659], [6542, 6543, 6660], [6543, 6661, 6660], [6543, 6544, 6662], [6543, 6662, 6661], [6544, 6545, 6662], [6545, 6663, 6662], [6545, 6546, 6664], [6545, 6664, 6663], [6546, 6547, 6664], [6547, 6665, 6664], [6547, 6548, 6666], [6547, 6666, 6665], [6548, 6549, 6666], [6549, 6667, 6666], [6549, 6550, 6668], [6549, 6668, 6667], [6550, 6551, 6668], [6551, 6669, 6668], [6551, 6552, 6670], [6551, 6670, 6669], [6552, 6553, 6670], [6553, 6671, 6670], [6553, 6554, 6672], [6553, 6672, 6671], [6554, 6555, 6672], [6555, 6673, 6672], [6555, 6556, 6674], [6555, 6674, 6673], [6556, 6557, 6674], [6557, 6675, 6674], [6558, 6559, 6676], [6559, 6677, 6676], [6559, 6560, 6678], [6559, 6678, 6677], [6560, 6561, 6678], [6561, 6679, 6678], [6561, 6562, 6680], [6561, 6680, 6679], [6562, 6563, 6680], [6563, 6681, 6680], [6563, 6564, 6682], [6563, 6682, 6681], [6564, 6565, 6682], [6565, 6683, 6682], [6565, 6566, 6684], [6565, 6684, 6683], [6566, 6567, 6684], [6567, 6685, 6684], [6567, 6568, 6686], [6567, 6686, 6685], [6568, 6569, 6686], [6569, 6687, 6686], [6569, 6570, 6688], [6569, 6688, 6687], [6570, 6571, 6688], [6571, 6689, 6688], [6571, 6572, 6690], [6571, 6690, 6689], [6572, 6573, 6690], [6573, 6691, 6690], [6573, 6574, 6692], [6573, 6692, 6691], [6574, 6575, 6692], [6575, 6693, 6692], [6575, 6576, 6694], [6575, 6694, 6693], [6576, 6577, 6694], [6577, 6695, 6694], [6577, 6578, 6696], [6577, 6696, 6695], [6578, 6579, 6696], [6579, 6697, 6696], [6579, 6580, 6698], [6579, 6698, 6697], [6580, 6581, 6698], [6581, 6699, 6698], [6581, 6582, 6700], [6581, 6700, 6699], [6582, 6583, 6700], [6583, 6701, 6700], [6583, 6584, 6702], [6583, 6702, 6701], [6584, 6585, 6702], [6585, 6703, 6702], [6585, 6586, 6704], [6585, 6704, 6703], [6586, 6587, 6704], [6587, 6705, 6704], [6587, 6588, 6706], [6587, 6706, 6705], [6588, 6589, 6706], [6589, 6707, 6706], [6589, 6590, 
6708], [6589, 6708, 6707], [6590, 6591, 6708], [6591, 6709, 6708], [6591, 6592, 6710], [6591, 6710, 6709], [6592, 6593, 6710], [6593, 6711, 6710], [6593, 6594, 6712], [6593, 6712, 6711], [6594, 6595, 6712], [6595, 6713, 6712], [6595, 6596, 6714], [6595, 6714, 6713], [6596, 6597, 6714], [6597, 6715, 6714], [6597, 6598, 6716], [6597, 6716, 6715], [6598, 6599, 6716], [6599, 6717, 6716], [6599, 6600, 6718], [6599, 6718, 6717], [6600, 6601, 6718], [6601, 6719, 6718], [6601, 6602, 6720], [6601, 6720, 6719], [6602, 6603, 6720], [6603, 6721, 6720], [6603, 6604, 6722], [6603, 6722, 6721], [6604, 6605, 6722], [6605, 6723, 6722], [6605, 6606, 6724], [6605, 6724, 6723], [6606, 6607, 6724], [6607, 6725, 6724], [6607, 6608, 6726], [6607, 6726, 6725], [6608, 6609, 6726], [6609, 6727, 6726], [6609, 6610, 6728], [6609, 6728, 6727], [6610, 6611, 6728], [6611, 6729, 6728], [6611, 6612, 6730], [6611, 6730, 6729], [6612, 6613, 6730], [6613, 6731, 6730], [6613, 6614, 6732], [6613, 6732, 6731], [6614, 6615, 6732], [6615, 6733, 6732], [6615, 6616, 6734], [6615, 6734, 6733], [6616, 6617, 6734], [6617, 6735, 6734], [6617, 6618, 6736], [6617, 6736, 6735], [6618, 6619, 6736], [6619, 6737, 6736], [6619, 6620, 6738], [6619, 6738, 6737], [6620, 6621, 6738], [6621, 6739, 6738], [6621, 6622, 6740], [6621, 6740, 6739], [6622, 6623, 6740], [6623, 6741, 6740], [6623, 6624, 6742], [6623, 6742, 6741], [6624, 6625, 6742], [6625, 6743, 6742], [6625, 6626, 6744], [6625, 6744, 6743], [6626, 6627, 6744], [6627, 6745, 6744], [6627, 6628, 6746], [6627, 6746, 6745], [6628, 6629, 6746], [6629, 6747, 6746], [6629, 6630, 6748], [6629, 6748, 6747], [6630, 6631, 6748], [6631, 6749, 6748], [6631, 6632, 6750], [6631, 6750, 6749], [6632, 6633, 6750], [6633, 6751, 6750], [6633, 6634, 6752], [6633, 6752, 6751], [6634, 6635, 6752], [6635, 6753, 6752], [6635, 6636, 6754], [6635, 6754, 6753], [6636, 6637, 6754], [6637, 6755, 6754], [6637, 6638, 6756], [6637, 6756, 6755], [6638, 6639, 6756], [6639, 6757, 6756], [6639, 6640, 6758], [6639, 6758, 6757], [6640, 6641, 6758], [6641, 6759, 6758], [6641, 6642, 6760], [6641, 6760, 6759], [6642, 6643, 6760], [6643, 6761, 6760], [6643, 6644, 6762], [6643, 6762, 6761], [6644, 6645, 6762], [6645, 6763, 6762], [6645, 6646, 6764], [6645, 6764, 6763], [6646, 6647, 6764], [6647, 6765, 6764], [6647, 6648, 6766], [6647, 6766, 6765], [6648, 6649, 6766], [6649, 6767, 6766], [6649, 6650, 6768], [6649, 6768, 6767], [6650, 6651, 6768], [6651, 6769, 6768], [6651, 6652, 6770], [6651, 6770, 6769], [6652, 6653, 6770], [6653, 6771, 6770], [6653, 6654, 6772], [6653, 6772, 6771], [6654, 6655, 6772], [6655, 6773, 6772], [6655, 6656, 6774], [6655, 6774, 6773], [6657, 6658, 6775], [6658, 6776, 6775], [6658, 6659, 6777], [6658, 6777, 6776], [6659, 6660, 6777], [6660, 6778, 6777], [6660, 6661, 6779], [6660, 6779, 6778], [6661, 6662, 6779], [6662, 6780, 6779], [6662, 6663, 6781], [6662, 6781, 6780], [6663, 6664, 6781], [6664, 6782, 6781], [6664, 6665, 6783], [6664, 6783, 6782], [6665, 6666, 6783], [6666, 6784, 6783], [6666, 6667, 6785], [6666, 6785, 6784], [6667, 6668, 6785], [6668, 6786, 6785], [6668, 6669, 6787], [6668, 6787, 6786], [6669, 6670, 6787], [6670, 6788, 6787], [6670, 6671, 6789], [6670, 6789, 6788], [6671, 6672, 6789], [6672, 6790, 6789], [6672, 6673, 6791], [6672, 6791, 6790], [6673, 6674, 6791], [6674, 6792, 6791], [6674, 6675, 6793], [6674, 6793, 6792], [6676, 6677, 6795], [6676, 6795, 6794], [6677, 6678, 6795], [6678, 6796, 6795], [6678, 6679, 6797], [6678, 6797, 6796], [6679, 6680, 6797], [6680, 6798, 6797], [6680, 
6681, 6799], [6680, 6799, 6798], [6681, 6682, 6799], [6682, 6800, 6799], [6682, 6683, 6801], [6682, 6801, 6800], [6683, 6684, 6801], [6684, 6802, 6801], [6684, 6685, 6803], [6684, 6803, 6802], [6685, 6686, 6803], [6686, 6804, 6803], [6686, 6687, 6805], [6686, 6805, 6804], [6687, 6688, 6805], [6688, 6806, 6805], [6688, 6689, 6807], [6688, 6807, 6806], [6689, 6690, 6807], [6690, 6808, 6807], [6690, 6691, 6809], [6690, 6809, 6808], [6691, 6692, 6809], [6692, 6810, 6809], [6692, 6693, 6811], [6692, 6811, 6810], [6693, 6694, 6811], [6694, 6812, 6811], [6694, 6695, 6813], [6694, 6813, 6812], [6695, 6696, 6813], [6696, 6814, 6813], [6696, 6697, 6815], [6696, 6815, 6814], [6697, 6698, 6815], [6698, 6816, 6815], [6698, 6699, 6817], [6698, 6817, 6816], [6699, 6700, 6817], [6700, 6818, 6817], [6700, 6701, 6819], [6700, 6819, 6818], [6701, 6702, 6819], [6702, 6820, 6819], [6702, 6703, 6821], [6702, 6821, 6820], [6703, 6704, 6821], [6704, 6822, 6821], [6704, 6705, 6823], [6704, 6823, 6822], [6705, 6706, 6823], [6706, 6824, 6823], [6706, 6707, 6825], [6706, 6825, 6824], [6707, 6708, 6825], [6708, 6826, 6825], [6708, 6709, 6827], [6708, 6827, 6826], [6709, 6710, 6827], [6710, 6828, 6827], [6710, 6711, 6829], [6710, 6829, 6828], [6711, 6712, 6829], [6712, 6830, 6829], [6712, 6713, 6831], [6712, 6831, 6830], [6713, 6714, 6831], [6714, 6832, 6831], [6714, 6715, 6833], [6714, 6833, 6832], [6715, 6716, 6833], [6716, 6834, 6833], [6716, 6717, 6835], [6716, 6835, 6834], [6717, 6718, 6835], [6718, 6836, 6835], [6718, 6719, 6837], [6718, 6837, 6836], [6719, 6720, 6837], [6720, 6838, 6837], [6720, 6721, 6839], [6720, 6839, 6838], [6721, 6722, 6839], [6722, 6840, 6839], [6722, 6723, 6841], [6722, 6841, 6840], [6723, 6724, 6841], [6724, 6842, 6841], [6724, 6725, 6843], [6724, 6843, 6842], [6725, 6726, 6843], [6726, 6844, 6843], [6726, 6727, 6845], [6726, 6845, 6844], [6727, 6728, 6845], [6728, 6846, 6845], [6728, 6729, 6847], [6728, 6847, 6846], [6729, 6730, 6847], [6730, 6848, 6847], [6730, 6731, 6849], [6730, 6849, 6848], [6731, 6732, 6849], [6732, 6850, 6849], [6732, 6733, 6851], [6732, 6851, 6850], [6733, 6734, 6851], [6734, 6852, 6851], [6734, 6735, 6853], [6734, 6853, 6852], [6735, 6736, 6853], [6736, 6854, 6853], [6736, 6737, 6855], [6736, 6855, 6854], [6737, 6738, 6855], [6738, 6856, 6855], [6738, 6739, 6857], [6738, 6857, 6856], [6739, 6740, 6857], [6740, 6858, 6857], [6740, 6741, 6859], [6740, 6859, 6858], [6741, 6742, 6859], [6742, 6860, 6859], [6742, 6743, 6861], [6742, 6861, 6860], [6743, 6744, 6861], [6744, 6862, 6861], [6744, 6745, 6863], [6744, 6863, 6862], [6745, 6746, 6863], [6746, 6864, 6863], [6746, 6747, 6865], [6746, 6865, 6864], [6747, 6748, 6865], [6748, 6866, 6865], [6748, 6749, 6867], [6748, 6867, 6866], [6749, 6750, 6867], [6750, 6868, 6867], [6750, 6751, 6869], [6750, 6869, 6868], [6751, 6752, 6869], [6752, 6870, 6869], [6752, 6753, 6871], [6752, 6871, 6870], [6753, 6754, 6871], [6754, 6872, 6871], [6754, 6755, 6873], [6754, 6873, 6872], [6755, 6756, 6873], [6756, 6874, 6873], [6756, 6757, 6875], [6756, 6875, 6874], [6757, 6758, 6875], [6758, 6876, 6875], [6758, 6759, 6877], [6758, 6877, 6876], [6759, 6760, 6877], [6760, 6878, 6877], [6760, 6761, 6879], [6760, 6879, 6878], [6761, 6762, 6879], [6762, 6880, 6879], [6762, 6763, 6881], [6762, 6881, 6880], [6763, 6764, 6881], [6764, 6882, 6881], [6764, 6765, 6883], [6764, 6883, 6882], [6765, 6766, 6883], [6766, 6884, 6883], [6766, 6767, 6885], [6766, 6885, 6884], [6767, 6768, 6885], [6768, 6886, 6885], [6768, 6769, 6887], [6768, 6887, 6886], 
[6769, 6770, 6887], [6770, 6888, 6887], [6770, 6771, 6889], [6770, 6889, 6888], [6771, 6772, 6889], [6772, 6890, 6889], [6772, 6773, 6891], [6772, 6891, 6890], [6773, 6774, 6891], [6774, 6892, 6891], [6775, 6776, 6894], [6775, 6894, 6893], [6776, 6777, 6894], [6777, 6895, 6894], [6777, 6778, 6896], [6777, 6896, 6895], [6778, 6779, 6896], [6779, 6897, 6896], [6779, 6780, 6898], [6779, 6898, 6897], [6780, 6781, 6898], [6781, 6899, 6898], [6781, 6782, 6900], [6781, 6900, 6899], [6782, 6783, 6900], [6783, 6901, 6900], [6783, 6784, 6902], [6783, 6902, 6901], [6784, 6785, 6902], [6785, 6903, 6902], [6785, 6786, 6904], [6785, 6904, 6903], [6786, 6787, 6904], [6787, 6905, 6904], [6787, 6788, 6906], [6787, 6906, 6905], [6788, 6789, 6906], [6789, 6907, 6906], [6789, 6790, 6908], [6789, 6908, 6907], [6790, 6791, 6908], [6791, 6909, 6908], [6791, 6792, 6910], [6791, 6910, 6909], [6792, 6793, 6910], [6793, 6911, 6910], [6794, 6795, 6912], [6795, 6913, 6912], [6795, 6796, 6914], [6795, 6914, 6913], [6796, 6797, 6914], [6797, 6915, 6914], [6797, 6798, 6916], [6797, 6916, 6915], [6798, 6799, 6916], [6799, 6917, 6916], [6799, 6800, 6918], [6799, 6918, 6917], [6800, 6801, 6918], [6801, 6919, 6918], [6801, 6802, 6920], [6801, 6920, 6919], [6802, 6803, 6920], [6803, 6921, 6920], [6803, 6804, 6922], [6803, 6922, 6921], [6804, 6805, 6922], [6805, 6923, 6922], [6805, 6806, 6924], [6805, 6924, 6923], [6806, 6807, 6924], [6807, 6925, 6924], [6807, 6808, 6926], [6807, 6926, 6925], [6808, 6809, 6926], [6809, 6927, 6926], [6809, 6810, 6928], [6809, 6928, 6927], [6810, 6811, 6928], [6811, 6929, 6928], [6811, 6812, 6930], [6811, 6930, 6929], [6812, 6813, 6930], [6813, 6931, 6930], [6813, 6814, 6932], [6813, 6932, 6931], [6814, 6815, 6932], [6815, 6933, 6932], [6815, 6816, 6934], [6815, 6934, 6933], [6816, 6817, 6934], [6817, 6935, 6934], [6817, 6818, 6936], [6817, 6936, 6935], [6818, 6819, 6936], [6819, 6937, 6936], [6819, 6820, 6938], [6819, 6938, 6937], [6820, 6821, 6938], [6821, 6939, 6938], [6821, 6822, 6940], [6821, 6940, 6939], [6822, 6823, 6940], [6823, 6941, 6940], [6823, 6824, 6942], [6823, 6942, 6941], [6824, 6825, 6942], [6825, 6943, 6942], [6825, 6826, 6944], [6825, 6944, 6943], [6826, 6827, 6944], [6827, 6945, 6944], [6827, 6828, 6946], [6827, 6946, 6945], [6828, 6829, 6946], [6829, 6947, 6946], [6829, 6830, 6948], [6829, 6948, 6947], [6830, 6831, 6948], [6831, 6949, 6948], [6831, 6832, 6950], [6831, 6950, 6949], [6832, 6833, 6950], [6833, 6951, 6950], [6833, 6834, 6952], [6833, 6952, 6951], [6834, 6835, 6952], [6835, 6953, 6952], [6835, 6836, 6954], [6835, 6954, 6953], [6836, 6837, 6954], [6837, 6955, 6954], [6837, 6838, 6956], [6837, 6956, 6955], [6838, 6839, 6956], [6839, 6957, 6956], [6839, 6840, 6958], [6839, 6958, 6957], [6840, 6841, 6958], [6841, 6959, 6958], [6841, 6842, 6960], [6841, 6960, 6959], [6842, 6843, 6960], [6843, 6961, 6960], [6843, 6844, 6962], [6843, 6962, 6961], [6844, 6845, 6962], [6845, 6963, 6962], [6845, 6846, 6964], [6845, 6964, 6963], [6846, 6847, 6964], [6847, 6965, 6964], [6847, 6848, 6966], [6847, 6966, 6965], [6848, 6849, 6966], [6849, 6967, 6966], [6849, 6850, 6968], [6849, 6968, 6967], [6850, 6851, 6968], [6851, 6969, 6968], [6851, 6852, 6970], [6851, 6970, 6969], [6852, 6853, 6970], [6853, 6971, 6970], [6853, 6854, 6972], [6853, 6972, 6971], [6854, 6855, 6972], [6855, 6973, 6972], [6855, 6856, 6974], [6855, 6974, 6973], [6856, 6857, 6974], [6857, 6975, 6974], [6857, 6858, 6976], [6857, 6976, 6975], [6858, 6859, 6976], [6859, 6977, 6976], [6859, 6860, 6978], [6859, 6978, 
6977], [6860, 6861, 6978], [6861, 6979, 6978], [6861, 6862, 6980], [6861, 6980, 6979], [6862, 6863, 6980], [6863, 6981, 6980], [6863, 6864, 6982], [6863, 6982, 6981], [6864, 6865, 6982], [6865, 6983, 6982], [6865, 6866, 6984], [6865, 6984, 6983], [6866, 6867, 6984], [6867, 6985, 6984], [6867, 6868, 6986], [6867, 6986, 6985], [6868, 6869, 6986], [6869, 6987, 6986], [6869, 6870, 6988], [6869, 6988, 6987], [6870, 6871, 6988], [6871, 6989, 6988], [6871, 6872, 6990], [6871, 6990, 6989], [6872, 6873, 6990], [6873, 6991, 6990], [6873, 6874, 6992], [6873, 6992, 6991], [6874, 6875, 6992], [6875, 6993, 6992], [6875, 6876, 6994], [6875, 6994, 6993], [6876, 6877, 6994], [6877, 6995, 6994], [6877, 6878, 6996], [6877, 6996, 6995], [6878, 6879, 6996], [6879, 6997, 6996], [6879, 6880, 6998], [6879, 6998, 6997], [6880, 6881, 6998], [6881, 6999, 6998], [6881, 6882, 7000], [6881, 7000, 6999], [6882, 6883, 7000], [6883, 7001, 7000], [6883, 6884, 7002], [6883, 7002, 7001], [6884, 6885, 7002], [6885, 7003, 7002], [6885, 6886, 7004], [6885, 7004, 7003], [6886, 6887, 7004], [6887, 7005, 7004], [6887, 6888, 7006], [6887, 7006, 7005], [6888, 6889, 7006], [6889, 7007, 7006], [6889, 6890, 7008], [6889, 7008, 7007], [6890, 6891, 7008], [6891, 7009, 7008], [6891, 6892, 7010], [6891, 7010, 7009], [6893, 6894, 7011], [6894, 7012, 7011], [6894, 6895, 7013], [6894, 7013, 7012], [6895, 6896, 7013], [6896, 7014, 7013], [6896, 6897, 7015], [6896, 7015, 7014], [6897, 6898, 7015], [6898, 7016, 7015], [6898, 6899, 7017], [6898, 7017, 7016], [6899, 6900, 7017], [6900, 7018, 7017], [6900, 6901, 7019], [6900, 7019, 7018], [6901, 6902, 7019], [6902, 7020, 7019], [6902, 6903, 7021], [6902, 7021, 7020], [6903, 6904, 7021], [6904, 7022, 7021], [6904, 6905, 7023], [6904, 7023, 7022], [6905, 6906, 7023], [6906, 7024, 7023], [6906, 6907, 7025], [6906, 7025, 7024], [6907, 6908, 7025], [6908, 7026, 7025], [6908, 6909, 7027], [6908, 7027, 7026], [6909, 6910, 7027], [6910, 7028, 7027], [6910, 6911, 7029], [6910, 7029, 7028], [6912, 6913, 7031], [6912, 7031, 7030], [6913, 6914, 7031], [6914, 7032, 7031], [6914, 6915, 7033], [6914, 7033, 7032], [6915, 6916, 7033], [6916, 7034, 7033], [6916, 6917, 7035], [6916, 7035, 7034], [6917, 6918, 7035], [6918, 7036, 7035], [6918, 6919, 7037], [6918, 7037, 7036], [6919, 6920, 7037], [6920, 7038, 7037], [6920, 6921, 7039], [6920, 7039, 7038], [6921, 6922, 7039], [6922, 7040, 7039], [6922, 6923, 7041], [6922, 7041, 7040], [6923, 6924, 7041], [6924, 7042, 7041], [6924, 6925, 7043], [6924, 7043, 7042], [6925, 6926, 7043], [6926, 7044, 7043], [6926, 6927, 7045], [6926, 7045, 7044], [6927, 6928, 7045], [6928, 7046, 7045], [6928, 6929, 7047], [6928, 7047, 7046], [6929, 6930, 7047], [6930, 7048, 7047], [6930, 6931, 7049], [6930, 7049, 7048], [6931, 6932, 7049], [6932, 7050, 7049], [6932, 6933, 7051], [6932, 7051, 7050], [6933, 6934, 7051], [6934, 7052, 7051], [6934, 6935, 7053], [6934, 7053, 7052], [6935, 6936, 7053], [6936, 7054, 7053], [6936, 6937, 7055], [6936, 7055, 7054], [6937, 6938, 7055], [6938, 7056, 7055], [6938, 6939, 7057], [6938, 7057, 7056], [6939, 6940, 7057], [6940, 7058, 7057], [6940, 6941, 7059], [6940, 7059, 7058], [6941, 6942, 7059], [6942, 7060, 7059], [6942, 6943, 7061], [6942, 7061, 7060], [6943, 6944, 7061], [6944, 7062, 7061], [6944, 6945, 7063], [6944, 7063, 7062], [6945, 6946, 7063], [6946, 7064, 7063], [6946, 6947, 7065], [6946, 7065, 7064], [6947, 6948, 7065], [6948, 7066, 7065], [6948, 6949, 7067], [6948, 7067, 7066], [6949, 6950, 7067], [6950, 7068, 7067], [6950, 6951, 7069], [6950, 
7069, 7068], [6951, 6952, 7069], [6952, 7070, 7069], [6952, 6953, 7071], [6952, 7071, 7070], [6953, 6954, 7071], [6954, 7072, 7071], [6954, 6955, 7073], [6954, 7073, 7072], [6955, 6956, 7073], [6956, 7074, 7073], [6956, 6957, 7075], [6956, 7075, 7074], [6957, 6958, 7075], [6958, 7076, 7075], [6958, 6959, 7077], [6958, 7077, 7076], [6959, 6960, 7077], [6960, 7078, 7077], [6960, 6961, 7079], [6960, 7079, 7078], [6961, 6962, 7079], [6962, 7080, 7079], [6962, 6963, 7081], [6962, 7081, 7080], [6963, 6964, 7081], [6964, 7082, 7081], [6964, 6965, 7083], [6964, 7083, 7082], [6965, 6966, 7083], [6966, 7084, 7083], [6966, 6967, 7085], [6966, 7085, 7084], [6967, 6968, 7085], [6968, 7086, 7085], [6968, 6969, 7087], [6968, 7087, 7086], [6969, 6970, 7087], [6970, 7088, 7087], [6970, 6971, 7089], [6970, 7089, 7088], [6971, 6972, 7089], [6972, 7090, 7089], [6972, 6973, 7091], [6972, 7091, 7090], [6973, 6974, 7091], [6974, 7092, 7091], [6974, 6975, 7093], [6974, 7093, 7092], [6975, 6976, 7093], [6976, 7094, 7093], [6976, 6977, 7095], [6976, 7095, 7094], [6977, 6978, 7095], [6978, 7096, 7095], [6978, 6979, 7097], [6978, 7097, 7096], [6979, 6980, 7097], [6980, 7098, 7097], [6980, 6981, 7099], [6980, 7099, 7098], [6981, 6982, 7099], [6982, 7100, 7099], [6982, 6983, 7101], [6982, 7101, 7100], [6983, 6984, 7101], [6984, 7102, 7101], [6984, 6985, 7103], [6984, 7103, 7102], [6985, 6986, 7103], [6986, 7104, 7103], [6986, 6987, 7105], [6986, 7105, 7104], [6987, 6988, 7105], [6988, 7106, 7105], [6988, 6989, 7107], [6988, 7107, 7106], [6989, 6990, 7107], [6990, 7108, 7107], [6990, 6991, 7109], [6990, 7109, 7108], [6991, 6992, 7109], [6992, 7110, 7109], [6992, 6993, 7111], [6992, 7111, 7110], [6993, 6994, 7111], [6994, 7112, 7111], [6994, 6995, 7113], [6994, 7113, 7112], [6995, 6996, 7113], [6996, 7114, 7113], [6996, 6997, 7115], [6996, 7115, 7114], [6997, 6998, 7115], [6998, 7116, 7115], [6998, 6999, 7117], [6998, 7117, 7116], [6999, 7000, 7117], [7000, 7118, 7117], [7000, 7001, 7119], [7000, 7119, 7118], [7001, 7002, 7119], [7002, 7120, 7119], [7002, 7003, 7121], [7002, 7121, 7120], [7003, 7004, 7121], [7004, 7122, 7121], [7004, 7005, 7123], [7004, 7123, 7122], [7005, 7006, 7123], [7006, 7124, 7123], [7006, 7007, 7125], [7006, 7125, 7124], [7007, 7008, 7125], [7008, 7126, 7125], [7008, 7009, 7127], [7008, 7127, 7126], [7009, 7010, 7127], [7010, 7128, 7127], [7011, 7012, 7130], [7011, 7130, 7129], [7012, 7013, 7130], [7013, 7131, 7130], [7013, 7014, 7132], [7013, 7132, 7131], [7014, 7015, 7132], [7015, 7133, 7132], [7015, 7016, 7134], [7015, 7134, 7133], [7016, 7017, 7134], [7017, 7135, 7134], [7017, 7018, 7136], [7017, 7136, 7135], [7018, 7019, 7136], [7019, 7137, 7136], [7019, 7020, 7138], [7019, 7138, 7137], [7020, 7021, 7138], [7021, 7139, 7138], [7021, 7022, 7140], [7021, 7140, 7139], [7022, 7023, 7140], [7023, 7141, 7140], [7023, 7024, 7142], [7023, 7142, 7141], [7024, 7025, 7142], [7025, 7143, 7142], [7025, 7026, 7144], [7025, 7144, 7143], [7026, 7027, 7144], [7027, 7145, 7144], [7027, 7028, 7146], [7027, 7146, 7145], [7028, 7029, 7146], [7029, 7147, 7146], [7030, 7031, 7148], [7031, 7149, 7148], [7031, 7032, 7150], [7031, 7150, 7149], [7032, 7033, 7150], [7033, 7151, 7150], [7033, 7034, 7152], [7033, 7152, 7151], [7034, 7035, 7152], [7035, 7153, 7152], [7035, 7036, 7154], [7035, 7154, 7153], [7036, 7037, 7154], [7037, 7155, 7154], [7037, 7038, 7156], [7037, 7156, 7155], [7038, 7039, 7156], [7039, 7157, 7156], [7039, 7040, 7158], [7039, 7158, 7157], [7040, 7041, 7158], [7041, 7159, 7158], [7041, 7042, 7160], 
[7041, 7160, 7159], [7042, 7043, 7160], [7043, 7161, 7160], [7043, 7044, 7162], [7043, 7162, 7161], [7044, 7045, 7162], [7045, 7163, 7162], [7045, 7046, 7164], [7045, 7164, 7163], [7046, 7047, 7164], [7047, 7165, 7164], [7047, 7048, 7166], [7047, 7166, 7165], [7048, 7049, 7166], [7049, 7167, 7166], [7049, 7050, 7168], [7049, 7168, 7167], [7050, 7051, 7168], [7051, 7169, 7168], [7051, 7052, 7170], [7051, 7170, 7169], [7052, 7053, 7170], [7053, 7171, 7170], [7053, 7054, 7172], [7053, 7172, 7171], [7054, 7055, 7172], [7055, 7173, 7172], [7055, 7056, 7174], [7055, 7174, 7173], [7056, 7057, 7174], [7057, 7175, 7174], [7057, 7058, 7176], [7057, 7176, 7175], [7058, 7059, 7176], [7059, 7177, 7176], [7059, 7060, 7178], [7059, 7178, 7177], [7060, 7061, 7178], [7061, 7179, 7178], [7061, 7062, 7180], [7061, 7180, 7179], [7062, 7063, 7180], [7063, 7181, 7180], [7063, 7064, 7182], [7063, 7182, 7181], [7064, 7065, 7182], [7065, 7183, 7182], [7065, 7066, 7184], [7065, 7184, 7183], [7066, 7067, 7184], [7067, 7185, 7184], [7067, 7068, 7186], [7067, 7186, 7185], [7068, 7069, 7186], [7069, 7187, 7186], [7069, 7070, 7188], [7069, 7188, 7187], [7070, 7071, 7188], [7071, 7189, 7188], [7071, 7072, 7190], [7071, 7190, 7189], [7072, 7073, 7190], [7073, 7191, 7190], [7073, 7074, 7192], [7073, 7192, 7191], [7074, 7075, 7192], [7075, 7193, 7192], [7075, 7076, 7194], [7075, 7194, 7193], [7076, 7077, 7194], [7077, 7195, 7194], [7077, 7078, 7196], [7077, 7196, 7195], [7078, 7079, 7196], [7079, 7197, 7196], [7079, 7080, 7198], [7079, 7198, 7197], [7080, 7081, 7198], [7081, 7199, 7198], [7081, 7082, 7200], [7081, 7200, 7199], [7082, 7083, 7200], [7083, 7201, 7200], [7083, 7084, 7202], [7083, 7202, 7201], [7084, 7085, 7202], [7085, 7203, 7202], [7085, 7086, 7204], [7085, 7204, 7203], [7086, 7087, 7204], [7087, 7205, 7204], [7087, 7088, 7206], [7087, 7206, 7205], [7088, 7089, 7206], [7089, 7207, 7206], [7089, 7090, 7208], [7089, 7208, 7207], [7090, 7091, 7208], [7091, 7209, 7208], [7091, 7092, 7210], [7091, 7210, 7209], [7092, 7093, 7210], [7093, 7211, 7210], [7093, 7094, 7212], [7093, 7212, 7211], [7094, 7095, 7212], [7095, 7213, 7212], [7095, 7096, 7214], [7095, 7214, 7213], [7096, 7097, 7214], [7097, 7215, 7214], [7097, 7098, 7216], [7097, 7216, 7215], [7098, 7099, 7216], [7099, 7217, 7216], [7099, 7100, 7218], [7099, 7218, 7217], [7100, 7101, 7218], [7101, 7219, 7218], [7101, 7102, 7220], [7101, 7220, 7219], [7102, 7103, 7220], [7103, 7221, 7220], [7103, 7104, 7222], [7103, 7222, 7221], [7104, 7105, 7222], [7105, 7223, 7222], [7105, 7106, 7224], [7105, 7224, 7223], [7106, 7107, 7224], [7107, 7225, 7224], [7107, 7108, 7226], [7107, 7226, 7225], [7108, 7109, 7226], [7109, 7227, 7226], [7109, 7110, 7228], [7109, 7228, 7227], [7110, 7111, 7228], [7111, 7229, 7228], [7111, 7112, 7230], [7111, 7230, 7229], [7112, 7113, 7230], [7113, 7231, 7230], [7113, 7114, 7232], [7113, 7232, 7231], [7114, 7115, 7232], [7115, 7233, 7232], [7115, 7116, 7234], [7115, 7234, 7233], [7116, 7117, 7234], [7117, 7235, 7234], [7117, 7118, 7236], [7117, 7236, 7235], [7118, 7119, 7236], [7119, 7237, 7236], [7119, 7120, 7238], [7119, 7238, 7237], [7120, 7121, 7238], [7121, 7239, 7238], [7121, 7122, 7240], [7121, 7240, 7239], [7122, 7123, 7240], [7123, 7241, 7240], [7123, 7124, 7242], [7123, 7242, 7241], [7124, 7125, 7242], [7125, 7243, 7242], [7125, 7126, 7244], [7125, 7244, 7243], [7126, 7127, 7244], [7127, 7245, 7244], [7127, 7128, 7246], [7127, 7246, 7245], [7129, 7130, 7247], [7130, 7248, 7247], [7130, 7131, 7249], [7130, 7249, 7248], [7131, 7132, 
7249], [7132, 7250, 7249], [7132, 7133, 7251], [7132, 7251, 7250], [7133, 7134, 7251], [7134, 7252, 7251], [7134, 7135, 7253], [7134, 7253, 7252], [7135, 7136, 7253], [7136, 7254, 7253], [7136, 7137, 7255], [7136, 7255, 7254], [7137, 7138, 7255], [7138, 7256, 7255], [7138, 7139, 7257], [7138, 7257, 7256], [7139, 7140, 7257], [7140, 7258, 7257], [7140, 7141, 7259], [7140, 7259, 7258], [7141, 7142, 7259], [7142, 7260, 7259], [7142, 7143, 7261], [7142, 7261, 7260], [7143, 7144, 7261], [7144, 7262, 7261], [7144, 7145, 7263], [7144, 7263, 7262], [7145, 7146, 7263], [7146, 7264, 7263], [7146, 7147, 7265], [7146, 7265, 7264], [7148, 7149, 7267], [7148, 7267, 7266], [7149, 7150, 7267], [7150, 7268, 7267], [7150, 7151, 7269], [7150, 7269, 7268], [7151, 7152, 7269], [7152, 7270, 7269], [7152, 7153, 7271], [7152, 7271, 7270], [7153, 7154, 7271], [7154, 7272, 7271], [7154, 7155, 7273], [7154, 7273, 7272], [7155, 7156, 7273], [7156, 7274, 7273], [7156, 7157, 7275], [7156, 7275, 7274], [7157, 7158, 7275], [7158, 7276, 7275], [7158, 7159, 7277], [7158, 7277, 7276], [7159, 7160, 7277], [7160, 7278, 7277], [7160, 7161, 7279], [7160, 7279, 7278], [7161, 7162, 7279], [7162, 7280, 7279], [7162, 7163, 7281], [7162, 7281, 7280], [7163, 7164, 7281], [7164, 7282, 7281], [7164, 7165, 7283], [7164, 7283, 7282], [7165, 7166, 7283], [7166, 7284, 7283], [7166, 7167, 7285], [7166, 7285, 7284], [7167, 7168, 7285], [7168, 7286, 7285], [7168, 7169, 7287], [7168, 7287, 7286], [7169, 7170, 7287], [7170, 7288, 7287], [7170, 7171, 7289], [7170, 7289, 7288], [7171, 7172, 7289], [7172, 7290, 7289], [7172, 7173, 7291], [7172, 7291, 7290], [7173, 7174, 7291], [7174, 7292, 7291], [7174, 7175, 7293], [7174, 7293, 7292], [7175, 7176, 7293], [7176, 7294, 7293], [7176, 7177, 7295], [7176, 7295, 7294], [7177, 7178, 7295], [7178, 7296, 7295], [7178, 7179, 7297], [7178, 7297, 7296], [7179, 7180, 7297], [7180, 7298, 7297], [7180, 7181, 7299], [7180, 7299, 7298], [7181, 7182, 7299], [7182, 7300, 7299], [7182, 7183, 7301], [7182, 7301, 7300], [7183, 7184, 7301], [7184, 7302, 7301], [7184, 7185, 7303], [7184, 7303, 7302], [7185, 7186, 7303], [7186, 7304, 7303], [7186, 7187, 7305], [7186, 7305, 7304], [7187, 7188, 7305], [7188, 7306, 7305], [7188, 7189, 7307], [7188, 7307, 7306], [7189, 7190, 7307], [7190, 7308, 7307], [7190, 7191, 7309], [7190, 7309, 7308], [7191, 7192, 7309], [7192, 7310, 7309], [7192, 7193, 7311], [7192, 7311, 7310], [7193, 7194, 7311], [7194, 7312, 7311], [7194, 7195, 7313], [7194, 7313, 7312], [7195, 7196, 7313], [7196, 7314, 7313], [7196, 7197, 7315], [7196, 7315, 7314], [7197, 7198, 7315], [7198, 7316, 7315], [7198, 7199, 7317], [7198, 7317, 7316], [7199, 7200, 7317], [7200, 7318, 7317], [7200, 7201, 7319], [7200, 7319, 7318], [7201, 7202, 7319], [7202, 7320, 7319], [7202, 7203, 7321], [7202, 7321, 7320], [7203, 7204, 7321], [7204, 7322, 7321], [7204, 7205, 7323], [7204, 7323, 7322], [7205, 7206, 7323], [7206, 7324, 7323], [7206, 7207, 7325], [7206, 7325, 7324], [7207, 7208, 7325], [7208, 7326, 7325], [7208, 7209, 7327], [7208, 7327, 7326], [7209, 7210, 7327], [7210, 7328, 7327], [7210, 7211, 7329], [7210, 7329, 7328], [7211, 7212, 7329], [7212, 7330, 7329], [7212, 7213, 7331], [7212, 7331, 7330], [7213, 7214, 7331], [7214, 7332, 7331], [7214, 7215, 7333], [7214, 7333, 7332], [7215, 7216, 7333], [7216, 7334, 7333], [7216, 7217, 7335], [7216, 7335, 7334], [7217, 7218, 7335], [7218, 7336, 7335], [7218, 7219, 7337], [7218, 7337, 7336], [7219, 7220, 7337], [7220, 7338, 7337], [7220, 7221, 7339], [7220, 7339, 7338], [7221, 
7222, 7339], [7222, 7340, 7339], [7222, 7223, 7341], [7222, 7341, 7340], [7223, 7224, 7341], [7224, 7342, 7341], [7224, 7225, 7343], [7224, 7343, 7342], [7225, 7226, 7343], [7226, 7344, 7343], [7226, 7227, 7345], [7226, 7345, 7344], [7227, 7228, 7345], [7228, 7346, 7345], [7228, 7229, 7347], [7228, 7347, 7346], [7229, 7230, 7347], [7230, 7348, 7347], [7230, 7231, 7349], [7230, 7349, 7348], [7231, 7232, 7349], [7232, 7350, 7349], [7232, 7233, 7351], [7232, 7351, 7350], [7233, 7234, 7351], [7234, 7352, 7351], [7234, 7235, 7353], [7234, 7353, 7352], [7235, 7236, 7353], [7236, 7354, 7353], [7236, 7237, 7355], [7236, 7355, 7354], [7237, 7238, 7355], [7238, 7356, 7355], [7238, 7239, 7357], [7238, 7357, 7356], [7239, 7240, 7357], [7240, 7358, 7357], [7240, 7241, 7359], [7240, 7359, 7358], [7241, 7242, 7359], [7242, 7360, 7359], [7242, 7243, 7361], [7242, 7361, 7360], [7243, 7244, 7361], [7244, 7362, 7361], [7244, 7245, 7363], [7244, 7363, 7362], [7245, 7246, 7363], [7246, 7364, 7363], [7247, 7248, 7366], [7247, 7366, 7365], [7248, 7249, 7366], [7249, 7367, 7366], [7249, 7250, 7368], [7249, 7368, 7367], [7250, 7251, 7368], [7251, 7369, 7368], [7251, 7252, 7370], [7251, 7370, 7369], [7252, 7253, 7370], [7253, 7371, 7370], [7253, 7254, 7372], [7253, 7372, 7371], [7254, 7255, 7372], [7255, 7373, 7372], [7255, 7256, 7374], [7255, 7374, 7373], [7256, 7257, 7374], [7257, 7375, 7374], [7257, 7258, 7376], [7257, 7376, 7375], [7258, 7259, 7376], [7259, 7377, 7376], [7259, 7260, 7378], [7259, 7378, 7377], [7260, 7261, 7378], [7261, 7379, 7378], [7261, 7262, 7380], [7261, 7380, 7379], [7262, 7263, 7380], [7263, 7381, 7380], [7263, 7264, 7382], [7263, 7382, 7381], [7264, 7265, 7382], [7265, 7383, 7382], [7266, 7267, 7384], [7267, 7385, 7384], [7267, 7268, 7386], [7267, 7386, 7385], [7268, 7269, 7386], [7269, 7387, 7386], [7269, 7270, 7388], [7269, 7388, 7387], [7270, 7271, 7388], [7271, 7389, 7388], [7271, 7272, 7390], [7271, 7390, 7389], [7272, 7273, 7390], [7273, 7391, 7390], [7273, 7274, 7392], [7273, 7392, 7391], [7274, 7275, 7392], [7275, 7393, 7392], [7275, 7276, 7394], [7275, 7394, 7393], [7276, 7277, 7394], [7277, 7395, 7394], [7277, 7278, 7396], [7277, 7396, 7395], [7278, 7279, 7396], [7279, 7397, 7396], [7279, 7280, 7398], [7279, 7398, 7397], [7280, 7281, 7398], [7281, 7399, 7398], [7281, 7282, 7400], [7281, 7400, 7399], [7282, 7283, 7400], [7283, 7401, 7400], [7283, 7284, 7402], [7283, 7402, 7401], [7284, 7285, 7402], [7285, 7403, 7402], [7285, 7286, 7404], [7285, 7404, 7403], [7286, 7287, 7404], [7287, 7405, 7404], [7287, 7288, 7406], [7287, 7406, 7405], [7288, 7289, 7406], [7289, 7407, 7406], [7289, 7290, 7408], [7289, 7408, 7407], [7290, 7291, 7408], [7291, 7409, 7408], [7291, 7292, 7410], [7291, 7410, 7409], [7292, 7293, 7410], [7293, 7411, 7410], [7293, 7294, 7412], [7293, 7412, 7411], [7294, 7295, 7412], [7295, 7413, 7412], [7295, 7296, 7414], [7295, 7414, 7413], [7296, 7297, 7414], [7297, 7415, 7414], [7297, 7298, 7416], [7297, 7416, 7415], [7298, 7299, 7416], [7299, 7417, 7416], [7299, 7300, 7418], [7299, 7418, 7417], [7300, 7301, 7418], [7301, 7419, 7418], [7301, 7302, 7420], [7301, 7420, 7419], [7302, 7303, 7420], [7303, 7421, 7420], [7303, 7304, 7422], [7303, 7422, 7421], [7304, 7305, 7422], [7305, 7423, 7422], [7305, 7306, 7424], [7305, 7424, 7423], [7306, 7307, 7424], [7307, 7425, 7424], [7307, 7308, 7426], [7307, 7426, 7425], [7308, 7309, 7426], [7309, 7427, 7426], [7309, 7310, 7428], [7309, 7428, 7427], [7310, 7311, 7428], [7311, 7429, 7428], [7311, 7312, 7430], [7311, 7430, 7429], 
[7312, 7313, 7430], [7313, 7431, 7430], [7313, 7314, 7432], [7313, 7432, 7431], [7314, 7315, 7432], [7315, 7433, 7432], [7315, 7316, 7434], [7315, 7434, 7433], [7316, 7317, 7434], [7317, 7435, 7434], [7317, 7318, 7436], [7317, 7436, 7435], [7318, 7319, 7436], [7319, 7437, 7436], [7319, 7320, 7438], [7319, 7438, 7437], [7320, 7321, 7438], [7321, 7439, 7438], [7321, 7322, 7440], [7321, 7440, 7439], [7322, 7323, 7440], [7323, 7441, 7440], [7323, 7324, 7442], [7323, 7442, 7441], [7324, 7325, 7442], [7325, 7443, 7442], [7325, 7326, 7444], [7325, 7444, 7443], [7326, 7327, 7444], [7327, 7445, 7444], [7327, 7328, 7446], [7327, 7446, 7445], [7328, 7329, 7446], [7329, 7447, 7446], [7329, 7330, 7448], [7329, 7448, 7447], [7330, 7331, 7448], [7331, 7449, 7448], [7331, 7332, 7450], [7331, 7450, 7449], [7332, 7333, 7450], [7333, 7451, 7450], [7333, 7334, 7452], [7333, 7452, 7451], [7334, 7335, 7452], [7335, 7453, 7452], [7335, 7336, 7454], [7335, 7454, 7453], [7336, 7337, 7454], [7337, 7455, 7454], [7337, 7338, 7456], [7337, 7456, 7455], [7338, 7339, 7456], [7339, 7457, 7456], [7339, 7340, 7458], [7339, 7458, 7457], [7340, 7341, 7458], [7341, 7459, 7458], [7341, 7342, 7460], [7341, 7460, 7459], [7342, 7343, 7460], [7343, 7461, 7460], [7343, 7344, 7462], [7343, 7462, 7461], [7344, 7345, 7462], [7345, 7463, 7462], [7345, 7346, 7464], [7345, 7464, 7463], [7346, 7347, 7464], [7347, 7465, 7464], [7347, 7348, 7466], [7347, 7466, 7465], [7348, 7349, 7466], [7349, 7467, 7466], [7349, 7350, 7468], [7349, 7468, 7467], [7350, 7351, 7468], [7351, 7469, 7468], [7351, 7352, 7470], [7351, 7470, 7469], [7352, 7353, 7470], [7353, 7471, 7470], [7353, 7354, 7472], [7353, 7472, 7471], [7354, 7355, 7472], [7355, 7473, 7472], [7355, 7356, 7474], [7355, 7474, 7473], [7356, 7357, 7474], [7357, 7475, 7474], [7357, 7358, 7476], [7357, 7476, 7475], [7358, 7359, 7476], [7359, 7477, 7476], [7359, 7360, 7478], [7359, 7478, 7477], [7360, 7361, 7478], [7361, 7479, 7478], [7361, 7362, 7480], [7361, 7480, 7479], [7362, 7363, 7480], [7363, 7481, 7480], [7363, 7364, 7482], [7363, 7482, 7481], [7365, 7366, 7483], [7366, 7484, 7483], [7366, 7367, 7485], [7366, 7485, 7484], [7367, 7368, 7485], [7368, 7486, 7485], [7368, 7369, 7487], [7368, 7487, 7486], [7369, 7370, 7487], [7370, 7488, 7487], [7370, 7371, 7489], [7370, 7489, 7488], [7371, 7372, 7489], [7372, 7490, 7489], [7372, 7373, 7491], [7372, 7491, 7490], [7373, 7374, 7491], [7374, 7492, 7491], [7374, 7375, 7493], [7374, 7493, 7492], [7375, 7376, 7493], [7376, 7494, 7493], [7376, 7377, 7495], [7376, 7495, 7494], [7377, 7378, 7495], [7378, 7496, 7495], [7378, 7379, 7497], [7378, 7497, 7496], [7379, 7380, 7497], [7380, 7498, 7497], [7380, 7381, 7499], [7380, 7499, 7498], [7381, 7382, 7499], [7382, 7500, 7499], [7382, 7383, 7501], [7382, 7501, 7500], [7384, 7385, 7503], [7384, 7503, 7502], [7385, 7386, 7503], [7386, 7504, 7503], [7386, 7387, 7505], [7386, 7505, 7504], [7387, 7388, 7505], [7388, 7506, 7505], [7388, 7389, 7507], [7388, 7507, 7506], [7389, 7390, 7507], [7390, 7508, 7507], [7390, 7391, 7509], [7390, 7509, 7508], [7391, 7392, 7509], [7392, 7510, 7509], [7392, 7393, 7511], [7392, 7511, 7510], [7393, 7394, 7511], [7394, 7512, 7511], [7394, 7395, 7513], [7394, 7513, 7512], [7395, 7396, 7513], [7396, 7514, 7513], [7396, 7397, 7515], [7396, 7515, 7514], [7397, 7398, 7515], [7398, 7516, 7515], [7398, 7399, 7517], [7398, 7517, 7516], [7399, 7400, 7517], [7400, 7518, 7517], [7400, 7401, 7519], [7400, 7519, 7518], [7401, 7402, 7519], [7402, 7520, 7519], [7402, 7403, 7521], [7402, 7521, 
7520], [7403, 7404, 7521], [7404, 7522, 7521], [7404, 7405, 7523], [7404, 7523, 7522], [7405, 7406, 7523], [7406, 7524, 7523], [7406, 7407, 7525], [7406, 7525, 7524], [7407, 7408, 7525], [7408, 7526, 7525], [7408, 7409, 7527], [7408, 7527, 7526], [7409, 7410, 7527], [7410, 7528, 7527], [7410, 7411, 7529], [7410, 7529, 7528], [7411, 7412, 7529], [7412, 7530, 7529], [7412, 7413, 7531], [7412, 7531, 7530], [7413, 7414, 7531], [7414, 7532, 7531], [7414, 7415, 7533], [7414, 7533, 7532], [7415, 7416, 7533], [7416, 7534, 7533], [7416, 7417, 7535], [7416, 7535, 7534], [7417, 7418, 7535], [7418, 7536, 7535], [7418, 7419, 7537], [7418, 7537, 7536], [7419, 7420, 7537], [7420, 7538, 7537], [7420, 7421, 7539], [7420, 7539, 7538], [7421, 7422, 7539], [7422, 7540, 7539], [7422, 7423, 7541], [7422, 7541, 7540], [7423, 7424, 7541], [7424, 7542, 7541], [7424, 7425, 7543], [7424, 7543, 7542], [7425, 7426, 7543], [7426, 7544, 7543], [7426, 7427, 7545], [7426, 7545, 7544], [7427, 7428, 7545], [7428, 7546, 7545], [7428, 7429, 7547], [7428, 7547, 7546], [7429, 7430, 7547], [7430, 7548, 7547], [7430, 7431, 7549], [7430, 7549, 7548], [7431, 7432, 7549], [7432, 7550, 7549], [7432, 7433, 7551], [7432, 7551, 7550], [7433, 7434, 7551], [7434, 7552, 7551], [7434, 7435, 7553], [7434, 7553, 7552], [7435, 7436, 7553], [7436, 7554, 7553], [7436, 7437, 7555], [7436, 7555, 7554], [7437, 7438, 7555], [7438, 7556, 7555], [7438, 7439, 7557], [7438, 7557, 7556], [7439, 7440, 7557], [7440, 7558, 7557], [7440, 7441, 7559], [7440, 7559, 7558], [7441, 7442, 7559], [7442, 7560, 7559], [7442, 7443, 7561], [7442, 7561, 7560], [7443, 7444, 7561], [7444, 7562, 7561], [7444, 7445, 7563], [7444, 7563, 7562], [7445, 7446, 7563], [7446, 7564, 7563], [7446, 7447, 7565], [7446, 7565, 7564], [7447, 7448, 7565], [7448, 7566, 7565], [7448, 7449, 7567], [7448, 7567, 7566], [7449, 7450, 7567], [7450, 7568, 7567], [7450, 7451, 7569], [7450, 7569, 7568], [7451, 7452, 7569], [7452, 7570, 7569], [7452, 7453, 7571], [7452, 7571, 7570], [7453, 7454, 7571], [7454, 7572, 7571], [7454, 7455, 7573], [7454, 7573, 7572], [7455, 7456, 7573], [7456, 7574, 7573], [7456, 7457, 7575], [7456, 7575, 7574], [7457, 7458, 7575], [7458, 7576, 7575], [7458, 7459, 7577], [7458, 7577, 7576], [7459, 7460, 7577], [7460, 7578, 7577], [7460, 7461, 7579], [7460, 7579, 7578], [7461, 7462, 7579], [7462, 7580, 7579], [7462, 7463, 7581], [7462, 7581, 7580], [7463, 7464, 7581], [7464, 7582, 7581], [7464, 7465, 7583], [7464, 7583, 7582], [7465, 7466, 7583], [7466, 7584, 7583], [7466, 7467, 7585], [7466, 7585, 7584], [7467, 7468, 7585], [7468, 7586, 7585], [7468, 7469, 7587], [7468, 7587, 7586], [7469, 7470, 7587], [7470, 7588, 7587], [7470, 7471, 7589], [7470, 7589, 7588], [7471, 7472, 7589], [7472, 7590, 7589], [7472, 7473, 7591], [7472, 7591, 7590], [7473, 7474, 7591], [7474, 7592, 7591], [7474, 7475, 7593], [7474, 7593, 7592], [7475, 7476, 7593], [7476, 7594, 7593], [7476, 7477, 7595], [7476, 7595, 7594], [7477, 7478, 7595], [7478, 7596, 7595], [7478, 7479, 7597], [7478, 7597, 7596], [7479, 7480, 7597], [7480, 7598, 7597], [7480, 7481, 7599], [7480, 7599, 7598], [7481, 7482, 7599], [7482, 7600, 7599], [7483, 7484, 7602], [7483, 7602, 7601], [7484, 7485, 7602], [7485, 7603, 7602], [7485, 7486, 7604], [7485, 7604, 7603], [7486, 7487, 7604], [7487, 7605, 7604], [7487, 7488, 7606], [7487, 7606, 7605], [7488, 7489, 7606], [7489, 7607, 7606], [7489, 7490, 7608], [7489, 7608, 7607], [7490, 7491, 7608], [7491, 7609, 7608], [7491, 7492, 7610], [7491, 7610, 7609], [7492, 7493, 7610], [7493, 
7611, 7610], [7493, 7494, 7612], [7493, 7612, 7611], [7494, 7495, 7612], [7495, 7613, 7612], [7495, 7496, 7614], [7495, 7614, 7613], [7496, 7497, 7614], [7497, 7615, 7614], [7497, 7498, 7616], [7497, 7616, 7615], [7498, 7499, 7616], [7499, 7617, 7616], [7499, 7500, 7618], [7499, 7618, 7617], [7500, 7501, 7618], [7501, 7619, 7618], [7502, 7503, 7620], [7503, 7621, 7620], [7503, 7504, 7622], [7503, 7622, 7621], [7504, 7505, 7622], [7505, 7623, 7622], [7505, 7506, 7624], [7505, 7624, 7623], [7506, 7507, 7624], [7507, 7625, 7624], [7507, 7508, 7626], [7507, 7626, 7625], [7508, 7509, 7626], [7509, 7627, 7626], [7509, 7510, 7628], [7509, 7628, 7627], [7510, 7511, 7628], [7511, 7629, 7628], [7511, 7512, 7630], [7511, 7630, 7629], [7512, 7513, 7630], [7513, 7631, 7630], [7513, 7514, 7632], [7513, 7632, 7631], [7514, 7515, 7632], [7515, 7633, 7632], [7515, 7516, 7634], [7515, 7634, 7633], [7516, 7517, 7634], [7517, 7635, 7634], [7517, 7518, 7636], [7517, 7636, 7635], [7518, 7519, 7636], [7519, 7637, 7636], [7519, 7520, 7638], [7519, 7638, 7637], [7520, 7521, 7638], [7521, 7639, 7638], [7521, 7522, 7640], [7521, 7640, 7639], [7522, 7523, 7640], [7523, 7641, 7640], [7523, 7524, 7642], [7523, 7642, 7641], [7524, 7525, 7642], [7525, 7643, 7642], [7525, 7526, 7644], [7525, 7644, 7643], [7526, 7527, 7644], [7527, 7645, 7644], [7527, 7528, 7646], [7527, 7646, 7645], [7528, 7529, 7646], [7529, 7647, 7646], [7529, 7530, 7648], [7529, 7648, 7647], [7530, 7531, 7648], [7531, 7649, 7648], [7531, 7532, 7650], [7531, 7650, 7649], [7532, 7533, 7650], [7533, 7651, 7650], [7533, 7534, 7652], [7533, 7652, 7651], [7534, 7535, 7652], [7535, 7653, 7652], [7535, 7536, 7654], [7535, 7654, 7653], [7536, 7537, 7654], [7537, 7655, 7654], [7537, 7538, 7656], [7537, 7656, 7655], [7538, 7539, 7656], [7539, 7657, 7656], [7539, 7540, 7658], [7539, 7658, 7657], [7540, 7541, 7658], [7541, 7659, 7658], [7541, 7542, 7660], [7541, 7660, 7659], [7542, 7543, 7660], [7543, 7661, 7660], [7543, 7544, 7662], [7543, 7662, 7661], [7544, 7545, 7662], [7545, 7663, 7662], [7545, 7546, 7664], [7545, 7664, 7663], [7546, 7547, 7664], [7547, 7665, 7664], [7547, 7548, 7666], [7547, 7666, 7665], [7548, 7549, 7666], [7549, 7667, 7666], [7549, 7550, 7668], [7549, 7668, 7667], [7550, 7551, 7668], [7551, 7669, 7668], [7551, 7552, 7670], [7551, 7670, 7669], [7552, 7553, 7670], [7553, 7671, 7670], [7553, 7554, 7672], [7553, 7672, 7671], [7554, 7555, 7672], [7555, 7673, 7672], [7555, 7556, 7674], [7555, 7674, 7673], [7556, 7557, 7674], [7557, 7675, 7674], [7557, 7558, 7676], [7557, 7676, 7675], [7558, 7559, 7676], [7559, 7677, 7676], [7559, 7560, 7678], [7559, 7678, 7677], [7560, 7561, 7678], [7561, 7679, 7678], [7561, 7562, 7680], [7561, 7680, 7679], [7562, 7563, 7680], [7563, 7681, 7680], [7563, 7564, 7682], [7563, 7682, 7681], [7564, 7565, 7682], [7565, 7683, 7682], [7565, 7566, 7684], [7565, 7684, 7683], [7566, 7567, 7684], [7567, 7685, 7684], [7567, 7568, 7686], [7567, 7686, 7685], [7568, 7569, 7686], [7569, 7687, 7686], [7569, 7570, 7688], [7569, 7688, 7687], [7570, 7571, 7688], [7571, 7689, 7688], [7571, 7572, 7690], [7571, 7690, 7689], [7572, 7573, 7690], [7573, 7691, 7690], [7573, 7574, 7692], [7573, 7692, 7691], [7574, 7575, 7692], [7575, 7693, 7692], [7575, 7576, 7694], [7575, 7694, 7693], [7576, 7577, 7694], [7577, 7695, 7694], [7577, 7578, 7696], [7577, 7696, 7695], [7578, 7579, 7696], [7579, 7697, 7696], [7579, 7580, 7698], [7579, 7698, 7697], [7580, 7581, 7698], [7581, 7699, 7698], [7581, 7582, 7700], [7581, 7700, 7699], [7582, 7583, 7700], 
[7583, 7701, 7700], [7583, 7584, 7702], [7583, 7702, 7701], [7584, 7585, 7702], [7585, 7703, 7702], [7585, 7586, 7704], [7585, 7704, 7703], [7586, 7587, 7704], [7587, 7705, 7704], [7587, 7588, 7706], [7587, 7706, 7705], [7588, 7589, 7706], [7589, 7707, 7706], [7589, 7590, 7708], [7589, 7708, 7707], [7590, 7591, 7708], [7591, 7709, 7708], [7591, 7592, 7710], [7591, 7710, 7709], [7592, 7593, 7710], [7593, 7711, 7710], [7593, 7594, 7712], [7593, 7712, 7711], [7594, 7595, 7712], [7595, 7713, 7712], [7595, 7596, 7714], [7595, 7714, 7713], [7596, 7597, 7714], [7597, 7715, 7714], [7597, 7598, 7716], [7597, 7716, 7715], [7598, 7599, 7716], [7599, 7717, 7716], [7599, 7600, 7718], [7599, 7718, 7717], [7601, 7602, 7719], [7602, 7720, 7719], [7602, 7603, 7721], [7602, 7721, 7720], [7603, 7604, 7721], [7604, 7722, 7721], [7604, 7605, 7723], [7604, 7723, 7722], [7605, 7606, 7723], [7606, 7724, 7723], [7606, 7607, 7725], [7606, 7725, 7724], [7607, 7608, 7725], [7608, 7726, 7725], [7608, 7609, 7727], [7608, 7727, 7726], [7609, 7610, 7727], [7610, 7728, 7727], [7610, 7611, 7729], [7610, 7729, 7728], [7611, 7612, 7729], [7612, 7730, 7729], [7612, 7613, 7731], [7612, 7731, 7730], [7613, 7614, 7731], [7614, 7732, 7731], [7614, 7615, 7733], [7614, 7733, 7732], [7615, 7616, 7733], [7616, 7734, 7733], [7616, 7617, 7735], [7616, 7735, 7734], [7617, 7618, 7735], [7618, 7736, 7735], [7618, 7619, 7737], [7618, 7737, 7736], [7620, 7621, 7739], [7620, 7739, 7738], [7621, 7622, 7739], [7622, 7740, 7739], [7622, 7623, 7741], [7622, 7741, 7740], [7623, 7624, 7741], [7624, 7742, 7741], [7624, 7625, 7743], [7624, 7743, 7742], [7625, 7626, 7743], [7626, 7744, 7743], [7626, 7627, 7745], [7626, 7745, 7744], [7627, 7628, 7745], [7628, 7746, 7745], [7628, 7629, 7747], [7628, 7747, 7746], [7629, 7630, 7747], [7630, 7748, 7747], [7630, 7631, 7749], [7630, 7749, 7748], [7631, 7632, 7749], [7632, 7750, 7749], [7632, 7633, 7751], [7632, 7751, 7750], [7633, 7634, 7751], [7634, 7752, 7751], [7634, 7635, 7753], [7634, 7753, 7752], [7635, 7636, 7753], [7636, 7754, 7753], [7636, 7637, 7755], [7636, 7755, 7754], [7637, 7638, 7755], [7638, 7756, 7755], [7638, 7639, 7757], [7638, 7757, 7756], [7639, 7640, 7757], [7640, 7758, 7757], [7640, 7641, 7759], [7640, 7759, 7758], [7641, 7642, 7759], [7642, 7760, 7759], [7642, 7643, 7761], [7642, 7761, 7760], [7643, 7644, 7761], [7644, 7762, 7761], [7644, 7645, 7763], [7644, 7763, 7762], [7645, 7646, 7763], [7646, 7764, 7763], [7646, 7647, 7765], [7646, 7765, 7764], [7647, 7648, 7765], [7648, 7766, 7765], [7648, 7649, 7767], [7648, 7767, 7766], [7649, 7650, 7767], [7650, 7768, 7767], [7650, 7651, 7769], [7650, 7769, 7768], [7651, 7652, 7769], [7652, 7770, 7769], [7652, 7653, 7771], [7652, 7771, 7770], [7653, 7654, 7771], [7654, 7772, 7771], [7654, 7655, 7773], [7654, 7773, 7772], [7655, 7656, 7773], [7656, 7774, 7773], [7656, 7657, 7775], [7656, 7775, 7774], [7657, 7658, 7775], [7658, 7776, 7775], [7658, 7659, 7777], [7658, 7777, 7776], [7659, 7660, 7777], [7660, 7778, 7777], [7660, 7661, 7779], [7660, 7779, 7778], [7661, 7662, 7779], [7662, 7780, 7779], [7662, 7663, 7781], [7662, 7781, 7780], [7663, 7664, 7781], [7664, 7782, 7781], [7664, 7665, 7783], [7664, 7783, 7782], [7665, 7666, 7783], [7666, 7784, 7783], [7666, 7667, 7785], [7666, 7785, 7784], [7667, 7668, 7785], [7668, 7786, 7785], [7668, 7669, 7787], [7668, 7787, 7786], [7669, 7670, 7787], [7670, 7788, 7787], [7670, 7671, 7789], [7670, 7789, 7788], [7671, 7672, 7789], [7672, 7790, 7789], [7672, 7673, 7791], [7672, 7791, 7790], [7673, 7674, 
7791], [7674, 7792, 7791], [7674, 7675, 7793], [7674, 7793, 7792], [7675, 7676, 7793], [7676, 7794, 7793], [7676, 7677, 7795], [7676, 7795, 7794], [7677, 7678, 7795], [7678, 7796, 7795], [7678, 7679, 7797], [7678, 7797, 7796], [7679, 7680, 7797], [7680, 7798, 7797], [7680, 7681, 7799], [7680, 7799, 7798], [7681, 7682, 7799], [7682, 7800, 7799], [7682, 7683, 7801], [7682, 7801, 7800], [7683, 7684, 7801], [7684, 7802, 7801], [7684, 7685, 7803], [7684, 7803, 7802], [7685, 7686, 7803], [7686, 7804, 7803], [7686, 7687, 7805], [7686, 7805, 7804], [7687, 7688, 7805], [7688, 7806, 7805], [7688, 7689, 7807], [7688, 7807, 7806], [7689, 7690, 7807], [7690, 7808, 7807], [7690, 7691, 7809], [7690, 7809, 7808], [7691, 7692, 7809], [7692, 7810, 7809], [7692, 7693, 7811], [7692, 7811, 7810], [7693, 7694, 7811], [7694, 7812, 7811], [7694, 7695, 7813], [7694, 7813, 7812], [7695, 7696, 7813], [7696, 7814, 7813], [7696, 7697, 7815], [7696, 7815, 7814], [7697, 7698, 7815], [7698, 7816, 7815], [7698, 7699, 7817], [7698, 7817, 7816], [7699, 7700, 7817], [7700, 7818, 7817], [7700, 7701, 7819], [7700, 7819, 7818], [7701, 7702, 7819], [7702, 7820, 7819], [7702, 7703, 7821], [7702, 7821, 7820], [7703, 7704, 7821], [7704, 7822, 7821], [7704, 7705, 7823], [7704, 7823, 7822], [7705, 7706, 7823], [7706, 7824, 7823], [7706, 7707, 7825], [7706, 7825, 7824], [7707, 7708, 7825], [7708, 7826, 7825], [7708, 7709, 7827], [7708, 7827, 7826], [7709, 7710, 7827], [7710, 7828, 7827], [7710, 7711, 7829], [7710, 7829, 7828], [7711, 7712, 7829], [7712, 7830, 7829], [7712, 7713, 7831], [7712, 7831, 7830], [7713, 7714, 7831], [7714, 7832, 7831], [7714, 7715, 7833], [7714, 7833, 7832], [7715, 7716, 7833], [7716, 7834, 7833], [7716, 7717, 7835], [7716, 7835, 7834], [7717, 7718, 7835], [7718, 7836, 7835], [7719, 7720, 7838], [7719, 7838, 7837], [7720, 7721, 7838], [7721, 7839, 7838], [7721, 7722, 7840], [7721, 7840, 7839], [7722, 7723, 7840], [7723, 7841, 7840], [7723, 7724, 7842], [7723, 7842, 7841], [7724, 7725, 7842], [7725, 7843, 7842], [7725, 7726, 7844], [7725, 7844, 7843], [7726, 7727, 7844], [7727, 7845, 7844], [7727, 7728, 7846], [7727, 7846, 7845], [7728, 7729, 7846], [7729, 7847, 7846], [7729, 7730, 7848], [7729, 7848, 7847], [7730, 7731, 7848], [7731, 7849, 7848], [7731, 7732, 7850], [7731, 7850, 7849], [7732, 7733, 7850], [7733, 7851, 7850], [7733, 7734, 7852], [7733, 7852, 7851], [7734, 7735, 7852], [7735, 7853, 7852], [7735, 7736, 7854], [7735, 7854, 7853], [7736, 7737, 7854], [7737, 7855, 7854], [7738, 7739, 7856], [7739, 7857, 7856], [7739, 7740, 7858], [7739, 7858, 7857], [7740, 7741, 7858], [7741, 7859, 7858], [7741, 7742, 7860], [7741, 7860, 7859], [7742, 7743, 7860], [7743, 7861, 7860], [7743, 7744, 7862], [7743, 7862, 7861], [7744, 7745, 7862], [7745, 7863, 7862], [7745, 7746, 7864], [7745, 7864, 7863], [7746, 7747, 7864], [7747, 7865, 7864], [7747, 7748, 7866], [7747, 7866, 7865], [7748, 7749, 7866], [7749, 7867, 7866], [7749, 7750, 7868], [7749, 7868, 7867], [7750, 7751, 7868], [7751, 7869, 7868], [7751, 7752, 7870], [7751, 7870, 7869], [7752, 7753, 7870], [7753, 7871, 7870], [7753, 7754, 7872], [7753, 7872, 7871], [7754, 7755, 7872], [7755, 7873, 7872], [7755, 7756, 7874], [7755, 7874, 7873], [7756, 7757, 7874], [7757, 7875, 7874], [7757, 7758, 7876], [7757, 7876, 7875], [7758, 7759, 7876], [7759, 7877, 7876], [7759, 7760, 7878], [7759, 7878, 7877], [7760, 7761, 7878], [7761, 7879, 7878], [7761, 7762, 7880], [7761, 7880, 7879], [7762, 7763, 7880], [7763, 7881, 7880], [7763, 7764, 7882], [7763, 7882, 7881], [7764, 
7765, 7882], [7765, 7883, 7882], [7765, 7766, 7884], [7765, 7884, 7883], [7766, 7767, 7884], [7767, 7885, 7884], [7767, 7768, 7886], [7767, 7886, 7885], [7768, 7769, 7886], [7769, 7887, 7886], [7769, 7770, 7888], [7769, 7888, 7887], [7770, 7771, 7888], [7771, 7889, 7888], [7771, 7772, 7890], [7771, 7890, 7889], [7772, 7773, 7890], [7773, 7891, 7890], [7773, 7774, 7892], [7773, 7892, 7891], [7774, 7775, 7892], [7775, 7893, 7892], [7775, 7776, 7894], [7775, 7894, 7893], [7776, 7777, 7894], [7777, 7895, 7894], [7777, 7778, 7896], [7777, 7896, 7895], [7778, 7779, 7896], [7779, 7897, 7896], [7779, 7780, 7898], [7779, 7898, 7897], [7780, 7781, 7898], [7781, 7899, 7898], [7781, 7782, 7900], [7781, 7900, 7899], [7782, 7783, 7900], [7783, 7901, 7900], [7783, 7784, 7902], [7783, 7902, 7901], [7784, 7785, 7902], [7785, 7903, 7902], [7785, 7786, 7904], [7785, 7904, 7903], [7786, 7787, 7904], [7787, 7905, 7904], [7787, 7788, 7906], [7787, 7906, 7905], [7788, 7789, 7906], [7789, 7907, 7906], [7789, 7790, 7908], [7789, 7908, 7907], [7790, 7791, 7908], [7791, 7909, 7908], [7791, 7792, 7910], [7791, 7910, 7909], [7792, 7793, 7910], [7793, 7911, 7910], [7793, 7794, 7912], [7793, 7912, 7911], [7794, 7795, 7912], [7795, 7913, 7912], [7795, 7796, 7914], [7795, 7914, 7913], [7796, 7797, 7914], [7797, 7915, 7914], [7797, 7798, 7916], [7797, 7916, 7915], [7798, 7799, 7916], [7799, 7917, 7916], [7799, 7800, 7918], [7799, 7918, 7917], [7800, 7801, 7918], [7801, 7919, 7918], [7801, 7802, 7920], [7801, 7920, 7919], [7802, 7803, 7920], [7803, 7921, 7920], [7803, 7804, 7922], [7803, 7922, 7921], [7804, 7805, 7922], [7805, 7923, 7922], [7805, 7806, 7924], [7805, 7924, 7923], [7806, 7807, 7924], [7807, 7925, 7924], [7807, 7808, 7926], [7807, 7926, 7925], [7808, 7809, 7926], [7809, 7927, 7926], [7809, 7810, 7928], [7809, 7928, 7927], [7810, 7811, 7928], [7811, 7929, 7928], [7811, 7812, 7930], [7811, 7930, 7929], [7812, 7813, 7930], [7813, 7931, 7930], [7813, 7814, 7932], [7813, 7932, 7931], [7814, 7815, 7932], [7815, 7933, 7932], [7815, 7816, 7934], [7815, 7934, 7933], [7816, 7817, 7934], [7817, 7935, 7934], [7817, 7818, 7936], [7817, 7936, 7935], [7818, 7819, 7936], [7819, 7937, 7936], [7819, 7820, 7938], [7819, 7938, 7937], [7820, 7821, 7938], [7821, 7939, 7938], [7821, 7822, 7940], [7821, 7940, 7939], [7822, 7823, 7940], [7823, 7941, 7940], [7823, 7824, 7942], [7823, 7942, 7941], [7824, 7825, 7942], [7825, 7943, 7942], [7825, 7826, 7944], [7825, 7944, 7943], [7826, 7827, 7944], [7827, 7945, 7944], [7827, 7828, 7946], [7827, 7946, 7945], [7828, 7829, 7946], [7829, 7947, 7946], [7829, 7830, 7948], [7829, 7948, 7947], [7830, 7831, 7948], [7831, 7949, 7948], [7831, 7832, 7950], [7831, 7950, 7949], [7832, 7833, 7950], [7833, 7951, 7950], [7833, 7834, 7952], [7833, 7952, 7951], [7834, 7835, 7952], [7835, 7953, 7952], [7835, 7836, 7954], [7835, 7954, 7953], [7837, 7838, 7955], [7838, 7956, 7955], [7838, 7839, 7957], [7838, 7957, 7956], [7839, 7840, 7957], [7840, 7958, 7957], [7840, 7841, 7959], [7840, 7959, 7958], [7841, 7842, 7959], [7842, 7960, 7959], [7842, 7843, 7961], [7842, 7961, 7960], [7843, 7844, 7961], [7844, 7962, 7961], [7844, 7845, 7963], [7844, 7963, 7962], [7845, 7846, 7963], [7846, 7964, 7963], [7846, 7847, 7965], [7846, 7965, 7964], [7847, 7848, 7965], [7848, 7966, 7965], [7848, 7849, 7967], [7848, 7967, 7966], [7849, 7850, 7967], [7850, 7968, 7967], [7850, 7851, 7969], [7850, 7969, 7968], [7851, 7852, 7969], [7852, 7970, 7969], [7852, 7853, 7971], [7852, 7971, 7970], [7853, 7854, 7971], [7854, 7972, 7971], 
[7854, 7855, 7973], [7854, 7973, 7972], [7856, 7857, 7975], [7856, 7975, 7974], [7857, 7858, 7975], [7858, 7976, 7975], [7858, 7859, 7977], [7858, 7977, 7976], [7859, 7860, 7977], [7860, 7978, 7977], [7860, 7861, 7979], [7860, 7979, 7978], [7861, 7862, 7979], [7862, 7980, 7979], [7862, 7863, 7981], [7862, 7981, 7980], [7863, 7864, 7981], [7864, 7982, 7981], [7864, 7865, 7983], [7864, 7983, 7982], [7865, 7866, 7983], [7866, 7984, 7983], [7866, 7867, 7985], [7866, 7985, 7984], [7867, 7868, 7985], [7868, 7986, 7985], [7868, 7869, 7987], [7868, 7987, 7986], [7869, 7870, 7987], [7870, 7988, 7987], [7870, 7871, 7989], [7870, 7989, 7988], [7871, 7872, 7989], [7872, 7990, 7989], [7872, 7873, 7991], [7872, 7991, 7990], [7873, 7874, 7991], [7874, 7992, 7991], [7874, 7875, 7993], [7874, 7993, 7992], [7875, 7876, 7993], [7876, 7994, 7993], [7876, 7877, 7995], [7876, 7995, 7994], [7877, 7878, 7995], [7878, 7996, 7995], [7878, 7879, 7997], [7878, 7997, 7996], [7879, 7880, 7997], [7880, 7998, 7997], [7880, 7881, 7999], [7880, 7999, 7998], [7881, 7882, 7999], [7882, 8000, 7999], [7882, 7883, 8001], [7882, 8001, 8000], [7883, 7884, 8001], [7884, 8002, 8001], [7884, 7885, 8003], [7884, 8003, 8002], [7885, 7886, 8003], [7886, 8004, 8003], [7886, 7887, 8005], [7886, 8005, 8004], [7887, 7888, 8005], [7888, 8006, 8005], [7888, 7889, 8007], [7888, 8007, 8006], [7889, 7890, 8007], [7890, 8008, 8007], [7890, 7891, 8009], [7890, 8009, 8008], [7891, 7892, 8009], [7892, 8010, 8009], [7892, 7893, 8011], [7892, 8011, 8010], [7893, 7894, 8011], [7894, 8012, 8011], [7894, 7895, 8013], [7894, 8013, 8012], [7895, 7896, 8013], [7896, 8014, 8013], [7896, 7897, 8015], [7896, 8015, 8014], [7897, 7898, 8015], [7898, 8016, 8015], [7898, 7899, 8017], [7898, 8017, 8016], [7899, 7900, 8017], [7900, 8018, 8017], [7900, 7901, 8019], [7900, 8019, 8018], [7901, 7902, 8019], [7902, 8020, 8019], [7902, 7903, 8021], [7902, 8021, 8020], [7903, 7904, 8021], [7904, 8022, 8021], [7904, 7905, 8023], [7904, 8023, 8022], [7905, 7906, 8023], [7906, 8024, 8023], [7906, 7907, 8025], [7906, 8025, 8024], [7907, 7908, 8025], [7908, 8026, 8025], [7908, 7909, 8027], [7908, 8027, 8026], [7909, 7910, 8027], [7910, 8028, 8027], [7910, 7911, 8029], [7910, 8029, 8028], [7911, 7912, 8029], [7912, 8030, 8029], [7912, 7913, 8031], [7912, 8031, 8030], [7913, 7914, 8031], [7914, 8032, 8031], [7914, 7915, 8033], [7914, 8033, 8032], [7915, 7916, 8033], [7916, 8034, 8033], [7916, 7917, 8035], [7916, 8035, 8034], [7917, 7918, 8035], [7918, 8036, 8035], [7918, 7919, 8037], [7918, 8037, 8036], [7919, 7920, 8037], [7920, 8038, 8037], [7920, 7921, 8039], [7920, 8039, 8038], [7921, 7922, 8039], [7922, 8040, 8039], [7922, 7923, 8041], [7922, 8041, 8040], [7923, 7924, 8041], [7924, 8042, 8041], [7924, 7925, 8043], [7924, 8043, 8042], [7925, 7926, 8043], [7926, 8044, 8043], [7926, 7927, 8045], [7926, 8045, 8044], [7927, 7928, 8045], [7928, 8046, 8045], [7928, 7929, 8047], [7928, 8047, 8046], [7929, 7930, 8047], [7930, 8048, 8047], [7930, 7931, 8049], [7930, 8049, 8048], [7931, 7932, 8049], [7932, 8050, 8049], [7932, 7933, 8051], [7932, 8051, 8050], [7933, 7934, 8051], [7934, 8052, 8051], [7934, 7935, 8053], [7934, 8053, 8052], [7935, 7936, 8053], [7936, 8054, 8053], [7936, 7937, 8055], [7936, 8055, 8054], [7937, 7938, 8055], [7938, 8056, 8055], [7938, 7939, 8057], [7938, 8057, 8056], [7939, 7940, 8057], [7940, 8058, 8057], [7940, 7941, 8059], [7940, 8059, 8058], [7941, 7942, 8059], [7942, 8060, 8059], [7942, 7943, 8061], [7942, 8061, 8060], [7943, 7944, 8061], [7944, 8062, 
8061], [7944, 7945, 8063], [7944, 8063, 8062], [7945, 7946, 8063], [7946, 8064, 8063], [7946, 7947, 8065], [7946, 8065, 8064], [7947, 7948, 8065], [7948, 8066, 8065], [7948, 7949, 8067], [7948, 8067, 8066], [7949, 7950, 8067], [7950, 8068, 8067], [7950, 7951, 8069], [7950, 8069, 8068], [7951, 7952, 8069], [7952, 8070, 8069], [7952, 7953, 8071], [7952, 8071, 8070], [7953, 7954, 8071], [7954, 8072, 8071], [7955, 7956, 8074], [7955, 8074, 8073], [7956, 7957, 8074], [7957, 8075, 8074], [7957, 7958, 8076], [7957, 8076, 8075], [7958, 7959, 8076], [7959, 8077, 8076], [7959, 7960, 8078], [7959, 8078, 8077], [7960, 7961, 8078], [7961, 8079, 8078], [7961, 7962, 8080], [7961, 8080, 8079], [7962, 7963, 8080], [7963, 8081, 8080], [7963, 7964, 8082], [7963, 8082, 8081], [7964, 7965, 8082], [7965, 8083, 8082], [7965, 7966, 8084], [7965, 8084, 8083], [7966, 7967, 8084], [7967, 8085, 8084], [7967, 7968, 8086], [7967, 8086, 8085], [7968, 7969, 8086], [7969, 8087, 8086], [7969, 7970, 8088], [7969, 8088, 8087], [7970, 7971, 8088], [7971, 8089, 8088], [7971, 7972, 8090], [7971, 8090, 8089], [7972, 7973, 8090], [7973, 8091, 8090], [7974, 7975, 8092], [7975, 8093, 8092], [7975, 7976, 8094], [7975, 8094, 8093], [7976, 7977, 8094], [7977, 8095, 8094], [7977, 7978, 8096], [7977, 8096, 8095], [7978, 7979, 8096], [7979, 8097, 8096], [7979, 7980, 8098], [7979, 8098, 8097], [7980, 7981, 8098], [7981, 8099, 8098], [7981, 7982, 8100], [7981, 8100, 8099], [7982, 7983, 8100], [7983, 8101, 8100], [7983, 7984, 8102], [7983, 8102, 8101], [7984, 7985, 8102], [7985, 8103, 8102], [7985, 7986, 8104], [7985, 8104, 8103], [7986, 7987, 8104], [7987, 8105, 8104], [7987, 7988, 8106], [7987, 8106, 8105], [7988, 7989, 8106], [7989, 8107, 8106], [7989, 7990, 8108], [7989, 8108, 8107], [7990, 7991, 8108], [7991, 8109, 8108], [7991, 7992, 8110], [7991, 8110, 8109], [7992, 7993, 8110], [7993, 8111, 8110], [7993, 7994, 8112], [7993, 8112, 8111], [7994, 7995, 8112], [7995, 8113, 8112], [7995, 7996, 8114], [7995, 8114, 8113], [7996, 7997, 8114], [7997, 8115, 8114], [7997, 7998, 8116], [7997, 8116, 8115], [7998, 7999, 8116], [7999, 8117, 8116], [7999, 8000, 8118], [7999, 8118, 8117], [8000, 8001, 8118], [8001, 8119, 8118], [8001, 8002, 8120], [8001, 8120, 8119], [8002, 8003, 8120], [8003, 8121, 8120], [8003, 8004, 8122], [8003, 8122, 8121], [8004, 8005, 8122], [8005, 8123, 8122], [8005, 8006, 8124], [8005, 8124, 8123], [8006, 8007, 8124], [8007, 8125, 8124], [8007, 8008, 8126], [8007, 8126, 8125], [8008, 8009, 8126], [8009, 8127, 8126], [8009, 8010, 8128], [8009, 8128, 8127], [8010, 8011, 8128], [8011, 8129, 8128], [8011, 8012, 8130], [8011, 8130, 8129], [8012, 8013, 8130], [8013, 8131, 8130], [8013, 8014, 8132], [8013, 8132, 8131], [8014, 8015, 8132], [8015, 8133, 8132], [8015, 8016, 8134], [8015, 8134, 8133], [8016, 8017, 8134], [8017, 8135, 8134], [8017, 8018, 8136], [8017, 8136, 8135], [8018, 8019, 8136], [8019, 8137, 8136], [8019, 8020, 8138], [8019, 8138, 8137], [8020, 8021, 8138], [8021, 8139, 8138], [8021, 8022, 8140], [8021, 8140, 8139], [8022, 8023, 8140], [8023, 8141, 8140], [8023, 8024, 8142], [8023, 8142, 8141], [8024, 8025, 8142], [8025, 8143, 8142], [8025, 8026, 8144], [8025, 8144, 8143], [8026, 8027, 8144], [8027, 8145, 8144], [8027, 8028, 8146], [8027, 8146, 8145], [8028, 8029, 8146], [8029, 8147, 8146], [8029, 8030, 8148], [8029, 8148, 8147], [8030, 8031, 8148], [8031, 8149, 8148], [8031, 8032, 8150], [8031, 8150, 8149], [8032, 8033, 8150], [8033, 8151, 8150], [8033, 8034, 8152], [8033, 8152, 8151], [8034, 8035, 8152], [8035, 
8153, 8152], [8035, 8036, 8154], [8035, 8154, 8153], [8036, 8037, 8154], [8037, 8155, 8154], [8037, 8038, 8156], [8037, 8156, 8155], [8038, 8039, 8156], [8039, 8157, 8156], [8039, 8040, 8158], [8039, 8158, 8157], [8040, 8041, 8158], [8041, 8159, 8158], [8041, 8042, 8160], [8041, 8160, 8159], [8042, 8043, 8160], [8043, 8161, 8160], [8043, 8044, 8162], [8043, 8162, 8161], [8044, 8045, 8162], [8045, 8163, 8162], [8045, 8046, 8164], [8045, 8164, 8163], [8046, 8047, 8164], [8047, 8165, 8164], [8047, 8048, 8166], [8047, 8166, 8165], [8048, 8049, 8166], [8049, 8167, 8166], [8049, 8050, 8168], [8049, 8168, 8167], [8050, 8051, 8168], [8051, 8169, 8168], [8051, 8052, 8170], [8051, 8170, 8169], [8052, 8053, 8170], [8053, 8171, 8170], [8053, 8054, 8172], [8053, 8172, 8171], [8054, 8055, 8172], [8055, 8173, 8172], [8055, 8056, 8174], [8055, 8174, 8173], [8056, 8057, 8174], [8057, 8175, 8174], [8057, 8058, 8176], [8057, 8176, 8175], [8058, 8059, 8176], [8059, 8177, 8176], [8059, 8060, 8178], [8059, 8178, 8177], [8060, 8061, 8178], [8061, 8179, 8178], [8061, 8062, 8180], [8061, 8180, 8179], [8062, 8063, 8180], [8063, 8181, 8180], [8063, 8064, 8182], [8063, 8182, 8181], [8064, 8065, 8182], [8065, 8183, 8182], [8065, 8066, 8184], [8065, 8184, 8183], [8066, 8067, 8184], [8067, 8185, 8184], [8067, 8068, 8186], [8067, 8186, 8185], [8068, 8069, 8186], [8069, 8187, 8186], [8069, 8070, 8188], [8069, 8188, 8187], [8070, 8071, 8188], [8071, 8189, 8188], [8071, 8072, 8190], [8071, 8190, 8189], [8073, 8074, 8191], [8074, 8192, 8191], [8074, 8075, 8193], [8074, 8193, 8192], [8075, 8076, 8193], [8076, 8194, 8193], [8076, 8077, 8195], [8076, 8195, 8194], [8077, 8078, 8195], [8078, 8196, 8195], [8078, 8079, 8197], [8078, 8197, 8196], [8079, 8080, 8197], [8080, 8198, 8197], [8080, 8081, 8199], [8080, 8199, 8198], [8081, 8082, 8199], [8082, 8200, 8199], [8082, 8083, 8201], [8082, 8201, 8200], [8083, 8084, 8201], [8084, 8202, 8201], [8084, 8085, 8203], [8084, 8203, 8202], [8085, 8086, 8203], [8086, 8204, 8203], [8086, 8087, 8205], [8086, 8205, 8204], [8087, 8088, 8205], [8088, 8206, 8205], [8088, 8089, 8207], [8088, 8207, 8206], [8089, 8090, 8207], [8090, 8208, 8207], [8090, 8091, 8209], [8090, 8209, 8208], [8092, 8093, 8211], [8092, 8211, 8210], [8093, 8094, 8211], [8094, 8212, 8211], [8094, 8095, 8213], [8094, 8213, 8212], [8095, 8096, 8213], [8096, 8214, 8213], [8096, 8097, 8215], [8096, 8215, 8214], [8097, 8098, 8215], [8098, 8216, 8215], [8098, 8099, 8217], [8098, 8217, 8216], [8099, 8100, 8217], [8100, 8218, 8217], [8100, 8101, 8219], [8100, 8219, 8218], [8101, 8102, 8219], [8102, 8220, 8219], [8102, 8103, 8221], [8102, 8221, 8220], [8103, 8104, 8221], [8104, 8222, 8221], [8104, 8105, 8223], [8104, 8223, 8222], [8105, 8106, 8223], [8106, 8224, 8223], [8106, 8107, 8225], [8106, 8225, 8224], [8107, 8108, 8225], [8108, 8226, 8225], [8108, 8109, 8227], [8108, 8227, 8226], [8109, 8110, 8227], [8110, 8228, 8227], [8110, 8111, 8229], [8110, 8229, 8228], [8111, 8112, 8229], [8112, 8230, 8229], [8112, 8113, 8231], [8112, 8231, 8230], [8113, 8114, 8231], [8114, 8232, 8231], [8114, 8115, 8233], [8114, 8233, 8232], [8115, 8116, 8233], [8116, 8234, 8233], [8116, 8117, 8235], [8116, 8235, 8234], [8117, 8118, 8235], [8118, 8236, 8235], [8118, 8119, 8237], [8118, 8237, 8236], [8119, 8120, 8237], [8120, 8238, 8237], [8120, 8121, 8239], [8120, 8239, 8238], [8121, 8122, 8239], [8122, 8240, 8239], [8122, 8123, 8241], [8122, 8241, 8240], [8123, 8124, 8241], [8124, 8242, 8241], [8124, 8125, 8243], [8124, 8243, 8242], [8125, 8126, 8243], 
[8126, 8244, 8243], [8126, 8127, 8245], [8126, 8245, 8244], [8127, 8128, 8245], [8128, 8246, 8245], [8128, 8129, 8247], [8128, 8247, 8246], [8129, 8130, 8247], [8130, 8248, 8247], [8130, 8131, 8249], [8130, 8249, 8248], [8131, 8132, 8249], [8132, 8250, 8249], [8132, 8133, 8251], [8132, 8251, 8250], [8133, 8134, 8251], [8134, 8252, 8251], [8134, 8135, 8253], [8134, 8253, 8252], [8135, 8136, 8253], [8136, 8254, 8253], [8136, 8137, 8255], [8136, 8255, 8254], [8137, 8138, 8255], [8138, 8256, 8255], [8138, 8139, 8257], [8138, 8257, 8256], [8139, 8140, 8257], [8140, 8258, 8257], [8140, 8141, 8259], [8140, 8259, 8258], [8141, 8142, 8259], [8142, 8260, 8259], [8142, 8143, 8261], [8142, 8261, 8260], [8143, 8144, 8261], [8144, 8262, 8261], [8144, 8145, 8263], [8144, 8263, 8262], [8145, 8146, 8263], [8146, 8264, 8263], [8146, 8147, 8265], [8146, 8265, 8264], [8147, 8148, 8265], [8148, 8266, 8265], [8148, 8149, 8267], [8148, 8267, 8266], [8149, 8150, 8267], [8150, 8268, 8267], [8150, 8151, 8269], [8150, 8269, 8268], [8151, 8152, 8269], [8152, 8270, 8269], [8152, 8153, 8271], [8152, 8271, 8270], [8153, 8154, 8271], [8154, 8272, 8271], [8154, 8155, 8273], [8154, 8273, 8272], [8155, 8156, 8273], [8156, 8274, 8273], [8156, 8157, 8275], [8156, 8275, 8274], [8157, 8158, 8275], [8158, 8276, 8275], [8158, 8159, 8277], [8158, 8277, 8276], [8159, 8160, 8277], [8160, 8278, 8277], [8160, 8161, 8279], [8160, 8279, 8278], [8161, 8162, 8279], [8162, 8280, 8279], [8162, 8163, 8281], [8162, 8281, 8280], [8163, 8164, 8281], [8164, 8282, 8281], [8164, 8165, 8283], [8164, 8283, 8282], [8165, 8166, 8283], [8166, 8284, 8283], [8166, 8167, 8285], [8166, 8285, 8284], [8167, 8168, 8285], [8168, 8286, 8285], [8168, 8169, 8287], [8168, 8287, 8286], [8169, 8170, 8287], [8170, 8288, 8287], [8170, 8171, 8289], [8170, 8289, 8288], [8171, 8172, 8289], [8172, 8290, 8289], [8172, 8173, 8291], [8172, 8291, 8290], [8173, 8174, 8291], [8174, 8292, 8291], [8174, 8175, 8293], [8174, 8293, 8292], [8175, 8176, 8293], [8176, 8294, 8293], [8176, 8177, 8295], [8176, 8295, 8294], [8177, 8178, 8295], [8178, 8296, 8295], [8178, 8179, 8297], [8178, 8297, 8296], [8179, 8180, 8297], [8180, 8298, 8297], [8180, 8181, 8299], [8180, 8299, 8298], [8181, 8182, 8299], [8182, 8300, 8299], [8182, 8183, 8301], [8182, 8301, 8300], [8183, 8184, 8301], [8184, 8302, 8301], [8184, 8185, 8303], [8184, 8303, 8302], [8185, 8186, 8303], [8186, 8304, 8303], [8186, 8187, 8305], [8186, 8305, 8304], [8187, 8188, 8305], [8188, 8306, 8305], [8188, 8189, 8307], [8188, 8307, 8306], [8189, 8190, 8307], [8190, 8308, 8307], [8191, 8192, 8310], [8191, 8310, 8309], [8192, 8193, 8310], [8193, 8311, 8310], [8193, 8194, 8312], [8193, 8312, 8311], [8194, 8195, 8312], [8195, 8313, 8312], [8195, 8196, 8314], [8195, 8314, 8313], [8196, 8197, 8314], [8197, 8315, 8314], [8197, 8198, 8316], [8197, 8316, 8315], [8198, 8199, 8316], [8199, 8317, 8316], [8199, 8200, 8318], [8199, 8318, 8317], [8200, 8201, 8318], [8201, 8319, 8318], [8201, 8202, 8320], [8201, 8320, 8319], [8202, 8203, 8320], [8203, 8321, 8320], [8203, 8204, 8322], [8203, 8322, 8321], [8204, 8205, 8322], [8205, 8323, 8322], [8205, 8206, 8324], [8205, 8324, 8323], [8206, 8207, 8324], [8207, 8325, 8324], [8207, 8208, 8326], [8207, 8326, 8325], [8208, 8209, 8326], [8209, 8327, 8326], [8210, 8211, 8328], [8211, 8329, 8328], [8211, 8212, 8330], [8211, 8330, 8329], [8212, 8213, 8330], [8213, 8331, 8330], [8213, 8214, 8332], [8213, 8332, 8331], [8214, 8215, 8332], [8215, 8333, 8332], [8215, 8216, 8334], [8215, 8334, 8333], [8216, 8217, 
8334], [8217, 8335, 8334], [8217, 8218, 8336], [8217, 8336, 8335], [8218, 8219, 8336], [8219, 8337, 8336], [8219, 8220, 8338], [8219, 8338, 8337], [8220, 8221, 8338], [8221, 8339, 8338], [8221, 8222, 8340], [8221, 8340, 8339], [8222, 8223, 8340], [8223, 8341, 8340], [8223, 8224, 8342], [8223, 8342, 8341], [8224, 8225, 8342], [8225, 8343, 8342], [8225, 8226, 8344], [8225, 8344, 8343], [8226, 8227, 8344], [8227, 8345, 8344], [8227, 8228, 8346], [8227, 8346, 8345], [8228, 8229, 8346], [8229, 8347, 8346], [8229, 8230, 8348], [8229, 8348, 8347], [8230, 8231, 8348], [8231, 8349, 8348], [8231, 8232, 8350], [8231, 8350, 8349], [8232, 8233, 8350], [8233, 8351, 8350], [8233, 8234, 8352], [8233, 8352, 8351], [8234, 8235, 8352], [8235, 8353, 8352], [8235, 8236, 8354], [8235, 8354, 8353], [8236, 8237, 8354], [8237, 8355, 8354], [8237, 8238, 8356], [8237, 8356, 8355], [8238, 8239, 8356], [8239, 8357, 8356], [8239, 8240, 8358], [8239, 8358, 8357], [8240, 8241, 8358], [8241, 8359, 8358], [8241, 8242, 8360], [8241, 8360, 8359], [8242, 8243, 8360], [8243, 8361, 8360], [8243, 8244, 8362], [8243, 8362, 8361], [8244, 8245, 8362], [8245, 8363, 8362], [8245, 8246, 8364], [8245, 8364, 8363], [8246, 8247, 8364], [8247, 8365, 8364], [8247, 8248, 8366], [8247, 8366, 8365], [8248, 8249, 8366], [8249, 8367, 8366], [8249, 8250, 8368], [8249, 8368, 8367], [8250, 8251, 8368], [8251, 8369, 8368], [8251, 8252, 8370], [8251, 8370, 8369], [8252, 8253, 8370], [8253, 8371, 8370], [8253, 8254, 8372], [8253, 8372, 8371], [8254, 8255, 8372], [8255, 8373, 8372], [8255, 8256, 8374], [8255, 8374, 8373], [8256, 8257, 8374], [8257, 8375, 8374], [8257, 8258, 8376], [8257, 8376, 8375], [8258, 8259, 8376], [8259, 8377, 8376], [8259, 8260, 8378], [8259, 8378, 8377], [8260, 8261, 8378], [8261, 8379, 8378], [8261, 8262, 8380], [8261, 8380, 8379], [8262, 8263, 8380], [8263, 8381, 8380], [8263, 8264, 8382], [8263, 8382, 8381], [8264, 8265, 8382], [8265, 8383, 8382], [8265, 8266, 8384], [8265, 8384, 8383], [8266, 8267, 8384], [8267, 8385, 8384], [8267, 8268, 8386], [8267, 8386, 8385], [8268, 8269, 8386], [8269, 8387, 8386], [8269, 8270, 8388], [8269, 8388, 8387], [8270, 8271, 8388], [8271, 8389, 8388], [8271, 8272, 8390], [8271, 8390, 8389], [8272, 8273, 8390], [8273, 8391, 8390], [8273, 8274, 8392], [8273, 8392, 8391], [8274, 8275, 8392], [8275, 8393, 8392], [8275, 8276, 8394], [8275, 8394, 8393], [8276, 8277, 8394], [8277, 8395, 8394], [8277, 8278, 8396], [8277, 8396, 8395], [8278, 8279, 8396], [8279, 8397, 8396], [8279, 8280, 8398], [8279, 8398, 8397], [8280, 8281, 8398], [8281, 8399, 8398], [8281, 8282, 8400], [8281, 8400, 8399], [8282, 8283, 8400], [8283, 8401, 8400], [8283, 8284, 8402], [8283, 8402, 8401], [8284, 8285, 8402], [8285, 8403, 8402], [8285, 8286, 8404], [8285, 8404, 8403], [8286, 8287, 8404], [8287, 8405, 8404], [8287, 8288, 8406], [8287, 8406, 8405], [8288, 8289, 8406], [8289, 8407, 8406], [8289, 8290, 8408], [8289, 8408, 8407], [8290, 8291, 8408], [8291, 8409, 8408], [8291, 8292, 8410], [8291, 8410, 8409], [8292, 8293, 8410], [8293, 8411, 8410], [8293, 8294, 8412], [8293, 8412, 8411], [8294, 8295, 8412], [8295, 8413, 8412], [8295, 8296, 8414], [8295, 8414, 8413], [8296, 8297, 8414], [8297, 8415, 8414], [8297, 8298, 8416], [8297, 8416, 8415], [8298, 8299, 8416], [8299, 8417, 8416], [8299, 8300, 8418], [8299, 8418, 8417], [8300, 8301, 8418], [8301, 8419, 8418], [8301, 8302, 8420], [8301, 8420, 8419], [8302, 8303, 8420], [8303, 8421, 8420], [8303, 8304, 8422], [8303, 8422, 8421], [8304, 8305, 8422], [8305, 8423, 8422], [8305, 
8306, 8424], [8305, 8424, 8423], [8306, 8307, 8424], [8307, 8425, 8424], [8307, 8308, 8426], [8307, 8426, 8425], [8309, 8310, 8427], [8310, 8428, 8427], [8310, 8311, 8429], [8310, 8429, 8428], [8311, 8312, 8429], [8312, 8430, 8429], [8312, 8313, 8431], [8312, 8431, 8430], [8313, 8314, 8431], [8314, 8432, 8431], [8314, 8315, 8433], [8314, 8433, 8432], [8315, 8316, 8433], [8316, 8434, 8433], [8316, 8317, 8435], [8316, 8435, 8434], [8317, 8318, 8435], [8318, 8436, 8435], [8318, 8319, 8437], [8318, 8437, 8436], [8319, 8320, 8437], [8320, 8438, 8437], [8320, 8321, 8439], [8320, 8439, 8438], [8321, 8322, 8439], [8322, 8440, 8439], [8322, 8323, 8441], [8322, 8441, 8440], [8323, 8324, 8441], [8324, 8442, 8441], [8324, 8325, 8443], [8324, 8443, 8442], [8325, 8326, 8443], [8326, 8444, 8443], [8326, 8327, 8445], [8326, 8445, 8444], [8328, 8329, 8447], [8328, 8447, 8446], [8329, 8330, 8447], [8330, 8448, 8447], [8330, 8331, 8449], [8330, 8449, 8448], [8331, 8332, 8449], [8332, 8450, 8449], [8332, 8333, 8451], [8332, 8451, 8450], [8333, 8334, 8451], [8334, 8452, 8451], [8334, 8335, 8453], [8334, 8453, 8452], [8335, 8336, 8453], [8336, 8454, 8453], [8336, 8337, 8455], [8336, 8455, 8454], [8337, 8338, 8455], [8338, 8456, 8455], [8338, 8339, 8457], [8338, 8457, 8456], [8339, 8340, 8457], [8340, 8458, 8457], [8340, 8341, 8459], [8340, 8459, 8458], [8341, 8342, 8459], [8342, 8460, 8459], [8342, 8343, 8461], [8342, 8461, 8460], [8343, 8344, 8461], [8344, 8462, 8461], [8344, 8345, 8463], [8344, 8463, 8462], [8345, 8346, 8463], [8346, 8464, 8463], [8346, 8347, 8465], [8346, 8465, 8464], [8347, 8348, 8465], [8348, 8466, 8465], [8348, 8349, 8467], [8348, 8467, 8466], [8349, 8350, 8467], [8350, 8468, 8467], [8350, 8351, 8469], [8350, 8469, 8468], [8351, 8352, 8469], [8352, 8470, 8469], [8352, 8353, 8471], [8352, 8471, 8470], [8353, 8354, 8471], [8354, 8472, 8471], [8354, 8355, 8473], [8354, 8473, 8472], [8355, 8356, 8473], [8356, 8474, 8473], [8356, 8357, 8475], [8356, 8475, 8474], [8357, 8358, 8475], [8358, 8476, 8475], [8358, 8359, 8477], [8358, 8477, 8476], [8359, 8360, 8477], [8360, 8478, 8477], [8360, 8361, 8479], [8360, 8479, 8478], [8361, 8362, 8479], [8362, 8480, 8479], [8362, 8363, 8481], [8362, 8481, 8480], [8363, 8364, 8481], [8364, 8482, 8481], [8364, 8365, 8483], [8364, 8483, 8482], [8365, 8366, 8483], [8366, 8484, 8483], [8366, 8367, 8485], [8366, 8485, 8484], [8367, 8368, 8485], [8368, 8486, 8485], [8368, 8369, 8487], [8368, 8487, 8486], [8369, 8370, 8487], [8370, 8488, 8487], [8370, 8371, 8489], [8370, 8489, 8488], [8371, 8372, 8489], [8372, 8490, 8489], [8372, 8373, 8491], [8372, 8491, 8490], [8373, 8374, 8491], [8374, 8492, 8491], [8374, 8375, 8493], [8374, 8493, 8492], [8375, 8376, 8493], [8376, 8494, 8493], [8376, 8377, 8495], [8376, 8495, 8494], [8377, 8378, 8495], [8378, 8496, 8495], [8378, 8379, 8497], [8378, 8497, 8496], [8379, 8380, 8497], [8380, 8498, 8497], [8380, 8381, 8499], [8380, 8499, 8498], [8381, 8382, 8499], [8382, 8500, 8499], [8382, 8383, 8501], [8382, 8501, 8500], [8383, 8384, 8501], [8384, 8502, 8501], [8384, 8385, 8503], [8384, 8503, 8502], [8385, 8386, 8503], [8386, 8504, 8503], [8386, 8387, 8505], [8386, 8505, 8504], [8387, 8388, 8505], [8388, 8506, 8505], [8388, 8389, 8507], [8388, 8507, 8506], [8389, 8390, 8507], [8390, 8508, 8507], [8390, 8391, 8509], [8390, 8509, 8508], [8391, 8392, 8509], [8392, 8510, 8509], [8392, 8393, 8511], [8392, 8511, 8510], [8393, 8394, 8511], [8394, 8512, 8511], [8394, 8395, 8513], [8394, 8513, 8512], [8395, 8396, 8513], [8396, 8514, 8513], 
[8396, 8397, 8515], [8396, 8515, 8514], [8397, 8398, 8515], [8398, 8516, 8515], [8398, 8399, 8517], [8398, 8517, 8516], [8399, 8400, 8517], [8400, 8518, 8517], [8400, 8401, 8519], [8400, 8519, 8518], [8401, 8402, 8519], [8402, 8520, 8519], [8402, 8403, 8521], [8402, 8521, 8520], [8403, 8404, 8521], [8404, 8522, 8521], [8404, 8405, 8523], [8404, 8523, 8522], [8405, 8406, 8523], [8406, 8524, 8523], [8406, 8407, 8525], [8406, 8525, 8524], [8407, 8408, 8525], [8408, 8526, 8525], [8408, 8409, 8527], [8408, 8527, 8526], [8409, 8410, 8527], [8410, 8528, 8527], [8410, 8411, 8529], [8410, 8529, 8528], [8411, 8412, 8529], [8412, 8530, 8529], [8412, 8413, 8531], [8412, 8531, 8530], [8413, 8414, 8531], [8414, 8532, 8531], [8414, 8415, 8533], [8414, 8533, 8532], [8415, 8416, 8533], [8416, 8534, 8533], [8416, 8417, 8535], [8416, 8535, 8534], [8417, 8418, 8535], [8418, 8536, 8535], [8418, 8419, 8537], [8418, 8537, 8536], [8419, 8420, 8537], [8420, 8538, 8537], [8420, 8421, 8539], [8420, 8539, 8538], [8421, 8422, 8539], [8422, 8540, 8539], [8422, 8423, 8541], [8422, 8541, 8540], [8423, 8424, 8541], [8424, 8542, 8541], [8424, 8425, 8543], [8424, 8543, 8542], [8425, 8426, 8543], [8426, 8544, 8543], [8427, 8428, 8546], [8427, 8546, 8545], [8428, 8429, 8546], [8429, 8547, 8546], [8429, 8430, 8548], [8429, 8548, 8547], [8430, 8431, 8548], [8431, 8549, 8548], [8431, 8432, 8550], [8431, 8550, 8549], [8432, 8433, 8550], [8433, 8551, 8550], [8433, 8434, 8552], [8433, 8552, 8551], [8434, 8435, 8552], [8435, 8553, 8552], [8435, 8436, 8554], [8435, 8554, 8553], [8436, 8437, 8554], [8437, 8555, 8554], [8437, 8438, 8556], [8437, 8556, 8555], [8438, 8439, 8556], [8439, 8557, 8556], [8439, 8440, 8558], [8439, 8558, 8557], [8440, 8441, 8558], [8441, 8559, 8558], [8441, 8442, 8560], [8441, 8560, 8559], [8442, 8443, 8560], [8443, 8561, 8560], [8443, 8444, 8562], [8443, 8562, 8561], [8444, 8445, 8562], [8445, 8563, 8562], [8446, 8447, 8564], [8447, 8565, 8564], [8447, 8448, 8566], [8447, 8566, 8565], [8448, 8449, 8566], [8449, 8567, 8566], [8449, 8450, 8568], [8449, 8568, 8567], [8450, 8451, 8568], [8451, 8569, 8568], [8451, 8452, 8570], [8451, 8570, 8569], [8452, 8453, 8570], [8453, 8571, 8570], [8453, 8454, 8572], [8453, 8572, 8571], [8454, 8455, 8572], [8455, 8573, 8572], [8455, 8456, 8574], [8455, 8574, 8573], [8456, 8457, 8574], [8457, 8575, 8574], [8457, 8458, 8576], [8457, 8576, 8575], [8458, 8459, 8576], [8459, 8577, 8576], [8459, 8460, 8578], [8459, 8578, 8577], [8460, 8461, 8578], [8461, 8579, 8578], [8461, 8462, 8580], [8461, 8580, 8579], [8462, 8463, 8580], [8463, 8581, 8580], [8463, 8464, 8582], [8463, 8582, 8581], [8464, 8465, 8582], [8465, 8583, 8582], [8465, 8466, 8584], [8465, 8584, 8583], [8466, 8467, 8584], [8467, 8585, 8584], [8467, 8468, 8586], [8467, 8586, 8585], [8468, 8469, 8586], [8469, 8587, 8586], [8469, 8470, 8588], [8469, 8588, 8587], [8470, 8471, 8588], [8471, 8589, 8588], [8471, 8472, 8590], [8471, 8590, 8589], [8472, 8473, 8590], [8473, 8591, 8590], [8473, 8474, 8592], [8473, 8592, 8591], [8474, 8475, 8592], [8475, 8593, 8592], [8475, 8476, 8594], [8475, 8594, 8593], [8476, 8477, 8594], [8477, 8595, 8594], [8477, 8478, 8596], [8477, 8596, 8595], [8478, 8479, 8596], [8479, 8597, 8596], [8479, 8480, 8598], [8479, 8598, 8597], [8480, 8481, 8598], [8481, 8599, 8598], [8481, 8482, 8600], [8481, 8600, 8599], [8482, 8483, 8600], [8483, 8601, 8600], [8483, 8484, 8602], [8483, 8602, 8601], [8484, 8485, 8602], [8485, 8603, 8602], [8485, 8486, 8604], [8485, 8604, 8603], [8486, 8487, 8604], [8487, 8605, 
8604], [8487, 8488, 8606], [8487, 8606, 8605], [8488, 8489, 8606], [8489, 8607, 8606], [8489, 8490, 8608], [8489, 8608, 8607], [8490, 8491, 8608], [8491, 8609, 8608], [8491, 8492, 8610], [8491, 8610, 8609], [8492, 8493, 8610], [8493, 8611, 8610], [8493, 8494, 8612], [8493, 8612, 8611], [8494, 8495, 8612], [8495, 8613, 8612], [8495, 8496, 8614], [8495, 8614, 8613], [8496, 8497, 8614], [8497, 8615, 8614], [8497, 8498, 8616], [8497, 8616, 8615], [8498, 8499, 8616], [8499, 8617, 8616], [8499, 8500, 8618], [8499, 8618, 8617], [8500, 8501, 8618], [8501, 8619, 8618], [8501, 8502, 8620], [8501, 8620, 8619], [8502, 8503, 8620], [8503, 8621, 8620], [8503, 8504, 8622], [8503, 8622, 8621], [8504, 8505, 8622], [8505, 8623, 8622], [8505, 8506, 8624], [8505, 8624, 8623], [8506, 8507, 8624], [8507, 8625, 8624], [8507, 8508, 8626], [8507, 8626, 8625], [8508, 8509, 8626], [8509, 8627, 8626], [8509, 8510, 8628], [8509, 8628, 8627], [8510, 8511, 8628], [8511, 8629, 8628], [8511, 8512, 8630], [8511, 8630, 8629], [8512, 8513, 8630], [8513, 8631, 8630], [8513, 8514, 8632], [8513, 8632, 8631], [8514, 8515, 8632], [8515, 8633, 8632], [8515, 8516, 8634], [8515, 8634, 8633], [8516, 8517, 8634], [8517, 8635, 8634], [8517, 8518, 8636], [8517, 8636, 8635], [8518, 8519, 8636], [8519, 8637, 8636], [8519, 8520, 8638], [8519, 8638, 8637], [8520, 8521, 8638], [8521, 8639, 8638], [8521, 8522, 8640], [8521, 8640, 8639], [8522, 8523, 8640], [8523, 8641, 8640], [8523, 8524, 8642], [8523, 8642, 8641], [8524, 8525, 8642], [8525, 8643, 8642], [8525, 8526, 8644], [8525, 8644, 8643], [8526, 8527, 8644], [8527, 8645, 8644], [8527, 8528, 8646], [8527, 8646, 8645], [8528, 8529, 8646], [8529, 8647, 8646], [8529, 8530, 8648], [8529, 8648, 8647], [8530, 8531, 8648], [8531, 8649, 8648], [8531, 8532, 8650], [8531, 8650, 8649], [8532, 8533, 8650], [8533, 8651, 8650], [8533, 8534, 8652], [8533, 8652, 8651], [8534, 8535, 8652], [8535, 8653, 8652], [8535, 8536, 8654], [8535, 8654, 8653], [8536, 8537, 8654], [8537, 8655, 8654], [8537, 8538, 8656], [8537, 8656, 8655], [8538, 8539, 8656], [8539, 8657, 8656], [8539, 8540, 8658], [8539, 8658, 8657], [8540, 8541, 8658], [8541, 8659, 8658], [8541, 8542, 8660], [8541, 8660, 8659], [8542, 8543, 8660], [8543, 8661, 8660], [8543, 8544, 8662], [8543, 8662, 8661], [8545, 8546, 8663], [8546, 8664, 8663], [8546, 8547, 8665], [8546, 8665, 8664], [8547, 8548, 8665], [8548, 8666, 8665], [8548, 8549, 8667], [8548, 8667, 8666], [8549, 8550, 8667], [8550, 8668, 8667], [8550, 8551, 8669], [8550, 8669, 8668], [8551, 8552, 8669], [8552, 8670, 8669], [8552, 8553, 8671], [8552, 8671, 8670], [8553, 8554, 8671], [8554, 8672, 8671], [8554, 8555, 8673], [8554, 8673, 8672], [8555, 8556, 8673], [8556, 8674, 8673], [8556, 8557, 8675], [8556, 8675, 8674], [8557, 8558, 8675], [8558, 8676, 8675], [8558, 8559, 8677], [8558, 8677, 8676], [8559, 8560, 8677], [8560, 8678, 8677], [8560, 8561, 8679], [8560, 8679, 8678], [8561, 8562, 8679], [8562, 8680, 8679], [8562, 8563, 8681], [8562, 8681, 8680], [8564, 8565, 8683], [8564, 8683, 8682], [8565, 8566, 8683], [8566, 8684, 8683], [8566, 8567, 8685], [8566, 8685, 8684], [8567, 8568, 8685], [8568, 8686, 8685], [8568, 8569, 8687], [8568, 8687, 8686], [8569, 8570, 8687], [8570, 8688, 8687], [8570, 8571, 8689], [8570, 8689, 8688], [8571, 8572, 8689], [8572, 8690, 8689], [8572, 8573, 8691], [8572, 8691, 8690], [8573, 8574, 8691], [8574, 8692, 8691], [8574, 8575, 8693], [8574, 8693, 8692], [8575, 8576, 8693], [8576, 8694, 8693], [8576, 8577, 8695], [8576, 8695, 8694], [8577, 8578, 8695], [8578, 
8696, 8695], [8578, 8579, 8697], [8578, 8697, 8696], [8579, 8580, 8697], [8580, 8698, 8697], [8580, 8581, 8699], [8580, 8699, 8698], [8581, 8582, 8699], [8582, 8700, 8699], [8582, 8583, 8701], [8582, 8701, 8700], [8583, 8584, 8701], [8584, 8702, 8701], [8584, 8585, 8703], [8584, 8703, 8702], [8585, 8586, 8703], [8586, 8704, 8703], [8586, 8587, 8705], [8586, 8705, 8704], [8587, 8588, 8705], [8588, 8706, 8705], [8588, 8589, 8707], [8588, 8707, 8706], [8589, 8590, 8707], [8590, 8708, 8707], [8590, 8591, 8709], [8590, 8709, 8708], [8591, 8592, 8709], [8592, 8710, 8709], [8592, 8593, 8711], [8592, 8711, 8710], [8593, 8594, 8711], [8594, 8712, 8711], [8594, 8595, 8713], [8594, 8713, 8712], [8595, 8596, 8713], [8596, 8714, 8713], [8596, 8597, 8715], [8596, 8715, 8714], [8597, 8598, 8715], [8598, 8716, 8715], [8598, 8599, 8717], [8598, 8717, 8716], [8599, 8600, 8717], [8600, 8718, 8717], [8600, 8601, 8719], [8600, 8719, 8718], [8601, 8602, 8719], [8602, 8720, 8719], [8602, 8603, 8721], [8602, 8721, 8720], [8603, 8604, 8721], [8604, 8722, 8721], [8604, 8605, 8723], [8604, 8723, 8722], [8605, 8606, 8723], [8606, 8724, 8723], [8606, 8607, 8725], [8606, 8725, 8724], [8607, 8608, 8725], [8608, 8726, 8725], [8608, 8609, 8727], [8608, 8727, 8726], [8609, 8610, 8727], [8610, 8728, 8727], [8610, 8611, 8729], [8610, 8729, 8728], [8611, 8612, 8729], [8612, 8730, 8729], [8612, 8613, 8731], [8612, 8731, 8730], [8613, 8614, 8731], [8614, 8732, 8731], [8614, 8615, 8733], [8614, 8733, 8732], [8615, 8616, 8733], [8616, 8734, 8733], [8616, 8617, 8735], [8616, 8735, 8734], [8617, 8618, 8735], [8618, 8736, 8735], [8618, 8619, 8737], [8618, 8737, 8736], [8619, 8620, 8737], [8620, 8738, 8737], [8620, 8621, 8739], [8620, 8739, 8738], [8621, 8622, 8739], [8622, 8740, 8739], [8622, 8623, 8741], [8622, 8741, 8740], [8623, 8624, 8741], [8624, 8742, 8741], [8624, 8625, 8743], [8624, 8743, 8742], [8625, 8626, 8743], [8626, 8744, 8743], [8626, 8627, 8745], [8626, 8745, 8744], [8627, 8628, 8745], [8628, 8746, 8745], [8628, 8629, 8747], [8628, 8747, 8746], [8629, 8630, 8747], [8630, 8748, 8747], [8630, 8631, 8749], [8630, 8749, 8748], [8631, 8632, 8749], [8632, 8750, 8749], [8632, 8633, 8751], [8632, 8751, 8750], [8633, 8634, 8751], [8634, 8752, 8751], [8634, 8635, 8753], [8634, 8753, 8752], [8635, 8636, 8753], [8636, 8754, 8753], [8636, 8637, 8755], [8636, 8755, 8754], [8637, 8638, 8755], [8638, 8756, 8755], [8638, 8639, 8757], [8638, 8757, 8756], [8639, 8640, 8757], [8640, 8758, 8757], [8640, 8641, 8759], [8640, 8759, 8758], [8641, 8642, 8759], [8642, 8760, 8759], [8642, 8643, 8761], [8642, 8761, 8760], [8643, 8644, 8761], [8644, 8762, 8761], [8644, 8645, 8763], [8644, 8763, 8762], [8645, 8646, 8763], [8646, 8764, 8763], [8646, 8647, 8765], [8646, 8765, 8764], [8647, 8648, 8765], [8648, 8766, 8765], [8648, 8649, 8767], [8648, 8767, 8766], [8649, 8650, 8767], [8650, 8768, 8767], [8650, 8651, 8769], [8650, 8769, 8768], [8651, 8652, 8769], [8652, 8770, 8769], [8652, 8653, 8771], [8652, 8771, 8770], [8653, 8654, 8771], [8654, 8772, 8771], [8654, 8655, 8773], [8654, 8773, 8772], [8655, 8656, 8773], [8656, 8774, 8773], [8656, 8657, 8775], [8656, 8775, 8774], [8657, 8658, 8775], [8658, 8776, 8775], [8658, 8659, 8777], [8658, 8777, 8776], [8659, 8660, 8777], [8660, 8778, 8777], [8660, 8661, 8779], [8660, 8779, 8778], [8661, 8662, 8779], [8662, 8780, 8779], [8663, 8664, 8782], [8663, 8782, 8781], [8664, 8665, 8782], [8665, 8783, 8782], [8665, 8666, 8784], [8665, 8784, 8783], [8666, 8667, 8784], [8667, 8785, 8784], [8667, 8668, 8786], 
[8667, 8786, 8785], [8668, 8669, 8786], [8669, 8787, 8786], [8669, 8670, 8788], [8669, 8788, 8787], [8670, 8671, 8788], [8671, 8789, 8788], [8671, 8672, 8790], [8671, 8790, 8789], [8672, 8673, 8790], [8673, 8791, 8790], [8673, 8674, 8792], [8673, 8792, 8791], [8674, 8675, 8792], [8675, 8793, 8792], [8675, 8676, 8794], [8675, 8794, 8793], [8676, 8677, 8794], [8677, 8795, 8794], [8677, 8678, 8796], [8677, 8796, 8795], [8678, 8679, 8796], [8679, 8797, 8796], [8679, 8680, 8798], [8679, 8798, 8797], [8680, 8681, 8798], [8681, 8799, 8798], [8682, 8683, 8800], [8683, 8801, 8800], [8683, 8684, 8802], [8683, 8802, 8801], [8684, 8685, 8802], [8685, 8803, 8802], [8685, 8686, 8804], [8685, 8804, 8803], [8686, 8687, 8804], [8687, 8805, 8804], [8687, 8688, 8806], [8687, 8806, 8805], [8688, 8689, 8806], [8689, 8807, 8806], [8689, 8690, 8808], [8689, 8808, 8807], [8690, 8691, 8808], [8691, 8809, 8808], [8691, 8692, 8810], [8691, 8810, 8809], [8692, 8693, 8810], [8693, 8811, 8810], [8693, 8694, 8812], [8693, 8812, 8811], [8694, 8695, 8812], [8695, 8813, 8812], [8695, 8696, 8814], [8695, 8814, 8813], [8696, 8697, 8814], [8697, 8815, 8814], [8697, 8698, 8816], [8697, 8816, 8815], [8698, 8699, 8816], [8699, 8817, 8816], [8699, 8700, 8818], [8699, 8818, 8817], [8700, 8701, 8818], [8701, 8819, 8818], [8701, 8702, 8820], [8701, 8820, 8819], [8702, 8703, 8820], [8703, 8821, 8820], [8703, 8704, 8822], [8703, 8822, 8821], [8704, 8705, 8822], [8705, 8823, 8822], [8705, 8706, 8824], [8705, 8824, 8823], [8706, 8707, 8824], [8707, 8825, 8824], [8707, 8708, 8826], [8707, 8826, 8825], [8708, 8709, 8826], [8709, 8827, 8826], [8709, 8710, 8828], [8709, 8828, 8827], [8710, 8711, 8828], [8711, 8829, 8828], [8711, 8712, 8830], [8711, 8830, 8829], [8712, 8713, 8830], [8713, 8831, 8830], [8713, 8714, 8832], [8713, 8832, 8831], [8714, 8715, 8832], [8715, 8833, 8832], [8715, 8716, 8834], [8715, 8834, 8833], [8716, 8717, 8834], [8717, 8835, 8834], [8717, 8718, 8836], [8717, 8836, 8835], [8718, 8719, 8836], [8719, 8837, 8836], [8719, 8720, 8838], [8719, 8838, 8837], [8720, 8721, 8838], [8721, 8839, 8838], [8721, 8722, 8840], [8721, 8840, 8839], [8722, 8723, 8840], [8723, 8841, 8840], [8723, 8724, 8842], [8723, 8842, 8841], [8724, 8725, 8842], [8725, 8843, 8842], [8725, 8726, 8844], [8725, 8844, 8843], [8726, 8727, 8844], [8727, 8845, 8844], [8727, 8728, 8846], [8727, 8846, 8845], [8728, 8729, 8846], [8729, 8847, 8846], [8729, 8730, 8848], [8729, 8848, 8847], [8730, 8731, 8848], [8731, 8849, 8848], [8731, 8732, 8850], [8731, 8850, 8849], [8732, 8733, 8850], [8733, 8851, 8850], [8733, 8734, 8852], [8733, 8852, 8851], [8734, 8735, 8852], [8735, 8853, 8852], [8735, 8736, 8854], [8735, 8854, 8853], [8736, 8737, 8854], [8737, 8855, 8854], [8737, 8738, 8856], [8737, 8856, 8855], [8738, 8739, 8856], [8739, 8857, 8856], [8739, 8740, 8858], [8739, 8858, 8857], [8740, 8741, 8858], [8741, 8859, 8858], [8741, 8742, 8860], [8741, 8860, 8859], [8742, 8743, 8860], [8743, 8861, 8860], [8743, 8744, 8862], [8743, 8862, 8861], [8744, 8745, 8862], [8745, 8863, 8862], [8745, 8746, 8864], [8745, 8864, 8863], [8746, 8747, 8864], [8747, 8865, 8864], [8747, 8748, 8866], [8747, 8866, 8865], [8748, 8749, 8866], [8749, 8867, 8866], [8749, 8750, 8868], [8749, 8868, 8867], [8750, 8751, 8868], [8751, 8869, 8868], [8751, 8752, 8870], [8751, 8870, 8869], [8752, 8753, 8870], [8753, 8871, 8870], [8753, 8754, 8872], [8753, 8872, 8871], [8754, 8755, 8872], [8755, 8873, 8872], [8755, 8756, 8874], [8755, 8874, 8873], [8756, 8757, 8874], [8757, 8875, 8874], [8757, 8758, 
8876], [8757, 8876, 8875], [8758, 8759, 8876], [8759, 8877, 8876], [8759, 8760, 8878], [8759, 8878, 8877], [8760, 8761, 8878], [8761, 8879, 8878], [8761, 8762, 8880], [8761, 8880, 8879], [8762, 8763, 8880], [8763, 8881, 8880], [8763, 8764, 8882], [8763, 8882, 8881], [8764, 8765, 8882], [8765, 8883, 8882], [8765, 8766, 8884], [8765, 8884, 8883], [8766, 8767, 8884], [8767, 8885, 8884], [8767, 8768, 8886], [8767, 8886, 8885], [8768, 8769, 8886], [8769, 8887, 8886], [8769, 8770, 8888], [8769, 8888, 8887], [8770, 8771, 8888], [8771, 8889, 8888], [8771, 8772, 8890], [8771, 8890, 8889], [8772, 8773, 8890], [8773, 8891, 8890], [8773, 8774, 8892], [8773, 8892, 8891], [8774, 8775, 8892], [8775, 8893, 8892], [8775, 8776, 8894], [8775, 8894, 8893], [8776, 8777, 8894], [8777, 8895, 8894], [8777, 8778, 8896], [8777, 8896, 8895], [8778, 8779, 8896], [8779, 8897, 8896], [8779, 8780, 8898], [8779, 8898, 8897], [8781, 8782, 8899], [8782, 8900, 8899], [8782, 8783, 8901], [8782, 8901, 8900], [8783, 8784, 8901], [8784, 8902, 8901], [8784, 8785, 8903], [8784, 8903, 8902], [8785, 8786, 8903], [8786, 8904, 8903], [8786, 8787, 8905], [8786, 8905, 8904], [8787, 8788, 8905], [8788, 8906, 8905], [8788, 8789, 8907], [8788, 8907, 8906], [8789, 8790, 8907], [8790, 8908, 8907], [8790, 8791, 8909], [8790, 8909, 8908], [8791, 8792, 8909], [8792, 8910, 8909], [8792, 8793, 8911], [8792, 8911, 8910], [8793, 8794, 8911], [8794, 8912, 8911], [8794, 8795, 8913], [8794, 8913, 8912], [8795, 8796, 8913], [8796, 8914, 8913], [8796, 8797, 8915], [8796, 8915, 8914], [8797, 8798, 8915], [8798, 8916, 8915], [8798, 8799, 8917], [8798, 8917, 8916], [8800, 8801, 8919], [8800, 8919, 8918], [8801, 8802, 8919], [8802, 8920, 8919], [8802, 8803, 8921], [8802, 8921, 8920], [8803, 8804, 8921], [8804, 8922, 8921], [8804, 8805, 8923], [8804, 8923, 8922], [8805, 8806, 8923], [8806, 8924, 8923], [8806, 8807, 8925], [8806, 8925, 8924], [8807, 8808, 8925], [8808, 8926, 8925], [8808, 8809, 8927], [8808, 8927, 8926], [8809, 8810, 8927], [8810, 8928, 8927], [8810, 8811, 8929], [8810, 8929, 8928], [8811, 8812, 8929], [8812, 8930, 8929], [8812, 8813, 8931], [8812, 8931, 8930], [8813, 8814, 8931], [8814, 8932, 8931], [8814, 8815, 8933], [8814, 8933, 8932], [8815, 8816, 8933], [8816, 8934, 8933], [8816, 8817, 8935], [8816, 8935, 8934], [8817, 8818, 8935], [8818, 8936, 8935], [8818, 8819, 8937], [8818, 8937, 8936], [8819, 8820, 8937], [8820, 8938, 8937], [8820, 8821, 8939], [8820, 8939, 8938], [8821, 8822, 8939], [8822, 8940, 8939], [8822, 8823, 8941], [8822, 8941, 8940], [8823, 8824, 8941], [8824, 8942, 8941], [8824, 8825, 8943], [8824, 8943, 8942], [8825, 8826, 8943], [8826, 8944, 8943], [8826, 8827, 8945], [8826, 8945, 8944], [8827, 8828, 8945], [8828, 8946, 8945], [8828, 8829, 8947], [8828, 8947, 8946], [8829, 8830, 8947], [8830, 8948, 8947], [8830, 8831, 8949], [8830, 8949, 8948], [8831, 8832, 8949], [8832, 8950, 8949], [8832, 8833, 8951], [8832, 8951, 8950], [8833, 8834, 8951], [8834, 8952, 8951], [8834, 8835, 8953], [8834, 8953, 8952], [8835, 8836, 8953], [8836, 8954, 8953], [8836, 8837, 8955], [8836, 8955, 8954], [8837, 8838, 8955], [8838, 8956, 8955], [8838, 8839, 8957], [8838, 8957, 8956], [8839, 8840, 8957], [8840, 8958, 8957], [8840, 8841, 8959], [8840, 8959, 8958], [8841, 8842, 8959], [8842, 8960, 8959], [8842, 8843, 8961], [8842, 8961, 8960], [8843, 8844, 8961], [8844, 8962, 8961], [8844, 8845, 8963], [8844, 8963, 8962], [8845, 8846, 8963], [8846, 8964, 8963], [8846, 8847, 8965], [8846, 8965, 8964], [8847, 8848, 8965], [8848, 8966, 8965], [8848, 
8849, 8967], [8848, 8967, 8966], [8849, 8850, 8967], [8850, 8968, 8967], [8850, 8851, 8969], [8850, 8969, 8968], [8851, 8852, 8969], [8852, 8970, 8969], [8852, 8853, 8971], [8852, 8971, 8970], [8853, 8854, 8971], [8854, 8972, 8971], [8854, 8855, 8973], [8854, 8973, 8972], [8855, 8856, 8973], [8856, 8974, 8973], [8856, 8857, 8975], [8856, 8975, 8974], [8857, 8858, 8975], [8858, 8976, 8975], [8858, 8859, 8977], [8858, 8977, 8976], [8859, 8860, 8977], [8860, 8978, 8977], [8860, 8861, 8979], [8860, 8979, 8978], [8861, 8862, 8979], [8862, 8980, 8979], [8862, 8863, 8981], [8862, 8981, 8980], [8863, 8864, 8981], [8864, 8982, 8981], [8864, 8865, 8983], [8864, 8983, 8982], [8865, 8866, 8983], [8866, 8984, 8983], [8866, 8867, 8985], [8866, 8985, 8984], [8867, 8868, 8985], [8868, 8986, 8985], [8868, 8869, 8987], [8868, 8987, 8986], [8869, 8870, 8987], [8870, 8988, 8987], [8870, 8871, 8989], [8870, 8989, 8988], [8871, 8872, 8989], [8872, 8990, 8989], [8872, 8873, 8991], [8872, 8991, 8990], [8873, 8874, 8991], [8874, 8992, 8991], [8874, 8875, 8993], [8874, 8993, 8992], [8875, 8876, 8993], [8876, 8994, 8993], [8876, 8877, 8995], [8876, 8995, 8994], [8877, 8878, 8995], [8878, 8996, 8995], [8878, 8879, 8997], [8878, 8997, 8996], [8879, 8880, 8997], [8880, 8998, 8997], [8880, 8881, 8999], [8880, 8999, 8998], [8881, 8882, 8999], [8882, 9000, 8999], [8882, 8883, 9001], [8882, 9001, 9000], [8883, 8884, 9001], [8884, 9002, 9001], [8884, 8885, 9003], [8884, 9003, 9002], [8885, 8886, 9003], [8886, 9004, 9003], [8886, 8887, 9005], [8886, 9005, 9004], [8887, 8888, 9005], [8888, 9006, 9005], [8888, 8889, 9007], [8888, 9007, 9006], [8889, 8890, 9007], [8890, 9008, 9007], [8890, 8891, 9009], [8890, 9009, 9008], [8891, 8892, 9009], [8892, 9010, 9009], [8892, 8893, 9011], [8892, 9011, 9010], [8893, 8894, 9011], [8894, 9012, 9011], [8894, 8895, 9013], [8894, 9013, 9012], [8895, 8896, 9013], [8896, 9014, 9013], [8896, 8897, 9015], [8896, 9015, 9014], [8897, 8898, 9015], [8898, 9016, 9015], [8899, 8900, 9018], [8899, 9018, 9017], [8900, 8901, 9018], [8901, 9019, 9018], [8901, 8902, 9020], [8901, 9020, 9019], [8902, 8903, 9020], [8903, 9021, 9020], [8903, 8904, 9022], [8903, 9022, 9021], [8904, 8905, 9022], [8905, 9023, 9022], [8905, 8906, 9024], [8905, 9024, 9023], [8906, 8907, 9024], [8907, 9025, 9024], [8907, 8908, 9026], [8907, 9026, 9025], [8908, 8909, 9026], [8909, 9027, 9026], [8909, 8910, 9028], [8909, 9028, 9027], [8910, 8911, 9028], [8911, 9029, 9028], [8911, 8912, 9030], [8911, 9030, 9029], [8912, 8913, 9030], [8913, 9031, 9030], [8913, 8914, 9032], [8913, 9032, 9031], [8914, 8915, 9032], [8915, 9033, 9032], [8915, 8916, 9034], [8915, 9034, 9033], [8916, 8917, 9034], [8917, 9035, 9034], [8918, 8919, 9036], [8919, 9037, 9036], [8919, 8920, 9038], [8919, 9038, 9037], [8920, 8921, 9038], [8921, 9039, 9038], [8921, 8922, 9040], [8921, 9040, 9039], [8922, 8923, 9040], [8923, 9041, 9040], [8923, 8924, 9042], [8923, 9042, 9041], [8924, 8925, 9042], [8925, 9043, 9042], [8925, 8926, 9044], [8925, 9044, 9043], [8926, 8927, 9044], [8927, 9045, 9044], [8927, 8928, 9046], [8927, 9046, 9045], [8928, 8929, 9046], [8929, 9047, 9046], [8929, 8930, 9048], [8929, 9048, 9047], [8930, 8931, 9048], [8931, 9049, 9048], [8931, 8932, 9050], [8931, 9050, 9049], [8932, 8933, 9050], [8933, 9051, 9050], [8933, 8934, 9052], [8933, 9052, 9051], [8934, 8935, 9052], [8935, 9053, 9052], [8935, 8936, 9054], [8935, 9054, 9053], [8936, 8937, 9054], [8937, 9055, 9054], [8937, 8938, 9056], [8937, 9056, 9055], [8938, 8939, 9056], [8939, 9057, 9056], 
[8939, 8940, 9058], [8939, 9058, 9057], [8940, 8941, 9058], [8941, 9059, 9058], [8941, 8942, 9060], [8941, 9060, 9059], [8942, 8943, 9060], [8943, 9061, 9060], [8943, 8944, 9062], [8943, 9062, 9061], [8944, 8945, 9062], [8945, 9063, 9062], [8945, 8946, 9064], [8945, 9064, 9063], [8946, 8947, 9064], [8947, 9065, 9064], [8947, 8948, 9066], [8947, 9066, 9065], [8948, 8949, 9066], [8949, 9067, 9066], [8949, 8950, 9068], [8949, 9068, 9067], [8950, 8951, 9068], [8951, 9069, 9068], [8951, 8952, 9070], [8951, 9070, 9069], [8952, 8953, 9070], [8953, 9071, 9070], [8953, 8954, 9072], [8953, 9072, 9071], [8954, 8955, 9072], [8955, 9073, 9072], [8955, 8956, 9074], [8955, 9074, 9073], [8956, 8957, 9074], [8957, 9075, 9074], [8957, 8958, 9076], [8957, 9076, 9075], [8958, 8959, 9076], [8959, 9077, 9076], [8959, 8960, 9078], [8959, 9078, 9077], [8960, 8961, 9078], [8961, 9079, 9078], [8961, 8962, 9080], [8961, 9080, 9079], [8962, 8963, 9080], [8963, 9081, 9080], [8963, 8964, 9082], [8963, 9082, 9081], [8964, 8965, 9082], [8965, 9083, 9082], [8965, 8966, 9084], [8965, 9084, 9083], [8966, 8967, 9084], [8967, 9085, 9084], [8967, 8968, 9086], [8967, 9086, 9085], [8968, 8969, 9086], [8969, 9087, 9086], [8969, 8970, 9088], [8969, 9088, 9087], [8970, 8971, 9088], [8971, 9089, 9088], [8971, 8972, 9090], [8971, 9090, 9089], [8972, 8973, 9090], [8973, 9091, 9090], [8973, 8974, 9092], [8973, 9092, 9091], [8974, 8975, 9092], [8975, 9093, 9092], [8975, 8976, 9094], [8975, 9094, 9093], [8976, 8977, 9094], [8977, 9095, 9094], [8977, 8978, 9096], [8977, 9096, 9095], [8978, 8979, 9096], [8979, 9097, 9096], [8979, 8980, 9098], [8979, 9098, 9097], [8980, 8981, 9098], [8981, 9099, 9098], [8981, 8982, 9100], [8981, 9100, 9099], [8982, 8983, 9100], [8983, 9101, 9100], [8983, 8984, 9102], [8983, 9102, 9101], [8984, 8985, 9102], [8985, 9103, 9102], [8985, 8986, 9104], [8985, 9104, 9103], [8986, 8987, 9104], [8987, 9105, 9104], [8987, 8988, 9106], [8987, 9106, 9105], [8988, 8989, 9106], [8989, 9107, 9106], [8989, 8990, 9108], [8989, 9108, 9107], [8990, 8991, 9108], [8991, 9109, 9108], [8991, 8992, 9110], [8991, 9110, 9109], [8992, 8993, 9110], [8993, 9111, 9110], [8993, 8994, 9112], [8993, 9112, 9111], [8994, 8995, 9112], [8995, 9113, 9112], [8995, 8996, 9114], [8995, 9114, 9113], [8996, 8997, 9114], [8997, 9115, 9114], [8997, 8998, 9116], [8997, 9116, 9115], [8998, 8999, 9116], [8999, 9117, 9116], [8999, 9000, 9118], [8999, 9118, 9117], [9000, 9001, 9118], [9001, 9119, 9118], [9001, 9002, 9120], [9001, 9120, 9119], [9002, 9003, 9120], [9003, 9121, 9120], [9003, 9004, 9122], [9003, 9122, 9121], [9004, 9005, 9122], [9005, 9123, 9122], [9005, 9006, 9124], [9005, 9124, 9123], [9006, 9007, 9124], [9007, 9125, 9124], [9007, 9008, 9126], [9007, 9126, 9125], [9008, 9009, 9126], [9009, 9127, 9126], [9009, 9010, 9128], [9009, 9128, 9127], [9010, 9011, 9128], [9011, 9129, 9128], [9011, 9012, 9130], [9011, 9130, 9129], [9012, 9013, 9130], [9013, 9131, 9130], [9013, 9014, 9132], [9013, 9132, 9131], [9014, 9015, 9132], [9015, 9133, 9132], [9015, 9016, 9134], [9015, 9134, 9133], [9017, 9018, 9135], [9018, 9136, 9135], [9018, 9019, 9137], [9018, 9137, 9136], [9019, 9020, 9137], [9020, 9138, 9137], [9020, 9021, 9139], [9020, 9139, 9138], [9021, 9022, 9139], [9022, 9140, 9139], [9022, 9023, 9141], [9022, 9141, 9140], [9023, 9024, 9141], [9024, 9142, 9141], [9024, 9025, 9143], [9024, 9143, 9142], [9025, 9026, 9143], [9026, 9144, 9143], [9026, 9027, 9145], [9026, 9145, 9144], [9027, 9028, 9145], [9028, 9146, 9145], [9028, 9029, 9147], [9028, 9147, 
9146], [9029, 9030, 9147], [9030, 9148, 9147], [9030, 9031, 9149], [9030, 9149, 9148], [9031, 9032, 9149], [9032, 9150, 9149], [9032, 9033, 9151], [9032, 9151, 9150], [9033, 9034, 9151], [9034, 9152, 9151], [9034, 9035, 9153], [9034, 9153, 9152], [9036, 9037, 9155], [9036, 9155, 9154], [9037, 9038, 9155], [9038, 9156, 9155], [9038, 9039, 9157], [9038, 9157, 9156], [9039, 9040, 9157], [9040, 9158, 9157], [9040, 9041, 9159], [9040, 9159, 9158], [9041, 9042, 9159], [9042, 9160, 9159], [9042, 9043, 9161], [9042, 9161, 9160], [9043, 9044, 9161], [9044, 9162, 9161], [9044, 9045, 9163], [9044, 9163, 9162], [9045, 9046, 9163], [9046, 9164, 9163], [9046, 9047, 9165], [9046, 9165, 9164], [9047, 9048, 9165], [9048, 9166, 9165], [9048, 9049, 9167], [9048, 9167, 9166], [9049, 9050, 9167], [9050, 9168, 9167], [9050, 9051, 9169], [9050, 9169, 9168], [9051, 9052, 9169], [9052, 9170, 9169], [9052, 9053, 9171], [9052, 9171, 9170], [9053, 9054, 9171], [9054, 9172, 9171], [9054, 9055, 9173], [9054, 9173, 9172], [9055, 9056, 9173], [9056, 9174, 9173], [9056, 9057, 9175], [9056, 9175, 9174], [9057, 9058, 9175], [9058, 9176, 9175], [9058, 9059, 9177], [9058, 9177, 9176], [9059, 9060, 9177], [9060, 9178, 9177], [9060, 9061, 9179], [9060, 9179, 9178], [9061, 9062, 9179], [9062, 9180, 9179], [9062, 9063, 9181], [9062, 9181, 9180], [9063, 9064, 9181], [9064, 9182, 9181], [9064, 9065, 9183], [9064, 9183, 9182], [9065, 9066, 9183], [9066, 9184, 9183], [9066, 9067, 9185], [9066, 9185, 9184], [9067, 9068, 9185], [9068, 9186, 9185], [9068, 9069, 9187], [9068, 9187, 9186], [9069, 9070, 9187], [9070, 9188, 9187], [9070, 9071, 9189], [9070, 9189, 9188], [9071, 9072, 9189], [9072, 9190, 9189], [9072, 9073, 9191], [9072, 9191, 9190], [9073, 9074, 9191], [9074, 9192, 9191], [9074, 9075, 9193], [9074, 9193, 9192], [9075, 9076, 9193], [9076, 9194, 9193], [9076, 9077, 9195], [9076, 9195, 9194], [9077, 9078, 9195], [9078, 9196, 9195], [9078, 9079, 9197], [9078, 9197, 9196], [9079, 9080, 9197], [9080, 9198, 9197], [9080, 9081, 9199], [9080, 9199, 9198], [9081, 9082, 9199], [9082, 9200, 9199], [9082, 9083, 9201], [9082, 9201, 9200], [9083, 9084, 9201], [9084, 9202, 9201], [9084, 9085, 9203], [9084, 9203, 9202], [9085, 9086, 9203], [9086, 9204, 9203], [9086, 9087, 9205], [9086, 9205, 9204], [9087, 9088, 9205], [9088, 9206, 9205], [9088, 9089, 9207], [9088, 9207, 9206], [9089, 9090, 9207], [9090, 9208, 9207], [9090, 9091, 9209], [9090, 9209, 9208], [9091, 9092, 9209], [9092, 9210, 9209], [9092, 9093, 9211], [9092, 9211, 9210], [9093, 9094, 9211], [9094, 9212, 9211], [9094, 9095, 9213], [9094, 9213, 9212], [9095, 9096, 9213], [9096, 9214, 9213], [9096, 9097, 9215], [9096, 9215, 9214], [9097, 9098, 9215], [9098, 9216, 9215], [9098, 9099, 9217], [9098, 9217, 9216], [9099, 9100, 9217], [9100, 9218, 9217], [9100, 9101, 9219], [9100, 9219, 9218], [9101, 9102, 9219], [9102, 9220, 9219], [9102, 9103, 9221], [9102, 9221, 9220], [9103, 9104, 9221], [9104, 9222, 9221], [9104, 9105, 9223], [9104, 9223, 9222], [9105, 9106, 9223], [9106, 9224, 9223], [9106, 9107, 9225], [9106, 9225, 9224], [9107, 9108, 9225], [9108, 9226, 9225], [9108, 9109, 9227], [9108, 9227, 9226], [9109, 9110, 9227], [9110, 9228, 9227], [9110, 9111, 9229], [9110, 9229, 9228], [9111, 9112, 9229], [9112, 9230, 9229], [9112, 9113, 9231], [9112, 9231, 9230], [9113, 9114, 9231], [9114, 9232, 9231], [9114, 9115, 9233], [9114, 9233, 9232], [9115, 9116, 9233], [9116, 9234, 9233], [9116, 9117, 9235], [9116, 9235, 9234], [9117, 9118, 9235], [9118, 9236, 9235], [9118, 9119, 9237], [9118, 
9237, 9236], [9119, 9120, 9237], [9120, 9238, 9237], [9120, 9121, 9239], [9120, 9239, 9238], [9121, 9122, 9239], [9122, 9240, 9239], [9122, 9123, 9241], [9122, 9241, 9240], [9123, 9124, 9241], [9124, 9242, 9241], [9124, 9125, 9243], [9124, 9243, 9242], [9125, 9126, 9243], [9126, 9244, 9243], [9126, 9127, 9245], [9126, 9245, 9244], [9127, 9128, 9245], [9128, 9246, 9245], [9128, 9129, 9247], [9128, 9247, 9246], [9129, 9130, 9247], [9130, 9248, 9247], [9130, 9131, 9249], [9130, 9249, 9248], [9131, 9132, 9249], [9132, 9250, 9249], [9132, 9133, 9251], [9132, 9251, 9250], [9133, 9134, 9251], [9134, 9252, 9251], [9135, 9136, 9254], [9135, 9254, 9253], [9136, 9137, 9254], [9137, 9255, 9254], [9137, 9138, 9256], [9137, 9256, 9255], [9138, 9139, 9256], [9139, 9257, 9256], [9139, 9140, 9258], [9139, 9258, 9257], [9140, 9141, 9258], [9141, 9259, 9258], [9141, 9142, 9260], [9141, 9260, 9259], [9142, 9143, 9260], [9143, 9261, 9260], [9143, 9144, 9262], [9143, 9262, 9261], [9144, 9145, 9262], [9145, 9263, 9262], [9145, 9146, 9264], [9145, 9264, 9263], [9146, 9147, 9264], [9147, 9265, 9264], [9147, 9148, 9266], [9147, 9266, 9265], [9148, 9149, 9266], [9149, 9267, 9266], [9149, 9150, 9268], [9149, 9268, 9267], [9150, 9151, 9268], [9151, 9269, 9268], [9151, 9152, 9270], [9151, 9270, 9269], [9152, 9153, 9270], [9153, 9271, 9270], [9154, 9155, 9272], [9155, 9273, 9272], [9155, 9156, 9274], [9155, 9274, 9273], [9156, 9157, 9274], [9157, 9275, 9274], [9157, 9158, 9276], [9157, 9276, 9275], [9158, 9159, 9276], [9159, 9277, 9276], [9159, 9160, 9278], [9159, 9278, 9277], [9160, 9161, 9278], [9161, 9279, 9278], [9161, 9162, 9280], [9161, 9280, 9279], [9162, 9163, 9280], [9163, 9281, 9280], [9163, 9164, 9282], [9163, 9282, 9281], [9164, 9165, 9282], [9165, 9283, 9282], [9165, 9166, 9284], [9165, 9284, 9283], [9166, 9167, 9284], [9167, 9285, 9284], [9167, 9168, 9286], [9167, 9286, 9285], [9168, 9169, 9286], [9169, 9287, 9286], [9169, 9170, 9288], [9169, 9288, 9287], [9170, 9171, 9288], [9171, 9289, 9288], [9171, 9172, 9290], [9171, 9290, 9289], [9172, 9173, 9290], [9173, 9291, 9290], [9173, 9174, 9292], [9173, 9292, 9291], [9174, 9175, 9292], [9175, 9293, 9292], [9175, 9176, 9294], [9175, 9294, 9293], [9176, 9177, 9294], [9177, 9295, 9294], [9177, 9178, 9296], [9177, 9296, 9295], [9178, 9179, 9296], [9179, 9297, 9296], [9179, 9180, 9298], [9179, 9298, 9297], [9180, 9181, 9298], [9181, 9299, 9298], [9181, 9182, 9300], [9181, 9300, 9299], [9182, 9183, 9300], [9183, 9301, 9300], [9183, 9184, 9302], [9183, 9302, 9301], [9184, 9185, 9302], [9185, 9303, 9302], [9185, 9186, 9304], [9185, 9304, 9303], [9186, 9187, 9304], [9187, 9305, 9304], [9187, 9188, 9306], [9187, 9306, 9305], [9188, 9189, 9306], [9189, 9307, 9306], [9189, 9190, 9308], [9189, 9308, 9307], [9190, 9191, 9308], [9191, 9309, 9308], [9191, 9192, 9310], [9191, 9310, 9309], [9192, 9193, 9310], [9193, 9311, 9310], [9193, 9194, 9312], [9193, 9312, 9311], [9194, 9195, 9312], [9195, 9313, 9312], [9195, 9196, 9314], [9195, 9314, 9313], [9196, 9197, 9314], [9197, 9315, 9314], [9197, 9198, 9316], [9197, 9316, 9315], [9198, 9199, 9316], [9199, 9317, 9316], [9199, 9200, 9318], [9199, 9318, 9317], [9200, 9201, 9318], [9201, 9319, 9318], [9201, 9202, 9320], [9201, 9320, 9319], [9202, 9203, 9320], [9203, 9321, 9320], [9203, 9204, 9322], [9203, 9322, 9321], [9204, 9205, 9322], [9205, 9323, 9322], [9205, 9206, 9324], [9205, 9324, 9323], [9206, 9207, 9324], [9207, 9325, 9324], [9207, 9208, 9326], [9207, 9326, 9325], [9208, 9209, 9326], [9209, 9327, 9326], [9209, 9210, 9328], 
[9209, 9328, 9327], [9210, 9211, 9328], [9211, 9329, 9328], [9211, 9212, 9330], [9211, 9330, 9329], [9212, 9213, 9330], [9213, 9331, 9330], [9213, 9214, 9332], [9213, 9332, 9331], [9214, 9215, 9332], [9215, 9333, 9332], [9215, 9216, 9334], [9215, 9334, 9333], [9216, 9217, 9334], [9217, 9335, 9334], [9217, 9218, 9336], [9217, 9336, 9335], [9218, 9219, 9336], [9219, 9337, 9336], [9219, 9220, 9338], [9219, 9338, 9337], [9220, 9221, 9338], [9221, 9339, 9338], [9221, 9222, 9340], [9221, 9340, 9339], [9222, 9223, 9340], [9223, 9341, 9340], [9223, 9224, 9342], [9223, 9342, 9341], [9224, 9225, 9342], [9225, 9343, 9342], [9225, 9226, 9344], [9225, 9344, 9343], [9226, 9227, 9344], [9227, 9345, 9344], [9227, 9228, 9346], [9227, 9346, 9345], [9228, 9229, 9346], [9229, 9347, 9346], [9229, 9230, 9348], [9229, 9348, 9347], [9230, 9231, 9348], [9231, 9349, 9348], [9231, 9232, 9350], [9231, 9350, 9349], [9232, 9233, 9350], [9233, 9351, 9350], [9233, 9234, 9352], [9233, 9352, 9351], [9234, 9235, 9352], [9235, 9353, 9352], [9235, 9236, 9354], [9235, 9354, 9353], [9236, 9237, 9354], [9237, 9355, 9354], [9237, 9238, 9356], [9237, 9356, 9355], [9238, 9239, 9356], [9239, 9357, 9356], [9239, 9240, 9358], [9239, 9358, 9357], [9240, 9241, 9358], [9241, 9359, 9358], [9241, 9242, 9360], [9241, 9360, 9359], [9242, 9243, 9360], [9243, 9361, 9360], [9243, 9244, 9362], [9243, 9362, 9361], [9244, 9245, 9362], [9245, 9363, 9362], [9245, 9246, 9364], [9245, 9364, 9363], [9246, 9247, 9364], [9247, 9365, 9364], [9247, 9248, 9366], [9247, 9366, 9365], [9248, 9249, 9366], [9249, 9367, 9366], [9249, 9250, 9368], [9249, 9368, 9367], [9250, 9251, 9368], [9251, 9369, 9368], [9251, 9252, 9370], [9251, 9370, 9369], [9253, 9254, 9371], [9254, 9372, 9371], [9254, 9255, 9373], [9254, 9373, 9372], [9255, 9256, 9373], [9256, 9374, 9373], [9256, 9257, 9375], [9256, 9375, 9374], [9257, 9258, 9375], [9258, 9376, 9375], [9258, 9259, 9377], [9258, 9377, 9376], [9259, 9260, 9377], [9260, 9378, 9377], [9260, 9261, 9379], [9260, 9379, 9378], [9261, 9262, 9379], [9262, 9380, 9379], [9262, 9263, 9381], [9262, 9381, 9380], [9263, 9264, 9381], [9264, 9382, 9381], [9264, 9265, 9383], [9264, 9383, 9382], [9265, 9266, 9383], [9266, 9384, 9383], [9266, 9267, 9385], [9266, 9385, 9384], [9267, 9268, 9385], [9268, 9386, 9385], [9268, 9269, 9387], [9268, 9387, 9386], [9269, 9270, 9387], [9270, 9388, 9387], [9270, 9271, 9389], [9270, 9389, 9388], [9272, 9273, 9391], [9272, 9391, 9390], [9273, 9274, 9391], [9274, 9392, 9391], [9274, 9275, 9393], [9274, 9393, 9392], [9275, 9276, 9393], [9276, 9394, 9393], [9276, 9277, 9395], [9276, 9395, 9394], [9277, 9278, 9395], [9278, 9396, 9395], [9278, 9279, 9397], [9278, 9397, 9396], [9279, 9280, 9397], [9280, 9398, 9397], [9280, 9281, 9399], [9280, 9399, 9398], [9281, 9282, 9399], [9282, 9400, 9399], [9282, 9283, 9401], [9282, 9401, 9400], [9283, 9284, 9401], [9284, 9402, 9401], [9284, 9285, 9403], [9284, 9403, 9402], [9285, 9286, 9403], [9286, 9404, 9403], [9286, 9287, 9405], [9286, 9405, 9404], [9287, 9288, 9405], [9288, 9406, 9405], [9288, 9289, 9407], [9288, 9407, 9406], [9289, 9290, 9407], [9290, 9408, 9407], [9290, 9291, 9409], [9290, 9409, 9408], [9291, 9292, 9409], [9292, 9410, 9409], [9292, 9293, 9411], [9292, 9411, 9410], [9293, 9294, 9411], [9294, 9412, 9411], [9294, 9295, 9413], [9294, 9413, 9412], [9295, 9296, 9413], [9296, 9414, 9413], [9296, 9297, 9415], [9296, 9415, 9414], [9297, 9298, 9415], [9298, 9416, 9415], [9298, 9299, 9417], [9298, 9417, 9416], [9299, 9300, 9417], [9300, 9418, 9417], [9300, 9301, 
9419], [9300, 9419, 9418], [9301, 9302, 9419], [9302, 9420, 9419], [9302, 9303, 9421], [9302, 9421, 9420], [9303, 9304, 9421], [9304, 9422, 9421], [9304, 9305, 9423], [9304, 9423, 9422], [9305, 9306, 9423], [9306, 9424, 9423], [9306, 9307, 9425], [9306, 9425, 9424], [9307, 9308, 9425], [9308, 9426, 9425], [9308, 9309, 9427], [9308, 9427, 9426], [9309, 9310, 9427], [9310, 9428, 9427], [9310, 9311, 9429], [9310, 9429, 9428], [9311, 9312, 9429], [9312, 9430, 9429], [9312, 9313, 9431], [9312, 9431, 9430], [9313, 9314, 9431], [9314, 9432, 9431], [9314, 9315, 9433], [9314, 9433, 9432], [9315, 9316, 9433], [9316, 9434, 9433], [9316, 9317, 9435], [9316, 9435, 9434], [9317, 9318, 9435], [9318, 9436, 9435], [9318, 9319, 9437], [9318, 9437, 9436], [9319, 9320, 9437], [9320, 9438, 9437], [9320, 9321, 9439], [9320, 9439, 9438], [9321, 9322, 9439], [9322, 9440, 9439], [9322, 9323, 9441], [9322, 9441, 9440], [9323, 9324, 9441], [9324, 9442, 9441], [9324, 9325, 9443], [9324, 9443, 9442], [9325, 9326, 9443], [9326, 9444, 9443], [9326, 9327, 9445], [9326, 9445, 9444], [9327, 9328, 9445], [9328, 9446, 9445], [9328, 9329, 9447], [9328, 9447, 9446], [9329, 9330, 9447], [9330, 9448, 9447], [9330, 9331, 9449], [9330, 9449, 9448], [9331, 9332, 9449], [9332, 9450, 9449], [9332, 9333, 9451], [9332, 9451, 9450], [9333, 9334, 9451], [9334, 9452, 9451], [9334, 9335, 9453], [9334, 9453, 9452], [9335, 9336, 9453], [9336, 9454, 9453], [9336, 9337, 9455], [9336, 9455, 9454], [9337, 9338, 9455], [9338, 9456, 9455], [9338, 9339, 9457], [9338, 9457, 9456], [9339, 9340, 9457], [9340, 9458, 9457], [9340, 9341, 9459], [9340, 9459, 9458], [9341, 9342, 9459], [9342, 9460, 9459], [9342, 9343, 9461], [9342, 9461, 9460], [9343, 9344, 9461], [9344, 9462, 9461], [9344, 9345, 9463], [9344, 9463, 9462], [9345, 9346, 9463], [9346, 9464, 9463], [9346, 9347, 9465], [9346, 9465, 9464], [9347, 9348, 9465], [9348, 9466, 9465], [9348, 9349, 9467], [9348, 9467, 9466], [9349, 9350, 9467], [9350, 9468, 9467], [9350, 9351, 9469], [9350, 9469, 9468], [9351, 9352, 9469], [9352, 9470, 9469], [9352, 9353, 9471], [9352, 9471, 9470], [9353, 9354, 9471], [9354, 9472, 9471], [9354, 9355, 9473], [9354, 9473, 9472], [9355, 9356, 9473], [9356, 9474, 9473], [9356, 9357, 9475], [9356, 9475, 9474], [9357, 9358, 9475], [9358, 9476, 9475], [9358, 9359, 9477], [9358, 9477, 9476], [9359, 9360, 9477], [9360, 9478, 9477], [9360, 9361, 9479], [9360, 9479, 9478], [9361, 9362, 9479], [9362, 9480, 9479], [9362, 9363, 9481], [9362, 9481, 9480], [9363, 9364, 9481], [9364, 9482, 9481], [9364, 9365, 9483], [9364, 9483, 9482], [9365, 9366, 9483], [9366, 9484, 9483], [9366, 9367, 9485], [9366, 9485, 9484], [9367, 9368, 9485], [9368, 9486, 9485], [9368, 9369, 9487], [9368, 9487, 9486], [9369, 9370, 9487], [9370, 9488, 9487], [9371, 9372, 9490], [9371, 9490, 9489], [9372, 9373, 9490], [9373, 9491, 9490], [9373, 9374, 9492], [9373, 9492, 9491], [9374, 9375, 9492], [9375, 9493, 9492], [9375, 9376, 9494], [9375, 9494, 9493], [9376, 9377, 9494], [9377, 9495, 9494], [9377, 9378, 9496], [9377, 9496, 9495], [9378, 9379, 9496], [9379, 9497, 9496], [9379, 9380, 9498], [9379, 9498, 9497], [9380, 9381, 9498], [9381, 9499, 9498], [9381, 9382, 9500], [9381, 9500, 9499], [9382, 9383, 9500], [9383, 9501, 9500], [9383, 9384, 9502], [9383, 9502, 9501], [9384, 9385, 9502], [9385, 9503, 9502], [9385, 9386, 9504], [9385, 9504, 9503], [9386, 9387, 9504], [9387, 9505, 9504], [9387, 9388, 9506], [9387, 9506, 9505], [9388, 9389, 9506], [9389, 9507, 9506], [9390, 9391, 9508], [9391, 9509, 9508], [9391, 
9392, 9510], [9391, 9510, 9509], [9392, 9393, 9510], [9393, 9511, 9510], [9393, 9394, 9512], [9393, 9512, 9511], [9394, 9395, 9512], [9395, 9513, 9512], [9395, 9396, 9514], [9395, 9514, 9513], [9396, 9397, 9514], [9397, 9515, 9514], [9397, 9398, 9516], [9397, 9516, 9515], [9398, 9399, 9516], [9399, 9517, 9516], [9399, 9400, 9518], [9399, 9518, 9517], [9400, 9401, 9518], [9401, 9519, 9518], [9401, 9402, 9520], [9401, 9520, 9519], [9402, 9403, 9520], [9403, 9521, 9520], [9403, 9404, 9522], [9403, 9522, 9521], [9404, 9405, 9522], [9405, 9523, 9522], [9405, 9406, 9524], [9405, 9524, 9523], [9406, 9407, 9524], [9407, 9525, 9524], [9407, 9408, 9526], [9407, 9526, 9525], [9408, 9409, 9526], [9409, 9527, 9526], [9409, 9410, 9528], [9409, 9528, 9527], [9410, 9411, 9528], [9411, 9529, 9528], [9411, 9412, 9530], [9411, 9530, 9529], [9412, 9413, 9530], [9413, 9531, 9530], [9413, 9414, 9532], [9413, 9532, 9531], [9414, 9415, 9532], [9415, 9533, 9532], [9415, 9416, 9534], [9415, 9534, 9533], [9416, 9417, 9534], [9417, 9535, 9534], [9417, 9418, 9536], [9417, 9536, 9535], [9418, 9419, 9536], [9419, 9537, 9536], [9419, 9420, 9538], [9419, 9538, 9537], [9420, 9421, 9538], [9421, 9539, 9538], [9421, 9422, 9540], [9421, 9540, 9539], [9422, 9423, 9540], [9423, 9541, 9540], [9423, 9424, 9542], [9423, 9542, 9541], [9424, 9425, 9542], [9425, 9543, 9542], [9425, 9426, 9544], [9425, 9544, 9543], [9426, 9427, 9544], [9427, 9545, 9544], [9427, 9428, 9546], [9427, 9546, 9545], [9428, 9429, 9546], [9429, 9547, 9546], [9429, 9430, 9548], [9429, 9548, 9547], [9430, 9431, 9548], [9431, 9549, 9548], [9431, 9432, 9550], [9431, 9550, 9549], [9432, 9433, 9550], [9433, 9551, 9550], [9433, 9434, 9552], [9433, 9552, 9551], [9434, 9435, 9552], [9435, 9553, 9552], [9435, 9436, 9554], [9435, 9554, 9553], [9436, 9437, 9554], [9437, 9555, 9554], [9437, 9438, 9556], [9437, 9556, 9555], [9438, 9439, 9556], [9439, 9557, 9556], [9439, 9440, 9558], [9439, 9558, 9557], [9440, 9441, 9558], [9441, 9559, 9558], [9441, 9442, 9560], [9441, 9560, 9559], [9442, 9443, 9560], [9443, 9561, 9560], [9443, 9444, 9562], [9443, 9562, 9561], [9444, 9445, 9562], [9445, 9563, 9562], [9445, 9446, 9564], [9445, 9564, 9563], [9446, 9447, 9564], [9447, 9565, 9564], [9447, 9448, 9566], [9447, 9566, 9565], [9448, 9449, 9566], [9449, 9567, 9566], [9449, 9450, 9568], [9449, 9568, 9567], [9450, 9451, 9568], [9451, 9569, 9568], [9451, 9452, 9570], [9451, 9570, 9569], [9452, 9453, 9570], [9453, 9571, 9570], [9453, 9454, 9572], [9453, 9572, 9571], [9454, 9455, 9572], [9455, 9573, 9572], [9455, 9456, 9574], [9455, 9574, 9573], [9456, 9457, 9574], [9457, 9575, 9574], [9457, 9458, 9576], [9457, 9576, 9575], [9458, 9459, 9576], [9459, 9577, 9576], [9459, 9460, 9578], [9459, 9578, 9577], [9460, 9461, 9578], [9461, 9579, 9578], [9461, 9462, 9580], [9461, 9580, 9579], [9462, 9463, 9580], [9463, 9581, 9580], [9463, 9464, 9582], [9463, 9582, 9581], [9464, 9465, 9582], [9465, 9583, 9582], [9465, 9466, 9584], [9465, 9584, 9583], [9466, 9467, 9584], [9467, 9585, 9584], [9467, 9468, 9586], [9467, 9586, 9585], [9468, 9469, 9586], [9469, 9587, 9586], [9469, 9470, 9588], [9469, 9588, 9587], [9470, 9471, 9588], [9471, 9589, 9588], [9471, 9472, 9590], [9471, 9590, 9589], [9472, 9473, 9590], [9473, 9591, 9590], [9473, 9474, 9592], [9473, 9592, 9591], [9474, 9475, 9592], [9475, 9593, 9592], [9475, 9476, 9594], [9475, 9594, 9593], [9476, 9477, 9594], [9477, 9595, 9594], [9477, 9478, 9596], [9477, 9596, 9595], [9478, 9479, 9596], [9479, 9597, 9596], [9479, 9480, 9598], [9479, 9598, 9597], 
[9480, 9481, 9598], [9481, 9599, 9598], [9481, 9482, 9600], [9481, 9600, 9599], [9482, 9483, 9600], [9483, 9601, 9600], [9483, 9484, 9602], [9483, 9602, 9601], [9484, 9485, 9602], [9485, 9603, 9602], [9485, 9486, 9604], [9485, 9604, 9603], [9486, 9487, 9604], [9487, 9605, 9604], [9487, 9488, 9606], [9487, 9606, 9605], [9489, 9490, 9607], [9490, 9608, 9607], [9490, 9491, 9609], [9490, 9609, 9608], [9491, 9492, 9609], [9492, 9610, 9609], [9492, 9493, 9611], [9492, 9611, 9610], [9493, 9494, 9611], [9494, 9612, 9611], [9494, 9495, 9613], [9494, 9613, 9612], [9495, 9496, 9613], [9496, 9614, 9613], [9496, 9497, 9615], [9496, 9615, 9614], [9497, 9498, 9615], [9498, 9616, 9615], [9498, 9499, 9617], [9498, 9617, 9616], [9499, 9500, 9617], [9500, 9618, 9617], [9500, 9501, 9619], [9500, 9619, 9618], [9501, 9502, 9619], [9502, 9620, 9619], [9502, 9503, 9621], [9502, 9621, 9620], [9503, 9504, 9621], [9504, 9622, 9621], [9504, 9505, 9623], [9504, 9623, 9622], [9505, 9506, 9623], [9506, 9624, 9623], [9506, 9507, 9625], [9506, 9625, 9624], [9508, 9509, 9627], [9508, 9627, 9626], [9509, 9510, 9627], [9510, 9628, 9627], [9510, 9511, 9629], [9510, 9629, 9628], [9511, 9512, 9629], [9512, 9630, 9629], [9512, 9513, 9631], [9512, 9631, 9630], [9513, 9514, 9631], [9514, 9632, 9631], [9514, 9515, 9633], [9514, 9633, 9632], [9515, 9516, 9633], [9516, 9634, 9633], [9516, 9517, 9635], [9516, 9635, 9634], [9517, 9518, 9635], [9518, 9636, 9635], [9518, 9519, 9637], [9518, 9637, 9636], [9519, 9520, 9637], [9520, 9638, 9637], [9520, 9521, 9639], [9520, 9639, 9638], [9521, 9522, 9639], [9522, 9640, 9639], [9522, 9523, 9641], [9522, 9641, 9640], [9523, 9524, 9641], [9524, 9642, 9641], [9524, 9525, 9643], [9524, 9643, 9642], [9525, 9526, 9643], [9526, 9644, 9643], [9526, 9527, 9645], [9526, 9645, 9644], [9527, 9528, 9645], [9528, 9646, 9645], [9528, 9529, 9647], [9528, 9647, 9646], [9529, 9530, 9647], [9530, 9648, 9647], [9530, 9531, 9649], [9530, 9649, 9648], [9531, 9532, 9649], [9532, 9650, 9649], [9532, 9533, 9651], [9532, 9651, 9650], [9533, 9534, 9651], [9534, 9652, 9651], [9534, 9535, 9653], [9534, 9653, 9652], [9535, 9536, 9653], [9536, 9654, 9653], [9536, 9537, 9655], [9536, 9655, 9654], [9537, 9538, 9655], [9538, 9656, 9655], [9538, 9539, 9657], [9538, 9657, 9656], [9539, 9540, 9657], [9540, 9658, 9657], [9540, 9541, 9659], [9540, 9659, 9658], [9541, 9542, 9659], [9542, 9660, 9659], [9542, 9543, 9661], [9542, 9661, 9660], [9543, 9544, 9661], [9544, 9662, 9661], [9544, 9545, 9663], [9544, 9663, 9662], [9545, 9546, 9663], [9546, 9664, 9663], [9546, 9547, 9665], [9546, 9665, 9664], [9547, 9548, 9665], [9548, 9666, 9665], [9548, 9549, 9667], [9548, 9667, 9666], [9549, 9550, 9667], [9550, 9668, 9667], [9550, 9551, 9669], [9550, 9669, 9668], [9551, 9552, 9669], [9552, 9670, 9669], [9552, 9553, 9671], [9552, 9671, 9670], [9553, 9554, 9671], [9554, 9672, 9671], [9554, 9555, 9673], [9554, 9673, 9672], [9555, 9556, 9673], [9556, 9674, 9673], [9556, 9557, 9675], [9556, 9675, 9674], [9557, 9558, 9675], [9558, 9676, 9675], [9558, 9559, 9677], [9558, 9677, 9676], [9559, 9560, 9677], [9560, 9678, 9677], [9560, 9561, 9679], [9560, 9679, 9678], [9561, 9562, 9679], [9562, 9680, 9679], [9562, 9563, 9681], [9562, 9681, 9680], [9563, 9564, 9681], [9564, 9682, 9681], [9564, 9565, 9683], [9564, 9683, 9682], [9565, 9566, 9683], [9566, 9684, 9683], [9566, 9567, 9685], [9566, 9685, 9684], [9567, 9568, 9685], [9568, 9686, 9685], [9568, 9569, 9687], [9568, 9687, 9686], [9569, 9570, 9687], [9570, 9688, 9687], [9570, 9571, 9689], [9570, 9689, 
9688], [9571, 9572, 9689], [9572, 9690, 9689], [9572, 9573, 9691], [9572, 9691, 9690], [9573, 9574, 9691], [9574, 9692, 9691], [9574, 9575, 9693], [9574, 9693, 9692], [9575, 9576, 9693], [9576, 9694, 9693], [9576, 9577, 9695], [9576, 9695, 9694], [9577, 9578, 9695], [9578, 9696, 9695], [9578, 9579, 9697], [9578, 9697, 9696], [9579, 9580, 9697], [9580, 9698, 9697], [9580, 9581, 9699], [9580, 9699, 9698], [9581, 9582, 9699], [9582, 9700, 9699], [9582, 9583, 9701], [9582, 9701, 9700], [9583, 9584, 9701], [9584, 9702, 9701], [9584, 9585, 9703], [9584, 9703, 9702], [9585, 9586, 9703], [9586, 9704, 9703], [9586, 9587, 9705], [9586, 9705, 9704], [9587, 9588, 9705], [9588, 9706, 9705], [9588, 9589, 9707], [9588, 9707, 9706], [9589, 9590, 9707], [9590, 9708, 9707], [9590, 9591, 9709], [9590, 9709, 9708], [9591, 9592, 9709], [9592, 9710, 9709], [9592, 9593, 9711], [9592, 9711, 9710], [9593, 9594, 9711], [9594, 9712, 9711], [9594, 9595, 9713], [9594, 9713, 9712], [9595, 9596, 9713], [9596, 9714, 9713], [9596, 9597, 9715], [9596, 9715, 9714], [9597, 9598, 9715], [9598, 9716, 9715], [9598, 9599, 9717], [9598, 9717, 9716], [9599, 9600, 9717], [9600, 9718, 9717], [9600, 9601, 9719], [9600, 9719, 9718], [9601, 9602, 9719], [9602, 9720, 9719], [9602, 9603, 9721], [9602, 9721, 9720], [9603, 9604, 9721], [9604, 9722, 9721], [9604, 9605, 9723], [9604, 9723, 9722], [9605, 9606, 9723], [9606, 9724, 9723], [9607, 9608, 9726], [9607, 9726, 9725], [9608, 9609, 9726], [9609, 9727, 9726], [9609, 9610, 9728], [9609, 9728, 9727], [9610, 9611, 9728], [9611, 9729, 9728], [9611, 9612, 9730], [9611, 9730, 9729], [9612, 9613, 9730], [9613, 9731, 9730], [9613, 9614, 9732], [9613, 9732, 9731], [9614, 9615, 9732], [9615, 9733, 9732], [9615, 9616, 9734], [9615, 9734, 9733], [9616, 9617, 9734], [9617, 9735, 9734], [9617, 9618, 9736], [9617, 9736, 9735], [9618, 9619, 9736], [9619, 9737, 9736], [9619, 9620, 9738], [9619, 9738, 9737], [9620, 9621, 9738], [9621, 9739, 9738], [9621, 9622, 9740], [9621, 9740, 9739], [9622, 9623, 9740], [9623, 9741, 9740], [9623, 9624, 9742], [9623, 9742, 9741], [9624, 9625, 9742], [9625, 9743, 9742], [9626, 9627, 9744], [9627, 9745, 9744], [9627, 9628, 9746], [9627, 9746, 9745], [9628, 9629, 9746], [9629, 9747, 9746], [9629, 9630, 9748], [9629, 9748, 9747], [9630, 9631, 9748], [9631, 9749, 9748], [9631, 9632, 9750], [9631, 9750, 9749], [9632, 9633, 9750], [9633, 9751, 9750], [9633, 9634, 9752], [9633, 9752, 9751], [9634, 9635, 9752], [9635, 9753, 9752], [9635, 9636, 9754], [9635, 9754, 9753], [9636, 9637, 9754], [9637, 9755, 9754], [9637, 9638, 9756], [9637, 9756, 9755], [9638, 9639, 9756], [9639, 9757, 9756], [9639, 9640, 9758], [9639, 9758, 9757], [9640, 9641, 9758], [9641, 9759, 9758], [9641, 9642, 9760], [9641, 9760, 9759], [9642, 9643, 9760], [9643, 9761, 9760], [9643, 9644, 9762], [9643, 9762, 9761], [9644, 9645, 9762], [9645, 9763, 9762], [9645, 9646, 9764], [9645, 9764, 9763], [9646, 9647, 9764], [9647, 9765, 9764], [9647, 9648, 9766], [9647, 9766, 9765], [9648, 9649, 9766], [9649, 9767, 9766], [9649, 9650, 9768], [9649, 9768, 9767], [9650, 9651, 9768], [9651, 9769, 9768], [9651, 9652, 9770], [9651, 9770, 9769], [9652, 9653, 9770], [9653, 9771, 9770], [9653, 9654, 9772], [9653, 9772, 9771], [9654, 9655, 9772], [9655, 9773, 9772], [9655, 9656, 9774], [9655, 9774, 9773], [9656, 9657, 9774], [9657, 9775, 9774], [9657, 9658, 9776], [9657, 9776, 9775], [9658, 9659, 9776], [9659, 9777, 9776], [9659, 9660, 9778], [9659, 9778, 9777], [9660, 9661, 9778], [9661, 9779, 9778], [9661, 9662, 9780], [9661, 
9780, 9779], [9662, 9663, 9780], [9663, 9781, 9780], [9663, 9664, 9782], [9663, 9782, 9781], [9664, 9665, 9782], [9665, 9783, 9782], [9665, 9666, 9784], [9665, 9784, 9783], [9666, 9667, 9784], [9667, 9785, 9784], [9667, 9668, 9786], [9667, 9786, 9785], [9668, 9669, 9786], [9669, 9787, 9786], [9669, 9670, 9788], [9669, 9788, 9787], [9670, 9671, 9788], [9671, 9789, 9788], [9671, 9672, 9790], [9671, 9790, 9789], [9672, 9673, 9790], [9673, 9791, 9790], [9673, 9674, 9792], [9673, 9792, 9791], [9674, 9675, 9792], [9675, 9793, 9792], [9675, 9676, 9794], [9675, 9794, 9793], [9676, 9677, 9794], [9677, 9795, 9794], [9677, 9678, 9796], [9677, 9796, 9795], [9678, 9679, 9796], [9679, 9797, 9796], [9679, 9680, 9798], [9679, 9798, 9797], [9680, 9681, 9798], [9681, 9799, 9798], [9681, 9682, 9800], [9681, 9800, 9799], [9682, 9683, 9800], [9683, 9801, 9800], [9683, 9684, 9802], [9683, 9802, 9801], [9684, 9685, 9802], [9685, 9803, 9802], [9685, 9686, 9804], [9685, 9804, 9803], [9686, 9687, 9804], [9687, 9805, 9804], [9687, 9688, 9806], [9687, 9806, 9805], [9688, 9689, 9806], [9689, 9807, 9806], [9689, 9690, 9808], [9689, 9808, 9807], [9690, 9691, 9808], [9691, 9809, 9808], [9691, 9692, 9810], [9691, 9810, 9809], [9692, 9693, 9810], [9693, 9811, 9810], [9693, 9694, 9812], [9693, 9812, 9811], [9694, 9695, 9812], [9695, 9813, 9812], [9695, 9696, 9814], [9695, 9814, 9813], [9696, 9697, 9814], [9697, 9815, 9814], [9697, 9698, 9816], [9697, 9816, 9815], [9698, 9699, 9816], [9699, 9817, 9816], [9699, 9700, 9818], [9699, 9818, 9817], [9700, 9701, 9818], [9701, 9819, 9818], [9701, 9702, 9820], [9701, 9820, 9819], [9702, 9703, 9820], [9703, 9821, 9820], [9703, 9704, 9822], [9703, 9822, 9821], [9704, 9705, 9822], [9705, 9823, 9822], [9705, 9706, 9824], [9705, 9824, 9823], [9706, 9707, 9824], [9707, 9825, 9824], [9707, 9708, 9826], [9707, 9826, 9825], [9708, 9709, 9826], [9709, 9827, 9826], [9709, 9710, 9828], [9709, 9828, 9827], [9710, 9711, 9828], [9711, 9829, 9828], [9711, 9712, 9830], [9711, 9830, 9829], [9712, 9713, 9830], [9713, 9831, 9830], [9713, 9714, 9832], [9713, 9832, 9831], [9714, 9715, 9832], [9715, 9833, 9832], [9715, 9716, 9834], [9715, 9834, 9833], [9716, 9717, 9834], [9717, 9835, 9834], [9717, 9718, 9836], [9717, 9836, 9835], [9718, 9719, 9836], [9719, 9837, 9836], [9719, 9720, 9838], [9719, 9838, 9837], [9720, 9721, 9838], [9721, 9839, 9838], [9721, 9722, 9840], [9721, 9840, 9839], [9722, 9723, 9840], [9723, 9841, 9840], [9723, 9724, 9842], [9723, 9842, 9841], [9725, 9726, 9845], [9726, 9846, 9845], [9726, 9727, 9847], [9726, 9847, 9846], [9727, 9728, 9847], [9728, 9848, 9847], [9728, 9729, 9849], [9728, 9849, 9848], [9729, 9730, 9849], [9730, 9850, 9849], [9730, 9731, 9851], [9730, 9851, 9850], [9731, 9732, 9851], [9732, 9852, 9851], [9732, 9733, 9853], [9732, 9853, 9852], [9733, 9734, 9853], [9734, 9854, 9853], [9734, 9735, 9855], [9734, 9855, 9854], [9735, 9736, 9855], [9736, 9856, 9855], [9736, 9737, 9857], [9736, 9857, 9856], [9737, 9738, 9857], [9738, 9858, 9857], [9738, 9739, 9859], [9738, 9859, 9858], [9739, 9740, 9859], [9740, 9860, 9859], [9740, 9741, 9861], [9740, 9861, 9860], [9741, 9742, 9861], [9742, 9862, 9861], [9742, 9743, 9863], [9742, 9863, 9862], [9744, 9745, 9865], [9744, 9865, 9864], [9745, 9746, 9865], [9746, 9866, 9865], [9746, 9747, 9867], [9746, 9867, 9866], [9747, 9748, 9867], [9748, 9868, 9867], [9748, 9749, 9869], [9748, 9869, 9868], [9749, 9750, 9869], [9750, 9870, 9869], [9750, 9751, 9871], [9750, 9871, 9870], [9751, 9752, 9871], [9752, 9872, 9871], [9752, 9753, 9873], 
[9752, 9873, 9872], [9753, 9754, 9873], [9754, 9874, 9873], [9754, 9755, 9875], [9754, 9875, 9874], [9755, 9756, 9875], [9756, 9876, 9875], [9756, 9757, 9877], [9756, 9877, 9876], [9757, 9758, 9877], [9758, 9878, 9877], [9758, 9759, 9879], [9758, 9879, 9878], [9759, 9760, 9879], [9760, 9880, 9879], [9760, 9761, 9881], [9760, 9881, 9880], [9761, 9762, 9881], [9762, 9882, 9881], [9762, 9763, 9883], [9762, 9883, 9882], [9763, 9764, 9883], [9764, 9884, 9883], [9764, 9765, 9885], [9764, 9885, 9884], [9765, 9766, 9885], [9766, 9886, 9885], [9766, 9767, 9887], [9766, 9887, 9886], [9767, 9768, 9887], [9768, 9888, 9887], [9768, 9769, 9889], [9768, 9889, 9888], [9769, 9770, 9889], [9770, 9890, 9889], [9770, 9771, 9891], [9770, 9891, 9890], [9771, 9772, 9891], [9772, 9892, 9891], [9772, 9773, 9893], [9772, 9893, 9892], [9773, 9774, 9893], [9774, 9894, 9893], [9774, 9775, 9895], [9774, 9895, 9894], [9775, 9776, 9895], [9776, 9896, 9895], [9776, 9777, 9897], [9776, 9897, 9896], [9777, 9778, 9897], [9778, 9898, 9897], [9778, 9779, 9899], [9778, 9899, 9898], [9779, 9780, 9899], [9780, 9900, 9899], [9780, 9781, 9901], [9780, 9901, 9900], [9781, 9782, 9901], [9782, 9902, 9901], [9782, 9783, 9903], [9782, 9903, 9902], [9783, 9784, 9903], [9784, 9904, 9903], [9784, 9785, 9905], [9784, 9905, 9904], [9785, 9786, 9905], [9786, 9906, 9905], [9786, 9787, 9907], [9786, 9907, 9906], [9787, 9788, 9907], [9788, 9908, 9907], [9788, 9789, 9909], [9788, 9909, 9908], [9789, 9790, 9909], [9790, 9910, 9909], [9790, 9791, 9911], [9790, 9911, 9910], [9791, 9792, 9911], [9792, 9912, 9911], [9792, 9793, 9913], [9792, 9913, 9912], [9793, 9794, 9913], [9794, 9914, 9913], [9794, 9795, 9915], [9794, 9915, 9914], [9795, 9796, 9915], [9796, 9916, 9915], [9796, 9797, 9917], [9796, 9917, 9916], [9797, 9798, 9917], [9798, 9918, 9917], [9798, 9799, 9919], [9798, 9919, 9918], [9799, 9800, 9919], [9800, 9920, 9919], [9800, 9801, 9921], [9800, 9921, 9920], [9801, 9802, 9921], [9802, 9922, 9921], [9802, 9803, 9923], [9802, 9923, 9922], [9803, 9804, 9923], [9804, 9924, 9923], [9804, 9805, 9925], [9804, 9925, 9924], [9805, 9806, 9925], [9806, 9926, 9925], [9806, 9807, 9927], [9806, 9927, 9926], [9807, 9808, 9927], [9808, 9928, 9927], [9808, 9809, 9929], [9808, 9929, 9928], [9809, 9810, 9929], [9810, 9930, 9929], [9810, 9811, 9931], [9810, 9931, 9930], [9811, 9812, 9931], [9812, 9932, 9931], [9812, 9813, 9933], [9812, 9933, 9932], [9813, 9814, 9933], [9814, 9934, 9933], [9814, 9815, 9935], [9814, 9935, 9934], [9815, 9816, 9935], [9816, 9936, 9935], [9816, 9817, 9937], [9816, 9937, 9936], [9817, 9818, 9937], [9818, 9938, 9937], [9818, 9819, 9939], [9818, 9939, 9938], [9819, 9820, 9939], [9820, 9940, 9939], [9820, 9821, 9941], [9820, 9941, 9940], [9821, 9822, 9941], [9822, 9942, 9941], [9822, 9823, 9943], [9822, 9943, 9942], [9823, 9824, 9943], [9824, 9944, 9943], [9824, 9825, 9945], [9824, 9945, 9944], [9825, 9826, 9945], [9826, 9946, 9945], [9826, 9827, 9947], [9826, 9947, 9946], [9827, 9828, 9947], [9828, 9948, 9947], [9828, 9829, 9949], [9828, 9949, 9948], [9829, 9830, 9949], [9830, 9950, 9949], [9830, 9831, 9951], [9830, 9951, 9950], [9831, 9832, 9951], [9832, 9952, 9951], [9832, 9833, 9953], [9832, 9953, 9952], [9833, 9834, 9953], [9834, 9954, 9953], [9834, 9835, 9955], [9834, 9955, 9954], [9835, 9836, 9955], [9836, 9956, 9955], [9836, 9837, 9957], [9836, 9957, 9956], [9837, 9838, 9957], [9838, 9958, 9957], [9838, 9839, 9959], [9838, 9959, 9958], [9839, 9840, 9959], [9840, 9960, 9959], [9840, 9841, 9961], [9840, 9961, 9960], [9841, 9842, 
9961], [9842, 9962, 9961], [9843, 9964, 9963], [9843, 9844, 9965], [9843, 9965, 9964], [9844, 9845, 9965], [9845, 9966, 9965], [9845, 9846, 9967], [9845, 9967, 9966], [9846, 9847, 9967], [9847, 9968, 9967], [9847, 9848, 9969], [9847, 9969, 9968], [9848, 9849, 9969], [9849, 9970, 9969], [9849, 9850, 9971], [9849, 9971, 9970], [9850, 9851, 9971], [9851, 9972, 9971], [9851, 9852, 9973], [9851, 9973, 9972], [9852, 9853, 9973], [9853, 9974, 9973], [9853, 9854, 9975], [9853, 9975, 9974], [9854, 9855, 9975], [9855, 9976, 9975], [9855, 9856, 9977], [9855, 9977, 9976], [9856, 9857, 9977], [9857, 9978, 9977], [9857, 9858, 9979], [9857, 9979, 9978], [9858, 9859, 9979], [9859, 9980, 9979], [9859, 9860, 9981], [9859, 9981, 9980], [9860, 9861, 9981], [9861, 9982, 9981], [9861, 9862, 9983], [9861, 9983, 9982], [9862, 9863, 9983], [9863, 9984, 9983], [9864, 9865, 9985], [9865, 9986, 9985], [9865, 9866, 9987], [9865, 9987, 9986], [9866, 9867, 9987], [9867, 9988, 9987], [9867, 9868, 9989], [9867, 9989, 9988], [9868, 9869, 9989], [9869, 9990, 9989], [9869, 9870, 9991], [9869, 9991, 9990], [9870, 9871, 9991], [9871, 9992, 9991], [9871, 9872, 9993], [9871, 9993, 9992], [9872, 9873, 9993], [9873, 9994, 9993], [9873, 9874, 9995], [9873, 9995, 9994], [9874, 9875, 9995], [9875, 9996, 9995], [9875, 9876, 9997], [9875, 9997, 9996], [9876, 9877, 9997], [9877, 9998, 9997], [9877, 9878, 9999], [9877, 9999, 9998], [9878, 9879, 9999], [9879, 10000, 9999], [9879, 9880, 10001], [9879, 10001, 10000], [9880, 9881, 10001], [9881, 10002, 10001], [9881, 9882, 10003], [9881, 10003, 10002], [9882, 9883, 10003], [9883, 10004, 10003], [9883, 9884, 10005], [9883, 10005, 10004], [9884, 9885, 10005], [9885, 10006, 10005], [9885, 9886, 10007], [9885, 10007, 10006], [9886, 9887, 10007], [9887, 10008, 10007], [9887, 9888, 10009], [9887, 10009, 10008], [9888, 9889, 10009], [9889, 10010, 10009], [9889, 9890, 10011], [9889, 10011, 10010], [9890, 9891, 10011], [9891, 10012, 10011], [9891, 9892, 10013], [9891, 10013, 10012], [9892, 9893, 10013], [9893, 10014, 10013], [9893, 9894, 10015], [9893, 10015, 10014], [9894, 9895, 10015], [9895, 10016, 10015], [9895, 9896, 10017], [9895, 10017, 10016], [9896, 9897, 10017], [9897, 10018, 10017], [9897, 9898, 10019], [9897, 10019, 10018], [9898, 9899, 10019], [9899, 10020, 10019], [9899, 9900, 10021], [9899, 10021, 10020], [9900, 9901, 10021], [9901, 10022, 10021], [9901, 9902, 10023], [9901, 10023, 10022], [9902, 9903, 10023], [9903, 10024, 10023], [9903, 9904, 10025], [9903, 10025, 10024], [9904, 9905, 10025], [9905, 10026, 10025], [9905, 9906, 10027], [9905, 10027, 10026], [9906, 9907, 10027], [9907, 10028, 10027], [9907, 9908, 10029], [9907, 10029, 10028], [9908, 9909, 10029], [9909, 10030, 10029], [9909, 9910, 10031], [9909, 10031, 10030], [9910, 9911, 10031], [9911, 10032, 10031], [9911, 9912, 10033], [9911, 10033, 10032], [9912, 9913, 10033], [9913, 10034, 10033], [9913, 9914, 10035], [9913, 10035, 10034], [9914, 9915, 10035], [9915, 10036, 10035], [9915, 9916, 10037], [9915, 10037, 10036], [9916, 9917, 10037], [9917, 10038, 10037], [9917, 9918, 10039], [9917, 10039, 10038], [9918, 9919, 10039], [9919, 10040, 10039], [9919, 9920, 10041], [9919, 10041, 10040], [9920, 9921, 10041], [9921, 10042, 10041], [9921, 9922, 10043], [9921, 10043, 10042], [9922, 9923, 10043], [9923, 10044, 10043], [9923, 9924, 10045], [9923, 10045, 10044], [9924, 9925, 10045], [9925, 10046, 10045], [9925, 9926, 10047], [9925, 10047, 10046], [9926, 9927, 10047], [9927, 10048, 10047], [9927, 9928, 10049], [9927, 10049, 10048], 
[9928, 9929, 10049], [9929, 10050, 10049], [9929, 9930, 10051], [9929, 10051, 10050], [9930, 9931, 10051], [9931, 10052, 10051], [9931, 9932, 10053], [9931, 10053, 10052], [9932, 9933, 10053], [9933, 10054, 10053], [9933, 9934, 10055], [9933, 10055, 10054], [9934, 9935, 10055], [9935, 10056, 10055], [9935, 9936, 10057], [9935, 10057, 10056], [9936, 9937, 10057], [9937, 10058, 10057], [9937, 9938, 10059], [9937, 10059, 10058], [9938, 9939, 10059], [9939, 10060, 10059], [9939, 9940, 10061], [9939, 10061, 10060], [9940, 9941, 10061], [9941, 10062, 10061], [9941, 9942, 10063], [9941, 10063, 10062], [9942, 9943, 10063], [9943, 10064, 10063], [9943, 9944, 10065], [9943, 10065, 10064], [9944, 9945, 10065], [9945, 10066, 10065], [9945, 9946, 10067], [9945, 10067, 10066], [9946, 9947, 10067], [9947, 10068, 10067], [9947, 9948, 10069], [9947, 10069, 10068], [9948, 9949, 10069], [9949, 10070, 10069], [9949, 9950, 10071], [9949, 10071, 10070], [9950, 9951, 10071], [9951, 10072, 10071], [9951, 9952, 10073], [9951, 10073, 10072], [9952, 9953, 10073], [9953, 10074, 10073], [9953, 9954, 10075], [9953, 10075, 10074], [9954, 9955, 10075], [9955, 10076, 10075], [9955, 9956, 10077], [9955, 10077, 10076], [9956, 9957, 10077], [9957, 10078, 10077], [9957, 9958, 10079], [9957, 10079, 10078], [9958, 9959, 10079], [9959, 10080, 10079], [9959, 9960, 10081], [9959, 10081, 10080], [9960, 9961, 10081], [9961, 10082, 10081], [9961, 9962, 10083], [9961, 10083, 10082], [9963, 10089, 10088], [9963, 9964, 10090], [9963, 10090, 10089], [9964, 9965, 10090], [9965, 10091, 10090], [9965, 9966, 10092], [9965, 10092, 10091], [9966, 9967, 10092], [9967, 10093, 10092], [9967, 9968, 10094], [9967, 10094, 10093], [9968, 9969, 10094], [9969, 10095, 10094], [9969, 9970, 10096], [9969, 10096, 10095], [9970, 9971, 10096], [9971, 10097, 10096], [9971, 9972, 10098], [9971, 10098, 10097], [9972, 9973, 10098], [9973, 10099, 10098], [9973, 9974, 10100], [9973, 10100, 10099], [9974, 9975, 10100], [9975, 10101, 10100], [9975, 9976, 10102], [9975, 10102, 10101], [9976, 9977, 10102], [9977, 10103, 10102], [9977, 9978, 10104], [9977, 10104, 10103], [9978, 9979, 10104], [9979, 10105, 10104], [9979, 9980, 10106], [9979, 10106, 10105], [9980, 9981, 10106], [9981, 10107, 10106], [9981, 9982, 10108], [9981, 10108, 10107], [9982, 9983, 10108], [9983, 10109, 10108], [9983, 9984, 10110], [9983, 10110, 10109], [9985, 9986, 10112], [9985, 10112, 10111], [9986, 9987, 10112], [9987, 10113, 10112], [9987, 9988, 10114], [9987, 10114, 10113], [9988, 9989, 10114], [9989, 10115, 10114], [9989, 9990, 10116], [9989, 10116, 10115], [9990, 9991, 10116], [9991, 10117, 10116], [9991, 9992, 10118], [9991, 10118, 10117], [9992, 9993, 10118], [9993, 10119, 10118], [9993, 9994, 10120], [9993, 10120, 10119], [9994, 9995, 10120], [9995, 10121, 10120], [9995, 9996, 10122], [9995, 10122, 10121], [9996, 9997, 10122], [9997, 10123, 10122], [9997, 9998, 10124], [9997, 10124, 10123], [9998, 9999, 10124], [9999, 10125, 10124], [9999, 10000, 10126], [9999, 10126, 10125], [10000, 10001, 10126], [10001, 10127, 10126], [10001, 10002, 10128], [10001, 10128, 10127], [10002, 10003, 10128], [10003, 10129, 10128], [10003, 10004, 10130], [10003, 10130, 10129], [10004, 10005, 10130], [10005, 10131, 10130], [10005, 10006, 10132], [10005, 10132, 10131], [10006, 10007, 10132], [10007, 10133, 10132], [10007, 10008, 10134], [10007, 10134, 10133], [10008, 10009, 10134], [10009, 10135, 10134], [10009, 10010, 10136], [10009, 10136, 10135], [10010, 10011, 10136], [10011, 10137, 10136], [10011, 10012, 
10138], [10011, 10138, 10137], [10012, 10013, 10138], [10013, 10139, 10138], [10013, 10014, 10140], [10013, 10140, 10139], [10014, 10015, 10140], [10015, 10141, 10140], [10015, 10016, 10142], [10015, 10142, 10141], [10016, 10017, 10142], [10017, 10143, 10142], [10017, 10018, 10144], [10017, 10144, 10143], [10018, 10019, 10144], [10019, 10145, 10144], [10019, 10020, 10146], [10019, 10146, 10145], [10020, 10021, 10146], [10021, 10147, 10146], [10021, 10022, 10148], [10021, 10148, 10147], [10022, 10023, 10148], [10023, 10149, 10148], [10023, 10024, 10150], [10023, 10150, 10149], [10024, 10025, 10150], [10025, 10151, 10150], [10025, 10026, 10152], [10025, 10152, 10151], [10026, 10027, 10152], [10027, 10153, 10152], [10027, 10028, 10154], [10027, 10154, 10153], [10028, 10029, 10154], [10029, 10155, 10154], [10029, 10030, 10156], [10029, 10156, 10155], [10030, 10031, 10156], [10031, 10157, 10156], [10031, 10032, 10158], [10031, 10158, 10157], [10032, 10033, 10158], [10033, 10159, 10158], [10033, 10034, 10160], [10033, 10160, 10159], [10034, 10035, 10160], [10035, 10161, 10160], [10035, 10036, 10162], [10035, 10162, 10161], [10036, 10037, 10162], [10037, 10163, 10162], [10037, 10038, 10164], [10037, 10164, 10163], [10038, 10039, 10164], [10039, 10165, 10164], [10039, 10040, 10166], [10039, 10166, 10165], [10040, 10041, 10166], [10041, 10167, 10166], [10041, 10042, 10168], [10041, 10168, 10167], [10042, 10043, 10168], [10043, 10169, 10168], [10043, 10044, 10170], [10043, 10170, 10169], [10044, 10045, 10170], [10045, 10171, 10170], [10045, 10046, 10172], [10045, 10172, 10171], [10046, 10047, 10172], [10047, 10173, 10172], [10047, 10048, 10174], [10047, 10174, 10173], [10048, 10049, 10174], [10049, 10175, 10174], [10049, 10050, 10176], [10049, 10176, 10175], [10050, 10051, 10176], [10051, 10177, 10176], [10051, 10052, 10178], [10051, 10178, 10177], [10052, 10053, 10178], [10053, 10179, 10178], [10053, 10054, 10180], [10053, 10180, 10179], [10054, 10055, 10180], [10055, 10181, 10180], [10055, 10056, 10182], [10055, 10182, 10181], [10056, 10057, 10182], [10057, 10183, 10182], [10057, 10058, 10184], [10057, 10184, 10183], [10058, 10059, 10184], [10059, 10185, 10184], [10059, 10060, 10186], [10059, 10186, 10185], [10060, 10061, 10186], [10061, 10187, 10186], [10061, 10062, 10188], [10061, 10188, 10187], [10062, 10063, 10188], [10063, 10189, 10188], [10063, 10064, 10190], [10063, 10190, 10189], [10064, 10065, 10190], [10065, 10191, 10190], [10065, 10066, 10192], [10065, 10192, 10191], [10066, 10067, 10192], [10067, 10193, 10192], [10067, 10068, 10194], [10067, 10194, 10193], [10068, 10069, 10194], [10069, 10195, 10194], [10069, 10070, 10196], [10069, 10196, 10195], [10070, 10071, 10196], [10071, 10197, 10196], [10071, 10072, 10198], [10071, 10198, 10197], [10072, 10073, 10198], [10073, 10199, 10198], [10073, 10074, 10200], [10073, 10200, 10199], [10074, 10075, 10200], [10075, 10201, 10200], [10075, 10076, 10202], [10075, 10202, 10201], [10076, 10077, 10202], [10077, 10203, 10202], [10077, 10078, 10204], [10077, 10204, 10203], [10078, 10079, 10204], [10079, 10205, 10204], [10079, 10080, 10206], [10079, 10206, 10205], [10080, 10081, 10206], [10081, 10207, 10206], [10081, 10082, 10208], [10081, 10208, 10207], [10082, 10083, 10208], [10083, 10209, 10208], [10083, 10084, 10210], [10083, 10210, 10209], [10084, 10085, 10210], [10085, 10211, 10210], [10086, 10087, 10213], [10086, 10213, 10212], [10087, 10088, 10213], [10088, 10214, 10213], [10088, 10089, 10215], [10088, 10215, 10214], [10089, 10090, 10215], 
[10090, 10216, 10215], [10090, 10091, 10217], [10090, 10217, 10216], [10091, 10092, 10217], [10092, 10218, 10217], [10092, 10093, 10219], [10092, 10219, 10218], [10093, 10094, 10219], [10094, 10220, 10219], [10094, 10095, 10221], [10094, 10221, 10220], [10095, 10096, 10221], [10096, 10222, 10221], [10096, 10097, 10223], [10096, 10223, 10222], [10097, 10098, 10223], [10098, 10224, 10223], [10098, 10099, 10225], [10098, 10225, 10224], [10099, 10100, 10225], [10100, 10226, 10225], [10100, 10101, 10227], [10100, 10227, 10226], [10101, 10102, 10227], [10102, 10228, 10227], [10102, 10103, 10229], [10102, 10229, 10228], [10103, 10104, 10229], [10104, 10230, 10229], [10104, 10105, 10231], [10104, 10231, 10230], [10105, 10106, 10231], [10106, 10232, 10231], [10106, 10107, 10233], [10106, 10233, 10232], [10107, 10108, 10233], [10108, 10234, 10233], [10108, 10109, 10235], [10108, 10235, 10234], [10109, 10110, 10235], [10110, 10236, 10235], [10111, 10112, 10237], [10112, 10238, 10237], [10112, 10113, 10239], [10112, 10239, 10238], [10113, 10114, 10239], [10114, 10240, 10239], [10114, 10115, 10241], [10114, 10241, 10240], [10115, 10116, 10241], [10116, 10242, 10241], [10116, 10117, 10243], [10116, 10243, 10242], [10117, 10118, 10243], [10118, 10244, 10243], [10118, 10119, 10245], [10118, 10245, 10244], [10119, 10120, 10245], [10120, 10246, 10245], [10120, 10121, 10247], [10120, 10247, 10246], [10121, 10122, 10247], [10122, 10248, 10247], [10122, 10123, 10249], [10122, 10249, 10248], [10123, 10124, 10249], [10124, 10250, 10249], [10124, 10125, 10251], [10124, 10251, 10250], [10125, 10126, 10251], [10126, 10252, 10251], [10126, 10127, 10253], [10126, 10253, 10252], [10127, 10128, 10253], [10128, 10254, 10253], [10128, 10129, 10255], [10128, 10255, 10254], [10129, 10130, 10255], [10130, 10256, 10255], [10130, 10131, 10257], [10130, 10257, 10256], [10131, 10132, 10257], [10132, 10258, 10257], [10132, 10133, 10259], [10132, 10259, 10258], [10133, 10134, 10259], [10134, 10260, 10259], [10134, 10135, 10261], [10134, 10261, 10260], [10135, 10136, 10261], [10136, 10262, 10261], [10136, 10137, 10263], [10136, 10263, 10262], [10137, 10138, 10263], [10138, 10264, 10263], [10138, 10139, 10265], [10138, 10265, 10264], [10139, 10140, 10265], [10140, 10266, 10265], [10140, 10141, 10267], [10140, 10267, 10266], [10141, 10142, 10267], [10142, 10268, 10267], [10142, 10143, 10269], [10142, 10269, 10268], [10143, 10144, 10269], [10144, 10270, 10269], [10144, 10145, 10271], [10144, 10271, 10270], [10145, 10146, 10271], [10146, 10272, 10271], [10146, 10147, 10273], [10146, 10273, 10272], [10147, 10148, 10273], [10148, 10274, 10273], [10148, 10149, 10275], [10148, 10275, 10274], [10149, 10150, 10275], [10150, 10276, 10275], [10150, 10151, 10277], [10150, 10277, 10276], [10151, 10152, 10277], [10152, 10278, 10277], [10152, 10153, 10279], [10152, 10279, 10278], [10153, 10154, 10279], [10154, 10280, 10279], [10154, 10155, 10281], [10154, 10281, 10280], [10155, 10156, 10281], [10156, 10282, 10281], [10156, 10157, 10283], [10156, 10283, 10282], [10157, 10158, 10283], [10158, 10284, 10283], [10158, 10159, 10285], [10158, 10285, 10284], [10159, 10160, 10285], [10160, 10286, 10285], [10160, 10161, 10287], [10160, 10287, 10286], [10161, 10162, 10287], [10162, 10288, 10287], [10162, 10163, 10289], [10162, 10289, 10288], [10163, 10164, 10289], [10164, 10290, 10289], [10164, 10165, 10291], [10164, 10291, 10290], [10165, 10166, 10291], [10166, 10292, 10291], [10166, 10167, 10293], [10166, 10293, 10292], [10167, 10168, 10293], [10168, 
10294, 10293], [10168, 10169, 10295], [10168, 10295, 10294], [10169, 10170, 10295], [10170, 10296, 10295], [10170, 10171, 10297], [10170, 10297, 10296], [10171, 10172, 10297], [10172, 10298, 10297], [10172, 10173, 10299], [10172, 10299, 10298], [10173, 10174, 10299], [10174, 10300, 10299], [10174, 10175, 10301], [10174, 10301, 10300], [10175, 10176, 10301], [10176, 10302, 10301], [10176, 10177, 10303], [10176, 10303, 10302], [10177, 10178, 10303], [10178, 10304, 10303], [10178, 10179, 10305], [10178, 10305, 10304], [10179, 10180, 10305], [10180, 10306, 10305], [10180, 10181, 10307], [10180, 10307, 10306], [10181, 10182, 10307], [10182, 10308, 10307], [10182, 10183, 10309], [10182, 10309, 10308], [10183, 10184, 10309], [10184, 10310, 10309], [10184, 10185, 10311], [10184, 10311, 10310], [10185, 10186, 10311], [10186, 10312, 10311], [10186, 10187, 10313], [10186, 10313, 10312], [10187, 10188, 10313], [10188, 10314, 10313], [10188, 10189, 10315], [10188, 10315, 10314], [10189, 10190, 10315], [10190, 10316, 10315], [10190, 10191, 10317], [10190, 10317, 10316], [10191, 10192, 10317], [10192, 10318, 10317], [10192, 10193, 10319], [10192, 10319, 10318], [10193, 10194, 10319], [10194, 10320, 10319], [10194, 10195, 10321], [10194, 10321, 10320], [10195, 10196, 10321], [10196, 10322, 10321], [10196, 10197, 10323], [10196, 10323, 10322], [10197, 10198, 10323], [10198, 10324, 10323], [10198, 10199, 10325], [10198, 10325, 10324], [10199, 10200, 10325], [10200, 10326, 10325], [10200, 10201, 10327], [10200, 10327, 10326], [10201, 10202, 10327], [10202, 10328, 10327], [10202, 10203, 10329], [10202, 10329, 10328], [10203, 10204, 10329], [10204, 10330, 10329], [10204, 10205, 10331], [10204, 10331, 10330], [10205, 10206, 10331], [10206, 10332, 10331], [10206, 10207, 10333], [10206, 10333, 10332], [10207, 10208, 10333], [10208, 10334, 10333], [10208, 10209, 10335], [10208, 10335, 10334], [10209, 10210, 10335], [10210, 10336, 10335], [10210, 10211, 10337], [10210, 10337, 10336], [10212, 10213, 10341], [10213, 10342, 10341], [10213, 10214, 10343], [10213, 10343, 10342], [10214, 10215, 10343], [10215, 10344, 10343], [10215, 10216, 10345], [10215, 10345, 10344], [10216, 10217, 10345], [10217, 10346, 10345], [10217, 10218, 10347], [10217, 10347, 10346], [10218, 10219, 10347], [10219, 10348, 10347], [10219, 10220, 10349], [10219, 10349, 10348], [10220, 10221, 10349], [10221, 10350, 10349], [10221, 10222, 10351], [10221, 10351, 10350], [10222, 10223, 10351], [10223, 10352, 10351], [10223, 10224, 10353], [10223, 10353, 10352], [10224, 10225, 10353], [10225, 10354, 10353], [10225, 10226, 10355], [10225, 10355, 10354], [10226, 10227, 10355], [10227, 10356, 10355], [10227, 10228, 10357], [10227, 10357, 10356], [10228, 10229, 10357], [10229, 10358, 10357], [10229, 10230, 10359], [10229, 10359, 10358], [10230, 10231, 10359], [10231, 10360, 10359], [10231, 10232, 10361], [10231, 10361, 10360], [10232, 10233, 10361], [10233, 10362, 10361], [10233, 10234, 10363], [10233, 10363, 10362], [10234, 10235, 10363], [10235, 10364, 10363], [10235, 10236, 10365], [10235, 10365, 10364], [10237, 10238, 10367], [10237, 10367, 10366], [10238, 10239, 10367], [10239, 10368, 10367], [10239, 10240, 10369], [10239, 10369, 10368], [10240, 10241, 10369], [10241, 10370, 10369], [10241, 10242, 10371], [10241, 10371, 10370], [10242, 10243, 10371], [10243, 10372, 10371], [10243, 10244, 10373], [10243, 10373, 10372], [10244, 10245, 10373], [10245, 10374, 10373], [10245, 10246, 10375], [10245, 10375, 10374], [10246, 10247, 10375], [10247, 10376, 
10375], [10247, 10248, 10377], [10247, 10377, 10376], [10248, 10249, 10377], [10249, 10378, 10377], [10249, 10250, 10379], [10249, 10379, 10378], [10250, 10251, 10379], [10251, 10380, 10379], [10251, 10252, 10381], [10251, 10381, 10380], [10252, 10253, 10381], [10253, 10382, 10381], [10253, 10254, 10383], [10253, 10383, 10382], [10254, 10255, 10383], [10255, 10384, 10383], [10255, 10256, 10385], [10255, 10385, 10384], [10256, 10257, 10385], [10257, 10386, 10385], [10257, 10258, 10387], [10257, 10387, 10386], [10258, 10259, 10387], [10259, 10388, 10387], [10259, 10260, 10389], [10259, 10389, 10388], [10260, 10261, 10389], [10261, 10390, 10389], [10261, 10262, 10391], [10261, 10391, 10390], [10262, 10263, 10391], [10263, 10392, 10391], [10263, 10264, 10393], [10263, 10393, 10392], [10264, 10265, 10393], [10265, 10394, 10393], [10265, 10266, 10395], [10265, 10395, 10394], [10266, 10267, 10395], [10267, 10396, 10395], [10267, 10268, 10397], [10267, 10397, 10396], [10268, 10269, 10397], [10269, 10398, 10397], [10269, 10270, 10399], [10269, 10399, 10398], [10270, 10271, 10399], [10271, 10400, 10399], [10271, 10272, 10401], [10271, 10401, 10400], [10272, 10273, 10401], [10273, 10402, 10401], [10273, 10274, 10403], [10273, 10403, 10402], [10274, 10275, 10403], [10275, 10404, 10403], [10275, 10276, 10405], [10275, 10405, 10404], [10276, 10277, 10405], [10277, 10406, 10405], [10277, 10278, 10407], [10277, 10407, 10406], [10278, 10279, 10407], [10279, 10408, 10407], [10279, 10280, 10409], [10279, 10409, 10408], [10280, 10281, 10409], [10281, 10410, 10409], [10281, 10282, 10411], [10281, 10411, 10410], [10282, 10283, 10411], [10283, 10412, 10411], [10283, 10284, 10413], [10283, 10413, 10412], [10284, 10285, 10413], [10285, 10414, 10413], [10285, 10286, 10415], [10285, 10415, 10414], [10286, 10287, 10415], [10287, 10416, 10415], [10287, 10288, 10417], [10287, 10417, 10416], [10288, 10289, 10417], [10289, 10418, 10417], [10289, 10290, 10419], [10289, 10419, 10418], [10290, 10291, 10419], [10291, 10420, 10419], [10291, 10292, 10421], [10291, 10421, 10420], [10292, 10293, 10421], [10293, 10422, 10421], [10293, 10294, 10423], [10293, 10423, 10422], [10294, 10295, 10423], [10295, 10424, 10423], [10295, 10296, 10425], [10295, 10425, 10424], [10296, 10297, 10425], [10297, 10426, 10425], [10297, 10298, 10427], [10297, 10427, 10426], [10298, 10299, 10427], [10299, 10428, 10427], [10299, 10300, 10429], [10299, 10429, 10428], [10300, 10301, 10429], [10301, 10430, 10429], [10301, 10302, 10431], [10301, 10431, 10430], [10302, 10303, 10431], [10303, 10432, 10431], [10303, 10304, 10433], [10303, 10433, 10432], [10304, 10305, 10433], [10305, 10434, 10433], [10305, 10306, 10435], [10305, 10435, 10434], [10306, 10307, 10435], [10307, 10436, 10435], [10307, 10308, 10437], [10307, 10437, 10436], [10308, 10309, 10437], [10309, 10438, 10437], [10309, 10310, 10439], [10309, 10439, 10438], [10310, 10311, 10439], [10311, 10440, 10439], [10311, 10312, 10441], [10311, 10441, 10440], [10312, 10313, 10441], [10313, 10442, 10441], [10313, 10314, 10443], [10313, 10443, 10442], [10314, 10315, 10443], [10315, 10444, 10443], [10315, 10316, 10445], [10315, 10445, 10444], [10316, 10317, 10445], [10317, 10446, 10445], [10317, 10318, 10447], [10317, 10447, 10446], [10318, 10319, 10447], [10319, 10448, 10447], [10319, 10320, 10449], [10319, 10449, 10448], [10320, 10321, 10449], [10321, 10450, 10449], [10321, 10322, 10451], [10321, 10451, 10450], [10322, 10323, 10451], [10323, 10452, 10451], [10323, 10324, 10453], [10323, 10453, 10452], 
[10324, 10325, 10453], [10325, 10454, 10453], [10325, 10326, 10455], [10325, 10455, 10454], [10326, 10327, 10455], [10327, 10456, 10455], [10327, 10328, 10457], [10327, 10457, 10456], [10328, 10329, 10457], [10329, 10458, 10457], [10329, 10330, 10459], [10329, 10459, 10458], [10330, 10331, 10459], [10331, 10460, 10459], [10331, 10332, 10461], [10331, 10461, 10460], [10332, 10333, 10461], [10333, 10462, 10461], [10333, 10334, 10463], [10333, 10463, 10462], [10334, 10335, 10463], [10335, 10464, 10463], [10335, 10336, 10465], [10335, 10465, 10464], [10336, 10337, 10465], [10337, 10466, 10465], [10337, 10338, 10467], [10337, 10467, 10466], [10338, 10339, 10467], [10339, 10468, 10467], [10339, 10340, 10469], [10339, 10469, 10468], [10340, 10341, 10469], [10341, 10470, 10469], [10341, 10342, 10471], [10341, 10471, 10470], [10342, 10343, 10471], [10343, 10472, 10471], [10343, 10344, 10473], [10343, 10473, 10472], [10344, 10345, 10473], [10345, 10474, 10473], [10345, 10346, 10475], [10345, 10475, 10474], [10346, 10347, 10475], [10347, 10476, 10475], [10347, 10348, 10477], [10347, 10477, 10476], [10348, 10349, 10477], [10349, 10478, 10477], [10349, 10350, 10479], [10349, 10479, 10478], [10350, 10351, 10479], [10351, 10480, 10479], [10351, 10352, 10481], [10351, 10481, 10480], [10352, 10353, 10481], [10353, 10482, 10481], [10353, 10354, 10483], [10353, 10483, 10482], [10354, 10355, 10483], [10355, 10484, 10483], [10355, 10356, 10485], [10355, 10485, 10484], [10356, 10357, 10485], [10357, 10486, 10485], [10357, 10358, 10487], [10357, 10487, 10486], [10358, 10359, 10487], [10359, 10488, 10487], [10359, 10360, 10489], [10359, 10489, 10488], [10360, 10361, 10489], [10361, 10490, 10489], [10361, 10362, 10491], [10361, 10491, 10490], [10362, 10363, 10491], [10363, 10492, 10491], [10363, 10364, 10493], [10363, 10493, 10492], [10364, 10365, 10493], [10365, 10494, 10493], [10366, 10367, 10495], [10367, 10496, 10495], [10367, 10368, 10497], [10367, 10497, 10496], [10368, 10369, 10497], [10369, 10498, 10497], [10369, 10370, 10499], [10369, 10499, 10498], [10370, 10371, 10499], [10371, 10500, 10499], [10371, 10372, 10501], [10371, 10501, 10500], [10372, 10373, 10501], [10373, 10502, 10501], [10373, 10374, 10503], [10373, 10503, 10502], [10374, 10375, 10503], [10375, 10504, 10503], [10375, 10376, 10505], [10375, 10505, 10504], [10376, 10377, 10505], [10377, 10506, 10505], [10377, 10378, 10507], [10377, 10507, 10506], [10378, 10379, 10507], [10379, 10508, 10507], [10379, 10380, 10509], [10379, 10509, 10508], [10380, 10381, 10509], [10381, 10510, 10509], [10381, 10382, 10511], [10381, 10511, 10510], [10382, 10383, 10511], [10383, 10512, 10511], [10383, 10384, 10513], [10383, 10513, 10512], [10384, 10385, 10513], [10385, 10514, 10513], [10385, 10386, 10515], [10385, 10515, 10514], [10386, 10387, 10515], [10387, 10516, 10515], [10387, 10388, 10517], [10387, 10517, 10516], [10388, 10389, 10517], [10389, 10518, 10517], [10389, 10390, 10519], [10389, 10519, 10518], [10390, 10391, 10519], [10391, 10520, 10519], [10391, 10392, 10521], [10391, 10521, 10520], [10392, 10393, 10521], [10393, 10522, 10521], [10393, 10394, 10523], [10393, 10523, 10522], [10394, 10395, 10523], [10395, 10524, 10523], [10395, 10396, 10525], [10395, 10525, 10524], [10396, 10397, 10525], [10397, 10526, 10525], [10397, 10398, 10527], [10397, 10527, 10526], [10398, 10399, 10527], [10399, 10528, 10527], [10399, 10400, 10529], [10399, 10529, 10528], [10400, 10401, 10529], [10401, 10530, 10529], [10401, 10402, 10531], [10401, 10531, 10530], [10402, 
10403, 10531], [10403, 10532, 10531], [10403, 10404, 10533], [10403, 10533, 10532], [10404, 10405, 10533], [10405, 10534, 10533], [10405, 10406, 10535], [10405, 10535, 10534], [10406, 10407, 10535], [10407, 10536, 10535], [10407, 10408, 10537], [10407, 10537, 10536], [10408, 10409, 10537], [10409, 10538, 10537], [10409, 10410, 10539], [10409, 10539, 10538], [10410, 10411, 10539], [10411, 10540, 10539], [10411, 10412, 10541], [10411, 10541, 10540], [10412, 10413, 10541], [10413, 10542, 10541], [10413, 10414, 10543], [10413, 10543, 10542], [10414, 10415, 10543], [10415, 10544, 10543], [10415, 10416, 10545], [10415, 10545, 10544], [10416, 10417, 10545], [10417, 10546, 10545], [10417, 10418, 10547], [10417, 10547, 10546], [10418, 10419, 10547], [10419, 10548, 10547], [10419, 10420, 10549], [10419, 10549, 10548], [10420, 10421, 10549], [10421, 10550, 10549], [10421, 10422, 10551], [10421, 10551, 10550], [10422, 10423, 10551], [10423, 10552, 10551], [10423, 10424, 10553], [10423, 10553, 10552], [10424, 10425, 10553], [10425, 10554, 10553], [10425, 10426, 10555], [10425, 10555, 10554], [10426, 10427, 10555], [10427, 10556, 10555], [10427, 10428, 10557], [10427, 10557, 10556], [10428, 10429, 10557], [10429, 10558, 10557], [10429, 10430, 10559], [10429, 10559, 10558], [10430, 10431, 10559], [10431, 10560, 10559], [10431, 10432, 10561], [10431, 10561, 10560], [10432, 10433, 10561], [10433, 10562, 10561], [10433, 10434, 10563], [10433, 10563, 10562], [10434, 10435, 10563], [10435, 10564, 10563], [10435, 10436, 10565], [10435, 10565, 10564], [10436, 10437, 10565], [10437, 10566, 10565], [10437, 10438, 10567], [10437, 10567, 10566], [10438, 10439, 10567], [10439, 10568, 10567], [10439, 10440, 10569], [10439, 10569, 10568], [10440, 10441, 10569], [10441, 10570, 10569], [10441, 10442, 10571], [10441, 10571, 10570], [10442, 10443, 10571], [10443, 10572, 10571], [10443, 10444, 10573], [10443, 10573, 10572], [10444, 10445, 10573], [10445, 10574, 10573], [10445, 10446, 10575], [10445, 10575, 10574], [10446, 10447, 10575], [10447, 10576, 10575], [10447, 10448, 10577], [10447, 10577, 10576], [10448, 10449, 10577], [10449, 10578, 10577], [10449, 10450, 10579], [10449, 10579, 10578], [10450, 10451, 10579], [10451, 10580, 10579], [10451, 10452, 10581], [10451, 10581, 10580], [10452, 10453, 10581], [10453, 10582, 10581], [10453, 10454, 10583], [10453, 10583, 10582], [10454, 10455, 10583], [10455, 10584, 10583], [10455, 10456, 10585], [10455, 10585, 10584], [10456, 10457, 10585], [10457, 10586, 10585], [10457, 10458, 10587], [10457, 10587, 10586], [10458, 10459, 10587], [10459, 10588, 10587], [10459, 10460, 10589], [10459, 10589, 10588], [10460, 10461, 10589], [10461, 10590, 10589], [10461, 10462, 10591], [10461, 10591, 10590], [10462, 10463, 10591], [10463, 10592, 10591], [10463, 10464, 10593], [10463, 10593, 10592], [10464, 10465, 10593], [10465, 10594, 10593], [10465, 10466, 10595], [10465, 10595, 10594], [10466, 10467, 10595], [10467, 10596, 10595], [10467, 10468, 10597], [10467, 10597, 10596], [10468, 10469, 10597], [10469, 10598, 10597], [10469, 10470, 10599], [10469, 10599, 10598], [10470, 10471, 10599], [10471, 10600, 10599], [10471, 10472, 10601], [10471, 10601, 10600], [10472, 10473, 10601], [10473, 10602, 10601], [10473, 10474, 10603], [10473, 10603, 10602], [10474, 10475, 10603], [10475, 10604, 10603], [10475, 10476, 10605], [10475, 10605, 10604], [10476, 10477, 10605], [10477, 10606, 10605], [10477, 10478, 10607], [10477, 10607, 10606], [10478, 10479, 10607], [10479, 10608, 10607], [10479, 10480, 
10609], [10479, 10609, 10608], [10480, 10481, 10609], [10481, 10610, 10609], [10481, 10482, 10611], [10481, 10611, 10610], [10482, 10483, 10611], [10483, 10612, 10611], [10483, 10484, 10613], [10483, 10613, 10612], [10484, 10485, 10613], [10485, 10614, 10613], [10485, 10486, 10615], [10485, 10615, 10614], [10486, 10487, 10615], [10487, 10616, 10615], [10487, 10488, 10617], [10487, 10617, 10616], [10488, 10489, 10617], [10489, 10618, 10617], [10489, 10490, 10619], [10489, 10619, 10618], [10490, 10491, 10619], [10491, 10620, 10619], [10491, 10492, 10621], [10491, 10621, 10620], [10492, 10493, 10621], [10493, 10622, 10621], [10493, 10494, 10623], [10493, 10623, 10622], [10495, 10496, 10625], [10495, 10625, 10624], [10496, 10497, 10625], [10497, 10626, 10625], [10497, 10498, 10627], [10497, 10627, 10626], [10498, 10499, 10627], [10499, 10628, 10627], [10499, 10500, 10629], [10499, 10629, 10628], [10500, 10501, 10629], [10501, 10630, 10629], [10501, 10502, 10631], [10501, 10631, 10630], [10502, 10503, 10631], [10503, 10632, 10631], [10503, 10504, 10633], [10503, 10633, 10632], [10504, 10505, 10633], [10505, 10634, 10633], [10505, 10506, 10635], [10505, 10635, 10634], [10506, 10507, 10635], [10507, 10636, 10635], [10507, 10508, 10637], [10507, 10637, 10636], [10508, 10509, 10637], [10509, 10638, 10637], [10509, 10510, 10639], [10509, 10639, 10638], [10510, 10511, 10639], [10511, 10640, 10639], [10511, 10512, 10641], [10511, 10641, 10640], [10512, 10513, 10641], [10513, 10642, 10641], [10513, 10514, 10643], [10513, 10643, 10642], [10514, 10515, 10643], [10515, 10644, 10643], [10515, 10516, 10645], [10515, 10645, 10644], [10516, 10517, 10645], [10517, 10646, 10645], [10517, 10518, 10647], [10517, 10647, 10646], [10518, 10519, 10647], [10519, 10648, 10647], [10519, 10520, 10649], [10519, 10649, 10648], [10520, 10521, 10649], [10521, 10650, 10649], [10521, 10522, 10651], [10521, 10651, 10650], [10522, 10523, 10651], [10523, 10652, 10651], [10523, 10524, 10653], [10523, 10653, 10652], [10524, 10525, 10653], [10525, 10654, 10653], [10525, 10526, 10655], [10525, 10655, 10654], [10526, 10527, 10655], [10527, 10656, 10655], [10527, 10528, 10657], [10527, 10657, 10656], [10528, 10529, 10657], [10529, 10658, 10657], [10529, 10530, 10659], [10529, 10659, 10658], [10530, 10531, 10659], [10531, 10660, 10659], [10531, 10532, 10661], [10531, 10661, 10660], [10532, 10533, 10661], [10533, 10662, 10661], [10533, 10534, 10663], [10533, 10663, 10662], [10534, 10535, 10663], [10535, 10664, 10663], [10535, 10536, 10665], [10535, 10665, 10664], [10536, 10537, 10665], [10537, 10666, 10665], [10537, 10538, 10667], [10537, 10667, 10666], [10538, 10539, 10667], [10539, 10668, 10667], [10539, 10540, 10669], [10539, 10669, 10668], [10540, 10541, 10669], [10541, 10670, 10669], [10541, 10542, 10671], [10541, 10671, 10670], [10542, 10543, 10671], [10543, 10672, 10671], [10543, 10544, 10673], [10543, 10673, 10672], [10544, 10545, 10673], [10545, 10674, 10673], [10545, 10546, 10675], [10545, 10675, 10674], [10546, 10547, 10675], [10547, 10676, 10675], [10547, 10548, 10677], [10547, 10677, 10676], [10548, 10549, 10677], [10549, 10678, 10677], [10549, 10550, 10679], [10549, 10679, 10678], [10550, 10551, 10679], [10551, 10680, 10679], [10551, 10552, 10681], [10551, 10681, 10680], [10552, 10553, 10681], [10553, 10682, 10681], [10553, 10554, 10683], [10553, 10683, 10682], [10554, 10555, 10683], [10555, 10684, 10683], [10555, 10556, 10685], [10555, 10685, 10684], [10556, 10557, 10685], [10557, 10686, 10685], [10557, 10558, 10687], 
[10557, 10687, 10686], [10558, 10559, 10687], [10559, 10688, 10687], [10559, 10560, 10689], [10559, 10689, 10688], [10560, 10561, 10689], [10561, 10690, 10689], [10561, 10562, 10691], [10561, 10691, 10690], [10562, 10563, 10691], [10563, 10692, 10691], [10563, 10564, 10693], [10563, 10693, 10692], [10564, 10565, 10693], [10565, 10694, 10693], [10565, 10566, 10695], [10565, 10695, 10694], [10566, 10567, 10695], [10567, 10696, 10695], [10567, 10568, 10697], [10567, 10697, 10696], [10568, 10569, 10697], [10569, 10698, 10697], [10569, 10570, 10699], [10569, 10699, 10698], [10570, 10571, 10699], [10571, 10700, 10699], [10571, 10572, 10701], [10571, 10701, 10700], [10572, 10573, 10701], [10573, 10702, 10701], [10573, 10574, 10703], [10573, 10703, 10702], [10574, 10575, 10703], [10575, 10704, 10703], [10575, 10576, 10705], [10575, 10705, 10704], [10576, 10577, 10705], [10577, 10706, 10705], [10577, 10578, 10707], [10577, 10707, 10706], [10578, 10579, 10707], [10579, 10708, 10707], [10579, 10580, 10709], [10579, 10709, 10708], [10580, 10581, 10709], [10581, 10710, 10709], [10581, 10582, 10711], [10581, 10711, 10710], [10582, 10583, 10711], [10583, 10712, 10711], [10583, 10584, 10713], [10583, 10713, 10712], [10584, 10585, 10713], [10585, 10714, 10713], [10585, 10586, 10715], [10585, 10715, 10714], [10586, 10587, 10715], [10587, 10716, 10715], [10587, 10588, 10717], [10587, 10717, 10716], [10588, 10589, 10717], [10589, 10718, 10717], [10589, 10590, 10719], [10589, 10719, 10718], [10590, 10591, 10719], [10591, 10720, 10719], [10591, 10592, 10721], [10591, 10721, 10720], [10592, 10593, 10721], [10593, 10722, 10721], [10593, 10594, 10723], [10593, 10723, 10722], [10594, 10595, 10723], [10595, 10724, 10723], [10595, 10596, 10725], [10595, 10725, 10724], [10596, 10597, 10725], [10597, 10726, 10725], [10597, 10598, 10727], [10597, 10727, 10726], [10598, 10599, 10727], [10599, 10728, 10727], [10599, 10600, 10729], [10599, 10729, 10728], [10600, 10601, 10729], [10601, 10730, 10729], [10601, 10602, 10731], [10601, 10731, 10730], [10602, 10603, 10731], [10603, 10732, 10731], [10603, 10604, 10733], [10603, 10733, 10732], [10604, 10605, 10733], [10605, 10734, 10733], [10605, 10606, 10735], [10605, 10735, 10734], [10606, 10607, 10735], [10607, 10736, 10735], [10607, 10608, 10737], [10607, 10737, 10736], [10608, 10609, 10737], [10609, 10738, 10737], [10609, 10610, 10739], [10609, 10739, 10738], [10610, 10611, 10739], [10611, 10740, 10739], [10611, 10612, 10741], [10611, 10741, 10740], [10612, 10613, 10741], [10613, 10742, 10741], [10613, 10614, 10743], [10613, 10743, 10742], [10614, 10615, 10743], [10615, 10744, 10743], [10615, 10616, 10745], [10615, 10745, 10744], [10616, 10617, 10745], [10617, 10746, 10745], [10617, 10618, 10747], [10617, 10747, 10746], [10618, 10619, 10747], [10619, 10748, 10747], [10619, 10620, 10749], [10619, 10749, 10748], [10620, 10621, 10749], [10621, 10750, 10749], [10621, 10622, 10751], [10621, 10751, 10750], [10622, 10623, 10751], [10623, 10752, 10751], [10624, 10625, 10753], [10625, 10754, 10753], [10625, 10626, 10755], [10625, 10755, 10754], [10626, 10627, 10755], [10627, 10756, 10755], [10627, 10628, 10757], [10627, 10757, 10756], [10628, 10629, 10757], [10629, 10758, 10757], [10629, 10630, 10759], [10629, 10759, 10758], [10630, 10631, 10759], [10631, 10760, 10759], [10631, 10632, 10761], [10631, 10761, 10760], [10632, 10633, 10761], [10633, 10762, 10761], [10633, 10634, 10763], [10633, 10763, 10762], [10634, 10635, 10763], [10635, 10764, 10763], [10635, 10636, 10765], [10635, 
10765, 10764], [10636, 10637, 10765], [10637, 10766, 10765], [10637, 10638, 10767], [10637, 10767, 10766], [10638, 10639, 10767], [10639, 10768, 10767], [10639, 10640, 10769], [10639, 10769, 10768], [10640, 10641, 10769], [10641, 10770, 10769], [10641, 10642, 10771], [10641, 10771, 10770], [10642, 10643, 10771], [10643, 10772, 10771], [10643, 10644, 10773], [10643, 10773, 10772], [10644, 10645, 10773], [10645, 10774, 10773], [10645, 10646, 10775], [10645, 10775, 10774], [10646, 10647, 10775], [10647, 10776, 10775], [10647, 10648, 10777], [10647, 10777, 10776], [10648, 10649, 10777], [10649, 10778, 10777], [10649, 10650, 10779], [10649, 10779, 10778], [10650, 10651, 10779], [10651, 10780, 10779], [10651, 10652, 10781], [10651, 10781, 10780], [10652, 10653, 10781], [10653, 10782, 10781], [10653, 10654, 10783], [10653, 10783, 10782], [10654, 10655, 10783], [10655, 10784, 10783], [10655, 10656, 10785], [10655, 10785, 10784], [10656, 10657, 10785], [10657, 10786, 10785], [10657, 10658, 10787], [10657, 10787, 10786], [10658, 10659, 10787], [10659, 10788, 10787], [10659, 10660, 10789], [10659, 10789, 10788], [10660, 10661, 10789], [10661, 10790, 10789], [10661, 10662, 10791], [10661, 10791, 10790], [10662, 10663, 10791], [10663, 10792, 10791], [10663, 10664, 10793], [10663, 10793, 10792], [10664, 10665, 10793], [10665, 10794, 10793], [10665, 10666, 10795], [10665, 10795, 10794], [10666, 10667, 10795], [10667, 10796, 10795], [10667, 10668, 10797], [10667, 10797, 10796], [10668, 10669, 10797], [10669, 10798, 10797], [10669, 10670, 10799], [10669, 10799, 10798], [10670, 10671, 10799], [10671, 10800, 10799], [10671, 10672, 10801], [10671, 10801, 10800], [10672, 10673, 10801], [10673, 10802, 10801], [10673, 10674, 10803], [10673, 10803, 10802], [10674, 10675, 10803], [10675, 10804, 10803], [10675, 10676, 10805], [10675, 10805, 10804], [10676, 10677, 10805], [10677, 10806, 10805], [10677, 10678, 10807], [10677, 10807, 10806], [10678, 10679, 10807], [10679, 10808, 10807], [10679, 10680, 10809], [10679, 10809, 10808], [10680, 10681, 10809], [10681, 10810, 10809], [10681, 10682, 10811], [10681, 10811, 10810], [10682, 10683, 10811], [10683, 10812, 10811], [10683, 10684, 10813], [10683, 10813, 10812], [10684, 10685, 10813], [10685, 10814, 10813], [10685, 10686, 10815], [10685, 10815, 10814], [10686, 10687, 10815], [10687, 10816, 10815], [10687, 10688, 10817], [10687, 10817, 10816], [10688, 10689, 10817], [10689, 10818, 10817], [10689, 10690, 10819], [10689, 10819, 10818], [10690, 10691, 10819], [10691, 10820, 10819], [10691, 10692, 10821], [10691, 10821, 10820], [10692, 10693, 10821], [10693, 10822, 10821], [10693, 10694, 10823], [10693, 10823, 10822], [10694, 10695, 10823], [10695, 10824, 10823], [10695, 10696, 10825], [10695, 10825, 10824], [10696, 10697, 10825], [10697, 10826, 10825], [10697, 10698, 10827], [10697, 10827, 10826], [10698, 10699, 10827], [10699, 10828, 10827], [10699, 10700, 10829], [10699, 10829, 10828], [10700, 10701, 10829], [10701, 10830, 10829], [10701, 10702, 10831], [10701, 10831, 10830], [10702, 10703, 10831], [10703, 10832, 10831], [10703, 10704, 10833], [10703, 10833, 10832], [10704, 10705, 10833], [10705, 10834, 10833], [10705, 10706, 10835], [10705, 10835, 10834], [10706, 10707, 10835], [10707, 10836, 10835], [10707, 10708, 10837], [10707, 10837, 10836], [10708, 10709, 10837], [10709, 10838, 10837], [10709, 10710, 10839], [10709, 10839, 10838], [10710, 10711, 10839], [10711, 10840, 10839], [10711, 10712, 10841], [10711, 10841, 10840], [10712, 10713, 10841], [10713, 10842, 
10841], [10713, 10714, 10843], [10713, 10843, 10842], [10714, 10715, 10843], [10715, 10844, 10843], [10715, 10716, 10845], [10715, 10845, 10844], [10716, 10717, 10845], [10717, 10846, 10845], [10717, 10718, 10847], [10717, 10847, 10846], [10718, 10719, 10847], [10719, 10848, 10847], [10719, 10720, 10849], [10719, 10849, 10848], [10720, 10721, 10849], [10721, 10850, 10849], [10721, 10722, 10851], [10721, 10851, 10850], [10722, 10723, 10851], [10723, 10852, 10851], [10723, 10724, 10853], [10723, 10853, 10852], [10724, 10725, 10853], [10725, 10854, 10853], [10725, 10726, 10855], [10725, 10855, 10854], [10726, 10727, 10855], [10727, 10856, 10855], [10727, 10728, 10857], [10727, 10857, 10856], [10728, 10729, 10857], [10729, 10858, 10857], [10729, 10730, 10859], [10729, 10859, 10858], [10730, 10731, 10859], [10731, 10860, 10859], [10731, 10732, 10861], [10731, 10861, 10860], [10732, 10733, 10861], [10733, 10862, 10861], [10733, 10734, 10863], [10733, 10863, 10862], [10734, 10735, 10863], [10735, 10864, 10863], [10735, 10736, 10865], [10735, 10865, 10864], [10736, 10737, 10865], [10737, 10866, 10865], [10737, 10738, 10867], [10737, 10867, 10866], [10738, 10739, 10867], [10739, 10868, 10867], [10739, 10740, 10869], [10739, 10869, 10868], [10740, 10741, 10869], [10741, 10870, 10869], [10741, 10742, 10871], [10741, 10871, 10870], [10742, 10743, 10871], [10743, 10872, 10871], [10743, 10744, 10873], [10743, 10873, 10872], [10744, 10745, 10873], [10745, 10874, 10873], [10745, 10746, 10875], [10745, 10875, 10874], [10746, 10747, 10875], [10747, 10876, 10875], [10747, 10748, 10877], [10747, 10877, 10876], [10748, 10749, 10877], [10749, 10878, 10877], [10749, 10750, 10879], [10749, 10879, 10878], [10750, 10751, 10879], [10751, 10880, 10879], [10751, 10752, 10881], [10751, 10881, 10880], [10753, 10754, 10883], [10753, 10883, 10882], [10754, 10755, 10883], [10755, 10884, 10883], [10755, 10756, 10885], [10755, 10885, 10884], [10756, 10757, 10885], [10757, 10886, 10885], [10757, 10758, 10887], [10757, 10887, 10886], [10758, 10759, 10887], [10759, 10888, 10887], [10759, 10760, 10889], [10759, 10889, 10888], [10760, 10761, 10889], [10761, 10890, 10889], [10761, 10762, 10891], [10761, 10891, 10890], [10762, 10763, 10891], [10763, 10892, 10891], [10763, 10764, 10893], [10763, 10893, 10892], [10764, 10765, 10893], [10765, 10894, 10893], [10765, 10766, 10895], [10765, 10895, 10894], [10766, 10767, 10895], [10767, 10896, 10895], [10767, 10768, 10897], [10767, 10897, 10896], [10768, 10769, 10897], [10769, 10898, 10897], [10769, 10770, 10899], [10769, 10899, 10898], [10770, 10771, 10899], [10771, 10900, 10899], [10771, 10772, 10901], [10771, 10901, 10900], [10772, 10773, 10901], [10773, 10902, 10901], [10773, 10774, 10903], [10773, 10903, 10902], [10774, 10775, 10903], [10775, 10904, 10903], [10775, 10776, 10905], [10775, 10905, 10904], [10776, 10777, 10905], [10777, 10906, 10905], [10777, 10778, 10907], [10777, 10907, 10906], [10778, 10779, 10907], [10779, 10908, 10907], [10779, 10780, 10909], [10779, 10909, 10908], [10780, 10781, 10909], [10781, 10910, 10909], [10781, 10782, 10911], [10781, 10911, 10910], [10782, 10783, 10911], [10783, 10912, 10911], [10783, 10784, 10913], [10783, 10913, 10912], [10784, 10785, 10913], [10785, 10914, 10913], [10785, 10786, 10915], [10785, 10915, 10914], [10786, 10787, 10915], [10787, 10916, 10915], [10787, 10788, 10917], [10787, 10917, 10916], [10788, 10789, 10917], [10789, 10918, 10917], [10789, 10790, 10919], [10789, 10919, 10918], [10790, 10791, 10919], [10791, 10920, 10919], 
[10791, 10792, 10921], [10791, 10921, 10920], [10792, 10793, 10921], [10793, 10922, 10921], [10793, 10794, 10923], [10793, 10923, 10922], [10794, 10795, 10923], [10795, 10924, 10923], [10795, 10796, 10925], [10795, 10925, 10924], [10796, 10797, 10925], [10797, 10926, 10925], [10797, 10798, 10927], [10797, 10927, 10926], [10798, 10799, 10927], [10799, 10928, 10927], [10799, 10800, 10929], [10799, 10929, 10928], [10800, 10801, 10929], [10801, 10930, 10929], [10801, 10802, 10931], [10801, 10931, 10930], [10802, 10803, 10931], [10803, 10932, 10931], [10803, 10804, 10933], [10803, 10933, 10932], [10804, 10805, 10933], [10805, 10934, 10933], [10805, 10806, 10935], [10805, 10935, 10934], [10806, 10807, 10935], [10807, 10936, 10935], [10807, 10808, 10937], [10807, 10937, 10936], [10808, 10809, 10937], [10809, 10938, 10937], [10809, 10810, 10939], [10809, 10939, 10938], [10810, 10811, 10939], [10811, 10940, 10939], [10811, 10812, 10941], [10811, 10941, 10940], [10812, 10813, 10941], [10813, 10942, 10941], [10813, 10814, 10943], [10813, 10943, 10942], [10814, 10815, 10943], [10815, 10944, 10943], [10815, 10816, 10945], [10815, 10945, 10944], [10816, 10817, 10945], [10817, 10946, 10945], [10817, 10818, 10947], [10817, 10947, 10946], [10818, 10819, 10947], [10819, 10948, 10947], [10819, 10820, 10949], [10819, 10949, 10948], [10820, 10821, 10949], [10821, 10950, 10949], [10821, 10822, 10951], [10821, 10951, 10950], [10822, 10823, 10951], [10823, 10952, 10951], [10823, 10824, 10953], [10823, 10953, 10952], [10824, 10825, 10953], [10825, 10954, 10953], [10825, 10826, 10955], [10825, 10955, 10954], [10826, 10827, 10955], [10827, 10956, 10955], [10827, 10828, 10957], [10827, 10957, 10956], [10828, 10829, 10957], [10829, 10958, 10957], [10829, 10830, 10959], [10829, 10959, 10958], [10830, 10831, 10959], [10831, 10960, 10959], [10831, 10832, 10961], [10831, 10961, 10960], [10832, 10833, 10961], [10833, 10962, 10961], [10833, 10834, 10963], [10833, 10963, 10962], [10834, 10835, 10963], [10835, 10964, 10963], [10835, 10836, 10965], [10835, 10965, 10964], [10836, 10837, 10965], [10837, 10966, 10965], [10837, 10838, 10967], [10837, 10967, 10966], [10838, 10839, 10967], [10839, 10968, 10967], [10839, 10840, 10969], [10839, 10969, 10968], [10840, 10841, 10969], [10841, 10970, 10969], [10841, 10842, 10971], [10841, 10971, 10970], [10842, 10843, 10971], [10843, 10972, 10971], [10843, 10844, 10973], [10843, 10973, 10972], [10844, 10845, 10973], [10845, 10974, 10973], [10845, 10846, 10975], [10845, 10975, 10974], [10846, 10847, 10975], [10847, 10976, 10975], [10847, 10848, 10977], [10847, 10977, 10976], [10848, 10849, 10977], [10849, 10978, 10977], [10849, 10850, 10979], [10849, 10979, 10978], [10850, 10851, 10979], [10851, 10980, 10979], [10851, 10852, 10981], [10851, 10981, 10980], [10852, 10853, 10981], [10853, 10982, 10981], [10853, 10854, 10983], [10853, 10983, 10982], [10854, 10855, 10983], [10855, 10984, 10983], [10855, 10856, 10985], [10855, 10985, 10984], [10856, 10857, 10985], [10857, 10986, 10985], [10857, 10858, 10987], [10857, 10987, 10986], [10858, 10859, 10987], [10859, 10988, 10987], [10859, 10860, 10989], [10859, 10989, 10988], [10860, 10861, 10989], [10861, 10990, 10989], [10861, 10862, 10991], [10861, 10991, 10990], [10862, 10863, 10991], [10863, 10992, 10991], [10863, 10864, 10993], [10863, 10993, 10992], [10864, 10865, 10993], [10865, 10994, 10993], [10865, 10866, 10995], [10865, 10995, 10994], [10866, 10867, 10995], [10867, 10996, 10995], [10867, 10868, 10997], [10867, 10997, 10996], [10868, 
10869, 10997], [10869, 10998, 10997], [10869, 10870, 10999], [10869, 10999, 10998], [10870, 10871, 10999], [10871, 11000, 10999], [10871, 10872, 11001], [10871, 11001, 11000], [10872, 10873, 11001], [10873, 11002, 11001], [10873, 10874, 11003], [10873, 11003, 11002], [10874, 10875, 11003], [10875, 11004, 11003], [10875, 10876, 11005], [10875, 11005, 11004], [10876, 10877, 11005], [10877, 11006, 11005], [10877, 10878, 11007], [10877, 11007, 11006], [10878, 10879, 11007], [10879, 11008, 11007], [10879, 10880, 11009], [10879, 11009, 11008], [10880, 10881, 11009], [10881, 11010, 11009], [10882, 10883, 11011], [10883, 11012, 11011], [10883, 10884, 11013], [10883, 11013, 11012], [10884, 10885, 11013], [10885, 11014, 11013], [10885, 10886, 11015], [10885, 11015, 11014], [10886, 10887, 11015], [10887, 11016, 11015], [10887, 10888, 11017], [10887, 11017, 11016], [10888, 10889, 11017], [10889, 11018, 11017], [10889, 10890, 11019], [10889, 11019, 11018], [10890, 10891, 11019], [10891, 11020, 11019], [10891, 10892, 11021], [10891, 11021, 11020], [10892, 10893, 11021], [10893, 11022, 11021], [10893, 10894, 11023], [10893, 11023, 11022], [10894, 10895, 11023], [10895, 11024, 11023], [10895, 10896, 11025], [10895, 11025, 11024], [10896, 10897, 11025], [10897, 11026, 11025], [10897, 10898, 11027], [10897, 11027, 11026], [10898, 10899, 11027], [10899, 11028, 11027], [10899, 10900, 11029], [10899, 11029, 11028], [10900, 10901, 11029], [10901, 11030, 11029], [10901, 10902, 11031], [10901, 11031, 11030], [10902, 10903, 11031], [10903, 11032, 11031], [10903, 10904, 11033], [10903, 11033, 11032], [10904, 10905, 11033], [10905, 11034, 11033], [10905, 10906, 11035], [10905, 11035, 11034], [10906, 10907, 11035], [10907, 11036, 11035], [10907, 10908, 11037], [10907, 11037, 11036], [10908, 10909, 11037], [10909, 11038, 11037], [10909, 10910, 11039], [10909, 11039, 11038], [10910, 10911, 11039], [10911, 11040, 11039], [10911, 10912, 11041], [10911, 11041, 11040], [10912, 10913, 11041], [10913, 11042, 11041], [10913, 10914, 11043], [10913, 11043, 11042], [10914, 10915, 11043], [10915, 11044, 11043], [10915, 10916, 11045], [10915, 11045, 11044], [10916, 10917, 11045], [10917, 11046, 11045], [10917, 10918, 11047], [10917, 11047, 11046], [10918, 10919, 11047], [10919, 11048, 11047], [10919, 10920, 11049], [10919, 11049, 11048], [10920, 10921, 11049], [10921, 11050, 11049], [10921, 10922, 11051], [10921, 11051, 11050], [10922, 10923, 11051], [10923, 11052, 11051], [10923, 10924, 11053], [10923, 11053, 11052], [10924, 10925, 11053], [10925, 11054, 11053], [10925, 10926, 11055], [10925, 11055, 11054], [10926, 10927, 11055], [10927, 11056, 11055], [10927, 10928, 11057], [10927, 11057, 11056], [10928, 10929, 11057], [10929, 11058, 11057], [10929, 10930, 11059], [10929, 11059, 11058], [10930, 10931, 11059], [10931, 11060, 11059], [10931, 10932, 11061], [10931, 11061, 11060], [10932, 10933, 11061], [10933, 11062, 11061], [10933, 10934, 11063], [10933, 11063, 11062], [10934, 10935, 11063], [10935, 11064, 11063], [10935, 10936, 11065], [10935, 11065, 11064], [10936, 10937, 11065], [10937, 11066, 11065], [10937, 10938, 11067], [10937, 11067, 11066], [10938, 10939, 11067], [10939, 11068, 11067], [10939, 10940, 11069], [10939, 11069, 11068], [10940, 10941, 11069], [10941, 11070, 11069], [10941, 10942, 11071], [10941, 11071, 11070], [10942, 10943, 11071], [10943, 11072, 11071], [10943, 10944, 11073], [10943, 11073, 11072], [10944, 10945, 11073], [10945, 11074, 11073], [10945, 10946, 11075], [10945, 11075, 11074], [10946, 10947, 
11075], [10947, 11076, 11075], [10947, 10948, 11077], [10947, 11077, 11076], [10948, 10949, 11077], [10949, 11078, 11077], [10949, 10950, 11079], [10949, 11079, 11078], [10950, 10951, 11079], [10951, 11080, 11079], [10951, 10952, 11081], [10951, 11081, 11080], [10952, 10953, 11081], [10953, 11082, 11081], [10953, 10954, 11083], [10953, 11083, 11082], [10954, 10955, 11083], [10955, 11084, 11083], [10955, 10956, 11085], [10955, 11085, 11084], [10956, 10957, 11085], [10957, 11086, 11085], [10957, 10958, 11087], [10957, 11087, 11086], [10958, 10959, 11087], [10959, 11088, 11087], [10959, 10960, 11089], [10959, 11089, 11088], [10960, 10961, 11089], [10961, 11090, 11089], [10961, 10962, 11091], [10961, 11091, 11090], [10962, 10963, 11091], [10963, 11092, 11091], [10963, 10964, 11093], [10963, 11093, 11092], [10964, 10965, 11093], [10965, 11094, 11093], [10965, 10966, 11095], [10965, 11095, 11094], [10966, 10967, 11095], [10967, 11096, 11095], [10967, 10968, 11097], [10967, 11097, 11096], [10968, 10969, 11097], [10969, 11098, 11097], [10969, 10970, 11099], [10969, 11099, 11098], [10970, 10971, 11099], [10971, 11100, 11099], [10971, 10972, 11101], [10971, 11101, 11100], [10972, 10973, 11101], [10973, 11102, 11101], [10973, 10974, 11103], [10973, 11103, 11102], [10974, 10975, 11103], [10975, 11104, 11103], [10975, 10976, 11105], [10975, 11105, 11104], [10976, 10977, 11105], [10977, 11106, 11105], [10977, 10978, 11107], [10977, 11107, 11106], [10978, 10979, 11107], [10979, 11108, 11107], [10979, 10980, 11109], [10979, 11109, 11108], [10980, 10981, 11109], [10981, 11110, 11109], [10981, 10982, 11111], [10981, 11111, 11110], [10982, 10983, 11111], [10983, 11112, 11111], [10983, 10984, 11113], [10983, 11113, 11112], [10984, 10985, 11113], [10985, 11114, 11113], [10985, 10986, 11115], [10985, 11115, 11114], [10986, 10987, 11115], [10987, 11116, 11115], [10987, 10988, 11117], [10987, 11117, 11116], [10988, 10989, 11117], [10989, 11118, 11117], [10989, 10990, 11119], [10989, 11119, 11118], [10990, 10991, 11119], [10991, 11120, 11119], [10991, 10992, 11121], [10991, 11121, 11120], [10992, 10993, 11121], [10993, 11122, 11121], [10993, 10994, 11123], [10993, 11123, 11122], [10994, 10995, 11123], [10995, 11124, 11123], [10995, 10996, 11125], [10995, 11125, 11124], [10996, 10997, 11125], [10997, 11126, 11125], [10997, 10998, 11127], [10997, 11127, 11126], [10998, 10999, 11127], [10999, 11128, 11127], [10999, 11000, 11129], [10999, 11129, 11128], [11000, 11001, 11129], [11001, 11130, 11129], [11001, 11002, 11131], [11001, 11131, 11130], [11002, 11003, 11131], [11003, 11132, 11131], [11003, 11004, 11133], [11003, 11133, 11132], [11004, 11005, 11133], [11005, 11134, 11133], [11005, 11006, 11135], [11005, 11135, 11134], [11006, 11007, 11135], [11007, 11136, 11135], [11007, 11008, 11137], [11007, 11137, 11136], [11008, 11009, 11137], [11009, 11138, 11137], [11009, 11010, 11139], [11009, 11139, 11138], [11011, 11012, 11141], [11011, 11141, 11140], [11012, 11013, 11141], [11013, 11142, 11141], [11013, 11014, 11143], [11013, 11143, 11142], [11014, 11015, 11143], [11015, 11144, 11143], [11015, 11016, 11145], [11015, 11145, 11144], [11016, 11017, 11145], [11017, 11146, 11145], [11017, 11018, 11147], [11017, 11147, 11146], [11018, 11019, 11147], [11019, 11148, 11147], [11019, 11020, 11149], [11019, 11149, 11148], [11020, 11021, 11149], [11021, 11150, 11149], [11021, 11022, 11151], [11021, 11151, 11150], [11022, 11023, 11151], [11023, 11152, 11151], [11023, 11024, 11153], [11023, 11153, 11152], [11024, 11025, 11153], 
[11025, 11154, 11153], [11025, 11026, 11155], [11025, 11155, 11154], [11026, 11027, 11155], [11027, 11156, 11155], [11027, 11028, 11157], [11027, 11157, 11156], [11028, 11029, 11157], [11029, 11158, 11157], [11029, 11030, 11159], [11029, 11159, 11158], [11030, 11031, 11159], [11031, 11160, 11159], [11031, 11032, 11161], [11031, 11161, 11160], [11032, 11033, 11161], [11033, 11162, 11161], [11033, 11034, 11163], [11033, 11163, 11162], [11034, 11035, 11163], [11035, 11164, 11163], [11035, 11036, 11165], [11035, 11165, 11164], [11036, 11037, 11165], [11037, 11166, 11165], [11037, 11038, 11167], [11037, 11167, 11166], [11038, 11039, 11167], [11039, 11168, 11167], [11039, 11040, 11169], [11039, 11169, 11168], [11040, 11041, 11169], [11041, 11170, 11169], [11041, 11042, 11171], [11041, 11171, 11170], [11042, 11043, 11171], [11043, 11172, 11171], [11043, 11044, 11173], [11043, 11173, 11172], [11044, 11045, 11173], [11045, 11174, 11173], [11045, 11046, 11175], [11045, 11175, 11174], [11046, 11047, 11175], [11047, 11176, 11175], [11047, 11048, 11177], [11047, 11177, 11176], [11048, 11049, 11177], [11049, 11178, 11177], [11049, 11050, 11179], [11049, 11179, 11178], [11050, 11051, 11179], [11051, 11180, 11179], [11051, 11052, 11181], [11051, 11181, 11180], [11052, 11053, 11181], [11053, 11182, 11181], [11053, 11054, 11183], [11053, 11183, 11182], [11054, 11055, 11183], [11055, 11184, 11183], [11055, 11056, 11185], [11055, 11185, 11184], [11056, 11057, 11185], [11057, 11186, 11185], [11057, 11058, 11187], [11057, 11187, 11186], [11058, 11059, 11187], [11059, 11188, 11187], [11059, 11060, 11189], [11059, 11189, 11188], [11060, 11061, 11189], [11061, 11190, 11189], [11061, 11062, 11191], [11061, 11191, 11190], [11062, 11063, 11191], [11063, 11192, 11191], [11063, 11064, 11193], [11063, 11193, 11192], [11064, 11065, 11193], [11065, 11194, 11193], [11065, 11066, 11195], [11065, 11195, 11194], [11066, 11067, 11195], [11067, 11196, 11195], [11067, 11068, 11197], [11067, 11197, 11196], [11068, 11069, 11197], [11069, 11198, 11197], [11069, 11070, 11199], [11069, 11199, 11198], [11070, 11071, 11199], [11071, 11200, 11199], [11071, 11072, 11201], [11071, 11201, 11200], [11072, 11073, 11201], [11073, 11202, 11201], [11073, 11074, 11203], [11073, 11203, 11202], [11074, 11075, 11203], [11075, 11204, 11203], [11075, 11076, 11205], [11075, 11205, 11204], [11076, 11077, 11205], [11077, 11206, 11205], [11077, 11078, 11207], [11077, 11207, 11206], [11078, 11079, 11207], [11079, 11208, 11207], [11079, 11080, 11209], [11079, 11209, 11208], [11080, 11081, 11209], [11081, 11210, 11209], [11081, 11082, 11211], [11081, 11211, 11210], [11082, 11083, 11211], [11083, 11212, 11211], [11083, 11084, 11213], [11083, 11213, 11212], [11084, 11085, 11213], [11085, 11214, 11213], [11085, 11086, 11215], [11085, 11215, 11214], [11086, 11087, 11215], [11087, 11216, 11215], [11087, 11088, 11217], [11087, 11217, 11216], [11088, 11089, 11217], [11089, 11218, 11217], [11089, 11090, 11219], [11089, 11219, 11218], [11090, 11091, 11219], [11091, 11220, 11219], [11091, 11092, 11221], [11091, 11221, 11220], [11092, 11093, 11221], [11093, 11222, 11221], [11093, 11094, 11223], [11093, 11223, 11222], [11094, 11095, 11223], [11095, 11224, 11223], [11095, 11096, 11225], [11095, 11225, 11224], [11096, 11097, 11225], [11097, 11226, 11225], [11097, 11098, 11227], [11097, 11227, 11226], [11098, 11099, 11227], [11099, 11228, 11227], [11099, 11100, 11229], [11099, 11229, 11228], [11100, 11101, 11229], [11101, 11230, 11229], [11101, 11102, 11231], [11101, 
11231, 11230], [11102, 11103, 11231], [11103, 11232, 11231], [11103, 11104, 11233], [11103, 11233, 11232], [11104, 11105, 11233], [11105, 11234, 11233], [11105, 11106, 11235], [11105, 11235, 11234], [11106, 11107, 11235], [11107, 11236, 11235], [11107, 11108, 11237], [11107, 11237, 11236], [11108, 11109, 11237], [11109, 11238, 11237], [11109, 11110, 11239], [11109, 11239, 11238], [11110, 11111, 11239], [11111, 11240, 11239], [11111, 11112, 11241], [11111, 11241, 11240], [11112, 11113, 11241], [11113, 11242, 11241], [11113, 11114, 11243], [11113, 11243, 11242], [11114, 11115, 11243], [11115, 11244, 11243], [11115, 11116, 11245], [11115, 11245, 11244], [11116, 11117, 11245], [11117, 11246, 11245], [11117, 11118, 11247], [11117, 11247, 11246], [11118, 11119, 11247], [11119, 11248, 11247], [11119, 11120, 11249], [11119, 11249, 11248], [11120, 11121, 11249], [11121, 11250, 11249], [11121, 11122, 11251], [11121, 11251, 11250], [11122, 11123, 11251], [11123, 11252, 11251], [11123, 11124, 11253], [11123, 11253, 11252], [11124, 11125, 11253], [11125, 11254, 11253], [11125, 11126, 11255], [11125, 11255, 11254], [11126, 11127, 11255], [11127, 11256, 11255], [11127, 11128, 11257], [11127, 11257, 11256], [11128, 11129, 11257], [11129, 11258, 11257], [11129, 11130, 11259], [11129, 11259, 11258], [11130, 11131, 11259], [11131, 11260, 11259], [11131, 11132, 11261], [11131, 11261, 11260], [11132, 11133, 11261], [11133, 11262, 11261], [11133, 11134, 11263], [11133, 11263, 11262], [11134, 11135, 11263], [11135, 11264, 11263], [11135, 11136, 11265], [11135, 11265, 11264], [11136, 11137, 11265], [11137, 11266, 11265], [11137, 11138, 11267], [11137, 11267, 11266], [11138, 11139, 11267], [11139, 11268, 11267], [11140, 11141, 11269], [11141, 11270, 11269], [11141, 11142, 11271], [11141, 11271, 11270], [11142, 11143, 11271], [11143, 11272, 11271], [11143, 11144, 11273], [11143, 11273, 11272], [11144, 11145, 11273], [11145, 11274, 11273], [11145, 11146, 11275], [11145, 11275, 11274], [11146, 11147, 11275], [11147, 11276, 11275], [11147, 11148, 11277], [11147, 11277, 11276], [11148, 11149, 11277], [11149, 11278, 11277], [11149, 11150, 11279], [11149, 11279, 11278], [11150, 11151, 11279], [11151, 11280, 11279], [11151, 11152, 11281], [11151, 11281, 11280], [11152, 11153, 11281], [11153, 11282, 11281], [11153, 11154, 11283], [11153, 11283, 11282], [11154, 11155, 11283], [11155, 11284, 11283], [11155, 11156, 11285], [11155, 11285, 11284], [11156, 11157, 11285], [11157, 11286, 11285], [11157, 11158, 11287], [11157, 11287, 11286], [11158, 11159, 11287], [11159, 11288, 11287], [11159, 11160, 11289], [11159, 11289, 11288], [11160, 11161, 11289], [11161, 11290, 11289], [11161, 11162, 11291], [11161, 11291, 11290], [11162, 11163, 11291], [11163, 11292, 11291], [11163, 11164, 11293], [11163, 11293, 11292], [11164, 11165, 11293], [11165, 11294, 11293], [11165, 11166, 11295], [11165, 11295, 11294], [11166, 11167, 11295], [11167, 11296, 11295], [11167, 11168, 11297], [11167, 11297, 11296], [11168, 11169, 11297], [11169, 11298, 11297], [11169, 11170, 11299], [11169, 11299, 11298], [11170, 11171, 11299], [11171, 11300, 11299], [11171, 11172, 11301], [11171, 11301, 11300], [11172, 11173, 11301], [11173, 11302, 11301], [11173, 11174, 11303], [11173, 11303, 11302], [11174, 11175, 11303], [11175, 11304, 11303], [11175, 11176, 11305], [11175, 11305, 11304], [11176, 11177, 11305], [11177, 11306, 11305], [11177, 11178, 11307], [11177, 11307, 11306], [11178, 11179, 11307], [11179, 11308, 11307], [11179, 11180, 11309], [11179, 11309, 
11308], [11180, 11181, 11309], [11181, 11310, 11309], [11181, 11182, 11311], [11181, 11311, 11310], [11182, 11183, 11311], [11183, 11312, 11311], [11183, 11184, 11313], [11183, 11313, 11312], [11184, 11185, 11313], [11185, 11314, 11313], [11185, 11186, 11315], [11185, 11315, 11314], [11186, 11187, 11315], [11187, 11316, 11315], [11187, 11188, 11317], [11187, 11317, 11316], [11188, 11189, 11317], [11189, 11318, 11317], [11189, 11190, 11319], [11189, 11319, 11318], [11190, 11191, 11319], [11191, 11320, 11319], [11191, 11192, 11321], [11191, 11321, 11320], [11192, 11193, 11321], [11193, 11322, 11321], [11193, 11194, 11323], [11193, 11323, 11322], [11194, 11195, 11323], [11195, 11324, 11323], [11195, 11196, 11325], [11195, 11325, 11324], [11196, 11197, 11325], [11197, 11326, 11325], [11197, 11198, 11327], [11197, 11327, 11326], [11198, 11199, 11327], [11199, 11328, 11327], [11199, 11200, 11329], [11199, 11329, 11328], [11200, 11201, 11329], [11201, 11330, 11329], [11201, 11202, 11331], [11201, 11331, 11330], [11202, 11203, 11331], [11203, 11332, 11331], [11203, 11204, 11333], [11203, 11333, 11332], [11204, 11205, 11333], [11205, 11334, 11333], [11205, 11206, 11335], [11205, 11335, 11334], [11206, 11207, 11335], [11207, 11336, 11335], [11207, 11208, 11337], [11207, 11337, 11336], [11208, 11209, 11337], [11209, 11338, 11337], [11209, 11210, 11339], [11209, 11339, 11338], [11210, 11211, 11339], [11211, 11340, 11339], [11211, 11212, 11341], [11211, 11341, 11340], [11212, 11213, 11341], [11213, 11342, 11341], [11213, 11214, 11343], [11213, 11343, 11342], [11214, 11215, 11343], [11215, 11344, 11343], [11215, 11216, 11345], [11215, 11345, 11344], [11216, 11217, 11345], [11217, 11346, 11345], [11217, 11218, 11347], [11217, 11347, 11346], [11218, 11219, 11347], [11219, 11348, 11347], [11219, 11220, 11349], [11219, 11349, 11348], [11220, 11221, 11349], [11221, 11350, 11349], [11221, 11222, 11351], [11221, 11351, 11350], [11222, 11223, 11351], [11223, 11352, 11351], [11223, 11224, 11353], [11223, 11353, 11352], [11224, 11225, 11353], [11225, 11354, 11353], [11225, 11226, 11355], [11225, 11355, 11354], [11226, 11227, 11355], [11227, 11356, 11355], [11227, 11228, 11357], [11227, 11357, 11356], [11228, 11229, 11357], [11229, 11358, 11357], [11229, 11230, 11359], [11229, 11359, 11358], [11230, 11231, 11359], [11231, 11360, 11359], [11231, 11232, 11361], [11231, 11361, 11360], [11232, 11233, 11361], [11233, 11362, 11361], [11233, 11234, 11363], [11233, 11363, 11362], [11234, 11235, 11363], [11235, 11364, 11363], [11235, 11236, 11365], [11235, 11365, 11364], [11236, 11237, 11365], [11237, 11366, 11365], [11237, 11238, 11367], [11237, 11367, 11366], [11238, 11239, 11367], [11239, 11368, 11367], [11239, 11240, 11369], [11239, 11369, 11368], [11240, 11241, 11369], [11241, 11370, 11369], [11241, 11242, 11371], [11241, 11371, 11370], [11242, 11243, 11371], [11243, 11372, 11371], [11243, 11244, 11373], [11243, 11373, 11372], [11244, 11245, 11373], [11245, 11374, 11373], [11245, 11246, 11375], [11245, 11375, 11374], [11246, 11247, 11375], [11247, 11376, 11375], [11247, 11248, 11377], [11247, 11377, 11376], [11248, 11249, 11377], [11249, 11378, 11377], [11249, 11250, 11379], [11249, 11379, 11378], [11250, 11251, 11379], [11251, 11380, 11379], [11251, 11252, 11381], [11251, 11381, 11380], [11252, 11253, 11381], [11253, 11382, 11381], [11253, 11254, 11383], [11253, 11383, 11382], [11254, 11255, 11383], [11255, 11384, 11383], [11255, 11256, 11385], [11255, 11385, 11384], [11256, 11257, 11385], [11257, 11386, 11385], 
[11257, 11258, 11387], [11257, 11387, 11386], [11258, 11259, 11387], [11259, 11388, 11387], [11259, 11260, 11389], [11259, 11389, 11388], [11260, 11261, 11389], [11261, 11390, 11389], [11261, 11262, 11391], [11261, 11391, 11390], [11262, 11263, 11391], [11263, 11392, 11391], [11263, 11264, 11393], [11263, 11393, 11392], [11264, 11265, 11393], [11265, 11394, 11393], [11265, 11266, 11395], [11265, 11395, 11394], [11266, 11267, 11395], [11267, 11396, 11395], [11267, 11268, 11397], [11267, 11397, 11396], [11269, 11270, 11399], [11269, 11399, 11398], [11270, 11271, 11399], [11271, 11400, 11399], [11271, 11272, 11401], [11271, 11401, 11400], [11272, 11273, 11401], [11273, 11402, 11401], [11273, 11274, 11403], [11273, 11403, 11402], [11274, 11275, 11403], [11275, 11404, 11403], [11275, 11276, 11405], [11275, 11405, 11404], [11276, 11277, 11405], [11277, 11406, 11405], [11277, 11278, 11407], [11277, 11407, 11406], [11278, 11279, 11407], [11279, 11408, 11407], [11279, 11280, 11409], [11279, 11409, 11408], [11280, 11281, 11409], [11281, 11410, 11409], [11281, 11282, 11411], [11281, 11411, 11410], [11282, 11283, 11411], [11283, 11412, 11411], [11283, 11284, 11413], [11283, 11413, 11412], [11284, 11285, 11413], [11285, 11414, 11413], [11285, 11286, 11415], [11285, 11415, 11414], [11286, 11287, 11415], [11287, 11416, 11415], [11287, 11288, 11417], [11287, 11417, 11416], [11288, 11289, 11417], [11289, 11418, 11417], [11289, 11290, 11419], [11289, 11419, 11418], [11290, 11291, 11419], [11291, 11420, 11419], [11291, 11292, 11421], [11291, 11421, 11420], [11292, 11293, 11421], [11293, 11422, 11421], [11293, 11294, 11423], [11293, 11423, 11422], [11294, 11295, 11423], [11295, 11424, 11423], [11295, 11296, 11425], [11295, 11425, 11424], [11296, 11297, 11425], [11297, 11426, 11425], [11297, 11298, 11427], [11297, 11427, 11426], [11298, 11299, 11427], [11299, 11428, 11427], [11299, 11300, 11429], [11299, 11429, 11428], [11300, 11301, 11429], [11301, 11430, 11429], [11301, 11302, 11431], [11301, 11431, 11430], [11302, 11303, 11431], [11303, 11432, 11431], [11303, 11304, 11433], [11303, 11433, 11432], [11304, 11305, 11433], [11305, 11434, 11433], [11305, 11306, 11435], [11305, 11435, 11434], [11306, 11307, 11435], [11307, 11436, 11435], [11307, 11308, 11437], [11307, 11437, 11436], [11308, 11309, 11437], [11309, 11438, 11437], [11309, 11310, 11439], [11309, 11439, 11438], [11310, 11311, 11439], [11311, 11440, 11439], [11311, 11312, 11441], [11311, 11441, 11440], [11312, 11313, 11441], [11313, 11442, 11441], [11313, 11314, 11443], [11313, 11443, 11442], [11314, 11315, 11443], [11315, 11444, 11443], [11315, 11316, 11445], [11315, 11445, 11444], [11316, 11317, 11445], [11317, 11446, 11445], [11317, 11318, 11447], [11317, 11447, 11446], [11318, 11319, 11447], [11319, 11448, 11447], [11319, 11320, 11449], [11319, 11449, 11448], [11320, 11321, 11449], [11321, 11450, 11449], [11321, 11322, 11451], [11321, 11451, 11450], [11322, 11323, 11451], [11323, 11452, 11451], [11323, 11324, 11453], [11323, 11453, 11452], [11324, 11325, 11453], [11325, 11454, 11453], [11325, 11326, 11455], [11325, 11455, 11454], [11326, 11327, 11455], [11327, 11456, 11455], [11327, 11328, 11457], [11327, 11457, 11456], [11328, 11329, 11457], [11329, 11458, 11457], [11329, 11330, 11459], [11329, 11459, 11458], [11330, 11331, 11459], [11331, 11460, 11459], [11331, 11332, 11461], [11331, 11461, 11460], [11332, 11333, 11461], [11333, 11462, 11461], [11333, 11334, 11463], [11333, 11463, 11462], [11334, 11335, 11463], [11335, 11464, 11463], [11335, 
11336, 11465], [11335, 11465, 11464], [11336, 11337, 11465], [11337, 11466, 11465], [11337, 11338, 11467], [11337, 11467, 11466], [11338, 11339, 11467], [11339, 11468, 11467], [11339, 11340, 11469], [11339, 11469, 11468], [11340, 11341, 11469], [11341, 11470, 11469], [11341, 11342, 11471], [11341, 11471, 11470], [11342, 11343, 11471], [11343, 11472, 11471], [11343, 11344, 11473], [11343, 11473, 11472], [11344, 11345, 11473], [11345, 11474, 11473], [11345, 11346, 11475], [11345, 11475, 11474], [11346, 11347, 11475], [11347, 11476, 11475], [11347, 11348, 11477], [11347, 11477, 11476], [11348, 11349, 11477], [11349, 11478, 11477], [11349, 11350, 11479], [11349, 11479, 11478], [11350, 11351, 11479], [11351, 11480, 11479], [11351, 11352, 11481], [11351, 11481, 11480], [11352, 11353, 11481], [11353, 11482, 11481], [11353, 11354, 11483], [11353, 11483, 11482], [11354, 11355, 11483], [11355, 11484, 11483], [11355, 11356, 11485], [11355, 11485, 11484], [11356, 11357, 11485], [11357, 11486, 11485], [11357, 11358, 11487], [11357, 11487, 11486], [11358, 11359, 11487], [11359, 11488, 11487], [11359, 11360, 11489], [11359, 11489, 11488], [11360, 11361, 11489], [11361, 11490, 11489], [11361, 11362, 11491], [11361, 11491, 11490], [11362, 11363, 11491], [11363, 11492, 11491], [11363, 11364, 11493], [11363, 11493, 11492], [11364, 11365, 11493], [11365, 11494, 11493], [11365, 11366, 11495], [11365, 11495, 11494], [11366, 11367, 11495], [11367, 11496, 11495], [11367, 11368, 11497], [11367, 11497, 11496], [11368, 11369, 11497], [11369, 11498, 11497], [11369, 11370, 11499], [11369, 11499, 11498], [11370, 11371, 11499], [11371, 11500, 11499], [11371, 11372, 11501], [11371, 11501, 11500], [11372, 11373, 11501], [11373, 11502, 11501], [11373, 11374, 11503], [11373, 11503, 11502], [11374, 11375, 11503], [11375, 11504, 11503], [11375, 11376, 11505], [11375, 11505, 11504], [11376, 11377, 11505], [11377, 11506, 11505], [11377, 11378, 11507], [11377, 11507, 11506], [11378, 11379, 11507], [11379, 11508, 11507], [11379, 11380, 11509], [11379, 11509, 11508], [11380, 11381, 11509], [11381, 11510, 11509], [11381, 11382, 11511], [11381, 11511, 11510], [11382, 11383, 11511], [11383, 11512, 11511], [11383, 11384, 11513], [11383, 11513, 11512], [11384, 11385, 11513], [11385, 11514, 11513], [11385, 11386, 11515], [11385, 11515, 11514], [11386, 11387, 11515], [11387, 11516, 11515], [11387, 11388, 11517], [11387, 11517, 11516], [11388, 11389, 11517], [11389, 11518, 11517], [11389, 11390, 11519], [11389, 11519, 11518], [11390, 11391, 11519], [11391, 11520, 11519], [11391, 11392, 11521], [11391, 11521, 11520], [11392, 11393, 11521], [11393, 11522, 11521], [11393, 11394, 11523], [11393, 11523, 11522], [11394, 11395, 11523], [11395, 11524, 11523], [11395, 11396, 11525], [11395, 11525, 11524], [11396, 11397, 11525], [11397, 11526, 11525], [11398, 11399, 11527], [11399, 11528, 11527], [11399, 11400, 11529], [11399, 11529, 11528], [11400, 11401, 11529], [11401, 11530, 11529], [11401, 11402, 11531], [11401, 11531, 11530], [11402, 11403, 11531], [11403, 11532, 11531], [11403, 11404, 11533], [11403, 11533, 11532], [11404, 11405, 11533], [11405, 11534, 11533], [11405, 11406, 11535], [11405, 11535, 11534], [11406, 11407, 11535], [11407, 11536, 11535], [11407, 11408, 11537], [11407, 11537, 11536], [11408, 11409, 11537], [11409, 11538, 11537], [11409, 11410, 11539], [11409, 11539, 11538], [11410, 11411, 11539], [11411, 11540, 11539], [11411, 11412, 11541], [11411, 11541, 11540], [11412, 11413, 11541], [11413, 11542, 11541], [11413, 11414, 
11543], [11413, 11543, 11542], [11414, 11415, 11543], [11415, 11544, 11543], [11415, 11416, 11545], [11415, 11545, 11544], [11416, 11417, 11545], [11417, 11546, 11545], [11417, 11418, 11547], [11417, 11547, 11546], [11418, 11419, 11547], [11419, 11548, 11547], [11419, 11420, 11549], [11419, 11549, 11548], [11420, 11421, 11549], [11421, 11550, 11549], [11421, 11422, 11551], [11421, 11551, 11550], [11422, 11423, 11551], [11423, 11552, 11551], [11423, 11424, 11553], [11423, 11553, 11552], [11424, 11425, 11553], [11425, 11554, 11553], [11425, 11426, 11555], [11425, 11555, 11554], [11426, 11427, 11555], [11427, 11556, 11555], [11427, 11428, 11557], [11427, 11557, 11556], [11428, 11429, 11557], [11429, 11558, 11557], [11429, 11430, 11559], [11429, 11559, 11558], [11430, 11431, 11559], [11431, 11560, 11559], [11431, 11432, 11561], [11431, 11561, 11560], [11432, 11433, 11561], [11433, 11562, 11561], [11433, 11434, 11563], [11433, 11563, 11562], [11434, 11435, 11563], [11435, 11564, 11563], [11435, 11436, 11565], [11435, 11565, 11564], [11436, 11437, 11565], [11437, 11566, 11565], [11437, 11438, 11567], [11437, 11567, 11566], [11438, 11439, 11567], [11439, 11568, 11567], [11439, 11440, 11569], [11439, 11569, 11568], [11440, 11441, 11569], [11441, 11570, 11569], [11441, 11442, 11571], [11441, 11571, 11570], [11442, 11443, 11571], [11443, 11572, 11571], [11443, 11444, 11573], [11443, 11573, 11572], [11444, 11445, 11573], [11445, 11574, 11573], [11445, 11446, 11575], [11445, 11575, 11574], [11446, 11447, 11575], [11447, 11576, 11575], [11447, 11448, 11577], [11447, 11577, 11576], [11448, 11449, 11577], [11449, 11578, 11577], [11449, 11450, 11579], [11449, 11579, 11578], [11450, 11451, 11579], [11451, 11580, 11579], [11451, 11452, 11581], [11451, 11581, 11580], [11452, 11453, 11581], [11453, 11582, 11581], [11453, 11454, 11583], [11453, 11583, 11582], [11454, 11455, 11583], [11455, 11584, 11583], [11455, 11456, 11585], [11455, 11585, 11584], [11456, 11457, 11585], [11457, 11586, 11585], [11457, 11458, 11587], [11457, 11587, 11586], [11458, 11459, 11587], [11459, 11588, 11587], [11459, 11460, 11589], [11459, 11589, 11588], [11460, 11461, 11589], [11461, 11590, 11589], [11461, 11462, 11591], [11461, 11591, 11590], [11462, 11463, 11591], [11463, 11592, 11591], [11463, 11464, 11593], [11463, 11593, 11592], [11464, 11465, 11593], [11465, 11594, 11593], [11465, 11466, 11595], [11465, 11595, 11594], [11466, 11467, 11595], [11467, 11596, 11595], [11467, 11468, 11597], [11467, 11597, 11596], [11468, 11469, 11597], [11469, 11598, 11597], [11469, 11470, 11599], [11469, 11599, 11598], [11470, 11471, 11599], [11471, 11600, 11599], [11471, 11472, 11601], [11471, 11601, 11600], [11472, 11473, 11601], [11473, 11602, 11601], [11473, 11474, 11603], [11473, 11603, 11602], [11474, 11475, 11603], [11475, 11604, 11603], [11475, 11476, 11605], [11475, 11605, 11604], [11476, 11477, 11605], [11477, 11606, 11605], [11477, 11478, 11607], [11477, 11607, 11606], [11478, 11479, 11607], [11479, 11608, 11607], [11479, 11480, 11609], [11479, 11609, 11608], [11480, 11481, 11609], [11481, 11610, 11609], [11481, 11482, 11611], [11481, 11611, 11610], [11482, 11483, 11611], [11483, 11612, 11611], [11483, 11484, 11613], [11483, 11613, 11612], [11484, 11485, 11613], [11485, 11614, 11613], [11485, 11486, 11615], [11485, 11615, 11614], [11486, 11487, 11615], [11487, 11616, 11615], [11487, 11488, 11617], [11487, 11617, 11616], [11488, 11489, 11617], [11489, 11618, 11617], [11489, 11490, 11619], [11489, 11619, 11618], [11490, 11491, 11619], 
[11491, 11620, 11619], [11491, 11492, 11621], [11491, 11621, 11620], [11492, 11493, 11621], [11493, 11622, 11621], [11493, 11494, 11623], [11493, 11623, 11622], [11494, 11495, 11623], [11495, 11624, 11623], [11495, 11496, 11625], [11495, 11625, 11624], [11496, 11497, 11625], [11497, 11626, 11625], [11497, 11498, 11627], [11497, 11627, 11626], [11498, 11499, 11627], [11499, 11628, 11627], [11499, 11500, 11629], [11499, 11629, 11628], [11500, 11501, 11629], [11501, 11630, 11629], [11501, 11502, 11631], [11501, 11631, 11630], [11502, 11503, 11631], [11503, 11632, 11631], [11503, 11504, 11633], [11503, 11633, 11632], [11504, 11505, 11633], [11505, 11634, 11633], [11505, 11506, 11635], [11505, 11635, 11634], [11506, 11507, 11635], [11507, 11636, 11635], [11507, 11508, 11637], [11507, 11637, 11636], [11508, 11509, 11637], [11509, 11638, 11637], [11509, 11510, 11639], [11509, 11639, 11638], [11510, 11511, 11639], [11511, 11640, 11639], [11511, 11512, 11641], [11511, 11641, 11640], [11512, 11513, 11641], [11513, 11642, 11641], [11513, 11514, 11643], [11513, 11643, 11642], [11514, 11515, 11643], [11515, 11644, 11643], [11515, 11516, 11645], [11515, 11645, 11644], [11516, 11517, 11645], [11517, 11646, 11645], [11517, 11518, 11647], [11517, 11647, 11646], [11518, 11519, 11647], [11519, 11648, 11647], [11519, 11520, 11649], [11519, 11649, 11648], [11520, 11521, 11649], [11521, 11650, 11649], [11521, 11522, 11651], [11521, 11651, 11650], [11522, 11523, 11651], [11523, 11652, 11651], [11523, 11524, 11653], [11523, 11653, 11652], [11524, 11525, 11653], [11525, 11654, 11653], [11525, 11526, 11655], [11525, 11655, 11654], [11527, 11528, 11657], [11527, 11657, 11656], [11528, 11529, 11657], [11529, 11658, 11657], [11529, 11530, 11659], [11529, 11659, 11658], [11530, 11531, 11659], [11531, 11660, 11659], [11531, 11532, 11661], [11531, 11661, 11660], [11532, 11533, 11661], [11533, 11662, 11661], [11533, 11534, 11663], [11533, 11663, 11662], [11534, 11535, 11663], [11535, 11664, 11663], [11535, 11536, 11665], [11535, 11665, 11664], [11536, 11537, 11665], [11537, 11666, 11665], [11537, 11538, 11667], [11537, 11667, 11666], [11538, 11539, 11667], [11539, 11668, 11667], [11539, 11540, 11669], [11539, 11669, 11668], [11540, 11541, 11669], [11541, 11670, 11669], [11541, 11542, 11671], [11541, 11671, 11670], [11542, 11543, 11671], [11543, 11672, 11671], [11543, 11544, 11673], [11543, 11673, 11672], [11544, 11545, 11673], [11545, 11674, 11673], [11545, 11546, 11675], [11545, 11675, 11674], [11546, 11547, 11675], [11547, 11676, 11675], [11547, 11548, 11677], [11547, 11677, 11676], [11548, 11549, 11677], [11549, 11678, 11677], [11549, 11550, 11679], [11549, 11679, 11678], [11550, 11551, 11679], [11551, 11680, 11679], [11551, 11552, 11681], [11551, 11681, 11680], [11552, 11553, 11681], [11553, 11682, 11681], [11553, 11554, 11683], [11553, 11683, 11682], [11554, 11555, 11683], [11555, 11684, 11683], [11555, 11556, 11685], [11555, 11685, 11684], [11556, 11557, 11685], [11557, 11686, 11685], [11557, 11558, 11687], [11557, 11687, 11686], [11558, 11559, 11687], [11559, 11688, 11687], [11559, 11560, 11689], [11559, 11689, 11688], [11560, 11561, 11689], [11561, 11690, 11689], [11561, 11562, 11691], [11561, 11691, 11690], [11562, 11563, 11691], [11563, 11692, 11691], [11563, 11564, 11693], [11563, 11693, 11692], [11564, 11565, 11693], [11565, 11694, 11693], [11565, 11566, 11695], [11565, 11695, 11694], [11566, 11567, 11695], [11567, 11696, 11695], [11567, 11568, 11697], [11567, 11697, 11696], [11568, 11569, 11697], [11569, 
11698, 11697], [11569, 11570, 11699], [11569, 11699, 11698], [11570, 11571, 11699], [11571, 11700, 11699], [11571, 11572, 11701], [11571, 11701, 11700], [11572, 11573, 11701], [11573, 11702, 11701], [11573, 11574, 11703], [11573, 11703, 11702], [11574, 11575, 11703], [11575, 11704, 11703], [11575, 11576, 11705], [11575, 11705, 11704], [11576, 11577, 11705], [11577, 11706, 11705], [11577, 11578, 11707], [11577, 11707, 11706], [11578, 11579, 11707], [11579, 11708, 11707], [11579, 11580, 11709], [11579, 11709, 11708], [11580, 11581, 11709], [11581, 11710, 11709], [11581, 11582, 11711], [11581, 11711, 11710], [11582, 11583, 11711], [11583, 11712, 11711], [11583, 11584, 11713], [11583, 11713, 11712], [11584, 11585, 11713], [11585, 11714, 11713], [11585, 11586, 11715], [11585, 11715, 11714], [11586, 11587, 11715], [11587, 11716, 11715], [11587, 11588, 11717], [11587, 11717, 11716], [11588, 11589, 11717], [11589, 11718, 11717], [11589, 11590, 11719], [11589, 11719, 11718], [11590, 11591, 11719], [11591, 11720, 11719], [11591, 11592, 11721], [11591, 11721, 11720], [11592, 11593, 11721], [11593, 11722, 11721], [11593, 11594, 11723], [11593, 11723, 11722], [11594, 11595, 11723], [11595, 11724, 11723], [11595, 11596, 11725], [11595, 11725, 11724], [11596, 11597, 11725], [11597, 11726, 11725], [11597, 11598, 11727], [11597, 11727, 11726], [11598, 11599, 11727], [11599, 11728, 11727], [11599, 11600, 11729], [11599, 11729, 11728], [11600, 11601, 11729], [11601, 11730, 11729], [11601, 11602, 11731], [11601, 11731, 11730], [11602, 11603, 11731], [11603, 11732, 11731], [11603, 11604, 11733], [11603, 11733, 11732], [11604, 11605, 11733], [11605, 11734, 11733], [11605, 11606, 11735], [11605, 11735, 11734], [11606, 11607, 11735], [11607, 11736, 11735], [11607, 11608, 11737], [11607, 11737, 11736], [11608, 11609, 11737], [11609, 11738, 11737], [11609, 11610, 11739], [11609, 11739, 11738], [11610, 11611, 11739], [11611, 11740, 11739], [11611, 11612, 11741], [11611, 11741, 11740], [11612, 11613, 11741], [11613, 11742, 11741], [11613, 11614, 11743], [11613, 11743, 11742], [11614, 11615, 11743], [11615, 11744, 11743], [11615, 11616, 11745], [11615, 11745, 11744], [11616, 11617, 11745], [11617, 11746, 11745], [11617, 11618, 11747], [11617, 11747, 11746], [11618, 11619, 11747], [11619, 11748, 11747], [11619, 11620, 11749], [11619, 11749, 11748], [11620, 11621, 11749], [11621, 11750, 11749], [11621, 11622, 11751], [11621, 11751, 11750], [11622, 11623, 11751], [11623, 11752, 11751], [11623, 11624, 11753], [11623, 11753, 11752], [11624, 11625, 11753], [11625, 11754, 11753], [11625, 11626, 11755], [11625, 11755, 11754], [11626, 11627, 11755], [11627, 11756, 11755], [11627, 11628, 11757], [11627, 11757, 11756], [11628, 11629, 11757], [11629, 11758, 11757], [11629, 11630, 11759], [11629, 11759, 11758], [11630, 11631, 11759], [11631, 11760, 11759], [11631, 11632, 11761], [11631, 11761, 11760], [11632, 11633, 11761], [11633, 11762, 11761], [11633, 11634, 11763], [11633, 11763, 11762], [11634, 11635, 11763], [11635, 11764, 11763], [11635, 11636, 11765], [11635, 11765, 11764], [11636, 11637, 11765], [11637, 11766, 11765], [11637, 11638, 11767], [11637, 11767, 11766], [11638, 11639, 11767], [11639, 11768, 11767], [11639, 11640, 11769], [11639, 11769, 11768], [11640, 11641, 11769], [11641, 11770, 11769], [11641, 11642, 11771], [11641, 11771, 11770], [11642, 11643, 11771], [11643, 11772, 11771], [11643, 11644, 11773], [11643, 11773, 11772], [11644, 11645, 11773], [11645, 11774, 11773], [11645, 11646, 11775], [11645, 11775, 
11774], [11646, 11647, 11775], [11647, 11776, 11775], [11647, 11648, 11777], [11647, 11777, 11776], [11648, 11649, 11777], [11649, 11778, 11777], [11649, 11650, 11779], [11649, 11779, 11778], [11650, 11651, 11779], [11651, 11780, 11779], [11651, 11652, 11781], [11651, 11781, 11780], [11652, 11653, 11781], [11653, 11782, 11781], [11653, 11654, 11783], [11653, 11783, 11782], [11654, 11655, 11783], [11655, 11784, 11783], [11656, 11657, 11785], [11657, 11786, 11785], [11657, 11658, 11787], [11657, 11787, 11786], [11658, 11659, 11787], [11659, 11788, 11787], [11659, 11660, 11789], [11659, 11789, 11788], [11660, 11661, 11789], [11661, 11790, 11789], [11661, 11662, 11791], [11661, 11791, 11790], [11662, 11663, 11791], [11663, 11792, 11791], [11663, 11664, 11793], [11663, 11793, 11792], [11664, 11665, 11793], [11665, 11794, 11793], [11665, 11666, 11795], [11665, 11795, 11794], [11666, 11667, 11795], [11667, 11796, 11795], [11667, 11668, 11797], [11667, 11797, 11796], [11668, 11669, 11797], [11669, 11798, 11797], [11669, 11670, 11799], [11669, 11799, 11798], [11670, 11671, 11799], [11671, 11800, 11799], [11671, 11672, 11801], [11671, 11801, 11800], [11672, 11673, 11801], [11673, 11802, 11801], [11673, 11674, 11803], [11673, 11803, 11802], [11674, 11675, 11803], [11675, 11804, 11803], [11675, 11676, 11805], [11675, 11805, 11804], [11676, 11677, 11805], [11677, 11806, 11805], [11677, 11678, 11807], [11677, 11807, 11806], [11678, 11679, 11807], [11679, 11808, 11807], [11679, 11680, 11809], [11679, 11809, 11808], [11680, 11681, 11809], [11681, 11810, 11809], [11681, 11682, 11811], [11681, 11811, 11810], [11682, 11683, 11811], [11683, 11812, 11811], [11683, 11684, 11813], [11683, 11813, 11812], [11684, 11685, 11813], [11685, 11814, 11813], [11685, 11686, 11815], [11685, 11815, 11814], [11686, 11687, 11815], [11687, 11816, 11815], [11687, 11688, 11817], [11687, 11817, 11816], [11688, 11689, 11817], [11689, 11818, 11817], [11689, 11690, 11819], [11689, 11819, 11818], [11690, 11691, 11819], [11691, 11820, 11819], [11691, 11692, 11821], [11691, 11821, 11820], [11692, 11693, 11821], [11693, 11822, 11821], [11693, 11694, 11823], [11693, 11823, 11822], [11694, 11695, 11823], [11695, 11824, 11823], [11695, 11696, 11825], [11695, 11825, 11824], [11696, 11697, 11825], [11697, 11826, 11825], [11697, 11698, 11827], [11697, 11827, 11826], [11698, 11699, 11827], [11699, 11828, 11827], [11699, 11700, 11829], [11699, 11829, 11828], [11700, 11701, 11829], [11701, 11830, 11829], [11701, 11702, 11831], [11701, 11831, 11830], [11702, 11703, 11831], [11703, 11832, 11831], [11703, 11704, 11833], [11703, 11833, 11832], [11704, 11705, 11833], [11705, 11834, 11833], [11705, 11706, 11835], [11705, 11835, 11834], [11706, 11707, 11835], [11707, 11836, 11835], [11707, 11708, 11837], [11707, 11837, 11836], [11708, 11709, 11837], [11709, 11838, 11837], [11709, 11710, 11839], [11709, 11839, 11838], [11710, 11711, 11839], [11711, 11840, 11839], [11711, 11712, 11841], [11711, 11841, 11840], [11712, 11713, 11841], [11713, 11842, 11841], [11713, 11714, 11843], [11713, 11843, 11842], [11714, 11715, 11843], [11715, 11844, 11843], [11715, 11716, 11845], [11715, 11845, 11844], [11716, 11717, 11845], [11717, 11846, 11845], [11717, 11718, 11847], [11717, 11847, 11846], [11718, 11719, 11847], [11719, 11848, 11847], [11719, 11720, 11849], [11719, 11849, 11848], [11720, 11721, 11849], [11721, 11850, 11849], [11721, 11722, 11851], [11721, 11851, 11850], [11722, 11723, 11851], [11723, 11852, 11851], [11723, 11724, 11853], [11723, 11853, 11852], 
[11724, 11725, 11853], [11725, 11854, 11853], [11725, 11726, 11855], [11725, 11855, 11854], [11726, 11727, 11855], [11727, 11856, 11855], [11727, 11728, 11857], [11727, 11857, 11856], [11728, 11729, 11857], [11729, 11858, 11857], [11729, 11730, 11859], [11729, 11859, 11858], [11730, 11731, 11859], [11731, 11860, 11859], [11731, 11732, 11861], [11731, 11861, 11860], [11732, 11733, 11861], [11733, 11862, 11861], [11733, 11734, 11863], [11733, 11863, 11862], [11734, 11735, 11863], [11735, 11864, 11863], [11735, 11736, 11865], [11735, 11865, 11864], [11736, 11737, 11865], [11737, 11866, 11865], [11737, 11738, 11867], [11737, 11867, 11866], [11738, 11739, 11867], [11739, 11868, 11867], [11739, 11740, 11869], [11739, 11869, 11868], [11740, 11741, 11869], [11741, 11870, 11869], [11741, 11742, 11871], [11741, 11871, 11870], [11742, 11743, 11871], [11743, 11872, 11871], [11743, 11744, 11873], [11743, 11873, 11872], [11744, 11745, 11873], [11745, 11874, 11873], [11745, 11746, 11875], [11745, 11875, 11874], [11746, 11747, 11875], [11747, 11876, 11875], [11747, 11748, 11877], [11747, 11877, 11876], [11748, 11749, 11877], [11749, 11878, 11877], [11749, 11750, 11879], [11749, 11879, 11878], [11750, 11751, 11879], [11751, 11880, 11879], [11751, 11752, 11881], [11751, 11881, 11880], [11752, 11753, 11881], [11753, 11882, 11881], [11753, 11754, 11883], [11753, 11883, 11882], [11754, 11755, 11883], [11755, 11884, 11883], [11755, 11756, 11885], [11755, 11885, 11884], [11756, 11757, 11885], [11757, 11886, 11885], [11757, 11758, 11887], [11757, 11887, 11886], [11758, 11759, 11887], [11759, 11888, 11887], [11759, 11760, 11889], [11759, 11889, 11888], [11760, 11761, 11889], [11761, 11890, 11889], [11761, 11762, 11891], [11761, 11891, 11890], [11762, 11763, 11891], [11763, 11892, 11891], [11763, 11764, 11893], [11763, 11893, 11892], [11764, 11765, 11893], [11765, 11894, 11893], [11765, 11766, 11895], [11765, 11895, 11894], [11766, 11767, 11895], [11767, 11896, 11895], [11767, 11768, 11897], [11767, 11897, 11896], [11768, 11769, 11897], [11769, 11898, 11897], [11769, 11770, 11899], [11769, 11899, 11898], [11770, 11771, 11899], [11771, 11900, 11899], [11771, 11772, 11901], [11771, 11901, 11900], [11772, 11773, 11901], [11773, 11902, 11901], [11773, 11774, 11903], [11773, 11903, 11902], [11774, 11775, 11903], [11775, 11904, 11903], [11775, 11776, 11905], [11775, 11905, 11904], [11776, 11777, 11905], [11777, 11906, 11905], [11777, 11778, 11907], [11777, 11907, 11906], [11778, 11779, 11907], [11779, 11908, 11907], [11779, 11780, 11909], [11779, 11909, 11908], [11780, 11781, 11909], [11781, 11910, 11909], [11781, 11782, 11911], [11781, 11911, 11910], [11782, 11783, 11911], [11783, 11912, 11911], [11783, 11784, 11913], [11783, 11913, 11912], [11785, 11786, 11915], [11785, 11915, 11914], [11786, 11787, 11915], [11787, 11916, 11915], [11787, 11788, 11917], [11787, 11917, 11916], [11788, 11789, 11917], [11789, 11918, 11917], [11789, 11790, 11919], [11789, 11919, 11918], [11790, 11791, 11919], [11791, 11920, 11919], [11791, 11792, 11921], [11791, 11921, 11920], [11792, 11793, 11921], [11793, 11922, 11921], [11793, 11794, 11923], [11793, 11923, 11922], [11794, 11795, 11923], [11795, 11924, 11923], [11795, 11796, 11925], [11795, 11925, 11924], [11796, 11797, 11925], [11797, 11926, 11925], [11797, 11798, 11927], [11797, 11927, 11926], [11798, 11799, 11927], [11799, 11928, 11927], [11799, 11800, 11929], [11799, 11929, 11928], [11800, 11801, 11929], [11801, 11930, 11929], [11801, 11802, 11931], [11801, 11931, 11930], [11802, 
11803, 11931], [11803, 11932, 11931], [11803, 11804, 11933], [11803, 11933, 11932], [11804, 11805, 11933], [11805, 11934, 11933], [11805, 11806, 11935], [11805, 11935, 11934], [11806, 11807, 11935], [11807, 11936, 11935], [11807, 11808, 11937], [11807, 11937, 11936], [11808, 11809, 11937], [11809, 11938, 11937], [11809, 11810, 11939], [11809, 11939, 11938], [11810, 11811, 11939], [11811, 11940, 11939], [11811, 11812, 11941], [11811, 11941, 11940], [11812, 11813, 11941], [11813, 11942, 11941], [11813, 11814, 11943], [11813, 11943, 11942], [11814, 11815, 11943], [11815, 11944, 11943], [11815, 11816, 11945], [11815, 11945, 11944], [11816, 11817, 11945], [11817, 11946, 11945], [11817, 11818, 11947], [11817, 11947, 11946], [11818, 11819, 11947], [11819, 11948, 11947], [11819, 11820, 11949], [11819, 11949, 11948], [11820, 11821, 11949], [11821, 11950, 11949], [11821, 11822, 11951], [11821, 11951, 11950], [11822, 11823, 11951], [11823, 11952, 11951], [11823, 11824, 11953], [11823, 11953, 11952], [11824, 11825, 11953], [11825, 11954, 11953], [11825, 11826, 11955], [11825, 11955, 11954], [11826, 11827, 11955], [11827, 11956, 11955], [11827, 11828, 11957], [11827, 11957, 11956], [11828, 11829, 11957], [11829, 11958, 11957], [11829, 11830, 11959], [11829, 11959, 11958], [11830, 11831, 11959], [11831, 11960, 11959], [11831, 11832, 11961], [11831, 11961, 11960], [11832, 11833, 11961], [11833, 11962, 11961], [11833, 11834, 11963], [11833, 11963, 11962], [11834, 11835, 11963], [11835, 11964, 11963], [11835, 11836, 11965], [11835, 11965, 11964], [11836, 11837, 11965], [11837, 11966, 11965], [11837, 11838, 11967], [11837, 11967, 11966], [11838, 11839, 11967], [11839, 11968, 11967], [11839, 11840, 11969], [11839, 11969, 11968], [11840, 11841, 11969], [11841, 11970, 11969], [11841, 11842, 11971], [11841, 11971, 11970], [11842, 11843, 11971], [11843, 11972, 11971], [11843, 11844, 11973], [11843, 11973, 11972], [11844, 11845, 11973], [11845, 11974, 11973], [11845, 11846, 11975], [11845, 11975, 11974], [11846, 11847, 11975], [11847, 11976, 11975], [11847, 11848, 11977], [11847, 11977, 11976], [11848, 11849, 11977], [11849, 11978, 11977], [11849, 11850, 11979], [11849, 11979, 11978], [11850, 11851, 11979], [11851, 11980, 11979], [11851, 11852, 11981], [11851, 11981, 11980], [11852, 11853, 11981], [11853, 11982, 11981], [11853, 11854, 11983], [11853, 11983, 11982], [11854, 11855, 11983], [11855, 11984, 11983], [11855, 11856, 11985], [11855, 11985, 11984], [11856, 11857, 11985], [11857, 11986, 11985], [11857, 11858, 11987], [11857, 11987, 11986], [11858, 11859, 11987], [11859, 11988, 11987], [11859, 11860, 11989], [11859, 11989, 11988], [11860, 11861, 11989], [11861, 11990, 11989], [11861, 11862, 11991], [11861, 11991, 11990], [11862, 11863, 11991], [11863, 11992, 11991], [11863, 11864, 11993], [11863, 11993, 11992], [11864, 11865, 11993], [11865, 11994, 11993], [11865, 11866, 11995], [11865, 11995, 11994], [11866, 11867, 11995], [11867, 11996, 11995], [11867, 11868, 11997], [11867, 11997, 11996], [11868, 11869, 11997], [11869, 11998, 11997], [11869, 11870, 11999], [11869, 11999, 11998], [11870, 11871, 11999], [11871, 12000, 11999], [11871, 11872, 12001], [11871, 12001, 12000], [11872, 11873, 12001], [11873, 12002, 12001], [11873, 11874, 12003], [11873, 12003, 12002], [11874, 11875, 12003], [11875, 12004, 12003], [11875, 11876, 12005], [11875, 12005, 12004], [11876, 11877, 12005], [11877, 12006, 12005], [11877, 11878, 12007], [11877, 12007, 12006], [11878, 11879, 12007], [11879, 12008, 12007], [11879, 11880, 
12009], [11879, 12009, 12008], [11880, 11881, 12009], [11881, 12010, 12009], [11881, 11882, 12011], [11881, 12011, 12010], [11882, 11883, 12011], [11883, 12012, 12011], [11883, 11884, 12013], [11883, 12013, 12012], [11884, 11885, 12013], [11885, 12014, 12013], [11885, 11886, 12015], [11885, 12015, 12014], [11886, 11887, 12015], [11887, 12016, 12015], [11887, 11888, 12017], [11887, 12017, 12016], [11888, 11889, 12017], [11889, 12018, 12017], [11889, 11890, 12019], [11889, 12019, 12018], [11890, 11891, 12019], [11891, 12020, 12019], [11891, 11892, 12021], [11891, 12021, 12020], [11892, 11893, 12021], [11893, 12022, 12021], [11893, 11894, 12023], [11893, 12023, 12022], [11894, 11895, 12023], [11895, 12024, 12023], [11895, 11896, 12025], [11895, 12025, 12024], [11896, 11897, 12025], [11897, 12026, 12025], [11897, 11898, 12027], [11897, 12027, 12026], [11898, 11899, 12027], [11899, 12028, 12027], [11899, 11900, 12029], [11899, 12029, 12028], [11900, 11901, 12029], [11901, 12030, 12029], [11901, 11902, 12031], [11901, 12031, 12030], [11902, 11903, 12031], [11903, 12032, 12031], [11903, 11904, 12033], [11903, 12033, 12032], [11904, 11905, 12033], [11905, 12034, 12033], [11905, 11906, 12035], [11905, 12035, 12034], [11906, 11907, 12035], [11907, 12036, 12035], [11907, 11908, 12037], [11907, 12037, 12036], [11908, 11909, 12037], [11909, 12038, 12037], [11909, 11910, 12039], [11909, 12039, 12038], [11910, 11911, 12039], [11911, 12040, 12039], [11911, 11912, 12041], [11911, 12041, 12040], [11912, 11913, 12041], [11913, 12042, 12041], [11914, 11915, 12043], [11915, 12044, 12043], [11915, 11916, 12045], [11915, 12045, 12044], [11916, 11917, 12045], [11917, 12046, 12045], [11917, 11918, 12047], [11917, 12047, 12046], [11918, 11919, 12047], [11919, 12048, 12047], [11919, 11920, 12049], [11919, 12049, 12048], [11920, 11921, 12049], [11921, 12050, 12049], [11921, 11922, 12051], [11921, 12051, 12050], [11922, 11923, 12051], [11923, 12052, 12051], [11923, 11924, 12053], [11923, 12053, 12052], [11924, 11925, 12053], [11925, 12054, 12053], [11925, 11926, 12055], [11925, 12055, 12054], [11926, 11927, 12055], [11927, 12056, 12055], [11927, 11928, 12057], [11927, 12057, 12056], [11928, 11929, 12057], [11929, 12058, 12057], [11929, 11930, 12059], [11929, 12059, 12058], [11930, 11931, 12059], [11931, 12060, 12059], [11931, 11932, 12061], [11931, 12061, 12060], [11932, 11933, 12061], [11933, 12062, 12061], [11933, 11934, 12063], [11933, 12063, 12062], [11934, 11935, 12063], [11935, 12064, 12063], [11935, 11936, 12065], [11935, 12065, 12064], [11936, 11937, 12065], [11937, 12066, 12065], [11937, 11938, 12067], [11937, 12067, 12066], [11938, 11939, 12067], [11939, 12068, 12067], [11939, 11940, 12069], [11939, 12069, 12068], [11940, 11941, 12069], [11941, 12070, 12069], [11941, 11942, 12071], [11941, 12071, 12070], [11942, 11943, 12071], [11943, 12072, 12071], [11943, 11944, 12073], [11943, 12073, 12072], [11944, 11945, 12073], [11945, 12074, 12073], [11945, 11946, 12075], [11945, 12075, 12074], [11946, 11947, 12075], [11947, 12076, 12075], [11947, 11948, 12077], [11947, 12077, 12076], [11948, 11949, 12077], [11949, 12078, 12077], [11949, 11950, 12079], [11949, 12079, 12078], [11950, 11951, 12079], [11951, 12080, 12079], [11951, 11952, 12081], [11951, 12081, 12080], [11952, 11953, 12081], [11953, 12082, 12081], [11953, 11954, 12083], [11953, 12083, 12082], [11954, 11955, 12083], [11955, 12084, 12083], [11955, 11956, 12085], [11955, 12085, 12084], [11956, 11957, 12085], [11957, 12086, 12085], [11957, 11958, 12087], 
[11957, 12087, 12086], [11958, 11959, 12087], [11959, 12088, 12087], [11959, 11960, 12089], [11959, 12089, 12088], [11960, 11961, 12089], [11961, 12090, 12089], [11961, 11962, 12091], [11961, 12091, 12090], [11962, 11963, 12091], [11963, 12092, 12091], [11963, 11964, 12093], [11963, 12093, 12092], [11964, 11965, 12093], [11965, 12094, 12093], [11965, 11966, 12095], [11965, 12095, 12094], [11966, 11967, 12095], [11967, 12096, 12095], [11967, 11968, 12097], [11967, 12097, 12096], [11968, 11969, 12097], [11969, 12098, 12097], [11969, 11970, 12099], [11969, 12099, 12098], [11970, 11971, 12099], [11971, 12100, 12099], [11971, 11972, 12101], [11971, 12101, 12100], [11972, 11973, 12101], [11973, 12102, 12101], [11973, 11974, 12103], [11973, 12103, 12102], [11974, 11975, 12103], [11975, 12104, 12103], [11975, 11976, 12105], [11975, 12105, 12104], [11976, 11977, 12105], [11977, 12106, 12105], [11977, 11978, 12107], [11977, 12107, 12106], [11978, 11979, 12107], [11979, 12108, 12107], [11979, 11980, 12109], [11979, 12109, 12108], [11980, 11981, 12109], [11981, 12110, 12109], [11981, 11982, 12111], [11981, 12111, 12110], [11982, 11983, 12111], [11983, 12112, 12111], [11983, 11984, 12113], [11983, 12113, 12112], [11984, 11985, 12113], [11985, 12114, 12113], [11985, 11986, 12115], [11985, 12115, 12114], [11986, 11987, 12115], [11987, 12116, 12115], [11987, 11988, 12117], [11987, 12117, 12116], [11988, 11989, 12117], [11989, 12118, 12117], [11989, 11990, 12119], [11989, 12119, 12118], [11990, 11991, 12119], [11991, 12120, 12119], [11991, 11992, 12121], [11991, 12121, 12120], [11992, 11993, 12121], [11993, 12122, 12121], [11993, 11994, 12123], [11993, 12123, 12122], [11994, 11995, 12123], [11995, 12124, 12123], [11995, 11996, 12125], [11995, 12125, 12124], [11996, 11997, 12125], [11997, 12126, 12125], [11997, 11998, 12127], [11997, 12127, 12126], [11998, 11999, 12127], [11999, 12128, 12127], [11999, 12000, 12129], [11999, 12129, 12128], [12000, 12001, 12129], [12001, 12130, 12129], [12001, 12002, 12131], [12001, 12131, 12130], [12002, 12003, 12131], [12003, 12132, 12131], [12003, 12004, 12133], [12003, 12133, 12132], [12004, 12005, 12133], [12005, 12134, 12133], [12005, 12006, 12135], [12005, 12135, 12134], [12006, 12007, 12135], [12007, 12136, 12135], [12007, 12008, 12137], [12007, 12137, 12136], [12008, 12009, 12137], [12009, 12138, 12137], [12009, 12010, 12139], [12009, 12139, 12138], [12010, 12011, 12139], [12011, 12140, 12139], [12011, 12012, 12141], [12011, 12141, 12140], [12012, 12013, 12141], [12013, 12142, 12141], [12013, 12014, 12143], [12013, 12143, 12142], [12014, 12015, 12143], [12015, 12144, 12143], [12015, 12016, 12145], [12015, 12145, 12144], [12016, 12017, 12145], [12017, 12146, 12145], [12017, 12018, 12147], [12017, 12147, 12146], [12018, 12019, 12147], [12019, 12148, 12147], [12019, 12020, 12149], [12019, 12149, 12148], [12020, 12021, 12149], [12021, 12150, 12149], [12021, 12022, 12151], [12021, 12151, 12150], [12022, 12023, 12151], [12023, 12152, 12151], [12023, 12024, 12153], [12023, 12153, 12152], [12024, 12025, 12153], [12025, 12154, 12153], [12025, 12026, 12155], [12025, 12155, 12154], [12026, 12027, 12155], [12027, 12156, 12155], [12027, 12028, 12157], [12027, 12157, 12156], [12028, 12029, 12157], [12029, 12158, 12157], [12029, 12030, 12159], [12029, 12159, 12158], [12030, 12031, 12159], [12031, 12160, 12159], [12031, 12032, 12161], [12031, 12161, 12160], [12032, 12033, 12161], [12033, 12162, 12161], [12033, 12034, 12163], [12033, 12163, 12162], [12034, 12035, 12163], [12035, 
12164, 12163], [12035, 12036, 12165], [12035, 12165, 12164], [12036, 12037, 12165], [12037, 12166, 12165], [12037, 12038, 12167], [12037, 12167, 12166], [12038, 12039, 12167], [12039, 12168, 12167], [12039, 12040, 12169], [12039, 12169, 12168], [12040, 12041, 12169], [12041, 12170, 12169], [12041, 12042, 12171], [12041, 12171, 12170], [12043, 12044, 12173], [12043, 12173, 12172], [12044, 12045, 12173], [12045, 12174, 12173], [12045, 12046, 12175], [12045, 12175, 12174], [12046, 12047, 12175], [12047, 12176, 12175], [12047, 12048, 12177], [12047, 12177, 12176], [12048, 12049, 12177], [12049, 12178, 12177], [12049, 12050, 12179], [12049, 12179, 12178], [12050, 12051, 12179], [12051, 12180, 12179], [12051, 12052, 12181], [12051, 12181, 12180], [12052, 12053, 12181], [12053, 12182, 12181], [12053, 12054, 12183], [12053, 12183, 12182], [12054, 12055, 12183], [12055, 12184, 12183], [12055, 12056, 12185], [12055, 12185, 12184], [12056, 12057, 12185], [12057, 12186, 12185], [12057, 12058, 12187], [12057, 12187, 12186], [12058, 12059, 12187], [12059, 12188, 12187], [12059, 12060, 12189], [12059, 12189, 12188], [12060, 12061, 12189], [12061, 12190, 12189], [12061, 12062, 12191], [12061, 12191, 12190], [12062, 12063, 12191], [12063, 12192, 12191], [12063, 12064, 12193], [12063, 12193, 12192], [12064, 12065, 12193], [12065, 12194, 12193], [12065, 12066, 12195], [12065, 12195, 12194], [12066, 12067, 12195], [12067, 12196, 12195], [12067, 12068, 12197], [12067, 12197, 12196], [12068, 12069, 12197], [12069, 12198, 12197], [12069, 12070, 12199], [12069, 12199, 12198], [12070, 12071, 12199], [12071, 12200, 12199], [12071, 12072, 12201], [12071, 12201, 12200], [12072, 12073, 12201], [12073, 12202, 12201], [12073, 12074, 12203], [12073, 12203, 12202], [12074, 12075, 12203], [12075, 12204, 12203], [12075, 12076, 12205], [12075, 12205, 12204], [12076, 12077, 12205], [12077, 12206, 12205], [12077, 12078, 12207], [12077, 12207, 12206], [12078, 12079, 12207], [12079, 12208, 12207], [12079, 12080, 12209], [12079, 12209, 12208], [12080, 12081, 12209], [12081, 12210, 12209], [12081, 12082, 12211], [12081, 12211, 12210], [12082, 12083, 12211], [12083, 12212, 12211], [12083, 12084, 12213], [12083, 12213, 12212], [12084, 12085, 12213], [12085, 12214, 12213], [12085, 12086, 12215], [12085, 12215, 12214], [12086, 12087, 12215], [12087, 12216, 12215], [12087, 12088, 12217], [12087, 12217, 12216], [12088, 12089, 12217], [12089, 12218, 12217], [12089, 12090, 12219], [12089, 12219, 12218], [12090, 12091, 12219], [12091, 12220, 12219], [12091, 12092, 12221], [12091, 12221, 12220], [12092, 12093, 12221], [12093, 12222, 12221], [12093, 12094, 12223], [12093, 12223, 12222], [12094, 12095, 12223], [12095, 12224, 12223], [12095, 12096, 12225], [12095, 12225, 12224], [12096, 12097, 12225], [12097, 12226, 12225], [12097, 12098, 12227], [12097, 12227, 12226], [12098, 12099, 12227], [12099, 12228, 12227], [12099, 12100, 12229], [12099, 12229, 12228], [12100, 12101, 12229], [12101, 12230, 12229], [12101, 12102, 12231], [12101, 12231, 12230], [12102, 12103, 12231], [12103, 12232, 12231], [12103, 12104, 12233], [12103, 12233, 12232], [12104, 12105, 12233], [12105, 12234, 12233], [12105, 12106, 12235], [12105, 12235, 12234], [12106, 12107, 12235], [12107, 12236, 12235], [12107, 12108, 12237], [12107, 12237, 12236], [12108, 12109, 12237], [12109, 12238, 12237], [12109, 12110, 12239], [12109, 12239, 12238], [12110, 12111, 12239], [12111, 12240, 12239], [12111, 12112, 12241], [12111, 12241, 12240], [12112, 12113, 12241], [12113, 12242, 
12241], [12113, 12114, 12243], [12113, 12243, 12242], [12114, 12115, 12243], [12115, 12244, 12243], [12115, 12116, 12245], [12115, 12245, 12244], [12116, 12117, 12245], [12117, 12246, 12245], [12117, 12118, 12247], [12117, 12247, 12246], [12118, 12119, 12247], [12119, 12248, 12247], [12119, 12120, 12249], [12119, 12249, 12248], [12120, 12121, 12249], [12121, 12250, 12249], [12121, 12122, 12251], [12121, 12251, 12250], [12122, 12123, 12251], [12123, 12252, 12251], [12123, 12124, 12253], [12123, 12253, 12252], [12124, 12125, 12253], [12125, 12254, 12253], [12125, 12126, 12255], [12125, 12255, 12254], [12126, 12127, 12255], [12127, 12256, 12255], [12127, 12128, 12257], [12127, 12257, 12256], [12128, 12129, 12257], [12129, 12258, 12257], [12129, 12130, 12259], [12129, 12259, 12258], [12130, 12131, 12259], [12131, 12260, 12259], [12131, 12132, 12261], [12131, 12261, 12260], [12132, 12133, 12261], [12133, 12262, 12261], [12133, 12134, 12263], [12133, 12263, 12262], [12134, 12135, 12263], [12135, 12264, 12263], [12135, 12136, 12265], [12135, 12265, 12264], [12136, 12137, 12265], [12137, 12266, 12265], [12137, 12138, 12267], [12137, 12267, 12266], [12138, 12139, 12267], [12139, 12268, 12267], [12139, 12140, 12269], [12139, 12269, 12268], [12140, 12141, 12269], [12141, 12270, 12269], [12141, 12142, 12271], [12141, 12271, 12270], [12142, 12143, 12271], [12143, 12272, 12271], [12143, 12144, 12273], [12143, 12273, 12272], [12144, 12145, 12273], [12145, 12274, 12273], [12145, 12146, 12275], [12145, 12275, 12274], [12146, 12147, 12275], [12147, 12276, 12275], [12147, 12148, 12277], [12147, 12277, 12276], [12148, 12149, 12277], [12149, 12278, 12277], [12149, 12150, 12279], [12149, 12279, 12278], [12150, 12151, 12279], [12151, 12280, 12279], [12151, 12152, 12281], [12151, 12281, 12280], [12152, 12153, 12281], [12153, 12282, 12281], [12153, 12154, 12283], [12153, 12283, 12282], [12154, 12155, 12283], [12155, 12284, 12283], [12155, 12156, 12285], [12155, 12285, 12284], [12156, 12157, 12285], [12157, 12286, 12285], [12157, 12158, 12287], [12157, 12287, 12286], [12158, 12159, 12287], [12159, 12288, 12287], [12159, 12160, 12289], [12159, 12289, 12288], [12160, 12161, 12289], [12161, 12290, 12289], [12161, 12162, 12291], [12161, 12291, 12290], [12162, 12163, 12291], [12163, 12292, 12291], [12163, 12164, 12293], [12163, 12293, 12292], [12164, 12165, 12293], [12165, 12294, 12293], [12165, 12166, 12295], [12165, 12295, 12294], [12166, 12167, 12295], [12167, 12296, 12295], [12167, 12168, 12297], [12167, 12297, 12296], [12168, 12169, 12297], [12169, 12298, 12297], [12169, 12170, 12299], [12169, 12299, 12298], [12170, 12171, 12299], [12171, 12300, 12299], [12172, 12173, 12301], [12173, 12302, 12301], [12173, 12174, 12303], [12173, 12303, 12302], [12174, 12175, 12303], [12175, 12304, 12303], [12175, 12176, 12305], [12175, 12305, 12304], [12176, 12177, 12305], [12177, 12306, 12305], [12177, 12178, 12307], [12177, 12307, 12306], [12178, 12179, 12307], [12179, 12308, 12307], [12179, 12180, 12309], [12179, 12309, 12308], [12180, 12181, 12309], [12181, 12310, 12309], [12181, 12182, 12311], [12181, 12311, 12310], [12182, 12183, 12311], [12183, 12312, 12311], [12183, 12184, 12313], [12183, 12313, 12312], [12184, 12185, 12313], [12185, 12314, 12313], [12185, 12186, 12315], [12185, 12315, 12314], [12186, 12187, 12315], [12187, 12316, 12315], [12187, 12188, 12317], [12187, 12317, 12316], [12188, 12189, 12317], [12189, 12318, 12317], [12189, 12190, 12319], [12189, 12319, 12318], [12190, 12191, 12319], [12191, 12320, 12319], 
[12191, 12192, 12321], [12191, 12321, 12320], [12192, 12193, 12321], [12193, 12322, 12321], [12193, 12194, 12323], [12193, 12323, 12322], [12194, 12195, 12323], [12195, 12324, 12323], [12195, 12196, 12325], [12195, 12325, 12324], [12196, 12197, 12325], [12197, 12326, 12325], [12197, 12198, 12327], [12197, 12327, 12326], [12198, 12199, 12327], [12199, 12328, 12327], [12199, 12200, 12329], [12199, 12329, 12328], [12200, 12201, 12329], [12201, 12330, 12329], [12201, 12202, 12331], [12201, 12331, 12330], [12202, 12203, 12331], [12203, 12332, 12331], [12203, 12204, 12333], [12203, 12333, 12332], [12204, 12205, 12333], [12205, 12334, 12333], [12205, 12206, 12335], [12205, 12335, 12334], [12206, 12207, 12335], [12207, 12336, 12335], [12207, 12208, 12337], [12207, 12337, 12336], [12208, 12209, 12337], [12209, 12338, 12337], [12209, 12210, 12339], [12209, 12339, 12338], [12210, 12211, 12339], [12211, 12340, 12339], [12211, 12212, 12341], [12211, 12341, 12340], [12212, 12213, 12341], [12213, 12342, 12341], [12213, 12214, 12343], [12213, 12343, 12342], [12214, 12215, 12343], [12215, 12344, 12343], [12215, 12216, 12345], [12215, 12345, 12344], [12216, 12217, 12345], [12217, 12346, 12345], [12217, 12218, 12347], [12217, 12347, 12346], [12218, 12219, 12347], [12219, 12348, 12347], [12219, 12220, 12349], [12219, 12349, 12348], [12220, 12221, 12349], [12221, 12350, 12349], [12221, 12222, 12351], [12221, 12351, 12350], [12222, 12223, 12351], [12223, 12352, 12351], [12223, 12224, 12353], [12223, 12353, 12352], [12224, 12225, 12353], [12225, 12354, 12353], [12225, 12226, 12355], [12225, 12355, 12354], [12226, 12227, 12355], [12227, 12356, 12355], [12227, 12228, 12357], [12227, 12357, 12356], [12228, 12229, 12357], [12229, 12358, 12357], [12229, 12230, 12359], [12229, 12359, 12358], [12230, 12231, 12359], [12231, 12360, 12359], [12231, 12232, 12361], [12231, 12361, 12360], [12232, 12233, 12361], [12233, 12362, 12361], [12233, 12234, 12363], [12233, 12363, 12362], [12234, 12235, 12363], [12235, 12364, 12363], [12235, 12236, 12365], [12235, 12365, 12364], [12236, 12237, 12365], [12237, 12366, 12365], [12237, 12238, 12367], [12237, 12367, 12366], [12238, 12239, 12367], [12239, 12368, 12367], [12239, 12240, 12369], [12239, 12369, 12368], [12240, 12241, 12369], [12241, 12370, 12369], [12241, 12242, 12371], [12241, 12371, 12370], [12242, 12243, 12371], [12243, 12372, 12371], [12243, 12244, 12373], [12243, 12373, 12372], [12244, 12245, 12373], [12245, 12374, 12373], [12245, 12246, 12375], [12245, 12375, 12374], [12246, 12247, 12375], [12247, 12376, 12375], [12247, 12248, 12377], [12247, 12377, 12376], [12248, 12249, 12377], [12249, 12378, 12377], [12249, 12250, 12379], [12249, 12379, 12378], [12250, 12251, 12379], [12251, 12380, 12379], [12251, 12252, 12381], [12251, 12381, 12380], [12252, 12253, 12381], [12253, 12382, 12381], [12253, 12254, 12383], [12253, 12383, 12382], [12254, 12255, 12383], [12255, 12384, 12383], [12255, 12256, 12385], [12255, 12385, 12384], [12256, 12257, 12385], [12257, 12386, 12385], [12257, 12258, 12387], [12257, 12387, 12386], [12258, 12259, 12387], [12259, 12388, 12387], [12259, 12260, 12389], [12259, 12389, 12388], [12260, 12261, 12389], [12261, 12390, 12389], [12261, 12262, 12391], [12261, 12391, 12390], [12262, 12263, 12391], [12263, 12392, 12391], [12263, 12264, 12393], [12263, 12393, 12392], [12264, 12265, 12393], [12265, 12394, 12393], [12265, 12266, 12395], [12265, 12395, 12394], [12266, 12267, 12395], [12267, 12396, 12395], [12267, 12268, 12397], [12267, 12397, 12396], [12268, 
12269, 12397], [12269, 12398, 12397], [12269, 12270, 12399], [12269, 12399, 12398], [12270, 12271, 12399], [12271, 12400, 12399], [12271, 12272, 12401], [12271, 12401, 12400], [12272, 12273, 12401], [12273, 12402, 12401], [12273, 12274, 12403], [12273, 12403, 12402], [12274, 12275, 12403], [12275, 12404, 12403], [12275, 12276, 12405], [12275, 12405, 12404], [12276, 12277, 12405], [12277, 12406, 12405], [12277, 12278, 12407], [12277, 12407, 12406], [12278, 12279, 12407], [12279, 12408, 12407], [12279, 12280, 12409], [12279, 12409, 12408], [12280, 12281, 12409], [12281, 12410, 12409], [12281, 12282, 12411], [12281, 12411, 12410], [12282, 12283, 12411], [12283, 12412, 12411], [12283, 12284, 12413], [12283, 12413, 12412], [12284, 12285, 12413], [12285, 12414, 12413], [12285, 12286, 12415], [12285, 12415, 12414], [12286, 12287, 12415], [12287, 12416, 12415], [12287, 12288, 12417], [12287, 12417, 12416], [12288, 12289, 12417], [12289, 12418, 12417], [12289, 12290, 12419], [12289, 12419, 12418], [12290, 12291, 12419], [12291, 12420, 12419], [12291, 12292, 12421], [12291, 12421, 12420], [12292, 12293, 12421], [12293, 12422, 12421], [12293, 12294, 12423], [12293, 12423, 12422], [12294, 12295, 12423], [12295, 12424, 12423], [12295, 12296, 12425], [12295, 12425, 12424], [12296, 12297, 12425], [12297, 12426, 12425], [12297, 12298, 12427], [12297, 12427, 12426], [12298, 12299, 12427], [12299, 12428, 12427], [12299, 12300, 12429], [12299, 12429, 12428], [12301, 12302, 12431], [12301, 12431, 12430], [12302, 12303, 12431], [12303, 12432, 12431], [12303, 12304, 12433], [12303, 12433, 12432], [12304, 12305, 12433], [12305, 12434, 12433], [12305, 12306, 12435], [12305, 12435, 12434], [12306, 12307, 12435], [12307, 12436, 12435], [12307, 12308, 12437], [12307, 12437, 12436], [12308, 12309, 12437], [12309, 12438, 12437], [12309, 12310, 12439], [12309, 12439, 12438], [12310, 12311, 12439], [12311, 12440, 12439], [12311, 12312, 12441], [12311, 12441, 12440], [12312, 12313, 12441], [12313, 12442, 12441], [12313, 12314, 12443], [12313, 12443, 12442], [12314, 12315, 12443], [12315, 12444, 12443], [12315, 12316, 12445], [12315, 12445, 12444], [12316, 12317, 12445], [12317, 12446, 12445], [12317, 12318, 12447], [12317, 12447, 12446], [12318, 12319, 12447], [12319, 12448, 12447], [12319, 12320, 12449], [12319, 12449, 12448], [12320, 12321, 12449], [12321, 12450, 12449], [12321, 12322, 12451], [12321, 12451, 12450], [12322, 12323, 12451], [12323, 12452, 12451], [12323, 12324, 12453], [12323, 12453, 12452], [12324, 12325, 12453], [12325, 12454, 12453], [12325, 12326, 12455], [12325, 12455, 12454], [12326, 12327, 12455], [12327, 12456, 12455], [12327, 12328, 12457], [12327, 12457, 12456], [12328, 12329, 12457], [12329, 12458, 12457], [12329, 12330, 12459], [12329, 12459, 12458], [12330, 12331, 12459], [12331, 12460, 12459], [12331, 12332, 12461], [12331, 12461, 12460], [12332, 12333, 12461], [12333, 12462, 12461], [12333, 12334, 12463], [12333, 12463, 12462], [12334, 12335, 12463], [12335, 12464, 12463], [12335, 12336, 12465], [12335, 12465, 12464], [12336, 12337, 12465], [12337, 12466, 12465], [12337, 12338, 12467], [12337, 12467, 12466], [12338, 12339, 12467], [12339, 12468, 12467], [12339, 12340, 12469], [12339, 12469, 12468], [12340, 12341, 12469], [12341, 12470, 12469], [12341, 12342, 12471], [12341, 12471, 12470], [12342, 12343, 12471], [12343, 12472, 12471], [12343, 12344, 12473], [12343, 12473, 12472], [12344, 12345, 12473], [12345, 12474, 12473], [12345, 12346, 12475], [12345, 12475, 12474], [12346, 12347, 
12475], [12347, 12476, 12475], [12347, 12348, 12477], [12347, 12477, 12476], [12348, 12349, 12477], [12349, 12478, 12477], [12349, 12350, 12479], [12349, 12479, 12478], [12350, 12351, 12479], [12351, 12480, 12479], [12351, 12352, 12481], [12351, 12481, 12480], [12352, 12353, 12481], [12353, 12482, 12481], [12353, 12354, 12483], [12353, 12483, 12482], [12354, 12355, 12483], [12355, 12484, 12483], [12355, 12356, 12485], [12355, 12485, 12484], [12356, 12357, 12485], [12357, 12486, 12485], [12357, 12358, 12487], [12357, 12487, 12486], [12358, 12359, 12487], [12359, 12488, 12487], [12359, 12360, 12489], [12359, 12489, 12488], [12360, 12361, 12489], [12361, 12490, 12489], [12361, 12362, 12491], [12361, 12491, 12490], [12362, 12363, 12491], [12363, 12492, 12491], [12363, 12364, 12493], [12363, 12493, 12492], [12364, 12365, 12493], [12365, 12494, 12493], [12365, 12366, 12495], [12365, 12495, 12494], [12366, 12367, 12495], [12367, 12496, 12495], [12367, 12368, 12497], [12367, 12497, 12496], [12368, 12369, 12497], [12369, 12498, 12497], [12369, 12370, 12499], [12369, 12499, 12498], [12370, 12371, 12499], [12371, 12500, 12499], [12371, 12372, 12501], [12371, 12501, 12500], [12372, 12373, 12501], [12373, 12502, 12501], [12373, 12374, 12503], [12373, 12503, 12502], [12374, 12375, 12503], [12375, 12504, 12503], [12375, 12376, 12505], [12375, 12505, 12504], [12376, 12377, 12505], [12377, 12506, 12505], [12377, 12378, 12507], [12377, 12507, 12506], [12378, 12379, 12507], [12379, 12508, 12507], [12379, 12380, 12509], [12379, 12509, 12508], [12380, 12381, 12509], [12381, 12510, 12509], [12381, 12382, 12511], [12381, 12511, 12510], [12382, 12383, 12511], [12383, 12512, 12511], [12383, 12384, 12513], [12383, 12513, 12512], [12384, 12385, 12513], [12385, 12514, 12513], [12385, 12386, 12515], [12385, 12515, 12514], [12386, 12387, 12515], [12387, 12516, 12515], [12387, 12388, 12517], [12387, 12517, 12516], [12388, 12389, 12517], [12389, 12518, 12517], [12389, 12390, 12519], [12389, 12519, 12518], [12390, 12391, 12519], [12391, 12520, 12519], [12391, 12392, 12521], [12391, 12521, 12520], [12392, 12393, 12521], [12393, 12522, 12521], [12393, 12394, 12523], [12393, 12523, 12522], [12394, 12395, 12523], [12395, 12524, 12523], [12395, 12396, 12525], [12395, 12525, 12524], [12396, 12397, 12525], [12397, 12526, 12525], [12397, 12398, 12527], [12397, 12527, 12526], [12398, 12399, 12527], [12399, 12528, 12527], [12399, 12400, 12529], [12399, 12529, 12528], [12400, 12401, 12529], [12401, 12530, 12529], [12401, 12402, 12531], [12401, 12531, 12530], [12402, 12403, 12531], [12403, 12532, 12531], [12403, 12404, 12533], [12403, 12533, 12532], [12404, 12405, 12533], [12405, 12534, 12533], [12405, 12406, 12535], [12405, 12535, 12534], [12406, 12407, 12535], [12407, 12536, 12535], [12407, 12408, 12537], [12407, 12537, 12536], [12408, 12409, 12537], [12409, 12538, 12537], [12409, 12410, 12539], [12409, 12539, 12538], [12410, 12411, 12539], [12411, 12540, 12539], [12411, 12412, 12541], [12411, 12541, 12540], [12412, 12413, 12541], [12413, 12542, 12541], [12413, 12414, 12543], [12413, 12543, 12542], [12414, 12415, 12543], [12415, 12544, 12543], [12415, 12416, 12545], [12415, 12545, 12544], [12416, 12417, 12545], [12417, 12546, 12545], [12417, 12418, 12547], [12417, 12547, 12546], [12418, 12419, 12547], [12419, 12548, 12547], [12419, 12420, 12549], [12419, 12549, 12548], [12420, 12421, 12549], [12421, 12550, 12549], [12421, 12422, 12551], [12421, 12551, 12550], [12422, 12423, 12551], [12423, 12552, 12551], [12423, 12424, 12553], 
[12423, 12553, 12552], [12424, 12425, 12553], [12425, 12554, 12553], [12425, 12426, 12555], [12425, 12555, 12554], [12426, 12427, 12555], [12427, 12556, 12555], [12427, 12428, 12557], [12427, 12557, 12556], [12428, 12429, 12557], [12429, 12558, 12557], [12430, 12431, 12559], [12431, 12560, 12559], [12431, 12432, 12561], [12431, 12561, 12560], [12432, 12433, 12561], [12433, 12562, 12561], [12433, 12434, 12563], [12433, 12563, 12562], [12434, 12435, 12563], [12435, 12564, 12563], [12435, 12436, 12565], [12435, 12565, 12564], [12436, 12437, 12565], [12437, 12566, 12565], [12437, 12438, 12567], [12437, 12567, 12566], [12438, 12439, 12567], [12439, 12568, 12567], [12439, 12440, 12569], [12439, 12569, 12568], [12440, 12441, 12569], [12441, 12570, 12569], [12441, 12442, 12571], [12441, 12571, 12570], [12442, 12443, 12571], [12443, 12572, 12571], [12443, 12444, 12573], [12443, 12573, 12572], [12444, 12445, 12573], [12445, 12574, 12573], [12445, 12446, 12575], [12445, 12575, 12574], [12446, 12447, 12575], [12447, 12576, 12575], [12447, 12448, 12577], [12447, 12577, 12576], [12448, 12449, 12577], [12449, 12578, 12577], [12449, 12450, 12579], [12449, 12579, 12578], [12450, 12451, 12579], [12451, 12580, 12579], [12451, 12452, 12581], [12451, 12581, 12580], [12452, 12453, 12581], [12453, 12582, 12581], [12453, 12454, 12583], [12453, 12583, 12582], [12454, 12455, 12583], [12455, 12584, 12583], [12455, 12456, 12585], [12455, 12585, 12584], [12456, 12457, 12585], [12457, 12586, 12585], [12457, 12458, 12587], [12457, 12587, 12586], [12458, 12459, 12587], [12459, 12588, 12587], [12459, 12460, 12589], [12459, 12589, 12588], [12460, 12461, 12589], [12461, 12590, 12589], [12461, 12462, 12591], [12461, 12591, 12590], [12462, 12463, 12591], [12463, 12592, 12591], [12463, 12464, 12593], [12463, 12593, 12592], [12464, 12465, 12593], [12465, 12594, 12593], [12465, 12466, 12595], [12465, 12595, 12594], [12466, 12467, 12595], [12467, 12596, 12595], [12467, 12468, 12597], [12467, 12597, 12596], [12468, 12469, 12597], [12469, 12598, 12597], [12469, 12470, 12599], [12469, 12599, 12598], [12470, 12471, 12599], [12471, 12600, 12599], [12471, 12472, 12601], [12471, 12601, 12600], [12472, 12473, 12601], [12473, 12602, 12601], [12473, 12474, 12603], [12473, 12603, 12602], [12474, 12475, 12603], [12475, 12604, 12603], [12475, 12476, 12605], [12475, 12605, 12604], [12476, 12477, 12605], [12477, 12606, 12605], [12477, 12478, 12607], [12477, 12607, 12606], [12478, 12479, 12607], [12479, 12608, 12607], [12479, 12480, 12609], [12479, 12609, 12608], [12480, 12481, 12609], [12481, 12610, 12609], [12481, 12482, 12611], [12481, 12611, 12610], [12482, 12483, 12611], [12483, 12612, 12611], [12483, 12484, 12613], [12483, 12613, 12612], [12484, 12485, 12613], [12485, 12614, 12613], [12485, 12486, 12615], [12485, 12615, 12614], [12486, 12487, 12615], [12487, 12616, 12615], [12487, 12488, 12617], [12487, 12617, 12616], [12488, 12489, 12617], [12489, 12618, 12617], [12489, 12490, 12619], [12489, 12619, 12618], [12490, 12491, 12619], [12491, 12620, 12619], [12491, 12492, 12621], [12491, 12621, 12620], [12492, 12493, 12621], [12493, 12622, 12621], [12493, 12494, 12623], [12493, 12623, 12622], [12494, 12495, 12623], [12495, 12624, 12623], [12495, 12496, 12625], [12495, 12625, 12624], [12496, 12497, 12625], [12497, 12626, 12625], [12497, 12498, 12627], [12497, 12627, 12626], [12498, 12499, 12627], [12499, 12628, 12627], [12499, 12500, 12629], [12499, 12629, 12628], [12500, 12501, 12629], [12501, 12630, 12629], [12501, 12502, 12631], [12501, 
12631, 12630], [12502, 12503, 12631], [12503, 12632, 12631], [12503, 12504, 12633], [12503, 12633, 12632], [12504, 12505, 12633], [12505, 12634, 12633], [12505, 12506, 12635], [12505, 12635, 12634], [12506, 12507, 12635], [12507, 12636, 12635], [12507, 12508, 12637], [12507, 12637, 12636], [12508, 12509, 12637], [12509, 12638, 12637], [12509, 12510, 12639], [12509, 12639, 12638], [12510, 12511, 12639], [12511, 12640, 12639], [12511, 12512, 12641], [12511, 12641, 12640], [12512, 12513, 12641], [12513, 12642, 12641], [12513, 12514, 12643], [12513, 12643, 12642], [12514, 12515, 12643], [12515, 12644, 12643], [12515, 12516, 12645], [12515, 12645, 12644], [12516, 12517, 12645], [12517, 12646, 12645], [12517, 12518, 12647], [12517, 12647, 12646], [12518, 12519, 12647], [12519, 12648, 12647], [12519, 12520, 12649], [12519, 12649, 12648], [12520, 12521, 12649], [12521, 12650, 12649], [12521, 12522, 12651], [12521, 12651, 12650], [12522, 12523, 12651], [12523, 12652, 12651], [12523, 12524, 12653], [12523, 12653, 12652], [12524, 12525, 12653], [12525, 12654, 12653], [12525, 12526, 12655], [12525, 12655, 12654], [12526, 12527, 12655], [12527, 12656, 12655], [12527, 12528, 12657], [12527, 12657, 12656], [12528, 12529, 12657], [12529, 12658, 12657], [12529, 12530, 12659], [12529, 12659, 12658], [12530, 12531, 12659], [12531, 12660, 12659], [12531, 12532, 12661], [12531, 12661, 12660], [12532, 12533, 12661], [12533, 12662, 12661], [12533, 12534, 12663], [12533, 12663, 12662], [12534, 12535, 12663], [12535, 12664, 12663], [12535, 12536, 12665], [12535, 12665, 12664], [12536, 12537, 12665], [12537, 12666, 12665], [12537, 12538, 12667], [12537, 12667, 12666], [12538, 12539, 12667], [12539, 12668, 12667], [12539, 12540, 12669], [12539, 12669, 12668], [12540, 12541, 12669], [12541, 12670, 12669], [12541, 12542, 12671], [12541, 12671, 12670], [12542, 12543, 12671], [12543, 12672, 12671], [12543, 12544, 12673], [12543, 12673, 12672], [12544, 12545, 12673], [12545, 12674, 12673], [12545, 12546, 12675], [12545, 12675, 12674], [12546, 12547, 12675], [12547, 12676, 12675], [12547, 12548, 12677], [12547, 12677, 12676], [12548, 12549, 12677], [12549, 12678, 12677], [12549, 12550, 12679], [12549, 12679, 12678], [12550, 12551, 12679], [12551, 12680, 12679], [12551, 12552, 12681], [12551, 12681, 12680], [12552, 12553, 12681], [12553, 12682, 12681], [12553, 12554, 12683], [12553, 12683, 12682], [12554, 12555, 12683], [12555, 12684, 12683], [12555, 12556, 12685], [12555, 12685, 12684], [12556, 12557, 12685], [12557, 12686, 12685], [12557, 12558, 12687], [12557, 12687, 12686], [12559, 12560, 12689], [12559, 12689, 12688], [12560, 12561, 12689], [12561, 12690, 12689], [12561, 12562, 12691], [12561, 12691, 12690], [12562, 12563, 12691], [12563, 12692, 12691], [12563, 12564, 12693], [12563, 12693, 12692], [12564, 12565, 12693], [12565, 12694, 12693], [12565, 12566, 12695], [12565, 12695, 12694], [12566, 12567, 12695], [12567, 12696, 12695], [12567, 12568, 12697], [12567, 12697, 12696], [12568, 12569, 12697], [12569, 12698, 12697], [12569, 12570, 12699], [12569, 12699, 12698], [12570, 12571, 12699], [12571, 12700, 12699], [12571, 12572, 12701], [12571, 12701, 12700], [12572, 12573, 12701], [12573, 12702, 12701], [12573, 12574, 12703], [12573, 12703, 12702], [12574, 12575, 12703], [12575, 12704, 12703], [12575, 12576, 12705], [12575, 12705, 12704], [12576, 12577, 12705], [12577, 12706, 12705], [12577, 12578, 12707], [12577, 12707, 12706], [12578, 12579, 12707], [12579, 12708, 12707], [12579, 12580, 12709], [12579, 12709, 
12708], [12580, 12581, 12709], [12581, 12710, 12709], [12581, 12582, 12711], [12581, 12711, 12710], [12582, 12583, 12711], [12583, 12712, 12711], [12583, 12584, 12713], [12583, 12713, 12712], [12584, 12585, 12713], [12585, 12714, 12713], [12585, 12586, 12715], [12585, 12715, 12714], [12586, 12587, 12715], [12587, 12716, 12715], [12587, 12588, 12717], [12587, 12717, 12716], [12588, 12589, 12717], [12589, 12718, 12717], [12589, 12590, 12719], [12589, 12719, 12718], [12590, 12591, 12719], [12591, 12720, 12719], [12591, 12592, 12721], [12591, 12721, 12720], [12592, 12593, 12721], [12593, 12722, 12721], [12593, 12594, 12723], [12593, 12723, 12722], [12594, 12595, 12723], [12595, 12724, 12723], [12595, 12596, 12725], [12595, 12725, 12724], [12596, 12597, 12725], [12597, 12726, 12725], [12597, 12598, 12727], [12597, 12727, 12726], [12598, 12599, 12727], [12599, 12728, 12727], [12599, 12600, 12729], [12599, 12729, 12728], [12600, 12601, 12729], [12601, 12730, 12729], [12601, 12602, 12731], [12601, 12731, 12730], [12602, 12603, 12731], [12603, 12732, 12731], [12603, 12604, 12733], [12603, 12733, 12732], [12604, 12605, 12733], [12605, 12734, 12733], [12605, 12606, 12735], [12605, 12735, 12734], [12606, 12607, 12735], [12607, 12736, 12735], [12607, 12608, 12737], [12607, 12737, 12736], [12608, 12609, 12737], [12609, 12738, 12737], [12609, 12610, 12739], [12609, 12739, 12738], [12610, 12611, 12739], [12611, 12740, 12739], [12611, 12612, 12741], [12611, 12741, 12740], [12612, 12613, 12741], [12613, 12742, 12741], [12613, 12614, 12743], [12613, 12743, 12742], [12614, 12615, 12743], [12615, 12744, 12743], [12615, 12616, 12745], [12615, 12745, 12744], [12616, 12617, 12745], [12617, 12746, 12745], [12617, 12618, 12747], [12617, 12747, 12746], [12618, 12619, 12747], [12619, 12748, 12747], [12619, 12620, 12749], [12619, 12749, 12748], [12620, 12621, 12749], [12621, 12750, 12749], [12621, 12622, 12751], [12621, 12751, 12750], [12622, 12623, 12751], [12623, 12752, 12751], [12623, 12624, 12753], [12623, 12753, 12752], [12624, 12625, 12753], [12625, 12754, 12753], [12625, 12626, 12755], [12625, 12755, 12754], [12626, 12627, 12755], [12627, 12756, 12755], [12627, 12628, 12757], [12627, 12757, 12756], [12628, 12629, 12757], [12629, 12758, 12757], [12629, 12630, 12759], [12629, 12759, 12758], [12630, 12631, 12759], [12631, 12760, 12759], [12631, 12632, 12761], [12631, 12761, 12760], [12632, 12633, 12761], [12633, 12762, 12761], [12633, 12634, 12763], [12633, 12763, 12762], [12634, 12635, 12763], [12635, 12764, 12763], [12635, 12636, 12765], [12635, 12765, 12764], [12636, 12637, 12765], [12637, 12766, 12765], [12637, 12638, 12767], [12637, 12767, 12766], [12638, 12639, 12767], [12639, 12768, 12767], [12639, 12640, 12769], [12639, 12769, 12768], [12640, 12641, 12769], [12641, 12770, 12769], [12641, 12642, 12771], [12641, 12771, 12770], [12642, 12643, 12771], [12643, 12772, 12771], [12643, 12644, 12773], [12643, 12773, 12772], [12644, 12645, 12773], [12645, 12774, 12773], [12645, 12646, 12775], [12645, 12775, 12774], [12646, 12647, 12775], [12647, 12776, 12775], [12647, 12648, 12777], [12647, 12777, 12776], [12648, 12649, 12777], [12649, 12778, 12777], [12649, 12650, 12779], [12649, 12779, 12778], [12650, 12651, 12779], [12651, 12780, 12779], [12651, 12652, 12781], [12651, 12781, 12780], [12652, 12653, 12781], [12653, 12782, 12781], [12653, 12654, 12783], [12653, 12783, 12782], [12654, 12655, 12783], [12655, 12784, 12783], [12655, 12656, 12785], [12655, 12785, 12784], [12656, 12657, 12785], [12657, 12786, 12785], 
[12657, 12658, 12787], [12657, 12787, 12786], [12658, 12659, 12787], [12659, 12788, 12787], [12659, 12660, 12789], [12659, 12789, 12788], [12660, 12661, 12789], [12661, 12790, 12789], [12661, 12662, 12791], [12661, 12791, 12790], [12662, 12663, 12791], [12663, 12792, 12791], [12663, 12664, 12793], [12663, 12793, 12792], [12664, 12665, 12793], [12665, 12794, 12793], [12665, 12666, 12795], [12665, 12795, 12794], [12666, 12667, 12795], [12667, 12796, 12795], [12667, 12668, 12797], [12667, 12797, 12796], [12668, 12669, 12797], [12669, 12798, 12797], [12669, 12670, 12799], [12669, 12799, 12798], [12670, 12671, 12799], [12671, 12800, 12799], [12671, 12672, 12801], [12671, 12801, 12800], [12672, 12673, 12801], [12673, 12802, 12801], [12673, 12674, 12803], [12673, 12803, 12802], [12674, 12675, 12803], [12675, 12804, 12803], [12675, 12676, 12805], [12675, 12805, 12804], [12676, 12677, 12805], [12677, 12806, 12805], [12677, 12678, 12807], [12677, 12807, 12806], [12678, 12679, 12807], [12679, 12808, 12807], [12679, 12680, 12809], [12679, 12809, 12808], [12680, 12681, 12809], [12681, 12810, 12809], [12681, 12682, 12811], [12681, 12811, 12810], [12682, 12683, 12811], [12683, 12812, 12811], [12683, 12684, 12813], [12683, 12813, 12812], [12684, 12685, 12813], [12685, 12814, 12813], [12685, 12686, 12815], [12685, 12815, 12814], [12686, 12687, 12815], [12687, 12816, 12815], [12688, 12689, 12817], [12689, 12818, 12817], [12689, 12690, 12819], [12689, 12819, 12818], [12690, 12691, 12819], [12691, 12820, 12819], [12691, 12692, 12821], [12691, 12821, 12820], [12692, 12693, 12821], [12693, 12822, 12821], [12693, 12694, 12823], [12693, 12823, 12822], [12694, 12695, 12823], [12695, 12824, 12823], [12695, 12696, 12825], [12695, 12825, 12824], [12696, 12697, 12825], [12697, 12826, 12825], [12697, 12698, 12827], [12697, 12827, 12826], [12698, 12699, 12827], [12699, 12828, 12827], [12699, 12700, 12829], [12699, 12829, 12828], [12700, 12701, 12829], [12701, 12830, 12829], [12701, 12702, 12831], [12701, 12831, 12830], [12702, 12703, 12831], [12703, 12832, 12831], [12703, 12704, 12833], [12703, 12833, 12832], [12704, 12705, 12833], [12705, 12834, 12833], [12705, 12706, 12835], [12705, 12835, 12834], [12706, 12707, 12835], [12707, 12836, 12835], [12707, 12708, 12837], [12707, 12837, 12836], [12708, 12709, 12837], [12709, 12838, 12837], [12709, 12710, 12839], [12709, 12839, 12838], [12710, 12711, 12839], [12711, 12840, 12839], [12711, 12712, 12841], [12711, 12841, 12840], [12712, 12713, 12841], [12713, 12842, 12841], [12713, 12714, 12843], [12713, 12843, 12842], [12714, 12715, 12843], [12715, 12844, 12843], [12715, 12716, 12845], [12715, 12845, 12844], [12716, 12717, 12845], [12717, 12846, 12845], [12717, 12718, 12847], [12717, 12847, 12846], [12718, 12719, 12847], [12719, 12848, 12847], [12719, 12720, 12849], [12719, 12849, 12848], [12720, 12721, 12849], [12721, 12850, 12849], [12721, 12722, 12851], [12721, 12851, 12850], [12722, 12723, 12851], [12723, 12852, 12851], [12723, 12724, 12853], [12723, 12853, 12852], [12724, 12725, 12853], [12725, 12854, 12853], [12725, 12726, 12855], [12725, 12855, 12854], [12726, 12727, 12855], [12727, 12856, 12855], [12727, 12728, 12857], [12727, 12857, 12856], [12728, 12729, 12857], [12729, 12858, 12857], [12729, 12730, 12859], [12729, 12859, 12858], [12730, 12731, 12859], [12731, 12860, 12859], [12731, 12732, 12861], [12731, 12861, 12860], [12732, 12733, 12861], [12733, 12862, 12861], [12733, 12734, 12863], [12733, 12863, 12862], [12734, 12735, 12863], [12735, 12864, 12863], [12735, 
12736, 12865], [12735, 12865, 12864], [12736, 12737, 12865], [12737, 12866, 12865], [12737, 12738, 12867], [12737, 12867, 12866], [12738, 12739, 12867], [12739, 12868, 12867], [12739, 12740, 12869], [12739, 12869, 12868], [12740, 12741, 12869], [12741, 12870, 12869], [12741, 12742, 12871], [12741, 12871, 12870], [12742, 12743, 12871], [12743, 12872, 12871], [12743, 12744, 12873], [12743, 12873, 12872], [12744, 12745, 12873], [12745, 12874, 12873], [12745, 12746, 12875], [12745, 12875, 12874], [12746, 12747, 12875], [12747, 12876, 12875], [12747, 12748, 12877], [12747, 12877, 12876], [12748, 12749, 12877], [12749, 12878, 12877], [12749, 12750, 12879], [12749, 12879, 12878], [12750, 12751, 12879], [12751, 12880, 12879], [12751, 12752, 12881], [12751, 12881, 12880], [12752, 12753, 12881], [12753, 12882, 12881], [12753, 12754, 12883], [12753, 12883, 12882], [12754, 12755, 12883], [12755, 12884, 12883], [12755, 12756, 12885], [12755, 12885, 12884], [12756, 12757, 12885], [12757, 12886, 12885], [12757, 12758, 12887], [12757, 12887, 12886], [12758, 12759, 12887], [12759, 12888, 12887], [12759, 12760, 12889], [12759, 12889, 12888], [12760, 12761, 12889], [12761, 12890, 12889], [12761, 12762, 12891], [12761, 12891, 12890], [12762, 12763, 12891], [12763, 12892, 12891], [12763, 12764, 12893], [12763, 12893, 12892], [12764, 12765, 12893], [12765, 12894, 12893], [12765, 12766, 12895], [12765, 12895, 12894], [12766, 12767, 12895], [12767, 12896, 12895], [12767, 12768, 12897], [12767, 12897, 12896], [12768, 12769, 12897], [12769, 12898, 12897], [12769, 12770, 12899], [12769, 12899, 12898], [12770, 12771, 12899], [12771, 12900, 12899], [12771, 12772, 12901], [12771, 12901, 12900], [12772, 12773, 12901], [12773, 12902, 12901], [12773, 12774, 12903], [12773, 12903, 12902], [12774, 12775, 12903], [12775, 12904, 12903], [12775, 12776, 12905], [12775, 12905, 12904], [12776, 12777, 12905], [12777, 12906, 12905], [12777, 12778, 12907], [12777, 12907, 12906], [12778, 12779, 12907], [12779, 12908, 12907], [12779, 12780, 12909], [12779, 12909, 12908], [12780, 12781, 12909], [12781, 12910, 12909], [12781, 12782, 12911], [12781, 12911, 12910], [12782, 12783, 12911], [12783, 12912, 12911], [12783, 12784, 12913], [12783, 12913, 12912], [12784, 12785, 12913], [12785, 12914, 12913], [12785, 12786, 12915], [12785, 12915, 12914], [12786, 12787, 12915], [12787, 12916, 12915], [12787, 12788, 12917], [12787, 12917, 12916], [12788, 12789, 12917], [12789, 12918, 12917], [12789, 12790, 12919], [12789, 12919, 12918], [12790, 12791, 12919], [12791, 12920, 12919], [12791, 12792, 12921], [12791, 12921, 12920], [12792, 12793, 12921], [12793, 12922, 12921], [12793, 12794, 12923], [12793, 12923, 12922], [12794, 12795, 12923], [12795, 12924, 12923], [12795, 12796, 12925], [12795, 12925, 12924], [12796, 12797, 12925], [12797, 12926, 12925], [12797, 12798, 12927], [12797, 12927, 12926], [12798, 12799, 12927], [12799, 12928, 12927], [12799, 12800, 12929], [12799, 12929, 12928], [12800, 12801, 12929], [12801, 12930, 12929], [12801, 12802, 12931], [12801, 12931, 12930], [12802, 12803, 12931], [12803, 12932, 12931], [12803, 12804, 12933], [12803, 12933, 12932], [12804, 12805, 12933], [12805, 12934, 12933], [12805, 12806, 12935], [12805, 12935, 12934], [12806, 12807, 12935], [12807, 12936, 12935], [12807, 12808, 12937], [12807, 12937, 12936], [12808, 12809, 12937], [12809, 12938, 12937], [12809, 12810, 12939], [12809, 12939, 12938], [12810, 12811, 12939], [12811, 12940, 12939], [12811, 12812, 12941], [12811, 12941, 12940], [12812, 12813, 
12941], [12813, 12942, 12941], [12813, 12814, 12943], [12813, 12943, 12942], [12814, 12815, 12943], [12815, 12944, 12943], [12815, 12816, 12945], [12815, 12945, 12944], [12817, 12818, 12947], [12817, 12947, 12946], [12818, 12819, 12947], [12819, 12948, 12947], [12819, 12820, 12949], [12819, 12949, 12948], [12820, 12821, 12949], [12821, 12950, 12949], [12821, 12822, 12951], [12821, 12951, 12950], [12822, 12823, 12951], [12823, 12952, 12951], [12823, 12824, 12953], [12823, 12953, 12952], [12824, 12825, 12953], [12825, 12954, 12953], [12825, 12826, 12955], [12825, 12955, 12954], [12826, 12827, 12955], [12827, 12956, 12955], [12827, 12828, 12957], [12827, 12957, 12956], [12828, 12829, 12957], [12829, 12958, 12957], [12829, 12830, 12959], [12829, 12959, 12958], [12830, 12831, 12959], [12831, 12960, 12959], [12831, 12832, 12961], [12831, 12961, 12960], [12832, 12833, 12961], [12833, 12962, 12961], [12833, 12834, 12963], [12833, 12963, 12962], [12834, 12835, 12963], [12835, 12964, 12963], [12835, 12836, 12965], [12835, 12965, 12964], [12836, 12837, 12965], [12837, 12966, 12965], [12837, 12838, 12967], [12837, 12967, 12966], [12838, 12839, 12967], [12839, 12968, 12967], [12839, 12840, 12969], [12839, 12969, 12968], [12840, 12841, 12969], [12841, 12970, 12969], [12841, 12842, 12971], [12841, 12971, 12970], [12842, 12843, 12971], [12843, 12972, 12971], [12843, 12844, 12973], [12843, 12973, 12972], [12844, 12845, 12973], [12845, 12974, 12973], [12845, 12846, 12975], [12845, 12975, 12974], [12846, 12847, 12975], [12847, 12976, 12975], [12847, 12848, 12977], [12847, 12977, 12976], [12848, 12849, 12977], [12849, 12978, 12977], [12849, 12850, 12979], [12849, 12979, 12978], [12850, 12851, 12979], [12851, 12980, 12979], [12851, 12852, 12981], [12851, 12981, 12980], [12852, 12853, 12981], [12853, 12982, 12981], [12853, 12854, 12983], [12853, 12983, 12982], [12854, 12855, 12983], [12855, 12984, 12983], [12855, 12856, 12985], [12855, 12985, 12984], [12856, 12857, 12985], [12857, 12986, 12985], [12857, 12858, 12987], [12857, 12987, 12986], [12858, 12859, 12987], [12859, 12988, 12987], [12859, 12860, 12989], [12859, 12989, 12988], [12860, 12861, 12989], [12861, 12990, 12989], [12861, 12862, 12991], [12861, 12991, 12990], [12862, 12863, 12991], [12863, 12992, 12991], [12863, 12864, 12993], [12863, 12993, 12992], [12864, 12865, 12993], [12865, 12994, 12993], [12865, 12866, 12995], [12865, 12995, 12994], [12866, 12867, 12995], [12867, 12996, 12995], [12867, 12868, 12997], [12867, 12997, 12996], [12868, 12869, 12997], [12869, 12998, 12997], [12869, 12870, 12999], [12869, 12999, 12998], [12870, 12871, 12999], [12871, 13000, 12999], [12871, 12872, 13001], [12871, 13001, 13000], [12872, 12873, 13001], [12873, 13002, 13001], [12873, 12874, 13003], [12873, 13003, 13002], [12874, 12875, 13003], [12875, 13004, 13003], [12875, 12876, 13005], [12875, 13005, 13004], [12876, 12877, 13005], [12877, 13006, 13005], [12877, 12878, 13007], [12877, 13007, 13006], [12878, 12879, 13007], [12879, 13008, 13007], [12879, 12880, 13009], [12879, 13009, 13008], [12880, 12881, 13009], [12881, 13010, 13009], [12881, 12882, 13011], [12881, 13011, 13010], [12882, 12883, 13011], [12883, 13012, 13011], [12883, 12884, 13013], [12883, 13013, 13012], [12884, 12885, 13013], [12885, 13014, 13013], [12885, 12886, 13015], [12885, 13015, 13014], [12886, 12887, 13015], [12887, 13016, 13015], [12887, 12888, 13017], [12887, 13017, 13016], [12888, 12889, 13017], [12889, 13018, 13017], [12889, 12890, 13019], [12889, 13019, 13018], [12890, 12891, 13019], 
[12891, 13020, 13019], [12891, 12892, 13021], [12891, 13021, 13020], [12892, 12893, 13021], [12893, 13022, 13021], [12893, 12894, 13023], [12893, 13023, 13022], [12894, 12895, 13023], [12895, 13024, 13023], [12895, 12896, 13025], [12895, 13025, 13024], [12896, 12897, 13025], [12897, 13026, 13025], [12897, 12898, 13027], [12897, 13027, 13026], [12898, 12899, 13027], [12899, 13028, 13027], [12899, 12900, 13029], [12899, 13029, 13028], [12900, 12901, 13029], [12901, 13030, 13029], [12901, 12902, 13031], [12901, 13031, 13030], [12902, 12903, 13031], [12903, 13032, 13031], [12903, 12904, 13033], [12903, 13033, 13032], [12904, 12905, 13033], [12905, 13034, 13033], [12905, 12906, 13035], [12905, 13035, 13034], [12906, 12907, 13035], [12907, 13036, 13035], [12907, 12908, 13037], [12907, 13037, 13036], [12908, 12909, 13037], [12909, 13038, 13037], [12909, 12910, 13039], [12909, 13039, 13038], [12910, 12911, 13039], [12911, 13040, 13039], [12911, 12912, 13041], [12911, 13041, 13040], [12912, 12913, 13041], [12913, 13042, 13041], [12913, 12914, 13043], [12913, 13043, 13042], [12914, 12915, 13043], [12915, 13044, 13043], [12915, 12916, 13045], [12915, 13045, 13044], [12916, 12917, 13045], [12917, 13046, 13045], [12917, 12918, 13047], [12917, 13047, 13046], [12918, 12919, 13047], [12919, 13048, 13047], [12919, 12920, 13049], [12919, 13049, 13048], [12920, 12921, 13049], [12921, 13050, 13049], [12921, 12922, 13051], [12921, 13051, 13050], [12922, 12923, 13051], [12923, 13052, 13051], [12923, 12924, 13053], [12923, 13053, 13052], [12924, 12925, 13053], [12925, 13054, 13053], [12925, 12926, 13055], [12925, 13055, 13054], [12926, 12927, 13055], [12927, 13056, 13055], [12927, 12928, 13057], [12927, 13057, 13056], [12928, 12929, 13057], [12929, 13058, 13057], [12929, 12930, 13059], [12929, 13059, 13058], [12930, 12931, 13059], [12931, 13060, 13059], [12931, 12932, 13061], [12931, 13061, 13060], [12932, 12933, 13061], [12933, 13062, 13061], [12933, 12934, 13063], [12933, 13063, 13062], [12934, 12935, 13063], [12935, 13064, 13063], [12935, 12936, 13065], [12935, 13065, 13064], [12936, 12937, 13065], [12937, 13066, 13065], [12937, 12938, 13067], [12937, 13067, 13066], [12938, 12939, 13067], [12939, 13068, 13067], [12939, 12940, 13069], [12939, 13069, 13068], [12940, 12941, 13069], [12941, 13070, 13069], [12941, 12942, 13071], [12941, 13071, 13070], [12942, 12943, 13071], [12943, 13072, 13071], [12943, 12944, 13073], [12943, 13073, 13072], [12944, 12945, 13073], [12945, 13074, 13073], [12946, 12947, 13075], [12947, 13076, 13075], [12947, 12948, 13077], [12947, 13077, 13076], [12948, 12949, 13077], [12949, 13078, 13077], [12949, 12950, 13079], [12949, 13079, 13078], [12950, 12951, 13079], [12951, 13080, 13079], [12951, 12952, 13081], [12951, 13081, 13080], [12952, 12953, 13081], [12953, 13082, 13081], [12953, 12954, 13083], [12953, 13083, 13082], [12954, 12955, 13083], [12955, 13084, 13083], [12955, 12956, 13085], [12955, 13085, 13084], [12956, 12957, 13085], [12957, 13086, 13085], [12957, 12958, 13087], [12957, 13087, 13086], [12958, 12959, 13087], [12959, 13088, 13087], [12959, 12960, 13089], [12959, 13089, 13088], [12960, 12961, 13089], [12961, 13090, 13089], [12961, 12962, 13091], [12961, 13091, 13090], [12962, 12963, 13091], [12963, 13092, 13091], [12963, 12964, 13093], [12963, 13093, 13092], [12964, 12965, 13093], [12965, 13094, 13093], [12965, 12966, 13095], [12965, 13095, 13094], [12966, 12967, 13095], [12967, 13096, 13095], [12967, 12968, 13097], [12967, 13097, 13096], [12968, 12969, 13097], [12969, 
13098, 13097], [12969, 12970, 13099], [12969, 13099, 13098], [12970, 12971, 13099], [12971, 13100, 13099], [12971, 12972, 13101], [12971, 13101, 13100], [12972, 12973, 13101], [12973, 13102, 13101], [12973, 12974, 13103], [12973, 13103, 13102], [12974, 12975, 13103], [12975, 13104, 13103], [12975, 12976, 13105], [12975, 13105, 13104], [12976, 12977, 13105], [12977, 13106, 13105], [12977, 12978, 13107], [12977, 13107, 13106], [12978, 12979, 13107], [12979, 13108, 13107], [12979, 12980, 13109], [12979, 13109, 13108], [12980, 12981, 13109], [12981, 13110, 13109], [12981, 12982, 13111], [12981, 13111, 13110], [12982, 12983, 13111], [12983, 13112, 13111], [12983, 12984, 13113], [12983, 13113, 13112], [12984, 12985, 13113], [12985, 13114, 13113], [12985, 12986, 13115], [12985, 13115, 13114], [12986, 12987, 13115], [12987, 13116, 13115], [12987, 12988, 13117], [12987, 13117, 13116], [12988, 12989, 13117], [12989, 13118, 13117], [12989, 12990, 13119], [12989, 13119, 13118], [12990, 12991, 13119], [12991, 13120, 13119], [12991, 12992, 13121], [12991, 13121, 13120], [12992, 12993, 13121], [12993, 13122, 13121], [12993, 12994, 13123], [12993, 13123, 13122], [12994, 12995, 13123], [12995, 13124, 13123], [12995, 12996, 13125], [12995, 13125, 13124], [12996, 12997, 13125], [12997, 13126, 13125], [12997, 12998, 13127], [12997, 13127, 13126], [12998, 12999, 13127], [12999, 13128, 13127], [12999, 13000, 13129], [12999, 13129, 13128], [13000, 13001, 13129], [13001, 13130, 13129], [13001, 13002, 13131], [13001, 13131, 13130], [13002, 13003, 13131], [13003, 13132, 13131], [13003, 13004, 13133], [13003, 13133, 13132], [13004, 13005, 13133], [13005, 13134, 13133], [13005, 13006, 13135], [13005, 13135, 13134], [13006, 13007, 13135], [13007, 13136, 13135], [13007, 13008, 13137], [13007, 13137, 13136], [13008, 13009, 13137], [13009, 13138, 13137], [13009, 13010, 13139], [13009, 13139, 13138], [13010, 13011, 13139], [13011, 13140, 13139], [13011, 13012, 13141], [13011, 13141, 13140], [13012, 13013, 13141], [13013, 13142, 13141], [13013, 13014, 13143], [13013, 13143, 13142], [13014, 13015, 13143], [13015, 13144, 13143], [13015, 13016, 13145], [13015, 13145, 13144], [13016, 13017, 13145], [13017, 13146, 13145], [13017, 13018, 13147], [13017, 13147, 13146], [13018, 13019, 13147], [13019, 13148, 13147], [13019, 13020, 13149], [13019, 13149, 13148], [13020, 13021, 13149], [13021, 13150, 13149], [13021, 13022, 13151], [13021, 13151, 13150], [13022, 13023, 13151], [13023, 13152, 13151], [13023, 13024, 13153], [13023, 13153, 13152], [13024, 13025, 13153], [13025, 13154, 13153], [13025, 13026, 13155], [13025, 13155, 13154], [13026, 13027, 13155], [13027, 13156, 13155], [13027, 13028, 13157], [13027, 13157, 13156], [13028, 13029, 13157], [13029, 13158, 13157], [13029, 13030, 13159], [13029, 13159, 13158], [13030, 13031, 13159], [13031, 13160, 13159], [13031, 13032, 13161], [13031, 13161, 13160], [13032, 13033, 13161], [13033, 13162, 13161], [13033, 13034, 13163], [13033, 13163, 13162], [13034, 13035, 13163], [13035, 13164, 13163], [13035, 13036, 13165], [13035, 13165, 13164], [13036, 13037, 13165], [13037, 13166, 13165], [13037, 13038, 13167], [13037, 13167, 13166], [13038, 13039, 13167], [13039, 13168, 13167], [13039, 13040, 13169], [13039, 13169, 13168], [13040, 13041, 13169], [13041, 13170, 13169], [13041, 13042, 13171], [13041, 13171, 13170], [13042, 13043, 13171], [13043, 13172, 13171], [13043, 13044, 13173], [13043, 13173, 13172], [13044, 13045, 13173], [13045, 13174, 13173], [13045, 13046, 13175], [13045, 13175, 
13174], [13046, 13047, 13175], [13047, 13176, 13175], [13047, 13048, 13177], [13047, 13177, 13176], [13048, 13049, 13177], [13049, 13178, 13177], [13049, 13050, 13179], [13049, 13179, 13178], [13050, 13051, 13179], [13051, 13180, 13179], [13051, 13052, 13181], [13051, 13181, 13180], [13052, 13053, 13181], [13053, 13182, 13181], [13053, 13054, 13183], [13053, 13183, 13182], [13054, 13055, 13183], [13055, 13184, 13183], [13055, 13056, 13185], [13055, 13185, 13184], [13056, 13057, 13185], [13057, 13186, 13185], [13057, 13058, 13187], [13057, 13187, 13186], [13058, 13059, 13187], [13059, 13188, 13187], [13059, 13060, 13189], [13059, 13189, 13188], [13060, 13061, 13189], [13061, 13190, 13189], [13061, 13062, 13191], [13061, 13191, 13190], [13062, 13063, 13191], [13063, 13192, 13191], [13063, 13064, 13193], [13063, 13193, 13192], [13064, 13065, 13193], [13065, 13194, 13193], [13065, 13066, 13195], [13065, 13195, 13194], [13066, 13067, 13195], [13067, 13196, 13195], [13067, 13068, 13197], [13067, 13197, 13196], [13068, 13069, 13197], [13069, 13198, 13197], [13069, 13070, 13199], [13069, 13199, 13198], [13070, 13071, 13199], [13071, 13200, 13199], [13071, 13072, 13201], [13071, 13201, 13200], [13072, 13073, 13201], [13073, 13202, 13201], [13073, 13074, 13203], [13073, 13203, 13202], [13075, 13076, 13205], [13075, 13205, 13204], [13076, 13077, 13205], [13077, 13206, 13205], [13077, 13078, 13207], [13077, 13207, 13206], [13078, 13079, 13207], [13079, 13208, 13207], [13079, 13080, 13209], [13079, 13209, 13208], [13080, 13081, 13209], [13081, 13210, 13209], [13081, 13082, 13211], [13081, 13211, 13210], [13082, 13083, 13211], [13083, 13212, 13211], [13083, 13084, 13213], [13083, 13213, 13212], [13084, 13085, 13213], [13085, 13214, 13213], [13085, 13086, 13215], [13085, 13215, 13214], [13086, 13087, 13215], [13087, 13216, 13215], [13087, 13088, 13217], [13087, 13217, 13216], [13088, 13089, 13217], [13089, 13218, 13217], [13089, 13090, 13219], [13089, 13219, 13218], [13090, 13091, 13219], [13091, 13220, 13219], [13091, 13092, 13221], [13091, 13221, 13220], [13092, 13093, 13221], [13093, 13222, 13221], [13093, 13094, 13223], [13093, 13223, 13222], [13094, 13095, 13223], [13095, 13224, 13223], [13095, 13096, 13225], [13095, 13225, 13224], [13096, 13097, 13225], [13097, 13226, 13225], [13097, 13098, 13227], [13097, 13227, 13226], [13098, 13099, 13227], [13099, 13228, 13227], [13099, 13100, 13229], [13099, 13229, 13228], [13100, 13101, 13229], [13101, 13230, 13229], [13101, 13102, 13231], [13101, 13231, 13230], [13102, 13103, 13231], [13103, 13232, 13231], [13103, 13104, 13233], [13103, 13233, 13232], [13104, 13105, 13233], [13105, 13234, 13233], [13105, 13106, 13235], [13105, 13235, 13234], [13106, 13107, 13235], [13107, 13236, 13235], [13107, 13108, 13237], [13107, 13237, 13236], [13108, 13109, 13237], [13109, 13238, 13237], [13109, 13110, 13239], [13109, 13239, 13238], [13110, 13111, 13239], [13111, 13240, 13239], [13111, 13112, 13241], [13111, 13241, 13240], [13112, 13113, 13241], [13113, 13242, 13241], [13113, 13114, 13243], [13113, 13243, 13242], [13114, 13115, 13243], [13115, 13244, 13243], [13115, 13116, 13245], [13115, 13245, 13244], [13116, 13117, 13245], [13117, 13246, 13245], [13117, 13118, 13247], [13117, 13247, 13246], [13118, 13119, 13247], [13119, 13248, 13247], [13119, 13120, 13249], [13119, 13249, 13248], [13120, 13121, 13249], [13121, 13250, 13249], [13121, 13122, 13251], [13121, 13251, 13250], [13122, 13123, 13251], [13123, 13252, 13251], [13123, 13124, 13253], [13123, 13253, 13252], 
[13124, 13125, 13253], [13125, 13254, 13253], [13125, 13126, 13255], [13125, 13255, 13254], [13126, 13127, 13255], [13127, 13256, 13255], [13127, 13128, 13257], [13127, 13257, 13256], [13128, 13129, 13257], [13129, 13258, 13257], [13129, 13130, 13259], [13129, 13259, 13258], [13130, 13131, 13259], [13131, 13260, 13259], [13131, 13132, 13261], [13131, 13261, 13260], [13132, 13133, 13261], [13133, 13262, 13261], [13133, 13134, 13263], [13133, 13263, 13262], [13134, 13135, 13263], [13135, 13264, 13263], [13135, 13136, 13265], [13135, 13265, 13264], [13136, 13137, 13265], [13137, 13266, 13265], [13137, 13138, 13267], [13137, 13267, 13266], [13138, 13139, 13267], [13139, 13268, 13267], [13139, 13140, 13269], [13139, 13269, 13268], [13140, 13141, 13269], [13141, 13270, 13269], [13141, 13142, 13271], [13141, 13271, 13270], [13142, 13143, 13271], [13143, 13272, 13271], [13143, 13144, 13273], [13143, 13273, 13272], [13144, 13145, 13273], [13145, 13274, 13273], [13145, 13146, 13275], [13145, 13275, 13274], [13146, 13147, 13275], [13147, 13276, 13275], [13147, 13148, 13277], [13147, 13277, 13276], [13148, 13149, 13277], [13149, 13278, 13277], [13149, 13150, 13279], [13149, 13279, 13278], [13150, 13151, 13279], [13151, 13280, 13279], [13151, 13152, 13281], [13151, 13281, 13280], [13152, 13153, 13281], [13153, 13282, 13281], [13153, 13154, 13283], [13153, 13283, 13282], [13154, 13155, 13283], [13155, 13284, 13283], [13155, 13156, 13285], [13155, 13285, 13284], [13156, 13157, 13285], [13157, 13286, 13285], [13157, 13158, 13287], [13157, 13287, 13286], [13158, 13159, 13287], [13159, 13288, 13287], [13159, 13160, 13289], [13159, 13289, 13288], [13160, 13161, 13289], [13161, 13290, 13289], [13161, 13162, 13291], [13161, 13291, 13290], [13162, 13163, 13291], [13163, 13292, 13291], [13163, 13164, 13293], [13163, 13293, 13292], [13164, 13165, 13293], [13165, 13294, 13293], [13165, 13166, 13295], [13165, 13295, 13294], [13166, 13167, 13295], [13167, 13296, 13295], [13167, 13168, 13297], [13167, 13297, 13296], [13168, 13169, 13297], [13169, 13298, 13297], [13169, 13170, 13299], [13169, 13299, 13298], [13170, 13171, 13299], [13171, 13300, 13299], [13171, 13172, 13301], [13171, 13301, 13300], [13172, 13173, 13301], [13173, 13302, 13301], [13173, 13174, 13303], [13173, 13303, 13302], [13174, 13175, 13303], [13175, 13304, 13303], [13175, 13176, 13305], [13175, 13305, 13304], [13176, 13177, 13305], [13177, 13306, 13305], [13177, 13178, 13307], [13177, 13307, 13306], [13178, 13179, 13307], [13179, 13308, 13307], [13179, 13180, 13309], [13179, 13309, 13308], [13180, 13181, 13309], [13181, 13310, 13309], [13181, 13182, 13311], [13181, 13311, 13310], [13182, 13183, 13311], [13183, 13312, 13311], [13183, 13184, 13313], [13183, 13313, 13312], [13184, 13185, 13313], [13185, 13314, 13313], [13185, 13186, 13315], [13185, 13315, 13314], [13186, 13187, 13315], [13187, 13316, 13315], [13187, 13188, 13317], [13187, 13317, 13316], [13188, 13189, 13317], [13189, 13318, 13317], [13189, 13190, 13319], [13189, 13319, 13318], [13190, 13191, 13319], [13191, 13320, 13319], [13191, 13192, 13321], [13191, 13321, 13320], [13192, 13193, 13321], [13193, 13322, 13321], [13193, 13194, 13323], [13193, 13323, 13322], [13194, 13195, 13323], [13195, 13324, 13323], [13195, 13196, 13325], [13195, 13325, 13324], [13196, 13197, 13325], [13197, 13326, 13325], [13197, 13198, 13327], [13197, 13327, 13326], [13198, 13199, 13327], [13199, 13328, 13327], [13199, 13200, 13329], [13199, 13329, 13328], [13200, 13201, 13329], [13201, 13330, 13329], [13201, 
13202, 13331], [13201, 13331, 13330], [13202, 13203, 13331], [13203, 13332, 13331], [13204, 13205, 13333], [13205, 13334, 13333], [13205, 13206, 13335], [13205, 13335, 13334], [13206, 13207, 13335], [13207, 13336, 13335], [13207, 13208, 13337], [13207, 13337, 13336], [13208, 13209, 13337], [13209, 13338, 13337], [13209, 13210, 13339], [13209, 13339, 13338], [13210, 13211, 13339], [13211, 13340, 13339], [13211, 13212, 13341], [13211, 13341, 13340], [13212, 13213, 13341], [13213, 13342, 13341], [13213, 13214, 13343], [13213, 13343, 13342], [13214, 13215, 13343], [13215, 13344, 13343], [13215, 13216, 13345], [13215, 13345, 13344], [13216, 13217, 13345], [13217, 13346, 13345], [13217, 13218, 13347], [13217, 13347, 13346], [13218, 13219, 13347], [13219, 13348, 13347], [13219, 13220, 13349], [13219, 13349, 13348], [13220, 13221, 13349], [13221, 13350, 13349], [13221, 13222, 13351], [13221, 13351, 13350], [13222, 13223, 13351], [13223, 13352, 13351], [13223, 13224, 13353], [13223, 13353, 13352], [13224, 13225, 13353], [13225, 13354, 13353], [13225, 13226, 13355], [13225, 13355, 13354], [13226, 13227, 13355], [13227, 13356, 13355], [13227, 13228, 13357], [13227, 13357, 13356], [13228, 13229, 13357], [13229, 13358, 13357], [13229, 13230, 13359], [13229, 13359, 13358], [13230, 13231, 13359], [13231, 13360, 13359], [13231, 13232, 13361], [13231, 13361, 13360], [13232, 13233, 13361], [13233, 13362, 13361], [13233, 13234, 13363], [13233, 13363, 13362], [13234, 13235, 13363], [13235, 13364, 13363], [13235, 13236, 13365], [13235, 13365, 13364], [13236, 13237, 13365], [13237, 13366, 13365], [13237, 13238, 13367], [13237, 13367, 13366], [13238, 13239, 13367], [13239, 13368, 13367], [13239, 13240, 13369], [13239, 13369, 13368], [13240, 13241, 13369], [13241, 13370, 13369], [13241, 13242, 13371], [13241, 13371, 13370], [13242, 13243, 13371], [13243, 13372, 13371], [13243, 13244, 13373], [13243, 13373, 13372], [13244, 13245, 13373], [13245, 13374, 13373], [13245, 13246, 13375], [13245, 13375, 13374], [13246, 13247, 13375], [13247, 13376, 13375], [13247, 13248, 13377], [13247, 13377, 13376], [13248, 13249, 13377], [13249, 13378, 13377], [13249, 13250, 13379], [13249, 13379, 13378], [13250, 13251, 13379], [13251, 13380, 13379], [13251, 13252, 13381], [13251, 13381, 13380], [13252, 13253, 13381], [13253, 13382, 13381], [13253, 13254, 13383], [13253, 13383, 13382], [13254, 13255, 13383], [13255, 13384, 13383], [13255, 13256, 13385], [13255, 13385, 13384], [13256, 13257, 13385], [13257, 13386, 13385], [13257, 13258, 13387], [13257, 13387, 13386], [13258, 13259, 13387], [13259, 13388, 13387], [13259, 13260, 13389], [13259, 13389, 13388], [13260, 13261, 13389], [13261, 13390, 13389], [13261, 13262, 13391], [13261, 13391, 13390], [13262, 13263, 13391], [13263, 13392, 13391], [13263, 13264, 13393], [13263, 13393, 13392], [13264, 13265, 13393], [13265, 13394, 13393], [13265, 13266, 13395], [13265, 13395, 13394], [13266, 13267, 13395], [13267, 13396, 13395], [13267, 13268, 13397], [13267, 13397, 13396], [13268, 13269, 13397], [13269, 13398, 13397], [13269, 13270, 13399], [13269, 13399, 13398], [13270, 13271, 13399], [13271, 13400, 13399], [13271, 13272, 13401], [13271, 13401, 13400], [13272, 13273, 13401], [13273, 13402, 13401], [13273, 13274, 13403], [13273, 13403, 13402], [13274, 13275, 13403], [13275, 13404, 13403], [13275, 13276, 13405], [13275, 13405, 13404], [13276, 13277, 13405], [13277, 13406, 13405], [13277, 13278, 13407], [13277, 13407, 13406], [13278, 13279, 13407], [13279, 13408, 13407], [13279, 13280, 
13409], [13279, 13409, 13408], [13280, 13281, 13409], [13281, 13410, 13409], [13281, 13282, 13411], [13281, 13411, 13410], [13282, 13283, 13411], [13283, 13412, 13411], [13283, 13284, 13413], [13283, 13413, 13412], [13284, 13285, 13413], [13285, 13414, 13413], [13285, 13286, 13415], [13285, 13415, 13414], [13286, 13287, 13415], [13287, 13416, 13415], [13287, 13288, 13417], [13287, 13417, 13416], [13288, 13289, 13417], [13289, 13418, 13417], [13289, 13290, 13419], [13289, 13419, 13418], [13290, 13291, 13419], [13291, 13420, 13419], [13291, 13292, 13421], [13291, 13421, 13420], [13292, 13293, 13421], [13293, 13422, 13421], [13293, 13294, 13423], [13293, 13423, 13422], [13294, 13295, 13423], [13295, 13424, 13423], [13295, 13296, 13425], [13295, 13425, 13424], [13296, 13297, 13425], [13297, 13426, 13425], [13297, 13298, 13427], [13297, 13427, 13426], [13298, 13299, 13427], [13299, 13428, 13427], [13299, 13300, 13429], [13299, 13429, 13428], [13300, 13301, 13429], [13301, 13430, 13429], [13301, 13302, 13431], [13301, 13431, 13430], [13302, 13303, 13431], [13303, 13432, 13431], [13303, 13304, 13433], [13303, 13433, 13432], [13304, 13305, 13433], [13305, 13434, 13433], [13305, 13306, 13435], [13305, 13435, 13434], [13306, 13307, 13435], [13307, 13436, 13435], [13307, 13308, 13437], [13307, 13437, 13436], [13308, 13309, 13437], [13309, 13438, 13437], [13309, 13310, 13439], [13309, 13439, 13438], [13310, 13311, 13439], [13311, 13440, 13439], [13311, 13312, 13441], [13311, 13441, 13440], [13312, 13313, 13441], [13313, 13442, 13441], [13313, 13314, 13443], [13313, 13443, 13442], [13314, 13315, 13443], [13315, 13444, 13443], [13315, 13316, 13445], [13315, 13445, 13444], [13316, 13317, 13445], [13317, 13446, 13445], [13317, 13318, 13447], [13317, 13447, 13446], [13318, 13319, 13447], [13319, 13448, 13447], [13319, 13320, 13449], [13319, 13449, 13448], [13320, 13321, 13449], [13321, 13450, 13449], [13321, 13322, 13451], [13321, 13451, 13450], [13322, 13323, 13451], [13323, 13452, 13451], [13323, 13324, 13453], [13323, 13453, 13452], [13324, 13325, 13453], [13325, 13454, 13453], [13325, 13326, 13455], [13325, 13455, 13454], [13326, 13327, 13455], [13327, 13456, 13455], [13327, 13328, 13457], [13327, 13457, 13456], [13328, 13329, 13457], [13329, 13458, 13457], [13329, 13330, 13459], [13329, 13459, 13458], [13330, 13331, 13459], [13331, 13460, 13459], [13331, 13332, 13461], [13331, 13461, 13460], [13333, 13334, 13463], [13333, 13463, 13462], [13334, 13335, 13463], [13335, 13464, 13463], [13335, 13336, 13465], [13335, 13465, 13464], [13336, 13337, 13465], [13337, 13466, 13465], [13337, 13338, 13467], [13337, 13467, 13466], [13338, 13339, 13467], [13339, 13468, 13467], [13339, 13340, 13469], [13339, 13469, 13468], [13340, 13341, 13469], [13341, 13470, 13469], [13341, 13342, 13471], [13341, 13471, 13470], [13342, 13343, 13471], [13343, 13472, 13471], [13343, 13344, 13473], [13343, 13473, 13472], [13344, 13345, 13473], [13345, 13474, 13473], [13345, 13346, 13475], [13345, 13475, 13474], [13346, 13347, 13475], [13347, 13476, 13475], [13347, 13348, 13477], [13347, 13477, 13476], [13348, 13349, 13477], [13349, 13478, 13477], [13349, 13350, 13479], [13349, 13479, 13478], [13350, 13351, 13479], [13351, 13480, 13479], [13351, 13352, 13481], [13351, 13481, 13480], [13352, 13353, 13481], [13353, 13482, 13481], [13353, 13354, 13483], [13353, 13483, 13482], [13354, 13355, 13483], [13355, 13484, 13483], [13355, 13356, 13485], [13355, 13485, 13484], [13356, 13357, 13485], [13357, 13486, 13485], [13357, 13358, 13487], 
[13357, 13487, 13486], [13358, 13359, 13487], [13359, 13488, 13487], [13359, 13360, 13489], [13359, 13489, 13488], [13360, 13361, 13489], [13361, 13490, 13489], [13361, 13362, 13491], [13361, 13491, 13490], [13362, 13363, 13491], [13363, 13492, 13491], [13363, 13364, 13493], [13363, 13493, 13492], [13364, 13365, 13493], [13365, 13494, 13493], [13365, 13366, 13495], [13365, 13495, 13494], [13366, 13367, 13495], [13367, 13496, 13495], [13367, 13368, 13497], [13367, 13497, 13496], [13368, 13369, 13497], [13369, 13498, 13497], [13369, 13370, 13499], [13369, 13499, 13498], [13370, 13371, 13499], [13371, 13500, 13499], [13371, 13372, 13501], [13371, 13501, 13500], [13372, 13373, 13501], [13373, 13502, 13501], [13373, 13374, 13503], [13373, 13503, 13502], [13374, 13375, 13503], [13375, 13504, 13503], [13375, 13376, 13505], [13375, 13505, 13504], [13376, 13377, 13505], [13377, 13506, 13505], [13377, 13378, 13507], [13377, 13507, 13506], [13378, 13379, 13507], [13379, 13508, 13507], [13379, 13380, 13509], [13379, 13509, 13508], [13380, 13381, 13509], [13381, 13510, 13509], [13381, 13382, 13511], [13381, 13511, 13510], [13382, 13383, 13511], [13383, 13512, 13511], [13383, 13384, 13513], [13383, 13513, 13512], [13384, 13385, 13513], [13385, 13514, 13513], [13385, 13386, 13515], [13385, 13515, 13514], [13386, 13387, 13515], [13387, 13516, 13515], [13387, 13388, 13517], [13387, 13517, 13516], [13388, 13389, 13517], [13389, 13518, 13517], [13389, 13390, 13519], [13389, 13519, 13518], [13390, 13391, 13519], [13391, 13520, 13519], [13391, 13392, 13521], [13391, 13521, 13520], [13392, 13393, 13521], [13393, 13522, 13521], [13393, 13394, 13523], [13393, 13523, 13522], [13394, 13395, 13523], [13395, 13524, 13523], [13395, 13396, 13525], [13395, 13525, 13524], [13396, 13397, 13525], [13397, 13526, 13525], [13397, 13398, 13527], [13397, 13527, 13526], [13398, 13399, 13527], [13399, 13528, 13527], [13399, 13400, 13529], [13399, 13529, 13528], [13400, 13401, 13529], [13401, 13530, 13529], [13401, 13402, 13531], [13401, 13531, 13530], [13402, 13403, 13531], [13403, 13532, 13531], [13403, 13404, 13533], [13403, 13533, 13532], [13404, 13405, 13533], [13405, 13534, 13533], [13405, 13406, 13535], [13405, 13535, 13534], [13406, 13407, 13535], [13407, 13536, 13535], [13407, 13408, 13537], [13407, 13537, 13536], [13408, 13409, 13537], [13409, 13538, 13537], [13409, 13410, 13539], [13409, 13539, 13538], [13410, 13411, 13539], [13411, 13540, 13539], [13411, 13412, 13541], [13411, 13541, 13540], [13412, 13413, 13541], [13413, 13542, 13541], [13413, 13414, 13543], [13413, 13543, 13542], [13414, 13415, 13543], [13415, 13544, 13543], [13415, 13416, 13545], [13415, 13545, 13544], [13416, 13417, 13545], [13417, 13546, 13545], [13417, 13418, 13547], [13417, 13547, 13546], [13418, 13419, 13547], [13419, 13548, 13547], [13419, 13420, 13549], [13419, 13549, 13548], [13420, 13421, 13549], [13421, 13550, 13549], [13421, 13422, 13551], [13421, 13551, 13550], [13422, 13423, 13551], [13423, 13552, 13551], [13423, 13424, 13553], [13423, 13553, 13552], [13424, 13425, 13553], [13425, 13554, 13553], [13425, 13426, 13555], [13425, 13555, 13554], [13426, 13427, 13555], [13427, 13556, 13555], [13427, 13428, 13557], [13427, 13557, 13556], [13428, 13429, 13557], [13429, 13558, 13557], [13429, 13430, 13559], [13429, 13559, 13558], [13430, 13431, 13559], [13431, 13560, 13559], [13431, 13432, 13561], [13431, 13561, 13560], [13432, 13433, 13561], [13433, 13562, 13561], [13433, 13434, 13563], [13433, 13563, 13562], [13434, 13435, 13563], [13435, 
13564, 13563], [13435, 13436, 13565], [13435, 13565, 13564], [13436, 13437, 13565], [13437, 13566, 13565], [13437, 13438, 13567], [13437, 13567, 13566], [13438, 13439, 13567], [13439, 13568, 13567], [13439, 13440, 13569], [13439, 13569, 13568], [13440, 13441, 13569], [13441, 13570, 13569], [13441, 13442, 13571], [13441, 13571, 13570], [13442, 13443, 13571], [13443, 13572, 13571], [13443, 13444, 13573], [13443, 13573, 13572], [13444, 13445, 13573], [13445, 13574, 13573], [13445, 13446, 13575], [13445, 13575, 13574], [13446, 13447, 13575], [13447, 13576, 13575], [13447, 13448, 13577], [13447, 13577, 13576], [13448, 13449, 13577], [13449, 13578, 13577], [13449, 13450, 13579], [13449, 13579, 13578], [13450, 13451, 13579], [13451, 13580, 13579], [13451, 13452, 13581], [13451, 13581, 13580], [13452, 13453, 13581], [13453, 13582, 13581], [13453, 13454, 13583], [13453, 13583, 13582], [13454, 13455, 13583], [13455, 13584, 13583], [13455, 13456, 13585], [13455, 13585, 13584], [13456, 13457, 13585], [13457, 13586, 13585], [13457, 13458, 13587], [13457, 13587, 13586], [13458, 13459, 13587], [13459, 13588, 13587], [13459, 13460, 13589], [13459, 13589, 13588], [13460, 13461, 13589], [13461, 13590, 13589], [13462, 13463, 13591], [13463, 13592, 13591], [13463, 13464, 13593], [13463, 13593, 13592], [13464, 13465, 13593], [13465, 13594, 13593], [13465, 13466, 13595], [13465, 13595, 13594], [13466, 13467, 13595], [13467, 13596, 13595], [13467, 13468, 13597], [13467, 13597, 13596], [13468, 13469, 13597], [13469, 13598, 13597], [13469, 13470, 13599], [13469, 13599, 13598], [13470, 13471, 13599], [13471, 13600, 13599], [13471, 13472, 13601], [13471, 13601, 13600], [13472, 13473, 13601], [13473, 13602, 13601], [13473, 13474, 13603], [13473, 13603, 13602], [13474, 13475, 13603], [13475, 13604, 13603], [13475, 13476, 13605], [13475, 13605, 13604], [13476, 13477, 13605], [13477, 13606, 13605], [13477, 13478, 13607], [13477, 13607, 13606], [13478, 13479, 13607], [13479, 13608, 13607], [13479, 13480, 13609], [13479, 13609, 13608], [13480, 13481, 13609], [13481, 13610, 13609], [13481, 13482, 13611], [13481, 13611, 13610], [13482, 13483, 13611], [13483, 13612, 13611], [13483, 13484, 13613], [13483, 13613, 13612], [13484, 13485, 13613], [13485, 13614, 13613], [13485, 13486, 13615], [13485, 13615, 13614], [13486, 13487, 13615], [13487, 13616, 13615], [13487, 13488, 13617], [13487, 13617, 13616], [13488, 13489, 13617], [13489, 13618, 13617], [13489, 13490, 13619], [13489, 13619, 13618], [13490, 13491, 13619], [13491, 13620, 13619], [13491, 13492, 13621], [13491, 13621, 13620], [13492, 13493, 13621], [13493, 13622, 13621], [13493, 13494, 13623], [13493, 13623, 13622], [13494, 13495, 13623], [13495, 13624, 13623], [13495, 13496, 13625], [13495, 13625, 13624], [13496, 13497, 13625], [13497, 13626, 13625], [13497, 13498, 13627], [13497, 13627, 13626], [13498, 13499, 13627], [13499, 13628, 13627], [13499, 13500, 13629], [13499, 13629, 13628], [13500, 13501, 13629], [13501, 13630, 13629], [13501, 13502, 13631], [13501, 13631, 13630], [13502, 13503, 13631], [13503, 13632, 13631], [13503, 13504, 13633], [13503, 13633, 13632], [13504, 13505, 13633], [13505, 13634, 13633], [13505, 13506, 13635], [13505, 13635, 13634], [13506, 13507, 13635], [13507, 13636, 13635], [13507, 13508, 13637], [13507, 13637, 13636], [13508, 13509, 13637], [13509, 13638, 13637], [13509, 13510, 13639], [13509, 13639, 13638], [13510, 13511, 13639], [13511, 13640, 13639], [13511, 13512, 13641], [13511, 13641, 13640], [13512, 13513, 13641], [13513, 13642, 
13641], [13513, 13514, 13643], [13513, 13643, 13642], [13514, 13515, 13643], [13515, 13644, 13643], [13515, 13516, 13645], [13515, 13645, 13644], [13516, 13517, 13645], [13517, 13646, 13645], [13517, 13518, 13647], [13517, 13647, 13646], [13518, 13519, 13647], [13519, 13648, 13647], [13519, 13520, 13649], [13519, 13649, 13648], [13520, 13521, 13649], [13521, 13650, 13649], [13521, 13522, 13651], [13521, 13651, 13650], [13522, 13523, 13651], [13523, 13652, 13651], [13523, 13524, 13653], [13523, 13653, 13652], [13524, 13525, 13653], [13525, 13654, 13653], [13525, 13526, 13655], [13525, 13655, 13654], [13526, 13527, 13655], [13527, 13656, 13655], [13527, 13528, 13657], [13527, 13657, 13656], [13528, 13529, 13657], [13529, 13658, 13657], [13529, 13530, 13659], [13529, 13659, 13658], [13530, 13531, 13659], [13531, 13660, 13659], [13531, 13532, 13661], [13531, 13661, 13660], [13532, 13533, 13661], [13533, 13662, 13661], [13533, 13534, 13663], [13533, 13663, 13662], [13534, 13535, 13663], [13535, 13664, 13663], [13535, 13536, 13665], [13535, 13665, 13664], [13536, 13537, 13665], [13537, 13666, 13665], [13537, 13538, 13667], [13537, 13667, 13666], [13538, 13539, 13667], [13539, 13668, 13667], [13539, 13540, 13669], [13539, 13669, 13668], [13540, 13541, 13669], [13541, 13670, 13669], [13541, 13542, 13671], [13541, 13671, 13670], [13542, 13543, 13671], [13543, 13672, 13671], [13543, 13544, 13673], [13543, 13673, 13672], [13544, 13545, 13673], [13545, 13674, 13673], [13545, 13546, 13675], [13545, 13675, 13674], [13546, 13547, 13675], [13547, 13676, 13675], [13547, 13548, 13677], [13547, 13677, 13676], [13548, 13549, 13677], [13549, 13678, 13677], [13549, 13550, 13679], [13549, 13679, 13678], [13550, 13551, 13679], [13551, 13680, 13679], [13551, 13552, 13681], [13551, 13681, 13680], [13552, 13553, 13681], [13553, 13682, 13681], [13553, 13554, 13683], [13553, 13683, 13682], [13554, 13555, 13683], [13555, 13684, 13683], [13555, 13556, 13685], [13555, 13685, 13684], [13556, 13557, 13685], [13557, 13686, 13685], [13557, 13558, 13687], [13557, 13687, 13686], [13558, 13559, 13687], [13559, 13688, 13687], [13559, 13560, 13689], [13559, 13689, 13688], [13560, 13561, 13689], [13561, 13690, 13689], [13561, 13562, 13691], [13561, 13691, 13690], [13562, 13563, 13691], [13563, 13692, 13691], [13563, 13564, 13693], [13563, 13693, 13692], [13564, 13565, 13693], [13565, 13694, 13693], [13565, 13566, 13695], [13565, 13695, 13694], [13566, 13567, 13695], [13567, 13696, 13695], [13567, 13568, 13697], [13567, 13697, 13696], [13568, 13569, 13697], [13569, 13698, 13697], [13569, 13570, 13699], [13569, 13699, 13698], [13570, 13571, 13699], [13571, 13700, 13699], [13571, 13572, 13701], [13571, 13701, 13700], [13572, 13573, 13701], [13573, 13702, 13701], [13573, 13574, 13703], [13573, 13703, 13702], [13574, 13575, 13703], [13575, 13704, 13703], [13575, 13576, 13705], [13575, 13705, 13704], [13576, 13577, 13705], [13577, 13706, 13705], [13577, 13578, 13707], [13577, 13707, 13706], [13578, 13579, 13707], [13579, 13708, 13707], [13579, 13580, 13709], [13579, 13709, 13708], [13580, 13581, 13709], [13581, 13710, 13709], [13581, 13582, 13711], [13581, 13711, 13710], [13582, 13583, 13711], [13583, 13712, 13711], [13583, 13584, 13713], [13583, 13713, 13712], [13584, 13585, 13713], [13585, 13714, 13713], [13585, 13586, 13715], [13585, 13715, 13714], [13586, 13587, 13715], [13587, 13716, 13715], [13587, 13588, 13717], [13587, 13717, 13716], [13588, 13589, 13717], [13589, 13718, 13717], [13589, 13590, 13719], [13589, 13719, 13718], 
[13591, 13592, 13721], [13591, 13721, 13720], [13592, 13593, 13721], [13593, 13722, 13721], [13593, 13594, 13723], [13593, 13723, 13722], [13594, 13595, 13723], [13595, 13724, 13723], [13595, 13596, 13725], [13595, 13725, 13724], [13596, 13597, 13725], [13597, 13726, 13725], [13597, 13598, 13727], [13597, 13727, 13726], [13598, 13599, 13727], [13599, 13728, 13727], [13599, 13600, 13729], [13599, 13729, 13728], [13600, 13601, 13729], [13601, 13730, 13729], [13601, 13602, 13731], [13601, 13731, 13730], [13602, 13603, 13731], [13603, 13732, 13731], [13603, 13604, 13733], [13603, 13733, 13732], [13604, 13605, 13733], [13605, 13734, 13733], [13605, 13606, 13735], [13605, 13735, 13734], [13606, 13607, 13735], [13607, 13736, 13735], [13607, 13608, 13737], [13607, 13737, 13736], [13608, 13609, 13737], [13609, 13738, 13737], [13609, 13610, 13739], [13609, 13739, 13738], [13610, 13611, 13739], [13611, 13740, 13739], [13611, 13612, 13741], [13611, 13741, 13740], [13612, 13613, 13741], [13613, 13742, 13741], [13613, 13614, 13743], [13613, 13743, 13742], [13614, 13615, 13743], [13615, 13744, 13743], [13615, 13616, 13745], [13615, 13745, 13744], [13616, 13617, 13745], [13617, 13746, 13745], [13617, 13618, 13747], [13617, 13747, 13746], [13618, 13619, 13747], [13619, 13748, 13747], [13619, 13620, 13749], [13619, 13749, 13748], [13620, 13621, 13749], [13621, 13750, 13749], [13621, 13622, 13751], [13621, 13751, 13750], [13622, 13623, 13751], [13623, 13752, 13751], [13623, 13624, 13753], [13623, 13753, 13752], [13624, 13625, 13753], [13625, 13754, 13753], [13625, 13626, 13755], [13625, 13755, 13754], [13626, 13627, 13755], [13627, 13756, 13755], [13627, 13628, 13757], [13627, 13757, 13756], [13628, 13629, 13757], [13629, 13758, 13757], [13629, 13630, 13759], [13629, 13759, 13758], [13630, 13631, 13759], [13631, 13760, 13759], [13631, 13632, 13761], [13631, 13761, 13760], [13632, 13633, 13761], [13633, 13762, 13761], [13633, 13634, 13763], [13633, 13763, 13762], [13634, 13635, 13763], [13635, 13764, 13763], [13635, 13636, 13765], [13635, 13765, 13764], [13636, 13637, 13765], [13637, 13766, 13765], [13637, 13638, 13767], [13637, 13767, 13766], [13638, 13639, 13767], [13639, 13768, 13767], [13639, 13640, 13769], [13639, 13769, 13768], [13640, 13641, 13769], [13641, 13770, 13769], [13641, 13642, 13771], [13641, 13771, 13770], [13642, 13643, 13771], [13643, 13772, 13771], [13643, 13644, 13773], [13643, 13773, 13772], [13644, 13645, 13773], [13645, 13774, 13773], [13645, 13646, 13775], [13645, 13775, 13774], [13646, 13647, 13775], [13647, 13776, 13775], [13647, 13648, 13777], [13647, 13777, 13776], [13648, 13649, 13777], [13649, 13778, 13777], [13649, 13650, 13779], [13649, 13779, 13778], [13650, 13651, 13779], [13651, 13780, 13779], [13651, 13652, 13781], [13651, 13781, 13780], [13652, 13653, 13781], [13653, 13782, 13781], [13653, 13654, 13783], [13653, 13783, 13782], [13654, 13655, 13783], [13655, 13784, 13783], [13655, 13656, 13785], [13655, 13785, 13784], [13656, 13657, 13785], [13657, 13786, 13785], [13657, 13658, 13787], [13657, 13787, 13786], [13658, 13659, 13787], [13659, 13788, 13787], [13659, 13660, 13789], [13659, 13789, 13788], [13660, 13661, 13789], [13661, 13790, 13789], [13661, 13662, 13791], [13661, 13791, 13790], [13662, 13663, 13791], [13663, 13792, 13791], [13663, 13664, 13793], [13663, 13793, 13792], [13664, 13665, 13793], [13665, 13794, 13793], [13665, 13666, 13795], [13665, 13795, 13794], [13666, 13667, 13795], [13667, 13796, 13795], [13667, 13668, 13797], [13667, 13797, 13796], [13668, 
13669, 13797], [13669, 13798, 13797], [13669, 13670, 13799], [13669, 13799, 13798], [13670, 13671, 13799], [13671, 13800, 13799], [13671, 13672, 13801], [13671, 13801, 13800], [13672, 13673, 13801], [13673, 13802, 13801], [13673, 13674, 13803], [13673, 13803, 13802], [13674, 13675, 13803], [13675, 13804, 13803], [13675, 13676, 13805], [13675, 13805, 13804], [13676, 13677, 13805], [13677, 13806, 13805], [13677, 13678, 13807], [13677, 13807, 13806], [13678, 13679, 13807], [13679, 13808, 13807], [13679, 13680, 13809], [13679, 13809, 13808], [13680, 13681, 13809], [13681, 13810, 13809], [13681, 13682, 13811], [13681, 13811, 13810], [13682, 13683, 13811], [13683, 13812, 13811], [13683, 13684, 13813], [13683, 13813, 13812], [13684, 13685, 13813], [13685, 13814, 13813], [13685, 13686, 13815], [13685, 13815, 13814], [13686, 13687, 13815], [13687, 13816, 13815], [13687, 13688, 13817], [13687, 13817, 13816], [13688, 13689, 13817], [13689, 13818, 13817], [13689, 13690, 13819], [13689, 13819, 13818], [13690, 13691, 13819], [13691, 13820, 13819], [13691, 13692, 13821], [13691, 13821, 13820], [13692, 13693, 13821], [13693, 13822, 13821], [13693, 13694, 13823], [13693, 13823, 13822], [13694, 13695, 13823], [13695, 13824, 13823], [13695, 13696, 13825], [13695, 13825, 13824], [13696, 13697, 13825], [13697, 13826, 13825], [13697, 13698, 13827], [13697, 13827, 13826], [13698, 13699, 13827], [13699, 13828, 13827], [13699, 13700, 13829], [13699, 13829, 13828], [13700, 13701, 13829], [13701, 13830, 13829], [13701, 13702, 13831], [13701, 13831, 13830], [13702, 13703, 13831], [13703, 13832, 13831], [13703, 13704, 13833], [13703, 13833, 13832], [13704, 13705, 13833], [13705, 13834, 13833], [13705, 13706, 13835], [13705, 13835, 13834], [13706, 13707, 13835], [13707, 13836, 13835], [13707, 13708, 13837], [13707, 13837, 13836], [13708, 13709, 13837], [13709, 13838, 13837], [13709, 13710, 13839], [13709, 13839, 13838], [13710, 13711, 13839], [13711, 13840, 13839], [13711, 13712, 13841], [13711, 13841, 13840], [13712, 13713, 13841], [13713, 13842, 13841], [13713, 13714, 13843], [13713, 13843, 13842], [13714, 13715, 13843], [13715, 13844, 13843], [13715, 13716, 13845], [13715, 13845, 13844], [13716, 13717, 13845], [13717, 13846, 13845], [13717, 13718, 13847], [13717, 13847, 13846], [13718, 13719, 13847], [13719, 13848, 13847], [13720, 13721, 13849], [13721, 13850, 13849], [13721, 13722, 13851], [13721, 13851, 13850], [13722, 13723, 13851], [13723, 13852, 13851], [13723, 13724, 13853], [13723, 13853, 13852], [13724, 13725, 13853], [13725, 13854, 13853], [13725, 13726, 13855], [13725, 13855, 13854], [13726, 13727, 13855], [13727, 13856, 13855], [13727, 13728, 13857], [13727, 13857, 13856], [13728, 13729, 13857], [13729, 13858, 13857], [13729, 13730, 13859], [13729, 13859, 13858], [13730, 13731, 13859], [13731, 13860, 13859], [13731, 13732, 13861], [13731, 13861, 13860], [13732, 13733, 13861], [13733, 13862, 13861], [13733, 13734, 13863], [13733, 13863, 13862], [13734, 13735, 13863], [13735, 13864, 13863], [13735, 13736, 13865], [13735, 13865, 13864], [13736, 13737, 13865], [13737, 13866, 13865], [13737, 13738, 13867], [13737, 13867, 13866], [13738, 13739, 13867], [13739, 13868, 13867], [13739, 13740, 13869], [13739, 13869, 13868], [13740, 13741, 13869], [13741, 13870, 13869], [13741, 13742, 13871], [13741, 13871, 13870], [13742, 13743, 13871], [13743, 13872, 13871], [13743, 13744, 13873], [13743, 13873, 13872], [13744, 13745, 13873], [13745, 13874, 13873], [13745, 13746, 13875], [13745, 13875, 13874], [13746, 13747, 
13875], [13747, 13876, 13875], [13747, 13748, 13877], [13747, 13877, 13876], [13748, 13749, 13877], [13749, 13878, 13877], [13749, 13750, 13879], [13749, 13879, 13878], [13750, 13751, 13879], [13751, 13880, 13879], [13751, 13752, 13881], [13751, 13881, 13880], [13752, 13753, 13881], [13753, 13882, 13881], [13753, 13754, 13883], [13753, 13883, 13882], [13754, 13755, 13883], [13755, 13884, 13883], [13755, 13756, 13885], [13755, 13885, 13884], [13756, 13757, 13885], [13757, 13886, 13885], [13757, 13758, 13887], [13757, 13887, 13886], [13758, 13759, 13887], [13759, 13888, 13887], [13759, 13760, 13889], [13759, 13889, 13888], [13760, 13761, 13889], [13761, 13890, 13889], [13761, 13762, 13891], [13761, 13891, 13890], [13762, 13763, 13891], [13763, 13892, 13891], [13763, 13764, 13893], [13763, 13893, 13892], [13764, 13765, 13893], [13765, 13894, 13893], [13765, 13766, 13895], [13765, 13895, 13894], [13766, 13767, 13895], [13767, 13896, 13895], [13767, 13768, 13897], [13767, 13897, 13896], [13768, 13769, 13897], [13769, 13898, 13897], [13769, 13770, 13899], [13769, 13899, 13898], [13770, 13771, 13899], [13771, 13900, 13899], [13771, 13772, 13901], [13771, 13901, 13900], [13772, 13773, 13901], [13773, 13902, 13901], [13773, 13774, 13903], [13773, 13903, 13902], [13774, 13775, 13903], [13775, 13904, 13903], [13775, 13776, 13905], [13775, 13905, 13904], [13776, 13777, 13905], [13777, 13906, 13905], [13777, 13778, 13907], [13777, 13907, 13906], [13778, 13779, 13907], [13779, 13908, 13907], [13779, 13780, 13909], [13779, 13909, 13908], [13780, 13781, 13909], [13781, 13910, 13909], [13781, 13782, 13911], [13781, 13911, 13910], [13782, 13783, 13911], [13783, 13912, 13911], [13783, 13784, 13913], [13783, 13913, 13912], [13784, 13785, 13913], [13785, 13914, 13913], [13785, 13786, 13915], [13785, 13915, 13914], [13786, 13787, 13915], [13787, 13916, 13915], [13787, 13788, 13917], [13787, 13917, 13916], [13788, 13789, 13917], [13789, 13918, 13917], [13789, 13790, 13919], [13789, 13919, 13918], [13790, 13791, 13919], [13791, 13920, 13919], [13791, 13792, 13921], [13791, 13921, 13920], [13792, 13793, 13921], [13793, 13922, 13921], [13793, 13794, 13923], [13793, 13923, 13922], [13794, 13795, 13923], [13795, 13924, 13923], [13795, 13796, 13925], [13795, 13925, 13924], [13796, 13797, 13925], [13797, 13926, 13925], [13797, 13798, 13927], [13797, 13927, 13926], [13798, 13799, 13927], [13799, 13928, 13927], [13799, 13800, 13929], [13799, 13929, 13928], [13800, 13801, 13929], [13801, 13930, 13929], [13801, 13802, 13931], [13801, 13931, 13930], [13802, 13803, 13931], [13803, 13932, 13931], [13803, 13804, 13933], [13803, 13933, 13932], [13804, 13805, 13933], [13805, 13934, 13933], [13805, 13806, 13935], [13805, 13935, 13934], [13806, 13807, 13935], [13807, 13936, 13935], [13807, 13808, 13937], [13807, 13937, 13936], [13808, 13809, 13937], [13809, 13938, 13937], [13809, 13810, 13939], [13809, 13939, 13938], [13810, 13811, 13939], [13811, 13940, 13939], [13811, 13812, 13941], [13811, 13941, 13940], [13812, 13813, 13941], [13813, 13942, 13941], [13813, 13814, 13943], [13813, 13943, 13942], [13814, 13815, 13943], [13815, 13944, 13943], [13815, 13816, 13945], [13815, 13945, 13944], [13816, 13817, 13945], [13817, 13946, 13945], [13817, 13818, 13947], [13817, 13947, 13946], [13818, 13819, 13947], [13819, 13948, 13947], [13819, 13820, 13949], [13819, 13949, 13948], [13820, 13821, 13949], [13821, 13950, 13949], [13821, 13822, 13951], [13821, 13951, 13950], [13822, 13823, 13951], [13823, 13952, 13951], [13823, 13824, 13953], 
[13823, 13953, 13952], [13824, 13825, 13953], [13825, 13954, 13953], [13825, 13826, 13955], [13825, 13955, 13954], [13826, 13827, 13955], [13827, 13956, 13955], [13827, 13828, 13957], [13827, 13957, 13956], [13828, 13829, 13957], [13829, 13958, 13957], [13829, 13830, 13959], [13829, 13959, 13958], [13830, 13831, 13959], [13831, 13960, 13959], [13831, 13832, 13961], [13831, 13961, 13960], [13832, 13833, 13961], [13833, 13962, 13961], [13833, 13834, 13963], [13833, 13963, 13962], [13834, 13835, 13963], [13835, 13964, 13963], [13835, 13836, 13965], [13835, 13965, 13964], [13836, 13837, 13965], [13837, 13966, 13965], [13837, 13838, 13967], [13837, 13967, 13966], [13838, 13839, 13967], [13839, 13968, 13967], [13839, 13840, 13969], [13839, 13969, 13968], [13840, 13841, 13969], [13841, 13970, 13969], [13841, 13842, 13971], [13841, 13971, 13970], [13842, 13843, 13971], [13843, 13972, 13971], [13843, 13844, 13973], [13843, 13973, 13972], [13844, 13845, 13973], [13845, 13974, 13973], [13845, 13846, 13975], [13845, 13975, 13974], [13846, 13847, 13975], [13847, 13976, 13975], [13847, 13848, 13977], [13847, 13977, 13976], [13849, 13850, 13979], [13849, 13979, 13978], [13850, 13851, 13979], [13851, 13980, 13979], [13851, 13852, 13981], [13851, 13981, 13980], [13852, 13853, 13981], [13853, 13982, 13981], [13853, 13854, 13983], [13853, 13983, 13982], [13854, 13855, 13983], [13855, 13984, 13983], [13855, 13856, 13985], [13855, 13985, 13984], [13856, 13857, 13985], [13857, 13986, 13985], [13857, 13858, 13987], [13857, 13987, 13986], [13858, 13859, 13987], [13859, 13988, 13987], [13859, 13860, 13989], [13859, 13989, 13988], [13860, 13861, 13989], [13861, 13990, 13989], [13861, 13862, 13991], [13861, 13991, 13990], [13862, 13863, 13991], [13863, 13992, 13991], [13863, 13864, 13993], [13863, 13993, 13992], [13864, 13865, 13993], [13865, 13994, 13993], [13865, 13866, 13995], [13865, 13995, 13994], [13866, 13867, 13995], [13867, 13996, 13995], [13867, 13868, 13997], [13867, 13997, 13996], [13868, 13869, 13997], [13869, 13998, 13997], [13869, 13870, 13999], [13869, 13999, 13998], [13870, 13871, 13999], [13871, 14000, 13999], [13871, 13872, 14001], [13871, 14001, 14000], [13872, 13873, 14001], [13873, 14002, 14001], [13873, 13874, 14003], [13873, 14003, 14002], [13874, 13875, 14003], [13875, 14004, 14003], [13875, 13876, 14005], [13875, 14005, 14004], [13876, 13877, 14005], [13877, 14006, 14005], [13877, 13878, 14007], [13877, 14007, 14006], [13878, 13879, 14007], [13879, 14008, 14007], [13879, 13880, 14009], [13879, 14009, 14008], [13880, 13881, 14009], [13881, 14010, 14009], [13881, 13882, 14011], [13881, 14011, 14010], [13882, 13883, 14011], [13883, 14012, 14011], [13883, 13884, 14013], [13883, 14013, 14012], [13884, 13885, 14013], [13885, 14014, 14013], [13885, 13886, 14015], [13885, 14015, 14014], [13886, 13887, 14015], [13887, 14016, 14015], [13887, 13888, 14017], [13887, 14017, 14016], [13888, 13889, 14017], [13889, 14018, 14017], [13889, 13890, 14019], [13889, 14019, 14018], [13890, 13891, 14019], [13891, 14020, 14019], [13891, 13892, 14021], [13891, 14021, 14020], [13892, 13893, 14021], [13893, 14022, 14021], [13893, 13894, 14023], [13893, 14023, 14022], [13894, 13895, 14023], [13895, 14024, 14023], [13895, 13896, 14025], [13895, 14025, 14024], [13896, 13897, 14025], [13897, 14026, 14025], [13897, 13898, 14027], [13897, 14027, 14026], [13898, 13899, 14027], [13899, 14028, 14027], [13899, 13900, 14029], [13899, 14029, 14028], [13900, 13901, 14029], [13901, 14030, 14029], [13901, 13902, 14031], [13901, 
14031, 14030], [13902, 13903, 14031], [13903, 14032, 14031], [13903, 13904, 14033], [13903, 14033, 14032], [13904, 13905, 14033], [13905, 14034, 14033], [13905, 13906, 14035], [13905, 14035, 14034], [13906, 13907, 14035], [13907, 14036, 14035], [13907, 13908, 14037], [13907, 14037, 14036], [13908, 13909, 14037], [13909, 14038, 14037], [13909, 13910, 14039], [13909, 14039, 14038], [13910, 13911, 14039], [13911, 14040, 14039], [13911, 13912, 14041], [13911, 14041, 14040], [13912, 13913, 14041], [13913, 14042, 14041], [13913, 13914, 14043], [13913, 14043, 14042], [13914, 13915, 14043], [13915, 14044, 14043], [13915, 13916, 14045], [13915, 14045, 14044], [13916, 13917, 14045], [13917, 14046, 14045], [13917, 13918, 14047], [13917, 14047, 14046], [13918, 13919, 14047], [13919, 14048, 14047], [13919, 13920, 14049], [13919, 14049, 14048], [13920, 13921, 14049], [13921, 14050, 14049], [13921, 13922, 14051], [13921, 14051, 14050], [13922, 13923, 14051], [13923, 14052, 14051], [13923, 13924, 14053], [13923, 14053, 14052], [13924, 13925, 14053], [13925, 14054, 14053], [13925, 13926, 14055], [13925, 14055, 14054], [13926, 13927, 14055], [13927, 14056, 14055], [13927, 13928, 14057], [13927, 14057, 14056], [13928, 13929, 14057], [13929, 14058, 14057], [13929, 13930, 14059], [13929, 14059, 14058], [13930, 13931, 14059], [13931, 14060, 14059], [13931, 13932, 14061], [13931, 14061, 14060], [13932, 13933, 14061], [13933, 14062, 14061], [13933, 13934, 14063], [13933, 14063, 14062], [13934, 13935, 14063], [13935, 14064, 14063], [13935, 13936, 14065], [13935, 14065, 14064], [13936, 13937, 14065], [13937, 14066, 14065], [13937, 13938, 14067], [13937, 14067, 14066], [13938, 13939, 14067], [13939, 14068, 14067], [13939, 13940, 14069], [13939, 14069, 14068], [13940, 13941, 14069], [13941, 14070, 14069], [13941, 13942, 14071], [13941, 14071, 14070], [13942, 13943, 14071], [13943, 14072, 14071], [13943, 13944, 14073], [13943, 14073, 14072], [13944, 13945, 14073], [13945, 14074, 14073], [13945, 13946, 14075], [13945, 14075, 14074], [13946, 13947, 14075], [13947, 14076, 14075], [13947, 13948, 14077], [13947, 14077, 14076], [13948, 13949, 14077], [13949, 14078, 14077], [13949, 13950, 14079], [13949, 14079, 14078], [13950, 13951, 14079], [13951, 14080, 14079], [13951, 13952, 14081], [13951, 14081, 14080], [13952, 13953, 14081], [13953, 14082, 14081], [13953, 13954, 14083], [13953, 14083, 14082], [13954, 13955, 14083], [13955, 14084, 14083], [13955, 13956, 14085], [13955, 14085, 14084], [13956, 13957, 14085], [13957, 14086, 14085], [13957, 13958, 14087], [13957, 14087, 14086], [13958, 13959, 14087], [13959, 14088, 14087], [13959, 13960, 14089], [13959, 14089, 14088], [13960, 13961, 14089], [13961, 14090, 14089], [13961, 13962, 14091], [13961, 14091, 14090], [13962, 13963, 14091], [13963, 14092, 14091], [13963, 13964, 14093], [13963, 14093, 14092], [13964, 13965, 14093], [13965, 14094, 14093], [13965, 13966, 14095], [13965, 14095, 14094], [13966, 13967, 14095], [13967, 14096, 14095], [13967, 13968, 14097], [13967, 14097, 14096], [13968, 13969, 14097], [13969, 14098, 14097], [13969, 13970, 14099], [13969, 14099, 14098], [13970, 13971, 14099], [13971, 14100, 14099], [13971, 13972, 14101], [13971, 14101, 14100], [13972, 13973, 14101], [13973, 14102, 14101], [13973, 13974, 14103], [13973, 14103, 14102], [13974, 13975, 14103], [13975, 14104, 14103], [13975, 13976, 14105], [13975, 14105, 14104], [13976, 13977, 14105], [13977, 14106, 14105], [13978, 13979, 14107], [13979, 14108, 14107], [13979, 13980, 14109], [13979, 14109, 
14108], [13980, 13981, 14109], [13981, 14110, 14109], [13981, 13982, 14111], [13981, 14111, 14110], [13982, 13983, 14111], [13983, 14112, 14111], [13983, 13984, 14113], [13983, 14113, 14112], [13984, 13985, 14113], [13985, 14114, 14113], [13985, 13986, 14115], [13985, 14115, 14114], [13986, 13987, 14115], [13987, 14116, 14115], [13987, 13988, 14117], [13987, 14117, 14116], [13988, 13989, 14117], [13989, 14118, 14117], [13989, 13990, 14119], [13989, 14119, 14118], [13990, 13991, 14119], [13991, 14120, 14119], [13991, 13992, 14121], [13991, 14121, 14120], [13992, 13993, 14121], [13993, 14122, 14121], [13993, 13994, 14123], [13993, 14123, 14122], [13994, 13995, 14123], [13995, 14124, 14123], [13995, 13996, 14125], [13995, 14125, 14124], [13996, 13997, 14125], [13997, 14126, 14125], [13997, 13998, 14127], [13997, 14127, 14126], [13998, 13999, 14127], [13999, 14128, 14127], [13999, 14000, 14129], [13999, 14129, 14128], [14000, 14001, 14129], [14001, 14130, 14129], [14001, 14002, 14131], [14001, 14131, 14130], [14002, 14003, 14131], [14003, 14132, 14131], [14003, 14004, 14133], [14003, 14133, 14132], [14004, 14005, 14133], [14005, 14134, 14133], [14005, 14006, 14135], [14005, 14135, 14134], [14006, 14007, 14135], [14007, 14136, 14135], [14007, 14008, 14137], [14007, 14137, 14136], [14008, 14009, 14137], [14009, 14138, 14137], [14009, 14010, 14139], [14009, 14139, 14138], [14010, 14011, 14139], [14011, 14140, 14139], [14011, 14012, 14141], [14011, 14141, 14140], [14012, 14013, 14141], [14013, 14142, 14141], [14013, 14014, 14143], [14013, 14143, 14142], [14014, 14015, 14143], [14015, 14144, 14143], [14015, 14016, 14145], [14015, 14145, 14144], [14016, 14017, 14145], [14017, 14146, 14145], [14017, 14018, 14147], [14017, 14147, 14146], [14018, 14019, 14147], [14019, 14148, 14147], [14019, 14020, 14149], [14019, 14149, 14148], [14020, 14021, 14149], [14021, 14150, 14149], [14021, 14022, 14151], [14021, 14151, 14150], [14022, 14023, 14151], [14023, 14152, 14151], [14023, 14024, 14153], [14023, 14153, 14152], [14024, 14025, 14153], [14025, 14154, 14153], [14025, 14026, 14155], [14025, 14155, 14154], [14026, 14027, 14155], [14027, 14156, 14155], [14027, 14028, 14157], [14027, 14157, 14156], [14028, 14029, 14157], [14029, 14158, 14157], [14029, 14030, 14159], [14029, 14159, 14158], [14030, 14031, 14159], [14031, 14160, 14159], [14031, 14032, 14161], [14031, 14161, 14160], [14032, 14033, 14161], [14033, 14162, 14161], [14033, 14034, 14163], [14033, 14163, 14162], [14034, 14035, 14163], [14035, 14164, 14163], [14035, 14036, 14165], [14035, 14165, 14164], [14036, 14037, 14165], [14037, 14166, 14165], [14037, 14038, 14167], [14037, 14167, 14166], [14038, 14039, 14167], [14039, 14168, 14167], [14039, 14040, 14169], [14039, 14169, 14168], [14040, 14041, 14169], [14041, 14170, 14169], [14041, 14042, 14171], [14041, 14171, 14170], [14042, 14043, 14171], [14043, 14172, 14171], [14043, 14044, 14173], [14043, 14173, 14172], [14044, 14045, 14173], [14045, 14174, 14173], [14045, 14046, 14175], [14045, 14175, 14174], [14046, 14047, 14175], [14047, 14176, 14175], [14047, 14048, 14177], [14047, 14177, 14176], [14048, 14049, 14177], [14049, 14178, 14177], [14049, 14050, 14179], [14049, 14179, 14178], [14050, 14051, 14179], [14051, 14180, 14179], [14051, 14052, 14181], [14051, 14181, 14180], [14052, 14053, 14181], [14053, 14182, 14181], [14053, 14054, 14183], [14053, 14183, 14182], [14054, 14055, 14183], [14055, 14184, 14183], [14055, 14056, 14185], [14055, 14185, 14184], [14056, 14057, 14185], [14057, 14186, 14185], 
[14057, 14058, 14187], [14057, 14187, 14186], [14058, 14059, 14187], [14059, 14188, 14187], [14059, 14060, 14189], [14059, 14189, 14188], [14060, 14061, 14189], [14061, 14190, 14189], [14061, 14062, 14191], [14061, 14191, 14190], [14062, 14063, 14191], [14063, 14192, 14191], [14063, 14064, 14193], [14063, 14193, 14192], [14064, 14065, 14193], [14065, 14194, 14193], [14065, 14066, 14195], [14065, 14195, 14194], [14066, 14067, 14195], [14067, 14196, 14195], [14067, 14068, 14197], [14067, 14197, 14196], [14068, 14069, 14197], [14069, 14198, 14197], [14069, 14070, 14199], [14069, 14199, 14198], [14070, 14071, 14199], [14071, 14200, 14199], [14071, 14072, 14201], [14071, 14201, 14200], [14072, 14073, 14201], [14073, 14202, 14201], [14073, 14074, 14203], [14073, 14203, 14202], [14074, 14075, 14203], [14075, 14204, 14203], [14075, 14076, 14205], [14075, 14205, 14204], [14076, 14077, 14205], [14077, 14206, 14205], [14077, 14078, 14207], [14077, 14207, 14206], [14078, 14079, 14207], [14079, 14208, 14207], [14079, 14080, 14209], [14079, 14209, 14208], [14080, 14081, 14209], [14081, 14210, 14209], [14081, 14082, 14211], [14081, 14211, 14210], [14082, 14083, 14211], [14083, 14212, 14211], [14083, 14084, 14213], [14083, 14213, 14212], [14084, 14085, 14213], [14085, 14214, 14213], [14085, 14086, 14215], [14085, 14215, 14214], [14086, 14087, 14215], [14087, 14216, 14215], [14087, 14088, 14217], [14087, 14217, 14216], [14088, 14089, 14217], [14089, 14218, 14217], [14089, 14090, 14219], [14089, 14219, 14218], [14090, 14091, 14219], [14091, 14220, 14219], [14091, 14092, 14221], [14091, 14221, 14220], [14092, 14093, 14221], [14093, 14222, 14221], [14093, 14094, 14223], [14093, 14223, 14222], [14094, 14095, 14223], [14095, 14224, 14223], [14095, 14096, 14225], [14095, 14225, 14224], [14096, 14097, 14225], [14097, 14226, 14225], [14097, 14098, 14227], [14097, 14227, 14226], [14098, 14099, 14227], [14099, 14228, 14227], [14099, 14100, 14229], [14099, 14229, 14228], [14100, 14101, 14229], [14101, 14230, 14229], [14101, 14102, 14231], [14101, 14231, 14230], [14102, 14103, 14231], [14103, 14232, 14231], [14103, 14104, 14233], [14103, 14233, 14232], [14104, 14105, 14233], [14105, 14234, 14233], [14105, 14106, 14235], [14105, 14235, 14234], [14107, 14108, 14237], [14107, 14237, 14236], [14108, 14109, 14237], [14109, 14238, 14237], [14109, 14110, 14239], [14109, 14239, 14238], [14110, 14111, 14239], [14111, 14240, 14239], [14111, 14112, 14241], [14111, 14241, 14240], [14112, 14113, 14241], [14113, 14242, 14241], [14113, 14114, 14243], [14113, 14243, 14242], [14114, 14115, 14243], [14115, 14244, 14243], [14115, 14116, 14245], [14115, 14245, 14244], [14116, 14117, 14245], [14117, 14246, 14245], [14117, 14118, 14247], [14117, 14247, 14246], [14118, 14119, 14247], [14119, 14248, 14247], [14119, 14120, 14249], [14119, 14249, 14248], [14120, 14121, 14249], [14121, 14250, 14249], [14121, 14122, 14251], [14121, 14251, 14250], [14122, 14123, 14251], [14123, 14252, 14251], [14123, 14124, 14253], [14123, 14253, 14252], [14124, 14125, 14253], [14125, 14254, 14253], [14125, 14126, 14255], [14125, 14255, 14254], [14126, 14127, 14255], [14127, 14256, 14255], [14127, 14128, 14257], [14127, 14257, 14256], [14128, 14129, 14257], [14129, 14258, 14257], [14129, 14130, 14259], [14129, 14259, 14258], [14130, 14131, 14259], [14131, 14260, 14259], [14131, 14132, 14261], [14131, 14261, 14260], [14132, 14133, 14261], [14133, 14262, 14261], [14133, 14134, 14263], [14133, 14263, 14262], [14134, 14135, 14263], [14135, 14264, 14263], [14135, 
14136, 14265], [14135, 14265, 14264], [14136, 14137, 14265], [14137, 14266, 14265], [14137, 14138, 14267], [14137, 14267, 14266], [14138, 14139, 14267], [14139, 14268, 14267], [14139, 14140, 14269], [14139, 14269, 14268], [14140, 14141, 14269], [14141, 14270, 14269], [14141, 14142, 14271], [14141, 14271, 14270], [14142, 14143, 14271], [14143, 14272, 14271], [14143, 14144, 14273], [14143, 14273, 14272], [14144, 14145, 14273], [14145, 14274, 14273], [14145, 14146, 14275], [14145, 14275, 14274], [14146, 14147, 14275], [14147, 14276, 14275], [14147, 14148, 14277], [14147, 14277, 14276], [14148, 14149, 14277], [14149, 14278, 14277], [14149, 14150, 14279], [14149, 14279, 14278], [14150, 14151, 14279], [14151, 14280, 14279], [14151, 14152, 14281], [14151, 14281, 14280], [14152, 14153, 14281], [14153, 14282, 14281], [14153, 14154, 14283], [14153, 14283, 14282], [14154, 14155, 14283], [14155, 14284, 14283], [14155, 14156, 14285], [14155, 14285, 14284], [14156, 14157, 14285], [14157, 14286, 14285], [14157, 14158, 14287], [14157, 14287, 14286], [14158, 14159, 14287], [14159, 14288, 14287], [14159, 14160, 14289], [14159, 14289, 14288], [14160, 14161, 14289], [14161, 14290, 14289], [14161, 14162, 14291], [14161, 14291, 14290], [14162, 14163, 14291], [14163, 14292, 14291], [14163, 14164, 14293], [14163, 14293, 14292], [14164, 14165, 14293], [14165, 14294, 14293], [14165, 14166, 14295], [14165, 14295, 14294], [14166, 14167, 14295], [14167, 14296, 14295], [14167, 14168, 14297], [14167, 14297, 14296], [14168, 14169, 14297], [14169, 14298, 14297], [14169, 14170, 14299], [14169, 14299, 14298], [14170, 14171, 14299], [14171, 14300, 14299], [14171, 14172, 14301], [14171, 14301, 14300], [14172, 14173, 14301], [14173, 14302, 14301], [14173, 14174, 14303], [14173, 14303, 14302], [14174, 14175, 14303], [14175, 14304, 14303], [14175, 14176, 14305], [14175, 14305, 14304], [14176, 14177, 14305], [14177, 14306, 14305], [14177, 14178, 14307], [14177, 14307, 14306], [14178, 14179, 14307], [14179, 14308, 14307], [14179, 14180, 14309], [14179, 14309, 14308], [14180, 14181, 14309], [14181, 14310, 14309], [14181, 14182, 14311], [14181, 14311, 14310], [14182, 14183, 14311], [14183, 14312, 14311], [14183, 14184, 14313], [14183, 14313, 14312], [14184, 14185, 14313], [14185, 14314, 14313], [14185, 14186, 14315], [14185, 14315, 14314], [14186, 14187, 14315], [14187, 14316, 14315], [14187, 14188, 14317], [14187, 14317, 14316], [14188, 14189, 14317], [14189, 14318, 14317], [14189, 14190, 14319], [14189, 14319, 14318], [14190, 14191, 14319], [14191, 14320, 14319], [14191, 14192, 14321], [14191, 14321, 14320], [14192, 14193, 14321], [14193, 14322, 14321], [14193, 14194, 14323], [14193, 14323, 14322], [14194, 14195, 14323], [14195, 14324, 14323], [14195, 14196, 14325], [14195, 14325, 14324], [14196, 14197, 14325], [14197, 14326, 14325], [14197, 14198, 14327], [14197, 14327, 14326], [14198, 14199, 14327], [14199, 14328, 14327], [14199, 14200, 14329], [14199, 14329, 14328], [14200, 14201, 14329], [14201, 14330, 14329], [14201, 14202, 14331], [14201, 14331, 14330], [14202, 14203, 14331], [14203, 14332, 14331], [14203, 14204, 14333], [14203, 14333, 14332], [14204, 14205, 14333], [14205, 14334, 14333], [14205, 14206, 14335], [14205, 14335, 14334], [14206, 14207, 14335], [14207, 14336, 14335], [14207, 14208, 14337], [14207, 14337, 14336], [14208, 14209, 14337], [14209, 14338, 14337], [14209, 14210, 14339], [14209, 14339, 14338], [14210, 14211, 14339], [14211, 14340, 14339], [14211, 14212, 14341], [14211, 14341, 14340], [14212, 14213, 
14341], [14213, 14342, 14341], [14213, 14214, 14343], [14213, 14343, 14342], [14214, 14215, 14343], [14215, 14344, 14343], [14215, 14216, 14345], [14215, 14345, 14344], [14216, 14217, 14345], [14217, 14346, 14345], [14217, 14218, 14347], [14217, 14347, 14346], [14218, 14219, 14347], [14219, 14348, 14347], [14219, 14220, 14349], [14219, 14349, 14348], [14220, 14221, 14349], [14221, 14350, 14349], [14221, 14222, 14351], [14221, 14351, 14350], [14222, 14223, 14351], [14223, 14352, 14351], [14223, 14224, 14353], [14223, 14353, 14352], [14224, 14225, 14353], [14225, 14354, 14353], [14225, 14226, 14355], [14225, 14355, 14354], [14226, 14227, 14355], [14227, 14356, 14355], [14227, 14228, 14357], [14227, 14357, 14356], [14228, 14229, 14357], [14229, 14358, 14357], [14229, 14230, 14359], [14229, 14359, 14358], [14230, 14231, 14359], [14231, 14360, 14359], [14231, 14232, 14361], [14231, 14361, 14360], [14232, 14233, 14361], [14233, 14362, 14361], [14233, 14234, 14363], [14233, 14363, 14362], [14234, 14235, 14363], [14235, 14364, 14363], [14236, 14237, 14365], [14237, 14366, 14365], [14237, 14238, 14367], [14237, 14367, 14366], [14238, 14239, 14367], [14239, 14368, 14367], [14239, 14240, 14369], [14239, 14369, 14368], [14240, 14241, 14369], [14241, 14370, 14369], [14241, 14242, 14371], [14241, 14371, 14370], [14242, 14243, 14371], [14243, 14372, 14371], [14243, 14244, 14373], [14243, 14373, 14372], [14244, 14245, 14373], [14245, 14374, 14373], [14245, 14246, 14375], [14245, 14375, 14374], [14246, 14247, 14375], [14247, 14376, 14375], [14247, 14248, 14377], [14247, 14377, 14376], [14248, 14249, 14377], [14249, 14378, 14377], [14249, 14250, 14379], [14249, 14379, 14378], [14250, 14251, 14379], [14251, 14380, 14379], [14251, 14252, 14381], [14251, 14381, 14380], [14252, 14253, 14381], [14253, 14382, 14381], [14253, 14254, 14383], [14253, 14383, 14382], [14254, 14255, 14383], [14255, 14384, 14383], [14255, 14256, 14385], [14255, 14385, 14384], [14256, 14257, 14385], [14257, 14386, 14385], [14257, 14258, 14387], [14257, 14387, 14386], [14258, 14259, 14387], [14259, 14388, 14387], [14259, 14260, 14389], [14259, 14389, 14388], [14260, 14261, 14389], [14261, 14390, 14389], [14261, 14262, 14391], [14261, 14391, 14390], [14262, 14263, 14391], [14263, 14392, 14391], [14263, 14264, 14393], [14263, 14393, 14392], [14264, 14265, 14393], [14265, 14394, 14393], [14265, 14266, 14395], [14265, 14395, 14394], [14266, 14267, 14395], [14267, 14396, 14395], [14267, 14268, 14397], [14267, 14397, 14396], [14268, 14269, 14397], [14269, 14398, 14397], [14269, 14270, 14399], [14269, 14399, 14398], [14270, 14271, 14399], [14271, 14400, 14399], [14271, 14272, 14401], [14271, 14401, 14400], [14272, 14273, 14401], [14273, 14402, 14401], [14273, 14274, 14403], [14273, 14403, 14402], [14274, 14275, 14403], [14275, 14404, 14403], [14275, 14276, 14405], [14275, 14405, 14404], [14276, 14277, 14405], [14277, 14406, 14405], [14277, 14278, 14407], [14277, 14407, 14406], [14278, 14279, 14407], [14279, 14408, 14407], [14279, 14280, 14409], [14279, 14409, 14408], [14280, 14281, 14409], [14281, 14410, 14409], [14281, 14282, 14411], [14281, 14411, 14410], [14282, 14283, 14411], [14283, 14412, 14411], [14283, 14284, 14413], [14283, 14413, 14412], [14284, 14285, 14413], [14285, 14414, 14413], [14285, 14286, 14415], [14285, 14415, 14414], [14286, 14287, 14415], [14287, 14416, 14415], [14287, 14288, 14417], [14287, 14417, 14416], [14288, 14289, 14417], [14289, 14418, 14417], [14289, 14290, 14419], [14289, 14419, 14418], [14290, 14291, 14419], 
[14291, 14420, 14419], [14291, 14292, 14421], [14291, 14421, 14420], [14292, 14293, 14421], [14293, 14422, 14421], [14293, 14294, 14423], [14293, 14423, 14422], [14294, 14295, 14423], [14295, 14424, 14423], [14295, 14296, 14425], [14295, 14425, 14424], [14296, 14297, 14425], [14297, 14426, 14425], [14297, 14298, 14427], [14297, 14427, 14426], [14298, 14299, 14427], [14299, 14428, 14427], [14299, 14300, 14429], [14299, 14429, 14428], [14300, 14301, 14429], [14301, 14430, 14429], [14301, 14302, 14431], [14301, 14431, 14430], [14302, 14303, 14431], [14303, 14432, 14431], [14303, 14304, 14433], [14303, 14433, 14432], [14304, 14305, 14433], [14305, 14434, 14433], [14305, 14306, 14435], [14305, 14435, 14434], [14306, 14307, 14435], [14307, 14436, 14435], [14307, 14308, 14437], [14307, 14437, 14436], [14308, 14309, 14437], [14309, 14438, 14437], [14309, 14310, 14439], [14309, 14439, 14438], [14310, 14311, 14439], [14311, 14440, 14439], [14311, 14312, 14441], [14311, 14441, 14440], [14312, 14313, 14441], [14313, 14442, 14441], [14313, 14314, 14443], [14313, 14443, 14442], [14314, 14315, 14443], [14315, 14444, 14443], [14315, 14316, 14445], [14315, 14445, 14444], [14316, 14317, 14445], [14317, 14446, 14445], [14317, 14318, 14447], [14317, 14447, 14446], [14318, 14319, 14447], [14319, 14448, 14447], [14319, 14320, 14449], [14319, 14449, 14448], [14320, 14321, 14449], [14321, 14450, 14449], [14321, 14322, 14451], [14321, 14451, 14450], [14322, 14323, 14451], [14323, 14452, 14451], [14323, 14324, 14453], [14323, 14453, 14452], [14324, 14325, 14453], [14325, 14454, 14453], [14325, 14326, 14455], [14325, 14455, 14454], [14326, 14327, 14455], [14327, 14456, 14455], [14327, 14328, 14457], [14327, 14457, 14456], [14328, 14329, 14457], [14329, 14458, 14457], [14329, 14330, 14459], [14329, 14459, 14458], [14330, 14331, 14459], [14331, 14460, 14459], [14331, 14332, 14461], [14331, 14461, 14460], [14332, 14333, 14461], [14333, 14462, 14461], [14333, 14334, 14463], [14333, 14463, 14462], [14334, 14335, 14463], [14335, 14464, 14463], [14335, 14336, 14465], [14335, 14465, 14464], [14336, 14337, 14465], [14337, 14466, 14465], [14337, 14338, 14467], [14337, 14467, 14466], [14338, 14339, 14467], [14339, 14468, 14467], [14339, 14340, 14469], [14339, 14469, 14468], [14340, 14341, 14469], [14341, 14470, 14469], [14341, 14342, 14471], [14341, 14471, 14470], [14342, 14343, 14471], [14343, 14472, 14471], [14343, 14344, 14473], [14343, 14473, 14472], [14344, 14345, 14473], [14345, 14474, 14473], [14345, 14346, 14475], [14345, 14475, 14474], [14346, 14347, 14475], [14347, 14476, 14475], [14347, 14348, 14477], [14347, 14477, 14476], [14348, 14349, 14477], [14349, 14478, 14477], [14349, 14350, 14479], [14349, 14479, 14478], [14350, 14351, 14479], [14351, 14480, 14479], [14351, 14352, 14481], [14351, 14481, 14480], [14352, 14353, 14481], [14353, 14482, 14481], [14353, 14354, 14483], [14353, 14483, 14482], [14354, 14355, 14483], [14355, 14484, 14483], [14355, 14356, 14485], [14355, 14485, 14484], [14356, 14357, 14485], [14357, 14486, 14485], [14357, 14358, 14487], [14357, 14487, 14486], [14358, 14359, 14487], [14359, 14488, 14487], [14359, 14360, 14489], [14359, 14489, 14488], [14360, 14361, 14489], [14361, 14490, 14489], [14361, 14362, 14491], [14361, 14491, 14490], [14362, 14363, 14491], [14363, 14492, 14491], [14363, 14364, 14493], [14363, 14493, 14492], [14365, 14366, 14495], [14365, 14495, 14494], [14366, 14367, 14495], [14367, 14496, 14495], [14367, 14368, 14497], [14367, 14497, 14496], [14368, 14369, 14497], [14369, 
14498, 14497], [14369, 14370, 14499], [14369, 14499, 14498], [14370, 14371, 14499], [14371, 14500, 14499], [14371, 14372, 14501], [14371, 14501, 14500], [14372, 14373, 14501], [14373, 14502, 14501], [14373, 14374, 14503], [14373, 14503, 14502], [14374, 14375, 14503], [14375, 14504, 14503], [14375, 14376, 14505], [14375, 14505, 14504], [14376, 14377, 14505], [14377, 14506, 14505], [14377, 14378, 14507], [14377, 14507, 14506], [14378, 14379, 14507], [14379, 14508, 14507], [14379, 14380, 14509], [14379, 14509, 14508], [14380, 14381, 14509], [14381, 14510, 14509], [14381, 14382, 14511], [14381, 14511, 14510], [14382, 14383, 14511], [14383, 14512, 14511], [14383, 14384, 14513], [14383, 14513, 14512], [14384, 14385, 14513], [14385, 14514, 14513], [14385, 14386, 14515], [14385, 14515, 14514], [14386, 14387, 14515], [14387, 14516, 14515], [14387, 14388, 14517], [14387, 14517, 14516], [14388, 14389, 14517], [14389, 14518, 14517], [14389, 14390, 14519], [14389, 14519, 14518], [14390, 14391, 14519], [14391, 14520, 14519], [14391, 14392, 14521], [14391, 14521, 14520], [14392, 14393, 14521], [14393, 14522, 14521], [14393, 14394, 14523], [14393, 14523, 14522], [14394, 14395, 14523], [14395, 14524, 14523], [14395, 14396, 14525], [14395, 14525, 14524], [14396, 14397, 14525], [14397, 14526, 14525], [14397, 14398, 14527], [14397, 14527, 14526], [14398, 14399, 14527], [14399, 14528, 14527], [14399, 14400, 14529], [14399, 14529, 14528], [14400, 14401, 14529], [14401, 14530, 14529], [14401, 14402, 14531], [14401, 14531, 14530], [14402, 14403, 14531], [14403, 14532, 14531], [14403, 14404, 14533], [14403, 14533, 14532], [14404, 14405, 14533], [14405, 14534, 14533], [14405, 14406, 14535], [14405, 14535, 14534], [14406, 14407, 14535], [14407, 14536, 14535], [14407, 14408, 14537], [14407, 14537, 14536], [14408, 14409, 14537], [14409, 14538, 14537], [14409, 14410, 14539], [14409, 14539, 14538], [14410, 14411, 14539], [14411, 14540, 14539], [14411, 14412, 14541], [14411, 14541, 14540], [14412, 14413, 14541], [14413, 14542, 14541], [14413, 14414, 14543], [14413, 14543, 14542], [14414, 14415, 14543], [14415, 14544, 14543], [14415, 14416, 14545], [14415, 14545, 14544], [14416, 14417, 14545], [14417, 14546, 14545], [14417, 14418, 14547], [14417, 14547, 14546], [14418, 14419, 14547], [14419, 14548, 14547], [14419, 14420, 14549], [14419, 14549, 14548], [14420, 14421, 14549], [14421, 14550, 14549], [14421, 14422, 14551], [14421, 14551, 14550], [14422, 14423, 14551], [14423, 14552, 14551], [14423, 14424, 14553], [14423, 14553, 14552], [14424, 14425, 14553], [14425, 14554, 14553], [14425, 14426, 14555], [14425, 14555, 14554], [14426, 14427, 14555], [14427, 14556, 14555], [14427, 14428, 14557], [14427, 14557, 14556], [14428, 14429, 14557], [14429, 14558, 14557], [14429, 14430, 14559], [14429, 14559, 14558], [14430, 14431, 14559], [14431, 14560, 14559], [14431, 14432, 14561], [14431, 14561, 14560], [14432, 14433, 14561], [14433, 14562, 14561], [14433, 14434, 14563], [14433, 14563, 14562], [14434, 14435, 14563], [14435, 14564, 14563], [14435, 14436, 14565], [14435, 14565, 14564], [14436, 14437, 14565], [14437, 14566, 14565], [14437, 14438, 14567], [14437, 14567, 14566], [14438, 14439, 14567], [14439, 14568, 14567], [14439, 14440, 14569], [14439, 14569, 14568], [14440, 14441, 14569], [14441, 14570, 14569], [14441, 14442, 14571], [14441, 14571, 14570], [14442, 14443, 14571], [14443, 14572, 14571], [14443, 14444, 14573], [14443, 14573, 14572], [14444, 14445, 14573], [14445, 14574, 14573], [14445, 14446, 14575], [14445, 14575, 
14574], [14446, 14447, 14575], [14447, 14576, 14575], [14447, 14448, 14577], [14447, 14577, 14576], [14448, 14449, 14577], [14449, 14578, 14577], [14449, 14450, 14579], [14449, 14579, 14578], [14450, 14451, 14579], [14451, 14580, 14579], [14451, 14452, 14581], [14451, 14581, 14580], [14452, 14453, 14581], [14453, 14582, 14581], [14453, 14454, 14583], [14453, 14583, 14582], [14454, 14455, 14583], [14455, 14584, 14583], [14455, 14456, 14585], [14455, 14585, 14584], [14456, 14457, 14585], [14457, 14586, 14585], [14457, 14458, 14587], [14457, 14587, 14586], [14458, 14459, 14587], [14459, 14588, 14587], [14459, 14460, 14589], [14459, 14589, 14588], [14460, 14461, 14589], [14461, 14590, 14589], [14461, 14462, 14591], [14461, 14591, 14590], [14462, 14463, 14591], [14463, 14592, 14591], [14463, 14464, 14593], [14463, 14593, 14592], [14464, 14465, 14593], [14465, 14594, 14593], [14465, 14466, 14595], [14465, 14595, 14594], [14466, 14467, 14595], [14467, 14596, 14595], [14467, 14468, 14597], [14467, 14597, 14596], [14468, 14469, 14597], [14469, 14598, 14597], [14469, 14470, 14599], [14469, 14599, 14598], [14470, 14471, 14599], [14471, 14600, 14599], [14471, 14472, 14601], [14471, 14601, 14600], [14472, 14473, 14601], [14473, 14602, 14601], [14473, 14474, 14603], [14473, 14603, 14602], [14474, 14475, 14603], [14475, 14604, 14603], [14475, 14476, 14605], [14475, 14605, 14604], [14476, 14477, 14605], [14477, 14606, 14605], [14477, 14478, 14607], [14477, 14607, 14606], [14478, 14479, 14607], [14479, 14608, 14607], [14479, 14480, 14609], [14479, 14609, 14608], [14480, 14481, 14609], [14481, 14610, 14609], [14481, 14482, 14611], [14481, 14611, 14610], [14482, 14483, 14611], [14483, 14612, 14611], [14483, 14484, 14613], [14483, 14613, 14612], [14484, 14485, 14613], [14485, 14614, 14613], [14485, 14486, 14615], [14485, 14615, 14614], [14486, 14487, 14615], [14487, 14616, 14615], [14487, 14488, 14617], [14487, 14617, 14616], [14488, 14489, 14617], [14489, 14618, 14617], [14489, 14490, 14619], [14489, 14619, 14618], [14490, 14491, 14619], [14491, 14620, 14619], [14491, 14492, 14621], [14491, 14621, 14620], [14492, 14493, 14621], [14493, 14622, 14621], [14494, 14495, 14623], [14495, 14624, 14623], [14495, 14496, 14625], [14495, 14625, 14624], [14496, 14497, 14625], [14497, 14626, 14625], [14497, 14498, 14627], [14497, 14627, 14626], [14498, 14499, 14627], [14499, 14628, 14627], [14499, 14500, 14629], [14499, 14629, 14628], [14500, 14501, 14629], [14501, 14630, 14629], [14501, 14502, 14631], [14501, 14631, 14630], [14502, 14503, 14631], [14503, 14632, 14631], [14503, 14504, 14633], [14503, 14633, 14632], [14504, 14505, 14633], [14505, 14634, 14633], [14505, 14506, 14635], [14505, 14635, 14634], [14506, 14507, 14635], [14507, 14636, 14635], [14507, 14508, 14637], [14507, 14637, 14636], [14508, 14509, 14637], [14509, 14638, 14637], [14509, 14510, 14639], [14509, 14639, 14638], [14510, 14511, 14639], [14511, 14640, 14639], [14511, 14512, 14641], [14511, 14641, 14640], [14512, 14513, 14641], [14513, 14642, 14641], [14513, 14514, 14643], [14513, 14643, 14642], [14514, 14515, 14643], [14515, 14644, 14643], [14515, 14516, 14645], [14515, 14645, 14644], [14516, 14517, 14645], [14517, 14646, 14645], [14517, 14518, 14647], [14517, 14647, 14646], [14518, 14519, 14647], [14519, 14648, 14647], [14519, 14520, 14649], [14519, 14649, 14648], [14520, 14521, 14649], [14521, 14650, 14649], [14521, 14522, 14651], [14521, 14651, 14650], [14522, 14523, 14651], [14523, 14652, 14651], [14523, 14524, 14653], [14523, 14653, 14652], 
[14524, 14525, 14653], [14525, 14654, 14653], [14525, 14526, 14655], [14525, 14655, 14654], [14526, 14527, 14655], [14527, 14656, 14655], [14527, 14528, 14657], [14527, 14657, 14656], [14528, 14529, 14657], [14529, 14658, 14657], [14529, 14530, 14659], [14529, 14659, 14658], [14530, 14531, 14659], [14531, 14660, 14659], [14531, 14532, 14661], [14531, 14661, 14660], [14532, 14533, 14661], [14533, 14662, 14661], [14533, 14534, 14663], [14533, 14663, 14662], [14534, 14535, 14663], [14535, 14664, 14663], [14535, 14536, 14665], [14535, 14665, 14664], [14536, 14537, 14665], [14537, 14666, 14665], [14537, 14538, 14667], [14537, 14667, 14666], [14538, 14539, 14667], [14539, 14668, 14667], [14539, 14540, 14669], [14539, 14669, 14668], [14540, 14541, 14669], [14541, 14670, 14669], [14541, 14542, 14671], [14541, 14671, 14670], [14542, 14543, 14671], [14543, 14672, 14671], [14543, 14544, 14673], [14543, 14673, 14672], [14544, 14545, 14673], [14545, 14674, 14673], [14545, 14546, 14675], [14545, 14675, 14674], [14546, 14547, 14675], [14547, 14676, 14675], [14547, 14548, 14677], [14547, 14677, 14676], [14548, 14549, 14677], [14549, 14678, 14677], [14549, 14550, 14679], [14549, 14679, 14678], [14550, 14551, 14679], [14551, 14680, 14679], [14551, 14552, 14681], [14551, 14681, 14680], [14552, 14553, 14681], [14553, 14682, 14681], [14553, 14554, 14683], [14553, 14683, 14682], [14554, 14555, 14683], [14555, 14684, 14683], [14555, 14556, 14685], [14555, 14685, 14684], [14556, 14557, 14685], [14557, 14686, 14685], [14557, 14558, 14687], [14557, 14687, 14686], [14558, 14559, 14687], [14559, 14688, 14687], [14559, 14560, 14689], [14559, 14689, 14688], [14560, 14561, 14689], [14561, 14690, 14689], [14561, 14562, 14691], [14561, 14691, 14690], [14562, 14563, 14691], [14563, 14692, 14691], [14563, 14564, 14693], [14563, 14693, 14692], [14564, 14565, 14693], [14565, 14694, 14693], [14565, 14566, 14695], [14565, 14695, 14694], [14566, 14567, 14695], [14567, 14696, 14695], [14567, 14568, 14697], [14567, 14697, 14696], [14568, 14569, 14697], [14569, 14698, 14697], [14569, 14570, 14699], [14569, 14699, 14698], [14570, 14571, 14699], [14571, 14700, 14699], [14571, 14572, 14701], [14571, 14701, 14700], [14572, 14573, 14701], [14573, 14702, 14701], [14573, 14574, 14703], [14573, 14703, 14702], [14574, 14575, 14703], [14575, 14704, 14703], [14575, 14576, 14705], [14575, 14705, 14704], [14576, 14577, 14705], [14577, 14706, 14705], [14577, 14578, 14707], [14577, 14707, 14706], [14578, 14579, 14707], [14579, 14708, 14707], [14579, 14580, 14709], [14579, 14709, 14708], [14580, 14581, 14709], [14581, 14710, 14709], [14581, 14582, 14711], [14581, 14711, 14710], [14582, 14583, 14711], [14583, 14712, 14711], [14583, 14584, 14713], [14583, 14713, 14712], [14584, 14585, 14713], [14585, 14714, 14713], [14585, 14586, 14715], [14585, 14715, 14714], [14586, 14587, 14715], [14587, 14716, 14715], [14587, 14588, 14717], [14587, 14717, 14716], [14588, 14589, 14717], [14589, 14718, 14717], [14589, 14590, 14719], [14589, 14719, 14718], [14590, 14591, 14719], [14591, 14720, 14719], [14591, 14592, 14721], [14591, 14721, 14720], [14592, 14593, 14721], [14593, 14722, 14721], [14593, 14594, 14723], [14593, 14723, 14722], [14594, 14595, 14723], [14595, 14724, 14723], [14595, 14596, 14725], [14595, 14725, 14724], [14596, 14597, 14725], [14597, 14726, 14725], [14597, 14598, 14727], [14597, 14727, 14726], [14598, 14599, 14727], [14599, 14728, 14727], [14599, 14600, 14729], [14599, 14729, 14728], [14600, 14601, 14729], [14601, 14730, 14729], [14601, 
14602, 14731], [14601, 14731, 14730], [14602, 14603, 14731], [14603, 14732, 14731], [14603, 14604, 14733], [14603, 14733, 14732], [14604, 14605, 14733], [14605, 14734, 14733], [14605, 14606, 14735], [14605, 14735, 14734], [14606, 14607, 14735], [14607, 14736, 14735], [14607, 14608, 14737], [14607, 14737, 14736], [14608, 14609, 14737], [14609, 14738, 14737], [14609, 14610, 14739], [14609, 14739, 14738], [14610, 14611, 14739], [14611, 14740, 14739], [14611, 14612, 14741], [14611, 14741, 14740], [14612, 14613, 14741], [14613, 14742, 14741], [14613, 14614, 14743], [14613, 14743, 14742], [14614, 14615, 14743], [14615, 14744, 14743], [14615, 14616, 14745], [14615, 14745, 14744], [14616, 14617, 14745], [14617, 14746, 14745], [14617, 14618, 14747], [14617, 14747, 14746], [14618, 14619, 14747], [14619, 14748, 14747], [14619, 14620, 14749], [14619, 14749, 14748], [14620, 14621, 14749], [14621, 14750, 14749], [14621, 14622, 14751], [14621, 14751, 14750], [14623, 14624, 14753], [14623, 14753, 14752], [14624, 14625, 14753], [14625, 14754, 14753], [14625, 14626, 14755], [14625, 14755, 14754], [14626, 14627, 14755], [14627, 14756, 14755], [14627, 14628, 14757], [14627, 14757, 14756], [14628, 14629, 14757], [14629, 14758, 14757], [14629, 14630, 14759], [14629, 14759, 14758], [14630, 14631, 14759], [14631, 14760, 14759], [14631, 14632, 14761], [14631, 14761, 14760], [14632, 14633, 14761], [14633, 14762, 14761], [14633, 14634, 14763], [14633, 14763, 14762], [14634, 14635, 14763], [14635, 14764, 14763], [14635, 14636, 14765], [14635, 14765, 14764], [14636, 14637, 14765], [14637, 14766, 14765], [14637, 14638, 14767], [14637, 14767, 14766], [14638, 14639, 14767], [14639, 14768, 14767], [14639, 14640, 14769], [14639, 14769, 14768], [14640, 14641, 14769], [14641, 14770, 14769], [14641, 14642, 14771], [14641, 14771, 14770], [14642, 14643, 14771], [14643, 14772, 14771], [14643, 14644, 14773], [14643, 14773, 14772], [14644, 14645, 14773], [14645, 14774, 14773], [14645, 14646, 14775], [14645, 14775, 14774], [14646, 14647, 14775], [14647, 14776, 14775], [14647, 14648, 14777], [14647, 14777, 14776], [14648, 14649, 14777], [14649, 14778, 14777], [14649, 14650, 14779], [14649, 14779, 14778], [14650, 14651, 14779], [14651, 14780, 14779], [14651, 14652, 14781], [14651, 14781, 14780], [14652, 14653, 14781], [14653, 14782, 14781], [14653, 14654, 14783], [14653, 14783, 14782], [14654, 14655, 14783], [14655, 14784, 14783], [14655, 14656, 14785], [14655, 14785, 14784], [14656, 14657, 14785], [14657, 14786, 14785], [14657, 14658, 14787], [14657, 14787, 14786], [14658, 14659, 14787], [14659, 14788, 14787], [14659, 14660, 14789], [14659, 14789, 14788], [14660, 14661, 14789], [14661, 14790, 14789], [14661, 14662, 14791], [14661, 14791, 14790], [14662, 14663, 14791], [14663, 14792, 14791], [14663, 14664, 14793], [14663, 14793, 14792], [14664, 14665, 14793], [14665, 14794, 14793], [14665, 14666, 14795], [14665, 14795, 14794], [14666, 14667, 14795], [14667, 14796, 14795], [14667, 14668, 14797], [14667, 14797, 14796], [14668, 14669, 14797], [14669, 14798, 14797], [14669, 14670, 14799], [14669, 14799, 14798], [14670, 14671, 14799], [14671, 14800, 14799], [14671, 14672, 14801], [14671, 14801, 14800], [14672, 14673, 14801], [14673, 14802, 14801], [14673, 14674, 14803], [14673, 14803, 14802], [14674, 14675, 14803], [14675, 14804, 14803], [14675, 14676, 14805], [14675, 14805, 14804], [14676, 14677, 14805], [14677, 14806, 14805], [14677, 14678, 14807], [14677, 14807, 14806], [14678, 14679, 14807], [14679, 14808, 14807], [14679, 14680, 
14809], [14679, 14809, 14808], [14680, 14681, 14809], [14681, 14810, 14809], [14681, 14682, 14811], [14681, 14811, 14810], [14682, 14683, 14811], [14683, 14812, 14811], [14683, 14684, 14813], [14683, 14813, 14812], [14684, 14685, 14813], [14685, 14814, 14813], [14685, 14686, 14815], [14685, 14815, 14814], [14686, 14687, 14815], [14687, 14816, 14815], [14687, 14688, 14817], [14687, 14817, 14816], [14688, 14689, 14817], [14689, 14818, 14817], [14689, 14690, 14819], [14689, 14819, 14818], [14690, 14691, 14819], [14691, 14820, 14819], [14691, 14692, 14821], [14691, 14821, 14820], [14692, 14693, 14821], [14693, 14822, 14821], [14693, 14694, 14823], [14693, 14823, 14822], [14694, 14695, 14823], [14695, 14824, 14823], [14695, 14696, 14825], [14695, 14825, 14824], [14696, 14697, 14825], [14697, 14826, 14825], [14697, 14698, 14827], [14697, 14827, 14826], [14698, 14699, 14827], [14699, 14828, 14827], [14699, 14700, 14829], [14699, 14829, 14828], [14700, 14701, 14829], [14701, 14830, 14829], [14701, 14702, 14831], [14701, 14831, 14830], [14702, 14703, 14831], [14703, 14832, 14831], [14703, 14704, 14833], [14703, 14833, 14832], [14704, 14705, 14833], [14705, 14834, 14833], [14705, 14706, 14835], [14705, 14835, 14834], [14706, 14707, 14835], [14707, 14836, 14835], [14707, 14708, 14837], [14707, 14837, 14836], [14708, 14709, 14837], [14709, 14838, 14837], [14709, 14710, 14839], [14709, 14839, 14838], [14710, 14711, 14839], [14711, 14840, 14839], [14711, 14712, 14841], [14711, 14841, 14840], [14712, 14713, 14841], [14713, 14842, 14841], [14713, 14714, 14843], [14713, 14843, 14842], [14714, 14715, 14843], [14715, 14844, 14843], [14715, 14716, 14845], [14715, 14845, 14844], [14716, 14717, 14845], [14717, 14846, 14845], [14717, 14718, 14847], [14717, 14847, 14846], [14718, 14719, 14847], [14719, 14848, 14847], [14719, 14720, 14849], [14719, 14849, 14848], [14720, 14721, 14849], [14721, 14850, 14849], [14721, 14722, 14851], [14721, 14851, 14850], [14722, 14723, 14851], [14723, 14852, 14851], [14723, 14724, 14853], [14723, 14853, 14852], [14724, 14725, 14853], [14725, 14854, 14853], [14725, 14726, 14855], [14725, 14855, 14854], [14726, 14727, 14855], [14727, 14856, 14855], [14727, 14728, 14857], [14727, 14857, 14856], [14728, 14729, 14857], [14729, 14858, 14857], [14729, 14730, 14859], [14729, 14859, 14858], [14730, 14731, 14859], [14731, 14860, 14859], [14731, 14732, 14861], [14731, 14861, 14860], [14732, 14733, 14861], [14733, 14862, 14861], [14733, 14734, 14863], [14733, 14863, 14862], [14734, 14735, 14863], [14735, 14864, 14863], [14735, 14736, 14865], [14735, 14865, 14864], [14736, 14737, 14865], [14737, 14866, 14865], [14737, 14738, 14867], [14737, 14867, 14866], [14738, 14739, 14867], [14739, 14868, 14867], [14739, 14740, 14869], [14739, 14869, 14868], [14740, 14741, 14869], [14741, 14870, 14869], [14741, 14742, 14871], [14741, 14871, 14870], [14742, 14743, 14871], [14743, 14872, 14871], [14743, 14744, 14873], [14743, 14873, 14872], [14744, 14745, 14873], [14745, 14874, 14873], [14745, 14746, 14875], [14745, 14875, 14874], [14746, 14747, 14875], [14747, 14876, 14875], [14747, 14748, 14877], [14747, 14877, 14876], [14748, 14749, 14877], [14749, 14878, 14877], [14749, 14750, 14879], [14749, 14879, 14878], [14750, 14751, 14879], [14751, 14880, 14879], [14752, 14753, 14881], [14753, 14882, 14881], [14753, 14754, 14883], [14753, 14883, 14882], [14754, 14755, 14883], [14755, 14884, 14883], [14755, 14756, 14885], [14755, 14885, 14884], [14756, 14757, 14885], [14757, 14886, 14885], [14757, 14758, 14887], 
[14757, 14887, 14886], [14758, 14759, 14887], [14759, 14888, 14887], [14759, 14760, 14889], [14759, 14889, 14888], [14760, 14761, 14889], [14761, 14890, 14889], [14761, 14762, 14891], [14761, 14891, 14890], [14762, 14763, 14891], [14763, 14892, 14891], [14763, 14764, 14893], [14763, 14893, 14892], [14764, 14765, 14893], [14765, 14894, 14893], [14765, 14766, 14895], [14765, 14895, 14894], [14766, 14767, 14895], [14767, 14896, 14895], [14767, 14768, 14897], [14767, 14897, 14896], [14768, 14769, 14897], [14769, 14898, 14897], [14769, 14770, 14899], [14769, 14899, 14898], [14770, 14771, 14899], [14771, 14900, 14899], [14771, 14772, 14901], [14771, 14901, 14900], [14772, 14773, 14901], [14773, 14902, 14901], [14773, 14774, 14903], [14773, 14903, 14902], [14774, 14775, 14903], [14775, 14904, 14903], [14775, 14776, 14905], [14775, 14905, 14904], [14776, 14777, 14905], [14777, 14906, 14905], [14777, 14778, 14907], [14777, 14907, 14906], [14778, 14779, 14907], [14779, 14908, 14907], [14779, 14780, 14909], [14779, 14909, 14908], [14780, 14781, 14909], [14781, 14910, 14909], [14781, 14782, 14911], [14781, 14911, 14910], [14782, 14783, 14911], [14783, 14912, 14911], [14783, 14784, 14913], [14783, 14913, 14912], [14784, 14785, 14913], [14785, 14914, 14913], [14785, 14786, 14915], [14785, 14915, 14914], [14786, 14787, 14915], [14787, 14916, 14915], [14787, 14788, 14917], [14787, 14917, 14916], [14788, 14789, 14917], [14789, 14918, 14917], [14789, 14790, 14919], [14789, 14919, 14918], [14790, 14791, 14919], [14791, 14920, 14919], [14791, 14792, 14921], [14791, 14921, 14920], [14792, 14793, 14921], [14793, 14922, 14921], [14793, 14794, 14923], [14793, 14923, 14922], [14794, 14795, 14923], [14795, 14924, 14923], [14795, 14796, 14925], [14795, 14925, 14924], [14796, 14797, 14925], [14797, 14926, 14925], [14797, 14798, 14927], [14797, 14927, 14926], [14798, 14799, 14927], [14799, 14928, 14927], [14799, 14800, 14929], [14799, 14929, 14928], [14800, 14801, 14929], [14801, 14930, 14929], [14801, 14802, 14931], [14801, 14931, 14930], [14802, 14803, 14931], [14803, 14932, 14931], [14803, 14804, 14933], [14803, 14933, 14932], [14804, 14805, 14933], [14805, 14934, 14933], [14805, 14806, 14935], [14805, 14935, 14934], [14806, 14807, 14935], [14807, 14936, 14935], [14807, 14808, 14937], [14807, 14937, 14936], [14808, 14809, 14937], [14809, 14938, 14937], [14809, 14810, 14939], [14809, 14939, 14938], [14810, 14811, 14939], [14811, 14940, 14939], [14811, 14812, 14941], [14811, 14941, 14940], [14812, 14813, 14941], [14813, 14942, 14941], [14813, 14814, 14943], [14813, 14943, 14942], [14814, 14815, 14943], [14815, 14944, 14943], [14815, 14816, 14945], [14815, 14945, 14944], [14816, 14817, 14945], [14817, 14946, 14945], [14817, 14818, 14947], [14817, 14947, 14946], [14818, 14819, 14947], [14819, 14948, 14947], [14819, 14820, 14949], [14819, 14949, 14948], [14820, 14821, 14949], [14821, 14950, 14949], [14821, 14822, 14951], [14821, 14951, 14950], [14822, 14823, 14951], [14823, 14952, 14951], [14823, 14824, 14953], [14823, 14953, 14952], [14824, 14825, 14953], [14825, 14954, 14953], [14825, 14826, 14955], [14825, 14955, 14954], [14826, 14827, 14955], [14827, 14956, 14955], [14827, 14828, 14957], [14827, 14957, 14956], [14828, 14829, 14957], [14829, 14958, 14957], [14829, 14830, 14959], [14829, 14959, 14958], [14830, 14831, 14959], [14831, 14960, 14959], [14831, 14832, 14961], [14831, 14961, 14960], [14832, 14833, 14961], [14833, 14962, 14961], [14833, 14834, 14963], [14833, 14963, 14962], [14834, 14835, 14963], [14835, 
14964, 14963], [14835, 14836, 14965], [14835, 14965, 14964], [14836, 14837, 14965], [14837, 14966, 14965], [14837, 14838, 14967], [14837, 14967, 14966], [14838, 14839, 14967], [14839, 14968, 14967], [14839, 14840, 14969], [14839, 14969, 14968], [14840, 14841, 14969], [14841, 14970, 14969], [14841, 14842, 14971], [14841, 14971, 14970], [14842, 14843, 14971], [14843, 14972, 14971], [14843, 14844, 14973], [14843, 14973, 14972], [14844, 14845, 14973], [14845, 14974, 14973], [14845, 14846, 14975], [14845, 14975, 14974], [14846, 14847, 14975], [14847, 14976, 14975], [14847, 14848, 14977], [14847, 14977, 14976], [14848, 14849, 14977], [14849, 14978, 14977], [14849, 14850, 14979], [14849, 14979, 14978], [14850, 14851, 14979], [14851, 14980, 14979], [14851, 14852, 14981], [14851, 14981, 14980], [14852, 14853, 14981], [14853, 14982, 14981], [14853, 14854, 14983], [14853, 14983, 14982], [14854, 14855, 14983], [14855, 14984, 14983], [14855, 14856, 14985], [14855, 14985, 14984], [14856, 14857, 14985], [14857, 14986, 14985], [14857, 14858, 14987], [14857, 14987, 14986], [14858, 14859, 14987], [14859, 14988, 14987], [14859, 14860, 14989], [14859, 14989, 14988], [14860, 14861, 14989], [14861, 14990, 14989], [14861, 14862, 14991], [14861, 14991, 14990], [14862, 14863, 14991], [14863, 14992, 14991], [14863, 14864, 14993], [14863, 14993, 14992], [14864, 14865, 14993], [14865, 14994, 14993], [14865, 14866, 14995], [14865, 14995, 14994], [14866, 14867, 14995], [14867, 14996, 14995], [14867, 14868, 14997], [14867, 14997, 14996], [14868, 14869, 14997], [14869, 14998, 14997], [14869, 14870, 14999], [14869, 14999, 14998], [14870, 14871, 14999], [14871, 15000, 14999], [14871, 14872, 15001], [14871, 15001, 15000], [14872, 14873, 15001], [14873, 15002, 15001], [14873, 14874, 15003], [14873, 15003, 15002], [14874, 14875, 15003], [14875, 15004, 15003], [14875, 14876, 15005], [14875, 15005, 15004], [14876, 14877, 15005], [14877, 15006, 15005], [14877, 14878, 15007], [14877, 15007, 15006], [14878, 14879, 15007], [14879, 15008, 15007], [14879, 14880, 15009], [14879, 15009, 15008], [14881, 14882, 15011], [14881, 15011, 15010], [14882, 14883, 15011], [14883, 15012, 15011], [14883, 14884, 15013], [14883, 15013, 15012], [14884, 14885, 15013], [14885, 15014, 15013], [14885, 14886, 15015], [14885, 15015, 15014], [14886, 14887, 15015], [14887, 15016, 15015], [14887, 14888, 15017], [14887, 15017, 15016], [14888, 14889, 15017], [14889, 15018, 15017], [14889, 14890, 15019], [14889, 15019, 15018], [14890, 14891, 15019], [14891, 15020, 15019], [14891, 14892, 15021], [14891, 15021, 15020], [14892, 14893, 15021], [14893, 15022, 15021], [14893, 14894, 15023], [14893, 15023, 15022], [14894, 14895, 15023], [14895, 15024, 15023], [14895, 14896, 15025], [14895, 15025, 15024], [14896, 14897, 15025], [14897, 15026, 15025], [14897, 14898, 15027], [14897, 15027, 15026], [14898, 14899, 15027], [14899, 15028, 15027], [14899, 14900, 15029], [14899, 15029, 15028], [14900, 14901, 15029], [14901, 15030, 15029], [14901, 14902, 15031], [14901, 15031, 15030], [14902, 14903, 15031], [14903, 15032, 15031], [14903, 14904, 15033], [14903, 15033, 15032], [14904, 14905, 15033], [14905, 15034, 15033], [14905, 14906, 15035], [14905, 15035, 15034], [14906, 14907, 15035], [14907, 15036, 15035], [14907, 14908, 15037], [14907, 15037, 15036], [14908, 14909, 15037], [14909, 15038, 15037], [14909, 14910, 15039], [14909, 15039, 15038], [14910, 14911, 15039], [14911, 15040, 15039], [14911, 14912, 15041], [14911, 15041, 15040], [14912, 14913, 15041], [14913, 15042, 
15041], [14913, 14914, 15043], [14913, 15043, 15042], [14914, 14915, 15043], [14915, 15044, 15043], [14915, 14916, 15045], [14915, 15045, 15044], [14916, 14917, 15045], [14917, 15046, 15045], [14917, 14918, 15047], [14917, 15047, 15046], [14918, 14919, 15047], [14919, 15048, 15047], [14919, 14920, 15049], [14919, 15049, 15048], [14920, 14921, 15049], [14921, 15050, 15049], [14921, 14922, 15051], [14921, 15051, 15050], [14922, 14923, 15051], [14923, 15052, 15051], [14923, 14924, 15053], [14923, 15053, 15052], [14924, 14925, 15053], [14925, 15054, 15053], [14925, 14926, 15055], [14925, 15055, 15054], [14926, 14927, 15055], [14927, 15056, 15055], [14927, 14928, 15057], [14927, 15057, 15056], [14928, 14929, 15057], [14929, 15058, 15057], [14929, 14930, 15059], [14929, 15059, 15058], [14930, 14931, 15059], [14931, 15060, 15059], [14931, 14932, 15061], [14931, 15061, 15060], [14932, 14933, 15061], [14933, 15062, 15061], [14933, 14934, 15063], [14933, 15063, 15062], [14934, 14935, 15063], [14935, 15064, 15063], [14935, 14936, 15065], [14935, 15065, 15064], [14936, 14937, 15065], [14937, 15066, 15065], [14937, 14938, 15067], [14937, 15067, 15066], [14938, 14939, 15067], [14939, 15068, 15067], [14939, 14940, 15069], [14939, 15069, 15068], [14940, 14941, 15069], [14941, 15070, 15069], [14941, 14942, 15071], [14941, 15071, 15070], [14942, 14943, 15071], [14943, 15072, 15071], [14943, 14944, 15073], [14943, 15073, 15072], [14944, 14945, 15073], [14945, 15074, 15073], [14945, 14946, 15075], [14945, 15075, 15074], [14946, 14947, 15075], [14947, 15076, 15075], [14947, 14948, 15077], [14947, 15077, 15076], [14948, 14949, 15077], [14949, 15078, 15077], [14949, 14950, 15079], [14949, 15079, 15078], [14950, 14951, 15079], [14951, 15080, 15079], [14951, 14952, 15081], [14951, 15081, 15080], [14952, 14953, 15081], [14953, 15082, 15081], [14953, 14954, 15083], [14953, 15083, 15082], [14954, 14955, 15083], [14955, 15084, 15083], [14955, 14956, 15085], [14955, 15085, 15084], [14956, 14957, 15085], [14957, 15086, 15085], [14957, 14958, 15087], [14957, 15087, 15086], [14958, 14959, 15087], [14959, 15088, 15087], [14959, 14960, 15089], [14959, 15089, 15088], [14960, 14961, 15089], [14961, 15090, 15089], [14961, 14962, 15091], [14961, 15091, 15090], [14962, 14963, 15091], [14963, 15092, 15091], [14963, 14964, 15093], [14963, 15093, 15092], [14964, 14965, 15093], [14965, 15094, 15093], [14965, 14966, 15095], [14965, 15095, 15094], [14966, 14967, 15095], [14967, 15096, 15095], [14967, 14968, 15097], [14967, 15097, 15096], [14968, 14969, 15097], [14969, 15098, 15097], [14969, 14970, 15099], [14969, 15099, 15098], [14970, 14971, 15099], [14971, 15100, 15099], [14971, 14972, 15101], [14971, 15101, 15100], [14972, 14973, 15101], [14973, 15102, 15101], [14973, 14974, 15103], [14973, 15103, 15102], [14974, 14975, 15103], [14975, 15104, 15103], [14975, 14976, 15105], [14975, 15105, 15104], [14976, 14977, 15105], [14977, 15106, 15105], [14977, 14978, 15107], [14977, 15107, 15106], [14978, 14979, 15107], [14979, 15108, 15107], [14979, 14980, 15109], [14979, 15109, 15108], [14980, 14981, 15109], [14981, 15110, 15109], [14981, 14982, 15111], [14981, 15111, 15110], [14982, 14983, 15111], [14983, 15112, 15111], [14983, 14984, 15113], [14983, 15113, 15112], [14984, 14985, 15113], [14985, 15114, 15113], [14985, 14986, 15115], [14985, 15115, 15114], [14986, 14987, 15115], [14987, 15116, 15115], [14987, 14988, 15117], [14987, 15117, 15116], [14988, 14989, 15117], [14989, 15118, 15117], [14989, 14990, 15119], [14989, 15119, 15118], 
[14990, 14991, 15119], [14991, 15120, 15119], [14991, 14992, 15121], [14991, 15121, 15120], [14992, 14993, 15121], [14993, 15122, 15121], [14993, 14994, 15123], [14993, 15123, 15122], [14994, 14995, 15123], [14995, 15124, 15123], [14995, 14996, 15125], [14995, 15125, 15124], [14996, 14997, 15125], [14997, 15126, 15125], [14997, 14998, 15127], [14997, 15127, 15126], [14998, 14999, 15127], [14999, 15128, 15127], [14999, 15000, 15129], [14999, 15129, 15128], [15000, 15001, 15129], [15001, 15130, 15129], [15001, 15002, 15131], [15001, 15131, 15130], [15002, 15003, 15131], [15003, 15132, 15131], [15003, 15004, 15133], [15003, 15133, 15132], [15004, 15005, 15133], [15005, 15134, 15133], [15005, 15006, 15135], [15005, 15135, 15134], [15006, 15007, 15135], [15007, 15136, 15135], [15007, 15008, 15137], [15007, 15137, 15136], [15008, 15009, 15137], [15009, 15138, 15137], [15010, 15011, 15139], [15011, 15140, 15139], [15011, 15012, 15141], [15011, 15141, 15140], [15012, 15013, 15141], [15013, 15142, 15141], [15013, 15014, 15143], [15013, 15143, 15142], [15014, 15015, 15143], [15015, 15144, 15143], [15015, 15016, 15145], [15015, 15145, 15144], [15016, 15017, 15145], [15017, 15146, 15145], [15017, 15018, 15147], [15017, 15147, 15146], [15018, 15019, 15147], [15019, 15148, 15147], [15019, 15020, 15149], [15019, 15149, 15148], [15020, 15021, 15149], [15021, 15150, 15149], [15021, 15022, 15151], [15021, 15151, 15150], [15022, 15023, 15151], [15023, 15152, 15151], [15023, 15024, 15153], [15023, 15153, 15152], [15024, 15025, 15153], [15025, 15154, 15153], [15025, 15026, 15155], [15025, 15155, 15154], [15026, 15027, 15155], [15027, 15156, 15155], [15027, 15028, 15157], [15027, 15157, 15156], [15028, 15029, 15157], [15029, 15158, 15157], [15029, 15030, 15159], [15029, 15159, 15158], [15030, 15031, 15159], [15031, 15160, 15159], [15031, 15032, 15161], [15031, 15161, 15160], [15032, 15033, 15161], [15033, 15162, 15161], [15033, 15034, 15163], [15033, 15163, 15162], [15034, 15035, 15163], [15035, 15164, 15163], [15035, 15036, 15165], [15035, 15165, 15164], [15036, 15037, 15165], [15037, 15166, 15165], [15037, 15038, 15167], [15037, 15167, 15166], [15038, 15039, 15167], [15039, 15168, 15167], [15039, 15040, 15169], [15039, 15169, 15168], [15040, 15041, 15169], [15041, 15170, 15169], [15041, 15042, 15171], [15041, 15171, 15170], [15042, 15043, 15171], [15043, 15172, 15171], [15043, 15044, 15173], [15043, 15173, 15172], [15044, 15045, 15173], [15045, 15174, 15173], [15045, 15046, 15175], [15045, 15175, 15174], [15046, 15047, 15175], [15047, 15176, 15175], [15047, 15048, 15177], [15047, 15177, 15176], [15048, 15049, 15177], [15049, 15178, 15177], [15049, 15050, 15179], [15049, 15179, 15178], [15050, 15051, 15179], [15051, 15180, 15179], [15051, 15052, 15181], [15051, 15181, 15180], [15052, 15053, 15181], [15053, 15182, 15181], [15053, 15054, 15183], [15053, 15183, 15182], [15054, 15055, 15183], [15055, 15184, 15183], [15055, 15056, 15185], [15055, 15185, 15184], [15056, 15057, 15185], [15057, 15186, 15185], [15057, 15058, 15187], [15057, 15187, 15186], [15058, 15059, 15187], [15059, 15188, 15187], [15059, 15060, 15189], [15059, 15189, 15188], [15060, 15061, 15189], [15061, 15190, 15189], [15061, 15062, 15191], [15061, 15191, 15190], [15062, 15063, 15191], [15063, 15192, 15191], [15063, 15064, 15193], [15063, 15193, 15192], [15064, 15065, 15193], [15065, 15194, 15193], [15065, 15066, 15195], [15065, 15195, 15194], [15066, 15067, 15195], [15067, 15196, 15195], [15067, 15068, 15197], [15067, 15197, 15196], [15068, 
15069, 15197], [15069, 15198, 15197], [15069, 15070, 15199], [15069, 15199, 15198], [15070, 15071, 15199], [15071, 15200, 15199], [15071, 15072, 15201], [15071, 15201, 15200], [15072, 15073, 15201], [15073, 15202, 15201], [15073, 15074, 15203], [15073, 15203, 15202], [15074, 15075, 15203], [15075, 15204, 15203], [15075, 15076, 15205], [15075, 15205, 15204], [15076, 15077, 15205], [15077, 15206, 15205], [15077, 15078, 15207], [15077, 15207, 15206], [15078, 15079, 15207], [15079, 15208, 15207], [15079, 15080, 15209], [15079, 15209, 15208], [15080, 15081, 15209], [15081, 15210, 15209], [15081, 15082, 15211], [15081, 15211, 15210], [15082, 15083, 15211], [15083, 15212, 15211], [15083, 15084, 15213], [15083, 15213, 15212], [15084, 15085, 15213], [15085, 15214, 15213], [15085, 15086, 15215], [15085, 15215, 15214], [15086, 15087, 15215], [15087, 15216, 15215], [15087, 15088, 15217], [15087, 15217, 15216], [15088, 15089, 15217], [15089, 15218, 15217], [15089, 15090, 15219], [15089, 15219, 15218], [15090, 15091, 15219], [15091, 15220, 15219], [15091, 15092, 15221], [15091, 15221, 15220], [15092, 15093, 15221], [15093, 15222, 15221], [15093, 15094, 15223], [15093, 15223, 15222], [15094, 15095, 15223], [15095, 15224, 15223], [15095, 15096, 15225], [15095, 15225, 15224], [15096, 15097, 15225], [15097, 15226, 15225], [15097, 15098, 15227], [15097, 15227, 15226], [15098, 15099, 15227], [15099, 15228, 15227], [15099, 15100, 15229], [15099, 15229, 15228], [15100, 15101, 15229], [15101, 15230, 15229], [15101, 15102, 15231], [15101, 15231, 15230], [15102, 15103, 15231], [15103, 15232, 15231], [15103, 15104, 15233], [15103, 15233, 15232], [15104, 15105, 15233], [15105, 15234, 15233], [15105, 15106, 15235], [15105, 15235, 15234], [15106, 15107, 15235], [15107, 15236, 15235], [15107, 15108, 15237], [15107, 15237, 15236], [15108, 15109, 15237], [15109, 15238, 15237], [15109, 15110, 15239], [15109, 15239, 15238], [15110, 15111, 15239], [15111, 15240, 15239], [15111, 15112, 15241], [15111, 15241, 15240], [15112, 15113, 15241], [15113, 15242, 15241], [15113, 15114, 15243], [15113, 15243, 15242], [15114, 15115, 15243], [15115, 15244, 15243], [15115, 15116, 15245], [15115, 15245, 15244], [15116, 15117, 15245], [15117, 15246, 15245], [15117, 15118, 15247], [15117, 15247, 15246], [15118, 15119, 15247], [15119, 15248, 15247], [15119, 15120, 15249], [15119, 15249, 15248], [15120, 15121, 15249], [15121, 15250, 15249], [15121, 15122, 15251], [15121, 15251, 15250], [15122, 15123, 15251], [15123, 15252, 15251], [15123, 15124, 15253], [15123, 15253, 15252], [15124, 15125, 15253], [15125, 15254, 15253], [15125, 15126, 15255], [15125, 15255, 15254], [15126, 15127, 15255], [15127, 15256, 15255], [15127, 15128, 15257], [15127, 15257, 15256], [15128, 15129, 15257], [15129, 15258, 15257], [15129, 15130, 15259], [15129, 15259, 15258], [15130, 15131, 15259], [15131, 15260, 15259], [15131, 15132, 15261], [15131, 15261, 15260], [15132, 15133, 15261], [15133, 15262, 15261], [15133, 15134, 15263], [15133, 15263, 15262], [15134, 15135, 15263], [15135, 15264, 15263], [15135, 15136, 15265], [15135, 15265, 15264], [15136, 15137, 15265], [15137, 15266, 15265], [15137, 15138, 15267], [15137, 15267, 15266], [15139, 15140, 15269], [15139, 15269, 15268], [15140, 15141, 15269], [15141, 15270, 15269], [15141, 15142, 15271], [15141, 15271, 15270], [15142, 15143, 15271], [15143, 15272, 15271], [15143, 15144, 15273], [15143, 15273, 15272], [15144, 15145, 15273], [15145, 15274, 15273], [15145, 15146, 15275], [15145, 15275, 15274], [15146, 15147, 
15275], [15147, 15276, 15275], [15147, 15148, 15277], [15147, 15277, 15276], [15148, 15149, 15277], [15149, 15278, 15277], [15149, 15150, 15279], [15149, 15279, 15278], [15150, 15151, 15279], [15151, 15280, 15279], [15151, 15152, 15281], [15151, 15281, 15280], [15152, 15153, 15281], [15153, 15282, 15281], [15153, 15154, 15283], [15153, 15283, 15282], [15154, 15155, 15283], [15155, 15284, 15283], [15155, 15156, 15285], [15155, 15285, 15284], [15156, 15157, 15285], [15157, 15286, 15285], [15157, 15158, 15287], [15157, 15287, 15286], [15158, 15159, 15287], [15159, 15288, 15287], [15159, 15160, 15289], [15159, 15289, 15288], [15160, 15161, 15289], [15161, 15290, 15289], [15161, 15162, 15291], [15161, 15291, 15290], [15162, 15163, 15291], [15163, 15292, 15291], [15163, 15164, 15293], [15163, 15293, 15292], [15164, 15165, 15293], [15165, 15294, 15293], [15165, 15166, 15295], [15165, 15295, 15294], [15166, 15167, 15295], [15167, 15296, 15295], [15167, 15168, 15297], [15167, 15297, 15296], [15168, 15169, 15297], [15169, 15298, 15297], [15169, 15170, 15299], [15169, 15299, 15298], [15170, 15171, 15299], [15171, 15300, 15299], [15171, 15172, 15301], [15171, 15301, 15300], [15172, 15173, 15301], [15173, 15302, 15301], [15173, 15174, 15303], [15173, 15303, 15302], [15174, 15175, 15303], [15175, 15304, 15303], [15175, 15176, 15305], [15175, 15305, 15304], [15176, 15177, 15305], [15177, 15306, 15305], [15177, 15178, 15307], [15177, 15307, 15306], [15178, 15179, 15307], [15179, 15308, 15307], [15179, 15180, 15309], [15179, 15309, 15308], [15180, 15181, 15309], [15181, 15310, 15309], [15181, 15182, 15311], [15181, 15311, 15310], [15182, 15183, 15311], [15183, 15312, 15311], [15183, 15184, 15313], [15183, 15313, 15312], [15184, 15185, 15313], [15185, 15314, 15313], [15185, 15186, 15315], [15185, 15315, 15314], [15186, 15187, 15315], [15187, 15316, 15315], [15187, 15188, 15317], [15187, 15317, 15316], [15188, 15189, 15317], [15189, 15318, 15317], [15189, 15190, 15319], [15189, 15319, 15318], [15190, 15191, 15319], [15191, 15320, 15319], [15191, 15192, 15321], [15191, 15321, 15320], [15192, 15193, 15321], [15193, 15322, 15321], [15193, 15194, 15323], [15193, 15323, 15322], [15194, 15195, 15323], [15195, 15324, 15323], [15195, 15196, 15325], [15195, 15325, 15324], [15196, 15197, 15325], [15197, 15326, 15325], [15197, 15198, 15327], [15197, 15327, 15326], [15198, 15199, 15327], [15199, 15328, 15327], [15199, 15200, 15329], [15199, 15329, 15328], [15200, 15201, 15329], [15201, 15330, 15329], [15201, 15202, 15331], [15201, 15331, 15330], [15202, 15203, 15331], [15203, 15332, 15331], [15203, 15204, 15333], [15203, 15333, 15332], [15204, 15205, 15333], [15205, 15334, 15333], [15205, 15206, 15335], [15205, 15335, 15334], [15206, 15207, 15335], [15207, 15336, 15335], [15207, 15208, 15337], [15207, 15337, 15336], [15208, 15209, 15337], [15209, 15338, 15337], [15209, 15210, 15339], [15209, 15339, 15338], [15210, 15211, 15339], [15211, 15340, 15339], [15211, 15212, 15341], [15211, 15341, 15340], [15212, 15213, 15341], [15213, 15342, 15341], [15213, 15214, 15343], [15213, 15343, 15342], [15214, 15215, 15343], [15215, 15344, 15343], [15215, 15216, 15345], [15215, 15345, 15344], [15216, 15217, 15345], [15217, 15346, 15345], [15217, 15218, 15347], [15217, 15347, 15346], [15218, 15219, 15347], [15219, 15348, 15347], [15219, 15220, 15349], [15219, 15349, 15348], [15220, 15221, 15349], [15221, 15350, 15349], [15221, 15222, 15351], [15221, 15351, 15350], [15222, 15223, 15351], [15223, 15352, 15351], [15223, 15224, 15353], 
[15223, 15353, 15352], [15224, 15225, 15353], [15225, 15354, 15353], [15225, 15226, 15355], [15225, 15355, 15354], [15226, 15227, 15355], [15227, 15356, 15355], [15227, 15228, 15357], [15227, 15357, 15356], [15228, 15229, 15357], [15229, 15358, 15357], [15229, 15230, 15359], [15229, 15359, 15358], [15230, 15231, 15359], [15231, 15360, 15359], [15231, 15232, 15361], [15231, 15361, 15360], [15232, 15233, 15361], [15233, 15362, 15361], [15233, 15234, 15363], [15233, 15363, 15362], [15234, 15235, 15363], [15235, 15364, 15363], [15235, 15236, 15365], [15235, 15365, 15364], [15236, 15237, 15365], [15237, 15366, 15365], [15237, 15238, 15367], [15237, 15367, 15366], [15238, 15239, 15367], [15239, 15368, 15367], [15239, 15240, 15369], [15239, 15369, 15368], [15240, 15241, 15369], [15241, 15370, 15369], [15241, 15242, 15371], [15241, 15371, 15370], [15242, 15243, 15371], [15243, 15372, 15371], [15243, 15244, 15373], [15243, 15373, 15372], [15244, 15245, 15373], [15245, 15374, 15373], [15245, 15246, 15375], [15245, 15375, 15374], [15246, 15247, 15375], [15247, 15376, 15375], [15247, 15248, 15377], [15247, 15377, 15376], [15248, 15249, 15377], [15249, 15378, 15377], [15249, 15250, 15379], [15249, 15379, 15378], [15250, 15251, 15379], [15251, 15380, 15379], [15251, 15252, 15381], [15251, 15381, 15380], [15252, 15253, 15381], [15253, 15382, 15381], [15253, 15254, 15383], [15253, 15383, 15382], [15254, 15255, 15383], [15255, 15384, 15383], [15255, 15256, 15385], [15255, 15385, 15384], [15256, 15257, 15385], [15257, 15386, 15385], [15257, 15258, 15387], [15257, 15387, 15386], [15258, 15259, 15387], [15259, 15388, 15387], [15259, 15260, 15389], [15259, 15389, 15388], [15260, 15261, 15389], [15261, 15390, 15389], [15261, 15262, 15391], [15261, 15391, 15390], [15262, 15263, 15391], [15263, 15392, 15391], [15263, 15264, 15393], [15263, 15393, 15392], [15264, 15265, 15393], [15265, 15394, 15393], [15265, 15266, 15395], [15265, 15395, 15394], [15266, 15267, 15395], [15267, 15396, 15395], [15268, 15269, 15397], [15269, 15398, 15397], [15269, 15270, 15399], [15269, 15399, 15398], [15270, 15271, 15399], [15271, 15400, 15399], [15271, 15272, 15401], [15271, 15401, 15400], [15272, 15273, 15401], [15273, 15402, 15401], [15273, 15274, 15403], [15273, 15403, 15402], [15274, 15275, 15403], [15275, 15404, 15403], [15275, 15276, 15405], [15275, 15405, 15404], [15276, 15277, 15405], [15277, 15406, 15405], [15277, 15278, 15407], [15277, 15407, 15406], [15278, 15279, 15407], [15279, 15408, 15407], [15279, 15280, 15409], [15279, 15409, 15408], [15280, 15281, 15409], [15281, 15410, 15409], [15281, 15282, 15411], [15281, 15411, 15410], [15282, 15283, 15411], [15283, 15412, 15411], [15283, 15284, 15413], [15283, 15413, 15412], [15284, 15285, 15413], [15285, 15414, 15413], [15285, 15286, 15415], [15285, 15415, 15414], [15286, 15287, 15415], [15287, 15416, 15415], [15287, 15288, 15417], [15287, 15417, 15416], [15288, 15289, 15417], [15289, 15418, 15417], [15289, 15290, 15419], [15289, 15419, 15418], [15290, 15291, 15419], [15291, 15420, 15419], [15291, 15292, 15421], [15291, 15421, 15420], [15292, 15293, 15421], [15293, 15422, 15421], [15293, 15294, 15423], [15293, 15423, 15422], [15294, 15295, 15423], [15295, 15424, 15423], [15295, 15296, 15425], [15295, 15425, 15424], [15296, 15297, 15425], [15297, 15426, 15425], [15297, 15298, 15427], [15297, 15427, 15426], [15298, 15299, 15427], [15299, 15428, 15427], [15299, 15300, 15429], [15299, 15429, 15428], [15300, 15301, 15429], [15301, 15430, 15429], [15301, 15302, 15431], [15301, 
15431, 15430], [15302, 15303, 15431], [15303, 15432, 15431], [15303, 15304, 15433], [15303, 15433, 15432], [15304, 15305, 15433], [15305, 15434, 15433], [15305, 15306, 15435], [15305, 15435, 15434], [15306, 15307, 15435], [15307, 15436, 15435], [15307, 15308, 15437], [15307, 15437, 15436], [15308, 15309, 15437], [15309, 15438, 15437], [15309, 15310, 15439], [15309, 15439, 15438], [15310, 15311, 15439], [15311, 15440, 15439], [15311, 15312, 15441], [15311, 15441, 15440], [15312, 15313, 15441], [15313, 15442, 15441], [15313, 15314, 15443], [15313, 15443, 15442], [15314, 15315, 15443], [15315, 15444, 15443], [15315, 15316, 15445], [15315, 15445, 15444], [15316, 15317, 15445], [15317, 15446, 15445], [15317, 15318, 15447], [15317, 15447, 15446], [15318, 15319, 15447], [15319, 15448, 15447], [15319, 15320, 15449], [15319, 15449, 15448], [15320, 15321, 15449], [15321, 15450, 15449], [15321, 15322, 15451], [15321, 15451, 15450], [15322, 15323, 15451], [15323, 15452, 15451], [15323, 15324, 15453], [15323, 15453, 15452], [15324, 15325, 15453], [15325, 15454, 15453], [15325, 15326, 15455], [15325, 15455, 15454], [15326, 15327, 15455], [15327, 15456, 15455], [15327, 15328, 15457], [15327, 15457, 15456], [15328, 15329, 15457], [15329, 15458, 15457], [15329, 15330, 15459], [15329, 15459, 15458], [15330, 15331, 15459], [15331, 15460, 15459], [15331, 15332, 15461], [15331, 15461, 15460], [15332, 15333, 15461], [15333, 15462, 15461], [15333, 15334, 15463], [15333, 15463, 15462], [15334, 15335, 15463], [15335, 15464, 15463], [15335, 15336, 15465], [15335, 15465, 15464], [15336, 15337, 15465], [15337, 15466, 15465], [15337, 15338, 15467], [15337, 15467, 15466], [15338, 15339, 15467], [15339, 15468, 15467], [15339, 15340, 15469], [15339, 15469, 15468], [15340, 15341, 15469], [15341, 15470, 15469], [15341, 15342, 15471], [15341, 15471, 15470], [15342, 15343, 15471], [15343, 15472, 15471], [15343, 15344, 15473], [15343, 15473, 15472], [15344, 15345, 15473], [15345, 15474, 15473], [15345, 15346, 15475], [15345, 15475, 15474], [15346, 15347, 15475], [15347, 15476, 15475], [15347, 15348, 15477], [15347, 15477, 15476], [15348, 15349, 15477], [15349, 15478, 15477], [15349, 15350, 15479], [15349, 15479, 15478], [15350, 15351, 15479], [15351, 15480, 15479], [15351, 15352, 15481], [15351, 15481, 15480], [15352, 15353, 15481], [15353, 15482, 15481], [15353, 15354, 15483], [15353, 15483, 15482], [15354, 15355, 15483], [15355, 15484, 15483], [15355, 15356, 15485], [15355, 15485, 15484], [15356, 15357, 15485], [15357, 15486, 15485], [15357, 15358, 15487], [15357, 15487, 15486], [15358, 15359, 15487], [15359, 15488, 15487], [15359, 15360, 15489], [15359, 15489, 15488], [15360, 15361, 15489], [15361, 15490, 15489], [15361, 15362, 15491], [15361, 15491, 15490], [15362, 15363, 15491], [15363, 15492, 15491], [15363, 15364, 15493], [15363, 15493, 15492], [15364, 15365, 15493], [15365, 15494, 15493], [15365, 15366, 15495], [15365, 15495, 15494], [15366, 15367, 15495], [15367, 15496, 15495], [15367, 15368, 15497], [15367, 15497, 15496], [15368, 15369, 15497], [15369, 15498, 15497], [15369, 15370, 15499], [15369, 15499, 15498], [15370, 15371, 15499], [15371, 15500, 15499], [15371, 15372, 15501], [15371, 15501, 15500], [15372, 15373, 15501], [15373, 15502, 15501], [15373, 15374, 15503], [15373, 15503, 15502], [15374, 15375, 15503], [15375, 15504, 15503], [15375, 15376, 15505], [15375, 15505, 15504], [15376, 15377, 15505], [15377, 15506, 15505], [15377, 15378, 15507], [15377, 15507, 15506], [15378, 15379, 15507], [15379, 15508, 
15507], [15379, 15380, 15509], [15379, 15509, 15508], [15380, 15381, 15509], [15381, 15510, 15509], [15381, 15382, 15511], [15381, 15511, 15510], [15382, 15383, 15511], [15383, 15512, 15511], [15383, 15384, 15513], [15383, 15513, 15512], [15384, 15385, 15513], [15385, 15514, 15513], [15385, 15386, 15515], [15385, 15515, 15514], [15386, 15387, 15515], [15387, 15516, 15515], [15387, 15388, 15517], [15387, 15517, 15516], [15388, 15389, 15517], [15389, 15518, 15517], [15389, 15390, 15519], [15389, 15519, 15518], [15390, 15391, 15519], [15391, 15520, 15519], [15391, 15392, 15521], [15391, 15521, 15520], [15392, 15393, 15521], [15393, 15522, 15521], [15393, 15394, 15523], [15393, 15523, 15522], [15394, 15395, 15523], [15395, 15524, 15523], [15395, 15396, 15525], [15395, 15525, 15524], [15397, 15398, 15527], [15397, 15527, 15526], [15398, 15399, 15527], [15399, 15528, 15527], [15399, 15400, 15529], [15399, 15529, 15528], [15400, 15401, 15529], [15401, 15530, 15529], [15401, 15402, 15531], [15401, 15531, 15530], [15402, 15403, 15531], [15403, 15532, 15531], [15403, 15404, 15533], [15403, 15533, 15532], [15404, 15405, 15533], [15405, 15534, 15533], [15405, 15406, 15535], [15405, 15535, 15534], [15406, 15407, 15535], [15407, 15536, 15535], [15407, 15408, 15537], [15407, 15537, 15536], [15408, 15409, 15537], [15409, 15538, 15537], [15409, 15410, 15539], [15409, 15539, 15538], [15410, 15411, 15539], [15411, 15540, 15539], [15411, 15412, 15541], [15411, 15541, 15540], [15412, 15413, 15541], [15413, 15542, 15541], [15413, 15414, 15543], [15413, 15543, 15542], [15414, 15415, 15543], [15415, 15544, 15543], [15415, 15416, 15545], [15415, 15545, 15544], [15416, 15417, 15545], [15417, 15546, 15545], [15417, 15418, 15547], [15417, 15547, 15546], [15418, 15419, 15547], [15419, 15548, 15547], [15419, 15420, 15549], [15419, 15549, 15548], [15420, 15421, 15549], [15421, 15550, 15549], [15421, 15422, 15551], [15421, 15551, 15550], [15422, 15423, 15551], [15423, 15552, 15551], [15423, 15424, 15553], [15423, 15553, 15552], [15424, 15425, 15553], [15425, 15554, 15553], [15425, 15426, 15555], [15425, 15555, 15554], [15426, 15427, 15555], [15427, 15556, 15555], [15427, 15428, 15557], [15427, 15557, 15556], [15428, 15429, 15557], [15429, 15558, 15557], [15429, 15430, 15559], [15429, 15559, 15558], [15430, 15431, 15559], [15431, 15560, 15559], [15431, 15432, 15561], [15431, 15561, 15560], [15432, 15433, 15561], [15433, 15562, 15561], [15433, 15434, 15563], [15433, 15563, 15562], [15434, 15435, 15563], [15435, 15564, 15563], [15435, 15436, 15565], [15435, 15565, 15564], [15436, 15437, 15565], [15437, 15566, 15565], [15437, 15438, 15567], [15437, 15567, 15566], [15438, 15439, 15567], [15439, 15568, 15567], [15439, 15440, 15569], [15439, 15569, 15568], [15440, 15441, 15569], [15441, 15570, 15569], [15441, 15442, 15571], [15441, 15571, 15570], [15442, 15443, 15571], [15443, 15572, 15571], [15443, 15444, 15573], [15443, 15573, 15572], [15444, 15445, 15573], [15445, 15574, 15573], [15445, 15446, 15575], [15445, 15575, 15574], [15446, 15447, 15575], [15447, 15576, 15575], [15447, 15448, 15577], [15447, 15577, 15576], [15448, 15449, 15577], [15449, 15578, 15577], [15449, 15450, 15579], [15449, 15579, 15578], [15450, 15451, 15579], [15451, 15580, 15579], [15451, 15452, 15581], [15451, 15581, 15580], [15452, 15453, 15581], [15453, 15582, 15581], [15453, 15454, 15583], [15453, 15583, 15582], [15454, 15455, 15583], [15455, 15584, 15583], [15455, 15456, 15585], [15455, 15585, 15584], [15456, 15457, 15585], [15457, 15586, 15585], 
[15457, 15458, 15587], [15457, 15587, 15586], [15458, 15459, 15587], [15459, 15588, 15587], [15459, 15460, 15589], [15459, 15589, 15588], [15460, 15461, 15589], [15461, 15590, 15589], [15461, 15462, 15591], [15461, 15591, 15590], [15462, 15463, 15591], [15463, 15592, 15591], [15463, 15464, 15593], [15463, 15593, 15592], [15464, 15465, 15593], [15465, 15594, 15593], [15465, 15466, 15595], [15465, 15595, 15594], [15466, 15467, 15595], [15467, 15596, 15595], [15467, 15468, 15597], [15467, 15597, 15596], [15468, 15469, 15597], [15469, 15598, 15597], [15469, 15470, 15599], [15469, 15599, 15598], [15470, 15471, 15599], [15471, 15600, 15599], [15471, 15472, 15601], [15471, 15601, 15600], [15472, 15473, 15601], [15473, 15602, 15601], [15473, 15474, 15603], [15473, 15603, 15602], [15474, 15475, 15603], [15475, 15604, 15603], [15475, 15476, 15605], [15475, 15605, 15604], [15476, 15477, 15605], [15477, 15606, 15605], [15477, 15478, 15607], [15477, 15607, 15606], [15478, 15479, 15607], [15479, 15608, 15607], [15479, 15480, 15609], [15479, 15609, 15608], [15480, 15481, 15609], [15481, 15610, 15609], [15481, 15482, 15611], [15481, 15611, 15610], [15482, 15483, 15611], [15483, 15612, 15611], [15483, 15484, 15613], [15483, 15613, 15612], [15484, 15485, 15613], [15485, 15614, 15613], [15485, 15486, 15615], [15485, 15615, 15614], [15486, 15487, 15615], [15487, 15616, 15615], [15487, 15488, 15617], [15487, 15617, 15616], [15488, 15489, 15617], [15489, 15618, 15617], [15489, 15490, 15619], [15489, 15619, 15618], [15490, 15491, 15619], [15491, 15620, 15619], [15491, 15492, 15621], [15491, 15621, 15620], [15492, 15493, 15621], [15493, 15622, 15621], [15493, 15494, 15623], [15493, 15623, 15622], [15494, 15495, 15623], [15495, 15624, 15623], [15495, 15496, 15625], [15495, 15625, 15624], [15496, 15497, 15625], [15497, 15626, 15625], [15497, 15498, 15627], [15497, 15627, 15626], [15498, 15499, 15627], [15499, 15628, 15627], [15499, 15500, 15629], [15499, 15629, 15628], [15500, 15501, 15629], [15501, 15630, 15629], [15501, 15502, 15631], [15501, 15631, 15630], [15502, 15503, 15631], [15503, 15632, 15631], [15503, 15504, 15633], [15503, 15633, 15632], [15504, 15505, 15633], [15505, 15634, 15633], [15505, 15506, 15635], [15505, 15635, 15634], [15506, 15507, 15635], [15507, 15636, 15635], [15507, 15508, 15637], [15507, 15637, 15636], [15508, 15509, 15637], [15509, 15638, 15637], [15509, 15510, 15639], [15509, 15639, 15638], [15510, 15511, 15639], [15511, 15640, 15639], [15511, 15512, 15641], [15511, 15641, 15640], [15512, 15513, 15641], [15513, 15642, 15641], [15513, 15514, 15643], [15513, 15643, 15642], [15514, 15515, 15643], [15515, 15644, 15643], [15515, 15516, 15645], [15515, 15645, 15644], [15516, 15517, 15645], [15517, 15646, 15645], [15517, 15518, 15647], [15517, 15647, 15646], [15518, 15519, 15647], [15519, 15648, 15647], [15519, 15520, 15649], [15519, 15649, 15648], [15520, 15521, 15649], [15521, 15650, 15649], [15521, 15522, 15651], [15521, 15651, 15650], [15522, 15523, 15651], [15523, 15652, 15651], [15523, 15524, 15653], [15523, 15653, 15652], [15524, 15525, 15653], [15525, 15654, 15653], [15526, 15527, 15655], [15527, 15656, 15655], [15527, 15528, 15657], [15527, 15657, 15656], [15528, 15529, 15657], [15529, 15658, 15657], [15529, 15530, 15659], [15529, 15659, 15658], [15530, 15531, 15659], [15531, 15660, 15659], [15531, 15532, 15661], [15531, 15661, 15660], [15532, 15533, 15661], [15533, 15662, 15661], [15533, 15534, 15663], [15533, 15663, 15662], [15534, 15535, 15663], [15535, 15664, 15663], [15535, 
15536, 15665], [15535, 15665, 15664], [15536, 15537, 15665], [15537, 15666, 15665], [15537, 15538, 15667], [15537, 15667, 15666], [15538, 15539, 15667], [15539, 15668, 15667], [15539, 15540, 15669], [15539, 15669, 15668], [15540, 15541, 15669], [15541, 15670, 15669], [15541, 15542, 15671], [15541, 15671, 15670], [15542, 15543, 15671], [15543, 15672, 15671], [15543, 15544, 15673], [15543, 15673, 15672], [15544, 15545, 15673], [15545, 15674, 15673], [15545, 15546, 15675], [15545, 15675, 15674], [15546, 15547, 15675], [15547, 15676, 15675], [15547, 15548, 15677], [15547, 15677, 15676], [15548, 15549, 15677], [15549, 15678, 15677], [15549, 15550, 15679], [15549, 15679, 15678], [15550, 15551, 15679], [15551, 15680, 15679], [15551, 15552, 15681], [15551, 15681, 15680], [15552, 15553, 15681], [15553, 15682, 15681], [15553, 15554, 15683], [15553, 15683, 15682], [15554, 15555, 15683], [15555, 15684, 15683], [15555, 15556, 15685], [15555, 15685, 15684], [15556, 15557, 15685], [15557, 15686, 15685], [15557, 15558, 15687], [15557, 15687, 15686], [15558, 15559, 15687], [15559, 15688, 15687], [15559, 15560, 15689], [15559, 15689, 15688], [15560, 15561, 15689], [15561, 15690, 15689], [15561, 15562, 15691], [15561, 15691, 15690], [15562, 15563, 15691], [15563, 15692, 15691], [15563, 15564, 15693], [15563, 15693, 15692], [15564, 15565, 15693], [15565, 15694, 15693], [15565, 15566, 15695], [15565, 15695, 15694], [15566, 15567, 15695], [15567, 15696, 15695], [15567, 15568, 15697], [15567, 15697, 15696], [15568, 15569, 15697], [15569, 15698, 15697], [15569, 15570, 15699], [15569, 15699, 15698], [15570, 15571, 15699], [15571, 15700, 15699], [15571, 15572, 15701], [15571, 15701, 15700], [15572, 15573, 15701], [15573, 15702, 15701], [15573, 15574, 15703], [15573, 15703, 15702], [15574, 15575, 15703], [15575, 15704, 15703], [15575, 15576, 15705], [15575, 15705, 15704], [15576, 15577, 15705], [15577, 15706, 15705], [15577, 15578, 15707], [15577, 15707, 15706], [15578, 15579, 15707], [15579, 15708, 15707], [15579, 15580, 15709], [15579, 15709, 15708], [15580, 15581, 15709], [15581, 15710, 15709], [15581, 15582, 15711], [15581, 15711, 15710], [15582, 15583, 15711], [15583, 15712, 15711], [15583, 15584, 15713], [15583, 15713, 15712], [15584, 15585, 15713], [15585, 15714, 15713], [15585, 15586, 15715], [15585, 15715, 15714], [15586, 15587, 15715], [15587, 15716, 15715], [15587, 15588, 15717], [15587, 15717, 15716], [15588, 15589, 15717], [15589, 15718, 15717], [15589, 15590, 15719], [15589, 15719, 15718], [15590, 15591, 15719], [15591, 15720, 15719], [15591, 15592, 15721], [15591, 15721, 15720], [15592, 15593, 15721], [15593, 15722, 15721], [15593, 15594, 15723], [15593, 15723, 15722], [15594, 15595, 15723], [15595, 15724, 15723], [15595, 15596, 15725], [15595, 15725, 15724], [15596, 15597, 15725], [15597, 15726, 15725], [15597, 15598, 15727], [15597, 15727, 15726], [15598, 15599, 15727], [15599, 15728, 15727], [15599, 15600, 15729], [15599, 15729, 15728], [15600, 15601, 15729], [15601, 15730, 15729], [15601, 15602, 15731], [15601, 15731, 15730], [15602, 15603, 15731], [15603, 15732, 15731], [15603, 15604, 15733], [15603, 15733, 15732], [15604, 15605, 15733], [15605, 15734, 15733], [15605, 15606, 15735], [15605, 15735, 15734], [15606, 15607, 15735], [15607, 15736, 15735], [15607, 15608, 15737], [15607, 15737, 15736], [15608, 15609, 15737], [15609, 15738, 15737], [15609, 15610, 15739], [15609, 15739, 15738], [15610, 15611, 15739], [15611, 15740, 15739], [15611, 15612, 15741], [15611, 15741, 15740], [15612, 15613, 
15741], [15613, 15742, 15741], [15613, 15614, 15743], [15613, 15743, 15742], [15614, 15615, 15743], [15615, 15744, 15743], [15615, 15616, 15745], [15615, 15745, 15744], [15616, 15617, 15745], [15617, 15746, 15745], [15617, 15618, 15747], [15617, 15747, 15746], [15618, 15619, 15747], [15619, 15748, 15747], [15619, 15620, 15749], [15619, 15749, 15748], [15620, 15621, 15749], [15621, 15750, 15749], [15621, 15622, 15751], [15621, 15751, 15750], [15622, 15623, 15751], [15623, 15752, 15751], [15623, 15624, 15753], [15623, 15753, 15752], [15624, 15625, 15753], [15625, 15754, 15753], [15625, 15626, 15755], [15625, 15755, 15754], [15626, 15627, 15755], [15627, 15756, 15755], [15627, 15628, 15757], [15627, 15757, 15756], [15628, 15629, 15757], [15629, 15758, 15757], [15629, 15630, 15759], [15629, 15759, 15758], [15630, 15631, 15759], [15631, 15760, 15759], [15631, 15632, 15761], [15631, 15761, 15760], [15632, 15633, 15761], [15633, 15762, 15761], [15633, 15634, 15763], [15633, 15763, 15762], [15634, 15635, 15763], [15635, 15764, 15763], [15635, 15636, 15765], [15635, 15765, 15764], [15636, 15637, 15765], [15637, 15766, 15765], [15637, 15638, 15767], [15637, 15767, 15766], [15638, 15639, 15767], [15639, 15768, 15767], [15639, 15640, 15769], [15639, 15769, 15768], [15640, 15641, 15769], [15641, 15770, 15769], [15641, 15642, 15771], [15641, 15771, 15770], [15642, 15643, 15771], [15643, 15772, 15771], [15643, 15644, 15773], [15643, 15773, 15772], [15644, 15645, 15773], [15645, 15774, 15773], [15645, 15646, 15775], [15645, 15775, 15774], [15646, 15647, 15775], [15647, 15776, 15775], [15647, 15648, 15777], [15647, 15777, 15776], [15648, 15649, 15777], [15649, 15778, 15777], [15649, 15650, 15779], [15649, 15779, 15778], [15650, 15651, 15779], [15651, 15780, 15779], [15651, 15652, 15781], [15651, 15781, 15780], [15652, 15653, 15781], [15653, 15782, 15781], [15653, 15654, 15783], [15653, 15783, 15782], [15655, 15656, 15785], [15655, 15785, 15784], [15656, 15657, 15785], [15657, 15786, 15785], [15657, 15658, 15787], [15657, 15787, 15786], [15658, 15659, 15787], [15659, 15788, 15787], [15659, 15660, 15789], [15659, 15789, 15788], [15660, 15661, 15789], [15661, 15790, 15789], [15661, 15662, 15791], [15661, 15791, 15790], [15662, 15663, 15791], [15663, 15792, 15791], [15663, 15664, 15793], [15663, 15793, 15792], [15664, 15665, 15793], [15665, 15794, 15793], [15665, 15666, 15795], [15665, 15795, 15794], [15666, 15667, 15795], [15667, 15796, 15795], [15667, 15668, 15797], [15667, 15797, 15796], [15668, 15669, 15797], [15669, 15798, 15797], [15669, 15670, 15799], [15669, 15799, 15798], [15670, 15671, 15799], [15671, 15800, 15799], [15671, 15672, 15801], [15671, 15801, 15800], [15672, 15673, 15801], [15673, 15802, 15801], [15673, 15674, 15803], [15673, 15803, 15802], [15674, 15675, 15803], [15675, 15804, 15803], [15675, 15676, 15805], [15675, 15805, 15804], [15676, 15677, 15805], [15677, 15806, 15805], [15677, 15678, 15807], [15677, 15807, 15806], [15678, 15679, 15807], [15679, 15808, 15807], [15679, 15680, 15809], [15679, 15809, 15808], [15680, 15681, 15809], [15681, 15810, 15809], [15681, 15682, 15811], [15681, 15811, 15810], [15682, 15683, 15811], [15683, 15812, 15811], [15683, 15684, 15813], [15683, 15813, 15812], [15684, 15685, 15813], [15685, 15814, 15813], [15685, 15686, 15815], [15685, 15815, 15814], [15686, 15687, 15815], [15687, 15816, 15815], [15687, 15688, 15817], [15687, 15817, 15816], [15688, 15689, 15817], [15689, 15818, 15817], [15689, 15690, 15819], [15689, 15819, 15818], [15690, 15691, 15819], 
[15691, 15820, 15819], [15691, 15692, 15821], [15691, 15821, 15820], [15692, 15693, 15821], [15693, 15822, 15821], [15693, 15694, 15823], [15693, 15823, 15822], [15694, 15695, 15823], [15695, 15824, 15823], [15695, 15696, 15825], [15695, 15825, 15824], [15696, 15697, 15825], [15697, 15826, 15825], [15697, 15698, 15827], [15697, 15827, 15826], [15698, 15699, 15827], [15699, 15828, 15827], [15699, 15700, 15829], [15699, 15829, 15828], [15700, 15701, 15829], [15701, 15830, 15829], [15701, 15702, 15831], [15701, 15831, 15830], [15702, 15703, 15831], [15703, 15832, 15831], [15703, 15704, 15833], [15703, 15833, 15832], [15704, 15705, 15833], [15705, 15834, 15833], [15705, 15706, 15835], [15705, 15835, 15834], [15706, 15707, 15835], [15707, 15836, 15835], [15707, 15708, 15837], [15707, 15837, 15836], [15708, 15709, 15837], [15709, 15838, 15837], [15709, 15710, 15839], [15709, 15839, 15838], [15710, 15711, 15839], [15711, 15840, 15839], [15711, 15712, 15841], [15711, 15841, 15840], [15712, 15713, 15841], [15713, 15842, 15841], [15713, 15714, 15843], [15713, 15843, 15842], [15714, 15715, 15843], [15715, 15844, 15843], [15715, 15716, 15845], [15715, 15845, 15844], [15716, 15717, 15845], [15717, 15846, 15845], [15717, 15718, 15847], [15717, 15847, 15846], [15718, 15719, 15847], [15719, 15848, 15847], [15719, 15720, 15849], [15719, 15849, 15848], [15720, 15721, 15849], [15721, 15850, 15849], [15721, 15722, 15851], [15721, 15851, 15850], [15722, 15723, 15851], [15723, 15852, 15851], [15723, 15724, 15853], [15723, 15853, 15852], [15724, 15725, 15853], [15725, 15854, 15853], [15725, 15726, 15855], [15725, 15855, 15854], [15726, 15727, 15855], [15727, 15856, 15855], [15727, 15728, 15857], [15727, 15857, 15856], [15728, 15729, 15857], [15729, 15858, 15857], [15729, 15730, 15859], [15729, 15859, 15858], [15730, 15731, 15859], [15731, 15860, 15859], [15731, 15732, 15861], [15731, 15861, 15860], [15732, 15733, 15861], [15733, 15862, 15861], [15733, 15734, 15863], [15733, 15863, 15862], [15734, 15735, 15863], [15735, 15864, 15863], [15735, 15736, 15865], [15735, 15865, 15864], [15736, 15737, 15865], [15737, 15866, 15865], [15737, 15738, 15867], [15737, 15867, 15866], [15738, 15739, 15867], [15739, 15868, 15867], [15739, 15740, 15869], [15739, 15869, 15868], [15740, 15741, 15869], [15741, 15870, 15869], [15741, 15742, 15871], [15741, 15871, 15870], [15742, 15743, 15871], [15743, 15872, 15871], [15743, 15744, 15873], [15743, 15873, 15872], [15744, 15745, 15873], [15745, 15874, 15873], [15745, 15746, 15875], [15745, 15875, 15874], [15746, 15747, 15875], [15747, 15876, 15875], [15747, 15748, 15877], [15747, 15877, 15876], [15748, 15749, 15877], [15749, 15878, 15877], [15749, 15750, 15879], [15749, 15879, 15878], [15750, 15751, 15879], [15751, 15880, 15879], [15751, 15752, 15881], [15751, 15881, 15880], [15752, 15753, 15881], [15753, 15882, 15881], [15753, 15754, 15883], [15753, 15883, 15882], [15754, 15755, 15883], [15755, 15884, 15883], [15755, 15756, 15885], [15755, 15885, 15884], [15756, 15757, 15885], [15757, 15886, 15885], [15757, 15758, 15887], [15757, 15887, 15886], [15758, 15759, 15887], [15759, 15888, 15887], [15759, 15760, 15889], [15759, 15889, 15888], [15760, 15761, 15889], [15761, 15890, 15889], [15761, 15762, 15891], [15761, 15891, 15890], [15762, 15763, 15891], [15763, 15892, 15891], [15763, 15764, 15893], [15763, 15893, 15892], [15764, 15765, 15893], [15765, 15894, 15893], [15765, 15766, 15895], [15765, 15895, 15894], [15766, 15767, 15895], [15767, 15896, 15895], [15767, 15768, 15897], [15767, 
15897, 15896], [15768, 15769, 15897], [15769, 15898, 15897], [15769, 15770, 15899], [15769, 15899, 15898], [15770, 15771, 15899], [15771, 15900, 15899], [15771, 15772, 15901], [15771, 15901, 15900], [15772, 15773, 15901], [15773, 15902, 15901], [15773, 15774, 15903], [15773, 15903, 15902], [15774, 15775, 15903], [15775, 15904, 15903], [15775, 15776, 15905], [15775, 15905, 15904], [15776, 15777, 15905], [15777, 15906, 15905], [15777, 15778, 15907], [15777, 15907, 15906], [15778, 15779, 15907], [15779, 15908, 15907], [15779, 15780, 15909], [15779, 15909, 15908], [15780, 15781, 15909], [15781, 15910, 15909], [15781, 15782, 15911], [15781, 15911, 15910], [15782, 15783, 15911], [15783, 15912, 15911], [15784, 15785, 15913], [15785, 15914, 15913], [15785, 15786, 15915], [15785, 15915, 15914], [15786, 15787, 15915], [15787, 15916, 15915], [15787, 15788, 15917], [15787, 15917, 15916], [15788, 15789, 15917], [15789, 15918, 15917], [15789, 15790, 15919], [15789, 15919, 15918], [15790, 15791, 15919], [15791, 15920, 15919], [15791, 15792, 15921], [15791, 15921, 15920], [15792, 15793, 15921], [15793, 15922, 15921], [15793, 15794, 15923], [15793, 15923, 15922], [15794, 15795, 15923], [15795, 15924, 15923], [15795, 15796, 15925], [15795, 15925, 15924], [15796, 15797, 15925], [15797, 15926, 15925], [15797, 15798, 15927], [15797, 15927, 15926], [15798, 15799, 15927], [15799, 15928, 15927], [15799, 15800, 15929], [15799, 15929, 15928], [15800, 15801, 15929], [15801, 15930, 15929], [15801, 15802, 15931], [15801, 15931, 15930], [15802, 15803, 15931], [15803, 15932, 15931], [15803, 15804, 15933], [15803, 15933, 15932], [15804, 15805, 15933], [15805, 15934, 15933], [15805, 15806, 15935], [15805, 15935, 15934], [15806, 15807, 15935], [15807, 15936, 15935], [15807, 15808, 15937], [15807, 15937, 15936], [15808, 15809, 15937], [15809, 15938, 15937], [15809, 15810, 15939], [15809, 15939, 15938], [15810, 15811, 15939], [15811, 15940, 15939], [15811, 15812, 15941], [15811, 15941, 15940], [15812, 15813, 15941], [15813, 15942, 15941], [15813, 15814, 15943], [15813, 15943, 15942], [15814, 15815, 15943], [15815, 15944, 15943], [15815, 15816, 15945], [15815, 15945, 15944], [15816, 15817, 15945], [15817, 15946, 15945], [15817, 15818, 15947], [15817, 15947, 15946], [15818, 15819, 15947], [15819, 15948, 15947], [15819, 15820, 15949], [15819, 15949, 15948], [15820, 15821, 15949], [15821, 15950, 15949], [15821, 15822, 15951], [15821, 15951, 15950], [15822, 15823, 15951], [15823, 15952, 15951], [15823, 15824, 15953], [15823, 15953, 15952], [15824, 15825, 15953], [15825, 15954, 15953], [15825, 15826, 15955], [15825, 15955, 15954], [15826, 15827, 15955], [15827, 15956, 15955], [15827, 15828, 15957], [15827, 15957, 15956], [15828, 15829, 15957], [15829, 15958, 15957], [15829, 15830, 15959], [15829, 15959, 15958], [15830, 15831, 15959], [15831, 15960, 15959], [15831, 15832, 15961], [15831, 15961, 15960], [15832, 15833, 15961], [15833, 15962, 15961], [15833, 15834, 15963], [15833, 15963, 15962], [15834, 15835, 15963], [15835, 15964, 15963], [15835, 15836, 15965], [15835, 15965, 15964], [15836, 15837, 15965], [15837, 15966, 15965], [15837, 15838, 15967], [15837, 15967, 15966], [15838, 15839, 15967], [15839, 15968, 15967], [15839, 15840, 15969], [15839, 15969, 15968], [15840, 15841, 15969], [15841, 15970, 15969], [15841, 15842, 15971], [15841, 15971, 15970], [15842, 15843, 15971], [15843, 15972, 15971], [15843, 15844, 15973], [15843, 15973, 15972], [15844, 15845, 15973], [15845, 15974, 15973], [15845, 15846, 15975], [15845, 15975, 
15974], [15846, 15847, 15975], [15847, 15976, 15975], [15847, 15848, 15977], [15847, 15977, 15976], [15848, 15849, 15977], [15849, 15978, 15977], [15849, 15850, 15979], [15849, 15979, 15978], [15850, 15851, 15979], [15851, 15980, 15979], [15851, 15852, 15981], [15851, 15981, 15980], [15852, 15853, 15981], [15853, 15982, 15981], [15853, 15854, 15983], [15853, 15983, 15982], [15854, 15855, 15983], [15855, 15984, 15983], [15855, 15856, 15985], [15855, 15985, 15984], [15856, 15857, 15985], [15857, 15986, 15985], [15857, 15858, 15987], [15857, 15987, 15986], [15858, 15859, 15987], [15859, 15988, 15987], [15859, 15860, 15989], [15859, 15989, 15988], [15860, 15861, 15989], [15861, 15990, 15989], [15861, 15862, 15991], [15861, 15991, 15990], [15862, 15863, 15991], [15863, 15992, 15991], [15863, 15864, 15993], [15863, 15993, 15992], [15864, 15865, 15993], [15865, 15994, 15993], [15865, 15866, 15995], [15865, 15995, 15994], [15866, 15867, 15995], [15867, 15996, 15995], [15867, 15868, 15997], [15867, 15997, 15996], [15868, 15869, 15997], [15869, 15998, 15997], [15869, 15870, 15999], [15869, 15999, 15998], [15870, 15871, 15999], [15871, 16000, 15999], [15871, 15872, 16001], [15871, 16001, 16000], [15872, 15873, 16001], [15873, 16002, 16001], [15873, 15874, 16003], [15873, 16003, 16002], [15874, 15875, 16003], [15875, 16004, 16003], [15875, 15876, 16005], [15875, 16005, 16004], [15876, 15877, 16005], [15877, 16006, 16005], [15877, 15878, 16007], [15877, 16007, 16006], [15878, 15879, 16007], [15879, 16008, 16007], [15879, 15880, 16009], [15879, 16009, 16008], [15880, 15881, 16009], [15881, 16010, 16009], [15881, 15882, 16011], [15881, 16011, 16010], [15882, 15883, 16011], [15883, 16012, 16011], [15883, 15884, 16013], [15883, 16013, 16012], [15884, 15885, 16013], [15885, 16014, 16013], [15885, 15886, 16015], [15885, 16015, 16014], [15886, 15887, 16015], [15887, 16016, 16015], [15887, 15888, 16017], [15887, 16017, 16016], [15888, 15889, 16017], [15889, 16018, 16017], [15889, 15890, 16019], [15889, 16019, 16018], [15890, 15891, 16019], [15891, 16020, 16019], [15891, 15892, 16021], [15891, 16021, 16020], [15892, 15893, 16021], [15893, 16022, 16021], [15893, 15894, 16023], [15893, 16023, 16022], [15894, 15895, 16023], [15895, 16024, 16023], [15895, 15896, 16025], [15895, 16025, 16024], [15896, 15897, 16025], [15897, 16026, 16025], [15897, 15898, 16027], [15897, 16027, 16026], [15898, 15899, 16027], [15899, 16028, 16027], [15899, 15900, 16029], [15899, 16029, 16028], [15900, 15901, 16029], [15901, 16030, 16029], [15901, 15902, 16031], [15901, 16031, 16030], [15902, 15903, 16031], [15903, 16032, 16031], [15903, 15904, 16033], [15903, 16033, 16032], [15904, 15905, 16033], [15905, 16034, 16033], [15905, 15906, 16035], [15905, 16035, 16034], [15906, 15907, 16035], [15907, 16036, 16035], [15907, 15908, 16037], [15907, 16037, 16036], [15908, 15909, 16037], [15909, 16038, 16037], [15909, 15910, 16039], [15909, 16039, 16038], [15910, 15911, 16039], [15911, 16040, 16039], [15911, 15912, 16041], [15911, 16041, 16040], [15913, 15914, 16043], [15913, 16043, 16042], [15914, 15915, 16043], [15915, 16044, 16043], [15915, 15916, 16045], [15915, 16045, 16044], [15916, 15917, 16045], [15917, 16046, 16045], [15917, 15918, 16047], [15917, 16047, 16046], [15918, 15919, 16047], [15919, 16048, 16047], [15919, 15920, 16049], [15919, 16049, 16048], [15920, 15921, 16049], [15921, 16050, 16049], [15921, 15922, 16051], [15921, 16051, 16050], [15922, 15923, 16051], [15923, 16052, 16051], [15923, 15924, 16053], [15923, 16053, 16052], 
[15924, 15925, 16053], [15925, 16054, 16053], [15925, 15926, 16055], [15925, 16055, 16054], [15926, 15927, 16055], [15927, 16056, 16055], [15927, 15928, 16057], [15927, 16057, 16056], [15928, 15929, 16057], [15929, 16058, 16057], [15929, 15930, 16059], [15929, 16059, 16058], [15930, 15931, 16059], [15931, 16060, 16059], [15931, 15932, 16061], [15931, 16061, 16060], [15932, 15933, 16061], [15933, 16062, 16061], [15933, 15934, 16063], [15933, 16063, 16062], [15934, 15935, 16063], [15935, 16064, 16063], [15935, 15936, 16065], [15935, 16065, 16064], [15936, 15937, 16065], [15937, 16066, 16065], [15937, 15938, 16067], [15937, 16067, 16066], [15938, 15939, 16067], [15939, 16068, 16067], [15939, 15940, 16069], [15939, 16069, 16068], [15940, 15941, 16069], [15941, 16070, 16069], [15941, 15942, 16071], [15941, 16071, 16070], [15942, 15943, 16071], [15943, 16072, 16071], [15943, 15944, 16073], [15943, 16073, 16072], [15944, 15945, 16073], [15945, 16074, 16073], [15945, 15946, 16075], [15945, 16075, 16074], [15946, 15947, 16075], [15947, 16076, 16075], [15947, 15948, 16077], [15947, 16077, 16076], [15948, 15949, 16077], [15949, 16078, 16077], [15949, 15950, 16079], [15949, 16079, 16078], [15950, 15951, 16079], [15951, 16080, 16079], [15951, 15952, 16081], [15951, 16081, 16080], [15952, 15953, 16081], [15953, 16082, 16081], [15953, 15954, 16083], [15953, 16083, 16082], [15954, 15955, 16083], [15955, 16084, 16083], [15955, 15956, 16085], [15955, 16085, 16084], [15956, 15957, 16085], [15957, 16086, 16085], [15957, 15958, 16087], [15957, 16087, 16086], [15958, 15959, 16087], [15959, 16088, 16087], [15959, 15960, 16089], [15959, 16089, 16088], [15960, 15961, 16089], [15961, 16090, 16089], [15961, 15962, 16091], [15961, 16091, 16090], [15962, 15963, 16091], [15963, 16092, 16091], [15963, 15964, 16093], [15963, 16093, 16092], [15964, 15965, 16093], [15965, 16094, 16093], [15965, 15966, 16095], [15965, 16095, 16094], [15966, 15967, 16095], [15967, 16096, 16095], [15967, 15968, 16097], [15967, 16097, 16096], [15968, 15969, 16097], [15969, 16098, 16097], [15969, 15970, 16099], [15969, 16099, 16098], [15970, 15971, 16099], [15971, 16100, 16099], [15971, 15972, 16101], [15971, 16101, 16100], [15972, 15973, 16101], [15973, 16102, 16101], [15973, 15974, 16103], [15973, 16103, 16102], [15974, 15975, 16103], [15975, 16104, 16103], [15975, 15976, 16105], [15975, 16105, 16104], [15976, 15977, 16105], [15977, 16106, 16105], [15977, 15978, 16107], [15977, 16107, 16106], [15978, 15979, 16107], [15979, 16108, 16107], [15979, 15980, 16109], [15979, 16109, 16108], [15980, 15981, 16109], [15981, 16110, 16109], [15981, 15982, 16111], [15981, 16111, 16110], [15982, 15983, 16111], [15983, 16112, 16111], [15983, 15984, 16113], [15983, 16113, 16112], [15984, 15985, 16113], [15985, 16114, 16113], [15985, 15986, 16115], [15985, 16115, 16114], [15986, 15987, 16115], [15987, 16116, 16115], [15987, 15988, 16117], [15987, 16117, 16116], [15988, 15989, 16117], [15989, 16118, 16117], [15989, 15990, 16119], [15989, 16119, 16118], [15990, 15991, 16119], [15991, 16120, 16119], [15991, 15992, 16121], [15991, 16121, 16120], [15992, 15993, 16121], [15993, 16122, 16121], [15993, 15994, 16123], [15993, 16123, 16122], [15994, 15995, 16123], [15995, 16124, 16123], [15995, 15996, 16125], [15995, 16125, 16124], [15996, 15997, 16125], [15997, 16126, 16125], [15997, 15998, 16127], [15997, 16127, 16126], [15998, 15999, 16127], [15999, 16128, 16127], [15999, 16000, 16129], [15999, 16129, 16128], [16000, 16001, 16129], [16001, 16130, 16129], [16001, 
16002, 16131], [16001, 16131, 16130], [16002, 16003, 16131], [16003, 16132, 16131], [16003, 16004, 16133], [16003, 16133, 16132], [16004, 16005, 16133], [16005, 16134, 16133], [16005, 16006, 16135], [16005, 16135, 16134], [16006, 16007, 16135], [16007, 16136, 16135], [16007, 16008, 16137], [16007, 16137, 16136], [16008, 16009, 16137], [16009, 16138, 16137], [16009, 16010, 16139], [16009, 16139, 16138], [16010, 16011, 16139], [16011, 16140, 16139], [16011, 16012, 16141], [16011, 16141, 16140], [16012, 16013, 16141], [16013, 16142, 16141], [16013, 16014, 16143], [16013, 16143, 16142], [16014, 16015, 16143], [16015, 16144, 16143], [16015, 16016, 16145], [16015, 16145, 16144], [16016, 16017, 16145], [16017, 16146, 16145], [16017, 16018, 16147], [16017, 16147, 16146], [16018, 16019, 16147], [16019, 16148, 16147], [16019, 16020, 16149], [16019, 16149, 16148], [16020, 16021, 16149], [16021, 16150, 16149], [16021, 16022, 16151], [16021, 16151, 16150], [16022, 16023, 16151], [16023, 16152, 16151], [16023, 16024, 16153], [16023, 16153, 16152], [16024, 16025, 16153], [16025, 16154, 16153], [16025, 16026, 16155], [16025, 16155, 16154], [16026, 16027, 16155], [16027, 16156, 16155], [16027, 16028, 16157], [16027, 16157, 16156], [16028, 16029, 16157], [16029, 16158, 16157], [16029, 16030, 16159], [16029, 16159, 16158], [16030, 16031, 16159], [16031, 16160, 16159], [16031, 16032, 16161], [16031, 16161, 16160], [16032, 16033, 16161], [16033, 16162, 16161], [16033, 16034, 16163], [16033, 16163, 16162], [16034, 16035, 16163], [16035, 16164, 16163], [16035, 16036, 16165], [16035, 16165, 16164], [16036, 16037, 16165], [16037, 16166, 16165], [16037, 16038, 16167], [16037, 16167, 16166], [16038, 16039, 16167], [16039, 16168, 16167], [16039, 16040, 16169], [16039, 16169, 16168], [16040, 16041, 16169], [16041, 16170, 16169], [16042, 16043, 19267], [16043, 19268, 19267], [16043, 16044, 19269], [16043, 19269, 19268], [16044, 16045, 19269], [16045, 19270, 19269], [16045, 16046, 19271], [16045, 19271, 19270], [16046, 16047, 19271], [16047, 19272, 19271], [16047, 16048, 19273], [16047, 19273, 19272], [16048, 16049, 19273], [16049, 19274, 19273], [16049, 16050, 19275], [16049, 19275, 19274], [16050, 16051, 19275], [16051, 19276, 19275], [16051, 16052, 19277], [16051, 19277, 19276], [16052, 16053, 19277], [16053, 19278, 19277], [16053, 16054, 19279], [16053, 19279, 19278], [16054, 16055, 19279], [16055, 19280, 19279], [16055, 16056, 19281], [16055, 19281, 19280], [16056, 16057, 19281], [16057, 19282, 19281], [16057, 16058, 19283], [16057, 19283, 19282], [16058, 16059, 19283], [16059, 19284, 19283], [16059, 16060, 19285], [16059, 19285, 19284], [16060, 16061, 19285], [16061, 19286, 19285], [16061, 16062, 19287], [16061, 19287, 19286], [16062, 16063, 19287], [16063, 19288, 19287], [16063, 16064, 19289], [16063, 19289, 19288], [16064, 16065, 19289], [16065, 19290, 19289], [16065, 16066, 19291], [16065, 19291, 19290], [16066, 16067, 19291], [16067, 19292, 19291], [16067, 16068, 19293], [16067, 19293, 19292], [16068, 16069, 19293], [16069, 19294, 19293], [16069, 16070, 19295], [16069, 19295, 19294], [16070, 16071, 19295], [16071, 19296, 19295], [16071, 16072, 19297], [16071, 19297, 19296], [16072, 16073, 19297], [16073, 19298, 19297], [16073, 16074, 19299], [16073, 19299, 19298], [16074, 16075, 19299], [16075, 19300, 19299], [16075, 16076, 19301], [16075, 19301, 19300], [16076, 16077, 19301], [16077, 19302, 19301], [16077, 16078, 19303], [16077, 19303, 19302], [16078, 16079, 19303], [16079, 19304, 19303], [16079, 16080, 
19305], [16079, 19305, 19304], [16080, 16081, 19305], [16081, 19306, 19305], [16081, 16082, 19307], [16081, 19307, 19306], [16082, 16083, 19307], [16083, 19308, 19307], [16083, 16084, 19309], [16083, 19309, 19308], [16084, 16085, 19309], [16085, 19310, 19309], [16085, 16086, 19311], [16085, 19311, 19310], [16086, 16087, 19311], [16087, 19312, 19311], [16087, 16088, 19313], [16087, 19313, 19312], [16088, 16089, 19313], [16089, 19314, 19313], [16089, 16090, 19315], [16089, 19315, 19314], [16090, 16091, 19315], [16091, 19316, 19315], [16091, 16092, 19317], [16091, 19317, 19316], [16092, 16093, 19317], [16093, 19318, 19317], [16093, 16094, 19319], [16093, 19319, 19318], [16094, 16095, 19319], [16095, 19320, 19319], [16095, 16096, 19321], [16095, 19321, 19320], [16096, 16097, 19321], [16097, 19322, 19321], [16097, 16098, 19323], [16097, 19323, 19322], [16098, 16099, 19323], [16099, 19324, 19323], [16099, 16100, 19325], [16099, 19325, 19324], [16100, 16101, 19325], [16101, 19326, 19325], [16101, 16102, 19327], [16101, 19327, 19326], [16102, 16103, 19327], [16103, 19328, 19327], [16103, 16104, 19329], [16103, 19329, 19328], [16104, 16105, 19329], [16105, 19330, 19329], [16105, 16106, 19331], [16105, 19331, 19330], [16106, 16107, 19331], [16107, 19332, 19331], [16107, 16108, 19333], [16107, 19333, 19332], [16108, 16109, 19333], [16109, 19334, 19333], [16109, 16110, 19335], [16109, 19335, 19334], [16110, 16111, 19335], [16111, 19336, 19335], [16111, 16112, 19337], [16111, 19337, 19336], [16112, 16113, 19337], [16113, 19338, 19337], [16113, 16114, 19339], [16113, 19339, 19338], [16114, 16115, 19339], [16115, 19340, 19339], [16115, 16116, 19341], [16115, 19341, 19340], [16116, 16117, 19341], [16117, 19342, 19341], [16117, 16118, 19343], [16117, 19343, 19342], [16118, 16119, 19343], [16119, 19344, 19343], [16119, 16120, 19345], [16119, 19345, 19344], [16120, 16121, 19345], [16121, 19346, 19345], [16121, 16122, 19347], [16121, 19347, 19346], [16122, 16123, 19347], [16123, 19348, 19347], [16123, 16124, 19349], [16123, 19349, 19348], [16124, 16125, 19349], [16125, 19350, 19349], [16125, 16126, 19351], [16125, 19351, 19350], [16126, 16127, 19351], [16127, 19352, 19351], [16127, 16128, 19353], [16127, 19353, 19352], [16128, 16129, 19353], [16129, 19354, 19353], [16129, 16130, 19355], [16129, 19355, 19354], [16130, 16131, 19355], [16131, 19356, 19355], [16131, 16132, 19357], [16131, 19357, 19356], [16132, 16133, 19357], [16133, 19358, 19357], [16133, 16134, 19359], [16133, 19359, 19358], [16134, 16135, 19359], [16135, 19360, 19359], [16135, 16136, 19361], [16135, 19361, 19360], [16136, 16137, 19361], [16137, 19362, 19361], [16137, 16138, 19363], [16137, 19363, 19362], [16138, 16139, 19363], [16139, 19364, 19363], [16139, 16140, 19365], [16139, 19365, 19364], [16140, 16141, 19365], [16141, 19366, 19365], [16141, 16142, 19367], [16141, 19367, 19366], [16142, 16143, 19367], [16143, 19368, 19367], [16143, 16144, 19369], [16143, 19369, 19368], [16144, 16145, 19369], [16145, 19370, 19369], [16145, 16146, 19371], [16145, 19371, 19370], [16146, 16147, 19371], [16147, 19372, 19371], [16147, 16148, 19373], [16147, 19373, 19372], [16148, 16149, 19373], [16149, 19374, 19373], [16149, 16150, 19375], [16149, 19375, 19374], [16150, 16151, 19375], [16151, 19376, 19375], [16151, 16152, 19377], [16151, 19377, 19376], [16152, 16153, 19377], [16153, 19378, 19377], [16153, 16154, 19379], [16153, 19379, 19378], [16154, 16155, 19379], [16155, 19380, 19379], [16155, 16156, 19381], [16155, 19381, 19380], [16156, 16157, 19381], 
[16157, 19382, 19381], [16157, 16158, 19383], [16157, 19383, 19382], [16158, 16159, 19383], [16159, 19384, 19383], [16159, 16160, 19385], [16159, 19385, 19384], [16160, 16161, 19385], [16161, 19386, 19385], [16161, 16162, 19387], [16161, 19387, 19386], [16162, 16163, 19387], [16163, 19388, 19387], [16163, 16164, 19389], [16163, 19389, 19388], [16164, 16165, 19389], [16165, 19390, 19389], [16165, 16166, 19391], [16165, 19391, 19390], [16166, 16167, 19391], [16167, 19392, 19391], [16167, 16168, 19393], [16167, 19393, 19392], [16168, 16169, 19393], [16169, 19394, 19393], [16169, 16170, 19395], [16169, 19395, 19394], [16171, 16172, 16301], [16171, 16301, 16300], [16172, 16173, 16301], [16173, 16302, 16301], [16173, 16174, 16303], [16173, 16303, 16302], [16174, 16175, 16303], [16175, 16304, 16303], [16175, 16176, 16305], [16175, 16305, 16304], [16176, 16177, 16305], [16177, 16306, 16305], [16177, 16178, 16307], [16177, 16307, 16306], [16178, 16179, 16307], [16179, 16308, 16307], [16179, 16180, 16309], [16179, 16309, 16308], [16180, 16181, 16309], [16181, 16310, 16309], [16181, 16182, 16311], [16181, 16311, 16310], [16182, 16183, 16311], [16183, 16312, 16311], [16183, 16184, 16313], [16183, 16313, 16312], [16184, 16185, 16313], [16185, 16314, 16313], [16185, 16186, 16315], [16185, 16315, 16314], [16186, 16187, 16315], [16187, 16316, 16315], [16187, 16188, 16317], [16187, 16317, 16316], [16188, 16189, 16317], [16189, 16318, 16317], [16189, 16190, 16319], [16189, 16319, 16318], [16190, 16191, 16319], [16191, 16320, 16319], [16191, 16192, 16321], [16191, 16321, 16320], [16192, 16193, 16321], [16193, 16322, 16321], [16193, 16194, 16323], [16193, 16323, 16322], [16194, 16195, 16323], [16195, 16324, 16323], [16195, 16196, 16325], [16195, 16325, 16324], [16196, 16197, 16325], [16197, 16326, 16325], [16197, 16198, 16327], [16197, 16327, 16326], [16198, 16199, 16327], [16199, 16328, 16327], [16199, 16200, 16329], [16199, 16329, 16328], [16200, 16201, 16329], [16201, 16330, 16329], [16201, 16202, 16331], [16201, 16331, 16330], [16202, 16203, 16331], [16203, 16332, 16331], [16203, 16204, 16333], [16203, 16333, 16332], [16204, 16205, 16333], [16205, 16334, 16333], [16205, 16206, 16335], [16205, 16335, 16334], [16206, 16207, 16335], [16207, 16336, 16335], [16207, 16208, 16337], [16207, 16337, 16336], [16208, 16209, 16337], [16209, 16338, 16337], [16209, 16210, 16339], [16209, 16339, 16338], [16210, 16211, 16339], [16211, 16340, 16339], [16211, 16212, 16341], [16211, 16341, 16340], [16212, 16213, 16341], [16213, 16342, 16341], [16213, 16214, 16343], [16213, 16343, 16342], [16214, 16215, 16343], [16215, 16344, 16343], [16215, 16216, 16345], [16215, 16345, 16344], [16216, 16217, 16345], [16217, 16346, 16345], [16217, 16218, 16347], [16217, 16347, 16346], [16218, 16219, 16347], [16219, 16348, 16347], [16219, 16220, 16349], [16219, 16349, 16348], [16220, 16221, 16349], [16221, 16350, 16349], [16221, 16222, 16351], [16221, 16351, 16350], [16222, 16223, 16351], [16223, 16352, 16351], [16223, 16224, 16353], [16223, 16353, 16352], [16224, 16225, 16353], [16225, 16354, 16353], [16225, 16226, 16355], [16225, 16355, 16354], [16226, 16227, 16355], [16227, 16356, 16355], [16227, 16228, 16357], [16227, 16357, 16356], [16228, 16229, 16357], [16229, 16358, 16357], [16229, 16230, 16359], [16229, 16359, 16358], [16230, 16231, 16359], [16231, 16360, 16359], [16231, 16232, 16361], [16231, 16361, 16360], [16232, 16233, 16361], [16233, 16362, 16361], [16233, 16234, 16363], [16233, 16363, 16362], [16234, 16235, 16363], [16235, 
16364, 16363], [16235, 16236, 16365], [16235, 16365, 16364], [16236, 16237, 16365], [16237, 16366, 16365], [16237, 16238, 16367], [16237, 16367, 16366], [16238, 16239, 16367], [16239, 16368, 16367], [16239, 16240, 16369], [16239, 16369, 16368], [16240, 16241, 16369], [16241, 16370, 16369], [16241, 16242, 16371], [16241, 16371, 16370], [16242, 16243, 16371], [16243, 16372, 16371], [16243, 16244, 16373], [16243, 16373, 16372], [16244, 16245, 16373], [16245, 16374, 16373], [16245, 16246, 16375], [16245, 16375, 16374], [16246, 16247, 16375], [16247, 16376, 16375], [16247, 16248, 16377], [16247, 16377, 16376], [16248, 16249, 16377], [16249, 16378, 16377], [16249, 16250, 16379], [16249, 16379, 16378], [16250, 16251, 16379], [16251, 16380, 16379], [16251, 16252, 16381], [16251, 16381, 16380], [16252, 16253, 16381], [16253, 16382, 16381], [16253, 16254, 16383], [16253, 16383, 16382], [16254, 16255, 16383], [16255, 16384, 16383], [16255, 16256, 16385], [16255, 16385, 16384], [16256, 16257, 16385], [16257, 16386, 16385], [16257, 16258, 16387], [16257, 16387, 16386], [16258, 16259, 16387], [16259, 16388, 16387], [16259, 16260, 16389], [16259, 16389, 16388], [16260, 16261, 16389], [16261, 16390, 16389], [16261, 16262, 16391], [16261, 16391, 16390], [16262, 16263, 16391], [16263, 16392, 16391], [16263, 16264, 16393], [16263, 16393, 16392], [16264, 16265, 16393], [16265, 16394, 16393], [16265, 16266, 16395], [16265, 16395, 16394], [16266, 16267, 16395], [16267, 16396, 16395], [16267, 16268, 16397], [16267, 16397, 16396], [16268, 16269, 16397], [16269, 16398, 16397], [16269, 16270, 16399], [16269, 16399, 16398], [16270, 16271, 16399], [16271, 16400, 16399], [16271, 16272, 16401], [16271, 16401, 16400], [16272, 16273, 16401], [16273, 16402, 16401], [16273, 16274, 16403], [16273, 16403, 16402], [16274, 16275, 16403], [16275, 16404, 16403], [16275, 16276, 16405], [16275, 16405, 16404], [16276, 16277, 16405], [16277, 16406, 16405], [16277, 16278, 16407], [16277, 16407, 16406], [16278, 16279, 16407], [16279, 16408, 16407], [16279, 16280, 16409], [16279, 16409, 16408], [16280, 16281, 16409], [16281, 16410, 16409], [16281, 16282, 16411], [16281, 16411, 16410], [16282, 16283, 16411], [16283, 16412, 16411], [16283, 16284, 16413], [16283, 16413, 16412], [16284, 16285, 16413], [16285, 16414, 16413], [16285, 16286, 16415], [16285, 16415, 16414], [16286, 16287, 16415], [16287, 16416, 16415], [16287, 16288, 16417], [16287, 16417, 16416], [16288, 16289, 16417], [16289, 16418, 16417], [16289, 16290, 16419], [16289, 16419, 16418], [16290, 16291, 16419], [16291, 16420, 16419], [16291, 16292, 16421], [16291, 16421, 16420], [16292, 16293, 16421], [16293, 16422, 16421], [16293, 16294, 16423], [16293, 16423, 16422], [16294, 16295, 16423], [16295, 16424, 16423], [16295, 16296, 16425], [16295, 16425, 16424], [16296, 16297, 16425], [16297, 16426, 16425], [16297, 16298, 16427], [16297, 16427, 16426], [16298, 16299, 16427], [16299, 16428, 16427], [16300, 16301, 16429], [16301, 16430, 16429], [16301, 16302, 16431], [16301, 16431, 16430], [16302, 16303, 16431], [16303, 16432, 16431], [16303, 16304, 16433], [16303, 16433, 16432], [16304, 16305, 16433], [16305, 16434, 16433], [16305, 16306, 16435], [16305, 16435, 16434], [16306, 16307, 16435], [16307, 16436, 16435], [16307, 16308, 16437], [16307, 16437, 16436], [16308, 16309, 16437], [16309, 16438, 16437], [16309, 16310, 16439], [16309, 16439, 16438], [16310, 16311, 16439], [16311, 16440, 16439], [16311, 16312, 16441], [16311, 16441, 16440], [16312, 16313, 16441], [16313, 16442, 
16441], [16313, 16314, 16443], [16313, 16443, 16442], [16314, 16315, 16443], [16315, 16444, 16443], [16315, 16316, 16445], [16315, 16445, 16444], [16316, 16317, 16445], [16317, 16446, 16445], [16317, 16318, 16447], [16317, 16447, 16446], [16318, 16319, 16447], [16319, 16448, 16447], [16319, 16320, 16449], [16319, 16449, 16448], [16320, 16321, 16449], [16321, 16450, 16449], [16321, 16322, 16451], [16321, 16451, 16450], [16322, 16323, 16451], [16323, 16452, 16451], [16323, 16324, 16453], [16323, 16453, 16452], [16324, 16325, 16453], [16325, 16454, 16453], [16325, 16326, 16455], [16325, 16455, 16454], [16326, 16327, 16455], [16327, 16456, 16455], [16327, 16328, 16457], [16327, 16457, 16456], [16328, 16329, 16457], [16329, 16458, 16457], [16329, 16330, 16459], [16329, 16459, 16458], [16330, 16331, 16459], [16331, 16460, 16459], [16331, 16332, 16461], [16331, 16461, 16460], [16332, 16333, 16461], [16333, 16462, 16461], [16333, 16334, 16463], [16333, 16463, 16462], [16334, 16335, 16463], [16335, 16464, 16463], [16335, 16336, 16465], [16335, 16465, 16464], [16336, 16337, 16465], [16337, 16466, 16465], [16337, 16338, 16467], [16337, 16467, 16466], [16338, 16339, 16467], [16339, 16468, 16467], [16339, 16340, 16469], [16339, 16469, 16468], [16340, 16341, 16469], [16341, 16470, 16469], [16341, 16342, 16471], [16341, 16471, 16470], [16342, 16343, 16471], [16343, 16472, 16471], [16343, 16344, 16473], [16343, 16473, 16472], [16344, 16345, 16473], [16345, 16474, 16473], [16345, 16346, 16475], [16345, 16475, 16474], [16346, 16347, 16475], [16347, 16476, 16475], [16347, 16348, 16477], [16347, 16477, 16476], [16348, 16349, 16477], [16349, 16478, 16477], [16349, 16350, 16479], [16349, 16479, 16478], [16350, 16351, 16479], [16351, 16480, 16479], [16351, 16352, 16481], [16351, 16481, 16480], [16352, 16353, 16481], [16353, 16482, 16481], [16353, 16354, 16483], [16353, 16483, 16482], [16354, 16355, 16483], [16355, 16484, 16483], [16355, 16356, 16485], [16355, 16485, 16484], [16356, 16357, 16485], [16357, 16486, 16485], [16357, 16358, 16487], [16357, 16487, 16486], [16358, 16359, 16487], [16359, 16488, 16487], [16359, 16360, 16489], [16359, 16489, 16488], [16360, 16361, 16489], [16361, 16490, 16489], [16361, 16362, 16491], [16361, 16491, 16490], [16362, 16363, 16491], [16363, 16492, 16491], [16363, 16364, 16493], [16363, 16493, 16492], [16364, 16365, 16493], [16365, 16494, 16493], [16365, 16366, 16495], [16365, 16495, 16494], [16366, 16367, 16495], [16367, 16496, 16495], [16367, 16368, 16497], [16367, 16497, 16496], [16368, 16369, 16497], [16369, 16498, 16497], [16369, 16370, 16499], [16369, 16499, 16498], [16370, 16371, 16499], [16371, 16500, 16499], [16371, 16372, 16501], [16371, 16501, 16500], [16372, 16373, 16501], [16373, 16502, 16501], [16373, 16374, 16503], [16373, 16503, 16502], [16374, 16375, 16503], [16375, 16504, 16503], [16375, 16376, 16505], [16375, 16505, 16504], [16376, 16377, 16505], [16377, 16506, 16505], [16377, 16378, 16507], [16377, 16507, 16506], [16378, 16379, 16507], [16379, 16508, 16507], [16379, 16380, 16509], [16379, 16509, 16508], [16380, 16381, 16509], [16381, 16510, 16509], [16381, 16382, 16511], [16381, 16511, 16510], [16382, 16383, 16511], [16383, 16512, 16511], [16383, 16384, 16513], [16383, 16513, 16512], [16384, 16385, 16513], [16385, 16514, 16513], [16385, 16386, 16515], [16385, 16515, 16514], [16386, 16387, 16515], [16387, 16516, 16515], [16387, 16388, 16517], [16387, 16517, 16516], [16388, 16389, 16517], [16389, 16518, 16517], [16389, 16390, 16519], [16389, 16519, 16518], 
[16390, 16391, 16519], [16391, 16520, 16519], [16391, 16392, 16521], [16391, 16521, 16520], [16392, 16393, 16521], [16393, 16522, 16521], [16393, 16394, 16523], [16393, 16523, 16522], [16394, 16395, 16523], [16395, 16524, 16523], [16395, 16396, 16525], [16395, 16525, 16524], [16396, 16397, 16525], [16397, 16526, 16525], [16397, 16398, 16527], [16397, 16527, 16526], [16398, 16399, 16527], [16399, 16528, 16527], [16399, 16400, 16529], [16399, 16529, 16528], [16400, 16401, 16529], [16401, 16530, 16529], [16401, 16402, 16531], [16401, 16531, 16530], [16402, 16403, 16531], [16403, 16532, 16531], [16403, 16404, 16533], [16403, 16533, 16532], [16404, 16405, 16533], [16405, 16534, 16533], [16405, 16406, 16535], [16405, 16535, 16534], [16406, 16407, 16535], [16407, 16536, 16535], [16407, 16408, 16537], [16407, 16537, 16536], [16408, 16409, 16537], [16409, 16538, 16537], [16409, 16410, 16539], [16409, 16539, 16538], [16410, 16411, 16539], [16411, 16540, 16539], [16411, 16412, 16541], [16411, 16541, 16540], [16412, 16413, 16541], [16413, 16542, 16541], [16413, 16414, 16543], [16413, 16543, 16542], [16414, 16415, 16543], [16415, 16544, 16543], [16415, 16416, 16545], [16415, 16545, 16544], [16416, 16417, 16545], [16417, 16546, 16545], [16417, 16418, 16547], [16417, 16547, 16546], [16418, 16419, 16547], [16419, 16548, 16547], [16419, 16420, 16549], [16419, 16549, 16548], [16420, 16421, 16549], [16421, 16550, 16549], [16421, 16422, 16551], [16421, 16551, 16550], [16422, 16423, 16551], [16423, 16552, 16551], [16423, 16424, 16553], [16423, 16553, 16552], [16424, 16425, 16553], [16425, 16554, 16553], [16425, 16426, 16555], [16425, 16555, 16554], [16426, 16427, 16555], [16427, 16556, 16555], [16427, 16428, 16557], [16427, 16557, 16556], [16429, 16430, 16559], [16429, 16559, 16558], [16430, 16431, 16559], [16431, 16560, 16559], [16431, 16432, 16561], [16431, 16561, 16560], [16432, 16433, 16561], [16433, 16562, 16561], [16433, 16434, 16563], [16433, 16563, 16562], [16434, 16435, 16563], [16435, 16564, 16563], [16435, 16436, 16565], [16435, 16565, 16564], [16436, 16437, 16565], [16437, 16566, 16565], [16437, 16438, 16567], [16437, 16567, 16566], [16438, 16439, 16567], [16439, 16568, 16567], [16439, 16440, 16569], [16439, 16569, 16568], [16440, 16441, 16569], [16441, 16570, 16569], [16441, 16442, 16571], [16441, 16571, 16570], [16442, 16443, 16571], [16443, 16572, 16571], [16443, 16444, 16573], [16443, 16573, 16572], [16444, 16445, 16573], [16445, 16574, 16573], [16445, 16446, 16575], [16445, 16575, 16574], [16446, 16447, 16575], [16447, 16576, 16575], [16447, 16448, 16577], [16447, 16577, 16576], [16448, 16449, 16577], [16449, 16578, 16577], [16449, 16450, 16579], [16449, 16579, 16578], [16450, 16451, 16579], [16451, 16580, 16579], [16451, 16452, 16581], [16451, 16581, 16580], [16452, 16453, 16581], [16453, 16582, 16581], [16453, 16454, 16583], [16453, 16583, 16582], [16454, 16455, 16583], [16455, 16584, 16583], [16455, 16456, 16585], [16455, 16585, 16584], [16456, 16457, 16585], [16457, 16586, 16585], [16457, 16458, 16587], [16457, 16587, 16586], [16458, 16459, 16587], [16459, 16588, 16587], [16459, 16460, 16589], [16459, 16589, 16588], [16460, 16461, 16589], [16461, 16590, 16589], [16461, 16462, 16591], [16461, 16591, 16590], [16462, 16463, 16591], [16463, 16592, 16591], [16463, 16464, 16593], [16463, 16593, 16592], [16464, 16465, 16593], [16465, 16594, 16593], [16465, 16466, 16595], [16465, 16595, 16594], [16466, 16467, 16595], [16467, 16596, 16595], [16467, 16468, 16597], [16467, 16597, 16596], [16468, 
16469, 16597], [16469, 16598, 16597], [16469, 16470, 16599], [16469, 16599, 16598], [16470, 16471, 16599], [16471, 16600, 16599], [16471, 16472, 16601], [16471, 16601, 16600], [16472, 16473, 16601], [16473, 16602, 16601], [16473, 16474, 16603], [16473, 16603, 16602], [16474, 16475, 16603], [16475, 16604, 16603], [16475, 16476, 16605], [16475, 16605, 16604], [16476, 16477, 16605], [16477, 16606, 16605], [16477, 16478, 16607], [16477, 16607, 16606], [16478, 16479, 16607], [16479, 16608, 16607], [16479, 16480, 16609], [16479, 16609, 16608], [16480, 16481, 16609], [16481, 16610, 16609], [16481, 16482, 16611], [16481, 16611, 16610], [16482, 16483, 16611], [16483, 16612, 16611], [16483, 16484, 16613], [16483, 16613, 16612], [16484, 16485, 16613], [16485, 16614, 16613], [16485, 16486, 16615], [16485, 16615, 16614], [16486, 16487, 16615], [16487, 16616, 16615], [16487, 16488, 16617], [16487, 16617, 16616], [16488, 16489, 16617], [16489, 16618, 16617], [16489, 16490, 16619], [16489, 16619, 16618], [16490, 16491, 16619], [16491, 16620, 16619], [16491, 16492, 16621], [16491, 16621, 16620], [16492, 16493, 16621], [16493, 16622, 16621], [16493, 16494, 16623], [16493, 16623, 16622], [16494, 16495, 16623], [16495, 16624, 16623], [16495, 16496, 16625], [16495, 16625, 16624], [16496, 16497, 16625], [16497, 16626, 16625], [16497, 16498, 16627], [16497, 16627, 16626], [16498, 16499, 16627], [16499, 16628, 16627], [16499, 16500, 16629], [16499, 16629, 16628], [16500, 16501, 16629], [16501, 16630, 16629], [16501, 16502, 16631], [16501, 16631, 16630], [16502, 16503, 16631], [16503, 16632, 16631], [16503, 16504, 16633], [16503, 16633, 16632], [16504, 16505, 16633], [16505, 16634, 16633], [16505, 16506, 16635], [16505, 16635, 16634], [16506, 16507, 16635], [16507, 16636, 16635], [16507, 16508, 16637], [16507, 16637, 16636], [16508, 16509, 16637], [16509, 16638, 16637], [16509, 16510, 16639], [16509, 16639, 16638], [16510, 16511, 16639], [16511, 16640, 16639], [16511, 16512, 16641], [16511, 16641, 16640], [16512, 16513, 16641], [16513, 16642, 16641], [16513, 16514, 16643], [16513, 16643, 16642], [16514, 16515, 16643], [16515, 16644, 16643], [16515, 16516, 16645], [16515, 16645, 16644], [16516, 16517, 16645], [16517, 16646, 16645], [16517, 16518, 16647], [16517, 16647, 16646], [16518, 16519, 16647], [16519, 16648, 16647], [16519, 16520, 16649], [16519, 16649, 16648], [16520, 16521, 16649], [16521, 16650, 16649], [16521, 16522, 16651], [16521, 16651, 16650], [16522, 16523, 16651], [16523, 16652, 16651], [16523, 16524, 16653], [16523, 16653, 16652], [16524, 16525, 16653], [16525, 16654, 16653], [16525, 16526, 16655], [16525, 16655, 16654], [16526, 16527, 16655], [16527, 16656, 16655], [16527, 16528, 16657], [16527, 16657, 16656], [16528, 16529, 16657], [16529, 16658, 16657], [16529, 16530, 16659], [16529, 16659, 16658], [16530, 16531, 16659], [16531, 16660, 16659], [16531, 16532, 16661], [16531, 16661, 16660], [16532, 16533, 16661], [16533, 16662, 16661], [16533, 16534, 16663], [16533, 16663, 16662], [16534, 16535, 16663], [16535, 16664, 16663], [16535, 16536, 16665], [16535, 16665, 16664], [16536, 16537, 16665], [16537, 16666, 16665], [16537, 16538, 16667], [16537, 16667, 16666], [16538, 16539, 16667], [16539, 16668, 16667], [16539, 16540, 16669], [16539, 16669, 16668], [16540, 16541, 16669], [16541, 16670, 16669], [16541, 16542, 16671], [16541, 16671, 16670], [16542, 16543, 16671], [16543, 16672, 16671], [16543, 16544, 16673], [16543, 16673, 16672], [16544, 16545, 16673], [16545, 16674, 16673], [16545, 16546, 
16675], [16545, 16675, 16674], [16546, 16547, 16675], [16547, 16676, 16675], [16547, 16548, 16677], [16547, 16677, 16676], [16548, 16549, 16677], [16549, 16678, 16677], [16549, 16550, 16679], [16549, 16679, 16678], [16550, 16551, 16679], [16551, 16680, 16679], [16551, 16552, 16681], [16551, 16681, 16680], [16552, 16553, 16681], [16553, 16682, 16681], [16553, 16554, 16683], [16553, 16683, 16682], [16554, 16555, 16683], [16555, 16684, 16683], [16555, 16556, 16685], [16555, 16685, 16684], [16556, 16557, 16685], [16557, 16686, 16685], [16558, 16559, 16687], [16559, 16688, 16687], [16559, 16560, 16689], [16559, 16689, 16688], [16560, 16561, 16689], [16561, 16690, 16689], [16561, 16562, 16691], [16561, 16691, 16690], [16562, 16563, 16691], [16563, 16692, 16691], [16563, 16564, 16693], [16563, 16693, 16692], [16564, 16565, 16693], [16565, 16694, 16693], [16565, 16566, 16695], [16565, 16695, 16694], [16566, 16567, 16695], [16567, 16696, 16695], [16567, 16568, 16697], [16567, 16697, 16696], [16568, 16569, 16697], [16569, 16698, 16697], [16569, 16570, 16699], [16569, 16699, 16698], [16570, 16571, 16699], [16571, 16700, 16699], [16571, 16572, 16701], [16571, 16701, 16700], [16572, 16573, 16701], [16573, 16702, 16701], [16573, 16574, 16703], [16573, 16703, 16702], [16574, 16575, 16703], [16575, 16704, 16703], [16575, 16576, 16705], [16575, 16705, 16704], [16576, 16577, 16705], [16577, 16706, 16705], [16577, 16578, 16707], [16577, 16707, 16706], [16578, 16579, 16707], [16579, 16708, 16707], [16579, 16580, 16709], [16579, 16709, 16708], [16580, 16581, 16709], [16581, 16710, 16709], [16581, 16582, 16711], [16581, 16711, 16710], [16582, 16583, 16711], [16583, 16712, 16711], [16583, 16584, 16713], [16583, 16713, 16712], [16584, 16585, 16713], [16585, 16714, 16713], [16585, 16586, 16715], [16585, 16715, 16714], [16586, 16587, 16715], [16587, 16716, 16715], [16587, 16588, 16717], [16587, 16717, 16716], [16588, 16589, 16717], [16589, 16718, 16717], [16589, 16590, 16719], [16589, 16719, 16718], [16590, 16591, 16719], [16591, 16720, 16719], [16591, 16592, 16721], [16591, 16721, 16720], [16592, 16593, 16721], [16593, 16722, 16721], [16593, 16594, 16723], [16593, 16723, 16722], [16594, 16595, 16723], [16595, 16724, 16723], [16595, 16596, 16725], [16595, 16725, 16724], [16596, 16597, 16725], [16597, 16726, 16725], [16597, 16598, 16727], [16597, 16727, 16726], [16598, 16599, 16727], [16599, 16728, 16727], [16599, 16600, 16729], [16599, 16729, 16728], [16600, 16601, 16729], [16601, 16730, 16729], [16601, 16602, 16731], [16601, 16731, 16730], [16602, 16603, 16731], [16603, 16732, 16731], [16603, 16604, 16733], [16603, 16733, 16732], [16604, 16605, 16733], [16605, 16734, 16733], [16605, 16606, 16735], [16605, 16735, 16734], [16606, 16607, 16735], [16607, 16736, 16735], [16607, 16608, 16737], [16607, 16737, 16736], [16608, 16609, 16737], [16609, 16738, 16737], [16609, 16610, 16739], [16609, 16739, 16738], [16610, 16611, 16739], [16611, 16740, 16739], [16611, 16612, 16741], [16611, 16741, 16740], [16612, 16613, 16741], [16613, 16742, 16741], [16613, 16614, 16743], [16613, 16743, 16742], [16614, 16615, 16743], [16615, 16744, 16743], [16615, 16616, 16745], [16615, 16745, 16744], [16616, 16617, 16745], [16617, 16746, 16745], [16617, 16618, 16747], [16617, 16747, 16746], [16618, 16619, 16747], [16619, 16748, 16747], [16619, 16620, 16749], [16619, 16749, 16748], [16620, 16621, 16749], [16621, 16750, 16749], [16621, 16622, 16751], [16621, 16751, 16750], [16622, 16623, 16751], [16623, 16752, 16751], [16623, 16624, 16753], 
[16623, 16753, 16752], [16624, 16625, 16753], [16625, 16754, 16753], [16625, 16626, 16755], [16625, 16755, 16754], [16626, 16627, 16755], [16627, 16756, 16755], [16627, 16628, 16757], [16627, 16757, 16756], [16628, 16629, 16757], [16629, 16758, 16757], [16629, 16630, 16759], [16629, 16759, 16758], [16630, 16631, 16759], [16631, 16760, 16759], [16631, 16632, 16761], [16631, 16761, 16760], [16632, 16633, 16761], [16633, 16762, 16761], [16633, 16634, 16763], [16633, 16763, 16762], [16634, 16635, 16763], [16635, 16764, 16763], [16635, 16636, 16765], [16635, 16765, 16764], [16636, 16637, 16765], [16637, 16766, 16765], [16637, 16638, 16767], [16637, 16767, 16766], [16638, 16639, 16767], [16639, 16768, 16767], [16639, 16640, 16769], [16639, 16769, 16768], [16640, 16641, 16769], [16641, 16770, 16769], [16641, 16642, 16771], [16641, 16771, 16770], [16642, 16643, 16771], [16643, 16772, 16771], [16643, 16644, 16773], [16643, 16773, 16772], [16644, 16645, 16773], [16645, 16774, 16773], [16645, 16646, 16775], [16645, 16775, 16774], [16646, 16647, 16775], [16647, 16776, 16775], [16647, 16648, 16777], [16647, 16777, 16776], [16648, 16649, 16777], [16649, 16778, 16777], [16649, 16650, 16779], [16649, 16779, 16778], [16650, 16651, 16779], [16651, 16780, 16779], [16651, 16652, 16781], [16651, 16781, 16780], [16652, 16653, 16781], [16653, 16782, 16781], [16653, 16654, 16783], [16653, 16783, 16782], [16654, 16655, 16783], [16655, 16784, 16783], [16655, 16656, 16785], [16655, 16785, 16784], [16656, 16657, 16785], [16657, 16786, 16785], [16657, 16658, 16787], [16657, 16787, 16786], [16658, 16659, 16787], [16659, 16788, 16787], [16659, 16660, 16789], [16659, 16789, 16788], [16660, 16661, 16789], [16661, 16790, 16789], [16661, 16662, 16791], [16661, 16791, 16790], [16662, 16663, 16791], [16663, 16792, 16791], [16663, 16664, 16793], [16663, 16793, 16792], [16664, 16665, 16793], [16665, 16794, 16793], [16665, 16666, 16795], [16665, 16795, 16794], [16666, 16667, 16795], [16667, 16796, 16795], [16667, 16668, 16797], [16667, 16797, 16796], [16668, 16669, 16797], [16669, 16798, 16797], [16669, 16670, 16799], [16669, 16799, 16798], [16670, 16671, 16799], [16671, 16800, 16799], [16671, 16672, 16801], [16671, 16801, 16800], [16672, 16673, 16801], [16673, 16802, 16801], [16673, 16674, 16803], [16673, 16803, 16802], [16674, 16675, 16803], [16675, 16804, 16803], [16675, 16676, 16805], [16675, 16805, 16804], [16676, 16677, 16805], [16677, 16806, 16805], [16677, 16678, 16807], [16677, 16807, 16806], [16678, 16679, 16807], [16679, 16808, 16807], [16679, 16680, 16809], [16679, 16809, 16808], [16680, 16681, 16809], [16681, 16810, 16809], [16681, 16682, 16811], [16681, 16811, 16810], [16682, 16683, 16811], [16683, 16812, 16811], [16683, 16684, 16813], [16683, 16813, 16812], [16684, 16685, 16813], [16685, 16814, 16813], [16685, 16686, 16815], [16685, 16815, 16814], [16687, 16688, 16817], [16687, 16817, 16816], [16688, 16689, 16817], [16689, 16818, 16817], [16689, 16690, 16819], [16689, 16819, 16818], [16690, 16691, 16819], [16691, 16820, 16819], [16691, 16692, 16821], [16691, 16821, 16820], [16692, 16693, 16821], [16693, 16822, 16821], [16693, 16694, 16823], [16693, 16823, 16822], [16694, 16695, 16823], [16695, 16824, 16823], [16695, 16696, 16825], [16695, 16825, 16824], [16696, 16697, 16825], [16697, 16826, 16825], [16697, 16698, 16827], [16697, 16827, 16826], [16698, 16699, 16827], [16699, 16828, 16827], [16699, 16700, 16829], [16699, 16829, 16828], [16700, 16701, 16829], [16701, 16830, 16829], [16701, 16702, 16831], [16701, 
16831, 16830], [16702, 16703, 16831], [16703, 16832, 16831], [16703, 16704, 16833], [16703, 16833, 16832], [16704, 16705, 16833], [16705, 16834, 16833], [16705, 16706, 16835], [16705, 16835, 16834], [16706, 16707, 16835], [16707, 16836, 16835], [16707, 16708, 16837], [16707, 16837, 16836], [16708, 16709, 16837], [16709, 16838, 16837], [16709, 16710, 16839], [16709, 16839, 16838], [16710, 16711, 16839], [16711, 16840, 16839], [16711, 16712, 16841], [16711, 16841, 16840], [16712, 16713, 16841], [16713, 16842, 16841], [16713, 16714, 16843], [16713, 16843, 16842], [16714, 16715, 16843], [16715, 16844, 16843], [16715, 16716, 16845], [16715, 16845, 16844], [16716, 16717, 16845], [16717, 16846, 16845], [16717, 16718, 16847], [16717, 16847, 16846], [16718, 16719, 16847], [16719, 16848, 16847], [16719, 16720, 16849], [16719, 16849, 16848], [16720, 16721, 16849], [16721, 16850, 16849], [16721, 16722, 16851], [16721, 16851, 16850], [16722, 16723, 16851], [16723, 16852, 16851], [16723, 16724, 16853], [16723, 16853, 16852], [16724, 16725, 16853], [16725, 16854, 16853], [16725, 16726, 16855], [16725, 16855, 16854], [16726, 16727, 16855], [16727, 16856, 16855], [16727, 16728, 16857], [16727, 16857, 16856], [16728, 16729, 16857], [16729, 16858, 16857], [16729, 16730, 16859], [16729, 16859, 16858], [16730, 16731, 16859], [16731, 16860, 16859], [16731, 16732, 16861], [16731, 16861, 16860], [16732, 16733, 16861], [16733, 16862, 16861], [16733, 16734, 16863], [16733, 16863, 16862], [16734, 16735, 16863], [16735, 16864, 16863], [16735, 16736, 16865], [16735, 16865, 16864], [16736, 16737, 16865], [16737, 16866, 16865], [16737, 16738, 16867], [16737, 16867, 16866], [16738, 16739, 16867], [16739, 16868, 16867], [16739, 16740, 16869], [16739, 16869, 16868], [16740, 16741, 16869], [16741, 16870, 16869], [16741, 16742, 16871], [16741, 16871, 16870], [16742, 16743, 16871], [16743, 16872, 16871], [16743, 16744, 16873], [16743, 16873, 16872], [16744, 16745, 16873], [16745, 16874, 16873], [16745, 16746, 16875], [16745, 16875, 16874], [16746, 16747, 16875], [16747, 16876, 16875], [16747, 16748, 16877], [16747, 16877, 16876], [16748, 16749, 16877], [16749, 16878, 16877], [16749, 16750, 16879], [16749, 16879, 16878], [16750, 16751, 16879], [16751, 16880, 16879], [16751, 16752, 16881], [16751, 16881, 16880], [16752, 16753, 16881], [16753, 16882, 16881], [16753, 16754, 16883], [16753, 16883, 16882], [16754, 16755, 16883], [16755, 16884, 16883], [16755, 16756, 16885], [16755, 16885, 16884], [16756, 16757, 16885], [16757, 16886, 16885], [16757, 16758, 16887], [16757, 16887, 16886], [16758, 16759, 16887], [16759, 16888, 16887], [16759, 16760, 16889], [16759, 16889, 16888], [16760, 16761, 16889], [16761, 16890, 16889], [16761, 16762, 16891], [16761, 16891, 16890], [16762, 16763, 16891], [16763, 16892, 16891], [16763, 16764, 16893], [16763, 16893, 16892], [16764, 16765, 16893], [16765, 16894, 16893], [16765, 16766, 16895], [16765, 16895, 16894], [16766, 16767, 16895], [16767, 16896, 16895], [16767, 16768, 16897], [16767, 16897, 16896], [16768, 16769, 16897], [16769, 16898, 16897], [16769, 16770, 16899], [16769, 16899, 16898], [16770, 16771, 16899], [16771, 16900, 16899], [16771, 16772, 16901], [16771, 16901, 16900], [16772, 16773, 16901], [16773, 16902, 16901], [16773, 16774, 16903], [16773, 16903, 16902], [16774, 16775, 16903], [16775, 16904, 16903], [16775, 16776, 16905], [16775, 16905, 16904], [16776, 16777, 16905], [16777, 16906, 16905], [16777, 16778, 16907], [16777, 16907, 16906], [16778, 16779, 16907], [16779, 16908, 
16907], [16779, 16780, 16909], [16779, 16909, 16908], [16780, 16781, 16909], [16781, 16910, 16909], [16781, 16782, 16911], [16781, 16911, 16910], [16782, 16783, 16911], [16783, 16912, 16911], [16783, 16784, 16913], [16783, 16913, 16912], [16784, 16785, 16913], [16785, 16914, 16913], [16785, 16786, 16915], [16785, 16915, 16914], [16786, 16787, 16915], [16787, 16916, 16915], [16787, 16788, 16917], [16787, 16917, 16916], [16788, 16789, 16917], [16789, 16918, 16917], [16789, 16790, 16919], [16789, 16919, 16918], [16790, 16791, 16919], [16791, 16920, 16919], [16791, 16792, 16921], [16791, 16921, 16920], [16792, 16793, 16921], [16793, 16922, 16921], [16793, 16794, 16923], [16793, 16923, 16922], [16794, 16795, 16923], [16795, 16924, 16923], [16795, 16796, 16925], [16795, 16925, 16924], [16796, 16797, 16925], [16797, 16926, 16925], [16797, 16798, 16927], [16797, 16927, 16926], [16798, 16799, 16927], [16799, 16928, 16927], [16799, 16800, 16929], [16799, 16929, 16928], [16800, 16801, 16929], [16801, 16930, 16929], [16801, 16802, 16931], [16801, 16931, 16930], [16802, 16803, 16931], [16803, 16932, 16931], [16803, 16804, 16933], [16803, 16933, 16932], [16804, 16805, 16933], [16805, 16934, 16933], [16805, 16806, 16935], [16805, 16935, 16934], [16806, 16807, 16935], [16807, 16936, 16935], [16807, 16808, 16937], [16807, 16937, 16936], [16808, 16809, 16937], [16809, 16938, 16937], [16809, 16810, 16939], [16809, 16939, 16938], [16810, 16811, 16939], [16811, 16940, 16939], [16811, 16812, 16941], [16811, 16941, 16940], [16812, 16813, 16941], [16813, 16942, 16941], [16813, 16814, 16943], [16813, 16943, 16942], [16814, 16815, 16943], [16815, 16944, 16943], [16816, 16817, 16945], [16817, 16946, 16945], [16817, 16818, 16947], [16817, 16947, 16946], [16818, 16819, 16947], [16819, 16948, 16947], [16819, 16820, 16949], [16819, 16949, 16948], [16820, 16821, 16949], [16821, 16950, 16949], [16821, 16822, 16951], [16821, 16951, 16950], [16822, 16823, 16951], [16823, 16952, 16951], [16823, 16824, 16953], [16823, 16953, 16952], [16824, 16825, 16953], [16825, 16954, 16953], [16825, 16826, 16955], [16825, 16955, 16954], [16826, 16827, 16955], [16827, 16956, 16955], [16827, 16828, 16957], [16827, 16957, 16956], [16828, 16829, 16957], [16829, 16958, 16957], [16829, 16830, 16959], [16829, 16959, 16958], [16830, 16831, 16959], [16831, 16960, 16959], [16831, 16832, 16961], [16831, 16961, 16960], [16832, 16833, 16961], [16833, 16962, 16961], [16833, 16834, 16963], [16833, 16963, 16962], [16834, 16835, 16963], [16835, 16964, 16963], [16835, 16836, 16965], [16835, 16965, 16964], [16836, 16837, 16965], [16837, 16966, 16965], [16837, 16838, 16967], [16837, 16967, 16966], [16838, 16839, 16967], [16839, 16968, 16967], [16839, 16840, 16969], [16839, 16969, 16968], [16840, 16841, 16969], [16841, 16970, 16969], [16841, 16842, 16971], [16841, 16971, 16970], [16842, 16843, 16971], [16843, 16972, 16971], [16843, 16844, 16973], [16843, 16973, 16972], [16844, 16845, 16973], [16845, 16974, 16973], [16845, 16846, 16975], [16845, 16975, 16974], [16846, 16847, 16975], [16847, 16976, 16975], [16847, 16848, 16977], [16847, 16977, 16976], [16848, 16849, 16977], [16849, 16978, 16977], [16849, 16850, 16979], [16849, 16979, 16978], [16850, 16851, 16979], [16851, 16980, 16979], [16851, 16852, 16981], [16851, 16981, 16980], [16852, 16853, 16981], [16853, 16982, 16981], [16853, 16854, 16983], [16853, 16983, 16982], [16854, 16855, 16983], [16855, 16984, 16983], [16855, 16856, 16985], [16855, 16985, 16984], [16856, 16857, 16985], [16857, 16986, 16985], 
[16857, 16858, 16987], [16857, 16987, 16986], [16858, 16859, 16987], [16859, 16988, 16987], [16859, 16860, 16989], [16859, 16989, 16988], [16860, 16861, 16989], [16861, 16990, 16989], [16861, 16862, 16991], [16861, 16991, 16990], [16862, 16863, 16991], [16863, 16992, 16991], [16863, 16864, 16993], [16863, 16993, 16992], [16864, 16865, 16993], [16865, 16994, 16993], [16865, 16866, 16995], [16865, 16995, 16994], [16866, 16867, 16995], [16867, 16996, 16995], [16867, 16868, 16997], [16867, 16997, 16996], [16868, 16869, 16997], [16869, 16998, 16997], [16869, 16870, 16999], [16869, 16999, 16998], [16870, 16871, 16999], [16871, 17000, 16999], [16871, 16872, 17001], [16871, 17001, 17000], [16872, 16873, 17001], [16873, 17002, 17001], [16873, 16874, 17003], [16873, 17003, 17002], [16874, 16875, 17003], [16875, 17004, 17003], [16875, 16876, 17005], [16875, 17005, 17004], [16876, 16877, 17005], [16877, 17006, 17005], [16877, 16878, 17007], [16877, 17007, 17006], [16878, 16879, 17007], [16879, 17008, 17007], [16879, 16880, 17009], [16879, 17009, 17008], [16880, 16881, 17009], [16881, 17010, 17009], [16881, 16882, 17011], [16881, 17011, 17010], [16882, 16883, 17011], [16883, 17012, 17011], [16883, 16884, 17013], [16883, 17013, 17012], [16884, 16885, 17013], [16885, 17014, 17013], [16885, 16886, 17015], [16885, 17015, 17014], [16886, 16887, 17015], [16887, 17016, 17015], [16887, 16888, 17017], [16887, 17017, 17016], [16888, 16889, 17017], [16889, 17018, 17017], [16889, 16890, 17019], [16889, 17019, 17018], [16890, 16891, 17019], [16891, 17020, 17019], [16891, 16892, 17021], [16891, 17021, 17020], [16892, 16893, 17021], [16893, 17022, 17021], [16893, 16894, 17023], [16893, 17023, 17022], [16894, 16895, 17023], [16895, 17024, 17023], [16895, 16896, 17025], [16895, 17025, 17024], [16896, 16897, 17025], [16897, 17026, 17025], [16897, 16898, 17027], [16897, 17027, 17026], [16898, 16899, 17027], [16899, 17028, 17027], [16899, 16900, 17029], [16899, 17029, 17028], [16900, 16901, 17029], [16901, 17030, 17029], [16901, 16902, 17031], [16901, 17031, 17030], [16902, 16903, 17031], [16903, 17032, 17031], [16903, 16904, 17033], [16903, 17033, 17032], [16904, 16905, 17033], [16905, 17034, 17033], [16905, 16906, 17035], [16905, 17035, 17034], [16906, 16907, 17035], [16907, 17036, 17035], [16907, 16908, 17037], [16907, 17037, 17036], [16908, 16909, 17037], [16909, 17038, 17037], [16909, 16910, 17039], [16909, 17039, 17038], [16910, 16911, 17039], [16911, 17040, 17039], [16911, 16912, 17041], [16911, 17041, 17040], [16912, 16913, 17041], [16913, 17042, 17041], [16913, 16914, 17043], [16913, 17043, 17042], [16914, 16915, 17043], [16915, 17044, 17043], [16915, 16916, 17045], [16915, 17045, 17044], [16916, 16917, 17045], [16917, 17046, 17045], [16917, 16918, 17047], [16917, 17047, 17046], [16918, 16919, 17047], [16919, 17048, 17047], [16919, 16920, 17049], [16919, 17049, 17048], [16920, 16921, 17049], [16921, 17050, 17049], [16921, 16922, 17051], [16921, 17051, 17050], [16922, 16923, 17051], [16923, 17052, 17051], [16923, 16924, 17053], [16923, 17053, 17052], [16924, 16925, 17053], [16925, 17054, 17053], [16925, 16926, 17055], [16925, 17055, 17054], [16926, 16927, 17055], [16927, 17056, 17055], [16927, 16928, 17057], [16927, 17057, 17056], [16928, 16929, 17057], [16929, 17058, 17057], [16929, 16930, 17059], [16929, 17059, 17058], [16930, 16931, 17059], [16931, 17060, 17059], [16931, 16932, 17061], [16931, 17061, 17060], [16932, 16933, 17061], [16933, 17062, 17061], [16933, 16934, 17063], [16933, 17063, 17062], [16934, 
16935, 17063], [16935, 17064, 17063], [16935, 16936, 17065], [16935, 17065, 17064], [16936, 16937, 17065], [16937, 17066, 17065], [16937, 16938, 17067], [16937, 17067, 17066], [16938, 16939, 17067], [16939, 17068, 17067], [16939, 16940, 17069], [16939, 17069, 17068], [16940, 16941, 17069], [16941, 17070, 17069], [16941, 16942, 17071], [16941, 17071, 17070], [16942, 16943, 17071], [16943, 17072, 17071], [16943, 16944, 17073], [16943, 17073, 17072], [16945, 16946, 17075], [16945, 17075, 17074], [16946, 16947, 17075], [16947, 17076, 17075], [16947, 16948, 17077], [16947, 17077, 17076], [16948, 16949, 17077], [16949, 17078, 17077], [16949, 16950, 17079], [16949, 17079, 17078], [16950, 16951, 17079], [16951, 17080, 17079], [16951, 16952, 17081], [16951, 17081, 17080], [16952, 16953, 17081], [16953, 17082, 17081], [16953, 16954, 17083], [16953, 17083, 17082], [16954, 16955, 17083], [16955, 17084, 17083], [16955, 16956, 17085], [16955, 17085, 17084], [16956, 16957, 17085], [16957, 17086, 17085], [16957, 16958, 17087], [16957, 17087, 17086], [16958, 16959, 17087], [16959, 17088, 17087], [16959, 16960, 17089], [16959, 17089, 17088], [16960, 16961, 17089], [16961, 17090, 17089], [16961, 16962, 17091], [16961, 17091, 17090], [16962, 16963, 17091], [16963, 17092, 17091], [16963, 16964, 17093], [16963, 17093, 17092], [16964, 16965, 17093], [16965, 17094, 17093], [16965, 16966, 17095], [16965, 17095, 17094], [16966, 16967, 17095], [16967, 17096, 17095], [16967, 16968, 17097], [16967, 17097, 17096], [16968, 16969, 17097], [16969, 17098, 17097], [16969, 16970, 17099], [16969, 17099, 17098], [16970, 16971, 17099], [16971, 17100, 17099], [16971, 16972, 17101], [16971, 17101, 17100], [16972, 16973, 17101], [16973, 17102, 17101], [16973, 16974, 17103], [16973, 17103, 17102], [16974, 16975, 17103], [16975, 17104, 17103], [16975, 16976, 17105], [16975, 17105, 17104], [16976, 16977, 17105], [16977, 17106, 17105], [16977, 16978, 17107], [16977, 17107, 17106], [16978, 16979, 17107], [16979, 17108, 17107], [16979, 16980, 17109], [16979, 17109, 17108], [16980, 16981, 17109], [16981, 17110, 17109], [16981, 16982, 17111], [16981, 17111, 17110], [16982, 16983, 17111], [16983, 17112, 17111], [16983, 16984, 17113], [16983, 17113, 17112], [16984, 16985, 17113], [16985, 17114, 17113], [16985, 16986, 17115], [16985, 17115, 17114], [16986, 16987, 17115], [16987, 17116, 17115], [16987, 16988, 17117], [16987, 17117, 17116], [16988, 16989, 17117], [16989, 17118, 17117], [16989, 16990, 17119], [16989, 17119, 17118], [16990, 16991, 17119], [16991, 17120, 17119], [16991, 16992, 17121], [16991, 17121, 17120], [16992, 16993, 17121], [16993, 17122, 17121], [16993, 16994, 17123], [16993, 17123, 17122], [16994, 16995, 17123], [16995, 17124, 17123], [16995, 16996, 17125], [16995, 17125, 17124], [16996, 16997, 17125], [16997, 17126, 17125], [16997, 16998, 17127], [16997, 17127, 17126], [16998, 16999, 17127], [16999, 17128, 17127], [16999, 17000, 17129], [16999, 17129, 17128], [17000, 17001, 17129], [17001, 17130, 17129], [17001, 17002, 17131], [17001, 17131, 17130], [17002, 17003, 17131], [17003, 17132, 17131], [17003, 17004, 17133], [17003, 17133, 17132], [17004, 17005, 17133], [17005, 17134, 17133], [17005, 17006, 17135], [17005, 17135, 17134], [17006, 17007, 17135], [17007, 17136, 17135], [17007, 17008, 17137], [17007, 17137, 17136], [17008, 17009, 17137], [17009, 17138, 17137], [17009, 17010, 17139], [17009, 17139, 17138], [17010, 17011, 17139], [17011, 17140, 17139], [17011, 17012, 17141], [17011, 17141, 17140], [17012, 17013, 
17141], [17013, 17142, 17141], [17013, 17014, 17143], [17013, 17143, 17142], [17014, 17015, 17143], [17015, 17144, 17143], [17015, 17016, 17145], [17015, 17145, 17144], [17016, 17017, 17145], [17017, 17146, 17145], [17017, 17018, 17147], [17017, 17147, 17146], [17018, 17019, 17147], [17019, 17148, 17147], [17019, 17020, 17149], [17019, 17149, 17148], [17020, 17021, 17149], [17021, 17150, 17149], [17021, 17022, 17151], [17021, 17151, 17150], [17022, 17023, 17151], [17023, 17152, 17151], [17023, 17024, 17153], [17023, 17153, 17152], [17024, 17025, 17153], [17025, 17154, 17153], [17025, 17026, 17155], [17025, 17155, 17154], [17026, 17027, 17155], [17027, 17156, 17155], [17027, 17028, 17157], [17027, 17157, 17156], [17028, 17029, 17157], [17029, 17158, 17157], [17029, 17030, 17159], [17029, 17159, 17158], [17030, 17031, 17159], [17031, 17160, 17159], [17031, 17032, 17161], [17031, 17161, 17160], [17032, 17033, 17161], [17033, 17162, 17161], [17033, 17034, 17163], [17033, 17163, 17162], [17034, 17035, 17163], [17035, 17164, 17163], [17035, 17036, 17165], [17035, 17165, 17164], [17036, 17037, 17165], [17037, 17166, 17165], [17037, 17038, 17167], [17037, 17167, 17166], [17038, 17039, 17167], [17039, 17168, 17167], [17039, 17040, 17169], [17039, 17169, 17168], [17040, 17041, 17169], [17041, 17170, 17169], [17041, 17042, 17171], [17041, 17171, 17170], [17042, 17043, 17171], [17043, 17172, 17171], [17043, 17044, 17173], [17043, 17173, 17172], [17044, 17045, 17173], [17045, 17174, 17173], [17045, 17046, 17175], [17045, 17175, 17174], [17046, 17047, 17175], [17047, 17176, 17175], [17047, 17048, 17177], [17047, 17177, 17176], [17048, 17049, 17177], [17049, 17178, 17177], [17049, 17050, 17179], [17049, 17179, 17178], [17050, 17051, 17179], [17051, 17180, 17179], [17051, 17052, 17181], [17051, 17181, 17180], [17052, 17053, 17181], [17053, 17182, 17181], [17053, 17054, 17183], [17053, 17183, 17182], [17054, 17055, 17183], [17055, 17184, 17183], [17055, 17056, 17185], [17055, 17185, 17184], [17056, 17057, 17185], [17057, 17186, 17185], [17057, 17058, 17187], [17057, 17187, 17186], [17058, 17059, 17187], [17059, 17188, 17187], [17059, 17060, 17189], [17059, 17189, 17188], [17060, 17061, 17189], [17061, 17190, 17189], [17061, 17062, 17191], [17061, 17191, 17190], [17062, 17063, 17191], [17063, 17192, 17191], [17063, 17064, 17193], [17063, 17193, 17192], [17064, 17065, 17193], [17065, 17194, 17193], [17065, 17066, 17195], [17065, 17195, 17194], [17066, 17067, 17195], [17067, 17196, 17195], [17067, 17068, 17197], [17067, 17197, 17196], [17068, 17069, 17197], [17069, 17198, 17197], [17069, 17070, 17199], [17069, 17199, 17198], [17070, 17071, 17199], [17071, 17200, 17199], [17071, 17072, 17201], [17071, 17201, 17200], [17072, 17073, 17201], [17073, 17202, 17201], [17074, 17075, 17203], [17075, 17204, 17203], [17075, 17076, 17205], [17075, 17205, 17204], [17076, 17077, 17205], [17077, 17206, 17205], [17077, 17078, 17207], [17077, 17207, 17206], [17078, 17079, 17207], [17079, 17208, 17207], [17079, 17080, 17209], [17079, 17209, 17208], [17080, 17081, 17209], [17081, 17210, 17209], [17081, 17082, 17211], [17081, 17211, 17210], [17082, 17083, 17211], [17083, 17212, 17211], [17083, 17084, 17213], [17083, 17213, 17212], [17084, 17085, 17213], [17085, 17214, 17213], [17085, 17086, 17215], [17085, 17215, 17214], [17086, 17087, 17215], [17087, 17216, 17215], [17087, 17088, 17217], [17087, 17217, 17216], [17088, 17089, 17217], [17089, 17218, 17217], [17089, 17090, 17219], [17089, 17219, 17218], [17090, 17091, 17219], 
[17091, 17220, 17219], [17091, 17092, 17221], [17091, 17221, 17220], [17092, 17093, 17221], [17093, 17222, 17221], [17093, 17094, 17223], [17093, 17223, 17222], [17094, 17095, 17223], [17095, 17224, 17223], [17095, 17096, 17225], [17095, 17225, 17224], [17096, 17097, 17225], [17097, 17226, 17225], [17097, 17098, 17227], [17097, 17227, 17226], [17098, 17099, 17227], [17099, 17228, 17227], [17099, 17100, 17229], [17099, 17229, 17228], [17100, 17101, 17229], [17101, 17230, 17229], [17101, 17102, 17231], [17101, 17231, 17230], [17102, 17103, 17231], [17103, 17232, 17231], [17103, 17104, 17233], [17103, 17233, 17232], [17104, 17105, 17233], [17105, 17234, 17233], [17105, 17106, 17235], [17105, 17235, 17234], [17106, 17107, 17235], [17107, 17236, 17235], [17107, 17108, 17237], [17107, 17237, 17236], [17108, 17109, 17237], [17109, 17238, 17237], [17109, 17110, 17239], [17109, 17239, 17238], [17110, 17111, 17239], [17111, 17240, 17239], [17111, 17112, 17241], [17111, 17241, 17240], [17112, 17113, 17241], [17113, 17242, 17241], [17113, 17114, 17243], [17113, 17243, 17242], [17114, 17115, 17243], [17115, 17244, 17243], [17115, 17116, 17245], [17115, 17245, 17244], [17116, 17117, 17245], [17117, 17246, 17245], [17117, 17118, 17247], [17117, 17247, 17246], [17118, 17119, 17247], [17119, 17248, 17247], [17119, 17120, 17249], [17119, 17249, 17248], [17120, 17121, 17249], [17121, 17250, 17249], [17121, 17122, 17251], [17121, 17251, 17250], [17122, 17123, 17251], [17123, 17252, 17251], [17123, 17124, 17253], [17123, 17253, 17252], [17124, 17125, 17253], [17125, 17254, 17253], [17125, 17126, 17255], [17125, 17255, 17254], [17126, 17127, 17255], [17127, 17256, 17255], [17127, 17128, 17257], [17127, 17257, 17256], [17128, 17129, 17257], [17129, 17258, 17257], [17129, 17130, 17259], [17129, 17259, 17258], [17130, 17131, 17259], [17131, 17260, 17259], [17131, 17132, 17261], [17131, 17261, 17260], [17132, 17133, 17261], [17133, 17262, 17261], [17133, 17134, 17263], [17133, 17263, 17262], [17134, 17135, 17263], [17135, 17264, 17263], [17135, 17136, 17265], [17135, 17265, 17264], [17136, 17137, 17265], [17137, 17266, 17265], [17137, 17138, 17267], [17137, 17267, 17266], [17138, 17139, 17267], [17139, 17268, 17267], [17139, 17140, 17269], [17139, 17269, 17268], [17140, 17141, 17269], [17141, 17270, 17269], [17141, 17142, 17271], [17141, 17271, 17270], [17142, 17143, 17271], [17143, 17272, 17271], [17143, 17144, 17273], [17143, 17273, 17272], [17144, 17145, 17273], [17145, 17274, 17273], [17145, 17146, 17275], [17145, 17275, 17274], [17146, 17147, 17275], [17147, 17276, 17275], [17147, 17148, 17277], [17147, 17277, 17276], [17148, 17149, 17277], [17149, 17278, 17277], [17149, 17150, 17279], [17149, 17279, 17278], [17150, 17151, 17279], [17151, 17280, 17279], [17151, 17152, 17281], [17151, 17281, 17280], [17152, 17153, 17281], [17153, 17282, 17281], [17153, 17154, 17283], [17153, 17283, 17282], [17154, 17155, 17283], [17155, 17284, 17283], [17155, 17156, 17285], [17155, 17285, 17284], [17156, 17157, 17285], [17157, 17286, 17285], [17157, 17158, 17287], [17157, 17287, 17286], [17158, 17159, 17287], [17159, 17288, 17287], [17159, 17160, 17289], [17159, 17289, 17288], [17160, 17161, 17289], [17161, 17290, 17289], [17161, 17162, 17291], [17161, 17291, 17290], [17162, 17163, 17291], [17163, 17292, 17291], [17163, 17164, 17293], [17163, 17293, 17292], [17164, 17165, 17293], [17165, 17294, 17293], [17165, 17166, 17295], [17165, 17295, 17294], [17166, 17167, 17295], [17167, 17296, 17295], [17167, 17168, 17297], [17167, 
17297, 17296], [17168, 17169, 17297], [17169, 17298, 17297], [17169, 17170, 17299], [17169, 17299, 17298], [17170, 17171, 17299], [17171, 17300, 17299], [17171, 17172, 17301], [17171, 17301, 17300], [17172, 17173, 17301], [17173, 17302, 17301], [17173, 17174, 17303], [17173, 17303, 17302], [17174, 17175, 17303], [17175, 17304, 17303], [17175, 17176, 17305], [17175, 17305, 17304], [17176, 17177, 17305], [17177, 17306, 17305], [17177, 17178, 17307], [17177, 17307, 17306], [17178, 17179, 17307], [17179, 17308, 17307], [17179, 17180, 17309], [17179, 17309, 17308], [17180, 17181, 17309], [17181, 17310, 17309], [17181, 17182, 17311], [17181, 17311, 17310], [17182, 17183, 17311], [17183, 17312, 17311], [17183, 17184, 17313], [17183, 17313, 17312], [17184, 17185, 17313], [17185, 17314, 17313], [17185, 17186, 17315], [17185, 17315, 17314], [17186, 17187, 17315], [17187, 17316, 17315], [17187, 17188, 17317], [17187, 17317, 17316], [17188, 17189, 17317], [17189, 17318, 17317], [17189, 17190, 17319], [17189, 17319, 17318], [17190, 17191, 17319], [17191, 17320, 17319], [17191, 17192, 17321], [17191, 17321, 17320], [17192, 17193, 17321], [17193, 17322, 17321], [17193, 17194, 17323], [17193, 17323, 17322], [17194, 17195, 17323], [17195, 17324, 17323], [17195, 17196, 17325], [17195, 17325, 17324], [17196, 17197, 17325], [17197, 17326, 17325], [17197, 17198, 17327], [17197, 17327, 17326], [17198, 17199, 17327], [17199, 17328, 17327], [17199, 17200, 17329], [17199, 17329, 17328], [17200, 17201, 17329], [17201, 17330, 17329], [17201, 17202, 17331], [17201, 17331, 17330], [17203, 17204, 17333], [17203, 17333, 17332], [17204, 17205, 17333], [17205, 17334, 17333], [17205, 17206, 17335], [17205, 17335, 17334], [17206, 17207, 17335], [17207, 17336, 17335], [17207, 17208, 17337], [17207, 17337, 17336], [17208, 17209, 17337], [17209, 17338, 17337], [17209, 17210, 17339], [17209, 17339, 17338], [17210, 17211, 17339], [17211, 17340, 17339], [17211, 17212, 17341], [17211, 17341, 17340], [17212, 17213, 17341], [17213, 17342, 17341], [17213, 17214, 17343], [17213, 17343, 17342], [17214, 17215, 17343], [17215, 17344, 17343], [17215, 17216, 17345], [17215, 17345, 17344], [17216, 17217, 17345], [17217, 17346, 17345], [17217, 17218, 17347], [17217, 17347, 17346], [17218, 17219, 17347], [17219, 17348, 17347], [17219, 17220, 17349], [17219, 17349, 17348], [17220, 17221, 17349], [17221, 17350, 17349], [17221, 17222, 17351], [17221, 17351, 17350], [17222, 17223, 17351], [17223, 17352, 17351], [17223, 17224, 17353], [17223, 17353, 17352], [17224, 17225, 17353], [17225, 17354, 17353], [17225, 17226, 17355], [17225, 17355, 17354], [17226, 17227, 17355], [17227, 17356, 17355], [17227, 17228, 17357], [17227, 17357, 17356], [17228, 17229, 17357], [17229, 17358, 17357], [17229, 17230, 17359], [17229, 17359, 17358], [17230, 17231, 17359], [17231, 17360, 17359], [17231, 17232, 17361], [17231, 17361, 17360], [17232, 17233, 17361], [17233, 17362, 17361], [17233, 17234, 17363], [17233, 17363, 17362], [17234, 17235, 17363], [17235, 17364, 17363], [17235, 17236, 17365], [17235, 17365, 17364], [17236, 17237, 17365], [17237, 17366, 17365], [17237, 17238, 17367], [17237, 17367, 17366], [17238, 17239, 17367], [17239, 17368, 17367], [17239, 17240, 17369], [17239, 17369, 17368], [17240, 17241, 17369], [17241, 17370, 17369], [17241, 17242, 17371], [17241, 17371, 17370], [17242, 17243, 17371], [17243, 17372, 17371], [17243, 17244, 17373], [17243, 17373, 17372], [17244, 17245, 17373], [17245, 17374, 17373], [17245, 17246, 17375], [17245, 17375, 
17374], [17246, 17247, 17375], [17247, 17376, 17375], [17247, 17248, 17377], [17247, 17377, 17376], [17248, 17249, 17377], [17249, 17378, 17377], [17249, 17250, 17379], [17249, 17379, 17378], [17250, 17251, 17379], [17251, 17380, 17379], [17251, 17252, 17381], [17251, 17381, 17380], [17252, 17253, 17381], [17253, 17382, 17381], [17253, 17254, 17383], [17253, 17383, 17382], [17254, 17255, 17383], [17255, 17384, 17383], [17255, 17256, 17385], [17255, 17385, 17384], [17256, 17257, 17385], [17257, 17386, 17385], [17257, 17258, 17387], [17257, 17387, 17386], [17258, 17259, 17387], [17259, 17388, 17387], [17259, 17260, 17389], [17259, 17389, 17388], [17260, 17261, 17389], [17261, 17390, 17389], [17261, 17262, 17391], [17261, 17391, 17390], [17262, 17263, 17391], [17263, 17392, 17391], [17263, 17264, 17393], [17263, 17393, 17392], [17264, 17265, 17393], [17265, 17394, 17393], [17265, 17266, 17395], [17265, 17395, 17394], [17266, 17267, 17395], [17267, 17396, 17395], [17267, 17268, 17397], [17267, 17397, 17396], [17268, 17269, 17397], [17269, 17398, 17397], [17269, 17270, 17399], [17269, 17399, 17398], [17270, 17271, 17399], [17271, 17400, 17399], [17271, 17272, 17401], [17271, 17401, 17400], [17272, 17273, 17401], [17273, 17402, 17401], [17273, 17274, 17403], [17273, 17403, 17402], [17274, 17275, 17403], [17275, 17404, 17403], [17275, 17276, 17405], [17275, 17405, 17404], [17276, 17277, 17405], [17277, 17406, 17405], [17277, 17278, 17407], [17277, 17407, 17406], [17278, 17279, 17407], [17279, 17408, 17407], [17279, 17280, 17409], [17279, 17409, 17408], [17280, 17281, 17409], [17281, 17410, 17409], [17281, 17282, 17411], [17281, 17411, 17410], [17282, 17283, 17411], [17283, 17412, 17411], [17283, 17284, 17413], [17283, 17413, 17412], [17284, 17285, 17413], [17285, 17414, 17413], [17285, 17286, 17415], [17285, 17415, 17414], [17286, 17287, 17415], [17287, 17416, 17415], [17287, 17288, 17417], [17287, 17417, 17416], [17288, 17289, 17417], [17289, 17418, 17417], [17289, 17290, 17419], [17289, 17419, 17418], [17290, 17291, 17419], [17291, 17420, 17419], [17291, 17292, 17421], [17291, 17421, 17420], [17292, 17293, 17421], [17293, 17422, 17421], [17293, 17294, 17423], [17293, 17423, 17422], [17294, 17295, 17423], [17295, 17424, 17423], [17295, 17296, 17425], [17295, 17425, 17424], [17296, 17297, 17425], [17297, 17426, 17425], [17297, 17298, 17427], [17297, 17427, 17426], [17298, 17299, 17427], [17299, 17428, 17427], [17299, 17300, 17429], [17299, 17429, 17428], [17300, 17301, 17429], [17301, 17430, 17429], [17301, 17302, 17431], [17301, 17431, 17430], [17302, 17303, 17431], [17303, 17432, 17431], [17303, 17304, 17433], [17303, 17433, 17432], [17304, 17305, 17433], [17305, 17434, 17433], [17305, 17306, 17435], [17305, 17435, 17434], [17306, 17307, 17435], [17307, 17436, 17435], [17307, 17308, 17437], [17307, 17437, 17436], [17308, 17309, 17437], [17309, 17438, 17437], [17309, 17310, 17439], [17309, 17439, 17438], [17310, 17311, 17439], [17311, 17440, 17439], [17311, 17312, 17441], [17311, 17441, 17440], [17312, 17313, 17441], [17313, 17442, 17441], [17313, 17314, 17443], [17313, 17443, 17442], [17314, 17315, 17443], [17315, 17444, 17443], [17315, 17316, 17445], [17315, 17445, 17444], [17316, 17317, 17445], [17317, 17446, 17445], [17317, 17318, 17447], [17317, 17447, 17446], [17318, 17319, 17447], [17319, 17448, 17447], [17319, 17320, 17449], [17319, 17449, 17448], [17320, 17321, 17449], [17321, 17450, 17449], [17321, 17322, 17451], [17321, 17451, 17450], [17322, 17323, 17451], [17323, 17452, 17451], 
[17323, 17324, 17453], [17323, 17453, 17452], [17324, 17325, 17453], [17325, 17454, 17453], [17325, 17326, 17455], [17325, 17455, 17454], [17326, 17327, 17455], [17327, 17456, 17455], [17327, 17328, 17457], [17327, 17457, 17456], [17328, 17329, 17457], [17329, 17458, 17457], [17329, 17330, 17459], [17329, 17459, 17458], [17330, 17331, 17459], [17331, 17460, 17459], [17332, 17333, 17461], [17333, 17462, 17461], [17333, 17334, 17463], [17333, 17463, 17462], [17334, 17335, 17463], [17335, 17464, 17463], [17335, 17336, 17465], [17335, 17465, 17464], [17336, 17337, 17465], [17337, 17466, 17465], [17337, 17338, 17467], [17337, 17467, 17466], [17338, 17339, 17467], [17339, 17468, 17467], [17339, 17340, 17469], [17339, 17469, 17468], [17340, 17341, 17469], [17341, 17470, 17469], [17341, 17342, 17471], [17341, 17471, 17470], [17342, 17343, 17471], [17343, 17472, 17471], [17343, 17344, 17473], [17343, 17473, 17472], [17344, 17345, 17473], [17345, 17474, 17473], [17345, 17346, 17475], [17345, 17475, 17474], [17346, 17347, 17475], [17347, 17476, 17475], [17347, 17348, 17477], [17347, 17477, 17476], [17348, 17349, 17477], [17349, 17478, 17477], [17349, 17350, 17479], [17349, 17479, 17478], [17350, 17351, 17479], [17351, 17480, 17479], [17351, 17352, 17481], [17351, 17481, 17480], [17352, 17353, 17481], [17353, 17482, 17481], [17353, 17354, 17483], [17353, 17483, 17482], [17354, 17355, 17483], [17355, 17484, 17483], [17355, 17356, 17485], [17355, 17485, 17484], [17356, 17357, 17485], [17357, 17486, 17485], [17357, 17358, 17487], [17357, 17487, 17486], [17358, 17359, 17487], [17359, 17488, 17487], [17359, 17360, 17489], [17359, 17489, 17488], [17360, 17361, 17489], [17361, 17490, 17489], [17361, 17362, 17491], [17361, 17491, 17490], [17362, 17363, 17491], [17363, 17492, 17491], [17363, 17364, 17493], [17363, 17493, 17492], [17364, 17365, 17493], [17365, 17494, 17493], [17365, 17366, 17495], [17365, 17495, 17494], [17366, 17367, 17495], [17367, 17496, 17495], [17367, 17368, 17497], [17367, 17497, 17496], [17368, 17369, 17497], [17369, 17498, 17497], [17369, 17370, 17499], [17369, 17499, 17498], [17370, 17371, 17499], [17371, 17500, 17499], [17371, 17372, 17501], [17371, 17501, 17500], [17372, 17373, 17501], [17373, 17502, 17501], [17373, 17374, 17503], [17373, 17503, 17502], [17374, 17375, 17503], [17375, 17504, 17503], [17375, 17376, 17505], [17375, 17505, 17504], [17376, 17377, 17505], [17377, 17506, 17505], [17377, 17378, 17507], [17377, 17507, 17506], [17378, 17379, 17507], [17379, 17508, 17507], [17379, 17380, 17509], [17379, 17509, 17508], [17380, 17381, 17509], [17381, 17510, 17509], [17381, 17382, 17511], [17381, 17511, 17510], [17382, 17383, 17511], [17383, 17512, 17511], [17383, 17384, 17513], [17383, 17513, 17512], [17384, 17385, 17513], [17385, 17514, 17513], [17385, 17386, 17515], [17385, 17515, 17514], [17386, 17387, 17515], [17387, 17516, 17515], [17387, 17388, 17517], [17387, 17517, 17516], [17388, 17389, 17517], [17389, 17518, 17517], [17389, 17390, 17519], [17389, 17519, 17518], [17390, 17391, 17519], [17391, 17520, 17519], [17391, 17392, 17521], [17391, 17521, 17520], [17392, 17393, 17521], [17393, 17522, 17521], [17393, 17394, 17523], [17393, 17523, 17522], [17394, 17395, 17523], [17395, 17524, 17523], [17395, 17396, 17525], [17395, 17525, 17524], [17396, 17397, 17525], [17397, 17526, 17525], [17397, 17398, 17527], [17397, 17527, 17526], [17398, 17399, 17527], [17399, 17528, 17527], [17399, 17400, 17529], [17399, 17529, 17528], [17400, 17401, 17529], [17401, 17530, 17529], [17401, 
17402, 17531], [17401, 17531, 17530], [17402, 17403, 17531], [17403, 17532, 17531], [17403, 17404, 17533], [17403, 17533, 17532], [17404, 17405, 17533], [17405, 17534, 17533], [17405, 17406, 17535], [17405, 17535, 17534], [17406, 17407, 17535], [17407, 17536, 17535], [17407, 17408, 17537], [17407, 17537, 17536], [17408, 17409, 17537], [17409, 17538, 17537], [17409, 17410, 17539], [17409, 17539, 17538], [17410, 17411, 17539], [17411, 17540, 17539], [17411, 17412, 17541], [17411, 17541, 17540], [17412, 17413, 17541], [17413, 17542, 17541], [17413, 17414, 17543], [17413, 17543, 17542], [17414, 17415, 17543], [17415, 17544, 17543], [17415, 17416, 17545], [17415, 17545, 17544], [17416, 17417, 17545], [17417, 17546, 17545], [17417, 17418, 17547], [17417, 17547, 17546], [17418, 17419, 17547], [17419, 17548, 17547], [17419, 17420, 17549], [17419, 17549, 17548], [17420, 17421, 17549], [17421, 17550, 17549], [17421, 17422, 17551], [17421, 17551, 17550], [17422, 17423, 17551], [17423, 17552, 17551], [17423, 17424, 17553], [17423, 17553, 17552], [17424, 17425, 17553], [17425, 17554, 17553], [17425, 17426, 17555], [17425, 17555, 17554], [17426, 17427, 17555], [17427, 17556, 17555], [17427, 17428, 17557], [17427, 17557, 17556], [17428, 17429, 17557], [17429, 17558, 17557], [17429, 17430, 17559], [17429, 17559, 17558], [17430, 17431, 17559], [17431, 17560, 17559], [17431, 17432, 17561], [17431, 17561, 17560], [17432, 17433, 17561], [17433, 17562, 17561], [17433, 17434, 17563], [17433, 17563, 17562], [17434, 17435, 17563], [17435, 17564, 17563], [17435, 17436, 17565], [17435, 17565, 17564], [17436, 17437, 17565], [17437, 17566, 17565], [17437, 17438, 17567], [17437, 17567, 17566], [17438, 17439, 17567], [17439, 17568, 17567], [17439, 17440, 17569], [17439, 17569, 17568], [17440, 17441, 17569], [17441, 17570, 17569], [17441, 17442, 17571], [17441, 17571, 17570], [17442, 17443, 17571], [17443, 17572, 17571], [17443, 17444, 17573], [17443, 17573, 17572], [17444, 17445, 17573], [17445, 17574, 17573], [17445, 17446, 17575], [17445, 17575, 17574], [17446, 17447, 17575], [17447, 17576, 17575], [17447, 17448, 17577], [17447, 17577, 17576], [17448, 17449, 17577], [17449, 17578, 17577], [17449, 17450, 17579], [17449, 17579, 17578], [17450, 17451, 17579], [17451, 17580, 17579], [17451, 17452, 17581], [17451, 17581, 17580], [17452, 17453, 17581], [17453, 17582, 17581], [17453, 17454, 17583], [17453, 17583, 17582], [17454, 17455, 17583], [17455, 17584, 17583], [17455, 17456, 17585], [17455, 17585, 17584], [17456, 17457, 17585], [17457, 17586, 17585], [17457, 17458, 17587], [17457, 17587, 17586], [17458, 17459, 17587], [17459, 17588, 17587], [17459, 17460, 17589], [17459, 17589, 17588], [17461, 17462, 17591], [17461, 17591, 17590], [17462, 17463, 17591], [17463, 17592, 17591], [17463, 17464, 17593], [17463, 17593, 17592], [17464, 17465, 17593], [17465, 17594, 17593], [17465, 17466, 17595], [17465, 17595, 17594], [17466, 17467, 17595], [17467, 17596, 17595], [17467, 17468, 17597], [17467, 17597, 17596], [17468, 17469, 17597], [17469, 17598, 17597], [17469, 17470, 17599], [17469, 17599, 17598], [17470, 17471, 17599], [17471, 17600, 17599], [17471, 17472, 17601], [17471, 17601, 17600], [17472, 17473, 17601], [17473, 17602, 17601], [17473, 17474, 17603], [17473, 17603, 17602], [17474, 17475, 17603], [17475, 17604, 17603], [17475, 17476, 17605], [17475, 17605, 17604], [17476, 17477, 17605], [17477, 17606, 17605], [17477, 17478, 17607], [17477, 17607, 17606], [17478, 17479, 17607], [17479, 17608, 17607], [17479, 17480, 
17609], [17479, 17609, 17608], [17480, 17481, 17609], [17481, 17610, 17609], [17481, 17482, 17611], [17481, 17611, 17610], [17482, 17483, 17611], [17483, 17612, 17611], [17483, 17484, 17613], [17483, 17613, 17612], [17484, 17485, 17613], [17485, 17614, 17613], [17485, 17486, 17615], [17485, 17615, 17614], [17486, 17487, 17615], [17487, 17616, 17615], [17487, 17488, 17617], [17487, 17617, 17616], [17488, 17489, 17617], [17489, 17618, 17617], [17489, 17490, 17619], [17489, 17619, 17618], [17490, 17491, 17619], [17491, 17620, 17619], [17491, 17492, 17621], [17491, 17621, 17620], [17492, 17493, 17621], [17493, 17622, 17621], [17493, 17494, 17623], [17493, 17623, 17622], [17494, 17495, 17623], [17495, 17624, 17623], [17495, 17496, 17625], [17495, 17625, 17624], [17496, 17497, 17625], [17497, 17626, 17625], [17497, 17498, 17627], [17497, 17627, 17626], [17498, 17499, 17627], [17499, 17628, 17627], [17499, 17500, 17629], [17499, 17629, 17628], [17500, 17501, 17629], [17501, 17630, 17629], [17501, 17502, 17631], [17501, 17631, 17630], [17502, 17503, 17631], [17503, 17632, 17631], [17503, 17504, 17633], [17503, 17633, 17632], [17504, 17505, 17633], [17505, 17634, 17633], [17505, 17506, 17635], [17505, 17635, 17634], [17506, 17507, 17635], [17507, 17636, 17635], [17507, 17508, 17637], [17507, 17637, 17636], [17508, 17509, 17637], [17509, 17638, 17637], [17509, 17510, 17639], [17509, 17639, 17638], [17510, 17511, 17639], [17511, 17640, 17639], [17511, 17512, 17641], [17511, 17641, 17640], [17512, 17513, 17641], [17513, 17642, 17641], [17513, 17514, 17643], [17513, 17643, 17642], [17514, 17515, 17643], [17515, 17644, 17643], [17515, 17516, 17645], [17515, 17645, 17644], [17516, 17517, 17645], [17517, 17646, 17645], [17517, 17518, 17647], [17517, 17647, 17646], [17518, 17519, 17647], [17519, 17648, 17647], [17519, 17520, 17649], [17519, 17649, 17648], [17520, 17521, 17649], [17521, 17650, 17649], [17521, 17522, 17651], [17521, 17651, 17650], [17522, 17523, 17651], [17523, 17652, 17651], [17523, 17524, 17653], [17523, 17653, 17652], [17524, 17525, 17653], [17525, 17654, 17653], [17525, 17526, 17655], [17525, 17655, 17654], [17526, 17527, 17655], [17527, 17656, 17655], [17527, 17528, 17657], [17527, 17657, 17656], [17528, 17529, 17657], [17529, 17658, 17657], [17529, 17530, 17659], [17529, 17659, 17658], [17530, 17531, 17659], [17531, 17660, 17659], [17531, 17532, 17661], [17531, 17661, 17660], [17532, 17533, 17661], [17533, 17662, 17661], [17533, 17534, 17663], [17533, 17663, 17662], [17534, 17535, 17663], [17535, 17664, 17663], [17535, 17536, 17665], [17535, 17665, 17664], [17536, 17537, 17665], [17537, 17666, 17665], [17537, 17538, 17667], [17537, 17667, 17666], [17538, 17539, 17667], [17539, 17668, 17667], [17539, 17540, 17669], [17539, 17669, 17668], [17540, 17541, 17669], [17541, 17670, 17669], [17541, 17542, 17671], [17541, 17671, 17670], [17542, 17543, 17671], [17543, 17672, 17671], [17543, 17544, 17673], [17543, 17673, 17672], [17544, 17545, 17673], [17545, 17674, 17673], [17545, 17546, 17675], [17545, 17675, 17674], [17546, 17547, 17675], [17547, 17676, 17675], [17547, 17548, 17677], [17547, 17677, 17676], [17548, 17549, 17677], [17549, 17678, 17677], [17549, 17550, 17679], [17549, 17679, 17678], [17550, 17551, 17679], [17551, 17680, 17679], [17551, 17552, 17681], [17551, 17681, 17680], [17552, 17553, 17681], [17553, 17682, 17681], [17553, 17554, 17683], [17553, 17683, 17682], [17554, 17555, 17683], [17555, 17684, 17683], [17555, 17556, 17685], [17555, 17685, 17684], [17556, 17557, 17685], 
[17557, 17686, 17685], [17557, 17558, 17687], [17557, 17687, 17686], [17558, 17559, 17687], [17559, 17688, 17687], [17559, 17560, 17689], [17559, 17689, 17688], [17560, 17561, 17689], [17561, 17690, 17689], [17561, 17562, 17691], [17561, 17691, 17690], [17562, 17563, 17691], [17563, 17692, 17691], [17563, 17564, 17693], [17563, 17693, 17692], [17564, 17565, 17693], [17565, 17694, 17693], [17565, 17566, 17695], [17565, 17695, 17694], [17566, 17567, 17695], [17567, 17696, 17695], [17567, 17568, 17697], [17567, 17697, 17696], [17568, 17569, 17697], [17569, 17698, 17697], [17569, 17570, 17699], [17569, 17699, 17698], [17570, 17571, 17699], [17571, 17700, 17699], [17571, 17572, 17701], [17571, 17701, 17700], [17572, 17573, 17701], [17573, 17702, 17701], [17573, 17574, 17703], [17573, 17703, 17702], [17574, 17575, 17703], [17575, 17704, 17703], [17575, 17576, 17705], [17575, 17705, 17704], [17576, 17577, 17705], [17577, 17706, 17705], [17577, 17578, 17707], [17577, 17707, 17706], [17578, 17579, 17707], [17579, 17708, 17707], [17579, 17580, 17709], [17579, 17709, 17708], [17580, 17581, 17709], [17581, 17710, 17709], [17581, 17582, 17711], [17581, 17711, 17710], [17582, 17583, 17711], [17583, 17712, 17711], [17583, 17584, 17713], [17583, 17713, 17712], [17584, 17585, 17713], [17585, 17714, 17713], [17585, 17586, 17715], [17585, 17715, 17714], [17586, 17587, 17715], [17587, 17716, 17715], [17587, 17588, 17717], [17587, 17717, 17716], [17588, 17589, 17717], [17589, 17718, 17717], [17590, 17591, 17719], [17591, 17720, 17719], [17591, 17592, 17721], [17591, 17721, 17720], [17592, 17593, 17721], [17593, 17722, 17721], [17593, 17594, 17723], [17593, 17723, 17722], [17594, 17595, 17723], [17595, 17724, 17723], [17595, 17596, 17725], [17595, 17725, 17724], [17596, 17597, 17725], [17597, 17726, 17725], [17597, 17598, 17727], [17597, 17727, 17726], [17598, 17599, 17727], [17599, 17728, 17727], [17599, 17600, 17729], [17599, 17729, 17728], [17600, 17601, 17729], [17601, 17730, 17729], [17601, 17602, 17731], [17601, 17731, 17730], [17602, 17603, 17731], [17603, 17732, 17731], [17603, 17604, 17733], [17603, 17733, 17732], [17604, 17605, 17733], [17605, 17734, 17733], [17605, 17606, 17735], [17605, 17735, 17734], [17606, 17607, 17735], [17607, 17736, 17735], [17607, 17608, 17737], [17607, 17737, 17736], [17608, 17609, 17737], [17609, 17738, 17737], [17609, 17610, 17739], [17609, 17739, 17738], [17610, 17611, 17739], [17611, 17740, 17739], [17611, 17612, 17741], [17611, 17741, 17740], [17612, 17613, 17741], [17613, 17742, 17741], [17613, 17614, 17743], [17613, 17743, 17742], [17614, 17615, 17743], [17615, 17744, 17743], [17615, 17616, 17745], [17615, 17745, 17744], [17616, 17617, 17745], [17617, 17746, 17745], [17617, 17618, 17747], [17617, 17747, 17746], [17618, 17619, 17747], [17619, 17748, 17747], [17619, 17620, 17749], [17619, 17749, 17748], [17620, 17621, 17749], [17621, 17750, 17749], [17621, 17622, 17751], [17621, 17751, 17750], [17622, 17623, 17751], [17623, 17752, 17751], [17623, 17624, 17753], [17623, 17753, 17752], [17624, 17625, 17753], [17625, 17754, 17753], [17625, 17626, 17755], [17625, 17755, 17754], [17626, 17627, 17755], [17627, 17756, 17755], [17627, 17628, 17757], [17627, 17757, 17756], [17628, 17629, 17757], [17629, 17758, 17757], [17629, 17630, 17759], [17629, 17759, 17758], [17630, 17631, 17759], [17631, 17760, 17759], [17631, 17632, 17761], [17631, 17761, 17760], [17632, 17633, 17761], [17633, 17762, 17761], [17633, 17634, 17763], [17633, 17763, 17762], [17634, 17635, 17763], [17635, 
17764, 17763], [17635, 17636, 17765], [17635, 17765, 17764], [17636, 17637, 17765], [17637, 17766, 17765], [17637, 17638, 17767], [17637, 17767, 17766], [17638, 17639, 17767], [17639, 17768, 17767], [17639, 17640, 17769], [17639, 17769, 17768], [17640, 17641, 17769], [17641, 17770, 17769], [17641, 17642, 17771], [17641, 17771, 17770], [17642, 17643, 17771], [17643, 17772, 17771], [17643, 17644, 17773], [17643, 17773, 17772], [17644, 17645, 17773], [17645, 17774, 17773], [17645, 17646, 17775], [17645, 17775, 17774], [17646, 17647, 17775], [17647, 17776, 17775], [17647, 17648, 17777], [17647, 17777, 17776], [17648, 17649, 17777], [17649, 17778, 17777], [17649, 17650, 17779], [17649, 17779, 17778], [17650, 17651, 17779], [17651, 17780, 17779], [17651, 17652, 17781], [17651, 17781, 17780], [17652, 17653, 17781], [17653, 17782, 17781], [17653, 17654, 17783], [17653, 17783, 17782], [17654, 17655, 17783], [17655, 17784, 17783], [17655, 17656, 17785], [17655, 17785, 17784], [17656, 17657, 17785], [17657, 17786, 17785], [17657, 17658, 17787], [17657, 17787, 17786], [17658, 17659, 17787], [17659, 17788, 17787], [17659, 17660, 17789], [17659, 17789, 17788], [17660, 17661, 17789], [17661, 17790, 17789], [17661, 17662, 17791], [17661, 17791, 17790], [17662, 17663, 17791], [17663, 17792, 17791], [17663, 17664, 17793], [17663, 17793, 17792], [17664, 17665, 17793], [17665, 17794, 17793], [17665, 17666, 17795], [17665, 17795, 17794], [17666, 17667, 17795], [17667, 17796, 17795], [17667, 17668, 17797], [17667, 17797, 17796], [17668, 17669, 17797], [17669, 17798, 17797], [17669, 17670, 17799], [17669, 17799, 17798], [17670, 17671, 17799], [17671, 17800, 17799], [17671, 17672, 17801], [17671, 17801, 17800], [17672, 17673, 17801], [17673, 17802, 17801], [17673, 17674, 17803], [17673, 17803, 17802], [17674, 17675, 17803], [17675, 17804, 17803], [17675, 17676, 17805], [17675, 17805, 17804], [17676, 17677, 17805], [17677, 17806, 17805], [17677, 17678, 17807], [17677, 17807, 17806], [17678, 17679, 17807], [17679, 17808, 17807], [17679, 17680, 17809], [17679, 17809, 17808], [17680, 17681, 17809], [17681, 17810, 17809], [17681, 17682, 17811], [17681, 17811, 17810], [17682, 17683, 17811], [17683, 17812, 17811], [17683, 17684, 17813], [17683, 17813, 17812], [17684, 17685, 17813], [17685, 17814, 17813], [17685, 17686, 17815], [17685, 17815, 17814], [17686, 17687, 17815], [17687, 17816, 17815], [17687, 17688, 17817], [17687, 17817, 17816], [17688, 17689, 17817], [17689, 17818, 17817], [17689, 17690, 17819], [17689, 17819, 17818], [17690, 17691, 17819], [17691, 17820, 17819], [17691, 17692, 17821], [17691, 17821, 17820], [17692, 17693, 17821], [17693, 17822, 17821], [17693, 17694, 17823], [17693, 17823, 17822], [17694, 17695, 17823], [17695, 17824, 17823], [17695, 17696, 17825], [17695, 17825, 17824], [17696, 17697, 17825], [17697, 17826, 17825], [17697, 17698, 17827], [17697, 17827, 17826], [17698, 17699, 17827], [17699, 17828, 17827], [17699, 17700, 17829], [17699, 17829, 17828], [17700, 17701, 17829], [17701, 17830, 17829], [17701, 17702, 17831], [17701, 17831, 17830], [17702, 17703, 17831], [17703, 17832, 17831], [17703, 17704, 17833], [17703, 17833, 17832], [17704, 17705, 17833], [17705, 17834, 17833], [17705, 17706, 17835], [17705, 17835, 17834], [17706, 17707, 17835], [17707, 17836, 17835], [17707, 17708, 17837], [17707, 17837, 17836], [17708, 17709, 17837], [17709, 17838, 17837], [17709, 17710, 17839], [17709, 17839, 17838], [17710, 17711, 17839], [17711, 17840, 17839], [17711, 17712, 17841], [17711, 17841, 
17840], [17712, 17713, 17841], [17713, 17842, 17841], [17713, 17714, 17843], [17713, 17843, 17842], [17714, 17715, 17843], [17715, 17844, 17843], [17715, 17716, 17845], [17715, 17845, 17844], [17716, 17717, 17845], [17717, 17846, 17845], [17717, 17718, 17847], [17717, 17847, 17846], [17719, 17720, 17849], [17719, 17849, 17848], [17720, 17721, 17849], [17721, 17850, 17849], [17721, 17722, 17851], [17721, 17851, 17850], [17722, 17723, 17851], [17723, 17852, 17851], [17723, 17724, 17853], [17723, 17853, 17852], [17724, 17725, 17853], [17725, 17854, 17853], [17725, 17726, 17855], [17725, 17855, 17854], [17726, 17727, 17855], [17727, 17856, 17855], [17727, 17728, 17857], [17727, 17857, 17856], [17728, 17729, 17857], [17729, 17858, 17857], [17729, 17730, 17859], [17729, 17859, 17858], [17730, 17731, 17859], [17731, 17860, 17859], [17731, 17732, 17861], [17731, 17861, 17860], [17732, 17733, 17861], [17733, 17862, 17861], [17733, 17734, 17863], [17733, 17863, 17862], [17734, 17735, 17863], [17735, 17864, 17863], [17735, 17736, 17865], [17735, 17865, 17864], [17736, 17737, 17865], [17737, 17866, 17865], [17737, 17738, 17867], [17737, 17867, 17866], [17738, 17739, 17867], [17739, 17868, 17867], [17739, 17740, 17869], [17739, 17869, 17868], [17740, 17741, 17869], [17741, 17870, 17869], [17741, 17742, 17871], [17741, 17871, 17870], [17742, 17743, 17871], [17743, 17872, 17871], [17743, 17744, 17873], [17743, 17873, 17872], [17744, 17745, 17873], [17745, 17874, 17873], [17745, 17746, 17875], [17745, 17875, 17874], [17746, 17747, 17875], [17747, 17876, 17875], [17747, 17748, 17877], [17747, 17877, 17876], [17748, 17749, 17877], [17749, 17878, 17877], [17749, 17750, 17879], [17749, 17879, 17878], [17750, 17751, 17879], [17751, 17880, 17879], [17751, 17752, 17881], [17751, 17881, 17880], [17752, 17753, 17881], [17753, 17882, 17881], [17753, 17754, 17883], [17753, 17883, 17882], [17754, 17755, 17883], [17755, 17884, 17883], [17755, 17756, 17885], [17755, 17885, 17884], [17756, 17757, 17885], [17757, 17886, 17885], [17757, 17758, 17887], [17757, 17887, 17886], [17758, 17759, 17887], [17759, 17888, 17887], [17759, 17760, 17889], [17759, 17889, 17888], [17760, 17761, 17889], [17761, 17890, 17889], [17761, 17762, 17891], [17761, 17891, 17890], [17762, 17763, 17891], [17763, 17892, 17891], [17763, 17764, 17893], [17763, 17893, 17892], [17764, 17765, 17893], [17765, 17894, 17893], [17765, 17766, 17895], [17765, 17895, 17894], [17766, 17767, 17895], [17767, 17896, 17895], [17767, 17768, 17897], [17767, 17897, 17896], [17768, 17769, 17897], [17769, 17898, 17897], [17769, 17770, 17899], [17769, 17899, 17898], [17770, 17771, 17899], [17771, 17900, 17899], [17771, 17772, 17901], [17771, 17901, 17900], [17772, 17773, 17901], [17773, 17902, 17901], [17773, 17774, 17903], [17773, 17903, 17902], [17774, 17775, 17903], [17775, 17904, 17903], [17775, 17776, 17905], [17775, 17905, 17904], [17776, 17777, 17905], [17777, 17906, 17905], [17777, 17778, 17907], [17777, 17907, 17906], [17778, 17779, 17907], [17779, 17908, 17907], [17779, 17780, 17909], [17779, 17909, 17908], [17780, 17781, 17909], [17781, 17910, 17909], [17781, 17782, 17911], [17781, 17911, 17910], [17782, 17783, 17911], [17783, 17912, 17911], [17783, 17784, 17913], [17783, 17913, 17912], [17784, 17785, 17913], [17785, 17914, 17913], [17785, 17786, 17915], [17785, 17915, 17914], [17786, 17787, 17915], [17787, 17916, 17915], [17787, 17788, 17917], [17787, 17917, 17916], [17788, 17789, 17917], [17789, 17918, 17917], [17789, 17790, 17919], [17789, 17919, 17918], 
[17790, 17791, 17919], [17791, 17920, 17919], [17791, 17792, 17921], [17791, 17921, 17920], [17792, 17793, 17921], [17793, 17922, 17921], [17793, 17794, 17923], [17793, 17923, 17922], [17794, 17795, 17923], [17795, 17924, 17923], [17795, 17796, 17925], [17795, 17925, 17924], [17796, 17797, 17925], [17797, 17926, 17925], [17797, 17798, 17927], [17797, 17927, 17926], [17798, 17799, 17927], [17799, 17928, 17927], [17799, 17800, 17929], [17799, 17929, 17928], [17800, 17801, 17929], [17801, 17930, 17929], [17801, 17802, 17931], [17801, 17931, 17930], [17802, 17803, 17931], [17803, 17932, 17931], [17803, 17804, 17933], [17803, 17933, 17932], [17804, 17805, 17933], [17805, 17934, 17933], [17805, 17806, 17935], [17805, 17935, 17934], [17806, 17807, 17935], [17807, 17936, 17935], [17807, 17808, 17937], [17807, 17937, 17936], [17808, 17809, 17937], [17809, 17938, 17937], [17809, 17810, 17939], [17809, 17939, 17938], [17810, 17811, 17939], [17811, 17940, 17939], [17811, 17812, 17941], [17811, 17941, 17940], [17812, 17813, 17941], [17813, 17942, 17941], [17813, 17814, 17943], [17813, 17943, 17942], [17814, 17815, 17943], [17815, 17944, 17943], [17815, 17816, 17945], [17815, 17945, 17944], [17816, 17817, 17945], [17817, 17946, 17945], [17817, 17818, 17947], [17817, 17947, 17946], [17818, 17819, 17947], [17819, 17948, 17947], [17819, 17820, 17949], [17819, 17949, 17948], [17820, 17821, 17949], [17821, 17950, 17949], [17821, 17822, 17951], [17821, 17951, 17950], [17822, 17823, 17951], [17823, 17952, 17951], [17823, 17824, 17953], [17823, 17953, 17952], [17824, 17825, 17953], [17825, 17954, 17953], [17825, 17826, 17955], [17825, 17955, 17954], [17826, 17827, 17955], [17827, 17956, 17955], [17827, 17828, 17957], [17827, 17957, 17956], [17828, 17829, 17957], [17829, 17958, 17957], [17829, 17830, 17959], [17829, 17959, 17958], [17830, 17831, 17959], [17831, 17960, 17959], [17831, 17832, 17961], [17831, 17961, 17960], [17832, 17833, 17961], [17833, 17962, 17961], [17833, 17834, 17963], [17833, 17963, 17962], [17834, 17835, 17963], [17835, 17964, 17963], [17835, 17836, 17965], [17835, 17965, 17964], [17836, 17837, 17965], [17837, 17966, 17965], [17837, 17838, 17967], [17837, 17967, 17966], [17838, 17839, 17967], [17839, 17968, 17967], [17839, 17840, 17969], [17839, 17969, 17968], [17840, 17841, 17969], [17841, 17970, 17969], [17841, 17842, 17971], [17841, 17971, 17970], [17842, 17843, 17971], [17843, 17972, 17971], [17843, 17844, 17973], [17843, 17973, 17972], [17844, 17845, 17973], [17845, 17974, 17973], [17845, 17846, 17975], [17845, 17975, 17974], [17846, 17847, 17975], [17847, 17976, 17975], [17848, 17849, 17977], [17849, 17978, 17977], [17849, 17850, 17979], [17849, 17979, 17978], [17850, 17851, 17979], [17851, 17980, 17979], [17851, 17852, 17981], [17851, 17981, 17980], [17852, 17853, 17981], [17853, 17982, 17981], [17853, 17854, 17983], [17853, 17983, 17982], [17854, 17855, 17983], [17855, 17984, 17983], [17855, 17856, 17985], [17855, 17985, 17984], [17856, 17857, 17985], [17857, 17986, 17985], [17857, 17858, 17987], [17857, 17987, 17986], [17858, 17859, 17987], [17859, 17988, 17987], [17859, 17860, 17989], [17859, 17989, 17988], [17860, 17861, 17989], [17861, 17990, 17989], [17861, 17862, 17991], [17861, 17991, 17990], [17862, 17863, 17991], [17863, 17992, 17991], [17863, 17864, 17993], [17863, 17993, 17992], [17864, 17865, 17993], [17865, 17994, 17993], [17865, 17866, 17995], [17865, 17995, 17994], [17866, 17867, 17995], [17867, 17996, 17995], [17867, 17868, 17997], [17867, 17997, 17996], [17868, 
17869, 17997], [17869, 17998, 17997], [17869, 17870, 17999], [17869, 17999, 17998], [17870, 17871, 17999], [17871, 18000, 17999], [17871, 17872, 18001], [17871, 18001, 18000], [17872, 17873, 18001], [17873, 18002, 18001], [17873, 17874, 18003], [17873, 18003, 18002], [17874, 17875, 18003], [17875, 18004, 18003], [17875, 17876, 18005], [17875, 18005, 18004], [17876, 17877, 18005], [17877, 18006, 18005], [17877, 17878, 18007], [17877, 18007, 18006], [17878, 17879, 18007], [17879, 18008, 18007], [17879, 17880, 18009], [17879, 18009, 18008], [17880, 17881, 18009], [17881, 18010, 18009], [17881, 17882, 18011], [17881, 18011, 18010], [17882, 17883, 18011], [17883, 18012, 18011], [17883, 17884, 18013], [17883, 18013, 18012], [17884, 17885, 18013], [17885, 18014, 18013], [17885, 17886, 18015], [17885, 18015, 18014], [17886, 17887, 18015], [17887, 18016, 18015], [17887, 17888, 18017], [17887, 18017, 18016], [17888, 17889, 18017], [17889, 18018, 18017], [17889, 17890, 18019], [17889, 18019, 18018], [17890, 17891, 18019], [17891, 18020, 18019], [17891, 17892, 18021], [17891, 18021, 18020], [17892, 17893, 18021], [17893, 18022, 18021], [17893, 17894, 18023], [17893, 18023, 18022], [17894, 17895, 18023], [17895, 18024, 18023], [17895, 17896, 18025], [17895, 18025, 18024], [17896, 17897, 18025], [17897, 18026, 18025], [17897, 17898, 18027], [17897, 18027, 18026], [17898, 17899, 18027], [17899, 18028, 18027], [17899, 17900, 18029], [17899, 18029, 18028], [17900, 17901, 18029], [17901, 18030, 18029], [17901, 17902, 18031], [17901, 18031, 18030], [17902, 17903, 18031], [17903, 18032, 18031], [17903, 17904, 18033], [17903, 18033, 18032], [17904, 17905, 18033], [17905, 18034, 18033], [17905, 17906, 18035], [17905, 18035, 18034], [17906, 17907, 18035], [17907, 18036, 18035], [17907, 17908, 18037], [17907, 18037, 18036], [17908, 17909, 18037], [17909, 18038, 18037], [17909, 17910, 18039], [17909, 18039, 18038], [17910, 17911, 18039], [17911, 18040, 18039], [17911, 17912, 18041], [17911, 18041, 18040], [17912, 17913, 18041], [17913, 18042, 18041], [17913, 17914, 18043], [17913, 18043, 18042], [17914, 17915, 18043], [17915, 18044, 18043], [17915, 17916, 18045], [17915, 18045, 18044], [17916, 17917, 18045], [17917, 18046, 18045], [17917, 17918, 18047], [17917, 18047, 18046], [17918, 17919, 18047], [17919, 18048, 18047], [17919, 17920, 18049], [17919, 18049, 18048], [17920, 17921, 18049], [17921, 18050, 18049], [17921, 17922, 18051], [17921, 18051, 18050], [17922, 17923, 18051], [17923, 18052, 18051], [17923, 17924, 18053], [17923, 18053, 18052], [17924, 17925, 18053], [17925, 18054, 18053], [17925, 17926, 18055], [17925, 18055, 18054], [17926, 17927, 18055], [17927, 18056, 18055], [17927, 17928, 18057], [17927, 18057, 18056], [17928, 17929, 18057], [17929, 18058, 18057], [17929, 17930, 18059], [17929, 18059, 18058], [17930, 17931, 18059], [17931, 18060, 18059], [17931, 17932, 18061], [17931, 18061, 18060], [17932, 17933, 18061], [17933, 18062, 18061], [17933, 17934, 18063], [17933, 18063, 18062], [17934, 17935, 18063], [17935, 18064, 18063], [17935, 17936, 18065], [17935, 18065, 18064], [17936, 17937, 18065], [17937, 18066, 18065], [17937, 17938, 18067], [17937, 18067, 18066], [17938, 17939, 18067], [17939, 18068, 18067], [17939, 17940, 18069], [17939, 18069, 18068], [17940, 17941, 18069], [17941, 18070, 18069], [17941, 17942, 18071], [17941, 18071, 18070], [17942, 17943, 18071], [17943, 18072, 18071], [17943, 17944, 18073], [17943, 18073, 18072], [17944, 17945, 18073], [17945, 18074, 18073], [17945, 17946, 
18075], [17945, 18075, 18074], [17946, 17947, 18075], [17947, 18076, 18075], [17947, 17948, 18077], [17947, 18077, 18076], [17948, 17949, 18077], [17949, 18078, 18077], [17949, 17950, 18079], [17949, 18079, 18078], [17950, 17951, 18079], [17951, 18080, 18079], [17951, 17952, 18081], [17951, 18081, 18080], [17952, 17953, 18081], [17953, 18082, 18081], [17953, 17954, 18083], [17953, 18083, 18082], [17954, 17955, 18083], [17955, 18084, 18083], [17955, 17956, 18085], [17955, 18085, 18084], [17956, 17957, 18085], [17957, 18086, 18085], [17957, 17958, 18087], [17957, 18087, 18086], [17958, 17959, 18087], [17959, 18088, 18087], [17959, 17960, 18089], [17959, 18089, 18088], [17960, 17961, 18089], [17961, 18090, 18089], [17961, 17962, 18091], [17961, 18091, 18090], [17962, 17963, 18091], [17963, 18092, 18091], [17963, 17964, 18093], [17963, 18093, 18092], [17964, 17965, 18093], [17965, 18094, 18093], [17965, 17966, 18095], [17965, 18095, 18094], [17966, 17967, 18095], [17967, 18096, 18095], [17967, 17968, 18097], [17967, 18097, 18096], [17968, 17969, 18097], [17969, 18098, 18097], [17969, 17970, 18099], [17969, 18099, 18098], [17970, 17971, 18099], [17971, 18100, 18099], [17971, 17972, 18101], [17971, 18101, 18100], [17972, 17973, 18101], [17973, 18102, 18101], [17973, 17974, 18103], [17973, 18103, 18102], [17974, 17975, 18103], [17975, 18104, 18103], [17975, 17976, 18105], [17975, 18105, 18104], [17977, 17978, 18107], [17977, 18107, 18106], [17978, 17979, 18107], [17979, 18108, 18107], [17979, 17980, 18109], [17979, 18109, 18108], [17980, 17981, 18109], [17981, 18110, 18109], [17981, 17982, 18111], [17981, 18111, 18110], [17982, 17983, 18111], [17983, 18112, 18111], [17983, 17984, 18113], [17983, 18113, 18112], [17984, 17985, 18113], [17985, 18114, 18113], [17985, 17986, 18115], [17985, 18115, 18114], [17986, 17987, 18115], [17987, 18116, 18115], [17987, 17988, 18117], [17987, 18117, 18116], [17988, 17989, 18117], [17989, 18118, 18117], [17989, 17990, 18119], [17989, 18119, 18118], [17990, 17991, 18119], [17991, 18120, 18119], [17991, 17992, 18121], [17991, 18121, 18120], [17992, 17993, 18121], [17993, 18122, 18121], [17993, 17994, 18123], [17993, 18123, 18122], [17994, 17995, 18123], [17995, 18124, 18123], [17995, 17996, 18125], [17995, 18125, 18124], [17996, 17997, 18125], [17997, 18126, 18125], [17997, 17998, 18127], [17997, 18127, 18126], [17998, 17999, 18127], [17999, 18128, 18127], [17999, 18000, 18129], [17999, 18129, 18128], [18000, 18001, 18129], [18001, 18130, 18129], [18001, 18002, 18131], [18001, 18131, 18130], [18002, 18003, 18131], [18003, 18132, 18131], [18003, 18004, 18133], [18003, 18133, 18132], [18004, 18005, 18133], [18005, 18134, 18133], [18005, 18006, 18135], [18005, 18135, 18134], [18006, 18007, 18135], [18007, 18136, 18135], [18007, 18008, 18137], [18007, 18137, 18136], [18008, 18009, 18137], [18009, 18138, 18137], [18009, 18010, 18139], [18009, 18139, 18138], [18010, 18011, 18139], [18011, 18140, 18139], [18011, 18012, 18141], [18011, 18141, 18140], [18012, 18013, 18141], [18013, 18142, 18141], [18013, 18014, 18143], [18013, 18143, 18142], [18014, 18015, 18143], [18015, 18144, 18143], [18015, 18016, 18145], [18015, 18145, 18144], [18016, 18017, 18145], [18017, 18146, 18145], [18017, 18018, 18147], [18017, 18147, 18146], [18018, 18019, 18147], [18019, 18148, 18147], [18019, 18020, 18149], [18019, 18149, 18148], [18020, 18021, 18149], [18021, 18150, 18149], [18021, 18022, 18151], [18021, 18151, 18150], [18022, 18023, 18151], [18023, 18152, 18151], [18023, 18024, 18153], 
[18023, 18153, 18152], [18024, 18025, 18153], [18025, 18154, 18153], [18025, 18026, 18155], [18025, 18155, 18154], [18026, 18027, 18155], [18027, 18156, 18155], [18027, 18028, 18157], [18027, 18157, 18156], [18028, 18029, 18157], [18029, 18158, 18157], [18029, 18030, 18159], [18029, 18159, 18158], [18030, 18031, 18159], [18031, 18160, 18159], [18031, 18032, 18161], [18031, 18161, 18160], [18032, 18033, 18161], [18033, 18162, 18161], [18033, 18034, 18163], [18033, 18163, 18162], [18034, 18035, 18163], [18035, 18164, 18163], [18035, 18036, 18165], [18035, 18165, 18164], [18036, 18037, 18165], [18037, 18166, 18165], [18037, 18038, 18167], [18037, 18167, 18166], [18038, 18039, 18167], [18039, 18168, 18167], [18039, 18040, 18169], [18039, 18169, 18168], [18040, 18041, 18169], [18041, 18170, 18169], [18041, 18042, 18171], [18041, 18171, 18170], [18042, 18043, 18171], [18043, 18172, 18171], [18043, 18044, 18173], [18043, 18173, 18172], [18044, 18045, 18173], [18045, 18174, 18173], [18045, 18046, 18175], [18045, 18175, 18174], [18046, 18047, 18175], [18047, 18176, 18175], [18047, 18048, 18177], [18047, 18177, 18176], [18048, 18049, 18177], [18049, 18178, 18177], [18049, 18050, 18179], [18049, 18179, 18178], [18050, 18051, 18179], [18051, 18180, 18179], [18051, 18052, 18181], [18051, 18181, 18180], [18052, 18053, 18181], [18053, 18182, 18181], [18053, 18054, 18183], [18053, 18183, 18182], [18054, 18055, 18183], [18055, 18184, 18183], [18055, 18056, 18185], [18055, 18185, 18184], [18056, 18057, 18185], [18057, 18186, 18185], [18057, 18058, 18187], [18057, 18187, 18186], [18058, 18059, 18187], [18059, 18188, 18187], [18059, 18060, 18189], [18059, 18189, 18188], [18060, 18061, 18189], [18061, 18190, 18189], [18061, 18062, 18191], [18061, 18191, 18190], [18062, 18063, 18191], [18063, 18192, 18191], [18063, 18064, 18193], [18063, 18193, 18192], [18064, 18065, 18193], [18065, 18194, 18193], [18065, 18066, 18195], [18065, 18195, 18194], [18066, 18067, 18195], [18067, 18196, 18195], [18067, 18068, 18197], [18067, 18197, 18196], [18068, 18069, 18197], [18069, 18198, 18197], [18069, 18070, 18199], [18069, 18199, 18198], [18070, 18071, 18199], [18071, 18200, 18199], [18071, 18072, 18201], [18071, 18201, 18200], [18072, 18073, 18201], [18073, 18202, 18201], [18073, 18074, 18203], [18073, 18203, 18202], [18074, 18075, 18203], [18075, 18204, 18203], [18075, 18076, 18205], [18075, 18205, 18204], [18076, 18077, 18205], [18077, 18206, 18205], [18077, 18078, 18207], [18077, 18207, 18206], [18078, 18079, 18207], [18079, 18208, 18207], [18079, 18080, 18209], [18079, 18209, 18208], [18080, 18081, 18209], [18081, 18210, 18209], [18081, 18082, 18211], [18081, 18211, 18210], [18082, 18083, 18211], [18083, 18212, 18211], [18083, 18084, 18213], [18083, 18213, 18212], [18084, 18085, 18213], [18085, 18214, 18213], [18085, 18086, 18215], [18085, 18215, 18214], [18086, 18087, 18215], [18087, 18216, 18215], [18087, 18088, 18217], [18087, 18217, 18216], [18088, 18089, 18217], [18089, 18218, 18217], [18089, 18090, 18219], [18089, 18219, 18218], [18090, 18091, 18219], [18091, 18220, 18219], [18091, 18092, 18221], [18091, 18221, 18220], [18092, 18093, 18221], [18093, 18222, 18221], [18093, 18094, 18223], [18093, 18223, 18222], [18094, 18095, 18223], [18095, 18224, 18223], [18095, 18096, 18225], [18095, 18225, 18224], [18096, 18097, 18225], [18097, 18226, 18225], [18097, 18098, 18227], [18097, 18227, 18226], [18098, 18099, 18227], [18099, 18228, 18227], [18099, 18100, 18229], [18099, 18229, 18228], [18100, 18101, 18229], [18101, 
18230, 18229], [18101, 18102, 18231], [18101, 18231, 18230], [18102, 18103, 18231], [18103, 18232, 18231], [18103, 18104, 18233], [18103, 18233, 18232], [18104, 18105, 18233], [18105, 18234, 18233], [18106, 18107, 18235], [18107, 18236, 18235], [18107, 18108, 18237], [18107, 18237, 18236], [18108, 18109, 18237], [18109, 18238, 18237], [18109, 18110, 18239], [18109, 18239, 18238], [18110, 18111, 18239], [18111, 18240, 18239], [18111, 18112, 18241], [18111, 18241, 18240], [18112, 18113, 18241], [18113, 18242, 18241], [18113, 18114, 18243], [18113, 18243, 18242], [18114, 18115, 18243], [18115, 18244, 18243], [18115, 18116, 18245], [18115, 18245, 18244], [18116, 18117, 18245], [18117, 18246, 18245], [18117, 18118, 18247], [18117, 18247, 18246], [18118, 18119, 18247], [18119, 18248, 18247], [18119, 18120, 18249], [18119, 18249, 18248], [18120, 18121, 18249], [18121, 18250, 18249], [18121, 18122, 18251], [18121, 18251, 18250], [18122, 18123, 18251], [18123, 18252, 18251], [18123, 18124, 18253], [18123, 18253, 18252], [18124, 18125, 18253], [18125, 18254, 18253], [18125, 18126, 18255], [18125, 18255, 18254], [18126, 18127, 18255], [18127, 18256, 18255], [18127, 18128, 18257], [18127, 18257, 18256], [18128, 18129, 18257], [18129, 18258, 18257], [18129, 18130, 18259], [18129, 18259, 18258], [18130, 18131, 18259], [18131, 18260, 18259], [18131, 18132, 18261], [18131, 18261, 18260], [18132, 18133, 18261], [18133, 18262, 18261], [18133, 18134, 18263], [18133, 18263, 18262], [18134, 18135, 18263], [18135, 18264, 18263], [18135, 18136, 18265], [18135, 18265, 18264], [18136, 18137, 18265], [18137, 18266, 18265], [18137, 18138, 18267], [18137, 18267, 18266], [18138, 18139, 18267], [18139, 18268, 18267], [18139, 18140, 18269], [18139, 18269, 18268], [18140, 18141, 18269], [18141, 18270, 18269], [18141, 18142, 18271], [18141, 18271, 18270], [18142, 18143, 18271], [18143, 18272, 18271], [18143, 18144, 18273], [18143, 18273, 18272], [18144, 18145, 18273], [18145, 18274, 18273], [18145, 18146, 18275], [18145, 18275, 18274], [18146, 18147, 18275], [18147, 18276, 18275], [18147, 18148, 18277], [18147, 18277, 18276], [18148, 18149, 18277], [18149, 18278, 18277], [18149, 18150, 18279], [18149, 18279, 18278], [18150, 18151, 18279], [18151, 18280, 18279], [18151, 18152, 18281], [18151, 18281, 18280], [18152, 18153, 18281], [18153, 18282, 18281], [18153, 18154, 18283], [18153, 18283, 18282], [18154, 18155, 18283], [18155, 18284, 18283], [18155, 18156, 18285], [18155, 18285, 18284], [18156, 18157, 18285], [18157, 18286, 18285], [18157, 18158, 18287], [18157, 18287, 18286], [18158, 18159, 18287], [18159, 18288, 18287], [18159, 18160, 18289], [18159, 18289, 18288], [18160, 18161, 18289], [18161, 18290, 18289], [18161, 18162, 18291], [18161, 18291, 18290], [18162, 18163, 18291], [18163, 18292, 18291], [18163, 18164, 18293], [18163, 18293, 18292], [18164, 18165, 18293], [18165, 18294, 18293], [18165, 18166, 18295], [18165, 18295, 18294], [18166, 18167, 18295], [18167, 18296, 18295], [18167, 18168, 18297], [18167, 18297, 18296], [18168, 18169, 18297], [18169, 18298, 18297], [18169, 18170, 18299], [18169, 18299, 18298], [18170, 18171, 18299], [18171, 18300, 18299], [18171, 18172, 18301], [18171, 18301, 18300], [18172, 18173, 18301], [18173, 18302, 18301], [18173, 18174, 18303], [18173, 18303, 18302], [18174, 18175, 18303], [18175, 18304, 18303], [18175, 18176, 18305], [18175, 18305, 18304], [18176, 18177, 18305], [18177, 18306, 18305], [18177, 18178, 18307], [18177, 18307, 18306], [18178, 18179, 18307], [18179, 18308, 
18307], [18179, 18180, 18309], [18179, 18309, 18308], [18180, 18181, 18309], [18181, 18310, 18309], [18181, 18182, 18311], [18181, 18311, 18310], [18182, 18183, 18311], [18183, 18312, 18311], [18183, 18184, 18313], [18183, 18313, 18312], [18184, 18185, 18313], [18185, 18314, 18313], [18185, 18186, 18315], [18185, 18315, 18314], [18186, 18187, 18315], [18187, 18316, 18315], [18187, 18188, 18317], [18187, 18317, 18316], [18188, 18189, 18317], [18189, 18318, 18317], [18189, 18190, 18319], [18189, 18319, 18318], [18190, 18191, 18319], [18191, 18320, 18319], [18191, 18192, 18321], [18191, 18321, 18320], [18192, 18193, 18321], [18193, 18322, 18321], [18193, 18194, 18323], [18193, 18323, 18322], [18194, 18195, 18323], [18195, 18324, 18323], [18195, 18196, 18325], [18195, 18325, 18324], [18196, 18197, 18325], [18197, 18326, 18325], [18197, 18198, 18327], [18197, 18327, 18326], [18198, 18199, 18327], [18199, 18328, 18327], [18199, 18200, 18329], [18199, 18329, 18328], [18200, 18201, 18329], [18201, 18330, 18329], [18201, 18202, 18331], [18201, 18331, 18330], [18202, 18203, 18331], [18203, 18332, 18331], [18203, 18204, 18333], [18203, 18333, 18332], [18204, 18205, 18333], [18205, 18334, 18333], [18205, 18206, 18335], [18205, 18335, 18334], [18206, 18207, 18335], [18207, 18336, 18335], [18207, 18208, 18337], [18207, 18337, 18336], [18208, 18209, 18337], [18209, 18338, 18337], [18209, 18210, 18339], [18209, 18339, 18338], [18210, 18211, 18339], [18211, 18340, 18339], [18211, 18212, 18341], [18211, 18341, 18340], [18212, 18213, 18341], [18213, 18342, 18341], [18213, 18214, 18343], [18213, 18343, 18342], [18214, 18215, 18343], [18215, 18344, 18343], [18215, 18216, 18345], [18215, 18345, 18344], [18216, 18217, 18345], [18217, 18346, 18345], [18217, 18218, 18347], [18217, 18347, 18346], [18218, 18219, 18347], [18219, 18348, 18347], [18219, 18220, 18349], [18219, 18349, 18348], [18220, 18221, 18349], [18221, 18350, 18349], [18221, 18222, 18351], [18221, 18351, 18350], [18222, 18223, 18351], [18223, 18352, 18351], [18223, 18224, 18353], [18223, 18353, 18352], [18224, 18225, 18353], [18225, 18354, 18353], [18225, 18226, 18355], [18225, 18355, 18354], [18226, 18227, 18355], [18227, 18356, 18355], [18227, 18228, 18357], [18227, 18357, 18356], [18228, 18229, 18357], [18229, 18358, 18357], [18229, 18230, 18359], [18229, 18359, 18358], [18230, 18231, 18359], [18231, 18360, 18359], [18231, 18232, 18361], [18231, 18361, 18360], [18232, 18233, 18361], [18233, 18362, 18361], [18233, 18234, 18363], [18233, 18363, 18362], [18235, 18236, 18365], [18235, 18365, 18364], [18236, 18237, 18365], [18237, 18366, 18365], [18237, 18238, 18367], [18237, 18367, 18366], [18238, 18239, 18367], [18239, 18368, 18367], [18239, 18240, 18369], [18239, 18369, 18368], [18240, 18241, 18369], [18241, 18370, 18369], [18241, 18242, 18371], [18241, 18371, 18370], [18242, 18243, 18371], [18243, 18372, 18371], [18243, 18244, 18373], [18243, 18373, 18372], [18244, 18245, 18373], [18245, 18374, 18373], [18245, 18246, 18375], [18245, 18375, 18374], [18246, 18247, 18375], [18247, 18376, 18375], [18247, 18248, 18377], [18247, 18377, 18376], [18248, 18249, 18377], [18249, 18378, 18377], [18249, 18250, 18379], [18249, 18379, 18378], [18250, 18251, 18379], [18251, 18380, 18379], [18251, 18252, 18381], [18251, 18381, 18380], [18252, 18253, 18381], [18253, 18382, 18381], [18253, 18254, 18383], [18253, 18383, 18382], [18254, 18255, 18383], [18255, 18384, 18383], [18255, 18256, 18385], [18255, 18385, 18384], [18256, 18257, 18385], [18257, 18386, 18385], 
[18257, 18258, 18387], [18257, 18387, 18386], [18258, 18259, 18387], [18259, 18388, 18387], [18259, 18260, 18389], [18259, 18389, 18388], [18260, 18261, 18389], [18261, 18390, 18389], [18261, 18262, 18391], [18261, 18391, 18390], [18262, 18263, 18391], [18263, 18392, 18391], [18263, 18264, 18393], [18263, 18393, 18392], [18264, 18265, 18393], [18265, 18394, 18393], [18265, 18266, 18395], [18265, 18395, 18394], [18266, 18267, 18395], [18267, 18396, 18395], [18267, 18268, 18397], [18267, 18397, 18396], [18268, 18269, 18397], [18269, 18398, 18397], [18269, 18270, 18399], [18269, 18399, 18398], [18270, 18271, 18399], [18271, 18400, 18399], [18271, 18272, 18401], [18271, 18401, 18400], [18272, 18273, 18401], [18273, 18402, 18401], [18273, 18274, 18403], [18273, 18403, 18402], [18274, 18275, 18403], [18275, 18404, 18403], [18275, 18276, 18405], [18275, 18405, 18404], [18276, 18277, 18405], [18277, 18406, 18405], [18277, 18278, 18407], [18277, 18407, 18406], [18278, 18279, 18407], [18279, 18408, 18407], [18279, 18280, 18409], [18279, 18409, 18408], [18280, 18281, 18409], [18281, 18410, 18409], [18281, 18282, 18411], [18281, 18411, 18410], [18282, 18283, 18411], [18283, 18412, 18411], [18283, 18284, 18413], [18283, 18413, 18412], [18284, 18285, 18413], [18285, 18414, 18413], [18285, 18286, 18415], [18285, 18415, 18414], [18286, 18287, 18415], [18287, 18416, 18415], [18287, 18288, 18417], [18287, 18417, 18416], [18288, 18289, 18417], [18289, 18418, 18417], [18289, 18290, 18419], [18289, 18419, 18418], [18290, 18291, 18419], [18291, 18420, 18419], [18291, 18292, 18421], [18291, 18421, 18420], [18292, 18293, 18421], [18293, 18422, 18421], [18293, 18294, 18423], [18293, 18423, 18422], [18294, 18295, 18423], [18295, 18424, 18423], [18295, 18296, 18425], [18295, 18425, 18424], [18296, 18297, 18425], [18297, 18426, 18425], [18297, 18298, 18427], [18297, 18427, 18426], [18298, 18299, 18427], [18299, 18428, 18427], [18299, 18300, 18429], [18299, 18429, 18428], [18300, 18301, 18429], [18301, 18430, 18429], [18301, 18302, 18431], [18301, 18431, 18430], [18302, 18303, 18431], [18303, 18432, 18431], [18303, 18304, 18433], [18303, 18433, 18432], [18304, 18305, 18433], [18305, 18434, 18433], [18305, 18306, 18435], [18305, 18435, 18434], [18306, 18307, 18435], [18307, 18436, 18435], [18307, 18308, 18437], [18307, 18437, 18436], [18308, 18309, 18437], [18309, 18438, 18437], [18309, 18310, 18439], [18309, 18439, 18438], [18310, 18311, 18439], [18311, 18440, 18439], [18311, 18312, 18441], [18311, 18441, 18440], [18312, 18313, 18441], [18313, 18442, 18441], [18313, 18314, 18443], [18313, 18443, 18442], [18314, 18315, 18443], [18315, 18444, 18443], [18315, 18316, 18445], [18315, 18445, 18444], [18316, 18317, 18445], [18317, 18446, 18445], [18317, 18318, 18447], [18317, 18447, 18446], [18318, 18319, 18447], [18319, 18448, 18447], [18319, 18320, 18449], [18319, 18449, 18448], [18320, 18321, 18449], [18321, 18450, 18449], [18321, 18322, 18451], [18321, 18451, 18450], [18322, 18323, 18451], [18323, 18452, 18451], [18323, 18324, 18453], [18323, 18453, 18452], [18324, 18325, 18453], [18325, 18454, 18453], [18325, 18326, 18455], [18325, 18455, 18454], [18326, 18327, 18455], [18327, 18456, 18455], [18327, 18328, 18457], [18327, 18457, 18456], [18328, 18329, 18457], [18329, 18458, 18457], [18329, 18330, 18459], [18329, 18459, 18458], [18330, 18331, 18459], [18331, 18460, 18459], [18331, 18332, 18461], [18331, 18461, 18460], [18332, 18333, 18461], [18333, 18462, 18461], [18333, 18334, 18463], [18333, 18463, 18462], [18334, 
18335, 18463], [18335, 18464, 18463], [18335, 18336, 18465], [18335, 18465, 18464], [18336, 18337, 18465], [18337, 18466, 18465], [18337, 18338, 18467], [18337, 18467, 18466], [18338, 18339, 18467], [18339, 18468, 18467], [18339, 18340, 18469], [18339, 18469, 18468], [18340, 18341, 18469], [18341, 18470, 18469], [18341, 18342, 18471], [18341, 18471, 18470], [18342, 18343, 18471], [18343, 18472, 18471], [18343, 18344, 18473], [18343, 18473, 18472], [18344, 18345, 18473], [18345, 18474, 18473], [18345, 18346, 18475], [18345, 18475, 18474], [18346, 18347, 18475], [18347, 18476, 18475], [18347, 18348, 18477], [18347, 18477, 18476], [18348, 18349, 18477], [18349, 18478, 18477], [18349, 18350, 18479], [18349, 18479, 18478], [18350, 18351, 18479], [18351, 18480, 18479], [18351, 18352, 18481], [18351, 18481, 18480], [18352, 18353, 18481], [18353, 18482, 18481], [18353, 18354, 18483], [18353, 18483, 18482], [18354, 18355, 18483], [18355, 18484, 18483], [18355, 18356, 18485], [18355, 18485, 18484], [18356, 18357, 18485], [18357, 18486, 18485], [18357, 18358, 18487], [18357, 18487, 18486], [18358, 18359, 18487], [18359, 18488, 18487], [18359, 18360, 18489], [18359, 18489, 18488], [18360, 18361, 18489], [18361, 18490, 18489], [18361, 18362, 18491], [18361, 18491, 18490], [18362, 18363, 18491], [18363, 18492, 18491], [18364, 18365, 18493], [18365, 18494, 18493], [18365, 18366, 18495], [18365, 18495, 18494], [18366, 18367, 18495], [18367, 18496, 18495], [18367, 18368, 18497], [18367, 18497, 18496], [18368, 18369, 18497], [18369, 18498, 18497], [18369, 18370, 18499], [18369, 18499, 18498], [18370, 18371, 18499], [18371, 18500, 18499], [18371, 18372, 18501], [18371, 18501, 18500], [18372, 18373, 18501], [18373, 18502, 18501], [18373, 18374, 18503], [18373, 18503, 18502], [18374, 18375, 18503], [18375, 18504, 18503], [18375, 18376, 18505], [18375, 18505, 18504], [18376, 18377, 18505], [18377, 18506, 18505], [18377, 18378, 18507], [18377, 18507, 18506], [18378, 18379, 18507], [18379, 18508, 18507], [18379, 18380, 18509], [18379, 18509, 18508], [18380, 18381, 18509], [18381, 18510, 18509], [18381, 18382, 18511], [18381, 18511, 18510], [18382, 18383, 18511], [18383, 18512, 18511], [18383, 18384, 18513], [18383, 18513, 18512], [18384, 18385, 18513], [18385, 18514, 18513], [18385, 18386, 18515], [18385, 18515, 18514], [18386, 18387, 18515], [18387, 18516, 18515], [18387, 18388, 18517], [18387, 18517, 18516], [18388, 18389, 18517], [18389, 18518, 18517], [18389, 18390, 18519], [18389, 18519, 18518], [18390, 18391, 18519], [18391, 18520, 18519], [18391, 18392, 18521], [18391, 18521, 18520], [18392, 18393, 18521], [18393, 18522, 18521], [18393, 18394, 18523], [18393, 18523, 18522], [18394, 18395, 18523], [18395, 18524, 18523], [18395, 18396, 18525], [18395, 18525, 18524], [18396, 18397, 18525], [18397, 18526, 18525], [18397, 18398, 18527], [18397, 18527, 18526], [18398, 18399, 18527], [18399, 18528, 18527], [18399, 18400, 18529], [18399, 18529, 18528], [18400, 18401, 18529], [18401, 18530, 18529], [18401, 18402, 18531], [18401, 18531, 18530], [18402, 18403, 18531], [18403, 18532, 18531], [18403, 18404, 18533], [18403, 18533, 18532], [18404, 18405, 18533], [18405, 18534, 18533], [18405, 18406, 18535], [18405, 18535, 18534], [18406, 18407, 18535], [18407, 18536, 18535], [18407, 18408, 18537], [18407, 18537, 18536], [18408, 18409, 18537], [18409, 18538, 18537], [18409, 18410, 18539], [18409, 18539, 18538], [18410, 18411, 18539], [18411, 18540, 18539], [18411, 18412, 18541], [18411, 18541, 18540], [18412, 18413, 
18541], [18413, 18542, 18541], [18413, 18414, 18543], [18413, 18543, 18542], [18414, 18415, 18543], [18415, 18544, 18543], [18415, 18416, 18545], [18415, 18545, 18544], [18416, 18417, 18545], [18417, 18546, 18545], [18417, 18418, 18547], [18417, 18547, 18546], [18418, 18419, 18547], [18419, 18548, 18547], [18419, 18420, 18549], [18419, 18549, 18548], [18420, 18421, 18549], [18421, 18550, 18549], [18421, 18422, 18551], [18421, 18551, 18550], [18422, 18423, 18551], [18423, 18552, 18551], [18423, 18424, 18553], [18423, 18553, 18552], [18424, 18425, 18553], [18425, 18554, 18553], [18425, 18426, 18555], [18425, 18555, 18554], [18426, 18427, 18555], [18427, 18556, 18555], [18427, 18428, 18557], [18427, 18557, 18556], [18428, 18429, 18557], [18429, 18558, 18557], [18429, 18430, 18559], [18429, 18559, 18558], [18430, 18431, 18559], [18431, 18560, 18559], [18431, 18432, 18561], [18431, 18561, 18560], [18432, 18433, 18561], [18433, 18562, 18561], [18433, 18434, 18563], [18433, 18563, 18562], [18434, 18435, 18563], [18435, 18564, 18563], [18435, 18436, 18565], [18435, 18565, 18564], [18436, 18437, 18565], [18437, 18566, 18565], [18437, 18438, 18567], [18437, 18567, 18566], [18438, 18439, 18567], [18439, 18568, 18567], [18439, 18440, 18569], [18439, 18569, 18568], [18440, 18441, 18569], [18441, 18570, 18569], [18441, 18442, 18571], [18441, 18571, 18570], [18442, 18443, 18571], [18443, 18572, 18571], [18443, 18444, 18573], [18443, 18573, 18572], [18444, 18445, 18573], [18445, 18574, 18573], [18445, 18446, 18575], [18445, 18575, 18574], [18446, 18447, 18575], [18447, 18576, 18575], [18447, 18448, 18577], [18447, 18577, 18576], [18448, 18449, 18577], [18449, 18578, 18577], [18449, 18450, 18579], [18449, 18579, 18578], [18450, 18451, 18579], [18451, 18580, 18579], [18451, 18452, 18581], [18451, 18581, 18580], [18452, 18453, 18581], [18453, 18582, 18581], [18453, 18454, 18583], [18453, 18583, 18582], [18454, 18455, 18583], [18455, 18584, 18583], [18455, 18456, 18585], [18455, 18585, 18584], [18456, 18457, 18585], [18457, 18586, 18585], [18457, 18458, 18587], [18457, 18587, 18586], [18458, 18459, 18587], [18459, 18588, 18587], [18459, 18460, 18589], [18459, 18589, 18588], [18460, 18461, 18589], [18461, 18590, 18589], [18461, 18462, 18591], [18461, 18591, 18590], [18462, 18463, 18591], [18463, 18592, 18591], [18463, 18464, 18593], [18463, 18593, 18592], [18464, 18465, 18593], [18465, 18594, 18593], [18465, 18466, 18595], [18465, 18595, 18594], [18466, 18467, 18595], [18467, 18596, 18595], [18467, 18468, 18597], [18467, 18597, 18596], [18468, 18469, 18597], [18469, 18598, 18597], [18469, 18470, 18599], [18469, 18599, 18598], [18470, 18471, 18599], [18471, 18600, 18599], [18471, 18472, 18601], [18471, 18601, 18600], [18472, 18473, 18601], [18473, 18602, 18601], [18473, 18474, 18603], [18473, 18603, 18602], [18474, 18475, 18603], [18475, 18604, 18603], [18475, 18476, 18605], [18475, 18605, 18604], [18476, 18477, 18605], [18477, 18606, 18605], [18477, 18478, 18607], [18477, 18607, 18606], [18478, 18479, 18607], [18479, 18608, 18607], [18479, 18480, 18609], [18479, 18609, 18608], [18480, 18481, 18609], [18481, 18610, 18609], [18481, 18482, 18611], [18481, 18611, 18610], [18482, 18483, 18611], [18483, 18612, 18611], [18483, 18484, 18613], [18483, 18613, 18612], [18484, 18485, 18613], [18485, 18614, 18613], [18485, 18486, 18615], [18485, 18615, 18614], [18486, 18487, 18615], [18487, 18616, 18615], [18487, 18488, 18617], [18487, 18617, 18616], [18488, 18489, 18617], [18489, 18618, 18617], [18489, 18490, 18619], 
[18489, 18619, 18618], [18490, 18491, 18619], [18491, 18620, 18619], [18491, 18492, 18621], [18491, 18621, 18620], [18493, 18494, 18623], [18493, 18623, 18622], [18494, 18495, 18623], [18495, 18624, 18623], [18495, 18496, 18625], [18495, 18625, 18624], [18496, 18497, 18625], [18497, 18626, 18625], [18497, 18498, 18627], [18497, 18627, 18626], [18498, 18499, 18627], [18499, 18628, 18627], [18499, 18500, 18629], [18499, 18629, 18628], [18500, 18501, 18629], [18501, 18630, 18629], [18501, 18502, 18631], [18501, 18631, 18630], [18502, 18503, 18631], [18503, 18632, 18631], [18503, 18504, 18633], [18503, 18633, 18632], [18504, 18505, 18633], [18505, 18634, 18633], [18505, 18506, 18635], [18505, 18635, 18634], [18506, 18507, 18635], [18507, 18636, 18635], [18507, 18508, 18637], [18507, 18637, 18636], [18508, 18509, 18637], [18509, 18638, 18637], [18509, 18510, 18639], [18509, 18639, 18638], [18510, 18511, 18639], [18511, 18640, 18639], [18511, 18512, 18641], [18511, 18641, 18640], [18512, 18513, 18641], [18513, 18642, 18641], [18513, 18514, 18643], [18513, 18643, 18642], [18514, 18515, 18643], [18515, 18644, 18643], [18515, 18516, 18645], [18515, 18645, 18644], [18516, 18517, 18645], [18517, 18646, 18645], [18517, 18518, 18647], [18517, 18647, 18646], [18518, 18519, 18647], [18519, 18648, 18647], [18519, 18520, 18649], [18519, 18649, 18648], [18520, 18521, 18649], [18521, 18650, 18649], [18521, 18522, 18651], [18521, 18651, 18650], [18522, 18523, 18651], [18523, 18652, 18651], [18523, 18524, 18653], [18523, 18653, 18652], [18524, 18525, 18653], [18525, 18654, 18653], [18525, 18526, 18655], [18525, 18655, 18654], [18526, 18527, 18655], [18527, 18656, 18655], [18527, 18528, 18657], [18527, 18657, 18656], [18528, 18529, 18657], [18529, 18658, 18657], [18529, 18530, 18659], [18529, 18659, 18658], [18530, 18531, 18659], [18531, 18660, 18659], [18531, 18532, 18661], [18531, 18661, 18660], [18532, 18533, 18661], [18533, 18662, 18661], [18533, 18534, 18663], [18533, 18663, 18662], [18534, 18535, 18663], [18535, 18664, 18663], [18535, 18536, 18665], [18535, 18665, 18664], [18536, 18537, 18665], [18537, 18666, 18665], [18537, 18538, 18667], [18537, 18667, 18666], [18538, 18539, 18667], [18539, 18668, 18667], [18539, 18540, 18669], [18539, 18669, 18668], [18540, 18541, 18669], [18541, 18670, 18669], [18541, 18542, 18671], [18541, 18671, 18670], [18542, 18543, 18671], [18543, 18672, 18671], [18543, 18544, 18673], [18543, 18673, 18672], [18544, 18545, 18673], [18545, 18674, 18673], [18545, 18546, 18675], [18545, 18675, 18674], [18546, 18547, 18675], [18547, 18676, 18675], [18547, 18548, 18677], [18547, 18677, 18676], [18548, 18549, 18677], [18549, 18678, 18677], [18549, 18550, 18679], [18549, 18679, 18678], [18550, 18551, 18679], [18551, 18680, 18679], [18551, 18552, 18681], [18551, 18681, 18680], [18552, 18553, 18681], [18553, 18682, 18681], [18553, 18554, 18683], [18553, 18683, 18682], [18554, 18555, 18683], [18555, 18684, 18683], [18555, 18556, 18685], [18555, 18685, 18684], [18556, 18557, 18685], [18557, 18686, 18685], [18557, 18558, 18687], [18557, 18687, 18686], [18558, 18559, 18687], [18559, 18688, 18687], [18559, 18560, 18689], [18559, 18689, 18688], [18560, 18561, 18689], [18561, 18690, 18689], [18561, 18562, 18691], [18561, 18691, 18690], [18562, 18563, 18691], [18563, 18692, 18691], [18563, 18564, 18693], [18563, 18693, 18692], [18564, 18565, 18693], [18565, 18694, 18693], [18565, 18566, 18695], [18565, 18695, 18694], [18566, 18567, 18695], [18567, 18696, 18695], [18567, 18568, 18697], [18567, 
18697, 18696], [18568, 18569, 18697], [18569, 18698, 18697], [18569, 18570, 18699], [18569, 18699, 18698], [18570, 18571, 18699], [18571, 18700, 18699], [18571, 18572, 18701], [18571, 18701, 18700], [18572, 18573, 18701], [18573, 18702, 18701], [18573, 18574, 18703], [18573, 18703, 18702], [18574, 18575, 18703], [18575, 18704, 18703], [18575, 18576, 18705], [18575, 18705, 18704], [18576, 18577, 18705], [18577, 18706, 18705], [18577, 18578, 18707], [18577, 18707, 18706], [18578, 18579, 18707], [18579, 18708, 18707], [18579, 18580, 18709], [18579, 18709, 18708], [18580, 18581, 18709], [18581, 18710, 18709], [18581, 18582, 18711], [18581, 18711, 18710], [18582, 18583, 18711], [18583, 18712, 18711], [18583, 18584, 18713], [18583, 18713, 18712], [18584, 18585, 18713], [18585, 18714, 18713], [18585, 18586, 18715], [18585, 18715, 18714], [18586, 18587, 18715], [18587, 18716, 18715], [18587, 18588, 18717], [18587, 18717, 18716], [18588, 18589, 18717], [18589, 18718, 18717], [18589, 18590, 18719], [18589, 18719, 18718], [18590, 18591, 18719], [18591, 18720, 18719], [18591, 18592, 18721], [18591, 18721, 18720], [18592, 18593, 18721], [18593, 18722, 18721], [18593, 18594, 18723], [18593, 18723, 18722], [18594, 18595, 18723], [18595, 18724, 18723], [18595, 18596, 18725], [18595, 18725, 18724], [18596, 18597, 18725], [18597, 18726, 18725], [18597, 18598, 18727], [18597, 18727, 18726], [18598, 18599, 18727], [18599, 18728, 18727], [18599, 18600, 18729], [18599, 18729, 18728], [18600, 18601, 18729], [18601, 18730, 18729], [18601, 18602, 18731], [18601, 18731, 18730], [18602, 18603, 18731], [18603, 18732, 18731], [18603, 18604, 18733], [18603, 18733, 18732], [18604, 18605, 18733], [18605, 18734, 18733], [18605, 18606, 18735], [18605, 18735, 18734], [18606, 18607, 18735], [18607, 18736, 18735], [18607, 18608, 18737], [18607, 18737, 18736], [18608, 18609, 18737], [18609, 18738, 18737], [18609, 18610, 18739], [18609, 18739, 18738], [18610, 18611, 18739], [18611, 18740, 18739], [18611, 18612, 18741], [18611, 18741, 18740], [18612, 18613, 18741], [18613, 18742, 18741], [18613, 18614, 18743], [18613, 18743, 18742], [18614, 18615, 18743], [18615, 18744, 18743], [18615, 18616, 18745], [18615, 18745, 18744], [18616, 18617, 18745], [18617, 18746, 18745], [18617, 18618, 18747], [18617, 18747, 18746], [18618, 18619, 18747], [18619, 18748, 18747], [18619, 18620, 18749], [18619, 18749, 18748], [18620, 18621, 18749], [18621, 18750, 18749], [18622, 18623, 18751], [18623, 18752, 18751], [18623, 18624, 18753], [18623, 18753, 18752], [18624, 18625, 18753], [18625, 18754, 18753], [18625, 18626, 18755], [18625, 18755, 18754], [18626, 18627, 18755], [18627, 18756, 18755], [18627, 18628, 18757], [18627, 18757, 18756], [18628, 18629, 18757], [18629, 18758, 18757], [18629, 18630, 18759], [18629, 18759, 18758], [18630, 18631, 18759], [18631, 18760, 18759], [18631, 18632, 18761], [18631, 18761, 18760], [18632, 18633, 18761], [18633, 18762, 18761], [18633, 18634, 18763], [18633, 18763, 18762], [18634, 18635, 18763], [18635, 18764, 18763], [18635, 18636, 18765], [18635, 18765, 18764], [18636, 18637, 18765], [18637, 18766, 18765], [18637, 18638, 18767], [18637, 18767, 18766], [18638, 18639, 18767], [18639, 18768, 18767], [18639, 18640, 18769], [18639, 18769, 18768], [18640, 18641, 18769], [18641, 18770, 18769], [18641, 18642, 18771], [18641, 18771, 18770], [18642, 18643, 18771], [18643, 18772, 18771], [18643, 18644, 18773], [18643, 18773, 18772], [18644, 18645, 18773], [18645, 18774, 18773], [18645, 18646, 18775], [18645, 18775, 
18774], [18646, 18647, 18775], [18647, 18776, 18775], [18647, 18648, 18777], [18647, 18777, 18776], [18648, 18649, 18777], [18649, 18778, 18777], [18649, 18650, 18779], [18649, 18779, 18778], [18650, 18651, 18779], [18651, 18780, 18779], [18651, 18652, 18781], [18651, 18781, 18780], [18652, 18653, 18781], [18653, 18782, 18781], [18653, 18654, 18783], [18653, 18783, 18782], [18654, 18655, 18783], [18655, 18784, 18783], [18655, 18656, 18785], [18655, 18785, 18784], [18656, 18657, 18785], [18657, 18786, 18785], [18657, 18658, 18787], [18657, 18787, 18786], [18658, 18659, 18787], [18659, 18788, 18787], [18659, 18660, 18789], [18659, 18789, 18788], [18660, 18661, 18789], [18661, 18790, 18789], [18661, 18662, 18791], [18661, 18791, 18790], [18662, 18663, 18791], [18663, 18792, 18791], [18663, 18664, 18793], [18663, 18793, 18792], [18664, 18665, 18793], [18665, 18794, 18793], [18665, 18666, 18795], [18665, 18795, 18794], [18666, 18667, 18795], [18667, 18796, 18795], [18667, 18668, 18797], [18667, 18797, 18796], [18668, 18669, 18797], [18669, 18798, 18797], [18669, 18670, 18799], [18669, 18799, 18798], [18670, 18671, 18799], [18671, 18800, 18799], [18671, 18672, 18801], [18671, 18801, 18800], [18672, 18673, 18801], [18673, 18802, 18801], [18673, 18674, 18803], [18673, 18803, 18802], [18674, 18675, 18803], [18675, 18804, 18803], [18675, 18676, 18805], [18675, 18805, 18804], [18676, 18677, 18805], [18677, 18806, 18805], [18677, 18678, 18807], [18677, 18807, 18806], [18678, 18679, 18807], [18679, 18808, 18807], [18679, 18680, 18809], [18679, 18809, 18808], [18680, 18681, 18809], [18681, 18810, 18809], [18681, 18682, 18811], [18681, 18811, 18810], [18682, 18683, 18811], [18683, 18812, 18811], [18683, 18684, 18813], [18683, 18813, 18812], [18684, 18685, 18813], [18685, 18814, 18813], [18685, 18686, 18815], [18685, 18815, 18814], [18686, 18687, 18815], [18687, 18816, 18815], [18687, 18688, 18817], [18687, 18817, 18816], [18688, 18689, 18817], [18689, 18818, 18817], [18689, 18690, 18819], [18689, 18819, 18818], [18690, 18691, 18819], [18691, 18820, 18819], [18691, 18692, 18821], [18691, 18821, 18820], [18692, 18693, 18821], [18693, 18822, 18821], [18693, 18694, 18823], [18693, 18823, 18822], [18694, 18695, 18823], [18695, 18824, 18823], [18695, 18696, 18825], [18695, 18825, 18824], [18696, 18697, 18825], [18697, 18826, 18825], [18697, 18698, 18827], [18697, 18827, 18826], [18698, 18699, 18827], [18699, 18828, 18827], [18699, 18700, 18829], [18699, 18829, 18828], [18700, 18701, 18829], [18701, 18830, 18829], [18701, 18702, 18831], [18701, 18831, 18830], [18702, 18703, 18831], [18703, 18832, 18831], [18703, 18704, 18833], [18703, 18833, 18832], [18704, 18705, 18833], [18705, 18834, 18833], [18705, 18706, 18835], [18705, 18835, 18834], [18706, 18707, 18835], [18707, 18836, 18835], [18707, 18708, 18837], [18707, 18837, 18836], [18708, 18709, 18837], [18709, 18838, 18837], [18709, 18710, 18839], [18709, 18839, 18838], [18710, 18711, 18839], [18711, 18840, 18839], [18711, 18712, 18841], [18711, 18841, 18840], [18712, 18713, 18841], [18713, 18842, 18841], [18713, 18714, 18843], [18713, 18843, 18842], [18714, 18715, 18843], [18715, 18844, 18843], [18715, 18716, 18845], [18715, 18845, 18844], [18716, 18717, 18845], [18717, 18846, 18845], [18717, 18718, 18847], [18717, 18847, 18846], [18718, 18719, 18847], [18719, 18848, 18847], [18719, 18720, 18849], [18719, 18849, 18848], [18720, 18721, 18849], [18721, 18850, 18849], [18721, 18722, 18851], [18721, 18851, 18850], [18722, 18723, 18851], [18723, 18852, 18851], 
[18723, 18724, 18853], [18723, 18853, 18852], [18724, 18725, 18853], [18725, 18854, 18853], [18725, 18726, 18855], [18725, 18855, 18854], [18726, 18727, 18855], [18727, 18856, 18855], [18727, 18728, 18857], [18727, 18857, 18856], [18728, 18729, 18857], [18729, 18858, 18857], [18729, 18730, 18859], [18729, 18859, 18858], [18730, 18731, 18859], [18731, 18860, 18859], [18731, 18732, 18861], [18731, 18861, 18860], [18732, 18733, 18861], [18733, 18862, 18861], [18733, 18734, 18863], [18733, 18863, 18862], [18734, 18735, 18863], [18735, 18864, 18863], [18735, 18736, 18865], [18735, 18865, 18864], [18736, 18737, 18865], [18737, 18866, 18865], [18737, 18738, 18867], [18737, 18867, 18866], [18738, 18739, 18867], [18739, 18868, 18867], [18739, 18740, 18869], [18739, 18869, 18868], [18740, 18741, 18869], [18741, 18870, 18869], [18741, 18742, 18871], [18741, 18871, 18870], [18742, 18743, 18871], [18743, 18872, 18871], [18743, 18744, 18873], [18743, 18873, 18872], [18744, 18745, 18873], [18745, 18874, 18873], [18745, 18746, 18875], [18745, 18875, 18874], [18746, 18747, 18875], [18747, 18876, 18875], [18747, 18748, 18877], [18747, 18877, 18876], [18748, 18749, 18877], [18749, 18878, 18877], [18749, 18750, 18879], [18749, 18879, 18878], [18751, 18752, 18881], [18751, 18881, 18880], [18752, 18753, 18881], [18753, 18882, 18881], [18753, 18754, 18883], [18753, 18883, 18882], [18754, 18755, 18883], [18755, 18884, 18883], [18755, 18756, 18885], [18755, 18885, 18884], [18756, 18757, 18885], [18757, 18886, 18885], [18757, 18758, 18887], [18757, 18887, 18886], [18758, 18759, 18887], [18759, 18888, 18887], [18759, 18760, 18889], [18759, 18889, 18888], [18760, 18761, 18889], [18761, 18890, 18889], [18761, 18762, 18891], [18761, 18891, 18890], [18762, 18763, 18891], [18763, 18892, 18891], [18763, 18764, 18893], [18763, 18893, 18892], [18764, 18765, 18893], [18765, 18894, 18893], [18765, 18766, 18895], [18765, 18895, 18894], [18766, 18767, 18895], [18767, 18896, 18895], [18767, 18768, 18897], [18767, 18897, 18896], [18768, 18769, 18897], [18769, 18898, 18897], [18769, 18770, 18899], [18769, 18899, 18898], [18770, 18771, 18899], [18771, 18900, 18899], [18771, 18772, 18901], [18771, 18901, 18900], [18772, 18773, 18901], [18773, 18902, 18901], [18773, 18774, 18903], [18773, 18903, 18902], [18774, 18775, 18903], [18775, 18904, 18903], [18775, 18776, 18905], [18775, 18905, 18904], [18776, 18777, 18905], [18777, 18906, 18905], [18777, 18778, 18907], [18777, 18907, 18906], [18778, 18779, 18907], [18779, 18908, 18907], [18779, 18780, 18909], [18779, 18909, 18908], [18780, 18781, 18909], [18781, 18910, 18909], [18781, 18782, 18911], [18781, 18911, 18910], [18782, 18783, 18911], [18783, 18912, 18911], [18783, 18784, 18913], [18783, 18913, 18912], [18784, 18785, 18913], [18785, 18914, 18913], [18785, 18786, 18915], [18785, 18915, 18914], [18786, 18787, 18915], [18787, 18916, 18915], [18787, 18788, 18917], [18787, 18917, 18916], [18788, 18789, 18917], [18789, 18918, 18917], [18789, 18790, 18919], [18789, 18919, 18918], [18790, 18791, 18919], [18791, 18920, 18919], [18791, 18792, 18921], [18791, 18921, 18920], [18792, 18793, 18921], [18793, 18922, 18921], [18793, 18794, 18923], [18793, 18923, 18922], [18794, 18795, 18923], [18795, 18924, 18923], [18795, 18796, 18925], [18795, 18925, 18924], [18796, 18797, 18925], [18797, 18926, 18925], [18797, 18798, 18927], [18797, 18927, 18926], [18798, 18799, 18927], [18799, 18928, 18927], [18799, 18800, 18929], [18799, 18929, 18928], [18800, 18801, 18929], [18801, 18930, 18929], [18801, 
18802, 18931], [18801, 18931, 18930], [18802, 18803, 18931], [18803, 18932, 18931], [18803, 18804, 18933], [18803, 18933, 18932], [18804, 18805, 18933], [18805, 18934, 18933], [18805, 18806, 18935], [18805, 18935, 18934], [18806, 18807, 18935], [18807, 18936, 18935], [18807, 18808, 18937], [18807, 18937, 18936], [18808, 18809, 18937], [18809, 18938, 18937], [18809, 18810, 18939], [18809, 18939, 18938], [18810, 18811, 18939], [18811, 18940, 18939], [18811, 18812, 18941], [18811, 18941, 18940], [18812, 18813, 18941], [18813, 18942, 18941], [18813, 18814, 18943], [18813, 18943, 18942], [18814, 18815, 18943], [18815, 18944, 18943], [18815, 18816, 18945], [18815, 18945, 18944], [18816, 18817, 18945], [18817, 18946, 18945], [18817, 18818, 18947], [18817, 18947, 18946], [18818, 18819, 18947], [18819, 18948, 18947], [18819, 18820, 18949], [18819, 18949, 18948], [18820, 18821, 18949], [18821, 18950, 18949], [18821, 18822, 18951], [18821, 18951, 18950], [18822, 18823, 18951], [18823, 18952, 18951], [18823, 18824, 18953], [18823, 18953, 18952], [18824, 18825, 18953], [18825, 18954, 18953], [18825, 18826, 18955], [18825, 18955, 18954], [18826, 18827, 18955], [18827, 18956, 18955], [18827, 18828, 18957], [18827, 18957, 18956], [18828, 18829, 18957], [18829, 18958, 18957], [18829, 18830, 18959], [18829, 18959, 18958], [18830, 18831, 18959], [18831, 18960, 18959], [18831, 18832, 18961], [18831, 18961, 18960], [18832, 18833, 18961], [18833, 18962, 18961], [18833, 18834, 18963], [18833, 18963, 18962], [18834, 18835, 18963], [18835, 18964, 18963], [18835, 18836, 18965], [18835, 18965, 18964], [18836, 18837, 18965], [18837, 18966, 18965], [18837, 18838, 18967], [18837, 18967, 18966], [18838, 18839, 18967], [18839, 18968, 18967], [18839, 18840, 18969], [18839, 18969, 18968], [18840, 18841, 18969], [18841, 18970, 18969], [18841, 18842, 18971], [18841, 18971, 18970], [18842, 18843, 18971], [18843, 18972, 18971], [18843, 18844, 18973], [18843, 18973, 18972], [18844, 18845, 18973], [18845, 18974, 18973], [18845, 18846, 18975], [18845, 18975, 18974], [18846, 18847, 18975], [18847, 18976, 18975], [18847, 18848, 18977], [18847, 18977, 18976], [18848, 18849, 18977], [18849, 18978, 18977], [18849, 18850, 18979], [18849, 18979, 18978], [18850, 18851, 18979], [18851, 18980, 18979], [18851, 18852, 18981], [18851, 18981, 18980], [18852, 18853, 18981], [18853, 18982, 18981], [18853, 18854, 18983], [18853, 18983, 18982], [18854, 18855, 18983], [18855, 18984, 18983], [18855, 18856, 18985], [18855, 18985, 18984], [18856, 18857, 18985], [18857, 18986, 18985], [18857, 18858, 18987], [18857, 18987, 18986], [18858, 18859, 18987], [18859, 18988, 18987], [18859, 18860, 18989], [18859, 18989, 18988], [18860, 18861, 18989], [18861, 18990, 18989], [18861, 18862, 18991], [18861, 18991, 18990], [18862, 18863, 18991], [18863, 18992, 18991], [18863, 18864, 18993], [18863, 18993, 18992], [18864, 18865, 18993], [18865, 18994, 18993], [18865, 18866, 18995], [18865, 18995, 18994], [18866, 18867, 18995], [18867, 18996, 18995], [18867, 18868, 18997], [18867, 18997, 18996], [18868, 18869, 18997], [18869, 18998, 18997], [18869, 18870, 18999], [18869, 18999, 18998], [18870, 18871, 18999], [18871, 19000, 18999], [18871, 18872, 19001], [18871, 19001, 19000], [18872, 18873, 19001], [18873, 19002, 19001], [18873, 18874, 19003], [18873, 19003, 19002], [18874, 18875, 19003], [18875, 19004, 19003], [18875, 18876, 19005], [18875, 19005, 19004], [18876, 18877, 19005], [18877, 19006, 19005], [18877, 18878, 19007], [18877, 19007, 19006], [18878, 18879, 
19007], [18879, 19008, 19007], [18880, 18881, 19009], [18881, 19010, 19009], [18881, 18882, 19011], [18881, 19011, 19010], [18882, 18883, 19011], [18883, 19012, 19011], [18883, 18884, 19013], [18883, 19013, 19012], [18884, 18885, 19013], [18885, 19014, 19013], [18885, 18886, 19015], [18885, 19015, 19014], [18886, 18887, 19015], [18887, 19016, 19015], [18887, 18888, 19017], [18887, 19017, 19016], [18888, 18889, 19017], [18889, 19018, 19017], [18889, 18890, 19019], [18889, 19019, 19018], [18890, 18891, 19019], [18891, 19020, 19019], [18891, 18892, 19021], [18891, 19021, 19020], [18892, 18893, 19021], [18893, 19022, 19021], [18893, 18894, 19023], [18893, 19023, 19022], [18894, 18895, 19023], [18895, 19024, 19023], [18895, 18896, 19025], [18895, 19025, 19024], [18896, 18897, 19025], [18897, 19026, 19025], [18897, 18898, 19027], [18897, 19027, 19026], [18898, 18899, 19027], [18899, 19028, 19027], [18899, 18900, 19029], [18899, 19029, 19028], [18900, 18901, 19029], [18901, 19030, 19029], [18901, 18902, 19031], [18901, 19031, 19030], [18902, 18903, 19031], [18903, 19032, 19031], [18903, 18904, 19033], [18903, 19033, 19032], [18904, 18905, 19033], [18905, 19034, 19033], [18905, 18906, 19035], [18905, 19035, 19034], [18906, 18907, 19035], [18907, 19036, 19035], [18907, 18908, 19037], [18907, 19037, 19036], [18908, 18909, 19037], [18909, 19038, 19037], [18909, 18910, 19039], [18909, 19039, 19038], [18910, 18911, 19039], [18911, 19040, 19039], [18911, 18912, 19041], [18911, 19041, 19040], [18912, 18913, 19041], [18913, 19042, 19041], [18913, 18914, 19043], [18913, 19043, 19042], [18914, 18915, 19043], [18915, 19044, 19043], [18915, 18916, 19045], [18915, 19045, 19044], [18916, 18917, 19045], [18917, 19046, 19045], [18917, 18918, 19047], [18917, 19047, 19046], [18918, 18919, 19047], [18919, 19048, 19047], [18919, 18920, 19049], [18919, 19049, 19048], [18920, 18921, 19049], [18921, 19050, 19049], [18921, 18922, 19051], [18921, 19051, 19050], [18922, 18923, 19051], [18923, 19052, 19051], [18923, 18924, 19053], [18923, 19053, 19052], [18924, 18925, 19053], [18925, 19054, 19053], [18925, 18926, 19055], [18925, 19055, 19054], [18926, 18927, 19055], [18927, 19056, 19055], [18927, 18928, 19057], [18927, 19057, 19056], [18928, 18929, 19057], [18929, 19058, 19057], [18929, 18930, 19059], [18929, 19059, 19058], [18930, 18931, 19059], [18931, 19060, 19059], [18931, 18932, 19061], [18931, 19061, 19060], [18932, 18933, 19061], [18933, 19062, 19061], [18933, 18934, 19063], [18933, 19063, 19062], [18934, 18935, 19063], [18935, 19064, 19063], [18935, 18936, 19065], [18935, 19065, 19064], [18936, 18937, 19065], [18937, 19066, 19065], [18937, 18938, 19067], [18937, 19067, 19066], [18938, 18939, 19067], [18939, 19068, 19067], [18939, 18940, 19069], [18939, 19069, 19068], [18940, 18941, 19069], [18941, 19070, 19069], [18941, 18942, 19071], [18941, 19071, 19070], [18942, 18943, 19071], [18943, 19072, 19071], [18943, 18944, 19073], [18943, 19073, 19072], [18944, 18945, 19073], [18945, 19074, 19073], [18945, 18946, 19075], [18945, 19075, 19074], [18946, 18947, 19075], [18947, 19076, 19075], [18947, 18948, 19077], [18947, 19077, 19076], [18948, 18949, 19077], [18949, 19078, 19077], [18949, 18950, 19079], [18949, 19079, 19078], [18950, 18951, 19079], [18951, 19080, 19079], [18951, 18952, 19081], [18951, 19081, 19080], [18952, 18953, 19081], [18953, 19082, 19081], [18953, 18954, 19083], [18953, 19083, 19082], [18954, 18955, 19083], [18955, 19084, 19083], [18955, 18956, 19085], [18955, 19085, 19084], [18956, 18957, 19085], 
[18957, 19086, 19085], [18957, 18958, 19087], [18957, 19087, 19086], [18958, 18959, 19087], [18959, 19088, 19087], [18959, 18960, 19089], [18959, 19089, 19088], [18960, 18961, 19089], [18961, 19090, 19089], [18961, 18962, 19091], [18961, 19091, 19090], [18962, 18963, 19091], [18963, 19092, 19091], [18963, 18964, 19093], [18963, 19093, 19092], [18964, 18965, 19093], [18965, 19094, 19093], [18965, 18966, 19095], [18965, 19095, 19094], [18966, 18967, 19095], [18967, 19096, 19095], [18967, 18968, 19097], [18967, 19097, 19096], [18968, 18969, 19097], [18969, 19098, 19097], [18969, 18970, 19099], [18969, 19099, 19098], [18970, 18971, 19099], [18971, 19100, 19099], [18971, 18972, 19101], [18971, 19101, 19100], [18972, 18973, 19101], [18973, 19102, 19101], [18973, 18974, 19103], [18973, 19103, 19102], [18974, 18975, 19103], [18975, 19104, 19103], [18975, 18976, 19105], [18975, 19105, 19104], [18976, 18977, 19105], [18977, 19106, 19105], [18977, 18978, 19107], [18977, 19107, 19106], [18978, 18979, 19107], [18979, 19108, 19107], [18979, 18980, 19109], [18979, 19109, 19108], [18980, 18981, 19109], [18981, 19110, 19109], [18981, 18982, 19111], [18981, 19111, 19110], [18982, 18983, 19111], [18983, 19112, 19111], [18983, 18984, 19113], [18983, 19113, 19112], [18984, 18985, 19113], [18985, 19114, 19113], [18985, 18986, 19115], [18985, 19115, 19114], [18986, 18987, 19115], [18987, 19116, 19115], [18987, 18988, 19117], [18987, 19117, 19116], [18988, 18989, 19117], [18989, 19118, 19117], [18989, 18990, 19119], [18989, 19119, 19118], [18990, 18991, 19119], [18991, 19120, 19119], [18991, 18992, 19121], [18991, 19121, 19120], [18992, 18993, 19121], [18993, 19122, 19121], [18993, 18994, 19123], [18993, 19123, 19122], [18994, 18995, 19123], [18995, 19124, 19123], [18995, 18996, 19125], [18995, 19125, 19124], [18996, 18997, 19125], [18997, 19126, 19125], [18997, 18998, 19127], [18997, 19127, 19126], [18998, 18999, 19127], [18999, 19128, 19127], [18999, 19000, 19129], [18999, 19129, 19128], [19000, 19001, 19129], [19001, 19130, 19129], [19001, 19002, 19131], [19001, 19131, 19130], [19002, 19003, 19131], [19003, 19132, 19131], [19003, 19004, 19133], [19003, 19133, 19132], [19004, 19005, 19133], [19005, 19134, 19133], [19005, 19006, 19135], [19005, 19135, 19134], [19006, 19007, 19135], [19007, 19136, 19135], [19007, 19008, 19137], [19007, 19137, 19136], [19009, 19010, 19139], [19009, 19139, 19138], [19010, 19011, 19139], [19011, 19140, 19139], [19011, 19012, 19141], [19011, 19141, 19140], [19012, 19013, 19141], [19013, 19142, 19141], [19013, 19014, 19143], [19013, 19143, 19142], [19014, 19015, 19143], [19015, 19144, 19143], [19015, 19016, 19145], [19015, 19145, 19144], [19016, 19017, 19145], [19017, 19146, 19145], [19017, 19018, 19147], [19017, 19147, 19146], [19018, 19019, 19147], [19019, 19148, 19147], [19019, 19020, 19149], [19019, 19149, 19148], [19020, 19021, 19149], [19021, 19150, 19149], [19021, 19022, 19151], [19021, 19151, 19150], [19022, 19023, 19151], [19023, 19152, 19151], [19023, 19024, 19153], [19023, 19153, 19152], [19024, 19025, 19153], [19025, 19154, 19153], [19025, 19026, 19155], [19025, 19155, 19154], [19026, 19027, 19155], [19027, 19156, 19155], [19027, 19028, 19157], [19027, 19157, 19156], [19028, 19029, 19157], [19029, 19158, 19157], [19029, 19030, 19159], [19029, 19159, 19158], [19030, 19031, 19159], [19031, 19160, 19159], [19031, 19032, 19161], [19031, 19161, 19160], [19032, 19033, 19161], [19033, 19162, 19161], [19033, 19034, 19163], [19033, 19163, 19162], [19034, 19035, 19163], [19035, 
19164, 19163], [19035, 19036, 19165], [19035, 19165, 19164], [19036, 19037, 19165], [19037, 19166, 19165], [19037, 19038, 19167], [19037, 19167, 19166], [19038, 19039, 19167], [19039, 19168, 19167], [19039, 19040, 19169], [19039, 19169, 19168], [19040, 19041, 19169], [19041, 19170, 19169], [19041, 19042, 19171], [19041, 19171, 19170], [19042, 19043, 19171], [19043, 19172, 19171], [19043, 19044, 19173], [19043, 19173, 19172], [19044, 19045, 19173], [19045, 19174, 19173], [19045, 19046, 19175], [19045, 19175, 19174], [19046, 19047, 19175], [19047, 19176, 19175], [19047, 19048, 19177], [19047, 19177, 19176], [19048, 19049, 19177], [19049, 19178, 19177], [19049, 19050, 19179], [19049, 19179, 19178], [19050, 19051, 19179], [19051, 19180, 19179], [19051, 19052, 19181], [19051, 19181, 19180], [19052, 19053, 19181], [19053, 19182, 19181], [19053, 19054, 19183], [19053, 19183, 19182], [19054, 19055, 19183], [19055, 19184, 19183], [19055, 19056, 19185], [19055, 19185, 19184], [19056, 19057, 19185], [19057, 19186, 19185], [19057, 19058, 19187], [19057, 19187, 19186], [19058, 19059, 19187], [19059, 19188, 19187], [19059, 19060, 19189], [19059, 19189, 19188], [19060, 19061, 19189], [19061, 19190, 19189], [19061, 19062, 19191], [19061, 19191, 19190], [19062, 19063, 19191], [19063, 19192, 19191], [19063, 19064, 19193], [19063, 19193, 19192], [19064, 19065, 19193], [19065, 19194, 19193], [19065, 19066, 19195], [19065, 19195, 19194], [19066, 19067, 19195], [19067, 19196, 19195], [19067, 19068, 19197], [19067, 19197, 19196], [19068, 19069, 19197], [19069, 19198, 19197], [19069, 19070, 19199], [19069, 19199, 19198], [19070, 19071, 19199], [19071, 19200, 19199], [19071, 19072, 19201], [19071, 19201, 19200], [19072, 19073, 19201], [19073, 19202, 19201], [19073, 19074, 19203], [19073, 19203, 19202], [19074, 19075, 19203], [19075, 19204, 19203], [19075, 19076, 19205], [19075, 19205, 19204], [19076, 19077, 19205], [19077, 19206, 19205], [19077, 19078, 19207], [19077, 19207, 19206], [19078, 19079, 19207], [19079, 19208, 19207], [19079, 19080, 19209], [19079, 19209, 19208], [19080, 19081, 19209], [19081, 19210, 19209], [19081, 19082, 19211], [19081, 19211, 19210], [19082, 19083, 19211], [19083, 19212, 19211], [19083, 19084, 19213], [19083, 19213, 19212], [19084, 19085, 19213], [19085, 19214, 19213], [19085, 19086, 19215], [19085, 19215, 19214], [19086, 19087, 19215], [19087, 19216, 19215], [19087, 19088, 19217], [19087, 19217, 19216], [19088, 19089, 19217], [19089, 19218, 19217], [19089, 19090, 19219], [19089, 19219, 19218], [19090, 19091, 19219], [19091, 19220, 19219], [19091, 19092, 19221], [19091, 19221, 19220], [19092, 19093, 19221], [19093, 19222, 19221], [19093, 19094, 19223], [19093, 19223, 19222], [19094, 19095, 19223], [19095, 19224, 19223], [19095, 19096, 19225], [19095, 19225, 19224], [19096, 19097, 19225], [19097, 19226, 19225], [19097, 19098, 19227], [19097, 19227, 19226], [19098, 19099, 19227], [19099, 19228, 19227], [19099, 19100, 19229], [19099, 19229, 19228], [19100, 19101, 19229], [19101, 19230, 19229], [19101, 19102, 19231], [19101, 19231, 19230], [19102, 19103, 19231], [19103, 19232, 19231], [19103, 19104, 19233], [19103, 19233, 19232], [19104, 19105, 19233], [19105, 19234, 19233], [19105, 19106, 19235], [19105, 19235, 19234], [19106, 19107, 19235], [19107, 19236, 19235], [19107, 19108, 19237], [19107, 19237, 19236], [19108, 19109, 19237], [19109, 19238, 19237], [19109, 19110, 19239], [19109, 19239, 19238], [19110, 19111, 19239], [19111, 19240, 19239], [19111, 19112, 19241], [19111, 19241, 
19240], [19112, 19113, 19241], [19113, 19242, 19241], [19113, 19114, 19243], [19113, 19243, 19242], [19114, 19115, 19243], [19115, 19244, 19243], [19115, 19116, 19245], [19115, 19245, 19244], [19116, 19117, 19245], [19117, 19246, 19245], [19117, 19118, 19247], [19117, 19247, 19246], [19118, 19119, 19247], [19119, 19248, 19247], [19119, 19120, 19249], [19119, 19249, 19248], [19120, 19121, 19249], [19121, 19250, 19249], [19121, 19122, 19251], [19121, 19251, 19250], [19122, 19123, 19251], [19123, 19252, 19251], [19123, 19124, 19253], [19123, 19253, 19252], [19124, 19125, 19253], [19125, 19254, 19253], [19125, 19126, 19255], [19125, 19255, 19254], [19126, 19127, 19255], [19127, 19256, 19255], [19127, 19128, 19257], [19127, 19257, 19256], [19128, 19129, 19257], [19129, 19258, 19257], [19129, 19130, 19259], [19129, 19259, 19258], [19130, 19131, 19259], [19131, 19260, 19259], [19131, 19132, 19261], [19131, 19261, 19260], [19132, 19133, 19261], [19133, 19262, 19261], [19133, 19134, 19263], [19133, 19263, 19262], [19134, 19135, 19263], [19135, 19264, 19263], [19135, 19136, 19265], [19135, 19265, 19264], [19136, 19137, 19265], [19137, 19266, 19265], [19138, 19139, 0], [19139, 1, 0], [19139, 19140, 2], [19139, 2, 1], [19140, 19141, 2], [19141, 3, 2], [19141, 19142, 4], [19141, 4, 3], [19142, 19143, 4], [19143, 5, 4], [19143, 19144, 6], [19143, 6, 5], [19144, 19145, 6], [19145, 7, 6], [19145, 19146, 8], [19145, 8, 7], [19146, 19147, 8], [19147, 9, 8], [19147, 19148, 10], [19147, 10, 9], [19148, 19149, 10], [19149, 11, 10], [19149, 19150, 12], [19149, 12, 11], [19150, 19151, 12], [19151, 13, 12], [19151, 19152, 14], [19151, 14, 13], [19152, 19153, 14], [19153, 15, 14], [19153, 19154, 16], [19153, 16, 15], [19154, 19155, 16], [19155, 17, 16], [19155, 19156, 18], [19155, 18, 17], [19156, 19157, 18], [19157, 19, 18], [19157, 19158, 20], [19157, 20, 19], [19158, 19159, 20], [19159, 21, 20], [19159, 19160, 22], [19159, 22, 21], [19160, 19161, 22], [19161, 23, 22], [19161, 19162, 24], [19161, 24, 23], [19162, 19163, 24], [19163, 25, 24], [19163, 19164, 26], [19163, 26, 25], [19164, 19165, 26], [19165, 27, 26], [19165, 19166, 28], [19165, 28, 27], [19166, 19167, 28], [19167, 29, 28], [19167, 19168, 30], [19167, 30, 29], [19168, 19169, 30], [19169, 31, 30], [19169, 19170, 32], [19169, 32, 31], [19170, 19171, 32], [19171, 33, 32], [19171, 19172, 34], [19171, 34, 33], [19172, 19173, 34], [19173, 35, 34], [19173, 19174, 36], [19173, 36, 35], [19174, 19175, 36], [19175, 37, 36], [19175, 19176, 38], [19175, 38, 37], [19176, 19177, 38], [19177, 39, 38], [19177, 19178, 40], [19177, 40, 39], [19178, 19179, 40], [19179, 41, 40], [19179, 19180, 42], [19179, 42, 41], [19180, 19181, 42], [19181, 43, 42], [19181, 19182, 44], [19181, 44, 43], [19182, 19183, 44], [19183, 45, 44], [19183, 19184, 46], [19183, 46, 45], [19184, 19185, 46], [19185, 47, 46], [19185, 19186, 48], [19185, 48, 47], [19186, 19187, 48], [19187, 49, 48], [19187, 19188, 50], [19187, 50, 49], [19188, 19189, 50], [19189, 51, 50], [19189, 19190, 52], [19189, 52, 51], [19190, 19191, 52], [19191, 53, 52], [19191, 19192, 54], [19191, 54, 53], [19192, 19193, 54], [19193, 55, 54], [19193, 19194, 56], [19193, 56, 55], [19194, 19195, 56], [19195, 57, 56], [19195, 19196, 58], [19195, 58, 57], [19196, 19197, 58], [19197, 59, 58], [19197, 19198, 60], [19197, 60, 59], [19198, 19199, 60], [19199, 61, 60], [19199, 19200, 62], [19199, 62, 61], [19200, 19201, 62], [19201, 63, 62], [19201, 19202, 64], [19201, 64, 63], [19202, 19203, 64], [19203, 65, 64], [19203, 19204, 66], 
[19203, 66, 65], [19204, 19205, 66], [19205, 67, 66], [19205, 19206, 68], [19205, 68, 67], [19206, 19207, 68], [19207, 69, 68], [19207, 19208, 70], [19207, 70, 69], [19208, 19209, 70], [19209, 71, 70], [19209, 19210, 72], [19209, 72, 71], [19210, 19211, 72], [19211, 73, 72], [19211, 19212, 74], [19211, 74, 73], [19212, 19213, 74], [19213, 75, 74], [19213, 19214, 76], [19213, 76, 75], [19214, 19215, 76], [19215, 77, 76], [19215, 19216, 78], [19215, 78, 77], [19216, 19217, 78], [19217, 79, 78], [19217, 19218, 80], [19217, 80, 79], [19218, 19219, 80], [19219, 81, 80], [19219, 19220, 82], [19219, 82, 81], [19220, 19221, 82], [19221, 83, 82], [19221, 19222, 84], [19221, 84, 83], [19222, 19223, 84], [19223, 85, 84], [19223, 19224, 86], [19223, 86, 85], [19224, 19225, 86], [19225, 87, 86], [19225, 19226, 88], [19225, 88, 87], [19226, 19227, 88], [19227, 89, 88], [19227, 19228, 90], [19227, 90, 89], [19228, 19229, 90], [19229, 91, 90], [19229, 19230, 92], [19229, 92, 91], [19230, 19231, 92], [19231, 93, 92], [19231, 19232, 94], [19231, 94, 93], [19232, 19233, 94], [19233, 95, 94], [19233, 19234, 96], [19233, 96, 95], [19234, 19235, 96], [19235, 97, 96], [19235, 19236, 98], [19235, 98, 97], [19236, 19237, 98], [19237, 99, 98], [19237, 19238, 100], [19237, 100, 99], [19238, 19239, 100], [19239, 101, 100], [19239, 19240, 102], [19239, 102, 101], [19240, 19241, 102], [19241, 103, 102], [19241, 19242, 104], [19241, 104, 103], [19242, 19243, 104], [19243, 105, 104], [19243, 19244, 106], [19243, 106, 105], [19244, 19245, 106], [19245, 107, 106], [19245, 19246, 108], [19245, 108, 107], [19246, 19247, 108], [19247, 109, 108], [19247, 19248, 110], [19247, 110, 109], [19248, 19249, 110], [19249, 111, 110], [19249, 19250, 112], [19249, 112, 111], [19250, 19251, 112], [19251, 113, 112], [19251, 19252, 114], [19251, 114, 113], [19252, 19253, 114], [19253, 115, 114], [19253, 19254, 116], [19253, 116, 115], [19254, 19255, 116], [19255, 117, 116], [19255, 19256, 118], [19255, 118, 117], [19256, 19257, 118], [19257, 119, 118], [19257, 19258, 120], [19257, 120, 119], [19258, 19259, 120], [19259, 121, 120], [19259, 19260, 122], [19259, 122, 121], [19260, 19261, 122], [19261, 123, 122], [19261, 19262, 124], [19261, 124, 123], [19262, 19263, 124], [19263, 125, 124], [19263, 19264, 126], [19263, 126, 125], [19264, 19265, 126], [19265, 127, 126], [19265, 19266, 128], [19265, 128, 127], [19267, 19268, 19397], [19267, 19397, 19396], [19268, 19269, 19397], [19269, 19398, 19397], [19269, 19270, 19399], [19269, 19399, 19398], [19270, 19271, 19399], [19271, 19400, 19399], [19271, 19272, 19401], [19271, 19401, 19400], [19272, 19273, 19401], [19273, 19402, 19401], [19273, 19274, 19403], [19273, 19403, 19402], [19274, 19275, 19403], [19275, 19404, 19403], [19275, 19276, 19405], [19275, 19405, 19404], [19276, 19277, 19405], [19277, 19406, 19405], [19277, 19278, 19407], [19277, 19407, 19406], [19278, 19279, 19407], [19279, 19408, 19407], [19279, 19280, 19409], [19279, 19409, 19408], [19280, 19281, 19409], [19281, 19410, 19409], [19281, 19282, 19411], [19281, 19411, 19410], [19282, 19283, 19411], [19283, 19412, 19411], [19283, 19284, 19413], [19283, 19413, 19412], [19284, 19285, 19413], [19285, 19414, 19413], [19285, 19286, 19415], [19285, 19415, 19414], [19286, 19287, 19415], [19287, 19416, 19415], [19287, 19288, 19417], [19287, 19417, 19416], [19288, 19289, 19417], [19289, 19418, 19417], [19289, 19290, 19419], [19289, 19419, 19418], [19290, 19291, 19419], [19291, 19420, 19419], [19291, 19292, 19421], [19291, 19421, 19420], [19292, 
19293, 19421], [19293, 19422, 19421], [19293, 19294, 19423], [19293, 19423, 19422], [19294, 19295, 19423], [19295, 19424, 19423], [19295, 19296, 19425], [19295, 19425, 19424], [19296, 19297, 19425], [19297, 19426, 19425], [19297, 19298, 19427], [19297, 19427, 19426], [19298, 19299, 19427], [19299, 19428, 19427], [19299, 19300, 19429], [19299, 19429, 19428], [19300, 19301, 19429], [19301, 19430, 19429], [19301, 19302, 19431], [19301, 19431, 19430], [19302, 19303, 19431], [19303, 19432, 19431], [19303, 19304, 19433], [19303, 19433, 19432], [19304, 19305, 19433], [19305, 19434, 19433], [19305, 19306, 19435], [19305, 19435, 19434], [19306, 19307, 19435], [19307, 19436, 19435], [19307, 19308, 19437], [19307, 19437, 19436], [19308, 19309, 19437], [19309, 19438, 19437], [19309, 19310, 19439], [19309, 19439, 19438], [19310, 19311, 19439], [19311, 19440, 19439], [19311, 19312, 19441], [19311, 19441, 19440], [19312, 19313, 19441], [19313, 19442, 19441], [19313, 19314, 19443], [19313, 19443, 19442], [19314, 19315, 19443], [19315, 19444, 19443], [19315, 19316, 19445], [19315, 19445, 19444], [19316, 19317, 19445], [19317, 19446, 19445], [19317, 19318, 19447], [19317, 19447, 19446], [19318, 19319, 19447], [19319, 19448, 19447], [19319, 19320, 19449], [19319, 19449, 19448], [19320, 19321, 19449], [19321, 19450, 19449], [19321, 19322, 19451], [19321, 19451, 19450], [19322, 19323, 19451], [19323, 19452, 19451], [19323, 19324, 19453], [19323, 19453, 19452], [19324, 19325, 19453], [19325, 19454, 19453], [19325, 19326, 19455], [19325, 19455, 19454], [19326, 19327, 19455], [19327, 19456, 19455], [19327, 19328, 19457], [19327, 19457, 19456], [19328, 19329, 19457], [19329, 19458, 19457], [19329, 19330, 19459], [19329, 19459, 19458], [19330, 19331, 19459], [19331, 19460, 19459], [19331, 19332, 19461], [19331, 19461, 19460], [19332, 19333, 19461], [19333, 19462, 19461], [19333, 19334, 19463], [19333, 19463, 19462], [19334, 19335, 19463], [19335, 19464, 19463], [19335, 19336, 19465], [19335, 19465, 19464], [19336, 19337, 19465], [19337, 19466, 19465], [19337, 19338, 19467], [19337, 19467, 19466], [19338, 19339, 19467], [19339, 19468, 19467], [19339, 19340, 19469], [19339, 19469, 19468], [19340, 19341, 19469], [19341, 19470, 19469], [19341, 19342, 19471], [19341, 19471, 19470], [19342, 19343, 19471], [19343, 19472, 19471], [19343, 19344, 19473], [19343, 19473, 19472], [19344, 19345, 19473], [19345, 19474, 19473], [19345, 19346, 19475], [19345, 19475, 19474], [19346, 19347, 19475], [19347, 19476, 19475], [19347, 19348, 19477], [19347, 19477, 19476], [19348, 19349, 19477], [19349, 19478, 19477], [19349, 19350, 19479], [19349, 19479, 19478], [19350, 19351, 19479], [19351, 19480, 19479], [19351, 19352, 19481], [19351, 19481, 19480], [19352, 19353, 19481], [19353, 19482, 19481], [19353, 19354, 19483], [19353, 19483, 19482], [19354, 19355, 19483], [19355, 19484, 19483], [19355, 19356, 19485], [19355, 19485, 19484], [19356, 19357, 19485], [19357, 19486, 19485], [19357, 19358, 19487], [19357, 19487, 19486], [19358, 19359, 19487], [19359, 19488, 19487], [19359, 19360, 19489], [19359, 19489, 19488], [19360, 19361, 19489], [19361, 19490, 19489], [19361, 19362, 19491], [19361, 19491, 19490], [19362, 19363, 19491], [19363, 19492, 19491], [19363, 19364, 19493], [19363, 19493, 19492], [19364, 19365, 19493], [19365, 19494, 19493], [19365, 19366, 19495], [19365, 19495, 19494], [19366, 19367, 19495], [19367, 19496, 19495], [19367, 19368, 19497], [19367, 19497, 19496], [19368, 19369, 19497], [19369, 19498, 19497], [19369, 19370, 
19499], [19369, 19499, 19498], [19370, 19371, 19499], [19371, 19500, 19499], [19371, 19372, 19501], [19371, 19501, 19500], [19372, 19373, 19501], [19373, 19502, 19501], [19373, 19374, 19503], [19373, 19503, 19502], [19374, 19375, 19503], [19375, 19504, 19503], [19375, 19376, 19505], [19375, 19505, 19504], [19376, 19377, 19505], [19377, 19506, 19505], [19377, 19378, 19507], [19377, 19507, 19506], [19378, 19379, 19507], [19379, 19508, 19507], [19379, 19380, 19509], [19379, 19509, 19508], [19380, 19381, 19509], [19381, 19510, 19509], [19381, 19382, 19511], [19381, 19511, 19510], [19382, 19383, 19511], [19383, 19512, 19511], [19383, 19384, 19513], [19383, 19513, 19512], [19384, 19385, 19513], [19385, 19514, 19513], [19385, 19386, 19515], [19385, 19515, 19514], [19386, 19387, 19515], [19387, 19516, 19515], [19387, 19388, 19517], [19387, 19517, 19516], [19388, 19389, 19517], [19389, 19518, 19517], [19389, 19390, 19519], [19389, 19519, 19518], [19390, 19391, 19519], [19391, 19520, 19519], [19391, 19392, 19521], [19391, 19521, 19520], [19392, 19393, 19521], [19393, 19522, 19521], [19393, 19394, 19523], [19393, 19523, 19522], [19394, 19395, 19523], [19395, 19524, 19523], [19396, 19397, 19525], [19397, 19526, 19525], [19397, 19398, 19527], [19397, 19527, 19526], [19398, 19399, 19527], [19399, 19528, 19527], [19399, 19400, 19529], [19399, 19529, 19528], [19400, 19401, 19529], [19401, 19530, 19529], [19401, 19402, 19531], [19401, 19531, 19530], [19402, 19403, 19531], [19403, 19532, 19531], [19403, 19404, 19533], [19403, 19533, 19532], [19404, 19405, 19533], [19405, 19534, 19533], [19405, 19406, 19535], [19405, 19535, 19534], [19406, 19407, 19535], [19407, 19536, 19535], [19407, 19408, 19537], [19407, 19537, 19536], [19408, 19409, 19537], [19409, 19538, 19537], [19409, 19410, 19539], [19409, 19539, 19538], [19410, 19411, 19539], [19411, 19540, 19539], [19411, 19412, 19541], [19411, 19541, 19540], [19412, 19413, 19541], [19413, 19542, 19541], [19413, 19414, 19543], [19413, 19543, 19542], [19414, 19415, 19543], [19415, 19544, 19543], [19415, 19416, 19545], [19415, 19545, 19544], [19416, 19417, 19545], [19417, 19546, 19545], [19417, 19418, 19547], [19417, 19547, 19546], [19418, 19419, 19547], [19419, 19548, 19547], [19419, 19420, 19549], [19419, 19549, 19548], [19420, 19421, 19549], [19421, 19550, 19549], [19421, 19422, 19551], [19421, 19551, 19550], [19422, 19423, 19551], [19423, 19552, 19551], [19423, 19424, 19553], [19423, 19553, 19552], [19424, 19425, 19553], [19425, 19554, 19553], [19425, 19426, 19555], [19425, 19555, 19554], [19426, 19427, 19555], [19427, 19556, 19555], [19427, 19428, 19557], [19427, 19557, 19556], [19428, 19429, 19557], [19429, 19558, 19557], [19429, 19430, 19559], [19429, 19559, 19558], [19430, 19431, 19559], [19431, 19560, 19559], [19431, 19432, 19561], [19431, 19561, 19560], [19432, 19433, 19561], [19433, 19562, 19561], [19433, 19434, 19563], [19433, 19563, 19562], [19434, 19435, 19563], [19435, 19564, 19563], [19435, 19436, 19565], [19435, 19565, 19564], [19436, 19437, 19565], [19437, 19566, 19565], [19437, 19438, 19567], [19437, 19567, 19566], [19438, 19439, 19567], [19439, 19568, 19567], [19439, 19440, 19569], [19439, 19569, 19568], [19440, 19441, 19569], [19441, 19570, 19569], [19441, 19442, 19571], [19441, 19571, 19570], [19442, 19443, 19571], [19443, 19572, 19571], [19443, 19444, 19573], [19443, 19573, 19572], [19444, 19445, 19573], [19445, 19574, 19573], [19445, 19446, 19575], [19445, 19575, 19574], [19446, 19447, 19575], [19447, 19576, 19575], [19447, 19448, 19577], 
[19447, 19577, 19576], [19448, 19449, 19577], [19449, 19578, 19577], [19449, 19450, 19579], [19449, 19579, 19578], [19450, 19451, 19579], [19451, 19580, 19579], [19451, 19452, 19581], [19451, 19581, 19580], [19452, 19453, 19581], [19453, 19582, 19581], [19453, 19454, 19583], [19453, 19583, 19582], [19454, 19455, 19583], [19455, 19584, 19583], [19455, 19456, 19585], [19455, 19585, 19584], [19456, 19457, 19585], [19457, 19586, 19585], [19457, 19458, 19587], [19457, 19587, 19586], [19458, 19459, 19587], [19459, 19588, 19587], [19459, 19460, 19589], [19459, 19589, 19588], [19460, 19461, 19589], [19461, 19590, 19589], [19461, 19462, 19591], [19461, 19591, 19590], [19462, 19463, 19591], [19463, 19592, 19591], [19463, 19464, 19593], [19463, 19593, 19592], [19464, 19465, 19593], [19465, 19594, 19593], [19465, 19466, 19595], [19465, 19595, 19594], [19466, 19467, 19595], [19467, 19596, 19595], [19467, 19468, 19597], [19467, 19597, 19596], [19468, 19469, 19597], [19469, 19598, 19597], [19469, 19470, 19599], [19469, 19599, 19598], [19470, 19471, 19599], [19471, 19600, 19599], [19471, 19472, 19601], [19471, 19601, 19600], [19472, 19473, 19601], [19473, 19602, 19601], [19473, 19474, 19603], [19473, 19603, 19602], [19474, 19475, 19603], [19475, 19604, 19603], [19475, 19476, 19605], [19475, 19605, 19604], [19476, 19477, 19605], [19477, 19606, 19605], [19477, 19478, 19607], [19477, 19607, 19606], [19478, 19479, 19607], [19479, 19608, 19607], [19479, 19480, 19609], [19479, 19609, 19608], [19480, 19481, 19609], [19481, 19610, 19609], [19481, 19482, 19611], [19481, 19611, 19610], [19482, 19483, 19611], [19483, 19612, 19611], [19483, 19484, 19613], [19483, 19613, 19612], [19484, 19485, 19613], [19485, 19614, 19613], [19485, 19486, 19615], [19485, 19615, 19614], [19486, 19487, 19615], [19487, 19616, 19615], [19487, 19488, 19617], [19487, 19617, 19616], [19488, 19489, 19617], [19489, 19618, 19617], [19489, 19490, 19619], [19489, 19619, 19618], [19490, 19491, 19619], [19491, 19620, 19619], [19491, 19492, 19621], [19491, 19621, 19620], [19492, 19493, 19621], [19493, 19622, 19621], [19493, 19494, 19623], [19493, 19623, 19622], [19494, 19495, 19623], [19495, 19624, 19623], [19495, 19496, 19625], [19495, 19625, 19624], [19496, 19497, 19625], [19497, 19626, 19625], [19497, 19498, 19627], [19497, 19627, 19626], [19498, 19499, 19627], [19499, 19628, 19627], [19499, 19500, 19629], [19499, 19629, 19628], [19500, 19501, 19629], [19501, 19630, 19629], [19501, 19502, 19631], [19501, 19631, 19630], [19502, 19503, 19631], [19503, 19632, 19631], [19503, 19504, 19633], [19503, 19633, 19632], [19504, 19505, 19633], [19505, 19634, 19633], [19505, 19506, 19635], [19505, 19635, 19634], [19506, 19507, 19635], [19507, 19636, 19635], [19507, 19508, 19637], [19507, 19637, 19636], [19508, 19509, 19637], [19509, 19638, 19637], [19509, 19510, 19639], [19509, 19639, 19638], [19510, 19511, 19639], [19511, 19640, 19639], [19511, 19512, 19641], [19511, 19641, 19640], [19512, 19513, 19641], [19513, 19642, 19641], [19513, 19514, 19643], [19513, 19643, 19642], [19514, 19515, 19643], [19515, 19644, 19643], [19515, 19516, 19645], [19515, 19645, 19644], [19516, 19517, 19645], [19517, 19646, 19645], [19517, 19518, 19647], [19517, 19647, 19646], [19518, 19519, 19647], [19519, 19648, 19647], [19519, 19520, 19649], [19519, 19649, 19648], [19520, 19521, 19649], [19521, 19650, 19649], [19521, 19522, 19651], [19521, 19651, 19650], [19522, 19523, 19651], [19523, 19652, 19651], [19523, 19524, 19653], [19523, 19653, 19652], [19525, 19526, 19655], [19525, 
19655, 19654], [19526, 19527, 19655], [19527, 19656, 19655], [19527, 19528, 19657], [19527, 19657, 19656], [19528, 19529, 19657], [19529, 19658, 19657], [19529, 19530, 19659], [19529, 19659, 19658], [19530, 19531, 19659], [19531, 19660, 19659], [19531, 19532, 19661], [19531, 19661, 19660], [19532, 19533, 19661], [19533, 19662, 19661], [19533, 19534, 19663], [19533, 19663, 19662], [19534, 19535, 19663], [19535, 19664, 19663], [19535, 19536, 19665], [19535, 19665, 19664], [19536, 19537, 19665], [19537, 19666, 19665], [19537, 19538, 19667], [19537, 19667, 19666], [19538, 19539, 19667], [19539, 19668, 19667], [19539, 19540, 19669], [19539, 19669, 19668], [19540, 19541, 19669], [19541, 19670, 19669], [19541, 19542, 19671], [19541, 19671, 19670], [19542, 19543, 19671], [19543, 19672, 19671], [19543, 19544, 19673], [19543, 19673, 19672], [19544, 19545, 19673], [19545, 19674, 19673], [19545, 19546, 19675], [19545, 19675, 19674], [19546, 19547, 19675], [19547, 19676, 19675], [19547, 19548, 19677], [19547, 19677, 19676], [19548, 19549, 19677], [19549, 19678, 19677], [19549, 19550, 19679], [19549, 19679, 19678], [19550, 19551, 19679], [19551, 19680, 19679], [19551, 19552, 19681], [19551, 19681, 19680], [19552, 19553, 19681], [19553, 19682, 19681], [19553, 19554, 19683], [19553, 19683, 19682], [19554, 19555, 19683], [19555, 19684, 19683], [19555, 19556, 19685], [19555, 19685, 19684], [19556, 19557, 19685], [19557, 19686, 19685], [19557, 19558, 19687], [19557, 19687, 19686], [19558, 19559, 19687], [19559, 19688, 19687], [19559, 19560, 19689], [19559, 19689, 19688], [19560, 19561, 19689], [19561, 19690, 19689], [19561, 19562, 19691], [19561, 19691, 19690], [19562, 19563, 19691], [19563, 19692, 19691], [19563, 19564, 19693], [19563, 19693, 19692], [19564, 19565, 19693], [19565, 19694, 19693], [19565, 19566, 19695], [19565, 19695, 19694], [19566, 19567, 19695], [19567, 19696, 19695], [19567, 19568, 19697], [19567, 19697, 19696], [19568, 19569, 19697], [19569, 19698, 19697], [19569, 19570, 19699], [19569, 19699, 19698], [19570, 19571, 19699], [19571, 19700, 19699], [19571, 19572, 19701], [19571, 19701, 19700], [19572, 19573, 19701], [19573, 19702, 19701], [19573, 19574, 19703], [19573, 19703, 19702], [19574, 19575, 19703], [19575, 19704, 19703], [19575, 19576, 19705], [19575, 19705, 19704], [19576, 19577, 19705], [19577, 19706, 19705], [19577, 19578, 19707], [19577, 19707, 19706], [19578, 19579, 19707], [19579, 19708, 19707], [19579, 19580, 19709], [19579, 19709, 19708], [19580, 19581, 19709], [19581, 19710, 19709], [19581, 19582, 19711], [19581, 19711, 19710], [19582, 19583, 19711], [19583, 19712, 19711], [19583, 19584, 19713], [19583, 19713, 19712], [19584, 19585, 19713], [19585, 19714, 19713], [19585, 19586, 19715], [19585, 19715, 19714], [19586, 19587, 19715], [19587, 19716, 19715], [19587, 19588, 19717], [19587, 19717, 19716], [19588, 19589, 19717], [19589, 19718, 19717], [19589, 19590, 19719], [19589, 19719, 19718], [19590, 19591, 19719], [19591, 19720, 19719], [19591, 19592, 19721], [19591, 19721, 19720], [19592, 19593, 19721], [19593, 19722, 19721], [19593, 19594, 19723], [19593, 19723, 19722], [19594, 19595, 19723], [19595, 19724, 19723], [19595, 19596, 19725], [19595, 19725, 19724], [19596, 19597, 19725], [19597, 19726, 19725], [19597, 19598, 19727], [19597, 19727, 19726], [19598, 19599, 19727], [19599, 19728, 19727], [19599, 19600, 19729], [19599, 19729, 19728], [19600, 19601, 19729], [19601, 19730, 19729], [19601, 19602, 19731], [19601, 19731, 19730], [19602, 19603, 19731], [19603, 19732, 
19731], [19603, 19604, 19733], [19603, 19733, 19732], [19604, 19605, 19733], [19605, 19734, 19733], [19605, 19606, 19735], [19605, 19735, 19734], [19606, 19607, 19735], [19607, 19736, 19735], [19607, 19608, 19737], [19607, 19737, 19736], [19608, 19609, 19737], [19609, 19738, 19737], [19609, 19610, 19739], [19609, 19739, 19738], [19610, 19611, 19739], [19611, 19740, 19739], [19611, 19612, 19741], [19611, 19741, 19740], [19612, 19613, 19741], [19613, 19742, 19741], [19613, 19614, 19743], [19613, 19743, 19742], [19614, 19615, 19743], [19615, 19744, 19743], [19615, 19616, 19745], [19615, 19745, 19744], [19616, 19617, 19745], [19617, 19746, 19745], [19617, 19618, 19747], [19617, 19747, 19746], [19618, 19619, 19747], [19619, 19748, 19747], [19619, 19620, 19749], [19619, 19749, 19748], [19620, 19621, 19749], [19621, 19750, 19749], [19621, 19622, 19751], [19621, 19751, 19750], [19622, 19623, 19751], [19623, 19752, 19751], [19623, 19624, 19753], [19623, 19753, 19752], [19624, 19625, 19753], [19625, 19754, 19753], [19625, 19626, 19755], [19625, 19755, 19754], [19626, 19627, 19755], [19627, 19756, 19755], [19627, 19628, 19757], [19627, 19757, 19756], [19628, 19629, 19757], [19629, 19758, 19757], [19629, 19630, 19759], [19629, 19759, 19758], [19630, 19631, 19759], [19631, 19760, 19759], [19631, 19632, 19761], [19631, 19761, 19760], [19632, 19633, 19761], [19633, 19762, 19761], [19633, 19634, 19763], [19633, 19763, 19762], [19634, 19635, 19763], [19635, 19764, 19763], [19635, 19636, 19765], [19635, 19765, 19764], [19636, 19637, 19765], [19637, 19766, 19765], [19637, 19638, 19767], [19637, 19767, 19766], [19638, 19639, 19767], [19639, 19768, 19767], [19639, 19640, 19769], [19639, 19769, 19768], [19640, 19641, 19769], [19641, 19770, 19769], [19641, 19642, 19771], [19641, 19771, 19770], [19642, 19643, 19771], [19643, 19772, 19771], [19643, 19644, 19773], [19643, 19773, 19772], [19644, 19645, 19773], [19645, 19774, 19773], [19645, 19646, 19775], [19645, 19775, 19774], [19646, 19647, 19775], [19647, 19776, 19775], [19647, 19648, 19777], [19647, 19777, 19776], [19648, 19649, 19777], [19649, 19778, 19777], [19649, 19650, 19779], [19649, 19779, 19778], [19650, 19651, 19779], [19651, 19780, 19779], [19651, 19652, 19781], [19651, 19781, 19780], [19652, 19653, 19781], [19653, 19782, 19781], [19654, 19655, 19783], [19655, 19784, 19783], [19655, 19656, 19785], [19655, 19785, 19784], [19656, 19657, 19785], [19657, 19786, 19785], [19657, 19658, 19787], [19657, 19787, 19786], [19658, 19659, 19787], [19659, 19788, 19787], [19659, 19660, 19789], [19659, 19789, 19788], [19660, 19661, 19789], [19661, 19790, 19789], [19661, 19662, 19791], [19661, 19791, 19790], [19662, 19663, 19791], [19663, 19792, 19791], [19663, 19664, 19793], [19663, 19793, 19792], [19664, 19665, 19793], [19665, 19794, 19793], [19665, 19666, 19795], [19665, 19795, 19794], [19666, 19667, 19795], [19667, 19796, 19795], [19667, 19668, 19797], [19667, 19797, 19796], [19668, 19669, 19797], [19669, 19798, 19797], [19669, 19670, 19799], [19669, 19799, 19798], [19670, 19671, 19799], [19671, 19800, 19799], [19671, 19672, 19801], [19671, 19801, 19800], [19672, 19673, 19801], [19673, 19802, 19801], [19673, 19674, 19803], [19673, 19803, 19802], [19674, 19675, 19803], [19675, 19804, 19803], [19675, 19676, 19805], [19675, 19805, 19804], [19676, 19677, 19805], [19677, 19806, 19805], [19677, 19678, 19807], [19677, 19807, 19806], [19678, 19679, 19807], [19679, 19808, 19807], [19679, 19680, 19809], [19679, 19809, 19808], [19680, 19681, 19809], [19681, 19810, 19809], 
[19681, 19682, 19811], [19681, 19811, 19810], [19682, 19683, 19811], [19683, 19812, 19811], [19683, 19684, 19813], [19683, 19813, 19812], [19684, 19685, 19813], [19685, 19814, 19813], [19685, 19686, 19815], [19685, 19815, 19814], [19686, 19687, 19815], [19687, 19816, 19815], [19687, 19688, 19817], [19687, 19817, 19816], [19688, 19689, 19817], [19689, 19818, 19817], [19689, 19690, 19819], [19689, 19819, 19818], [19690, 19691, 19819], [19691, 19820, 19819], [19691, 19692, 19821], [19691, 19821, 19820], [19692, 19693, 19821], [19693, 19822, 19821], [19693, 19694, 19823], [19693, 19823, 19822], [19694, 19695, 19823], [19695, 19824, 19823], [19695, 19696, 19825], [19695, 19825, 19824], [19696, 19697, 19825], [19697, 19826, 19825], [19697, 19698, 19827], [19697, 19827, 19826], [19698, 19699, 19827], [19699, 19828, 19827], [19699, 19700, 19829], [19699, 19829, 19828], [19700, 19701, 19829], [19701, 19830, 19829], [19701, 19702, 19831], [19701, 19831, 19830], [19702, 19703, 19831], [19703, 19832, 19831], [19703, 19704, 19833], [19703, 19833, 19832], [19704, 19705, 19833], [19705, 19834, 19833], [19705, 19706, 19835], [19705, 19835, 19834], [19706, 19707, 19835], [19707, 19836, 19835], [19707, 19708, 19837], [19707, 19837, 19836], [19708, 19709, 19837], [19709, 19838, 19837], [19709, 19710, 19839], [19709, 19839, 19838], [19710, 19711, 19839], [19711, 19840, 19839], [19711, 19712, 19841], [19711, 19841, 19840], [19712, 19713, 19841], [19713, 19842, 19841], [19713, 19714, 19843], [19713, 19843, 19842], [19714, 19715, 19843], [19715, 19844, 19843], [19715, 19716, 19845], [19715, 19845, 19844], [19716, 19717, 19845], [19717, 19846, 19845], [19717, 19718, 19847], [19717, 19847, 19846], [19718, 19719, 19847], [19719, 19848, 19847], [19719, 19720, 19849], [19719, 19849, 19848], [19720, 19721, 19849], [19721, 19850, 19849], [19721, 19722, 19851], [19721, 19851, 19850], [19722, 19723, 19851], [19723, 19852, 19851], [19723, 19724, 19853], [19723, 19853, 19852], [19724, 19725, 19853], [19725, 19854, 19853], [19725, 19726, 19855], [19725, 19855, 19854], [19726, 19727, 19855], [19727, 19856, 19855], [19727, 19728, 19857], [19727, 19857, 19856], [19728, 19729, 19857], [19729, 19858, 19857], [19729, 19730, 19859], [19729, 19859, 19858], [19730, 19731, 19859], [19731, 19860, 19859], [19731, 19732, 19861], [19731, 19861, 19860], [19732, 19733, 19861], [19733, 19862, 19861], [19733, 19734, 19863], [19733, 19863, 19862], [19734, 19735, 19863], [19735, 19864, 19863], [19735, 19736, 19865], [19735, 19865, 19864], [19736, 19737, 19865], [19737, 19866, 19865], [19737, 19738, 19867], [19737, 19867, 19866], [19738, 19739, 19867], [19739, 19868, 19867], [19739, 19740, 19869], [19739, 19869, 19868], [19740, 19741, 19869], [19741, 19870, 19869], [19741, 19742, 19871], [19741, 19871, 19870], [19742, 19743, 19871], [19743, 19872, 19871], [19743, 19744, 19873], [19743, 19873, 19872], [19744, 19745, 19873], [19745, 19874, 19873], [19745, 19746, 19875], [19745, 19875, 19874], [19746, 19747, 19875], [19747, 19876, 19875], [19747, 19748, 19877], [19747, 19877, 19876], [19748, 19749, 19877], [19749, 19878, 19877], [19749, 19750, 19879], [19749, 19879, 19878], [19750, 19751, 19879], [19751, 19880, 19879], [19751, 19752, 19881], [19751, 19881, 19880], [19752, 19753, 19881], [19753, 19882, 19881], [19753, 19754, 19883], [19753, 19883, 19882], [19754, 19755, 19883], [19755, 19884, 19883], [19755, 19756, 19885], [19755, 19885, 19884], [19756, 19757, 19885], [19757, 19886, 19885], [19757, 19758, 19887], [19757, 19887, 19886], [19758, 
19759, 19887], [19759, 19888, 19887], [19759, 19760, 19889], [19759, 19889, 19888], [19760, 19761, 19889], [19761, 19890, 19889], [19761, 19762, 19891], [19761, 19891, 19890], [19762, 19763, 19891], [19763, 19892, 19891], [19763, 19764, 19893], [19763, 19893, 19892], [19764, 19765, 19893], [19765, 19894, 19893], [19765, 19766, 19895], [19765, 19895, 19894], [19766, 19767, 19895], [19767, 19896, 19895], [19767, 19768, 19897], [19767, 19897, 19896], [19768, 19769, 19897], [19769, 19898, 19897], [19769, 19770, 19899], [19769, 19899, 19898], [19770, 19771, 19899], [19771, 19900, 19899], [19771, 19772, 19901], [19771, 19901, 19900], [19772, 19773, 19901], [19773, 19902, 19901], [19773, 19774, 19903], [19773, 19903, 19902], [19774, 19775, 19903], [19775, 19904, 19903], [19775, 19776, 19905], [19775, 19905, 19904], [19776, 19777, 19905], [19777, 19906, 19905], [19777, 19778, 19907], [19777, 19907, 19906], [19778, 19779, 19907], [19779, 19908, 19907], [19779, 19780, 19909], [19779, 19909, 19908], [19780, 19781, 19909], [19781, 19910, 19909], [19781, 19782, 19911], [19781, 19911, 19910], [19783, 19784, 19913], [19783, 19913, 19912], [19784, 19785, 19913], [19785, 19914, 19913], [19785, 19786, 19915], [19785, 19915, 19914], [19786, 19787, 19915], [19787, 19916, 19915], [19787, 19788, 19917], [19787, 19917, 19916], [19788, 19789, 19917], [19789, 19918, 19917], [19789, 19790, 19919], [19789, 19919, 19918], [19790, 19791, 19919], [19791, 19920, 19919], [19791, 19792, 19921], [19791, 19921, 19920], [19792, 19793, 19921], [19793, 19922, 19921], [19793, 19794, 19923], [19793, 19923, 19922], [19794, 19795, 19923], [19795, 19924, 19923], [19795, 19796, 19925], [19795, 19925, 19924], [19796, 19797, 19925], [19797, 19926, 19925], [19797, 19798, 19927], [19797, 19927, 19926], [19798, 19799, 19927], [19799, 19928, 19927], [19799, 19800, 19929], [19799, 19929, 19928], [19800, 19801, 19929], [19801, 19930, 19929], [19801, 19802, 19931], [19801, 19931, 19930], [19802, 19803, 19931], [19803, 19932, 19931], [19803, 19804, 19933], [19803, 19933, 19932], [19804, 19805, 19933], [19805, 19934, 19933], [19805, 19806, 19935], [19805, 19935, 19934], [19806, 19807, 19935], [19807, 19936, 19935], [19807, 19808, 19937], [19807, 19937, 19936], [19808, 19809, 19937], [19809, 19938, 19937], [19809, 19810, 19939], [19809, 19939, 19938], [19810, 19811, 19939], [19811, 19940, 19939], [19811, 19812, 19941], [19811, 19941, 19940], [19812, 19813, 19941], [19813, 19942, 19941], [19813, 19814, 19943], [19813, 19943, 19942], [19814, 19815, 19943], [19815, 19944, 19943], [19815, 19816, 19945], [19815, 19945, 19944], [19816, 19817, 19945], [19817, 19946, 19945], [19817, 19818, 19947], [19817, 19947, 19946], [19818, 19819, 19947], [19819, 19948, 19947], [19819, 19820, 19949], [19819, 19949, 19948], [19820, 19821, 19949], [19821, 19950, 19949], [19821, 19822, 19951], [19821, 19951, 19950], [19822, 19823, 19951], [19823, 19952, 19951], [19823, 19824, 19953], [19823, 19953, 19952], [19824, 19825, 19953], [19825, 19954, 19953], [19825, 19826, 19955], [19825, 19955, 19954], [19826, 19827, 19955], [19827, 19956, 19955], [19827, 19828, 19957], [19827, 19957, 19956], [19828, 19829, 19957], [19829, 19958, 19957], [19829, 19830, 19959], [19829, 19959, 19958], [19830, 19831, 19959], [19831, 19960, 19959], [19831, 19832, 19961], [19831, 19961, 19960], [19832, 19833, 19961], [19833, 19962, 19961], [19833, 19834, 19963], [19833, 19963, 19962], [19834, 19835, 19963], [19835, 19964, 19963], [19835, 19836, 19965], [19835, 19965, 19964], [19836, 19837, 
19965], [19837, 19966, 19965], [19837, 19838, 19967], [19837, 19967, 19966], [19838, 19839, 19967], [19839, 19968, 19967], [19839, 19840, 19969], [19839, 19969, 19968], [19840, 19841, 19969], [19841, 19970, 19969], [19841, 19842, 19971], [19841, 19971, 19970], [19842, 19843, 19971], [19843, 19972, 19971], [19843, 19844, 19973], [19843, 19973, 19972], [19844, 19845, 19973], [19845, 19974, 19973], [19845, 19846, 19975], [19845, 19975, 19974], [19846, 19847, 19975], [19847, 19976, 19975], [19847, 19848, 19977], [19847, 19977, 19976], [19848, 19849, 19977], [19849, 19978, 19977], [19849, 19850, 19979], [19849, 19979, 19978], [19850, 19851, 19979], [19851, 19980, 19979], [19851, 19852, 19981], [19851, 19981, 19980], [19852, 19853, 19981], [19853, 19982, 19981], [19853, 19854, 19983], [19853, 19983, 19982], [19854, 19855, 19983], [19855, 19984, 19983], [19855, 19856, 19985], [19855, 19985, 19984], [19856, 19857, 19985], [19857, 19986, 19985], [19857, 19858, 19987], [19857, 19987, 19986], [19858, 19859, 19987], [19859, 19988, 19987], [19859, 19860, 19989], [19859, 19989, 19988], [19860, 19861, 19989], [19861, 19990, 19989], [19861, 19862, 19991], [19861, 19991, 19990], [19862, 19863, 19991], [19863, 19992, 19991], [19863, 19864, 19993], [19863, 19993, 19992], [19864, 19865, 19993], [19865, 19994, 19993], [19865, 19866, 19995], [19865, 19995, 19994], [19866, 19867, 19995], [19867, 19996, 19995], [19867, 19868, 19997], [19867, 19997, 19996], [19868, 19869, 19997], [19869, 19998, 19997], [19869, 19870, 19999], [19869, 19999, 19998], [19870, 19871, 19999], [19871, 20000, 19999], [19871, 19872, 20001], [19871, 20001, 20000], [19872, 19873, 20001], [19873, 20002, 20001], [19873, 19874, 20003], [19873, 20003, 20002], [19874, 19875, 20003], [19875, 20004, 20003], [19875, 19876, 20005], [19875, 20005, 20004], [19876, 19877, 20005], [19877, 20006, 20005], [19877, 19878, 20007], [19877, 20007, 20006], [19878, 19879, 20007], [19879, 20008, 20007], [19879, 19880, 20009], [19879, 20009, 20008], [19880, 19881, 20009], [19881, 20010, 20009], [19881, 19882, 20011], [19881, 20011, 20010], [19882, 19883, 20011], [19883, 20012, 20011], [19883, 19884, 20013], [19883, 20013, 20012], [19884, 19885, 20013], [19885, 20014, 20013], [19885, 19886, 20015], [19885, 20015, 20014], [19886, 19887, 20015], [19887, 20016, 20015], [19887, 19888, 20017], [19887, 20017, 20016], [19888, 19889, 20017], [19889, 20018, 20017], [19889, 19890, 20019], [19889, 20019, 20018], [19890, 19891, 20019], [19891, 20020, 20019], [19891, 19892, 20021], [19891, 20021, 20020], [19892, 19893, 20021], [19893, 20022, 20021], [19893, 19894, 20023], [19893, 20023, 20022], [19894, 19895, 20023], [19895, 20024, 20023], [19895, 19896, 20025], [19895, 20025, 20024], [19896, 19897, 20025], [19897, 20026, 20025], [19897, 19898, 20027], [19897, 20027, 20026], [19898, 19899, 20027], [19899, 20028, 20027], [19899, 19900, 20029], [19899, 20029, 20028], [19900, 19901, 20029], [19901, 20030, 20029], [19901, 19902, 20031], [19901, 20031, 20030], [19902, 19903, 20031], [19903, 20032, 20031], [19903, 19904, 20033], [19903, 20033, 20032], [19904, 19905, 20033], [19905, 20034, 20033], [19905, 19906, 20035], [19905, 20035, 20034], [19906, 19907, 20035], [19907, 20036, 20035], [19907, 19908, 20037], [19907, 20037, 20036], [19908, 19909, 20037], [19909, 20038, 20037], [19909, 19910, 20039], [19909, 20039, 20038], [19910, 19911, 20039], [19911, 20040, 20039], [19912, 19913, 20041], [19913, 20042, 20041], [19913, 19914, 20043], [19913, 20043, 20042], [19914, 19915, 20043], 
[19915, 20044, 20043], [19915, 19916, 20045], [19915, 20045, 20044], [19916, 19917, 20045], [19917, 20046, 20045], [19917, 19918, 20047], [19917, 20047, 20046], [19918, 19919, 20047], [19919, 20048, 20047], [19919, 19920, 20049], [19919, 20049, 20048], [19920, 19921, 20049], [19921, 20050, 20049], [19921, 19922, 20051], [19921, 20051, 20050], [19922, 19923, 20051], [19923, 20052, 20051], [19923, 19924, 20053], [19923, 20053, 20052], [19924, 19925, 20053], [19925, 20054, 20053], [19925, 19926, 20055], [19925, 20055, 20054], [19926, 19927, 20055], [19927, 20056, 20055], [19927, 19928, 20057], [19927, 20057, 20056], [19928, 19929, 20057], [19929, 20058, 20057], [19929, 19930, 20059], [19929, 20059, 20058], [19930, 19931, 20059], [19931, 20060, 20059], [19931, 19932, 20061], [19931, 20061, 20060], [19932, 19933, 20061], [19933, 20062, 20061], [19933, 19934, 20063], [19933, 20063, 20062], [19934, 19935, 20063], [19935, 20064, 20063], [19935, 19936, 20065], [19935, 20065, 20064], [19936, 19937, 20065], [19937, 20066, 20065], [19937, 19938, 20067], [19937, 20067, 20066], [19938, 19939, 20067], [19939, 20068, 20067], [19939, 19940, 20069], [19939, 20069, 20068], [19940, 19941, 20069], [19941, 20070, 20069], [19941, 19942, 20071], [19941, 20071, 20070], [19942, 19943, 20071], [19943, 20072, 20071], [19943, 19944, 20073], [19943, 20073, 20072], [19944, 19945, 20073], [19945, 20074, 20073], [19945, 19946, 20075], [19945, 20075, 20074], [19946, 19947, 20075], [19947, 20076, 20075], [19947, 19948, 20077], [19947, 20077, 20076], [19948, 19949, 20077], [19949, 20078, 20077], [19949, 19950, 20079], [19949, 20079, 20078], [19950, 19951, 20079], [19951, 20080, 20079], [19951, 19952, 20081], [19951, 20081, 20080], [19952, 19953, 20081], [19953, 20082, 20081], [19953, 19954, 20083], [19953, 20083, 20082], [19954, 19955, 20083], [19955, 20084, 20083], [19955, 19956, 20085], [19955, 20085, 20084], [19956, 19957, 20085], [19957, 20086, 20085], [19957, 19958, 20087], [19957, 20087, 20086], [19958, 19959, 20087], [19959, 20088, 20087], [19959, 19960, 20089], [19959, 20089, 20088], [19960, 19961, 20089], [19961, 20090, 20089], [19961, 19962, 20091], [19961, 20091, 20090], [19962, 19963, 20091], [19963, 20092, 20091], [19963, 19964, 20093], [19963, 20093, 20092], [19964, 19965, 20093], [19965, 20094, 20093], [19965, 19966, 20095], [19965, 20095, 20094], [19966, 19967, 20095], [19967, 20096, 20095], [19967, 19968, 20097], [19967, 20097, 20096], [19968, 19969, 20097], [19969, 20098, 20097], [19969, 19970, 20099], [19969, 20099, 20098], [19970, 19971, 20099], [19971, 20100, 20099], [19971, 19972, 20101], [19971, 20101, 20100], [19972, 19973, 20101], [19973, 20102, 20101], [19973, 19974, 20103], [19973, 20103, 20102], [19974, 19975, 20103], [19975, 20104, 20103], [19975, 19976, 20105], [19975, 20105, 20104], [19976, 19977, 20105], [19977, 20106, 20105], [19977, 19978, 20107], [19977, 20107, 20106], [19978, 19979, 20107], [19979, 20108, 20107], [19979, 19980, 20109], [19979, 20109, 20108], [19980, 19981, 20109], [19981, 20110, 20109], [19981, 19982, 20111], [19981, 20111, 20110], [19982, 19983, 20111], [19983, 20112, 20111], [19983, 19984, 20113], [19983, 20113, 20112], [19984, 19985, 20113], [19985, 20114, 20113], [19985, 19986, 20115], [19985, 20115, 20114], [19986, 19987, 20115], [19987, 20116, 20115], [19987, 19988, 20117], [19987, 20117, 20116], [19988, 19989, 20117], [19989, 20118, 20117], [19989, 19990, 20119], [19989, 20119, 20118], [19990, 19991, 20119], [19991, 20120, 20119], [19991, 19992, 20121], [19991, 
20121, 20120], [19992, 19993, 20121], [19993, 20122, 20121], [19993, 19994, 20123], [19993, 20123, 20122], [19994, 19995, 20123], [19995, 20124, 20123], [19995, 19996, 20125], [19995, 20125, 20124], [19996, 19997, 20125], [19997, 20126, 20125], [19997, 19998, 20127], [19997, 20127, 20126], [19998, 19999, 20127], [19999, 20128, 20127], [19999, 20000, 20129], [19999, 20129, 20128], [20000, 20001, 20129], [20001, 20130, 20129], [20001, 20002, 20131], [20001, 20131, 20130], [20002, 20003, 20131], [20003, 20132, 20131], [20003, 20004, 20133], [20003, 20133, 20132], [20004, 20005, 20133], [20005, 20134, 20133], [20005, 20006, 20135], [20005, 20135, 20134], [20006, 20007, 20135], [20007, 20136, 20135], [20007, 20008, 20137], [20007, 20137, 20136], [20008, 20009, 20137], [20009, 20138, 20137], [20009, 20010, 20139], [20009, 20139, 20138], [20010, 20011, 20139], [20011, 20140, 20139], [20011, 20012, 20141], [20011, 20141, 20140], [20012, 20013, 20141], [20013, 20142, 20141], [20013, 20014, 20143], [20013, 20143, 20142], [20014, 20015, 20143], [20015, 20144, 20143], [20015, 20016, 20145], [20015, 20145, 20144], [20016, 20017, 20145], [20017, 20146, 20145], [20017, 20018, 20147], [20017, 20147, 20146], [20018, 20019, 20147], [20019, 20148, 20147], [20019, 20020, 20149], [20019, 20149, 20148], [20020, 20021, 20149], [20021, 20150, 20149], [20021, 20022, 20151], [20021, 20151, 20150], [20022, 20023, 20151], [20023, 20152, 20151], [20023, 20024, 20153], [20023, 20153, 20152], [20024, 20025, 20153], [20025, 20154, 20153], [20025, 20026, 20155], [20025, 20155, 20154], [20026, 20027, 20155], [20027, 20156, 20155], [20027, 20028, 20157], [20027, 20157, 20156], [20028, 20029, 20157], [20029, 20158, 20157], [20029, 20030, 20159], [20029, 20159, 20158], [20030, 20031, 20159], [20031, 20160, 20159], [20031, 20032, 20161], [20031, 20161, 20160], [20032, 20033, 20161], [20033, 20162, 20161], [20033, 20034, 20163], [20033, 20163, 20162], [20034, 20035, 20163], [20035, 20164, 20163], [20035, 20036, 20165], [20035, 20165, 20164], [20036, 20037, 20165], [20037, 20166, 20165], [20037, 20038, 20167], [20037, 20167, 20166], [20038, 20039, 20167], [20039, 20168, 20167], [20039, 20040, 20169], [20039, 20169, 20168], [20041, 20042, 20171], [20041, 20171, 20170], [20042, 20043, 20171], [20043, 20172, 20171], [20043, 20044, 20173], [20043, 20173, 20172], [20044, 20045, 20173], [20045, 20174, 20173], [20045, 20046, 20175], [20045, 20175, 20174], [20046, 20047, 20175], [20047, 20176, 20175], [20047, 20048, 20177], [20047, 20177, 20176], [20048, 20049, 20177], [20049, 20178, 20177], [20049, 20050, 20179], [20049, 20179, 20178], [20050, 20051, 20179], [20051, 20180, 20179], [20051, 20052, 20181], [20051, 20181, 20180], [20052, 20053, 20181], [20053, 20182, 20181], [20053, 20054, 20183], [20053, 20183, 20182], [20054, 20055, 20183], [20055, 20184, 20183], [20055, 20056, 20185], [20055, 20185, 20184], [20056, 20057, 20185], [20057, 20186, 20185], [20057, 20058, 20187], [20057, 20187, 20186], [20058, 20059, 20187], [20059, 20188, 20187], [20059, 20060, 20189], [20059, 20189, 20188], [20060, 20061, 20189], [20061, 20190, 20189], [20061, 20062, 20191], [20061, 20191, 20190], [20062, 20063, 20191], [20063, 20192, 20191], [20063, 20064, 20193], [20063, 20193, 20192], [20064, 20065, 20193], [20065, 20194, 20193], [20065, 20066, 20195], [20065, 20195, 20194], [20066, 20067, 20195], [20067, 20196, 20195], [20067, 20068, 20197], [20067, 20197, 20196], [20068, 20069, 20197], [20069, 20198, 20197], [20069, 20070, 20199], [20069, 20199, 
20198], [20070, 20071, 20199], [20071, 20200, 20199], [20071, 20072, 20201], [20071, 20201, 20200], [20072, 20073, 20201], [20073, 20202, 20201], [20073, 20074, 20203], [20073, 20203, 20202], [20074, 20075, 20203], [20075, 20204, 20203], [20075, 20076, 20205], [20075, 20205, 20204], [20076, 20077, 20205], [20077, 20206, 20205], [20077, 20078, 20207], [20077, 20207, 20206], [20078, 20079, 20207], [20079, 20208, 20207], [20079, 20080, 20209], [20079, 20209, 20208], [20080, 20081, 20209], [20081, 20210, 20209], [20081, 20082, 20211], [20081, 20211, 20210], [20082, 20083, 20211], [20083, 20212, 20211], [20083, 20084, 20213], [20083, 20213, 20212], [20084, 20085, 20213], [20085, 20214, 20213], [20085, 20086, 20215], [20085, 20215, 20214], [20086, 20087, 20215], [20087, 20216, 20215], [20087, 20088, 20217], [20087, 20217, 20216], [20088, 20089, 20217], [20089, 20218, 20217], [20089, 20090, 20219], [20089, 20219, 20218], [20090, 20091, 20219], [20091, 20220, 20219], [20091, 20092, 20221], [20091, 20221, 20220], [20092, 20093, 20221], [20093, 20222, 20221], [20093, 20094, 20223], [20093, 20223, 20222], [20094, 20095, 20223], [20095, 20224, 20223], [20095, 20096, 20225], [20095, 20225, 20224], [20096, 20097, 20225], [20097, 20226, 20225], [20097, 20098, 20227], [20097, 20227, 20226], [20098, 20099, 20227], [20099, 20228, 20227], [20099, 20100, 20229], [20099, 20229, 20228], [20100, 20101, 20229], [20101, 20230, 20229], [20101, 20102, 20231], [20101, 20231, 20230], [20102, 20103, 20231], [20103, 20232, 20231], [20103, 20104, 20233], [20103, 20233, 20232], [20104, 20105, 20233], [20105, 20234, 20233], [20105, 20106, 20235], [20105, 20235, 20234], [20106, 20107, 20235], [20107, 20236, 20235], [20107, 20108, 20237], [20107, 20237, 20236], [20108, 20109, 20237], [20109, 20238, 20237], [20109, 20110, 20239], [20109, 20239, 20238], [20110, 20111, 20239], [20111, 20240, 20239], [20111, 20112, 20241], [20111, 20241, 20240], [20112, 20113, 20241], [20113, 20242, 20241], [20113, 20114, 20243], [20113, 20243, 20242], [20114, 20115, 20243], [20115, 20244, 20243], [20115, 20116, 20245], [20115, 20245, 20244], [20116, 20117, 20245], [20117, 20246, 20245], [20117, 20118, 20247], [20117, 20247, 20246], [20118, 20119, 20247], [20119, 20248, 20247], [20119, 20120, 20249], [20119, 20249, 20248], [20120, 20121, 20249], [20121, 20250, 20249], [20121, 20122, 20251], [20121, 20251, 20250], [20122, 20123, 20251], [20123, 20252, 20251], [20123, 20124, 20253], [20123, 20253, 20252], [20124, 20125, 20253], [20125, 20254, 20253], [20125, 20126, 20255], [20125, 20255, 20254], [20126, 20127, 20255], [20127, 20256, 20255], [20127, 20128, 20257], [20127, 20257, 20256], [20128, 20129, 20257], [20129, 20258, 20257], [20129, 20130, 20259], [20129, 20259, 20258], [20130, 20131, 20259], [20131, 20260, 20259], [20131, 20132, 20261], [20131, 20261, 20260], [20132, 20133, 20261], [20133, 20262, 20261], [20133, 20134, 20263], [20133, 20263, 20262], [20134, 20135, 20263], [20135, 20264, 20263], [20135, 20136, 20265], [20135, 20265, 20264], [20136, 20137, 20265], [20137, 20266, 20265], [20137, 20138, 20267], [20137, 20267, 20266], [20138, 20139, 20267], [20139, 20268, 20267], [20139, 20140, 20269], [20139, 20269, 20268], [20140, 20141, 20269], [20141, 20270, 20269], [20141, 20142, 20271], [20141, 20271, 20270], [20142, 20143, 20271], [20143, 20272, 20271], [20143, 20144, 20273], [20143, 20273, 20272], [20144, 20145, 20273], [20145, 20274, 20273], [20145, 20146, 20275], [20145, 20275, 20274], [20146, 20147, 20275], [20147, 20276, 20275], 
[20147, 20148, 20277], [20147, 20277, 20276], [20148, 20149, 20277], [20149, 20278, 20277], [20149, 20150, 20279], [20149, 20279, 20278], [20150, 20151, 20279], [20151, 20280, 20279], [20151, 20152, 20281], [20151, 20281, 20280], [20152, 20153, 20281], [20153, 20282, 20281], [20153, 20154, 20283], [20153, 20283, 20282], [20154, 20155, 20283], [20155, 20284, 20283], [20155, 20156, 20285], [20155, 20285, 20284], [20156, 20157, 20285], [20157, 20286, 20285], [20157, 20158, 20287], [20157, 20287, 20286], [20158, 20159, 20287], [20159, 20288, 20287], [20159, 20160, 20289], [20159, 20289, 20288], [20160, 20161, 20289], [20161, 20290, 20289], [20161, 20162, 20291], [20161, 20291, 20290], [20162, 20163, 20291], [20163, 20292, 20291], [20163, 20164, 20293], [20163, 20293, 20292], [20164, 20165, 20293], [20165, 20294, 20293], [20165, 20166, 20295], [20165, 20295, 20294], [20166, 20167, 20295], [20167, 20296, 20295], [20167, 20168, 20297], [20167, 20297, 20296], [20168, 20169, 20297], [20169, 20298, 20297], [20170, 20171, 20299], [20171, 20300, 20299], [20171, 20172, 20301], [20171, 20301, 20300], [20172, 20173, 20301], [20173, 20302, 20301], [20173, 20174, 20303], [20173, 20303, 20302], [20174, 20175, 20303], [20175, 20304, 20303], [20175, 20176, 20305], [20175, 20305, 20304], [20176, 20177, 20305], [20177, 20306, 20305], [20177, 20178, 20307], [20177, 20307, 20306], [20178, 20179, 20307], [20179, 20308, 20307], [20179, 20180, 20309], [20179, 20309, 20308], [20180, 20181, 20309], [20181, 20310, 20309], [20181, 20182, 20311], [20181, 20311, 20310], [20182, 20183, 20311], [20183, 20312, 20311], [20183, 20184, 20313], [20183, 20313, 20312], [20184, 20185, 20313], [20185, 20314, 20313], [20185, 20186, 20315], [20185, 20315, 20314], [20186, 20187, 20315], [20187, 20316, 20315], [20187, 20188, 20317], [20187, 20317, 20316], [20188, 20189, 20317], [20189, 20318, 20317], [20189, 20190, 20319], [20189, 20319, 20318], [20190, 20191, 20319], [20191, 20320, 20319], [20191, 20192, 20321], [20191, 20321, 20320], [20192, 20193, 20321], [20193, 20322, 20321], [20193, 20194, 20323], [20193, 20323, 20322], [20194, 20195, 20323], [20195, 20324, 20323], [20195, 20196, 20325], [20195, 20325, 20324], [20196, 20197, 20325], [20197, 20326, 20325], [20197, 20198, 20327], [20197, 20327, 20326], [20198, 20199, 20327], [20199, 20328, 20327], [20199, 20200, 20329], [20199, 20329, 20328], [20200, 20201, 20329], [20201, 20330, 20329], [20201, 20202, 20331], [20201, 20331, 20330], [20202, 20203, 20331], [20203, 20332, 20331], [20203, 20204, 20333], [20203, 20333, 20332], [20204, 20205, 20333], [20205, 20334, 20333], [20205, 20206, 20335], [20205, 20335, 20334], [20206, 20207, 20335], [20207, 20336, 20335], [20207, 20208, 20337], [20207, 20337, 20336], [20208, 20209, 20337], [20209, 20338, 20337], [20209, 20210, 20339], [20209, 20339, 20338], [20210, 20211, 20339], [20211, 20340, 20339], [20211, 20212, 20341], [20211, 20341, 20340], [20212, 20213, 20341], [20213, 20342, 20341], [20213, 20214, 20343], [20213, 20343, 20342], [20214, 20215, 20343], [20215, 20344, 20343], [20215, 20216, 20345], [20215, 20345, 20344], [20216, 20217, 20345], [20217, 20346, 20345], [20217, 20218, 20347], [20217, 20347, 20346], [20218, 20219, 20347], [20219, 20348, 20347], [20219, 20220, 20349], [20219, 20349, 20348], [20220, 20221, 20349], [20221, 20350, 20349], [20221, 20222, 20351], [20221, 20351, 20350], [20222, 20223, 20351], [20223, 20352, 20351], [20223, 20224, 20353], [20223, 20353, 20352], [20224, 20225, 20353], [20225, 20354, 20353], [20225, 
20226, 20355], [20225, 20355, 20354], [20226, 20227, 20355], [20227, 20356, 20355], [20227, 20228, 20357], [20227, 20357, 20356], [20228, 20229, 20357], [20229, 20358, 20357], [20229, 20230, 20359], [20229, 20359, 20358], [20230, 20231, 20359], [20231, 20360, 20359], [20231, 20232, 20361], [20231, 20361, 20360], [20232, 20233, 20361], [20233, 20362, 20361], [20233, 20234, 20363], [20233, 20363, 20362], [20234, 20235, 20363], [20235, 20364, 20363], [20235, 20236, 20365], [20235, 20365, 20364], [20236, 20237, 20365], [20237, 20366, 20365], [20237, 20238, 20367], [20237, 20367, 20366], [20238, 20239, 20367], [20239, 20368, 20367], [20239, 20240, 20369], [20239, 20369, 20368], [20240, 20241, 20369], [20241, 20370, 20369], [20241, 20242, 20371], [20241, 20371, 20370], [20242, 20243, 20371], [20243, 20372, 20371], [20243, 20244, 20373], [20243, 20373, 20372], [20244, 20245, 20373], [20245, 20374, 20373], [20245, 20246, 20375], [20245, 20375, 20374], [20246, 20247, 20375], [20247, 20376, 20375], [20247, 20248, 20377], [20247, 20377, 20376], [20248, 20249, 20377], [20249, 20378, 20377], [20249, 20250, 20379], [20249, 20379, 20378], [20250, 20251, 20379], [20251, 20380, 20379], [20251, 20252, 20381], [20251, 20381, 20380], [20252, 20253, 20381], [20253, 20382, 20381], [20253, 20254, 20383], [20253, 20383, 20382], [20254, 20255, 20383], [20255, 20384, 20383], [20255, 20256, 20385], [20255, 20385, 20384], [20256, 20257, 20385], [20257, 20386, 20385], [20257, 20258, 20387], [20257, 20387, 20386], [20258, 20259, 20387], [20259, 20388, 20387], [20259, 20260, 20389], [20259, 20389, 20388], [20260, 20261, 20389], [20261, 20390, 20389], [20261, 20262, 20391], [20261, 20391, 20390], [20262, 20263, 20391], [20263, 20392, 20391], [20263, 20264, 20393], [20263, 20393, 20392], [20264, 20265, 20393], [20265, 20394, 20393], [20265, 20266, 20395], [20265, 20395, 20394], [20266, 20267, 20395], [20267, 20396, 20395], [20267, 20268, 20397], [20267, 20397, 20396], [20268, 20269, 20397], [20269, 20398, 20397], [20269, 20270, 20399], [20269, 20399, 20398], [20270, 20271, 20399], [20271, 20400, 20399], [20271, 20272, 20401], [20271, 20401, 20400], [20272, 20273, 20401], [20273, 20402, 20401], [20273, 20274, 20403], [20273, 20403, 20402], [20274, 20275, 20403], [20275, 20404, 20403], [20275, 20276, 20405], [20275, 20405, 20404], [20276, 20277, 20405], [20277, 20406, 20405], [20277, 20278, 20407], [20277, 20407, 20406], [20278, 20279, 20407], [20279, 20408, 20407], [20279, 20280, 20409], [20279, 20409, 20408], [20280, 20281, 20409], [20281, 20410, 20409], [20281, 20282, 20411], [20281, 20411, 20410], [20282, 20283, 20411], [20283, 20412, 20411], [20283, 20284, 20413], [20283, 20413, 20412], [20284, 20285, 20413], [20285, 20414, 20413], [20285, 20286, 20415], [20285, 20415, 20414], [20286, 20287, 20415], [20287, 20416, 20415], [20287, 20288, 20417], [20287, 20417, 20416], [20288, 20289, 20417], [20289, 20418, 20417], [20289, 20290, 20419], [20289, 20419, 20418], [20290, 20291, 20419], [20291, 20420, 20419], [20291, 20292, 20421], [20291, 20421, 20420], [20292, 20293, 20421], [20293, 20422, 20421], [20293, 20294, 20423], [20293, 20423, 20422], [20294, 20295, 20423], [20295, 20424, 20423], [20295, 20296, 20425], [20295, 20425, 20424], [20296, 20297, 20425], [20297, 20426, 20425], [20297, 20298, 20427], [20297, 20427, 20426], [20299, 20300, 20429], [20299, 20429, 20428], [20300, 20301, 20429], [20301, 20430, 20429], [20301, 20302, 20431], [20301, 20431, 20430], [20302, 20303, 20431], [20303, 20432, 20431], [20303, 20304, 
20433], [20303, 20433, 20432], [20304, 20305, 20433], [20305, 20434, 20433], [20305, 20306, 20435], [20305, 20435, 20434], [20306, 20307, 20435], [20307, 20436, 20435], [20307, 20308, 20437], [20307, 20437, 20436], [20308, 20309, 20437], [20309, 20438, 20437], [20309, 20310, 20439], [20309, 20439, 20438], [20310, 20311, 20439], [20311, 20440, 20439], [20311, 20312, 20441], [20311, 20441, 20440], [20312, 20313, 20441], [20313, 20442, 20441], [20313, 20314, 20443], [20313, 20443, 20442], [20314, 20315, 20443], [20315, 20444, 20443], [20315, 20316, 20445], [20315, 20445, 20444], [20316, 20317, 20445], [20317, 20446, 20445], [20317, 20318, 20447], [20317, 20447, 20446], [20318, 20319, 20447], [20319, 20448, 20447], [20319, 20320, 20449], [20319, 20449, 20448], [20320, 20321, 20449], [20321, 20450, 20449], [20321, 20322, 20451], [20321, 20451, 20450], [20322, 20323, 20451], [20323, 20452, 20451], [20323, 20324, 20453], [20323, 20453, 20452], [20324, 20325, 20453], [20325, 20454, 20453], [20325, 20326, 20455], [20325, 20455, 20454], [20326, 20327, 20455], [20327, 20456, 20455], [20327, 20328, 20457], [20327, 20457, 20456], [20328, 20329, 20457], [20329, 20458, 20457], [20329, 20330, 20459], [20329, 20459, 20458], [20330, 20331, 20459], [20331, 20460, 20459], [20331, 20332, 20461], [20331, 20461, 20460], [20332, 20333, 20461], [20333, 20462, 20461], [20333, 20334, 20463], [20333, 20463, 20462], [20334, 20335, 20463], [20335, 20464, 20463], [20335, 20336, 20465], [20335, 20465, 20464], [20336, 20337, 20465], [20337, 20466, 20465], [20337, 20338, 20467], [20337, 20467, 20466], [20338, 20339, 20467], [20339, 20468, 20467], [20339, 20340, 20469], [20339, 20469, 20468], [20340, 20341, 20469], [20341, 20470, 20469], [20341, 20342, 20471], [20341, 20471, 20470], [20342, 20343, 20471], [20343, 20472, 20471], [20343, 20344, 20473], [20343, 20473, 20472], [20344, 20345, 20473], [20345, 20474, 20473], [20345, 20346, 20475], [20345, 20475, 20474], [20346, 20347, 20475], [20347, 20476, 20475], [20347, 20348, 20477], [20347, 20477, 20476], [20348, 20349, 20477], [20349, 20478, 20477], [20349, 20350, 20479], [20349, 20479, 20478], [20350, 20351, 20479], [20351, 20480, 20479], [20351, 20352, 20481], [20351, 20481, 20480], [20352, 20353, 20481], [20353, 20482, 20481], [20353, 20354, 20483], [20353, 20483, 20482], [20354, 20355, 20483], [20355, 20484, 20483], [20355, 20356, 20485], [20355, 20485, 20484], [20356, 20357, 20485], [20357, 20486, 20485], [20357, 20358, 20487], [20357, 20487, 20486], [20358, 20359, 20487], [20359, 20488, 20487], [20359, 20360, 20489], [20359, 20489, 20488], [20360, 20361, 20489], [20361, 20490, 20489], [20361, 20362, 20491], [20361, 20491, 20490], [20362, 20363, 20491], [20363, 20492, 20491], [20363, 20364, 20493], [20363, 20493, 20492], [20364, 20365, 20493], [20365, 20494, 20493], [20365, 20366, 20495], [20365, 20495, 20494], [20366, 20367, 20495], [20367, 20496, 20495], [20367, 20368, 20497], [20367, 20497, 20496], [20368, 20369, 20497], [20369, 20498, 20497], [20369, 20370, 20499], [20369, 20499, 20498], [20370, 20371, 20499], [20371, 20500, 20499], [20371, 20372, 20501], [20371, 20501, 20500], [20372, 20373, 20501], [20373, 20502, 20501], [20373, 20374, 20503], [20373, 20503, 20502], [20374, 20375, 20503], [20375, 20504, 20503], [20375, 20376, 20505], [20375, 20505, 20504], [20376, 20377, 20505], [20377, 20506, 20505], [20377, 20378, 20507], [20377, 20507, 20506], [20378, 20379, 20507], [20379, 20508, 20507], [20379, 20380, 20509], [20379, 20509, 20508], [20380, 20381, 20509], 
[20381, 20510, 20509], [20381, 20382, 20511], [20381, 20511, 20510], [20382, 20383, 20511], [20383, 20512, 20511], [20383, 20384, 20513], [20383, 20513, 20512], [20384, 20385, 20513], [20385, 20514, 20513], [20385, 20386, 20515], [20385, 20515, 20514], [20386, 20387, 20515], [20387, 20516, 20515], [20387, 20388, 20517], [20387, 20517, 20516], [20388, 20389, 20517], [20389, 20518, 20517], [20389, 20390, 20519], [20389, 20519, 20518], [20390, 20391, 20519], [20391, 20520, 20519], [20391, 20392, 20521], [20391, 20521, 20520], [20392, 20393, 20521], [20393, 20522, 20521], [20393, 20394, 20523], [20393, 20523, 20522], [20394, 20395, 20523], [20395, 20524, 20523], [20395, 20396, 20525], [20395, 20525, 20524], [20396, 20397, 20525], [20397, 20526, 20525], [20397, 20398, 20527], [20397, 20527, 20526], [20398, 20399, 20527], [20399, 20528, 20527], [20399, 20400, 20529], [20399, 20529, 20528], [20400, 20401, 20529], [20401, 20530, 20529], [20401, 20402, 20531], [20401, 20531, 20530], [20402, 20403, 20531], [20403, 20532, 20531], [20403, 20404, 20533], [20403, 20533, 20532], [20404, 20405, 20533], [20405, 20534, 20533], [20405, 20406, 20535], [20405, 20535, 20534], [20406, 20407, 20535], [20407, 20536, 20535], [20407, 20408, 20537], [20407, 20537, 20536], [20408, 20409, 20537], [20409, 20538, 20537], [20409, 20410, 20539], [20409, 20539, 20538], [20410, 20411, 20539], [20411, 20540, 20539], [20411, 20412, 20541], [20411, 20541, 20540], [20412, 20413, 20541], [20413, 20542, 20541], [20413, 20414, 20543], [20413, 20543, 20542], [20414, 20415, 20543], [20415, 20544, 20543], [20415, 20416, 20545], [20415, 20545, 20544], [20416, 20417, 20545], [20417, 20546, 20545], [20417, 20418, 20547], [20417, 20547, 20546], [20418, 20419, 20547], [20419, 20548, 20547], [20419, 20420, 20549], [20419, 20549, 20548], [20420, 20421, 20549], [20421, 20550, 20549], [20421, 20422, 20551], [20421, 20551, 20550], [20422, 20423, 20551], [20423, 20552, 20551], [20423, 20424, 20553], [20423, 20553, 20552], [20424, 20425, 20553], [20425, 20554, 20553], [20425, 20426, 20555], [20425, 20555, 20554], [20426, 20427, 20555], [20427, 20556, 20555], [20428, 20429, 20557], [20429, 20558, 20557], [20429, 20430, 20559], [20429, 20559, 20558], [20430, 20431, 20559], [20431, 20560, 20559], [20431, 20432, 20561], [20431, 20561, 20560], [20432, 20433, 20561], [20433, 20562, 20561], [20433, 20434, 20563], [20433, 20563, 20562], [20434, 20435, 20563], [20435, 20564, 20563], [20435, 20436, 20565], [20435, 20565, 20564], [20436, 20437, 20565], [20437, 20566, 20565], [20437, 20438, 20567], [20437, 20567, 20566], [20438, 20439, 20567], [20439, 20568, 20567], [20439, 20440, 20569], [20439, 20569, 20568], [20440, 20441, 20569], [20441, 20570, 20569], [20441, 20442, 20571], [20441, 20571, 20570], [20442, 20443, 20571], [20443, 20572, 20571], [20443, 20444, 20573], [20443, 20573, 20572], [20444, 20445, 20573], [20445, 20574, 20573], [20445, 20446, 20575], [20445, 20575, 20574], [20446, 20447, 20575], [20447, 20576, 20575], [20447, 20448, 20577], [20447, 20577, 20576], [20448, 20449, 20577], [20449, 20578, 20577], [20449, 20450, 20579], [20449, 20579, 20578], [20450, 20451, 20579], [20451, 20580, 20579], [20451, 20452, 20581], [20451, 20581, 20580], [20452, 20453, 20581], [20453, 20582, 20581], [20453, 20454, 20583], [20453, 20583, 20582], [20454, 20455, 20583], [20455, 20584, 20583], [20455, 20456, 20585], [20455, 20585, 20584], [20456, 20457, 20585], [20457, 20586, 20585], [20457, 20458, 20587], [20457, 20587, 20586], [20458, 20459, 20587], [20459, 
20588, 20587], [20459, 20460, 20589], [20459, 20589, 20588], [20460, 20461, 20589], [20461, 20590, 20589], [20461, 20462, 20591], [20461, 20591, 20590], [20462, 20463, 20591], [20463, 20592, 20591], [20463, 20464, 20593], [20463, 20593, 20592], [20464, 20465, 20593], [20465, 20594, 20593], [20465, 20466, 20595], [20465, 20595, 20594], [20466, 20467, 20595], [20467, 20596, 20595], [20467, 20468, 20597], [20467, 20597, 20596], [20468, 20469, 20597], [20469, 20598, 20597], [20469, 20470, 20599], [20469, 20599, 20598], [20470, 20471, 20599], [20471, 20600, 20599], [20471, 20472, 20601], [20471, 20601, 20600], [20472, 20473, 20601], [20473, 20602, 20601], [20473, 20474, 20603], [20473, 20603, 20602], [20474, 20475, 20603], [20475, 20604, 20603], [20475, 20476, 20605], [20475, 20605, 20604], [20476, 20477, 20605], [20477, 20606, 20605], [20477, 20478, 20607], [20477, 20607, 20606], [20478, 20479, 20607], [20479, 20608, 20607], [20479, 20480, 20609], [20479, 20609, 20608], [20480, 20481, 20609], [20481, 20610, 20609], [20481, 20482, 20611], [20481, 20611, 20610], [20482, 20483, 20611], [20483, 20612, 20611], [20483, 20484, 20613], [20483, 20613, 20612], [20484, 20485, 20613], [20485, 20614, 20613], [20485, 20486, 20615], [20485, 20615, 20614], [20486, 20487, 20615], [20487, 20616, 20615], [20487, 20488, 20617], [20487, 20617, 20616], [20488, 20489, 20617], [20489, 20618, 20617], [20489, 20490, 20619], [20489, 20619, 20618], [20490, 20491, 20619], [20491, 20620, 20619], [20491, 20492, 20621], [20491, 20621, 20620], [20492, 20493, 20621], [20493, 20622, 20621], [20493, 20494, 20623], [20493, 20623, 20622], [20494, 20495, 20623], [20495, 20624, 20623], [20495, 20496, 20625], [20495, 20625, 20624], [20496, 20497, 20625], [20497, 20626, 20625], [20497, 20498, 20627], [20497, 20627, 20626], [20498, 20499, 20627], [20499, 20628, 20627], [20499, 20500, 20629], [20499, 20629, 20628], [20500, 20501, 20629], [20501, 20630, 20629], [20501, 20502, 20631], [20501, 20631, 20630], [20502, 20503, 20631], [20503, 20632, 20631], [20503, 20504, 20633], [20503, 20633, 20632], [20504, 20505, 20633], [20505, 20634, 20633], [20505, 20506, 20635], [20505, 20635, 20634], [20506, 20507, 20635], [20507, 20636, 20635], [20507, 20508, 20637], [20507, 20637, 20636], [20508, 20509, 20637], [20509, 20638, 20637], [20509, 20510, 20639], [20509, 20639, 20638], [20510, 20511, 20639], [20511, 20640, 20639], [20511, 20512, 20641], [20511, 20641, 20640], [20512, 20513, 20641], [20513, 20642, 20641], [20513, 20514, 20643], [20513, 20643, 20642], [20514, 20515, 20643], [20515, 20644, 20643], [20515, 20516, 20645], [20515, 20645, 20644], [20516, 20517, 20645], [20517, 20646, 20645], [20517, 20518, 20647], [20517, 20647, 20646], [20518, 20519, 20647], [20519, 20648, 20647], [20519, 20520, 20649], [20519, 20649, 20648], [20520, 20521, 20649], [20521, 20650, 20649], [20521, 20522, 20651], [20521, 20651, 20650], [20522, 20523, 20651], [20523, 20652, 20651], [20523, 20524, 20653], [20523, 20653, 20652], [20524, 20525, 20653], [20525, 20654, 20653], [20525, 20526, 20655], [20525, 20655, 20654], [20526, 20527, 20655], [20527, 20656, 20655], [20527, 20528, 20657], [20527, 20657, 20656], [20528, 20529, 20657], [20529, 20658, 20657], [20529, 20530, 20659], [20529, 20659, 20658], [20530, 20531, 20659], [20531, 20660, 20659], [20531, 20532, 20661], [20531, 20661, 20660], [20532, 20533, 20661], [20533, 20662, 20661], [20533, 20534, 20663], [20533, 20663, 20662], [20534, 20535, 20663], [20535, 20664, 20663], [20535, 20536, 20665], [20535, 20665, 
20664], [20536, 20537, 20665], [20537, 20666, 20665], [20537, 20538, 20667], [20537, 20667, 20666], [20538, 20539, 20667], [20539, 20668, 20667], [20539, 20540, 20669], [20539, 20669, 20668], [20540, 20541, 20669], [20541, 20670, 20669], [20541, 20542, 20671], [20541, 20671, 20670], [20542, 20543, 20671], [20543, 20672, 20671], [20543, 20544, 20673], [20543, 20673, 20672], [20544, 20545, 20673], [20545, 20674, 20673], [20545, 20546, 20675], [20545, 20675, 20674], [20546, 20547, 20675], [20547, 20676, 20675], [20547, 20548, 20677], [20547, 20677, 20676], [20548, 20549, 20677], [20549, 20678, 20677], [20549, 20550, 20679], [20549, 20679, 20678], [20550, 20551, 20679], [20551, 20680, 20679], [20551, 20552, 20681], [20551, 20681, 20680], [20552, 20553, 20681], [20553, 20682, 20681], [20553, 20554, 20683], [20553, 20683, 20682], [20554, 20555, 20683], [20555, 20684, 20683], [20555, 20556, 20685], [20555, 20685, 20684], [20557, 20558, 20687], [20557, 20687, 20686], [20558, 20559, 20687], [20559, 20688, 20687], [20559, 20560, 20689], [20559, 20689, 20688], [20560, 20561, 20689], [20561, 20690, 20689], [20561, 20562, 20691], [20561, 20691, 20690], [20562, 20563, 20691], [20563, 20692, 20691], [20563, 20564, 20693], [20563, 20693, 20692], [20564, 20565, 20693], [20565, 20694, 20693], [20565, 20566, 20695], [20565, 20695, 20694], [20566, 20567, 20695], [20567, 20696, 20695], [20567, 20568, 20697], [20567, 20697, 20696], [20568, 20569, 20697], [20569, 20698, 20697], [20569, 20570, 20699], [20569, 20699, 20698], [20570, 20571, 20699], [20571, 20700, 20699], [20571, 20572, 20701], [20571, 20701, 20700], [20572, 20573, 20701], [20573, 20702, 20701], [20573, 20574, 20703], [20573, 20703, 20702], [20574, 20575, 20703], [20575, 20704, 20703], [20575, 20576, 20705], [20575, 20705, 20704], [20576, 20577, 20705], [20577, 20706, 20705], [20577, 20578, 20707], [20577, 20707, 20706], [20578, 20579, 20707], [20579, 20708, 20707], [20579, 20580, 20709], [20579, 20709, 20708], [20580, 20581, 20709], [20581, 20710, 20709], [20581, 20582, 20711], [20581, 20711, 20710], [20582, 20583, 20711], [20583, 20712, 20711], [20583, 20584, 20713], [20583, 20713, 20712], [20584, 20585, 20713], [20585, 20714, 20713], [20585, 20586, 20715], [20585, 20715, 20714], [20586, 20587, 20715], [20587, 20716, 20715], [20587, 20588, 20717], [20587, 20717, 20716], [20588, 20589, 20717], [20589, 20718, 20717], [20589, 20590, 20719], [20589, 20719, 20718], [20590, 20591, 20719], [20591, 20720, 20719], [20591, 20592, 20721], [20591, 20721, 20720], [20592, 20593, 20721], [20593, 20722, 20721], [20593, 20594, 20723], [20593, 20723, 20722], [20594, 20595, 20723], [20595, 20724, 20723], [20595, 20596, 20725], [20595, 20725, 20724], [20596, 20597, 20725], [20597, 20726, 20725], [20597, 20598, 20727], [20597, 20727, 20726], [20598, 20599, 20727], [20599, 20728, 20727], [20599, 20600, 20729], [20599, 20729, 20728], [20600, 20601, 20729], [20601, 20730, 20729], [20601, 20602, 20731], [20601, 20731, 20730], [20602, 20603, 20731], [20603, 20732, 20731], [20603, 20604, 20733], [20603, 20733, 20732], [20604, 20605, 20733], [20605, 20734, 20733], [20605, 20606, 20735], [20605, 20735, 20734], [20606, 20607, 20735], [20607, 20736, 20735], [20607, 20608, 20737], [20607, 20737, 20736], [20608, 20609, 20737], [20609, 20738, 20737], [20609, 20610, 20739], [20609, 20739, 20738], [20610, 20611, 20739], [20611, 20740, 20739], [20611, 20612, 20741], [20611, 20741, 20740], [20612, 20613, 20741], [20613, 20742, 20741], [20613, 20614, 20743], [20613, 20743, 20742], 
[20614, 20615, 20743], [20615, 20744, 20743], [20615, 20616, 20745], [20615, 20745, 20744], [20616, 20617, 20745], [20617, 20746, 20745], [20617, 20618, 20747], [20617, 20747, 20746], [20618, 20619, 20747], [20619, 20748, 20747], [20619, 20620, 20749], [20619, 20749, 20748], [20620, 20621, 20749], [20621, 20750, 20749], [20621, 20622, 20751], [20621, 20751, 20750], [20622, 20623, 20751], [20623, 20752, 20751], [20623, 20624, 20753], [20623, 20753, 20752], [20624, 20625, 20753], [20625, 20754, 20753], [20625, 20626, 20755], [20625, 20755, 20754], [20626, 20627, 20755], [20627, 20756, 20755], [20627, 20628, 20757], [20627, 20757, 20756], [20628, 20629, 20757], [20629, 20758, 20757], [20629, 20630, 20759], [20629, 20759, 20758], [20630, 20631, 20759], [20631, 20760, 20759], [20631, 20632, 20761], [20631, 20761, 20760], [20632, 20633, 20761], [20633, 20762, 20761], [20633, 20634, 20763], [20633, 20763, 20762], [20634, 20635, 20763], [20635, 20764, 20763], [20635, 20636, 20765], [20635, 20765, 20764], [20636, 20637, 20765], [20637, 20766, 20765], [20637, 20638, 20767], [20637, 20767, 20766], [20638, 20639, 20767], [20639, 20768, 20767], [20639, 20640, 20769], [20639, 20769, 20768], [20640, 20641, 20769], [20641, 20770, 20769], [20641, 20642, 20771], [20641, 20771, 20770], [20642, 20643, 20771], [20643, 20772, 20771], [20643, 20644, 20773], [20643, 20773, 20772], [20644, 20645, 20773], [20645, 20774, 20773], [20645, 20646, 20775], [20645, 20775, 20774], [20646, 20647, 20775], [20647, 20776, 20775], [20647, 20648, 20777], [20647, 20777, 20776], [20648, 20649, 20777], [20649, 20778, 20777], [20649, 20650, 20779], [20649, 20779, 20778], [20650, 20651, 20779], [20651, 20780, 20779], [20651, 20652, 20781], [20651, 20781, 20780], [20652, 20653, 20781], [20653, 20782, 20781], [20653, 20654, 20783], [20653, 20783, 20782], [20654, 20655, 20783], [20655, 20784, 20783], [20655, 20656, 20785], [20655, 20785, 20784], [20656, 20657, 20785], [20657, 20786, 20785], [20657, 20658, 20787], [20657, 20787, 20786], [20658, 20659, 20787], [20659, 20788, 20787], [20659, 20660, 20789], [20659, 20789, 20788], [20660, 20661, 20789], [20661, 20790, 20789], [20661, 20662, 20791], [20661, 20791, 20790], [20662, 20663, 20791], [20663, 20792, 20791], [20663, 20664, 20793], [20663, 20793, 20792], [20664, 20665, 20793], [20665, 20794, 20793], [20665, 20666, 20795], [20665, 20795, 20794], [20666, 20667, 20795], [20667, 20796, 20795], [20667, 20668, 20797], [20667, 20797, 20796], [20668, 20669, 20797], [20669, 20798, 20797], [20669, 20670, 20799], [20669, 20799, 20798], [20670, 20671, 20799], [20671, 20800, 20799], [20671, 20672, 20801], [20671, 20801, 20800], [20672, 20673, 20801], [20673, 20802, 20801], [20673, 20674, 20803], [20673, 20803, 20802], [20674, 20675, 20803], [20675, 20804, 20803], [20675, 20676, 20805], [20675, 20805, 20804], [20676, 20677, 20805], [20677, 20806, 20805], [20677, 20678, 20807], [20677, 20807, 20806], [20678, 20679, 20807], [20679, 20808, 20807], [20679, 20680, 20809], [20679, 20809, 20808], [20680, 20681, 20809], [20681, 20810, 20809], [20681, 20682, 20811], [20681, 20811, 20810], [20682, 20683, 20811], [20683, 20812, 20811], [20683, 20684, 20813], [20683, 20813, 20812], [20684, 20685, 20813], [20685, 20814, 20813], [20686, 20687, 20815], [20687, 20816, 20815], [20687, 20688, 20817], [20687, 20817, 20816], [20688, 20689, 20817], [20689, 20818, 20817], [20689, 20690, 20819], [20689, 20819, 20818], [20690, 20691, 20819], [20691, 20820, 20819], [20691, 20692, 20821], [20691, 20821, 20820], [20692, 
20693, 20821], [20693, 20822, 20821], [20693, 20694, 20823], [20693, 20823, 20822], [20694, 20695, 20823], [20695, 20824, 20823], [20695, 20696, 20825], [20695, 20825, 20824], [20696, 20697, 20825], [20697, 20826, 20825], [20697, 20698, 20827], [20697, 20827, 20826], [20698, 20699, 20827], [20699, 20828, 20827], [20699, 20700, 20829], [20699, 20829, 20828], [20700, 20701, 20829], [20701, 20830, 20829], [20701, 20702, 20831], [20701, 20831, 20830], [20702, 20703, 20831], [20703, 20832, 20831], [20703, 20704, 20833], [20703, 20833, 20832], [20704, 20705, 20833], [20705, 20834, 20833], [20705, 20706, 20835], [20705, 20835, 20834], [20706, 20707, 20835], [20707, 20836, 20835], [20707, 20708, 20837], [20707, 20837, 20836], [20708, 20709, 20837], [20709, 20838, 20837], [20709, 20710, 20839], [20709, 20839, 20838], [20710, 20711, 20839], [20711, 20840, 20839], [20711, 20712, 20841], [20711, 20841, 20840], [20712, 20713, 20841], [20713, 20842, 20841], [20713, 20714, 20843], [20713, 20843, 20842], [20714, 20715, 20843], [20715, 20844, 20843], [20715, 20716, 20845], [20715, 20845, 20844], [20716, 20717, 20845], [20717, 20846, 20845], [20717, 20718, 20847], [20717, 20847, 20846], [20718, 20719, 20847], [20719, 20848, 20847], [20719, 20720, 20849], [20719, 20849, 20848], [20720, 20721, 20849], [20721, 20850, 20849], [20721, 20722, 20851], [20721, 20851, 20850], [20722, 20723, 20851], [20723, 20852, 20851], [20723, 20724, 20853], [20723, 20853, 20852], [20724, 20725, 20853], [20725, 20854, 20853], [20725, 20726, 20855], [20725, 20855, 20854], [20726, 20727, 20855], [20727, 20856, 20855], [20727, 20728, 20857], [20727, 20857, 20856], [20728, 20729, 20857], [20729, 20858, 20857], [20729, 20730, 20859], [20729, 20859, 20858], [20730, 20731, 20859], [20731, 20860, 20859], [20731, 20732, 20861], [20731, 20861, 20860], [20732, 20733, 20861], [20733, 20862, 20861], [20733, 20734, 20863], [20733, 20863, 20862], [20734, 20735, 20863], [20735, 20864, 20863], [20735, 20736, 20865], [20735, 20865, 20864], [20736, 20737, 20865], [20737, 20866, 20865], [20737, 20738, 20867], [20737, 20867, 20866], [20738, 20739, 20867], [20739, 20868, 20867], [20739, 20740, 20869], [20739, 20869, 20868], [20740, 20741, 20869], [20741, 20870, 20869], [20741, 20742, 20871], [20741, 20871, 20870], [20742, 20743, 20871], [20743, 20872, 20871], [20743, 20744, 20873], [20743, 20873, 20872], [20744, 20745, 20873], [20745, 20874, 20873], [20745, 20746, 20875], [20745, 20875, 20874], [20746, 20747, 20875], [20747, 20876, 20875], [20747, 20748, 20877], [20747, 20877, 20876], [20748, 20749, 20877], [20749, 20878, 20877], [20749, 20750, 20879], [20749, 20879, 20878], [20750, 20751, 20879], [20751, 20880, 20879], [20751, 20752, 20881], [20751, 20881, 20880], [20752, 20753, 20881], [20753, 20882, 20881], [20753, 20754, 20883], [20753, 20883, 20882], [20754, 20755, 20883], [20755, 20884, 20883], [20755, 20756, 20885], [20755, 20885, 20884], [20756, 20757, 20885], [20757, 20886, 20885], [20757, 20758, 20887], [20757, 20887, 20886], [20758, 20759, 20887], [20759, 20888, 20887], [20759, 20760, 20889], [20759, 20889, 20888], [20760, 20761, 20889], [20761, 20890, 20889], [20761, 20762, 20891], [20761, 20891, 20890], [20762, 20763, 20891], [20763, 20892, 20891], [20763, 20764, 20893], [20763, 20893, 20892], [20764, 20765, 20893], [20765, 20894, 20893], [20765, 20766, 20895], [20765, 20895, 20894], [20766, 20767, 20895], [20767, 20896, 20895], [20767, 20768, 20897], [20767, 20897, 20896], [20768, 20769, 20897], [20769, 20898, 20897], [20769, 20770, 
20899], [20769, 20899, 20898], [20770, 20771, 20899], [20771, 20900, 20899], [20771, 20772, 20901], [20771, 20901, 20900], [20772, 20773, 20901], [20773, 20902, 20901], [20773, 20774, 20903], [20773, 20903, 20902], [20774, 20775, 20903], [20775, 20904, 20903], [20775, 20776, 20905], [20775, 20905, 20904], [20776, 20777, 20905], [20777, 20906, 20905], [20777, 20778, 20907], [20777, 20907, 20906], [20778, 20779, 20907], [20779, 20908, 20907], [20779, 20780, 20909], [20779, 20909, 20908], [20780, 20781, 20909], [20781, 20910, 20909], [20781, 20782, 20911], [20781, 20911, 20910], [20782, 20783, 20911], [20783, 20912, 20911], [20783, 20784, 20913], [20783, 20913, 20912], [20784, 20785, 20913], [20785, 20914, 20913], [20785, 20786, 20915], [20785, 20915, 20914], [20786, 20787, 20915], [20787, 20916, 20915], [20787, 20788, 20917], [20787, 20917, 20916], [20788, 20789, 20917], [20789, 20918, 20917], [20789, 20790, 20919], [20789, 20919, 20918], [20790, 20791, 20919], [20791, 20920, 20919], [20791, 20792, 20921], [20791, 20921, 20920], [20792, 20793, 20921], [20793, 20922, 20921], [20793, 20794, 20923], [20793, 20923, 20922], [20794, 20795, 20923], [20795, 20924, 20923], [20795, 20796, 20925], [20795, 20925, 20924], [20796, 20797, 20925], [20797, 20926, 20925], [20797, 20798, 20927], [20797, 20927, 20926], [20798, 20799, 20927], [20799, 20928, 20927], [20799, 20800, 20929], [20799, 20929, 20928], [20800, 20801, 20929], [20801, 20930, 20929], [20801, 20802, 20931], [20801, 20931, 20930], [20802, 20803, 20931], [20803, 20932, 20931], [20803, 20804, 20933], [20803, 20933, 20932], [20804, 20805, 20933], [20805, 20934, 20933], [20805, 20806, 20935], [20805, 20935, 20934], [20806, 20807, 20935], [20807, 20936, 20935], [20807, 20808, 20937], [20807, 20937, 20936], [20808, 20809, 20937], [20809, 20938, 20937], [20809, 20810, 20939], [20809, 20939, 20938], [20810, 20811, 20939], [20811, 20940, 20939], [20811, 20812, 20941], [20811, 20941, 20940], [20812, 20813, 20941], [20813, 20942, 20941], [20813, 20814, 20943], [20813, 20943, 20942], [20815, 20816, 20945], [20815, 20945, 20944], [20816, 20817, 20945], [20817, 20946, 20945], [20817, 20818, 20947], [20817, 20947, 20946], [20818, 20819, 20947], [20819, 20948, 20947], [20819, 20820, 20949], [20819, 20949, 20948], [20820, 20821, 20949], [20821, 20950, 20949], [20821, 20822, 20951], [20821, 20951, 20950], [20822, 20823, 20951], [20823, 20952, 20951], [20823, 20824, 20953], [20823, 20953, 20952], [20824, 20825, 20953], [20825, 20954, 20953], [20825, 20826, 20955], [20825, 20955, 20954], [20826, 20827, 20955], [20827, 20956, 20955], [20827, 20828, 20957], [20827, 20957, 20956], [20828, 20829, 20957], [20829, 20958, 20957], [20829, 20830, 20959], [20829, 20959, 20958], [20830, 20831, 20959], [20831, 20960, 20959], [20831, 20832, 20961], [20831, 20961, 20960], [20832, 20833, 20961], [20833, 20962, 20961], [20833, 20834, 20963], [20833, 20963, 20962], [20834, 20835, 20963], [20835, 20964, 20963], [20835, 20836, 20965], [20835, 20965, 20964], [20836, 20837, 20965], [20837, 20966, 20965], [20837, 20838, 20967], [20837, 20967, 20966], [20838, 20839, 20967], [20839, 20968, 20967], [20839, 20840, 20969], [20839, 20969, 20968], [20840, 20841, 20969], [20841, 20970, 20969], [20841, 20842, 20971], [20841, 20971, 20970], [20842, 20843, 20971], [20843, 20972, 20971], [20843, 20844, 20973], [20843, 20973, 20972], [20844, 20845, 20973], [20845, 20974, 20973], [20845, 20846, 20975], [20845, 20975, 20974], [20846, 20847, 20975], [20847, 20976, 20975], [20847, 20848, 20977], 
[20847, 20977, 20976], [20848, 20849, 20977], [20849, 20978, 20977], [20849, 20850, 20979], [20849, 20979, 20978], [20850, 20851, 20979], [20851, 20980, 20979], [20851, 20852, 20981], [20851, 20981, 20980], [20852, 20853, 20981], [20853, 20982, 20981], [20853, 20854, 20983], [20853, 20983, 20982], [20854, 20855, 20983], [20855, 20984, 20983], [20855, 20856, 20985], [20855, 20985, 20984], [20856, 20857, 20985], [20857, 20986, 20985], [20857, 20858, 20987], [20857, 20987, 20986], [20858, 20859, 20987], [20859, 20988, 20987], [20859, 20860, 20989], [20859, 20989, 20988], [20860, 20861, 20989], [20861, 20990, 20989], [20861, 20862, 20991], [20861, 20991, 20990], [20862, 20863, 20991], [20863, 20992, 20991], [20863, 20864, 20993], [20863, 20993, 20992], [20864, 20865, 20993], [20865, 20994, 20993], [20865, 20866, 20995], [20865, 20995, 20994], [20866, 20867, 20995], [20867, 20996, 20995], [20867, 20868, 20997], [20867, 20997, 20996], [20868, 20869, 20997], [20869, 20998, 20997], [20869, 20870, 20999], [20869, 20999, 20998], [20870, 20871, 20999], [20871, 21000, 20999], [20871, 20872, 21001], [20871, 21001, 21000], [20872, 20873, 21001], [20873, 21002, 21001], [20873, 20874, 21003], [20873, 21003, 21002], [20874, 20875, 21003], [20875, 21004, 21003], [20875, 20876, 21005], [20875, 21005, 21004], [20876, 20877, 21005], [20877, 21006, 21005], [20877, 20878, 21007], [20877, 21007, 21006], [20878, 20879, 21007], [20879, 21008, 21007], [20879, 20880, 21009], [20879, 21009, 21008], [20880, 20881, 21009], [20881, 21010, 21009], [20881, 20882, 21011], [20881, 21011, 21010], [20882, 20883, 21011], [20883, 21012, 21011], [20883, 20884, 21013], [20883, 21013, 21012], [20884, 20885, 21013], [20885, 21014, 21013], [20885, 20886, 21015], [20885, 21015, 21014], [20886, 20887, 21015], [20887, 21016, 21015], [20887, 20888, 21017], [20887, 21017, 21016], [20888, 20889, 21017], [20889, 21018, 21017], [20889, 20890, 21019], [20889, 21019, 21018], [20890, 20891, 21019], [20891, 21020, 21019], [20891, 20892, 21021], [20891, 21021, 21020], [20892, 20893, 21021], [20893, 21022, 21021], [20893, 20894, 21023], [20893, 21023, 21022], [20894, 20895, 21023], [20895, 21024, 21023], [20895, 20896, 21025], [20895, 21025, 21024], [20896, 20897, 21025], [20897, 21026, 21025], [20897, 20898, 21027], [20897, 21027, 21026], [20898, 20899, 21027], [20899, 21028, 21027], [20899, 20900, 21029], [20899, 21029, 21028], [20900, 20901, 21029], [20901, 21030, 21029], [20901, 20902, 21031], [20901, 21031, 21030], [20902, 20903, 21031], [20903, 21032, 21031], [20903, 20904, 21033], [20903, 21033, 21032], [20904, 20905, 21033], [20905, 21034, 21033], [20905, 20906, 21035], [20905, 21035, 21034], [20906, 20907, 21035], [20907, 21036, 21035], [20907, 20908, 21037], [20907, 21037, 21036], [20908, 20909, 21037], [20909, 21038, 21037], [20909, 20910, 21039], [20909, 21039, 21038], [20910, 20911, 21039], [20911, 21040, 21039], [20911, 20912, 21041], [20911, 21041, 21040], [20912, 20913, 21041], [20913, 21042, 21041], [20913, 20914, 21043], [20913, 21043, 21042], [20914, 20915, 21043], [20915, 21044, 21043], [20915, 20916, 21045], [20915, 21045, 21044], [20916, 20917, 21045], [20917, 21046, 21045], [20917, 20918, 21047], [20917, 21047, 21046], [20918, 20919, 21047], [20919, 21048, 21047], [20919, 20920, 21049], [20919, 21049, 21048], [20920, 20921, 21049], [20921, 21050, 21049], [20921, 20922, 21051], [20921, 21051, 21050], [20922, 20923, 21051], [20923, 21052, 21051], [20923, 20924, 21053], [20923, 21053, 21052], [20924, 20925, 21053], [20925, 
21054, 21053], [20925, 20926, 21055], [20925, 21055, 21054], [20926, 20927, 21055], [20927, 21056, 21055], [20927, 20928, 21057], [20927, 21057, 21056], [20928, 20929, 21057], [20929, 21058, 21057], [20929, 20930, 21059], [20929, 21059, 21058], [20930, 20931, 21059], [20931, 21060, 21059], [20931, 20932, 21061], [20931, 21061, 21060], [20932, 20933, 21061], [20933, 21062, 21061], [20933, 20934, 21063], [20933, 21063, 21062], [20934, 20935, 21063], [20935, 21064, 21063], [20935, 20936, 21065], [20935, 21065, 21064], [20936, 20937, 21065], [20937, 21066, 21065], [20937, 20938, 21067], [20937, 21067, 21066], [20938, 20939, 21067], [20939, 21068, 21067], [20939, 20940, 21069], [20939, 21069, 21068], [20940, 20941, 21069], [20941, 21070, 21069], [20941, 20942, 21071], [20941, 21071, 21070], [20942, 20943, 21071], [20943, 21072, 21071], [20944, 20945, 21073], [20945, 21074, 21073], [20945, 20946, 21075], [20945, 21075, 21074], [20946, 20947, 21075], [20947, 21076, 21075], [20947, 20948, 21077], [20947, 21077, 21076], [20948, 20949, 21077], [20949, 21078, 21077], [20949, 20950, 21079], [20949, 21079, 21078], [20950, 20951, 21079], [20951, 21080, 21079], [20951, 20952, 21081], [20951, 21081, 21080], [20952, 20953, 21081], [20953, 21082, 21081], [20953, 20954, 21083], [20953, 21083, 21082], [20954, 20955, 21083], [20955, 21084, 21083], [20955, 20956, 21085], [20955, 21085, 21084], [20956, 20957, 21085], [20957, 21086, 21085], [20957, 20958, 21087], [20957, 21087, 21086], [20958, 20959, 21087], [20959, 21088, 21087], [20959, 20960, 21089], [20959, 21089, 21088], [20960, 20961, 21089], [20961, 21090, 21089], [20961, 20962, 21091], [20961, 21091, 21090], [20962, 20963, 21091], [20963, 21092, 21091], [20963, 20964, 21093], [20963, 21093, 21092], [20964, 20965, 21093], [20965, 21094, 21093], [20965, 20966, 21095], [20965, 21095, 21094], [20966, 20967, 21095], [20967, 21096, 21095], [20967, 20968, 21097], [20967, 21097, 21096], [20968, 20969, 21097], [20969, 21098, 21097], [20969, 20970, 21099], [20969, 21099, 21098], [20970, 20971, 21099], [20971, 21100, 21099], [20971, 20972, 21101], [20971, 21101, 21100], [20972, 20973, 21101], [20973, 21102, 21101], [20973, 20974, 21103], [20973, 21103, 21102], [20974, 20975, 21103], [20975, 21104, 21103], [20975, 20976, 21105], [20975, 21105, 21104], [20976, 20977, 21105], [20977, 21106, 21105], [20977, 20978, 21107], [20977, 21107, 21106], [20978, 20979, 21107], [20979, 21108, 21107], [20979, 20980, 21109], [20979, 21109, 21108], [20980, 20981, 21109], [20981, 21110, 21109], [20981, 20982, 21111], [20981, 21111, 21110], [20982, 20983, 21111], [20983, 21112, 21111], [20983, 20984, 21113], [20983, 21113, 21112], [20984, 20985, 21113], [20985, 21114, 21113], [20985, 20986, 21115], [20985, 21115, 21114], [20986, 20987, 21115], [20987, 21116, 21115], [20987, 20988, 21117], [20987, 21117, 21116], [20988, 20989, 21117], [20989, 21118, 21117], [20989, 20990, 21119], [20989, 21119, 21118], [20990, 20991, 21119], [20991, 21120, 21119], [20991, 20992, 21121], [20991, 21121, 21120], [20992, 20993, 21121], [20993, 21122, 21121], [20993, 20994, 21123], [20993, 21123, 21122], [20994, 20995, 21123], [20995, 21124, 21123], [20995, 20996, 21125], [20995, 21125, 21124], [20996, 20997, 21125], [20997, 21126, 21125], [20997, 20998, 21127], [20997, 21127, 21126], [20998, 20999, 21127], [20999, 21128, 21127], [20999, 21000, 21129], [20999, 21129, 21128], [21000, 21001, 21129], [21001, 21130, 21129], [21001, 21002, 21131], [21001, 21131, 21130], [21002, 21003, 21131], [21003, 21132, 
21131], [21003, 21004, 21133], [21003, 21133, 21132], [21004, 21005, 21133], [21005, 21134, 21133], [21005, 21006, 21135], [21005, 21135, 21134], [21006, 21007, 21135], [21007, 21136, 21135], [21007, 21008, 21137], [21007, 21137, 21136], [21008, 21009, 21137], [21009, 21138, 21137], [21009, 21010, 21139], [21009, 21139, 21138], [21010, 21011, 21139], [21011, 21140, 21139], [21011, 21012, 21141], [21011, 21141, 21140], [21012, 21013, 21141], [21013, 21142, 21141], [21013, 21014, 21143], [21013, 21143, 21142], [21014, 21015, 21143], [21015, 21144, 21143], [21015, 21016, 21145], [21015, 21145, 21144], [21016, 21017, 21145], [21017, 21146, 21145], [21017, 21018, 21147], [21017, 21147, 21146], [21018, 21019, 21147], [21019, 21148, 21147], [21019, 21020, 21149], [21019, 21149, 21148], [21020, 21021, 21149], [21021, 21150, 21149], [21021, 21022, 21151], [21021, 21151, 21150], [21022, 21023, 21151], [21023, 21152, 21151], [21023, 21024, 21153], [21023, 21153, 21152], [21024, 21025, 21153], [21025, 21154, 21153], [21025, 21026, 21155], [21025, 21155, 21154], [21026, 21027, 21155], [21027, 21156, 21155], [21027, 21028, 21157], [21027, 21157, 21156], [21028, 21029, 21157], [21029, 21158, 21157], [21029, 21030, 21159], [21029, 21159, 21158], [21030, 21031, 21159], [21031, 21160, 21159], [21031, 21032, 21161], [21031, 21161, 21160], [21032, 21033, 21161], [21033, 21162, 21161], [21033, 21034, 21163], [21033, 21163, 21162], [21034, 21035, 21163], [21035, 21164, 21163], [21035, 21036, 21165], [21035, 21165, 21164], [21036, 21037, 21165], [21037, 21166, 21165], [21037, 21038, 21167], [21037, 21167, 21166], [21038, 21039, 21167], [21039, 21168, 21167], [21039, 21040, 21169], [21039, 21169, 21168], [21040, 21041, 21169], [21041, 21170, 21169], [21041, 21042, 21171], [21041, 21171, 21170], [21042, 21043, 21171], [21043, 21172, 21171], [21043, 21044, 21173], [21043, 21173, 21172], [21044, 21045, 21173], [21045, 21174, 21173], [21045, 21046, 21175], [21045, 21175, 21174], [21046, 21047, 21175], [21047, 21176, 21175], [21047, 21048, 21177], [21047, 21177, 21176], [21048, 21049, 21177], [21049, 21178, 21177], [21049, 21050, 21179], [21049, 21179, 21178], [21050, 21051, 21179], [21051, 21180, 21179], [21051, 21052, 21181], [21051, 21181, 21180], [21052, 21053, 21181], [21053, 21182, 21181], [21053, 21054, 21183], [21053, 21183, 21182], [21054, 21055, 21183], [21055, 21184, 21183], [21055, 21056, 21185], [21055, 21185, 21184], [21056, 21057, 21185], [21057, 21186, 21185], [21057, 21058, 21187], [21057, 21187, 21186], [21058, 21059, 21187], [21059, 21188, 21187], [21059, 21060, 21189], [21059, 21189, 21188], [21060, 21061, 21189], [21061, 21190, 21189], [21061, 21062, 21191], [21061, 21191, 21190], [21062, 21063, 21191], [21063, 21192, 21191], [21063, 21064, 21193], [21063, 21193, 21192], [21064, 21065, 21193], [21065, 21194, 21193], [21065, 21066, 21195], [21065, 21195, 21194], [21066, 21067, 21195], [21067, 21196, 21195], [21067, 21068, 21197], [21067, 21197, 21196], [21068, 21069, 21197], [21069, 21198, 21197], [21069, 21070, 21199], [21069, 21199, 21198], [21070, 21071, 21199], [21071, 21200, 21199], [21071, 21072, 21201], [21071, 21201, 21200], [21073, 21074, 21203], [21073, 21203, 21202], [21074, 21075, 21203], [21075, 21204, 21203], [21075, 21076, 21205], [21075, 21205, 21204], [21076, 21077, 21205], [21077, 21206, 21205], [21077, 21078, 21207], [21077, 21207, 21206], [21078, 21079, 21207], [21079, 21208, 21207], [21079, 21080, 21209], [21079, 21209, 21208], [21080, 21081, 21209], [21081, 21210, 21209], 
[21081, 21082, 21211], [21081, 21211, 21210], [21082, 21083, 21211], [21083, 21212, 21211], [21083, 21084, 21213], [21083, 21213, 21212], [21084, 21085, 21213], [21085, 21214, 21213], [21085, 21086, 21215], [21085, 21215, 21214], [21086, 21087, 21215], [21087, 21216, 21215], [21087, 21088, 21217], [21087, 21217, 21216], [21088, 21089, 21217], [21089, 21218, 21217], [21089, 21090, 21219], [21089, 21219, 21218], [21090, 21091, 21219], [21091, 21220, 21219], [21091, 21092, 21221], [21091, 21221, 21220], [21092, 21093, 21221], [21093, 21222, 21221], [21093, 21094, 21223], [21093, 21223, 21222], [21094, 21095, 21223], [21095, 21224, 21223], [21095, 21096, 21225], [21095, 21225, 21224], [21096, 21097, 21225], [21097, 21226, 21225], [21097, 21098, 21227], [21097, 21227, 21226], [21098, 21099, 21227], [21099, 21228, 21227], [21099, 21100, 21229], [21099, 21229, 21228], [21100, 21101, 21229], [21101, 21230, 21229], [21101, 21102, 21231], [21101, 21231, 21230], [21102, 21103, 21231], [21103, 21232, 21231], [21103, 21104, 21233], [21103, 21233, 21232], [21104, 21105, 21233], [21105, 21234, 21233], [21105, 21106, 21235], [21105, 21235, 21234], [21106, 21107, 21235], [21107, 21236, 21235], [21107, 21108, 21237], [21107, 21237, 21236], [21108, 21109, 21237], [21109, 21238, 21237], [21109, 21110, 21239], [21109, 21239, 21238], [21110, 21111, 21239], [21111, 21240, 21239], [21111, 21112, 21241], [21111, 21241, 21240], [21112, 21113, 21241], [21113, 21242, 21241], [21113, 21114, 21243], [21113, 21243, 21242], [21114, 21115, 21243], [21115, 21244, 21243], [21115, 21116, 21245], [21115, 21245, 21244], [21116, 21117, 21245], [21117, 21246, 21245], [21117, 21118, 21247], [21117, 21247, 21246], [21118, 21119, 21247], [21119, 21248, 21247], [21119, 21120, 21249], [21119, 21249, 21248], [21120, 21121, 21249], [21121, 21250, 21249], [21121, 21122, 21251], [21121, 21251, 21250], [21122, 21123, 21251], [21123, 21252, 21251], [21123, 21124, 21253], [21123, 21253, 21252], [21124, 21125, 21253], [21125, 21254, 21253], [21125, 21126, 21255], [21125, 21255, 21254], [21126, 21127, 21255], [21127, 21256, 21255], [21127, 21128, 21257], [21127, 21257, 21256], [21128, 21129, 21257], [21129, 21258, 21257], [21129, 21130, 21259], [21129, 21259, 21258], [21130, 21131, 21259], [21131, 21260, 21259], [21131, 21132, 21261], [21131, 21261, 21260], [21132, 21133, 21261], [21133, 21262, 21261], [21133, 21134, 21263], [21133, 21263, 21262], [21134, 21135, 21263], [21135, 21264, 21263], [21135, 21136, 21265], [21135, 21265, 21264], [21136, 21137, 21265], [21137, 21266, 21265], [21137, 21138, 21267], [21137, 21267, 21266], [21138, 21139, 21267], [21139, 21268, 21267], [21139, 21140, 21269], [21139, 21269, 21268], [21140, 21141, 21269], [21141, 21270, 21269], [21141, 21142, 21271], [21141, 21271, 21270], [21142, 21143, 21271], [21143, 21272, 21271], [21143, 21144, 21273], [21143, 21273, 21272], [21144, 21145, 21273], [21145, 21274, 21273], [21145, 21146, 21275], [21145, 21275, 21274], [21146, 21147, 21275], [21147, 21276, 21275], [21147, 21148, 21277], [21147, 21277, 21276], [21148, 21149, 21277], [21149, 21278, 21277], [21149, 21150, 21279], [21149, 21279, 21278], [21150, 21151, 21279], [21151, 21280, 21279], [21151, 21152, 21281], [21151, 21281, 21280], [21152, 21153, 21281], [21153, 21282, 21281], [21153, 21154, 21283], [21153, 21283, 21282], [21154, 21155, 21283], [21155, 21284, 21283], [21155, 21156, 21285], [21155, 21285, 21284], [21156, 21157, 21285], [21157, 21286, 21285], [21157, 21158, 21287], [21157, 21287, 21286], [21158, 
21159, 21287], [21159, 21288, 21287], [21159, 21160, 21289], [21159, 21289, 21288], [21160, 21161, 21289], [21161, 21290, 21289], [21161, 21162, 21291], [21161, 21291, 21290], [21162, 21163, 21291], [21163, 21292, 21291], [21163, 21164, 21293], [21163, 21293, 21292], [21164, 21165, 21293], [21165, 21294, 21293], [21165, 21166, 21295], [21165, 21295, 21294], [21166, 21167, 21295], [21167, 21296, 21295], [21167, 21168, 21297], [21167, 21297, 21296], [21168, 21169, 21297], [21169, 21298, 21297], [21169, 21170, 21299], [21169, 21299, 21298], [21170, 21171, 21299], [21171, 21300, 21299], [21171, 21172, 21301], [21171, 21301, 21300], [21172, 21173, 21301], [21173, 21302, 21301], [21173, 21174, 21303], [21173, 21303, 21302], [21174, 21175, 21303], [21175, 21304, 21303], [21175, 21176, 21305], [21175, 21305, 21304], [21176, 21177, 21305], [21177, 21306, 21305], [21177, 21178, 21307], [21177, 21307, 21306], [21178, 21179, 21307], [21179, 21308, 21307], [21179, 21180, 21309], [21179, 21309, 21308], [21180, 21181, 21309], [21181, 21310, 21309], [21181, 21182, 21311], [21181, 21311, 21310], [21182, 21183, 21311], [21183, 21312, 21311], [21183, 21184, 21313], [21183, 21313, 21312], [21184, 21185, 21313], [21185, 21314, 21313], [21185, 21186, 21315], [21185, 21315, 21314], [21186, 21187, 21315], [21187, 21316, 21315], [21187, 21188, 21317], [21187, 21317, 21316], [21188, 21189, 21317], [21189, 21318, 21317], [21189, 21190, 21319], [21189, 21319, 21318], [21190, 21191, 21319], [21191, 21320, 21319], [21191, 21192, 21321], [21191, 21321, 21320], [21192, 21193, 21321], [21193, 21322, 21321], [21193, 21194, 21323], [21193, 21323, 21322], [21194, 21195, 21323], [21195, 21324, 21323], [21195, 21196, 21325], [21195, 21325, 21324], [21196, 21197, 21325], [21197, 21326, 21325], [21197, 21198, 21327], [21197, 21327, 21326], [21198, 21199, 21327], [21199, 21328, 21327], [21199, 21200, 21329], [21199, 21329, 21328], [21200, 21201, 21329], [21201, 21330, 21329], [21202, 21203, 21331], [21203, 21332, 21331], [21203, 21204, 21333], [21203, 21333, 21332], [21204, 21205, 21333], [21205, 21334, 21333], [21205, 21206, 21335], [21205, 21335, 21334], [21206, 21207, 21335], [21207, 21336, 21335], [21207, 21208, 21337], [21207, 21337, 21336], [21208, 21209, 21337], [21209, 21338, 21337], [21209, 21210, 21339], [21209, 21339, 21338], [21210, 21211, 21339], [21211, 21340, 21339], [21211, 21212, 21341], [21211, 21341, 21340], [21212, 21213, 21341], [21213, 21342, 21341], [21213, 21214, 21343], [21213, 21343, 21342], [21214, 21215, 21343], [21215, 21344, 21343], [21215, 21216, 21345], [21215, 21345, 21344], [21216, 21217, 21345], [21217, 21346, 21345], [21217, 21218, 21347], [21217, 21347, 21346], [21218, 21219, 21347], [21219, 21348, 21347], [21219, 21220, 21349], [21219, 21349, 21348], [21220, 21221, 21349], [21221, 21350, 21349], [21221, 21222, 21351], [21221, 21351, 21350], [21222, 21223, 21351], [21223, 21352, 21351], [21223, 21224, 21353], [21223, 21353, 21352], [21224, 21225, 21353], [21225, 21354, 21353], [21225, 21226, 21355], [21225, 21355, 21354], [21226, 21227, 21355], [21227, 21356, 21355], [21227, 21228, 21357], [21227, 21357, 21356], [21228, 21229, 21357], [21229, 21358, 21357], [21229, 21230, 21359], [21229, 21359, 21358], [21230, 21231, 21359], [21231, 21360, 21359], [21231, 21232, 21361], [21231, 21361, 21360], [21232, 21233, 21361], [21233, 21362, 21361], [21233, 21234, 21363], [21233, 21363, 21362], [21234, 21235, 21363], [21235, 21364, 21363], [21235, 21236, 21365], [21235, 21365, 21364], [21236, 21237, 
21365], [21237, 21366, 21365], [21237, 21238, 21367], [21237, 21367, 21366], [21238, 21239, 21367], [21239, 21368, 21367], [21239, 21240, 21369], [21239, 21369, 21368], [21240, 21241, 21369], [21241, 21370, 21369], [21241, 21242, 21371], [21241, 21371, 21370], [21242, 21243, 21371], [21243, 21372, 21371], [21243, 21244, 21373], [21243, 21373, 21372], [21244, 21245, 21373], [21245, 21374, 21373], [21245, 21246, 21375], [21245, 21375, 21374], [21246, 21247, 21375], [21247, 21376, 21375], [21247, 21248, 21377], [21247, 21377, 21376], [21248, 21249, 21377], [21249, 21378, 21377], [21249, 21250, 21379], [21249, 21379, 21378], [21250, 21251, 21379], [21251, 21380, 21379], [21251, 21252, 21381], [21251, 21381, 21380], [21252, 21253, 21381], [21253, 21382, 21381], [21253, 21254, 21383], [21253, 21383, 21382], [21254, 21255, 21383], [21255, 21384, 21383], [21255, 21256, 21385], [21255, 21385, 21384], [21256, 21257, 21385], [21257, 21386, 21385], [21257, 21258, 21387], [21257, 21387, 21386], [21258, 21259, 21387], [21259, 21388, 21387], [21259, 21260, 21389], [21259, 21389, 21388], [21260, 21261, 21389], [21261, 21390, 21389], [21261, 21262, 21391], [21261, 21391, 21390], [21262, 21263, 21391], [21263, 21392, 21391], [21263, 21264, 21393], [21263, 21393, 21392], [21264, 21265, 21393], [21265, 21394, 21393], [21265, 21266, 21395], [21265, 21395, 21394], [21266, 21267, 21395], [21267, 21396, 21395], [21267, 21268, 21397], [21267, 21397, 21396], [21268, 21269, 21397], [21269, 21398, 21397], [21269, 21270, 21399], [21269, 21399, 21398], [21270, 21271, 21399], [21271, 21400, 21399], [21271, 21272, 21401], [21271, 21401, 21400], [21272, 21273, 21401], [21273, 21402, 21401], [21273, 21274, 21403], [21273, 21403, 21402], [21274, 21275, 21403], [21275, 21404, 21403], [21275, 21276, 21405], [21275, 21405, 21404], [21276, 21277, 21405], [21277, 21406, 21405], [21277, 21278, 21407], [21277, 21407, 21406], [21278, 21279, 21407], [21279, 21408, 21407], [21279, 21280, 21409], [21279, 21409, 21408], [21280, 21281, 21409], [21281, 21410, 21409], [21281, 21282, 21411], [21281, 21411, 21410], [21282, 21283, 21411], [21283, 21412, 21411], [21283, 21284, 21413], [21283, 21413, 21412], [21284, 21285, 21413], [21285, 21414, 21413], [21285, 21286, 21415], [21285, 21415, 21414], [21286, 21287, 21415], [21287, 21416, 21415], [21287, 21288, 21417], [21287, 21417, 21416], [21288, 21289, 21417], [21289, 21418, 21417], [21289, 21290, 21419], [21289, 21419, 21418], [21290, 21291, 21419], [21291, 21420, 21419], [21291, 21292, 21421], [21291, 21421, 21420], [21292, 21293, 21421], [21293, 21422, 21421], [21293, 21294, 21423], [21293, 21423, 21422], [21294, 21295, 21423], [21295, 21424, 21423], [21295, 21296, 21425], [21295, 21425, 21424], [21296, 21297, 21425], [21297, 21426, 21425], [21297, 21298, 21427], [21297, 21427, 21426], [21298, 21299, 21427], [21299, 21428, 21427], [21299, 21300, 21429], [21299, 21429, 21428], [21300, 21301, 21429], [21301, 21430, 21429], [21301, 21302, 21431], [21301, 21431, 21430], [21302, 21303, 21431], [21303, 21432, 21431], [21303, 21304, 21433], [21303, 21433, 21432], [21304, 21305, 21433], [21305, 21434, 21433], [21305, 21306, 21435], [21305, 21435, 21434], [21306, 21307, 21435], [21307, 21436, 21435], [21307, 21308, 21437], [21307, 21437, 21436], [21308, 21309, 21437], [21309, 21438, 21437], [21309, 21310, 21439], [21309, 21439, 21438], [21310, 21311, 21439], [21311, 21440, 21439], [21311, 21312, 21441], [21311, 21441, 21440], [21312, 21313, 21441], [21313, 21442, 21441], [21313, 21314, 21443], 
[21313, 21443, 21442], [21314, 21315, 21443], [21315, 21444, 21443], [21315, 21316, 21445], [21315, 21445, 21444], [21316, 21317, 21445], [21317, 21446, 21445], [21317, 21318, 21447], [21317, 21447, 21446], [21318, 21319, 21447], [21319, 21448, 21447], [21319, 21320, 21449], [21319, 21449, 21448], [21320, 21321, 21449], [21321, 21450, 21449], [21321, 21322, 21451], [21321, 21451, 21450], [21322, 21323, 21451], [21323, 21452, 21451], [21323, 21324, 21453], [21323, 21453, 21452], [21324, 21325, 21453], [21325, 21454, 21453], [21325, 21326, 21455], [21325, 21455, 21454], [21326, 21327, 21455], [21327, 21456, 21455], [21327, 21328, 21457], [21327, 21457, 21456], [21328, 21329, 21457], [21329, 21458, 21457], [21329, 21330, 21459], [21329, 21459, 21458], [21331, 21332, 21461], [21331, 21461, 21460], [21332, 21333, 21461], [21333, 21462, 21461], [21333, 21334, 21463], [21333, 21463, 21462], [21334, 21335, 21463], [21335, 21464, 21463], [21335, 21336, 21465], [21335, 21465, 21464], [21336, 21337, 21465], [21337, 21466, 21465], [21337, 21338, 21467], [21337, 21467, 21466], [21338, 21339, 21467], [21339, 21468, 21467], [21339, 21340, 21469], [21339, 21469, 21468], [21340, 21341, 21469], [21341, 21470, 21469], [21341, 21342, 21471], [21341, 21471, 21470], [21342, 21343, 21471], [21343, 21472, 21471], [21343, 21344, 21473], [21343, 21473, 21472], [21344, 21345, 21473], [21345, 21474, 21473], [21345, 21346, 21475], [21345, 21475, 21474], [21346, 21347, 21475], [21347, 21476, 21475], [21347, 21348, 21477], [21347, 21477, 21476], [21348, 21349, 21477], [21349, 21478, 21477], [21349, 21350, 21479], [21349, 21479, 21478], [21350, 21351, 21479], [21351, 21480, 21479], [21351, 21352, 21481], [21351, 21481, 21480], [21352, 21353, 21481], [21353, 21482, 21481], [21353, 21354, 21483], [21353, 21483, 21482], [21354, 21355, 21483], [21355, 21484, 21483], [21355, 21356, 21485], [21355, 21485, 21484], [21356, 21357, 21485], [21357, 21486, 21485], [21357, 21358, 21487], [21357, 21487, 21486], [21358, 21359, 21487], [21359, 21488, 21487], [21359, 21360, 21489], [21359, 21489, 21488], [21360, 21361, 21489], [21361, 21490, 21489], [21361, 21362, 21491], [21361, 21491, 21490], [21362, 21363, 21491], [21363, 21492, 21491], [21363, 21364, 21493], [21363, 21493, 21492], [21364, 21365, 21493], [21365, 21494, 21493], [21365, 21366, 21495], [21365, 21495, 21494], [21366, 21367, 21495], [21367, 21496, 21495], [21367, 21368, 21497], [21367, 21497, 21496], [21368, 21369, 21497], [21369, 21498, 21497], [21369, 21370, 21499], [21369, 21499, 21498], [21370, 21371, 21499], [21371, 21500, 21499], [21371, 21372, 21501], [21371, 21501, 21500], [21372, 21373, 21501], [21373, 21502, 21501], [21373, 21374, 21503], [21373, 21503, 21502], [21374, 21375, 21503], [21375, 21504, 21503], [21375, 21376, 21505], [21375, 21505, 21504], [21376, 21377, 21505], [21377, 21506, 21505], [21377, 21378, 21507], [21377, 21507, 21506], [21378, 21379, 21507], [21379, 21508, 21507], [21379, 21380, 21509], [21379, 21509, 21508], [21380, 21381, 21509], [21381, 21510, 21509], [21381, 21382, 21511], [21381, 21511, 21510], [21382, 21383, 21511], [21383, 21512, 21511], [21383, 21384, 21513], [21383, 21513, 21512], [21384, 21385, 21513], [21385, 21514, 21513], [21385, 21386, 21515], [21385, 21515, 21514], [21386, 21387, 21515], [21387, 21516, 21515], [21387, 21388, 21517], [21387, 21517, 21516], [21388, 21389, 21517], [21389, 21518, 21517], [21389, 21390, 21519], [21389, 21519, 21518], [21390, 21391, 21519], [21391, 21520, 21519], [21391, 21392, 21521], [21391, 
21521, 21520], [21392, 21393, 21521], [21393, 21522, 21521], [21393, 21394, 21523], [21393, 21523, 21522], [21394, 21395, 21523], [21395, 21524, 21523], [21395, 21396, 21525], [21395, 21525, 21524], [21396, 21397, 21525], [21397, 21526, 21525], [21397, 21398, 21527], [21397, 21527, 21526], [21398, 21399, 21527], [21399, 21528, 21527], [21399, 21400, 21529], [21399, 21529, 21528], [21400, 21401, 21529], [21401, 21530, 21529], [21401, 21402, 21531], [21401, 21531, 21530], [21402, 21403, 21531], [21403, 21532, 21531], [21403, 21404, 21533], [21403, 21533, 21532], [21404, 21405, 21533], [21405, 21534, 21533], [21405, 21406, 21535], [21405, 21535, 21534], [21406, 21407, 21535], [21407, 21536, 21535], [21407, 21408, 21537], [21407, 21537, 21536], [21408, 21409, 21537], [21409, 21538, 21537], [21409, 21410, 21539], [21409, 21539, 21538], [21410, 21411, 21539], [21411, 21540, 21539], [21411, 21412, 21541], [21411, 21541, 21540], [21412, 21413, 21541], [21413, 21542, 21541], [21413, 21414, 21543], [21413, 21543, 21542], [21414, 21415, 21543], [21415, 21544, 21543], [21415, 21416, 21545], [21415, 21545, 21544], [21416, 21417, 21545], [21417, 21546, 21545], [21417, 21418, 21547], [21417, 21547, 21546], [21418, 21419, 21547], [21419, 21548, 21547], [21419, 21420, 21549], [21419, 21549, 21548], [21420, 21421, 21549], [21421, 21550, 21549], [21421, 21422, 21551], [21421, 21551, 21550], [21422, 21423, 21551], [21423, 21552, 21551], [21423, 21424, 21553], [21423, 21553, 21552], [21424, 21425, 21553], [21425, 21554, 21553], [21425, 21426, 21555], [21425, 21555, 21554], [21426, 21427, 21555], [21427, 21556, 21555], [21427, 21428, 21557], [21427, 21557, 21556], [21428, 21429, 21557], [21429, 21558, 21557], [21429, 21430, 21559], [21429, 21559, 21558], [21430, 21431, 21559], [21431, 21560, 21559], [21431, 21432, 21561], [21431, 21561, 21560], [21432, 21433, 21561], [21433, 21562, 21561], [21433, 21434, 21563], [21433, 21563, 21562], [21434, 21435, 21563], [21435, 21564, 21563], [21435, 21436, 21565], [21435, 21565, 21564], [21436, 21437, 21565], [21437, 21566, 21565], [21437, 21438, 21567], [21437, 21567, 21566], [21438, 21439, 21567], [21439, 21568, 21567], [21439, 21440, 21569], [21439, 21569, 21568], [21440, 21441, 21569], [21441, 21570, 21569], [21441, 21442, 21571], [21441, 21571, 21570], [21442, 21443, 21571], [21443, 21572, 21571], [21443, 21444, 21573], [21443, 21573, 21572], [21444, 21445, 21573], [21445, 21574, 21573], [21445, 21446, 21575], [21445, 21575, 21574], [21446, 21447, 21575], [21447, 21576, 21575], [21447, 21448, 21577], [21447, 21577, 21576], [21448, 21449, 21577], [21449, 21578, 21577], [21449, 21450, 21579], [21449, 21579, 21578], [21450, 21451, 21579], [21451, 21580, 21579], [21451, 21452, 21581], [21451, 21581, 21580], [21452, 21453, 21581], [21453, 21582, 21581], [21453, 21454, 21583], [21453, 21583, 21582], [21454, 21455, 21583], [21455, 21584, 21583], [21455, 21456, 21585], [21455, 21585, 21584], [21456, 21457, 21585], [21457, 21586, 21585], [21457, 21458, 21587], [21457, 21587, 21586], [21458, 21459, 21587], [21459, 21588, 21587], [21460, 21461, 21589], [21461, 21590, 21589], [21461, 21462, 21591], [21461, 21591, 21590], [21462, 21463, 21591], [21463, 21592, 21591], [21463, 21464, 21593], [21463, 21593, 21592], [21464, 21465, 21593], [21465, 21594, 21593], [21465, 21466, 21595], [21465, 21595, 21594], [21466, 21467, 21595], [21467, 21596, 21595], [21467, 21468, 21597], [21467, 21597, 21596], [21468, 21469, 21597], [21469, 21598, 21597], [21469, 21470, 21599], [21469, 21599, 
21598], [21470, 21471, 21599], [21471, 21600, 21599], [21471, 21472, 21601], [21471, 21601, 21600], [21472, 21473, 21601], [21473, 21602, 21601], [21473, 21474, 21603], [21473, 21603, 21602], [21474, 21475, 21603], [21475, 21604, 21603], [21475, 21476, 21605], [21475, 21605, 21604], [21476, 21477, 21605], [21477, 21606, 21605], [21477, 21478, 21607], [21477, 21607, 21606], [21478, 21479, 21607], [21479, 21608, 21607], [21479, 21480, 21609], [21479, 21609, 21608], [21480, 21481, 21609], [21481, 21610, 21609], [21481, 21482, 21611], [21481, 21611, 21610], [21482, 21483, 21611], [21483, 21612, 21611], [21483, 21484, 21613], [21483, 21613, 21612], [21484, 21485, 21613], [21485, 21614, 21613], [21485, 21486, 21615], [21485, 21615, 21614], [21486, 21487, 21615], [21487, 21616, 21615], [21487, 21488, 21617], [21487, 21617, 21616], [21488, 21489, 21617], [21489, 21618, 21617], [21489, 21490, 21619], [21489, 21619, 21618], [21490, 21491, 21619], [21491, 21620, 21619], [21491, 21492, 21621], [21491, 21621, 21620], [21492, 21493, 21621], [21493, 21622, 21621], [21493, 21494, 21623], [21493, 21623, 21622], [21494, 21495, 21623], [21495, 21624, 21623], [21495, 21496, 21625], [21495, 21625, 21624], [21496, 21497, 21625], [21497, 21626, 21625], [21497, 21498, 21627], [21497, 21627, 21626], [21498, 21499, 21627], [21499, 21628, 21627], [21499, 21500, 21629], [21499, 21629, 21628], [21500, 21501, 21629], [21501, 21630, 21629], [21501, 21502, 21631], [21501, 21631, 21630], [21502, 21503, 21631], [21503, 21632, 21631], [21503, 21504, 21633], [21503, 21633, 21632], [21504, 21505, 21633], [21505, 21634, 21633], [21505, 21506, 21635], [21505, 21635, 21634], [21506, 21507, 21635], [21507, 21636, 21635], [21507, 21508, 21637], [21507, 21637, 21636], [21508, 21509, 21637], [21509, 21638, 21637], [21509, 21510, 21639], [21509, 21639, 21638], [21510, 21511, 21639], [21511, 21640, 21639], [21511, 21512, 21641], [21511, 21641, 21640], [21512, 21513, 21641], [21513, 21642, 21641], [21513, 21514, 21643], [21513, 21643, 21642], [21514, 21515, 21643], [21515, 21644, 21643], [21515, 21516, 21645], [21515, 21645, 21644], [21516, 21517, 21645], [21517, 21646, 21645], [21517, 21518, 21647], [21517, 21647, 21646], [21518, 21519, 21647], [21519, 21648, 21647], [21519, 21520, 21649], [21519, 21649, 21648], [21520, 21521, 21649], [21521, 21650, 21649], [21521, 21522, 21651], [21521, 21651, 21650], [21522, 21523, 21651], [21523, 21652, 21651], [21523, 21524, 21653], [21523, 21653, 21652], [21524, 21525, 21653], [21525, 21654, 21653], [21525, 21526, 21655], [21525, 21655, 21654], [21526, 21527, 21655], [21527, 21656, 21655], [21527, 21528, 21657], [21527, 21657, 21656], [21528, 21529, 21657], [21529, 21658, 21657], [21529, 21530, 21659], [21529, 21659, 21658], [21530, 21531, 21659], [21531, 21660, 21659], [21531, 21532, 21661], [21531, 21661, 21660], [21532, 21533, 21661], [21533, 21662, 21661], [21533, 21534, 21663], [21533, 21663, 21662], [21534, 21535, 21663], [21535, 21664, 21663], [21535, 21536, 21665], [21535, 21665, 21664], [21536, 21537, 21665], [21537, 21666, 21665], [21537, 21538, 21667], [21537, 21667, 21666], [21538, 21539, 21667], [21539, 21668, 21667], [21539, 21540, 21669], [21539, 21669, 21668], [21540, 21541, 21669], [21541, 21670, 21669], [21541, 21542, 21671], [21541, 21671, 21670], [21542, 21543, 21671], [21543, 21672, 21671], [21543, 21544, 21673], [21543, 21673, 21672], [21544, 21545, 21673], [21545, 21674, 21673], [21545, 21546, 21675], [21545, 21675, 21674], [21546, 21547, 21675], [21547, 21676, 21675], 
[21547, 21548, 21677], [21547, 21677, 21676], [21548, 21549, 21677], [21549, 21678, 21677], [21549, 21550, 21679], [21549, 21679, 21678], [21550, 21551, 21679], [21551, 21680, 21679], [21551, 21552, 21681], [21551, 21681, 21680], [21552, 21553, 21681], [21553, 21682, 21681], [21553, 21554, 21683], [21553, 21683, 21682], [21554, 21555, 21683], [21555, 21684, 21683], [21555, 21556, 21685], [21555, 21685, 21684], [21556, 21557, 21685], [21557, 21686, 21685], [21557, 21558, 21687], [21557, 21687, 21686], [21558, 21559, 21687], [21559, 21688, 21687], [21559, 21560, 21689], [21559, 21689, 21688], [21560, 21561, 21689], [21561, 21690, 21689], [21561, 21562, 21691], [21561, 21691, 21690], [21562, 21563, 21691], [21563, 21692, 21691], [21563, 21564, 21693], [21563, 21693, 21692], [21564, 21565, 21693], [21565, 21694, 21693], [21565, 21566, 21695], [21565, 21695, 21694], [21566, 21567, 21695], [21567, 21696, 21695], [21567, 21568, 21697], [21567, 21697, 21696], [21568, 21569, 21697], [21569, 21698, 21697], [21569, 21570, 21699], [21569, 21699, 21698], [21570, 21571, 21699], [21571, 21700, 21699], [21571, 21572, 21701], [21571, 21701, 21700], [21572, 21573, 21701], [21573, 21702, 21701], [21573, 21574, 21703], [21573, 21703, 21702], [21574, 21575, 21703], [21575, 21704, 21703], [21575, 21576, 21705], [21575, 21705, 21704], [21576, 21577, 21705], [21577, 21706, 21705], [21577, 21578, 21707], [21577, 21707, 21706], [21578, 21579, 21707], [21579, 21708, 21707], [21579, 21580, 21709], [21579, 21709, 21708], [21580, 21581, 21709], [21581, 21710, 21709], [21581, 21582, 21711], [21581, 21711, 21710], [21582, 21583, 21711], [21583, 21712, 21711], [21583, 21584, 21713], [21583, 21713, 21712], [21584, 21585, 21713], [21585, 21714, 21713], [21585, 21586, 21715], [21585, 21715, 21714], [21586, 21587, 21715], [21587, 21716, 21715], [21587, 21588, 21717], [21587, 21717, 21716], [21589, 21590, 21719], [21589, 21719, 21718], [21590, 21591, 21719], [21591, 21720, 21719], [21591, 21592, 21721], [21591, 21721, 21720], [21592, 21593, 21721], [21593, 21722, 21721], [21593, 21594, 21723], [21593, 21723, 21722], [21594, 21595, 21723], [21595, 21724, 21723], [21595, 21596, 21725], [21595, 21725, 21724], [21596, 21597, 21725], [21597, 21726, 21725], [21597, 21598, 21727], [21597, 21727, 21726], [21598, 21599, 21727], [21599, 21728, 21727], [21599, 21600, 21729], [21599, 21729, 21728], [21600, 21601, 21729], [21601, 21730, 21729], [21601, 21602, 21731], [21601, 21731, 21730], [21602, 21603, 21731], [21603, 21732, 21731], [21603, 21604, 21733], [21603, 21733, 21732], [21604, 21605, 21733], [21605, 21734, 21733], [21605, 21606, 21735], [21605, 21735, 21734], [21606, 21607, 21735], [21607, 21736, 21735], [21607, 21608, 21737], [21607, 21737, 21736], [21608, 21609, 21737], [21609, 21738, 21737], [21609, 21610, 21739], [21609, 21739, 21738], [21610, 21611, 21739], [21611, 21740, 21739], [21611, 21612, 21741], [21611, 21741, 21740], [21612, 21613, 21741], [21613, 21742, 21741], [21613, 21614, 21743], [21613, 21743, 21742], [21614, 21615, 21743], [21615, 21744, 21743], [21615, 21616, 21745], [21615, 21745, 21744], [21616, 21617, 21745], [21617, 21746, 21745], [21617, 21618, 21747], [21617, 21747, 21746], [21618, 21619, 21747], [21619, 21748, 21747], [21619, 21620, 21749], [21619, 21749, 21748], [21620, 21621, 21749], [21621, 21750, 21749], [21621, 21622, 21751], [21621, 21751, 21750], [21622, 21623, 21751], [21623, 21752, 21751], [21623, 21624, 21753], [21623, 21753, 21752], [21624, 21625, 21753], [21625, 21754, 21753], [21625, 
21626, 21755], [21625, 21755, 21754], [21626, 21627, 21755], [21627, 21756, 21755], [21627, 21628, 21757], [21627, 21757, 21756], [21628, 21629, 21757], [21629, 21758, 21757], [21629, 21630, 21759], [21629, 21759, 21758], [21630, 21631, 21759], [21631, 21760, 21759], [21631, 21632, 21761], [21631, 21761, 21760], [21632, 21633, 21761], [21633, 21762, 21761], [21633, 21634, 21763], [21633, 21763, 21762], [21634, 21635, 21763], [21635, 21764, 21763], [21635, 21636, 21765], [21635, 21765, 21764], [21636, 21637, 21765], [21637, 21766, 21765], [21637, 21638, 21767], [21637, 21767, 21766], [21638, 21639, 21767], [21639, 21768, 21767], [21639, 21640, 21769], [21639, 21769, 21768], [21640, 21641, 21769], [21641, 21770, 21769], [21641, 21642, 21771], [21641, 21771, 21770], [21642, 21643, 21771], [21643, 21772, 21771], [21643, 21644, 21773], [21643, 21773, 21772], [21644, 21645, 21773], [21645, 21774, 21773], [21645, 21646, 21775], [21645, 21775, 21774], [21646, 21647, 21775], [21647, 21776, 21775], [21647, 21648, 21777], [21647, 21777, 21776], [21648, 21649, 21777], [21649, 21778, 21777], [21649, 21650, 21779], [21649, 21779, 21778], [21650, 21651, 21779], [21651, 21780, 21779], [21651, 21652, 21781], [21651, 21781, 21780], [21652, 21653, 21781], [21653, 21782, 21781], [21653, 21654, 21783], [21653, 21783, 21782], [21654, 21655, 21783], [21655, 21784, 21783], [21655, 21656, 21785], [21655, 21785, 21784], [21656, 21657, 21785], [21657, 21786, 21785], [21657, 21658, 21787], [21657, 21787, 21786], [21658, 21659, 21787], [21659, 21788, 21787], [21659, 21660, 21789], [21659, 21789, 21788], [21660, 21661, 21789], [21661, 21790, 21789], [21661, 21662, 21791], [21661, 21791, 21790], [21662, 21663, 21791], [21663, 21792, 21791], [21663, 21664, 21793], [21663, 21793, 21792], [21664, 21665, 21793], [21665, 21794, 21793], [21665, 21666, 21795], [21665, 21795, 21794], [21666, 21667, 21795], [21667, 21796, 21795], [21667, 21668, 21797], [21667, 21797, 21796], [21668, 21669, 21797], [21669, 21798, 21797], [21669, 21670, 21799], [21669, 21799, 21798], [21670, 21671, 21799], [21671, 21800, 21799], [21671, 21672, 21801], [21671, 21801, 21800], [21672, 21673, 21801], [21673, 21802, 21801], [21673, 21674, 21803], [21673, 21803, 21802], [21674, 21675, 21803], [21675, 21804, 21803], [21675, 21676, 21805], [21675, 21805, 21804], [21676, 21677, 21805], [21677, 21806, 21805], [21677, 21678, 21807], [21677, 21807, 21806], [21678, 21679, 21807], [21679, 21808, 21807], [21679, 21680, 21809], [21679, 21809, 21808], [21680, 21681, 21809], [21681, 21810, 21809], [21681, 21682, 21811], [21681, 21811, 21810], [21682, 21683, 21811], [21683, 21812, 21811], [21683, 21684, 21813], [21683, 21813, 21812], [21684, 21685, 21813], [21685, 21814, 21813], [21685, 21686, 21815], [21685, 21815, 21814], [21686, 21687, 21815], [21687, 21816, 21815], [21687, 21688, 21817], [21687, 21817, 21816], [21688, 21689, 21817], [21689, 21818, 21817], [21689, 21690, 21819], [21689, 21819, 21818], [21690, 21691, 21819], [21691, 21820, 21819], [21691, 21692, 21821], [21691, 21821, 21820], [21692, 21693, 21821], [21693, 21822, 21821], [21693, 21694, 21823], [21693, 21823, 21822], [21694, 21695, 21823], [21695, 21824, 21823], [21695, 21696, 21825], [21695, 21825, 21824], [21696, 21697, 21825], [21697, 21826, 21825], [21697, 21698, 21827], [21697, 21827, 21826], [21698, 21699, 21827], [21699, 21828, 21827], [21699, 21700, 21829], [21699, 21829, 21828], [21700, 21701, 21829], [21701, 21830, 21829], [21701, 21702, 21831], [21701, 21831, 21830], [21702, 21703, 
21831], [21703, 21832, 21831], [21703, 21704, 21833], [21703, 21833, 21832], [21704, 21705, 21833], [21705, 21834, 21833], [21705, 21706, 21835], [21705, 21835, 21834], [21706, 21707, 21835], [21707, 21836, 21835], [21707, 21708, 21837], [21707, 21837, 21836], [21708, 21709, 21837], [21709, 21838, 21837], [21709, 21710, 21839], [21709, 21839, 21838], [21710, 21711, 21839], [21711, 21840, 21839], [21711, 21712, 21841], [21711, 21841, 21840], [21712, 21713, 21841], [21713, 21842, 21841], [21713, 21714, 21843], [21713, 21843, 21842], [21714, 21715, 21843], [21715, 21844, 21843], [21715, 21716, 21845], [21715, 21845, 21844], [21716, 21717, 21845], [21717, 21846, 21845], [21718, 21719, 21847], [21719, 21848, 21847], [21719, 21720, 21849], [21719, 21849, 21848], [21720, 21721, 21849], [21721, 21850, 21849], [21721, 21722, 21851], [21721, 21851, 21850], [21722, 21723, 21851], [21723, 21852, 21851], [21723, 21724, 21853], [21723, 21853, 21852], [21724, 21725, 21853], [21725, 21854, 21853], [21725, 21726, 21855], [21725, 21855, 21854], [21726, 21727, 21855], [21727, 21856, 21855], [21727, 21728, 21857], [21727, 21857, 21856], [21728, 21729, 21857], [21729, 21858, 21857], [21729, 21730, 21859], [21729, 21859, 21858], [21730, 21731, 21859], [21731, 21860, 21859], [21731, 21732, 21861], [21731, 21861, 21860], [21732, 21733, 21861], [21733, 21862, 21861], [21733, 21734, 21863], [21733, 21863, 21862], [21734, 21735, 21863], [21735, 21864, 21863], [21735, 21736, 21865], [21735, 21865, 21864], [21736, 21737, 21865], [21737, 21866, 21865], [21737, 21738, 21867], [21737, 21867, 21866], [21738, 21739, 21867], [21739, 21868, 21867], [21739, 21740, 21869], [21739, 21869, 21868], [21740, 21741, 21869], [21741, 21870, 21869], [21741, 21742, 21871], [21741, 21871, 21870], [21742, 21743, 21871], [21743, 21872, 21871], [21743, 21744, 21873], [21743, 21873, 21872], [21744, 21745, 21873], [21745, 21874, 21873], [21745, 21746, 21875], [21745, 21875, 21874], [21746, 21747, 21875], [21747, 21876, 21875], [21747, 21748, 21877], [21747, 21877, 21876], [21748, 21749, 21877], [21749, 21878, 21877], [21749, 21750, 21879], [21749, 21879, 21878], [21750, 21751, 21879], [21751, 21880, 21879], [21751, 21752, 21881], [21751, 21881, 21880], [21752, 21753, 21881], [21753, 21882, 21881], [21753, 21754, 21883], [21753, 21883, 21882], [21754, 21755, 21883], [21755, 21884, 21883], [21755, 21756, 21885], [21755, 21885, 21884], [21756, 21757, 21885], [21757, 21886, 21885], [21757, 21758, 21887], [21757, 21887, 21886], [21758, 21759, 21887], [21759, 21888, 21887], [21759, 21760, 21889], [21759, 21889, 21888], [21760, 21761, 21889], [21761, 21890, 21889], [21761, 21762, 21891], [21761, 21891, 21890], [21762, 21763, 21891], [21763, 21892, 21891], [21763, 21764, 21893], [21763, 21893, 21892], [21764, 21765, 21893], [21765, 21894, 21893], [21765, 21766, 21895], [21765, 21895, 21894], [21766, 21767, 21895], [21767, 21896, 21895], [21767, 21768, 21897], [21767, 21897, 21896], [21768, 21769, 21897], [21769, 21898, 21897], [21769, 21770, 21899], [21769, 21899, 21898], [21770, 21771, 21899], [21771, 21900, 21899], [21771, 21772, 21901], [21771, 21901, 21900], [21772, 21773, 21901], [21773, 21902, 21901], [21773, 21774, 21903], [21773, 21903, 21902], [21774, 21775, 21903], [21775, 21904, 21903], [21775, 21776, 21905], [21775, 21905, 21904], [21776, 21777, 21905], [21777, 21906, 21905], [21777, 21778, 21907], [21777, 21907, 21906], [21778, 21779, 21907], [21779, 21908, 21907], [21779, 21780, 21909], [21779, 21909, 21908], [21780, 21781, 21909], 
[21781, 21910, 21909], [21781, 21782, 21911], [21781, 21911, 21910], [21782, 21783, 21911], [21783, 21912, 21911], [21783, 21784, 21913], [21783, 21913, 21912], [21784, 21785, 21913], [21785, 21914, 21913], [21785, 21786, 21915], [21785, 21915, 21914], [21786, 21787, 21915], [21787, 21916, 21915], [21787, 21788, 21917], [21787, 21917, 21916], [21788, 21789, 21917], [21789, 21918, 21917], [21789, 21790, 21919], [21789, 21919, 21918], [21790, 21791, 21919], [21791, 21920, 21919], [21791, 21792, 21921], [21791, 21921, 21920], [21792, 21793, 21921], [21793, 21922, 21921], [21793, 21794, 21923], [21793, 21923, 21922], [21794, 21795, 21923], [21795, 21924, 21923], [21795, 21796, 21925], [21795, 21925, 21924], [21796, 21797, 21925], [21797, 21926, 21925], [21797, 21798, 21927], [21797, 21927, 21926], [21798, 21799, 21927], [21799, 21928, 21927], [21799, 21800, 21929], [21799, 21929, 21928], [21800, 21801, 21929], [21801, 21930, 21929], [21801, 21802, 21931], [21801, 21931, 21930], [21802, 21803, 21931], [21803, 21932, 21931], [21803, 21804, 21933], [21803, 21933, 21932], [21804, 21805, 21933], [21805, 21934, 21933], [21805, 21806, 21935], [21805, 21935, 21934], [21806, 21807, 21935], [21807, 21936, 21935], [21807, 21808, 21937], [21807, 21937, 21936], [21808, 21809, 21937], [21809, 21938, 21937], [21809, 21810, 21939], [21809, 21939, 21938], [21810, 21811, 21939], [21811, 21940, 21939], [21811, 21812, 21941], [21811, 21941, 21940], [21812, 21813, 21941], [21813, 21942, 21941], [21813, 21814, 21943], [21813, 21943, 21942], [21814, 21815, 21943], [21815, 21944, 21943], [21815, 21816, 21945], [21815, 21945, 21944], [21816, 21817, 21945], [21817, 21946, 21945], [21817, 21818, 21947], [21817, 21947, 21946], [21818, 21819, 21947], [21819, 21948, 21947], [21819, 21820, 21949], [21819, 21949, 21948], [21820, 21821, 21949], [21821, 21950, 21949], [21821, 21822, 21951], [21821, 21951, 21950], [21822, 21823, 21951], [21823, 21952, 21951], [21823, 21824, 21953], [21823, 21953, 21952], [21824, 21825, 21953], [21825, 21954, 21953], [21825, 21826, 21955], [21825, 21955, 21954], [21826, 21827, 21955], [21827, 21956, 21955], [21827, 21828, 21957], [21827, 21957, 21956], [21828, 21829, 21957], [21829, 21958, 21957], [21829, 21830, 21959], [21829, 21959, 21958], [21830, 21831, 21959], [21831, 21960, 21959], [21831, 21832, 21961], [21831, 21961, 21960], [21832, 21833, 21961], [21833, 21962, 21961], [21833, 21834, 21963], [21833, 21963, 21962], [21834, 21835, 21963], [21835, 21964, 21963], [21835, 21836, 21965], [21835, 21965, 21964], [21836, 21837, 21965], [21837, 21966, 21965], [21837, 21838, 21967], [21837, 21967, 21966], [21838, 21839, 21967], [21839, 21968, 21967], [21839, 21840, 21969], [21839, 21969, 21968], [21840, 21841, 21969], [21841, 21970, 21969], [21841, 21842, 21971], [21841, 21971, 21970], [21842, 21843, 21971], [21843, 21972, 21971], [21843, 21844, 21973], [21843, 21973, 21972], [21844, 21845, 21973], [21845, 21974, 21973], [21845, 21846, 21975], [21845, 21975, 21974], [21847, 21848, 21977], [21847, 21977, 21976], [21848, 21849, 21977], [21849, 21978, 21977], [21849, 21850, 21979], [21849, 21979, 21978], [21850, 21851, 21979], [21851, 21980, 21979], [21851, 21852, 21981], [21851, 21981, 21980], [21852, 21853, 21981], [21853, 21982, 21981], [21853, 21854, 21983], [21853, 21983, 21982], [21854, 21855, 21983], [21855, 21984, 21983], [21855, 21856, 21985], [21855, 21985, 21984], [21856, 21857, 21985], [21857, 21986, 21985], [21857, 21858, 21987], [21857, 21987, 21986], [21858, 21859, 21987], [21859, 
21988, 21987], [21859, 21860, 21989], [21859, 21989, 21988], [21860, 21861, 21989], [21861, 21990, 21989], [21861, 21862, 21991], [21861, 21991, 21990], [21862, 21863, 21991], [21863, 21992, 21991], [21863, 21864, 21993], [21863, 21993, 21992], [21864, 21865, 21993], [21865, 21994, 21993], [21865, 21866, 21995], [21865, 21995, 21994], [21866, 21867, 21995], [21867, 21996, 21995], [21867, 21868, 21997], [21867, 21997, 21996], [21868, 21869, 21997], [21869, 21998, 21997], [21869, 21870, 21999], [21869, 21999, 21998], [21870, 21871, 21999], [21871, 22000, 21999], [21871, 21872, 22001], [21871, 22001, 22000], [21872, 21873, 22001], [21873, 22002, 22001], [21873, 21874, 22003], [21873, 22003, 22002], [21874, 21875, 22003], [21875, 22004, 22003], [21875, 21876, 22005], [21875, 22005, 22004], [21876, 21877, 22005], [21877, 22006, 22005], [21877, 21878, 22007], [21877, 22007, 22006], [21878, 21879, 22007], [21879, 22008, 22007], [21879, 21880, 22009], [21879, 22009, 22008], [21880, 21881, 22009], [21881, 22010, 22009], [21881, 21882, 22011], [21881, 22011, 22010], [21882, 21883, 22011], [21883, 22012, 22011], [21883, 21884, 22013], [21883, 22013, 22012], [21884, 21885, 22013], [21885, 22014, 22013], [21885, 21886, 22015], [21885, 22015, 22014], [21886, 21887, 22015], [21887, 22016, 22015], [21887, 21888, 22017], [21887, 22017, 22016], [21888, 21889, 22017], [21889, 22018, 22017], [21889, 21890, 22019], [21889, 22019, 22018], [21890, 21891, 22019], [21891, 22020, 22019], [21891, 21892, 22021], [21891, 22021, 22020], [21892, 21893, 22021], [21893, 22022, 22021], [21893, 21894, 22023], [21893, 22023, 22022], [21894, 21895, 22023], [21895, 22024, 22023], [21895, 21896, 22025], [21895, 22025, 22024], [21896, 21897, 22025], [21897, 22026, 22025], [21897, 21898, 22027], [21897, 22027, 22026], [21898, 21899, 22027], [21899, 22028, 22027], [21899, 21900, 22029], [21899, 22029, 22028], [21900, 21901, 22029], [21901, 22030, 22029], [21901, 21902, 22031], [21901, 22031, 22030], [21902, 21903, 22031], [21903, 22032, 22031], [21903, 21904, 22033], [21903, 22033, 22032], [21904, 21905, 22033], [21905, 22034, 22033], [21905, 21906, 22035], [21905, 22035, 22034], [21906, 21907, 22035], [21907, 22036, 22035], [21907, 21908, 22037], [21907, 22037, 22036], [21908, 21909, 22037], [21909, 22038, 22037], [21909, 21910, 22039], [21909, 22039, 22038], [21910, 21911, 22039], [21911, 22040, 22039], [21911, 21912, 22041], [21911, 22041, 22040], [21912, 21913, 22041], [21913, 22042, 22041], [21913, 21914, 22043], [21913, 22043, 22042], [21914, 21915, 22043], [21915, 22044, 22043], [21915, 21916, 22045], [21915, 22045, 22044], [21916, 21917, 22045], [21917, 22046, 22045], [21917, 21918, 22047], [21917, 22047, 22046], [21918, 21919, 22047], [21919, 22048, 22047], [21919, 21920, 22049], [21919, 22049, 22048], [21920, 21921, 22049], [21921, 22050, 22049], [21921, 21922, 22051], [21921, 22051, 22050], [21922, 21923, 22051], [21923, 22052, 22051], [21923, 21924, 22053], [21923, 22053, 22052], [21924, 21925, 22053], [21925, 22054, 22053], [21925, 21926, 22055], [21925, 22055, 22054], [21926, 21927, 22055], [21927, 22056, 22055], [21927, 21928, 22057], [21927, 22057, 22056], [21928, 21929, 22057], [21929, 22058, 22057], [21929, 21930, 22059], [21929, 22059, 22058], [21930, 21931, 22059], [21931, 22060, 22059], [21931, 21932, 22061], [21931, 22061, 22060], [21932, 21933, 22061], [21933, 22062, 22061], [21933, 21934, 22063], [21933, 22063, 22062], [21934, 21935, 22063], [21935, 22064, 22063], [21935, 21936, 22065], [21935, 22065, 
22064], [21936, 21937, 22065], [21937, 22066, 22065], [21937, 21938, 22067], [21937, 22067, 22066], [21938, 21939, 22067], [21939, 22068, 22067], [21939, 21940, 22069], [21939, 22069, 22068], [21940, 21941, 22069], [21941, 22070, 22069], [21941, 21942, 22071], [21941, 22071, 22070], [21942, 21943, 22071], [21943, 22072, 22071], [21943, 21944, 22073], [21943, 22073, 22072], [21944, 21945, 22073], [21945, 22074, 22073], [21945, 21946, 22075], [21945, 22075, 22074], [21946, 21947, 22075], [21947, 22076, 22075], [21947, 21948, 22077], [21947, 22077, 22076], [21948, 21949, 22077], [21949, 22078, 22077], [21949, 21950, 22079], [21949, 22079, 22078], [21950, 21951, 22079], [21951, 22080, 22079], [21951, 21952, 22081], [21951, 22081, 22080], [21952, 21953, 22081], [21953, 22082, 22081], [21953, 21954, 22083], [21953, 22083, 22082], [21954, 21955, 22083], [21955, 22084, 22083], [21955, 21956, 22085], [21955, 22085, 22084], [21956, 21957, 22085], [21957, 22086, 22085], [21957, 21958, 22087], [21957, 22087, 22086], [21958, 21959, 22087], [21959, 22088, 22087], [21959, 21960, 22089], [21959, 22089, 22088], [21960, 21961, 22089], [21961, 22090, 22089], [21961, 21962, 22091], [21961, 22091, 22090], [21962, 21963, 22091], [21963, 22092, 22091], [21963, 21964, 22093], [21963, 22093, 22092], [21964, 21965, 22093], [21965, 22094, 22093], [21965, 21966, 22095], [21965, 22095, 22094], [21966, 21967, 22095], [21967, 22096, 22095], [21967, 21968, 22097], [21967, 22097, 22096], [21968, 21969, 22097], [21969, 22098, 22097], [21969, 21970, 22099], [21969, 22099, 22098], [21970, 21971, 22099], [21971, 22100, 22099], [21971, 21972, 22101], [21971, 22101, 22100], [21972, 21973, 22101], [21973, 22102, 22101], [21973, 21974, 22103], [21973, 22103, 22102], [21974, 21975, 22103], [21975, 22104, 22103], [21976, 21977, 22105], [21977, 22106, 22105], [21977, 21978, 22107], [21977, 22107, 22106], [21978, 21979, 22107], [21979, 22108, 22107], [21979, 21980, 22109], [21979, 22109, 22108], [21980, 21981, 22109], [21981, 22110, 22109], [21981, 21982, 22111], [21981, 22111, 22110], [21982, 21983, 22111], [21983, 22112, 22111], [21983, 21984, 22113], [21983, 22113, 22112], [21984, 21985, 22113], [21985, 22114, 22113], [21985, 21986, 22115], [21985, 22115, 22114], [21986, 21987, 22115], [21987, 22116, 22115], [21987, 21988, 22117], [21987, 22117, 22116], [21988, 21989, 22117], [21989, 22118, 22117], [21989, 21990, 22119], [21989, 22119, 22118], [21990, 21991, 22119], [21991, 22120, 22119], [21991, 21992, 22121], [21991, 22121, 22120], [21992, 21993, 22121], [21993, 22122, 22121], [21993, 21994, 22123], [21993, 22123, 22122], [21994, 21995, 22123], [21995, 22124, 22123], [21995, 21996, 22125], [21995, 22125, 22124], [21996, 21997, 22125], [21997, 22126, 22125], [21997, 21998, 22127], [21997, 22127, 22126], [21998, 21999, 22127], [21999, 22128, 22127], [21999, 22000, 22129], [21999, 22129, 22128], [22000, 22001, 22129], [22001, 22130, 22129], [22001, 22002, 22131], [22001, 22131, 22130], [22002, 22003, 22131], [22003, 22132, 22131], [22003, 22004, 22133], [22003, 22133, 22132], [22004, 22005, 22133], [22005, 22134, 22133], [22005, 22006, 22135], [22005, 22135, 22134], [22006, 22007, 22135], [22007, 22136, 22135], [22007, 22008, 22137], [22007, 22137, 22136], [22008, 22009, 22137], [22009, 22138, 22137], [22009, 22010, 22139], [22009, 22139, 22138], [22010, 22011, 22139], [22011, 22140, 22139], [22011, 22012, 22141], [22011, 22141, 22140], [22012, 22013, 22141], [22013, 22142, 22141], [22013, 22014, 22143], [22013, 22143, 22142], 
[22014, 22015, 22143], [22015, 22144, 22143], [22015, 22016, 22145], [22015, 22145, 22144], [22016, 22017, 22145], [22017, 22146, 22145], [22017, 22018, 22147], [22017, 22147, 22146], [22018, 22019, 22147], [22019, 22148, 22147], [22019, 22020, 22149], [22019, 22149, 22148], [22020, 22021, 22149], [22021, 22150, 22149], [22021, 22022, 22151], [22021, 22151, 22150], [22022, 22023, 22151], [22023, 22152, 22151], [22023, 22024, 22153], [22023, 22153, 22152], [22024, 22025, 22153], [22025, 22154, 22153], [22025, 22026, 22155], [22025, 22155, 22154], [22026, 22027, 22155], [22027, 22156, 22155], [22027, 22028, 22157], [22027, 22157, 22156], [22028, 22029, 22157], [22029, 22158, 22157], [22029, 22030, 22159], [22029, 22159, 22158], [22030, 22031, 22159], [22031, 22160, 22159], [22031, 22032, 22161], [22031, 22161, 22160], [22032, 22033, 22161], [22033, 22162, 22161], [22033, 22034, 22163], [22033, 22163, 22162], [22034, 22035, 22163], [22035, 22164, 22163], [22035, 22036, 22165], [22035, 22165, 22164], [22036, 22037, 22165], [22037, 22166, 22165], [22037, 22038, 22167], [22037, 22167, 22166], [22038, 22039, 22167], [22039, 22168, 22167], [22039, 22040, 22169], [22039, 22169, 22168], [22040, 22041, 22169], [22041, 22170, 22169], [22041, 22042, 22171], [22041, 22171, 22170], [22042, 22043, 22171], [22043, 22172, 22171], [22043, 22044, 22173], [22043, 22173, 22172], [22044, 22045, 22173], [22045, 22174, 22173], [22045, 22046, 22175], [22045, 22175, 22174], [22046, 22047, 22175], [22047, 22176, 22175], [22047, 22048, 22177], [22047, 22177, 22176], [22048, 22049, 22177], [22049, 22178, 22177], [22049, 22050, 22179], [22049, 22179, 22178], [22050, 22051, 22179], [22051, 22180, 22179], [22051, 22052, 22181], [22051, 22181, 22180], [22052, 22053, 22181], [22053, 22182, 22181], [22053, 22054, 22183], [22053, 22183, 22182], [22054, 22055, 22183], [22055, 22184, 22183], [22055, 22056, 22185], [22055, 22185, 22184], [22056, 22057, 22185], [22057, 22186, 22185], [22057, 22058, 22187], [22057, 22187, 22186], [22058, 22059, 22187], [22059, 22188, 22187], [22059, 22060, 22189], [22059, 22189, 22188], [22060, 22061, 22189], [22061, 22190, 22189], [22061, 22062, 22191], [22061, 22191, 22190], [22062, 22063, 22191], [22063, 22192, 22191], [22063, 22064, 22193], [22063, 22193, 22192], [22064, 22065, 22193], [22065, 22194, 22193], [22065, 22066, 22195], [22065, 22195, 22194], [22066, 22067, 22195], [22067, 22196, 22195], [22067, 22068, 22197], [22067, 22197, 22196], [22068, 22069, 22197], [22069, 22198, 22197], [22069, 22070, 22199], [22069, 22199, 22198], [22070, 22071, 22199], [22071, 22200, 22199], [22071, 22072, 22201], [22071, 22201, 22200], [22072, 22073, 22201], [22073, 22202, 22201], [22073, 22074, 22203], [22073, 22203, 22202], [22074, 22075, 22203], [22075, 22204, 22203], [22075, 22076, 22205], [22075, 22205, 22204], [22076, 22077, 22205], [22077, 22206, 22205], [22077, 22078, 22207], [22077, 22207, 22206], [22078, 22079, 22207], [22079, 22208, 22207], [22079, 22080, 22209], [22079, 22209, 22208], [22080, 22081, 22209], [22081, 22210, 22209], [22081, 22082, 22211], [22081, 22211, 22210], [22082, 22083, 22211], [22083, 22212, 22211], [22083, 22084, 22213], [22083, 22213, 22212], [22084, 22085, 22213], [22085, 22214, 22213], [22085, 22086, 22215], [22085, 22215, 22214], [22086, 22087, 22215], [22087, 22216, 22215], [22087, 22088, 22217], [22087, 22217, 22216], [22088, 22089, 22217], [22089, 22218, 22217], [22089, 22090, 22219], [22089, 22219, 22218], [22090, 22091, 22219], [22091, 22220, 22219], [22091, 
22092, 22221], [22091, 22221, 22220], [22092, 22093, 22221], [22093, 22222, 22221], [22093, 22094, 22223], [22093, 22223, 22222], [22094, 22095, 22223], [22095, 22224, 22223], [22095, 22096, 22225], [22095, 22225, 22224], [22096, 22097, 22225], [22097, 22226, 22225], [22097, 22098, 22227], [22097, 22227, 22226], [22098, 22099, 22227], [22099, 22228, 22227], [22099, 22100, 22229], [22099, 22229, 22228], [22100, 22101, 22229], [22101, 22230, 22229], [22101, 22102, 22231], [22101, 22231, 22230], [22102, 22103, 22231], [22103, 22232, 22231], [22103, 22104, 22233], [22103, 22233, 22232], [22105, 22106, 22235], [22105, 22235, 22234], [22106, 22107, 22235], [22107, 22236, 22235], [22107, 22108, 22237], [22107, 22237, 22236], [22108, 22109, 22237], [22109, 22238, 22237], [22109, 22110, 22239], [22109, 22239, 22238], [22110, 22111, 22239], [22111, 22240, 22239], [22111, 22112, 22241], [22111, 22241, 22240], [22112, 22113, 22241], [22113, 22242, 22241], [22113, 22114, 22243], [22113, 22243, 22242], [22114, 22115, 22243], [22115, 22244, 22243], [22115, 22116, 22245], [22115, 22245, 22244], [22116, 22117, 22245], [22117, 22246, 22245], [22117, 22118, 22247], [22117, 22247, 22246], [22118, 22119, 22247], [22119, 22248, 22247], [22119, 22120, 22249], [22119, 22249, 22248], [22120, 22121, 22249], [22121, 22250, 22249], [22121, 22122, 22251], [22121, 22251, 22250], [22122, 22123, 22251], [22123, 22252, 22251], [22123, 22124, 22253], [22123, 22253, 22252], [22124, 22125, 22253], [22125, 22254, 22253], [22125, 22126, 22255], [22125, 22255, 22254], [22126, 22127, 22255], [22127, 22256, 22255], [22127, 22128, 22257], [22127, 22257, 22256], [22128, 22129, 22257], [22129, 22258, 22257], [22129, 22130, 22259], [22129, 22259, 22258], [22130, 22131, 22259], [22131, 22260, 22259], [22131, 22132, 22261], [22131, 22261, 22260], [22132, 22133, 22261], [22133, 22262, 22261], [22133, 22134, 22263], [22133, 22263, 22262], [22134, 22135, 22263], [22135, 22264, 22263], [22135, 22136, 22265], [22135, 22265, 22264], [22136, 22137, 22265], [22137, 22266, 22265], [22137, 22138, 22267], [22137, 22267, 22266], [22138, 22139, 22267], [22139, 22268, 22267], [22139, 22140, 22269], [22139, 22269, 22268], [22140, 22141, 22269], [22141, 22270, 22269], [22141, 22142, 22271], [22141, 22271, 22270], [22142, 22143, 22271], [22143, 22272, 22271], [22143, 22144, 22273], [22143, 22273, 22272], [22144, 22145, 22273], [22145, 22274, 22273], [22145, 22146, 22275], [22145, 22275, 22274], [22146, 22147, 22275], [22147, 22276, 22275], [22147, 22148, 22277], [22147, 22277, 22276], [22148, 22149, 22277], [22149, 22278, 22277], [22149, 22150, 22279], [22149, 22279, 22278], [22150, 22151, 22279], [22151, 22280, 22279], [22151, 22152, 22281], [22151, 22281, 22280], [22152, 22153, 22281], [22153, 22282, 22281], [22153, 22154, 22283], [22153, 22283, 22282], [22154, 22155, 22283], [22155, 22284, 22283], [22155, 22156, 22285], [22155, 22285, 22284], [22156, 22157, 22285], [22157, 22286, 22285], [22157, 22158, 22287], [22157, 22287, 22286], [22158, 22159, 22287], [22159, 22288, 22287], [22159, 22160, 22289], [22159, 22289, 22288], [22160, 22161, 22289], [22161, 22290, 22289], [22161, 22162, 22291], [22161, 22291, 22290], [22162, 22163, 22291], [22163, 22292, 22291], [22163, 22164, 22293], [22163, 22293, 22292], [22164, 22165, 22293], [22165, 22294, 22293], [22165, 22166, 22295], [22165, 22295, 22294], [22166, 22167, 22295], [22167, 22296, 22295], [22167, 22168, 22297], [22167, 22297, 22296], [22168, 22169, 22297], [22169, 22298, 22297], [22169, 22170, 
22299], [22169, 22299, 22298], [22170, 22171, 22299], [22171, 22300, 22299], [22171, 22172, 22301], [22171, 22301, 22300], [22172, 22173, 22301], [22173, 22302, 22301], [22173, 22174, 22303], [22173, 22303, 22302], [22174, 22175, 22303], [22175, 22304, 22303], [22175, 22176, 22305], [22175, 22305, 22304], [22176, 22177, 22305], [22177, 22306, 22305], [22177, 22178, 22307], [22177, 22307, 22306], [22178, 22179, 22307], [22179, 22308, 22307], [22179, 22180, 22309], [22179, 22309, 22308], [22180, 22181, 22309], [22181, 22310, 22309], [22181, 22182, 22311], [22181, 22311, 22310], [22182, 22183, 22311], [22183, 22312, 22311], [22183, 22184, 22313], [22183, 22313, 22312], [22184, 22185, 22313], [22185, 22314, 22313], [22185, 22186, 22315], [22185, 22315, 22314], [22186, 22187, 22315], [22187, 22316, 22315], [22187, 22188, 22317], [22187, 22317, 22316], [22188, 22189, 22317], [22189, 22318, 22317], [22189, 22190, 22319], [22189, 22319, 22318], [22190, 22191, 22319], [22191, 22320, 22319], [22191, 22192, 22321], [22191, 22321, 22320], [22192, 22193, 22321], [22193, 22322, 22321], [22193, 22194, 22323], [22193, 22323, 22322], [22194, 22195, 22323], [22195, 22324, 22323], [22195, 22196, 22325], [22195, 22325, 22324], [22196, 22197, 22325], [22197, 22326, 22325], [22197, 22198, 22327], [22197, 22327, 22326], [22198, 22199, 22327], [22199, 22328, 22327], [22199, 22200, 22329], [22199, 22329, 22328], [22200, 22201, 22329], [22201, 22330, 22329], [22201, 22202, 22331], [22201, 22331, 22330], [22202, 22203, 22331], [22203, 22332, 22331], [22203, 22204, 22333], [22203, 22333, 22332], [22204, 22205, 22333], [22205, 22334, 22333], [22205, 22206, 22335], [22205, 22335, 22334], [22206, 22207, 22335], [22207, 22336, 22335], [22207, 22208, 22337], [22207, 22337, 22336], [22208, 22209, 22337], [22209, 22338, 22337], [22209, 22210, 22339], [22209, 22339, 22338], [22210, 22211, 22339], [22211, 22340, 22339], [22211, 22212, 22341], [22211, 22341, 22340], [22212, 22213, 22341], [22213, 22342, 22341], [22213, 22214, 22343], [22213, 22343, 22342], [22214, 22215, 22343], [22215, 22344, 22343], [22215, 22216, 22345], [22215, 22345, 22344], [22216, 22217, 22345], [22217, 22346, 22345], [22217, 22218, 22347], [22217, 22347, 22346], [22218, 22219, 22347], [22219, 22348, 22347], [22219, 22220, 22349], [22219, 22349, 22348], [22220, 22221, 22349], [22221, 22350, 22349], [22221, 22222, 22351], [22221, 22351, 22350], [22222, 22223, 22351], [22223, 22352, 22351], [22223, 22224, 22353], [22223, 22353, 22352], [22224, 22225, 22353], [22225, 22354, 22353], [22225, 22226, 22355], [22225, 22355, 22354], [22226, 22227, 22355], [22227, 22356, 22355], [22227, 22228, 22357], [22227, 22357, 22356], [22228, 22229, 22357], [22229, 22358, 22357], [22229, 22230, 22359], [22229, 22359, 22358], [22230, 22231, 22359], [22231, 22360, 22359], [22231, 22232, 22361], [22231, 22361, 22360], [22232, 22233, 22361], [22233, 22362, 22361], [22234, 22235, 22363], [22235, 22364, 22363], [22235, 22236, 22365], [22235, 22365, 22364], [22236, 22237, 22365], [22237, 22366, 22365], [22237, 22238, 22367], [22237, 22367, 22366], [22238, 22239, 22367], [22239, 22368, 22367], [22239, 22240, 22369], [22239, 22369, 22368], [22240, 22241, 22369], [22241, 22370, 22369], [22241, 22242, 22371], [22241, 22371, 22370], [22242, 22243, 22371], [22243, 22372, 22371], [22243, 22244, 22373], [22243, 22373, 22372], [22244, 22245, 22373], [22245, 22374, 22373], [22245, 22246, 22375], [22245, 22375, 22374], [22246, 22247, 22375], [22247, 22376, 22375], [22247, 22248, 22377], 
[22247, 22377, 22376], [22248, 22249, 22377], [22249, 22378, 22377], [22249, 22250, 22379], [22249, 22379, 22378], [22250, 22251, 22379], [22251, 22380, 22379], [22251, 22252, 22381], [22251, 22381, 22380], [22252, 22253, 22381], [22253, 22382, 22381], [22253, 22254, 22383], [22253, 22383, 22382], [22254, 22255, 22383], [22255, 22384, 22383], [22255, 22256, 22385], [22255, 22385, 22384], [22256, 22257, 22385], [22257, 22386, 22385], [22257, 22258, 22387], [22257, 22387, 22386], [22258, 22259, 22387], [22259, 22388, 22387], [22259, 22260, 22389], [22259, 22389, 22388], [22260, 22261, 22389], [22261, 22390, 22389], [22261, 22262, 22391], [22261, 22391, 22390], [22262, 22263, 22391], [22263, 22392, 22391], [22263, 22264, 22393], [22263, 22393, 22392], [22264, 22265, 22393], [22265, 22394, 22393], [22265, 22266, 22395], [22265, 22395, 22394], [22266, 22267, 22395], [22267, 22396, 22395], [22267, 22268, 22397], [22267, 22397, 22396], [22268, 22269, 22397], [22269, 22398, 22397], [22269, 22270, 22399], [22269, 22399, 22398], [22270, 22271, 22399], [22271, 22400, 22399], [22271, 22272, 22401], [22271, 22401, 22400], [22272, 22273, 22401], [22273, 22402, 22401], [22273, 22274, 22403], [22273, 22403, 22402], [22274, 22275, 22403], [22275, 22404, 22403], [22275, 22276, 22405], [22275, 22405, 22404], [22276, 22277, 22405], [22277, 22406, 22405], [22277, 22278, 22407], [22277, 22407, 22406], [22278, 22279, 22407], [22279, 22408, 22407], [22279, 22280, 22409], [22279, 22409, 22408], [22280, 22281, 22409], [22281, 22410, 22409], [22281, 22282, 22411], [22281, 22411, 22410], [22282, 22283, 22411], [22283, 22412, 22411], [22283, 22284, 22413], [22283, 22413, 22412], [22284, 22285, 22413], [22285, 22414, 22413], [22285, 22286, 22415], [22285, 22415, 22414], [22286, 22287, 22415], [22287, 22416, 22415], [22287, 22288, 22417], [22287, 22417, 22416], [22288, 22289, 22417], [22289, 22418, 22417], [22289, 22290, 22419], [22289, 22419, 22418], [22290, 22291, 22419], [22291, 22420, 22419], [22291, 22292, 22421], [22291, 22421, 22420], [22292, 22293, 22421], [22293, 22422, 22421], [22293, 22294, 22423], [22293, 22423, 22422], [22294, 22295, 22423], [22295, 22424, 22423], [22295, 22296, 22425], [22295, 22425, 22424], [22296, 22297, 22425], [22297, 22426, 22425], [22297, 22298, 22427], [22297, 22427, 22426], [22298, 22299, 22427], [22299, 22428, 22427], [22299, 22300, 22429], [22299, 22429, 22428], [22300, 22301, 22429], [22301, 22430, 22429], [22301, 22302, 22431], [22301, 22431, 22430], [22302, 22303, 22431], [22303, 22432, 22431], [22303, 22304, 22433], [22303, 22433, 22432], [22304, 22305, 22433], [22305, 22434, 22433], [22305, 22306, 22435], [22305, 22435, 22434], [22306, 22307, 22435], [22307, 22436, 22435], [22307, 22308, 22437], [22307, 22437, 22436], [22308, 22309, 22437], [22309, 22438, 22437], [22309, 22310, 22439], [22309, 22439, 22438], [22310, 22311, 22439], [22311, 22440, 22439], [22311, 22312, 22441], [22311, 22441, 22440], [22312, 22313, 22441], [22313, 22442, 22441], [22313, 22314, 22443], [22313, 22443, 22442], [22314, 22315, 22443], [22315, 22444, 22443], [22315, 22316, 22445], [22315, 22445, 22444], [22316, 22317, 22445], [22317, 22446, 22445], [22317, 22318, 22447], [22317, 22447, 22446], [22318, 22319, 22447], [22319, 22448, 22447], [22319, 22320, 22449], [22319, 22449, 22448], [22320, 22321, 22449], [22321, 22450, 22449], [22321, 22322, 22451], [22321, 22451, 22450], [22322, 22323, 22451], [22323, 22452, 22451], [22323, 22324, 22453], [22323, 22453, 22452], [22324, 22325, 22453], [22325, 
22454, 22453], [22325, 22326, 22455], [22325, 22455, 22454], [22326, 22327, 22455], [22327, 22456, 22455], [22327, 22328, 22457], [22327, 22457, 22456], [22328, 22329, 22457], [22329, 22458, 22457], [22329, 22330, 22459], [22329, 22459, 22458], [22330, 22331, 22459], [22331, 22460, 22459], [22331, 22332, 22461], [22331, 22461, 22460], [22332, 22333, 22461], [22333, 22462, 22461], [22333, 22334, 22463], [22333, 22463, 22462], [22334, 22335, 22463], [22335, 22464, 22463], [22335, 22336, 22465], [22335, 22465, 22464], [22336, 22337, 22465], [22337, 22466, 22465], [22337, 22338, 22467], [22337, 22467, 22466], [22338, 22339, 22467], [22339, 22468, 22467], [22339, 22340, 22469], [22339, 22469, 22468], [22340, 22341, 22469], [22341, 22470, 22469], [22341, 22342, 22471], [22341, 22471, 22470], [22342, 22343, 22471], [22343, 22472, 22471], [22343, 22344, 22473], [22343, 22473, 22472], [22344, 22345, 22473], [22345, 22474, 22473], [22345, 22346, 22475], [22345, 22475, 22474], [22346, 22347, 22475], [22347, 22476, 22475], [22347, 22348, 22477], [22347, 22477, 22476], [22348, 22349, 22477], [22349, 22478, 22477], [22349, 22350, 22479], [22349, 22479, 22478], [22350, 22351, 22479], [22351, 22480, 22479], [22351, 22352, 22481], [22351, 22481, 22480], [22352, 22353, 22481], [22353, 22482, 22481], [22353, 22354, 22483], [22353, 22483, 22482], [22354, 22355, 22483], [22355, 22484, 22483], [22355, 22356, 22485], [22355, 22485, 22484], [22356, 22357, 22485], [22357, 22486, 22485], [22357, 22358, 22487], [22357, 22487, 22486], [22358, 22359, 22487], [22359, 22488, 22487], [22359, 22360, 22489], [22359, 22489, 22488], [22360, 22361, 22489], [22361, 22490, 22489], [22361, 22362, 22491], [22361, 22491, 22490], [16171, 16300, 22493], [16171, 22493, 22492], [16300, 16429, 22493], [16429, 22494, 22493], [16429, 16558, 22495], [16429, 22495, 22494], [16558, 16687, 22495], [16687, 22496, 22495], [16687, 16816, 22497], [16687, 22497, 22496], [16816, 16945, 22497], [16945, 22498, 22497], [16945, 17074, 22499], [16945, 22499, 22498], [17074, 17203, 22499], [17203, 22500, 22499], [17203, 17332, 22501], [17203, 22501, 22500], [17332, 17461, 22501], [17461, 22502, 22501], [17461, 17590, 22503], [17461, 22503, 22502], [17590, 17719, 22503], [17719, 22504, 22503], [17719, 17848, 22505], [17719, 22505, 22504], [17848, 17977, 22505], [17977, 22506, 22505], [17977, 18106, 22507], [17977, 22507, 22506], [18106, 18235, 22507], [18235, 22508, 22507], [18235, 18364, 22509], [18235, 22509, 22508], [18364, 18493, 22509], [18493, 22510, 22509], [18493, 18622, 22511], [18493, 22511, 22510], [18622, 18751, 22511], [18751, 22512, 22511], [18751, 18880, 22513], [18751, 22513, 22512], [18880, 19009, 22513], [19009, 22514, 22513], [19009, 19138, 22515], [19009, 22515, 22514], [19138, 0, 22515], [0, 129, 22515], [22492, 22493, 22516], [22493, 22517, 22516], [22493, 22494, 22518], [22493, 22518, 22517], [22494, 22495, 22518], [22495, 22519, 22518], [22495, 22496, 22520], [22495, 22520, 22519], [22496, 22497, 22520], [22497, 22521, 22520], [22497, 22498, 22522], [22497, 22522, 22521], [22498, 22499, 22522], [22499, 22523, 22522], [22499, 22500, 22524], [22499, 22524, 22523], [22500, 22501, 22524], [22501, 22525, 22524], [22501, 22502, 22526], [22501, 22526, 22525], [22502, 22503, 22526], [22503, 22527, 22526], [22503, 22504, 22528], [22503, 22528, 22527], [22504, 22505, 22528], [22505, 22529, 22528], [22505, 22506, 22530], [22505, 22530, 22529], [22506, 22507, 22530], [22507, 22531, 22530], [22507, 22508, 22532], [22507, 22532, 22531], [22508, 
22509, 22532], [22509, 22533, 22532], [22509, 22510, 22534], [22509, 22534, 22533], [22510, 22511, 22534], [22511, 22535, 22534], [22511, 22512, 22536], [22511, 22536, 22535], [22512, 22513, 22536], [22513, 22537, 22536], [22513, 22514, 22538], [22513, 22538, 22537], [22514, 22515, 22538], [22515, 22539, 22538], [22515, 129, 258], [22515, 258, 22539], [22516, 22517, 22541], [22516, 22541, 22540], [22517, 22518, 22541], [22518, 22542, 22541], [22518, 22519, 22543], [22518, 22543, 22542], [22519, 22520, 22543], [22520, 22544, 22543], [22520, 22521, 22545], [22520, 22545, 22544], [22521, 22522, 22545], [22522, 22546, 22545], [22522, 22523, 22547], [22522, 22547, 22546], [22523, 22524, 22547], [22524, 22548, 22547], [22524, 22525, 22549], [22524, 22549, 22548], [22525, 22526, 22549], [22526, 22550, 22549], [22526, 22527, 22551], [22526, 22551, 22550], [22527, 22528, 22551], [22528, 22552, 22551], [22528, 22529, 22553], [22528, 22553, 22552], [22529, 22530, 22553], [22530, 22554, 22553], [22530, 22531, 22555], [22530, 22555, 22554], [22531, 22532, 22555], [22532, 22556, 22555], [22532, 22533, 22557], [22532, 22557, 22556], [22533, 22534, 22557], [22534, 22558, 22557], [22534, 22535, 22559], [22534, 22559, 22558], [22535, 22536, 22559], [22536, 22560, 22559], [22536, 22537, 22561], [22536, 22561, 22560], [22537, 22538, 22561], [22538, 22562, 22561], [22538, 22539, 22563], [22538, 22563, 22562], [22539, 258, 22563], [258, 387, 22563], [22540, 22541, 22564], [22541, 22565, 22564], [22541, 22542, 22566], [22541, 22566, 22565], [22542, 22543, 22566], [22543, 22567, 22566], [22543, 22544, 22568], [22543, 22568, 22567], [22544, 22545, 22568], [22545, 22569, 22568], [22545, 22546, 22570], [22545, 22570, 22569], [22546, 22547, 22570], [22547, 22571, 22570], [22547, 22548, 22572], [22547, 22572, 22571], [22548, 22549, 22572], [22549, 22573, 22572], [22549, 22550, 22574], [22549, 22574, 22573], [22550, 22551, 22574], [22551, 22575, 22574], [22551, 22552, 22576], [22551, 22576, 22575], [22552, 22553, 22576], [22553, 22577, 22576], [22553, 22554, 22578], [22553, 22578, 22577], [22554, 22555, 22578], [22555, 22579, 22578], [22555, 22556, 22580], [22555, 22580, 22579], [22556, 22557, 22580], [22557, 22581, 22580], [22557, 22558, 22582], [22557, 22582, 22581], [22558, 22559, 22582], [22559, 22583, 22582], [22559, 22560, 22584], [22559, 22584, 22583], [22560, 22561, 22584], [22561, 22585, 22584], [22561, 22562, 22586], [22561, 22586, 22585], [22562, 22563, 22586], [22563, 22587, 22586], [22563, 387, 516], [22563, 516, 22587], [22564, 22565, 22589], [22564, 22589, 22588], [22565, 22566, 22589], [22566, 22590, 22589], [22566, 22567, 22591], [22566, 22591, 22590], [22567, 22568, 22591], [22568, 22592, 22591], [22568, 22569, 22593], [22568, 22593, 22592], [22569, 22570, 22593], [22570, 22594, 22593], [22570, 22571, 22595], [22570, 22595, 22594], [22571, 22572, 22595], [22572, 22596, 22595], [22572, 22573, 22597], [22572, 22597, 22596], [22573, 22574, 22597], [22574, 22598, 22597], [22574, 22575, 22599], [22574, 22599, 22598], [22575, 22576, 22599], [22576, 22600, 22599], [22576, 22577, 22601], [22576, 22601, 22600], [22577, 22578, 22601], [22578, 22602, 22601], [22578, 22579, 22603], [22578, 22603, 22602], [22579, 22580, 22603], [22580, 22604, 22603], [22580, 22581, 22605], [22580, 22605, 22604], [22581, 22582, 22605], [22582, 22606, 22605], [22582, 22583, 22607], [22582, 22607, 22606], [22583, 22584, 22607], [22584, 22608, 22607], [22584, 22585, 22609], [22584, 22609, 22608], [22585, 22586, 22609], [22586, 22610, 
22609], [22586, 22587, 22611], [22586, 22611, 22610], [22587, 516, 22611], [516, 645, 22611], [22588, 22589, 22612], [22589, 22613, 22612], [22589, 22590, 22614], [22589, 22614, 22613], [22590, 22591, 22614], [22591, 22615, 22614], [22591, 22592, 22616], [22591, 22616, 22615], [22592, 22593, 22616], [22593, 22617, 22616], [22593, 22594, 22618], [22593, 22618, 22617], [22594, 22595, 22618], [22595, 22619, 22618], [22595, 22596, 22620], [22595, 22620, 22619], [22596, 22597, 22620], [22597, 22621, 22620], [22597, 22598, 22622], [22597, 22622, 22621], [22598, 22599, 22622], [22599, 22623, 22622], [22599, 22600, 22624], [22599, 22624, 22623], [22600, 22601, 22624], [22601, 22625, 22624], [22601, 22602, 22626], [22601, 22626, 22625], [22602, 22603, 22626], [22603, 22627, 22626], [22603, 22604, 22628], [22603, 22628, 22627], [22604, 22605, 22628], [22605, 22629, 22628], [22605, 22606, 22630], [22605, 22630, 22629], [22606, 22607, 22630], [22607, 22631, 22630], [22607, 22608, 22632], [22607, 22632, 22631], [22608, 22609, 22632], [22609, 22633, 22632], [22609, 22610, 22634], [22609, 22634, 22633], [22610, 22611, 22634], [22611, 22635, 22634], [22611, 645, 774], [22611, 774, 22635], [22612, 22613, 22637], [22612, 22637, 22636], [22613, 22614, 22637], [22614, 22638, 22637], [22614, 22615, 22639], [22614, 22639, 22638], [22615, 22616, 22639], [22616, 22640, 22639], [22616, 22617, 22641], [22616, 22641, 22640], [22617, 22618, 22641], [22618, 22642, 22641], [22618, 22619, 22643], [22618, 22643, 22642], [22619, 22620, 22643], [22620, 22644, 22643], [22620, 22621, 22645], [22620, 22645, 22644], [22621, 22622, 22645], [22622, 22646, 22645], [22622, 22623, 22647], [22622, 22647, 22646], [22623, 22624, 22647], [22624, 22648, 22647], [22624, 22625, 22649], [22624, 22649, 22648], [22625, 22626, 22649], [22626, 22650, 22649], [22626, 22627, 22651], [22626, 22651, 22650], [22627, 22628, 22651], [22628, 22652, 22651], [22628, 22629, 22653], [22628, 22653, 22652], [22629, 22630, 22653], [22630, 22654, 22653], [22630, 22631, 22655], [22630, 22655, 22654], [22631, 22632, 22655], [22632, 22656, 22655], [22632, 22633, 22657], [22632, 22657, 22656], [22633, 22634, 22657], [22634, 22658, 22657], [22634, 22635, 22659], [22634, 22659, 22658], [22635, 774, 22659], [774, 903, 22659], [22636, 22637, 22660], [22637, 22661, 22660], [22637, 22638, 22662], [22637, 22662, 22661], [22638, 22639, 22662], [22639, 22663, 22662], [22639, 22640, 22664], [22639, 22664, 22663], [22640, 22641, 22664], [22641, 22665, 22664], [22641, 22642, 22666], [22641, 22666, 22665], [22642, 22643, 22666], [22643, 22667, 22666], [22643, 22644, 22668], [22643, 22668, 22667], [22644, 22645, 22668], [22645, 22669, 22668], [22645, 22646, 22670], [22645, 22670, 22669], [22646, 22647, 22670], [22647, 22671, 22670], [22647, 22648, 22672], [22647, 22672, 22671], [22648, 22649, 22672], [22649, 22673, 22672], [22649, 22650, 22674], [22649, 22674, 22673], [22650, 22651, 22674], [22651, 22675, 22674], [22651, 22652, 22676], [22651, 22676, 22675], [22652, 22653, 22676], [22653, 22677, 22676], [22653, 22654, 22678], [22653, 22678, 22677], [22654, 22655, 22678], [22655, 22679, 22678], [22655, 22656, 22680], [22655, 22680, 22679], [22656, 22657, 22680], [22657, 22681, 22680], [22657, 22658, 22682], [22657, 22682, 22681], [22658, 22659, 22682], [22659, 22683, 22682], [22659, 903, 1032], [22659, 1032, 22683], [22660, 22661, 22685], [22660, 22685, 22684], [22661, 22662, 22685], [22662, 22686, 22685], [22662, 22663, 22687], [22662, 22687, 22686], [22663, 22664, 22687], 
[22664, 22688, 22687], [22664, 22665, 22689], [22664, 22689, 22688], [22665, 22666, 22689], [22666, 22690, 22689], [22666, 22667, 22691], [22666, 22691, 22690], [22667, 22668, 22691], [22668, 22692, 22691], [22668, 22669, 22693], [22668, 22693, 22692], [22669, 22670, 22693], [22670, 22694, 22693], [22670, 22671, 22695], [22670, 22695, 22694], [22671, 22672, 22695], [22672, 22696, 22695], [22672, 22673, 22697], [22672, 22697, 22696], [22673, 22674, 22697], [22674, 22698, 22697], [22674, 22675, 22699], [22674, 22699, 22698], [22675, 22676, 22699], [22676, 22700, 22699], [22676, 22677, 22701], [22676, 22701, 22700], [22677, 22678, 22701], [22678, 22702, 22701], [22678, 22679, 22703], [22678, 22703, 22702], [22679, 22680, 22703], [22680, 22704, 22703], [22680, 22681, 22705], [22680, 22705, 22704], [22681, 22682, 22705], [22682, 22706, 22705], [22682, 22683, 22707], [22682, 22707, 22706], [22683, 1032, 22707], [1032, 1161, 22707], [22684, 22685, 22708], [22685, 22709, 22708], [22685, 22686, 22710], [22685, 22710, 22709], [22686, 22687, 22710], [22687, 22711, 22710], [22687, 22688, 22712], [22687, 22712, 22711], [22688, 22689, 22712], [22689, 22713, 22712], [22689, 22690, 22714], [22689, 22714, 22713], [22690, 22691, 22714], [22691, 22715, 22714], [22691, 22692, 22716], [22691, 22716, 22715], [22692, 22693, 22716], [22693, 22717, 22716], [22693, 22694, 22718], [22693, 22718, 22717], [22694, 22695, 22718], [22695, 22719, 22718], [22695, 22696, 22720], [22695, 22720, 22719], [22696, 22697, 22720], [22697, 22721, 22720], [22697, 22698, 22722], [22697, 22722, 22721], [22698, 22699, 22722], [22699, 22723, 22722], [22699, 22700, 22724], [22699, 22724, 22723], [22700, 22701, 22724], [22701, 22725, 22724], [22701, 22702, 22726], [22701, 22726, 22725], [22702, 22703, 22726], [22703, 22727, 22726], [22703, 22704, 22728], [22703, 22728, 22727], [22704, 22705, 22728], [22705, 22729, 22728], [22705, 22706, 22730], [22705, 22730, 22729], [22706, 22707, 22730], [22707, 22731, 22730], [22707, 1161, 1290], [22707, 1290, 22731], [22708, 22709, 22733], [22708, 22733, 22732], [22709, 22710, 22733], [22710, 22734, 22733], [22710, 22711, 22735], [22710, 22735, 22734], [22711, 22712, 22735], [22712, 22736, 22735], [22712, 22713, 22737], [22712, 22737, 22736], [22713, 22714, 22737], [22714, 22738, 22737], [22714, 22715, 22739], [22714, 22739, 22738], [22715, 22716, 22739], [22716, 22740, 22739], [22716, 22717, 22741], [22716, 22741, 22740], [22717, 22718, 22741], [22718, 22742, 22741], [22718, 22719, 22743], [22718, 22743, 22742], [22719, 22720, 22743], [22720, 22744, 22743], [22720, 22721, 22745], [22720, 22745, 22744], [22721, 22722, 22745], [22722, 22746, 22745], [22722, 22723, 22747], [22722, 22747, 22746], [22723, 22724, 22747], [22724, 22748, 22747], [22724, 22725, 22749], [22724, 22749, 22748], [22725, 22726, 22749], [22726, 22750, 22749], [22726, 22727, 22751], [22726, 22751, 22750], [22727, 22728, 22751], [22728, 22752, 22751], [22728, 22729, 22753], [22728, 22753, 22752], [22729, 22730, 22753], [22730, 22754, 22753], [22730, 22731, 22755], [22730, 22755, 22754], [22731, 1290, 22755], [1290, 1419, 22755], [22732, 22733, 22756], [22733, 22757, 22756], [22733, 22734, 22758], [22733, 22758, 22757], [22734, 22735, 22758], [22735, 22759, 22758], [22735, 22736, 22760], [22735, 22760, 22759], [22736, 22737, 22760], [22737, 22761, 22760], [22737, 22738, 22762], [22737, 22762, 22761], [22738, 22739, 22762], [22739, 22763, 22762], [22739, 22740, 22764], [22739, 22764, 22763], [22740, 22741, 22764], [22741, 22765, 
22764], [22741, 22742, 22766], [22741, 22766, 22765], [22742, 22743, 22766], [22743, 22767, 22766], [22743, 22744, 22768], [22743, 22768, 22767], [22744, 22745, 22768], [22745, 22769, 22768], [22745, 22746, 22770], [22745, 22770, 22769], [22746, 22747, 22770], [22747, 22771, 22770], [22747, 22748, 22772], [22747, 22772, 22771], [22748, 22749, 22772], [22749, 22773, 22772], [22749, 22750, 22774], [22749, 22774, 22773], [22750, 22751, 22774], [22751, 22775, 22774], [22751, 22752, 22776], [22751, 22776, 22775], [22752, 22753, 22776], [22753, 22777, 22776], [22753, 22754, 22778], [22753, 22778, 22777], [22754, 22755, 22778], [22755, 22779, 22778], [22755, 1419, 1548], [22755, 1548, 22779], [22756, 22757, 22781], [22756, 22781, 22780], [22757, 22758, 22781], [22758, 22782, 22781], [22758, 22759, 22783], [22758, 22783, 22782], [22759, 22760, 22783], [22760, 22784, 22783], [22760, 22761, 22785], [22760, 22785, 22784], [22761, 22762, 22785], [22762, 22786, 22785], [22762, 22763, 22787], [22762, 22787, 22786], [22763, 22764, 22787], [22764, 22788, 22787], [22764, 22765, 22789], [22764, 22789, 22788], [22765, 22766, 22789], [22766, 22790, 22789], [22766, 22767, 22791], [22766, 22791, 22790], [22767, 22768, 22791], [22768, 22792, 22791], [22768, 22769, 22793], [22768, 22793, 22792], [22769, 22770, 22793], [22770, 22794, 22793], [22770, 22771, 22795], [22770, 22795, 22794], [22771, 22772, 22795], [22772, 22796, 22795], [22772, 22773, 22797], [22772, 22797, 22796], [22773, 22774, 22797], [22774, 22798, 22797], [22774, 22775, 22799], [22774, 22799, 22798], [22775, 22776, 22799], [22776, 22800, 22799], [22776, 22777, 22801], [22776, 22801, 22800], [22777, 22778, 22801], [22778, 22802, 22801], [22778, 22779, 22803], [22778, 22803, 22802], [22779, 1548, 22803], [1548, 1677, 22803], [22780, 22781, 22804], [22781, 22805, 22804], [22781, 22782, 22806], [22781, 22806, 22805], [22782, 22783, 22806], [22783, 22807, 22806], [22783, 22784, 22808], [22783, 22808, 22807], [22784, 22785, 22808], [22785, 22809, 22808], [22785, 22786, 22810], [22785, 22810, 22809], [22786, 22787, 22810], [22787, 22811, 22810], [22787, 22788, 22812], [22787, 22812, 22811], [22788, 22789, 22812], [22789, 22813, 22812], [22789, 22790, 22814], [22789, 22814, 22813], [22790, 22791, 22814], [22791, 22815, 22814], [22791, 22792, 22816], [22791, 22816, 22815], [22792, 22793, 22816], [22793, 22817, 22816], [22793, 22794, 22818], [22793, 22818, 22817], [22794, 22795, 22818], [22795, 22819, 22818], [22795, 22796, 22820], [22795, 22820, 22819], [22796, 22797, 22820], [22797, 22821, 22820], [22797, 22798, 22822], [22797, 22822, 22821], [22798, 22799, 22822], [22799, 22823, 22822], [22799, 22800, 22824], [22799, 22824, 22823], [22800, 22801, 22824], [22801, 22825, 22824], [22801, 22802, 22826], [22801, 22826, 22825], [22802, 22803, 22826], [22803, 22827, 22826], [22803, 1677, 1806], [22803, 1806, 22827], [22804, 22805, 22829], [22804, 22829, 22828], [22805, 22806, 22829], [22806, 22830, 22829], [22806, 22807, 22831], [22806, 22831, 22830], [22807, 22808, 22831], [22808, 22832, 22831], [22808, 22809, 22833], [22808, 22833, 22832], [22809, 22810, 22833], [22810, 22834, 22833], [22810, 22811, 22835], [22810, 22835, 22834], [22811, 22812, 22835], [22812, 22836, 22835], [22812, 22813, 22837], [22812, 22837, 22836], [22813, 22814, 22837], [22814, 22838, 22837], [22814, 22815, 22839], [22814, 22839, 22838], [22815, 22816, 22839], [22816, 22840, 22839], [22816, 22817, 22841], [22816, 22841, 22840], [22817, 22818, 22841], [22818, 22842, 22841], [22818, 
22819, 22843], [22818, 22843, 22842], [22819, 22820, 22843], [22820, 22844, 22843], [22820, 22821, 22845], [22820, 22845, 22844], [22821, 22822, 22845], [22822, 22846, 22845], [22822, 22823, 22847], [22822, 22847, 22846], [22823, 22824, 22847], [22824, 22848, 22847], [22824, 22825, 22849], [22824, 22849, 22848], [22825, 22826, 22849], [22826, 22850, 22849], [22826, 22827, 22851], [22826, 22851, 22850], [22827, 1806, 22851], [1806, 1935, 22851], [22828, 22829, 22852], [22829, 22853, 22852], [22829, 22830, 22854], [22829, 22854, 22853], [22830, 22831, 22854], [22831, 22855, 22854], [22831, 22832, 22856], [22831, 22856, 22855], [22832, 22833, 22856], [22833, 22857, 22856], [22833, 22834, 22858], [22833, 22858, 22857], [22834, 22835, 22858], [22835, 22859, 22858], [22835, 22836, 22860], [22835, 22860, 22859], [22836, 22837, 22860], [22837, 22861, 22860], [22837, 22838, 22862], [22837, 22862, 22861], [22838, 22839, 22862], [22839, 22863, 22862], [22839, 22840, 22864], [22839, 22864, 22863], [22840, 22841, 22864], [22841, 22865, 22864], [22841, 22842, 22866], [22841, 22866, 22865], [22842, 22843, 22866], [22843, 22867, 22866], [22843, 22844, 22868], [22843, 22868, 22867], [22844, 22845, 22868], [22845, 22869, 22868], [22845, 22846, 22870], [22845, 22870, 22869], [22846, 22847, 22870], [22847, 22871, 22870], [22847, 22848, 22872], [22847, 22872, 22871], [22848, 22849, 22872], [22849, 22873, 22872], [22849, 22850, 22874], [22849, 22874, 22873], [22850, 22851, 22874], [22851, 22875, 22874], [22851, 1935, 2064], [22851, 2064, 22875], [22852, 22853, 22877], [22852, 22877, 22876], [22853, 22854, 22877], [22854, 22878, 22877], [22854, 22855, 22879], [22854, 22879, 22878], [22855, 22856, 22879], [22856, 22880, 22879], [22856, 22857, 22881], [22856, 22881, 22880], [22857, 22858, 22881], [22858, 22882, 22881], [22858, 22859, 22883], [22858, 22883, 22882], [22859, 22860, 22883], [22860, 22884, 22883], [22860, 22861, 22885], [22860, 22885, 22884], [22861, 22862, 22885], [22862, 22886, 22885], [22862, 22863, 22887], [22862, 22887, 22886], [22863, 22864, 22887], [22864, 22888, 22887], [22864, 22865, 22889], [22864, 22889, 22888], [22865, 22866, 22889], [22866, 22890, 22889], [22866, 22867, 22891], [22866, 22891, 22890], [22867, 22868, 22891], [22868, 22892, 22891], [22868, 22869, 22893], [22868, 22893, 22892], [22869, 22870, 22893], [22870, 22894, 22893], [22870, 22871, 22895], [22870, 22895, 22894], [22871, 22872, 22895], [22872, 22896, 22895], [22872, 22873, 22897], [22872, 22897, 22896], [22873, 22874, 22897], [22874, 22898, 22897], [22874, 22875, 22899], [22874, 22899, 22898], [22875, 2064, 22899], [2064, 2193, 22899], [22876, 22877, 22900], [22877, 22901, 22900], [22877, 22878, 22902], [22877, 22902, 22901], [22878, 22879, 22902], [22879, 22903, 22902], [22879, 22880, 22904], [22879, 22904, 22903], [22880, 22881, 22904], [22881, 22905, 22904], [22881, 22882, 22906], [22881, 22906, 22905], [22882, 22883, 22906], [22883, 22907, 22906], [22883, 22884, 22908], [22883, 22908, 22907], [22884, 22885, 22908], [22885, 22909, 22908], [22885, 22886, 22910], [22885, 22910, 22909], [22886, 22887, 22910], [22887, 22911, 22910], [22887, 22888, 22912], [22887, 22912, 22911], [22888, 22889, 22912], [22889, 22913, 22912], [22889, 22890, 22914], [22889, 22914, 22913], [22890, 22891, 22914], [22891, 22915, 22914], [22891, 22892, 22916], [22891, 22916, 22915], [22892, 22893, 22916], [22893, 22917, 22916], [22893, 22894, 22918], [22893, 22918, 22917], [22894, 22895, 22918], [22895, 22919, 22918], [22895, 22896, 22920], 
[22895, 22920, 22919], [22896, 22897, 22920], [22897, 22921, 22920], [22897, 22898, 22922], [22897, 22922, 22921], [22898, 22899, 22922], [22899, 22923, 22922], [22899, 2193, 2322], [22899, 2322, 22923], [22900, 22901, 22925], [22900, 22925, 22924], [22901, 22902, 22925], [22902, 22926, 22925], [22902, 22903, 22927], [22902, 22927, 22926], [22903, 22904, 22927], [22904, 22928, 22927], [22904, 22905, 22929], [22904, 22929, 22928], [22905, 22906, 22929], [22906, 22930, 22929], [22906, 22907, 22931], [22906, 22931, 22930], [22907, 22908, 22931], [22908, 22932, 22931], [22908, 22909, 22933], [22908, 22933, 22932], [22909, 22910, 22933], [22910, 22934, 22933], [22910, 22911, 22935], [22910, 22935, 22934], [22911, 22912, 22935], [22912, 22936, 22935], [22912, 22913, 22937], [22912, 22937, 22936], [22913, 22914, 22937], [22914, 22938, 22937], [22914, 22915, 22939], [22914, 22939, 22938], [22915, 22916, 22939], [22916, 22940, 22939], [22916, 22917, 22941], [22916, 22941, 22940], [22917, 22918, 22941], [22918, 22942, 22941], [22918, 22919, 22943], [22918, 22943, 22942], [22919, 22920, 22943], [22920, 22944, 22943], [22920, 22921, 22945], [22920, 22945, 22944], [22921, 22922, 22945], [22922, 22946, 22945], [22922, 22923, 22947], [22922, 22947, 22946], [22923, 2322, 22947], [2322, 2451, 22947], [22924, 22925, 22948], [22925, 22949, 22948], [22925, 22926, 22950], [22925, 22950, 22949], [22926, 22927, 22950], [22927, 22951, 22950], [22927, 22928, 22952], [22927, 22952, 22951], [22928, 22929, 22952], [22929, 22953, 22952], [22929, 22930, 22954], [22929, 22954, 22953], [22930, 22931, 22954], [22931, 22955, 22954], [22931, 22932, 22956], [22931, 22956, 22955], [22932, 22933, 22956], [22933, 22957, 22956], [22933, 22934, 22958], [22933, 22958, 22957], [22934, 22935, 22958], [22935, 22959, 22958], [22935, 22936, 22960], [22935, 22960, 22959], [22936, 22937, 22960], [22937, 22961, 22960], [22937, 22938, 22962], [22937, 22962, 22961], [22938, 22939, 22962], [22939, 22963, 22962], [22939, 22940, 22964], [22939, 22964, 22963], [22940, 22941, 22964], [22941, 22965, 22964], [22941, 22942, 22966], [22941, 22966, 22965], [22942, 22943, 22966], [22943, 22967, 22966], [22943, 22944, 22968], [22943, 22968, 22967], [22944, 22945, 22968], [22945, 22969, 22968], [22945, 22946, 22970], [22945, 22970, 22969], [22946, 22947, 22970], [22947, 22971, 22970], [22947, 2451, 2580], [22947, 2580, 22971], [22948, 22949, 22973], [22948, 22973, 22972], [22949, 22950, 22973], [22950, 22974, 22973], [22950, 22951, 22975], [22950, 22975, 22974], [22951, 22952, 22975], [22952, 22976, 22975], [22952, 22953, 22977], [22952, 22977, 22976], [22953, 22954, 22977], [22954, 22978, 22977], [22954, 22955, 22979], [22954, 22979, 22978], [22955, 22956, 22979], [22956, 22980, 22979], [22956, 22957, 22981], [22956, 22981, 22980], [22957, 22958, 22981], [22958, 22982, 22981], [22958, 22959, 22983], [22958, 22983, 22982], [22959, 22960, 22983], [22960, 22984, 22983], [22960, 22961, 22985], [22960, 22985, 22984], [22961, 22962, 22985], [22962, 22986, 22985], [22962, 22963, 22987], [22962, 22987, 22986], [22963, 22964, 22987], [22964, 22988, 22987], [22964, 22965, 22989], [22964, 22989, 22988], [22965, 22966, 22989], [22966, 22990, 22989], [22966, 22967, 22991], [22966, 22991, 22990], [22967, 22968, 22991], [22968, 22992, 22991], [22968, 22969, 22993], [22968, 22993, 22992], [22969, 22970, 22993], [22970, 22994, 22993], [22970, 22971, 22995], [22970, 22995, 22994], [22971, 2580, 22995], [2580, 2709, 22995], [22972, 22973, 22996], [22973, 22997, 22996], 
[22973, 22974, 22998], [22973, 22998, 22997], [22974, 22975, 22998], [22975, 22999, 22998], [22975, 22976, 23000], [22975, 23000, 22999], [22976, 22977, 23000], [22977, 23001, 23000], [22977, 22978, 23002], [22977, 23002, 23001], [22978, 22979, 23002], [22979, 23003, 23002], [22979, 22980, 23004], [22979, 23004, 23003], [22980, 22981, 23004], [22981, 23005, 23004], [22981, 22982, 23006], [22981, 23006, 23005], [22982, 22983, 23006], [22983, 23007, 23006], [22983, 22984, 23008], [22983, 23008, 23007], [22984, 22985, 23008], [22985, 23009, 23008], [22985, 22986, 23010], [22985, 23010, 23009], [22986, 22987, 23010], [22987, 23011, 23010], [22987, 22988, 23012], [22987, 23012, 23011], [22988, 22989, 23012], [22989, 23013, 23012], [22989, 22990, 23014], [22989, 23014, 23013], [22990, 22991, 23014], [22991, 23015, 23014], [22991, 22992, 23016], [22991, 23016, 23015], [22992, 22993, 23016], [22993, 23017, 23016], [22993, 22994, 23018], [22993, 23018, 23017], [22994, 22995, 23018], [22995, 23019, 23018], [22995, 2709, 2838], [22995, 2838, 23019], [22996, 22997, 23021], [22996, 23021, 23020], [22997, 22998, 23021], [22998, 23022, 23021], [22998, 22999, 23023], [22998, 23023, 23022], [22999, 23000, 23023], [23000, 23024, 23023], [23000, 23001, 23025], [23000, 23025, 23024], [23001, 23002, 23025], [23002, 23026, 23025], [23002, 23003, 23027], [23002, 23027, 23026], [23003, 23004, 23027], [23004, 23028, 23027], [23004, 23005, 23029], [23004, 23029, 23028], [23005, 23006, 23029], [23006, 23030, 23029], [23006, 23007, 23031], [23006, 23031, 23030], [23007, 23008, 23031], [23008, 23032, 23031], [23008, 23009, 23033], [23008, 23033, 23032], [23009, 23010, 23033], [23010, 23034, 23033], [23010, 23011, 23035], [23010, 23035, 23034], [23011, 23012, 23035], [23012, 23036, 23035], [23012, 23013, 23037], [23012, 23037, 23036], [23013, 23014, 23037], [23014, 23038, 23037], [23014, 23015, 23039], [23014, 23039, 23038], [23015, 23016, 23039], [23016, 23040, 23039], [23016, 23017, 23041], [23016, 23041, 23040], [23017, 23018, 23041], [23018, 23042, 23041], [23018, 23019, 23043], [23018, 23043, 23042], [23019, 2838, 23043], [2838, 2967, 23043], [23020, 23021, 23044], [23021, 23045, 23044], [23021, 23022, 23046], [23021, 23046, 23045], [23022, 23023, 23046], [23023, 23047, 23046], [23023, 23024, 23048], [23023, 23048, 23047], [23024, 23025, 23048], [23025, 23049, 23048], [23025, 23026, 23050], [23025, 23050, 23049], [23026, 23027, 23050], [23027, 23051, 23050], [23027, 23028, 23052], [23027, 23052, 23051], [23028, 23029, 23052], [23029, 23053, 23052], [23029, 23030, 23054], [23029, 23054, 23053], [23030, 23031, 23054], [23031, 23055, 23054], [23031, 23032, 23056], [23031, 23056, 23055], [23032, 23033, 23056], [23033, 23057, 23056], [23033, 23034, 23058], [23033, 23058, 23057], [23034, 23035, 23058], [23035, 23059, 23058], [23035, 23036, 23060], [23035, 23060, 23059], [23036, 23037, 23060], [23037, 23061, 23060], [23037, 23038, 23062], [23037, 23062, 23061], [23038, 23039, 23062], [23039, 23063, 23062], [23039, 23040, 23064], [23039, 23064, 23063], [23040, 23041, 23064], [23041, 23065, 23064], [23041, 23042, 23066], [23041, 23066, 23065], [23042, 23043, 23066], [23043, 23067, 23066], [23043, 2967, 3096], [23043, 3096, 23067], [23044, 23045, 23069], [23044, 23069, 23068], [23045, 23046, 23069], [23046, 23070, 23069], [23046, 23047, 23071], [23046, 23071, 23070], [23047, 23048, 23071], [23048, 23072, 23071], [23048, 23049, 23073], [23048, 23073, 23072], [23049, 23050, 23073], [23050, 23074, 23073], [23050, 23051, 
23075], [23050, 23075, 23074], [23051, 23052, 23075], [23052, 23076, 23075], [23052, 23053, 23077], [23052, 23077, 23076], [23053, 23054, 23077], [23054, 23078, 23077], [23054, 23055, 23079], [23054, 23079, 23078], [23055, 23056, 23079], [23056, 23080, 23079], [23056, 23057, 23081], [23056, 23081, 23080], [23057, 23058, 23081], [23058, 23082, 23081], [23058, 23059, 23083], [23058, 23083, 23082], [23059, 23060, 23083], [23060, 23084, 23083], [23060, 23061, 23085], [23060, 23085, 23084], [23061, 23062, 23085], [23062, 23086, 23085], [23062, 23063, 23087], [23062, 23087, 23086], [23063, 23064, 23087], [23064, 23088, 23087], [23064, 23065, 23089], [23064, 23089, 23088], [23065, 23066, 23089], [23066, 23090, 23089], [23066, 23067, 23091], [23066, 23091, 23090], [23067, 3096, 23091], [3096, 3225, 23091], [23068, 23069, 23092], [23069, 23093, 23092], [23069, 23070, 23094], [23069, 23094, 23093], [23070, 23071, 23094], [23071, 23095, 23094], [23071, 23072, 23096], [23071, 23096, 23095], [23072, 23073, 23096], [23073, 23097, 23096], [23073, 23074, 23098], [23073, 23098, 23097], [23074, 23075, 23098], [23075, 23099, 23098], [23075, 23076, 23100], [23075, 23100, 23099], [23076, 23077, 23100], [23077, 23101, 23100], [23077, 23078, 23102], [23077, 23102, 23101], [23078, 23079, 23102], [23079, 23103, 23102], [23079, 23080, 23104], [23079, 23104, 23103], [23080, 23081, 23104], [23081, 23105, 23104], [23081, 23082, 23106], [23081, 23106, 23105], [23082, 23083, 23106], [23083, 23107, 23106], [23083, 23084, 23108], [23083, 23108, 23107], [23084, 23085, 23108], [23085, 23109, 23108], [23085, 23086, 23110], [23085, 23110, 23109], [23086, 23087, 23110], [23087, 23111, 23110], [23087, 23088, 23112], [23087, 23112, 23111], [23088, 23089, 23112], [23089, 23113, 23112], [23089, 23090, 23114], [23089, 23114, 23113], [23090, 23091, 23114], [23091, 23115, 23114], [23091, 3225, 3354], [23091, 3354, 23115], [23092, 23093, 23117], [23092, 23117, 23116], [23093, 23094, 23117], [23094, 23118, 23117], [23094, 23095, 23119], [23094, 23119, 23118], [23095, 23096, 23119], [23096, 23120, 23119], [23096, 23097, 23121], [23096, 23121, 23120], [23097, 23098, 23121], [23098, 23122, 23121], [23098, 23099, 23123], [23098, 23123, 23122], [23099, 23100, 23123], [23100, 23124, 23123], [23100, 23101, 23125], [23100, 23125, 23124], [23101, 23102, 23125], [23102, 23126, 23125], [23102, 23103, 23127], [23102, 23127, 23126], [23103, 23104, 23127], [23104, 23128, 23127], [23104, 23105, 23129], [23104, 23129, 23128], [23105, 23106, 23129], [23106, 23130, 23129], [23106, 23107, 23131], [23106, 23131, 23130], [23107, 23108, 23131], [23108, 23132, 23131], [23108, 23109, 23133], [23108, 23133, 23132], [23109, 23110, 23133], [23110, 23134, 23133], [23110, 23111, 23135], [23110, 23135, 23134], [23111, 23112, 23135], [23112, 23136, 23135], [23112, 23113, 23137], [23112, 23137, 23136], [23113, 23114, 23137], [23114, 23138, 23137], [23114, 23115, 23139], [23114, 23139, 23138], [23115, 3354, 23139], [3354, 3483, 23139], [23116, 23117, 23140], [23117, 23141, 23140], [23117, 23118, 23142], [23117, 23142, 23141], [23118, 23119, 23142], [23119, 23143, 23142], [23119, 23120, 23144], [23119, 23144, 23143], [23120, 23121, 23144], [23121, 23145, 23144], [23121, 23122, 23146], [23121, 23146, 23145], [23122, 23123, 23146], [23123, 23147, 23146], [23123, 23124, 23148], [23123, 23148, 23147], [23124, 23125, 23148], [23125, 23149, 23148], [23125, 23126, 23150], [23125, 23150, 23149], [23126, 23127, 23150], [23127, 23151, 23150], [23127, 23128, 23152], [23127, 
23152, 23151], [23128, 23129, 23152], [23129, 23153, 23152], [23129, 23130, 23154], [23129, 23154, 23153], [23130, 23131, 23154], [23131, 23155, 23154], [23131, 23132, 23156], [23131, 23156, 23155], [23132, 23133, 23156], [23133, 23157, 23156], [23133, 23134, 23158], [23133, 23158, 23157], [23134, 23135, 23158], [23135, 23159, 23158], [23135, 23136, 23160], [23135, 23160, 23159], [23136, 23137, 23160], [23137, 23161, 23160], [23137, 23138, 23162], [23137, 23162, 23161], [23138, 23139, 23162], [23139, 23163, 23162], [23139, 3483, 3612], [23139, 3612, 23163], [23140, 23141, 23165], [23140, 23165, 23164], [23141, 23142, 23165], [23142, 23166, 23165], [23142, 23143, 23167], [23142, 23167, 23166], [23143, 23144, 23167], [23144, 23168, 23167], [23144, 23145, 23169], [23144, 23169, 23168], [23145, 23146, 23169], [23146, 23170, 23169], [23146, 23147, 23171], [23146, 23171, 23170], [23147, 23148, 23171], [23148, 23172, 23171], [23148, 23149, 23173], [23148, 23173, 23172], [23149, 23150, 23173], [23150, 23174, 23173], [23150, 23151, 23175], [23150, 23175, 23174], [23151, 23152, 23175], [23152, 23176, 23175], [23152, 23153, 23177], [23152, 23177, 23176], [23153, 23154, 23177], [23154, 23178, 23177], [23154, 23155, 23179], [23154, 23179, 23178], [23155, 23156, 23179], [23156, 23180, 23179], [23156, 23157, 23181], [23156, 23181, 23180], [23157, 23158, 23181], [23158, 23182, 23181], [23158, 23159, 23183], [23158, 23183, 23182], [23159, 23160, 23183], [23160, 23184, 23183], [23160, 23161, 23185], [23160, 23185, 23184], [23161, 23162, 23185], [23162, 23186, 23185], [23162, 23163, 23187], [23162, 23187, 23186], [23163, 3612, 23187], [3612, 3741, 23187], [23164, 23165, 23188], [23165, 23189, 23188], [23165, 23166, 23190], [23165, 23190, 23189], [23166, 23167, 23190], [23167, 23191, 23190], [23167, 23168, 23192], [23167, 23192, 23191], [23168, 23169, 23192], [23169, 23193, 23192], [23169, 23170, 23194], [23169, 23194, 23193], [23170, 23171, 23194], [23171, 23195, 23194], [23171, 23172, 23196], [23171, 23196, 23195], [23172, 23173, 23196], [23173, 23197, 23196], [23173, 23174, 23198], [23173, 23198, 23197], [23174, 23175, 23198], [23175, 23199, 23198], [23175, 23176, 23200], [23175, 23200, 23199], [23176, 23177, 23200], [23177, 23201, 23200], [23177, 23178, 23202], [23177, 23202, 23201], [23178, 23179, 23202], [23179, 23203, 23202], [23179, 23180, 23204], [23179, 23204, 23203], [23180, 23181, 23204], [23181, 23205, 23204], [23181, 23182, 23206], [23181, 23206, 23205], [23182, 23183, 23206], [23183, 23207, 23206], [23183, 23184, 23208], [23183, 23208, 23207], [23184, 23185, 23208], [23185, 23209, 23208], [23185, 23186, 23210], [23185, 23210, 23209], [23186, 23187, 23210], [23187, 23211, 23210], [23187, 3741, 3870], [23187, 3870, 23211], [23188, 23189, 23213], [23188, 23213, 23212], [23189, 23190, 23213], [23190, 23214, 23213], [23190, 23191, 23215], [23190, 23215, 23214], [23191, 23192, 23215], [23192, 23216, 23215], [23192, 23193, 23217], [23192, 23217, 23216], [23193, 23194, 23217], [23194, 23218, 23217], [23194, 23195, 23219], [23194, 23219, 23218], [23195, 23196, 23219], [23196, 23220, 23219], [23196, 23197, 23221], [23196, 23221, 23220], [23197, 23198, 23221], [23198, 23222, 23221], [23198, 23199, 23223], [23198, 23223, 23222], [23199, 23200, 23223], [23200, 23224, 23223], [23200, 23201, 23225], [23200, 23225, 23224], [23201, 23202, 23225], [23202, 23226, 23225], [23202, 23203, 23227], [23202, 23227, 23226], [23203, 23204, 23227], [23204, 23228, 23227], [23204, 23205, 23229], [23204, 23229, 23228], 
[23205, 23206, 23229], [23206, 23230, 23229], [23206, 23207, 23231], [23206, 23231, 23230], [23207, 23208, 23231], [23208, 23232, 23231], [23208, 23209, 23233], [23208, 23233, 23232], [23209, 23210, 23233], [23210, 23234, 23233], [23210, 23211, 23235], [23210, 23235, 23234], [23211, 3870, 23235], [3870, 3999, 23235], [23212, 23213, 23236], [23213, 23237, 23236], [23213, 23214, 23238], [23213, 23238, 23237], [23214, 23215, 23238], [23215, 23239, 23238], [23215, 23216, 23240], [23215, 23240, 23239], [23216, 23217, 23240], [23217, 23241, 23240], [23217, 23218, 23242], [23217, 23242, 23241], [23218, 23219, 23242], [23219, 23243, 23242], [23219, 23220, 23244], [23219, 23244, 23243], [23220, 23221, 23244], [23221, 23245, 23244], [23221, 23222, 23246], [23221, 23246, 23245], [23222, 23223, 23246], [23223, 23247, 23246], [23223, 23224, 23248], [23223, 23248, 23247], [23224, 23225, 23248], [23225, 23249, 23248], [23225, 23226, 23250], [23225, 23250, 23249], [23226, 23227, 23250], [23227, 23251, 23250], [23227, 23228, 23252], [23227, 23252, 23251], [23228, 23229, 23252], [23229, 23253, 23252], [23229, 23230, 23254], [23229, 23254, 23253], [23230, 23231, 23254], [23231, 23255, 23254], [23231, 23232, 23256], [23231, 23256, 23255], [23232, 23233, 23256], [23233, 23257, 23256], [23233, 23234, 23258], [23233, 23258, 23257], [23234, 23235, 23258], [23235, 23259, 23258], [23235, 3999, 4128], [23235, 4128, 23259], [23236, 23237, 23261], [23236, 23261, 23260], [23237, 23238, 23261], [23238, 23262, 23261], [23238, 23239, 23263], [23238, 23263, 23262], [23239, 23240, 23263], [23240, 23264, 23263], [23240, 23241, 23265], [23240, 23265, 23264], [23241, 23242, 23265], [23242, 23266, 23265], [23242, 23243, 23267], [23242, 23267, 23266], [23243, 23244, 23267], [23244, 23268, 23267], [23244, 23245, 23269], [23244, 23269, 23268], [23245, 23246, 23269], [23246, 23270, 23269], [23246, 23247, 23271], [23246, 23271, 23270], [23247, 23248, 23271], [23248, 23272, 23271], [23248, 23249, 23273], [23248, 23273, 23272], [23249, 23250, 23273], [23250, 23274, 23273], [23250, 23251, 23275], [23250, 23275, 23274], [23251, 23252, 23275], [23252, 23276, 23275], [23252, 23253, 23277], [23252, 23277, 23276], [23253, 23254, 23277], [23254, 23278, 23277], [23254, 23255, 23279], [23254, 23279, 23278], [23255, 23256, 23279], [23256, 23280, 23279], [23256, 23257, 23281], [23256, 23281, 23280], [23257, 23258, 23281], [23258, 23282, 23281], [23258, 23259, 23283], [23258, 23283, 23282], [23259, 4128, 23283], [4128, 4257, 23283], [23260, 23261, 23284], [23261, 23285, 23284], [23261, 23262, 23286], [23261, 23286, 23285], [23262, 23263, 23286], [23263, 23287, 23286], [23263, 23264, 23288], [23263, 23288, 23287], [23264, 23265, 23288], [23265, 23289, 23288], [23265, 23266, 23290], [23265, 23290, 23289], [23266, 23267, 23290], [23267, 23291, 23290], [23267, 23268, 23292], [23267, 23292, 23291], [23268, 23269, 23292], [23269, 23293, 23292], [23269, 23270, 23294], [23269, 23294, 23293], [23270, 23271, 23294], [23271, 23295, 23294], [23271, 23272, 23296], [23271, 23296, 23295], [23272, 23273, 23296], [23273, 23297, 23296], [23273, 23274, 23298], [23273, 23298, 23297], [23274, 23275, 23298], [23275, 23299, 23298], [23275, 23276, 23300], [23275, 23300, 23299], [23276, 23277, 23300], [23277, 23301, 23300], [23277, 23278, 23302], [23277, 23302, 23301], [23278, 23279, 23302], [23279, 23303, 23302], [23279, 23280, 23304], [23279, 23304, 23303], [23280, 23281, 23304], [23281, 23305, 23304], [23281, 23282, 23306], [23281, 23306, 23305], [23282, 23283, 
23306], [23283, 23307, 23306], [23283, 4257, 4386], [23283, 4386, 23307], [23284, 23285, 23309], [23284, 23309, 23308], [23285, 23286, 23309], [23286, 23310, 23309], [23286, 23287, 23311], [23286, 23311, 23310], [23287, 23288, 23311], [23288, 23312, 23311], [23288, 23289, 23313], [23288, 23313, 23312], [23289, 23290, 23313], [23290, 23314, 23313], [23290, 23291, 23315], [23290, 23315, 23314], [23291, 23292, 23315], [23292, 23316, 23315], [23292, 23293, 23317], [23292, 23317, 23316], [23293, 23294, 23317], [23294, 23318, 23317], [23294, 23295, 23319], [23294, 23319, 23318], [23295, 23296, 23319], [23296, 23320, 23319], [23296, 23297, 23321], [23296, 23321, 23320], [23297, 23298, 23321], [23298, 23322, 23321], [23298, 23299, 23323], [23298, 23323, 23322], [23299, 23300, 23323], [23300, 23324, 23323], [23300, 23301, 23325], [23300, 23325, 23324], [23301, 23302, 23325], [23302, 23326, 23325], [23302, 23303, 23327], [23302, 23327, 23326], [23303, 23304, 23327], [23304, 23328, 23327], [23304, 23305, 23329], [23304, 23329, 23328], [23305, 23306, 23329], [23306, 23330, 23329], [23306, 23307, 23331], [23306, 23331, 23330], [23307, 4386, 23331], [4386, 4515, 23331], [23308, 23309, 23332], [23309, 23333, 23332], [23309, 23310, 23334], [23309, 23334, 23333], [23310, 23311, 23334], [23311, 23335, 23334], [23311, 23312, 23336], [23311, 23336, 23335], [23312, 23313, 23336], [23313, 23337, 23336], [23313, 23314, 23338], [23313, 23338, 23337], [23314, 23315, 23338], [23315, 23339, 23338], [23315, 23316, 23340], [23315, 23340, 23339], [23316, 23317, 23340], [23317, 23341, 23340], [23317, 23318, 23342], [23317, 23342, 23341], [23318, 23319, 23342], [23319, 23343, 23342], [23319, 23320, 23344], [23319, 23344, 23343], [23320, 23321, 23344], [23321, 23345, 23344], [23321, 23322, 23346], [23321, 23346, 23345], [23322, 23323, 23346], [23323, 23347, 23346], [23323, 23324, 23348], [23323, 23348, 23347], [23324, 23325, 23348], [23325, 23349, 23348], [23325, 23326, 23350], [23325, 23350, 23349], [23326, 23327, 23350], [23327, 23351, 23350], [23327, 23328, 23352], [23327, 23352, 23351], [23328, 23329, 23352], [23329, 23353, 23352], [23329, 23330, 23354], [23329, 23354, 23353], [23330, 23331, 23354], [23331, 23355, 23354], [23331, 4515, 4644], [23331, 4644, 23355], [23332, 23333, 23357], [23332, 23357, 23356], [23333, 23334, 23357], [23334, 23358, 23357], [23334, 23335, 23359], [23334, 23359, 23358], [23335, 23336, 23359], [23336, 23360, 23359], [23336, 23337, 23361], [23336, 23361, 23360], [23337, 23338, 23361], [23338, 23362, 23361], [23338, 23339, 23363], [23338, 23363, 23362], [23339, 23340, 23363], [23340, 23364, 23363], [23340, 23341, 23365], [23340, 23365, 23364], [23341, 23342, 23365], [23342, 23366, 23365], [23342, 23343, 23367], [23342, 23367, 23366], [23343, 23344, 23367], [23344, 23368, 23367], [23344, 23345, 23369], [23344, 23369, 23368], [23345, 23346, 23369], [23346, 23370, 23369], [23346, 23347, 23371], [23346, 23371, 23370], [23347, 23348, 23371], [23348, 23372, 23371], [23348, 23349, 23373], [23348, 23373, 23372], [23349, 23350, 23373], [23350, 23374, 23373], [23350, 23351, 23375], [23350, 23375, 23374], [23351, 23352, 23375], [23352, 23376, 23375], [23352, 23353, 23377], [23352, 23377, 23376], [23353, 23354, 23377], [23354, 23378, 23377], [23354, 23355, 23379], [23354, 23379, 23378], [23355, 4644, 23379], [4644, 4773, 23379], [23356, 23357, 23380], [23357, 23381, 23380], [23357, 23358, 23382], [23357, 23382, 23381], [23358, 23359, 23382], [23359, 23383, 23382], [23359, 23360, 23384], [23359, 23384, 
23383], [23360, 23361, 23384], [23361, 23385, 23384], [23361, 23362, 23386], [23361, 23386, 23385], [23362, 23363, 23386], [23363, 23387, 23386], [23363, 23364, 23388], [23363, 23388, 23387], [23364, 23365, 23388], [23365, 23389, 23388], [23365, 23366, 23390], [23365, 23390, 23389], [23366, 23367, 23390], [23367, 23391, 23390], [23367, 23368, 23392], [23367, 23392, 23391], [23368, 23369, 23392], [23369, 23393, 23392], [23369, 23370, 23394], [23369, 23394, 23393], [23370, 23371, 23394], [23371, 23395, 23394], [23371, 23372, 23396], [23371, 23396, 23395], [23372, 23373, 23396], [23373, 23397, 23396], [23373, 23374, 23398], [23373, 23398, 23397], [23374, 23375, 23398], [23375, 23399, 23398], [23375, 23376, 23400], [23375, 23400, 23399], [23376, 23377, 23400], [23377, 23401, 23400], [23377, 23378, 23402], [23377, 23402, 23401], [23378, 23379, 23402], [23379, 23403, 23402], [23379, 4773, 4902], [23379, 4902, 23403], [23380, 23381, 23405], [23380, 23405, 23404], [23381, 23382, 23405], [23382, 23406, 23405], [23382, 23383, 23407], [23382, 23407, 23406], [23383, 23384, 23407], [23384, 23408, 23407], [23384, 23385, 23409], [23384, 23409, 23408], [23385, 23386, 23409], [23386, 23410, 23409], [23386, 23387, 23411], [23386, 23411, 23410], [23387, 23388, 23411], [23388, 23412, 23411], [23388, 23389, 23413], [23388, 23413, 23412], [23389, 23390, 23413], [23390, 23414, 23413], [23390, 23391, 23415], [23390, 23415, 23414], [23391, 23392, 23415], [23392, 23416, 23415], [23392, 23393, 23417], [23392, 23417, 23416], [23393, 23394, 23417], [23394, 23418, 23417], [23394, 23395, 23419], [23394, 23419, 23418], [23395, 23396, 23419], [23396, 23420, 23419], [23396, 23397, 23421], [23396, 23421, 23420], [23397, 23398, 23421], [23398, 23422, 23421], [23398, 23399, 23423], [23398, 23423, 23422], [23399, 23400, 23423], [23400, 23424, 23423], [23400, 23401, 23425], [23400, 23425, 23424], [23401, 23402, 23425], [23402, 23426, 23425], [23402, 23403, 23427], [23402, 23427, 23426], [23403, 4902, 23427], [4902, 5031, 23427], [23404, 23405, 23428], [23405, 23429, 23428], [23405, 23406, 23430], [23405, 23430, 23429], [23406, 23407, 23430], [23407, 23431, 23430], [23407, 23408, 23432], [23407, 23432, 23431], [23408, 23409, 23432], [23409, 23433, 23432], [23409, 23410, 23434], [23409, 23434, 23433], [23410, 23411, 23434], [23411, 23435, 23434], [23411, 23412, 23436], [23411, 23436, 23435], [23412, 23413, 23436], [23413, 23437, 23436], [23413, 23414, 23438], [23413, 23438, 23437], [23414, 23415, 23438], [23415, 23439, 23438], [23415, 23416, 23440], [23415, 23440, 23439], [23416, 23417, 23440], [23417, 23441, 23440], [23417, 23418, 23442], [23417, 23442, 23441], [23418, 23419, 23442], [23419, 23443, 23442], [23419, 23420, 23444], [23419, 23444, 23443], [23420, 23421, 23444], [23421, 23445, 23444], [23421, 23422, 23446], [23421, 23446, 23445], [23422, 23423, 23446], [23423, 23447, 23446], [23423, 23424, 23448], [23423, 23448, 23447], [23424, 23425, 23448], [23425, 23449, 23448], [23425, 23426, 23450], [23425, 23450, 23449], [23426, 23427, 23450], [23427, 23451, 23450], [23427, 5031, 5160], [23427, 5160, 23451], [23428, 23429, 23453], [23428, 23453, 23452], [23429, 23430, 23453], [23430, 23454, 23453], [23430, 23431, 23455], [23430, 23455, 23454], [23431, 23432, 23455], [23432, 23456, 23455], [23432, 23433, 23457], [23432, 23457, 23456], [23433, 23434, 23457], [23434, 23458, 23457], [23434, 23435, 23459], [23434, 23459, 23458], [23435, 23436, 23459], [23436, 23460, 23459], [23436, 23437, 23461], [23436, 23461, 23460], [23437, 
23438, 23461], [23438, 23462, 23461], [23438, 23439, 23463], [23438, 23463, 23462], [23439, 23440, 23463], [23440, 23464, 23463], [23440, 23441, 23465], [23440, 23465, 23464], [23441, 23442, 23465], [23442, 23466, 23465], [23442, 23443, 23467], [23442, 23467, 23466], [23443, 23444, 23467], [23444, 23468, 23467], [23444, 23445, 23469], [23444, 23469, 23468], [23445, 23446, 23469], [23446, 23470, 23469], [23446, 23447, 23471], [23446, 23471, 23470], [23447, 23448, 23471], [23448, 23472, 23471], [23448, 23449, 23473], [23448, 23473, 23472], [23449, 23450, 23473], [23450, 23474, 23473], [23450, 23451, 23475], [23450, 23475, 23474], [23451, 5160, 23475], [5160, 5289, 23475], [23452, 23453, 23476], [23453, 23477, 23476], [23453, 23454, 23478], [23453, 23478, 23477], [23454, 23455, 23478], [23455, 23479, 23478], [23455, 23456, 23480], [23455, 23480, 23479], [23456, 23457, 23480], [23457, 23481, 23480], [23457, 23458, 23482], [23457, 23482, 23481], [23458, 23459, 23482], [23459, 23483, 23482], [23459, 23460, 23484], [23459, 23484, 23483], [23460, 23461, 23484], [23461, 23485, 23484], [23461, 23462, 23486], [23461, 23486, 23485], [23462, 23463, 23486], [23463, 23487, 23486], [23463, 23464, 23488], [23463, 23488, 23487], [23464, 23465, 23488], [23465, 23489, 23488], [23465, 23466, 23490], [23465, 23490, 23489], [23466, 23467, 23490], [23467, 23491, 23490], [23467, 23468, 23492], [23467, 23492, 23491], [23468, 23469, 23492], [23469, 23493, 23492], [23469, 23470, 23494], [23469, 23494, 23493], [23470, 23471, 23494], [23471, 23495, 23494], [23471, 23472, 23496], [23471, 23496, 23495], [23472, 23473, 23496], [23473, 23497, 23496], [23473, 23474, 23498], [23473, 23498, 23497], [23474, 23475, 23498], [23475, 23499, 23498], [23475, 5289, 5418], [23475, 5418, 23499], [23476, 23477, 23501], [23476, 23501, 23500], [23477, 23478, 23501], [23478, 23502, 23501], [23478, 23479, 23503], [23478, 23503, 23502], [23479, 23480, 23503], [23480, 23504, 23503], [23480, 23481, 23505], [23480, 23505, 23504], [23481, 23482, 23505], [23482, 23506, 23505], [23482, 23483, 23507], [23482, 23507, 23506], [23483, 23484, 23507], [23484, 23508, 23507], [23484, 23485, 23509], [23484, 23509, 23508], [23485, 23486, 23509], [23486, 23510, 23509], [23486, 23487, 23511], [23486, 23511, 23510], [23487, 23488, 23511], [23488, 23512, 23511], [23488, 23489, 23513], [23488, 23513, 23512], [23489, 23490, 23513], [23490, 23514, 23513], [23490, 23491, 23515], [23490, 23515, 23514], [23491, 23492, 23515], [23492, 23516, 23515], [23492, 23493, 23517], [23492, 23517, 23516], [23493, 23494, 23517], [23494, 23518, 23517], [23494, 23495, 23519], [23494, 23519, 23518], [23495, 23496, 23519], [23496, 23520, 23519], [23496, 23497, 23521], [23496, 23521, 23520], [23497, 23498, 23521], [23498, 23522, 23521], [23498, 23499, 23523], [23498, 23523, 23522], [23499, 5418, 23523], [5418, 5547, 23523], [23500, 23501, 23524], [23501, 23525, 23524], [23501, 23502, 23526], [23501, 23526, 23525], [23502, 23503, 23526], [23503, 23527, 23526], [23503, 23504, 23528], [23503, 23528, 23527], [23504, 23505, 23528], [23505, 23529, 23528], [23505, 23506, 23530], [23505, 23530, 23529], [23506, 23507, 23530], [23507, 23531, 23530], [23507, 23508, 23532], [23507, 23532, 23531], [23508, 23509, 23532], [23509, 23533, 23532], [23509, 23510, 23534], [23509, 23534, 23533], [23510, 23511, 23534], [23511, 23535, 23534], [23511, 23512, 23536], [23511, 23536, 23535], [23512, 23513, 23536], [23513, 23537, 23536], [23513, 23514, 23538], [23513, 23538, 23537], [23514, 23515, 23538], 
[23515, 23539, 23538], [23515, 23516, 23540], [23515, 23540, 23539], [23516, 23517, 23540], [23517, 23541, 23540], [23517, 23518, 23542], [23517, 23542, 23541], [23518, 23519, 23542], [23519, 23543, 23542], [23519, 23520, 23544], [23519, 23544, 23543], [23520, 23521, 23544], [23521, 23545, 23544], [23521, 23522, 23546], [23521, 23546, 23545], [23522, 23523, 23546], [23523, 23547, 23546], [23523, 5547, 5676], [23523, 5676, 23547], [23524, 23525, 23549], [23524, 23549, 23548], [23525, 23526, 23549], [23526, 23550, 23549], [23526, 23527, 23551], [23526, 23551, 23550], [23527, 23528, 23551], [23528, 23552, 23551], [23528, 23529, 23553], [23528, 23553, 23552], [23529, 23530, 23553], [23530, 23554, 23553], [23530, 23531, 23555], [23530, 23555, 23554], [23531, 23532, 23555], [23532, 23556, 23555], [23532, 23533, 23557], [23532, 23557, 23556], [23533, 23534, 23557], [23534, 23558, 23557], [23534, 23535, 23559], [23534, 23559, 23558], [23535, 23536, 23559], [23536, 23560, 23559], [23536, 23537, 23561], [23536, 23561, 23560], [23537, 23538, 23561], [23538, 23562, 23561], [23538, 23539, 23563], [23538, 23563, 23562], [23539, 23540, 23563], [23540, 23564, 23563], [23540, 23541, 23565], [23540, 23565, 23564], [23541, 23542, 23565], [23542, 23566, 23565], [23542, 23543, 23567], [23542, 23567, 23566], [23543, 23544, 23567], [23544, 23568, 23567], [23544, 23545, 23569], [23544, 23569, 23568], [23545, 23546, 23569], [23546, 23570, 23569], [23546, 23547, 23571], [23546, 23571, 23570], [23547, 5676, 23571], [5676, 5805, 23571], [23548, 23549, 23572], [23549, 23573, 23572], [23549, 23550, 23574], [23549, 23574, 23573], [23550, 23551, 23574], [23551, 23575, 23574], [23551, 23552, 23576], [23551, 23576, 23575], [23552, 23553, 23576], [23553, 23577, 23576], [23553, 23554, 23578], [23553, 23578, 23577], [23554, 23555, 23578], [23555, 23579, 23578], [23555, 23556, 23580], [23555, 23580, 23579], [23556, 23557, 23580], [23557, 23581, 23580], [23557, 23558, 23582], [23557, 23582, 23581], [23558, 23559, 23582], [23559, 23583, 23582], [23559, 23560, 23584], [23559, 23584, 23583], [23560, 23561, 23584], [23561, 23585, 23584], [23561, 23562, 23586], [23561, 23586, 23585], [23562, 23563, 23586], [23563, 23587, 23586], [23563, 23564, 23588], [23563, 23588, 23587], [23564, 23565, 23588], [23565, 23589, 23588], [23565, 23566, 23590], [23565, 23590, 23589], [23566, 23567, 23590], [23567, 23591, 23590], [23567, 23568, 23592], [23567, 23592, 23591], [23568, 23569, 23592], [23569, 23593, 23592], [23569, 23570, 23594], [23569, 23594, 23593], [23570, 23571, 23594], [23571, 23595, 23594], [23571, 5805, 5934], [23571, 5934, 23595], [23572, 23573, 23597], [23572, 23597, 23596], [23573, 23574, 23597], [23574, 23598, 23597], [23574, 23575, 23599], [23574, 23599, 23598], [23575, 23576, 23599], [23576, 23600, 23599], [23576, 23577, 23601], [23576, 23601, 23600], [23577, 23578, 23601], [23578, 23602, 23601], [23578, 23579, 23603], [23578, 23603, 23602], [23579, 23580, 23603], [23580, 23604, 23603], [23580, 23581, 23605], [23580, 23605, 23604], [23581, 23582, 23605], [23582, 23606, 23605], [23582, 23583, 23607], [23582, 23607, 23606], [23583, 23584, 23607], [23584, 23608, 23607], [23584, 23585, 23609], [23584, 23609, 23608], [23585, 23586, 23609], [23586, 23610, 23609], [23586, 23587, 23611], [23586, 23611, 23610], [23587, 23588, 23611], [23588, 23612, 23611], [23588, 23589, 23613], [23588, 23613, 23612], [23589, 23590, 23613], [23590, 23614, 23613], [23590, 23591, 23615], [23590, 23615, 23614], [23591, 23592, 23615], [23592, 23616, 
23615], [23592, 23593, 23617], [23592, 23617, 23616], [23593, 23594, 23617], [23594, 23618, 23617], [23594, 23595, 23619], [23594, 23619, 23618], [23595, 5934, 23619], [5934, 6063, 23619], [23596, 23597, 23620], [23597, 23621, 23620], [23597, 23598, 23622], [23597, 23622, 23621], [23598, 23599, 23622], [23599, 23623, 23622], [23599, 23600, 23624], [23599, 23624, 23623], [23600, 23601, 23624], [23601, 23625, 23624], [23601, 23602, 23626], [23601, 23626, 23625], [23602, 23603, 23626], [23603, 23627, 23626], [23603, 23604, 23628], [23603, 23628, 23627], [23604, 23605, 23628], [23605, 23629, 23628], [23605, 23606, 23630], [23605, 23630, 23629], [23606, 23607, 23630], [23607, 23631, 23630], [23607, 23608, 23632], [23607, 23632, 23631], [23608, 23609, 23632], [23609, 23633, 23632], [23609, 23610, 23634], [23609, 23634, 23633], [23610, 23611, 23634], [23611, 23635, 23634], [23611, 23612, 23636], [23611, 23636, 23635], [23612, 23613, 23636], [23613, 23637, 23636], [23613, 23614, 23638], [23613, 23638, 23637], [23614, 23615, 23638], [23615, 23639, 23638], [23615, 23616, 23640], [23615, 23640, 23639], [23616, 23617, 23640], [23617, 23641, 23640], [23617, 23618, 23642], [23617, 23642, 23641], [23618, 23619, 23642], [23619, 23643, 23642], [23619, 6063, 6190], [23619, 6190, 23643], [23620, 23621, 23645], [23620, 23645, 23644], [23621, 23622, 23645], [23622, 23646, 23645], [23622, 23623, 23647], [23622, 23647, 23646], [23623, 23624, 23647], [23624, 23648, 23647], [23624, 23625, 23649], [23624, 23649, 23648], [23625, 23626, 23649], [23626, 23650, 23649], [23626, 23627, 23651], [23626, 23651, 23650], [23627, 23628, 23651], [23628, 23652, 23651], [23628, 23629, 23653], [23628, 23653, 23652], [23629, 23630, 23653], [23630, 23654, 23653], [23630, 23631, 23655], [23630, 23655, 23654], [23631, 23632, 23655], [23632, 23656, 23655], [23632, 23633, 23657], [23632, 23657, 23656], [23633, 23634, 23657], [23634, 23658, 23657], [23634, 23635, 23659], [23634, 23659, 23658], [23635, 23636, 23659], [23636, 23660, 23659], [23636, 23637, 23661], [23636, 23661, 23660], [23637, 23638, 23661], [23638, 23662, 23661], [23638, 23639, 23663], [23638, 23663, 23662], [23639, 23640, 23663], [23640, 23664, 23663], [23640, 23641, 23665], [23640, 23665, 23664], [23641, 23642, 23665], [23642, 23666, 23665], [23642, 23643, 23667], [23642, 23667, 23666], [23643, 6190, 23667], [6190, 6315, 23667], [23644, 23645, 23668], [23645, 23669, 23668], [23645, 23646, 23670], [23645, 23670, 23669], [23646, 23647, 23670], [23647, 23671, 23670], [23647, 23648, 23672], [23647, 23672, 23671], [23648, 23649, 23672], [23649, 23673, 23672], [23649, 23650, 23674], [23649, 23674, 23673], [23650, 23651, 23674], [23651, 23675, 23674], [23651, 23652, 23676], [23651, 23676, 23675], [23652, 23653, 23676], [23653, 23677, 23676], [23653, 23654, 23678], [23653, 23678, 23677], [23654, 23655, 23678], [23655, 23679, 23678], [23655, 23656, 23680], [23655, 23680, 23679], [23656, 23657, 23680], [23657, 23681, 23680], [23657, 23658, 23682], [23657, 23682, 23681], [23658, 23659, 23682], [23659, 23683, 23682], [23659, 23660, 23684], [23659, 23684, 23683], [23660, 23661, 23684], [23661, 23685, 23684], [23661, 23662, 23686], [23661, 23686, 23685], [23662, 23663, 23686], [23663, 23687, 23686], [23663, 23664, 23688], [23663, 23688, 23687], [23664, 23665, 23688], [23665, 23689, 23688], [23665, 23666, 23690], [23665, 23690, 23689], [23666, 23667, 23690], [23667, 23691, 23690], [23667, 6315, 6438], [23667, 6438, 23691], [23668, 23669, 23693], [23668, 23693, 23692], [23669, 23670, 
23693], [23670, 23694, 23693], [23670, 23671, 23695], [23670, 23695, 23694], [23671, 23672, 23695], [23672, 23696, 23695], [23672, 23673, 23697], [23672, 23697, 23696], [23673, 23674, 23697], [23674, 23698, 23697], [23674, 23675, 23699], [23674, 23699, 23698], [23675, 23676, 23699], [23676, 23700, 23699], [23676, 23677, 23701], [23676, 23701, 23700], [23677, 23678, 23701], [23678, 23702, 23701], [23678, 23679, 23703], [23678, 23703, 23702], [23679, 23680, 23703], [23680, 23704, 23703], [23680, 23681, 23705], [23680, 23705, 23704], [23681, 23682, 23705], [23682, 23706, 23705], [23682, 23683, 23707], [23682, 23707, 23706], [23683, 23684, 23707], [23684, 23708, 23707], [23684, 23685, 23709], [23684, 23709, 23708], [23685, 23686, 23709], [23686, 23710, 23709], [23686, 23687, 23711], [23686, 23711, 23710], [23687, 23688, 23711], [23688, 23712, 23711], [23688, 23689, 23713], [23688, 23713, 23712], [23689, 23690, 23713], [23690, 23714, 23713], [23690, 23691, 23715], [23690, 23715, 23714], [23691, 6438, 23715], [6438, 6558, 23715], [23692, 23693, 23716], [23693, 23717, 23716], [23693, 23694, 23718], [23693, 23718, 23717], [23694, 23695, 23718], [23695, 23719, 23718], [23695, 23696, 23720], [23695, 23720, 23719], [23696, 23697, 23720], [23697, 23721, 23720], [23697, 23698, 23722], [23697, 23722, 23721], [23698, 23699, 23722], [23699, 23723, 23722], [23699, 23700, 23724], [23699, 23724, 23723], [23700, 23701, 23724], [23701, 23725, 23724], [23701, 23702, 23726], [23701, 23726, 23725], [23702, 23703, 23726], [23703, 23727, 23726], [23703, 23704, 23728], [23703, 23728, 23727], [23704, 23705, 23728], [23705, 23729, 23728], [23705, 23706, 23730], [23705, 23730, 23729], [23706, 23707, 23730], [23707, 23731, 23730], [23707, 23708, 23732], [23707, 23732, 23731], [23708, 23709, 23732], [23709, 23733, 23732], [23709, 23710, 23734], [23709, 23734, 23733], [23710, 23711, 23734], [23711, 23735, 23734], [23711, 23712, 23736], [23711, 23736, 23735], [23712, 23713, 23736], [23713, 23737, 23736], [23713, 23714, 23738], [23713, 23738, 23737], [23714, 23715, 23738], [23715, 23739, 23738], [23715, 6558, 6676], [23715, 6676, 23739], [23716, 23717, 23741], [23716, 23741, 23740], [23717, 23718, 23741], [23718, 23742, 23741], [23718, 23719, 23743], [23718, 23743, 23742], [23719, 23720, 23743], [23720, 23744, 23743], [23720, 23721, 23745], [23720, 23745, 23744], [23721, 23722, 23745], [23722, 23746, 23745], [23722, 23723, 23747], [23722, 23747, 23746], [23723, 23724, 23747], [23724, 23748, 23747], [23724, 23725, 23749], [23724, 23749, 23748], [23725, 23726, 23749], [23726, 23750, 23749], [23726, 23727, 23751], [23726, 23751, 23750], [23727, 23728, 23751], [23728, 23752, 23751], [23728, 23729, 23753], [23728, 23753, 23752], [23729, 23730, 23753], [23730, 23754, 23753], [23730, 23731, 23755], [23730, 23755, 23754], [23731, 23732, 23755], [23732, 23756, 23755], [23732, 23733, 23757], [23732, 23757, 23756], [23733, 23734, 23757], [23734, 23758, 23757], [23734, 23735, 23759], [23734, 23759, 23758], [23735, 23736, 23759], [23736, 23760, 23759], [23736, 23737, 23761], [23736, 23761, 23760], [23737, 23738, 23761], [23738, 23762, 23761], [23738, 23739, 23763], [23738, 23763, 23762], [23739, 6676, 23763], [6676, 6794, 23763], [23740, 23741, 23764], [23741, 23765, 23764], [23741, 23742, 23766], [23741, 23766, 23765], [23742, 23743, 23766], [23743, 23767, 23766], [23743, 23744, 23768], [23743, 23768, 23767], [23744, 23745, 23768], [23745, 23769, 23768], [23745, 23746, 23770], [23745, 23770, 23769], [23746, 23747, 23770], [23747, 
23771, 23770], [23747, 23748, 23772], [23747, 23772, 23771], [23748, 23749, 23772], [23749, 23773, 23772], [23749, 23750, 23774], [23749, 23774, 23773], [23750, 23751, 23774], [23751, 23775, 23774], [23751, 23752, 23776], [23751, 23776, 23775], [23752, 23753, 23776], [23753, 23777, 23776], [23753, 23754, 23778], [23753, 23778, 23777], [23754, 23755, 23778], [23755, 23779, 23778], [23755, 23756, 23780], [23755, 23780, 23779], [23756, 23757, 23780], [23757, 23781, 23780], [23757, 23758, 23782], [23757, 23782, 23781], [23758, 23759, 23782], [23759, 23783, 23782], [23759, 23760, 23784], [23759, 23784, 23783], [23760, 23761, 23784], [23761, 23785, 23784], [23761, 23762, 23786], [23761, 23786, 23785], [23762, 23763, 23786], [23763, 23787, 23786], [23763, 6794, 6912], [23763, 6912, 23787], [23764, 23765, 23789], [23764, 23789, 23788], [23765, 23766, 23789], [23766, 23790, 23789], [23766, 23767, 23791], [23766, 23791, 23790], [23767, 23768, 23791], [23768, 23792, 23791], [23768, 23769, 23793], [23768, 23793, 23792], [23769, 23770, 23793], [23770, 23794, 23793], [23770, 23771, 23795], [23770, 23795, 23794], [23771, 23772, 23795], [23772, 23796, 23795], [23772, 23773, 23797], [23772, 23797, 23796], [23773, 23774, 23797], [23774, 23798, 23797], [23774, 23775, 23799], [23774, 23799, 23798], [23775, 23776, 23799], [23776, 23800, 23799], [23776, 23777, 23801], [23776, 23801, 23800], [23777, 23778, 23801], [23778, 23802, 23801], [23778, 23779, 23803], [23778, 23803, 23802], [23779, 23780, 23803], [23780, 23804, 23803], [23780, 23781, 23805], [23780, 23805, 23804], [23781, 23782, 23805], [23782, 23806, 23805], [23782, 23783, 23807], [23782, 23807, 23806], [23783, 23784, 23807], [23784, 23808, 23807], [23784, 23785, 23809], [23784, 23809, 23808], [23785, 23786, 23809], [23786, 23810, 23809], [23786, 23787, 23811], [23786, 23811, 23810], [23787, 6912, 23811], [6912, 7030, 23811], [23788, 23789, 23812], [23789, 23813, 23812], [23789, 23790, 23814], [23789, 23814, 23813], [23790, 23791, 23814], [23791, 23815, 23814], [23791, 23792, 23816], [23791, 23816, 23815], [23792, 23793, 23816], [23793, 23817, 23816], [23793, 23794, 23818], [23793, 23818, 23817], [23794, 23795, 23818], [23795, 23819, 23818], [23795, 23796, 23820], [23795, 23820, 23819], [23796, 23797, 23820], [23797, 23821, 23820], [23797, 23798, 23822], [23797, 23822, 23821], [23798, 23799, 23822], [23799, 23823, 23822], [23799, 23800, 23824], [23799, 23824, 23823], [23800, 23801, 23824], [23801, 23825, 23824], [23801, 23802, 23826], [23801, 23826, 23825], [23802, 23803, 23826], [23803, 23827, 23826], [23803, 23804, 23828], [23803, 23828, 23827], [23804, 23805, 23828], [23805, 23829, 23828], [23805, 23806, 23830], [23805, 23830, 23829], [23806, 23807, 23830], [23807, 23831, 23830], [23807, 23808, 23832], [23807, 23832, 23831], [23808, 23809, 23832], [23809, 23833, 23832], [23809, 23810, 23834], [23809, 23834, 23833], [23810, 23811, 23834], [23811, 23835, 23834], [23811, 7030, 7148], [23811, 7148, 23835], [23812, 23813, 23837], [23812, 23837, 23836], [23813, 23814, 23837], [23814, 23838, 23837], [23814, 23815, 23839], [23814, 23839, 23838], [23815, 23816, 23839], [23816, 23840, 23839], [23816, 23817, 23841], [23816, 23841, 23840], [23817, 23818, 23841], [23818, 23842, 23841], [23818, 23819, 23843], [23818, 23843, 23842], [23819, 23820, 23843], [23820, 23844, 23843], [23820, 23821, 23845], [23820, 23845, 23844], [23821, 23822, 23845], [23822, 23846, 23845], [23822, 23823, 23847], [23822, 23847, 23846], [23823, 23824, 23847], [23824, 23848, 23847], 
[23824, 23825, 23849], [23824, 23849, 23848], [23825, 23826, 23849], [23826, 23850, 23849], [23826, 23827, 23851], [23826, 23851, 23850], [23827, 23828, 23851], [23828, 23852, 23851], [23828, 23829, 23853], [23828, 23853, 23852], [23829, 23830, 23853], [23830, 23854, 23853], [23830, 23831, 23855], [23830, 23855, 23854], [23831, 23832, 23855], [23832, 23856, 23855], [23832, 23833, 23857], [23832, 23857, 23856], [23833, 23834, 23857], [23834, 23858, 23857], [23834, 23835, 23859], [23834, 23859, 23858], [23835, 7148, 23859], [7148, 7266, 23859], [23836, 23837, 23860], [23837, 23861, 23860], [23837, 23838, 23862], [23837, 23862, 23861], [23838, 23839, 23862], [23839, 23863, 23862], [23839, 23840, 23864], [23839, 23864, 23863], [23840, 23841, 23864], [23841, 23865, 23864], [23841, 23842, 23866], [23841, 23866, 23865], [23842, 23843, 23866], [23843, 23867, 23866], [23843, 23844, 23868], [23843, 23868, 23867], [23844, 23845, 23868], [23845, 23869, 23868], [23845, 23846, 23870], [23845, 23870, 23869], [23846, 23847, 23870], [23847, 23871, 23870], [23847, 23848, 23872], [23847, 23872, 23871], [23848, 23849, 23872], [23849, 23873, 23872], [23849, 23850, 23874], [23849, 23874, 23873], [23850, 23851, 23874], [23851, 23875, 23874], [23851, 23852, 23876], [23851, 23876, 23875], [23852, 23853, 23876], [23853, 23877, 23876], [23853, 23854, 23878], [23853, 23878, 23877], [23854, 23855, 23878], [23855, 23879, 23878], [23855, 23856, 23880], [23855, 23880, 23879], [23856, 23857, 23880], [23857, 23881, 23880], [23857, 23858, 23882], [23857, 23882, 23881], [23858, 23859, 23882], [23859, 23883, 23882], [23859, 7266, 7384], [23859, 7384, 23883], [23860, 23861, 23885], [23860, 23885, 23884], [23861, 23862, 23885], [23862, 23886, 23885], [23862, 23863, 23887], [23862, 23887, 23886], [23863, 23864, 23887], [23864, 23888, 23887], [23864, 23865, 23889], [23864, 23889, 23888], [23865, 23866, 23889], [23866, 23890, 23889], [23866, 23867, 23891], [23866, 23891, 23890], [23867, 23868, 23891], [23868, 23892, 23891], [23868, 23869, 23893], [23868, 23893, 23892], [23869, 23870, 23893], [23870, 23894, 23893], [23870, 23871, 23895], [23870, 23895, 23894], [23871, 23872, 23895], [23872, 23896, 23895], [23872, 23873, 23897], [23872, 23897, 23896], [23873, 23874, 23897], [23874, 23898, 23897], [23874, 23875, 23899], [23874, 23899, 23898], [23875, 23876, 23899], [23876, 23900, 23899], [23876, 23877, 23901], [23876, 23901, 23900], [23877, 23878, 23901], [23878, 23902, 23901], [23878, 23879, 23903], [23878, 23903, 23902], [23879, 23880, 23903], [23880, 23904, 23903], [23880, 23881, 23905], [23880, 23905, 23904], [23881, 23882, 23905], [23882, 23906, 23905], [23882, 23883, 23907], [23882, 23907, 23906], [23883, 7384, 23907], [7384, 7502, 23907], [23884, 23885, 23908], [23885, 23909, 23908], [23885, 23886, 23910], [23885, 23910, 23909], [23886, 23887, 23910], [23887, 23911, 23910], [23887, 23888, 23912], [23887, 23912, 23911], [23888, 23889, 23912], [23889, 23913, 23912], [23889, 23890, 23914], [23889, 23914, 23913], [23890, 23891, 23914], [23891, 23915, 23914], [23891, 23892, 23916], [23891, 23916, 23915], [23892, 23893, 23916], [23893, 23917, 23916], [23893, 23894, 23918], [23893, 23918, 23917], [23894, 23895, 23918], [23895, 23919, 23918], [23895, 23896, 23920], [23895, 23920, 23919], [23896, 23897, 23920], [23897, 23921, 23920], [23897, 23898, 23922], [23897, 23922, 23921], [23898, 23899, 23922], [23899, 23923, 23922], [23899, 23900, 23924], [23899, 23924, 23923], [23900, 23901, 23924], [23901, 23925, 23924], [23901, 23902, 
23926], [23901, 23926, 23925], [23902, 23903, 23926], [23903, 23927, 23926], [23903, 23904, 23928], [23903, 23928, 23927], [23904, 23905, 23928], [23905, 23929, 23928], [23905, 23906, 23930], [23905, 23930, 23929], [23906, 23907, 23930], [23907, 23931, 23930], [23907, 7502, 7620], [23907, 7620, 23931], [23908, 23909, 23933], [23908, 23933, 23932], [23909, 23910, 23933], [23910, 23934, 23933], [23910, 23911, 23935], [23910, 23935, 23934], [23911, 23912, 23935], [23912, 23936, 23935], [23912, 23913, 23937], [23912, 23937, 23936], [23913, 23914, 23937], [23914, 23938, 23937], [23914, 23915, 23939], [23914, 23939, 23938], [23915, 23916, 23939], [23916, 23940, 23939], [23916, 23917, 23941], [23916, 23941, 23940], [23917, 23918, 23941], [23918, 23942, 23941], [23918, 23919, 23943], [23918, 23943, 23942], [23919, 23920, 23943], [23920, 23944, 23943], [23920, 23921, 23945], [23920, 23945, 23944], [23921, 23922, 23945], [23922, 23946, 23945], [23922, 23923, 23947], [23922, 23947, 23946], [23923, 23924, 23947], [23924, 23948, 23947], [23924, 23925, 23949], [23924, 23949, 23948], [23925, 23926, 23949], [23926, 23950, 23949], [23926, 23927, 23951], [23926, 23951, 23950], [23927, 23928, 23951], [23928, 23952, 23951], [23928, 23929, 23953], [23928, 23953, 23952], [23929, 23930, 23953], [23930, 23954, 23953], [23930, 23931, 23955], [23930, 23955, 23954], [23931, 7620, 23955], [7620, 7738, 23955], [23932, 23933, 23956], [23933, 23957, 23956], [23933, 23934, 23958], [23933, 23958, 23957], [23934, 23935, 23958], [23935, 23959, 23958], [23935, 23936, 23960], [23935, 23960, 23959], [23936, 23937, 23960], [23937, 23961, 23960], [23937, 23938, 23962], [23937, 23962, 23961], [23938, 23939, 23962], [23939, 23963, 23962], [23939, 23940, 23964], [23939, 23964, 23963], [23940, 23941, 23964], [23941, 23965, 23964], [23941, 23942, 23966], [23941, 23966, 23965], [23942, 23943, 23966], [23943, 23967, 23966], [23943, 23944, 23968], [23943, 23968, 23967], [23944, 23945, 23968], [23945, 23969, 23968], [23945, 23946, 23970], [23945, 23970, 23969], [23946, 23947, 23970], [23947, 23971, 23970], [23947, 23948, 23972], [23947, 23972, 23971], [23948, 23949, 23972], [23949, 23973, 23972], [23949, 23950, 23974], [23949, 23974, 23973], [23950, 23951, 23974], [23951, 23975, 23974], [23951, 23952, 23976], [23951, 23976, 23975], [23952, 23953, 23976], [23953, 23977, 23976], [23953, 23954, 23978], [23953, 23978, 23977], [23954, 23955, 23978], [23955, 23979, 23978], [23955, 7738, 7856], [23955, 7856, 23979], [23956, 23957, 23981], [23956, 23981, 23980], [23957, 23958, 23981], [23958, 23982, 23981], [23958, 23959, 23983], [23958, 23983, 23982], [23959, 23960, 23983], [23960, 23984, 23983], [23960, 23961, 23985], [23960, 23985, 23984], [23961, 23962, 23985], [23962, 23986, 23985], [23962, 23963, 23987], [23962, 23987, 23986], [23963, 23964, 23987], [23964, 23988, 23987], [23964, 23965, 23989], [23964, 23989, 23988], [23965, 23966, 23989], [23966, 23990, 23989], [23966, 23967, 23991], [23966, 23991, 23990], [23967, 23968, 23991], [23968, 23992, 23991], [23968, 23969, 23993], [23968, 23993, 23992], [23969, 23970, 23993], [23970, 23994, 23993], [23970, 23971, 23995], [23970, 23995, 23994], [23971, 23972, 23995], [23972, 23996, 23995], [23972, 23973, 23997], [23972, 23997, 23996], [23973, 23974, 23997], [23974, 23998, 23997], [23974, 23975, 23999], [23974, 23999, 23998], [23975, 23976, 23999], [23976, 24000, 23999], [23976, 23977, 24001], [23976, 24001, 24000], [23977, 23978, 24001], [23978, 24002, 24001], [23978, 23979, 24003], [23978, 
24003, 24002], [23979, 7856, 24003], [7856, 7974, 24003], [23980, 23981, 24004], [23981, 24005, 24004], [23981, 23982, 24006], [23981, 24006, 24005], [23982, 23983, 24006], [23983, 24007, 24006], [23983, 23984, 24008], [23983, 24008, 24007], [23984, 23985, 24008], [23985, 24009, 24008], [23985, 23986, 24010], [23985, 24010, 24009], [23986, 23987, 24010], [23987, 24011, 24010], [23987, 23988, 24012], [23987, 24012, 24011], [23988, 23989, 24012], [23989, 24013, 24012], [23989, 23990, 24014], [23989, 24014, 24013], [23990, 23991, 24014], [23991, 24015, 24014], [23991, 23992, 24016], [23991, 24016, 24015], [23992, 23993, 24016], [23993, 24017, 24016], [23993, 23994, 24018], [23993, 24018, 24017], [23994, 23995, 24018], [23995, 24019, 24018], [23995, 23996, 24020], [23995, 24020, 24019], [23996, 23997, 24020], [23997, 24021, 24020], [23997, 23998, 24022], [23997, 24022, 24021], [23998, 23999, 24022], [23999, 24023, 24022], [23999, 24000, 24024], [23999, 24024, 24023], [24000, 24001, 24024], [24001, 24025, 24024], [24001, 24002, 24026], [24001, 24026, 24025], [24002, 24003, 24026], [24003, 24027, 24026], [24003, 7974, 8092], [24003, 8092, 24027], [24004, 24005, 24029], [24004, 24029, 24028], [24005, 24006, 24029], [24006, 24030, 24029], [24006, 24007, 24031], [24006, 24031, 24030], [24007, 24008, 24031], [24008, 24032, 24031], [24008, 24009, 24033], [24008, 24033, 24032], [24009, 24010, 24033], [24010, 24034, 24033], [24010, 24011, 24035], [24010, 24035, 24034], [24011, 24012, 24035], [24012, 24036, 24035], [24012, 24013, 24037], [24012, 24037, 24036], [24013, 24014, 24037], [24014, 24038, 24037], [24014, 24015, 24039], [24014, 24039, 24038], [24015, 24016, 24039], [24016, 24040, 24039], [24016, 24017, 24041], [24016, 24041, 24040], [24017, 24018, 24041], [24018, 24042, 24041], [24018, 24019, 24043], [24018, 24043, 24042], [24019, 24020, 24043], [24020, 24044, 24043], [24020, 24021, 24045], [24020, 24045, 24044], [24021, 24022, 24045], [24022, 24046, 24045], [24022, 24023, 24047], [24022, 24047, 24046], [24023, 24024, 24047], [24024, 24048, 24047], [24024, 24025, 24049], [24024, 24049, 24048], [24025, 24026, 24049], [24026, 24050, 24049], [24026, 24027, 24051], [24026, 24051, 24050], [24027, 8092, 24051], [8092, 8210, 24051], [24028, 24029, 24052], [24029, 24053, 24052], [24029, 24030, 24054], [24029, 24054, 24053], [24030, 24031, 24054], [24031, 24055, 24054], [24031, 24032, 24056], [24031, 24056, 24055], [24032, 24033, 24056], [24033, 24057, 24056], [24033, 24034, 24058], [24033, 24058, 24057], [24034, 24035, 24058], [24035, 24059, 24058], [24035, 24036, 24060], [24035, 24060, 24059], [24036, 24037, 24060], [24037, 24061, 24060], [24037, 24038, 24062], [24037, 24062, 24061], [24038, 24039, 24062], [24039, 24063, 24062], [24039, 24040, 24064], [24039, 24064, 24063], [24040, 24041, 24064], [24041, 24065, 24064], [24041, 24042, 24066], [24041, 24066, 24065], [24042, 24043, 24066], [24043, 24067, 24066], [24043, 24044, 24068], [24043, 24068, 24067], [24044, 24045, 24068], [24045, 24069, 24068], [24045, 24046, 24070], [24045, 24070, 24069], [24046, 24047, 24070], [24047, 24071, 24070], [24047, 24048, 24072], [24047, 24072, 24071], [24048, 24049, 24072], [24049, 24073, 24072], [24049, 24050, 24074], [24049, 24074, 24073], [24050, 24051, 24074], [24051, 24075, 24074], [24051, 8210, 8328], [24051, 8328, 24075], [24052, 24053, 24077], [24052, 24077, 24076], [24053, 24054, 24077], [24054, 24078, 24077], [24054, 24055, 24079], [24054, 24079, 24078], [24055, 24056, 24079], [24056, 24080, 24079], [24056, 
24057, 24081], [24056, 24081, 24080], [24057, 24058, 24081], [24058, 24082, 24081], [24058, 24059, 24083], [24058, 24083, 24082], [24059, 24060, 24083], [24060, 24084, 24083], [24060, 24061, 24085], [24060, 24085, 24084], [24061, 24062, 24085], [24062, 24086, 24085], [24062, 24063, 24087], [24062, 24087, 24086], [24063, 24064, 24087], [24064, 24088, 24087], [24064, 24065, 24089], [24064, 24089, 24088], [24065, 24066, 24089], [24066, 24090, 24089], [24066, 24067, 24091], [24066, 24091, 24090], [24067, 24068, 24091], [24068, 24092, 24091], [24068, 24069, 24093], [24068, 24093, 24092], [24069, 24070, 24093], [24070, 24094, 24093], [24070, 24071, 24095], [24070, 24095, 24094], [24071, 24072, 24095], [24072, 24096, 24095], [24072, 24073, 24097], [24072, 24097, 24096], [24073, 24074, 24097], [24074, 24098, 24097], [24074, 24075, 24099], [24074, 24099, 24098], [24075, 8328, 24099], [8328, 8446, 24099], [24076, 24077, 24100], [24077, 24101, 24100], [24077, 24078, 24102], [24077, 24102, 24101], [24078, 24079, 24102], [24079, 24103, 24102], [24079, 24080, 24104], [24079, 24104, 24103], [24080, 24081, 24104], [24081, 24105, 24104], [24081, 24082, 24106], [24081, 24106, 24105], [24082, 24083, 24106], [24083, 24107, 24106], [24083, 24084, 24108], [24083, 24108, 24107], [24084, 24085, 24108], [24085, 24109, 24108], [24085, 24086, 24110], [24085, 24110, 24109], [24086, 24087, 24110], [24087, 24111, 24110], [24087, 24088, 24112], [24087, 24112, 24111], [24088, 24089, 24112], [24089, 24113, 24112], [24089, 24090, 24114], [24089, 24114, 24113], [24090, 24091, 24114], [24091, 24115, 24114], [24091, 24092, 24116], [24091, 24116, 24115], [24092, 24093, 24116], [24093, 24117, 24116], [24093, 24094, 24118], [24093, 24118, 24117], [24094, 24095, 24118], [24095, 24119, 24118], [24095, 24096, 24120], [24095, 24120, 24119], [24096, 24097, 24120], [24097, 24121, 24120], [24097, 24098, 24122], [24097, 24122, 24121], [24098, 24099, 24122], [24099, 24123, 24122], [24099, 8446, 8564], [24099, 8564, 24123], [24100, 24101, 24125], [24100, 24125, 24124], [24101, 24102, 24125], [24102, 24126, 24125], [24102, 24103, 24127], [24102, 24127, 24126], [24103, 24104, 24127], [24104, 24128, 24127], [24104, 24105, 24129], [24104, 24129, 24128], [24105, 24106, 24129], [24106, 24130, 24129], [24106, 24107, 24131], [24106, 24131, 24130], [24107, 24108, 24131], [24108, 24132, 24131], [24108, 24109, 24133], [24108, 24133, 24132], [24109, 24110, 24133], [24110, 24134, 24133], [24110, 24111, 24135], [24110, 24135, 24134], [24111, 24112, 24135], [24112, 24136, 24135], [24112, 24113, 24137], [24112, 24137, 24136], [24113, 24114, 24137], [24114, 24138, 24137], [24114, 24115, 24139], [24114, 24139, 24138], [24115, 24116, 24139], [24116, 24140, 24139], [24116, 24117, 24141], [24116, 24141, 24140], [24117, 24118, 24141], [24118, 24142, 24141], [24118, 24119, 24143], [24118, 24143, 24142], [24119, 24120, 24143], [24120, 24144, 24143], [24120, 24121, 24145], [24120, 24145, 24144], [24121, 24122, 24145], [24122, 24146, 24145], [24122, 24123, 24147], [24122, 24147, 24146], [24123, 8564, 24147], [8564, 8682, 24147], [24124, 24125, 24148], [24125, 24149, 24148], [24125, 24126, 24150], [24125, 24150, 24149], [24126, 24127, 24150], [24127, 24151, 24150], [24127, 24128, 24152], [24127, 24152, 24151], [24128, 24129, 24152], [24129, 24153, 24152], [24129, 24130, 24154], [24129, 24154, 24153], [24130, 24131, 24154], [24131, 24155, 24154], [24131, 24132, 24156], [24131, 24156, 24155], [24132, 24133, 24156], [24133, 24157, 24156], [24133, 24134, 24158], 
[24133, 24158, 24157], [24134, 24135, 24158], [24135, 24159, 24158], [24135, 24136, 24160], [24135, 24160, 24159], [24136, 24137, 24160], [24137, 24161, 24160], [24137, 24138, 24162], [24137, 24162, 24161], [24138, 24139, 24162], [24139, 24163, 24162], [24139, 24140, 24164], [24139, 24164, 24163], [24140, 24141, 24164], [24141, 24165, 24164], [24141, 24142, 24166], [24141, 24166, 24165], [24142, 24143, 24166], [24143, 24167, 24166], [24143, 24144, 24168], [24143, 24168, 24167], [24144, 24145, 24168], [24145, 24169, 24168], [24145, 24146, 24170], [24145, 24170, 24169], [24146, 24147, 24170], [24147, 24171, 24170], [24147, 8682, 8800], [24147, 8800, 24171], [24148, 24149, 24173], [24148, 24173, 24172], [24149, 24150, 24173], [24150, 24174, 24173], [24150, 24151, 24175], [24150, 24175, 24174], [24151, 24152, 24175], [24152, 24176, 24175], [24152, 24153, 24177], [24152, 24177, 24176], [24153, 24154, 24177], [24154, 24178, 24177], [24154, 24155, 24179], [24154, 24179, 24178], [24155, 24156, 24179], [24156, 24180, 24179], [24156, 24157, 24181], [24156, 24181, 24180], [24157, 24158, 24181], [24158, 24182, 24181], [24158, 24159, 24183], [24158, 24183, 24182], [24159, 24160, 24183], [24160, 24184, 24183], [24160, 24161, 24185], [24160, 24185, 24184], [24161, 24162, 24185], [24162, 24186, 24185], [24162, 24163, 24187], [24162, 24187, 24186], [24163, 24164, 24187], [24164, 24188, 24187], [24164, 24165, 24189], [24164, 24189, 24188], [24165, 24166, 24189], [24166, 24190, 24189], [24166, 24167, 24191], [24166, 24191, 24190], [24167, 24168, 24191], [24168, 24192, 24191], [24168, 24169, 24193], [24168, 24193, 24192], [24169, 24170, 24193], [24170, 24194, 24193], [24170, 24171, 24195], [24170, 24195, 24194], [24171, 8800, 24195], [8800, 8918, 24195], [24172, 24173, 24196], [24173, 24197, 24196], [24173, 24174, 24198], [24173, 24198, 24197], [24174, 24175, 24198], [24175, 24199, 24198], [24175, 24176, 24200], [24175, 24200, 24199], [24176, 24177, 24200], [24177, 24201, 24200], [24177, 24178, 24202], [24177, 24202, 24201], [24178, 24179, 24202], [24179, 24203, 24202], [24179, 24180, 24204], [24179, 24204, 24203], [24180, 24181, 24204], [24181, 24205, 24204], [24181, 24182, 24206], [24181, 24206, 24205], [24182, 24183, 24206], [24183, 24207, 24206], [24183, 24184, 24208], [24183, 24208, 24207], [24184, 24185, 24208], [24185, 24209, 24208], [24185, 24186, 24210], [24185, 24210, 24209], [24186, 24187, 24210], [24187, 24211, 24210], [24187, 24188, 24212], [24187, 24212, 24211], [24188, 24189, 24212], [24189, 24213, 24212], [24189, 24190, 24214], [24189, 24214, 24213], [24190, 24191, 24214], [24191, 24215, 24214], [24191, 24192, 24216], [24191, 24216, 24215], [24192, 24193, 24216], [24193, 24217, 24216], [24193, 24194, 24218], [24193, 24218, 24217], [24194, 24195, 24218], [24195, 24219, 24218], [24195, 8918, 9036], [24195, 9036, 24219], [24196, 24197, 24221], [24196, 24221, 24220], [24197, 24198, 24221], [24198, 24222, 24221], [24198, 24199, 24223], [24198, 24223, 24222], [24199, 24200, 24223], [24200, 24224, 24223], [24200, 24201, 24225], [24200, 24225, 24224], [24201, 24202, 24225], [24202, 24226, 24225], [24202, 24203, 24227], [24202, 24227, 24226], [24203, 24204, 24227], [24204, 24228, 24227], [24204, 24205, 24229], [24204, 24229, 24228], [24205, 24206, 24229], [24206, 24230, 24229], [24206, 24207, 24231], [24206, 24231, 24230], [24207, 24208, 24231], [24208, 24232, 24231], [24208, 24209, 24233], [24208, 24233, 24232], [24209, 24210, 24233], [24210, 24234, 24233], [24210, 24211, 24235], [24210, 24235, 
24234], [24211, 24212, 24235], [24212, 24236, 24235], [24212, 24213, 24237], [24212, 24237, 24236], [24213, 24214, 24237], [24214, 24238, 24237], [24214, 24215, 24239], [24214, 24239, 24238], [24215, 24216, 24239], [24216, 24240, 24239], [24216, 24217, 24241], [24216, 24241, 24240], [24217, 24218, 24241], [24218, 24242, 24241], [24218, 24219, 24243], [24218, 24243, 24242], [24219, 9036, 24243], [9036, 9154, 24243], [24220, 24221, 24244], [24221, 24245, 24244], [24221, 24222, 24246], [24221, 24246, 24245], [24222, 24223, 24246], [24223, 24247, 24246], [24223, 24224, 24248], [24223, 24248, 24247], [24224, 24225, 24248], [24225, 24249, 24248], [24225, 24226, 24250], [24225, 24250, 24249], [24226, 24227, 24250], [24227, 24251, 24250], [24227, 24228, 24252], [24227, 24252, 24251], [24228, 24229, 24252], [24229, 24253, 24252], [24229, 24230, 24254], [24229, 24254, 24253], [24230, 24231, 24254], [24231, 24255, 24254], [24231, 24232, 24256], [24231, 24256, 24255], [24232, 24233, 24256], [24233, 24257, 24256], [24233, 24234, 24258], [24233, 24258, 24257], [24234, 24235, 24258], [24235, 24259, 24258], [24235, 24236, 24260], [24235, 24260, 24259], [24236, 24237, 24260], [24237, 24261, 24260], [24237, 24238, 24262], [24237, 24262, 24261], [24238, 24239, 24262], [24239, 24263, 24262], [24239, 24240, 24264], [24239, 24264, 24263], [24240, 24241, 24264], [24241, 24265, 24264], [24241, 24242, 24266], [24241, 24266, 24265], [24242, 24243, 24266], [24243, 24267, 24266], [24243, 9154, 9272], [24243, 9272, 24267], [24244, 24245, 24269], [24244, 24269, 24268], [24245, 24246, 24269], [24246, 24270, 24269], [24246, 24247, 24271], [24246, 24271, 24270], [24247, 24248, 24271], [24248, 24272, 24271], [24248, 24249, 24273], [24248, 24273, 24272], [24249, 24250, 24273], [24250, 24274, 24273], [24250, 24251, 24275], [24250, 24275, 24274], [24251, 24252, 24275], [24252, 24276, 24275], [24252, 24253, 24277], [24252, 24277, 24276], [24253, 24254, 24277], [24254, 24278, 24277], [24254, 24255, 24279], [24254, 24279, 24278], [24255, 24256, 24279], [24256, 24280, 24279], [24256, 24257, 24281], [24256, 24281, 24280], [24257, 24258, 24281], [24258, 24282, 24281], [24258, 24259, 24283], [24258, 24283, 24282], [24259, 24260, 24283], [24260, 24284, 24283], [24260, 24261, 24285], [24260, 24285, 24284], [24261, 24262, 24285], [24262, 24286, 24285], [24262, 24263, 24287], [24262, 24287, 24286], [24263, 24264, 24287], [24264, 24288, 24287], [24264, 24265, 24289], [24264, 24289, 24288], [24265, 24266, 24289], [24266, 24290, 24289], [24266, 24267, 24291], [24266, 24291, 24290], [24267, 9272, 24291], [9272, 9390, 24291], [24268, 24269, 24292], [24269, 24293, 24292], [24269, 24270, 24294], [24269, 24294, 24293], [24270, 24271, 24294], [24271, 24295, 24294], [24271, 24272, 24296], [24271, 24296, 24295], [24272, 24273, 24296], [24273, 24297, 24296], [24273, 24274, 24298], [24273, 24298, 24297], [24274, 24275, 24298], [24275, 24299, 24298], [24275, 24276, 24300], [24275, 24300, 24299], [24276, 24277, 24300], [24277, 24301, 24300], [24277, 24278, 24302], [24277, 24302, 24301], [24278, 24279, 24302], [24279, 24303, 24302], [24279, 24280, 24304], [24279, 24304, 24303], [24280, 24281, 24304], [24281, 24305, 24304], [24281, 24282, 24306], [24281, 24306, 24305], [24282, 24283, 24306], [24283, 24307, 24306], [24283, 24284, 24308], [24283, 24308, 24307], [24284, 24285, 24308], [24285, 24309, 24308], [24285, 24286, 24310], [24285, 24310, 24309], [24286, 24287, 24310], [24287, 24311, 24310], [24287, 24288, 24312], [24287, 24312, 24311], [24288, 
24289, 24312], [24289, 24313, 24312], [24289, 24290, 24314], [24289, 24314, 24313], [24290, 24291, 24314], [24291, 24315, 24314], [24291, 9390, 9508], [24291, 9508, 24315], [24292, 24293, 24317], [24292, 24317, 24316], [24293, 24294, 24317], [24294, 24318, 24317], [24294, 24295, 24319], [24294, 24319, 24318], [24295, 24296, 24319], [24296, 24320, 24319], [24296, 24297, 24321], [24296, 24321, 24320], [24297, 24298, 24321], [24298, 24322, 24321], [24298, 24299, 24323], [24298, 24323, 24322], [24299, 24300, 24323], [24300, 24324, 24323], [24300, 24301, 24325], [24300, 24325, 24324], [24301, 24302, 24325], [24302, 24326, 24325], [24302, 24303, 24327], [24302, 24327, 24326], [24303, 24304, 24327], [24304, 24328, 24327], [24304, 24305, 24329], [24304, 24329, 24328], [24305, 24306, 24329], [24306, 24330, 24329], [24306, 24307, 24331], [24306, 24331, 24330], [24307, 24308, 24331], [24308, 24332, 24331], [24308, 24309, 24333], [24308, 24333, 24332], [24309, 24310, 24333], [24310, 24334, 24333], [24310, 24311, 24335], [24310, 24335, 24334], [24311, 24312, 24335], [24312, 24336, 24335], [24312, 24313, 24337], [24312, 24337, 24336], [24313, 24314, 24337], [24314, 24338, 24337], [24314, 24315, 24339], [24314, 24339, 24338], [24315, 9508, 24339], [9508, 9626, 24339], [24316, 24317, 24340], [24317, 24341, 24340], [24317, 24318, 24342], [24317, 24342, 24341], [24318, 24319, 24342], [24319, 24343, 24342], [24319, 24320, 24344], [24319, 24344, 24343], [24320, 24321, 24344], [24321, 24345, 24344], [24321, 24322, 24346], [24321, 24346, 24345], [24322, 24323, 24346], [24323, 24347, 24346], [24323, 24324, 24348], [24323, 24348, 24347], [24324, 24325, 24348], [24325, 24349, 24348], [24325, 24326, 24350], [24325, 24350, 24349], [24326, 24327, 24350], [24327, 24351, 24350], [24327, 24328, 24352], [24327, 24352, 24351], [24328, 24329, 24352], [24329, 24353, 24352], [24329, 24330, 24354], [24329, 24354, 24353], [24330, 24331, 24354], [24331, 24355, 24354], [24331, 24332, 24356], [24331, 24356, 24355], [24332, 24333, 24356], [24333, 24357, 24356], [24333, 24334, 24358], [24333, 24358, 24357], [24334, 24335, 24358], [24335, 24359, 24358], [24335, 24336, 24360], [24335, 24360, 24359], [24336, 24337, 24360], [24337, 24361, 24360], [24337, 24338, 24362], [24337, 24362, 24361], [24338, 24339, 24362], [24339, 24363, 24362], [24339, 9626, 9744], [24339, 9744, 24363], [24340, 24341, 24365], [24340, 24365, 24364], [24341, 24342, 24365], [24342, 24366, 24365], [24342, 24343, 24367], [24342, 24367, 24366], [24343, 24344, 24367], [24344, 24368, 24367], [24344, 24345, 24369], [24344, 24369, 24368], [24345, 24346, 24369], [24346, 24370, 24369], [24346, 24347, 24371], [24346, 24371, 24370], [24347, 24348, 24371], [24348, 24372, 24371], [24348, 24349, 24373], [24348, 24373, 24372], [24349, 24350, 24373], [24350, 24374, 24373], [24350, 24351, 24375], [24350, 24375, 24374], [24351, 24352, 24375], [24352, 24376, 24375], [24352, 24353, 24377], [24352, 24377, 24376], [24353, 24354, 24377], [24354, 24378, 24377], [24354, 24355, 24379], [24354, 24379, 24378], [24355, 24356, 24379], [24356, 24380, 24379], [24356, 24357, 24381], [24356, 24381, 24380], [24357, 24358, 24381], [24358, 24382, 24381], [24358, 24359, 24383], [24358, 24383, 24382], [24359, 24360, 24383], [24360, 24384, 24383], [24360, 24361, 24385], [24360, 24385, 24384], [24361, 24362, 24385], [24362, 24386, 24385], [24362, 24363, 24387], [24362, 24387, 24386], [24363, 9744, 24387], [9744, 9864, 24387], [24364, 24365, 24388], [24365, 24389, 24388], [24365, 24366, 24390], [24365, 
24390, 24389], [24366, 24367, 24390], [24367, 24391, 24390], [24367, 24368, 24392], [24367, 24392, 24391], [24368, 24369, 24392], [24369, 24393, 24392], [24369, 24370, 24394], [24369, 24394, 24393], [24370, 24371, 24394], [24371, 24395, 24394], [24371, 24372, 24396], [24371, 24396, 24395], [24372, 24373, 24396], [24373, 24397, 24396], [24373, 24374, 24398], [24373, 24398, 24397], [24374, 24375, 24398], [24375, 24399, 24398], [24375, 24376, 24400], [24375, 24400, 24399], [24376, 24377, 24400], [24377, 24401, 24400], [24377, 24378, 24402], [24377, 24402, 24401], [24378, 24379, 24402], [24379, 24403, 24402], [24379, 24380, 24404], [24379, 24404, 24403], [24380, 24381, 24404], [24381, 24405, 24404], [24381, 24382, 24406], [24381, 24406, 24405], [24382, 24383, 24406], [24383, 24407, 24406], [24383, 24384, 24408], [24383, 24408, 24407], [24384, 24385, 24408], [24385, 24409, 24408], [24385, 24386, 24410], [24385, 24410, 24409], [24386, 24387, 24410], [24387, 24411, 24410], [24387, 9864, 9985], [24387, 9985, 24411], [24388, 24389, 24413], [24388, 24413, 24412], [24389, 24390, 24413], [24390, 24414, 24413], [24390, 24391, 24415], [24390, 24415, 24414], [24391, 24392, 24415], [24392, 24416, 24415], [24392, 24393, 24417], [24392, 24417, 24416], [24393, 24394, 24417], [24394, 24418, 24417], [24394, 24395, 24419], [24394, 24419, 24418], [24395, 24396, 24419], [24396, 24420, 24419], [24396, 24397, 24421], [24396, 24421, 24420], [24397, 24398, 24421], [24398, 24422, 24421], [24398, 24399, 24423], [24398, 24423, 24422], [24399, 24400, 24423], [24400, 24424, 24423], [24400, 24401, 24425], [24400, 24425, 24424], [24401, 24402, 24425], [24402, 24426, 24425], [24402, 24403, 24427], [24402, 24427, 24426], [24403, 24404, 24427], [24404, 24428, 24427], [24404, 24405, 24429], [24404, 24429, 24428], [24405, 24406, 24429], [24406, 24430, 24429], [24406, 24407, 24431], [24406, 24431, 24430], [24407, 24408, 24431], [24408, 24432, 24431], [24408, 24409, 24433], [24408, 24433, 24432], [24409, 24410, 24433], [24410, 24434, 24433], [24410, 24411, 24435], [24410, 24435, 24434], [24411, 9985, 24435], [9985, 10111, 24435], [24412, 24413, 24436], [24413, 24437, 24436], [24413, 24414, 24438], [24413, 24438, 24437], [24414, 24415, 24438], [24415, 24439, 24438], [24415, 24416, 24440], [24415, 24440, 24439], [24416, 24417, 24440], [24417, 24441, 24440], [24417, 24418, 24442], [24417, 24442, 24441], [24418, 24419, 24442], [24419, 24443, 24442], [24419, 24420, 24444], [24419, 24444, 24443], [24420, 24421, 24444], [24421, 24445, 24444], [24421, 24422, 24446], [24421, 24446, 24445], [24422, 24423, 24446], [24423, 24447, 24446], [24423, 24424, 24448], [24423, 24448, 24447], [24424, 24425, 24448], [24425, 24449, 24448], [24425, 24426, 24450], [24425, 24450, 24449], [24426, 24427, 24450], [24427, 24451, 24450], [24427, 24428, 24452], [24427, 24452, 24451], [24428, 24429, 24452], [24429, 24453, 24452], [24429, 24430, 24454], [24429, 24454, 24453], [24430, 24431, 24454], [24431, 24455, 24454], [24431, 24432, 24456], [24431, 24456, 24455], [24432, 24433, 24456], [24433, 24457, 24456], [24433, 24434, 24458], [24433, 24458, 24457], [24434, 24435, 24458], [24435, 24459, 24458], [24435, 10111, 10237], [24435, 10237, 24459], [24436, 24437, 24461], [24436, 24461, 24460], [24437, 24438, 24461], [24438, 24462, 24461], [24438, 24439, 24463], [24438, 24463, 24462], [24439, 24440, 24463], [24440, 24464, 24463], [24440, 24441, 24465], [24440, 24465, 24464], [24441, 24442, 24465], [24442, 24466, 24465], [24442, 24443, 24467], [24442, 24467, 24466], 
[24443, 24444, 24467], [24444, 24468, 24467], [24444, 24445, 24469], [24444, 24469, 24468], [24445, 24446, 24469], [24446, 24470, 24469], [24446, 24447, 24471], [24446, 24471, 24470], [24447, 24448, 24471], [24448, 24472, 24471], [24448, 24449, 24473], [24448, 24473, 24472], [24449, 24450, 24473], [24450, 24474, 24473], [24450, 24451, 24475], [24450, 24475, 24474], [24451, 24452, 24475], [24452, 24476, 24475], [24452, 24453, 24477], [24452, 24477, 24476], [24453, 24454, 24477], [24454, 24478, 24477], [24454, 24455, 24479], [24454, 24479, 24478], [24455, 24456, 24479], [24456, 24480, 24479], [24456, 24457, 24481], [24456, 24481, 24480], [24457, 24458, 24481], [24458, 24482, 24481], [24458, 24459, 24483], [24458, 24483, 24482], [24459, 10237, 24483], [10237, 10366, 24483], [24460, 24461, 24484], [24461, 24485, 24484], [24461, 24462, 24486], [24461, 24486, 24485], [24462, 24463, 24486], [24463, 24487, 24486], [24463, 24464, 24488], [24463, 24488, 24487], [24464, 24465, 24488], [24465, 24489, 24488], [24465, 24466, 24490], [24465, 24490, 24489], [24466, 24467, 24490], [24467, 24491, 24490], [24467, 24468, 24492], [24467, 24492, 24491], [24468, 24469, 24492], [24469, 24493, 24492], [24469, 24470, 24494], [24469, 24494, 24493], [24470, 24471, 24494], [24471, 24495, 24494], [24471, 24472, 24496], [24471, 24496, 24495], [24472, 24473, 24496], [24473, 24497, 24496], [24473, 24474, 24498], [24473, 24498, 24497], [24474, 24475, 24498], [24475, 24499, 24498], [24475, 24476, 24500], [24475, 24500, 24499], [24476, 24477, 24500], [24477, 24501, 24500], [24477, 24478, 24502], [24477, 24502, 24501], [24478, 24479, 24502], [24479, 24503, 24502], [24479, 24480, 24504], [24479, 24504, 24503], [24480, 24481, 24504], [24481, 24505, 24504], [24481, 24482, 24506], [24481, 24506, 24505], [24482, 24483, 24506], [24483, 24507, 24506], [24483, 10366, 10495], [24483, 10495, 24507], [24484, 24485, 24509], [24484, 24509, 24508], [24485, 24486, 24509], [24486, 24510, 24509], [24486, 24487, 24511], [24486, 24511, 24510], [24487, 24488, 24511], [24488, 24512, 24511], [24488, 24489, 24513], [24488, 24513, 24512], [24489, 24490, 24513], [24490, 24514, 24513], [24490, 24491, 24515], [24490, 24515, 24514], [24491, 24492, 24515], [24492, 24516, 24515], [24492, 24493, 24517], [24492, 24517, 24516], [24493, 24494, 24517], [24494, 24518, 24517], [24494, 24495, 24519], [24494, 24519, 24518], [24495, 24496, 24519], [24496, 24520, 24519], [24496, 24497, 24521], [24496, 24521, 24520], [24497, 24498, 24521], [24498, 24522, 24521], [24498, 24499, 24523], [24498, 24523, 24522], [24499, 24500, 24523], [24500, 24524, 24523], [24500, 24501, 24525], [24500, 24525, 24524], [24501, 24502, 24525], [24502, 24526, 24525], [24502, 24503, 24527], [24502, 24527, 24526], [24503, 24504, 24527], [24504, 24528, 24527], [24504, 24505, 24529], [24504, 24529, 24528], [24505, 24506, 24529], [24506, 24530, 24529], [24506, 24507, 24531], [24506, 24531, 24530], [24507, 10495, 24531], [10495, 10624, 24531], [24508, 24509, 24532], [24509, 24533, 24532], [24509, 24510, 24534], [24509, 24534, 24533], [24510, 24511, 24534], [24511, 24535, 24534], [24511, 24512, 24536], [24511, 24536, 24535], [24512, 24513, 24536], [24513, 24537, 24536], [24513, 24514, 24538], [24513, 24538, 24537], [24514, 24515, 24538], [24515, 24539, 24538], [24515, 24516, 24540], [24515, 24540, 24539], [24516, 24517, 24540], [24517, 24541, 24540], [24517, 24518, 24542], [24517, 24542, 24541], [24518, 24519, 24542], [24519, 24543, 24542], [24519, 24520, 24544], [24519, 24544, 24543], [24520, 
24521, 24544], [24521, 24545, 24544], [24521, 24522, 24546], [24521, 24546, 24545], [24522, 24523, 24546], [24523, 24547, 24546], [24523, 24524, 24548], [24523, 24548, 24547], [24524, 24525, 24548], [24525, 24549, 24548], [24525, 24526, 24550], [24525, 24550, 24549], [24526, 24527, 24550], [24527, 24551, 24550], [24527, 24528, 24552], [24527, 24552, 24551], [24528, 24529, 24552], [24529, 24553, 24552], [24529, 24530, 24554], [24529, 24554, 24553], [24530, 24531, 24554], [24531, 24555, 24554], [24531, 10624, 10753], [24531, 10753, 24555], [24532, 24533, 24557], [24532, 24557, 24556], [24533, 24534, 24557], [24534, 24558, 24557], [24534, 24535, 24559], [24534, 24559, 24558], [24535, 24536, 24559], [24536, 24560, 24559], [24536, 24537, 24561], [24536, 24561, 24560], [24537, 24538, 24561], [24538, 24562, 24561], [24538, 24539, 24563], [24538, 24563, 24562], [24539, 24540, 24563], [24540, 24564, 24563], [24540, 24541, 24565], [24540, 24565, 24564], [24541, 24542, 24565], [24542, 24566, 24565], [24542, 24543, 24567], [24542, 24567, 24566], [24543, 24544, 24567], [24544, 24568, 24567], [24544, 24545, 24569], [24544, 24569, 24568], [24545, 24546, 24569], [24546, 24570, 24569], [24546, 24547, 24571], [24546, 24571, 24570], [24547, 24548, 24571], [24548, 24572, 24571], [24548, 24549, 24573], [24548, 24573, 24572], [24549, 24550, 24573], [24550, 24574, 24573], [24550, 24551, 24575], [24550, 24575, 24574], [24551, 24552, 24575], [24552, 24576, 24575], [24552, 24553, 24577], [24552, 24577, 24576], [24553, 24554, 24577], [24554, 24578, 24577], [24554, 24555, 24579], [24554, 24579, 24578], [24555, 10753, 24579], [10753, 10882, 24579], [24556, 24557, 24580], [24557, 24581, 24580], [24557, 24558, 24582], [24557, 24582, 24581], [24558, 24559, 24582], [24559, 24583, 24582], [24559, 24560, 24584], [24559, 24584, 24583], [24560, 24561, 24584], [24561, 24585, 24584], [24561, 24562, 24586], [24561, 24586, 24585], [24562, 24563, 24586], [24563, 24587, 24586], [24563, 24564, 24588], [24563, 24588, 24587], [24564, 24565, 24588], [24565, 24589, 24588], [24565, 24566, 24590], [24565, 24590, 24589], [24566, 24567, 24590], [24567, 24591, 24590], [24567, 24568, 24592], [24567, 24592, 24591], [24568, 24569, 24592], [24569, 24593, 24592], [24569, 24570, 24594], [24569, 24594, 24593], [24570, 24571, 24594], [24571, 24595, 24594], [24571, 24572, 24596], [24571, 24596, 24595], [24572, 24573, 24596], [24573, 24597, 24596], [24573, 24574, 24598], [24573, 24598, 24597], [24574, 24575, 24598], [24575, 24599, 24598], [24575, 24576, 24600], [24575, 24600, 24599], [24576, 24577, 24600], [24577, 24601, 24600], [24577, 24578, 24602], [24577, 24602, 24601], [24578, 24579, 24602], [24579, 24603, 24602], [24579, 10882, 11011], [24579, 11011, 24603], [24580, 24581, 24605], [24580, 24605, 24604], [24581, 24582, 24605], [24582, 24606, 24605], [24582, 24583, 24607], [24582, 24607, 24606], [24583, 24584, 24607], [24584, 24608, 24607], [24584, 24585, 24609], [24584, 24609, 24608], [24585, 24586, 24609], [24586, 24610, 24609], [24586, 24587, 24611], [24586, 24611, 24610], [24587, 24588, 24611], [24588, 24612, 24611], [24588, 24589, 24613], [24588, 24613, 24612], [24589, 24590, 24613], [24590, 24614, 24613], [24590, 24591, 24615], [24590, 24615, 24614], [24591, 24592, 24615], [24592, 24616, 24615], [24592, 24593, 24617], [24592, 24617, 24616], [24593, 24594, 24617], [24594, 24618, 24617], [24594, 24595, 24619], [24594, 24619, 24618], [24595, 24596, 24619], [24596, 24620, 24619], [24596, 24597, 24621], [24596, 24621, 24620], [24597, 24598, 
24621], [24598, 24622, 24621], [24598, 24599, 24623], [24598, 24623, 24622], [24599, 24600, 24623], [24600, 24624, 24623], [24600, 24601, 24625], [24600, 24625, 24624], [24601, 24602, 24625], [24602, 24626, 24625], [24602, 24603, 24627], [24602, 24627, 24626], [24603, 11011, 24627], [11011, 11140, 24627], [24604, 24605, 24628], [24605, 24629, 24628], [24605, 24606, 24630], [24605, 24630, 24629], [24606, 24607, 24630], [24607, 24631, 24630], [24607, 24608, 24632], [24607, 24632, 24631], [24608, 24609, 24632], [24609, 24633, 24632], [24609, 24610, 24634], [24609, 24634, 24633], [24610, 24611, 24634], [24611, 24635, 24634], [24611, 24612, 24636], [24611, 24636, 24635], [24612, 24613, 24636], [24613, 24637, 24636], [24613, 24614, 24638], [24613, 24638, 24637], [24614, 24615, 24638], [24615, 24639, 24638], [24615, 24616, 24640], [24615, 24640, 24639], [24616, 24617, 24640], [24617, 24641, 24640], [24617, 24618, 24642], [24617, 24642, 24641], [24618, 24619, 24642], [24619, 24643, 24642], [24619, 24620, 24644], [24619, 24644, 24643], [24620, 24621, 24644], [24621, 24645, 24644], [24621, 24622, 24646], [24621, 24646, 24645], [24622, 24623, 24646], [24623, 24647, 24646], [24623, 24624, 24648], [24623, 24648, 24647], [24624, 24625, 24648], [24625, 24649, 24648], [24625, 24626, 24650], [24625, 24650, 24649], [24626, 24627, 24650], [24627, 24651, 24650], [24627, 11140, 11269], [24627, 11269, 24651], [24628, 24629, 24653], [24628, 24653, 24652], [24629, 24630, 24653], [24630, 24654, 24653], [24630, 24631, 24655], [24630, 24655, 24654], [24631, 24632, 24655], [24632, 24656, 24655], [24632, 24633, 24657], [24632, 24657, 24656], [24633, 24634, 24657], [24634, 24658, 24657], [24634, 24635, 24659], [24634, 24659, 24658], [24635, 24636, 24659], [24636, 24660, 24659], [24636, 24637, 24661], [24636, 24661, 24660], [24637, 24638, 24661], [24638, 24662, 24661], [24638, 24639, 24663], [24638, 24663, 24662], [24639, 24640, 24663], [24640, 24664, 24663], [24640, 24641, 24665], [24640, 24665, 24664], [24641, 24642, 24665], [24642, 24666, 24665], [24642, 24643, 24667], [24642, 24667, 24666], [24643, 24644, 24667], [24644, 24668, 24667], [24644, 24645, 24669], [24644, 24669, 24668], [24645, 24646, 24669], [24646, 24670, 24669], [24646, 24647, 24671], [24646, 24671, 24670], [24647, 24648, 24671], [24648, 24672, 24671], [24648, 24649, 24673], [24648, 24673, 24672], [24649, 24650, 24673], [24650, 24674, 24673], [24650, 24651, 24675], [24650, 24675, 24674], [24651, 11269, 24675], [11269, 11398, 24675], [24652, 24653, 24676], [24653, 24677, 24676], [24653, 24654, 24678], [24653, 24678, 24677], [24654, 24655, 24678], [24655, 24679, 24678], [24655, 24656, 24680], [24655, 24680, 24679], [24656, 24657, 24680], [24657, 24681, 24680], [24657, 24658, 24682], [24657, 24682, 24681], [24658, 24659, 24682], [24659, 24683, 24682], [24659, 24660, 24684], [24659, 24684, 24683], [24660, 24661, 24684], [24661, 24685, 24684], [24661, 24662, 24686], [24661, 24686, 24685], [24662, 24663, 24686], [24663, 24687, 24686], [24663, 24664, 24688], [24663, 24688, 24687], [24664, 24665, 24688], [24665, 24689, 24688], [24665, 24666, 24690], [24665, 24690, 24689], [24666, 24667, 24690], [24667, 24691, 24690], [24667, 24668, 24692], [24667, 24692, 24691], [24668, 24669, 24692], [24669, 24693, 24692], [24669, 24670, 24694], [24669, 24694, 24693], [24670, 24671, 24694], [24671, 24695, 24694], [24671, 24672, 24696], [24671, 24696, 24695], [24672, 24673, 24696], [24673, 24697, 24696], [24673, 24674, 24698], [24673, 24698, 24697], [24674, 24675, 24698], 
[24675, 24699, 24698], [24675, 11398, 11527], [24675, 11527, 24699], [24676, 24677, 24701], [24676, 24701, 24700], [24677, 24678, 24701], [24678, 24702, 24701], [24678, 24679, 24703], [24678, 24703, 24702], [24679, 24680, 24703], [24680, 24704, 24703], [24680, 24681, 24705], [24680, 24705, 24704], [24681, 24682, 24705], [24682, 24706, 24705], [24682, 24683, 24707], [24682, 24707, 24706], [24683, 24684, 24707], [24684, 24708, 24707], [24684, 24685, 24709], [24684, 24709, 24708], [24685, 24686, 24709], [24686, 24710, 24709], [24686, 24687, 24711], [24686, 24711, 24710], [24687, 24688, 24711], [24688, 24712, 24711], [24688, 24689, 24713], [24688, 24713, 24712], [24689, 24690, 24713], [24690, 24714, 24713], [24690, 24691, 24715], [24690, 24715, 24714], [24691, 24692, 24715], [24692, 24716, 24715], [24692, 24693, 24717], [24692, 24717, 24716], [24693, 24694, 24717], [24694, 24718, 24717], [24694, 24695, 24719], [24694, 24719, 24718], [24695, 24696, 24719], [24696, 24720, 24719], [24696, 24697, 24721], [24696, 24721, 24720], [24697, 24698, 24721], [24698, 24722, 24721], [24698, 24699, 24723], [24698, 24723, 24722], [24699, 11527, 24723], [11527, 11656, 24723], [24700, 24701, 24724], [24701, 24725, 24724], [24701, 24702, 24726], [24701, 24726, 24725], [24702, 24703, 24726], [24703, 24727, 24726], [24703, 24704, 24728], [24703, 24728, 24727], [24704, 24705, 24728], [24705, 24729, 24728], [24705, 24706, 24730], [24705, 24730, 24729], [24706, 24707, 24730], [24707, 24731, 24730], [24707, 24708, 24732], [24707, 24732, 24731], [24708, 24709, 24732], [24709, 24733, 24732], [24709, 24710, 24734], [24709, 24734, 24733], [24710, 24711, 24734], [24711, 24735, 24734], [24711, 24712, 24736], [24711, 24736, 24735], [24712, 24713, 24736], [24713, 24737, 24736], [24713, 24714, 24738], [24713, 24738, 24737], [24714, 24715, 24738], [24715, 24739, 24738], [24715, 24716, 24740], [24715, 24740, 24739], [24716, 24717, 24740], [24717, 24741, 24740], [24717, 24718, 24742], [24717, 24742, 24741], [24718, 24719, 24742], [24719, 24743, 24742], [24719, 24720, 24744], [24719, 24744, 24743], [24720, 24721, 24744], [24721, 24745, 24744], [24721, 24722, 24746], [24721, 24746, 24745], [24722, 24723, 24746], [24723, 24747, 24746], [24723, 11656, 11785], [24723, 11785, 24747], [24724, 24725, 24749], [24724, 24749, 24748], [24725, 24726, 24749], [24726, 24750, 24749], [24726, 24727, 24751], [24726, 24751, 24750], [24727, 24728, 24751], [24728, 24752, 24751], [24728, 24729, 24753], [24728, 24753, 24752], [24729, 24730, 24753], [24730, 24754, 24753], [24730, 24731, 24755], [24730, 24755, 24754], [24731, 24732, 24755], [24732, 24756, 24755], [24732, 24733, 24757], [24732, 24757, 24756], [24733, 24734, 24757], [24734, 24758, 24757], [24734, 24735, 24759], [24734, 24759, 24758], [24735, 24736, 24759], [24736, 24760, 24759], [24736, 24737, 24761], [24736, 24761, 24760], [24737, 24738, 24761], [24738, 24762, 24761], [24738, 24739, 24763], [24738, 24763, 24762], [24739, 24740, 24763], [24740, 24764, 24763], [24740, 24741, 24765], [24740, 24765, 24764], [24741, 24742, 24765], [24742, 24766, 24765], [24742, 24743, 24767], [24742, 24767, 24766], [24743, 24744, 24767], [24744, 24768, 24767], [24744, 24745, 24769], [24744, 24769, 24768], [24745, 24746, 24769], [24746, 24770, 24769], [24746, 24747, 24771], [24746, 24771, 24770], [24747, 11785, 24771], [11785, 11914, 24771], [24748, 24749, 24772], [24749, 24773, 24772], [24749, 24750, 24774], [24749, 24774, 24773], [24750, 24751, 24774], [24751, 24775, 24774], [24751, 24752, 24776], [24751, 
24776, 24775], [24752, 24753, 24776], [24753, 24777, 24776], [24753, 24754, 24778], [24753, 24778, 24777], [24754, 24755, 24778], [24755, 24779, 24778], [24755, 24756, 24780], [24755, 24780, 24779], [24756, 24757, 24780], [24757, 24781, 24780], [24757, 24758, 24782], [24757, 24782, 24781], [24758, 24759, 24782], [24759, 24783, 24782], [24759, 24760, 24784], [24759, 24784, 24783], [24760, 24761, 24784], [24761, 24785, 24784], [24761, 24762, 24786], [24761, 24786, 24785], [24762, 24763, 24786], [24763, 24787, 24786], [24763, 24764, 24788], [24763, 24788, 24787], [24764, 24765, 24788], [24765, 24789, 24788], [24765, 24766, 24790], [24765, 24790, 24789], [24766, 24767, 24790], [24767, 24791, 24790], [24767, 24768, 24792], [24767, 24792, 24791], [24768, 24769, 24792], [24769, 24793, 24792], [24769, 24770, 24794], [24769, 24794, 24793], [24770, 24771, 24794], [24771, 24795, 24794], [24771, 11914, 12043], [24771, 12043, 24795], [24772, 24773, 24797], [24772, 24797, 24796], [24773, 24774, 24797], [24774, 24798, 24797], [24774, 24775, 24799], [24774, 24799, 24798], [24775, 24776, 24799], [24776, 24800, 24799], [24776, 24777, 24801], [24776, 24801, 24800], [24777, 24778, 24801], [24778, 24802, 24801], [24778, 24779, 24803], [24778, 24803, 24802], [24779, 24780, 24803], [24780, 24804, 24803], [24780, 24781, 24805], [24780, 24805, 24804], [24781, 24782, 24805], [24782, 24806, 24805], [24782, 24783, 24807], [24782, 24807, 24806], [24783, 24784, 24807], [24784, 24808, 24807], [24784, 24785, 24809], [24784, 24809, 24808], [24785, 24786, 24809], [24786, 24810, 24809], [24786, 24787, 24811], [24786, 24811, 24810], [24787, 24788, 24811], [24788, 24812, 24811], [24788, 24789, 24813], [24788, 24813, 24812], [24789, 24790, 24813], [24790, 24814, 24813], [24790, 24791, 24815], [24790, 24815, 24814], [24791, 24792, 24815], [24792, 24816, 24815], [24792, 24793, 24817], [24792, 24817, 24816], [24793, 24794, 24817], [24794, 24818, 24817], [24794, 24795, 24819], [24794, 24819, 24818], [24795, 12043, 24819], [12043, 12172, 24819], [24796, 24797, 24820], [24797, 24821, 24820], [24797, 24798, 24822], [24797, 24822, 24821], [24798, 24799, 24822], [24799, 24823, 24822], [24799, 24800, 24824], [24799, 24824, 24823], [24800, 24801, 24824], [24801, 24825, 24824], [24801, 24802, 24826], [24801, 24826, 24825], [24802, 24803, 24826], [24803, 24827, 24826], [24803, 24804, 24828], [24803, 24828, 24827], [24804, 24805, 24828], [24805, 24829, 24828], [24805, 24806, 24830], [24805, 24830, 24829], [24806, 24807, 24830], [24807, 24831, 24830], [24807, 24808, 24832], [24807, 24832, 24831], [24808, 24809, 24832], [24809, 24833, 24832], [24809, 24810, 24834], [24809, 24834, 24833], [24810, 24811, 24834], [24811, 24835, 24834], [24811, 24812, 24836], [24811, 24836, 24835], [24812, 24813, 24836], [24813, 24837, 24836], [24813, 24814, 24838], [24813, 24838, 24837], [24814, 24815, 24838], [24815, 24839, 24838], [24815, 24816, 24840], [24815, 24840, 24839], [24816, 24817, 24840], [24817, 24841, 24840], [24817, 24818, 24842], [24817, 24842, 24841], [24818, 24819, 24842], [24819, 24843, 24842], [24819, 12172, 12301], [24819, 12301, 24843], [24820, 24821, 24845], [24820, 24845, 24844], [24821, 24822, 24845], [24822, 24846, 24845], [24822, 24823, 24847], [24822, 24847, 24846], [24823, 24824, 24847], [24824, 24848, 24847], [24824, 24825, 24849], [24824, 24849, 24848], [24825, 24826, 24849], [24826, 24850, 24849], [24826, 24827, 24851], [24826, 24851, 24850], [24827, 24828, 24851], [24828, 24852, 24851], [24828, 24829, 24853], [24828, 24853, 
24852], [24829, 24830, 24853], [24830, 24854, 24853], [24830, 24831, 24855], [24830, 24855, 24854], [24831, 24832, 24855], [24832, 24856, 24855], [24832, 24833, 24857], [24832, 24857, 24856], [24833, 24834, 24857], [24834, 24858, 24857], [24834, 24835, 24859], [24834, 24859, 24858], [24835, 24836, 24859], [24836, 24860, 24859], [24836, 24837, 24861], [24836, 24861, 24860], [24837, 24838, 24861], [24838, 24862, 24861], [24838, 24839, 24863], [24838, 24863, 24862], [24839, 24840, 24863], [24840, 24864, 24863], [24840, 24841, 24865], [24840, 24865, 24864], [24841, 24842, 24865], [24842, 24866, 24865], [24842, 24843, 24867], [24842, 24867, 24866], [24843, 12301, 24867], [12301, 12430, 24867], [24844, 24845, 24868], [24845, 24869, 24868], [24845, 24846, 24870], [24845, 24870, 24869], [24846, 24847, 24870], [24847, 24871, 24870], [24847, 24848, 24872], [24847, 24872, 24871], [24848, 24849, 24872], [24849, 24873, 24872], [24849, 24850, 24874], [24849, 24874, 24873], [24850, 24851, 24874], [24851, 24875, 24874], [24851, 24852, 24876], [24851, 24876, 24875], [24852, 24853, 24876], [24853, 24877, 24876], [24853, 24854, 24878], [24853, 24878, 24877], [24854, 24855, 24878], [24855, 24879, 24878], [24855, 24856, 24880], [24855, 24880, 24879], [24856, 24857, 24880], [24857, 24881, 24880], [24857, 24858, 24882], [24857, 24882, 24881], [24858, 24859, 24882], [24859, 24883, 24882], [24859, 24860, 24884], [24859, 24884, 24883], [24860, 24861, 24884], [24861, 24885, 24884], [24861, 24862, 24886], [24861, 24886, 24885], [24862, 24863, 24886], [24863, 24887, 24886], [24863, 24864, 24888], [24863, 24888, 24887], [24864, 24865, 24888], [24865, 24889, 24888], [24865, 24866, 24890], [24865, 24890, 24889], [24866, 24867, 24890], [24867, 24891, 24890], [24867, 12430, 12559], [24867, 12559, 24891], [24868, 24869, 24893], [24868, 24893, 24892], [24869, 24870, 24893], [24870, 24894, 24893], [24870, 24871, 24895], [24870, 24895, 24894], [24871, 24872, 24895], [24872, 24896, 24895], [24872, 24873, 24897], [24872, 24897, 24896], [24873, 24874, 24897], [24874, 24898, 24897], [24874, 24875, 24899], [24874, 24899, 24898], [24875, 24876, 24899], [24876, 24900, 24899], [24876, 24877, 24901], [24876, 24901, 24900], [24877, 24878, 24901], [24878, 24902, 24901], [24878, 24879, 24903], [24878, 24903, 24902], [24879, 24880, 24903], [24880, 24904, 24903], [24880, 24881, 24905], [24880, 24905, 24904], [24881, 24882, 24905], [24882, 24906, 24905], [24882, 24883, 24907], [24882, 24907, 24906], [24883, 24884, 24907], [24884, 24908, 24907], [24884, 24885, 24909], [24884, 24909, 24908], [24885, 24886, 24909], [24886, 24910, 24909], [24886, 24887, 24911], [24886, 24911, 24910], [24887, 24888, 24911], [24888, 24912, 24911], [24888, 24889, 24913], [24888, 24913, 24912], [24889, 24890, 24913], [24890, 24914, 24913], [24890, 24891, 24915], [24890, 24915, 24914], [24891, 12559, 24915], [12559, 12688, 24915], [24892, 24893, 24916], [24893, 24917, 24916], [24893, 24894, 24918], [24893, 24918, 24917], [24894, 24895, 24918], [24895, 24919, 24918], [24895, 24896, 24920], [24895, 24920, 24919], [24896, 24897, 24920], [24897, 24921, 24920], [24897, 24898, 24922], [24897, 24922, 24921], [24898, 24899, 24922], [24899, 24923, 24922], [24899, 24900, 24924], [24899, 24924, 24923], [24900, 24901, 24924], [24901, 24925, 24924], [24901, 24902, 24926], [24901, 24926, 24925], [24902, 24903, 24926], [24903, 24927, 24926], [24903, 24904, 24928], [24903, 24928, 24927], [24904, 24905, 24928], [24905, 24929, 24928], [24905, 24906, 24930], [24905, 24930, 24929], 
[24906, 24907, 24930], [24907, 24931, 24930], [24907, 24908, 24932], [24907, 24932, 24931], [24908, 24909, 24932], [24909, 24933, 24932], [24909, 24910, 24934], [24909, 24934, 24933], [24910, 24911, 24934], [24911, 24935, 24934], [24911, 24912, 24936], [24911, 24936, 24935], [24912, 24913, 24936], [24913, 24937, 24936], [24913, 24914, 24938], [24913, 24938, 24937], [24914, 24915, 24938], [24915, 24939, 24938], [24915, 12688, 12817], [24915, 12817, 24939], [24916, 24917, 24941], [24916, 24941, 24940], [24917, 24918, 24941], [24918, 24942, 24941], [24918, 24919, 24943], [24918, 24943, 24942], [24919, 24920, 24943], [24920, 24944, 24943], [24920, 24921, 24945], [24920, 24945, 24944], [24921, 24922, 24945], [24922, 24946, 24945], [24922, 24923, 24947], [24922, 24947, 24946], [24923, 24924, 24947], [24924, 24948, 24947], [24924, 24925, 24949], [24924, 24949, 24948], [24925, 24926, 24949], [24926, 24950, 24949], [24926, 24927, 24951], [24926, 24951, 24950], [24927, 24928, 24951], [24928, 24952, 24951], [24928, 24929, 24953], [24928, 24953, 24952], [24929, 24930, 24953], [24930, 24954, 24953], [24930, 24931, 24955], [24930, 24955, 24954], [24931, 24932, 24955], [24932, 24956, 24955], [24932, 24933, 24957], [24932, 24957, 24956], [24933, 24934, 24957], [24934, 24958, 24957], [24934, 24935, 24959], [24934, 24959, 24958], [24935, 24936, 24959], [24936, 24960, 24959], [24936, 24937, 24961], [24936, 24961, 24960], [24937, 24938, 24961], [24938, 24962, 24961], [24938, 24939, 24963], [24938, 24963, 24962], [24939, 12817, 24963], [12817, 12946, 24963], [24940, 24941, 24964], [24941, 24965, 24964], [24941, 24942, 24966], [24941, 24966, 24965], [24942, 24943, 24966], [24943, 24967, 24966], [24943, 24944, 24968], [24943, 24968, 24967], [24944, 24945, 24968], [24945, 24969, 24968], [24945, 24946, 24970], [24945, 24970, 24969], [24946, 24947, 24970], [24947, 24971, 24970], [24947, 24948, 24972], [24947, 24972, 24971], [24948, 24949, 24972], [24949, 24973, 24972], [24949, 24950, 24974], [24949, 24974, 24973], [24950, 24951, 24974], [24951, 24975, 24974], [24951, 24952, 24976], [24951, 24976, 24975], [24952, 24953, 24976], [24953, 24977, 24976], [24953, 24954, 24978], [24953, 24978, 24977], [24954, 24955, 24978], [24955, 24979, 24978], [24955, 24956, 24980], [24955, 24980, 24979], [24956, 24957, 24980], [24957, 24981, 24980], [24957, 24958, 24982], [24957, 24982, 24981], [24958, 24959, 24982], [24959, 24983, 24982], [24959, 24960, 24984], [24959, 24984, 24983], [24960, 24961, 24984], [24961, 24985, 24984], [24961, 24962, 24986], [24961, 24986, 24985], [24962, 24963, 24986], [24963, 24987, 24986], [24963, 12946, 13075], [24963, 13075, 24987], [24964, 24965, 24989], [24964, 24989, 24988], [24965, 24966, 24989], [24966, 24990, 24989], [24966, 24967, 24991], [24966, 24991, 24990], [24967, 24968, 24991], [24968, 24992, 24991], [24968, 24969, 24993], [24968, 24993, 24992], [24969, 24970, 24993], [24970, 24994, 24993], [24970, 24971, 24995], [24970, 24995, 24994], [24971, 24972, 24995], [24972, 24996, 24995], [24972, 24973, 24997], [24972, 24997, 24996], [24973, 24974, 24997], [24974, 24998, 24997], [24974, 24975, 24999], [24974, 24999, 24998], [24975, 24976, 24999], [24976, 25000, 24999], [24976, 24977, 25001], [24976, 25001, 25000], [24977, 24978, 25001], [24978, 25002, 25001], [24978, 24979, 25003], [24978, 25003, 25002], [24979, 24980, 25003], [24980, 25004, 25003], [24980, 24981, 25005], [24980, 25005, 25004], [24981, 24982, 25005], [24982, 25006, 25005], [24982, 24983, 25007], [24982, 25007, 25006], [24983, 
24984, 25007], [24984, 25008, 25007], [24984, 24985, 25009], [24984, 25009, 25008], [24985, 24986, 25009], [24986, 25010, 25009], [24986, 24987, 25011], [24986, 25011, 25010], [24987, 13075, 25011], [13075, 13204, 25011], [24988, 24989, 25012], [24989, 25013, 25012], [24989, 24990, 25014], [24989, 25014, 25013], [24990, 24991, 25014], [24991, 25015, 25014], [24991, 24992, 25016], [24991, 25016, 25015], [24992, 24993, 25016], [24993, 25017, 25016], [24993, 24994, 25018], [24993, 25018, 25017], [24994, 24995, 25018], [24995, 25019, 25018], [24995, 24996, 25020], [24995, 25020, 25019], [24996, 24997, 25020], [24997, 25021, 25020], [24997, 24998, 25022], [24997, 25022, 25021], [24998, 24999, 25022], [24999, 25023, 25022], [24999, 25000, 25024], [24999, 25024, 25023], [25000, 25001, 25024], [25001, 25025, 25024], [25001, 25002, 25026], [25001, 25026, 25025], [25002, 25003, 25026], [25003, 25027, 25026], [25003, 25004, 25028], [25003, 25028, 25027], [25004, 25005, 25028], [25005, 25029, 25028], [25005, 25006, 25030], [25005, 25030, 25029], [25006, 25007, 25030], [25007, 25031, 25030], [25007, 25008, 25032], [25007, 25032, 25031], [25008, 25009, 25032], [25009, 25033, 25032], [25009, 25010, 25034], [25009, 25034, 25033], [25010, 25011, 25034], [25011, 25035, 25034], [25011, 13204, 13333], [25011, 13333, 25035], [25012, 25013, 25037], [25012, 25037, 25036], [25013, 25014, 25037], [25014, 25038, 25037], [25014, 25015, 25039], [25014, 25039, 25038], [25015, 25016, 25039], [25016, 25040, 25039], [25016, 25017, 25041], [25016, 25041, 25040], [25017, 25018, 25041], [25018, 25042, 25041], [25018, 25019, 25043], [25018, 25043, 25042], [25019, 25020, 25043], [25020, 25044, 25043], [25020, 25021, 25045], [25020, 25045, 25044], [25021, 25022, 25045], [25022, 25046, 25045], [25022, 25023, 25047], [25022, 25047, 25046], [25023, 25024, 25047], [25024, 25048, 25047], [25024, 25025, 25049], [25024, 25049, 25048], [25025, 25026, 25049], [25026, 25050, 25049], [25026, 25027, 25051], [25026, 25051, 25050], [25027, 25028, 25051], [25028, 25052, 25051], [25028, 25029, 25053], [25028, 25053, 25052], [25029, 25030, 25053], [25030, 25054, 25053], [25030, 25031, 25055], [25030, 25055, 25054], [25031, 25032, 25055], [25032, 25056, 25055], [25032, 25033, 25057], [25032, 25057, 25056], [25033, 25034, 25057], [25034, 25058, 25057], [25034, 25035, 25059], [25034, 25059, 25058], [25035, 13333, 25059], [13333, 13462, 25059], [25036, 25037, 25060], [25037, 25061, 25060], [25037, 25038, 25062], [25037, 25062, 25061], [25038, 25039, 25062], [25039, 25063, 25062], [25039, 25040, 25064], [25039, 25064, 25063], [25040, 25041, 25064], [25041, 25065, 25064], [25041, 25042, 25066], [25041, 25066, 25065], [25042, 25043, 25066], [25043, 25067, 25066], [25043, 25044, 25068], [25043, 25068, 25067], [25044, 25045, 25068], [25045, 25069, 25068], [25045, 25046, 25070], [25045, 25070, 25069], [25046, 25047, 25070], [25047, 25071, 25070], [25047, 25048, 25072], [25047, 25072, 25071], [25048, 25049, 25072], [25049, 25073, 25072], [25049, 25050, 25074], [25049, 25074, 25073], [25050, 25051, 25074], [25051, 25075, 25074], [25051, 25052, 25076], [25051, 25076, 25075], [25052, 25053, 25076], [25053, 25077, 25076], [25053, 25054, 25078], [25053, 25078, 25077], [25054, 25055, 25078], [25055, 25079, 25078], [25055, 25056, 25080], [25055, 25080, 25079], [25056, 25057, 25080], [25057, 25081, 25080], [25057, 25058, 25082], [25057, 25082, 25081], [25058, 25059, 25082], [25059, 25083, 25082], [25059, 13462, 13591], [25059, 13591, 25083], [25060, 25061, 
25085], [25060, 25085, 25084], [25061, 25062, 25085], [25062, 25086, 25085], [25062, 25063, 25087], [25062, 25087, 25086], [25063, 25064, 25087], [25064, 25088, 25087], [25064, 25065, 25089], [25064, 25089, 25088], [25065, 25066, 25089], [25066, 25090, 25089], [25066, 25067, 25091], [25066, 25091, 25090], [25067, 25068, 25091], [25068, 25092, 25091], [25068, 25069, 25093], [25068, 25093, 25092], [25069, 25070, 25093], [25070, 25094, 25093], [25070, 25071, 25095], [25070, 25095, 25094], [25071, 25072, 25095], [25072, 25096, 25095], [25072, 25073, 25097], [25072, 25097, 25096], [25073, 25074, 25097], [25074, 25098, 25097], [25074, 25075, 25099], [25074, 25099, 25098], [25075, 25076, 25099], [25076, 25100, 25099], [25076, 25077, 25101], [25076, 25101, 25100], [25077, 25078, 25101], [25078, 25102, 25101], [25078, 25079, 25103], [25078, 25103, 25102], [25079, 25080, 25103], [25080, 25104, 25103], [25080, 25081, 25105], [25080, 25105, 25104], [25081, 25082, 25105], [25082, 25106, 25105], [25082, 25083, 25107], [25082, 25107, 25106], [25083, 13591, 25107], [13591, 13720, 25107], [25084, 25085, 25108], [25085, 25109, 25108], [25085, 25086, 25110], [25085, 25110, 25109], [25086, 25087, 25110], [25087, 25111, 25110], [25087, 25088, 25112], [25087, 25112, 25111], [25088, 25089, 25112], [25089, 25113, 25112], [25089, 25090, 25114], [25089, 25114, 25113], [25090, 25091, 25114], [25091, 25115, 25114], [25091, 25092, 25116], [25091, 25116, 25115], [25092, 25093, 25116], [25093, 25117, 25116], [25093, 25094, 25118], [25093, 25118, 25117], [25094, 25095, 25118], [25095, 25119, 25118], [25095, 25096, 25120], [25095, 25120, 25119], [25096, 25097, 25120], [25097, 25121, 25120], [25097, 25098, 25122], [25097, 25122, 25121], [25098, 25099, 25122], [25099, 25123, 25122], [25099, 25100, 25124], [25099, 25124, 25123], [25100, 25101, 25124], [25101, 25125, 25124], [25101, 25102, 25126], [25101, 25126, 25125], [25102, 25103, 25126], [25103, 25127, 25126], [25103, 25104, 25128], [25103, 25128, 25127], [25104, 25105, 25128], [25105, 25129, 25128], [25105, 25106, 25130], [25105, 25130, 25129], [25106, 25107, 25130], [25107, 25131, 25130], [25107, 13720, 13849], [25107, 13849, 25131], [25108, 25109, 25133], [25108, 25133, 25132], [25109, 25110, 25133], [25110, 25134, 25133], [25110, 25111, 25135], [25110, 25135, 25134], [25111, 25112, 25135], [25112, 25136, 25135], [25112, 25113, 25137], [25112, 25137, 25136], [25113, 25114, 25137], [25114, 25138, 25137], [25114, 25115, 25139], [25114, 25139, 25138], [25115, 25116, 25139], [25116, 25140, 25139], [25116, 25117, 25141], [25116, 25141, 25140], [25117, 25118, 25141], [25118, 25142, 25141], [25118, 25119, 25143], [25118, 25143, 25142], [25119, 25120, 25143], [25120, 25144, 25143], [25120, 25121, 25145], [25120, 25145, 25144], [25121, 25122, 25145], [25122, 25146, 25145], [25122, 25123, 25147], [25122, 25147, 25146], [25123, 25124, 25147], [25124, 25148, 25147], [25124, 25125, 25149], [25124, 25149, 25148], [25125, 25126, 25149], [25126, 25150, 25149], [25126, 25127, 25151], [25126, 25151, 25150], [25127, 25128, 25151], [25128, 25152, 25151], [25128, 25129, 25153], [25128, 25153, 25152], [25129, 25130, 25153], [25130, 25154, 25153], [25130, 25131, 25155], [25130, 25155, 25154], [25131, 13849, 25155], [13849, 13978, 25155], [25132, 25133, 25156], [25133, 25157, 25156], [25133, 25134, 25158], [25133, 25158, 25157], [25134, 25135, 25158], [25135, 25159, 25158], [25135, 25136, 25160], [25135, 25160, 25159], [25136, 25137, 25160], [25137, 25161, 25160], [25137, 25138, 25162], 
[25137, 25162, 25161], [25138, 25139, 25162], [25139, 25163, 25162], [25139, 25140, 25164], [25139, 25164, 25163], [25140, 25141, 25164], [25141, 25165, 25164], [25141, 25142, 25166], [25141, 25166, 25165], [25142, 25143, 25166], [25143, 25167, 25166], [25143, 25144, 25168], [25143, 25168, 25167], [25144, 25145, 25168], [25145, 25169, 25168], [25145, 25146, 25170], [25145, 25170, 25169], [25146, 25147, 25170], [25147, 25171, 25170], [25147, 25148, 25172], [25147, 25172, 25171], [25148, 25149, 25172], [25149, 25173, 25172], [25149, 25150, 25174], [25149, 25174, 25173], [25150, 25151, 25174], [25151, 25175, 25174], [25151, 25152, 25176], [25151, 25176, 25175], [25152, 25153, 25176], [25153, 25177, 25176], [25153, 25154, 25178], [25153, 25178, 25177], [25154, 25155, 25178], [25155, 25179, 25178], [25155, 13978, 14107], [25155, 14107, 25179], [25156, 25157, 25181], [25156, 25181, 25180], [25157, 25158, 25181], [25158, 25182, 25181], [25158, 25159, 25183], [25158, 25183, 25182], [25159, 25160, 25183], [25160, 25184, 25183], [25160, 25161, 25185], [25160, 25185, 25184], [25161, 25162, 25185], [25162, 25186, 25185], [25162, 25163, 25187], [25162, 25187, 25186], [25163, 25164, 25187], [25164, 25188, 25187], [25164, 25165, 25189], [25164, 25189, 25188], [25165, 25166, 25189], [25166, 25190, 25189], [25166, 25167, 25191], [25166, 25191, 25190], [25167, 25168, 25191], [25168, 25192, 25191], [25168, 25169, 25193], [25168, 25193, 25192], [25169, 25170, 25193], [25170, 25194, 25193], [25170, 25171, 25195], [25170, 25195, 25194], [25171, 25172, 25195], [25172, 25196, 25195], [25172, 25173, 25197], [25172, 25197, 25196], [25173, 25174, 25197], [25174, 25198, 25197], [25174, 25175, 25199], [25174, 25199, 25198], [25175, 25176, 25199], [25176, 25200, 25199], [25176, 25177, 25201], [25176, 25201, 25200], [25177, 25178, 25201], [25178, 25202, 25201], [25178, 25179, 25203], [25178, 25203, 25202], [25179, 14107, 25203], [14107, 14236, 25203], [25180, 25181, 25204], [25181, 25205, 25204], [25181, 25182, 25206], [25181, 25206, 25205], [25182, 25183, 25206], [25183, 25207, 25206], [25183, 25184, 25208], [25183, 25208, 25207], [25184, 25185, 25208], [25185, 25209, 25208], [25185, 25186, 25210], [25185, 25210, 25209], [25186, 25187, 25210], [25187, 25211, 25210], [25187, 25188, 25212], [25187, 25212, 25211], [25188, 25189, 25212], [25189, 25213, 25212], [25189, 25190, 25214], [25189, 25214, 25213], [25190, 25191, 25214], [25191, 25215, 25214], [25191, 25192, 25216], [25191, 25216, 25215], [25192, 25193, 25216], [25193, 25217, 25216], [25193, 25194, 25218], [25193, 25218, 25217], [25194, 25195, 25218], [25195, 25219, 25218], [25195, 25196, 25220], [25195, 25220, 25219], [25196, 25197, 25220], [25197, 25221, 25220], [25197, 25198, 25222], [25197, 25222, 25221], [25198, 25199, 25222], [25199, 25223, 25222], [25199, 25200, 25224], [25199, 25224, 25223], [25200, 25201, 25224], [25201, 25225, 25224], [25201, 25202, 25226], [25201, 25226, 25225], [25202, 25203, 25226], [25203, 25227, 25226], [25203, 14236, 14365], [25203, 14365, 25227], [25204, 25205, 25229], [25204, 25229, 25228], [25205, 25206, 25229], [25206, 25230, 25229], [25206, 25207, 25231], [25206, 25231, 25230], [25207, 25208, 25231], [25208, 25232, 25231], [25208, 25209, 25233], [25208, 25233, 25232], [25209, 25210, 25233], [25210, 25234, 25233], [25210, 25211, 25235], [25210, 25235, 25234], [25211, 25212, 25235], [25212, 25236, 25235], [25212, 25213, 25237], [25212, 25237, 25236], [25213, 25214, 25237], [25214, 25238, 25237], [25214, 25215, 25239], [25214, 
25239, 25238], [25215, 25216, 25239], [25216, 25240, 25239], [25216, 25217, 25241], [25216, 25241, 25240], [25217, 25218, 25241], [25218, 25242, 25241], [25218, 25219, 25243], [25218, 25243, 25242], [25219, 25220, 25243], [25220, 25244, 25243], [25220, 25221, 25245], [25220, 25245, 25244], [25221, 25222, 25245], [25222, 25246, 25245], [25222, 25223, 25247], [25222, 25247, 25246], [25223, 25224, 25247], [25224, 25248, 25247], [25224, 25225, 25249], [25224, 25249, 25248], [25225, 25226, 25249], [25226, 25250, 25249], [25226, 25227, 25251], [25226, 25251, 25250], [25227, 14365, 25251], [14365, 14494, 25251], [25228, 25229, 25252], [25229, 25253, 25252], [25229, 25230, 25254], [25229, 25254, 25253], [25230, 25231, 25254], [25231, 25255, 25254], [25231, 25232, 25256], [25231, 25256, 25255], [25232, 25233, 25256], [25233, 25257, 25256], [25233, 25234, 25258], [25233, 25258, 25257], [25234, 25235, 25258], [25235, 25259, 25258], [25235, 25236, 25260], [25235, 25260, 25259], [25236, 25237, 25260], [25237, 25261, 25260], [25237, 25238, 25262], [25237, 25262, 25261], [25238, 25239, 25262], [25239, 25263, 25262], [25239, 25240, 25264], [25239, 25264, 25263], [25240, 25241, 25264], [25241, 25265, 25264], [25241, 25242, 25266], [25241, 25266, 25265], [25242, 25243, 25266], [25243, 25267, 25266], [25243, 25244, 25268], [25243, 25268, 25267], [25244, 25245, 25268], [25245, 25269, 25268], [25245, 25246, 25270], [25245, 25270, 25269], [25246, 25247, 25270], [25247, 25271, 25270], [25247, 25248, 25272], [25247, 25272, 25271], [25248, 25249, 25272], [25249, 25273, 25272], [25249, 25250, 25274], [25249, 25274, 25273], [25250, 25251, 25274], [25251, 25275, 25274], [25251, 14494, 14623], [25251, 14623, 25275], [25252, 25253, 25277], [25252, 25277, 25276], [25253, 25254, 25277], [25254, 25278, 25277], [25254, 25255, 25279], [25254, 25279, 25278], [25255, 25256, 25279], [25256, 25280, 25279], [25256, 25257, 25281], [25256, 25281, 25280], [25257, 25258, 25281], [25258, 25282, 25281], [25258, 25259, 25283], [25258, 25283, 25282], [25259, 25260, 25283], [25260, 25284, 25283], [25260, 25261, 25285], [25260, 25285, 25284], [25261, 25262, 25285], [25262, 25286, 25285], [25262, 25263, 25287], [25262, 25287, 25286], [25263, 25264, 25287], [25264, 25288, 25287], [25264, 25265, 25289], [25264, 25289, 25288], [25265, 25266, 25289], [25266, 25290, 25289], [25266, 25267, 25291], [25266, 25291, 25290], [25267, 25268, 25291], [25268, 25292, 25291], [25268, 25269, 25293], [25268, 25293, 25292], [25269, 25270, 25293], [25270, 25294, 25293], [25270, 25271, 25295], [25270, 25295, 25294], [25271, 25272, 25295], [25272, 25296, 25295], [25272, 25273, 25297], [25272, 25297, 25296], [25273, 25274, 25297], [25274, 25298, 25297], [25274, 25275, 25299], [25274, 25299, 25298], [25275, 14623, 25299], [14623, 14752, 25299], [25276, 25277, 25300], [25277, 25301, 25300], [25277, 25278, 25302], [25277, 25302, 25301], [25278, 25279, 25302], [25279, 25303, 25302], [25279, 25280, 25304], [25279, 25304, 25303], [25280, 25281, 25304], [25281, 25305, 25304], [25281, 25282, 25306], [25281, 25306, 25305], [25282, 25283, 25306], [25283, 25307, 25306], [25283, 25284, 25308], [25283, 25308, 25307], [25284, 25285, 25308], [25285, 25309, 25308], [25285, 25286, 25310], [25285, 25310, 25309], [25286, 25287, 25310], [25287, 25311, 25310], [25287, 25288, 25312], [25287, 25312, 25311], [25288, 25289, 25312], [25289, 25313, 25312], [25289, 25290, 25314], [25289, 25314, 25313], [25290, 25291, 25314], [25291, 25315, 25314], [25291, 25292, 25316], [25291, 25316, 
25315], [25292, 25293, 25316], [25293, 25317, 25316], [25293, 25294, 25318], [25293, 25318, 25317], [25294, 25295, 25318], [25295, 25319, 25318], [25295, 25296, 25320], [25295, 25320, 25319], [25296, 25297, 25320], [25297, 25321, 25320], [25297, 25298, 25322], [25297, 25322, 25321], [25298, 25299, 25322], [25299, 25323, 25322], [25299, 14752, 14881], [25299, 14881, 25323], [25300, 25301, 25325], [25300, 25325, 25324], [25301, 25302, 25325], [25302, 25326, 25325], [25302, 25303, 25327], [25302, 25327, 25326], [25303, 25304, 25327], [25304, 25328, 25327], [25304, 25305, 25329], [25304, 25329, 25328], [25305, 25306, 25329], [25306, 25330, 25329], [25306, 25307, 25331], [25306, 25331, 25330], [25307, 25308, 25331], [25308, 25332, 25331], [25308, 25309, 25333], [25308, 25333, 25332], [25309, 25310, 25333], [25310, 25334, 25333], [25310, 25311, 25335], [25310, 25335, 25334], [25311, 25312, 25335], [25312, 25336, 25335], [25312, 25313, 25337], [25312, 25337, 25336], [25313, 25314, 25337], [25314, 25338, 25337], [25314, 25315, 25339], [25314, 25339, 25338], [25315, 25316, 25339], [25316, 25340, 25339], [25316, 25317, 25341], [25316, 25341, 25340], [25317, 25318, 25341], [25318, 25342, 25341], [25318, 25319, 25343], [25318, 25343, 25342], [25319, 25320, 25343], [25320, 25344, 25343], [25320, 25321, 25345], [25320, 25345, 25344], [25321, 25322, 25345], [25322, 25346, 25345], [25322, 25323, 25347], [25322, 25347, 25346], [25323, 14881, 25347], [14881, 15010, 25347], [25324, 25325, 25348], [25325, 25349, 25348], [25325, 25326, 25350], [25325, 25350, 25349], [25326, 25327, 25350], [25327, 25351, 25350], [25327, 25328, 25352], [25327, 25352, 25351], [25328, 25329, 25352], [25329, 25353, 25352], [25329, 25330, 25354], [25329, 25354, 25353], [25330, 25331, 25354], [25331, 25355, 25354], [25331, 25332, 25356], [25331, 25356, 25355], [25332, 25333, 25356], [25333, 25357, 25356], [25333, 25334, 25358], [25333, 25358, 25357], [25334, 25335, 25358], [25335, 25359, 25358], [25335, 25336, 25360], [25335, 25360, 25359], [25336, 25337, 25360], [25337, 25361, 25360], [25337, 25338, 25362], [25337, 25362, 25361], [25338, 25339, 25362], [25339, 25363, 25362], [25339, 25340, 25364], [25339, 25364, 25363], [25340, 25341, 25364], [25341, 25365, 25364], [25341, 25342, 25366], [25341, 25366, 25365], [25342, 25343, 25366], [25343, 25367, 25366], [25343, 25344, 25368], [25343, 25368, 25367], [25344, 25345, 25368], [25345, 25369, 25368], [25345, 25346, 25370], [25345, 25370, 25369], [25346, 25347, 25370], [25347, 25371, 25370], [25347, 15010, 15139], [25347, 15139, 25371], [25348, 25349, 25373], [25348, 25373, 25372], [25349, 25350, 25373], [25350, 25374, 25373], [25350, 25351, 25375], [25350, 25375, 25374], [25351, 25352, 25375], [25352, 25376, 25375], [25352, 25353, 25377], [25352, 25377, 25376], [25353, 25354, 25377], [25354, 25378, 25377], [25354, 25355, 25379], [25354, 25379, 25378], [25355, 25356, 25379], [25356, 25380, 25379], [25356, 25357, 25381], [25356, 25381, 25380], [25357, 25358, 25381], [25358, 25382, 25381], [25358, 25359, 25383], [25358, 25383, 25382], [25359, 25360, 25383], [25360, 25384, 25383], [25360, 25361, 25385], [25360, 25385, 25384], [25361, 25362, 25385], [25362, 25386, 25385], [25362, 25363, 25387], [25362, 25387, 25386], [25363, 25364, 25387], [25364, 25388, 25387], [25364, 25365, 25389], [25364, 25389, 25388], [25365, 25366, 25389], [25366, 25390, 25389], [25366, 25367, 25391], [25366, 25391, 25390], [25367, 25368, 25391], [25368, 25392, 25391], [25368, 25369, 25393], [25368, 25393, 25392], 
[25369, 25370, 25393], [25370, 25394, 25393], [25370, 25371, 25395], [25370, 25395, 25394], [25371, 15139, 25395], [15139, 15268, 25395], [25372, 25373, 25396], [25373, 25397, 25396], [25373, 25374, 25398], [25373, 25398, 25397], [25374, 25375, 25398], [25375, 25399, 25398], [25375, 25376, 25400], [25375, 25400, 25399], [25376, 25377, 25400], [25377, 25401, 25400], [25377, 25378, 25402], [25377, 25402, 25401], [25378, 25379, 25402], [25379, 25403, 25402], [25379, 25380, 25404], [25379, 25404, 25403], [25380, 25381, 25404], [25381, 25405, 25404], [25381, 25382, 25406], [25381, 25406, 25405], [25382, 25383, 25406], [25383, 25407, 25406], [25383, 25384, 25408], [25383, 25408, 25407], [25384, 25385, 25408], [25385, 25409, 25408], [25385, 25386, 25410], [25385, 25410, 25409], [25386, 25387, 25410], [25387, 25411, 25410], [25387, 25388, 25412], [25387, 25412, 25411], [25388, 25389, 25412], [25389, 25413, 25412], [25389, 25390, 25414], [25389, 25414, 25413], [25390, 25391, 25414], [25391, 25415, 25414], [25391, 25392, 25416], [25391, 25416, 25415], [25392, 25393, 25416], [25393, 25417, 25416], [25393, 25394, 25418], [25393, 25418, 25417], [25394, 25395, 25418], [25395, 25419, 25418], [25395, 15268, 15397], [25395, 15397, 25419], [25396, 25397, 25421], [25396, 25421, 25420], [25397, 25398, 25421], [25398, 25422, 25421], [25398, 25399, 25423], [25398, 25423, 25422], [25399, 25400, 25423], [25400, 25424, 25423], [25400, 25401, 25425], [25400, 25425, 25424], [25401, 25402, 25425], [25402, 25426, 25425], [25402, 25403, 25427], [25402, 25427, 25426], [25403, 25404, 25427], [25404, 25428, 25427], [25404, 25405, 25429], [25404, 25429, 25428], [25405, 25406, 25429], [25406, 25430, 25429], [25406, 25407, 25431], [25406, 25431, 25430], [25407, 25408, 25431], [25408, 25432, 25431], [25408, 25409, 25433], [25408, 25433, 25432], [25409, 25410, 25433], [25410, 25434, 25433], [25410, 25411, 25435], [25410, 25435, 25434], [25411, 25412, 25435], [25412, 25436, 25435], [25412, 25413, 25437], [25412, 25437, 25436], [25413, 25414, 25437], [25414, 25438, 25437], [25414, 25415, 25439], [25414, 25439, 25438], [25415, 25416, 25439], [25416, 25440, 25439], [25416, 25417, 25441], [25416, 25441, 25440], [25417, 25418, 25441], [25418, 25442, 25441], [25418, 25419, 25443], [25418, 25443, 25442], [25419, 15397, 25443], [15397, 15526, 25443], [25420, 25421, 25444], [25421, 25445, 25444], [25421, 25422, 25446], [25421, 25446, 25445], [25422, 25423, 25446], [25423, 25447, 25446], [25423, 25424, 25448], [25423, 25448, 25447], [25424, 25425, 25448], [25425, 25449, 25448], [25425, 25426, 25450], [25425, 25450, 25449], [25426, 25427, 25450], [25427, 25451, 25450], [25427, 25428, 25452], [25427, 25452, 25451], [25428, 25429, 25452], [25429, 25453, 25452], [25429, 25430, 25454], [25429, 25454, 25453], [25430, 25431, 25454], [25431, 25455, 25454], [25431, 25432, 25456], [25431, 25456, 25455], [25432, 25433, 25456], [25433, 25457, 25456], [25433, 25434, 25458], [25433, 25458, 25457], [25434, 25435, 25458], [25435, 25459, 25458], [25435, 25436, 25460], [25435, 25460, 25459], [25436, 25437, 25460], [25437, 25461, 25460], [25437, 25438, 25462], [25437, 25462, 25461], [25438, 25439, 25462], [25439, 25463, 25462], [25439, 25440, 25464], [25439, 25464, 25463], [25440, 25441, 25464], [25441, 25465, 25464], [25441, 25442, 25466], [25441, 25466, 25465], [25442, 25443, 25466], [25443, 25467, 25466], [25443, 15526, 15655], [25443, 15655, 25467], [25444, 25445, 25469], [25444, 25469, 25468], [25445, 25446, 25469], [25446, 25470, 25469], [25446, 
25447, 25471], [25446, 25471, 25470], [25447, 25448, 25471], [25448, 25472, 25471], [25448, 25449, 25473], [25448, 25473, 25472], [25449, 25450, 25473], [25450, 25474, 25473], [25450, 25451, 25475], [25450, 25475, 25474], [25451, 25452, 25475], [25452, 25476, 25475], [25452, 25453, 25477], [25452, 25477, 25476], [25453, 25454, 25477], [25454, 25478, 25477], [25454, 25455, 25479], [25454, 25479, 25478], [25455, 25456, 25479], [25456, 25480, 25479], [25456, 25457, 25481], [25456, 25481, 25480], [25457, 25458, 25481], [25458, 25482, 25481], [25458, 25459, 25483], [25458, 25483, 25482], [25459, 25460, 25483], [25460, 25484, 25483], [25460, 25461, 25485], [25460, 25485, 25484], [25461, 25462, 25485], [25462, 25486, 25485], [25462, 25463, 25487], [25462, 25487, 25486], [25463, 25464, 25487], [25464, 25488, 25487], [25464, 25465, 25489], [25464, 25489, 25488], [25465, 25466, 25489], [25466, 25490, 25489], [25466, 25467, 25491], [25466, 25491, 25490], [25467, 15655, 25491], [15655, 15784, 25491], [25468, 25469, 25492], [25469, 25493, 25492], [25469, 25470, 25494], [25469, 25494, 25493], [25470, 25471, 25494], [25471, 25495, 25494], [25471, 25472, 25496], [25471, 25496, 25495], [25472, 25473, 25496], [25473, 25497, 25496], [25473, 25474, 25498], [25473, 25498, 25497], [25474, 25475, 25498], [25475, 25499, 25498], [25475, 25476, 25500], [25475, 25500, 25499], [25476, 25477, 25500], [25477, 25501, 25500], [25477, 25478, 25502], [25477, 25502, 25501], [25478, 25479, 25502], [25479, 25503, 25502], [25479, 25480, 25504], [25479, 25504, 25503], [25480, 25481, 25504], [25481, 25505, 25504], [25481, 25482, 25506], [25481, 25506, 25505], [25482, 25483, 25506], [25483, 25507, 25506], [25483, 25484, 25508], [25483, 25508, 25507], [25484, 25485, 25508], [25485, 25509, 25508], [25485, 25486, 25510], [25485, 25510, 25509], [25486, 25487, 25510], [25487, 25511, 25510], [25487, 25488, 25512], [25487, 25512, 25511], [25488, 25489, 25512], [25489, 25513, 25512], [25489, 25490, 25514], [25489, 25514, 25513], [25490, 25491, 25514], [25491, 25515, 25514], [25491, 15784, 15913], [25491, 15913, 25515], [25492, 25493, 25517], [25492, 25517, 25516], [25493, 25494, 25517], [25494, 25518, 25517], [25494, 25495, 25519], [25494, 25519, 25518], [25495, 25496, 25519], [25496, 25520, 25519], [25496, 25497, 25521], [25496, 25521, 25520], [25497, 25498, 25521], [25498, 25522, 25521], [25498, 25499, 25523], [25498, 25523, 25522], [25499, 25500, 25523], [25500, 25524, 25523], [25500, 25501, 25525], [25500, 25525, 25524], [25501, 25502, 25525], [25502, 25526, 25525], [25502, 25503, 25527], [25502, 25527, 25526], [25503, 25504, 25527], [25504, 25528, 25527], [25504, 25505, 25529], [25504, 25529, 25528], [25505, 25506, 25529], [25506, 25530, 25529], [25506, 25507, 25531], [25506, 25531, 25530], [25507, 25508, 25531], [25508, 25532, 25531], [25508, 25509, 25533], [25508, 25533, 25532], [25509, 25510, 25533], [25510, 25534, 25533], [25510, 25511, 25535], [25510, 25535, 25534], [25511, 25512, 25535], [25512, 25536, 25535], [25512, 25513, 25537], [25512, 25537, 25536], [25513, 25514, 25537], [25514, 25538, 25537], [25514, 25515, 25539], [25514, 25539, 25538], [25515, 15913, 25539], [15913, 16042, 25539], [25516, 25517, 22363], [25517, 22234, 22363], [25517, 25518, 22105], [25517, 22105, 22234], [25518, 25519, 22105], [25519, 21976, 22105], [25519, 25520, 21847], [25519, 21847, 21976], [25520, 25521, 21847], [25521, 21718, 21847], [25521, 25522, 21589], [25521, 21589, 21718], [25522, 25523, 21589], [25523, 21460, 21589], [25523, 25524, 
21331], [25523, 21331, 21460], [25524, 25525, 21331], [25525, 21202, 21331], [25525, 25526, 21073], [25525, 21073, 21202], [25526, 25527, 21073], [25527, 20944, 21073], [25527, 25528, 20815], [25527, 20815, 20944], [25528, 25529, 20815], [25529, 20686, 20815], [25529, 25530, 20557], [25529, 20557, 20686], [25530, 25531, 20557], [25531, 20428, 20557], [25531, 25532, 20299], [25531, 20299, 20428], [25532, 25533, 20299], [25533, 20170, 20299], [25533, 25534, 20041], [25533, 20041, 20170], [25534, 25535, 20041], [25535, 19912, 20041], [25535, 25536, 19783], [25535, 19783, 19912], [25536, 25537, 19783], [25537, 19654, 19783], [25537, 25538, 19525], [25537, 19525, 19654], [25538, 25539, 19525], [25539, 19396, 19525], [25539, 16042, 19267], [25539, 19267, 19396], [128, 19266, 25540], [128, 25540, 257], [19266, 19137, 25540], [19137, 25541, 25540], [19137, 19008, 25542], [19137, 25542, 25541], [19008, 18879, 25542], [18879, 25543, 25542], [18879, 18750, 25544], [18879, 25544, 25543], [18750, 18621, 25544], [18621, 25545, 25544], [18621, 18492, 25546], [18621, 25546, 25545], [18492, 18363, 25546], [18363, 25547, 25546], [18363, 18234, 25548], [18363, 25548, 25547], [18234, 18105, 25548], [18105, 25549, 25548], [18105, 17976, 25550], [18105, 25550, 25549], [17976, 17847, 25550], [17847, 25551, 25550], [17847, 17718, 25552], [17847, 25552, 25551], [17718, 17589, 25552], [17589, 25553, 25552], [17589, 17460, 25554], [17589, 25554, 25553], [17460, 17331, 25554], [17331, 25555, 25554], [17331, 17202, 25556], [17331, 25556, 25555], [17202, 17073, 25556], [17073, 25557, 25556], [17073, 16944, 25558], [17073, 25558, 25557], [16944, 16815, 25558], [16815, 25559, 25558], [16815, 16686, 25560], [16815, 25560, 25559], [16686, 16557, 25560], [16557, 25561, 25560], [16557, 16428, 25562], [16557, 25562, 25561], [16428, 16299, 25562], [16299, 25563, 25562], [257, 25540, 386], [25540, 25564, 386], [25540, 25541, 25565], [25540, 25565, 25564], [25541, 25542, 25565], [25542, 25566, 25565], [25542, 25543, 25567], [25542, 25567, 25566], [25543, 25544, 25567], [25544, 25568, 25567], [25544, 25545, 25569], [25544, 25569, 25568], [25545, 25546, 25569], [25546, 25570, 25569], [25546, 25547, 25571], [25546, 25571, 25570], [25547, 25548, 25571], [25548, 25572, 25571], [25548, 25549, 25573], [25548, 25573, 25572], [25549, 25550, 25573], [25550, 25574, 25573], [25550, 25551, 25575], [25550, 25575, 25574], [25551, 25552, 25575], [25552, 25576, 25575], [25552, 25553, 25577], [25552, 25577, 25576], [25553, 25554, 25577], [25554, 25578, 25577], [25554, 25555, 25579], [25554, 25579, 25578], [25555, 25556, 25579], [25556, 25580, 25579], [25556, 25557, 25581], [25556, 25581, 25580], [25557, 25558, 25581], [25558, 25582, 25581], [25558, 25559, 25583], [25558, 25583, 25582], [25559, 25560, 25583], [25560, 25584, 25583], [25560, 25561, 25585], [25560, 25585, 25584], [25561, 25562, 25585], [25562, 25586, 25585], [25562, 25563, 25587], [25562, 25587, 25586], [386, 25564, 25588], [386, 25588, 515], [25564, 25565, 25588], [25565, 25589, 25588], [25565, 25566, 25590], [25565, 25590, 25589], [25566, 25567, 25590], [25567, 25591, 25590], [25567, 25568, 25592], [25567, 25592, 25591], [25568, 25569, 25592], [25569, 25593, 25592], [25569, 25570, 25594], [25569, 25594, 25593], [25570, 25571, 25594], [25571, 25595, 25594], [25571, 25572, 25596], [25571, 25596, 25595], [25572, 25573, 25596], [25573, 25597, 25596], [25573, 25574, 25598], [25573, 25598, 25597], [25574, 25575, 25598], [25575, 25599, 25598], [25575, 25576, 25600], [25575, 25600, 25599], 
[25576, 25577, 25600], [25577, 25601, 25600], [25577, 25578, 25602], [25577, 25602, 25601], [25578, 25579, 25602], [25579, 25603, 25602], [25579, 25580, 25604], [25579, 25604, 25603], [25580, 25581, 25604], [25581, 25605, 25604], [25581, 25582, 25606], [25581, 25606, 25605], [25582, 25583, 25606], [25583, 25607, 25606], [25583, 25584, 25608], [25583, 25608, 25607], [25584, 25585, 25608], [25585, 25609, 25608], [25585, 25586, 25610], [25585, 25610, 25609], [25586, 25587, 25610], [25587, 25611, 25610], [515, 25588, 644], [25588, 25612, 644], [25588, 25589, 25613], [25588, 25613, 25612], [25589, 25590, 25613], [25590, 25614, 25613], [25590, 25591, 25615], [25590, 25615, 25614], [25591, 25592, 25615], [25592, 25616, 25615], [25592, 25593, 25617], [25592, 25617, 25616], [25593, 25594, 25617], [25594, 25618, 25617], [25594, 25595, 25619], [25594, 25619, 25618], [25595, 25596, 25619], [25596, 25620, 25619], [25596, 25597, 25621], [25596, 25621, 25620], [25597, 25598, 25621], [25598, 25622, 25621], [25598, 25599, 25623], [25598, 25623, 25622], [25599, 25600, 25623], [25600, 25624, 25623], [25600, 25601, 25625], [25600, 25625, 25624], [25601, 25602, 25625], [25602, 25626, 25625], [25602, 25603, 25627], [25602, 25627, 25626], [25603, 25604, 25627], [25604, 25628, 25627], [25604, 25605, 25629], [25604, 25629, 25628], [25605, 25606, 25629], [25606, 25630, 25629], [25606, 25607, 25631], [25606, 25631, 25630], [25607, 25608, 25631], [25608, 25632, 25631], [25608, 25609, 25633], [25608, 25633, 25632], [25609, 25610, 25633], [25610, 25634, 25633], [25610, 25611, 25635], [25610, 25635, 25634], [644, 25612, 25636], [644, 25636, 773], [25612, 25613, 25636], [25613, 25637, 25636], [25613, 25614, 25638], [25613, 25638, 25637], [25614, 25615, 25638], [25615, 25639, 25638], [25615, 25616, 25640], [25615, 25640, 25639], [25616, 25617, 25640], [25617, 25641, 25640], [25617, 25618, 25642], [25617, 25642, 25641], [25618, 25619, 25642], [25619, 25643, 25642], [25619, 25620, 25644], [25619, 25644, 25643], [25620, 25621, 25644], [25621, 25645, 25644], [25621, 25622, 25646], [25621, 25646, 25645], [25622, 25623, 25646], [25623, 25647, 25646], [25623, 25624, 25648], [25623, 25648, 25647], [25624, 25625, 25648], [25625, 25649, 25648], [25625, 25626, 25650], [25625, 25650, 25649], [25626, 25627, 25650], [25627, 25651, 25650], [25627, 25628, 25652], [25627, 25652, 25651], [25628, 25629, 25652], [25629, 25653, 25652], [25629, 25630, 25654], [25629, 25654, 25653], [25630, 25631, 25654], [25631, 25655, 25654], [25631, 25632, 25656], [25631, 25656, 25655], [25632, 25633, 25656], [25633, 25657, 25656], [25633, 25634, 25658], [25633, 25658, 25657], [25634, 25635, 25658], [25635, 25659, 25658], [773, 25636, 902], [25636, 25660, 902], [25636, 25637, 25661], [25636, 25661, 25660], [25637, 25638, 25661], [25638, 25662, 25661], [25638, 25639, 25663], [25638, 25663, 25662], [25639, 25640, 25663], [25640, 25664, 25663], [25640, 25641, 25665], [25640, 25665, 25664], [25641, 25642, 25665], [25642, 25666, 25665], [25642, 25643, 25667], [25642, 25667, 25666], [25643, 25644, 25667], [25644, 25668, 25667], [25644, 25645, 25669], [25644, 25669, 25668], [25645, 25646, 25669], [25646, 25670, 25669], [25646, 25647, 25671], [25646, 25671, 25670], [25647, 25648, 25671], [25648, 25672, 25671], [25648, 25649, 25673], [25648, 25673, 25672], [25649, 25650, 25673], [25650, 25674, 25673], [25650, 25651, 25675], [25650, 25675, 25674], [25651, 25652, 25675], [25652, 25676, 25675], [25652, 25653, 25677], [25652, 25677, 25676], [25653, 25654, 25677], [25654, 
25678, 25677], [25654, 25655, 25679], [25654, 25679, 25678], [25655, 25656, 25679], [25656, 25680, 25679], [25656, 25657, 25681], [25656, 25681, 25680], [25657, 25658, 25681], [25658, 25682, 25681], [25658, 25659, 25683], [25658, 25683, 25682], [902, 25660, 25684], [902, 25684, 1031], [25660, 25661, 25684], [25661, 25685, 25684], [25661, 25662, 25686], [25661, 25686, 25685], [25662, 25663, 25686], [25663, 25687, 25686], [25663, 25664, 25688], [25663, 25688, 25687], [25664, 25665, 25688], [25665, 25689, 25688], [25665, 25666, 25690], [25665, 25690, 25689], [25666, 25667, 25690], [25667, 25691, 25690], [25667, 25668, 25692], [25667, 25692, 25691], [25668, 25669, 25692], [25669, 25693, 25692], [25669, 25670, 25694], [25669, 25694, 25693], [25670, 25671, 25694], [25671, 25695, 25694], [25671, 25672, 25696], [25671, 25696, 25695], [25672, 25673, 25696], [25673, 25697, 25696], [25673, 25674, 25698], [25673, 25698, 25697], [25674, 25675, 25698], [25675, 25699, 25698], [25675, 25676, 25700], [25675, 25700, 25699], [25676, 25677, 25700], [25677, 25701, 25700], [25677, 25678, 25702], [25677, 25702, 25701], [25678, 25679, 25702], [25679, 25703, 25702], [25679, 25680, 25704], [25679, 25704, 25703], [25680, 25681, 25704], [25681, 25705, 25704], [25681, 25682, 25706], [25681, 25706, 25705], [25682, 25683, 25706], [25683, 25707, 25706], [1031, 25684, 1160], [25684, 25708, 1160], [25684, 25685, 25709], [25684, 25709, 25708], [25685, 25686, 25709], [25686, 25710, 25709], [25686, 25687, 25711], [25686, 25711, 25710], [25687, 25688, 25711], [25688, 25712, 25711], [25688, 25689, 25713], [25688, 25713, 25712], [25689, 25690, 25713], [25690, 25714, 25713], [25690, 25691, 25715], [25690, 25715, 25714], [25691, 25692, 25715], [25692, 25716, 25715], [25692, 25693, 25717], [25692, 25717, 25716], [25693, 25694, 25717], [25694, 25718, 25717], [25694, 25695, 25719], [25694, 25719, 25718], [25695, 25696, 25719], [25696, 25720, 25719], [25696, 25697, 25721], [25696, 25721, 25720], [25697, 25698, 25721], [25698, 25722, 25721], [25698, 25699, 25723], [25698, 25723, 25722], [25699, 25700, 25723], [25700, 25724, 25723], [25700, 25701, 25725], [25700, 25725, 25724], [25701, 25702, 25725], [25702, 25726, 25725], [25702, 25703, 25727], [25702, 25727, 25726], [25703, 25704, 25727], [25704, 25728, 25727], [25704, 25705, 25729], [25704, 25729, 25728], [25705, 25706, 25729], [25706, 25730, 25729], [25706, 25707, 25731], [25706, 25731, 25730], [1160, 25708, 25732], [1160, 25732, 1289], [25708, 25709, 25732], [25709, 25733, 25732], [25709, 25710, 25734], [25709, 25734, 25733], [25710, 25711, 25734], [25711, 25735, 25734], [25711, 25712, 25736], [25711, 25736, 25735], [25712, 25713, 25736], [25713, 25737, 25736], [25713, 25714, 25738], [25713, 25738, 25737], [25714, 25715, 25738], [25715, 25739, 25738], [25715, 25716, 25740], [25715, 25740, 25739], [25716, 25717, 25740], [25717, 25741, 25740], [25717, 25718, 25742], [25717, 25742, 25741], [25718, 25719, 25742], [25719, 25743, 25742], [25719, 25720, 25744], [25719, 25744, 25743], [25720, 25721, 25744], [25721, 25745, 25744], [25721, 25722, 25746], [25721, 25746, 25745], [25722, 25723, 25746], [25723, 25747, 25746], [25723, 25724, 25748], [25723, 25748, 25747], [25724, 25725, 25748], [25725, 25749, 25748], [25725, 25726, 25750], [25725, 25750, 25749], [25726, 25727, 25750], [25727, 25751, 25750], [25727, 25728, 25752], [25727, 25752, 25751], [25728, 25729, 25752], [25729, 25753, 25752], [25729, 25730, 25754], [25729, 25754, 25753], [25730, 25731, 25754], [25731, 25755, 25754], [1289, 
25732, 1418], [25732, 25756, 1418], [25732, 25733, 25757], [25732, 25757, 25756], [25733, 25734, 25757], [25734, 25758, 25757], [25734, 25735, 25759], [25734, 25759, 25758], [25735, 25736, 25759], [25736, 25760, 25759], [25736, 25737, 25761], [25736, 25761, 25760], [25737, 25738, 25761], [25738, 25762, 25761], [25738, 25739, 25763], [25738, 25763, 25762], [25739, 25740, 25763], [25740, 25764, 25763], [25740, 25741, 25765], [25740, 25765, 25764], [25741, 25742, 25765], [25742, 25766, 25765], [25742, 25743, 25767], [25742, 25767, 25766], [25743, 25744, 25767], [25744, 25768, 25767], [25744, 25745, 25769], [25744, 25769, 25768], [25745, 25746, 25769], [25746, 25770, 25769], [25746, 25747, 25771], [25746, 25771, 25770], [25747, 25748, 25771], [25748, 25772, 25771], [25748, 25749, 25773], [25748, 25773, 25772], [25749, 25750, 25773], [25750, 25774, 25773], [25750, 25751, 25775], [25750, 25775, 25774], [25751, 25752, 25775], [25752, 25776, 25775], [25752, 25753, 25777], [25752, 25777, 25776], [25753, 25754, 25777], [25754, 25778, 25777], [25754, 25755, 25779], [25754, 25779, 25778], [1418, 25756, 25780], [1418, 25780, 1547], [25756, 25757, 25780], [25757, 25781, 25780], [25757, 25758, 25782], [25757, 25782, 25781], [25758, 25759, 25782], [25759, 25783, 25782], [25759, 25760, 25784], [25759, 25784, 25783], [25760, 25761, 25784], [25761, 25785, 25784], [25761, 25762, 25786], [25761, 25786, 25785], [25762, 25763, 25786], [25763, 25787, 25786], [25763, 25764, 25788], [25763, 25788, 25787], [25764, 25765, 25788], [25765, 25789, 25788], [25765, 25766, 25790], [25765, 25790, 25789], [25766, 25767, 25790], [25767, 25791, 25790], [25767, 25768, 25792], [25767, 25792, 25791], [25768, 25769, 25792], [25769, 25793, 25792], [25769, 25770, 25794], [25769, 25794, 25793], [25770, 25771, 25794], [25771, 25795, 25794], [25771, 25772, 25796], [25771, 25796, 25795], [25772, 25773, 25796], [25773, 25797, 25796], [25773, 25774, 25798], [25773, 25798, 25797], [25774, 25775, 25798], [25775, 25799, 25798], [25775, 25776, 25800], [25775, 25800, 25799], [25776, 25777, 25800], [25777, 25801, 25800], [25777, 25778, 25802], [25777, 25802, 25801], [25778, 25779, 25802], [25779, 25803, 25802], [1547, 25780, 1676], [25780, 25804, 1676], [25780, 25781, 25805], [25780, 25805, 25804], [25781, 25782, 25805], [25782, 25806, 25805], [25782, 25783, 25807], [25782, 25807, 25806], [25783, 25784, 25807], [25784, 25808, 25807], [25784, 25785, 25809], [25784, 25809, 25808], [25785, 25786, 25809], [25786, 25810, 25809], [25786, 25787, 25811], [25786, 25811, 25810], [25787, 25788, 25811], [25788, 25812, 25811], [25788, 25789, 25813], [25788, 25813, 25812], [25789, 25790, 25813], [25790, 25814, 25813], [25790, 25791, 25815], [25790, 25815, 25814], [25791, 25792, 25815], [25792, 25816, 25815], [25792, 25793, 25817], [25792, 25817, 25816], [25793, 25794, 25817], [25794, 25818, 25817], [25794, 25795, 25819], [25794, 25819, 25818], [25795, 25796, 25819], [25796, 25820, 25819], [25796, 25797, 25821], [25796, 25821, 25820], [25797, 25798, 25821], [25798, 25822, 25821], [25798, 25799, 25823], [25798, 25823, 25822], [25799, 25800, 25823], [25800, 25824, 25823], [25800, 25801, 25825], [25800, 25825, 25824], [25801, 25802, 25825], [25802, 25826, 25825], [25802, 25803, 25827], [25802, 25827, 25826], [1676, 25804, 25828], [1676, 25828, 1805], [25804, 25805, 25828], [25805, 25829, 25828], [25805, 25806, 25830], [25805, 25830, 25829], [25806, 25807, 25830], [25807, 25831, 25830], [25807, 25808, 25832], [25807, 25832, 25831], [25808, 25809, 25832], [25809, 
25833, 25832], [25809, 25810, 25834], [25809, 25834, 25833], [25810, 25811, 25834], [25811, 25835, 25834], [25811, 25812, 25836], [25811, 25836, 25835], [25812, 25813, 25836], [25813, 25837, 25836], [25813, 25814, 25838], [25813, 25838, 25837], [25814, 25815, 25838], [25815, 25839, 25838], [25815, 25816, 25840], [25815, 25840, 25839], [25816, 25817, 25840], [25817, 25841, 25840], [25817, 25818, 25842], [25817, 25842, 25841], [25818, 25819, 25842], [25819, 25843, 25842], [25819, 25820, 25844], [25819, 25844, 25843], [25820, 25821, 25844], [25821, 25845, 25844], [25821, 25822, 25846], [25821, 25846, 25845], [25822, 25823, 25846], [25823, 25847, 25846], [25823, 25824, 25848], [25823, 25848, 25847], [25824, 25825, 25848], [25825, 25849, 25848], [25825, 25826, 25850], [25825, 25850, 25849], [25826, 25827, 25850], [25827, 25851, 25850], [1805, 25828, 1934], [25828, 25852, 1934], [25828, 25829, 25853], [25828, 25853, 25852], [25829, 25830, 25853], [25830, 25854, 25853], [25830, 25831, 25855], [25830, 25855, 25854], [25831, 25832, 25855], [25832, 25856, 25855], [25832, 25833, 25857], [25832, 25857, 25856], [25833, 25834, 25857], [25834, 25858, 25857], [25834, 25835, 25859], [25834, 25859, 25858], [25835, 25836, 25859], [25836, 25860, 25859], [25836, 25837, 25861], [25836, 25861, 25860], [25837, 25838, 25861], [25838, 25862, 25861], [25838, 25839, 25863], [25838, 25863, 25862], [25839, 25840, 25863], [25840, 25864, 25863], [25840, 25841, 25865], [25840, 25865, 25864], [25841, 25842, 25865], [25842, 25866, 25865], [25842, 25843, 25867], [25842, 25867, 25866], [25843, 25844, 25867], [25844, 25868, 25867], [25844, 25845, 25869], [25844, 25869, 25868], [25845, 25846, 25869], [25846, 25870, 25869], [25846, 25847, 25871], [25846, 25871, 25870], [25847, 25848, 25871], [25848, 25872, 25871], [25848, 25849, 25873], [25848, 25873, 25872], [25849, 25850, 25873], [25850, 25874, 25873], [25850, 25851, 25875], [25850, 25875, 25874], [1934, 25852, 25876], [1934, 25876, 2063], [25852, 25853, 25876], [25853, 25877, 25876], [25853, 25854, 25878], [25853, 25878, 25877], [25854, 25855, 25878], [25855, 25879, 25878], [25855, 25856, 25880], [25855, 25880, 25879], [25856, 25857, 25880], [25857, 25881, 25880], [25857, 25858, 25882], [25857, 25882, 25881], [25858, 25859, 25882], [25859, 25883, 25882], [25859, 25860, 25884], [25859, 25884, 25883], [25860, 25861, 25884], [25861, 25885, 25884], [25861, 25862, 25886], [25861, 25886, 25885], [25862, 25863, 25886], [25863, 25887, 25886], [25863, 25864, 25888], [25863, 25888, 25887], [25864, 25865, 25888], [25865, 25889, 25888], [25865, 25866, 25890], [25865, 25890, 25889], [25866, 25867, 25890], [25867, 25891, 25890], [25867, 25868, 25892], [25867, 25892, 25891], [25868, 25869, 25892], [25869, 25893, 25892], [25869, 25870, 25894], [25869, 25894, 25893], [25870, 25871, 25894], [25871, 25895, 25894], [25871, 25872, 25896], [25871, 25896, 25895], [25872, 25873, 25896], [25873, 25897, 25896], [25873, 25874, 25898], [25873, 25898, 25897], [25874, 25875, 25898], [25875, 25899, 25898], [2063, 25876, 2192], [25876, 25900, 2192], [25876, 25877, 25901], [25876, 25901, 25900], [25877, 25878, 25901], [25878, 25902, 25901], [25878, 25879, 25903], [25878, 25903, 25902], [25879, 25880, 25903], [25880, 25904, 25903], [25880, 25881, 25905], [25880, 25905, 25904], [25881, 25882, 25905], [25882, 25906, 25905], [25882, 25883, 25907], [25882, 25907, 25906], [25883, 25884, 25907], [25884, 25908, 25907], [25884, 25885, 25909], [25884, 25909, 25908], [25885, 25886, 25909], [25886, 25910, 25909], 
[25886, 25887, 25911], [25886, 25911, 25910], [25887, 25888, 25911], [25888, 25912, 25911], [25888, 25889, 25913], [25888, 25913, 25912], [25889, 25890, 25913], [25890, 25914, 25913], [25890, 25891, 25915], [25890, 25915, 25914], [25891, 25892, 25915], [25892, 25916, 25915], [25892, 25893, 25917], [25892, 25917, 25916], [25893, 25894, 25917], [25894, 25918, 25917], [25894, 25895, 25919], [25894, 25919, 25918], [25895, 25896, 25919], [25896, 25920, 25919], [25896, 25897, 25921], [25896, 25921, 25920], [25897, 25898, 25921], [25898, 25922, 25921], [25898, 25899, 25923], [25898, 25923, 25922], [2192, 25900, 25924], [2192, 25924, 2321], [25900, 25901, 25924], [25901, 25925, 25924], [25901, 25902, 25926], [25901, 25926, 25925], [25902, 25903, 25926], [25903, 25927, 25926], [25903, 25904, 25928], [25903, 25928, 25927], [25904, 25905, 25928], [25905, 25929, 25928], [25905, 25906, 25930], [25905, 25930, 25929], [25906, 25907, 25930], [25907, 25931, 25930], [25907, 25908, 25932], [25907, 25932, 25931], [25908, 25909, 25932], [25909, 25933, 25932], [25909, 25910, 25934], [25909, 25934, 25933], [25910, 25911, 25934], [25911, 25935, 25934], [25911, 25912, 25936], [25911, 25936, 25935], [25912, 25913, 25936], [25913, 25937, 25936], [25913, 25914, 25938], [25913, 25938, 25937], [25914, 25915, 25938], [25915, 25939, 25938], [25915, 25916, 25940], [25915, 25940, 25939], [25916, 25917, 25940], [25917, 25941, 25940], [25917, 25918, 25942], [25917, 25942, 25941], [25918, 25919, 25942], [25919, 25943, 25942], [25919, 25920, 25944], [25919, 25944, 25943], [25920, 25921, 25944], [25921, 25945, 25944], [25921, 25922, 25946], [25921, 25946, 25945], [25922, 25923, 25946], [25923, 25947, 25946], [2321, 25924, 2450], [25924, 25948, 2450], [25924, 25925, 25949], [25924, 25949, 25948], [25925, 25926, 25949], [25926, 25950, 25949], [25926, 25927, 25951], [25926, 25951, 25950], [25927, 25928, 25951], [25928, 25952, 25951], [25928, 25929, 25953], [25928, 25953, 25952], [25929, 25930, 25953], [25930, 25954, 25953], [25930, 25931, 25955], [25930, 25955, 25954], [25931, 25932, 25955], [25932, 25956, 25955], [25932, 25933, 25957], [25932, 25957, 25956], [25933, 25934, 25957], [25934, 25958, 25957], [25934, 25935, 25959], [25934, 25959, 25958], [25935, 25936, 25959], [25936, 25960, 25959], [25936, 25937, 25961], [25936, 25961, 25960], [25937, 25938, 25961], [25938, 25962, 25961], [25938, 25939, 25963], [25938, 25963, 25962], [25939, 25940, 25963], [25940, 25964, 25963], [25940, 25941, 25965], [25940, 25965, 25964], [25941, 25942, 25965], [25942, 25966, 25965], [25942, 25943, 25967], [25942, 25967, 25966], [25943, 25944, 25967], [25944, 25968, 25967], [25944, 25945, 25969], [25944, 25969, 25968], [25945, 25946, 25969], [25946, 25970, 25969], [25946, 25947, 25971], [25946, 25971, 25970], [2450, 25948, 25972], [2450, 25972, 2579], [25948, 25949, 25972], [25949, 25973, 25972], [25949, 25950, 25974], [25949, 25974, 25973], [25950, 25951, 25974], [25951, 25975, 25974], [25951, 25952, 25976], [25951, 25976, 25975], [25952, 25953, 25976], [25953, 25977, 25976], [25953, 25954, 25978], [25953, 25978, 25977], [25954, 25955, 25978], [25955, 25979, 25978], [25955, 25956, 25980], [25955, 25980, 25979], [25956, 25957, 25980], [25957, 25981, 25980], [25957, 25958, 25982], [25957, 25982, 25981], [25958, 25959, 25982], [25959, 25983, 25982], [25959, 25960, 25984], [25959, 25984, 25983], [25960, 25961, 25984], [25961, 25985, 25984], [25961, 25962, 25986], [25961, 25986, 25985], [25962, 25963, 25986], [25963, 25987, 25986], [25963, 25964, 
25988], [25963, 25988, 25987], [25964, 25965, 25988], [25965, 25989, 25988], [25965, 25966, 25990], [25965, 25990, 25989], [25966, 25967, 25990], [25967, 25991, 25990], [25967, 25968, 25992], [25967, 25992, 25991], [25968, 25969, 25992], [25969, 25993, 25992], [25969, 25970, 25994], [25969, 25994, 25993], [25970, 25971, 25994], [25971, 25995, 25994], [2579, 25972, 2708], [25972, 25996, 2708], [25972, 25973, 25997], [25972, 25997, 25996], [25973, 25974, 25997], [25974, 25998, 25997], [25974, 25975, 25999], [25974, 25999, 25998], [25975, 25976, 25999], [25976, 26000, 25999], [25976, 25977, 26001], [25976, 26001, 26000], [25977, 25978, 26001], [25978, 26002, 26001], [25978, 25979, 26003], [25978, 26003, 26002], [25979, 25980, 26003], [25980, 26004, 26003], [25980, 25981, 26005], [25980, 26005, 26004], [25981, 25982, 26005], [25982, 26006, 26005], [25982, 25983, 26007], [25982, 26007, 26006], [25983, 25984, 26007], [25984, 26008, 26007], [25984, 25985, 26009], [25984, 26009, 26008], [25985, 25986, 26009], [25986, 26010, 26009], [25986, 25987, 26011], [25986, 26011, 26010], [25987, 25988, 26011], [25988, 26012, 26011], [25988, 25989, 26013], [25988, 26013, 26012], [25989, 25990, 26013], [25990, 26014, 26013], [25990, 25991, 26015], [25990, 26015, 26014], [25991, 25992, 26015], [25992, 26016, 26015], [25992, 25993, 26017], [25992, 26017, 26016], [25993, 25994, 26017], [25994, 26018, 26017], [25994, 25995, 26019], [25994, 26019, 26018], [2708, 25996, 26020], [2708, 26020, 2837], [25996, 25997, 26020], [25997, 26021, 26020], [25997, 25998, 26022], [25997, 26022, 26021], [25998, 25999, 26022], [25999, 26023, 26022], [25999, 26000, 26024], [25999, 26024, 26023], [26000, 26001, 26024], [26001, 26025, 26024], [26001, 26002, 26026], [26001, 26026, 26025], [26002, 26003, 26026], [26003, 26027, 26026], [26003, 26004, 26028], [26003, 26028, 26027], [26004, 26005, 26028], [26005, 26029, 26028], [26005, 26006, 26030], [26005, 26030, 26029], [26006, 26007, 26030], [26007, 26031, 26030], [26007, 26008, 26032], [26007, 26032, 26031], [26008, 26009, 26032], [26009, 26033, 26032], [26009, 26010, 26034], [26009, 26034, 26033], [26010, 26011, 26034], [26011, 26035, 26034], [26011, 26012, 26036], [26011, 26036, 26035], [26012, 26013, 26036], [26013, 26037, 26036], [26013, 26014, 26038], [26013, 26038, 26037], [26014, 26015, 26038], [26015, 26039, 26038], [26015, 26016, 26040], [26015, 26040, 26039], [26016, 26017, 26040], [26017, 26041, 26040], [26017, 26018, 26042], [26017, 26042, 26041], [26018, 26019, 26042], [26019, 26043, 26042], [2837, 26020, 2966], [26020, 26044, 2966], [26020, 26021, 26045], [26020, 26045, 26044], [26021, 26022, 26045], [26022, 26046, 26045], [26022, 26023, 26047], [26022, 26047, 26046], [26023, 26024, 26047], [26024, 26048, 26047], [26024, 26025, 26049], [26024, 26049, 26048], [26025, 26026, 26049], [26026, 26050, 26049], [26026, 26027, 26051], [26026, 26051, 26050], [26027, 26028, 26051], [26028, 26052, 26051], [26028, 26029, 26053], [26028, 26053, 26052], [26029, 26030, 26053], [26030, 26054, 26053], [26030, 26031, 26055], [26030, 26055, 26054], [26031, 26032, 26055], [26032, 26056, 26055], [26032, 26033, 26057], [26032, 26057, 26056], [26033, 26034, 26057], [26034, 26058, 26057], [26034, 26035, 26059], [26034, 26059, 26058], [26035, 26036, 26059], [26036, 26060, 26059], [26036, 26037, 26061], [26036, 26061, 26060], [26037, 26038, 26061], [26038, 26062, 26061], [26038, 26039, 26063], [26038, 26063, 26062], [26039, 26040, 26063], [26040, 26064, 26063], [26040, 26041, 26065], [26040, 
26065, 26064], [26041, 26042, 26065], [26042, 26066, 26065], [26042, 26043, 26067], [26042, 26067, 26066], [2966, 26044, 26068], [2966, 26068, 3095], [26044, 26045, 26068], [26045, 26069, 26068], [26045, 26046, 26070], [26045, 26070, 26069], [26046, 26047, 26070], [26047, 26071, 26070], [26047, 26048, 26072], [26047, 26072, 26071], [26048, 26049, 26072], [26049, 26073, 26072], [26049, 26050, 26074], [26049, 26074, 26073], [26050, 26051, 26074], [26051, 26075, 26074], [26051, 26052, 26076], [26051, 26076, 26075], [26052, 26053, 26076], [26053, 26077, 26076], [26053, 26054, 26078], [26053, 26078, 26077], [26054, 26055, 26078], [26055, 26079, 26078], [26055, 26056, 26080], [26055, 26080, 26079], [26056, 26057, 26080], [26057, 26081, 26080], [26057, 26058, 26082], [26057, 26082, 26081], [26058, 26059, 26082], [26059, 26083, 26082], [26059, 26060, 26084], [26059, 26084, 26083], [26060, 26061, 26084], [26061, 26085, 26084], [26061, 26062, 26086], [26061, 26086, 26085], [26062, 26063, 26086], [26063, 26087, 26086], [26063, 26064, 26088], [26063, 26088, 26087], [26064, 26065, 26088], [26065, 26089, 26088], [26065, 26066, 26090], [26065, 26090, 26089], [26066, 26067, 26090], [26067, 26091, 26090], [3095, 26068, 3224], [26068, 26092, 3224], [26068, 26069, 26093], [26068, 26093, 26092], [26069, 26070, 26093], [26070, 26094, 26093], [26070, 26071, 26095], [26070, 26095, 26094], [26071, 26072, 26095], [26072, 26096, 26095], [26072, 26073, 26097], [26072, 26097, 26096], [26073, 26074, 26097], [26074, 26098, 26097], [26074, 26075, 26099], [26074, 26099, 26098], [26075, 26076, 26099], [26076, 26100, 26099], [26076, 26077, 26101], [26076, 26101, 26100], [26077, 26078, 26101], [26078, 26102, 26101], [26078, 26079, 26103], [26078, 26103, 26102], [26079, 26080, 26103], [26080, 26104, 26103], [26080, 26081, 26105], [26080, 26105, 26104], [26081, 26082, 26105], [26082, 26106, 26105], [26082, 26083, 26107], [26082, 26107, 26106], [26083, 26084, 26107], [26084, 26108, 26107], [26084, 26085, 26109], [26084, 26109, 26108], [26085, 26086, 26109], [26086, 26110, 26109], [26086, 26087, 26111], [26086, 26111, 26110], [26087, 26088, 26111], [26088, 26112, 26111], [26088, 26089, 26113], [26088, 26113, 26112], [26089, 26090, 26113], [26090, 26114, 26113], [26090, 26091, 26115], [26090, 26115, 26114], [3224, 26092, 26116], [3224, 26116, 3353], [26092, 26093, 26116], [26093, 26117, 26116], [26093, 26094, 26118], [26093, 26118, 26117], [26094, 26095, 26118], [26095, 26119, 26118], [26095, 26096, 26120], [26095, 26120, 26119], [26096, 26097, 26120], [26097, 26121, 26120], [26097, 26098, 26122], [26097, 26122, 26121], [26098, 26099, 26122], [26099, 26123, 26122], [26099, 26100, 26124], [26099, 26124, 26123], [26100, 26101, 26124], [26101, 26125, 26124], [26101, 26102, 26126], [26101, 26126, 26125], [26102, 26103, 26126], [26103, 26127, 26126], [26103, 26104, 26128], [26103, 26128, 26127], [26104, 26105, 26128], [26105, 26129, 26128], [26105, 26106, 26130], [26105, 26130, 26129], [26106, 26107, 26130], [26107, 26131, 26130], [26107, 26108, 26132], [26107, 26132, 26131], [26108, 26109, 26132], [26109, 26133, 26132], [26109, 26110, 26134], [26109, 26134, 26133], [26110, 26111, 26134], [26111, 26135, 26134], [26111, 26112, 26136], [26111, 26136, 26135], [26112, 26113, 26136], [26113, 26137, 26136], [26113, 26114, 26138], [26113, 26138, 26137], [26114, 26115, 26138], [26115, 26139, 26138], [3353, 26116, 3482], [26116, 26140, 3482], [26116, 26117, 26141], [26116, 26141, 26140], [26117, 26118, 26141], [26118, 26142, 26141], [26118, 
26119, 26143], [26118, 26143, 26142], [26119, 26120, 26143], [26120, 26144, 26143], [26120, 26121, 26145], [26120, 26145, 26144], [26121, 26122, 26145], [26122, 26146, 26145], [26122, 26123, 26147], [26122, 26147, 26146], [26123, 26124, 26147], [26124, 26148, 26147], [26124, 26125, 26149], [26124, 26149, 26148], [26125, 26126, 26149], [26126, 26150, 26149], [26126, 26127, 26151], [26126, 26151, 26150], [26127, 26128, 26151], [26128, 26152, 26151], [26128, 26129, 26153], [26128, 26153, 26152], [26129, 26130, 26153], [26130, 26154, 26153], [26130, 26131, 26155], [26130, 26155, 26154], [26131, 26132, 26155], [26132, 26156, 26155], [26132, 26133, 26157], [26132, 26157, 26156], [26133, 26134, 26157], [26134, 26158, 26157], [26134, 26135, 26159], [26134, 26159, 26158], [26135, 26136, 26159], [26136, 26160, 26159], [26136, 26137, 26161], [26136, 26161, 26160], [26137, 26138, 26161], [26138, 26162, 26161], [26138, 26139, 26163], [26138, 26163, 26162], [3482, 26140, 26164], [3482, 26164, 3611], [26140, 26141, 26164], [26141, 26165, 26164], [26141, 26142, 26166], [26141, 26166, 26165], [26142, 26143, 26166], [26143, 26167, 26166], [26143, 26144, 26168], [26143, 26168, 26167], [26144, 26145, 26168], [26145, 26169, 26168], [26145, 26146, 26170], [26145, 26170, 26169], [26146, 26147, 26170], [26147, 26171, 26170], [26147, 26148, 26172], [26147, 26172, 26171], [26148, 26149, 26172], [26149, 26173, 26172], [26149, 26150, 26174], [26149, 26174, 26173], [26150, 26151, 26174], [26151, 26175, 26174], [26151, 26152, 26176], [26151, 26176, 26175], [26152, 26153, 26176], [26153, 26177, 26176], [26153, 26154, 26178], [26153, 26178, 26177], [26154, 26155, 26178], [26155, 26179, 26178], [26155, 26156, 26180], [26155, 26180, 26179], [26156, 26157, 26180], [26157, 26181, 26180], [26157, 26158, 26182], [26157, 26182, 26181], [26158, 26159, 26182], [26159, 26183, 26182], [26159, 26160, 26184], [26159, 26184, 26183], [26160, 26161, 26184], [26161, 26185, 26184], [26161, 26162, 26186], [26161, 26186, 26185], [26162, 26163, 26186], [26163, 26187, 26186], [3611, 26164, 3740], [26164, 26188, 3740], [26164, 26165, 26189], [26164, 26189, 26188], [26165, 26166, 26189], [26166, 26190, 26189], [26166, 26167, 26191], [26166, 26191, 26190], [26167, 26168, 26191], [26168, 26192, 26191], [26168, 26169, 26193], [26168, 26193, 26192], [26169, 26170, 26193], [26170, 26194, 26193], [26170, 26171, 26195], [26170, 26195, 26194], [26171, 26172, 26195], [26172, 26196, 26195], [26172, 26173, 26197], [26172, 26197, 26196], [26173, 26174, 26197], [26174, 26198, 26197], [26174, 26175, 26199], [26174, 26199, 26198], [26175, 26176, 26199], [26176, 26200, 26199], [26176, 26177, 26201], [26176, 26201, 26200], [26177, 26178, 26201], [26178, 26202, 26201], [26178, 26179, 26203], [26178, 26203, 26202], [26179, 26180, 26203], [26180, 26204, 26203], [26180, 26181, 26205], [26180, 26205, 26204], [26181, 26182, 26205], [26182, 26206, 26205], [26182, 26183, 26207], [26182, 26207, 26206], [26183, 26184, 26207], [26184, 26208, 26207], [26184, 26185, 26209], [26184, 26209, 26208], [26185, 26186, 26209], [26186, 26210, 26209], [26186, 26187, 26211], [26186, 26211, 26210], [3740, 26188, 26212], [3740, 26212, 3869], [26188, 26189, 26212], [26189, 26213, 26212], [26189, 26190, 26214], [26189, 26214, 26213], [26190, 26191, 26214], [26191, 26215, 26214], [26191, 26192, 26216], [26191, 26216, 26215], [26192, 26193, 26216], [26193, 26217, 26216], [26193, 26194, 26218], [26193, 26218, 26217], [26194, 26195, 26218], [26195, 26219, 26218], [26195, 26196, 26220], 
[26195, 26220, 26219], [26196, 26197, 26220], [26197, 26221, 26220], [26197, 26198, 26222], [26197, 26222, 26221], [26198, 26199, 26222], [26199, 26223, 26222], [26199, 26200, 26224], [26199, 26224, 26223], [26200, 26201, 26224], [26201, 26225, 26224], [26201, 26202, 26226], [26201, 26226, 26225], [26202, 26203, 26226], [26203, 26227, 26226], [26203, 26204, 26228], [26203, 26228, 26227], [26204, 26205, 26228], [26205, 26229, 26228], [26205, 26206, 26230], [26205, 26230, 26229], [26206, 26207, 26230], [26207, 26231, 26230], [26207, 26208, 26232], [26207, 26232, 26231], [26208, 26209, 26232], [26209, 26233, 26232], [26209, 26210, 26234], [26209, 26234, 26233], [26210, 26211, 26234], [26211, 26235, 26234], [3869, 26212, 3998], [26212, 26236, 3998], [26212, 26213, 26237], [26212, 26237, 26236], [26213, 26214, 26237], [26214, 26238, 26237], [26214, 26215, 26239], [26214, 26239, 26238], [26215, 26216, 26239], [26216, 26240, 26239], [26216, 26217, 26241], [26216, 26241, 26240], [26217, 26218, 26241], [26218, 26242, 26241], [26218, 26219, 26243], [26218, 26243, 26242], [26219, 26220, 26243], [26220, 26244, 26243], [26220, 26221, 26245], [26220, 26245, 26244], [26221, 26222, 26245], [26222, 26246, 26245], [26222, 26223, 26247], [26222, 26247, 26246], [26223, 26224, 26247], [26224, 26248, 26247], [26224, 26225, 26249], [26224, 26249, 26248], [26225, 26226, 26249], [26226, 26250, 26249], [26226, 26227, 26251], [26226, 26251, 26250], [26227, 26228, 26251], [26228, 26252, 26251], [26228, 26229, 26253], [26228, 26253, 26252], [26229, 26230, 26253], [26230, 26254, 26253], [26230, 26231, 26255], [26230, 26255, 26254], [26231, 26232, 26255], [26232, 26256, 26255], [26232, 26233, 26257], [26232, 26257, 26256], [26233, 26234, 26257], [26234, 26258, 26257], [26234, 26235, 26259], [26234, 26259, 26258], [3998, 26236, 26260], [3998, 26260, 4127], [26236, 26237, 26260], [26237, 26261, 26260], [26237, 26238, 26262], [26237, 26262, 26261], [26238, 26239, 26262], [26239, 26263, 26262], [26239, 26240, 26264], [26239, 26264, 26263], [26240, 26241, 26264], [26241, 26265, 26264], [26241, 26242, 26266], [26241, 26266, 26265], [26242, 26243, 26266], [26243, 26267, 26266], [26243, 26244, 26268], [26243, 26268, 26267], [26244, 26245, 26268], [26245, 26269, 26268], [26245, 26246, 26270], [26245, 26270, 26269], [26246, 26247, 26270], [26247, 26271, 26270], [26247, 26248, 26272], [26247, 26272, 26271], [26248, 26249, 26272], [26249, 26273, 26272], [26249, 26250, 26274], [26249, 26274, 26273], [26250, 26251, 26274], [26251, 26275, 26274], [26251, 26252, 26276], [26251, 26276, 26275], [26252, 26253, 26276], [26253, 26277, 26276], [26253, 26254, 26278], [26253, 26278, 26277], [26254, 26255, 26278], [26255, 26279, 26278], [26255, 26256, 26280], [26255, 26280, 26279], [26256, 26257, 26280], [26257, 26281, 26280], [26257, 26258, 26282], [26257, 26282, 26281], [26258, 26259, 26282], [26259, 26283, 26282], [4127, 26260, 4256], [26260, 26284, 4256], [26260, 26261, 26285], [26260, 26285, 26284], [26261, 26262, 26285], [26262, 26286, 26285], [26262, 26263, 26287], [26262, 26287, 26286], [26263, 26264, 26287], [26264, 26288, 26287], [26264, 26265, 26289], [26264, 26289, 26288], [26265, 26266, 26289], [26266, 26290, 26289], [26266, 26267, 26291], [26266, 26291, 26290], [26267, 26268, 26291], [26268, 26292, 26291], [26268, 26269, 26293], [26268, 26293, 26292], [26269, 26270, 26293], [26270, 26294, 26293], [26270, 26271, 26295], [26270, 26295, 26294], [26271, 26272, 26295], [26272, 26296, 26295], [26272, 26273, 26297], [26272, 26297, 
26296], [26273, 26274, 26297], [26274, 26298, 26297], [26274, 26275, 26299], [26274, 26299, 26298], [26275, 26276, 26299], [26276, 26300, 26299], [26276, 26277, 26301], [26276, 26301, 26300], [26277, 26278, 26301], [26278, 26302, 26301], [26278, 26279, 26303], [26278, 26303, 26302], [26279, 26280, 26303], [26280, 26304, 26303], [26280, 26281, 26305], [26280, 26305, 26304], [26281, 26282, 26305], [26282, 26306, 26305], [26282, 26283, 26307], [26282, 26307, 26306], [4256, 26284, 26308], [4256, 26308, 4385], [26284, 26285, 26308], [26285, 26309, 26308], [26285, 26286, 26310], [26285, 26310, 26309], [26286, 26287, 26310], [26287, 26311, 26310], [26287, 26288, 26312], [26287, 26312, 26311], [26288, 26289, 26312], [26289, 26313, 26312], [26289, 26290, 26314], [26289, 26314, 26313], [26290, 26291, 26314], [26291, 26315, 26314], [26291, 26292, 26316], [26291, 26316, 26315], [26292, 26293, 26316], [26293, 26317, 26316], [26293, 26294, 26318], [26293, 26318, 26317], [26294, 26295, 26318], [26295, 26319, 26318], [26295, 26296, 26320], [26295, 26320, 26319], [26296, 26297, 26320], [26297, 26321, 26320], [26297, 26298, 26322], [26297, 26322, 26321], [26298, 26299, 26322], [26299, 26323, 26322], [26299, 26300, 26324], [26299, 26324, 26323], [26300, 26301, 26324], [26301, 26325, 26324], [26301, 26302, 26326], [26301, 26326, 26325], [26302, 26303, 26326], [26303, 26327, 26326], [26303, 26304, 26328], [26303, 26328, 26327], [26304, 26305, 26328], [26305, 26329, 26328], [26305, 26306, 26330], [26305, 26330, 26329], [26306, 26307, 26330], [26307, 26331, 26330], [4385, 26308, 4514], [26308, 26332, 4514], [26308, 26309, 26333], [26308, 26333, 26332], [26309, 26310, 26333], [26310, 26334, 26333], [26310, 26311, 26335], [26310, 26335, 26334], [26311, 26312, 26335], [26312, 26336, 26335], [26312, 26313, 26337], [26312, 26337, 26336], [26313, 26314, 26337], [26314, 26338, 26337], [26314, 26315, 26339], [26314, 26339, 26338], [26315, 26316, 26339], [26316, 26340, 26339], [26316, 26317, 26341], [26316, 26341, 26340], [26317, 26318, 26341], [26318, 26342, 26341], [26318, 26319, 26343], [26318, 26343, 26342], [26319, 26320, 26343], [26320, 26344, 26343], [26320, 26321, 26345], [26320, 26345, 26344], [26321, 26322, 26345], [26322, 26346, 26345], [26322, 26323, 26347], [26322, 26347, 26346], [26323, 26324, 26347], [26324, 26348, 26347], [26324, 26325, 26349], [26324, 26349, 26348], [26325, 26326, 26349], [26326, 26350, 26349], [26326, 26327, 26351], [26326, 26351, 26350], [26327, 26328, 26351], [26328, 26352, 26351], [26328, 26329, 26353], [26328, 26353, 26352], [26329, 26330, 26353], [26330, 26354, 26353], [26330, 26331, 26355], [26330, 26355, 26354], [4514, 26332, 26356], [4514, 26356, 4643], [26332, 26333, 26356], [26333, 26357, 26356], [26333, 26334, 26358], [26333, 26358, 26357], [26334, 26335, 26358], [26335, 26359, 26358], [26335, 26336, 26360], [26335, 26360, 26359], [26336, 26337, 26360], [26337, 26361, 26360], [26337, 26338, 26362], [26337, 26362, 26361], [26338, 26339, 26362], [26339, 26363, 26362], [26339, 26340, 26364], [26339, 26364, 26363], [26340, 26341, 26364], [26341, 26365, 26364], [26341, 26342, 26366], [26341, 26366, 26365], [26342, 26343, 26366], [26343, 26367, 26366], [26343, 26344, 26368], [26343, 26368, 26367], [26344, 26345, 26368], [26345, 26369, 26368], [26345, 26346, 26370], [26345, 26370, 26369], [26346, 26347, 26370], [26347, 26371, 26370], [26347, 26348, 26372], [26347, 26372, 26371], [26348, 26349, 26372], [26349, 26373, 26372], [26349, 26350, 26374], [26349, 26374, 26373], [26350, 
26351, 26374], [26351, 26375, 26374], [26351, 26352, 26376], [26351, 26376, 26375], [26352, 26353, 26376], [26353, 26377, 26376], [26353, 26354, 26378], [26353, 26378, 26377], [26354, 26355, 26378], [26355, 26379, 26378], [4643, 26356, 4772], [26356, 26380, 4772], [26356, 26357, 26381], [26356, 26381, 26380], [26357, 26358, 26381], [26358, 26382, 26381], [26358, 26359, 26383], [26358, 26383, 26382], [26359, 26360, 26383], [26360, 26384, 26383], [26360, 26361, 26385], [26360, 26385, 26384], [26361, 26362, 26385], [26362, 26386, 26385], [26362, 26363, 26387], [26362, 26387, 26386], [26363, 26364, 26387], [26364, 26388, 26387], [26364, 26365, 26389], [26364, 26389, 26388], [26365, 26366, 26389], [26366, 26390, 26389], [26366, 26367, 26391], [26366, 26391, 26390], [26367, 26368, 26391], [26368, 26392, 26391], [26368, 26369, 26393], [26368, 26393, 26392], [26369, 26370, 26393], [26370, 26394, 26393], [26370, 26371, 26395], [26370, 26395, 26394], [26371, 26372, 26395], [26372, 26396, 26395], [26372, 26373, 26397], [26372, 26397, 26396], [26373, 26374, 26397], [26374, 26398, 26397], [26374, 26375, 26399], [26374, 26399, 26398], [26375, 26376, 26399], [26376, 26400, 26399], [26376, 26377, 26401], [26376, 26401, 26400], [26377, 26378, 26401], [26378, 26402, 26401], [26378, 26379, 26403], [26378, 26403, 26402], [4772, 26380, 26404], [4772, 26404, 4901], [26380, 26381, 26404], [26381, 26405, 26404], [26381, 26382, 26406], [26381, 26406, 26405], [26382, 26383, 26406], [26383, 26407, 26406], [26383, 26384, 26408], [26383, 26408, 26407], [26384, 26385, 26408], [26385, 26409, 26408], [26385, 26386, 26410], [26385, 26410, 26409], [26386, 26387, 26410], [26387, 26411, 26410], [26387, 26388, 26412], [26387, 26412, 26411], [26388, 26389, 26412], [26389, 26413, 26412], [26389, 26390, 26414], [26389, 26414, 26413], [26390, 26391, 26414], [26391, 26415, 26414], [26391, 26392, 26416], [26391, 26416, 26415], [26392, 26393, 26416], [26393, 26417, 26416], [26393, 26394, 26418], [26393, 26418, 26417], [26394, 26395, 26418], [26395, 26419, 26418], [26395, 26396, 26420], [26395, 26420, 26419], [26396, 26397, 26420], [26397, 26421, 26420], [26397, 26398, 26422], [26397, 26422, 26421], [26398, 26399, 26422], [26399, 26423, 26422], [26399, 26400, 26424], [26399, 26424, 26423], [26400, 26401, 26424], [26401, 26425, 26424], [26401, 26402, 26426], [26401, 26426, 26425], [26402, 26403, 26426], [26403, 26427, 26426], [4901, 26404, 5030], [26404, 26428, 5030], [26404, 26405, 26429], [26404, 26429, 26428], [26405, 26406, 26429], [26406, 26430, 26429], [26406, 26407, 26431], [26406, 26431, 26430], [26407, 26408, 26431], [26408, 26432, 26431], [26408, 26409, 26433], [26408, 26433, 26432], [26409, 26410, 26433], [26410, 26434, 26433], [26410, 26411, 26435], [26410, 26435, 26434], [26411, 26412, 26435], [26412, 26436, 26435], [26412, 26413, 26437], [26412, 26437, 26436], [26413, 26414, 26437], [26414, 26438, 26437], [26414, 26415, 26439], [26414, 26439, 26438], [26415, 26416, 26439], [26416, 26440, 26439], [26416, 26417, 26441], [26416, 26441, 26440], [26417, 26418, 26441], [26418, 26442, 26441], [26418, 26419, 26443], [26418, 26443, 26442], [26419, 26420, 26443], [26420, 26444, 26443], [26420, 26421, 26445], [26420, 26445, 26444], [26421, 26422, 26445], [26422, 26446, 26445], [26422, 26423, 26447], [26422, 26447, 26446], [26423, 26424, 26447], [26424, 26448, 26447], [26424, 26425, 26449], [26424, 26449, 26448], [26425, 26426, 26449], [26426, 26450, 26449], [26426, 26427, 26451], [26426, 26451, 26450], [5030, 26428, 26452], [5030, 
26452, 5159], [26428, 26429, 26452], [26429, 26453, 26452], [26429, 26430, 26454], [26429, 26454, 26453], [26430, 26431, 26454], [26431, 26455, 26454], [26431, 26432, 26456], [26431, 26456, 26455], [26432, 26433, 26456], [26433, 26457, 26456], [26433, 26434, 26458], [26433, 26458, 26457], [26434, 26435, 26458], [26435, 26459, 26458], [26435, 26436, 26460], [26435, 26460, 26459], [26436, 26437, 26460], [26437, 26461, 26460], [26437, 26438, 26462], [26437, 26462, 26461], [26438, 26439, 26462], [26439, 26463, 26462], [26439, 26440, 26464], [26439, 26464, 26463], [26440, 26441, 26464], [26441, 26465, 26464], [26441, 26442, 26466], [26441, 26466, 26465], [26442, 26443, 26466], [26443, 26467, 26466], [26443, 26444, 26468], [26443, 26468, 26467], [26444, 26445, 26468], [26445, 26469, 26468], [26445, 26446, 26470], [26445, 26470, 26469], [26446, 26447, 26470], [26447, 26471, 26470], [26447, 26448, 26472], [26447, 26472, 26471], [26448, 26449, 26472], [26449, 26473, 26472], [26449, 26450, 26474], [26449, 26474, 26473], [26450, 26451, 26474], [26451, 26475, 26474], [5159, 26452, 5288], [26452, 26476, 5288], [26452, 26453, 26477], [26452, 26477, 26476], [26453, 26454, 26477], [26454, 26478, 26477], [26454, 26455, 26479], [26454, 26479, 26478], [26455, 26456, 26479], [26456, 26480, 26479], [26456, 26457, 26481], [26456, 26481, 26480], [26457, 26458, 26481], [26458, 26482, 26481], [26458, 26459, 26483], [26458, 26483, 26482], [26459, 26460, 26483], [26460, 26484, 26483], [26460, 26461, 26485], [26460, 26485, 26484], [26461, 26462, 26485], [26462, 26486, 26485], [26462, 26463, 26487], [26462, 26487, 26486], [26463, 26464, 26487], [26464, 26488, 26487], [26464, 26465, 26489], [26464, 26489, 26488], [26465, 26466, 26489], [26466, 26490, 26489], [26466, 26467, 26491], [26466, 26491, 26490], [26467, 26468, 26491], [26468, 26492, 26491], [26468, 26469, 26493], [26468, 26493, 26492], [26469, 26470, 26493], [26470, 26494, 26493], [26470, 26471, 26495], [26470, 26495, 26494], [26471, 26472, 26495], [26472, 26496, 26495], [26472, 26473, 26497], [26472, 26497, 26496], [26473, 26474, 26497], [26474, 26498, 26497], [26474, 26475, 26499], [26474, 26499, 26498], [5288, 26476, 26500], [5288, 26500, 5417], [26476, 26477, 26500], [26477, 26501, 26500], [26477, 26478, 26502], [26477, 26502, 26501], [26478, 26479, 26502], [26479, 26503, 26502], [26479, 26480, 26504], [26479, 26504, 26503], [26480, 26481, 26504], [26481, 26505, 26504], [26481, 26482, 26506], [26481, 26506, 26505], [26482, 26483, 26506], [26483, 26507, 26506], [26483, 26484, 26508], [26483, 26508, 26507], [26484, 26485, 26508], [26485, 26509, 26508], [26485, 26486, 26510], [26485, 26510, 26509], [26486, 26487, 26510], [26487, 26511, 26510], [26487, 26488, 26512], [26487, 26512, 26511], [26488, 26489, 26512], [26489, 26513, 26512], [26489, 26490, 26514], [26489, 26514, 26513], [26490, 26491, 26514], [26491, 26515, 26514], [26491, 26492, 26516], [26491, 26516, 26515], [26492, 26493, 26516], [26493, 26517, 26516], [26493, 26494, 26518], [26493, 26518, 26517], [26494, 26495, 26518], [26495, 26519, 26518], [26495, 26496, 26520], [26495, 26520, 26519], [26496, 26497, 26520], [26497, 26521, 26520], [26497, 26498, 26522], [26497, 26522, 26521], [26498, 26499, 26522], [26499, 26523, 26522], [5417, 26500, 5546], [26500, 26524, 5546], [26500, 26501, 26525], [26500, 26525, 26524], [26501, 26502, 26525], [26502, 26526, 26525], [26502, 26503, 26527], [26502, 26527, 26526], [26503, 26504, 26527], [26504, 26528, 26527], [26504, 26505, 26529], [26504, 26529, 26528], [26505, 
26506, 26529], [26506, 26530, 26529], [26506, 26507, 26531], [26506, 26531, 26530], [26507, 26508, 26531], [26508, 26532, 26531], [26508, 26509, 26533], [26508, 26533, 26532], [26509, 26510, 26533], [26510, 26534, 26533], [26510, 26511, 26535], [26510, 26535, 26534], [26511, 26512, 26535], [26512, 26536, 26535], [26512, 26513, 26537], [26512, 26537, 26536], [26513, 26514, 26537], [26514, 26538, 26537], [26514, 26515, 26539], [26514, 26539, 26538], [26515, 26516, 26539], [26516, 26540, 26539], [26516, 26517, 26541], [26516, 26541, 26540], [26517, 26518, 26541], [26518, 26542, 26541], [26518, 26519, 26543], [26518, 26543, 26542], [26519, 26520, 26543], [26520, 26544, 26543], [26520, 26521, 26545], [26520, 26545, 26544], [26521, 26522, 26545], [26522, 26546, 26545], [26522, 26523, 26547], [26522, 26547, 26546], [5546, 26524, 26548], [5546, 26548, 5675], [26524, 26525, 26548], [26525, 26549, 26548], [26525, 26526, 26550], [26525, 26550, 26549], [26526, 26527, 26550], [26527, 26551, 26550], [26527, 26528, 26552], [26527, 26552, 26551], [26528, 26529, 26552], [26529, 26553, 26552], [26529, 26530, 26554], [26529, 26554, 26553], [26530, 26531, 26554], [26531, 26555, 26554], [26531, 26532, 26556], [26531, 26556, 26555], [26532, 26533, 26556], [26533, 26557, 26556], [26533, 26534, 26558], [26533, 26558, 26557], [26534, 26535, 26558], [26535, 26559, 26558], [26535, 26536, 26560], [26535, 26560, 26559], [26536, 26537, 26560], [26537, 26561, 26560], [26537, 26538, 26562], [26537, 26562, 26561], [26538, 26539, 26562], [26539, 26563, 26562], [26539, 26540, 26564], [26539, 26564, 26563], [26540, 26541, 26564], [26541, 26565, 26564], [26541, 26542, 26566], [26541, 26566, 26565], [26542, 26543, 26566], [26543, 26567, 26566], [26543, 26544, 26568], [26543, 26568, 26567], [26544, 26545, 26568], [26545, 26569, 26568], [26545, 26546, 26570], [26545, 26570, 26569], [26546, 26547, 26570], [26547, 26571, 26570], [5675, 26548, 5804], [26548, 26572, 5804], [26548, 26549, 26573], [26548, 26573, 26572], [26549, 26550, 26573], [26550, 26574, 26573], [26550, 26551, 26575], [26550, 26575, 26574], [26551, 26552, 26575], [26552, 26576, 26575], [26552, 26553, 26577], [26552, 26577, 26576], [26553, 26554, 26577], [26554, 26578, 26577], [26554, 26555, 26579], [26554, 26579, 26578], [26555, 26556, 26579], [26556, 26580, 26579], [26556, 26557, 26581], [26556, 26581, 26580], [26557, 26558, 26581], [26558, 26582, 26581], [26558, 26559, 26583], [26558, 26583, 26582], [26559, 26560, 26583], [26560, 26584, 26583], [26560, 26561, 26585], [26560, 26585, 26584], [26561, 26562, 26585], [26562, 26586, 26585], [26562, 26563, 26587], [26562, 26587, 26586], [26563, 26564, 26587], [26564, 26588, 26587], [26564, 26565, 26589], [26564, 26589, 26588], [26565, 26566, 26589], [26566, 26590, 26589], [26566, 26567, 26591], [26566, 26591, 26590], [26567, 26568, 26591], [26568, 26592, 26591], [26568, 26569, 26593], [26568, 26593, 26592], [26569, 26570, 26593], [26570, 26594, 26593], [26570, 26571, 26595], [26570, 26595, 26594], [5804, 26572, 26596], [5804, 26596, 5933], [26572, 26573, 26596], [26573, 26597, 26596], [26573, 26574, 26598], [26573, 26598, 26597], [26574, 26575, 26598], [26575, 26599, 26598], [26575, 26576, 26600], [26575, 26600, 26599], [26576, 26577, 26600], [26577, 26601, 26600], [26577, 26578, 26602], [26577, 26602, 26601], [26578, 26579, 26602], [26579, 26603, 26602], [26579, 26580, 26604], [26579, 26604, 26603], [26580, 26581, 26604], [26581, 26605, 26604], [26581, 26582, 26606], [26581, 26606, 26605], [26582, 26583, 26606], 
[26583, 26607, 26606], [26583, 26584, 26608], [26583, 26608, 26607], [26584, 26585, 26608], [26585, 26609, 26608], [26585, 26586, 26610], [26585, 26610, 26609], [26586, 26587, 26610], [26587, 26611, 26610], [26587, 26588, 26612], [26587, 26612, 26611], [26588, 26589, 26612], [26589, 26613, 26612], [26589, 26590, 26614], [26589, 26614, 26613], [26590, 26591, 26614], [26591, 26615, 26614], [26591, 26592, 26616], [26591, 26616, 26615], [26592, 26593, 26616], [26593, 26617, 26616], [26593, 26594, 26618], [26593, 26618, 26617], [26594, 26595, 26618], [26595, 26619, 26618], [5933, 26596, 6062], [26596, 26620, 6062], [26596, 26597, 26621], [26596, 26621, 26620], [26597, 26598, 26621], [26598, 26622, 26621], [26598, 26599, 26623], [26598, 26623, 26622], [26599, 26600, 26623], [26600, 26624, 26623], [26600, 26601, 26625], [26600, 26625, 26624], [26601, 26602, 26625], [26602, 26626, 26625], [26602, 26603, 26627], [26602, 26627, 26626], [26603, 26604, 26627], [26604, 26628, 26627], [26604, 26605, 26629], [26604, 26629, 26628], [26605, 26606, 26629], [26606, 26630, 26629], [26606, 26607, 26631], [26606, 26631, 26630], [26607, 26608, 26631], [26608, 26632, 26631], [26608, 26609, 26633], [26608, 26633, 26632], [26609, 26610, 26633], [26610, 26634, 26633], [26610, 26611, 26635], [26610, 26635, 26634], [26611, 26612, 26635], [26612, 26636, 26635], [26612, 26613, 26637], [26612, 26637, 26636], [26613, 26614, 26637], [26614, 26638, 26637], [26614, 26615, 26639], [26614, 26639, 26638], [26615, 26616, 26639], [26616, 26640, 26639], [26616, 26617, 26641], [26616, 26641, 26640], [26617, 26618, 26641], [26618, 26642, 26641], [26618, 26619, 26643], [26618, 26643, 26642], [6062, 26620, 26644], [6062, 26644, 6189], [26620, 26621, 26644], [26621, 26645, 26644], [26621, 26622, 26646], [26621, 26646, 26645], [26622, 26623, 26646], [26623, 26647, 26646], [26623, 26624, 26648], [26623, 26648, 26647], [26624, 26625, 26648], [26625, 26649, 26648], [26625, 26626, 26650], [26625, 26650, 26649], [26626, 26627, 26650], [26627, 26651, 26650], [26627, 26628, 26652], [26627, 26652, 26651], [26628, 26629, 26652], [26629, 26653, 26652], [26629, 26630, 26654], [26629, 26654, 26653], [26630, 26631, 26654], [26631, 26655, 26654], [26631, 26632, 26656], [26631, 26656, 26655], [26632, 26633, 26656], [26633, 26657, 26656], [26633, 26634, 26658], [26633, 26658, 26657], [26634, 26635, 26658], [26635, 26659, 26658], [26635, 26636, 26660], [26635, 26660, 26659], [26636, 26637, 26660], [26637, 26661, 26660], [26637, 26638, 26662], [26637, 26662, 26661], [26638, 26639, 26662], [26639, 26663, 26662], [26639, 26640, 26664], [26639, 26664, 26663], [26640, 26641, 26664], [26641, 26665, 26664], [26641, 26642, 26666], [26641, 26666, 26665], [26642, 26643, 26666], [26643, 26667, 26666], [6189, 26644, 6314], [26644, 26668, 6314], [26644, 26645, 26669], [26644, 26669, 26668], [26645, 26646, 26669], [26646, 26670, 26669], [26646, 26647, 26671], [26646, 26671, 26670], [26647, 26648, 26671], [26648, 26672, 26671], [26648, 26649, 26673], [26648, 26673, 26672], [26649, 26650, 26673], [26650, 26674, 26673], [26650, 26651, 26675], [26650, 26675, 26674], [26651, 26652, 26675], [26652, 26676, 26675], [26652, 26653, 26677], [26652, 26677, 26676], [26653, 26654, 26677], [26654, 26678, 26677], [26654, 26655, 26679], [26654, 26679, 26678], [26655, 26656, 26679], [26656, 26680, 26679], [26656, 26657, 26681], [26656, 26681, 26680], [26657, 26658, 26681], [26658, 26682, 26681], [26658, 26659, 26683], [26658, 26683, 26682], [26659, 26660, 26683], [26660, 26684, 
26683], [26660, 26661, 26685], [26660, 26685, 26684], [26661, 26662, 26685], [26662, 26686, 26685], [26662, 26663, 26687], [26662, 26687, 26686], [26663, 26664, 26687], [26664, 26688, 26687], [26664, 26665, 26689], [26664, 26689, 26688], [26665, 26666, 26689], [26666, 26690, 26689], [26666, 26667, 26691], [26666, 26691, 26690], [6314, 26668, 26692], [6314, 26692, 6437], [26668, 26669, 26692], [26669, 26693, 26692], [26669, 26670, 26694], [26669, 26694, 26693], [26670, 26671, 26694], [26671, 26695, 26694], [26671, 26672, 26696], [26671, 26696, 26695], [26672, 26673, 26696], [26673, 26697, 26696], [26673, 26674, 26698], [26673, 26698, 26697], [26674, 26675, 26698], [26675, 26699, 26698], [26675, 26676, 26700], [26675, 26700, 26699], [26676, 26677, 26700], [26677, 26701, 26700], [26677, 26678, 26702], [26677, 26702, 26701], [26678, 26679, 26702], [26679, 26703, 26702], [26679, 26680, 26704], [26679, 26704, 26703], [26680, 26681, 26704], [26681, 26705, 26704], [26681, 26682, 26706], [26681, 26706, 26705], [26682, 26683, 26706], [26683, 26707, 26706], [26683, 26684, 26708], [26683, 26708, 26707], [26684, 26685, 26708], [26685, 26709, 26708], [26685, 26686, 26710], [26685, 26710, 26709], [26686, 26687, 26710], [26687, 26711, 26710], [26687, 26688, 26712], [26687, 26712, 26711], [26688, 26689, 26712], [26689, 26713, 26712], [26689, 26690, 26714], [26689, 26714, 26713], [26690, 26691, 26714], [26691, 26715, 26714], [6437, 26692, 6557], [26692, 26716, 6557], [26692, 26693, 26717], [26692, 26717, 26716], [26693, 26694, 26717], [26694, 26718, 26717], [26694, 26695, 26719], [26694, 26719, 26718], [26695, 26696, 26719], [26696, 26720, 26719], [26696, 26697, 26721], [26696, 26721, 26720], [26697, 26698, 26721], [26698, 26722, 26721], [26698, 26699, 26723], [26698, 26723, 26722], [26699, 26700, 26723], [26700, 26724, 26723], [26700, 26701, 26725], [26700, 26725, 26724], [26701, 26702, 26725], [26702, 26726, 26725], [26702, 26703, 26727], [26702, 26727, 26726], [26703, 26704, 26727], [26704, 26728, 26727], [26704, 26705, 26729], [26704, 26729, 26728], [26705, 26706, 26729], [26706, 26730, 26729], [26706, 26707, 26731], [26706, 26731, 26730], [26707, 26708, 26731], [26708, 26732, 26731], [26708, 26709, 26733], [26708, 26733, 26732], [26709, 26710, 26733], [26710, 26734, 26733], [26710, 26711, 26735], [26710, 26735, 26734], [26711, 26712, 26735], [26712, 26736, 26735], [26712, 26713, 26737], [26712, 26737, 26736], [26713, 26714, 26737], [26714, 26738, 26737], [26714, 26715, 26739], [26714, 26739, 26738], [6557, 26716, 26740], [6557, 26740, 6675], [26716, 26717, 26740], [26717, 26741, 26740], [26717, 26718, 26742], [26717, 26742, 26741], [26718, 26719, 26742], [26719, 26743, 26742], [26719, 26720, 26744], [26719, 26744, 26743], [26720, 26721, 26744], [26721, 26745, 26744], [26721, 26722, 26746], [26721, 26746, 26745], [26722, 26723, 26746], [26723, 26747, 26746], [26723, 26724, 26748], [26723, 26748, 26747], [26724, 26725, 26748], [26725, 26749, 26748], [26725, 26726, 26750], [26725, 26750, 26749], [26726, 26727, 26750], [26727, 26751, 26750], [26727, 26728, 26752], [26727, 26752, 26751], [26728, 26729, 26752], [26729, 26753, 26752], [26729, 26730, 26754], [26729, 26754, 26753], [26730, 26731, 26754], [26731, 26755, 26754], [26731, 26732, 26756], [26731, 26756, 26755], [26732, 26733, 26756], [26733, 26757, 26756], [26733, 26734, 26758], [26733, 26758, 26757], [26734, 26735, 26758], [26735, 26759, 26758], [26735, 26736, 26760], [26735, 26760, 26759], [26736, 26737, 26760], [26737, 26761, 26760], [26737, 
26738, 26762], [26737, 26762, 26761], [26738, 26739, 26762], [26739, 26763, 26762], [6675, 26740, 6793], [26740, 26764, 6793], [26740, 26741, 26765], [26740, 26765, 26764], [26741, 26742, 26765], [26742, 26766, 26765], [26742, 26743, 26767], [26742, 26767, 26766], [26743, 26744, 26767], [26744, 26768, 26767], [26744, 26745, 26769], [26744, 26769, 26768], [26745, 26746, 26769], [26746, 26770, 26769], [26746, 26747, 26771], [26746, 26771, 26770], [26747, 26748, 26771], [26748, 26772, 26771], [26748, 26749, 26773], [26748, 26773, 26772], [26749, 26750, 26773], [26750, 26774, 26773], [26750, 26751, 26775], [26750, 26775, 26774], [26751, 26752, 26775], [26752, 26776, 26775], [26752, 26753, 26777], [26752, 26777, 26776], [26753, 26754, 26777], [26754, 26778, 26777], [26754, 26755, 26779], [26754, 26779, 26778], [26755, 26756, 26779], [26756, 26780, 26779], [26756, 26757, 26781], [26756, 26781, 26780], [26757, 26758, 26781], [26758, 26782, 26781], [26758, 26759, 26783], [26758, 26783, 26782], [26759, 26760, 26783], [26760, 26784, 26783], [26760, 26761, 26785], [26760, 26785, 26784], [26761, 26762, 26785], [26762, 26786, 26785], [26762, 26763, 26787], [26762, 26787, 26786], [6793, 26764, 26788], [6793, 26788, 6911], [26764, 26765, 26788], [26765, 26789, 26788], [26765, 26766, 26790], [26765, 26790, 26789], [26766, 26767, 26790], [26767, 26791, 26790], [26767, 26768, 26792], [26767, 26792, 26791], [26768, 26769, 26792], [26769, 26793, 26792], [26769, 26770, 26794], [26769, 26794, 26793], [26770, 26771, 26794], [26771, 26795, 26794], [26771, 26772, 26796], [26771, 26796, 26795], [26772, 26773, 26796], [26773, 26797, 26796], [26773, 26774, 26798], [26773, 26798, 26797], [26774, 26775, 26798], [26775, 26799, 26798], [26775, 26776, 26800], [26775, 26800, 26799], [26776, 26777, 26800], [26777, 26801, 26800], [26777, 26778, 26802], [26777, 26802, 26801], [26778, 26779, 26802], [26779, 26803, 26802], [26779, 26780, 26804], [26779, 26804, 26803], [26780, 26781, 26804], [26781, 26805, 26804], [26781, 26782, 26806], [26781, 26806, 26805], [26782, 26783, 26806], [26783, 26807, 26806], [26783, 26784, 26808], [26783, 26808, 26807], [26784, 26785, 26808], [26785, 26809, 26808], [26785, 26786, 26810], [26785, 26810, 26809], [26786, 26787, 26810], [26787, 26811, 26810], [6911, 26788, 7029], [26788, 26812, 7029], [26788, 26789, 26813], [26788, 26813, 26812], [26789, 26790, 26813], [26790, 26814, 26813], [26790, 26791, 26815], [26790, 26815, 26814], [26791, 26792, 26815], [26792, 26816, 26815], [26792, 26793, 26817], [26792, 26817, 26816], [26793, 26794, 26817], [26794, 26818, 26817], [26794, 26795, 26819], [26794, 26819, 26818], [26795, 26796, 26819], [26796, 26820, 26819], [26796, 26797, 26821], [26796, 26821, 26820], [26797, 26798, 26821], [26798, 26822, 26821], [26798, 26799, 26823], [26798, 26823, 26822], [26799, 26800, 26823], [26800, 26824, 26823], [26800, 26801, 26825], [26800, 26825, 26824], [26801, 26802, 26825], [26802, 26826, 26825], [26802, 26803, 26827], [26802, 26827, 26826], [26803, 26804, 26827], [26804, 26828, 26827], [26804, 26805, 26829], [26804, 26829, 26828], [26805, 26806, 26829], [26806, 26830, 26829], [26806, 26807, 26831], [26806, 26831, 26830], [26807, 26808, 26831], [26808, 26832, 26831], [26808, 26809, 26833], [26808, 26833, 26832], [26809, 26810, 26833], [26810, 26834, 26833], [26810, 26811, 26835], [26810, 26835, 26834], [7029, 26812, 26836], [7029, 26836, 7147], [26812, 26813, 26836], [26813, 26837, 26836], [26813, 26814, 26838], [26813, 26838, 26837], [26814, 26815, 26838], [26815, 
26839, 26838], [26815, 26816, 26840], [26815, 26840, 26839], [26816, 26817, 26840], [26817, 26841, 26840], [26817, 26818, 26842], [26817, 26842, 26841], [26818, 26819, 26842], [26819, 26843, 26842], [26819, 26820, 26844], [26819, 26844, 26843], [26820, 26821, 26844], [26821, 26845, 26844], [26821, 26822, 26846], [26821, 26846, 26845], [26822, 26823, 26846], [26823, 26847, 26846], [26823, 26824, 26848], [26823, 26848, 26847], [26824, 26825, 26848], [26825, 26849, 26848], [26825, 26826, 26850], [26825, 26850, 26849], [26826, 26827, 26850], [26827, 26851, 26850], [26827, 26828, 26852], [26827, 26852, 26851], [26828, 26829, 26852], [26829, 26853, 26852], [26829, 26830, 26854], [26829, 26854, 26853], [26830, 26831, 26854], [26831, 26855, 26854], [26831, 26832, 26856], [26831, 26856, 26855], [26832, 26833, 26856], [26833, 26857, 26856], [26833, 26834, 26858], [26833, 26858, 26857], [26834, 26835, 26858], [26835, 26859, 26858], [7147, 26836, 7265], [26836, 26860, 7265], [26836, 26837, 26861], [26836, 26861, 26860], [26837, 26838, 26861], [26838, 26862, 26861], [26838, 26839, 26863], [26838, 26863, 26862], [26839, 26840, 26863], [26840, 26864, 26863], [26840, 26841, 26865], [26840, 26865, 26864], [26841, 26842, 26865], [26842, 26866, 26865], [26842, 26843, 26867], [26842, 26867, 26866], [26843, 26844, 26867], [26844, 26868, 26867], [26844, 26845, 26869], [26844, 26869, 26868], [26845, 26846, 26869], [26846, 26870, 26869], [26846, 26847, 26871], [26846, 26871, 26870], [26847, 26848, 26871], [26848, 26872, 26871], [26848, 26849, 26873], [26848, 26873, 26872], [26849, 26850, 26873], [26850, 26874, 26873], [26850, 26851, 26875], [26850, 26875, 26874], [26851, 26852, 26875], [26852, 26876, 26875], [26852, 26853, 26877], [26852, 26877, 26876], [26853, 26854, 26877], [26854, 26878, 26877], [26854, 26855, 26879], [26854, 26879, 26878], [26855, 26856, 26879], [26856, 26880, 26879], [26856, 26857, 26881], [26856, 26881, 26880], [26857, 26858, 26881], [26858, 26882, 26881], [26858, 26859, 26883], [26858, 26883, 26882], [7265, 26860, 26884], [7265, 26884, 7383], [26860, 26861, 26884], [26861, 26885, 26884], [26861, 26862, 26886], [26861, 26886, 26885], [26862, 26863, 26886], [26863, 26887, 26886], [26863, 26864, 26888], [26863, 26888, 26887], [26864, 26865, 26888], [26865, 26889, 26888], [26865, 26866, 26890], [26865, 26890, 26889], [26866, 26867, 26890], [26867, 26891, 26890], [26867, 26868, 26892], [26867, 26892, 26891], [26868, 26869, 26892], [26869, 26893, 26892], [26869, 26870, 26894], [26869, 26894, 26893], [26870, 26871, 26894], [26871, 26895, 26894], [26871, 26872, 26896], [26871, 26896, 26895], [26872, 26873, 26896], [26873, 26897, 26896], [26873, 26874, 26898], [26873, 26898, 26897], [26874, 26875, 26898], [26875, 26899, 26898], [26875, 26876, 26900], [26875, 26900, 26899], [26876, 26877, 26900], [26877, 26901, 26900], [26877, 26878, 26902], [26877, 26902, 26901], [26878, 26879, 26902], [26879, 26903, 26902], [26879, 26880, 26904], [26879, 26904, 26903], [26880, 26881, 26904], [26881, 26905, 26904], [26881, 26882, 26906], [26881, 26906, 26905], [26882, 26883, 26906], [26883, 26907, 26906], [7383, 26884, 7501], [26884, 26908, 7501], [26884, 26885, 26909], [26884, 26909, 26908], [26885, 26886, 26909], [26886, 26910, 26909], [26886, 26887, 26911], [26886, 26911, 26910], [26887, 26888, 26911], [26888, 26912, 26911], [26888, 26889, 26913], [26888, 26913, 26912], [26889, 26890, 26913], [26890, 26914, 26913], [26890, 26891, 26915], [26890, 26915, 26914], [26891, 26892, 26915], [26892, 26916, 26915], 
[26892, 26893, 26917], [26892, 26917, 26916], [26893, 26894, 26917], [26894, 26918, 26917], [26894, 26895, 26919], [26894, 26919, 26918], [26895, 26896, 26919], [26896, 26920, 26919], [26896, 26897, 26921], [26896, 26921, 26920], [26897, 26898, 26921], [26898, 26922, 26921], [26898, 26899, 26923], [26898, 26923, 26922], [26899, 26900, 26923], [26900, 26924, 26923], [26900, 26901, 26925], [26900, 26925, 26924], [26901, 26902, 26925], [26902, 26926, 26925], [26902, 26903, 26927], [26902, 26927, 26926], [26903, 26904, 26927], [26904, 26928, 26927], [26904, 26905, 26929], [26904, 26929, 26928], [26905, 26906, 26929], [26906, 26930, 26929], [26906, 26907, 26931], [26906, 26931, 26930], [7501, 26908, 26932], [7501, 26932, 7619], [26908, 26909, 26932], [26909, 26933, 26932], [26909, 26910, 26934], [26909, 26934, 26933], [26910, 26911, 26934], [26911, 26935, 26934], [26911, 26912, 26936], [26911, 26936, 26935], [26912, 26913, 26936], [26913, 26937, 26936], [26913, 26914, 26938], [26913, 26938, 26937], [26914, 26915, 26938], [26915, 26939, 26938], [26915, 26916, 26940], [26915, 26940, 26939], [26916, 26917, 26940], [26917, 26941, 26940], [26917, 26918, 26942], [26917, 26942, 26941], [26918, 26919, 26942], [26919, 26943, 26942], [26919, 26920, 26944], [26919, 26944, 26943], [26920, 26921, 26944], [26921, 26945, 26944], [26921, 26922, 26946], [26921, 26946, 26945], [26922, 26923, 26946], [26923, 26947, 26946], [26923, 26924, 26948], [26923, 26948, 26947], [26924, 26925, 26948], [26925, 26949, 26948], [26925, 26926, 26950], [26925, 26950, 26949], [26926, 26927, 26950], [26927, 26951, 26950], [26927, 26928, 26952], [26927, 26952, 26951], [26928, 26929, 26952], [26929, 26953, 26952], [26929, 26930, 26954], [26929, 26954, 26953], [26930, 26931, 26954], [26931, 26955, 26954], [7619, 26932, 7737], [26932, 26956, 7737], [26932, 26933, 26957], [26932, 26957, 26956], [26933, 26934, 26957], [26934, 26958, 26957], [26934, 26935, 26959], [26934, 26959, 26958], [26935, 26936, 26959], [26936, 26960, 26959], [26936, 26937, 26961], [26936, 26961, 26960], [26937, 26938, 26961], [26938, 26962, 26961], [26938, 26939, 26963], [26938, 26963, 26962], [26939, 26940, 26963], [26940, 26964, 26963], [26940, 26941, 26965], [26940, 26965, 26964], [26941, 26942, 26965], [26942, 26966, 26965], [26942, 26943, 26967], [26942, 26967, 26966], [26943, 26944, 26967], [26944, 26968, 26967], [26944, 26945, 26969], [26944, 26969, 26968], [26945, 26946, 26969], [26946, 26970, 26969], [26946, 26947, 26971], [26946, 26971, 26970], [26947, 26948, 26971], [26948, 26972, 26971], [26948, 26949, 26973], [26948, 26973, 26972], [26949, 26950, 26973], [26950, 26974, 26973], [26950, 26951, 26975], [26950, 26975, 26974], [26951, 26952, 26975], [26952, 26976, 26975], [26952, 26953, 26977], [26952, 26977, 26976], [26953, 26954, 26977], [26954, 26978, 26977], [26954, 26955, 26979], [26954, 26979, 26978], [7737, 26956, 26980], [7737, 26980, 7855], [26956, 26957, 26980], [26957, 26981, 26980], [26957, 26958, 26982], [26957, 26982, 26981], [26958, 26959, 26982], [26959, 26983, 26982], [26959, 26960, 26984], [26959, 26984, 26983], [26960, 26961, 26984], [26961, 26985, 26984], [26961, 26962, 26986], [26961, 26986, 26985], [26962, 26963, 26986], [26963, 26987, 26986], [26963, 26964, 26988], [26963, 26988, 26987], [26964, 26965, 26988], [26965, 26989, 26988], [26965, 26966, 26990], [26965, 26990, 26989], [26966, 26967, 26990], [26967, 26991, 26990], [26967, 26968, 26992], [26967, 26992, 26991], [26968, 26969, 26992], [26969, 26993, 26992], [26969, 26970, 
26994], [26969, 26994, 26993], [26970, 26971, 26994], [26971, 26995, 26994], [26971, 26972, 26996], [26971, 26996, 26995], [26972, 26973, 26996], [26973, 26997, 26996], [26973, 26974, 26998], [26973, 26998, 26997], [26974, 26975, 26998], [26975, 26999, 26998], [26975, 26976, 27000], [26975, 27000, 26999], [26976, 26977, 27000], [26977, 27001, 27000], [26977, 26978, 27002], [26977, 27002, 27001], [26978, 26979, 27002], [26979, 27003, 27002], [7855, 26980, 7973], [26980, 27004, 7973], [26980, 26981, 27005], [26980, 27005, 27004], [26981, 26982, 27005], [26982, 27006, 27005], [26982, 26983, 27007], [26982, 27007, 27006], [26983, 26984, 27007], [26984, 27008, 27007], [26984, 26985, 27009], [26984, 27009, 27008], [26985, 26986, 27009], [26986, 27010, 27009], [26986, 26987, 27011], [26986, 27011, 27010], [26987, 26988, 27011], [26988, 27012, 27011], [26988, 26989, 27013], [26988, 27013, 27012], [26989, 26990, 27013], [26990, 27014, 27013], [26990, 26991, 27015], [26990, 27015, 27014], [26991, 26992, 27015], [26992, 27016, 27015], [26992, 26993, 27017], [26992, 27017, 27016], [26993, 26994, 27017], [26994, 27018, 27017], [26994, 26995, 27019], [26994, 27019, 27018], [26995, 26996, 27019], [26996, 27020, 27019], [26996, 26997, 27021], [26996, 27021, 27020], [26997, 26998, 27021], [26998, 27022, 27021], [26998, 26999, 27023], [26998, 27023, 27022], [26999, 27000, 27023], [27000, 27024, 27023], [27000, 27001, 27025], [27000, 27025, 27024], [27001, 27002, 27025], [27002, 27026, 27025], [27002, 27003, 27027], [27002, 27027, 27026], [7973, 27004, 27028], [7973, 27028, 8091], [27004, 27005, 27028], [27005, 27029, 27028], [27005, 27006, 27030], [27005, 27030, 27029], [27006, 27007, 27030], [27007, 27031, 27030], [27007, 27008, 27032], [27007, 27032, 27031], [27008, 27009, 27032], [27009, 27033, 27032], [27009, 27010, 27034], [27009, 27034, 27033], [27010, 27011, 27034], [27011, 27035, 27034], [27011, 27012, 27036], [27011, 27036, 27035], [27012, 27013, 27036], [27013, 27037, 27036], [27013, 27014, 27038], [27013, 27038, 27037], [27014, 27015, 27038], [27015, 27039, 27038], [27015, 27016, 27040], [27015, 27040, 27039], [27016, 27017, 27040], [27017, 27041, 27040], [27017, 27018, 27042], [27017, 27042, 27041], [27018, 27019, 27042], [27019, 27043, 27042], [27019, 27020, 27044], [27019, 27044, 27043], [27020, 27021, 27044], [27021, 27045, 27044], [27021, 27022, 27046], [27021, 27046, 27045], [27022, 27023, 27046], [27023, 27047, 27046], [27023, 27024, 27048], [27023, 27048, 27047], [27024, 27025, 27048], [27025, 27049, 27048], [27025, 27026, 27050], [27025, 27050, 27049], [27026, 27027, 27050], [27027, 27051, 27050], [8091, 27028, 8209], [27028, 27052, 8209], [27028, 27029, 27053], [27028, 27053, 27052], [27029, 27030, 27053], [27030, 27054, 27053], [27030, 27031, 27055], [27030, 27055, 27054], [27031, 27032, 27055], [27032, 27056, 27055], [27032, 27033, 27057], [27032, 27057, 27056], [27033, 27034, 27057], [27034, 27058, 27057], [27034, 27035, 27059], [27034, 27059, 27058], [27035, 27036, 27059], [27036, 27060, 27059], [27036, 27037, 27061], [27036, 27061, 27060], [27037, 27038, 27061], [27038, 27062, 27061], [27038, 27039, 27063], [27038, 27063, 27062], [27039, 27040, 27063], [27040, 27064, 27063], [27040, 27041, 27065], [27040, 27065, 27064], [27041, 27042, 27065], [27042, 27066, 27065], [27042, 27043, 27067], [27042, 27067, 27066], [27043, 27044, 27067], [27044, 27068, 27067], [27044, 27045, 27069], [27044, 27069, 27068], [27045, 27046, 27069], [27046, 27070, 27069], [27046, 27047, 27071], [27046, 
27071, 27070], [27047, 27048, 27071], [27048, 27072, 27071], [27048, 27049, 27073], [27048, 27073, 27072], [27049, 27050, 27073], [27050, 27074, 27073], [27050, 27051, 27075], [27050, 27075, 27074], [8209, 27052, 27076], [8209, 27076, 8327], [27052, 27053, 27076], [27053, 27077, 27076], [27053, 27054, 27078], [27053, 27078, 27077], [27054, 27055, 27078], [27055, 27079, 27078], [27055, 27056, 27080], [27055, 27080, 27079], [27056, 27057, 27080], [27057, 27081, 27080], [27057, 27058, 27082], [27057, 27082, 27081], [27058, 27059, 27082], [27059, 27083, 27082], [27059, 27060, 27084], [27059, 27084, 27083], [27060, 27061, 27084], [27061, 27085, 27084], [27061, 27062, 27086], [27061, 27086, 27085], [27062, 27063, 27086], [27063, 27087, 27086], [27063, 27064, 27088], [27063, 27088, 27087], [27064, 27065, 27088], [27065, 27089, 27088], [27065, 27066, 27090], [27065, 27090, 27089], [27066, 27067, 27090], [27067, 27091, 27090], [27067, 27068, 27092], [27067, 27092, 27091], [27068, 27069, 27092], [27069, 27093, 27092], [27069, 27070, 27094], [27069, 27094, 27093], [27070, 27071, 27094], [27071, 27095, 27094], [27071, 27072, 27096], [27071, 27096, 27095], [27072, 27073, 27096], [27073, 27097, 27096], [27073, 27074, 27098], [27073, 27098, 27097], [27074, 27075, 27098], [27075, 27099, 27098], [8327, 27076, 8445], [27076, 27100, 8445], [27076, 27077, 27101], [27076, 27101, 27100], [27077, 27078, 27101], [27078, 27102, 27101], [27078, 27079, 27103], [27078, 27103, 27102], [27079, 27080, 27103], [27080, 27104, 27103], [27080, 27081, 27105], [27080, 27105, 27104], [27081, 27082, 27105], [27082, 27106, 27105], [27082, 27083, 27107], [27082, 27107, 27106], [27083, 27084, 27107], [27084, 27108, 27107], [27084, 27085, 27109], [27084, 27109, 27108], [27085, 27086, 27109], [27086, 27110, 27109], [27086, 27087, 27111], [27086, 27111, 27110], [27087, 27088, 27111], [27088, 27112, 27111], [27088, 27089, 27113], [27088, 27113, 27112], [27089, 27090, 27113], [27090, 27114, 27113], [27090, 27091, 27115], [27090, 27115, 27114], [27091, 27092, 27115], [27092, 27116, 27115], [27092, 27093, 27117], [27092, 27117, 27116], [27093, 27094, 27117], [27094, 27118, 27117], [27094, 27095, 27119], [27094, 27119, 27118], [27095, 27096, 27119], [27096, 27120, 27119], [27096, 27097, 27121], [27096, 27121, 27120], [27097, 27098, 27121], [27098, 27122, 27121], [27098, 27099, 27123], [27098, 27123, 27122], [8445, 27100, 27124], [8445, 27124, 8563], [27100, 27101, 27124], [27101, 27125, 27124], [27101, 27102, 27126], [27101, 27126, 27125], [27102, 27103, 27126], [27103, 27127, 27126], [27103, 27104, 27128], [27103, 27128, 27127], [27104, 27105, 27128], [27105, 27129, 27128], [27105, 27106, 27130], [27105, 27130, 27129], [27106, 27107, 27130], [27107, 27131, 27130], [27107, 27108, 27132], [27107, 27132, 27131], [27108, 27109, 27132], [27109, 27133, 27132], [27109, 27110, 27134], [27109, 27134, 27133], [27110, 27111, 27134], [27111, 27135, 27134], [27111, 27112, 27136], [27111, 27136, 27135], [27112, 27113, 27136], [27113, 27137, 27136], [27113, 27114, 27138], [27113, 27138, 27137], [27114, 27115, 27138], [27115, 27139, 27138], [27115, 27116, 27140], [27115, 27140, 27139], [27116, 27117, 27140], [27117, 27141, 27140], [27117, 27118, 27142], [27117, 27142, 27141], [27118, 27119, 27142], [27119, 27143, 27142], [27119, 27120, 27144], [27119, 27144, 27143], [27120, 27121, 27144], [27121, 27145, 27144], [27121, 27122, 27146], [27121, 27146, 27145], [27122, 27123, 27146], [27123, 27147, 27146], [8563, 27124, 8681], [27124, 27148, 8681], [27124, 
27125, 27149], [27124, 27149, 27148], [27125, 27126, 27149], [27126, 27150, 27149], [27126, 27127, 27151], [27126, 27151, 27150], [27127, 27128, 27151], [27128, 27152, 27151], [27128, 27129, 27153], [27128, 27153, 27152], [27129, 27130, 27153], [27130, 27154, 27153], [27130, 27131, 27155], [27130, 27155, 27154], [27131, 27132, 27155], [27132, 27156, 27155], [27132, 27133, 27157], [27132, 27157, 27156], [27133, 27134, 27157], [27134, 27158, 27157], [27134, 27135, 27159], [27134, 27159, 27158], [27135, 27136, 27159], [27136, 27160, 27159], [27136, 27137, 27161], [27136, 27161, 27160], [27137, 27138, 27161], [27138, 27162, 27161], [27138, 27139, 27163], [27138, 27163, 27162], [27139, 27140, 27163], [27140, 27164, 27163], [27140, 27141, 27165], [27140, 27165, 27164], [27141, 27142, 27165], [27142, 27166, 27165], [27142, 27143, 27167], [27142, 27167, 27166], [27143, 27144, 27167], [27144, 27168, 27167], [27144, 27145, 27169], [27144, 27169, 27168], [27145, 27146, 27169], [27146, 27170, 27169], [27146, 27147, 27171], [27146, 27171, 27170], [8681, 27148, 27172], [8681, 27172, 8799], [27148, 27149, 27172], [27149, 27173, 27172], [27149, 27150, 27174], [27149, 27174, 27173], [27150, 27151, 27174], [27151, 27175, 27174], [27151, 27152, 27176], [27151, 27176, 27175], [27152, 27153, 27176], [27153, 27177, 27176], [27153, 27154, 27178], [27153, 27178, 27177], [27154, 27155, 27178], [27155, 27179, 27178], [27155, 27156, 27180], [27155, 27180, 27179], [27156, 27157, 27180], [27157, 27181, 27180], [27157, 27158, 27182], [27157, 27182, 27181], [27158, 27159, 27182], [27159, 27183, 27182], [27159, 27160, 27184], [27159, 27184, 27183], [27160, 27161, 27184], [27161, 27185, 27184], [27161, 27162, 27186], [27161, 27186, 27185], [27162, 27163, 27186], [27163, 27187, 27186], [27163, 27164, 27188], [27163, 27188, 27187], [27164, 27165, 27188], [27165, 27189, 27188], [27165, 27166, 27190], [27165, 27190, 27189], [27166, 27167, 27190], [27167, 27191, 27190], [27167, 27168, 27192], [27167, 27192, 27191], [27168, 27169, 27192], [27169, 27193, 27192], [27169, 27170, 27194], [27169, 27194, 27193], [27170, 27171, 27194], [27171, 27195, 27194], [8799, 27172, 8917], [27172, 27196, 8917], [27172, 27173, 27197], [27172, 27197, 27196], [27173, 27174, 27197], [27174, 27198, 27197], [27174, 27175, 27199], [27174, 27199, 27198], [27175, 27176, 27199], [27176, 27200, 27199], [27176, 27177, 27201], [27176, 27201, 27200], [27177, 27178, 27201], [27178, 27202, 27201], [27178, 27179, 27203], [27178, 27203, 27202], [27179, 27180, 27203], [27180, 27204, 27203], [27180, 27181, 27205], [27180, 27205, 27204], [27181, 27182, 27205], [27182, 27206, 27205], [27182, 27183, 27207], [27182, 27207, 27206], [27183, 27184, 27207], [27184, 27208, 27207], [27184, 27185, 27209], [27184, 27209, 27208], [27185, 27186, 27209], [27186, 27210, 27209], [27186, 27187, 27211], [27186, 27211, 27210], [27187, 27188, 27211], [27188, 27212, 27211], [27188, 27189, 27213], [27188, 27213, 27212], [27189, 27190, 27213], [27190, 27214, 27213], [27190, 27191, 27215], [27190, 27215, 27214], [27191, 27192, 27215], [27192, 27216, 27215], [27192, 27193, 27217], [27192, 27217, 27216], [27193, 27194, 27217], [27194, 27218, 27217], [27194, 27195, 27219], [27194, 27219, 27218], [8917, 27196, 27220], [8917, 27220, 9035], [27196, 27197, 27220], [27197, 27221, 27220], [27197, 27198, 27222], [27197, 27222, 27221], [27198, 27199, 27222], [27199, 27223, 27222], [27199, 27200, 27224], [27199, 27224, 27223], [27200, 27201, 27224], [27201, 27225, 27224], [27201, 27202, 27226], 
[27201, 27226, 27225], [27202, 27203, 27226], [27203, 27227, 27226], [27203, 27204, 27228], [27203, 27228, 27227], [27204, 27205, 27228], [27205, 27229, 27228], [27205, 27206, 27230], [27205, 27230, 27229], [27206, 27207, 27230], [27207, 27231, 27230], [27207, 27208, 27232], [27207, 27232, 27231], [27208, 27209, 27232], [27209, 27233, 27232], [27209, 27210, 27234], [27209, 27234, 27233], [27210, 27211, 27234], [27211, 27235, 27234], [27211, 27212, 27236], [27211, 27236, 27235], [27212, 27213, 27236], [27213, 27237, 27236], [27213, 27214, 27238], [27213, 27238, 27237], [27214, 27215, 27238], [27215, 27239, 27238], [27215, 27216, 27240], [27215, 27240, 27239], [27216, 27217, 27240], [27217, 27241, 27240], [27217, 27218, 27242], [27217, 27242, 27241], [27218, 27219, 27242], [27219, 27243, 27242], [9035, 27220, 9153], [27220, 27244, 9153], [27220, 27221, 27245], [27220, 27245, 27244], [27221, 27222, 27245], [27222, 27246, 27245], [27222, 27223, 27247], [27222, 27247, 27246], [27223, 27224, 27247], [27224, 27248, 27247], [27224, 27225, 27249], [27224, 27249, 27248], [27225, 27226, 27249], [27226, 27250, 27249], [27226, 27227, 27251], [27226, 27251, 27250], [27227, 27228, 27251], [27228, 27252, 27251], [27228, 27229, 27253], [27228, 27253, 27252], [27229, 27230, 27253], [27230, 27254, 27253], [27230, 27231, 27255], [27230, 27255, 27254], [27231, 27232, 27255], [27232, 27256, 27255], [27232, 27233, 27257], [27232, 27257, 27256], [27233, 27234, 27257], [27234, 27258, 27257], [27234, 27235, 27259], [27234, 27259, 27258], [27235, 27236, 27259], [27236, 27260, 27259], [27236, 27237, 27261], [27236, 27261, 27260], [27237, 27238, 27261], [27238, 27262, 27261], [27238, 27239, 27263], [27238, 27263, 27262], [27239, 27240, 27263], [27240, 27264, 27263], [27240, 27241, 27265], [27240, 27265, 27264], [27241, 27242, 27265], [27242, 27266, 27265], [27242, 27243, 27267], [27242, 27267, 27266], [9153, 27244, 27268], [9153, 27268, 9271], [27244, 27245, 27268], [27245, 27269, 27268], [27245, 27246, 27270], [27245, 27270, 27269], [27246, 27247, 27270], [27247, 27271, 27270], [27247, 27248, 27272], [27247, 27272, 27271], [27248, 27249, 27272], [27249, 27273, 27272], [27249, 27250, 27274], [27249, 27274, 27273], [27250, 27251, 27274], [27251, 27275, 27274], [27251, 27252, 27276], [27251, 27276, 27275], [27252, 27253, 27276], [27253, 27277, 27276], [27253, 27254, 27278], [27253, 27278, 27277], [27254, 27255, 27278], [27255, 27279, 27278], [27255, 27256, 27280], [27255, 27280, 27279], [27256, 27257, 27280], [27257, 27281, 27280], [27257, 27258, 27282], [27257, 27282, 27281], [27258, 27259, 27282], [27259, 27283, 27282], [27259, 27260, 27284], [27259, 27284, 27283], [27260, 27261, 27284], [27261, 27285, 27284], [27261, 27262, 27286], [27261, 27286, 27285], [27262, 27263, 27286], [27263, 27287, 27286], [27263, 27264, 27288], [27263, 27288, 27287], [27264, 27265, 27288], [27265, 27289, 27288], [27265, 27266, 27290], [27265, 27290, 27289], [27266, 27267, 27290], [27267, 27291, 27290], [9271, 27268, 9389], [27268, 27292, 9389], [27268, 27269, 27293], [27268, 27293, 27292], [27269, 27270, 27293], [27270, 27294, 27293], [27270, 27271, 27295], [27270, 27295, 27294], [27271, 27272, 27295], [27272, 27296, 27295], [27272, 27273, 27297], [27272, 27297, 27296], [27273, 27274, 27297], [27274, 27298, 27297], [27274, 27275, 27299], [27274, 27299, 27298], [27275, 27276, 27299], [27276, 27300, 27299], [27276, 27277, 27301], [27276, 27301, 27300], [27277, 27278, 27301], [27278, 27302, 27301], [27278, 27279, 27303], [27278, 27303, 
27302], [27279, 27280, 27303], [27280, 27304, 27303], [27280, 27281, 27305], [27280, 27305, 27304], [27281, 27282, 27305], [27282, 27306, 27305], [27282, 27283, 27307], [27282, 27307, 27306], [27283, 27284, 27307], [27284, 27308, 27307], [27284, 27285, 27309], [27284, 27309, 27308], [27285, 27286, 27309], [27286, 27310, 27309], [27286, 27287, 27311], [27286, 27311, 27310], [27287, 27288, 27311], [27288, 27312, 27311], [27288, 27289, 27313], [27288, 27313, 27312], [27289, 27290, 27313], [27290, 27314, 27313], [27290, 27291, 27315], [27290, 27315, 27314], [9389, 27292, 27316], [9389, 27316, 9507], [27292, 27293, 27316], [27293, 27317, 27316], [27293, 27294, 27318], [27293, 27318, 27317], [27294, 27295, 27318], [27295, 27319, 27318], [27295, 27296, 27320], [27295, 27320, 27319], [27296, 27297, 27320], [27297, 27321, 27320], [27297, 27298, 27322], [27297, 27322, 27321], [27298, 27299, 27322], [27299, 27323, 27322], [27299, 27300, 27324], [27299, 27324, 27323], [27300, 27301, 27324], [27301, 27325, 27324], [27301, 27302, 27326], [27301, 27326, 27325], [27302, 27303, 27326], [27303, 27327, 27326], [27303, 27304, 27328], [27303, 27328, 27327], [27304, 27305, 27328], [27305, 27329, 27328], [27305, 27306, 27330], [27305, 27330, 27329], [27306, 27307, 27330], [27307, 27331, 27330], [27307, 27308, 27332], [27307, 27332, 27331], [27308, 27309, 27332], [27309, 27333, 27332], [27309, 27310, 27334], [27309, 27334, 27333], [27310, 27311, 27334], [27311, 27335, 27334], [27311, 27312, 27336], [27311, 27336, 27335], [27312, 27313, 27336], [27313, 27337, 27336], [27313, 27314, 27338], [27313, 27338, 27337], [27314, 27315, 27338], [27315, 27339, 27338], [9507, 27316, 9625], [27316, 27340, 9625], [27316, 27317, 27341], [27316, 27341, 27340], [27317, 27318, 27341], [27318, 27342, 27341], [27318, 27319, 27343], [27318, 27343, 27342], [27319, 27320, 27343], [27320, 27344, 27343], [27320, 27321, 27345], [27320, 27345, 27344], [27321, 27322, 27345], [27322, 27346, 27345], [27322, 27323, 27347], [27322, 27347, 27346], [27323, 27324, 27347], [27324, 27348, 27347], [27324, 27325, 27349], [27324, 27349, 27348], [27325, 27326, 27349], [27326, 27350, 27349], [27326, 27327, 27351], [27326, 27351, 27350], [27327, 27328, 27351], [27328, 27352, 27351], [27328, 27329, 27353], [27328, 27353, 27352], [27329, 27330, 27353], [27330, 27354, 27353], [27330, 27331, 27355], [27330, 27355, 27354], [27331, 27332, 27355], [27332, 27356, 27355], [27332, 27333, 27357], [27332, 27357, 27356], [27333, 27334, 27357], [27334, 27358, 27357], [27334, 27335, 27359], [27334, 27359, 27358], [27335, 27336, 27359], [27336, 27360, 27359], [27336, 27337, 27361], [27336, 27361, 27360], [27337, 27338, 27361], [27338, 27362, 27361], [27338, 27339, 27363], [27338, 27363, 27362], [9625, 27340, 27364], [9625, 27364, 9743], [27340, 27341, 27364], [27341, 27365, 27364], [27341, 27342, 27366], [27341, 27366, 27365], [27342, 27343, 27366], [27343, 27367, 27366], [27343, 27344, 27368], [27343, 27368, 27367], [27344, 27345, 27368], [27345, 27369, 27368], [27345, 27346, 27370], [27345, 27370, 27369], [27346, 27347, 27370], [27347, 27371, 27370], [27347, 27348, 27372], [27347, 27372, 27371], [27348, 27349, 27372], [27349, 27373, 27372], [27349, 27350, 27374], [27349, 27374, 27373], [27350, 27351, 27374], [27351, 27375, 27374], [27351, 27352, 27376], [27351, 27376, 27375], [27352, 27353, 27376], [27353, 27377, 27376], [27353, 27354, 27378], [27353, 27378, 27377], [27354, 27355, 27378], [27355, 27379, 27378], [27355, 27356, 27380], [27355, 27380, 27379], [27356, 
27357, 27380], [27357, 27381, 27380], [27357, 27358, 27382], [27357, 27382, 27381], [27358, 27359, 27382], [27359, 27383, 27382], [27359, 27360, 27384], [27359, 27384, 27383], [27360, 27361, 27384], [27361, 27385, 27384], [27361, 27362, 27386], [27361, 27386, 27385], [27362, 27363, 27386], [27363, 27387, 27386], [9743, 27364, 9863], [27364, 27388, 9863], [27364, 27365, 27389], [27364, 27389, 27388], [27365, 27366, 27389], [27366, 27390, 27389], [27366, 27367, 27391], [27366, 27391, 27390], [27367, 27368, 27391], [27368, 27392, 27391], [27368, 27369, 27393], [27368, 27393, 27392], [27369, 27370, 27393], [27370, 27394, 27393], [27370, 27371, 27395], [27370, 27395, 27394], [27371, 27372, 27395], [27372, 27396, 27395], [27372, 27373, 27397], [27372, 27397, 27396], [27373, 27374, 27397], [27374, 27398, 27397], [27374, 27375, 27399], [27374, 27399, 27398], [27375, 27376, 27399], [27376, 27400, 27399], [27376, 27377, 27401], [27376, 27401, 27400], [27377, 27378, 27401], [27378, 27402, 27401], [27378, 27379, 27403], [27378, 27403, 27402], [27379, 27380, 27403], [27380, 27404, 27403], [27380, 27381, 27405], [27380, 27405, 27404], [27381, 27382, 27405], [27382, 27406, 27405], [27382, 27383, 27407], [27382, 27407, 27406], [27383, 27384, 27407], [27384, 27408, 27407], [27384, 27385, 27409], [27384, 27409, 27408], [27385, 27386, 27409], [27386, 27410, 27409], [27386, 27387, 27411], [27386, 27411, 27410], [9863, 27388, 27412], [9863, 27412, 9984], [27388, 27389, 27412], [27389, 27413, 27412], [27389, 27390, 27414], [27389, 27414, 27413], [27390, 27391, 27414], [27391, 27415, 27414], [27391, 27392, 27416], [27391, 27416, 27415], [27392, 27393, 27416], [27393, 27417, 27416], [27393, 27394, 27418], [27393, 27418, 27417], [27394, 27395, 27418], [27395, 27419, 27418], [27395, 27396, 27420], [27395, 27420, 27419], [27396, 27397, 27420], [27397, 27421, 27420], [27397, 27398, 27422], [27397, 27422, 27421], [27398, 27399, 27422], [27399, 27423, 27422], [27399, 27400, 27424], [27399, 27424, 27423], [27400, 27401, 27424], [27401, 27425, 27424], [27401, 27402, 27426], [27401, 27426, 27425], [27402, 27403, 27426], [27403, 27427, 27426], [27403, 27404, 27428], [27403, 27428, 27427], [27404, 27405, 27428], [27405, 27429, 27428], [27405, 27406, 27430], [27405, 27430, 27429], [27406, 27407, 27430], [27407, 27431, 27430], [27407, 27408, 27432], [27407, 27432, 27431], [27408, 27409, 27432], [27409, 27433, 27432], [27409, 27410, 27434], [27409, 27434, 27433], [27410, 27411, 27434], [27411, 27435, 27434], [9984, 27412, 10110], [27412, 27436, 10110], [27412, 27413, 27437], [27412, 27437, 27436], [27413, 27414, 27437], [27414, 27438, 27437], [27414, 27415, 27439], [27414, 27439, 27438], [27415, 27416, 27439], [27416, 27440, 27439], [27416, 27417, 27441], [27416, 27441, 27440], [27417, 27418, 27441], [27418, 27442, 27441], [27418, 27419, 27443], [27418, 27443, 27442], [27419, 27420, 27443], [27420, 27444, 27443], [27420, 27421, 27445], [27420, 27445, 27444], [27421, 27422, 27445], [27422, 27446, 27445], [27422, 27423, 27447], [27422, 27447, 27446], [27423, 27424, 27447], [27424, 27448, 27447], [27424, 27425, 27449], [27424, 27449, 27448], [27425, 27426, 27449], [27426, 27450, 27449], [27426, 27427, 27451], [27426, 27451, 27450], [27427, 27428, 27451], [27428, 27452, 27451], [27428, 27429, 27453], [27428, 27453, 27452], [27429, 27430, 27453], [27430, 27454, 27453], [27430, 27431, 27455], [27430, 27455, 27454], [27431, 27432, 27455], [27432, 27456, 27455], [27432, 27433, 27457], [27432, 27457, 27456], [27433, 27434, 27457], 
[27434, 27458, 27457], [27434, 27435, 27459], [27434, 27459, 27458], [10110, 27436, 27460], [10110, 27460, 10236], [27436, 27437, 27460], [27437, 27461, 27460], [27437, 27438, 27462], [27437, 27462, 27461], [27438, 27439, 27462], [27439, 27463, 27462], [27439, 27440, 27464], [27439, 27464, 27463], [27440, 27441, 27464], [27441, 27465, 27464], [27441, 27442, 27466], [27441, 27466, 27465], [27442, 27443, 27466], [27443, 27467, 27466], [27443, 27444, 27468], [27443, 27468, 27467], [27444, 27445, 27468], [27445, 27469, 27468], [27445, 27446, 27470], [27445, 27470, 27469], [27446, 27447, 27470], [27447, 27471, 27470], [27447, 27448, 27472], [27447, 27472, 27471], [27448, 27449, 27472], [27449, 27473, 27472], [27449, 27450, 27474], [27449, 27474, 27473], [27450, 27451, 27474], [27451, 27475, 27474], [27451, 27452, 27476], [27451, 27476, 27475], [27452, 27453, 27476], [27453, 27477, 27476], [27453, 27454, 27478], [27453, 27478, 27477], [27454, 27455, 27478], [27455, 27479, 27478], [27455, 27456, 27480], [27455, 27480, 27479], [27456, 27457, 27480], [27457, 27481, 27480], [27457, 27458, 27482], [27457, 27482, 27481], [27458, 27459, 27482], [27459, 27483, 27482], [10236, 27460, 10365], [27460, 27484, 10365], [27460, 27461, 27485], [27460, 27485, 27484], [27461, 27462, 27485], [27462, 27486, 27485], [27462, 27463, 27487], [27462, 27487, 27486], [27463, 27464, 27487], [27464, 27488, 27487], [27464, 27465, 27489], [27464, 27489, 27488], [27465, 27466, 27489], [27466, 27490, 27489], [27466, 27467, 27491], [27466, 27491, 27490], [27467, 27468, 27491], [27468, 27492, 27491], [27468, 27469, 27493], [27468, 27493, 27492], [27469, 27470, 27493], [27470, 27494, 27493], [27470, 27471, 27495], [27470, 27495, 27494], [27471, 27472, 27495], [27472, 27496, 27495], [27472, 27473, 27497], [27472, 27497, 27496], [27473, 27474, 27497], [27474, 27498, 27497], [27474, 27475, 27499], [27474, 27499, 27498], [27475, 27476, 27499], [27476, 27500, 27499], [27476, 27477, 27501], [27476, 27501, 27500], [27477, 27478, 27501], [27478, 27502, 27501], [27478, 27479, 27503], [27478, 27503, 27502], [27479, 27480, 27503], [27480, 27504, 27503], [27480, 27481, 27505], [27480, 27505, 27504], [27481, 27482, 27505], [27482, 27506, 27505], [27482, 27483, 27507], [27482, 27507, 27506], [10365, 27484, 27508], [10365, 27508, 10494], [27484, 27485, 27508], [27485, 27509, 27508], [27485, 27486, 27510], [27485, 27510, 27509], [27486, 27487, 27510], [27487, 27511, 27510], [27487, 27488, 27512], [27487, 27512, 27511], [27488, 27489, 27512], [27489, 27513, 27512], [27489, 27490, 27514], [27489, 27514, 27513], [27490, 27491, 27514], [27491, 27515, 27514], [27491, 27492, 27516], [27491, 27516, 27515], [27492, 27493, 27516], [27493, 27517, 27516], [27493, 27494, 27518], [27493, 27518, 27517], [27494, 27495, 27518], [27495, 27519, 27518], [27495, 27496, 27520], [27495, 27520, 27519], [27496, 27497, 27520], [27497, 27521, 27520], [27497, 27498, 27522], [27497, 27522, 27521], [27498, 27499, 27522], [27499, 27523, 27522], [27499, 27500, 27524], [27499, 27524, 27523], [27500, 27501, 27524], [27501, 27525, 27524], [27501, 27502, 27526], [27501, 27526, 27525], [27502, 27503, 27526], [27503, 27527, 27526], [27503, 27504, 27528], [27503, 27528, 27527], [27504, 27505, 27528], [27505, 27529, 27528], [27505, 27506, 27530], [27505, 27530, 27529], [27506, 27507, 27530], [27507, 27531, 27530], [10494, 27508, 10623], [27508, 27532, 10623], [27508, 27509, 27533], [27508, 27533, 27532], [27509, 27510, 27533], [27510, 27534, 27533], [27510, 27511, 27535], [27510, 
27535, 27534], [27511, 27512, 27535], [27512, 27536, 27535], [27512, 27513, 27537], [27512, 27537, 27536], [27513, 27514, 27537], [27514, 27538, 27537], [27514, 27515, 27539], [27514, 27539, 27538], [27515, 27516, 27539], [27516, 27540, 27539], [27516, 27517, 27541], [27516, 27541, 27540], [27517, 27518, 27541], [27518, 27542, 27541], [27518, 27519, 27543], [27518, 27543, 27542], [27519, 27520, 27543], [27520, 27544, 27543], [27520, 27521, 27545], [27520, 27545, 27544], [27521, 27522, 27545], [27522, 27546, 27545], [27522, 27523, 27547], [27522, 27547, 27546], [27523, 27524, 27547], [27524, 27548, 27547], [27524, 27525, 27549], [27524, 27549, 27548], [27525, 27526, 27549], [27526, 27550, 27549], [27526, 27527, 27551], [27526, 27551, 27550], [27527, 27528, 27551], [27528, 27552, 27551], [27528, 27529, 27553], [27528, 27553, 27552], [27529, 27530, 27553], [27530, 27554, 27553], [27530, 27531, 27555], [27530, 27555, 27554], [10623, 27532, 27556], [10623, 27556, 10752], [27532, 27533, 27556], [27533, 27557, 27556], [27533, 27534, 27558], [27533, 27558, 27557], [27534, 27535, 27558], [27535, 27559, 27558], [27535, 27536, 27560], [27535, 27560, 27559], [27536, 27537, 27560], [27537, 27561, 27560], [27537, 27538, 27562], [27537, 27562, 27561], [27538, 27539, 27562], [27539, 27563, 27562], [27539, 27540, 27564], [27539, 27564, 27563], [27540, 27541, 27564], [27541, 27565, 27564], [27541, 27542, 27566], [27541, 27566, 27565], [27542, 27543, 27566], [27543, 27567, 27566], [27543, 27544, 27568], [27543, 27568, 27567], [27544, 27545, 27568], [27545, 27569, 27568], [27545, 27546, 27570], [27545, 27570, 27569], [27546, 27547, 27570], [27547, 27571, 27570], [27547, 27548, 27572], [27547, 27572, 27571], [27548, 27549, 27572], [27549, 27573, 27572], [27549, 27550, 27574], [27549, 27574, 27573], [27550, 27551, 27574], [27551, 27575, 27574], [27551, 27552, 27576], [27551, 27576, 27575], [27552, 27553, 27576], [27553, 27577, 27576], [27553, 27554, 27578], [27553, 27578, 27577], [27554, 27555, 27578], [27555, 27579, 27578], [10752, 27556, 10881], [27556, 27580, 10881], [27556, 27557, 27581], [27556, 27581, 27580], [27557, 27558, 27581], [27558, 27582, 27581], [27558, 27559, 27583], [27558, 27583, 27582], [27559, 27560, 27583], [27560, 27584, 27583], [27560, 27561, 27585], [27560, 27585, 27584], [27561, 27562, 27585], [27562, 27586, 27585], [27562, 27563, 27587], [27562, 27587, 27586], [27563, 27564, 27587], [27564, 27588, 27587], [27564, 27565, 27589], [27564, 27589, 27588], [27565, 27566, 27589], [27566, 27590, 27589], [27566, 27567, 27591], [27566, 27591, 27590], [27567, 27568, 27591], [27568, 27592, 27591], [27568, 27569, 27593], [27568, 27593, 27592], [27569, 27570, 27593], [27570, 27594, 27593], [27570, 27571, 27595], [27570, 27595, 27594], [27571, 27572, 27595], [27572, 27596, 27595], [27572, 27573, 27597], [27572, 27597, 27596], [27573, 27574, 27597], [27574, 27598, 27597], [27574, 27575, 27599], [27574, 27599, 27598], [27575, 27576, 27599], [27576, 27600, 27599], [27576, 27577, 27601], [27576, 27601, 27600], [27577, 27578, 27601], [27578, 27602, 27601], [27578, 27579, 27603], [27578, 27603, 27602], [10881, 27580, 27604], [10881, 27604, 11010], [27580, 27581, 27604], [27581, 27605, 27604], [27581, 27582, 27606], [27581, 27606, 27605], [27582, 27583, 27606], [27583, 27607, 27606], [27583, 27584, 27608], [27583, 27608, 27607], [27584, 27585, 27608], [27585, 27609, 27608], [27585, 27586, 27610], [27585, 27610, 27609], [27586, 27587, 27610], [27587, 27611, 27610], [27587, 27588, 27612], [27587, 27612, 
27611], [27588, 27589, 27612], [27589, 27613, 27612], [27589, 27590, 27614], [27589, 27614, 27613], [27590, 27591, 27614], [27591, 27615, 27614], [27591, 27592, 27616], [27591, 27616, 27615], [27592, 27593, 27616], [27593, 27617, 27616], [27593, 27594, 27618], [27593, 27618, 27617], [27594, 27595, 27618], [27595, 27619, 27618], [27595, 27596, 27620], [27595, 27620, 27619], [27596, 27597, 27620], [27597, 27621, 27620], [27597, 27598, 27622], [27597, 27622, 27621], [27598, 27599, 27622], [27599, 27623, 27622], [27599, 27600, 27624], [27599, 27624, 27623], [27600, 27601, 27624], [27601, 27625, 27624], [27601, 27602, 27626], [27601, 27626, 27625], [27602, 27603, 27626], [27603, 27627, 27626], [11010, 27604, 11139], [27604, 27628, 11139], [27604, 27605, 27629], [27604, 27629, 27628], [27605, 27606, 27629], [27606, 27630, 27629], [27606, 27607, 27631], [27606, 27631, 27630], [27607, 27608, 27631], [27608, 27632, 27631], [27608, 27609, 27633], [27608, 27633, 27632], [27609, 27610, 27633], [27610, 27634, 27633], [27610, 27611, 27635], [27610, 27635, 27634], [27611, 27612, 27635], [27612, 27636, 27635], [27612, 27613, 27637], [27612, 27637, 27636], [27613, 27614, 27637], [27614, 27638, 27637], [27614, 27615, 27639], [27614, 27639, 27638], [27615, 27616, 27639], [27616, 27640, 27639], [27616, 27617, 27641], [27616, 27641, 27640], [27617, 27618, 27641], [27618, 27642, 27641], [27618, 27619, 27643], [27618, 27643, 27642], [27619, 27620, 27643], [27620, 27644, 27643], [27620, 27621, 27645], [27620, 27645, 27644], [27621, 27622, 27645], [27622, 27646, 27645], [27622, 27623, 27647], [27622, 27647, 27646], [27623, 27624, 27647], [27624, 27648, 27647], [27624, 27625, 27649], [27624, 27649, 27648], [27625, 27626, 27649], [27626, 27650, 27649], [27626, 27627, 27651], [27626, 27651, 27650], [11139, 27628, 27652], [11139, 27652, 11268], [27628, 27629, 27652], [27629, 27653, 27652], [27629, 27630, 27654], [27629, 27654, 27653], [27630, 27631, 27654], [27631, 27655, 27654], [27631, 27632, 27656], [27631, 27656, 27655], [27632, 27633, 27656], [27633, 27657, 27656], [27633, 27634, 27658], [27633, 27658, 27657], [27634, 27635, 27658], [27635, 27659, 27658], [27635, 27636, 27660], [27635, 27660, 27659], [27636, 27637, 27660], [27637, 27661, 27660], [27637, 27638, 27662], [27637, 27662, 27661], [27638, 27639, 27662], [27639, 27663, 27662], [27639, 27640, 27664], [27639, 27664, 27663], [27640, 27641, 27664], [27641, 27665, 27664], [27641, 27642, 27666], [27641, 27666, 27665], [27642, 27643, 27666], [27643, 27667, 27666], [27643, 27644, 27668], [27643, 27668, 27667], [27644, 27645, 27668], [27645, 27669, 27668], [27645, 27646, 27670], [27645, 27670, 27669], [27646, 27647, 27670], [27647, 27671, 27670], [27647, 27648, 27672], [27647, 27672, 27671], [27648, 27649, 27672], [27649, 27673, 27672], [27649, 27650, 27674], [27649, 27674, 27673], [27650, 27651, 27674], [27651, 27675, 27674], [11268, 27652, 11397], [27652, 27676, 11397], [27652, 27653, 27677], [27652, 27677, 27676], [27653, 27654, 27677], [27654, 27678, 27677], [27654, 27655, 27679], [27654, 27679, 27678], [27655, 27656, 27679], [27656, 27680, 27679], [27656, 27657, 27681], [27656, 27681, 27680], [27657, 27658, 27681], [27658, 27682, 27681], [27658, 27659, 27683], [27658, 27683, 27682], [27659, 27660, 27683], [27660, 27684, 27683], [27660, 27661, 27685], [27660, 27685, 27684], [27661, 27662, 27685], [27662, 27686, 27685], [27662, 27663, 27687], [27662, 27687, 27686], [27663, 27664, 27687], [27664, 27688, 27687], [27664, 27665, 27689], [27664, 27689, 27688], 
[27665, 27666, 27689], [27666, 27690, 27689], [27666, 27667, 27691], [27666, 27691, 27690], [27667, 27668, 27691], [27668, 27692, 27691], [27668, 27669, 27693], [27668, 27693, 27692], [27669, 27670, 27693], [27670, 27694, 27693], [27670, 27671, 27695], [27670, 27695, 27694], [27671, 27672, 27695], [27672, 27696, 27695], [27672, 27673, 27697], [27672, 27697, 27696], [27673, 27674, 27697], [27674, 27698, 27697], [27674, 27675, 27699], [27674, 27699, 27698], [11397, 27676, 27700], [11397, 27700, 11526], [27676, 27677, 27700], [27677, 27701, 27700], [27677, 27678, 27702], [27677, 27702, 27701], [27678, 27679, 27702], [27679, 27703, 27702], [27679, 27680, 27704], [27679, 27704, 27703], [27680, 27681, 27704], [27681, 27705, 27704], [27681, 27682, 27706], [27681, 27706, 27705], [27682, 27683, 27706], [27683, 27707, 27706], [27683, 27684, 27708], [27683, 27708, 27707], [27684, 27685, 27708], [27685, 27709, 27708], [27685, 27686, 27710], [27685, 27710, 27709], [27686, 27687, 27710], [27687, 27711, 27710], [27687, 27688, 27712], [27687, 27712, 27711], [27688, 27689, 27712], [27689, 27713, 27712], [27689, 27690, 27714], [27689, 27714, 27713], [27690, 27691, 27714], [27691, 27715, 27714], [27691, 27692, 27716], [27691, 27716, 27715], [27692, 27693, 27716], [27693, 27717, 27716], [27693, 27694, 27718], [27693, 27718, 27717], [27694, 27695, 27718], [27695, 27719, 27718], [27695, 27696, 27720], [27695, 27720, 27719], [27696, 27697, 27720], [27697, 27721, 27720], [27697, 27698, 27722], [27697, 27722, 27721], [27698, 27699, 27722], [27699, 27723, 27722], [11526, 27700, 11655], [27700, 27724, 11655], [27700, 27701, 27725], [27700, 27725, 27724], [27701, 27702, 27725], [27702, 27726, 27725], [27702, 27703, 27727], [27702, 27727, 27726], [27703, 27704, 27727], [27704, 27728, 27727], [27704, 27705, 27729], [27704, 27729, 27728], [27705, 27706, 27729], [27706, 27730, 27729], [27706, 27707, 27731], [27706, 27731, 27730], [27707, 27708, 27731], [27708, 27732, 27731], [27708, 27709, 27733], [27708, 27733, 27732], [27709, 27710, 27733], [27710, 27734, 27733], [27710, 27711, 27735], [27710, 27735, 27734], [27711, 27712, 27735], [27712, 27736, 27735], [27712, 27713, 27737], [27712, 27737, 27736], [27713, 27714, 27737], [27714, 27738, 27737], [27714, 27715, 27739], [27714, 27739, 27738], [27715, 27716, 27739], [27716, 27740, 27739], [27716, 27717, 27741], [27716, 27741, 27740], [27717, 27718, 27741], [27718, 27742, 27741], [27718, 27719, 27743], [27718, 27743, 27742], [27719, 27720, 27743], [27720, 27744, 27743], [27720, 27721, 27745], [27720, 27745, 27744], [27721, 27722, 27745], [27722, 27746, 27745], [27722, 27723, 27747], [27722, 27747, 27746], [11655, 27724, 27748], [11655, 27748, 11784], [27724, 27725, 27748], [27725, 27749, 27748], [27725, 27726, 27750], [27725, 27750, 27749], [27726, 27727, 27750], [27727, 27751, 27750], [27727, 27728, 27752], [27727, 27752, 27751], [27728, 27729, 27752], [27729, 27753, 27752], [27729, 27730, 27754], [27729, 27754, 27753], [27730, 27731, 27754], [27731, 27755, 27754], [27731, 27732, 27756], [27731, 27756, 27755], [27732, 27733, 27756], [27733, 27757, 27756], [27733, 27734, 27758], [27733, 27758, 27757], [27734, 27735, 27758], [27735, 27759, 27758], [27735, 27736, 27760], [27735, 27760, 27759], [27736, 27737, 27760], [27737, 27761, 27760], [27737, 27738, 27762], [27737, 27762, 27761], [27738, 27739, 27762], [27739, 27763, 27762], [27739, 27740, 27764], [27739, 27764, 27763], [27740, 27741, 27764], [27741, 27765, 27764], [27741, 27742, 27766], [27741, 27766, 27765], [27742, 
27743, 27766], [27743, 27767, 27766], [27743, 27744, 27768], [27743, 27768, 27767], [27744, 27745, 27768], [27745, 27769, 27768], [27745, 27746, 27770], [27745, 27770, 27769], [27746, 27747, 27770], [27747, 27771, 27770], [11784, 27748, 11913], [27748, 27772, 11913], [27748, 27749, 27773], [27748, 27773, 27772], [27749, 27750, 27773], [27750, 27774, 27773], [27750, 27751, 27775], [27750, 27775, 27774], [27751, 27752, 27775], [27752, 27776, 27775], [27752, 27753, 27777], [27752, 27777, 27776], [27753, 27754, 27777], [27754, 27778, 27777], [27754, 27755, 27779], [27754, 27779, 27778], [27755, 27756, 27779], [27756, 27780, 27779], [27756, 27757, 27781], [27756, 27781, 27780], [27757, 27758, 27781], [27758, 27782, 27781], [27758, 27759, 27783], [27758, 27783, 27782], [27759, 27760, 27783], [27760, 27784, 27783], [27760, 27761, 27785], [27760, 27785, 27784], [27761, 27762, 27785], [27762, 27786, 27785], [27762, 27763, 27787], [27762, 27787, 27786], [27763, 27764, 27787], [27764, 27788, 27787], [27764, 27765, 27789], [27764, 27789, 27788], [27765, 27766, 27789], [27766, 27790, 27789], [27766, 27767, 27791], [27766, 27791, 27790], [27767, 27768, 27791], [27768, 27792, 27791], [27768, 27769, 27793], [27768, 27793, 27792], [27769, 27770, 27793], [27770, 27794, 27793], [27770, 27771, 27795], [27770, 27795, 27794], [11913, 27772, 27796], [11913, 27796, 12042], [27772, 27773, 27796], [27773, 27797, 27796], [27773, 27774, 27798], [27773, 27798, 27797], [27774, 27775, 27798], [27775, 27799, 27798], [27775, 27776, 27800], [27775, 27800, 27799], [27776, 27777, 27800], [27777, 27801, 27800], [27777, 27778, 27802], [27777, 27802, 27801], [27778, 27779, 27802], [27779, 27803, 27802], [27779, 27780, 27804], [27779, 27804, 27803], [27780, 27781, 27804], [27781, 27805, 27804], [27781, 27782, 27806], [27781, 27806, 27805], [27782, 27783, 27806], [27783, 27807, 27806], [27783, 27784, 27808], [27783, 27808, 27807], [27784, 27785, 27808], [27785, 27809, 27808], [27785, 27786, 27810], [27785, 27810, 27809], [27786, 27787, 27810], [27787, 27811, 27810], [27787, 27788, 27812], [27787, 27812, 27811], [27788, 27789, 27812], [27789, 27813, 27812], [27789, 27790, 27814], [27789, 27814, 27813], [27790, 27791, 27814], [27791, 27815, 27814], [27791, 27792, 27816], [27791, 27816, 27815], [27792, 27793, 27816], [27793, 27817, 27816], [27793, 27794, 27818], [27793, 27818, 27817], [27794, 27795, 27818], [27795, 27819, 27818], [12042, 27796, 12171], [27796, 27820, 12171], [27796, 27797, 27821], [27796, 27821, 27820], [27797, 27798, 27821], [27798, 27822, 27821], [27798, 27799, 27823], [27798, 27823, 27822], [27799, 27800, 27823], [27800, 27824, 27823], [27800, 27801, 27825], [27800, 27825, 27824], [27801, 27802, 27825], [27802, 27826, 27825], [27802, 27803, 27827], [27802, 27827, 27826], [27803, 27804, 27827], [27804, 27828, 27827], [27804, 27805, 27829], [27804, 27829, 27828], [27805, 27806, 27829], [27806, 27830, 27829], [27806, 27807, 27831], [27806, 27831, 27830], [27807, 27808, 27831], [27808, 27832, 27831], [27808, 27809, 27833], [27808, 27833, 27832], [27809, 27810, 27833], [27810, 27834, 27833], [27810, 27811, 27835], [27810, 27835, 27834], [27811, 27812, 27835], [27812, 27836, 27835], [27812, 27813, 27837], [27812, 27837, 27836], [27813, 27814, 27837], [27814, 27838, 27837], [27814, 27815, 27839], [27814, 27839, 27838], [27815, 27816, 27839], [27816, 27840, 27839], [27816, 27817, 27841], [27816, 27841, 27840], [27817, 27818, 27841], [27818, 27842, 27841], [27818, 27819, 27843], [27818, 27843, 27842], [12171, 27820, 
27844], [12171, 27844, 12300], [27820, 27821, 27844], [27821, 27845, 27844], [27821, 27822, 27846], [27821, 27846, 27845], [27822, 27823, 27846], [27823, 27847, 27846], [27823, 27824, 27848], [27823, 27848, 27847], [27824, 27825, 27848], [27825, 27849, 27848], [27825, 27826, 27850], [27825, 27850, 27849], [27826, 27827, 27850], [27827, 27851, 27850], [27827, 27828, 27852], [27827, 27852, 27851], [27828, 27829, 27852], [27829, 27853, 27852], [27829, 27830, 27854], [27829, 27854, 27853], [27830, 27831, 27854], [27831, 27855, 27854], [27831, 27832, 27856], [27831, 27856, 27855], [27832, 27833, 27856], [27833, 27857, 27856], [27833, 27834, 27858], [27833, 27858, 27857], [27834, 27835, 27858], [27835, 27859, 27858], [27835, 27836, 27860], [27835, 27860, 27859], [27836, 27837, 27860], [27837, 27861, 27860], [27837, 27838, 27862], [27837, 27862, 27861], [27838, 27839, 27862], [27839, 27863, 27862], [27839, 27840, 27864], [27839, 27864, 27863], [27840, 27841, 27864], [27841, 27865, 27864], [27841, 27842, 27866], [27841, 27866, 27865], [27842, 27843, 27866], [27843, 27867, 27866], [12300, 27844, 12429], [27844, 27868, 12429], [27844, 27845, 27869], [27844, 27869, 27868], [27845, 27846, 27869], [27846, 27870, 27869], [27846, 27847, 27871], [27846, 27871, 27870], [27847, 27848, 27871], [27848, 27872, 27871], [27848, 27849, 27873], [27848, 27873, 27872], [27849, 27850, 27873], [27850, 27874, 27873], [27850, 27851, 27875], [27850, 27875, 27874], [27851, 27852, 27875], [27852, 27876, 27875], [27852, 27853, 27877], [27852, 27877, 27876], [27853, 27854, 27877], [27854, 27878, 27877], [27854, 27855, 27879], [27854, 27879, 27878], [27855, 27856, 27879], [27856, 27880, 27879], [27856, 27857, 27881], [27856, 27881, 27880], [27857, 27858, 27881], [27858, 27882, 27881], [27858, 27859, 27883], [27858, 27883, 27882], [27859, 27860, 27883], [27860, 27884, 27883], [27860, 27861, 27885], [27860, 27885, 27884], [27861, 27862, 27885], [27862, 27886, 27885], [27862, 27863, 27887], [27862, 27887, 27886], [27863, 27864, 27887], [27864, 27888, 27887], [27864, 27865, 27889], [27864, 27889, 27888], [27865, 27866, 27889], [27866, 27890, 27889], [27866, 27867, 27891], [27866, 27891, 27890], [12429, 27868, 27892], [12429, 27892, 12558], [27868, 27869, 27892], [27869, 27893, 27892], [27869, 27870, 27894], [27869, 27894, 27893], [27870, 27871, 27894], [27871, 27895, 27894], [27871, 27872, 27896], [27871, 27896, 27895], [27872, 27873, 27896], [27873, 27897, 27896], [27873, 27874, 27898], [27873, 27898, 27897], [27874, 27875, 27898], [27875, 27899, 27898], [27875, 27876, 27900], [27875, 27900, 27899], [27876, 27877, 27900], [27877, 27901, 27900], [27877, 27878, 27902], [27877, 27902, 27901], [27878, 27879, 27902], [27879, 27903, 27902], [27879, 27880, 27904], [27879, 27904, 27903], [27880, 27881, 27904], [27881, 27905, 27904], [27881, 27882, 27906], [27881, 27906, 27905], [27882, 27883, 27906], [27883, 27907, 27906], [27883, 27884, 27908], [27883, 27908, 27907], [27884, 27885, 27908], [27885, 27909, 27908], [27885, 27886, 27910], [27885, 27910, 27909], [27886, 27887, 27910], [27887, 27911, 27910], [27887, 27888, 27912], [27887, 27912, 27911], [27888, 27889, 27912], [27889, 27913, 27912], [27889, 27890, 27914], [27889, 27914, 27913], [27890, 27891, 27914], [27891, 27915, 27914], [12558, 27892, 12687], [27892, 27916, 12687], [27892, 27893, 27917], [27892, 27917, 27916], [27893, 27894, 27917], [27894, 27918, 27917], [27894, 27895, 27919], [27894, 27919, 27918], [27895, 27896, 27919], [27896, 27920, 27919], [27896, 27897, 27921], 
[27896, 27921, 27920], [27897, 27898, 27921], [27898, 27922, 27921], [27898, 27899, 27923], [27898, 27923, 27922], [27899, 27900, 27923], [27900, 27924, 27923], [27900, 27901, 27925], [27900, 27925, 27924], [27901, 27902, 27925], [27902, 27926, 27925], [27902, 27903, 27927], [27902, 27927, 27926], [27903, 27904, 27927], [27904, 27928, 27927], [27904, 27905, 27929], [27904, 27929, 27928], [27905, 27906, 27929], [27906, 27930, 27929], [27906, 27907, 27931], [27906, 27931, 27930], [27907, 27908, 27931], [27908, 27932, 27931], [27908, 27909, 27933], [27908, 27933, 27932], [27909, 27910, 27933], [27910, 27934, 27933], [27910, 27911, 27935], [27910, 27935, 27934], [27911, 27912, 27935], [27912, 27936, 27935], [27912, 27913, 27937], [27912, 27937, 27936], [27913, 27914, 27937], [27914, 27938, 27937], [27914, 27915, 27939], [27914, 27939, 27938], [12687, 27916, 27940], [12687, 27940, 12816], [27916, 27917, 27940], [27917, 27941, 27940], [27917, 27918, 27942], [27917, 27942, 27941], [27918, 27919, 27942], [27919, 27943, 27942], [27919, 27920, 27944], [27919, 27944, 27943], [27920, 27921, 27944], [27921, 27945, 27944], [27921, 27922, 27946], [27921, 27946, 27945], [27922, 27923, 27946], [27923, 27947, 27946], [27923, 27924, 27948], [27923, 27948, 27947], [27924, 27925, 27948], [27925, 27949, 27948], [27925, 27926, 27950], [27925, 27950, 27949], [27926, 27927, 27950], [27927, 27951, 27950], [27927, 27928, 27952], [27927, 27952, 27951], [27928, 27929, 27952], [27929, 27953, 27952], [27929, 27930, 27954], [27929, 27954, 27953], [27930, 27931, 27954], [27931, 27955, 27954], [27931, 27932, 27956], [27931, 27956, 27955], [27932, 27933, 27956], [27933, 27957, 27956], [27933, 27934, 27958], [27933, 27958, 27957], [27934, 27935, 27958], [27935, 27959, 27958], [27935, 27936, 27960], [27935, 27960, 27959], [27936, 27937, 27960], [27937, 27961, 27960], [27937, 27938, 27962], [27937, 27962, 27961], [27938, 27939, 27962], [27939, 27963, 27962], [12816, 27940, 12945], [27940, 27964, 12945], [27940, 27941, 27965], [27940, 27965, 27964], [27941, 27942, 27965], [27942, 27966, 27965], [27942, 27943, 27967], [27942, 27967, 27966], [27943, 27944, 27967], [27944, 27968, 27967], [27944, 27945, 27969], [27944, 27969, 27968], [27945, 27946, 27969], [27946, 27970, 27969], [27946, 27947, 27971], [27946, 27971, 27970], [27947, 27948, 27971], [27948, 27972, 27971], [27948, 27949, 27973], [27948, 27973, 27972], [27949, 27950, 27973], [27950, 27974, 27973], [27950, 27951, 27975], [27950, 27975, 27974], [27951, 27952, 27975], [27952, 27976, 27975], [27952, 27953, 27977], [27952, 27977, 27976], [27953, 27954, 27977], [27954, 27978, 27977], [27954, 27955, 27979], [27954, 27979, 27978], [27955, 27956, 27979], [27956, 27980, 27979], [27956, 27957, 27981], [27956, 27981, 27980], [27957, 27958, 27981], [27958, 27982, 27981], [27958, 27959, 27983], [27958, 27983, 27982], [27959, 27960, 27983], [27960, 27984, 27983], [27960, 27961, 27985], [27960, 27985, 27984], [27961, 27962, 27985], [27962, 27986, 27985], [27962, 27963, 27987], [27962, 27987, 27986], [12945, 27964, 27988], [12945, 27988, 13074], [27964, 27965, 27988], [27965, 27989, 27988], [27965, 27966, 27990], [27965, 27990, 27989], [27966, 27967, 27990], [27967, 27991, 27990], [27967, 27968, 27992], [27967, 27992, 27991], [27968, 27969, 27992], [27969, 27993, 27992], [27969, 27970, 27994], [27969, 27994, 27993], [27970, 27971, 27994], [27971, 27995, 27994], [27971, 27972, 27996], [27971, 27996, 27995], [27972, 27973, 27996], [27973, 27997, 27996], [27973, 27974, 27998], [27973, 
27998, 27997], [27974, 27975, 27998], [27975, 27999, 27998], [27975, 27976, 28000], [27975, 28000, 27999], [27976, 27977, 28000], [27977, 28001, 28000], [27977, 27978, 28002], [27977, 28002, 28001], [27978, 27979, 28002], [27979, 28003, 28002], [27979, 27980, 28004], [27979, 28004, 28003], [27980, 27981, 28004], [27981, 28005, 28004], [27981, 27982, 28006], [27981, 28006, 28005], [27982, 27983, 28006], [27983, 28007, 28006], [27983, 27984, 28008], [27983, 28008, 28007], [27984, 27985, 28008], [27985, 28009, 28008], [27985, 27986, 28010], [27985, 28010, 28009], [27986, 27987, 28010], [27987, 28011, 28010], [13074, 27988, 13203], [27988, 28012, 13203], [27988, 27989, 28013], [27988, 28013, 28012], [27989, 27990, 28013], [27990, 28014, 28013], [27990, 27991, 28015], [27990, 28015, 28014], [27991, 27992, 28015], [27992, 28016, 28015], [27992, 27993, 28017], [27992, 28017, 28016], [27993, 27994, 28017], [27994, 28018, 28017], [27994, 27995, 28019], [27994, 28019, 28018], [27995, 27996, 28019], [27996, 28020, 28019], [27996, 27997, 28021], [27996, 28021, 28020], [27997, 27998, 28021], [27998, 28022, 28021], [27998, 27999, 28023], [27998, 28023, 28022], [27999, 28000, 28023], [28000, 28024, 28023], [28000, 28001, 28025], [28000, 28025, 28024], [28001, 28002, 28025], [28002, 28026, 28025], [28002, 28003, 28027], [28002, 28027, 28026], [28003, 28004, 28027], [28004, 28028, 28027], [28004, 28005, 28029], [28004, 28029, 28028], [28005, 28006, 28029], [28006, 28030, 28029], [28006, 28007, 28031], [28006, 28031, 28030], [28007, 28008, 28031], [28008, 28032, 28031], [28008, 28009, 28033], [28008, 28033, 28032], [28009, 28010, 28033], [28010, 28034, 28033], [28010, 28011, 28035], [28010, 28035, 28034], [13203, 28012, 28036], [13203, 28036, 13332], [28012, 28013, 28036], [28013, 28037, 28036], [28013, 28014, 28038], [28013, 28038, 28037], [28014, 28015, 28038], [28015, 28039, 28038], [28015, 28016, 28040], [28015, 28040, 28039], [28016, 28017, 28040], [28017, 28041, 28040], [28017, 28018, 28042], [28017, 28042, 28041], [28018, 28019, 28042], [28019, 28043, 28042], [28019, 28020, 28044], [28019, 28044, 28043], [28020, 28021, 28044], [28021, 28045, 28044], [28021, 28022, 28046], [28021, 28046, 28045], [28022, 28023, 28046], [28023, 28047, 28046], [28023, 28024, 28048], [28023, 28048, 28047], [28024, 28025, 28048], [28025, 28049, 28048], [28025, 28026, 28050], [28025, 28050, 28049], [28026, 28027, 28050], [28027, 28051, 28050], [28027, 28028, 28052], [28027, 28052, 28051], [28028, 28029, 28052], [28029, 28053, 28052], [28029, 28030, 28054], [28029, 28054, 28053], [28030, 28031, 28054], [28031, 28055, 28054], [28031, 28032, 28056], [28031, 28056, 28055], [28032, 28033, 28056], [28033, 28057, 28056], [28033, 28034, 28058], [28033, 28058, 28057], [28034, 28035, 28058], [28035, 28059, 28058], [13332, 28036, 13461], [28036, 28060, 13461], [28036, 28037, 28061], [28036, 28061, 28060], [28037, 28038, 28061], [28038, 28062, 28061], [28038, 28039, 28063], [28038, 28063, 28062], [28039, 28040, 28063], [28040, 28064, 28063], [28040, 28041, 28065], [28040, 28065, 28064], [28041, 28042, 28065], [28042, 28066, 28065], [28042, 28043, 28067], [28042, 28067, 28066], [28043, 28044, 28067], [28044, 28068, 28067], [28044, 28045, 28069], [28044, 28069, 28068], [28045, 28046, 28069], [28046, 28070, 28069], [28046, 28047, 28071], [28046, 28071, 28070], [28047, 28048, 28071], [28048, 28072, 28071], [28048, 28049, 28073], [28048, 28073, 28072], [28049, 28050, 28073], [28050, 28074, 28073], [28050, 28051, 28075], [28050, 28075, 
28074], [28051, 28052, 28075], [28052, 28076, 28075], [28052, 28053, 28077], [28052, 28077, 28076], [28053, 28054, 28077], [28054, 28078, 28077], [28054, 28055, 28079], [28054, 28079, 28078], [28055, 28056, 28079], [28056, 28080, 28079], [28056, 28057, 28081], [28056, 28081, 28080], [28057, 28058, 28081], [28058, 28082, 28081], [28058, 28059, 28083], [28058, 28083, 28082], [13461, 28060, 28084], [13461, 28084, 13590], [28060, 28061, 28084], [28061, 28085, 28084], [28061, 28062, 28086], [28061, 28086, 28085], [28062, 28063, 28086], [28063, 28087, 28086], [28063, 28064, 28088], [28063, 28088, 28087], [28064, 28065, 28088], [28065, 28089, 28088], [28065, 28066, 28090], [28065, 28090, 28089], [28066, 28067, 28090], [28067, 28091, 28090], [28067, 28068, 28092], [28067, 28092, 28091], [28068, 28069, 28092], [28069, 28093, 28092], [28069, 28070, 28094], [28069, 28094, 28093], [28070, 28071, 28094], [28071, 28095, 28094], [28071, 28072, 28096], [28071, 28096, 28095], [28072, 28073, 28096], [28073, 28097, 28096], [28073, 28074, 28098], [28073, 28098, 28097], [28074, 28075, 28098], [28075, 28099, 28098], [28075, 28076, 28100], [28075, 28100, 28099], [28076, 28077, 28100], [28077, 28101, 28100], [28077, 28078, 28102], [28077, 28102, 28101], [28078, 28079, 28102], [28079, 28103, 28102], [28079, 28080, 28104], [28079, 28104, 28103], [28080, 28081, 28104], [28081, 28105, 28104], [28081, 28082, 28106], [28081, 28106, 28105], [28082, 28083, 28106], [28083, 28107, 28106], [13590, 28084, 13719], [28084, 28108, 13719], [28084, 28085, 28109], [28084, 28109, 28108], [28085, 28086, 28109], [28086, 28110, 28109], [28086, 28087, 28111], [28086, 28111, 28110], [28087, 28088, 28111], [28088, 28112, 28111], [28088, 28089, 28113], [28088, 28113, 28112], [28089, 28090, 28113], [28090, 28114, 28113], [28090, 28091, 28115], [28090, 28115, 28114], [28091, 28092, 28115], [28092, 28116, 28115], [28092, 28093, 28117], [28092, 28117, 28116], [28093, 28094, 28117], [28094, 28118, 28117], [28094, 28095, 28119], [28094, 28119, 28118], [28095, 28096, 28119], [28096, 28120, 28119], [28096, 28097, 28121], [28096, 28121, 28120], [28097, 28098, 28121], [28098, 28122, 28121], [28098, 28099, 28123], [28098, 28123, 28122], [28099, 28100, 28123], [28100, 28124, 28123], [28100, 28101, 28125], [28100, 28125, 28124], [28101, 28102, 28125], [28102, 28126, 28125], [28102, 28103, 28127], [28102, 28127, 28126], [28103, 28104, 28127], [28104, 28128, 28127], [28104, 28105, 28129], [28104, 28129, 28128], [28105, 28106, 28129], [28106, 28130, 28129], [28106, 28107, 28131], [28106, 28131, 28130], [13719, 28108, 28132], [13719, 28132, 13848], [28108, 28109, 28132], [28109, 28133, 28132], [28109, 28110, 28134], [28109, 28134, 28133], [28110, 28111, 28134], [28111, 28135, 28134], [28111, 28112, 28136], [28111, 28136, 28135], [28112, 28113, 28136], [28113, 28137, 28136], [28113, 28114, 28138], [28113, 28138, 28137], [28114, 28115, 28138], [28115, 28139, 28138], [28115, 28116, 28140], [28115, 28140, 28139], [28116, 28117, 28140], [28117, 28141, 28140], [28117, 28118, 28142], [28117, 28142, 28141], [28118, 28119, 28142], [28119, 28143, 28142], [28119, 28120, 28144], [28119, 28144, 28143], [28120, 28121, 28144], [28121, 28145, 28144], [28121, 28122, 28146], [28121, 28146, 28145], [28122, 28123, 28146], [28123, 28147, 28146], [28123, 28124, 28148], [28123, 28148, 28147], [28124, 28125, 28148], [28125, 28149, 28148], [28125, 28126, 28150], [28125, 28150, 28149], [28126, 28127, 28150], [28127, 28151, 28150], [28127, 28128, 28152], [28127, 28152, 28151], 
[28128, 28129, 28152], [28129, 28153, 28152], [28129, 28130, 28154], [28129, 28154, 28153], [28130, 28131, 28154], [28131, 28155, 28154], [13848, 28132, 13977], [28132, 28156, 13977], [28132, 28133, 28157], [28132, 28157, 28156], [28133, 28134, 28157], [28134, 28158, 28157], [28134, 28135, 28159], [28134, 28159, 28158], [28135, 28136, 28159], [28136, 28160, 28159], [28136, 28137, 28161], [28136, 28161, 28160], [28137, 28138, 28161], [28138, 28162, 28161], [28138, 28139, 28163], [28138, 28163, 28162], [28139, 28140, 28163], [28140, 28164, 28163], [28140, 28141, 28165], [28140, 28165, 28164], [28141, 28142, 28165], [28142, 28166, 28165], [28142, 28143, 28167], [28142, 28167, 28166], [28143, 28144, 28167], [28144, 28168, 28167], [28144, 28145, 28169], [28144, 28169, 28168], [28145, 28146, 28169], [28146, 28170, 28169], [28146, 28147, 28171], [28146, 28171, 28170], [28147, 28148, 28171], [28148, 28172, 28171], [28148, 28149, 28173], [28148, 28173, 28172], [28149, 28150, 28173], [28150, 28174, 28173], [28150, 28151, 28175], [28150, 28175, 28174], [28151, 28152, 28175], [28152, 28176, 28175], [28152, 28153, 28177], [28152, 28177, 28176], [28153, 28154, 28177], [28154, 28178, 28177], [28154, 28155, 28179], [28154, 28179, 28178], [13977, 28156, 28180], [13977, 28180, 14106], [28156, 28157, 28180], [28157, 28181, 28180], [28157, 28158, 28182], [28157, 28182, 28181], [28158, 28159, 28182], [28159, 28183, 28182], [28159, 28160, 28184], [28159, 28184, 28183], [28160, 28161, 28184], [28161, 28185, 28184], [28161, 28162, 28186], [28161, 28186, 28185], [28162, 28163, 28186], [28163, 28187, 28186], [28163, 28164, 28188], [28163, 28188, 28187], [28164, 28165, 28188], [28165, 28189, 28188], [28165, 28166, 28190], [28165, 28190, 28189], [28166, 28167, 28190], [28167, 28191, 28190], [28167, 28168, 28192], [28167, 28192, 28191], [28168, 28169, 28192], [28169, 28193, 28192], [28169, 28170, 28194], [28169, 28194, 28193], [28170, 28171, 28194], [28171, 28195, 28194], [28171, 28172, 28196], [28171, 28196, 28195], [28172, 28173, 28196], [28173, 28197, 28196], [28173, 28174, 28198], [28173, 28198, 28197], [28174, 28175, 28198], [28175, 28199, 28198], [28175, 28176, 28200], [28175, 28200, 28199], [28176, 28177, 28200], [28177, 28201, 28200], [28177, 28178, 28202], [28177, 28202, 28201], [28178, 28179, 28202], [28179, 28203, 28202], [14106, 28180, 14235], [28180, 28204, 14235], [28180, 28181, 28205], [28180, 28205, 28204], [28181, 28182, 28205], [28182, 28206, 28205], [28182, 28183, 28207], [28182, 28207, 28206], [28183, 28184, 28207], [28184, 28208, 28207], [28184, 28185, 28209], [28184, 28209, 28208], [28185, 28186, 28209], [28186, 28210, 28209], [28186, 28187, 28211], [28186, 28211, 28210], [28187, 28188, 28211], [28188, 28212, 28211], [28188, 28189, 28213], [28188, 28213, 28212], [28189, 28190, 28213], [28190, 28214, 28213], [28190, 28191, 28215], [28190, 28215, 28214], [28191, 28192, 28215], [28192, 28216, 28215], [28192, 28193, 28217], [28192, 28217, 28216], [28193, 28194, 28217], [28194, 28218, 28217], [28194, 28195, 28219], [28194, 28219, 28218], [28195, 28196, 28219], [28196, 28220, 28219], [28196, 28197, 28221], [28196, 28221, 28220], [28197, 28198, 28221], [28198, 28222, 28221], [28198, 28199, 28223], [28198, 28223, 28222], [28199, 28200, 28223], [28200, 28224, 28223], [28200, 28201, 28225], [28200, 28225, 28224], [28201, 28202, 28225], [28202, 28226, 28225], [28202, 28203, 28227], [28202, 28227, 28226], [14235, 28204, 28228], [14235, 28228, 14364], [28204, 28205, 28228], [28205, 28229, 28228], [28205, 
28206, 28230], [28205, 28230, 28229], [28206, 28207, 28230], [28207, 28231, 28230], [28207, 28208, 28232], [28207, 28232, 28231], [28208, 28209, 28232], [28209, 28233, 28232], [28209, 28210, 28234], [28209, 28234, 28233], [28210, 28211, 28234], [28211, 28235, 28234], [28211, 28212, 28236], [28211, 28236, 28235], [28212, 28213, 28236], [28213, 28237, 28236], [28213, 28214, 28238], [28213, 28238, 28237], [28214, 28215, 28238], [28215, 28239, 28238], [28215, 28216, 28240], [28215, 28240, 28239], [28216, 28217, 28240], [28217, 28241, 28240], [28217, 28218, 28242], [28217, 28242, 28241], [28218, 28219, 28242], [28219, 28243, 28242], [28219, 28220, 28244], [28219, 28244, 28243], [28220, 28221, 28244], [28221, 28245, 28244], [28221, 28222, 28246], [28221, 28246, 28245], [28222, 28223, 28246], [28223, 28247, 28246], [28223, 28224, 28248], [28223, 28248, 28247], [28224, 28225, 28248], [28225, 28249, 28248], [28225, 28226, 28250], [28225, 28250, 28249], [28226, 28227, 28250], [28227, 28251, 28250], [14364, 28228, 14493], [28228, 28252, 14493], [28228, 28229, 28253], [28228, 28253, 28252], [28229, 28230, 28253], [28230, 28254, 28253], [28230, 28231, 28255], [28230, 28255, 28254], [28231, 28232, 28255], [28232, 28256, 28255], [28232, 28233, 28257], [28232, 28257, 28256], [28233, 28234, 28257], [28234, 28258, 28257], [28234, 28235, 28259], [28234, 28259, 28258], [28235, 28236, 28259], [28236, 28260, 28259], [28236, 28237, 28261], [28236, 28261, 28260], [28237, 28238, 28261], [28238, 28262, 28261], [28238, 28239, 28263], [28238, 28263, 28262], [28239, 28240, 28263], [28240, 28264, 28263], [28240, 28241, 28265], [28240, 28265, 28264], [28241, 28242, 28265], [28242, 28266, 28265], [28242, 28243, 28267], [28242, 28267, 28266], [28243, 28244, 28267], [28244, 28268, 28267], [28244, 28245, 28269], [28244, 28269, 28268], [28245, 28246, 28269], [28246, 28270, 28269], [28246, 28247, 28271], [28246, 28271, 28270], [28247, 28248, 28271], [28248, 28272, 28271], [28248, 28249, 28273], [28248, 28273, 28272], [28249, 28250, 28273], [28250, 28274, 28273], [28250, 28251, 28275], [28250, 28275, 28274], [14493, 28252, 28276], [14493, 28276, 14622], [28252, 28253, 28276], [28253, 28277, 28276], [28253, 28254, 28278], [28253, 28278, 28277], [28254, 28255, 28278], [28255, 28279, 28278], [28255, 28256, 28280], [28255, 28280, 28279], [28256, 28257, 28280], [28257, 28281, 28280], [28257, 28258, 28282], [28257, 28282, 28281], [28258, 28259, 28282], [28259, 28283, 28282], [28259, 28260, 28284], [28259, 28284, 28283], [28260, 28261, 28284], [28261, 28285, 28284], [28261, 28262, 28286], [28261, 28286, 28285], [28262, 28263, 28286], [28263, 28287, 28286], [28263, 28264, 28288], [28263, 28288, 28287], [28264, 28265, 28288], [28265, 28289, 28288], [28265, 28266, 28290], [28265, 28290, 28289], [28266, 28267, 28290], [28267, 28291, 28290], [28267, 28268, 28292], [28267, 28292, 28291], [28268, 28269, 28292], [28269, 28293, 28292], [28269, 28270, 28294], [28269, 28294, 28293], [28270, 28271, 28294], [28271, 28295, 28294], [28271, 28272, 28296], [28271, 28296, 28295], [28272, 28273, 28296], [28273, 28297, 28296], [28273, 28274, 28298], [28273, 28298, 28297], [28274, 28275, 28298], [28275, 28299, 28298], [14622, 28276, 14751], [28276, 28300, 14751], [28276, 28277, 28301], [28276, 28301, 28300], [28277, 28278, 28301], [28278, 28302, 28301], [28278, 28279, 28303], [28278, 28303, 28302], [28279, 28280, 28303], [28280, 28304, 28303], [28280, 28281, 28305], [28280, 28305, 28304], [28281, 28282, 28305], [28282, 28306, 28305], [28282, 28283, 
28307], [28282, 28307, 28306], [28283, 28284, 28307], [28284, 28308, 28307], [28284, 28285, 28309], [28284, 28309, 28308], [28285, 28286, 28309], [28286, 28310, 28309], [28286, 28287, 28311], [28286, 28311, 28310], [28287, 28288, 28311], [28288, 28312, 28311], [28288, 28289, 28313], [28288, 28313, 28312], [28289, 28290, 28313], [28290, 28314, 28313], [28290, 28291, 28315], [28290, 28315, 28314], [28291, 28292, 28315], [28292, 28316, 28315], [28292, 28293, 28317], [28292, 28317, 28316], [28293, 28294, 28317], [28294, 28318, 28317], [28294, 28295, 28319], [28294, 28319, 28318], [28295, 28296, 28319], [28296, 28320, 28319], [28296, 28297, 28321], [28296, 28321, 28320], [28297, 28298, 28321], [28298, 28322, 28321], [28298, 28299, 28323], [28298, 28323, 28322], [14751, 28300, 28324], [14751, 28324, 14880], [28300, 28301, 28324], [28301, 28325, 28324], [28301, 28302, 28326], [28301, 28326, 28325], [28302, 28303, 28326], [28303, 28327, 28326], [28303, 28304, 28328], [28303, 28328, 28327], [28304, 28305, 28328], [28305, 28329, 28328], [28305, 28306, 28330], [28305, 28330, 28329], [28306, 28307, 28330], [28307, 28331, 28330], [28307, 28308, 28332], [28307, 28332, 28331], [28308, 28309, 28332], [28309, 28333, 28332], [28309, 28310, 28334], [28309, 28334, 28333], [28310, 28311, 28334], [28311, 28335, 28334], [28311, 28312, 28336], [28311, 28336, 28335], [28312, 28313, 28336], [28313, 28337, 28336], [28313, 28314, 28338], [28313, 28338, 28337], [28314, 28315, 28338], [28315, 28339, 28338], [28315, 28316, 28340], [28315, 28340, 28339], [28316, 28317, 28340], [28317, 28341, 28340], [28317, 28318, 28342], [28317, 28342, 28341], [28318, 28319, 28342], [28319, 28343, 28342], [28319, 28320, 28344], [28319, 28344, 28343], [28320, 28321, 28344], [28321, 28345, 28344], [28321, 28322, 28346], [28321, 28346, 28345], [28322, 28323, 28346], [28323, 28347, 28346], [14880, 28324, 15009], [28324, 28348, 15009], [28324, 28325, 28349], [28324, 28349, 28348], [28325, 28326, 28349], [28326, 28350, 28349], [28326, 28327, 28351], [28326, 28351, 28350], [28327, 28328, 28351], [28328, 28352, 28351], [28328, 28329, 28353], [28328, 28353, 28352], [28329, 28330, 28353], [28330, 28354, 28353], [28330, 28331, 28355], [28330, 28355, 28354], [28331, 28332, 28355], [28332, 28356, 28355], [28332, 28333, 28357], [28332, 28357, 28356], [28333, 28334, 28357], [28334, 28358, 28357], [28334, 28335, 28359], [28334, 28359, 28358], [28335, 28336, 28359], [28336, 28360, 28359], [28336, 28337, 28361], [28336, 28361, 28360], [28337, 28338, 28361], [28338, 28362, 28361], [28338, 28339, 28363], [28338, 28363, 28362], [28339, 28340, 28363], [28340, 28364, 28363], [28340, 28341, 28365], [28340, 28365, 28364], [28341, 28342, 28365], [28342, 28366, 28365], [28342, 28343, 28367], [28342, 28367, 28366], [28343, 28344, 28367], [28344, 28368, 28367], [28344, 28345, 28369], [28344, 28369, 28368], [28345, 28346, 28369], [28346, 28370, 28369], [28346, 28347, 28371], [28346, 28371, 28370], [15009, 28348, 28372], [15009, 28372, 15138], [28348, 28349, 28372], [28349, 28373, 28372], [28349, 28350, 28374], [28349, 28374, 28373], [28350, 28351, 28374], [28351, 28375, 28374], [28351, 28352, 28376], [28351, 28376, 28375], [28352, 28353, 28376], [28353, 28377, 28376], [28353, 28354, 28378], [28353, 28378, 28377], [28354, 28355, 28378], [28355, 28379, 28378], [28355, 28356, 28380], [28355, 28380, 28379], [28356, 28357, 28380], [28357, 28381, 28380], [28357, 28358, 28382], [28357, 28382, 28381], [28358, 28359, 28382], [28359, 28383, 28382], [28359, 28360, 28384], 
[28359, 28384, 28383], [28360, 28361, 28384], [28361, 28385, 28384], [28361, 28362, 28386], [28361, 28386, 28385], [28362, 28363, 28386], [28363, 28387, 28386], [28363, 28364, 28388], [28363, 28388, 28387], [28364, 28365, 28388], [28365, 28389, 28388], [28365, 28366, 28390], [28365, 28390, 28389], [28366, 28367, 28390], [28367, 28391, 28390], [28367, 28368, 28392], [28367, 28392, 28391], [28368, 28369, 28392], [28369, 28393, 28392], [28369, 28370, 28394], [28369, 28394, 28393], [28370, 28371, 28394], [28371, 28395, 28394], [15138, 28372, 15267], [28372, 28396, 15267], [28372, 28373, 28397], [28372, 28397, 28396], [28373, 28374, 28397], [28374, 28398, 28397], [28374, 28375, 28399], [28374, 28399, 28398], [28375, 28376, 28399], [28376, 28400, 28399], [28376, 28377, 28401], [28376, 28401, 28400], [28377, 28378, 28401], [28378, 28402, 28401], [28378, 28379, 28403], [28378, 28403, 28402], [28379, 28380, 28403], [28380, 28404, 28403], [28380, 28381, 28405], [28380, 28405, 28404], [28381, 28382, 28405], [28382, 28406, 28405], [28382, 28383, 28407], [28382, 28407, 28406], [28383, 28384, 28407], [28384, 28408, 28407], [28384, 28385, 28409], [28384, 28409, 28408], [28385, 28386, 28409], [28386, 28410, 28409], [28386, 28387, 28411], [28386, 28411, 28410], [28387, 28388, 28411], [28388, 28412, 28411], [28388, 28389, 28413], [28388, 28413, 28412], [28389, 28390, 28413], [28390, 28414, 28413], [28390, 28391, 28415], [28390, 28415, 28414], [28391, 28392, 28415], [28392, 28416, 28415], [28392, 28393, 28417], [28392, 28417, 28416], [28393, 28394, 28417], [28394, 28418, 28417], [28394, 28395, 28419], [28394, 28419, 28418], [15267, 28396, 28420], [15267, 28420, 15396], [28396, 28397, 28420], [28397, 28421, 28420], [28397, 28398, 28422], [28397, 28422, 28421], [28398, 28399, 28422], [28399, 28423, 28422], [28399, 28400, 28424], [28399, 28424, 28423], [28400, 28401, 28424], [28401, 28425, 28424], [28401, 28402, 28426], [28401, 28426, 28425], [28402, 28403, 28426], [28403, 28427, 28426], [28403, 28404, 28428], [28403, 28428, 28427], [28404, 28405, 28428], [28405, 28429, 28428], [28405, 28406, 28430], [28405, 28430, 28429], [28406, 28407, 28430], [28407, 28431, 28430], [28407, 28408, 28432], [28407, 28432, 28431], [28408, 28409, 28432], [28409, 28433, 28432], [28409, 28410, 28434], [28409, 28434, 28433], [28410, 28411, 28434], [28411, 28435, 28434], [28411, 28412, 28436], [28411, 28436, 28435], [28412, 28413, 28436], [28413, 28437, 28436], [28413, 28414, 28438], [28413, 28438, 28437], [28414, 28415, 28438], [28415, 28439, 28438], [28415, 28416, 28440], [28415, 28440, 28439], [28416, 28417, 28440], [28417, 28441, 28440], [28417, 28418, 28442], [28417, 28442, 28441], [28418, 28419, 28442], [28419, 28443, 28442], [15396, 28420, 15525], [28420, 28444, 15525], [28420, 28421, 28445], [28420, 28445, 28444], [28421, 28422, 28445], [28422, 28446, 28445], [28422, 28423, 28447], [28422, 28447, 28446], [28423, 28424, 28447], [28424, 28448, 28447], [28424, 28425, 28449], [28424, 28449, 28448], [28425, 28426, 28449], [28426, 28450, 28449], [28426, 28427, 28451], [28426, 28451, 28450], [28427, 28428, 28451], [28428, 28452, 28451], [28428, 28429, 28453], [28428, 28453, 28452], [28429, 28430, 28453], [28430, 28454, 28453], [28430, 28431, 28455], [28430, 28455, 28454], [28431, 28432, 28455], [28432, 28456, 28455], [28432, 28433, 28457], [28432, 28457, 28456], [28433, 28434, 28457], [28434, 28458, 28457], [28434, 28435, 28459], [28434, 28459, 28458], [28435, 28436, 28459], [28436, 28460, 28459], [28436, 28437, 28461], [28436, 
28461, 28460], [28437, 28438, 28461], [28438, 28462, 28461], [28438, 28439, 28463], [28438, 28463, 28462], [28439, 28440, 28463], [28440, 28464, 28463], [28440, 28441, 28465], [28440, 28465, 28464], [28441, 28442, 28465], [28442, 28466, 28465], [28442, 28443, 28467], [28442, 28467, 28466], [15525, 28444, 28468], [15525, 28468, 15654], [28444, 28445, 28468], [28445, 28469, 28468], [28445, 28446, 28470], [28445, 28470, 28469], [28446, 28447, 28470], [28447, 28471, 28470], [28447, 28448, 28472], [28447, 28472, 28471], [28448, 28449, 28472], [28449, 28473, 28472], [28449, 28450, 28474], [28449, 28474, 28473], [28450, 28451, 28474], [28451, 28475, 28474], [28451, 28452, 28476], [28451, 28476, 28475], [28452, 28453, 28476], [28453, 28477, 28476], [28453, 28454, 28478], [28453, 28478, 28477], [28454, 28455, 28478], [28455, 28479, 28478], [28455, 28456, 28480], [28455, 28480, 28479], [28456, 28457, 28480], [28457, 28481, 28480], [28457, 28458, 28482], [28457, 28482, 28481], [28458, 28459, 28482], [28459, 28483, 28482], [28459, 28460, 28484], [28459, 28484, 28483], [28460, 28461, 28484], [28461, 28485, 28484], [28461, 28462, 28486], [28461, 28486, 28485], [28462, 28463, 28486], [28463, 28487, 28486], [28463, 28464, 28488], [28463, 28488, 28487], [28464, 28465, 28488], [28465, 28489, 28488], [28465, 28466, 28490], [28465, 28490, 28489], [28466, 28467, 28490], [28467, 28491, 28490], [15654, 28468, 15783], [28468, 28492, 15783], [28468, 28469, 28493], [28468, 28493, 28492], [28469, 28470, 28493], [28470, 28494, 28493], [28470, 28471, 28495], [28470, 28495, 28494], [28471, 28472, 28495], [28472, 28496, 28495], [28472, 28473, 28497], [28472, 28497, 28496], [28473, 28474, 28497], [28474, 28498, 28497], [28474, 28475, 28499], [28474, 28499, 28498], [28475, 28476, 28499], [28476, 28500, 28499], [28476, 28477, 28501], [28476, 28501, 28500], [28477, 28478, 28501], [28478, 28502, 28501], [28478, 28479, 28503], [28478, 28503, 28502], [28479, 28480, 28503], [28480, 28504, 28503], [28480, 28481, 28505], [28480, 28505, 28504], [28481, 28482, 28505], [28482, 28506, 28505], [28482, 28483, 28507], [28482, 28507, 28506], [28483, 28484, 28507], [28484, 28508, 28507], [28484, 28485, 28509], [28484, 28509, 28508], [28485, 28486, 28509], [28486, 28510, 28509], [28486, 28487, 28511], [28486, 28511, 28510], [28487, 28488, 28511], [28488, 28512, 28511], [28488, 28489, 28513], [28488, 28513, 28512], [28489, 28490, 28513], [28490, 28514, 28513], [28490, 28491, 28515], [28490, 28515, 28514], [15783, 28492, 28516], [15783, 28516, 15912], [28492, 28493, 28516], [28493, 28517, 28516], [28493, 28494, 28518], [28493, 28518, 28517], [28494, 28495, 28518], [28495, 28519, 28518], [28495, 28496, 28520], [28495, 28520, 28519], [28496, 28497, 28520], [28497, 28521, 28520], [28497, 28498, 28522], [28497, 28522, 28521], [28498, 28499, 28522], [28499, 28523, 28522], [28499, 28500, 28524], [28499, 28524, 28523], [28500, 28501, 28524], [28501, 28525, 28524], [28501, 28502, 28526], [28501, 28526, 28525], [28502, 28503, 28526], [28503, 28527, 28526], [28503, 28504, 28528], [28503, 28528, 28527], [28504, 28505, 28528], [28505, 28529, 28528], [28505, 28506, 28530], [28505, 28530, 28529], [28506, 28507, 28530], [28507, 28531, 28530], [28507, 28508, 28532], [28507, 28532, 28531], [28508, 28509, 28532], [28509, 28533, 28532], [28509, 28510, 28534], [28509, 28534, 28533], [28510, 28511, 28534], [28511, 28535, 28534], [28511, 28512, 28536], [28511, 28536, 28535], [28512, 28513, 28536], [28513, 28537, 28536], [28513, 28514, 28538], [28513, 28538, 
28537], [28514, 28515, 28538], [28515, 28539, 28538], [15912, 28516, 16041], [28516, 28540, 16041], [28516, 28517, 28541], [28516, 28541, 28540], [28517, 28518, 28541], [28518, 28542, 28541], [28518, 28519, 28543], [28518, 28543, 28542], [28519, 28520, 28543], [28520, 28544, 28543], [28520, 28521, 28545], [28520, 28545, 28544], [28521, 28522, 28545], [28522, 28546, 28545], [28522, 28523, 28547], [28522, 28547, 28546], [28523, 28524, 28547], [28524, 28548, 28547], [28524, 28525, 28549], [28524, 28549, 28548], [28525, 28526, 28549], [28526, 28550, 28549], [28526, 28527, 28551], [28526, 28551, 28550], [28527, 28528, 28551], [28528, 28552, 28551], [28528, 28529, 28553], [28528, 28553, 28552], [28529, 28530, 28553], [28530, 28554, 28553], [28530, 28531, 28555], [28530, 28555, 28554], [28531, 28532, 28555], [28532, 28556, 28555], [28532, 28533, 28557], [28532, 28557, 28556], [28533, 28534, 28557], [28534, 28558, 28557], [28534, 28535, 28559], [28534, 28559, 28558], [28535, 28536, 28559], [28536, 28560, 28559], [28536, 28537, 28561], [28536, 28561, 28560], [28537, 28538, 28561], [28538, 28562, 28561], [28538, 28539, 28563], [28538, 28563, 28562], [16041, 28540, 28564], [16041, 28564, 16170], [28540, 28541, 28564], [28541, 28565, 28564], [28541, 28542, 28566], [28541, 28566, 28565], [28542, 28543, 28566], [28543, 28567, 28566], [28543, 28544, 28568], [28543, 28568, 28567], [28544, 28545, 28568], [28545, 28569, 28568], [28545, 28546, 28570], [28545, 28570, 28569], [28546, 28547, 28570], [28547, 28571, 28570], [28547, 28548, 28572], [28547, 28572, 28571], [28548, 28549, 28572], [28549, 28573, 28572], [28549, 28550, 28574], [28549, 28574, 28573], [28550, 28551, 28574], [28551, 28575, 28574], [28551, 28552, 28576], [28551, 28576, 28575], [28552, 28553, 28576], [28553, 28577, 28576], [28553, 28554, 28578], [28553, 28578, 28577], [28554, 28555, 28578], [28555, 28579, 28578], [28555, 28556, 28580], [28555, 28580, 28579], [28556, 28557, 28580], [28557, 28581, 28580], [28557, 28558, 28582], [28557, 28582, 28581], [28558, 28559, 28582], [28559, 28583, 28582], [28559, 28560, 28584], [28559, 28584, 28583], [28560, 28561, 28584], [28561, 28585, 28584], [28561, 28562, 28586], [28561, 28586, 28585], [28562, 28563, 28586], [28563, 28587, 28586], [16170, 28564, 19395], [28564, 19524, 19395], [28564, 28565, 19653], [28564, 19653, 19524], [28565, 28566, 19653], [28566, 19782, 19653], [28566, 28567, 19911], [28566, 19911, 19782], [28567, 28568, 19911], [28568, 20040, 19911], [28568, 28569, 20169], [28568, 20169, 20040], [28569, 28570, 20169], [28570, 20298, 20169], [28570, 28571, 20427], [28570, 20427, 20298], [28571, 28572, 20427], [28572, 20556, 20427], [28572, 28573, 20685], [28572, 20685, 20556], [28573, 28574, 20685], [28574, 20814, 20685], [28574, 28575, 20943], [28574, 20943, 20814], [28575, 28576, 20943], [28576, 21072, 20943], [28576, 28577, 21201], [28576, 21201, 21072], [28577, 28578, 21201], [28578, 21330, 21201], [28578, 28579, 21459], [28578, 21459, 21330], [28579, 28580, 21459], [28580, 21588, 21459], [28580, 28581, 21717], [28580, 21717, 21588], [28581, 28582, 21717], [28582, 21846, 21717], [28582, 28583, 21975], [28582, 21975, 21846], [28583, 28584, 21975], [28584, 22104, 21975], [28584, 28585, 22233], [28584, 22233, 22104], [28585, 28586, 22233], [28586, 22362, 22233], [28586, 28587, 22491], [28586, 22491, 22362]], + "@type": "VertexPropertyPerTriangle" + }, + "@type": "TextureMappedProperty" +} \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/autils/get_one_frame.py 
b/NeuralVoicePuppetry/neural-code/autils/get_one_frame.py
new file mode 100644
index 0000000..0838f73
--- /dev/null
+++ b/NeuralVoicePuppetry/neural-code/autils/get_one_frame.py
@@ -0,0 +1,30 @@
+# SPDX-License-Identifier: MIT
+# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details
+
+import os
+
+from moviepy.editor import VideoFileClip
+
+dataset = 'SRF_anchor'
+data_path = f'/home/alberto/data/videosynth/{dataset}'
+target_base = f'/home/alberto/data/videosynth/{dataset}_frames'
+
+for sub in ['Close', 'Halbtotale', 'Totale']:
+    os.makedirs(os.path.join(target_base, sub), exist_ok=True)
+
+video_list = ['Close/355_9105.mp4', 'Close/355_9106.mp4',
+              'Halbtotale/355_9414.mp4', 'Halbtotale/355_9415.mp4',
+              'Totale/355_7615.mp4', 'Totale/355_7616.mp4']
+
+# Timestamp (in seconds) at which to grab one frame from each video
+frame_t = 200
+
+for video in video_list:
+    input_file = os.path.join(data_path, video)
+
+    # Save the frame at t=frame_t seconds as JPEG
+    clip = VideoFileClip(input_file)
+    clip.save_frame(f'{target_base}/{video[:-4]}.jpeg', t=frame_t)
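Note that moviepy's save_frame expects a timestamp in seconds, not a frame index. A minimal sketch of grabbing a frame by index instead; the input path and index are illustrative assumptions, not part of the repository:

    from moviepy.editor import VideoFileClip

    # Hypothetical input file and frame index, for illustration only
    clip = VideoFileClip('input.mp4')
    frame_idx = 200
    # Convert the frame index to seconds via the clip's frame rate
    clip.save_frame('frame_0200.jpeg', t=frame_idx / clip.fps)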
+        grp = f.create_dataset("uv", (dataset_size, 224, 224, 2))
+        for i in tqdm(range(dataset_size)):
+            im = np.asarray(np.load(os.path.join(uv_path, f'uv_{i}.npy')))
+            f["uv"][i] = im
+
+    except FileNotFoundError:
+        pass
+
+    try:
+        grp = f.create_dataset("mask", (dataset_size, 224, 224))
+        for i in tqdm(range(dataset_size)):
+            im = np.asarray(np.load(os.path.join(mask_path, f'mask_{i}.npy')))
+            f["mask"][i] = im
+
+    except FileNotFoundError:
+        pass
+
+    try:
+        grp = f.create_dataset("deca_details", (dataset_size, 224, 224, 3))
+        for i in tqdm(range(dataset_size)):
+            im = np.asarray(np.load(os.path.join(deca_details_path, f'deca_details_{i}.npy')))
+            f["deca_details"][i] = im
+
+    except FileNotFoundError:
+        pass
+
+    f.close()
+
+    print('Created h5 file at: ', h5py_out_file)
+
+    if clean:
+        folders = [ds_path, expr_path, frames_path, uv_path, mask_path, deca_details_path]
+        remove_folders(folders)
+
+
+if __name__ == '__main__':
+
+    dataset_type = 'SRF_anchor_short'
+    dataset_name = 'Halbtotale_355_9414'
+    dataset_path = '/home/alberto/NeuralVoicePuppetry/datasets'
+
+    # create_h5py_dataset takes the dataset directory itself; the original
+    # call passed three separate arguments, which does not match the
+    # signature above (the joined path is the assumed layout)
+    create_h5py_dataset(os.path.join(dataset_path, dataset_type, dataset_name))
diff --git a/NeuralVoicePuppetry/neural-code/autils/make_video_from_frames.py b/NeuralVoicePuppetry/neural-code/autils/make_video_from_frames.py new file mode 100644 index 0000000..e2e220d --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/autils/make_video_from_frames.py @@ -0,0 +1,67 @@
+# SPDX-License-Identifier: MIT
+# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details
+
+import os
+import tempfile
+from subprocess import call
+
+import cv2
+import numpy as np
+from tqdm import tqdm
+
+
+def make_video_seq(audio_fname, images_out_path, video_out_path, name):
+
+    image = cv2.imread(os.path.join(images_out_path, sorted(os.listdir(images_out_path))[0]))
+    size = (image.shape[1], image.shape[0])
+    print('Video size: ', size)
+
+    tmp_video_file = tempfile.NamedTemporaryFile('w', suffix='.mp4', dir=video_out_path)
+    if int(cv2.__version__[0]) < 3:
+        writer = cv2.VideoWriter(tmp_video_file.name, cv2.cv.CV_FOURCC(*'mp4v'), 25, size, True)
+    else:
+        writer = cv2.VideoWriter(tmp_video_file.name, cv2.VideoWriter_fourcc(*'mp4v'), 25, size, True)
+
+    # Collect frame indices, skipping hidden files; the original code removed
+    # items from the list while iterating over it, which skips entries
+    images_list = sorted(os.listdir(images_out_path))
+    indexes = [int(i[:-4]) for i in images_list if not i.startswith('.')]
+
+    print(np.max(indexes))
+
+    # range is inclusive of the last frame (the original range(max) dropped it)
+    for i in tqdm(range(np.max(indexes) + 1)):
+
+        file_name = os.path.join(images_out_path, '%04d.jpg' % i)
+        im = cv2.imread(file_name)
+        writer.write(im)
+
+    writer.release()
+
+    video_fname = os.path.join(video_out_path, name)
+    cmd = ('ffmpeg' + ' -i {0} -i {1} -vcodec h264 -ac 2 -channel_layout stereo -pix_fmt yuv420p {2}'.format(
+        audio_fname, tmp_video_file.name, video_fname)).split()
+    call(cmd)
+
+
+if __name__ == '__main__':
+    audio_fname = '/home/alberto/data/videosynth/SRF_anchor_short/Halbtotale/355_9415.wav'
+    video_out_path = 'results/face_reconstruction/video'
+    os.makedirs(video_out_path, exist_ok=True)
+    # images_out_path = 'results/face_reconstruction/images'
+    images_out_path = 'results/face_reconstruction/combined_images'
+    os.makedirs(images_out_path, exist_ok=True)
+
+    make_video_seq(audio_fname, images_out_path,
video_out_path, 'face_reconstruction.mp4')  # the original call dropped the required 4th argument; the file name here is an illustrative choice
diff --git a/NeuralVoicePuppetry/neural-code/autils/renderer.py b/NeuralVoicePuppetry/neural-code/autils/renderer.py new file mode 100644 index 0000000..81495af --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/autils/renderer.py @@ -0,0 +1,258 @@
+from typing import Union, List, Tuple
+
+import json
+import numpy as np
+import torch
+from PIL import Image
+from pytorch3d.io import load_obj
+from pytorch3d.ops import interpolate_face_attributes
+from pytorch3d.renderer import (
+    look_at_view_transform,
+    FoVPerspectiveCameras,
+    FoVOrthographicCameras,
+    PointLights,
+    RasterizationSettings,
+    MeshRenderer,
+    MeshRasterizer,
+    SoftPhongShader,
+    Textures, TexturesUV
+)
+from pytorch3d.structures import Meshes
+
+
+def to_tensor(a, device, dtype=None):
+    # Accepts tensors, numpy arrays or nested lists; the dtype argument was
+    # previously ignored, so it is now honoured when given
+    if isinstance(a, torch.Tensor):
+        t = a.to(device)
+    else:
+        t = torch.from_numpy(np.array(a)).to(device)
+    return t.to(dtype) if dtype is not None else t
+
+
+class Renderer:
+    def __init__(self, device, dist=2, elev=0, azimuth=180, fov=40, image_size=256, aspect_ratio=1, R=None, T=None, K=None, cameras=None):
+        # If you provide R and T, you don't need dist, elev, azimuth, fov
+        self.device = device
+
+        # Data structures and functions for rendering
+        if R is None and T is None:
+            R, T = look_at_view_transform(dist, elev, azimuth)
+        if cameras is None:
+            if K is None:
+                cameras = FoVPerspectiveCameras(device=device, R=R, T=T, znear=1, zfar=10000, aspect_ratio=aspect_ratio,
+                                                fov=fov, degrees=True)
+            else:
+                cameras = FoVOrthographicCameras(device=device, R=R, T=T, K=K)
+
+        raster_settings = RasterizationSettings(
+            image_size=image_size,
+            blur_radius=0.0,
+            faces_per_pixel=1,
+            perspective_correct=False
+        )
+
+        light_location = cameras.T  # Place lights at the same point as the camera
+        lights = PointLights(ambient_color=((0.3, 0.3, 0.3),), diffuse_color=((0.7, 0.7, 0.7),), device=device,
+                             location=light_location)
+
+        # Create a phong renderer by composing a rasterizer and a shader. The textured phong shader will
+        # interpolate the texture uv coordinates for each vertex, sample from a texture image and
+        # apply the Phong lighting model
+        self.mesh_rasterizer = MeshRasterizer(
+            cameras=cameras,
+            raster_settings=raster_settings
+        )
+        self._renderer = MeshRenderer(
+            rasterizer=self.mesh_rasterizer,
+            shader=SoftPhongShader(
+                device=device,
+                cameras=cameras,
+                lights=lights
+            )
+        )
+
+    def _flatten(self, a):
+        return torch.from_numpy(np.array(a)).reshape(-1, 1).to(self.device)
+
+    def render_bfm(self, sp, ep, shape_mu, shape_pcab, expression_pcab, faces, save_to=None):
+        sp = self._flatten(sp).double()
+        ep = self._flatten(ep).double()
+        shape_mu = self._flatten(shape_mu).double()
+        shape_pcab = torch.from_numpy(shape_pcab).to(self.device).double()
+        expression_pcab = torch.from_numpy(expression_pcab).to(self.device).double()
+        faces = torch.from_numpy(np.array(faces)).to(self.device)
+
+        verts_flat = shape_mu + \
+                     shape_pcab.mm(sp) + \
+                     expression_pcab.mm(ep)
+        verts_flat = verts_flat.view(1, -1, 3)
+        faces = faces.view(1, -1, 3)
+        # save_to must be passed by keyword: the third positional parameter of
+        # render_mesh is textures, not save_to
+        return self.render_mesh(verts_flat, faces, save_to=save_to)
+
+    def render_mesh(self, verts, faces, textures=None, save_to=None, return_format="pil", standardize=False, device="cuda" if torch.cuda.is_available() else "cpu"):
+        if textures is not None:
+            assert len(textures.shape) == 3, "Please use batches"
+            textures = to_tensor(textures, device)
+        # np.float / np.long were removed from numpy; use torch dtypes instead
+        verts = to_tensor(verts, device, dtype=torch.float32)
+        faces = to_tensor(faces, device, dtype=torch.int64)
+
+        assert len(verts.shape) == 3, "Please use batches"
+        assert len(faces.shape) == 3, "Please use batches"
+        bs = verts.shape[0]
+
+        # device
+        if verts.device != self.device:
+            verts = verts.to(self.device)
+        if faces.device != self.device:
+            faces = faces.to(self.device)
+
+        # standardize: centre the mesh and scale it into [-1, 1]
+        if standardize:
+            verts = verts - verts.mean(1)
+            verts = verts / verts.abs().max()
+
+        # With world coordinates +Y up, +X left and +Z in, the mesh faces the
+        # -Z direction, so the default camera sits at azimuth 180 to face it.
+        verts = verts.float()
+        faces = faces.int()
+        verts_rgb = torch.stack([torch.ones_like(verts[i]) for i in range(bs)])  # (bs, V, 3)
+        if textures is None:
+            textures = Textures(verts_rgb=verts_rgb.to(self.device))
+        mesh = Meshes(verts=verts, faces=faces, textures=textures)
+
+        images = self._renderer(mesh)
+        image = images[0, ..., :3].detach().cpu().numpy()
+
+        image = (image.clip(0, 1) * 255).astype(np.uint8)
+        image_pil = Image.fromarray(image)
+        if save_to:
+            image_pil.save(save_to)
+        if return_format.lower() == "pil":
+            return image_pil
+        elif return_format.lower() == "np":
+            return image
+        return image
+
+
+class ImagelessTexturesUV(TexturesUV):
+    def __init__(self,
+                 faces_uvs: Union[torch.Tensor, List[torch.Tensor], Tuple[torch.Tensor]],
+                 verts_uvs: Union[torch.Tensor, List[torch.Tensor], Tuple[torch.Tensor]],
+                 padding_mode: str = "border",
+                 align_corners: bool = True,
+                 ):
+        maps = torch.zeros(1, 2, 2, 3).to(faces_uvs[0].device)  # This is simply to instantiate a texture, but it is not used.
+        super().__init__(maps=maps, faces_uvs=faces_uvs, verts_uvs=verts_uvs, padding_mode=padding_mode, align_corners=align_corners)
+
+    def sample_pixel_uvs(self, fragments, **kwargs) -> torch.Tensor:
+        """
+        Copied from super().sample_textures and adapted to output pixel_uvs instead of the sampled texture.
+
+        Args:
+            fragments:
+                The outputs of rasterization.
From this we use + + - pix_to_face: LongTensor of shape (N, H, W, K) specifying the indices + of the faces (in the packed representation) which + overlap each pixel in the image. + - barycentric_coords: FloatTensor of shape (N, H, W, K, 3) specifying + the barycentric coordianates of each pixel + relative to the faces (in the packed + representation) which overlap the pixel. + + Returns: + texels: tensor of shape (N, H, W, K, C) giving the interpolated + texture for each pixel in the rasterized image. + """ + if self.isempty(): + faces_verts_uvs = torch.zeros( + (self._N, 3, 2), dtype=torch.float32, device=self.device + ) + else: + packing_list = [ + i[j] for i, j in zip(self.verts_uvs_list(), self.faces_uvs_list()) + ] + faces_verts_uvs = torch.cat(packing_list) + texture_maps = self.maps_padded() + + # pixel_uvs: (N, H, W, K, 2) + pixel_uvs = interpolate_face_attributes( + fragments.pix_to_face, fragments.bary_coords, faces_verts_uvs + ) + + N, H_out, W_out, K = fragments.pix_to_face.shape + N, H_in, W_in, C = texture_maps.shape # 3 for RGB + + # pixel_uvs: (N, H, W, K, 2) -> (N, K, H, W, 2) -> (NK, H, W, 2) + pixel_uvs = pixel_uvs.permute(0, 3, 1, 2, 4).reshape(N * K, H_out, W_out, 2) + + # textures.map: + # (N, H, W, C) -> (N, C, H, W) -> (1, N, C, H, W) + # -> expand (K, N, C, H, W) -> reshape (N*K, C, H, W) + texture_maps = ( + texture_maps.permute(0, 3, 1, 2)[None, ...] + .expand(K, -1, -1, -1, -1) + .transpose(0, 1) + .reshape(N * K, C, H_in, W_in) + ) + + # Textures: (N*K, C, H, W), pixel_uvs: (N*K, H, W, 2) + # Now need to format the pixel uvs and the texture map correctly! + # From pytorch docs, grid_sample takes `grid` and `input`: + # grid specifies the sampling pixel locations normalized by + # the input spatial dimensions It should have most + # values in the range of [-1, 1]. Values x = -1, y = -1 + # is the left-top pixel of input, and values x = 1, y = 1 is the + # right-bottom pixel of input. 
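+        # Worked example of the normalisation below: a uv of (0.25, 1.0)
+        # maps to (-0.5, 1.0), matching grid_sample's [-1, 1] convention
+        # (align_corners=True, as set in __init__).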
+ + pixel_uvs = pixel_uvs * 2.0 - 1.0 + return pixel_uvs + + +class UVRenderer(Renderer): + def __init__(self, obj_path, device, dist=2, elev=0, azimuth=180, fov=40, image_size=256, R=None, T=None, cameras=None): + # obj path should be the path to the obj file with the UV parametrization + super().__init__(device=device, dist=dist, elev=elev, azimuth=azimuth, fov=fov, image_size=image_size, R=R, T=T, cameras=cameras) + # p = "/media/data/facemodels/basel_facemodel/xucong_bfm/BFM 2017/BFM_face_2017.obj" + v, f, aux = load_obj(obj_path) + verts_uvs = aux.verts_uvs.to(device) # (V, 2) + faces_uvs = f.textures_idx.to(device) # (F, 3) + self.texture = ImagelessTexturesUV(verts_uvs=[verts_uvs], faces_uvs=[faces_uvs]) + + def render(self, meshes): + # Currently only supports one mesh in meshes + fragments = self.mesh_rasterizer(meshes) + rendered_uv = self.texture.sample_pixel_uvs(fragments).detach().cpu().numpy()[0] + return rendered_uv + + +class BFMUVRenderer(Renderer): + def __init__(self, json_path, device, dist=2, elev=0, azimuth=180, fov=40, image_size=256, R=None, T=None, cameras=None): + # json_path = ".../face12.json" + super().__init__(device=device, dist=dist, elev=elev, azimuth=azimuth, fov=fov, image_size=image_size, R=R, T=T, cameras=cameras) + + with open(json_path, 'r') as f: + uv_para = json.load(f) + verts_uvs = np.array(uv_para['textureMapping']['pointData']) + faces_uvs = np.array(uv_para['textureMapping']['triangles']) + verts_uvs = to_tensor(verts_uvs, device).float() + faces_uvs = to_tensor(faces_uvs, device) + self.texture = ImagelessTexturesUV(verts_uvs=[verts_uvs], faces_uvs=[faces_uvs]) + + def render(self, meshes): + # Currently only supports one mesh in meshes + fragments = self.mesh_rasterizer(meshes) + rendered_uv = self.texture.sample_pixel_uvs(fragments).detach().cpu().numpy()[0] + return rendered_uv + + +if __name__ == "__main__": + height = 256 + path_uv = "face12.json" + new_bfm_vertices = ... # vertices of BFM + bfm_faces_tensor = ... 
# triangle faces of BFM + + R, T = look_at_view_transform(eye=((0, 0, 0),), at=((0, 0, -1),), up=((0, 1, 0),)) + uv_renderer = BFMUVRenderer(path_uv, device="cuda" if torch.cuda.is_available() else "cpu", fov=63, image_size=height, R=R, T=T) + new_bfm_mesh = Meshes(new_bfm_vertices, bfm_faces_tensor) + rendered_uv_np = uv_renderer.render(new_bfm_mesh) + diff --git a/NeuralVoicePuppetry/neural-code/autils/resample_audio.py b/NeuralVoicePuppetry/neural-code/autils/resample_audio.py new file mode 100644 index 0000000..980ce2f --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/autils/resample_audio.py @@ -0,0 +1,13 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +from scipy.io import wavfile + +audio_fname = '/home/alberto/data/videosynth/SRF_anchor_short/Halbtotale/355_9415.wav' +new_audio = '/home/alberto/data/videosynth/SRF_anchor_short/Halbtotale/355_9415_short_Clara.wav' + +samplerate, data = wavfile.read(audio_fname) + +new_data = data[:20*samplerate, :] + +wavfile.write(new_audio, samplerate, new_data) diff --git a/NeuralVoicePuppetry/neural-code/autils/voca_helpers.py b/NeuralVoicePuppetry/neural-code/autils/voca_helpers.py new file mode 100644 index 0000000..5820b2a --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/autils/voca_helpers.py @@ -0,0 +1,183 @@ +from __future__ import division +import os +os.environ['PYOPENGL_PLATFORM'] = 'egl' # Uncommnet this line while running remotely +import cv2 +import pyrender +import trimesh +import tempfile +import numpy as np +import matplotlib as mpl +import matplotlib.pyplot as plt +import matplotlib.cm as cm +from psbody.mesh import Mesh +import tempfile +from subprocess import call +from tqdm import tqdm +from PIL import Image + +def get_unit_factor(unit): + if unit == 'mm': + return 1000.0 + elif unit == 'cm': + return 100.0 + elif unit == 'm': + return 1.0 + else: + raise ValueError('Unit not supported') + + +def render_mesh_helper(mesh, t_center, rot=np.zeros(3), tex_img=None, v_colors=None, errors=None, error_unit='m', min_dist_in_mm=0.0, max_dist_in_mm=3.0, z_offset=0): + camera_params = {'c': np.array([400, 400]), + 'k': np.array([-0.19816071, 0.92822711, 0, 0, 0]), + 'f': np.array([4754.97941935 / 2, 4754.97941935 / 2])} + + frustum = {'near': 0.01, 'far': 3.0, 'height': 800, 'width': 800} + + mesh_copy = Mesh(mesh.v, mesh.f) + mesh_copy.v[:] = cv2.Rodrigues(rot)[0].dot((mesh_copy.v-t_center).T).T+t_center + + texture_rendering = tex_img is not None and hasattr(mesh, 'vt') and hasattr(mesh, 'ft') + if texture_rendering: + intensity = 0.5 + tex = pyrender.Texture(source=tex_img, source_channels='RGB') + material = pyrender.material.MetallicRoughnessMaterial(baseColorTexture=tex) + + # Workaround as pyrender requires number of vertices and uv coordinates to be the same + temp_filename = '%s.obj' % next(tempfile._get_candidate_names()) + mesh.write_obj(temp_filename) + tri_mesh = trimesh.load(temp_filename, process=False) + try: + os.remove(temp_filename) + except: + print('Failed deleting temporary file - %s' % temp_filename) + render_mesh = pyrender.Mesh.from_trimesh(tri_mesh, material=material) + elif errors is not None: + intensity = 0.5 + unit_factor = get_unit_factor('mm')/get_unit_factor(error_unit) + errors = unit_factor*errors + + norm = mpl.colors.Normalize(vmin=min_dist_in_mm, vmax=max_dist_in_mm) + cmap = cm.get_cmap(name='jet') + colormapper = cm.ScalarMappable(norm=norm, cmap=cmap) + rgba_per_v = colormapper.to_rgba(errors) + rgb_per_v = rgba_per_v[:, 0:3] 
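+        # Per-vertex errors are first converted to millimetres, then mapped
+        # through matplotlib's 'jet' colormap to per-vertex RGB values.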
+ elif v_colors is not None: + intensity = 0.5 + rgb_per_v = v_colors + else: + intensity = 1.5 + rgb_per_v = None + + if not texture_rendering: + tri_mesh = trimesh.Trimesh(vertices=mesh_copy.v, faces=mesh_copy.f, vertex_colors=rgb_per_v) + render_mesh = pyrender.Mesh.from_trimesh(tri_mesh, smooth=True) + + scene = pyrender.Scene(ambient_light=[.2, .2, .2], bg_color=[255, 255, 255]) + camera = pyrender.IntrinsicsCamera(fx=camera_params['f'][0], + fy=camera_params['f'][1], + cx=camera_params['c'][0], + cy=camera_params['c'][1], + znear=frustum['near'], + zfar=frustum['far']) + + scene.add(render_mesh, pose=np.eye(4)) + + camera_pose = np.eye(4) + camera_pose[:3,3] = np.array([0, 0, 1.0-z_offset]) + scene.add(camera, pose=[[1, 0, 0, 0], + [0, 1, 0, 0], + [0, 0, 1, 1], + [0, 0, 0, 1]]) + + angle = np.pi / 6.0 + pos = camera_pose[:3,3] + light_color = np.array([1., 1., 1.]) + light = pyrender.PointLight(color=light_color, intensity=intensity) + + light_pose = np.eye(4) + light_pose[:3,3] = pos + scene.add(light, pose=light_pose.copy()) + + light_pose[:3,3] = cv2.Rodrigues(np.array([angle, 0, 0]))[0].dot(pos) + scene.add(light, pose=light_pose.copy()) + + light_pose[:3,3] = cv2.Rodrigues(np.array([-angle, 0, 0]))[0].dot(pos) + scene.add(light, pose=light_pose.copy()) + + light_pose[:3,3] = cv2.Rodrigues(np.array([0, -angle, 0]))[0].dot(pos) + scene.add(light, pose=light_pose.copy()) + + light_pose[:3,3] = cv2.Rodrigues(np.array([0, angle, 0]))[0].dot(pos) + scene.add(light, pose=light_pose.copy()) + + flags = pyrender.RenderFlags.SKIP_CULL_FACES + try: + r = pyrender.OffscreenRenderer(viewport_width=frustum['width'], viewport_height=frustum['height']) + color, _ = r.render(scene, flags=flags) + except: + print('pyrender: Failed rendering frame') + color = np.zeros((frustum['height'], frustum['width'], 3), dtype='uint8') + + return color[..., ::-1] + + +def render_sequence_meshes(audio_fname, sequence_vertices, template, images_out_path, video_out_path, + uv_template_fname='', texture_img_fname=''): + + if not os.path.exists(video_out_path): + os.makedirs(video_out_path) + + if not os.path.exists(images_out_path): + os.makedirs(images_out_path) + + # tmp_video_file = tempfile.NamedTemporaryFile('w', suffix='.mp4', dir=video_out_path) + # if int(cv2.__version__[0]) < 3: + # writer = cv2.VideoWriter(tmp_video_file.name, cv2.cv.CV_FOURCC(*'mp4v'), 60, (800, 800), True) + # else: + # writer = cv2.VideoWriter(tmp_video_file.name, cv2.VideoWriter_fourcc(*'mp4v'), 60, (800, 800), True) + + if os.path.exists(uv_template_fname) and os.path.exists(texture_img_fname): + uv_template = Mesh(filename=uv_template_fname) + vt, ft = uv_template.vt, uv_template.ft + tex_img = cv2.imread(texture_img_fname)[:,:,::-1] + else: + vt, ft = None, None + tex_img = None + + num_frames = sequence_vertices.shape[0] + center = np.mean(sequence_vertices[0], axis=0) + for i_frame in tqdm(range(num_frames)): + + if i_frame >= 0: + + render_mesh = Mesh(sequence_vertices[i_frame], template.f) + if vt is not None and ft is not None: + render_mesh.vt, render_mesh.ft = vt, ft + + im_tar_path = os.path.join(images_out_path, f"frame_{i_frame}.png") + + if os.path.isfile(im_tar_path): + + im = Image.open(im_tar_path) + singular = np.unique(im) + + if len(singular) >= 2: + pass + + else: + print(f"Rendering {i_frame} because it's black") + + + img = render_mesh_helper(render_mesh, center, tex_img=tex_img) + + im = Image.fromarray(img) + im.save(im_tar_path) + + # writer.write(img) + + # writer.release() + # + # video_fname = 
os.path.join(out_path, 'video.mp4') + # cmd = ('ffmpeg' + ' -i {0} -i {1} -vcodec h264 -ac 2 -channel_layout stereo -pix_fmt yuv420p {2}'.format( + # audio_fname, tmp_video_file.name, video_fname)).split() + # call(cmd) diff --git a/NeuralVoicePuppetry/neural-code/base_options.py b/NeuralVoicePuppetry/neural-code/base_options.py new file mode 100644 index 0000000..ac18b11 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/base_options.py @@ -0,0 +1,111 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import argparse +import os + +class Options(): + def __init__(self): + self.initialized = False + + def initialize(self, parser): + self.initialized = True + return parser + + def gather_options(self): + # initialize parser with basic options + if not self.initialized: + parser = argparse.ArgumentParser( + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser = self.initialize(parser) + + # get the basic options + opt, _ = parser.parse_known_args() + + self.parser = parser + + return parser.parse_args() + + def print_options(self, opt): + message = '' + message += '----------------- Options ---------------\n' + for k, v in sorted(vars(opt).items()): + comment = '' + default = self.parser.get_default(k) + if v != default: + comment = '\t[default: %s]' % str(default) + message += '{:>25}: {:<30}{}\n'.format(str(k), str(v), comment) + message += '----------------- End -------------------' + print(message) + + + def parse(self): + + opt = self.gather_options() + + self.print_options(opt) + + self.opt = opt + return self.opt + +class PostprocessingOptions(Options): + + def initialize(self, parser): + + parser = Options.initialize(self, parser) + parser.add_argument('--file_id_source', required=True, help='Name of source') + parser.add_argument('--file_id_target', required=True, help='Name of target') + parser.add_argument('--model_name', required=True, help='Name of the rendering model') + + parser.add_argument('--frames_path', required=True, help='path to target orignal frames') + parser.add_argument('--audio_fname', required=True, help='path to input audio file') + parser.add_argument('--dataset_target', required=True, help='path to target dataset folder') + parser.add_argument('--target_fps', type=int, default=25, help='Fps of target video') + parser.add_argument('--clean', action='store_true', help='True to clean all data and inference folders') + parser.add_argument('--results_out_dir', required=True, help='path to output folder') + + return parser + + +class PreprocessingOptions(Options): + + def initialize(self, parser): + + parser = Options.initialize(self, parser) + parser.add_argument('--dataroot', required=True, default='/home/alberto/data/videosynth/', + help='path to data folder') + parser.add_argument('--dataset_path', required=True, default='/home/alberto/NeuralVoicePuppetry/datasets/', + help='path to the dataset folder') + parser.add_argument('--dataset', required=True, help='name of the dataset cathegory') + parser.add_argument('--name', required=True, help='name of video/audio to process') + + parser.add_argument('--preprocess_ds', action='store_true', help='True to run deepspeach preprocessing') + parser.add_argument('--preprocess_tracking', action='store_true', help='True to run tracking preprocessing') + parser.add_argument('--skip_h5py', action='store_true', help='True to skip h5py creation') + parser.add_argument('--target_fps', type=int, default=25, help='Fps of target video') + 
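+        # Example invocation (hypothetical paths, for illustration only):
+        #   python preprocessing.py --dataroot /data --dataset_path /datasets \
+        #       --dataset video --name my_clip --preprocess_ds --target_fps 25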
parser.add_argument('--clean', action='store_true', help='True to clean all datset folders and leave only h5py') + + return parser + +class FaceReconstructionOptions(Options): + + def initialize(self, parser): + parser = Options.initialize(self, parser) + parser.add_argument('--source_name', required=True, help='name of source dataset') + parser.add_argument('--target_name', required=True, help='name of target dataset') + parser.add_argument('--audio_path', required=True, help='path to source audio file') + parser.add_argument('--expression_path', required=True, help='path to expressions dir') + parser.add_argument('--codedicts_path', required=True, help='path to codedicts dir') + parser.add_argument('--tform_path', required=True, help='path to tform file') + parser.add_argument('--frames_path', required=True, help='path to frames dir') + + return parser + +class Text2SpeachOptions(PreprocessingOptions): + def initialize(self, parser): + + parser = PreprocessingOptions.initialize(self, parser) + parser.add_argument('--language_source', type=str, default='en', help='Original language of text file') + parser.add_argument('--language_target', type=str, default='en', help='Target language for audio file') + + return parser diff --git a/NeuralVoicePuppetry/neural-code/codedump/NeRF_audio2expression.sh b/NeuralVoicePuppetry/neural-code/codedump/NeRF_audio2expression.sh new file mode 100644 index 0000000..82d39ab --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/codedump/NeRF_audio2expression.sh @@ -0,0 +1,57 @@ +GPUID=0 + +BASE=`pwd` +echo $BASE + +# DATA PATHS +DATAROOT=/home/alberto/data/videosynth/ +DATASETPATH=/home/alberto/NeuralVoicePuppetry/datasets/ + +### SOURCE ### + +# Severin Videos +DATASET_SOURCE=External +VIDEOTYPE_SOURCE=Severin_videos +NAME_SOURCE_LIST=( transformers_lecture ) + +TARGET_FPS=25 +##################### PREPROCESSING ##################### +for NAME_SOURCE in "${NAME_SOURCE_LIST[@]}" +do + echo 'Preprocessing source video: '$DATAROOT$DATASET_SOURCE/$VIDEOTYPE_SOURCE/$NAME_SOURCE + python preprocessing.py \ + --dataroot $DATAROOT \ + --dataset_path $DATASETPATH \ + --dataset $DATASET_SOURCE \ + --video_type $VIDEOTYPE_SOURCE \ + --name $NAME_SOURCE \ + --preprocess_ds \ + --target_fps $TARGET_FPS \ + --clean \ + + + ##################### AUDIO2EXPRESSION ##################### + echo -e '\n--------------AUDIO2EXPRESSION---------------\n' + OBJECT=ARD_ZDF + N_ITER=150 + N_ITER_LR_DECAY=50 + RENDERER=$OBJECT + EROSION=1.0 + BATCH_SIZE=16 + MODEL=audio2ExpressionsAttentionTMP4 + RENDERER_TYPE=estimatorAttention + DATASET_MODE=multi_face_audio_eq_tmp_cached + LOSS=RMS + SEQ_LEN=8 + DATE_WITH_TIME=20191105-115332 + A2E_NAME=$MODEL-$RENDERER_TYPE-SL$SEQ_LEN-BS$BATCH_SIZE-$OBJECT-$DATASET_MODE-$LOSS-$DATE_WITH_TIME-look_ahead + EPOCH=latest + + # SOURCE + SOURCE_ACTOR=$DATASETPATH$DATASET_SOURCE/$VIDEOTYPE_SOURCE'_'$NAME_SOURCE + MAPPING_PATH=/home/alberto/NeuralVoicePuppetry/mappings/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/mapping_Severin_videos_SC + OUTDIR=/home/alberto/data/dave_fxfy/audio2exprNVP/ + + cd Audio2ExpressionNet/Inference/ + python audio2expr_with_map.py --use_mapping --mapping_path $MAPPING_PATH --out_dir $OUTDIR --look_ahead --seq_len $SEQ_LEN --source_actor $SOURCE_ACTOR --write_no_images --name $A2E_NAME --erosionFactor $EROSION --epoch $EPOCH --display_winsize 512 --rendererType $RENDERER_TYPE --lossType $LOSS --dataroot $DATASETS_DIR/$OBJECT --model $MODEL --netG 
unet_256 --dataset_mode $DATASET_MODE --norm instance --gpu_ids $GPUID +done diff --git a/NeuralVoicePuppetry/neural-code/codedump/__init__.py b/NeuralVoicePuppetry/neural-code/codedump/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/codedump/find_closest_frames.py b/NeuralVoicePuppetry/neural-code/codedump/find_closest_frames.py new file mode 100644 index 0000000..8c2cd61 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/codedump/find_closest_frames.py @@ -0,0 +1,83 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import os +import cv2 +from skimage.metrics import structural_similarity +import matplotlib.pyplot as plt +from tqdm import tqdm +import sys, getopt + +def compute_diff(image1, image2): + # Convert images to grayscale + image1 = cv2.cvtColor(image1, cv2.COLOR_BGR2GRAY) + image2 = cv2.cvtColor(image2, cv2.COLOR_BGR2GRAY) + + # Compute SSIM between two images + (score, diff) = structural_similarity(image1, image2, full=True) + diff = (diff * 255).astype("uint8") + + return score, diff + +def main(argv): + + fake_video_path = argv[0] + real_video_path = argv[1] + + print(f'Fake video path: {fake_video_path}') + print(f'Real video path: {real_video_path}') + + name = fake_video_path.split('/')[-1][:-4] + + vidcap_fake = cv2.VideoCapture(fake_video_path) + vidcap_real = cv2.VideoCapture(real_video_path) + + length = min(int(vidcap_fake.get(cv2.CAP_PROP_FRAME_COUNT)), int(vidcap_real.get(cv2.CAP_PROP_FRAME_COUNT))) + + success_fake,image_fake = vidcap_fake.read() + success_real,image_real = vidcap_real.read() + + best_pair = { + 'index': 0, + 'score': -1, + 'diff': None, + 'real_img': None, + 'fake_img': None, + } + + for count in tqdm(range(length)): + + score, diff = compute_diff(image_fake, image_real) + if score > best_pair["score"]: + best_pair["index"] = count + best_pair["score"] = score + best_pair["diff"] = diff + best_pair["real_img"] = image_real + best_pair["fake_img"] = image_fake + + success_fake, image_fake = vidcap_fake.read() + success_real, image_real = vidcap_real.read() + + # best_pair["real_img"] = cv2.cvtColor(best_pair["real_img"], cv2.COLOR_BGR2RGB) + # best_pair["fake_img"] = cv2.cvtColor(best_pair["fake_img"], cv2.COLOR_BGR2RGB) + + # fig=plt.figure(figsize=(14, 6)) + # fig.suptitle(f'Index: {best_pair["index"]}, Score: {best_pair["score"]}') + # fig.add_subplot(131) + # plt.imshow(best_pair["real_img"]) + # fig.add_subplot(132) + # plt.imshow(best_pair["fake_img"]) + # fig.add_subplot(133) + # plt.imshow(best_pair["diff"]) + # plt.show() + + os.makedirs('../results/differences/', exist_ok=True) + + cv2.imwrite('results/differences/' + name + '_real_img.png', best_pair["real_img"]) + cv2.imwrite('results/differences/' + name + '_fake_img.png', best_pair["fake_img"]) + cv2.imwrite('results/differences/' + name + '_diff_img.png', best_pair["diff"]) + + +# python find_closest_frames.py '/home/alberto/NeuralVoicePuppetry/results/inference/DynamicNeuralTextures-Demo_Male_moderator_1/Demo_Male_moderator_1_to_Demo_Male_moderator_1/Demo_Male_moderator_1_to_Demo_Male_moderator_1.mp4' '/home/alberto/data/videosynth/External/Demo/Male_moderator_1.mp4' +if __name__ == "__main__": + main(sys.argv[1:]) diff --git a/NeuralVoicePuppetry/neural-code/codedump/find_closest_frames.sh b/NeuralVoicePuppetry/neural-code/codedump/find_closest_frames.sh new file mode 100644 index 0000000..a3c4662 --- /dev/null +++ 
b/NeuralVoicePuppetry/neural-code/codedump/find_closest_frames.sh @@ -0,0 +1,10 @@ + +DATASET_SOURCE=Original_NVP +VIDEOTYPE_SOURCE=Videos +NAME_SOURCE_LIST=( mpi_franziska_neutral_eng obama sequence002 ) + +for NAME in "${NAME_SOURCE_LIST[@]}" +do + VIDEONAME=$VIDEOTYPE_SOURCE'_'$NAME + python find_closest_frames.py '/home/alberto/NeuralVoicePuppetry/results/inference/DynamicNeuralTextures-'$VIDEONAME/$VIDEONAME'_to_'$VIDEONAME/$VIDEONAME'_to_'$VIDEONAME'.mp4' '/home/alberto/data/videosynth/'$DATASET_SOURCE/$VIDEOTYPE_SOURCE/$NAME'.mp4' +done \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/codedump/make_various_audios.py b/NeuralVoicePuppetry/neural-code/codedump/make_various_audios.py new file mode 100644 index 0000000..bb931fc --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/codedump/make_various_audios.py @@ -0,0 +1,13 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +from subprocess import call + +name = "transformers_lecture" +file_path = "/home/alberto/data/videosynth/External/Severin_videos/" + +durations = [ 10 ] + +for dur in durations: + cmd = (f"ffmpeg -i {file_path}{name}.wav -ss 0 -to {dur} -c copy {file_path}{name}_{dur}.wav").split() + call(cmd) diff --git a/NeuralVoicePuppetry/neural-code/codedump/reconstruct_faces.py b/NeuralVoicePuppetry/neural-code/codedump/reconstruct_faces.py new file mode 100644 index 0000000..8b7661f --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/codedump/reconstruct_faces.py @@ -0,0 +1,170 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import os +import tqdm +import torch +import numpy as np +import json +from PIL import Image +from psbody.mesh import Mesh +from autils.voca_helpers import * +from autils.make_video_from_frames import make_video_seq +from third.DECA.decalib.utils.config import cfg +from third.DECA.decalib.utils.renderer import SRenderY +from third.DECA.decalib.models.FLAME import FLAME +from third.DECA.decalib.utils.util import write_obj, batch_orth_proj +from skimage.transform import warp +from skimage.transform import SimilarityTransform +from base_options import FaceReconstructionOptions + +def warp_back(image, oldimage, tform): + + alpha = 0.6 + + oldimage = oldimage.astype(np.float64) /255. + new_size = oldimage.shape + + dst_image = warp(image, tform, output_shape=new_size) + + # Mask of non-black pixels. 
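+    # Pixels of the warped render that are near-black (all channels <= 0.2)
+    # are treated as background and restored from the original frame before
+    # the alpha blend below.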
+    mask = np.where(np.all(dst_image <= [0.2, 0.2, 0.2], axis=-1))
+    dst_image[mask] = oldimage[mask]
+
+    res = cv2.addWeighted(oldimage, 1 - alpha, dst_image, alpha, 0)
+    res = res[:, :, ::-1]
+
+    return res
+
+if __name__ == '__main__':
+    opt = FaceReconstructionOptions().parse()
+
+    source_name = opt.source_name
+    target_name = opt.target_name
+    expression_path = opt.expression_path
+    pd_files_path = opt.codedicts_path
+    audio_fname = opt.audio_path  # the parser defines --audio_path, not --audio_input_dir
+    frames_path = opt.frames_path
+    tform_path = opt.tform_path
+
+    transfer_name = source_name + '--' + target_name  # fixed: a missing '+' here was a syntax error
+
+    meshes_out_path = f'results/face_reconstruction/{transfer_name}/meshes'
+    os.makedirs(meshes_out_path, exist_ok=True)
+    video_out_path = f'results/face_reconstruction/{transfer_name}/video'
+    os.makedirs(video_out_path, exist_ok=True)
+    images_out_path = f'results/face_reconstruction/{transfer_name}/images'
+    os.makedirs(images_out_path, exist_ok=True)
+    json_out_path = f'results/face_reconstruction/{transfer_name}/talking_head.json'
+
+    # deca or voca
+    mode = 'deca'
+    # mode = 'voca'
+
+    # skip saving obj
+    skip_obj = True
+
+    # initialize flame
+    model_cfg = cfg.model
+    flame = FLAME(model_cfg).cuda()
+    image_size = cfg.dataset.image_size
+    render = SRenderY(image_size, obj_filename=model_cfg.topology_path, uv_size=model_cfg.uv_size).cuda()
+    faces = render.faces[0].cpu().numpy()
+
+    # get mean shape
+    print('Getting mean shape..')
+    pd_files = [f for f in sorted(os.listdir(pd_files_path))
+                if os.path.isfile(os.path.join(pd_files_path, f))]
+
+    shapes = torch.tensor([]).cuda()
+    codedicts = {}
+
+    for pd_file in tqdm(pd_files):
+        codedict = torch.load(os.path.join(pd_files_path, pd_file))
+        index = int(pd_file[9:-3])
+        codedicts[index] = codedict
+        shape = codedict['shape']
+        shapes = torch.cat((shapes, shape))
+
+    mean_shape = torch.mean(shapes, dim=0)
+    mean_shape = torch.unsqueeze(mean_shape, dim=0)
+
+    # get expressions
+    num_expressions = len([name for name in os.listdir(expression_path)])
+    sequence_vertices = []
+
+    # make objs
+    print('Making meshes..')
+
+    for index in tqdm(range(num_expressions)):
+
+        codedict = codedicts[index]
+        expression = np.load(os.path.join(expression_path, f'expr_{index}.npy'))[0]
+
+        pose = codedict['pose'][0, :-3]
+        expr = torch.unsqueeze(torch.tensor(expression[:-3], dtype=torch.float32), dim=0).cuda()
+        jaw_pose = torch.tensor(expression[-3:], dtype=torch.float32).cuda()
+        new_pose = torch.unsqueeze(torch.cat((pose, jaw_pose), dim=0), dim=0)
+
+        # get flame model
+        vertices, landmarks2d, landmarks3d = flame(mean_shape, expr, new_pose)
+
+        if not skip_obj:
+            filename = os.path.join(meshes_out_path, '%04d.obj' % index)
+            write_obj(filename, vertices[0], faces)
+
+            if mode == 'voca':
+                # append vertices
+                frame = Mesh(filename=filename)
+                sequence_vertices.append(frame.v)
+
+        # Image export no longer hangs off the skip_obj branch: the original
+        # 'elif' silently disabled it whenever objs were being written
+        if mode == 'deca':
+            # save images
+            trans_verts = batch_orth_proj(vertices, codedict['cam'])
+            trans_verts[:, :, 1:] = -trans_verts[:, :, 1:]
+            shape_images = render.render_shape(vertices, trans_verts)[0]
+            shape_images = shape_images.permute(1, 2, 0).cpu().numpy()
+            shape_images = shape_images * 255.
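+            # render_shape returns floats in [0, 1]; scale to 8-bit before
+            # handing the array to PIL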
+            shape_images = shape_images.astype(np.uint8)
+
+            filename = os.path.join(images_out_path, '%04d.jpg' % index)
+
+            img = Image.fromarray(shape_images)
+            img.save(filename)
+
+    # Render images
+    print('Rendering images and making video..')
+    if mode == 'voca':
+        template = Mesh(sequence_vertices[0], faces)  # fixed: 'f' was undefined here; the FLAME faces array is meant
+        sequence_vertices = np.stack(sequence_vertices)
+        render_sequence_meshes(audio_fname, sequence_vertices, template, images_out_path, video_out_path,
+                               uv_template_fname='', texture_img_fname='')
+
+    make_video_seq(audio_fname, images_out_path, video_out_path, 'talking_head.mp4')
+
+    combined_images_out_path = f'results/face_reconstruction/{transfer_name}/combined_images'
+    os.makedirs(combined_images_out_path, exist_ok=True)
+
+    tform = np.load(tform_path)
+
+    for image_path, frame in zip(tqdm(sorted(os.listdir(images_out_path))), sorted(os.listdir(frames_path))):
+
+        index = int(image_path[:-4])
+        f_tform = SimilarityTransform(matrix=tform[index])
+
+        image = cv2.imread(os.path.join(images_out_path, image_path))
+        old_image = cv2.imread(os.path.join(frames_path, frame))
+
+        res = warp_back(image, old_image, f_tform)
+        res = res * 255.
+        res = res.astype('uint8')
+        res = cv2.cvtColor(res, cv2.COLOR_BGR2RGB)
+
+        file_name = os.path.join(combined_images_out_path, '%04d.jpg' % index)
+        cv2.imwrite(file_name, res)
+
+    make_video_seq(audio_fname, combined_images_out_path, video_out_path, 'video_combined.mp4')
diff --git a/NeuralVoicePuppetry/neural-code/codedump/reconstruct_faces.sh b/NeuralVoicePuppetry/neural-code/codedump/reconstruct_faces.sh new file mode 100644 index 0000000..bfd9955 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/codedump/reconstruct_faces.sh @@ -0,0 +1,37 @@
+GPUID=0
+
+BASE=`pwd`
+# DATA PATHS
+DATAROOT=/home/alberto/data/videosynth/
+DATASETPATH=/home/alberto/NeuralVoicePuppetry/datasets/
+
+# SOURCE
+DATASET_SOURCE=SRF_anchor_short
+VIDEOTYPE_SOURCE=Halbtotale
+NAME_SOURCE=355_9415
+
+# TARGET
+DATASET_TARGET=SRF_anchor_short
+VIDEOTYPE_TARGET=Close
+NAME_TARGET=355_9105
+
+#############################################
+SOURCE_NAME=$VIDEOTYPE_SOURCE'_'$NAME_SOURCE
+TARGET_NAME=$VIDEOTYPE_TARGET'_'$NAME_TARGET
+MODEL='TRANSFERS/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead'
+
+# Bash assignments must not have spaces around '='
+AUDIO_PATH=$DATAROOT$DATASET_SOURCE/$VIDEOTYPE_SOURCE/$NAME_SOURCE'.wav'
+EXPRESSION_PATH=$DATASETPATH$MODEL/$SOURCE_NAME'--'$TARGET_NAME/'expressions'
+CODEDICTS_PATH=$DATASETPATH$DATASET_TARGET/$TARGET_NAME/'DECA_codedicts'
+TFORM_PATH=$DATASETPATH$DATASET_TARGET/$TARGET_NAME/'tform.npy'
+FRAMES_PATH=$DATAROOT$DATASET_TARGET/$VIDEOTYPE_TARGET/$NAME_TARGET
+
+echo 'Reconstructing face: '$SOURCE_NAME'--'$TARGET_NAME
+python reconstruct_faces.py \
+--source_name $SOURCE_NAME \
+--target_name $TARGET_NAME \
+--audio_path $AUDIO_PATH \
+--expression_path $EXPRESSION_PATH \
+--codedicts_path $CODEDICTS_PATH \
+--tform_path $TFORM_PATH \
+--frames_path $FRAMES_PATH
diff --git a/NeuralVoicePuppetry/neural-code/codedump/test_compression.py b/NeuralVoicePuppetry/neural-code/codedump/test_compression.py new file mode 100644 index 0000000..899b654 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/codedump/test_compression.py @@ -0,0 +1,59 @@
+# SPDX-License-Identifier: MIT
+# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details
+
+import os
+from subprocess import call
+import cv2
+import matplotlib.pyplot as plt
+from tqdm import tqdm
+from skimage.metrics import
structural_similarity + + +def check_mkdir(path): + if not os.path.exists(path): + print('creating %s' % path) + os.makedirs(path) + + +def video2sequence_lossless(video_path): + videofolder = video_path.split('.')[0] + check_mkdir(videofolder) + video_name = video_path.split('/')[-1].split('.')[0] + cmd = (f'ffmpeg -i {video_path} -vf fps=25 {videofolder}/{video_name}_frame%04d.png').split() + call(cmd) + imagepath_list = [os.path.join(videofolder, f) for f in os.listdir(videofolder)] + print('video frames are stored in {}'.format(videofolder)) + return imagepath_list + + +def compute_diff(image1, image2, text): + # Convert images to grayscale + image1 = cv2.cvtColor(image1, cv2.COLOR_BGR2GRAY) + image2 = cv2.cvtColor(image2, cv2.COLOR_BGR2GRAY) + + # Compute SSIM between two images + (score, diff) = structural_similarity(image1, image2, full=True) + print(text, ' SSIM: ', score) + diff = (diff * 255).astype("uint8") + + plt.imshow(diff) + plt.show() + + +NAME = 'DynamicNeuralTextures-Demo_Female_moderator_1' +FILE_ID_SOURCE = 'Clara_audios_Resemble_clara' +FILE_ID_TARGET = 'Demo_Female_moderator_1' + +video_out_path = f'results/inference/{NAME}/{FILE_ID_SOURCE}_to_{FILE_ID_TARGET}/' +video_fname = os.path.join(video_out_path, f'{FILE_ID_SOURCE}_to_{FILE_ID_TARGET}.mp4') +frames_path = '/home/alberto/data/videosynth/External/Demo/Female_moderator_1/' + +imagepath_list = video2sequence_lossless(video_fname) + +for gen_frame, old_frame in zip(tqdm(sorted(imagepath_list)), sorted(os.listdir(frames_path))): + + old_image = cv2.imread(os.path.join(frames_path, old_frame)) + gen_image = cv2.imread(gen_frame) + + compute_diff(old_image, gen_image, 'old_image - gen_image') + diff --git a/NeuralVoicePuppetry/neural-code/full_pipeline_nvp.sh b/NeuralVoicePuppetry/neural-code/full_pipeline_nvp.sh new file mode 100755 index 0000000..fe28d6c --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/full_pipeline_nvp.sh @@ -0,0 +1,279 @@ +#!/bin/bash + +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +set -e +eval "$(conda shell.bash hook)" +# cd ../neural-code + +GPUID=0 +BASE_DIR="${3:-$(pwd)}" +echo "BASE_DIR=$BASE_DIR" + +NAME_AUDIO_FILE=$1 +NAME_VIDEO_FILE=$2 +echo "NAME_AUDIO_FILE: $1" +echo "NAME_VIDEO_FILE: $2" + +# DATA PATHS +INPUTDATAPATH=$BASE_DIR/input_data +OUTPUTDATAPATH=$BASE_DIR/output_data +FEATURESPATH=features + +#Testing output path +OUTPUT_TOOL_DATA_PATH=$BASE_DIR/output_data/videos +#mkdir -p $OUTPUT_TOOL_DATA_PATH + +IMAGES_TARGET_DIR=$OUTPUT_TOOL_DATA_PATH + +###AUDIO### +AUDIO_PATH=audio +NAME_AUDIOS_LIST=($NAME_AUDIO_FILE) + +### VIDEO ### +VIDEO_PATH=video +NAME_VIDEOS_LIST=($NAME_VIDEO_FILE) + +TARGET_FPS=25 + +##################### PREPROCESSING ##################### +for NAME_AUDIO in "${NAME_AUDIOS_LIST[@]}"; do + for NAME_VIDEO in "${NAME_VIDEOS_LIST[@]}"; do + # Set time variable + SECONDS=0 + + conda activate deepspeech + if [ $NAME_AUDIO != $NAME_VIDEO ]; then + echo 'Preprocessing source audio: '$INPUTDATAPATH/$AUDIO_PATH/$NAME_AUDIO + python preprocessing.py \ + --dataroot $INPUTDATAPATH \ + --dataset_path $OUTPUTDATAPATH/$FEATURESPATH \ + --dataset $AUDIO_PATH \ + --name $NAME_AUDIO \ + --preprocess_ds \ + --target_fps $TARGET_FPS \ + --clean + + else + echo $NAME_AUDIO'='$NAME_VIDEO + + fi + + echo 'Preprocessing target video: '$INPUTDATAPATH/$VIDEO_PATH/$NAME_VIDEO + python preprocessing.py \ + --dataroot $INPUTDATAPATH \ + --dataset_path $OUTPUTDATAPATH/$FEATURESPATH \ + --dataset $VIDEO_PATH \ + --name 
$NAME_VIDEO \ + --preprocess_ds \ + --target_fps $TARGET_FPS \ + --skip_h5py \ + + conda deactivate + + # Twice for memory issues + conda activate pyenv + python preprocessing.py \ + --dataroot $INPUTDATAPATH \ + --dataset_path $OUTPUTDATAPATH/$FEATURESPATH \ + --dataset $VIDEO_PATH \ + --name $NAME_VIDEO \ + --preprocess_tracking \ + --target_fps $TARGET_FPS \ + --clean \ + + ##################### AUDIO2EXPRESSION ##################### + echo -e '\n--------------AUDIO2EXPRESSION---------------\n' + OBJECT=ARD_ZDF + LR=0.00001 + N_ITER=150 + N_ITER_LR_DECAY=50 + RENDERER=$OBJECT + EROSION=1.0 + BATCH_SIZE=16 + MODEL=audio2ExpressionsAttentionTMP4 + RENDERER_TYPE=estimatorAttention + DATASET_MODE=multi_face_audio_eq_tmp_cached + LOSS=RMS + SEQ_LEN=8 + DATE_WITH_TIME=20191105-115332 + A2E_NAME=$MODEL-$RENDERER_TYPE-SL$SEQ_LEN-BS$BATCH_SIZE-$OBJECT-$DATASET_MODE-$LOSS-$DATE_WITH_TIME-look_ahead + EPOCH=latest + + DT=$OUTPUTDATAPATH/'TRANSFERS' + TRANSFER_PATH=$DT/$A2E_NAME + + # AUDIO + AUDIO_LIST=($OUTPUTDATAPATH/$FEATURESPATH/$NAME_AUDIO) + + # VIDEO + VIDEO_LIST=($OUTPUTDATAPATH/$FEATURESPATH/$NAME_VIDEO) + + rm -f $OUTPUTDATAPATH/TRANSFERS/$NAME/list_transfer.txt + cd Audio2ExpressionNet/Inference/ + pwd + for TARGET_ACTOR in "${VIDEO_LIST[@]}"; do + echo 'Training for Target: '$TARGET_ACTOR + + for SOURCE_ACTOR in "${AUDIO_LIST[@]}"; do + echo 'Training for Source: '$SOURCE_ACTOR + # --look_ahead + python transfer.py --look_ahead --base_path $OUTPUTDATAPATH --seq_len $SEQ_LEN --source_actor $SOURCE_ACTOR --target_actor $TARGET_ACTOR --write_no_images --name $A2E_NAME --erosionFactor $EROSION --epoch $EPOCH --display_winsize 512 --rendererType $RENDERER_TYPE --lossType $LOSS --dataroot $DATASETS_DIR/$OBJECT --model $MODEL --netG unet_256 --dataset_mode $DATASET_MODE --norm instance --gpu_ids $GPUID --transfer_path $TRANSFER_PATH + done + done + cd .. + cd .. + + ##################### NEURAL RENDERING ##################### + + echo -e '\n--------------Neural Rendering Training---------------\n' + ############ TRAINING ############ + DP=$OUTPUTDATAPATH/$FEATURESPATH + DATASETS_DIR=$DP/$NAME_VIDEO + OBJECT=$NAME_VIDEO'.h5' + + ############# CHECKPOINTS PATH ############ + CHECKPOINT_DIR=$OUTPUTDATAPATH/checkpoints + + #dataset mode + DATASET_MODE=custom_aligned + # neural texture, not used here + TEX_DIM=128 + TEX_FEATURES=16 + INPUT_NC=5 + NUM_THREADS=1 + # loss + LOSS=VGG + # models + TEXTUREMODEL=DynamicNeuralTextureExpression + # TEXTUREMODEL=DynamicNeuralTextureAudio + MODEL=DynamicNeuralTextures + RENDERER_TYPE=UNET_6_level + # optimizer parameters + LR=0.0001 + N_ITER=50 + N_ITER_LR_DECAY=50 + BATCH_SIZE=8 + SEQ_LEN=8 + RENDERER=$OBJECT + EROSION=0.6 + + # Model name + NAME=$MODEL-$NAME_VIDEO + + DISPLAY_NAME=$MODEL-$NAME_VIDEO + DISPLAY_ID=0 + + cd NeuralRenderingNetwork + + if [ -f "$CHECKPOINT_DIR/$NAME/latest_texture.pth" ]; then + echo "Model $NAME already exists." + + else + echo "Training model $NAME..." 
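+      # Checkpoints land in $CHECKPOINT_DIR/$NAME, so the latest_texture.pth
+      # test above lets re-runs of the pipeline skip training that has
+      # already completed.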
+ + CUDA_VISIBLE_DEVICES=0 python \ + train_renderer.py \ + --textureModel $TEXTUREMODEL \ + --num_threads $NUM_THREADS \ + --input_nc $INPUT_NC \ + --checkpoints_dir $CHECKPOINT_DIR \ + --display_id $DISPLAY_ID \ + --look_ahead \ + --seq_len $SEQ_LEN \ + --save_latest_freq 100000 \ + --no_augmentation \ + --name $NAME \ + --erosionFactor $EROSION \ + --tex_dim $TEX_DIM \ + --tex_features $TEX_FEATURES \ + --rendererType $RENDERER_TYPE \ + --lossType $LOSS \ + --display_env $DISPLAY_NAME \ + --niter $N_ITER \ + --niter_decay $N_ITER_LR_DECAY \ + --dataroot $DATASETS_DIR/$OBJECT \ + --model $MODEL \ + --netG unet_256 \ + --lambda_L1 100 \ + --dataset_mode $DATASET_MODE \ + --no_lsgan \ + --norm instance \ + --pool_size 0 \ + --gpu_ids $GPUID \ + --lr $LR \ + --batch_size $BATCH_SIZE \ + --deca_details + #--continue_train \ + #--epoch_count 40 \ + + fi + + ############ INFERENCE ############ + echo -e '\n--------------Neural Rendering Inference---------------\n' + + FILE_ID_SOURCE=$NAME_AUDIO + FILE_ID_TARGET=$NAME_VIDEO + + DR=$OUTPUTDATAPATH/$FEATURESPATH/$FILE_ID_TARGET/ + SOURCE_DATAROOT=$OUTPUTDATAPATH/$FEATURESPATH/$FILE_ID_SOURCE/$FILE_ID_SOURCE.h5 + TARGET_DATAROOT=$OUTPUTDATAPATH/$FEATURESPATH/$FILE_ID_TARGET/$FILE_ID_TARGET.h5 + EXPR_PATH=$TRANSFER_PATH/$FILE_ID_SOURCE--$FILE_ID_TARGET/expressions + + # Used as start and end + FRAME_ID_SOURCE=-1 + FRAME_ID_TARGET=-1 + + CUDA_VISIBLE_DEVICES=0 python \ + inference_renderer.py \ + --num_threads $NUM_THREADS \ + --input_nc $INPUT_NC \ + --look_ahead \ + --seq_len $SEQ_LEN \ + --no_augmentation \ + --name $NAME \ + --checkpoints_dir $CHECKPOINT_DIR \ + --erosionFactor $EROSION \ + --tex_dim $TEX_DIM \ + --tex_features $TEX_FEATURES \ + --rendererType $RENDERER_TYPE \ + --lossType $LOSS \ + --model $MODEL \ + --netG unet_256 \ + --dataset_mode $DATASET_MODE \ + --norm instance \ + --gpu_ids $GPUID \ + --batch_size $BATCH_SIZE \ + --epoch latest \ + --textureModel $TEXTUREMODEL \ + --dataroot $DR \ + --target_dataroot $TARGET_DATAROOT \ + --source_dataroot $SOURCE_DATAROOT \ + --expr_path $EXPR_PATH \ + --images_target_dir $OUTPUTDATAPATH/$FEATURESPATH \ + --frame_id_source $FRAME_ID_SOURCE \ + --frame_id_target $FRAME_ID_TARGET \ + --deca_details + + #--images_target_dir $IMAGES_TARGET_DIR \ + cd .. 
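+    # Inference writes one image per frame below $OUTPUTDATAPATH/$FEATURESPATH;
+    # the postprocessing step warps those frames back into the original video
+    # frames and muxes the source audio into the final mp4.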
+ + ##################### POSTPROCESSING ##################### + echo -e '\n--------------Postprocessing---------------\n' + python postprocessing.py \ + --file_id_source $FILE_ID_SOURCE \ + --file_id_target $FILE_ID_TARGET \ + --model_name $NAME \ + --frames_path $OUTPUTDATAPATH/$FEATURESPATH/$NAME_VIDEO/'og_frames/' \ + --audio_fname $INPUTDATAPATH/$AUDIO_PATH/$NAME_AUDIO/$NAME_AUDIO'.wav' \ + --dataset_target $OUTPUTDATAPATH/$FEATURESPATH \ + --target_fps $TARGET_FPS \ + --results_out_dir $IMAGES_TARGET_DIR \ + --clean + + done + +done + +conda deactivate diff --git a/NeuralVoicePuppetry/neural-code/full_pipeline_nvp.sh.sb-4428aa14-X9ZNZh/.fuse_hidden0000003300000001 b/NeuralVoicePuppetry/neural-code/full_pipeline_nvp.sh.sb-4428aa14-X9ZNZh/.fuse_hidden0000003300000001 new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/postprocessing.py b/NeuralVoicePuppetry/neural-code/postprocessing.py new file mode 100644 index 0000000..527e192 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/postprocessing.py @@ -0,0 +1,155 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import os +import shutil +import subprocess + +import cv2 +import h5py +import numpy as np +from scipy.ndimage.morphology import binary_erosion +from skimage.metrics import structural_similarity +from skimage.transform import SimilarityTransform +from skimage.transform import warp +from tqdm import tqdm + +from base_options import PostprocessingOptions + + +def warp_back(image, oldimage, tform): + alpha = 1 + new_size = oldimage.shape + + dst_image = (warp(image, tform, output_shape=new_size) * 255.).astype(np.uint8) + + # Mask of non-black pixels. + ones = np.ones_like(dst_image[:, :, 0]).astype(np.uint8) + indexes = np.where(np.all(dst_image == [0, 0, 0], axis=-1)) + ones[indexes] = 0 + eroded_mask = binary_erosion(ones, iterations=5).astype(np.uint8) + + dst_image[eroded_mask == 0] = oldimage[eroded_mask == 0] + + return dst_image + + +def compute_diff(image1, image2, text): + # Convert images to grayscale + image1 = cv2.cvtColor(image1, cv2.COLOR_BGR2GRAY) + image2 = cv2.cvtColor(image2, cv2.COLOR_BGR2GRAY) + + # Compute SSIM between two images + (score, diff) = structural_similarity(image1, image2, full=True) + print(text, ' SSIM: ', score) + diff = (diff * 255).astype("uint8") + + +def write_video_with_audio(save_root, audio_path, output_path, h=512, w=512, fps=25): + font = cv2.FONT_HERSHEY_SIMPLEX + fontScale = 1.0 + fontColor = (255, 255, 255) + thickness = 1 + lineType = 2 + text = 'This video has been manipulated.' 
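+    # cv2.getTextSize returns ((width, height), baseline); the [0] below keeps
+    # just the size so the disclaimer can be centred horizontally and drawn
+    # 20 px above the bottom edge.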
+ + label_width, label_height = cv2.getTextSize(text, font, 1, 2)[0] + print(label_width, label_height) + bottomLeftCornerOfText = (int((w - label_width) / 2), int(h - label_height - 20)) + + fourcc = cv2.VideoWriter_fourcc(*'DIVX') + # fourcc = cv2.VideoWriter_fourcc('M', 'J', 'P', 'G') + video_tmp_path = os.path.join(save_root, 'tmp.avi') + out = cv2.VideoWriter(video_tmp_path, fourcc, fps, (w, h)) + for j in tqdm(range(num_images), position=0, desc='writing video'): + img = cv2.imread(os.path.join(save_root, '%05d.png' % j)) + img = cv2.putText( + img, + text, + bottomLeftCornerOfText, + font, + fontScale, + fontColor, + thickness, + lineType, + ) + out.write(img) + + out.release() + + print("ffmpeg version:") + subprocess.call("ffmpeg -version", shell=True) + + # TODO: Set proper size of video: [-s {w}x{h}] + cmd = f'ffmpeg -y -i "{video_tmp_path}" -i "{audio_path}" -acodec aac -vcodec h264 -shortest -threads 0 -s {w}x{h} "{output_path}"' + # "-pix_fmt yuv420p -profile:v baseline -level 3" + + print(f"ffmpeg cmd: {cmd}") + return_code = subprocess.call(cmd, shell=True) + + if return_code > 0: + raise Exception(f"An error occurred when assembling the output video: ffmpeg return_code={return_code}") + + try: + os.remove(video_tmp_path) # remove the template video + + except FileNotFoundError: + return + + +if __name__ == '__main__': + + opt = PostprocessingOptions().parse() + + DP = opt.dataset_target + NAME = opt.model_name + FILE_ID_SOURCE = opt.file_id_source + FILE_ID_TARGET = opt.file_id_target + frames_path = opt.frames_path + audio_fname = opt.audio_fname + target_fps = opt.target_fps + results_out_dir = opt.results_out_dir + + h5py_path = os.path.join(DP, FILE_ID_TARGET, FILE_ID_TARGET + '.h5') + + cropped = h5py.File(h5py_path, 'r')["frame"] + crop_image_shape = cropped[0].shape[:-1] + + images_out_path = os.path.join(DP, 'images') # results_out_dir + tform_path = os.path.join(DP, FILE_ID_TARGET, 'tform.npy') + + tform = np.load(tform_path) + + final_file = os.path.join(results_out_dir, f'{FILE_ID_SOURCE}_to_{FILE_ID_TARGET}.mp4') + frames_out_path = os.path.join(DP, 'generated_frames') # results_out_dir + os.makedirs(frames_out_path, exist_ok=True) + + num_images = int(len(os.listdir(images_out_path))) + + for index, frame in zip(tqdm(range(num_images)), sorted(os.listdir(frames_path))): + image_path = '%05d.png' % index + + image = cv2.imread(os.path.join(images_out_path, image_path)) + # resize image to cropped image size + image = cv2.resize(image, crop_image_shape) + + old_image = cv2.imread(os.path.join(frames_path, frame)) + + f_tform = SimilarityTransform(matrix=tform[index]) + + res = warp_back(image, old_image, f_tform) + + cv2.imwrite(os.path.join(frames_out_path, image_path), res) + + h, w, _ = res.shape + + write_video_with_audio(frames_out_path, audio_fname, final_file, h=h, w=w, fps=target_fps) + + try: + if opt.clean: + shutil.rmtree(images_out_path) + shutil.rmtree(frames_out_path) + shutil.rmtree(frames_path) + + except Exception as e: + print(e) diff --git a/NeuralVoicePuppetry/neural-code/preprocessing.py b/NeuralVoicePuppetry/neural-code/preprocessing.py new file mode 100644 index 0000000..3bd9622 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/preprocessing.py @@ -0,0 +1,209 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details +import os +import numpy as np +import h5py +import torch +from PIL import Image +from tqdm import tqdm + +from autils.make_h5py import create_h5py_dataset +from 
base_options import PreprocessingOptions + + +def find_file_ext(data_path, dataset_name, name): + dataset = dataset_name + '/' + name + tmp = os.path.join(data_path, dataset, name + '.mp4') + + if os.path.isfile(tmp): + return name + '.mp4' + + else: + tmp = os.path.join(data_path, dataset, name + '.mp3') + + if os.path.isfile(tmp): + return name + '.mp3' + + else: + tmp = os.path.join(data_path, dataset, name + '.wav') + if os.path.isfile(tmp): + return name + '.wav' + + else: + tmp = os.path.join(data_path, dataset, name + '.avi') + + if os.path.isfile(tmp): + return name + '.avi' + + else: + print('No input file with given name') + exit() + + +def deepspeech_preprocessing(name, target_dir, folder_videos, video_name, target_base, type, target_fps): + from autils.deepspeech_features import extract_ds + print(f'\n\nExtracting DeepSpeech features for {name}..\n\n') + + if not os.path.isfile(os.path.join(target_dir, target_dir.split("/")[-1] + '.h5')): + extract_ds(folder_videos=folder_videos, + file_id=video_name.split("/")[-1][:-4], + target_base=target_base, + target_name=name, + type=type, + target_fps=target_fps) + + else: + print('Already done.') + + +def video_tracking(tracker, target_dir): + + exp_dir = os.path.join(target_dir, 'expressions') + os.makedirs(exp_dir, exist_ok=True) + + h5_path = os.path.join(target_dir, target_dir.split("/")[-1] + '.h5') + if os.path.isfile(h5_path): + data = h5py.File(h5_path, 'r') + if "ep" in data.keys(): + print('Already done.') + return + + codedict_dir = os.path.join(target_dir, 'DECA_codedicts') + os.makedirs(codedict_dir, exist_ok=True) + + uv_dir = os.path.join(target_dir, 'uv') + os.makedirs(uv_dir, exist_ok=True) + + frames_dir = os.path.join(target_dir, 'frames') + os.makedirs(frames_dir, exist_ok=True) + + deca_details_dir = os.path.join(target_dir, 'deca_details') + os.makedirs(deca_details_dir, exist_ok=True) + + mask_dir = os.path.join(target_dir, 'mask_mouth') + os.makedirs(mask_dir, exist_ok=True) + + tforms_file = os.path.join(target_dir, 'tform.npy') + + expressions = [] + tforms = [] + + # Load video + for i in tqdm(range(len(tracker.testdata))): + images = tracker.testdata[i]['image'].to(tracker.device)[None, ...] + tform = tracker.testdata[i]['tform'] + tform = np.array(tform.params) + # og_image = tracker.testdata[i]['original_image'].cpu() + name = tracker.testdata[i]['imagename'] + + # save image and tfrom + to_save = images[0].permute(1, 2, 0).cpu().numpy() + to_save = to_save * 255. 
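+        # The tracker yields images in [0, 1]; convert to 8-bit so PIL can
+        # write them out as JPEGs.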
+ to_save = to_save.astype(np.uint8) + + filename = os.path.join(frames_dir, '%04d.jpg' % i) + img = Image.fromarray(to_save) + img.save(filename) + + tforms.append(tform) + + # call deca + codedict, opdict, mask = tracker(images) + + # Save codedict + pt_file = os.path.join(codedict_dir, f'codedict_{i}.pt') + torch.save(codedict, pt_file) + + # Save expressions + expression_params = codedict['exp'].cpu().numpy()[0] + pose_params = codedict['pose'].cpu().numpy()[0, 3:] + new_expr = np.concatenate((expression_params, pose_params), axis=0) + + exp_file = os.path.join(exp_dir, f'expr_{i}.npy') + np.save(exp_file, new_expr) + + # Save uv + uv_img = opdict['grid'].cpu()[0] + npy_file = os.path.join(uv_dir, f'uv_{i}.npy') + np.save(npy_file, uv_img) + + # Save mask + mask_img = mask.cpu() + npy_file = os.path.join(mask_dir, f'mask_{i}.npy') + np.save(npy_file, mask_img) + + # Save deca details + deca_details_img = opdict['detail_normal_images'][0].permute(1, 2, 0).cpu().numpy() + npy_file = os.path.join(deca_details_dir, f'deca_details_{i}.npy') + np.save(npy_file, deca_details_img) + + tforms = np.array(tforms) + print(tforms.shape) + np.save(tforms_file, tforms) + + +def preprocess_video(name, folder_videos, target_base, video_name, preprocess_ds, preprocess_tracking, skip_h5py, type, target_fps, clean): + print('Video name: ', video_name) + target_dir = os.path.join(target_base, name) + print('target_dir: ', target_dir) + target_file_name = video_name.split("/")[-1][:-4] + print('target_file_name: ', target_file_name) + print('folder_videos: ', folder_videos) + + frames_folder = target_base + '/' + target_file_name + '/og_frames' + print('Video folder: ', frames_folder) + + if preprocess_ds: + deepspeech_preprocessing(name, target_dir, folder_videos, video_name, target_base, type, target_fps) + + if preprocess_tracking: + from autils.deca_flame_fitting import DECA_tracker + + print(f'\n\nExtracting Tracking information for {name}..\n\n') + if os.path.isdir(frames_folder): + + tracker = DECA_tracker(frames_folder) + + else: + tracker = DECA_tracker(folder_videos + '/' + target_file_name + '.mp4', target_dir=frames_folder) + + video_tracking(tracker, target_dir) + + if skip_h5py: + return + + else: + create_h5py_dataset(target_dir, clean) + + +if __name__ == '__main__': + + opt = PreprocessingOptions().parse() + + data_path = opt.dataroot + dataset_path = opt.dataset_path + dataset = opt.dataset + name = opt.name + target_fps = opt.target_fps + skip_h5py = opt.skip_h5py + clean = opt.clean + + full_path = find_file_ext(data_path, dataset, name) + + target_base = dataset_path + + # Flags to control preprocessing + preprocess_ds = opt.preprocess_ds + preprocess_tracking = opt.preprocess_tracking + + file_name = f'{full_path.split("/")[-1][:-4]}' + print(file_name) + folder_videos = os.path.join(data_path, dataset, file_name) + + if full_path.endswith("mp3") or full_path.endswith("wav"): + preprocess_video(file_name, folder_videos, target_base, full_path, + preprocess_ds, False, skip_h5py, 'audio', target_fps, clean) + + else: + preprocess_video(file_name, folder_videos, target_base, full_path, + preprocess_ds, preprocess_tracking, skip_h5py, 'video', target_fps, clean) diff --git a/NeuralVoicePuppetry/neural-code/preprocessing.sh b/NeuralVoicePuppetry/neural-code/preprocessing.sh new file mode 100644 index 0000000..d53f59d --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/preprocessing.sh @@ -0,0 +1,25 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see 
AUTHORS.txt for details + +GPUID=0 + +BASE=`pwd` +# DATA PATHS +DATAROOT=/home/alberto/data/videosynth/ +DATASETPATH=/home/alberto/NeuralVoicePuppetry/datasets/ + +# SOURCE +DATASET_SOURCE=External +VIDEOTYPE_SOURCE=Youtube +#NAME_SOURCE=Russian_guy +NAME_SOURCE=Bill_Maher + +##################### PREPROCESSING ##################### + +echo 'Preprocessing video: '$DATAROOT$DATASET_SOURCE/$VIDEOTYPE_SOURCE/$NAME_SOURCE +python preprocessing.py \ +--dataroot $DATAROOT \ +--dataset_path $DATASETPATH \ +--dataset $DATASET_SOURCE \ +--video_type $VIDEOTYPE_SOURCE \ +--name $NAME_SOURCE \ diff --git a/NeuralVoicePuppetry/neural-code/tests/DECA_uv_test.py b/NeuralVoicePuppetry/neural-code/tests/DECA_uv_test.py new file mode 100644 index 0000000..80426e6 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/tests/DECA_uv_test.py @@ -0,0 +1,25 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import os +import subprocess +from autils.deca_flame_fitting import * +import cv2 +from tqdm import tqdm +import json +import torch + + +test_sample = '/home/alberto/NeuralVoicePuppetry/third/DECA/TestSamples/SRF/355_9414.jpg' +# Flags to control preprocessing + +# Create Tracker object +tracker = DECA_tracker(test_sample) + +# extract face tracking information + + +# Load video +for i in tqdm(range(len(tracker.testdata))): + images = tracker.testdata[i]['image'].to(tracker.device)[None, ...] + codedict = tracker(images) diff --git a/NeuralVoicePuppetry/neural-code/tests/__init__.py b/NeuralVoicePuppetry/neural-code/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/tests/test_rendering_3DDFA.py b/NeuralVoicePuppetry/neural-code/tests/test_rendering_3DDFA.py new file mode 100644 index 0000000..c62d5ee --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/tests/test_rendering_3DDFA.py @@ -0,0 +1,96 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import torch + +from autils.eos_tracker import * +from third._3DDFA_V2.DDVa_tracker import * +from autils.renderer import Renderer +from pytorch3d.renderer import look_at_view_transform + + +def call_renderer(mesh, R, T, K=None): + new_bfm_vertices = torch.FloatTensor(mesh.vertices).unsqueeze(0) + try: + bfm_faces_tensor = torch.FloatTensor(mesh.tvi).unsqueeze(0) + + except AttributeError: + bfm_faces_tensor = torch.FloatTensor(mesh.faces).unsqueeze(0) + + R = torch.FloatTensor(R).unsqueeze(0) + T = torch.FloatTensor(T).unsqueeze(0) + if K is not None: + K = torch.FloatTensor(K).unsqueeze(0) + + print(f'Vertices shape: {new_bfm_vertices.shape}') + print(f'Faces shape: {bfm_faces_tensor.shape}') + print(f'R shape: {R.shape}') + print(f'T shape: {T.shape}') + + # Render with white texture to check if R and T are correct + mesh_renderer = Renderer(device="cuda" if torch.cuda.is_available() else "cpu", fov=60, aspect_ratio=h/w, image_size=(h, w), R=R, T=T, K=K) + rendered_np = mesh_renderer.render_mesh(new_bfm_vertices.cuda(), bfm_faces_tensor.cuda()) + + return rendered_np + + +def combine(img, face_render, alpha=0.6): + final_img = np.array(img) + face_render = np.array(face_render) + mask = np.where(np.all(face_render != (255,255,255), axis=-1)) + final_img[mask] = (1-alpha) * final_img[mask] + (alpha) * face_render[mask] + return final_img + +os.environ['PYOPENGL_PLATFORM'] = 'egl' + +img_path = 'obama_test.jpeg' +img = imread(img_path) +h, w = img.shape[:2] + +print(f'Height: {h}, 
Width: {w}') +# +# 3DDVA tracker +_3ddva_tracker = DDVA_Tracker() +mesh_tri, R, T, alpha_shp, alpha_exp = _3ddva_tracker(img) +mesh_tri.show() +print(f'\nRotation:\n{R}') +print(f'\nTranslation matrix:\n{T}') + + +# rendered_np = call_renderer(mesh_tri, R, T) +# plt.imshow(rendered_np) +# plt.savefig('render.png') +# plt.show() +# +# rendered_np = call_renderer(mesh_tri, np.linalg.inv(R), -T) +# plt.imshow(rendered_np) +# plt.savefig('render.png') +# plt.show() + +camera = trimesh.scene.cameras.Camera(resolution=(h, w), fov=(63, 63), z_near=0.01, z_far=1000.0) +transform = camera.look_at(mesh_tri.vertices) + +R = np.identity(3) +print(f'\nR:\n{R}') + +T = np.zeros(3) +print(f'\nT:\n{T}') + +rendered_np = call_renderer(mesh_tri, R, T) +plt.imshow(rendered_np) +plt.savefig('render.png') +plt.show() +# +# for i in range(3): +# R = - R +# R[i,i] = - R[i,i] +# rendered_np = call_renderer(mesh_tri, R, T) +# plt.imshow(rendered_np) +# plt.savefig('render.png') +# plt.show() + +########################## + + + + diff --git a/NeuralVoicePuppetry/neural-code/tests/test_rendering_EOS.py b/NeuralVoicePuppetry/neural-code/tests/test_rendering_EOS.py new file mode 100644 index 0000000..a8424cf --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/tests/test_rendering_EOS.py @@ -0,0 +1,122 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import torch + +from autils.eos_tracker import * +from third._3DDFA_V2.DDVa_tracker import * +from autils.renderer import Renderer +from pytorch3d.renderer import look_at_view_transform +import pytorch3d +from scipy.spatial.transform import Rotation + + +def call_renderer(mesh, R, T, K=None): + new_bfm_vertices = torch.FloatTensor(mesh.vertices).unsqueeze(0) + try: + bfm_faces_tensor = torch.FloatTensor(mesh.tvi).unsqueeze(0) + + except AttributeError: + bfm_faces_tensor = torch.FloatTensor(mesh.faces).unsqueeze(0) + + R = torch.FloatTensor(R).unsqueeze(0) + T = torch.FloatTensor(T).unsqueeze(0) + if K is not None: + K = torch.FloatTensor(K).unsqueeze(0) + + print(f'Vertices shape: {new_bfm_vertices.shape}') + print(f'Faces shape: {bfm_faces_tensor.shape}') + print(f'R shape: {R.shape}') + print(f'T shape: {T.shape}') + + # Render with white texture to check if R and T are correct + mesh_renderer = Renderer(device="cuda" if torch.cuda.is_available() else "cpu", fov=60, aspect_ratio=h/w, image_size=(h, w), R=R, T=T, K=K) + rendered_np = mesh_renderer.render_mesh(new_bfm_vertices.cuda(), bfm_faces_tensor.cuda()) + + return rendered_np + + +def combine(img, face_render, alpha=0.6): + final_img = np.array(img) + face_render = np.array(face_render) + mask = np.where(np.all(face_render != (255,255,255), axis=-1)) + final_img[mask] = (1-alpha) * final_img[mask] + (alpha) * face_render[mask] + return final_img + +os.environ['PYOPENGL_PLATFORM'] = 'egl' + +img_path = 'obama_test.jpeg' +img = imread(img_path) +h, w = img.shape[:2] + +print(f'Height: {h}, Width: {w}') +# +# Launch tracker to get BFM model +eos_tracker = EOS_Tracker(PATH_TO_EOS, PREDICTOR_PATH) +mesh, pose, shape_coeffs, blendshape_coeffs = eos_tracker(img) + +# # Store mesh in trimesh +mesh_tri = trimesh.Trimesh(vertices=mesh.vertices, faces=mesh.tvi) + +camera = trimesh.scene.cameras.Camera(resolution=(h,w), fov=(63, 63), z_near=0.01, z_far=1000.0) +transform = camera.look_at(mesh_tri.vertices) + +# R_trimesh = - transform[0:3, 0:3] +# R_trimesh[1,1] = - R_trimesh[1,1] # flip z to look from other side +# print(f'\nR_trimesh:\n{R_trimesh}') +# +# 
T_trimesh = transform[0:3, -1] +# print(f'\nT_trimesh:\n{T_trimesh}') + +p = pose.get_projection() # from world coordinates to view coordinates +mv = pose.get_modelview() # From object to world coordinates +mv_inverse = np.linalg.inv(mv) +rot = pose.get_rotation() +vm = viewport_matrix(w, h) +fm = vm @ p @ mv + +v = np.asarray(mesh.vertices) +v = np.hstack([v, np.ones(v.shape[0])]) +print(v.shape) +print(v[0]) + +# # Add rotation +# R_z = Rotation.from_euler('xyz', [0, 0, 90], degrees=True).as_matrix() +# R_y = Rotation.from_euler('xyz', [0, -180, 0], degrees=True).as_matrix() +# +# # R = mv_inverse[0:3, 0:3] @ R_z +# R = mv[0:3, 0:3] @ R_z +# # R = R_pytorch +# +# # T = mv_inverse[0:3, -1] @ R_z +# T = mv[0:3, -1] @ R_z +# # T = T_trimesh + +R = np.identity(3) +print(f'\nR:\n{R}') + +T = np.zeros(3) +print(f'\nT:\n{T}') + +print(f'\nRotation:\n{R}') +print(f'\nTranslation matrix:\n{T}') + +rendered_np = call_renderer(mesh, R, T) +plt.imshow(rendered_np) +plt.savefig('render.png') +plt.show() + +# v = np.asarray(mesh.vertices) +# R_z = Rotation.from_euler('xyz', [0, 0, 90], degrees=True).as_matrix() +# v2 = v @ mv[0:3,0:3] @ p[0:3,0:3] @ R_z +# plt.figure(figsize=(5,5)) +# plt.axis('equal') +# plt.scatter(v2[:,0], v2[:,1], s=0.5) +# plt.savefig('adios.png') +# plt.show() + +# ''' try combine function to put mask on img ''' +# combined_img = combine(img, rendered_np) +# plt.imshow(combined_img) +# plt.show() + diff --git a/NeuralVoicePuppetry/neural-code/text_to_face_pipeline.sh b/NeuralVoicePuppetry/neural-code/text_to_face_pipeline.sh new file mode 100644 index 0000000..fa2cdf8 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/text_to_face_pipeline.sh @@ -0,0 +1,293 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +GPUID=0 + +BASE=`pwd` +echo $BASE +export GOOGLE_APPLICATION_CREDENTIALS="/home/alberto/ttsnvp-a5777a0c2445.json" + +# DATA PATHS +DATAROOT=/home/alberto/data/videosynth/ +DATASETPATH=/home/alberto/NeuralVoicePuppetry/datasets/ + +### SOURCE ### +# Italian +DATASET_SOURCE=Synthetic +VIDEOTYPE_SOURCE=Italian +NAME_SOURCE_LIST=( clara ) +LANGUAGE_SOURCE='it' +LANGUAGE_TARGET_LIST=( 'it' 'en' 'es' ) + +## English +#DATASET_SOURCE=Synthetic +#VIDEOTYPE_SOURCE=English +#NAME_SOURCE_LIST=( how_are_you ) +#LANGUAGE_SOURCE='en' +#LANGUAGE_TARGET='it' + +###################################### + +### TARGET ### +# Jennifer +DATASET_TARGET=SRF_anchor_short +VIDEOTYPE_TARGET=Halbtotale +NAME_TARGET=355_9415 + +## Vicky +#DATASET_TARGET=External +#VIDEOTYPE_TARGET=Italian +#NAME_TARGET=Vicky_EAC + +TARGET_FPS=25 + +##################### PREPROCESSING ##################### +for NAME_SOURCE in "${NAME_SOURCE_LIST[@]}" +do + OG_NAME=$NAME_SOURCE + for LANGUAGE_TARGET in "${LANGUAGE_TARGET_LIST[@]}" + do + echo 'Generating audio from source text: '$DATAROOT$DATASET_SOURCE/$VIDEOTYPE_SOURCE/$NAME_SOURCE + python text_to_speach.py \ + --dataroot $DATAROOT \ + --dataset_path $DATASETPATH \ + --dataset $DATASET_SOURCE \ + --video_type $VIDEOTYPE_SOURCE \ + --name $NAME_SOURCE \ + --language_source $LANGUAGE_SOURCE \ + --language_target $LANGUAGE_TARGET \ + + NAME_SOURCE=$OG_NAME'_'$LANGUAGE_TARGET + + echo 'Preprocessing source video: '$DATAROOT$DATASET_SOURCE/$VIDEOTYPE_SOURCE/$NAME_SOURCE + python preprocessing.py \ + --dataroot $DATAROOT \ + --dataset_path $DATASETPATH \ + --dataset $DATASET_SOURCE \ + --video_type $VIDEOTYPE_SOURCE \ + --name $NAME_SOURCE \ + --preprocess_ds \ + --target_fps $TARGET_FPS \ + + echo 
'Preprocessing target video: '$DATAROOT$DATASET_TARGET/$VIDEOTYPE_TARGET/$NAME_TARGET + python preprocessing.py \ + --dataroot $DATAROOT \ + --dataset_path $DATASETPATH \ + --dataset $DATASET_TARGET \ + --video_type $VIDEOTYPE_TARGET \ + --name $NAME_TARGET \ + --preprocess_ds \ + --target_fps $TARGET_FPS \ + --skip_h5py \ + + # Twice for memory issues + python preprocessing.py \ + --dataroot $DATAROOT \ + --dataset_path $DATASETPATH \ + --dataset $DATASET_TARGET \ + --video_type $VIDEOTYPE_TARGET \ + --name $NAME_TARGET \ + --preprocess_tracking \ + --target_fps $TARGET_FPS \ + + ##################### AUDIO2EXPRESSION ##################### + echo -e '\n--------------AUDIO2EXPRESSION---------------\n' + OBJECT=ARD_ZDF + LR=0.00001 + N_ITER=150 + N_ITER_LR_DECAY=50 + RENDERER=$OBJECT + EROSION=1.0 + BATCH_SIZE=16 + MODEL=audio2ExpressionsAttentionTMP4 + RENDERER_TYPE=estimatorAttention + DATASET_MODE=multi_face_audio_eq_tmp_cached + LOSS=RMS + SEQ_LEN=8 + DATE_WITH_TIME=20191105-115332 + A2E_NAME=$MODEL-$RENDERER_TYPE-SL$SEQ_LEN-BS$BATCH_SIZE-$OBJECT-$DATASET_MODE-$LOSS-$DATE_WITH_TIME-look_ahead + EPOCH=latest + + # SOURCE + SOURCE_ACTOR_LIST=( \ + $DATASETPATH$DATASET_SOURCE/$VIDEOTYPE_SOURCE'_'$NAME_SOURCE \ + ) + + # TARGET + TARGET_ACTOR_LIST=( \ + $DATASETPATH$DATASET_TARGET/$VIDEOTYPE_TARGET'_'$NAME_TARGET \ + ) + + rm -f ./datasets/TRANSFERS/$NAME/list_transfer.txt + cd Audio2ExpressionNet/Inference/ + pwd + for TARGET_ACTOR in "${TARGET_ACTOR_LIST[@]}" + do + echo 'Training for Target: '$TARGET_ACTOR + + for SOURCE_ACTOR in "${SOURCE_ACTOR_LIST[@]}" + do + echo 'Training for Source: '$SOURCE_ACTOR + # --look_ahead + python transfer.py --look_ahead --seq_len $SEQ_LEN --source_actor $SOURCE_ACTOR --target_actor $TARGET_ACTOR --write_no_images --name $A2E_NAME --erosionFactor $EROSION --epoch $EPOCH --display_winsize 512 --rendererType $RENDERER_TYPE --lossType $LOSS --dataroot $DATASETS_DIR/$OBJECT --model $MODEL --netG unet_256 --dataset_mode $DATASET_MODE --norm instance --gpu_ids $GPUID + done + done + cd .. + cd .. + + ##################### NEURAL RENDERING ##################### + + echo -e '\n--------------Neural Rendering Training---------------\n' + ############ TRAINING ############ + DP=$DATASETPATH$DATASET_TARGET + SUB=$VIDEOTYPE_TARGET + DATASETS_DIR=$DP/$VIDEOTYPE_TARGET'_'$NAME_TARGET + OBJECT=$VIDEOTYPE_TARGET'_'$NAME_TARGET'.h5' + + #dataset mode + DATASET_MODE=custom_aligned + # neural texture, not used here + TEX_DIM=128 + TEX_FEATURES=16 + INPUT_NC=5 + NUM_THREADS=1 + # loss + LOSS=VGG + # models + TEXTUREMODEL=DynamicNeuralTextureExpression + # TEXTUREMODEL=DynamicNeuralTextureAudio + MODEL=DynamicNeuralTextures + RENDERER_TYPE=UNET_6_level + # optimizer parameters + LR=0.0001 + N_ITER=50 + N_ITER_LR_DECAY=50 + BATCH_SIZE=8 + SEQ_LEN=8 + RENDERER=$OBJECT + EROSION=0.6 + + # Model name + NAME=$MODEL-$VIDEOTYPE_TARGET'_'$NAME_TARGET + #NAME=$TEXTUREMODEL-$VIDEOTYPE_TARGET'_'$NAME_TARGET + #NAME=DynamicNeuralTextures-Halbtotale_355_9415 + + DISPLAY_NAME=$MODEL-$VIDEOTYPE_TARGET'_'$NAME_TARGET + #DISPLAY_NAME=$TEXTUREMODEL-$VIDEOTYPE_TARGET'_'$NAME_TARGET + DISPLAY_ID=0 + #NAME=DynamicNeuralTextures-UNET_6_level-DynamicNeuralTextureExpression-SL8-BS8-Halbtotale_355_9415.h5-custom_aligned-VGG-20210311-095742-look_ahead_mask_mouth + + cd NeuralRenderingNetwork + + if [ -d "checkpoints/$NAME" ]; then + echo "Model $NAME already exists." + + + else + echo "Training model $NAME..." 
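+ # NOTE (assumption): --niter/--niter_decay follow the pix2pix-style schedule
+ # (N_ITER epochs at the base LR, then N_ITER_LR_DECAY epochs of linear decay),
+ # and --deca_details presumably feeds the DECA detail normal maps saved during
+ # preprocessing to the renderer as an extra input.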
+ + CUDA_VISIBLE_DEVICES=0 python \ + train_renderer.py \ + --textureModel $TEXTUREMODEL \ + --num_threads $NUM_THREADS \ + --input_nc $INPUT_NC \ + --display_id $DISPLAY_ID \ + --look_ahead \ + --seq_len $SEQ_LEN \ + --save_latest_freq 100000 \ + --no_augmentation \ + --name $NAME \ + --erosionFactor $EROSION \ + --tex_dim $TEX_DIM \ + --tex_features $TEX_FEATURES \ + --rendererType $RENDERER_TYPE \ + --lossType $LOSS \ + --display_env $DISPLAY_NAME \ + --niter $N_ITER \ + --niter_decay $N_ITER_LR_DECAY \ + --dataroot $DATASETS_DIR/$OBJECT \ + --model $MODEL \ + --netG unet_256 \ + --lambda_L1 100 \ + --dataset_mode $DATASET_MODE \ + --no_lsgan \ + --norm instance \ + --pool_size 0 \ + --gpu_ids $GPUID \ + --lr $LR \ + --batch_size $BATCH_SIZE \ + --deca_details \ + #--continue_train \ + #--epoch_count 15 \ + + fi + + ############ INFERENCE ############ + echo -e '\n--------------Neural Rendering Inference---------------\n' + + DT=$DATASETPATH'TRANSFERS' + TRANSFER_PATH=$DT/$A2E_NAME + CHECKPOINT_DIR=$BASE/NeuralRenderingNetwork/checkpoints + + FILE_ID_SOURCE=$VIDEOTYPE_SOURCE'_'$NAME_SOURCE + FILE_ID_TARGET=$VIDEOTYPE_TARGET'_'$NAME_TARGET + + DR=$DATASETPATH$DATASET_TARGET/$FILE_ID_TARGET/ + SOURCE_DATAROOT=$DATASETPATH$DATASET_SOURCE/$FILE_ID_SOURCE/$FILE_ID_SOURCE.h5 + TARGET_DATAROOT=$DATASETPATH$DATASET_TARGET/$FILE_ID_TARGET/$FILE_ID_TARGET.h5 + EXPR_PATH=$TRANSFER_PATH/$FILE_ID_SOURCE--$FILE_ID_TARGET/expressions + + IMAGES_TARGET_DIR=$BASE/results/inference/$NAME/$FILE_ID_SOURCE"_to_"$FILE_ID_TARGET + + # Used as start and end + FRAME_ID_SOURCE=-1 + FRAME_ID_TARGET=-1 + + CUDA_VISIBLE_DEVICES=0 python \ + inference_renderer.py \ + --num_threads $NUM_THREADS \ + --input_nc $INPUT_NC \ + --look_ahead \ + --seq_len $SEQ_LEN \ + --no_augmentation \ + --name $NAME \ + --checkpoints_dir $CHECKPOINT_DIR \ + --erosionFactor $EROSION \ + --tex_dim $TEX_DIM \ + --tex_features $TEX_FEATURES \ + --rendererType $RENDERER_TYPE \ + --lossType $LOSS \ + --model $MODEL \ + --netG unet_256 \ + --dataset_mode $DATASET_MODE \ + --norm instance \ + --gpu_ids $GPUID \ + --batch_size $BATCH_SIZE \ + --epoch latest \ + --textureModel $TEXTUREMODEL \ + --dataroot $DR \ + --target_dataroot $TARGET_DATAROOT \ + --source_dataroot $SOURCE_DATAROOT \ + --expr_path $EXPR_PATH \ + --images_target_dir $IMAGES_TARGET_DIR \ + --frame_id_source $FRAME_ID_SOURCE \ + --frame_id_target $FRAME_ID_TARGET \ + --deca_details \ + + cd .. 
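+ # NOTE: postprocessing warps each generated crop back into the original
+ # full-resolution frames using the similarity transforms stored in tform.npy,
+ # then muxes the source audio over the assembled video with ffmpeg.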
+ + ##################### POSTPROCESSING ##################### + echo -e '\n--------------Postprocessing---------------\n' + python postprocessing.py \ + --file_id_source $FILE_ID_SOURCE \ + --file_id_target $FILE_ID_TARGET \ + --model_name $NAME \ + --frames_path $DATAROOT$DATASET_TARGET/$VIDEOTYPE_TARGET/$NAME_TARGET \ + --audio_fname $DATAROOT$DATASET_SOURCE/$VIDEOTYPE_SOURCE/$NAME_SOURCE'.wav' \ + --dataset_target $DATASETPATH$DATASET_TARGET \ + --target_fps $TARGET_FPS + + done +done + + diff --git a/NeuralVoicePuppetry/neural-code/text_to_speach.py b/NeuralVoicePuppetry/neural-code/text_to_speach.py new file mode 100644 index 0000000..5cfc0c2 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/text_to_speach.py @@ -0,0 +1,124 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import os +import json +import glob +from base_options import Text2SpeachOptions +from googletrans import Translator +from google.cloud import texttospeech + + +def find_file_ext(data_path, dataset, video_type, name): + tmp = os.path.join(data_path, dataset, video_type, name + '.txt') + + if os.path.isfile(tmp): + return tmp + + else: + print('No text file!') + exit() + +def translate(source_path, target_path, language_source, language_target): + f = open(source_path, 'r') + source_txt = f.read() + f.close() + + if not(language_source==language_target): + translator = Translator() + print(f'Source text in {language_source}:\n{source_txt}') + translation = translator.translate(source_txt,src=language_source, dest=language_target).text + print(f'Translated text in {language_target}:\n{translation}') + + else: + translation = source_txt + + f = open(target_path, 'w') + f.write(translation) + f.close() + + return + +def list_voices(language_code=None): + client = texttospeech.TextToSpeechClient() + response = client.list_voices(language_code=language_code) + voices = sorted(response.voices, key=lambda voice: voice.name) + + print(f" Voices: {len(voices)} ".center(60, "-")) + for i, voice in enumerate(voices): + languages = ", ".join(voice.language_codes) + name = voice.name + gender = texttospeech.SsmlVoiceGender(voice.ssml_gender).name + rate = voice.natural_sample_rate_hertz + print(f"{i} | {languages:<8} | {name:<24} | {gender:<8} | {rate:,} Hz") + + return voices + +def text_to_wav(voice_name, text, target_audio_path): + language_code = "-".join(voice_name.split("-")[:2]) + text_input = texttospeech.SynthesisInput(text=text) + voice_params = texttospeech.VoiceSelectionParams( + language_code=language_code, name=voice_name + ) + audio_config = texttospeech.AudioConfig( + audio_encoding=texttospeech.AudioEncoding.LINEAR16 + ) + + client = texttospeech.TextToSpeechClient() + response = client.synthesize_speech( + input=text_input, voice=voice_params, audio_config=audio_config + ) + + filename = f"{target_audio_path}" + with open(filename, "wb") as out: + out.write(response.audio_content) + print(f'Audio content written to "{filename}"') + +# def text2speach(text_path, language): +# f = open(text_path, 'r') +# mytext = f.read() +# f.close() +# +# myobj = gTTS(text=mytext, lang=language, slow=False) +# target_audio_path = text_path[:-4]+'.wav' +# myobj.save(target_audio_path) +# +# return + +def text2speach(text_path, language): + f = open(text_path, 'r') + mytext = f.read() + f.close() + + voices = list_voices(language) + idx = input('Pick voice index: ') + voice = voices[int(idx)] + + target_audio_path = text_path[:-4]+'.wav' + text_to_wav(voice.name, 
mytext, target_audio_path) + + return + +if __name__ == '__main__': + + opt = Text2SpeachOptions().parse() + + data_path = opt.dataroot + dataset_path = opt.dataset_path + dataset = opt.dataset + video_type = opt.video_type + name = opt.name + language_source = opt.language_source + language_target = opt.language_target + + source_path = find_file_ext(data_path, dataset, video_type, name) + target_path = source_path[:-4]+'_'+language_target+'.txt' + + # Translate input txt to target language + translate(source_path, target_path, language_source, language_target) + + # Text to speech + text2speach(target_path, language_target) + + + diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/Doc/images/DECA_evaluation_github.png b/NeuralVoicePuppetry/neural-code/third/DECA/Doc/images/DECA_evaluation_github.png new file mode 100644 index 0000000..a690d88 Binary files /dev/null and b/NeuralVoicePuppetry/neural-code/third/DECA/Doc/images/DECA_evaluation_github.png differ diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/Doc/images/DECA_performance.png b/NeuralVoicePuppetry/neural-code/third/DECA/Doc/images/DECA_performance.png new file mode 100755 index 0000000..1d483c6 Binary files /dev/null and b/NeuralVoicePuppetry/neural-code/third/DECA/Doc/images/DECA_performance.png differ diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/Doc/images/soubhik.gif b/NeuralVoicePuppetry/neural-code/third/DECA/Doc/images/soubhik.gif new file mode 100644 index 0000000..9cee13f Binary files /dev/null and b/NeuralVoicePuppetry/neural-code/third/DECA/Doc/images/soubhik.gif differ diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/README.md b/NeuralVoicePuppetry/neural-code/third/DECA/README.md new file mode 100755 index 0000000..359dce6 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/README.md @@ -0,0 +1,125 @@ +# DECA: Detailed Expression Capture and Animation + +
+[teaser image: input image, aligned reconstruction, animation with various poses & expressions]
+
+This is the official Pytorch implementation of DECA.
+
+DECA reconstructs a 3D head model with detailed facial geometry from a single input image. The resulting 3D head model can be easily animated. Please refer to the [arXiv paper](https://arxiv.org/abs/2012.04012) for more details.
+
+The main features:
+
+* **Reconstruction:** produces head pose, shape, detailed face geometry, and lighting information from a single image.
+* **Animation:** animates the face with realistic wrinkle deformations.
+* **Robustness:** tested on facial images in unconstrained conditions. Our method is robust to various poses, illuminations, and occlusions.
+* **Accuracy:** state-of-the-art 3D face shape reconstruction on the [NoW Challenge](https://ringnet.is.tue.mpg.de/challenge) benchmark dataset.
+
+## Getting Started
+Clone the repo:
+  ```bash
+  git clone https://github.com/YadiraF/DECA
+  cd DECA
+  ```
+
+### Requirements
+* Python 3.7 (numpy, skimage, scipy, opencv)
+* PyTorch >= 1.6 (pytorch3d)
+* face-alignment (optional, for face detection)
+  You can run
+  ```bash
+  pip install -r requirements.txt
+  ```
+  Or use a virtual environment by running
+  ```bash
+  bash install_pip.sh
+  ```
+  Then follow the instructions to install [pytorch3d](https://github.com/facebookresearch/pytorch3d/blob/master/INSTALL.md).
+
+### Usage
+1. Prepare data
+   a. Download the [FLAME model](https://flame.is.tue.mpg.de/downloads), choose **FLAME 2020** and unzip it, then copy 'generic_model.pkl' into ./data
+   b. Download the [DECA trained model](https://drive.google.com/file/d/1rp8kdyLPvErw2dTmqtjISRVvQLj6Yzje/view?usp=sharing) and put it in ./data (**no unzip required**)
+   c. (Optional) Follow the instructions for the [Albedo model](https://github.com/TimoBolkart/BFM_to_FLAME) to get 'FLAME_albedo_from_BFM.npz' and put it into ./data
+
+2. Run demos
+   a. **reconstruction**
+   ```bash
+   python demos/demo_reconstruct.py -i TestSamples/examples --saveDepth True --saveObj True
+   ```
+   to visualize the predicted 2D landmarks, 3D landmarks (red means non-visible points), coarse geometry, detailed geometry, and depth.
+
+   [images: example reconstruction results]
+
+   You can also generate an obj file (which can be opened with Meshlab) that includes the texture extracted from the input image.
+
+   Please run `python demos/demo_reconstruct.py --help` for more details.
+
+   b. **expression transfer**
+   ```bash
+   python demos/demo_transfer.py
+   ```
+   Given an image, you can reconstruct its 3D face, then animate it by transferring expressions from other images.
+   Using Meshlab to open the detailed mesh obj file, you can see something like this:
+   [image: expression transfer example, see Doc/images/soubhik.gif]
+   (Thanks to Soubhik for allowing us to use his face ^_^)
+
+   Note that you need to set '--useTex True' to get the full texture.
+
+   c. For the [teaser gif](https://github.com/YadiraF/DECA/results/teaser.gif) (**reposing** and **animation**)
+   ```bash
+   python demos/demo_teaser.py
+   ```
+
+   More demos and training code are coming soon.
+
+## Evaluation
+DECA (ours) achieves a 9% lower mean shape reconstruction error on the [NoW Challenge](https://ringnet.is.tue.mpg.de/challenge) dataset compared to the previous state-of-the-art method.
+The left figure compares the cumulative error of our approach and other recent methods (RingNet and Deng et al. have nearly identical performance, so their curves overlap each other). Here we use point-to-surface distance as the error metric, following the NoW Challenge.
+
+[figures: Doc/images/DECA_evaluation_github.png, Doc/images/DECA_performance.png]
+
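+(Point-to-surface distance here means the distance from each ground-truth scan point to its closest point on the predicted mesh surface; the curves plot the cumulative distribution of these distances.)
+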
+For more details of the evaluation, please check our [arXiv paper](https://arxiv.org/abs/2012.04012).
+
+## Citation
+If you find our work useful for your research, please consider citing:
+```
+@inproceedings{deca2020,
+  title={Learning an Animatable Detailed {3D} Face Model from In-The-Wild Images},
+  author={Feng, Yao and Feng, Haiwen and Black, Michael J. and Bolkart, Timo},
+  booktitle = {arxiv},
+  month = {Dec},
+  year = {2020}
+}
+```
+
+## License
+This code and model are available for non-commercial scientific research purposes as defined in the [LICENSE](https://github.com/YadiraF/DECA/blob/master/LICENSE) file.
+By downloading and using the code and model you agree to the terms in the [LICENSE](https://github.com/YadiraF/DECA/blob/master/LICENSE).
+
+## Acknowledgements
+For functions or scripts that are based on external sources, we acknowledge the origin individually in each file.
+Here are some great resources we benefit from:
+- [FLAME_PyTorch](https://github.com/soubhiksanyal/FLAME_PyTorch) and [TF_FLAME](https://github.com/TimoBolkart/TF_FLAME) for the FLAME model
+- [Pytorch3D](https://pytorch3d.org/), [neural_renderer](https://github.com/daniilidis-group/neural_renderer), [SoftRas](https://github.com/ShichenLiu/SoftRas) for rendering
+- [kornia](https://github.com/kornia/kornia) for image/rotation processing
+- [face-alignment](https://github.com/1adrianb/face-alignment) for cropping
+
+We would also like to thank other recent public 3D face reconstruction works that allow us to easily perform quantitative and qualitative comparisons :)
+[RingNet](https://github.com/soubhiksanyal/RingNet),
+[Deep3DFaceReconstruction](https://github.com/microsoft/Deep3DFaceReconstruction/blob/master/renderer/rasterize_triangles.py),
+[Nonlinear_Face_3DMM](https://github.com/tranluan/Nonlinear_Face_3DMM),
+[3DDFA-v2](https://github.com/cleardusk/3DDFA_V2),
+[extreme_3d_faces](https://github.com/anhttran/extreme_3d_faces),
+[facescape](https://github.com/zhuhao-nju/facescape)
+
diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/__init__.py b/NeuralVoicePuppetry/neural-code/third/DECA/__init__.py new file mode 100644 index 0000000..e69de29
diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/data/deca_model.tar b/NeuralVoicePuppetry/neural-code/third/DECA/data/deca_model.tar new file mode 100644 index 0000000..c04160f --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/data/deca_model.tar @@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e714ed293054cba5eea9c96bd3b6b57880074cd84b3fd00d606cbaf0bee7c5c2
+size 434142943
diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/data/fixed_displacement_256.npy b/NeuralVoicePuppetry/neural-code/third/DECA/data/fixed_displacement_256.npy new file mode 100755 index 0000000..a13befd --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/data/fixed_displacement_256.npy @@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:296d01113d67bdaace6f6fe741f7d855e58dc0707f0bb113758520ffa5d8cb93
+size 524416
diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/data/generic_model.pkl b/NeuralVoicePuppetry/neural-code/third/DECA/data/generic_model.pkl new file mode 100644 index 0000000..a1a71b0 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/data/generic_model.pkl @@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:efcd14cc4a69f3a3d9af8ded80146b5b6b50df3bd74cf69108213b144eba725b
+size 53023716
diff --git 
a/NeuralVoicePuppetry/neural-code/third/DECA/data/head_template.obj b/NeuralVoicePuppetry/neural-code/third/DECA/data/head_template.obj new file mode 100755 index 0000000..49c83e8 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/data/head_template.obj @@ -0,0 +1,20122 @@ +# Blender v2.80 (sub 75) OBJ File: 'flame_new_uv_map.blend' +# www.blender.org +mtllib template.mtl +v 0.061998 1.493503 -0.027215 +v 0.066005 1.492495 -0.026312 +v 0.066500 1.493900 -0.026200 +v 0.062307 1.494799 -0.027008 +v 0.075800 1.494613 -0.035711 +v 0.075177 1.497973 -0.032283 +v 0.074438 1.500713 -0.032211 +v 0.074800 1.500600 -0.034800 +v 0.028307 1.453590 -0.092080 +v 0.013146 1.453750 -0.096146 +v 0.013086 1.443515 -0.093653 +v 0.026940 1.441069 -0.088483 +v 0.044034 1.418873 -0.070397 +v 0.039796 1.426214 -0.075181 +v 0.036540 1.415001 -0.078651 +v 0.044437 1.411710 -0.070237 +v 0.039024 1.550958 0.053657 +v 0.046783 1.549456 0.048912 +v 0.044954 1.543563 0.051419 +v -0.061998 1.493503 -0.027215 +v -0.066005 1.492495 -0.026312 +v -0.066500 1.493900 -0.026200 +v -0.062307 1.494799 -0.027008 +v -0.075800 1.494613 -0.035711 +v -0.075177 1.497973 -0.032283 +v -0.074438 1.500713 -0.032211 +v -0.074800 1.500600 -0.034800 +v 0.037682 1.544673 0.054847 +v 0.092919 1.516004 -0.048998 +v 0.093182 1.513297 -0.049973 +v 0.093400 1.511900 -0.048200 +v 0.093071 1.514299 -0.047313 +v 0.089794 1.497809 -0.046670 +v 0.089310 1.505389 -0.048436 +v 0.090352 1.508011 -0.049430 +v 0.090705 1.505796 -0.048534 +v 0.090287 1.486997 -0.043418 +v 0.089308 1.487702 -0.042889 +v 0.090700 1.492300 -0.044900 +v 0.091687 1.491903 -0.045704 +v 0.075298 1.468043 -0.021340 +v 0.078140 1.470212 -0.022933 +v 0.078493 1.467790 -0.024400 +v 0.073913 1.466336 -0.023090 +v 0.070914 1.467491 -0.024294 +v 0.075432 1.466828 -0.028178 +v 0.075074 1.469686 -0.029129 +v 0.071489 1.469689 -0.026106 +v 0.086489 1.526078 -0.035111 +v 0.086600 1.523100 -0.034700 +v 0.084675 1.522306 -0.032321 +v 0.084516 1.525303 -0.032395 +v 0.076847 1.490511 -0.045292 +v 0.075461 1.484031 -0.043370 +v 0.081300 1.489074 -0.045823 +v 0.081298 1.491995 -0.046116 +v 0.085506 1.516107 -0.041696 +v 0.086614 1.516897 -0.045188 +v 0.087200 1.514700 -0.046300 +v 0.086210 1.513201 -0.043498 +v 0.089406 1.517594 -0.050088 +v 0.089835 1.514599 -0.051186 +v 0.073155 1.476985 -0.019715 +v 0.071768 1.480608 -0.019054 +v 0.072296 1.482197 -0.020109 +v 0.073056 1.481128 -0.021346 +v 0.073953 1.494303 -0.032675 +v 0.075096 1.498497 -0.031088 +v 0.086495 1.476758 -0.038243 +v 0.086462 1.478693 -0.037171 +v 0.088511 1.482595 -0.040600 +v 0.088684 1.481204 -0.041805 +v 0.083800 1.481300 -0.034800 +v 0.086100 1.483800 -0.038300 +v 0.087015 1.484403 -0.040188 +v 0.085989 1.482300 -0.038105 +v 0.088031 1.519317 -0.041792 +v 0.085861 1.521559 -0.037113 +v 0.086699 1.521126 -0.037101 +v 0.088900 1.518700 -0.041800 +v 0.082000 1.504600 -0.045500 +v 0.080323 1.509694 -0.043474 +v 0.079604 1.508792 -0.043485 +v 0.081297 1.504301 -0.045314 +v 0.089100 1.515800 -0.048100 +v 0.089639 1.513104 -0.049003 +v 0.087992 1.512000 -0.047407 +v 0.075500 1.513617 -0.029983 +v 0.076118 1.514593 -0.031298 +v 0.076609 1.513005 -0.031696 +v 0.076371 1.511694 -0.030520 +v 0.078164 1.466576 -0.027814 +v 0.073096 1.475727 -0.036776 +v 0.074801 1.476562 -0.038030 +v 0.077596 1.480667 -0.040850 +v 0.076407 1.481515 -0.041493 +v 0.070618 1.469515 -0.019925 +v 0.073800 1.469900 -0.019600 +v 0.074701 1.468685 -0.020488 +v 0.071600 1.467919 -0.021209 +v 0.078425 1.518098 -0.031667 +v 0.080957 1.517503 -0.033837 
+v 0.079092 1.516506 -0.033011 +v 0.077368 1.516811 -0.031317 +v 0.088500 1.503500 -0.046500 +v 0.087000 1.510300 -0.044900 +v 0.089148 1.494204 -0.040272 +v 0.089000 1.494300 -0.043500 +v 0.088538 1.490188 -0.041897 +v -0.028307 1.453590 -0.092080 +v -0.013146 1.453750 -0.096146 +v -0.013086 1.443515 -0.093653 +v -0.026940 1.441069 -0.088483 +v 0.088564 1.490214 -0.038795 +v 0.088334 1.502483 -0.040508 +v 0.088447 1.502416 -0.044192 +v 0.090983 1.511396 -0.046991 +v 0.091428 1.505198 -0.047205 +v 0.090384 1.512000 -0.047798 +v 0.077544 1.512600 -0.041093 +v 0.077400 1.515300 -0.044900 +v 0.077651 1.508587 -0.046651 +v 0.078383 1.510012 -0.042574 +v 0.072487 1.486189 -0.020399 +v 0.073652 1.488310 -0.021518 +v 0.072785 1.486082 -0.022770 +v 0.072332 1.483812 -0.022359 +v 0.078096 1.478882 -0.039421 +v 0.075400 1.475700 -0.037100 +v 0.075785 1.475417 -0.036252 +v 0.078507 1.477345 -0.037814 +v 0.083100 1.518700 -0.036200 +v 0.083855 1.513511 -0.036581 +v 0.082100 1.513505 -0.035117 +v 0.079400 1.482900 -0.027100 +v 0.080206 1.484210 -0.028695 +v 0.081500 1.481100 -0.030500 +v 0.080687 1.479097 -0.029207 +v 0.082571 1.470333 -0.032585 +v 0.080240 1.469896 -0.033585 +v 0.077900 1.467889 -0.030714 +v 0.080399 1.467916 -0.030093 +v 0.072675 1.482711 -0.021801 +v 0.072327 1.483599 -0.020303 +v 0.083926 1.505195 -0.047073 +v 0.082566 1.511308 -0.044839 +v 0.086087 1.511596 -0.040800 +v 0.086768 1.509089 -0.042098 +v 0.074730 1.498708 -0.025229 +v 0.072484 1.499298 -0.026263 +v 0.072424 1.498498 -0.026271 +v 0.075274 1.497098 -0.025153 +v 0.074986 1.498595 -0.024097 +v 0.075514 1.497004 -0.023998 +v 0.075085 1.496797 -0.022906 +v 0.074710 1.497703 -0.022785 +v 0.071792 1.499615 -0.026608 +v 0.072000 1.499300 -0.026400 +v 0.072388 1.499850 -0.026639 +v 0.077294 1.502724 -0.037763 +v 0.078993 1.494880 -0.039129 +v 0.077413 1.494601 -0.037981 +v 0.075700 1.501500 -0.036400 +v 0.089216 1.495402 -0.045388 +v 0.080994 1.521612 -0.028301 +v 0.079582 1.523421 -0.028204 +v 0.082400 1.526875 -0.032516 +v 0.074387 1.485551 -0.030792 +v 0.077111 1.486378 -0.028293 +v 0.075401 1.486174 -0.026902 +v 0.073776 1.485212 -0.029312 +v 0.072203 1.490294 -0.025077 +v 0.070416 1.486997 -0.024430 +v 0.074220 1.488702 -0.023891 +v 0.074537 1.504508 -0.024103 +v 0.073632 1.500887 -0.026200 +v 0.074301 1.499691 -0.025068 +v 0.074473 1.500004 -0.024007 +v 0.090300 1.486200 -0.044700 +v 0.091516 1.491394 -0.047212 +v 0.089989 1.491103 -0.048594 +v 0.088900 1.485600 -0.046100 +v 0.071174 1.473291 -0.018315 +v 0.067815 1.470535 -0.021913 +v 0.069856 1.476490 -0.018061 +v 0.091000 1.522800 -0.044100 +v 0.087700 1.524800 -0.044500 +v 0.088398 1.522717 -0.046819 +v 0.091894 1.520791 -0.046191 +v 0.072913 1.506097 -0.031592 +v 0.073018 1.504001 -0.031087 +v 0.072919 1.505397 -0.029993 +v 0.072693 1.507397 -0.030290 +v 0.053586 1.467412 -0.075433 +v 0.062458 1.468076 -0.057171 +v 0.066552 1.481198 -0.065315 +v 0.058792 1.480269 -0.080652 +v 0.083582 1.473183 -0.031719 +v 0.082493 1.471092 -0.029401 +v 0.075389 1.512005 -0.023502 +v 0.074722 1.513599 -0.022600 +v 0.075513 1.518799 -0.024222 +v 0.076219 1.515923 -0.024158 +v 0.090682 1.506499 -0.052109 +v 0.093415 1.505906 -0.050612 +v 0.077712 1.518490 -0.030205 +v 0.081200 1.521900 -0.033400 +v 0.079308 1.519492 -0.032198 +v 0.077480 1.518012 -0.030407 +v 0.077190 1.509801 -0.027990 +v 0.077694 1.511105 -0.027778 +v 0.076001 1.512096 -0.027915 +v 0.075200 1.510700 -0.027900 +v 0.074370 1.517897 -0.021713 +v 0.075297 1.522474 -0.025117 +v 0.071415 1.475105 -0.036388 +v 0.077115 
1.511100 -0.025590 +v 0.077700 1.513600 -0.025600 +v 0.078380 1.512711 -0.027699 +v -0.044034 1.418873 -0.070397 +v -0.039796 1.426214 -0.075181 +v -0.036540 1.415001 -0.078651 +v -0.044437 1.411710 -0.070237 +v 0.070513 1.481494 -0.017518 +v 0.071608 1.483198 -0.019075 +v 0.076812 1.517093 -0.030192 +v 0.076143 1.515882 -0.030085 +v 0.074901 1.512051 -0.029070 +v 0.075697 1.510116 -0.030317 +v 0.083394 1.503779 -0.036850 +v 0.085900 1.495120 -0.037669 +v 0.070665 1.482116 -0.051701 +v 0.066781 1.468771 -0.037011 +v 0.074900 1.512500 -0.028100 +v 0.074900 1.512900 -0.028300 +v 0.074300 1.512000 -0.028500 +v 0.074200 1.511400 -0.028200 +v 0.081180 1.469406 -0.027314 +v 0.081392 1.510102 -0.034728 +v 0.078509 1.508234 -0.032619 +v 0.078690 1.510401 -0.032211 +v 0.091593 1.496901 -0.045991 +v 0.092481 1.504997 -0.047033 +v 0.093707 1.505301 -0.048582 +v 0.092700 1.496700 -0.047300 +v 0.069138 1.468847 -0.022896 +v 0.073401 1.503294 -0.031788 +v 0.074390 1.501599 -0.031214 +v 0.073320 1.508785 -0.030771 +v 0.073362 1.510351 -0.029068 +v 0.071539 1.486197 -0.018780 +v 0.087705 1.519915 -0.045409 +v 0.088484 1.518000 -0.046810 +v 0.086125 1.518595 -0.043989 +v 0.085717 1.519801 -0.042398 +v 0.084872 1.521507 -0.039310 +v 0.084900 1.522959 -0.038902 +v 0.086800 1.521600 -0.043400 +v 0.076910 1.509099 -0.031384 +v 0.077488 1.510807 -0.031221 +v 0.078757 1.481692 -0.026054 +v 0.079651 1.475700 -0.026960 +v 0.087515 1.486694 -0.039481 +v 0.087178 1.487520 -0.036411 +v 0.087200 1.496700 -0.049600 +v 0.086626 1.505995 -0.050271 +v 0.085270 1.505500 -0.048424 +v 0.086200 1.496700 -0.048500 +v 0.080890 1.497910 -0.046193 +v 0.077697 1.498497 -0.046178 +v 0.074100 1.498700 -0.022500 +v 0.073200 1.502400 -0.020900 +v 0.073000 1.508100 -0.030600 +v 0.073112 1.509696 -0.028998 +v 0.082101 1.481590 -0.041807 +v 0.077895 1.474733 -0.034283 +v 0.082003 1.479694 -0.039813 +v 0.073816 1.473945 -0.032524 +v 0.073304 1.473187 -0.030203 +v 0.075893 1.473216 -0.031599 +v 0.084062 1.483308 -0.033637 +v 0.085978 1.484417 -0.036390 +v 0.071500 1.499400 -0.027100 +v 0.070090 1.497912 -0.028215 +v 0.070510 1.499177 -0.026601 +v 0.086793 1.502906 -0.038129 +v 0.088239 1.494694 -0.038349 +v 0.077317 1.513394 -0.028185 +v 0.076101 1.503386 -0.027988 +v 0.076739 1.506787 -0.026397 +v 0.080088 1.516707 -0.028296 +v 0.078608 1.515193 -0.028702 +v 0.078171 1.514217 -0.028369 +v 0.079119 1.514587 -0.027733 +v 0.064970 1.489977 -0.026859 +v 0.068207 1.488286 -0.025432 +v 0.069600 1.491400 -0.025400 +v 0.083936 1.522604 -0.037179 +v 0.084376 1.520602 -0.038312 +v 0.083800 1.512300 -0.046400 +v 0.077005 1.514888 -0.032296 +v 0.077607 1.513397 -0.033092 +v 0.077512 1.523697 -0.039600 +v 0.077134 1.522214 -0.036306 +v 0.077024 1.522596 -0.035096 +v 0.077000 1.526200 -0.035400 +v 0.072194 1.470894 -0.027111 +v 0.075000 1.471200 -0.029600 +v 0.074913 1.472222 -0.030080 +v 0.072794 1.472090 -0.028308 +v 0.074614 1.485867 -0.026314 +v 0.073392 1.484991 -0.028510 +v 0.088313 1.487893 -0.042607 +v 0.087791 1.487097 -0.041406 +v 0.088800 1.491100 -0.043700 +v 0.089407 1.492300 -0.044808 +v 0.074023 1.506963 -0.033952 +v 0.073613 1.505802 -0.033402 +v 0.078264 1.516972 -0.037457 +v 0.080300 1.519500 -0.038900 +v 0.078936 1.522279 -0.035468 +v 0.077375 1.520039 -0.035263 +v 0.085184 1.513304 -0.048435 +v 0.077900 1.512000 -0.032500 +v 0.078690 1.512194 -0.033514 +v 0.077290 1.512393 -0.032109 +v 0.074693 1.509798 -0.027983 +v 0.076690 1.508602 -0.028290 +v 0.079600 1.517115 -0.029581 +v 0.083299 1.520672 -0.032822 +v 0.082605 1.520723 
-0.032873 +v 0.079196 1.517891 -0.029805 +v 0.075486 1.514507 -0.028998 +v 0.014577 1.465024 -0.102244 +v 0.016400 1.474400 -0.109200 +v -0.039024 1.550958 0.053657 +v -0.046783 1.549456 0.048912 +v -0.044954 1.543563 0.051419 +v -0.037682 1.544673 0.054847 +v 0.077508 1.523187 -0.028108 +v 0.078384 1.524716 -0.032003 +v 0.079493 1.525831 -0.032370 +v 0.078124 1.524043 -0.029015 +v 0.085510 1.518598 -0.041793 +v 0.089900 1.514600 -0.047000 +v 0.075224 1.500049 -0.029836 +v 0.072310 1.497196 -0.029602 +v 0.062100 1.496100 -0.028400 +v 0.062400 1.497300 -0.028400 +v 0.062996 1.496896 -0.029588 +v 0.062700 1.495600 -0.029800 +v 0.065798 1.490998 -0.030077 +v 0.068687 1.489987 -0.028824 +v 0.067795 1.488059 -0.027571 +v 0.065087 1.488901 -0.029508 +v 0.061700 1.493000 -0.028800 +v 0.061900 1.494600 -0.028500 +v 0.062600 1.493904 -0.030024 +v 0.062510 1.492089 -0.030514 +v 0.083251 1.522548 -0.035046 +v 0.085047 1.522467 -0.036875 +v 0.091507 1.513492 -0.045983 +v 0.092000 1.511200 -0.046800 +v 0.088778 1.518688 -0.044296 +v 0.089400 1.516800 -0.045700 +v 0.073810 1.490899 -0.021494 +v 0.074915 1.491096 -0.023006 +v 0.074587 1.489510 -0.022989 +v 0.092426 1.516497 -0.046084 +v 0.092491 1.518401 -0.047705 +v 0.074023 1.473610 -0.019287 +v 0.072900 1.472100 -0.018700 +v 0.040539 1.402694 -0.008784 +v 0.048074 1.407914 -0.020517 +v 0.047013 1.398104 -0.021095 +v 0.039392 1.393104 -0.010006 +v 0.079593 1.519097 -0.027011 +v 0.078310 1.521306 -0.026782 +v 0.061832 1.491418 -0.027890 +v 0.061767 1.490564 -0.029280 +v 0.064532 1.489040 -0.028225 +v 0.066803 1.495395 -0.026325 +v 0.062692 1.496102 -0.026989 +v 0.066004 1.492689 -0.029996 +v 0.066600 1.497700 -0.029000 +v 0.066899 1.498785 -0.027594 +v 0.069870 1.484581 -0.025568 +v 0.071628 1.484108 -0.027495 +v 0.073099 1.484030 -0.024480 +v 0.062493 1.490133 -0.030449 +v 0.073605 1.507995 -0.032192 +v 0.069294 1.493493 -0.029380 +v 0.072510 1.491006 -0.030991 +v 0.072108 1.488909 -0.030290 +v 0.069088 1.491598 -0.029288 +v 0.082186 1.480509 -0.032108 +v 0.082500 1.482389 -0.031806 +v 0.075207 1.486410 -0.032571 +v 0.078600 1.486500 -0.030726 +v 0.074610 1.503003 -0.030429 +v 0.075710 1.506076 -0.029052 +v 0.072018 1.499535 -0.027245 +v 0.069600 1.495600 -0.029000 +v 0.072429 1.493400 -0.030779 +v 0.066996 1.496709 -0.026563 +v 0.070701 1.496697 -0.025915 +v 0.070496 1.497807 -0.026089 +v 0.067002 1.497703 -0.026813 +v 0.080210 1.513496 -0.034376 +v 0.077161 1.519075 -0.025362 +v 0.078600 1.516400 -0.026100 +v 0.078956 1.526993 -0.051414 +v 0.077945 1.518913 -0.056083 +v 0.077665 1.520494 -0.043294 +v 0.066104 1.494500 -0.029818 +v 0.063008 1.498217 -0.028195 +v 0.063488 1.497694 -0.027293 +v 0.087700 1.483800 -0.040300 +v 0.073996 1.483585 -0.024303 +v 0.073597 1.507916 -0.028454 +v 0.087091 1.491399 -0.048005 +v 0.086200 1.486200 -0.045800 +v 0.073000 1.509200 -0.028900 +v 0.073613 1.510299 -0.028418 +v 0.072679 1.484436 -0.028099 +v 0.074011 1.484984 -0.025493 +v 0.073800 1.510700 -0.028300 +v 0.074759 1.490612 -0.024153 +v 0.085521 1.513187 -0.039739 +v 0.078886 1.472181 -0.034028 +v 0.076900 1.470700 -0.031400 +v 0.063107 1.497087 -0.027034 +v 0.067048 1.488263 -0.026903 +v 0.066401 1.496204 -0.029511 +v 0.073083 1.494401 -0.025488 +v 0.072718 1.492295 -0.025142 +v 0.075021 1.492793 -0.024235 +v 0.080194 1.487032 -0.032232 +v 0.076200 1.488129 -0.034638 +v 0.077499 1.489024 -0.036690 +v 0.082102 1.487860 -0.033716 +v 0.084805 1.481507 -0.043287 +v 0.083016 1.477319 -0.039971 +v 0.085043 1.475936 -0.039699 +v 0.087297 1.480394 -0.043209 +v 
0.079918 1.541308 -0.059991 +v 0.078910 1.532799 -0.044899 +v 0.080121 1.548101 -0.049100 +v 0.083314 1.521389 -0.044086 +v 0.083806 1.518997 -0.045690 +v 0.088991 1.520200 -0.048605 +v 0.076340 1.525030 -0.030066 +v 0.076931 1.522731 -0.033547 +v 0.077407 1.523933 -0.031651 +v 0.090305 1.496596 -0.050608 +v 0.074900 1.508300 -0.023500 +v 0.076811 1.508900 -0.025803 +v 0.081302 1.482541 -0.042839 +v 0.082741 1.485784 -0.031869 +v 0.085088 1.486284 -0.033841 +v 0.088312 1.522894 -0.037393 +v 0.085094 1.521385 -0.034799 +v 0.008841 1.394244 0.015789 +v 0.008800 1.386900 0.011600 +v 0.008589 1.498484 0.073786 +v 0.010030 1.498324 0.072305 +v 0.008757 1.500242 0.071290 +v 0.008117 1.500249 0.072013 +v 0.008946 1.489300 0.072430 +v 0.008334 1.490922 0.071314 +v 0.009315 1.490643 0.070347 +v 0.010188 1.489265 0.071512 +v 0.005681 1.499910 0.071178 +v 0.004213 1.497290 0.073791 +v 0.005785 1.498187 0.075196 +v 0.006796 1.500441 0.071916 +v 0.088400 1.525800 -0.038200 +v 0.012342 1.488981 0.072095 +v 0.011326 1.490885 0.070919 +v 0.012050 1.491341 0.072767 +v 0.013716 1.489773 0.073790 +v 0.004958 1.489999 0.080211 +v 0.006080 1.492103 0.080389 +v 0.004980 1.491307 0.078099 +v 0.004935 1.489324 0.077714 +v 0.007888 1.492561 0.079779 +v 0.009808 1.492021 0.081514 +v 0.011583 1.491985 0.079889 +v 0.009400 1.492428 0.078300 +v 0.004716 1.493621 0.073204 +v 0.006063 1.491670 0.074584 +v 0.003964 1.493789 0.076796 +v 0.012266 1.493714 0.070902 +v 0.010928 1.492691 0.069478 +v 0.009809 1.496498 0.068938 +v 0.011062 1.497282 0.069812 +v 0.045276 1.418919 -0.006497 +v 0.051316 1.421097 -0.020493 +v 0.034601 1.509504 0.051197 +v 0.033968 1.506854 0.052483 +v 0.030002 1.508004 0.053112 +v 0.030801 1.510322 0.051979 +v 0.075600 1.495000 -0.023600 +v 0.074983 1.495799 -0.022522 +v 0.084383 1.516205 -0.047117 +v 0.087769 1.520281 -0.042209 +v 0.082911 1.523192 -0.041995 +v 0.081484 1.525319 -0.037312 +v 0.080076 1.523708 -0.036314 +v 0.081315 1.521297 -0.040291 +v 0.090800 1.520100 -0.042800 +v 0.075382 1.482188 -0.024203 +v 0.084040 1.509298 -0.036335 +v 0.080484 1.525822 -0.034804 +v 0.083919 1.527606 -0.035488 +v 0.090591 1.497096 -0.045921 +v 0.091688 1.518505 -0.044615 +v 0.089812 1.517295 -0.043486 +v 0.081321 1.518998 -0.029088 +v 0.078895 1.491603 -0.038661 +v 0.085402 1.491588 -0.036554 +v 0.084007 1.489086 -0.035109 +v -0.092919 1.516004 -0.048998 +v -0.093182 1.513297 -0.049973 +v -0.093400 1.511900 -0.048200 +v -0.093071 1.514299 -0.047313 +v -0.089794 1.497809 -0.046670 +v -0.089310 1.505389 -0.048436 +v -0.090352 1.508011 -0.049430 +v -0.090705 1.505796 -0.048534 +v 0.078399 1.489817 -0.037774 +v 0.073444 1.504694 -0.032820 +v 0.071111 1.486897 -0.016909 +v 0.081578 1.475997 -0.030012 +v -0.090287 1.486997 -0.043418 +v -0.089308 1.487702 -0.042889 +v -0.090700 1.492300 -0.044900 +v -0.091687 1.491903 -0.045704 +v 0.082714 1.477004 -0.032189 +v 0.031157 1.465431 -0.096619 +v 0.034883 1.475714 -0.102887 +v 0.092400 1.496600 -0.049200 +v 0.083202 1.522130 -0.035985 +v 0.077509 1.481801 -0.025290 +v 0.077217 1.476111 -0.024483 +v 0.075527 1.476613 -0.022791 +v 0.078213 1.512608 -0.033384 +v 0.080792 1.483473 -0.043245 +v 0.081006 1.485539 -0.044522 +v 0.063300 1.497600 -0.029200 +v 0.082300 1.478600 -0.039700 +v 0.078492 1.473495 -0.034110 +v 0.054580 1.424205 -0.033907 +v 0.053617 1.426598 -0.048816 +v -0.075298 1.468043 -0.021340 +v -0.078140 1.470212 -0.022933 +v -0.078493 1.467790 -0.024400 +v -0.073913 1.466336 -0.023090 +v 0.052512 1.415702 -0.047314 +v 0.052297 1.412202 -0.033512 +v 0.072400 
1.500500 -0.017900 +v 0.072720 1.493501 -0.019884 +v 0.072078 1.492403 -0.018405 +v 0.022133 1.402373 0.012551 +v 0.018991 1.390401 0.007792 +v 0.071080 1.487414 -0.013506 +v 0.070164 1.486779 -0.003273 +v 0.071773 1.502108 -0.003201 +v 0.072151 1.496898 -0.012896 +v 0.059961 1.533198 0.030682 +v 0.061666 1.533495 0.026588 +v 0.061933 1.524304 0.026724 +v 0.059859 1.525661 0.030102 +v 0.020105 1.589077 0.046472 +v 0.009713 1.590238 0.048897 +v 0.009944 1.597050 0.042825 +v 0.020552 1.595691 0.040151 +v 0.036783 1.483221 -0.106562 +v 0.038502 1.492891 -0.110712 +v 0.018483 1.490152 -0.117701 +v 0.017417 1.481271 -0.113053 +v 0.020717 1.610835 0.018436 +v 0.010055 1.612720 0.020935 +v 0.010178 1.617029 0.011196 +v -0.070914 1.467491 -0.024294 +v -0.075432 1.466828 -0.028178 +v -0.075074 1.469686 -0.029129 +v -0.071489 1.469689 -0.026106 +v 0.021004 1.614887 0.009079 +v 0.051849 1.506984 0.042450 +v 0.053308 1.500902 0.042309 +v 0.049500 1.500600 0.045600 +v 0.048611 1.506682 0.045109 +v 0.077083 1.471595 -0.021914 +v 0.075873 1.472489 -0.020821 +v -0.086489 1.526078 -0.035111 +v -0.086600 1.523100 -0.034700 +v -0.084675 1.522306 -0.032321 +v -0.084516 1.525303 -0.032395 +v -0.076847 1.490511 -0.045292 +v -0.075461 1.484031 -0.043370 +v -0.081300 1.489074 -0.045823 +v -0.081298 1.491995 -0.046116 +v 0.010363 1.620560 -0.000194 +v 0.021444 1.618288 -0.001890 +v 0.013719 1.517008 0.059295 +v -0.085506 1.516107 -0.041696 +v -0.086614 1.516897 -0.045188 +v -0.087200 1.514700 -0.046300 +v -0.086210 1.513201 -0.043498 +v -0.089406 1.517594 -0.050088 +v -0.089835 1.514599 -0.051186 +v -0.073155 1.476985 -0.019715 +v -0.071768 1.480608 -0.019054 +v -0.072296 1.482197 -0.020109 +v -0.073056 1.481128 -0.021346 +v -0.073953 1.494303 -0.032675 +v -0.075096 1.498497 -0.031088 +v -0.086495 1.476758 -0.038243 +v -0.086462 1.478693 -0.037171 +v -0.088511 1.482595 -0.040600 +v -0.088684 1.481204 -0.041805 +v 0.014127 1.518765 0.056494 +v 0.017673 1.515705 0.055114 +v 0.017351 1.513871 0.057445 +v 0.007369 1.564131 0.060202 +v 0.008331 1.571458 0.057659 +v 0.017301 1.571015 0.055837 +v 0.015753 1.563817 0.058794 +v 0.021739 1.513091 0.053661 +v 0.021012 1.511308 0.055406 +v 0.010005 1.602932 0.036161 +v 0.020698 1.601402 0.033405 +v 0.020840 1.621971 -0.050662 +v -0.083800 1.481300 -0.034800 +v -0.086100 1.483800 -0.038300 +v -0.087015 1.484403 -0.040188 +v -0.085989 1.482300 -0.038105 +v 0.021361 1.622750 -0.042579 +v 0.009988 1.625236 -0.042999 +v -0.088031 1.519317 -0.041792 +v -0.085861 1.521559 -0.037113 +v -0.086699 1.521126 -0.037101 +v -0.088900 1.518700 -0.041800 +v 0.011010 1.623798 -0.055177 +v 0.071022 1.498660 -0.072359 +v 0.074708 1.498310 -0.057287 +v 0.076813 1.509999 -0.058491 +v 0.074411 1.514719 -0.074802 +v 0.033028 1.407859 0.005141 +v 0.030039 1.395778 0.000929 +v -0.082000 1.504600 -0.045500 +v -0.080323 1.509694 -0.043474 +v -0.079604 1.508792 -0.043485 +v -0.081297 1.504301 -0.045314 +v -0.089100 1.515800 -0.048100 +v -0.089639 1.513104 -0.049003 +v -0.087992 1.512000 -0.047407 +v -0.075500 1.513617 -0.029983 +v -0.076118 1.514593 -0.031298 +v -0.076609 1.513005 -0.031696 +v -0.076371 1.511694 -0.030520 +v -0.078164 1.466576 -0.027814 +v -0.073096 1.475727 -0.036776 +v -0.074801 1.476562 -0.038030 +v -0.077596 1.480667 -0.040850 +v -0.076407 1.481515 -0.041493 +v 0.053520 1.554444 0.037880 +v 0.055167 1.559187 0.033299 +v 0.058439 1.552305 0.030019 +v 0.056797 1.548534 0.034888 +v 0.005887 1.552131 0.063546 +v 0.013242 1.551916 0.062647 +v 0.011428 1.545963 0.063453 +v 0.005203 
1.545936 0.064108 +v -0.070618 1.469515 -0.019925 +v -0.073800 1.469900 -0.019600 +v -0.074701 1.468685 -0.020488 +v -0.071600 1.467919 -0.021209 +v 0.059440 1.518629 0.030726 +v 0.062023 1.515611 0.028312 +v 0.060821 1.508001 0.032915 +v 0.058003 1.511984 0.034409 +v 0.054598 1.507965 0.039780 +v -0.078425 1.518098 -0.031667 +v -0.080957 1.517503 -0.033837 +v -0.079092 1.516506 -0.033011 +v -0.077368 1.516811 -0.031317 +v 0.056600 1.502200 0.039300 +v -0.088500 1.503500 -0.046500 +v -0.087000 1.510300 -0.044900 +v 0.018904 1.579964 0.052345 +v 0.009112 1.580880 0.054387 +v 0.060600 1.543300 0.027500 +v 0.058985 1.541133 0.032384 +v 0.010699 1.519994 0.061739 +v 0.011514 1.521322 0.058249 +v 0.007996 1.525146 0.059987 +v -0.089148 1.494204 -0.040272 +v -0.089000 1.494300 -0.043500 +v -0.088538 1.490188 -0.041897 +v -0.088564 1.490214 -0.038795 +v 0.003912 1.524971 0.065569 +v 0.003648 1.528717 0.063754 +v 0.007046 1.528482 0.060248 +v 0.033993 1.562933 0.052199 +v 0.035634 1.569459 0.048664 +v 0.043291 1.567819 0.043458 +v 0.041735 1.561705 0.047287 +v 0.020698 1.606310 0.026348 +v -0.088334 1.502483 -0.040508 +v -0.088447 1.502416 -0.044192 +v 0.010005 1.608005 0.029016 +v 0.004139 1.537948 0.062980 +v 0.008537 1.537917 0.061632 +v 0.007473 1.534421 0.060282 +v -0.090983 1.511396 -0.046991 +v -0.091428 1.505198 -0.047205 +v -0.090384 1.512000 -0.047798 +v -0.077544 1.512600 -0.041093 +v -0.077400 1.515300 -0.044900 +v -0.077651 1.508587 -0.046651 +v -0.078383 1.510012 -0.042574 +v 0.003950 1.534628 0.062399 +v 0.025500 1.509400 0.054200 +v 0.026221 1.511402 0.052919 +v 0.068924 1.470783 -0.026772 +v 0.070063 1.470235 -0.025719 +v 0.073354 1.517300 -0.015507 +v 0.072714 1.507101 -0.013696 +v 0.071736 1.514897 -0.004110 +v 0.071742 1.527400 -0.006595 +v 0.073300 1.538600 -0.010500 +v 0.074599 1.546926 -0.016268 +v 0.075720 1.531749 -0.022655 +v 0.074613 1.525899 -0.018098 +v 0.049669 1.418009 -0.059856 +v 0.048608 1.427595 -0.062745 +v 0.004078 1.522301 0.068552 +v 0.004027 1.519664 0.071163 +v 0.020495 1.529703 -0.128086 +v -0.072487 1.486189 -0.020399 +v -0.073652 1.488310 -0.021518 +v -0.072785 1.486082 -0.022770 +v -0.072332 1.483812 -0.022359 +v 0.019992 1.516906 -0.126383 +v 0.040609 1.520993 -0.118122 +v 0.040804 1.533198 -0.119411 +v 0.068436 1.488481 -0.068879 +v -0.078096 1.478882 -0.039421 +v -0.075400 1.475700 -0.037100 +v -0.075785 1.475417 -0.036252 +v -0.078507 1.477345 -0.037814 +v 0.072751 1.489694 -0.054914 +v 0.010892 1.495095 0.074194 +v 0.007594 1.499713 0.069461 +v 0.007789 1.496199 0.068918 +v 0.007417 1.498889 0.069265 +v 0.010944 1.492053 0.076099 +v 0.013247 1.491279 0.077394 +v 0.008600 1.492389 0.069127 +v 0.007700 1.492600 0.069700 +v 0.006998 1.496497 0.069211 +v 0.008729 1.500115 0.070107 +v 0.001944 1.488386 0.082348 +v 0.002499 1.489875 0.085125 +v 0.006506 1.490990 0.084097 +v 0.005006 1.489613 0.082093 +v 0.005458 1.499077 0.070745 +v 0.004491 1.496811 0.071589 +v 0.005310 1.494685 0.078792 +v 0.007206 1.498428 0.074823 +v 0.007494 1.500437 0.072081 +v 0.007200 1.495300 0.078000 +v 0.006200 1.489300 0.075200 +v 0.009008 1.495404 0.076405 +v -0.083100 1.518700 -0.036200 +v -0.083855 1.513511 -0.036581 +v -0.082100 1.513505 -0.035117 +v -0.079400 1.482900 -0.027100 +v -0.080206 1.484210 -0.028695 +v -0.081500 1.481100 -0.030500 +v -0.080687 1.479097 -0.029207 +v 0.081106 1.485086 -0.030299 +v 0.055353 1.438827 -0.014305 +v 0.061802 1.452633 -0.021411 +v 0.061800 1.451269 -0.026789 +v -0.082571 1.470333 -0.032585 +v -0.080240 1.469896 -0.033585 +v -0.077900 
1.467889 -0.030714 +v -0.080399 1.467916 -0.030093 +v -0.072675 1.482711 -0.021801 +v -0.072327 1.483599 -0.020303 +v -0.083926 1.505195 -0.047073 +v -0.082566 1.511308 -0.044839 +v 0.056475 1.436705 -0.023388 +v 0.080800 1.511900 -0.034700 +v 0.066012 1.558296 0.012561 +v 0.067698 1.568962 0.002878 +v 0.072355 1.557904 -0.008158 +v -0.086087 1.511596 -0.040800 +v -0.086768 1.509089 -0.042098 +v 0.071371 1.547902 -0.002509 +v 0.068379 1.577790 -0.004909 +v 0.073269 1.567598 -0.015308 +v 0.076018 1.556001 -0.024296 +v 0.077911 1.536200 -0.036897 +v 0.042165 1.521251 0.045613 +v 0.042412 1.521112 0.045815 +v 0.040900 1.520800 0.047100 +v -0.074730 1.498708 -0.025229 +v -0.072484 1.499298 -0.026263 +v -0.072424 1.498498 -0.026271 +v -0.075274 1.497098 -0.025153 +v 0.040900 1.520900 0.046900 +v 0.041684 1.521291 0.044982 +v 0.040343 1.520792 0.046058 +v -0.074986 1.498595 -0.024097 +v -0.075514 1.497004 -0.023998 +v -0.075085 1.496797 -0.022906 +v -0.074710 1.497703 -0.022785 +v 0.042542 1.521718 0.044010 +v 0.043095 1.521563 0.044535 +v 0.043365 1.521386 0.044713 +v 0.024933 1.521899 0.048906 +v 0.025588 1.522170 0.048171 +v 0.028191 1.521207 0.048430 +v 0.027547 1.521253 0.049572 +v 0.024720 1.521709 0.049378 +v 0.027503 1.521018 0.050005 +v -0.071792 1.499615 -0.026608 +v -0.072000 1.499300 -0.026400 +v -0.072388 1.499850 -0.026639 +v -0.077294 1.502724 -0.037763 +v -0.078993 1.494880 -0.039129 +v -0.077413 1.494601 -0.037981 +v -0.075700 1.501500 -0.036400 +v 0.028678 1.523900 0.042557 +v 0.028204 1.524236 0.042492 +v -0.089216 1.495402 -0.045388 +v 0.029466 1.523482 0.042681 +v 0.023235 1.523973 0.046674 +v 0.023291 1.524974 0.046778 +v -0.080994 1.521612 -0.028301 +v -0.079582 1.523421 -0.028204 +v -0.082400 1.526875 -0.032516 +v 0.027982 1.525037 0.042609 +v 0.027969 1.524510 0.042490 +v 0.023469 1.523441 0.046801 +v -0.074387 1.485551 -0.030792 +v -0.077111 1.486378 -0.028293 +v -0.075401 1.486174 -0.026902 +v -0.073776 1.485212 -0.029312 +v -0.072203 1.490294 -0.025077 +v -0.070416 1.486997 -0.024430 +v -0.074220 1.488702 -0.023891 +v -0.074537 1.504508 -0.024103 +v -0.073632 1.500887 -0.026200 +v -0.074301 1.499691 -0.025068 +v -0.074473 1.500004 -0.024007 +v -0.090300 1.486200 -0.044700 +v -0.091516 1.491394 -0.047212 +v -0.089989 1.491103 -0.048594 +v -0.088900 1.485600 -0.046100 +v 0.021958 1.523277 0.046966 +v 0.021129 1.523661 0.046653 +v -0.071174 1.473291 -0.018315 +v -0.067815 1.470535 -0.021913 +v -0.069856 1.476490 -0.018061 +v -0.091000 1.522800 -0.044100 +v -0.087700 1.524800 -0.044500 +v -0.088398 1.522717 -0.046819 +v -0.091894 1.520791 -0.046191 +v -0.072913 1.506097 -0.031592 +v -0.073018 1.504001 -0.031087 +v -0.072919 1.505397 -0.029993 +v -0.072693 1.507397 -0.030290 +v 0.021224 1.524972 0.046884 +v 0.032700 1.520100 0.050100 +v 0.032897 1.519399 0.050184 +v 0.028200 1.519704 0.050417 +v 0.027801 1.520500 0.050288 +v 0.032498 1.520485 0.049893 +v 0.035900 1.520600 0.049000 +v 0.035996 1.520197 0.049188 +v 0.036104 1.519501 0.049213 +v -0.053586 1.467412 -0.075433 +v -0.062458 1.468076 -0.057171 +v -0.066552 1.481198 -0.065315 +v -0.058792 1.480269 -0.080652 +v -0.083582 1.473183 -0.031719 +v -0.082493 1.471092 -0.029401 +v -0.075389 1.512005 -0.023502 +v -0.074722 1.513599 -0.022600 +v -0.075513 1.518799 -0.024222 +v -0.076219 1.515923 -0.024158 +v 0.024901 1.520591 0.050179 +v 0.024799 1.521400 0.049815 +v 0.036100 1.518900 0.049200 +v 0.033100 1.518800 0.050100 +v -0.090682 1.506499 -0.052109 +v -0.093415 1.505906 -0.050612 +v 0.028400 1.519100 0.050400 +v 
0.024900 1.520200 0.050300 +v 0.022180 1.521429 0.049697 +v 0.023262 1.521579 0.049651 +v 0.033098 1.518102 0.049889 +v 0.033002 1.517298 0.049711 +v 0.028299 1.517601 0.050288 +v 0.028401 1.518399 0.050319 +v -0.077712 1.518490 -0.030205 +v -0.081200 1.521900 -0.033400 +v -0.079308 1.519492 -0.032198 +v -0.077480 1.518012 -0.030407 +v -0.077190 1.509801 -0.027990 +v -0.077694 1.511105 -0.027778 +v -0.076001 1.512096 -0.027915 +v -0.075200 1.510700 -0.027900 +v 0.036100 1.518200 0.048900 +v 0.036000 1.517400 0.048700 +v -0.074370 1.517897 -0.021713 +v -0.075297 1.522474 -0.025117 +v 0.044439 1.521538 0.043123 +v 0.044900 1.522000 0.042200 +v 0.044889 1.521497 0.042391 +v -0.071415 1.475105 -0.036388 +v 0.044613 1.521100 0.042908 +v 0.047300 1.519000 0.040400 +v 0.048300 1.520100 0.039500 +v 0.050100 1.519300 0.038700 +v -0.077115 1.511100 -0.025590 +v -0.077700 1.513600 -0.025600 +v -0.078380 1.512711 -0.027699 +v -0.070513 1.481494 -0.017518 +v -0.071608 1.483198 -0.019075 +v -0.076812 1.517093 -0.030192 +v -0.076143 1.515882 -0.030085 +v -0.074901 1.512051 -0.029070 +v -0.075697 1.510116 -0.030317 +v 0.048907 1.518104 0.039809 +v -0.083394 1.503779 -0.036850 +v -0.085900 1.495120 -0.037669 +v 0.056681 1.509389 0.037056 +v 0.051190 1.515294 0.039686 +v 0.052600 1.516500 0.037900 +v 0.044000 1.521300 0.043900 +v 0.043400 1.521000 0.044800 +v 0.043900 1.521600 0.043800 +v 0.044314 1.521924 0.042872 +v 0.044184 1.520701 0.043691 +v 0.043510 1.520399 0.044707 +v 0.042500 1.520800 0.045900 +v 0.042589 1.520103 0.045890 +v 0.041000 1.519800 0.047200 +v 0.040900 1.520500 0.047200 +v 0.038827 1.520294 0.048207 +v -0.070665 1.482116 -0.051701 +v -0.066781 1.468771 -0.037011 +v 0.039096 1.520691 0.047990 +v 0.038924 1.519591 0.048198 +v 0.041100 1.519200 0.047000 +v 0.039015 1.518991 0.048107 +v 0.042600 1.519500 0.045600 +v 0.043590 1.519902 0.044393 +v 0.044314 1.520298 0.043307 +v 0.044900 1.520700 0.042500 +v 0.045300 1.521300 0.041900 +v 0.044600 1.519800 0.042800 +v 0.045200 1.519100 0.042100 +v 0.044000 1.518600 0.043400 +v 0.043727 1.519374 0.043986 +v 0.045400 1.520300 0.041900 +v 0.046200 1.519700 0.041100 +v -0.074900 1.512500 -0.028100 +v -0.074900 1.512900 -0.028300 +v -0.074300 1.512000 -0.028500 +v -0.074200 1.511400 -0.028200 +v 0.042700 1.519000 0.045200 +v 0.042700 1.518300 0.044800 +v 0.041000 1.517900 0.046300 +v 0.041092 1.518606 0.046690 +v -0.081180 1.469406 -0.027314 +v -0.081392 1.510102 -0.034728 +v -0.078509 1.508234 -0.032619 +v -0.078690 1.510401 -0.032211 +v 0.039015 1.518382 0.047807 +v 0.039024 1.517589 0.047472 +v -0.091593 1.496901 -0.045991 +v -0.092481 1.504997 -0.047033 +v -0.093707 1.505301 -0.048582 +v -0.092700 1.496700 -0.047300 +v 0.032398 1.520499 0.049611 +v 0.032070 1.520462 0.048411 +v 0.035568 1.520277 0.047815 +v 0.035900 1.520600 0.048700 +v 0.039100 1.520700 0.047800 +v -0.069138 1.468847 -0.022896 +v -0.073401 1.503294 -0.031788 +v -0.074390 1.501599 -0.031214 +v 0.038514 1.520381 0.046986 +v 0.035403 1.520177 0.046662 +v 0.033667 1.522493 0.042517 +v 0.035009 1.522519 0.042154 +v 0.038077 1.520272 0.045916 +v -0.073320 1.508785 -0.030771 +v -0.073362 1.510351 -0.029068 +v 0.032250 1.520456 0.047158 +v 0.032125 1.522658 0.042749 +v 0.028946 1.521295 0.047200 +v -0.071539 1.486197 -0.018780 +v 0.030633 1.523025 0.042787 +v 0.026318 1.522169 0.047391 +v 0.033945 1.532789 0.049154 +v 0.033987 1.533023 0.049380 +v 0.038500 1.532400 0.048800 +v 0.038100 1.532100 0.048400 +v 0.051553 1.496317 -0.100345 +v 0.052901 1.510673 -0.105774 +v 0.040195 1.505800 
-0.115183 +v 0.040900 1.529700 0.047500 +v 0.040908 1.530602 0.047110 +v 0.043300 1.529300 0.045900 +v 0.043109 1.528498 0.046011 +v 0.040898 1.528786 0.047906 +v -0.087705 1.519915 -0.045409 +v -0.088484 1.518000 -0.046810 +v -0.086125 1.518595 -0.043989 +v -0.085717 1.519801 -0.042398 +v -0.084872 1.521507 -0.039310 +v -0.084900 1.522959 -0.038902 +v -0.086800 1.521600 -0.043400 +v -0.076910 1.509099 -0.031384 +v -0.077488 1.510807 -0.031221 +v 0.042890 1.527512 0.046186 +v 0.038200 1.530000 0.049500 +v -0.078757 1.481692 -0.026054 +v -0.079651 1.475700 -0.026960 +v -0.087515 1.486694 -0.039481 +v -0.087178 1.487520 -0.036411 +v -0.087200 1.496700 -0.049600 +v -0.086626 1.505995 -0.050271 +v -0.085270 1.505500 -0.048424 +v -0.086200 1.496700 -0.048500 +v 0.038200 1.531000 0.049000 +v 0.038100 1.531800 0.048500 +v 0.042119 1.526072 0.042035 +v 0.041362 1.527066 0.042705 +v -0.080890 1.497910 -0.046193 +v -0.077697 1.498497 -0.046178 +v -0.074100 1.498700 -0.022500 +v -0.073200 1.502400 -0.020900 +v -0.073000 1.508100 -0.030600 +v -0.073112 1.509696 -0.028998 +v -0.082101 1.481590 -0.041807 +v -0.077895 1.474733 -0.034283 +v -0.082003 1.479694 -0.039813 +v -0.073816 1.473945 -0.032524 +v -0.073304 1.473187 -0.030203 +v -0.075893 1.473216 -0.031599 +v 0.041916 1.526935 0.044139 +v 0.042956 1.526131 0.042937 +v 0.042757 1.525369 0.041438 +v 0.043602 1.525370 0.042054 +v 0.037542 1.525216 0.039867 +v 0.037198 1.525606 0.040151 +v 0.036660 1.526085 0.040544 +v -0.084062 1.483308 -0.033637 +v -0.085978 1.484417 -0.036390 +v 0.037714 1.524891 0.039697 +v -0.071500 1.499400 -0.027100 +v -0.070090 1.497912 -0.028215 +v -0.070510 1.499177 -0.026601 +v -0.086793 1.502906 -0.038129 +v -0.088239 1.494694 -0.038349 +v -0.077317 1.513394 -0.028185 +v 0.037779 1.524597 0.039632 +v -0.076101 1.503386 -0.027988 +v -0.076739 1.506787 -0.026397 +v -0.080088 1.516707 -0.028296 +v -0.078608 1.515193 -0.028702 +v -0.078171 1.514217 -0.028369 +v -0.079119 1.514587 -0.027733 +v -0.064970 1.489977 -0.026859 +v -0.068207 1.488286 -0.025432 +v -0.069600 1.491400 -0.025400 +v 0.037758 1.524316 0.039661 +v 0.037669 1.524040 0.039811 +v 0.058518 1.493799 0.036810 +v 0.059081 1.504398 0.036285 +v -0.083936 1.522604 -0.037179 +v -0.084376 1.520602 -0.038312 +v -0.083800 1.512300 -0.046400 +v 0.061300 1.496500 0.033300 +v 0.043599 1.521847 0.043556 +v -0.077005 1.514888 -0.032296 +v -0.077607 1.513397 -0.033092 +v 0.044055 1.522221 0.042659 +v -0.077512 1.523697 -0.039600 +v -0.077134 1.522214 -0.036306 +v -0.077024 1.522596 -0.035096 +v -0.077000 1.526200 -0.035400 +v 0.043015 1.522082 0.043129 +v -0.072194 1.470894 -0.027111 +v -0.075000 1.471200 -0.029600 +v -0.074913 1.472222 -0.030080 +v -0.072794 1.472090 -0.028308 +v -0.074614 1.485867 -0.026314 +v -0.073392 1.484991 -0.028510 +v 0.043462 1.522488 0.042348 +v -0.088313 1.487893 -0.042607 +v -0.087791 1.487097 -0.041406 +v -0.088800 1.491100 -0.043700 +v -0.089407 1.492300 -0.044808 +v 0.044553 1.522761 0.041803 +v 0.044927 1.522554 0.041844 +v 0.041056 1.521240 0.044204 +v 0.036673 1.522964 0.041275 +v 0.037083 1.523262 0.040834 +v -0.074023 1.506963 -0.033952 +v -0.073613 1.505802 -0.033402 +v -0.078264 1.516972 -0.037457 +v -0.080300 1.519500 -0.038900 +v -0.078936 1.522279 -0.035468 +v -0.077375 1.520039 -0.035263 +v -0.085184 1.513304 -0.048435 +v 0.041746 1.521873 0.043317 +v 0.039788 1.520688 0.045084 +v 0.035983 1.522699 0.041727 +v 0.037308 1.523509 0.040446 +v 0.037510 1.523762 0.040093 +v -0.077900 1.512000 -0.032500 +v -0.078690 1.512194 -0.033514 +v 
0.042057 1.522240 0.042582 +v -0.077290 1.512393 -0.032109 +v 0.042503 1.522667 0.041859 +v 0.043826 1.523087 0.041769 +v -0.074693 1.509798 -0.027983 +v -0.076690 1.508602 -0.028290 +v -0.079600 1.517115 -0.029581 +v -0.083299 1.520672 -0.032822 +v -0.082605 1.520723 -0.032873 +v -0.079196 1.517891 -0.029805 +v 0.043025 1.523723 0.040992 +v 0.043098 1.524246 0.040972 +v 0.044011 1.523655 0.041586 +v -0.075486 1.514507 -0.028998 +v 0.044072 1.524168 0.041487 +v 0.042854 1.523208 0.041263 +v -0.014577 1.465024 -0.102244 +v -0.016400 1.474400 -0.109200 +v 0.043014 1.524789 0.041098 +v 0.043930 1.524704 0.041661 +v 0.045170 1.522128 0.041830 +v 0.045172 1.522916 0.041508 +v 0.045689 1.523909 0.041025 +v 0.045611 1.522137 0.041396 +v -0.077508 1.523187 -0.028108 +v -0.078384 1.524716 -0.032003 +v -0.079493 1.525831 -0.032370 +v -0.078124 1.524043 -0.029015 +v -0.085510 1.518598 -0.041793 +v 0.046307 1.522104 0.040817 +v 0.045900 1.521000 0.041200 +v 0.046700 1.524488 0.040798 +v 0.048114 1.524818 0.040328 +v -0.089900 1.514600 -0.047000 +v -0.075224 1.500049 -0.029836 +v -0.072310 1.497196 -0.029602 +v 0.047605 1.521996 0.040014 +v 0.046900 1.520700 0.040400 +v 0.044824 1.523362 0.041444 +v -0.062100 1.496100 -0.028400 +v -0.062400 1.497300 -0.028400 +v -0.062996 1.496896 -0.029588 +v -0.062700 1.495600 -0.029800 +v 0.044987 1.523980 0.041208 +v 0.046091 1.525512 0.041885 +v -0.065798 1.490998 -0.030077 +v -0.068687 1.489987 -0.028824 +v -0.067795 1.488059 -0.027571 +v -0.065087 1.488901 -0.029508 +v 0.046900 1.526300 0.041900 +v 0.045406 1.524990 0.041911 +v 0.044897 1.525716 0.043091 +v 0.045400 1.526400 0.043200 +v -0.061700 1.493000 -0.028800 +v -0.061900 1.494600 -0.028500 +v -0.062600 1.493904 -0.030024 +v -0.062510 1.492089 -0.030514 +v 0.046000 1.527100 0.043200 +v 0.044489 1.527303 0.044590 +v -0.083251 1.522548 -0.035046 +v -0.085047 1.522467 -0.036875 +v 0.044900 1.528100 0.044600 +v 0.044112 1.526492 0.044515 +v 0.044792 1.524756 0.041778 +v -0.091507 1.513492 -0.045983 +v -0.092000 1.511200 -0.046800 +v -0.088778 1.518688 -0.044296 +v -0.089400 1.516800 -0.045700 +v 0.044314 1.525441 0.042693 +v 0.043559 1.526182 0.043934 +v 0.042506 1.526800 0.045709 +v -0.073810 1.490899 -0.021494 +v -0.074915 1.491096 -0.023006 +v -0.074587 1.489510 -0.022989 +v -0.092426 1.516497 -0.046084 +v -0.092491 1.518401 -0.047705 +v -0.074023 1.473610 -0.019287 +v -0.072900 1.472100 -0.018700 +v -0.040539 1.402694 -0.008784 +v -0.048074 1.407914 -0.020517 +v -0.047013 1.398104 -0.021095 +v -0.039392 1.393104 -0.010006 +v -0.079593 1.519097 -0.027011 +v -0.078310 1.521306 -0.026782 +v 0.040690 1.527910 0.047582 +v 0.039941 1.527889 0.045733 +v 0.038000 1.529200 0.049200 +v -0.061832 1.491418 -0.027890 +v -0.061767 1.490564 -0.029280 +v -0.064532 1.489040 -0.028225 +v -0.066803 1.495395 -0.026325 +v -0.062692 1.496102 -0.026989 +v 0.037203 1.528804 0.047236 +v 0.034542 1.533709 0.050445 +v 0.039499 1.533215 0.049975 +v 0.036807 1.528788 0.045465 +v 0.034347 1.526891 0.041805 +v 0.032772 1.526987 0.042325 +v 0.033546 1.528876 0.046426 +v -0.066004 1.492689 -0.029996 +v 0.023838 1.525682 0.047159 +v 0.024615 1.526505 0.047491 +v -0.066600 1.497700 -0.029000 +v -0.066899 1.498785 -0.027594 +v -0.069870 1.484581 -0.025568 +v -0.071628 1.484108 -0.027495 +v -0.073099 1.484030 -0.024480 +v -0.062493 1.490133 -0.030449 +v -0.073605 1.507995 -0.032192 +v 0.028529 1.525780 0.042811 +v 0.028182 1.525373 0.042703 +v 0.022456 1.525747 0.047856 +v 0.023742 1.526545 0.048365 +v -0.069294 1.493493 -0.029380 +v 
-0.072510 1.491006 -0.030991 +v -0.072108 1.488909 -0.030290 +v -0.069088 1.491598 -0.029288 +v -0.082186 1.480509 -0.032108 +v -0.082500 1.482389 -0.031806 +v -0.075207 1.486410 -0.032571 +v -0.078600 1.486500 -0.030726 +v -0.074610 1.503003 -0.030429 +v -0.075710 1.506076 -0.029052 +v -0.072018 1.499535 -0.027245 +v -0.069600 1.495600 -0.029000 +v -0.072429 1.493400 -0.030779 +v -0.066996 1.496709 -0.026563 +v -0.070701 1.496697 -0.025915 +v -0.070496 1.497807 -0.026089 +v -0.067002 1.497703 -0.026813 +v -0.080210 1.513496 -0.034376 +v -0.077161 1.519075 -0.025362 +v -0.078600 1.516400 -0.026100 +v -0.078956 1.526993 -0.051414 +v -0.077945 1.518913 -0.056083 +v -0.077665 1.520494 -0.043294 +v -0.066104 1.494500 -0.029818 +v -0.063008 1.498217 -0.028195 +v -0.063488 1.497694 -0.027293 +v -0.087700 1.483800 -0.040300 +v -0.073996 1.483585 -0.024303 +v -0.073597 1.507916 -0.028454 +v -0.087091 1.491399 -0.048005 +v -0.086200 1.486200 -0.045800 +v -0.073000 1.509200 -0.028900 +v -0.073613 1.510299 -0.028418 +v -0.072679 1.484436 -0.028099 +v -0.074011 1.484984 -0.025493 +v -0.073800 1.510700 -0.028300 +v -0.074759 1.490612 -0.024153 +v -0.085521 1.513187 -0.039739 +v 0.027850 1.528251 0.047514 +v 0.029964 1.526652 0.042905 +v -0.078886 1.472181 -0.034028 +v -0.076900 1.470700 -0.031400 +v 0.029085 1.526235 0.042905 +v -0.063107 1.497087 -0.027034 +v 0.026190 1.527374 0.047403 +v -0.067048 1.488263 -0.026903 +v -0.066401 1.496204 -0.029511 +v 0.027281 1.528690 0.049325 +v 0.025103 1.527452 0.048740 +v 0.033744 1.529320 0.048332 +v -0.073083 1.494401 -0.025488 +v -0.072718 1.492295 -0.025142 +v -0.075021 1.492793 -0.024235 +v 0.021800 1.564300 -0.125112 +v 0.021390 1.549196 -0.128268 +v -0.080194 1.487032 -0.032232 +v -0.076200 1.488129 -0.034638 +v -0.077499 1.489024 -0.036690 +v -0.082102 1.487860 -0.033716 +v -0.084805 1.481507 -0.043287 +v -0.083016 1.477319 -0.039971 +v -0.085043 1.475936 -0.039699 +v -0.087297 1.480394 -0.043209 +v 0.041100 1.551200 -0.119600 +v 0.041000 1.564300 -0.117100 +v -0.079918 1.541308 -0.059991 +v -0.078910 1.532799 -0.044899 +v -0.080121 1.548101 -0.049100 +v -0.083314 1.521389 -0.044086 +v -0.083806 1.518997 -0.045690 +v -0.088991 1.520200 -0.048605 +v -0.076340 1.525030 -0.030066 +v -0.076931 1.522731 -0.033547 +v -0.077407 1.523933 -0.031651 +v -0.090305 1.496596 -0.050608 +v 0.039290 1.528051 0.044190 +v 0.035655 1.526552 0.041178 +v 0.051007 1.565829 -0.109698 +v 0.050153 1.574170 -0.107041 +v 0.041409 1.575703 -0.112304 +v 0.037050 1.439676 -0.079925 +v 0.026375 1.429290 -0.086093 +v -0.074900 1.508300 -0.023500 +v -0.076811 1.508900 -0.025803 +v 0.020443 1.525555 0.049023 +v -0.081302 1.482541 -0.042839 +v 0.022090 1.526874 0.049265 +v -0.082741 1.485784 -0.031869 +v -0.085088 1.486284 -0.033841 +v -0.088312 1.522894 -0.037393 +v -0.085094 1.521385 -0.034799 +v 0.019381 1.524425 0.048883 +v 0.018804 1.524338 0.049175 +v 0.019820 1.525679 0.049223 +v 0.021236 1.527039 0.049389 +v 0.024082 1.528063 0.049571 +v 0.023449 1.528355 0.049996 +v 0.026775 1.529813 0.050788 +v 0.026860 1.529186 0.050504 +v 0.034030 1.529944 0.050131 +v 0.034105 1.530682 0.050431 +v 0.026377 1.530509 0.050284 +v 0.022991 1.528811 0.049615 +v 0.025891 1.531275 0.049667 +v 0.022800 1.529700 0.049200 +v 0.033943 1.532420 0.049431 +v 0.034105 1.531621 0.049993 +v 0.022009 1.522235 0.049146 +v 0.019888 1.522635 0.049121 +v 0.022491 1.522472 0.048858 +v 0.020169 1.522855 0.048732 +v 0.023596 1.522765 0.047753 +v 0.019510 1.523334 0.048695 +v 0.018853 1.523205 0.049074 +v -0.008841 
1.394244 0.015789 +v -0.008800 1.386900 0.011600 +v 0.024390 1.522876 0.047222 +v 0.019200 1.525800 0.049100 +v 0.018016 1.524253 0.049349 +v 0.018201 1.526310 0.048651 +v 0.017395 1.524492 0.049158 +v 0.020237 1.528365 0.048593 +v 0.020635 1.527357 0.048964 +v 0.017989 1.523129 0.049200 +v 0.018475 1.522629 0.049200 +v 0.017396 1.523127 0.049280 +v 0.017900 1.522496 0.049287 +v 0.024700 1.519600 0.050400 +v 0.024500 1.519000 0.050400 +v 0.021512 1.520977 0.049893 +v 0.021022 1.520599 0.049909 +v 0.032795 1.516401 0.049585 +v 0.035800 1.516500 0.048500 +v 0.032503 1.515300 0.049614 +v -0.008589 1.498484 0.073786 +v -0.010030 1.498324 0.072305 +v -0.008757 1.500242 0.071290 +v -0.008117 1.500249 0.072013 +v 0.035693 1.515294 0.048482 +v 0.027697 1.515798 0.050481 +v 0.028102 1.516702 0.050317 +v 0.029412 1.387797 -0.001384 +v 0.046000 1.518200 0.041600 +v 0.047200 1.517100 0.041300 +v 0.045392 1.516396 0.042790 +v 0.044509 1.517704 0.042911 +v 0.042900 1.517300 0.044400 +v -0.008946 1.489300 0.072430 +v -0.008334 1.490922 0.071314 +v -0.009315 1.490643 0.070347 +v -0.010188 1.489265 0.071512 +v 0.043309 1.515909 0.044414 +v 0.041000 1.515600 0.046000 +v 0.040900 1.516900 0.046000 +v 0.038919 1.516689 0.047177 +v 0.038722 1.515306 0.047242 +v 0.049300 1.521500 0.038900 +v 0.050591 1.524603 0.038686 +v 0.051200 1.520900 0.037800 +v 0.052905 1.524289 0.037006 +v 0.016797 1.527194 0.048968 +v -0.005681 1.499910 0.071178 +v -0.004213 1.497290 0.073791 +v -0.005785 1.498187 0.075196 +v -0.006796 1.500441 0.071916 +v 0.016432 1.524904 0.049054 +v 0.015300 1.527900 0.050300 +v 0.015200 1.525300 0.050100 +v 0.016200 1.530800 0.050800 +v 0.018103 1.529886 0.049322 +v 0.016702 1.523141 0.049381 +v 0.017301 1.522304 0.049412 +v 0.015612 1.523203 0.049817 +v 0.016513 1.521817 0.049816 +v 0.024099 1.518299 0.050487 +v 0.023694 1.517490 0.050592 +v 0.020550 1.520111 0.050065 +v 0.019827 1.519426 0.050302 +v 0.049954 1.486090 -0.095874 +v 0.053460 1.525713 -0.108355 +v 0.047153 1.537083 0.048674 +v 0.044799 1.534344 0.048966 +v 0.041146 1.535635 0.050758 +v 0.043109 1.539081 0.051229 +v 0.050240 1.540841 0.047395 +v 0.052685 1.537388 0.044750 +v 0.049920 1.534711 0.046178 +v 0.047487 1.532583 0.046972 +v 0.051300 1.527400 0.039700 +v 0.049306 1.527394 0.041107 +v 0.049400 1.529100 0.042600 +v 0.051400 1.529700 0.041500 +v 0.053700 1.527400 0.038000 +v 0.054100 1.530500 0.039900 +v 0.050911 1.531995 0.043813 +v 0.048706 1.530695 0.044709 +v 0.053767 1.533613 0.042372 +v 0.023810 1.535193 0.052318 +v 0.024621 1.533474 0.050880 +v 0.021502 1.531689 0.049858 +v 0.020094 1.532606 0.050483 +v 0.022472 1.536856 0.053894 +v 0.017898 1.533692 0.051907 +v 0.036404 1.539832 0.053434 +v 0.035572 1.536604 0.052091 +v 0.025594 1.531753 0.049356 +v 0.023502 1.530697 0.049184 +v 0.025447 1.531989 0.049401 +v 0.022900 1.530600 0.049100 +v 0.058944 1.585628 -0.090242 +v 0.061100 1.592200 -0.078300 +v 0.044568 1.602329 -0.084574 +v 0.043693 1.594392 -0.096197 +v 0.041000 1.531000 0.047100 +v 0.041600 1.531400 0.047600 +v 0.044100 1.530100 0.046400 +v 0.043500 1.529700 0.046000 +v 0.045300 1.528600 0.044700 +v 0.045998 1.529012 0.044988 +v 0.047100 1.528000 0.043400 +v 0.046494 1.527712 0.043387 +v 0.047100 1.527000 0.042300 +v 0.047706 1.526995 0.042008 +v 0.023200 1.531500 0.049500 +v 0.025342 1.532353 0.049786 +v 0.052443 1.554395 -0.110733 +v 0.045436 1.574694 0.038505 +v 0.052333 1.571092 0.032286 +v 0.050200 1.565000 0.037900 +v 0.042799 1.532208 0.048590 +v 0.045400 1.530900 0.047000 +v 0.047100 1.529700 0.045100 +v 
0.048000 1.528600 0.043300 +v 0.048394 1.527705 0.042091 +v 0.071726 1.489597 -0.018397 +v 0.072300 1.489600 -0.019800 +v 0.082403 1.497500 -0.046021 +v 0.080609 1.503982 -0.045312 +v 0.079052 1.507506 -0.044008 +v 0.078100 1.518500 -0.029900 +v 0.081582 1.521655 -0.032839 +v 0.085888 1.480697 -0.037210 +v 0.082727 1.479003 -0.033181 +v -0.088400 1.525800 -0.038200 +v 0.085882 1.481597 -0.037615 +v 0.089000 1.517700 -0.043700 +v 0.089700 1.515900 -0.045000 +v 0.090689 1.515510 -0.044722 +v 0.042993 1.466469 -0.086739 +v 0.047760 1.478256 -0.092516 +v 0.090400 1.513800 -0.046200 +v 0.074300 1.494200 -0.021900 +v 0.083479 1.497097 -0.046117 +v 0.085217 1.496703 -0.047579 +v 0.086211 1.491703 -0.047289 +v 0.085503 1.492007 -0.046781 +v 0.085486 1.486796 -0.045215 +v 0.084991 1.487411 -0.044990 +v 0.087400 1.484200 -0.040300 +v 0.088704 1.487908 -0.042783 +v 0.082840 1.522084 -0.034142 +v 0.082112 1.521223 -0.032776 +v 0.083570 1.521955 -0.034436 +v 0.075177 1.493002 -0.023111 +v 0.076500 1.516600 -0.029500 +v 0.075900 1.514300 -0.028300 +v 0.075700 1.514700 -0.028700 +v 0.071800 1.498104 -0.026083 +v 0.071802 1.498996 -0.026313 +v 0.072996 1.496503 -0.026061 +v 0.075410 1.495001 -0.024819 +v 0.069598 1.487587 -0.024692 +v 0.069300 1.486401 -0.026424 +v 0.069687 1.485404 -0.026018 +v 0.085515 1.527480 -0.038589 +v 0.075999 1.513302 -0.027984 +v 0.077199 1.514400 -0.028210 +v 0.077500 1.515700 -0.028600 +v 0.084313 1.521518 -0.034793 +v 0.077793 1.516688 -0.028928 +v 0.077100 1.516600 -0.028900 +v 0.078695 1.518295 -0.029809 +v 0.076700 1.516600 -0.029100 +v 0.077487 1.491398 -0.037513 +v 0.076038 1.491207 -0.035474 +v 0.073200 1.509900 -0.019400 +v 0.073700 1.508000 -0.021200 +v 0.081613 1.478503 -0.031092 +v 0.077071 1.514610 -0.039608 +v 0.076983 1.517291 -0.040993 +v 0.089989 1.492402 -0.044791 +v 0.083719 1.487422 -0.033473 +v 0.081400 1.486758 -0.031819 +v 0.085799 1.488493 -0.035214 +v 0.087406 1.491007 -0.036987 +v 0.073275 1.507102 -0.031910 +v 0.084591 1.511302 -0.037217 +v 0.085725 1.509009 -0.038205 +v 0.082810 1.511499 -0.035689 +v 0.077116 1.520902 -0.039194 +v 0.076866 1.521086 -0.036895 +v 0.077427 1.516146 -0.037671 +v 0.076733 1.519477 -0.036354 +v 0.071294 1.491000 -0.025011 +v 0.071539 1.485011 -0.028169 +v 0.071376 1.487003 -0.028816 +v 0.072800 1.485000 -0.029000 +v 0.073029 1.485996 -0.030176 +v 0.073697 1.487491 -0.031606 +v 0.074400 1.489400 -0.032700 +v 0.074445 1.491799 -0.033137 +v 0.070700 1.495100 -0.025700 +v 0.076554 1.521809 -0.026878 +v 0.070296 1.493304 -0.025475 +v 0.072097 1.496697 -0.025914 +v 0.072201 1.494908 -0.025566 +v 0.071902 1.492995 -0.025121 +v 0.077900 1.484000 -0.026400 +v 0.078789 1.485596 -0.028414 +v 0.076283 1.484669 -0.025833 +v 0.079808 1.486114 -0.030284 +v 0.074909 1.484910 -0.025487 +v 0.077240 1.515882 -0.038378 +v 0.076800 1.519100 -0.038300 +v 0.071770 1.493003 -0.016807 +v 0.063703 1.501321 0.029000 +v 0.065291 1.510391 0.023890 +v 0.026059 1.454896 0.049004 +v 0.025298 1.451791 0.049406 +v 0.054800 1.491500 0.040500 +v 0.049968 1.489449 0.045159 +v 0.025148 1.460346 0.061133 +v 0.024435 1.459621 0.061337 +v 0.023522 1.460187 0.061418 +v 0.024249 1.460520 0.061081 +v 0.022000 1.460772 0.059500 +v 0.022800 1.460707 0.058800 +v 0.023014 1.460696 0.059746 +v 0.022313 1.460759 0.060425 +v 0.028109 1.475652 0.062034 +v 0.031378 1.479582 0.059952 +v 0.034158 1.475322 0.058856 +v 0.030949 1.471935 0.060987 +v 0.024580 1.459509 0.051432 +v 0.024307 1.461110 0.051921 +v 0.023228 1.460994 0.055868 +v 0.023532 1.460361 0.055415 +v 0.024417 
1.456932 0.051523 +v 0.023415 1.459081 0.054968 +v 0.026131 1.458845 0.048941 +v 0.025595 1.461351 0.049316 +v 0.033679 1.454181 0.057135 +v 0.030493 1.456211 0.059276 +v 0.031308 1.459310 0.059370 +v 0.034660 1.458692 0.057534 +v 0.025183 1.486692 0.060752 +v 0.028504 1.483099 0.060419 +v 0.025503 1.479201 0.062212 +v 0.022102 1.483306 0.062428 +v 0.018718 1.488020 0.063334 +v 0.021581 1.490552 0.061358 +v 0.017491 1.495490 0.066471 +v 0.017785 1.491959 0.067693 +v 0.017219 1.491111 0.070903 +v 0.017625 1.493108 0.069690 +v 0.015426 1.487303 0.069410 +v 0.017764 1.489813 0.066272 +v 0.011460 1.486297 0.069954 +v -0.012342 1.488981 0.072095 +v -0.011326 1.490885 0.070919 +v -0.012050 1.491341 0.072767 +v -0.013716 1.489773 0.073790 +v 0.011262 1.487086 0.070809 +v 0.015505 1.486658 0.068092 +v -0.004958 1.489999 0.080211 +v -0.006080 1.492103 0.080389 +v -0.004980 1.491307 0.078099 +v -0.004935 1.489324 0.077714 +v 0.015732 1.485856 0.066517 +v 0.011732 1.485545 0.068955 +v 0.010705 1.488275 0.071582 +v 0.013514 1.488048 0.071692 +v -0.007888 1.492561 0.079779 +v -0.009808 1.492021 0.081514 +v -0.011583 1.491985 0.079889 +v -0.009400 1.492428 0.078300 +v -0.004716 1.493621 0.073204 +v -0.006063 1.491670 0.074584 +v -0.003964 1.493789 0.076796 +v -0.012266 1.493714 0.070902 +v -0.010928 1.492691 0.069478 +v -0.009809 1.496498 0.068938 +v -0.011062 1.497282 0.069812 +v 0.015121 1.489854 0.073173 +v 0.004381 1.488908 0.079905 +v 0.005639 1.490149 0.081872 +v 0.010589 1.518080 0.064760 +v 0.013675 1.514833 0.061918 +v 0.010631 1.515723 0.067072 +v 0.013514 1.512714 0.064157 +v 0.014372 1.491968 0.076867 +v 0.015789 1.493572 0.075165 +v 0.011991 1.493152 0.080609 +v 0.013201 1.495495 0.080068 +v 0.003688 1.531722 0.062648 +v 0.002148 1.485462 0.075849 +v 0.002408 1.484602 0.075445 +v 0.001814 1.486575 0.077679 +v 0.022510 1.586616 -0.113415 +v 0.023000 1.599213 -0.102898 +v 0.022139 1.613460 -0.081347 +v 0.019834 1.618165 -0.069601 +v 0.005695 1.468502 0.076511 +v 0.005296 1.470999 0.076289 +v 0.012858 1.464687 0.059653 +v 0.007373 1.464960 0.061179 +v 0.006975 1.463108 0.065761 +v 0.012269 1.462964 0.064467 +v 0.016278 1.462759 0.062362 +v 0.017223 1.464311 0.057602 +v 0.013211 1.465957 0.057083 +v 0.007475 1.466294 0.058483 +v 0.011800 1.461943 0.068868 +v 0.006582 1.462229 0.070956 +v 0.006449 1.463859 0.073683 +v 0.011854 1.463360 0.071100 +v 0.018283 1.481390 0.065162 +v 0.014145 1.482119 0.067466 +v 0.009947 1.482343 0.070802 +v 0.006663 1.482219 0.073355 +v 0.006070 1.484310 0.073705 +v 0.008634 1.484916 0.071533 +v 0.005846 1.485250 0.074051 +v 0.008425 1.485773 0.072107 +v 0.008365 1.486729 0.072615 +v 0.008909 1.488251 0.072763 +v 0.007010 1.488079 0.074018 +v 0.007500 1.489300 0.073600 +v 0.005805 1.486238 0.074278 +v 0.004053 1.510006 0.078946 +v 0.003955 1.513584 0.076221 +v 0.009855 1.472515 0.074138 +v 0.005009 1.472999 0.075408 +v 0.004315 1.476815 0.074624 +v 0.008666 1.476809 0.073279 +v 0.013211 1.476061 0.071006 +v 0.014910 1.470831 0.071677 +v 0.002734 1.483714 0.075107 +v 0.010300 1.470400 0.074600 +v 0.010703 1.467995 0.074513 +v 0.015502 1.468576 0.071979 +v 0.015800 1.466400 0.071600 +v 0.019602 1.468322 0.068491 +v 0.018439 1.474099 0.068134 +v 0.023987 1.472042 0.064669 +v 0.023470 1.465647 0.065227 +v 0.031113 1.462468 0.060031 +v 0.026531 1.460082 0.060975 +v 0.026577 1.461421 0.061328 +v 0.026234 1.462618 0.062158 +v 0.030104 1.465556 0.061112 +v 0.034404 1.463254 0.058475 +v 0.033149 1.467775 0.059705 +v 0.027971 1.468600 0.062562 +v 0.025395 1.463939 0.063270 +v 
0.025250 1.461104 0.061542 +v 0.024309 1.460885 0.061344 +v 0.024976 1.461816 0.062298 +v 0.024034 1.461246 0.062012 +v 0.024386 1.462489 0.063348 +v 0.023558 1.461583 0.062874 +v 0.022612 1.462197 0.064256 +v 0.023177 1.463590 0.064981 +v 0.022133 1.460920 0.059768 +v 0.021168 1.460992 0.060855 +v 0.021285 1.460964 0.062165 +v 0.022204 1.460838 0.060987 +v 0.022757 1.460837 0.059067 +v 0.022727 1.460779 0.060310 +v 0.022731 1.461571 0.056693 +v 0.021763 1.461781 0.057508 +v -0.045276 1.418919 -0.006497 +v -0.051316 1.421097 -0.020493 +v 0.023289 1.460690 0.058334 +v 0.023330 1.460718 0.059396 +v 0.023394 1.460716 0.060836 +v 0.023251 1.460914 0.061284 +v 0.022758 1.461069 0.062036 +v 0.021882 1.461334 0.063176 +v 0.015798 1.464470 0.070396 +v 0.019424 1.464378 0.068200 +v 0.019113 1.462802 0.066769 +v 0.015646 1.462491 0.068328 +v 0.018697 1.461683 0.065172 +v 0.017785 1.465453 0.055196 +v 0.015373 1.461586 0.066485 +v 0.020861 1.463425 0.055138 +v 0.021815 1.464456 0.052646 +v 0.019799 1.462183 0.059397 +v 0.023776 1.462261 0.052914 +v 0.022746 1.462430 0.053473 +v 0.024706 1.463135 0.050424 +v 0.023488 1.463681 0.051401 +v 0.018585 1.461130 0.063357 +v 0.019687 1.466161 0.068684 +v 0.011193 1.465708 0.073182 +v 0.006008 1.466276 0.075328 +v -0.034601 1.509504 0.051197 +v -0.033968 1.506854 0.052483 +v -0.030002 1.508004 0.053112 +v -0.030801 1.510322 0.051979 +v 0.004764 1.502917 0.085468 +v 0.004430 1.506626 0.081984 +v 0.022824 1.460592 0.061246 +v 0.009000 1.380900 0.008200 +v -0.075600 1.495000 -0.023600 +v -0.074983 1.495799 -0.022522 +v -0.084383 1.516205 -0.047117 +v 0.025820 1.458897 0.061092 +v 0.023838 1.454565 0.051597 +v 0.022628 1.458084 0.055319 +v 0.030940 1.449785 0.057855 +v 0.026597 1.445592 0.060153 +v 0.024565 1.449667 0.061995 +v 0.028277 1.452952 0.060029 +v 0.021911 1.455732 0.064216 +v 0.024465 1.457488 0.062231 +v -0.087769 1.520281 -0.042209 +v 0.023209 1.458786 0.062690 +v 0.020872 1.457806 0.064774 +v 0.019961 1.459373 0.065056 +v 0.022233 1.459817 0.062668 +v 0.017872 1.461109 0.063406 +v 0.013488 1.461506 0.066143 +v 0.012943 1.457844 0.061366 +v 0.017612 1.457529 0.058998 +v 0.021527 1.457717 0.056493 +v 0.021070 1.460876 0.060391 +v -0.082911 1.523192 -0.041995 +v -0.081484 1.525319 -0.037312 +v -0.080076 1.523708 -0.036314 +v -0.081315 1.521297 -0.040291 +v 0.018523 1.461113 0.064244 +v 0.021342 1.460835 0.061409 +v 0.013977 1.461497 0.067282 +v 0.018900 1.383500 0.004700 +v 0.021710 1.460566 0.062261 +v 0.019048 1.460695 0.064671 +v 0.014749 1.460724 0.068045 +v 0.015854 1.458903 0.068665 +v 0.016792 1.456693 0.068793 +v 0.017461 1.453899 0.067856 +v 0.019149 1.446712 0.065034 +v 0.020456 1.441594 0.063824 +v 0.013229 1.444892 0.067749 +v 0.014176 1.439699 0.067357 +v 0.012333 1.452486 0.071105 +v 0.012051 1.456002 0.072665 +v 0.011198 1.458638 0.072223 +v 0.010410 1.460704 0.071129 +v 0.003264 1.481942 0.074712 +v 0.005689 1.488011 0.075291 +v 0.004510 1.488195 0.077505 +v 0.007868 1.491520 0.082722 +v 0.001736 1.487415 0.079846 +v 0.010492 1.492952 0.082279 +v 0.008765 1.492452 0.083652 +v 0.009522 1.493885 0.084514 +v 0.010810 1.494494 0.083310 +v 0.007182 1.491909 0.084782 +v 0.008500 1.493700 0.085400 +v 0.002902 1.491075 0.086336 +v 0.003721 1.494335 0.088102 +v 0.004650 1.499551 0.087718 +v 0.004111 1.496344 0.088521 +v 0.009882 1.495499 0.084882 +v 0.011411 1.497274 0.083159 +v -0.090800 1.520100 -0.042800 +v 0.011743 1.500268 0.080871 +v 0.013890 1.498099 0.077459 +v 0.010602 1.625284 -0.027469 +v 0.009656 1.461739 0.070112 +v 0.008742 1.461758 
0.068290 +v -0.075382 1.482188 -0.024203 +v -0.084040 1.509298 -0.036335 +v 0.012774 1.447697 0.054662 +v 0.012805 1.452064 0.057064 +v 0.008059 1.451891 0.058520 +v -0.080484 1.525822 -0.034804 +v -0.083919 1.527606 -0.035488 +v 0.008007 1.447711 0.056270 +v 0.004812 1.461758 0.069092 +v -0.090591 1.497096 -0.045921 +v -0.091688 1.518505 -0.044615 +v -0.089812 1.517295 -0.043486 +v -0.081321 1.518998 -0.029088 +v -0.078895 1.491603 -0.038661 +v -0.085402 1.491588 -0.036554 +v -0.084007 1.489086 -0.035109 +v -0.078399 1.489817 -0.037774 +v -0.073444 1.504694 -0.032820 +v 0.004543 1.458037 0.063816 +v 0.008262 1.458084 0.063082 +v 0.005072 1.461835 0.071297 +v 0.005589 1.460631 0.072613 +v 0.006193 1.458494 0.073986 +v 0.004333 1.451771 0.059202 +v 0.004401 1.447435 0.056949 +v -0.071111 1.486897 -0.016909 +v 0.022224 1.453109 0.052652 +v -0.081578 1.475997 -0.030012 +v -0.082714 1.477004 -0.032189 +v -0.031157 1.465431 -0.096619 +v -0.034883 1.475714 -0.102887 +v -0.092400 1.496600 -0.049200 +v 0.022834 1.449420 0.050597 +v 0.017680 1.452071 0.055120 +v 0.018045 1.448223 0.052946 +v 0.007817 1.439111 0.069698 +v 0.007074 1.444141 0.069216 +v 0.006581 1.455345 0.074334 +v 0.006668 1.451550 0.072522 +v 0.022923 1.607438 -0.092701 +v 0.062856 1.576001 0.009824 +v 0.061268 1.569757 0.017690 +v -0.083202 1.522130 -0.035985 +v 0.055147 1.577388 0.024227 +v 0.056734 1.582384 0.017099 +v 0.063470 1.583142 0.001913 +v 0.057200 1.588300 0.009600 +v 0.040530 1.616390 -0.045728 +v 0.041088 1.616383 -0.031901 +v 0.021913 1.622737 -0.028613 +v 0.035329 1.615763 -0.059197 +v 0.047797 1.607383 -0.067962 +v 0.050017 1.610884 -0.049176 +v 0.049914 1.611324 -0.034400 +v -0.077509 1.481801 -0.025290 +v -0.077217 1.476111 -0.024483 +v -0.075527 1.476613 -0.022791 +v -0.078213 1.512608 -0.033384 +v 0.025319 1.417296 -0.086431 +v 0.053341 1.537593 -0.109735 +v 0.048594 1.610887 -0.021003 +v 0.040400 1.615200 -0.018500 +v 0.039869 1.612797 -0.006457 +v 0.047253 1.609196 -0.009635 +v 0.021703 1.620912 -0.014487 +v 0.010484 1.623294 -0.012843 +v 0.019518 1.501968 -0.122785 +v 0.003912 1.516946 0.073584 +v 0.049036 1.451832 -0.071080 +v 0.058265 1.451117 -0.053166 +v 0.051200 1.402600 -0.033800 +v 0.072990 1.474329 -0.033592 +v 0.072822 1.474732 -0.034523 +v 0.057824 1.437788 -0.035288 +v 0.061932 1.450619 -0.035689 +v 0.039174 1.452431 -0.082310 +v 0.082400 1.485383 -0.044422 +v 0.082903 1.489094 -0.045618 +v 0.083405 1.484800 -0.043789 +v 0.084115 1.488697 -0.045292 +v 0.083998 1.492895 -0.046032 +v 0.083879 1.483300 -0.042725 +v 0.076924 1.521442 -0.034424 +v 0.078187 1.523278 -0.033695 +v -0.080792 1.483473 -0.043245 +v -0.081006 1.485539 -0.044522 +v 0.072100 1.473090 -0.029303 +v 0.072098 1.473611 -0.030997 +v -0.063300 1.497600 -0.029200 +v 0.071500 1.473300 -0.030200 +v 0.071200 1.472900 -0.028900 +v 0.071696 1.472309 -0.027792 +v 0.071001 1.472380 -0.027818 +v 0.076624 1.536198 -0.028498 +v 0.070000 1.472400 -0.031600 +v 0.072088 1.474257 -0.034077 +v 0.071908 1.473871 -0.032409 +v 0.071000 1.473200 -0.031100 +v 0.070412 1.472386 -0.029103 +v 0.070191 1.471713 -0.027395 +v 0.071000 1.471300 -0.026800 +v -0.082300 1.478600 -0.039700 +v -0.078492 1.473495 -0.034110 +v -0.054580 1.424205 -0.033907 +v -0.053617 1.426598 -0.048816 +v -0.052512 1.415702 -0.047314 +v -0.052297 1.412202 -0.033512 +v -0.072400 1.500500 -0.017900 +v -0.072720 1.493501 -0.019884 +v -0.072078 1.492403 -0.018405 +v 0.084111 1.482503 -0.042789 +v 0.076500 1.472200 -0.031600 +v -0.022133 1.402373 0.012551 +v -0.018991 1.390401 0.007792 +v 
0.079079 1.513709 -0.040420 +v 0.079508 1.511895 -0.041894 +v 0.081688 1.513702 -0.043406 +v 0.081123 1.515993 -0.041982 +v 0.060712 1.487981 -0.084226 +v 0.062551 1.497993 -0.088895 +v 0.012694 1.418793 -0.090986 +v 0.012984 1.431500 -0.091585 +v 0.076230 1.530994 -0.074442 +v 0.078600 1.515400 -0.038900 +v 0.080687 1.517803 -0.040507 +v 0.082899 1.493300 -0.046163 +v 0.077782 1.514596 -0.038896 +v 0.078304 1.512904 -0.040408 +v 0.081700 1.519700 -0.042100 +v 0.082200 1.517500 -0.043700 +v 0.082816 1.515000 -0.045088 +v 0.078788 1.510996 -0.041897 +v -0.071080 1.487414 -0.013506 +v -0.070164 1.486779 -0.003273 +v -0.071773 1.502108 -0.003201 +v -0.072151 1.496898 -0.012896 +v 0.077321 1.519308 -0.041011 +v 0.080072 1.561101 -0.051900 +v 0.079500 1.554700 -0.065300 +v 0.077000 1.577000 -0.055600 +v 0.076100 1.571400 -0.070200 +v 0.071739 1.564758 -0.085415 +v 0.075100 1.546934 -0.080535 +v 0.076756 1.521093 -0.035588 +v 0.079218 1.524487 -0.034190 +v 0.064046 1.588845 -0.005609 +v 0.069100 1.583700 -0.012500 +v 0.057334 1.593424 0.002717 +v 0.057190 1.597993 -0.003706 +v 0.063487 1.594294 -0.012304 +v 0.070644 1.587414 -0.023990 +v 0.078576 1.553401 -0.037404 +v 0.078229 1.566908 -0.038395 +v 0.075235 1.571187 -0.025413 +v -0.059961 1.533198 0.030682 +v -0.061666 1.533495 0.026588 +v -0.061933 1.524304 0.026724 +v -0.059859 1.525661 0.030102 +v 0.074960 1.582288 -0.040605 +v 0.066600 1.595600 -0.030100 +v 0.070333 1.591526 -0.044305 +v 0.059609 1.603311 -0.033797 +v 0.061738 1.601056 -0.048113 +v 0.061583 1.597869 -0.063692 +v 0.071969 1.587274 -0.059096 +v 0.061700 1.599100 -0.018200 +v 0.055613 1.602114 -0.008857 +v 0.056700 1.604800 -0.021500 +v 0.053364 1.605595 -0.013183 +v 0.073321 1.572998 -0.018597 +v 0.065872 1.467058 -0.027795 +v 0.066067 1.467582 -0.022798 +v -0.020105 1.589077 0.046472 +v -0.009713 1.590238 0.048897 +v -0.009944 1.597050 0.042825 +v -0.020552 1.595691 0.040151 +v 0.069850 1.479001 -0.015004 +v -0.036783 1.483221 -0.106562 +v -0.038502 1.492891 -0.110712 +v -0.018483 1.490152 -0.117701 +v -0.017417 1.481271 -0.113053 +v 0.067976 1.503604 0.017668 +v 0.068022 1.515690 0.015397 +v 0.070799 1.507799 0.005481 +v 0.069552 1.493504 0.006914 +v -0.020717 1.610835 0.018436 +v -0.010055 1.612720 0.020935 +v -0.010178 1.617029 0.011196 +v -0.021004 1.614887 0.009079 +v -0.051849 1.506984 0.042450 +v -0.053308 1.500902 0.042309 +v -0.049500 1.500600 0.045600 +v -0.048611 1.506682 0.045109 +v 0.066865 1.528807 0.013619 +v 0.070014 1.521399 0.003997 +v 0.069755 1.535201 0.001517 +v 0.066276 1.542800 0.012494 +v 0.063123 1.558544 -0.099907 +v 0.065392 1.542129 -0.097712 +v -0.077083 1.471595 -0.021914 +v -0.075873 1.472489 -0.020821 +v 0.065894 1.530249 -0.096000 +v 0.048710 1.559533 0.042020 +v -0.010363 1.620560 -0.000194 +v -0.021444 1.618288 -0.001890 +v 0.022210 1.577716 -0.119229 +v 0.047881 1.581782 0.031191 +v 0.055375 1.577878 -0.100584 +v 0.000000 1.432410 -0.093127 +v 0.042198 1.583290 -0.106886 +v 0.071219 1.581908 -0.073609 +v 0.068054 1.575876 -0.086371 +v 0.060075 1.568989 -0.100537 +v 0.065163 1.514915 -0.094089 +v 0.047764 1.605853 -0.002383 +v 0.056288 1.527306 0.035388 +v 0.055400 1.522900 0.034700 +v 0.056800 1.532000 0.036900 +v 0.053710 1.518407 0.036312 +v 0.016988 1.511816 0.059509 +v 0.016523 1.509829 0.061527 +v 0.016060 1.507311 0.063477 +v 0.019643 1.507148 0.059252 +v 0.018886 1.504168 0.061098 +v 0.012688 1.507683 0.068746 +v 0.013226 1.510263 0.066357 +v 0.015327 1.504867 0.065498 +v 0.014758 1.501776 0.067443 +v 0.012525 1.504630 0.071081 +v 
0.018383 1.489537 0.064268 +v 0.019566 1.494092 0.062319 +v 0.017300 1.498299 0.064286 +v 0.022379 1.501260 0.059636 +v 0.021217 1.498003 0.061176 +v 0.018101 1.501539 0.062795 +v 0.023651 1.504578 0.057693 +v 0.015512 1.498333 0.069921 +v 0.016336 1.495285 0.072852 +v 0.013197 1.501347 0.074285 +v 0.011289 1.503443 0.077455 +v 0.010513 1.506879 0.074245 +v 0.020300 1.509300 0.057400 +v 0.024688 1.506991 0.055978 +v 0.024313 1.494467 0.060252 +v 0.028338 1.491184 0.059583 +v 0.026548 1.498726 0.058604 +v 0.030377 1.496054 0.058135 +v 0.027823 1.502213 0.056766 +v 0.031864 1.499784 0.056280 +v 0.031980 1.487707 0.059208 +v 0.034944 1.483875 0.058162 +v 0.037789 1.479097 0.056553 +v 0.037765 1.488714 0.056258 +v 0.040839 1.483470 0.054215 +v 0.039779 1.493485 0.054171 +v 0.043257 1.488620 0.051763 +v -0.013719 1.517008 0.059295 +v -0.014127 1.518765 0.056494 +v -0.017673 1.515705 0.055114 +v -0.017351 1.513871 0.057445 +v -0.007369 1.564131 0.060202 +v -0.008331 1.571458 0.057659 +v -0.017301 1.571015 0.055837 +v -0.015753 1.563817 0.058794 +v 0.034514 1.492718 0.057558 +v 0.036283 1.496846 0.055538 +v 0.033157 1.503343 0.054269 +v 0.029010 1.505211 0.054822 +v 0.041601 1.504292 0.050690 +v 0.041085 1.498898 0.052381 +v 0.037317 1.501291 0.053567 +v 0.037974 1.505637 0.051703 +v 0.045069 1.495056 0.049656 +v 0.045400 1.502500 0.048600 +v 0.045080 1.507384 0.047479 +v 0.031897 1.513496 0.050390 +v 0.027202 1.514204 0.051209 +v 0.035292 1.513223 0.049379 +v 0.041527 1.508144 0.049267 +v 0.038279 1.508792 0.050353 +v 0.049200 1.514300 0.041600 +v 0.046800 1.513600 0.043500 +v 0.041300 1.513100 0.047100 +v 0.038537 1.513112 0.048362 +v 0.044195 1.513295 0.045391 +v 0.022901 1.515905 0.051410 +v 0.018571 1.517968 0.051684 +v 0.055257 1.438999 -0.050771 +v 0.010308 1.510155 0.071901 +v 0.010478 1.513105 0.069505 +v 0.015156 1.533977 0.054304 +v 0.013596 1.531070 0.053522 +v 0.006923 1.531511 0.060081 +v 0.019158 1.537755 0.056855 +v 0.015489 1.520401 0.051954 +v -0.021739 1.513091 0.053661 +v -0.021012 1.511308 0.055406 +v 0.013800 1.522800 0.052200 +v 0.013178 1.525174 0.052712 +v 0.012942 1.528094 0.053113 +v -0.010005 1.602932 0.036161 +v -0.020698 1.601402 0.033405 +v -0.020840 1.621971 -0.050662 +v -0.021361 1.622750 -0.042579 +v -0.009988 1.625236 -0.042999 +v -0.011010 1.623798 -0.055177 +v 0.030114 1.551419 0.058036 +v 0.026720 1.545266 0.059766 +v 0.058325 1.563672 0.026481 +v 0.061625 1.555341 0.023089 +v 0.051668 1.546035 0.044932 +v 0.054700 1.541900 0.041900 +v 0.056409 1.536994 0.039107 +v 0.063211 1.544202 0.020706 +v 0.064120 1.532200 0.020511 +v 0.064932 1.520906 0.021512 +v -0.071022 1.498660 -0.072359 +v -0.074708 1.498310 -0.057287 +v -0.076813 1.509999 -0.058491 +v -0.074411 1.514719 -0.074802 +v 0.049122 1.592694 0.017549 +v -0.033028 1.407859 0.005141 +v -0.030039 1.395778 0.000929 +v 0.000000 1.419295 -0.092376 +v 0.048919 1.597614 0.010981 +v 0.048889 1.587381 0.024404 +v 0.039547 1.609385 0.002641 +v 0.039647 1.605670 0.010726 +v 0.048516 1.602029 0.004404 +v 0.040184 1.601257 0.017716 +v 0.040667 1.596353 0.024229 +v 0.040410 1.591110 0.031115 +v 0.039626 1.585244 0.037833 +v 0.037606 1.577249 0.044470 +v 0.046691 1.439449 -0.068449 +v 0.008002 1.500332 0.070494 +v 0.007296 1.500479 0.071202 +v -0.053520 1.554444 0.037880 +v -0.055167 1.559187 0.033299 +v -0.058439 1.552305 0.030019 +v -0.056797 1.548534 0.034888 +v 0.006915 1.499983 0.069926 +v 0.006208 1.500093 0.070605 +v 0.005973 1.498711 0.070176 +v 0.007189 1.491278 0.072688 +v 0.005704 1.493209 0.071394 +v 
0.005327 1.496700 0.070331 +v 0.068207 1.472506 -0.017095 +v -0.005887 1.552131 0.063546 +v -0.013242 1.551916 0.062647 +v -0.011428 1.545963 0.063453 +v -0.005203 1.545936 0.064108 +v -0.059440 1.518629 0.030726 +v -0.062023 1.515611 0.028312 +v -0.060821 1.508001 0.032915 +v -0.058003 1.511984 0.034409 +v -0.054598 1.507965 0.039780 +v -0.056600 1.502200 0.039300 +v -0.018904 1.579964 0.052345 +v -0.009112 1.580880 0.054387 +v -0.060600 1.543300 0.027500 +v -0.058985 1.541133 0.032384 +v -0.010699 1.519994 0.061739 +v -0.011514 1.521322 0.058249 +v -0.007996 1.525146 0.059987 +v -0.003912 1.524971 0.065569 +v -0.003648 1.528717 0.063754 +v -0.007046 1.528482 0.060248 +v -0.033993 1.562933 0.052199 +v -0.035634 1.569459 0.048664 +v -0.043291 1.567819 0.043458 +v -0.041735 1.561705 0.047287 +v -0.020698 1.606310 0.026348 +v -0.010005 1.608005 0.029016 +v -0.004139 1.537948 0.062980 +v -0.008537 1.537917 0.061632 +v -0.007473 1.534421 0.060282 +v -0.003950 1.534628 0.062399 +v -0.025500 1.509400 0.054200 +v -0.026221 1.511402 0.052919 +v -0.068924 1.470783 -0.026772 +v -0.070063 1.470235 -0.025719 +v -0.073354 1.517300 -0.015507 +v -0.072714 1.507101 -0.013696 +v -0.071736 1.514897 -0.004110 +v -0.071742 1.527400 -0.006595 +v -0.073300 1.538600 -0.010500 +v -0.074599 1.546926 -0.016268 +v -0.075720 1.531749 -0.022655 +v -0.074613 1.525899 -0.018098 +v -0.049669 1.418009 -0.059856 +v -0.048608 1.427595 -0.062745 +v -0.004078 1.522301 0.068552 +v -0.004027 1.519664 0.071163 +v -0.020495 1.529703 -0.128086 +v -0.019992 1.516906 -0.126383 +v -0.040609 1.520993 -0.118122 +v -0.040804 1.533198 -0.119411 +v -0.068436 1.488481 -0.068879 +v -0.072751 1.489694 -0.054914 +v -0.010892 1.495095 0.074194 +v -0.007594 1.499713 0.069461 +v -0.007789 1.496199 0.068918 +v -0.007417 1.498889 0.069265 +v -0.010944 1.492053 0.076099 +v -0.013247 1.491279 0.077394 +v -0.008600 1.492389 0.069127 +v -0.007700 1.492600 0.069700 +v -0.006998 1.496497 0.069211 +v -0.008729 1.500115 0.070107 +v -0.001944 1.488386 0.082348 +v -0.002499 1.489875 0.085125 +v -0.006506 1.490990 0.084097 +v -0.005006 1.489613 0.082093 +v -0.005458 1.499077 0.070745 +v -0.004491 1.496811 0.071589 +v -0.005310 1.494685 0.078792 +v -0.007206 1.498428 0.074823 +v -0.007494 1.500437 0.072081 +v -0.007200 1.495300 0.078000 +v -0.006200 1.489300 0.075200 +v -0.009008 1.495404 0.076405 +v -0.081106 1.485086 -0.030299 +v -0.055353 1.438827 -0.014305 +v -0.061802 1.452633 -0.021411 +v -0.061800 1.451269 -0.026789 +v -0.056475 1.436705 -0.023388 +v -0.080800 1.511900 -0.034700 +v -0.066012 1.558296 0.012561 +v -0.067698 1.568962 0.002878 +v -0.072355 1.557904 -0.008158 +v -0.071371 1.547902 -0.002509 +v -0.068379 1.577790 -0.004909 +v -0.073269 1.567598 -0.015308 +v -0.076018 1.556001 -0.024296 +v -0.077911 1.536200 -0.036897 +v -0.042165 1.521251 0.045613 +v -0.042412 1.521112 0.045815 +v -0.040900 1.520800 0.047100 +v -0.040900 1.520900 0.046900 +v -0.041684 1.521291 0.044982 +v -0.040343 1.520792 0.046058 +v -0.042542 1.521718 0.044010 +v -0.043095 1.521563 0.044535 +v -0.043365 1.521386 0.044713 +v -0.024933 1.521899 0.048906 +v -0.025588 1.522170 0.048171 +v -0.028191 1.521207 0.048430 +v -0.027547 1.521253 0.049572 +v -0.024720 1.521709 0.049378 +v -0.027503 1.521018 0.050005 +v -0.028678 1.523900 0.042557 +v -0.028204 1.524236 0.042492 +v -0.029466 1.523482 0.042681 +v -0.023235 1.523973 0.046674 +v -0.023291 1.524974 0.046778 +v -0.027982 1.525037 0.042609 +v -0.027969 1.524510 0.042490 +v -0.023469 1.523441 0.046801 +v -0.021958 1.523277 
0.046966 +v -0.021129 1.523661 0.046653 +v -0.021224 1.524972 0.046884 +v -0.032700 1.520100 0.050100 +v -0.032897 1.519399 0.050184 +v -0.028200 1.519704 0.050417 +v -0.027801 1.520500 0.050288 +v -0.032498 1.520485 0.049893 +v -0.035900 1.520600 0.049000 +v -0.035996 1.520197 0.049188 +v -0.036104 1.519501 0.049213 +v -0.024901 1.520591 0.050179 +v -0.024799 1.521400 0.049815 +v -0.036100 1.518900 0.049200 +v -0.033100 1.518800 0.050100 +v -0.028400 1.519100 0.050400 +v -0.024900 1.520200 0.050300 +v -0.022180 1.521429 0.049697 +v -0.023262 1.521579 0.049651 +v -0.033098 1.518102 0.049889 +v -0.033002 1.517298 0.049711 +v -0.028299 1.517601 0.050288 +v -0.028401 1.518399 0.050319 +v -0.036100 1.518200 0.048900 +v -0.036000 1.517400 0.048700 +v -0.044439 1.521538 0.043123 +v -0.044900 1.522000 0.042200 +v -0.044889 1.521497 0.042391 +v -0.044613 1.521100 0.042908 +v -0.047300 1.519000 0.040400 +v -0.048300 1.520100 0.039500 +v -0.050100 1.519300 0.038700 +v -0.048907 1.518104 0.039809 +v -0.056681 1.509389 0.037056 +v -0.051190 1.515294 0.039686 +v -0.052600 1.516500 0.037900 +v -0.044000 1.521300 0.043900 +v -0.043400 1.521000 0.044800 +v -0.043900 1.521600 0.043800 +v -0.044314 1.521924 0.042872 +v -0.044184 1.520701 0.043691 +v -0.043510 1.520399 0.044707 +v -0.042500 1.520800 0.045900 +v -0.042589 1.520103 0.045890 +v -0.041000 1.519800 0.047200 +v -0.040900 1.520500 0.047200 +v -0.038827 1.520294 0.048207 +v -0.039096 1.520691 0.047990 +v -0.038924 1.519591 0.048198 +v -0.041100 1.519200 0.047000 +v -0.039015 1.518991 0.048107 +v -0.042600 1.519500 0.045600 +v -0.043590 1.519902 0.044393 +v -0.044314 1.520298 0.043307 +v -0.044900 1.520700 0.042500 +v -0.045300 1.521300 0.041900 +v -0.044600 1.519800 0.042800 +v -0.045200 1.519100 0.042100 +v -0.044000 1.518600 0.043400 +v -0.043727 1.519374 0.043986 +v -0.045400 1.520300 0.041900 +v -0.046200 1.519700 0.041100 +v -0.042700 1.519000 0.045200 +v -0.042700 1.518300 0.044800 +v -0.041000 1.517900 0.046300 +v -0.041092 1.518606 0.046690 +v -0.039015 1.518382 0.047807 +v -0.039024 1.517589 0.047472 +v -0.032398 1.520499 0.049611 +v -0.032070 1.520462 0.048411 +v -0.035568 1.520277 0.047815 +v -0.035900 1.520600 0.048700 +v -0.039100 1.520700 0.047800 +v -0.038514 1.520381 0.046986 +v -0.035403 1.520177 0.046662 +v -0.033667 1.522493 0.042517 +v -0.035009 1.522519 0.042154 +v -0.038077 1.520272 0.045916 +v -0.032250 1.520456 0.047158 +v -0.032125 1.522658 0.042749 +v -0.028946 1.521295 0.047200 +v -0.030633 1.523025 0.042787 +v -0.026318 1.522169 0.047391 +v -0.033945 1.532789 0.049154 +v -0.033987 1.533023 0.049380 +v -0.038500 1.532400 0.048800 +v -0.038100 1.532100 0.048400 +v -0.051553 1.496317 -0.100345 +v -0.052901 1.510673 -0.105774 +v -0.040195 1.505800 -0.115183 +v -0.040900 1.529700 0.047500 +v -0.040908 1.530602 0.047110 +v -0.043300 1.529300 0.045900 +v -0.043109 1.528498 0.046011 +v -0.040898 1.528786 0.047906 +v -0.042890 1.527512 0.046186 +v -0.038200 1.530000 0.049500 +v -0.038200 1.531000 0.049000 +v -0.038100 1.531800 0.048500 +v -0.042119 1.526072 0.042035 +v -0.041362 1.527066 0.042705 +v -0.041916 1.526935 0.044139 +v -0.042956 1.526131 0.042937 +v -0.042757 1.525369 0.041438 +v -0.043602 1.525370 0.042054 +v -0.037542 1.525216 0.039867 +v -0.037198 1.525606 0.040151 +v -0.036660 1.526085 0.040544 +v -0.037714 1.524891 0.039697 +v -0.037779 1.524597 0.039632 +v -0.037758 1.524316 0.039661 +v -0.037669 1.524040 0.039811 +v -0.058518 1.493799 0.036810 +v -0.059081 1.504398 0.036285 +v -0.061300 1.496500 0.033300 +v 
-0.043599 1.521847 0.043556 +v -0.044055 1.522221 0.042659 +v -0.043015 1.522082 0.043129 +v -0.043462 1.522488 0.042348 +v -0.044553 1.522761 0.041803 +v -0.044927 1.522554 0.041844 +v -0.041056 1.521240 0.044204 +v -0.036673 1.522964 0.041275 +v -0.037083 1.523262 0.040834 +v -0.041746 1.521873 0.043317 +v -0.039788 1.520688 0.045084 +v -0.035983 1.522699 0.041727 +v -0.037308 1.523509 0.040446 +v -0.037510 1.523762 0.040093 +v -0.042057 1.522240 0.042582 +v -0.042503 1.522667 0.041859 +v -0.043826 1.523087 0.041769 +v -0.043025 1.523723 0.040992 +v -0.043098 1.524246 0.040972 +v -0.044011 1.523655 0.041586 +v -0.044072 1.524168 0.041487 +v -0.042854 1.523208 0.041263 +v -0.043014 1.524789 0.041098 +v -0.043930 1.524704 0.041661 +v -0.045170 1.522128 0.041830 +v -0.045172 1.522916 0.041508 +v -0.045689 1.523909 0.041025 +v -0.045611 1.522137 0.041396 +v -0.046307 1.522104 0.040817 +v -0.045900 1.521000 0.041200 +v -0.046700 1.524488 0.040798 +v -0.048114 1.524818 0.040328 +v -0.047605 1.521996 0.040014 +v -0.046900 1.520700 0.040400 +v -0.044824 1.523362 0.041444 +v -0.044987 1.523980 0.041208 +v -0.046091 1.525512 0.041885 +v -0.046900 1.526300 0.041900 +v -0.045406 1.524990 0.041911 +v -0.044897 1.525716 0.043091 +v -0.045400 1.526400 0.043200 +v -0.046000 1.527100 0.043200 +v -0.044489 1.527303 0.044590 +v -0.044900 1.528100 0.044600 +v -0.044112 1.526492 0.044515 +v -0.044792 1.524756 0.041778 +v -0.044314 1.525441 0.042693 +v -0.043559 1.526182 0.043934 +v -0.042506 1.526800 0.045709 +v -0.040690 1.527910 0.047582 +v -0.039941 1.527889 0.045733 +v -0.038000 1.529200 0.049200 +v -0.037203 1.528804 0.047236 +v -0.034542 1.533709 0.050445 +v -0.039499 1.533215 0.049975 +v -0.036807 1.528788 0.045465 +v -0.034347 1.526891 0.041805 +v -0.032772 1.526987 0.042325 +v -0.033546 1.528876 0.046426 +v -0.023838 1.525682 0.047159 +v -0.024615 1.526505 0.047491 +v -0.028529 1.525780 0.042811 +v -0.028182 1.525373 0.042703 +v -0.022456 1.525747 0.047856 +v -0.023742 1.526545 0.048365 +v -0.027850 1.528251 0.047514 +v -0.029964 1.526652 0.042905 +v -0.029085 1.526235 0.042905 +v -0.026190 1.527374 0.047403 +v -0.027281 1.528690 0.049325 +v -0.025103 1.527452 0.048740 +v -0.033744 1.529320 0.048332 +v -0.021800 1.564300 -0.125112 +v -0.021390 1.549196 -0.128268 +v -0.041100 1.551200 -0.119600 +v -0.041000 1.564300 -0.117100 +v -0.039290 1.528051 0.044190 +v -0.035655 1.526552 0.041178 +v -0.051007 1.565829 -0.109698 +v -0.050153 1.574170 -0.107041 +v -0.041409 1.575703 -0.112304 +v -0.037050 1.439676 -0.079925 +v -0.026375 1.429290 -0.086093 +v -0.020443 1.525555 0.049023 +v -0.022090 1.526874 0.049265 +v -0.019381 1.524425 0.048883 +v -0.018804 1.524338 0.049175 +v -0.019820 1.525679 0.049223 +v -0.021236 1.527039 0.049389 +v -0.024082 1.528063 0.049571 +v -0.023449 1.528355 0.049996 +v -0.026775 1.529813 0.050788 +v -0.026860 1.529186 0.050504 +v -0.034030 1.529944 0.050131 +v -0.034105 1.530682 0.050431 +v -0.026377 1.530509 0.050284 +v -0.022991 1.528811 0.049615 +v -0.025891 1.531275 0.049667 +v -0.022800 1.529700 0.049200 +v -0.033943 1.532420 0.049431 +v -0.034105 1.531621 0.049993 +v -0.022009 1.522235 0.049146 +v -0.019888 1.522635 0.049121 +v -0.022491 1.522472 0.048858 +v -0.020169 1.522855 0.048732 +v -0.023596 1.522765 0.047753 +v -0.019510 1.523334 0.048695 +v -0.018853 1.523205 0.049074 +v -0.024390 1.522876 0.047222 +v -0.019200 1.525800 0.049100 +v -0.018016 1.524253 0.049349 +v -0.018201 1.526310 0.048651 +v -0.017395 1.524492 0.049158 +v -0.020237 1.528365 0.048593 +v -0.020635 
1.527357 0.048964
[… roughly 2,300 further "+v x y z" vertex records of the added Wavefront OBJ mesh file omitted: this span of the diff contains only raw geometry data, one added vertex per line, with no other changes …]
-0.033844 1.513086
0.034787 +v -0.033705 1.513784 0.032441 +v -0.033480 1.514918 0.030278 +v -0.033176 1.516444 0.028383 +v -0.032807 1.518303 0.026827 +v -0.032385 1.520423 0.025671 +v -0.031927 1.522725 0.024960 +v -0.034138 1.518632 0.047521 +v -0.034836 1.516947 0.046071 +v -0.035431 1.515510 0.044176 +v -0.035873 1.514442 0.042014 +v -0.036146 1.513784 0.039667 +v -0.036238 1.513562 0.037227 +v -0.036146 1.513784 0.034787 +v -0.035873 1.514442 0.032441 +v -0.035431 1.515510 0.030278 +v -0.034836 1.516947 0.028383 +v -0.034110 1.518698 0.026827 +v -0.033283 1.520696 0.025671 +v -0.032385 1.522864 0.024960 +v -0.035352 1.519280 0.047521 +v -0.036365 1.517764 0.046071 +v -0.037229 1.516471 0.044176 +v -0.037871 1.515510 0.042014 +v -0.038266 1.514918 0.039667 +v -0.038400 1.514718 0.037227 +v -0.038266 1.514918 0.034787 +v -0.037871 1.515510 0.032441 +v -0.037229 1.516471 0.030278 +v -0.036365 1.517764 0.028383 +v -0.035312 1.519340 0.026827 +v -0.034110 1.521138 0.025671 +v -0.032807 1.523089 0.024960 +v -0.036416 1.520153 0.047521 +v -0.037705 1.518864 0.046071 +v -0.038805 1.517764 0.044176 +v -0.039622 1.516947 0.042014 +v -0.040125 1.516444 0.039667 +v -0.040295 1.516274 0.037227 +v -0.040125 1.516444 0.034787 +v -0.039622 1.516947 0.032441 +v -0.038805 1.517764 0.030278 +v -0.037705 1.518864 0.028383 +v -0.036365 1.520204 0.026827 +v -0.034836 1.521733 0.025671 +v -0.033176 1.523393 0.024960 +v -0.037289 1.521217 0.047521 +v -0.038805 1.520204 0.046071 +v -0.040098 1.519340 0.044176 +v -0.041059 1.518698 0.042014 +v -0.041651 1.518303 0.039667 +v -0.041851 1.518169 0.037227 +v -0.041651 1.518303 0.034787 +v -0.041059 1.518698 0.032441 +v -0.040098 1.519340 0.030278 +v -0.038805 1.520204 0.028383 +v -0.037229 1.521257 0.026827 +v -0.035431 1.522459 0.025671 +v -0.033480 1.523762 0.024960 +v -0.037937 1.522431 0.047521 +v -0.039622 1.521733 0.046071 +v -0.041059 1.521138 0.044176 +v -0.042127 1.520696 0.042014 +v -0.042785 1.520423 0.039667 +v -0.043007 1.520331 0.037227 +v -0.042785 1.520423 0.034787 +v -0.042127 1.520696 0.032441 +v -0.041059 1.521138 0.030278 +v -0.039622 1.521733 0.028383 +v -0.037871 1.522459 0.026827 +v -0.035873 1.523286 0.025671 +v -0.033705 1.524184 0.024960 +v -0.038337 1.523748 0.047521 +v -0.040125 1.523393 0.046071 +v -0.041651 1.523089 0.044176 +v -0.042785 1.522864 0.042014 +v -0.043483 1.522725 0.039667 +v -0.043718 1.522678 0.037227 +v -0.043483 1.522725 0.034787 +v -0.042785 1.522864 0.032441 +v -0.041651 1.523089 0.030278 +v -0.040125 1.523393 0.028383 +v -0.038266 1.523762 0.026827 +v -0.036146 1.524184 0.025671 +v -0.033844 1.524642 0.024960 +usemtl material_1 +vt 0.827841 0.456444 +vt 0.826865 0.455402 +vt 0.827907 0.456096 +vt 0.842058 0.471775 +vt 0.837976 0.467212 +vt 0.842847 0.461572 +vt 0.870015 0.218946 +vt 0.894946 0.209010 +vt 0.891536 0.243748 +vt 0.626067 0.713548 +vt 0.646852 0.690397 +vt 0.655533 0.703344 +vt 0.873927 0.494550 +vt 0.876257 0.493740 +vt 0.875382 0.496354 +vt 0.872019 0.483542 +vt 0.870940 0.482536 +vt 0.871472 0.470778 +vt 0.871509 0.449954 +vt 0.872148 0.460940 +vt 0.869884 0.450896 +vt 0.165800 0.455529 +vt 0.164845 0.456567 +vt 0.164776 0.456219 +vt 0.154869 0.467361 +vt 0.150856 0.471940 +vt 0.149956 0.461721 +vt 0.838315 0.395768 +vt 0.844026 0.400208 +vt 0.840380 0.402876 +vt 0.846600 0.382012 +vt 0.849120 0.393760 +vt 0.841435 0.384948 +vt 0.862553 0.515661 +vt 0.863353 0.509890 +vt 0.865905 0.512859 +vt 0.903197 0.443541 +vt 0.890997 0.445717 +vt 0.896335 0.420583 +vt 0.864481 0.490056 +vt 0.865943 0.493748 +vt 0.862936 
0.493077 +vt 0.878909 0.496392 +vt 0.877836 0.499308 +vt 0.823238 0.423976 +vt 0.816272 0.418672 +vt 0.824230 0.409451 +vt 0.838080 0.460496 +vt 0.836168 0.468022 +vt 0.870187 0.437303 +vt 0.863523 0.429405 +vt 0.865693 0.426083 +vt 0.863829 0.437613 +vt 0.863830 0.440753 +vt 0.857959 0.433341 +vt 0.867019 0.499853 +vt 0.864036 0.505455 +vt 0.863152 0.504591 +vt 0.897221 0.488439 +vt 0.899057 0.503323 +vt 0.896823 0.503983 +vt 0.866905 0.491780 +vt 0.869888 0.490883 +vt 0.868763 0.493053 +vt 0.844697 0.494535 +vt 0.847181 0.492958 +vt 0.846961 0.495396 +vt 0.888614 0.415091 +vt 0.878802 0.397244 +vt 0.876764 0.389051 +vt 0.836173 0.388504 +vt 0.834026 0.395287 +vt 0.831825 0.387866 +vt 0.850583 0.499869 +vt 0.851510 0.496872 +vt 0.854041 0.497539 +vt 0.866125 0.487052 +vt 0.868225 0.489363 +vt 0.868953 0.463043 +vt 0.862904 0.475192 +vt 0.864392 0.461427 +vt 0.872170 0.490753 +vt 0.873166 0.482859 +vt 0.814715 0.439052 +vt 0.820247 0.442227 +vt 0.815386 0.446010 +vt 0.874814 0.407608 +vt 0.875562 0.396903 +vt 0.879540 0.411076 +vt 0.857822 0.497888 +vt 0.854636 0.491664 +vt 0.857133 0.491101 +vt 0.852684 0.402568 +vt 0.859029 0.403657 +vt 0.857200 0.408243 +vt 0.863556 0.485000 +vt 0.861944 0.487965 +vt 0.817191 0.430877 +vt 0.822995 0.435592 +vt 0.820870 0.466724 +vt 0.824843 0.464314 +vt 0.825851 0.465959 +vt 0.815771 0.465966 +vt 0.817856 0.463741 +vt 0.818798 0.467103 +vt 0.826902 0.466657 +vt 0.826344 0.465087 +vt 0.826963 0.465058 +vt 0.867416 0.476409 +vt 0.870879 0.466015 +vt 0.869476 0.479087 +vt 0.835489 0.444680 +vt 0.839104 0.439991 +vt 0.837491 0.446209 +vt 0.818719 0.449140 +vt 0.824589 0.446916 +vt 0.821894 0.452148 +vt 0.873450 0.448904 +vt 0.878909 0.459689 +vt 0.875985 0.460354 +vt 0.872027 0.503703 +vt 0.875232 0.504611 +vt 0.873706 0.507233 +vt 0.913942 0.372248 +vt 0.910810 0.410742 +vt 0.892795 0.368057 +vt 0.851839 0.409438 +vt 0.855720 0.414436 +vt 0.849343 0.501995 +vt 0.852223 0.501391 +vt 0.855562 0.503813 +vt 0.838856 0.492407 +vt 0.836210 0.497003 +vt 0.835143 0.493577 +vt 0.833641 0.524315 +vt 0.825758 0.523817 +vt 0.824494 0.509354 +vt 0.876011 0.382443 +vt 0.848993 0.398547 +vt 0.841142 0.391893 +vt 0.836186 0.380433 +vt 0.840694 0.489359 +vt 0.844147 0.488818 +vt 0.842631 0.492279 +vt 0.866551 0.496353 +vt 0.867676 0.494849 +vt 0.865297 0.497855 +vt 0.862330 0.499569 +vt 0.864020 0.496699 +vt 0.846809 0.489395 +vt 0.845915 0.486975 +vt 0.052020 0.330165 +vt 0.031772 0.301144 +vt 0.058399 0.292781 +vt 0.863402 0.453967 +vt 0.865473 0.447501 +vt 0.868216 0.455204 +vt 0.885552 0.469190 +vt 0.889406 0.487571 +vt 0.886308 0.487255 +vt 0.893600 0.452797 +vt 0.906806 0.470475 +vt 0.898399 0.469472 +vt 0.840313 0.488493 +vt 0.841474 0.486048 +vt 0.814942 0.400565 +vt 0.861999 0.391729 +vt 0.865396 0.386821 +vt 0.867006 0.399398 +vt 0.827310 0.462459 +vt 0.827830 0.464426 +vt 0.840662 0.499750 +vt 0.839852 0.495550 +vt 0.853694 0.398236 +vt 0.831159 0.479268 +vt 0.822201 0.482616 +vt 0.825611 0.471616 +vt 0.834363 0.443763 +vt 0.835726 0.439241 +vt 0.848779 0.405351 +vt 0.842170 0.480592 +vt 0.846995 0.474293 +vt 0.843139 0.481763 +vt 0.854377 0.382884 +vt 0.856214 0.389598 +vt 0.850473 0.382115 +vt 0.848576 0.488354 +vt 0.850112 0.490893 +vt 0.848590 0.490914 +vt 0.848141 0.485157 +vt 0.853582 0.390914 +vt 0.870296 0.457783 +vt 0.870133 0.493228 +vt 0.871296 0.490781 +vt 0.828116 0.456781 +vt 0.828338 0.457060 +vt 0.828099 0.457058 +vt 0.828269 0.456009 +vt 0.828574 0.456299 +vt 0.828169 0.456406 +vt 0.828739 0.454554 +vt 0.830826 0.453572 +vt 0.829438 0.455268 +vt 
0.873008 0.490889 +vt 0.872106 0.493568 +vt 0.867798 0.497175 +vt 0.869023 0.495378 +vt 0.813626 0.451266 +vt 0.817093 0.450008 +vt 0.816442 0.452822 +vt 0.824350 0.395864 +vt 0.829501 0.402016 +vt 0.844427 0.472826 +vt 0.848408 0.462167 +vt 0.848950 0.500912 +vt 0.847276 0.525033 +vt 0.853080 0.519142 +vt 0.854777 0.524826 +vt 0.828156 0.455564 +vt 0.828349 0.454453 +vt 0.828385 0.455468 +vt 0.827833 0.456816 +vt 0.826692 0.456246 +vt 0.829235 0.461455 +vt 0.827896 0.458828 +vt 0.828578 0.458422 +vt 0.827866 0.444228 +vt 0.828832 0.437216 +vt 0.831484 0.443654 +vt 0.842333 0.484020 +vt 0.851557 0.433941 +vt 0.852536 0.429870 +vt 0.827043 0.458047 +vt 0.826006 0.461257 +vt 0.825145 0.460133 +vt 0.831353 0.514392 +vt 0.839442 0.512336 +vt 0.839093 0.521955 +vt 0.901202 0.540443 +vt 0.906030 0.547063 +vt 0.893380 0.565617 +vt 0.876358 0.421251 +vt 0.880120 0.426829 +vt 0.865041 0.495361 +vt 0.829413 0.457025 +vt 0.828438 0.456741 +vt 0.827875 0.457305 +vt 0.827333 0.458555 +vt 0.834400 0.484499 +vt 0.838510 0.482213 +vt 0.838422 0.486832 +vt 0.829764 0.434392 +vt 0.822729 0.430085 +vt 0.883518 0.458053 +vt 0.876382 0.447390 +vt 0.880796 0.445447 +vt 0.849181 0.493012 +vt 0.852394 0.492252 +vt 0.818204 0.452463 +vt 0.828647 0.455809 +vt 0.884507 0.499233 +vt 0.858140 0.395432 +vt 0.863034 0.401494 +vt 0.830765 0.459446 +vt 0.829107 0.457866 +vt 0.818032 0.456219 +vt 0.821361 0.455110 +vt 0.821563 0.458333 +vt 0.846693 0.444503 +vt 0.845698 0.453704 +vt 0.842916 0.451400 +vt 0.880686 0.507364 +vt 0.878487 0.510394 +vt 0.858323 0.545975 +vt 0.863625 0.537467 +vt 0.872414 0.538309 +vt 0.809760 0.419093 +vt 0.813500 0.427367 +vt 0.840782 0.491898 +vt 0.840360 0.490638 +vt 0.883175 0.428313 +vt 0.855501 0.437323 +vt 0.855438 0.443606 +vt 0.850535 0.440915 +vt 0.866251 0.507358 +vt 0.860721 0.441452 +vt 0.512697 0.504195 +vt 0.513067 0.504456 +vt 0.512684 0.504669 +vt 0.828563 0.455379 +vt 0.505058 0.505642 +vt 0.507875 0.503852 +vt 0.507972 0.507293 +vt 0.512335 0.508938 +vt 0.510077 0.512472 +vt 0.510052 0.508717 +vt 0.514515 0.503892 +vt 0.513301 0.503601 +vt 0.514308 0.502955 +vt 0.515702 0.499885 +vt 0.513324 0.500964 +vt 0.513730 0.498908 +vt 0.607350 0.540611 +vt 0.618975 0.526944 +vt 0.620706 0.532580 +vt 0.124014 0.218952 +vt 0.099625 0.209027 +vt 0.132266 0.193780 +vt 0.831926 0.429504 +vt 0.869474 0.498017 +vt 0.870133 0.501385 +vt 0.867880 0.500126 +vt 0.856221 0.485346 +vt 0.854964 0.476579 +vt 0.859501 0.475270 +vt 0.877449 0.485093 +vt 0.874834 0.491952 +vt 0.875665 0.483738 +vt 0.873360 0.501225 +vt 0.876614 0.501958 +vt 0.841095 0.479381 +vt 0.805414 0.436792 +vt 0.810321 0.436713 +vt 0.892196 0.594266 +vt 0.881916 0.571862 +vt 0.968926 0.403403 +vt 0.946795 0.365227 +vt 0.967802 0.369636 +vt 0.836877 0.428859 +vt 0.833086 0.413826 +vt 0.837725 0.414975 +vt 0.848004 0.507703 +vt 0.845494 0.509822 +vt 0.843622 0.504405 +vt 0.844002 0.416461 +vt 0.828052 0.457252 +vt 0.828261 0.457238 +vt 0.838708 0.479561 +vt 0.839042 0.477802 +vt 0.873821 0.482824 +vt 0.841574 0.492499 +vt 0.953351 0.429482 +vt 0.971103 0.462460 +vt 0.954995 0.465773 +vt 0.735380 0.595059 +vt 0.746723 0.622955 +vt 0.734142 0.625525 +vt 0.806992 0.474518 +vt 0.809147 0.456029 +vt 0.813424 0.478681 +vt 0.688497 0.515273 +vt 0.675381 0.493409 +vt 0.689271 0.492812 +vt 0.834603 0.403046 +vt 0.837606 0.403441 +vt 0.816953 0.460105 +vt 0.815875 0.463681 +vt 0.815165 0.461557 +vt 0.545483 0.595339 +vt 0.561340 0.585414 +vt 0.549221 0.600940 +vt 0.518794 0.501766 +vt 0.516426 0.504569 +vt 0.515620 0.502466 +vt 0.922067 
0.475564 +vt 0.916804 0.515723 +vt 0.917837 0.459539 +vt 0.702609 0.496283 +vt 0.700502 0.518907 +vt 0.598305 0.177246 +vt 0.631221 0.160647 +vt 0.650865 0.188922 +vt 0.844660 0.377072 +vt 0.829147 0.374883 +vt 0.846930 0.370751 +vt 0.511030 0.672679 +vt 0.524864 0.660104 +vt 0.526165 0.675057 +vt 0.560268 0.788311 +vt 0.530673 0.810853 +vt 0.527456 0.791432 +vt 0.950180 0.399125 +vt 0.970058 0.431030 +vt 0.834077 0.471401 +vt 0.837115 0.474679 +vt 0.836102 0.477765 +vt 0.884644 0.415077 +vt 0.529278 0.623530 +vt 0.510111 0.632307 +vt 0.510484 0.616017 +vt 0.566976 0.806985 +vt 0.533546 0.828017 +vt 0.829741 0.487902 +vt 0.834518 0.490395 +vt 0.576070 0.568723 +vt 0.590032 0.548918 +vt 0.591544 0.553315 +vt 0.816021 0.598369 +vt 0.834984 0.572839 +vt 0.828252 0.619165 +vt 0.513764 0.505485 +vt 0.513358 0.503917 +vt 0.514804 0.507870 +vt 0.522578 0.506258 +vt 0.516757 0.510849 +vt 0.512989 0.503347 +vt 0.513175 0.502218 +vt 0.513552 0.502344 +vt 0.512822 0.504016 +vt 0.512796 0.504144 +vt 0.503668 0.508524 +vt 0.502165 0.512866 +vt 0.501425 0.507976 +vt 0.513190 0.512449 +vt 0.510196 0.504111 +vt 0.512053 0.504588 +vt 0.510245 0.505628 +vt 0.511941 0.503989 +vt 0.512554 0.503823 +vt 0.512570 0.503939 +vt 0.512653 0.504160 +vt 0.512338 0.504729 +vt 0.508944 0.499425 +vt 0.510420 0.501562 +vt 0.511322 0.506236 +vt 0.512772 0.503886 +vt 0.849040 0.430210 +vt 0.846823 0.438440 +vt 0.844002 0.435579 +vt 0.852604 0.489893 +vt 0.849713 0.491741 +vt 0.859490 0.512043 +vt 0.872856 0.496911 +vt 0.874405 0.498745 +vt 0.799000 0.494613 +vt 0.783589 0.486921 +vt 0.794853 0.465043 +vt 0.848561 0.495287 +vt 0.872259 0.469466 +vt 0.873147 0.469339 +vt 0.812520 0.657031 +vt 0.843089 0.635107 +vt 0.828194 0.673622 +vt 0.617074 0.583026 +vt 0.620821 0.582710 +vt 0.620252 0.584600 +vt 0.623581 0.584615 +vt 0.622713 0.586620 +vt 0.876467 0.554242 +vt 0.867701 0.582874 +vt 0.617641 0.580805 +vt 0.624187 0.578736 +vt 0.587850 0.592882 +vt 0.593223 0.589141 +vt 0.589369 0.594582 +vt 0.605344 0.536366 +vt 0.840205 0.489942 +vt 0.840085 0.487871 +vt 0.603224 0.575080 +vt 0.593526 0.578528 +vt 0.604336 0.570586 +vt 0.609685 0.577108 +vt 0.611579 0.573168 +vt 0.838182 0.501180 +vt 0.831447 0.499300 +vt 0.583392 0.593640 +vt 0.584890 0.585342 +vt 0.585676 0.587324 +vt 0.880876 0.470091 +vt 0.877545 0.470008 +vt 0.587059 0.590707 +vt 0.592949 0.586449 +vt 0.593757 0.574953 +vt 0.845884 0.517276 +vt 0.850768 0.513058 +vt 0.638269 0.582255 +vt 0.630538 0.582823 +vt 0.634312 0.579590 +vt 0.636311 0.572698 +vt 0.640405 0.575426 +vt 0.496576 0.591923 +vt 0.508928 0.602069 +vt 0.496593 0.603855 +vt 0.605851 0.566318 +vt 0.613550 0.568708 +vt 0.818097 0.425891 +vt 0.877311 0.433336 +vt 0.631134 0.577046 +vt 0.627535 0.580742 +vt 0.627083 0.569172 +vt 0.624238 0.563484 +vt 0.630966 0.565091 +vt 0.644022 0.578574 +vt 0.646048 0.572456 +vt 0.650388 0.576026 +vt 0.646106 0.581556 +vt 0.652481 0.581003 +vt 0.856279 0.529400 +vt 0.863956 0.531768 +vt 0.852631 0.530653 +vt 0.647358 0.565863 +vt 0.653202 0.569565 +vt 0.654610 0.562736 +vt 0.661953 0.567757 +vt 0.658639 0.574156 +vt 0.668028 0.573558 +vt 0.636508 0.566538 +vt 0.635064 0.561020 +vt 0.641742 0.562859 +vt 0.368060 0.714197 +vt 0.347288 0.690679 +vt 0.373206 0.699728 +vt 0.601030 0.582653 +vt 0.595463 0.590613 +vt 0.620449 0.577427 +vt 0.613824 0.580182 +vt 0.616262 0.576571 +vt 0.602971 0.587835 +vt 0.608858 0.595982 +vt 0.606630 0.597002 +vt 0.593675 0.595782 +vt 0.597853 0.591954 +vt 0.604755 0.598589 +vt 0.841014 0.505658 +vt 0.606845 0.561903 +vt 0.618657 0.556448 +vt 
0.617669 0.559836 +vt 0.940198 0.473328 +vt 0.954814 0.509723 +vt 0.941717 0.515026 +vt 0.626720 0.634207 +vt 0.636822 0.632162 +vt 0.629149 0.638316 +vt 0.617384 0.634586 +vt 0.618989 0.639365 +vt 0.626287 0.612167 +vt 0.621924 0.613739 +vt 0.623243 0.610213 +vt 0.627677 0.607573 +vt 0.624237 0.606726 +vt 0.620364 0.643831 +vt 0.624525 0.589034 +vt 0.628073 0.589723 +vt 0.626314 0.591851 +vt 0.619302 0.586771 +vt 0.613726 0.598335 +vt 0.613253 0.597093 +vt 0.621266 0.646631 +vt 0.609782 0.653721 +vt 0.609590 0.650317 +vt 0.616548 0.585438 +vt 0.625786 0.586908 +vt 0.633766 0.586250 +vt 0.868540 0.523146 +vt 0.864092 0.520060 +vt 0.867481 0.516529 +vt 0.890929 0.502588 +vt 0.622033 0.591282 +vt 0.613989 0.600950 +vt 0.613890 0.599641 +vt 0.621140 0.589013 +vt 0.628725 0.598126 +vt 0.624071 0.596774 +vt 0.627851 0.595228 +vt 0.613987 0.604688 +vt 0.624498 0.603907 +vt 0.613902 0.605946 +vt 0.614023 0.603477 +vt 0.614042 0.602252 +vt 0.630655 0.593375 +vt 0.639684 0.593254 +vt 0.652386 0.587694 +vt 0.645961 0.597584 +vt 0.645560 0.586568 +vt 0.495457 0.167657 +vt 0.536350 0.159940 +vt 0.536360 0.173776 +vt 0.624571 0.601471 +vt 0.628661 0.603948 +vt 0.661892 0.579842 +vt 0.662230 0.587471 +vt 0.659032 0.600032 +vt 0.673155 0.589137 +vt 0.671188 0.601764 +vt 0.633314 0.599519 +vt 0.629067 0.600946 +vt 0.632284 0.596411 +vt 0.640880 0.605832 +vt 0.649401 0.607973 +vt 0.636899 0.612221 +vt 0.643092 0.615417 +vt 0.613763 0.607310 +vt 0.633479 0.628707 +vt 0.643130 0.625490 +vt 0.671183 0.580956 +vt 0.633474 0.618563 +vt 0.638340 0.622042 +vt 0.630626 0.609444 +vt 0.632324 0.604167 +vt 0.628621 0.615007 +vt 0.602197 0.606482 +vt 0.590147 0.610704 +vt 0.590076 0.608379 +vt 0.626154 0.620744 +vt 0.629593 0.624753 +vt 0.620183 0.621710 +vt 0.621962 0.626056 +vt 0.624159 0.616778 +vt 0.614824 0.625560 +vt 0.616146 0.630314 +vt 0.713764 0.682147 +vt 0.740827 0.657366 +vt 0.727510 0.687880 +vt 0.624110 0.630195 +vt 0.611706 0.659848 +vt 0.623291 0.649980 +vt 0.627693 0.656201 +vt 0.602283 0.609196 +vt 0.592142 0.616797 +vt 0.590957 0.613732 +vt 0.608369 0.623222 +vt 0.610222 0.613525 +vt 0.614127 0.621690 +vt 0.602190 0.607792 +vt 0.586891 0.613965 +vt 0.585111 0.609774 +vt 0.589288 0.617568 +vt 0.861742 0.372864 +vt 0.864851 0.355415 +vt 0.633635 0.518804 +vt 0.572079 0.738226 +vt 0.594420 0.720196 +vt 0.602804 0.733237 +vt 0.882846 0.503671 +vt 0.829832 0.493332 +vt 0.967415 0.627599 +vt 0.951869 0.602418 +vt 0.969915 0.597156 +vt 0.605852 0.614092 +vt 0.602623 0.623295 +vt 0.612999 0.610472 +vt 0.618467 0.618403 +vt 0.611880 0.612203 +vt 0.944497 0.655175 +vt 0.935769 0.634601 +vt 0.949170 0.629601 +vt 0.584545 0.618598 +vt 0.580865 0.613953 +vt 0.595131 0.619368 +vt 0.602539 0.610682 +vt 0.603082 0.612132 +vt 0.578615 0.609968 +vt 0.577721 0.617494 +vt 0.575111 0.612782 +vt 0.581432 0.622873 +vt 0.588887 0.622919 +vt 0.591747 0.620938 +vt 0.586903 0.627316 +vt 0.593472 0.633083 +vt 0.594078 0.628190 +vt 0.595608 0.625378 +vt 0.600734 0.634939 +vt 0.608236 0.632113 +vt 0.608700 0.636571 +vt 0.591943 0.638043 +vt 0.600910 0.630655 +vt 0.608193 0.627519 +vt 0.590113 0.643481 +vt 0.584901 0.632138 +vt 0.599019 0.644959 +vt 0.600064 0.639879 +vt 0.582085 0.598236 +vt 0.579986 0.594846 +vt 0.576821 0.602548 +vt 0.586195 0.604614 +vt 0.584586 0.599055 +vt 0.588547 0.600099 +vt 0.591525 0.595390 +vt 0.579988 0.603322 +vt 0.578209 0.607795 +vt 0.573368 0.610167 +vt 0.584884 0.606497 +vt 0.578919 0.605626 +vt 0.584495 0.608159 +vt 0.602828 0.602238 +vt 0.603497 0.600447 +vt 0.574563 0.620965 +vt 0.578064 
0.627607 +vt 0.564820 0.611830 +vt 0.568107 0.613336 +vt 0.565571 0.616286 +vt 0.117210 0.493851 +vt 0.119524 0.494663 +vt 0.118092 0.496461 +vt 0.122329 0.482707 +vt 0.121284 0.483697 +vt 0.121663 0.470888 +vt 0.566433 0.606581 +vt 0.567821 0.609808 +vt 0.121427 0.449965 +vt 0.120919 0.460973 +vt 0.119208 0.460784 +vt 0.574783 0.635192 +vt 0.583111 0.638455 +vt 0.569461 0.605710 +vt 0.573563 0.606920 +vt 0.583094 0.583056 +vt 0.153804 0.395813 +vt 0.148166 0.400258 +vt 0.150946 0.391940 +vt 0.581892 0.581599 +vt 0.593326 0.571358 +vt 0.576582 0.594195 +vt 0.574286 0.594011 +vt 0.619552 0.552929 +vt 0.607395 0.558943 +vt 0.607740 0.555933 +vt 0.671657 0.151136 +vt 0.623360 0.140133 +vt 0.669444 0.564513 +vt 0.143038 0.393787 +vt 0.145432 0.382018 +vt 0.150594 0.384988 +vt 0.667279 0.553935 +vt 0.660996 0.559305 +vt 0.130139 0.509993 +vt 0.130976 0.515745 +vt 0.127630 0.512953 +vt 0.090000 0.443329 +vt 0.102148 0.445512 +vt 0.099618 0.452587 +vt 0.651873 0.555060 +vt 0.646749 0.559214 +vt 0.127396 0.493876 +vt 0.128806 0.490204 +vt 0.130360 0.493207 +vt 0.114594 0.496494 +vt 0.115676 0.499407 +vt 0.175956 0.418777 +vt 0.169078 0.424088 +vt 0.167946 0.409559 +vt 0.154693 0.460640 +vt 0.129095 0.429485 +vt 0.122591 0.437322 +vt 0.126922 0.426147 +vt 0.656382 0.549525 +vt 0.644927 0.547206 +vt 0.641630 0.552313 +vt 0.632092 0.551231 +vt 0.685811 0.586527 +vt 0.128901 0.440913 +vt 0.128874 0.437761 +vt 0.134637 0.433513 +vt 0.692279 0.573440 +vt 0.681662 0.578555 +vt 0.126395 0.499946 +vt 0.129429 0.505565 +vt 0.125558 0.500216 +vt 0.697529 0.581880 +vt 0.688251 0.599122 +vt 0.563943 0.624463 +vt 0.570513 0.627714 +vt 0.564925 0.633617 +vt 0.558059 0.638233 +vt 0.567246 0.644244 +vt 0.559424 0.649226 +vt 0.096339 0.488441 +vt 0.094686 0.503314 +vt 0.095359 0.488082 +vt 0.126427 0.491921 +vt 0.123469 0.491033 +vt 0.125096 0.489518 +vt 0.148451 0.494678 +vt 0.145972 0.493106 +vt 0.147733 0.491278 +vt 0.113607 0.397153 +vt 0.104166 0.414911 +vt 0.115526 0.388903 +vt 0.558152 0.628565 +vt 0.557708 0.616970 +vt 0.562570 0.619754 +vt 0.558072 0.623155 +vt 0.563594 0.607429 +vt 0.561728 0.614173 +vt 0.560378 0.607815 +vt 0.592682 0.566585 +vt 0.580395 0.580325 +vt 0.158063 0.395356 +vt 0.155868 0.388546 +vt 0.160195 0.387914 +vt 0.591798 0.564591 +vt 0.578990 0.578766 +vt 0.572042 0.593431 +vt 0.142643 0.499990 +vt 0.141702 0.497001 +vt 0.144338 0.498530 +vt 0.569248 0.592983 +vt 0.127148 0.487213 +vt 0.955551 0.539937 +vt 0.940933 0.548837 +vt 0.128552 0.461564 +vt 0.124693 0.455365 +vt 0.129459 0.454116 +vt 0.937915 0.440042 +vt 0.621501 0.346150 +vt 0.598586 0.340764 +vt 0.608331 0.329450 +vt 0.124040 0.463188 +vt 0.130206 0.475357 +vt 0.125735 0.476585 +vt 0.718346 0.475276 +vt 0.740050 0.463946 +vt 0.732541 0.489889 +vt 0.121226 0.490893 +vt 0.122083 0.490930 +vt 0.087872 0.524952 +vt 0.094456 0.509046 +vt 0.097096 0.517213 +vt 0.496638 0.645071 +vt 0.496625 0.631456 +vt 0.657002 0.667209 +vt 0.668154 0.675497 +vt 0.640877 0.678015 +vt 0.647816 0.658559 +vt 0.510348 0.455822 +vt 0.496196 0.459443 +vt 0.496203 0.455843 +vt 0.697171 0.627110 +vt 0.688575 0.612696 +vt 0.700980 0.611010 +vt 0.685173 0.626475 +vt 0.677179 0.613209 +vt 0.659318 0.648863 +vt 0.669998 0.654597 +vt 0.690573 0.643419 +vt 0.678244 0.640655 +vt 0.177725 0.439167 +vt 0.172244 0.442356 +vt 0.169441 0.435719 +vt 0.667601 0.637631 +vt 0.116806 0.396831 +vt 0.117675 0.407504 +vt 0.113061 0.410951 +vt 0.701389 0.596194 +vt 0.569947 0.656791 +vt 0.583353 0.662647 +vt 0.579974 0.670936 +vt 0.574124 0.677771 +vt 0.590610 0.692187 +vt 
0.597881 0.681450 +vt 0.575217 0.651355 +vt 0.562593 0.661933 +vt 0.512004 0.500921 +vt 0.135470 0.498002 +vt 0.138567 0.491801 +vt 0.139203 0.497657 +vt 0.151168 0.432110 +vt 0.143474 0.430336 +vt 0.145449 0.425274 +vt 0.516445 0.488326 +vt 0.507223 0.483405 +vt 0.519424 0.482924 +vt 0.598283 0.649077 +vt 0.133217 0.403715 +vt 0.139551 0.402627 +vt 0.135087 0.408336 +vt 0.175168 0.430982 +vt 0.102728 0.502646 +vt 0.096889 0.504015 +vt 0.912058 0.689267 +vt 0.907357 0.733069 +vt 0.894950 0.709852 +vt 0.582130 0.645108 +vt 0.584139 0.644258 +vt 0.129668 0.485163 +vt 0.131294 0.488114 +vt 0.524213 0.609833 +vt 0.609219 0.641580 +vt 0.609321 0.646571 +vt 0.630774 0.640755 +vt 0.634069 0.644000 +vt 0.171893 0.466849 +vt 0.167908 0.464449 +vt 0.173095 0.463698 +vt 0.529130 0.575825 +vt 0.516811 0.567236 +vt 0.528800 0.565557 +vt 0.174863 0.463852 +vt 0.176972 0.466065 +vt 0.173963 0.467224 +vt 0.654247 0.621616 +vt 0.649736 0.618365 +vt 0.659657 0.615772 +vt 0.659121 0.622906 +vt 0.657806 0.611827 +vt 0.665029 0.615081 +vt 0.166425 0.465229 +vt 0.165882 0.466799 +vt 0.165807 0.465197 +vt 0.144454 0.462305 +vt 0.148523 0.472982 +vt 0.146000 0.474440 +vt 0.581755 0.650305 +vt 0.122175 0.466145 +vt 0.123731 0.479260 +vt 0.588053 0.650969 +vt 0.138723 0.524964 +vt 0.129487 0.520137 +vt 0.598879 0.670340 +vt 0.597802 0.658869 +vt 0.153467 0.440115 +vt 0.157117 0.444812 +vt 0.155139 0.446345 +vt 0.167973 0.447048 +vt 0.173838 0.449263 +vt 0.170707 0.452281 +vt 0.167236 0.471755 +vt 0.173202 0.470992 +vt 0.170725 0.482753 +vt 0.119500 0.448893 +vt 0.114219 0.459663 +vt 0.116584 0.447365 +vt 0.597704 0.652690 +vt 0.167717 0.395948 +vt 0.162728 0.374933 +vt 0.177105 0.400674 +vt 0.121480 0.503791 +vt 0.118301 0.504705 +vt 0.120134 0.501321 +vt 0.152498 0.482205 +vt 0.154490 0.482386 +vt 0.152980 0.484696 +vt 0.937403 0.610412 +vt 0.650134 0.641770 +vt 0.639670 0.649566 +vt 0.643468 0.637218 +vt 0.079885 0.372002 +vt 0.082758 0.410666 +vt 0.071206 0.415476 +vt 0.140460 0.409517 +vt 0.136635 0.414551 +vt 0.168691 0.509508 +vt 0.161920 0.514516 +vt 0.167555 0.503813 +vt 0.112781 0.486667 +vt 0.658855 0.633603 +vt 0.652118 0.630403 +vt 0.516556 0.320179 +vt 0.495554 0.309736 +vt 0.518283 0.310595 +vt 0.143891 0.502148 +vt 0.141036 0.501496 +vt 0.144269 0.501055 +vt 0.156920 0.497164 +vt 0.154239 0.492572 +vt 0.157942 0.493738 +vt 0.505405 0.363885 +vt 0.495616 0.354825 +vt 0.507392 0.356351 +vt 0.167559 0.524009 +vt 0.159730 0.524467 +vt 0.670031 0.618552 +vt 0.675006 0.626496 +vt 0.665770 0.625329 +vt 0.116235 0.382230 +vt 0.096574 0.420337 +vt 0.810210 0.446784 +vt 0.806560 0.452759 +vt 0.807268 0.445544 +vt 0.898225 0.488134 +vt 0.895254 0.468654 +vt 0.161685 0.499450 +vt 0.155004 0.501350 +vt 0.182428 0.419200 +vt 0.145511 0.499814 +vt 0.146806 0.498153 +vt 0.150486 0.492425 +vt 0.148954 0.488973 +vt 0.901599 0.500491 +vt 0.909515 0.504110 +vt 0.899334 0.509068 +vt 0.138086 0.476747 +vt 0.881500 0.520433 +vt 0.873447 0.523940 +vt 0.878700 0.516993 +vt 0.503805 0.369681 +vt 0.495608 0.363008 +vt 0.862380 0.433351 +vt 0.855370 0.427298 +vt 0.854376 0.422465 +vt 0.504671 0.536218 +vt 0.496478 0.546108 +vt 0.496452 0.536719 +vt 0.868671 0.497700 +vt 0.871113 0.493388 +vt 0.928731 0.370200 +vt 0.934982 0.410495 +vt 0.091285 0.405461 +vt 0.127450 0.355243 +vt 0.591853 0.421981 +vt 0.595072 0.441683 +vt 0.579286 0.440476 +vt 0.815461 0.468621 +vt 0.869930 0.495686 +vt 0.863322 0.495722 +vt 0.861071 0.499198 +vt 0.886046 0.457249 +vt 0.887309 0.468437 +vt 0.884949 0.457500 +vt 0.889009 0.467807 +vt 0.892258 
0.488113 +vt 0.151597 0.494415 +vt 0.151537 0.492658 +vt 0.152318 0.492064 +vt 0.513983 0.490628 +vt 0.505197 0.488105 +vt 0.511731 0.495495 +vt 0.508811 0.497471 +vt 0.505713 0.495534 +vt 0.143207 0.398593 +vt 0.140342 0.487287 +vt 0.144954 0.485310 +vt 0.144548 0.488507 +vt 0.119520 0.482929 +vt 0.117974 0.469632 +vt 0.120014 0.469379 +vt 0.863027 0.435676 +vt 0.866641 0.441524 +vt 0.866689 0.442450 +vt 0.859447 0.384333 +vt 0.858188 0.388834 +vt 0.155794 0.380475 +vt 0.153921 0.477972 +vt 0.155801 0.474848 +vt 0.154272 0.479734 +vt 0.869403 0.451247 +vt 0.857741 0.505000 +vt 0.855624 0.506390 +vt 0.855346 0.505082 +vt 0.858856 0.504067 +vt 0.858772 0.505629 +vt 0.152387 0.489511 +vt 0.899227 0.487547 +vt 0.182074 0.436830 +vt 0.178802 0.427464 +vt 0.816356 0.456400 +vt 0.509151 0.448815 +vt 0.496229 0.446032 +vt 0.507452 0.445185 +vt 0.846518 0.499773 +vt 0.847690 0.499671 +vt 0.842088 0.494587 +vt 0.843983 0.497553 +vt 0.843253 0.497798 +vt 0.841529 0.494243 +vt 0.832190 0.437950 +vt 0.833032 0.443705 +vt 0.856721 0.455209 +vt 0.858014 0.462012 +vt 0.126820 0.496465 +vt 0.128304 0.495480 +vt 0.131022 0.499687 +vt 0.128072 0.497957 +vt 0.129326 0.496811 +vt 0.146313 0.489547 +vt 0.147185 0.487129 +vt 0.836807 0.395414 +vt 0.153053 0.429013 +vt 0.148345 0.416555 +vt 0.127337 0.447673 +vt 0.132593 0.447971 +vt 0.107701 0.469174 +vt 0.104093 0.487621 +vt 0.105965 0.468379 +vt 0.870823 0.495902 +vt 0.868386 0.509868 +vt 0.841661 0.497002 +vt 0.086688 0.470289 +vt 0.095034 0.469291 +vt 0.179427 0.478817 +vt 0.151585 0.486208 +vt 0.152753 0.488652 +vt 0.116334 0.421189 +vt 0.112690 0.426725 +vt 0.126773 0.386764 +vt 0.130185 0.391694 +vt 0.125281 0.399326 +vt 0.841673 0.502561 +vt 0.842580 0.500445 +vt 0.844807 0.501959 +vt 0.856067 0.507834 +vt 0.859927 0.506638 +vt 0.877670 0.540256 +vt 0.860885 0.507680 +vt 0.856924 0.509300 +vt 0.131992 0.441625 +vt 0.849388 0.503147 +vt 0.846308 0.501544 +vt 0.846301 0.500559 +vt 0.164936 0.464566 +vt 0.165437 0.462594 +vt 0.134889 0.462173 +vt 0.133575 0.475439 +vt 0.131476 0.461645 +vt 0.152519 0.499924 +vt 0.153281 0.495720 +vt 0.842990 0.456347 +vt 0.826094 0.464213 +vt 0.845951 0.457128 +vt 0.138510 0.398275 +vt 0.161772 0.479427 +vt 0.163263 0.488052 +vt 0.149623 0.504591 +vt 0.152233 0.505835 +vt 0.147824 0.510009 +vt 0.847016 0.455126 +vt 0.166130 0.451324 +vt 0.168069 0.454079 +vt 0.814213 0.500474 +vt 0.986328 0.431641 +vt 0.132405 0.502326 +vt 0.132265 0.499317 +vt 0.105872 0.501131 +vt 0.101277 0.488131 +vt 0.851078 0.425043 +vt 0.847002 0.425162 +vt 0.144619 0.495427 +vt 0.143989 0.493156 +vt 0.872905 0.435104 +vt 0.112404 0.540950 +vt 0.117463 0.554329 +vt 0.103782 0.548600 +vt 0.871495 0.460942 +vt 0.135939 0.389594 +vt 0.137708 0.382847 +vt 0.141577 0.382098 +vt 0.156819 0.439362 +vt 0.158227 0.443897 +vt 0.593085 0.568997 +vt 0.871135 0.460681 +vt 0.850295 0.419238 +vt 0.869330 0.451150 +vt 0.123564 0.451261 +vt 0.122666 0.457921 +vt 0.121885 0.460781 +vt 0.856934 0.448765 +vt 0.860183 0.447818 +vt 0.853972 0.449920 +vt 0.852838 0.445365 +vt 0.150846 0.480761 +vt 0.149900 0.481921 +vt 0.112206 0.520501 +vt 0.112749 0.532131 +vt 0.106476 0.525100 +vt 0.107156 0.487330 +vt 0.174219 0.425998 +vt 0.873566 0.396725 +vt 0.507138 0.568350 +vt 0.496506 0.557105 +vt 0.506955 0.556829 +vt 0.840048 0.484530 +vt 0.841466 0.483081 +vt 0.840797 0.485217 +vt 0.859945 0.454421 +vt 0.861443 0.461495 +vt 0.151824 0.402929 +vt 0.143469 0.405418 +vt 0.840514 0.482035 +vt 0.499339 0.502612 +vt 0.496336 0.507190 +vt 0.496309 0.502312 +vt 0.514640 0.329083 +vt 
0.495611 0.336761 +vt 0.495574 0.327991 +vt 0.143041 0.491038 +vt 0.144553 0.491062 +vt 0.145261 0.491917 +vt 0.860830 0.489973 +vt 0.857758 0.487913 +vt 0.154587 0.490677 +vt 0.145308 0.507906 +vt 0.137336 0.507935 +vt 0.144156 0.506192 +vt 0.887687 0.435499 +vt 0.825316 0.448901 +vt 0.828335 0.448233 +vt 0.826493 0.451200 +vt 0.149191 0.496841 +vt 0.859013 0.484650 +vt 0.855113 0.488739 +vt 0.852810 0.487134 +vt 0.866326 0.499152 +vt 0.862646 0.501132 +vt 0.129711 0.531843 +vt 0.137262 0.529517 +vt 0.140890 0.530784 +vt 0.138571 0.390932 +vt 0.825122 0.462901 +vt 0.881463 0.540893 +vt 0.890411 0.539287 +vt 0.890185 0.548544 +vt 0.517524 0.738833 +vt 0.496711 0.756070 +vt 0.496670 0.739258 +vt 0.123253 0.493362 +vt 0.124605 0.493188 +vt 0.158790 0.471568 +vt 0.986328 0.626953 +vt 0.164575 0.456905 +vt 0.164358 0.457185 +vt 0.164254 0.456866 +vt 0.834315 0.505848 +vt 0.825587 0.503685 +vt 0.161841 0.453700 +vt 0.163932 0.454680 +vt 0.163243 0.455395 +vt 0.829213 0.397026 +vt 0.846380 0.498011 +vt 0.164414 0.456134 +vt 0.164114 0.456425 +vt 0.164036 0.455935 +vt 0.822864 0.453260 +vt 0.893447 0.467841 +vt 0.131415 0.503692 +vt 0.130725 0.501248 +vt 0.834744 0.451673 +vt 0.832660 0.448714 +vt 0.120405 0.491017 +vt 0.121315 0.493690 +vt 0.166926 0.466103 +vt 0.125595 0.497283 +vt 0.125695 0.494972 +vt 0.540716 0.609765 +vt 0.534111 0.603239 +vt 0.178927 0.451378 +vt 0.175465 0.450129 +vt 0.177125 0.446127 +vt 0.120599 0.497018 +vt 0.162636 0.402109 +vt 0.130008 0.495838 +vt 0.306173 0.175609 +vt 0.271726 0.157314 +vt 0.320525 0.151226 +vt 0.588936 0.647679 +vt 0.140367 0.519298 +vt 0.146175 0.525177 +vt 0.164522 0.455689 +vt 0.164317 0.454580 +vt 0.164884 0.454495 +vt 0.165981 0.456376 +vt 0.164857 0.456939 +vt 0.593248 0.582888 +vt 0.828108 0.446010 +vt 0.163109 0.456245 +vt 0.163515 0.461586 +vt 0.164821 0.458956 +vt 0.164685 0.444351 +vt 0.163656 0.437341 +vt 0.164294 0.455593 +vt 0.164115 0.455504 +vt 0.843973 0.496693 +vt 0.150714 0.484181 +vt 0.839266 0.456671 +vt 0.835482 0.447027 +vt 0.835077 0.458391 +vt 0.832419 0.463270 +vt 0.157130 0.455048 +vt 0.161233 0.455440 +vt 0.161263 0.457448 +vt 0.141013 0.434082 +vt 0.139986 0.430014 +vt 0.149413 0.442856 +vt 0.156841 0.477939 +vt 0.158598 0.484661 +vt 0.872401 0.421509 +vt 0.864923 0.400484 +vt 0.874771 0.420745 +vt 0.164519 0.456530 +vt 0.164859 0.465746 +vt 0.827853 0.457101 +vt 0.826782 0.457215 +vt 0.971701 0.560564 +vt 0.972241 0.531458 +vt 0.831458 0.455309 +vt 0.831996 0.445359 +vt 0.845799 0.532056 +vt 0.868129 0.448996 +vt 0.835578 0.454916 +vt 0.831453 0.457315 +vt 0.157665 0.458528 +vt 0.824571 0.453950 +vt 0.165654 0.458178 +vt 0.166721 0.461394 +vt 0.165376 0.458683 +vt 0.623362 0.573874 +vt 0.631938 0.570488 +vt 0.627607 0.575167 +vt 0.140795 0.492391 +vt 0.153858 0.512492 +vt 0.158898 0.505998 +vt 0.079974 0.536981 +vt 0.092782 0.540463 +vt 0.098862 0.565130 +vt 0.838484 0.490513 +vt 0.873879 0.460770 +vt 0.163289 0.457152 +vt 0.164823 0.457430 +vt 0.126135 0.441577 +vt 0.124716 0.439457 +vt 0.579609 0.151601 +vt 0.534965 0.142921 +vt 0.162696 0.434514 +vt 0.154613 0.486999 +vt 0.922640 0.415840 +vt 0.109627 0.458019 +vt 0.574187 0.564121 +vt 0.152968 0.488039 +vt 0.152871 0.490109 +vt 0.781214 0.438251 +vt 0.796889 0.436436 +vt 0.847887 0.491769 +vt 0.159548 0.443839 +vt 0.158218 0.438982 +vt 0.160321 0.438074 +vt 0.846460 0.502961 +vt 0.152726 0.490804 +vt 0.834025 0.445028 +vt 0.824334 0.458283 +vt 0.822403 0.455957 +vt 0.824110 0.456095 +vt 0.174386 0.452582 +vt 0.176142 0.452936 +vt 0.136091 0.491238 +vt 0.132418 
0.490114 +vt 0.544515 0.865441 +vt 0.497502 0.886260 +vt 0.497474 0.866962 +vt 0.642416 0.737327 +vt 0.619326 0.759157 +vt 0.611275 0.745948 +vt 0.839408 0.428900 +vt 0.838400 0.433958 +vt 0.134055 0.395454 +vt 0.165903 0.457346 +vt 0.164841 0.457225 +vt 0.828944 0.451929 +vt 0.828070 0.452461 +vt 0.848287 0.442841 +vt 0.844662 0.440691 +vt 0.164574 0.452586 +vt 0.161973 0.459579 +vt 0.843203 0.442724 +vt 0.841510 0.438158 +vt 0.835578 0.435477 +vt 0.171270 0.455239 +vt 0.174598 0.456333 +vt 0.171104 0.458461 +vt 0.869775 0.513306 +vt 0.876361 0.513264 +vt 0.871914 0.519527 +vt 0.145959 0.444638 +vt 0.147044 0.453862 +vt 0.142286 0.446903 +vt 0.120239 0.421543 +vt 0.119883 0.435109 +vt 0.115504 0.433332 +vt 0.816892 0.493808 +vt 0.101369 0.592903 +vt 0.112268 0.571927 +vt 0.117596 0.610444 +vt 0.112914 0.507447 +vt 0.116908 0.502055 +vt 0.135412 0.546105 +vt 0.121368 0.538391 +vt 0.130077 0.537561 +vt 0.112192 0.445413 +vt 0.112362 0.470085 +vt 0.690596 0.462505 +vt 0.704884 0.467959 +vt 0.527621 0.400626 +vt 0.532344 0.403463 +vt 0.525828 0.404990 +vt 0.590835 0.856742 +vt 0.557496 0.885355 +vt 0.512562 0.412577 +vt 0.510405 0.414236 +vt 0.509920 0.412702 +vt 0.616530 0.431486 +vt 0.607875 0.448149 +vt 0.513157 0.414386 +vt 0.510772 0.415369 +vt 0.706139 0.544956 +vt 0.693071 0.552724 +vt 0.696843 0.538496 +vt 0.614056 0.371650 +vt 0.596617 0.363660 +vt 0.609313 0.355157 +vt 0.579262 0.472490 +vt 0.599593 0.462834 +vt 0.588878 0.478079 +vt 0.638273 0.556895 +vt 0.509340 0.415427 +vt 0.508982 0.414272 +vt 0.546374 0.512126 +vt 0.551629 0.507151 +vt 0.555587 0.516867 +vt 0.860967 0.502216 +vt 0.861984 0.503578 +vt 0.527162 0.499287 +vt 0.523046 0.496975 +vt 0.534908 0.497467 +vt 0.171035 0.469486 +vt 0.170221 0.493284 +vt 0.163228 0.493474 +vt 0.839064 0.452887 +vt 0.745698 0.586680 +vt 0.731815 0.564972 +vt 0.739753 0.550363 +vt 0.512583 0.506208 +vt 0.529936 0.585635 +vt 0.541892 0.590368 +vt 0.530723 0.594725 +vt 0.137131 0.437484 +vt 0.137258 0.443751 +vt 0.132572 0.507788 +vt 0.514222 0.501409 +vt 0.519405 0.598966 +vt 0.517431 0.588507 +vt 0.885098 0.429574 +vt 0.847930 0.457579 +vt 0.590053 0.603431 +vt 0.602277 0.605207 +vt 0.589971 0.605967 +vt 0.521665 0.513111 +vt 0.528992 0.506912 +vt 0.834313 0.438857 +vt 0.833330 0.436734 +vt 0.495604 0.155380 +vt 0.495763 0.140227 +vt 0.456642 0.142801 +vt 0.819626 0.463578 +vt 0.515322 0.515574 +vt 0.526392 0.515320 +vt 0.522088 0.521893 +vt 0.518357 0.518892 +vt 0.510266 0.435417 +vt 0.503397 0.432709 +vt 0.508429 0.429012 +vt 0.696546 0.704386 +vt 0.708617 0.712859 +vt 0.837775 0.862671 +vt 0.800596 0.904287 +vt 0.807062 0.878193 +vt 0.792384 0.228006 +vt 0.810809 0.205530 +vt 0.827256 0.237922 +vt 0.511452 0.502781 +vt 0.496248 0.490635 +vt 0.502170 0.493027 +vt 0.496263 0.493190 +vt 0.606835 0.873017 +vt 0.581502 0.903595 +vt 0.829584 0.456118 +vt 0.479636 0.504410 +vt 0.480006 0.504149 +vt 0.480021 0.504624 +vt 0.497596 0.905660 +vt 0.502668 0.429395 +vt 0.507072 0.426047 +vt 0.479362 0.500914 +vt 0.476979 0.499827 +vt 0.478947 0.498856 +vt 0.514408 0.430152 +vt 0.519491 0.443818 +vt 0.515806 0.440655 +vt 0.554820 0.487678 +vt 0.550678 0.479113 +vt 0.566053 0.473330 +vt 0.480131 0.503895 +vt 0.480651 0.504547 +vt 0.480085 0.504064 +vt 0.526672 0.488971 +vt 0.533357 0.482286 +vt 0.533546 0.491803 +vt 0.539318 0.489023 +vt 0.522566 0.491348 +vt 0.519136 0.498251 +vt 0.517108 0.496102 +vt 0.802223 0.635054 +vt 0.911653 0.824908 +vt 0.909457 0.776219 +vt 0.937275 0.784646 +vt 0.827920 0.465609 +vt 0.548163 0.753960 +vt 0.524567 0.772294 +vt 
0.521114 0.755115 +vt 0.512800 0.503781 +vt 0.789443 0.262334 +vt 0.805713 0.314811 +vt 0.772247 0.268393 +vt 0.823787 0.461097 +vt 0.864087 0.526570 +vt 0.524935 0.455792 +vt 0.522702 0.452061 +vt 0.541686 0.452855 +vt 0.539225 0.447545 +vt 0.496233 0.488112 +vt 0.503545 0.490605 +vt 0.589534 0.391848 +vt 0.577791 0.384208 +vt 0.588611 0.380784 +vt 0.588049 0.403513 +vt 0.601352 0.390783 +vt 0.598814 0.405900 +vt 0.573652 0.431877 +vt 0.555910 0.440236 +vt 0.568941 0.424675 +vt 0.560008 0.455333 +vt 0.543218 0.459099 +vt 0.559393 0.446902 +vt 0.822721 0.458698 +vt 0.612299 0.596173 +vt 0.614196 0.389666 +vt 0.610985 0.408415 +vt 0.600915 0.376482 +vt 0.604171 0.426149 +vt 0.558153 0.401767 +vt 0.569231 0.394169 +vt 0.568140 0.401424 +vt 0.578795 0.392815 +vt 0.568586 0.388067 +vt 0.557248 0.408636 +vt 0.566482 0.409114 +vt 0.537214 0.409458 +vt 0.530182 0.416044 +vt 0.529265 0.410116 +vt 0.127244 0.507464 +vt 0.125146 0.509968 +vt 0.839953 0.448188 +vt 0.715060 0.503416 +vt 0.746934 0.525283 +vt 0.727481 0.517036 +vt 0.917567 0.439888 +vt 0.911571 0.435363 +vt 0.528917 0.405202 +vt 0.526788 0.408573 +vt 0.523558 0.407959 +vt 0.513820 0.415534 +vt 0.544497 0.397804 +vt 0.621038 0.548656 +vt 0.608100 0.552583 +vt 0.546305 0.401817 +vt 0.559107 0.395101 +vt 0.535378 0.405270 +vt 0.558542 0.392259 +vt 0.547091 0.408584 +vt 0.538944 0.416983 +vt 0.547339 0.416702 +vt 0.556224 0.417727 +vt 0.531490 0.440960 +vt 0.544932 0.433189 +vt 0.535487 0.444197 +vt 0.521170 0.434825 +vt 0.512304 0.419513 +vt 0.509637 0.422161 +vt 0.510564 0.418688 +vt 0.526373 0.436745 +vt 0.538877 0.430268 +vt 0.511441 0.424242 +vt 0.516249 0.423275 +vt 0.514804 0.416911 +vt 0.511745 0.417312 +vt 0.511303 0.416420 +vt 0.509763 0.416510 +vt 0.510192 0.417292 +vt 0.511111 0.459534 +vt 0.496174 0.464125 +vt 0.531619 0.112351 +vt 0.495872 0.125641 +vt 0.495922 0.111115 +vt 0.526250 0.426483 +vt 0.532429 0.428046 +vt 0.515662 0.418857 +vt 0.642113 0.585364 +vt 0.638014 0.590149 +vt 0.958163 0.683154 +vt 0.974667 0.727263 +vt 0.948547 0.713584 +vt 0.507264 0.580003 +vt 0.496535 0.569162 +vt 0.551165 0.435415 +vt 0.521241 0.448284 +vt 0.509893 0.452544 +vt 0.627586 0.559022 +vt 0.587537 0.351649 +vt 0.581844 0.328914 +vt 0.512456 0.410520 +vt 0.509192 0.411594 +vt 0.551217 0.387898 +vt 0.555452 0.373888 +vt 0.562644 0.382925 +vt 0.576554 0.361362 +vt 0.556818 0.358955 +vt 0.565554 0.350706 +vt 0.572917 0.377375 +vt 0.584247 0.371036 +vt 0.545776 0.378739 +vt 0.547575 0.364595 +vt 0.573372 0.455862 +vt 0.613185 0.584938 +vt 0.610876 0.595720 +vt 0.565741 0.369285 +vt 0.630099 0.555227 +vt 0.526487 0.397253 +vt 0.511446 0.403299 +vt 0.522995 0.388556 +vt 0.779178 0.865621 +vt 0.818133 0.848992 +vt 0.573557 0.340643 +vt 0.532609 0.376887 +vt 0.531879 0.390520 +vt 0.527872 0.382313 +vt 0.555904 0.330797 +vt 0.496241 0.482658 +vt 0.508994 0.477727 +vt 0.527186 0.342028 +vt 0.532102 0.333509 +vt 0.523723 0.370777 +vt 0.529186 0.363661 +vt 0.539348 0.370736 +vt 0.533212 0.396957 +vt 0.549551 0.340559 +vt 0.538715 0.313464 +vt 0.536257 0.324240 +vt 0.560010 0.318855 +vt 0.542545 0.348792 +vt 0.535360 0.355770 +vt 0.517897 0.358633 +vt 0.496217 0.449866 +vt 0.496210 0.452659 +vt 0.506048 0.501805 +vt 0.503748 0.504458 +vt 0.502951 0.498977 +vt 0.500245 0.497900 +vt 0.505238 0.500152 +vt 0.526393 0.470827 +vt 0.538676 0.474398 +vt 0.523279 0.476930 +vt 0.511587 0.515400 +vt 0.509263 0.517317 +vt 0.507926 0.519037 +vt 0.506925 0.517443 +vt 0.512468 0.517712 +vt 0.508346 0.515004 +vt 0.504635 0.508423 +vt 0.504839 0.513087 +vt 0.502799 
0.515370 +vt 0.504546 0.520287 +vt 0.503523 0.517641 +vt 0.506998 0.511702 +vt 0.505680 0.515289 +vt 0.513423 0.701458 +vt 0.496676 0.721244 +vt 0.496674 0.700385 +vt 0.513281 0.519556 +vt 0.515682 0.525436 +vt 0.510898 0.522111 +vt 0.508740 0.527931 +vt 0.505990 0.523424 +vt 0.520906 0.532412 +vt 0.530520 0.528431 +vt 0.512053 0.371570 +vt 0.514715 0.366086 +vt 0.504114 0.438864 +vt 0.516244 0.382517 +vt 0.503897 0.396345 +vt 0.508839 0.377906 +vt 0.519740 0.376361 +vt 0.501592 0.402876 +vt 0.498151 0.405557 +vt 0.498760 0.402038 +vt 0.497085 0.812617 +vt 0.473892 0.501702 +vt 0.476271 0.504514 +vt 0.472945 0.505855 +vt 0.495297 0.405251 +vt 0.495398 0.401461 +vt 0.487660 0.505616 +vt 0.484833 0.503820 +vt 0.486651 0.501775 +vt 0.508756 0.410283 +vt 0.508097 0.411771 +vt 0.507332 0.410840 +vt 0.499975 0.395274 +vt 0.495532 0.394835 +vt 0.512433 0.408124 +vt 0.482669 0.512447 +vt 0.480389 0.508903 +vt 0.482675 0.508687 +vt 0.482274 0.501522 +vt 0.482508 0.504076 +vt 0.481245 0.502740 +vt 0.478183 0.503841 +vt 0.479398 0.503552 +vt 0.479343 0.503869 +vt 0.505628 0.409176 +vt 0.507281 0.407375 +vt 0.521866 0.350561 +vt 0.512286 0.337764 +vt 0.503173 0.407559 +vt 0.504859 0.404679 +vt 0.733657 0.756547 +vt 0.746327 0.721236 +vt 0.762615 0.728409 +vt 0.777683 0.738507 +vt 0.787212 0.698478 +vt 0.800741 0.715520 +vt 0.714002 0.587146 +vt 0.723974 0.602233 +vt 0.714263 0.607027 +vt 0.477070 0.502411 +vt 0.478466 0.501355 +vt 0.793385 0.840092 +vt 0.829469 0.821333 +vt 0.853506 0.871477 +vt 0.893352 0.854190 +vt 0.860410 0.887132 +vt 0.848298 0.836743 +vt 0.931401 0.257462 +vt 0.734656 0.874546 +vt 0.756593 0.892816 +vt 0.708212 0.900978 +vt 0.677667 0.927363 +vt 0.664771 0.899138 +vt 0.496881 0.792407 +vt 0.798186 0.923734 +vt 0.896755 0.321851 +vt 0.866272 0.315934 +vt 0.776610 0.825800 +vt 0.755370 0.850882 +vt 0.870889 0.385338 +vt 0.868375 0.385023 +vt 0.901829 0.405782 +vt 0.577962 0.401685 +vt 0.803765 0.454062 +vt 0.919091 0.325182 +vt 0.508141 0.399056 +vt 0.885073 0.437078 +vt 0.888007 0.447280 +vt 0.613253 0.582441 +vt 0.607792 0.580865 +vt 0.625626 0.494808 +vt 0.605805 0.500095 +vt 0.618592 0.484563 +vt 0.853467 0.379892 +vt 0.856067 0.378506 +vt 0.857310 0.380915 +vt 0.870825 0.532831 +vt 0.877254 0.535470 +vt 0.859359 0.379283 +vt 0.861605 0.381166 +vt 0.635908 0.533490 +vt 0.622640 0.542517 +vt 0.622405 0.537558 +vt 0.955576 0.744383 +vt 0.929285 0.745773 +vt 0.861451 0.377472 +vt 0.865324 0.381150 +vt 0.496610 0.617182 +vt 0.496412 0.524952 +vt 0.500919 0.521205 +vt 0.501516 0.524429 +vt 0.855772 0.375851 +vt 0.763987 0.383409 +vt 0.790724 0.381560 +vt 0.782121 0.408923 +vt 0.849921 0.378538 +vt 0.813714 0.458141 +vt 0.816383 0.266551 +vt 0.817021 0.309268 +vt 0.869605 0.382552 +vt 0.896074 0.529959 +vt 0.890763 0.525173 +vt 0.893818 0.522483 +vt 0.871300 0.485865 +vt 0.875211 0.469610 +vt 0.860547 0.393759 +vt 0.583220 0.416632 +vt 0.496262 0.430323 +vt 0.275629 0.212786 +vt 0.260447 0.184716 +vt 0.597986 0.621892 +vt 0.601471 0.627029 +vt 0.910789 0.567847 +vt 0.837477 0.449665 +vt 0.607836 0.583220 +vt 0.601547 0.585579 +vt 0.641442 0.569290 +vt 0.891717 0.457641 +vt 0.859261 0.502416 +vt 0.374032 0.527031 +vt 0.385766 0.540719 +vt 0.372388 0.532676 +vt 0.515422 0.497172 +vt 0.511436 0.498715 +vt 0.511739 0.496967 +vt 0.858895 0.244265 +vt 0.842644 0.214193 +vt 0.880655 0.486596 +vt 0.175714 0.460212 +vt 0.176836 0.463777 +vt 0.109056 0.499326 +vt 0.885539 0.447508 +vt 0.889706 0.457633 +vt 0.897327 0.511008 +vt 0.896723 0.517194 +vt 0.127065 0.499249 +vt 0.895759 0.535702 +vt 
0.889118 0.533454 +vt 0.873903 0.640989 +vt 0.889781 0.659102 +vt 0.871162 0.677372 +vt 0.910038 0.601245 +vt 0.891300 0.622943 +vt 0.121708 0.519615 +vt 0.114955 0.517070 +vt 0.117247 0.513351 +vt 0.880799 0.536163 +vt 0.869393 0.527736 +vt 0.875365 0.528482 +vt 0.880137 0.433566 +vt 0.882567 0.436728 +vt 0.849390 0.504553 +vt 0.782755 0.752257 +vt 0.805886 0.729645 +vt 0.758499 0.787390 +vt 0.787620 0.765824 +vt 0.858718 0.626100 +vt 0.854055 0.659370 +vt 0.827778 0.454368 +vt 0.563991 0.419177 +vt 0.575506 0.411629 +vt 0.602036 0.579051 +vt 0.519208 0.493840 +vt 0.115956 0.485171 +vt 0.118605 0.492071 +vt 0.841335 0.431988 +vt 0.876438 0.611078 +vt 0.630971 0.887634 +vt 0.620961 0.918605 +vt 0.849757 0.698009 +vt 0.838881 0.683261 +vt 0.819717 0.731913 +vt 0.819481 0.752818 +vt 0.845681 0.722403 +vt 0.822423 0.775991 +vt 0.848132 0.752101 +vt 0.868820 0.702788 +vt 0.873191 0.733015 +vt 0.881039 0.532059 +vt 0.887296 0.525042 +vt 0.624362 0.599188 +vt 0.762704 0.798304 +vt 0.792308 0.777292 +vt 0.767442 0.808817 +vt 0.793752 0.796129 +vt 0.826779 0.691096 +vt 0.823396 0.360326 +vt 0.837432 0.353794 +vt 0.160491 0.429626 +vt 0.169641 0.430205 +vt 0.136948 0.485503 +vt 0.772334 0.682395 +vt 0.807641 0.525283 +vt 0.790903 0.526456 +vt 0.764466 0.543330 +vt 0.783086 0.555325 +vt 0.773649 0.592049 +vt 0.125089 0.523227 +vt 0.129539 0.526641 +vt 0.793765 0.599501 +vt 0.777104 0.638269 +vt 0.120883 0.469538 +vt 0.123336 0.501475 +vt 0.123957 0.498119 +vt 0.681220 0.659993 +vt 0.147499 0.517437 +vt 0.142624 0.513239 +vt 0.821763 0.469348 +vt 0.635889 0.539468 +vt 0.144874 0.457728 +vt 0.138784 0.450060 +vt 0.145754 0.455280 +vt 0.151897 0.479557 +vt 0.927190 0.559531 +vt 0.924002 0.586949 +vt 0.871632 0.499215 +vt 0.939129 0.575464 +vt 0.496775 0.773065 +vt 0.512182 0.502386 +vt 0.512249 0.503561 +vt 0.186940 0.436927 +vt 0.542242 0.495094 +vt 0.528193 0.494658 +vt 0.142102 0.419351 +vt 0.954258 0.566655 +vt 0.026627 0.403186 +vt 0.048239 0.364844 +vt 0.044804 0.398632 +vt 0.154283 0.522091 +vt 0.894604 0.516829 +vt 0.894923 0.511104 +vt 0.673244 0.725044 +vt 0.683356 0.734852 +vt 0.963330 0.658076 +vt 0.861350 0.806737 +vt 0.888892 0.810067 +vt 0.889742 0.684185 +vt 0.100373 0.367920 +vt 0.812081 0.808772 +vt 0.134091 0.502519 +vt 0.134519 0.504169 +vt 0.923271 0.621656 +vt 0.933662 0.653654 +vt 0.923245 0.644506 +vt 0.938308 0.676322 +vt 0.924428 0.665299 +vt 0.155566 0.428985 +vt 0.159172 0.413941 +vt 0.498047 0.923828 +vt 0.930272 0.522736 +vt 0.930377 0.477399 +vt 0.496558 0.581177 +vt 0.507950 0.590528 +vt 0.922426 0.526688 +vt 0.847206 0.786251 +vt 0.722598 0.578171 +vt 0.708540 0.568110 +vt 0.714984 0.554815 +vt 0.559332 0.579134 +vt 0.554907 0.557706 +vt 0.569882 0.552118 +vt 0.556184 0.566259 +vt 0.701322 0.559063 +vt 0.552834 0.550107 +vt 0.542384 0.552612 +vt 0.550708 0.540023 +vt 0.120153 0.482992 +vt 0.547982 0.492008 +vt 0.555576 0.499506 +vt 0.563006 0.495411 +vt 0.576610 0.494439 +vt 0.569573 0.508144 +vt 0.567633 0.489816 +vt 0.868056 0.439421 +vt 0.579948 0.528232 +vt 0.564701 0.534777 +vt 0.575773 0.519132 +vt 0.539218 0.520125 +vt 0.543654 0.539955 +vt 0.567410 0.542768 +vt 0.584293 0.537043 +vt 0.541460 0.508150 +vt 0.531686 0.516832 +vt 0.822714 0.461454 +vt 0.818606 0.460085 +vt 0.517337 0.578904 +vt 0.604122 0.613375 +vt 0.547154 0.528836 +vt 0.884307 0.516760 +vt 0.889853 0.520964 +vt 0.107803 0.429420 +vt 0.105305 0.435312 +vt 0.571707 0.558377 +vt 0.557819 0.572631 +vt 0.926279 0.706920 +vt 0.598489 0.487734 +vt 0.585597 0.503678 +vt 0.164138 0.458549 +vt 0.164646 
[... several thousand lines of Wavefront OBJ mesh data elided from this added asset file: "vt u v" texture coordinates, an "s off" smoothing-group directive, and "f v/vt v/vt v/vt" triangle face records ...]
961/492 +f 927/493 965/494 966/495 +f 885/463 880/461 886/485 +f 925/496 966/495 967/497 +f 343/498 341/499 340/500 +f 965/494 971/501 968/502 +f 968/502 970/503 969/504 +f 972/505 969/504 973/506 +f 963/507 981/508 978/509 +f 336/510 338/511 339/512 +f 992/513 827/514 828/454 +f 810/515 996/516 959/517 +f 1007/518 1002/519 1008/520 +f 1012/521 827/514 1009/522 +f 1011/523 1007/518 1008/520 +f 294/524 292/334 291/333 +f 907/525 920/526 919/527 +f 1017/528 1019/529 1018/530 +f 1020/531 1022/532 1021/533 +f 1035/534 1020/531 1044/535 +f 1061/536 1047/537 1046/538 +f 1063/539 1046/538 1062/540 +f 1044/535 1021/533 1045/541 +f 1102/542 1097/543 1109/544 +f 1116/545 1118/546 1117/547 +f 1016/548 1014/549 1013/550 +f 881/460 904/467 903/473 +f 816/446 1127/551 817/444 +f 1094/552 949/553 1097/543 +f 514/554 167/555 515/556 +f 146/557 81/59 82/61 +f 1133/558 1130/559 1129/560 +f 1102/542 1126/561 822/448 +f 822/448 1116/545 816/446 +f 1145/562 1148/563 1136/564 +f 1076/565 1151/566 1069/567 +f 1148/563 1086/568 1087/569 +f 1109/544 1114/570 1136/564 +f 1154/571 1156/572 1155/573 +f 1153/574 967/497 1156/572 +f 3464/575 463/576 3714/577 +f 1144/578 1152/579 1151/566 +f 1156/572 1163/580 1162/581 +f 1164/582 1169/583 1165/584 +f 1145/562 1176/585 1147/586 +f 1114/570 1154/571 1171/587 +f 1183/588 1164/582 1177/589 +f 1184/590 1177/589 1185/591 +f 1069/567 1062/540 1064/592 +f 1023/593 1194/594 1022/532 +f 1162/581 1170/595 1169/583 +f 1195/596 1185/591 1191/597 +f 1183/588 1176/585 1155/573 +f 1201/598 1152/579 1196/599 +f 1061/536 1201/598 1202/600 +f 3825/601 843/602 3836/603 +f 1203/604 1195/596 1034/605 +f 1202/600 1184/590 1195/596 +f 1218/606 1203/604 1217/607 +f 1060/608 1202/600 1203/604 +f 1225/609 1217/607 1219/610 +f 671/611 694/612 670/613 +f 1219/610 1024/614 1035/534 +f 1217/607 1034/605 1024/614 +f 1226/615 1015/616 1227/617 +f 1243/618 1234/619 1233/620 +f 1231/621 1229/622 1228/623 +f 847/624 1233/620 843/602 +f 843/602 1244/625 878/626 +f 1233/620 1245/627 1244/625 +f 1923/628 233/629 216/138 +f 2100/630 499/305 500/304 +f 3914/631 2135/632 3900/633 +f 61/44 323/261 505/634 +f 210/134 217/465 457/635 +f 1299/636 1309/637 1300/638 +f 3863/639 1231/621 3862/640 +f 1066/641 1321/642 1322/643 +f 1322/643 1228/623 1229/622 +f 1060/608 1321/642 1047/537 +f 1325/644 1323/645 1310/646 +f 1244/625 1332/647 1330/648 +f 1290/649 1242/650 1288/651 +f 1337/652 1339/653 1338/654 +f 1330/648 1340/655 1339/653 +f 1341/656 1245/627 1294/657 +f 1342/658 1332/647 1341/656 +f 1343/659 1341/656 1344/660 +f 1344/660 1294/657 1293/661 +f 3857/662 1345/663 1346/664 +f 1347/665 1342/658 1343/659 +f 3856/666 1295/667 1345/663 +f 1349/668 1348/669 1347/665 +f 3861/670 1347/665 3853/671 +f 3853/671 1343/659 3857/662 +f 906/466 829/453 1353/672 +f 905/673 1353/672 1354/674 +f 865/675 1355/676 1357/677 +f 1355/676 829/453 825/455 +f 1357/677 825/455 826/678 +f 1356/679 1353/672 1355/676 +f 3834/680 1338/654 3833/681 +f 866/682 1356/679 865/675 +f 1358/683 1354/674 1356/679 +f 3828/684 1337/652 3834/680 +f 838/685 1012/521 841/686 +f 1363/687 1338/654 1339/653 +f 1368/688 1339/653 1340/655 +f 1348/669 1340/655 1342/658 +f 1371/689 3824/690 3829/691 +f 524/692 526/693 523/694 +f 528/695 530/696 527/697 +f 1372/698 1369/699 1371/689 +f 535/700 537/701 538/702 +f 1367/703 1348/669 1350/704 +f 1370/705 1359/706 1369/699 +f 1369/699 3833/681 3824/690 +f 1373/707 903/473 904/467 +f 555/708 557/709 558/710 +f 1374/711 910/712 1373/707 +f 1373/707 905/673 1375/713 +f 1374/711 1375/713 1376/714 +f 1375/713 1354/674 
1370/705 +f 1378/715 908/716 1377/717 +f 376/718 651/371 1387/719 +f 1388/720 973/506 969/504 +f 586/721 588/722 585/723 +f 1390/724 1388/720 1391/725 +f 1391/725 969/504 970/503 +f 597/726 599/727 596/728 +f 600/729 602/730 603/731 +f 1392/732 970/503 979/733 +f 608/734 610/735 607/736 +f 612/737 523/694 611/738 +f 614/739 616/740 613/741 +f 617/742 25/25 24/27 +f 620/743 622/744 619/745 +f 1397/746 1391/725 1392/732 +f 1398/747 1392/732 1399/748 +f 1400/749 920/526 1378/715 +f 1402/750 1165/584 1169/583 +f 636/751 638/752 635/753 +f 930/754 1402/750 929/755 +f 641/756 643/757 644/758 +f 929/755 1169/583 1170/595 +f 1404/759 1403/760 1402/750 +f 1411/761 1365/762 1406/763 +f 1412/764 1415/765 1414/766 +f 652/767 654/768 655/769 +f 609/770 657/771 658/772 +f 659/773 661/774 662/775 +f 558/710 586/721 585/723 +f 665/776 667/777 664/778 +f 1413/779 1406/763 1412/764 +f 1418/780 3810/781 3813/782 +f 1417/783 1371/689 1416/784 +f 1416/784 3829/691 3810/781 +f 1419/785 1416/784 1418/780 +f 1386/786 1374/711 1420/787 +f 677/788 679/789 676/790 +f 1385/791 1420/787 1421/792 +f 1420/787 1376/714 1422/793 +f 685/794 687/795 688/796 +f 1421/792 1422/793 1423/797 +f 528/695 691/798 658/772 +f 1422/793 1372/698 1417/783 +f 1018/530 747/799 1425/800 +f 699/801 701/802 702/803 +f 1424/804 579/344 1017/528 +f 3663/805 1767/806 3660/807 +f 700/808 711/809 712/810 +f 1093/811 3754/812 1571/813 +f 717/814 530/696 719/815 +f 721/816 723/817 720/818 +f 704/389 3543/819 3554/820 +f 19/11 1426/821 1430/822 +f 1429/823 1427/824 1426/821 +f 1658/825 3542/826 3544/827 +f 1439/828 1434/829 1438/830 +f 1437/831 1435/832 1434/829 +f 1426/821 1433/833 1432/834 +f 1442/835 1432/834 1440/836 +f 742/837 744/838 745/839 +f 1432/834 1441/840 1440/836 +f 751/841 753/842 750/843 +f 1438/830 1403/760 1405/844 +f 1446/845 1444/846 1443/847 +f 1447/848 1446/845 1443/847 +f 3860/849 1443/847 3848/850 +f 1415/765 1445/851 1446/845 +f 1414/766 1446/845 1448/852 +f 2173/853 469/302 470/301 +f 777/854 779/855 686/856 +f 780/857 782/858 783/859 +f 1676/860 1806/861 1675/862 +f 3859/863 1349/668 3861/670 +f 789/864 791/865 788/866 +f 745/839 793/867 742/837 +f 652/767 795/868 653/869 +f 1455/870 1457/871 1456/872 +f 1454/873 1350/704 1452/874 +f 610/735 802/875 801/876 +f 3831/877 698/388 703/390 +f 1352/878 1045/541 1351/879 +f 1015/616 1459/880 1460/881 +f 811/882 813/883 814/884 +f 2118/885 3817/886 2117/887 +f 819/888 821/889 818/890 +f 1466/891 1194/594 1190/892 +f 1467/893 1465/894 1466/891 +f 1182/895 1468/896 1467/893 +f 832/897 833/898 831/899 +f 835/900 837/901 834/902 +f 1467/893 1190/892 1182/895 +f 1469/903 1444/846 1445/851 +f 840/904 712/810 690/905 +f 1453/906 1469/903 1454/873 +f 599/727 845/907 846/908 +f 1226/615 3849/909 3855/910 +f 851/911 853/912 850/913 +f 855/914 856/915 854/916 +f 858/917 860/918 857/919 +f 861/920 863/921 864/922 +f 1014/549 3855/910 3858/923 +f 867/924 868/925 869/926 +f 870/927 872/928 873/929 +f 874/930 876/931 877/932 +f 1310/646 1471/933 1309/637 +f 1476/934 1427/824 1475/935 +f 1461/936 1475/935 1460/881 +f 887/937 889/938 890/939 +f 788/866 892/940 891/941 +f 894/942 896/943 893/944 +f 1460/881 1227/617 1015/616 +f 901/945 524/692 612/737 +f 1465/894 1477/946 1464/947 +f 1865/948 3500/949 1864/950 +f 911/951 913/952 914/953 +f 916/954 918/955 915/956 +f 1850/957 3507/958 1851/959 +f 921/960 895/961 894/942 +f 1479/962 1436/963 1478/964 +f 926/965 667/777 601/966 +f 1481/967 563/968 1480/969 +f 269/163 84/970 1482/971 +f 931/972 933/973 916/954 +f 934/974 614/739 869/926 +f 936/975 
688/796 937/976 +f 938/977 662/775 939/978 +f 1484/979 122/980 123/981 +f 941/982 835/900 834/902 +f 320/983 509/984 510/985 +f 1849/986 3505/987 1850/957 +f 84/970 1484/979 83/60 +f 1487/988 1488/989 539/990 +f 1757/991 3502/992 3509/993 +f 365/207 1491/994 364/206 +f 461/285 80/312 511/311 +f 117/82 362/205 1496/995 +f 1494/996 541/382 1495/997 +f 926/965 957/998 958/999 +f 1477/946 1433/833 1476/934 +f 1709/1000 1585/1001 1700/1002 +f 119/194 1496/995 345/193 +f 271/1003 504/358 156/103 +f 511/311 189/319 186/121 +f 345/193 1492/1004 365/207 +f 255/149 344/1005 254/244 +f 57/42 299/1006 132/90 +f 1501/1007 268/1008 1500/1009 +f 267/159 1499/1010 145/1011 +f 974/1012 976/1013 977/1014 +f 1678/1015 1693/1016 1676/860 +f 3846/1017 1807/1018 3840/1019 +f 557/709 663/1020 558/710 +f 983/1021 984/1022 985/1023 +f 1443/847 3849/909 3848/850 +f 421/254 268/1008 265/158 +f 989/1024 991/1025 988/1026 +f 1490/1027 418/1028 1504/1029 +f 279/1030 309/1031 310/184 +f 997/1032 676/790 679/789 +f 998/1033 999/1034 875/1035 +f 1504/1029 38/21 1505/1036 +f 1506/1037 1507/1038 1486/1039 +f 360/1040 1508/1041 1506/1037 +f 1506/1037 207/131 360/1040 +f 939/978 1006/1042 938/977 +f 270/162 1483/1043 269/163 +f 66/45 546/330 512/309 +f 1010/1044 793/867 935/1045 +f 367/210 437/266 1509/1046 +f 244/318 363/204 243/340 +f 1670/1047 3534/1048 1669/1049 +f 209/215 1510/1050 226/1051 +f 235/1052 1512/1053 1511/1054 +f 1511/1054 234/1055 235/1052 +f 388/229 426/1056 425/1057 +f 427/280 250/142 237/279 +f 161/214 521/1058 231/1059 +f 100/71 45/33 44/140 +f 1025/1060 608/734 1027/1061 +f 1029/1062 1031/1063 1028/1064 +f 939/978 1033/1065 1032/1066 +f 41/28 594/355 99/1067 +f 1036/1068 783/859 1037/1069 +f 702/803 1038/1070 1039/1071 +f 1040/1072 1042/1073 1043/1074 +f 362/205 369/433 1493/1075 +f 187/123 477/1076 186/121 +f 70/51 539/990 198/128 +f 1511/1054 1521/1077 234/1055 +f 1049/1078 603/731 1048/1079 +f 613/741 869/926 614/739 +f 860/918 1051/1080 857/919 +f 1005/1081 1053/1082 1006/1042 +f 753/842 1056/1083 1054/1084 +f 1057/1085 1059/1086 1055/1087 +f 293/1088 1522/1089 292/334 +f 288/172 1521/1077 1522/1089 +f 292/334 1523/1090 329/332 +f 331/1091 1508/1041 1524/1092 +f 306/449 453/276 305/1093 +f 100/71 41/28 99/1067 +f 462/1094 78/58 79/57 +f 330/1095 1524/1092 462/1094 +f 635/753 1068/1096 636/751 +f 1485/1097 1526/1098 1528/1099 +f 206/129 1528/1099 1510/1050 +f 1070/1100 1072/1101 831/899 +f 942/1102 1073/1103 1074/1104 +f 916/954 1075/1105 917/1106 +f 440/270 1530/1107 439/271 +f 157/108 1514/1108 285/170 +f 1529/1109 5/6 1530/1107 +f 371/212 546/330 63/47 +f 790/1110 663/1020 791/865 +f 1077/1111 857/919 1078/1112 +f 1403/760 1435/832 1165/584 +f 1080/1113 1082/1114 1079/1115 +f 531/1116 1529/1109 440/270 +f 21/22 1084/1117 1085/1118 +f 272/350 1531/1119 561/348 +f 137/427 396/232 397/231 +f 690/905 802/875 691/798 +f 335/326 3567/1120 581/383 +f 1090/1121 1029/1062 1091/1122 +f 1092/1123 794/1124 1042/1073 +f 1533/1125 137/427 138/1126 +f 661/774 1095/1127 1096/1128 +f 69/52 445/1129 72/50 +f 1099/1130 1101/1131 1098/1132 +f 38/21 1536/1133 1505/1036 +f 1104/1134 1106/1135 1103/1136 +f 852/1137 1108/1138 853/912 +f 1377/717 909/1139 1386/786 +f 291/333 411/237 294/524 +f 316/1140 516/439 33/18 +f 539/990 1533/1125 534/1141 +f 1505/1036 316/1140 313/1142 +f 1110/1143 1112/1144 1113/1145 +f 1539/1146 460/283 264/1147 +f 522/1148 1537/1149 1539/1146 +f 834/902 1120/1150 1119/1151 +f 1122/1152 1124/1153 1121/1154 +f 612/737 1041/1155 901/945 +f 935/1045 615/1156 614/739 +f 276/169 130/1157 131/87 +f 
1686/1158 3522/1159 1685/1160 +f 193/1161 1541/1162 273/1163 +f 231/1059 1540/1164 287/1165 +f 556/1166 982/1167 557/709 +f 365/207 85/64 345/193 +f 190/1168 318/181 1541/1162 +f 1810/1169 3574/1170 3528/1171 +f 3924/1172 3512/1173 3791/1174 +f 1132/1175 985/1023 1131/1176 +f 208/130 102/76 132/90 +f 1134/1177 662/775 661/774 +f 1491/994 80/312 77/56 +f 429/1178 1542/1179 147/97 +f 1137/1180 915/956 918/955 +f 172/117 428/259 174/115 +f 1139/1181 1141/1182 1142/1183 +f 549/1184 54/39 55/38 +f 1517/1185 1518/1186 296/1187 +f 976/1013 1146/1188 938/977 +f 834/902 984/1022 941/982 +f 1543/1189 286/315 115/80 +f 1544/1190 239/1191 513/313 +f 133/92 1544/1190 1542/1179 +f 506/1192 257/1193 258/147 +f 420/251 328/394 401/249 +f 1158/1194 1160/1195 1157/1196 +f 587/1197 1103/1136 588/722 +f 1513/1198 1514/1108 151/101 +f 607/736 1027/1061 608/734 +f 304/1199 1545/1200 303/1201 +f 364/206 77/56 506/1192 +f 69/52 198/128 139/95 +f 1006/1042 976/1013 938/977 +f 700/808 1112/1144 701/802 +f 3877/1202 3525/1203 3787/1204 +f 1166/1205 657/771 656/1206 +f 1167/1207 858/917 1077/1111 +f 1300/638 3524/1208 1299/636 +f 1172/1209 1174/1210 1175/1211 +f 218/1212 200/1213 217/465 +f 1179/1214 1181/1215 1178/1216 +f 372/1217 97/73 98/72 +f 89/67 227/1218 88/65 +f 1186/1219 1188/1220 1189/1221 +f 172/117 1517/1185 1549/1222 +f 1482/971 81/59 1498/1223 +f 1090/1121 1193/1224 1030/1225 +f 394/1226 353/202 1551/1227 +f 526/693 1198/1228 1197/1229 +f 811/882 858/917 812/1230 +f 1199/1231 1026/1232 1025/1060 +f 606/359 697/1233 696/1234 +f 1204/1235 1206/1236 743/1237 +f 1207/1238 523/694 526/693 +f 1209/1239 867/924 613/741 +f 914/953 685/794 936/975 +f 1478/964 1441/840 1477/946 +f 1029/1062 1161/1240 1091/1122 +f 1211/1241 1213/1242 1214/1243 +f 3858/923 1451/1244 3859/863 +f 844/1245 1216/1246 845/907 +f 423/458 273/1163 274/164 +f 1220/1247 1222/1248 1083/1249 +f 22/1250 1224/1251 23/23 +f 882/1252 897/468 881/460 +f 1519/1253 173/116 387/227 +f 1189/1221 1232/1254 1178/1216 +f 349/197 550/337 416/336 +f 1071/1255 1236/1256 1072/1101 +f 1237/1257 1239/1258 745/839 +f 1221/1259 1189/1221 1240/1260 +f 1510/1050 1512/1053 333/1261 +f 939/978 1241/1262 1005/1081 +f 1556/1263 5/6 67/48 +f 1553/1264 394/1226 1551/1227 +f 404/1265 67/48 347/1266 +f 1247/1267 1249/1268 1246/1269 +f 142/93 238/180 92/139 +f 1251/1270 635/753 1250/1271 +f 850/913 1253/1272 851/911 +f 1254/1273 1077/1111 1255/1274 +f 57/42 133/92 429/1178 +f 443/1275 552/1276 551/1277 +f 1187/1278 1175/1211 1188/1220 +f 1256/1279 831/899 833/898 +f 432/1280 382/1281 383/222 +f 741/1282 747/799 746/1283 +f 434/265 351/246 415/245 +f 349/197 383/222 348/195 +f 251/323 1481/967 1480/969 +f 395/1284 352/203 353/202 +f 1518/1186 1550/1285 1551/1227 +f 1558/1286 378/216 340/500 +f 109/157 314/1287 315/192 +f 285/170 1513/1198 407/234 +f 395/1284 393/1288 392/1289 +f 202/135 410/238 1558/1286 +f 417/247 405/233 432/1280 +f 1246/1269 1258/1290 1247/1267 +f 3/223 297/1291 2/2 +f 1259/1292 1261/1293 1262/1294 +f 403/264 347/1266 284/224 +f 955/1295 953/1296 952/1297 +f 462/1094 51/432 330/1095 +f 1096/1128 687/795 1263/1298 +f 429/1178 60/40 57/42 +f 896/943 1265/1299 932/1300 +f 1267/1301 1268/1302 1266/1303 +f 423/458 327/1304 420/251 +f 37/19 180/120 40/1305 +f 20/24 1187/1278 1186/1219 +f 1256/1279 1071/1255 1070/1100 +f 155/357 156/103 504/358 +f 1188/1220 1269/1306 1232/1254 +f 1236/1256 1271/1307 1262/1294 +f 1272/1308 535/700 621/1309 +f 565/1310 463/576 464/1311 +f 1273/1312 745/839 1239/1258 +f 876/931 1255/1274 1274/1313 +f 316/1140 164/110 315/192 +f 
437/266 503/356 1509/1046 +f 194/124 1495/997 197/1314 +f 1275/1315 864/922 863/921 +f 624/360 631/1316 630/395 +f 1277/1317 1137/1180 1278/1318 +f 569/437 567/1319 566/1320 +f 1481/967 125/86 366/208 +f 324/189 547/431 326/1321 +f 1279/1322 1107/1323 1280/1324 +f 1525/1325 329/332 1523/1090 +f 1281/1326 1137/1180 918/955 +f 171/112 1553/1264 1552/1327 +f 1488/989 396/232 1533/1125 +f 1557/1328 1562/1329 1559/1330 +f 1282/1331 1206/1236 1205/1332 +f 1053/1082 1278/1318 1281/1326 +f 778/1333 607/736 1283/1334 +f 713/1335 3566/1336 3569/1337 +f 251/323 144/98 124/84 +f 4/1 348/195 383/222 +f 709/1338 707/1339 706/1340 +f 728/373 45/33 246/141 +f 261/1341 1563/1342 544/329 +f 789/864 1287/1343 790/1110 +f 74/54 263/156 282/286 +f 403/264 385/226 434/265 +f 1178/1216 1240/1260 1189/1221 +f 1223/1344 1289/1345 1224/1251 +f 158/107 150/102 151/101 +f 1220/1247 1186/1219 1221/1259 +f 296/1187 354/1346 433/1347 +f 784/428 1538/1348 1566/1349 +f 1181/1215 1291/1350 1222/1248 +f 1269/1306 1257/1351 1246/1269 +f 1566/1349 438/269 399/1352 +f 136/429 1566/1349 1564/1353 +f 1563/1342 169/113 1565/1354 +f 107/81 109/157 108/79 +f 1297/1355 1298/1356 1296/1357 +f 1464/947 1476/934 1461/936 +f 1520/1358 507/1359 508/1360 +f 1301/1361 1303/1362 1304/1363 +f 1306/1364 1308/1365 1305/1366 +f 175/176 1532/1367 272/350 +f 1532/1367 200/1213 201/137 +f 1311/1368 1312/1369 1313/1370 +f 872/928 1315/1371 1316/1372 +f 1317/1373 1318/1374 1319/1375 +f 3836/603 878/626 3828/684 +f 1305/1366 864/922 1276/1376 +f 1040/1072 863/921 1320/1377 +f 1564/1353 399/1352 169/113 +f 592/352 1575/1378 591/353 +f 591/353 1088/1379 689/368 +f 1581/1380 1583/1381 1582/1382 +f 1196/599 1147/586 1176/585 +f 710/1383 583/1384 713/1335 +f 285/170 408/248 386/225 +f 953/1296 961/492 963/507 +f 1594/1385 1589/1386 1593/1387 +f 1585/1001 1587/1388 1586/1389 +f 1592/1390 1590/1391 1589/1386 +f 3885/1392 944/1393 3899/1394 +f 1600/1395 1598/1396 1597/1397 +f 1604/1398 1602/1399 1601/1400 +f 1399/748 979/733 980/1401 +f 1589/1386 1596/1402 1595/1403 +f 1610/1404 1608/1405 1607/1406 +f 298/1407 361/1408 360/1040 +f 551/1277 276/169 277/242 +f 1627/1409 3814/1410 3819/1411 +f 859/1412 818/890 860/918 +f 1078/1112 1328/1413 1329/1414 +f 1555/1415 1530/1107 1556/1263 +f 572/1416 680/1417 681/1418 +f 772/422 776/1419 774/425 +f 977/1014 918/955 917/1106 +f 404/1265 392/1289 393/1288 +f 1054/1084 750/843 753/842 +f 1644/1420 1643/1421 1642/1422 +f 932/1300 1082/1114 933/973 +f 1067/1423 1334/1424 1068/1096 +f 1540/1164 264/1147 114/155 +f 597/726 643/757 1336/1425 +f 471/1426 762/407 470/301 +f 1642/1422 3831/877 3842/1427 +f 1644/1420 3842/1427 3843/1428 +f 923/476 950/480 927/493 +f 96/68 548/1429 95/387 +f 520/1430 522/1148 521/1058 +f 445/1129 443/1275 442/487 +f 849/1431 848/1432 842/1433 +f 480/363 755/401 493/297 +f 1646/1434 481/404 1639/1435 +f 311/1436 1567/1437 170/179 +f 464/1311 3497/1438 3495/1439 +f 3497/1438 1361/1440 3495/1439 +f 234/1055 236/341 235/1052 +f 190/1168 192/250 191/338 +f 153/105 152/1441 149/100 +f 231/1059 286/315 230/314 +f 1552/1327 1551/1227 1550/1285 +f 760/405 1648/1442 488/414 +f 3832/1443 1649/1444 3830/1445 +f 828/454 826/678 825/455 +f 52/34 166/218 165/217 +f 1663/1446 1661/1447 1660/1448 +f 287/1165 115/80 286/315 +f 668/1449 670/613 669/1450 +f 634/1451 640/1452 639/1453 +f 553/1454 559/1455 554/1456 +f 118/83 33/18 516/439 +f 492/415 491/424 490/1457 +f 3552/1458 1651/1459 3553/1460 +f 582/1461 584/1462 583/1384 +f 384/1463 392/1289 415/245 +f 1381/1464 1383/1465 1380/1466 +f 1562/1329 435/268 436/267 
+f 952/1297 951/479 947/488 +f 3529/1467 583/1384 584/1462 +f 1660/1448 1667/1468 1666/1469 +f 1394/1470 1396/1471 1393/1472 +f 1660/1448 1664/1473 1663/1446 +f 1671/1474 1669/1049 1668/1475 +f 1624/1476 1673/1477 1672/1478 +f 1407/1479 1409/1480 1410/1481 +f 1677/1482 1675/862 1674/1483 +f 1613/1484 1677/1482 1625/1485 +f 1679/1486 1676/860 1677/1482 +f 1626/1487 3811/1488 3814/1410 +f 534/1141 198/128 539/990 +f 176/177 150/102 159/106 +f 803/1489 734/400 800/441 +f 400/386 191/338 192/250 +f 65/486 143/253 144/98 +f 3526/1490 1656/1491 3540/1492 +f 402/1493 157/108 283/171 +f 629/1494 627/1495 626/1496 +f 461/285 49/36 50/35 +f 758/1497 495/298 756/426 +f 796/1498 786/1499 785/1500 +f 406/235 1513/1198 1560/1501 +f 342/1502 166/218 167/555 +f 1694/1503 1658/825 1695/1504 +f 1692/1505 1694/1503 1696/1506 +f 3557/1507 1652/1508 3552/1458 +f 3921/1509 1703/1510 3923/1511 +f 326/1321 302/257 90/66 +f 3916/1512 1702/1513 1706/1514 +f 3919/1515 1698/1516 1701/1517 +f 1699/1518 3927/1519 3918/1520 +f 1557/1328 1560/1501 1561/1521 +f 1127/551 1117/547 1128/1522 +f 400/386 289/175 346/384 +f 1706/1514 1707/1523 1708/1524 +f 1707/1523 1599/1525 1600/1395 +f 1709/1000 1708/1524 1588/1526 +f 1714/1527 1711/1528 1713/1529 +f 3919/1515 1709/1000 1700/1002 +f 1704/1530 1577/1531 1703/1510 +f 1716/1532 1713/1529 1715/1533 +f 1559/1330 1549/1222 297/1291 +f 1722/1534 1720/1535 1719/1536 +f 1335/1537 596/728 1489/1538 +f 410/238 377/474 378/216 +f 973/506 1163/580 972/505 +f 168/114 399/1352 398/1539 +f 1571/813 1089/1540 1093/811 +f 1572/1541 682/1542 1571/813 +f 749/1543 647/367 754/1544 +f 1730/1545 1723/1546 1729/1547 +f 1594/1385 1729/1547 1592/1390 +f 1729/1547 1591/1548 1592/1390 +f 1731/1549 1730/1545 1583/1381 +f 1384/1550 1377/717 1379/1551 +f 1732/1552 1712/1553 1714/1527 +f 1724/1554 1731/1549 1732/1552 +f 1712/1553 1731/1549 1580/1555 +f 1722/1534 1732/1552 1733/1556 +f 1721/1557 1733/1556 1734/1558 +f 1734/1558 1716/1532 1717/1559 +f 1735/1560 1736/1561 1697/1562 +f 1733/1556 1714/1527 1716/1532 +f 1663/1446 1741/1563 1668/1475 +f 1742/1564 1740/1565 1743/1566 +f 1738/1567 1737/1568 1735/1560 +f 1665/1569 1666/1469 1740/1565 +f 1665/1569 1744/1570 1664/1473 +f 1725/1571 1746/1572 1745/1573 +f 1591/1548 1745/1573 1590/1391 +f 1590/1391 1747/1574 1596/1402 +f 1745/1573 1748/1575 1747/1574 +f 1659/1576 3538/1577 3542/826 +f 3287/1578 3545/1579 3282/1580 +f 1185/591 1182/895 1190/892 +f 1664/1473 1749/1581 1741/1563 +f 1741/1563 1739/1582 1738/1567 +f 1746/1572 1743/1566 1748/1575 +f 1726/1583 1742/1564 1746/1572 +f 1153/574 924/1584 925/496 +f 1115/1585 1153/574 1154/571 +f 1654/1586 3575/1587 1655/1588 +f 1896/1589 3549/1590 1686/1158 +f 1720/1535 1744/1570 1726/1583 +f 1721/1557 1749/1581 1720/1535 +f 1750/1591 1701/1517 1698/1516 +f 1697/1562 1750/1591 1696/1506 +f 1151/566 1063/539 1062/540 +f 1751/1592 1670/1047 1671/1474 +f 1695/1504 1752/1593 1751/1592 +f 961/492 986/1594 981/508 +f 1770/1595 1768/1596 1767/806 +f 1766/1597 1593/1387 1765/1598 +f 1731/1549 1579/1599 1580/1555 +f 1579/1599 1774/1600 1578/1601 +f 3928/1602 1771/1603 3929/1604 +f 1764/1605 3928/1602 3917/1606 +f 1777/1607 1775/1608 1774/1600 +f 3762/1609 1699/1518 1700/1002 +f 1004/1610 1128/1522 1003/1611 +f 1774/1600 1771/1603 1772/1612 +f 987/1613 1399/748 980/1401 +f 1578/1601 1772/1612 1764/1605 +f 1783/1614 1781/1615 1778/1616 +f 1597/1397 1770/1595 1767/806 +f 639/1453 3895/1617 3884/1618 +f 3928/1602 1769/1619 1770/1595 +f 1793/1620 1789/1621 1788/1622 +f 3929/1604 1798/1623 1769/1619 +f 3508/1624 3773/1625 
1806/861 +f 1802/1626 3924/1172 3922/1627 +f 1788/1622 1794/1628 1793/1620 +f 1793/1620 1795/1629 1776/1630 +f 3773/1625 1675/862 1806/861 +f 1584/1631 1783/1614 1789/1621 +f 1771/1603 3920/1632 3929/1604 +f 1801/1633 1798/1623 1800/1634 +f 1768/1596 1798/1623 1799/1635 +f 1775/1608 1797/1636 1771/1603 +f 508/1360 515/556 1520/1358 +f 1796/1637 1802/1626 1797/1636 +f 1800/1634 3920/1632 3922/1627 +f 1794/1628 1804/1638 1795/1629 +f 3532/1639 1752/1593 3547/1640 +f 485/1641 1807/1018 775/423 +f 3809/413 1640/1642 768/411 +f 3837/1643 1810/1169 1653/1644 +f 485/1641 1640/1642 1808/1645 +f 1690/1646 3770/1647 3772/1648 +f 487/295 1648/1442 1811/1649 +f 3839/1650 1816/1651 3822/1652 +f 3838/1653 1648/1442 3830/1445 +f 3839/1650 1811/1649 3838/1653 +f 1812/1654 487/295 1811/1649 +f 1641/1655 767/1656 768/411 +f 465/289 755/401 776/1419 +f 767/1656 3823/1657 3808/412 +f 3822/1652 3826/1658 3847/1659 +f 1809/1660 1815/1661 767/1656 +f 3837/1643 1651/1459 3840/1019 +f 675/1662 3541/1663 3496/1664 +f 1814/1665 1822/1666 1821/1667 +f 1821/1667 3821/1668 3820/1669 +f 1824/1670 1649/1444 1825/1671 +f 1827/1672 1850/957 1805/1673 +f 1668/1475 1662/1674 1663/1446 +f 1779/1675 1848/1676 1828/1677 +f 1790/1678 1805/1673 1794/1628 +f 1833/1679 1853/1680 1852/1681 +f 3521/1682 693/381 575/380 +f 1614/1683 1616/1684 1617/1685 +f 1852/1681 3498/1686 3501/1687 +f 1620/1688 1622/1689 1623/1690 +f 1855/1691 1574/1692 1861/1693 +f 1847/1694 3501/1687 3513/1695 +f 1766/1597 1855/1691 1782/1696 +f 1629/1697 1631/1698 1628/1699 +f 1633/1700 1634/1701 1632/1702 +f 1635/1703 1637/1704 1638/1705 +f 1848/1676 1852/1681 1847/1694 +f 1855/1691 1863/1706 1862/1707 +f 1803/1708 1867/1709 1802/1626 +f 1862/1707 1831/1710 1832/1711 +f 1782/1696 1862/1707 1781/1615 +f 3922/1627 1865/948 1800/1634 +f 1873/1712 1870/1713 1869/1714 +f 1874/1715 799/1716 804/1717 +f 2045/1718 3897/1719 2044/1720 +f 1219/610 1295/667 1225/609 +f 1615/1721 1396/1471 1395/1722 +f 3884/1618 1877/1723 1876/1724 +f 645/1725 3786/1726 3559/1727 +f 1345/663 1035/534 1346/664 +f 634/1451 3884/1618 3880/1728 +f 15/8 1327/1729 14/9 +f 3910/1730 1878/1731 1893/1732 +f 1894/1733 605/1734 1893/1732 +f 3506/1735 627/1495 693/381 +f 3560/1736 645/1725 3559/1727 +f 340/500 454/275 1558/1286 +f 1897/1737 195/126 1898/1738 +f 1889/1739 1877/1723 1890/1740 +f 1901/1741 129/88 1900/1742 +f 196/125 754/1544 232/1743 +f 1705/1744 3921/1509 3916/1512 +f 340/500 166/218 343/498 +f 1570/1745 563/968 561/348 +f 1904/1746 194/124 1897/1737 +f 1781/1615 1832/1711 1780/1747 +f 1905/1748 55/38 1906/1749 +f 1000/1750 995/1751 996/516 +f 986/1594 920/526 987/1613 +f 1191/597 1190/892 1194/594 +f 2094/1752 2075/1753 2093/1754 +f 1920/1755 1919/1756 1915/1757 +f 453/276 1912/1758 1911/1759 +f 1915/1757 1918/1760 1916/1761 +f 3882/1762 2106/1763 3901/1764 +f 1655/1588 3531/1765 1868/1766 +f 1916/1761 1926/1767 1925/1768 +f 704/389 3561/1769 703/390 +f 278/168 1925/1768 1900/1742 +f 3565/1770 1818/1771 1820/1772 +f 1097/543 1115/1585 1114/570 +f 1927/1773 727/375 1923/628 +f 3578/1774 3654/1775 3758/1776 +f 1920/1755 307/186 1929/1777 +f 1929/1777 48/31 728/373 +f 503/356 1497/1778 1509/1046 +f 1902/1779 787/1780 796/1498 +f 1816/1651 3820/1669 3826/1658 +f 1925/1768 1923/628 1924/1781 +f 1535/1782 1568/1783 1534/1784 +f 254/244 258/147 255/149 +f 1509/1046 366/208 367/210 +f 119/194 35/1785 36/16 +f 219/464 293/1088 294/524 +f 141/174 47/191 431/262 +f 542/470 244/318 245/1786 +f 1940/1787 430/263 431/262 +f 1779/1675 1827/1672 1790/1678 +f 1701/1517 3915/1788 3919/1515 +f 1661/1447 
3515/1789 1667/1468 +f 270/162 754/1544 647/367 +f 197/1314 749/1543 196/125 +f 1727/1790 1212/1791 1211/1241 +f 53/37 232/1743 754/1544 +f 1465/894 1479/962 1478/964 +f 1284/1792 3854/1793 1293/661 +f 1951/1794 412/241 413/240 +f 168/114 1554/1795 1553/1264 +f 1486/1039 206/129 207/131 +f 506/1192 78/58 361/1408 +f 994/1796 1007/518 993/1797 +f 951/479 963/507 964/1798 +f 1013/550 3861/670 1351/879 +f 3826/1658 1820/1772 1818/1771 +f 1498/1223 1954/1799 1482/971 +f 299/1006 543/1800 132/90 +f 1754/1801 1756/1802 1753/1803 +f 1681/1804 1683/1805 1682/1806 +f 1696/1506 1695/1504 1697/1562 +f 13/7 738/1807 737/1808 +f 473/420 475/416 474/418 +f 482/291 768/411 1640/1642 +f 204/1809 30/14 205/316 +f 1761/1810 820/1811 819/888 +f 611/738 1125/1812 612/737 +f 1501/1007 1908/1813 1909/1814 +f 1074/1104 711/809 699/801 +f 1960/1815 82/61 83/60 +f 120/1816 1960/1815 123/981 +f 35/1785 87/78 34/17 +f 1773/1817 1025/1060 1031/1063 +f 1965/1818 1569/1819 1535/1782 +f 494/299 471/1426 479/364 +f 1966/1820 1969/1821 1968/1822 +f 446/324 1971/1823 1967/1824 +f 174/115 125/86 126/85 +f 1580/1555 1578/1601 1577/1531 +f 1785/1825 1787/1826 1784/1827 +f 1972/1828 304/1199 305/1093 +f 1254/1273 875/1035 999/1034 +f 1973/1829 342/1502 514/554 +f 321/1830 1973/1829 509/984 +f 1910/1831 275/243 1907/1832 +f 509/984 514/554 508/1360 +f 1486/1039 1527/1833 1485/1097 +f 1974/1834 804/1717 1975/1835 +f 1977/1836 1974/1834 1978/1837 +f 1980/1838 1966/1820 1981/1839 +f 379/219 2/2 295/1840 +f 1718/1841 1710/1842 1701/1517 +f 884/462 992/513 883/1843 +f 1680/1844 1678/1015 1679/1486 +f 902/1845 525/1846 524/692 +f 344/1005 256/148 299/1006 +f 135/1847 137/427 136/429 +f 236/341 228/144 333/1261 +f 448/1848 1967/1824 1966/1820 +f 589/1849 604/1850 584/1462 +f 910/712 908/716 907/525 +f 1981/1839 1968/1822 1987/1851 +f 1982/1852 1987/1851 1979/1853 +f 1988/1854 1987/1851 1989/1855 +f 1990/1856 1989/1855 1991/1857 +f 1991/1857 1993/1858 1992/1859 +f 322/1860 320/983 319/1861 +f 992/513 830/472 883/1843 +f 1144/578 1086/568 1143/1862 +f 1995/1863 1978/1837 1994/1864 +f 1133/558 1118/546 1126/561 +f 1997/1865 1994/1864 1996/1866 +f 870/927 1335/1537 1489/1538 +f 805/443 1982/1852 1998/1867 +f 727/375 2000/1868 1999/1869 +f 1996/1866 1988/1854 1990/1856 +f 1994/1864 1979/1853 1988/1854 +f 1829/1870 792/1871 1273/1312 +f 1830/1872 941/982 983/1021 +f 798/1873 800/441 799/1716 +f 729/1874 731/1875 730/435 +f 2011/1876 2023/1877 2022/1878 +f 1834/1879 846/908 1159/1880 +f 402/1493 284/224 347/1266 +f 2022/1878 2024/1881 2025/1882 +f 1838/1883 989/1024 988/1026 +f 810/515 952/1297 809/452 +f 1823/1884 1840/1885 644/758 +f 1430/822 1432/834 1431/1886 +f 1215/1887 1841/1888 1079/1115 +f 149/100 176/177 177/1889 +f 1316/1372 873/929 872/928 +f 2112/1890 1384/1550 2106/1763 +f 1842/1891 1844/1892 1845/1893 +f 1159/1880 845/907 1160/1195 +f 837/901 1846/1894 1120/1150 +f 1971/1823 2030/1895 2027/1896 +f 517/1897 370/434 189/319 +f 858/917 833/898 812/1230 +f 1471/933 2027/1896 1888/1898 +f 3551/1899 626/1496 627/1495 +f 2174/1900 770/1901 490/1457 +f 935/1045 1854/1902 1010/1044 +f 1613/1484 1611/1903 1618/1904 +f 1231/621 1225/609 1295/667 +f 1856/1905 891/941 892/940 +f 1300/638 748/1906 741/1282 +f 1150/1907 1858/1908 1859/1909 +f 1643/1421 696/1234 1642/1422 +f 1215/1887 1264/1910 1216/1246 +f 1320/1377 902/1845 901/945 +f 1956/1911 1944/1912 1960/1815 +f 2031/1913 669/1450 1474/1914 +f 2034/1915 1310/646 1299/636 +f 1879/1916 1656/1491 1657/1917 +f 40/1305 242/440 39/20 +f 1992/1859 2039/1918 1456/872 +f 888/1919 957/998 
889/938 +f 1882/1920 1996/1866 1990/1856 +f 1078/1112 1255/1274 1077/1111 +f 1871/1921 1192/1922 1090/1121 +f 294/524 218/1212 219/464 +f 1323/645 2026/1923 1471/933 +f 123/981 83/60 1484/979 +f 1324/1924 2041/1925 1323/645 +f 384/1463 359/260 352/203 +f 1324/1924 2038/1926 2036/1927 +f 1883/1928 1885/1929 1829/1870 +f 287/1165 114/155 107/81 +f 1096/1128 1134/1177 661/774 +f 915/956 931/972 916/954 +f 806/442 1981/1839 1982/1852 +f 3533/1930 584/1462 604/1850 +f 1425/800 2042/1931 1018/530 +f 1018/530 1948/1932 1017/528 +f 3562/1933 740/1934 3519/481 +f 1948/1932 649/1935 646/365 +f 1881/1936 1990/1856 1991/1857 +f 1900/1742 130/1157 278/168 +f 992/513 994/1796 993/1797 +f 923/476 925/496 924/1584 +f 3878/1937 2047/1938 3876/1939 +f 1643/1421 625/1940 606/359 +f 2050/1941 2051/1942 2049/1943 +f 2045/1718 1404/759 2047/1938 +f 3876/1939 945/1944 3885/1392 +f 1139/1181 1079/1115 1841/1888 +f 1037/1069 892/940 982/1167 +f 2055/1945 2057/1946 2056/1947 +f 1198/1228 718/1948 717/814 +f 1619/1949 1612/1950 1611/1903 +f 2058/1951 1606/1952 2059/1953 +f 2059/1953 1607/1406 2058/1951 +f 2058/1951 1624/1476 1605/1954 +f 71/1955 179/118 37/19 +f 405/233 1557/1328 382/1281 +f 976/1013 1006/1042 977/1014 +f 322/1860 1972/1828 1911/1759 +f 2061/1956 2063/1957 2062/1958 +f 2066/1959 2067/1960 1825/1671 +f 2052/1961 2064/1962 2051/1942 +f 1609/1963 2066/1959 1647/1964 +f 1515/1965 152/1441 1516/1966 +f 1009/522 993/1797 1007/518 +f 1644/1420 3815/1967 2118/885 +f 279/1030 1916/1761 278/168 +f 1285/1968 3862/640 1284/1792 +f 1647/1964 1825/1671 1649/1444 +f 2056/1947 2067/1960 2065/1969 +f 319/1861 1953/1970 1952/1971 +f 667/777 1913/1972 1914/1973 +f 2070/1974 625/1940 2048/1975 +f 2052/1961 2055/1945 2063/1957 +f 2038/1926 1655/1588 1458/1976 +f 1606/1952 2073/1977 2072/1978 +f 1235/1979 1270/1980 1236/1256 +f 2072/1978 2075/1753 2074/1981 +f 2059/1953 2072/1978 2062/1958 +f 2074/1981 2077/1982 2076/1983 +f 1601/1400 2078/1984 2073/1977 +f 2049/1943 1643/1421 1645/1985 +f 1000/1750 1001/1986 994/1796 +f 1089/1540 684/369 689/368 +f 1931/1987 1306/1364 1930/1988 +f 1462/1989 1021/533 1022/532 +f 1932/1990 1934/1991 1935/1992 +f 875/1035 1846/1894 998/1033 +f 1936/1993 1937/1994 1938/1995 +f 44/140 46/32 92/139 +f 1534/1784 1956/1911 120/1816 +f 1541/1162 317/183 391/230 +f 2081/1996 2084/1997 2083/1998 +f 146/557 1959/1999 300/2000 +f 1360/2001 1942/2002 1361/1440 +f 985/1023 1032/1066 1033/1065 +f 1995/1863 1892/2003 2043/2004 +f 2079/2005 2082/2006 2081/1996 +f 2095/2007 2076/1983 2077/1982 +f 2081/1996 2078/1984 2079/2005 +f 2055/1945 2054/2008 2053/2009 +f 501/456 2095/2007 500/304 +f 2097/2010 2099/2011 2098/2012 +f 2098/2012 2094/1752 2083/1998 +f 2140/2013 3893/2014 3866/2015 +f 2141/2016 3894/2017 3893/2014 +f 2051/1942 2048/1975 2049/1943 +f 592/352 2101/2018 1576/2019 +f 2071/2020 2076/1983 2096/2021 +f 2102/2022 2098/2012 2101/2018 +f 593/2023 2102/2022 592/352 +f 1019/529 746/1283 747/799 +f 1950/2024 12/2025 1327/1729 +f 725/396 2096/2021 501/456 +f 1385/791 2104/2026 1379/1551 +f 1379/1551 2106/1763 1384/1550 +f 2107/2027 2100/630 2097/2010 +f 3908/2028 502/303 3896/2029 +f 3896/2029 499/305 3901/1764 +f 940/2030 2109/2031 944/1393 +f 928/2032 1170/595 973/506 +f 3868/2033 684/369 3899/1394 +f 1962/2034 1964/2035 1961/2036 +f 3878/1937 573/345 3897/1719 +f 2039/1918 1968/1822 1969/1821 +f 1389/2037 2110/2038 2109/2031 +f 1398/747 2113/2039 1397/746 +f 3757/2040 3753/2041 3743/2042 +f 3911/2043 2107/2027 2103/2044 +f 949/553 924/1584 1115/1585 +f 260/150 324/189 326/1321 +f 1390/724 
2113/2039 2110/2038 +f 2070/1974 2064/1962 2071/2020 +f 3867/2045 2103/2044 593/2023 +f 2105/2046 1421/792 2114/2047 +f 2114/2047 1423/797 2115/2048 +f 3892/2049 2115/2048 3903/2050 +f 3908/2028 2114/2047 3892/2049 +f 554/1456 1902/1779 553/1454 +f 972/505 967/497 966/495 +f 1765/1598 1573/2051 1574/1692 +f 1984/2052 1986/2053 1983/2054 +f 2053/2009 2069/2055 2057/1946 +f 3573/2056 724/2057 714/376 +f 1625/1485 1674/1483 1673/1477 +f 3881/2058 2119/2059 2122/2060 +f 3903/2050 2123/2061 3904/2062 +f 56/161 1906/1749 55/38 +f 626/1496 3879/2063 629/1494 +f 2001/2064 2003/2065 2004/2066 +f 3912/2067 2127/2068 3907/2069 +f 2006/2070 2008/2071 2009/2072 +f 2127/2068 1412/764 2128/2073 +f 2014/2074 2016/2075 2017/2076 +f 2018/2077 2020/2078 2021/2079 +f 1495/997 1947/2080 197/1314 +f 1379/1551 1386/786 1385/791 +f 3814/1410 1680/1844 1618/1904 +f 2029/2081 1884/2082 2028/2083 +f 3902/2084 2120/2085 3887/2086 +f 629/1494 3914/631 3872/2087 +f 682/1542 943/2088 1089/1540 +f 724/2057 2121/2089 716/377 +f 1757/991 3821/1668 3845/2090 +f 765/2091 3844/2092 3809/413 +f 3851/2093 715/378 3852/2094 +f 706/1340 3870/2095 3872/2087 +f 981/508 987/1613 980/1401 +f 1457/871 1656/1491 1880/2096 +f 2017/2076 2032/2097 2033/2098 +f 1458/1976 1868/1766 1457/871 +f 1776/1630 1792/2099 1793/1620 +f 1959/1999 1946/2100 1958/2101 +f 3663/805 3725/2102 3661/2103 +f 3913/2104 2122/2060 3850/2105 +f 1093/811 689/368 1088/1379 +f 966/495 968/502 972/505 +f 3852/2094 3881/2058 3913/2104 +f 2172/2106 2170/2107 2171/2108 +f 670/613 2137/2109 669/1450 +f 3761/2110 1903/2111 233/629 +f 1429/823 28/2112 1449/2113 +f 964/1798 978/509 971/501 +f 3841/2114 1413/779 2127/2068 +f 1473/2115 669/1450 2137/2109 +f 3834/680 1359/706 1358/683 +f 2138/2116 1870/1713 2137/2109 +f 1872/2117 2137/2109 1870/1713 +f 715/378 3888/2118 3881/2058 +f 1881/1936 1992/1859 1880/2096 +f 3843/1428 739/482 740/1934 +f 1837/2119 3513/1695 3514/2120 +f 253/146 59/62 85/64 +f 1808/1645 3840/1019 1807/1018 +f 1647/1964 3818/2121 1609/1963 +f 3865/2122 671/611 668/1449 +f 2112/1890 3883/2123 2111/2124 +f 1405/844 2044/1720 1438/830 +f 3829/691 1364/2125 1366/2126 +f 273/1163 391/230 249/165 +f 576/392 3520/2127 3555/2128 +f 3875/2129 668/1449 2031/1913 +f 1430/822 2140/2013 2139/2130 +f 2136/2131 3860/849 28/2112 +f 1430/822 18/12 19/11 +f 3893/2014 570/347 695/2132 +f 1442/835 2046/2133 2141/2016 +f 1431/1886 2141/2016 2140/2013 +f 3869/2134 3850/2105 2136/2131 +f 3808/412 1817/2135 766/2136 +f 3866/2015 695/2132 671/611 +f 2142/2137 798/1873 2138/2116 +f 571/346 2142/2137 694/612 +f 3819/1411 1618/1904 1611/1903 +f 2143/2138 2025/1882 2142/2137 +f 694/612 2138/2116 670/613 +f 681/1418 2144/2139 572/1416 +f 2144/2139 2022/1878 2143/2138 +f 328/394 457/635 290/393 +f 1572/1541 2011/1876 2144/2139 +f 630/395 3903/2050 624/360 +f 2085/2140 2087/2141 2088/2142 +f 2090/2143 2092/2144 2089/2145 +f 572/1416 2143/2138 571/346 +f 1875/2146 2153/2147 2149/2148 +f 1873/1712 2149/2148 2154/2149 +f 2103/2044 2097/2010 2102/2022 +f 2157/2150 2155/2151 2156/2152 +f 2128/2073 1414/766 2120/2085 +f 3874/2153 589/1849 582/1461 +f 3872/2087 628/2154 629/1494 +f 2149/2148 2158/2155 2159/2156 +f 2159/2156 3905/2157 3890/2158 +f 698/388 3902/2084 705/2159 +f 3823/1657 3827/2160 1817/2135 +f 2158/2155 3874/2153 3905/2157 +f 1438/830 2046/2133 1439/828 +f 2035/2161 2160/2162 2161/2163 +f 2161/2163 3889/2164 3886/2165 +f 3871/2166 605/1734 589/1849 +f 3873/2167 18/12 3875/2129 +f 370/434 61/44 451/320 +f 649/1935 413/240 648/366 +f 2153/2147 2156/2152 2158/2155 +f 1428/2168 
1449/2113 1450/2169 +f 1490/1027 73/55 1488/989 +f 3890/2158 710/1383 633/2170 +f 2155/2151 3891/2171 3871/2166 +f 1456/872 1880/2096 1992/1859 +f 2125/2172 2087/2141 2124/2173 +f 2160/2162 3890/2158 3889/2164 +f 300/2000 505/634 323/261 +f 1064/592 1046/538 1065/2174 +f 2043/2004 1891/2175 2155/2151 +f 2129/2176 2004/2066 2003/2065 +f 2131/2177 2133/2178 2134/2179 +f 3848/850 1450/2169 1449/2113 +f 1998/1867 804/1717 805/443 +f 3879/2063 3864/2180 3914/631 +f 474/418 490/1457 770/1901 +f 1164/582 1182/895 1177/589 +f 707/1339 3909/2181 3870/2095 +f 708/2182 2162/2183 707/1339 +f 2162/2183 3886/2165 3909/2181 +f 548/1429 1905/1748 458/281 +f 1145/562 1114/570 1171/587 +f 705/2159 1650/2184 704/389 +f 2043/2004 1977/1836 1995/1863 +f 737/1808 554/1456 559/1455 +f 1034/605 1191/597 1023/593 +f 458/281 1907/1832 275/243 +f 195/126 232/1743 233/629 +f 2040/2185 1969/1821 1970/2186 +f 2145/2187 2147/2188 2148/2189 +f 2054/2008 2049/1943 1645/1985 +f 3832/1443 1639/1435 3818/2121 +f 9/2190 11/2191 10/2192 +f 252/145 506/1192 258/147 +f 472/300 479/364 471/1426 +f 1865/948 3791/1174 3503/2193 +f 900/484 910/712 907/525 +f 1941/2194 2151/2195 1942/2002 +f 1921/2196 1927/1773 1919/1756 +f 2163/2197 1898/1738 2116/2198 +f 271/1003 153/105 178/2199 +f 1904/1746 12/2025 9/2190 +f 282/286 264/1147 460/283 +f 1872/2117 2154/2149 2035/2161 +f 1326/2200 1897/1737 2163/2197 +f 628/2154 693/381 627/1495 +f 1724/1554 1719/1536 1723/1546 +f 1549/1222 436/267 172/117 +f 1582/1382 1730/1545 1729/1547 +f 1921/2196 1929/1777 1928/2201 +f 3563/2202 10/2192 11/2191 +f 2152/2203 1950/2024 1949/2204 +f 475/416 774/425 771/417 +f 769/419 2171/2108 473/420 +f 2171/2108 476/2205 473/420 +f 476/2205 772/422 475/416 +f 1791/2206 464/1311 1760/2207 +f 2166/2208 2168/2209 2169/2210 +f 595/354 372/1217 98/72 +f 483/293 1641/1655 482/291 +f 762/407 2173/853 470/301 +f 771/417 484/292 492/415 +f 486/296 1809/1660 483/293 +f 297/1291 1517/1185 296/1187 +f 763/2211 2174/1900 762/407 +f 776/1419 759/403 489/294 +f 184/374 2176/2212 2000/1868 +f 412/241 303/1201 414/239 +f 2005/2213 185/166 224/277 +f 3811/1488 1682/1806 3846/1017 +f 2164/2214 756/426 764/409 +f 1472/2215 2161/2163 2162/2183 +f 950/480 964/1798 965/494 +f 2177/2216 2179/2217 2180/2218 +f 227/1218 1510/1050 333/1261 +f 764/409 467/410 2164/2214 +f 2182/2219 2184/2220 2181/2221 +f 2186/2222 2018/2077 2185/2223 +f 1985/2224 2181/2221 1986/2053 +f 214/136 729/1874 1531/1119 +f 1545/1200 1546/2225 1569/1819 +f 865/675 842/1433 866/682 +f 849/1431 1357/677 1362/2226 +f 452/274 1558/1286 454/275 +f 559/1455 3188/2227 737/1808 +f 16/2228 737/1808 3188/2227 +f 560/2229 498/2230 374/2231 +f 3193/2232 3195/2233 3194/2234 +f 3200/2235 3198/2236 3199/2237 +f 3211/2238 3213/2239 3212/2240 +f 3215/2241 3217/2242 3216/2243 +f 2187/2244 2002/2245 2001/2064 +f 2169/2210 2189/2246 2190/2247 +f 2192/2248 2085/2140 2191/2249 +f 2193/2250 2195/2251 2196/2252 +f 2198/2253 2200/2254 2197/2255 +f 1998/1867 1979/1853 1975/1835 +f 3664/2256 3668/2257 3671/2258 +f 2116/2198 1903/2111 1902/1779 +f 1859/1909 2009/2072 1150/1907 +f 3904/2062 2126/2259 3906/2260 +f 1135/2261 1087/569 1130/559 +f 2201/2262 2015/2263 2014/2074 +f 1149/2264 110/152 1858/1908 +f 3221/2265 3218/2266 3215/2241 +f 2203/2267 2205/2268 2206/2269 +f 579/344 1895/2270 1019/529 +f 1755/2271 2208/2272 1756/1802 +f 3891/2171 1893/1732 605/1734 +f 1211/1241 2150/2273 1727/1790 +f 868/925 2210/2274 2209/2275 +f 944/1393 930/754 940/2030 +f 2130/2276 2202/2277 2201/2262 +f 233/629 1898/1738 195/126 +f 665/776 750/843 666/2278 +f 
1167/1207 999/1034 618/2279 +f 2211/2280 2213/2281 2214/2282 +f 2215/2283 2217/2284 2218/2285 +f 3867/2045 590/351 3868/2033 +f 2220/2286 220/306 2219/2287 +f 823/447 948/477 1094/552 +f 3225/2288 3227/2289 3226/2290 +f 266/160 455/469 265/158 +f 3882/1762 2107/2027 3883/2123 +f 2221/2291 3519/481 3517/483 +f 2223/2292 2225/2293 2226/2294 +f 2207/2295 2124/2173 2208/2272 +f 673/2296 3869/2134 3864/2180 +f 817/444 996/516 815/451 +f 2227/2297 2146/2298 2145/2187 +f 1947/2080 646/365 749/1543 +f 3900/633 17/10 3873/2167 +f 2229/2299 1638/1705 1381/1464 +f 1679/1486 1618/1904 1680/1844 +f 1637/1704 2232/2300 2230/2301 +f 2233/2302 1617/1685 1616/1684 +f 2231/2303 2236/2304 2237/2305 +f 1381/1464 2238/2306 1382/2307 +f 3586/2308 2242/2309 3587/2310 +f 1630/2311 2233/2302 1631/1698 +f 1681/1804 472/300 469/302 +f 1408/2312 2243/2313 2244/2314 +f 3236/2315 3238/2316 3237/2317 +f 3232/2318 3240/2319 3235/2320 +f 1637/1704 2238/2306 1638/1705 +f 3237/2317 3233/2321 3234/2322 +f 1409/1480 1634/1701 2245/2323 +f 1380/1466 2247/2324 2246/2325 +f 1628/1699 2245/2323 1621/2326 +f 2249/2327 1622/1689 1633/1700 +f 1632/1702 1408/2312 2244/2314 +f 2248/2328 1631/1698 2250/2329 +f 2235/2330 1637/1704 1636/2331 +f 3245/2332 3199/2237 3198/2236 +f 631/1316 2071/2020 725/396 +f 515/556 52/34 49/36 +f 782/858 2251/2333 1251/1270 +f 2253/2334 2255/2335 2252/2336 +f 397/231 459/284 784/428 +f 1456/872 2040/2185 1455/870 +f 2040/2185 2036/1927 1455/870 +f 1401/2337 2111/2124 1398/747 +f 2041/1925 1970/2186 2026/1923 +f 1535/1782 414/239 1965/1818 +f 1586/1389 2080/2338 2079/2005 +f 2256/2339 1132/1175 1886/2340 +f 1700/1002 3918/1520 3919/1515 +f 3815/1967 1686/1158 3817/886 +f 1263/1298 1886/2340 1096/1128 +f 598/2341 844/1245 599/727 +f 2257/2342 2259/2343 2260/2344 +f 2258/2345 2262/2346 2259/2343 +f 2259/2343 2263/2347 2216/2348 +f 2260/2344 2216/2348 2215/2283 +f 1967/1824 1970/2186 1969/1821 +f 2212/2349 1963/2350 2213/2281 +f 922/2351 2218/2285 2217/2284 +f 1312/1369 1101/1131 2264/2352 +f 1307/2353 1286/2354 789/864 +f 1548/2355 1546/2225 1972/1828 +f 2266/2356 2268/2357 2265/2358 +f 2265/2358 2270/2359 2269/2360 +f 2272/2361 2269/2360 2271/2362 +f 2273/2363 2265/2358 2272/2361 +f 2275/2364 2277/2365 2274/2366 +f 2278/2367 2277/2365 2279/2368 +f 2284/2369 3617/2370 3639/2371 +f 2287/2372 2286/2373 2281/2374 +f 2288/2375 2283/2376 2287/2372 +f 2290/2377 3639/2371 3620/2378 +f 2291/2379 2293/2380 2294/2381 +f 2295/2382 2294/2381 2279/2368 +f 2296/2383 2291/2379 2295/2382 +f 2297/2384 2292/2385 2291/2379 +f 2294/2381 2299/2386 2300/2387 +f 2279/2368 2300/2387 2278/2367 +f 2298/2388 2302/2389 2292/2385 +f 2292/2385 2303/2390 2293/2380 +f 2293/2380 2304/2391 2299/2386 +f 2304/2391 2306/2392 2299/2386 +f 2306/2392 2300/2387 2299/2386 +f 2308/2393 2310/2394 2307/2395 +f 2302/2389 2310/2394 2303/2390 +f 2301/2396 2307/2395 2302/2389 +f 2312/2397 2307/2395 2311/2398 +f 2313/2399 2315/2400 2316/2401 +f 2318/2402 2320/2403 2317/2404 +f 3748/2405 2322/2406 2323/2407 +f 2324/2408 2273/2363 2326/2409 +f 2313/2399 2326/2409 2327/2410 +f 2328/2411 2313/2399 2316/2401 +f 2329/2412 2324/2408 2328/2411 +f 2331/2413 2333/2414 2330/2415 +f 2330/2415 2267/2416 2266/2356 +f 2325/2417 2266/2356 2273/2363 +f 2329/2412 2330/2415 2325/2417 +f 2333/2414 2335/2418 2267/2416 +f 2336/2419 2333/2414 2332/2420 +f 2338/2421 2332/2420 2337/2422 +f 2331/2413 2337/2422 2332/2420 +f 2329/2412 2339/2423 2331/2413 +f 2328/2411 2340/2424 2329/2412 +f 2316/2401 2341/2425 2328/2411 +f 2342/2426 2315/2400 2343/2427 +f 2344/2428 2346/2429 2347/2430 
+f 2341/2425 2347/2430 2340/2424 +f 2342/2426 2344/2428 2341/2425 +f 2348/2431 2345/2432 2344/2428 +f 2350/2433 2352/2434 2353/2435 +f 2339/2423 2353/2435 2337/2422 +f 2340/2424 2350/2433 2339/2423 +f 2347/2430 2351/2436 2350/2433 +f 2337/2422 2354/2437 2338/2421 +f 2353/2435 2355/2438 2354/2437 +f 2356/2439 2276/2440 2357/2441 +f 2356/2439 2358/2442 2359/2443 +f 2356/2439 2296/2383 2295/2382 +f 2279/2368 2356/2439 2295/2382 +f 2360/2444 2270/2359 2268/2357 +f 2267/2416 2360/2444 2268/2357 +f 2362/2445 2364/2446 2365/2447 +f 2362/2445 2361/2448 2358/2442 +f 2366/2449 2358/2442 2357/2441 +f 2366/2449 2363/2450 2362/2445 +f 2357/2441 2368/2451 2366/2449 +f 2366/2449 2369/2452 2367/2453 +f 2276/2440 2370/2454 2368/2451 +f 2368/2451 2282/2455 2369/2452 +f 2372/2456 2374/2457 2371/2458 +f 2375/2459 2377/2460 2007/2461 +f 2378/2462 2380/2463 2381/2464 +f 2382/2465 2381/2464 2383/2466 +f 2384/2467 2378/2462 2382/2465 +f 2385/2468 2379/2469 2378/2462 +f 2388/2470 2390/2471 2387/2472 +f 2387/2472 2392/2473 2391/2474 +f 2393/2475 2387/2472 2391/2474 +f 2394/2476 2388/2470 2387/2472 +f 2186/2222 2402/2477 2400/2478 +f 2403/2479 2327/2410 2326/2409 +f 2272/2361 2326/2409 2273/2363 +f 2271/2362 2403/2479 2272/2361 +f 2405/2480 2404/2481 2403/2479 +f 2404/2481 2408/2482 2327/2410 +f 2409/2483 2411/2484 2412/2485 +f 2409/2483 2271/2362 2269/2360 +f 2413/2486 2269/2360 2270/2359 +f 2413/2486 2410/2487 2409/2483 +f 2417/2488 2416/2489 2418/2490 +f 2417/2488 2406/2491 2405/2480 +f 2412/2485 2405/2480 2271/2362 +f 2411/2484 2417/2488 2412/2485 +f 2406/2491 2407/2492 2404/2481 +f 3569/1337 632/2493 713/1335 +f 2420/2494 2397/2495 2421/2496 +f 2420/2494 2423/2497 2422/2498 +f 2424/2499 2422/2498 2419/2500 +f 2424/2499 2398/2501 2420/2494 +f 2425/2502 2392/2473 2426/2503 +f 2421/2496 2426/2503 2423/2497 +f 2397/2495 2425/2502 2421/2496 +f 2396/2504 2391/2474 2425/2502 +f 2427/2505 2315/2400 2314/2506 +f 2427/2505 2408/2482 2428/2507 +f 2430/2508 2428/2507 2429/2509 +f 2343/2427 2427/2505 2430/2508 +f 2432/2510 2430/2508 2431/2511 +f 2431/2511 2429/2509 2433/2512 +f 2435/2513 2433/2512 2434/2514 +f 2436/2515 2431/2511 2435/2513 +f 2428/2507 2407/2492 2437/2516 +f 2422/2498 2407/2492 2419/2500 +f 2438/2517 2422/2498 2423/2497 +f 2429/2509 2437/2516 2438/2517 +f 2440/2518 2433/2512 2439/2519 +f 2441/2520 2433/2512 2429/2509 +f 2442/2521 2439/2519 2441/2520 +f 2443/2522 2440/2518 2439/2519 +f 2445/2523 2444/2524 2443/2522 +f 2447/2525 2443/2522 2442/2521 +f 2383/2466 2445/2523 2447/2525 +f 2381/2464 2446/2526 2445/2523 +f 2448/2527 2423/2497 2426/2503 +f 2449/2528 2426/2503 2392/2473 +f 2441/2520 2449/2528 2442/2521 +f 2441/2520 2438/2517 2448/2527 +f 2449/2528 2390/2471 2450/2529 +f 2450/2529 2389/2530 2451/2531 +f 2447/2525 2451/2531 2383/2466 +f 2442/2521 2450/2529 2447/2525 +f 2451/2531 2453/2532 2452/2533 +f 2452/2533 2455/2534 2454/2535 +f 2382/2465 2454/2535 2384/2467 +f 2383/2466 2452/2533 2382/2465 +f 2456/2536 2373/2537 2372/2456 +f 2459/2538 2461/2539 2458/2540 +f 2463/2541 2465/2542 2462/2543 +f 2462/2543 2285/2544 2284/2369 +f 2466/2545 2284/2369 2290/2377 +f 2467/2546 2462/2543 2466/2545 +f 2461/2539 3703/2547 3701/2548 +f 2468/2549 2470/2550 2471/2551 +f 2472/2552 2471/2551 2473/2553 +f 2474/2554 3701/2548 3688/2555 +f 2475/2556 2477/2557 2478/2558 +f 2458/2540 2453/2532 2479/2559 +f 2479/2559 2389/2530 2388/2470 +f 2395/2560 2479/2559 2388/2470 +f 2480/2561 2458/2540 2479/2559 +f 2481/2562 2483/2563 2478/2558 +f 2484/2564 2485/2565 221/2566 +f 2471/2551 2464/2567 2463/2541 +f 2473/2553 2463/2541 
2467/2546 +f 2487/2568 2466/2545 2486/2569 +f 2486/2569 2290/2377 2488/2570 +f 2490/2571 2488/2570 2489/2572 +f 2491/2573 2486/2569 2490/2571 +f 2492/2574 2467/2546 2487/2568 +f 2493/2575 2487/2568 2491/2573 +f 2494/2576 2492/2574 2493/2575 +f 2495/2577 2473/2553 2492/2574 +f 3690/2578 2474/2554 3688/2555 +f 3691/2579 2496/2580 3690/2578 +f 2498/2581 2493/2575 2499/2582 +f 2500/2583 2499/2582 2501/2584 +f 3700/2585 2498/2581 2500/2583 +f 3687/2586 2494/2576 2498/2581 +f 2306/2392 2278/2367 2300/2387 +f 2305/2587 2504/2588 2306/2392 +f 2504/2588 2507/2589 2506/2590 +f 2506/2590 2288/2375 2508/2591 +f 2274/2366 2508/2591 2275/2364 +f 2278/2367 2506/2590 2274/2366 +f 3633/2592 2489/2572 2488/2570 +f 3620/2378 2488/2570 2290/2377 +f 2507/2589 2289/2593 2288/2375 +f 2505/2594 2509/2595 2507/2589 +f 2508/2591 2287/2372 2511/2596 +f 2511/2596 2281/2374 2280/2597 +f 2370/2454 2280/2597 2282/2455 +f 2275/2364 2511/2596 2370/2454 +f 2512/2598 2489/2572 2513/2599 +f 2514/2600 2513/2599 2515/2601 +f 2517/2602 2514/2600 2516/2603 +f 2517/2602 2490/2571 2512/2598 +f 2499/2582 2491/2573 2517/2602 +f 2499/2582 2516/2603 2501/2584 +f 2510/2604 2519/2605 2518/2606 +f 2518/2606 2521/2607 2520/2608 +f 3616/2609 2520/2608 3621/2610 +f 3632/2611 2518/2606 3616/2609 +f 2522/2612 2303/2390 2310/2394 +f 2523/2613 2310/2394 2309/2614 +f 2305/2587 2522/2612 2524/2615 +f 2524/2615 2523/2613 2525/2616 +f 2519/2605 2525/2616 2521/2607 +f 2505/2594 2524/2615 2519/2605 +f 2308/2393 2527/2617 2526/2618 +f 2526/2618 2529/2619 2528/2620 +f 2531/2621 2528/2620 2530/2622 +f 2309/2614 2526/2618 2531/2621 +f 1214/1243 2151/2195 1211/1241 +f 2419/2500 2418/2490 2424/2499 +f 2533/2623 2349/2624 2317/2404 +f 2534/2625 2317/2404 2320/2403 +f 2535/2626 2533/2623 2534/2625 +f 2536/2627 2345/2432 2533/2623 +f 2537/2628 2346/2429 2536/2627 +f 2538/2629 2536/2627 2535/2626 +f 2539/2630 2537/2628 2538/2629 +f 2540/2631 2351/2436 2537/2628 +f 2312/2397 2541/2632 2527/2617 +f 2527/2617 2542/2633 2529/2619 +f 2543/2634 2434/2514 2544/2635 +f 2545/2636 2544/2635 2546/2637 +f 2319/2638 2543/2634 2545/2636 +f 2318/2402 2435/2513 2543/2634 +f 2514/2600 2548/2639 2547/2640 +f 2547/2640 2550/2641 2549/2642 +f 2552/2643 2549/2642 2551/2644 +f 2552/2643 2514/2600 2547/2640 +f 2520/2608 2554/2645 2553/2646 +f 2553/2646 2556/2647 2555/2648 +f 3590/2649 2555/2648 3593/2650 +f 3621/2610 2553/2646 3590/2649 +f 2523/2613 2531/2621 2557/2651 +f 2557/2651 2530/2622 2558/2652 +f 2525/2616 2557/2651 2559/2653 +f 2559/2653 2558/2652 2560/2654 +f 2554/2645 2560/2654 2556/2647 +f 2521/2607 2559/2653 2554/2645 +f 2561/2655 2007/2461 2006/2070 +f 2376/2656 2225/2293 2377/2460 +f 2564/2657 2566/2658 2563/2659 +f 2563/2659 338/511 2567/2660 +f 2569/2661 2567/2660 2568/2662 +f 2570/2663 2563/2659 2569/2661 +f 2572/2664 2574/2665 2571/2666 +f 2571/2666 2576/2667 2575/2668 +f 2544/2635 2575/2668 2546/2637 +f 2544/2635 2572/2664 2571/2666 +f 2578/2669 2569/2661 2577/2670 +f 2579/2671 2569/2661 2568/2662 +f 2574/2665 2579/2671 2576/2667 +f 2573/2672 2577/2670 2574/2665 +f 2581/2673 2583/2674 2580/2675 +f 2584/2676 2583/2674 2585/2677 +f 3697/2678 2580/2675 2584/2676 +f 3676/2679 2580/2675 3675/2680 +f 2582/2681 2552/2643 2583/2674 +f 2583/2674 2551/2644 2585/2677 +f 2500/2583 2589/2682 2588/2683 +f 2590/2684 2589/2682 2591/2685 +f 3693/2686 2588/2683 2590/2684 +f 3695/2687 2500/2583 2588/2683 +f 2501/2584 2591/2685 2589/2682 +f 2592/2688 2594/2689 2595/2690 +f 2386/2691 2503/2692 2502/2693 +f 2596/2694 2386/2691 2374/2457 +f 2596/2694 2373/2537 2597/2695 +f 2599/2696 
+[Wavefront OBJ mesh data: several thousand triangular face records of the form `f v/vt v/vt v/vt` (vertex/texture-coordinate index pairs), added verbatim by this diff]
4047/4060 +f 4045/4061 4048/4063 4049/4064 +f 4046/4059 4049/4064 4050/4065 +f 4051/4062 4055/4066 4056/4067 +f 4048/4063 4054/4068 4049/4064 +f 4049/4064 4055/4066 4050/4065 +f 4056/4067 4059/4069 4060/4070 +f 4053/4071 4058/4072 4054/4068 +f 4055/4066 4058/4072 4059/4069 +f 3932/3964 4052/4073 3936/4074 +f 3936/4074 4052/4073 3940/3967 +f 3940/3967 4052/4073 3944/3971 +f 3944/3971 4052/4073 3948/4075 +f 3948/4075 4052/4073 3952/3978 +f 3952/3978 4052/4073 3956/3983 +f 3956/3983 4052/4073 3960/3987 +f 3960/3987 4052/4073 3964/4076 +f 3964/4076 4052/4073 3968/3993 +f 3968/3993 4052/4073 3972/4077 +f 3972/4077 4052/4073 3976/4000 +f 3976/4000 4052/4073 3980/4078 +f 3980/4078 4052/4073 3984/4007 +f 3984/4007 4052/4073 3988/4079 +f 3988/4079 4052/4073 3992/4015 +f 3992/4015 4052/4073 3996/4080 +f 3996/4080 4052/4073 4000/4021 +f 4000/4021 4052/4073 4004/4081 +f 4004/4081 4052/4073 4008/4028 +f 4008/4028 4052/4073 4012/4082 +f 4012/4082 4052/4073 4016/4035 +f 4016/4035 4052/4073 4020/4083 +f 4020/4083 4052/4073 4024/4043 +f 4024/4043 4052/4073 4028/4084 +f 4028/4084 4052/4073 4032/4050 +f 4032/4050 4052/4073 4036/4085 +f 4036/4085 4052/4073 4040/4056 +f 4040/4056 4052/4073 4044/4086 +f 4044/4086 4052/4073 4048/4063 +f 4048/4063 4052/4073 4053/4071 +f 4053/4071 4052/4073 4057/4087 +f 4060/4070 3934/3959 3935/3963 +f 4058/4072 3932/3964 3933/3965 +f 4058/4072 3934/3959 4059/4069 +f 4057/4087 4052/4073 3932/3964 +f 4069/4088 4081/4089 4082/4090 +f 4066/4091 4080/4092 4067/4093 +f 4065/4094 4077/4095 4078/4096 +f 4063/4097 4075/4098 4076/4099 +f 4071/4100 4085/4101 4072/4102 +f 4069/4088 4083/4103 4070/4104 +f 4067/4093 4081/4089 4068/4105 +f 4066/4106 4078/4096 4079/4107 +f 4063/4097 4077/4095 4064/4108 +f 4061/4109 4075/4098 4062/4110 +f 4072/4102 4086/4111 4073/4112 +f 4071/4100 4083/4103 4084/4113 +f 4076/4099 4090/4114 4077/4095 +f 4074/4115 4088/4116 4075/4098 +f 4086/4111 4098/4117 4099/4118 +f 4083/4103 4097/4119 4084/4113 +f 4082/4090 4094/4120 4095/4121 +f 4079/4122 4093/4123 4080/4092 +f 4078/4096 4090/4114 4091/4124 +f 4076/4099 4088/4116 4089/4125 +f 4085/4101 4097/4119 4098/4117 +f 4082/4090 4096/4126 4083/4103 +f 4080/4092 4094/4120 4081/4089 +f 4079/4107 4091/4124 4092/4127 +f 4089/4125 4103/4128 4090/4114 +f 4088/4116 4100/4129 4101/4130 +f 4098/4117 4112/4131 4099/4118 +f 4096/4126 4110/4132 4097/4119 +f 4095/4121 4107/4133 4108/4134 +f 4092/4135 4106/4136 4093/4123 +f 4090/4114 4104/4137 4091/4124 +f 4089/4125 4101/4130 4102/4138 +f 4097/4119 4111/4139 4098/4117 +f 4095/4121 4109/4140 4096/4126 +f 4094/4120 4106/4136 4107/4133 +f 4092/4127 4104/4137 4105/4141 +f 4103/4128 4115/4142 4116/4143 +f 4100/4129 4114/4144 4101/4130 +f 4112/4131 4124/4145 4125/4146 +f 4110/4132 4122/4147 4123/4148 +f 4107/4133 4121/4149 4108/4134 +f 4105/4150 4119/4151 4106/4136 +f 4103/4128 4117/4152 4104/4137 +f 4101/4130 4115/4142 4102/4138 +f 4110/4132 4124/4145 4111/4139 +f 4109/4140 4121/4149 4122/4147 +f 4107/4133 4119/4151 4120/4153 +f 4105/4141 4117/4152 4118/4154 +f 4115/4142 4129/4155 4116/4143 +f 4114/4144 4126/4156 4127/4157 +f 4124/4145 4138/4158 4125/4146 +f 4123/4148 4135/4159 4136/4160 +f 4120/4153 4134/4161 4121/4149 +f 4119/4151 4131/4162 4132/4163 +f 4117/4152 4129/4155 4130/4164 +f 4114/4144 4128/4165 4115/4142 +f 4124/4145 4136/4160 4137/4166 +f 4122/4147 4134/4161 4135/4159 +f 4119/4151 4133/4167 4120/4153 +f 4117/4152 4131/4168 4118/4154 +f 4129/4155 4141/4169 4142/4170 +f 4126/4156 4140/4171 4127/4157 +f 4138/4158 4150/4172 4151/4173 +f 4136/4160 4148/4174 4149/4175 +f 4133/4167 
4147/4176 4134/4161 +f 4131/4162 4145/4177 4132/4163 +f 4130/4164 4142/4170 4143/4178 +f 4127/4157 4141/4169 4128/4165 +f 4137/4166 4149/4175 4150/4172 +f 4135/4159 4147/4176 4148/4174 +f 4132/4163 4146/4179 4133/4167 +f 4131/4168 4143/4178 4144/4180 +f 4142/4170 4154/4181 4155/4182 +f 4140/4171 4152/4183 4153/4184 +f 4150/4172 4164/4185 4151/4173 +f 4149/4175 4161/4186 4162/4187 +f 4146/4179 4160/4188 4147/4176 +f 4144/4189 4158/4190 4145/4177 +f 4142/4170 4156/4191 4143/4178 +f 4140/4171 4154/4181 4141/4169 +f 4149/4175 4163/4192 4150/4172 +f 4148/4174 4160/4188 4161/4186 +f 4146/4179 4158/4190 4159/4193 +f 4144/4180 4156/4191 4157/4194 +f 4155/4182 4167/4195 4168/4196 +f 4153/4184 4165/4197 4166/4198 +f 4163/4192 4177/4199 4164/4185 +f 4161/4186 4175/4200 4162/4187 +f 4159/4193 4173/4201 4160/4188 +f 4158/4190 4170/4202 4171/4203 +f 4155/4182 4169/4204 4156/4191 +f 4153/4184 4167/4195 4154/4181 +f 4163/4192 4175/4200 4176/4205 +f 4161/4186 4173/4201 4174/4206 +f 4159/4193 4171/4203 4172/4207 +f 4156/4191 4170/4208 4157/4194 +f 4167/4195 4181/4209 4168/4196 +f 4165/4197 4179/4210 4166/4198 +f 4176/4205 4190/4211 4177/4199 +f 4175/4200 4187/4212 4188/4213 +f 4173/4201 4185/4214 4186/4215 +f 4170/4202 4184/4216 4171/4203 +f 4169/4204 4181/4209 4182/4217 +f 4167/4195 4179/4210 4180/4218 +f 4175/4200 4189/4219 4176/4205 +f 4173/4201 4187/4212 4174/4206 +f 4171/4203 4185/4214 4172/4207 +f 4170/4208 4182/4217 4183/4220 +f 4180/4218 4194/4221 4181/4209 +f 4178/4222 4192/4223 4179/4210 +f 4190/4211 4202/4224 4203/4225 +f 4187/4212 4201/4226 4188/4213 +f 4186/4215 4198/4227 4199/4228 +f 4183/4229 4197/4230 4184/4216 +f 4182/4217 4194/4221 4195/4231 +f 4180/4218 4192/4223 4193/4232 +f 4189/4219 4201/4226 4202/4224 +f 4186/4215 4200/4233 4187/4212 +f 4184/4216 4198/4227 4185/4214 +f 4183/4220 4195/4231 4196/4234 +f 4193/4232 4207/4235 4194/4221 +f 4192/4223 4204/4236 4205/4237 +f 4202/4224 4216/4238 4203/4225 +f 4200/4233 4214/4239 4201/4226 +f 4199/4228 4211/4240 4212/4241 +f 4196/4242 4210/4243 4197/4230 +f 4194/4221 4208/4244 4195/4231 +f 4193/4232 4205/4237 4206/4245 +f 4201/4226 4215/4246 4202/4224 +f 4199/4228 4213/4247 4200/4233 +f 4198/4227 4210/4243 4211/4240 +f 4196/4234 4208/4244 4209/4248 +f 4207/4235 4219/4249 4220/4250 +f 4204/4236 4218/4251 4205/4237 +f 4216/4238 4228/4252 4229/4253 +f 4213/4247 4227/4254 4214/4239 +f 4211/4240 4225/4255 4212/4241 +f 4209/4256 4223/4257 4210/4243 +f 4207/4235 4221/4258 4208/4244 +f 4205/4237 4219/4249 4206/4245 +f 4214/4239 4228/4252 4215/4246 +f 4213/4247 4225/4255 4226/4259 +f 4211/4240 4223/4257 4224/4260 +f 4209/4248 4221/4258 4222/4261 +f 4219/4249 4233/4262 4220/4250 +f 4218/4251 4230/4263 4231/4264 +f 4228/4252 4242/4265 4229/4253 +f 4226/4259 4240/4266 4227/4254 +f 4225/4255 4237/4267 4238/4268 +f 4223/4257 4235/4269 4236/4270 +f 4221/4258 4233/4262 4234/4271 +f 4218/4251 4232/4272 4219/4249 +f 4228/4252 4240/4266 4241/4273 +f 4226/4259 4238/4268 4239/4274 +f 4223/4257 4237/4267 4224/4260 +f 4221/4258 4235/4275 4222/4261 +f 4233/4262 4245/4276 4246/4277 +f 4230/4263 4244/4278 4231/4264 +f 4242/4265 4254/4279 4255/4280 +f 4240/4266 4252/4281 4253/4282 +f 4237/4267 4251/4283 4238/4268 +f 4235/4269 4249/4284 4236/4270 +f 4234/4271 4246/4277 4247/4285 +f 4231/4264 4245/4276 4232/4272 +f 4241/4273 4253/4282 4254/4279 +f 4239/4274 4251/4283 4252/4281 +f 4236/4270 4250/4286 4237/4267 +f 4235/4275 4247/4285 4248/4287 +f 4246/4277 4258/4288 4259/4289 +f 4244/4278 4256/4290 4257/4291 +f 4254/4279 4268/4292 4255/4280 +f 4253/4282 4265/4293 4266/4294 +f 
4250/4286 4264/4295 4251/4283 +f 4248/4296 4262/4297 4249/4284 +f 4246/4277 4260/4298 4247/4285 +f 4244/4278 4258/4288 4245/4276 +f 4253/4282 4267/4299 4254/4279 +f 4252/4281 4264/4295 4265/4293 +f 4250/4286 4262/4297 4263/4300 +f 4248/4287 4260/4298 4261/4301 +f 4259/4289 4271/4302 4272/4303 +f 4257/4291 4269/4304 4270/4305 +f 4267/4299 4281/4306 4268/4292 +f 4265/4293 4279/4307 4266/4294 +f 4263/4300 4277/4308 4264/4295 +f 4262/4297 4274/4309 4275/4310 +f 4259/4289 4273/4311 4260/4298 +f 4257/4291 4271/4302 4258/4288 +f 4266/4294 4280/4312 4267/4299 +f 4265/4293 4277/4308 4278/4313 +f 4263/4300 4275/4310 4276/4314 +f 4260/4298 4274/4315 4261/4301 +f 4271/4302 4285/4316 4272/4303 +f 4269/4304 4283/4317 4270/4305 +f 4281/4306 4293/4318 4294/4319 +f 4279/4307 4291/4320 4292/4321 +f 4277/4308 4289/4322 4290/4323 +f 4274/4309 4288/4324 4275/4310 +f 4273/4311 4285/4316 4286/4325 +f 4271/4302 4283/4317 4284/4326 +f 4280/4312 4292/4321 4293/4318 +f 4277/4308 4291/4320 4278/4313 +f 4275/4310 4289/4322 4276/4314 +f 4274/4315 4286/4325 4287/4327 +f 4284/4326 4298/4328 4285/4316 +f 4282/4329 4296/4330 4283/4317 +f 4294/4319 4306/4331 4307/4332 +f 4291/4320 4305/4333 4292/4321 +f 4290/4323 4302/4334 4303/4335 +f 4287/4336 4301/4337 4288/4324 +f 4286/4325 4298/4328 4299/4338 +f 4284/4326 4296/4330 4297/4339 +f 4293/4318 4305/4333 4306/4331 +f 4290/4323 4304/4340 4291/4320 +f 4288/4324 4302/4334 4289/4322 +f 4287/4327 4299/4338 4300/4341 +f 4297/4339 4311/4342 4298/4328 +f 4296/4330 4308/4343 4309/4344 +f 4306/4331 4320/4345 4307/4332 +f 4304/4340 4318/4346 4305/4333 +f 4303/4335 4315/4347 4316/4348 +f 4300/4349 4314/4350 4301/4337 +f 4298/4328 4312/4351 4299/4338 +f 4297/4339 4309/4344 4310/4352 +f 4305/4333 4319/4353 4306/4331 +f 4304/4340 4316/4348 4317/4354 +f 4302/4334 4314/4350 4315/4347 +f 4300/4341 4312/4351 4313/4355 +f 4310/4352 4324/4356 4311/4342 +f 4308/4343 4322/4357 4309/4344 +f 4320/4345 4332/4358 4333/4359 +f 4318/4346 4330/4360 4331/4361 +f 4316/4348 4328/4362 4329/4363 +f 4313/4364 4327/4365 4314/4350 +f 4311/4342 4325/4366 4312/4351 +f 4310/4352 4322/4357 4323/4367 +f 4318/4346 4332/4358 4319/4353 +f 4316/4348 4330/4360 4317/4354 +f 4315/4347 4327/4365 4328/4362 +f 4312/4351 4326/4368 4313/4355 +f 4323/4367 4337/4369 4324/4356 +f 4322/4357 4334/4370 4335/4371 +f 4332/4358 4346/4372 4333/4359 +f 4330/4360 4344/4373 4331/4361 +f 4329/4363 4341/4374 4342/4375 +f 4327/4365 4339/4376 4340/4377 +f 4325/4366 4337/4369 4338/4378 +f 4322/4357 4336/4379 4323/4367 +f 4332/4358 4344/4373 4345/4380 +f 4330/4360 4342/4375 4343/4381 +f 4327/4365 4341/4374 4328/4362 +f 4325/4366 4339/4382 4326/4368 +f 4337/4369 4350/4383 4351/4384 +f 4334/4370 4349/4385 4335/4371 +f 4346/4372 4359/4386 4360/4387 +f 4344/4373 4357/4388 4358/4389 +f 4341/4374 4356/4390 4342/4375 +f 4340/4377 4353/4391 4354/4392 +f 4338/4378 4351/4384 4352/4393 +f 4335/4371 4350/4383 4336/4379 +f 4345/4380 4358/4389 4359/4386 +f 4343/4381 4356/4390 4357/4388 +f 4340/4377 4355/4394 4341/4374 +f 4338/4378 4353/4395 4339/4382 +f 4351/4384 4363/4396 4364/4397 +f 4349/4385 4361/4398 4362/4399 +f 4359/4386 4373/4400 4360/4387 +f 4358/4389 4370/4401 4371/4402 +f 4355/4394 4369/4403 4356/4390 +f 4354/4392 4366/4404 4367/4405 +f 4351/4384 4365/4406 4352/4393 +f 4349/4385 4363/4396 4350/4383 +f 4358/4389 4372/4407 4359/4386 +f 4357/4388 4369/4403 4370/4401 +f 4355/4394 4367/4405 4368/4408 +f 4352/4393 4366/4409 4353/4395 +f 4364/4397 4376/4410 4377/4411 +f 4362/4399 4374/4412 4375/4413 +f 4373/4400 4385/4414 4386/4415 +f 4370/4401 4384/4416 
4371/4402 +f 4368/4408 4382/4417 4369/4403 +f 4367/4405 4379/4418 4380/4419 +f 4364/4397 4378/4420 4365/4406 +f 4362/4399 4376/4410 4363/4396 +f 4372/4407 4384/4416 4385/4414 +f 4370/4401 4382/4417 4383/4421 +f 4368/4408 4380/4419 4381/4422 +f 4365/4406 4379/4423 4366/4409 +f 4376/4410 4390/4424 4377/4411 +f 4374/4412 4388/4425 4375/4413 +f 4385/4414 4399/4426 4386/4415 +f 4384/4416 4396/4427 4397/4428 +f 4382/4417 4394/4429 4395/4430 +f 4379/4418 4393/4431 4380/4419 +f 4378/4420 4390/4424 4391/4432 +f 4376/4410 4388/4425 4389/4433 +f 4384/4416 4398/4434 4385/4414 +f 4382/4417 4396/4427 4383/4421 +f 4380/4419 4394/4429 4381/4422 +f 4379/4423 4391/4432 4392/4435 +f 4389/4433 4403/4436 4390/4424 +f 4387/4437 4401/4438 4388/4425 +f 4399/4426 4411/4439 4412/4440 +f 4396/4427 4410/4441 4397/4428 +f 4395/4430 4407/4442 4408/4443 +f 4393/4431 4405/4444 4406/4445 +f 4391/4432 4403/4436 4404/4446 +f 4389/4433 4401/4438 4402/4447 +f 4398/4434 4410/4441 4411/4439 +f 4395/4430 4409/4448 4396/4427 +f 4393/4431 4407/4442 4394/4429 +f 4391/4432 4405/4449 4392/4435 +f 4402/4447 4416/4450 4403/4436 +f 4401/4438 4413/4451 4414/4452 +f 4411/4439 4425/4453 4412/4440 +f 4409/4448 4423/4454 4410/4441 +f 4408/4443 4420/4455 4421/4456 +f 4406/4445 4418/4457 4419/4458 +f 4403/4436 4417/4459 4404/4446 +f 4402/4447 4414/4452 4415/4460 +f 4410/4441 4424/4461 4411/4439 +f 4408/4443 4422/4462 4409/4448 +f 4407/4442 4419/4458 4420/4455 +f 4404/4446 4418/4463 4405/4449 +f 4416/4450 4428/4464 4429/4465 +f 4413/4451 4427/4466 4414/4452 +f 4425/4453 4437/4467 4438/4468 +f 4423/4454 4435/4469 4436/4470 +f 4421/4456 4433/4471 4434/4472 +f 4418/4457 4432/4473 4419/4458 +f 4416/4450 4430/4474 4417/4459 +f 4415/4460 4427/4466 4428/4464 +f 4423/4454 4437/4467 4424/4461 +f 4421/4456 4435/4469 4422/4462 +f 4420/4455 4432/4473 4433/4471 +f 4418/4463 4430/4474 4431/4475 +f 4428/4464 4442/4476 4429/4465 +f 4426/4477 4440/4478 4427/4466 +f 4437/4467 4451/4479 4438/4468 +f 4435/4469 4449/4480 4436/4470 +f 4434/4472 4446/4481 4447/4482 +f 4432/4473 4444/4483 4445/4484 +f 4430/4474 4442/4476 4443/4485 +f 4427/4466 4441/4486 4428/4464 +f 4437/4467 4449/4480 4450/4487 +f 4435/4469 4447/4482 4448/4488 +f 4432/4473 4446/4481 4433/4471 +f 4430/4474 4444/4489 4431/4475 +f 4442/4476 4454/4490 4455/4491 +f 4439/4492 4453/4493 4440/4478 +f 4451/4479 4463/4494 4464/4495 +f 4449/4480 4461/4496 4462/4497 +f 4446/4481 4460/4498 4447/4482 +f 4444/4483 4458/4499 4445/4484 +f 4443/4485 4455/4491 4456/4500 +f 4440/4478 4454/4490 4441/4486 +f 4450/4487 4462/4497 4463/4494 +f 4447/4482 4461/4496 4448/4488 +f 4445/4484 4459/4501 4446/4481 +f 4444/4489 4456/4500 4457/4502 +f 4455/4491 4467/4503 4468/4504 +f 4453/4493 4465/4505 4466/4506 +f 4463/4494 4477/4507 4464/4495 +f 4462/4497 4474/4508 4475/4509 +f 4459/4501 4473/4510 4460/4498 +f 4457/4511 4471/4512 4458/4499 +f 4455/4491 4469/4513 4456/4500 +f 4453/4493 4467/4503 4454/4490 +f 4462/4497 4476/4514 4463/4494 +f 4461/4496 4473/4510 4474/4508 +f 4459/4501 4471/4512 4472/4515 +f 4457/4502 4469/4513 4470/4516 +f 4347/4517 4073/4112 4086/4111 +f 4347/4517 4086/4111 4099/4118 +f 4347/4517 4099/4118 4112/4131 +f 4347/4517 4112/4131 4125/4146 +f 4347/4517 4125/4146 4138/4158 +f 4347/4517 4138/4158 4151/4173 +f 4347/4517 4151/4173 4164/4185 +f 4347/4517 4164/4185 4177/4199 +f 4347/4517 4177/4199 4190/4211 +f 4347/4517 4190/4211 4203/4225 +f 4347/4517 4203/4225 4216/4238 +f 4347/4517 4216/4238 4229/4253 +f 4347/4517 4229/4253 4242/4265 +f 4347/4517 4242/4265 4255/4280 +f 4347/4517 4255/4280 4268/4292 +f 4347/4517 
4268/4292 4281/4306 +f 4347/4517 4281/4306 4294/4319 +f 4347/4517 4294/4319 4307/4332 +f 4347/4517 4307/4332 4320/4345 +f 4347/4517 4320/4345 4333/4359 +f 4347/4517 4333/4359 4346/4372 +f 4347/4517 4346/4372 4360/4387 +f 4347/4517 4360/4387 4373/4400 +f 4347/4517 4373/4400 4386/4415 +f 4347/4517 4386/4415 4399/4426 +f 4347/4517 4399/4426 4412/4440 +f 4347/4517 4412/4440 4425/4453 +f 4347/4517 4425/4453 4438/4468 +f 4347/4517 4438/4468 4451/4479 +f 4347/4517 4451/4479 4464/4495 +f 4347/4517 4464/4495 4477/4507 +f 4468/4504 4063/4097 4064/4108 +f 4466/4506 4061/4109 4062/4110 +f 4477/4507 4072/4102 4073/4112 +f 4474/4508 4071/4100 4475/4509 +f 4472/4515 4069/4088 4473/4510 +f 4471/4512 4066/4091 4067/4093 +f 4468/4504 4065/4094 4469/4513 +f 4466/4506 4063/4097 4467/4503 +f 4347/4517 4477/4507 4073/4112 +f 4476/4514 4071/4100 4072/4102 +f 4474/4508 4069/4088 4070/4104 +f 4472/4515 4067/4093 4068/4105 +f 4469/4513 4066/4106 4470/4516 +f 3935/3963 4465/4505 4060/4070 +f 4465/4505 4056/4067 4060/4070 +f 4452/4518 4051/4062 4056/4067 +f 4439/4492 4047/4060 4051/4062 +f 4047/4060 4413/4451 4043/4055 +f 4043/4055 4400/4519 4039/4052 +f 4039/4052 4387/4437 4035/4049 +f 4387/4437 4031/4046 4035/4049 +f 4031/4046 4361/4398 4027/4042 +f 4361/4398 4023/4039 4027/4042 +f 4348/4520 4019/4034 4023/4039 +f 4334/4370 4015/4032 4019/4034 +f 4015/4032 4308/4343 4011/4027 +f 4011/4027 4295/4521 4007/4024 +f 4007/4024 4282/4329 4003/4020 +f 4282/4329 3999/4018 4003/4020 +f 3999/4018 4256/4290 3995/4014 +f 4256/4290 3991/4011 3995/4014 +f 4243/4522 3987/4006 3991/4011 +f 4230/4263 3983/4004 3987/4006 +f 3983/4004 4204/4236 3979/3999 +f 3979/3999 4191/4523 3975/3996 +f 3975/3996 4178/4222 3971/3992 +f 4178/4222 3967/3989 3971/3992 +f 3967/3989 4152/4183 3963/3986 +f 4152/4183 3959/3982 3963/3986 +f 4139/4524 3955/3977 3959/3982 +f 4126/4156 3951/3975 3955/3977 +f 3951/3975 4100/4129 3947/3970 +f 3947/3970 4087/4525 3943/3966 +f 3943/3966 4074/4115 3939/3962 +f 4074/4115 3935/3963 3939/3962 +f 4480/4526 4483/4527 4484/4528 +f 4480/4526 4485/4529 4481/4530 +f 4479/4531 4482/4532 4483/4527 +f 4484/4528 4489/4533 4485/4529 +f 4482/4532 4487/4534 4483/4527 +f 4483/4527 4488/4535 4484/4528 +f 4488/4535 4493/4536 4489/4533 +f 4487/4534 4490/4537 4491/4538 +f 4488/4535 4491/4538 4492/4539 +f 4493/4536 4496/4540 4497/4541 +f 4490/4537 4495/4542 4491/4538 +f 4491/4538 4496/4540 4492/4539 +f 4496/4540 4501/4543 4497/4541 +f 4495/4542 4498/4544 4499/4545 +f 4496/4540 4499/4545 4500/4546 +f 4501/4543 4504/4547 4505/4548 +f 4498/4544 4503/4549 4499/4545 +f 4499/4545 4504/4547 4500/4546 +f 4505/4548 4508/4550 4509/4551 +f 4503/4549 4506/4552 4507/4553 +f 4504/4547 4507/4553 4508/4550 +f 4509/4551 4512/4554 4513/4555 +f 4506/4552 4511/4556 4507/4553 +f 4507/4553 4512/4554 4508/4550 +f 4513/4555 4516/4557 4517/4558 +f 4511/4556 4514/4559 4515/4560 +f 4512/4554 4515/4560 4516/4557 +f 4516/4557 4521/4561 4517/4558 +f 4514/4559 4519/4562 4515/4560 +f 4515/4560 4520/4563 4516/4557 +f 4520/4563 4525/4564 4521/4561 +f 4518/4565 4523/4566 4519/4562 +f 4520/4563 4523/4566 4524/4567 +f 4525/4564 4528/4568 4529/4569 +f 4522/4570 4527/4571 4523/4566 +f 4523/4566 4528/4568 4524/4567 +f 4528/4568 4533/4572 4529/4569 +f 4527/4571 4530/4573 4531/4574 +f 4528/4568 4531/4574 4532/4575 +f 4533/4572 4536/4576 4537/4577 +f 4530/4573 4535/4578 4531/4574 +f 4531/4574 4536/4576 4532/4575 +f 4537/4577 4540/4579 4541/4580 +f 4534/4581 4539/4582 4535/4578 +f 4536/4576 4539/4582 4540/4579 +f 4541/4580 4544/4583 4545/4584 +f 4539/4582 4542/4585 4543/4586 +f 
4539/4582 4544/4583 4540/4579 +f 4544/4583 4549/4587 4545/4584 +f 4542/4585 4547/4588 4543/4586 +f 4544/4583 4547/4588 4548/4589 +f 4548/4589 4553/4590 4549/4587 +f 4547/4588 4550/4591 4551/4592 +f 4547/4588 4552/4593 4548/4589 +f 4552/4593 4557/4594 4553/4590 +f 4551/4592 4554/4595 4555/4596 +f 4552/4593 4555/4596 4556/4597 +f 4557/4594 4560/4598 4561/4599 +f 4554/4595 4559/4600 4555/4596 +f 4555/4596 4560/4598 4556/4597 +f 4560/4598 4565/4601 4561/4599 +f 4559/4600 4562/4602 4563/4603 +f 4560/4598 4563/4603 4564/4604 +f 4565/4601 4568/4605 4569/4606 +f 4562/4602 4567/4607 4563/4603 +f 4563/4603 4568/4605 4564/4604 +f 4569/4606 4572/4608 4573/4609 +f 4567/4607 4570/4610 4571/4611 +f 4568/4605 4571/4611 4572/4608 +f 4572/4608 4577/4612 4573/4609 +f 4570/4610 4575/4613 4571/4611 +f 4571/4611 4576/4614 4572/4608 +f 4576/4614 4581/4615 4577/4612 +f 4575/4613 4578/4616 4579/4617 +f 4576/4614 4579/4617 4580/4618 +f 4580/4618 4585/4619 4581/4615 +f 4578/4616 4583/4620 4579/4617 +f 4579/4617 4584/4621 4580/4618 +f 4584/4621 4589/4622 4585/4619 +f 4583/4620 4586/4623 4587/4624 +f 4584/4621 4587/4624 4588/4625 +f 4589/4622 4592/4626 4593/4627 +f 4586/4623 4591/4628 4587/4624 +f 4587/4624 4592/4626 4588/4625 +f 4592/4626 4597/4629 4593/4627 +f 4591/4628 4594/4630 4595/4631 +f 4592/4626 4595/4631 4596/4632 +f 4597/4629 4601/4633 4602/4634 +f 4594/4630 4600/4635 4595/4631 +f 4595/4631 4601/4633 4596/4632 +f 4602/4634 4605/4636 4606/4637 +f 4600/4635 4603/4638 4604/4639 +f 4601/4633 4604/4639 4605/4636 +f 4478/4640 4598/4641 4482/4532 +f 4482/4532 4598/4641 4486/4642 +f 4486/4642 4598/4641 4490/4537 +f 4490/4537 4598/4641 4494/4643 +f 4494/4643 4598/4641 4498/4544 +f 4498/4544 4598/4641 4502/4644 +f 4502/4644 4598/4641 4506/4552 +f 4506/4552 4598/4641 4510/4645 +f 4510/4645 4598/4641 4514/4559 +f 4514/4559 4598/4641 4518/4565 +f 4518/4565 4598/4641 4522/4570 +f 4522/4570 4598/4641 4526/4646 +f 4526/4646 4598/4641 4530/4573 +f 4530/4573 4598/4641 4534/4581 +f 4534/4581 4598/4641 4538/4647 +f 4538/4647 4598/4641 4542/4585 +f 4542/4585 4598/4641 4546/4648 +f 4546/4648 4598/4641 4550/4591 +f 4550/4591 4598/4641 4554/4595 +f 4554/4595 4598/4641 4558/4649 +f 4558/4649 4598/4641 4562/4602 +f 4562/4602 4598/4641 4566/4650 +f 4566/4650 4598/4641 4570/4610 +f 4570/4610 4598/4641 4574/4651 +f 4574/4651 4598/4641 4578/4616 +f 4578/4616 4598/4641 4582/4652 +f 4582/4652 4598/4641 4586/4623 +f 4586/4623 4598/4641 4590/4653 +f 4590/4653 4598/4641 4594/4630 +f 4594/4630 4598/4641 4599/4654 +f 4599/4654 4598/4641 4603/4638 +f 4606/4637 4480/4526 4481/4530 +f 4603/4638 4479/4531 4604/4639 +f 4604/4639 4480/4526 4605/4636 +f 4603/4638 4598/4641 4478/4640 +f 4615/4655 4627/4656 4628/4657 +f 4612/4658 4626/4659 4613/4660 +f 4611/4661 4623/4662 4624/4663 +f 4609/4664 4621/4665 4622/4666 +f 4618/4667 4630/4668 4631/4669 +f 4615/4655 4629/4670 4616/4671 +f 4613/4660 4627/4656 4614/4672 +f 4612/4673 4624/4663 4625/4674 +f 4609/4664 4623/4662 4610/4675 +f 4607/4676 4621/4665 4608/4677 +f 4619/4678 4631/4669 4632/4679 +f 4617/4680 4629/4670 4630/4668 +f 4622/4666 4636/4681 4623/4662 +f 4620/4682 4634/4683 4621/4665 +f 4632/4679 4644/4684 4645/4685 +f 4629/4670 4643/4686 4630/4668 +f 4628/4657 4640/4687 4641/4688 +f 4626/4659 4638/4689 4639/4690 +f 4624/4663 4636/4681 4637/4691 +f 4622/4666 4634/4683 4635/4692 +f 4631/4669 4643/4686 4644/4684 +f 4628/4657 4642/4693 4629/4670 +f 4626/4659 4640/4687 4627/4656 +f 4624/4663 4638/4694 4625/4674 +f 4635/4692 4649/4695 4636/4681 +f 4634/4683 4646/4696 4647/4697 +f 4644/4684 4658/4698 
4645/4685 +f 4642/4693 4656/4699 4643/4686 +f 4641/4688 4653/4700 4654/4701 +f 4639/4690 4651/4702 4652/4703 +f 4636/4681 4650/4704 4637/4691 +f 4635/4692 4647/4697 4648/4705 +f 4643/4686 4657/4706 4644/4684 +f 4642/4693 4654/4701 4655/4707 +f 4640/4687 4652/4703 4653/4700 +f 4637/4691 4651/4708 4638/4694 +f 4648/4705 4662/4709 4649/4695 +f 4646/4696 4660/4710 4647/4697 +f 4658/4698 4670/4711 4671/4712 +f 4656/4699 4668/4713 4669/4714 +f 4653/4700 4667/4715 4654/4701 +f 4651/4702 4665/4716 4652/4703 +f 4649/4695 4663/4717 4650/4704 +f 4648/4705 4660/4710 4661/4718 +f 4656/4699 4670/4711 4657/4706 +f 4654/4701 4668/4713 4655/4707 +f 4653/4700 4665/4716 4666/4719 +f 4651/4708 4663/4717 4664/4720 +f 4661/4718 4675/4721 4662/4709 +f 4660/4710 4672/4722 4673/4723 +f 4670/4711 4684/4724 4671/4712 +f 4668/4713 4682/4725 4669/4714 +f 4667/4715 4679/4726 4680/4727 +f 4665/4716 4677/4728 4678/4729 +f 4663/4717 4675/4721 4676/4730 +f 4661/4718 4673/4723 4674/4731 +f 4670/4711 4682/4725 4683/4732 +f 4667/4715 4681/4733 4668/4713 +f 4665/4716 4679/4726 4666/4719 +f 4663/4717 4677/4734 4664/4720 +f 4675/4721 4687/4735 4688/4736 +f 4672/4722 4686/4737 4673/4723 +f 4684/4724 4696/4738 4697/4739 +f 4682/4725 4694/4740 4695/4741 +f 4679/4726 4693/4742 4680/4727 +f 4678/4729 4690/4743 4691/4744 +f 4676/4730 4688/4736 4689/4745 +f 4673/4723 4687/4735 4674/4731 +f 4683/4732 4695/4741 4696/4738 +f 4681/4733 4693/4742 4694/4740 +f 4678/4729 4692/4746 4679/4726 +f 4676/4730 4690/4747 4677/4734 +f 4688/4736 4700/4748 4701/4749 +f 4686/4737 4698/4750 4699/4751 +f 4696/4738 4710/4752 4697/4739 +f 4695/4741 4707/4753 4708/4754 +f 4692/4746 4706/4755 4693/4742 +f 4691/4744 4703/4756 4704/4757 +f 4688/4736 4702/4758 4689/4745 +f 4686/4737 4700/4748 4687/4735 +f 4695/4741 4709/4759 4696/4738 +f 4694/4740 4706/4755 4707/4753 +f 4692/4746 4704/4757 4705/4760 +f 4689/4745 4703/4761 4690/4747 +f 4701/4749 4713/4762 4714/4763 +f 4699/4751 4711/4764 4712/4765 +f 4710/4752 4722/4766 4723/4767 +f 4707/4753 4721/4768 4708/4754 +f 4705/4760 4719/4769 4706/4755 +f 4704/4757 4716/4770 4717/4771 +f 4701/4749 4715/4772 4702/4758 +f 4699/4751 4713/4762 4700/4748 +f 4709/4759 4721/4768 4722/4766 +f 4707/4753 4719/4769 4720/4773 +f 4705/4760 4717/4771 4718/4774 +f 4702/4758 4716/4775 4703/4761 +f 4713/4762 4727/4776 4714/4763 +f 4711/4764 4725/4777 4712/4765 +f 4723/4767 4735/4778 4736/4779 +f 4721/4768 4733/4780 4734/4781 +f 4719/4769 4731/4782 4732/4783 +f 4716/4770 4730/4784 4717/4771 +f 4715/4772 4727/4776 4728/4785 +f 4713/4762 4725/4777 4726/4786 +f 4721/4768 4735/4778 4722/4766 +f 4719/4769 4733/4780 4720/4773 +f 4717/4771 4731/4782 4718/4774 +f 4716/4775 4728/4785 4729/4787 +f 4726/4786 4740/4788 4727/4776 +f 4724/4789 4738/4790 4725/4777 +f 4736/4779 4748/4791 4749/4792 +f 4733/4780 4747/4793 4734/4781 +f 4732/4783 4744/4794 4745/4795 +f 4730/4784 4742/4796 4743/4797 +f 4728/4785 4740/4788 4741/4798 +f 4726/4786 4738/4790 4739/4799 +f 4735/4778 4747/4793 4748/4791 +f 4732/4783 4746/4800 4733/4780 +f 4730/4784 4744/4794 4731/4782 +f 4728/4785 4742/4801 4729/4787 +f 4739/4799 4753/4802 4740/4788 +f 4738/4790 4750/4803 4751/4804 +f 4748/4791 4762/4805 4749/4792 +f 4746/4800 4760/4806 4747/4793 +f 4745/4795 4757/4807 4758/4808 +f 4743/4797 4755/4809 4756/4810 +f 4740/4788 4754/4811 4741/4798 +f 4739/4799 4751/4804 4752/4812 +f 4747/4793 4761/4813 4748/4791 +f 4745/4795 4759/4814 4746/4800 +f 4744/4794 4756/4810 4757/4807 +f 4741/4798 4755/4815 4742/4801 +f 4753/4802 4765/4816 4766/4817 +f 4750/4803 4764/4818 4751/4804 +f 4762/4805 
4774/4819 4775/4820 +f 4759/4814 4773/4821 4760/4806 +f 4758/4808 4770/4822 4771/4823 +f 4755/4809 4769/4824 4756/4810 +f 4753/4802 4767/4825 4754/4811 +f 4752/4812 4764/4818 4765/4816 +f 4760/4806 4774/4819 4761/4813 +f 4758/4808 4772/4826 4759/4814 +f 4757/4807 4769/4824 4770/4822 +f 4755/4815 4767/4825 4768/4827 +f 4765/4816 4779/4828 4766/4817 +f 4764/4818 4776/4829 4777/4830 +f 4774/4819 4788/4831 4775/4820 +f 4772/4826 4786/4832 4773/4821 +f 4771/4823 4783/4833 4784/4834 +f 4769/4824 4781/4835 4782/4836 +f 4767/4825 4779/4828 4780/4837 +f 4765/4816 4777/4830 4778/4838 +f 4774/4819 4786/4832 4787/4839 +f 4771/4823 4785/4840 4772/4826 +f 4769/4824 4783/4833 4770/4822 +f 4767/4825 4781/4841 4768/4827 +f 4779/4828 4791/4842 4792/4843 +f 4776/4829 4790/4844 4777/4830 +f 4788/4831 4800/4845 4801/4846 +f 4786/4832 4798/4847 4799/4848 +f 4783/4833 4797/4849 4784/4834 +f 4782/4836 4794/4850 4795/4851 +f 4780/4837 4792/4843 4793/4852 +f 4777/4830 4791/4842 4778/4838 +f 4787/4839 4799/4848 4800/4845 +f 4785/4840 4797/4849 4798/4847 +f 4782/4836 4796/4853 4783/4833 +f 4780/4837 4794/4854 4781/4841 +f 4792/4843 4804/4855 4805/4856 +f 4790/4844 4802/4857 4803/4858 +f 4800/4845 4814/4859 4801/4846 +f 4799/4848 4811/4860 4812/4861 +f 4796/4853 4810/4862 4797/4849 +f 4795/4851 4807/4863 4808/4864 +f 4792/4843 4806/4865 4793/4852 +f 4790/4844 4804/4855 4791/4842 +f 4799/4848 4813/4866 4800/4845 +f 4798/4847 4810/4862 4811/4860 +f 4796/4853 4808/4864 4809/4867 +f 4793/4852 4807/4868 4794/4854 +f 4805/4856 4817/4869 4818/4870 +f 4803/4858 4815/4871 4816/4872 +f 4814/4859 4826/4873 4827/4874 +f 4811/4860 4825/4875 4812/4861 +f 4809/4867 4823/4876 4810/4862 +f 4808/4864 4820/4877 4821/4878 +f 4805/4856 4819/4879 4806/4865 +f 4803/4858 4817/4869 4804/4855 +f 4813/4866 4825/4875 4826/4873 +f 4811/4860 4823/4876 4824/4880 +f 4809/4867 4821/4878 4822/4881 +f 4806/4865 4820/4882 4807/4868 +f 4817/4869 4831/4883 4818/4870 +f 4815/4871 4829/4884 4816/4872 +f 4826/4873 4840/4885 4827/4874 +f 4825/4875 4837/4886 4838/4887 +f 4823/4876 4835/4888 4836/4889 +f 4820/4877 4834/4890 4821/4878 +f 4819/4879 4831/4883 4832/4891 +f 4817/4869 4829/4884 4830/4892 +f 4825/4875 4839/4893 4826/4873 +f 4823/4876 4837/4886 4824/4880 +f 4821/4878 4835/4888 4822/4881 +f 4820/4882 4832/4891 4833/4894 +f 4830/4892 4844/4895 4831/4883 +f 4828/4896 4842/4897 4829/4884 +f 4840/4885 4852/4898 4853/4899 +f 4837/4886 4851/4900 4838/4887 +f 4836/4889 4848/4901 4849/4902 +f 4834/4890 4846/4903 4847/4904 +f 4832/4891 4844/4895 4845/4905 +f 4830/4892 4842/4897 4843/4906 +f 4839/4893 4851/4900 4852/4898 +f 4836/4889 4850/4907 4837/4886 +f 4834/4890 4848/4901 4835/4888 +f 4832/4891 4846/4908 4833/4894 +f 4843/4906 4857/4909 4844/4895 +f 4842/4897 4854/4910 4855/4911 +f 4852/4898 4866/4912 4853/4899 +f 4850/4907 4864/4913 4851/4900 +f 4849/4902 4861/4914 4862/4915 +f 4847/4904 4859/4916 4860/4917 +f 4844/4895 4858/4918 4845/4905 +f 4843/4906 4855/4911 4856/4919 +f 4851/4900 4865/4920 4852/4898 +f 4849/4902 4863/4921 4850/4907 +f 4848/4901 4860/4917 4861/4914 +f 4845/4905 4859/4922 4846/4908 +f 4857/4909 4869/4923 4870/4924 +f 4855/4911 4867/4925 4868/4926 +f 4866/4912 4878/4927 4879/4928 +f 4864/4913 4876/4929 4877/4930 +f 4862/4915 4874/4931 4875/4932 +f 4859/4916 4873/4933 4860/4917 +f 4857/4909 4871/4934 4858/4918 +f 4856/4919 4868/4926 4869/4923 +f 4864/4913 4878/4927 4865/4920 +f 4862/4915 4876/4929 4863/4921 +f 4861/4914 4873/4933 4874/4931 +f 4859/4922 4871/4934 4872/4935 +f 4869/4923 4883/4936 4870/4924 +f 4868/4926 4880/4937 4881/4938 +f 
4878/4927 4892/4939 4879/4928 +f 4877/4930 4889/4940 4890/4941 +f 4874/4931 4888/4942 4875/4932 +f 4873/4933 4885/4943 4886/4944 +f 4871/4934 4883/4936 4884/4945 +f 4868/4926 4882/4946 4869/4923 +f 4878/4927 4890/4941 4891/4947 +f 4876/4929 4888/4942 4889/4940 +f 4873/4933 4887/4948 4874/4931 +f 4871/4934 4885/4949 4872/4935 +f 4883/4936 4896/4950 4897/4951 +f 4880/4937 4895/4952 4881/4938 +f 4892/4939 4905/4953 4906/4954 +f 4890/4941 4903/4955 4904/4956 +f 4887/4948 4902/4957 4888/4942 +f 4885/4943 4900/4958 4886/4944 +f 4884/4945 4897/4951 4898/4959 +f 4881/4938 4896/4950 4882/4946 +f 4891/4947 4904/4956 4905/4953 +f 4889/4940 4902/4957 4903/4955 +f 4886/4944 4901/4960 4887/4948 +f 4885/4949 4898/4959 4899/4961 +f 4897/4951 4909/4962 4910/4963 +f 4895/4952 4907/4964 4908/4965 +f 4905/4953 4919/4966 4906/4954 +f 4904/4956 4916/4967 4917/4968 +f 4901/4960 4915/4969 4902/4957 +f 4899/4970 4913/4971 4900/4958 +f 4897/4951 4911/4972 4898/4959 +f 4895/4952 4909/4962 4896/4950 +f 4904/4956 4918/4973 4905/4953 +f 4903/4955 4915/4969 4916/4967 +f 4901/4960 4913/4971 4914/4974 +f 4899/4961 4911/4972 4912/4975 +f 4910/4963 4922/4976 4923/4977 +f 4908/4965 4920/4978 4921/4979 +f 4919/4966 4931/4980 4932/4981 +f 4916/4967 4930/4982 4917/4968 +f 4914/4974 4928/4983 4915/4969 +f 4913/4971 4925/4984 4926/4985 +f 4910/4963 4924/4986 4911/4972 +f 4908/4965 4922/4976 4909/4962 +f 4918/4973 4930/4982 4931/4980 +f 4916/4967 4928/4983 4929/4987 +f 4914/4974 4926/4985 4927/4988 +f 4911/4972 4925/4989 4912/4975 +f 4922/4976 4936/4990 4923/4977 +f 4920/4978 4934/4991 4921/4979 +f 4931/4980 4945/4992 4932/4981 +f 4930/4982 4942/4993 4943/4994 +f 4928/4983 4940/4995 4941/4996 +f 4925/4984 4939/4997 4926/4985 +f 4924/4986 4936/4990 4937/4998 +f 4922/4976 4934/4991 4935/4999 +f 4930/4982 4944/5000 4931/4980 +f 4928/4983 4942/4993 4929/4987 +f 4926/4985 4940/4995 4927/4988 +f 4925/4989 4937/4998 4938/5001 +f 4935/4999 4949/5002 4936/4990 +f 4933/5003 4947/5004 4934/4991 +f 4945/4992 4957/5005 4958/5006 +f 4942/4993 4956/5007 4943/4994 +f 4941/4996 4953/5008 4954/5009 +f 4938/5010 4952/5011 4939/4997 +f 4937/4998 4949/5002 4950/5012 +f 4935/4999 4947/5004 4948/5013 +f 4944/5000 4956/5007 4957/5005 +f 4941/4996 4955/5014 4942/4993 +f 4939/4997 4953/5008 4940/4995 +f 4938/5001 4950/5012 4951/5015 +f 4948/5013 4962/5016 4949/5002 +f 4947/5004 4959/5017 4960/5018 +f 4957/5005 4971/5019 4958/5006 +f 4955/5014 4969/5020 4956/5007 +f 4954/5009 4966/5021 4967/5022 +f 4951/5023 4965/5024 4952/5011 +f 4949/5002 4963/5025 4950/5012 +f 4948/5013 4960/5018 4961/5026 +f 4956/5007 4970/5027 4957/5005 +f 4954/5009 4968/5028 4955/5014 +f 4953/5008 4965/5024 4966/5021 +f 4951/5015 4963/5025 4964/5029 +f 4962/5016 4974/5030 4975/5031 +f 4959/5017 4973/5032 4960/5018 +f 4971/5019 4983/5033 4984/5034 +f 4968/5028 4982/5035 4969/5020 +f 4966/5021 4980/5036 4967/5022 +f 4964/5037 4978/5038 4965/5024 +f 4962/5016 4976/5039 4963/5025 +f 4960/5018 4974/5030 4961/5026 +f 4969/5020 4983/5033 4970/5027 +f 4967/5022 4981/5040 4968/5028 +f 4966/5021 4978/5038 4979/5041 +f 4964/5029 4976/5039 4977/5042 +f 4975/5031 4987/5043 4988/5044 +f 4973/5032 4985/5045 4986/5046 +f 4983/5033 4997/5047 4984/5034 +f 4981/5040 4995/5048 4982/5035 +f 4979/5041 4993/5049 4980/5036 +f 4978/5038 4990/5050 4991/5051 +f 4976/5039 4988/5044 4989/5052 +f 4973/5032 4987/5043 4974/5030 +f 4983/5033 4995/5048 4996/5053 +f 4981/5040 4993/5049 4994/5054 +f 4978/5038 4992/5055 4979/5041 +f 4976/5039 4990/5056 4977/5042 +f 4988/5044 5000/5057 5001/5058 +f 4985/5045 4999/5059 
4986/5046 +f 4997/5047 5009/5060 5010/5061 +f 4995/5048 5007/5062 5008/5063 +f 4992/5055 5006/5064 4993/5049 +f 4991/5051 5003/5065 5004/5066 +f 4989/5052 5001/5058 5002/5067 +f 4986/5046 5000/5057 4987/5043 +f 4996/5053 5008/5063 5009/5060 +f 4994/5054 5006/5064 5007/5062 +f 4991/5051 5005/5068 4992/5055 +f 4989/5052 5003/5069 4990/5056 +f 5001/5058 5013/5070 5014/5071 +f 4999/5059 5011/5072 5012/5073 +f 5009/5060 5023/5074 5010/5061 +f 5008/5063 5020/5075 5021/5076 +f 5005/5068 5019/5077 5006/5064 +f 5004/5066 5016/5078 5017/5079 +f 5001/5058 5015/5080 5002/5067 +f 4999/5059 5013/5070 5000/5057 +f 5008/5063 5022/5081 5009/5060 +f 5007/5062 5019/5077 5020/5075 +f 5005/5068 5017/5079 5018/5082 +f 5002/5067 5016/5083 5003/5069 +f 4893/5084 4619/4678 4632/4679 +f 4893/5084 4632/4679 4645/4685 +f 4893/5084 4645/4685 4658/4698 +f 4893/5084 4658/4698 4671/4712 +f 4893/5084 4671/4712 4684/4724 +f 4893/5084 4684/4724 4697/4739 +f 4893/5084 4697/4739 4710/4752 +f 4893/5084 4710/4752 4723/4767 +f 4893/5084 4723/4767 4736/4779 +f 4893/5084 4736/4779 4749/4792 +f 4893/5084 4749/4792 4762/4805 +f 4893/5084 4762/4805 4775/4820 +f 4893/5084 4775/4820 4788/4831 +f 4893/5084 4788/4831 4801/4846 +f 4893/5084 4801/4846 4814/4859 +f 4893/5084 4814/4859 4827/4874 +f 4893/5084 4827/4874 4840/4885 +f 4893/5084 4840/4885 4853/4899 +f 4893/5084 4853/4899 4866/4912 +f 4893/5084 4866/4912 4879/4928 +f 4893/5084 4879/4928 4892/4939 +f 4893/5084 4892/4939 4906/4954 +f 4893/5084 4906/4954 4919/4966 +f 4893/5084 4919/4966 4932/4981 +f 4893/5084 4932/4981 4945/4992 +f 4893/5084 4945/4992 4958/5006 +f 4893/5084 4958/5006 4971/5019 +f 4893/5084 4971/5019 4984/5034 +f 4893/5084 4984/5034 4997/5047 +f 4893/5084 4997/5047 5010/5061 +f 4893/5084 5010/5061 5023/5074 +f 5014/5071 4609/4664 4610/4675 +f 5012/5073 4607/4676 4608/4677 +f 5022/5081 4619/4678 5023/5074 +f 5020/5075 4617/4680 5021/5076 +f 5018/5082 4615/4655 5019/5077 +f 5017/5079 4612/4658 4613/4660 +f 5014/5071 4611/4661 5015/5080 +f 5012/5073 4609/4664 5013/5070 +f 4893/5084 5023/5074 4619/4678 +f 5021/5076 4618/4667 5022/5081 +f 5020/5075 4615/4655 4616/4671 +f 5018/5082 4613/4660 4614/4672 +f 5015/5080 4612/4673 5016/5083 +f 4481/4530 5011/5072 4606/4637 +f 5011/5072 4602/4634 4606/4637 +f 4998/5085 4597/4629 4602/4634 +f 4985/5045 4593/4627 4597/4629 +f 4593/4627 4959/5017 4589/4622 +f 4589/4622 4946/5086 4585/4619 +f 4585/4619 4933/5003 4581/4615 +f 4933/5003 4577/4612 4581/4615 +f 4577/4612 4907/4964 4573/4609 +f 4907/4964 4569/4606 4573/4609 +f 4894/5087 4565/4601 4569/4606 +f 4880/4937 4561/4599 4565/4601 +f 4561/4599 4854/4910 4557/4594 +f 4557/4594 4841/5088 4553/4590 +f 4553/4590 4828/4896 4549/4587 +f 4828/4896 4545/4584 4549/4587 +f 4545/4584 4802/4857 4541/4580 +f 4802/4857 4537/4577 4541/4580 +f 4789/5089 4533/4572 4537/4577 +f 4776/4829 4529/4569 4533/4572 +f 4529/4569 4750/4803 4525/4564 +f 4525/4564 4737/5090 4521/4561 +f 4521/4561 4724/4789 4517/4558 +f 4724/4789 4513/4555 4517/4558 +f 4513/4555 4698/4750 4509/4551 +f 4698/4750 4505/4548 4509/4551 +f 4685/5091 4501/4543 4505/4548 +f 4672/4722 4497/4541 4501/4543 +f 4497/4541 4646/4696 4493/4536 +f 4493/4536 4633/5092 4489/4533 +f 4489/4533 4620/4682 4485/4529 +f 4620/4682 4481/4530 4485/4529 +f 4/1 3/223 2/2 +f 8/4 7/2726 6/5 +f 13/7 16/2228 15/8 +f 17/10 28/2112 19/11 +f 32/13 31/317 30/14 +f 36/16 35/1785 34/17 +f 37/19 40/1305 39/20 +f 21/22 22/1250 23/23 +f 25/25 26/2732 27/26 +f 41/28 44/140 43/29 +f 48/31 47/191 46/32 +f 52/34 51/432 50/35 +f 53/37 56/161 55/38 +f 60/40 59/62 58/41 +f 29/15 
30/14 62/43 +f 66/45 65/486 64/46 +f 67/48 5/6 6/5 +f 72/50 71/1955 70/51 +f 76/53 75/2762 74/54 +f 77/56 80/312 79/57 +f 81/59 84/970 83/60 +f 59/62 87/78 86/63 +f 88/65 91/2877 90/66 +f 96/68 95/387 94/69 +f 100/71 99/1067 98/72 +f 101/74 104/2810 103/75 +f 106/77 105/111 34/17 +f 108/79 116/109 115/80 +f 117/82 119/194 36/16 +f 124/84 127/99 126/85 +f 131/87 130/1157 129/88 +f 132/90 102/76 134/91 +f 142/93 141/174 140/94 +f 148/96 106/77 60/40 +f 144/98 143/253 127/99 +f 149/100 152/1441 151/101 +f 156/103 155/357 154/104 +f 159/106 150/102 158/107 +f 116/109 108/79 164/110 +f 171/112 170/179 169/113 +f 174/115 126/85 173/116 +f 179/118 182/255 181/119 +f 186/121 189/319 188/122 +f 194/124 197/1314 196/125 +f 199/127 142/93 139/95 +f 206/129 209/215 208/130 +f 213/132 212/173 211/133 +f 202/135 215/2820 214/136 +f 96/68 93/70 216/138 +f 92/139 238/180 43/29 +f 97/73 184/374 246/141 +f 250/142 249/165 229/143 +f 252/145 254/244 58/41 +f 258/147 257/1193 256/148 +f 260/150 91/2877 229/143 +f 110/152 111/3492 112/153 +f 114/155 264/1147 263/156 +f 265/158 268/1008 267/159 +f 56/161 53/37 270/162 +f 274/164 273/1163 249/165 +f 185/166 183/211 63/47 +f 280/167 279/1030 278/168 +f 285/170 284/224 283/171 +f 288/172 219/464 211/133 +f 141/174 142/93 92/139 +f 289/175 290/393 175/176 +f 148/96 116/109 105/111 +f 312/178 311/1436 170/179 +f 238/180 262/335 42/30 +f 318/181 163/213 160/182 +f 310/184 309/1031 308/185 +f 241/187 239/1191 325/188 +f 160/182 230/314 240/190 +f 47/191 48/31 307/186 +f 108/79 109/157 315/192 +f 345/193 85/64 86/63 +f 348/195 351/246 350/196 +f 356/198 359/260 358/199 +f 355/201 354/1346 353/202 +f 363/204 31/317 32/13 +f 364/206 252/145 253/146 +f 366/208 125/86 368/209 +f 183/211 372/1217 371/212 +f 163/213 162/2842 161/214 +f 209/215 226/1051 101/74 +f 378/216 377/474 165/217 +f 379/219 295/1840 381/220 +f 383/222 382/1281 3/223 +f 284/224 285/170 386/225 +f 387/227 127/99 389/228 +f 391/230 259/151 229/143 +f 73/55 281/282 397/231 +f 405/233 408/248 407/234 +f 203/236 218/1212 411/237 +f 414/239 121/2834 413/240 +f 1/3 356/198 357/200 +f 277/242 276/169 131/87 +f 254/244 344/1005 57/42 +f 358/199 384/1463 415/245 +f 417/247 416/336 386/225 +f 401/249 400/386 192/250 +f 419/252 389/228 127/99 +f 421/254 181/119 182/255 +f 103/75 301/438 302/257 +f 428/259 367/210 368/209 +f 380/221 390/290 359/260 +f 62/43 204/1809 266/160 +f 140/94 141/174 431/262 +f 357/200 358/199 351/246 +f 356/198 1/3 379/219 +f 415/245 392/1289 403/264 +f 437/266 428/259 436/267 +f 438/269 441/2805 440/270 +f 188/122 451/320 450/272 +f 265/158 455/469 181/119 +f 224/277 185/166 64/46 +f 237/279 212/173 213/132 +f 275/243 131/87 128/89 +f 281/282 282/286 460/283 +f 50/35 462/1094 79/57 +f 73/55 74/54 282/286 +f 468/287 467/410 466/288 +f 390/290 355/201 352/203 +f 482/291 485/1641 484/292 +f 489/294 488/414 487/295 +f 493/297 496/402 495/298 +f 472/300 471/1426 470/301 +f 502/303 501/456 500/304 +f 220/306 221/2566 222/307 +f 512/309 419/252 143/253 +f 518/310 517/1897 511/311 +f 513/313 239/1191 230/314 +f 205/316 30/14 31/317 +f 189/319 370/434 451/320 +f 163/213 318/181 532/321 +f 533/322 224/277 225/278 +f 446/324 448/1848 447/325 +f 335/326 541/382 540/327 +f 544/329 512/309 546/330 +f 329/332 519/475 291/333 +f 199/127 534/1141 262/335 +f 385/226 386/225 416/336 +f 532/321 190/1168 191/338 +f 363/204 117/82 118/83 +f 578/342 581/383 580/343 +f 573/345 572/1416 571/346 +f 561/348 563/968 562/349 +f 241/187 260/150 259/151 +f 590/351 593/2023 592/352 +f 545/331 546/330 595/354 +f 503/356 
154/104 155/357 +f 606/359 625/1940 624/360 +f 478/362 481/404 480/363 +f 646/365 649/1935 648/366 +f 590/351 591/353 689/368 +f 564/370 565/1310 651/371 +f 728/373 246/141 184/374 +f 714/376 724/2057 716/377 +f 692/379 574/391 575/380 +f 541/382 335/326 581/383 +f 346/384 68/49 248/385 +f 161/214 231/1059 230/314 +f 128/89 129/88 94/69 +f 698/388 705/2159 704/389 +f 574/391 577/3099 576/392 +f 290/393 289/175 401/249 +f 630/395 631/1316 725/396 +f 733/398 736/2821 735/399 +f 755/401 466/288 496/402 +f 759/403 480/363 481/404 +f 757/406 763/2211 762/407 +f 764/409 496/402 466/288 +f 768/411 767/1656 3808/412 +f 759/403 760/405 488/414 +f 492/415 474/418 475/416 +f 474/418 770/1901 769/419 +f 773/421 468/287 465/289 +f 771/417 774/425 486/296 +f 484/292 485/1641 775/423 +f 774/425 776/1419 489/294 +f 495/298 496/402 764/409 +f 495/298 757/406 761/408 +f 137/427 397/231 784/428 +f 325/188 239/1191 797/430 +f 165/217 519/475 51/432 +f 369/433 32/13 29/15 +f 730/435 731/1875 568/436 +f 90/66 302/257 301/438 +f 243/340 118/83 516/439 +f 800/441 734/400 806/442 +f 817/444 815/451 808/445 +f 816/446 808/445 823/447 +f 306/449 303/1201 447/325 +f 815/451 810/515 809/452 +f 829/453 830/472 828/454 +f 726/397 725/396 501/456 +f 274/164 427/280 424/457 +f 879/459 882/1252 881/460 +f 884/462 883/1843 879/459 +f 219/464 218/1212 217/465 +f 906/466 905/673 904/467 +f 455/469 204/1809 205/316 +f 898/471 882/1252 830/472 +f 903/473 900/484 880/461 +f 377/474 291/333 519/475 +f 923/476 949/553 948/477 +f 946/478 947/488 951/479 +f 3519/481 740/1934 739/482 +f 900/484 899/2835 886/485 +f 225/278 64/46 65/486 +f 182/255 445/1129 442/487 +f 809/452 952/1297 947/488 +f 954/490 960/3494 962/491 +f 927/493 950/480 965/494 +f 885/463 879/459 880/461 +f 925/496 927/493 966/495 +f 343/498 342/1502 341/499 +f 965/494 964/1798 971/501 +f 968/502 971/501 970/503 +f 972/505 968/502 969/504 +f 963/507 961/492 981/508 +f 336/510 337/3449 338/511 +f 992/513 993/1797 827/514 +f 810/515 815/451 996/516 +f 1007/518 1001/1986 1002/519 +f 1012/521 826/678 827/514 +f 1011/523 1009/522 1007/518 +f 294/524 293/1088 292/334 +f 907/525 908/716 920/526 +f 1017/528 579/344 1019/529 +f 1020/531 1023/593 1022/532 +f 1035/534 1024/614 1020/531 +f 1061/536 1060/608 1047/537 +f 1063/539 1061/536 1046/538 +f 1044/535 1020/531 1021/533 +f 1102/542 1094/552 1097/543 +f 1116/545 1126/561 1118/546 +f 1016/548 1015/616 1014/549 +f 881/460 897/468 904/467 +f 816/446 1116/545 1127/551 +f 1094/552 948/477 949/553 +f 514/554 342/1502 167/555 +f 146/557 145/1011 81/59 +f 1133/558 1135/2261 1130/559 +f 1102/542 1133/558 1126/561 +f 822/448 1126/561 1116/545 +f 1145/562 1143/1862 1148/563 +f 1076/565 1144/578 1151/566 +f 1148/563 1143/1862 1086/568 +f 1109/544 1097/543 1114/570 +f 1154/571 1153/574 1156/572 +f 1153/574 925/496 967/497 +f 3464/575 3497/1438 463/576 +f 1144/578 1147/586 1152/579 +f 1156/572 967/497 1163/580 +f 1164/582 1162/581 1169/583 +f 1145/562 1171/587 1176/585 +f 1114/570 1115/1585 1154/571 +f 1183/588 1155/573 1164/582 +f 1184/590 1183/588 1177/589 +f 1069/567 1151/566 1062/540 +f 1023/593 1191/597 1194/594 +f 1162/581 1163/580 1170/595 +f 1195/596 1184/590 1185/591 +f 1183/588 1196/599 1176/585 +f 1201/598 1063/539 1152/579 +f 1061/536 1063/539 1201/598 +f 3825/601 847/624 843/602 +f 1203/604 1202/600 1195/596 +f 1202/600 1201/598 1184/590 +f 1218/606 1060/608 1203/604 +f 1060/608 1061/536 1202/600 +f 1225/609 1218/606 1217/607 +f 671/611 695/2132 694/612 +f 1219/610 1217/607 1024/614 +f 1217/607 1203/604 1034/605 +f 1226/615 
1014/549 1015/616 +f 1243/618 1242/650 1234/619 +f 1231/621 1230/3955 1229/622 +f 847/624 1243/618 1233/620 +f 843/602 1233/620 1244/625 +f 1233/620 1234/619 1245/627 +f 1923/628 3761/2110 233/629 +f 2100/630 2108/3944 499/305 +f 3914/631 3864/2180 2135/632 +f 61/44 62/43 323/261 +f 210/134 211/133 217/465 +f 1299/636 1310/646 1309/637 +f 3863/639 1230/3955 1231/621 +f 1066/641 1047/537 1321/642 +f 1322/643 1321/642 1228/623 +f 1060/608 1218/606 1321/642 +f 1325/644 1324/1924 1323/645 +f 1244/625 1245/627 1332/647 +f 1290/649 1234/619 1242/650 +f 1337/652 1330/648 1339/653 +f 1330/648 1332/647 1340/655 +f 1341/656 1332/647 1245/627 +f 1342/658 1340/655 1332/647 +f 1343/659 1342/658 1341/656 +f 1344/660 1341/656 1294/657 +f 3857/662 3856/666 1345/663 +f 1347/665 1348/669 1342/658 +f 3856/666 3854/1793 1295/667 +f 1349/668 1350/704 1348/669 +f 3861/670 1349/668 1347/665 +f 3853/671 1347/665 1343/659 +f 906/466 898/471 829/453 +f 905/673 906/466 1353/672 +f 865/675 1356/679 1355/676 +f 1355/676 1353/672 829/453 +f 1357/677 1355/676 825/455 +f 1356/679 1354/674 1353/672 +f 3834/680 1337/652 1338/654 +f 866/682 1358/683 1356/679 +f 1358/683 1359/706 1354/674 +f 3828/684 878/626 1337/652 +f 838/685 1362/2226 1012/521 +f 1363/687 1364/2125 1338/654 +f 1368/688 1363/687 1339/653 +f 1348/669 1368/688 1340/655 +f 1371/689 1369/699 3824/690 +f 524/692 525/1846 526/693 +f 528/695 529/2725 530/696 +f 1372/698 1370/705 1369/699 +f 535/700 536/2750 537/701 +f 1367/703 1368/688 1348/669 +f 1370/705 1354/674 1359/706 +f 1369/699 1359/706 3833/681 +f 1373/707 910/712 903/473 +f 555/708 556/1166 557/709 +f 1374/711 909/1139 910/712 +f 1373/707 904/467 905/673 +f 1374/711 1373/707 1375/713 +f 1375/713 905/673 1354/674 +f 1378/715 920/526 908/716 +f 376/718 373/2850 651/371 +f 1388/720 928/2032 973/506 +f 586/721 587/1197 588/722 +f 1390/724 1389/2037 1388/720 +f 1391/725 1388/720 969/504 +f 597/726 598/2341 599/727 +f 600/729 601/966 602/730 +f 1392/732 1391/725 970/503 +f 608/734 609/770 610/735 +f 612/737 524/692 523/694 +f 614/739 615/1156 616/740 +f 617/742 618/2279 25/25 +f 620/743 621/1309 622/744 +f 1397/746 1390/724 1391/725 +f 1398/747 1397/746 1392/732 +f 1400/749 987/1613 920/526 +f 1402/750 1403/760 1165/584 +f 636/751 637/2749 638/752 +f 930/754 1404/759 1402/750 +f 641/756 642/2787 643/757 +f 929/755 1402/750 1169/583 +f 1404/759 1405/844 1403/760 +f 1411/761 1366/2126 1365/762 +f 1412/764 1406/763 1415/765 +f 652/767 653/869 654/768 +f 609/770 656/1206 657/771 +f 659/773 660/2839 661/774 +f 558/710 663/1020 586/721 +f 665/776 666/2278 667/777 +f 1413/779 1411/761 1406/763 +f 1418/780 1416/784 3810/781 +f 1417/783 1372/698 1371/689 +f 1416/784 1371/689 3829/691 +f 1419/785 1417/783 1416/784 +f 1386/786 909/1139 1374/711 +f 677/788 678/3199 679/789 +f 1385/791 1386/786 1420/787 +f 1420/787 1374/711 1376/714 +f 685/794 686/856 687/795 +f 1421/792 1420/787 1422/793 +f 528/695 690/905 691/798 +f 1422/793 1376/714 1372/698 +f 1018/530 1019/529 747/799 +f 699/801 700/808 701/802 +f 1424/804 578/342 579/344 +f 3663/805 1597/1397 1767/806 +f 700/808 699/801 711/809 +f 1093/811 3746/3924 3754/812 +f 717/814 718/1948 530/696 +f 721/816 722/2722 723/817 +f 704/389 1650/2184 3543/819 +f 19/11 1429/823 1426/821 +f 1429/823 1428/2168 1427/824 +f 1658/825 1659/1576 3542/826 +f 1439/828 1437/831 1434/829 +f 1437/831 1436/963 1435/832 +f 1426/821 1427/824 1433/833 +f 1442/835 1431/1886 1432/834 +f 742/837 743/1237 744/838 +f 1432/834 1433/833 1441/840 +f 751/841 752/2816 753/842 +f 1438/830 1434/829 1403/760 +f 
[... several thousand Wavefront OBJ face records elided: raw mesh geometry of the form "f v/vt v/vt v/vt" (1-based vertex and texture-coordinate index pairs) belonging to a model file added in this diff. The original patch lines were fused together during extraction ("+f" markers run inline); the block contains no hand-editable content and is collapsed here for readability ...]
888/1919 2976/3404 +f 1933/3405 1932/1990 2980/3195 +f 2399/3407 2424/2499 2418/2490 +f 2348/2431 2342/2426 2343/2427 +f 2322/2406 2320/2403 2319/2638 +f 2718/3032 2717/2893 2874/3061 +f 2709/2905 2729/2903 2873/3062 +f 2894/3081 2871/3762 1361/1440 +f 346/384 289/175 176/177 +f 1876/1724 1877/1723 1882/1920 +f 2802/3408 2803/3412 3607/3409 +f 3104/3352 3144/3923 3088/3334 +f 2974/3312 2222/3311 3649/2936 +f 3089/3332 3145/3411 2761/2933 +f 3088/3334 3144/3923 3145/3411 +f 2803/3412 2974/3312 3603/3410 +f 3870/2095 3909/2181 692/379 +f 2123/2061 1419/785 1418/780 +f 1365/762 1363/687 1368/688 +f 1581/1380 1766/1597 1782/1696 +f 3569/1337 2202/2277 2129/2176 +f 3555/2128 2003/2065 2002/2245 +f 3146/3413 2585/2677 2551/2644 +f 3751/3415 3146/3413 3147/3414 +f 3742/3417 3149/3418 3146/3413 +f 3149/3418 2584/2676 2585/2677 +f 3196/3419 3198/2236 3197/3420 +f 3604/3421 2192/2248 2191/2249 +f 3142/3402 3150/3423 2556/2647 +f 3777/3422 3778/3425 3150/3423 +f 3646/3424 3152/3428 2550/2641 +f 3150/3423 3151/3426 2555/2648 +f 3778/3425 3780/3922 3151/3426 +f 3788/3427 3781/3430 3152/3428 +f 3153/3429 3147/3414 2551/2644 +f 3152/3428 3153/3429 2549/2642 +f 3781/3430 3776/3431 3153/3429 +f 3776/3431 3750/3416 3147/3414 +f 3148/3432 2767/3486 2206/2269 +f 2183/2886 2401/2885 2321/3355 +f 2132/3433 2960/3180 2928/3296 +f 3684/3434 3685/3936 2204/3435 +f 3719/3436 2198/2253 2197/2255 +f 2594/2689 2962/3297 2773/2953 +f 2595/2690 2594/2689 2951/3438 +f 1870/1713 798/1873 799/1716 +f 3713/3439 2179/2217 2178/3440 +f 3706/3441 2178/3440 3736/3442 +f 336/510 3154/3446 3774/3444 +f 339/512 3155/3935 3154/3446 +f 3789/3447 3683/3448 3149/3418 +f 3683/3448 3697/2678 2584/2676 +f 2963/3167 2962/3297 3048/3304 +f 3722/3445 3730/3455 337/3449 +f 339/512 338/511 2566/2658 +f 3156/3450 3157/3451 2168/2209 +f 2610/3299 3156/3450 2167/3295 +f 2954/3301 2953/3157 3156/3450 +f 3157/3451 3156/3450 2953/3157 +f 3707/3452 2166/2208 2169/2210 +f 2567/2660 3158/3945 3159/3454 +f 337/3449 3158/3945 2567/2660 +f 3730/3455 3069/3294 2166/2208 +f 3764/3456 2190/2247 1983/2054 +f 2579/2671 3160/3457 3081/3320 +f 2568/2662 3159/3454 3160/3457 +f 3709/3453 2169/2210 2190/2247 +f 3065/3286 3062/3284 3162/3458 +f 3161/3459 3162/3458 1984/2052 +f 3157/3451 3161/3459 2189/2246 +f 2257/2342 3065/3286 3161/3459 +f 734/400 735/399 1922/3460 +f 3059/3282 3058/3279 2708/2887 +f 3163/3461 2708/2887 2182/2219 +f 3162/3458 3163/3461 1985/2224 +f 3062/3284 3059/3282 3163/3461 +f 3203/3462 3206/3566 3205/3463 +f 2122/2060 2119/2059 1448/852 +f 3817/886 1686/1158 1685/1160 +f 139/95 140/94 444/2847 +f 1570/1745 569/437 566/1320 +f 2957/3160 3164/3467 3165/3466 +f 2955/3158 3166/3479 3164/3467 +f 1912/1758 454/275 341/499 +f 3130/3381 3129/3380 3124/3375 +f 1548/2355 322/1860 319/1861 +f 1949/2204 1950/2024 1327/1729 +f 2969/3176 2970/3175 3078/3469 +f 3167/3470 3078/3469 3169/3471 +f 3724/3473 2014/2074 2017/2076 +f 3720/3474 2017/2076 2033/2098 +f 3168/3472 3169/3471 3165/3466 +f 3170/3475 3165/3466 3164/3467 +f 3779/3477 3170/3475 3171/3476 +f 3724/3473 3168/3472 3170/3475 +f 3171/3476 3164/3467 3166/3479 +f 3172/3480 3166/3479 3071/3300 +f 3752/3482 3172/3480 3173/3481 +f 3756/3478 3171/3476 3172/3480 +f 3169/3471 3037/3258 3036/3257 +f 2593/3306 3048/3304 2962/3297 +f 3173/3481 3071/3300 2609/2708 +f 3174/3484 2609/2708 2199/2710 +f 3783/3485 3174/3484 2198/2253 +f 3749/3483 3173/3481 3174/3484 +f 2190/2247 2189/2246 1984/2052 +f 2196/2252 2195/2251 2767/3486 +f 2119/2059 2120/2085 1414/766 +f 1464/947 1463/3868 1466/891 +f 2199/2710 
2611/2709 3069/3294 +f 2457/2713 2456/2536 2587/3314 +f 2226/2294 2225/2293 2562/3315 +f 481/404 478/362 1627/1409 +f 333/1261 1512/1053 235/1052 +f 3078/3469 3051/3272 3037/3258 +f 3040/3261 2264/2352 2997/3212 +f 2219/2287 1934/1991 1933/3405 +f 2385/2468 2384/2467 2497/3172 +f 221/2566 2220/2286 3175/3487 +f 2220/2286 1933/3405 3143/3406 +f 3175/3487 3143/3406 2976/3404 +f 2484/2564 3175/3487 2975/3188 +f 2954/3301 3071/3300 3166/3479 +f 2630/2737 1858/1908 110/152 +f 1547/2838 319/1861 1952/1971 +f 2091/3489 2090/2143 2188/3118 +f 3733/3490 3736/3442 2178/3440 +f 1918/1760 1919/1756 1927/1773 +f 3563/3491 112/153 111/3492 +f 955/1295 956/3185 960/3494 +f 2037/3495 3014/3230 112/153 +f 3143/3406 2980/3195 2981/3197 +f 2218/2285 2211/2280 2214/2282 +f 672/3186 3877/1202 3787/1204 +f 2152/3496 3013/3229 3014/3230 +f 1147/586 1144/578 1143/1862 +f 2248/2328 2246/2325 1409/1480 +f 1615/1721 1636/2331 1635/1703 +f 1410/1481 1409/1480 2246/2325 +f 3241/3497 3243/3534 3242/3498 +f 1382/2307 3176/3500 3177/3499 +f 3177/3499 1410/1481 2247/2324 +f 2247/2324 1383/1465 3177/3499 +f 3176/3500 2238/2306 2230/2301 +f 1410/1481 3177/3499 3179/3502 +f 3179/3502 3180/3765 2243/2313 +f 1633/1700 3181/3503 2800/2978 +f 1621/2326 1620/1688 2758/3104 +f 1622/1689 1621/2326 2245/2323 +f 2236/2304 1394/1470 3181/3503 +f 2237/2305 2236/2304 3182/3504 +f 2250/2329 1631/1698 2233/2302 +f 2912/3109 1629/1697 1628/1699 +f 135/1847 136/429 1564/1353 +f 1567/1437 419/252 512/309 +f 1085/1118 1084/1117 2653/2775 +f 3184/3506 869/926 868/925 +f 869/926 3184/3506 3057/3507 +f 1266/1303 1268/1302 1098/1132 +f 1267/1301 2147/2188 722/2722 +f 937/976 1146/1188 2646/2764 +f 2146/2298 1049/1078 722/2722 +f 2211/2280 2218/2285 921/960 +f 1957/2870 510/985 507/1359 +f 1313/1370 1312/1369 2264/2352 +f 1496/995 362/205 1493/1075 +f 2887/3074 2937/3137 2933/3146 +f 1687/3508 1688/3953 1659/1576 +f 145/1011 1499/1010 1498/1223 +f 37/19 38/21 418/1028 +f 1935/1992 1934/1991 3185/3509 +f 104/2810 89/67 301/438 +f 2101/2018 2098/2012 2083/1998 +f 543/1800 298/1407 360/1040 +f 1934/1991 2219/2287 3186/3510 +f 234/1055 1521/1077 212/173 +f 3845/2090 3821/1668 1822/1666 +f 1976/3511 1875/2146 1874/1715 +f 223/308 3186/3510 2219/2287 +f 3655/3512 3656/3605 3605/3513 +f 2165/3515 2164/2214 467/410 +f 467/410 468/287 2165/3515 +f 773/421 476/2205 2165/3515 +f 1935/1992 1212/1791 1728/3193 +f 3240/2319 3225/2288 3226/2290 +f 3261/3516 3249/5095 3250/3517 +f 3215/2241 3216/2243 3244/3519 +f 3265/3521 3221/2265 3215/2241 +f 3267/3522 3245/2332 3198/2236 +f 3205/3463 3206/3566 3269/3523 +f 3214/3525 3211/2238 3265/3521 +f 3271/3527 3213/2239 3214/3525 +f 3251/3529 3252/3568 3239/3530 +f 3222/3531 3221/2265 3266/3532 +f 3232/2318 3235/2320 3234/2322 +f 3216/2243 3242/3498 3243/3534 +f 3196/3419 3193/2232 3268/3535 +f 3244/3519 3243/3534 3254/3536 +f 3238/2316 3239/3530 3247/2950 +f 3264/3520 3244/3519 3255/3524 +f 3253/3537 3242/3498 3216/2243 +f 3211/2238 3266/3532 3221/2265 +f 3257/3538 3217/2242 3218/2266 +f 3258/3540 3222/3531 3272/3533 +f 3274/3542 3275/3548 3280/3543 +f 3228/3545 3229/3560 3262/3546 +f 3275/3548 3231/3567 3281/3549 +f 3226/2290 3227/2289 3273/3544 +f 3236/2315 3263/3547 3260/3518 +f 3281/3549 3263/3547 3236/2315 +f 3287/1578 3271/3527 3192/3528 +f 3235/2320 3226/2290 3280/3543 +f 3234/2322 3280/3543 3281/3549 +f 3288/3551 3290/3561 3259/3541 +f 3260/3518 3250/3517 3276/3552 +f 3289/3553 3212/2240 3278/3554 +f 3210/3556 3209/2938 3224/2940 +f 3199/2237 3202/2948 3201/3558 +f 3228/3545 3231/3567 3230/3559 +f 3290/3561 
3288/3551 3285/3562 +f 3282/1580 3279/3571 3271/3527 +f 3260/3518 3263/3547 3262/3546 +f 3243/3534 3241/3497 3268/3535 +f 3267/3522 3268/3535 3241/3497 +f 3194/2234 3207/3575 3208/3564 +f 3191/3565 3270/3526 3269/3523 +f 3270/3526 3191/3565 3192/3528 +f 3275/3548 3274/3542 3230/3559 +f 3264/3520 3269/3523 3270/3526 +f 3252/3568 3251/3529 3276/3552 +f 3248/3570 3252/3568 3291/3569 +f 3213/2239 3271/3527 3279/3571 +f 3272/3533 3266/3532 3289/3553 +f 3277/3572 3278/3554 3212/2240 +f 3228/3545 3263/3547 3281/3549 +f 3220/3573 3225/2288 3253/3537 +f 3219/3574 3253/3537 3217/2242 +f 3256/3539 3218/2266 3222/3531 +f 3247/2950 3239/3530 3252/3568 +f 3245/2332 3267/3522 3232/2318 +f 3266/3532 3211/2238 3212/2240 +f 3208/3564 3205/3463 3255/3524 +f 3288/3551 3289/3553 3284/3555 +f 3204/3464 3205/3463 3208/3564 +f 560/2229 1899/3128 3187/3576 +f 3209/2938 3210/3556 3201/3558 +f 3189/3550 3192/3528 3191/3565 +f 448/1848 1980/1838 807/450 +f 647/367 648/366 122/980 +f 1833/1679 1848/1676 1780/1747 +f 510/985 509/984 508/1360 +f 3210/3556 1949/2204 1887/3468 +f 1887/3468 15/8 3200/2235 +f 15/8 16/2228 3197/3420 +f 16/2228 3188/2227 3195/2233 +f 3195/2233 3188/2227 3187/3576 +f 3194/2234 3187/3576 1899/3128 +f 3207/3575 1899/3128 375/3578 +f 387/227 173/116 126/85 +f 3189/3550 1791/2206 1760/2207 +f 3577/3579 763/2211 757/406 +f 763/2211 3577/3579 2172/2106 +f 3583/3581 3582/3606 3580/3582 +f 3581/3583 3580/3582 3579/3584 +f 3594/3585 3597/3642 3596/3586 +f 3400/3588 3601/3596 3602/3589 +f 3626/3591 3624/3664 3625/3592 +f 3631/3594 3635/3615 3599/3595 +f 3636/3597 3637/3608 3595/3587 +f 3647/3598 3584/3602 3588/3599 +f 3650/3601 3582/3606 3584/3602 +f 3653/3603 3579/3584 3580/3582 +f 3653/3603 2176/2212 3654/1775 +f 1801/1633 1864/950 3656/3605 +f 3651/3604 3580/3582 3582/3606 +f 3644/3607 3638/3631 3595/3587 +f 1767/806 1768/1596 3659/3609 +f 1597/1397 3663/805 3661/2103 +f 1708/1524 1707/1523 3665/3143 +f 3671/2258 3668/2257 3667/3610 +f 3666/3611 3667/3610 3673/3612 +f 1588/1526 1708/1524 3664/2256 +f 2080/2338 3671/2258 3666/3611 +f 2082/2006 3666/3611 3672/3613 +f 3674/3614 3677/3616 3635/3615 +f 3677/3616 3678/3628 3636/3597 +f 3682/3617 3647/3598 3645/3600 +f 3692/3619 3651/3604 3650/3601 +f 3681/3618 3645/3600 3644/3607 +f 786/1499 3694/3622 3692/3619 +f 3694/3622 3653/3603 3651/3604 +f 785/1500 3692/3619 3686/3620 +f 497/3623 3686/3620 3682/3617 +f 650/372 3682/3617 3681/3618 +f 3696/3624 564/370 3681/3618 +f 3405/3625 3606/3659 3605/3513 +f 3702/3627 3699/3629 3678/3628 +f 3699/3629 3698/3639 3679/3630 +f 3645/3600 3588/3599 3638/3631 +f 1799/1635 3657/3655 3659/3609 +f 3708/3632 3704/3658 3458/3633 +f 3462/3635 3710/3638 3708/3632 +f 3714/577 463/576 564/370 +f 3615/3636 3606/3659 3602/3589 +f 3710/3638 3714/577 3696/3624 +f 3635/3615 3636/3597 3596/3586 +f 3629/3640 3626/3591 3627/3593 +f 3597/3642 3600/3637 3599/3595 +f 3630/3641 3716/3647 3715/3643 +f 3627/3593 3625/3592 3597/3642 +f 3717/3644 3716/3647 3723/3645 +f 3715/3643 3663/805 3660/807 +f 3716/3647 3717/3644 3725/2102 +f 3668/2257 3665/3143 3661/2103 +f 3668/2257 3725/2102 3717/3644 +f 3667/3610 3717/3644 3718/3646 +f 3727/3648 3728/3649 3673/3612 +f 3728/3649 1576/2019 3672/3613 +f 3726/3650 3727/3648 3718/3646 +f 1576/2019 2101/2018 2084/1997 +f 3727/3648 3726/3650 3731/3651 +f 3203/3462 376/718 1387/719 +f 3190/3577 1387/719 1791/2206 +f 3283/3557 2152/2203 1949/2204 +f 317/183 240/190 259/151 +f 2000/1868 2176/2212 3653/3603 +f 3601/3596 3599/3595 3600/3637 +f 3589/3653 3588/3599 3584/3602 +f 3577/3579 758/1497 756/426 +f 
3585/3654 3584/3602 3582/3606 +f 3678/3628 3679/3630 3637/3608 +f 3204/3464 375/3578 376/718 +f 1801/1633 3655/3512 3657/3655 +f 3729/3652 3735/3657 3728/3649 +f 1575/1378 3735/3657 3738/3656 +f 3735/3657 1575/1378 1576/2019 +f 1776/1630 1795/1629 1796/1637 +f 1293/661 1294/657 1290/649 +f 1326/2200 14/9 1327/1729 +f 1294/657 1245/627 1234/619 +f 878/626 1244/625 1330/648 +f 497/3623 373/2850 374/2231 +f 122/980 648/366 413/240 +f 3699/3629 3702/3627 3704/3658 +f 3606/3659 3405/3625 3401/3590 +f 3708/3632 3710/3638 3698/3639 +f 3464/575 3714/577 3710/3638 +f 3704/3658 3674/3614 3443/3660 +f 439/271 398/1539 399/1352 +f 1512/1053 1528/1099 1526/1098 +f 1668/1475 1741/1563 1738/1567 +f 839/3661 849/1431 1362/2226 +f 1362/2226 1357/677 826/678 +f 3843/1428 740/1934 1896/1589 +f 2053/2009 2054/2008 2118/885 +f 3888/2118 3887/2086 2120/2085 +f 696/1234 697/1233 3816/3662 +f 1826/2773 1878/1731 639/1453 +f 3740/3663 3739/3666 3729/3652 +f 956/3185 885/463 886/485 +f 1980/1838 806/442 1922/3460 +f 33/18 34/17 105/111 +f 1343/659 1344/660 3856/666 +f 3907/2069 2127/2068 2128/2073 +f 1766/1597 1581/1380 1582/1382 +f 14/9 1326/2200 2163/2197 +f 595/354 98/72 99/1067 +f 3233/2321 3238/2316 3246/2949 +f 1346/664 1035/534 1044/535 +f 748/1906 1888/1898 1425/800 +f 3193/2232 3208/3564 3254/3536 +f 3190/3577 3191/3565 3206/3566 +f 3612/3514 3605/3513 3606/3659 +f 3624/3664 3612/3514 3615/3636 +f 3631/3594 3415/3665 3443/3660 +f 3679/3630 3680/3621 3644/3607 +f 3686/3620 3650/3601 3647/3598 +f 3702/3627 3677/3616 3674/3614 +f 3625/3592 3615/3636 3600/3637 +f 3601/3596 3400/3588 3415/3665 +f 3739/3666 3738/3656 3735/3657 +f 3630/3641 3627/3593 3594/3585 +f 3741/3667 3723/3645 3716/3647 +f 1284/1792 1290/649 1288/651 +f 3698/3639 3696/3624 3680/3621 +f 1778/1616 1781/1615 1780/1747 +f 1792/2099 1759/3895 1584/1631 +f 695/2132 570/347 571/346 +f 3638/3631 3741/3667 3594/3585 +f 3741/3667 3638/3631 3588/3599 +f 3247/2950 3248/3570 3209/2938 +f 3292/3668 3293/3742 3294/3669 +f 3296/3671 3297/3739 3298/3672 +f 3299/3673 3298/3672 3300/3674 +f 3302/3676 3303/3684 3304/3677 +f 3306/3679 3307/3682 3308/3680 +f 3307/3682 3310/3740 3311/3683 +f 3303/3684 3302/3676 3301/3675 +f 3312/3685 3305/3678 3304/3677 +f 3314/3687 3315/3755 3316/3688 +f 3318/3690 3319/3711 3320/3691 +f 3324/3693 3318/3690 3321/3692 +f 3326/3695 3327/3723 3328/3696 +f 3329/3698 3330/5096 3331/3699 +f 3333/3701 3334/3713 3335/3702 +f 3337/3704 3338/3712 3339/3705 +f 3333/3701 3336/3703 3341/3707 +f 3342/3708 3341/3707 3343/3709 +f 3319/3711 3345/3730 3344/3710 +f 3338/3712 3335/3702 3334/3713 +f 3346/3714 3301/3675 3302/3676 +f 3305/3678 3348/3720 3347/3715 +f 3312/3685 3224/3760 3223/3716 +f 3350/3718 3337/3704 3340/3706 +f 3312/3685 3349/3717 3348/3720 +f 3352/3721 3343/3709 3341/3707 +f 3311/3683 3353/3738 3354/3722 +f 3341/3707 3336/3703 3327/3723 +f 3359/3724 3360/3727 3361/3725 +f 3360/3727 3359/3724 3250/3728 +f 3318/3690 3363/3741 3345/3730 +f 3364/3731 3363/3741 3318/3690 +f 3314/3687 3364/3731 3324/3693 +f 3366/3733 3299/3673 3301/3675 +f 3299/3673 3366/3733 3367/3734 +f 3308/3680 3354/3722 3368/3735 +f 3369/3736 3317/3689 3295/3670 +f 3317/3689 3369/3736 3364/3731 +f 3370/3737 3295/3670 3317/3689 +f 3325/3694 3371/3747 3365/3732 +f 3352/3721 3320/3691 3319/3711 +f 3296/3671 3367/3734 3353/3738 +f 3297/3739 3296/3671 3311/3683 +f 3363/3741 3368/3735 3354/3722 +f 3345/3730 3354/3722 3353/3738 +f 3344/3710 3353/3738 3367/3734 +f 3366/3733 3333/3701 3342/3708 +f 3347/3715 3339/3705 3334/3713 +f 3339/3705 3347/3715 3348/3720 +f 3348/3720 
3349/3717 3351/3719 +f 3363/3741 3364/3731 3369/3736 +f 3294/3669 3309/3681 3368/3735 +f 3293/3742 3306/3679 3309/3681 +f 3331/3699 3373/3749 3374/3743 +f 3346/3714 3334/3713 3333/3701 +f 3323/3697 3322/3748 3352/3721 +f 3356/3744 3355/3745 3321/3692 +f 3355/3745 3357/3746 3325/3694 +f 3357/3746 3358/3758 3371/3747 +f 3322/3748 3356/3744 3320/3691 +f 3361/3725 3330/5096 3329/3698 +f 3373/3749 3372/3752 3375/3750 +f 3374/3743 3375/3750 3376/3751 +f 3329/3698 3332/3700 3376/3751 +f 3327/3723 3375/3750 3372/3752 +f 3336/3703 3335/3702 3375/3750 +f 3335/3702 3338/3712 3376/3751 +f 3376/3751 3338/3712 3337/3704 +f 3337/3704 3350/3718 3359/3724 +f 3359/3724 3350/3718 3276/3753 +f 3377/3754 3292/3668 3295/3670 +f 3277/3572 3316/3688 3315/3755 +f 3371/3747 3378/3757 3379/3756 +f 3378/3757 3371/3747 3358/3758 +f 3365/3732 3379/3756 3315/3755 +f 3316/3688 3277/3572 3279/3571 +f 3282/1580 3377/3754 3370/3737 +f 3313/3686 3283/3763 3224/3760 +f 3378/3757 3285/3562 3284/3555 +f 3380/3759 3286/3563 3285/3562 +f 3379/3756 3284/3555 3278/3554 +f 3276/3753 3350/3718 3351/3719 +f 3291/3761 3351/3719 3349/3717 +f 3313/3686 3304/3677 2965/3170 +f 2965/3170 3304/3677 3303/3684 +f 222/307 3303/3684 3300/3674 +f 223/308 3300/3674 3298/3672 +f 3298/3672 3297/3739 3185/3509 +f 3297/3739 3310/3740 2977/3189 +f 3310/3740 3307/3682 1213/1242 +f 3307/3682 3306/3679 1214/1243 +f 2532/3080 1214/1243 3306/3679 +f 2894/3081 2532/3080 3293/3742 +f 2871/3762 2894/3081 3292/3668 +f 3253/3537 3225/2288 3240/2319 +f 3283/3763 3313/3686 3013/3229 +f 1997/1865 1996/1866 1889/1739 +f 1237/1257 745/839 744/838 +f 1535/1782 1534/1784 120/1816 +f 1531/1119 1532/1367 201/137 +f 1119/1151 1241/1262 1032/1066 +f 946/478 948/477 824/489 +f 163/213 8/4 5/6 +f 3589/3653 3726/3650 3723/3645 +f 1155/573 1156/572 1162/581 +f 427/280 213/132 327/1304 +f 553/1454 1902/1779 796/1498 +f 3381/3764 2232/2300 2231/2303 +f 3381/3764 3178/3501 2230/2301 +f 2237/2305 3183/3505 3180/3765 +f 3383/3766 3384/3769 3385/3767 +f 3384/3769 3386/3771 3387/3770 +f 3386/3771 3388/3773 3389/3772 +f 3388/3773 3390/3799 3391/3774 +f 3392/3775 3393/3794 3394/3776 +f 3395/3777 3394/3776 3396/3778 +f 3398/3780 3399/3781 3397/3779 +f 3400/3588 3401/3590 3399/3781 +f 3403/3782 3404/3784 3405/3625 +f 3406/3783 3407/3786 3404/3784 +f 3408/3785 3409/3788 3407/3786 +f 3410/3787 3411/3790 3409/3788 +f 3412/3789 3413/3836 3411/3790 +f 3415/3665 3400/3588 3398/3780 +f 3414/3791 3398/3780 3396/3778 +f 3416/3792 3396/3778 3394/3776 +f 3417/3793 3394/3776 3393/3794 +f 3393/3794 3419/3797 3420/3796 +f 3419/3797 3390/3799 3421/3798 +f 3390/3799 3388/3773 3422/3800 +f 3388/3773 3386/3771 3423/3801 +f 3386/3771 3384/3769 3424/3802 +f 3384/3769 3383/3766 3425/3803 +f 3425/3803 3383/3766 3426/3804 +f 2904/3094 3427/3864 3428/3805 +f 3429/3806 3427/3864 2904/3094 +f 3430/3808 3429/3806 2902/3807 +f 2875/3066 3431/3840 3430/3808 +f 3432/3809 3433/3841 2733/2909 +f 2825/3009 3434/3816 3435/3810 +f 3437/3811 3438/3843 3439/3812 +f 3441/3814 3437/3811 3436/3813 +f 2724/2898 2723/2897 3434/3816 +f 3436/3813 3439/3812 3115/3364 +f 3440/3815 3436/3813 3117/3367 +f 3443/3660 3415/3665 3414/3791 +f 3442/3817 3414/3791 3416/3792 +f 3444/3818 3416/3792 3417/3793 +f 3445/3819 3417/3793 3418/3795 +f 3446/3820 3418/3795 3420/3796 +f 3420/3796 3421/3798 3448/3822 +f 3421/3798 3422/3800 3449/3823 +f 3422/3800 3423/3801 3450/3824 +f 3423/3801 3424/3802 3451/3825 +f 3424/3802 3425/3803 3452/3826 +f 3425/3803 3184/3506 3056/3277 +f 3451/3825 3452/3826 2253/2334 +f 3450/3824 3451/3825 2252/2336 +f 
3449/3823 3450/3824 1727/1790 +f 3448/3822 3449/3823 2150/2273 +f 3453/3827 3447/3821 3448/3822 +f 3454/3828 3446/3820 3447/3821 +f 3455/3829 3445/3819 3446/3820 +f 3456/3830 3444/3818 3445/3819 +f 3442/3817 3444/3818 3456/3830 +f 3443/3660 3442/3817 3457/3831 +f 3459/3832 3460/3634 3458/3633 +f 3457/3831 3456/3830 3455/3829 +f 3459/3832 3455/3829 3454/3828 +f 3462/3635 3460/3634 3459/3832 +f 3461/3833 3454/3828 3453/3827 +f 3464/575 3462/3635 3461/3833 +f 3463/3834 3453/3827 1941/2194 +f 3404/3784 3399/3781 3401/3590 +f 3407/3786 3397/3779 3399/3781 +f 3409/3788 3395/3777 3397/3779 +f 3411/3790 3392/3775 3395/3777 +f 3465/3835 3466/3839 3413/3836 +f 3467/3837 3468/3844 3469/3838 +f 3465/3835 3412/3789 3431/3840 +f 3470/3842 3467/3837 3466/3839 +f 3438/3843 3470/3842 3432/3809 +f 3467/3837 3470/3842 3438/3843 +f 3468/3844 3467/3837 3437/3811 +f 3471/3845 3469/3838 3468/3844 +f 3472/3846 3468/3844 3441/3814 +f 3473/3847 3441/3814 3440/3815 +f 2712/3848 3440/3815 3119/3369 +f 3475/3849 3471/3845 3472/3846 +f 3474/3850 3472/3846 3473/3847 +f 3476/3851 3473/3847 2712/3848 +f 3477/3852 3476/3851 2711/2888 +f 3478/3853 3474/3850 3476/3851 +f 3479/3854 3475/3849 3474/3850 +f 3392/3775 3411/3790 3413/3836 +f 3466/3839 3469/3838 3480/3855 +f 3419/3797 3393/3794 3392/3775 +f 3390/3799 3419/3797 3480/3855 +f 3469/3838 3471/3845 3391/3774 +f 3389/3772 3391/3774 3471/3845 +f 3387/3770 3389/3772 3475/3849 +f 3385/3767 3387/3770 3479/3854 +f 3482/3857 3477/3852 2400/2478 +f 3483/3858 3478/3853 3477/3852 +f 3481/3856 3479/3854 3478/3853 +f 3484/3859 3482/3857 2402/2477 +f 3058/3279 3484/3859 2707/2884 +f 3061/3281 3485/3860 3484/3859 +f 3485/3860 3483/3858 3482/3857 +f 3382/3768 3385/3767 3481/3856 +f 3486/3861 3481/3856 3483/3858 +f 3426/3804 3383/3766 3382/3768 +f 3487/3862 3382/3768 3486/3861 +f 3486/3861 3485/3860 3061/3281 +f 1961/2036 3057/3507 3487/3862 +f 3057/3507 3184/3506 3426/3804 +f 3428/3805 3403/3782 3402/3626 +f 3427/3864 3406/3783 3403/3782 +f 3429/3806 3408/3785 3406/3783 +f 3430/3808 3410/3787 3408/3785 +f 3431/3840 3412/3789 3410/3787 +f 3470/3842 3465/3835 3433/3841 +f 3434/3816 3439/3812 3438/3843 +f 2723/2897 3115/3364 3439/3812 +f 2875/3066 2733/2909 3433/3841 +f 2736/2908 2824/3006 3435/3810 +f 2947/3152 3428/3805 3488/3863 +f 3464/575 3463/3834 1360/2001 +f 1900/1742 1925/1768 1924/1781 +f 1736/1561 1717/1559 1718/1841 +f 1955/3488 1952/1971 1943/3865 +f 1671/1474 1738/1567 1735/1560 +f 1696/1506 1750/1591 1698/1516 +f 1800/1634 1865/948 1864/950 +f 2063/1957 2055/1945 2056/1947 +f 1737/1568 1734/1558 1717/1559 +f 1751/1592 1735/1560 1697/1562 +f 943/2088 3885/1392 3899/1394 +f 3656/3605 3488/3863 3402/3626 +f 1577/1531 1578/1601 1764/1605 +f 3917/1606 1598/1396 1599/1525 +f 1945/3866 146/557 82/61 +f 68/49 6/5 7/2726 +f 147/97 1542/1179 1543/1189 +f 1109/544 1135/2261 1133/558 +f 1503/2788 1910/1831 1907/1832 +f 1499/1010 1501/1007 1909/1814 +f 452/274 1922/3460 735/399 +f 1824/1670 2068/2718 3835/3867 +f 1825/1671 2067/1960 2068/2718 +f 1463/3868 1462/1989 1022/532 +f 841/686 1012/521 1009/522 +f 1568/1783 1547/2838 1955/3488 +f 1910/1831 1503/2788 1502/2763 +f 1939/3869 1502/2763 422/256 +f 309/1031 280/167 1940/1787 +f 1804/1638 1851/959 1866/3870 +f 269/163 1482/971 1954/1799 +f 2065/1969 1607/1406 2060/2882 +f 3812/3465 1685/1160 1758/3871 +f 280/167 276/169 552/1276 +f 1875/2146 1873/1712 1869/1714 +f 1739/1582 1721/1557 1734/1558 +f 1778/1616 1779/1675 1790/1678 +f 310/184 1920/1755 1915/1757 +f 1909/1814 1908/1813 1906/1749 +f 1789/1621 1783/1614 1778/1616 +f 978/509 
981/508 980/1401 +f 1584/1631 1759/3895 1731/1549 +f 425/1057 426/1056 311/1436 +f 148/96 1543/1189 115/80 +f 2044/1720 3897/1719 3894/2017 +f 277/242 1910/1831 1939/3869 +f 1943/3865 1946/2100 1945/3866 +f 1461/936 1462/1989 1463/3868 +f 2253/2334 3452/3826 3056/3277 +f 2981/3197 2254/3196 3489/3872 +f 2209/2275 2998/3216 3489/3872 +f 2254/3196 2253/2334 3055/3278 +f 2998/3216 926/965 958/999 +f 2826/3010 2817/3874 2721/2896 +f 3490/3873 2739/3877 2721/2896 +f 2816/2997 3491/3879 3490/3873 +f 2334/3875 2297/2384 2296/2383 +f 2298/2388 2297/2384 2334/3875 +f 2301/2396 2298/2388 2336/2419 +f 2311/2398 2301/2396 2338/2421 +f 2312/2397 2311/2398 2354/2437 +f 2359/2443 2358/2442 2361/2448 +f 2335/2418 2296/2383 2359/2443 +f 2414/3876 2413/2486 2365/2447 +f 2413/2486 2270/2359 2361/2448 +f 2355/2438 2352/2434 2540/2631 +f 2541/2632 2540/2631 2539/2630 +f 2722/3365 2721/2896 2739/3877 +f 3114/3366 2722/3365 2738/2910 +f 3110/3359 3112/3362 3121/3372 +f 3108/3357 3110/3359 3120/3371 +f 3122/3373 1754/1801 3127/3377 +f 3112/3362 3122/3373 3126/3376 +f 3127/3377 1754/1801 1753/1803 +f 3139/3878 3133/3386 2529/2619 +f 3744/3396 3775/3387 3133/3386 +f 3490/3873 2789/2968 2740/2911 +f 3491/3879 2790/2969 2789/2968 +f 228/144 229/143 91/2877 +f 245/1786 244/318 243/340 +f 248/385 7/2726 247/339 +f 2/2 297/1291 296/1187 +f 145/1011 146/557 300/2000 +f 327/1304 213/132 210/134 +f 329/332 332/3222 331/1091 +f 239/1191 241/187 240/190 +f 373/2850 376/718 375/3578 +f 178/2199 177/1889 176/177 +f 1218/606 1225/609 1228/623 +f 3862/640 1231/621 1295/667 +f 808/445 809/452 824/489 +f 3858/923 3859/863 1013/550 +f 3813/782 3810/781 1411/761 +f 2154/2149 2149/2148 2159/2156 +f 3845/2090 3835/3867 1758/3871 +f 306/449 807/450 1922/3460 +f 2157/2150 2153/2147 1976/3511 +f 708/2182 709/1338 2031/1913 +f 1450/2169 1226/615 1227/617 +f 1672/1478 1604/1398 1605/1954 +f 1813/3880 1814/1665 1821/1667 +f 1717/1559 1716/1532 1715/1533 +f 176/177 159/106 402/1493 +f 1710/1842 1705/1744 3916/1512 +f 1712/1553 1580/1555 1577/1531 +f 1705/1744 1713/1529 1711/1528 +f 971/501 978/509 979/733 +f 822/448 823/447 1094/552 +f 2909/3881 2810/3882 2793/2973 +f 2810/3882 2769/2988 2795/2981 +f 2799/2979 2910/3102 2249/2327 +f 3662/3883 3643/3101 2910/3102 +f 2769/2988 2768/2951 2801/2982 +f 3177/3499 3176/3500 3178/3501 +f 3178/3501 3381/3764 3180/3765 +f 3181/3503 1394/1470 1393/1472 +f 3181/3503 1633/1700 1632/1702 +f 3182/3504 1632/1702 2244/2314 +f 3183/3505 2244/2314 2243/2313 +f 3492/3884 2791/2972 2790/2969 +f 2808/3885 3492/3884 3491/3879 +f 2239/3886 3574/1170 3572/3887 +f 1149/2264 1150/1907 3571/3889 +f 1150/1907 2009/2072 3567/3891 +f 2202/2277 3569/1337 3566/1336 +f 2921/3892 3564/2811 3565/1770 +f 3568/3894 3560/3183 2133/2178 +f 1777/1607 1579/1599 1759/3895 +f 1201/598 1196/599 1183/588 +f 3561/1769 3554/820 2195/2251 +f 3553/1460 3576/3930 2770/3897 +f 3927/1519 3925/2956 1687/3508 +f 1516/1966 152/1441 154/104 +f 3549/1590 3562/1933 2974/3312 +f 3550/3898 3548/2881 2779/2967 +f 2771/3326 2772/3325 3575/3899 +f 3070/3298 2771/3326 3546/3900 +f 3547/1640 3544/827 2775/2990 +f 1376/714 1375/713 1370/705 +f 2767/3486 3543/819 3556/3129 +f 2195/2251 3554/820 3543/819 +f 2775/2990 3544/827 3542/826 +f 3377/3754 3282/1580 3545/1579 +f 3804/2983 3802/3932 3539/3902 +f 2776/2991 3542/826 3538/1577 +f 2240/3888 3572/3887 3535/3904 +f 2920/3905 3535/3904 3669/3906 +f 3548/2881 3534/1048 2786/2964 +f 1805/1673 1850/957 1851/959 +f 3534/1048 3532/1639 2787/3056 +f 2951/3438 2773/2953 3540/3908 +f 2772/3325 2951/3438 3531/3909 +f 
2973/3182 2224/3382 3530/3910 +f 2224/3382 2223/2292 3537/3912 +f 1957/2870 1953/1970 320/983 +f 306/449 305/1093 304/1199 +f 2770/3897 3576/3930 3528/1171 +f 2913/3105 3528/1171 3574/1170 +f 2923/3893 3565/1770 3527/2826 +f 308/185 1940/1787 431/262 +f 207/131 208/130 132/90 +f 1879/1916 1876/1724 1881/1936 +f 3733/3490 3787/1204 3525/1203 +f 2475/2556 3070/3298 3558/3901 +f 2476/3290 2475/2556 3524/3913 +f 2223/2292 2476/3290 3523/3914 +f 3522/1159 3549/1590 2803/3412 +f 2037/3915 3563/2202 11/2191 +f 1959/1999 1958/2101 450/272 +f 2003/2065 3555/2128 3520/2127 +f 9/2190 10/2192 334/328 +f 2009/2072 2008/2071 3518/3916 +f 2008/2071 2973/3182 3536/3911 +f 3877/1202 672/3186 673/2296 +f 3515/1789 3550/3898 2778/2957 +f 2935/3917 3514/2120 3513/1695 +f 111/3492 1149/2264 3570/3890 +f 3510/3918 3514/2120 2935/3917 +f 683/2797 3876/1939 3885/1392 +f 3508/1624 3557/1507 2810/3882 +f 736/2821 733/398 732/2883 +f 1795/1629 1804/1638 1803/1708 +f 3505/987 3510/3918 2938/3138 +f 3507/958 3505/987 2939/3140 +f 2950/3155 3512/1173 3504/3919 +f 2940/3142 2949/3154 3504/3919 +f 3801/3156 3791/1174 3512/1173 +f 2868/3920 3509/993 3502/992 +f 2869/3349 3502/992 3522/1159 +f 1024/614 1034/605 1023/593 +f 3500/949 3503/2193 2948/3153 +f 551/1277 1939/3869 442/487 +f 2972/3181 3499/3921 3568/3894 +f 2032/2097 3533/1930 3499/3921 +f 216/138 93/70 1924/1781 +f 203/236 202/135 201/137 +f 1828/1677 1848/1676 1847/1694 +f 1958/2101 1957/2870 449/273 +f 272/350 271/1003 178/2199 +f 2871/3762 3545/1579 3495/1439 +f 3619/3907 3669/3906 3564/2811 +f 2801/2982 2768/2951 3643/3101 +f 2005/2213 3758/1776 3654/1775 +f 960/3494 886/485 899/2835 +f 3657/3655 3655/3512 3612/3514 +f 3780/3922 3788/3427 3646/3424 +f 3151/3426 3646/3424 3593/2650 +f 2193/2250 3604/3421 3623/2937 +f 3607/3409 3603/3410 3145/3411 +f 3603/3410 3649/2936 2761/2933 +f 3592/3350 3607/3409 3144/3923 +f 2926/3126 3658/3127 3634/3351 +f 3634/3351 3592/3350 3104/3352 +f 3755/3310 3754/812 3746/3924 +f 566/1320 567/1319 3758/1776 +f 3488/3863 3656/3605 1864/950 +f 1602/1399 1586/1389 2079/2005 +f 2868/3920 3658/3127 3611/3125 +f 2922/3293 3611/3125 3610/3124 +f 2923/3893 3610/3124 3618/3116 +f 3670/3117 3613/3113 2918/3114 +f 3619/3907 3670/3117 3614/3115 +f 2921/3892 3618/3116 3670/3117 +f 2920/3905 3614/3115 3586/2308 +f 3613/3113 3642/3110 2915/3108 +f 2917/3121 2766/2946 3622/2947 +f 2916/3123 2917/3121 3641/3111 +f 2239/3886 3587/2310 3652/3106 +f 2911/3925 3640/3100 3652/3106 +f 3643/3101 3640/3100 2911/3925 +f 2797/2980 2801/2982 3662/3883 +f 2797/2980 3591/2977 3598/2927 +f 3622/2947 3628/2943 2763/2941 +f 3608/2929 3628/2943 2764/3347 +f 2194/3896 3623/2937 3648/2935 +f 2221/2291 3648/2935 3649/2936 +f 2756/2942 3608/2929 3609/2928 +f 2750/2975 3598/2927 3609/2928 +f 3669/3906 3535/3904 1817/2135 +f 3763/2780 3770/1647 1691/3926 +f 2515/2601 3621/2610 3590/2649 +f 2548/2639 3590/2649 3593/2650 +f 2513/2599 2489/2572 3632/2611 +f 2513/2599 3616/2609 3621/2610 +f 2509/2595 3633/2592 3620/2378 +f 2510/2604 3632/2611 3633/2592 +f 2289/2593 3620/2378 3639/2371 +f 2283/2376 3639/2371 3617/2370 +f 2240/3888 3586/2308 3587/2310 +f 3659/3609 3657/3655 3624/3664 +f 716/377 2121/2089 3887/2086 +f 727/375 1999/1869 3761/2110 +f 2165/3515 2171/2108 2170/2107 +f 3660/807 3659/3609 3626/3591 +f 3743/2042 3581/3583 3578/1774 +f 2175/3580 2172/2106 769/419 +f 2100/630 500/304 2095/2007 +f 3758/1776 567/1319 3757/2040 +f 640/1452 3560/1736 3568/3927 +f 1653/1644 3576/3930 3553/1460 +f 1895/2270 3536/3942 3530/3928 +f 746/1283 3530/3928 3537/3929 +f 1653/1644 
1810/1169 3528/1171 +f 3762/1609 1603/3139 1604/1398 +f 2093/1754 2075/1753 2073/1977 +f 959/517 996/516 995/1751 +f 962/491 899/2835 919/527 +f 1400/749 1401/2337 1398/747 +f 1000/1750 817/444 1127/551 +f 956/3185 959/517 884/462 +f 1662/1674 3548/2881 3550/3898 +f 2034/1915 3558/3941 3546/3931 +f 3926/2955 1689/3952 3539/3902 +f 3535/3904 3572/3887 766/2136 +f 3572/3887 3574/1170 765/2091 +f 334/328 3570/3939 3571/3933 +f 1868/1766 3531/1765 3540/1492 +f 1684/3934 1651/1459 1652/1508 +f 1682/1806 1683/1805 775/423 +f 1585/1001 1603/3139 3762/1609 +f 3763/2780 3762/1609 1672/1478 +f 2173/853 2174/1900 490/1457 +f 1658/825 3544/827 3547/1640 +f 3556/3129 3543/819 1650/2184 +f 1571/813 3754/812 2010/2801 +f 1587/1388 3664/2256 3671/2258 +f 2099/2011 2095/2007 2077/1982 +f 787/1780 3761/2110 1999/1869 +f 2000/1868 3694/3622 786/1499 +f 3155/3935 339/512 3697/2678 +f 3713/3439 3155/3935 3683/3448 +f 2180/2218 2179/2217 3685/3936 +f 3496/1664 2180/2218 3684/3434 +f 1684/3934 3846/1017 3840/1019 +f 1760/2207 464/1311 3495/1439 +f 2607/2705 3689/2706 3693/2686 +f 2581/2673 3676/2679 3689/2706 +f 2371/2458 2502/2693 3700/2585 +f 2371/2458 3695/2687 3693/2686 +f 2587/3314 3676/2679 3675/2680 +f 2586/3313 3675/2680 3697/2678 +f 2497/3172 3691/2579 3687/2586 +f 2502/2693 2503/2692 3687/2586 +f 2494/2576 3691/2579 3690/2578 +f 3690/2578 3688/2555 2472/2552 +f 3688/2555 3701/2548 2468/2549 +f 3701/2548 3703/2547 2469/3937 +f 726/397 3908/2028 3892/2049 +f 581/383 3567/1120 3518/3938 +f 10/2192 3511/5093 3570/3939 +f 1837/2119 3514/2120 3510/3918 +f 741/1282 3537/3929 3523/3940 +f 1299/636 3524/1208 3558/3941 +f 1851/959 3507/958 3504/3919 +f 3527/2826 3565/1770 1820/1772 +f 580/343 3518/3938 3536/3942 +f 1758/3871 1685/1160 3522/1159 +f 1693/1016 3557/1507 3508/1624 +f 3504/3919 3512/1173 1867/1709 +f 1894/1733 1826/2773 3568/3927 +f 2126/2259 1418/780 3813/782 +f 3906/2260 2126/2259 3841/2114 +f 2113/2039 3911/2043 3867/2045 +f 2179/2217 3713/3439 3789/3447 +f 624/360 3903/2050 3904/2062 +f 2192/2248 3604/3421 3788/3427 +f 2177/2216 3541/1663 3787/1204 +f 1364/2125 3824/690 3833/681 +f 3858/923 3855/910 1470/3943 +f 3135/3389 1753/1803 3775/3387 +f 2069/2055 3812/3465 3835/3867 +f 1351/879 3861/670 3853/671 +f 2108/3944 3882/1762 3901/1764 +f 2089/2145 2092/2144 3736/3442 +f 3828/684 3834/680 1358/683 +f 2001/2064 3749/3483 3783/3485 +f 2187/2244 3783/3485 3719/3436 +f 2130/2276 3756/3478 3752/3482 +f 2004/2066 3752/3482 3749/3483 +f 2014/2074 3724/3473 3779/3477 +f 2201/2262 3779/3477 3756/3478 +f 3167/3470 3720/3474 3759/3177 +f 3168/3472 3724/3473 3720/3474 +f 3159/3454 3709/3453 3764/3456 +f 3160/3457 3764/3456 3765/3321 +f 3730/3455 3707/3452 3158/3945 +f 3158/3945 3707/3452 3709/3453 +f 2200/2254 3069/3294 3730/3455 +f 3685/3936 3789/3447 3742/3417 +f 3722/3445 3774/3444 2197/2255 +f 3790/3443 3736/3442 2092/2144 +f 3774/3444 3790/3443 3721/3437 +f 3154/3446 3706/3441 3790/3443 +f 3155/3935 3713/3439 3706/3441 +f 2091/3489 3719/3436 3721/3437 +f 2196/2252 3148/3432 3750/3416 +f 2193/2250 2196/2252 3776/3431 +f 3604/3421 2193/2250 3781/3430 +f 2086/3946 2192/2248 3780/3922 +f 2087/2141 2086/3946 3778/3425 +f 2204/3435 3742/3417 3751/3415 +f 2205/2268 3751/3415 3750/3416 +f 3909/2181 3886/2165 574/391 +f 2124/2173 2087/2141 3777/3422 +f 2208/2272 2124/2173 3760/3403 +f 3711/3390 3785/3398 3140/3399 +f 3785/3398 3745/3397 3138/3395 +f 3745/3397 3744/3396 3139/3878 +f 3771/3394 3712/3391 3136/3393 +f 3712/3391 3711/3390 3137/3392 +f 3775/3387 3767/3384 3131/3385 +f 3767/3384 3782/3383 3132/3400 +f 
1813/3880 3839/1650 3838/1653 +f 1293/661 3854/1793 3856/666 +f 2184/2220 2321/3355 3748/2405 +f 2181/2221 2184/2220 3732/3324 +f 3765/3321 3768/3318 3079/3319 +f 3768/3318 3734/3317 3080/3323 +f 709/1338 3873/2167 3875/2129 +f 2774/2954 2961/3166 3737/3165 +f 3786/3292 3769/3291 2134/2179 +f 3809/413 3808/412 766/2136 +f 2968/3174 3784/3179 3766/3164 +f 2969/3176 3759/3177 3784/3179 +f 2958/3168 3747/3162 3737/3165 +f 2132/3433 3747/3162 3766/3164 +f 2321/3355 2185/2223 3771/3394 +f 3860/849 3848/850 1449/2113 +f 3905/2157 3874/2153 582/1461 +f 3886/2165 3889/2164 577/3099 +f 3847/1659 3826/1658 1818/1771 +f 705/2159 3902/2084 3887/2086 +f 3816/3662 3912/2067 3907/2069 +f 2111/2124 3883/2123 3911/2043 +f 3810/781 3829/691 1366/2126 +f 1812/1654 3839/1650 3822/1652 +f 675/1662 3851/2093 3852/2094 +f 3496/1664 3705/3119 3851/2093 +f 2110/2038 3867/2045 3868/2033 +f 2104/2026 3896/2029 3901/1764 +f 2109/2031 3868/2033 3899/1394 +f 3842/1427 3831/877 703/390 +f 1680/1844 3811/1488 3846/1017 +f 1639/1435 1627/1409 3819/1411 +f 623/361 3904/2062 3906/2260 +f 2139/2130 2140/2013 3866/2015 +f 1808/1645 1640/1642 3844/2092 +f 3820/1669 3821/1668 1819/2825 +f 3872/2087 3914/631 3900/633 +f 1646/1434 3832/1443 3830/1445 +f 3889/2164 3890/2158 633/2170 +f 2139/2130 3865/2122 3875/2129 +f 2156/2152 2155/2151 3871/2166 +f 674/3187 3852/2094 3913/2104 +f 2117/887 3817/886 3812/3465 +f 1815/1661 3822/1652 3847/1659 +f 3855/910 3849/909 1444/846 +f 697/1233 3906/2260 3912/2067 +f 866/682 842/1433 3836/603 +f 3864/2180 3869/2134 2136/2131 +f 3585/3654 3731/3651 3726/3650 +f 3583/3581 3740/3663 3731/3651 +f 3753/2041 3746/3924 3738/3656 +f 3740/3663 3583/3581 3581/3583 +f 1088/1379 3738/3656 3746/3924 +f 3739/3666 3740/3663 3743/2042 +f 2791/2972 3492/3884 3493/3948 +f 3492/3884 2808/3885 2807/2984 +f 2792/2970 3493/3948 3494/3949 +f 3493/3948 2807/2984 2806/3903 +f 3508/1624 2909/3881 3494/3949 +f 3516/3950 3494/3949 2806/3903 +f 2948/3153 3503/2193 3791/1174 +f 2105/2046 3908/2028 3896/2029 +f 3538/1577 3802/3932 3804/2983 +f 680/1417 3878/1937 3876/1939 +f 1891/2175 1890/1740 3910/1730 +f 2905/3096 3799/3098 3801/3156 +f 2900/3089 3797/3090 3799/3098 +f 2901/3092 3797/3090 3807/3091 +f 2734/2907 2878/3065 3806/3067 +f 2877/3093 3807/3091 3806/3067 +f 2820/3002 3800/3951 3794/3060 +f 2818/3054 3796/2999 3792/3012 +f 2823/3008 3793/3004 3792/3012 +f 2822/3017 3793/3004 3798/3001 +f 2819/3005 2735/3007 3800/3951 +f 2817/3874 3796/2999 3795/2998 +f 2809/2993 3805/2986 3795/2998 +f 2808/3885 3805/2986 3803/2985 +f 2805/2992 3804/2983 3803/2985 +f 1702/1513 3921/1509 3923/1511 +f 3922/1627 3920/1632 1797/1636 +f 3915/1788 3916/1512 1706/1514 +f 3918/1520 3927/1519 1692/1505 +f 1657/1917 3898/2757 3880/1728 +f 1890/1740 1877/1723 3895/1617 +f 3772/1648 3770/1647 1674/1483 +f 1689/3952 1690/1646 3772/1648 +f 3926/2955 3802/3932 3538/1577 +f 3516/3950 3539/3902 1689/3952 +f 3917/1606 3923/1511 1703/1510 +f 1598/1396 3917/1606 3928/1602 +f 3880/1728 3884/1618 1876/1724 +f 1691/3926 1690/1646 3925/2956 +f 3925/2956 3926/2955 1688/3953 +f 3934/3959 3933/3965 3937/3960 +f 3934/3959 3938/3961 3939/3962 +f 3932/3964 3936/4074 3937/3960 +f 3938/3961 3942/3969 3943/3966 +f 3937/3960 3936/4074 3940/3967 +f 3937/3960 3941/3968 3942/3969 +f 3942/3969 3946/3973 3947/3970 +f 3941/3968 3940/3967 3944/3971 +f 3942/3969 3941/3968 3945/3972 +f 3947/3970 3946/3973 3950/3974 +f 3944/3971 3948/4075 3949/3976 +f 3945/3972 3949/3976 3950/3974 +f 3950/3974 3954/3980 3955/3977 +f 3949/3976 3948/4075 3952/3978 +f 3950/3974 3949/3976 
3953/3979 +f 3955/3977 3954/3980 3958/3981 +f 3953/3979 3952/3978 3956/3983 +f 3953/3979 3957/3984 3958/3981 +f 3959/3982 3958/3981 3962/3985 +f 3957/3984 3956/3983 3960/3987 +f 3958/3981 3957/3984 3961/3988 +f 3962/3985 3966/3991 3967/3989 +f 3960/3987 3964/4076 3965/3990 +f 3961/3988 3965/3990 3966/3991 +f 3966/3991 3970/3995 3971/3992 +f 3965/3990 3964/4076 3968/3993 +f 3966/3991 3965/3990 3969/3994 +f 3970/3995 3974/3998 3975/3996 +f 3968/3993 3972/4077 3973/3997 +f 3969/3994 3973/3997 3974/3998 +f 3974/3998 3978/4002 3979/3999 +f 3973/3997 3972/4077 3976/4000 +f 3974/3998 3973/3997 3977/4001 +f 3979/3999 3978/4002 3982/4003 +f 3976/4000 3980/4078 3981/4005 +f 3977/4001 3981/4005 3982/4003 +f 3982/4003 3986/4009 3987/4006 +f 3981/4005 3980/4078 3984/4007 +f 3982/4003 3981/4005 3985/4008 +f 3987/4006 3986/4009 3990/4010 +f 3984/4007 3988/4079 3989/4012 +f 3985/4008 3989/4012 3990/4010 +f 3991/4011 3990/4010 3994/4013 +f 3989/4012 3988/4079 3992/4015 +f 3990/4010 3989/4012 3993/4016 +f 3995/4014 3994/4013 3998/4017 +f 3992/4015 3996/4080 3997/4019 +f 3993/4016 3997/4019 3998/4017 +f 3998/4017 4002/4023 4003/4020 +f 3997/4019 3996/4080 4000/4021 +f 3998/4017 3997/4019 4001/4022 +f 4002/4023 4006/4026 4007/4024 +f 4000/4021 4004/4081 4005/4025 +f 4001/4022 4005/4025 4006/4026 +f 4006/4026 4010/4030 4011/4027 +f 4005/4025 4004/4081 4008/4028 +f 4006/4026 4005/4025 4009/4029 +f 4011/4027 4010/4030 4014/4031 +f 4008/4028 4012/4082 4013/4033 +f 4009/4029 4013/4033 4014/4031 +f 4014/4031 4018/4037 4019/4034 +f 4013/4033 4012/4082 4016/4035 +f 4014/4031 4013/4033 4017/4036 +f 4019/4034 4018/4037 4022/4038 +f 4016/4035 4020/4083 4021/4040 +f 4017/4036 4021/4040 4022/4038 +f 4023/4039 4022/4038 4026/4041 +f 4021/4040 4020/4083 4024/4043 +f 4022/4038 4021/4040 4025/4044 +f 4027/4042 4026/4041 4030/4045 +f 4024/4043 4028/4084 4029/4047 +f 4025/4044 4029/4047 4030/4045 +f 4031/4046 4030/4045 4034/4048 +f 4029/4047 4028/4084 4032/4050 +f 4030/4045 4029/4047 4033/4051 +f 4034/4048 4038/4054 4039/4052 +f 4032/4050 4036/4085 4037/4053 +f 4033/4051 4037/4053 4038/4054 +f 4038/4054 4042/4058 4043/4055 +f 4037/4053 4036/4085 4040/4056 +f 4038/4054 4037/4053 4041/4057 +f 4043/4055 4042/4058 4046/4059 +f 4040/4056 4044/4086 4045/4061 +f 4041/4057 4045/4061 4046/4059 +f 4046/4059 4050/4065 4051/4062 +f 4045/4061 4044/4086 4048/4063 +f 4046/4059 4045/4061 4049/4064 +f 4051/4062 4050/4065 4055/4066 +f 4048/4063 4053/4071 4054/4068 +f 4049/4064 4054/4068 4055/4066 +f 4056/4067 4055/4066 4059/4069 +f 4053/4071 4057/4087 4058/4072 +f 4055/4066 4054/4068 4058/4072 +f 4060/4070 4059/4069 3934/3959 +f 4058/4072 4057/4087 3932/3964 +f 4058/4072 3933/3965 3934/3959 +f 4069/4088 4068/4105 4081/4089 +f 4066/4091 4079/4122 4080/4092 +f 4065/4094 4064/4108 4077/4095 +f 4063/4097 4062/4110 4075/4098 +f 4071/4100 4084/4113 4085/4101 +f 4069/4088 4082/4090 4083/4103 +f 4067/4093 4080/4092 4081/4089 +f 4066/4106 4065/4094 4078/4096 +f 4063/4097 4076/4099 4077/4095 +f 4061/4109 4074/4115 4075/4098 +f 4072/4102 4085/4101 4086/4111 +f 4071/4100 4070/4104 4083/4103 +f 4076/4099 4089/4125 4090/4114 +f 4074/4115 4087/4525 4088/4116 +f 4086/4111 4085/4101 4098/4117 +f 4083/4103 4096/4126 4097/4119 +f 4082/4090 4081/4089 4094/4120 +f 4079/4122 4092/4135 4093/4123 +f 4078/4096 4077/4095 4090/4114 +f 4076/4099 4075/4098 4088/4116 +f 4085/4101 4084/4113 4097/4119 +f 4082/4090 4095/4121 4096/4126 +f 4080/4092 4093/4123 4094/4120 +f 4079/4107 4078/4096 4091/4124 +f 4089/4125 4102/4138 4103/4128 +f 4088/4116 4087/4525 4100/4129 +f 4098/4117 
4111/4139 4112/4131 +f 4096/4126 4109/4140 4110/4132 +f 4095/4121 4094/4120 4107/4133 +f 4092/4135 4105/4150 4106/4136 +f 4090/4114 4103/4128 4104/4137 +f 4089/4125 4088/4116 4101/4130 +f 4097/4119 4110/4132 4111/4139 +f 4095/4121 4108/4134 4109/4140 +f 4094/4120 4093/4123 4106/4136 +f 4092/4127 4091/4124 4104/4137 +f 4103/4128 4102/4138 4115/4142 +f 4100/4129 4113/5097 4114/4144 +f 4112/4131 4111/4139 4124/4145 +f 4110/4132 4109/4140 4122/4147 +f 4107/4133 4120/4153 4121/4149 +f 4105/4150 4118/5098 4119/4151 +f 4103/4128 4116/4143 4117/4152 +f 4101/4130 4114/4144 4115/4142 +f 4110/4132 4123/4148 4124/4145 +f 4109/4140 4108/4134 4121/4149 +f 4107/4133 4106/4136 4119/4151 +f 4105/4141 4104/4137 4117/4152 +f 4115/4142 4128/4165 4129/4155 +f 4114/4144 4113/5097 4126/4156 +f 4124/4145 4137/4166 4138/4158 +f 4123/4148 4122/4147 4135/4159 +f 4120/4153 4133/4167 4134/4161 +f 4119/4151 4118/5098 4131/4162 +f 4117/4152 4116/4143 4129/4155 +f 4114/4144 4127/4157 4128/4165 +f 4124/4145 4123/4148 4136/4160 +f 4122/4147 4121/4149 4134/4161 +f 4119/4151 4132/4163 4133/4167 +f 4117/4152 4130/4164 4131/4168 +f 4129/4155 4128/4165 4141/4169 +f 4126/4156 4139/4524 4140/4171 +f 4138/4158 4137/4166 4150/4172 +f 4136/4160 4135/4159 4148/4174 +f 4133/4167 4146/4179 4147/4176 +f 4131/4162 4144/4189 4145/4177 +f 4130/4164 4129/4155 4142/4170 +f 4127/4157 4140/4171 4141/4169 +f 4137/4166 4136/4160 4149/4175 +f 4135/4159 4134/4161 4147/4176 +f 4132/4163 4145/4177 4146/4179 +f 4131/4168 4130/4164 4143/4178 +f 4142/4170 4141/4169 4154/4181 +f 4140/4171 4139/4524 4152/4183 +f 4150/4172 4163/4192 4164/4185 +f 4149/4175 4148/4174 4161/4186 +f 4146/4179 4159/4193 4160/4188 +f 4144/4189 4157/5099 4158/4190 +f 4142/4170 4155/4182 4156/4191 +f 4140/4171 4153/4184 4154/4181 +f 4149/4175 4162/4187 4163/4192 +f 4148/4174 4147/4176 4160/4188 +f 4146/4179 4145/4177 4158/4190 +f 4144/4180 4143/4178 4156/4191 +f 4155/4182 4154/4181 4167/4195 +f 4153/4184 4152/4183 4165/4197 +f 4163/4192 4176/4205 4177/4199 +f 4161/4186 4174/4206 4175/4200 +f 4159/4193 4172/4207 4173/4201 +f 4158/4190 4157/5099 4170/4202 +f 4155/4182 4168/4196 4169/4204 +f 4153/4184 4166/4198 4167/4195 +f 4163/4192 4162/4187 4175/4200 +f 4161/4186 4160/4188 4173/4201 +f 4159/4193 4158/4190 4171/4203 +f 4156/4191 4169/4204 4170/4208 +f 4167/4195 4180/4218 4181/4209 +f 4165/4197 4178/4222 4179/4210 +f 4176/4205 4189/4219 4190/4211 +f 4175/4200 4174/4206 4187/4212 +f 4173/4201 4172/4207 4185/4214 +f 4170/4202 4183/4229 4184/4216 +f 4169/4204 4168/4196 4181/4209 +f 4167/4195 4166/4198 4179/4210 +f 4175/4200 4188/4213 4189/4219 +f 4173/4201 4186/4215 4187/4212 +f 4171/4203 4184/4216 4185/4214 +f 4170/4208 4169/4204 4182/4217 +f 4180/4218 4193/4232 4194/4221 +f 4178/4222 4191/4523 4192/4223 +f 4190/4211 4189/4219 4202/4224 +f 4187/4212 4200/4233 4201/4226 +f 4186/4215 4185/4214 4198/4227 +f 4183/4229 4196/4242 4197/4230 +f 4182/4217 4181/4209 4194/4221 +f 4180/4218 4179/4210 4192/4223 +f 4189/4219 4188/4213 4201/4226 +f 4186/4215 4199/4228 4200/4233 +f 4184/4216 4197/4230 4198/4227 +f 4183/4220 4182/4217 4195/4231 +f 4193/4232 4206/4245 4207/4235 +f 4192/4223 4191/4523 4204/4236 +f 4202/4224 4215/4246 4216/4238 +f 4200/4233 4213/4247 4214/4239 +f 4199/4228 4198/4227 4211/4240 +f 4196/4242 4209/4256 4210/4243 +f 4194/4221 4207/4235 4208/4244 +f 4193/4232 4192/4223 4205/4237 +f 4201/4226 4214/4239 4215/4246 +f 4199/4228 4212/4241 4213/4247 +f 4198/4227 4197/4230 4210/4243 +f 4196/4234 4195/4231 4208/4244 +f 4207/4235 4206/4245 4219/4249 +f 4204/4236 4217/5100 4218/4251 +f 
4216/4238 4215/4246 4228/4252 +f 4213/4247 4226/4259 4227/4254 +f 4211/4240 4224/4260 4225/4255 +f 4209/4256 4222/5101 4223/4257 +f 4207/4235 4220/4250 4221/4258 +f 4205/4237 4218/4251 4219/4249 +f 4214/4239 4227/4254 4228/4252 +f 4213/4247 4212/4241 4225/4255 +f 4211/4240 4210/4243 4223/4257 +f 4209/4248 4208/4244 4221/4258 +f 4219/4249 4232/4272 4233/4262 +f 4218/4251 4217/5100 4230/4263 +f 4228/4252 4241/4273 4242/4265 +f 4226/4259 4239/4274 4240/4266 +f 4225/4255 4224/4260 4237/4267 +f 4223/4257 4222/5101 4235/4269 +f 4221/4258 4220/4250 4233/4262 +f 4218/4251 4231/4264 4232/4272 +f 4228/4252 4227/4254 4240/4266 +f 4226/4259 4225/4255 4238/4268 +f 4223/4257 4236/4270 4237/4267 +f 4221/4258 4234/4271 4235/4275 +f 4233/4262 4232/4272 4245/4276 +f 4230/4263 4243/4522 4244/4278 +f 4242/4265 4241/4273 4254/4279 +f 4240/4266 4239/4274 4252/4281 +f 4237/4267 4250/4286 4251/4283 +f 4235/4269 4248/4296 4249/4284 +f 4234/4271 4233/4262 4246/4277 +f 4231/4264 4244/4278 4245/4276 +f 4241/4273 4240/4266 4253/4282 +f 4239/4274 4238/4268 4251/4283 +f 4236/4270 4249/4284 4250/4286 +f 4235/4275 4234/4271 4247/4285 +f 4246/4277 4245/4276 4258/4288 +f 4244/4278 4243/4522 4256/4290 +f 4254/4279 4267/4299 4268/4292 +f 4253/4282 4252/4281 4265/4293 +f 4250/4286 4263/4300 4264/4295 +f 4248/4296 4261/5102 4262/4297 +f 4246/4277 4259/4289 4260/4298 +f 4244/4278 4257/4291 4258/4288 +f 4253/4282 4266/4294 4267/4299 +f 4252/4281 4251/4283 4264/4295 +f 4250/4286 4249/4284 4262/4297 +f 4248/4287 4247/4285 4260/4298 +f 4259/4289 4258/4288 4271/4302 +f 4257/4291 4256/4290 4269/4304 +f 4267/4299 4280/4312 4281/4306 +f 4265/4293 4278/4313 4279/4307 +f 4263/4300 4276/4314 4277/4308 +f 4262/4297 4261/5102 4274/4309 +f 4259/4289 4272/4303 4273/4311 +f 4257/4291 4270/4305 4271/4302 +f 4266/4294 4279/4307 4280/4312 +f 4265/4293 4264/4295 4277/4308 +f 4263/4300 4262/4297 4275/4310 +f 4260/4298 4273/4311 4274/4315 +f 4271/4302 4284/4326 4285/4316 +f 4269/4304 4282/4329 4283/4317 +f 4281/4306 4280/4312 4293/4318 +f 4279/4307 4278/4313 4291/4320 +f 4277/4308 4276/4314 4289/4322 +f 4274/4309 4287/4336 4288/4324 +f 4273/4311 4272/4303 4285/4316 +f 4271/4302 4270/4305 4283/4317 +f 4280/4312 4279/4307 4292/4321 +f 4277/4308 4290/4323 4291/4320 +f 4275/4310 4288/4324 4289/4322 +f 4274/4315 4273/4311 4286/4325 +f 4284/4326 4297/4339 4298/4328 +f 4282/4329 4295/4521 4296/4330 +f 4294/4319 4293/4318 4306/4331 +f 4291/4320 4304/4340 4305/4333 +f 4290/4323 4289/4322 4302/4334 +f 4287/4336 4300/4349 4301/4337 +f 4286/4325 4285/4316 4298/4328 +f 4284/4326 4283/4317 4296/4330 +f 4293/4318 4292/4321 4305/4333 +f 4290/4323 4303/4335 4304/4340 +f 4288/4324 4301/4337 4302/4334 +f 4287/4327 4286/4325 4299/4338 +f 4297/4339 4310/4352 4311/4342 +f 4296/4330 4295/4521 4308/4343 +f 4306/4331 4319/4353 4320/4345 +f 4304/4340 4317/4354 4318/4346 +f 4303/4335 4302/4334 4315/4347 +f 4300/4349 4313/4364 4314/4350 +f 4298/4328 4311/4342 4312/4351 +f 4297/4339 4296/4330 4309/4344 +f 4305/4333 4318/4346 4319/4353 +f 4304/4340 4303/4335 4316/4348 +f 4302/4334 4301/4337 4314/4350 +f 4300/4341 4299/4338 4312/4351 +f 4310/4352 4323/4367 4324/4356 +f 4308/4343 4321/5103 4322/4357 +f 4320/4345 4319/4353 4332/4358 +f 4318/4346 4317/4354 4330/4360 +f 4316/4348 4315/4347 4328/4362 +f 4313/4364 4326/5104 4327/4365 +f 4311/4342 4324/4356 4325/4366 +f 4310/4352 4309/4344 4322/4357 +f 4318/4346 4331/4361 4332/4358 +f 4316/4348 4329/4363 4330/4360 +f 4315/4347 4314/4350 4327/4365 +f 4312/4351 4325/4366 4326/4368 +f 4323/4367 4336/4379 4337/4369 +f 4322/4357 4321/5103 
4334/4370 +f 4332/4358 4345/4380 4346/4372 +f 4330/4360 4343/4381 4344/4373 +f 4329/4363 4328/4362 4341/4374 +f 4327/4365 4326/5104 4339/4376 +f 4325/4366 4324/4356 4337/4369 +f 4322/4357 4335/4371 4336/4379 +f 4332/4358 4331/4361 4344/4373 +f 4330/4360 4329/4363 4342/4375 +f 4327/4365 4340/4377 4341/4374 +f 4325/4366 4338/4378 4339/4382 +f 4337/4369 4336/4379 4350/4383 +f 4334/4370 4348/4520 4349/4385 +f 4346/4372 4345/4380 4359/4386 +f 4344/4373 4343/4381 4357/4388 +f 4341/4374 4355/4394 4356/4390 +f 4340/4377 4339/4376 4353/4391 +f 4338/4378 4337/4369 4351/4384 +f 4335/4371 4349/4385 4350/4383 +f 4345/4380 4344/4373 4358/4389 +f 4343/4381 4342/4375 4356/4390 +f 4340/4377 4354/4392 4355/4394 +f 4338/4378 4352/4393 4353/4395 +f 4351/4384 4350/4383 4363/4396 +f 4349/4385 4348/4520 4361/4398 +f 4359/4386 4372/4407 4373/4400 +f 4358/4389 4357/4388 4370/4401 +f 4355/4394 4368/4408 4369/4403 +f 4354/4392 4353/4391 4366/4404 +f 4351/4384 4364/4397 4365/4406 +f 4349/4385 4362/4399 4363/4396 +f 4358/4389 4371/4402 4372/4407 +f 4357/4388 4356/4390 4369/4403 +f 4355/4394 4354/4392 4367/4405 +f 4352/4393 4365/4406 4366/4409 +f 4364/4397 4363/4396 4376/4410 +f 4362/4399 4361/4398 4374/4412 +f 4373/4400 4372/4407 4385/4414 +f 4370/4401 4383/4421 4384/4416 +f 4368/4408 4381/4422 4382/4417 +f 4367/4405 4366/4404 4379/4418 +f 4364/4397 4377/4411 4378/4420 +f 4362/4399 4375/4413 4376/4410 +f 4372/4407 4371/4402 4384/4416 +f 4370/4401 4369/4403 4382/4417 +f 4368/4408 4367/4405 4380/4419 +f 4365/4406 4378/4420 4379/4423 +f 4376/4410 4389/4433 4390/4424 +f 4374/4412 4387/4437 4388/4425 +f 4385/4414 4398/4434 4399/4426 +f 4384/4416 4383/4421 4396/4427 +f 4382/4417 4381/4422 4394/4429 +f 4379/4418 4392/5105 4393/4431 +f 4378/4420 4377/4411 4390/4424 +f 4376/4410 4375/4413 4388/4425 +f 4384/4416 4397/4428 4398/4434 +f 4382/4417 4395/4430 4396/4427 +f 4380/4419 4393/4431 4394/4429 +f 4379/4423 4378/4420 4391/4432 +f 4389/4433 4402/4447 4403/4436 +f 4387/4437 4400/4519 4401/4438 +f 4399/4426 4398/4434 4411/4439 +f 4396/4427 4409/4448 4410/4441 +f 4395/4430 4394/4429 4407/4442 +f 4393/4431 4392/5105 4405/4444 +f 4391/4432 4390/4424 4403/4436 +f 4389/4433 4388/4425 4401/4438 +f 4398/4434 4397/4428 4410/4441 +f 4395/4430 4408/4443 4409/4448 +f 4393/4431 4406/4445 4407/4442 +f 4391/4432 4404/4446 4405/4449 +f 4402/4447 4415/4460 4416/4450 +f 4401/4438 4400/4519 4413/4451 +f 4411/4439 4424/4461 4425/4453 +f 4409/4448 4422/4462 4423/4454 +f 4408/4443 4407/4442 4420/4455 +f 4406/4445 4405/4444 4418/4457 +f 4403/4436 4416/4450 4417/4459 +f 4402/4447 4401/4438 4414/4452 +f 4410/4441 4423/4454 4424/4461 +f 4408/4443 4421/4456 4422/4462 +f 4407/4442 4406/4445 4419/4458 +f 4404/4446 4417/4459 4418/4463 +f 4416/4450 4415/4460 4428/4464 +f 4413/4451 4426/4477 4427/4466 +f 4425/4453 4424/4461 4437/4467 +f 4423/4454 4422/4462 4435/4469 +f 4421/4456 4420/4455 4433/4471 +f 4418/4457 4431/5106 4432/4473 +f 4416/4450 4429/4465 4430/4474 +f 4415/4460 4414/4452 4427/4466 +f 4423/4454 4436/4470 4437/4467 +f 4421/4456 4434/4472 4435/4469 +f 4420/4455 4419/4458 4432/4473 +f 4418/4463 4417/4459 4430/4474 +f 4428/4464 4441/4486 4442/4476 +f 4426/4477 4439/4492 4440/4478 +f 4437/4467 4450/4487 4451/4479 +f 4435/4469 4448/4488 4449/4480 +f 4434/4472 4433/4471 4446/4481 +f 4432/4473 4431/5106 4444/4483 +f 4430/4474 4429/4465 4442/4476 +f 4427/4466 4440/4478 4441/4486 +f 4437/4467 4436/4470 4449/4480 +f 4435/4469 4434/4472 4447/4482 +f 4432/4473 4445/4484 4446/4481 +f 4430/4474 4443/4485 4444/4489 +f 4442/4476 4441/4486 4454/4490 +f 4439/4492 
4452/4518 4453/4493 +f 4451/4479 4450/4487 4463/4494 +f 4449/4480 4448/4488 4461/4496 +f 4446/4481 4459/4501 4460/4498 +f 4444/4483 4457/4511 4458/4499 +f 4443/4485 4442/4476 4455/4491 +f 4440/4478 4453/4493 4454/4490 +f 4450/4487 4449/4480 4462/4497 +f 4447/4482 4460/4498 4461/4496 +f 4445/4484 4458/4499 4459/4501 +f 4444/4489 4443/4485 4456/4500 +f 4455/4491 4454/4490 4467/4503 +f 4453/4493 4452/4518 4465/4505 +f 4463/4494 4476/4514 4477/4507 +f 4462/4497 4461/4496 4474/4508 +f 4459/4501 4472/4515 4473/4510 +f 4457/4511 4470/5107 4471/4512 +f 4455/4491 4468/4504 4469/4513 +f 4453/4493 4466/4506 4467/4503 +f 4462/4497 4475/4509 4476/4514 +f 4461/4496 4460/4498 4473/4510 +f 4459/4501 4458/4499 4471/4512 +f 4457/4502 4456/4500 4469/4513 +f 4468/4504 4467/4503 4063/4097 +f 4466/4506 4465/4505 4061/4109 +f 4477/4507 4476/4514 4072/4102 +f 4474/4508 4070/4104 4071/4100 +f 4472/4515 4068/4105 4069/4088 +f 4471/4512 4470/5107 4066/4091 +f 4468/4504 4064/4108 4065/4094 +f 4466/4506 4062/4110 4063/4097 +f 4476/4514 4475/4509 4071/4100 +f 4474/4508 4473/4510 4069/4088 +f 4472/4515 4471/4512 4067/4093 +f 4469/4513 4065/4094 4066/4106 +f 3935/3963 4061/4109 4465/4505 +f 4465/4505 4452/4518 4056/4067 +f 4452/4518 4439/4492 4051/4062 +f 4439/4492 4426/4477 4047/4060 +f 4047/4060 4426/4477 4413/4451 +f 4043/4055 4413/4451 4400/4519 +f 4039/4052 4400/4519 4387/4437 +f 4387/4437 4374/4412 4031/4046 +f 4031/4046 4374/4412 4361/4398 +f 4361/4398 4348/4520 4023/4039 +f 4348/4520 4334/4370 4019/4034 +f 4334/4370 4321/5103 4015/4032 +f 4015/4032 4321/5103 4308/4343 +f 4011/4027 4308/4343 4295/4521 +f 4007/4024 4295/4521 4282/4329 +f 4282/4329 4269/4304 3999/4018 +f 3999/4018 4269/4304 4256/4290 +f 4256/4290 4243/4522 3991/4011 +f 4243/4522 4230/4263 3987/4006 +f 4230/4263 4217/5100 3983/4004 +f 3983/4004 4217/5100 4204/4236 +f 3979/3999 4204/4236 4191/4523 +f 3975/3996 4191/4523 4178/4222 +f 4178/4222 4165/4197 3967/3989 +f 3967/3989 4165/4197 4152/4183 +f 4152/4183 4139/4524 3959/3982 +f 4139/4524 4126/4156 3955/3977 +f 4126/4156 4113/5097 3951/3975 +f 3951/3975 4113/5097 4100/4129 +f 3947/3970 4100/4129 4087/4525 +f 3943/3966 4087/4525 4074/4115 +f 4074/4115 4061/4109 3935/3963 +f 4480/4526 4479/4531 4483/4527 +f 4480/4526 4484/4528 4485/4529 +f 4479/4531 4478/4640 4482/4532 +f 4484/4528 4488/4535 4489/4533 +f 4482/4532 4486/4642 4487/4534 +f 4483/4527 4487/4534 4488/4535 +f 4488/4535 4492/4539 4493/4536 +f 4487/4534 4486/4642 4490/4537 +f 4488/4535 4487/4534 4491/4538 +f 4493/4536 4492/4539 4496/4540 +f 4490/4537 4494/4643 4495/4542 +f 4491/4538 4495/4542 4496/4540 +f 4496/4540 4500/4546 4501/4543 +f 4495/4542 4494/4643 4498/4544 +f 4496/4540 4495/4542 4499/4545 +f 4501/4543 4500/4546 4504/4547 +f 4498/4544 4502/4644 4503/4549 +f 4499/4545 4503/4549 4504/4547 +f 4505/4548 4504/4547 4508/4550 +f 4503/4549 4502/4644 4506/4552 +f 4504/4547 4503/4549 4507/4553 +f 4509/4551 4508/4550 4512/4554 +f 4506/4552 4510/4645 4511/4556 +f 4507/4553 4511/4556 4512/4554 +f 4513/4555 4512/4554 4516/4557 +f 4511/4556 4510/4645 4514/4559 +f 4512/4554 4511/4556 4515/4560 +f 4516/4557 4520/4563 4521/4561 +f 4514/4559 4518/4565 4519/4562 +f 4515/4560 4519/4562 4520/4563 +f 4520/4563 4524/4567 4525/4564 +f 4518/4565 4522/4570 4523/4566 +f 4520/4563 4519/4562 4523/4566 +f 4525/4564 4524/4567 4528/4568 +f 4522/4570 4526/4646 4527/4571 +f 4523/4566 4527/4571 4528/4568 +f 4528/4568 4532/4575 4533/4572 +f 4527/4571 4526/4646 4530/4573 +f 4528/4568 4527/4571 4531/4574 +f 4533/4572 4532/4575 4536/4576 +f 4530/4573 4534/4581 4535/4578 +f 
4531/4574 4535/4578 4536/4576 +f 4537/4577 4536/4576 4540/4579 +f 4534/4581 4538/4647 4539/4582 +f 4536/4576 4535/4578 4539/4582 +f 4541/4580 4540/4579 4544/4583 +f 4539/4582 4538/4647 4542/4585 +f 4539/4582 4543/4586 4544/4583 +f 4544/4583 4548/4589 4549/4587 +f 4542/4585 4546/4648 4547/4588 +f 4544/4583 4543/4586 4547/4588 +f 4548/4589 4552/4593 4553/4590 +f 4547/4588 4546/4648 4550/4591 +f 4547/4588 4551/4592 4552/4593 +f 4552/4593 4556/4597 4557/4594 +f 4551/4592 4550/4591 4554/4595 +f 4552/4593 4551/4592 4555/4596 +f 4557/4594 4556/4597 4560/4598 +f 4554/4595 4558/4649 4559/4600 +f 4555/4596 4559/4600 4560/4598 +f 4560/4598 4564/4604 4565/4601 +f 4559/4600 4558/4649 4562/4602 +f 4560/4598 4559/4600 4563/4603 +f 4565/4601 4564/4604 4568/4605 +f 4562/4602 4566/4650 4567/4607 +f 4563/4603 4567/4607 4568/4605 +f 4569/4606 4568/4605 4572/4608 +f 4567/4607 4566/4650 4570/4610 +f 4568/4605 4567/4607 4571/4611 +f 4572/4608 4576/4614 4577/4612 +f 4570/4610 4574/4651 4575/4613 +f 4571/4611 4575/4613 4576/4614 +f 4576/4614 4580/4618 4581/4615 +f 4575/4613 4574/4651 4578/4616 +f 4576/4614 4575/4613 4579/4617 +f 4580/4618 4584/4621 4585/4619 +f 4578/4616 4582/4652 4583/4620 +f 4579/4617 4583/4620 4584/4621 +f 4584/4621 4588/4625 4589/4622 +f 4583/4620 4582/4652 4586/4623 +f 4584/4621 4583/4620 4587/4624 +f 4589/4622 4588/4625 4592/4626 +f 4586/4623 4590/4653 4591/4628 +f 4587/4624 4591/4628 4592/4626 +f 4592/4626 4596/4632 4597/4629 +f 4591/4628 4590/4653 4594/4630 +f 4592/4626 4591/4628 4595/4631 +f 4597/4629 4596/4632 4601/4633 +f 4594/4630 4599/4654 4600/4635 +f 4595/4631 4600/4635 4601/4633 +f 4602/4634 4601/4633 4605/4636 +f 4600/4635 4599/4654 4603/4638 +f 4601/4633 4600/4635 4604/4639 +f 4606/4637 4605/4636 4480/4526 +f 4603/4638 4478/4640 4479/4531 +f 4604/4639 4479/4531 4480/4526 +f 4615/4655 4614/4672 4627/4656 +f 4612/4658 4625/5108 4626/4659 +f 4611/4661 4610/4675 4623/4662 +f 4609/4664 4608/4677 4621/4665 +f 4618/4667 4617/4680 4630/4668 +f 4615/4655 4628/4657 4629/4670 +f 4613/4660 4626/4659 4627/4656 +f 4612/4673 4611/4661 4624/4663 +f 4609/4664 4622/4666 4623/4662 +f 4607/4676 4620/4682 4621/4665 +f 4619/4678 4618/4667 4631/4669 +f 4617/4680 4616/4671 4629/4670 +f 4622/4666 4635/4692 4636/4681 +f 4620/4682 4633/5092 4634/4683 +f 4632/4679 4631/4669 4644/4684 +f 4629/4670 4642/4693 4643/4686 +f 4628/4657 4627/4656 4640/4687 +f 4626/4659 4625/5108 4638/4689 +f 4624/4663 4623/4662 4636/4681 +f 4622/4666 4621/4665 4634/4683 +f 4631/4669 4630/4668 4643/4686 +f 4628/4657 4641/4688 4642/4693 +f 4626/4659 4639/4690 4640/4687 +f 4624/4663 4637/4691 4638/4694 +f 4635/4692 4648/4705 4649/4695 +f 4634/4683 4633/5092 4646/4696 +f 4644/4684 4657/4706 4658/4698 +f 4642/4693 4655/4707 4656/4699 +f 4641/4688 4640/4687 4653/4700 +f 4639/4690 4638/4689 4651/4702 +f 4636/4681 4649/4695 4650/4704 +f 4635/4692 4634/4683 4647/4697 +f 4643/4686 4656/4699 4657/4706 +f 4642/4693 4641/4688 4654/4701 +f 4640/4687 4639/4690 4652/4703 +f 4637/4691 4650/4704 4651/4708 +f 4648/4705 4661/4718 4662/4709 +f 4646/4696 4659/5109 4660/4710 +f 4658/4698 4657/4706 4670/4711 +f 4656/4699 4655/4707 4668/4713 +f 4653/4700 4666/4719 4667/4715 +f 4651/4702 4664/5110 4665/4716 +f 4649/4695 4662/4709 4663/4717 +f 4648/4705 4647/4697 4660/4710 +f 4656/4699 4669/4714 4670/4711 +f 4654/4701 4667/4715 4668/4713 +f 4653/4700 4652/4703 4665/4716 +f 4651/4708 4650/4704 4663/4717 +f 4661/4718 4674/4731 4675/4721 +f 4660/4710 4659/5109 4672/4722 +f 4670/4711 4683/4732 4684/4724 +f 4668/4713 4681/4733 4682/4725 +f 4667/4715 4666/4719 
4679/4726 +f 4665/4716 4664/5110 4677/4728 +f 4663/4717 4662/4709 4675/4721 +f 4661/4718 4660/4710 4673/4723 +f 4670/4711 4669/4714 4682/4725 +f 4667/4715 4680/4727 4681/4733 +f 4665/4716 4678/4729 4679/4726 +f 4663/4717 4676/4730 4677/4734 +f 4675/4721 4674/4731 4687/4735 +f 4672/4722 4685/5091 4686/4737 +f 4684/4724 4683/4732 4696/4738 +f 4682/4725 4681/4733 4694/4740 +f 4679/4726 4692/4746 4693/4742 +f 4678/4729 4677/4728 4690/4743 +f 4676/4730 4675/4721 4688/4736 +f 4673/4723 4686/4737 4687/4735 +f 4683/4732 4682/4725 4695/4741 +f 4681/4733 4680/4727 4693/4742 +f 4678/4729 4691/4744 4692/4746 +f 4676/4730 4689/4745 4690/4747 +f 4688/4736 4687/4735 4700/4748 +f 4686/4737 4685/5091 4698/4750 +f 4696/4738 4709/4759 4710/4752 +f 4695/4741 4694/4740 4707/4753 +f 4692/4746 4705/4760 4706/4755 +f 4691/4744 4690/4743 4703/4756 +f 4688/4736 4701/4749 4702/4758 +f 4686/4737 4699/4751 4700/4748 +f 4695/4741 4708/4754 4709/4759 +f 4694/4740 4693/4742 4706/4755 +f 4692/4746 4691/4744 4704/4757 +f 4689/4745 4702/4758 4703/4761 +f 4701/4749 4700/4748 4713/4762 +f 4699/4751 4698/4750 4711/4764 +f 4710/4752 4709/4759 4722/4766 +f 4707/4753 4720/4773 4721/4768 +f 4705/4760 4718/4774 4719/4769 +f 4704/4757 4703/4756 4716/4770 +f 4701/4749 4714/4763 4715/4772 +f 4699/4751 4712/4765 4713/4762 +f 4709/4759 4708/4754 4721/4768 +f 4707/4753 4706/4755 4719/4769 +f 4705/4760 4704/4757 4717/4771 +f 4702/4758 4715/4772 4716/4775 +f 4713/4762 4726/4786 4727/4776 +f 4711/4764 4724/4789 4725/4777 +f 4723/4767 4722/4766 4735/4778 +f 4721/4768 4720/4773 4733/4780 +f 4719/4769 4718/4774 4731/4782 +f 4716/4770 4729/5111 4730/4784 +f 4715/4772 4714/4763 4727/4776 +f 4713/4762 4712/4765 4725/4777 +f 4721/4768 4734/4781 4735/4778 +f 4719/4769 4732/4783 4733/4780 +f 4717/4771 4730/4784 4731/4782 +f 4716/4775 4715/4772 4728/4785 +f 4726/4786 4739/4799 4740/4788 +f 4724/4789 4737/5090 4738/4790 +f 4736/4779 4735/4778 4748/4791 +f 4733/4780 4746/4800 4747/4793 +f 4732/4783 4731/4782 4744/4794 +f 4730/4784 4729/5111 4742/4796 +f 4728/4785 4727/4776 4740/4788 +f 4726/4786 4725/4777 4738/4790 +f 4735/4778 4734/4781 4747/4793 +f 4732/4783 4745/4795 4746/4800 +f 4730/4784 4743/4797 4744/4794 +f 4728/4785 4741/4798 4742/4801 +f 4739/4799 4752/4812 4753/4802 +f 4738/4790 4737/5090 4750/4803 +f 4748/4791 4761/4813 4762/4805 +f 4746/4800 4759/4814 4760/4806 +f 4745/4795 4744/4794 4757/4807 +f 4743/4797 4742/4796 4755/4809 +f 4740/4788 4753/4802 4754/4811 +f 4739/4799 4738/4790 4751/4804 +f 4747/4793 4760/4806 4761/4813 +f 4745/4795 4758/4808 4759/4814 +f 4744/4794 4743/4797 4756/4810 +f 4741/4798 4754/4811 4755/4815 +f 4753/4802 4752/4812 4765/4816 +f 4750/4803 4763/5112 4764/4818 +f 4762/4805 4761/4813 4774/4819 +f 4759/4814 4772/4826 4773/4821 +f 4758/4808 4757/4807 4770/4822 +f 4755/4809 4768/5113 4769/4824 +f 4753/4802 4766/4817 4767/4825 +f 4752/4812 4751/4804 4764/4818 +f 4760/4806 4773/4821 4774/4819 +f 4758/4808 4771/4823 4772/4826 +f 4757/4807 4756/4810 4769/4824 +f 4755/4815 4754/4811 4767/4825 +f 4765/4816 4778/4838 4779/4828 +f 4764/4818 4763/5112 4776/4829 +f 4774/4819 4787/4839 4788/4831 +f 4772/4826 4785/4840 4786/4832 +f 4771/4823 4770/4822 4783/4833 +f 4769/4824 4768/5113 4781/4835 +f 4767/4825 4766/4817 4779/4828 +f 4765/4816 4764/4818 4777/4830 +f 4774/4819 4773/4821 4786/4832 +f 4771/4823 4784/4834 4785/4840 +f 4769/4824 4782/4836 4783/4833 +f 4767/4825 4780/4837 4781/4841 +f 4779/4828 4778/4838 4791/4842 +f 4776/4829 4789/5089 4790/4844 +f 4788/4831 4787/4839 4800/4845 +f 4786/4832 4785/4840 4798/4847 +f 4783/4833 
4796/4853 4797/4849 +f 4782/4836 4781/4835 4794/4850 +f 4780/4837 4779/4828 4792/4843 +f 4777/4830 4790/4844 4791/4842 +f 4787/4839 4786/4832 4799/4848 +f 4785/4840 4784/4834 4797/4849 +f 4782/4836 4795/4851 4796/4853 +f 4780/4837 4793/4852 4794/4854 +f 4792/4843 4791/4842 4804/4855 +f 4790/4844 4789/5089 4802/4857 +f 4800/4845 4813/4866 4814/4859 +f 4799/4848 4798/4847 4811/4860 +f 4796/4853 4809/4867 4810/4862 +f 4795/4851 4794/4850 4807/4863 +f 4792/4843 4805/4856 4806/4865 +f 4790/4844 4803/4858 4804/4855 +f 4799/4848 4812/4861 4813/4866 +f 4798/4847 4797/4849 4810/4862 +f 4796/4853 4795/4851 4808/4864 +f 4793/4852 4806/4865 4807/4868 +f 4805/4856 4804/4855 4817/4869 +f 4803/4858 4802/4857 4815/4871 +f 4814/4859 4813/4866 4826/4873 +f 4811/4860 4824/4880 4825/4875 +f 4809/4867 4822/4881 4823/4876 +f 4808/4864 4807/4863 4820/4877 +f 4805/4856 4818/4870 4819/4879 +f 4803/4858 4816/4872 4817/4869 +f 4813/4866 4812/4861 4825/4875 +f 4811/4860 4810/4862 4823/4876 +f 4809/4867 4808/4864 4821/4878 +f 4806/4865 4819/4879 4820/4882 +f 4817/4869 4830/4892 4831/4883 +f 4815/4871 4828/4896 4829/4884 +f 4826/4873 4839/4893 4840/4885 +f 4825/4875 4824/4880 4837/4886 +f 4823/4876 4822/4881 4835/4888 +f 4820/4877 4833/5114 4834/4890 +f 4819/4879 4818/4870 4831/4883 +f 4817/4869 4816/4872 4829/4884 +f 4825/4875 4838/4887 4839/4893 +f 4823/4876 4836/4889 4837/4886 +f 4821/4878 4834/4890 4835/4888 +f 4820/4882 4819/4879 4832/4891 +f 4830/4892 4843/4906 4844/4895 +f 4828/4896 4841/5088 4842/4897 +f 4840/4885 4839/4893 4852/4898 +f 4837/4886 4850/4907 4851/4900 +f 4836/4889 4835/4888 4848/4901 +f 4834/4890 4833/5114 4846/4903 +f 4832/4891 4831/4883 4844/4895 +f 4830/4892 4829/4884 4842/4897 +f 4839/4893 4838/4887 4851/4900 +f 4836/4889 4849/4902 4850/4907 +f 4834/4890 4847/4904 4848/4901 +f 4832/4891 4845/4905 4846/4908 +f 4843/4906 4856/4919 4857/4909 +f 4842/4897 4841/5088 4854/4910 +f 4852/4898 4865/4920 4866/4912 +f 4850/4907 4863/4921 4864/4913 +f 4849/4902 4848/4901 4861/4914 +f 4847/4904 4846/4903 4859/4916 +f 4844/4895 4857/4909 4858/4918 +f 4843/4906 4842/4897 4855/4911 +f 4851/4900 4864/4913 4865/4920 +f 4849/4902 4862/4915 4863/4921 +f 4848/4901 4847/4904 4860/4917 +f 4845/4905 4858/4918 4859/4922 +f 4857/4909 4856/4919 4869/4923 +f 4855/4911 4854/4910 4867/4925 +f 4866/4912 4865/4920 4878/4927 +f 4864/4913 4863/4921 4876/4929 +f 4862/4915 4861/4914 4874/4931 +f 4859/4916 4872/5115 4873/4933 +f 4857/4909 4870/4924 4871/4934 +f 4856/4919 4855/4911 4868/4926 +f 4864/4913 4877/4930 4878/4927 +f 4862/4915 4875/4932 4876/4929 +f 4861/4914 4860/4917 4873/4933 +f 4859/4922 4858/4918 4871/4934 +f 4869/4923 4882/4946 4883/4936 +f 4868/4926 4867/4925 4880/4937 +f 4878/4927 4891/4947 4892/4939 +f 4877/4930 4876/4929 4889/4940 +f 4874/4931 4887/4948 4888/4942 +f 4873/4933 4872/5115 4885/4943 +f 4871/4934 4870/4924 4883/4936 +f 4868/4926 4881/4938 4882/4946 +f 4878/4927 4877/4930 4890/4941 +f 4876/4929 4875/4932 4888/4942 +f 4873/4933 4886/4944 4887/4948 +f 4871/4934 4884/4945 4885/4949 +f 4883/4936 4882/4946 4896/4950 +f 4880/4937 4894/5087 4895/4952 +f 4892/4939 4891/4947 4905/4953 +f 4890/4941 4889/4940 4903/4955 +f 4887/4948 4901/4960 4902/4957 +f 4885/4943 4899/4970 4900/4958 +f 4884/4945 4883/4936 4897/4951 +f 4881/4938 4895/4952 4896/4950 +f 4891/4947 4890/4941 4904/4956 +f 4889/4940 4888/4942 4902/4957 +f 4886/4944 4900/4958 4901/4960 +f 4885/4949 4884/4945 4898/4959 +f 4897/4951 4896/4950 4909/4962 +f 4895/4952 4894/5087 4907/4964 +f 4905/4953 4918/4973 4919/4966 +f 4904/4956 4903/4955 4916/4967 +f 
4901/4960 4914/4974 4915/4969 +f 4899/4970 4912/5116 4913/4971 +f 4897/4951 4910/4963 4911/4972 +f 4895/4952 4908/4965 4909/4962 +f 4904/4956 4917/4968 4918/4973 +f 4903/4955 4902/4957 4915/4969 +f 4901/4960 4900/4958 4913/4971 +f 4899/4961 4898/4959 4911/4972 +f 4910/4963 4909/4962 4922/4976 +f 4908/4965 4907/4964 4920/4978 +f 4919/4966 4918/4973 4931/4980 +f 4916/4967 4929/4987 4930/4982 +f 4914/4974 4927/4988 4928/4983 +f 4913/4971 4912/5116 4925/4984 +f 4910/4963 4923/4977 4924/4986 +f 4908/4965 4921/4979 4922/4976 +f 4918/4973 4917/4968 4930/4982 +f 4916/4967 4915/4969 4928/4983 +f 4914/4974 4913/4971 4926/4985 +f 4911/4972 4924/4986 4925/4989 +f 4922/4976 4935/4999 4936/4990 +f 4920/4978 4933/5003 4934/4991 +f 4931/4980 4944/5000 4945/4992 +f 4930/4982 4929/4987 4942/4993 +f 4928/4983 4927/4988 4940/4995 +f 4925/4984 4938/5010 4939/4997 +f 4924/4986 4923/4977 4936/4990 +f 4922/4976 4921/4979 4934/4991 +f 4930/4982 4943/4994 4944/5000 +f 4928/4983 4941/4996 4942/4993 +f 4926/4985 4939/4997 4940/4995 +f 4925/4989 4924/4986 4937/4998 +f 4935/4999 4948/5013 4949/5002 +f 4933/5003 4946/5086 4947/5004 +f 4945/4992 4944/5000 4957/5005 +f 4942/4993 4955/5014 4956/5007 +f 4941/4996 4940/4995 4953/5008 +f 4938/5010 4951/5023 4952/5011 +f 4937/4998 4936/4990 4949/5002 +f 4935/4999 4934/4991 4947/5004 +f 4944/5000 4943/4994 4956/5007 +f 4941/4996 4954/5009 4955/5014 +f 4939/4997 4952/5011 4953/5008 +f 4938/5001 4937/4998 4950/5012 +f 4948/5013 4961/5026 4962/5016 +f 4947/5004 4946/5086 4959/5017 +f 4957/5005 4970/5027 4971/5019 +f 4955/5014 4968/5028 4969/5020 +f 4954/5009 4953/5008 4966/5021 +f 4951/5023 4964/5037 4965/5024 +f 4949/5002 4962/5016 4963/5025 +f 4948/5013 4947/5004 4960/5018 +f 4956/5007 4969/5020 4970/5027 +f 4954/5009 4967/5022 4968/5028 +f 4953/5008 4952/5011 4965/5024 +f 4951/5015 4950/5012 4963/5025 +f 4962/5016 4961/5026 4974/5030 +f 4959/5017 4972/5117 4973/5032 +f 4971/5019 4970/5027 4983/5033 +f 4968/5028 4981/5040 4982/5035 +f 4966/5021 4979/5041 4980/5036 +f 4964/5037 4977/5118 4978/5038 +f 4962/5016 4975/5031 4976/5039 +f 4960/5018 4973/5032 4974/5030 +f 4969/5020 4982/5035 4983/5033 +f 4967/5022 4980/5036 4981/5040 +f 4966/5021 4965/5024 4978/5038 +f 4964/5029 4963/5025 4976/5039 +f 4975/5031 4974/5030 4987/5043 +f 4973/5032 4972/5117 4985/5045 +f 4983/5033 4996/5053 4997/5047 +f 4981/5040 4994/5054 4995/5048 +f 4979/5041 4992/5055 4993/5049 +f 4978/5038 4977/5118 4990/5050 +f 4976/5039 4975/5031 4988/5044 +f 4973/5032 4986/5046 4987/5043 +f 4983/5033 4982/5035 4995/5048 +f 4981/5040 4980/5036 4993/5049 +f 4978/5038 4991/5051 4992/5055 +f 4976/5039 4989/5052 4990/5056 +f 4988/5044 4987/5043 5000/5057 +f 4985/5045 4998/5085 4999/5059 +f 4997/5047 4996/5053 5009/5060 +f 4995/5048 4994/5054 5007/5062 +f 4992/5055 5005/5068 5006/5064 +f 4991/5051 4990/5050 5003/5065 +f 4989/5052 4988/5044 5001/5058 +f 4986/5046 4999/5059 5000/5057 +f 4996/5053 4995/5048 5008/5063 +f 4994/5054 4993/5049 5006/5064 +f 4991/5051 5004/5066 5005/5068 +f 4989/5052 5002/5067 5003/5069 +f 5001/5058 5000/5057 5013/5070 +f 4999/5059 4998/5085 5011/5072 +f 5009/5060 5022/5081 5023/5074 +f 5008/5063 5007/5062 5020/5075 +f 5005/5068 5018/5082 5019/5077 +f 5004/5066 5003/5065 5016/5078 +f 5001/5058 5014/5071 5015/5080 +f 4999/5059 5012/5073 5013/5070 +f 5008/5063 5021/5076 5022/5081 +f 5007/5062 5006/5064 5019/5077 +f 5005/5068 5004/5066 5017/5079 +f 5002/5067 5015/5080 5016/5083 +f 5014/5071 5013/5070 4609/4664 +f 5012/5073 5011/5072 4607/4676 +f 5022/5081 4618/4667 4619/4678 +f 5020/5075 4616/4671 
4617/4680 +f 5018/5082 4614/4672 4615/4655 +f 5017/5079 5016/5078 4612/4658 +f 5014/5071 4610/4675 4611/4661 +f 5012/5073 4608/4677 4609/4664 +f 5021/5076 4617/4680 4618/4667 +f 5020/5075 5019/5077 4615/4655 +f 5018/5082 5017/5079 4613/4660 +f 5015/5080 4611/4661 4612/4673 +f 4481/4530 4607/4676 5011/5072 +f 5011/5072 4998/5085 4602/4634 +f 4998/5085 4985/5045 4597/4629 +f 4985/5045 4972/5117 4593/4627 +f 4593/4627 4972/5117 4959/5017 +f 4589/4622 4959/5017 4946/5086 +f 4585/4619 4946/5086 4933/5003 +f 4933/5003 4920/4978 4577/4612 +f 4577/4612 4920/4978 4907/4964 +f 4907/4964 4894/5087 4569/4606 +f 4894/5087 4880/4937 4565/4601 +f 4880/4937 4867/4925 4561/4599 +f 4561/4599 4867/4925 4854/4910 +f 4557/4594 4854/4910 4841/5088 +f 4553/4590 4841/5088 4828/4896 +f 4828/4896 4815/4871 4545/4584 +f 4545/4584 4815/4871 4802/4857 +f 4802/4857 4789/5089 4537/4577 +f 4789/5089 4776/4829 4533/4572 +f 4776/4829 4763/5112 4529/4569 +f 4529/4569 4763/5112 4750/4803 +f 4525/4564 4750/4803 4737/5090 +f 4521/4561 4737/5090 4724/4789 +f 4724/4789 4711/4764 4513/4555 +f 4513/4555 4711/4764 4698/4750 +f 4698/4750 4685/5091 4505/4548 +f 4685/5091 4672/4722 4501/4543 +f 4672/4722 4659/5109 4497/4541 +f 4497/4541 4659/5109 4646/4696 +f 4493/4536 4646/4696 4633/5092 +f 4489/4533 4633/5092 4620/4682 +f 4620/4682 4607/4676 4481/4530 diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/data/landmark_embedding.npy b/NeuralVoicePuppetry/neural-code/third/DECA/data/landmark_embedding.npy new file mode 100755 index 0000000..046c64f --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/data/landmark_embedding.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8095348eeafce5a02f6bd8765146307f9567a3f03b316d788a2e47336d667954 +size 31292 diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/data/mask_2.png b/NeuralVoicePuppetry/neural-code/third/DECA/data/mask_2.png new file mode 100644 index 0000000..7a63ded Binary files /dev/null and b/NeuralVoicePuppetry/neural-code/third/DECA/data/mask_2.png differ diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/data/mask_3.png b/NeuralVoicePuppetry/neural-code/third/DECA/data/mask_3.png new file mode 100644 index 0000000..30b8407 Binary files /dev/null and b/NeuralVoicePuppetry/neural-code/third/DECA/data/mask_3.png differ diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/data/mask_face.png b/NeuralVoicePuppetry/neural-code/third/DECA/data/mask_face.png new file mode 100644 index 0000000..69e2d94 Binary files /dev/null and b/NeuralVoicePuppetry/neural-code/third/DECA/data/mask_face.png differ diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/data/mask_mouth.png b/NeuralVoicePuppetry/neural-code/third/DECA/data/mask_mouth.png new file mode 100644 index 0000000..b77ae40 Binary files /dev/null and b/NeuralVoicePuppetry/neural-code/third/DECA/data/mask_mouth.png differ diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/data/mask_mouth_2.png b/NeuralVoicePuppetry/neural-code/third/DECA/data/mask_mouth_2.png new file mode 100644 index 0000000..e033c78 Binary files /dev/null and b/NeuralVoicePuppetry/neural-code/third/DECA/data/mask_mouth_2.png differ diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/data/mean_texture.jpg b/NeuralVoicePuppetry/neural-code/third/DECA/data/mean_texture.jpg new file mode 100644 index 0000000..a139edd --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/data/mean_texture.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:34364c02dec9dfb30580d1575b375553c6cd8d5da5caa6ac5a611ce65b91f529 +size 53722 diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/data/texture_data_256.npy b/NeuralVoicePuppetry/neural-code/third/DECA/data/texture_data_256.npy new file mode 100644 index 0000000..46c3295 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/data/texture_data_256.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8563919927a2bc2ea7343d0ceef35254105d21a1947f00e5ca39e01d6ed1f9f1 +size 6677908 diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/data/uv_face_eye_mask.png b/NeuralVoicePuppetry/neural-code/third/DECA/data/uv_face_eye_mask.png new file mode 100755 index 0000000..86c768c Binary files /dev/null and b/NeuralVoicePuppetry/neural-code/third/DECA/data/uv_face_eye_mask.png differ diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/data/uv_face_mask.png b/NeuralVoicePuppetry/neural-code/third/DECA/data/uv_face_mask.png new file mode 100755 index 0000000..7baaceb Binary files /dev/null and b/NeuralVoicePuppetry/neural-code/third/DECA/data/uv_face_mask.png differ diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/decalib/__init__.py b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/decalib/datasets/__init__.py b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/datasets/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/decalib/datasets/datasets.py b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/datasets/datasets.py new file mode 100755 index 0000000..f27fdd3 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/datasets/datasets.py @@ -0,0 +1,176 @@ +#-*- coding: utf-8 -*- +# +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# Using this computer program means that you agree to the terms +# in the LICENSE file included with this software distribution. +# Any use not explicitly granted by the LICENSE is prohibited. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# For comments or questions, please email us at deca@tue.mpg.de +# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de + +import os +import torch +from torch.utils.data import Dataset +import numpy as np +import cv2 +import scipy +from skimage.io import imread +from skimage.transform import estimate_transform, warp +from glob import glob +import scipy.io +from subprocess import call + +from . 
import detectors + + +def check_mkdir(path): + if not os.path.exists(path): + print('creating %s' % path) + os.makedirs(path) + +def video2sequence(video_path): + videofolder = video_path.split('.')[0] + check_mkdir(videofolder) + video_name = video_path.split('/')[-1].split('.')[0] + vidcap = cv2.VideoCapture(video_path) + success,image = vidcap.read() + count = 0 + imagepath_list = [] + while success: + imagepath = '{}/{}_frame{:04d}.png'.format(videofolder, video_name, count) + cv2.imwrite(imagepath, image) # save frame as PNG file + success,image = vidcap.read() + count += 1 + imagepath_list.append(imagepath) + print('video frames are stored in {}'.format(videofolder)) + return imagepath_list + + +def video2sequence_lossless(video_path, target_dir): + if target_dir is not None: + videofolder = target_dir + check_mkdir(videofolder) + video_name = video_path.split('/')[-1].split('.')[0] + + else: + videofolder = video_path.split('.')[0] + check_mkdir(videofolder) + video_name = video_path.split('/')[-1].split('.')[0] + + cmd = (f'ffmpeg -i {video_path} -vf fps=25 {videofolder}/{video_name}_frame%04d.png').split() + call(cmd) + imagepath_list = [os.path.join(videofolder, f) for f in os.listdir(videofolder)] + print('video frames are stored in {}'.format(videofolder)) + return imagepath_list + + +class TestData(Dataset): + def __init__(self, testpath, iscrop=True, crop_size=224, scale=1.25, face_detector='mtcnn', target_dir=None): + ''' + testpath: folder, imagepath_list, image path, video path + ''' + print('testpath: ', testpath) + if isinstance(testpath, list): + self.imagepath_list = testpath + elif os.path.isdir(testpath): + self.imagepath_list = glob(testpath + '/*.jpg') + glob(testpath + '/*.png') + glob(testpath + '/*.bmp') + elif os.path.isfile(testpath) and (testpath[-3:] in ['jpg', 'png', 'bmp']): + self.imagepath_list = [testpath] + elif os.path.isfile(testpath) and (testpath[-3:] in ['mp4', 'csv', 'vid', 'ebm']): + self.imagepath_list = video2sequence_lossless(testpath, target_dir) + else: + print(f'please check the test path: {testpath}') + exit() + print('total {} images'.format(len(self.imagepath_list))) + self.imagepath_list = sorted(self.imagepath_list) + self.crop_size = crop_size + self.scale = scale + self.iscrop = iscrop + self.resolution_inp = crop_size + if face_detector == 'fan': + self.face_detector = detectors.FAN() + # elif face_detector == 'mtcnn': + # self.face_detector = detectors.MTCNN() + else: + print(f'please check the detector: {face_detector}') + exit() + + def __len__(self): + return len(self.imagepath_list) + + def bbox2point(self, left, right, top, bottom, type='bbox'): + ''' bbox from detector and landmarks are different + ''' + if type=='kpt68': + old_size = (right - left + bottom - top)/2*1.1 + center = np.array([right - (right - left) / 2.0, bottom - (bottom - top) / 2.0 ]) + elif type=='bbox': + old_size = (right - left + bottom - top)/2 + center = np.array([right - (right - left) / 2.0, bottom - (bottom - top) / 2.0 + old_size*0.12]) + else: + raise NotImplementedError + return old_size, center + + def __getitem__(self, index): + imagepath = self.imagepath_list[index] + imagename = imagepath.split('/')[-1].split('.')[0] + + image = np.array(imread(imagepath)) + if len(image.shape) == 2: + image = image[:,:,None].repeat(3, axis=2) # grayscale -> 3 channels + if len(image.shape) == 3 and image.shape[2] > 3: + image = image[:,:,:3] + + h, w, _ = image.shape + if self.iscrop: + # provide kpt as txt file, or mat file (for AFLW2000) + kpt_matpath = imagepath.replace('.jpg', '.mat').replace('.png', '.mat') + kpt_txtpath = imagepath.replace('.jpg', '.txt').replace('.png', '.txt') + if os.path.exists(kpt_matpath): + kpt = scipy.io.loadmat(kpt_matpath)['pt3d_68'].T + left = np.min(kpt[:,0]); right = np.max(kpt[:,0]); + top = np.min(kpt[:,1]); bottom = np.max(kpt[:,1]) + old_size, center = self.bbox2point(left, right, top, bottom, type='kpt68') + elif os.path.exists(kpt_txtpath): + kpt = np.loadtxt(kpt_txtpath) + left = np.min(kpt[:,0]); right = np.max(kpt[:,0]); + top = np.min(kpt[:,1]); bottom = np.max(kpt[:,1]) + old_size, center = self.bbox2point(left, right, top, bottom, type='kpt68') + else: + try: + bbox, bbox_type = self.face_detector.run(image) + + + except ValueError: + bbox = self.face_detector.run(image) + bbox_type = 'kpt68' + + if len(bbox) < 4: + print('no face detected! run original image') + left = 0; right = h-1; top=0; bottom=w-1 + else: + left = bbox[0]; right=bbox[2] + top = bbox[1]; bottom=bbox[3] + old_size, center = self.bbox2point(left, right, top, bottom, type=bbox_type) + + size = int(old_size*self.scale) + src_pts = np.array([[center[0]-size/2, center[1]-size/2], [center[0] - size/2, center[1]+size/2], [center[0]+size/2, center[1]-size/2]]) + else: + src_pts = np.array([[0, 0], [0, h-1], [w-1, 0]]) + + DST_PTS = np.array([[0,0], [0,self.resolution_inp - 1], [self.resolution_inp - 1, 0]]) + tform = estimate_transform('similarity', src_pts, DST_PTS) + + image = image/255. + dst_image = warp(image, tform.inverse, output_shape=(self.resolution_inp, self.resolution_inp)) + dst_image = dst_image.transpose(2,0,1) + return {'image': torch.tensor(dst_image).float(), + 'imagename': imagename, + 'tform': tform, + 'original_image': torch.tensor(image.transpose(2,0,1)).float(), + } \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/decalib/datasets/detectors.py b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/datasets/detectors.py new file mode 100755 index 0000000..0b6358d --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/datasets/detectors.py @@ -0,0 +1,60 @@ +# -*- coding: utf-8 -*- +# +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# Using this computer program means that you agree to the terms +# in the LICENSE file included with this software distribution. +# Any use not explicitly granted by the LICENSE is prohibited. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# For comments or questions, please email us at deca@tue.mpg.de +# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de + +import numpy as np +import torch + +class FAN(object): + def __init__(self): + import face_alignment + self.model = face_alignment.FaceAlignment(face_alignment.LandmarksType._2D, flip_input=False) + + def run(self, image): + ''' + image: 0-255, uint8, rgb, [h, w, 3] + return: detected box list + ''' + out = self.model.get_landmarks(image) + if out is None: + return [0] + else: + kpt = out[0].squeeze() + left = np.min(kpt[:,0]); right = np.max(kpt[:,0]); + top = np.min(kpt[:,1]); bottom = np.max(kpt[:,1]) + bbox = [left,top, right, bottom] + return bbox, 'kpt68' + +class MTCNN(object): + def __init__(self, device = "cuda" if torch.cuda.is_available() else "cpu"): + ''' + https://github.com/timesler/facenet-pytorch/blob/master/examples/infer.ipynb + ''' + from facenet_pytorch import MTCNN as mtcnn + self.device = device + self.model = mtcnn(keep_all=True) + def run(self, input): + ''' + image: 0-255, uint8, rgb, [h, w, 3] + return: detected box + ''' + out = self.model.detect(input[None,...]) + if out[0][0] is None: + return [0] + else: + bbox = out[0][0].squeeze() + return bbox, 'bbox' + + + diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/decalib/deca.py b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/deca.py new file mode 100644 index 0000000..2019daa --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/deca.py @@ -0,0 +1,466 @@ +# -*- coding: utf-8 -*- +# +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# Using this computer program means that you agree to the terms +# in the LICENSE file included with this software distribution. +# Any use not explicitly granted by the LICENSE is prohibited. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# For comments or questions, please email us at deca@tue.mpg.de +# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de + +import os +import torch +import torchvision +from torchvision import transforms +from PIL import Image +import torch.nn.functional as F +from skimage.transform import warp +import numpy as np +from skimage.io import imread + +from .utils.renderer import SRenderY +from .models.encoders import ResnetEncoder +from .models.FLAME import FLAME, FLAMETex +from .models.decoders import Generator +from .utils import util +from .utils.config import cfg +torch.backends.cudnn.benchmark = True + +class DECA(object): + def __init__(self, config=None, device="cuda" if torch.cuda.is_available() else "cpu"): + if config is None: + self.cfg = cfg + else: + self.cfg = config + self.device = device + self.image_size = self.cfg.dataset.image_size + self.uv_size = self.cfg.model.uv_size + + self._create_model(self.cfg.model) + self._setup_renderer(self.cfg.model) + + def _setup_renderer(self, model_cfg): + self.render = SRenderY(self.image_size, obj_filename=model_cfg.topology_path, uv_size=model_cfg.uv_size).to(self.device) + # face mask for rendering details + mask = imread(model_cfg.face_eye_mask_path).astype(np.float32)/255.; mask = torch.from_numpy(mask[:,:,0])[None,None,:,:].contiguous() + self.uv_face_eye_mask = F.interpolate(mask, [model_cfg.uv_size, model_cfg.uv_size]).to(self.device) + mask = imread(model_cfg.face_mask_path).astype(np.float32)/255.; mask = torch.from_numpy(mask[:,:,0])[None,None,:,:].contiguous() + self.uv_face_mask = F.interpolate(mask, [model_cfg.uv_size, model_cfg.uv_size]).to(self.device) + # displacement correction + fixed_dis = np.load(model_cfg.fixed_displacement_path) + self.fixed_uv_dis = torch.tensor(fixed_dis).float().to(self.device) + # mean texture + mean_texture = imread(model_cfg.mean_tex_path).astype(np.float32)/255.; mean_texture = torch.from_numpy(mean_texture.transpose(2,0,1))[None,:,:,:].contiguous() + self.mean_texture = F.interpolate(mean_texture, [model_cfg.uv_size, model_cfg.uv_size]).to(self.device) + # dense mesh template, for save detail mesh + self.dense_template = np.load(model_cfg.dense_template_path, allow_pickle=True, encoding='latin1').item() + + def _create_model(self, model_cfg): + # set up parameters + self.n_param = model_cfg.n_shape+model_cfg.n_tex+model_cfg.n_exp+model_cfg.n_pose+model_cfg.n_cam+model_cfg.n_light + self.n_detail = model_cfg.n_detail + self.n_cond = model_cfg.n_exp + 3 # exp + jaw pose + self.num_list = [model_cfg.n_shape, model_cfg.n_tex, model_cfg.n_exp, model_cfg.n_pose, model_cfg.n_cam, model_cfg.n_light] + self.param_dict = {i:model_cfg.get('n_' + i) for i in model_cfg.param_list} + + # encoders + self.E_flame = ResnetEncoder(outsize=self.n_param).to(self.device) + self.E_detail = ResnetEncoder(outsize=self.n_detail).to(self.device) + # decoders + self.flame = FLAME(model_cfg).to(self.device) + if model_cfg.use_tex: + self.flametex = FLAMETex(model_cfg).to(self.device) + self.D_detail = Generator(latent_dim=self.n_detail+self.n_cond, out_channels=1, out_scale=model_cfg.max_z, sample_mode = 'bilinear').to(self.device) + # resume model + model_path = self.cfg.pretrained_modelpath + if os.path.exists(model_path): + print(f'trained model found. 
load {model_path}') + checkpoint = torch.load(model_path) + self.checkpoint = checkpoint + util.copy_state_dict(self.E_flame.state_dict(), checkpoint['E_flame']) + util.copy_state_dict(self.E_detail.state_dict(), checkpoint['E_detail']) + util.copy_state_dict(self.D_detail.state_dict(), checkpoint['D_detail']) + else: + print(f'please check model path: {model_path}') + exit() + # eval mode + self.E_flame.eval() + self.E_detail.eval() + self.D_detail.eval() + + def decompose_code(self, code, num_dict): + ''' Convert a flattened parameter vector to a dictionary of parameters + code_dict.keys() = ['shape', 'tex', 'exp', 'pose', 'cam', 'light'] + ''' + code_dict = {} + start = 0 + for key in num_dict: + end = start+int(num_dict[key]) + code_dict[key] = code[:, start:end] + start = end + if key == 'light': + code_dict[key] = code_dict[key].reshape(code_dict[key].shape[0], 9, 3) + return code_dict + + def displacement2normal(self, uv_z, coarse_verts, coarse_normals): + ''' Convert displacement map into detail normal map + ''' + batch_size = uv_z.shape[0] + uv_coarse_vertices = self.render.world2uv(coarse_verts).detach() + uv_coarse_normals = self.render.world2uv(coarse_normals).detach() + + uv_z = uv_z*self.uv_face_eye_mask + uv_detail_vertices = uv_coarse_vertices + uv_z*uv_coarse_normals + self.fixed_uv_dis[None,None,:,:]*uv_coarse_normals.detach() + dense_vertices = uv_detail_vertices.permute(0,2,3,1).reshape([batch_size, -1, 3]) + uv_detail_normals = util.vertex_normals(dense_vertices, self.render.dense_faces.expand(batch_size, -1, -1)) + uv_detail_normals = uv_detail_normals.reshape([batch_size, uv_coarse_vertices.shape[2], uv_coarse_vertices.shape[3], 3]).permute(0,3,1,2) + return uv_detail_normals + + def displacement2vertex(self, uv_z, coarse_verts, coarse_normals): + ''' Convert displacement map into detail vertices + ''' + batch_size = uv_z.shape[0] + uv_coarse_vertices = self.render.world2uv(coarse_verts).detach() + uv_coarse_normals = self.render.world2uv(coarse_normals).detach() + + uv_z = uv_z*self.uv_face_eye_mask + uv_detail_vertices = uv_coarse_vertices + uv_z*uv_coarse_normals + self.fixed_uv_dis[None,None,:,:]*uv_coarse_normals.detach() + dense_vertices = uv_detail_vertices.permute(0,2,3,1).reshape([batch_size, -1, 3]) + # uv_detail_normals = util.vertex_normals(dense_vertices, self.render.dense_faces.expand(batch_size, -1, -1)) + # uv_detail_normals = uv_detail_normals.reshape([batch_size, uv_coarse_vertices.shape[2], uv_coarse_vertices.shape[3], 3]).permute(0,3,1,2) + detail_faces = self.render.dense_faces + return dense_vertices, detail_faces + + def visofp(self, normals): + ''' visibility of keypoints, based on the normal direction + ''' + normals68 = self.flame.seletec_3d68(normals) + vis68 = (normals68[:,:,2:] < 0.1).float() + return vis68 + + @torch.no_grad() + def encode(self, images): + batch_size = images.shape[0] + parameters = self.E_flame(images) + detailcode = self.E_detail(images) + codedict = self.decompose_code(parameters, self.param_dict) + codedict['detail'] = detailcode + codedict['images'] = images + return codedict + + @torch.no_grad() + def decode(self, codedict, tform=None): + images = codedict['images'] + batch_size = images.shape[0] + + # pose = codedict['pose'] + # print(f'Pose: {pose}') + # camera = codedict['cam'] + # print(f'Camera: {camera}') + + ## decode + verts, landmarks2d, landmarks3d = self.flame(shape_params=codedict['shape'], expression_params=codedict['exp'], pose_params=codedict['pose']) + uv_z = 
self.D_detail(torch.cat([codedict['pose'][:,3:], codedict['exp'], codedict['detail']], dim=1)) + if self.cfg.model.use_tex: + albedo = self.flametex(codedict['tex']) + else: + albedo = torch.zeros([batch_size, 3, self.uv_size, self.uv_size], device=images.device) + + # pdb.set_trace() + + ## projection + landmarks2d = util.batch_orth_proj(landmarks2d, codedict['cam'])[:, :, :2]; landmarks2d[:, :, 1:] = -landmarks2d[:, :, 1:]; landmarks2d = landmarks2d * self.image_size / 2 + self.image_size / 2 + landmarks3d = util.batch_orth_proj(landmarks3d, codedict['cam']); landmarks3d[:, :, 1:] = -landmarks3d[:, :, 1:]; landmarks3d = landmarks3d * self.image_size / 2 + self.image_size / 2 + trans_verts = util.batch_orth_proj(verts, codedict['cam']); trans_verts[:, :, 1:] = -trans_verts[:, :, 1:] + + if tform is not None: + + tform_tensor = torch.tensor(tform.params, dtype=torch.float32).cuda() + dst_image = warp(trans_verts[0,:,1:].cpu().numpy(), tform) + trans_verts = torch.cat((trans_verts[0,:,:1], torch.tensor(dst_image, dtype=torch.float32).cuda()), dim = 1) + trans_verts = torch.unsqueeze(trans_verts, dim=0) + + ## rendering + ops = self.render(verts, trans_verts, albedo, codedict['light']) + uv_detail_normals = self.displacement2normal(uv_z, verts, ops['normals']) + uv_shading = self.render.add_SHlight(uv_detail_normals, codedict['light']) + uv_texture = albedo*uv_shading + + landmarks3d_vis = self.visofp(ops['transformed_normals']) + landmarks3d = torch.cat([landmarks3d, landmarks3d_vis], dim=2) + + ## render shape + shape_images = self.render.render_shape(verts, trans_verts) + + # new_shape = shape_images[0].permute(1, 2, 0).cpu().numpy() + # plt.imshow(new_shape) + # plt.show() + + detail_normal_images = F.grid_sample(uv_detail_normals, ops['grid'], align_corners=False)*ops['alpha_images'] + shape_detail_images = self.render.render_shape(verts, trans_verts, detail_normal_images=detail_normal_images) + + ## extract texture + ## TODO: current resolution 256x256, support higher resolution, and add visibility + uv_pverts = self.render.world2uv(trans_verts) + uv_gt = F.grid_sample(images, uv_pverts.permute(0,2,3,1)[:,:,:,:2], mode='bilinear') + if self.cfg.model.use_tex: + ## TODO: poisson blending should give better-looking results + uv_texture_gt = uv_gt[:,:3,:,:]*self.uv_face_eye_mask + (uv_texture[:,:3,:,:]*(1-self.uv_face_eye_mask)*0.7) + else: + uv_texture_gt = uv_gt[:,:3,:,:]*self.uv_face_eye_mask + (torch.ones_like(uv_gt[:,:3,:,:])*(1-self.uv_face_eye_mask)*0.7) + + ## output + opdict = { + 'vertices': verts, + 'normals': ops['normals'], + 'grid': ops['grid'], + 'transformed_vertices': trans_verts, + 'landmarks2d': landmarks2d, + 'landmarks3d': landmarks3d, + 'uv_detail_normals': uv_detail_normals, + 'uv_texture_gt': uv_texture_gt, + 'displacement_map': uv_z+self.fixed_uv_dis[None,None,:,:], + 'detail_normal_images': detail_normal_images, + } + + if self.cfg.model.use_tex: + opdict['albedo'] = albedo + opdict['uv_texture'] = uv_texture + + visdict = { + 'inputs': images, + 'landmarks2d': util.tensor_vis_landmarks(images, landmarks2d, isScale=False), + 'landmarks3d': util.tensor_vis_landmarks(images, landmarks3d, isScale=False), + 'shape_images': shape_images, + 'shape_detail_images': shape_detail_images, + } + + if self.cfg.model.use_tex: + visdict['rendered_images'] = ops['images'] + + return opdict, visdict + + @torch.no_grad() + def decode_eyes(self, codedict, tform=None): + + images = codedict['images'] + batch_size = images.shape[0] + + pose = codedict['pose'] + print(f'Pose: {pose}') 
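+ # decode_eyes mirrors decode() but passes an explicit eye_pose_params + # tensor to FLAME. The zero initialisation below is immediately overwritten + # with -20 * pose, i.e. the eye rotation is derived by scaling the predicted + # head pose; the factor -20 appears to be an empirical constant.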
+ camera = codedict['cam'] + print(f'Camera: {camera}') + + eye_pose_params = torch.tensor([[0, 0, 0, 0, 0, 0]], device="cuda" if torch.cuda.is_available() else "cpu") + eye_pose_params = -20 * pose + print(f'eye_pose_params: {eye_pose_params}') + + ## decode + verts, landmarks2d, landmarks3d = self.flame(shape_params=codedict['shape'], expression_params=codedict['exp'], + pose_params=codedict['pose'], eye_pose_params=eye_pose_params) + + uv_z = self.D_detail(torch.cat([codedict['pose'][:, 3:], codedict['exp'], codedict['detail']], dim=1)) + if self.cfg.model.use_tex: + albedo = self.flametex(codedict['tex']) + else: + albedo = torch.zeros([batch_size, 3, self.uv_size, self.uv_size], device=images.device) + + # pdb.set_trace() + + ## projection + landmarks2d = util.batch_orth_proj(landmarks2d, codedict['cam'])[:, :, :2]; + landmarks2d[:, :, 1:] = -landmarks2d[:, :, 1:]; + landmarks2d = landmarks2d * self.image_size / 2 + self.image_size / 2 + landmarks3d = util.batch_orth_proj(landmarks3d, codedict['cam']); + landmarks3d[:, :, 1:] = -landmarks3d[:, :, 1:]; + landmarks3d = landmarks3d * self.image_size / 2 + self.image_size / 2 + trans_verts = util.batch_orth_proj(verts, codedict['cam']); + trans_verts[:, :, 1:] = -trans_verts[:, :, 1:] + + if tform is not None: + tform_tensor = torch.tensor(tform.params, dtype=torch.float32).cuda() + dst_image = warp(trans_verts[0, :, 1:].cpu().numpy(), tform) + trans_verts = torch.cat((trans_verts[0, :, :1], torch.tensor(dst_image, dtype=torch.float32).cuda()), dim=1) + trans_verts = torch.unsqueeze(trans_verts, dim=0) + + ## rendering + ops = self.render(verts, trans_verts, albedo, codedict['light']) + uv_detail_normals = self.displacement2normal(uv_z, verts, ops['normals']) + uv_shading = self.render.add_SHlight(uv_detail_normals, codedict['light']) + uv_texture = albedo * uv_shading + + landmarks3d_vis = self.visofp(ops['transformed_normals']) + landmarks3d = torch.cat([landmarks3d, landmarks3d_vis], dim=2) + + ## render shape + shape_images = self.render.render_shape(verts, trans_verts) + + # new_shape = shape_images[0].permute(1, 2, 0).cpu().numpy() + # plt.imshow(new_shape) + # plt.show() + + detail_normal_images = F.grid_sample(uv_detail_normals, ops['grid'], align_corners=False) * ops['alpha_images'] + shape_detail_images = self.render.render_shape(verts, trans_verts, detail_normal_images=detail_normal_images) + + ## extract texture + ## TODO: current resolution 256x256, support higher resolution, and add visibility + uv_pverts = self.render.world2uv(trans_verts) + uv_gt = F.grid_sample(images, uv_pverts.permute(0, 2, 3, 1)[:, :, :, :2], mode='bilinear') + if self.cfg.model.use_tex: + ## TODO: poisson blending should give better-looking results + uv_texture_gt = uv_gt[:, :3, :, :] * self.uv_face_eye_mask + ( + uv_texture[:, :3, :, :] * (1 - self.uv_face_eye_mask) * 0.7) + else: + uv_texture_gt = uv_gt[:, :3, :, :] * self.uv_face_eye_mask + ( + torch.ones_like(uv_gt[:, :3, :, :]) * (1 - self.uv_face_eye_mask) * 0.7) + + ## output + opdict = { + 'vertices': verts, + 'normals': ops['normals'], + 'grid': ops['grid'], + 'transformed_vertices': trans_verts, + 'landmarks2d': landmarks2d, + 'landmarks3d': landmarks3d, + 'uv_detail_normals': uv_detail_normals, + 'uv_texture_gt': uv_texture_gt, + 'displacement_map': uv_z + self.fixed_uv_dis[None, None, :, :], + 'detail_normal_images': detail_normal_images, + + } + + if self.cfg.model.use_tex: + opdict['albedo'] = albedo + opdict['uv_texture'] = uv_texture + + visdict = { + 'inputs': images, + 'landmarks2d': 
util.tensor_vis_landmarks(images, landmarks2d, isScale=False), + 'landmarks3d': util.tensor_vis_landmarks(images, landmarks3d, isScale=False), + 'shape_images': shape_images, + 'shape_detail_images': shape_detail_images, + } + + if self.cfg.model.use_tex: + visdict['rendered_images'] = ops['images'] + + return opdict, visdict + + def render_uv(self, codedict, exp, pose): + + images = codedict['images'] + batch_size = images.shape[0] + + ## decode + verts, landmarks2d, landmarks3d = self.flame(shape_params=codedict['shape'], expression_params=exp, + pose_params=pose) + + if self.cfg.model.use_tex: + albedo = self.flametex(codedict['tex']) + else: + albedo = torch.zeros([batch_size, 3, self.uv_size, self.uv_size], device=images.device) + + ## projection + trans_verts = util.batch_orth_proj(verts, codedict['cam']) + trans_verts[:, :, 1:] = -trans_verts[:, :, 1:] + + ## rendering + ops = self.render(verts, trans_verts, albedo, codedict['light']) + + return ops['grid'] + + def render_uv_details(self, codedict, exp, pose): + + images = codedict['images'] + batch_size = images.shape[0] + + ## decode + verts, landmarks2d, landmarks3d = self.flame(shape_params=codedict['shape'], expression_params=exp, + pose_params=pose) + uv_z = self.D_detail(torch.cat([pose[:, 3:], exp, codedict['detail']], dim=1)) + + if self.cfg.model.use_tex: + albedo = self.flametex(codedict['tex']) + else: + albedo = torch.zeros([batch_size, 3, self.uv_size, self.uv_size], device=images.device) + + ## projection + trans_verts = util.batch_orth_proj(verts, codedict['cam']); + trans_verts[:, :, 1:] = -trans_verts[:, :, 1:] + + ## rendering + ops = self.render(verts, trans_verts, albedo, codedict['light']) + uv_detail_normals = self.displacement2normal(uv_z, verts, ops['normals']) + + detail_normal_images = F.grid_sample(uv_detail_normals, ops['grid'], align_corners=False) * ops['alpha_images'] + + return ops['grid'], detail_normal_images + + def render_mask(self, grid): + + # image = Image.open('./third/DECA/data/mask_face.png') + # image = Image.open('./third/DECA/data/mask_3.png') + current_path = os.getcwd() + try : + head_tail = os.path.split(current_path) + image = Image.open(head_tail[0] + '/third/DECA/data/mask_mouth_2.png') + + except FileNotFoundError: + image = Image.open(current_path + '/third/DECA/data/mask_mouth_2.png') + + trans = transforms.ToTensor() + tmp = trans(image) + image_tensor = trans(image).cuda().unsqueeze(0) + mask_1 = (grid[:, :, :, 0:1] != 0.0) & (grid[:, :, :, 1:2] != 0.0) + mask_1 = mask_1.permute(0, 3, 1, 2) + + mask = F.grid_sample(image_tensor, grid, align_corners=False, padding_mode='zeros') + mask = mask_1 * mask + + return mask[0,0,:,:] + + def visualize(self, visdict, size=None): + grids = {} + if size is None: + size = self.image_size + for key in visdict: + grids[key] = torchvision.utils.make_grid(F.interpolate(visdict[key], [size, size])).detach().cpu() + grid = torch.cat(list(grids.values()), 2) + grid_image = (grid.numpy().transpose(1,2,0).copy()*255)[:,:,[2,1,0]] + grid_image = np.minimum(np.maximum(grid_image, 0), 255).astype(np.uint8) + return grid_image + + def save_obj(self, filename, opdict): + ''' + vertices: [nv, 3], tensor + texture: [3, h, w], tensor + ''' + i = 0 + vertices = opdict['vertices'][i].cpu().numpy() + faces = self.render.faces[0].cpu().numpy() + texture = util.tensor2image(opdict['uv_texture_gt'][i]) + uvcoords = self.render.raw_uvcoords[0].cpu().numpy() + uvfaces = self.render.uvfaces[0].cpu().numpy() + # save coarse mesh, with texture and normal map + 
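# The detail normals are in [-1, 1]; the * 0.5 + 0.5 below packs them + # into [0, 1] so util.tensor2image can write them as an 8-bit normal map. +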
normal_map = util.tensor2image(opdict['uv_detail_normals'][i] * 0.5 + 0.5) + util.write_obj(filename, vertices, faces, + texture=texture, + uvcoords=uvcoords, + uvfaces=uvfaces, + normal_map=normal_map) + # upsample mesh, save detailed mesh + texture = texture[:,:,[2,1,0]] + normals = opdict['normals'][i].cpu().numpy() + displacement_map = opdict['displacement_map'][i].cpu().numpy().squeeze() + dense_vertices, dense_colors, dense_faces = util.upsample_mesh(vertices, normals, faces, displacement_map, + texture, self.dense_template) + util.write_obj(filename.replace('.obj', '_detail.obj'), + dense_vertices, + dense_faces, + colors=dense_colors, + inverse_face_order=True) diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/decalib/models/FLAME.py b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/models/FLAME.py new file mode 100755 index 0000000..7e2821a --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/models/FLAME.py @@ -0,0 +1,262 @@ +# -*- coding: utf-8 -*- +# +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# Using this computer program means that you agree to the terms +# in the LICENSE file included with this software distribution. +# Any use not explicitly granted by the LICENSE is prohibited. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# For comments or questions, please email us at deca@tue.mpg.de +# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de + +import torch +import torch.nn as nn +import numpy as np +import pickle +import torch.nn.functional as F + +from .lbs import lbs, batch_rodrigues, vertices2landmarks, rot_mat_to_euler + +def to_tensor(array, dtype=torch.float32): + if 'torch.tensor' not in str(type(array)): + return torch.tensor(array, dtype=dtype) + return array # pass through if already a tensor +def to_np(array, dtype=np.float32): + if 'scipy.sparse' in str(type(array)): + array = array.todense() + return np.array(array, dtype=dtype) + +class Struct(object): + def __init__(self, **kwargs): + for key, val in kwargs.items(): + setattr(self, key, val) + +class FLAME(nn.Module): + """ + borrowed from https://github.com/soubhiksanyal/FLAME_PyTorch/blob/master/FLAME.py + Given flame parameters this class generates a differentiable FLAME function + which outputs a mesh and 2D/3D facial landmarks + """ + def __init__(self, config): + super(FLAME, self).__init__() + print("creating the FLAME Decoder") + with open(config.flame_model_path, 'rb') as f: + ss = pickle.load(f, encoding='latin1') + flame_model = Struct(**ss) + + self.dtype = torch.float32 + self.register_buffer('faces_tensor', to_tensor(to_np(flame_model.f, dtype=np.int64), dtype=torch.long)) + # The vertices of the template model + self.register_buffer('v_template', to_tensor(to_np(flame_model.v_template), dtype=self.dtype)) + # The shape components and expression + shapedirs = to_tensor(to_np(flame_model.shapedirs), dtype=self.dtype) + shapedirs = torch.cat([shapedirs[:,:,:config.n_shape], shapedirs[:,:,300:300+config.n_exp]], 2) + self.register_buffer('shapedirs', shapedirs) + # The pose components + num_pose_basis = flame_model.posedirs.shape[-1] + posedirs = np.reshape(flame_model.posedirs, [-1, num_pose_basis]).T + self.register_buffer('posedirs', to_tensor(to_np(posedirs), dtype=self.dtype)) + # + self.register_buffer('J_regressor', to_tensor(to_np(flame_model.J_regressor), dtype=self.dtype)) + parents = to_tensor(to_np(flame_model.kintree_table[0])).long(); parents[0] = -1 + self.register_buffer('parents', parents) + self.register_buffer('lbs_weights', to_tensor(to_np(flame_model.weights), dtype=self.dtype)) + + # Fixing Eyeball and neck rotation + default_eyball_pose = torch.zeros([1, 6], dtype=self.dtype, requires_grad=False) + self.register_parameter('eye_pose', nn.Parameter(default_eyball_pose, + requires_grad=False)) + default_neck_pose = torch.zeros([1, 3], dtype=self.dtype, requires_grad=False) + self.register_parameter('neck_pose', nn.Parameter(default_neck_pose, + requires_grad=False)) + + # Static and Dynamic Landmark embeddings for FLAME + lmk_embeddings = np.load(config.flame_lmk_embedding_path, allow_pickle=True, encoding='latin1') + lmk_embeddings = lmk_embeddings[()] + self.register_buffer('lmk_faces_idx', torch.from_numpy(lmk_embeddings['static_lmk_faces_idx']).long()) + self.register_buffer('lmk_bary_coords', torch.from_numpy(lmk_embeddings['static_lmk_bary_coords']).to(self.dtype)) + self.register_buffer('dynamic_lmk_faces_idx', lmk_embeddings['dynamic_lmk_faces_idx'].long()) + self.register_buffer('dynamic_lmk_bary_coords', lmk_embeddings['dynamic_lmk_bary_coords'].to(self.dtype)) + self.register_buffer('full_lmk_faces_idx', torch.from_numpy(lmk_embeddings['full_lmk_faces_idx']).long()) + self.register_buffer('full_lmk_bary_coords', torch.from_numpy(lmk_embeddings['full_lmk_bary_coords']).to(self.dtype)) + + neck_kin_chain = []; NECK_IDX=1 + curr_idx = torch.tensor(NECK_IDX, dtype=torch.long) + while curr_idx != -1: + neck_kin_chain.append(curr_idx) + curr_idx = self.parents[curr_idx] + self.register_buffer('neck_kin_chain', torch.stack(neck_kin_chain)) + + def _find_dynamic_lmk_idx_and_bcoords(self, pose, dynamic_lmk_faces_idx, + dynamic_lmk_b_coords, + neck_kin_chain, dtype=torch.float32): + """ + Selects the face contour depending on the relative position of the head + Input: + vertices: N X num_of_vertices X 3 + pose: N X full pose + dynamic_lmk_faces_idx: The list of contour face indexes + dynamic_lmk_b_coords: The list of contour barycentric weights + neck_kin_chain: The tree to consider for the relative rotation + dtype: Data type + return: + The contour face indexes and the corresponding barycentric weights + """ + + batch_size = pose.shape[0] + + aa_pose = torch.index_select(pose.view(batch_size, -1, 3), 1, + neck_kin_chain) + rot_mats = batch_rodrigues( + aa_pose.view(-1, 3), dtype=dtype).view(batch_size, -1, 3, 3) + + rel_rot_mat = torch.eye(3, device=pose.device, + dtype=dtype).unsqueeze_(dim=0).expand(batch_size, -1, -1) + for idx in range(len(neck_kin_chain)): + rel_rot_mat = torch.bmm(rot_mats[:, idx], rel_rot_mat) + + y_rot_angle = torch.round( + torch.clamp(rot_mat_to_euler(rel_rot_mat) * 180.0 / np.pi, + max=39)).to(dtype=torch.long) + + neg_mask = y_rot_angle.lt(0).to(dtype=torch.long) + mask = y_rot_angle.lt(-39).to(dtype=torch.long) + neg_vals = mask * 78 + (1 - mask) * (39 - y_rot_angle) + y_rot_angle = (neg_mask * neg_vals + + (1 - neg_mask) * y_rot_angle) + + dyn_lmk_faces_idx = torch.index_select(dynamic_lmk_faces_idx, + 0, y_rot_angle) + dyn_lmk_b_coords = torch.index_select(dynamic_lmk_b_coords, + 0, y_rot_angle) + return dyn_lmk_faces_idx, dyn_lmk_b_coords + + def _vertices2landmarks(self, vertices, faces, lmk_faces_idx, lmk_bary_coords): + """ + Calculates landmarks by barycentric interpolation + Input: + vertices: torch.tensor NxVx3, dtype = torch.float32 + The tensor of input vertices + faces: torch.tensor (N*F)x3, dtype = torch.long + The faces of the mesh + lmk_faces_idx: torch.tensor N X L, dtype = torch.long + The tensor with the indices of the faces used to calculate the + landmarks. + lmk_bary_coords: torch.tensor N X L X 3, dtype = torch.float32 + The tensor of barycentric coordinates that are used to interpolate + the landmarks + + Returns: + landmarks: torch.tensor NxLx3, dtype = torch.float32 + The coordinates of the landmarks for each mesh in the batch + """ + # Extract the indices of the vertices for each face + # NxLx3 + batch_size, num_verts = vertices.shape[:2] + lmk_faces = torch.index_select(faces, 0, lmk_faces_idx.view(-1)).view( + 1, -1, 3).view(batch_size, lmk_faces_idx.shape[1], -1) + + lmk_faces += torch.arange(batch_size, dtype=torch.long).view(-1, 1, 1).to( + device=vertices.device) * num_verts + + lmk_vertices = vertices.view(-1, 3)[lmk_faces] + landmarks = torch.einsum('blfi,blf->bli', [lmk_vertices, lmk_bary_coords]) + return landmarks + + def seletec_3d68(self, vertices): + landmarks3d = vertices2landmarks(vertices, self.faces_tensor, + self.full_lmk_faces_idx.repeat(vertices.shape[0], 1), + self.full_lmk_bary_coords.repeat(vertices.shape[0], 1, 1)) + return landmarks3d + + def forward(self, shape_params=None, expression_params=None, pose_params=None, eye_pose_params=None): + """ + Input: + shape_params: N X number of shape parameters + expression_params: N X number of expression parameters + pose_params: N X number of pose parameters (6) + return: + vertices: N X V X 3 + landmarks: N X number of landmarks X 3 + """ + batch_size = shape_params.shape[0] + if eye_pose_params is None: + eye_pose_params = self.eye_pose.expand(batch_size, -1) + betas = torch.cat([shape_params, expression_params], dim=1) + full_pose = torch.cat([pose_params[:, :3], self.neck_pose.expand(batch_size, -1), pose_params[:, 3:], eye_pose_params], dim=1) + template_vertices = self.v_template.unsqueeze(0).expand(batch_size, -1, -1) + + vertices, _ = lbs(betas, full_pose, template_vertices, + self.shapedirs, self.posedirs, + self.J_regressor, self.parents, + self.lbs_weights, dtype=self.dtype) + + lmk_faces_idx = self.lmk_faces_idx.unsqueeze(dim=0).expand(batch_size, -1) + lmk_bary_coords = self.lmk_bary_coords.unsqueeze(dim=0).expand(batch_size, -1, -1) + + dyn_lmk_faces_idx, dyn_lmk_bary_coords = self._find_dynamic_lmk_idx_and_bcoords( + full_pose, self.dynamic_lmk_faces_idx, + self.dynamic_lmk_bary_coords, + self.neck_kin_chain, dtype=self.dtype) + lmk_faces_idx = torch.cat([dyn_lmk_faces_idx, lmk_faces_idx], 1) + lmk_bary_coords = torch.cat([dyn_lmk_bary_coords, lmk_bary_coords], 1) + + landmarks2d = vertices2landmarks(vertices, self.faces_tensor, + lmk_faces_idx, + lmk_bary_coords) + bz = vertices.shape[0] + landmarks3d = vertices2landmarks(vertices, self.faces_tensor, + self.full_lmk_faces_idx.repeat(bz, 1), + self.full_lmk_bary_coords.repeat(bz, 1, 1)) + return vertices, landmarks2d, landmarks3d + +class FLAMETex(nn.Module): + """ + FLAME texture: + https://github.com/TimoBolkart/TF_FLAME/blob/ade0ab152300ec5f0e8555d6765411555c5ed43d/sample_texture.py#L64 + FLAME texture converted from BFM: + https://github.com/TimoBolkart/BFM_to_FLAME + """ + def __init__(self, config): + super(FLAMETex, self).__init__() + if config.tex_type == 'BFM': + mu_key = 'MU' + pc_key = 'PC' + n_pc = 199 + tex_path = config.tex_path + tex_space = np.load(tex_path) + texture_mean = tex_space[mu_key].reshape(1, -1) + texture_basis = tex_space[pc_key].reshape(-1, n_pc) + + 
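# NOTE: unlike the 'FLAME' branch below, the BFM texture statistics are + # kept in their raw 0-255 range here; the FLAME branch divides its mean + # and basis by 255 at load time. +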
elif config.tex_type == 'FLAME': + mu_key = 'mean' + pc_key = 'tex_dir' + n_pc = 200 + tex_path = config.flame_tex_path + tex_space = np.load(tex_path) + texture_mean = tex_space[mu_key].reshape(1, -1)/255. + texture_basis = tex_space[pc_key].reshape(-1, n_pc)/255. + else: + print('texture type ', config.tex_type, 'does not exist!') + raise NotImplementedError + + n_tex = config.n_tex + num_components = texture_basis.shape[1] + texture_mean = torch.from_numpy(texture_mean).float()[None,...] + texture_basis = torch.from_numpy(texture_basis[:,:n_tex]).float()[None,...] + self.register_buffer('texture_mean', texture_mean) + self.register_buffer('texture_basis', texture_basis) + + def forward(self, texcode): + ''' + texcode: [batchsize, n_tex] + texture: [bz, 3, 256, 256], range: 0-1 + ''' + texture = self.texture_mean + (self.texture_basis*texcode[:,None,:]).sum(-1) + texture = texture.reshape(texcode.shape[0], 512, 512, 3).permute(0,3,1,2) + texture = F.interpolate(texture, [256, 256]) + texture = texture[:,[2,1,0], :,:] + return texture \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/decalib/models/__init__.py b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/decalib/models/decoders.py b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/models/decoders.py new file mode 100755 index 0000000..e1cdfa5 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/models/decoders.py @@ -0,0 +1,55 @@ +# -*- coding: utf-8 -*- +# +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# Using this computer program means that you agree to the terms +# in the LICENSE file included with this software distribution. +# Any use not explicitly granted by the LICENSE is prohibited. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved.
+# +# For comments or questions, please email us at deca@tue.mpg.de +# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de + +import torch.nn as nn + +class Generator(nn.Module): + def __init__(self, latent_dim=100, out_channels=1, out_scale=0.01, sample_mode = 'bilinear'): + super(Generator, self).__init__() + self.out_scale = out_scale + + self.init_size = 32 // 4 # Initial size before upsampling + self.l1 = nn.Sequential(nn.Linear(latent_dim, 128 * self.init_size ** 2)) + self.conv_blocks = nn.Sequential( + nn.BatchNorm2d(128), + nn.Upsample(scale_factor=2, mode=sample_mode), #16 + nn.Conv2d(128, 128, 3, stride=1, padding=1), + nn.BatchNorm2d(128, 0.8), + nn.LeakyReLU(0.2, inplace=True), + nn.Upsample(scale_factor=2, mode=sample_mode), #32 + nn.Conv2d(128, 64, 3, stride=1, padding=1), + nn.BatchNorm2d(64, 0.8), + nn.LeakyReLU(0.2, inplace=True), + nn.Upsample(scale_factor=2, mode=sample_mode), #64 + nn.Conv2d(64, 64, 3, stride=1, padding=1), + nn.BatchNorm2d(64, 0.8), + nn.LeakyReLU(0.2, inplace=True), + nn.Upsample(scale_factor=2, mode=sample_mode), #128 + nn.Conv2d(64, 32, 3, stride=1, padding=1), + nn.BatchNorm2d(32, 0.8), + nn.LeakyReLU(0.2, inplace=True), + nn.Upsample(scale_factor=2, mode=sample_mode), #256 + nn.Conv2d(32, 16, 3, stride=1, padding=1), + nn.BatchNorm2d(16, 0.8), + nn.LeakyReLU(0.2, inplace=True), + nn.Conv2d(16, out_channels, 3, stride=1, padding=1), + nn.Tanh(), + ) + + def forward(self, noise): + out = self.l1(noise) + out = out.view(out.shape[0], 128, self.init_size, self.init_size) + img = self.conv_blocks(out) + return img*self.out_scale \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/decalib/models/encoders.py b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/models/encoders.py new file mode 100755 index 0000000..53331fd --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/models/encoders.py @@ -0,0 +1,37 @@ +# -*- coding: utf-8 -*- +# +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# Using this computer program means that you agree to the terms +# in the LICENSE file included with this software distribution. +# Any use not explicitly granted by the LICENSE is prohibited. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# For comments or questions, please email us at deca@tue.mpg.de +# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de + +import torch.nn as nn +from . 
import resnet + +class ResnetEncoder(nn.Module): + def __init__(self, outsize, last_op=None): + super(ResnetEncoder, self).__init__() + feature_size = 2048 + self.encoder = resnet.load_ResNet50Model() #out: 2048 + ### regressor + self.layers = nn.Sequential( + nn.Linear(feature_size, 1024), + nn.ReLU(), + nn.Linear(1024, outsize) + ) + self.last_op = last_op + + def forward(self, inputs): + features = self.encoder(inputs) + parameters = self.layers(features) + if self.last_op: + parameters = self.last_op(parameters) + return parameters diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/decalib/models/lbs.py b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/models/lbs.py new file mode 100755 index 0000000..df55ab3 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/models/lbs.py @@ -0,0 +1,378 @@ +# -*- coding: utf-8 -*- + +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# You can only use this computer program if you have closed +# a license agreement with MPG or you get the right to use the computer +# program from someone who is authorized to grant you that right. +# Any use of the computer program without a valid license is prohibited and +# liable to prosecution. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# Contact: ps-license@tuebingen.mpg.de + +from __future__ import absolute_import +from __future__ import print_function +from __future__ import division + +import numpy as np + +import torch +import torch.nn.functional as F + +def rot_mat_to_euler(rot_mats): + # Calculates rotation matrix to euler angles + # Careful for extreme cases of euler angles like [0.0, pi, 0.0] + + sy = torch.sqrt(rot_mats[:, 0, 0] * rot_mats[:, 0, 0] + + rot_mats[:, 1, 0] * rot_mats[:, 1, 0]) + return torch.atan2(-rot_mats[:, 2, 0], sy) + +def find_dynamic_lmk_idx_and_bcoords(vertices, pose, dynamic_lmk_faces_idx, + dynamic_lmk_b_coords, + neck_kin_chain, dtype=torch.float32): + ''' Compute the faces, barycentric coordinates for the dynamic landmarks + + + To do so, we first compute the rotation of the neck around the y-axis + and then use a pre-computed look-up table to find the faces and the + barycentric coordinates that will be used. + + Special thanks to Soubhik Sanyal (soubhik.sanyal@tuebingen.mpg.de) + for providing the original TensorFlow implementation and for the LUT. + + Parameters + ---------- + vertices: torch.tensor BxVx3, dtype = torch.float32 + The tensor of input vertices + pose: torch.tensor Bx(Jx3), dtype = torch.float32 + The current pose of the body model + dynamic_lmk_faces_idx: torch.tensor L, dtype = torch.long + The look-up table from neck rotation to faces + dynamic_lmk_b_coords: torch.tensor Lx3, dtype = torch.float32 + The look-up table from neck rotation to barycentric coordinates + neck_kin_chain: list + A python list that contains the indices of the joints that form the + kinematic chain of the neck. + dtype: torch.dtype, optional + + Returns + ------- + dyn_lmk_faces_idx: torch.tensor, dtype = torch.long + A tensor of size BxL that contains the indices of the faces that + will be used to compute the current dynamic landmarks. + dyn_lmk_b_coords: torch.tensor, dtype = torch.float32 + A tensor of size BxLx3 that contains the barycentric coordinates that + will be used to compute the current dynamic landmarks.
+ ''' + + batch_size = vertices.shape[0] + + aa_pose = torch.index_select(pose.view(batch_size, -1, 3), 1, + neck_kin_chain) + rot_mats = batch_rodrigues( + aa_pose.view(-1, 3), dtype=dtype).view(batch_size, -1, 3, 3) + + rel_rot_mat = torch.eye(3, device=vertices.device, + dtype=dtype).unsqueeze_(dim=0) + for idx in range(len(neck_kin_chain)): + rel_rot_mat = torch.bmm(rot_mats[:, idx], rel_rot_mat) + + y_rot_angle = torch.round( + torch.clamp(-rot_mat_to_euler(rel_rot_mat) * 180.0 / np.pi, + max=39)).to(dtype=torch.long) + neg_mask = y_rot_angle.lt(0).to(dtype=torch.long) + mask = y_rot_angle.lt(-39).to(dtype=torch.long) + neg_vals = mask * 78 + (1 - mask) * (39 - y_rot_angle) + y_rot_angle = (neg_mask * neg_vals + + (1 - neg_mask) * y_rot_angle) + + dyn_lmk_faces_idx = torch.index_select(dynamic_lmk_faces_idx, + 0, y_rot_angle) + dyn_lmk_b_coords = torch.index_select(dynamic_lmk_b_coords, + 0, y_rot_angle) + + return dyn_lmk_faces_idx, dyn_lmk_b_coords + + +def vertices2landmarks(vertices, faces, lmk_faces_idx, lmk_bary_coords): + ''' Calculates landmarks by barycentric interpolation + + Parameters + ---------- + vertices: torch.tensor BxVx3, dtype = torch.float32 + The tensor of input vertices + faces: torch.tensor Fx3, dtype = torch.long + The faces of the mesh + lmk_faces_idx: torch.tensor L, dtype = torch.long + The tensor with the indices of the faces used to calculate the + landmarks. + lmk_bary_coords: torch.tensor Lx3, dtype = torch.float32 + The tensor of barycentric coordinates that are used to interpolate + the landmarks + + Returns + ------- + landmarks: torch.tensor BxLx3, dtype = torch.float32 + The coordinates of the landmarks for each mesh in the batch + ''' + # Extract the indices of the vertices for each face + # BxLx3 + batch_size, num_verts = vertices.shape[:2] + device = vertices.device + + lmk_faces = torch.index_select(faces, 0, lmk_faces_idx.view(-1)).view( + batch_size, -1, 3) + + lmk_faces += torch.arange( + batch_size, dtype=torch.long, device=device).view(-1, 1, 1) * num_verts + + lmk_vertices = vertices.view(-1, 3)[lmk_faces].view( + batch_size, -1, 3, 3) + + landmarks = torch.einsum('blfi,blf->bli', [lmk_vertices, lmk_bary_coords]) + return landmarks + + +def lbs(betas, pose, v_template, shapedirs, posedirs, J_regressor, parents, + lbs_weights, pose2rot=True, dtype=torch.float32): + ''' Performs Linear Blend Skinning with the given shape and pose parameters + + Parameters + ---------- + betas : torch.tensor BxNB + The tensor of shape parameters + pose : torch.tensor Bx(J + 1) * 3 + The pose parameters in axis-angle format + v_template torch.tensor BxVx3 + The template mesh that will be deformed + shapedirs : torch.tensor 1xNB + The tensor of PCA shape displacements + posedirs : torch.tensor Px(V * 3) + The pose PCA coefficients + J_regressor : torch.tensor JxV + The regressor array that is used to calculate the joints from + the position of the vertices + parents: torch.tensor J + The array that describes the kinematic tree for the model + lbs_weights: torch.tensor N x V x (J + 1) + The linear blend skinning weights that represent how much the + rotation matrix of each part affects each vertex + pose2rot: bool, optional + Flag on whether to convert the input pose tensor to rotation + matrices. The default value is True. 
If False, then the pose tensor + should already contain rotation matrices and have a size of + Bx(J + 1)x9 + dtype: torch.dtype, optional + + Returns + ------- + verts: torch.tensor BxVx3 + The vertices of the mesh after applying the shape and pose + displacements. + joints: torch.tensor BxJx3 + The joints of the model + ''' + + batch_size = max(betas.shape[0], pose.shape[0]) + device = betas.device + + # Add shape contribution + v_shaped = v_template + blend_shapes(betas, shapedirs) + + # Get the joints + # NxJx3 array + J = vertices2joints(J_regressor, v_shaped) + + # 3. Add pose blend shapes + # N x J x 3 x 3 + ident = torch.eye(3, dtype=dtype, device=device) + if pose2rot: + rot_mats = batch_rodrigues( + pose.view(-1, 3), dtype=dtype).view([batch_size, -1, 3, 3]) + + pose_feature = (rot_mats[:, 1:, :, :] - ident).view([batch_size, -1]) + # (N x P) x (P, V * 3) -> N x V x 3 + pose_offsets = torch.matmul(pose_feature, posedirs) \ + .view(batch_size, -1, 3) + else: + pose_feature = pose[:, 1:].view(batch_size, -1, 3, 3) - ident + rot_mats = pose.view(batch_size, -1, 3, 3) + + pose_offsets = torch.matmul(pose_feature.view(batch_size, -1), + posedirs).view(batch_size, -1, 3) + + v_posed = pose_offsets + v_shaped + # 4. Get the global joint location + J_transformed, A = batch_rigid_transform(rot_mats, J, parents, dtype=dtype) + + # 5. Do skinning: + # W is N x V x (J + 1) + W = lbs_weights.unsqueeze(dim=0).expand([batch_size, -1, -1]) + # (N x V x (J + 1)) x (N x (J + 1) x 16) + num_joints = J_regressor.shape[0] + T = torch.matmul(W, A.view(batch_size, num_joints, 16)) \ + .view(batch_size, -1, 4, 4) + + homogen_coord = torch.ones([batch_size, v_posed.shape[1], 1], + dtype=dtype, device=device) + v_posed_homo = torch.cat([v_posed, homogen_coord], dim=2) + v_homo = torch.matmul(T, torch.unsqueeze(v_posed_homo, dim=-1)) + + verts = v_homo[:, :, :3, 0] + + return verts, J_transformed + + +def vertices2joints(J_regressor, vertices): + ''' Calculates the 3D joint locations from the vertices + + Parameters + ---------- + J_regressor : torch.tensor JxV + The regressor array that is used to calculate the joints from the + position of the vertices + vertices : torch.tensor BxVx3 + The tensor of mesh vertices + + Returns + ------- + torch.tensor BxJx3 + The location of the joints + ''' + + return torch.einsum('bik,ji->bjk', [vertices, J_regressor]) + + +def blend_shapes(betas, shape_disps): + ''' Calculates the per vertex displacement due to the blend shapes + + + Parameters + ---------- + betas : torch.tensor Bx(num_betas) + Blend shape coefficients + shape_disps: torch.tensor Vx3x(num_betas) + Blend shapes + + Returns + ------- + torch.tensor BxVx3 + The per-vertex displacement due to shape deformation + ''' + + # Displacement[b, m, k] = sum_{l} betas[b, l] * shape_disps[m, k, l] + # i.e. Multiply each shape displacement by its corresponding beta and + # then sum them. 
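+    # NOTE: 'bl,mkl->bmk' contracts the shape-coefficient axis l: for each batch
+    # element b, vertex m and coordinate k it computes
+    # sum_l betas[b, l] * shape_disps[m, k, l], i.e. a per-vertex 3D offset
+    # that lbs() adds on top of the template mesh.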
+ blend_shape = torch.einsum('bl,mkl->bmk', [betas, shape_disps]) + return blend_shape + + +def batch_rodrigues(rot_vecs, epsilon=1e-8, dtype=torch.float32): + ''' Calculates the rotation matrices for a batch of rotation vectors + Parameters + ---------- + rot_vecs: torch.tensor Nx3 + array of N axis-angle vectors + Returns + ------- + R: torch.tensor Nx3x3 + The rotation matrices for the given axis-angle parameters + ''' + + batch_size = rot_vecs.shape[0] + device = rot_vecs.device + + angle = torch.norm(rot_vecs + 1e-8, dim=1, keepdim=True) + rot_dir = rot_vecs / angle + + cos = torch.unsqueeze(torch.cos(angle), dim=1) + sin = torch.unsqueeze(torch.sin(angle), dim=1) + + # Bx1 arrays + rx, ry, rz = torch.split(rot_dir, 1, dim=1) + K = torch.zeros((batch_size, 3, 3), dtype=dtype, device=device) + + zeros = torch.zeros((batch_size, 1), dtype=dtype, device=device) + K = torch.cat([zeros, -rz, ry, rz, zeros, -rx, -ry, rx, zeros], dim=1) \ + .view((batch_size, 3, 3)) + + ident = torch.eye(3, dtype=dtype, device=device).unsqueeze(dim=0) + rot_mat = ident + sin * K + (1 - cos) * torch.bmm(K, K) + return rot_mat + + +def transform_mat(R, t): + ''' Creates a batch of transformation matrices + Args: + - R: Bx3x3 array of a batch of rotation matrices + - t: Bx3x1 array of a batch of translation vectors + Returns: + - T: Bx4x4 Transformation matrix + ''' + # No padding left or right, only add an extra row + return torch.cat([F.pad(R, [0, 0, 0, 1]), + F.pad(t, [0, 0, 0, 1], value=1)], dim=2) + + +def batch_rigid_transform(rot_mats, joints, parents, dtype=torch.float32): + """ + Applies a batch of rigid transformations to the joints + + Parameters + ---------- + rot_mats : torch.tensor BxNx3x3 + Tensor of rotation matrices + joints : torch.tensor BxNx3 + Locations of joints + parents : torch.tensor BxN + The kinematic tree of each object + dtype : torch.dtype, optional: + The data type of the created tensors, the default is torch.float32 + + Returns + ------- + posed_joints : torch.tensor BxNx3 + The locations of the joints after applying the pose rotations + rel_transforms : torch.tensor BxNx4x4 + The relative (with respect to the root joint) rigid transformations + for all the joints + """ + + joints = torch.unsqueeze(joints, dim=-1) + + rel_joints = joints.clone() + rel_joints[:, 1:] -= joints[:, parents[1:]] + + # transforms_mat = transform_mat( + # rot_mats.view(-1, 3, 3), + # rel_joints.view(-1, 3, 1)).view(-1, joints.shape[1], 4, 4) + transforms_mat = transform_mat( + rot_mats.view(-1, 3, 3), + rel_joints.reshape(-1, 3, 1)).reshape(-1, joints.shape[1], 4, 4) + + transform_chain = [transforms_mat[:, 0]] + for i in range(1, parents.shape[0]): + # Subtract the joint location at the rest pose + # No need for rotation, since it's identity when at rest + curr_res = torch.matmul(transform_chain[parents[i]], + transforms_mat[:, i]) + transform_chain.append(curr_res) + + transforms = torch.stack(transform_chain, dim=1) + + # The last column of the transformations contains the posed joints + posed_joints = transforms[:, :, :3, 3] + + joints_homogen = F.pad(joints, [0, 0, 0, 1]) + + rel_transforms = transforms - F.pad( + torch.matmul(transforms, joints_homogen), [3, 0, 0, 0, 0, 0, 0, 0]) + + return posed_joints, rel_transforms \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/decalib/models/resnet.py
b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/models/resnet.py new file mode 100755 index 0000000..039bab0 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/models/resnet.py @@ -0,0 +1,291 @@ +""" +Author: Soubhik Sanyal +Copyright (c) 2019, Soubhik Sanyal +All rights reserved. +Loads different resnet models +""" +''' + file: Resnet.py + date: 2018_05_02 + author: zhangxiong(1025679612@qq.com) + mark: copied from pytorch source code +''' + +import torch.nn as nn +import torch.nn.functional as F +import torch +from torch.nn.parameter import Parameter +import torch.optim as optim +import numpy as np +import math +import torchvision + +class ResNet(nn.Module): + def __init__(self, block, layers, num_classes=1000): + self.inplanes = 64 + super(ResNet, self).__init__() + self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, + bias=False) + self.bn1 = nn.BatchNorm2d(64) + self.relu = nn.ReLU(inplace=True) + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + self.layer1 = self._make_layer(block, 64, layers[0]) + self.layer2 = self._make_layer(block, 128, layers[1], stride=2) + self.layer3 = self._make_layer(block, 256, layers[2], stride=2) + self.layer4 = self._make_layer(block, 512, layers[3], stride=2) + self.avgpool = nn.AvgPool2d(7, stride=1) + # self.fc = nn.Linear(512 * block.expansion, num_classes) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + m.weight.data.normal_(0, math.sqrt(2. / n)) + elif isinstance(m, nn.BatchNorm2d): + m.weight.data.fill_(1) + m.bias.data.zero_() + + def _make_layer(self, block, planes, blocks, stride=1): + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + nn.Conv2d(self.inplanes, planes * block.expansion, + kernel_size=1, stride=stride, bias=False), + nn.BatchNorm2d(planes * block.expansion), + ) + + layers = [] + layers.append(block(self.inplanes, planes, stride, downsample)) + self.inplanes = planes * block.expansion + for i in range(1, blocks): + layers.append(block(self.inplanes, planes)) + + return nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + x = self.bn1(x) + x = self.relu(x) + x = self.maxpool(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x1 = self.layer4(x) + + x2 = self.avgpool(x1) + x2 = x2.view(x2.size(0), -1) + # x = self.fc(x) + ## x2: [bz, 2048] for shape + ## x1: [bz, 2048, 7, 7] for texture + return x2 + +class Bottleneck(nn.Module): + expansion = 4 + + def __init__(self, inplanes, planes, stride=1, downsample=None): + super(Bottleneck, self).__init__() + self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False) + self.bn1 = nn.BatchNorm2d(planes) + self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, + padding=1, bias=False) + self.bn2 = nn.BatchNorm2d(planes) + self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False) + self.bn3 = nn.BatchNorm2d(planes * 4) + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + +def conv3x3(in_planes, out_planes, stride=1): + """3x3 convolution with 
padding""" + return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, + padding=1, bias=False) + +class BasicBlock(nn.Module): + expansion = 1 + + def __init__(self, inplanes, planes, stride=1, downsample=None): + super(BasicBlock, self).__init__() + self.conv1 = conv3x3(inplanes, planes, stride) + self.bn1 = nn.BatchNorm2d(planes) + self.relu = nn.ReLU(inplace=True) + self.conv2 = conv3x3(planes, planes) + self.bn2 = nn.BatchNorm2d(planes) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + +def copy_parameter_from_resnet(model, resnet_dict): + cur_state_dict = model.state_dict() + # import ipdb; ipdb.set_trace() + for name, param in list(resnet_dict.items())[0:None]: + if name not in cur_state_dict: + print(name, ' not available in reconstructed resnet') + continue + if isinstance(param, Parameter): + param = param.data + try: + cur_state_dict[name].copy_(param) + except: + print(name, ' is inconsistent!') + continue + # print('copy resnet state dict finished!') + # import ipdb; ipdb.set_trace() + +def load_ResNet50Model(): + model = ResNet(Bottleneck, [3, 4, 6, 3]) + copy_parameter_from_resnet(model, torchvision.models.resnet50(pretrained = True).state_dict()) + return model + +def load_ResNet101Model(): + model = ResNet(Bottleneck, [3, 4, 23, 3]) + copy_parameter_from_resnet(model, torchvision.models.resnet101(pretrained = True).state_dict()) + return model + +def load_ResNet152Model(): + model = ResNet(Bottleneck, [3, 8, 36, 3]) + copy_parameter_from_resnet(model, torchvision.models.resnet152(pretrained = True).state_dict()) + return model + +# model.load_state_dict(checkpoint['model_state_dict']) + + +######## Unet + +class DoubleConv(nn.Module): + """(convolution => [BN] => ReLU) * 2""" + + def __init__(self, in_channels, out_channels): + super().__init__() + self.double_conv = nn.Sequential( + nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1), + nn.BatchNorm2d(out_channels), + nn.ReLU(inplace=True), + nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1), + nn.BatchNorm2d(out_channels), + nn.ReLU(inplace=True) + ) + + def forward(self, x): + return self.double_conv(x) + + +class Down(nn.Module): + """Downscaling with maxpool then double conv""" + + def __init__(self, in_channels, out_channels): + super().__init__() + self.maxpool_conv = nn.Sequential( + nn.MaxPool2d(2), + DoubleConv(in_channels, out_channels) + ) + + def forward(self, x): + return self.maxpool_conv(x) + + +class Up(nn.Module): + """Upscaling then double conv""" + + def __init__(self, in_channels, out_channels, bilinear=True): + super().__init__() + + # if bilinear, use the normal convolutions to reduce the number of channels + if bilinear: + self.up = nn.Upsample(scale_factor=2, mode='bilinear', align_corners=True) + else: + self.up = nn.ConvTranspose2d(in_channels // 2, in_channels // 2, kernel_size=2, stride=2) + + self.conv = DoubleConv(in_channels, out_channels) + + def forward(self, x1, x2): + x1 = self.up(x1) + # input is CHW + diffY = x2.size()[2] - x1.size()[2] + diffX = x2.size()[3] - x1.size()[3] + + x1 = F.pad(x1, [diffX // 2, diffX - diffX // 2, + diffY // 2, diffY - diffY // 2]) + # if you have padding issues, see + # 
https://github.com/HaiyongJiang/U-Net-Pytorch-Unstructured-Buggy/commit/0e854509c2cea854e247a9c615f175f76fbb2e3a + # https://github.com/xiaopeng-liao/Pytorch-UNet/commit/8ebac70e633bac59fc22bb5195e513d5832fb3bd + x = torch.cat([x2, x1], dim=1) + return self.conv(x) + + +class OutConv(nn.Module): + def __init__(self, in_channels, out_channels): + super(OutConv, self).__init__() + self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=1) + + def forward(self, x): + return self.conv(x) + +class UNet(nn.Module): + def __init__(self, n_channels, n_classes, bilinear=True): + super(UNet, self).__init__() + self.n_channels = n_channels + self.n_classes = n_classes + self.bilinear = bilinear + + self.inc = DoubleConv(n_channels, 64) + self.down1 = Down(64, 128) + self.down2 = Down(128, 256) + self.down3 = Down(256, 512) + self.down4 = Down(512, 512) + self.up1 = Up(1024, 256, bilinear) + self.up2 = Up(512, 128, bilinear) + self.up3 = Up(256, 64, bilinear) + self.up4 = Up(128, 64, bilinear) + self.outc = OutConv(64, n_classes) + + def forward(self, x): + x1 = self.inc(x) + x2 = self.down1(x1) + x3 = self.down2(x2) + x4 = self.down3(x3) + x5 = self.down4(x4) + x = self.up1(x5, x4) + x = self.up2(x, x3) + x = self.up3(x, x2) + x = self.up4(x, x1) + x = F.normalize(x) + return x \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/decalib/utils/__init__.py b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/decalib/utils/config.py b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/utils/config.py new file mode 100644 index 0000000..e305b21 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/utils/config.py @@ -0,0 +1,80 @@ +''' +Default config for DECA +''' +from yacs.config import CfgNode as CN +import argparse +import yaml +import os +import torch + +cfg = CN() + +abs_deca_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')) +cfg.deca_dir = abs_deca_dir +cfg.device = "cuda" if torch.cuda.is_available() else "cpu" +cfg.device_id = '0' + +cfg.pretrained_modelpath = os.path.join(cfg.deca_dir, 'data', 'deca_model.tar') + +# ---------------------------------------------------------------------------- # +# Options for Face model +# ---------------------------------------------------------------------------- # +cfg.model = CN() +cfg.model.topology_path = os.path.join(cfg.deca_dir, 'data', 'head_template.obj') +# texture data original from http://files.is.tue.mpg.de/tbolkart/FLAME/FLAME_texture_data.zip +cfg.model.dense_template_path = os.path.join(cfg.deca_dir, 'data', 'texture_data_256.npy') +cfg.model.fixed_displacement_path = os.path.join(cfg.deca_dir, 'data', 'fixed_displacement_256.npy') +cfg.model.flame_model_path = os.path.join(cfg.deca_dir, 'data', 'generic_model.pkl') +cfg.model.flame_lmk_embedding_path = os.path.join(cfg.deca_dir, 'data', 'landmark_embedding.npy') +cfg.model.face_mask_path = os.path.join(cfg.deca_dir, 'data', 'uv_face_mask.png') +cfg.model.face_eye_mask_path = os.path.join(cfg.deca_dir, 'data', 'uv_face_eye_mask.png') +cfg.model.mean_tex_path = os.path.join(cfg.deca_dir, 'data', 'mean_texture.jpg') +cfg.model.tex_path = os.path.join(cfg.deca_dir, 'data', 'FLAME_albedo_from_BFM.npz') +cfg.model.tex_type = 'BFM' # BFM, FLAME, albedoMM +cfg.model.uv_size = 256 +cfg.model.param_list = ['shape', 'tex', 'exp', 'pose', 'cam', 'light'] +cfg.model.n_shape = 100 +cfg.model.n_tex = 50 
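+# NOTE: n_shape/n_tex/n_exp/n_pose/n_cam/n_light slice the regressed parameter
+# vector according to param_list above; with these defaults they sum to
+# 236 (100+50+50+6+3+27), the output size of DECA's coarse encoder.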
+cfg.model.n_exp = 50 +cfg.model.n_cam = 3 +cfg.model.n_pose = 6 +cfg.model.n_light = 27 +cfg.model.use_tex = False +cfg.model.jaw_type = 'aa' # default use axis angle, another option: euler + +## details +cfg.model.n_detail = 128 +cfg.model.max_z = 0.01 + +# ---------------------------------------------------------------------------- # +# Options for Dataset +# ---------------------------------------------------------------------------- # +cfg.dataset = CN() +cfg.dataset.batch_size = 24 +cfg.dataset.num_workers = 2 +cfg.dataset.image_size = 224 + +def get_cfg_defaults(): + """Get a yacs CfgNode object with default values for my_project.""" + # Return a clone so that the defaults will not be altered + # This is for the "local variable" use pattern + return cfg.clone() + +def update_cfg(cfg, cfg_file): + cfg.merge_from_file(cfg_file) + return cfg.clone() + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument('--cfg', type=str, help='cfg file path') + + args = parser.parse_args() + print(args, end='\n\n') + + cfg = get_cfg_defaults() + if args.cfg is not None: + cfg_file = args.cfg + cfg = update_cfg(cfg, args.cfg) + cfg.cfg_file = cfg_file + + return cfg diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/decalib/utils/renderer.py b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/utils/renderer.py new file mode 100755 index 0000000..7da0b83 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/utils/renderer.py @@ -0,0 +1,338 @@ +# -*- coding: utf-8 -*- +# +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# Using this computer program means that you agree to the terms +# in the LICENSE file included with this software distribution. +# Any use not explicitly granted by the LICENSE is prohibited. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# For comments or questions, please email us at deca@tue.mpg.de +# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from pytorch3d.structures import Meshes +from pytorch3d.io import load_obj +from pytorch3d.renderer.mesh import rasterize_meshes +from . 
import util +# from .rasterizer.standard_rasterize_cuda import standard_rasterize + +class Pytorch3dRasterizer(nn.Module): + """ Borrowed from https://github.com/facebookresearch/pytorch3d + Notice: + x,y,z are in image space, normalized + can only render square images for now + """ + + def __init__(self, image_size=224): + """ + use fixed raster_settings for rendering faces + """ + super().__init__() + raster_settings = { + 'image_size': image_size, + 'blur_radius': 0.0, + 'faces_per_pixel': 1, + 'bin_size': None, + 'max_faces_per_bin': None, + 'perspective_correct': False, + } + raster_settings = util.dict2obj(raster_settings) + self.raster_settings = raster_settings + + def forward(self, vertices, faces, attributes=None): + fixed_vertices = vertices.clone() + fixed_vertices[...,:2] = -fixed_vertices[...,:2] + meshes_screen = Meshes(verts=fixed_vertices.float(), faces=faces.long()) + raster_settings = self.raster_settings + pix_to_face, zbuf, bary_coords, dists = rasterize_meshes( + meshes_screen, + image_size=raster_settings.image_size, + blur_radius=raster_settings.blur_radius, + faces_per_pixel=raster_settings.faces_per_pixel, + bin_size=raster_settings.bin_size, + max_faces_per_bin=raster_settings.max_faces_per_bin, + perspective_correct=raster_settings.perspective_correct, + ) + vismask = (pix_to_face > -1).float() + D = attributes.shape[-1] + attributes = attributes.clone(); attributes = attributes.view(attributes.shape[0]*attributes.shape[1], 3, attributes.shape[-1]) + N, H, W, K, _ = bary_coords.shape + mask = pix_to_face == -1 + pix_to_face = pix_to_face.clone() + pix_to_face[mask] = 0 + idx = pix_to_face.view(N * H * W * K, 1, 1).expand(N * H * W * K, 3, D) + pixel_face_vals = attributes.gather(0, idx).view(N, H, W, K, 3, D) + pixel_vals = (bary_coords[..., None] * pixel_face_vals).sum(dim=-2) + pixel_vals[mask] = 0 # Replace masked values in output. + pixel_vals = pixel_vals[:,:,:,0].permute(0,3,1,2) + pixel_vals = torch.cat([pixel_vals, vismask[:,:,:,0][:,None,:,:]], dim=1) + return pixel_vals + +class SRenderY(nn.Module): + def __init__(self, image_size, obj_filename, uv_size=256, rasterizer_type='pytorch3d'): + super(SRenderY, self).__init__() + self.image_size = image_size + self.uv_size = uv_size + + verts, faces, aux = load_obj(obj_filename) + uvcoords = aux.verts_uvs[None, ...] # (N, V, 2) + uvfaces = faces.textures_idx[None, ...] # (N, F, 3) + faces = faces.verts_idx[None,...] + + if rasterizer_type == 'pytorch3d': + self.rasterizer = Pytorch3dRasterizer(image_size) + self.uv_rasterizer = Pytorch3dRasterizer(uv_size) + + # faces + dense_triangles = util.generate_triangles(uv_size, uv_size) + self.register_buffer('dense_faces', torch.from_numpy(dense_triangles).long()[None,:,:]) + self.register_buffer('faces', faces) + self.register_buffer('raw_uvcoords', uvcoords) + + # uv coords + uvcoords = torch.cat([uvcoords, uvcoords[:,:,0:1]*0.+1.], -1) #[bz, ntv, 3] + uvcoords = uvcoords*2 - 1; uvcoords[...,1] = -uvcoords[...,1] + face_uvcoords = util.face_vertices(uvcoords, uvfaces) + self.register_buffer('uvcoords', uvcoords) + self.register_buffer('uvfaces', uvfaces) + self.register_buffer('face_uvcoords', face_uvcoords) + + # shape colors, for rendering shape overlay + colors = torch.tensor([180, 180, 180])[None, None, :].repeat(1, faces.max()+1, 1).float()/255.
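+        # NOTE: a single uniform light-grey color (180/255) replicated for every
+        # vertex; face_vertices() below turns it into the per-face attribute used
+        # by render_shape() for the grey shape overlay.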
+ face_colors = util.face_vertices(colors, faces) + self.register_buffer('face_colors', face_colors) + + ## SH factors for lighting + pi = np.pi + constant_factor = torch.tensor([1/np.sqrt(4*pi), ((2*pi)/3)*(np.sqrt(3/(4*pi))), ((2*pi)/3)*(np.sqrt(3/(4*pi))),\ + ((2*pi)/3)*(np.sqrt(3/(4*pi))), (pi/4)*(3)*(np.sqrt(5/(12*pi))), (pi/4)*(3)*(np.sqrt(5/(12*pi))),\ + (pi/4)*(3)*(np.sqrt(5/(12*pi))), (pi/4)*(3/2)*(np.sqrt(5/(12*pi))), (pi/4)*(1/2)*(np.sqrt(5/(4*pi)))]).float() + self.register_buffer('constant_factor', constant_factor) + + def forward(self, vertices, transformed_vertices, albedos, lights=None, light_type='point'): + ''' + -- Texture Rendering + vertices: [batch_size, V, 3], vertices in world space, for calculating normals, then shading + transformed_vertices: [batch_size, V, 3], range: normalized to [-1,1], projected vertices in image space (that is aligned to the image pixels), for rasterization + albedos: [batch_size, 3, h, w], uv map + lights: + spherical harmonics: [N, 9(shcoeff), 3(rgb)] + points/directional lighting: [N, n_lights, 6(xyzrgb)] + light_type: + point or directional + ''' + batch_size = vertices.shape[0] + ## rasterizer near 0 far 100. move mesh so minz larger than 0 + transformed_vertices[:,:,2] = transformed_vertices[:,:,2] + 10 + # attributes + face_vertices = util.face_vertices(vertices, self.faces.expand(batch_size, -1, -1)) + normals = util.vertex_normals(vertices, self.faces.expand(batch_size, -1, -1)); face_normals = util.face_vertices(normals, self.faces.expand(batch_size, -1, -1)) + transformed_normals = util.vertex_normals(transformed_vertices, self.faces.expand(batch_size, -1, -1)); transformed_face_normals = util.face_vertices(transformed_normals, self.faces.expand(batch_size, -1, -1)) + + attributes = torch.cat([self.face_uvcoords.expand(batch_size, -1, -1, -1), + transformed_face_normals.detach(), + face_vertices.detach(), + face_normals], + -1) + # rasterize + rendering = self.rasterizer(transformed_vertices, self.faces.expand(batch_size, -1, -1), attributes) + + #### + # vis mask + alpha_images = rendering[:, -1, :, :][:, None, :, :].detach() + + # albedo + #pdb.set_trace() + uvcoords_images = rendering[:, :3, :, :]; grid = (uvcoords_images).permute(0, 2, 3, 1)[:, :, :, :2] + albedo_images = F.grid_sample(albedos, grid, align_corners=False) + + # visible mask for pixels with positive normal direction + transformed_normal_map = rendering[:, 3:6, :, :].detach() + pos_mask = (transformed_normal_map[:, 2:, :, :] < -0.05).float() + + # shading + normal_images = rendering[:, 9:12, :, :] + if lights is not None: + if lights.shape[1] == 9: + shading_images = self.add_SHlight(normal_images, lights) + else: + if light_type=='point': + vertice_images = rendering[:, 6:9, :, :].detach() + shading = self.add_pointlight(vertice_images.permute(0,2,3,1).reshape([batch_size, -1, 3]), normal_images.permute(0,2,3,1).reshape([batch_size, -1, 3]), lights) + shading_images = shading.reshape([batch_size, albedo_images.shape[2], albedo_images.shape[3], 3]).permute(0,3,1,2) + else: + shading = self.add_directionlight(normal_images.permute(0,2,3,1).reshape([batch_size, -1, 3]), lights) + shading_images = shading.reshape([batch_size, albedo_images.shape[2], albedo_images.shape[3], 3]).permute(0,3,1,2) + images = albedo_images*shading_images + else: + images = albedo_images + shading_images = images.detach()*0.
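+        # NOTE: channel layout of `rendering`, following the attribute
+        # concatenation above: [0:3] uv coords, [3:6] transformed normals,
+        # [6:9] world-space vertices, [9:12] world-space normals, and [-1] the
+        # rasterizer's visibility mask.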
+ + outputs = { + 'images': images*alpha_images, + 'albedo_images': albedo_images*alpha_images, + 'alpha_images': alpha_images, + 'pos_mask': pos_mask, + 'shading_images': shading_images, + 'grid': grid, + 'normals': normals, + 'normal_images': normal_images*alpha_images, + 'transformed_normals': transformed_normals, + } + + return outputs + + def add_SHlight(self, normal_images, sh_coeff): + ''' + sh_coeff: [bz, 9, 3] + ''' + N = normal_images + sh = torch.stack([ + N[:,0]*0.+1., N[:,0], N[:,1], \ + N[:,2], N[:,0]*N[:,1], N[:,0]*N[:,2], + N[:,1]*N[:,2], N[:,0]**2 - N[:,1]**2, 3*(N[:,2]**2) - 1 + ], + 1) # [bz, 9, h, w] + sh = sh*self.constant_factor[None,:,None,None] + shading = torch.sum(sh_coeff[:,:,:,None,None]*sh[:,:,None,:,:], 1) # [bz, 9, 3, h, w] + return shading + + def add_pointlight(self, vertices, normals, lights): + ''' + vertices: [bz, nv, 3] + lights: [bz, nlight, 6] + returns: + shading: [bz, nv, 3] + ''' + light_positions = lights[:,:,:3]; light_intensities = lights[:,:,3:] + directions_to_lights = F.normalize(light_positions[:,:,None,:] - vertices[:,None,:,:], dim=3) + # normals_dot_lights = torch.clamp((normals[:,None,:,:]*directions_to_lights).sum(dim=3), 0., 1.) + normals_dot_lights = (normals[:,None,:,:]*directions_to_lights).sum(dim=3) + shading = normals_dot_lights[:,:,:,None]*light_intensities[:,:,None,:] + return shading.mean(1) + + def add_directionlight(self, normals, lights): + ''' + normals: [bz, nv, 3] + lights: [bz, nlight, 6] + returns: + shading: [bz, nv, 3] + ''' + light_direction = lights[:,:,:3]; light_intensities = lights[:,:,3:] + directions_to_lights = F.normalize(light_direction[:,:,None,:].expand(-1,-1,normals.shape[1],-1), dim=3) + # normals_dot_lights = torch.clamp((normals[:,None,:,:]*directions_to_lights).sum(dim=3), 0., 1.) + # normals_dot_lights = (normals[:,None,:,:]*directions_to_lights).sum(dim=3) + normals_dot_lights = torch.clamp((normals[:,None,:,:]*directions_to_lights).sum(dim=3), 0., 1.) 
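+        # NOTE: Lambertian diffuse term: for each vertex v the shading is the
+        # mean over lights l of clamp(n_v . d_l, 0, 1) * intensity_l.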
+ shading = normals_dot_lights[:,:,:,None]*light_intensities[:,:,None,:] + return shading.mean(1) + + def render_shape(self, vertices, transformed_vertices, images=None, detail_normal_images=None, lights=None): + ''' + -- rendering shape with detail normal map + ''' + batch_size = vertices.shape[0] + # set lighting + if lights is None: + light_positions = torch.tensor( + [ + [-1,1,1], + [1,1,1], + [-1,-1,1], + [1,-1,1], + [0,0,1] + ] + )[None,:,:].expand(batch_size, -1, -1).float() + light_intensities = torch.ones_like(light_positions).float()*1.7 + lights = torch.cat((light_positions, light_intensities), 2).to(vertices.device) + transformed_vertices[:,:,2] = transformed_vertices[:,:,2] + 10 + + # Attributes + face_vertices = util.face_vertices(vertices, self.faces.expand(batch_size, -1, -1)) + normals = util.vertex_normals(vertices, self.faces.expand(batch_size, -1, -1)); face_normals = util.face_vertices(normals, self.faces.expand(batch_size, -1, -1)) + transformed_normals = util.vertex_normals(transformed_vertices, self.faces.expand(batch_size, -1, -1)); transformed_face_normals = util.face_vertices(transformed_normals, self.faces.expand(batch_size, -1, -1)) + attributes = torch.cat([self.face_colors.expand(batch_size, -1, -1, -1), + transformed_face_normals.detach(), + face_vertices.detach(), + face_normals], + -1) + # rasterize + rendering = self.rasterizer(transformed_vertices, self.faces.expand(batch_size, -1, -1), attributes) + + #### + alpha_images = rendering[:, -1, :, :][:, None, :, :].detach() + + # albedo + albedo_images = rendering[:, :3, :, :] + # mask + transformed_normal_map = rendering[:, 3:6, :, :].detach() + pos_mask = (transformed_normal_map[:, 2:, :, :] < 0.15).float() + + # shading + normal_images = rendering[:, 9:12, :, :].detach() + vertice_images = rendering[:, 6:9, :, :].detach() + if detail_normal_images is not None: + normal_images = detail_normal_images + + shading = self.add_directionlight(normal_images.permute(0,2,3,1).reshape([batch_size, -1, 3]), lights) + shading_images = shading.reshape([batch_size, albedo_images.shape[2], albedo_images.shape[3], 3]).permute(0,3,1,2).contiguous() + shaded_images = albedo_images*shading_images + + alpha_images = alpha_images*pos_mask + if images is None: + shape_images = shaded_images*alpha_images + torch.zeros_like(shaded_images).to(vertices.device)*(1-alpha_images) + else: + shape_images = shaded_images*alpha_images + images*(1-alpha_images) + return shape_images + + def render_depth(self, transformed_vertices): + ''' + -- rendering depth + ''' + batch_size = transformed_vertices.shape[0] + + transformed_vertices[:,:,2] = transformed_vertices[:,:,2] - transformed_vertices[:,:,2].min() + z = -transformed_vertices[:,:,2:].repeat(1,1,3).clone() + z = z-z.min() + z = z/z.max() + # Attributes + attributes = util.face_vertices(z, self.faces.expand(batch_size, -1, -1)) + # rasterize + transformed_vertices[:,:,2] = transformed_vertices[:,:,2] + 10 + rendering = self.rasterizer(transformed_vertices, self.faces.expand(batch_size, -1, -1), attributes) + + #### + alpha_images = rendering[:, -1, :, :][:, None, :, :].detach() + depth_images = rendering[:, :1, :, :] + return depth_images + + def render_normal(self, transformed_vertices, normals): + ''' + -- rendering normal + ''' + batch_size = normals.shape[0] + + # Attributes + attributes = util.face_vertices(normals, self.faces.expand(batch_size, -1, -1)) + # rasterize + rendering = self.rasterizer(transformed_vertices, self.faces.expand(batch_size, -1, -1), attributes) + #### + 
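+        # NOTE: as in forward() and render_shape(), the first three channels hold
+        # the barycentrically interpolated per-vertex normals and the last
+        # channel is the visibility (alpha) mask.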
alpha_images = rendering[:, -1, :, :][:, None, :, :].detach() + normal_images = rendering[:, :3, :, :] + return normal_images + + def world2uv(self, vertices): + ''' + warp vertices from world space to uv space + vertices: [bz, V, 3] + uv_vertices: [bz, 3, h, w] + ''' + batch_size = vertices.shape[0] + face_vertices = util.face_vertices(vertices, self.faces.expand(batch_size, -1, -1)) + uv_vertices = self.uv_rasterizer(self.uvcoords.expand(batch_size, -1, -1), self.uvfaces.expand(batch_size, -1, -1), face_vertices)[:, :3] + return uv_vertices diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/decalib/utils/rotation_converter.py b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/utils/rotation_converter.py new file mode 100644 index 0000000..89756d9 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/utils/rotation_converter.py @@ -0,0 +1,381 @@ +# -*- coding: utf-8 -*- +# +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# Using this computer program means that you agree to the terms +# in the LICENSE file included with this software distribution. +# Any use not explicitly granted by the LICENSE is prohibited. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# For comments or questions, please email us at deca@tue.mpg.de +# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de + +import torch +import pdb + +''' Rotation Converter +Representations: + euler angle(3), angle axis(3), rotation matrix(3x3), quaternion(4), continuous representation +Ref: + https://kornia.readthedocs.io/en/v0.1.2/_modules/torchgeometry/core/conversions.html# + smplx/lbs +''' + +pi = torch.Tensor([3.14159265358979323846]) +def rad2deg(tensor): + """Function that converts angles from radians to degrees. + + See :class:`~torchgeometry.RadToDeg` for details. + + Args: + tensor (Tensor): Tensor of arbitrary shape. + + Returns: + Tensor: Tensor with same shape as input. + + Example: + >>> input = tgm.pi * torch.rand(1, 3, 3) + >>> output = tgm.rad2deg(input) + """ + if not torch.is_tensor(tensor): + raise TypeError("Input type is not a torch.Tensor. Got {}" + .format(type(tensor))) + + return 180. * tensor / pi.to(tensor.device).type(tensor.dtype) + +def deg2rad(tensor): + """Function that converts angles from degrees to radians. + + See :class:`~torchgeometry.DegToRad` for details. + + Args: + tensor (Tensor): Tensor of arbitrary shape. + + Returns: + Tensor: Tensor with same shape as input. + + Examples:: + + >>> input = 360. * torch.rand(1, 3, 3) + >>> output = tgm.deg2rad(input) + """ + if not torch.is_tensor(tensor): + raise TypeError("Input type is not a torch.Tensor. Got {}" + .format(type(tensor))) + + return tensor * pi.to(tensor.device).type(tensor.dtype) / 180.
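+# A minimal round-trip sketch (an illustrative addition, not upstream DECA code;
+# the helper name and tolerance are our own): axis-angle -> quaternion ->
+# rotation matrix -> quaternion -> axis-angle, using the converters defined
+# below, should recover the input up to float32 precision. Nothing in the
+# pipeline calls this helper; it only documents the expected conventions
+# (quaternions ordered (w, x, y, z)).
+def _rotation_roundtrip_ok(atol=1e-4):
+    aa = torch.tensor([[0.1, -0.2, 0.3], [0.0, 0.0, 0.0]])  # two axis-angle vectors
+    quat = angle_axis_to_quaternion(aa)                      # Nx4, ordered (w, x, y, z)
+    rot = quaternion_to_rotation_matrix(quat)                # Nx3x3
+    aa_back = quaternion_to_angle_axis(rotation_matrix_to_quaternion(rot))
+    return bool(torch.allclose(aa, aa_back, atol=atol))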
+ +######### to quaternion +def euler_to_quaternion(r): + x = r[..., 0] + y = r[..., 1] + z = r[..., 2] + + z = z/2.0 + y = y/2.0 + x = x/2.0 + cz = torch.cos(z) + sz = torch.sin(z) + cy = torch.cos(y) + sy = torch.sin(y) + cx = torch.cos(x) + sx = torch.sin(x) + quaternion = torch.zeros_like(r.repeat(1,2))[..., :4].to(r.device) + quaternion[..., 0] += cx*cy*cz - sx*sy*sz + quaternion[..., 1] += cx*sy*sz + cy*cz*sx + quaternion[..., 2] += cx*cz*sy - sx*cy*sz + quaternion[..., 3] += cx*cy*sz + sx*cz*sy + return quaternion + +def rotation_matrix_to_quaternion(rotation_matrix, eps=1e-6): + """Convert 3x4 rotation matrix to 4d quaternion vector + + This algorithm is based on algorithm described in + https://github.com/KieranWynn/pyquaternion/blob/master/pyquaternion/quaternion.py#L201 + + Args: + rotation_matrix (Tensor): the rotation matrix to convert. + + Return: + Tensor: the rotation in quaternion + + Shape: + - Input: :math:`(N, 3, 4)` + - Output: :math:`(N, 4)` + + Example: + >>> input = torch.rand(4, 3, 4) # Nx3x4 + >>> output = tgm.rotation_matrix_to_quaternion(input) # Nx4 + """ + if not torch.is_tensor(rotation_matrix): + raise TypeError("Input type is not a torch.Tensor. Got {}".format( + type(rotation_matrix))) + + if len(rotation_matrix.shape) > 3: + raise ValueError( + "Input size must be a three dimensional tensor. Got {}".format( + rotation_matrix.shape)) + # if not rotation_matrix.shape[-2:] == (3, 4): + # raise ValueError( + # "Input size must be a N x 3 x 4 tensor. Got {}".format( + # rotation_matrix.shape)) + + rmat_t = torch.transpose(rotation_matrix, 1, 2) + + mask_d2 = rmat_t[:, 2, 2] < eps + + mask_d0_d1 = rmat_t[:, 0, 0] > rmat_t[:, 1, 1] + mask_d0_nd1 = rmat_t[:, 0, 0] < -rmat_t[:, 1, 1] + + t0 = 1 + rmat_t[:, 0, 0] - rmat_t[:, 1, 1] - rmat_t[:, 2, 2] + q0 = torch.stack([rmat_t[:, 1, 2] - rmat_t[:, 2, 1], + t0, rmat_t[:, 0, 1] + rmat_t[:, 1, 0], + rmat_t[:, 2, 0] + rmat_t[:, 0, 2]], -1) + t0_rep = t0.repeat(4, 1).t() + + t1 = 1 - rmat_t[:, 0, 0] + rmat_t[:, 1, 1] - rmat_t[:, 2, 2] + q1 = torch.stack([rmat_t[:, 2, 0] - rmat_t[:, 0, 2], + rmat_t[:, 0, 1] + rmat_t[:, 1, 0], + t1, rmat_t[:, 1, 2] + rmat_t[:, 2, 1]], -1) + t1_rep = t1.repeat(4, 1).t() + + t2 = 1 - rmat_t[:, 0, 0] - rmat_t[:, 1, 1] + rmat_t[:, 2, 2] + q2 = torch.stack([rmat_t[:, 0, 1] - rmat_t[:, 1, 0], + rmat_t[:, 2, 0] + rmat_t[:, 0, 2], + rmat_t[:, 1, 2] + rmat_t[:, 2, 1], t2], -1) + t2_rep = t2.repeat(4, 1).t() + + t3 = 1 + rmat_t[:, 0, 0] + rmat_t[:, 1, 1] + rmat_t[:, 2, 2] + q3 = torch.stack([t3, rmat_t[:, 1, 2] - rmat_t[:, 2, 1], + rmat_t[:, 2, 0] - rmat_t[:, 0, 2], + rmat_t[:, 0, 1] - rmat_t[:, 1, 0]], -1) + t3_rep = t3.repeat(4, 1).t() + + mask_c0 = mask_d2 * mask_d0_d1.float() + mask_c1 = mask_d2 * (1 - mask_d0_d1.float()) + mask_c2 = (1 - mask_d2.float()) * mask_d0_nd1 + mask_c3 = (1 - mask_d2.float()) * (1 - mask_d0_nd1.float()) + mask_c0 = mask_c0.view(-1, 1).type_as(q0) + mask_c1 = mask_c1.view(-1, 1).type_as(q1) + mask_c2 = mask_c2.view(-1, 1).type_as(q2) + mask_c3 = mask_c3.view(-1, 1).type_as(q3) + + q = q0 * mask_c0 + q1 * mask_c1 + q2 * mask_c2 + q3 * mask_c3 + q /= torch.sqrt(t0_rep * mask_c0 + t1_rep * mask_c1 + # noqa + t2_rep * mask_c2 + t3_rep * mask_c3) # noqa + q *= 0.5 + return q + +# def angle_axis_to_quaternion(theta): +# batch_size = theta.shape[0] +# l1norm = torch.norm(theta + 1e-8, p=2, dim=1) +# angle = torch.unsqueeze(l1norm, -1) +# normalized = torch.div(theta, angle) +# angle = angle * 0.5 +# v_cos = torch.cos(angle) +# v_sin = torch.sin(angle) +# quat = 
torch.cat([v_cos, v_sin * normalized], dim=1) +# return quat + +def angle_axis_to_quaternion(angle_axis: torch.Tensor) -> torch.Tensor: + """Convert an angle axis to a quaternion. + + Adapted from ceres C++ library: ceres-solver/include/ceres/rotation.h + + Args: + angle_axis (torch.Tensor): tensor with angle axis. + + Return: + torch.Tensor: tensor with quaternion. + + Shape: + - Input: :math:`(*, 3)` where `*` means, any number of dimensions + - Output: :math:`(*, 4)` + + Example: + >>> angle_axis = torch.rand(2, 3) # Nx3 + >>> quaternion = tgm.angle_axis_to_quaternion(angle_axis) # Nx4 + """ + if not torch.is_tensor(angle_axis): + raise TypeError("Input type is not a torch.Tensor. Got {}".format( + type(angle_axis))) + + if not angle_axis.shape[-1] == 3: + raise ValueError("Input must be a tensor of shape Nx3 or 3. Got {}" + .format(angle_axis.shape)) + # unpack input and compute conversion + a0: torch.Tensor = angle_axis[..., 0:1] + a1: torch.Tensor = angle_axis[..., 1:2] + a2: torch.Tensor = angle_axis[..., 2:3] + theta_squared: torch.Tensor = a0 * a0 + a1 * a1 + a2 * a2 + + theta: torch.Tensor = torch.sqrt(theta_squared) + half_theta: torch.Tensor = theta * 0.5 + + mask: torch.Tensor = theta_squared > 0.0 + ones: torch.Tensor = torch.ones_like(half_theta) + + k_neg: torch.Tensor = 0.5 * ones + k_pos: torch.Tensor = torch.sin(half_theta) / theta + k: torch.Tensor = torch.where(mask, k_pos, k_neg) + w: torch.Tensor = torch.where(mask, torch.cos(half_theta), ones) + + quaternion: torch.Tensor = torch.zeros_like(angle_axis) + quaternion[..., 0:1] += a0 * k + quaternion[..., 1:2] += a1 * k + quaternion[..., 2:3] += a2 * k + return torch.cat([w, quaternion], dim=-1) + +#### quaternion to +def quaternion_to_rotation_matrix(quat): + """Convert quaternion coefficients to rotation matrix. + Args: + quat: size = [B, 4] 4 <===>(w, x, y, z) + Returns: + Rotation matrix corresponding to the quaternion -- size = [B, 3, 3] + """ + norm_quat = quat + norm_quat = norm_quat / norm_quat.norm(p=2, dim=1, keepdim=True) + w, x, y, z = norm_quat[:, 0], norm_quat[:, 1], norm_quat[:, 2], norm_quat[:, 3] + + B = quat.size(0) + + w2, x2, y2, z2 = w.pow(2), x.pow(2), y.pow(2), z.pow(2) + wx, wy, wz = w * x, w * y, w * z + xy, xz, yz = x * y, x * z, y * z + + rotMat = torch.stack([w2 + x2 - y2 - z2, 2 * xy - 2 * wz, 2 * wy + 2 * xz, + 2 * wz + 2 * xy, w2 - x2 + y2 - z2, 2 * yz - 2 * wx, + 2 * xz - 2 * wy, 2 * wx + 2 * yz, w2 - x2 - y2 + z2], dim=1).view(B, 3, 3) + return rotMat + +def quaternion_to_angle_axis(quaternion: torch.Tensor): + """Convert quaternion vector to angle axis of rotation. TODO: CORRECT + + Adapted from ceres C++ library: ceres-solver/include/ceres/rotation.h + + Args: + quaternion (torch.Tensor): tensor with quaternions. + + Return: + torch.Tensor: tensor with angle axis of rotation. + + Shape: + - Input: :math:`(*, 4)` where `*` means, any number of dimensions + - Output: :math:`(*, 3)` + + Example: + >>> quaternion = torch.rand(2, 4) # Nx4 + >>> angle_axis = tgm.quaternion_to_angle_axis(quaternion) # Nx3 + """ + if not torch.is_tensor(quaternion): + raise TypeError("Input type is not a torch.Tensor. Got {}".format( + type(quaternion))) + + if not quaternion.shape[-1] == 4: + raise ValueError("Input must be a tensor of shape Nx4 or 4.
Got {}" + .format(quaternion.shape)) + # unpack input and compute conversion + q1: torch.Tensor = quaternion[..., 1] + q2: torch.Tensor = quaternion[..., 2] + q3: torch.Tensor = quaternion[..., 3] + sin_squared_theta: torch.Tensor = q1 * q1 + q2 * q2 + q3 * q3 + + sin_theta: torch.Tensor = torch.sqrt(sin_squared_theta) + cos_theta: torch.Tensor = quaternion[..., 0] + two_theta: torch.Tensor = 2.0 * torch.where( + cos_theta < 0.0, + torch.atan2(-sin_theta, -cos_theta), + torch.atan2(sin_theta, cos_theta)) + + k_pos: torch.Tensor = two_theta / sin_theta + k_neg: torch.Tensor = 2.0 * torch.ones_like(sin_theta).to(quaternion.device) + k: torch.Tensor = torch.where(sin_squared_theta > 0.0, k_pos, k_neg) + + angle_axis: torch.Tensor = torch.zeros_like(quaternion).to(quaternion.device)[..., :3] + angle_axis[..., 0] += q1 * k + angle_axis[..., 1] += q2 * k + angle_axis[..., 2] += q3 * k + return angle_axis + +#### batch converter +def batch_euler2axis(r): + return quaternion_to_angle_axis(euler_to_quaternion(r)) + +def batch_euler2matrix(r): + return quaternion_to_rotation_matrix(euler_to_quaternion(r)) + +def batch_matrix2euler(rot_mats): + # Calculates rotation matrix to euler angles + # Careful for extreme cases of euler angles like [0.0, pi, 0.0] + ### only y? + # TODO: + sy = torch.sqrt(rot_mats[:, 0, 0] * rot_mats[:, 0, 0] + + rot_mats[:, 1, 0] * rot_mats[:, 1, 0]) + return torch.atan2(-rot_mats[:, 2, 0], sy) + +def batch_matrix2axis(rot_mats): + return quaternion_to_angle_axis(rotation_matrix_to_quaternion(rot_mats)) + +def batch_axis2matrix(theta): + + # angle axis to rotation matrix + # theta N x 3 + # return quat2mat(quat) + # batch_rodrigues + return quaternion_to_rotation_matrix(angle_axis_to_quaternion(theta)) + +def batch_axis2euler(theta): + return batch_matrix2euler(batch_axis2matrix(theta)) + + +def batch_orth_proj(X, camera): + ''' + X is N x num_points x 3 + ''' + camera = camera.clone().view(-1, 1, 3) + X_trans = X[:, :, :2] + camera[:, :, 1:] + X_trans = torch.cat([X_trans, X[:,:,2:]], 2) + Xn = (camera[:, :, 0:1] * X_trans) + return Xn + +def batch_rodrigues(rot_vecs, epsilon=1e-8, dtype=torch.float32): + ''' same as batch_axis2matrix + Calculates the rotation matrices for a batch of rotation vectors + Parameters + ---------- + rot_vecs: torch.tensor Nx3 + array of N axis-angle vectors + Returns + ------- + R: torch.tensor Nx3x3 + The rotation matrices for the given axis-angle parameters + ''' + + batch_size = rot_vecs.shape[0] + device = rot_vecs.device + + angle = torch.norm(rot_vecs + 1e-8, dim=1, keepdim=True) + rot_dir = rot_vecs / angle + + cos = torch.unsqueeze(torch.cos(angle), dim=1) + sin = torch.unsqueeze(torch.sin(angle), dim=1) + + # Bx1 arrays + rx, ry, rz = torch.split(rot_dir, 1, dim=1) + K = torch.zeros((batch_size, 3, 3), dtype=dtype, device=device) + + zeros = torch.zeros((batch_size, 1), dtype=dtype, device=device) + K = torch.cat([zeros, -rz, ry, rz, zeros, -rx, -ry, rx, zeros], dim=1) \ + .view((batch_size, 3, 3)) + + ident = torch.eye(3, dtype=dtype, device=device).unsqueeze(dim=0) + rot_mat = ident + sin * K + (1 - cos) * torch.bmm(K, K) + return rot_mat diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/decalib/utils/util.py b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/utils/util.py new file mode 100755 index 0000000..5657fd4 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/decalib/utils/util.py @@ -0,0 +1,601
@@ +# -*- coding: utf-8 -*- +# +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# Using this computer program means that you agree to the terms +# in the LICENSE file included with this software distribution. +# Any use not explicitly granted by the LICENSE is prohibited. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# For comments or questions, please email us at deca@tue.mpg.de +# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de + +import numpy as np +import torch +import torch.nn.functional as F +import math +from collections import OrderedDict +import os +from scipy.ndimage import morphology +from skimage.io import imsave +import cv2 + +def upsample_mesh(vertices, normals, faces, displacement_map, texture_map, dense_template): + ''' upsampling coarse mesh (with displacement map) + vertices: vertices of coarse mesh, [nv, 3] + normals: vertex normals, [nv, 3] + faces: faces of coarse mesh, [nf, 3] + texture_map: texture map, [256, 256, 3] + displacement_map: displacement map, [256, 256] + dense_template: + Returns: + dense_vertices: upsampled vertices with details, [number of dense vertices, 3] + dense_colors: vertex color, [number of dense vertices, 3] + dense_faces: [number of dense faces, 3] + ''' + img_size = dense_template['img_size'] + dense_faces = dense_template['f'] + x_coords = dense_template['x_coords'] + y_coords = dense_template['y_coords'] + valid_pixel_ids = dense_template['valid_pixel_ids'] + valid_pixel_3d_faces = dense_template['valid_pixel_3d_faces'] + valid_pixel_b_coords = dense_template['valid_pixel_b_coords'] + + pixel_3d_points = vertices[valid_pixel_3d_faces[:, 0], :] * valid_pixel_b_coords[:, 0][:, np.newaxis] + \ + vertices[valid_pixel_3d_faces[:, 1], :] * valid_pixel_b_coords[:, 1][:, np.newaxis] + \ + vertices[valid_pixel_3d_faces[:, 2], :] * valid_pixel_b_coords[:, 2][:, np.newaxis] + vertex_normals = normals + pixel_3d_normals = vertex_normals[valid_pixel_3d_faces[:, 0], :] * valid_pixel_b_coords[:, 0][:, np.newaxis] + \ + vertex_normals[valid_pixel_3d_faces[:, 1], :] * valid_pixel_b_coords[:, 1][:, np.newaxis] + \ + vertex_normals[valid_pixel_3d_faces[:, 2], :] * valid_pixel_b_coords[:, 2][:, np.newaxis] + pixel_3d_normals = pixel_3d_normals / np.linalg.norm(pixel_3d_normals, axis=-1)[:, np.newaxis] + displacements = displacement_map[y_coords[valid_pixel_ids].astype(int), x_coords[valid_pixel_ids].astype(int)] + dense_colors = texture_map[y_coords[valid_pixel_ids].astype(int), x_coords[valid_pixel_ids].astype(int)] + offsets = np.einsum('i,ij->ij', displacements, pixel_3d_normals) + dense_vertices = pixel_3d_points + offsets + return dense_vertices, dense_colors, dense_faces + +# borrowed from https://github.com/YadiraF/PRNet/blob/master/utils/write.py +def write_obj(obj_name, + vertices, + faces, + colors=None, + texture=None, + uvcoords=None, + uvfaces=None, + inverse_face_order=False, + normal_map=None, + ): + ''' Save 3D face model with texture.
+ Ref: https://github.com/patrikhuber/eos/blob/bd00155ebae4b1a13b08bf5a991694d682abbada/include/eos/core/Mesh.hpp + Args: + obj_name: str + vertices: shape = (nver, 3) + colors: shape = (nver, 3) + faces: shape = (ntri, 3) + texture: shape = (uv_size, uv_size, 3) + uvcoords: shape = (nver, 2) max value<=1 + ''' + if obj_name.split('.')[-1] != 'obj': + obj_name = obj_name + '.obj' + mtl_name = obj_name.replace('.obj', '.mtl') + texture_name = obj_name.replace('.obj', '.png') + material_name = 'FaceTexture' + + faces = faces.copy() + # mesh lab start with 1, python/c++ start from 0 + faces += 1 + if inverse_face_order: + faces = faces[:, [2, 1, 0]] + if uvfaces is not None: + uvfaces = uvfaces[:, [2, 1, 0]] + + # write obj + with open(obj_name, 'w') as f: + # first line: write mtlib(material library) + # f.write('# %s\n' % os.path.basename(obj_name)) + # f.write('#\n') + # f.write('\n') + if texture is not None: + f.write('mtllib %s\n\n' % os.path.basename(mtl_name)) + + # write vertices + if colors is None: + for i in range(vertices.shape[0]): + f.write('v {} {} {}\n'.format(vertices[i, 0], vertices[i, 1], vertices[i, 2])) + else: + for i in range(vertices.shape[0]): + f.write('v {} {} {} {} {} {}\n'.format(vertices[i, 0], vertices[i, 1], vertices[i, 2], colors[i, 0], colors[i, 1], colors[i, 2])) + + # write uv coords + if texture is None: + for i in range(faces.shape[0]): + f.write('f {} {} {}\n'.format(faces[i, 2], faces[i, 1], faces[i, 0])) + else: + for i in range(uvcoords.shape[0]): + f.write('vt {} {}\n'.format(uvcoords[i,0], uvcoords[i,1])) + f.write('usemtl %s\n' % material_name) + # write f: ver ind/ uv ind + uvfaces = uvfaces + 1 + for i in range(faces.shape[0]): + f.write('f {}/{} {}/{} {}/{}\n'.format( + # faces[i, 2], uvfaces[i, 2], + # faces[i, 1], uvfaces[i, 1], + # faces[i, 0], uvfaces[i, 0] + faces[i, 0], uvfaces[i, 0], + faces[i, 1], uvfaces[i, 1], + faces[i, 2], uvfaces[i, 2] + ) + ) + # write mtl + with open(mtl_name, 'w') as f: + f.write('newmtl %s\n' % material_name) + s = 'map_Kd {}\n'.format(os.path.basename(texture_name)) # map to image + f.write(s) + + if normal_map is not None: + name, _ = os.path.splitext(obj_name) + normal_name = f'{name}_normals.png' + f.write(f'disp {normal_name}') + # out_normal_map = normal_map / (np.linalg.norm( + # normal_map, axis=-1, keepdims=True) + 1e-9) + # out_normal_map = (out_normal_map + 1) * 0.5 + + cv2.imwrite( + normal_name, + # (out_normal_map * 255).astype(np.uint8)[:, :, ::-1] + normal_map + ) + cv2.imwrite(texture_name, texture) + +# ---------------------------- process/generate vertices, normals, faces +def generate_triangles(h, w, margin_x=2, margin_y=5, mask = None): + # quad layout: + # 0 1 ... w-1 + # w w+1 + #. 
+ # w*h + triangles = [] + for x in range(margin_x, w-1-margin_x): + for y in range(margin_y, h-1-margin_y): + triangle0 = [y*w + x, y*w + x + 1, (y+1)*w + x] + triangle1 = [y*w + x + 1, (y+1)*w + x + 1, (y+1)*w + x] + triangles.append(triangle0) + triangles.append(triangle1) + triangles = np.array(triangles) + triangles = triangles[:,[0,2,1]] + return triangles + +# borrowed from https://github.com/daniilidis-group/neural_renderer/blob/master/neural_renderer/vertices_to_faces.py +def face_vertices(vertices, faces): + """ + :param vertices: [batch size, number of vertices, 3] + :param faces: [batch size, number of faces, 3] + :return: [batch size, number of faces, 3, 3] + """ + assert (vertices.ndimension() == 3) + assert (faces.ndimension() == 3) + assert (vertices.shape[0] == faces.shape[0]) + assert (vertices.shape[2] == 3) + assert (faces.shape[2] == 3) + + bs, nv = vertices.shape[:2] + bs, nf = faces.shape[:2] + device = vertices.device + faces = faces + (torch.arange(bs, dtype=torch.int32).to(device) * nv)[:, None, None] + vertices = vertices.reshape((bs * nv, 3)) + # pytorch only supports long and byte tensors for indexing + return vertices[faces.long()] + +def vertex_normals(vertices, faces): + """ + :param vertices: [batch size, number of vertices, 3] + :param faces: [batch size, number of faces, 3] + :return: [batch size, number of vertices, 3] + """ + assert (vertices.ndimension() == 3) + assert (faces.ndimension() == 3) + assert (vertices.shape[0] == faces.shape[0]) + assert (vertices.shape[2] == 3) + assert (faces.shape[2] == 3) + bs, nv = vertices.shape[:2] + bs, nf = faces.shape[:2] + device = vertices.device + normals = torch.zeros(bs * nv, 3).to(device) + + faces = faces + (torch.arange(bs, dtype=torch.int32).to(device) * nv)[:, None, None] # expanded faces + vertices_faces = vertices.reshape((bs * nv, 3))[faces.long()] + + faces = faces.reshape(-1, 3) + vertices_faces = vertices_faces.reshape(-1, 3, 3) + + normals.index_add_(0, faces[:, 1].long(), + torch.cross(vertices_faces[:, 2] - vertices_faces[:, 1], vertices_faces[:, 0] - vertices_faces[:, 1])) + normals.index_add_(0, faces[:, 2].long(), + torch.cross(vertices_faces[:, 0] - vertices_faces[:, 2], vertices_faces[:, 1] - vertices_faces[:, 2])) + normals.index_add_(0, faces[:, 0].long(), + torch.cross(vertices_faces[:, 1] - vertices_faces[:, 0], vertices_faces[:, 2] - vertices_faces[:, 0])) + + normals = F.normalize(normals, eps=1e-6, dim=1) + normals = normals.reshape((bs, nv, 3)) + # pytorch only supports long and byte tensors for indexing + return normals + +def batch_orth_proj(X, camera): + ''' orthgraphic projection + X: 3d vertices, [bz, n_point, 3] + camera: scale and translation, [bz, 3], [scale, tx, ty] + ''' + camera = camera.clone().view(-1, 1, 3) + X_trans = X[:, :, :2] + camera[:, :, 1:] + X_trans = torch.cat([X_trans, X[:,:,2:]], 2) + shape = X_trans.shape + Xn = (camera[:, :, 0:1] * X_trans) + return Xn + +# -------------------------------------- image processing +# borrowed from: https://torchgeometry.readthedocs.io/en/latest/_modules/kornia/filters +def gaussian(window_size, sigma): + def gauss_fcn(x): + return -(x - window_size // 2)**2 / float(2 * sigma**2) + gauss = torch.stack( + [torch.exp(torch.tensor(gauss_fcn(x))) for x in range(window_size)]) + return gauss / gauss.sum() + +def get_gaussian_kernel(kernel_size: int, sigma: float): + r"""Function that returns Gaussian filter coefficients. + + Args: + kernel_size (int): filter size. It should be odd and positive. 
+ sigma (float): gaussian standard deviation. + + Returns: + Tensor: 1D tensor with gaussian filter coefficients. + + Shape: + - Output: :math:`(\text{kernel_size})` + + Examples:: + + >>> kornia.image.get_gaussian_kernel(3, 2.5) + tensor([0.3243, 0.3513, 0.3243]) + + >>> kornia.image.get_gaussian_kernel(5, 1.5) + tensor([0.1201, 0.2339, 0.2921, 0.2339, 0.1201]) + """ + if not isinstance(kernel_size, int) or kernel_size % 2 == 0 or \ + kernel_size <= 0: + raise TypeError("kernel_size must be an odd positive integer. " + "Got {}".format(kernel_size)) + window_1d = gaussian(kernel_size, sigma) + return window_1d + +def get_gaussian_kernel2d(kernel_size, sigma): + r"""Function that returns Gaussian filter matrix coefficients. + + Args: + kernel_size (Tuple[int, int]): filter sizes in the x and y direction. + Sizes should be odd and positive. + sigma (Tuple[int, int]): gaussian standard deviation in the x and y + direction. + + Returns: + Tensor: 2D tensor with gaussian filter matrix coefficients. + + Shape: + - Output: :math:`(\text{kernel_size}_x, \text{kernel_size}_y)` + + Examples:: + + >>> kornia.image.get_gaussian_kernel2d((3, 3), (1.5, 1.5)) + tensor([[0.0947, 0.1183, 0.0947], + [0.1183, 0.1478, 0.1183], + [0.0947, 0.1183, 0.0947]]) + + >>> kornia.image.get_gaussian_kernel2d((3, 5), (1.5, 1.5)) + tensor([[0.0370, 0.0720, 0.0899, 0.0720, 0.0370], + [0.0462, 0.0899, 0.1123, 0.0899, 0.0462], + [0.0370, 0.0720, 0.0899, 0.0720, 0.0370]]) + """ + if not isinstance(kernel_size, tuple) or len(kernel_size) != 2: + raise TypeError("kernel_size must be a tuple of length two. Got {}" + .format(kernel_size)) + if not isinstance(sigma, tuple) or len(sigma) != 2: + raise TypeError("sigma must be a tuple of length two. Got {}" + .format(sigma)) + ksize_x, ksize_y = kernel_size + sigma_x, sigma_y = sigma + kernel_x = get_gaussian_kernel(ksize_x, sigma_x) + kernel_y = get_gaussian_kernel(ksize_y, sigma_y) + kernel_2d = torch.matmul( + kernel_x.unsqueeze(-1), kernel_y.unsqueeze(-1).t()) + return kernel_2d + +def gaussian_blur(x, kernel_size=(3,3), sigma=(0.8,0.8)): + b, c, h, w = x.shape + kernel = get_gaussian_kernel2d(kernel_size, sigma).to(x.device).to(x.dtype) + kernel = kernel.repeat(c, 1, 1, 1) + padding = [(k - 1) // 2 for k in kernel_size] + return F.conv2d(x, kernel, padding=padding, stride=1, groups=c) + +def _compute_binary_kernel(window_size): + r"""Creates a binary kernel to extract the patches. If the window size + is HxW will create a (H*W)xHxW kernel. + """ + window_range = window_size[0] * window_size[1] + kernel: torch.Tensor = torch.zeros(window_range, window_range) + for i in range(window_range): + kernel[i, i] += 1.0 + return kernel.view(window_range, 1, window_size[0], window_size[1]) + +def median_blur(x, kernel_size=(3,3)): + b, c, h, w = x.shape + kernel = _compute_binary_kernel(kernel_size).to(x.device).to(x.dtype) + kernel = kernel.repeat(c, 1, 1, 1) + padding = [(k - 1) // 2 for k in kernel_size] + features = F.conv2d(x, kernel, padding=padding, stride=1, groups=c) + features = features.view(b,c,-1,h,w) + median = torch.median(features, dim=2)[0] + return median + +def get_laplacian_kernel2d(kernel_size: int): + r"""Function that returns Gaussian filter matrix coefficients. + + Args: + kernel_size (int): filter size should be odd. + + Returns: + Tensor: 2D tensor with laplacian filter matrix coefficients. 
+ + Shape: + - Output: :math:`(\text{kernel_size}_x, \text{kernel_size}_y)` + + Examples:: + + >>> kornia.image.get_laplacian_kernel2d(3) + tensor([[ 1., 1., 1.], + [ 1., -8., 1.], + [ 1., 1., 1.]]) + + >>> kornia.image.get_laplacian_kernel2d(5) + tensor([[ 1., 1., 1., 1., 1.], + [ 1., 1., 1., 1., 1.], + [ 1., 1., -24., 1., 1.], + [ 1., 1., 1., 1., 1.], + [ 1., 1., 1., 1., 1.]]) + + """ + if not isinstance(kernel_size, int) or kernel_size % 2 == 0 or \ + kernel_size <= 0: + raise TypeError("ksize must be an odd positive integer. Got {}" + .format(kernel_size)) + + kernel = torch.ones((kernel_size, kernel_size)) + mid = kernel_size // 2 + kernel[mid, mid] = 1 - kernel_size ** 2 + kernel_2d: torch.Tensor = kernel + return kernel_2d + +def laplacian(x): + # https://torchgeometry.readthedocs.io/en/latest/_modules/kornia/filters/laplacian.html + b, c, h, w = x.shape + kernel_size = 3 + kernel = get_laplacian_kernel2d(kernel_size).to(x.device).to(x.dtype) + kernel = kernel.repeat(c, 1, 1, 1) + padding = (kernel_size - 1) // 2 + return F.conv2d(x, kernel, padding=padding, stride=1, groups=c) + +def angle2matrix(angles): + ''' get rotation matrix from three rotation angles(degree). right-handed. + Args: + angles: [batch_size, 3] tensor containing X, Y, and Z angles. + x: pitch. positive for looking down. + y: yaw. positive for looking left. + z: roll. positive for tilting head right. + Returns: + R: [batch_size, 3, 3]. rotation matrices. + ''' + angles = angles*(np.pi)/180. + s = torch.sin(angles) + c = torch.cos(angles) + + cx, cy, cz = (c[:, 0], c[:, 1], c[:, 2]) + sx, sy, sz = (s[:, 0], s[:, 1], s[:, 2]) + + zeros = torch.zeros_like(s[:, 0]).to(angles.device) + ones = torch.ones_like(s[:, 0]).to(angles.device) + + # Rz.dot(Ry.dot(Rx)) + R_flattened = torch.stack( + [ + cz * cy, cz * sy * sx - sz * cx, cz * sy * cx + sz * sx, + sz * cy, sz * sy * sx + cz * cx, sz * sy * cx - cz * sx, + -sy, cy * sx, cy * cx, + ], + dim=0) #[batch_size, 9] + R = torch.reshape(R_flattened, (-1, 3, 3)) #[batch_size, 3, 3] + return R + +def binary_erosion(tensor, kernel_size=5): + # tensor: [bz, 1, h, w]. 
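+    # Erode each mask in the batch with scipy's binary_erosion on the CPU, +    # using an all-ones kernel_size x kernel_size structuring element, then +    # move the result back to the input tensor's device; a foreground pixel +    # survives only if its whole neighbourhood is foreground.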
+ device = tensor.device + mask = tensor.cpu().numpy() + structure = np.ones((kernel_size,kernel_size)) + new_mask = mask.copy() + for i in range(mask.shape[0]): + new_mask[i,0] = morphology.binary_erosion(mask[i,0], structure) + return torch.from_numpy(new_mask.astype(np.float32)).to(device) + +def flip_image(src_image, kps): + ''' + flip an image given by src_image horizontally, together with its 2d + keypoints; the keypoint order is remapped so that left/right pairs swap + ''' + h, w = src_image.shape[0], src_image.shape[1] + src_image = cv2.flip(src_image, 1) + if kps is not None: + kps[:, 0] = w - 1 - kps[:, 0] + kp_map = [5, 4, 3, 2, 1, 0, 11, 10, 9, 8, 7, 6, 12, 13] + kps[:, :] = kps[kp_map] + + return src_image, kps + +# -------------------------------------- io +def copy_state_dict(cur_state_dict, pre_state_dict, prefix='', load_name=None): + def _get_params(key): + key = prefix + key + if key in pre_state_dict: + return pre_state_dict[key] + return None + for k in cur_state_dict.keys(): + if load_name is not None: + if load_name not in k: + continue + v = _get_params(k) + try: + if v is None: + # print('parameter {} not found'.format(k)) + continue + cur_state_dict[k].copy_(v) + except Exception: + # print('copy param {} failed'.format(k)) + continue + +def check_mkdir(path): + if not os.path.exists(path): + print('creating %s' % path) + os.makedirs(path) + +def check_mkdirlist(pathlist): + for path in pathlist: + if not os.path.exists(path): + print('creating %s' % path) + os.makedirs(path) + +def tensor2image(tensor): + image = tensor.detach().cpu().numpy() + image = image*255. + image = np.maximum(np.minimum(image, 255), 0) + image = image.transpose(1,2,0)[:,:,[2,1,0]] + return image.astype(np.uint8).copy() + +def dict2obj(d): + # if isinstance(d, list): + # d = [dict2obj(x) for x in d] + if not isinstance(d, dict): + return d + class C(object): + pass + o = C() + for k in d: + o.__dict__[k] = dict2obj(d[k]) + return o + +class Struct(object): + def __init__(self, **kwargs): + for key, val in kwargs.items(): + setattr(self, key, val) + +# original saved file with DataParallel +def remove_module(state_dict): +# create new OrderedDict that does not contain `module.` + new_state_dict = OrderedDict() + for k, v in state_dict.items(): + name = k[7:] # remove `module.` + new_state_dict[name] = v + return new_state_dict + +def dict_tensor2npy(tensor_dict): + npy_dict = {} + for key in tensor_dict: + npy_dict[key] = tensor_dict[key][0].cpu().numpy() + return npy_dict + +# ---------------------------------- visualization +end_list = np.array([17, 22, 27, 42, 48, 31, 36, 68], dtype = np.int32) - 1 +def plot_kpts(image, kpts, color = 'r'): + ''' Draw 68 key points + Args: + image: the input image + kpt: (68, 3). + ''' + if color == 'r': + c = (255, 0, 0) + elif color == 'g': + c = (0, 255, 0) + elif color == 'b': + c = (0, 0, 255) + image = image.copy() + kpts = kpts.copy() + + for i in range(kpts.shape[0]): + st = kpts[i, :2] + if kpts.shape[1]==4: + if kpts[i, 3] > 0.5: + c = (0, 255, 0) + else: + c = (0, 0, 255) + image = cv2.circle(image, (int(st[0]), int(st[1])), 1, c, 2) + if i in end_list: + continue + ed = kpts[i + 1, :2] + image = cv2.line(image, (int(st[0]), int(st[1])), (int(ed[0]), int(ed[1])), (255, 255, 255), 1) + + return image + +def plot_verts(image, kpts, color = 'r'): + ''' Draw 68 key points + Args: + image: the input image + kpt: (68, 3). 
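+        color: 'r', 'g', 'b' or 'y', selecting the RGB draw colour.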
+ ''' + if color == 'r': + c = (255, 0, 0) + elif color == 'g': + c = (0, 255, 0) + elif color == 'b': + c = (0, 0, 255) + elif color == 'y': + c = (0, 255, 255) + image = image.copy() + + for i in range(kpts.shape[0]): + st = kpts[i, :2] + image = cv2.circle(image,(int(st[0]), int(st[1])), 1, c, 2) + + return image + +def tensor_vis_landmarks(images, landmarks, gt_landmarks=None, color = 'g', isScale=True): + # visualize landmarks + vis_landmarks = [] + images = images.cpu().numpy() + predicted_landmarks = landmarks.detach().cpu().numpy() + if gt_landmarks is not None: + gt_landmarks_np = gt_landmarks.detach().cpu().numpy() + for i in range(images.shape[0]): + image = images[i] + image = image.transpose(1,2,0)[:,:,[2,1,0]].copy(); image = (image*255) + if isScale: + predicted_landmark = predicted_landmarks[i]*image.shape[0]/2 + image.shape[0]/2 + else: + predicted_landmark = predicted_landmarks[i] + if predicted_landmark.shape[0] == 68: + image_landmarks = plot_kpts(image, predicted_landmark, color) + if gt_landmarks is not None: + image_landmarks = plot_verts(image_landmarks, gt_landmarks_np[i]*image.shape[0]/2 + image.shape[0]/2, 'r') + else: + image_landmarks = plot_verts(image, predicted_landmark, color) + if gt_landmarks is not None: + image_landmarks = plot_verts(image_landmarks, gt_landmarks_np[i]*image.shape[0]/2 + image.shape[0]/2, 'r') + vis_landmarks.append(image_landmarks) + + vis_landmarks = np.stack(vis_landmarks) + vis_landmarks = torch.from_numpy(vis_landmarks[:,:,:,[2,1,0]].transpose(0,3,1,2))/255.#, dtype=torch.float32) + return vis_landmarks diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/demos/__init__.py b/NeuralVoicePuppetry/neural-code/third/DECA/demos/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/demos/demo_reconstruct.py b/NeuralVoicePuppetry/neural-code/third/DECA/demos/demo_reconstruct.py new file mode 100755 index 0000000..4e5b77e --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/demos/demo_reconstruct.py @@ -0,0 +1,109 @@ +# -*- coding: utf-8 -*- +# +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# Using this computer program means that you agree to the terms +# in the LICENSE file included with this software distribution. +# Any use not explicitly granted by the LICENSE is prohibited. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# For comments or questions, please email us at deca@tue.mpg.de +# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de + +import torch +import os, sys +import cv2 +import numpy as np +from time import time +from scipy.io import savemat +import argparse +from tqdm import tqdm + +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) +from decalib.deca import DECA +from decalib.datasets import datasets +from decalib.utils import util +from decalib.utils.config import cfg as deca_cfg + + +def main(args): + savefolder = args.savefolder + device = args.device + os.makedirs(savefolder, exist_ok=True) + + # load test images + testdata = datasets.TestData(args.inputpath, iscrop=args.iscrop, face_detector=args.detector) + + # run DECA + deca_cfg.model.use_tex = args.useTex + deca = DECA(config = deca_cfg, device=device) + # for i in range(len(testdata)): + for i in tqdm(range(len(testdata))): + name = testdata[i]['imagename'] + images = testdata[i]['image'].to(device)[None,...] + codedict = deca.encode(images) + opdict, visdict = deca.decode(codedict) #tensor + + print(opdict.keys()) + print(visdict.keys()) + + if args.saveDepth or args.saveKpt or args.saveObj or args.saveMat or args.saveImages: + os.makedirs(os.path.join(savefolder, name), exist_ok=True) + # -- save results + if args.saveDepth: + depth_image = deca.render.render_depth(opdict['transformed_vertices']).repeat(1,3,1,1) + visdict['depth_images'] = depth_image + cv2.imwrite(os.path.join(savefolder, name, name + '_depth.jpg'), util.tensor2image(depth_image[0])) + if args.saveKpt: + np.savetxt(os.path.join(savefolder, name, name + '_kpt2d.txt'), opdict['landmarks2d'][0].cpu().numpy()) + np.savetxt(os.path.join(savefolder, name, name + '_kpt3d.txt'), opdict['landmarks3d'][0].cpu().numpy()) + if args.saveObj: + deca.save_obj(os.path.join(savefolder, name, name + '.obj'), opdict) + if args.saveMat: + opdict = util.dict_tensor2npy(opdict) + savemat(os.path.join(savefolder, name, name + '.mat'), opdict) + if args.saveVis: + cv2.imwrite(os.path.join(savefolder, name + '_vis.jpg'), deca.visualize(visdict)) + if args.saveImages: + for vis_name in ['inputs', 'rendered_images', 'albedo_images', 'shape_images', 'shape_detail_images']: + if vis_name not in visdict.keys(): + continue + image = util.tensor2image(visdict[vis_name][0]) + cv2.imwrite(os.path.join(savefolder, name, name + '_' + vis_name +'.jpg'), util.tensor2image(visdict[vis_name][0])) + print(f'-- please check the results in {savefolder}') + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='DECA: Detailed Expression Capture and Animation') + + parser.add_argument('-i', '--inputpath', default='TestSamples/examples', type=str, + help='path to the test data, can be image folder, image path, image list, video') + parser.add_argument('-s', '--savefolder', default='TestSamples/examples/results', type=str, + help='path to the output directory, where results(obj, txt files) will be stored.') + parser.add_argument('--device', default="cuda" if torch.cuda.is_available() else "cpu", type=str, + help='set device, cpu for using cpu' ) + # process test images + parser.add_argument('--iscrop', default=True, type=lambda x: x.lower() in ['true', '1'], + help='whether to crop input image, set false only when the test image are well cropped' ) + parser.add_argument('--detector', default='fan', type=str, + help='detector for cropping face, check decalib/detectors.py for details' ) + # save + parser.add_argument('--useTex', 
default=False, type=lambda x: x.lower() in ['true', '1'], + help='whether to use FLAME texture model to generate uv texture map, \ + set it to True only if you downloaded texture model' ) + parser.add_argument('--saveVis', default=True, type=lambda x: x.lower() in ['true', '1'], + help='whether to save visualization of output' ) + parser.add_argument('--saveKpt', default=False, type=lambda x: x.lower() in ['true', '1'], + help='whether to save 2D and 3D keypoints' ) + parser.add_argument('--saveDepth', default=False, type=lambda x: x.lower() in ['true', '1'], + help='whether to save depth image' ) + parser.add_argument('--saveObj', default=False, type=lambda x: x.lower() in ['true', '1'], + help='whether to save outputs as .obj, detail mesh will end with _detail.obj. \ + Note that saving objs could be slow' ) + parser.add_argument('--saveMat', default=False, type=lambda x: x.lower() in ['true', '1'], + help='whether to save outputs as .mat' ) + parser.add_argument('--saveImages', default=True, type=lambda x: x.lower() in ['true', '1'], + help='whether to save visualization output as seperate images' ) + main(parser.parse_args()) \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/demos/demo_teaser.py b/NeuralVoicePuppetry/neural-code/third/DECA/demos/demo_teaser.py new file mode 100755 index 0000000..e7b5b2d --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/demos/demo_teaser.py @@ -0,0 +1,126 @@ +# -*- coding: utf-8 -*- +# +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# Using this computer program means that you agree to the terms +# in the LICENSE file included with this software distribution. +# Any use not explicitly granted by the LICENSE is prohibited. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# For comments or questions, please email us at deca@tue.mpg.de +# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de + +import os, sys +import cv2 +import numpy as np +from time import time +from scipy.io import savemat +import argparse +import imageio +from skimage.transform import rescale +import torch + +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) +from decalib.deca import DECA +from decalib.datasets import datasets +from decalib.utils import util +from decalib.utils.rotation_converter import batch_euler2axis, deg2rad + +def main(args): + savefolder = args.savefolder + device = args.device + os.makedirs(savefolder, exist_ok=True) + + # load test images + testdata = datasets.TestData(args.inputpath, iscrop=args.iscrop, face_detector=args.detector) + expdata = datasets.TestData(args.exp_path, iscrop=args.iscrop, face_detector=args.detector) + # DECA + deca = DECA(device=device) + + visdict_list_list = [] + for i in range(len(testdata)): + name = testdata[i]['imagename'] + images = testdata[i]['image'].to(device)[None,...] 
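+        # encode regresses the DECA/FLAME code dict (shape, expression, pose, +        # camera, light, texture) from the crop; decode renders it back to +        # images. Below, the pose and cam entries are overwritten to re-render +        # the same identity under a sweep of yaw angles and jaw poses.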
+ codedict = deca.encode(images) + opdict, visdict = deca.decode(codedict) #tensor + ### show shape with different views and expressions + visdict_list = [] + max_yaw = 30 + yaw_list = list(range(0,max_yaw,5)) + list(range(max_yaw,-max_yaw,-5)) + list(range(-max_yaw,0,5)) + for k in yaw_list: #jaw angle from -50 to 50 + ## yaw angle + euler_pose = torch.randn((1, 3)) + euler_pose[:,1] = k#torch.rand((self.batch_size))*160 - 80 + euler_pose[:,0] = 0#(torch.rand((self.batch_size))*60 - 30)*(2./euler_pose[:,1].abs()) + euler_pose[:,2] = 0#(torch.rand((self.batch_size))*60 - 30)*(2./euler_pose[:,1].abs()) + global_pose = batch_euler2axis(deg2rad(euler_pose[:,:3].cuda())) + codedict['pose'][:,:3] = global_pose + codedict['cam'][:,:] = 0. + codedict['cam'][:,0] = 8 + _, visdict_view = deca.decode(codedict) + visdict = {x:visdict[x] for x in ['inputs', 'shape_detail_images']} + visdict['pose'] = visdict_view['shape_detail_images'] + visdict_list.append(visdict) + + euler_pose = torch.zeros((1, 3)) + global_pose = batch_euler2axis(deg2rad(euler_pose[:,:3].cuda())) + codedict['pose'][:,:3] = global_pose + for (i,k) in enumerate(range(0,31,2)): #jaw angle from -50 to 50 + # expression: jaw pose + euler_pose = torch.randn((1, 3)) + euler_pose[:,0] = k#torch.rand((self.batch_size))*160 - 80 + euler_pose[:,1] = 0#(torch.rand((self.batch_size))*60 - 30)*(2./euler_pose[:,1].abs()) + euler_pose[:,2] = 0#(torch.rand((self.batch_size))*60 - 30)*(2./euler_pose[:,1].abs()) + jaw_pose = batch_euler2axis(deg2rad(euler_pose[:,:3].cuda())) + codedict['pose'][:,3:] = jaw_pose + _, visdict_view = deca.decode(codedict) + visdict_list[i]['exp'] = visdict_view['shape_detail_images'] + count = i + + for (i,k) in enumerate(range(len(expdata))): #jaw angle from -50 to 50 + # expression: jaw pose + exp_images = expdata[i]['image'].to(device)[None,...] 
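+        # re-encode the expression image and copy only its jaw pose and +        # expression code into the identity's code dict, so the rendered face +        # keeps the identity but adopts the transferred expression.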
+ exp_codedict = deca.encode(exp_images) + # transfer exp code + codedict['pose'][:,3:] = exp_codedict['pose'][:,3:] + codedict['exp'] = exp_codedict['exp'] + _, exp_visdict = deca.decode(codedict) + visdict_list[i+count]['exp'] = exp_visdict['shape_detail_images'] + + visdict_list_list.append(visdict_list) + + ### write gif + writer = imageio.get_writer(os.path.join(savefolder, 'teaser.gif'), mode='I') + for i in range(len(yaw_list)): + grid_image_list = [] + for j in range(len(testdata)): + grid_image = deca.visualize(visdict_list_list[j][i]) + grid_image_list.append(grid_image) + grid_image_all = np.concatenate(grid_image_list, 0) + grid_image_all = rescale(grid_image_all, 0.6, multichannel=True) # resize for showing in github + writer.append_data(grid_image_all[:,:,[2,1,0]]) + + print(f'-- please check the teaser figure in {savefolder}') + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='DECA: Detailed Expression Capture and Animation') + + parser.add_argument('-i', '--inputpath', default='TestSamples/teaser', type=str, + help='path to the test data, can be image folder, image path, image list, video') + parser.add_argument('-e', '--exp_path', default='TestSamples/exp', type=str, + help='path to expression') + parser.add_argument('-s', '--savefolder', default='TestSamples/teaser/results', type=str, + help='path to the output directory, where results(obj, txt files) will be stored.') + parser.add_argument('--device', default="cuda" if torch.cuda.is_available() else "cpu", type=str, + help='set device, cpu for using cpu' ) + # process test images + parser.add_argument('--iscrop', default=True, type=lambda x: x.lower() in ['true', '1'], + help='whether to crop input image, set false only when the test image are well cropped' ) + parser.add_argument('--detector', default='fan', type=str, + help='detector for cropping face, check detectos.py for details' ) + + main(parser.parse_args()) \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/demos/demo_transfer.py b/NeuralVoicePuppetry/neural-code/third/DECA/demos/demo_transfer.py new file mode 100755 index 0000000..022287e --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/demos/demo_transfer.py @@ -0,0 +1,127 @@ +# -*- coding: utf-8 -*- +# +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# Using this computer program means that you agree to the terms +# in the LICENSE file included with this software distribution. +# Any use not explicitly granted by the LICENSE is prohibited. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# For comments or questions, please email us at deca@tue.mpg.de +# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de + +import os, sys +import cv2 +import torch +import numpy as np +from time import time +from scipy.io import savemat +import argparse + +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) +from decalib.deca import DECA +from decalib.datasets import datasets +from decalib.utils import util +from decalib.utils.config import cfg as deca_cfg + +def main(args): + savefolder = args.savefolder + device = args.device + os.makedirs(savefolder, exist_ok=True) + + # load test images + testdata = datasets.TestData(args.image_path, iscrop=args.iscrop, face_detector=args.detector) + expdata = datasets.TestData(args.exp_path, iscrop=args.iscrop, face_detector=args.detector) + + # run DECA + deca_cfg.model.use_tex = args.useTex + deca = DECA(config = deca_cfg, device=device) + # identity reference + i = 0 + name = testdata[i]['imagename'] + savepath = '{}/{}.jpg'.format(savefolder, name) + images = testdata[i]['image'].to(device)[None,...] + id_codedict = deca.encode(images) + id_opdict, id_visdict = deca.decode(id_codedict) + id_visdict = {x:id_visdict[x] for x in ['inputs', 'shape_detail_images']} + + # -- expression transfer + # exp code from image + exp_images = expdata[i]['image'].to(device)[None,...] + exp_codedict = deca.encode(exp_images) + # transfer exp code + id_codedict['pose'][:,3:] = exp_codedict['pose'][:,3:] + id_codedict['exp'] = exp_codedict['exp'] + transfer_opdict, transfer_visdict = deca.decode(id_codedict) + id_visdict['transferred_shape'] = transfer_visdict['shape_detail_images'] + cv2.imwrite(os.path.join(savefolder, name + '_animation.jpg'), deca.visualize(id_visdict)) + + transfer_opdict['uv_texture_gt'] = id_opdict['uv_texture_gt'] + if args.saveDepth or args.saveKpt or args.saveObj or args.saveMat or args.saveImages: + os.makedirs(os.path.join(savefolder, name, 'reconstruction'), exist_ok=True) + os.makedirs(os.path.join(savefolder, name, 'animation'), exist_ok=True) + + # -- save results + image_name = name + for save_type in ['reconstruction', 'animation']: + if save_type == 'reconstruction': + visdict = id_visdict; opdict = id_opdict + else: + visdict = transfer_visdict; opdict = transfer_opdict + if args.saveDepth: + depth_image = deca.render.render_depth(opdict['transformed_vertices']).repeat(1,3,1,1) + visdict['depth_images'] = depth_image + cv2.imwrite(os.path.join(savefolder, name, save_type, name + '_depth.jpg'), util.tensor2image(depth_image[0])) + if args.saveKpt: + np.savetxt(os.path.join(savefolder, name, save_type, name + '_kpt2d.txt'), opdict['landmarks2d'][0].cpu().numpy()) + np.savetxt(os.path.join(savefolder, name, save_type, name + '_kpt3d.txt'), opdict['landmarks3d'][0].cpu().numpy()) + if args.saveObj: + deca.save_obj(os.path.join(savefolder, name, save_type, name + '.obj'), opdict) + if args.saveMat: + opdict = util.dict_tensor2npy(opdict) + savemat(os.path.join(savefolder, name, save_type, name + '.mat'), opdict) + if args.saveImages: + for vis_name in ['inputs', 'rendered_images', 'albedo_images', 'shape_images', 'shape_detail_images']: + if vis_name not in visdict.keys(): + continue + image = util.tensor2image(visdict[vis_name][0]) + cv2.imwrite(os.path.join(savefolder, name, save_type, name + '_' + vis_name + '.jpg'), image) + print(f'-- please check the results in {savefolder}') + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='DECA: Detailed 
Expression Capture and Animation') + + parser.add_argument('-i', '--image_path', default='TestSamples/examples/IMG_0392_inputs.jpg', type=str, + help='path to input image') + parser.add_argument('-e', '--exp_path', default='TestSamples/exp/7.jpg', type=str, + help='path to expression') + parser.add_argument('-s', '--savefolder', default='TestSamples/animation_results', type=str, + help='path to the output directory, where results (obj, txt files) will be stored.') + parser.add_argument('--device', default="cuda" if torch.cuda.is_available() else "cpu", type=str, + help='set device, cpu for using cpu' ) + # process test images + parser.add_argument('--iscrop', default=True, type=lambda x: x.lower() in ['true', '1'], + help='whether to crop input image, set false only when the test images are well cropped' ) + parser.add_argument('--detector', default='fan', type=str, + help='detector for cropping face, check decalib/detectors.py for details' ) + # save + parser.add_argument('--useTex', default=False, type=lambda x: x.lower() in ['true', '1'], + help='whether to use FLAME texture model to generate uv texture map, \ + set it to True only if you downloaded texture model' ) + parser.add_argument('--saveVis', default=True, type=lambda x: x.lower() in ['true', '1'], + help='whether to save visualization of output' ) + parser.add_argument('--saveKpt', default=True, type=lambda x: x.lower() in ['true', '1'], + help='whether to save 2D and 3D keypoints' ) + parser.add_argument('--saveDepth', default=True, type=lambda x: x.lower() in ['true', '1'], + help='whether to save depth image' ) + parser.add_argument('--saveObj', default=True, type=lambda x: x.lower() in ['true', '1'], + help='whether to save outputs as .obj' ) + parser.add_argument('--saveMat', default=False, type=lambda x: x.lower() in ['true', '1'], + help='whether to save outputs as .mat' ) + parser.add_argument('--saveImages', default=True, type=lambda x: x.lower() in ['true', '1'], + help='whether to save visualization output as separate images' ) + main(parser.parse_args()) diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/demos/run_on_img.py b/NeuralVoicePuppetry/neural-code/third/DECA/demos/run_on_img.py new file mode 100644 index 0000000..e0d64f3 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/demos/run_on_img.py @@ -0,0 +1,127 @@ + +import os, sys +import cv2 +import torch +import numpy as np +from time import time +from scipy.io import savemat +import argparse +from tqdm import tqdm +import matplotlib.pyplot as plt +from skimage.transform import estimate_transform, warp, resize, rescale + +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))) +from decalib.deca import DECA +from decalib.datasets import datasets +from decalib.utils import util +from decalib.utils.config import cfg as deca_cfg + + +def warp_back(image, oldimage, tform): + + alpha = 0.6 + + oldimage = oldimage.astype(np.float64) / 255. + new_size = oldimage.shape + + dst_image = warp(image, tform, output_shape=new_size) + + # Mask of non-black pixels. 
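+    # Pixels that the inverse warp left black carry no information from the +    # rendered image; those are filled from the original frame below before +    # the two images are alpha-blended.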
+ mask = np.where(np.all(dst_image == [0, 0, 0], axis=-1)) + dst_image[mask] = oldimage[mask] + + res = cv2.addWeighted(oldimage, 1 - alpha, dst_image, alpha, 0) + res = res[:, :, ::-1] + + return res + + +def main(args): + savefolder = args.savefolder + device = args.device + os.makedirs(savefolder, exist_ok=True) + + # load test images + testdata = datasets.TestData(args.inputpath, iscrop=args.iscrop, face_detector=args.detector) + + # run DECA + deca_cfg.model.use_tex = args.useTex + deca = DECA(config=deca_cfg, device=device) + # for i in range(len(testdata)): + for i in tqdm(range(len(testdata))): + + name = testdata[i]['imagename'] + images = testdata[i]['image'].to(device)[None, ...] + original_images = testdata[i]['original_image'].to(device)[None, ...] + + codedict = deca.encode(images) + opdict, visdict = deca.decode(codedict) # tensor + + images = util.tensor2image(images[0]) + original_images = util.tensor2image(original_images[0]) + image = util.tensor2image(visdict['shape_detail_images'][0]) + + # plt.imshow(image) + # plt.show() + + new = warp_back(image, original_images, testdata[i]['tform']) + plt.imshow(new) + plt.show() + + break + + + + + # if args.saveDepth or args.saveKpt or args.saveObj or args.saveMat or args.saveImages: + # os.makedirs(os.path.join(savefolder, name), exist_ok=True) + # # -- save results + # if args.saveObj: + # deca.save_obj(os.path.join(savefolder, name, name + '.obj'), opdict) + # if args.saveMat: + # opdict = util.dict_tensor2npy(opdict) + # savemat(os.path.join(savefolder, name, name + '.mat'), opdict) + # if args.saveVis: + # cv2.imwrite(os.path.join(savefolder, name + '_vis.jpg'), deca.visualize(visdict)) + # if args.saveImages: + # for vis_name in ['inputs', 'rendered_images', 'albedo_images', 'shape_images', 'shape_detail_images']: + # if vis_name not in visdict.keys(): + # continue + # image = util.tensor2image(visdict[vis_name][0]) + # cv2.imwrite(os.path.join(savefolder, name, name + '_' + vis_name + '.jpg'), + # util.tensor2image(visdict[vis_name][0])) + # print(f'-- please check the results in {savefolder}') + + +if __name__ == '__main__': + parser = argparse.ArgumentParser(description='DECA: Detailed Expression Capture and Animation') + + parser.add_argument('-i', '--inputpath', default='TestSamples/SRF', type=str, + help='path to the test data, can be image folder, image path, image list, video') + parser.add_argument('-s', '--savefolder', default='TestSamples/SRF/results', type=str, + help='path to the output directory, where results(obj, txt files) will be stored.') + parser.add_argument('--device', default="cuda" if torch.cuda.is_available() else "cpu", type=str, + help='set device, cpu for using cpu') + # process test images + parser.add_argument('--iscrop', default=True, type=lambda x: x.lower() in ['true', '1'], + help='whether to crop input image, set false only when the test image are well cropped') + parser.add_argument('--detector', default='fan', type=str, + help='detector for cropping face, check decalib/detectors.py for details') + # save + parser.add_argument('--useTex', default=False, type=lambda x: x.lower() in ['true', '1'], + help='whether to use FLAME texture model to generate uv texture map, \ + set it to True only if you downloaded texture model') + parser.add_argument('--saveVis', default=True, type=lambda x: x.lower() in ['true', '1'], + help='whether to save visualization of output') + parser.add_argument('--saveKpt', default=False, type=lambda x: x.lower() in ['true', '1'], + help='whether to save 2D and 3D 
keypoints') + parser.add_argument('--saveDepth', default=False, type=lambda x: x.lower() in ['true', '1'], + help='whether to save depth image') + parser.add_argument('--saveObj', default=False, type=lambda x: x.lower() in ['true', '1'], + help='whether to save outputs as .obj, detail mesh will end with _detail.obj. \ + Note that saving objs could be slow') + parser.add_argument('--saveMat', default=False, type=lambda x: x.lower() in ['true', '1'], + help='whether to save outputs as .mat') + parser.add_argument('--saveImages', default=True, type=lambda x: x.lower() in ['true', '1'], + help='whether to save visualization output as separate images') + main(parser.parse_args()) \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/install_conda.sh b/NeuralVoicePuppetry/neural-code/third/DECA/install_conda.sh new file mode 100755 index 0000000..76c91f9 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/install_conda.sh @@ -0,0 +1,10 @@ +#!/usr/bin/env bash + +export CONDA_ENV_NAME=deca-env +echo $CONDA_ENV_NAME + +conda create -n $CONDA_ENV_NAME python=3.7 + +eval "$(conda shell.bash hook)" +conda activate $CONDA_ENV_NAME +pip install -r requirements.txt \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/install_pip.sh b/NeuralVoicePuppetry/neural-code/third/DECA/install_pip.sh new file mode 100755 index 0000000..7346c85 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/install_pip.sh @@ -0,0 +1,8 @@ +#!/usr/bin/env bash + +echo "Creating virtual environment" +python3.7 -m venv deca-env +echo "Activating virtual environment" + +source $PWD/deca-env/bin/activate +$PWD/deca-env/bin/pip install -r requirements.txt \ No newline at end of file diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/requirements.txt b/NeuralVoicePuppetry/neural-code/third/DECA/requirements.txt new file mode 100644 index 0000000..6539975 --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/requirements.txt @@ -0,0 +1,12 @@ +# to install all these requirements, run +#`pip install -r requirements.txt` +numpy>=1.18.5 +scipy>=1.4.1 +chumpy>=0.69 +scikit-image>=0.15 +opencv-python>=4.1.1 +PyYAML>=5.1.1 +torch==1.6.0 # for compatibility with pytorch3d +torchvision==0.7.0 +face-alignment +# pytorch3d diff --git a/NeuralVoicePuppetry/neural-code/third/DECA/test_eye_gazing.py b/NeuralVoicePuppetry/neural-code/third/DECA/test_eye_gazing.py new file mode 100644 index 0000000..ee829ec --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DECA/test_eye_gazing.py @@ -0,0 +1,65 @@ +import sys +import os +import torch +import numpy as np +from PIL import Image +import torch.nn.functional as F + + +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../third/DECA/'))) +from decalib.deca import DECA +from decalib.datasets import datasets +from decalib.utils.config import cfg as deca_cfg + +def save_img(name, image): + to_save = image[0].permute(1, 2, 0).cpu().numpy() + to_save = to_save * 255. + to_save = to_save.astype(np.uint8) + filename = name + '.jpg' + img = Image.fromarray(to_save) + img.save(filename) + +deca_cfg.model.use_tex = False +device = "cuda" if torch.cuda.is_available() else "cpu" +deca = DECA(config=deca_cfg, device=device) + +# load test images +testdata = datasets.TestData('test_expression.jpg', iscrop=True, face_detector='fan') +images = testdata[0]['image'].to(device)[None, ...] 
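+# Encode the expression image to extract its expression code (only used via +# the commented-out assignment further down); the gaze image encoded below +# provides the base reconstruction that decode() and decode_eyes() are +# compared on.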
+codedict = deca.encode(images) +expr = codedict['exp'] + +# load test images +testdata = datasets.TestData('test_gaze.jpg', iscrop=True, face_detector='fan') +images = testdata[0]['image'].to(device)[None, ...] + +codedict = deca.encode(images) +opdict, visdict = deca.decode(codedict) + +save_img('shape', visdict['shape_images']) + +verts_og = opdict['vertices'] +grid_og = opdict['grid'] +texture_og = opdict['uv_texture_gt'] +image = F.grid_sample(texture_og, grid_og, align_corners=False) +save_img('rendered_image_og', image) + + +# codedict['exp'] = expr +opdict, visdict = deca.decode_eyes(codedict) +verts_gaze = opdict['vertices'] +grid_gaze = opdict['grid'] +texture_gaze = opdict['uv_texture_gt'] +image = F.grid_sample(texture_og, grid_gaze, align_corners=False) +save_img('rendered_image_gaze', image) + + +euclidean_dist = ((verts_og - verts_gaze) ** 2).sum().item() +print('Mesh distance: ', euclidean_dist) + +grid_dist = ((grid_og - grid_gaze) ** 2).sum().item() +print('Grid distance: ', grid_dist) + +texture_dist = ((texture_og - texture_gaze) ** 2).sum().item() +print('Texture distance: ', texture_dist) diff --git a/NeuralVoicePuppetry/neural-code/third/DeepSpeech/models/output_graph.pb b/NeuralVoicePuppetry/neural-code/third/DeepSpeech/models/output_graph.pb new file mode 100644 index 0000000..da61e4e --- /dev/null +++ b/NeuralVoicePuppetry/neural-code/third/DeepSpeech/models/output_graph.pb @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:309a4c24f861edca2a027e3d9ecd5ae34468d8b20ed4d9be694858e0bd6f0640 +size 490979273 diff --git a/NeuralVoicePuppetry/neural-code/third/__init__.py b/NeuralVoicePuppetry/neural-code/third/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural_voice_backend_api/__init__.py b/NeuralVoicePuppetry/neural_voice_backend_api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural_voice_backend_api/app.py b/NeuralVoicePuppetry/neural_voice_backend_api/app.py new file mode 100644 index 0000000..81edc88 --- /dev/null +++ b/NeuralVoicePuppetry/neural_voice_backend_api/app.py @@ -0,0 +1,120 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import logging +from enum import Enum +from http import HTTPStatus +from typing import Union, List + +from fastapi import UploadFile, HTTPException, Query, Depends +from fastapi.responses import FileResponse +from mtc_api_utils.api import BaseApi +from mtc_api_utils.api_types import FirebaseUser +from mtc_api_utils.clients.firebase_client import firebase_user_auth + +from avatar_backend_api.api_types import ApiRoute, AvatarModelRequest +from avatar_backend_api.background_tools.inference_queue import InferenceQueueTask +from avatar_backend_api.clients.io_client import IoClient, IoClientException +from avatar_backend_api.models.mock_avatar_model import MockAvatarModel +from neural_voice_backend_api.config import NeuralVoiceConfig +from neural_voice_backend_api.neural_voice_inference_queue import NeuralVoiceInferenceQueue +from neural_voice_backend_api.neural_voice_model import NeuralVoiceModel + +NeuralVoiceConfig.print_config() + +user_auth = firebase_user_auth(config=NeuralVoiceConfig) + +io_client = IoClient(audio_base_path=NeuralVoiceConfig.audio_input_dir, video_base_path=NeuralVoiceConfig.video_output_dir, user_dir_mode=False) +neural_voice_model = MockAvatarModel(io_client=io_client) if NeuralVoiceConfig.mockBackend else 
NeuralVoiceModel(io_client=io_client) + +inference_queue = NeuralVoiceInferenceQueue( + model=neural_voice_model, + io_client=io_client, + audio_input_dir=NeuralVoiceConfig.audio_input_dir, + video_output_dir=NeuralVoiceConfig.video_output_dir, +) + +app = BaseApi(is_ready=neural_voice_model.is_ready, config=NeuralVoiceConfig) +tags: List[Union[str, Enum]] = ["Avatar Model"] + +logger = logging.Logger("App") + + +@app.get( + path=ApiRoute.video_ids.value, + response_model=List[str], + dependencies=[Depends(user_auth.with_roles(NeuralVoiceConfig.required_roles))], + tags=tags, +) +async def list_video_ids() -> List[str]: + return [path.split("/")[-1].split(".")[0] for path in io_client.video.list_videos()] + + +@app.get( + path=ApiRoute.video.value, + status_code=HTTPStatus.ACCEPTED, + response_class=FileResponse, + dependencies=[Depends(user_auth.with_roles(NeuralVoiceConfig.required_roles))], + tags=tags, +) +async def get_video(video_id: str = Query(alias="videoId")): + try: + return io_client.video.read_from_disk(video_id=video_id) + + except IoClientException: + logger.info(f"No video found on disk for {video_id=}, checking whether its audio is still being processed") + + if io_client.audio.file_exists(video_id=video_id): + raise HTTPException( + status_code=HTTPStatus.PROCESSING, + detail=f"Audio with {video_id=} is currently being processed", + ) + + else: + raise HTTPException( + status_code=HTTPStatus.NOT_FOUND, + detail=f"There is no file available for {video_id=}. Either it has not yet been generated or it was removed in the meantime", + ) + + +@app.post( + path=ApiRoute.inference.value, + status_code=HTTPStatus.ACCEPTED, + response_model=AvatarModelRequest, + dependencies=[Depends(user_auth.with_roles(NeuralVoiceConfig.required_roles))], + tags=tags, +) +async def post_audio( + audio: UploadFile, + request_metadata: AvatarModelRequest = Depends(), +) -> AvatarModelRequest: + print(f"Processing {request_metadata.video_id}") + + if not request_metadata.video_id: + request_metadata.video_id = audio.filename + print(f"No video_id provided, falling back to {request_metadata.video_id=}") + + io_client.audio.save_to_disk(audio=audio, video_id=request_metadata.video_id) + + inference_queue.add_task( + task=InferenceQueueTask( + request=request_metadata, + ), + ) + + return request_metadata + + +@app.delete( + path=ApiRoute.video.value, + response_model=str, + dependencies=[Depends(user_auth)], + tags=tags, +) +async def delete_video(video_id: str = Query(alias="videoId"), user: FirebaseUser = Depends(user_auth)) -> str: + try: + io_client.video.delete_file(video_id=video_id) + except FileNotFoundError: + raise HTTPException(status_code=HTTPStatus.NOT_FOUND, detail=f"No files with {video_id=} found") + + return f"The files with {video_id=} were removed successfully" diff --git a/NeuralVoicePuppetry/neural_voice_backend_api/config.py b/NeuralVoicePuppetry/neural_voice_backend_api/config.py new file mode 100644 index 0000000..fa68ffc --- /dev/null +++ b/NeuralVoicePuppetry/neural_voice_backend_api/config.py @@ -0,0 +1,26 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import os.path + +from avatar_backend_api.config import AvatarConfig +from mtc_api_utils.config import Config + + +class NeuralVoiceConfig(AvatarConfig): + mockBackend: bool = Config.parse_env_var("MOCK_BACKEND", default="False", convert_type=bool) + + # Data configs + db_filepath: str = Config.parse_env_var("DB_FILEPATH", default="/tmp/tinyDB/neuralVoices.json") + + neural_code_base_dir: str = "/app/neural-code" # Location of the neural-code dir in the execution environment + + 
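# Everything below is resolved relative to data_base_dir: input_data/{audio,video} + # hold the incoming files, output_data/{features,videos,checkpoints,mappings} + # the generated artifacts. + 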
data_base_dir: str = Config.parse_env_var("DATA_BASE_DIR", default="/tmp/neuralVoice") + audio_input_dir: str = os.path.join(data_base_dir, "input_data", "audio") + video_input_dir: str = os.path.join(data_base_dir, "input_data", "video") + + output_data_path: str = os.path.join(data_base_dir, "output_data") + features_dir: str = os.path.join(output_data_path, "features") + video_output_dir: str = os.path.join(output_data_path, "videos") + checkpoints_dir: str = os.path.join(output_data_path, "checkpoints") + mappings_dir = os.path.join(output_data_path, "mappings") diff --git a/NeuralVoicePuppetry/neural_voice_backend_api/neural_voice_inference_queue.py b/NeuralVoicePuppetry/neural_voice_backend_api/neural_voice_inference_queue.py new file mode 100644 index 0000000..1b9c1fe --- /dev/null +++ b/NeuralVoicePuppetry/neural_voice_backend_api/neural_voice_inference_queue.py @@ -0,0 +1,28 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import os +import shutil + +from avatar_backend_api.background_tools.inference_queue import InferenceQueue, InferenceQueueTask +from avatar_backend_api.clients.io_client import VIDEO_EXTENSION + +from neural_voice_backend_api.config import NeuralVoiceConfig + + +class NeuralVoiceInferenceQueue(InferenceQueue): + + def post_processing(self, task: InferenceQueueTask) -> None: + # Delete input audio + self.io_client.audio.delete_file(video_id=task.request.video_id) + + # Delete features + features_path = os.path.join(NeuralVoiceConfig.features_dir, task.request.video_id) + shutil.rmtree(path=features_path, ignore_errors=True) + + # Rename file and return video path + if not NeuralVoiceConfig.mockBackend: + self.io_client.video.rename_file( + filename=f"{task.request.video_id}_to_{task.request.avatar.name}{VIDEO_EXTENSION}", + new_filename=task.request.video_id + VIDEO_EXTENSION, + ) diff --git a/NeuralVoicePuppetry/neural_voice_backend_api/neural_voice_model.py b/NeuralVoicePuppetry/neural_voice_backend_api/neural_voice_model.py new file mode 100644 index 0000000..5b2eff8 --- /dev/null +++ b/NeuralVoicePuppetry/neural_voice_backend_api/neural_voice_model.py @@ -0,0 +1,62 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details +import glob +import os +import subprocess +from typing import Dict + +from avatar_backend_api.api_types import InferenceQueueTask +from avatar_backend_api.models.avatar_base_model import AvatarBaseModel +from neural_voice_backend_api.config import NeuralVoiceConfig + + +class NeuralVoiceModel(AvatarBaseModel): + + def init_model(self): + for avatar in NeuralVoiceConfig.available_avatars.keys(): + video_input_dir = os.path.join(NeuralVoiceConfig.video_input_dir, avatar) + features_dir = os.path.join(NeuralVoiceConfig.features_dir, avatar) + checkpoints_glob = glob.glob(os.path.join(NeuralVoiceConfig.checkpoints_dir, f"*.{avatar}")) + mappings_glob = glob.glob(os.path.join(NeuralVoiceConfig.mappings_dir, "*")) + + if not os.path.isdir(video_input_dir): + raise ValueError(f"Could not find directory {video_input_dir}, which is supposed to contain the input video files for avatar {avatar}") + + # Commented for training + # if not os.path.isdir(features_dir): + # raise ValueError(f"Could not find directory {features_dir}, which is supposed to contain the feature files for avatar {avatar}") + + # if not len(checkpoints_glob) >= 1: + # raise ValueError(f"Could not find a checkpoints directory for avatar {avatar} under 
{NeuralVoiceConfig.checkpoints_dir}") + + # if not len(mappings_glob) >= 1: + # raise ValueError(f"Could not find a mappings directory for avatar {avatar} under {NeuralVoiceConfig.mappings_dir}") + + # Create input & output directories + os.makedirs(NeuralVoiceConfig.audio_input_dir, exist_ok=True) + os.makedirs(NeuralVoiceConfig.features_dir, exist_ok=True) + os.makedirs(NeuralVoiceConfig.video_input_dir, exist_ok=True) + os.makedirs(NeuralVoiceConfig.video_output_dir, exist_ok=True) + + print("All files were successfully downloaded, neuralVoice model is ready") + + def inference(self, task: InferenceQueueTask) -> None: + """ Takes an audio_name, which represents a filename without extension, and an avatar, runs the inference and returns the path to the resulting outpout video file""" + + # Run inference script + subprocess.run( + args=["bash", "full_pipeline_nvp.sh", task.request.video_id, task.request.avatar.name, NeuralVoiceConfig.data_base_dir], + cwd=NeuralVoiceConfig.neural_code_base_dir, # Working directory from which to run the shell command + universal_newlines=True, # Decode output + check=True, # Throw exception if return code != 0 + ) + print(f"Inference completed for {task.request.video_id} - {task.request.avatar.value}") + + @staticmethod + def available_avatars() -> Dict[str, str]: + with os.scandir(NeuralVoiceConfig.video_input_dir) as iterator: + return { + avatar_dir.name: NeuralVoiceConfig.avatar_short_name(avatar_name=avatar_dir.name) + for avatar_dir in iterator + if avatar_dir.is_dir() + } diff --git a/NeuralVoicePuppetry/neural_voice_backend_api/tests/Test-2.wav b/NeuralVoicePuppetry/neural_voice_backend_api/tests/Test-2.wav new file mode 100644 index 0000000..2debc29 Binary files /dev/null and b/NeuralVoicePuppetry/neural_voice_backend_api/tests/Test-2.wav differ diff --git a/NeuralVoicePuppetry/neural_voice_backend_api/tests/__init__.py b/NeuralVoicePuppetry/neural_voice_backend_api/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/NeuralVoicePuppetry/neural_voice_backend_api/tests/integration_test_client.py b/NeuralVoicePuppetry/neural_voice_backend_api/tests/integration_test_client.py new file mode 100644 index 0000000..0355a9b --- /dev/null +++ b/NeuralVoicePuppetry/neural_voice_backend_api/tests/integration_test_client.py @@ -0,0 +1,60 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +from http import HTTPStatus +from typing import List, BinaryIO, Tuple + +from avatar_backend_api.api_types import ApiRoute, AvatarModelRequest +from mtc_api_utils.api import BaseApi +from mtc_api_utils.clients.api_client import ApiClient +from starlette.testclient import TestClient + +from avatar_backend_api.config import AvatarConfig + + +class AvatarIntegrationTestClient(ApiClient): + + def __init__(self, test_app: BaseApi): + super().__init__(backend_url="", http_client=TestClient(app=test_app)) + + def list_video_ids(self) -> List[str]: + resp = self.http_client.get( + url=ApiRoute.video_ids.value, + ) + resp.raise_for_status() + + return resp.json() + + def get_video(self, video_id: str) -> Tuple[int, bytes]: + resp = self.http_client.get( + url=ApiRoute.video.value, + params={"videoId": video_id}, + ) + + if resp.status_code >= 300: + resp.raise_for_status() + + return resp.status_code, resp.content + + def post_audio(self, audio: BinaryIO, metadata: AvatarModelRequest) -> AvatarModelRequest: + resp = self.http_client.post( + url=ApiRoute.inference.value, + params=metadata.json_dict, 
+ files={"audio": audio}, + ) + + if resp.status_code >= 300: + print(f"{resp.reason=}") + print(f"{resp.text=}") + print(f"{resp.raw=}") + + resp.raise_for_status() + assert resp.status_code == HTTPStatus.ACCEPTED + + return AvatarModelRequest.parse_obj(resp.json()) + + def delete_video(self, video_id: str) -> str: + resp = self.http_client.delete(ApiRoute.video_url(backend_url="", video_id=video_id)) + resp.raise_for_status() + + return resp.text diff --git a/NeuralVoicePuppetry/neural_voice_backend_api/tests/ping.wav b/NeuralVoicePuppetry/neural_voice_backend_api/tests/ping.wav new file mode 100644 index 0000000..c0dc31c Binary files /dev/null and b/NeuralVoicePuppetry/neural_voice_backend_api/tests/ping.wav differ diff --git a/NeuralVoicePuppetry/neural_voice_backend_api/tests/test.mp3 b/NeuralVoicePuppetry/neural_voice_backend_api/tests/test.mp3 new file mode 100644 index 0000000..7517a57 Binary files /dev/null and b/NeuralVoicePuppetry/neural_voice_backend_api/tests/test.mp3 differ diff --git a/NeuralVoicePuppetry/neural_voice_backend_api/tests/test_integration.py b/NeuralVoicePuppetry/neural_voice_backend_api/tests/test_integration.py new file mode 100644 index 0000000..4118943 --- /dev/null +++ b/NeuralVoicePuppetry/neural_voice_backend_api/tests/test_integration.py @@ -0,0 +1,74 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import os +import unittest +from enum import Enum +from http import HTTPStatus +from time import sleep + +import httpx + +from avatar_backend_api.api_types import AvatarModelRequest, BaseAvatar +from neural_voice_backend_api.app import app +from neural_voice_backend_api.tests.integration_test_client import AvatarIntegrationTestClient + + +class TestFile(Enum): + value: str + test_mp3 = "test.mp3" + ping_wav = "ping.wav" + voice_wav = "voice_test.wav" + frontend_wav = "Test-2.wav" + + +test_dir = os.path.dirname(os.path.realpath(__file__)) +test_file_name = TestFile.frontend_wav.value +test_file_path = os.path.join(test_dir, test_file_name) + +TEST_REQUEST = AvatarModelRequest( + video_id="id", + avatar=BaseAvatar.Jennifer_355_9415, +) + + +class TestIntegration(unittest.TestCase): + + def setUp(self) -> None: + self.client = AvatarIntegrationTestClient(test_app=app) + + self.client.wait_for_service_readiness() + try: + self.client.delete_video(TEST_REQUEST.video_id) + except httpx.HTTPStatusError as e: + self.assertEqual(e.response.status_code, HTTPStatus.NOT_FOUND) + + print("Integration Test Setup Completed") + + def tearDown(self) -> None: + self.setUp() + + def test_inference(self): + with open(test_file_path, "rb") as test_file: + initial_video_ids = self.client.list_video_ids() + self.assertNotIn(TEST_REQUEST.video_id, initial_video_ids) + + try: + self.client.delete_video(TEST_REQUEST.video_id) + self.fail("Expected delete to fail because video is not present") + except httpx.HTTPStatusError as e: + self.assertEqual(e.response.status_code, HTTPStatus.NOT_FOUND) + + self.assertEqual(TEST_REQUEST, self.client.post_audio(audio=test_file, metadata=TEST_REQUEST)) + + while True: + status_code, file_bytes = self.client.get_video(video_id=TEST_REQUEST.video_id) + self.assertIn(status_code, [HTTPStatus.OK, HTTPStatus.PROCESSING]) + + if status_code == HTTPStatus.OK: + break + else: + sleep(1) + + print("Test Delete Video") + self.client.delete_video(video_id=TEST_REQUEST.video_id) diff --git a/NeuralVoicePuppetry/neural_voice_backend_api/tests/voice_test.wav 
b/NeuralVoicePuppetry/neural_voice_backend_api/tests/voice_test.wav new file mode 100644 index 0000000..95ce3fe Binary files /dev/null and b/NeuralVoicePuppetry/neural_voice_backend_api/tests/voice_test.wav differ diff --git a/README.md b/README.md new file mode 100644 index 0000000..16e7b35 --- /dev/null +++ b/README.md @@ -0,0 +1,213 @@ +
+
+# AvatarForge
+
+Make your own digital avatars.
+
+
+
+## Project Details
+This repo contains the code for the project: Audio Driven Video Synthesis Of Personalized Moderations.
+
+Check out the project's scope and details on our [website](https://mtc.ethz.ch/research/image-video-processing/video-synthesis.html).
+
+#
+## Table of Contents
+1. [Demo Description](#demo-description)
+2. [Available Models](#available-models)
+ 1. [Neural Voice Puppetry](#neural-voice-puppetry)
+ 2. [Motion GAN](#motion-gan)
+3. [Data](#data)
+4. [Usage](#usage)
+ 1. [Train New Avatars](#train-new-avatars)
+ 2. [Inference for NeuralVoicePuppetry Model](#inference-for-neuralvoicepuppetry-model)
+ 3. [Inference for MotionGan Model](#inference-for-motiongan-model)
+ 4. [Build and Run Docker Containers](#build-and-run-docker-containers)
+ 5. [API Usage](#api-usage)
+
+#
+## Demo Description
+This tool allows you to generate your own deepfake avatar video.
+
+- Provide a short video and train your own avatars.
+
+- Pick an avatar from our available collection, record your own audio snippet and wait for the automated process to complete. After a few minutes, your deepfake avatar video can be found in the gallery.
+
+#
+## Available Models
+You can find here the code for the two pipelines implemented during this project:
+- Neural Voice Puppetry
+- Motion GAN
+
+*More details for each pipeline can be found in the respective READMEs.*
+
+
+### Neural Voice Puppetry
+![NeuralVoicePuppetry](figures/pipeline_NVP.png "NeuralVoicePuppetry")
+
+
+### Motion GAN
+![MotionGAN](figures/pipeline_motiongan.gif "MotionGAN")
+
+#
+## Data
+If you plan on using this code with the already available, pre-trained moderators, you only have to provide the audio data. Otherwise, provide both audio and video.
+
+Please follow these instructions on data quality:
+- Audio
+ - Provide a recording of a person speaking (audio of any duration is accepted).
+ - The cleaner the audio signal the better: audio with background noise will result in mismatched lip-sync.
+ - Avoid recording multiple people talking: the model is unable to distinguish between multiple voice signals.
+- Video
+ - Provide a video of your desired avatar character talking.
+ - Minimum video duration: 3 minutes.
+ - Longer videos result in longer training times.
+ - The background is irrelevant; it will be removed during preprocessing.
+ - Avoid hand or arm movements: movements that cause occlusion interfere with the quality of the generated frames during training.
+
+#
+## Usage
+You can follow the instructions in each README to clone the respective repositories and run the python code independently.
+
+The following instructions explain how to build and run both models using **Docker**.
+### Train New Avatars
+* Place the new video in `/data/{model-name}/input_data/video`, where `model-name` is either `motionGan` or `neuralVoice`.
+ Be aware that the name of the folder determines the avatar name.
+ ```bash
+ input_data
+ └── video # Folder containing m video files
+ ├── avatar_1
+ │ └── avatar_1.mp4
+ ...
+ └── avatar_m
+ │ └── avatar_m.mp4
+ │
+ └── my_new_avatar
+ └── my_new_avatar.mp4
+
+ ```
+
+* Set the `AVAILABLE_AVATARS` variable in `.env`/`debug.env` to the name of your new avatar(s) using a comma-separated list
+* Build and run the docker containers using the instructions [here](#build-and-run-docker-containers).
+* Use the API as described [here](#api-usage).
+
+#
+### Inference for NeuralVoicePuppetry Model
+* Place the checkpoints directory in `/data/neuralVoice/checkpoints`.
Be aware that the name of the folder determines the avatar name. This directory must contain the following elements:
+ ```bash
+ checkpoints
+    └── my_avatar
+    ├── latest_inpainter.pth # Weights for the inpainter network
+    ├── latest_netD.pth # Weights for the discriminator network
+    ├── latest_texture_decoder.pth # Weights for the neural renderer network
+    └── latest_texture.pth # Learned neural texture
+ ```
+ *Note: all these files will be generated automatically when training an avatar on a new video.*
+
+* Place the features directory in `/data/neuralVoice/features`. Be aware that the name of the folder determines the avatar name. This directory must contain the following elements:
+ ```bash
+ features
+    └── my_avatar
+    ├── DECA_codedicts # folder containing all DECA morphable model information per frame
+    ├── og_frames # folder containing all extracted video frames
+    ├── my_avatar.h5 # H5 file containing all tracking information for each frame
+    └── tform.npy # numpy file containing all transformations required to crop each frame
+ ```
+ *Note: all these files will be generated automatically when training an avatar on a new video.*
+
+* Place the mappings directory in `/data/neuralVoice/mappings`. Be aware that the name of the folder determines the avatar name. This directory must contain the following elements:
+ ```bash
+ mappings
+ └── audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead
+    └── mapping_my_avatar.npy # file containing the mapping between audio expressions and person-specific expressions
+ ```
+ *Note: all these files will be generated automatically when training an avatar on a new video.*
+
+* Place the input video directory in `/data/neuralVoice/input_data/video`. Be aware that the name of the folder determines the avatar name. This directory must contain the following files:
+ ```bash
+ input_data # Folder containing all input data
+ └── video # Folder containing video files
+ └── my_avatar # Folder generated by running the pipeline: contains the processed information for the "my_avatar" video
+ ├── my_avatar.mp4 # original video of the avatar
+ └── my_avatar.wav # extracted audio from the original video
+ ```
+ *Note: all these files will be generated automatically when training an avatar on a new video.*
+
+* Set the `AVAILABLE_AVATARS` variable in `.env`/`debug.env` to the name of your avatar(s) using a comma-separated list
+* Build and run the docker containers using the instructions [here](#build-and-run-docker-containers).
+* Use the API as described [here](#api-usage).
+
+#
+### Inference for MotionGan Model
+* Place the checkpoints directory in `/data/motionGan/checkpoints`. Be aware that the name of the folder determines the avatar name.
This directory must contain the following elements:
+ ```bash
+ checkpoints # Folder containing all checkpoints of trained avatars
+ └── my_avatar # Folder containing checkpoints of the "my_avatar" Avatar
+ ├── GAN_config.txt # GAN configuration parameters during training
+ ├── head2body.pkl # checkpoint for head position to body position regression
+ ├── head2body_test.png # result of the regression
+ ├── latest_Audio2Headpose.pkl # checkpoint for the fine-tuned audio to head-motion network
+ ├── latest_GAN_model.pt # checkpoint for the GAN network
+ ├── logs # folder containing all training logs during GAN training
+ └── train_opt.txt # audio to head-motion configuration parameters during fine-tuning
+ ```
+ *Note: all these files will be generated automatically when training an avatar on a new video.*
+
+
+* Place the input video directory in `/data/motionGan/input_data/video`. Be aware that the name of the folder determines the avatar name. This directory must contain the following files:
+ ```bash
+ input_data # Folder containing all input data
+ └── video # Folder containing video files
+ └── my_avatar # Folder generated by running the pipeline: contains the processed information for the "my_avatar" video
+ ├── img_size.npy # file containing information about the frame size
+ ├── mapping.npy # file containing the mapping between audio expressions and person-specific expressions
+ └── track_params.pt # file containing all head poses and tracked information of the original video
+ ```
+ *Note: all these files will be generated automatically when training an avatar on a new video.*
+
+
+* Set the `AVAILABLE_AVATARS` variable in `.env`/`debug.env` to the name of your avatar(s) using a comma-separated list
+* Build and run the docker containers using the instructions [here](#build-and-run-docker-containers).
+* Use the API as described [here](#api-usage).
+
+#
+### Build and Run Docker Containers
+* Run the application using one of the following commands:
+ * In order to run a regular, full run including both models, run the following:
+ ```bash
+ docker compose up --build
+ ```
+ This configuration makes use of the settings specified in the `docker-compose.yml` and the `.env` files.
+ * In order to run a reduced debug setup, run the following:
+ ```bash
+ docker compose -f docker-compose.yml -f docker-compose-debug.yml up --build backend {model-name}
+ ```
+ Where `{model-name}` is either `neural-voice-model` or `motion-gan-model`.
+
+ This configuration overrides the settings specified in the `docker-compose.yml` and the `.env` files with those in the `docker-compose-debug.yml` and `debug.env` files respectively.
+
+#
+### API Usage
+In order to interact with the running API server, access `http://localhost:5000/api/docs`, then navigate to the following routes:
+* `POST /api/inference`: Run inference on a new input audio. If the selected avatar has not yet been trained, this will perform the training.
+Inference should take roughly a minute for most short audio clips; training will take roughly 8 to 24 hours.
+* `GET /api/videos`: Returns the metadata for all generated videos, including ones currently being processed. Note the `inferenceCompleted` flag, indicating whether inference for this particular video has been completed.
+* `GET /api/video`: Returns the video in a common web streaming format that can be displayed by most modern video players & browsers, but does not work properly with the docs page.
+Alternatively, you can retrieve your finished videos directly from the file system under `data/{model-name}/output_data/video`, the same way you added the input videos.
+* `DELETE /api/video`: Deletes all data about a generated video from the database as well as the file system.
+
+#
+### LICENSE
+
+This implementation of AvatarForge is free and open source! All code in this repository is licensed under:
+
+* [MIT](LICENSE) License.
+
+Both pipelines rely on and are inspired by other works. You can find further information in the README of each pipeline.
+
+#
+### Contact information
+
+Alberto Pennino: [alberto.pennino@inf.ethz.ch](mailto:alberto.pennino@inf.ethz.ch)
\ No newline at end of file diff --git a/avatar-api/.env b/avatar-api/.env new file mode 100644 index 0000000..aa6ef46 --- /dev/null +++ b/avatar-api/.env @@ -0,0 +1,18 @@
+##### GENERAL ENVS #####
+GPU_SUPPORTED=True
+GPU_ENABLED=True
+
+##### AUTH ENVS #####
+AUTH_ENABLED=False
+REQUIRED_AUTH_ROLES=video-synthesis
+
+##### BACKEND ENVS #####
+FFMPEG_DOCKER=True # Python will use a different version of ffmpeg, therefore we need to redirect to /usr/bin/ffmpeg
+
+##### Frontend ENVS #####
+FLUTTER_USE_LOCALHOST_BACKEND=true # Governs whether Flutter connects to production backend or platform dependent local backend.
+FLUTTER_ENVIRONMENT=dev
+FLUTTER_AUTH_ENABLED=false
+FLUTTER_BACKEND_URL=/api
+
+BACKEND_URL=http://backend:5000 diff --git a/avatar-api/Dockerfile b/avatar-api/Dockerfile new file mode 100644 index 0000000..051488f --- /dev/null +++ b/avatar-api/Dockerfile @@ -0,0 +1,30 @@
+FROM continuumio/miniconda3 as base
+ENV DEBIAN_FRONTEND noninteractive
+
+# Keeps Python from generating .pyc files in the container
+ENV PYTHONDONTWRITEBYTECODE 1
+# Turns off buffering for easier container logging
+ENV PYTHONUNBUFFERED 1
+
+ENV TZ=Europe/Zurich
+
+SHELL ["/bin/bash", "-c"]
+WORKDIR /app
+
+# Update & install packages
+RUN apt-get -qq update --fix-missing && \
+ apt-get -qq install -y bash curl wget git ca-certificates openssh-client gpg tzdata && \
+ apt-get -qq clean
+ # Make sure google-cloud-sdk-gke-gcloud-auth-plugin is installed, because it won't be included in later releases
+
+FROM base as backend
+WORKDIR /app
+
+COPY gunicorn.conf.py .
+COPY requirements.txt .
+RUN pip install -r requirements.txt
+
+COPY avatar_backend_api ./avatar_backend_api
+
+CMD [ "gunicorn", "-c", "gunicorn.conf.py", "--chdir", "./avatar_backend_api", "-k", "uvicorn.workers.UvicornWorker", "app:app" ]
 diff --git a/avatar-api/README.md b/avatar-api/README.md new file mode 100644 index 0000000..be6a0c9 --- /dev/null +++ b/avatar-api/README.md @@ -0,0 +1,92 @@
+# Avatar API
+
+
+
+## Getting started
+
+To make it easy for you to get started with GitLab, here's a list of recommended next steps.
+
+Already a pro? Just edit this README.md and make it your own. Want to make it easy? [Use the template at the bottom](#editing-this-readme)!
+ +## Add your files + +- [ ] [Create](https://docs.gitlab.com/ee/user/project/repository/web_editor.html#create-a-file) or [upload](https://docs.gitlab.com/ee/user/project/repository/web_editor.html#upload-a-file) files +- [ ] [Add files using the command line](https://docs.gitlab.com/ee/gitlab-basics/add-file.html#add-a-file-using-the-command-line) or push an existing Git repository with the following command: + +``` +cd existing_repo +git remote add origin https://gitlab.ethz.ch/mtc/video-synthesis/avatar-api.git +git branch -M main +git push -uf origin main +``` + +## Integrate with your tools + +- [ ] [Set up project integrations](https://gitlab.ethz.ch/mtc/video-synthesis/avatar-api/-/settings/integrations) + +## Collaborate with your team + +- [ ] [Invite team members and collaborators](https://docs.gitlab.com/ee/user/project/members/) +- [ ] [Create a new merge request](https://docs.gitlab.com/ee/user/project/merge_requests/creating_merge_requests.html) +- [ ] [Automatically close issues from merge requests](https://docs.gitlab.com/ee/user/project/issues/managing_issues.html#closing-issues-automatically) +- [ ] [Enable merge request approvals](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/) +- [ ] [Automatically merge when pipeline succeeds](https://docs.gitlab.com/ee/user/project/merge_requests/merge_when_pipeline_succeeds.html) + +## Test and Deploy + +Use the built-in continuous integration in GitLab. + +- [ ] [Get started with GitLab CI/CD](https://docs.gitlab.com/ee/ci/quick_start/index.html) +- [ ] [Analyze your code for known vulnerabilities with Static Application Security Testing(SAST)](https://docs.gitlab.com/ee/user/application_security/sast/) +- [ ] [Deploy to Kubernetes, Amazon EC2, or Amazon ECS using Auto Deploy](https://docs.gitlab.com/ee/topics/autodevops/requirements.html) +- [ ] [Use pull-based deployments for improved Kubernetes management](https://docs.gitlab.com/ee/user/clusters/agent/) +- [ ] [Set up protected environments](https://docs.gitlab.com/ee/ci/environments/protected_environments.html) + +*** + +# Editing this README + +When you're ready to make this README your own, just edit this file and use the handy template below (or feel free to structure it however you want - this is just a starting point!). Thank you to [makeareadme.com](https://www.makeareadme.com/) for this template. + +## Suggestions for a good README +Every project is different, so consider which of these sections apply to yours. The sections used in the template are suggestions for most open source projects. Also keep in mind that while a README can be too long and detailed, too long is better than too short. If you think your README is too long, consider utilizing another form of documentation rather than cutting out information. + +## Name +Choose a self-explaining name for your project. + +## Description +Let people know what your project can do specifically. Provide context and add a link to any reference visitors might be unfamiliar with. A list of Features or a Background subsection can also be added here. If there are alternatives to your project, this is a good place to list differentiating factors. + +## Badges +On some READMEs, you may see small images that convey metadata, such as whether or not all the tests are passing for the project. You can use Shields to add some to your README. Many services also have instructions for adding a badge. 
+ +## Visuals +Depending on what you are making, it can be a good idea to include screenshots or even a video (you'll frequently see GIFs rather than actual videos). Tools like ttygif can help, but check out Asciinema for a more sophisticated method. + +## Installation +Within a particular ecosystem, there may be a common way of installing things, such as using Yarn, NuGet, or Homebrew. However, consider the possibility that whoever is reading your README is a novice and would like more guidance. Listing specific steps helps remove ambiguity and gets people to using your project as quickly as possible. If it only runs in a specific context like a particular programming language version or operating system or has dependencies that have to be installed manually, also add a Requirements subsection. + +## Usage +Use examples liberally, and show the expected output if you can. It's helpful to have inline the smallest example of usage that you can demonstrate, while providing links to more sophisticated examples if they are too long to reasonably include in the README. + +## Support +Tell people where they can go to for help. It can be any combination of an issue tracker, a chat room, an email address, etc. + +## Roadmap +If you have ideas for releases in the future, it is a good idea to list them in the README. + +## Contributing +State if you are open to contributions and what your requirements are for accepting them. + +For people who want to make changes to your project, it's helpful to have some documentation on how to get started. Perhaps there is a script that they should run or some environment variables that they need to set. Make these steps explicit. These instructions could also be useful to your future self. + +You can also document commands to lint the code or run tests. These steps help to ensure high code quality and reduce the likelihood that the changes inadvertently break something. Having instructions for running tests is especially helpful if it requires external setup, such as starting a Selenium server for testing in a browser. + +## Authors and acknowledgment +Show your appreciation to those who have contributed to the project. + +## License +For open source projects, say how it is licensed. + +## Project status +If you have run out of energy or time for your project, put a note at the top of the README saying that development has slowed down or stopped completely. Someone may choose to fork your project or volunteer to step in as a maintainer or owner, allowing your project to keep going. You can also make an explicit request for maintainers. 
diff --git a/avatar-api/avatar_backend_api/__init__.py b/avatar-api/avatar_backend_api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/avatar-api/avatar_backend_api/api_types.py b/avatar-api/avatar_backend_api/api_types.py new file mode 100644 index 0000000..6f2f9c4 --- /dev/null +++ b/avatar-api/avatar_backend_api/api_types.py @@ -0,0 +1,67 @@ +from __future__ import annotations + +from dataclasses import dataclass +from enum import Enum +from typing import Optional +from uuid import uuid4 + +from mtc_api_utils.api_types import ApiType, FirebaseUser +from pydantic import Field + +from avatar_backend_api.config import AvatarConfig + + +class ApiRoute(Enum): + value: str + + video = "/api/video" + list_videos = f"{video}s" + video_ids = f"{video}/ids" + + inference = "/api/inference" + avatars = "/api/avatars" + + @staticmethod + def video_url(backend_url: str, video_id: str) -> str: + return f"{backend_url}{ApiRoute.video.value}?videoId={video_id}" + + +class StrEnum(str, Enum): + value: str + + +class BaseAvatar(StrEnum): + Jennifer_355_9415 = "jennifer" + Arthur_A2226 = "arthur" + + +Avatar = StrEnum("Avatar", AvatarConfig.available_avatars) + + +class AvatarModel(Enum): + value: str + neural_voice = "neuralVoice" + motion_gan = "motionGan" + + +class AvatarModelRequest(ApiType): + video_id: str = Field(alias="videoId", default_factory=lambda: uuid4().hex) + avatar: Avatar = Field(example=BaseAvatar.Jennifer_355_9415) + + +class AvatarRequest(AvatarModelRequest): + audio_name: str = Field(alias="audioName", example="YourUserName") + avatar_model: AvatarModel = Field(alias="avatarModel", example=AvatarModel.neural_voice) + + def to_metadata(self, inference_completed: bool = False) -> VideoMetadata: + return VideoMetadata(**self.json_dict, inference_completed=inference_completed) + + +class VideoMetadata(AvatarRequest): + inference_completed: bool = Field(alias="inferenceCompleted", default=False) + + +@dataclass +class InferenceQueueTask: + request: AvatarModelRequest + user: Optional[FirebaseUser] = None diff --git a/avatar-api/avatar_backend_api/app.py b/avatar-api/avatar_backend_api/app.py new file mode 100644 index 0000000..3a959fc --- /dev/null +++ b/avatar-api/avatar_backend_api/app.py @@ -0,0 +1,153 @@ +from http import HTTPStatus +from typing import List, Dict +from uuid import uuid4 + +import uvicorn +from fastapi import UploadFile, Depends, Query, HTTPException +from mtc_api_utils.api import BaseApi, RequireReadinessDependency +from mtc_api_utils.api_types import FirebaseUser +from mtc_api_utils.clients.firebase_client import firebase_user_auth +from starlette.responses import FileResponse + +from avatar_backend_api.api_types import ApiRoute, VideoMetadata, AvatarRequest, AvatarModel +from avatar_backend_api.background_tools.model_result_poll_worker import ModelResultPollWorker +from avatar_backend_api.clients.avatar_client import AvatarModelClient +from avatar_backend_api.clients.db_client import AvatarDbClient +from avatar_backend_api.clients.io_client import IoClient, IoClientException +from avatar_backend_api.clients.mock_avatar_model_client import MockAvatarModelClient +from avatar_backend_api.config import AvatarConfig + +AvatarConfig.print_config() + +user_auth = firebase_user_auth(config=AvatarConfig) + +db_client = AvatarDbClient() +io_client = IoClient(user_dir_mode=True) + +model_clients: Dict[AvatarModel, AvatarModelClient] = { + AvatarModel.neural_voice: + AvatarModelClient(backend_url=AvatarConfig.neural_voice_backend_url) + if not 
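+ # MOCK_BACKEND=True (AvatarConfig.mockBackend) substitutes an in-memory mock client, so the API can run without the GPU model containers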
AvatarConfig.mockBackend + else MockAvatarModelClient(backend_url=""), + AvatarModel.motion_gan: + AvatarModelClient(backend_url=AvatarConfig.motion_gan_backend_url) + if not AvatarConfig.mockBackend + else MockAvatarModelClient(backend_url=""), +} + +model_poll_worker = ModelResultPollWorker( + io_client=io_client, + db_client=db_client, + model_clients=model_clients, +) + + +def backend_is_ready() -> bool: + # Only turn ready if both models are ready + for model, client in model_clients.items(): + if model.value in AvatarConfig.avatar_models: + _, is_ready = client.get_readiness() + if not is_ready: + return False + + return True + + +app = BaseApi(is_ready=backend_is_ready, config=AvatarConfig, global_readiness_middleware_enabled=False) + + +@app.get( + path=ApiRoute.video_ids.value, + response_model=List[str], +) +async def list_video_ids(user: FirebaseUser = Depends(user_auth)) -> List[str]: + return [path.split("/")[-1].split(".")[0] for path in io_client.video.list_videos(user=user)] + + +@app.get( + path=ApiRoute.list_videos.value, + response_model=List[VideoMetadata], +) +async def list_videos(user: FirebaseUser = Depends(user_auth)) -> List[VideoMetadata]: + return db_client.list_videos(user=user) + + +@app.get( + path=ApiRoute.video.value, + response_class=FileResponse, +) +async def get_video(video_id: str = Query(alias="videoId")): # user: FirebaseUser = Depends(user_auth) + # The following line represents a workaround for the fact that the frontend video library can't send an authentication header with the video request. + # If the library should be fixed in the future, the user should be retrieved using the user_auth dependency as follows: [ user: FirebaseUser = Depends(user_auth) ] + user = FirebaseUser(email=db_client.get_user_from_id(video_id=video_id), roles=[]) + + try: + return io_client.video.read_from_disk(video_id=video_id, user=user) + except IoClientException: + if db_client.video_exists(video_id=video_id, user=user): + raise HTTPException( + status_code=HTTPStatus.PROCESSING, + detail=f"Audio with {video_id=} is currently being processed", + ) + + else: + raise HTTPException(status_code=HTTPStatus.NOT_FOUND, detail=f"A request with video_id={video_id} has not been found") + + +@app.post( + path=ApiRoute.inference.value, + status_code=HTTPStatus.ACCEPTED, + response_model=AvatarRequest, + dependencies=[Depends(RequireReadinessDependency(base_api=app))], + +) +async def inference( + audio: UploadFile, + request_metadata: AvatarRequest = Depends(), + user: FirebaseUser = Depends(user_auth), +) -> AvatarRequest: + print(f"Routing request to model={request_metadata.avatar_model.value} for audio_name={audio.filename}") + + if not request_metadata.video_id: + request_metadata.video_id = uuid4().hex + + if "." 
not in audio.filename: + audio.filename += ".wav" + + metadata = request_metadata.to_metadata() + filename = f"{request_metadata.video_id}.{audio.filename.split('.')[-1]}" + + model_clients[request_metadata.avatar_model].post_audio(audio=(filename, audio.file.read()), metadata=metadata) + db_client.insert_video(video_metadata=metadata, user=user) + + return metadata + + +@app.delete( + path=ApiRoute.video.value, + response_model=str, +) +async def delete_video(video_id: str = Query(alias="videoId"), user: FirebaseUser = Depends(user_auth)) -> str: + db_client.delete_video(video_id=video_id, user=user) + + try: + io_client.video.delete_file(video_id=video_id, user=user) + + except IoClientException: + raise HTTPException(status_code=HTTPStatus.NOT_FOUND, detail=f"Unable to find video with video_id={video_id}") + + return f"Successfully removed video with video_id={video_id}" + + +@app.get( + path=ApiRoute.avatars.value, + response_model=Dict[str, str], + tags=["Avatars"], + dependencies=[Depends(user_auth)], +) +async def available_avatars() -> Dict[str, str]: + return AvatarConfig.available_avatars + + +if __name__ == '__main__': + uvicorn.run(app=app, port=5000) diff --git a/avatar-api/avatar_backend_api/background_tools/__init__.py b/avatar-api/avatar_backend_api/background_tools/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/avatar-api/avatar_backend_api/background_tools/inference_queue.py b/avatar-api/avatar_backend_api/background_tools/inference_queue.py new file mode 100644 index 0000000..1fed652 --- /dev/null +++ b/avatar-api/avatar_backend_api/background_tools/inference_queue.py @@ -0,0 +1,48 @@ +import os +from queue import SimpleQueue +from threading import Thread +from time import sleep + +from avatar_backend_api.api_types import InferenceQueueTask +from avatar_backend_api.clients.io_client import IoClient +from avatar_backend_api.config import AvatarConfig +from avatar_backend_api.models.avatar_base_model import AvatarBaseModel + + +class InferenceQueue: + def __init__( + self, + model: AvatarBaseModel, + io_client: IoClient, + audio_input_dir: str = AvatarConfig.audio_input_dir, + video_output_dir: str = AvatarConfig.video_output_dir, + daemon_worker: bool = True, + ): + self.queue: SimpleQueue[InferenceQueueTask] = SimpleQueue() + + self.model = model + self.io_client = io_client + + self.audio_input_dir = audio_input_dir + self.video_output_dir = video_output_dir + self._worker_thread = Thread(target=self._worker, daemon=daemon_worker) + + os.makedirs(self.audio_input_dir, exist_ok=True) + os.makedirs(self.video_output_dir, exist_ok=True) + + self._worker_thread.start() + + def post_processing(self, task: InferenceQueueTask) -> None: + pass + + def add_task(self, task: InferenceQueueTask) -> None: + self.queue.put(task) + + def _worker(self): + while True: + if self.model.is_ready() and not self.queue.empty(): + task = self.queue.get() + self.model.inference(task=task) + self.post_processing(task=task) + else: + sleep(1) diff --git a/avatar-api/avatar_backend_api/background_tools/model_result_poll_worker.py b/avatar-api/avatar_backend_api/background_tools/model_result_poll_worker.py new file mode 100644 index 0000000..0438a6c --- /dev/null +++ b/avatar-api/avatar_backend_api/background_tools/model_result_poll_worker.py @@ -0,0 +1,69 @@ +from threading import Thread +from time import sleep +from typing import Dict, Callable + +from mtc_api_utils.api_types import FirebaseUser + +from avatar_backend_api.api_types import AvatarModel +from 
avatar_backend_api.clients.avatar_client import AvatarModelClient
+from avatar_backend_api.clients.db_client import AvatarDbClient, AvatarDbException
+from avatar_backend_api.clients.io_client import IoClient
+
+
+class ModelResultPollWorker:
+
+ def __init__(
+ self,
+ io_client: IoClient,
+ db_client: AvatarDbClient,
+ model_clients: Dict[AvatarModel, AvatarModelClient],
+ daemon_worker: bool = True,
+ ):
+ self.io_client = io_client
+ self.model_clients = model_clients
+ self.db_client = db_client
+
+ self._worker_thread = Thread(target=self._worker, daemon=daemon_worker)
+ self._worker_thread.start()
+
+ def _worker(self):
+ """
+ Continuously polls processed videos from model backends, retrieves them and performs cleanup.
+
+ Methods are separated mostly for testing purposes
+ """
+ while True:
+ self._poll_models(perform_on_available_video=self._retrieve_video_from_model)
+ sleep(5)
+
+ def _poll_models(self, perform_on_available_video: Callable[[str, AvatarModel], None]):
+ """ GET available videos from Model, then GET video & store video file + metadata in backend API"""
+
+ for model, client in self.model_clients.items():
+ if client.get_readiness()[1]:
+ video_ids = client.list_video_ids()
+ if video_ids:
+ # Log once per model instead of once per video
+ print(f"Retrieving the following videos from model={model}: {video_ids}")
+ for video_id in video_ids:
+ perform_on_available_video(video_id, model)
+
+ def _retrieve_video_from_model(self, video_id: str, avatar_model: AvatarModel) -> None:
+ status, video_bytes = self.model_clients[avatar_model].get_video(video_id=video_id)
+
+ try:
+ user = FirebaseUser(
+ email=self.db_client.get_user_from_id(video_id=video_id),
+ roles=[],
+ )
+
+ except AvatarDbException as e:
+ print(e.detail)
+ return
+
+ video_metadata = self.db_client.get_video(video_id=video_id, user=user)
+ video_metadata.inference_completed = True
+
+ self.io_client.video.save_to_disk(video_bytes=video_bytes, video_id=video_id, user=user)
+ self.db_client.upsert_video(video_metadata=video_metadata, user=user)
+
+ print("Deleting video from model")
+ self.model_clients[avatar_model].delete_video(video_id=video_id) diff --git a/avatar-api/avatar_backend_api/clients/__init__.py b/avatar-api/avatar_backend_api/clients/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/avatar-api/avatar_backend_api/clients/avatar_client.py b/avatar-api/avatar_backend_api/clients/avatar_client.py new file mode 100644 index 0000000..e2d2e44 --- /dev/null +++ b/avatar-api/avatar_backend_api/clients/avatar_client.py @@ -0,0 +1,49 @@
+from typing import List, BinaryIO, Tuple, Union
+
+import requests
+from mtc_api_utils.clients.api_client import ApiClient
+
+from avatar_backend_api.api_types import ApiRoute, AvatarModelRequest
+
+
+class AvatarModelClient(ApiClient):
+ def list_video_ids(self) -> List[str]:
+ resp = requests.get(
+ url=self._backend_url + ApiRoute.video_ids.value,
+ )
+ resp.raise_for_status()
+
+ return resp.json()
+
+ def get_video(self, video_id: str) -> Tuple[int, bytes]:
+ resp = requests.get(
+ url=self._backend_url + ApiRoute.video.value,
+ params={"videoId": video_id},
+ )
+
+ if resp.status_code >= 300:
+ resp.raise_for_status()
+
+ return resp.status_code, resp.content
+
+ def post_audio(self, audio: Union[BinaryIO, Tuple[str, bytes]], metadata: AvatarModelRequest) -> AvatarModelRequest:
+ resp = requests.post(
+ url=self._backend_url + ApiRoute.inference.value,
+ params=metadata.json_dict,
+ files={"audio": audio},
+ )
+
+ if resp.status_code >= 300:
+ print(f"resp.reason={resp.reason}")
+
print(f"resp.text={resp.text}") + print(f"resp.raw={resp.raw}") + + resp.raise_for_status() + + return metadata + + def delete_video(self, video_id: str) -> str: + resp = requests.delete(ApiRoute.video_url(backend_url=self._backend_url, video_id=video_id)) + resp.raise_for_status() + + return resp.text diff --git a/avatar-api/avatar_backend_api/clients/db_client.py b/avatar-api/avatar_backend_api/clients/db_client.py new file mode 100644 index 0000000..d36cad3 --- /dev/null +++ b/avatar-api/avatar_backend_api/clients/db_client.py @@ -0,0 +1,83 @@ +import os +from http import HTTPStatus +from typing import List + +from fastapi import HTTPException +from mtc_api_utils.api_types import FirebaseUser +from tinydb import TinyDB, where +from tinydb.table import Table + +from avatar_backend_api.api_types import VideoMetadata +from avatar_backend_api.config import AvatarConfig + + +class AvatarDbException(HTTPException): + pass + + +class AvatarDbClient: + db_filepath: str + db: TinyDB + + def __init__(self, db_filepath: str = AvatarConfig.db_filepath): + os.makedirs( + name=os.path.dirname(db_filepath), + exist_ok=True, + ) + + self.db_filepath = db_filepath + self.db = TinyDB(db_filepath) + + def _user_table(self, user: FirebaseUser) -> Table: + return self.db.table(name=user.email) + + def list_videos(self, user: FirebaseUser) -> List[VideoMetadata]: + return [VideoMetadata.parse_obj(doc) for doc in self._user_table(user=user).all()] + + def video_exists(self, video_id: str, user: FirebaseUser) -> bool: + return self._user_table(user=user).count(self.video_query(video_id=video_id)) == 1 + + def get_video(self, video_id: str, user: FirebaseUser) -> VideoMetadata: + result = self._user_table(user=user).get(self.video_query(video_id=video_id)) + + if result is None: + raise AvatarDbException(status_code=HTTPStatus.NOT_FOUND, detail=f"Video with id: {video_id} does not exist") + else: + return VideoMetadata.parse_obj(result) + + def insert_video(self, video_metadata: VideoMetadata, user: FirebaseUser) -> VideoMetadata: + if self._user_table(user=user).count(cond=self.video_query(video_id=video_metadata.video_id)) != 0: + raise AvatarDbException(HTTPStatus.CONFLICT, detail=f"An inference request with id {video_metadata.video_id} already exists") + + self._user_table(user=user).insert(document=video_metadata.json_dict) + print(f"Inserting new video_metadata for user={user} with video_id={video_metadata.video_id}") + + return video_metadata + + def upsert_video(self, video_metadata: VideoMetadata, user: FirebaseUser) -> VideoMetadata: + # Update doc if exists, else insert it + self._user_table(user=user).upsert( + document=video_metadata.json_dict, + cond=self.video_query(video_id=video_metadata.video_id), + ) + print(f"Updating video_metadata for user={user} with video_id={video_metadata.video_id}") + + return video_metadata + + def delete_video(self, video_id: str, user: FirebaseUser) -> VideoMetadata: + video_metadata = self.get_video(video_id=video_id, user=user) + self._user_table(user=user).remove(self.video_query(video_id=video_id)) + print(f"Removing video_metadata for user={user} with video_id={video_metadata.video_id}") + + return video_metadata + + def get_user_from_id(self, video_id) -> str: + for user_email in self.db.tables(): + if self.db.table(name=user_email).contains(self.video_query(video_id=video_id)): + return user_email + + raise AvatarDbException(status_code=HTTPStatus.NOT_FOUND, detail=f"Video with id: {video_id} does not exist in db. 
Existing users & video_ids: {[(email, self._user_table(user=FirebaseUser(email=email, roles=[])).all()) for email in self.db.tables()]}") + + @staticmethod + def video_query(video_id: str): + return where("videoId") == video_id diff --git a/avatar-api/avatar_backend_api/clients/io_client.py b/avatar-api/avatar_backend_api/clients/io_client.py new file mode 100644 index 0000000..fc8fd6b --- /dev/null +++ b/avatar-api/avatar_backend_api/clients/io_client.py @@ -0,0 +1,157 @@ +import os.path +from glob import glob +from http import HTTPStatus +from typing import BinaryIO, List, Optional, Union + +from fastapi import UploadFile, HTTPException +from mtc_api_utils.api_types import FirebaseUser +from starlette.responses import FileResponse + +from avatar_backend_api.config import AvatarConfig + +VIDEO_EXTENSION = ".mp4" + + +class IoClientException(HTTPException): + pass + + +class BaseIOClient: + def __init__(self, base_path: str, user_dir_mode: bool): + self.base_path = base_path + self.user_dir_mode = user_dir_mode + + def user_dir(self, user: Optional[FirebaseUser]) -> str: + if self.user_dir_mode: + if user is None: + raise ValueError("In user_dir_mode, a user is expected for IoClient methods") + else: + return os.path.join(self.base_path, user.email) + + if not self.user_dir_mode: + if user is not None: + raise ValueError("User is only expected in user_dir_mode. Either remove it or enable user_dir_mode in the IoClient constructor") + else: + return self.base_path + + +class AudioIoClient(BaseIOClient): + def matching_paths(self, video_id: str, user: Optional[FirebaseUser] = None) -> List[str]: + matching_paths = sorted(glob(os.path.join(self.user_dir(user=user), video_id, video_id) + ".*")) + + if len(matching_paths) == 0: + raise IoClientException(status_code=HTTPStatus.NOT_FOUND, detail=f"Unable to find file with video_id={video_id}") + + return matching_paths + + def audio_path(self, video_id: str, file_ending: str, user: Optional[FirebaseUser] = None) -> str: + return os.path.join(self.user_dir(user=user), video_id, f"{video_id}.{file_ending}") + + def file_exists(self, video_id: str, user: Optional[FirebaseUser] = None) -> bool: + try: + paths = self.matching_paths(video_id=video_id, user=user) + except IoClientException: + return False + + return len(paths) > 0 + + def save_to_disk(self, audio: UploadFile, video_id: str, user: Optional[FirebaseUser] = None) -> str: + file_ending = audio.filename.split(".")[-1] + + path = self.audio_path(video_id=video_id, file_ending=file_ending, user=user) + os.makedirs(os.path.dirname(path), exist_ok=True) + + with open(path, "wb") as disk_file: + file_bytes = audio.file.read() + disk_file.write(file_bytes) + + return path + + def read_from_disk(self, video_id: str, user: Optional[FirebaseUser] = None) -> BinaryIO: + try: + return open(self.matching_paths(video_id=video_id, user=user)[-1], "rb") + except FileNotFoundError: + raise IoClientException(status_code=HTTPStatus.NOT_FOUND, detail=f"Unable to find video with video_id={video_id}") + + def delete_file(self, video_id: str, user: Optional[FirebaseUser] = None) -> None: + for path in self.matching_paths(video_id=video_id, user=user): + try: + os.remove(path) + except FileNotFoundError: + raise IoClientException(status_code=HTTPStatus.NOT_FOUND, detail=f"Unable to find video with video_id={video_id}") + + +class VideoIoClient(BaseIOClient): + def list_videos(self, user: Optional[FirebaseUser] = None) -> List[str]: + return sorted(glob(os.path.join(self.user_dir(user=user), f"*{VIDEO_EXTENSION}"))) 
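+ # list_videos returns full file paths; routes such as /api/video/ids reduce them to bare video ids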
+ + def video_path(self, video_id: str, user: Optional[FirebaseUser] = None) -> str: + return os.path.join(self.user_dir(user=user), video_id + VIDEO_EXTENSION) + + def file_exists(self, video_id: str, user: Optional[FirebaseUser] = None) -> bool: + return os.path.isfile(self.video_path(video_id=video_id, user=user)) + + def save_to_disk(self, video_bytes: Union[bytes, BinaryIO], video_id: str, user: Optional[FirebaseUser] = None) -> str: + try: + video_bytes = video_bytes.read() + except AttributeError: + pass + + path = self.video_path(video_id=video_id, user=user) + + os.makedirs(self.user_dir(user=user), exist_ok=True) + with open(path, "wb") as disk_file: + disk_file.write(video_bytes) + + return path + + def read_from_disk(self, video_id: str, user: Optional[FirebaseUser] = None) -> FileResponse: + file_path = self.video_path(video_id=video_id, user=user) + + if os.path.isfile(file_path): + return FileResponse( + path=file_path, + ) + + else: + raise IoClientException(status_code=HTTPStatus.NOT_FOUND, detail=f"Unable to find video with video_id={video_id}") + + def rename_file(self, filename: str, new_filename: str, user: Optional[FirebaseUser] = None) -> str: + input_path = os.path.join(self.user_dir(user=user), filename) + output_path = os.path.join(self.user_dir(user=user), new_filename) + + try: + os.rename( + src=input_path, + dst=output_path, + ) + except FileNotFoundError: + raise IoClientException(status_code=HTTPStatus.NOT_FOUND, detail=f"Unable to find video with filename={filename}") + + return output_path + + def delete_file(self, video_id: str, user: Optional[FirebaseUser] = None) -> str: + try: + path = self.video_path(video_id=video_id, user=user) + os.remove(path) + return path + + except FileNotFoundError: + raise IoClientException(status_code=HTTPStatus.NOT_FOUND, detail=f"Unable to find video with video_id={video_id}") + + +class IoClient: + """ Manages file input & output for Avatar audio & video files required by the Avatar project. + Can be used in two modes: If a FirebaseUser is passed to a method, the files will be read from and written to a subdirectory named after the user.email. 
+ If the user is omitted, files are read from and written to the base path directly.
+ """
+
+ def __init__(
+ self,
+ user_dir_mode: bool,
+ audio_base_path: str = AvatarConfig.audio_input_dir,
+ video_base_path: str = AvatarConfig.video_output_dir,
+ ):
+ self.user_dir_mode = user_dir_mode
+ self.audio = AudioIoClient(base_path=audio_base_path, user_dir_mode=user_dir_mode)
+ self.video = VideoIoClient(base_path=video_base_path, user_dir_mode=user_dir_mode) diff --git a/avatar-api/avatar_backend_api/clients/mock_avatar_model_client.py b/avatar-api/avatar_backend_api/clients/mock_avatar_model_client.py new file mode 100644 index 0000000..a22d470 --- /dev/null +++ b/avatar-api/avatar_backend_api/clients/mock_avatar_model_client.py @@ -0,0 +1,51 @@
+import logging
+from http import HTTPStatus
+from time import sleep
+from typing import Dict, Union, BinaryIO, Tuple, List, Optional
+
+import requests
+from fastapi import HTTPException
+from mtc_api_utils.api_types import ApiStatus
+
+from avatar_backend_api.api_types import AvatarModelRequest
+from avatar_backend_api.clients.avatar_client import AvatarModelClient
+
+log = logging.Logger("MockNeuralVoiceModel-Logger")
+
+INFERENCE_DELAY_SECONDS = 1
+
+mock_db: Dict[str, AvatarModelRequest] = {}
+
+
+class MockAvatarModelClient(AvatarModelClient):
+
+ def get_liveness(self) -> Tuple[Optional[requests.Response], bool]:
+ return requests.Response(), True
+
+ def get_readiness(self) -> Tuple[Optional[requests.Response], bool]:
+ return requests.Response(), True
+
+ def get_status(self) -> Tuple[Optional[requests.Response], ApiStatus]:
+ return requests.Response(), ApiStatus(readiness=self.get_readiness()[1], gpu_supported=True, gpu_enabled=True)
+
+ def list_video_ids(self) -> List[str]:
+ return [metadata.video_id for metadata in mock_db.values()]
+
+ def get_video(self, video_id: str) -> Tuple[int, bytes]:
+ return HTTPStatus.OK, bytes()
+
+ def post_audio(self, audio: Union[BinaryIO, Tuple[str, bytes]], metadata: AvatarModelRequest) -> AvatarModelRequest:
+ sleep(INFERENCE_DELAY_SECONDS)
+
+ mock_db[metadata.video_id] = metadata
+ print(f"Processed audio: {metadata.video_id}")
+
+ return metadata
+
+ def delete_video(self, video_id: str) -> str:
+ try:
+ del mock_db[video_id]
+ except KeyError:
+ raise HTTPException(status_code=HTTPStatus.NOT_FOUND, detail=f"Video with video_id={video_id} was not found")
+
+ return f"Successfully deleted video with video_id={video_id}" diff --git a/avatar-api/avatar_backend_api/config.py b/avatar-api/avatar_backend_api/config.py new file mode 100644 index 0000000..f5e5cb6 --- /dev/null +++ b/avatar-api/avatar_backend_api/config.py @@ -0,0 +1,36 @@
+from __future__ import annotations
+
+import os
+from typing import Dict, List
+
+from mtc_api_utils.config import Config
+
+
+def _avatar_short_name(avatar_name: str) -> str:
+ return avatar_name.split("_")[0].split("-")[0].split(".")[0].lower()
+
+
+class AvatarConfig(Config):
+ # API configs
+ neural_voice_backend_url: str = Config.parse_env_var("NEURAL_VOICE_BACKEND_URL", default="http://neural-voice-model:5000")
+ motion_gan_backend_url: str = Config.parse_env_var("MOTION_GAN_BACKEND_URL", default="http://motion-gan-model:5000")
+
+ # Debug configs
+ mockBackend: bool = Config.parse_env_var("MOCK_BACKEND", default="False", convert_type=bool)
+
+ # IO configs
+ avatar_models: List[str] = Config.parse_env_var("AVATAR_MODELS", default="neuralVoice,motionGan", convert_type=list)
+ available_avatars: Dict[str, str] = {
+ avatar: _avatar_short_name(avatar)
+ for avatar in list(Config.parse_env_var("AVAILABLE_AVATARS",
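+ # comma-separated env var; short display names are derived per avatar via _avatar_short_name above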
default="Jennifer_355_9415,Arthur_A2226", convert_type=list)) + } + + db_filepath: str = Config.parse_env_var("DB_FILEPATH", default="/tmp/tinyDB/neuralVoices.json") + + data_base_dir: str = Config.parse_env_var("DATA_BASE_DIR", default="/tmp/avatar") + audio_input_dir: str = os.path.join(data_base_dir, "input_data", "audio") + video_output_dir: str = os.path.join(data_base_dir, "output_data", "videos") + + @staticmethod + def avatar_short_name(avatar_name: str) -> str: + return _avatar_short_name(avatar_name=avatar_name) diff --git a/avatar-api/avatar_backend_api/models/__init__.py b/avatar-api/avatar_backend_api/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/avatar-api/avatar_backend_api/models/avatar_base_model.py b/avatar-api/avatar_backend_api/models/avatar_base_model.py new file mode 100644 index 0000000..f29c266 --- /dev/null +++ b/avatar-api/avatar_backend_api/models/avatar_base_model.py @@ -0,0 +1,23 @@ +from abc import ABC, abstractmethod +from typing import List + +from mtc_api_utils.base_model import MLBaseModel + +from avatar_backend_api.api_types import InferenceQueueTask +from avatar_backend_api.clients.io_client import IoClient + + +class AvatarBaseModel(MLBaseModel, ABC): + + def __init__(self, io_client: IoClient): + self.io_client = io_client + super().__init__() + + @abstractmethod + def inference(self, task: InferenceQueueTask) -> None: + raise Exception("Not implemented") + + @staticmethod + @abstractmethod + def available_avatars() -> List[str]: + raise Exception("Not implemented") diff --git a/avatar-api/avatar_backend_api/models/mock_avatar_model.py b/avatar-api/avatar_backend_api/models/mock_avatar_model.py new file mode 100644 index 0000000..f883de5 --- /dev/null +++ b/avatar-api/avatar_backend_api/models/mock_avatar_model.py @@ -0,0 +1,28 @@ +import os +from time import sleep + +from avatar_backend_api.background_tools.inference_queue import AvatarBaseModel, InferenceQueueTask + +INFERENCE_DELAY_SECONDS = 1 + + +class MockAvatarModel(AvatarBaseModel): + + def init_model(self): + pass + + def is_ready(self) -> bool: + return True + + def inference(self, task: InferenceQueueTask) -> None: + sleep(INFERENCE_DELAY_SECONDS) + print(f"Processed audio: {task.request.video_id} with avatar: {task.request.avatar.value}") + + # Create an empty file, representing the output video + os.makedirs(self.io_client.video.user_dir(user=task.user if self.io_client.user_dir_mode else None), exist_ok=True) + + file_path = self.io_client.video.video_path(video_id=task.request.video_id, user=task.user if self.io_client.user_dir_mode else None) + with open(file_path, "w") as disk_file: + disk_file.write("testFile") + + assert os.path.isfile(file_path) diff --git a/avatar-api/avatar_backend_api/py.typed b/avatar-api/avatar_backend_api/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/avatar-api/avatar_backend_api/tests/__init__.py b/avatar-api/avatar_backend_api/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/avatar-api/avatar_backend_api/tests/integration_test_client.py b/avatar-api/avatar_backend_api/tests/integration_test_client.py new file mode 100644 index 0000000..e9a8e54 --- /dev/null +++ b/avatar-api/avatar_backend_api/tests/integration_test_client.py @@ -0,0 +1,53 @@ +from typing import List, Tuple, Union, BinaryIO + +from fastapi.testclient import TestClient + +from avatar_backend_api.api_types import VideoMetadata, ApiRoute, AvatarRequest +from avatar_backend_api.app import app +from 
avatar_backend_api.clients.avatar_client import AvatarModelClient
+
+
+class AvatarModelTestClient(AvatarModelClient):
+ def __init__(self, backend_url="", http_client=TestClient(app=app)):
+ super().__init__(backend_url=backend_url, http_client=http_client)
+
+ def list_video_ids(self) -> List[str]:
+ resp = self.http_client.get(url=ApiRoute.video_ids.value)
+ resp.raise_for_status()
+
+ return resp.json()
+
+ def list_videos(self) -> List[VideoMetadata]:
+ resp = self.http_client.get(url=ApiRoute.list_videos.value)
+ resp.raise_for_status()
+
+ return [VideoMetadata.parse_obj(vid) for vid in resp.json()]
+
+ def get_video(self, video_id: str) -> Tuple[int, bytes]:
+ """ Returns the response code as well as the bytes representing the retrieved file"""
+ resp = self.http_client.get(ApiRoute.video_url(backend_url="", video_id=video_id))
+
+ if resp.status_code >= 300:
+ resp.raise_for_status()
+
+ return resp.status_code, resp.content
+
+ def post_audio(self, audio: Union[BinaryIO, Tuple[str, bytes]], metadata: AvatarRequest) -> AvatarRequest:
+ """ Overrides the base client's AvatarModelRequest metadata with the extended AvatarRequest schema in order to exercise the full backend API during testing """
+ resp = self.http_client.post(
+ url=ApiRoute.inference.value,
+ params=metadata.json_dict,
+ files={"audio": audio}
+ )
+
+ if resp.status_code >= 300:
+ print(resp.text)
+ resp.raise_for_status()
+
+ return AvatarRequest.parse_obj(resp.json())
+
+ def delete_video(self, video_id: str) -> str:
+ resp = self.http_client.delete(ApiRoute.video_url(backend_url="", video_id=video_id))
+ resp.raise_for_status()
+
+ return resp.text diff --git a/avatar-api/avatar_backend_api/tests/ping.wav b/avatar-api/avatar_backend_api/tests/ping.wav new file mode 100644 index 0000000..c0dc31c Binary files /dev/null and b/avatar-api/avatar_backend_api/tests/ping.wav differ diff --git a/avatar-api/avatar_backend_api/tests/test.mp3 b/avatar-api/avatar_backend_api/tests/test.mp3 new file mode 100644 index 0000000..7517a57 Binary files /dev/null and b/avatar-api/avatar_backend_api/tests/test.mp3 differ diff --git a/avatar-api/avatar_backend_api/tests/test_api_types.py b/avatar-api/avatar_backend_api/tests/test_api_types.py new file mode 100644 index 0000000..25fd7d6 --- /dev/null +++ b/avatar-api/avatar_backend_api/tests/test_api_types.py @@ -0,0 +1,11 @@
+from unittest import TestCase
+
+from avatar_backend_api.api_types import AvatarModelRequest, BaseAvatar
+
+
+class TestApiTypes(TestCase):
+ def test_model_request_uuid(self):
+ request1 = AvatarModelRequest(avatar=BaseAvatar.Jennifer_355_9415)
+ request2 = AvatarModelRequest(avatar=BaseAvatar.Jennifer_355_9415)
+
+ self.assertNotEqual(request1.video_id, request2.video_id) diff --git a/avatar-api/avatar_backend_api/tests/test_db_client.py b/avatar-api/avatar_backend_api/tests/test_db_client.py new file mode 100644 index 0000000..ad3c757 --- /dev/null +++ b/avatar-api/avatar_backend_api/tests/test_db_client.py @@ -0,0 +1,62 @@
+from unittest import IsolatedAsyncioTestCase
+
+from mtc_api_utils.api_types import FirebaseUser
+
+from avatar_backend_api.api_types import VideoMetadata, AvatarModel, BaseAvatar
+from avatar_backend_api.clients.db_client import AvatarDbClient, AvatarDbException
+
+TEST_DB_DIR = "/tmp/test-dbs/avatar-db.json"
+
+TEST_USER = FirebaseUser.example()
+
+TEST_METADATA = VideoMetadata(
+ video_id="test-id",
+ audio_name="test-video-name",
+ avatar=BaseAvatar.Jennifer_355_9415,
+ avatar_model=AvatarModel.motion_gan,
+)
+
+TEST_METADATA_2 = VideoMetadata(
+ video_id=TEST_METADATA.video_id +
"-2", + audio_name=TEST_METADATA.audio_name + "-2", + avatar=TEST_METADATA.avatar, + avatar_model=TEST_METADATA.avatar_model, +) + +TEST_DB_CLIENT = AvatarDbClient(db_filepath=TEST_DB_DIR) + + +class TestDbClient(IsolatedAsyncioTestCase): + + def setUp(self) -> None: + TEST_DB_CLIENT.db.drop_tables() + + def tearDown(self) -> None: + self.setUp() + + def test_db_client(self): + # Test access nonexistent entries & error handling + self.assertFalse(TEST_DB_CLIENT.video_exists(video_id=TEST_METADATA.video_id, user=TEST_USER)) + self.assertEqual(0, len(TEST_DB_CLIENT.list_videos(user=TEST_USER))) + self.assertRaises(AvatarDbException, lambda: TEST_DB_CLIENT.get_video(video_id=TEST_METADATA.video_id, user=TEST_USER)) + self.assertRaises(AvatarDbException, lambda: TEST_DB_CLIENT.delete_video(TEST_METADATA_2.video_id, user=TEST_USER)) + self.assertRaises(AvatarDbException, lambda: TEST_DB_CLIENT.get_user_from_id(video_id=TEST_METADATA.video_id)) + + # Test successful manipulation of db + TEST_DB_CLIENT.insert_video(video_metadata=TEST_METADATA, user=TEST_USER) + self.assertEqual(1, len(TEST_DB_CLIENT.list_videos(user=TEST_USER))) + self.assertEqual(TEST_METADATA, TEST_DB_CLIENT.get_video(TEST_METADATA.video_id, user=TEST_USER)) + self.assertEqual(TEST_METADATA, TEST_DB_CLIENT.get_video(video_id=TEST_METADATA.video_id, user=TEST_USER)) + + TEST_METADATA.inference_completed = True + TEST_DB_CLIENT.upsert_video(video_metadata=TEST_METADATA, user=TEST_USER) + self.assertEqual(TEST_METADATA, TEST_DB_CLIENT.get_video(video_id=TEST_METADATA.video_id, user=TEST_USER)) + + TEST_DB_CLIENT.insert_video(video_metadata=TEST_METADATA_2, user=TEST_USER) + self.assertEqual(2, len(TEST_DB_CLIENT.list_videos(user=TEST_USER))) + self.assertEqual(TEST_METADATA_2, TEST_DB_CLIENT.get_video(video_id=TEST_METADATA_2.video_id, user=TEST_USER)) + + self.assertEqual(TEST_METADATA_2, TEST_DB_CLIENT.delete_video(TEST_METADATA_2.video_id, user=TEST_USER)) + self.assertEqual(1, len(TEST_DB_CLIENT.list_videos(user=TEST_USER))) + self.assertEqual(TEST_METADATA, TEST_DB_CLIENT.get_video(TEST_METADATA.video_id, user=TEST_USER)) + self.assertEqual(TEST_METADATA, TEST_DB_CLIENT.delete_video(TEST_METADATA.video_id, user=TEST_USER)) diff --git a/avatar-api/avatar_backend_api/tests/test_inference_queue.py b/avatar-api/avatar_backend_api/tests/test_inference_queue.py new file mode 100644 index 0000000..09fb24e --- /dev/null +++ b/avatar-api/avatar_backend_api/tests/test_inference_queue.py @@ -0,0 +1,70 @@ +import shutil +import unittest +from time import sleep +from unittest import TestCase + +from fastapi import UploadFile +from mtc_api_utils.api_types import FirebaseUser + +from avatar_backend_api.api_types import AvatarModelRequest, BaseAvatar +from avatar_backend_api.app import io_client +from avatar_backend_api.background_tools.inference_queue import InferenceQueue, InferenceQueueTask +from avatar_backend_api.clients.io_client import IoClient +from avatar_backend_api.clients.mock_avatar_model_client import INFERENCE_DELAY_SECONDS +from avatar_backend_api.models.mock_avatar_model import MockAvatarModel +from avatar_backend_api.tests.test_io_client import TEST_VIDEO_BASE_PATH, TEST_AUDIO_BASE_PATH + +TEST_AUDIO_DIR = "/tmp/test-audio" + +TEST_UPLOAD_FILE = UploadFile( + filename="fest-audio-name", + file=None, +) + +TEST_TASK = InferenceQueueTask( + user=FirebaseUser.example(), + request=AvatarModelRequest( + video_id="test-id", + avatar=BaseAvatar.Jennifer_355_9415.value, + ) +) + +TEST_TASK_2 = InferenceQueueTask( + 
user=TEST_TASK.user, + request=AvatarModelRequest( + video_id=TEST_TASK.request.video_id + "-2", + avatar=TEST_TASK.request.avatar, + ) +) + + +class TestInferenceQueue(TestCase): + + def setUp(self) -> None: + test_io_client = IoClient(user_dir_mode=False, audio_base_path=TEST_AUDIO_BASE_PATH, video_base_path=TEST_VIDEO_BASE_PATH) + + self.inf_queue = InferenceQueue( + model=MockAvatarModel(io_client=io_client), + io_client=test_io_client, + audio_input_dir=TEST_AUDIO_DIR, + daemon_worker=True, + ) + + def tearDown(self) -> None: + shutil.rmtree(path=TEST_AUDIO_DIR) + + def test_inference_queue(self): + self.assertEqual(0, self.inf_queue.queue.qsize()) + + self.inf_queue.add_task(TEST_TASK) + self.assertEqual(1, self.inf_queue.queue.qsize()) + + self.inf_queue.add_task(TEST_TASK_2) + self.assertEqual(2, self.inf_queue.queue.qsize()) + + sleep(INFERENCE_DELAY_SECONDS * 3) + self.assertEqual(0, self.inf_queue.queue.qsize(), msg="Expected worker to process both tasks in the allotted time") + + +if __name__ == '__main__': + unittest.main() diff --git a/avatar-api/avatar_backend_api/tests/test_integration.py b/avatar-api/avatar_backend_api/tests/test_integration.py new file mode 100644 index 0000000..6d4061d --- /dev/null +++ b/avatar-api/avatar_backend_api/tests/test_integration.py @@ -0,0 +1,85 @@ +from http import HTTPStatus +from time import sleep +from unittest import TestCase + +from fastapi import HTTPException +from httpx import HTTPStatusError + +from avatar_backend_api.api_types import AvatarModel, AvatarRequest, BaseAvatar +from avatar_backend_api.clients.mock_avatar_model_client import INFERENCE_DELAY_SECONDS +from avatar_backend_api.config import AvatarConfig +from avatar_backend_api.tests.integration_test_client import AvatarModelTestClient +from avatar_backend_api.tests.test_io_client import TEST_FILE_PATH + +TEST_INFERENCE_REQUEST = AvatarRequest( + video_id="test-id", + audio_name="test-video-name", + avatar=BaseAvatar.Jennifer_355_9415.value, + avatar_model=AvatarModel.neural_voice, +) + +TEST_INFERENCE_REQUEST_2 = AvatarRequest( + video_id=TEST_INFERENCE_REQUEST.video_id + "-2", + audio_name=TEST_INFERENCE_REQUEST.audio_name + "-2", + avatar=TEST_INFERENCE_REQUEST.avatar, + avatar_model=TEST_INFERENCE_REQUEST.avatar_model, +) + + +class TestIntegration(TestCase): + client = AvatarModelTestClient() + + @classmethod + def setUpClass(cls) -> None: + cls.client.wait_for_service_readiness() + + def setUp(self) -> None: + try: + self.client.delete_video(video_id=TEST_INFERENCE_REQUEST.video_id) + except (HTTPException, HTTPStatusError): + pass + + try: + self.client.delete_video(video_id=TEST_INFERENCE_REQUEST_2.video_id) + except (HTTPException, HTTPStatusError): + pass + + def tearDown(self) -> None: + self.setUp() + + def test_api_test(self): + self.assertRaises((HTTPException, HTTPStatusError), lambda: self.client.get_video(video_id=TEST_INFERENCE_REQUEST.video_id)) + + with open(TEST_FILE_PATH, "rb") as test_audio: + created_video = self.client.post_audio(audio=test_audio, metadata=TEST_INFERENCE_REQUEST) + self.assertEqual(TEST_INFERENCE_REQUEST.video_id, created_video.video_id) + + self.assertIn(TEST_INFERENCE_REQUEST.video_id, [vid.video_id for vid in self.client.list_videos()]) + + # Wait for result to be available + while True: + status_code, file_bytes = self.client.get_video(created_video.video_id) + self.assertIn(status_code, [HTTPStatus.OK, HTTPStatus.PROCESSING]) + + if status_code == HTTPStatus.OK: + break + + if not AvatarConfig.mockBackend: + 
self.assertGreater(len(file_bytes), 0) + + self.client.delete_video(created_video.video_id) + self.assertNotIn(TEST_INFERENCE_REQUEST.video_id, [vid.video_id for vid in self.client.list_videos()]) + + def test_worker_test(self): + if not AvatarConfig.mockBackend: + self.skipTest("This test should only be performed if the model is mocked, otherwise it takes way too long and the delay is incorrect") + + with open(TEST_FILE_PATH, "rb") as test_audio: + self.client.post_audio(audio=test_audio, metadata=TEST_INFERENCE_REQUEST) + self.client.post_audio(audio=test_audio, metadata=TEST_INFERENCE_REQUEST_2) + + sleep(INFERENCE_DELAY_SECONDS * 3) + video_ids = self.client.list_video_ids() + + self.assertIn(TEST_INFERENCE_REQUEST.video_id, video_ids) + self.assertIn(TEST_INFERENCE_REQUEST_2.video_id, video_ids) diff --git a/avatar-api/avatar_backend_api/tests/test_io_client.py b/avatar-api/avatar_backend_api/tests/test_io_client.py new file mode 100644 index 0000000..3061fde --- /dev/null +++ b/avatar-api/avatar_backend_api/tests/test_io_client.py @@ -0,0 +1,169 @@ +import os.path +import shutil +from enum import Enum +from unittest import TestCase + +from avatar_backend_api.clients.io_client import IoClient, IoClientException +from avatar_backend_api.tests.test_db_client import TEST_USER +from fastapi import UploadFile +from mtc_api_utils.api_types import FirebaseUser + + +class TestFile(Enum): + value: str + test_mp3 = "test.mp3" + ping_wav = "ping.wav" + voice_wav = "voice_test.wav" + + +TEST_FILENAME = TestFile.voice_wav.value + +TEST_BASE_PATH = "/tmp/test" +TEST_AUDIO_BASE_PATH = os.path.join(TEST_BASE_PATH, "audio") +TEST_VIDEO_BASE_PATH = os.path.join(TEST_BASE_PATH, "video") + +TEST_ID = "test-id" +TEST_ID_2 = TEST_ID + "-2" + +TEST_USER_2 = FirebaseUser( + email=TEST_USER.email + "-2", + roles=TEST_USER.roles, +) + +# test_dir represents an absolute path to the test directory, independent of where the test was started from. Use this as a base dir for any path operations.
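+# Worked example (hypothetical checkout path): if this file lives at /repo/avatar-api/avatar_backend_api/tests/test_io_client.py, +# test_dir evaluates to "/repo/avatar-api/avatar_backend_api/tests" and TEST_FILE_PATH points at the voice_test.wav bundled next to it, +# independent of the working directory from which the test runner was invoked.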
+test_dir = os.path.dirname(os.path.realpath(__file__)) +TEST_FILE_PATH = os.path.join(test_dir, TEST_FILENAME) + +TEST_IO_CLIENT = IoClient(user_dir_mode=False, audio_base_path=TEST_AUDIO_BASE_PATH, video_base_path=TEST_VIDEO_BASE_PATH) +TEST_IO_CLIENT_WITH_USER_MODE = IoClient(user_dir_mode=True, audio_base_path=TEST_AUDIO_BASE_PATH, video_base_path=TEST_VIDEO_BASE_PATH) + + +class TestIoClient(TestCase): + def setUp(self) -> None: + shutil.rmtree(TEST_BASE_PATH, ignore_errors=True) + + os.makedirs(TEST_AUDIO_BASE_PATH, exist_ok=True) + os.makedirs(TEST_VIDEO_BASE_PATH, exist_ok=True) + + def tearDown(self) -> None: + shutil.rmtree(TEST_BASE_PATH, ignore_errors=True) + + def test_audio_without_user_dir_mode(self): + client = TEST_IO_CLIENT.audio + + # Expect an error to be raised for passing a user in userless mode + self.assertRaises( + ValueError, + lambda: client.file_exists(video_id=TEST_ID, user=TEST_USER), + ) + + # Test accessing nonexistent file + self.assertFalse(client.file_exists(video_id=TEST_ID)) + self.assertRaises(IoClientException, lambda: client.matching_paths(video_id=TEST_ID)) + self.assertRaises(IoClientException, lambda: client.read_from_disk(video_id=TEST_ID)) + self.assertRaises(IoClientException, lambda: client.delete_file(video_id=TEST_ID)) + + # Test save and access file successfully + with open(TEST_FILE_PATH, "rb") as test_file: + audio_file = UploadFile(filename=TEST_FILE_PATH, file=test_file) + client.save_to_disk(audio=audio_file, video_id=TEST_ID) + + self.assertTrue(client.file_exists(video_id=TEST_ID)) + self.assertEqual(1, len(client.matching_paths(video_id=TEST_ID))) + + with client.read_from_disk(video_id=TEST_ID) as read_file: + self.assertGreater(len(read_file.read()), 0) + + client.delete_file(video_id=TEST_ID) + self.assertFalse(client.file_exists(video_id=TEST_ID)) + + def test_audio_with_user_dir_mode(self): + client = TEST_IO_CLIENT_WITH_USER_MODE.audio + + # Expect an error to be raised for not passing a user in user mode + self.assertRaises( + ValueError, + lambda: client.file_exists(video_id=TEST_ID, user=None), + ) + + # Test accessing nonexistent file + self.assertFalse(client.file_exists(video_id=TEST_ID, user=TEST_USER)) + self.assertFalse(client.file_exists(video_id=TEST_ID, user=TEST_USER_2)) + self.assertRaises(IoClientException, lambda: client.matching_paths(video_id=TEST_ID, user=TEST_USER)) + self.assertRaises(IoClientException, lambda: client.read_from_disk(video_id=TEST_ID, user=TEST_USER)) + self.assertRaises(IoClientException, lambda: client.delete_file(video_id=TEST_ID, user=TEST_USER)) + + # Test save and access file successfully + with open(TEST_FILE_PATH, "rb") as test_file: + audio_file = UploadFile(filename=TEST_FILE_PATH, file=test_file) + client.save_to_disk(audio=audio_file, video_id=TEST_ID, user=TEST_USER) + + self.assertTrue(client.file_exists(video_id=TEST_ID, user=TEST_USER)) + self.assertFalse(client.file_exists(video_id=TEST_ID, user=TEST_USER_2)) + + self.assertEqual(1, len(client.matching_paths(video_id=TEST_ID, user=TEST_USER))) + + with client.read_from_disk(video_id=TEST_ID, user=TEST_USER) as read_file: + self.assertGreater(len(read_file.read()), 0) + + client.delete_file(video_id=TEST_ID, user=TEST_USER) + self.assertFalse(client.file_exists(video_id=TEST_ID, user=TEST_USER)) + + def test_video_without_user_dir_mode(self): + client = TEST_IO_CLIENT.video + + # Expect an error to be raised for passing a user in userless mode + self.assertRaises( + ValueError, + lambda: client.file_exists(video_id=TEST_ID, 
user=TEST_USER), + ) + + # Test accessing nonexistent file + self.assertFalse(client.file_exists(video_id=TEST_ID)) + self.assertRaises(IoClientException, lambda: client.read_from_disk(video_id=TEST_ID)) + self.assertRaises(IoClientException, lambda: client.delete_file(video_id=TEST_ID)) + + # Test save and access file successfully + with open(TEST_FILE_PATH, "rb") as test_file: + client.save_to_disk(video_bytes=test_file, video_id=TEST_ID) # Save as BinaryIO + client.save_to_disk(video_bytes=test_file.read(), video_id=TEST_ID) # Save as bytes + + self.assertTrue(client.file_exists(video_id=TEST_ID)) + + file_response = client.read_from_disk(video_id=TEST_ID) + self.assertEqual(client.video_path(video_id=TEST_ID), file_response.path) + self.assertGreater(file_response.chunk_size, 0) + self.assertIn("video", file_response.media_type) + + client.delete_file(video_id=TEST_ID) + self.assertFalse(client.file_exists(video_id=TEST_ID)) + + def test_video_with_user_dir_mode(self): + client = TEST_IO_CLIENT_WITH_USER_MODE.video + + # Expect an error to be raised for not passing a user in user mode + self.assertRaises( + ValueError, + lambda: client.file_exists(video_id=TEST_ID, user=None), + ) + + # Test accessing nonexistent file + self.assertFalse(client.file_exists(video_id=TEST_ID, user=TEST_USER)) + self.assertRaises(IoClientException, lambda: client.read_from_disk(video_id=TEST_ID, user=TEST_USER)) + self.assertRaises(IoClientException, lambda: client.delete_file(video_id=TEST_ID, user=TEST_USER)) + + # Test save and access file successfully + with open(TEST_FILE_PATH, "rb") as test_file: + client.save_to_disk(video_bytes=test_file, video_id=TEST_ID, user=TEST_USER) # Save as BinaryIO + client.save_to_disk(video_bytes=test_file.read(), video_id=TEST_ID, user=TEST_USER) # Save as bytes + + self.assertTrue(client.file_exists(video_id=TEST_ID, user=TEST_USER)) + self.assertFalse(client.file_exists(video_id=TEST_ID, user=TEST_USER_2)) + + file_response = client.read_from_disk(video_id=TEST_ID, user=TEST_USER) + self.assertEqual(client.video_path(video_id=TEST_ID, user=TEST_USER), file_response.path) + self.assertGreater(file_response.chunk_size, 0) + self.assertIn("video", file_response.media_type) + + client.delete_file(video_id=TEST_ID, user=TEST_USER) + self.assertFalse(client.file_exists(video_id=TEST_ID, user=TEST_USER)) diff --git a/avatar-api/avatar_backend_api/tests/test_model_result_poll_worker.py b/avatar-api/avatar_backend_api/tests/test_model_result_poll_worker.py new file mode 100644 index 0000000..c6f9652 --- /dev/null +++ b/avatar-api/avatar_backend_api/tests/test_model_result_poll_worker.py @@ -0,0 +1,62 @@ +import os.path +import shutil +from unittest import TestCase + +from fastapi import HTTPException + +from avatar_backend_api.api_types import AvatarModel, AvatarModelRequest +from avatar_backend_api.background_tools.model_result_poll_worker import ModelResultPollWorker +from avatar_backend_api.clients.mock_avatar_model_client import MockAvatarModelClient +from avatar_backend_api.tests.test_db_client import TEST_USER, TEST_METADATA, TEST_DB_CLIENT +from avatar_backend_api.tests.test_io_client import TEST_IO_CLIENT_WITH_USER_MODE + +TEST_AVATAR_MODEL = AvatarModel.neural_voice + +TEST_MODEL_REQUEST = AvatarModelRequest(**TEST_METADATA.json_dict) + + +class TestResultPollWorker(ModelResultPollWorker): + """ A test variation of the ModelResultPollWorker which does not call any actions in the background """ + + def _worker(self): + pass + + +class 
TestModelResultPollWorker(TestCase): + + def setUp(self) -> None: + self.poll_worker = TestResultPollWorker( + io_client=TEST_IO_CLIENT_WITH_USER_MODE, + db_client=TEST_DB_CLIENT, + model_clients={TEST_AVATAR_MODEL: MockAvatarModelClient(backend_url="")}, + daemon_worker=True, + ) + + self.poll_worker.db_client.db.drop_tables() + # Clear any leftover test files: shutil.rmtree does not expand glob patterns, so remove the base directories and recreate them empty + shutil.rmtree(self.poll_worker.io_client.audio.base_path, ignore_errors=True) + shutil.rmtree(self.poll_worker.io_client.video.base_path, ignore_errors=True) + os.makedirs(self.poll_worker.io_client.audio.base_path, exist_ok=True) + os.makedirs(self.poll_worker.io_client.video.base_path, exist_ok=True) + + self.client = self.poll_worker.model_clients[TEST_AVATAR_MODEL] + + def test_no_results(self): + def fail_on_being_called(video_id: str, avatar_model: AvatarModel): + self.fail(msg=f"Did not expect any available results for video_id={video_id} and avatar_model={avatar_model.value}") + + self.poll_worker._poll_models(perform_on_available_video=fail_on_being_called) + + def test_result_available(self): + # Required for retrieving user from video_id + self.poll_worker.db_client.insert_video(video_metadata=TEST_METADATA, user=TEST_USER) + + # Simulate inference + self.client.post_audio(audio=(TEST_MODEL_REQUEST.video_id, bytes()), metadata=TEST_MODEL_REQUEST) + + # Retrieve the finished video from the model; it should then be registered for the user and no longer be available at the model + self.poll_worker._retrieve_video_from_model(video_id=TEST_MODEL_REQUEST.video_id, avatar_model=TEST_AVATAR_MODEL) + self.assertTrue(self.poll_worker.db_client.video_exists(video_id=TEST_MODEL_REQUEST.video_id, user=TEST_USER)) + + self.test_no_results() + self.assertRaises( + HTTPException, + lambda: self.poll_worker._retrieve_video_from_model(video_id=TEST_MODEL_REQUEST.video_id, avatar_model=TEST_AVATAR_MODEL), + ) diff --git a/avatar-api/avatar_backend_api/tests/voice_test.wav b/avatar-api/avatar_backend_api/tests/voice_test.wav new file mode 100644 index 0000000..95ce3fe Binary files /dev/null and b/avatar-api/avatar_backend_api/tests/voice_test.wav differ diff --git a/avatar-api/gunicorn.conf.py b/avatar-api/gunicorn.conf.py new file mode 100644 index 0000000..b0a27bf --- /dev/null +++ b/avatar-api/gunicorn.conf.py @@ -0,0 +1,17 @@ +# https://github.com/benoitc/gunicorn/blob/master/examples/example_config.py + +# Bind & deployment + +bind = '0.0.0.0:5000' +reload = False + +# Connections +# The backend should be capable of supporting multiple workers; +# however, initialization is currently an issue when running in multiple threads.
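+# The inline hint below ("if DashboardConfig.debug else 4") sketches the eventual goal of scaling to several workers outside of debug mode; +# until the initialization issue is resolved, concurrency comes from the threads of the single worker instead.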
+workers = 1 # if DashboardConfig.debug else 4 +threads = 4 +backlog = 64 +timeout = 300 + +# Logging +loglevel = 'info' diff --git a/avatar-api/requirements.txt b/avatar-api/requirements.txt new file mode 100644 index 0000000..5c79d8d --- /dev/null +++ b/avatar-api/requirements.txt @@ -0,0 +1,6 @@ +tinydb~=4.7.0 +python-multipart~=0.0.5 +httpx~=0.23.2 +fastapi~=0.95.0 + +git+https://github.com/mediatechnologycenter/api-utils.git@0.1.0 diff --git a/avatar-api/setup.py b/avatar-api/setup.py new file mode 100644 index 0000000..051f5d5 --- /dev/null +++ b/avatar-api/setup.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python +from distutils.core import setup + +setup( + name="avatar_backend_api", + description="The Avatar backend API", + version="0.1.0", + url="https://gitlab.ethz.ch/mtc/video-synthesis/mtc-avatar-api", + author="Media Technology Center (ETH Zürich)", + author_email="mtc@ethz.ch", + package_data={ + "avatar_backend_api": ["py.typed"], + }, + packages=[ + "avatar_backend_api", + "avatar_backend_api.clients", + "avatar_backend_api.models", + "avatar_backend_api.background_tools", + ], + install_requires=[ + "mtc_api_utils @ git+https://github.com/mediatechnologycenter/api-utils.git@0.1.0", + "tinydb~=4.7.0", + "python-multipart~=0.0.5", + "httpx~=0.23.1", + ], + classifiers=[ + "Development Status :: 4 - Beta", + "Intended Audience :: Science/Research", + "Operating System :: POSIX :: Linux", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3.6", + ] +) diff --git a/debug.env b/debug.env new file mode 100644 index 0000000..004cc2c --- /dev/null +++ b/debug.env @@ -0,0 +1,7 @@ +MOCK_BACKEND=False +AUTH_ENABLED=False +CORS_ALLOW_ORIGINS=localhost,localhost:80,localhost:8080,localhost:8081,http://localhost,http://localhost:80,http://localhost:8080,http://localhost:8081 + +#### AVATAR SETTINGS #### +AVAILABLE_AVATARS="" # Comma separated list of all Avatar names +AVATAR_MODELS=motionGan # All options: "neuralVoice,motionGan" diff --git a/docker-compose-debug.yml b/docker-compose-debug.yml new file mode 100644 index 0000000..e8c0088 --- /dev/null +++ b/docker-compose-debug.yml @@ -0,0 +1,27 @@ +version: "3.9" +services: + backend: + image: avatar-backend-v2-debug + container_name: avatar-backend-v2-debug + env_file: + - .env + - debug.env + + neural-voice-model: + image: neural-voice-backend-debug + container_name: neural-voice-backend-debug + env_file: + - .env + - debug.env + + command: + ["conda", "run", "--no-capture-output", "-n", "base", "gunicorn", "-c", "gunicorn.conf.py", "--log-level", "debug", "--chdir", "./neural_voice_backend_api", "-k", "uvicorn.workers.UvicornWorker", "app:app"] + + motion-gan-model: + image: motion-gan-backend-debug + container_name: motion-gan-backend-debug + env_file: + - .env + - debug.env + command: + ["conda", "run", "--no-capture-output", "-n", "base", "gunicorn", "-c", "gunicorn.conf.py", "--log-level", "debug", "--chdir", "./motion_gan_backend_api", "-k", "uvicorn.workers.UvicornWorker", "app:app"] diff --git a/docker-compose-tests.yml b/docker-compose-tests.yml new file mode 100644 index 0000000..b5dcd50 --- /dev/null +++ b/docker-compose-tests.yml @@ -0,0 +1,69 @@ +version: "3.9" +services: + backend: + image: avatar-backend-v2-debug + container_name: avatar-backend-v2-debug + environment: + - MOCK_BACKEND=False + - AUTH_ENABLED=False + - CORS_ALLOW_ORIGINS=localhost,localhost:80,localhost:8080,localhost:8081,http://localhost,http://localhost:80,http://localhost:8080,http://localhost:8081 + restart: "no" + command: + [ + 
"python", + "-m", + "unittest", + "discover", + "--start-directory", + "avatar_backend_api/tests" + ] + + neural-voice-model: + image: neural-voice-backend-debug + container_name: neural-voice-backend-debug + environment: + - MOCK_BACKEND=False + - AUTH_ENABLED=False + - STATIC_INPUT_FILES=Jennifer_355_9415.mp4,Jennifer_355_9415.wav # Only download the smaller of the two avatars + - FEATURE_FILES=Jennifer_355_9415.tar.gz # Only download the smaller of the two avatars + - CORS_ALLOW_ORIGINS=localhost,localhost:80,localhost:8080,localhost:8081,http://localhost,http://localhost:80,http://localhost:8080,http://localhost:8081 + restart: "no" + # Uncomment the following line in order to run the neural-voice-model integration tests instead + command: + [ + "conda", + "run", + "--no-capture-output", + "-n", + "base", + "python", + "-m", + "unittest", + "discover", + "--start-directory", + "neural_voice_backend_api/tests" + ] + + motion-gan-model: + image: motion-gan-backend-debug + container_name: motion-gan-backend-debug + environment: + - MOCK_BACKEND=False + - AUTH_ENABLED=False + - CORS_ALLOW_ORIGINS=localhost,localhost:80,localhost:8080,localhost:8081,http://localhost,http://localhost:80,http://localhost:8080,http://localhost:8081 + restart: "no" + command: + [ + "conda", + "run", + "--no-capture-output", + "-n", + "base", + "python", + "-m", + "unittest", + "discover", + "--start-directory", + "motion_gan_backend_api/tests" + ] + # command: tail -f /dev/null diff --git a/docker-compose.yml b/docker-compose.yml new file mode 100644 index 0000000..ff3b419 --- /dev/null +++ b/docker-compose.yml @@ -0,0 +1,94 @@ +version: "3.9" +services: + backend: + image: avatar-backend + container_name: avatar-backend-v2 + build: + context: avatar-api + ports: + - "5000:5000" + env_file: + - .env + volumes: + - backend-db-dir:/tmp/tinyDB + - ./data/avatar/input_data/audio:/tmp/avatar/input_data/audio + - ./data/avatar/output_data/videos:/tmp/avatar/output_data/videos + restart: unless-stopped + + neural-voice-model: + image: neural-voice-backend + container_name: neural-voice-backend + build: + context: . + dockerfile: Dockerfile-neural-voice + healthcheck: + test: [ "CMD-SHELL", "test -s `which nvidia-smi` && nvidia-smi || exit 1" ] + start_period: 1s + interval: 20s + timeout: 5s + retries: 2 + ports: + - "5001:5000" + env_file: + - .env + volumes: + - ./data/neuralVoice/input_data:/tmp/neuralVoice/input_data # Static files + - ./data/neuralVoice/output_data:/tmp/neuralVoice/output_data # Static files + - ./data/neuralVoice/input_data/audio:/tmp/neuralVoice/input_data/audio # Model input + - ./data/neuralVoice/output_data/videos:/tmp/neuralVoice/output_data/videos # Model output + - neural-voice-torch-hub:/root/.cache/torch/hub # External model cache + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: 1 + capabilities: [ gpu ] + restart: unless-stopped + + motion-gan-model: + image: motion-gan-backend + container_name: motion-gan-backend + build: + context: . 
+ dockerfile: Dockerfile-motion-gan + healthcheck: + test: [ "CMD-SHELL", "test -s `which nvidia-smi` && nvidia-smi || exit 1" ] + start_period: 1s + interval: 20s + timeout: 5s + retries: 2 + ports: + - "5002:5000" + env_file: + - .env + volumes: + - ./data/motionGan/checkpoints:/tmp/motionGan/checkpoints # Model weights + - ./data/motionGan/input_data/video:/tmp/motionGan/input_data/video # Input data + - ./data/motionGan/input_data/audio:/tmp/motionGan/input_data/audio # Model input + - ./data/motionGan/output_data/videos:/tmp/motionGan/output_data/videos # Model output + - motion-gan-torch-hub:/root/.cache/torch/hub # External model cache + deploy: + resources: + reservations: + devices: + - driver: nvidia + count: 1 + capabilities: [ gpu ] + restart: unless-stopped + shm_size: '2gb' + +volumes: + backend-db-dir: { } + # backend-audio-input-dir: { } + # backend-video-output-dir: { } + # neural-voice-input-data: { } + # neural-voice-output-data: { } + # neural-voice-input-audio: { } + # neural-voice-output-video: { } + neural-voice-torch-hub: { } + # motion-gan-checkpoints: { } + # motion-gan-input-video: { } + # motion-gan-input-audio: { } + # motion-gan-output-video: { } + motion-gan-torch-hub: { } diff --git a/figures/logo.png b/figures/logo.png new file mode 100644 index 0000000..793ee0a Binary files /dev/null and b/figures/logo.png differ diff --git a/figures/pipeline_NVP.png b/figures/pipeline_NVP.png new file mode 100644 index 0000000..7328917 Binary files /dev/null and b/figures/pipeline_NVP.png differ diff --git a/figures/pipeline_motiongan.gif b/figures/pipeline_motiongan.gif new file mode 100644 index 0000000..aa5a243 Binary files /dev/null and b/figures/pipeline_motiongan.gif differ diff --git a/motion-gan-pipeline/.env b/motion-gan-pipeline/.env new file mode 100644 index 0000000..2cdf2a9 --- /dev/null +++ b/motion-gan-pipeline/.env @@ -0,0 +1,18 @@ +##### GENERAL ENVS ##### +GPU_SUPPORTED=True +GPU_ENABLED=True + +##### AUTH ENVS ##### +AUTH_ENABLED=False +FIREBASE_ADMIN_CREDENTIALS_URL=*** +REQUIRED_AUTH_ROLES=video-synthesis + +##### PATHS ##### +INPUT_DATA_DOWNLOAD_TARGET="../neural-code/input_data/video/" +OUTPUT_DATA_DOWNLOAD_TARGET="../neural-code/output_data/features" + +##### BACKEND ENVS ##### +FFMPEG_DOCKER=True # Python will use a different version of ffmpeg, therefore we need to redirect to /usr/bin/ffmpeg + +##### REACT ENVS ##### +REACT_APP_ENVIRONMENT=prod diff --git a/motion-gan-pipeline/AUTHORS.txt b/motion-gan-pipeline/AUTHORS.txt new file mode 100644 index 0000000..df870c1 --- /dev/null +++ b/motion-gan-pipeline/AUTHORS.txt @@ -0,0 +1,7 @@ +Contributions: + +* Alberto Pennino : Research and backend development. +* Thomas Steinmann : Frontend and API development. +* Marc Willhaus : Frontend development. +* Clara Labrador-Fernandez : Supervision. + diff --git a/motion-gan-pipeline/GFPGAN/.github/workflows/no-response.yml b/motion-gan-pipeline/GFPGAN/.github/workflows/no-response.yml new file mode 100644 index 0000000..fa702ee --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/.github/workflows/no-response.yml @@ -0,0 +1,33 @@ +name: No Response + +# TODO: it does not seem to work +# Modified from: https://raw.githubusercontent.com/github/docs/main/.github/workflows/no-response.yaml + +# **What it does**: Closes issues that don't have enough information to be actionable. +# **Why we have it**: To remove the need for maintainers to remember to check back on issues periodically +# to see if contributors have responded.
+# **Who does it impact**: Everyone that works on docs or docs-internal. + +on: + issue_comment: + types: [created] + + schedule: + # Schedule for five minutes after the hour every hour + - cron: '5 * * * *' + +jobs: + noResponse: + runs-on: ubuntu-latest + steps: + - uses: lee-dohm/no-response@v0.5.0 + with: + token: ${{ github.token }} + closeComment: > + This issue has been automatically closed because there has been no response + to our request for more information from the original author. With only the + information that is currently in the issue, we don't have enough information + to take action. Please reach out if you have or find the answers we need so + that we can investigate further. + If you still have questions, please improve your description and re-open it. + Thanks :-) diff --git a/motion-gan-pipeline/GFPGAN/.github/workflows/publish-pip.yml b/motion-gan-pipeline/GFPGAN/.github/workflows/publish-pip.yml new file mode 100644 index 0000000..71190fb --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/.github/workflows/publish-pip.yml @@ -0,0 +1,29 @@ +name: PyPI Publish + +on: push + +jobs: + build-n-publish: + runs-on: ubuntu-latest + if: startsWith(github.event.ref, 'refs/tags') + + steps: + - uses: actions/checkout@v2 + - name: Set up Python 3.8 + uses: actions/setup-python@v1 + with: + python-version: 3.8 + - name: Upgrade pip + run: pip install pip --upgrade + - name: Install PyTorch (cpu) + run: pip install torch==1.7.0+cpu torchvision==0.8.1+cpu -f https://download.pytorch.org/whl/torch_stable.html + - name: Install dependencies + run: pip install -r requirements.txt + - name: Build and install + run: rm -rf .eggs && pip install -e . + - name: Build for distribution + run: python setup.py sdist bdist_wheel + - name: Publish distribution to PyPI + uses: pypa/gh-action-pypi-publish@master + with: + password: ${{ secrets.PYPI_API_TOKEN }} diff --git a/motion-gan-pipeline/GFPGAN/.github/workflows/pylint.yml b/motion-gan-pipeline/GFPGAN/.github/workflows/pylint.yml new file mode 100644 index 0000000..89318e4 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/.github/workflows/pylint.yml @@ -0,0 +1,30 @@ +name: PyLint + +on: [push, pull_request] + +jobs: + build: + + runs-on: ubuntu-latest + strategy: + matrix: + python-version: [3.8] + + steps: + - uses: actions/checkout@v2 + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install codespell flake8 isort yapf + + - name: Lint + run: | + codespell + flake8 . 
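+ # The two checks below run in check/diff mode only, so the lint job fails on violations without modifying any files: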
+ isort --check-only --diff gfpgan/ scripts/ inference_gfpgan.py setup.py + yapf -r -d gfpgan/ scripts/ inference_gfpgan.py setup.py diff --git a/motion-gan-pipeline/GFPGAN/.github/workflows/release.yml b/motion-gan-pipeline/GFPGAN/.github/workflows/release.yml new file mode 100644 index 0000000..1b8f35f --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/.github/workflows/release.yml @@ -0,0 +1,41 @@ +name: release +on: + push: + tags: + - '*' + +jobs: + build: + permissions: write-all + name: Create Release + runs-on: ubuntu-latest + steps: + - name: Checkout code + uses: actions/checkout@v2 + - name: Create Release + id: create_release + uses: actions/create-release@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + tag_name: ${{ github.ref }} + release_name: GFPGAN ${{ github.ref }} Release Note + body: | + 🚀 See you again 😸 + 🚀Have a nice day 😸 and happy everyday 😃 + 🚀 Long time no see ☄️ + + ✨ **Highlights** + ✅ [Features] Support ... + + 🐛 **Bug Fixes** + + 🌴 **Improvements** + + 📢📢📢 + +

+ +

+ draft: true + prerelease: false diff --git a/motion-gan-pipeline/GFPGAN/.gitignore b/motion-gan-pipeline/GFPGAN/.gitignore new file mode 100644 index 0000000..8151890 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/.gitignore @@ -0,0 +1,139 @@ +# ignored folders +datasets/* +experiments/* +results/* +tb_logger/* +wandb/* +tmp/* + +version.py + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +pip-wheel-metadata/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +.python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ diff --git a/motion-gan-pipeline/GFPGAN/.pre-commit-config.yaml b/motion-gan-pipeline/GFPGAN/.pre-commit-config.yaml new file mode 100644 index 0000000..d221d29 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/.pre-commit-config.yaml @@ -0,0 +1,46 @@ +repos: + # flake8 + - repo: https://github.com/PyCQA/flake8 + rev: 3.8.3 + hooks: + - id: flake8 + args: ["--config=setup.cfg", "--ignore=W504, W503"] + + # modify known_third_party + - repo: https://github.com/asottile/seed-isort-config + rev: v2.2.0 + hooks: + - id: seed-isort-config + + # isort + - repo: https://github.com/timothycrosley/isort + rev: 5.2.2 + hooks: + - id: isort + + # yapf + - repo: https://github.com/pre-commit/mirrors-yapf + rev: v0.30.0 + hooks: + - id: yapf + + # codespell + - repo: https://github.com/codespell-project/codespell + rev: v2.1.0 + hooks: + - id: codespell + + # pre-commit-hooks + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v3.2.0 + hooks: + - id: trailing-whitespace # Trim trailing whitespace + - id: check-yaml # Attempt to load all yaml files to verify syntax + - id: check-merge-conflict # Check for files that contain merge conflict strings + - id: double-quote-string-fixer # Replace double quoted strings with single quoted strings + - id: end-of-file-fixer # Make sure files end in a newline and only a newline + - id: requirements-txt-fixer # Sort entries in requirements.txt and remove incorrect entry for pkg-resources==0.0.0 + - id: fix-encoding-pragma # Remove the coding pragma: # -*- coding: utf-8 -*- + args: ["--remove"] + - id: mixed-line-ending # Replace or check mixed line ending + args: ["--fix=lf"] diff --git a/motion-gan-pipeline/GFPGAN/CODE_OF_CONDUCT.md b/motion-gan-pipeline/GFPGAN/CODE_OF_CONDUCT.md new file mode 100644 index 0000000..e8cc4da --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/CODE_OF_CONDUCT.md @@ -0,0 +1,128 @@ +# Contributor Covenant Code of Conduct + +## Our Pledge + +We as members, contributors, and leaders pledge to make participation in our +community a harassment-free experience for everyone, regardless of age, body +size, visible or invisible disability, ethnicity, sex characteristics, gender +identity and expression, level of experience, education, socio-economic status, +nationality, personal appearance, race, religion, or sexual identity +and orientation. + +We pledge to act and interact in ways that contribute to an open, welcoming, +diverse, inclusive, and healthy community. 
+ +## Our Standards + +Examples of behavior that contributes to a positive environment for our +community include: + +* Demonstrating empathy and kindness toward other people +* Being respectful of differing opinions, viewpoints, and experiences +* Giving and gracefully accepting constructive feedback +* Accepting responsibility and apologizing to those affected by our mistakes, + and learning from the experience +* Focusing on what is best not just for us as individuals, but for the + overall community + +Examples of unacceptable behavior include: + +* The use of sexualized language or imagery, and sexual attention or + advances of any kind +* Trolling, insulting or derogatory comments, and personal or political attacks +* Public or private harassment +* Publishing others' private information, such as a physical or email + address, without their explicit permission +* Other conduct which could reasonably be considered inappropriate in a + professional setting + +## Enforcement Responsibilities + +Community leaders are responsible for clarifying and enforcing our standards of +acceptable behavior and will take appropriate and fair corrective action in +response to any behavior that they deem inappropriate, threatening, offensive, +or harmful. + +Community leaders have the right and responsibility to remove, edit, or reject +comments, commits, code, wiki edits, issues, and other contributions that are +not aligned to this Code of Conduct, and will communicate reasons for moderation +decisions when appropriate. + +## Scope + +This Code of Conduct applies within all community spaces, and also applies when +an individual is officially representing the community in public spaces. +Examples of representing our community include using an official e-mail address, +posting via an official social media account, or acting as an appointed +representative at an online or offline event. + +## Enforcement + +Instances of abusive, harassing, or otherwise unacceptable behavior may be +reported to the community leaders responsible for enforcement at +xintao.wang@outlook.com or xintaowang@tencent.com. +All complaints will be reviewed and investigated promptly and fairly. + +All community leaders are obligated to respect the privacy and security of the +reporter of any incident. + +## Enforcement Guidelines + +Community leaders will follow these Community Impact Guidelines in determining +the consequences for any action they deem in violation of this Code of Conduct: + +### 1. Correction + +**Community Impact**: Use of inappropriate language or other behavior deemed +unprofessional or unwelcome in the community. + +**Consequence**: A private, written warning from community leaders, providing +clarity around the nature of the violation and an explanation of why the +behavior was inappropriate. A public apology may be requested. + +### 2. Warning + +**Community Impact**: A violation through a single incident or series +of actions. + +**Consequence**: A warning with consequences for continued behavior. No +interaction with the people involved, including unsolicited interaction with +those enforcing the Code of Conduct, for a specified period of time. This +includes avoiding interactions in community spaces as well as external channels +like social media. Violating these terms may lead to a temporary or +permanent ban. + +### 3. Temporary Ban + +**Community Impact**: A serious violation of community standards, including +sustained inappropriate behavior. 
+ +**Consequence**: A temporary ban from any sort of interaction or public +communication with the community for a specified period of time. No public or +private interaction with the people involved, including unsolicited interaction +with those enforcing the Code of Conduct, is allowed during this period. +Violating these terms may lead to a permanent ban. + +### 4. Permanent Ban + +**Community Impact**: Demonstrating a pattern of violation of community +standards, including sustained inappropriate behavior, harassment of an +individual, or aggression toward or disparagement of classes of individuals. + +**Consequence**: A permanent ban from any sort of public interaction within +the community. + +## Attribution + +This Code of Conduct is adapted from the [Contributor Covenant][homepage], +version 2.0, available at +https://www.contributor-covenant.org/version/2/0/code_of_conduct.html. + +Community Impact Guidelines were inspired by [Mozilla's code of conduct +enforcement ladder](https://github.com/mozilla/diversity). + +[homepage]: https://www.contributor-covenant.org + +For answers to common questions about this code of conduct, see the FAQ at +https://www.contributor-covenant.org/faq. Translations are available at +https://www.contributor-covenant.org/translations. diff --git a/motion-gan-pipeline/GFPGAN/Comparisons.md b/motion-gan-pipeline/GFPGAN/Comparisons.md new file mode 100644 index 0000000..1542d4c --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/Comparisons.md @@ -0,0 +1,24 @@ +# Comparisons + +## Comparisons among different model versions + +Note that V1.3 is not always better than V1.2. You may need to try different models based on your purpose and inputs. + +| Version | Strengths | Weaknesses | +| :---: | :---: | :---: | +|V1.3 | ✓ natural outputs
✓ better results on very low-quality inputs <br> ✓ work on relatively high-quality inputs <br> ✓ can have repeated (twice) restorations | ✗ not very sharp <br> ✗ have a slight change on identity | +|V1.2 | ✓ sharper output <br>
✓ with beauty makeup | ✗ some outputs are unnatural| + +For the following images, you may need to **zoom in** for comparing details, or **click the image** to see in the full size. + +| Input | V1 | V1.2 | V1.3 +| :---: | :---: | :---: | :---: | +|![019_Anne_Hathaway_01_00](https://user-images.githubusercontent.com/17445847/153762146-96b25999-4ddd-42a5-a3fe-bb90565f4c4f.png)| ![](https://user-images.githubusercontent.com/17445847/153762256-ef41e749-5a27-495c-8a9c-d8403be55869.png) | ![](https://user-images.githubusercontent.com/17445847/153762297-d41582fc-6253-4e7e-a1ce-4dc237ae3bf3.png) | ![](https://user-images.githubusercontent.com/17445847/153762215-e0535e94-b5ba-426e-97b5-35c00873604d.png) | +| ![106_Harry_Styles_00_00](https://user-images.githubusercontent.com/17445847/153789040-632c0eda-c15a-43e9-a63c-9ead64f92d4a.png) | ![](https://user-images.githubusercontent.com/17445847/153789172-93cd4980-5318-4633-a07e-1c8f8064ff89.png) | ![](https://user-images.githubusercontent.com/17445847/153789185-f7b268a7-d1db-47b0-ae4a-335e5d657a18.png) | ![](https://user-images.githubusercontent.com/17445847/153789198-7c7f3bca-0ef0-4494-92f0-20aa6f7d7464.png)| +| ![076_Paris_Hilton_00_00](https://user-images.githubusercontent.com/17445847/153789607-86387770-9db8-441f-b08a-c9679b121b85.png) | ![](https://user-images.githubusercontent.com/17445847/153789619-e56b438a-78a0-425d-8f44-ec4692a43dda.png) | ![](https://user-images.githubusercontent.com/17445847/153789633-5b28f778-3b7f-4e08-8a1d-740ca6e82d8a.png) | ![](https://user-images.githubusercontent.com/17445847/153789645-bc623f21-b32d-4fc3-bfe9-61203407a180.png)| +| ![008_George_Clooney_00_00](https://user-images.githubusercontent.com/17445847/153790017-0c3ca94d-1c9d-4a0e-b539-ab12d4da98ff.png) | ![](https://user-images.githubusercontent.com/17445847/153790028-fb0d38ab-399d-4a30-8154-2dcd72ca90e8.png) | ![](https://user-images.githubusercontent.com/17445847/153790044-1ef68e34-6120-4439-a5d9-0b6cdbe9c3d0.png) | ![](https://user-images.githubusercontent.com/17445847/153790059-a8d3cece-8989-4e9a-9ffe-903e1690cfd6.png)| +| ![057_Madonna_01_00](https://user-images.githubusercontent.com/17445847/153790624-2d0751d0-8fb4-4806-be9d-71b833c2c226.png) | ![](https://user-images.githubusercontent.com/17445847/153790639-7eb870e5-26b2-41dc-b139-b698bb40e6e6.png) | ![](https://user-images.githubusercontent.com/17445847/153790651-86899b7a-a1b6-4242-9e8a-77b462004998.png) | ![](https://user-images.githubusercontent.com/17445847/153790655-c8f6c25b-9b4e-4633-b16f-c43da86cff8f.png)| +| ![044_Amy_Schumer_01_00](https://user-images.githubusercontent.com/17445847/153790811-3fb4fc46-5b4f-45fe-8fcb-a128de2bfa60.png) | ![](https://user-images.githubusercontent.com/17445847/153790817-d45aa4ff-bfc4-4163-b462-75eef9426fab.png) | ![](https://user-images.githubusercontent.com/17445847/153790824-5f93c3a0-fe5a-42f6-8b4b-5a5de8cd0ac3.png) | ![](https://user-images.githubusercontent.com/17445847/153790835-0edf9944-05c7-41c4-8581-4dc5ffc56c9d.png)| +| ![012_Jackie_Chan_01_00](https://user-images.githubusercontent.com/17445847/153791176-737b016a-e94f-4898-8db7-43e7762141c9.png) | ![](https://user-images.githubusercontent.com/17445847/153791183-2f25a723-56bf-4cd5-aafe-a35513a6d1c5.png) | ![](https://user-images.githubusercontent.com/17445847/153791194-93416cf9-2b58-4e70-b806-27e14c58d4fd.png) | ![](https://user-images.githubusercontent.com/17445847/153791202-aa98659c-b702-4bce-9c47-a2fa5eccc5ae.png)| + + diff --git a/motion-gan-pipeline/GFPGAN/FAQ.md b/motion-gan-pipeline/GFPGAN/FAQ.md new 
file mode 100644 index 0000000..e4d5a49 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/FAQ.md @@ -0,0 +1,7 @@ +# FAQ + +1. **How to finetune the GFPGANCleanv1-NoCE-C2 (v1.2) model** + +**A:** 1) The GFPGANCleanv1-NoCE-C2 (v1.2) model uses the *clean* architecture, which is more friendly for deploying. +2) This model is not directly trained. Instead, it is converted from another *bilinear* model. +3) If you want to finetune the GFPGANCleanv1-NoCE-C2 (v1.2), you need to finetune its original *bilinear* model, and then do the conversion. diff --git a/motion-gan-pipeline/GFPGAN/LICENSE b/motion-gan-pipeline/GFPGAN/LICENSE new file mode 100644 index 0000000..24384c0 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/LICENSE @@ -0,0 +1,351 @@ +Tencent is pleased to support the open source community by making GFPGAN available. + +Copyright (C) 2021 THL A29 Limited, a Tencent company. All rights reserved. + +GFPGAN is licensed under the Apache License Version 2.0 except for the third-party components listed below. + + +Terms of the Apache License Version 2.0: +--------------------------------------------- +Apache License + +Version 2.0, January 2004 + +http://www.apache.org/licenses/ + +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION +1. Definitions. + +“License” shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. + +“Licensor” shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. + +“Legal Entity” shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, “control” means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. + +“You” (or “Your”) shall mean an individual or Legal Entity exercising permissions granted by this License. + +“Source” form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. + +“Object” form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. + +“Work” shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). + +“Derivative Works” shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. 
+ +“Contribution” shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, “submitted” means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as “Not a Contribution.” + +“Contributor” shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: + +You must give any other recipients of the Work or Derivative Works a copy of this License; and + +You must cause any modified files to carry prominent notices stating that You changed the files; and + +You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and + +If the Work includes a “NOTICE” text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. + +You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an “AS IS” BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + + + +Other dependencies and licenses: + + +Open Source Software licensed under the Apache 2.0 license and Other Licenses of the Third-Party Components therein: +--------------------------------------------- +1. basicsr +Copyright 2018-2020 BasicSR Authors + + +This BasicSR project is released under the Apache 2.0 license. + +A copy of Apache 2.0 is included in this file. + +StyleGAN2 +The codes are modified from the repository stylegan2-pytorch. Many thanks to the author - Kim Seonghyeon 😊 for translating from the official TensorFlow codes to PyTorch ones. Here is the license of stylegan2-pytorch. +The official repository is https://github.com/NVlabs/stylegan2, and here is the NVIDIA license. +DFDNet +The codes are largely modified from the repository DFDNet. Their license is Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License. + +Terms of the Nvidia License: +--------------------------------------------- + +1. Definitions + +"Licensor" means any person or entity that distributes its Work. + +"Software" means the original work of authorship made available under +this License. + +"Work" means the Software and any additions to or derivative works of +the Software that are made available under this License. + +"Nvidia Processors" means any central processing unit (CPU), graphics +processing unit (GPU), field-programmable gate array (FPGA), +application-specific integrated circuit (ASIC) or any combination +thereof designed, made, sold, or provided by Nvidia or its affiliates. + +The terms "reproduce," "reproduction," "derivative works," and +"distribution" have the meaning as provided under U.S. copyright law; +provided, however, that for the purposes of this License, derivative +works shall not include works that remain separable from, or merely +link (or bind by name) to the interfaces of, the Work. + +Works, including the Software, are "made available" under this License +by including in or with the Work either (a) a copyright notice +referencing the applicability of this License to the Work, or (b) a +copy of this License. + +2. License Grants + + 2.1 Copyright Grant. 
Subject to the terms and conditions of this + License, each Licensor grants to you a perpetual, worldwide, + non-exclusive, royalty-free, copyright license to reproduce, + prepare derivative works of, publicly display, publicly perform, + sublicense and distribute its Work and any resulting derivative + works in any form. + +3. Limitations + + 3.1 Redistribution. You may reproduce or distribute the Work only + if (a) you do so under this License, (b) you include a complete + copy of this License with your distribution, and (c) you retain + without modification any copyright, patent, trademark, or + attribution notices that are present in the Work. + + 3.2 Derivative Works. You may specify that additional or different + terms apply to the use, reproduction, and distribution of your + derivative works of the Work ("Your Terms") only if (a) Your Terms + provide that the use limitation in Section 3.3 applies to your + derivative works, and (b) you identify the specific derivative + works that are subject to Your Terms. Notwithstanding Your Terms, + this License (including the redistribution requirements in Section + 3.1) will continue to apply to the Work itself. + + 3.3 Use Limitation. The Work and any derivative works thereof only + may be used or intended for use non-commercially. The Work or + derivative works thereof may be used or intended for use by Nvidia + or its affiliates commercially or non-commercially. As used herein, + "non-commercially" means for research or evaluation purposes only. + + 3.4 Patent Claims. If you bring or threaten to bring a patent claim + against any Licensor (including any claim, cross-claim or + counterclaim in a lawsuit) to enforce any patents that you allege + are infringed by any Work, then your rights under this License from + such Licensor (including the grants in Sections 2.1 and 2.2) will + terminate immediately. + + 3.5 Trademarks. This License does not grant any rights to use any + Licensor's or its affiliates' names, logos, or trademarks, except + as necessary to reproduce the notices described in this License. + + 3.6 Termination. If you violate any term of this License, then your + rights under this License (including the grants in Sections 2.1 and + 2.2) will terminate immediately. + +4. Disclaimer of Warranty. + +THE WORK IS PROVIDED "AS IS" WITHOUT WARRANTIES OR CONDITIONS OF ANY +KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WARRANTIES OR CONDITIONS OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, TITLE OR +NON-INFRINGEMENT. YOU BEAR THE RISK OF UNDERTAKING ANY ACTIVITIES UNDER +THIS LICENSE. + +5. Limitation of Liability. + +EXCEPT AS PROHIBITED BY APPLICABLE LAW, IN NO EVENT AND UNDER NO LEGAL +THEORY, WHETHER IN TORT (INCLUDING NEGLIGENCE), CONTRACT, OR OTHERWISE +SHALL ANY LICENSOR BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY DIRECT, +INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES ARISING OUT OF +OR RELATED TO THIS LICENSE, THE USE OR INABILITY TO USE THE WORK +(INCLUDING BUT NOT LIMITED TO LOSS OF GOODWILL, BUSINESS INTERRUPTION, +LOST PROFITS OR DATA, COMPUTER FAILURE OR MALFUNCTION, OR ANY OTHER +COMMERCIAL DAMAGES OR LOSSES), EVEN IF THE LICENSOR HAS BEEN ADVISED OF +THE POSSIBILITY OF SUCH DAMAGES. 
+ +MIT License + +Copyright (c) 2019 Kim Seonghyeon + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + + + +Open Source Software licensed under the BSD 3-Clause license: +--------------------------------------------- +1. torchvision +Copyright (c) Soumith Chintala 2016, +All rights reserved. + +2. torch +Copyright (c) 2016- Facebook, Inc (Adam Paszke) +Copyright (c) 2014- Facebook, Inc (Soumith Chintala) +Copyright (c) 2011-2014 Idiap Research Institute (Ronan Collobert) +Copyright (c) 2012-2014 Deepmind Technologies (Koray Kavukcuoglu) +Copyright (c) 2011-2012 NEC Laboratories America (Koray Kavukcuoglu) +Copyright (c) 2011-2013 NYU (Clement Farabet) +Copyright (c) 2006-2010 NEC Laboratories America (Ronan Collobert, Leon Bottou, Iain Melvin, Jason Weston) +Copyright (c) 2006 Idiap Research Institute (Samy Bengio) +Copyright (c) 2001-2004 Idiap Research Institute (Ronan Collobert, Samy Bengio, Johnny Mariethoz) + + +Terms of the BSD 3-Clause License: +--------------------------------------------- +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: + +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. + +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. + +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS “AS IS” AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ + + +Open Source Software licensed under the BSD 3-Clause License and Other Licenses of the Third-Party Components therein: +--------------------------------------------- +1. numpy +Copyright (c) 2005-2020, NumPy Developers. +All rights reserved. + +A copy of BSD 3-Clause License is included in this file. + +The NumPy repository and source distributions bundle several libraries that are +compatibly licensed. We list these here. + +Name: Numpydoc +Files: doc/sphinxext/numpydoc/* +License: BSD-2-Clause + For details, see doc/sphinxext/LICENSE.txt + +Name: scipy-sphinx-theme +Files: doc/scipy-sphinx-theme/* +License: BSD-3-Clause AND PSF-2.0 AND Apache-2.0 + For details, see doc/scipy-sphinx-theme/LICENSE.txt + +Name: lapack-lite +Files: numpy/linalg/lapack_lite/* +License: BSD-3-Clause + For details, see numpy/linalg/lapack_lite/LICENSE.txt + +Name: tempita +Files: tools/npy_tempita/* +License: MIT + For details, see tools/npy_tempita/license.txt + +Name: dragon4 +Files: numpy/core/src/multiarray/dragon4.c +License: MIT + For license text, see numpy/core/src/multiarray/dragon4.c + + + +Open Source Software licensed under the MIT license: +--------------------------------------------- +1. facexlib +Copyright (c) 2020 Xintao Wang + +2. opencv-python +Copyright (c) Olli-Pekka Heinisuo +Please note that only files in cv2 package are used. + + +Terms of the MIT License: +--------------------------------------------- +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + + + +Open Source Software licensed under the MIT license and Other Licenses of the Third-Party Components therein: +--------------------------------------------- +1. tqdm +Copyright (c) 2013 noamraph + +`tqdm` is a product of collaborative work. +Unless otherwise stated, all authors (see commit logs) retain copyright +for their respective work, and release the work under the MIT licence +(text below). + +Exceptions or notable authors are listed below +in reverse chronological order: + +* files: * + MPLv2.0 2015-2020 (c) Casper da Costa-Luis + [casperdcl](https://github.com/casperdcl). +* files: tqdm/_tqdm.py + MIT 2016 (c) [PR #96] on behalf of Google Inc. +* files: tqdm/_tqdm.py setup.py README.rst MANIFEST.in .gitignore + MIT 2013 (c) Noam Yorav-Raphael, original author. + +[PR #96]: https://github.com/tqdm/tqdm/pull/96 + + +Mozilla Public Licence (MPL) v. 2.0 - Exhibit A +----------------------------------------------- + +This Source Code Form is subject to the terms of the +Mozilla Public License, v. 2.0. 
+If a copy of the MPL was not distributed with this file, +You can obtain one at https://mozilla.org/MPL/2.0/. + + +MIT License (MIT) +----------------- + +Copyright (c) 2013 noamraph + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of +the Software, and to permit persons to whom the Software is furnished to do so, +subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/motion-gan-pipeline/GFPGAN/MANIFEST.in b/motion-gan-pipeline/GFPGAN/MANIFEST.in new file mode 100644 index 0000000..bcaa717 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/MANIFEST.in @@ -0,0 +1,8 @@ +include assets/* +include inputs/* +include scripts/*.py +include inference_gfpgan.py +include VERSION +include LICENSE +include requirements.txt +include gfpgan/weights/README.md diff --git a/motion-gan-pipeline/GFPGAN/PaperModel.md b/motion-gan-pipeline/GFPGAN/PaperModel.md new file mode 100644 index 0000000..e9c8bdc --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/PaperModel.md @@ -0,0 +1,76 @@ +# Installation + +We now provide a *clean* version of GFPGAN, which does not require customized CUDA extensions. See [here](README.md#installation) for this easier installation.
+If you want to use the original model in our paper, please follow the instructions below.
+
+1. Clone repo
+
+    ```bash
+    git clone https://github.com/xinntao/GFPGAN.git
+    cd GFPGAN
+    ```
+
+1. Install dependent packages
+
+    As StyleGAN2 uses customized PyTorch C++ extensions, you need to **compile them during installation** or **load them just-in-time (JIT)**.
+    You can refer to [BasicSR-INSTALL.md](https://github.com/xinntao/BasicSR/blob/master/INSTALL.md) for more details.
+
+    **Option 1: Load extensions just-in-time (JIT)** (for those who only want to run simple inference; usually fewer installation issues)
+
+    ```bash
+    # Install basicsr - https://github.com/xinntao/BasicSR
+    # We use BasicSR for both training and inference
+    pip install basicsr
+
+    # Install facexlib - https://github.com/xinntao/facexlib
+    # We use face detection and face restoration helper in the facexlib package
+    pip install facexlib
+
+    pip install -r requirements.txt
+    python setup.py develop
+
+    # remember to set BASICSR_JIT=True before your running commands
+    ```
+
+    **Option 2: Compile extensions during installation** (for those who need to train or run inference many times)
+
+    ```bash
+    # Install basicsr - https://github.com/xinntao/BasicSR
+    # We use BasicSR for both training and inference
+    # Set BASICSR_EXT=True to compile the CUDA extensions in BasicSR - it may take several minutes, please be patient
+    # Add -vvv for detailed log prints
+    BASICSR_EXT=True pip install basicsr -vvv
+
+    # Install facexlib - https://github.com/xinntao/facexlib
+    # We use face detection and face restoration helper in the facexlib package
+    pip install facexlib
+
+    pip install -r requirements.txt
+    python setup.py develop
+    ```
+
+## :zap: Quick Inference
+
+Download pre-trained models: [GFPGANv1.pth](https://github.com/TencentARC/GFPGAN/releases/download/v0.1.0/GFPGANv1.pth)
+
+```bash
+wget https://github.com/TencentARC/GFPGAN/releases/download/v0.1.0/GFPGANv1.pth -P experiments/pretrained_models
+```
+
+- Option 1: Load extensions just-in-time (JIT)
+
+    ```bash
+    BASICSR_JIT=True python inference_gfpgan.py --input inputs/whole_imgs --output results --version 1
+
+    # for aligned images
+    BASICSR_JIT=True python inference_gfpgan.py --input inputs/whole_imgs --output results --version 1 --aligned
+    ```
+
+- Option 2: Extensions were successfully compiled during installation
+
+    ```bash
+    python inference_gfpgan.py --input inputs/whole_imgs --output results --version 1
+
+    # for aligned images
+    python inference_gfpgan.py --input inputs/whole_imgs --output results --version 1 --aligned
+    ```
diff --git a/motion-gan-pipeline/GFPGAN/README.md b/motion-gan-pipeline/GFPGAN/README.md new file mode 100644 index 0000000..285956f --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/README.md @@ -0,0 +1,202 @@
+

+ +

+
+
+ + +[![download](https://img.shields.io/github/downloads/TencentARC/GFPGAN/total.svg)](https://github.com/TencentARC/GFPGAN/releases) +[![PyPI](https://img.shields.io/pypi/v/gfpgan)](https://pypi.org/project/gfpgan/) +[![Open issue](https://img.shields.io/github/issues/TencentARC/GFPGAN)](https://github.com/TencentARC/GFPGAN/issues) +[![Closed issue](https://img.shields.io/github/issues-closed/TencentARC/GFPGAN)](https://github.com/TencentARC/GFPGAN/issues) +[![LICENSE](https://img.shields.io/badge/License-Apache%202.0-blue.svg)](https://github.com/TencentARC/GFPGAN/blob/master/LICENSE) +[![python lint](https://github.com/TencentARC/GFPGAN/actions/workflows/pylint.yml/badge.svg)](https://github.com/TencentARC/GFPGAN/blob/master/.github/workflows/pylint.yml) +[![Publish-pip](https://github.com/TencentARC/GFPGAN/actions/workflows/publish-pip.yml/badge.svg)](https://github.com/TencentARC/GFPGAN/blob/master/.github/workflows/publish-pip.yml) +
+
+
+1. :boom: **Updated** online demo: [![Replicate](https://img.shields.io/static/v1?label=Demo&message=Replicate&color=blue)](https://replicate.com/tencentarc/gfpgan). Here is the [backup](https://replicate.com/xinntao/gfpgan).
+1. :boom: **Updated** online demo: [![Huggingface Gradio](https://img.shields.io/static/v1?label=Demo&message=Huggingface%20Gradio&color=orange)](https://huggingface.co/spaces/Xintao/GFPGAN)
+1. [Colab Demo](https://colab.research.google.com/drive/1sVsoBd9AjckIXThgtZhGrHRfFI6UUYOo) for GFPGAN (another [Colab Demo](https://colab.research.google.com/drive/1Oa1WwKB4M4l1GmR7CtswDVgOCOeSLChA?usp=sharing) for the original paper model)
+
+> :rocket: **Thanks for your interest in our work. You may also want to check our new updates on the *tiny models* for *anime images and videos* in [Real-ESRGAN](https://github.com/xinntao/Real-ESRGAN/blob/master/docs/anime_video_model.md)** :blush:
+
+GFPGAN aims at developing a **Practical Algorithm for Real-world Face Restoration**.
+It leverages rich and diverse priors encapsulated in a pretrained face GAN (*e.g.*, StyleGAN2) for blind face restoration. + +:question: Frequently Asked Questions can be found in [FAQ.md](FAQ.md). + +:triangular_flag_on_post: **Updates** + +- :white_check_mark: Add CodeFormer ([CC BY-NC-SA 4.0 License](https://creativecommons.org/licenses/by-nc-sa/4.0/)) and RestoreFormer. +- :white_check_mark: Add [V1.4 model](https://github.com/TencentARC/GFPGAN/releases/download/v1.3.0/GFPGANv1.4.pth), which produces slightly more details and better identity than V1.3. +- :white_check_mark: Add **[V1.3 model](https://github.com/TencentARC/GFPGAN/releases/download/v1.3.0/GFPGANv1.3.pth)**, which produces **more natural** restoration results, and better results on *very low-quality* / *high-quality* inputs. See more in [Model zoo](#european_castle-model-zoo), [Comparisons.md](Comparisons.md) +- :white_check_mark: Integrated to [Huggingface Spaces](https://huggingface.co/spaces) with [Gradio](https://github.com/gradio-app/gradio). See [Gradio Web Demo](https://huggingface.co/spaces/akhaliq/GFPGAN). +- :white_check_mark: Support enhancing non-face regions (background) with [Real-ESRGAN](https://github.com/xinntao/Real-ESRGAN). +- :white_check_mark: We provide a *clean* version of GFPGAN, which does not require CUDA extensions. +- :white_check_mark: We provide an updated model without colorizing faces. + +--- + +If GFPGAN is helpful in your photos/projects, please help to :star: this repo or recommend it to your friends. Thanks:blush: +Other recommended projects:
+:arrow_forward: [Real-ESRGAN](https://github.com/xinntao/Real-ESRGAN): A practical algorithm for general image restoration
+:arrow_forward: [BasicSR](https://github.com/xinntao/BasicSR): An open-source image and video restoration toolbox
+:arrow_forward: [facexlib](https://github.com/xinntao/facexlib): A collection that provides useful face-related functions
+:arrow_forward: [HandyView](https://github.com/xinntao/HandyView): A PyQt5-based image viewer that is handy for viewing and comparison
+ +--- + +### :book: GFP-GAN: Towards Real-World Blind Face Restoration with Generative Facial Prior + +> [[Paper](https://arxiv.org/abs/2101.04061)]   [[Project Page](https://xinntao.github.io/projects/gfpgan)]   [Demo]
+> [Xintao Wang](https://xinntao.github.io/), [Yu Li](https://yu-li.github.io/), [Honglun Zhang](https://scholar.google.com/citations?hl=en&user=KjQLROoAAAAJ), [Ying Shan](https://scholar.google.com/citations?user=4oXBp9UAAAAJ&hl=en)
+> Applied Research Center (ARC), Tencent PCG + +

+ +

+
+
+---
+
+## :wrench: Dependencies and Installation
+
+- Python >= 3.7 (we recommend [Anaconda](https://www.anaconda.com/download/#linux) or [Miniconda](https://docs.conda.io/en/latest/miniconda.html))
+- [PyTorch >= 1.7](https://pytorch.org/)
+- Optional: NVIDIA GPU + [CUDA](https://developer.nvidia.com/cuda-downloads)
+- Optional: Linux
+
+### Installation
+
+We now provide a *clean* version of GFPGAN, which does not require customized CUDA extensions.
+If you want to use the original model in our paper, please see [PaperModel.md](PaperModel.md) for installation.
+
+1. Clone repo
+
+    ```bash
+    git clone https://github.com/TencentARC/GFPGAN.git
+    cd GFPGAN
+    ```
+
+1. Install dependent packages
+
+    ```bash
+    # Install basicsr - https://github.com/xinntao/BasicSR
+    # We use BasicSR for both training and inference
+    pip install basicsr
+
+    # Install facexlib - https://github.com/xinntao/facexlib
+    # We use face detection and face restoration helper in the facexlib package
+    pip install facexlib
+
+    pip install -r requirements.txt
+    python setup.py develop
+
+    # If you want to enhance the background (non-face) regions with Real-ESRGAN,
+    # you also need to install the realesrgan package
+    pip install realesrgan
+    ```
+
+## :zap: Quick Inference
+
+We take the v1.3 version as an example. More models can be found [here](#european_castle-model-zoo).
+
+Download pre-trained models: [GFPGANv1.3.pth](https://github.com/TencentARC/GFPGAN/releases/download/v1.3.0/GFPGANv1.3.pth)
+
+```bash
+wget https://github.com/TencentARC/GFPGAN/releases/download/v1.3.0/GFPGANv1.3.pth -P experiments/pretrained_models
+```
+
+**Inference!**
+
+```bash
+python inference_gfpgan.py -i inputs/whole_imgs -o results -v 1.3 -s 2
+```
+
+```console
+Usage: python inference_gfpgan.py -i inputs/whole_imgs -o results -v 1.3 -s 2 [options]...
+
+  -h                   show this help
+  -i input             Input image or folder. Default: inputs/whole_imgs
+  -o output            Output folder. Default: results
+  -v version           GFPGAN model version. Options: 1 | 1.2 | 1.3. Default: 1.3
+  -s upscale           The final upsampling scale of the image. Default: 2
+  -bg_upsampler        Background upsampler. Default: realesrgan
+  -bg_tile             Tile size for the background upsampler, 0 for no tiling during testing. Default: 400
+  -suffix              Suffix of the restored faces
+  -only_center_face    Only restore the center face
+  -aligned             Inputs are aligned faces
+  -ext                 Image extension. Options: auto | jpg | png; auto means using the same extension as the input. Default: auto
+```
+
+If you want to use the original model in our paper, please see [PaperModel.md](PaperModel.md) for installation and inference.
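+
+You can also run inference from Python rather than the CLI. The minimal sketch below mirrors how `cog_predict.py` in this repository drives `GFPGANer`; the input and output paths are placeholders, and it assumes you downloaded `GFPGANv1.3.pth` as above.
+
+```python
+import cv2
+from gfpgan import GFPGANer
+
+# Restorer using the "clean" architecture (no customized CUDA extensions required).
+restorer = GFPGANer(
+    model_path='experiments/pretrained_models/GFPGANv1.3.pth',
+    upscale=2,             # final upsampling scale of the whole image
+    arch='clean',
+    channel_multiplier=2,
+    bg_upsampler=None)     # optionally a Real-ESRGAN upsampler; see the sketch after the Model Zoo
+
+img = cv2.imread('inputs/whole_imgs/your_image.png', cv2.IMREAD_COLOR)  # placeholder path
+# enhance() returns the cropped faces, the restored faces, and the full restored image
+_, _, restored_img = restorer.enhance(
+    img, has_aligned=False, only_center_face=False, paste_back=True)
+cv2.imwrite('results/restored.png', restored_img)
+```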
+
+## :european_castle: Model Zoo
+
+| Version | Model Name | Description |
+| :---: | :---: | :---: |
+| V1.3 | [GFPGANv1.3.pth](https://github.com/TencentARC/GFPGAN/releases/download/v1.3.0/GFPGANv1.3.pth) | Based on V1.2; **more natural** restoration results; better results on very low-quality / high-quality inputs. |
+| V1.2 | [GFPGANCleanv1-NoCE-C2.pth](https://github.com/TencentARC/GFPGAN/releases/download/v0.2.0/GFPGANCleanv1-NoCE-C2.pth) | No colorization; no CUDA extensions are required. Trained with more data and pre-processing. |
+| V1 | [GFPGANv1.pth](https://github.com/TencentARC/GFPGAN/releases/download/v0.1.0/GFPGANv1.pth) | The paper model, with colorization. |
+
+The comparisons are in [Comparisons.md](Comparisons.md).
+
+Note that V1.3 is not always better than V1.2. You may need to select different models based on your purpose and inputs.
+
+| Version | Strengths | Weaknesses |
+| :---: | :---: | :---: |
+| V1.3 | ✓ natural outputs<br>✓ better results on very low-quality inputs<br>✓ works on relatively high-quality inputs<br>✓ allows repeated (twice) restorations | ✗ not very sharp<br>✗ slight change in identity |
+| V1.2 | ✓ sharper output<br>✓ with beauty makeup | ✗ some outputs are unnatural |
+
+You can find **more models (such as the discriminators)** here: [[Google Drive](https://drive.google.com/drive/folders/17rLiFzcUMoQuhLnptDsKolegHWwJOnHu?usp=sharing)], OR [[Tencent Cloud 腾讯微云](https://share.weiyun.com/ShYoCCoc)]
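+
+The `-bg_upsampler realesrgan` option shown above corresponds to handing `GFPGANer` a background upsampler. A minimal sketch of that wiring, following `cog_predict.py` in this repository (it assumes the `realesrgan` package is installed and the `realesr-general-x4v3.pth` weights have been downloaded; the weight paths are placeholders):
+
+```python
+import torch
+from basicsr.archs.srvgg_arch import SRVGGNetCompact
+from gfpgan import GFPGANer
+from realesrgan.utils import RealESRGANer
+
+# Small SRVGG network used by the general Real-ESRGAN x4 model.
+model = SRVGGNetCompact(num_in_ch=3, num_out_ch=3, num_feat=64, num_conv=32,
+                        upscale=4, act_type='prelu')
+bg_upsampler = RealESRGANer(
+    scale=4,
+    model_path='gfpgan/weights/realesr-general-x4v3.pth',  # placeholder location
+    model=model,
+    tile=0, tile_pad=10, pre_pad=0,
+    half=torch.cuda.is_available())  # fp16 only when a GPU is available
+
+restorer = GFPGANer(
+    model_path='experiments/pretrained_models/GFPGANv1.3.pth',
+    upscale=2, arch='clean', channel_multiplier=2,
+    bg_upsampler=bg_upsampler)
+```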
+
+## :computer: Training
+
+We provide the training code for GFPGAN (used in our paper); you can adapt it to your own needs.
+
+**Tips**
+
+1. More high-quality faces can improve the restoration quality.
+2. You may need to perform some pre-processing, such as beauty makeup.
+
+**Procedures**
+
+(You can try a simple version (`options/train_gfpgan_v1_simple.yml`) that does not require face component landmarks.)
+
+1. Dataset preparation: [FFHQ](https://github.com/NVlabs/ffhq-dataset)
+
+1. Download pre-trained models and other data. Put them in the `experiments/pretrained_models` folder.
+    1. [Pre-trained StyleGAN2 model: StyleGAN2_512_Cmul1_FFHQ_B12G4_scratch_800k.pth](https://github.com/TencentARC/GFPGAN/releases/download/v0.1.0/StyleGAN2_512_Cmul1_FFHQ_B12G4_scratch_800k.pth)
+    1. [Component locations of FFHQ: FFHQ_eye_mouth_landmarks_512.pth](https://github.com/TencentARC/GFPGAN/releases/download/v0.1.0/FFHQ_eye_mouth_landmarks_512.pth)
+    1. [A simple ArcFace model: arcface_resnet18.pth](https://github.com/TencentARC/GFPGAN/releases/download/v0.1.0/arcface_resnet18.pth)
+
+1. Modify the configuration file `options/train_gfpgan_v1.yml` accordingly.
+
+1. Training
+
+> python -m torch.distributed.launch --nproc_per_node=4 --master_port=22021 gfpgan/train.py -opt options/train_gfpgan_v1.yml --launcher pytorch
+
+## :scroll: License and Acknowledgement
+
+GFPGAN is released under Apache License Version 2.0.
+
+## BibTeX
+
+    @InProceedings{wang2021gfpgan,
+        author = {Xintao Wang and Yu Li and Honglun Zhang and Ying Shan},
+        title = {Towards Real-World Blind Face Restoration with Generative Facial Prior},
+        booktitle={The IEEE Conference on Computer Vision and Pattern Recognition (CVPR)},
+        year = {2021}
+    }
+
+## :e-mail: Contact
+
+If you have any questions, please email `xintao.wang@outlook.com` or `xintaowang@tencent.com`. diff --git a/motion-gan-pipeline/GFPGAN/README_CN.md b/motion-gan-pipeline/GFPGAN/README_CN.md new file mode 100644 index 0000000..880f206 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/README_CN.md @@ -0,0 +1,7 @@ +

+ +

+ +## + +还未完工,欢迎贡献! diff --git a/motion-gan-pipeline/GFPGAN/VERSION b/motion-gan-pipeline/GFPGAN/VERSION new file mode 100644 index 0000000..3336003 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/VERSION @@ -0,0 +1 @@ +1.3.7 diff --git a/motion-gan-pipeline/GFPGAN/assets/gfpgan_logo.png b/motion-gan-pipeline/GFPGAN/assets/gfpgan_logo.png new file mode 100644 index 0000000..f019378 Binary files /dev/null and b/motion-gan-pipeline/GFPGAN/assets/gfpgan_logo.png differ diff --git a/motion-gan-pipeline/GFPGAN/cog.yaml b/motion-gan-pipeline/GFPGAN/cog.yaml new file mode 100644 index 0000000..f22d2c7 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/cog.yaml @@ -0,0 +1,22 @@ +# This file is used for constructing replicate env +image: "r8.im/tencentarc/gfpgan" + +build: + gpu: true + python_version: "3.8" + system_packages: + - "libgl1-mesa-glx" + - "libglib2.0-0" + python_packages: + - "torch==1.7.1" + - "torchvision==0.8.2" + - "numpy==1.21.1" + - "lmdb==1.2.1" + - "opencv-python==4.5.3.56" + - "PyYAML==5.4.1" + - "tqdm==4.62.2" + - "yapf==0.31.0" + - "basicsr==1.4.2" + - "facexlib==0.2.5" + +predict: "cog_predict.py:Predictor" diff --git a/motion-gan-pipeline/GFPGAN/cog_predict.py b/motion-gan-pipeline/GFPGAN/cog_predict.py new file mode 100644 index 0000000..ff37ae3 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/cog_predict.py @@ -0,0 +1,177 @@ +# flake8: noqa +# This file is used for deploying replicate models +# running: cog predict -i img=@inputs/whole_imgs/10045.png -i version='v1.4' -i scale=2 +# push: cog push r8.im/tencentarc/gfpgan +# push (backup): cog push r8.im/xinntao/gfpgan + +import os + +os.system('python setup.py develop') +os.system('pip install realesrgan') + +import cv2 +import shutil +import tempfile +import torch +from basicsr.archs.srvgg_arch import SRVGGNetCompact + +from gfpgan import GFPGANer + +try: + from cog import BasePredictor, Input, Path + from realesrgan.utils import RealESRGANer +except Exception: + print('please install cog and realesrgan package') + + +class Predictor(BasePredictor): + + def setup(self): + os.makedirs('output', exist_ok=True) + # download weights + if not os.path.exists('gfpgan/weights/realesr-general-x4v3.pth'): + os.system( + 'wget https://github.com/xinntao/Real-ESRGAN/releases/download/v0.2.5.0/realesr-general-x4v3.pth -P ./gfpgan/weights' + ) + if not os.path.exists('gfpgan/weights/GFPGANv1.2.pth'): + os.system( + 'wget https://github.com/TencentARC/GFPGAN/releases/download/v1.3.0/GFPGANv1.2.pth -P ./gfpgan/weights') + if not os.path.exists('gfpgan/weights/GFPGANv1.3.pth'): + os.system( + 'wget https://github.com/TencentARC/GFPGAN/releases/download/v1.3.0/GFPGANv1.3.pth -P ./gfpgan/weights') + if not os.path.exists('gfpgan/weights/GFPGANv1.4.pth'): + os.system( + 'wget https://github.com/TencentARC/GFPGAN/releases/download/v1.3.0/GFPGANv1.4.pth -P ./gfpgan/weights') + if not os.path.exists('gfpgan/weights/RestoreFormer.pth'): + os.system( + 'wget https://github.com/TencentARC/GFPGAN/releases/download/v1.3.4/RestoreFormer.pth -P ./gfpgan/weights' + ) + if not os.path.exists('gfpgan/weights/CodeFormer.pth'): + os.system( + 'wget https://github.com/TencentARC/GFPGAN/releases/download/v1.3.4/CodeFormer.pth -P ./gfpgan/weights') + + # background enhancer with RealESRGAN + model = SRVGGNetCompact(num_in_ch=3, num_out_ch=3, num_feat=64, num_conv=32, upscale=4, act_type='prelu') + model_path = 'gfpgan/weights/realesr-general-x4v3.pth' + half = True if torch.cuda.is_available() else False + self.upsampler = RealESRGANer( + scale=4, 
model_path=model_path, model=model, tile=0, tile_pad=10, pre_pad=0, half=half) + + # Use GFPGAN for face enhancement + self.face_enhancer = GFPGANer( + model_path='gfpgan/weights/GFPGANv1.4.pth', + upscale=2, + arch='clean', + channel_multiplier=2, + bg_upsampler=self.upsampler) + self.current_version = 'v1.4' + + def predict( + self, + img: Path = Input(description='Input'), + version: str = Input( + description='GFPGAN version. v1.3: better quality. v1.4: more details and better identity.', + choices=['v1.2', 'v1.3', 'v1.4', 'RestoreFormer', 'CodeFormer'], + default='v1.4'), + scale: float = Input(description='Rescaling factor', default=2), + weight: float = Input( + description='Weight, only for CodeFormer. 0 for better quality, 1 for better identity', + default=0.5, + ge=0, + le=1.0) + ) -> Path: + if not isinstance(weight, (int, float)): + weight = 0.5 + print(img, version, scale, weight) + try: + extension = os.path.splitext(os.path.basename(str(img)))[1] + img = cv2.imread(str(img), cv2.IMREAD_UNCHANGED) + if len(img.shape) == 3 and img.shape[2] == 4: + img_mode = 'RGBA' + elif len(img.shape) == 2: + img_mode = None + img = cv2.cvtColor(img, cv2.COLOR_GRAY2BGR) + else: + img_mode = None + + h, w = img.shape[0:2] + if h < 300: + img = cv2.resize(img, (w * 2, h * 2), interpolation=cv2.INTER_LANCZOS4) + + if self.current_version != version: + if version == 'v1.2': + self.face_enhancer = GFPGANer( + model_path='gfpgan/weights/GFPGANv1.2.pth', + upscale=2, + arch='clean', + channel_multiplier=2, + bg_upsampler=self.upsampler) + self.current_version = 'v1.2' + elif version == 'v1.3': + self.face_enhancer = GFPGANer( + model_path='gfpgan/weights/GFPGANv1.3.pth', + upscale=2, + arch='clean', + channel_multiplier=2, + bg_upsampler=self.upsampler) + self.current_version = 'v1.3' + elif version == 'v1.4': + self.face_enhancer = GFPGANer( + model_path='gfpgan/weights/GFPGANv1.4.pth', + upscale=2, + arch='clean', + channel_multiplier=2, + bg_upsampler=self.upsampler) + self.current_version = 'v1.4' + elif version == 'RestoreFormer': + self.face_enhancer = GFPGANer( + model_path='gfpgan/weights/RestoreFormer.pth', + upscale=2, + arch='RestoreFormer', + channel_multiplier=2, + bg_upsampler=self.upsampler) + elif version == 'CodeFormer': + self.face_enhancer = GFPGANer( + model_path='gfpgan/weights/CodeFormer.pth', + upscale=2, + arch='CodeFormer', + channel_multiplier=2, + bg_upsampler=self.upsampler) + + try: + _, _, output = self.face_enhancer.enhance( + img, has_aligned=False, only_center_face=False, paste_back=True, weight=weight) + except RuntimeError as error: + print('Error', error) + + try: + if scale != 2: + interpolation = cv2.INTER_AREA if scale < 2 else cv2.INTER_LANCZOS4 + h, w = img.shape[0:2] + output = cv2.resize(output, (int(w * scale / 2), int(h * scale / 2)), interpolation=interpolation) + except Exception as error: + print('wrong scale input.', error) + + if img_mode == 'RGBA': # RGBA images should be saved in png format + extension = 'png' + # save_path = f'output/out.{extension}' + # cv2.imwrite(save_path, output) + out_path = Path(tempfile.mkdtemp()) / f'out.{extension}' + cv2.imwrite(str(out_path), output) + except Exception as error: + print('global exception: ', error) + finally: + clean_folder('output') + return out_path + + +def clean_folder(folder): + for filename in os.listdir(folder): + file_path = os.path.join(folder, filename) + try: + if os.path.isfile(file_path) or os.path.islink(file_path): + os.unlink(file_path) + elif os.path.isdir(file_path): + 
shutil.rmtree(file_path) + except Exception as e: + print(f'Failed to delete {file_path}. Reason: {e}') diff --git a/motion-gan-pipeline/GFPGAN/gfpgan/__init__.py b/motion-gan-pipeline/GFPGAN/gfpgan/__init__.py new file mode 100644 index 0000000..94daaee --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/gfpgan/__init__.py @@ -0,0 +1,7 @@ +# flake8: noqa +from .archs import * +from .data import * +from .models import * +from .utils import * + +# from .version import * diff --git a/motion-gan-pipeline/GFPGAN/gfpgan/archs/__init__.py b/motion-gan-pipeline/GFPGAN/gfpgan/archs/__init__.py new file mode 100644 index 0000000..bec5f17 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/gfpgan/archs/__init__.py @@ -0,0 +1,10 @@ +import importlib +from basicsr.utils import scandir +from os import path as osp + +# automatically scan and import arch modules for registry +# scan all the files that end with '_arch.py' under the archs folder +arch_folder = osp.dirname(osp.abspath(__file__)) +arch_filenames = [osp.splitext(osp.basename(v))[0] for v in scandir(arch_folder) if v.endswith('_arch.py')] +# import all the arch modules +_arch_modules = [importlib.import_module(f'gfpgan.archs.{file_name}') for file_name in arch_filenames] diff --git a/motion-gan-pipeline/GFPGAN/gfpgan/archs/arcface_arch.py b/motion-gan-pipeline/GFPGAN/gfpgan/archs/arcface_arch.py new file mode 100644 index 0000000..e6d3bd9 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/gfpgan/archs/arcface_arch.py @@ -0,0 +1,245 @@ +import torch.nn as nn +from basicsr.utils.registry import ARCH_REGISTRY + + +def conv3x3(inplanes, outplanes, stride=1): + """A simple wrapper for 3x3 convolution with padding. + + Args: + inplanes (int): Channel number of inputs. + outplanes (int): Channel number of outputs. + stride (int): Stride in convolution. Default: 1. + """ + return nn.Conv2d(inplanes, outplanes, kernel_size=3, stride=stride, padding=1, bias=False) + + +class BasicBlock(nn.Module): + """Basic residual block used in the ResNetArcFace architecture. + + Args: + inplanes (int): Channel number of inputs. + planes (int): Channel number of outputs. + stride (int): Stride in convolution. Default: 1. + downsample (nn.Module): The downsample module. Default: None. + """ + expansion = 1 # output channel expansion ratio + + def __init__(self, inplanes, planes, stride=1, downsample=None): + super(BasicBlock, self).__init__() + self.conv1 = conv3x3(inplanes, planes, stride) + self.bn1 = nn.BatchNorm2d(planes) + self.relu = nn.ReLU(inplace=True) + self.conv2 = conv3x3(planes, planes) + self.bn2 = nn.BatchNorm2d(planes) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + + +class IRBlock(nn.Module): + """Improved residual block (IR Block) used in the ResNetArcFace architecture. + + Args: + inplanes (int): Channel number of inputs. + planes (int): Channel number of outputs. + stride (int): Stride in convolution. Default: 1. + downsample (nn.Module): The downsample module. Default: None. + use_se (bool): Whether use the SEBlock (squeeze and excitation block). Default: True. 
+ """ + expansion = 1 # output channel expansion ratio + + def __init__(self, inplanes, planes, stride=1, downsample=None, use_se=True): + super(IRBlock, self).__init__() + self.bn0 = nn.BatchNorm2d(inplanes) + self.conv1 = conv3x3(inplanes, inplanes) + self.bn1 = nn.BatchNorm2d(inplanes) + self.prelu = nn.PReLU() + self.conv2 = conv3x3(inplanes, planes, stride) + self.bn2 = nn.BatchNorm2d(planes) + self.downsample = downsample + self.stride = stride + self.use_se = use_se + if self.use_se: + self.se = SEBlock(planes) + + def forward(self, x): + residual = x + out = self.bn0(x) + out = self.conv1(out) + out = self.bn1(out) + out = self.prelu(out) + + out = self.conv2(out) + out = self.bn2(out) + if self.use_se: + out = self.se(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.prelu(out) + + return out + + +class Bottleneck(nn.Module): + """Bottleneck block used in the ResNetArcFace architecture. + + Args: + inplanes (int): Channel number of inputs. + planes (int): Channel number of outputs. + stride (int): Stride in convolution. Default: 1. + downsample (nn.Module): The downsample module. Default: None. + """ + expansion = 4 # output channel expansion ratio + + def __init__(self, inplanes, planes, stride=1, downsample=None): + super(Bottleneck, self).__init__() + self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False) + self.bn1 = nn.BatchNorm2d(planes) + self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False) + self.bn2 = nn.BatchNorm2d(planes) + self.conv3 = nn.Conv2d(planes, planes * self.expansion, kernel_size=1, bias=False) + self.bn3 = nn.BatchNorm2d(planes * self.expansion) + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + + +class SEBlock(nn.Module): + """The squeeze-and-excitation block (SEBlock) used in the IRBlock. + + Args: + channel (int): Channel number of inputs. + reduction (int): Channel reduction ration. Default: 16. + """ + + def __init__(self, channel, reduction=16): + super(SEBlock, self).__init__() + self.avg_pool = nn.AdaptiveAvgPool2d(1) # pool to 1x1 without spatial information + self.fc = nn.Sequential( + nn.Linear(channel, channel // reduction), nn.PReLU(), nn.Linear(channel // reduction, channel), + nn.Sigmoid()) + + def forward(self, x): + b, c, _, _ = x.size() + y = self.avg_pool(x).view(b, c) + y = self.fc(y).view(b, c, 1, 1) + return x * y + + +@ARCH_REGISTRY.register() +class ResNetArcFace(nn.Module): + """ArcFace with ResNet architectures. + + Ref: ArcFace: Additive Angular Margin Loss for Deep Face Recognition. + + Args: + block (str): Block used in the ArcFace architecture. + layers (tuple(int)): Block numbers in each layer. + use_se (bool): Whether use the SEBlock (squeeze and excitation block). Default: True. 
+ """ + + def __init__(self, block, layers, use_se=True): + if block == 'IRBlock': + block = IRBlock + self.inplanes = 64 + self.use_se = use_se + super(ResNetArcFace, self).__init__() + + self.conv1 = nn.Conv2d(1, 64, kernel_size=3, padding=1, bias=False) + self.bn1 = nn.BatchNorm2d(64) + self.prelu = nn.PReLU() + self.maxpool = nn.MaxPool2d(kernel_size=2, stride=2) + self.layer1 = self._make_layer(block, 64, layers[0]) + self.layer2 = self._make_layer(block, 128, layers[1], stride=2) + self.layer3 = self._make_layer(block, 256, layers[2], stride=2) + self.layer4 = self._make_layer(block, 512, layers[3], stride=2) + self.bn4 = nn.BatchNorm2d(512) + self.dropout = nn.Dropout() + self.fc5 = nn.Linear(512 * 8 * 8, 512) + self.bn5 = nn.BatchNorm1d(512) + + # initialization + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.xavier_normal_(m.weight) + elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d): + nn.init.constant_(m.weight, 1) + nn.init.constant_(m.bias, 0) + elif isinstance(m, nn.Linear): + nn.init.xavier_normal_(m.weight) + nn.init.constant_(m.bias, 0) + + def _make_layer(self, block, planes, num_blocks, stride=1): + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + nn.Conv2d(self.inplanes, planes * block.expansion, kernel_size=1, stride=stride, bias=False), + nn.BatchNorm2d(planes * block.expansion), + ) + layers = [] + layers.append(block(self.inplanes, planes, stride, downsample, use_se=self.use_se)) + self.inplanes = planes + for _ in range(1, num_blocks): + layers.append(block(self.inplanes, planes, use_se=self.use_se)) + + return nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + x = self.bn1(x) + x = self.prelu(x) + x = self.maxpool(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x = self.layer4(x) + x = self.bn4(x) + x = self.dropout(x) + x = x.view(x.size(0), -1) + x = self.fc5(x) + x = self.bn5(x) + + return x diff --git a/motion-gan-pipeline/GFPGAN/gfpgan/archs/codeformer_arch.py b/motion-gan-pipeline/GFPGAN/gfpgan/archs/codeformer_arch.py new file mode 100644 index 0000000..ba204a9 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/gfpgan/archs/codeformer_arch.py @@ -0,0 +1,630 @@ +""" +Modified from https://github.com/sczhou/CodeFormer +VQGAN code, adapted from the original created by the Unleashing Transformers authors: +https://github.com/samb-t/unleashing-transformers/blob/master/models/vqgan.py +""" +import math +import torch +import torch.nn as nn +import torch.nn.functional as F +from basicsr.utils import get_root_logger +from basicsr.utils.registry import ARCH_REGISTRY +from torch import Tensor +from typing import Optional + + +class VectorQuantizer(nn.Module): + + def __init__(self, codebook_size, emb_dim, beta): + super(VectorQuantizer, self).__init__() + self.codebook_size = codebook_size # number of embeddings + self.emb_dim = emb_dim # dimension of embedding + self.beta = beta # commitment cost used in loss term, beta * ||z_e(x)-sg[e]||^2 + self.embedding = nn.Embedding(self.codebook_size, self.emb_dim) + self.embedding.weight.data.uniform_(-1.0 / self.codebook_size, 1.0 / self.codebook_size) + + def forward(self, z): + # reshape z -> (batch, height, width, channel) and flatten + z = z.permute(0, 2, 3, 1).contiguous() + z_flattened = z.view(-1, self.emb_dim) + + # distances from z to embeddings e_j (z - e)^2 = z^2 + e^2 - 2 e * z + d = (z_flattened ** 2).sum(dim=1, keepdim=True) + (self.embedding.weight**2).sum(1) - \ + 2 * 
torch.matmul(z_flattened, self.embedding.weight.t()) + + mean_distance = torch.mean(d) + # find closest encodings + # min_encoding_indices = torch.argmin(d, dim=1).unsqueeze(1) + min_encoding_scores, min_encoding_indices = torch.topk(d, 1, dim=1, largest=False) + # [0-1], higher score, higher confidence + min_encoding_scores = torch.exp(-min_encoding_scores / 10) + + min_encodings = torch.zeros(min_encoding_indices.shape[0], self.codebook_size).to(z) + min_encodings.scatter_(1, min_encoding_indices, 1) + + # get quantized latent vectors + z_q = torch.matmul(min_encodings, self.embedding.weight).view(z.shape) + # compute loss for embedding + loss = torch.mean((z_q.detach() - z)**2) + self.beta * torch.mean((z_q - z.detach())**2) + # preserve gradients + z_q = z + (z_q - z).detach() + + # perplexity + e_mean = torch.mean(min_encodings, dim=0) + perplexity = torch.exp(-torch.sum(e_mean * torch.log(e_mean + 1e-10))) + # reshape back to match original input shape + z_q = z_q.permute(0, 3, 1, 2).contiguous() + + return z_q, loss, { + 'perplexity': perplexity, + 'min_encodings': min_encodings, + 'min_encoding_indices': min_encoding_indices, + 'min_encoding_scores': min_encoding_scores, + 'mean_distance': mean_distance + } + + def get_codebook_feat(self, indices, shape): + # input indices: batch*token_num -> (batch*token_num)*1 + # shape: batch, height, width, channel + indices = indices.view(-1, 1) + min_encodings = torch.zeros(indices.shape[0], self.codebook_size).to(indices) + min_encodings.scatter_(1, indices, 1) + # get quantized latent vectors + z_q = torch.matmul(min_encodings.float(), self.embedding.weight) + + if shape is not None: # reshape back to match original input shape + z_q = z_q.view(shape).permute(0, 3, 1, 2).contiguous() + + return z_q + + +class GumbelQuantizer(nn.Module): + + def __init__(self, codebook_size, emb_dim, num_hiddens, straight_through=False, kl_weight=5e-4, temp_init=1.0): + super().__init__() + self.codebook_size = codebook_size # number of embeddings + self.emb_dim = emb_dim # dimension of embedding + self.straight_through = straight_through + self.temperature = temp_init + self.kl_weight = kl_weight + self.proj = nn.Conv2d(num_hiddens, codebook_size, 1) # projects last encoder layer to quantized logits + self.embed = nn.Embedding(codebook_size, emb_dim) + + def forward(self, z): + hard = self.straight_through if self.training else True + + logits = self.proj(z) + + soft_one_hot = F.gumbel_softmax(logits, tau=self.temperature, dim=1, hard=hard) + + z_q = torch.einsum('b n h w, n d -> b d h w', soft_one_hot, self.embed.weight) + + # + kl divergence to the prior loss + qy = F.softmax(logits, dim=1) + diff = self.kl_weight * torch.sum(qy * torch.log(qy * self.codebook_size + 1e-10), dim=1).mean() + min_encoding_indices = soft_one_hot.argmax(dim=1) + + return z_q, diff, {'min_encoding_indices': min_encoding_indices} + + +class Downsample(nn.Module): + + def __init__(self, in_channels): + super().__init__() + self.conv = torch.nn.Conv2d(in_channels, in_channels, kernel_size=3, stride=2, padding=0) + + def forward(self, x): + pad = (0, 1, 0, 1) + x = torch.nn.functional.pad(x, pad, mode='constant', value=0) + x = self.conv(x) + return x + + +class Upsample(nn.Module): + + def __init__(self, in_channels): + super().__init__() + self.conv = nn.Conv2d(in_channels, in_channels, kernel_size=3, stride=1, padding=1) + + def forward(self, x): + x = F.interpolate(x, scale_factor=2.0, mode='nearest') + x = self.conv(x) + + return x + + +class AttnBlock(nn.Module): + + def 
__init__(self, in_channels): + super().__init__() + self.in_channels = in_channels + + self.norm = normalize(in_channels) + self.q = torch.nn.Conv2d(in_channels, in_channels, kernel_size=1, stride=1, padding=0) + self.k = torch.nn.Conv2d(in_channels, in_channels, kernel_size=1, stride=1, padding=0) + self.v = torch.nn.Conv2d(in_channels, in_channels, kernel_size=1, stride=1, padding=0) + self.proj_out = torch.nn.Conv2d(in_channels, in_channels, kernel_size=1, stride=1, padding=0) + + def forward(self, x): + h_ = x + h_ = self.norm(h_) + q = self.q(h_) + k = self.k(h_) + v = self.v(h_) + + # compute attention + b, c, h, w = q.shape + q = q.reshape(b, c, h * w) + q = q.permute(0, 2, 1) + k = k.reshape(b, c, h * w) + w_ = torch.bmm(q, k) + w_ = w_ * (int(c)**(-0.5)) + w_ = F.softmax(w_, dim=2) + + # attend to values + v = v.reshape(b, c, h * w) + w_ = w_.permute(0, 2, 1) + h_ = torch.bmm(v, w_) + h_ = h_.reshape(b, c, h, w) + + h_ = self.proj_out(h_) + + return x + h_ + + +class Encoder(nn.Module): + + def __init__(self, in_channels, nf, out_channels, ch_mult, num_res_blocks, resolution, attn_resolutions): + super().__init__() + self.nf = nf + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.attn_resolutions = attn_resolutions + + curr_res = self.resolution + in_ch_mult = (1, ) + tuple(ch_mult) + + blocks = [] + # initial convultion + blocks.append(nn.Conv2d(in_channels, nf, kernel_size=3, stride=1, padding=1)) + + # residual and downsampling blocks, with attention on smaller res (16x16) + for i in range(self.num_resolutions): + block_in_ch = nf * in_ch_mult[i] + block_out_ch = nf * ch_mult[i] + for _ in range(self.num_res_blocks): + blocks.append(ResBlock(block_in_ch, block_out_ch)) + block_in_ch = block_out_ch + if curr_res in attn_resolutions: + blocks.append(AttnBlock(block_in_ch)) + + if i != self.num_resolutions - 1: + blocks.append(Downsample(block_in_ch)) + curr_res = curr_res // 2 + + # non-local attention block + blocks.append(ResBlock(block_in_ch, block_in_ch)) + blocks.append(AttnBlock(block_in_ch)) + blocks.append(ResBlock(block_in_ch, block_in_ch)) + + # normalise and convert to latent size + blocks.append(normalize(block_in_ch)) + blocks.append(nn.Conv2d(block_in_ch, out_channels, kernel_size=3, stride=1, padding=1)) + self.blocks = nn.ModuleList(blocks) + + def forward(self, x): + for block in self.blocks: + x = block(x) + + return x + + +class Generator(nn.Module): + + def __init__(self, nf, ch_mult, res_blocks, img_size, attn_resolutions, emb_dim): + super().__init__() + self.nf = nf + self.ch_mult = ch_mult + self.num_resolutions = len(self.ch_mult) + self.num_res_blocks = res_blocks + self.resolution = img_size + self.attn_resolutions = attn_resolutions + self.in_channels = emb_dim + self.out_channels = 3 + block_in_ch = self.nf * self.ch_mult[-1] + curr_res = self.resolution // 2**(self.num_resolutions - 1) + + blocks = [] + # initial conv + blocks.append(nn.Conv2d(self.in_channels, block_in_ch, kernel_size=3, stride=1, padding=1)) + + # non-local attention block + blocks.append(ResBlock(block_in_ch, block_in_ch)) + blocks.append(AttnBlock(block_in_ch)) + blocks.append(ResBlock(block_in_ch, block_in_ch)) + + for i in reversed(range(self.num_resolutions)): + block_out_ch = self.nf * self.ch_mult[i] + + for _ in range(self.num_res_blocks): + blocks.append(ResBlock(block_in_ch, block_out_ch)) + block_in_ch = block_out_ch + + if curr_res in self.attn_resolutions: + blocks.append(AttnBlock(block_in_ch)) + + if i 
!= 0: + blocks.append(Upsample(block_in_ch)) + curr_res = curr_res * 2 + + blocks.append(normalize(block_in_ch)) + blocks.append(nn.Conv2d(block_in_ch, self.out_channels, kernel_size=3, stride=1, padding=1)) + + self.blocks = nn.ModuleList(blocks) + + def forward(self, x): + for block in self.blocks: + x = block(x) + + return x + + +class VQAutoEncoder(nn.Module): + + def __init__(self, + img_size, + nf, + ch_mult, + quantizer='nearest', + res_blocks=2, + attn_resolutions=[16], + codebook_size=1024, + emb_dim=256, + beta=0.25, + gumbel_straight_through=False, + gumbel_kl_weight=1e-8, + model_path=None): + super().__init__() + logger = get_root_logger() + self.in_channels = 3 + self.nf = nf + self.n_blocks = res_blocks + self.codebook_size = codebook_size + self.embed_dim = emb_dim + self.ch_mult = ch_mult + self.resolution = img_size + self.attn_resolutions = attn_resolutions + self.quantizer_type = quantizer + self.encoder = Encoder(self.in_channels, self.nf, self.embed_dim, self.ch_mult, self.n_blocks, self.resolution, + self.attn_resolutions) + if self.quantizer_type == 'nearest': + self.beta = beta # 0.25 + self.quantize = VectorQuantizer(self.codebook_size, self.embed_dim, self.beta) + elif self.quantizer_type == 'gumbel': + self.gumbel_num_hiddens = emb_dim + self.straight_through = gumbel_straight_through + self.kl_weight = gumbel_kl_weight + self.quantize = GumbelQuantizer(self.codebook_size, self.embed_dim, self.gumbel_num_hiddens, + self.straight_through, self.kl_weight) + self.generator = Generator(nf, ch_mult, res_blocks, img_size, attn_resolutions, emb_dim) + + if model_path is not None: + chkpt = torch.load(model_path, map_location='cpu') + if 'params_ema' in chkpt: + self.load_state_dict(torch.load(model_path, map_location='cpu')['params_ema']) + logger.info(f'vqgan is loaded from: {model_path} [params_ema]') + elif 'params' in chkpt: + self.load_state_dict(torch.load(model_path, map_location='cpu')['params']) + logger.info(f'vqgan is loaded from: {model_path} [params]') + else: + raise ValueError('Wrong params!') + + def forward(self, x): + x = self.encoder(x) + quant, codebook_loss, quant_stats = self.quantize(x) + x = self.generator(quant) + return x, codebook_loss, quant_stats + + +def calc_mean_std(feat, eps=1e-5): + """Calculate mean and std for adaptive_instance_normalization. + + Args: + feat (Tensor): 4D tensor. + eps (float): A small value added to the variance to avoid + divide-by-zero. Default: 1e-5. + """ + size = feat.size() + assert len(size) == 4, 'The input feature should be 4D tensor.' + b, c = size[:2] + feat_var = feat.view(b, c, -1).var(dim=2) + eps + feat_std = feat_var.sqrt().view(b, c, 1, 1) + feat_mean = feat.view(b, c, -1).mean(dim=2).view(b, c, 1, 1) + return feat_mean, feat_std + + +def adaptive_instance_normalization(content_feat, style_feat): + """Adaptive instance normalization. + + Adjust the reference features to have the similar color and illuminations + as those in the degradate features. + + Args: + content_feat (Tensor): The reference feature. + style_feat (Tensor): The degradate features. 
+ """ + size = content_feat.size() + style_mean, style_std = calc_mean_std(style_feat) + content_mean, content_std = calc_mean_std(content_feat) + normalized_feat = (content_feat - content_mean.expand(size)) / content_std.expand(size) + return normalized_feat * style_std.expand(size) + style_mean.expand(size) + + +class PositionEmbeddingSine(nn.Module): + """ + This is a more standard version of the position embedding, very similar to the one + used by the Attention is all you need paper, generalized to work on images. + """ + + def __init__(self, num_pos_feats=64, temperature=10000, normalize=False, scale=None): + super().__init__() + self.num_pos_feats = num_pos_feats + self.temperature = temperature + self.normalize = normalize + if scale is not None and normalize is False: + raise ValueError('normalize should be True if scale is passed') + if scale is None: + scale = 2 * math.pi + self.scale = scale + + def forward(self, x, mask=None): + if mask is None: + mask = torch.zeros((x.size(0), x.size(2), x.size(3)), device=x.device, dtype=torch.bool) + not_mask = ~mask + y_embed = not_mask.cumsum(1, dtype=torch.float32) + x_embed = not_mask.cumsum(2, dtype=torch.float32) + if self.normalize: + eps = 1e-6 + y_embed = y_embed / (y_embed[:, -1:, :] + eps) * self.scale + x_embed = x_embed / (x_embed[:, :, -1:] + eps) * self.scale + + dim_t = torch.arange(self.num_pos_feats, dtype=torch.float32, device=x.device) + dim_t = self.temperature**(2 * (dim_t // 2) / self.num_pos_feats) + + pos_x = x_embed[:, :, :, None] / dim_t + pos_y = y_embed[:, :, :, None] / dim_t + pos_x = torch.stack((pos_x[:, :, :, 0::2].sin(), pos_x[:, :, :, 1::2].cos()), dim=4).flatten(3) + pos_y = torch.stack((pos_y[:, :, :, 0::2].sin(), pos_y[:, :, :, 1::2].cos()), dim=4).flatten(3) + pos = torch.cat((pos_y, pos_x), dim=3).permute(0, 3, 1, 2) + return pos + + +def _get_activation_fn(activation): + """Return an activation function given a string""" + if activation == 'relu': + return F.relu + if activation == 'gelu': + return F.gelu + if activation == 'glu': + return F.glu + raise RuntimeError(F'activation should be relu/gelu, not {activation}.') + + +class TransformerSALayer(nn.Module): + + def __init__(self, embed_dim, nhead=8, dim_mlp=2048, dropout=0.0, activation='gelu'): + super().__init__() + self.self_attn = nn.MultiheadAttention(embed_dim, nhead, dropout=dropout) + # Implementation of Feedforward model - MLP + self.linear1 = nn.Linear(embed_dim, dim_mlp) + self.dropout = nn.Dropout(dropout) + self.linear2 = nn.Linear(dim_mlp, embed_dim) + + self.norm1 = nn.LayerNorm(embed_dim) + self.norm2 = nn.LayerNorm(embed_dim) + self.dropout1 = nn.Dropout(dropout) + self.dropout2 = nn.Dropout(dropout) + + self.activation = _get_activation_fn(activation) + + def with_pos_embed(self, tensor, pos: Optional[Tensor]): + return tensor if pos is None else tensor + pos + + def forward(self, + tgt, + tgt_mask: Optional[Tensor] = None, + tgt_key_padding_mask: Optional[Tensor] = None, + query_pos: Optional[Tensor] = None): + + # self attention + tgt2 = self.norm1(tgt) + q = k = self.with_pos_embed(tgt2, query_pos) + tgt2 = self.self_attn(q, k, value=tgt2, attn_mask=tgt_mask, key_padding_mask=tgt_key_padding_mask)[0] + tgt = tgt + self.dropout1(tgt2) + + # ffn + tgt2 = self.norm2(tgt) + tgt2 = self.linear2(self.dropout(self.activation(self.linear1(tgt2)))) + tgt = tgt + self.dropout2(tgt2) + return tgt + + +def normalize(in_channels): + return torch.nn.GroupNorm(num_groups=32, num_channels=in_channels, eps=1e-6, affine=True) + + 
+@torch.jit.script +def swish(x): + return x * torch.sigmoid(x) + + +class ResBlock(nn.Module): + + def __init__(self, in_channels, out_channels=None): + super(ResBlock, self).__init__() + self.in_channels = in_channels + self.out_channels = in_channels if out_channels is None else out_channels + self.norm1 = normalize(in_channels) + self.conv1 = nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=1, padding=1) + self.norm2 = normalize(out_channels) + self.conv2 = nn.Conv2d(out_channels, out_channels, kernel_size=3, stride=1, padding=1) + if self.in_channels != self.out_channels: + self.conv_out = nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=1, padding=0) + + def forward(self, x_in): + x = x_in + x = self.norm1(x) + x = swish(x) + x = self.conv1(x) + x = self.norm2(x) + x = swish(x) + x = self.conv2(x) + if self.in_channels != self.out_channels: + x_in = self.conv_out(x_in) + + return x + x_in + + +class Fuse_sft_block(nn.Module): + + def __init__(self, in_ch, out_ch): + super().__init__() + self.encode_enc = ResBlock(2 * in_ch, out_ch) + + self.scale = nn.Sequential( + nn.Conv2d(in_ch, out_ch, kernel_size=3, padding=1), nn.LeakyReLU(0.2, True), + nn.Conv2d(out_ch, out_ch, kernel_size=3, padding=1)) + + self.shift = nn.Sequential( + nn.Conv2d(in_ch, out_ch, kernel_size=3, padding=1), nn.LeakyReLU(0.2, True), + nn.Conv2d(out_ch, out_ch, kernel_size=3, padding=1)) + + def forward(self, enc_feat, dec_feat, w=1): + enc_feat = self.encode_enc(torch.cat([enc_feat, dec_feat], dim=1)) + scale = self.scale(enc_feat) + shift = self.shift(enc_feat) + residual = w * (dec_feat * scale + shift) + out = dec_feat + residual + return out + + +@ARCH_REGISTRY.register() +class CodeFormer(VQAutoEncoder): + + def __init__(self, + dim_embd=512, + n_head=8, + n_layers=9, + codebook_size=1024, + latent_size=256, + connect_list=['32', '64', '128', '256'], + fix_modules=['quantize', 'generator']): + super(CodeFormer, self).__init__(512, 64, [1, 2, 2, 4, 4, 8], 'nearest', 2, [16], codebook_size) + + if fix_modules is not None: + for module in fix_modules: + for param in getattr(self, module).parameters(): + param.requires_grad = False + + self.connect_list = connect_list + self.n_layers = n_layers + self.dim_embd = dim_embd + self.dim_mlp = dim_embd * 2 + + self.position_emb = nn.Parameter(torch.zeros(latent_size, self.dim_embd)) + self.feat_emb = nn.Linear(256, self.dim_embd) + + # transformer + self.ft_layers = nn.Sequential(*[ + TransformerSALayer(embed_dim=dim_embd, nhead=n_head, dim_mlp=self.dim_mlp, dropout=0.0) + for _ in range(self.n_layers) + ]) + + # logits_predict head + self.idx_pred_layer = nn.Sequential(nn.LayerNorm(dim_embd), nn.Linear(dim_embd, codebook_size, bias=False)) + + self.channels = {'16': 512, '32': 256, '64': 256, '128': 128, '256': 128, '512': 64} + + # after second residual block for > 16, before attn layer for ==16 + self.fuse_encoder_block = {'512': 2, '256': 5, '128': 8, '64': 11, '32': 14, '16': 18} + # after first residual block for > 16, before attn layer for ==16 + self.fuse_generator_block = {'16': 6, '32': 9, '64': 12, '128': 15, '256': 18, '512': 21} + + # fuse_convs_dict + self.fuse_convs_dict = nn.ModuleDict() + for f_size in self.connect_list: + in_ch = self.channels[f_size] + self.fuse_convs_dict[f_size] = Fuse_sft_block(in_ch, in_ch) + + def _init_weights(self, module): + if isinstance(module, (nn.Linear, nn.Embedding)): + module.weight.data.normal_(mean=0.0, std=0.02) + if isinstance(module, nn.Linear) and module.bias is not None: + 
module.bias.data.zero_() + elif isinstance(module, nn.LayerNorm): + module.bias.data.zero_() + module.weight.data.fill_(1.0) + + def forward(self, x, weight=0.5, **kwargs): + detach_16 = True + code_only = False + adain = True + # ################### Encoder ##################### + enc_feat_dict = {} + out_list = [self.fuse_encoder_block[f_size] for f_size in self.connect_list] + for i, block in enumerate(self.encoder.blocks): + x = block(x) + if i in out_list: + enc_feat_dict[str(x.shape[-1])] = x.clone() + + lq_feat = x + # ################# Transformer ################### + # quant_feat, codebook_loss, quant_stats = self.quantize(lq_feat) + pos_emb = self.position_emb.unsqueeze(1).repeat(1, x.shape[0], 1) + # BCHW -> BC(HW) -> (HW)BC + feat_emb = self.feat_emb(lq_feat.flatten(2).permute(2, 0, 1)) + query_emb = feat_emb + # Transformer encoder + for layer in self.ft_layers: + query_emb = layer(query_emb, query_pos=pos_emb) + + # output logits + logits = self.idx_pred_layer(query_emb) # (hw)bn + logits = logits.permute(1, 0, 2) # (hw)bn -> b(hw)n + + if code_only: # for training stage II + # logits doesn't need softmax before cross_entropy loss + return logits, lq_feat + + # ################# Quantization ################### + # if self.training: + # quant_feat = torch.einsum('btn,nc->btc', [soft_one_hot, self.quantize.embedding.weight]) + # # b(hw)c -> bc(hw) -> bchw + # quant_feat = quant_feat.permute(0,2,1).view(lq_feat.shape) + # ------------ + soft_one_hot = F.softmax(logits, dim=2) + _, top_idx = torch.topk(soft_one_hot, 1, dim=2) + quant_feat = self.quantize.get_codebook_feat(top_idx, shape=[x.shape[0], 16, 16, 256]) + # preserve gradients + # quant_feat = lq_feat + (quant_feat - lq_feat).detach() + + if detach_16: + quant_feat = quant_feat.detach() # for training stage III + if adain: + quant_feat = adaptive_instance_normalization(quant_feat, lq_feat) + + # ################## Generator #################### + x = quant_feat + fuse_list = [self.fuse_generator_block[f_size] for f_size in self.connect_list] + + for i, block in enumerate(self.generator.blocks): + x = block(x) + if i in fuse_list: # fuse after i-th block + f_size = str(x.shape[-1]) + if weight > 0: + x = self.fuse_convs_dict[f_size](enc_feat_dict[f_size].detach(), x, weight) + out = x + # logits doesn't need softmax before cross_entropy loss + # return out, logits, lq_feat + return out, logits diff --git a/motion-gan-pipeline/GFPGAN/gfpgan/archs/gfpgan_bilinear_arch.py b/motion-gan-pipeline/GFPGAN/gfpgan/archs/gfpgan_bilinear_arch.py new file mode 100644 index 0000000..52e0de8 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/gfpgan/archs/gfpgan_bilinear_arch.py @@ -0,0 +1,312 @@ +import math +import random +import torch +from basicsr.utils.registry import ARCH_REGISTRY +from torch import nn + +from .gfpganv1_arch import ResUpBlock +from .stylegan2_bilinear_arch import (ConvLayer, EqualConv2d, EqualLinear, ResBlock, ScaledLeakyReLU, + StyleGAN2GeneratorBilinear) + + +class StyleGAN2GeneratorBilinearSFT(StyleGAN2GeneratorBilinear): + """StyleGAN2 Generator with SFT modulation (Spatial Feature Transform). + + It is the bilinear version. It does not use the complicated UpFirDnSmooth function that is not friendly for + deployment. It can be easily converted to the clean version: StyleGAN2GeneratorCSFT. + + Args: + out_size (int): The spatial size of outputs. + num_style_feat (int): Channel number of style features. Default: 512. + num_mlp (int): Layer number of MLP style layers. Default: 8. 
+ channel_multiplier (int): Channel multiplier for large networks of StyleGAN2. Default: 2. + lr_mlp (float): Learning rate multiplier for mlp layers. Default: 0.01. + narrow (float): The narrow ratio for channels. Default: 1. + sft_half (bool): Whether to apply SFT on half of the input channels. Default: False. + """ + + def __init__(self, + out_size, + num_style_feat=512, + num_mlp=8, + channel_multiplier=2, + lr_mlp=0.01, + narrow=1, + sft_half=False): + super(StyleGAN2GeneratorBilinearSFT, self).__init__( + out_size, + num_style_feat=num_style_feat, + num_mlp=num_mlp, + channel_multiplier=channel_multiplier, + lr_mlp=lr_mlp, + narrow=narrow) + self.sft_half = sft_half + + def forward(self, + styles, + conditions, + input_is_latent=False, + noise=None, + randomize_noise=True, + truncation=1, + truncation_latent=None, + inject_index=None, + return_latents=False): + """Forward function for StyleGAN2GeneratorBilinearSFT. + + Args: + styles (list[Tensor]): Sample codes of styles. + conditions (list[Tensor]): SFT conditions to generators. + input_is_latent (bool): Whether input is latent style. Default: False. + noise (Tensor | None): Input noise or None. Default: None. + randomize_noise (bool): Randomize noise, used when 'noise' is False. Default: True. + truncation (float): The truncation ratio. Default: 1. + truncation_latent (Tensor | None): The truncation latent tensor. Default: None. + inject_index (int | None): The injection index for mixing noise. Default: None. + return_latents (bool): Whether to return style latents. Default: False. + """ + # style codes -> latents with Style MLP layer + if not input_is_latent: + styles = [self.style_mlp(s) for s in styles] + # noises + if noise is None: + if randomize_noise: + noise = [None] * self.num_layers # for each style conv layer + else: # use the stored noise + noise = [getattr(self.noises, f'noise{i}') for i in range(self.num_layers)] + # style truncation + if truncation < 1: + style_truncation = [] + for style in styles: + style_truncation.append(truncation_latent + truncation * (style - truncation_latent)) + styles = style_truncation + # get style latents with injection + if len(styles) == 1: + inject_index = self.num_latent + + if styles[0].ndim < 3: + # repeat latent code for all the layers + latent = styles[0].unsqueeze(1).repeat(1, inject_index, 1) + else: # used for encoder with different latent code for each layer + latent = styles[0] + elif len(styles) == 2: # mixing noises + if inject_index is None: + inject_index = random.randint(1, self.num_latent - 1) + latent1 = styles[0].unsqueeze(1).repeat(1, inject_index, 1) + latent2 = styles[1].unsqueeze(1).repeat(1, self.num_latent - inject_index, 1) + latent = torch.cat([latent1, latent2], 1) + + # main generation + out = self.constant_input(latent.shape[0]) + out = self.style_conv1(out, latent[:, 0], noise=noise[0]) + skip = self.to_rgb1(out, latent[:, 1]) + + i = 1 + for conv1, conv2, noise1, noise2, to_rgb in zip(self.style_convs[::2], self.style_convs[1::2], noise[1::2], + noise[2::2], self.to_rgbs): + out = conv1(out, latent[:, i], noise=noise1) + + # the conditions may have fewer levels + if i < len(conditions): + # SFT part to combine the conditions + if self.sft_half: # only apply SFT to half of the channels + out_same, out_sft = torch.split(out, int(out.size(1) // 2), dim=1) + out_sft = out_sft * conditions[i - 1] + conditions[i] + out = torch.cat([out_same, out_sft], dim=1) + else: # apply SFT to all the channels + out = out * conditions[i - 1] + conditions[i] + + out = 
conv2(out, latent[:, i + 1], noise=noise2) + skip = to_rgb(out, latent[:, i + 2], skip) # feature back to the rgb space + i += 2 + + image = skip + + if return_latents: + return image, latent + else: + return image, None + + +@ARCH_REGISTRY.register() +class GFPGANBilinear(nn.Module): + """The GFPGAN architecture: Unet + StyleGAN2 decoder with SFT. + + It is the bilinear version and it does not use the complicated UpFirDnSmooth function that is not friendly for + deployment. It can be easily converted to the clean version: GFPGANv1Clean. + + + Ref: GFP-GAN: Towards Real-World Blind Face Restoration with Generative Facial Prior. + + Args: + out_size (int): The spatial size of outputs. + num_style_feat (int): Channel number of style features. Default: 512. + channel_multiplier (int): Channel multiplier for large networks of StyleGAN2. Default: 2. + decoder_load_path (str): The path to the pre-trained decoder model (usually, the StyleGAN2). Default: None. + fix_decoder (bool): Whether to fix the decoder. Default: True. + + num_mlp (int): Layer number of MLP style layers. Default: 8. + lr_mlp (float): Learning rate multiplier for mlp layers. Default: 0.01. + input_is_latent (bool): Whether input is latent style. Default: False. + different_w (bool): Whether to use different latent w for different layers. Default: False. + narrow (float): The narrow ratio for channels. Default: 1. + sft_half (bool): Whether to apply SFT on half of the input channels. Default: False. + """ + + def __init__( + self, + out_size, + num_style_feat=512, + channel_multiplier=1, + decoder_load_path=None, + fix_decoder=True, + # for stylegan decoder + num_mlp=8, + lr_mlp=0.01, + input_is_latent=False, + different_w=False, + narrow=1, + sft_half=False): + + super(GFPGANBilinear, self).__init__() + self.input_is_latent = input_is_latent + self.different_w = different_w + self.num_style_feat = num_style_feat + + unet_narrow = narrow * 0.5 # by default, use a half of input channels + channels = { + '4': int(512 * unet_narrow), + '8': int(512 * unet_narrow), + '16': int(512 * unet_narrow), + '32': int(512 * unet_narrow), + '64': int(256 * channel_multiplier * unet_narrow), + '128': int(128 * channel_multiplier * unet_narrow), + '256': int(64 * channel_multiplier * unet_narrow), + '512': int(32 * channel_multiplier * unet_narrow), + '1024': int(16 * channel_multiplier * unet_narrow) + } + + self.log_size = int(math.log(out_size, 2)) + first_out_size = 2**(int(math.log(out_size, 2))) + + self.conv_body_first = ConvLayer(3, channels[f'{first_out_size}'], 1, bias=True, activate=True) + + # downsample + in_channels = channels[f'{first_out_size}'] + self.conv_body_down = nn.ModuleList() + for i in range(self.log_size, 2, -1): + out_channels = channels[f'{2**(i - 1)}'] + self.conv_body_down.append(ResBlock(in_channels, out_channels)) + in_channels = out_channels + + self.final_conv = ConvLayer(in_channels, channels['4'], 3, bias=True, activate=True) + + # upsample + in_channels = channels['4'] + self.conv_body_up = nn.ModuleList() + for i in range(3, self.log_size + 1): + out_channels = channels[f'{2**i}'] + self.conv_body_up.append(ResUpBlock(in_channels, out_channels)) + in_channels = out_channels + + # to RGB + self.toRGB = nn.ModuleList() + for i in range(3, self.log_size + 1): + self.toRGB.append(EqualConv2d(channels[f'{2**i}'], 3, 1, stride=1, padding=0, bias=True, bias_init_val=0)) + + if different_w: + linear_out_channel = (int(math.log(out_size, 2)) * 2 - 2) * num_style_feat + else: + linear_out_channel = num_style_feat + 
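+        # With different_w, the final linear layer predicts a separate latent w for each of the
+        # StyleGAN2 decoder's 2 * log2(out_size) - 2 style layers; otherwise one shared w is predicted.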
+        self.final_linear = EqualLinear(
+            channels['4'] * 4 * 4, linear_out_channel, bias=True, bias_init_val=0, lr_mul=1, activation=None)
+
+        # the decoder: stylegan2 generator with SFT modulations
+        self.stylegan_decoder = StyleGAN2GeneratorBilinearSFT(
+            out_size=out_size,
+            num_style_feat=num_style_feat,
+            num_mlp=num_mlp,
+            channel_multiplier=channel_multiplier,
+            lr_mlp=lr_mlp,
+            narrow=narrow,
+            sft_half=sft_half)
+
+        # load pre-trained stylegan2 model if necessary
+        if decoder_load_path:
+            self.stylegan_decoder.load_state_dict(
+                torch.load(decoder_load_path, map_location=lambda storage, loc: storage)['params_ema'])
+        # fix decoder without updating params
+        if fix_decoder:
+            for _, param in self.stylegan_decoder.named_parameters():
+                param.requires_grad = False
+
+        # for SFT modulations (scale and shift)
+        self.condition_scale = nn.ModuleList()
+        self.condition_shift = nn.ModuleList()
+        for i in range(3, self.log_size + 1):
+            out_channels = channels[f'{2**i}']
+            if sft_half:
+                sft_out_channels = out_channels
+            else:
+                sft_out_channels = out_channels * 2
+            self.condition_scale.append(
+                nn.Sequential(
+                    EqualConv2d(out_channels, out_channels, 3, stride=1, padding=1, bias=True, bias_init_val=0),
+                    ScaledLeakyReLU(0.2),
+                    EqualConv2d(out_channels, sft_out_channels, 3, stride=1, padding=1, bias=True, bias_init_val=1)))
+            self.condition_shift.append(
+                nn.Sequential(
+                    EqualConv2d(out_channels, out_channels, 3, stride=1, padding=1, bias=True, bias_init_val=0),
+                    ScaledLeakyReLU(0.2),
+                    EqualConv2d(out_channels, sft_out_channels, 3, stride=1, padding=1, bias=True, bias_init_val=0)))
+
+    def forward(self, x, return_latents=False, return_rgb=True, randomize_noise=True):
+        """Forward function for GFPGANBilinear.
+
+        Args:
+            x (Tensor): Input images.
+            return_latents (bool): Whether to return style latents. Default: False.
+            return_rgb (bool): Whether to return intermediate rgb images. Default: True.
+            randomize_noise (bool): Randomize noise, used when 'noise' is False. Default: True.
+ """ + conditions = [] + unet_skips = [] + out_rgbs = [] + + # encoder + feat = self.conv_body_first(x) + for i in range(self.log_size - 2): + feat = self.conv_body_down[i](feat) + unet_skips.insert(0, feat) + + feat = self.final_conv(feat) + + # style code + style_code = self.final_linear(feat.view(feat.size(0), -1)) + if self.different_w: + style_code = style_code.view(style_code.size(0), -1, self.num_style_feat) + + # decode + for i in range(self.log_size - 2): + # add unet skip + feat = feat + unet_skips[i] + # ResUpLayer + feat = self.conv_body_up[i](feat) + # generate scale and shift for SFT layers + scale = self.condition_scale[i](feat) + conditions.append(scale.clone()) + shift = self.condition_shift[i](feat) + conditions.append(shift.clone()) + # generate rgb images + if return_rgb: + out_rgbs.append(self.toRGB[i](feat)) + + # decoder + image, _ = self.stylegan_decoder([style_code], + conditions, + return_latents=return_latents, + input_is_latent=self.input_is_latent, + randomize_noise=randomize_noise) + + return image, out_rgbs diff --git a/motion-gan-pipeline/GFPGAN/gfpgan/archs/gfpganv1_arch.py b/motion-gan-pipeline/GFPGAN/gfpgan/archs/gfpganv1_arch.py new file mode 100644 index 0000000..eaf3162 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/gfpgan/archs/gfpganv1_arch.py @@ -0,0 +1,439 @@ +import math +import random +import torch +from basicsr.archs.stylegan2_arch import (ConvLayer, EqualConv2d, EqualLinear, ResBlock, ScaledLeakyReLU, + StyleGAN2Generator) +from basicsr.ops.fused_act import FusedLeakyReLU +from basicsr.utils.registry import ARCH_REGISTRY +from torch import nn +from torch.nn import functional as F + + +class StyleGAN2GeneratorSFT(StyleGAN2Generator): + """StyleGAN2 Generator with SFT modulation (Spatial Feature Transform). + + Args: + out_size (int): The spatial size of outputs. + num_style_feat (int): Channel number of style features. Default: 512. + num_mlp (int): Layer number of MLP style layers. Default: 8. + channel_multiplier (int): Channel multiplier for large networks of StyleGAN2. Default: 2. + resample_kernel (list[int]): A list indicating the 1D resample kernel magnitude. A cross production will be + applied to extent 1D resample kernel to 2D resample kernel. Default: (1, 3, 3, 1). + lr_mlp (float): Learning rate multiplier for mlp layers. Default: 0.01. + narrow (float): The narrow ratio for channels. Default: 1. + sft_half (bool): Whether to apply SFT on half of the input channels. Default: False. + """ + + def __init__(self, + out_size, + num_style_feat=512, + num_mlp=8, + channel_multiplier=2, + resample_kernel=(1, 3, 3, 1), + lr_mlp=0.01, + narrow=1, + sft_half=False): + super(StyleGAN2GeneratorSFT, self).__init__( + out_size, + num_style_feat=num_style_feat, + num_mlp=num_mlp, + channel_multiplier=channel_multiplier, + resample_kernel=resample_kernel, + lr_mlp=lr_mlp, + narrow=narrow) + self.sft_half = sft_half + + def forward(self, + styles, + conditions, + input_is_latent=False, + noise=None, + randomize_noise=True, + truncation=1, + truncation_latent=None, + inject_index=None, + return_latents=False): + """Forward function for StyleGAN2GeneratorSFT. + + Args: + styles (list[Tensor]): Sample codes of styles. + conditions (list[Tensor]): SFT conditions to generators. + input_is_latent (bool): Whether input is latent style. Default: False. + noise (Tensor | None): Input noise or None. Default: None. + randomize_noise (bool): Randomize noise, used when 'noise' is False. Default: True. + truncation (float): The truncation ratio. Default: 1. 
+            truncation_latent (Tensor | None): The truncation latent tensor. Default: None.
+            inject_index (int | None): The injection index for mixing noise. Default: None.
+            return_latents (bool): Whether to return style latents. Default: False.
+        """
+        # style codes -> latents with Style MLP layer
+        if not input_is_latent:
+            styles = [self.style_mlp(s) for s in styles]
+        # noises
+        if noise is None:
+            if randomize_noise:
+                noise = [None] * self.num_layers  # for each style conv layer
+            else:  # use the stored noise
+                noise = [getattr(self.noises, f'noise{i}') for i in range(self.num_layers)]
+        # style truncation
+        if truncation < 1:
+            style_truncation = []
+            for style in styles:
+                style_truncation.append(truncation_latent + truncation * (style - truncation_latent))
+            styles = style_truncation
+        # get style latents with injection
+        if len(styles) == 1:
+            inject_index = self.num_latent
+
+            if styles[0].ndim < 3:
+                # repeat latent code for all the layers
+                latent = styles[0].unsqueeze(1).repeat(1, inject_index, 1)
+            else:  # used for encoder with different latent code for each layer
+                latent = styles[0]
+        elif len(styles) == 2:  # mixing noises
+            if inject_index is None:
+                inject_index = random.randint(1, self.num_latent - 1)
+            latent1 = styles[0].unsqueeze(1).repeat(1, inject_index, 1)
+            latent2 = styles[1].unsqueeze(1).repeat(1, self.num_latent - inject_index, 1)
+            latent = torch.cat([latent1, latent2], 1)
+
+        # main generation
+        out = self.constant_input(latent.shape[0])
+        out = self.style_conv1(out, latent[:, 0], noise=noise[0])
+        skip = self.to_rgb1(out, latent[:, 1])
+
+        i = 1
+        for conv1, conv2, noise1, noise2, to_rgb in zip(self.style_convs[::2], self.style_convs[1::2], noise[1::2],
+                                                        noise[2::2], self.to_rgbs):
+            out = conv1(out, latent[:, i], noise=noise1)
+
+            # the conditions may have fewer levels
+            if i < len(conditions):
+                # SFT part to combine the conditions
+                if self.sft_half:  # only apply SFT to half of the channels
+                    out_same, out_sft = torch.split(out, int(out.size(1) // 2), dim=1)
+                    out_sft = out_sft * conditions[i - 1] + conditions[i]
+                    out = torch.cat([out_same, out_sft], dim=1)
+                else:  # apply SFT to all the channels
+                    out = out * conditions[i - 1] + conditions[i]
+
+            out = conv2(out, latent[:, i + 1], noise=noise2)
+            skip = to_rgb(out, latent[:, i + 2], skip)  # feature back to the rgb space
+            i += 2
+
+        image = skip
+
+        if return_latents:
+            return image, latent
+        else:
+            return image, None
+
+
+class ConvUpLayer(nn.Module):
+    """Convolutional upsampling layer. It uses bilinear upsampler + Conv.
+
+    Args:
+        in_channels (int): Channel number of the input.
+        out_channels (int): Channel number of the output.
+        kernel_size (int): Size of the convolving kernel.
+        stride (int): Stride of the convolution. Default: 1
+        padding (int): Zero-padding added to both sides of the input. Default: 0.
+        bias (bool): If ``True``, adds a learnable bias to the output. Default: ``True``.
+        bias_init_val (float): Bias initialized value. Default: 0.
+        activate (bool): Whether to use activation. Default: True.
+    """
+
+    def __init__(self,
+                 in_channels,
+                 out_channels,
+                 kernel_size,
+                 stride=1,
+                 padding=0,
+                 bias=True,
+                 bias_init_val=0,
+                 activate=True):
+        super(ConvUpLayer, self).__init__()
+        self.in_channels = in_channels
+        self.out_channels = out_channels
+        self.kernel_size = kernel_size
+        self.stride = stride
+        self.padding = padding
+        # self.scale is used to scale the convolution weights, which is related to the common initializations.
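+        # (This is the "equalized learning rate" trick from StyleGAN2: weights are sampled from a
+        # standard normal and rescaled by 1 / sqrt(fan_in) = 1 / sqrt(in_channels * kernel_size**2)
+        # at every forward pass, keeping the effective learning rate uniform across layers.)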
+        self.scale = 1 / math.sqrt(in_channels * kernel_size**2)
+
+        self.weight = nn.Parameter(torch.randn(out_channels, in_channels, kernel_size, kernel_size))
+
+        if bias and not activate:
+            self.bias = nn.Parameter(torch.zeros(out_channels).fill_(bias_init_val))
+        else:
+            self.register_parameter('bias', None)
+
+        # activation
+        if activate:
+            if bias:
+                self.activation = FusedLeakyReLU(out_channels)
+            else:
+                self.activation = ScaledLeakyReLU(0.2)
+        else:
+            self.activation = None
+
+    def forward(self, x):
+        # bilinear upsample
+        out = F.interpolate(x, scale_factor=2, mode='bilinear', align_corners=False)
+        # conv
+        out = F.conv2d(
+            out,
+            self.weight * self.scale,
+            bias=self.bias,
+            stride=self.stride,
+            padding=self.padding,
+        )
+        # activation
+        if self.activation is not None:
+            out = self.activation(out)
+        return out
+
+
+class ResUpBlock(nn.Module):
+    """Residual block with upsampling.
+
+    Args:
+        in_channels (int): Channel number of the input.
+        out_channels (int): Channel number of the output.
+    """
+
+    def __init__(self, in_channels, out_channels):
+        super(ResUpBlock, self).__init__()
+
+        self.conv1 = ConvLayer(in_channels, in_channels, 3, bias=True, activate=True)
+        self.conv2 = ConvUpLayer(in_channels, out_channels, 3, stride=1, padding=1, bias=True, activate=True)
+        self.skip = ConvUpLayer(in_channels, out_channels, 1, bias=False, activate=False)
+
+    def forward(self, x):
+        out = self.conv1(x)
+        out = self.conv2(out)
+        skip = self.skip(x)
+        out = (out + skip) / math.sqrt(2)
+        return out
+
+
+@ARCH_REGISTRY.register()
+class GFPGANv1(nn.Module):
+    """The GFPGAN architecture: Unet + StyleGAN2 decoder with SFT.
+
+    Ref: GFP-GAN: Towards Real-World Blind Face Restoration with Generative Facial Prior.
+
+    Args:
+        out_size (int): The spatial size of outputs.
+        num_style_feat (int): Channel number of style features. Default: 512.
+        channel_multiplier (int): Channel multiplier for large networks of StyleGAN2. Default: 2.
+        resample_kernel (list[int]): A list indicating the 1D resample kernel magnitude. An outer product is
+            applied to extend the 1D resample kernel to a 2D resample kernel. Default: (1, 3, 3, 1).
+        decoder_load_path (str): The path to the pre-trained decoder model (usually, the StyleGAN2). Default: None.
+        fix_decoder (bool): Whether to fix the decoder. Default: True.
+
+        num_mlp (int): Layer number of MLP style layers. Default: 8.
+        lr_mlp (float): Learning rate multiplier for mlp layers. Default: 0.01.
+        input_is_latent (bool): Whether input is latent style. Default: False.
+        different_w (bool): Whether to use different latent w for different layers. Default: False.
+        narrow (float): The narrow ratio for channels. Default: 1.
+        sft_half (bool): Whether to apply SFT on half of the input channels. Default: False.
+ """ + + def __init__( + self, + out_size, + num_style_feat=512, + channel_multiplier=1, + resample_kernel=(1, 3, 3, 1), + decoder_load_path=None, + fix_decoder=True, + # for stylegan decoder + num_mlp=8, + lr_mlp=0.01, + input_is_latent=False, + different_w=False, + narrow=1, + sft_half=False): + + super(GFPGANv1, self).__init__() + self.input_is_latent = input_is_latent + self.different_w = different_w + self.num_style_feat = num_style_feat + + unet_narrow = narrow * 0.5 # by default, use a half of input channels + channels = { + '4': int(512 * unet_narrow), + '8': int(512 * unet_narrow), + '16': int(512 * unet_narrow), + '32': int(512 * unet_narrow), + '64': int(256 * channel_multiplier * unet_narrow), + '128': int(128 * channel_multiplier * unet_narrow), + '256': int(64 * channel_multiplier * unet_narrow), + '512': int(32 * channel_multiplier * unet_narrow), + '1024': int(16 * channel_multiplier * unet_narrow) + } + + self.log_size = int(math.log(out_size, 2)) + first_out_size = 2**(int(math.log(out_size, 2))) + + self.conv_body_first = ConvLayer(3, channels[f'{first_out_size}'], 1, bias=True, activate=True) + + # downsample + in_channels = channels[f'{first_out_size}'] + self.conv_body_down = nn.ModuleList() + for i in range(self.log_size, 2, -1): + out_channels = channels[f'{2**(i - 1)}'] + self.conv_body_down.append(ResBlock(in_channels, out_channels, resample_kernel)) + in_channels = out_channels + + self.final_conv = ConvLayer(in_channels, channels['4'], 3, bias=True, activate=True) + + # upsample + in_channels = channels['4'] + self.conv_body_up = nn.ModuleList() + for i in range(3, self.log_size + 1): + out_channels = channels[f'{2**i}'] + self.conv_body_up.append(ResUpBlock(in_channels, out_channels)) + in_channels = out_channels + + # to RGB + self.toRGB = nn.ModuleList() + for i in range(3, self.log_size + 1): + self.toRGB.append(EqualConv2d(channels[f'{2**i}'], 3, 1, stride=1, padding=0, bias=True, bias_init_val=0)) + + if different_w: + linear_out_channel = (int(math.log(out_size, 2)) * 2 - 2) * num_style_feat + else: + linear_out_channel = num_style_feat + + self.final_linear = EqualLinear( + channels['4'] * 4 * 4, linear_out_channel, bias=True, bias_init_val=0, lr_mul=1, activation=None) + + # the decoder: stylegan2 generator with SFT modulations + self.stylegan_decoder = StyleGAN2GeneratorSFT( + out_size=out_size, + num_style_feat=num_style_feat, + num_mlp=num_mlp, + channel_multiplier=channel_multiplier, + resample_kernel=resample_kernel, + lr_mlp=lr_mlp, + narrow=narrow, + sft_half=sft_half) + + # load pre-trained stylegan2 model if necessary + if decoder_load_path: + self.stylegan_decoder.load_state_dict( + torch.load(decoder_load_path, map_location=lambda storage, loc: storage)['params_ema']) + # fix decoder without updating params + if fix_decoder: + for _, param in self.stylegan_decoder.named_parameters(): + param.requires_grad = False + + # for SFT modulations (scale and shift) + self.condition_scale = nn.ModuleList() + self.condition_shift = nn.ModuleList() + for i in range(3, self.log_size + 1): + out_channels = channels[f'{2**i}'] + if sft_half: + sft_out_channels = out_channels + else: + sft_out_channels = out_channels * 2 + self.condition_scale.append( + nn.Sequential( + EqualConv2d(out_channels, out_channels, 3, stride=1, padding=1, bias=True, bias_init_val=0), + ScaledLeakyReLU(0.2), + EqualConv2d(out_channels, sft_out_channels, 3, stride=1, padding=1, bias=True, bias_init_val=1))) + self.condition_shift.append( + nn.Sequential( + EqualConv2d(out_channels, 
out_channels, 3, stride=1, padding=1, bias=True, bias_init_val=0),
+                    ScaledLeakyReLU(0.2),
+                    EqualConv2d(out_channels, sft_out_channels, 3, stride=1, padding=1, bias=True, bias_init_val=0)))
+
+    def forward(self, x, return_latents=False, return_rgb=True, randomize_noise=True, **kwargs):
+        """Forward function for GFPGANv1.
+
+        Args:
+            x (Tensor): Input images.
+            return_latents (bool): Whether to return style latents. Default: False.
+            return_rgb (bool): Whether to return intermediate rgb images. Default: True.
+            randomize_noise (bool): Randomize noise, used when 'noise' is False. Default: True.
+        """
+        conditions = []
+        unet_skips = []
+        out_rgbs = []
+
+        # encoder
+        feat = self.conv_body_first(x)
+        for i in range(self.log_size - 2):
+            feat = self.conv_body_down[i](feat)
+            unet_skips.insert(0, feat)
+
+        feat = self.final_conv(feat)
+
+        # style code
+        style_code = self.final_linear(feat.view(feat.size(0), -1))
+        if self.different_w:
+            style_code = style_code.view(style_code.size(0), -1, self.num_style_feat)
+
+        # decode
+        for i in range(self.log_size - 2):
+            # add unet skip
+            feat = feat + unet_skips[i]
+            # ResUpLayer
+            feat = self.conv_body_up[i](feat)
+            # generate scale and shift for SFT layers
+            scale = self.condition_scale[i](feat)
+            conditions.append(scale.clone())
+            shift = self.condition_shift[i](feat)
+            conditions.append(shift.clone())
+            # generate rgb images
+            if return_rgb:
+                out_rgbs.append(self.toRGB[i](feat))
+
+        # decoder
+        image, _ = self.stylegan_decoder([style_code],
+                                         conditions,
+                                         return_latents=return_latents,
+                                         input_is_latent=self.input_is_latent,
+                                         randomize_noise=randomize_noise)
+
+        return image, out_rgbs
+
+
+@ARCH_REGISTRY.register()
+class FacialComponentDiscriminator(nn.Module):
+    """Facial component (eyes, mouth, nose) discriminator used in GFPGAN.
+    """
+
+    def __init__(self):
+        super(FacialComponentDiscriminator, self).__init__()
+        # It now uses a VGG-style architecture with fixed model size
+        self.conv1 = ConvLayer(3, 64, 3, downsample=False, resample_kernel=(1, 3, 3, 1), bias=True, activate=True)
+        self.conv2 = ConvLayer(64, 128, 3, downsample=True, resample_kernel=(1, 3, 3, 1), bias=True, activate=True)
+        self.conv3 = ConvLayer(128, 128, 3, downsample=False, resample_kernel=(1, 3, 3, 1), bias=True, activate=True)
+        self.conv4 = ConvLayer(128, 256, 3, downsample=True, resample_kernel=(1, 3, 3, 1), bias=True, activate=True)
+        self.conv5 = ConvLayer(256, 256, 3, downsample=False, resample_kernel=(1, 3, 3, 1), bias=True, activate=True)
+        self.final_conv = ConvLayer(256, 1, 3, bias=True, activate=False)
+
+    def forward(self, x, return_feats=False, **kwargs):
+        """Forward function for FacialComponentDiscriminator.
+
+        Args:
+            x (Tensor): Input images.
+            return_feats (bool): Whether to return intermediate features. Default: False.
+ """ + feat = self.conv1(x) + feat = self.conv3(self.conv2(feat)) + rlt_feats = [] + if return_feats: + rlt_feats.append(feat.clone()) + feat = self.conv5(self.conv4(feat)) + if return_feats: + rlt_feats.append(feat.clone()) + out = self.final_conv(feat) + + if return_feats: + return out, rlt_feats + else: + return out, None diff --git a/motion-gan-pipeline/GFPGAN/gfpgan/archs/gfpganv1_clean_arch.py b/motion-gan-pipeline/GFPGAN/gfpgan/archs/gfpganv1_clean_arch.py new file mode 100644 index 0000000..d6c2705 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/gfpgan/archs/gfpganv1_clean_arch.py @@ -0,0 +1,324 @@ +import math +import random +import torch +from basicsr.utils.registry import ARCH_REGISTRY +from torch import nn +from torch.nn import functional as F + +from .stylegan2_clean_arch import StyleGAN2GeneratorClean + + +class StyleGAN2GeneratorCSFT(StyleGAN2GeneratorClean): + """StyleGAN2 Generator with SFT modulation (Spatial Feature Transform). + + It is the clean version without custom compiled CUDA extensions used in StyleGAN2. + + Args: + out_size (int): The spatial size of outputs. + num_style_feat (int): Channel number of style features. Default: 512. + num_mlp (int): Layer number of MLP style layers. Default: 8. + channel_multiplier (int): Channel multiplier for large networks of StyleGAN2. Default: 2. + narrow (float): The narrow ratio for channels. Default: 1. + sft_half (bool): Whether to apply SFT on half of the input channels. Default: False. + """ + + def __init__(self, out_size, num_style_feat=512, num_mlp=8, channel_multiplier=2, narrow=1, sft_half=False): + super(StyleGAN2GeneratorCSFT, self).__init__( + out_size, + num_style_feat=num_style_feat, + num_mlp=num_mlp, + channel_multiplier=channel_multiplier, + narrow=narrow) + self.sft_half = sft_half + + def forward(self, + styles, + conditions, + input_is_latent=False, + noise=None, + randomize_noise=True, + truncation=1, + truncation_latent=None, + inject_index=None, + return_latents=False): + """Forward function for StyleGAN2GeneratorCSFT. + + Args: + styles (list[Tensor]): Sample codes of styles. + conditions (list[Tensor]): SFT conditions to generators. + input_is_latent (bool): Whether input is latent style. Default: False. + noise (Tensor | None): Input noise or None. Default: None. + randomize_noise (bool): Randomize noise, used when 'noise' is False. Default: True. + truncation (float): The truncation ratio. Default: 1. + truncation_latent (Tensor | None): The truncation latent tensor. Default: None. + inject_index (int | None): The injection index for mixing noise. Default: None. + return_latents (bool): Whether to return style latents. Default: False. 
+ """ + # style codes -> latents with Style MLP layer + if not input_is_latent: + styles = [self.style_mlp(s) for s in styles] + # noises + if noise is None: + if randomize_noise: + noise = [None] * self.num_layers # for each style conv layer + else: # use the stored noise + noise = [getattr(self.noises, f'noise{i}') for i in range(self.num_layers)] + # style truncation + if truncation < 1: + style_truncation = [] + for style in styles: + style_truncation.append(truncation_latent + truncation * (style - truncation_latent)) + styles = style_truncation + # get style latents with injection + if len(styles) == 1: + inject_index = self.num_latent + + if styles[0].ndim < 3: + # repeat latent code for all the layers + latent = styles[0].unsqueeze(1).repeat(1, inject_index, 1) + else: # used for encoder with different latent code for each layer + latent = styles[0] + elif len(styles) == 2: # mixing noises + if inject_index is None: + inject_index = random.randint(1, self.num_latent - 1) + latent1 = styles[0].unsqueeze(1).repeat(1, inject_index, 1) + latent2 = styles[1].unsqueeze(1).repeat(1, self.num_latent - inject_index, 1) + latent = torch.cat([latent1, latent2], 1) + + # main generation + out = self.constant_input(latent.shape[0]) + out = self.style_conv1(out, latent[:, 0], noise=noise[0]) + skip = self.to_rgb1(out, latent[:, 1]) + + i = 1 + for conv1, conv2, noise1, noise2, to_rgb in zip(self.style_convs[::2], self.style_convs[1::2], noise[1::2], + noise[2::2], self.to_rgbs): + out = conv1(out, latent[:, i], noise=noise1) + + # the conditions may have fewer levels + if i < len(conditions): + # SFT part to combine the conditions + if self.sft_half: # only apply SFT to half of the channels + out_same, out_sft = torch.split(out, int(out.size(1) // 2), dim=1) + out_sft = out_sft * conditions[i - 1] + conditions[i] + out = torch.cat([out_same, out_sft], dim=1) + else: # apply SFT to all the channels + out = out * conditions[i - 1] + conditions[i] + + out = conv2(out, latent[:, i + 1], noise=noise2) + skip = to_rgb(out, latent[:, i + 2], skip) # feature back to the rgb space + i += 2 + + image = skip + + if return_latents: + return image, latent + else: + return image, None + + +class ResBlock(nn.Module): + """Residual block with bilinear upsampling/downsampling. + + Args: + in_channels (int): Channel number of the input. + out_channels (int): Channel number of the output. + mode (str): Upsampling/downsampling mode. Options: down | up. Default: down. + """ + + def __init__(self, in_channels, out_channels, mode='down'): + super(ResBlock, self).__init__() + + self.conv1 = nn.Conv2d(in_channels, in_channels, 3, 1, 1) + self.conv2 = nn.Conv2d(in_channels, out_channels, 3, 1, 1) + self.skip = nn.Conv2d(in_channels, out_channels, 1, bias=False) + if mode == 'down': + self.scale_factor = 0.5 + elif mode == 'up': + self.scale_factor = 2 + + def forward(self, x): + out = F.leaky_relu_(self.conv1(x), negative_slope=0.2) + # upsample/downsample + out = F.interpolate(out, scale_factor=self.scale_factor, mode='bilinear', align_corners=False) + out = F.leaky_relu_(self.conv2(out), negative_slope=0.2) + # skip + x = F.interpolate(x, scale_factor=self.scale_factor, mode='bilinear', align_corners=False) + skip = self.skip(x) + out = out + skip + return out + + +@ARCH_REGISTRY.register() +class GFPGANv1Clean(nn.Module): + """The GFPGAN architecture: Unet + StyleGAN2 decoder with SFT. + + It is the clean version without custom compiled CUDA extensions used in StyleGAN2. 
+ + Ref: GFP-GAN: Towards Real-World Blind Face Restoration with Generative Facial Prior. + + Args: + out_size (int): The spatial size of outputs. + num_style_feat (int): Channel number of style features. Default: 512. + channel_multiplier (int): Channel multiplier for large networks of StyleGAN2. Default: 2. + decoder_load_path (str): The path to the pre-trained decoder model (usually, the StyleGAN2). Default: None. + fix_decoder (bool): Whether to fix the decoder. Default: True. + + num_mlp (int): Layer number of MLP style layers. Default: 8. + input_is_latent (bool): Whether input is latent style. Default: False. + different_w (bool): Whether to use different latent w for different layers. Default: False. + narrow (float): The narrow ratio for channels. Default: 1. + sft_half (bool): Whether to apply SFT on half of the input channels. Default: False. + """ + + def __init__( + self, + out_size, + num_style_feat=512, + channel_multiplier=1, + decoder_load_path=None, + fix_decoder=True, + # for stylegan decoder + num_mlp=8, + input_is_latent=False, + different_w=False, + narrow=1, + sft_half=False): + + super(GFPGANv1Clean, self).__init__() + self.input_is_latent = input_is_latent + self.different_w = different_w + self.num_style_feat = num_style_feat + + unet_narrow = narrow * 0.5 # by default, use a half of input channels + channels = { + '4': int(512 * unet_narrow), + '8': int(512 * unet_narrow), + '16': int(512 * unet_narrow), + '32': int(512 * unet_narrow), + '64': int(256 * channel_multiplier * unet_narrow), + '128': int(128 * channel_multiplier * unet_narrow), + '256': int(64 * channel_multiplier * unet_narrow), + '512': int(32 * channel_multiplier * unet_narrow), + '1024': int(16 * channel_multiplier * unet_narrow) + } + + self.log_size = int(math.log(out_size, 2)) + first_out_size = 2**(int(math.log(out_size, 2))) + + self.conv_body_first = nn.Conv2d(3, channels[f'{first_out_size}'], 1) + + # downsample + in_channels = channels[f'{first_out_size}'] + self.conv_body_down = nn.ModuleList() + for i in range(self.log_size, 2, -1): + out_channels = channels[f'{2**(i - 1)}'] + self.conv_body_down.append(ResBlock(in_channels, out_channels, mode='down')) + in_channels = out_channels + + self.final_conv = nn.Conv2d(in_channels, channels['4'], 3, 1, 1) + + # upsample + in_channels = channels['4'] + self.conv_body_up = nn.ModuleList() + for i in range(3, self.log_size + 1): + out_channels = channels[f'{2**i}'] + self.conv_body_up.append(ResBlock(in_channels, out_channels, mode='up')) + in_channels = out_channels + + # to RGB + self.toRGB = nn.ModuleList() + for i in range(3, self.log_size + 1): + self.toRGB.append(nn.Conv2d(channels[f'{2**i}'], 3, 1)) + + if different_w: + linear_out_channel = (int(math.log(out_size, 2)) * 2 - 2) * num_style_feat + else: + linear_out_channel = num_style_feat + + self.final_linear = nn.Linear(channels['4'] * 4 * 4, linear_out_channel) + + # the decoder: stylegan2 generator with SFT modulations + self.stylegan_decoder = StyleGAN2GeneratorCSFT( + out_size=out_size, + num_style_feat=num_style_feat, + num_mlp=num_mlp, + channel_multiplier=channel_multiplier, + narrow=narrow, + sft_half=sft_half) + + # load pre-trained stylegan2 model if necessary + if decoder_load_path: + self.stylegan_decoder.load_state_dict( + torch.load(decoder_load_path, map_location=lambda storage, loc: storage)['params_ema']) + # fix decoder without updating params + if fix_decoder: + for _, param in self.stylegan_decoder.named_parameters(): + param.requires_grad = False + + # for SFT 
modulations (scale and shift)
+        self.condition_scale = nn.ModuleList()
+        self.condition_shift = nn.ModuleList()
+        for i in range(3, self.log_size + 1):
+            out_channels = channels[f'{2**i}']
+            if sft_half:
+                sft_out_channels = out_channels
+            else:
+                sft_out_channels = out_channels * 2
+            self.condition_scale.append(
+                nn.Sequential(
+                    nn.Conv2d(out_channels, out_channels, 3, 1, 1), nn.LeakyReLU(0.2, True),
+                    nn.Conv2d(out_channels, sft_out_channels, 3, 1, 1)))
+            self.condition_shift.append(
+                nn.Sequential(
+                    nn.Conv2d(out_channels, out_channels, 3, 1, 1), nn.LeakyReLU(0.2, True),
+                    nn.Conv2d(out_channels, sft_out_channels, 3, 1, 1)))
+
+    def forward(self, x, return_latents=False, return_rgb=True, randomize_noise=True, **kwargs):
+        """Forward function for GFPGANv1Clean.
+
+        Args:
+            x (Tensor): Input images.
+            return_latents (bool): Whether to return style latents. Default: False.
+            return_rgb (bool): Whether to return intermediate rgb images. Default: True.
+            randomize_noise (bool): Randomize noise, used when 'noise' is False. Default: True.
+        """
+        conditions = []
+        unet_skips = []
+        out_rgbs = []
+
+        # encoder
+        feat = F.leaky_relu_(self.conv_body_first(x), negative_slope=0.2)
+        for i in range(self.log_size - 2):
+            feat = self.conv_body_down[i](feat)
+            unet_skips.insert(0, feat)
+        feat = F.leaky_relu_(self.final_conv(feat), negative_slope=0.2)
+
+        # style code
+        style_code = self.final_linear(feat.view(feat.size(0), -1))
+        if self.different_w:
+            style_code = style_code.view(style_code.size(0), -1, self.num_style_feat)
+
+        # decode
+        for i in range(self.log_size - 2):
+            # add unet skip
+            feat = feat + unet_skips[i]
+            # ResUpLayer
+            feat = self.conv_body_up[i](feat)
+            # generate scale and shift for SFT layers
+            scale = self.condition_scale[i](feat)
+            conditions.append(scale.clone())
+            shift = self.condition_shift[i](feat)
+            conditions.append(shift.clone())
+            # generate rgb images
+            if return_rgb:
+                out_rgbs.append(self.toRGB[i](feat))
+
+        # decoder
+        image, _ = self.stylegan_decoder([style_code],
+                                         conditions,
+                                         return_latents=return_latents,
+                                         input_is_latent=self.input_is_latent,
+                                         randomize_noise=randomize_noise)
+
+        return image, out_rgbs
diff --git a/motion-gan-pipeline/GFPGAN/gfpgan/archs/restoreformer_arch.py b/motion-gan-pipeline/GFPGAN/gfpgan/archs/restoreformer_arch.py
new file mode 100644
index 0000000..66cdff3
--- /dev/null
+++ b/motion-gan-pipeline/GFPGAN/gfpgan/archs/restoreformer_arch.py
@@ -0,0 +1,658 @@
+"""Modified from https://github.com/wzhouxiff/RestoreFormer
+"""
+import numpy as np
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+
+class VectorQuantizer(nn.Module):
+    """
+    see https://github.com/MishaLaskin/vqvae/blob/d761a999e2267766400dc646d82d3ac3657771d4/models/quantizer.py
+    ____________________________________________
+    Discretization bottleneck part of the VQ-VAE.
+ Inputs: + - n_e : number of embeddings + - e_dim : dimension of embedding + - beta : commitment cost used in loss term, beta * ||z_e(x)-sg[e]||^2 + _____________________________________________ + """ + + def __init__(self, n_e, e_dim, beta): + super(VectorQuantizer, self).__init__() + self.n_e = n_e + self.e_dim = e_dim + self.beta = beta + + self.embedding = nn.Embedding(self.n_e, self.e_dim) + self.embedding.weight.data.uniform_(-1.0 / self.n_e, 1.0 / self.n_e) + + def forward(self, z): + """ + Inputs the output of the encoder network z and maps it to a discrete + one-hot vector that is the index of the closest embedding vector e_j + z (continuous) -> z_q (discrete) + z.shape = (batch, channel, height, width) + quantization pipeline: + 1. get encoder input (B,C,H,W) + 2. flatten input to (B*H*W,C) + """ + # reshape z -> (batch, height, width, channel) and flatten + z = z.permute(0, 2, 3, 1).contiguous() + z_flattened = z.view(-1, self.e_dim) + # distances from z to embeddings e_j (z - e)^2 = z^2 + e^2 - 2 e * z + + d = torch.sum(z_flattened ** 2, dim=1, keepdim=True) + \ + torch.sum(self.embedding.weight**2, dim=1) - 2 * \ + torch.matmul(z_flattened, self.embedding.weight.t()) + + # could possible replace this here + # #\start... + # find closest encodings + + min_value, min_encoding_indices = torch.min(d, dim=1) + + min_encoding_indices = min_encoding_indices.unsqueeze(1) + + min_encodings = torch.zeros(min_encoding_indices.shape[0], self.n_e).to(z) + min_encodings.scatter_(1, min_encoding_indices, 1) + + # dtype min encodings: torch.float32 + # min_encodings shape: torch.Size([2048, 512]) + # min_encoding_indices.shape: torch.Size([2048, 1]) + + # get quantized latent vectors + z_q = torch.matmul(min_encodings, self.embedding.weight).view(z.shape) + # .........\end + + # with: + # .........\start + # min_encoding_indices = torch.argmin(d, dim=1) + # z_q = self.embedding(min_encoding_indices) + # ......\end......... 
(TODO) + + # compute loss for embedding + loss = torch.mean((z_q.detach() - z)**2) + self.beta * torch.mean((z_q - z.detach())**2) + + # preserve gradients + z_q = z + (z_q - z).detach() + + # perplexity + + e_mean = torch.mean(min_encodings, dim=0) + perplexity = torch.exp(-torch.sum(e_mean * torch.log(e_mean + 1e-10))) + + # reshape back to match original input shape + z_q = z_q.permute(0, 3, 1, 2).contiguous() + + return z_q, loss, (perplexity, min_encodings, min_encoding_indices, d) + + def get_codebook_entry(self, indices, shape): + # shape specifying (batch, height, width, channel) + # TODO: check for more easy handling with nn.Embedding + min_encodings = torch.zeros(indices.shape[0], self.n_e).to(indices) + min_encodings.scatter_(1, indices[:, None], 1) + + # get quantized latent vectors + z_q = torch.matmul(min_encodings.float(), self.embedding.weight) + + if shape is not None: + z_q = z_q.view(shape) + + # reshape back to match original input shape + z_q = z_q.permute(0, 3, 1, 2).contiguous() + + return z_q + + +# pytorch_diffusion + derived encoder decoder +def nonlinearity(x): + # swish + return x * torch.sigmoid(x) + + +def Normalize(in_channels): + return torch.nn.GroupNorm(num_groups=32, num_channels=in_channels, eps=1e-6, affine=True) + + +class Upsample(nn.Module): + + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + self.conv = torch.nn.Conv2d(in_channels, in_channels, kernel_size=3, stride=1, padding=1) + + def forward(self, x): + x = torch.nn.functional.interpolate(x, scale_factor=2.0, mode='nearest') + if self.with_conv: + x = self.conv(x) + return x + + +class Downsample(nn.Module): + + def __init__(self, in_channels, with_conv): + super().__init__() + self.with_conv = with_conv + if self.with_conv: + # no asymmetric padding in torch conv, must do it ourselves + self.conv = torch.nn.Conv2d(in_channels, in_channels, kernel_size=3, stride=2, padding=0) + + def forward(self, x): + if self.with_conv: + pad = (0, 1, 0, 1) + x = torch.nn.functional.pad(x, pad, mode='constant', value=0) + x = self.conv(x) + else: + x = torch.nn.functional.avg_pool2d(x, kernel_size=2, stride=2) + return x + + +class ResnetBlock(nn.Module): + + def __init__(self, *, in_channels, out_channels=None, conv_shortcut=False, dropout, temb_channels=512): + super().__init__() + self.in_channels = in_channels + out_channels = in_channels if out_channels is None else out_channels + self.out_channels = out_channels + self.use_conv_shortcut = conv_shortcut + + self.norm1 = Normalize(in_channels) + self.conv1 = torch.nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=1, padding=1) + if temb_channels > 0: + self.temb_proj = torch.nn.Linear(temb_channels, out_channels) + self.norm2 = Normalize(out_channels) + self.dropout = torch.nn.Dropout(dropout) + self.conv2 = torch.nn.Conv2d(out_channels, out_channels, kernel_size=3, stride=1, padding=1) + if self.in_channels != self.out_channels: + if self.use_conv_shortcut: + self.conv_shortcut = torch.nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=1, padding=1) + else: + self.nin_shortcut = torch.nn.Conv2d(in_channels, out_channels, kernel_size=1, stride=1, padding=0) + + def forward(self, x, temb): + h = x + h = self.norm1(h) + h = nonlinearity(h) + h = self.conv1(h) + + if temb is not None: + h = h + self.temb_proj(nonlinearity(temb))[:, :, None, None] + + h = self.norm2(h) + h = nonlinearity(h) + h = self.dropout(h) + h = self.conv2(h) + + if self.in_channels != 
self.out_channels:
+            if self.use_conv_shortcut:
+                x = self.conv_shortcut(x)
+            else:
+                x = self.nin_shortcut(x)
+
+        return x + h
+
+
+class MultiHeadAttnBlock(nn.Module):
+
+    def __init__(self, in_channels, head_size=1):
+        super().__init__()
+        self.in_channels = in_channels
+        self.head_size = head_size
+        self.att_size = in_channels // head_size
+        assert (in_channels % head_size == 0), 'The number of channels should be divisible by the head size.'
+
+        self.norm1 = Normalize(in_channels)
+        self.norm2 = Normalize(in_channels)
+
+        self.q = torch.nn.Conv2d(in_channels, in_channels, kernel_size=1, stride=1, padding=0)
+        self.k = torch.nn.Conv2d(in_channels, in_channels, kernel_size=1, stride=1, padding=0)
+        self.v = torch.nn.Conv2d(in_channels, in_channels, kernel_size=1, stride=1, padding=0)
+        self.proj_out = torch.nn.Conv2d(in_channels, in_channels, kernel_size=1, stride=1, padding=0)
+        self.num = 0
+
+    def forward(self, x, y=None):
+        h_ = x
+        h_ = self.norm1(h_)
+        if y is None:
+            y = h_
+        else:
+            y = self.norm2(y)
+
+        q = self.q(y)
+        k = self.k(h_)
+        v = self.v(h_)
+
+        # compute attention
+        b, c, h, w = q.shape
+        q = q.reshape(b, self.head_size, self.att_size, h * w)
+        q = q.permute(0, 3, 1, 2)  # b, hw, head, att
+
+        k = k.reshape(b, self.head_size, self.att_size, h * w)
+        k = k.permute(0, 3, 1, 2)
+
+        v = v.reshape(b, self.head_size, self.att_size, h * w)
+        v = v.permute(0, 3, 1, 2)
+
+        q = q.transpose(1, 2)
+        v = v.transpose(1, 2)
+        k = k.transpose(1, 2).transpose(2, 3)
+
+        scale = int(self.att_size)**(-0.5)
+        q.mul_(scale)
+        w_ = torch.matmul(q, k)
+        w_ = F.softmax(w_, dim=3)
+
+        w_ = w_.matmul(v)
+
+        w_ = w_.transpose(1, 2).contiguous()  # [b, h*w, head, att]
+        w_ = w_.view(b, h, w, -1)
+        w_ = w_.permute(0, 3, 1, 2)
+
+        w_ = self.proj_out(w_)
+
+        return x + w_
+
+
+class MultiHeadEncoder(nn.Module):
+
+    def __init__(self,
+                 ch,
+                 out_ch,
+                 ch_mult=(1, 2, 4, 8),
+                 num_res_blocks=2,
+                 attn_resolutions=(16, ),
+                 dropout=0.0,
+                 resamp_with_conv=True,
+                 in_channels=3,
+                 resolution=512,
+                 z_channels=256,
+                 double_z=True,
+                 enable_mid=True,
+                 head_size=1,
+                 **ignore_kwargs):
+        super().__init__()
+        self.ch = ch
+        self.temb_ch = 0
+        self.num_resolutions = len(ch_mult)
+        self.num_res_blocks = num_res_blocks
+        self.resolution = resolution
+        self.in_channels = in_channels
+        self.enable_mid = enable_mid
+
+        # downsampling
+        self.conv_in = torch.nn.Conv2d(in_channels, self.ch, kernel_size=3, stride=1, padding=1)
+
+        curr_res = resolution
+        in_ch_mult = (1, ) + tuple(ch_mult)
+        self.down = nn.ModuleList()
+        for i_level in range(self.num_resolutions):
+            block = nn.ModuleList()
+            attn = nn.ModuleList()
+            block_in = ch * in_ch_mult[i_level]
+            block_out = ch * ch_mult[i_level]
+            for i_block in range(self.num_res_blocks):
+                block.append(
+                    ResnetBlock(
+                        in_channels=block_in, out_channels=block_out, temb_channels=self.temb_ch, dropout=dropout))
+                block_in = block_out
+                if curr_res in attn_resolutions:
+                    attn.append(MultiHeadAttnBlock(block_in, head_size))
+            down = nn.Module()
+            down.block = block
+            down.attn = attn
+            if i_level != self.num_resolutions - 1:
+                down.downsample = Downsample(block_in, resamp_with_conv)
+                curr_res = curr_res // 2
+            self.down.append(down)
+
+        # middle
+        if self.enable_mid:
+            self.mid = nn.Module()
+            self.mid.block_1 = ResnetBlock(
+                in_channels=block_in, out_channels=block_in, temb_channels=self.temb_ch, dropout=dropout)
+            self.mid.attn_1 = MultiHeadAttnBlock(block_in, head_size)
+            self.mid.block_2 = ResnetBlock(
+                in_channels=block_in, out_channels=block_in,
temb_channels=self.temb_ch, dropout=dropout) + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d( + block_in, 2 * z_channels if double_z else z_channels, kernel_size=3, stride=1, padding=1) + + def forward(self, x): + hs = {} + # timestep embedding + temb = None + + # downsampling + h = self.conv_in(x) + hs['in'] = h + for i_level in range(self.num_resolutions): + for i_block in range(self.num_res_blocks): + h = self.down[i_level].block[i_block](h, temb) + if len(self.down[i_level].attn) > 0: + h = self.down[i_level].attn[i_block](h) + + if i_level != self.num_resolutions - 1: + # hs.append(h) + hs['block_' + str(i_level)] = h + h = self.down[i_level].downsample(h) + + # middle + # h = hs[-1] + if self.enable_mid: + h = self.mid.block_1(h, temb) + hs['block_' + str(i_level) + '_atten'] = h + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + hs['mid_atten'] = h + + # end + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + # hs.append(h) + hs['out'] = h + + return hs + + +class MultiHeadDecoder(nn.Module): + + def __init__(self, + ch, + out_ch, + ch_mult=(1, 2, 4, 8), + num_res_blocks=2, + attn_resolutions=(16, ), + dropout=0.0, + resamp_with_conv=True, + in_channels=3, + resolution=512, + z_channels=256, + give_pre_end=False, + enable_mid=True, + head_size=1, + **ignorekwargs): + super().__init__() + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + self.give_pre_end = give_pre_end + self.enable_mid = enable_mid + + # compute in_ch_mult, block_in and curr_res at lowest res + block_in = ch * ch_mult[self.num_resolutions - 1] + curr_res = resolution // 2**(self.num_resolutions - 1) + self.z_shape = (1, z_channels, curr_res, curr_res) + print('Working with z of shape {} = {} dimensions.'.format(self.z_shape, np.prod(self.z_shape))) + + # z to block_in + self.conv_in = torch.nn.Conv2d(z_channels, block_in, kernel_size=3, stride=1, padding=1) + + # middle + if self.enable_mid: + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock( + in_channels=block_in, out_channels=block_in, temb_channels=self.temb_ch, dropout=dropout) + self.mid.attn_1 = MultiHeadAttnBlock(block_in, head_size) + self.mid.block_2 = ResnetBlock( + in_channels=block_in, out_channels=block_in, temb_channels=self.temb_ch, dropout=dropout) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks + 1): + block.append( + ResnetBlock( + in_channels=block_in, out_channels=block_out, temb_channels=self.temb_ch, dropout=dropout)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(MultiHeadAttnBlock(block_in, head_size)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d(block_in, out_ch, kernel_size=3, stride=1, padding=1) + + def forward(self, z): + # assert z.shape[1:] == self.z_shape[1:] + self.last_z_shape = z.shape + + # timestep embedding + temb = None + + # z to block_in + h = self.conv_in(z) + + # middle + if self.enable_mid: + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h) + h = self.mid.block_2(h, temb) + + # 
upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks + 1): + h = self.up[i_level].block[i_block](h, temb) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h) + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + if self.give_pre_end: + return h + + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + +class MultiHeadDecoderTransformer(nn.Module): + + def __init__(self, + ch, + out_ch, + ch_mult=(1, 2, 4, 8), + num_res_blocks=2, + attn_resolutions=(16, ), + dropout=0.0, + resamp_with_conv=True, + in_channels=3, + resolution=512, + z_channels=256, + give_pre_end=False, + enable_mid=True, + head_size=1, + **ignorekwargs): + super().__init__() + self.ch = ch + self.temb_ch = 0 + self.num_resolutions = len(ch_mult) + self.num_res_blocks = num_res_blocks + self.resolution = resolution + self.in_channels = in_channels + self.give_pre_end = give_pre_end + self.enable_mid = enable_mid + + # compute in_ch_mult, block_in and curr_res at lowest res + block_in = ch * ch_mult[self.num_resolutions - 1] + curr_res = resolution // 2**(self.num_resolutions - 1) + self.z_shape = (1, z_channels, curr_res, curr_res) + print('Working with z of shape {} = {} dimensions.'.format(self.z_shape, np.prod(self.z_shape))) + + # z to block_in + self.conv_in = torch.nn.Conv2d(z_channels, block_in, kernel_size=3, stride=1, padding=1) + + # middle + if self.enable_mid: + self.mid = nn.Module() + self.mid.block_1 = ResnetBlock( + in_channels=block_in, out_channels=block_in, temb_channels=self.temb_ch, dropout=dropout) + self.mid.attn_1 = MultiHeadAttnBlock(block_in, head_size) + self.mid.block_2 = ResnetBlock( + in_channels=block_in, out_channels=block_in, temb_channels=self.temb_ch, dropout=dropout) + + # upsampling + self.up = nn.ModuleList() + for i_level in reversed(range(self.num_resolutions)): + block = nn.ModuleList() + attn = nn.ModuleList() + block_out = ch * ch_mult[i_level] + for i_block in range(self.num_res_blocks + 1): + block.append( + ResnetBlock( + in_channels=block_in, out_channels=block_out, temb_channels=self.temb_ch, dropout=dropout)) + block_in = block_out + if curr_res in attn_resolutions: + attn.append(MultiHeadAttnBlock(block_in, head_size)) + up = nn.Module() + up.block = block + up.attn = attn + if i_level != 0: + up.upsample = Upsample(block_in, resamp_with_conv) + curr_res = curr_res * 2 + self.up.insert(0, up) # prepend to get consistent order + + # end + self.norm_out = Normalize(block_in) + self.conv_out = torch.nn.Conv2d(block_in, out_ch, kernel_size=3, stride=1, padding=1) + + def forward(self, z, hs): + # assert z.shape[1:] == self.z_shape[1:] + # self.last_z_shape = z.shape + + # timestep embedding + temb = None + + # z to block_in + h = self.conv_in(z) + + # middle + if self.enable_mid: + h = self.mid.block_1(h, temb) + h = self.mid.attn_1(h, hs['mid_atten']) + h = self.mid.block_2(h, temb) + + # upsampling + for i_level in reversed(range(self.num_resolutions)): + for i_block in range(self.num_res_blocks + 1): + h = self.up[i_level].block[i_block](h, temb) + if len(self.up[i_level].attn) > 0: + h = self.up[i_level].attn[i_block](h, hs['block_' + str(i_level) + '_atten']) + # hfeature = h.clone() + if i_level != 0: + h = self.up[i_level].upsample(h) + + # end + if self.give_pre_end: + return h + + h = self.norm_out(h) + h = nonlinearity(h) + h = self.conv_out(h) + return h + + +class RestoreFormer(nn.Module): + + def __init__(self, + n_embed=1024, + embed_dim=256, + 
ch=64, + out_ch=3, + ch_mult=(1, 2, 2, 4, 4, 8), + num_res_blocks=2, + attn_resolutions=(16, ), + dropout=0.0, + in_channels=3, + resolution=512, + z_channels=256, + double_z=False, + enable_mid=True, + fix_decoder=False, + fix_codebook=True, + fix_encoder=False, + head_size=8): + super(RestoreFormer, self).__init__() + + self.encoder = MultiHeadEncoder( + ch=ch, + out_ch=out_ch, + ch_mult=ch_mult, + num_res_blocks=num_res_blocks, + attn_resolutions=attn_resolutions, + dropout=dropout, + in_channels=in_channels, + resolution=resolution, + z_channels=z_channels, + double_z=double_z, + enable_mid=enable_mid, + head_size=head_size) + self.decoder = MultiHeadDecoderTransformer( + ch=ch, + out_ch=out_ch, + ch_mult=ch_mult, + num_res_blocks=num_res_blocks, + attn_resolutions=attn_resolutions, + dropout=dropout, + in_channels=in_channels, + resolution=resolution, + z_channels=z_channels, + enable_mid=enable_mid, + head_size=head_size) + + self.quantize = VectorQuantizer(n_embed, embed_dim, beta=0.25) + + self.quant_conv = torch.nn.Conv2d(z_channels, embed_dim, 1) + self.post_quant_conv = torch.nn.Conv2d(embed_dim, z_channels, 1) + + if fix_decoder: + for _, param in self.decoder.named_parameters(): + param.requires_grad = False + for _, param in self.post_quant_conv.named_parameters(): + param.requires_grad = False + for _, param in self.quantize.named_parameters(): + param.requires_grad = False + elif fix_codebook: + for _, param in self.quantize.named_parameters(): + param.requires_grad = False + + if fix_encoder: + for _, param in self.encoder.named_parameters(): + param.requires_grad = False + + def encode(self, x): + + hs = self.encoder(x) + h = self.quant_conv(hs['out']) + quant, emb_loss, info = self.quantize(h) + return quant, emb_loss, info, hs + + def decode(self, quant, hs): + quant = self.post_quant_conv(quant) + dec = self.decoder(quant, hs) + + return dec + + def forward(self, input, **kwargs): + quant, diff, info, hs = self.encode(input) + dec = self.decode(quant, hs) + + return dec, None diff --git a/motion-gan-pipeline/GFPGAN/gfpgan/archs/stylegan2_bilinear_arch.py b/motion-gan-pipeline/GFPGAN/gfpgan/archs/stylegan2_bilinear_arch.py new file mode 100644 index 0000000..1342ee3 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/gfpgan/archs/stylegan2_bilinear_arch.py @@ -0,0 +1,613 @@ +import math +import random +import torch +from basicsr.ops.fused_act import FusedLeakyReLU, fused_leaky_relu +from basicsr.utils.registry import ARCH_REGISTRY +from torch import nn +from torch.nn import functional as F + + +class NormStyleCode(nn.Module): + + def forward(self, x): + """Normalize the style codes. + + Args: + x (Tensor): Style codes with shape (b, c). + + Returns: + Tensor: Normalized tensor. + """ + return x * torch.rsqrt(torch.mean(x**2, dim=1, keepdim=True) + 1e-8) + + +class EqualLinear(nn.Module): + """Equalized Linear as StyleGAN2. + + Args: + in_channels (int): Size of each sample. + out_channels (int): Size of each output sample. + bias (bool): If set to ``False``, the layer will not learn an additive + bias. Default: ``True``. + bias_init_val (float): Bias initialized value. Default: 0. + lr_mul (float): Learning rate multiplier. Default: 1. + activation (None | str): The activation after ``linear`` operation. + Supported: 'fused_lrelu', None. Default: None. 
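+
+    Example:
+        A minimal shape sketch (sizes are illustrative, and importing this
+        module assumes basicsr's fused ops are available):
+
+        >>> import torch
+        >>> layer = EqualLinear(512, 256)
+        >>> out = layer(torch.randn(4, 512))
+        >>> tuple(out.shape)
+        (4, 256)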
+ """ + + def __init__(self, in_channels, out_channels, bias=True, bias_init_val=0, lr_mul=1, activation=None): + super(EqualLinear, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.lr_mul = lr_mul + self.activation = activation + if self.activation not in ['fused_lrelu', None]: + raise ValueError(f'Wrong activation value in EqualLinear: {activation}' + "Supported ones are: ['fused_lrelu', None].") + self.scale = (1 / math.sqrt(in_channels)) * lr_mul + + self.weight = nn.Parameter(torch.randn(out_channels, in_channels).div_(lr_mul)) + if bias: + self.bias = nn.Parameter(torch.zeros(out_channels).fill_(bias_init_val)) + else: + self.register_parameter('bias', None) + + def forward(self, x): + if self.bias is None: + bias = None + else: + bias = self.bias * self.lr_mul + if self.activation == 'fused_lrelu': + out = F.linear(x, self.weight * self.scale) + out = fused_leaky_relu(out, bias) + else: + out = F.linear(x, self.weight * self.scale, bias=bias) + return out + + def __repr__(self): + return (f'{self.__class__.__name__}(in_channels={self.in_channels}, ' + f'out_channels={self.out_channels}, bias={self.bias is not None})') + + +class ModulatedConv2d(nn.Module): + """Modulated Conv2d used in StyleGAN2. + + There is no bias in ModulatedConv2d. + + Args: + in_channels (int): Channel number of the input. + out_channels (int): Channel number of the output. + kernel_size (int): Size of the convolving kernel. + num_style_feat (int): Channel number of style features. + demodulate (bool): Whether to demodulate in the conv layer. + Default: True. + sample_mode (str | None): Indicating 'upsample', 'downsample' or None. + Default: None. + eps (float): A value added to the denominator for numerical stability. + Default: 1e-8. + """ + + def __init__(self, + in_channels, + out_channels, + kernel_size, + num_style_feat, + demodulate=True, + sample_mode=None, + eps=1e-8, + interpolation_mode='bilinear'): + super(ModulatedConv2d, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.kernel_size = kernel_size + self.demodulate = demodulate + self.sample_mode = sample_mode + self.eps = eps + self.interpolation_mode = interpolation_mode + if self.interpolation_mode == 'nearest': + self.align_corners = None + else: + self.align_corners = False + + self.scale = 1 / math.sqrt(in_channels * kernel_size**2) + # modulation inside each modulated conv + self.modulation = EqualLinear( + num_style_feat, in_channels, bias=True, bias_init_val=1, lr_mul=1, activation=None) + + self.weight = nn.Parameter(torch.randn(1, out_channels, in_channels, kernel_size, kernel_size)) + self.padding = kernel_size // 2 + + def forward(self, x, style): + """Forward function. + + Args: + x (Tensor): Tensor with shape (b, c, h, w). + style (Tensor): Tensor with shape (b, num_style_feat). + + Returns: + Tensor: Modulated tensor after convolution. 
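+
+        Example:
+            A rough shape sketch; channel and spatial sizes are illustrative:
+
+            >>> import torch
+            >>> conv = ModulatedConv2d(64, 128, 3, num_style_feat=512)
+            >>> out = conv(torch.randn(2, 64, 32, 32), torch.randn(2, 512))
+            >>> tuple(out.shape)
+            (2, 128, 32, 32)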
+ """ + b, c, h, w = x.shape # c = c_in + # weight modulation + style = self.modulation(style).view(b, 1, c, 1, 1) + # self.weight: (1, c_out, c_in, k, k); style: (b, 1, c, 1, 1) + weight = self.scale * self.weight * style # (b, c_out, c_in, k, k) + + if self.demodulate: + demod = torch.rsqrt(weight.pow(2).sum([2, 3, 4]) + self.eps) + weight = weight * demod.view(b, self.out_channels, 1, 1, 1) + + weight = weight.view(b * self.out_channels, c, self.kernel_size, self.kernel_size) + + if self.sample_mode == 'upsample': + x = F.interpolate(x, scale_factor=2, mode=self.interpolation_mode, align_corners=self.align_corners) + elif self.sample_mode == 'downsample': + x = F.interpolate(x, scale_factor=0.5, mode=self.interpolation_mode, align_corners=self.align_corners) + + b, c, h, w = x.shape + x = x.view(1, b * c, h, w) + # weight: (b*c_out, c_in, k, k), groups=b + out = F.conv2d(x, weight, padding=self.padding, groups=b) + out = out.view(b, self.out_channels, *out.shape[2:4]) + + return out + + def __repr__(self): + return (f'{self.__class__.__name__}(in_channels={self.in_channels}, ' + f'out_channels={self.out_channels}, ' + f'kernel_size={self.kernel_size}, ' + f'demodulate={self.demodulate}, sample_mode={self.sample_mode})') + + +class StyleConv(nn.Module): + """Style conv. + + Args: + in_channels (int): Channel number of the input. + out_channels (int): Channel number of the output. + kernel_size (int): Size of the convolving kernel. + num_style_feat (int): Channel number of style features. + demodulate (bool): Whether demodulate in the conv layer. Default: True. + sample_mode (str | None): Indicating 'upsample', 'downsample' or None. + Default: None. + """ + + def __init__(self, + in_channels, + out_channels, + kernel_size, + num_style_feat, + demodulate=True, + sample_mode=None, + interpolation_mode='bilinear'): + super(StyleConv, self).__init__() + self.modulated_conv = ModulatedConv2d( + in_channels, + out_channels, + kernel_size, + num_style_feat, + demodulate=demodulate, + sample_mode=sample_mode, + interpolation_mode=interpolation_mode) + self.weight = nn.Parameter(torch.zeros(1)) # for noise injection + self.activate = FusedLeakyReLU(out_channels) + + def forward(self, x, style, noise=None): + # modulate + out = self.modulated_conv(x, style) + # noise injection + if noise is None: + b, _, h, w = out.shape + noise = out.new_empty(b, 1, h, w).normal_() + out = out + self.weight * noise + # activation (with bias) + out = self.activate(out) + return out + + +class ToRGB(nn.Module): + """To RGB from features. + + Args: + in_channels (int): Channel number of input. + num_style_feat (int): Channel number of style features. + upsample (bool): Whether to upsample. Default: True. + """ + + def __init__(self, in_channels, num_style_feat, upsample=True, interpolation_mode='bilinear'): + super(ToRGB, self).__init__() + self.upsample = upsample + self.interpolation_mode = interpolation_mode + if self.interpolation_mode == 'nearest': + self.align_corners = None + else: + self.align_corners = False + self.modulated_conv = ModulatedConv2d( + in_channels, + 3, + kernel_size=1, + num_style_feat=num_style_feat, + demodulate=False, + sample_mode=None, + interpolation_mode=interpolation_mode) + self.bias = nn.Parameter(torch.zeros(1, 3, 1, 1)) + + def forward(self, x, style, skip=None): + """Forward function. + + Args: + x (Tensor): Feature tensor with shape (b, c, h, w). + style (Tensor): Tensor with shape (b, num_style_feat). + skip (Tensor): Base/skip tensor. Default: None. 
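+
+        Example:
+            A shape sketch with illustrative sizes (no skip branch here):
+
+            >>> import torch
+            >>> to_rgb = ToRGB(64, num_style_feat=512)
+            >>> rgb = to_rgb(torch.randn(2, 64, 32, 32), torch.randn(2, 512))
+            >>> tuple(rgb.shape)
+            (2, 3, 32, 32)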
+ + Returns: + Tensor: RGB images. + """ + out = self.modulated_conv(x, style) + out = out + self.bias + if skip is not None: + if self.upsample: + skip = F.interpolate( + skip, scale_factor=2, mode=self.interpolation_mode, align_corners=self.align_corners) + out = out + skip + return out + + +class ConstantInput(nn.Module): + """Constant input. + + Args: + num_channel (int): Channel number of constant input. + size (int): Spatial size of constant input. + """ + + def __init__(self, num_channel, size): + super(ConstantInput, self).__init__() + self.weight = nn.Parameter(torch.randn(1, num_channel, size, size)) + + def forward(self, batch): + out = self.weight.repeat(batch, 1, 1, 1) + return out + + +@ARCH_REGISTRY.register() +class StyleGAN2GeneratorBilinear(nn.Module): + """StyleGAN2 Generator. + + Args: + out_size (int): The spatial size of outputs. + num_style_feat (int): Channel number of style features. Default: 512. + num_mlp (int): Layer number of MLP style layers. Default: 8. + channel_multiplier (int): Channel multiplier for large networks of + StyleGAN2. Default: 2. + lr_mlp (float): Learning rate multiplier for mlp layers. Default: 0.01. + narrow (float): Narrow ratio for channels. Default: 1.0. + """ + + def __init__(self, + out_size, + num_style_feat=512, + num_mlp=8, + channel_multiplier=2, + lr_mlp=0.01, + narrow=1, + interpolation_mode='bilinear'): + super(StyleGAN2GeneratorBilinear, self).__init__() + # Style MLP layers + self.num_style_feat = num_style_feat + style_mlp_layers = [NormStyleCode()] + for i in range(num_mlp): + style_mlp_layers.append( + EqualLinear( + num_style_feat, num_style_feat, bias=True, bias_init_val=0, lr_mul=lr_mlp, + activation='fused_lrelu')) + self.style_mlp = nn.Sequential(*style_mlp_layers) + + channels = { + '4': int(512 * narrow), + '8': int(512 * narrow), + '16': int(512 * narrow), + '32': int(512 * narrow), + '64': int(256 * channel_multiplier * narrow), + '128': int(128 * channel_multiplier * narrow), + '256': int(64 * channel_multiplier * narrow), + '512': int(32 * channel_multiplier * narrow), + '1024': int(16 * channel_multiplier * narrow) + } + self.channels = channels + + self.constant_input = ConstantInput(channels['4'], size=4) + self.style_conv1 = StyleConv( + channels['4'], + channels['4'], + kernel_size=3, + num_style_feat=num_style_feat, + demodulate=True, + sample_mode=None, + interpolation_mode=interpolation_mode) + self.to_rgb1 = ToRGB(channels['4'], num_style_feat, upsample=False, interpolation_mode=interpolation_mode) + + self.log_size = int(math.log(out_size, 2)) + self.num_layers = (self.log_size - 2) * 2 + 1 + self.num_latent = self.log_size * 2 - 2 + + self.style_convs = nn.ModuleList() + self.to_rgbs = nn.ModuleList() + self.noises = nn.Module() + + in_channels = channels['4'] + # noise + for layer_idx in range(self.num_layers): + resolution = 2**((layer_idx + 5) // 2) + shape = [1, 1, resolution, resolution] + self.noises.register_buffer(f'noise{layer_idx}', torch.randn(*shape)) + # style convs and to_rgbs + for i in range(3, self.log_size + 1): + out_channels = channels[f'{2**i}'] + self.style_convs.append( + StyleConv( + in_channels, + out_channels, + kernel_size=3, + num_style_feat=num_style_feat, + demodulate=True, + sample_mode='upsample', + interpolation_mode=interpolation_mode)) + self.style_convs.append( + StyleConv( + out_channels, + out_channels, + kernel_size=3, + num_style_feat=num_style_feat, + demodulate=True, + sample_mode=None, + interpolation_mode=interpolation_mode)) + self.to_rgbs.append( + 
ToRGB(out_channels, num_style_feat, upsample=True, interpolation_mode=interpolation_mode))
+            in_channels = out_channels
+
+    def make_noise(self):
+        """Make noise for noise injection."""
+        device = self.constant_input.weight.device
+        noises = [torch.randn(1, 1, 4, 4, device=device)]
+
+        for i in range(3, self.log_size + 1):
+            for _ in range(2):
+                noises.append(torch.randn(1, 1, 2**i, 2**i, device=device))
+
+        return noises
+
+    def get_latent(self, x):
+        return self.style_mlp(x)
+
+    def mean_latent(self, num_latent):
+        latent_in = torch.randn(num_latent, self.num_style_feat, device=self.constant_input.weight.device)
+        latent = self.style_mlp(latent_in).mean(0, keepdim=True)
+        return latent
+
+    def forward(self,
+                styles,
+                input_is_latent=False,
+                noise=None,
+                randomize_noise=True,
+                truncation=1,
+                truncation_latent=None,
+                inject_index=None,
+                return_latents=False):
+        """Forward function for StyleGAN2Generator.
+
+        Args:
+            styles (list[Tensor]): Sample codes of styles.
+            input_is_latent (bool): Whether input is latent style.
+                Default: False.
+            noise (Tensor | None): Input noise or None. Default: None.
+            randomize_noise (bool): Randomize noise, used when 'noise' is
+                False. Default: True.
+            truncation (float): The truncation ratio. Default: 1.
+            truncation_latent (Tensor | None): The truncation latent tensor.
+                Default: None.
+            inject_index (int | None): The injection index for mixing noise.
+                Default: None.
+            return_latents (bool): Whether to return style latents.
+                Default: False.
+        """
+        # style codes -> latents with Style MLP layer
+        if not input_is_latent:
+            styles = [self.style_mlp(s) for s in styles]
+        # noises
+        if noise is None:
+            if randomize_noise:
+                noise = [None] * self.num_layers  # for each style conv layer
+            else:  # use the stored noise
+                noise = [getattr(self.noises, f'noise{i}') for i in range(self.num_layers)]
+        # style truncation
+        if truncation < 1:
+            style_truncation = []
+            for style in styles:
+                style_truncation.append(truncation_latent + truncation * (style - truncation_latent))
+            styles = style_truncation
+        # get style latent with injection
+        if len(styles) == 1:
+            inject_index = self.num_latent
+
+            if styles[0].ndim < 3:
+                # repeat latent code for all the layers
+                latent = styles[0].unsqueeze(1).repeat(1, inject_index, 1)
+            else:  # used for encoder with different latent code for each layer
+                latent = styles[0]
+        elif len(styles) == 2:  # mixing noises
+            if inject_index is None:
+                inject_index = random.randint(1, self.num_latent - 1)
+            latent1 = styles[0].unsqueeze(1).repeat(1, inject_index, 1)
+            latent2 = styles[1].unsqueeze(1).repeat(1, self.num_latent - inject_index, 1)
+            latent = torch.cat([latent1, latent2], 1)
+
+        # main generation
+        out = self.constant_input(latent.shape[0])
+        out = self.style_conv1(out, latent[:, 0], noise=noise[0])
+        skip = self.to_rgb1(out, latent[:, 1])
+
+        i = 1
+        for conv1, conv2, noise1, noise2, to_rgb in zip(self.style_convs[::2], self.style_convs[1::2], noise[1::2],
+                                                        noise[2::2], self.to_rgbs):
+            out = conv1(out, latent[:, i], noise=noise1)
+            out = conv2(out, latent[:, i + 1], noise=noise2)
+            skip = to_rgb(out, latent[:, i + 2], skip)
+            i += 2
+
+        image = skip
+
+        if return_latents:
+            return image, latent
+        else:
+            return image, None
+
+
+class ScaledLeakyReLU(nn.Module):
+    """Scaled LeakyReLU.
+
+    Args:
+        negative_slope (float): Negative slope. Default: 0.2. 
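+
+    Example:
+        A minimal sketch; the sqrt(2) rescaling keeps activation magnitudes
+        roughly constant, mirroring FusedLeakyReLU without the fused CUDA op:
+
+        >>> import torch
+        >>> act = ScaledLeakyReLU(0.2)
+        >>> y = act(torch.randn(8))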
+    """
+
+    def __init__(self, negative_slope=0.2):
+        super(ScaledLeakyReLU, self).__init__()
+        self.negative_slope = negative_slope
+
+    def forward(self, x):
+        out = F.leaky_relu(x, negative_slope=self.negative_slope)
+        return out * math.sqrt(2)
+
+
+class EqualConv2d(nn.Module):
+    """Equalized Conv2d as in StyleGAN2.
+
+    Args:
+        in_channels (int): Channel number of the input.
+        out_channels (int): Channel number of the output.
+        kernel_size (int): Size of the convolving kernel.
+        stride (int): Stride of the convolution. Default: 1
+        padding (int): Zero-padding added to both sides of the input.
+            Default: 0.
+        bias (bool): If ``True``, adds a learnable bias to the output.
+            Default: ``True``.
+        bias_init_val (float): Bias initialized value. Default: 0.
+    """
+
+    def __init__(self, in_channels, out_channels, kernel_size, stride=1, padding=0, bias=True, bias_init_val=0):
+        super(EqualConv2d, self).__init__()
+        self.in_channels = in_channels
+        self.out_channels = out_channels
+        self.kernel_size = kernel_size
+        self.stride = stride
+        self.padding = padding
+        self.scale = 1 / math.sqrt(in_channels * kernel_size**2)
+
+        self.weight = nn.Parameter(torch.randn(out_channels, in_channels, kernel_size, kernel_size))
+        if bias:
+            self.bias = nn.Parameter(torch.zeros(out_channels).fill_(bias_init_val))
+        else:
+            self.register_parameter('bias', None)
+
+    def forward(self, x):
+        out = F.conv2d(
+            x,
+            self.weight * self.scale,
+            bias=self.bias,
+            stride=self.stride,
+            padding=self.padding,
+        )
+
+        return out
+
+    def __repr__(self):
+        return (f'{self.__class__.__name__}(in_channels={self.in_channels}, '
+                f'out_channels={self.out_channels}, '
+                f'kernel_size={self.kernel_size},'
+                f' stride={self.stride}, padding={self.padding}, '
+                f'bias={self.bias is not None})')
+
+
+class ConvLayer(nn.Sequential):
+    """Conv Layer used in StyleGAN2 Discriminator.
+
+    Args:
+        in_channels (int): Channel number of the input.
+        out_channels (int): Channel number of the output.
+        kernel_size (int): Kernel size.
+        downsample (bool): Whether to downsample by a factor of 2.
+            Default: False.
+        bias (bool): Whether with bias. Default: True.
+        activate (bool): Whether to use activation. Default: True.
+    """
+
+    def __init__(self,
+                 in_channels,
+                 out_channels,
+                 kernel_size,
+                 downsample=False,
+                 bias=True,
+                 activate=True,
+                 interpolation_mode='bilinear'):
+        layers = []
+        self.interpolation_mode = interpolation_mode
+        # downsample
+        if downsample:
+            if self.interpolation_mode == 'nearest':
+                self.align_corners = None
+            else:
+                self.align_corners = False
+
+            layers.append(
+                torch.nn.Upsample(scale_factor=0.5, mode=interpolation_mode, align_corners=self.align_corners))
+        stride = 1
+        self.padding = kernel_size // 2
+        # conv
+        layers.append(
+            EqualConv2d(
+                in_channels, out_channels, kernel_size, stride=stride, padding=self.padding, bias=bias
+                and not activate))
+        # activation
+        if activate:
+            if bias:
+                layers.append(FusedLeakyReLU(out_channels))
+            else:
+                layers.append(ScaledLeakyReLU(0.2))
+
+        super(ConvLayer, self).__init__(*layers)
+
+
+class ResBlock(nn.Module):
+    """Residual block used in StyleGAN2 Discriminator.
+
+    Args:
+        in_channels (int): Channel number of the input.
+        out_channels (int): Channel number of the output.
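+
+    Example:
+        A shape sketch (illustrative sizes; ConvLayer's activation relies on
+        basicsr's FusedLeakyReLU, so the compiled ops are assumed):
+
+        >>> import torch
+        >>> block = ResBlock(64, 128)
+        >>> out = block(torch.randn(1, 64, 64, 64))
+        >>> tuple(out.shape)  # both branches downsample by a factor of 2
+        (1, 128, 32, 32)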
+ """ + + def __init__(self, in_channels, out_channels, interpolation_mode='bilinear'): + super(ResBlock, self).__init__() + + self.conv1 = ConvLayer(in_channels, in_channels, 3, bias=True, activate=True) + self.conv2 = ConvLayer( + in_channels, + out_channels, + 3, + downsample=True, + interpolation_mode=interpolation_mode, + bias=True, + activate=True) + self.skip = ConvLayer( + in_channels, + out_channels, + 1, + downsample=True, + interpolation_mode=interpolation_mode, + bias=False, + activate=False) + + def forward(self, x): + out = self.conv1(x) + out = self.conv2(out) + skip = self.skip(x) + out = (out + skip) / math.sqrt(2) + return out diff --git a/motion-gan-pipeline/GFPGAN/gfpgan/archs/stylegan2_clean_arch.py b/motion-gan-pipeline/GFPGAN/gfpgan/archs/stylegan2_clean_arch.py new file mode 100644 index 0000000..9e2ee94 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/gfpgan/archs/stylegan2_clean_arch.py @@ -0,0 +1,368 @@ +import math +import random +import torch +from basicsr.archs.arch_util import default_init_weights +from basicsr.utils.registry import ARCH_REGISTRY +from torch import nn +from torch.nn import functional as F + + +class NormStyleCode(nn.Module): + + def forward(self, x): + """Normalize the style codes. + + Args: + x (Tensor): Style codes with shape (b, c). + + Returns: + Tensor: Normalized tensor. + """ + return x * torch.rsqrt(torch.mean(x**2, dim=1, keepdim=True) + 1e-8) + + +class ModulatedConv2d(nn.Module): + """Modulated Conv2d used in StyleGAN2. + + There is no bias in ModulatedConv2d. + + Args: + in_channels (int): Channel number of the input. + out_channels (int): Channel number of the output. + kernel_size (int): Size of the convolving kernel. + num_style_feat (int): Channel number of style features. + demodulate (bool): Whether to demodulate in the conv layer. Default: True. + sample_mode (str | None): Indicating 'upsample', 'downsample' or None. Default: None. + eps (float): A value added to the denominator for numerical stability. Default: 1e-8. + """ + + def __init__(self, + in_channels, + out_channels, + kernel_size, + num_style_feat, + demodulate=True, + sample_mode=None, + eps=1e-8): + super(ModulatedConv2d, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.kernel_size = kernel_size + self.demodulate = demodulate + self.sample_mode = sample_mode + self.eps = eps + + # modulation inside each modulated conv + self.modulation = nn.Linear(num_style_feat, in_channels, bias=True) + # initialization + default_init_weights(self.modulation, scale=1, bias_fill=1, a=0, mode='fan_in', nonlinearity='linear') + + self.weight = nn.Parameter( + torch.randn(1, out_channels, in_channels, kernel_size, kernel_size) / + math.sqrt(in_channels * kernel_size**2)) + self.padding = kernel_size // 2 + + def forward(self, x, style): + """Forward function. + + Args: + x (Tensor): Tensor with shape (b, c, h, w). + style (Tensor): Tensor with shape (b, num_style_feat). + + Returns: + Tensor: Modulated tensor after convolution. 
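+
+        Example:
+            A shape sketch of the 'upsample' path (sizes are illustrative):
+
+            >>> import torch
+            >>> conv = ModulatedConv2d(64, 64, 3, num_style_feat=512, sample_mode='upsample')
+            >>> out = conv(torch.randn(2, 64, 16, 16), torch.randn(2, 512))
+            >>> tuple(out.shape)  # bilinear x2 interpolation happens before the conv
+            (2, 64, 32, 32)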
+ """ + b, c, h, w = x.shape # c = c_in + # weight modulation + style = self.modulation(style).view(b, 1, c, 1, 1) + # self.weight: (1, c_out, c_in, k, k); style: (b, 1, c, 1, 1) + weight = self.weight * style # (b, c_out, c_in, k, k) + + if self.demodulate: + demod = torch.rsqrt(weight.pow(2).sum([2, 3, 4]) + self.eps) + weight = weight * demod.view(b, self.out_channels, 1, 1, 1) + + weight = weight.view(b * self.out_channels, c, self.kernel_size, self.kernel_size) + + # upsample or downsample if necessary + if self.sample_mode == 'upsample': + x = F.interpolate(x, scale_factor=2, mode='bilinear', align_corners=False) + elif self.sample_mode == 'downsample': + x = F.interpolate(x, scale_factor=0.5, mode='bilinear', align_corners=False) + + b, c, h, w = x.shape + x = x.view(1, b * c, h, w) + # weight: (b*c_out, c_in, k, k), groups=b + out = F.conv2d(x, weight, padding=self.padding, groups=b) + out = out.view(b, self.out_channels, *out.shape[2:4]) + + return out + + def __repr__(self): + return (f'{self.__class__.__name__}(in_channels={self.in_channels}, out_channels={self.out_channels}, ' + f'kernel_size={self.kernel_size}, demodulate={self.demodulate}, sample_mode={self.sample_mode})') + + +class StyleConv(nn.Module): + """Style conv used in StyleGAN2. + + Args: + in_channels (int): Channel number of the input. + out_channels (int): Channel number of the output. + kernel_size (int): Size of the convolving kernel. + num_style_feat (int): Channel number of style features. + demodulate (bool): Whether demodulate in the conv layer. Default: True. + sample_mode (str | None): Indicating 'upsample', 'downsample' or None. Default: None. + """ + + def __init__(self, in_channels, out_channels, kernel_size, num_style_feat, demodulate=True, sample_mode=None): + super(StyleConv, self).__init__() + self.modulated_conv = ModulatedConv2d( + in_channels, out_channels, kernel_size, num_style_feat, demodulate=demodulate, sample_mode=sample_mode) + self.weight = nn.Parameter(torch.zeros(1)) # for noise injection + self.bias = nn.Parameter(torch.zeros(1, out_channels, 1, 1)) + self.activate = nn.LeakyReLU(negative_slope=0.2, inplace=True) + + def forward(self, x, style, noise=None): + # modulate + out = self.modulated_conv(x, style) * 2**0.5 # for conversion + # noise injection + if noise is None: + b, _, h, w = out.shape + noise = out.new_empty(b, 1, h, w).normal_() + out = out + self.weight * noise + # add bias + out = out + self.bias + # activation + out = self.activate(out) + return out + + +class ToRGB(nn.Module): + """To RGB (image space) from features. + + Args: + in_channels (int): Channel number of input. + num_style_feat (int): Channel number of style features. + upsample (bool): Whether to upsample. Default: True. + """ + + def __init__(self, in_channels, num_style_feat, upsample=True): + super(ToRGB, self).__init__() + self.upsample = upsample + self.modulated_conv = ModulatedConv2d( + in_channels, 3, kernel_size=1, num_style_feat=num_style_feat, demodulate=False, sample_mode=None) + self.bias = nn.Parameter(torch.zeros(1, 3, 1, 1)) + + def forward(self, x, style, skip=None): + """Forward function. + + Args: + x (Tensor): Feature tensor with shape (b, c, h, w). + style (Tensor): Tensor with shape (b, num_style_feat). + skip (Tensor): Base/skip tensor. Default: None. + + Returns: + Tensor: RGB images. 
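+
+        Example:
+            A sketch of progressive skip accumulation (sizes are illustrative):
+
+            >>> import torch
+            >>> to_rgb = ToRGB(64, num_style_feat=512)
+            >>> prev = torch.randn(2, 3, 16, 16)  # RGB output of the previous resolution
+            >>> out = to_rgb(torch.randn(2, 64, 32, 32), torch.randn(2, 512), skip=prev)
+            >>> tuple(out.shape)  # prev is bilinearly upsampled x2 and added
+            (2, 3, 32, 32)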
+ """ + out = self.modulated_conv(x, style) + out = out + self.bias + if skip is not None: + if self.upsample: + skip = F.interpolate(skip, scale_factor=2, mode='bilinear', align_corners=False) + out = out + skip + return out + + +class ConstantInput(nn.Module): + """Constant input. + + Args: + num_channel (int): Channel number of constant input. + size (int): Spatial size of constant input. + """ + + def __init__(self, num_channel, size): + super(ConstantInput, self).__init__() + self.weight = nn.Parameter(torch.randn(1, num_channel, size, size)) + + def forward(self, batch): + out = self.weight.repeat(batch, 1, 1, 1) + return out + + +@ARCH_REGISTRY.register() +class StyleGAN2GeneratorClean(nn.Module): + """Clean version of StyleGAN2 Generator. + + Args: + out_size (int): The spatial size of outputs. + num_style_feat (int): Channel number of style features. Default: 512. + num_mlp (int): Layer number of MLP style layers. Default: 8. + channel_multiplier (int): Channel multiplier for large networks of StyleGAN2. Default: 2. + narrow (float): Narrow ratio for channels. Default: 1.0. + """ + + def __init__(self, out_size, num_style_feat=512, num_mlp=8, channel_multiplier=2, narrow=1): + super(StyleGAN2GeneratorClean, self).__init__() + # Style MLP layers + self.num_style_feat = num_style_feat + style_mlp_layers = [NormStyleCode()] + for i in range(num_mlp): + style_mlp_layers.extend( + [nn.Linear(num_style_feat, num_style_feat, bias=True), + nn.LeakyReLU(negative_slope=0.2, inplace=True)]) + self.style_mlp = nn.Sequential(*style_mlp_layers) + # initialization + default_init_weights(self.style_mlp, scale=1, bias_fill=0, a=0.2, mode='fan_in', nonlinearity='leaky_relu') + + # channel list + channels = { + '4': int(512 * narrow), + '8': int(512 * narrow), + '16': int(512 * narrow), + '32': int(512 * narrow), + '64': int(256 * channel_multiplier * narrow), + '128': int(128 * channel_multiplier * narrow), + '256': int(64 * channel_multiplier * narrow), + '512': int(32 * channel_multiplier * narrow), + '1024': int(16 * channel_multiplier * narrow) + } + self.channels = channels + + self.constant_input = ConstantInput(channels['4'], size=4) + self.style_conv1 = StyleConv( + channels['4'], + channels['4'], + kernel_size=3, + num_style_feat=num_style_feat, + demodulate=True, + sample_mode=None) + self.to_rgb1 = ToRGB(channels['4'], num_style_feat, upsample=False) + + self.log_size = int(math.log(out_size, 2)) + self.num_layers = (self.log_size - 2) * 2 + 1 + self.num_latent = self.log_size * 2 - 2 + + self.style_convs = nn.ModuleList() + self.to_rgbs = nn.ModuleList() + self.noises = nn.Module() + + in_channels = channels['4'] + # noise + for layer_idx in range(self.num_layers): + resolution = 2**((layer_idx + 5) // 2) + shape = [1, 1, resolution, resolution] + self.noises.register_buffer(f'noise{layer_idx}', torch.randn(*shape)) + # style convs and to_rgbs + for i in range(3, self.log_size + 1): + out_channels = channels[f'{2**i}'] + self.style_convs.append( + StyleConv( + in_channels, + out_channels, + kernel_size=3, + num_style_feat=num_style_feat, + demodulate=True, + sample_mode='upsample')) + self.style_convs.append( + StyleConv( + out_channels, + out_channels, + kernel_size=3, + num_style_feat=num_style_feat, + demodulate=True, + sample_mode=None)) + self.to_rgbs.append(ToRGB(out_channels, num_style_feat, upsample=True)) + in_channels = out_channels + + def make_noise(self): + """Make noise for noise injection.""" + device = self.constant_input.weight.device + noises = [torch.randn(1, 1, 4, 4, 
device=device)] + + for i in range(3, self.log_size + 1): + for _ in range(2): + noises.append(torch.randn(1, 1, 2**i, 2**i, device=device)) + + return noises + + def get_latent(self, x): + return self.style_mlp(x) + + def mean_latent(self, num_latent): + latent_in = torch.randn(num_latent, self.num_style_feat, device=self.constant_input.weight.device) + latent = self.style_mlp(latent_in).mean(0, keepdim=True) + return latent + + def forward(self, + styles, + input_is_latent=False, + noise=None, + randomize_noise=True, + truncation=1, + truncation_latent=None, + inject_index=None, + return_latents=False): + """Forward function for StyleGAN2GeneratorClean. + + Args: + styles (list[Tensor]): Sample codes of styles. + input_is_latent (bool): Whether input is latent style. Default: False. + noise (Tensor | None): Input noise or None. Default: None. + randomize_noise (bool): Randomize noise, used when 'noise' is False. Default: True. + truncation (float): The truncation ratio. Default: 1. + truncation_latent (Tensor | None): The truncation latent tensor. Default: None. + inject_index (int | None): The injection index for mixing noise. Default: None. + return_latents (bool): Whether to return style latents. Default: False. + """ + # style codes -> latents with Style MLP layer + if not input_is_latent: + styles = [self.style_mlp(s) for s in styles] + # noises + if noise is None: + if randomize_noise: + noise = [None] * self.num_layers # for each style conv layer + else: # use the stored noise + noise = [getattr(self.noises, f'noise{i}') for i in range(self.num_layers)] + # style truncation + if truncation < 1: + style_truncation = [] + for style in styles: + style_truncation.append(truncation_latent + truncation * (style - truncation_latent)) + styles = style_truncation + # get style latents with injection + if len(styles) == 1: + inject_index = self.num_latent + + if styles[0].ndim < 3: + # repeat latent code for all the layers + latent = styles[0].unsqueeze(1).repeat(1, inject_index, 1) + else: # used for encoder with different latent code for each layer + latent = styles[0] + elif len(styles) == 2: # mixing noises + if inject_index is None: + inject_index = random.randint(1, self.num_latent - 1) + latent1 = styles[0].unsqueeze(1).repeat(1, inject_index, 1) + latent2 = styles[1].unsqueeze(1).repeat(1, self.num_latent - inject_index, 1) + latent = torch.cat([latent1, latent2], 1) + + # main generation + out = self.constant_input(latent.shape[0]) + out = self.style_conv1(out, latent[:, 0], noise=noise[0]) + skip = self.to_rgb1(out, latent[:, 1]) + + i = 1 + for conv1, conv2, noise1, noise2, to_rgb in zip(self.style_convs[::2], self.style_convs[1::2], noise[1::2], + noise[2::2], self.to_rgbs): + out = conv1(out, latent[:, i], noise=noise1) + out = conv2(out, latent[:, i + 1], noise=noise2) + skip = to_rgb(out, latent[:, i + 2], skip) # feature back to the rgb space + i += 2 + + image = skip + + if return_latents: + return image, latent + else: + return image, None diff --git a/motion-gan-pipeline/GFPGAN/gfpgan/data/__init__.py b/motion-gan-pipeline/GFPGAN/gfpgan/data/__init__.py new file mode 100644 index 0000000..69fd9f9 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/gfpgan/data/__init__.py @@ -0,0 +1,10 @@ +import importlib +from basicsr.utils import scandir +from os import path as osp + +# automatically scan and import dataset modules for registry +# scan all the files that end with '_dataset.py' under the data folder +data_folder = osp.dirname(osp.abspath(__file__)) +dataset_filenames = 
[osp.splitext(osp.basename(v))[0] for v in scandir(data_folder) if v.endswith('_dataset.py')]
+# import all the dataset modules
+_dataset_modules = [importlib.import_module(f'gfpgan.data.{file_name}') for file_name in dataset_filenames]
diff --git a/motion-gan-pipeline/GFPGAN/gfpgan/data/ffhq_degradation_dataset.py b/motion-gan-pipeline/GFPGAN/gfpgan/data/ffhq_degradation_dataset.py
new file mode 100644
index 0000000..64e5755
--- /dev/null
+++ b/motion-gan-pipeline/GFPGAN/gfpgan/data/ffhq_degradation_dataset.py
@@ -0,0 +1,230 @@
+import cv2
+import math
+import numpy as np
+import os.path as osp
+import torch
+import torch.utils.data as data
+from basicsr.data import degradations as degradations
+from basicsr.data.data_util import paths_from_folder
+from basicsr.data.transforms import augment
+from basicsr.utils import FileClient, get_root_logger, imfrombytes, img2tensor
+from basicsr.utils.registry import DATASET_REGISTRY
+from torchvision.transforms.functional import (adjust_brightness, adjust_contrast, adjust_hue, adjust_saturation,
+                                               normalize)
+
+
+@DATASET_REGISTRY.register()
+class FFHQDegradationDataset(data.Dataset):
+    """FFHQ dataset for GFPGAN.
+
+    It reads high-resolution images and then generates low-quality (LQ) images on-the-fly.
+
+    Args:
+        opt (dict): Config for train datasets. It contains the following keys:
+            dataroot_gt (str): Data root path for gt.
+            io_backend (dict): IO backend type and other kwargs.
+            mean (list | tuple): Image mean.
+            std (list | tuple): Image std.
+            use_hflip (bool): Whether to horizontally flip.
+        Please see more options in the codes.
+    """
+
+    def __init__(self, opt):
+        super(FFHQDegradationDataset, self).__init__()
+        self.opt = opt
+        # file client (io backend)
+        self.file_client = None
+        self.io_backend_opt = opt['io_backend']
+
+        self.gt_folder = opt['dataroot_gt']
+        self.mean = opt['mean']
+        self.std = opt['std']
+        self.out_size = opt['out_size']
+
+        self.crop_components = opt.get('crop_components', False)  # facial components
+        self.eye_enlarge_ratio = opt.get('eye_enlarge_ratio', 1)  # whether to enlarge eye regions
+
+        if self.crop_components:
+            # load the component list from a pre-processed pth file
+            self.components_list = torch.load(opt.get('component_path'))
+
+        # file client (lmdb io backend)
+        if self.io_backend_opt['type'] == 'lmdb':
+            self.io_backend_opt['db_paths'] = self.gt_folder
+            if not self.gt_folder.endswith('.lmdb'):
+                raise ValueError(f"'dataroot_gt' should end with '.lmdb', but received {self.gt_folder}")
+            with open(osp.join(self.gt_folder, 'meta_info.txt')) as fin:
+                self.paths = [line.split('.')[0] for line in fin]
+        else:
+            # disk backend: scan file list from a folder
+            self.paths = paths_from_folder(self.gt_folder)
+
+        # degradation configurations
+        self.blur_kernel_size = opt['blur_kernel_size']
+        self.kernel_list = opt['kernel_list']
+        self.kernel_prob = opt['kernel_prob']
+        self.blur_sigma = opt['blur_sigma']
+        self.downsample_range = opt['downsample_range']
+        self.noise_range = opt['noise_range']
+        self.jpeg_range = opt['jpeg_range']
+
+        # color jitter
+        self.color_jitter_prob = opt.get('color_jitter_prob')
+        self.color_jitter_pt_prob = opt.get('color_jitter_pt_prob')
+        self.color_jitter_shift = opt.get('color_jitter_shift', 20)
+        # to gray
+        self.gray_prob = opt.get('gray_prob')
+
+        logger = get_root_logger()
+        logger.info(f'Blur: blur_kernel_size {self.blur_kernel_size}, sigma: [{", ".join(map(str, self.blur_sigma))}]')
+        logger.info(f'Downsample: downsample_range [{", ".join(map(str, self.downsample_range))}]')
+        
logger.info(f'Noise: [{", ".join(map(str, self.noise_range))}]') + logger.info(f'JPEG compression: [{", ".join(map(str, self.jpeg_range))}]') + + if self.color_jitter_prob is not None: + logger.info(f'Use random color jitter. Prob: {self.color_jitter_prob}, shift: {self.color_jitter_shift}') + if self.gray_prob is not None: + logger.info(f'Use random gray. Prob: {self.gray_prob}') + self.color_jitter_shift /= 255. + + @staticmethod + def color_jitter(img, shift): + """jitter color: randomly jitter the RGB values, in numpy formats""" + jitter_val = np.random.uniform(-shift, shift, 3).astype(np.float32) + img = img + jitter_val + img = np.clip(img, 0, 1) + return img + + @staticmethod + def color_jitter_pt(img, brightness, contrast, saturation, hue): + """jitter color: randomly jitter the brightness, contrast, saturation, and hue, in torch Tensor formats""" + fn_idx = torch.randperm(4) + for fn_id in fn_idx: + if fn_id == 0 and brightness is not None: + brightness_factor = torch.tensor(1.0).uniform_(brightness[0], brightness[1]).item() + img = adjust_brightness(img, brightness_factor) + + if fn_id == 1 and contrast is not None: + contrast_factor = torch.tensor(1.0).uniform_(contrast[0], contrast[1]).item() + img = adjust_contrast(img, contrast_factor) + + if fn_id == 2 and saturation is not None: + saturation_factor = torch.tensor(1.0).uniform_(saturation[0], saturation[1]).item() + img = adjust_saturation(img, saturation_factor) + + if fn_id == 3 and hue is not None: + hue_factor = torch.tensor(1.0).uniform_(hue[0], hue[1]).item() + img = adjust_hue(img, hue_factor) + return img + + def get_component_coordinates(self, index, status): + """Get facial component (left_eye, right_eye, mouth) coordinates from a pre-loaded pth file""" + components_bbox = self.components_list[f'{index:08d}'] + if status[0]: # hflip + # exchange right and left eye + tmp = components_bbox['left_eye'] + components_bbox['left_eye'] = components_bbox['right_eye'] + components_bbox['right_eye'] = tmp + # modify the width coordinate + components_bbox['left_eye'][0] = self.out_size - components_bbox['left_eye'][0] + components_bbox['right_eye'][0] = self.out_size - components_bbox['right_eye'][0] + components_bbox['mouth'][0] = self.out_size - components_bbox['mouth'][0] + + # get coordinates + locations = [] + for part in ['left_eye', 'right_eye', 'mouth']: + mean = components_bbox[part][0:2] + half_len = components_bbox[part][2] + if 'eye' in part: + half_len *= self.eye_enlarge_ratio + loc = np.hstack((mean - half_len + 1, mean + half_len)) + loc = torch.from_numpy(loc).float() + locations.append(loc) + return locations + + def __getitem__(self, index): + if self.file_client is None: + self.file_client = FileClient(self.io_backend_opt.pop('type'), **self.io_backend_opt) + + # load gt image + # Shape: (h, w, c); channel order: BGR; image range: [0, 1], float32. 
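+        # The LQ counterpart is synthesized from it on-the-fly below, in order:
+        # blur -> random downsampling -> Gaussian noise -> JPEG compression ->
+        # resize back to the original resolution, plus optional color jitter / grayscale.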
+ gt_path = self.paths[index] + img_bytes = self.file_client.get(gt_path) + img_gt = imfrombytes(img_bytes, float32=True) + + # random horizontal flip + img_gt, status = augment(img_gt, hflip=self.opt['use_hflip'], rotation=False, return_status=True) + h, w, _ = img_gt.shape + + # get facial component coordinates + if self.crop_components: + locations = self.get_component_coordinates(index, status) + loc_left_eye, loc_right_eye, loc_mouth = locations + + # ------------------------ generate lq image ------------------------ # + # blur + kernel = degradations.random_mixed_kernels( + self.kernel_list, + self.kernel_prob, + self.blur_kernel_size, + self.blur_sigma, + self.blur_sigma, [-math.pi, math.pi], + noise_range=None) + img_lq = cv2.filter2D(img_gt, -1, kernel) + # downsample + scale = np.random.uniform(self.downsample_range[0], self.downsample_range[1]) + img_lq = cv2.resize(img_lq, (int(w // scale), int(h // scale)), interpolation=cv2.INTER_LINEAR) + # noise + if self.noise_range is not None: + img_lq = degradations.random_add_gaussian_noise(img_lq, self.noise_range) + # jpeg compression + if self.jpeg_range is not None: + img_lq = degradations.random_add_jpg_compression(img_lq, self.jpeg_range) + + # resize to original size + img_lq = cv2.resize(img_lq, (w, h), interpolation=cv2.INTER_LINEAR) + + # random color jitter (only for lq) + if self.color_jitter_prob is not None and (np.random.uniform() < self.color_jitter_prob): + img_lq = self.color_jitter(img_lq, self.color_jitter_shift) + # random to gray (only for lq) + if self.gray_prob and np.random.uniform() < self.gray_prob: + img_lq = cv2.cvtColor(img_lq, cv2.COLOR_BGR2GRAY) + img_lq = np.tile(img_lq[:, :, None], [1, 1, 3]) + if self.opt.get('gt_gray'): # whether convert GT to gray images + img_gt = cv2.cvtColor(img_gt, cv2.COLOR_BGR2GRAY) + img_gt = np.tile(img_gt[:, :, None], [1, 1, 3]) # repeat the color channels + + # BGR to RGB, HWC to CHW, numpy to tensor + img_gt, img_lq = img2tensor([img_gt, img_lq], bgr2rgb=True, float32=True) + + # random color jitter (pytorch version) (only for lq) + if self.color_jitter_pt_prob is not None and (np.random.uniform() < self.color_jitter_pt_prob): + brightness = self.opt.get('brightness', (0.5, 1.5)) + contrast = self.opt.get('contrast', (0.5, 1.5)) + saturation = self.opt.get('saturation', (0, 1.5)) + hue = self.opt.get('hue', (-0.1, 0.1)) + img_lq = self.color_jitter_pt(img_lq, brightness, contrast, saturation, hue) + + # round and clip + img_lq = torch.clamp((img_lq * 255.0).round(), 0, 255) / 255. 
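+        # (the round/clamp above mimics the quantization of saving to an 8-bit
+        # image, keeping the synthetic LQ consistent with real compressed inputs)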
+
+        # normalize
+        normalize(img_gt, self.mean, self.std, inplace=True)
+        normalize(img_lq, self.mean, self.std, inplace=True)
+
+        if self.crop_components:
+            return_dict = {
+                'lq': img_lq,
+                'gt': img_gt,
+                'gt_path': gt_path,
+                'loc_left_eye': loc_left_eye,
+                'loc_right_eye': loc_right_eye,
+                'loc_mouth': loc_mouth
+            }
+            return return_dict
+        else:
+            return {'lq': img_lq, 'gt': img_gt, 'gt_path': gt_path}
+
+    def __len__(self):
+        return len(self.paths)
diff --git a/motion-gan-pipeline/GFPGAN/gfpgan/models/__init__.py b/motion-gan-pipeline/GFPGAN/gfpgan/models/__init__.py
new file mode 100644
index 0000000..6afad57
--- /dev/null
+++ b/motion-gan-pipeline/GFPGAN/gfpgan/models/__init__.py
@@ -0,0 +1,10 @@
+import importlib
+from basicsr.utils import scandir
+from os import path as osp
+
+# automatically scan and import model modules for registry
+# scan all the files that end with '_model.py' under the model folder
+model_folder = osp.dirname(osp.abspath(__file__))
+model_filenames = [osp.splitext(osp.basename(v))[0] for v in scandir(model_folder) if v.endswith('_model.py')]
+# import all the model modules
+_model_modules = [importlib.import_module(f'gfpgan.models.{file_name}') for file_name in model_filenames]
diff --git a/motion-gan-pipeline/GFPGAN/gfpgan/models/gfpgan_model.py b/motion-gan-pipeline/GFPGAN/gfpgan/models/gfpgan_model.py
new file mode 100644
index 0000000..b5fb8c9
--- /dev/null
+++ b/motion-gan-pipeline/GFPGAN/gfpgan/models/gfpgan_model.py
@@ -0,0 +1,579 @@
+import math
+import os.path as osp
+import torch
+from basicsr.archs import build_network
+from basicsr.losses import build_loss
+from basicsr.losses.gan_loss import r1_penalty
+from basicsr.metrics import calculate_metric
+from basicsr.models.base_model import BaseModel
+from basicsr.utils import get_root_logger, imwrite, tensor2img
+from basicsr.utils.registry import MODEL_REGISTRY
+from collections import OrderedDict
+from torch.nn import functional as F
+from torchvision.ops import roi_align
+from tqdm import tqdm
+
+
+@MODEL_REGISTRY.register()
+class GFPGANModel(BaseModel):
+    """The GFPGAN model for Towards real-world blind face restoration with generative facial prior"""
+
+    def __init__(self, opt):
+        super(GFPGANModel, self).__init__(opt)
+        self.idx = 0  # it is used for saving data for checking
+
+        # define network
+        self.net_g = build_network(opt['network_g'])
+        self.net_g = self.model_to_device(self.net_g)
+        self.print_network(self.net_g)
+
+        # load pretrained model
+        load_path = self.opt['path'].get('pretrain_network_g', None)
+        if load_path is not None:
+            param_key = self.opt['path'].get('param_key_g', 'params')
+            self.load_network(self.net_g, load_path, self.opt['path'].get('strict_load_g', True), param_key)
+
+        self.log_size = int(math.log(self.opt['network_g']['out_size'], 2))
+
+        if self.is_train:
+            self.init_training_settings()
+
+    def init_training_settings(self):
+        train_opt = self.opt['train']
+
+        # ----------- define net_d ----------- #
+        self.net_d = build_network(self.opt['network_d'])
+        self.net_d = self.model_to_device(self.net_d)
+        self.print_network(self.net_d)
+        # load pretrained model
+        load_path = self.opt['path'].get('pretrain_network_d', None)
+        if load_path is not None:
+            self.load_network(self.net_d, load_path, self.opt['path'].get('strict_load_d', True))
+
+        # ----------- define net_g with Exponential Moving Average (EMA) ----------- #
+        # net_g_ema only used for testing on one GPU and saving. 
There is no need to wrap with DistributedDataParallel + self.net_g_ema = build_network(self.opt['network_g']).to(self.device) + # load pretrained model + load_path = self.opt['path'].get('pretrain_network_g', None) + if load_path is not None: + self.load_network(self.net_g_ema, load_path, self.opt['path'].get('strict_load_g', True), 'params_ema') + else: + self.model_ema(0) # copy net_g weight + + self.net_g.train() + self.net_d.train() + self.net_g_ema.eval() + + # ----------- facial component networks ----------- # + if ('network_d_left_eye' in self.opt and 'network_d_right_eye' in self.opt and 'network_d_mouth' in self.opt): + self.use_facial_disc = True + else: + self.use_facial_disc = False + + if self.use_facial_disc: + # left eye + self.net_d_left_eye = build_network(self.opt['network_d_left_eye']) + self.net_d_left_eye = self.model_to_device(self.net_d_left_eye) + self.print_network(self.net_d_left_eye) + load_path = self.opt['path'].get('pretrain_network_d_left_eye') + if load_path is not None: + self.load_network(self.net_d_left_eye, load_path, True, 'params') + # right eye + self.net_d_right_eye = build_network(self.opt['network_d_right_eye']) + self.net_d_right_eye = self.model_to_device(self.net_d_right_eye) + self.print_network(self.net_d_right_eye) + load_path = self.opt['path'].get('pretrain_network_d_right_eye') + if load_path is not None: + self.load_network(self.net_d_right_eye, load_path, True, 'params') + # mouth + self.net_d_mouth = build_network(self.opt['network_d_mouth']) + self.net_d_mouth = self.model_to_device(self.net_d_mouth) + self.print_network(self.net_d_mouth) + load_path = self.opt['path'].get('pretrain_network_d_mouth') + if load_path is not None: + self.load_network(self.net_d_mouth, load_path, True, 'params') + + self.net_d_left_eye.train() + self.net_d_right_eye.train() + self.net_d_mouth.train() + + # ----------- define facial component gan loss ----------- # + self.cri_component = build_loss(train_opt['gan_component_opt']).to(self.device) + + # ----------- define losses ----------- # + # pixel loss + if train_opt.get('pixel_opt'): + self.cri_pix = build_loss(train_opt['pixel_opt']).to(self.device) + else: + self.cri_pix = None + + # perceptual loss + if train_opt.get('perceptual_opt'): + self.cri_perceptual = build_loss(train_opt['perceptual_opt']).to(self.device) + else: + self.cri_perceptual = None + + # L1 loss is used in pyramid loss, component style loss and identity loss + self.cri_l1 = build_loss(train_opt['L1_opt']).to(self.device) + + # gan loss (wgan) + self.cri_gan = build_loss(train_opt['gan_opt']).to(self.device) + + # ----------- define identity loss ----------- # + if 'network_identity' in self.opt: + self.use_identity = True + else: + self.use_identity = False + + if self.use_identity: + # define identity network + self.network_identity = build_network(self.opt['network_identity']) + self.network_identity = self.model_to_device(self.network_identity) + self.print_network(self.network_identity) + load_path = self.opt['path'].get('pretrain_network_identity') + if load_path is not None: + self.load_network(self.network_identity, load_path, True, None) + self.network_identity.eval() + for param in self.network_identity.parameters(): + param.requires_grad = False + + # regularization weights + self.r1_reg_weight = train_opt['r1_reg_weight'] # for discriminator + self.net_d_iters = train_opt.get('net_d_iters', 1) + self.net_d_init_iters = train_opt.get('net_d_init_iters', 0) + self.net_d_reg_every = train_opt['net_d_reg_every'] + + # set 
up optimizers and schedulers + self.setup_optimizers() + self.setup_schedulers() + + def setup_optimizers(self): + train_opt = self.opt['train'] + + # ----------- optimizer g ----------- # + net_g_reg_ratio = 1 + normal_params = [] + for _, param in self.net_g.named_parameters(): + normal_params.append(param) + optim_params_g = [{ # add normal params first + 'params': normal_params, + 'lr': train_opt['optim_g']['lr'] + }] + optim_type = train_opt['optim_g'].pop('type') + lr = train_opt['optim_g']['lr'] * net_g_reg_ratio + betas = (0**net_g_reg_ratio, 0.99**net_g_reg_ratio) + self.optimizer_g = self.get_optimizer(optim_type, optim_params_g, lr, betas=betas) + self.optimizers.append(self.optimizer_g) + + # ----------- optimizer d ----------- # + net_d_reg_ratio = self.net_d_reg_every / (self.net_d_reg_every + 1) + normal_params = [] + for _, param in self.net_d.named_parameters(): + normal_params.append(param) + optim_params_d = [{ # add normal params first + 'params': normal_params, + 'lr': train_opt['optim_d']['lr'] + }] + optim_type = train_opt['optim_d'].pop('type') + lr = train_opt['optim_d']['lr'] * net_d_reg_ratio + betas = (0**net_d_reg_ratio, 0.99**net_d_reg_ratio) + self.optimizer_d = self.get_optimizer(optim_type, optim_params_d, lr, betas=betas) + self.optimizers.append(self.optimizer_d) + + # ----------- optimizers for facial component networks ----------- # + if self.use_facial_disc: + # setup optimizers for facial component discriminators + optim_type = train_opt['optim_component'].pop('type') + lr = train_opt['optim_component']['lr'] + # left eye + self.optimizer_d_left_eye = self.get_optimizer( + optim_type, self.net_d_left_eye.parameters(), lr, betas=(0.9, 0.99)) + self.optimizers.append(self.optimizer_d_left_eye) + # right eye + self.optimizer_d_right_eye = self.get_optimizer( + optim_type, self.net_d_right_eye.parameters(), lr, betas=(0.9, 0.99)) + self.optimizers.append(self.optimizer_d_right_eye) + # mouth + self.optimizer_d_mouth = self.get_optimizer( + optim_type, self.net_d_mouth.parameters(), lr, betas=(0.9, 0.99)) + self.optimizers.append(self.optimizer_d_mouth) + + def feed_data(self, data): + self.lq = data['lq'].to(self.device) + if 'gt' in data: + self.gt = data['gt'].to(self.device) + + if 'loc_left_eye' in data: + # get facial component locations, shape (batch, 4) + self.loc_left_eyes = data['loc_left_eye'] + self.loc_right_eyes = data['loc_right_eye'] + self.loc_mouths = data['loc_mouth'] + + # uncomment to check data + # import torchvision + # if self.opt['rank'] == 0: + # import os + # os.makedirs('tmp/gt', exist_ok=True) + # os.makedirs('tmp/lq', exist_ok=True) + # print(self.idx) + # torchvision.utils.save_image( + # self.gt, f'tmp/gt/gt_{self.idx}.png', nrow=4, padding=2, normalize=True, range=(-1, 1)) + # torchvision.utils.save_image( + # self.lq, f'tmp/lq/lq{self.idx}.png', nrow=4, padding=2, normalize=True, range=(-1, 1)) + # self.idx = self.idx + 1 + + def construct_img_pyramid(self): + """Construct image pyramid for intermediate restoration loss""" + pyramid_gt = [self.gt] + down_img = self.gt + for _ in range(0, self.log_size - 3): + down_img = F.interpolate(down_img, scale_factor=0.5, mode='bilinear', align_corners=False) + pyramid_gt.insert(0, down_img) + return pyramid_gt + + def get_roi_regions(self, eye_out_size=80, mouth_out_size=120): + face_ratio = int(self.opt['network_g']['out_size'] / 512) + eye_out_size *= face_ratio + mouth_out_size *= face_ratio + + rois_eyes = [] + rois_mouths = [] + for b in range(self.loc_left_eyes.size(0)): # 
loop for batch size
+            # left eye and right eye
+            img_inds = self.loc_left_eyes.new_full((2, 1), b)
+            bbox = torch.stack([self.loc_left_eyes[b, :], self.loc_right_eyes[b, :]], dim=0)  # shape: (2, 4)
+            rois = torch.cat([img_inds, bbox], dim=-1)  # shape: (2, 5)
+            rois_eyes.append(rois)
+            # mouth
+            img_inds = self.loc_left_eyes.new_full((1, 1), b)
+            rois = torch.cat([img_inds, self.loc_mouths[b:b + 1, :]], dim=-1)  # shape: (1, 5)
+            rois_mouths.append(rois)
+
+        rois_eyes = torch.cat(rois_eyes, 0).to(self.device)
+        rois_mouths = torch.cat(rois_mouths, 0).to(self.device)
+
+        # real images
+        all_eyes = roi_align(self.gt, boxes=rois_eyes, output_size=eye_out_size) * face_ratio
+        self.left_eyes_gt = all_eyes[0::2, :, :, :]
+        self.right_eyes_gt = all_eyes[1::2, :, :, :]
+        self.mouths_gt = roi_align(self.gt, boxes=rois_mouths, output_size=mouth_out_size) * face_ratio
+        # output
+        all_eyes = roi_align(self.output, boxes=rois_eyes, output_size=eye_out_size) * face_ratio
+        self.left_eyes = all_eyes[0::2, :, :, :]
+        self.right_eyes = all_eyes[1::2, :, :, :]
+        self.mouths = roi_align(self.output, boxes=rois_mouths, output_size=mouth_out_size) * face_ratio
+
+    def _gram_mat(self, x):
+        """Calculate Gram matrix.
+
+        Args:
+            x (torch.Tensor): Tensor with shape of (n, c, h, w).
+
+        Returns:
+            torch.Tensor: Gram matrix.
+        """
+        n, c, h, w = x.size()
+        features = x.view(n, c, w * h)
+        features_t = features.transpose(1, 2)
+        gram = features.bmm(features_t) / (c * h * w)
+        return gram
+
+    def gray_resize_for_identity(self, out, size=128):
+        out_gray = (0.2989 * out[:, 0, :, :] + 0.5870 * out[:, 1, :, :] + 0.1140 * out[:, 2, :, :])
+        out_gray = out_gray.unsqueeze(1)
+        out_gray = F.interpolate(out_gray, (size, size), mode='bilinear', align_corners=False)
+        return out_gray
+
+    def optimize_parameters(self, current_iter):
+        # optimize net_g
+        for p in self.net_d.parameters():
+            p.requires_grad = False
+        self.optimizer_g.zero_grad()
+
+        # do not update facial component net_d
+        if self.use_facial_disc:
+            for p in self.net_d_left_eye.parameters():
+                p.requires_grad = False
+            for p in self.net_d_right_eye.parameters():
+                p.requires_grad = False
+            for p in self.net_d_mouth.parameters():
+                p.requires_grad = False
+
+        # image pyramid loss weight
+        pyramid_loss_weight = self.opt['train'].get('pyramid_loss_weight', 0)
+        if pyramid_loss_weight > 0 and current_iter > self.opt['train'].get('remove_pyramid_loss', float('inf')):
+            pyramid_loss_weight = 1e-12  # very small weight to avoid unused param error
+        if pyramid_loss_weight > 0:
+            self.output, out_rgbs = self.net_g(self.lq, return_rgb=True)
+            pyramid_gt = self.construct_img_pyramid()
+        else:
+            self.output, out_rgbs = self.net_g(self.lq, return_rgb=False)
+
+        # get roi-align regions
+        if self.use_facial_disc:
+            self.get_roi_regions(eye_out_size=80, mouth_out_size=120)
+
+        l_g_total = 0
+        loss_dict = OrderedDict()
+        if (current_iter % self.net_d_iters == 0 and current_iter > self.net_d_init_iters):
+            # pixel loss
+            if self.cri_pix:
+                l_g_pix = self.cri_pix(self.output, self.gt)
+                l_g_total += l_g_pix
+                loss_dict['l_g_pix'] = l_g_pix
+
+            # image pyramid loss
+            if pyramid_loss_weight > 0:
+                for i in range(0, self.log_size - 2):
+                    l_pyramid = self.cri_l1(out_rgbs[i], pyramid_gt[i]) * pyramid_loss_weight
+                    l_g_total += l_pyramid
+                    loss_dict[f'l_p_{2**(i+3)}'] = l_pyramid
+
+            # perceptual loss
+            if self.cri_perceptual:
+                l_g_percep, l_g_style = self.cri_perceptual(self.output, self.gt)
+                if l_g_percep is not None:
+                    l_g_total += l_g_percep
+                    loss_dict['l_g_percep'] = l_g_percep
+                if 
l_g_style is not None: + l_g_total += l_g_style + loss_dict['l_g_style'] = l_g_style + + # gan loss + fake_g_pred = self.net_d(self.output) + l_g_gan = self.cri_gan(fake_g_pred, True, is_disc=False) + l_g_total += l_g_gan + loss_dict['l_g_gan'] = l_g_gan + + # facial component loss + if self.use_facial_disc: + # left eye + fake_left_eye, fake_left_eye_feats = self.net_d_left_eye(self.left_eyes, return_feats=True) + l_g_gan = self.cri_component(fake_left_eye, True, is_disc=False) + l_g_total += l_g_gan + loss_dict['l_g_gan_left_eye'] = l_g_gan + # right eye + fake_right_eye, fake_right_eye_feats = self.net_d_right_eye(self.right_eyes, return_feats=True) + l_g_gan = self.cri_component(fake_right_eye, True, is_disc=False) + l_g_total += l_g_gan + loss_dict['l_g_gan_right_eye'] = l_g_gan + # mouth + fake_mouth, fake_mouth_feats = self.net_d_mouth(self.mouths, return_feats=True) + l_g_gan = self.cri_component(fake_mouth, True, is_disc=False) + l_g_total += l_g_gan + loss_dict['l_g_gan_mouth'] = l_g_gan + + if self.opt['train'].get('comp_style_weight', 0) > 0: + # get gt feat + _, real_left_eye_feats = self.net_d_left_eye(self.left_eyes_gt, return_feats=True) + _, real_right_eye_feats = self.net_d_right_eye(self.right_eyes_gt, return_feats=True) + _, real_mouth_feats = self.net_d_mouth(self.mouths_gt, return_feats=True) + + def _comp_style(feat, feat_gt, criterion): + return criterion(self._gram_mat(feat[0]), self._gram_mat( + feat_gt[0].detach())) * 0.5 + criterion( + self._gram_mat(feat[1]), self._gram_mat(feat_gt[1].detach())) + + # facial component style loss + comp_style_loss = 0 + comp_style_loss += _comp_style(fake_left_eye_feats, real_left_eye_feats, self.cri_l1) + comp_style_loss += _comp_style(fake_right_eye_feats, real_right_eye_feats, self.cri_l1) + comp_style_loss += _comp_style(fake_mouth_feats, real_mouth_feats, self.cri_l1) + comp_style_loss = comp_style_loss * self.opt['train']['comp_style_weight'] + l_g_total += comp_style_loss + loss_dict['l_g_comp_style_loss'] = comp_style_loss + + # identity loss + if self.use_identity: + identity_weight = self.opt['train']['identity_weight'] + # get gray images and resize + out_gray = self.gray_resize_for_identity(self.output) + gt_gray = self.gray_resize_for_identity(self.gt) + + identity_gt = self.network_identity(gt_gray).detach() + identity_out = self.network_identity(out_gray) + l_identity = self.cri_l1(identity_out, identity_gt) * identity_weight + l_g_total += l_identity + loss_dict['l_identity'] = l_identity + + l_g_total.backward() + self.optimizer_g.step() + + # EMA + self.model_ema(decay=0.5**(32 / (10 * 1000))) + + # ----------- optimize net_d ----------- # + for p in self.net_d.parameters(): + p.requires_grad = True + self.optimizer_d.zero_grad() + if self.use_facial_disc: + for p in self.net_d_left_eye.parameters(): + p.requires_grad = True + for p in self.net_d_right_eye.parameters(): + p.requires_grad = True + for p in self.net_d_mouth.parameters(): + p.requires_grad = True + self.optimizer_d_left_eye.zero_grad() + self.optimizer_d_right_eye.zero_grad() + self.optimizer_d_mouth.zero_grad() + + fake_d_pred = self.net_d(self.output.detach()) + real_d_pred = self.net_d(self.gt) + l_d = self.cri_gan(real_d_pred, True, is_disc=True) + self.cri_gan(fake_d_pred, False, is_disc=True) + loss_dict['l_d'] = l_d + # In WGAN, real_score should be positive and fake_score should be negative + loss_dict['real_score'] = real_d_pred.detach().mean() + loss_dict['fake_score'] = fake_d_pred.detach().mean() + l_d.backward() + + # regularization 
loss + if current_iter % self.net_d_reg_every == 0: + self.gt.requires_grad = True + real_pred = self.net_d(self.gt) + l_d_r1 = r1_penalty(real_pred, self.gt) + l_d_r1 = (self.r1_reg_weight / 2 * l_d_r1 * self.net_d_reg_every + 0 * real_pred[0]) + loss_dict['l_d_r1'] = l_d_r1.detach().mean() + l_d_r1.backward() + + self.optimizer_d.step() + + # optimize facial component discriminators + if self.use_facial_disc: + # left eye + fake_d_pred, _ = self.net_d_left_eye(self.left_eyes.detach()) + real_d_pred, _ = self.net_d_left_eye(self.left_eyes_gt) + l_d_left_eye = self.cri_component( + real_d_pred, True, is_disc=True) + self.cri_gan( + fake_d_pred, False, is_disc=True) + loss_dict['l_d_left_eye'] = l_d_left_eye + l_d_left_eye.backward() + # right eye + fake_d_pred, _ = self.net_d_right_eye(self.right_eyes.detach()) + real_d_pred, _ = self.net_d_right_eye(self.right_eyes_gt) + l_d_right_eye = self.cri_component( + real_d_pred, True, is_disc=True) + self.cri_gan( + fake_d_pred, False, is_disc=True) + loss_dict['l_d_right_eye'] = l_d_right_eye + l_d_right_eye.backward() + # mouth + fake_d_pred, _ = self.net_d_mouth(self.mouths.detach()) + real_d_pred, _ = self.net_d_mouth(self.mouths_gt) + l_d_mouth = self.cri_component( + real_d_pred, True, is_disc=True) + self.cri_gan( + fake_d_pred, False, is_disc=True) + loss_dict['l_d_mouth'] = l_d_mouth + l_d_mouth.backward() + + self.optimizer_d_left_eye.step() + self.optimizer_d_right_eye.step() + self.optimizer_d_mouth.step() + + self.log_dict = self.reduce_loss_dict(loss_dict) + + def test(self): + with torch.no_grad(): + if hasattr(self, 'net_g_ema'): + self.net_g_ema.eval() + self.output, _ = self.net_g_ema(self.lq) + else: + logger = get_root_logger() + logger.warning('self.net_g_ema not found; using self.net_g for testing.') + self.net_g.eval() + self.output, _ = self.net_g(self.lq) + self.net_g.train() + + def dist_validation(self, dataloader, current_iter, tb_logger, save_img): + if self.opt['rank'] == 0: + self.nondist_validation(dataloader, current_iter, tb_logger, save_img) + + def nondist_validation(self, dataloader, current_iter, tb_logger, save_img): + dataset_name = dataloader.dataset.opt['name'] + with_metrics = self.opt['val'].get('metrics') is not None + use_pbar = self.opt['val'].get('pbar', False) + + if with_metrics: + if not hasattr(self, 'metric_results'): # only execute in the first run + self.metric_results = {metric: 0 for metric in self.opt['val']['metrics'].keys()} + # initialize the best metric results for each dataset_name (supporting multiple validation datasets) + self._initialize_best_metric_results(dataset_name) + # zero self.metric_results + self.metric_results = {metric: 0 for metric in self.metric_results} + + metric_data = dict() + if use_pbar: + pbar = tqdm(total=len(dataloader), unit='image') + + for idx, val_data in enumerate(dataloader): + img_name = osp.splitext(osp.basename(val_data['lq_path'][0]))[0] + self.feed_data(val_data) + self.test() + + sr_img = tensor2img(self.output.detach().cpu(), min_max=(-1, 1)) + metric_data['img'] = sr_img + if hasattr(self, 'gt'): + gt_img = tensor2img(self.gt.detach().cpu(), min_max=(-1, 1)) + metric_data['img2'] = gt_img + del self.gt + + # free tensors eagerly; full-resolution validation can otherwise run out of GPU memory + del self.lq + del self.output + torch.cuda.empty_cache() +
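+ # during training, images are saved under visualization/<img_name>/; at test time under visualization/<dataset_name>/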
+ if save_img: + if self.opt['is_train']: + save_img_path = osp.join(self.opt['path']['visualization'], img_name, + f'{img_name}_{current_iter}.png') + else: + if self.opt['val']['suffix']: + save_img_path = osp.join(self.opt['path']['visualization'], dataset_name, + f'{img_name}_{self.opt["val"]["suffix"]}.png') + else: + save_img_path = osp.join(self.opt['path']['visualization'], dataset_name, + f'{img_name}_{self.opt["name"]}.png') + imwrite(sr_img, save_img_path) + + if with_metrics: + # calculate metrics + for name, opt_ in self.opt['val']['metrics'].items(): + self.metric_results[name] += calculate_metric(metric_data, opt_) + if use_pbar: + pbar.update(1) + pbar.set_description(f'Test {img_name}') + if use_pbar: + pbar.close() + + if with_metrics: + for metric in self.metric_results.keys(): + self.metric_results[metric] /= (idx + 1) + # update the best metric result + self._update_best_metric_result(dataset_name, metric, self.metric_results[metric], current_iter) + + self._log_validation_metric_values(current_iter, dataset_name, tb_logger) + + def _log_validation_metric_values(self, current_iter, dataset_name, tb_logger): + log_str = f'Validation {dataset_name}\n' + for metric, value in self.metric_results.items(): + log_str += f'\t # {metric}: {value:.4f}' + if hasattr(self, 'best_metric_results'): + log_str += (f'\tBest: {self.best_metric_results[dataset_name][metric]["val"]:.4f} @ ' + f'{self.best_metric_results[dataset_name][metric]["iter"]} iter') + log_str += '\n' + + logger = get_root_logger() + logger.info(log_str) + if tb_logger: + for metric, value in self.metric_results.items(): + tb_logger.add_scalar(f'metrics/{dataset_name}/{metric}', value, current_iter) + + def save(self, epoch, current_iter): + # save net_g and net_d + self.save_network([self.net_g, self.net_g_ema], 'net_g', current_iter, param_key=['params', 'params_ema']) + self.save_network(self.net_d, 'net_d', current_iter) + # save component discriminators + if self.use_facial_disc: + self.save_network(self.net_d_left_eye, 'net_d_left_eye', current_iter) + self.save_network(self.net_d_right_eye, 'net_d_right_eye', current_iter) + self.save_network(self.net_d_mouth, 'net_d_mouth', current_iter) + # save training state + self.save_training_state(epoch, current_iter) diff --git a/motion-gan-pipeline/GFPGAN/gfpgan/train.py b/motion-gan-pipeline/GFPGAN/gfpgan/train.py new file mode 100644 index 0000000..fe5f1f9 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/gfpgan/train.py @@ -0,0 +1,11 @@ +# flake8: noqa +import os.path as osp +from basicsr.train import train_pipeline + +import gfpgan.archs +import gfpgan.data +import gfpgan.models + +if __name__ == '__main__': + root_path = osp.abspath(osp.join(__file__, osp.pardir, osp.pardir)) + train_pipeline(root_path)
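train.py only registers the gfpgan archs, data and models and then defers to BasicSR's train_pipeline, so a run is configured entirely by an option file such as options/train_gfpgan_v1.yml added further below. A typical multi-GPU launch (command assumed from upstream GFPGAN usage, not part of this diff) would be:

    python -m torch.distributed.launch --nproc_per_node=4 --master_port=22021 gfpgan/train.py -opt options/train_gfpgan_v1.yml --launcher pytorch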
diff --git a/motion-gan-pipeline/GFPGAN/gfpgan/utils.py b/motion-gan-pipeline/GFPGAN/gfpgan/utils.py new file mode 100644 index 0000000..3dba847 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/gfpgan/utils.py @@ -0,0 +1,152 @@ +import cv2 +import os +import torch +from basicsr.utils import img2tensor, tensor2img +from basicsr.utils.download_util import load_file_from_url +from facexlib.utils.face_restoration_helper import FaceRestoreHelper +from torchvision.transforms.functional import normalize + +from gfpgan.archs.gfpgan_bilinear_arch import GFPGANBilinear +from gfpgan.archs.gfpganv1_arch import GFPGANv1 +from gfpgan.archs.gfpganv1_clean_arch import GFPGANv1Clean + +ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + + +class GFPGANer(): + """Helper for restoration with GFPGAN. + + It will detect and crop faces, and then resize the faces to 512x512. + GFPGAN is used to restore the resized faces. + The background is upsampled with the bg_upsampler. + Finally, the faces will be pasted back to the upsampled background image. + + Args: + model_path (str): The path to the GFPGAN model. It can also be a URL (the weights are then downloaded automatically). + upscale (float): The upscale of the final output. Default: 2. + arch (str): The GFPGAN architecture. Option: clean | original. Default: clean. + channel_multiplier (int): Channel multiplier for large networks of StyleGAN2. Default: 2. + bg_upsampler (nn.Module): The upsampler for the background. Default: None. + """ + + def __init__(self, model_path, upscale=2, arch='clean', channel_multiplier=2, bg_upsampler=None, device=None): + self.upscale = upscale + self.bg_upsampler = bg_upsampler + + # initialize model + self.device = torch.device('cuda' if torch.cuda.is_available() else 'cpu') if device is None else device + # initialize the GFP-GAN + if arch == 'clean': + self.gfpgan = GFPGANv1Clean( + out_size=512, + num_style_feat=512, + channel_multiplier=channel_multiplier, + decoder_load_path=None, + fix_decoder=False, + num_mlp=8, + input_is_latent=True, + different_w=True, + narrow=1, + sft_half=True) + elif arch == 'bilinear': + self.gfpgan = GFPGANBilinear( + out_size=512, + num_style_feat=512, + channel_multiplier=channel_multiplier, + decoder_load_path=None, + fix_decoder=False, + num_mlp=8, + input_is_latent=True, + different_w=True, + narrow=1, + sft_half=True) + elif arch == 'original': + self.gfpgan = GFPGANv1( + out_size=512, + num_style_feat=512, + channel_multiplier=channel_multiplier, + decoder_load_path=None, + fix_decoder=True, + num_mlp=8, + input_is_latent=True, + different_w=True, + narrow=1, + sft_half=True) + elif arch == 'RestoreFormer': + from gfpgan.archs.restoreformer_arch import RestoreFormer + self.gfpgan = RestoreFormer() + elif arch == 'CodeFormer': + from gfpgan.archs.codeformer_arch import CodeFormer + self.gfpgan = CodeFormer( + dim_embd=512, codebook_size=1024, n_head=8, n_layers=9, connect_list=['32', '64', '128', '256']) + # initialize face helper + self.face_helper = FaceRestoreHelper( + upscale, + face_size=512, + crop_ratio=(1, 1), + det_model='retinaface_resnet50', + save_ext='png', + use_parse=True, + device=self.device, + model_rootpath='gfpgan/weights') + + if model_path.startswith('https://'): + model_path = load_file_from_url( + url=model_path, model_dir=os.path.join(ROOT_DIR, 'gfpgan/weights'), progress=True, file_name=None) + loadnet = torch.load(model_path) + if 'params_ema' in loadnet: + keyname = 'params_ema' + else: + keyname = 'params' + self.gfpgan.load_state_dict(loadnet[keyname], strict=True) + self.gfpgan.eval() + self.gfpgan = self.gfpgan.to(self.device) + + @torch.no_grad() + def enhance(self, img, has_aligned=False, only_center_face=False, paste_back=True, weight=0.5): + self.face_helper.clean_all() + + if has_aligned: # the inputs are already aligned + img = cv2.resize(img, (512, 512)) + self.face_helper.cropped_faces = [img] + else: + self.face_helper.read_image(img) + # get face landmarks for each face + self.face_helper.get_face_landmarks_5(only_center_face=only_center_face, eye_dist_threshold=5) + # eye_dist_threshold=5: skip faces whose eye distance is smaller than 5 pixels + # TODO: even with eye_dist_threshold, it will still introduce wrong detections and restorations.
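+ # (the steps below follow the standard facexlib flow: warp each detected face to the 512x512 template, restore it with GFPGAN, then paste it back using the inverse affine transform)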
+ # align and warp each face + self.face_helper.align_warp_face() + + # face restoration + for cropped_face in self.face_helper.cropped_faces: + # prepare data + cropped_face_t = img2tensor(cropped_face / 255., bgr2rgb=True, float32=True) + normalize(cropped_face_t, (0.5, 0.5, 0.5), (0.5, 0.5, 0.5), inplace=True) + cropped_face_t = cropped_face_t.unsqueeze(0).to(self.device) + + try: + output = self.gfpgan(cropped_face_t, return_rgb=False, weight=weight)[0] + # convert to image + restored_face = tensor2img(output.squeeze(0), rgb2bgr=True, min_max=(-1, 1)) + except RuntimeError as error: + print(f'\tFailed inference for GFPGAN: {error}.') + restored_face = cropped_face + + restored_face = restored_face.astype('uint8') + self.face_helper.add_restored_face(restored_face) + + if not has_aligned and paste_back: + # upsample the background + if self.bg_upsampler is not None: + # currently only RealESRGAN is supported for upsampling the background + bg_img = self.bg_upsampler.enhance(img, outscale=self.upscale)[0] + else: + bg_img = None + + self.face_helper.get_inverse_affine(None) + # paste each restored face to the input image + restored_img = self.face_helper.paste_faces_to_input_image(upsample_img=bg_img) + return self.face_helper.cropped_faces, self.face_helper.restored_faces, restored_img + else: + return self.face_helper.cropped_faces, self.face_helper.restored_faces, None
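A minimal sketch of driving the GFPGANer helper above (the file names are illustrative only; the weight URL matches the v1.3 entry in inference_gfpgan.py below):

    import cv2
    from gfpgan import GFPGANer

    # build the restorer; with bg_upsampler=None the background is not super-resolved
    restorer = GFPGANer(
        model_path='https://github.com/TencentARC/GFPGAN/releases/download/v1.3.0/GFPGANv1.3.pth',
        upscale=2, arch='clean', channel_multiplier=2, bg_upsampler=None)

    img = cv2.imread('input.jpg', cv2.IMREAD_COLOR)  # BGR image, as enhance() expects
    cropped_faces, restored_faces, restored_img = restorer.enhance(img, paste_back=True)
    cv2.imwrite('restored.jpg', restored_img)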
diff --git a/motion-gan-pipeline/GFPGAN/gfpgan/weights/README.md b/motion-gan-pipeline/GFPGAN/gfpgan/weights/README.md new file mode 100644 index 0000000..4d7b7e6 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/gfpgan/weights/README.md @@ -0,0 +1,3 @@ +# Weights + +Put the downloaded weights into this folder. diff --git a/motion-gan-pipeline/GFPGAN/inference_gfpgan.py b/motion-gan-pipeline/GFPGAN/inference_gfpgan.py new file mode 100644 index 0000000..74b3a2a --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/inference_gfpgan.py @@ -0,0 +1,186 @@ +import argparse +import cv2 +import glob +import numpy as np +import os +import torch +from basicsr.utils import imwrite +from tqdm import tqdm +from gfpgan import GFPGANer + +def run_inference(args: argparse.Namespace): + # ------------------------ input & output ------------------------ + if args.input.endswith('/'): + args.input = args.input[:-1] + if os.path.isfile(args.input): + img_list = [args.input] + else: + img_list = sorted(glob.glob(os.path.join(args.input, '*'))) + + os.makedirs(args.output, exist_ok=True) + + if len(os.listdir(args.output)) != 0: + print('Already done.\n\n') + return + + #------------------------ set up background upsampler ------------------------ + if args.bg_upsampler == 'realesrgan': + if not torch.cuda.is_available(): # CPU + import warnings + warnings.warn('The unoptimized RealESRGAN is slow on CPU. We do not use it. ' + 'If you really want to use it, please modify the corresponding code.') + bg_upsampler = None + else: + from basicsr.archs.rrdbnet_arch import RRDBNet + from realesrgan import RealESRGANer + model = RRDBNet(num_in_ch=3, num_out_ch=3, num_feat=64, num_block=23, num_grow_ch=32, scale=2) + bg_upsampler = RealESRGANer( + scale=2, + model_path='https://github.com/xinntao/Real-ESRGAN/releases/download/v0.2.1/RealESRGAN_x2plus.pth', + model=model, + tile=args.bg_tile, + tile_pad=10, + pre_pad=0, + half=True) # need to set False in CPU mode + else: + bg_upsampler = None + bg_upsampler = None # NOTE: this unconditional assignment disables background upsampling even when RealESRGAN was set up above + + # ------------------------ set up GFPGAN restorer ------------------------ + if args.version == '1': + arch = 'original' + channel_multiplier = 1 + model_name = 'GFPGANv1' + url = 'https://github.com/TencentARC/GFPGAN/releases/download/v0.1.0/GFPGANv1.pth' + elif args.version == '1.2': + arch = 'clean' + channel_multiplier = 2 + model_name = 'GFPGANCleanv1-NoCE-C2' + url = 'https://github.com/TencentARC/GFPGAN/releases/download/v0.2.0/GFPGANCleanv1-NoCE-C2.pth' + elif args.version == '1.3': + arch = 'clean' + channel_multiplier = 2 + model_name = 'GFPGANv1.3' + url = 'https://github.com/TencentARC/GFPGAN/releases/download/v1.3.0/GFPGANv1.3.pth' + elif args.version == '1.4': + arch = 'clean' + channel_multiplier = 2 + model_name = 'GFPGANv1.4' + url = 'https://github.com/TencentARC/GFPGAN/releases/download/v1.3.0/GFPGANv1.4.pth' + elif args.version == 'RestoreFormer': + arch = 'RestoreFormer' + channel_multiplier = 2 + model_name = 'RestoreFormer' + url = 'https://github.com/TencentARC/GFPGAN/releases/download/v1.3.4/RestoreFormer.pth' + elif args.version == 'CodeFormer': + arch = 'CodeFormer' + channel_multiplier = 2 + model_name = 'CodeFormer' + url = 'https://github.com/TencentARC/GFPGAN/releases/download/v1.3.4/CodeFormer.pth' + else: + raise ValueError(f'Wrong model version {args.version}.') + + # determine model paths + model_path = os.path.join('experiments/pretrained_models', model_name + '.pth') + if not os.path.isfile(model_path): + model_path = os.path.join('gfpgan/weights', model_name + '.pth') + if not os.path.isfile(model_path): + # download pre-trained models from url + model_path = url + + restorer = GFPGANer( + model_path=model_path, + upscale=args.upscale, + arch=arch, + channel_multiplier=channel_multiplier, + bg_upsampler=bg_upsampler) + + # ------------------------ restore ------------------------ + for img_path in tqdm(img_list): + # read image + img_name = os.path.basename(img_path) + # print(f'Processing {img_name} ...') + basename, ext = os.path.splitext(img_name) + input_img = cv2.imread(img_path, cv2.IMREAD_COLOR) + + # restore faces and background if necessary + cropped_faces, restored_faces, restored_img = restorer.enhance( + input_img, + has_aligned=args.aligned, + only_center_face=args.only_center_face, + paste_back=True, + weight=args.weight) + + # save faces + for idx, (cropped_face, restored_face) in enumerate(zip(cropped_faces, restored_faces)): + # save cropped face + save_crop_path = os.path.join(args.output, 'cropped_faces', f'{basename}_{idx:02d}.png') + imwrite(cropped_face, save_crop_path) + # save restored face + if args.suffix is not None: + save_face_name = f'{basename}_{idx:02d}_{args.suffix}.png' + else: + save_face_name = f'{basename}_{idx:02d}.png' + save_restore_path = os.path.join(args.output, 'restored_faces', save_face_name) + imwrite(restored_face, save_restore_path) + # save comparison image + cmp_img = np.concatenate((cropped_face, restored_face), 
axis=1) + imwrite(cmp_img, os.path.join(args.output, 'cmp', f'{basename}_{idx:02d}.png')) + + # save restored img + if restored_img is not None: + if args.ext == 'auto': + extension = ext[1:] + else: + extension = args.ext + + if args.suffix is not None: + save_restore_path = os.path.join(args.output, 'generated_frames', f'{basename}_{args.suffix}.{extension}') + else: + save_restore_path = os.path.join(args.output, 'generated_frames', f'{basename}.{extension}') + imwrite(restored_img, save_restore_path) + + print(f'Results are in the [{args.output}] folder.') + + +def main(): + """Inference demo for GFPGAN (for users). + """ + parser = argparse.ArgumentParser() + parser.add_argument( + '-i', + '--input', + type=str, + default='inputs/whole_imgs', + help='Input image or folder. Default: inputs/whole_imgs') + parser.add_argument('-o', '--output', type=str, default='results', help='Output folder. Default: results') + # we use version to select models, which is more user-friendly + parser.add_argument( + '-v', '--version', type=str, default='1.3', help='GFPGAN model version. Option: 1 | 1.2 | 1.3 | 1.4 | RestoreFormer | CodeFormer. Default: 1.3') + parser.add_argument( + '-s', '--upscale', type=int, default=1, help='The final upsampling scale of the image. Default: 1') + + parser.add_argument( + '--bg_upsampler', type=str, default='realesrgan', help='background upsampler. Default: realesrgan') + parser.add_argument( + '--bg_tile', + type=int, + default=400, + help='Tile size for background upsampler, 0 for no tile during testing. Default: 400') + parser.add_argument('--suffix', type=str, default=None, help='Suffix of the restored faces') + parser.add_argument('--only_center_face', action='store_true', help='Only restore the center face') + parser.add_argument('--aligned', action='store_true', help='Inputs are aligned faces') + parser.add_argument( + '--ext', + type=str, + default='auto', + help='Image extension. Options: auto | jpg | png, auto means using the same extension as inputs. 
Default: auto') + parser.add_argument('-w', '--weight', type=float, default=0.5, help='Adjustable weights for CodeFormer.') + args = parser.parse_args() + + run_inference(args) + + +if __name__ == '__main__': + main() diff --git a/motion-gan-pipeline/GFPGAN/inputs/cropped_faces/Adele_crop.png b/motion-gan-pipeline/GFPGAN/inputs/cropped_faces/Adele_crop.png new file mode 100644 index 0000000..afeb555 Binary files /dev/null and b/motion-gan-pipeline/GFPGAN/inputs/cropped_faces/Adele_crop.png differ diff --git a/motion-gan-pipeline/GFPGAN/inputs/cropped_faces/Julia_Roberts_crop.png b/motion-gan-pipeline/GFPGAN/inputs/cropped_faces/Julia_Roberts_crop.png new file mode 100644 index 0000000..38c75c6 Binary files /dev/null and b/motion-gan-pipeline/GFPGAN/inputs/cropped_faces/Julia_Roberts_crop.png differ diff --git a/motion-gan-pipeline/GFPGAN/inputs/cropped_faces/Justin_Timberlake_crop.png b/motion-gan-pipeline/GFPGAN/inputs/cropped_faces/Justin_Timberlake_crop.png new file mode 100644 index 0000000..f4c118c Binary files /dev/null and b/motion-gan-pipeline/GFPGAN/inputs/cropped_faces/Justin_Timberlake_crop.png differ diff --git a/motion-gan-pipeline/GFPGAN/inputs/cropped_faces/Paris_Hilton_crop.png b/motion-gan-pipeline/GFPGAN/inputs/cropped_faces/Paris_Hilton_crop.png new file mode 100644 index 0000000..8d8ffc1 Binary files /dev/null and b/motion-gan-pipeline/GFPGAN/inputs/cropped_faces/Paris_Hilton_crop.png differ diff --git a/motion-gan-pipeline/GFPGAN/inputs/whole_imgs/00.jpg b/motion-gan-pipeline/GFPGAN/inputs/whole_imgs/00.jpg new file mode 100644 index 0000000..cc41a5a --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/inputs/whole_imgs/00.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:775f8079c8e0227273c6c43488936db0c4f3e0b72dfcc7e6fbbd8dc0fd956a17 +size 2376753 diff --git a/motion-gan-pipeline/GFPGAN/inputs/whole_imgs/00000.png b/motion-gan-pipeline/GFPGAN/inputs/whole_imgs/00000.png new file mode 100644 index 0000000..c10f7c9 Binary files /dev/null and b/motion-gan-pipeline/GFPGAN/inputs/whole_imgs/00000.png differ diff --git a/motion-gan-pipeline/GFPGAN/inputs/whole_imgs/10045.png b/motion-gan-pipeline/GFPGAN/inputs/whole_imgs/10045.png new file mode 100644 index 0000000..72032fb Binary files /dev/null and b/motion-gan-pipeline/GFPGAN/inputs/whole_imgs/10045.png differ diff --git a/motion-gan-pipeline/GFPGAN/inputs/whole_imgs/Blake_Lively.jpg b/motion-gan-pipeline/GFPGAN/inputs/whole_imgs/Blake_Lively.jpg new file mode 100644 index 0000000..e9933e7 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/inputs/whole_imgs/Blake_Lively.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1fae5b7f8b3f315db5b1cc141e4391f474832081fc4c7c5aea7e4d95b8b8aad9 +size 93150 diff --git a/motion-gan-pipeline/GFPGAN/inputs/whole_imgs/low.jpg b/motion-gan-pipeline/GFPGAN/inputs/whole_imgs/low.jpg new file mode 100644 index 0000000..412c9d5 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/inputs/whole_imgs/low.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3fbecfabde7cdab30cb51ceaa8a19824298a8877a6895cf9dc6dbbc43ffe897d +size 15750 diff --git a/motion-gan-pipeline/GFPGAN/options/train_gfpgan_v1.yml b/motion-gan-pipeline/GFPGAN/options/train_gfpgan_v1.yml new file mode 100644 index 0000000..aa5212a --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/options/train_gfpgan_v1.yml @@ -0,0 +1,216 @@ +# general settings +name: train_GFPGANv1_512 +model_type: GFPGANModel +num_gpu: auto # officially, we use 4 GPUs +manual_seed: 0 + +# dataset and 
data loader settings +datasets: + train: + name: FFHQ + type: FFHQDegradationDataset + # dataroot_gt: datasets/ffhq/ffhq_512.lmdb + dataroot_gt: datasets/ffhq/ffhq_512 + io_backend: + # type: lmdb + type: disk + + use_hflip: true + mean: [0.5, 0.5, 0.5] + std: [0.5, 0.5, 0.5] + out_size: 512 + + blur_kernel_size: 41 + kernel_list: ['iso', 'aniso'] + kernel_prob: [0.5, 0.5] + blur_sigma: [0.1, 10] + downsample_range: [0.8, 8] + noise_range: [0, 20] + jpeg_range: [60, 100] + + # color jitter and gray + color_jitter_prob: 0.3 + color_jitter_shift: 20 + color_jitter_pt_prob: 0.3 + gray_prob: 0.01 + + # If you do not want colorization, please set + # color_jitter_prob: ~ + # color_jitter_pt_prob: ~ + # gray_prob: 0.01 + # gt_gray: True + + crop_components: true + component_path: experiments/pretrained_models/FFHQ_eye_mouth_landmarks_512.pth + eye_enlarge_ratio: 1.4 + + # data loader + use_shuffle: true + num_worker_per_gpu: 6 + batch_size_per_gpu: 3 + dataset_enlarge_ratio: 1 + prefetch_mode: ~ + + val: + # Please modify this to use your own validation set + # Or comment out the val block if you do not need validation during training + name: validation + type: PairedImageDataset + dataroot_lq: datasets/faces/validation/input + dataroot_gt: datasets/faces/validation/reference + io_backend: + type: disk + mean: [0.5, 0.5, 0.5] + std: [0.5, 0.5, 0.5] + scale: 1 + +# network structures +network_g: + type: GFPGANv1 + out_size: 512 + num_style_feat: 512 + channel_multiplier: 1 + resample_kernel: [1, 3, 3, 1] + decoder_load_path: experiments/pretrained_models/StyleGAN2_512_Cmul1_FFHQ_B12G4_scratch_800k.pth + fix_decoder: true + num_mlp: 8 + lr_mlp: 0.01 + input_is_latent: true + different_w: true + narrow: 1 + sft_half: true + +network_d: + type: StyleGAN2Discriminator + out_size: 512 + channel_multiplier: 1 + resample_kernel: [1, 3, 3, 1] + +network_d_left_eye: + type: FacialComponentDiscriminator + +network_d_right_eye: + type: FacialComponentDiscriminator + +network_d_mouth: + type: FacialComponentDiscriminator + +network_identity: + type: ResNetArcFace + block: IRBlock + layers: [2, 2, 2, 2] + use_se: False + +# path +path: + pretrain_network_g: ~ + param_key_g: params_ema + strict_load_g: ~ + pretrain_network_d: ~ + pretrain_network_d_left_eye: ~ + pretrain_network_d_right_eye: ~ + pretrain_network_d_mouth: ~ + pretrain_network_identity: experiments/pretrained_models/arcface_resnet18.pth + # resume + resume_state: ~ + ignore_resume_networks: ['network_identity'] + +# training settings +train: + optim_g: + type: Adam + lr: !!float 2e-3 + optim_d: + type: Adam + lr: !!float 2e-3 + optim_component: + type: Adam + lr: !!float 2e-3 + + scheduler: + type: MultiStepLR + milestones: [600000, 700000] + gamma: 0.5 + + total_iter: 800000 + warmup_iter: -1 # no warm up + + # losses + # pixel loss + pixel_opt: + type: L1Loss + loss_weight: !!float 1e-1 + reduction: mean + # L1 loss used in pyramid loss, component style loss and identity loss + L1_opt: + type: L1Loss + loss_weight: 1 + reduction: mean + + # image pyramid loss + pyramid_loss_weight: 1 + remove_pyramid_loss: 50000 + # perceptual loss (content and style losses) + perceptual_opt: + type: PerceptualLoss + layer_weights: + # before relu + 'conv1_2': 0.1 + 'conv2_2': 0.1 + 'conv3_4': 1 + 'conv4_4': 1 + 'conv5_4': 1 + vgg_type: vgg19 + use_input_norm: true + perceptual_weight: !!float 1 + style_weight: 50 + range_norm: true + criterion: l1 + # gan loss + gan_opt: + type: GANLoss + gan_type: wgan_softplus + loss_weight: !!float 1e-1
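+ # the r1_reg_weight below feeds the r1_penalty term applied every net_d_reg_every iterations in optimize_parameters() above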
+ # r1 regularization for discriminator + r1_reg_weight: 10 + # facial component loss + gan_component_opt: + type: GANLoss + gan_type: vanilla + real_label_val: 1.0 + fake_label_val: 0.0 + loss_weight: !!float 1 + comp_style_weight: 200 + # identity loss + identity_weight: 10 + + net_d_iters: 1 + net_d_init_iters: 0 + net_d_reg_every: 16 + +# validation settings +val: + val_freq: !!float 5e3 + save_img: true + + metrics: + psnr: # metric name + type: calculate_psnr + crop_border: 0 + test_y_channel: false + +# logging settings +logger: + print_freq: 100 + save_checkpoint_freq: !!float 5e3 + use_tb_logger: true + wandb: + project: ~ + resume_id: ~ + +# dist training settings +dist_params: + backend: nccl + port: 29500 + +find_unused_parameters: true diff --git a/motion-gan-pipeline/GFPGAN/options/train_gfpgan_v1_simple.yml b/motion-gan-pipeline/GFPGAN/options/train_gfpgan_v1_simple.yml new file mode 100644 index 0000000..3807575 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/options/train_gfpgan_v1_simple.yml @@ -0,0 +1,182 @@ +# general settings +name: train_GFPGANv1_512_simple +model_type: GFPGANModel +num_gpu: auto # officially, we use 4 GPUs +manual_seed: 0 + +# dataset and data loader settings +datasets: + train: + name: FFHQ + type: FFHQDegradationDataset + # dataroot_gt: datasets/ffhq/ffhq_512.lmdb + dataroot_gt: datasets/ffhq/ffhq_512 + io_backend: + # type: lmdb + type: disk + + use_hflip: true + mean: [0.5, 0.5, 0.5] + std: [0.5, 0.5, 0.5] + out_size: 512 + + blur_kernel_size: 41 + kernel_list: ['iso', 'aniso'] + kernel_prob: [0.5, 0.5] + blur_sigma: [0.1, 10] + downsample_range: [0.8, 8] + noise_range: [0, 20] + jpeg_range: [60, 100] + + # color jitter and gray + color_jitter_prob: 0.3 + color_jitter_shift: 20 + color_jitter_pt_prob: 0.3 + gray_prob: 0.01 + + # If you do not want colorization, please set + # color_jitter_prob: ~ + # color_jitter_pt_prob: ~ + # gray_prob: 0.01 + # gt_gray: True + + # data loader + use_shuffle: true + num_worker_per_gpu: 6 + batch_size_per_gpu: 3 + dataset_enlarge_ratio: 1 + prefetch_mode: ~ + + val: + # Please modify this to use your own validation set + # Or comment out the val block if you do not need validation during training + name: validation + type: PairedImageDataset + dataroot_lq: datasets/faces/validation/input + dataroot_gt: datasets/faces/validation/reference + io_backend: + type: disk + mean: [0.5, 0.5, 0.5] + std: [0.5, 0.5, 0.5] + scale: 1 + +# network structures +network_g: + type: GFPGANv1 + out_size: 512 + num_style_feat: 512 + channel_multiplier: 1 + resample_kernel: [1, 3, 3, 1] + decoder_load_path: experiments/pretrained_models/StyleGAN2_512_Cmul1_FFHQ_B12G4_scratch_800k.pth + fix_decoder: true + num_mlp: 8 + lr_mlp: 0.01 + input_is_latent: true + different_w: true + narrow: 1 + sft_half: true + +network_d: + type: StyleGAN2Discriminator + out_size: 512 + channel_multiplier: 1 + resample_kernel: [1, 3, 3, 1] + + +# path +path: + pretrain_network_g: ~ + param_key_g: params_ema + strict_load_g: ~ + pretrain_network_d: ~ + resume_state: ~ + +# training settings +train: + optim_g: + type: Adam + lr: !!float 2e-3 + optim_d: + type: Adam + lr: !!float 2e-3 + optim_component: + type: Adam + lr: !!float 2e-3 + + scheduler: + type: MultiStepLR + milestones: [600000, 700000] + gamma: 0.5 + + total_iter: 800000 + warmup_iter: -1 # no warm up + + # losses + # pixel loss + pixel_opt: + type: L1Loss + loss_weight: !!float 1e-1 + reduction: mean + # L1 loss used in pyramid loss, component style loss and identity loss + L1_opt: + type: L1Loss + loss_weight: 1 + 
reduction: mean + + # image pyramid loss + pyramid_loss_weight: 1 + remove_pyramid_loss: 50000 + # perceptual loss (content and style losses) + perceptual_opt: + type: PerceptualLoss + layer_weights: + # before relu + 'conv1_2': 0.1 + 'conv2_2': 0.1 + 'conv3_4': 1 + 'conv4_4': 1 + 'conv5_4': 1 + vgg_type: vgg19 + use_input_norm: true + perceptual_weight: !!float 1 + style_weight: 50 + range_norm: true + criterion: l1 + # gan loss + gan_opt: + type: GANLoss + gan_type: wgan_softplus + loss_weight: !!float 1e-1 + # r1 regularization for discriminator + r1_reg_weight: 10 + + net_d_iters: 1 + net_d_init_iters: 0 + net_d_reg_every: 16 + +# validation settings +val: + val_freq: !!float 5e3 + save_img: true + + metrics: + psnr: # metric name + type: calculate_psnr + crop_border: 0 + test_y_channel: false + +# logging settings +logger: + print_freq: 100 + save_checkpoint_freq: !!float 5e3 + use_tb_logger: true + wandb: + project: ~ + resume_id: ~ + +# dist training settings +dist_params: + backend: nccl + port: 29500 + +find_unused_parameters: true diff --git a/motion-gan-pipeline/GFPGAN/requirements.txt b/motion-gan-pipeline/GFPGAN/requirements.txt new file mode 100644 index 0000000..4f46d25 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/requirements.txt @@ -0,0 +1,12 @@ +basicsr>=1.4.2 +facexlib>=0.2.5 +lmdb +numpy +opencv-python +pyyaml +scipy +tb-nightly +torch>=1.7 +torchvision +tqdm +yapf diff --git a/motion-gan-pipeline/GFPGAN/scripts/convert_gfpganv_to_clean.py b/motion-gan-pipeline/GFPGAN/scripts/convert_gfpganv_to_clean.py new file mode 100644 index 0000000..8fdccb6 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/scripts/convert_gfpganv_to_clean.py @@ -0,0 +1,164 @@ +import argparse +import math +import torch + +from gfpgan.archs.gfpganv1_clean_arch import GFPGANv1Clean + + +def modify_checkpoint(checkpoint_bilinear, checkpoint_clean): + for ori_k, ori_v in checkpoint_bilinear.items(): + if 'stylegan_decoder' in ori_k: + if 'style_mlp' in ori_k: # style_mlp_layers + lr_mul = 0.01 + prefix, name, idx, var = ori_k.split('.') + idx = (int(idx) * 2) - 1 + crt_k = f'{prefix}.{name}.{idx}.{var}' + if var == 'weight': + _, c_in = ori_v.size() + scale = (1 / math.sqrt(c_in)) * lr_mul + crt_v = ori_v * scale * 2**0.5 + else: + crt_v = ori_v * lr_mul * 2**0.5 + checkpoint_clean[crt_k] = crt_v + elif 'modulation' in ori_k: # modulation in StyleConv + lr_mul = 1 + crt_k = ori_k + var = ori_k.split('.')[-1] + if var == 'weight': + _, c_in = ori_v.size() + scale = (1 / math.sqrt(c_in)) * lr_mul + crt_v = ori_v * scale + else: + crt_v = ori_v * lr_mul + checkpoint_clean[crt_k] = crt_v + elif 'style_conv' in ori_k: + # StyleConv in style_conv1 and style_convs + if 'activate' in ori_k: # FusedLeakyReLU + # eg. style_conv1.activate.bias + # eg. style_convs.13.activate.bias + split_rlt = ori_k.split('.') + if len(split_rlt) == 4: + prefix, name, _, var = split_rlt + crt_k = f'{prefix}.{name}.{var}' + elif len(split_rlt) == 5: + prefix, name, idx, _, var = split_rlt + crt_k = f'{prefix}.{name}.{idx}.{var}' + crt_v = ori_v * 2**0.5 # 2**0.5 used in FusedLeakyReLU + c = crt_v.size(0) + checkpoint_clean[crt_k] = crt_v.view(1, c, 1, 1) + elif 'modulated_conv' in ori_k: + # eg. style_conv1.modulated_conv.weight + # eg. 
style_convs.13.modulated_conv.weight + _, c_out, c_in, k1, k2 = ori_v.size() + scale = 1 / math.sqrt(c_in * k1 * k2) + crt_k = ori_k + checkpoint_clean[crt_k] = ori_v * scale + elif 'weight' in ori_k: + crt_k = ori_k + checkpoint_clean[crt_k] = ori_v * 2**0.5 + elif 'to_rgb' in ori_k: # StyleConv in to_rgb1 and to_rgbs + if 'modulated_conv' in ori_k: + # eg. to_rgb1.modulated_conv.weight + # eg. to_rgbs.5.modulated_conv.weight + _, c_out, c_in, k1, k2 = ori_v.size() + scale = 1 / math.sqrt(c_in * k1 * k2) + crt_k = ori_k + checkpoint_clean[crt_k] = ori_v * scale + else: + crt_k = ori_k + checkpoint_clean[crt_k] = ori_v + else: + crt_k = ori_k + checkpoint_clean[crt_k] = ori_v + # end of 'stylegan_decoder' + elif 'conv_body_first' in ori_k or 'final_conv' in ori_k: + # key name + name, _, var = ori_k.split('.') + crt_k = f'{name}.{var}' + # weight and bias + if var == 'weight': + c_out, c_in, k1, k2 = ori_v.size() + scale = 1 / math.sqrt(c_in * k1 * k2) + checkpoint_clean[crt_k] = ori_v * scale * 2**0.5 + else: + checkpoint_clean[crt_k] = ori_v * 2**0.5 + elif 'conv_body' in ori_k: + if 'conv_body_up' in ori_k: + ori_k = ori_k.replace('conv2.weight', 'conv2.1.weight') + ori_k = ori_k.replace('skip.weight', 'skip.1.weight') + name1, idx1, name2, _, var = ori_k.split('.') + crt_k = f'{name1}.{idx1}.{name2}.{var}' + if name2 == 'skip': + c_out, c_in, k1, k2 = ori_v.size() + scale = 1 / math.sqrt(c_in * k1 * k2) + checkpoint_clean[crt_k] = ori_v * scale / 2**0.5 + else: + if var == 'weight': + c_out, c_in, k1, k2 = ori_v.size() + scale = 1 / math.sqrt(c_in * k1 * k2) + checkpoint_clean[crt_k] = ori_v * scale + else: + checkpoint_clean[crt_k] = ori_v + if 'conv1' in ori_k: + checkpoint_clean[crt_k] *= 2**0.5 + elif 'toRGB' in ori_k: + crt_k = ori_k + if 'weight' in ori_k: + c_out, c_in, k1, k2 = ori_v.size() + scale = 1 / math.sqrt(c_in * k1 * k2) + checkpoint_clean[crt_k] = ori_v * scale + else: + checkpoint_clean[crt_k] = ori_v + elif 'final_linear' in ori_k: + crt_k = ori_k + if 'weight' in ori_k: + _, c_in = ori_v.size() + scale = 1 / math.sqrt(c_in) + checkpoint_clean[crt_k] = ori_v * scale + else: + checkpoint_clean[crt_k] = ori_v + elif 'condition' in ori_k: + crt_k = ori_k + if '0.weight' in ori_k: + c_out, c_in, k1, k2 = ori_v.size() + scale = 1 / math.sqrt(c_in * k1 * k2) + checkpoint_clean[crt_k] = ori_v * scale * 2**0.5 + elif '0.bias' in ori_k: + checkpoint_clean[crt_k] = ori_v * 2**0.5 + elif '2.weight' in ori_k: + c_out, c_in, k1, k2 = ori_v.size() + scale = 1 / math.sqrt(c_in * k1 * k2) + checkpoint_clean[crt_k] = ori_v * scale + elif '2.bias' in ori_k: + checkpoint_clean[crt_k] = ori_v + + return checkpoint_clean + + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--ori_path', type=str, help='Path to the original model') + parser.add_argument('--narrow', type=float, default=1) + parser.add_argument('--channel_multiplier', type=float, default=2) + parser.add_argument('--save_path', type=str) + args = parser.parse_args() + + ori_ckpt = torch.load(args.ori_path)['params_ema'] + + net = GFPGANv1Clean( + 512, + num_style_feat=512, + channel_multiplier=args.channel_multiplier, + decoder_load_path=None, + fix_decoder=False, + # for stylegan decoder + num_mlp=8, + input_is_latent=True, + different_w=True, + narrow=args.narrow, + sft_half=True) + crt_ckpt = net.state_dict() + + crt_ckpt = modify_checkpoint(ori_ckpt, crt_ckpt) + print(f'Save to {args.save_path}.') + torch.save(dict(params_ema=crt_ckpt), args.save_path, 
_use_new_zipfile_serialization=False) diff --git a/motion-gan-pipeline/GFPGAN/scripts/parse_landmark.py b/motion-gan-pipeline/GFPGAN/scripts/parse_landmark.py new file mode 100644 index 0000000..74e2ff9 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/scripts/parse_landmark.py @@ -0,0 +1,85 @@ +import cv2 +import json +import numpy as np +import os +import torch +from basicsr.utils import FileClient, imfrombytes +from collections import OrderedDict + +# ---------------------------- This script is used to parse facial landmarks ------------------------------------- # +# Configurations +save_img = False +scale = 0.5 # 0.5 for official FFHQ (512x512), 1 for others +enlarge_ratio = 1.4 # only for eyes +json_path = 'ffhq-dataset-v2.json' +face_path = 'datasets/ffhq/ffhq_512.lmdb' +save_path = './FFHQ_eye_mouth_landmarks_512.pth' + +print('Load JSON metadata...') +# use the official json file in FFHQ dataset +with open(json_path, 'rb') as f: + json_data = json.load(f, object_pairs_hook=OrderedDict) + +print('Open LMDB file...') +# read ffhq images +file_client = FileClient('lmdb', db_paths=face_path) +with open(os.path.join(face_path, 'meta_info.txt')) as fin: + paths = [line.split('.')[0] for line in fin] + +save_dict = {} + +for item_idx, item in enumerate(json_data.values()): + print(f'\r{item_idx} / {len(json_data)}, {item["image"]["file_path"]} ', end='', flush=True) + + # parse landmarks + lm = np.array(item['image']['face_landmarks']) + lm = lm * scale + + item_dict = {} + # get image + if save_img: + img_bytes = file_client.get(paths[item_idx]) + img = imfrombytes(img_bytes, float32=True) + + # get landmarks for each component + map_left_eye = list(range(36, 42)) + map_right_eye = list(range(42, 48)) + map_mouth = list(range(48, 68)) + + # eye_left + mean_left_eye = np.mean(lm[map_left_eye], 0) # (x, y) + half_len_left_eye = np.max((np.max(np.max(lm[map_left_eye], 0) - np.min(lm[map_left_eye], 0)) / 2, 16)) + item_dict['left_eye'] = [mean_left_eye[0], mean_left_eye[1], half_len_left_eye] + # mean_left_eye[0] = 512 - mean_left_eye[0] # for testing flip + half_len_left_eye *= enlarge_ratio + loc_left_eye = np.hstack((mean_left_eye - half_len_left_eye + 1, mean_left_eye + half_len_left_eye)).astype(int) + if save_img: + eye_left_img = img[loc_left_eye[1]:loc_left_eye[3], loc_left_eye[0]:loc_left_eye[2], :] + cv2.imwrite(f'tmp/{item_idx:08d}_eye_left.png', eye_left_img * 255) + + # eye_right + mean_right_eye = np.mean(lm[map_right_eye], 0) + half_len_right_eye = np.max((np.max(np.max(lm[map_right_eye], 0) - np.min(lm[map_right_eye], 0)) / 2, 16)) + item_dict['right_eye'] = [mean_right_eye[0], mean_right_eye[1], half_len_right_eye] + # mean_right_eye[0] = 512 - mean_right_eye[0] # # for testing flip + half_len_right_eye *= enlarge_ratio + loc_right_eye = np.hstack( + (mean_right_eye - half_len_right_eye + 1, mean_right_eye + half_len_right_eye)).astype(int) + if save_img: + eye_right_img = img[loc_right_eye[1]:loc_right_eye[3], loc_right_eye[0]:loc_right_eye[2], :] + cv2.imwrite(f'tmp/{item_idx:08d}_eye_right.png', eye_right_img * 255) + + # mouth + mean_mouth = np.mean(lm[map_mouth], 0) + half_len_mouth = np.max((np.max(np.max(lm[map_mouth], 0) - np.min(lm[map_mouth], 0)) / 2, 16)) + item_dict['mouth'] = [mean_mouth[0], mean_mouth[1], half_len_mouth] + # mean_mouth[0] = 512 - mean_mouth[0] # for testing flip + loc_mouth = np.hstack((mean_mouth - half_len_mouth + 1, mean_mouth + half_len_mouth)).astype(int) + if save_img: + mouth_img = img[loc_mouth[1]:loc_mouth[3], loc_mouth[0]:loc_mouth[2], 
:] + cv2.imwrite(f'tmp/{item_idx:08d}_mouth.png', mouth_img * 255) + + save_dict[f'{item_idx:08d}'] = item_dict + +print('Save...') +torch.save(save_dict, save_path) diff --git a/motion-gan-pipeline/GFPGAN/setup.cfg b/motion-gan-pipeline/GFPGAN/setup.cfg new file mode 100644 index 0000000..3d90d60 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/setup.cfg @@ -0,0 +1,33 @@ +[flake8] +ignore = + # line break before binary operator (W503) + W503, + # line break after binary operator (W504) + W504, +max-line-length=120 + +[yapf] +based_on_style = pep8 +column_limit = 120 +blank_line_before_nested_class_or_def = true +split_before_expression_after_opening_paren = true + +[isort] +line_length = 120 +multi_line_output = 0 +known_standard_library = pkg_resources,setuptools +known_first_party = gfpgan +known_third_party = basicsr,cv2,facexlib,numpy,pytest,torch,torchvision,tqdm,yaml +no_lines_before = STDLIB,LOCALFOLDER +default_section = THIRDPARTY + +[codespell] +skip = .git,./docs/build +count = +quiet-level = 3 + +[aliases] +test=pytest + +[tool:pytest] +addopts=tests/ diff --git a/motion-gan-pipeline/GFPGAN/setup.py b/motion-gan-pipeline/GFPGAN/setup.py new file mode 100644 index 0000000..474e918 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/setup.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python + +from setuptools import find_packages, setup + +import os +import subprocess +import time + +version_file = 'gfpgan/version.py' + + +def readme(): + with open('README.md', encoding='utf-8') as f: + content = f.read() + return content + + +def get_git_hash(): + + def _minimal_ext_cmd(cmd): + # construct minimal environment + env = {} + for k in ['SYSTEMROOT', 'PATH', 'HOME']: + v = os.environ.get(k) + if v is not None: + env[k] = v + # LANGUAGE is used on win32 + env['LANGUAGE'] = 'C' + env['LANG'] = 'C' + env['LC_ALL'] = 'C' + out = subprocess.Popen(cmd, stdout=subprocess.PIPE, env=env).communicate()[0] + return out + + try: + out = _minimal_ext_cmd(['git', 'rev-parse', 'HEAD']) + sha = out.strip().decode('ascii') + except OSError: + sha = 'unknown' + + return sha + + +def get_hash(): + if os.path.exists('.git'): + sha = get_git_hash()[:7] + else: + sha = 'unknown' + + return sha + + +def write_version_py(): + content = """# GENERATED VERSION FILE +# TIME: {} +__version__ = '{}' +__gitsha__ = '{}' +version_info = ({}) +""" + sha = get_hash() + with open('VERSION', 'r') as f: + SHORT_VERSION = f.read().strip() + VERSION_INFO = ', '.join([x if x.isdigit() else f'"{x}"' for x in SHORT_VERSION.split('.')]) + + version_file_str = content.format(time.asctime(), SHORT_VERSION, sha, VERSION_INFO) + with open(version_file, 'w') as f: + f.write(version_file_str) + + +def get_version(): + with open(version_file, 'r') as f: + exec(compile(f.read(), version_file, 'exec')) + return locals()['__version__'] + + +def get_requirements(filename='requirements.txt'): + here = os.path.dirname(os.path.realpath(__file__)) + with open(os.path.join(here, filename), 'r') as f: + requires = [line.replace('\n', '') for line in f.readlines()] + return requires + + +if __name__ == '__main__': + write_version_py() + setup( + name='gfpgan', + version=get_version(), + description='GFPGAN aims at developing Practical Algorithms for Real-world Face Restoration', + long_description=readme(), + long_description_content_type='text/markdown', + author='Xintao Wang', + author_email='xintao.wang@outlook.com', + keywords='computer vision, pytorch, image restoration, super-resolution, face restoration, gan, gfpgan', + 
url='https://github.com/TencentARC/GFPGAN', + include_package_data=True, + packages=find_packages(exclude=('options', 'datasets', 'experiments', 'results', 'tb_logger', 'wandb')), + classifiers=[ + 'Development Status :: 4 - Beta', + 'License :: OSI Approved :: Apache Software License', + 'Operating System :: OS Independent', + 'Programming Language :: Python :: 3', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + ], + license='Apache License Version 2.0', + setup_requires=['cython', 'numpy'], + install_requires=get_requirements(), + zip_safe=False) diff --git a/motion-gan-pipeline/GFPGAN/test_GPU.py b/motion-gan-pipeline/GFPGAN/test_GPU.py new file mode 100644 index 0000000..73c2464 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/test_GPU.py @@ -0,0 +1,3 @@ +import torch + +print(torch.cuda.is_available()) \ No newline at end of file diff --git a/motion-gan-pipeline/GFPGAN/tests/data/ffhq_gt.lmdb/data.mdb b/motion-gan-pipeline/GFPGAN/tests/data/ffhq_gt.lmdb/data.mdb new file mode 100644 index 0000000..823e0a9 Binary files /dev/null and b/motion-gan-pipeline/GFPGAN/tests/data/ffhq_gt.lmdb/data.mdb differ diff --git a/motion-gan-pipeline/GFPGAN/tests/data/ffhq_gt.lmdb/lock.mdb b/motion-gan-pipeline/GFPGAN/tests/data/ffhq_gt.lmdb/lock.mdb new file mode 100644 index 0000000..c53d2e5 Binary files /dev/null and b/motion-gan-pipeline/GFPGAN/tests/data/ffhq_gt.lmdb/lock.mdb differ diff --git a/motion-gan-pipeline/GFPGAN/tests/data/ffhq_gt.lmdb/meta_info.txt b/motion-gan-pipeline/GFPGAN/tests/data/ffhq_gt.lmdb/meta_info.txt new file mode 100644 index 0000000..8f18d95 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/tests/data/ffhq_gt.lmdb/meta_info.txt @@ -0,0 +1 @@ +00000000.png (512,512,3) 1 diff --git a/motion-gan-pipeline/GFPGAN/tests/data/gt/00000000.png b/motion-gan-pipeline/GFPGAN/tests/data/gt/00000000.png new file mode 100644 index 0000000..33425aa Binary files /dev/null and b/motion-gan-pipeline/GFPGAN/tests/data/gt/00000000.png differ diff --git a/motion-gan-pipeline/GFPGAN/tests/data/test_eye_mouth_landmarks.pth b/motion-gan-pipeline/GFPGAN/tests/data/test_eye_mouth_landmarks.pth new file mode 100644 index 0000000..a27f352 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/tests/data/test_eye_mouth_landmarks.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:131583fca2cc346652f8754eb3c5a0bdeda808686039ff10ead7a26254b72358 +size 943 diff --git a/motion-gan-pipeline/GFPGAN/tests/data/test_ffhq_degradation_dataset.yml b/motion-gan-pipeline/GFPGAN/tests/data/test_ffhq_degradation_dataset.yml new file mode 100644 index 0000000..df50c4b --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/tests/data/test_ffhq_degradation_dataset.yml @@ -0,0 +1,24 @@ +name: UnitTest +type: FFHQDegradationDataset +dataroot_gt: tests/data/gt +io_backend: + type: disk + +use_hflip: true +mean: [0.5, 0.5, 0.5] +std: [0.5, 0.5, 0.5] +out_size: 512 + +blur_kernel_size: 41 +kernel_list: ['iso', 'aniso'] +kernel_prob: [0.5, 0.5] +blur_sigma: [0.1, 10] +downsample_range: [0.8, 8] +noise_range: [0, 20] +jpeg_range: [60, 100] + +# color jitter and gray +color_jitter_prob: 1 +color_jitter_shift: 20 +color_jitter_pt_prob: 1 +gray_prob: 1 diff --git a/motion-gan-pipeline/GFPGAN/tests/data/test_gfpgan_model.yml b/motion-gan-pipeline/GFPGAN/tests/data/test_gfpgan_model.yml new file mode 100644 index 0000000..bac650e --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/tests/data/test_gfpgan_model.yml @@ -0,0 +1,140 @@ +num_gpu: 1 +manual_seed: 0 +is_train: True +dist: False + +# 
network structures +network_g: + type: GFPGANv1 + out_size: 512 + num_style_feat: 512 + channel_multiplier: 1 + resample_kernel: [1, 3, 3, 1] + decoder_load_path: ~ + fix_decoder: true + num_mlp: 8 + lr_mlp: 0.01 + input_is_latent: true + different_w: true + narrow: 0.5 + sft_half: true + +network_d: + type: StyleGAN2Discriminator + out_size: 512 + channel_multiplier: 1 + resample_kernel: [1, 3, 3, 1] + +network_d_left_eye: + type: FacialComponentDiscriminator + +network_d_right_eye: + type: FacialComponentDiscriminator + +network_d_mouth: + type: FacialComponentDiscriminator + +network_identity: + type: ResNetArcFace + block: IRBlock + layers: [2, 2, 2, 2] + use_se: False + +# path +path: + pretrain_network_g: ~ + param_key_g: params_ema + strict_load_g: ~ + pretrain_network_d: ~ + pretrain_network_d_left_eye: ~ + pretrain_network_d_right_eye: ~ + pretrain_network_d_mouth: ~ + pretrain_network_identity: ~ + # resume + resume_state: ~ + ignore_resume_networks: ['network_identity'] + +# training settings +train: + optim_g: + type: Adam + lr: !!float 2e-3 + optim_d: + type: Adam + lr: !!float 2e-3 + optim_component: + type: Adam + lr: !!float 2e-3 + + scheduler: + type: MultiStepLR + milestones: [600000, 700000] + gamma: 0.5 + + total_iter: 800000 + warmup_iter: -1 # no warm up + + # losses + # pixel loss + pixel_opt: + type: L1Loss + loss_weight: !!float 1e-1 + reduction: mean + # L1 loss used in pyramid loss, component style loss and identity loss + L1_opt: + type: L1Loss + loss_weight: 1 + reduction: mean + + # image pyramid loss + pyramid_loss_weight: 1 + remove_pyramid_loss: 50000 + # perceptual loss (content and style losses) + perceptual_opt: + type: PerceptualLoss + layer_weights: + # before relu + 'conv1_2': 0.1 + 'conv2_2': 0.1 + 'conv3_4': 1 + 'conv4_4': 1 + 'conv5_4': 1 + vgg_type: vgg19 + use_input_norm: true + perceptual_weight: !!float 1 + style_weight: 50 + range_norm: true + criterion: l1 + # gan loss + gan_opt: + type: GANLoss + gan_type: wgan_softplus + loss_weight: !!float 1e-1 + # r1 regularization for discriminator + r1_reg_weight: 10 + # facial component loss + gan_component_opt: + type: GANLoss + gan_type: vanilla + real_label_val: 1.0 + fake_label_val: 0.0 + loss_weight: !!float 1 + comp_style_weight: 200 + # identity loss + identity_weight: 10 + + net_d_iters: 1 + net_d_init_iters: 0 + net_d_reg_every: 1 + +# validation settings +val: + val_freq: !!float 5e3 + save_img: True + use_pbar: True + + metrics: + psnr: # metric name + type: calculate_psnr + crop_border: 0 + test_y_channel: false diff --git a/motion-gan-pipeline/GFPGAN/tests/test_arcface_arch.py b/motion-gan-pipeline/GFPGAN/tests/test_arcface_arch.py new file mode 100644 index 0000000..b4b28d3 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/tests/test_arcface_arch.py @@ -0,0 +1,49 @@ +import torch + +from gfpgan.archs.arcface_arch import BasicBlock, Bottleneck, ResNetArcFace + + +def test_resnetarcface(): + """Test arch: ResNetArcFace.""" + + # model init and forward (gpu) + if torch.cuda.is_available(): + net = ResNetArcFace(block='IRBlock', layers=(2, 2, 2, 2), use_se=True).cuda().eval() + img = torch.rand((1, 1, 128, 128), dtype=torch.float32).cuda() + output = net(img) + assert output.shape == (1, 512) + + # -------------------- without SE block ----------------------- # + net = ResNetArcFace(block='IRBlock', layers=(2, 2, 2, 2), use_se=False).cuda().eval() + output = net(img) + assert output.shape == (1, 512) + + +def test_basicblock(): + """Test the BasicBlock in arcface_arch""" + block = BasicBlock(1, 
3, stride=1, downsample=None).cuda() + img = torch.rand((1, 1, 12, 12), dtype=torch.float32).cuda() + output = block(img) + assert output.shape == (1, 3, 12, 12) + + # ----------------- use the downsample module--------------- # + downsample = torch.nn.UpsamplingNearest2d(scale_factor=0.5).cuda() + block = BasicBlock(1, 3, stride=2, downsample=downsample).cuda() + img = torch.rand((1, 1, 12, 12), dtype=torch.float32).cuda() + output = block(img) + assert output.shape == (1, 3, 6, 6) + + +def test_bottleneck(): + """Test the Bottleneck in arcface_arch""" + block = Bottleneck(1, 1, stride=1, downsample=None).cuda() + img = torch.rand((1, 1, 12, 12), dtype=torch.float32).cuda() + output = block(img) + assert output.shape == (1, 4, 12, 12) + + # ----------------- use the downsample module--------------- # + downsample = torch.nn.UpsamplingNearest2d(scale_factor=0.5).cuda() + block = Bottleneck(1, 1, stride=2, downsample=downsample).cuda() + img = torch.rand((1, 1, 12, 12), dtype=torch.float32).cuda() + output = block(img) + assert output.shape == (1, 4, 6, 6) diff --git a/motion-gan-pipeline/GFPGAN/tests/test_ffhq_degradation_dataset.py b/motion-gan-pipeline/GFPGAN/tests/test_ffhq_degradation_dataset.py new file mode 100644 index 0000000..fa56c03 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/tests/test_ffhq_degradation_dataset.py @@ -0,0 +1,96 @@ +import pytest +import yaml + +from gfpgan.data.ffhq_degradation_dataset import FFHQDegradationDataset + + +def test_ffhq_degradation_dataset(): + + with open('tests/data/test_ffhq_degradation_dataset.yml', mode='r') as f: + opt = yaml.load(f, Loader=yaml.FullLoader) + + dataset = FFHQDegradationDataset(opt) + assert dataset.io_backend_opt['type'] == 'disk' # io backend + assert len(dataset) == 1 # whether to read correct meta info + assert dataset.kernel_list == ['iso', 'aniso'] # correct initialization of the degradation configurations + assert dataset.color_jitter_prob == 1 + + # test __getitem__ + result = dataset.__getitem__(0) + # check returned keys + expected_keys = ['gt', 'lq', 'gt_path'] + assert set(expected_keys).issubset(set(result.keys())) + # check shape and contents + assert result['gt'].shape == (3, 512, 512) + assert result['lq'].shape == (3, 512, 512) + assert result['gt_path'] == 'tests/data/gt/00000000.png' + + # ------------------ test with probability = 0 -------------------- # + opt['color_jitter_prob'] = 0 + opt['color_jitter_pt_prob'] = 0 + opt['gray_prob'] = 0 + opt['io_backend'] = dict(type='disk') + dataset = FFHQDegradationDataset(opt) + assert dataset.io_backend_opt['type'] == 'disk' # io backend + assert len(dataset) == 1 # whether to read correct meta info + assert dataset.kernel_list == ['iso', 'aniso'] # correct initialization of the degradation configurations + assert dataset.color_jitter_prob == 0 + + # test __getitem__ + result = dataset.__getitem__(0) + # check returned keys + expected_keys = ['gt', 'lq', 'gt_path'] + assert set(expected_keys).issubset(set(result.keys())) + # check shape and contents + assert result['gt'].shape == (3, 512, 512) + assert result['lq'].shape == (3, 512, 512) + assert result['gt_path'] == 'tests/data/gt/00000000.png' + + # ------------------ test lmdb backend -------------------- # + opt['dataroot_gt'] = 'tests/data/ffhq_gt.lmdb' + opt['io_backend'] = dict(type='lmdb') + + dataset = FFHQDegradationDataset(opt) + assert dataset.io_backend_opt['type'] == 'lmdb' # io backend + assert len(dataset) == 1 # whether to read correct meta info + assert dataset.kernel_list == ['iso', 'aniso'] # correct initialization of the degradation configurations
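+ # with the lmdb backend, gt_path is the lmdb key (e.g. '00000000') rather than a file path, as asserted below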
+ assert dataset.color_jitter_prob == 0 + + # test __getitem__ + result = dataset.__getitem__(0) + # check returned keys + expected_keys = ['gt', 'lq', 'gt_path'] + assert set(expected_keys).issubset(set(result.keys())) + # check shape and contents + assert result['gt'].shape == (3, 512, 512) + assert result['lq'].shape == (3, 512, 512) + assert result['gt_path'] == '00000000' + + # ------------------ test with crop_components -------------------- # + opt['crop_components'] = True + opt['component_path'] = 'tests/data/test_eye_mouth_landmarks.pth' + opt['eye_enlarge_ratio'] = 1.4 + opt['gt_gray'] = True + opt['io_backend'] = dict(type='lmdb') + + dataset = FFHQDegradationDataset(opt) + assert dataset.crop_components is True + + # test __getitem__ + result = dataset.__getitem__(0) + # check returned keys + expected_keys = ['gt', 'lq', 'gt_path', 'loc_left_eye', 'loc_right_eye', 'loc_mouth'] + assert set(expected_keys).issubset(set(result.keys())) + # check shape and contents + assert result['gt'].shape == (3, 512, 512) + assert result['lq'].shape == (3, 512, 512) + assert result['gt_path'] == '00000000' + assert result['loc_left_eye'].shape == (4, ) + assert result['loc_right_eye'].shape == (4, ) + assert result['loc_mouth'].shape == (4, ) + + # ------------------ the lmdb backend requires dataroot paths ending with '.lmdb' -------------------- # + with pytest.raises(ValueError): + opt['dataroot_gt'] = 'tests/data/gt' + opt['io_backend'] = dict(type='lmdb') + dataset = FFHQDegradationDataset(opt) diff --git a/motion-gan-pipeline/GFPGAN/tests/test_gfpgan_arch.py b/motion-gan-pipeline/GFPGAN/tests/test_gfpgan_arch.py new file mode 100644 index 0000000..cef14a4 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/tests/test_gfpgan_arch.py @@ -0,0 +1,203 @@ +import torch + +from gfpgan.archs.gfpganv1_arch import FacialComponentDiscriminator, GFPGANv1, StyleGAN2GeneratorSFT +from gfpgan.archs.gfpganv1_clean_arch import GFPGANv1Clean, StyleGAN2GeneratorCSFT + + +def test_stylegan2generatorsft(): + """Test arch: StyleGAN2GeneratorSFT.""" + + # model init and forward (gpu) + if torch.cuda.is_available(): + net = StyleGAN2GeneratorSFT( + out_size=32, + num_style_feat=512, + num_mlp=8, + channel_multiplier=1, + resample_kernel=(1, 3, 3, 1), + lr_mlp=0.01, + narrow=1, + sft_half=False).cuda().eval() + style = torch.rand((1, 512), dtype=torch.float32).cuda() + condition1 = torch.rand((1, 512, 8, 8), dtype=torch.float32).cuda() + condition2 = torch.rand((1, 512, 16, 16), dtype=torch.float32).cuda() + condition3 = torch.rand((1, 512, 32, 32), dtype=torch.float32).cuda() + conditions = [condition1, condition1, condition2, condition2, condition3, condition3] + output = net([style], conditions) + assert output[0].shape == (1, 3, 32, 32) + assert output[1] is None + + # -------------------- with return_latents ----------------------- # + output = net([style], conditions, return_latents=True) + assert output[0].shape == (1, 3, 32, 32) + assert len(output[1]) == 1 + # check latent + assert output[1][0].shape == (8, 512) + + # -------------------- with randomize_noise = False ----------------------- # + output = net([style], conditions, randomize_noise=False) + assert output[0].shape == (1, 3, 32, 32) + assert output[1] is None + + # -------------------- with truncation = 0.5 and mixing----------------------- # + output = net([style, style], conditions, truncation=0.5, truncation_latent=style) + assert output[0].shape == (1, 3, 32, 32) + assert output[1] is None + + +def 
test_gfpganv1(): + """Test arch: GFPGANv1.""" + + # model init and forward (gpu) + if torch.cuda.is_available(): + net = GFPGANv1( + out_size=32, + num_style_feat=512, + channel_multiplier=1, + resample_kernel=(1, 3, 3, 1), + decoder_load_path=None, + fix_decoder=True, + # for stylegan decoder + num_mlp=8, + lr_mlp=0.01, + input_is_latent=False, + different_w=False, + narrow=1, + sft_half=True).cuda().eval() + img = torch.rand((1, 3, 32, 32), dtype=torch.float32).cuda() + output = net(img) + assert output[0].shape == (1, 3, 32, 32) + assert len(output[1]) == 3 + # check out_rgbs for intermediate loss + assert output[1][0].shape == (1, 3, 8, 8) + assert output[1][1].shape == (1, 3, 16, 16) + assert output[1][2].shape == (1, 3, 32, 32) + + # -------------------- with different_w = True ----------------------- # + net = GFPGANv1( + out_size=32, + num_style_feat=512, + channel_multiplier=1, + resample_kernel=(1, 3, 3, 1), + decoder_load_path=None, + fix_decoder=True, + # for stylegan decoder + num_mlp=8, + lr_mlp=0.01, + input_is_latent=False, + different_w=True, + narrow=1, + sft_half=True).cuda().eval() + img = torch.rand((1, 3, 32, 32), dtype=torch.float32).cuda() + output = net(img) + assert output[0].shape == (1, 3, 32, 32) + assert len(output[1]) == 3 + # check out_rgbs for intermediate loss + assert output[1][0].shape == (1, 3, 8, 8) + assert output[1][1].shape == (1, 3, 16, 16) + assert output[1][2].shape == (1, 3, 32, 32) + + +def test_facialcomponentdiscriminator(): + """Test arch: FacialComponentDiscriminator.""" + + # model init and forward (gpu) + if torch.cuda.is_available(): + net = FacialComponentDiscriminator().cuda().eval() + img = torch.rand((1, 3, 32, 32), dtype=torch.float32).cuda() + output = net(img) + assert len(output) == 2 + assert output[0].shape == (1, 1, 8, 8) + assert output[1] is None + + # -------------------- return intermediate features ----------------------- # + output = net(img, return_feats=True) + assert len(output) == 2 + assert output[0].shape == (1, 1, 8, 8) + assert len(output[1]) == 2 + assert output[1][0].shape == (1, 128, 16, 16) + assert output[1][1].shape == (1, 256, 8, 8) + + +def test_stylegan2generatorcsft(): + """Test arch: StyleGAN2GeneratorCSFT.""" + + # model init and forward (gpu) + if torch.cuda.is_available(): + net = StyleGAN2GeneratorCSFT( + out_size=32, num_style_feat=512, num_mlp=8, channel_multiplier=1, narrow=1, sft_half=False).cuda().eval() + style = torch.rand((1, 512), dtype=torch.float32).cuda() + condition1 = torch.rand((1, 512, 8, 8), dtype=torch.float32).cuda() + condition2 = torch.rand((1, 512, 16, 16), dtype=torch.float32).cuda() + condition3 = torch.rand((1, 512, 32, 32), dtype=torch.float32).cuda() + conditions = [condition1, condition1, condition2, condition2, condition3, condition3] + output = net([style], conditions) + assert output[0].shape == (1, 3, 32, 32) + assert output[1] is None + + # -------------------- with return_latents ----------------------- # + output = net([style], conditions, return_latents=True) + assert output[0].shape == (1, 3, 32, 32) + assert len(output[1]) == 1 + # check latent + assert output[1][0].shape == (8, 512) + + # -------------------- with randomize_noise = False ----------------------- # + output = net([style], conditions, randomize_noise=False) + assert output[0].shape == (1, 3, 32, 32) + assert output[1] is None + + # -------------------- with truncation = 0.5 and mixing----------------------- # + output = net([style, style], conditions, truncation=0.5, truncation_latent=style) + 
assert output[0].shape == (1, 3, 32, 32) + assert output[1] is None + + +def test_gfpganv1clean(): + """Test arch: GFPGANv1Clean.""" + + # model init and forward (gpu) + if torch.cuda.is_available(): + net = GFPGANv1Clean( + out_size=32, + num_style_feat=512, + channel_multiplier=1, + decoder_load_path=None, + fix_decoder=True, + # for stylegan decoder + num_mlp=8, + input_is_latent=False, + different_w=False, + narrow=1, + sft_half=True).cuda().eval() + + img = torch.rand((1, 3, 32, 32), dtype=torch.float32).cuda() + output = net(img) + assert output[0].shape == (1, 3, 32, 32) + assert len(output[1]) == 3 + # check out_rgbs for intermediate loss + assert output[1][0].shape == (1, 3, 8, 8) + assert output[1][1].shape == (1, 3, 16, 16) + assert output[1][2].shape == (1, 3, 32, 32) + + # -------------------- with different_w = True ----------------------- # + net = GFPGANv1Clean( + out_size=32, + num_style_feat=512, + channel_multiplier=1, + decoder_load_path=None, + fix_decoder=True, + # for stylegan decoder + num_mlp=8, + input_is_latent=False, + different_w=True, + narrow=1, + sft_half=True).cuda().eval() + img = torch.rand((1, 3, 32, 32), dtype=torch.float32).cuda() + output = net(img) + assert output[0].shape == (1, 3, 32, 32) + assert len(output[1]) == 3 + # check out_rgbs for intermediate loss + assert output[1][0].shape == (1, 3, 8, 8) + assert output[1][1].shape == (1, 3, 16, 16) + assert output[1][2].shape == (1, 3, 32, 32) diff --git a/motion-gan-pipeline/GFPGAN/tests/test_gfpgan_model.py b/motion-gan-pipeline/GFPGAN/tests/test_gfpgan_model.py new file mode 100644 index 0000000..1408ddd --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/tests/test_gfpgan_model.py @@ -0,0 +1,132 @@ +import tempfile +import torch +import yaml +from basicsr.archs.stylegan2_arch import StyleGAN2Discriminator +from basicsr.data.paired_image_dataset import PairedImageDataset +from basicsr.losses.losses import GANLoss, L1Loss, PerceptualLoss + +from gfpgan.archs.arcface_arch import ResNetArcFace +from gfpgan.archs.gfpganv1_arch import FacialComponentDiscriminator, GFPGANv1 +from gfpgan.models.gfpgan_model import GFPGANModel + + +def test_gfpgan_model(): + with open('tests/data/test_gfpgan_model.yml', mode='r') as f: + opt = yaml.load(f, Loader=yaml.FullLoader) + + # build model + model = GFPGANModel(opt) + # test attributes + assert model.__class__.__name__ == 'GFPGANModel' + assert isinstance(model.net_g, GFPGANv1) # generator + assert isinstance(model.net_d, StyleGAN2Discriminator) # discriminator + # facial component discriminators + assert isinstance(model.net_d_left_eye, FacialComponentDiscriminator) + assert isinstance(model.net_d_right_eye, FacialComponentDiscriminator) + assert isinstance(model.net_d_mouth, FacialComponentDiscriminator) + # identity network + assert isinstance(model.network_identity, ResNetArcFace) + # losses + assert isinstance(model.cri_pix, L1Loss) + assert isinstance(model.cri_perceptual, PerceptualLoss) + assert isinstance(model.cri_gan, GANLoss) + assert isinstance(model.cri_l1, L1Loss) + # optimizer + assert isinstance(model.optimizers[0], torch.optim.Adam) + assert isinstance(model.optimizers[1], torch.optim.Adam) + + # prepare data + gt = torch.rand((1, 3, 512, 512), dtype=torch.float32) + lq = torch.rand((1, 3, 512, 512), dtype=torch.float32) + loc_left_eye = torch.rand((1, 4), dtype=torch.float32) + loc_right_eye = torch.rand((1, 4), dtype=torch.float32) + loc_mouth = torch.rand((1, 4), dtype=torch.float32) + data = dict(gt=gt, lq=lq, loc_left_eye=loc_left_eye, 
loc_right_eye=loc_right_eye, loc_mouth=loc_mouth) + model.feed_data(data) + # check data shape + assert model.lq.shape == (1, 3, 512, 512) + assert model.gt.shape == (1, 3, 512, 512) + assert model.loc_left_eyes.shape == (1, 4) + assert model.loc_right_eyes.shape == (1, 4) + assert model.loc_mouths.shape == (1, 4) + + # ----------------- test optimize_parameters -------------------- # + model.feed_data(data) + model.optimize_parameters(1) + assert model.output.shape == (1, 3, 512, 512) + assert isinstance(model.log_dict, dict) + # check returned keys + expected_keys = [ + 'l_g_pix', 'l_g_percep', 'l_g_style', 'l_g_gan', 'l_g_gan_left_eye', 'l_g_gan_right_eye', 'l_g_gan_mouth', + 'l_g_comp_style_loss', 'l_identity', 'l_d', 'real_score', 'fake_score', 'l_d_r1', 'l_d_left_eye', + 'l_d_right_eye', 'l_d_mouth' + ] + assert set(expected_keys).issubset(set(model.log_dict.keys())) + + # ----------------- remove pyramid_loss_weight-------------------- # + model.feed_data(data) + model.optimize_parameters(100000) # large than remove_pyramid_loss = 50000 + assert model.output.shape == (1, 3, 512, 512) + assert isinstance(model.log_dict, dict) + # check returned keys + expected_keys = [ + 'l_g_pix', 'l_g_percep', 'l_g_style', 'l_g_gan', 'l_g_gan_left_eye', 'l_g_gan_right_eye', 'l_g_gan_mouth', + 'l_g_comp_style_loss', 'l_identity', 'l_d', 'real_score', 'fake_score', 'l_d_r1', 'l_d_left_eye', + 'l_d_right_eye', 'l_d_mouth' + ] + assert set(expected_keys).issubset(set(model.log_dict.keys())) + + # ----------------- test save -------------------- # + with tempfile.TemporaryDirectory() as tmpdir: + model.opt['path']['models'] = tmpdir + model.opt['path']['training_states'] = tmpdir + model.save(0, 1) + + # ----------------- test the test function -------------------- # + model.test() + assert model.output.shape == (1, 3, 512, 512) + # delete net_g_ema + model.__delattr__('net_g_ema') + model.test() + assert model.output.shape == (1, 3, 512, 512) + assert model.net_g.training is True # should back to training mode after testing + + # ----------------- test nondist_validation -------------------- # + # construct dataloader + dataset_opt = dict( + name='Demo', + dataroot_gt='tests/data/gt', + dataroot_lq='tests/data/gt', + io_backend=dict(type='disk'), + scale=4, + phase='val') + dataset = PairedImageDataset(dataset_opt) + dataloader = torch.utils.data.DataLoader(dataset=dataset, batch_size=1, shuffle=False, num_workers=0) + assert model.is_train is True + with tempfile.TemporaryDirectory() as tmpdir: + model.opt['path']['visualization'] = tmpdir + model.nondist_validation(dataloader, 1, None, save_img=True) + assert model.is_train is True + # check metric_results + assert 'psnr' in model.metric_results + assert isinstance(model.metric_results['psnr'], float) + + # validation + with tempfile.TemporaryDirectory() as tmpdir: + model.opt['is_train'] = False + model.opt['val']['suffix'] = 'test' + model.opt['path']['visualization'] = tmpdir + model.opt['val']['pbar'] = True + model.nondist_validation(dataloader, 1, None, save_img=True) + # check metric_results + assert 'psnr' in model.metric_results + assert isinstance(model.metric_results['psnr'], float) + + # if opt['val']['suffix'] is None + model.opt['val']['suffix'] = None + model.opt['name'] = 'demo' + model.opt['path']['visualization'] = tmpdir + model.nondist_validation(dataloader, 1, None, save_img=True) + # check metric_results + assert 'psnr' in model.metric_results + assert isinstance(model.metric_results['psnr'], float) diff --git 
a/motion-gan-pipeline/GFPGAN/tests/test_stylegan2_clean_arch.py b/motion-gan-pipeline/GFPGAN/tests/test_stylegan2_clean_arch.py new file mode 100644 index 0000000..78bb920 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/tests/test_stylegan2_clean_arch.py @@ -0,0 +1,52 @@ +import torch + +from gfpgan.archs.stylegan2_clean_arch import StyleGAN2GeneratorClean + + +def test_stylegan2generatorclean(): + """Test arch: StyleGAN2GeneratorClean.""" + + # model init and forward (gpu) + if torch.cuda.is_available(): + net = StyleGAN2GeneratorClean( + out_size=32, num_style_feat=512, num_mlp=8, channel_multiplier=1, narrow=0.5).cuda().eval() + style = torch.rand((1, 512), dtype=torch.float32).cuda() + output = net([style], input_is_latent=False) + assert output[0].shape == (1, 3, 32, 32) + assert output[1] is None + + # -------------------- with return_latents ----------------------- # + output = net([style], input_is_latent=True, return_latents=True) + assert output[0].shape == (1, 3, 32, 32) + assert len(output[1]) == 1 + # check latent + assert output[1][0].shape == (8, 512) + + # -------------------- with randomize_noise = False ----------------------- # + output = net([style], randomize_noise=False) + assert output[0].shape == (1, 3, 32, 32) + assert output[1] is None + + # -------------------- with truncation = 0.5 and mixing----------------------- # + output = net([style, style], truncation=0.5, truncation_latent=style) + assert output[0].shape == (1, 3, 32, 32) + assert output[1] is None + + # ------------------ test make_noise ----------------------- # + out = net.make_noise() + assert len(out) == 7 + assert out[0].shape == (1, 1, 4, 4) + assert out[1].shape == (1, 1, 8, 8) + assert out[2].shape == (1, 1, 8, 8) + assert out[3].shape == (1, 1, 16, 16) + assert out[4].shape == (1, 1, 16, 16) + assert out[5].shape == (1, 1, 32, 32) + assert out[6].shape == (1, 1, 32, 32) + + # ------------------ test get_latent ----------------------- # + out = net.get_latent(style) + assert out.shape == (1, 512) + + # ------------------ test mean_latent ----------------------- # + out = net.mean_latent(2) + assert out.shape == (1, 512) diff --git a/motion-gan-pipeline/GFPGAN/tests/test_utils.py b/motion-gan-pipeline/GFPGAN/tests/test_utils.py new file mode 100644 index 0000000..a963b32 --- /dev/null +++ b/motion-gan-pipeline/GFPGAN/tests/test_utils.py @@ -0,0 +1,43 @@ +import cv2 +from facexlib.utils.face_restoration_helper import FaceRestoreHelper + +from gfpgan.archs.gfpganv1_arch import GFPGANv1 +from gfpgan.archs.gfpganv1_clean_arch import GFPGANv1Clean +from gfpgan.utils import GFPGANer + + +def test_gfpganer(): + # initialize with the clean model + restorer = GFPGANer( + model_path='experiments/pretrained_models/GFPGANCleanv1-NoCE-C2.pth', + upscale=2, + arch='clean', + channel_multiplier=2, + bg_upsampler=None) + # test attribute + assert isinstance(restorer.gfpgan, GFPGANv1Clean) + assert isinstance(restorer.face_helper, FaceRestoreHelper) + + # initialize with the original model + restorer = GFPGANer( + model_path='experiments/pretrained_models/GFPGANv1.pth', + upscale=2, + arch='original', + channel_multiplier=1, + bg_upsampler=None) + # test attribute + assert isinstance(restorer.gfpgan, GFPGANv1) + assert isinstance(restorer.face_helper, FaceRestoreHelper) + + # ------------------ test enhance ---------------- # + img = cv2.imread('tests/data/gt/00000000.png', cv2.IMREAD_COLOR) + result = restorer.enhance(img, has_aligned=False, paste_back=True) + assert result[0][0].shape == (512, 512, 3) + assert 
result[1][0].shape == (512, 512, 3) + assert result[2].shape == (1024, 1024, 3) + + # with has_aligned=True + result = restorer.enhance(img, has_aligned=True, paste_back=False) + assert result[0][0].shape == (512, 512, 3) + assert result[1][0].shape == (512, 512, 3) + assert result[2] is None diff --git a/motion-gan-pipeline/ImageToImage/README.md b/motion-gan-pipeline/ImageToImage/README.md new file mode 100644 index 0000000..52744f5 --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/README.md @@ -0,0 +1,109 @@ +# Image2Image Generation + +This repository contains an implementation of the Image-to-Image translation network used in the Audio-driven Video Synthesis project. \ + +## Requirements + +Use the package manager [pip](https://pip.pypa.io/en/stable/) to install the requirements. + +```bash +pip install -r requirements.txt +``` + +## W&B monitoring + +All training monitoring can be done on Weights & Biases. \ +You can follow a detailed guide for [W&B](https://docs.wandb.ai/?_gl=1*1r4dwbe*_ga*OTk1MTkyMDI1LjE2NjQ4MDkyNDY.*_ga_JH1SJHJQXJ*MTY2NDg4MzM1Mi4yLjEuMTY2NDg4MzM1Ny41NS4wLjA.) to set up your own tracking environment. + +To run our code, follow these simple instructions: +1. Sign up for a free account at https://wandb.ai/site and then log in to your wandb account. + +2. Install the CLI and Python library for interacting with the Weights and Biases API: + ```bash + pip install wandb + ``` + +3. Log in to your wandb account: + ```bash + wandb login + ``` +4. You can now run the code and monitor the experiments on your W&B account. + + +## Losses + +We provide training scripts to experiment with a variety of different training losses: +- L1 loss +- Perceptual VGG loss +- GAN loss +- Optical Flow warping loss for time consistency +- PatchGAN3D Video Discriminator loss for time consistency + +The losses to use can be chosen in the config file. \ +During our experiments we found that the best combination of loss functions is: L1 + VGG Perceptual + PatchGAN3D. A minimal sketch of how the weighted loss terms are combined is given after the first training command below. + +## Train +We provide 3 different scripts for network training: + +1. GAN U-Net: \ +The first proposed model is a classical Image-to-Image network. You can adjust training parameters such as the losses used and their weights by creating your own config in the config folder. \ +You can run the training using the following command: \ + ```bash + python train.py -c $CONFIGPATH + --checkpoints_dir $CHECKPOINTPATH + --input_train_root_dir $INPUTPATH + --output_train_root_dir $OUTPUTPATH + --height $HEIGHT + --width $WIDTH + ``` + Where: + - $CONFIGPATH: path to config file (config/train.yml is given). + - $CHECKPOINTPATH: path to checkpoints directory. + - $INPUTPATH: path to input images folder. + - $OUTPUTPATH: path to target images folder. + - $HEIGHT: height of output. + - $WIDTH: width of output. +

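For reference, the weighted multi-loss combination described in the Losses section reduces to a plain weighted sum. The sketch below mirrors the `Loss` wrapper in `losses/loss.py` (shown later in this diff) with the default weights from `config/train.yml`; `loss_fns`, `loss_weights`, and `combined_loss` are illustrative names, not additional modules in the repository:

```python
import torch.nn as nn

from losses.perceptual_loss_vgg import PerceptualLossVGG  # defined later in this diff

# Mirrors loss_names: [l1_perceptual_loss, perceptual_loss_vgg]
# and loss_weights: [100.0, 10.0] from config/train.yml
loss_fns = [nn.L1Loss(), PerceptualLossVGG()]
loss_weights = [100.0, 10.0]

def combined_loss(y_pred, y_gt):
    # Weighted sum over the configured loss terms, as in losses/loss.py
    return sum(w * fn(y_pred, y_gt) for w, fn in zip(loss_weights, loss_fns))
```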
+ +2. GAN + Optical Flow \ +In addition to the first model, you can train the Image-to-Image network using an optical flow warping loss (sketched below). \ +You can run the training using the following command: + ```bash + python train_optical.py -c $CONFIGPATH + --checkpoints_dir $CHECKPOINTPATH + --input_train_root_dir $INPUTPATH + --output_train_root_dir $OUTPUTPATH + --height $HEIGHT + --width $WIDTH + ``` +

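The optical flow warping loss itself is not part of this excerpt. As a rough illustration, a common formulation warps the previous output frame along the dense flow field and penalizes the difference to the current prediction. The sketch below assumes flow tensors of shape (B, 2, H, W) in pixel units; all names are illustrative, not the repository's API:

```python
import torch
import torch.nn.functional as F

def warp_with_flow(prev_frame: torch.Tensor, flow: torch.Tensor) -> torch.Tensor:
    """Warp prev_frame (B, C, H, W) along a dense flow field (B, 2, H, W) given in pixels."""
    b, _, h, w = prev_frame.shape
    # Base pixel grid, stacked in (x, y) order to match grid_sample's convention
    ys, xs = torch.meshgrid(torch.arange(h), torch.arange(w), indexing='ij')
    base = torch.stack((xs, ys)).float().to(prev_frame.device)  # (2, H, W)
    coords = base.unsqueeze(0) + flow                           # displaced sampling positions
    # Normalize coordinates to [-1, 1] as grid_sample expects
    gx = 2.0 * coords[:, 0] / max(w - 1, 1) - 1.0
    gy = 2.0 * coords[:, 1] / max(h - 1, 1) - 1.0
    grid = torch.stack((gx, gy), dim=-1)                        # (B, H, W, 2)
    return F.grid_sample(prev_frame, grid, align_corners=True)

def flow_warping_loss(pred, prev_pred, flow):
    # L1 between the current prediction and the flow-warped previous prediction
    return F.l1_loss(pred, warp_with_flow(prev_pred, flow))
```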
+ +3. GAN + PatchGAN3D \ +Our final model can be trained using a PatchGAN3D video discriminator. This model proved the best performing in terms of time consistency. \ +You can run the training using the following command: + ```bash + python train_temporal.py -c $CONFIGPATH + --checkpoints_dir $CHECKPOINTPATH + --input_train_root_dir $INPUTPATH + --output_train_root_dir $OUTPUTPATH + --height $HEIGHT + --width $WIDTH + ``` + +## Inference +You can run inference with the following command: \ +```bash +python generate_images.py --dataroot $DATAROOT + --video_name $NAME_VIDEO + --input_test_root_dir $INPUTFOLDER + --out_dir $OUTPUTFOLDER + --checkpoint_dir $CHECKPOINTDIR +``` +Where: +- $DATAROOT: path to the data directory. +- $NAME_VIDEO: name of the target video. +- $INPUTFOLDER: path to input images for the network. +- $OUTPUTFOLDER: path to output folder. +- $CHECKPOINTDIR: path to the existing checkpoint folder. + +If no checkpoints are found, this script will automatically start training the GAN + PatchGAN3D model. \ No newline at end of file diff --git a/motion-gan-pipeline/ImageToImage/config/sweep_train.yml b/motion-gan-pipeline/ImageToImage/config/sweep_train.yml new file mode 100644 index 0000000..a72fdb6 --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/config/sweep_train.yml @@ -0,0 +1,22 @@ +program: train.py +method: random +metric: + goal: minimize + name: metrics_combined +command: + - ${env} + - ${interpreter} + - ${program} + - "-c" + - "config/train.yml" + - ${args} + +parameters: + lr: + values: [0.000001, 0.00001, 0.0001, 0.001] + lr_disc: + values: [0.000001, 0.00001, 0.0001, 0.001] + batch_size: + values: [1, 2, 4, 8] + optimizer: + values: ['adam', 'sgd'] \ No newline at end of file diff --git a/motion-gan-pipeline/ImageToImage/config/sweep_train_temporal.yml b/motion-gan-pipeline/ImageToImage/config/sweep_train_temporal.yml new file mode 100644 index 0000000..fcb2fad --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/config/sweep_train_temporal.yml @@ -0,0 +1,42 @@ +project: video-synth/ImageToImage +entity: apennino +program: train_temporal.py +method: grid +metric: + goal: minimize + name: metrics_combined +command: + - ${env} + - ${interpreter} + - ${program} + - "-c" + - "config/train_temporal.yml" + - "--checkpoints_dir" + - "/mnt/full_pipeline/checkpoints/Alberto" + - "--input_train_root_dir" + - "/mnt/full_pipeline/input_data/video/Alberto/edges" + - "--output_train_root_dir" + - "/mnt/full_pipeline/input_data/video/Alberto/cropped" + - "--input_val_root_dir" + - "/mnt/full_pipeline/input_data/video/Alberto/edges_val" + - "--output_val_root_dir" + - "/mnt/full_pipeline/input_data/video/Alberto/cropped_val" + - "--height" + - "720" + - "--width" + - "405" + - ${args} + +parameters: + steps_train_video_discr: + values: [1, 2, 5] + + loss_gan_video_weight: + values: [0.1, 1, 10] + + lr: + values: [0.001, 0.01] + + lr_decay_steps: + values: [20, 30] + \ No newline at end of file diff --git a/motion-gan-pipeline/ImageToImage/config/test.yml b/motion-gan-pipeline/ImageToImage/config/test.yml new file mode 100644 index 0000000..e3af695 --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/config/test.yml @@ -0,0 +1,23 @@ +# mode +mode: test # val or test + +# image network input size +width: 320 +height: 320 +num_input_channels: 1 + +# dataloader parameters +num_workers: 1 +batch_size: 1 + +# metric functions +metric_names: [mean_absolute_error, mean_squared_error, structual_similarity_index_measure, peak_signal_to_noise_ratio,
frechet_inception_distance] +metric_weights: [1.0, 0.0, 0.0, 0.0, 0.0] + +# data directories and dataset type +input_test_root_dir: /input_images_test +input_val_root_dir: /input_images_test +output_val_root_dir: /output_images_test +label_val_root_dir: /labels_test +dataset_type: CustomDataset + diff --git a/motion-gan-pipeline/ImageToImage/config/train.yml b/motion-gan-pipeline/ImageToImage/config/train.yml new file mode 100644 index 0000000..8d83f6e --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/config/train.yml @@ -0,0 +1,52 @@ + +# image network input size +width: 320 +height: 320 + +# model parameters +use_discriminator: false #was true +# use_video_discriminator: true +lr_disc: 0.0001 + +# training parameters +num_epochs: 50 +lr: 0.001 +lr_decay_steps: 20 +optimizer: adam +num_steps: 100000 + +# network parameters +num_input_channels: 1 + +# dataloader parameters +num_workers: 16 +batch_size: 2 + +# loss functions +loss_names: [l1_perceptual_loss, perceptual_loss_vgg] +loss_weights: [100.0, 10.0] + +# # loss +# loss_names: [l1_perceptual_loss] +# loss_weights: [100.0] +# loss_names: [perceptual_loss_vgg] +# loss_weights: [1.0] + +# metric functions +metric_names: [mean_absolute_error, mean_squared_error, structual_similarity_index_measure, peak_signal_to_noise_ratio, frechet_inception_distance] +metric_weights: [1.0, 0.0, 0.0, 0.0, 0.0] + +# data directories and dataset type +input_train_root_dir: /input_images_train +output_train_root_dir: /output_images_train +label_train_root_dir: /labels_train +input_val_root_dir: /input_images_test +output_val_root_dir: /output_images_test +label_val_root_dir: /labels_test +dataset_type: CustomDataset + +# saving training progress +checkpoints_dir: ./checkpoints/ +save_every: 10 +log_every: 200 +save_val_images: true diff --git a/motion-gan-pipeline/ImageToImage/config/train_optical.yml b/motion-gan-pipeline/ImageToImage/config/train_optical.yml new file mode 100644 index 0000000..423a8e4 --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/config/train_optical.yml @@ -0,0 +1,41 @@ + +# image network input size +width: 320 +height: 320 + +# model parameters +use_discriminator: false #was true +# use_video_discriminator: true +lr_disc: 0.0001 + +# training parameters +num_epochs: 50 +lr: 0.001 +lr_decay_steps: 20 +optimizer: adam +num_steps: 100000 + +# network parameters +num_input_channels: 1 + +# dataloader parameters +num_workers: 16 +batch_size: 3 + +# loss functions +loss_names: [l1_perceptual_loss, perceptual_loss_vgg] +loss_weights: [100.0, 10.0] + +# metric functions +metric_names: [mean_absolute_error, mean_squared_error, structual_similarity_index_measure, peak_signal_to_noise_ratio, frechet_inception_distance] +metric_weights: [1.0, 0.0, 0.0, 0.0, 0.0] + +# data directories and dataset type +dataset_type: OpticalFlowDataset +optical_steps: 5 + +# saving training progress +checkpoints_dir: ./checkpoints/ +save_every: 10 +log_every: 200 +save_val_images: true diff --git a/motion-gan-pipeline/ImageToImage/config/train_temporal.yml b/motion-gan-pipeline/ImageToImage/config/train_temporal.yml new file mode 100644 index 0000000..2d686e8 --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/config/train_temporal.yml @@ -0,0 +1,45 @@ + +# image network input size +width: 320 +height: 320 + +# model parameters +use_discriminator: false +# use_discriminator: true + +# video discriminator +use_video_discriminator: true +steps_train_video_discr: 1 +lr_disc: 0.001 +loss_gan_video_weight: 1 + +# training parameters +num_epochs: 40 +lr: 0.001 
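+# NOTE (assumption, not part of the original config): steps_train_video_discr appears to set how
+# often the PatchGAN3D video discriminator is updated relative to the generator, and
+# loss_gan_video_weight scales its GAN loss term; both are swept in config/sweep_train_temporal.yml.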
+lr_decay_steps: 20 +optimizer: adam +num_steps: 100000 + +# network parameters +num_input_channels: 1 + +# dataloader parameters +num_workers: 16 +batch_size: 8 + +# loss functions +loss_names: [l1_perceptual_loss, perceptual_loss_vgg] +loss_weights: [100.0, 10.0] + +# metric functions +metric_names: [mean_absolute_error, mean_squared_error, structual_similarity_index_measure, peak_signal_to_noise_ratio, frechet_inception_distance] +metric_weights: [1.0, 0.0, -1.0, 0.0, 0.0] + +# data directories and dataset type +dataset_type: CustomDataset + +# saving training progress +save_every: 20 +log_every: 200 +save_train_images: true +save_val_images: true diff --git a/motion-gan-pipeline/ImageToImage/datasets/__init__.py b/motion-gan-pipeline/ImageToImage/datasets/__init__.py new file mode 100644 index 0000000..4dbe0fa --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/datasets/__init__.py @@ -0,0 +1 @@ +from .base import build_dataloader, build_dataset \ No newline at end of file diff --git a/motion-gan-pipeline/ImageToImage/datasets/base.py b/motion-gan-pipeline/ImageToImage/datasets/base.py new file mode 100644 index 0000000..886e8e5 --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/datasets/base.py @@ -0,0 +1,74 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import torch +from torch.utils.data import DataLoader +from torchvision import transforms +from .custom_datasets import CustomDataset, OpticalFlowDataset +from torchvision.transforms.functional import InterpolationMode + + +def build_dataset(args, mode): + + if args.dataset_type == "CustomDataset": + transform_input = transforms.Compose([transforms.ToTensor(), transforms.Resize([args.height, args.width]),]) # transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),]) + transform_output = transforms.Compose([transforms.ToTensor(), transforms.Resize([args.height, args.width]),]) + transform_label_map = transforms.Compose([transforms.Resize([args.height, args.width], interpolation=InterpolationMode.NEAREST),]) + if mode=='train': + input_dir = args.input_train_root_dir + output_dir = args.output_train_root_dir + label_dir = args.label_train_root_dir + elif mode=='val': + input_dir = args.input_val_root_dir + output_dir = args.output_val_root_dir + label_dir = args.label_val_root_dir + elif mode=='test': + input_dir = args.input_test_root_dir + output_dir = None + label_dir = None + + dataset = CustomDataset(args = args, + input_root_dir=input_dir, + output_root_dir=output_dir, + label_root_dir=label_dir, + transform_input=transform_input, + transform_output=transform_output, + transform_label_map=transform_label_map, + mode=mode) + + elif args.dataset_type == "OpticalFlowDataset": + transform_input = transforms.Compose([transforms.ToTensor(), transforms.Resize([args.height, args.width]),]) # transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),]) + transform_output = transforms.Compose([transforms.ToTensor(), transforms.Resize([args.height, args.width]),]) + + if mode=='train': + input_dir = args.input_train_root_dir + output_dir = args.output_train_root_dir + flow_dir = args.flow_train_root_dir + elif mode=='val': + input_dir = args.input_val_root_dir + output_dir = args.output_val_root_dir + flow_dir = args.flow_val_root_dir + elif mode=='test': + input_dir = args.input_test_root_dir + output_dir = None + flow_dir = None + + dataset = OpticalFlowDataset(args = args, + input_root_dir=input_dir, + flow_dir=flow_dir, + 
output_root_dir=output_dir, + transform_input=transform_input, + transform_output=transform_output, + mode=mode) + else: + raise NotImplementedError() + + return dataset + + +def build_dataloader(args, mode, shuffle): + + dataset = build_dataset(args, mode) + dataloader = DataLoader(dataset, batch_size=args.batch_size, shuffle=shuffle, num_workers=args.num_workers, pin_memory=False, drop_last=True) + + return dataloader diff --git a/motion-gan-pipeline/ImageToImage/datasets/custom_datasets.py b/motion-gan-pipeline/ImageToImage/datasets/custom_datasets.py new file mode 100644 index 0000000..1510007 --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/datasets/custom_datasets.py @@ -0,0 +1,200 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import torch +from torch.utils.data import Dataset +from PIL import Image +import os +from torchvision import transforms +import numpy as np + +def readFlow(fn): + """ Read .flo file in Middlebury format""" + # Code adapted from: + # http://stackoverflow.com/questions/28013200/reading-middlebury-flow-files-with-python-bytes-array-numpy + + # WARNING: this will work on little-endian architectures (eg Intel x86) only! + # print 'fn = %s'%(fn) + with open(fn, 'rb') as f: + magic = np.fromfile(f, np.float32, count=1) + if 202021.25 != magic: + print('Magic number incorrect. Invalid .flo file') + return None + else: + w = np.fromfile(f, np.int32, count=1) + h = np.fromfile(f, np.int32, count=1) + # print 'Reading %d x %d flo file\n' % (w, h) + data = np.fromfile(f, np.float32, count=2*int(w)*int(h)) + # Reshape data into 3D array (columns, rows, bands) + # The reshape here is for visualization, the original code is (w,h,2) + return np.resize(data, (int(h), int(w), 2)) + +# Example from https://pytorch.org/tutorials/beginner/data_loading_tutorial.html
class CustomDataset(Dataset): + def __init__(self, args, input_root_dir, output_root_dir, label_root_dir, transform_input=None, transform_output=None, transform_label_map=None, mode='train'): + """ + Args: + input_root_dir (string): Directory with all the input images. + output_root_dir (string): Directory with all the output images. + transform (callable, optional): Optional transform to be applied + on a sample.
+ """ + self.use_label_maps = args.use_label_maps + self.mode = mode + self.input_root_dir = input_root_dir + self.input_img_name_list = sorted(os.listdir(self.input_root_dir)) + self.transform_input = transform_input + if mode=='train' or mode=='val': + self.output_root_dir = output_root_dir + self.output_img_name_list = sorted(os.listdir(self.output_root_dir)) + self.transform_output = transform_output + if args.use_label_maps: + self.transform_label_map = transform_label_map + self.label_root_dir = label_root_dir + self.label_img_name_list = sorted(os.listdir(self.label_root_dir)) + + def convert_to_rgb(self, image): + if image.mode == 'RGBA': + image.load() + image_new = Image.new("RGB", image.size, (255, 255, 255)) + image_new.paste(image, mask=image.split()[3]) + elif image.mode == 'RGB' or image.mode == 'L': + image_new = image + else: + raise ValueError('Non-compatible image format!') + return image_new + + def __len__(self): + return len(os.listdir(self.input_root_dir)) + + def __getitem__(self, idx): + + input_img_name = os.path.join(self.input_root_dir, + self.input_img_name_list[idx]) + input_image = Image.open(input_img_name) + input_image = self.convert_to_rgb(input_image) + + if self.mode == 'train' or self.mode == 'val': + output_img_name = os.path.join(self.output_root_dir, + self.output_img_name_list[idx]) + output_image = Image.open(output_img_name) + output_image = self.convert_to_rgb(output_image) + + if self.use_label_maps: + label_img_name = os.path.join(self.label_root_dir, + self.label_img_name_list[idx]) + label_image = Image.open(label_img_name) + label_image = torch.unsqueeze(torch.tensor(np.array(self.convert_to_rgb(label_image))), 0) + + + sample = {'input_image': self.transform_input(input_image), + 'output_image': self.transform_output(output_image), + 'label_image': self.transform_label_map(label_image), + 'name': self.input_img_name_list[idx]} + + else: + sample = {'input_image': self.transform_input(input_image), + 'output_image': self.transform_output(output_image), + 'name': self.input_img_name_list[idx]} + + else: + sample = {'input_image': self.transform_input(input_image), 'name': self.input_img_name_list[idx]} + + return sample + +class OpticalFlowDataset(Dataset): + def __init__(self, args, input_root_dir, flow_dir, output_root_dir, transform_input=None, transform_output=None, mode='train'): + """ + Args: + input_root_dir (string): Directory with all the input images. + flow_dir (string): Directory with all the computed optical flow. + output_root_dir (string): Directory with all the output images. + transform (callable, optional): Optional transform to be applied + on a sample.
+ """ + self.use_label_maps = args.use_label_maps + self.mode = mode + self.input_root_dir = input_root_dir + self.input_img_name_list = sorted(os.listdir(self.input_root_dir)) + self.transform_input = transform_input + + self.optical_steps = args.optical_steps + + if mode=='train' or mode=='val': + self.output_root_dir = output_root_dir + self.output_img_name_list = sorted(os.listdir(self.output_root_dir)) + self.transform_output = transform_output + + self.flow_dir = flow_dir + self.flow_name_list = sorted(os.listdir(self.flow_dir)) + + + def convert_to_rgb(self, image): + if image.mode == 'RGBA': + image.load() + image_new = Image.new("RGB", image.size, (255, 255, 255)) + image_new.paste(image, mask=image.split()[3]) + elif image.mode == 'RGB' or image.mode == 'L': + image_new = image + else: + raise ValueError('Non-compatible image format!') + return image_new + + def __len__(self): + return len(os.listdir(self.input_root_dir)) + + def __getitem__(self, idx): + + input_img_name = os.path.join(self.input_root_dir, + self.input_img_name_list[idx]) + input_image = Image.open(input_img_name) + input_image = self.convert_to_rgb(input_image) + + if self.mode == 'train' or self.mode == 'val': + # Load output image + output_img_name = os.path.join(self.output_root_dir, + self.output_img_name_list[idx]) + output_image = Image.open(output_img_name) + output_image = self.convert_to_rgb(output_image) + + # Load previous images & Optical Flows + flow = np.zeros_like(readFlow(os.path.join(self.flow_dir, self.flow_name_list[0]))) + + flows = [] + prev_imgs = [] + + if idx > self.optical_steps: + + for i in range(self.optical_steps): + # Load prev image + prev_img_name = os.path.join(self.output_root_dir, + self.output_img_name_list[idx - i]) + prev_img = Image.open(prev_img_name) + prev_img = self.convert_to_rgb(prev_img) + prev_imgs.append(self.transform_output(prev_img)) + + # load flow + flow_name = os.path.join(self.flow_dir, + self.flow_name_list[idx - i]) + tmp_flow = readFlow(flow_name) + flow += tmp_flow + flows.append(self.transform_output(flow)) + + else: + prev_img_name = os.path.join(self.output_root_dir, self.output_img_name_list[idx]) + prev_img = Image.open(prev_img_name) + prev_img = self.convert_to_rgb(prev_img) + flows = [self.transform_output(flow)] * self.optical_steps + prev_imgs = [self.transform_output(prev_img)] * self.optical_steps + + sample = {'input_image': self.transform_input(input_image), + 'output_image': self.transform_output(output_image), + 'prev_images': prev_imgs, + 'flows': flows, + 'name': self.input_img_name_list[idx]} + + else: + sample = {'input_image': self.transform_input(input_image), 'name': self.input_img_name_list[idx]} + + return sample diff --git a/motion-gan-pipeline/ImageToImage/generate_images.py b/motion-gan-pipeline/ImageToImage/generate_images.py new file mode 100644 index 0000000..51bbc1e --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/generate_images.py @@ -0,0 +1,153 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +from email.policy import default +from PIL import Image +import torch +import PIL +import configargparse +from pathlib import Path +import os +from tqdm import tqdm +import numpy as np + +from models.unet import UNet +from datasets.base import build_dataloader +from utils.utils import create_image_pair, save_image_list + +import warnings +warnings.filterwarnings("ignore") + +def config_parser(): + parser = configargparse.ArgumentParser() + # config file +
parser.add_argument('-c', '--my-config', is_config_file=True, help='config file path', default='config/test.yml') + # dataloader options + parser.add_argument("--mode", type=str, default='test', help="use test mode if no ground truth data is available, val otherwise") + parser.add_argument("--batch_size", type=int, default=16, help="size of the batches") + parser.add_argument("--num_workers", type=int, default=0, help="number of workers to use during batch generation") + parser.add_argument("--num_input_channels", type=int, default=3, help="number of input image channels") + parser.add_argument("--num_output_channels", type=int, default=3, help="number of output image channels") + parser.add_argument("--use_label_maps", action='store_true', help="choose if to use label maps for discriminator") + + # dataset options + parser.add_argument("--dataset_type", type=str, help="options: CustomDataset", default='CustomDataset') + parser.add_argument("--input_test_root_dir", type=str, help="Path to test input images", default='./data/input_images_test') + parser.add_argument("--input_val_root_dir", type=str, help="Path to val input images", default='./data/input_images_val') + parser.add_argument("--label_val_root_dir", type=str, help="Path to val label images", default='./data/label_images_val') + parser.add_argument("--output_val_root_dir", type=str, help="Path to val output images", default='./data/output_images_val') + parser.add_argument("--width", type=int, default=640, help="width") + parser.add_argument("--height", type=int, default=360, help="height") + parser.add_argument("--metric_names", nargs="*", type=str, default=['mean_absolute_error'], help="names of metrics to be logged") + parser.add_argument("--metric_weights", nargs="*", type=float, default=[1.0], help="weights assigned to metrics in the order") + # logging/saving options + parser.add_argument("--video_name", type=str, help='name of the reference video', default='Clara') + parser.add_argument("--out_dir", type=str, help='directory in which to save result images', default='./results/') + # dataroot + parser.add_argument("--dataroot", required=True, type=str, help='input data dataroot', default='') + parser.add_argument("--checkpoint_dir", required=True, type=str, help='path to checkpoint folder', default='../checkpoints/') + return parser + + +def load_model(network, args, device): + if Path(args.load_path).exists(): + checkpoint = torch.load(args.load_path, map_location=device) + network.load_state_dict(checkpoint['model_state_dict']) + try: + args.continue_from_epoch = checkpoint['epoch']+1 + print("-> loaded model %s (epoch: %d)"%(args.load_path, args.continue_from_epoch)) + + except TypeError: + args.continue_from_epoch = None + print("-> loaded model %s (epoch: final)"%(args.load_path)) + + +if __name__=='__main__': + + parser = config_parser() + args = parser.parse_args() + + # Overwrite image dimensions + edge_path = os.path.join(args.dataroot, args.video_name, 'edges') + # img_size = Image.open(os.path.join(edge_path, os.listdir(edge_path)[0])).size + try: + img_size = np.load(os.path.join(args.dataroot, args.video_name, 'img_size.npy')) + except FileNotFoundError: + img_size = Image.open(os.path.join(edge_path, os.listdir(edge_path)[0])).size + + args.width, args.height = img_size + ratio = args.height / args.width + + print('Img Ratio: ', ratio) + + # resize for training quickly + if args.width >= args.height: + args.width = 720 + args.height = int(args.width * ratio) + + else: + args.height = 720 + args.width =
int(args.height / ratio) + + os.makedirs(os.path.join(args.checkpoint_dir, args.video_name), exist_ok=True) + args.load_path = os.path.join(args.checkpoint_dir, args.video_name, 'latest_GAN_model.pt') + + os.makedirs(args.out_dir, exist_ok=True) + + # Normal training + temporal = True + optical = False + + if not os.path.isfile(args.load_path): + print('Checkpoint not found!') + chkp_dir = os.path.join(args.checkpoint_dir, args.video_name) + data_input_path = os.path.join(args.dataroot, args.video_name) + + if temporal: + os.system('python train_temporal.py -c config/train_temporal.yml --checkpoints_dir ' + chkp_dir + + ' --input_train_root_dir ' + data_input_path + + '/edges --output_train_root_dir ' + data_input_path + + '/cropped --height ' + str(args.height) + ' --width ' + str(args.width) + ' --skip_log') + + elif optical: + os.system('python train_optical.py -c config/train_optical.yml --checkpoints_dir ' + chkp_dir + + ' --input_train_root_dir ' + data_input_path + + '/edges --output_train_root_dir ' + data_input_path + + '/cropped --flow_train_root_dir ' + data_input_path + + '/opticalflow --height ' + str(args.height) + ' --width ' + str(args.width)) + + else: + os.system('python train.py -c config/train.yml --checkpoints_dir ' + chkp_dir + + ' --input_train_root_dir ' + data_input_path + + '/edges --output_train_root_dir ' + data_input_path + + '/cropped' + ' --height ' + str(args.height) + ' --width ' + str(args.width)) + + + # check device + device = torch.device("cuda" if torch.cuda.is_available() else "cpu") + print("Running code on", device) + + # initialize the dataloaders + test_loader = build_dataloader(args, mode=args.mode, shuffle=False) + + # build the network + network = UNet(args).to(device) + + load_model(network, args, device) + network.eval() + + # run inference + with torch.no_grad(): + for batch_idx, data in enumerate(tqdm(test_loader)): + + inputs = data['input_image'].to(device) + names = data['name'] + + prediction = network(inputs) + + images = create_image_pair([inputs, prediction]) + images_output = create_image_pair([prediction]) + + # save_image_list(images, args.results_dir, names) + save_image_list(images_output, args.out_dir, names) + diff --git a/motion-gan-pipeline/ImageToImage/losses/__init__.py b/motion-gan-pipeline/ImageToImage/losses/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/motion-gan-pipeline/ImageToImage/losses/gan_loss.py b/motion-gan-pipeline/ImageToImage/losses/gan_loss.py new file mode 100644 index 0000000..5f8f30b --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/losses/gan_loss.py @@ -0,0 +1,18 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import torch +import torch.nn as nn + + +class GANLoss(nn.Module): + def __init__(self): + super(GANLoss, self).__init__() + self.loss = nn.BCEWithLogitsLoss() + + def forward(self, y_pred, real_image_flag): + if real_image_flag: + target = torch.ones_like(y_pred) + else: + target = torch.zeros_like(y_pred) + return self.loss(y_pred, target) diff --git a/motion-gan-pipeline/ImageToImage/losses/loss.py b/motion-gan-pipeline/ImageToImage/losses/loss.py new file mode 100644 index 0000000..d2b1bce --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/losses/loss.py @@ -0,0 +1,32 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import torch +import torch.nn as nn +from .perceptual_loss_vgg import PerceptualLossVGG +from .gan_loss import 
GANLoss + +class Loss(nn.Module): + def __init__(self, args, device): + super().__init__() + available_losses = { + 'perceptual_loss_vgg': lambda: PerceptualLossVGG(layers_weights=[1.0/32,1.0/16,1.0/8,1.0/4,1.0]), #[1.0/32,1.0/16,1.0/8,1.0/4,1.0] + 'l1_perceptual_loss': lambda: nn.L1Loss() + } + self.loss_names = args.loss_names + self.loss_weights = args.loss_weights + self.loss_list = [] + assert(len(self.loss_names)==len(self.loss_weights)) + for loss_name in self.loss_names: + if loss_name in available_losses: + self.loss_list.append(available_losses[loss_name]().to(device)) + + def eval(self): + for loss in self.loss_list: + loss.eval() + + def forward(self, y_pred, y_gt): + loss = 0 + for k, loss_fn in enumerate(self.loss_list): + loss += self.loss_weights[k]*loss_fn(y_pred, y_gt) + return loss diff --git a/motion-gan-pipeline/ImageToImage/losses/perceptual_loss_vgg.py b/motion-gan-pipeline/ImageToImage/losses/perceptual_loss_vgg.py new file mode 100644 index 0000000..3001641 --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/losses/perceptual_loss_vgg.py @@ -0,0 +1,69 @@ +# extract perceptual features from the pre-trained Vgg16 network +# these features are used for the perceptual loss function (https://arxiv.org/abs/1603.08155) +# +# based on the code snippet of W. Falcon: +# https://gist.github.com/williamFalcon/1ee773c159ff5d76d47518653369d890 + +import torch +import torch.nn as nn +from torchvision import models + +class Vgg16Features(nn.Module): + + def __init__(self, + requires_grad=False, + layers_weights=None): + super(Vgg16Features, self).__init__() + if layers_weights is None: + self.layers_weights = [1.0, 1.0 / 16, 1.0 / 8, 1.0 / 4, 1.0] + else: + self.layers_weights = layers_weights + + vgg_pretrained_features = models.vgg16(pretrained=True).features + self.slice1 = nn.Sequential() + self.slice2 = nn.Sequential() + self.slice3 = nn.Sequential() + self.slice4 = nn.Sequential() + for x in range(4): + self.slice1.add_module(str(x), vgg_pretrained_features[x]) + for x in range(4, 9): + self.slice2.add_module(str(x), vgg_pretrained_features[x]) + for x in range(9, 16): + self.slice3.add_module(str(x), vgg_pretrained_features[x]) + for x in range(16, 23): + self.slice4.add_module(str(x), vgg_pretrained_features[x]) + if not requires_grad: + for param in self.parameters(): + param.requires_grad = False + + def forward(self, x): + + h_0 = x.flatten(start_dim=1) + h = self.slice1(x) + h_relu1_2 = h.flatten(start_dim=1) + h = self.slice2(h) + h_relu2_2 = h.flatten(start_dim=1) + h = self.slice3(h) + h_relu3_3 = h.flatten(start_dim=1) + h = self.slice4(h) + h_relu4_3 = h.flatten(start_dim=1) + + h = torch.cat([self.layers_weights[0] * h_0, + self.layers_weights[1] * h_relu1_2, + self.layers_weights[2] * h_relu2_2, + self.layers_weights[3] * h_relu3_3, + self.layers_weights[4] * h_relu4_3], 1) + + return h + +class PerceptualLossVGG(nn.Module): + + def __init__(self, layers_weights=None): + super().__init__() + self.vgg_features = Vgg16Features(layers_weights=layers_weights) + self.loss = nn.L1Loss() + + def forward(self, y_pred, y_gt): + y_pred_vgg_features = self.vgg_features(y_pred) + y_gt_vgg_features = self.vgg_features(y_gt).detach() + return self.loss(y_pred_vgg_features, y_gt_vgg_features) \ No newline at end of file diff --git a/motion-gan-pipeline/ImageToImage/metrics/__init__.py b/motion-gan-pipeline/ImageToImage/metrics/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/motion-gan-pipeline/ImageToImage/metrics/metrics.py 
b/motion-gan-pipeline/ImageToImage/metrics/metrics.py new file mode 100644 index 0000000..11e6f5c --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/metrics/metrics.py @@ -0,0 +1,66 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import torch +import torch.nn as nn +from ignite.metrics import MeanAbsoluteError, MeanSquaredError, SSIM, PSNR, FID + +from pytorch_fid.inception import InceptionV3 + +class Metrics(): + def __init__(self, args, device): + + # FID score model + dims = 2048 + block_idx = InceptionV3.BLOCK_INDEX_BY_DIM[dims] + model = InceptionV3([block_idx]).to(device) + wrapper_model = WrapperInceptionV3(model) + wrapper_model.eval() + + available_metrics = { + 'mean_absolute_error': lambda: MeanAbsoluteError(device=device), + 'mean_squared_error': lambda: MeanSquaredError(device=device), + 'structual_similarity_index_measure': lambda: SSIM(data_range=1.0, device=device), + 'peak_signal_to_noise_ratio': lambda: PSNR(data_range=1.0, device=device), + 'frechet_inception_distance': lambda: FID(num_features=dims, feature_extractor=wrapper_model, device=device) + } + self.metric_names = args.metric_names + self.metric_weights = args.metric_weights + self.metric_list = [] + for metric_name in self.metric_names: + if metric_name in available_metrics: + self.metric_list.append(available_metrics[metric_name]()) + + def update(self, y_pred, y_gt): + for metric_fn in self.metric_list: + metric_fn.update((y_pred, y_gt)) + + def reset(self): + for metric_fn in self.metric_list: + metric_fn.reset() + + def compute(self): + results = {} + metric_combined = 0 + for idx, (metric_fn, metric_name) in enumerate(zip(self.metric_list, self.metric_names)): + if metric_name == 'peak_signal_to_noise_ratio': + results[metric_name] = metric_fn.compute().item() + else: + results[metric_name] = metric_fn.compute() + metric_combined += self.metric_weights[idx]*results[metric_name] + return results, metric_combined + + +# wrapper class as feature_extractor +class WrapperInceptionV3(nn.Module): + + def __init__(self, fid_incv3): + super().__init__() + self.fid_incv3 = fid_incv3 + + @torch.no_grad() + def forward(self, x): + y = self.fid_incv3(x) + y = y[0] + y = y[:, :, 0, 0] + return y diff --git a/motion-gan-pipeline/ImageToImage/models/__init__.py b/motion-gan-pipeline/ImageToImage/models/__init__.py new file mode 100644 index 0000000..67cafc4 --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/models/__init__.py @@ -0,0 +1 @@ +from .unet import UNet diff --git a/motion-gan-pipeline/ImageToImage/models/patch_gan_discriminator.py b/motion-gan-pipeline/ImageToImage/models/patch_gan_discriminator.py new file mode 100644 index 0000000..e76953d --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/models/patch_gan_discriminator.py @@ -0,0 +1,50 @@ +import torch +import torch.nn as nn +from torchvision.utils import save_image + + +class NLayerDiscriminator(nn.Module): + """Defines a PatchGAN discriminator""" + + def __init__(self, input_nc, ndf=64, n_layers=3, norm_layer=nn.BatchNorm2d): + """Construct a PatchGAN discriminator + Parameters: + input_nc (int) -- the number of channels in input images + ndf (int) -- the number of filters in the last conv layer + n_layers (int) -- the number of conv layers in the discriminator + norm_layer -- normalization layer + """ + super(NLayerDiscriminator, self).__init__() # no need to use bias as BatchNorm2d has affine parameters + + use_bias = norm_layer == nn.InstanceNorm2d + + kw = 4 + padw = 1 + sequence 
= [nn.Conv2d(input_nc, ndf, kernel_size=kw, stride=2, padding=padw), nn.LeakyReLU(0.2, True)] + nf_mult = 1 + nf_mult_prev = 1 + for n in range(1, n_layers): # gradually increase the number of filters + nf_mult_prev = nf_mult + nf_mult = min(2 ** n, 8) + sequence += [ + nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult, kernel_size=kw, stride=2, padding=padw, bias=use_bias), + norm_layer(ndf * nf_mult), + nn.LeakyReLU(0.2, True) + ] + + nf_mult_prev = nf_mult + nf_mult = min(2 ** n_layers, 8) + sequence += [ + nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult, kernel_size=kw, stride=1, padding=padw, bias=use_bias), + norm_layer(ndf * nf_mult), + nn.LeakyReLU(0.2, True) + ] + + sequence += [nn.Conv2d(ndf * nf_mult, 1, kernel_size=kw, stride=1, padding=padw)] # output 1 channel prediction map + self.model = nn.Sequential(*sequence) + + def forward(self, input_img, label_map=None): + """Standard forward.""" + if not label_map==None: + input_img = torch.where((label_map!=4) & (label_map!=5) & (label_map!=10) & (label_map!=11) & (label_map!=12) & (label_map!=1) & (label_map!=6) & (label_map!=7) & (label_map!=2), torch.ones_like(input_img), input_img) + return self.model(input_img) \ No newline at end of file diff --git a/motion-gan-pipeline/ImageToImage/models/unet.py b/motion-gan-pipeline/ImageToImage/models/unet.py new file mode 100644 index 0000000..47ce55b --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/models/unet.py @@ -0,0 +1,36 @@ +""" Full assembly of the parts to form the complete network """ + +from .unet_parts import * + + +class UNet(nn.Module): + def __init__(self, args, bilinear=False): + super().__init__() + self.n_channels_in = args.num_input_channels + self.n_channels_out = args.num_output_channels + self.bilinear = bilinear + self.inc = DoubleConv(self.n_channels_in, 64) + self.down1 = Down(64, 128) + self.down2 = Down(128, 256) + self.down3 = Down(256, 512) + factor = 2 if bilinear else 1 + self.down4 = Down(512, 1024 // factor) + self.up1 = Up(1024, 512 // factor, bilinear) + self.up2 = Up(512, 256 // factor, bilinear) + self.up3 = Up(256, 128 // factor, bilinear) + self.up4 = Up(128, 64, bilinear) + self.outc = OutConv(64, self.n_channels_out) + + def forward(self, x): + x1 = self.inc(x) + x2 = self.down1(x1) + x3 = self.down2(x2) + x4 = self.down3(x3) + x5 = self.down4(x4) + x = self.up1(x5, x4) + x = self.up2(x, x3) + x = self.up3(x, x2) + x = self.up4(x, x1) + x_out = self.outc(x) + return x_out + diff --git a/motion-gan-pipeline/ImageToImage/models/unet_parts.py b/motion-gan-pipeline/ImageToImage/models/unet_parts.py new file mode 100644 index 0000000..e7323e6 --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/models/unet_parts.py @@ -0,0 +1,80 @@ +""" Parts of the U-Net model """ + +import torch +import torch.nn as nn +import torch.nn.functional as F + + +class DoubleConv(nn.Module): + """(convolution => [BN] => ReLU) * 2""" + + def __init__(self, in_channels, out_channels, mid_channels=None): + super().__init__() + if not mid_channels: + mid_channels = out_channels + self.double_conv = nn.Sequential( + nn.Conv2d(in_channels, mid_channels, kernel_size=3, padding=1, bias=False), + nn.BatchNorm2d(mid_channels), + nn.ReLU(inplace=True), + nn.Conv2d(mid_channels, out_channels, kernel_size=3, padding=1, bias=False), + nn.BatchNorm2d(out_channels), + nn.ReLU(inplace=True) + ) + + def forward(self, x): + return self.double_conv(x) + + +class Down(nn.Module): + """Downscaling with maxpool then double conv""" + + def __init__(self, in_channels, out_channels): + super().__init__() + 
self.maxpool_conv = nn.Sequential( + nn.MaxPool2d(2), + DoubleConv(in_channels, out_channels) + ) + + def forward(self, x): + return self.maxpool_conv(x) + + +class Up(nn.Module): + """Upscaling then double conv""" + + def __init__(self, in_channels, out_channels, bilinear=True): + super().__init__() + + # if bilinear, use the normal convolutions to reduce the number of channels + if bilinear: + self.up = nn.Upsample(scale_factor=2, mode='bilinear', align_corners=True) + self.conv = DoubleConv(in_channels, out_channels, in_channels // 2) + else: + self.up = nn.ConvTranspose2d(in_channels, in_channels // 2, kernel_size=2, stride=2) + self.conv = DoubleConv(in_channels, out_channels) + + def forward(self, x1, x2): + x1 = self.up(x1) + # input is CHW + diffY = x2.size()[2] - x1.size()[2] + diffX = x2.size()[3] - x1.size()[3] + + x1 = F.pad(x1, [diffX // 2, diffX - diffX // 2, + diffY // 2, diffY - diffY // 2]) + # if you have padding issues, see + # https://github.com/HaiyongJiang/U-Net-Pytorch-Unstructured-Buggy/commit/0e854509c2cea854e247a9c615f175f76fbb2e3a + # https://github.com/xiaopeng-liao/Pytorch-UNet/commit/8ebac70e633bac59fc22bb5195e513d5832fb3bd + x = torch.cat([x2, x1], dim=1) + return self.conv(x) + + +class OutConv(nn.Module): + def __init__(self, in_channels, out_channels): + super().__init__() + self.conv = nn.Sequential( + nn.Conv2d(in_channels, out_channels, kernel_size=1), + nn.Sigmoid(), + ) + + def forward(self, x): + return self.conv(x) diff --git a/motion-gan-pipeline/ImageToImage/models/video_discriminator.py b/motion-gan-pipeline/ImageToImage/models/video_discriminator.py new file mode 100644 index 0000000..46acfec --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/models/video_discriminator.py @@ -0,0 +1,80 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import torch +import torch.nn as nn +import torch.nn.functional as F +from torch.autograd import Variable + +from torch.nn.utils import spectral_norm +from torch.nn.init import xavier_uniform_ +import torch.nn.init as init + + +def init_weights(m): + if type(m) == nn.Linear or type(m) == nn.Conv2d: + xavier_uniform_(m.weight) + m.bias.data.fill_(0.) + +def snconv2d(in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True): + return spectral_norm(nn.Conv2d(in_channels=in_channels, + out_channels=out_channels, + kernel_size=kernel_size, + stride=stride, + padding=padding, + dilation=dilation, + groups=groups, + bias=bias)) + +def snconv3d(in_channels, out_channels, kernel_size, stride=1, padding=0, dilation=1, groups=1, bias=True): + return spectral_norm(nn.Conv3d(in_channels=in_channels, out_channels=out_channels, kernel_size=kernel_size, + stride=stride, padding=padding, dilation=dilation, groups=groups, bias=bias)) + +def snlinear(in_features, out_features, bias=True): + return spectral_norm(nn.Linear(in_features=in_features, out_features=out_features, bias=bias)) + +def sn_embedding(num_embeddings, embedding_dim): + return spectral_norm(nn.Embedding(num_embeddings=num_embeddings, embedding_dim=embedding_dim)) + + +class Discriminator_3D(nn.Module): + def __init__(self, d_conv_dim=96, T=8): + super(Discriminator_3D, self).__init__() + self.main = nn.Sequential( + # input is (nc) x T x 96 x 96 + snconv3d(3, d_conv_dim, 4, 2, 1, bias=False), + nn.LeakyReLU(0.2, inplace=True), + # state size. 
(ndf) x T/2 x 48 x 48 + snconv3d(d_conv_dim, d_conv_dim * 2, 4, 2, 1, bias=False), + nn.BatchNorm3d(d_conv_dim * 2), + nn.LeakyReLU(0.2, inplace=True), + # state size. (ndf*2) x T/4 x 24 x 24 + snconv3d(d_conv_dim * 2, d_conv_dim * 4, 4, 2, 1, bias=False), + nn.BatchNorm3d(d_conv_dim * 4), + nn.LeakyReLU(0.2, inplace=True), + # state size. (ndf*4) x T/8 x 12 x 12 + + #TODO: fix this because depends on batchsize for us + # snconv3d(d_conv_dim * 4, d_conv_dim * 8, 4, 2, 1, bias=False), + # nn.BatchNorm3d(d_conv_dim * 8), + # nn.LeakyReLU(0.2, inplace=True), + # # state size. (ndf*8) x T/16 x 6 x 6 + ) + self.linear = snlinear(d_conv_dim * 4, 1) # was 8 + self.final_conv = snconv2d(d_conv_dim * 4, 1, 4, 1, 1, bias=False) + # self.embed = sn_embedding(num_classes, d_conv_dim*8) + # # Weight init + # self.apply(init_weights) + # xavier_uniform_(self.embed.weight) + + def forward(self, input): + input = input.transpose(2,1) + output = self.main(input) + output = output.squeeze(2) + output_conv = self.final_conv(output) + # output = torch.sum(output, dim=[3,4]).view(-1, output.size(1)) + # output_linear = self.linear(output) + # y = class_id.long() + # embed = self.embed(y) + # prod = (output * embed).sum(1) + return output_conv diff --git a/motion-gan-pipeline/ImageToImage/train.py b/motion-gan-pipeline/ImageToImage/train.py new file mode 100644 index 0000000..2c681c7 --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/train.py @@ -0,0 +1,298 @@ +from email.policy import default +import os +import configargparse +import torch +import torch.nn as nn +from pathlib import Path +import wandb +from datasets.base import build_dataloader +from models.unet import UNet +from models.patch_gan_discriminator import NLayerDiscriminator +from models.video_discriminator import Discriminator_3D +from losses.loss import Loss +from losses.gan_loss import GANLoss +from utils.utils import create_image_pair +from metrics.metrics import Metrics + +def config_parser(): + parser = configargparse.ArgumentParser() + # config file + parser.add_argument('-c', '--my-config', required=True, is_config_file=True, help='config file path') + # training and model options + parser.add_argument("--optimizer", type=str, help='choose optimizer type', default='adam') + parser.add_argument("--num_epochs", type=int, default=61, help="number of epochs of training") + parser.add_argument("--num_steps", type=int, default=25000, help="number of steps of training") + parser.add_argument("--continue_from_epoch", type=int, default=0, help="Continue training from epoch (default=0)") + parser.add_argument("--batch_size", type=int, default=16, help="size of the batches") + parser.add_argument("--lr", type=float, default=0.001, help="learning rate") # 0.00001 + parser.add_argument("--lr_decay_steps", type=int, default=10, help="learning rate decay steps in epochs") + parser.add_argument("--lr_disc", type=float, default=0.001, help="discriminator learning rate") + parser.add_argument("--num_workers", type=int, default=0, help="number of workers to use during batch generation") + parser.add_argument("--num_input_channels", type=int, default=3, help="number of input image channels") + parser.add_argument("--num_output_channels", type=int, default=3, help="number of output image channels") + parser.add_argument("--loss_names", nargs="*", type=str, default=['perceptual_loss_vgg'], help="names of losses used in training") + parser.add_argument("--loss_weights", nargs="*", type=float, default=[1.0], help="weights assigned to losses in the order") + 
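+    # [Editor's note] configargparse also reads these options from the file passed via -c,
+    # one option per line; a hypothetical config file could contain, e.g.:
+    #     batch_size = 8
+    #     loss_names = [perceptual_loss_vgg]
+    #     use_discriminator = true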
parser.add_argument("--metric_names", nargs="*", type=str, default=['mean_absolute_error'], help="names of metrics to be logged") + parser.add_argument("--metric_weights", nargs="*", type=float, default=[1.0], help="weights assigned to metrics in the order") + parser.add_argument("--use_discriminator", action='store_true', help="choose if to use discriminator network") + parser.add_argument("--use_label_maps", action='store_true', help="choose if to use label maps for discriminator") + # dataset options + parser.add_argument("--dataset_type", type=str, help="options: CustomDataset", default='CustomDataset') + parser.add_argument("--input_train_root_dir", type=str, help="Path to training input images", default='./data/input_images_train') + parser.add_argument("--output_train_root_dir", type=str, help="Path to training output images", default='./data/output_images_train') + parser.add_argument("--label_train_root_dir", type=str, help="Path to training label images", default='./data/label_images_train') + parser.add_argument("--input_val_root_dir", type=str, help="Path to val input images", default='./data/input_images_val') + parser.add_argument("--output_val_root_dir", type=str, help="Path to val output images", default='./data/output_images_val') + parser.add_argument("--label_val_root_dir", type=str, help="Path to val label images", default='./data/label_images_val') + parser.add_argument("--width", type=int, default=640, help="width") + parser.add_argument("--height", type=int, default=360, help="height") + # logging/saving options + parser.add_argument("--checkpoints_dir", type=str, help='specify the directory to save the model', default='./chkpts/experiment/') + parser.add_argument("--save_every", type=int, help='save model every # epochs', default=5) + parser.add_argument("--log_every", type=int, help='save logs every # batches', default=100) + parser.add_argument("--save_val_images", action='store_true', help='choose if you want to save validation images') + parser.add_argument("--wandb_dir", type=str, help="directory where to save wandb data locally", default='./wandb') + return parser + + +def save_model(network, epoch, optimizer, loss, best_val_loss, args, discriminator=None, optimizer_disc=None, best_model=False): + print('Saving model epoch: ', epoch) + if not Path(args.checkpoints_dir).exists(): + Path(args.checkpoints_dir).mkdir(exist_ok=True) + + save_dir = Path(args.checkpoints_dir, 'latest_GAN.pt') + + if args.use_discriminator: + torch.save({'epoch': epoch, + 'model_state_dict': network.state_dict(), + 'discriminator_state_dict': discriminator.state_dict(), + 'optimizer_state_dict': optimizer.state_dict(), + 'optimizer_disc_state_dict': optimizer_disc.state_dict(), + 'loss': loss, + 'best_val_loss': best_val_loss}, + save_dir) + else: + torch.save({'epoch': epoch, + 'model_state_dict': network.state_dict(), + 'optimizer_state_dict': optimizer.state_dict(), + 'loss': loss, + 'best_val_loss': best_val_loss}, + save_dir) + + return save_dir + + +def print_progress(loss, best_loss = None, mode='train'): + print(mode + ' loss: ', loss) + if best_loss: + print('best ' + mode + ' loss: ', best_loss) + + +def set_requires_grad(network, requires_grad_flag): + for param in network.parameters(): + param.requires_grad = requires_grad_flag + + +def init_weights(net, init_type='normal', init_gain=0.02): + """Initialize network weights. 
+ Parameters: + net (network) -- network to be initialized + init_type (str) -- the name of an initialization method: normal | xavier | kaiming | orthogonal + init_gain (float) -- scaling factor for normal, xavier and orthogonal. + We use 'normal' in the original pix2pix and CycleGAN paper. But xavier and kaiming might + work better for some applications. Feel free to try yourself. + """ + def init_func(m): # define the initialization function + classname = m.__class__.__name__ + if hasattr(m, 'weight') and (classname.find('Conv') != -1 or classname.find('Linear') != -1): + if init_type == 'normal': + nn.init.normal_(m.weight.data, 0.0, init_gain) + elif init_type == 'xavier': + nn.init.xavier_normal_(m.weight.data, gain=init_gain) + elif init_type == 'kaiming': + nn.init.kaiming_normal_(m.weight.data, a=0, mode='fan_in') + elif init_type == 'orthogonal': + nn.init.orthogonal_(m.weight.data, gain=init_gain) + else: + raise NotImplementedError('initialization method [%s] is not implemented' % init_type) + if hasattr(m, 'bias') and m.bias is not None: + nn.init.constant_(m.bias.data, 0.0) + elif classname.find('BatchNorm2d') != -1: # BatchNorm Layer's weight is not a matrix; only normal distribution applies. + nn.init.normal_(m.weight.data, 1.0, init_gain) + nn.init.constant_(m.bias.data, 0.0) + + print('initialize network with %s' % init_type) + net.apply(init_func) + + +if __name__=='__main__': + + parser = config_parser() + args = parser.parse_args() + + # check device + device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") + print("Running code on", device) + + # prepare data + train_loader = build_dataloader(args, mode='train', shuffle=True) + + # build the network + network = UNet(args).to(device) + init_weights(network) + + # make the networks multi-gpu available + if torch.cuda.device_count() > 1: + print("There are", torch.cuda.device_count(), "gpus available.") + network = nn.DataParallel(network) + + # Build loss function + loss_fn = Loss(args, device).to(device) + # loss_fn.eval() # make sure networks used in calculating loss (e.g. 
VGG) are in eval mode (because of batch normalization and dropout layers) + + #Build metrics functions + metrics_fn = Metrics(args, device) + metrics_fn.reset() + + # Build an optimizer + if args.optimizer == 'adam': + optimizer = torch.optim.Adam(network.parameters(), lr=args.lr, betas=(0.5, 0.999)) + elif args.optimizer == 'sgd': + optimizer = torch.optim.SGD(network.parameters(), lr=args.lr) + else: + raise NotImplementedError() + + # Learning rate decay + lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=args.lr_decay_steps, gamma=0.1) + + # Print number of parameters in the generator network + pytorch_generator_params = sum(p.numel() for p in network.parameters()) + print('Number of parameters in the generator network: ', pytorch_generator_params) + + # build discriminator if it is used + if args.use_discriminator: + discriminator = NLayerDiscriminator(input_nc=args.num_input_channels+args.num_output_channels, n_layers=3).to(device) + init_weights(discriminator) + set_requires_grad(discriminator, False) + loss_gan_fn = GANLoss().to(device) + if args.optimizer == 'adam': + optimizer_discriminator = torch.optim.Adam(discriminator.parameters(), lr=args.lr_disc, betas=(0.5, 0.999)) + elif args.optimizer == 'sgd': + optimizer_discriminator = torch.optim.SGD(discriminator.parameters(), lr=args.lr_disc) + pytorch_discriminator_params = sum(p.numel() for p in discriminator.parameters()) + print('Number of parameters in the discriminator network: ', pytorch_discriminator_params) + else: + discriminator = None + optimizer_discriminator = None + + best_val_loss = None + + # initialize wandb + wandb.init(project='GAN-video-synthesis', config=args, tags=["demo"], job_type='train', dir=args.wandb_dir) + args = wandb.config + wandb.watch(network) + + # Start the training process + start_train_disc = -1 + step = 0 + for epoch in range(args.continue_from_epoch, args.num_epochs): + print('Training epoch: ', epoch) + + network.train() + if args.use_discriminator: + discriminator.train() + + for batch_idx, data in enumerate(train_loader): + + inputs = data["input_image"].to(device) + labels = data["output_image"].to(device) + if args.use_label_maps: + label_maps = data["label_image"].to(device) + else: + label_maps = None + + prediction = network(inputs) + + loss_perceptual = loss_fn(prediction, labels) + + if args.use_discriminator and epoch>start_train_disc: + + set_requires_grad(discriminator, True) + + real_pair = torch.cat((inputs, labels), 1) + pred_real = discriminator(real_pair, label_maps) + loss_real = loss_gan_fn(pred_real, real_image_flag=True) + + fake_pair = torch.cat((inputs, prediction.detach()), 1) + pred_fake = discriminator(fake_pair, label_maps) + loss_fake = loss_gan_fn(pred_fake, real_image_flag=False) + + loss_discriminator = (loss_fake + loss_real)*0.5 + + optimizer_discriminator.zero_grad() + loss_discriminator.backward() + optimizer_discriminator.step() + + set_requires_grad(discriminator, False) + + fake_pair = torch.cat((inputs, prediction), 1) + pred_fake = discriminator(fake_pair, label_maps) + + loss_gan = loss_gan_fn(pred_fake, real_image_flag=True) + + loss = loss_perceptual + loss_gan + else: + loss = loss_perceptual + + optimizer.zero_grad() + loss.backward() + optimizer.step() + + step = step + 1 + + if batch_idx % args.log_every == 0: + if args.save_val_images: + image_concat = create_image_pair([inputs.detach(), prediction.detach(), labels.detach()]) + wandb_images = [wandb.Image(image) for image in image_concat] + if args.use_discriminator and 
epoch>start_train_disc: + wandb.log({'train loss perceptual': loss_perceptual, + 'train loss gan': loss_gan, + 'discriminator train loss': loss_discriminator.detach(), + 'train images predictions': wandb_images, + 'discriminator loss real': loss_real, + 'discriminator loss fake': loss_fake}, step=step) + wandb.log({'train_loss': loss.detach()}, step=step) + else: + wandb.log({'train images predictions': wandb_images}, step=step) + wandb.log({'train_loss': loss.detach()}, step=step) + else: + if args.use_discriminator and epoch>start_train_disc: + wandb.log({'train loss perceptual': loss_perceptual, + 'train loss gan': loss_gan, + 'discriminator train loss': loss_discriminator.detach(), + 'discriminator loss real': loss_real, + 'discriminator loss fake': loss_fake}, step=step) + wandb.log({'train_loss': loss.detach()}, step=step) + else: + wandb.log({'train_loss': loss.detach()}, step=step) + + if step == args.num_steps: + print(f'Breaking at {args.num_steps} steps.') + save_dir = save_model(network, epoch, optimizer, loss, best_val_loss, args, discriminator, optimizer_discriminator, best_model=False) + model_artifact = wandb.Artifact('last_model_train', type='model') + model_artifact.add_file(save_dir) + wandb.log_artifact(model_artifact) + exit() + + if epoch % args.save_every == 0: + save_dir = save_model(network, epoch, optimizer, loss, best_val_loss, args, discriminator, optimizer_discriminator, best_model=False) + model_artifact = wandb.Artifact('last_model_train', type='model') + model_artifact.add_file(save_dir) + wandb.log_artifact(model_artifact) + + lr_scheduler.step() + + save_dir = save_model(network, epoch, optimizer, loss, best_val_loss, args, discriminator, optimizer_discriminator, best_model=False) + model_artifact = wandb.Artifact('last_model_train', type='model') + model_artifact.add_file(save_dir) + wandb.log_artifact(model_artifact) \ No newline at end of file diff --git a/motion-gan-pipeline/ImageToImage/train_optical.py b/motion-gan-pipeline/ImageToImage/train_optical.py new file mode 100644 index 0000000..07d9004 --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/train_optical.py @@ -0,0 +1,324 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +from email.policy import default +import os +import configargparse +import torch +import torch.nn as nn +from pathlib import Path +import wandb +from datasets.base import build_dataloader +from models.unet import UNet +from models.patch_gan_discriminator import NLayerDiscriminator +from models.video_discriminator import Discriminator_3D +from losses.loss import Loss +from losses.gan_loss import GANLoss +from utils.utils import create_image_pair +from utils.optical_flow import backwarp +from metrics.metrics import Metrics + +def config_parser(): + parser = configargparse.ArgumentParser() + # config file + parser.add_argument('-c', '--my-config', required=True, is_config_file=True, help='config file path') + # training and model options + parser.add_argument("--optimizer", type=str, help='choose optimizer type', default='adam') + parser.add_argument("--num_epochs", type=int, default=61, help="number of epochs of training") + parser.add_argument("--num_steps", type=int, default=25000, help="number of steps of training") + parser.add_argument("--continue_from_epoch", type=int, default=0, help="Continue training from epoch (default=0)") + parser.add_argument("--batch_size", type=int, default=16, help="size of the batches") + parser.add_argument("--lr", type=float, 
default=0.001, help="learning rate") # 0.00001 + parser.add_argument("--lr_decay_steps", type=int, default=10, help="learning rate decay steps in epochs") + parser.add_argument("--lr_disc", type=float, default=0.001, help="discriminator learning rate") + parser.add_argument("--num_workers", type=int, default=0, help="number of workers to use during batch generation") + parser.add_argument("--num_input_channels", type=int, default=3, help="number of input image channels") + parser.add_argument("--num_output_channels", type=int, default=3, help="number of output image channels") + parser.add_argument("--loss_names", nargs="*", type=str, default=['perceptual_loss_vgg'], help="names of losses used in training") + parser.add_argument("--loss_weights", nargs="*", type=float, default=[1.0], help="weights assigned to losses in the order") + parser.add_argument("--metric_names", nargs="*", type=str, default=['mean_absolute_error'], help="names of metrics to be logged") + parser.add_argument("--metric_weights", nargs="*", type=float, default=[1.0], help="weights assigned to metrics in the order") + parser.add_argument("--use_discriminator", action='store_true', help="choose if to use discriminator network") + parser.add_argument("--use_label_maps", action='store_true', help="choose if to use label maps for discriminator") + # dataset options + parser.add_argument("--dataset_type", type=str, help="options: CustomDataset", default='CustomDataset') + parser.add_argument("--input_train_root_dir", type=str, help="Path to training input images", default='./data/input_images_train') + parser.add_argument("--flow_train_root_dir", type=str, help="Path to training optical flows", default='./data/opticalflow_train') + parser.add_argument("--output_train_root_dir", type=str, help="Path to training output images", default='./data/output_images_train') + parser.add_argument("--label_train_root_dir", type=str, help="Path to training label images", default='./data/label_images_train') + parser.add_argument("--input_val_root_dir", type=str, help="Path to val input images", default='./data/input_images_val') + parser.add_argument("--flow_val_root_dir", type=str, help="Path to val optical flows", default='./data/opticalflow_val') + parser.add_argument("--output_val_root_dir", type=str, help="Path to val output images", default='./data/output_images_val') + parser.add_argument("--label_val_root_dir", type=str, help="Path to val label images", default='./data/label_images_val') + parser.add_argument("--width", type=int, default=640, help="width") + parser.add_argument("--height", type=int, default=360, help="height") + parser.add_argument("--optical_steps", type=int, default=1, help="Number of stept to go back with optical flow") + # logging/saving options + parser.add_argument("--checkpoints_dir", type=str, help='specify the directory to save the model', default='./chkpts/experiment/') + parser.add_argument("--save_every", type=int, help='save model every # epochs', default=5) + parser.add_argument("--log_every", type=int, help='save logs every # batches', default=100) + parser.add_argument("--save_val_images", action='store_true', help='choose if you want to save validation images') + parser.add_argument("--wandb_dir", type=str, help="directory where to save wandb data locally", default='./wandb') + return parser + + +def save_model(network, epoch, optimizer, loss, best_val_loss, args, discriminator=None, optimizer_disc=None, best_model=False): + print('Saving model epoch: ', epoch) + if not 
Path(args.checkpoints_dir).exists(): + Path(args.checkpoints_dir).mkdir(exist_ok=True) + + save_dir = Path(args.checkpoints_dir, 'latest_GAN.pt') + + if args.use_discriminator: + torch.save({'epoch': epoch, + 'model_state_dict': network.state_dict(), + 'discriminator_state_dict': discriminator.state_dict(), + 'optimizer_state_dict': optimizer.state_dict(), + 'optimizer_disc_state_dict': optimizer_disc.state_dict(), + 'loss': loss, + 'best_val_loss': best_val_loss}, + save_dir) + else: + torch.save({'epoch': epoch, + 'model_state_dict': network.state_dict(), + 'optimizer_state_dict': optimizer.state_dict(), + 'loss': loss, + 'best_val_loss': best_val_loss}, + save_dir) + + return save_dir + + +def print_progress(loss, best_loss = None, mode='train'): + print(mode + ' loss: ', loss) + if best_loss: + print('best ' + mode + ' loss: ', best_loss) + + +def set_requires_grad(network, requires_grad_flag): + for param in network.parameters(): + param.requires_grad = requires_grad_flag + + +def init_weights(net, init_type='normal', init_gain=0.02): + """Initialize network weights. + Parameters: + net (network) -- network to be initialized + init_type (str) -- the name of an initialization method: normal | xavier | kaiming | orthogonal + init_gain (float) -- scaling factor for normal, xavier and orthogonal. + We use 'normal' in the original pix2pix and CycleGAN paper. But xavier and kaiming might + work better for some applications. Feel free to try yourself. + """ + def init_func(m): # define the initialization function + classname = m.__class__.__name__ + if hasattr(m, 'weight') and (classname.find('Conv') != -1 or classname.find('Linear') != -1): + if init_type == 'normal': + nn.init.normal_(m.weight.data, 0.0, init_gain) + elif init_type == 'xavier': + nn.init.xavier_normal_(m.weight.data, gain=init_gain) + elif init_type == 'kaiming': + nn.init.kaiming_normal_(m.weight.data, a=0, mode='fan_in') + elif init_type == 'orthogonal': + nn.init.orthogonal_(m.weight.data, gain=init_gain) + else: + raise NotImplementedError('initialization method [%s] is not implemented' % init_type) + if hasattr(m, 'bias') and m.bias is not None: + nn.init.constant_(m.bias.data, 0.0) + elif classname.find('BatchNorm2d') != -1: # BatchNorm Layer's weight is not a matrix; only normal distribution applies. + nn.init.normal_(m.weight.data, 1.0, init_gain) + nn.init.constant_(m.bias.data, 0.0) + + print('initialize network with %s' % init_type) + net.apply(init_func) + + +if __name__=='__main__': + + parser = config_parser() + args = parser.parse_args() + + # check device + device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") + print("Running code on", device) + + # prepare data + train_loader = build_dataloader(args, mode='train', shuffle=False) + + # build the network + network = UNet(args).to(device) + init_weights(network) + + # make the networks multi-gpu available + if torch.cuda.device_count() > 1: + print("There are", torch.cuda.device_count(), "gpus available.") + network = nn.DataParallel(network) + + # Build loss function + loss_fn = Loss(args, device).to(device) + # loss_fn.eval() # make sure networks used in calculating loss (e.g. 
VGG) are in eval mode (because of batch normalization and dropout layers) + + #Build metrics functions + metrics_fn = Metrics(args, device) + metrics_fn.reset() + + # Build an optimizer + if args.optimizer == 'adam': + optimizer = torch.optim.Adam(network.parameters(), lr=args.lr, betas=(0.5, 0.999)) + elif args.optimizer == 'sgd': + optimizer = torch.optim.SGD(network.parameters(), lr=args.lr) + else: + raise NotImplementedError() + + # Learning rate decay + lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=args.lr_decay_steps, gamma=0.1) + + # Print number of parameters in the generator network + pytorch_generator_params = sum(p.numel() for p in network.parameters()) + print('Number of parameters in the generator network: ', pytorch_generator_params) + + # build discriminator if it is used + if args.use_discriminator: + discriminator = NLayerDiscriminator(input_nc=args.num_input_channels+args.num_output_channels, n_layers=3).to(device) + init_weights(discriminator) + set_requires_grad(discriminator, False) + loss_gan_fn = GANLoss().to(device) + if args.optimizer == 'adam': + optimizer_discriminator = torch.optim.Adam(discriminator.parameters(), lr=args.lr_disc, betas=(0.5, 0.999)) + elif args.optimizer == 'sgd': + optimizer_discriminator = torch.optim.SGD(discriminator.parameters(), lr=args.lr_disc) + pytorch_discriminator_params = sum(p.numel() for p in discriminator.parameters()) + print('Number of parameters in the discriminator network: ', pytorch_discriminator_params) + else: + discriminator = None + optimizer_discriminator = None + + best_val_loss = None + + # initialize wandb + wandb.init(project='GAN-video-synthesis', config=args, tags=["demo"], job_type='train', dir=args.wandb_dir) + args = wandb.config + wandb.watch(network) + + # Start the training process + start_train_disc = -1 + step = 0 + for epoch in range(args.continue_from_epoch, args.num_epochs): + print('Training epoch: ', epoch) + + network.train() + if args.use_discriminator: + discriminator.train() + + for batch_idx, data in enumerate(train_loader): + + inputs = data["input_image"].to(device) + prevs = data["prev_images"] + flows = data["flows"] + labels = data["output_image"].to(device) + + if args.use_label_maps: + label_maps = data["label_image"].to(device) + else: + label_maps = None + + prediction = network(inputs) + + # Loss on: generated VS target + loss_perceptual = loss_fn(prediction, labels) + + # Loss on: warped generated with opticalflow VS previous target frame + loss_temporal = torch.zeros_like(loss_perceptual) + + for flow, prev in zip(flows, prevs): + flow = flow.to(device) + prev = prev.to(device) + warped_prediction = backwarp(prediction, flow) + loss_temporal += loss_fn(warped_prediction, prev) + + if args.use_discriminator and epoch>start_train_disc: + + set_requires_grad(discriminator, True) + + real_pair = torch.cat((inputs, labels), 1) + pred_real = discriminator(real_pair, label_maps) + loss_real = loss_gan_fn(pred_real, real_image_flag=True) + + fake_pair = torch.cat((inputs, prediction.detach()), 1) + pred_fake = discriminator(fake_pair, label_maps) + loss_fake = loss_gan_fn(pred_fake, real_image_flag=False) + + loss_discriminator = (loss_fake + loss_real)*0.5 + + optimizer_discriminator.zero_grad() + loss_discriminator.backward() + optimizer_discriminator.step() + + set_requires_grad(discriminator, False) + + fake_pair = torch.cat((inputs, prediction), 1) + pred_fake = discriminator(fake_pair, label_maps) + + loss_gan = loss_gan_fn(pred_fake, real_image_flag=True) + + 
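+                # [Editor's note] Total generator objective as implemented above:
+                #     L = L_perceptual(G(x), y) + sum_k L_perceptual(warp_k(G(x)), y_prev_k) + L_gan
+                # i.e. image fidelity, optical-flow temporal consistency, and adversarial realism.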
loss = loss_perceptual + loss_temporal + loss_gan + else: + loss = loss_perceptual + loss_temporal + + optimizer.zero_grad() + loss.backward() + optimizer.step() + + step = step + 1 + + if batch_idx % args.log_every == 0: + if args.save_val_images: + image_concat = create_image_pair([inputs.detach(), prediction.detach(), labels.detach(), warped_prediction.detach(), prev.detach()]) + wandb_images = [wandb.Image(image) for image in image_concat] + if args.use_discriminator and epoch>start_train_disc: + wandb.log({'train loss perceptual': loss_perceptual, + 'train loss temporal': loss_temporal, + 'train loss gan': loss_gan, + 'discriminator train loss': loss_discriminator.detach(), + 'train images predictions': wandb_images, + 'discriminator loss real': loss_real, + 'discriminator loss fake': loss_fake}, step=step) + wandb.log({'train_loss': loss.detach()}, step=step) + else: + wandb.log({'train images predictions': wandb_images}, step=step) + wandb.log({'train loss perceptual': loss_perceptual, + 'train loss temporal': loss_temporal}, step=step) + wandb.log({'train_loss': loss.detach()}, step=step) + else: + if args.use_discriminator and epoch>start_train_disc: + wandb.log({'train loss perceptual': loss_perceptual, + 'train loss temporal': loss_temporal, + 'train loss gan': loss_gan, + 'discriminator train loss': loss_discriminator.detach(), + 'discriminator loss real': loss_real, + 'discriminator loss fake': loss_fake}, step=step) + wandb.log({'train_loss': loss.detach()}, step=step) + else: + wandb.log({'train loss perceptual': loss_perceptual, + 'train loss temporal': loss_temporal}, step=step) + wandb.log({'train_loss': loss.detach()}, step=step) + + if step == args.num_steps: + print(f'Breaking at {args.num_steps} steps.') + save_dir = save_model(network, epoch, optimizer, loss, best_val_loss, args, discriminator, optimizer_discriminator, best_model=False) + model_artifact = wandb.Artifact('last_model_train', type='model') + model_artifact.add_file(save_dir) + wandb.log_artifact(model_artifact) + exit() + + if epoch % args.save_every == 0: + save_dir = save_model(network, epoch, optimizer, loss, best_val_loss, args, discriminator, optimizer_discriminator, best_model=False) + model_artifact = wandb.Artifact('last_model_train', type='model') + model_artifact.add_file(save_dir) + wandb.log_artifact(model_artifact) + + lr_scheduler.step() + + save_dir = save_model(network, epoch, optimizer, loss, best_val_loss, args, discriminator, optimizer_discriminator, best_model=False) + model_artifact = wandb.Artifact('last_model_train', type='model') + model_artifact.add_file(save_dir) + wandb.log_artifact(model_artifact) diff --git a/motion-gan-pipeline/ImageToImage/train_temporal.py b/motion-gan-pipeline/ImageToImage/train_temporal.py new file mode 100644 index 0000000..0ec6f51 --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/train_temporal.py @@ -0,0 +1,486 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +from ast import arg +from email.policy import default +import os +import configargparse +import torch +import torch.nn as nn +from pathlib import Path +import wandb +from datasets.base import build_dataloader +from models.unet import UNet +from models.patch_gan_discriminator import NLayerDiscriminator +from models.video_discriminator import Discriminator_3D +from losses.loss import Loss +from losses.gan_loss import GANLoss +from utils.utils import create_image_pair +from metrics.metrics import Metrics +import shutil +import 
datetime +from tqdm import tqdm +import warnings +warnings.filterwarnings("ignore") + +def print_options(opt, parser): + """Print and save options + + It will print both current options and default values(if different). + It will save options into a text file / [checkpoints_dir] / opt.txt + """ + message = '' + message += '----------------- Options ---------------\n' + for k, v in sorted(vars(opt).items()): + comment = '' + default = parser.get_default(k) + if v != default: + comment = '\t[default: %s]' % str(default) + message += '{:>25}: {:<30}{}\n'.format(str(k), str(v), comment) + message += '----------------- End -------------------' + print(message) + + + file_name = os.path.join(opt.checkpoints_dir, 'GAN_config.txt') + with open(file_name, 'wt') as opt_file: + opt_file.write(message) + opt_file.write('\n') + + +def config_parser(): + parser = configargparse.ArgumentParser() + # config file + parser.add_argument('-c', '--my-config', required=True, is_config_file=True, help='config file path') + # training and model options + parser.add_argument("--optimizer", type=str, help='choose optimizer type', default='adam') + parser.add_argument("--num_epochs", type=int, default=61, help="number of epochs of training") + parser.add_argument("--num_steps", type=int, default=25000, help="number of steps of training") + parser.add_argument("--continue_from_epoch", type=int, default=0, help="Continue training from epoch (default=0)") + parser.add_argument("--batch_size", type=int, default=16, help="size of the batches") + parser.add_argument("--lr", type=float, default=0.001, help="learning rate") # 0.00001 + parser.add_argument("--lr_decay_steps", type=int, default=10, help="learning rate decay steps in epochs") + parser.add_argument("--lr_disc", type=float, default=0.001, help="discriminator learning rate") + parser.add_argument("--num_workers", type=int, default=0, help="number of workers to use during batch generation") + parser.add_argument("--num_input_channels", type=int, default=3, help="number of input image channels") + parser.add_argument("--num_output_channels", type=int, default=3, help="number of output image channels") + parser.add_argument("--loss_names", nargs="*", type=str, default=['perceptual_loss_vgg'], help="names of losses used in training") + parser.add_argument("--loss_weights", nargs="*", type=float, default=[1.0], help="weights assigned to losses in the order") + parser.add_argument("--metric_names", nargs="*", type=str, default=['mean_absolute_error'], help="names of metrics to be logged") + parser.add_argument("--metric_weights", nargs="*", type=float, default=[1.0], help="weights assigned to metrics in the order") + parser.add_argument("--use_discriminator", action='store_true', help="choose if to use discriminator network") + parser.add_argument("--use_video_discriminator", action='store_true', help="choose if to use video discriminator network") + parser.add_argument("--steps_train_video_discr", type=int, default=1, help='frequency of video discriminator training') + parser.add_argument("--loss_gan_video_weight", type=float, default=1, help='weight assigned to video discriminator loss') + parser.add_argument("--use_label_maps", action='store_true', help="choose if to use label maps for discriminator") + # dataset options + parser.add_argument("--dataset_type", type=str, help="options: CustomDataset", default='CustomDataset') + parser.add_argument("--input_train_root_dir", type=str, help="Path to training input images", default='./data/input_images_train') + 
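+    # [Editor's sketch, hypothetical paths] a typical invocation combining these options:
+    #     python train_temporal.py -c configs/avatar.txt --use_video_discriminator \
+    #         --input_train_root_dir ./data/input_images_train \
+    #         --output_train_root_dir ./data/output_images_train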
parser.add_argument("--output_train_root_dir", type=str, help="Path to training output images", default='./data/output_images_train') + parser.add_argument("--label_train_root_dir", type=str, help="Path to training label images", default='./data/label_images_train') + parser.add_argument("--input_val_root_dir", type=str, help="Path to val input images", default=None) + parser.add_argument("--output_val_root_dir", type=str, help="Path to val output images", default=None) + parser.add_argument("--label_val_root_dir", type=str, help="Path to val label images", default=None) + parser.add_argument("--width", type=int, default=640, help="width") + parser.add_argument("--height", type=int, default=360, help="height") + # logging/saving options + parser.add_argument("--checkpoints_dir", type=str, help='specify the directory to save the model', default='./chkpts/experiment/') + parser.add_argument("--save_every", type=int, help='save model every # epochs', default=5) + parser.add_argument("--log_every", type=int, help='save logs every # batches', default=100) + parser.add_argument("--val_every", type=int, help='validate model every # epoch', default=0) + parser.add_argument("--save_val_images", action='store_true', help='choose if you want to save validation images') + parser.add_argument("--save_train_images", action='store_true', help='choose if you want to save train images') + parser.add_argument("--wandb_dir", type=str, help="directory where to save wandb data locally", default='./wandb') + parser.add_argument("--skip_log", action='store_true', help='choose if you want to stop wandb monitoring') + return parser + + +def save_model(network, epoch, optimizer, + loss, best_val_loss, args, + discriminator=None, optimizer_disc=None, + video_discriminator=None, optimizer_video_discriminator=None, + best_model=False): + + print('Saving model epoch: ', epoch) + if not Path(args.checkpoints_dir).exists(): + Path(args.checkpoints_dir).mkdir(exist_ok=True) + + if type(epoch) == str: + save_dir = Path(args.checkpoints_dir, epoch + '.pt') + else: + save_dir = Path(args.checkpoints_dir, 'GAN_epoch_%03d.pt' % epoch) + + if args.use_discriminator: + torch.save({'epoch': epoch, + 'model_state_dict': network.state_dict(), + 'discriminator_state_dict': discriminator.state_dict(), + 'optimizer_state_dict': optimizer.state_dict(), + 'optimizer_disc_state_dict': optimizer_disc.state_dict(), + 'loss': loss, + 'best_val_loss': best_val_loss}, + save_dir) + + elif args.use_video_discriminator: + torch.save({'epoch': epoch, + 'model_state_dict': network.state_dict(), + 'optimizer_state_dict': optimizer.state_dict(), + 'video_discriminator_state_dict': video_discriminator.state_dict(), + 'video_optimizer_disc_state_dict': optimizer_video_discriminator.state_dict(), + 'loss': loss, + 'best_val_loss': best_val_loss}, + save_dir) + + else: + torch.save({'epoch': epoch, + 'model_state_dict': network.state_dict(), + 'optimizer_state_dict': optimizer.state_dict(), + 'loss': loss, + 'best_val_loss': best_val_loss}, + save_dir) + + return save_dir + + +def print_progress(loss, best_loss = None, mode='train'): + print(mode + ' loss: ', loss) + if best_loss: + print('best ' + mode + ' loss: ', best_loss) + + +def set_requires_grad(network, requires_grad_flag): + for param in network.parameters(): + param.requires_grad = requires_grad_flag + + +def init_weights(net, init_type='normal', init_gain=0.02): + """Initialize network weights. 
+ Parameters: + net (network) -- network to be initialized + init_type (str) -- the name of an initialization method: normal | xavier | kaiming | orthogonal + init_gain (float) -- scaling factor for normal, xavier and orthogonal. + We use 'normal' in the original pix2pix and CycleGAN paper. But xavier and kaiming might + work better for some applications. Feel free to try yourself. + """ + def init_func(m): # define the initialization function + classname = m.__class__.__name__ + if hasattr(m, 'weight') and (classname.find('Conv') != -1 or classname.find('Linear') != -1): + if init_type == 'normal': + nn.init.normal_(m.weight.data, 0.0, init_gain) + elif init_type == 'xavier': + nn.init.xavier_normal_(m.weight.data, gain=init_gain) + elif init_type == 'kaiming': + nn.init.kaiming_normal_(m.weight.data, a=0, mode='fan_in') + elif init_type == 'orthogonal': + nn.init.orthogonal_(m.weight.data, gain=init_gain) + else: + raise NotImplementedError('initialization method [%s] is not implemented' % init_type) + if hasattr(m, 'bias') and m.bias is not None: + nn.init.constant_(m.bias.data, 0.0) + elif classname.find('BatchNorm2d') != -1: # BatchNorm Layer's weight is not a matrix; only normal distribution applies. + nn.init.normal_(m.weight.data, 1.0, init_gain) + nn.init.constant_(m.bias.data, 0.0) + + print('initialize network with %s' % init_type) + net.apply(init_func) + + +if __name__=='__main__': + + parser = config_parser() + args = parser.parse_args() + + # TODO: remove after sweep + # make new checkpoint with time + # ct = datetime.datetime.now() + # args.checkpoints_dir = args.checkpoints_dir + '_' + '%02d'%ct.day + '%02d'%ct.month + '%04d'%ct.year + '_' + '%02d'%ct.hour + '%02d'%ct.minute + '%02d'%ct.second + os.makedirs(args.checkpoints_dir, exist_ok=True) + + # copy config in checkpoint folder + print_options(args, parser) + + # check device + device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu") + print("Running code on", device) + + # prepare data + train_loader = build_dataloader(args, mode='train', shuffle=False) + if args.val_every != 0: + val_loader = build_dataloader(args, mode='val', shuffle=False) + else: + val_loader = None + + # build the network + network = UNet(args).to(device) + init_weights(network) + + # make the networks multi-gpu available + if torch.cuda.device_count() > 1: + print("There are", torch.cuda.device_count(), "gpus available.") + network = nn.DataParallel(network) + + # Build loss function + loss_fn = Loss(args, device).to(device) + loss_fn.eval() # make sure networks used in calculating loss (e.g. 
VGG) are in eval mode (because of batch normalization and dropout layers) + + # Build metrics functions + metrics_fn = Metrics(args, device) + metrics_fn.reset() + + # Build an optimizer + if args.optimizer == 'adam': + optimizer = torch.optim.Adam(network.parameters(), lr=args.lr, betas=(0.5, 0.999)) + elif args.optimizer == 'sgd': + optimizer = torch.optim.SGD(network.parameters(), lr=args.lr) + else: + raise NotImplementedError() + + # Learning rate decay + lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=args.lr_decay_steps, gamma=0.1) + + # Print number of parameters in the generator network + pytorch_generator_params = sum(p.numel() for p in network.parameters()) + print('Number of parameters in the generator network: ', pytorch_generator_params) + + # build discriminator if it is used + if args.use_discriminator: + discriminator = NLayerDiscriminator(input_nc=args.num_input_channels+args.num_output_channels, n_layers=3).to(device) + init_weights(discriminator) + set_requires_grad(discriminator, False) + loss_gan_fn = GANLoss().to(device) + if args.optimizer == 'adam': + optimizer_discriminator = torch.optim.Adam(discriminator.parameters(), lr=args.lr_disc, betas=(0.5, 0.999)) + elif args.optimizer == 'sgd': + optimizer_discriminator = torch.optim.SGD(discriminator.parameters(), lr=args.lr_disc) + pytorch_discriminator_params = sum(p.numel() for p in discriminator.parameters()) + print('Number of parameters in the discriminator network: ', pytorch_discriminator_params) + # Learning rate decay + lr_scheduler_discr = torch.optim.lr_scheduler.StepLR(optimizer_discriminator, step_size=args.lr_decay_steps, gamma=0.1) + + else: + discriminator = None + optimizer_discriminator = None + lr_scheduler_discr = None + + # build video discriminator if it is used + if args.use_video_discriminator: + video_discriminator = Discriminator_3D(d_conv_dim=96).to(device) # TODO: test + init_weights(video_discriminator) + set_requires_grad(video_discriminator, False) + loss_gan_fn = GANLoss().to(device) + if args.optimizer == 'adam': + optimizer_video_discriminator = torch.optim.Adam(video_discriminator.parameters(), lr=args.lr_disc, betas=(0.5, 0.999)) + elif args.optimizer == 'sgd': + optimizer_video_discriminator = torch.optim.SGD(video_discriminator.parameters(), lr=args.lr_disc) + pytorch_video_discriminator_params = sum(p.numel() for p in video_discriminator.parameters()) + print('Number of parameters in the video discriminator network: ', pytorch_video_discriminator_params) + # Learning rate decay + lr_scheduler_video_discr = torch.optim.lr_scheduler.StepLR(optimizer_video_discriminator, step_size=args.lr_decay_steps, gamma=1) # gamma=1 for no updates in lr + + else: + video_discriminator = None + optimizer_video_discriminator = None + lr_scheduler_video_discr = None + + best_val_loss = None + + # initialize wandb + if not args.skip_log: + wandb.init(project='GAN-video-synthesis', config=args, tags=["demo"], job_type='train', dir=args.wandb_dir) + args = wandb.config + wandb.watch(network) + + # Start the training process + start_train_disc = -1 + step = 0 + for epoch in tqdm(range(args.continue_from_epoch, args.num_epochs), desc='Epoch: '): + # print('Training epoch: ', epoch) + network.train() + if args.use_discriminator: + discriminator.train() + + # Training loop + for batch_idx, data in enumerate(train_loader): + + inputs = data["input_image"].to(device) + labels = data["output_image"].to(device) + + if args.use_label_maps: + label_maps = data["label_image"].to(device) + 
else:
+                label_maps = None
+
+            prediction = network(inputs)
+
+            loss_perceptual = loss_fn(prediction, labels)
+
+            loss = loss_perceptual
+
+            if args.use_discriminator and epoch>start_train_disc:
+
+                set_requires_grad(discriminator, True)
+
+                real_pair = torch.cat((inputs, labels), 1)
+                pred_real = discriminator(real_pair, label_maps)
+                loss_real = loss_gan_fn(pred_real, real_image_flag=True)
+
+                fake_pair = torch.cat((inputs, prediction.detach()), 1)
+                pred_fake = discriminator(fake_pair, label_maps)
+                loss_fake = loss_gan_fn(pred_fake, real_image_flag=False)
+
+                loss_discriminator = (loss_fake + loss_real)*0.5
+
+                optimizer_discriminator.zero_grad()
+                loss_discriminator.backward()
+                optimizer_discriminator.step()
+
+                set_requires_grad(discriminator, False)
+
+                fake_pair = torch.cat((inputs, prediction), 1)
+                pred_fake = discriminator(fake_pair, label_maps)
+
+                loss_gan = loss_gan_fn(pred_fake, real_image_flag=True)
+                loss += loss_gan
+
+            if args.use_video_discriminator and epoch>start_train_disc:
+
+                # Train discriminator
+                if (batch_idx % args.steps_train_video_discr) == 0:
+                    set_requires_grad(video_discriminator, True)
+
+                    # the dataloader is built with shuffle=False, so a batch holds consecutive
+                    # frames; unsqueeze(0) treats it as one clip of shape (1, T, C, H, W)
+                    pred_real = video_discriminator(labels.unsqueeze(0))
+                    loss_video_real = loss_gan_fn(pred_real, real_image_flag=True)
+
+                    pred_fake = video_discriminator(prediction.unsqueeze(0).detach())
+                    loss_video_fake = loss_gan_fn(pred_fake, real_image_flag=False)
+
+                    loss_video_discriminator = (loss_video_real + loss_video_fake)*0.5
+
+                    # print(loss_video_discriminator.item(), loss_video_fake.item(), loss_video_real.item())
+
+                    optimizer_video_discriminator.zero_grad()
+                    loss_video_discriminator.backward()
+                    optimizer_video_discriminator.step()
+
+
+                # Train generator
+                set_requires_grad(video_discriminator, False)
+
+                # NOTE: the prediction must not be detached here, otherwise the adversarial
+                # gradient cannot flow back into the generator and this loss term has no effect
+                pred_fake = video_discriminator(prediction.unsqueeze(0))
+                loss_gan_video = loss_gan_fn(pred_fake, real_image_flag=True)
+                # print('Perceptual loss: ', loss_perceptual.item())
+                # print('GAN video loss: ', loss_gan_video.item())
+
+                loss += loss_gan_video * args.loss_gan_video_weight
+
+            optimizer.zero_grad()
+            loss.backward()
+            optimizer.step()
+
+            step = step + 1
+
+            if not args.skip_log and batch_idx % args.log_every == 0:
+                if args.save_train_images:
+                    image_concat = create_image_pair([inputs.detach(), prediction.detach(), labels.detach()])
+                    wandb_images = [wandb.Image(image) for image in image_concat]
+                    wandb.log({'train images predictions': wandb_images}, step=step)
+
+                wandb.log({'train_loss': loss.detach()}, step=step)
+                wandb.log({'train loss perceptual': loss_perceptual.detach()}, step=step)
+
+                if args.use_discriminator and epoch>start_train_disc:
+                    wandb.log({ 'train loss gan': loss_gan,
+                                'discriminator train loss': loss_discriminator.detach(),
+                                'discriminator loss real': loss_real,
+                                'discriminator loss fake': loss_fake}, step=step)
+
+                if args.use_video_discriminator and epoch>start_train_disc:
+                    try:
+                        wandb.log({ 'train loss gan video': loss_gan_video,
+                                    'video discriminator train loss': loss_video_discriminator.detach(),
+                                    'video discriminator loss real': loss_video_real,
+                                    'video discriminator loss fake': loss_video_fake}, step=step)
+                    except NameError:
+                        wandb.log({ 'train loss gan video': loss_gan_video}, step=step)
+
+            if step == args.num_steps:
+                print(f'Breaking at {args.num_steps} steps.')
+                save_dir = save_model(network, 'latest_GAN_model', optimizer, loss, best_val_loss, args, discriminator, optimizer_discriminator, video_discriminator, optimizer_video_discriminator, best_model=False)
+                # model_artifact =
wandb.Artifact('last_model_train', type='model') + # model_artifact.add_file(save_dir) + # wandb.log_artifact(model_artifact) + exit() + + # Save model + if epoch % args.save_every == 0 and epoch > 0: + save_dir = save_model(network, epoch, optimizer, loss, best_val_loss, args, discriminator, optimizer_discriminator, video_discriminator, optimizer_video_discriminator, best_model=False) + # model_artifact = wandb.Artifact('last_model_train', type='model') + # model_artifact.add_file(save_dir) + # wandb.log_artifact(model_artifact) + + # Update learning rates + lr_scheduler.step() + if args.use_discriminator: + lr_scheduler_discr.step() + if args.use_video_discriminator: + lr_scheduler_video_discr.step() + + # Validation + if args.val_every != 0 and epoch % args.val_every == 0 and epoch > 0: + print('Validating epoch: ', epoch) + val_loss = 0 + count = 0 + metrics_fn.reset() + + with torch.no_grad(): + network.eval() + for batch_idx, data in enumerate(val_loader): + + inputs = data["input_image"].to(device) + labels = data["output_image"].to(device) + + prediction = network(inputs) + + # compute metrics + metrics_fn.update(prediction, labels) + + # compute losses + loss_perceptual_val = loss_fn(prediction, labels) + loss_val = loss_perceptual_val + + if args.use_video_discriminator and epoch>start_train_disc: + + pred_real_val = video_discriminator(labels.unsqueeze(0)) + loss_video_real_val = loss_gan_fn(pred_real_val, real_image_flag=True) + + pred_fake_val = video_discriminator(prediction.unsqueeze(0)) + loss_video_fake_val = loss_gan_fn(pred_fake_val, real_image_flag=False) + + loss_video_discriminator_val = (loss_video_real_val + loss_video_fake_val)*0.5 + + loss_gan_video_val = loss_gan_fn(pred_fake_val, real_image_flag=True) + + loss_val += loss_gan_video_val * args.loss_gan_video_weight + + val_loss += loss_val.detach() + count += 1 + + avg_val_loss = val_loss/count + + metrics_log, metrics_combined = metrics_fn.compute() + + if not args.skip_log: + if args.save_val_images: + image_concat = create_image_pair([inputs.detach(), prediction.detach(), labels.detach()]) + wandb_images = [wandb.Image(image) for image in image_concat] + wandb.log({'val loss': avg_val_loss, 'validation images predictions': wandb_images}, step=step) + wandb.log(metrics_log, step=step) + wandb.log({'metrics_combined': metrics_combined}, step=step) + + else: + wandb.log({'val loss': avg_val_loss}, step=step) + wandb.log(metrics_log, step=step) + wandb.log({'metrics_combined': metrics_combined}, step=step) + + save_dir = save_model(network, 'latest_GAN_model', optimizer, loss, best_val_loss, args, discriminator, optimizer_discriminator, video_discriminator, optimizer_video_discriminator, best_model=False) + # model_artifact = wandb.Artifact('last_model_train', type='model') + # model_artifact.add_file(save_dir) + # wandb.log_artifact(model_artifact) diff --git a/motion-gan-pipeline/ImageToImage/utils/__init__.py b/motion-gan-pipeline/ImageToImage/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/motion-gan-pipeline/ImageToImage/utils/optical_flow.py b/motion-gan-pipeline/ImageToImage/utils/optical_flow.py new file mode 100644 index 0000000..93628a6 --- /dev/null +++ b/motion-gan-pipeline/ImageToImage/utils/optical_flow.py @@ -0,0 +1,18 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import torch + +backwarp_tenGrid = {} +def backwarp(tenInput, tenFlow): + if str(tenFlow.shape) not in backwarp_tenGrid: + tenHor = 
torch.linspace(-1.0 + (1.0 / tenFlow.shape[3]), 1.0 - (1.0 / tenFlow.shape[3]), tenFlow.shape[3]).view(1, 1, 1, -1).repeat(1, 1, tenFlow.shape[2], 1)
+        tenVer = torch.linspace(-1.0 + (1.0 / tenFlow.shape[2]), 1.0 - (1.0 / tenFlow.shape[2]), tenFlow.shape[2]).view(1, 1, -1, 1).repeat(1, 1, 1, tenFlow.shape[3])
+
+        # use .device instead of .get_device() so this also works for CPU tensors
+        backwarp_tenGrid[str(tenFlow.shape)] = torch.cat([ tenHor, tenVer ], 1).to(tenInput.device)
+    # end
+
+    tenFlow = torch.cat([ tenFlow[:, 0:1, :, :] / ((tenInput.shape[3] - 1.0) / 2.0), tenFlow[:, 1:2, :, :] / ((tenInput.shape[2] - 1.0) / 2.0) ], 1)
+
+    return torch.nn.functional.grid_sample(input=tenInput, grid=(backwarp_tenGrid[str(tenFlow.shape)] + tenFlow).permute(0, 2, 3, 1), mode='bilinear', padding_mode='border', align_corners=False)
+# end
diff --git a/motion-gan-pipeline/ImageToImage/utils/utils.py b/motion-gan-pipeline/ImageToImage/utils/utils.py
new file mode 100644
index 0000000..b0f914c
--- /dev/null
+++ b/motion-gan-pipeline/ImageToImage/utils/utils.py
@@ -0,0 +1,31 @@
+import PIL
+import torch
+from torchvision import transforms
+import wandb
+import configargparse
+from pathlib import Path
+import os
+
+def create_image_pair(images_to_concat):
+
+    num_images = images_to_concat[0].shape[0]
+    images = []
+    for image_num in range(num_images):
+        image = images_to_concat[0][image_num,::]
+        if image.shape[0] == 1:
+            image = torch.cat((image,)*3, axis=0)
+        for img_num, img in enumerate(images_to_concat):
+            if img_num != 0:
+                image_to_concat = img[image_num,::]
+                if image_to_concat.shape[0] == 1:
+                    # replicate single-channel images along the channel dimension (dim 0),
+                    # matching the handling of the first image above
+                    image_to_concat = torch.cat((image_to_concat,)*3, axis=0)
+                image = torch.cat((image, image_to_concat), dim=2)
+        image_transform = transforms.ToPILImage()
+        image = image_transform(image)
+        images.append(image)
+    return images
+
+
+def save_image_list(image_list, save_path, names):
+    for image, name in zip(image_list, names):
+        image.save(os.path.join(save_path, name))
diff --git a/motion-gan-pipeline/LICENSE b/motion-gan-pipeline/LICENSE
new file mode 100644
index 0000000..352e419
--- /dev/null
+++ b/motion-gan-pipeline/LICENSE
@@ -0,0 +1,21 @@
+MIT License
+
+Copyright (c) 2023 Media Technology Center, ETH Zurich
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
diff --git a/motion-gan-pipeline/README.md b/motion-gan-pipeline/README.md
new file mode 100644
index 0000000..4de7d08
--- /dev/null
+++ b/motion-gan-pipeline/README.md
@@ -0,0 +1,278 @@
+# Audio-Driven Video Synthesis
+
+This repository contains the new end-to-end "Audio Motion GAN pipeline" for the Audio-driven Video Synthesis project.
+
+## Requirements
+
+Please clone this repository with the following commands in order to download all the related submodules:
+```bash
+git clone https://gitlab.ethz.ch/mtc/video-synthesis/motion-gan-pipeline
+cd motion-gan-pipeline
+git submodule init
+git submodule update
+export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
+```
+
+Create a [conda](https://www.anaconda.com) environment:
+```bash
+conda create -n deepspeech python=3.7
+```
+
+To install the requirements follow these instructions:
+```bash
+conda activate deepspeech
+
+conda install pysoundfile -c conda-forge
+# Install Pytorch and cudatoolkit
+pip install torch==1.9.1+cu111 torchvision==0.10.1+cu111 torchaudio==0.9.1 -f https://download.pytorch.org/whl/torch_stable.html
+# Install the rest of the pip requirements
+pip install -r requirements_deepspeech.txt
+
+conda deactivate
+```
+
+Create a second [conda](https://www.anaconda.com) environment:
+```bash
+conda create -n pyenv python=3.9
+```
+
+To install its requirements follow these instructions:
+```bash
+conda activate pyenv
+
+conda install pysoundfile -c conda-forge
+# Install Pytorch and cudatoolkit
+conda install pytorch=1.13.0 torchvision pytorch-cuda=11.6 -c pytorch -c nvidia
+# Install ffmpeg for video editing
+conda install x264=='1!152.20180717' ffmpeg=4.0.2 -c conda-forge
+conda install -c fvcore -c iopath -c conda-forge fvcore iopath
+# Install pytorch3d
+conda install pytorch3d -c pytorch3d
+# Install the rest of the pip requirements
+pip install -r requirements.txt
+
+# Set up the enhancement tool
+cd GFPGAN
+python setup.py develop
+cd ..
+
+conda deactivate
+```
+
+## Goal
+
+This project is designed to generate realistic videos of human avatars. Given an audio recording and a chosen video of a target person, the code produces a new video of that person speaking the given audio.
+
+The produced video is fully artificial: everything from the lip movements, to the head motion, to the individual frames of the video is completely generated. \
+Once an avatar has been trained, it can be reused with any audio source.
+
+[![Demo video](https://img.youtube.com/vi/7zK_Wmpxrd0/0.jpg)](https://youtu.be/7zK_Wmpxrd0)
+
+## Model Structure
+
+You can observe the pipeline structure in the following image.
+
+*(image: pipeline structure diagram)*
+
+- Audio2Expressions \
+  The Audio2Expressions model is a network trained to generate FLAME facial expressions from the extracted DeepSpeech audio features. The Audio2Expressions model is inspired by [Neural Voice Puppetry](https://github.com/miu200521358/NeuralVoicePuppetryMMD). This network produces the corresponding mouth movements from the audio signal and is responsible for lip-sync.
+
+- Motion Generation \
+  The Motion Generation model is a network trained to generate rotation and translation matrices from the extracted DeepSpeech audio features. This model predicts the head motion corresponding to a given audio input. The Motion Generation model is inspired by [Live-Speech Portraits](https://github.com/YuanxunLu/LiveSpeechPortraits). This network produces the head movements from the audio signal and is responsible for the overall head motion during the video.
+
+- Edges Generation \
+  The Edge Generation model combines the outputs of the Audio2Expressions and Motion Generation models to render a FLAME facial mesh. Key landmarks are projected and rendered.
+
+## Data
+If you plan to use this code with the already available, pre-trained moderators, you only have to provide the audio data.
+
+Please follow these instructions on data quality:
+- Audio
+  - Provide a recording of a person speaking (audio of any duration is accepted).
+  - The cleaner the audio signal the better: audio with background noise will result in mismatched lip-sync.
+  - Avoid recording multiple people talking: the model cannot distinguish between multiple voice signals.
+- Video
+  - Provide a video of your desired avatar character talking.
+  - Minimum video duration: 3 minutes.
+  - Longer videos will result in longer training times.
+  - The background is irrelevant; it will be removed during preprocessing.
+  - Avoid any hand or arm movements: they interfere with the quality of the generated frames during training.
+  - Provide a half-body shot, ideally frontal.
+
+Refer to the following image for shot instructions.
+
+<img src="media/pose.png" alt="drawing"/>
+
+## Data structure
+
+Your data should be organised in the following structure:
+
+```bash
+$DATAROOT/input_data
+├── audio                  # Folder containing n audio files
+│   ├── audio_1
+│   │   └── audio_1.wav
+│   ...
+│   └── audio_n
+│       └── audio_n.wav
+└── video                  # Folder containing m video files
+    ├── video_1
+    │   └── video_1.mp4
+    ...
+    └── video_m
+        └── video_m.mp4
+```
+
+## Usage
+The same command can be used for both training and inference. If the trained checkpoints for the requested avatar are not found, the code will launch the training procedure.
+
+Use the following call to launch the code:
+```bash
+bash full_pipeline.sh $DATAROOT $AUDIO_NAME $VIDEO_NAME
+```
+Where:
+- $DATAROOT: path to the data folder.
+- $AUDIO_NAME: name of the audio file (without extension).
+- $VIDEO_NAME: name of the video file (without extension). This will also be the name of the Avatar.
+
+If you want to launch the enhancement version, use this call instead:
+```bash
+bash full_pipeline_enhancement.sh $DATAROOT $AUDIO_NAME $VIDEO_NAME
+```
+
+## Train new Avatars
+To train a new avatar, follow these steps:
+
+1. Record a new video following the instructions above. \
+   E.g.: VIDEONAME = my_new_avatar
+2. Move your file into the video subfolder of your data folder.
+   ```bash
+   mkdir -p $DATAROOT/input_data/video/my_new_avatar
+   mv my_new_avatar.mp4 $DATAROOT/input_data/video/my_new_avatar/my_new_avatar.mp4
+   ```
+3. Choose any audio file. \
+   E.g.: AUDIONAME = my_audio_sample
+
+4. Start training (and inference) with the following command:
+   ```bash
+   bash full_pipeline.sh $DATAROOT my_audio_sample my_new_avatar
+   ```
+5. Wait until it's finished.
+6. Enjoy your new Avatar! You can now use the same avatar with any other audio file; a complete example session is shown below.
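+
+Putting the steps together, a typical session could look like this (paths are illustrative):
+
+```bash
+export DATAROOT=/data/avatar_data   # illustrative location
+
+mkdir -p $DATAROOT/input_data/video/my_new_avatar $DATAROOT/input_data/audio/my_audio_sample
+mv my_new_avatar.mp4 $DATAROOT/input_data/video/my_new_avatar/
+mv my_audio_sample.wav $DATAROOT/input_data/audio/my_audio_sample/
+
+# First run: trains the avatar, then synthesizes the video
+bash full_pipeline.sh $DATAROOT my_audio_sample my_new_avatar
+
+# Later runs with new audio reuse the trained checkpoints
+mkdir -p $DATAROOT/input_data/audio/another_take
+mv another_take.wav $DATAROOT/input_data/audio/another_take/
+bash full_pipeline.sh $DATAROOT another_take my_new_avatar
+```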
+
+## Output Data
+Running this pipeline will generate the following data:
+
+```bash
+$DATAROOT
+├── input_data                    # Folder containing all input data
+│   ├── audio                     # Folder containing audio files
+│   │   └── my_audio_sample       # Folder generated by running the pipeline: contains the processed data for the "my_audio_sample" audio
+│   └── video                     # Folder containing video files
+│       └── my_new_avatar         # Folder generated by running the pipeline: contains the processed data for the "my_new_avatar" video
+│
+├── checkpoints                   # Folder containing all checkpoints of trained avatars
+│   └── my_new_avatar             # Folder containing checkpoints of the "my_new_avatar" Avatar
+│       ├── GAN_config.txt        # GAN configuration parameters during training
+│       ├── head2body.pkl         # checkpoint for head position to body position regression
+│       ├── head2body_test.png    # result of the regression
+│       ├── latest_Audio2Headpose.pkl  # checkpoint for the finetuned audio to head-motion network
+│       ├── latest_GAN_model.pt   # checkpoint for the GAN network
+│       ├── logs                  # folder containing all training logs during GAN training
+│       └── train_opt.txt         # audio to head-motion configuration parameters during finetuning
+│
+└── output_data                   # Folder containing all output data
+    └── my_audio_sample_to_my_new_avatar  # Folder generated by running the pipeline command
+        ├── generated_video.mp4   # Generated video
+        └── generated_superes.mp4 # Generated enhanced video (only with the enhancement version)
+```
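+
+To sanity-check a finished run, you can inspect the generated file with ffmpeg's `ffprobe` (path illustrative):
+
+```bash
+ffprobe -v error -show_entries format=duration,size -of default=noprint_wrappers=1 \
+    $DATAROOT/output_data/my_audio_sample_to_my_new_avatar/generated_video.mp4
+```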
+
+## LICENSE
+
+The AMG Pipeline is free and open source! All code in this repository is licensed under:
+
+* [MIT](LICENSE) License.
+
+This pipeline relies on and is inspired by the following works, and therefore refers to their individual licenses:
+
+- [GFPGAN](https://github.com/TencentARC/GFPGAN): [license](https://github.com/TencentARC/GFPGAN/blob/master/LICENSE), internal path `/GFPGAN`.
+- [Live Speech Portraits](https://github.com/YuanxunLu/LiveSpeechPortraits): [license](https://github.com/YuanxunLu/LiveSpeechPortraits/blob/main/LICENSE), internal path `/motion-generation`.
+- [Neural Voice Puppetry](https://web.archive.org/web/20201113014501/https://github.com/JustusThies/NeuralVoicePuppetry): [license](https://gitlab.ethz.ch/mtc/video-synthesis/.NeuralVoicePuppetry/-/blob/0fc75ba2edfdfd5655984f1515bfe06de880d91d/LICENSE), internal path `/preprocessing/third/Audio2ExpressionNet`.
+- [DECA](https://github.com/YadiraF/DECA): [license](https://github.com/YadiraF/DECA/blob/master/LICENSE), internal path `/preprocessing/third/DECA`.
+- [Robust Video Matting](https://github.com/PeterL1n/RobustVideoMatting): [license](https://github.com/PeterL1n/RobustVideoMatting/blob/master/LICENSE), internal path `/preprocessing/third/RobustVideoMatting`.
+- [VOCA](https://github.com/TimoBolkart/voca): [license](https://voca.is.tue.mpg.de/license), used internally by DECA.
+- [AD-NeRF](https://github.com/YudongGuo/AD-NeRF): [license](https://github.com/YudongGuo/AD-NeRF/blob/master/LICENSE), internal paths `/preprocessing/face_parsing` and `/preprocessing/face_tracking`.
+- [DeepSpeech](https://github.com/mozilla/DeepSpeech): [license](https://github.com/mozilla/DeepSpeech/blob/master/LICENSE), used by VOCA.
+- [CycleGAN and Pix2Pix](https://github.com/junyanz/pytorch-CycleGAN-and-pix2pix): [license](https://github.com/junyanz/pytorch-CycleGAN-and-pix2pix/blob/master/LICENSE), used as skeleton for GAN training.
+
+## Contact information
+
+Alberto Pennino: [alberto.pennino@inf.ethz.ch](mailto:alberto.pennino@inf.ethz.ch)
\ No newline at end of file
diff --git a/motion-gan-pipeline/face2body.py b/motion-gan-pipeline/face2body.py
new file mode 100644
index 0000000..54a10de
--- /dev/null
+++ b/motion-gan-pipeline/face2body.py
@@ -0,0 +1,174 @@
+# SPDX-License-Identifier: MIT
+# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details
+
+from math import floor
+import os
+from PIL import Image
+import numpy as np
+import argparse
+from tqdm import tqdm
+import matplotlib
+matplotlib.use('agg')
+import matplotlib.pyplot as plt
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+
+from torch.utils.data import Dataset
+from torch.utils.data import DataLoader
+from torchvision import transforms
+
+class LandmarkDataset(Dataset):
+    def __init__(self, landmarks_dir, body_dir, im_size, split):
+        """
+        Args:
+            landmarks_dir (string): Directory with the projected face landmarks (.lms files).
+            body_dir (string): Directory with the extracted body poses (.npy files).
+            im_size (tuple): Image size (width, height) used to normalise the coordinates.
+            split (string): 'Train' or 'Test'.
+        """
+        self.landmarks_dir = landmarks_dir
+        self.body_dir = body_dir
+        self.w, self.h = im_size
+
+        # set split for train test
+        self.split_ratio = 0.8
+        self.split = split
+
+        self.total_num = len(os.listdir(self.landmarks_dir))
+
+    def __len__(self):
+
+        if self.split == 'Train':
+            return int(floor(self.total_num * self.split_ratio))
+
+        else:
+            return int(floor(self.total_num * (1 - self.split_ratio)))
+
+    def __getitem__(self, idx):
+
+        if self.split == 'Train':
+            base = 0
+
+        else:
+            base = int(floor(self.total_num * self.split_ratio))
+
+        idx += base
+
+        landmark_path = os.path.join(self.landmarks_dir, '%05d.lms' % idx)
+        body_path = os.path.join(self.body_dir, '%05d.npy' % idx)
+
+        input_ldk = np.loadtxt(landmark_path)[:, :-1]
+        output_ldk = np.load(body_path)[:, :-2]
+        # keep the shoulders (11, 12) and hips (23, 24) from the Mediapipe body pose
+        output_ldk = np.array([output_ldk[11], output_ldk[12], output_ldk[23], output_ldk[24]])
+
+        # scale
+        input_ldk[:, 0] = input_ldk[:, 0] / self.w
+        input_ldk[:, 1] = input_ldk[:, 1] / self.h
+
+        output_ldk[:, 0] = output_ldk[:, 0] / self.w
+        output_ldk[:, 1] = output_ldk[:, 1] / self.h
+
+        trans = transforms.ToTensor()
+        sample = {
+            'input_ldk': trans(input_ldk).flatten().float(),
+            'output_ldk': trans(output_ldk).flatten().float(),
+        }
+
+        return sample
+
+
+def train_regression_head2body(landmarks_dir, body_dir, checkpoint_path, im_size):
+
+    train_dataset = LandmarkDataset(landmarks_dir, body_dir, im_size, "Train")
+    train_dataloader = DataLoader(train_dataset, batch_size=len(train_dataset))
+
+    test_dataset = LandmarkDataset(landmarks_dir, body_dir, im_size, "Test")
+    test_dataloader = DataLoader(test_dataset, batch_size=len(test_dataset))
+
+    # check device
+    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
+    # print("Running on", device)
+
+    # Make linear model
+    model = nn.Linear(77 * 2, 4 * 2).to(device)  # 77 landmarks with 2 coordinates (x, y) -> 4 landmarks with 2 coordinates (x, y)
+
+    # Optimizer
+    opt = torch.optim.SGD(model.parameters(), lr=0.1)
+    lr_scheduler = torch.optim.lr_scheduler.StepLR(opt, step_size=5, gamma=0.1)
+
+    # Loss Function
+    loss_fn = F.mse_loss
+
+    # Run optimisation
+    for epoch in tqdm(range(25)):
+        for batch in train_dataloader:
+            # Generate predictions
+            input_ldk = batch['input_ldk'].to(device)
+            output_ldk = batch['output_ldk'].to(device)
+            pred = model(input_ldk)
+
+            # compute loss
+
loss = loss_fn(pred, output_ldk) + + # Perform gradient descent + loss.backward() + opt.step() + opt.zero_grad() + + lr_scheduler.step() + + if epoch % 10 == 0: + with torch.no_grad(): + total_loss = 0. + for batch in test_dataloader: + # Generate predictions + input_ldk = batch['input_ldk'].to(device) + output_ldk = batch['output_ldk'].to(device) + pred = model(input_ldk) + + # compute loss + loss = loss_fn(pred, output_ldk) + total_loss += loss + + input_ldk = input_ldk.cpu().detach().reshape((-1, 77, 2)).numpy() + output_ldk = output_ldk.cpu().detach().reshape((-1, 4, 2)).numpy() + pred = pred.cpu().detach().reshape((-1, 4, 2)).numpy() + plt.scatter(input_ldk[0, :, 0], input_ldk[0, :, 1], marker='o', c='red') + plt.scatter(output_ldk[0, :, 0], output_ldk[0, :, 1], marker='o', c='green') + plt.scatter(pred[0, :, 0], pred[0, :, 1], marker='^', c='blue') + + plt.savefig(os.path.splitext(checkpoint_path)[0] + '_test.png') + plt.close('all') + + print(f'Test Loss at epoch {epoch}: {total_loss.item()}') + + torch.save(model.state_dict(), checkpoint_path) + + +if __name__ == '__main__': + + # load args + parser = argparse.ArgumentParser() + parser.add_argument('--dataset_base', required=True) + parser.add_argument('--target_name', required=True) + parser.add_argument('--checkpoint_dir', required=True) + + inopt = parser.parse_args() + + landmarks_dir = os.path.join(inopt.dataset_base, 'debug/proj_landmarks') + body_dir = os.path.join(inopt.dataset_base, 'body_pose') + checkpoint_path = os.path.join(inopt.checkpoint_dir, inopt.target_name, 'head2body.pkl') + + + if os.path.isfile(checkpoint_path): + print('Head2body already trained!') + + else: + tmp_img = Image.open(os.path.join(inopt.dataset_base, 'frames', '00000.jpg')) + im_size = tmp_img.size + train_regression_head2body(landmarks_dir, body_dir, checkpoint_path, im_size) + + diff --git a/motion-gan-pipeline/full_pipeline.sh b/motion-gan-pipeline/full_pipeline.sh new file mode 100644 index 0000000..126d4bd --- /dev/null +++ b/motion-gan-pipeline/full_pipeline.sh @@ -0,0 +1,112 @@ +#!/bin/bash + +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +set -e +eval "$(conda shell.bash hook)" + +# SET GPU DEVICE +GPUID=0 + +# BASE='/mnt/full_pipeline' +BASE=$1 +echo "DATAROOT: $BASE" + +NAME_AUDIO_FILE=$2 +NAME_VIDEO_FILE=$3 +echo "NAME_AUDIO_FILE: $2" +echo "NAME_VIDEO_FILE: $3" + +# You can choose here what you want +FPS=25 +SAMPLERATE=16000 + +# DATA PATHS +INPUTDATAPATH=$BASE/input_data +OUTPUTDATAPATH=$BASE/output_data +FEATURESPATH=features + +###AUDIO### +NAME_AUDIOS_LIST=( $NAME_AUDIO_FILE ) + +### VIDEO ### +NAME_VIDEOS_LIST=( $NAME_VIDEO_FILE ) + +for NAME_AUDIO in "${NAME_AUDIOS_LIST[@]}" +do + for NAME_VIDEO in "${NAME_VIDEOS_LIST[@]}" + do + + # if [ ! -d "$INPUTDATAPATH/audio/$NAME_AUDIO" ]; then + # echo "Moving audio file.." + # mkdir -p $INPUTDATAPATH/audio/$NAME_AUDIO + # mv $INPUTDATAPATH/audio/$NAME_AUDIO.wav $INPUTDATAPATH/audio/$NAME_AUDIO/$NAME_AUDIO.wav + # fi + + # if [ ! -d "$INPUTDATAPATH/video/$NAME_VIDEO" ]; then + # echo "Moving video file.." 
+ # mkdir -p $INPUTDATAPATH/video/$NAME_VIDEO + # mv $INPUTDATAPATH/video/$NAME_VIDEO.mp4 $INPUTDATAPATH/video/$NAME_VIDEO/$NAME_VIDEO.mp4 + # fi + + # Make output folder + tmp='_to_' + now="$(date +'_%d%m%y_%X')" + OUTPUTFOLDER=$OUTPUTDATAPATH/$NAME_AUDIO$tmp$NAME_VIDEO + # OUTPUTFOLDER=$OUTPUTDATAPATH/$NAME_AUDIO$tmp$NAME_VIDEO$now + echo $OUTPUTFOLDER + mkdir -p $OUTPUTFOLDER + + ##################### PREPROCESSING ##################### + echo -e '\n--------------PREPROCESSING---------------\n' + cd preprocessing + if [ ! -f "$INPUTDATAPATH/video/$NAME_VIDEO/track_params.pt" ] + then + bash process_video.sh $INPUTDATAPATH/video $NAME_VIDEO $FPS $SAMPLERATE + fi + + bash process_audio.sh $INPUTDATAPATH/audio $NAME_AUDIO $FPS $SAMPLERATE + cd .. + conda deactivate + + #################### AUDIO EXPRESSIONS ##################### + echo -e '\n--------------AUDIO EXPRESSIONS---------------\n' + conda activate pyenv + cd preprocessing + python third/Audio2ExpressionNet/get_audioexpr.py --name $NAME_AUDIO --dataset_base $INPUTDATAPATH/audio/$NAME_AUDIO --out_dir $OUTPUTFOLDER --mapping_path $INPUTDATAPATH/video/$NAME_VIDEO/mapping.npy + cd .. + + ##################### HEAD MOTION ##################### + echo -e '\n--------------HEAD MOTION---------------\n' + cd motion-generation + python transfer.py --dataroot $INPUTDATAPATH --dataset_names $NAME_AUDIO --target_name $NAME_VIDEO --out_dir $OUTPUTFOLDER --checkpoint_dir $BASE/checkpoints/ + cd .. + + ##################### HEAD TO SHOULDERS ##################### + echo -e '\n--------------HEAD TO BODY---------------\n' + python face2body.py --dataset_base $INPUTDATAPATH/video/$NAME_VIDEO --target_name $NAME_VIDEO --checkpoint_dir $BASE/checkpoints/ + + #################### EDGE MAP ##################### + echo -e '\n--------------EDGE MAP---------------\n' + cd preprocessing + python combine_edges.py --dataset_base $INPUTDATAPATH/video/$NAME_VIDEO --out_dir $OUTPUTFOLDER --target_name $NAME_VIDEO --checkpoint_dir $BASE/checkpoints/ + cd .. + + ##################### GAN INFERENCE ##################### + echo -e '\n--------------GAN INFERENCE---------------\n' + cd ImageToImage + python generate_images.py --dataroot $INPUTDATAPATH/video --video_name $NAME_VIDEO --input_test_root_dir $OUTPUTFOLDER/edges/ --out_dir $OUTPUTFOLDER/generated_frames/ --checkpoint_dir $BASE/checkpoints/ + cd .. 
+
+    ##################### POSTPROCESSING #####################
+    echo -e '\n--------------POSTPROCESSING---------------\n'
+    python postprocessing.py --dataroot $INPUTDATAPATH --name_audio $NAME_AUDIO --out_dir $OUTPUTFOLDER --fps $FPS --sr $SAMPLERATE --clean --move_to_one_folder
+
+    echo -e ' ================= ALL DONE ================= '
+    conda deactivate
+
+  done
+
+done
diff --git a/motion-gan-pipeline/full_pipeline_enhancement.sh b/motion-gan-pipeline/full_pipeline_enhancement.sh
new file mode 100644
index 0000000..4a4701d
--- /dev/null
+++ b/motion-gan-pipeline/full_pipeline_enhancement.sh
@@ -0,0 +1,118 @@
+#!/bin/bash
+
+# SPDX-License-Identifier: MIT
+# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details
+
+set -e
+eval "$(conda shell.bash hook)"
+
+# SET GPU DEVICE
+GPUID=0
+
+# BASE='/mnt/full_pipeline'
+BASE=$1
+echo "DATAROOT: $BASE"
+
+NAME_AUDIO_FILE=$2
+NAME_VIDEO_FILE=$3
+echo "NAME_AUDIO_FILE: $2"
+echo "NAME_VIDEO_FILE: $3"
+
+# You can choose here what you want
+FPS=25
+SAMPLERATE=16000
+
+# DATA PATHS
+INPUTDATAPATH=$BASE/input_data
+OUTPUTDATAPATH=$BASE/output_data
+FEATURESPATH=features
+
+### AUDIO ###
+NAME_AUDIOS_LIST=( $NAME_AUDIO_FILE )
+
+### VIDEO ###
+NAME_VIDEOS_LIST=( $NAME_VIDEO_FILE )
+
+for NAME_AUDIO in "${NAME_AUDIOS_LIST[@]}"
+do
+  for NAME_VIDEO in "${NAME_VIDEOS_LIST[@]}"
+  do
+
+    if [ ! -d "$INPUTDATAPATH/audio/$NAME_AUDIO" ]; then
+      echo "Moving audio file.."
+      mkdir -p $INPUTDATAPATH/audio/$NAME_AUDIO
+      mv $INPUTDATAPATH/audio/$NAME_AUDIO.wav $INPUTDATAPATH/audio/$NAME_AUDIO/$NAME_AUDIO.wav
+    fi
+
+    if [ ! -d "$INPUTDATAPATH/video/$NAME_VIDEO" ]; then
+      echo "Moving video file.."
+      mkdir -p $INPUTDATAPATH/video/$NAME_VIDEO
+      mv $INPUTDATAPATH/video/$NAME_VIDEO.mp4 $INPUTDATAPATH/video/$NAME_VIDEO/$NAME_VIDEO.mp4
+    fi
+
+    # Make output folder
+    tmp='_to_'
+    now="$(date +'_%d%m%y_%X')"
+    OUTPUTFOLDER=$OUTPUTDATAPATH/$NAME_AUDIO$tmp$NAME_VIDEO
+    # OUTPUTFOLDER=$OUTPUTDATAPATH/$NAME_AUDIO$tmp$NAME_VIDEO$now
+    echo $OUTPUTFOLDER
+    mkdir -p $OUTPUTFOLDER
+
+    ##################### PREPROCESSING #####################
+    echo -e '\n--------------PREPROCESSING---------------\n'
+    conda activate deepspeech
+    cd preprocessing
+    bash process_audio.sh $INPUTDATAPATH/audio $NAME_AUDIO $FPS $SAMPLERATE
+    bash process_video.sh $INPUTDATAPATH/video $NAME_VIDEO $FPS $SAMPLERATE
+    cd ..
+
+    #################### AUDIO EXPRESSIONS #####################
+    echo -e '\n--------------AUDIO EXPRESSIONS---------------\n'
+    cd preprocessing/
+    python third/Audio2ExpressionNet/get_audioexpr.py --name $NAME_AUDIO --dataset_base $INPUTDATAPATH/audio/$NAME_AUDIO --out_dir $OUTPUTFOLDER --mapping_path $INPUTDATAPATH/video/$NAME_VIDEO/mapping.npy
+    cd ..
+    conda deactivate
+
+    ##################### HEAD MOTION #####################
+    echo -e '\n--------------HEAD MOTION---------------\n'
+    conda activate pyenv
+    cd motion-generation
+    python transfer.py --dataroot $INPUTDATAPATH --dataset_names $NAME_AUDIO --target_name $NAME_VIDEO --out_dir $OUTPUTFOLDER --checkpoint_dir $BASE/checkpoints/
+    cd ..
+
+    ##################### HEAD TO SHOULDERS #####################
+    echo -e '\n--------------HEAD TO BODY---------------\n'
+    python face2body.py --dataset_base $INPUTDATAPATH/video/$NAME_VIDEO --target_name $NAME_VIDEO --checkpoint_dir $BASE/checkpoints/
+
+    #################### EDGE MAP #####################
+    echo -e '\n--------------EDGE MAP---------------\n'
+    cd preprocessing
+    python combine_edges.py --dataset_base $INPUTDATAPATH/video/$NAME_VIDEO --out_dir $OUTPUTFOLDER --target_name $NAME_VIDEO --checkpoint_dir $BASE/checkpoints/
+    cd ..
+
+    ##################### GAN INFERENCE #####################
+    echo -e '\n--------------GAN INFERENCE---------------\n'
+    cd ImageToImage
+    python generate_images.py --dataroot $INPUTDATAPATH/video --video_name $NAME_VIDEO --input_test_root_dir $OUTPUTFOLDER/edges/ --out_dir $OUTPUTFOLDER/generated_frames/ --checkpoint_dir $BASE/checkpoints/
+    cd ..
+
+    ##################### ENHANCEMENT #####################
+    echo -e '\n--------------ENHANCEMENT---------------\n'
+    cd GFPGAN
+    python inference_gfpgan.py -i $OUTPUTFOLDER/generated_frames/ -o $OUTPUTFOLDER/superes/ -v 1.4 -s 1 --bg_upsampler realesrgan
+    cd ..
+
+    ##################### POSTPROCESSING #####################
+    echo -e '\n--------------POSTPROCESSING---------------\n'
+    python postprocessing.py --dataroot $INPUTDATAPATH --name_audio $NAME_AUDIO --out_dir $OUTPUTFOLDER --fps $FPS --sr $SAMPLERATE --enhance --clean
+
+    echo -e ' ================= ALL DONE ================= '
+
+  done
+
+done
diff --git a/motion-gan-pipeline/gunicorn.conf.py b/motion-gan-pipeline/gunicorn.conf.py
new file mode 100644
index 0000000..9bc8d9c
--- /dev/null
+++ b/motion-gan-pipeline/gunicorn.conf.py
@@ -0,0 +1,19 @@
+# https://github.com/benoitc/gunicorn/blob/master/examples/example_config.py
+
+# Bind & deployment
+
+bind = '0.0.0.0:5000'
+reload = False
+
+# Connections
+# The dashboard backend should be capable of supporting multiple workers;
+# however, initialization is currently an issue when running in multiple threads.
+workers = 1  # if DashboardConfig.debug else 4
+threads = 4
+backlog = 64
+
+timeout = 300
+
+# Logging
+loglevel = 'info'
diff --git a/motion-gan-pipeline/media/audio2video.mp4 b/motion-gan-pipeline/media/audio2video.mp4
new file mode 100644
index 0000000..4191be3
--- /dev/null
+++ b/motion-gan-pipeline/media/audio2video.mp4
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0139fa480dd3c47d3c1e39bd50001f0c6a0c39444e06f03aa58d0a47a2ed4246
+size 2201602
diff --git a/motion-gan-pipeline/media/pipeline.gif b/motion-gan-pipeline/media/pipeline.gif
new file mode 100644
index 0000000..aa5a243
Binary files /dev/null and b/motion-gan-pipeline/media/pipeline.gif differ
diff --git a/motion-gan-pipeline/media/pose.png b/motion-gan-pipeline/media/pose.png
new file mode 100644
index 0000000..b6ad696
Binary files /dev/null and b/motion-gan-pipeline/media/pose.png differ
diff --git a/motion-gan-pipeline/motion-generation/README.md b/motion-gan-pipeline/motion-generation/README.md
new file mode 100644
index 0000000..1b8cbe9
--- /dev/null
+++ b/motion-gan-pipeline/motion-generation/README.md
@@ -0,0 +1,116 @@
+# Motion Generation
+
+This is a re-adaptation of the [LiveSpeechPortraits](https://github.com/YuanxunLu/LiveSpeechPortraits) repository. \
+The LSP pipeline generates realistic head motion from an input audio signal. \
+We modified the original pipeline to generate head motion from extracted DeepSpeech audio features.
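+
+As a rough mental model — not the network used in this repository — the task can be pictured as a sequence-to-sequence regression from per-frame audio features to per-frame head poses:
+
+```python
+import torch
+import torch.nn as nn
+
+class ToyAudio2Headpose(nn.Module):
+    """Illustrative only: maps per-frame audio features (here assumed
+    29-dim, as in DeepSpeech character logits) to a 6-DoF head pose
+    (3 rotation + 3 translation parameters) per frame."""
+    def __init__(self, feat_dim=29, hidden=128):
+        super().__init__()
+        self.rnn = nn.GRU(feat_dim, hidden, num_layers=2, batch_first=True)
+        self.head = nn.Linear(hidden, 6)
+
+    def forward(self, feats):      # feats: (batch, frames, feat_dim)
+        out, _ = self.rnn(feats)
+        return self.head(out)      # (batch, frames, 6)
+
+poses = ToyAudio2Headpose()(torch.randn(1, 100, 29))
+print(poses.shape)                 # torch.Size([1, 100, 6])
+```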
+
+## Requirements
+
+```
+conda create -n LSP python=3.6
+conda activate LSP
+```
+
+- Clone the repository:
+
+- FFmpeg is required to combine the audio and the silent generated videos. Please check [FFmpeg](http://ffmpeg.org/download.html) for installation. For Linux users, you can also:
+
+```
+sudo apt-get install ffmpeg
+```
+
+- Install the dependencies:
+
+```
+pip install -r requirements.txt
+```
+
+## Train on large dataset
+
+We trained our model on a large video dataset, the [TED Dataset](https://snap-research.github.io/articulated-animation/), to learn generic human head motion.
+You can download the dataset using the scripts in TED_dataset_utils.
+
+You can run the training script with the following command:
+```bash
+python train.py --config config/Audio2Headpose_Ted_deepspeech.yml
+```
+You can create your own config file using the make_config.py script.
+
+The following data structure is expected:
+```bash
+TED_dataset
+├── Test
+│   ├── VID_0
+│   ...
+│   └── VID_N
+│
+└── Train
+    ├── VID_0
+    ...
+    └── VID_N
+```
+
+Where each video folder contains processed data that was generated using our [Preprocessing pipeline](https://gitlab.ethz.ch/mtc/video-synthesis/visionPreprocessing).
+
+
+## Transfer & Finetuning
+In the Audio-driven Video Synthesis project, we finetune the checkpoint obtained from training on the TED dataset using the input video data. The generated motion therefore better fits the person-specific motion present in the video.
+
+The transfer is executed using:
+```bash
+python transfer.py --dataroot $INPUTDATAPATH \
+                   --dataset_names $NAME_AUDIO \
+                   --target_name $NAME_VIDEO \
+                   --out_dir $OUTPUTFOLDER \
+                   --checkpoint_dir $CHECKPOINTPATH
+```
+Where:
+- ```$INPUTDATAPATH```: path to the data folder.
+- ```$NAME_AUDIO```: input audio name.
+- ```$NAME_VIDEO```: target video name.
+- ```$OUTPUTFOLDER```: path to the output folder.
+- ```$CHECKPOINTPATH```: path to the checkpoints folder.
+
+If no finetuned checkpoint for the requested video exists, the finetuning script is launched automatically.
+
+Alternatively, you can launch finetuning directly by running:
+```bash
+python finetune.py --name $NAME_VIDEO \
+                   --dataset_mode $DATASETMODE \
+                   --dataroot $INPUTDATAPATH \
+                   --dataset_names $NAME_AUDIO \
+                   --fps $FPS \
+                   --target_checkpoints $OUTPUTCHECKPOINT \
+                   --checkpoint_path $TUNECHECKPOINT
+```
+Where:
+- ```$NAME_VIDEO```: target video name.
+- ```$DATASETMODE```: type of dataset, set to 'deepspeech'.
+- ```$INPUTDATAPATH```: path to the data folders.
+- ```$NAME_AUDIO```: input audio name.
+- ```$FPS```: target video's fps.
+- ```$OUTPUTCHECKPOINT```: path to the output checkpoint directory.
+- ```$TUNECHECKPOINT```: path to the checkpoints to finetune.
+
+## Correlation Visuals
+We also provide scripts to visualise audio features and to compute the correlation between audio features and head motion.
+
+
+- audio_visuals:
+visualise multiple audio representations, the original video, the tracked FLAME head and DeepSpeech features.
+
+  drawing
+

+ +- emotion_correlation_visuals: +plot and save correlation between EMOCA emotions and head motion. + + drawing +

+ +- audio_correlation_visual: +plot and save correlation between audio features and head motion. + + drawing + + diff --git a/motion-gan-pipeline/motion-generation/TED_dataset_utils/load_videos.py b/motion-gan-pipeline/motion-generation/TED_dataset_utils/load_videos.py new file mode 100644 index 0000000..f909a9d --- /dev/null +++ b/motion-gan-pipeline/motion-generation/TED_dataset_utils/load_videos.py @@ -0,0 +1,127 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import numpy as np +import pandas as pd +import imageio +import os +import subprocess +from multiprocessing import Pool +from itertools import cycle +import warnings +import glob +import time +from tqdm import tqdm +from util import save +from argparse import ArgumentParser +from skimage import img_as_ubyte +from skimage.transform import resize +from moviepy.video.io.ffmpeg_tools import ffmpeg_extract_subclip +from moviepy.video.fx.all import crop +from moviepy.editor import * + +warnings.filterwarnings("ignore") + +DEVNULL = open(os.devnull, 'wb') + +def download(video_id, args): + video_path = os.path.join(args.video_folder, video_id + ".mp4") + subprocess.call([args.youtube, '-f', "''best/mp4''", '--write-auto-sub', '--write-sub', + '--sub-lang', 'en', '--skip-unavailable-fragments', + "https://www.youtube.com/watch?v=" + video_id, "--output", + video_path], stdout=DEVNULL, stderr=DEVNULL) + return video_path + +def run(data): + video_id, args = data + + if not os.path.exists(os.path.join(args.video_folder, video_id + '.mp4')): + download(video_id, args) + + if not os.path.exists(os.path.join(args.video_folder, video_id + '.mp4')): + print('Broken Link: ', video_id) + return + + if args.mode == 'd': + return + try: + reader = imageio.get_reader(os.path.join(args.video_folder, video_id + '.mp4')) + + except FileNotFoundError: + print(f'Could not find: {os.path.join(args.video_folder, video_id + ".mp4")}') + return + + frame = reader.get_next_data() + fps = reader.get_meta_data()['fps'] + df = pd.read_csv(args.metadata) + df = df[df['video_id'] == video_id] + + all_chunks_dict = [{'start': df['start'].iloc[j], 'end': df['end'].iloc[j], + 'bbox': list(map(int, df['bbox'].iloc[j].split('-'))), 'frames': []} for j in + range(df.shape[0])] + + ref_height = df['height'].iloc[0] + ref_width = df['width'].iloc[0] + partition = df['partition'].iloc[0] + + for entry in all_chunks_dict: + + if 'person_id' in df: + first_part = df['person_id'].iloc[0] + "#" + else: + first_part = "" + first_part = first_part + '#' + video_id + inpath = os.path.join(args.video_folder, video_id + '.mp4') + path = first_part + '#' + str(entry['start']).zfill(6) + '#' + str(entry['end']).zfill(6) + '.mp4' + outpath = os.path.join(args.out_folder, partition, path) + + clip = VideoFileClip(inpath) + print('start: ', entry['start']/fps, ', end: ', entry['end']/fps) + + try: + clip = clip.subclip(entry['start']/fps, entry['end']/fps) + + except ValueError as e: + print(e) + return + + left, top, right, bot = entry['bbox'] + + left = int(left / (ref_width / frame.shape[1])) + top = int(top / (ref_height / frame.shape[0])) + right = int(right / (ref_width / frame.shape[1])) + bot = int(bot / (ref_height / frame.shape[0])) + clip = crop(clip, left, top, right, bot) + + # saving the clip + clip.write_videofile(outpath) + + +if __name__ == "__main__": + parser = ArgumentParser() + parser.add_argument("--video_folder", help='Path to youtube videos') + parser.add_argument("--metadata", help='Path to 
metadata') + parser.add_argument("--out_folder", help='Path to output') + parser.add_argument("--format", help='Storing format') + parser.add_argument("--workers", default=1, type=int, help='Number of workers') + parser.add_argument("--youtube", default='./youtube-dl', help='Path to youtube-dl') + parser.add_argument("--mode", default='d_p', help='Mode: download (d) or download and process (d_p)') + + args = parser.parse_args() + if not os.path.exists(args.video_folder): + os.makedirs(args.video_folder) + if not os.path.exists(args.out_folder): + os.makedirs(args.out_folder) + for partition in ['test', 'train']: + if not os.path.exists(os.path.join(args.out_folder, partition)): + os.makedirs(os.path.join(args.out_folder, partition)) + + df = pd.read_csv(args.metadata) + video_ids = df['video_id'] + + pool = Pool(processes=args.workers) + args_list = cycle([args]) + for chunks_data in tqdm(pool.imap_unordered(run, zip(video_ids, args_list))): + None + + diff --git a/motion-gan-pipeline/motion-generation/TED_dataset_utils/ted384-metadata.csv b/motion-gan-pipeline/motion-generation/TED_dataset_utils/ted384-metadata.csv new file mode 100644 index 0000000..683251b --- /dev/null +++ b/motion-gan-pipeline/motion-generation/TED_dataset_utils/ted384-metadata.csv @@ -0,0 +1,1266 @@ +video_id,start,end,bbox,fps,width,height,partition +VM6HZqQKhok,16609,16737,616-112-1172-667,24.0,1920,1080,train +4E_1AB1rsSw,14854,15029,1240-131-1691-581,24.0,1920,1080,test +B43f89Y9f-A,13336,13572,534-270-933-669,30.0,1920,1080,train +4zLmWpMDY8Q,12019,12190,241-242-591-591,24.0,1920,1080,train +rS_Ssisz2_M,7168,7339,784-136-1097-448,30.0,1280,720,train +Fxt_MZKMdes,1276,1514,378-59-761-442,24.0,1280,720,train +8Dv2Hdf5TRg,10948,11114,764-75-1080-392,24.0,1280,720,test +8DDgHq9ewOo,19725,19916,1049-79-1435-466,24.0,1920,1080,train +pyFcr2WcOyo,13029,13417,1131-254-1500-623,24.0,1920,1080,train +UQzvNIIMayo,24043,24186,613-78-1174-640,30.0,1920,1080,train +esD6aaIjhek,8846,9046,271-37-583-349,30.0,1280,720,train +N39x_WTPix0,13432,13562,638-21-1017-400,25.0,1280,720,train +27lMmdmy-b8,19823,20124,452-57-810-413,30.0,1280,720,train +LnJwH_PZXnM,24134,24328,930-91-1484-644,25.0,1920,1080,train +77HUdJ7Tij0,1754,2046,1231-120-1698-587,25.0,1920,1080,train +kdqUhTnAgJU,4528,4674,550-64-861-375,30.0,1280,720,train +EZKDkSY1GBM,1801,1947,1371-130-1728-486,25.0,1920,1080,train +lmyZMtPVodo,3288,3455,668-88-985-405,24.0,1280,720,train +5zWmdHmJMd0,13695,13828,394-40-978-624,24.0,1920,1080,train +6wNif5SlN08,12243,12391,725-90-1073-438,24.0,1280,720,train +yNhu0MG_2MA,15079,15207,298-128-722-553,24.0,1920,1080,train +HiwJ0hNl1Fw,19802,19950,331-47-663-379,25.0,1280,720,train +ERSZb2wHFDw,9942,10154,791-178-1222-609,30.0,1920,1080,train +yNhu0MG_2MA,15293,15483,677-82-1133-539,24.0,1920,1080,train +QeIrdqU0o9s,9717,9869,407-57-775-425,24.0,1280,720,train +LiUClSItcy0,2910,3194,419-104-743-428,24.0,1280,720,test +yLfCCcVDUiU,11743,12036,1005-141-1365-500,24.0,1920,1080,train +Uq1idqpX9-A,3270,3404,755-69-1104-418,24.0,1280,720,train +8Z24LCysq3A,8681,8961,531-42-891-402,25.0,1280,720,train +YUUP2MMz7PU,1605,1830,726-177-1151-602,24.0,1920,1080,train +Jc9RdbHEFa0,11429,11585,666-59-1002-396,24.0,1280,720,test +949yOxz_x80,13333,13579,984-101-1480-598,25.0,1920,1080,train +yNhu0MG_2MA,4164,4617,499-37-1046-584,24.0,1920,1080,train +BzeTjn0R2VY,7387,7708,482-45-1075-637,24.0,1920,1080,train +0gks6ceq4eQ,16612,16848,1055-68-1575-588,24.0,1920,1080,test +dkmpRQdJslI,8681,9033,348-13-764-427,24.0,1280,720,train 
+LiUClSItcy0,6855,7128,438-86-788-436,24.0,1280,720,test +DGvPfD1Dd1U,14815,15208,1180-101-1657-579,25.0,1920,1080,train +Ubbmje44gLg,11585,11723,909-161-1290-543,25.0,1920,1080,train +g2m97gPI70I,16699,16891,244-86-574-417,30.0,1280,720,train +Mr8nvXvl-y8,719,970,332-152-643-463,25.0,1280,720,train +V1yW5IsnSjo,12145,12273,607-137-932-461,30.0,1280,720,train +kmbui1xF8DE,17256,17534,659-138-1165-643,25.0,1920,1080,train +RjquHTj4HlY,11734,12042,1023-188-1498-661,24.0,1920,1080,train +GIBoCJeB3HQ,3803,3962,229-230-597-599,24.0,1920,1080,train +BoRUrWcdkQ4,589,745,449-105-798-453,30.0,1280,720,train +I3BJVaioX_k,22294,22546,777-77-1313-612,25.0,1920,1080,train +UMhLBPPtlrY,23040,23306,655-58-1008-411,30.0,1280,720,test +idfv7Lw4Y_s,2452,2701,715-83-1232-600,25.0,1920,1080,train +pxEcvU0Vp_M,14404,14610,825-207-1152-533,30.0,1920,1080,train +YUUP2MMz7PU,17126,17263,713-180-1133-600,24.0,1920,1080,train +cK74vhqzeeQ,18362,18592,461-84-793-415,25.0,1280,720,test +aTfwA1TaH3Q,12919,13059,732-56-1067-391,24.0,1280,720,train +I3BJVaioX_k,511,641,537-141-1026-631,25.0,1920,1080,train +-nKdufEaL8k,7202,7434,266-121-652-508,25.0,1920,1080,train +2wseM6wWd74,12014,12363,608-41-917-350,24.0,1280,720,train +gypAjPp6eps,16669,16808,310-88-650-428,24.0,1280,720,train +ClfBxWPkBKU,24118,24322,222-42-568-390,30.0,1280,720,train +TLZ6W-Nqv1I,314,448,1057-117-1514-575,25.0,1920,1080,train +360bU-vBJOI,11030,11186,795-165-1230-600,24.0,1920,1080,test +UMhLBPPtlrY,21859,22146,707-85-1021-399,30.0,1280,720,test +jAw8t2g-eVU,21784,22144,622-69-1135-583,30.0,1920,1080,train +TLZ6W-Nqv1I,10376,10568,1039-133-1437-531,25.0,1920,1080,train +C74amJRp730,16824,17085,766-55-1129-418,24.0,1280,720,train +360bU-vBJOI,10853,11004,741-180-1166-605,24.0,1920,1080,test +kmbui1xF8DE,8597,8782,747-152-1243-647,25.0,1920,1080,train +bGUVQaBdxKw,14130,14499,1185-100-1665-580,25.0,1920,1080,train +B43f89Y9f-A,19659,19857,1105-261-1480-636,30.0,1920,1080,train +4CGFPbFqdJ4,13554,13882,753-83-1257-587,24.0,1920,1080,train +bGUVQaBdxKw,2392,2628,1242-123-1662-543,25.0,1920,1080,train +BzeTjn0R2VY,16566,16817,694-50-1191-547,24.0,1920,1080,train +kaU7IPlg9PA,4193,4337,1161-101-1585-525,25.0,1920,1080,train +d38LKbYfWrs,2610,2807,730-88-1046-404,24.0,1280,720,train +a63t8r70QN0,1613,2039,922-239-1337-654,30.0,1920,1080,train +ONs9FCY74p0,3498,3629,1216-159-1653-596,24.0,1920,1080,train +zamvnyBB-SU,15610,15876,1078-36-1621-579,24.0,1920,1080,train +AEKy1AS75Zs,10278,10459,788-39-1344-595,25.0,1920,1080,train +esD6aaIjhek,10109,10314,439-19-814-392,30.0,1280,720,train +SF9qq6vQ3Pg,2546,2690,733-128-1235-629,24.0,1920,1080,train +CZNrOzgNWf4,7008,7147,754-76-1077-398,30.0,1280,720,train +iFTWM7HV2UI,11447,12032,684-111-1199-626,24.0,1920,1080,train +08z_xW-szwM,6317,6736,670-254-1058-642,30.0,1920,1080,train +U51MSK6nSQE,6747,7244,1240-111-1705-576,24.0,1920,1080,train +YUUP2MMz7PU,14064,14540,743-175-1166-599,24.0,1920,1080,train +5ErKrSyUpEo,15895,16145,286-85-769-567,24.0,1920,1080,train +a63t8r70QN0,21847,22019,241-261-585-606,30.0,1920,1080,train +4E_1AB1rsSw,509,674,1227-131-1649-553,24.0,1920,1080,test +PVXQUItNEDQ,12524,12939,613-62-917-365,24.0,1280,720,train +ddw1_3ZVjTE,6951,7207,487-103-815-430,24.0,1280,720,train +YUUP2MMz7PU,3555,3686,763-184-1172-592,24.0,1920,1080,train +-eBUcBfkVCo,46758,46901,484-121-817-455,30.0,1280,720,train +xDKth-qS8Jk,19917,20109,980-44-1549-612,25.0,1920,1080,train +yLfCCcVDUiU,5261,5463,922-167-1267-512,24.0,1920,1080,train +U51MSK6nSQE,9261,9551,1270-114-1716-560,24.0,1920,1080,train 
+zfbigT9I0Qg,6723,6933,1243-166-1690-614,24.0,1920,1080,train +phgjouv0BUA,20509,20703,342-166-715-540,24.0,1920,1080,train +iMFJef3xnmg,17073,17268,843-179-1271-607,30.0,1920,1080,train +GIrfNWed1Mk,9269,9486,1229-177-1692-640,24.0,1920,1080,test +i2l9v_seHCc,15736,16025,1218-156-1651-588,24.0,1920,1080,train +idfv7Lw4Y_s,13853,14130,695-88-1211-604,25.0,1920,1080,train +360bU-vBJOI,15287,15464,796-177-1216-598,24.0,1920,1080,test +9A7_xCrgX1U,19079,19270,1053-147-1553-648,24.0,1920,1080,test +1AT5klu_yAQ,13167,13330,212-70-672-530,24.0,1920,1080,train +e-BZigXlphY,15854,16226,701-74-1015-388,24.0,1280,720,train +UoMpbL_Fsig,532,720,461-63-774-376,24.0,1280,720,train +ONs9FCY74p0,20110,20295,1136-153-1581-598,24.0,1920,1080,train +eaCrsBtiYA4,18951,19400,263-110-790-638,24.0,1920,1080,train +BzeTjn0R2VY,18010,18206,270-13-870-614,24.0,1920,1080,train +2paoNvG5Nmo,49057,49289,656-206-1065-614,50.0,1920,1080,train +360bU-vBJOI,12867,13032,820-176-1240-596,24.0,1920,1080,test +2IFa0tqHrwE,6921,7133,1037-294-1429-685,24.0,1920,1080,train +4E_1AB1rsSw,10714,10884,1225-111-1700-585,24.0,1920,1080,test +0gks6ceq4eQ,17414,17568,1008-63-1519-575,24.0,1920,1080,test +ERSZb2wHFDw,11903,12049,800-180-1226-605,30.0,1920,1080,train +U51MSK6nSQE,12982,13375,1213-110-1672-568,24.0,1920,1080,train +BzeTjn0R2VY,20243,20385,671-61-1155-546,24.0,1920,1080,train +bNpx7gpSqbY,8243,8415,414-31-811-428,24.0,1280,720,train +cgxZ4H3gJ8c,15037,15423,419-67-779-427,24.0,1280,720,train +raCIUeGUr3s,828,962,403-80-709-386,30.0,1280,720,train +I3BJVaioX_k,18895,19194,620-101-1134-614,25.0,1920,1080,train +3zJHwOwirjA,6829,7078,405-161-770-525,24.0,1920,1080,train +t0Cr64zCc38,16937,17181,954-125-1453-624,24.0,1920,1080,train +iMFJef3xnmg,24671,25227,842-170-1303-632,30.0,1920,1080,train +PJLT0cAPNfs,4731,4905,1100-171-1545-616,24.0,1920,1080,train +LnJwH_PZXnM,20609,20825,986-134-1451-600,25.0,1920,1080,train +77HUdJ7Tij0,7186,7316,1222-109-1668-555,25.0,1920,1080,train +BzeTjn0R2VY,13774,13980,615-11-1192-588,24.0,1920,1080,train +iMFJef3xnmg,891,1078,669-182-1126-638,30.0,1920,1080,train +zbHe4RpgV80,7350,7492,615-211-1021-617,30.0,1920,1080,test +fO2htapfNhA,14908,15079,616-54-1126-562,24.0,1920,1080,train +RcGyVTAoXEU,10100,10243,649-60-993-404,25.0,1280,720,train +hU-aTB-heU0,17365,17560,752-127-1088-462,30.0,1920,1080,train +vQILP19qABk,9286,9565,682-108-1017-443,30.0,1280,720,train +1fZ915L1w7I,11968,12149,806-190-1208-593,30.0,1920,1080,train +lyR-K2CZIHQ,5664,5906,585-52-937-403,24.0,1280,720,train +I3BJVaioX_k,20337,20686,545-102-1065-623,25.0,1920,1080,train +4E_1AB1rsSw,10112,10411,1190-120-1652-583,24.0,1920,1080,test +0JGarsZE1rk,18217,18408,347-57-875-586,24.0,1920,1080,train +4E_1AB1rsSw,6255,6461,1198-107-1662-573,24.0,1920,1080,test +g2m97gPI70I,5586,5728,172-90-501-419,30.0,1280,720,train +-eBUcBfkVCo,46901,47078,485-119-818-452,30.0,1280,720,train +3lVNA4aF1lg,6525,6791,682-114-1189-621,25.0,1920,1080,train +1k89OTpDvIU,5282,5447,739-59-1091-411,30.0,1280,720,train +cZe3zcNoIKk,3813,3988,617-87-1166-635,25.0,1920,1080,train +UoMpbL_Fsig,780,962,466-64-777-375,24.0,1280,720,train +ON4iy8hq2hM,11970,12110,1094-174-1537-618,24.0,1920,1080,train +6wNif5SlN08,11660,11924,718-94-1058-434,24.0,1280,720,train +U51MSK6nSQE,5486,5673,1178-119-1606-547,24.0,1920,1080,train +yNhu0MG_2MA,14024,14297,676-55-1179-557,24.0,1920,1080,train +y7l5QHRY_AI,7139,7386,491-61-845-415,24.0,1280,720,train +tbvUCrS5_5I,4632,4773,712-123-1211-621,24.0,1920,1080,train 
+vQILP19qABk,13867,14019,699-120-1009-430,30.0,1280,720,train +VJoQj00RZHg,7527,7692,461-93-770-402,25.0,1280,720,train +pxEcvU0Vp_M,6057,6264,800-169-1193-563,30.0,1920,1080,train +LcNvkhS4UYg,10842,10979,850-164-1210-522,30.0,1920,1080,test +iFTWM7HV2UI,18446,18693,726-94-1262-631,24.0,1920,1080,train +4E_1AB1rsSw,4064,4342,1222-119-1667-564,24.0,1920,1080,test +YUUP2MMz7PU,2609,2952,752-186-1164-598,24.0,1920,1080,train +TLZ6W-Nqv1I,17442,17593,998-131-1478-611,25.0,1920,1080,train +kmbui1xF8DE,15953,16213,683-123-1225-665,25.0,1920,1080,train +rToBZSrxjgM,2341,2472,700-64-1039-403,24.0,1280,720,train +B43f89Y9f-A,14203,14426,529-270-923-665,30.0,1920,1080,train +BzeTjn0R2VY,5198,5382,258-56-827-625,24.0,1920,1080,train +27lMmdmy-b8,9976,10272,393-46-748-400,30.0,1280,720,train +yhMi0tkYyT8,19941,20307,492-58-839-404,30.0,1280,720,train +RovaUZY3e8k,2916,3169,324-17-735-429,24.0,1280,720,train +1fZ915L1w7I,554,752,793-105-1236-548,30.0,1920,1080,train +kaU7IPlg9PA,3917,4190,1196-99-1633-536,25.0,1920,1080,train +oGlsUYGMYks,2271,2455,404-76-748-420,24.0,1280,720,train +yNhu0MG_2MA,16183,16544,695-72-1174-551,24.0,1920,1080,train +vPn94XAUBoo,1562,1821,528-59-874-405,24.0,1280,720,train +yLfCCcVDUiU,7354,7607,902-340-1270-707,24.0,1920,1080,train +YUUP2MMz7PU,3692,3821,800-185-1209-595,24.0,1920,1080,train +U51MSK6nSQE,959,1199,1216-107-1663-554,24.0,1920,1080,train +iMFJef3xnmg,541,795,729-196-1172-638,30.0,1920,1080,train +iFTWM7HV2UI,13431,13905,709-142-1160-594,24.0,1920,1080,train +iMFJef3xnmg,21011,21913,855-179-1300-623,30.0,1920,1080,train +F3mzS32SH2Q,15905,16083,435-263-800-628,25.0,1920,1080,train +pxEcvU0Vp_M,21526,21722,796-197-1142-542,30.0,1920,1080,train +iFTWM7HV2UI,10355,10616,737-129-1204-595,24.0,1920,1080,train +jAw8t2g-eVU,8276,8728,692-103-1176-586,30.0,1920,1080,train +VMXdSkW6hns,12546,12870,679-67-1199-587,30.0,1920,1080,train +eaCrsBtiYA4,10465,10784,215-119-741-645,24.0,1920,1080,train +2VBkDNzeRZM,4732,4938,349-70-923-645,24.0,1920,1080,train +wYb3Wimn01s,402,534,878-101-1359-583,24.0,1920,1080,train +bGUVQaBdxKw,13965,14129,1226-113-1689-577,25.0,1920,1080,train +tuvxXnQrRv8,12883,13120,1224-123-1694-593,25.0,1920,1080,train +Juc_yvEkJuc,6698,6833,1106-73-1640-607,25.0,1920,1080,train +YUUP2MMz7PU,4967,5164,764-186-1176-598,24.0,1920,1080,train +ER0G2S9r7aE,11569,11705,576-135-1013-572,25.0,1920,1080,train +0gks6ceq4eQ,14103,14264,1030-69-1534-572,24.0,1920,1080,test +CTn0HYmMAYM,12038,12321,807-174-1301-667,25.0,1920,1080,train +zbHe4RpgV80,20694,21047,920-206-1323-609,30.0,1920,1080,test +501FEzbB1JI,7406,7594,246-74-597-425,30.0,1280,720,train +SF9qq6vQ3Pg,12614,12762,828-171-1256-599,24.0,1920,1080,train +iMFJef3xnmg,4140,4584,747-182-1186-621,30.0,1920,1080,train +77HUdJ7Tij0,13225,13483,1210-107-1698-595,25.0,1920,1080,train +kmbui1xF8DE,2148,2395,669-70-1247-649,25.0,1920,1080,train +2Brajdazp1o,11560,11741,703-83-1051-431,30.0,1280,720,train +B43f89Y9f-A,14427,14653,529-278-914-663,30.0,1920,1080,train +-7ORAKULel4,19858,20076,755-197-1224-665,25.0,1920,1080,train +9EVEmZ2c_es,13627,13824,675-76-1026-427,30.0,1280,720,train +pxEcvU0Vp_M,2773,3073,850-198-1192-539,30.0,1920,1080,train +ERSZb2wHFDw,2886,3100,750-173-1200-622,30.0,1920,1080,train +zMWYQRKuc5M,20879,21008,798-51-1112-365,24.0,1280,720,train +oGlsUYGMYks,11880,12201,354-61-742-448,24.0,1280,720,train +fWqKalpYgLo,1296,1455,334-164-730-560,25.0,1920,1080,train +g3vSYbT1Aco,17247,17381,575-140-1051-616,24.0,1920,1080,train +ZCRC5_0kfiw,10674,10852,697-68-1030-403,30.0,1280,720,train 
+rftagV38YKY,12970,13145,247-64-573-391,24.0,1280,720,train +BXlnrFpCu0c,26558,26762,433-81-741-388,30.0,1280,720,train +fO2htapfNhA,10834,11101,689-62-1196-569,24.0,1920,1080,train +g3vSYbT1Aco,12113,12300,491-111-1011-631,24.0,1920,1080,train +2paoNvG5Nmo,18161,18303,781-156-1278-654,50.0,1920,1080,train +JKS7HWy2TRU,1690,2062,1138-166-1597-624,24.0,1920,1080,train +y7l5QHRY_AI,587,1006,439-64-803-428,24.0,1280,720,train +-H1tUMRJoeo,1296,1442,1021-84-1544-608,24.0,1920,1080,train +CsECS5qsGLs,5768,5922,820-91-1138-410,25.0,1280,720,train +Cr1usGUSuL4,7426,7555,542-96-890-443,24.0,1280,720,train +iFTWM7HV2UI,6125,6435,671-49-1294-672,24.0,1920,1080,train +i-icXZ2tMRM,12258,12558,590-104-905-419,24.0,1280,720,train +UQzvNIIMayo,8793,9029,721-82-1267-628,30.0,1920,1080,train +oGlsUYGMYks,15430,15725,383-77-723-416,24.0,1280,720,train +RDIy58g9n2k,7326,7494,403-80-719-395,24.0,1280,720,train +U4gR_cd6xOc,3971,4121,253-84-690-521,24.0,1920,1080,train +tuvxXnQrRv8,3137,3330,1170-128-1645-604,25.0,1920,1080,train +QeIrdqU0o9s,5982,6229,468-88-778-398,24.0,1280,720,train +v3-ZTqj2_kw,23193,23383,307-54-651-398,30.0,1280,720,train +r-YXvkWLy-g,7151,7366,310-118-794-603,24.0,1920,1080,train +Xb1Wq4NA290,13417,13602,982-89-1428-535,24.0,1920,1080,train +yjYrxcGSWX4,7930,8096,327-265-725-664,25.0,1920,1080,train +cK74vhqzeeQ,1775,2079,333-46-678-390,25.0,1280,720,test +iMFJef3xnmg,1766,1895,705-194-1135-624,30.0,1920,1080,train +yNhu0MG_2MA,1526,1929,722-139-1190-607,24.0,1920,1080,train +1N39Z0ODeME,8820,9044,506-162-821-476,30.0,1280,720,train +-nKdufEaL8k,1783,1933,259-123-650-514,25.0,1920,1080,train +eemOZfsrKdg,1065,1255,383-144-772-534,25.0,1920,1080,train +501FEzbB1JI,10268,10443,280-65-659-444,30.0,1280,720,train +uuatZO76MgQ,2428,2636,963-89-1449-575,30.0,1920,1080,train +lV3trdyDdHE,16576,16818,578-36-977-435,25.0,1280,720,train +j-S03JfgHEA,2604,2742,772-75-1103-404,25.0,1280,720,train +3lVNA4aF1lg,2338,2554,699-138-1193-632,25.0,1920,1080,train +QeIrdqU0o9s,5459,5642,447-69-791-412,24.0,1280,720,train +AHV_BxlNzmM,17655,17839,727-133-1061-468,24.0,1920,1080,train +eemOZfsrKdg,10670,10973,569-75-1037-543,25.0,1920,1080,train +2Euof4PnjDk,11683,11931,751-113-1086-447,24.0,1280,720,train +9KvZy09vaNg,12208,12365,931-123-1355-548,24.0,1920,1080,train +xv4_L5zlYaA,15194,15386,660-156-1079-575,24.0,1920,1080,train +zpjxElfNpks,10561,10716,548-158-1015-624,25.0,1920,1080,test +cgxZ4H3gJ8c,16105,16318,385-69-745-429,24.0,1280,720,train +iB4MS1hsWXU,11324,11469,1012-124-1561-674,24.0,1920,1080,test +d6NKdnZvdoo,10763,10949,712-69-1040-396,25.0,1280,720,train +3wxBTEo8-T8,6514,6682,517-71-827-380,24.0,1280,720,test +54ST8bwOVyc,3983,4116,769-122-1219-572,24.0,1920,1080,train +bCucggordMM,11903,12040,301-183-746-628,24.0,1920,1080,test +y7l5QHRY_AI,2445,2710,428-54-781-407,24.0,1280,720,train +iFTWM7HV2UI,2425,2700,718-109-1249-640,24.0,1920,1080,train +yLfCCcVDUiU,14401,14559,1002-194-1349-540,24.0,1920,1080,train +QeIrdqU0o9s,7820,7991,376-66-726-416,24.0,1280,720,train +g9M3HIjHuq0,11056,11237,717-74-1054-411,24.0,1280,720,train +2ju61VPdTzw,21138,21274,457-76-770-387,30.0,1280,720,train +2paoNvG5Nmo,52760,52890,678-210-1115-647,50.0,1920,1080,train +raCIUeGUr3s,18608,18808,283-83-597-397,30.0,1280,720,train +qH4GAXXH29s,4171,4348,508-55-894-441,24.0,1280,720,train +ONs9FCY74p0,20805,20938,1133-153-1573-593,24.0,1920,1080,train +0DHywidLX6A,4696,4898,1068-132-1528-592,24.0,1920,1080,train +BXlnrFpCu0c,7692,8093,582-66-888-372,30.0,1280,720,train 
+RZgkjEdMbSw,2878,3025,1043-144-1503-605,24.0,1920,1080,train +iFTWM7HV2UI,27461,27836,741-94-1245-598,24.0,1920,1080,train +360bU-vBJOI,9474,9620,722-155-1192-625,24.0,1920,1080,test +wYb3Wimn01s,3364,3510,1005-80-1521-596,24.0,1920,1080,train +4E_1AB1rsSw,16185,16451,1215-123-1675-583,24.0,1920,1080,test +Juc_yvEkJuc,1721,1946,1145-141-1639-635,25.0,1920,1080,train +iMFJef3xnmg,14145,14649,796-157-1254-614,30.0,1920,1080,train +HCgv_HNoJrY,6985,7172,652-61-994-403,24.0,1280,720,train +t0Cr64zCc38,5724,5936,865-124-1361-620,24.0,1920,1080,train +oGlsUYGMYks,10112,10275,403-76-743-416,24.0,1280,720,train +I3BJVaioX_k,15742,15941,516-100-1064-648,25.0,1920,1080,train +oGlsUYGMYks,12737,12897,360-78-714-432,24.0,1280,720,train +8Dv2Hdf5TRg,5670,5856,556-69-887-400,24.0,1280,720,test +zbHe4RpgV80,3048,3221,502-207-886-591,30.0,1920,1080,test +1AT5klu_yAQ,10294,10479,194-48-692-546,24.0,1920,1080,train +FDhlOovaGrI,20017,20178,624-68-970-415,24.0,1280,720,test +DhnBn_c9f8Q,3002,3228,944-132-1430-618,24.0,1920,1080,train +9RTkZaX1cH0,10065,10291,311-78-654-421,30.0,1280,720,train +rYwTA5RA9eU,8589,8721,631-112-956-436,25.0,1280,720,train +gWPFJgLAzu4,15751,15952,676-51-1039-414,25.0,1280,720,train +9EVEmZ2c_es,8486,8868,726-46-1094-412,30.0,1280,720,train +VJoQj00RZHg,5864,6113,452-77-787-412,25.0,1280,720,train +ER15KmrB8h8,2508,2709,758-56-1307-605,24.0,1920,1080,train +PJLT0cAPNfs,7392,7627,1084-180-1519-614,24.0,1920,1080,train +lV3trdyDdHE,13121,13308,661-62-1001-401,25.0,1280,720,train +Hh09xlzxRmE,8629,8818,250-63-598-412,25.0,1280,720,train +27lMmdmy-b8,6544,6705,452-41-809-397,30.0,1280,720,train +U4gR_cd6xOc,8750,9090,391-52-880-541,24.0,1920,1080,train +ERSZb2wHFDw,3624,3897,750-177-1196-622,30.0,1920,1080,train +2IFa0tqHrwE,878,1063,969-300-1361-692,24.0,1920,1080,train +gJzSWacrkKo,5298,5427,1077-127-1536-586,25.0,1920,1080,test +Juc_yvEkJuc,3099,3249,1143-126-1628-611,25.0,1920,1080,train +zbHe4RpgV80,19622,19887,400-173-848-622,30.0,1920,1080,test +oGlsUYGMYks,4524,4665,359-87-692-420,24.0,1280,720,train +4CGFPbFqdJ4,6370,6664,749-88-1238-575,24.0,1920,1080,train +FDhlOovaGrI,10929,11233,304-56-652-404,24.0,1280,720,test +TLZ6W-Nqv1I,18766,18985,1042-131-1485-573,25.0,1920,1080,train +fO2htapfNhA,496,626,795-35-1339-579,24.0,1920,1080,train +UoMpbL_Fsig,7884,8028,487-38-824-376,24.0,1280,720,train +08z_xW-szwM,5044,5209,650-248-1041-639,30.0,1920,1080,train +EEjyPqyFe_s,18650,18899,693-55-1044-407,24.0,1280,720,train +360bU-vBJOI,1258,1545,731-151-1217-637,24.0,1920,1080,test +U51MSK6nSQE,7802,8074,1252-113-1704-563,24.0,1920,1080,train +FauPVZxDXxk,7454,7671,452-77-787-411,24.0,1280,720,train +GwRzjFQa_Og,6775,7595,578-66-1111-599,29.97,1920,1080,train +2_0aEezKvBE,2811,2945,510-73-820-384,30.0,1280,720,train +hs-YuHv0vUk,5291,5450,1052-69-1575-592,24.0,1920,1080,train +tFfYh9THuGo,14095,14334,267-102-768-603,24.0,1920,1080,train +y8ohTeRKVqs,14302,14513,390-80-919-608,24.0,1920,1080,train +BzeTjn0R2VY,1029,1183,552-46-1066-559,24.0,1920,1080,train +2VBkDNzeRZM,895,1218,434-71-943-580,24.0,1920,1080,train +ni4FV5zL6lM,4609,4750,335-54-702-422,24.0,1280,720,train +TdQ1eaZH-J8,2328,2538,335-109-650-424,30.0,1280,720,train +TLZ6W-Nqv1I,18582,18765,882-67-1473-658,25.0,1920,1080,train +yhMi0tkYyT8,3868,4037,479-75-792-388,30.0,1280,720,train +Kv0Xz0lMKDQ,4906,5347,1221-124-1691-594,25.0,1920,1080,train +3NdSVi38RM8,5706,5978,609-96-930-417,24.0,1280,720,train +16p9YRF0l-g,8670,8827,306-44-664-402,24.0,1280,720,train +fwvbx80QidM,3253,3524,1039-132-1550-643,24.0,1920,1080,train 
+b2Jv8vC-m3g,2261,2415,1031-137-1524-630,24.0,1920,1080,test +fO2htapfNhA,4109,4335,711-60-1206-556,24.0,1920,1080,train +hU-aTB-heU0,2405,2552,794-123-1134-463,30.0,1920,1080,train +ClfBxWPkBKU,14665,15393,180-37-531-388,30.0,1280,720,train +hJnEQCMA5Sg,8095,8256,696-101-1003-407,24.0,1280,720,train +cK74vhqzeeQ,12425,12799,521-54-857-390,25.0,1280,720,test +vC1uxXvPG0Q,14388,14621,346-79-649-381,24.0,1280,720,train +Kv0Xz0lMKDQ,14014,14209,1275-108-1764-597,25.0,1920,1080,train +xnfEBUI_YTE,12337,12493,747-152-1210-614,24.0,1920,1080,train +oEIYHTlbeLA,6578,6716,488-85-819-417,24.0,1280,720,train +5bXiuSrZUKg,21583,21739,311-25-729-443,30.0,1280,720,train +7Q59suYxIec,10991,11156,273-81-737-545,24.0,1920,1080,train +YUUP2MMz7PU,8145,8499,697-185-1111-599,24.0,1920,1080,train +CXvUdCdKTJY,5931,6095,372-93-692-413,24.0,1280,720,train +U51MSK6nSQE,3538,3816,1211-102-1682-573,24.0,1920,1080,train +5MuIMqhT8DM,21992,22166,745-104-1279-638,24.0,1920,1080,train +iwUkbi4_wWo,11939,12086,1164-109-1664-609,25.0,1920,1080,train +8v_GrrQs8ow,15456,15661,758-65-1108-414,24.0,1280,720,train +4E_1AB1rsSw,1526,1706,1219-123-1648-552,24.0,1920,1080,test +wzjVT07bcYA,10698,10878,1021-171-1482-631,24.0,1920,1080,train +fO2htapfNhA,8980,9184,699-60-1187-547,24.0,1920,1080,train +DsrxbqFo41k,19914,20055,470-35-844-408,24.0,1280,720,test +7Lc_dlVrg5M,26555,26698,791-74-1102-386,24.0,1280,720,train +dO1LxQEoGLc,13098,13461,260-272-656-667,30.0,1920,1080,train +BzeTjn0R2VY,23005,23200,548-76-1068-596,24.0,1920,1080,train +JKsHhXwqDqM,30363,30585,648-77-969-398,25.0,1280,720,test +g9M3HIjHuq0,13635,13782,602-62-959-419,24.0,1280,720,train +kaU7IPlg9PA,5940,6114,1264-107-1691-536,25.0,1920,1080,train +oXeAWdHP0uY,6555,6853,696-101-1219-624,25.0,1920,1080,train +KzIp4IzDPG0,394,579,591-139-1125-673,25.0,1920,1080,train +DBBA2LAsepU,4487,4685,785-93-1259-567,24.0,1920,1080,train +SF9qq6vQ3Pg,10707,10856,653-114-1196-657,24.0,1920,1080,train +Ubbmje44gLg,16858,17112,937-168-1302-532,25.0,1920,1080,train +27lMmdmy-b8,18317,18820,453-62-798-407,30.0,1280,720,train +iqKYtA3pK1c,2587,2882,1108-152-1586-629,24.0,1920,1080,train +lyR-K2CZIHQ,11097,11305,810-103-1125-419,24.0,1280,720,train +KyVSuI6JyOs,20209,20340,708-76-1228-596,24.0,1920,1080,train +P2AUat93a8Q,16766,16967,644-66-949-370,24.0,1280,720,train +RjquHTj4HlY,18430,18756,1005-170-1508-673,24.0,1920,1080,train +yMWlkJAqKYU,5708,5838,314-103-636-424,24.0,1280,720,test +aTfwA1TaH3Q,8426,8637,729-54-1070-395,24.0,1280,720,train +GIrfNWed1Mk,11411,11696,1216-172-1687-642,24.0,1920,1080,test +aQsOmGflf1o,11747,11884,279-202-673-596,25.0,1920,1080,train +360bU-vBJOI,17243,17480,772-155-1238-620,24.0,1920,1080,test +xDKth-qS8Jk,6056,6206,1091-110-1564-583,25.0,1920,1080,train +CZNrOzgNWf4,8518,8650,712-71-1041-400,30.0,1280,720,train +O13KwsyDqeE,26321,26512,538-62-887-412,30.0,1280,720,train +i2l9v_seHCc,11649,11793,1120-178-1550-609,24.0,1920,1080,train +oGlsUYGMYks,4000,4134,374-73-718-417,24.0,1280,720,train +kSR8G8mfp84,10386,10553,371-44-708-382,24.0,1280,720,train +1g-1_Y3fvUg,8302,8433,952-95-1435-577,24.0,1920,1080,train +9RTkZaX1cH0,2602,2768,254-91-584-420,30.0,1280,720,train +3VTsIju1dLI,10817,11015,1054-94-1563-604,24.0,1920,1080,train +zbHe4RpgV80,26249,26379,377-194-752-570,30.0,1920,1080,test +ClfBxWPkBKU,15449,15598,172-43-514-385,30.0,1280,720,train +iFTWM7HV2UI,24229,24550,746-115-1233-602,24.0,1920,1080,train +TLZ6W-Nqv1I,3016,3387,1053-100-1536-583,25.0,1920,1080,train +-nKdufEaL8k,454,730,335-136-721-522,25.0,1920,1080,train 
+ERSZb2wHFDw,865,1129,780-178-1209-607,30.0,1920,1080,train +P2AUat93a8Q,20587,20761,601-61-923-383,24.0,1280,720,train +RKK7wGAYP6k,2345,2491,367-93-859-585,24.0,1920,1080,train +sNQfoYm3WI0,1946,2147,1230-169-1649-588,25.0,1920,1080,train +DzNN_4rcIjs,9734,10104,496-51-824-378,25.0,1280,720,train +kdqUhTnAgJU,9444,9587,624-69-946-391,30.0,1280,720,train +lhP52caGW6s,835,1058,622-14-1007-398,25.0,1280,720,train +Ds_rzoyyfF0,10122,10297,581-49-945-414,24.0,1280,720,train +87qLWFZManA,10325,10469,1056-148-1532-623,24.0,1920,1080,train +B43f89Y9f-A,12764,12911,517-269-911-663,30.0,1920,1080,train +UxLRv0FEndM,4055,4197,766-168-1085-487,24.0,1280,720,train +Xb1Wq4NA290,5032,5283,1080-100-1528-547,24.0,1920,1080,train +GIrfNWed1Mk,6437,6574,1240-176-1706-643,24.0,1920,1080,test +1g-1_Y3fvUg,8445,8604,895-90-1395-589,24.0,1920,1080,train +HI7zfpitZpo,15101,15257,484-32-852-400,24.0,1280,720,train +iMFJef3xnmg,30251,30463,848-176-1329-656,30.0,1920,1080,train +GwRzjFQa_Og,17002,17601,623-71-1157-605,29.97,1920,1080,train +1AT5klu_yAQ,7902,8045,230-74-683-527,24.0,1920,1080,train +i2l9v_seHCc,15106,15294,1200-153-1626-579,24.0,1920,1080,train +kmbui1xF8DE,5890,6031,689-139-1207-656,25.0,1920,1080,train +a63t8r70QN0,21652,21817,209-266-548-604,30.0,1920,1080,train +6iM6M_7wBMc,1586,1872,1086-111-1577-602,24.0,1920,1080,train +HzAtOyw6ACw,13374,13694,954-133-1441-619,24.0,1920,1080,train +51k3UASQE5E,13143,13291,490-214-901-624,24.0,1920,1080,train +kLIfeGflNp8,17580,17740,800-53-1102-355,24.0,1280,720,train +iMFJef3xnmg,27492,27819,913-133-1433-653,30.0,1920,1080,train +PrUA8L40Dic,3009,3139,934-64-1488-618,24.0,1920,1080,train +EjSuaeVfE9I,17823,17953,585-70-893-376,25.0,1280,720,test +uTL9tm7S1Io,8277,8561,511-55-846-390,24.0,1280,720,train +BltRufe5kkI,2395,2555,684-215-1127-657,24.0,1920,1080,train +Ubbmje44gLg,9773,9946,947-165-1314-532,25.0,1920,1080,train +ER15KmrB8h8,643,799,753-73-1262-581,24.0,1920,1080,train +pyFcr2WcOyo,7334,7524,1132-247-1496-611,24.0,1920,1080,train +CXvUdCdKTJY,8503,8648,360-90-685-415,24.0,1280,720,train +yNhu0MG_2MA,12473,12938,639-80-1110-551,24.0,1920,1080,train +Juc_yvEkJuc,9008,9193,1091-36-1709-653,25.0,1920,1080,train +HI7zfpitZpo,6547,6782,307-43-661-398,24.0,1280,720,train +7G4WI4oTC5A,22837,23077,639-113-1201-675,30.0,1920,1080,train +y7l5QHRY_AI,6162,6621,446-58-800-412,24.0,1280,720,train +A2DzsgJSwcc,8903,9046,1118-96-1578-556,24.0,1920,1080,test +3zJHwOwirjA,6672,6829,1122-296-1522-696,24.0,1920,1080,train +4R3gFN8sUa8,1178,1328,1027-93-1527-593,24.0,1920,1080,train +eemOZfsrKdg,11777,11954,674-96-1094-517,25.0,1920,1080,train +08z_xW-szwM,12590,12788,687-270-1054-637,30.0,1920,1080,train +hU-aTB-heU0,1360,1503,581-109-1074-601,30.0,1920,1080,train +y7l5QHRY_AI,2159,2351,461-58-811-408,24.0,1280,720,train +_GMVTJ9ZKVc,5372,5533,423-38-776-390,30.0,1280,720,train +vQILP19qABk,2812,2959,517-85-869-436,30.0,1280,720,train +TLZ6W-Nqv1I,10662,10878,974-110-1410-545,25.0,1920,1080,train +-vqV-gHa2FE,6453,6717,451-69-814-432,24.0,1280,720,test +tFfYh9THuGo,14861,15017,305-129-738-562,24.0,1920,1080,train +77HUdJ7Tij0,8006,8207,1201-110-1651-558,25.0,1920,1080,train +cC0KxNeLp1E,2385,2543,311-67-648-404,24.0,1280,720,train +xHHb7R3kx40,17134,17281,803-94-1267-557,24.0,1920,1080,train +LnJwH_PZXnM,21765,22074,811-105-1370-663,25.0,1920,1080,train +ER15KmrB8h8,13352,13527,786-72-1298-584,24.0,1920,1080,train +aI8KiPiVLyY,14355,14839,479-67-862-451,24.0,1280,720,train +DhnBn_c9f8Q,5403,5589,979-135-1451-606,24.0,1920,1080,train 
+guXxy8LH2QM,8246,8417,757-157-1189-590,30.0,1920,1080,train +7Lc_dlVrg5M,23285,23413,751-36-1095-381,24.0,1280,720,train +eemOZfsrKdg,6510,6666,325-143-726-544,25.0,1920,1080,train +PJLT0cAPNfs,10075,10374,1120-175-1560-614,24.0,1920,1080,train +YUUP2MMz7PU,6534,7059,625-183-1040-598,24.0,1920,1080,train +2paoNvG5Nmo,44332,44592,737-203-1246-712,50.0,1920,1080,train +xv4_L5zlYaA,12043,12227,589-172-1029-613,24.0,1920,1080,train +oGlsUYGMYks,14723,14875,320-45-730-455,24.0,1280,720,train +51k3UASQE5E,6415,6658,498-200-931-633,24.0,1920,1080,train +cK74vhqzeeQ,2949,3251,454-59-772-377,25.0,1280,720,test +4E_1AB1rsSw,5649,5841,1222-109-1681-568,24.0,1920,1080,test +9alL95G293s,6729,6873,852-84-1400-632,25.0,1920,1080,train +xDKth-qS8Jk,20110,20478,1044-49-1607-612,25.0,1920,1080,train +pxEcvU0Vp_M,29640,29791,792-193-1114-515,30.0,1920,1080,train +iFTWM7HV2UI,7168,7474,722-99-1226-602,24.0,1920,1080,train +y7l5QHRY_AI,12051,12267,476-66-819-409,24.0,1280,720,train +5MuIMqhT8DM,3969,4099,877-81-1428-633,24.0,1920,1080,train +cK74vhqzeeQ,29350,29504,394-46-722-374,25.0,1280,720,test +501FEzbB1JI,4736,4965,310-89-647-424,30.0,1280,720,train +bWA1gvA5lxU,3611,3801,1160-99-1650-588,25.0,1920,1080,train +jr5mTwfFh00,2750,2896,676-174-1025-524,24.0,1920,1080,test +idfv7Lw4Y_s,5643,5856,834-85-1354-604,25.0,1920,1080,train +kBBmVezBUkg,2268,2413,649-46-1162-557,30.0,1920,1080,train +iMFJef3xnmg,16662,16900,839-179-1282-623,30.0,1920,1080,train +aQsOmGflf1o,16408,16602,285-212-664-589,25.0,1920,1080,train +FauPVZxDXxk,2089,2296,443-48-786-392,24.0,1280,720,train +9alL95G293s,10973,11104,481-279-872-670,25.0,1920,1080,train +IFpK-MqBD5E,8315,8444,393-218-786-610,24.0,1920,1080,train +gypAjPp6eps,3126,3378,246-80-618-452,24.0,1280,720,train +yNhu0MG_2MA,6577,7061,567-74-1030-535,24.0,1920,1080,train +pyFcr2WcOyo,5628,5991,1123-246-1493-617,24.0,1920,1080,train +oGlsUYGMYks,7305,7605,399-79-742-421,24.0,1280,720,train +rtcrqLWZr_0,8160,8422,704-93-1173-563,24.0,1920,1080,train +0R9zjn9BBvA,7885,8045,777-60-1294-577,24.0,1920,1080,test +fO2htapfNhA,7182,7362,624-56-1130-562,24.0,1920,1080,train +iqKYtA3pK1c,17050,17329,1123-150-1605-632,24.0,1920,1080,train +kdAs3UVgIGg,7967,8110,1299-166-1699-567,25.0,1920,1080,train +fO2htapfNhA,12416,12613,587-62-1096-571,24.0,1920,1080,train +5bXiuSrZUKg,5705,6042,412-59-786-432,30.0,1280,720,train +w8mToWb9NC8,2079,2372,888-166-1229-507,30.0,1920,1080,train +v9EKV2nSU8w,12312,12538,868-54-1458-644,24.0,1920,1080,train +S2H_8IfxzT4,5889,6017,253-45-613-406,24.0,1280,720,train +iB4MS1hsWXU,9870,10004,1294-127-1698-531,24.0,1920,1080,test +4CGFPbFqdJ4,5371,5499,749-89-1229-569,24.0,1920,1080,train +2Euof4PnjDk,10238,10431,761-119-1088-446,24.0,1280,720,train +HCgv_HNoJrY,17375,17606,675-71-1014-411,24.0,1280,720,train +501FEzbB1JI,8005,8205,246-86-585-425,30.0,1280,720,train +y7l5QHRY_AI,10228,10443,467-64-811-409,24.0,1280,720,train +GIrfNWed1Mk,13049,13340,1218-176-1695-653,24.0,1920,1080,test +eaCrsBtiYA4,13640,13820,1196-228-1567-598,24.0,1920,1080,train +CZNrOzgNWf4,8681,8822,722-18-1130-426,30.0,1280,720,train +idfv7Lw4Y_s,8604,8915,656-63-1237-643,25.0,1920,1080,train +5ErKrSyUpEo,28084,28318,312-19-926-632,24.0,1920,1080,train +360bU-vBJOI,4334,4656,745-174-1175-606,24.0,1920,1080,test +iMFJef3xnmg,9191,9506,730-167-1161-598,30.0,1920,1080,train +ER15KmrB8h8,4086,4362,724-60-1267-603,24.0,1920,1080,train +QijH4UAqGD8,314,467,304-38-909-643,24.0,1920,1080,train +RKK7wGAYP6k,3580,3754,348-99-826-578,24.0,1920,1080,train 
+jAw8t2g-eVU,3656,3975,560-89-1065-594,30.0,1920,1080,train +hU-aTB-heU0,11708,11916,763-92-1142-471,30.0,1920,1080,train +0R9zjn9BBvA,6194,6354,819-10-1445-635,24.0,1920,1080,test +51k3UASQE5E,2220,2365,538-219-930-611,24.0,1920,1080,train +lAzQWtkPzbI,4332,4625,497-38-804-344,25.0,1280,720,train +mLzTo-y8Ef0,9046,9327,1045-122-1549-625,24.0,1920,1080,train +6wNif5SlN08,19941,20104,808-107-1113-412,24.0,1280,720,train +P2AUat93a8Q,11367,11521,676-63-1000-388,24.0,1280,720,train +lEjegKJwI0M,4691,4896,909-159-1386-636,24.0,1920,1080,train +hiIcwt88o94,11365,11560,777-88-1086-397,24.0,1280,720,train +8Z24LCysq3A,10985,11156,440-46-788-393,25.0,1280,720,train +DOgsYATbV-s,7513,7678,630-79-956-405,24.0,1280,720,train +idfv7Lw4Y_s,10022,10476,727-104-1228-604,25.0,1920,1080,train +Jc9RdbHEFa0,10035,10186,266-73-624-431,24.0,1280,720,test +LiUClSItcy0,4099,4317,483-104-797-419,24.0,1280,720,test +QeIrdqU0o9s,1540,1710,428-76-746-394,24.0,1280,720,train +a63t8r70QN0,778,1084,945-242-1348-645,30.0,1920,1080,train +xDKth-qS8Jk,7409,7689,1021-86-1522-587,25.0,1920,1080,train +QeIrdqU0o9s,14577,14722,380-58-744-422,24.0,1280,720,train +P2AUat93a8Q,15177,15367,513-37-880-404,24.0,1280,720,train +2VBkDNzeRZM,8304,8487,442-86-964-609,24.0,1920,1080,train +r-YXvkWLy-g,2607,2908,328-116-821-609,24.0,1920,1080,train +ni4FV5zL6lM,1731,1884,282-54-650-422,24.0,1280,720,train +_2u_eHHzRto,15410,15606,744-44-1117-416,24.0,1280,720,test +9zC2Bc22QfA,5225,5435,751-104-1247-601,25.0,1920,1080,test +t0Cr64zCc38,5043,5193,1194-116-1640-563,24.0,1920,1080,train +jAw8t2g-eVU,11256,11542,572-88-1058-574,30.0,1920,1080,train +kBBmVezBUkg,2767,3311,614-90-1108-584,30.0,1920,1080,train +Juc_yvEkJuc,12276,12435,1141-57-1742-659,25.0,1920,1080,train +cZe3zcNoIKk,4909,5048,785-151-1247-613,25.0,1920,1080,train +kdqUhTnAgJU,9043,9203,649-67-980-398,30.0,1280,720,train +-2Dj9M71JAc,18345,18501,361-35-698-372,24.0,1280,720,train +uuatZO76MgQ,8004,8184,680-104-1166-590,30.0,1920,1080,train +77HUdJ7Tij0,18050,18284,1074-88-1571-586,25.0,1920,1080,train +2ju61VPdTzw,21558,21719,525-84-829-388,30.0,1280,720,train +ER15KmrB8h8,4364,4507,669-80-1178-588,24.0,1920,1080,train +KPNn880KWfU,1045,1186,516-60-842-385,25.0,1280,720,train +hn8N8p9P5gw,20293,20452,306-141-743-578,24.0,1920,1080,test +CstD6O95L-o,25087,25332,391-68-720-397,30.0,1280,720,train +B43f89Y9f-A,18832,19074,1099-264-1479-644,30.0,1920,1080,train +yNhu0MG_2MA,17140,17545,675-77-1144-547,24.0,1920,1080,train +5bXiuSrZUKg,18047,18212,314-60-659-406,30.0,1280,720,train +HCgv_HNoJrY,7885,8024,660-61-1014-414,24.0,1280,720,train +I3BJVaioX_k,23940,24085,746-80-1263-597,25.0,1920,1080,train +xHHb7R3kx40,12650,12856,813-97-1274-559,24.0,1920,1080,train +mgcjr1yz7ow,9089,9226,1162-53-1715-604,25.0,1920,1080,train +4zLmWpMDY8Q,17493,17716,252-232-616-596,24.0,1920,1080,train +jM9Q3xP2iBo,9779,10054,433-72-800-438,25.0,1280,720,train +08z_xW-szwM,2740,3248,666-243-1064-642,30.0,1920,1080,train +-eBUcBfkVCo,40827,40969,454-153-765-464,30.0,1280,720,train +3m6dV7Xo3Vc,2714,2899,703-119-1014-430,25.0,1280,720,train +pyFcr2WcOyo,3963,4272,1129-239-1507-616,24.0,1920,1080,train +_GMVTJ9ZKVc,24344,24566,497-36-872-412,30.0,1280,720,train +DzNN_4rcIjs,17737,18157,543-50-873-378,25.0,1280,720,train +yLfCCcVDUiU,19556,19778,860-249-1255-645,24.0,1920,1080,train +3-UcGCnJ14c,5226,5359,1154-172-1566-584,24.0,1920,1080,train +o_U-_akINwQ,16275,16443,381-299-783-701,25.0,1920,1080,test +bVV2Zk88beY,9085,9400,713-59-1049-393,30.0,1280,720,train 
+Sm5xF-UYgdg,28097,28227,660-90-978-407,25.0,1280,720,train +FhP0AfZdRZ4,1304,1544,1094-185-1509-600,30.0,1920,1080,train +501FEzbB1JI,474,620,236-54-590-408,30.0,1280,720,train +0B543Zkqq88,1501,1794,735-98-1282-645,25.0,1920,1080,train +g3vSYbT1Aco,15701,15837,543-106-1097-659,24.0,1920,1080,train +Cm1Ij-jRhpQ,17133,17334,909-62-1445-598,24.0,1920,1080,train +pyFcr2WcOyo,2005,2344,1132-242-1501-610,24.0,1920,1080,train +1AT5klu_yAQ,13348,13488,152-22-691-561,24.0,1920,1080,train +v9EKV2nSU8w,9175,9331,906-99-1408-601,24.0,1920,1080,train +oGlsUYGMYks,10988,11156,337-64-715-442,24.0,1280,720,train +PJLT0cAPNfs,3638,3858,1119-182-1548-612,24.0,1920,1080,train +_GMVTJ9ZKVc,5571,5763,539-45-895-400,30.0,1280,720,train +FDhlOovaGrI,6410,6567,323-41-704-422,24.0,1280,720,test +r-YXvkWLy-g,688,839,221-116-753-648,24.0,1920,1080,train +kaU7IPlg9PA,1305,1594,1196-131-1652-587,25.0,1920,1080,train +P5Mpo4JQZhw,14685,14882,518-64-827-373,24.0,1280,720,train +87ro2-kT7kQ,8970,9107,689-76-1030-417,25.0,1280,720,train +zamvnyBB-SU,2336,2555,1109-24-1663-578,24.0,1920,1080,train +Ubbmje44gLg,13378,13631,924-164-1297-537,25.0,1920,1080,train +tZYkjaKNr_o,11836,11994,333-44-670-383,25.0,1280,720,train +xDKth-qS8Jk,10004,10156,1004-57-1549-602,25.0,1920,1080,train +dsJWs6Z6eNs,8838,8983,963-125-1500-662,24.0,1920,1080,train +_ltDJynTduY,27641,27900,429-79-758-408,30.0,1280,720,test +4zLmWpMDY8Q,14283,14520,242-240-595-593,24.0,1920,1080,train +w8mToWb9NC8,22789,23007,809-162-1143-497,30.0,1920,1080,train +95ovIJ3dsNk,5138,5270,519-87-832-401,30.0,1280,720,train +J4r9pgx_95E,5979,6112,1076-100-1571-596,24.0,1920,1080,train +501FEzbB1JI,12496,12671,285-80-641-436,30.0,1280,720,train +IFpK-MqBD5E,6644,6818,162-135-543-515,24.0,1920,1080,train +4CGFPbFqdJ4,11593,11803,806-87-1288-569,24.0,1920,1080,train +5MuIMqhT8DM,17795,17936,1010-68-1574-633,24.0,1920,1080,train +y1yK36RMyRE,1835,2067,316-31-884-598,24.0,1920,1080,train +U51MSK6nSQE,294,547,1226-107-1680-561,24.0,1920,1080,train +5zWmdHmJMd0,11878,12059,285-76-825-616,24.0,1920,1080,train +WfTZ5iIUn4s,6178,6319,744-76-1052-384,24.0,1280,720,train +O13KwsyDqeE,16325,16461,521-65-838-382,30.0,1280,720,train +27lMmdmy-b8,2064,2457,408-48-804-445,30.0,1280,720,train +yNhu0MG_2MA,17959,18288,657-80-1126-547,24.0,1920,1080,train +cK74vhqzeeQ,8308,8505,493-62-810-377,25.0,1280,720,test +zORUUqJd81M,3809,3958,678-56-986-363,24.0,1280,720,train +t0Cr64zCc38,16243,16389,1037-133-1511-607,24.0,1920,1080,train +iB4MS1hsWXU,4613,4750,891-121-1358-587,24.0,1920,1080,test +t0Cr64zCc38,8533,8769,981-126-1482-627,24.0,1920,1080,train +lyR-K2CZIHQ,4879,5317,711-37-1095-421,24.0,1280,720,train +FVUkKKc3Vvk,1811,2180,178-111-662-594,24.0,1920,1080,train +501FEzbB1JI,17586,17749,262-75-630-443,30.0,1280,720,train +9alL95G293s,25918,26055,601-233-970-602,25.0,1920,1080,train +iMFJef3xnmg,15411,15590,831-190-1238-597,30.0,1920,1080,train +9EVEmZ2c_es,18671,19115,753-25-1127-398,30.0,1280,720,train +u08T3A7slkE,695,870,1111-62-1658-609,24.0,1920,1080,train +iw7nPE2jioU,16022,16209,680-167-1122-609,24.0,1920,1080,train +pyFcr2WcOyo,646,810,1108-210-1520-621,24.0,1920,1080,train +wxStlzunxCw,8846,8987,1005-166-1356-517,25.0,1920,1080,train +0B543Zkqq88,7553,7716,624-85-1160-621,25.0,1920,1080,train +ogCIqaCe2zI,5243,5379,1015-122-1507-614,24.0,1920,1080,test +oGlsUYGMYks,1542,1847,376-78-724-427,24.0,1280,720,train +FauPVZxDXxk,1387,1523,454-49-791-387,24.0,1280,720,train +kvdHqS3ryw0,14488,14788,1238-182-1651-594,24.0,1920,1080,train 
+CsECS5qsGLs,13979,14117,805-90-1132-417,25.0,1280,720,train +F2XPF6rQ6fs,9428,9592,905-78-1394-568,24.0,1920,1080,train +A2DzsgJSwcc,9831,10018,1145-62-1649-566,24.0,1920,1080,test +360bU-vBJOI,15956,16115,766-138-1264-635,24.0,1920,1080,test +XAcARiiK5uY,9289,9644,452-43-839-430,30.0,1280,720,train +-eBUcBfkVCo,41225,41363,429-149-739-460,30.0,1280,720,train +WfTZ5iIUn4s,3925,4057,581-49-913-381,24.0,1280,720,train +1fZ915L1w7I,16432,16587,921-105-1375-558,30.0,1920,1080,train +jr5mTwfFh00,12699,12847,753-160-1139-544,24.0,1920,1080,test +fWqKalpYgLo,910,1077,308-143-761-597,25.0,1920,1080,train +EEjyPqyFe_s,13713,13912,643-69-976-403,24.0,1280,720,train +87qLWFZManA,14062,14254,1061-97-1584-620,24.0,1920,1080,train +O13KwsyDqeE,16171,16305,509-65-855-412,30.0,1280,720,train +9RTkZaX1cH0,8900,9063,405-84-744-425,30.0,1280,720,train +pyFcr2WcOyo,4797,5052,1127-246-1491-610,24.0,1920,1080,train +85hbMtegrLc,10034,10202,1226-155-1655-585,24.0,1920,1080,train +BzeTjn0R2VY,23639,23997,241-21-853-633,24.0,1920,1080,train +7Lc_dlVrg5M,8739,8888,666-165-994-492,24.0,1280,720,train +yMWlkJAqKYU,3981,4196,310-96-643-428,24.0,1280,720,test +3VTsIju1dLI,14775,14964,1004-66-1565-627,24.0,1920,1080,train +FFG2rilqT2g,2775,3009,253-100-580-426,30.0,1280,720,train +08z_xW-szwM,927,1086,680-243-1076-639,30.0,1920,1080,train +xnfEBUI_YTE,9416,9588,788-132-1266-610,24.0,1920,1080,train +MO0L_LY2hRA,5613,5767,362-36-687-360,24.0,1280,720,test +UUfUAJD3qrU,40504,40818,298-31-643-375,30.0,854,480,train +yLfCCcVDUiU,13269,13521,961-337-1324-699,24.0,1920,1080,train +zamvnyBB-SU,11902,12117,1067-59-1583-574,24.0,1920,1080,train +08z_xW-szwM,17347,17580,681-262-1066-648,30.0,1920,1080,train +kvdHqS3ryw0,3070,3382,1100-107-1587-594,24.0,1920,1080,train +08z_xW-szwM,11522,11862,652-258-1040-648,30.0,1920,1080,train +GIrfNWed1Mk,8267,8503,1187-171-1676-660,24.0,1920,1080,test +_GMVTJ9ZKVc,27683,27821,586-46-951-410,30.0,1280,720,train +JTN9Nx8VYtk,10295,10606,500-41-874-415,30.0,1280,720,train +DTIjvPLkJgo,2688,2836,1161-225-1564-628,24.0,1920,1080,train +d0LQCXCiW8g,10398,10703,215-127-539-451,30.0,1280,720,train +_waBFUg_oT8,4048,4312,393-54-720-380,24.0,1280,720,train +PrK0CifulU0,11426,11573,807-75-1128-397,24.0,1280,720,train +5zWmdHmJMd0,2117,2263,291-73-813-595,24.0,1920,1080,train +87qLWFZManA,896,1040,946-61-1499-614,24.0,1920,1080,train +ieSV8-isy3M,17666,17799,973-47-1501-575,24.0,1920,1080,train +C0dmOPDLKhY,9486,9757,669-55-1235-621,30.0,1920,1080,test +rYwTA5RA9eU,9487,9908,597-106-932-441,25.0,1280,720,train +VJoQj00RZHg,5272,5797,476-93-806-423,25.0,1280,720,train +ClfBxWPkBKU,22832,23375,219-38-574-393,30.0,1280,720,train +dkmpRQdJslI,2364,3025,355-9-764-419,24.0,1280,720,train +FDhlOovaGrI,10151,10337,321-61-657-396,24.0,1280,720,test +ER0G2S9r7aE,8693,8854,687-131-1127-571,25.0,1920,1080,train +i2l9v_seHCc,6067,6252,1184-171-1615-603,24.0,1920,1080,train +4E_1AB1rsSw,3093,3391,1135-122-1579-566,24.0,1920,1080,test +uTL9tm7S1Io,1318,1487,787-55-1103-372,24.0,1280,720,train +aTfwA1TaH3Q,10853,11144,728-50-1060-383,24.0,1280,720,train +zfbigT9I0Qg,5803,6023,1218-167-1660-609,24.0,1920,1080,train +oEIYHTlbeLA,5557,5874,512-87-836-411,24.0,1280,720,train +iwUkbi4_wWo,8228,8385,1214-158-1632-576,25.0,1920,1080,train +oEIYHTlbeLA,9044,9272,464-89-790-415,24.0,1280,720,train +V1yW5IsnSjo,10827,11207,599-129-942-472,30.0,1280,720,train +501FEzbB1JI,4349,4565,266-77-629-441,30.0,1280,720,train +UMhLBPPtlrY,20936,21145,714-77-1035-397,30.0,1280,720,test 
+9alL95G293s,4722,4913,820-105-1369-654,25.0,1920,1080,train +iMFJef3xnmg,28457,28614,888-184-1312-608,30.0,1920,1080,train +iMFJef3xnmg,2506,3479,692-153-1190-651,30.0,1920,1080,train +9KvZy09vaNg,6030,6176,853-85-1310-542,24.0,1920,1080,train +kaU7IPlg9PA,16544,16800,1055-92-1518-556,25.0,1920,1080,train +H3ddtbeduoo,5290,5422,316-63-672-419,24.0,1280,720,train +xDKth-qS8Jk,6213,6353,1122-101-1611-590,25.0,1920,1080,train +HXbsVbFAczg,21301,21435,708-87-1009-388,24.0,1280,720,train +a63t8r70QN0,392,568,966-239-1370-643,30.0,1920,1080,train +iWHq2m_Hwhw,22482,22737,211-119-550-458,30.0,1280,720,train +cK74vhqzeeQ,9460,9807,501-62-816-378,25.0,1280,720,test +rtcrqLWZr_0,7128,7347,723-90-1209-577,24.0,1920,1080,train +xv4_L5zlYaA,10593,10795,630-172-1066-608,24.0,1920,1080,train +TLZ6W-Nqv1I,5900,6080,971-100-1425-554,25.0,1920,1080,train +LcNvkhS4UYg,16989,17205,797-166-1162-532,30.0,1920,1080,test +i2l9v_seHCc,12657,12921,1190-181-1609-601,24.0,1920,1080,train +9RTkZaX1cH0,15261,15406,304-71-668-435,30.0,1280,720,train +y1yK36RMyRE,1231,1445,374-58-871-555,24.0,1920,1080,train +kmbui1xF8DE,16549,16842,636-116-1192-672,25.0,1920,1080,train +GIrfNWed1Mk,4228,4452,1162-172-1645-656,24.0,1920,1080,test +2paoNvG5Nmo,13920,14056,850-216-1313-680,50.0,1920,1080,train +cmpu58yv8-g,500,709,712-76-1062-425,24.0,1280,720,train +ni4FV5zL6lM,6605,6736,316-48-706-438,24.0,1280,720,train +eemOZfsrKdg,5733,5891,442-137-845-542,25.0,1920,1080,train +5ErKrSyUpEo,11210,11569,288-53-843-606,24.0,1920,1080,train +PrK0CifulU0,8335,8480,753-76-1071-395,24.0,1280,720,train +kmbui1xF8DE,7707,7843,1180-199-1556-573,25.0,1920,1080,train +i2l9v_seHCc,2959,3112,1198-160-1603-566,24.0,1920,1080,train +4CGFPbFqdJ4,14496,14792,747-88-1252-593,24.0,1920,1080,train +oGlsUYGMYks,2716,2851,395-80-731-417,24.0,1280,720,train +08z_xW-szwM,3969,4398,661-251-1055-645,30.0,1920,1080,train +B43f89Y9f-A,2416,2548,781-257-1180-657,30.0,1920,1080,train +lAzQWtkPzbI,18377,18653,447-17-802-372,25.0,1280,720,train +sNQfoYm3WI0,4932,5164,1271-142-1724-595,25.0,1920,1080,train +XAcARiiK5uY,14958,15562,479-26-887-433,30.0,1280,720,train +BHY0FxzoKZE,7142,7320,323-59-849-585,24.0,1920,1080,train +9EVEmZ2c_es,12484,12858,730-48-1098-416,30.0,1280,720,train +SF9qq6vQ3Pg,9372,9571,725-143-1199-617,24.0,1920,1080,train +PJLT0cAPNfs,13758,14084,1061-165-1517-621,24.0,1920,1080,train +2Euof4PnjDk,9678,9838,769-108-1110-450,24.0,1280,720,train +GwRzjFQa_Og,917,1051,694-86-1210-603,29.97,1920,1080,train +ogCIqaCe2zI,3662,3808,999-110-1507-618,24.0,1920,1080,test +kaU7IPlg9PA,4604,4740,1138-120-1546-529,25.0,1920,1080,train +pvBlSFVmoaw,1577,1748,250-77-582-409,24.0,1280,720,train +8DDgHq9ewOo,18079,18308,956-49-1491-584,24.0,1920,1080,train +HzAtOyw6ACw,14634,14798,850-111-1389-651,24.0,1920,1080,train +cK74vhqzeeQ,6322,6591,545-56-861-372,25.0,1280,720,test +KCr8s57hdzY,405,554,639-87-1150-598,24.0,1920,1080,train +aupPrDnhrKA,5225,5367,543-127-993-577,24.0,1920,1080,train +kBBmVezBUkg,20514,20738,606-84-1080-558,30.0,1920,1080,train +5aH2Ppjpcho,2059,2228,491-128-817-454,25.0,1280,720,test +Juc_yvEkJuc,10766,10999,1118-96-1606-584,25.0,1920,1080,train +fO2htapfNhA,6421,6742,676-43-1210-576,24.0,1920,1080,train +o_U-_akINwQ,1686,1870,459-337-794-672,25.0,1920,1080,test +yhMi0tkYyT8,18774,19010,511-52-864-404,30.0,1280,720,train +KyVSuI6JyOs,5673,5871,726-68-1250-591,24.0,1920,1080,train +SF9qq6vQ3Pg,13355,13490,785-158-1227-599,24.0,1920,1080,train +DzNN_4rcIjs,18707,18889,556-63-870-377,25.0,1280,720,train 
+fO2htapfNhA,1457,1756,661-59-1148-548,24.0,1920,1080,train +7a5NyUITbyk,12445,12708,936-83-1461-607,24.0,1920,1080,train +_GMVTJ9ZKVc,20813,20967,280-37-658-415,30.0,1280,720,train +j7fRIGphgtk,1015,1189,676-47-1003-373,24.0,1280,720,train +xHHb7R3kx40,21206,21397,757-212-1143-599,24.0,1920,1080,train +iFTWM7HV2UI,7478,7748,717-113-1191-589,24.0,1920,1080,train +2paoNvG5Nmo,49343,49566,852-240-1259-647,50.0,1920,1080,train +27lMmdmy-b8,21212,21393,443-59-788-404,30.0,1280,720,train +2VBkDNzeRZM,3897,4025,411-95-912-597,24.0,1920,1080,train +DsrxbqFo41k,2305,2660,445-34-823-411,24.0,1280,720,test +hokUdXYRe2Q,4140,4391,768-165-1192-588,30.0,1920,1080,train +UoMpbL_Fsig,3194,3328,423-57-751-385,24.0,1280,720,train +2wseM6wWd74,8329,8923,609-41-919-350,24.0,1280,720,train +vQILP19qABk,4223,4468,653-81-1021-450,30.0,1280,720,train +-7ORAKULel4,14254,14392,855-128-1318-592,25.0,1920,1080,train +I6xuJu7gLe0,8542,8850,288-132-761-606,24.0,1920,1080,train +kdqUhTnAgJU,19602,19930,676-62-993-380,30.0,1280,720,train +YUUP2MMz7PU,10820,10971,726-182-1138-595,24.0,1920,1080,train +Depn_GsxSqo,14520,14730,722-73-1243-593,24.0,1920,1080,test +21hgbMa_sVc,6761,6899,279-121-756-598,25.0,1920,1080,train +VJoQj00RZHg,1878,2012,505-98-818-411,25.0,1280,720,train +1k89OTpDvIU,379,508,740-63-1084-408,30.0,1280,720,train +UMhLBPPtlrY,7039,7334,693-85-1020-412,30.0,1280,720,test +aI8KiPiVLyY,7024,7793,508-105-812-409,24.0,1280,720,train +r-YXvkWLy-g,8994,9200,268-115-744-591,24.0,1920,1080,train +FqrLUtIFVjs,20245,20384,437-60-762-385,24.0,1280,720,train +y8ohTeRKVqs,10711,10938,391-80-928-616,24.0,1920,1080,train +1g-1_Y3fvUg,5582,5752,802-102-1281-580,24.0,1920,1080,train +xnfEBUI_YTE,7306,7441,583-132-1047-596,24.0,1920,1080,train +y7l5QHRY_AI,5121,5256,422-54-773-406,24.0,1280,720,train +ERSZb2wHFDw,2042,2331,798-185-1221-608,30.0,1920,1080,train +SF9qq6vQ3Pg,1887,2050,761-155-1207-601,24.0,1920,1080,train +I3BJVaioX_k,6305,6550,577-91-1105-619,25.0,1920,1080,train +iMFJef3xnmg,11312,11639,752-183-1176-607,30.0,1920,1080,train +360bU-vBJOI,11780,11947,838-179-1260-601,24.0,1920,1080,test +xHHb7R3kx40,16563,16747,793-80-1288-574,24.0,1920,1080,train +pyFcr2WcOyo,15311,15623,1117-252-1488-622,24.0,1920,1080,train +7a5NyUITbyk,10089,10284,979-101-1460-582,24.0,1920,1080,train +1fZ915L1w7I,11224,11372,831-214-1177-561,30.0,1920,1080,train +B43f89Y9f-A,6475,6766,595-233-1076-713,30.0,1920,1080,train +8UNCvk9YXOo,12271,12525,722-51-1242-572,24.0,1920,1080,train +lmyZMtPVodo,16106,16268,719-45-1047-373,24.0,1280,720,train +IyrcWMvu9ek,1087,1398,703-207-1113-617,30.0,1920,1080,test +v9EKV2nSU8w,3498,3692,983-92-1492-602,24.0,1920,1080,train +zamvnyBB-SU,10161,10442,1100-32-1665-596,24.0,1920,1080,train +UxLRv0FEndM,14337,14650,176-36-501-359,24.0,1280,720,train +8UNCvk9YXOo,13811,14011,650-50-1176-577,24.0,1920,1080,train +Uq3MCf_6HtI,5056,5193,1137-167-1662-691,24.0,1920,1080,train +0gks6ceq4eQ,4737,4870,1128-66-1659-597,24.0,1920,1080,test +oGlsUYGMYks,13337,13619,365-63-727-424,24.0,1280,720,train +ONs9FCY74p0,14774,14915,1146-157-1586-596,24.0,1920,1080,train +JKS7HWy2TRU,9526,9656,1021-162-1498-639,24.0,1920,1080,train +eemOZfsrKdg,7584,7919,462-142-864-544,25.0,1920,1080,train +AI7M-JTC6_w,4437,4643,1038-111-1531-604,24.0,1920,1080,train +a63t8r70QN0,19338,19510,199-259-549-609,30.0,1920,1080,train +-eBUcBfkVCo,37616,37807,431-168-753-491,30.0,1280,720,train +iMFJef3xnmg,12668,13553,783-153-1264-635,30.0,1920,1080,train +TLZ6W-Nqv1I,13173,13386,1032-75-1568-611,25.0,1920,1080,train 
+Ubbmje44gLg,12261,12772,921-161-1296-536,25.0,1920,1080,train +jAw8t2g-eVU,1308,1607,488-76-990-577,30.0,1920,1080,train +51k3UASQE5E,5090,5220,525-211-930-615,24.0,1920,1080,train +zMWYQRKuc5M,12618,12983,601-37-941-378,24.0,1280,720,train +iFTWM7HV2UI,29357,29510,696-113-1192-608,24.0,1920,1080,train +fO2htapfNhA,5827,6198,739-56-1241-558,24.0,1920,1080,train +08z_xW-szwM,10578,10874,678-271-1043-635,30.0,1920,1080,train +Juc_yvEkJuc,3265,3570,1156-132-1604-580,25.0,1920,1080,train +501FEzbB1JI,7199,7339,123-83-472-433,30.0,1280,720,train +xgW-0egOWVg,13239,13400,957-92-1434-569,24.0,1920,1080,train +bVV2Zk88beY,7966,8114,700-63-1025-388,30.0,1280,720,train +wzjVT07bcYA,13030,13181,1115-157-1608-650,24.0,1920,1080,train +P2AUat93a8Q,18396,18638,662-65-972-375,24.0,1280,720,train +CiLn-GrcuEs,14748,14903,1212-370-1593-750,30.0,1920,1080,train +esD6aaIjhek,3219,3545,529-55-858-384,30.0,1280,720,train +V1yW5IsnSjo,11245,11471,611-132-941-462,30.0,1280,720,train +_GMVTJ9ZKVc,462,666,535-46-890-402,30.0,1280,720,train +SF9qq6vQ3Pg,4021,4168,689-132-1190-633,24.0,1920,1080,train +KzIp4IzDPG0,9843,10044,688-118-1224-654,25.0,1920,1080,train +zamvnyBB-SU,5203,5390,1059-60-1561-562,24.0,1920,1080,train +vC1uxXvPG0Q,11520,11705,339-75-651-387,24.0,1280,720,train +77HUdJ7Tij0,3810,3979,1230-121-1689-581,25.0,1920,1080,train +QijH4UAqGD8,5293,5537,325-29-922-625,24.0,1920,1080,train +idfv7Lw4Y_s,11935,12118,930-98-1444-612,25.0,1920,1080,train +ONs9FCY74p0,11685,11818,1153-151-1591-588,24.0,1920,1080,train +r-YXvkWLy-g,6046,6181,345-116-827-599,24.0,1920,1080,train +A-OvJjAw2Jo,1037,1253,470-75-800-405,24.0,1280,720,train +pxEcvU0Vp_M,18605,18735,716-77-1235-596,30.0,1920,1080,train +Kc6hkHGHQQc,1709,1884,1062-88-1565-591,24.0,1920,1080,test +Rhcrbcg8HBw,9416,9616,1214-131-1626-543,24.0,1920,1080,train +hU-aTB-heU0,12892,13070,591-108-1093-610,30.0,1920,1080,train +jAw8t2g-eVU,9664,9949,578-88-1067-576,30.0,1920,1080,train +3aYWvujaT6M,6500,6725,543-64-899-419,30.0,1280,720,train +yLfCCcVDUiU,2784,3077,550-339-913-701,24.0,1920,1080,train +UoMpbL_Fsig,12024,12185,430-51-771-392,24.0,1280,720,train +51k3UASQE5E,13964,14106,508-199-962-652,24.0,1920,1080,train +aQsOmGflf1o,10131,10319,378-189-794-605,25.0,1920,1080,train +0JGarsZE1rk,4129,4305,300-52-830-581,24.0,1920,1080,train +eaCrsBtiYA4,10211,10442,370-87-950-667,24.0,1920,1080,train +y8ohTeRKVqs,9891,10058,364-76-900-611,24.0,1920,1080,train +U51MSK6nSQE,12147,12311,1225-59-1779-613,24.0,1920,1080,train +9RTkZaX1cH0,18692,18836,301-83-628-410,30.0,1280,720,train +wmMrZyVANUo,18707,18920,367-49-892-574,24.0,1920,1080,train +7uRPPaYuu44,6495,6641,148-81-640-572,24.0,1920,1080,train +t0Cr64zCc38,9488,9667,1124-118-1599-594,24.0,1920,1080,train +_QdPW8JrYzQ,1038,1185,399-38-775-413,25.0,1280,720,train +yMWlkJAqKYU,1400,1588,315-108-618-411,24.0,1280,720,test +ERSZb2wHFDw,2732,2879,780-174-1217-610,30.0,1920,1080,train +kmbui1xF8DE,9104,9337,1088-202-1511-625,25.0,1920,1080,train +7G4WI4oTC5A,16990,17136,638-105-1176-643,30.0,1920,1080,train +yNhu0MG_2MA,15508,15678,701-77-1155-530,24.0,1920,1080,train +51k3UASQE5E,6923,7105,481-201-908-629,24.0,1920,1080,train +o_U-_akINwQ,4468,4671,431-298-841-708,25.0,1920,1080,test +77HUdJ7Tij0,18717,18954,987-131-1426-570,25.0,1920,1080,train +l--QYXCecew,14583,14718,639-180-1081-621,25.0,1920,1080,train +8rZzHkpyPkc,6670,6815,650-66-994-410,25.0,1280,720,train +ERSZb2wHFDw,12868,13105,783-173-1224-614,30.0,1920,1080,train +uuatZO76MgQ,4778,4975,721-100-1186-565,30.0,1920,1080,train 
+2ix8JEqCJ1s,16359,16528,870-99-1431-658,24.0,1920,1080,train +2VBkDNzeRZM,7389,7532,313-62-893-641,24.0,1920,1080,train +YUUP2MMz7PU,15781,15939,776-183-1188-595,24.0,1920,1080,train +360bU-vBJOI,18018,18280,746-173-1174-601,24.0,1920,1080,test +3zJHwOwirjA,13513,13693,394-127-845-577,24.0,1920,1080,train +kaU7IPlg9PA,583,748,1147-131-1585-569,25.0,1920,1080,train +kmbui1xF8DE,5460,5660,683-133-1205-655,25.0,1920,1080,train +G88JZ1DIdg8,12398,12547,1100-101-1577-579,24.0,1920,1080,train +QeIrdqU0o9s,8359,8523,416-88-725-397,24.0,1280,720,train +yNhu0MG_2MA,21474,21753,652-84-1114-547,24.0,1920,1080,train +-7ORAKULel4,21686,21824,867-233-1280-648,25.0,1920,1080,train +rtcrqLWZr_0,4780,4953,688-74-1208-593,24.0,1920,1080,train +FDhlOovaGrI,12016,12191,375-59-719-403,24.0,1280,720,test +08z_xW-szwM,10369,10560,677-270-1048-641,30.0,1920,1080,train +jAw8t2g-eVU,19774,19983,632-129-1098-596,30.0,1920,1080,train +3zJHwOwirjA,15403,15918,255-140-752-639,24.0,1920,1080,train +kyaiTGmwxnU,3267,3658,448-51-850-452,24.0,1280,720,train +ER15KmrB8h8,1744,1924,743-56-1293-605,24.0,1920,1080,train +jM9Q3xP2iBo,17155,17284,464-78-831-444,25.0,1280,720,train +QijH4UAqGD8,6989,7168,307-41-887-622,24.0,1920,1080,train +4CGFPbFqdJ4,9268,9400,722-88-1191-558,24.0,1920,1080,train +i2l9v_seHCc,1452,1604,1232-149-1657-574,24.0,1920,1080,train +Juc_yvEkJuc,6474,6697,1111-97-1598-584,25.0,1920,1080,train +77HUdJ7Tij0,10456,10670,1247-114-1715-584,25.0,1920,1080,train +I3BJVaioX_k,10461,10664,627-89-1154-616,25.0,1920,1080,train +i2l9v_seHCc,4879,5228,1187-172-1612-598,24.0,1920,1080,train +aI8KiPiVLyY,4850,5431,459-80-822-443,24.0,1280,720,train +QijH4UAqGD8,4110,4372,302-65-838-601,24.0,1920,1080,train +kdAs3UVgIGg,2788,3025,1319-148-1751-581,25.0,1920,1080,train +CXy1byguvJY,13009,13279,877-68-1470-663,24.0,1920,1080,train +2K-P7Hv_Zwg,6605,6738,522-60-849-388,25.0,1280,720,train +0txtVkBUdSQ,3002,3213,448-97-764-414,24.0,1280,720,train +0gks6ceq4eQ,13288,13470,1029-71-1555-596,24.0,1920,1080,test +VJoQj00RZHg,8342,8614,422-86-741-405,25.0,1280,720,train +XAcARiiK5uY,11229,11481,464-48-837-421,30.0,1280,720,train +vC1uxXvPG0Q,10730,10873,348-76-651-379,24.0,1280,720,train +4n8qT0vQbWk,22484,22851,765-35-1131-400,30.0,1280,720,train +9RTkZaX1cH0,14094,14239,338-78-696-437,30.0,1280,720,train +3zJHwOwirjA,17383,17537,450-153-897-600,24.0,1920,1080,train +cUee1I69nFs,3334,3589,651-68-1228-644,25.0,1920,1080,train +tuvxXnQrRv8,1194,1332,1229-90-1794-656,25.0,1920,1080,train +i_wtaoHCw3k,16581,16720,288-125-755-592,25.0,1920,1080,train +kmbui1xF8DE,12681,12899,693-133-1214-653,25.0,1920,1080,train +8Dv2Hdf5TRg,5867,6000,681-74-1003-397,24.0,1280,720,test +kyaiTGmwxnU,4489,4858,470-74-800-403,24.0,1280,720,train +TdQ1eaZH-J8,20204,20372,269-84-626-442,30.0,1280,720,train +6pSMgmL7vF0,4495,4645,1153-43-1714-604,24.0,1920,1080,train +CTn0HYmMAYM,9198,9400,1055-207-1514-666,25.0,1920,1080,train +1g-1_Y3fvUg,4391,4527,950-89-1439-578,24.0,1920,1080,train +OEydHbngSz0,6238,6368,1094-101-1535-543,24.0,1920,1080,train +Cetg4gu0oQQ,9125,9306,591-59-1129-596,24.0,1920,1080,train +5zWmdHmJMd0,16525,16674,350-77-875-600,24.0,1920,1080,train +oEIYHTlbeLA,307,437,493-119-834-461,24.0,1280,720,train +bVV2Zk88beY,6417,6547,712-51-1044-383,30.0,1280,720,train +4tQee2_JgUo,473,607,403-154-812-563,25.0,1920,1080,train +zMWYQRKuc5M,11627,11919,620-41-953-374,24.0,1280,720,train +ni4FV5zL6lM,6740,6890,260-46-637-423,24.0,1280,720,train +KzIp4IzDPG0,9615,9834,650-104-1186-640,25.0,1920,1080,train 
+ClfBxWPkBKU,17815,18215,182-46-547-411,30.0,1280,720,train +DGvPfD1Dd1U,22944,23232,1150-107-1627-584,25.0,1920,1080,train +0B543Zkqq88,9602,9830,642-134-1116-608,25.0,1920,1080,train +XAcARiiK5uY,16833,17036,473-49-837-413,30.0,1280,720,train +pxEcvU0Vp_M,19076,19204,838-92-1346-599,30.0,1920,1080,train +cfzkBGgxXGE,25311,25497,198-247-586-635,30.0,1920,1080,test +iMFJef3xnmg,5282,5594,710-133-1220-644,30.0,1920,1080,train +40riCqvRoMs,14481,14662,447-71-765-389,24.0,1280,720,train +TSomH3iGgYY,5756,5885,1169-182-1608-621,24.0,1920,1080,train +0JGarsZE1rk,14546,14715,331-63-850-582,24.0,1920,1080,train +08z_xW-szwM,2054,2416,663-243-1071-651,30.0,1920,1080,train +51k3UASQE5E,14552,14689,510-179-938-607,24.0,1920,1080,train +2ju61VPdTzw,18567,18834,487-80-801-393,30.0,1280,720,train +7G4WI4oTC5A,16329,16522,717-101-1231-615,30.0,1920,1080,train +8DDgHq9ewOo,18753,19015,1057-67-1552-561,24.0,1920,1080,train +I3BJVaioX_k,22153,22288,596-94-1103-601,25.0,1920,1080,train +9RTkZaX1cH0,4429,4570,233-80-575-422,30.0,1280,720,train +cK74vhqzeeQ,4026,4382,474-57-788-371,25.0,1280,720,test +2paoNvG5Nmo,6802,6951,809-238-1215-644,50.0,1920,1080,train +1k89OTpDvIU,3607,3797,730-55-1086-411,30.0,1280,720,train +_7t6Ry4PlTo,637,873,1168-93-1683-609,25.0,1920,1080,train +eemOZfsrKdg,4397,4595,389-127-829-566,25.0,1920,1080,train +9RTkZaX1cH0,19531,19715,311-77-650-416,30.0,1280,720,train +kmbui1xF8DE,306,614,703-107-1244-649,25.0,1920,1080,train +oXeAWdHP0uY,8423,8742,669-86-1221-639,25.0,1920,1080,train +fO2htapfNhA,14414,14737,608-33-1152-576,24.0,1920,1080,train +ON4iy8hq2hM,886,1092,951-128-1446-622,24.0,1920,1080,train +w8mToWb9NC8,22257,22434,798-161-1138-500,30.0,1920,1080,train +UMhLBPPtlrY,14037,14172,278-72-594-388,30.0,1280,720,test +ClfBxWPkBKU,24359,24670,202-26-570-395,30.0,1280,720,train +tuvxXnQrRv8,6925,7053,1195-108-1708-621,25.0,1920,1080,train +HI7zfpitZpo,13405,13642,194-43-558-407,24.0,1280,720,train +pyFcr2WcOyo,14248,14729,1119-251-1492-623,24.0,1920,1080,train +Q6U69HbAF9Y,5821,6027,735-66-1074-406,24.0,1280,720,train +7Q59suYxIec,12729,12861,345-107-754-515,24.0,1920,1080,train +0B543Zkqq88,6366,6582,694-107-1192-604,25.0,1920,1080,train +2ix8JEqCJ1s,11591,11740,1075-65-1649-640,24.0,1920,1080,train +oEIYHTlbeLA,5148,5283,546-105-867-426,24.0,1280,720,train +4sZdcB6bjI8,11869,12053,491-52-824-384,30.0,1280,720,train +XAcARiiK5uY,16338,16468,475-53-825-403,30.0,1280,720,train +-nKdufEaL8k,3574,3710,303-118-688-503,25.0,1920,1080,train +uuatZO76MgQ,6718,6850,708-97-1174-563,30.0,1920,1080,train +bnvnBwP9sTo,2408,2584,585-87-929-431,24.0,1280,720,train +A_u2WFTfbcg,2185,2515,794-72-1328-606,24.0,1920,1080,train +3VTsIju1dLI,13066,13223,984-49-1570-635,24.0,1920,1080,train +iMFJef3xnmg,4596,4941,732-181-1175-625,30.0,1920,1080,train +4n8qT0vQbWk,5675,5828,591-78-899-386,30.0,1280,720,train +ER0G2S9r7aE,2620,2750,431-126-874-568,25.0,1920,1080,train +1AT5klu_yAQ,7129,7342,154-25-704-575,24.0,1920,1080,train +Kc6hkHGHQQc,2961,3098,1031-62-1582-612,24.0,1920,1080,test +1AT5klu_yAQ,8870,9033,181-68-644-530,24.0,1920,1080,train +JI-HzvXk5bk,1254,1438,718-52-1048-382,24.0,1280,720,train +0JGarsZE1rk,16617,16807,346-62-873-589,24.0,1920,1080,train +yNhu0MG_2MA,20159,20553,616-80-1087-551,24.0,1920,1080,train +jAw8t2g-eVU,15334,15576,503-100-1001-598,30.0,1920,1080,train +RjquHTj4HlY,3640,3820,1041-179-1489-627,24.0,1920,1080,train +LHmz_s981-g,1778,2000,348-39-942-633,24.0,1920,1080,train +jr5mTwfFh00,14054,14208,671-174-1030-532,24.0,1920,1080,test 
+pb--rJGgVIo,12541,12733,586-67-930-411,24.0,1280,720,train +I3BJVaioX_k,7190,7488,619-88-1142-611,25.0,1920,1080,train +xDKth-qS8Jk,17653,17802,1053-93-1525-564,25.0,1920,1080,train +SF9qq6vQ3Pg,10573,10706,779-150-1227-598,24.0,1920,1080,train +ni4FV5zL6lM,3380,3677,234-41-632-438,24.0,1280,720,train +TLZ6W-Nqv1I,4056,4187,990-83-1449-542,25.0,1920,1080,train +V1yW5IsnSjo,12359,12787,592-133-937-478,30.0,1280,720,train +0JGarsZE1rk,18755,19132,358-63-892-595,24.0,1920,1080,train +87qLWFZManA,17167,17298,922-136-1401-616,24.0,1920,1080,train +iMFJef3xnmg,18854,19091,828-186-1266-623,30.0,1920,1080,train +1fZ915L1w7I,17994,18208,887-222-1214-548,30.0,1920,1080,train +TLZ6W-Nqv1I,17655,17789,1038-141-1486-589,25.0,1920,1080,train +4CGFPbFqdJ4,10495,10664,771-88-1241-558,24.0,1920,1080,train +ER15KmrB8h8,5573,5740,683-85-1181-582,24.0,1920,1080,train +wmMrZyVANUo,10566,10727,358-48-888-578,24.0,1920,1080,train +360bU-vBJOI,10322,10527,769-175-1194-601,24.0,1920,1080,test +TLZ6W-Nqv1I,8126,8266,993-112-1454-573,25.0,1920,1080,train +8DDgHq9ewOo,22472,22751,1072-66-1562-556,24.0,1920,1080,train +9RTkZaX1cH0,18091,18256,342-84-675-418,30.0,1280,720,train +AI7M-JTC6_w,5097,5341,970-73-1539-642,24.0,1920,1080,train +08z_xW-szwM,8070,8330,658-243-1071-655,30.0,1920,1080,train +kmbui1xF8DE,14197,14491,687-120-1222-655,25.0,1920,1080,train +eemOZfsrKdg,12474,12745,668-95-1090-517,25.0,1920,1080,train +A2DzsgJSwcc,11748,11936,1079-72-1582-575,24.0,1920,1080,test +ERSZb2wHFDw,4499,4655,754-176-1215-636,30.0,1920,1080,train +360bU-vBJOI,2706,2962,708-176-1135-603,24.0,1920,1080,test +08z_xW-szwM,524,688,703-243-1103-644,30.0,1920,1080,train +aI8KiPiVLyY,11317,12130,462-75-821-434,24.0,1280,720,train +wKlMcLTqRLs,3326,3484,605-18-971-384,24.0,1280,720,train +B43f89Y9f-A,21903,22097,829-265-1215-651,30.0,1920,1080,train +cUee1I69nFs,1152,1366,708-107-1250-648,25.0,1920,1080,train +360bU-vBJOI,16433,16664,730-126-1245-642,24.0,1920,1080,test +Cetg4gu0oQQ,5753,5980,490-43-1030-583,24.0,1920,1080,train +tFfYh9THuGo,4539,4810,243-136-686-579,24.0,1920,1080,train +GIrfNWed1Mk,13696,13888,1219-181-1680-642,24.0,1920,1080,test +51k3UASQE5E,8759,8912,498-178-942-622,24.0,1920,1080,train +0DHywidLX6A,3962,4156,1077-127-1557-606,24.0,1920,1080,train +0txtVkBUdSQ,10584,10747,675-90-1074-488,24.0,1280,720,train +uuatZO76MgQ,9072,9237,1046-96-1524-574,30.0,1920,1080,train +iFTWM7HV2UI,30873,31674,706-59-1286-641,24.0,1920,1080,train +RKK7wGAYP6k,2156,2329,328-90-828-589,24.0,1920,1080,train +aQsOmGflf1o,11960,12103,344-216-713-585,25.0,1920,1080,train +ER0G2S9r7aE,1600,1848,347-81-883-618,25.0,1920,1080,train +kaU7IPlg9PA,5541,5876,1208-110-1648-550,25.0,1920,1080,train +OWiiA9hXbY8,2130,2377,221-73-663-516,24.0,1920,1080,train +GIBoCJeB3HQ,18129,18354,201-217-617-633,24.0,1920,1080,train +2paoNvG5Nmo,18330,18486,908-201-1373-665,50.0,1920,1080,train +eaCrsBtiYA4,18793,18951,332-70-872-610,24.0,1920,1080,train +fFJeTy1_8Ng,4461,4673,1200-123-1637-560,25.0,1920,1080,train +GIrfNWed1Mk,7291,7546,1196-175-1665-645,24.0,1920,1080,test +iqKYtA3pK1c,16888,17035,1075-143-1578-645,24.0,1920,1080,train +LHmz_s981-g,2935,3322,361-70-908-617,24.0,1920,1080,train +Juc_yvEkJuc,19559,19787,1055-64-1572-580,25.0,1920,1080,train +PrK0CifulU0,9975,10182,786-76-1098-390,24.0,1280,720,train +L7X17aash2s,19780,19951,948-190-1405-648,30.0,1920,1080,train +b4qVI780Jgw,2593,2759,1182-98-1662-579,24.0,1920,1080,train +SF9qq6vQ3Pg,3533,3748,719-115-1218-613,24.0,1920,1080,train +iMFJef3xnmg,25236,25373,919-187-1353-621,30.0,1920,1080,train 
+VM6HZqQKhok,23930,24104,563-115-1116-669,24.0,1920,1080,train +PrK0CifulU0,3012,3150,801-77-1120-396,24.0,1280,720,train +_7t6Ry4PlTo,20636,20850,1098-93-1610-604,25.0,1920,1080,train +Ubbmje44gLg,1615,1785,927-238-1299-609,25.0,1920,1080,train +kdqUhTnAgJU,17298,17427,507-47-848-388,30.0,1280,720,train +GIrfNWed1Mk,6747,6890,1141-178-1608-645,24.0,1920,1080,test +QijH4UAqGD8,3416,3605,407-77-908-578,24.0,1920,1080,train +3-UcGCnJ14c,2398,2612,1059-172-1471-583,24.0,1920,1080,train +MeKJK4uetL8,2078,2264,936-68-1464-594,25.0,1920,1080,train +F3mzS32SH2Q,14249,14517,437-245-835-643,25.0,1920,1080,train +g3vSYbT1Aco,14299,14433,598-96-1149-646,24.0,1920,1080,train +kaU7IPlg9PA,11679,12092,1180-108-1661-588,25.0,1920,1080,train +j-S03JfgHEA,7436,7603,769-83-1113-426,25.0,1280,720,train +v9EKV2nSU8w,4498,4747,958-70-1512-623,24.0,1920,1080,train +y8ohTeRKVqs,12926,13106,403-77-936-609,24.0,1920,1080,train +yLfCCcVDUiU,1031,1283,1297-312-1637-652,24.0,1920,1080,train +K926HAKRFvw,7722,7901,789-60-1098-369,25.0,1280,720,train +7a5NyUITbyk,8686,8861,984-61-1533-610,24.0,1920,1080,train +cK74vhqzeeQ,15612,15941,465-81-797-414,25.0,1280,720,test +lmyZMtPVodo,9703,9910,723-32-1144-453,24.0,1280,720,train +idfv7Lw4Y_s,4644,4834,733-124-1221-611,25.0,1920,1080,train +aQsOmGflf1o,14335,14490,345-212-741-608,25.0,1920,1080,train +PJLT0cAPNfs,5867,6038,1099-183-1530-615,24.0,1920,1080,train +ulYR5bpu68E,3266,3402,278-147-592-461,25.0,1280,720,train +25s8BCsSWuk,9105,9353,1168-118-1636-587,25.0,1920,1080,train +tFfYh9THuGo,3476,3609,417-139-858-580,24.0,1920,1080,train +BzeTjn0R2VY,10245,10420,849-60-1371-582,24.0,1920,1080,train +GIrfNWed1Mk,1508,1700,1195-174-1674-652,24.0,1920,1080,test +I3BJVaioX_k,14697,14869,625-97-1124-596,25.0,1920,1080,train +cK74vhqzeeQ,17081,17386,501-84-836-419,25.0,1280,720,test +BvpmZktlBFs,3041,3475,442-68-768-394,24.0,1280,720,train +yNhu0MG_2MA,21245,21397,627-85-1092-551,24.0,1920,1080,train +-2Dj9M71JAc,2968,3164,326-41-701-416,24.0,1280,720,train +iMFJef3xnmg,18359,18529,804-162-1271-629,30.0,1920,1080,train +JKsHhXwqDqM,29401,29533,663-58-1007-402,25.0,1280,720,test +1fZ915L1w7I,10844,10985,971-110-1390-529,30.0,1920,1080,train +HzAtOyw6ACw,1354,1562,957-130-1396-569,24.0,1920,1080,train +RZgkjEdMbSw,7727,7862,1088-111-1592-615,24.0,1920,1080,train +fO2htapfNhA,3816,3973,787-65-1276-554,24.0,1920,1080,train +yLfCCcVDUiU,17898,18095,1160-153-1511-504,24.0,1920,1080,train +iB4MS1hsWXU,14911,15238,974-101-1558-685,24.0,1920,1080,test +kvdHqS3ryw0,6983,7246,1251-172-1641-562,24.0,1920,1080,train +kmbui1xF8DE,14775,14991,637-149-1140-652,25.0,1920,1080,train +HzAtOyw6ACw,11460,11605,1216-90-1650-525,24.0,1920,1080,train +hs-YuHv0vUk,15589,15729,940-90-1450-601,24.0,1920,1080,train +1g-1_Y3fvUg,5374,5508,969-89-1475-595,24.0,1920,1080,train +27lMmdmy-b8,831,1005,445-56-793-403,30.0,1280,720,train +xDKth-qS8Jk,2066,2229,1144-147-1610-612,25.0,1920,1080,train +Qd5rAVHT6Vw,5345,5474,549-118-1024-593,24.0,1920,1080,train +0gks6ceq4eQ,23529,23757,1159-65-1676-581,24.0,1920,1080,test +XPDHY-jOQYA,1826,2074,1239-163-1680-604,25.0,1920,1080,train +KxDwieKpawg,4327,4485,709-64-1053-408,24.0,1280,720,train +U4gR_cd6xOc,9094,9315,400-78-831-508,24.0,1920,1080,train +jfqwHT3u1-8,2916,3149,456-62-789-394,25.0,1280,720,train +Dig_QFPPPtE,2199,2359,383-129-788-534,24.0,1920,1080,train +3zJHwOwirjA,10307,10436,245-145-738-638,24.0,1920,1080,train +oGlsUYGMYks,918,1193,384-79-728-423,24.0,1280,720,train +27lMmdmy-b8,8029,8366,436-45-790-399,30.0,1280,720,train 
+o_U-_akINwQ,16444,16586,295-286-725-717,25.0,1920,1080,test +dKq34EVggjI,4160,4301,932-86-1423-576,24.0,1920,1080,train +360bU-vBJOI,1981,2201,733-173-1161-601,24.0,1920,1080,test +uuatZO76MgQ,1058,1549,771-100-1234-563,30.0,1920,1080,train +tZYkjaKNr_o,2749,3013,685-33-1034-382,25.0,1280,720,train +BzeTjn0R2VY,11391,11533,527-15-1125-614,24.0,1920,1080,train +fO2htapfNhA,8569,8859,672-39-1202-569,24.0,1920,1080,train +BzeTjn0R2VY,1638,1777,155-63-702-611,24.0,1920,1080,train +0bRocfcPhHU,4094,4270,789-90-1101-402,24.0,1280,720,train +YUUP2MMz7PU,9621,9873,837-185-1246-594,24.0,1920,1080,train +0JGarsZE1rk,16045,16216,362-44-908-589,24.0,1920,1080,train +wxStlzunxCw,16214,16410,1021-177-1366-522,25.0,1920,1080,train +eemOZfsrKdg,1596,1764,377-141-796-560,25.0,1920,1080,train +9RTkZaX1cH0,5935,6132,227-91-559-423,30.0,1280,720,train +fO2htapfNhA,2165,2342,768-58-1278-568,24.0,1920,1080,train +4zLmWpMDY8Q,11122,11318,346-238-697-588,24.0,1920,1080,train +5zWmdHmJMd0,1383,1691,284-76-814-606,24.0,1920,1080,train +bVV2Zk88beY,6548,7177,706-52-1041-387,30.0,1280,720,train +9RTkZaX1cH0,6935,7182,283-80-628-426,30.0,1280,720,train +VJoQj00RZHg,3593,3833,524-92-859-427,25.0,1280,720,train +Qd5rAVHT6Vw,7440,7607,564-117-1049-600,24.0,1920,1080,train +_GMVTJ9ZKVc,24577,24806,418-31-808-419,30.0,1280,720,train +LnJwH_PZXnM,2810,3093,1046-117-1523-594,25.0,1920,1080,train +2Brajdazp1o,7144,7315,774-67-1157-449,30.0,1280,720,train +iMFJef3xnmg,20827,21010,873-182-1312-623,30.0,1920,1080,train +08z_xW-szwM,17122,17346,678-268-1048-637,30.0,1920,1080,train +51k3UASQE5E,3674,3924,508-198-927-617,24.0,1920,1080,train +cK74vhqzeeQ,9975,10552,505-66-819-380,25.0,1280,720,test +yLfCCcVDUiU,20490,21145,542-222-961-641,24.0,1920,1080,train +1fZ915L1w7I,15932,16082,814-197-1189-572,30.0,1920,1080,train +2VBkDNzeRZM,6219,6398,374-96-875-597,24.0,1920,1080,train +bmFhWr693is,15384,15525,560-29-915-383,30.0,1280,720,train +7Q59suYxIec,8447,8582,426-62-915-551,24.0,1920,1080,train +95ovIJ3dsNk,7201,7342,620-64-937-380,30.0,1280,720,train +TLZ6W-Nqv1I,6835,7160,985-111-1437-563,25.0,1920,1080,train +N39x_WTPix0,2492,2744,569-52-930-414,25.0,1280,720,train +hokUdXYRe2Q,3998,4140,818-190-1196-567,30.0,1920,1080,train +13zN4-MVM9g,2519,2792,684-73-1199-588,24.0,1920,1080,train +pxEcvU0Vp_M,10551,10814,775-181-1149-556,30.0,1920,1080,train +4CGFPbFqdJ4,2591,2874,798-86-1297-585,24.0,1920,1080,train +I3BJVaioX_k,11990,12280,589-75-1126-612,25.0,1920,1080,train +y7l5QHRY_AI,17430,17757,455-108-760-412,24.0,1280,720,train +4sZdcB6bjI8,11303,11868,469-53-813-397,30.0,1280,720,train +i2l9v_seHCc,16548,16750,1210-131-1686-607,24.0,1920,1080,train +jr5mTwfFh00,10253,10484,794-159-1175-540,24.0,1920,1080,test +IFpK-MqBD5E,2840,3042,773-86-1272-584,24.0,1920,1080,train +kvdHqS3ryw0,4300,4491,1056-124-1521-590,24.0,1920,1080,train +_7t6Ry4PlTo,5499,5660,1118-107-1611-601,25.0,1920,1080,train +BvpmZktlBFs,4092,4626,429-69-755-396,24.0,1280,720,train +X4LOF6ftJVU,2677,2818,657-76-995-413,25.0,1280,720,train +GSf6nij-SdA,21618,21845,585-31-972-419,24.0,1280,720,train +RKK7wGAYP6k,844,1153,300-36-873-609,24.0,1920,1080,train +YUUP2MMz7PU,428,668,749-188-1156-596,24.0,1920,1080,train +pxEcvU0Vp_M,380,597,779-194-1146-560,30.0,1920,1080,train +iMFJef3xnmg,7612,8487,751-169-1199-617,30.0,1920,1080,train +LnJwH_PZXnM,15334,15524,1248-208-1599-559,25.0,1920,1080,train +YUUP2MMz7PU,16346,16525,656-182-1066-592,24.0,1920,1080,train +8DDgHq9ewOo,19934,20070,1118-73-1522-476,24.0,1920,1080,train 
+tFfYh9THuGo,9218,9347,319-132-745-558,24.0,1920,1080,train +5bXiuSrZUKg,18736,18956,262-57-621-416,30.0,1280,720,train +RjquHTj4HlY,4870,5059,979-188-1407-615,24.0,1920,1080,train +Cr1usGUSuL4,6000,6275,560-106-884-430,24.0,1280,720,train +jAw8t2g-eVU,18576,18724,664-111-1150-597,30.0,1920,1080,train +27lMmdmy-b8,7200,7379,442-41-797-396,30.0,1280,720,train +fO2htapfNhA,15936,16225,597-44-1119-566,24.0,1920,1080,train +TLZ6W-Nqv1I,5566,5769,1012-81-1464-532,25.0,1920,1080,train +tFfYh9THuGo,13258,13549,288-118-756-585,24.0,1920,1080,train +r-YXvkWLy-g,4954,5188,313-112-801-601,24.0,1920,1080,train +bWA1gvA5lxU,1987,2148,1132-106-1597-572,25.0,1920,1080,train +I3BJVaioX_k,13242,13410,482-66-1010-594,25.0,1920,1080,train +bVV2Zk88beY,7739,7930,711-60-1040-390,30.0,1280,720,train +iMFJef3xnmg,21965,22115,869-192-1294-618,30.0,1920,1080,train +2VBkDNzeRZM,4945,5084,361-96-854-589,24.0,1920,1080,train +yNhu0MG_2MA,8510,8927,577-70-1044-537,24.0,1920,1080,train +0B543Zkqq88,7069,7266,627-112-1112-597,25.0,1920,1080,train +3m6dV7Xo3Vc,12399,12540,624-103-977-456,25.0,1280,720,train +cgxZ4H3gJ8c,14464,14663,434-78-777-422,24.0,1280,720,train +iMFJef3xnmg,26983,27489,887-175-1347-635,30.0,1920,1080,train +iB4MS1hsWXU,17750,17883,863-84-1483-704,24.0,1920,1080,test +8Dv2Hdf5TRg,16390,16598,766-84-1102-421,24.0,1280,720,test +pyFcr2WcOyo,11319,11738,1125-253-1490-618,24.0,1920,1080,train +PJLT0cAPNfs,6043,6190,1112-169-1557-613,24.0,1920,1080,train +7Q59suYxIec,17016,17158,412-95-838-520,24.0,1920,1080,train +501FEzbB1JI,5149,5292,362-89-693-420,30.0,1280,720,train +kSR8G8mfp84,10772,10963,364-46-694-375,24.0,1280,720,train +kmbui1xF8DE,10578,10815,642-100-1222-680,25.0,1920,1080,train +P2y3Uspr8Bg,16926,17170,406-148-725-467,25.0,1280,720,train +ClfBxWPkBKU,21932,22183,209-25-604-421,30.0,1280,720,train +oGlsUYGMYks,10837,10984,392-71-754-432,24.0,1280,720,train +WvvuLDX7iIk,9306,9548,744-70-1059-385,24.0,1280,720,train +th3nnEpITz0,29041,29184,453-18-833-398,30.0,1280,720,train +cUee1I69nFs,5041,5190,614-76-1199-660,25.0,1920,1080,train +zamvnyBB-SU,14946,15131,1031-9-1625-603,24.0,1920,1080,train +VJoQj00RZHg,7121,7525,442-84-765-408,25.0,1280,720,train +0R9zjn9BBvA,6509,6658,704-27-1290-613,24.0,1920,1080,test +jAw8t2g-eVU,18831,19041,651-110-1127-588,30.0,1920,1080,train +PVXQUItNEDQ,3674,3802,652-76-980-404,24.0,1280,720,train +i_wtaoHCw3k,12816,12973,312-88-739-516,25.0,1920,1080,train +1g-1_Y3fvUg,3170,3335,943-93-1434-583,24.0,1920,1080,train +iMFJef3xnmg,26532,26949,849-159-1334-644,30.0,1920,1080,train +xHHb7R3kx40,969,1121,814-124-1289-599,24.0,1920,1080,train +7Q59suYxIec,9206,9444,244-56-746-558,24.0,1920,1080,train +_QdPW8JrYzQ,11052,11327,581-37-960-415,25.0,1280,720,train +kvdHqS3ryw0,11661,11793,1259-173-1690-604,24.0,1920,1080,train +4zLmWpMDY8Q,10907,11037,258-237-618-597,24.0,1920,1080,train +kmbui1xF8DE,6761,7046,714-110-1282-678,25.0,1920,1080,train +pvBlSFVmoaw,12585,12769,198-5-638-445,24.0,1280,720,train +P2AUat93a8Q,25250,25395,688-45-1033-390,24.0,1280,720,train +L7X17aash2s,28681,28849,964-125-1444-604,30.0,1920,1080,train +4E_1AB1rsSw,17114,17337,1233-125-1685-578,24.0,1920,1080,test +r-YXvkWLy-g,6390,6643,383-96-907-620,24.0,1920,1080,train +5zWmdHmJMd0,2763,3015,317-52-909-643,24.0,1920,1080,train +Juc_yvEkJuc,21449,21591,1103-107-1586-589,25.0,1920,1080,train +Kc6hkHGHQQc,3747,3979,997-79-1522-603,24.0,1920,1080,test +EEjyPqyFe_s,9495,9651,637-62-976-401,24.0,1280,720,train +g3vSYbT1Aco,10129,10378,415-82-977-645,24.0,1920,1080,train 
+YUUP2MMz7PU,18136,18265,849-144-1311-605,24.0,1920,1080,train +PrUA8L40Dic,9881,10105,990-67-1513-590,24.0,1920,1080,train +uTL9tm7S1Io,5554,5865,787-62-1099-375,24.0,1280,720,train +RjquHTj4HlY,2751,2889,1053-187-1485-620,24.0,1920,1080,train +VJoQj00RZHg,12156,12618,403-57-803-457,25.0,1280,720,train +EYgoqiX15Rw,5582,5755,487-56-852-421,24.0,1280,720,train +FqrLUtIFVjs,13477,13779,512-53-860-401,24.0,1280,720,train +TLZ6W-Nqv1I,16103,16307,1016-122-1485-592,25.0,1920,1080,train +51k3UASQE5E,12876,13019,542-224-934-617,24.0,1920,1080,train +g9M3HIjHuq0,8648,8787,621-74-956-409,24.0,1280,720,train +U51MSK6nSQE,13856,14386,1189-99-1674-584,24.0,1920,1080,train +pyFcr2WcOyo,810,1028,1131-240-1511-620,24.0,1920,1080,train +FDhlOovaGrI,18214,18355,607-84-936-412,24.0,1280,720,test +LnJwH_PZXnM,2025,2181,686-49-1269-632,25.0,1920,1080,train +05jJodDVJRQ,10853,11164,548-95-888-435,25.0,1280,720,train +iFTWM7HV2UI,14838,15190,718-128-1195-605,24.0,1920,1080,train +iMFJef3xnmg,9517,10451,746-163-1184-601,30.0,1920,1080,train +IFpK-MqBD5E,4154,4288,679-78-1164-563,24.0,1920,1080,train +a4ZBzM3L6ws,476,612,598-88-1097-588,25.0,1920,1080,train +UxLRv0FEndM,9616,9749,214-64-521-371,24.0,1280,720,train +0JGarsZE1rk,2661,2830,283-26-841-584,24.0,1920,1080,train +Juc_yvEkJuc,13966,14183,1011-74-1574-637,25.0,1920,1080,train +501FEzbB1JI,2984,3112,315-89-646-421,30.0,1280,720,train +I6xuJu7gLe0,5026,5256,232-137-714-619,24.0,1920,1080,train +zMWYQRKuc5M,8415,8575,769-55-1108-394,24.0,1280,720,train +1fZ915L1w7I,14972,15187,911-205-1273-567,30.0,1920,1080,train +pxEcvU0Vp_M,23789,23987,792-168-1183-559,30.0,1920,1080,train +QeIrdqU0o9s,2971,3131,437-87-741-392,24.0,1280,720,train +EEjyPqyFe_s,5171,5448,654-92-966-405,24.0,1280,720,train +U4gR_cd6xOc,2252,2646,355-46-859-551,24.0,1920,1080,train +VJoQj00RZHg,1399,1866,520-94-843-418,25.0,1280,720,train +L7X17aash2s,5125,5264,952-179-1379-606,30.0,1920,1080,train +WyprXhvGVYk,2769,3024,778-63-1335-619,24.0,1920,1080,train +UxLRv0FEndM,16665,16835,248-44-555-351,24.0,1280,720,train +BzeTjn0R2VY,15154,15400,308-23-850-565,24.0,1920,1080,train +g3vSYbT1Aco,14136,14290,659-136-1122-598,24.0,1920,1080,train +ClfBxWPkBKU,17057,17291,184-40-535-390,30.0,1280,720,train +pxEcvU0Vp_M,28065,28266,846-204-1159-516,30.0,1920,1080,train +U51MSK6nSQE,11961,12092,1203-112-1656-566,24.0,1920,1080,train +-eBUcBfkVCo,24722,25085,524-151-838-466,30.0,1280,720,train +yfLp_vueUxY,10178,10340,1109-172-1538-602,30.0,1920,1080,test +yNhu0MG_2MA,13194,13536,664-79-1128-543,24.0,1920,1080,train +jzrcRcEBrmA,1632,1787,1039-110-1525-597,24.0,1920,1080,train +kefDIEb3xyQ,1259,1454,422-152-803-534,25.0,1920,1080,train +ER15KmrB8h8,5743,5901,652-50-1214-612,24.0,1920,1080,train +Y0UB6g8Rsyw,1670,1817,756-183-1165-592,30.0,1920,1080,train +lV3trdyDdHE,10959,11133,596-72-948-424,25.0,1280,720,train +xHHb7R3kx40,7264,7526,805-86-1285-566,24.0,1920,1080,train +_X0mgOOSpLU,11369,11500,549-42-889-383,25.0,1280,720,train +YUUP2MMz7PU,12077,12389,759-180-1176-599,24.0,1920,1080,train +kyaiTGmwxnU,6134,6434,500-56-861-416,24.0,1280,720,train +VJoQj00RZHg,6752,6893,425-83-755-413,25.0,1280,720,train +uTL9tm7S1Io,10977,11164,762-56-1080-375,24.0,1280,720,train +r-YXvkWLy-g,4539,4682,366-114-849-596,24.0,1920,1080,train +onaiTOLPeUs,3828,3994,688-84-1211-606,24.0,1920,1080,train +3VTsIju1dLI,8604,8763,930-62-1502-634,24.0,1920,1080,train +oEIYHTlbeLA,12224,12423,479-84-813-417,24.0,1280,720,train +DGvPfD1Dd1U,22744,22939,1137-107-1616-587,25.0,1920,1080,train 
+k0GQSJrpVhM,15322,15698,1152-149-1616-613,24.0,1920,1080,train
+kmbui1xF8DE,958,1166,764-93-1291-619,25.0,1920,1080,train
+0g0S34XE2b8,3206,3349,1143-111-1677-644,24.0,1920,1080,train
+y5nbWUOc9tY,6834,6997,608-71-948-411,24.0,1280,720,train
+0B543Zkqq88,2512,2704,794-123-1293-623,25.0,1920,1080,train
+rftagV38YKY,11003,11161,218-57-554-394,24.0,1280,720,train
+idfv7Lw4Y_s,13252,13555,666-97-1173-604,25.0,1920,1080,train
+QeIrdqU0o9s,15207,15370,447-76-781-410,24.0,1280,720,train
+Juc_yvEkJuc,11957,12185,1119-98-1647-626,25.0,1920,1080,train
+dDClQv1SeeE,5379,5512,1139-110-1674-646,24.0,1920,1080,train
+XPDHY-jOQYA,7613,7835,1183-123-1629-571,25.0,1920,1080,train
diff --git a/motion-gan-pipeline/motion-generation/TED_dataset_utils/util.py b/motion-gan-pipeline/motion-generation/TED_dataset_utils/util.py
new file mode 100644
index 0000000..7d693f7
--- /dev/null
+++ b/motion-gan-pipeline/motion-generation/TED_dataset_utils/util.py
@@ -0,0 +1,129 @@
+import numpy as np
+from skimage import img_as_ubyte
+from skimage.transform import resize
+import imageio
+
+def bb_intersection_over_union(boxA, boxB):
+    # Intersection-over-union of two boxes in (left, top, right, bottom) pixel coordinates
+    xA = max(boxA[0], boxB[0])
+    yA = max(boxA[1], boxB[1])
+    xB = min(boxA[2], boxB[2])
+    yB = min(boxA[3], boxB[3])
+    interArea = max(0, xB - xA + 1) * max(0, yB - yA + 1)
+    boxAArea = (boxA[2] - boxA[0] + 1) * (boxA[3] - boxA[1] + 1)
+    boxBArea = (boxB[2] - boxB[0] + 1) * (boxB[3] - boxB[1] + 1)
+    iou = interArea / float(boxAArea + boxBArea - interArea)
+    return iou
+
+
+def one_box_inside_other(boxA, boxB):
+    # True when boxB lies entirely within boxA
+    xA = boxA[0] <= boxB[0]
+    yA = boxA[1] <= boxB[1]
+    xB = boxA[2] >= boxB[2]
+    yB = boxA[3] >= boxB[3]
+    return xA and yA and xB and yB
+
+def join(tube_bbox, bbox):
+    # Smallest box enclosing both inputs
+    xA = min(tube_bbox[0], bbox[0])
+    yA = min(tube_bbox[1], bbox[1])
+    xB = max(tube_bbox[2], bbox[2])
+    yB = max(tube_bbox[3], bbox[3])
+    return (xA, yA, xB, yB)
+
+
+def compute_aspect_preserved_bbox(bbox, increase_area):
+    left, top, right, bot = bbox
+    width = right - left
+    height = bot - top
+
+    width_increase = max(increase_area, ((1 + 2 * increase_area) * height - width) / (2 * width))
+    height_increase = max(increase_area, ((1 + 2 * increase_area) * width - height) / (2 * height))
+
+    left = int(left - width_increase * width)
+    top = int(top - height_increase * height)
+    right = int(right + width_increase * width)
+    bot = int(bot + height_increase * height)
+
+    return (left, top, right, bot)
+
+def compute_increased_bbox(bbox, increase_area):
+    left, top, right, bot = bbox
+    width = right - left
+    height = bot - top
+
+    left = int(left - increase_area * width)
+    top = int(top - increase_area * height)
+    right = int(right + increase_area * width)
+    bot = int(bot + increase_area * height)
+
+    return (left, top, right, bot)
+
+
+
+def crop_bbox_from_frames(frame_list, tube_bbox, min_frames=16, image_shape=(256, 256), min_size=200,
+                          increase_area=0.1, aspect_preserving=True):
+    frame_shape = frame_list[0].shape
+    # Filter short sequences
+    if len(frame_list) < min_frames:
+        return None, None
+    left, top, right, bot = tube_bbox
+    width = right - left
+    height = bot - top
+    # Filter if it is too small
+    if max(width, height) < min_size:
+        return None, None
+
+    if aspect_preserving:
+        left, top, right, bot = compute_aspect_preserved_bbox(tube_bbox, increase_area)
+    else:
+        left, top, right, bot = compute_increased_bbox(tube_bbox, increase_area)
+
+    # Compute out of bounds
+    left_oob = -min(0, left)
+    right_oob = right - min(right, frame_shape[1])
+    top_oob = -min(0, top)
+    bot_oob = bot - min(bot, frame_shape[0])
+
+    # Don't use crops near the border
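+    # A positive out-of-bounds fraction on any side means the enlarged box
+    # leaves the frame, so the chunk is dropped rather than padded.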
+    if max(left_oob / float(width), right_oob / float(width), top_oob / float(height), bot_oob / float(height)) > 0:
+        return None, None
+
+    selected = [frame[top:bot, left:right] for frame in frame_list]
+    if image_shape is not None:
+        out = [img_as_ubyte(resize(frame, image_shape, anti_aliasing=True)) for frame in selected]
+    else:
+        out = selected
+
+    return out, [left, top, right, bot]
+
+from multiprocessing import Pool
+from itertools import cycle
+from tqdm import tqdm
+import os
+
+def scheduler(data_list, fn, args):
+    # Fans fn out over a worker pool and streams each chunk's metadata row to the CSV
+    device_ids = args.device_ids.split(",")
+    pool = Pool(processes=args.workers)
+    args_list = cycle([args])
+    f = open(args.chunks_metadata, 'w')
+    line = "{video_id},{start},{end},{bbox},{fps},{width},{height},{partition}"
+    print(line.replace('{', '').replace('}', ''), file=f)
+    for chunks_data in tqdm(pool.imap_unordered(fn, zip(data_list, cycle(device_ids), args_list))):
+        for data in chunks_data:
+            print(line.format(**data), file=f)
+            f.flush()
+    f.close()
+
+def save(path, frames, format):
+    if format == '.mp4':
+        imageio.mimsave(path, frames)
+    elif format == '.png':
+        if os.path.exists(path):
+            print("Warning: skipping video %s" % os.path.basename(path))
+            return
+        else:
+            os.makedirs(path)
+        for j, frame in enumerate(frames):
+            imageio.imsave(os.path.join(path, str(j).zfill(7) + '.png'), frame)
+    else:
+        print("Unknown format %s" % format)
+        exit()
diff --git a/motion-gan-pipeline/motion-generation/audio_correlation_visual.py b/motion-gan-pipeline/motion-generation/audio_correlation_visual.py
new file mode 100644
index 0000000..bfc1862
--- /dev/null
+++ b/motion-gan-pipeline/motion-generation/audio_correlation_visual.py
@@ -0,0 +1,201 @@
+# SPDX-License-Identifier: MIT
+# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details
+
+import os
+from os.path import join
+from shutil import move
+import sys
+import yaml
+import argparse
+import numpy as np
+from options.test_audio2headpose_options import TestOptions
+from datasets import create_dataset
+from util.cfgnode import CfgNode
+import cv2
+import subprocess
+from pathlib import Path
+from tqdm import tqdm
+import matplotlib
+matplotlib.use('agg')
+import matplotlib.pyplot as plt
+from matplotlib.gridspec import GridSpec
+import librosa
+import librosa.display
+from PIL import Image
+sys.path.append('../preprocessing/')
+from face_tracking.FLAME.FLAME import FLAME
+from face_tracking.FLAME.config import cfg as FLAME_cfg
+from face_tracking.FLAME.lbs import vertices2landmarks
+from face_tracking.render_3dmm import Render_FLAME
+from face_tracking.util import *
+import torch
+from scipy.stats import pearsonr
+
+
+def write_video_with_audio(audio_path, output_path, prefix='pred_', h=512, w=512, fps=25):
+    # Note: frames are read from the module-level target_dir set in __main__
+    fourcc = cv2.VideoWriter_fourcc(*'DIVX')
+    # fourcc = cv2.VideoWriter_fourcc('M', 'J', 'P', 'G')
+    video_tmp_path = join(target_dir, 'tmp.avi')
+    out = cv2.VideoWriter(video_tmp_path, fourcc, fps, (w, h))
+    nframe = len(list(Path(target_dir).glob('*.png')))
+
+    for j in tqdm(range(nframe), position=0, desc='writing video'):
+        img = cv2.imread(join(target_dir, prefix + str(j+1) + '.png'))
+        out.write(img)
+    out.release()
+    cmd = 'ffmpeg -y -i "' + video_tmp_path + '" -i "' + \
+        audio_path + '" -codec copy -shortest "' + output_path + '"'
+    subprocess.call(cmd, shell=True)
+    os.remove(video_tmp_path)  # remove the temporary video
+
+
+if __name__ == '__main__':
+    # Load default options
+    Test_parser = TestOptions()
+    opt = Test_parser.parse()  # get training options
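+    # TestOptions supplies the CLI defaults; the YAML experiment config loaded
+    # below overwrites the experiment-specific fields (name, dataset, FPS).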
+
+    # Load config
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--config', default='config/Audio2Headpose_Ted.yml',
+                        help="path to the experiment config file, e.g. config/Audio2Headpose_Ted.yml", required=False)
+    inopt = parser.parse_args()
+    # TODO: make config
+    with open(inopt.config) as f:
+        cfg_dict = yaml.load(f, Loader=yaml.FullLoader)
+        cfg = CfgNode(cfg_dict)
+
+    # Overwrite with config
+    opt.phase = 'Test'
+    opt.name = cfg.experiment.name
+    opt.dataset_mode = cfg.experiment.dataset_mode
+    opt.dataroot = cfg.experiment.dataroot
+    opt.dataset_names = cfg.experiment.dataset_names
+    opt.FPS = cfg.experiment.fps
+
+    # save to the disk
+    Test_parser.print_options(opt)
+
+    # Set device
+    device = torch.device('cuda:{}'.format(opt.gpu_ids[0])) if len(
+        opt.gpu_ids) > 0 else torch.device('cpu')
+
+    # Load data
+    # create a dataset given opt.dataset_mode and other options
+    dataset = create_dataset(opt)
+    dataset = dataset.dataset
+
+    # Target dir
+    target_dir = './visuals/Audio_Correlation/'
+    os.makedirs(target_dir, exist_ok=True)
+
+    # Make renderer
+    h_fl, w_fl = 512, 512
+    focal = torch.from_numpy(np.array([900.])).double()
+
+    cxy = torch.tensor((w_fl / 2.0, h_fl / 2.0), dtype=torch.float).cpu()
+
+    model_3dmm = FLAME(FLAME_cfg.model)
+    renderer = Render_FLAME(model_3dmm.faces_tensor, focal, h_fl, w_fl, 1, device)
+
+    stop_at = 50
+
+    movement = []
+    amplitude = []
+
+    for file_index in tqdm(range(len(dataset))):
+
+        if file_index >= stop_at:
+            break
+
+        test_name = dataset.clip_names[file_index]
+
+        name, flame, frames, audio_packed, audio_features, emotions, landmarks, exps, headposes, velocity_pose, fit_data_path = dataset[file_index]
+
+        nframes = headposes.shape[0]
+
+        # Unpack audio and sample rate
+        audio, sr = audio_packed
+        fps = 25
+        audio_len = len(audio)/sr
+
+        # load fit data
+        fit_data = torch.load(fit_data_path)
+
+        # Spectrogram
+        X = librosa.stft(audio)
+        # print('Amplitude shape: ', audio.shape)
+        # print('Num frames: ', nframes)
+
+        # RMS
+        S, phase = librosa.magphase(X)
+        rms = librosa.feature.rms(S=S)
+
+        # First-order difference of the raw waveform
+        ampl_delta = np.concatenate([[0], audio[1:] - audio[:-1]])
+
+        interval = len(audio)/nframes  # audio samples per video frame
+
+        # Accumulate over the past two seconds
+        window = fps * 2
+        for frame in range(window, nframes):
+            mov = np.sum(np.abs(velocity_pose[frame-window:frame]), axis=0)
+            # mov = velocity_pose[frame]
+            # mov = headposes[frame]
+
+            ampl = np.sum(np.abs(ampl_delta[int((frame-window)*interval):int(frame*interval)]))
+            # ampl = np.sum((ampl_delta[int((frame-window)*interval):int(frame*interval)]))
+            # ampl = np.mean(audio[int((frame-window)*interval):int(frame*interval)])
+            movement.append(mov)
+            amplitude.append(ampl)
+
+    movement = np.array(movement)
+    amplitude = np.array(amplitude)
+
+    print(movement.shape)
+    print(amplitude.shape)
+
+    for idx, angle in enumerate(['Pitch', 'Yaw', 'Roll']):
+
+        # Movement per frame
+        r, p = pearsonr(x=amplitude, y=movement[:, idx])
+        r = round(r, 3)
+        p = round(p, 3)
+        x = np.linspace(np.min(amplitude), np.max(amplitude), 1000)
+
+        plt.title(f'{angle} angle: total movement vs. total change in amplitude over the past 2 seconds \n Correlation: {int(r*100)}%, p-value: {p}')
+        plt.scatter(x=amplitude, y=movement[:, idx])
+
+        plt.plot(x, (x * r) + np.mean(movement[:, idx]), linestyle='solid', color='red')
+        plt.xlabel('Amplitude')
+        plt.ylabel('Movement')
+        plt.savefig(f'{target_dir}{angle}_movement&delta_Amplitude.png')
+        plt.close()
+
+        # Talking head
+        R = torch.zeros(3)
+        R[idx] += 45
+        R = R.unsqueeze(0).to(device).double()
+        t = torch.from_numpy(np.array([0, 0, -5])).unsqueeze(0).to(device).double()
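+        # Render a neutral head (zero identity and expression) rotated 45 degrees
+        # about the current axis, translated to z = -5 in camera coordinates.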
torch.from_numpy(np.array([0, 0, -5])).unsqueeze(0).to(device).double() + id = torch.zeros_like(fit_data['id']).to(device).double() + expr = torch.zeros_like(exps[0]).unsqueeze(0).to(device).double() + + rott_geo = model_3dmm.forward_geo(id, expr, R, t) + landmarks3d = model_3dmm.get_3dlandmarks(id, expr, R, t, focal, cxy).cpu() + proj_geo = proj_pts(landmarks3d, focal, cxy) + + sel_pts3D = vertices2landmarks(rott_geo, + model_3dmm.faces_tensor, + model_3dmm.full_lmk_faces_idx.repeat( + 1, 1), + model_3dmm.full_lmk_bary_coords.repeat(1, 1, 1)) + + render_imgs = renderer(rott_geo.float(), model_3dmm.faces_tensor) + img_arr = render_imgs[0, :, :, :3].cpu().numpy() + img_arr *= 255 + img_arr = img_arr.astype(np.uint8) + im = Image.fromarray(img_arr) + plt.imshow(im) + plt.savefig(f'{target_dir}{angle}_45.png') + plt.close() + + print('Finish!') diff --git a/motion-gan-pipeline/motion-generation/audio_visual.py b/motion-gan-pipeline/motion-generation/audio_visual.py new file mode 100644 index 0000000..0f82fd3 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/audio_visual.py @@ -0,0 +1,245 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import os +from os.path import join +import yaml +import argparse +import numpy as np +from options.test_audio2headpose_options import TestOptions +from datasets import create_dataset +from util.cfgnode import CfgNode +import cv2 +import subprocess +from pathlib import Path +from tqdm import tqdm +import matplotlib +matplotlib.use('agg') +import matplotlib.pyplot as plt +from matplotlib.gridspec import GridSpec +import librosa +import librosa.display +from PIL import Image +import sys +sys.path.append('../preprocessing/') +from face_tracking.FLAME.FLAME import FLAME +from face_tracking.FLAME.config import cfg as FLAME_cfg +from face_tracking.FLAME.lbs import vertices2landmarks +from face_tracking.render_3dmm import Render_FLAME +from face_tracking.util import * +import torch +from scipy.ndimage import gaussian_filter1d + +def write_video_with_audio(audio_path, output_path, prefix='pred_', h=512, w=512, fps=25): + fourcc = cv2.VideoWriter_fourcc(*'DIVX') + # fourcc = cv2.VideoWriter_fourcc('M', 'J', 'P', 'G') + video_tmp_path = join(target_dir, 'tmp.avi') + out = cv2.VideoWriter(video_tmp_path, fourcc, fps, (w, h)) + nframe = len(list(Path(target_dir).glob('*.png'))) + + for j in tqdm(range(nframe), position=0, desc='writing video'): + img = cv2.imread(join(target_dir, prefix + str(j+1) + '.png')) + out.write(img) + out.release() + cmd = 'ffmpeg -y -i "' + video_tmp_path + '" -i "' + \ + audio_path + '" -codec copy -shortest "' + output_path + '"' + subprocess.call(cmd, shell=True) + os.remove(video_tmp_path) # remove the template video + + +if __name__ == '__main__': + # Load default options + Test_parser = TestOptions() + opt = Test_parser.parse() # get training options + + # Load config + parser = argparse.ArgumentParser() + parser.add_argument('--config', default='config/Audio2Headpose_Ted.yml', + help="person name, e.g. 
Obama1, Obama2, May, Nadella, McStay", required=False) + inopt = parser.parse_args() + # TODO: make config + with open(inopt.config) as f: + cfg_dict = yaml.load(f, Loader=yaml.FullLoader) + cfg = CfgNode(cfg_dict) + + # Overwrite with config + opt.phase = 'Test' + opt.name = cfg.experiment.name + opt.dataset_mode = cfg.experiment.dataset_mode + opt.dataroot = cfg.experiment.dataroot + opt.dataset_names = cfg.experiment.dataset_names + opt.FPS = cfg.experiment.fps + + # save to the disk + Test_parser.print_options(opt) + + # Set device + device = torch.device('cuda:{}'.format(opt.gpu_ids[0])) if len( + opt.gpu_ids) > 0 else torch.device('cpu') + + # Load data + # create a dataset given opt.dataset_mode and other options + dataset = create_dataset(opt) + dataset = dataset.dataset + + # Target dir + target_dir= './visuals/Audio_Visual/' + os.makedirs(target_dir, exist_ok=True) + + # Make renderer + h_fl, w_fl = 512, 512 + focal = torch.from_numpy(np.array([900.], dtype=np.float32)) + + cxy = torch.tensor((w_fl / 2.0, h_fl / 2.0), dtype=torch.float).cpu() + + model_3dmm = FLAME(FLAME_cfg.model) + renderer = Render_FLAME(model_3dmm.faces_tensor, focal, h_fl, w_fl, 1, device) + + stop_at = 50 + + for file_index in tqdm(range(len(dataset))): + + if file_index >= stop_at: + break + + test_name = dataset.clip_names[file_index] + + name, flame, frames, audio_packed, audio_features, emotions, landmarks, exps, headposes, velocity_pose, fit_data_path = dataset[file_index] + + # Spectrogram of frequency + audio, sr = audio_packed + fps = 25 + audio_len = len(audio)/sr + + # load fit data + fit_data = torch.load(fit_data_path) + + # Amplitude + X = audio + time_sf = np.linspace(start=0, stop=len(X)/sr, num=len(X)) + lim = np.max([np.abs(np.min(X)), np.max(X)]) + + # Spectogram + X = librosa.stft(audio) + Xdb = librosa.amplitude_to_db(abs(X)) + + # RMS + S, phase = librosa.magphase(X) + rms = librosa.feature.rms(S=S) + times = librosa.times_like(rms) + + # log Power spectrogram + lpw = librosa.amplitude_to_db(S, ref=np.max) + + # Zero Crossing Rate ZCR + zcrs = librosa.feature.zero_crossing_rate(audio) + + # Mel-Frequency Cepstral Coefficients (MFCC) + mfccs = librosa.feature.mfcc(y=audio, sr=sr) + + # Make frames for each fps + for i in tqdm(range(min(len(frames), len(audio_features)))): + + line_at = i/fps + + fig = plt.figure(figsize=(16, 15), tight_layout=True) + fontsize = 18 + gs = GridSpec(5, 2) + current_plot = 0 + column = 1 + + # Amplitude + ax1 = fig.add_subplot(gs[current_plot, column]) + ax1.set_title('Amplitude', fontsize=fontsize-2) + ax1.set_ylabel('Amplitude') + ax1.set_xlabel('Time') + ax1.plot(time_sf, audio, label=name, alpha=0.5) + ax1.set_xlim([0, audio_len]) + ax1.set_ylim([-lim, lim]) + ax1.axvline(line_at, color = 'r') + current_plot += 1 + + # Spectogram + ax2 = fig.add_subplot(gs[current_plot, column]) + ax2.set_title('Spectogram', fontsize=fontsize-2) + librosa.display.specshow(Xdb, sr=sr, x_axis='time', y_axis='hz', ax=ax2) + # plt.colorbar(orientation="horizontal", ax=ax2) + ax2.axvline(line_at, color = 'r') + current_plot += 1 + + # RMS + ax3 = fig.add_subplot(gs[current_plot, column]) + ax3.set_title('RMS - Total magnitude / energy', fontsize=fontsize) + ax3.semilogy(times, rms[0], label='RMS Energy') + ax3.set_xlim([0, audio_len]) + ax3.set_ylim([0, np.max(rms[0])]) + plt.axvline(line_at, color = 'r') + current_plot += 1 + + # log Power spectrogram + ax4 = fig.add_subplot(gs[current_plot, column]) + ax4.set_title('Log Power Spectrogram', fontsize=fontsize-2) + 
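# Note: lpw, computed above as librosa.amplitude_to_db(S, ref=np.max), equals 20*log10(S / S.max()), i.e. STFT magnitude in dB relative to the loudest bin (floored 80 dB below the peak by librosa's default top_db). + # The red vertical line drawn in each panel marks the current video frame at time i / fps, as in the other subplots. + 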
librosa.display.specshow(lpw, y_axis='log', x_axis='time', ax=ax4) + # plt.colorbar(orientation="horizontal", ax=ax4) + plt.axvline(line_at, color = 'r') + current_plot += 1 + + # Mel-Frequency Cepstral Coefficients (MFCC) + ax5 = fig.add_subplot(gs[current_plot, column]) + ax5.set_title('Mel-Frequency Cepstral Coefficients (MFCC)', fontsize=fontsize-2) + librosa.display.specshow(mfccs, sr=sr, x_axis='time', ax=ax5) + # plt.colorbar(orientation="horizontal", ax=ax5) + plt.axvline(line_at, color = 'r') + current_plot += 1 + + # Right column + column = 0 + + # Original frame + ax6 = fig.add_subplot(gs[0:2, column]) + ax6.set_title(name, fontsize=fontsize + 2) + ax6.imshow(Image.open(frames[i])) + plt.axis('off') + + # Talking head + R = fit_data['euler'][i] + R = R.unsqueeze(0).to(device).double() + t = torch.from_numpy(np.array([0, 0, -5])).unsqueeze(0).to(device).double() + id = fit_data['id'].to(device).double() + expr = exps[i].unsqueeze(0).to(device).double() + rott_geo = model_3dmm.forward_geo(id, expr, R, t) + landmarks3d = model_3dmm.get_3dlandmarks(id, expr, R, t, focal, cxy).cpu() + proj_geo = proj_pts(landmarks3d, focal, cxy) + render_imgs = renderer(rott_geo.float(), model_3dmm.faces_tensor) + img_arr = render_imgs[0, :, :, :3].cpu().numpy() + img_arr *= 255 + img_arr = img_arr.astype(np.uint8) + im = Image.fromarray(img_arr) + ax7 = fig.add_subplot(gs[2:4, column]) + ax7.set_title('Flame Head', fontsize=fontsize) + ax7.imshow(im) + # ax7.imshow(Image.open(flame[i])) + plt.axis('off') + + # Deepspeech visuals + ax8 = fig.add_subplot(gs[4, column]) + deepspeech = audio_features[i].reshape((16,29)) + ax8.set_title('Deepspeech Features', fontsize=fontsize-2) + ax8.imshow(deepspeech, cmap='hot', interpolation='nearest') + + # Save and close + plt.savefig(os.path.join(target_dir, f'audio_{i}.png')) + plt.close() + + # make videos + final_path = os.path.join(target_dir, test_name + '.avi') + audio_path = os.path.join(dataset.dataset_root, test_name, test_name +'.wav') + tmp = Image.open(os.path.join(target_dir, f'audio_{i}.png')) + w, h = tmp.size + write_video_with_audio(audio_path, final_path, 'audio_', h, w, fps) + + _img_paths = list(map(lambda x:str(x), list(Path(target_dir).glob('*.png')))) + for i in tqdm(range(len(_img_paths)), desc='deleting intermediate images'): + os.remove(_img_paths[i]) + + print('Finish!') diff --git a/motion-gan-pipeline/motion-generation/config/Audio2Headpose_MTC.yml b/motion-gan-pipeline/motion-generation/config/Audio2Headpose_MTC.yml new file mode 100644 index 0000000..94f11b3 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/config/Audio2Headpose_MTC.yml @@ -0,0 +1,6 @@ +experiment: + dataroot: /mnt/ + dataset_mode: deepspeech + dataset_names: MTC + fps: 25 + name: Audio2Headpose_MTC diff --git a/motion-gan-pipeline/motion-generation/config/Audio2Headpose_Ted.yml b/motion-gan-pipeline/motion-generation/config/Audio2Headpose_Ted.yml new file mode 100644 index 0000000..e982f04 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/config/Audio2Headpose_Ted.yml @@ -0,0 +1,6 @@ +experiment: + dataroot: /mnt/ + dataset_mode: deepspeech + dataset_names: MTC + fps: 25 + name: Audio2Headpose_Ted diff --git a/motion-gan-pipeline/motion-generation/config/Audio2Headpose_Ted_deepspeech.yml b/motion-gan-pipeline/motion-generation/config/Audio2Headpose_Ted_deepspeech.yml new file mode 100644 index 0000000..7df44b7 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/config/Audio2Headpose_Ted_deepspeech.yml @@ -0,0 +1,6 @@ +experiment: + dataroot: 
/mnt/ + dataset_mode: deepspeech + dataset_names: TED384-v2 + fps: 25 + name: Audio2Headpose_Ted_deepspeech diff --git a/motion-gan-pipeline/motion-generation/config/Audio2Headpose_Ted_emotion.yml b/motion-gan-pipeline/motion-generation/config/Audio2Headpose_Ted_emotion.yml new file mode 100644 index 0000000..89cf199 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/config/Audio2Headpose_Ted_emotion.yml @@ -0,0 +1,6 @@ +experiment: + dataroot: /mnt/ + dataset_mode: emotion + dataset_names: TED384-v2 + fps: 25 + name: Audio2Headpose_Ted_emotion diff --git a/motion-gan-pipeline/motion-generation/config/Correlation_TMP_AVSpeech.yml b/motion-gan-pipeline/motion-generation/config/Correlation_TMP_AVSpeech.yml new file mode 100644 index 0000000..6eab26d --- /dev/null +++ b/motion-gan-pipeline/motion-generation/config/Correlation_TMP_AVSpeech.yml @@ -0,0 +1,6 @@ +experiment: + dataroot: /mnt/ + dataset_mode: emotion + dataset_names: TMP_AVSpeech + fps: 25 + name: Correlation_TMP_AVSpeech diff --git a/motion-gan-pipeline/motion-generation/datasets/__init__.py b/motion-gan-pipeline/motion-generation/datasets/__init__.py new file mode 100644 index 0000000..c9e8ba5 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/datasets/__init__.py @@ -0,0 +1,93 @@ +"""This package includes all the modules related to data loading and preprocessing + + To add a custom dataset class called 'dummy', you need to add a file called 'dummy_dataset.py' and define a subclass 'DummyDataset' inherited from BaseDataset. + You need to implement four functions: + -- <__init__>: initialize the class, first call BaseDataset.__init__(self, opt). + -- <__len__>: return the size of dataset. + -- <__getitem__>: get a data point from data loader. + -- : (optionally) add dataset-specific options and set default options. + +Now you can use the dataset class by specifying flag '--dataset_mode dummy'. +See our template dataset class 'template_dataset.py' for more details. +""" +import importlib +import torch.utils.data +from datasets.base_dataset import BaseDataset + + +def find_dataset_using_name(dataset_name): + """Import the module "data/[dataset_name]_dataset.py". + + In the file, the class called DatasetNameDataset() will + be instantiated. It has to be a subclass of BaseDataset, + and it is case-insensitive. + """ + dataset_filename = "datasets." + dataset_name + "_dataset" + datasetlib = importlib.import_module(dataset_filename) + + dataset = None + target_dataset_name = dataset_name.replace('_', '') + 'dataset' + for name, cls in datasetlib.__dict__.items(): + if name.lower() == target_dataset_name.lower() \ + and issubclass(cls, BaseDataset): + dataset = cls + + if dataset is None: + raise NotImplementedError("In %s.py, there should be a subclass of BaseDataset with class name that matches %s in lowercase." % (dataset_filename, target_dataset_name)) + + return dataset + + +def get_option_setter(dataset_name): + """Return the static method of the dataset class.""" + dataset_class = find_dataset_using_name(dataset_name) + return dataset_class.modify_commandline_options + + +def create_dataset(opt): + """Create a dataset given the option. + + This function wraps the class CustomDatasetDataLoader. 
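+ (Concretely, it looks up the dataset class registered for opt.dataset_mode, wraps it in a torch DataLoader inside CustomDatasetDataLoader, and caps iteration at opt.max_dataset_size; see the wrapper class below.)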
+ This is the main interface between this package and 'train.py'/'test.py' + + Example: + >>> from data import create_dataset + >>> dataset = create_dataset(opt) + """ + data_loader = CustomDatasetDataLoader(opt) + dataset = data_loader.load_data() + return dataset + + +class CustomDatasetDataLoader(): + """Wrapper class of Dataset class that performs multi-threaded data loading""" + + def __init__(self, opt): + """Initialize this class + + Step 1: create a dataset instance given the name [dataset_mode] + Step 2: create a multi-threaded data loader. + """ + self.opt = opt + dataset_class = find_dataset_using_name(opt.dataset_mode) + self.dataset = dataset_class(opt) + print("dataset [%s] was created" % type(self.dataset).__name__) + self.dataloader = torch.utils.data.DataLoader( + self.dataset, + batch_size=opt.batch_size, + shuffle=not opt.serial_batches, + num_workers=int(opt.num_threads)) + + def load_data(self): + return self + + def __len__(self): + """Return the number of data in the dataset""" + return min(len(self.dataset), self.opt.max_dataset_size) + + def __iter__(self): + """Return a batch of data""" + for i, data in enumerate(self.dataloader): + if i * self.opt.batch_size >= self.opt.max_dataset_size: + break + yield data diff --git a/motion-gan-pipeline/motion-generation/datasets/audio_dataset.py b/motion-gan-pipeline/motion-generation/datasets/audio_dataset.py new file mode 100644 index 0000000..0de694f --- /dev/null +++ b/motion-gan-pipeline/motion-generation/datasets/audio_dataset.py @@ -0,0 +1,150 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import sys + +sys.path.append("..") + +from datasets.base_dataset import BaseDataset +import torch +import librosa +import os +import numpy as np + + +def load_audio_expressions(clip_root): + audio_expr_path = os.path.join(clip_root, 'audio_expr') + + audio_expr = [] + for i in range(len(os.listdir(audio_expr_path))): + audio_expr.append(np.load( os.path.join(audio_expr_path, f'audio_expression_{i}.npy'))[0]) + try: + audio_expr = np.stack(audio_expr, axis=0) + return torch.from_numpy(audio_expr) + + except ValueError: + return None + + +class AudioDataset(BaseDataset): + """ + + # for wavenet: + # |----receptive_field----| + # |--output_length--| + # example: | | | | | | | | | | | | | | | | | | | | | + # target: | | | | | | | | | | + + """ + + def __init__(self, opt): + # save the option and datasets root + BaseDataset.__init__(self, opt) + self.isTrain = self.opt.isTrain + self.state = opt.phase + self.dataset_name = opt.dataset_names + self.target_length = opt.time_frame_length + self.sample_rate = opt.sample_rate + self.fps = opt.FPS + + self.audioRF_history = opt.audioRF_history + self.audioRF_future = opt.audioRF_future + # self.compute_mel_online = opt.compute_mel_online + # self.feature_name = opt.feature_name + + self.audio_samples_one_frame = self.sample_rate / self.fps + self.frame_jump_stride = opt.frame_jump_stride + self.augment = False + self.task = opt.task + self.item_length_audio = int((self.audioRF_history + self.audioRF_future) / self.fps * self.sample_rate) + + #Audio2Headpose flags + self.A2H_receptive_field = opt.A2H_receptive_field + self.A2H_item_length = self.A2H_receptive_field + self.target_length - 1 #204 + self.audio_window = opt.audio_windows + self.half_audio_win = int(self.audio_window / 2) #1 + self.frame_future = opt.frame_future #15 + self.predict_length = opt.predict_length + self.predict_len = int((self.predict_length - 1) / 2) 
#0 + + self.gpu_ids = opt.gpu_ids + self.device = torch.device('cuda:{}'.format(self.gpu_ids[0])) if self.gpu_ids else torch.device('cpu') + + self.total_len = 0 + + if opt.train_test_split: + self.dataset_root = os.path.join(self.root, self.dataset_name, self.state) + self.clip_names = sorted(os.listdir(self.dataset_root)) + + else: + self.dataset_root = self.root + self.clip_names = [self.dataset_name] + + print(self.dataset_root) + print(self.clip_names) + + # check clips + self.valid_clips = [] + for i in range(len(self.clip_names)): + # check lenght of video + name = self.clip_names[i] + clip_root = os.path.join(self.dataset_root, name) + + deepspeech_feature_path = os.path.join(clip_root, 'audio_feature') + + audio_features_len = len(os.listdir(deepspeech_feature_path)) + + # if audio_features_len > self.A2H_item_length: + # self.valid_clips.append(i) + + # else: + # print(f'Audio {name} is too short and will not be used for training.') + + self.valid_clips.append(i) + + + self.clip_nums = len(self.valid_clips) + print(f'Total clips for training: {self.clip_nums}') + + # main info + self.audio = [''] * self.clip_nums + self.audio_features = [''] * self.clip_nums + self.fit_data = [''] * self.clip_nums + + # audio expressions + self.audio_expr = [''] * self.clip_nums + + + for i in range(len(self.valid_clips)): + name = self.clip_names[i] + clip_root = os.path.join(self.dataset_root, name) + deepspeech_feature_path = os.path.join(clip_root, 'audio_feature') + + # Load audio + try: + audio_path = os.path.join(clip_root, name + '.wav') + self.audio[i], _ = librosa.load(audio_path, sr=self.sample_rate) + + except FileNotFoundError: + pass + + # load deepspeech + self.audio_features[i] = torch.from_numpy(np.stack([np.load(os.path.join(deepspeech_feature_path, ds)).flatten() for ds in os.listdir(deepspeech_feature_path)], axis=0)) + + + try: + fit_data_path = os.path.join(clip_root, 'track_params.pt') + fit_data = torch.load(fit_data_path) + self.fit_data[i] = fit_data + # load audio expr + self.audio_expr[i] = load_audio_expressions(clip_root) + + except FileNotFoundError: + pass + + def __getitem__(self, index): + return index + + def __len__(self): + return len(self.audio_features) + diff --git a/motion-gan-pipeline/motion-generation/datasets/base_dataset.py b/motion-gan-pipeline/motion-generation/datasets/base_dataset.py new file mode 100644 index 0000000..a64b758 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/datasets/base_dataset.py @@ -0,0 +1,64 @@ +"""This module implements an abstract base class (ABC) 'BaseDataset' for datasets. + +It also includes common transformation functions (e.g., get_transform, __scale_width), which can be later used in subclasses. +""" + +import numpy as np +import torch.utils.data as data +from PIL import Image +import torchvision.transforms as transforms +from abc import ABC, abstractmethod + + +class BaseDataset(data.Dataset, ABC): + """This class is an abstract base class (ABC) for datasets. + + To create a subclass, you need to implement the following four functions: + -- <__init__>: initialize the class, first call BaseDataset.__init__(self, opt). + -- <__len__>: return the size of dataset. + -- <__getitem__>: get a data point. + -- : (optionally) add dataset-specific options and set default options. 
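+ + For illustration, a minimal sketch of such a subclass (all names here are hypothetical): + + class DummyDataset(BaseDataset): + def __init__(self, opt): + BaseDataset.__init__(self, opt) + # assumes a flat folder of per-sample files under opt.dataroot + self.paths = sorted(os.listdir(opt.dataroot)) + + def __len__(self): + return len(self.paths) + + def __getitem__(self, index): + return {'path': self.paths[index]} + + Saved as datasets/dummy_dataset.py, it becomes selectable with '--dataset_mode dummy'.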
+ """ + + def __init__(self, opt): + """Initialize the class; save the options in the class + + Parameters: + opt (Option class)-- stores all the experiment flags; needs to be a subclass of BaseOptions + """ + self.opt = opt + self.root = opt.dataroot + + @staticmethod + def modify_commandline_options(parser, is_train): + """Add new dataset-specific options, and rewrite default values for existing options. + + Parameters: + parser -- original option parser + is_train (bool) -- whether training phase or test phase. You can use this flag to add training-specific or test-specific options. + + Returns: + the modified parser. + """ + return parser + + @abstractmethod + def __len__(self): + """Return the total number of images in the dataset.""" + return 0 + + @abstractmethod + def __getitem__(self, index): + """Return a data point and its metadata information. + + Parameters: + index - - a random integer for data indexing + + Returns: + a dictionary of data with their names. It ususally contains the data itself and its metadata information. + """ + pass + + + + diff --git a/motion-gan-pipeline/motion-generation/datasets/deepspeech_dataset.py b/motion-gan-pipeline/motion-generation/datasets/deepspeech_dataset.py new file mode 100644 index 0000000..62e21f1 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/datasets/deepspeech_dataset.py @@ -0,0 +1,262 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import sys +from unittest import skip + +sys.path.append("..") + +from datasets.base_dataset import BaseDataset +import scipy.io as sio +import torch +import librosa +import bisect +import os +import numpy as np + +from funcs import utils + + +class DeepspeechDataset(BaseDataset): + """ DECA datasets. currently, return 2D info and 3D tracking info. 
+ + # for wavenet: + # |----receptive_field----| + # |--output_length--| + # example: | | | | | | | | | | | | | | | | | | | | | + # target: | | | | | | | | | | + + """ + + def __init__(self, opt): + # save the option and datasets root + BaseDataset.__init__(self, opt) + self.isTrain = self.opt.isTrain + self.state = opt.dataset_type + self.dataset_name = opt.dataset_names + self.target_length = opt.time_frame_length + self.sample_rate = opt.sample_rate + self.fps = opt.FPS + + self.audioRF_history = opt.audioRF_history + self.audioRF_future = opt.audioRF_future + + #Audio2Headpose flags + self.A2H_receptive_field = opt.A2H_receptive_field + self.A2H_item_length = self.A2H_receptive_field + self.target_length - 1 #204 + self.frame_future = opt.frame_future #15 + self.predict_length = opt.predict_length + self.predict_len = int((self.predict_length - 1) / 2) #0 + + self.gpu_ids = opt.gpu_ids + self.device = torch.device('cuda:{}'.format(self.gpu_ids[0])) if self.gpu_ids else torch.device('cpu') + + self.total_len = 0 + start_point = self.A2H_receptive_field + if opt.train_test_split: + self.dataset_root = os.path.join(self.root, self.dataset_name, self.state) + self.clip_names = sorted(os.listdir(self.dataset_root)) + + else: + self.dataset_root = self.root + self.clip_names = [self.dataset_name] + + + # check clips + self.valid_clips = [] + for i in range(len(self.clip_names)): + # check lenght of video + name = self.clip_names[i] + clip_root = os.path.join(self.dataset_root, name) + n_frames = len(os.listdir(os.path.join(clip_root, 'frames'))) + + if n_frames >= start_point + self.target_length: # added 25 because later they remove 25 and without it, crashes + + deepspeech_feature_path = os.path.join(clip_root, 'audio_feature') + + audio_features_len = len(os.listdir(deepspeech_feature_path)) + + if audio_features_len > self.A2H_item_length: + self.valid_clips.append(i) + + else: + print(f'Audio {name} is too short and will not be used for training.') + + else: + print(f'Clip {name} is too short and will not be used for training.') + + self.clip_nums = len(self.valid_clips) + print(f'Total clips for training: {self.clip_nums}') + + # main info + self.audio = [''] * self.clip_nums + self.audio_features = [''] * self.clip_nums + self.feats = [''] * self.clip_nums + self.exps = [''] * self.clip_nums + self.pts3d = [''] * self.clip_nums + self.rot_angles = [''] * self.clip_nums + self.trans = [''] * self.clip_nums + self.headposes = [''] * self.clip_nums + self.velocity_pose = [''] * self.clip_nums + self.acceleration_pose = [''] * self.clip_nums + self.mean_trans = [''] * self.clip_nums + if self.state == 'Test': + self.landmarks = [''] * self.clip_nums + # meta info + self.start_point = [''] * self.clip_nums + self.end_point = [''] * self.clip_nums + self.len = [''] * self.clip_nums + self.sample_start = [] + self.clip_valid = ['True'] * self.clip_nums + self.invalid_clip = [] + + self.mouth_related_indices = np.concatenate([np.arange(4, 11), np.arange(46, 64)]) + + # if opt.use_delta_pts: + # self.pts3d_mean = np.load(os.path.join(self.dataset_root, 'mean_pts3d.npy')) + + for i in range(len(self.valid_clips)): + name = self.clip_names[self.valid_clips[i]] + clip_root = os.path.join(self.dataset_root, name) + deepspeech_feature_path = os.path.join(clip_root, 'audio_feature') + + # load deepspeech + self.audio_features[i] = torch.from_numpy(np.stack([np.load(os.path.join(deepspeech_feature_path, ds)).flatten() for ds in sorted(os.listdir(deepspeech_feature_path))], axis=0)) + + # 3D landmarks 
& headposes + self.start_point[i] = start_point # They had 300 at 60 fps + fit_data_path = os.path.join(clip_root, 'track_params.pt') + fit_data = torch.load(fit_data_path) + self.fit_data = fit_data + + self.rot_angles[i] = fit_data['euler'] + + # # change -180~180 to 0~360 + # rot_change = self.rot_angles[i][:, 0] < 0 + # self.rot_angles[i][rot_change, 0] += 360 + # self.rot_angles[i][:, 0] -= 180 # change x axis direction + + # use delta translation + self.mean_trans[i] = fit_data['trans'].mean(axis=0) + self.trans[i] = fit_data['trans'] - self.mean_trans[i] + + self.headposes[i] = np.concatenate([self.rot_angles[i], self.trans[i]], axis=1) + self.velocity_pose[i] = np.concatenate([np.zeros(6)[None, :], self.headposes[i][1:] - self.headposes[i][:-1]]) + self.acceleration_pose[i] = np.concatenate([np.zeros(6)[None, :], self.velocity_pose[i][1:] - self.velocity_pose[i][:-1]]) + + # print(clip_root) + # print('Headposes: ', self.headposes[i].shape) + # print('Audio: ', self.audio_features[i].size()) + + total_frames = min(self.trans[i].shape[0] - 25, self.audio_features[i].shape[0] - 25) # They had -60 (1 second at 60 fps) # Crashes without 25 + # print(f'Total Frames for clip {i}: {total_frames}') + + valid_frames = total_frames - self.start_point[i] + self.len[i] = valid_frames - self.target_length # ??? They had - 400 (6,66 s at 60 fps) + if i == 0: + self.sample_start.append(0) + else: + prev_start = self.sample_start[-1] + len_prev = self.len[i - 1] + self.sample_start.append(prev_start + len_prev - 1) + self.total_len += np.int32(np.floor(self.len[i])) + + + def __getitem__(self, index): + # recover real index from compressed one + index_real = np.int32(index) + # find which audio file and the start frame index + file_index = bisect.bisect_right(self.sample_start, index_real) -1 + current_frame = index_real - self.sample_start[file_index] + self.start_point[file_index] + current_target_length = self.target_length + + # find the history info start points + A2H_history_start = current_frame - self.A2H_receptive_field + A2H_item_length = self.A2H_item_length # 204 + A2H_receptive_field = self.A2H_receptive_field + + + A2Hsamples = np.zeros([A2H_item_length, 16 * 29]) #deepspeech + for i in range(A2H_item_length - 1): + try: + A2Hsamples[i] = self.audio_features[file_index][A2H_history_start + i] + + except IndexError: + print('### ERROR AUDIO ###') + print('Name: ', self.clip_names[self.valid_clips[file_index]]) + print('Current Frame: ', current_frame) + print('Data Index: ', index_real) + print('Starts at: ', self.sample_start[file_index]) + print('Len: ', self.len[file_index]) + print(A2H_history_start + i) + print(self.audio_features[file_index].size()) + print(self.headposes[file_index].shape) + exit() + + if self.predict_len == 0: + try: + target_headpose = self.headposes[file_index][ + A2H_history_start + A2H_receptive_field: A2H_history_start + A2H_item_length + 1] # [current frame: current frame + target] + history_headpose = self.headposes[file_index][A2H_history_start: A2H_history_start + A2H_item_length] + history_headpose = history_headpose.reshape(A2H_item_length, -1) # [current_frame - self.A2H_receptive_field : current frame + target -1] + + target_velocity = self.velocity_pose[file_index][ + A2H_history_start + A2H_receptive_field: A2H_history_start + A2H_item_length + 1] + history_velocity = self.velocity_pose[file_index][ + A2H_history_start: A2H_history_start + A2H_item_length].reshape(A2H_item_length, + -1) + target_info = torch.from_numpy( + 
np.concatenate([target_headpose, target_velocity], axis=1).reshape(current_target_length, + -1)).float() + except ValueError: + print('### ERROR HEAD ###') + print('Name: ', self.clip_names[self.valid_clips[file_index]]) + print('Current Frame: ', current_frame) + print('Data Index: ', index_real) + print('Starts at: ', self.sample_start[file_index]) + print('Len: ', self.len[file_index]) + print(target_headpose.size) + exit() + + else: + history_headpose = self.headposes[file_index][ + A2H_history_start: A2H_history_start + A2H_item_length] + history_headpose = history_headpose.reshape(A2H_item_length, -1) #Same + + history_velocity = self.velocity_pose[file_index][ + A2H_history_start: A2H_history_start + A2H_item_length] + history_velocity = history_velocity.reshape(A2H_item_length, -1) #Same + + target_headpose_ = self.headposes[file_index][ + A2H_history_start + A2H_receptive_field - self.predict_len: A2H_history_start + A2H_item_length + 1 + self.predict_len + 1] # [current frame - predict_len: current frame + target + predict_len + 1] + target_headpose = np.zeros([current_target_length, self.predict_length, target_headpose_.shape[1]]) + for i in range(current_target_length): + target_headpose[i] = target_headpose_[i: i + self.predict_length] + target_headpose = target_headpose # .reshape(current_target_length, -1, order='F') + + target_velocity_ = self.headposes[file_index][ + A2H_history_start + A2H_receptive_field - self.predict_len: A2H_history_start + A2H_item_length + 1 + self.predict_len + 1] + target_velocity = np.zeros([current_target_length, self.predict_length, target_velocity_.shape[1]]) + for i in range(current_target_length): + target_velocity[i] = target_velocity_[i: i + self.predict_length] + target_velocity = target_velocity # .reshape(current_target_length, -1, order='F') + + target_info = torch.from_numpy( + np.concatenate([target_headpose, target_velocity], axis=2).reshape(current_target_length, + -1)).float() + + A2Hsamples = torch.from_numpy(A2Hsamples).float() + + history_info = torch.from_numpy(np.concatenate([history_headpose, history_velocity], axis=1)).float() + + # [item_length, mel_channels, mel_width], or [item_length, APC_hidden_size] + + return A2Hsamples, history_info, target_info + + + def __len__(self): + return self.total_len + # return self.total_len - 2000 # Hardcoded to make it work... does not work with short videos + + + diff --git a/motion-gan-pipeline/motion-generation/datasets/emotion_dataset.py b/motion-gan-pipeline/motion-generation/datasets/emotion_dataset.py new file mode 100644 index 0000000..55b2b76 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/datasets/emotion_dataset.py @@ -0,0 +1,136 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +from email.mime import audio +import sys + +sys.path.append("..") + +from datasets.base_dataset import BaseDataset +import scipy.io as sio +import torch +import librosa +import os +import numpy as np + +from funcs import utils + + +class EmotionDataset(BaseDataset): + """ + Load Emotions and headposes. 
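+ + Each clip folder under dataroot/<dataset_name>/<phase>/<clip_name>/ is expected to contain (paths as read by __init__ below): + <clip_name>.wav (audio track), + frames/ (video frames 0.jpg, 1.jpg, ...), + debug/debug_render/ (FLAME debug renders), + audio_feature/ (per-frame DeepSpeech features <i>.deepspeech.npy), + emotions/ (per-frame valence/arousal/expression .npy files, optional), + deca_expr/ (per-frame DECA expression codes <i>.npy), + landmarks/ (per-frame landmark .txt files), + track_params.pt (head tracking: euler angles and translations).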
+ """ + + def __init__(self, opt): + # save the option and datasets root + BaseDataset.__init__(self, opt) + self.isTrain = self.opt.isTrain + self.state = opt.dataset_type + self.dataset_name = opt.dataset_names + self.fps = opt.FPS + + self.dataset_root = os.path.join(self.root, self.dataset_name, self.state) + self.clip_names = sorted(os.listdir(self.dataset_root)) + + self.clip_nums = len(self.clip_names) + print(f'Total clips for training: {self.clip_nums}') + + # main info + self.frames = [''] * self.clip_nums + self.flame = [''] * self.clip_nums + self.audio = [''] * self.clip_nums + self.audio_features = [''] * self.clip_nums + self.emotions = [''] * self.clip_nums + self.exps = [''] * self.clip_nums + self.pts3d = [''] * self.clip_nums + self.rot_angles = [''] * self.clip_nums + self.trans = [''] * self.clip_nums + self.headposes = [''] * self.clip_nums + self.velocity_pose = [''] * self.clip_nums + self.acceleration_pose = [''] * self.clip_nums + self.mean_trans = [''] * self.clip_nums + self.landmarks = [''] * self.clip_nums + self.fit_data_path = [''] * self.clip_nums + + self.len = [''] * self.clip_nums + self.clip_valid = ['True'] * self.clip_nums + + + for i in range(len(self.clip_names)): + name = self.clip_names[i] + clip_root = os.path.join(self.dataset_root, name) + + # Paths to subfolders + frames_path = os.path.join(clip_root, 'frames') + flame_path = os.path.join(clip_root, 'debug/debug_render') + deepspeech_feature_path = os.path.join(clip_root, 'audio_feature') + emotions_path = os.path.join(clip_root, 'emotions') + expr_path = os.path.join(clip_root, 'deca_expr') + landmarks_path = os.path.join(clip_root, 'landmarks') + fit_data_path = os.path.join(clip_root, 'track_params.pt') + + # load wav audio signal + # Open wav file and read frames as bytes + x, sr = librosa.load(os.path.join(clip_root, f'{name}.wav')) + self.audio[i] = (x, sr) + + # load frames + self.frames[i] = [os.path.join(frames_path, f'{f}.jpg') for f in range(len(os.listdir(frames_path)))] + + # load debug + self.flame[i] = [os.path.join(flame_path, f'{f}.jpg') for f in range(len(os.listdir(flame_path)))] + + # load deepspeech + self.audio_features[i] = torch.from_numpy(np.stack([np.load(os.path.join(deepspeech_feature_path, f'{ds}.deepspeech.npy')).flatten() for ds in range(len(os.listdir(deepspeech_feature_path)))], axis=0)) + + # load emotions + try: + self.emotions[i] = [np.load(os.path.join(emotions_path, em), allow_pickle=True) for em in sorted(os.listdir(emotions_path))] + + except FileNotFoundError: + self.emotions[i] = None + + # load landmarks + self.landmarks[i] = torch.from_numpy(np.stack([np.loadtxt(os.path.join(landmarks_path, ldk)) for ldk in sorted(os.listdir(landmarks_path))], axis=0)) + + # load expressions + self.exps[i] = torch.from_numpy(np.stack([np.load(os.path.join(expr_path, f'{exp}.npy')) for exp in range(len(os.listdir(expr_path)))], axis=0)) + + # 3D landmarks & headposes + fit_data = torch.load(fit_data_path) + self.fit_data = fit_data + self.fit_data_path[i] = fit_data_path + self.rot_angles[i] = fit_data['euler'] + + self.mean_trans[i] = fit_data['trans'].mean(axis=0) + self.trans[i] = fit_data['trans'] - self.mean_trans[i] + self.headposes[i] = np.concatenate([self.rot_angles[i], self.trans[i]], axis=1) + self.velocity_pose[i] = np.concatenate([np.zeros(6)[None, :], self.headposes[i][1:] - self.headposes[i][:-1]]) + self.acceleration_pose[i] = np.concatenate([np.zeros(6)[None, :], self.velocity_pose[i][1:] - self.velocity_pose[i][:-1]]) + + + return + + def 
__getitem__(self, index): + + data = ( + self.clip_names[index], + self.flame[index], + self.frames[index], + self.audio[index], + self.audio_features[index], + self.emotions[index], + self.landmarks[index], + self.exps[index], + self.headposes[index], + self.velocity_pose[index], + self.fit_data_path[index], + ) + + return data + + def __len__(self): + return self.clip_nums + + + diff --git a/motion-gan-pipeline/motion-generation/datasets/full_dataset.py b/motion-gan-pipeline/motion-generation/datasets/full_dataset.py new file mode 100644 index 0000000..876e417 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/datasets/full_dataset.py @@ -0,0 +1,320 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import sys +from unittest import skip + +sys.path.append("..") + +from datasets.base_dataset import BaseDataset +import scipy.io as sio +import torch +import librosa +import bisect +import os +import numpy as np +from models.networks import APC_encoder + +from funcs import utils + + +class FullDataset(BaseDataset): + """ DECA datasets. currently, return 2D info and 3D tracking info. + + # for wavenet: + # |----receptive_field----| + # |--output_length--| + # example: | | | | | | | | | | | | | | | | | | | | | + # target: | | | | | | | | | | + + """ + + def __init__(self, opt): + # save the option and datasets root + BaseDataset.__init__(self, opt) + self.isTrain = self.opt.isTrain + self.state = opt.dataset_type + self.dataset_name = opt.dataset_names + self.target_length = opt.time_frame_length + self.sample_rate = opt.sample_rate + self.fps = opt.FPS + + self.audioRF_history = opt.audioRF_history + self.audioRF_future = opt.audioRF_future + # self.compute_mel_online = opt.compute_mel_online + # self.feature_name = opt.feature_name + + self.audio_samples_one_frame = self.sample_rate / self.fps + self.frame_jump_stride = opt.frame_jump_stride + self.augment = False + self.task = opt.task + self.item_length_audio = int((self.audioRF_history + self.audioRF_future) / self.fps * self.sample_rate) + + #Audio2Headpose flags + self.A2H_receptive_field = opt.A2H_receptive_field + self.A2H_item_length = self.A2H_receptive_field + self.target_length - 1 #204 + self.audio_window = opt.audio_windows + self.half_audio_win = int(self.audio_window / 2) # 1 + self.frame_future = opt.frame_future #15 + self.predict_length = opt.predict_length + self.predict_len = int((self.predict_length - 1) / 2) #0 + + self.gpu_ids = opt.gpu_ids + self.device = torch.device('cuda:{}'.format(self.gpu_ids[0])) if self.gpu_ids else torch.device('cpu') + + self.total_len = 0 + start_point = 105 + self.dataset_root = os.path.join(self.root, self.dataset_name, self.state) + self.clip_names = sorted(os.listdir(self.dataset_root)) + + + # check clips + self.valid_clips = [] + for i in range(len(self.clip_names)): + # check lenght of video + name = self.clip_names[i] + clip_root = os.path.join(self.dataset_root, name) + n_frames = len(os.listdir(os.path.join(clip_root, 'frames'))) + + if n_frames >= start_point + self.target_length + 25: # added 25 because later they remove 25 and without it, crashes + APC_name = os.path.split(self.opt.APC_model_path)[-1] + APC_feature_file = name + '_APC_feature_V0324_ckpt_{}.npy'.format(APC_name) + APC_feature_path = os.path.join(clip_root, APC_feature_file) + + if not os.path.exists(APC_feature_path): + self.valid_clips.append(i) + continue + + audio_features = np.load(APC_feature_path).astype(np.float32) + + if 
audio_features.shape[0] >= 2 * self.A2H_item_length: + self.valid_clips.append(i) + + else: + print(f'Audio {name} is too short and will not be used for training.') + + else: + print(f'Clip {name} is too short and will not be used for training.') + + self.clip_nums = len(self.valid_clips) + print(f'Total clips for training: {self.clip_nums}') + + # main info + self.audio = [''] * self.clip_nums + self.audio_features = [''] * self.clip_nums + self.feats = [''] * self.clip_nums + self.exps = [''] * self.clip_nums + self.pts3d = [''] * self.clip_nums + self.rot_angles = [''] * self.clip_nums + self.trans = [''] * self.clip_nums + self.headposes = [''] * self.clip_nums + self.velocity_pose = [''] * self.clip_nums + self.acceleration_pose = [''] * self.clip_nums + self.mean_trans = [''] * self.clip_nums + if self.state == 'Test': + self.landmarks = [''] * self.clip_nums + # meta info + self.start_point = [''] * self.clip_nums + self.end_point = [''] * self.clip_nums + self.len = [''] * self.clip_nums + self.sample_start = [] + self.clip_valid = ['True'] * self.clip_nums + self.invalid_clip = [] + + self.mouth_related_indices = np.concatenate([np.arange(4, 11), np.arange(46, 64)]) + + # if opt.use_delta_pts: + # self.pts3d_mean = np.load(os.path.join(self.dataset_root, 'mean_pts3d.npy')) + + for i in range(self.clip_nums): + name = self.clip_names[self.valid_clips[i]] + clip_root = os.path.join(self.dataset_root, name) + + # Load audio + audio_path = os.path.join(clip_root, name + '.wav') + self.audio[i], _ = librosa.load(audio_path, sr=self.sample_rate) + + if self.opt.audio_encoder == 'APC': + APC_name = os.path.split(self.opt.APC_model_path)[-1] + APC_feature_file = name + '_APC_feature_V0324_ckpt_{}.npy'.format(APC_name) + APC_feature_path = os.path.join(clip_root, APC_feature_file) + need_deepfeats = False if os.path.exists(APC_feature_path) else True + if not need_deepfeats: + self.audio_features[i] = np.load(APC_feature_path).astype(np.float32) + else: + need_deepfeats = False + + # 3D landmarks & headposes + self.start_point[i] = start_point # They had 300 at 60 fps + fit_data_path = os.path.join(clip_root, 'track_params.pt') + fit_data = torch.load(fit_data_path) + self.fit_data = fit_data + + ori_pts3d = fit_data['pts3D'] + self.pts3d[i] = ori_pts3d + + self.feats[i] = self.pts3d[i] + + # elif opt.feature_dtype == 'FW': + # self.feats[i] = fit_data['exp'] + + self.rot_angles[i] = fit_data['euler'] + # change -180~180 to 0~360 + rot_change = self.rot_angles[i][:, 0] < 0 + self.rot_angles[i][rot_change, 0] += 360 + self.rot_angles[i][:, 0] -= 180 # change x axis direction + + # use delta translation + self.mean_trans[i] = fit_data['trans'].mean(axis=0) + self.trans[i] = fit_data['trans'] - self.mean_trans[i] + + self.headposes[i] = np.concatenate([self.rot_angles[i], self.trans[i]], axis=1) + self.velocity_pose[i] = np.concatenate( + [np.zeros(6)[None, :], self.headposes[i][1:] - self.headposes[i][:-1]]) + self.acceleration_pose[i] = np.concatenate( + [np.zeros(6)[None, :], self.velocity_pose[i][1:] - self.velocity_pose[i][:-1]]) + + total_frames = self.feats[i].shape[0] - 25 # They had -60 (1 second at 60 fps) # Crashes without 25 + print(f'Total Frames for clip {i}: {total_frames}') + + if need_deepfeats: + if self.opt.audio_encoder == 'APC': + print('datasets {} need to pre-compute APC features ...'.format(name)) + print('first we compute mel spectram for datasets {} '.format(name)) + mel80 = utils.compute_mel_one_sequence(self.audio[i], sr=opt.sample_rate, fps=opt.FPS) + mel_nframe = 
mel80.shape[0] + print('loading pre-trained model: ', self.opt.APC_model_path) + APC_model = APC_encoder(self.opt.audiofeature_input_channels, + self.opt.APC_hidden_size, + self.opt.APC_rnn_layers, + self.opt.APC_residual) + APC_model.load_state_dict(torch.load(self.opt.APC_model_path, map_location=str(self.device)), + strict=False) + # APC_model.load_state_dict(torch.load(self.opt.APC_model_path), strict=False) + APC_model.cuda() + APC_model.eval() + with torch.no_grad(): + length = torch.Tensor([mel_nframe]) + # hidden_reps = torch.zeros([mel_nframe, self.opt.APC_hidden_size]).cuda() + mel80_torch = torch.from_numpy(mel80.astype(np.float32)).cuda().unsqueeze(0) + hidden_reps = APC_model.forward(mel80_torch, length)[0] # [mel_nframe, 512] + hidden_reps = hidden_reps.cpu().numpy() + np.save(APC_feature_path, hidden_reps) + self.audio_features[i] = hidden_reps + + valid_frames = total_frames - self.start_point[i] + self.len[i] = valid_frames - self.target_length # ??? They had - 400 (6,66 s at 60 fps) + if i == 0: + self.sample_start.append(0) + else: + self.sample_start.append(self.sample_start[-1] + self.len[i - 1] - 1) + self.total_len += np.int32(np.floor(self.len[i] / self.frame_jump_stride)) + + def __getitem__(self, index): + # recover real index from compressed one + index_real = np.int32(index * self.frame_jump_stride) + # find which audio file and the start frame index + file_index = bisect.bisect_right(self.sample_start, index_real) - 1 + current_frame = index_real - self.sample_start[file_index] + self.start_point[file_index] + current_target_length = self.target_length + + if self.task == 'Audio2Feature': + # start point is current frame + A2Lsamples = self.audio_features[file_index][current_frame * 2: (current_frame + self.seq_len) * 2] + target_pts3d = self.feats[file_index][current_frame: current_frame + self.seq_len, self.indices].reshape( + self.seq_len, -1) + + A2Lsamples = torch.from_numpy(A2Lsamples).float() + target_pts3d = torch.from_numpy(target_pts3d).float() + + # [item_length, mel_channels, mel_width], or [item_length, APC_hidden_size] + return A2Lsamples, target_pts3d + + + elif self.task == 'Audio2Headpose': + if self.opt.feature_decoder == 'WaveNet': + # find the history info start points + A2H_history_start = current_frame - self.A2H_receptive_field + A2H_item_length = self.A2H_item_length # 204 + A2H_receptive_field = self.A2H_receptive_field + + if self.half_audio_win == 1: + A2Hsamples = self.audio_features[file_index][2 * (A2H_history_start + self.frame_future): 2 * (A2H_history_start + self.frame_future + A2H_item_length)] # Why 2? 
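+ # Note: the factor 2 in the APC indexing here, as in the Audio2Feature branch above ('current_frame * 2'), is consistent with the audio features being extracted at twice the video frame rate, i.e. two feature frames per video frame; this is inferred from the indexing and not documented by the authors.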
+ else: + A2Hsamples = np.zeros([A2H_item_length, 512]) + for i in range(A2H_item_length): + A2Hsamples[i] = self.audio_features[file_index][A2H_history_start + i] + + if self.predict_len == 0: + target_headpose = self.headposes[file_index][ + A2H_history_start + A2H_receptive_field: A2H_history_start + A2H_item_length + 1] # [current frame: current frame + target] + history_headpose = self.headposes[file_index][A2H_history_start: A2H_history_start + A2H_item_length] + history_headpose = history_headpose.reshape(A2H_item_length,-1) # [current_frame - self.A2H_receptive_field : current frame + target -1] + + target_velocity = self.velocity_pose[file_index][ + A2H_history_start + A2H_receptive_field: A2H_history_start + A2H_item_length + 1] + history_velocity = self.velocity_pose[file_index][ + A2H_history_start: A2H_history_start + A2H_item_length].reshape(A2H_item_length, + -1) + target_info = torch.from_numpy( + np.concatenate([target_headpose, target_velocity], axis=1).reshape(current_target_length, + -1)).float() + + else: + history_headpose = self.headposes[file_index][ + A2H_history_start: A2H_history_start + A2H_item_length] + history_headpose = history_headpose.reshape(A2H_item_length, -1) #Same + + history_velocity = self.velocity_pose[file_index][ + A2H_history_start: A2H_history_start + A2H_item_length] + history_velocity = history_velocity.reshape(A2H_item_length, -1) #Same + + target_headpose_ = self.headposes[file_index][ + A2H_history_start + A2H_receptive_field - self.predict_len: A2H_history_start + A2H_item_length + 1 + self.predict_len + 1] # [current frame - predict_len: current frame + target + predict_len + 1] + target_headpose = np.zeros([current_target_length, self.predict_length, target_headpose_.shape[1]]) + for i in range(current_target_length): + target_headpose[i] = target_headpose_[i: i + self.predict_length] + target_headpose = target_headpose # .reshape(current_target_length, -1, order='F') + + target_velocity_ = self.headposes[file_index][ + A2H_history_start + A2H_receptive_field - self.predict_len: A2H_history_start + A2H_item_length + 1 + self.predict_len + 1] + target_velocity = np.zeros([current_target_length, self.predict_length, target_velocity_.shape[1]]) + for i in range(current_target_length): + target_velocity[i] = target_velocity_[i: i + self.predict_length] + target_velocity = target_velocity # .reshape(current_target_length, -1, order='F') + + target_info = torch.from_numpy( + np.concatenate([target_headpose, target_velocity], axis=2).reshape(current_target_length, + -1)).float() + + A2Hsamples = torch.from_numpy(A2Hsamples).float() + + history_info = torch.from_numpy(np.concatenate([history_headpose, history_velocity], axis=1)).float() + + # [item_length, mel_channels, mel_width], or [item_length, APC_hidden_size] + + return A2Hsamples, history_info, target_info + + + elif self.opt.feature_decoder == 'LSTM': + A2Hsamples = self.audio_features[file_index][ + current_frame * 2: (current_frame + self.opt.A2H_receptive_field) * 2] + + target_headpose = self.headposes[file_index][ + current_frame: current_frame + self.opt.A2H_receptive_field] + target_velocity = self.velocity_pose[file_index][ + current_frame: current_frame + self.opt.A2H_receptive_field] + target_info = torch.from_numpy( + np.concatenate([target_headpose, target_velocity], axis=1).reshape(self.opt.A2H_receptive_field, + -1)).float() + + A2Hsamples = torch.from_numpy(A2Hsamples).float() + + # [item_length, mel_channels, mel_width], or [item_length, APC_hidden_size] + return 
A2Hsamples, target_info + + def __len__(self): + return self.total_len + + + diff --git a/motion-gan-pipeline/motion-generation/emotion_correlation_visual.py b/motion-gan-pipeline/motion-generation/emotion_correlation_visual.py new file mode 100644 index 0000000..c887cac --- /dev/null +++ b/motion-gan-pipeline/motion-generation/emotion_correlation_visual.py @@ -0,0 +1,174 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import os +from os.path import join +import yaml +import argparse +import numpy as np +from options.test_audio2headpose_options import TestOptions +from datasets import create_dataset +from util.cfgnode import CfgNode +from torch.nn import functional as F + +from tqdm import tqdm +import matplotlib +matplotlib.use('agg') +import matplotlib.pyplot as plt +from matplotlib import colors + +import torch +from scipy.stats import pearsonr + + +if __name__ == '__main__': + # Load default options + Test_parser = TestOptions() + opt = Test_parser.parse() # get test options + + # Load config + parser = argparse.ArgumentParser() + parser.add_argument('--config', default='config/Audio2Headpose_Ted.yml', + help="path to the experiment config file, e.g. config/Audio2Headpose_Ted.yml", required=False) + inopt = parser.parse_args() + # TODO: make config + with open(inopt.config) as f: + cfg_dict = yaml.load(f, Loader=yaml.FullLoader) + cfg = CfgNode(cfg_dict) + + # Overwrite with config + opt.phase = 'Test' + opt.name = cfg.experiment.name + opt.dataset_mode = cfg.experiment.dataset_mode + opt.dataroot = cfg.experiment.dataroot + opt.dataset_names = cfg.experiment.dataset_names + opt.FPS = cfg.experiment.fps + + # save options to disk + Test_parser.print_options(opt) + + # Set device + device = torch.device('cuda:{}'.format(opt.gpu_ids[0])) if len( + opt.gpu_ids) > 0 else torch.device('cpu') + + # Load data + # create a dataset given opt.dataset_mode and other options + dataset = create_dataset(opt) + dataset = dataset.dataset + + # Target dir + target_dir = './visuals/Correlation/' + os.makedirs(target_dir, exist_ok=True) + + total_movement = [] + total_emotions = [] + total_valence = [] + total_arousal = [] + + emotion_labels = [ + 'Neutral', + 'Happy', + 'Sad', + 'Surprise', + 'Fear', + 'Disgust', + 'Anger', + 'Contempt', + ] + + for file_index in tqdm(range(len(dataset))): + + test_name = dataset.clip_names[file_index] + + name, flame, frames, audio_packed, audio_features, emotions, landmarks, exps, headposes, velocity_pose, fit_data_path = dataset[file_index] + + try: + # Compute average total movement (sum of all movement divided by number of frames) + total_i = np.sum(np.abs(velocity_pose), axis=0) / headposes.shape[0] + + avg_valence = np.mean(np.stack([frame.item()['valence'].flatten() for frame in emotions], axis=0)) + avg_arousal = np.mean(np.stack([frame.item()['arousal'].flatten() for frame in emotions], axis=0)) + avg_class = np.mean([frame.item()['expr_classification'] for frame in emotions], axis=0) + + except Exception: + print(f'Error in: {test_name}') + continue # skip clips with missing or malformed emotion data, so all result lists stay aligned + + total_movement.append(total_i) + + avg_class = torch.tensor(avg_class) + softmax = F.softmax(avg_class, dim=1) + top_expr = torch.argmax(softmax, dim=1) + + total_emotions.append(top_expr.numpy()[0]) + total_valence.append(avg_valence) + total_arousal.append(avg_arousal) + + total_emotions = np.array(total_emotions) + total_movement = np.array(total_movement) + total_arousal = np.array(total_arousal) + total_valence = np.array(total_valence) + + # Emotion 
distribution + plt.title(f'Emotion Distribution out of 100 videos') + xticks = np.arange(8) + 0.5 + plt.xticks(xticks, emotion_labels, rotation=0) # Set text labels and properties. + plt.yticks([]) + # Make histogram + n, bins, patches = plt.hist(total_emotions, bins=np.arange(8), label=emotion_labels) + # We'll color code by height, but you could use any scalar + fracs = n / n.max() + # we need to normalize the data to 0..1 for the full range of the colormap + norm = colors.Normalize(fracs.min(), fracs.max()) + + for idx, value in enumerate(n): + if value > 0: + plt.text(xticks[idx], value+1, int(value), ha='center') + + for thisfrac, thispatch in zip(fracs, patches): + color = plt.cm.viridis(norm(thisfrac)) + thispatch.set_facecolor(color) + + plt.savefig(f'{target_dir}Emotion_distr.png') + plt.close() + + for idx, angle in enumerate(['Pitch', 'Roll', 'Yaw']): + + # Emotion + plt.xticks(np.arange(8), emotion_labels, rotation=0) # Set text labels and properties. + plt.title(f'{angle} Angle') + plt.scatter(x=total_emotions, y=total_movement[:, idx]) + plt.ylabel('Avg total movement') + plt.savefig(f'{target_dir}{angle}_Emotions.png') + plt.close() + + # Arousal + r, p = pearsonr(x=total_arousal, y=total_movement[:, idx]) + r = round(r, 3) + p = round(p, 3) + x = np.linspace(np.min(total_arousal), np.max(total_arousal), 1000) + + plt.title(f'{angle} Angle and Arousal\n Correlation: {int(r*100)}%, p-value: {int(p*100)}%') + plt.scatter(x=total_arousal, y=total_movement[:, idx]) + plt.plot(x, (x * r) + np.mean(total_movement[:, idx]), linestyle='solid', color='red') + plt.xlabel('Arousal') + plt.ylabel('Avg total movement') + plt.savefig(f'{target_dir}{angle}_Arousal.png') + plt.close() + + # Valence + r, p = pearsonr(x=total_valence, y=total_movement[:, idx]) + r = round(r, 3) + p = round(p, 3) + x = np.linspace(np.min(total_valence), np.max(total_valence), 1000) + + plt.title(f'{angle} Angle and Valence\n Correlation: {int(r*100)}%, p-value: {int(p*100)}%') + plt.scatter(x=total_valence, y=total_movement[:, idx]) + plt.plot(x, (x * r) + np.mean(total_movement[:, idx]), linestyle='solid', color='red') + plt.xlabel('Valence') + plt.ylabel('Avg total movement') + plt.savefig(f'{target_dir}{angle}_Valence.png') + plt.close() + + print('Finish!') diff --git a/motion-gan-pipeline/motion-generation/environment.yml b/motion-gan-pipeline/motion-generation/environment.yml new file mode 100644 index 0000000..1033a91 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/environment.yml @@ -0,0 +1,179 @@ +name: adnerf +channels: + - conda-forge + - defaults +dependencies: + - _libgcc_mutex=0.1=main + - _openmp_mutex=4.5=1_gnu + - autopep8=1.5.7=pyhd3eb1b0_0 + - blas=1.0=mkl + - bzip2=1.0.8=h7b6447c_0 + - ca-certificates=2021.5.25=h06a4308_1 + - cairo=1.14.12=h8948797_3 + - certifi=2021.5.30=py37h06a4308_0 + - configargparse=1.4=pyhd3eb1b0_0 + - cudatoolkit=11.1.1=h6406543_8 + - cycler=0.10.0=py37_0 + - dbus=1.13.18=hb2f20db_0 + - expat=2.4.1=h2531618_2 + - ffmpeg=4.0=hcdf2ecd_0 + - fontconfig=2.13.1=h6c09931_0 + - freeglut=3.0.0=hf484d3e_5 + - freetype=2.10.4=h5ab3b9f_0 + - glib=2.63.1=h5a9c865_0 + - graphite2=1.3.14=h23475e2_0 + - gst-plugins-base=1.14.0=hbbd80ab_1 + - gstreamer=1.14.0=hb453b48_1 + - harfbuzz=1.8.8=hffaf4a1_0 + - hdf5=1.10.2=hba1933b_1 + - icu=58.2=he6710b0_3 + - imageio=2.9.0=pyhd3eb1b0_0 + - intel-openmp=2021.2.0=h06a4308_610 + - jasper=2.0.14=h07fcdf6_1 + - joblib=1.0.1=pyhd3eb1b0_0 + - jpeg=9b=h024ee3a_2 + - kiwisolver=1.3.1=py37h2531618_0 + - lcms2=2.12=h3be6417_0 + - 
libedit=3.1.20210216=h27cfd23_1 + - libffi=3.2.1=hf484d3e_1007 + - libgcc-ng=9.3.0=h5101ec6_17 + - libgfortran-ng=7.5.0=ha8ba4b0_17 + - libgfortran4=7.5.0=ha8ba4b0_17 + - libglu=9.0.0=hf484d3e_1 + - libgomp=9.3.0=h5101ec6_17 + - libllvm10=10.0.1=hbcb73fb_5 + - libopencv=3.4.2=hb342d67_1 + - libopus=1.3.1=h7b6447c_0 + - libpng=1.6.37=hbc83047_0 + - libstdcxx-ng=9.3.0=hd4cf53a_17 + - libtiff=4.2.0=h85742a9_0 + - libuuid=1.0.3=h1bed415_2 + - libuv=1.41.0=h7f98852_0 + - libvpx=1.7.0=h439df22_0 + - libwebp-base=1.2.0=h27cfd23_0 + - libxcb=1.14=h7b6447c_0 + - libxml2=2.9.10=hb55368b_3 + - llvmlite=0.36.0=py37h612dafd_4 + - lz4-c=1.9.3=h2531618_0 + - matplotlib=3.3.4=py37h06a4308_0 + - matplotlib-base=3.3.4=py37h62a2d02_0 + - mkl=2021.2.0=h06a4308_296 + - mkl-service=2.3.0=py37h27cfd23_1 + - mkl_fft=1.3.0=py37h42c9631_2 + - mkl_random=1.2.1=py37ha9443f7_2 + - natsort=7.1.1=pyhd3eb1b0_0 + - ncurses=6.2=he6710b0_1 + - numba=0.53.1=py37ha9443f7_0 + - numpy=1.20.2=py37h2d18471_0 + - numpy-base=1.20.2=py37hfae3a4d_0 + - olefile=0.46=pyh9f0ad1d_1 + - opencv=3.4.2=py37h6fd60c2_1 + - openssl=1.1.1k=h27cfd23_0 + - pandas=1.2.4=py37h2531618_0 + - pcre=8.44=he6710b0_0 + - pillow=8.2.0=py37he98fc37_0 + - pixman=0.40.0=h7b6447c_0 + - py-opencv=3.4.2=py37hb342d67_1 + - pycodestyle=2.7.0=pyhd3eb1b0_0 + - pyparsing=2.4.7=pyhd3eb1b0_0 + - pyqt=5.9.2=py37h05f1152_2 + - python=3.7.1=h0371630_7 + - python-dateutil=2.8.1=pyhd3eb1b0_0 + - python_abi=3.7=1_cp37m + - pytz=2021.1=pyhd3eb1b0_0 + - qt=5.9.7=h5867ecd_1 + - readline=7.0=h7b6447c_5 + - resampy=0.2.2=py_0 + - scikit-learn=0.24.2=py37ha9443f7_0 + - scipy=1.6.2=py37had2a1c9_1 + - sip=4.19.8=py37hf484d3e_0 + - six=1.16.0=pyhd3eb1b0_0 + - sqlite=3.33.0=h62c20be_0 + - tbb=2020.3=hfd86e86_0 + - threadpoolctl=2.1.0=pyh5ca1d4c_0 + - tk=8.6.10=hbc83047_0 + - toml=0.10.2=pyhd3eb1b0_0 + - tornado=6.1=py37h27cfd23_0 + - tqdm=4.59.0=pyhd3eb1b0_1 + - typing_extensions=3.10.0.0=pyha770c72_0 + - xz=5.2.5=h7b6447c_0 + - zlib=1.2.11=h7b6447c_3 + - zstd=1.4.9=haebb681_0 + - pip: + - absl-py==0.13.0 + - antlr4-python3-runtime==4.8 + - appdirs==1.4.4 + - astor==0.8.1 + - audioread==2.1.9 + - av==8.1.0 + - cached-property==1.5.2 + - cffi==1.15.0 + - chardet==4.0.0 + - chumpy==0.70 + - click==8.0.3 + - cloudpickle==2.0.0 + - decorator==4.4.2 + - face-alignment==1.3.4 + - filelock==3.4.2 + - future==0.18.2 + - fvcore==0.1.5.post20210617 + - gast==0.2.2 + - google-pasta==0.2.0 + - grpcio==1.38.0 + - h5py==3.2.1 + - huggingface-hub==0.4.0 + - idna==2.10 + - imageio-ffmpeg==0.4.5 + - importlib-metadata==4.5.0 + - iopath==0.1.8 + - keras-applications==1.0.8 + - keras-preprocessing==1.1.2 + - kornia==0.4.0 + - librosa==0.8.1 + - markdown==3.3.4 + - moviepy==1.0.3 + - networkx==2.5.1 + - ninja==1.10.2.3 + - omegaconf==2.1.1 + - opencv-python==4.5.2.54 + - opt-einsum==3.3.0 + - packaging==21.3 + - pims==0.5 + - pip==22.0.2 + - pooch==1.6.0 + - portalocker==2.3.0 + - proglog==0.1.9 + - protobuf==3.17.3 + - pycocotools==2.0.4 + - pycparser==2.21 + - pydub==0.25.1 + - python-speech-features==0.6 + - pywavelets==1.1.1 + - pyyaml==5.1.1 + - regex==2022.1.18 + - requests==2.25.1 + - sacremoses==0.0.47 + - scikit-image==0.18.1 + - sentencepiece==0.1.96 + - setuptools==60.6.0 + - slicerator==1.0.0 + - soundfile==0.10.3.post1 + - speechrecognition==3.8.1 + - tabulate==0.8.9 + - tensorboard==1.15.0 + - tensorflow-estimator==1.15.1 + - tensorflow-gpu==1.15.2 + - termcolor==1.1.0 + - tifffile==2021.6.14 + - tokenizers==0.11.4 + - torch==1.9.0+cu111 + - torchaudio==0.9.0 + - torchvision==0.10.0+cu111 + 
- transformers==4.16.2 + - urllib3==1.26.5 + - werkzeug==2.0.1 + - wheel==0.37.1 + - wrapt==1.12.1 + - yacs==0.1.8 + - zipp==3.4.1 +prefix: /home/alberto/anaconda3/envs/adnerf diff --git a/motion-gan-pipeline/motion-generation/finetune.py b/motion-gan-pipeline/motion-generation/finetune.py new file mode 100644 index 0000000..23a3663 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/finetune.py @@ -0,0 +1,109 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import imp +import os +import time +import yaml +from options.train_audio2headpose_options import TrainOptions +from datasets import create_dataset +from models import create_model +from util.visualizer import Visualizer +from tqdm import tqdm +import argparse +from util.cfgnode import CfgNode +import numpy as np + + +def finetune(name, dataroot, dataset_names, target_checkpoints, checkpoint_path, dataset_mode='deepspeech', fps=25): + print('Fine-tuning model..') + # Load default options + Train_parser = TrainOptions() + opt = Train_parser.parse() # get training options + + # Overwrite with input + opt.name = name + opt.dataset_mode = dataset_mode + opt.dataroot = os.path.join(dataroot, 'video') + opt.dataset_names = dataset_names + opt.FPS = fps + + # Finetune params + opt.n_epochs = 15 + opt.n_epochs_decay = 10 + opt.checkpoints_dir = target_checkpoints + opt.train_dataset_names = [os.path.join(opt.dataroot, opt.dataset_names)] + opt.validate_dataset_names = [] + opt.train_test_split = False + opt.lr = 1e-4 + + # save to the disk + Train_parser.print_options(opt) + + # Load data + dataset = create_dataset(opt) # create a dataset given opt.dataset_mode and other options + dataset_size = len(dataset) # get the number of images in the dataset. + print('The number of training images = %d' % dataset_size) + + model = create_model(opt) # create a model given opt.model and other options + model.setup(opt) # regular setup: load and print networks; create schedulers + model.load_checkpoint(checkpoint_path) # Load checkpoint to finetune + visualizer = Visualizer(opt) # create a visualizer that display/save images and plots + total_iters = 0 # the total number of training iterations + + for epoch in tqdm(range(opt.epoch_count, opt.n_epochs + opt.n_epochs_decay + 1)): # outer loop for different epochs; we save the model by , + + epoch_start_time = time.time() # timer for entire epoch + iter_data_time = time.time() # timer for data loading per iteration + epoch_iter = 0 # the number of training iterations in current epoch, reset to 0 every epoch + # visualizer.reset() # reset the visualizer: make sure it saves the results to HTML at least once every epoch + model.update_learning_rate() # update learning rates in the beginning of every epoch. 
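+        # Each batch yielded by the deepspeech dataset is unpacked downstream in
+        # Audio2HeadposeModel.set_input() as (audio_feats, history_headpose,
+        # target_headpose) when the WaveNet decoder is used, or as
+        # (audio_feats, target_headpose) for the LSTM decoder
+        # (see models/audio2headpose_model.py later in this diff).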
+        for i, data in enumerate(dataset):  # inner loop within one epoch
+
+            iter_start_time = time.time()  # timer for computation per iteration
+            if total_iters % opt.print_freq == 0:
+                t_data = iter_start_time - iter_data_time
+
+            total_iters += opt.batch_size
+            epoch_iter += opt.batch_size
+            model.set_input(data)         # unpack data from dataset and apply preprocessing
+            model.optimize_parameters()   # calculate loss functions, get gradients, update network weights
+
+            # if total_iters % opt.print_freq == 0:    # print training losses and save logging information to the disk
+            #     losses = model.get_current_losses()
+            #     t_comp = (time.time() - iter_start_time) / opt.batch_size
+            #     visualizer.print_current_errors(epoch, total_iters, losses, t_data)
+            #     if opt.display_id > 0:
+            #         visualizer.plot_current_errors(losses, total_iters)
+
+            iter_data_time = time.time()
+
+        if epoch % opt.save_epoch_freq == 0 and epoch!=0:  # cache our model every <save_epoch_freq> epochs
+            print('saving the model at the end of epoch %d, iters %d' % (epoch, total_iters))
+            model.save_networks('epoch_%d' % (epoch))
+
+        # early stopping
+        losses = model.get_current_losses()
+        if losses['GMM'] <= 0:
+            print('Negative loss at the end of epoch %d, total iters %d' % (epoch, total_iters))
+            print('Stopping Training')
+            break
+
+        print('End of epoch %d / %d \t Time Taken: %d sec' % (epoch, opt.n_epochs + opt.n_epochs_decay, time.time() - epoch_start_time))
+
+    model.save_networks('latest')
+
+if __name__ == '__main__':
+
+    # Load config
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--name', default='', help="person name, e.g. Obama1, Obama2, May, Nadella, McStay")
+    parser.add_argument('--dataset_mode', default='deepspeech', help="type of dataset")
+    parser.add_argument('--dataroot', help="Path to data folders")
+    parser.add_argument('--dataset_names', default='', help="Name of the dataset")
+    parser.add_argument('--fps', default=25, help="target fps")
+    parser.add_argument('--target_checkpoints', default="", help="Path to the output checkpoint directory")
+    parser.add_argument('--checkpoint_path', default="./checkpoints/latest_Audio2Headpose.pkl", help="Path to the checkpoints to finetune")
+
+    inopt = parser.parse_args()
+
+    finetune(inopt.name, inopt.dataroot, inopt.dataset_names, inopt.target_checkpoints, inopt.checkpoint_path, inopt.dataset_mode, inopt.fps)
diff --git a/motion-gan-pipeline/motion-generation/funcs/audio_funcs.py b/motion-gan-pipeline/motion-generation/funcs/audio_funcs.py
new file mode 100644
index 0000000..1f55930
--- /dev/null
+++ b/motion-gan-pipeline/motion-generation/funcs/audio_funcs.py
@@ -0,0 +1,426 @@
+import os
+import os.path
+import math
+# import sox
+#import pyworld as pw
+import torch
+import torch.utils.data
+import numpy as np
+import librosa
+
+
+"""
+usage
+fft = Audio2Mel().cuda()
+# audio shape is B x 1 x T, the normalized mel shape is B x D x T
+mel = fft(audio)
+"""
+from librosa.filters import mel as librosa_mel_fn
+import torch.nn.functional as F
+class Audio2Mel(torch.nn.Module):
+    def __init__(
+        self,
+        n_fft=512,
+        hop_length=256,
+        win_length=1024,
+        sampling_rate=16000,
+        n_mel_channels=80,
+        mel_fmin=90,
+        mel_fmax=7600.0,
+    ):
+        super(Audio2Mel, self).__init__()
+        ##############################################
+        # FFT Parameters                             #
+        ##############################################
+        window = torch.hann_window(win_length).float()
+        mel_basis = librosa_mel_fn(
+            sampling_rate, n_fft, n_mel_channels, mel_fmin, mel_fmax
+        )
+        mel_basis = torch.from_numpy(mel_basis).float()
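+        # mel_basis has shape [n_mel_channels, n_fft // 2 + 1]; register_buffer
+        # (below) makes it follow .to(device)/.cuda() and be stored in the
+        # state_dict without becoming a trainable parameter.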
self.register_buffer("mel_basis", mel_basis) + self.register_buffer("window", window) + self.n_fft = n_fft + self.hop_length = hop_length + self.win_length = win_length + self.sampling_rate = sampling_rate + self.n_mel_channels = n_mel_channels + self.min_mel = math.log(1e-5) + self.mel_fmin = mel_fmin + self.mel_fmax = mel_fmax + + """ + input audio signal (-1,1): B x 1 x T + output mel signal: B x D x T', T' is a reduction of T + """ + + def forward(self, audio, normalize=True): + p = (self.n_fft - self.hop_length) // 2 + audio = F.pad(audio, (p, p), "reflect").squeeze(1) + fft = torch.stft( + audio, + n_fft=self.n_fft, + hop_length=self.hop_length, + win_length=self.win_length, + window=self.window, + center=False, + ) + real_part, imag_part = fft.unbind(-1) + magnitude = torch.sqrt(real_part ** 2 + imag_part ** 2) + mel_output = torch.matmul(self.mel_basis, magnitude) + log_mel_spec = torch.log(torch.clamp(mel_output, min=1e-5)) + + # normalize to the range [0,1] + if normalize: + log_mel_spec = (log_mel_spec - self.min_mel) / -self.min_mel + return log_mel_spec + + def mel_to_audio(self, mel): + mel = torch.exp(mel * (-self.min_mel) + self.min_mel) ** 2 + mel_np = mel.cpu().numpy() + audio = librosa.feature.inverse.mel_to_audio(mel_np, sr=self.sampling_rate, n_fft=self.n_fft, + hop_length=self.hop_length, win_length=self.win_length, + window='hann', center=False, + pad_mode='reflect', power=2.0, n_iter=32, fmin=self.mel_fmin, + fmax=self.mel_fmax) + return audio + + """ + here we will get per frame energy to replace mc0 in the corresponding prosody representation + the audio is already in the gpu card for accerelate the computation speed + input audio signal: B x 1 x T + output energy: B x 1 x T' + """ + + def get_energy(self, audio, normalize=True): + # B x 1 x T + p = (self.n_fft - self.hop_length) // 2 + audio_new = F.pad(audio, (p, p), "reflect").squeeze(1) + # audio_new = audio.squeeze(1) + audio_fold = audio_new.unfold(1, self.win_length, self.hop_length) + audio_energy = torch.sqrt(torch.mean(audio_fold ** 2, dim=-1)) + audio_energy = torch.log(torch.clamp(audio_energy, min=1e-5)) + if normalize: + audio_energy = (audio_energy - self.min_mel) / -self.min_mel + return audio_energy + + # we can get the energy of mels here, B*D*T + def get_energy_mel(self, mels, normalize=True): + m = mels.exp().mean(dim=1) + audio_energy = torch.log(m) + # audio_energy = torch.log(torch.clamp(m,min=1e-5)) + # if normalize: + # audio_energy = (audio_energy - self.min_mel) / -self.min_mel + return audio_energy + + + + +def mu_law_encoding(data, mu=255): + '''encode the original audio via mu-law companding and mu-bits quantization + ''' + # mu-law companding + mu_x = np.sign(data) * np.log(1 + mu * np.abs(data)) / np.log(mu + 1) + # mu-bits quantization from [-1, 1] to [0, mu] + mu_x = (mu_x + 1) / 2 * mu + 0.5 + return mu_x.astype(np.int32) + +#%timeit mu_x = mu_law_encoding(x, 255) 305 µs ± 554 ns per loop (mean ± std. dev. of 7 runs, 1000 loops each) + + +def mu_law_decoding(data, mu=255): + '''inverse the mu-law compressed and quantized data. + ''' + # dequantization + y = 2 * (data.astype(np.float32) / mu) - 1 + # inverse mu-law companding + x = np.sign(y) * (1.0 / mu) * ((1.0 + mu)**abs(y) - 1.0) + return x + + + +## audio augmentation +def inject_gaussian_noise(data, noise_factor, use_torch=False): + ''' inject random gaussian noise (mean=0, std=1) to audio clip + In my test, a reasonable factor region could be [0, 0.01] + larger will be too large and smaller could be ignored. 
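+    For example, noise_factor=0.005 sits in the middle of that range:
+        noisy = inject_gaussian_noise(wav, noise_factor=0.005)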
+ Args: + data: [n,] original audio sequence + noise_factor(float): scaled factor + use_torch(bool): optional, if use_torch=True, input data and implementation will + be torch methods. + Returns: + augmented_data: [n,] noised audio clip + + ''' + if use_torch == False: + augmented_data = data + noise_factor * np.random.normal(0, 1, len(data)) + # Cast back to same data type + augmented_data = augmented_data.astype(type(data[0])) + # use torch + else: + augmented_data = data + noise_factor * torch.randn(1).cuda() + + return augmented_data + + +# pitch shifting +def pitch_shifting(data, sampling_rate=48000, factor=5): + ''' shift the audio pitch. + ''' + # Permissible factor values = -5 <= x <= 5 + pitch_factor = np.random.rand(1) * 2 * factor - factor + return librosa.effects.pitch_shift(data, sampling_rate, pitch_factor) + + +def speed_change(data, landmark=None): + ''' change the speed of input audio. Note that we return the speed_rate to + change the speed of landmarks or videos. + Args: + data: [n,] audio clip + landmark: [m, pts, 2] aligned landmarks with audio if existed. + ''' + # Permissible factor values = 0.7 <= x <= 1.3 (higher is faster) + # resulted audio length: np.round(n/rate) + speed_rate = np.random.uniform(0.7, 1.3) + # only augment audio + if landmark == None: + return librosa.effects.time_stretch(data, speed_rate), speed_rate + else: +# n_after = np.round(data.shape[0]/speed_rate) + pass + + + + +def world_augment(wav, sr, op): + f0, sp, ap = pw.wav2world(wav.astype(np.float64), sr) + op = op if op is not None else np.random.randint(0,4) + if op == 0: + base_f0 = np.random.randint(100,300) +# base_f0 = np.random.randint(100, 200) + robot_like_f0 = np.ones_like(f0) * base_f0 # 100是个适当的数字 + robot_like = pw.synthesize(robot_like_f0, sp, ap, sr) + out_wav = robot_like + elif op == 1: + ratio = 1 + np.random.rand() + female_like_sp = np.zeros_like(sp) + for f in range(female_like_sp.shape[1]): + female_like_sp[:, f] = sp[:, int(f/ratio)] + ratio_f = 0.65 + 1.4 * np.random.rand() + out_wav = pw.synthesize(f0*ratio_f, female_like_sp, ap, sr) + elif op == 2: + # change the current pitch here + ratio = 0.65 + 1.4 * np.random.rand() + out_wav = pw.synthesize(f0*ratio, sp, ap, sr) + elif op == 3: + # the random masking using the time axis + mask_len = np.random.randint(0,256 * 4) + mask_pos = np.random.randint(0, wav.shape[0] - mask_len + 1) + out_wav = np.copy(wav) + out_wav[mask_pos:mask_pos+mask_len] = 0 + else: + out_wav = np.copy(wav) + + return out_wav.astype(np.float32) + + +def sox_augment(wav, sr, tempo_ratio=1.0, op=None): + aug_choice = op if op is not None else np.random.randint(low=1, high=8) +# tempo_ratio = 1.0 + hop_length = 256 + tfm = sox.Transformer() + if aug_choice == 1: + # 1 pitch aug + param = np.random.uniform(-5.0, 5.0) + tfm.pitch(param) + elif aug_choice == 2: + # 2 tempo aug, when tempo_ratio is around 1.0, no tempo aug +# tempo_ratio = np.random.uniform(0.5, 2.0) +# if tempo_ratio >= 0.9 and tempo_ratio <= 1.1: +# tempo_ratio = 1.0 +# if tempo_ratio != 1.0: +# tfm.tempo(tempo_ratio, 's', quick=False) + pass + elif aug_choice == 3: + # 3 gain aug + param = np.random.uniform(-20, 5) + tfm.norm() + tfm.gain(param) + elif aug_choice == 4: + # 4 echo aug + # delays = np.random.uniform(5, 60) + # decays = np.random.uniform(0.2, 0.6) + # tfm.echo(delays=[delays], decays=[decays]) + pass + elif aug_choice == 5: + # 5 reverb aug + param = np.random.uniform(0, 100, size=(4,)) + tfm.reverb(param[0], param[1], param[2], param[3]) + elif aug_choice == 6: + # 6 
bandreject aug + param1 = np.random.randint(200, 7500) + param2 = np.random.uniform(1.0, 4.0) + tfm.bandreject(param1, param2) + elif aug_choice == 7: + # 8 equalizer aug + param1 = np.random.randint(200, 7500) + param2 = np.random.uniform(1.0, 4.0) + param3 = np.random.uniform(-20, 5) + tfm.equalizer(param1, param2, param3) + else: + raise RuntimeError('Aug choice error!') + + wave_length = wav.shape[0] + if aug_choice == 1:# When using pitch augmentation, pad silence to keep audio length + wav = np.concatenate((wav, np.array([0.0]*(hop_length * 2))), axis=0) + + aug_wave_data = tfm.build_array(input_array=wav, sample_rate_in=sr) + + if aug_choice == 1:# Keep audio length unchanged when using pitch augmentation + aug_wave_data = aug_wave_data[:wave_length] + + return aug_wave_data + + +def sox_augment_v2(wav, sr, op=None): + aug_choice = op if op is not None else np.random.randint(low=1, high=5) + hop_length = 256 + tfm = sox.Transformer() + if aug_choice == 1: + # 1 pitch aug + param = np.random.uniform(-5.0, 5.0) + tfm.pitch(param) + elif aug_choice == 2: + # 5 reverb aug + param = np.random.uniform(0, 100, size=(4,)) + tfm.reverb(param[0], param[1], param[2], param[3]) + elif aug_choice == 3: + # 6 bandreject aug + param1 = np.random.randint(200, 7500) + param2 = np.random.uniform(1.0, 4.0) + tfm.bandreject(param1, param2) + elif aug_choice == 4: + # 8 equalizer aug + param1 = np.random.randint(200, 7500) + param2 = np.random.uniform(1.0, 4.0) + param3 = np.random.uniform(-20, 5) + tfm.equalizer(param1, param2, param3) + else: + raise RuntimeError('Aug choice error!') + + wave_length = wav.shape[0] + if aug_choice == 1:# When using pitch augmentation, pad silence to keep audio length + wav = np.concatenate((wav, np.array([0.0]*(hop_length * 2))), axis=0) + + aug_wave_data = tfm.build_array(input_array=wav, sample_rate_in=sr) + + if aug_choice == 1:# Keep audio length unchanged when using pitch augmentation + aug_wave_data = aug_wave_data[:wave_length] + + return aug_wave_data + + +def audio_output_augment(wav, sr, op=None): + aug_choice = op if op is not None else np.random.randint(low=1, high=4) + tfm = sox.Transformer() + if aug_choice == 1: + # 5 reverb aug + param = np.random.uniform(0, 100, size=(4,)) + tfm.reverb(param[0], param[1], param[2], param[3]) + elif aug_choice == 2: + # 6 bandreject aug + param1 = np.random.randint(200, 7500) + param2 = np.random.uniform(1.0, 4.0) + tfm.bandreject(param1, param2) + elif aug_choice == 3: + # 8 equalizer aug + param1 = np.random.randint(200, 7500) + param2 = np.random.uniform(1.0, 4.0) + param3 = np.random.uniform(-20, 5) + tfm.equalizer(param1, param2, param3) + else: + raise RuntimeError('Aug choice error!') + + aug_wave_data = tfm.build_array(input_array=wav, sample_rate_in=sr) + + return aug_wave_data + + +def audio_time_augment(wav, sr, time_scale): + tfm = sox.Transformer() + tfm.tempo(time_scale, 's', quick=False) + + aug_wave_data = tfm.build_array(input_array=wav, sample_rate_in=sr) + + return aug_wave_data + + +def prepare_noises(scp_file, root=None, sampline_rate=None, ignore_class=None): + noises = [] + print('Loading augmentation noises...') + with open(scp_file,'r') as fp: + for line in fp.readlines(): + line = line.rstrip('\n') + if ignore_class is not None and ignore_class in line: + continue + + noise, sr = librosa.load(os.path.join(root, line), sr=sampline_rate) + noises.append(noise) + print('Augmentation noises loaded!') + return noises, sr + + +def add_gauss_noise(wav, noise_std=0.03, max_wav_value=1.0): + if 
isinstance(wav, np.ndarray): + wav = torch.tensor(wav.copy()) + + real_std = np.random.random() * noise_std + wav_new = wav.float() / max_wav_value + torch.randn(wav.size()) * real_std + wav_new = wav_new * max_wav_value + wav_new = wav_new.clamp_(-max_wav_value, max_wav_value) + + return wav_new.float().numpy() + +def add_background_noise(wav, noises, min_snr=2, max_snr=15): + def mix_noise(wav, noise, scale): + x = wav + scale * noise + x = x.clip(-1, 1) + return x + + def voice_energy(wav): + wav_float = np.copy(wav) + return np.sum(wav_float ** 2) / (wav_float.shape[0] + 1e-5) + + def voice_energy_ratio(wav, noise, target_snr): + wav_eng = voice_energy(wav) + noise_eng = voice_energy(noise) + target_noise_eng = wav_eng / (10 ** (target_snr / 10.0)) + ratio = target_noise_eng / (noise_eng + 1e-5) + return ratio + + total_id = len(noises) + # 0 is no need to generate the noise + idx = np.random.choice(range(0, total_id)) + noise_wav = noises[idx] + if noise_wav.shape[0] > wav.shape[0]: + sel_range_id = np.random.choice(range(0, noise_wav.shape[0] - wav.shape[0])) + n = noise_wav[sel_range_id:sel_range_id + wav.shape[0]] + else: + n = np.zeros(wav.shape[0]) + sel_range_id = np.random.choice(range(0, wav.shape[0] - noise_wav.shape[0] + 1)) + n[sel_range_id:sel_range_id + noise_wav.shape[0]] = noise_wav + # + target_snr = np.random.random() * (max_snr - min_snr) + min_snr + scale = voice_energy_ratio(wav, n, target_snr) + wav_new = mix_noise(wav, n, scale) + return wav_new + + +def noise_augment(wav, wav_noises, gaussian_prob=0.5): + if np.random.random() > gaussian_prob:# add gauss noise + noise_std = np.random.uniform(low=0.001, high=0.02) + aug_wave_data = add_gauss_noise(wav, noise_std=noise_std) + else:# add background noise + aug_wave_data = add_background_noise(wav, wav_noises, min_snr=2, max_snr=15) + + return aug_wave_data diff --git a/motion-gan-pipeline/motion-generation/funcs/utils.py b/motion-gan-pipeline/motion-generation/funcs/utils.py new file mode 100644 index 0000000..0565af2 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/funcs/utils.py @@ -0,0 +1,422 @@ +import sys +sys.path.append("..") +from . import audio_funcs + +import numpy as np +from math import cos, sin +import torch +from numpy.linalg import solve +from scipy.ndimage import gaussian_filter1d, median_filter +from sklearn.neighbors import KDTree +import time +from tqdm import tqdm + + +class camera(object): + def __init__(self, fx=0, fy=0, cx=0, cy=0): + self.name = 'default camera' + self.fx = fx + self.fy = fy + self.cx = cx + self.cy = cy + self.relative_rotation = np.diag([1,1,1]).astype(np.float32) + self.relative_translation = np.zeros(3, dtype=np.float32) +# self.intrinsic = np.array([[self.fx, 0, self.cx], +# [0, self.fy, self.cy], +# [0, 0, 1]]) + + def intrinsic(self, trans_matrix=0): + ''' compute the intrinsic matrix + ''' + intrinsic = np.array([[self.fx, 0, self.cx], + [0, self.fy, self.cy], + [0, 0, 1]]) + + return intrinsic + + def relative(self): + ''' compute the relative transformation 4x4 matrix with respect to the + first camera kinect. specially the kinect's relative transformation + matrix is exact a identity matrix. 
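+        Returns:
+            relative: [4, 4] homogeneous matrix with relative_rotation in the
+            top-left 3x3 block and relative_translation in the last column.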
+ ''' + relative = np.eye(4, dtype=np.float32) + relative[:3, :3] = self.relative_rotation + relative[:3, 3] = self.relative_translation + + return relative + + def transform_intrinsic(self, transform_matrix): + ''' change the camera intrinsic matrix + transformed_intrinsic = transform_matrix * intrinsic + ''' + scale = transform_matrix[0,0] + self.fx *= scale + self.fy *= scale + self.cx = scale * self.cx + transform_matrix[0, 2] + self.cy = scale * self.cy + transform_matrix[1, 2] + + +# def compute_mel_one_sequence(audio, sr=16000, fps=60, device='cpu'): +# ''' compute mel for an audio sequence. +# ''' + + +# hop_length=int(sr/(fps*2)) +# win_length=1/fps +# winlen = int(sr/fps) +# winstep=0.5/fps +# device = torch.device(device) +# n_fft=512 + +# Audio2Mel_torch = audio_funcs.Audio2Mel(n_fft=n_fft, +# hop_length=hop_length, +# win_length=winlen, +# sampling_rate=sr, +# n_mel_channels=80, +# mel_fmin=90, +# mel_fmax=7600.0).to(device) + +# nframe = int(audio.shape[0] / sr * fps) +# mel_nframe = nframe +# mel_frame_len = int(sr * win_length) +# mel_frame_step = sr * winstep + +# mel80s = np.zeros([mel_nframe, 80]) +# for i in range(mel_nframe): +# st = int(i * mel_frame_step) +# audio_clip = audio[st : st + mel_frame_len] +# if len(audio_clip) < mel_frame_len: +# audio_clip = np.concatenate([audio_clip, np.zeros([mel_frame_len - len(audio_clip)])]) +# audio_clip_device = torch.from_numpy(audio_clip).unsqueeze(0).unsqueeze(0).to(device).float() +# mel_tmp = Audio2Mel_torch(audio_clip_device).cpu().numpy() +# mel80s[i] = mel_tmp[0].T # [1, 80] -> we get [2, 80] ??? +# print(mel80s[i].size) + +# return mel80s + +def compute_mel_one_sequence(audio, hop_length=int(16000/120), winlen=1/60, winstep=0.5/60, sr=16000, fps=60, device='cpu'): + ''' compute mel for an audio sequence. + ''' + device = torch.device(device) + Audio2Mel_torch = audio_funcs.Audio2Mel(n_fft=512, hop_length=int(16000/120), win_length=int(16000/60), sampling_rate=16000, + n_mel_channels=80, mel_fmin=90, mel_fmax=7600.0).to(device) + + nframe = int(audio.shape[0] / 16000 * 60) + mel_nframe = 2 * nframe + mel_frame_len = int(sr * winlen) + mel_frame_step = sr * winstep + + mel80s = np.zeros([mel_nframe, 80]) + for i in range(mel_nframe): +# for i in tqdm(range(mel_nframe)): + st = int(i * mel_frame_step) + audio_clip = audio[st : st + mel_frame_len] + if len(audio_clip) < mel_frame_len: + audio_clip = np.concatenate([audio_clip, np.zeros([mel_frame_len - len(audio_clip)])]) + audio_clip_device = torch.from_numpy(audio_clip).unsqueeze(0).unsqueeze(0).to(device).float() + mel80s[i] = Audio2Mel_torch(audio_clip_device).cpu().numpy()[0].T # [1, 80] + + return mel80s + + +def KNN(feats, feat_database, K=10): + ''' compute KNN for feat in feat base + ''' + tree = KDTree(feat_database, leaf_size=100000) + print('start computing KNN ...') + st = time.time() + dist, ind = tree.query(feats, k=K) + et = time.time() + print('Taken time: ', et-st) + + return dist, ind + + +def KNN_with_torch(feats, feat_database, K=10): + feats = torch.from_numpy(feats)#.cuda() + feat_database = torch.from_numpy(feat_database)#.cuda() + # Training + feat_base_norm = (feat_database ** 2).sum(-1) +# print('start computing KNN ...') +# st = time.time() + feats_norm = (feats ** 2).sum(-1) + diss = (feats_norm.view(-1, 1) + + feat_base_norm.view(1, -1) + - 2 * feats @ feat_database.t() # Rely on cuBLAS for better performance! 
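+            # The expression above expands ||q - b||^2 = ||q||^2 + ||b||^2 - 2 q.b for
+            # every query/base pair, so the full [n_query, n_base] distance matrix comes
+            # from two squared-norm vectors and one matrix product instead of a Python loop.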
+ ) + ind = diss.topk(K, dim=1, largest=False).indices +# et = time.time() +# print('Taken time: ', et-st) + + return ind.cpu().numpy() + + + + +def solve_LLE_projection(feat, feat_base): + '''find LLE projection weights given feat base and target feat + Args: + feat: [ndim, ] target feat + feat_base: [K, ndim] K-nearest feat base + ======================================= + We need to solve the following function + ``` + min|| feat - \sum_0^k{w_i} * feat_base_i ||, s.t. \sum_0^k{w_i}=1 + ``` + equals to: + ft = w1*f1 + w2*f2 + ... + wk*fk, s.t. w1+w2+...+wk=1 + = (1-w2-...-wk)*f1 + w2*f2 + ... + wk*fk + ft-f1 = w2*(f2-f1) + w3*(f3-f1) + ... + wk*(fk-f1) + ft-f1 = (f2-f1, f3-f1, ..., fk-f1) dot (w2, w3, ..., wk).T + B = A dot w_, here, B: [ndim,] A: [ndim, k-1], w_: [k-1,] + Finally, + ft' = (1-w2-..wk, w2, ..., wk) dot (f1, f2, ..., fk) + ======================================= + Returns: + w: [K,] linear weights, sums to 1 + ft': [ndim,] reconstructed feats + ''' + K, ndim = feat_base.shape + if K == 1: + feat_fuse = feat_base[0] + w = np.array([1]) + else: + w = np.zeros(K) + B = feat - feat_base[0] # [ndim,] + A = (feat_base[1:] - feat_base[0]).T # [ndim, K-1] + AT = A.T + w[1:] = solve(AT.dot(A), AT.dot(B)) + w[0] = 1 - w[1:].sum() + feat_fuse = w.dot(feat_base) + + return w, feat_fuse + + + +def compute_LLE_projection_frame(feats, feat_database, ind): + nframe = feats.shape[0] + feat_fuse = np.zeros_like(feats) + w = np.zeros([nframe, ind.shape[1]]) + current_K_feats = feat_database[ind] + w, feat_fuse = solve_LLE_projection(feats, current_K_feats) + + return w, feat_fuse + + +def compute_LLE_projection_all_frame(feats, feat_database, ind, nframe): + nframe = feats.shape[0] + feat_fuse = np.zeros_like(feats) + w = np.zeros([nframe, ind.shape[1]]) + for i in tqdm(range(nframe), desc='LLE projection'): + current_K_feats = feat_database[ind[i]] + w[i], feat_fuse[i] = solve_LLE_projection(feats[i], current_K_feats) + + return w, feat_fuse + + +def angle2matrix(angles, gradient='false'): + ''' get rotation matrix from three rotation angles(degree). right-handed. + Args: + angles: [3,]. x, y, z angles + x: pitch. positive for looking down. + y: yaw. positive for looking left. + z: roll. positive for tilting head right. + gradient(str): whether to compute gradient matrix: dR/d_x,y,z + Returns: + R: [3, 3]. rotation matrix. 
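+        Note:
+            R is composed as R = Rz @ Ry @ Rx, i.e. pitch is applied first,
+            then yaw, then roll.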
+ ''' + x, y, z = np.deg2rad(angles[0]), np.deg2rad(angles[1]), np.deg2rad(angles[2]) + # x + Rx=np.array([[1, 0, 0], + [0, cos(x), -sin(x)], + [0, sin(x), cos(x)]]) + # y + Ry=np.array([[ cos(y), 0, sin(y)], + [ 0, 1, 0], + [-sin(y), 0, cos(y)]]) + # z + Rz=np.array([[cos(z), -sin(z), 0], + [sin(z), cos(z), 0], + [ 0, 0, 1]]) + + R=Rz.dot(Ry.dot(Rx)) + #R=Rx.dot(Ry.dot(Rz)) + + if gradient != 'true': + return R.astype(np.float32) + elif gradient == 'true': + # gradident matrix + dRxdx = np.array([[0, 0, 0], + [0, -sin(x), -cos(x)], + [0, cos(x), -sin(x)]]) + dRdx = Rz.dot(Ry.dot(dRxdx)) * np.pi/180 + dRydy = np.array([[-sin(y), 0, cos(y)], + [ 0, 0, 0], + [-cos(y), 0, -sin(y)]]) + dRdy = Rz.dot(dRydy.dot(Rx)) * np.pi/180 + dRzdz = np.array([[-sin(z), -cos(z), 0], + [ cos(z), -sin(z), 0], + [ 0, 0, 0]]) + dRdz = dRzdz.dot(Ry.dot(Rx)) * np.pi/180 + + return R.astype(np.float32), [dRdx.astype(np.float32), dRdy.astype(np.float32), dRdz.astype(np.float32)] + + + +def project_landmarks(camera_intrinsic, viewpoint_R, viewpoint_T, scale, headposes, pts_3d): + ''' project 2d landmarks given predicted 3d landmarks & headposes and user-defined + camera & viewpoint parameters + ''' + rot, trans = angle2matrix(headposes[:3]), headposes[3:][:, None] + pts3d_headpose = scale * rot.dot(pts_3d.T) + trans + pts3d_viewpoint = viewpoint_R.dot(pts3d_headpose) + viewpoint_T[:, None] + pts2d_project = camera_intrinsic.dot(pts3d_viewpoint) + pts2d_project[:2, :] /= pts2d_project[2, :] # divide z + pts2d_project = pts2d_project[:2, :].T + + return pts2d_project, rot, trans + + + +def landmark_smooth_3d(pts3d, smooth_sigma=0, area='only_mouth'): + ''' smooth the input 3d landmarks using gaussian filters on each dimension. + Args: + pts3d: [N, 73, 3] + ''' + # per-landmark smooth + if not smooth_sigma == 0: + if area == 'all': + pts3d = gaussian_filter1d(pts3d.reshape(-1, 73*3), smooth_sigma, axis=0).reshape(-1, 73, 3) + elif area == 'only_mouth': + mouth_pts3d = pts3d[:, 46:64, :].copy() + mouth_pts3d = gaussian_filter1d(mouth_pts3d.reshape(-1, 18*3), smooth_sigma, axis=0).reshape(-1, 18, 3) + pts3d = gaussian_filter1d(pts3d.reshape(-1, 73*3), smooth_sigma, axis=0).reshape(-1, 73, 3) + pts3d[:, 46:64, :] = mouth_pts3d + + + + return pts3d + + + +mouth_indices = list(range(46 * 2, 64 * 2)) +upper_outer_lip = list(range(47, 52)) +upper_inner_lip = [63, 62, 61] +lower_inner_lip = [58, 59, 60] +lower_outer_lip = list(range(57, 52, -1)) +lower_mouth = [53, 54, 55, 56, 57, 58, 59, 60] +upper_mouth = [46, 47, 48, 49, 50, 51, 52, 61, 62, 63] +def mouth_pts_AMP(pts3d, is_delta=True, method='XY', paras=[1,1]): + ''' mouth region AMP to control the reaction amplitude. 
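+    pts3d: [N, 73, 3] landmark array; indices 46:64 cover the mouth region.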
+ method: 'XY', 'delta', 'XYZ', 'LowerMore' or 'CloseSmall' + ''' + if method == 'XY': + AMP_scale_x, AMP_scale_y = paras + if is_delta: + pts3d[:, 46:64, 0] *= AMP_scale_x + pts3d[:, 46:64, 1] *= AMP_scale_y + else: + mean_mouth3d_xy = pts3d[:, 46:64, :2].mean(axis=0) + pts3d[:, 46:64, 0] += (AMP_scale_x-1) * (pts3d[:, 46:64, 0] - mean_mouth3d_xy[:,0]) + pts3d[:, 46:64, 1] += (AMP_scale_y-1) * (pts3d[:, 46:64, 1] - mean_mouth3d_xy[:,1]) + elif method == 'delta': + AMP_scale_x, AMP_scale_y = paras + if is_delta: + diff = AMP_scale_x * (pts3d[1:, 46:64] - pts3d[:-1, 46:64]) + pts3d[1:, 46:64] += diff + + elif method == 'XYZ': + AMP_scale_x, AMP_scale_y, AMP_scale_z = paras + if is_delta: + pts3d[:, 46:64, 0] *= AMP_scale_x + pts3d[:, 46:64, 1] *= AMP_scale_y + pts3d[:, 46:64, 2] *= AMP_scale_z + + elif method == 'LowerMore': + upper_x, upper_y, upper_z, lower_x, lower_y, lower_z = paras + if is_delta: + pts3d[:, upper_mouth, 0] *= upper_x + pts3d[:, upper_mouth, 1] *= upper_y + pts3d[:, upper_mouth, 2] *= upper_z + pts3d[:, lower_mouth, 0] *= lower_x + pts3d[:, lower_mouth, 1] *= lower_y + pts3d[:, lower_mouth, 2] *= lower_z + + elif method == 'CloseSmall': + open_x, open_y, open_z, close_x, close_y, close_z = paras + nframe = pts3d.shape[0] + for i in tqdm(range(nframe), desc='AMP mouth..'): + if sum(pts3d[i, upper_mouth, 1] > 0) + sum(pts3d[i, lower_mouth, 1] < 0) > 16 * 0.3: + # open + pts3d[i, 46:64, 0] *= open_x + pts3d[i, 46:64, 1] *= open_y + pts3d[i, 46:64, 2] *= open_z + else: + # close + pts3d[:, 46:64, 0] *= close_x + pts3d[:, 46:64, 1] *= close_y + pts3d[:, 46:64, 2] *= close_z + + return pts3d + + + + +def solve_intersect_mouth(pts3d): + ''' solve the generated intersec lips, usually happens in mouth AMP usage. + Args: + pts3d: [N, 73, 3] + ''' + upper_inner = pts3d[:, upper_inner_lip] + lower_inner = pts3d[:, lower_inner_lip] + + lower_inner_y = lower_inner[:,:,1] + upper_inner_y = upper_inner[:,:,1] + # all three inner lip flip + flip = lower_inner_y > upper_inner_y + flip = np.where(flip.sum(axis=1) == 3)[0] + + # flip frames + inner_y_diff = lower_inner_y[flip] - upper_inner_y[flip] + half_inner_y_diff = inner_y_diff * 0.5 + # upper inner + pts3d[flip[:,None], upper_inner_lip, 1] += half_inner_y_diff + # lower inner + pts3d[flip[:,None], lower_inner_lip, 1] -= half_inner_y_diff + # upper outer + pts3d[flip[:,None], upper_outer_lip, 1] += half_inner_y_diff.mean() + # lower outer + pts3d[flip[:,None], lower_outer_lip, 1] -= half_inner_y_diff.mean() + + + return pts3d + + + +def headpose_smooth(headpose, smooth_sigmas=[0,0], method='gaussian'): + rot_sigma, trans_sigma = smooth_sigmas + + if method == 'gaussian': + rot = gaussian_filter1d(headpose.reshape(-1, 6)[:,:3], rot_sigma, axis=0).reshape(-1, 3) + trans = gaussian_filter1d(headpose.reshape(-1, 6)[:,3:], trans_sigma, axis=0).reshape(-1, 3) + headpose_smooth = np.concatenate([rot, trans], axis=1) + + elif method == 'median': + rot = median_filter(headpose.reshape(-1, 6)[:,:3], size=rot_sigma).reshape(-1, 3) + trans = median_filter(headpose.reshape(-1, 6)[:,3:], size=trans_sigma).reshape(-1, 3) + headpose_smooth = np.concatenate([rot, trans], axis=1) + + else: + print(f'{method} smoothing not implemented.') + return headpose + + return headpose_smooth + + + + + + + + + diff --git a/motion-gan-pipeline/motion-generation/make_config.py b/motion-gan-pipeline/motion-generation/make_config.py new file mode 100644 index 0000000..1ca3443 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/make_config.py @@ -0,0 +1,42 @@ 
+import yaml +import os + + +def make_config(name, dictionary): + config_path = './config' + yml_path = os.path.join(config_path, f'{name}.yml') + with open(yml_path, 'w') as outfile: + yaml.dump(dictionary, outfile, default_flow_style=False) + + +def base_config(): + + config = {} + + ## Pick a name + # name = 'MTC' + # name = 'TED384-v2' + name = 'TMP_AVSpeech' + + ## Pick a task + task = 'Audio2Headpose' + # task = 'Correlation' + + ## Pick a dataset + dataset = 'deepspeech' + # dataset = 'emotion' + + # Parameters to setup experiment. + config['experiment'] = { + 'name': f'{task}_{name}', + 'dataset_mode': dataset, + 'dataroot': '/media/apennino/', + 'dataset_names': name, + 'fps': 25, + } + + make_config(config['experiment']['name'], config) + + +if __name__ == "__main__": + base_config() diff --git a/motion-gan-pipeline/motion-generation/media/Pitch_movement&delta_RMS.png b/motion-gan-pipeline/motion-generation/media/Pitch_movement&delta_RMS.png new file mode 100644 index 0000000..21709fa Binary files /dev/null and b/motion-gan-pipeline/motion-generation/media/Pitch_movement&delta_RMS.png differ diff --git a/motion-gan-pipeline/motion-generation/media/audio_14.png b/motion-gan-pipeline/motion-generation/media/audio_14.png new file mode 100644 index 0000000..a31af89 Binary files /dev/null and b/motion-gan-pipeline/motion-generation/media/audio_14.png differ diff --git a/motion-gan-pipeline/motion-generation/media/yam_corr.png b/motion-gan-pipeline/motion-generation/media/yam_corr.png new file mode 100644 index 0000000..ccf29dc Binary files /dev/null and b/motion-gan-pipeline/motion-generation/media/yam_corr.png differ diff --git a/motion-gan-pipeline/motion-generation/models/__init__.py b/motion-gan-pipeline/motion-generation/models/__init__.py new file mode 100644 index 0000000..186aba7 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/models/__init__.py @@ -0,0 +1,142 @@ +"""This package contains modules related to objective functions, optimizations, and network architectures. + +To add a custom model class called 'dummy', you need to add a file called 'dummy_model.py' and define a subclass DummyModel inherited from BaseModel. +You need to implement the following five functions: + -- <__init__>: initialize the class; first call BaseModel.__init__(self, opt). + -- : unpack data from dataset and apply preprocessing. + -- : produce intermediate results. + -- : calculate loss, gradients, and update network weights. + -- : (optionally) add model-specific options and set default options. + +In the function <__init__>, you need to define four lists: + -- self.loss_names (str list): specify the training losses that you want to plot and save. + -- self.model_names (str list): define networks used in our training. + -- self.visual_names (str list): specify the images that you want to display and save. + -- self.optimizers (optimizer list): define and initialize optimizers. You can define one optimizer for each network. If two networks are updated at the same time, you can use itertools.chain to group them. See cycle_gan_model.py for an usage. + +Now you can use the model class by specifying flag '--model dummy'. +See our template model class 'template_model.py' for more details. +""" +import os +import importlib +import numpy as np +import torch +import torch.nn as nn +from models.base_model import BaseModel + + + +def find_model_using_name(model_name): + """Import the module "models/[model_name]_model.py". + + In the file, the class called DatasetNameModel() will + be instantiated. 
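+    For example, model_name 'audio2headpose' imports models/audio2headpose_model.py
+    and resolves to the class Audio2HeadposeModel defined there.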
It has to be a subclass of BaseModel,
+    and it is case-insensitive.
+    """
+    model_filename = "models." + model_name + "_model"
+    modellib = importlib.import_module(model_filename)
+    model = None
+    target_model_name = model_name.replace('_', '') + 'model'
+    for name, cls in modellib.__dict__.items():
+        if name.lower() == target_model_name.lower() \
+           and issubclass(cls, BaseModel):
+            model = cls
+
+    if model is None:
+        print("In %s.py, there should be a subclass of BaseModel with class name that matches %s in lowercase." % (model_filename, target_model_name))
+        exit()
+
+    return model
+
+
+def get_option_setter(model_name):
+    """Return the static method <modify_commandline_options> of the model class."""
+    model_class = find_model_using_name(model_name)
+    return model_class.modify_commandline_options
+
+
+def create_model(opt):
+    """Create a model given the option.
+
+    This function wraps the model class.
+    This is the main interface between this package and 'train.py'/'test.py'.
+
+    Example:
+        >>> from models import create_model
+        >>> model = create_model(opt)
+    """
+    model = find_model_using_name(opt.model)
+    instance = model(opt)
+    print("model [%s] was created" % type(instance).__name__)
+    return instance
+
+
+def save_models(opt, epoch, epoch_iter, total_steps, visualizer, iter_path, modelG, modelD, end_of_epoch=False):
+    if not end_of_epoch:
+        if total_steps % opt.save_latest_freq == 0:
+            visualizer.vis_print('saving the latest model (epoch %d, total_steps %d)' % (epoch, total_steps))
+            modelG.module.save('latest')
+            modelD.module.save('latest')
+            np.savetxt(iter_path, (epoch, epoch_iter), delimiter=',', fmt='%d')
+    else:
+        if epoch % opt.save_epoch_freq == 0:
+            visualizer.vis_print('saving the model at the end of epoch %d, iters %d' % (epoch, total_steps))
+            modelG.module.save('latest')
+            modelD.module.save('latest')
+            modelG.module.save(epoch)
+            modelD.module.save(epoch)
+            np.savetxt(iter_path, (epoch+1, 0), delimiter=',', fmt='%d')
+
+
+def update_models(opt, epoch, modelG, modelD, dataset_warp):
+    ### linearly decay learning rate after certain iterations
+    if epoch > opt.niter:
+        modelG.module.update_learning_rate(epoch, 'G')
+        modelD.module.update_learning_rate(epoch, 'D')
+
+    ### gradually grow training sequence length
+    if (epoch % opt.niter_step) == 0:
+        dataset_warp.dataset.update_training_batch(epoch//opt.niter_step)
+#        modelG.module.update_training_batch(epoch//opt.niter_step)
+
+    ### finetune all scales
+    if (opt.n_scales_spatial > 1) and (opt.niter_fix_global != 0) and (epoch == opt.niter_fix_global):
+        modelG.module.update_fixed_params()
+
+
+class myModel(nn.Module):
+    def __init__(self, opt, model):
+        super(myModel, self).__init__()
+        self.opt = opt
+        self.module = model
+        self.model = nn.DataParallel(model, device_ids=opt.gpu_ids)
+        self.bs_per_gpu = int(np.ceil(float(opt.batch_size) / len(opt.gpu_ids)))  # batch size for each GPU
+        self.pad_bs = self.bs_per_gpu * len(opt.gpu_ids) - opt.batch_size
+
+    def forward(self, *inputs, **kwargs):
+        inputs = self.add_dummy_to_tensor(inputs, self.pad_bs)
+        outputs = self.model(*inputs, **kwargs, dummy_bs=self.pad_bs)
+        if self.pad_bs == self.bs_per_gpu:  # gpu 0 does 0 batch but still returns 1 batch
+            return self.remove_dummy_from_tensor(outputs, 1)
+        return outputs
+
+    def add_dummy_to_tensor(self, tensors, add_size=0):
+        if add_size == 0 or tensors is None: return tensors
+        if type(tensors) == list or type(tensors) == tuple:
+            return [self.add_dummy_to_tensor(tensor, add_size) for tensor in tensors]
+
+        if isinstance(tensors, 
torch.Tensor): + dummy = torch.zeros_like(tensors)[:add_size] + tensors = torch.cat([dummy, tensors]) + return tensors + + def remove_dummy_from_tensor(self, tensors, remove_size=0): + if remove_size == 0 or tensors is None: return tensors + if type(tensors) == list or type(tensors) == tuple: + return [self.remove_dummy_from_tensor(tensor, remove_size) for tensor in tensors] + + if isinstance(tensors, torch.Tensor): + tensors = tensors[remove_size:] + return tensors + + diff --git a/motion-gan-pipeline/motion-generation/models/audio2headpose.py b/motion-gan-pipeline/motion-generation/models/audio2headpose.py new file mode 100644 index 0000000..7ddc17b --- /dev/null +++ b/motion-gan-pipeline/motion-generation/models/audio2headpose.py @@ -0,0 +1,108 @@ +import torch.nn as nn + +from .networks import WaveNet + + + +class Audio2Headpose(nn.Module): + def __init__(self, opt): + super(Audio2Headpose, self).__init__() + self.opt = opt + if self.opt.loss == 'GMM': + output_size = (2 * opt.A2H_GMM_ndim + 1) * opt.A2H_GMM_ncenter + elif self.opt.loss == 'L2': + output_size = opt.A2H_GMM_ndim + # define networks + self.audio_downsample = nn.Sequential( + nn.Linear(in_features=opt.APC_hidden_size, out_features=opt.APC_hidden_size), + nn.BatchNorm1d(opt.APC_hidden_size), + nn.LeakyReLU(0.2), + nn.Linear(opt.APC_hidden_size, opt.APC_hidden_size), + ) + + self.WaveNet = WaveNet(residual_layers=opt.A2H_wavenet_residual_layers, + residual_blocks=opt.A2H_wavenet_residual_blocks, + dilation_channels=opt.A2H_wavenet_residual_channels, + residual_channels =opt.A2H_wavenet_dilation_channels, + skip_channels=opt.A2H_wavenet_skip_channels, + kernel_size=opt.A2H_wavenet_kernel_size, + output_length=opt.time_frame_length, + use_bias=opt.A2H_wavenet_use_bias, + cond=True, + input_channels=opt.A2H_wavenet_input_channels, + ncenter=opt.A2H_GMM_ncenter, + ndim=opt.A2H_GMM_ndim, + output_channels=output_size, + cond_channels=opt.A2H_wavenet_cond_channels) + self.item_length = self.WaveNet.receptive_field + opt.time_frame_length - 1 + + + def forward(self, history_info, audio_features): + ''' + Args: + history_info: [b, T, ndim] + audio_features: [b, 1, nfeas, nwins] + ''' + # APC features: [b, item_length, APC_hidden_size] ==> [b, APC_hidden_size, item_length] + bs, item_len, ndim = audio_features.shape + down_audio_feats = self.audio_downsample(audio_features.reshape(-1, ndim)).reshape(bs, item_len, -1) + pred = self.WaveNet.forward(history_info.permute(0,2,1), down_audio_feats.transpose(1,2)) + + + return pred + + + + +class Audio2Headpose_LSTM(nn.Module): + def __init__(self, opt): + super(Audio2Headpose_LSTM, self).__init__() + self.opt = opt + if self.opt.loss == 'GMM': + output_size = (2 * opt.A2H_GMM_ndim + 1) * opt.A2H_GMM_ncenter + elif self.opt.loss == 'L2': + output_size = opt.A2H_GMM_ndim + # define networks + self.audio_downsample = nn.Sequential( + nn.Linear(in_features=opt.APC_hidden_size * 2, out_features=opt.APC_hidden_size), + nn.BatchNorm1d(opt.APC_hidden_size), + nn.LeakyReLU(0.2), + nn.Linear(opt.APC_hidden_size, opt.APC_hidden_size), + ) + + self.LSTM = nn.LSTM(input_size=opt.APC_hidden_size, + hidden_size=256, + num_layers=3, + dropout=0, + bidirectional=False, + batch_first=True) + self.fc = nn.Sequential( + nn.Linear(in_features=256, out_features=512), + nn.BatchNorm1d(512), + nn.LeakyReLU(0.2), + nn.Linear(512, 512), + nn.BatchNorm1d(512), + nn.LeakyReLU(0.2), + nn.Linear(512, output_size)) + + + def forward(self, audio_features): + ''' + Args: + history_info: [b, T, ndim] + audio_features: [b, 
1, nfeas, nwins] + ''' + # APC features: [b, item_length, APC_hidden_size] ==> [b, APC_hidden_size, item_length] + bs, item_len, ndim = audio_features.shape + down_audio_feats = self.audio_downsample(audio_features.reshape(-1, ndim)).reshape(bs, item_len, -1) + output, (hn, cn) = self.LSTM(down_audio_feats) + pred = self.fc(output.reshape(-1, 256)).reshape(bs, item_len, -1) + + + return pred + + + + + + \ No newline at end of file diff --git a/motion-gan-pipeline/motion-generation/models/audio2headpose_model.py b/motion-gan-pipeline/motion-generation/models/audio2headpose_model.py new file mode 100644 index 0000000..65a3f11 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/models/audio2headpose_model.py @@ -0,0 +1,197 @@ +import numpy as np +import torch +from tqdm import tqdm + +from .base_model import BaseModel +from . import networks +from . import audio2headpose +from .losses import GMMLogLoss, Sample_GMM +import torch.nn as nn + + +class Audio2HeadposeModel(BaseModel): + def __init__(self, opt): + """Initialize the Audio2Headpose class. + + Parameters: + opt (Option class)-- stores all the experiment flags; needs to be a subclass of BaseOptions + """ + BaseModel.__init__(self, opt) + + # specify the models you want to save to the disk. The training/test scripts will call and + # define networks + self.model_names = ['Audio2Headpose'] + if opt.feature_decoder == 'WaveNet': + self.Audio2Headpose = networks.init_net(audio2headpose.Audio2Headpose(opt), init_type='normal', init_gain=0.02, gpu_ids=opt.gpu_ids) + elif opt.feature_decoder == 'LSTM': + self.Audio2Headpose = networks.init_net(audio2headpose.Audio2Headpose_LSTM(opt), init_type='normal', init_gain=0.02, gpu_ids=opt.gpu_ids) + + # define only during training time + if self.isTrain: + # losses + self.criterion_GMM = GMMLogLoss(opt.A2H_GMM_ncenter, opt.A2H_GMM_ndim, opt.A2H_GMM_sigma_min).to(self.device) + self.criterion_L2 = nn.MSELoss().cuda() + + self.loss_names = ['GMM'] + # optimizer + self.optimizer = torch.optim.Adam([{'params':self.Audio2Headpose.parameters(), + 'initial_lr': opt.lr}], lr=opt.lr, betas=(0.9, 0.99)) + + self.optimizers.append(self.optimizer) + + if opt.continue_train: + self.resume_training() + + + def resume_training(self): + opt = self.opt + ### if continue training, recover previous states + print('Resuming from epoch %s ' % (opt.load_epoch)) + # change epoch count & update schedule settings + opt.epoch_count = int(opt.load_epoch) + self.schedulers = [networks.get_scheduler(optimizer, opt) for optimizer in self.optimizers] + # print lerning rate + lr = self.optimizers[0].param_groups[0]['lr'] + print('update learning rate: {} -> {}'.format(opt.lr, lr)) + + + + def set_input(self, data, data_info=None): + """Unpack input data from the dataloader and perform necessary pre-processing steps. 
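+
+        For the WaveNet decoder the incoming tuple is (audio_feats,
+        history_headpose, target_headpose); for the LSTM decoder it is
+        (audio_feats, target_headpose). All tensors are moved to self.device.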
+ """ + if self.opt.feature_decoder == 'WaveNet': + self.headpose_audio_feats, self.history_headpose, self.target_headpose = data + self.headpose_audio_feats = self.headpose_audio_feats.to(self.device) + self.history_headpose = self.history_headpose.to(self.device) + self.target_headpose = self.target_headpose.to(self.device) + elif self.opt.feature_decoder == 'LSTM': + self.headpose_audio_feats, self.target_headpose = data + self.headpose_audio_feats = self.headpose_audio_feats.to(self.device) + self.target_headpose = self.target_headpose.to(self.device) + + + + def forward(self): + ''' + Args: + history_landmarks: [b, T, ndim] + audio_features: [b, 1, nfeas, nwins] + Returns: + preds: [b, T, output_channels] + ''' + + # if self.opt.audio_windows == 2: + # bs, item_len, ndim = self.headpose_audio_feats.shape + # self.headpose_audio_feats = self.headpose_audio_feats.reshape(bs, -1, ndim * 2) + # else: + bs, item_len, ndim = self.headpose_audio_feats.shape + if self.opt.feature_decoder == 'WaveNet': + self.preds_headpose = self.Audio2Headpose.forward(self.history_headpose, self.headpose_audio_feats) + elif self.opt.feature_decoder == 'LSTM': + self.preds_headpose = self.Audio2Headpose.forward(self.headpose_audio_feats) + + + def calculate_loss(self): + """ calculate loss in detail, only forward pass included""" + if self.opt.loss == 'GMM': + self.loss_GMM = self.criterion_GMM(self.preds_headpose, self.target_headpose) + self.loss = self.loss_GMM + elif self.opt.loss == 'L2': + self.loss_L2 = self.criterion_L2(self.preds_headpose, self.target_headpose) + self.loss = self.loss_L2 + + if not self.opt.smooth_loss == 0: + mu_gen = Sample_GMM(self.preds_headpose, + self.Audio2Headpose.module.WaveNet.ncenter, + self.Audio2Headpose.module.WaveNet.ndim, + sigma_scale=0) + self.smooth_loss = (mu_gen[:,2:] + self.target_headpose[:,:-2] - 2 * self.target_headpose[:,1:-1]).mean(dim=2).abs().mean() + self.loss += self.smooth_loss * self.opt.smooth_loss + + + + def backward(self): + """Calculate losses, gradients, and update network weights; called in every training iteration""" + self.calculate_loss() + self.loss.backward() + + + def optimize_parameters(self): + """Update network weights; it will be called in every training iteration.""" + self.optimizer.zero_grad() # clear optimizer parameters grad + self.forward() # forward pass + self.backward() # calculate loss and gradients + self.optimizer.step() # update gradients + + + def validate(self): + """ validate process """ + with torch.no_grad(): + self.forward() + self.calculate_loss() + + + def generate_sequences(self, audio_feats, pre_headpose, fill_zero=True, sigma_scale=0.0, opt=[]): + + frame_future = opt.frame_future + nframe = audio_feats.shape[0] - frame_future + pred_headpose = np.zeros([nframe, opt.A2H_GMM_ndim]) + + if opt.feature_decoder == 'WaveNet': + # fill zero or not + if fill_zero == True: + # headpose + audio_feats_insert = np.repeat(audio_feats[0], opt.A2H_receptive_field - 1) + audio_feats_insert = audio_feats_insert.reshape(-1, opt.A2H_receptive_field - 1).T + audio_feats = np.concatenate([audio_feats_insert, audio_feats]) + # history headpose + history_headpose = np.repeat(pre_headpose, opt.A2H_receptive_field) + history_headpose = history_headpose.reshape(-1, opt.A2H_receptive_field).T + history_headpose = torch.from_numpy(history_headpose).unsqueeze(0).float().to(self.device) + infer_start = 0 + else: + return None + + # evaluate mode + self.Audio2Headpose.eval() + + with torch.no_grad(): + for i in tqdm(range(infer_start, 
nframe), desc='generating headpose'): + history_start = i - infer_start + input_audio_feats = audio_feats[history_start + frame_future: history_start + frame_future + opt.A2H_receptive_field] + input_audio_feats = torch.from_numpy(input_audio_feats).unsqueeze(0).float().to(self.device) + + if self.opt.feature_decoder == 'WaveNet': + preds = self.Audio2Headpose.forward(history_headpose, input_audio_feats) + elif self.opt.feature_decoder == 'LSTM': + preds = self.Audio2Headpose.forward(input_audio_feats) + + if opt.loss == 'GMM': + pred_data = Sample_GMM(preds, opt.A2H_GMM_ncenter, opt.A2H_GMM_ndim, sigma_scale=sigma_scale) + elif opt.loss == 'L2': + pred_data = preds + + # get predictions + pred_headpose[i] = pred_data[0,0].cpu().detach().numpy() + history_headpose = torch.cat((history_headpose[:,1:,:], pred_data.to(self.device)), dim=1) # add in time-axis + + return pred_headpose + + elif opt.feature_decoder == 'LSTM': + self.Audio2Headpose.eval() + with torch.no_grad(): + input = torch.from_numpy(audio_feats).unsqueeze(0).float().to(self.device) + preds = self.Audio2Headpose.forward(input) + if opt.loss == 'GMM': + pred_data = Sample_GMM(preds, opt.A2H_GMM_ncenter, opt.A2H_GMM_ndim, sigma_scale=sigma_scale) + elif opt.loss == 'L2': + pred_data = preds + # get predictions + pred_headpose = pred_data[0].cpu().detach().numpy() + + return pred_headpose + + + + + \ No newline at end of file diff --git a/motion-gan-pipeline/motion-generation/models/base_model.py b/motion-gan-pipeline/motion-generation/models/base_model.py new file mode 100644 index 0000000..fe3e155 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/models/base_model.py @@ -0,0 +1,301 @@ +import os +import torch +import numpy as np +from collections import OrderedDict +from abc import ABC, abstractmethod +from . import networks + + +class BaseModel(ABC): + """This class is an abstract base class (ABC) for models. + To create a subclass, you need to implement the following five functions: + -- <__init__>: initialize the class; first call BaseModel.__init__(self, opt). + -- : unpack data from dataset and apply preprocessing. + -- : produce intermediate results. + -- : calculate losses, gradients, and update network weights. + -- : (optionally) add model-specific options and set default options. + """ + + def __init__(self, opt): + """Initialize the BaseModel class. + + Parameters: + opt (Option class)-- stores all the experiment flags; needs to be a subclass of BaseOptions + + When creating your custom class, you need to implement your own initialization. + In this function, you should first call + Then, you need to define four lists: + -- self.loss_names (str list): specify the training losses that you want to plot and save. + -- self.model_names (str list): define networks used in our training. + -- self.visual_names (str list): specify the images that you want to display and save. + -- self.optimizers (optimizer list): define and initialize optimizers. You can define one optimizer for each network. If two networks are updated at the same time, you can use itertools.chain to group them. See cycle_gan_model.py for an example. 
+ """ + self.opt = opt + self.gpu_ids = opt.gpu_ids + self.isTrain = opt.isTrain + # get device name: CPU or GPU + # if self.gpu_ids == '-1': + # self.device = torch.device('cpu') + # self.gpu_ids = opt.gpu_ids == [] + # else: + # self.device = torch.device('cuda:{}'.format(self.gpu_ids[0])) + self.device = torch.device('cuda:{}'.format(self.gpu_ids[0])) if len(self.gpu_ids) > 0 else torch.device('cpu') + + self.save_dir = os.path.join(opt.checkpoints_dir, opt.name) # save all the checkpoints to save_dir + # torch speed up training + torch.backends.cudnn.enabled = True + torch.backends.cudnn.benchmark = True + self.loss_names = [] + self.model_names = [] + self.visual_names = [] + self.optimizers = [] + self.image_paths = [] + self.metric = 0 # used for learning rate policy 'plateau' + + @staticmethod + def modify_commandline_options(parser, is_train): + """Add new model-specific options, and rewrite default values for existing options. + + Parameters: + parser -- original option parser + is_train (bool) -- whether training phase or test phase. You can use this flag to add training-specific or test-specific options. + + Returns: + the modified parser. + """ + return parser + + @abstractmethod + def set_input(self, input): + """Unpack input data from the dataloader and perform necessary pre-processing steps. + + Parameters: + input (dict): includes the data itself and its metadata information. + """ + pass + + @abstractmethod + def forward(self): + """Run forward pass; called by both functions and .""" + pass + + @abstractmethod + def optimize_parameters(self): + """Calculate losses, gradients, and update network weights; called in every training iteration""" + pass + + def setup(self, opt): + """Load and print networks; create schedulers + + Parameters: + opt (Option class) -- stores all the experiment flags; needs to be a subclass of BaseOptions + """ + if self.isTrain: + self.schedulers = [networks.get_scheduler(optimizer, opt) for optimizer in self.optimizers] + if not self.isTrain or opt.continue_train: + self.load_networks(opt.load_epoch) + + + self.print_networks(opt.verbose) + + + def train(self): + """Make models train mode during train time""" + for name in self.model_names: + if isinstance(name, str): + net = getattr(self, name) + net.train(mode=True) + + + def eval(self): + """Make models eval mode during test time""" + for name in self.model_names: + if isinstance(name, str): + net = getattr(self, name) + net.eval() + + + def test(self): + """Forward function used in test time. + + This function wraps function in no_grad() so we don't save intermediate steps for backprop + It also calls to produce additional visualization results + """ + with torch.no_grad(): + self.forward() + self.compute_visuals() + + def compute_visuals(self): + """Calculate additional output images for visdom and HTML visualization""" + pass + + def get_image_paths(self): + """ Return image paths that are used to load current data""" + return self.image_paths + + def update_learning_rate(self): + """Update learning rates for all the networks; called at the end of every epoch""" + for scheduler in self.schedulers: + if self.opt.lr_policy == 'plateau': + scheduler.step(self.metric) + else: + scheduler.step() + + lr = self.optimizers[0].param_groups[0]['lr'] + print('learning rate = %.7f' % lr) + + def get_current_visuals(self): + """Return visualization images. 
train.py will display these images with visdom, and save the images to a HTML""" + visual_ret = OrderedDict() + for name in self.visual_names: + if isinstance(name, str): + visual_ret[name] = getattr(self, name) + return visual_ret + + def get_current_losses(self): + """Return traning losses / errors. train.py will print out these errors on console, and save them to a file""" + errors_ret = OrderedDict() + for name in self.loss_names: + if isinstance(name, str): + errors_ret[name] = float(getattr(self, 'loss_' + name)) # float(...) works for both scalar tensor and float number + return errors_ret + + def save_networks(self, epoch, train_info=None): + """Save all the networks to the disk. + + Parameters: + epoch (int) -- current epoch; used in the file name '%s_net_%s.pth' % (epoch, name) + """ + for name in self.model_names: + if isinstance(name, str): + save_filename = '%s_%s.pkl' % (epoch, name) + save_path = os.path.join(self.save_dir, save_filename) + net = getattr(self, name) + torch.save(net.state_dict(), save_path) + if train_info is not None: + epoch, epoch_iter = train_info + iter_path = os.path.join(self.save_dir, 'iter.txt') + np.savetxt(iter_path, (epoch, epoch_iter), delimiter=',', fmt='%d') + + + def __patch_instance_norm_state_dict(self, state_dict, module, keys, i=0): + """Fix InstanceNorm checkpoints incompatibility (prior to 0.4)""" + key = keys[i] + if i + 1 == len(keys): # at the end, pointing to a parameter/buffer + if module.__class__.__name__.startswith('InstanceNorm') and \ + (key == 'running_mean' or key == 'running_var'): + if getattr(module, key) is None: + state_dict.pop('.'.join(keys)) + if module.__class__.__name__.startswith('InstanceNorm') and \ + (key == 'num_batches_tracked'): + state_dict.pop('.'.join(keys)) + else: + self.__patch_instance_norm_state_dict(state_dict, getattr(module, key), keys, i + 1) + + def load_networks(self, epoch): + """Load all the networks from the disk. + + Parameters: + epoch (int) -- current epoch; used in the file name '%s_net_%s.pth' % (epoch, name) + """ + + + for name in self.model_names: + if isinstance(name, str): + if epoch[-3:] == 'pkl': + load_path = epoch + else: + load_filename = '%s_%s.pkl' % (epoch, name) + load_path = os.path.join(self.save_dir, load_filename) + + net = getattr(self, name) +# if isinstance(net, torch.nn.DataParallel): +# net = net.module + if os.path.exists(load_path): + state_dict = torch.load(load_path, map_location=str(self.device)) + if self.device == torch.device('cpu'): + for key in list(state_dict.keys()): + state_dict[key[7:]] = state_dict.pop(key) + if hasattr(state_dict, '_metadata'): + del state_dict._metadata + print('loading the model from %s' % load_path) + net.load_state_dict(state_dict, strict=False) + else: + print('No model weight file:', load_path, 'initialize model without pre-trained weights.') + if self.isTrain == False: + raise ValueError('We are now in inference process, no pre-trained model found! 
+    def load_checkpoint(self, checkpoint_path):
+        """Load all the networks from a single checkpoint file."""
+        for name in self.model_names:
+            if isinstance(name, str):
+                load_path = checkpoint_path
+
+                net = getattr(self, name)
+                if os.path.exists(load_path):
+                    state_dict = torch.load(load_path, map_location=str(self.device))
+                    if self.device == torch.device('cpu'):
+                        # strip the 'module.' prefix that DataParallel adds to parameter names
+                        for key in list(state_dict.keys()):
+                            state_dict[key[7:]] = state_dict.pop(key)
+                    if hasattr(state_dict, '_metadata'):
+                        del state_dict._metadata
+                    print('loading the model from %s' % load_path)
+                    net.load_state_dict(state_dict, strict=False)
+                else:
+                    print('No model weight file:', load_path, 'initialize model without pre-trained weights.')
+                    if self.isTrain == False:
+                        raise ValueError('We are now in inference mode and no pre-trained model was found! Check the model checkpoint!')
+
+    def print_networks(self, verbose):
+        """Print the total number of parameters in the network and (if verbose) network architecture
+
+        Parameters:
+            verbose (bool) -- if verbose: print the network architecture
+        """
+        print('---------- Networks initialized -------------')
+        for name in self.model_names:
+            if isinstance(name, str):
+                net = getattr(self, name)
+                num_params = 0
+                for param in net.parameters():
+                    num_params += param.numel()
+                if verbose:
+                    print(net)
+                print('[Network %s] Total number of parameters : %.3f M' % (name, num_params / 1e6))
+        print('-----------------------------------------------')
+
+    def set_requires_grad(self, nets, requires_grad=False):
+        """Set requires_grad=False for all the networks to avoid unnecessary computations
+        Parameters:
+            nets (network list)  -- a list of networks
+            requires_grad (bool) -- whether the networks require gradients or not
+        """
+        if not isinstance(nets, list):
+            nets = [nets]
+        for net in nets:
+            if net is not None:
+                for param in net.parameters():
+                    param.requires_grad = requires_grad
diff --git a/motion-gan-pipeline/motion-generation/models/losses.py b/motion-gan-pipeline/motion-generation/models/losses.py
new file mode 100644
index 0000000..4f38542
--- /dev/null
+++ b/motion-gan-pipeline/motion-generation/models/losses.py
@@ -0,0 +1,275 @@
+import torch
+import torch.nn as nn
+from torch.autograd import Variable
+import math
+import torch.nn.functional as F
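+
+# Layout convention shared by GMMLogLoss and Sample_GMM below (b = batch,
+# T = frames). A minimal sanity check, assuming the default headpose setup
+# ncenter=1, ndim=12 from the options file:
+#   loss_fn = GMMLogLoss(ncenter=1, ndim=12)
+#   pred = torch.randn(2, 50, 1 + 2 * 1 * 12)   # [b, T, ncenter + 2*ncenter*ndim] = [2, 50, 25]
+#   target = torch.randn(2, 50, 12)             # [b, T, ndim]
+#   nll = loss_fn(pred, target)                 # scalar negative log-likelihood
+# (Sample_GMM, unlike the loss, returns a CUDA tensor as written.)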
+class GMMLogLoss(nn.Module):
+    '''Compute the GMM loss between the model output and the ground-truth data.
+    Args:
+        ncenter: number of Gaussian distributions in the mixture
+        ndim: dimension of each Gaussian distribution
+        sigma_min: lower bound used to clamp the predicted sigma values.
+    '''
+    def __init__(self, ncenter, ndim, sigma_min=0.03):
+        super(GMMLogLoss, self).__init__()
+        self.ncenter = ncenter
+        self.ndim = ndim
+        self.sigma_min = sigma_min
+
+    def forward(self, output, target):
+        '''
+        Args:
+            output: [b, T, ncenter + ncenter * ndim * 2]:
+                [:, :, : ncenter] shows each gaussian probability
+                [:, :, ncenter : ncenter + ndim * ncenter] shows the average values of each dimension of each gaussian
+                [:, :, ncenter + ndim * ncenter : ncenter + ndim * 2 * ncenter] shows the negative log sigma of each dimension of each gaussian
+            target: [b, T, ndim], the ground-truth target (e.g. landmark) data
+        Maximizing the log-likelihood is equivalent to minimizing the negative log-likelihood.
+        NOTE: computing ln(sigma) directly is numerically unstable, since the raw sigma
+        predictions would first have to be clipped to positive values. Hence the network
+        predicts the negative log sigma instead, which means:
+            `` sigma = 1/exp(predict), predict = -ln(sigma) ``
+        This is exactly the 'B' term below.
+        Currently only a single Gaussian distribution is implemented, hence the first
+        ncenter values of the prediction (the mixture weights) are unused.
+        For a single Gaussian distribution:
+            L(mu, sigma) = -n/2 * ln(2*pi*sigma^2) - 1/(2*sigma^2) * sum^n (x_i - mu)^2   (n observations; n=1 for one frame, x_i is the ground truth)
+                         = -1/2 * ln(2*pi) - 1/2 * ln(sigma^2) - 1/(2*sigma^2) * (x - mu)^2
+            => min -L(mu, sigma) = 0.5 * ln(2*pi) + ln(sigma) + 0.5 * ((x - mu)/sigma)^2
+                                 = A - B + C
+        Over a batch, the b and T dimensions are averaged.
+        '''
+        b, T, _ = target.shape
+        # read prediction parameters
+        mus = output[:, :, self.ncenter : (self.ncenter + self.ncenter * self.ndim)].view(b, T, self.ncenter, self.ndim)  # [b, T, ncenter, ndim]
+
+        # apply min sigma
+        neg_log_sigmas_out = output[:, :, (self.ncenter + self.ncenter * self.ndim):].view(b, T, self.ncenter, self.ndim)  # [b, T, ncenter, ndim]
+        inv_sigmas_min = torch.ones_like(neg_log_sigmas_out) * (1. / self.sigma_min)  # ones_like keeps the tensor on the input device (works on CPU too)
+        inv_sigmas_min_log = torch.log(inv_sigmas_min)
+        neg_log_sigmas = torch.min(neg_log_sigmas_out, inv_sigmas_min_log)
+
+        inv_sigmas = torch.exp(neg_log_sigmas)
+        # replicate the target along the ncenter axis so it can be subtracted from each mu
+        target_rep = target.unsqueeze(2).expand(b, T, self.ncenter, self.ndim)  # [b, T, ncenter, ndim]
+        MU_DIFF = target_rep - mus  # [b, T, ncenter, ndim]
+        A = 0.5 * math.log(2 * math.pi)  # 0.9189385332046727
+        B = neg_log_sigmas  # [b, T, ncenter, ndim]
+        C = 0.5 * (MU_DIFF * inv_sigmas) ** 2  # [b, T, ncenter, ndim]
+        negative_loglikelihood = A - B + C  # [b, T, ncenter, ndim]
+
+        return negative_loglikelihood.mean()
+
+
+def Sample_GMM(gmm_params, ncenter, ndim, weight_smooth=0.0, sigma_scale=0.0):
+    ''' Sample values from a given GMM distribution.
+ Args: + gmm_params: [b, target_length, (2 * ndim + 1) * ncenter], including the + distribution weights, average and sigma + ncenter: numbers of gaussian distribution + ndim: dimension of each gaussian distribution + weight_smooth: float, smooth the gaussian distribution weights + sigma_scale: float, adjust the gaussian scale, larger for sharper prediction, + 0 for zero sigma which always return average values + Returns: + current_sample: [] + ''' + # reshape as [b*T, (2 * ndim + 1) * ncenter] + b, T, _ = gmm_params.shape + gmm_params_cpu = gmm_params.cpu().view(-1, (2 * ndim + 1) * ncenter) + # compute each distrubution probability + prob = nn.functional.softmax(gmm_params_cpu[:, : ncenter] * (1 + weight_smooth), dim=1) + # select the gaussian distribution according to their weights + selected_idx = torch.multinomial(prob, num_samples=1, replacement=True) + + mu = gmm_params_cpu[:, ncenter : ncenter + ncenter * ndim] + # please note that we use -logsigma as output, hence here we need to take the negative + sigma = torch.exp(-gmm_params_cpu[:, ncenter + ncenter * ndim:]) * sigma_scale +# print('sigma average:', sigma.mean()) + + selected_sigma = torch.empty(b*T, ndim).float() + selected_mu = torch.empty(b*T, ndim).float() + current_sample = torch.randn(b*T, ndim).float() +# current_sample = test_sample + + for i in range(b*T): + idx = selected_idx[i, 0] + selected_sigma[i, :] = sigma[i, idx * ndim:(idx + 1) * ndim] + selected_mu[i, :] = mu[i, idx * ndim:(idx + 1) * ndim] + + # sample with sel sigma and sel mean + current_sample = current_sample * selected_sigma + selected_mu + # cur_sample = sel_mu +# return current_sample.unsqueeze(1).cuda() + return current_sample.reshape(b, T, -1).cuda() + + + + +class GANLoss(nn.Module): + def __init__(self, use_lsgan=True, target_real_label=1.0, target_fake_label=0.0, + tensor=torch.FloatTensor): + super(GANLoss, self).__init__() + self.real_label = target_real_label + self.fake_label = target_fake_label + self.real_label_var = None + self.fake_label_var = None + self.Tensor = tensor + if use_lsgan: + self.loss = nn.MSELoss() + else: + self.loss = nn.BCELoss() + + def get_target_tensor(self, input, target_is_real): + target_tensor = None + gpu_id = input.get_device() + if target_is_real: + create_label = ((self.real_label_var is None) or + (self.real_label_var.numel() != input.numel())) + if create_label: + real_tensor = self.Tensor(input.size()).cuda(gpu_id).fill_(self.real_label) + self.real_label_var = Variable(real_tensor, requires_grad=False) + target_tensor = self.real_label_var + else: + create_label = ((self.fake_label_var is None) or + (self.fake_label_var.numel() != input.numel())) + if create_label: + fake_tensor = self.Tensor(input.size()).cuda(gpu_id).fill_(self.fake_label) + self.fake_label_var = Variable(fake_tensor, requires_grad=False) + target_tensor = self.fake_label_var + return target_tensor + + def __call__(self, input, target_is_real): + if isinstance(input[0], list): + loss = 0 + for input_i in input: + pred = input_i[-1] + target_tensor = self.get_target_tensor(pred, target_is_real) + loss += self.loss(pred, target_tensor) + return loss + else: + target_tensor = self.get_target_tensor(input[-1], target_is_real) + return self.loss(input[-1], target_tensor) + + + + +class VGGLoss(nn.Module): + def __init__(self, model=None): + super(VGGLoss, self).__init__() + if model is None: + self.vgg = Vgg19() + else: + self.vgg = model + + self.vgg.cuda() + # self.vgg.eval() + self.criterion = nn.L1Loss() + self.style_criterion = 
StyleLoss() + self.weights = [1.0, 1.0, 1.0, 1.0, 1.0] + self.style_weights = [1.0, 1.0, 1.0, 1.0, 1.0] + # self.weights = [5.0, 1.0, 0.5, 0.4, 0.8] + # self.style_weights = [10e4, 1000, 50, 15, 50] + + def forward(self, x, y, style=False): + x_vgg, y_vgg = self.vgg(x), self.vgg(y) + loss = 0 + if style: + # return both perceptual loss and style loss. + style_loss = 0 + for i in range(len(x_vgg)): + this_loss = (self.weights[i] * + self.criterion(x_vgg[i], y_vgg[i].detach())) + this_style_loss = (self.style_weights[i] * + self.style_criterion(x_vgg[i], y_vgg[i].detach())) + loss += this_loss + style_loss += this_style_loss + return loss, style_loss + + for i in range(len(x_vgg)): + this_loss = (self.weights[i] * self.criterion(x_vgg[i], y_vgg[i].detach())) + loss += this_loss + return loss + + +def gram_matrix(input): + a, b, c, d = input.size() # a=batch size(=1) + # b=number of feature maps + # (c,d)=dimensions of a f. map (N=c*d) + features = input.view(a * b, c * d) # resise F_XL into \hat F_XL + G = torch.mm(features, features.t()) # compute the gram product + # we 'normalize' the values of the gram matrix + # by dividing by the number of element in each feature maps. + return G.div(a * b * c * d) + + +class StyleLoss(nn.Module): + def __init__(self): + super(StyleLoss, self).__init__() + + def forward(self, x, y): + Gx = gram_matrix(x) + Gy = gram_matrix(y) + return F.mse_loss(Gx, Gy) * 30000000 + + + +class MaskedL1Loss(nn.Module): + def __init__(self): + super(MaskedL1Loss, self).__init__() + self.criterion = nn.L1Loss() + + def forward(self, input, target, mask): + mask = mask.expand(-1, input.size()[1], -1, -1) + loss = self.criterion(input * mask, target * mask) + return loss + + + +from torchvision import models +class Vgg19(nn.Module): + def __init__(self, requires_grad=False): + super(Vgg19, self).__init__() + vgg_pretrained_features = models.vgg19(pretrained=True).features + self.slice1 = torch.nn.Sequential() + self.slice2 = torch.nn.Sequential() + self.slice3 = torch.nn.Sequential() + self.slice4 = torch.nn.Sequential() + self.slice5 = torch.nn.Sequential() + for x in range(2): + self.slice1.add_module(str(x), vgg_pretrained_features[x]) + for x in range(2, 7): + self.slice2.add_module(str(x), vgg_pretrained_features[x]) + for x in range(7, 12): + self.slice3.add_module(str(x), vgg_pretrained_features[x]) + for x in range(12, 21): + self.slice4.add_module(str(x), vgg_pretrained_features[x]) + for x in range(21, 30): + self.slice5.add_module(str(x), vgg_pretrained_features[x]) + if not requires_grad: + for param in self.parameters(): + param.requires_grad = False + + def forward(self, X): + h_relu1 = self.slice1(X) + h_relu2 = self.slice2(h_relu1) + h_relu3 = self.slice3(h_relu2) + h_relu4 = self.slice4(h_relu3) + h_relu5 = self.slice5(h_relu4) + out = [h_relu1, h_relu2, h_relu3, h_relu4, h_relu5] + return out + + + + + + + + + + + diff --git a/motion-gan-pipeline/motion-generation/models/networks.py b/motion-gan-pipeline/motion-generation/models/networks.py new file mode 100644 index 0000000..17ff4e1 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/models/networks.py @@ -0,0 +1,873 @@ +import os +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from torch.optim import lr_scheduler +from torch.nn import init +import functools + + +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.nn.utils.rnn import pad_packed_sequence, pack_padded_sequence + + + 
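+# A quick derivation for the WaveNet below (a sketch): each residual block
+# stacks `residual_layers` dilated convolutions with dilations 1, 2, ..., 2**(L-1),
+# so with kernel size k the receptive field is
+#   1 + residual_blocks * (k - 1) * (2**residual_layers - 1)
+# e.g. the audio2headpose defaults (k=2, layers=7, blocks=2) give
+#   1 + 2 * (2**7 - 1) = 255 frames, matching the "255 frames" note next to
+# A2H_receptive_field in the options file.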
+############################################################################### +# The detailed network architecture implementation for each model +############################################################################### + +class APC_encoder(nn.Module): + def __init__(self, + mel_dim, + hidden_size, + num_layers, + residual): + super(APC_encoder, self).__init__() + + input_size = mel_dim + + in_sizes = ([input_size] + [hidden_size] * (num_layers - 1)) + out_sizes = [hidden_size] * num_layers + self.rnns = nn.ModuleList( + [nn.GRU(input_size=in_size, hidden_size=out_size, batch_first=True) for (in_size, out_size) in zip(in_sizes, out_sizes)]) + + self.rnn_residual = residual + + def forward(self, inputs, lengths): + ''' + input: + inputs: (batch_size, seq_len, mel_dim) + lengths: (batch_size,) + + return: + predicted_mel: (batch_size, seq_len, mel_dim) + internal_reps: (num_layers + x, batch_size, seq_len, rnn_hidden_size), + where x is 1 if there's a prenet, otherwise 0 + ''' + with torch.no_grad(): + seq_len = inputs.size(1) + packed_rnn_inputs = pack_padded_sequence(inputs, lengths, True) + + for i, layer in enumerate(self.rnns): + packed_rnn_outputs, _ = layer(packed_rnn_inputs) + + rnn_outputs, _ = pad_packed_sequence( + packed_rnn_outputs, True, total_length=seq_len) + # outputs: (batch_size, seq_len, rnn_hidden_size) + + if i + 1 < len(self.rnns): + rnn_inputs, _ = pad_packed_sequence( + packed_rnn_inputs, True, total_length=seq_len) + # rnn_inputs: (batch_size, seq_len, rnn_hidden_size) + if self.rnn_residual and rnn_inputs.size(-1) == rnn_outputs.size(-1): + # Residual connections + rnn_outputs = rnn_outputs + rnn_inputs + packed_rnn_inputs = pack_padded_sequence(rnn_outputs, lengths, True) + + + return rnn_outputs + + + + +class WaveNet(nn.Module): + ''' This is a complete implementation of WaveNet architecture, mainly composed + of several residual blocks and some other operations. + Args: + batch_size: number of batch size + residual_layers: number of layers in each residual blocks + residual_blocks: number of residual blocks + dilation_channels: number of channels for the dilated convolution + residual_channels: number of channels for the residual connections + skip_channels: number of channels for the skip connections + end_channels: number of channels for the end convolution + classes: Number of possible values each sample can have as output + kernel_size: size of dilation convolution kernel + output_length(int): Number of samples that are generated for each input + use_bias: whether bias is used in each layer. + cond(bool): whether condition information are applied. if cond == True: + cond_channels: channel number of condition information + `` loss(str): GMM loss is adopted. 
`` + ''' + def __init__(self, + residual_layers = 10, + residual_blocks = 3, + dilation_channels = 32, + residual_channels = 32, + skip_channels = 256, + kernel_size = 2, + output_length = 16, + use_bias = False, + cond = True, + input_channels = 128, + ncenter = 1, + ndim = 73*2, + output_channels = 73*3, + cond_channels = 256, + activation = 'leakyrelu'): + super(WaveNet, self).__init__() + + self.layers = residual_layers + self.blocks = residual_blocks + self.dilation_channels = dilation_channels + self.residual_channels = residual_channels + self.skip_channels = skip_channels + self.input_channels = input_channels + self.ncenter = ncenter + self.ndim = ndim +# self.output_channels = (2 * self.ndim + 1) * self.ncenter + self.output_channels = output_channels + self.kernel_size = kernel_size + self.output_length = output_length + self.bias = use_bias + self.cond = cond + self.cond_channels = cond_channels + + # build modules + self.dilations = [] + self.dilation_queues = [] + residual_blocks = [] + self.receptive_field = 1 + + # 1x1 convolution to create channels + self.start_conv1 = nn.Conv1d(in_channels=self.input_channels, + out_channels=self.residual_channels, + kernel_size=1, + bias=True) + self.start_conv2 = nn.Conv1d(in_channels=self.residual_channels, + out_channels=self.residual_channels, + kernel_size=1, + bias=True) + if activation == 'relu': + self.activation = nn.ReLU(inplace = True) + elif activation == 'leakyrelu': + self.activation = nn.LeakyReLU(0.2) + self.drop_out2D = nn.Dropout2d(p=0.5) + + + # build residual blocks + for b in range(self.blocks): + new_dilation = 1 + additional_scope = kernel_size - 1 + for i in range(self.layers): + # create current residual block + residual_blocks.append(residual_block(dilation = new_dilation, + dilation_channels = self.dilation_channels, + residual_channels = self.residual_channels, + skip_channels = self.skip_channels, + kernel_size = self.kernel_size, + use_bias = self.bias, + cond = self.cond, + cond_channels = self.cond_channels)) + new_dilation *= 2 + + self.receptive_field += additional_scope + additional_scope *= 2 + + self.residual_blocks = nn.ModuleList(residual_blocks) + # end convolutions + + self.end_conv_1 = nn.Conv1d(in_channels = self.skip_channels, + out_channels = self.output_channels, + kernel_size = 1, + bias = True) + self.end_conv_2 = nn.Conv1d(in_channels = self.output_channels, + out_channels = self.output_channels, + kernel_size = 1, + bias = True) + + + def parameter_count(self): + par = list(self.parameters()) + s = sum([np.prod(list(d.size())) for d in par]) + return s + + def forward(self, input, cond=None): + ''' + Args: + input: [b, ndim, T] + cond: [b, nfeature, T] + Returns: + res: [b, T, ndim] + ''' + # dropout + x = self.drop_out2D(input) + + # preprocess + x = self.activation(self.start_conv1(x)) + x = self.activation(self.start_conv2(x)) + skip = 0 +# for i in range(self.blocks * self.layers): + for i, dilation_block in enumerate(self.residual_blocks): + x, current_skip = self.residual_blocks[i](x, cond) + skip += current_skip + + # postprocess + res = self.end_conv_1(self.activation(skip)) + res = self.end_conv_2(self.activation(res)) + + # cut the output size + res = res[:, :, -self.output_length:] # [b, ndim, T] + res = res.transpose(1, 2) # [b, T, ndim] + + return res + + + +class residual_block(nn.Module): + ''' + This is the implementation of a residual block in wavenet model. Every + residual block takes previous block's output as input. 
The forward pass of + each residual block can be illusatrated as below: + + ######################### Current Residual Block ########################## + # |-----------------------*residual*--------------------| # + # | | # + # | |-- dilated conv -- tanh --| | # + # -> -|-- pad--| * ---- |-- 1x1 -- + --> *input* # + # |-- dilated conv -- sigm --| | # + # 1x1 # + # | # + # ---------------------------------------------> + -------------> *skip* # + ########################################################################### + As shown above, each residual block returns two value: 'input' and 'skip': + 'input' is indeed this block's output and also is the next block's input. + 'skip' is the skip data which will be added finally to compute the prediction. + The input args own the same meaning in the WaveNet class. + + ''' + def __init__(self, + dilation, + dilation_channels = 32, + residual_channels = 32, + skip_channels = 256, + kernel_size = 2, + use_bias = False, + cond = True, + cond_channels = 128): + super(residual_block, self).__init__() + + self.dilation = dilation + self.dilation_channels = dilation_channels + self.residual_channels = residual_channels + self.skip_channels = skip_channels + self.kernel_size = kernel_size + self.bias = use_bias + self.cond = cond + self.cond_channels = cond_channels + # zero padding to the left of the sequence. + self.padding = (int((self.kernel_size - 1) * self.dilation), 0) + + # dilated convolutions + self.filter_conv= nn.Conv1d(in_channels = self.residual_channels, + out_channels = self.dilation_channels, + kernel_size = self.kernel_size, + dilation = self.dilation, + bias = self.bias) + + self.gate_conv = nn.Conv1d(in_channels = self.residual_channels, + out_channels = self.dilation_channels, + kernel_size = self.kernel_size, + dilation = self.dilation, + bias = self.bias) + + # 1x1 convolution for residual connections + self.residual_conv = nn.Conv1d(in_channels = self.dilation_channels, + out_channels = self.residual_channels, + kernel_size = 1, + bias = self.bias) + + # 1x1 convolution for skip connections + self.skip_conv = nn.Conv1d(in_channels = self.dilation_channels, + out_channels = self.skip_channels, + kernel_size = 1, + bias = self.bias) + + # condition conv, no dilation + if self.cond == True: + self.cond_filter_conv = nn.Conv1d(in_channels = self.cond_channels, + out_channels = self.dilation_channels, + kernel_size = 1, + bias = True) + self.cond_gate_conv = nn.Conv1d(in_channels = self.cond_channels, + out_channels = self.dilation_channels, + kernel_size = 1, + bias = True) + + + def forward(self, input, cond=None): + if self.cond is True and cond is None: + raise RuntimeError("set using condition to true, but no cond tensor inputed") + + x_pad = F.pad(input, self.padding) + # filter + filter = self.filter_conv(x_pad) + # gate + gate = self.gate_conv(x_pad) + + if self.cond == True and cond is not None: + filter_cond = self.cond_filter_conv(cond) + gate_cond = self.cond_gate_conv(cond) + # add cond results + filter = filter + filter_cond + gate = gate + gate_cond + + # element-wise multiple + filter = torch.tanh(filter) + gate = torch.sigmoid(gate) + x = filter * gate + + # residual and skip + residual = self.residual_conv(x) + input + skip = self.skip_conv(x) + + + return residual, skip + + + + +## 2D convolution layers +def conv2d(batch_norm, in_planes, out_planes, kernel_size=3, stride=1): + if batch_norm: + return nn.Sequential( + nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, padding=(kernel_size-1)//2, 
                      bias=True),
+            nn.BatchNorm2d(out_planes),
+            nn.LeakyReLU(0.2, inplace=True)
+        )
+    else:
+        return nn.Sequential(
+            nn.Conv2d(in_planes, out_planes, kernel_size=kernel_size, stride=stride, padding=(kernel_size-1)//2, bias=True),
+            nn.LeakyReLU(0.2, inplace=True)
+        )
+
+
+def init_weights(net, init_type='normal', init_gain=0.02):
+    """Initialize network weights.
+
+    Parameters:
+        net (network)     -- network to be initialized
+        init_type (str)   -- the name of an initialization method: normal | xavier | kaiming | orthogonal
+        init_gain (float) -- scaling factor for normal, xavier and orthogonal.
+
+    We use 'normal' in the original pix2pix and CycleGAN paper. But xavier and kaiming might
+    work better for some applications. Feel free to try yourself.
+    """
+    def init_func(m):  # define the initialization function
+        classname = m.__class__.__name__
+        if hasattr(m, 'weight') and (classname.find('Conv') != -1 or classname.find('Linear') != -1):
+            if init_type == 'normal':
+                init.normal_(m.weight.data, 0.0, init_gain)
+            elif init_type == 'xavier':
+                init.xavier_normal_(m.weight.data, gain=init_gain)
+            elif init_type == 'kaiming':
+                init.kaiming_normal_(m.weight.data, a=0, mode='fan_in')
+            elif init_type == 'orthogonal':
+                init.orthogonal_(m.weight.data, gain=init_gain)
+            else:
+                raise NotImplementedError('initialization method [%s] is not implemented' % init_type)
+            if hasattr(m, 'bias') and m.bias is not None:
+                init.constant_(m.bias.data, 0.0)
+        elif classname.find('BatchNorm2d') != -1:  # BatchNorm Layer's weight is not a matrix; only normal distribution applies.
+            init.normal_(m.weight.data, 1.0, init_gain)
+            init.constant_(m.bias.data, 0.0)
+
+    print('initialize network with %s' % init_type)
+    net.apply(init_func)  # apply the initialization function
+
+
+def init_net(net, init_type='normal', init_gain=0.02, gpu_ids=[], useDDP=False):
+    """Initialize a network: 1. register CPU/GPU device (with multi-GPU support); 2. initialize the network weights
+    Parameters:
+        net (network)      -- the network to be initialized
+        init_type (str)    -- the name of an initialization method: normal | xavier | kaiming | orthogonal
+        init_gain (float)  -- scaling factor for normal, xavier and orthogonal.
+        gpu_ids (int list) -- which GPUs the network runs on: e.g., 0,1,2
+
+    Return an initialized network.
+    """
+    if len(gpu_ids) > 0:
+        assert (torch.cuda.is_available())
+        net.to(gpu_ids[0])
+        if useDDP:
+            net = DDP(net, device_ids=gpu_ids)  # the module is already on gpu_ids[0]; DDP handles replication
+            print(f'use DDP to apply models on {gpu_ids}')
+        else:
+            net = torch.nn.DataParallel(net, gpu_ids)  # multi-GPUs
+    init_weights(net, init_type, init_gain=init_gain)
+    return net
+
+
+def get_scheduler(optimizer, opt):
+    """Return a learning rate scheduler
+
+    Parameters:
+        optimizer          -- the optimizer of the network
+        opt (option class) -- stores all the experiment flags; needs to be a subclass of BaseOptions.
+                              opt.lr_policy is the name of learning rate policy: linear | step | plateau | cosine
+
+    For 'linear', we keep the same learning rate for the first <opt.n_epochs> epochs
+    and linearly decay the rate to zero over the next <opt.n_epochs_decay> epochs.
+    For other schedulers (step, plateau, and cosine), we use the default PyTorch schedulers.
+    See https://pytorch.org/docs/stable/optim.html for more details.
+    """
+    if opt.lr_policy == 'linear':
+        def lambda_rule(epoch):
+            lr_l = 1.0 - max(0, epoch - opt.n_epochs) / float(opt.n_epochs_decay + 1)
+            return lr_l
+        scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda_rule, last_epoch=opt.epoch_count-2)
+    elif opt.lr_policy == 'step':
+        scheduler = lr_scheduler.StepLR(optimizer, step_size=opt.lr_decay_iters, gamma=opt.gamma, last_epoch=opt.epoch_count-2)
+        for _ in range(opt.epoch_count-2):
+            scheduler.step()
+    elif opt.lr_policy == 'plateau':
+        scheduler = lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.2, threshold=0.01, patience=5)
+    elif opt.lr_policy == 'cosine':
+        scheduler = lr_scheduler.CosineAnnealingLR(optimizer, T_max=opt.n_epochs, eta_min=0)
+    else:
+        raise NotImplementedError('learning rate policy [%s] is not implemented' % opt.lr_policy)
+    return scheduler
+
+
+def weights_init(m):
+    classname = m.__class__.__name__
+    if classname.find('Conv') != -1 and hasattr(m, 'weight'):
+        m.weight.data.normal_(0.0, 0.02)
+    elif classname.find('BatchNorm2d') != -1:
+        m.weight.data.normal_(1.0, 0.02)
+        m.bias.data.fill_(0)
+
+
+def print_network(net):
+    if isinstance(net, list):
+        net = net[0]
+    num_params = 0
+    for param in net.parameters():
+        num_params += param.numel()
+    print(net)
+    print('Total number of parameters: %d' % num_params)
+
+
+class Feature2FaceGenerator_normal(nn.Module):
+    def __init__(self, input_nc=4, output_nc=3, num_downs=8, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False):
+        super(Feature2FaceGenerator_normal, self).__init__()
+        # construct unet structure
+        unet_block = ResUnetSkipConnectionBlock_small(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer,
+                                                      innermost=True)
+
+        for i in range(num_downs - 5):
+            unet_block = ResUnetSkipConnectionBlock_small(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block,
+                                                          norm_layer=norm_layer, use_dropout=use_dropout)
+        unet_block = ResUnetSkipConnectionBlock_small(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block,
+                                                      norm_layer=norm_layer)
+        unet_block = ResUnetSkipConnectionBlock_small(ngf * 2, ngf * 4, input_nc=None, submodule=unet_block,
+                                                      norm_layer=norm_layer)
+        unet_block = ResUnetSkipConnectionBlock_small(ngf, ngf * 2, input_nc=None, submodule=unet_block,
+                                                      norm_layer=norm_layer)
+        unet_block = ResUnetSkipConnectionBlock_small(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True,
+                                                      norm_layer=norm_layer)
+
+        self.model = unet_block
+
+    def forward(self, input):
+        output = self.model(input)
+        output = torch.tanh(output)  # scale to [-1, 1]
+
+        return output
+
+
+# Defines the submodule with skip connection.
+# X -------------------identity---------------------- X +# |-- downsampling -- |submodule| -- upsampling --| +class ResUnetSkipConnectionBlock_small(nn.Module): + def __init__(self, outer_nc, inner_nc, input_nc=None, + submodule=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d, use_dropout=False): + super(ResUnetSkipConnectionBlock_small, self).__init__() + self.outermost = outermost + use_bias = norm_layer == nn.InstanceNorm2d + + if input_nc is None: + input_nc = outer_nc + downconv = nn.Conv2d(input_nc, inner_nc, kernel_size=3, + stride=2, padding=1, bias=use_bias) + # add two resblock + res_downconv = [ResidualBlock(inner_nc, norm_layer)] + res_upconv = [ResidualBlock(outer_nc, norm_layer)] + + # res_downconv = [ResidualBlock(inner_nc)] + # res_upconv = [ResidualBlock(outer_nc)] + + downrelu = nn.ReLU(True) + uprelu = nn.ReLU(True) + if norm_layer != None: + downnorm = norm_layer(inner_nc) + upnorm = norm_layer(outer_nc) + + if outermost: + upsample = nn.Upsample(scale_factor=2, mode='nearest') + upconv = nn.Conv2d(inner_nc * 2, outer_nc, kernel_size=3, stride=1, padding=1, bias=use_bias) + down = [downconv, downrelu] + res_downconv + # up = [uprelu, upsample, upconv, upnorm] + up = [upsample, upconv] + model = down + [submodule] + up + elif innermost: + upsample = nn.Upsample(scale_factor=2, mode='nearest') + upconv = nn.Conv2d(inner_nc, outer_nc, kernel_size=3, stride=1, padding=1, bias=use_bias) + down = [downconv, downrelu] + res_downconv + if norm_layer == None: + up = [upsample, upconv, uprelu] + res_upconv + else: + up = [upsample, upconv, upnorm, uprelu] + res_upconv + model = down + up + else: + upsample = nn.Upsample(scale_factor=2, mode='nearest') + upconv = nn.Conv2d(inner_nc * 2, outer_nc, kernel_size=3, stride=1, padding=1, bias=use_bias) + if norm_layer == None: + down = [downconv, downrelu] + res_downconv + up = [upsample, upconv, uprelu] + res_upconv + else: + down = [downconv, downnorm, downrelu] + res_downconv + up = [upsample, upconv, upnorm, uprelu] + res_upconv + + if use_dropout: + model = down + [submodule] + up + [nn.Dropout(0.5)] + else: + model = down + [submodule] + up + + self.model = nn.Sequential(*model) + + def forward(self, x): + if self.outermost: + return self.model(x) + else: + return torch.cat([x, self.model(x)], 1) + + + +class Feature2FaceGenerator_large(nn.Module): + def __init__(self, input_nc=4, output_nc=3, num_downs=8, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False): + super(Feature2FaceGenerator_large, self).__init__() + # construct unet structure + unet_block = ResUnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer, + innermost=True) + + for i in range(num_downs - 5): + unet_block = ResUnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block, + norm_layer=norm_layer, use_dropout=use_dropout) + unet_block = ResUnetSkipConnectionBlock(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block, + norm_layer=norm_layer) + unet_block = ResUnetSkipConnectionBlock(ngf * 2, ngf * 4, input_nc=None, submodule=unet_block, + norm_layer=norm_layer) + unet_block = ResUnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=unet_block, + norm_layer=norm_layer) + unet_block = ResUnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True, + norm_layer=norm_layer) + + self.model = unet_block + + def forward(self, input): + output = self.model(input) + output = torch.tanh(output) # scale to [-1, 1] + + return output + + +# Defines the 
submodule with skip connection. +# X -------------------identity---------------------- X +# |-- downsampling -- |submodule| -- upsampling --| +class ResUnetSkipConnectionBlock(nn.Module): + def __init__(self, outer_nc, inner_nc, input_nc=None, + submodule=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d, use_dropout=False): + super(ResUnetSkipConnectionBlock, self).__init__() + self.outermost = outermost + use_bias = norm_layer == nn.InstanceNorm2d + + if input_nc is None: + input_nc = outer_nc + downconv = nn.Conv2d(input_nc, inner_nc, kernel_size=3, + stride=2, padding=1, bias=use_bias) + # add two resblock + res_downconv = [ResidualBlock(inner_nc, norm_layer), ResidualBlock(inner_nc, norm_layer)] + res_upconv = [ResidualBlock(outer_nc, norm_layer), ResidualBlock(outer_nc, norm_layer)] + + # res_downconv = [ResidualBlock(inner_nc)] + # res_upconv = [ResidualBlock(outer_nc)] + + downrelu = nn.ReLU(True) + uprelu = nn.ReLU(True) + if norm_layer != None: + downnorm = norm_layer(inner_nc) + upnorm = norm_layer(outer_nc) + + if outermost: + upsample = nn.Upsample(scale_factor=2, mode='nearest') + upconv = nn.Conv2d(inner_nc * 2, outer_nc, kernel_size=3, stride=1, padding=1, bias=use_bias) + down = [downconv, downrelu] + res_downconv + # up = [uprelu, upsample, upconv, upnorm] + up = [upsample, upconv] + model = down + [submodule] + up + elif innermost: + upsample = nn.Upsample(scale_factor=2, mode='nearest') + upconv = nn.Conv2d(inner_nc, outer_nc, kernel_size=3, stride=1, padding=1, bias=use_bias) + down = [downconv, downrelu] + res_downconv + if norm_layer == None: + up = [upsample, upconv, uprelu] + res_upconv + else: + up = [upsample, upconv, upnorm, uprelu] + res_upconv + model = down + up + else: + upsample = nn.Upsample(scale_factor=2, mode='nearest') + upconv = nn.Conv2d(inner_nc * 2, outer_nc, kernel_size=3, stride=1, padding=1, bias=use_bias) + if norm_layer == None: + down = [downconv, downrelu] + res_downconv + up = [upsample, upconv, uprelu] + res_upconv + else: + down = [downconv, downnorm, downrelu] + res_downconv + up = [upsample, upconv, upnorm, uprelu] + res_upconv + + if use_dropout: + model = down + [submodule] + up + [nn.Dropout(0.5)] + else: + model = down + [submodule] + up + + self.model = nn.Sequential(*model) + + def forward(self, x): + if self.outermost: + return self.model(x) + else: + return torch.cat([x, self.model(x)], 1) + + +# UNet with residual blocks +class ResidualBlock(nn.Module): + def __init__(self, in_features=64, norm_layer=nn.BatchNorm2d): + super(ResidualBlock, self).__init__() + self.relu = nn.ReLU(True) + if norm_layer == None: + # hard to converge with out batch or instance norm + self.block = nn.Sequential( + nn.Conv2d(in_features, in_features, 3, 1, 1, bias=False), + nn.ReLU(inplace=True), + nn.Conv2d(in_features, in_features, 3, 1, 1, bias=False), + ) + else: + self.block = nn.Sequential( + nn.Conv2d(in_features, in_features, 3, 1, 1, bias=False), + norm_layer(in_features), + nn.ReLU(inplace=True), + nn.Conv2d(in_features, in_features, 3, 1, 1, bias=False), + norm_layer(in_features) + ) + + def forward(self, x): + residual = x + out = self.block(x) + out += residual + out = self.relu(out) + return out + # return self.relu(x + self.block(x)) + + + +class Feature2FaceGenerator_Unet(nn.Module): + def __init__(self, input_nc=4, output_nc=3, num_downs=8, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False): + super(Feature2FaceGenerator_Unet, self).__init__() + + unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, 
submodule=None, norm_layer=norm_layer, innermost=True) # add the innermost layer + for i in range(num_downs - 5): # add intermediate layers with ngf * 8 filters + unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer, use_dropout=use_dropout) + # gradually reduce the number of filters from ngf * 8 to ngf + unet_block = UnetSkipConnectionBlock(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(ngf * 2, ngf * 4, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + self.model = UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True, norm_layer=norm_layer) # add the outermost layer + + + def forward(self, input): + output = self.model(input) + + return output + + + + +class UnetSkipConnectionBlock(nn.Module): + """Defines the Unet submodule with skip connection. + X -------------------identity---------------------- + |-- downsampling -- |submodule| -- upsampling --| + """ + + def __init__(self, outer_nc, inner_nc, input_nc=None, + submodule=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d, use_dropout=False): + """Construct a Unet submodule with skip connections. + + Parameters: + outer_nc (int) -- the number of filters in the outer conv layer + inner_nc (int) -- the number of filters in the inner conv layer + input_nc (int) -- the number of channels in input images/features + submodule (UnetSkipConnectionBlock) -- previously defined submodules + outermost (bool) -- if this module is the outermost module + innermost (bool) -- if this module is the innermost module + norm_layer -- normalization layer + use_dropout (bool) -- if use dropout layers. 
+ """ + super(UnetSkipConnectionBlock, self).__init__() + self.outermost = outermost + if type(norm_layer) == functools.partial: + use_bias = norm_layer.func == nn.InstanceNorm2d + else: + use_bias = norm_layer == nn.InstanceNorm2d + if input_nc is None: + input_nc = outer_nc + downconv = nn.Conv2d(input_nc, inner_nc, kernel_size=4, + stride=2, padding=1, bias=use_bias) + downrelu = nn.LeakyReLU(0.2, True) + downnorm = norm_layer(inner_nc) + uprelu = nn.ReLU(True) + upnorm = norm_layer(outer_nc) + + if outermost: + upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, + kernel_size=4, stride=2, + padding=1) + down = [downconv] + up = [uprelu, upconv, nn.Tanh()] + model = down + [submodule] + up + elif innermost: + upconv = nn.ConvTranspose2d(inner_nc, outer_nc, + kernel_size=4, stride=2, + padding=1, bias=use_bias) + down = [downrelu, downconv] + up = [uprelu, upconv, upnorm] + model = down + up + else: + upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, + kernel_size=4, stride=2, + padding=1, bias=use_bias) + down = [downrelu, downconv, downnorm] + up = [uprelu, upconv, upnorm] + + if use_dropout: + model = down + [submodule] + up + [nn.Dropout(0.5)] + else: + model = down + [submodule] + up + + self.model = nn.Sequential(*model) + + def forward(self, x): + if self.outermost: + return self.model(x) + else: # add skip connections + return torch.cat([x, self.model(x)], 1) + + + +class MultiscaleDiscriminator(nn.Module): + def __init__(self, input_nc, ndf=64, n_layers=3, norm_layer=nn.BatchNorm2d, + num_D=3, getIntermFeat=False): + super(MultiscaleDiscriminator, self).__init__() + self.num_D = num_D + self.n_layers = n_layers + self.getIntermFeat = getIntermFeat + ndf_max = 64 + + for i in range(num_D): + netD = NLayerDiscriminator(input_nc, min(ndf_max, ndf*(2**(num_D-1-i))), n_layers, getIntermFeat) + if getIntermFeat: + for j in range(n_layers+2): + setattr(self, 'scale'+str(i)+'_layer'+str(j), getattr(netD, 'model'+str(j))) + else: + setattr(self, 'layer'+str(i), netD.model) + + self.downsample = nn.AvgPool2d(3, stride=2, padding=[1, 1], count_include_pad=False) + + def singleD_forward(self, model, input): + if self.getIntermFeat: + result = [input] + for i in range(len(model)): + result.append(model[i](result[-1])) + return result[1:] + else: + return [model(input)] + + def forward(self, input): + num_D = self.num_D + result = [] + input_downsampled = input + for i in range(num_D): + if self.getIntermFeat: + model = [getattr(self, 'scale'+str(num_D-1-i)+'_layer'+str(j)) for j in range(self.n_layers+2)] + else: + model = getattr(self, 'layer'+str(num_D-1-i)) + result.append(self.singleD_forward(model, input_downsampled)) + if i != (num_D-1): + input_downsampled = self.downsample(input_downsampled) + return result + + + +# Defines the PatchGAN discriminator with the specified arguments. 
+class NLayerDiscriminator(nn.Module): + def __init__(self, input_nc, ndf=64, n_layers=3, getIntermFeat=False): + super(NLayerDiscriminator, self).__init__() + self.getIntermFeat = getIntermFeat + self.n_layers = n_layers + + kw = 4 + padw = int(np.ceil((kw-1.0)/2)) + sequence = [[nn.Conv2d(input_nc, ndf, kernel_size=kw, stride=2, padding=padw), + nn.LeakyReLU(0.2, True)]] + + nf = ndf + for n in range(1, n_layers): + nf_prev = nf + nf = min(nf * 2, 512) + sequence += [[ + nn.Conv2d(nf_prev, nf, kernel_size=kw, stride=2, padding=padw), + nn.BatchNorm2d(nf), + nn.LeakyReLU(0.2, True) + ]] + + nf_prev = nf + nf = min(nf * 2, 512) + sequence += [[ + nn.Conv2d(nf_prev, nf, kernel_size=kw, stride=1, padding=padw), + nn.BatchNorm2d(nf), + nn.LeakyReLU(0.2, True) + ]] + + sequence += [[nn.Conv2d(nf, 1, kernel_size=kw, stride=1, padding=padw)]] + + if getIntermFeat: + for n in range(len(sequence)): + setattr(self, 'model'+str(n), nn.Sequential(*sequence[n])) + else: + sequence_stream = [] + for n in range(len(sequence)): + sequence_stream += sequence[n] + self.model = nn.Sequential(*sequence_stream) + + + def forward(self, input): + if self.getIntermFeat: + res = [input] + for n in range(self.n_layers+2): + model = getattr(self, 'model'+str(n)) + res.append(model(res[-1])) + return res[1:] + else: + return self.model(input) + + + + + + diff --git a/motion-gan-pipeline/motion-generation/options/__init__.py b/motion-gan-pipeline/motion-generation/options/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/motion-gan-pipeline/motion-generation/options/base_options_audio2headpose.py b/motion-gan-pipeline/motion-generation/options/base_options_audio2headpose.py new file mode 100644 index 0000000..101012a --- /dev/null +++ b/motion-gan-pipeline/motion-generation/options/base_options_audio2headpose.py @@ -0,0 +1,192 @@ +import argparse +from email.policy import default +import os +from util import util +import torch +import models +import numpy as np + + +class BaseOptions(): + """This class defines options used during both training and test time. + + It also implements several helper functions such as parsing, printing, and saving the options. + It also gathers additional options defined in functions in both datasets class and model class. + """ + + def __init__(self): + """Reset the class; indicates the class hasn't been initailized""" + self.initialized = False + + def initialize(self, parser): + """Define the common options that are used in both training and test.""" + ## task + parser.add_argument('--task', type=str, default='Audio2Headpose', help='|Audio2Feature|Feature2Face|Full|') + + ## basic parameters + parser.add_argument('--model', type=str, default='audio2headpose', help='trained model') + parser.add_argument('--dataset_mode', type=str, default='full', help='chooses how datasets are loaded. [unaligned | aligned | single]') + parser.add_argument('--name', type=str, default='Audio2Headpose', help='name of the experiment. It decides where to store samples and models') + parser.add_argument('--gpu_ids', type=str, default='0', help='gpu ids: e.g. 0 0,1,2, 0,2. 
use -1 for CPU') + parser.add_argument('--checkpoints_dir', type=str, default='./checkpoints/', help='models are saved here') + + + # data parameters + parser.add_argument('--FPS', type=str, default=25, help='video fps') # they had 60 + parser.add_argument('--sample_rate', type=int, default=16000, help='audio sample rate') + parser.add_argument('--audioRF_history', type=int, default=25, help='audio history receptive field length') #TODO: whoudl I change this to 25? + parser.add_argument('--audioRF_future', type=int, default=0, help='audio future receptive field length') + parser.add_argument('--feature_decoder', type=str, default='WaveNet', help='|WaveNet|LSTM|') + parser.add_argument('--loss', type=str, default='GMM', help='|GMM|L2|') + + + # datasets parameters + parser.add_argument('--dataset_names', type=str, default='Mine', help='chooses how datasets are loaded.') + parser.add_argument('--dataroot', type=str, default='/media/apennino/') + parser.add_argument('--frame_jump_stride', type=int, default=1, help='jump index in audio datasets.') + parser.add_argument('--num_threads', default=0, type=int, help='# threads for loading data') + parser.add_argument('--batch_size', type=int, default=64, help='input batch size') #64 + parser.add_argument('--serial_batches', action='store_true', help='if true, takes images in order to make batches, otherwise takes them randomly') + parser.add_argument('--max_dataset_size', type=int, default=float("inf"), help='Maximum number of samples allowed per datasets. If the datasets directory contains more than max_dataset_size, only a subset is loaded.') + parser.add_argument('--audio_encoder', type=str, default='APC', help='|CNN|LSTM|APC|') + parser.add_argument('--audiofeature_input_channels', type=int, default=80, help='input channels of audio features') + parser.add_argument('--frame_future', type=int, default=5, help='It also looks at frame_future in the future to predict head movents') # They had 15 + parser.add_argument('--predict_length', type=int, default=1, help='Useless parameter, keep to 1') + parser.add_argument('--audio_windows', type=int, default=2) # Was 2 + parser.add_argument('--time_frame_length', type=int, default=50, help='length of training frames in each iteration') # they used 240 because they have 60fps + parser.add_argument('--train_test_split', type=bool, default=True, help='Is dataset split into train and test?') + + # APC parameters + parser.add_argument('--APC_hidden_size', type=int, default=16 * 29) + parser.add_argument('--APC_rnn_layers', type=int, default=3) + parser.add_argument("--APC_residual", action="store_true") + parser.add_argument('--APC_frame_history', type=int, default=25) #they used 60 + parser.add_argument('--APC_model_path', default='/home/alberto/motion-generation/third/AutoregressivePredictiveCoding/bs32-rhl3-rhs512-rd0-adam-res-ts3.pt') + + + ## network parameters + parser.add_argument('--A2H_receptive_field', type=int, default=50, help='Receptive field of the Audio2headpose network') # they used 255 frames -> 4,25 seconds for them at 60 fps + # audio2headpose wavenet + parser.add_argument('--A2H_wavenet_residual_layers', type=int, default=7, help='residual layer numbers') + parser.add_argument('--A2H_wavenet_residual_blocks', type=int, default=2, help='residual block numbers') + parser.add_argument('--A2H_wavenet_dilation_channels', type=int, default=128, help='dilation convolution channels') + parser.add_argument('--A2H_wavenet_residual_channels', type=int, default=128, help='residual channels') + 
        parser.add_argument('--A2H_wavenet_skip_channels', type=int, default=256, help='skip channels')
+        parser.add_argument('--A2H_wavenet_kernel_size', type=int, default=2, help='dilation convolution kernel size')
+        parser.add_argument('--A2H_wavenet_use_bias', type=bool, default=True, help='whether to use bias in dilation convolution')
+        parser.add_argument('--A2H_wavenet_cond', type=bool, default=True, help='whether use condition input')
+        parser.add_argument('--A2H_wavenet_cond_channels', type=int, default=16*29, help='channel number of the condition input')
+        parser.add_argument('--A2H_wavenet_input_channels', type=int, default=12, help='input channels')
+        parser.add_argument('--A2H_GMM_ncenter', type=int, default=1, help='gaussian distribution numbers, 1 for single gaussian distribution')
+        parser.add_argument('--A2H_GMM_ndim', type=int, default=12, help='dimension of each gaussian, usually the number of pts')
+        parser.add_argument('--A2H_GMM_sigma_min', type=float, default=0.03, help='minimal gaussian sigma values')
+
+        # additional parameters
+        parser.add_argument('--verbose', action='store_true', help='if specified, print more debugging information')
+        parser.add_argument('--suffix', default='', type=str, help='customized suffix: opt.name = opt.name + suffix: e.g., {model}_{netG}_size{load_size}')
+        parser.add_argument('--sequence_length', type=int, default=100, help='length of training frames in each iteration')  # They used 240 -> 4 seconds at 60 fps | I put 25 instead of 100
+
+        self.initialized = True
+        return parser
+
+    def gather_options(self):
+        """Initialize our parser with basic options (only once).
+        Add additional model-specific and datasets-specific options.
+        These options are defined in the <modify_commandline_options> function
+        in model and dataset classes.
+        """
+        if not self.initialized:  # check if it has been initialized
+            parser = argparse.ArgumentParser(formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+            parser = self.initialize(parser)
+
+        # get the basic options
+        opt, _ = parser.parse_known_args()
+
+        # modify model-related parser options
+        model_name = opt.model
+        model_option_setter = models.get_option_setter(model_name)
+        parser = model_option_setter(parser, self.isTrain)
+        opt, _ = parser.parse_known_args()  # parse again with new defaults
+
+        # save and return the parser
+        self.parser = parser
+        return opt
+
+    def print_options(self, opt):
+        """Print and save options
+
+        It will print both current options and default values (if different).
+ It will save options into a text file / [checkpoints_dir] / opt.txt + """ + message = '' + message += '----------------- Options ---------------\n' + for k, v in sorted(vars(opt).items()): + comment = '' + default = self.parser.get_default(k) + if v != default: + comment = '\t[default: %s]' % str(default) + message += '{:>25}: {:<30}{}\n'.format(str(k), str(v), comment) + message += '----------------- End -------------------' + print(message) + + if opt.isTrain: + # save to the disk + expr_dir = os.path.join(opt.checkpoints_dir, opt.name) + util.mkdirs(expr_dir) + file_name = os.path.join(expr_dir, '{}_opt.txt'.format(opt.phase)) + with open(file_name, 'wt') as opt_file: + opt_file.write(message) + opt_file.write('\n') + + def parse(self): + """Parse our options, create checkpoints directory suffix, and set up gpu device.""" + opt = self.gather_options() + opt.isTrain = self.isTrain # train or test + + # process opt.suffix + if opt.suffix: + suffix = ('_' + opt.suffix.format(**vars(opt))) if opt.suffix != '' else '' + opt.name = opt.name + suffix + + # self.print_options(opt) + + # set gpu ids + str_ids = opt.gpu_ids.split(',') + opt.gpu_ids = [] + for str_id in str_ids: + id = int(str_id) + if id >= 0: + opt.gpu_ids.append(id) + # if len(opt.gpu_ids) > 0: + # torch.cuda.set_device(opt.gpu_ids[0]) + + # set datasets + if self.isTrain : + + # opt.train_dataset_names = [os.path.join(opt.dataroot, opt.dataset_names, 'Train', f) + # for f in os.listdir(os.path.join(opt.dataroot, opt.dataset_names, 'Train'))] + + opt.dataset_type = 'Train' + + # opt.validate_dataset_names = [os.path.join(opt.dataroot, opt.dataset_names, 'Test', f) + # for f in os.listdir(os.path.join(opt.dataroot, opt.dataset_names, 'Test'))] + + else: + opt.dataset_type = 'Test' + + self.opt = opt + return self.opt + + + + + + + + + + + diff --git a/motion-gan-pipeline/motion-generation/options/test_audio2headpose_options.py b/motion-gan-pipeline/motion-generation/options/test_audio2headpose_options.py new file mode 100644 index 0000000..b8db468 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/options/test_audio2headpose_options.py @@ -0,0 +1,24 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +from .base_options_audio2headpose import BaseOptions + + +class TestOptions(BaseOptions): + """This class includes test options. + + It also includes shared options defined in BaseOptions. + """ + + def initialize(self, parser): + parser = BaseOptions.initialize(self, parser) # define shared options + parser.add_argument('--phase', type=str, default='test', help='train, val, test, etc') + parser.add_argument('--load_epoch', type=str, default='latest', help='which epoch to load? set to latest to use latest cached model') + # Dropout and Batchnorm has different behavioir during training and test. 
+        parser.add_argument('--eval', action='store_true', help='use eval mode during test time.')
+        # rewrite default values
+        parser.set_defaults(time_frame_length=1)
+        parser.set_defaults(dataset_mode='audio')
+        self.isTrain = False
+
+        return parser
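+
+# Typical invocation of the test pipeline built on these options (a sketch;
+# the config path is the default already used by test.py in this repo):
+#   python test.py --config config/Audio2Headpose_Ted.yml
+# The defaults rewritten above (time_frame_length=1, dataset_mode='audio')
+# make inference consume a pure-audio dataset one frame at a time.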
diff --git a/motion-gan-pipeline/motion-generation/options/train_audio2headpose_options.py b/motion-gan-pipeline/motion-generation/options/train_audio2headpose_options.py
new file mode 100644
index 0000000..99af00d
--- /dev/null
+++ b/motion-gan-pipeline/motion-generation/options/train_audio2headpose_options.py
@@ -0,0 +1,63 @@
+# SPDX-License-Identifier: MIT
+# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details
+
+from .base_options_audio2headpose import BaseOptions
+
+
+class TrainOptions(BaseOptions):
+    """This class includes training options.
+
+    It also includes shared options defined in BaseOptions.
+    """
+
+    def initialize(self, parser):
+        parser = BaseOptions.initialize(self, parser)
+
+        # logging options
+        parser.add_argument('--tf_log', type=bool, default=True, help='Use tensorboard logs')
+        parser.add_argument('--display_winsize', type=int, default=256, help='display window size for both visdom and HTML')
+
+        # visdom and HTML visualization parameters
+        parser.add_argument('--display_ncols', type=int, default=4, help='if positive, display all images in a single visdom web panel with certain number of images per row.')
+        parser.add_argument('--display_id', type=int, default=1, help='window id of the web display')
+        parser.add_argument('--display_server', type=str, default="http://localhost", help='visdom server of the web display')
+        parser.add_argument('--display_env', type=str, default='main', help='visdom display environment name (default is "main")')
+        parser.add_argument('--display_port', type=int, default=8097, help='visdom port of the web display')
+        parser.add_argument('--update_html_freq', type=int, default=1000, help='frequency of saving training results to html')
+        parser.add_argument('--print_freq', type=int, default=10000, help='frequency of showing training results on console')
+        parser.add_argument('--no_html', action='store_true', help='do not save intermediate training results to [opt.checkpoints_dir]/[opt.name]/web/')
+
+        # network saving and loading parameters
+        parser.add_argument('--save_latest_freq', type=int, default=5000, help='frequency of saving intermediate checkpoints during training')
+        parser.add_argument('--save_epoch_freq', type=int, default=25, help='frequency of saving checkpoints at the end of epochs')
+        parser.add_argument('--save_by_iter', action='store_true', help='whether saves model by iteration')
+        parser.add_argument('--continue_train', default=False, action='store_true', help='continue training: load the latest model')
+        parser.add_argument('--load_epoch', type=str, default='0', help='which epoch to load? set to latest to use latest cached model')
+        parser.add_argument('--epoch_count', type=int, default=0, help='the starting epoch count, we save the model by <epoch_count>, <epoch_count>+<save_latest_freq>, ...')
+        parser.add_argument('--phase', type=str, default='train', help='train, val, test, etc')
+        parser.add_argument('--re_transform', type=int, default=0, help='re-transform landmarks')
+
+        # training parameters
+        parser.add_argument('--smooth_loss', type=int, default=0, help='use smooth loss weight, 0 for not use')
+        # parser.add_argument('--train_dataset_names', type=str, default='train_list.txt', help='chooses validation datasets.')
+        # parser.add_argument('--validate_dataset_names', type=str, default='val_list.txt', help='chooses validation datasets.')
+        parser.add_argument('--n_epochs', type=int, default=500, help='number of epochs')
+        parser.add_argument('--lr_policy', type=str, default='step', help='learning rate policy. [linear | step | plateau | cosine]')
+        parser.add_argument('--lr', type=float, default=1e-4, help='initial learning rate for adam')
+        parser.add_argument('--gamma', type=float, default=0.2, help='step learning rate gamma')
+        parser.add_argument('--lr_decay_iters', type=int, default=250, help='multiply by a gamma every lr_decay_iters iterations')
+        parser.add_argument('--n_epochs_decay', type=int, default=250, help='number of epochs to linearly decay learning rate to zero')
+        parser.add_argument('--validate_epoch', type=int, default=50, help='validate model every some epochs, 0 for not validate during training')
+        parser.add_argument('--loss_smooth_weight', type=float, default=0, help='smooth loss weight, 0 for not use smooth loss')
+        parser.add_argument('--optimizer', type=str, default='AdamW', help='Adam, AdamW, RMSprop')
+
+        # data augmentations
+        parser.add_argument('--gaussian_noise', type=int, default=1, help='whether add gaussian noise to input & groundtruth features')
+        parser.add_argument('--gaussian_noise_scale', type=float, default=0.01, help='gaussian noise scale')
+
+        self.isTrain = True
+        return parser
diff --git a/motion-gan-pipeline/motion-generation/test.py b/motion-gan-pipeline/motion-generation/test.py
new file mode 100644
index 0000000..b063e8c
--- /dev/null
+++ b/motion-gan-pipeline/motion-generation/test.py
@@ -0,0 +1,277 @@
+import os
+from os.path import join
+import yaml
+import argparse
+import numpy as np
+from options.test_audio2headpose_options import TestOptions
+from datasets import create_dataset
+from models import create_model
+from util.cfgnode import CfgNode
+import cv2
+import librosa
+import soundfile as sf
+from tqdm import tqdm
+import matplotlib.pyplot as plt
+import subprocess
+from pathlib import Path
+import torch
+from funcs import utils
+import sys
+sys.path.append('../preprocessing')
+from preprocessing.face_tracking.FLAME.FLAME import FLAME
+from preprocessing.face_tracking.FLAME.config import cfg as FLAME_cfg
+from preprocessing.face_tracking.FLAME.lbs import vertices2landmarks
+from preprocessing.face_tracking.render_3dmm import Render_FLAME
+from preprocessing.face_tracking.util import *
+
+
+def write_video_with_audio(audio_path, output_path, prefix='pred_', h=512, w=512, fps=25):
+    # NOTE: relies on the module-level globals `save_root` and `nframe` set in the __main__ block below
+    fourcc = cv2.VideoWriter_fourcc(*'DIVX')
+    # fourcc = cv2.VideoWriter_fourcc('M', 'J', 'P', 'G')
+    video_tmp_path = join(save_root, 'tmp.avi')
+    out = cv2.VideoWriter(video_tmp_path, fourcc, fps, (w, h))
+    for j in tqdm(range(nframe), position=0, desc='writing video'):
+        img = cv2.imread(join(save_root, prefix + str(j + 1) + '.jpg'))
+        out.write(img)
+    out.release()
+    cmd = 'ffmpeg -y -i "' + video_tmp_path + '" -i "' + \
+        audio_path + '" -codec copy -shortest "' + output_path + '"'
+    subprocess.call(cmd, shell=True)
+
+    os.remove(video_tmp_path)  # remove the temporary video
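+
+# The command assembled above expands to, e.g. (a sketch with placeholder paths):
+#   ffmpeg -y -i "<save_root>/tmp.avi" -i "<clip>.wav" -codec copy -shortest "<output>.avi"
+# '-codec copy' stream-copies both inputs without re-encoding, and '-shortest'
+# trims the result to the shorter of the video and audio streams.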
+
+
+def load_audio_expressions(dataset):
+    """Collect the indices of clips long enough for training (appears unused in this script).
+
+    Rewritten from its original class-method form: every former `self.*`
+    attribute is read from the passed-in `dataset` object instead.
+    """
+    dataset.valid_clips = []
+    for i, name in enumerate(dataset.clip_names):
+        # check length of video
+        clip_root = os.path.join(dataset.dataset_root, name)
+        n_frames = len(os.listdir(os.path.join(clip_root, 'frames')))
+        # 25 frames of slack: later stages drop 25 frames and would crash without it
+        if n_frames >= dataset.start_point + dataset.target_length + 25:
+            audio_path = os.path.join(clip_root, name + '.wav')
+            audio, _ = librosa.load(audio_path, sr=dataset.sample_rate)
+
+            if len(audio) >= dataset.frame_future + dataset.A2H_item_length:
+                dataset.valid_clips.append(i)
+            else:
+                print(f'Audio {name} is too short and will not be used for training.')
+        else:
+            print(f'Clip {name} is too short and will not be used for training.')
+
+    dataset.clip_nums = len(dataset.valid_clips)
+    print(f'Total clips for training: {dataset.clip_nums}')
+
+
+if __name__ == '__main__':
+    # Load default options
+    Test_parser = TestOptions()
+    opt = Test_parser.parse()  # get training options
+
+    # Load config
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--config', default='config/Audio2Headpose_Ted.yml',
+                        help="person name, e.g. Obama1, Obama2, May, Nadella, McStay", required=False)
+    inopt = parser.parse_args()
+    # TODO: make config
+    with open(inopt.config) as f:
+        cfg_dict = yaml.load(f, Loader=yaml.FullLoader)
+        cfg = CfgNode(cfg_dict)
+
+    # Overwrite with config
+    opt.phase = 'Test'
+    opt.name = cfg.experiment.name
+    opt.dataset_mode = 'audio'  # for testing
+    opt.dataroot = cfg.experiment.dataroot
+    opt.dataset_names = cfg.experiment.dataset_names
+    opt.FPS = cfg.experiment.fps
+    opt.serial_batches = True
+
+    # save to the disk
+    Test_parser.print_options(opt)
+
+    # Set device
+    device = torch.device('cuda:{}'.format(opt.gpu_ids[0])) if len(
+        opt.gpu_ids) > 0 else torch.device('cpu')
+
+    # Load data
+    # create a dataset given opt.dataset_mode and other options
+    dataset = create_dataset(opt)
+    try:
+        fit_data = dataset.dataset.fit_data
+    except AttributeError:
+        # No tracking data available: fall back to neutral camera and face parameters.
+        fit_data = None
+        focal = torch.tensor([900.]).to(device).double()  # torch.from_numpy() would reject a plain list
+        expr_para = torch.zeros(size=(1, 53)).to(device).double()
+        id_para = torch.zeros(size=(1, 100)).to(device).double()
+
+    # Load Model
+    print('---------- Loading Model: {} -------------'.format(opt.task))
+    Audio2Headpose = create_model(opt)
+    Audio2Headpose.setup(opt)
+    Audio2Headpose.eval()
+
+    stop_at = 10
+
+    for iter, file_index in enumerate(dataset.dataset.valid_clips):
+
+        if iter >= stop_at:
+            break
+
+        # NB: file_index is itself an entry of valid_clips; indexing valid_clips with it
+        # again looks unintended but is kept as-is.
+        test_name = dataset.dataset.clip_names[dataset.dataset.valid_clips[file_index]]
+        #____________________________________________________#
+        print('Generating movement for video: ', test_name)
+        audio = dataset.dataset.audio[file_index]
+        audio_features = dataset.dataset.audio_features[file_index]
+        try:
+            audio_expr = dataset.dataset.audio_expr[file_index]
+        except AttributeError:  # was argparse.ArgumentError, which attribute access never raises
+            audio_expr = None
+
+        # Audio2Headpose
+        print('Headpose inference...')
+        # set history headposes as zero
+        pre_headpose = np.zeros(opt.A2H_wavenet_input_channels, np.float32)
+        pred_Head = Audio2Headpose.generate_sequences(
+            audio_features, pre_headpose, fill_zero=True, sigma_scale=0.3, opt=opt)
+
+        # Build FLAME and Renderer
+        try:
+            h = fit_data[iter]['h']
+            w = fit_data[iter]['w']
+            focal = fit_data[iter]['focal']
+            id_para = fit_data[iter]['id']
+            # expr_para = fit_data[iter]['exp']
+            expr_para = audio_expr
+            nframe = min(expr_para.size()[0], pred_Head.shape[0])
+
+        except (KeyError, TypeError, AttributeError):
+            # fit_data is None/incomplete or audio_expr is missing: fall back to defaults.
+            h = 720
+            w = 1280
+            nframe = pred_Head.shape[0]
+            if expr_para is None:
+                expr_para = torch.zeros(size=(1, 53)).to(device).double()
+            expr_para = expr_para.repeat(nframe, 1)  # a bare .repeat(nframe) fails on a 2-D tensor
+
+        cxy = torch.tensor((w / 2.0, h / 2.0), dtype=torch.float).cpu()
+
+        model_3dmm = FLAME(FLAME_cfg.model)
+        renderer = Render_FLAME(model_3dmm.faces_tensor, focal, h, w, 1, device)
+
+        # Postprocessing
+        if fit_data:
+            og_rot = fit_data[iter]['euler'].numpy()
+            # og_rot[:, 0] += 180
+            og_trans = fit_data[iter]['trans'].numpy()
+
+            mean_translation = og_trans.mean(axis=0)
+
+            # map the headpose back to the original coordinate frame
+            Head_smooth_sigma = [5, 10]
+
+            pred_headpose = utils.headpose_smooth(
+                pred_Head[:, :6], Head_smooth_sigma).astype(np.float32)
+
+            # pred_headpose = pred_Head[:, :6].astype(np.float32)
+            pred_headpose[:, 3:] += mean_translation
+            # pred_headpose[:, 0] += 180
+
+        else:
+            pred_headpose = pred_Head[:, :6].astype(np.float32)
+
+        # Make images
+        save_root = os.path.join('./results/', opt.name, test_name)
+        os.makedirs(save_root, exist_ok=True)
+        os.makedirs(os.path.join(save_root, 'landmarks'), exist_ok=True)  # previously skipped when save_root already existed
+
+        np.save(os.path.join(save_root, 'headposes.npy'), pred_headpose)
+
+        for i in tqdm(range(nframe), desc='rendering: '):
+            R = torch.from_numpy(pred_headpose[i, 0:3]).unsqueeze(0).to(device).double()
+            t = torch.from_numpy(pred_headpose[i, 3:6]).unsqueeze(0).to(device).double()
+            id = id_para.to(device).double()
+            expr = expr_para[i].unsqueeze(0).to(device).double()
+
+            if 0:  # debug: compare the original tracked pose against the prediction
+                og_R = torch.from_numpy(og_rot[i]).unsqueeze(0).to(device).double()
+                og_t = torch.from_numpy(og_trans[i]).unsqueeze(0).to(device).double()
+                print(f'OG Rotation euler: \n{og_R.data}, \nOG trans: \n{og_t.data}')
+                print(f'\nPred Rotation euler: \n{R.data},\nPred trans: \n{t.data}')
+
+                og_rott_geo = model_3dmm.forward_geo(id, expr, og_R, og_t)
+
+                og_landmarks3d = model_3dmm.get_3dlandmarks(
+                    id, expr, og_R, og_t, focal, cxy).cpu()
+                og_proj_geo = proj_pts(og_landmarks3d, focal, cxy)
+                # Proj points
+                colormap_blue = plt.cm.Blues
+                for num, lin in enumerate(np.linspace(0, 0.9, len(og_proj_geo[0, :, 0]))):
+                    plt.scatter(og_proj_geo[0, num, 0].detach().cpu(),
+                                og_proj_geo[0, num, 1].detach().cpu(),
+                                color=colormap_blue(lin),
+                                s=10)
+
+            rott_geo = model_3dmm.forward_geo(id, expr, R, t)
+            landmarks3d = model_3dmm.get_3dlandmarks(id, expr, R, t, focal, cxy).cpu()
+            proj_geo = proj_pts(landmarks3d, focal, cxy)
+            np.save(os.path.join(save_root, 'landmarks', f'ldk_{i}.npy'), proj_geo[0].detach().cpu().numpy())
+
+            # # Proj points
+            # colormap_red = plt.cm.Reds
+            # for num, lin in enumerate(np.linspace(0, 0.9, len(proj_geo[0, :, 0]))):
+            #     plt.scatter(proj_geo[0, num, 0].detach().cpu(),
+            #                 proj_geo[0, num, 1].detach().cpu(),
+            #                 color=colormap_red(lin),
+            #                 s=10)
+
+            # plt.show()
+
+            sel_pts3D = vertices2landmarks(rott_geo,
+                                           model_3dmm.faces_tensor,
+                                           model_3dmm.full_lmk_faces_idx.repeat(1, 1),
+                                           model_3dmm.full_lmk_bary_coords.repeat(1, 1, 1))
+
+            render_imgs = renderer(rott_geo.float(), model_3dmm.faces_tensor)
+            img_arr = render_imgs[0, :, :, :3].cpu().numpy()
+            # img_arr = img_arr / 255.
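+            # Assumption: the FLAME renderer returns float RGB in [0, 1]; the
+            # live code below therefore scales by 255 before the uint8 cast.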
+ # img_arr = img_arr.astype(np.uint8) + # cv2.imwrite(os.path.join(save_root, f'flame_{i}.jpg'), img_arr) + + # # plt.imshow(img_arr) + # # plt.show() + from PIL import Image + img_arr *= 255 + img_arr = img_arr.astype(np.uint8) + im = Image.fromarray(img_arr) + im.save(os.path.join(save_root, f'flame_{i}.jpg')) + + # make videos + # generate corresponding audio, reused for all results + tmp_audio_path = os.path.join(save_root, 'tmp.wav') + tmp_audio_clip = audio[: np.int32(nframe * opt.sample_rate / opt.FPS)] + sf.write(tmp_audio_path, tmp_audio_clip, opt.sample_rate) + final_path = os.path.join(save_root, test_name + '.avi') + write_video_with_audio(tmp_audio_path, final_path, 'flame_', h, w) + + if os.path.exists(tmp_audio_path): + os.remove(tmp_audio_path) + _img_paths = list(map(lambda x:str(x), list(Path(save_root).glob('*.jpg')))) + for i in tqdm(range(len(_img_paths)), desc='deleting intermediate images'): + os.remove(_img_paths[i]) + + print('Finish!') diff --git a/motion-gan-pipeline/motion-generation/test_dataset.py b/motion-gan-pipeline/motion-generation/test_dataset.py new file mode 100644 index 0000000..1660b0e --- /dev/null +++ b/motion-gan-pipeline/motion-generation/test_dataset.py @@ -0,0 +1,42 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import yaml +from options.train_audio2headpose_options import TrainOptions +from datasets import create_dataset +import argparse +from util.cfgnode import CfgNode + + +if __name__ == '__main__': + # Load default options + Train_parser = TrainOptions() + opt = Train_parser.parse() # get training options + + # Load config + parser = argparse.ArgumentParser() + parser.add_argument('--config', default='config/Audio2Headpose_MTC.yml', help="person name, e.g. Obama1, Obama2, May, Nadella, McStay") + inopt = parser.parse_args() + # TODO: make config + conf_debug = 'config/Audio2Headpose_Ted.yml' + # with open(inopt.config) as f: + with open(conf_debug) as f: + cfg_dict = yaml.load(f, Loader=yaml.FullLoader) + cfg = CfgNode(cfg_dict) + + # Overwrite with config + opt.name = cfg.experiment.name + opt.dataset_mode = cfg.experiment.dataset_mode + opt.dataroot = cfg.experiment.dataroot + opt.dataset_names = cfg.experiment.dataset_names + + # Load data + dataset = create_dataset(opt) # create a dataset given opt.dataset_mode and other options + dataset_size = len(dataset) # get the number of images in the dataset. 
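+    # Each item yielded below is an (audio features, history, target) tuple, as
+    # unpacked in the loop at the end of this script.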
+    print('The number of training points = %d' % dataset_size)  # 80650
+
+    dataset = dataset.dataset
+
+    for i in range(len(dataset)):
+        A2Hsamples, history_info, target_info = dataset[i]
+        # print(f'Item: {i}, audio: {A2Hsamples.size()}, history: {history_info.size()}, target: {target_info.size()}')
diff --git a/motion-gan-pipeline/motion-generation/train.py b/motion-gan-pipeline/motion-generation/train.py
new file mode 100644
index 0000000..57236b2
--- /dev/null
+++ b/motion-gan-pipeline/motion-generation/train.py
@@ -0,0 +1,88 @@
+
+import os
+import time
+import yaml
+from options.train_audio2headpose_options import TrainOptions
+from datasets import create_dataset
+from models import create_model
+from util.visualizer import Visualizer
+from tqdm import tqdm
+import argparse
+from util.cfgnode import CfgNode
+
+
+if __name__ == '__main__':
+    # Load default options
+    Train_parser = TrainOptions()
+    opt = Train_parser.parse()  # get training options
+
+    # Load config
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--config', default='config/Audio2Headpose_Mine.yml', help="person name, e.g. Obama1, Obama2, May, Nadella, McStay")
+    inopt = parser.parse_args()
+    # TODO: make config
+    with open(inopt.config) as f:
+        cfg_dict = yaml.load(f, Loader=yaml.FullLoader)
+        cfg = CfgNode(cfg_dict)
+
+    # Overwrite with config
+    opt.name = cfg.experiment.name
+    opt.dataset_mode = cfg.experiment.dataset_mode
+    opt.dataroot = cfg.experiment.dataroot
+    opt.dataset_names = cfg.experiment.dataset_names
+    opt.FPS = cfg.experiment.fps
+
+    # save to the disk
+    Train_parser.print_options(opt)
+
+    # Load data
+    dataset = create_dataset(opt)  # create a dataset given opt.dataset_mode and other options
+    dataset_size = len(dataset)    # get the number of samples in the dataset
+    print('The number of training samples = %d' % dataset_size)
+
+    model = create_model(opt)      # create a model given opt.model and other options
+    model.setup(opt)               # regular setup: load and print networks; create schedulers
+    visualizer = Visualizer(opt)   # create a visualizer that displays/saves images and plots
+    total_iters = 0                # the total number of training iterations
+
+    for epoch in tqdm(range(opt.epoch_count, opt.n_epochs + opt.n_epochs_decay + 1)):  # outer loop for different epochs; we save the model by <epoch_count>, <epoch_count>+<save_latest_freq>, ...
+        epoch_start_time = time.time()  # timer for the entire epoch
+        iter_data_time = time.time()    # timer for data loading per iteration
+        epoch_iter = 0                  # the number of training iterations in the current epoch, reset to 0 every epoch
+        # visualizer.reset()            # reset the visualizer: make sure it saves the results to HTML at least once every epoch
+        model.update_learning_rate()    # update learning rates at the beginning of every epoch
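+        # total_iters counts samples across the entire run and gates the
+        # print_freq logging below; epoch_iter resets every epoch. Both are
+        # advanced by batch_size rather than by 1.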
+        for i, data in enumerate(dataset):  # inner loop within one epoch
+
+            iter_start_time = time.time()  # timer for computation per iteration
+            if total_iters % opt.print_freq == 0:
+                t_data = iter_start_time - iter_data_time
+
+            total_iters += opt.batch_size
+            epoch_iter += opt.batch_size
+            model.set_input(data)        # unpack data from dataset and apply preprocessing
+            model.optimize_parameters()  # calculate loss functions, get gradients, update network weights
+
+            if total_iters % opt.print_freq == 0:  # print training losses and save logging information to the disk
+                losses = model.get_current_losses()
+                t_comp = (time.time() - iter_start_time) / opt.batch_size
+                visualizer.print_current_errors(epoch, total_iters, losses, t_data)
+                if opt.display_id > 0:
+                    visualizer.plot_current_errors(losses, total_iters)
+
+            iter_data_time = time.time()
+
+        if epoch % opt.save_epoch_freq == 0:  # cache our model every <save_epoch_freq> epochs
+            print('saving the model at the end of epoch %d, iters %d' % (epoch, total_iters))
+            model.save_networks('epoch_%d' % (epoch))
+
+        # early stopping
+        losses = model.get_current_losses()
+        if losses['GMM'] <= 0:
+            print('Non-positive GMM loss at the end of epoch %d, total iters %d' % (epoch, total_iters))
+            print('Stopping Training')
+            break
+
+        print('End of epoch %d / %d \t Time Taken: %d sec' % (epoch, opt.n_epochs + opt.n_epochs_decay, time.time() - epoch_start_time))
+
+    model.save_networks('latest')
\ No newline at end of file
diff --git a/motion-gan-pipeline/motion-generation/transfer.py b/motion-gan-pipeline/motion-generation/transfer.py
new file mode 100644
index 0000000..61d366b
--- /dev/null
+++ b/motion-gan-pipeline/motion-generation/transfer.py
@@ -0,0 +1,231 @@
+# SPDX-License-Identifier: MIT
+# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details
+
+from cmath import inf
+import os
+from os.path import join
+import yaml
+import argparse
+import numpy as np
+from options.test_audio2headpose_options import TestOptions
+from datasets import create_dataset
+from models import create_model
+from util.cfgnode import CfgNode
+import cv2
+import librosa
+import soundfile as sf
+from tqdm import tqdm
+import matplotlib.pyplot as plt
+import subprocess
+from pathlib import Path
+import torch
+from funcs import utils
+import sys
+sys.path.append('../preprocessing/')
+from face_tracking.FLAME.FLAME import FLAME
+from face_tracking.FLAME.config import cfg as FLAME_cfg
+from face_tracking.FLAME.lbs import vertices2landmarks
+from face_tracking.render_3dmm import Render_FLAME
+from face_tracking.util import *
+from finetune import finetune
+from PIL import Image
+
+
+def write_video_with_audio(audio_path, output_path, prefix='pred_', h=512, w=512, fps=25):
+    # Note: like its twin in test.py, this relies on the module-level globals `save_root` and `nframe`.
+    fourcc = cv2.VideoWriter_fourcc(*'DIVX')
+    # fourcc = cv2.VideoWriter_fourcc('M', 'J', 'P', 'G')
+    video_tmp_path = join(save_root, 'tmp.avi')
+    out = cv2.VideoWriter(video_tmp_path, fourcc, fps, (w, h))
+    for j in tqdm(range(nframe), position=0, desc='writing video'):
+        img = cv2.imread(join(save_root, prefix + str(j+1) + '.jpg'))
+        out.write(img)
+    out.release()
+    cmd = 'ffmpeg -y -i "' + video_tmp_path + '" -i "' + \
+        audio_path + '" -codec copy -shortest "' + output_path + '"'
+    subprocess.call(cmd, shell=True)
+
+    os.remove(video_tmp_path)  # remove the temporary video
+
+
+if __name__ == '__main__':
+
+    # load args
+    parser = argparse.ArgumentParser()
+    parser.add_argument('--dataroot', required=True)
+    parser.add_argument('--dataset_names', required=True)
+    parser.add_argument('--target_name', required=True)
parser.add_argument('--checkpoint_dir', required=True) + parser.add_argument('--out_dir', required=True) + + inopt = parser.parse_args() + + # Load default options + Test_parser = TestOptions() + opt = Test_parser.parse() # get training options + + # Overwrite with config + opt.phase = '' + opt.dataset_mode = 'audio' # for testing + opt.dataroot = os.path.join(inopt.dataroot, 'audio') + opt.dataset_names = inopt.dataset_names + opt.FPS = 25 + opt.serial_batches = True + opt.train_test_split = False + + # save to the disk + Test_parser.print_options(opt) + + # Set device + device = torch.device('cuda:{}'.format(opt.gpu_ids[0])) if len( + opt.gpu_ids) > 0 else torch.device('cpu') + + # Load Model + print('---------- Loading Model: {} -------------'.format(opt.task)) + checkpoint_path = os.path.join(inopt.checkpoint_dir, inopt.target_name, 'latest_Audio2Headpose.pkl') + print(checkpoint_path) + if not os.path.isfile(checkpoint_path): + print('No fine-tuned checkpoint for headposes found..') + finetune(name=inopt.target_name, + dataroot=inopt.dataroot, + dataset_names=inopt.target_name, + target_checkpoints=inopt.checkpoint_dir, + checkpoint_path=os.path.join(inopt.checkpoint_dir, 'Audio2Headpose_TED_checkpoint.pkl'), + fps=25) + + Audio2Headpose = create_model(opt) + Audio2Headpose.load_checkpoint(checkpoint_path) + Audio2Headpose.eval() + + # Load data + dataset = create_dataset(opt) + fit_data_path = os.path.join(inopt.dataroot, 'video', inopt.target_name, 'track_params.pt') + fit_data = torch.load(fit_data_path) + + stop_at = inf + + for iter, file_index in enumerate(dataset.dataset.valid_clips): + + if iter >= stop_at: + break + + test_name = dataset.dataset.clip_names[dataset.dataset.valid_clips[file_index]] + #____________________________________________________# + print('Generating movement for video: ', test_name) + + # Get features + audio_features = dataset.dataset.audio_features[file_index] + # audio_expr = torch.from_numpy(np.stack([np.load(os.path.join(inopt.out_dir,'audio_expr', ae)).flatten() for ae in os.listdir(os.path.join(inopt.out_dir,'audio_expr'))], axis=0)) + # audio_expr = torch.from_numpy(np.stack([np.load(os.path.join(inopt.dataroot, 'video', inopt.target_name,'deca_expr', ae)).flatten() for ae in sorted(os.listdir(os.path.join(inopt.dataroot, 'video', inopt.target_name,'deca_expr')))], axis=0)) + + # Audio2Headpose + print('Headpose inference...') + + # Set history headposes as first tracked headposes + init_rot = fit_data['euler'][0].numpy() + init_trans = fit_data['trans'][0].numpy() - fit_data['trans'].mean(axis=0).numpy() + pre_headpose = np.concatenate([init_rot, init_trans], axis=0) + pre_headpose = np.concatenate([pre_headpose, np.zeros_like(pre_headpose)], axis=0) # headpose and velocity: velocity is zero: still head. 
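+    # pre_headpose therefore holds 12 values: [rx, ry, rz, tx, ty, tz] plus six
+    # zeroed velocities, presumably matching opt.A2H_wavenet_input_channels
+    # (the zero-history variant below allocates exactly that many entries).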
+ + # Set history headposes as zeros + # pre_headpose = np.zeros(opt.A2H_wavenet_input_channels, np.float32) + + print('Initial Headpose for prediction: \n', pre_headpose) + + # Generate headposes + pred_Head = Audio2Headpose.generate_sequences(audio_features, pre_headpose, fill_zero=True, sigma_scale=0.3, opt=opt) + + # Build FLAME and Renderer + h = fit_data['h'] + w = fit_data['w'] + focal = fit_data['focal'] + id_para = fit_data['id'] + # expr_para = audio_expr + # nframe = min(expr_para.size()[0], pred_Head.shape[0]) + + nframe = pred_Head.shape[0] + + + cxy = torch.tensor((w / 2.0, h / 2.0), dtype=torch.float).cpu() + + model_3dmm = FLAME(FLAME_cfg.model) + renderer = Render_FLAME(model_3dmm.faces_tensor, focal, h, w, 1, device) + + # Smooth predicted headpose + Head_smooth_sigma = [5, 10] + pred_headpose = utils.headpose_smooth(pred_Head[:, :6], Head_smooth_sigma, method='gaussian').astype(np.float32) + # Head_smooth_sigma = [10, 25] + # pred_headpose = utils.headpose_smooth(pred_Head[:, :6], Head_smooth_sigma , method='median').astype(np.float32) + + # Postprocessing + if fit_data: + + og_rot = fit_data['euler'].numpy() + og_trans = fit_data['trans'].numpy() + + mean_translation = og_trans.mean(axis=0) + + pred_headpose[:, 3:] += mean_translation + # pred_headpose[:, 0] += 180 + + # Make images + save_root = inopt.out_dir + os.makedirs(os.path.join(save_root, 'render'), exist_ok=True) + os.makedirs(os.path.join(save_root, 'landmarks'), exist_ok=True) + + np.save(os.path.join(save_root, 'headposes.npy'), pred_headpose) + + deca_expr_path = os.path.join(inopt.dataroot, 'video', inopt.target_name,'deca_expr') + audio_expr_path = os.path.join(inopt.out_dir, 'audio_expr') + + for i in tqdm(range(nframe), desc='rendering: '): + R = torch.from_numpy(pred_headpose[i, 0:3]).unsqueeze( + 0).to(device).double() + t = torch.from_numpy(pred_headpose[i, 3:6]).unsqueeze( + 0).to(device).double() + + # Zero translation + # t = torch.tensor((0, 0, -5)).unsqueeze( + # 0).to(device).double() + + id = id_para.to(device).double() + expr = torch.from_numpy(np.load(os.path.join(audio_expr_path, '%05d.npy' % i))).to(device).double() + + ''' + # Original Rotation and Translation used for debug + if 0: + og_R = torch.from_numpy(og_rot[i]).unsqueeze(0).to(device).double() + og_t = torch.from_numpy(og_trans[i]).unsqueeze( + 0).to(device).double() + print( + f'OG Rotation euler: \n{og_R.data}, \nOG trans: \n{og_t.data}') + print( + f'\nPred Rotation euler: \n{R.data},\nPred trans: \n{t.data}') + + og_rott_geo = model_3dmm.forward_geo(id, expr, og_R, og_t) + + og_landmarks3d = model_3dmm.get_3dlandmarks( + id, expr, og_R, og_t, focal, cxy).cpu() + og_proj_geo = proj_pts(og_landmarks3d, focal, cxy) + # Porj points + colormap_blue = plt.cm.Blues + for num, lin in enumerate(np.linspace(0, 0.9, len(og_proj_geo[0, :, 0]))): + plt.scatter(og_proj_geo[0, num, 0].detach().cpu(), + og_proj_geo[0, num, 1].detach().cpu(), + color=colormap_blue(lin), + s=10) + + ''' + + rott_geo = model_3dmm.forward_geo(id, expr, R, t) + landmarks3d = model_3dmm.get_3dlandmarks_forehead(id, expr, R, t, focal, cxy).cpu() + proj_geo = proj_pts(landmarks3d, focal, cxy) + np.savetxt(os.path.join(save_root, 'landmarks', '%05d.lms' % i), proj_geo[0].detach().cpu().numpy()) + render_imgs = renderer(rott_geo.float(), model_3dmm.faces_tensor) + img_arr = render_imgs[0, :, :, :3].cpu().numpy() + img_arr *= 255 + img_arr = img_arr.astype(np.uint8) + im = Image.fromarray(img_arr) + im.save(os.path.join(save_root, 'render','%05d.png' % i)) + + 
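+    # Outputs at this point: headposes.npy plus one landmarks/%05d.lms and one
+    # render/%05d.png per frame, presumably consumed by the image-to-image
+    # stage of the pipeline.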
print('Finish!') diff --git a/motion-gan-pipeline/motion-generation/util/cfgnode.py b/motion-gan-pipeline/motion-generation/util/cfgnode.py new file mode 100644 index 0000000..8895187 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/util/cfgnode.py @@ -0,0 +1,507 @@ +""" +Define a class to hold configurations. + +Borrows and merges stuff from YACS, fvcore, and detectron2 +https://github.com/rbgirshick/yacs +https://github.com/facebookresearch/fvcore/ +https://github.com/facebookresearch/detectron2/ + +""" + +import copy +import importlib.util +import io +import logging +import os +from ast import literal_eval +from typing import Optional + +import yaml + +# File exts for yaml +_YAML_EXTS = {"", ".yml", ".yaml"} +# File exts for python +_PY_EXTS = {".py"} + +# CfgNodes can only contain a limited set of valid types +_VALID_TYPES = {tuple, list, str, int, float, bool} + +# Valid file object types +_FILE_TYPES = (io.IOBase,) + +# Logger +logger = logging.getLogger(__name__) + + +class CfgNode(dict): + r"""CfgNode is a `node` in the configuration `tree`. It's a simple wrapper around a `dict` and supports access to + `attributes` via `keys`. + """ + + IMMUTABLE = "__immutable__" + DEPRECATED_KEYS = "__deprecated_keys__" + RENAMED_KEYS = "__renamed_keys__" + NEW_ALLOWED = "__new_allowed__" + + def __init__( + self, + init_dict: Optional[dict] = None, + key_list: Optional[list] = None, + new_allowed: Optional[bool] = False, + ): + r""" + Args: + init_dict (dict): A dictionary to initialize the `CfgNode`. + key_list (list[str]): A list of names that index this `CfgNode` from the root. Currently, only used for + logging. + new_allowed (bool): Whether adding a new key is allowed when merging with other `CfgNode` objects. + + """ + + # Recursively convert nested dictionaries in `init_dict` to config tree. + init_dict = {} if init_dict is None else init_dict + key_list = [] if key_list is None else key_list + init_dict = self._create_config_tree_from_dict(init_dict, key_list) + super(CfgNode, self).__init__(init_dict) + + # Control the immutability of the `CfgNode`. + self.__dict__[CfgNode.IMMUTABLE] = False + # Support for deprecated options. + # If you choose to remove support for an option in code, but don't want to change all of the config files + # (to allow for deprecated config files to run), you can add the full config key as a string to this set. + self.__dict__[CfgNode.DEPRECATED_KEYS] = set() + # Support for renamed options. + # If you rename an option, record the mapping from the old name to the new name in this dictionary. Optionally, + # if the type also changed, you can make this value a tuple that specifies two things: the renamed key, and the + # instructions to edit the config file. + self.__dict__[CfgNode.RENAMED_KEYS] = { + # 'EXAMPLE.OLD.KEY': 'EXAMPLE.NEW.KEY', # Dummy example + # 'EXAMPLE.OLD.KEY': ( # A more complex example + # 'EXAMPLE.NEW.KEY', + # "Also convert to a tuple, eg. 'foo' -> ('foo', ) or " + # + "'foo.bar' -> ('foo', 'bar')" + # ), + } + + # Allow new attributes after initialization. + self.__dict__[CfgNode.NEW_ALLOWED] = new_allowed + + @classmethod + def _create_config_tree_from_dict(cls, init_dict: dict, key_list: list): + r"""Create a configuration tree using the input dict. Any dict-like objects inside `init_dict` will be treated + as new `CfgNode` objects. + + Args: + init_dict (dict): Input dictionary, to create config tree from. + key_list (list): A list of names that index this `CfgNode` from the root. Currently only used for logging. 
+ + """ + + d = copy.deepcopy(init_dict) + for k, v in d.items(): + if isinstance(v, dict): + # Convert dictionary to CfgNode + d[k] = cls(v, key_list=key_list + [k]) + else: + # Check for valid leaf type or nested CfgNode + _assert_with_logging( + _valid_type(v, allow_cfg_node=False), + "Key {} with value {} is not a valid type; valid types: {}".format( + ".".join(key_list + [k]), type(v), _VALID_TYPES + ), + ) + return d + + def __getattr__(self, name: str): + if name in self: + return self[name] + else: + raise AttributeError(name) + + def __setattr__(self, name: str, value): + if self.is_frozen(): + raise AttributeError( + "Attempted to set {} to {}, but CfgNode is immutable".format( + name, value + ) + ) + + _assert_with_logging( + name not in self.__dict__, + "Invalid attempt to modify internal CfgNode state: {}".format(name), + ) + + _assert_with_logging( + _valid_type(value, allow_cfg_node=True), + "Invalid type {} for key {}; valid types = {}".format( + type(value), name, _VALID_TYPES + ), + ) + + self[name] = value + + def __str__(self): + def _indent(s_, num_spaces): + s = s_.split("\n") + if len(s) == 1: + return s_ + first = s.pop(0) + s = [(num_spaces * " ") + line for line in s] + s = "\n".join(s) + s = first + "\n" + s + return s + + r = "" + s = [] + for k, v in sorted(self.items()): + separator = "\n" if isinstance(v, CfgNode) else " " + attr_str = "{}:{}{}".format(str(k), separator, str(v)) + attr_str = _indent(attr_str, 2) + s.append(attr_str) + r += "\n".join(s) + return r + + def __repr__(self): + return "{}({})".format(self.__class__.__name__, super(CfgNode, self).__repr__()) + + def dump(self, **kwargs): + r"""Dump CfgNode to a string. + """ + + def _convert_to_dict(cfg_node, key_list): + if not isinstance(cfg_node, CfgNode): + _assert_with_logging( + _valid_type(cfg_node), + "Key {} with value {} is not a valid type; valid types: {}".format( + ".".join(key_list), type(cfg_node), _VALID_TYPES + ), + ) + return cfg_node + else: + cfg_dict = dict(cfg_node) + for k, v in cfg_dict.items(): + cfg_dict[k] = _convert_to_dict(v, key_list + [k]) + return cfg_dict + + self_as_dict = _convert_to_dict(self, []) + return yaml.safe_dump(self_as_dict, **kwargs) + + def merge_from_file(self, cfg_filename: str): + r"""Load a yaml config file and merge it with this CfgNode. + + Args: + cfg_filename (str): Config file path. + + """ + with open(cfg_filename, "r") as f: + cfg = self.load_cfg(f) + self.merge_from_other_cfg(cfg) + + def merge_from_other_cfg(self, cfg_other): + r"""Merge `cfg_other` into the current `CfgNode`. + + Args: + cfg_other + """ + _merge_a_into_b(cfg_other, self, self, []) + + def merge_from_list(self, cfg_list: list): + r"""Merge config (keys, values) in a list (eg. from commandline) into this `CfgNode`. + + Eg. `cfg_list = ['FOO.BAR', 0.5]`. 
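+        A hypothetical call: `cfg.merge_from_list(['experiment.fps', 30])` sets `cfg.experiment.fps` to 30; the key must already exist in the config.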
+ """ + _assert_with_logging( + len(cfg_list) % 2 == 0, + "Override list has odd lengths: {}; it must be a list of pairs".format( + cfg_list + ), + ) + root = self + for full_key, v in zip(cfg_list[0::2], cfg_list[1::2]): + if root.key_is_deprecated(full_key): + continue + if root.key_is_renamed(full_key): + root.raise_key_rename_error(full_key) + key_list = full_key.split(".") + d = self + for subkey in key_list[:-1]: + _assert_with_logging( + subkey in d, "Non-existent key: {}".format(full_key) + ) + d = d[subkey] + subkey = key_list[-1] + _assert_with_logging(subkey in d, "Non-existent key: {}".format(full_key)) + value = self._decode_cfg_value(v) + value = _check_and_coerce_cfg_value_type(value, d[subkey], subkey, full_key) + d[subkey] = value + + def freeze(self): + r"""Make this `CfgNode` and all of its children immutable. """ + self._immutable(True) + + def defrost(self): + r"""Make this `CfgNode` and all of its children mutable. """ + self._immutable(False) + + def is_frozen(self): + r"""Return mutability. """ + return self.__dict__[CfgNode.IMMUTABLE] + + def _immutable(self, is_immutable: bool): + r"""Set mutability and recursively apply to all nested `CfgNode` objects. + + Args: + is_immutable (bool): Whether or not the `CfgNode` and its children are immutable. + + """ + self.__dict__[CfgNode.IMMUTABLE] = is_immutable + # Recursively propagate state to all children. + for v in self.__dict__.values(): + if isinstance(v, CfgNode): + v._immutable(is_immutable) + for v in self.values(): + if isinstance(v, CfgNode): + v._immutable(is_immutable) + + def clone(self): + r"""Recursively copy this `CfgNode`. """ + return copy.deepcopy(self) + + def register_deprecated_key(self, key: str): + r"""Register key (eg. `FOO.BAR`) a deprecated option. When merging deprecated keys, a warning is generated and + the key is ignored. + """ + + _assert_with_logging( + key not in self.__dict__[CfgNode.DEPRECATED_KEYS], + "key {} is already registered as a deprecated key".format(key), + ) + self.__dict__[CfgNode.DEPRECATED_KEYS].add(key) + + def register_renamed_key( + self, old_name: str, new_name: str, message: Optional[str] = None + ): + r"""Register a key as having been renamed from `old_name` to `new_name`. When merging a renamed key, an + exception is thrown alerting the user to the fact that the key has been renamed. + """ + + _assert_with_logging( + old_name not in self.__dict__[CfgNode.RENAMED_KEYS], + "key {} is already registered as a renamed cfg key".format(old_name), + ) + value = new_name + if message: + value = (new_name, message) + self.__dict__[CfgNode.RENAMED_KEYS][old_name] = value + + def key_is_deprecated(self, full_key: str): + r"""Test if a key is deprecated. """ + if full_key in self.__dict__[CfgNode.DEPRECATED_KEYS]: + logger.warning("deprecated config key (ignoring): {}".format(full_key)) + return True + return False + + def key_is_renamed(self, full_key: str): + r"""Test if a key is renamed. """ + return full_key in self.__dict__[CfgNode.RENAMED_KEYS] + + def raise_key_rename_error(self, full_key: str): + new_key = self.__dict__[CfgNode.RENAMED_KEYS][full_key] + if isinstance(new_key, tuple): + msg = " Note: " + new_key[1] + new_key = new_key[0] + else: + msg = "" + raise KeyError( + "Key {} was renamed to {}; please update your config.{}".format( + full_key, new_key, msg + ) + ) + + def is_new_allowed(self): + return self.__dict__[CfgNode.NEW_ALLOWED] + + @classmethod + def load_cfg(cls, cfg_file_obj_or_str): + r"""Load a configuration into the `CfgNode`. 
+
+        Args:
+            cfg_file_obj_or_str (str or cfg compatible object): Supports loading from:
+                - A file object backed by a YAML file.
+                - A file object backed by a Python source file that exports an attribute "cfg" (dict or `CfgNode`).
+                - A string that can be parsed as valid YAML.
+
+        """
+        _assert_with_logging(
+            isinstance(cfg_file_obj_or_str, _FILE_TYPES + (str,)),
+            "Expected first argument to be of type {} or {}, but got {}".format(
+                _FILE_TYPES, str, type(cfg_file_obj_or_str)
+            ),
+        )
+        if isinstance(cfg_file_obj_or_str, str):
+            return cls._load_cfg_from_yaml_str(cfg_file_obj_or_str)
+        elif isinstance(cfg_file_obj_or_str, _FILE_TYPES):
+            return cls._load_cfg_from_file(cfg_file_obj_or_str)
+        else:
+            raise NotImplementedError("Impossible to reach here (unless there's a bug)")
+
+    @classmethod
+    def _load_cfg_from_file(cls, file_obj):
+        r"""Load a config from a YAML file or a Python source file. """
+        _, file_ext = os.path.splitext(file_obj.name)
+        if file_ext in _YAML_EXTS:
+            return cls._load_cfg_from_yaml_str(file_obj.read())
+        elif file_ext in _PY_EXTS:
+            return cls._load_cfg_py_source(file_obj.name)
+        else:
+            raise Exception(
+                "Attempt to load from an unsupported filetype {}; only {} supported".format(
+                    file_ext, _YAML_EXTS.union(_PY_EXTS)  # was missing the first format argument
+                )
+            )
+
+    @classmethod
+    def _load_cfg_from_yaml_str(cls, str_obj):
+        r"""Load a config from a YAML string encoding. """
+        cfg_as_dict = yaml.safe_load(str_obj)
+        return cls(cfg_as_dict)
+
+    @classmethod
+    def _load_cfg_py_source(cls, filename):
+        r"""Load a config from a Python source file. """
+        module = _load_module_from_file("yacs.config.override", filename)
+        _assert_with_logging(
+            hasattr(module, "cfg"),
+            "Python module from file {} must export a 'cfg' attribute".format(filename),
+        )
+        VALID_ATTR_TYPES = {dict, CfgNode}
+        _assert_with_logging(
+            type(module.cfg) in VALID_ATTR_TYPES,
+            "Imported module's 'cfg' attribute must be in {} but is {}".format(
+                VALID_ATTR_TYPES, type(module.cfg)
+            ),
+        )
+        return cls(module.cfg)
+
+    @classmethod
+    def _decode_cfg_value(cls, value):
+        r"""Decodes a raw config value (eg. from a yaml config file or commandline argument) into a Python object.
+
+        If `value` is a dict, it will be interpreted as a new `CfgNode`.
+        If `value` is a str, it will be evaluated as a literal.
+        Otherwise, it is returned as is.
+
+        """
+        # Configs parsed from raw yaml will contain dictionary keys that need to be converted to `CfgNode` objects.
+        if isinstance(value, dict):
+            return cls(value)
+        # All remaining processing is only applied to strings.
+        if not isinstance(value, str):
+            return value
+        # Try to interpret `value` as a: string, number, tuple, list, dict, bool, or None
+        try:
+            value = literal_eval(value)
+        # The following two excepts allow `value` to pass through when it represents a string.
+        # The type of `value` is always a string (before calling `literal_eval`), but sometimes it *represents* a
+        # string and other times a data structure, like a list. In the case that `value` represents a str, what we
+        # got back from the yaml parser is `foo` *without quotes* (so, not `"foo"`). `literal_eval` is ok with `"foo"`,
+        # but will raise a `ValueError` if given `foo`. In other cases, like paths (`val = 'foo/bar'`), `literal_eval`
+        # will raise a `SyntaxError`.
+        except ValueError:
+            pass
+        except SyntaxError:
+            pass
+        return value
+
+
+# Keep this function in global scope, for backward compatibility.
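+# A typical (assumed) round trip with this class:
+#   cfg = CfgNode.load_cfg(open('config/Audio2Headpose_Ted.yml'))
+#   cfg.merge_from_list(['experiment.fps', 25])
+#   cfg.freeze()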
+load_cfg = CfgNode.load_cfg + + +def _valid_type(value, allow_cfg_node: Optional[bool] = False): + return (type(value) in _VALID_TYPES) or ( + allow_cfg_node and isinstance(value, CfgNode) + ) + + +def _merge_a_into_b(a: CfgNode, b: CfgNode, root: CfgNode, key_list: list): + r"""Merge `CfgNode` `a` into `CfgNode` `b`, clobbering the options in `b` wherever they are also specified in `a`. + """ + _assert_with_logging( + isinstance(a, CfgNode), + "`a` (cur type {}) must be an instance of {}".format(type(a), CfgNode), + ) + _assert_with_logging( + isinstance(b, CfgNode), + "`b` (cur type {}) must be an instance of {}".format(type(b), CfgNode), + ) + + for k, v_ in a.items(): + full_key = ".".join(key_list + [k]) + v = copy.deepcopy(v_) + v = b._decode_cfg_value(v) + + if k in b: + v = _check_and_coerce_cfg_value_type(v, b[k], k, full_key) + # Recursively merge dicts. + if isinstance(v, CfgNode): + try: + _merge_a_into_b(v, b[k], root, key_list + [k]) + except BaseException: + raise + else: + b[k] = v + elif b.is_new_allowed(): + b[k] = v + else: + if root.key_is_deprecated(full_key): + continue + elif root.key_is_renamed(full_key): + root.raise_key_rename_error(full_key) + else: + raise KeyError("Non-existent config key: {}".format(full_key)) + + +def _check_and_coerce_cfg_value_type(replacement, original, key, full_key): + r"""Checks that `replacement`, which is intended to replace `original` is of the right type. The type is correct if + it matches exactly or is one of a few cases in which the type can easily be coerced. + """ + + original_type = type(original) + replacement_type = type(replacement) + if replacement_type == original_type: + return replacement + + # If replacement and original types match, cast replacement from `from_type` to `to_type`. + def _conditional_cast(from_type, to_type): + if replacement_type == from_type and original_type == to_type: + return True, to_type(replacement) + else: + return False, None + + # Conditional casts. + # list <-> tuple + casts = [(tuple, list), (list, tuple)] + for (from_type, to_type) in casts: + converted, converted_value = _conditional_cast(from_type, to_type) + if converted: + return converted_value + + raise ValueError( + "Type mismatch ({} vs. {} with values ({} vs. {}) for config key: {}".format( + original_type, replacement_type, original, replacement, full_key + ) + ) + + +def _assert_with_logging(cond, msg): + if not cond: + logger.debug(msg) + assert cond, msg + + +def _load_module_from_file(name, filename): + spec = importlib.util.spec_from_file_location(name, filename) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + return module diff --git a/motion-gan-pipeline/motion-generation/util/flow_viz.py b/motion-gan-pipeline/motion-generation/util/flow_viz.py new file mode 100644 index 0000000..41bd013 --- /dev/null +++ b/motion-gan-pipeline/motion-generation/util/flow_viz.py @@ -0,0 +1,132 @@ +# Flow visualization code used from https://github.com/tomrunia/OpticalFlow_Visualization + + +# MIT License +# +# Copyright (c) 2018 Tom Runia +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to conditions. 
+# +# Author: Tom Runia +# Date Created: 2018-08-03 + +import numpy as np + +def make_colorwheel(): + """ + Generates a color wheel for optical flow visualization as presented in: + Baker et al. "A Database and Evaluation Methodology for Optical Flow" (ICCV, 2007) + URL: http://vision.middlebury.edu/flow/flowEval-iccv07.pdf + + Code follows the original C++ source code of Daniel Scharstein. + Code follows the the Matlab source code of Deqing Sun. + + Returns: + np.ndarray: Color wheel + """ + + RY = 15 + YG = 6 + GC = 4 + CB = 11 + BM = 13 + MR = 6 + + ncols = RY + YG + GC + CB + BM + MR + colorwheel = np.zeros((ncols, 3)) + col = 0 + + # RY + colorwheel[0:RY, 0] = 255 + colorwheel[0:RY, 1] = np.floor(255*np.arange(0,RY)/RY) + col = col+RY + # YG + colorwheel[col:col+YG, 0] = 255 - np.floor(255*np.arange(0,YG)/YG) + colorwheel[col:col+YG, 1] = 255 + col = col+YG + # GC + colorwheel[col:col+GC, 1] = 255 + colorwheel[col:col+GC, 2] = np.floor(255*np.arange(0,GC)/GC) + col = col+GC + # CB + colorwheel[col:col+CB, 1] = 255 - np.floor(255*np.arange(CB)/CB) + colorwheel[col:col+CB, 2] = 255 + col = col+CB + # BM + colorwheel[col:col+BM, 2] = 255 + colorwheel[col:col+BM, 0] = np.floor(255*np.arange(0,BM)/BM) + col = col+BM + # MR + colorwheel[col:col+MR, 2] = 255 - np.floor(255*np.arange(MR)/MR) + colorwheel[col:col+MR, 0] = 255 + return colorwheel + + +def flow_uv_to_colors(u, v, convert_to_bgr=False): + """ + Applies the flow color wheel to (possibly clipped) flow components u and v. + + According to the C++ source code of Daniel Scharstein + According to the Matlab source code of Deqing Sun + + Args: + u (np.ndarray): Input horizontal flow of shape [H,W] + v (np.ndarray): Input vertical flow of shape [H,W] + convert_to_bgr (bool, optional): Convert output image to BGR. Defaults to False. + + Returns: + np.ndarray: Flow visualization image of shape [H,W,3] + """ + flow_image = np.zeros((u.shape[0], u.shape[1], 3), np.uint8) + colorwheel = make_colorwheel() # shape [55x3] + ncols = colorwheel.shape[0] + rad = np.sqrt(np.square(u) + np.square(v)) + a = np.arctan2(-v, -u)/np.pi + fk = (a+1) / 2*(ncols-1) + k0 = np.floor(fk).astype(np.int32) + k1 = k0 + 1 + k1[k1 == ncols] = 0 + f = fk - k0 + for i in range(colorwheel.shape[1]): + tmp = colorwheel[:,i] + col0 = tmp[k0] / 255.0 + col1 = tmp[k1] / 255.0 + col = (1-f)*col0 + f*col1 + idx = (rad <= 1) + col[idx] = 1 - rad[idx] * (1-col[idx]) + col[~idx] = col[~idx] * 0.75 # out of range + # Note the 2-i => BGR instead of RGB + ch_idx = 2-i if convert_to_bgr else i + flow_image[:,:,ch_idx] = np.floor(255 * col) + return flow_image + + +def flow_to_image(flow_uv, clip_flow=None, convert_to_bgr=False): + """ + Expects a two dimensional flow image of shape. + + Args: + flow_uv (np.ndarray): Flow UV image of shape [H,W,2] + clip_flow (float, optional): Clip maximum of flow values. Defaults to None. + convert_to_bgr (bool, optional): Convert output image to BGR. Defaults to False. 
+ + Returns: + np.ndarray: Flow visualization image of shape [H,W,3] + """ + assert flow_uv.ndim == 3, 'input flow must have three dimensions' + assert flow_uv.shape[2] == 2, 'input flow must have shape [H,W,2]' + if clip_flow is not None: + flow_uv = np.clip(flow_uv, 0, clip_flow) + u = flow_uv[:,:,0] + v = flow_uv[:,:,1] + rad = np.sqrt(np.square(u) + np.square(v)) + rad_max = np.max(rad) + epsilon = 1e-5 + u = u / (rad_max + epsilon) + v = v / (rad_max + epsilon) + return flow_uv_to_colors(u, v, convert_to_bgr) \ No newline at end of file diff --git a/motion-gan-pipeline/motion-generation/util/get_data.py b/motion-gan-pipeline/motion-generation/util/get_data.py new file mode 100644 index 0000000..1efa19f --- /dev/null +++ b/motion-gan-pipeline/motion-generation/util/get_data.py @@ -0,0 +1,110 @@ +from __future__ import print_function +import os +import tarfile +import requests +from warnings import warn +from zipfile import ZipFile +from bs4 import BeautifulSoup +from os.path import abspath, isdir, join, basename + + +class GetData(object): + """A Python script for downloading CycleGAN or pix2pix datasets. + + Parameters: + technique (str) -- One of: 'cyclegan' or 'pix2pix'. + verbose (bool) -- If True, print additional information. + + Examples: + >>> from util.get_data import GetData + >>> gd = GetData(technique='cyclegan') + >>> new_data_path = gd.get(save_path='./datasets') # options will be displayed. + + Alternatively, You can use bash scripts: 'scripts/download_pix2pix_model.sh' + and 'scripts/download_cyclegan_model.sh'. + """ + + def __init__(self, technique='cyclegan', verbose=True): + url_dict = { + 'pix2pix': 'http://efrosgans.eecs.berkeley.edu/pix2pix/datasets/', + 'cyclegan': 'https://people.eecs.berkeley.edu/~taesung_park/CycleGAN/datasets' + } + self.url = url_dict.get(technique.lower()) + self._verbose = verbose + + def _print(self, text): + if self._verbose: + print(text) + + @staticmethod + def _get_options(r): + soup = BeautifulSoup(r.text, 'lxml') + options = [h.text for h in soup.find_all('a', href=True) + if h.text.endswith(('.zip', 'tar.gz'))] + return options + + def _present_options(self): + r = requests.get(self.url) + options = self._get_options(r) + print('Options:\n') + for i, o in enumerate(options): + print("{0}: {1}".format(i, o)) + choice = input("\nPlease enter the number of the " + "dataset above you wish to download:") + return options[int(choice)] + + def _download_data(self, dataset_url, save_path): + if not isdir(save_path): + os.makedirs(save_path) + + base = basename(dataset_url) + temp_save_path = join(save_path, base) + + with open(temp_save_path, "wb") as f: + r = requests.get(dataset_url) + f.write(r.content) + + if base.endswith('.tar.gz'): + obj = tarfile.open(temp_save_path) + elif base.endswith('.zip'): + obj = ZipFile(temp_save_path, 'r') + else: + raise ValueError("Unknown File Type: {0}.".format(base)) + + self._print("Unpacking Data...") + obj.extractall(save_path) + obj.close() + os.remove(temp_save_path) + + def get(self, save_path, dataset=None): + """ + + Download a dataset. + + Parameters: + save_path (str) -- A directory to save the data to. + dataset (str) -- (optional). A specific dataset to download. + Note: this must include the file extension. + If None, options will be presented for you + to choose from. + + Returns: + save_path_full (str) -- the absolute path to the downloaded data. 
+ + """ + if dataset is None: + selected_dataset = self._present_options() + else: + selected_dataset = dataset + + save_path_full = join(save_path, selected_dataset.split('.')[0]) + + if isdir(save_path_full): + warn("\n'{0}' already exists. Voiding Download.".format( + save_path_full)) + else: + self._print('Downloading Data...') + url = "{0}/{1}".format(self.url, selected_dataset) + self._download_data(url, save_path=save_path) + + return abspath(save_path_full) diff --git a/motion-gan-pipeline/motion-generation/util/html.py b/motion-gan-pipeline/motion-generation/util/html.py new file mode 100644 index 0000000..10f2fbd --- /dev/null +++ b/motion-gan-pipeline/motion-generation/util/html.py @@ -0,0 +1,67 @@ +import dominate +from dominate.tags import * +import os + + +class HTML: + def __init__(self, web_dir, title, reflesh=0): + self.title = title + self.web_dir = web_dir + self.img_dir = os.path.join(self.web_dir, 'images') + if not os.path.exists(self.web_dir): + os.makedirs(self.web_dir) + if not os.path.exists(self.img_dir): + os.makedirs(self.img_dir) + # print(self.img_dir) + + self.doc = dominate.document(title=title) + if reflesh > 0: + with self.doc.head: + meta(http_equiv="reflesh", content=str(reflesh)) + + def get_image_dir(self): + return self.img_dir + + def add_header(self, str): + with self.doc: + h3(str) + + def add_table(self, border=1): + self.t = table(border=border, style="table-layout: fixed;") + self.doc.add(self.t) + + def add_images(self, ims, txts, links, width=400, height=0): + self.add_table() + with self.t: + with tr(): + for im, txt, link in zip(ims, txts, links): + with td(style="word-wrap: break-word;", halign="center", valign="top"): + with p(): + with a(href=os.path.join('images', link)): + if height != 0: + img(style="width:%dpx;height:%dpx" % (width, height), src=os.path.join('images', im)) + else: + img(style="width:%dpx" % (width), src=os.path.join('images', im)) + br() + p(txt) + + def save(self): + html_file = '%s/index.html' % self.web_dir + f = open(html_file, 'wt') + f.write(self.doc.render()) + f.close() + + +if __name__ == '__main__': + html = HTML('web/', 'test_html') + html.add_header('hello world') + + ims = [] + txts = [] + links = [] + for n in range(4): + ims.append('image_%d.jpg' % n) + txts.append('text_%d' % n) + links.append('image_%d.jpg' % n) + html.add_images(ims, txts, links) + html.save() diff --git a/motion-gan-pipeline/motion-generation/util/image_pool.py b/motion-gan-pipeline/motion-generation/util/image_pool.py new file mode 100644 index 0000000..152ef5b --- /dev/null +++ b/motion-gan-pipeline/motion-generation/util/image_pool.py @@ -0,0 +1,32 @@ +import random +import numpy as np +import torch +from torch.autograd import Variable +class ImagePool(): + def __init__(self, pool_size): + self.pool_size = pool_size + if self.pool_size > 0: + self.num_imgs = 0 + self.images = [] + + def query(self, images): + if self.pool_size == 0: + return images + return_images = [] + for image in images.data: + image = torch.unsqueeze(image, 0) + if self.num_imgs < self.pool_size: + self.num_imgs = self.num_imgs + 1 + self.images.append(image) + return_images.append(image) + else: + p = random.uniform(0, 1) + if p > 0.5: + random_id = random.randint(0, self.pool_size-1) + tmp = self.images[random_id].clone() + self.images[random_id] = image + return_images.append(tmp) + else: + return_images.append(image) + return_images = Variable(torch.cat(return_images, 0)) + return return_images diff --git 
a/motion-gan-pipeline/motion-generation/util/util.py b/motion-gan-pipeline/motion-generation/util/util.py new file mode 100644 index 0000000..6bd1dab --- /dev/null +++ b/motion-gan-pipeline/motion-generation/util/util.py @@ -0,0 +1,93 @@ +from __future__ import print_function +import torch +import numpy as np +from PIL import Image +import inspect, re +import numpy as np +import os +import collections +from PIL import Image +import cv2 +from collections import OrderedDict + +from . import flow_viz + + + +# Converts a Tensor into a Numpy array +# |imtype|: the desired type of the converted numpy array +def tensor2im(image_tensor, imtype=np.uint8, normalize=True): + if isinstance(image_tensor, list): + image_numpy = [] + for i in range(len(image_tensor)): + image_numpy.append(tensor2im(image_tensor[i], imtype, normalize)) + return image_numpy + + if isinstance(image_tensor, torch.autograd.Variable): + image_tensor = image_tensor.data + if len(image_tensor.size()) == 5: + image_tensor = image_tensor[0, -1] + if len(image_tensor.size()) == 4: + image_tensor = image_tensor[0] + image_tensor = image_tensor[:3] + image_numpy = image_tensor.cpu().float().numpy() + if normalize: + image_numpy = (np.transpose(image_numpy, (1, 2, 0)) + 1) / 2.0 * 255.0 + else: + image_numpy = np.transpose(image_numpy, (1, 2, 0)) * 255.0 + #image_numpy = (np.transpose(image_numpy, (1, 2, 0)) * std + mean) * 255.0 + image_numpy = np.clip(image_numpy, 0, 255) + if image_numpy.shape[2] == 1: + image_numpy = image_numpy[:,:,0] + return image_numpy.astype(imtype) + + +def tensor2flow(flo, imtype=np.uint8): + flo = flo[0].permute(1,2,0).cpu().detach().numpy() + flo = flow_viz.flow_to_image(flo) + return flo + + +def add_dummy_to_tensor(tensors, add_size=0): + if add_size == 0 or tensors is None: return tensors + if isinstance(tensors, list): + return [add_dummy_to_tensor(tensor, add_size) for tensor in tensors] + + if isinstance(tensors, torch.Tensor): + dummy = torch.zeros_like(tensors)[:add_size] + tensors = torch.cat([dummy, tensors]) + return tensors + +def remove_dummy_from_tensor(tensors, remove_size=0): + if remove_size == 0 or tensors is None: return tensors + if isinstance(tensors, list): + return [remove_dummy_from_tensor(tensor, remove_size) for tensor in tensors] + + if isinstance(tensors, torch.Tensor): + tensors = tensors[remove_size:] + return tensors + +def save_image(image_numpy, image_path): + image_pil = Image.fromarray(image_numpy) + image_pil.save(image_path) + +def print_numpy(x, val=True, shp=False): + x = x.astype(np.float64) + if shp: + print('shape,', x.shape) + if val: + x = x.flatten() + print('mean = %3.3f, min = %3.3f, max = %3.3f, median = %3.3f, std=%3.3f' % ( + np.mean(x), np.min(x), np.max(x), np.median(x), np.std(x))) + +def mkdirs(paths): + if isinstance(paths, list) and not isinstance(paths, str): + for path in paths: + mkdir(path) + else: + mkdir(paths) + +def mkdir(path): + if not os.path.exists(path): + os.makedirs(path) + diff --git a/motion-gan-pipeline/motion-generation/util/visualizer.py b/motion-gan-pipeline/motion-generation/util/visualizer.py new file mode 100644 index 0000000..fd25a6e --- /dev/null +++ b/motion-gan-pipeline/motion-generation/util/visualizer.py @@ -0,0 +1,149 @@ +### Copyright (C) 2017 NVIDIA Corporation. All rights reserved. +### Licensed under the CC BY-NC-SA 4.0 license (https://creativecommons.org/licenses/by-nc-sa/4.0/legalcode). +import numpy as np +import os +import time +from . import util +from . 
import html +import scipy.misc +try: + from StringIO import StringIO # Python 2.7 +except ImportError: + from io import BytesIO # Python 3.x + +class Visualizer(): + def __init__(self, opt): + self.opt = opt + self.tf_log = opt.tf_log + self.use_html = opt.isTrain and not opt.no_html + self.win_size = opt.display_winsize + self.name = opt.name + if opt.isTrain: + if self.tf_log: + from torch.utils.tensorboard import SummaryWriter + # import tensorflow as tf + # self.tf = tf + self.log_dir = os.path.join(opt.checkpoints_dir, opt.name, 'logs') + # self.writer = tf.summary.FileWriter(self.log_dir) + self.writer = SummaryWriter(self.log_dir, flush_secs=1) + + if self.use_html: + self.web_dir = os.path.join(opt.checkpoints_dir, opt.name, 'web') + self.img_dir = os.path.join(self.web_dir, 'images') + print('create web directory %s...' % self.web_dir) + util.mkdirs([self.web_dir, self.img_dir]) + + self.log_name = os.path.join(opt.checkpoints_dir, opt.name, 'loss_log.txt') + with open(self.log_name, "a") as log_file: + now = time.strftime("%c") + log_file.write('================ Training Loss (%s) ================\n' % now) + + # |visuals|: dictionary of images to display or save + def display_current_results(self, visuals, epoch, step): +# if self.tf_log: # show images in tensorboard output +# img_summaries = [] +# for label, image_numpy in visuals.items(): +# # Write the image to a string +# try: +# s = StringIO() +# except: +# s = BytesIO() +# scipy.misc.toimage(image_numpy).save(s, format="jpeg") +# # Create an Image object +# img_sum = self.tf.Summary.Image(encoded_image_string=s.getvalue(), height=image_numpy.shape[0], width=image_numpy.shape[1]) +# # Create a Summary value +# img_summaries.append(self.tf.Summary.Value(tag=label, image=img_sum)) +# +# # Create and write Summary +# summary = self.tf.Summary(value=img_summaries) +# self.writer.add_summary(summary, step) + + if self.use_html: # save images to a html file + for label, image_numpy in visuals.items(): + if isinstance(image_numpy, list): + for i in range(len(image_numpy)): + img_path = os.path.join(self.img_dir, 'epoch%.3d_%s_%d.jpg' % (epoch, label, i)) + util.save_image(image_numpy[i], img_path) + else: + img_path = os.path.join(self.img_dir, 'epoch%.3d_%s.jpg' % (epoch, label)) + util.save_image(image_numpy, img_path) + + # update website + webpage = html.HTML(self.web_dir, 'Experiment name = %s' % self.name, reflesh=1) + for n in range(epoch, 0, -1): + webpage.add_header('epoch [%d]' % n) + ims = [] + txts = [] + links = [] + + for label, image_numpy in visuals.items(): + if isinstance(image_numpy, list): + for i in range(len(image_numpy)): + img_path = 'epoch%.3d_%s_%d.jpg' % (n, label, i) + ims.append(img_path) + txts.append(label+str(i)) + links.append(img_path) + else: + img_path = 'epoch%.3d_%s.jpg' % (n, label) + ims.append(img_path) + txts.append(label) + links.append(img_path) + if len(ims) < 5: + webpage.add_images(ims, txts, links, width=self.win_size) + else: + num = int(round(len(ims)/2.0)) + webpage.add_images(ims[:num], txts[:num], links[:num], width=self.win_size) + webpage.add_images(ims[num:], txts[num:], links[num:], width=self.win_size) + webpage.save() + + # errors: dictionary of error labels and values + def plot_current_errors(self, errors, step): + if self.tf_log: + for tag, value in errors.items(): +# summary = self.tf.Summary(value=[self.tf.Summary.Value(tag=tag, simple_value=value)]) +# self.writer.add_summary(summary, step) + self.writer.add_scalar(tag, value, step) + + + # errors: same format as 
|errors| of plotCurrentErrors + def print_current_errors(self, epoch, i, errors, t): + message = '(epoch: %d, iters: %d, time: %.3f) ' % (epoch, i, t) + for k, v in sorted(errors.items()): + if v != 0: + message += '%s: %.3f ' % (k, v) + + print(message) + with open(self.log_name, "a") as log_file: + log_file.write('%s\n' % message) + + # save image to the disk + def save_images(self, image_dir, visuals, image_path, webpage=None): + dirname = os.path.basename(os.path.dirname(image_path[0])) + image_dir = os.path.join(image_dir, dirname) + util.mkdir(image_dir) + name = image_path +# name = os.path.basename(image_path[0]) +# name = os.path.splitext(name)[0] + + if webpage is not None: + webpage.add_header(name) + ims, txts, links = [], [], [] + + for label, image_numpy in visuals.items(): + save_ext = 'jpg' + image_name = '%s_%s.%s' % (label, name, save_ext) + save_path = os.path.join(image_dir, image_name) + util.save_image(image_numpy, save_path) + + if webpage is not None: + ims.append(image_name) + txts.append(label) + links.append(image_name) + if webpage is not None: + webpage.add_images(ims, txts, links, width=self.win_size) + + def vis_print(self, message): + print(message) + with open(self.log_name, "a") as log_file: + log_file.write('%s\n' % message) + diff --git a/motion-gan-pipeline/motion_gan_backend_api/__init__.py b/motion-gan-pipeline/motion_gan_backend_api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/motion-gan-pipeline/motion_gan_backend_api/app.py b/motion-gan-pipeline/motion_gan_backend_api/app.py new file mode 100644 index 0000000..f99351c --- /dev/null +++ b/motion-gan-pipeline/motion_gan_backend_api/app.py @@ -0,0 +1,123 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import logging +from http import HTTPStatus +from typing import List, Dict + +from fastapi import UploadFile, Depends, Query, HTTPException +from fastapi.responses import FileResponse +from mtc_api_utils.api import BaseApi +from mtc_api_utils.clients.firebase_client import firebase_user_auth + +from avatar_backend_api.api_types import ApiRoute, InferenceQueueTask, AvatarModelRequest +from avatar_backend_api.clients.io_client import IoClient, IoClientException +from avatar_backend_api.models.avatar_base_model import AvatarBaseModel +from avatar_backend_api.models.mock_avatar_model import MockAvatarModel +from motion_gan_backend_api.config import MotionGanConfig +from motion_gan_backend_api.motion_gan_inference_queue import MotionGanInferenceQueue +from motion_gan_backend_api.motion_gan_model import MotionGANModel + +MotionGanConfig.print_config() + +user_auth = firebase_user_auth(config=MotionGanConfig) + +io_client = IoClient(audio_base_path=MotionGanConfig.audio_input_dir, video_base_path=MotionGanConfig.video_output_dir, user_dir_mode=False) +motion_gan_model: AvatarBaseModel = MockAvatarModel(io_client=io_client) if MotionGanConfig.mockBackend else MotionGANModel(io_client=io_client) + +inference_queue = MotionGanInferenceQueue( + model=motion_gan_model, + io_client=io_client, + audio_input_dir=MotionGanConfig.audio_input_dir, + video_output_dir=MotionGanConfig.video_output_dir, +) + +app = BaseApi(is_ready=motion_gan_model.is_ready, config=MotionGanConfig) +tags = ["Avatar Model"] + +logger = logging.Logger("App") + + +@app.get( + path=ApiRoute.video_ids.value, + response_model=List[str], + tags=tags, + dependencies=[Depends(user_auth)], +) +async def list_video_ids() -> List[str]: + return 
[path.split("/")[-1].split(".")[0] for path in io_client.video.list_videos()]
+
+
+@app.get(
+    path=ApiRoute.video.value,
+    status_code=HTTPStatus.ACCEPTED,
+    response_class=FileResponse,
+    tags=tags,
+    dependencies=[Depends(user_auth)],
+)
+async def get_video(video_id: str = Query(alias="videoId")):  # The `-> FileResponse` return annotation is omitted because it does not work with Python 3.7
+    try:
+        return io_client.video.read_from_disk(video_id=video_id)
+
+    except IoClientException:
+        if io_client.audio.file_exists(video_id=video_id):
+            raise HTTPException(
+                status_code=HTTPStatus.PROCESSING,
+                detail=f"Audio with video_id={video_id} is currently being processed",
+            )
+
+        else:
+            raise HTTPException(
+                status_code=HTTPStatus.NOT_FOUND,
+                detail=f"There is no file available for video_id={video_id}. Either it has not yet been generated or it was removed in the meantime",
+            )
+
+
+@app.post(
+    path=ApiRoute.inference.value,
+    status_code=HTTPStatus.ACCEPTED,
+    response_model=AvatarModelRequest,
+    tags=tags,
+    dependencies=[Depends(user_auth)],
+)
+async def model_inference(audio: UploadFile, request_metadata: AvatarModelRequest = Depends()) -> AvatarModelRequest:
+    print(f"Processing request for video_id={request_metadata.video_id}")
+
+    if not request_metadata.video_id:
+        request_metadata.video_id = audio.filename
+        print(f"No video_id was provided, falling back to video_id={request_metadata.video_id}")
+
+    io_client.audio.save_to_disk(audio=audio, video_id=request_metadata.video_id)
+
+    inference_queue.add_task(
+        task=InferenceQueueTask(
+            request=request_metadata,
+        ),
+    )
+
+    return request_metadata
+
+
+@app.delete(
+    path=ApiRoute.video.value,
+    response_model=str,
+    tags=tags,
+    dependencies=[Depends(user_auth)],
+)
+async def delete_video(video_id: str = Query(alias="videoId")) -> str:
+    try:
+        io_client.video.delete_file(video_id=video_id)
+    except FileNotFoundError:
+        raise HTTPException(status_code=HTTPStatus.NOT_FOUND, detail=f"No files with video_id={video_id} found")
+
+    return f"The files with video_id={video_id} were removed successfully"
+
+
+@app.get(
+    path=ApiRoute.avatars.value,
+    response_model=Dict[str, str],
+    tags=["Avatars"],
+    dependencies=[Depends(user_auth)],
+)
+async def available_avatars() -> Dict[str, str]:
+    return MotionGANModel.available_avatars()
diff --git a/motion-gan-pipeline/motion_gan_backend_api/config.py b/motion-gan-pipeline/motion_gan_backend_api/config.py
new file mode 100644
index 0000000..b9f914d
--- /dev/null
+++ b/motion-gan-pipeline/motion_gan_backend_api/config.py
@@ -0,0 +1,25 @@
+# SPDX-License-Identifier: MIT
+# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details
+
+import os.path
+
+from mtc_api_utils.config import Config
+
+from avatar_backend_api.config import AvatarConfig
+
+
+class MotionGanConfig(AvatarConfig):
+    mockBackend: bool = Config.parse_env_var("MOCK_BACKEND", default="False", convert_type=bool)
+
+    # Data configs
+    db_filepath: str = Config.parse_env_var("DB_FILEPATH", default="/tmp/tinyDB/neuralVoices.json")
+
+    motion_gan_base_dir: str = "/app"  # Location of the motion-gan code dir in the execution environment
+
+    data_base_dir: str = Config.parse_env_var("DATA_BASE_DIR", default="/tmp/motionGan")
+
+    audio_input_dir: str = os.path.join(data_base_dir, "input_data", "audio")
+    video_output_dir: str = os.path.join(data_base_dir, "output_data", "videos")
+
+    checkpoints_dir: str = os.path.join(data_base_dir, "checkpoints")
+    video_input_dir: str = os.path.join(data_base_dir, "input_data", "video")
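+
+    # Illustrative override (deployment assumption, not enforced here):
+    # exporting DATA_BASE_DIR=/data/motionGan before startup relocates every
+    # derived path above, e.g. audio_input_dir -> /data/motionGan/input_data/audio
diff --git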
a/motion-gan-pipeline/motion_gan_backend_api/motion_gan_inference_queue.py b/motion-gan-pipeline/motion_gan_backend_api/motion_gan_inference_queue.py
new file mode 100644
index 0000000..0b38db4
--- /dev/null
+++ b/motion-gan-pipeline/motion_gan_backend_api/motion_gan_inference_queue.py
@@ -0,0 +1,24 @@
+# SPDX-License-Identifier: MIT
+# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details
+
+from avatar_backend_api.background_tools.inference_queue import InferenceQueue, InferenceQueueTask
+from avatar_backend_api.clients.io_client import VIDEO_EXTENSION
+
+from motion_gan_backend_api.config import MotionGanConfig
+
+
+class MotionGanInferenceQueue(InferenceQueue):
+
+    def post_processing(self, task: InferenceQueueTask) -> None:
+        # Delete the input audio once inference has run
+        self.io_client.audio.delete_file(video_id=task.request.video_id)
+
+        # Rename the generated video so it can be looked up by its video_id
+        if not MotionGanConfig.mockBackend:
+            self.io_client.video.rename_file(
+                filename=f"{task.request.video_id}_to_{task.request.avatar.name}{VIDEO_EXTENSION}",
+                new_filename=task.request.video_id + VIDEO_EXTENSION,
+            )
diff --git a/motion-gan-pipeline/motion_gan_backend_api/motion_gan_model.py b/motion-gan-pipeline/motion_gan_backend_api/motion_gan_model.py
new file mode 100644
index 0000000..72676e6
--- /dev/null
+++ b/motion-gan-pipeline/motion_gan_backend_api/motion_gan_model.py
@@ -0,0 +1,51 @@
+# SPDX-License-Identifier: MIT
+# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details
+import os
+import subprocess
+from enum import Enum
+from typing import Dict
+
+from avatar_backend_api.api_types import InferenceQueueTask
+from avatar_backend_api.models.avatar_base_model import AvatarBaseModel
+from motion_gan_backend_api.config import MotionGanConfig
+
+
+class Pipeline(Enum):
+    value: str
+    full_pipeline = "full_pipeline.sh"
+    full_pipeline_enhancement = "full_pipeline_enhancement.sh"
+    full_pipeline_multiview = "full_pipeline_multiview.sh"
+
+
+class MotionGANModel(AvatarBaseModel):
+    def init_model(self):
+        # Check that either checkpoints or input video footage are available for each avatar
+        for avatar in MotionGanConfig.available_avatars.keys():
+            checkpoint_path = os.path.join(MotionGanConfig.checkpoints_dir, avatar)
+            video_path = os.path.join(MotionGanConfig.video_input_dir, avatar)
+
+            if not os.path.isdir(checkpoint_path):
+                if os.path.isdir(video_path):
+                    print(f"No checkpoints found for avatar {avatar}. 
Run inference on it to start training.")
+                else:
+                    raise ValueError(f"Could not find directory {checkpoint_path}, which is supposed to contain the checkpoint files for avatar {avatar}")
+
+        print("All expected avatar files are available, motionGan model is ready")
+
+    def inference(self, task: InferenceQueueTask, pipeline: Pipeline = Pipeline.full_pipeline) -> None:
+        subprocess.run(
+            args=["bash", pipeline.value, MotionGanConfig.data_base_dir, task.request.video_id, task.request.avatar.name],
+            cwd=MotionGanConfig.motion_gan_base_dir,  # Working directory from which to run the shell command
+            universal_newlines=True,  # Decode output
+            check=True,  # Throw exception if return code != 0
+        )
+        print(f"Inference completed for {task.request.video_id} - {task.request.avatar.name}")
+
+    @staticmethod
+    def available_avatars() -> Dict[str, str]:
+        with os.scandir(MotionGanConfig.checkpoints_dir) as dirs:
+            return {
+                avatar_dir.name: MotionGanConfig.avatar_short_name(avatar_name=avatar_dir.name)
+                for avatar_dir in dirs
+                if avatar_dir.is_dir()
+            }
diff --git a/motion-gan-pipeline/motion_gan_backend_api/tests/Test-2.wav b/motion-gan-pipeline/motion_gan_backend_api/tests/Test-2.wav
new file mode 100644
index 0000000..2debc29
Binary files /dev/null and b/motion-gan-pipeline/motion_gan_backend_api/tests/Test-2.wav differ
diff --git a/motion-gan-pipeline/motion_gan_backend_api/tests/__init__.py b/motion-gan-pipeline/motion_gan_backend_api/tests/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/motion-gan-pipeline/motion_gan_backend_api/tests/integration_test_client.py b/motion-gan-pipeline/motion_gan_backend_api/tests/integration_test_client.py
new file mode 100644
index 0000000..34185de
--- /dev/null
+++ b/motion-gan-pipeline/motion_gan_backend_api/tests/integration_test_client.py
@@ -0,0 +1,58 @@
+# SPDX-License-Identifier: MIT
+# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details
+
+from http import HTTPStatus
+from typing import List, BinaryIO, Tuple
+
+from mtc_api_utils.api import BaseApi
+from mtc_api_utils.clients.api_client import ApiClient
+from starlette.testclient import TestClient
+
+from avatar_backend_api.api_types import ApiRoute, AvatarModelRequest
+
+
+class AvatarIntegrationTestClient(ApiClient):
+
+    def __init__(self, test_app: BaseApi):
+        super().__init__(backend_url="", http_client=TestClient(app=test_app))
+
+    def list_video_ids(self) -> List[str]:
+        resp = self.http_client.get(
+            url=ApiRoute.video_ids.value,
+        )
+        resp.raise_for_status()
+
+        return resp.json()
+
+    def get_video(self, video_id: str) -> Tuple[int, bytes]:
+        resp = self.http_client.get(
+            url=ApiRoute.video.value,
+            params={"videoId": video_id},
+        )
+
+        if resp.status_code >= 300:
+            resp.raise_for_status()
+
+        return resp.status_code, resp.content
+
+    def post_audio(self, audio: BinaryIO, metadata: AvatarModelRequest) -> AvatarModelRequest:
+        resp = self.http_client.post(
+            url=ApiRoute.inference.value,
+            params=metadata.json_dict,
+            files={"audio": audio},
+        )
+
+        if resp.status_code >= 300:
+            print(f"text={resp.text}")
+            print(f"json={resp.json()}")
+
+            resp.raise_for_status()
+        assert resp.status_code == HTTPStatus.ACCEPTED
+
+        return AvatarModelRequest.parse_obj(resp.json())
+
+    def delete_video(self, video_id: str) -> str:
+        resp = self.http_client.delete(ApiRoute.video_url(backend_url="", video_id=video_id))
+        resp.raise_for_status()
+
+        return resp.text
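+
+
+# Minimal usage sketch (illustrative; the file name and video_id are assumptions):
+#   client = AvatarIntegrationTestClient(test_app=app)
+#   with open("ping.wav", "rb") as audio:
+#       client.post_audio(audio=audio, metadata=AvatarModelRequest(video_id="demo"))
+#   status_code, video_bytes = client.get_video(video_id="demo")
diff --git a/motion-gan-pipeline/motion_gan_backend_api/tests/ping.wav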
b/motion-gan-pipeline/motion_gan_backend_api/tests/ping.wav new file mode 100644 index 0000000..c0dc31c Binary files /dev/null and b/motion-gan-pipeline/motion_gan_backend_api/tests/ping.wav differ diff --git a/motion-gan-pipeline/motion_gan_backend_api/tests/test.mp3 b/motion-gan-pipeline/motion_gan_backend_api/tests/test.mp3 new file mode 100644 index 0000000..7517a57 Binary files /dev/null and b/motion-gan-pipeline/motion_gan_backend_api/tests/test.mp3 differ diff --git a/motion-gan-pipeline/motion_gan_backend_api/tests/test_integration.py b/motion-gan-pipeline/motion_gan_backend_api/tests/test_integration.py new file mode 100644 index 0000000..cfdef64 --- /dev/null +++ b/motion-gan-pipeline/motion_gan_backend_api/tests/test_integration.py @@ -0,0 +1,76 @@ +import os +from datetime import timedelta +from enum import Enum +from http import HTTPStatus +from time import sleep +from unittest import TestCase + +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import httpx + +from avatar_backend_api.api_types import AvatarModelRequest, BaseAvatar +from motion_gan_backend_api.app import app +from motion_gan_backend_api.tests.integration_test_client import AvatarIntegrationTestClient + + +class TestFile(Enum): + value: str + test_mp3 = "test.mp3" + ping_wav = "ping.wav" + voice_wav = "voice_test.wav" + frontend_wav = "Test-2.wav" + + +test_dir = os.path.dirname(os.path.realpath(__file__)) +test_file_name = TestFile.frontend_wav.value +test_file_path = os.path.join(test_dir, test_file_name) + +TEST_REQUEST = AvatarModelRequest( + video_id="id", + avatar=BaseAvatar.Jennifer_355_9415, +) + + +class TestIntegration(TestCase): + + def setUp(self) -> None: + self.client = AvatarIntegrationTestClient(test_app=app) + + self.client.wait_for_service_readiness(timeout=timedelta(minutes=15)) + + try: + self.client.delete_video(TEST_REQUEST.video_id) + except httpx.HTTPStatusError as e: + self.assertEqual(e.response.status_code, HTTPStatus.NOT_FOUND) + + print("Integration Test Setup Completed") + + def tearDown(self) -> None: + self.setUp() + + def test_inference(self): + with open(test_file_path, "rb") as test_file: + initial_video_ids = self.client.list_video_ids() + self.assertNotIn(TEST_REQUEST.video_id, initial_video_ids) + + try: + self.client.delete_video(TEST_REQUEST.video_id) + self.fail("Expected delete to fail because video is not present") + except httpx.HTTPStatusError as e: + self.assertEqual(e.response.status_code, HTTPStatus.NOT_FOUND) + + self.assertEqual(TEST_REQUEST, self.client.post_audio(audio=test_file, metadata=TEST_REQUEST)) + + while True: + status_code, file_bytes = self.client.get_video(video_id=TEST_REQUEST.video_id) + self.assertIn(status_code, [HTTPStatus.OK, HTTPStatus.PROCESSING]) + + if status_code == HTTPStatus.OK: + break + else: + sleep(1) + + print("Test Delete Video") + self.client.delete_video(video_id=TEST_REQUEST.video_id) diff --git a/motion-gan-pipeline/motion_gan_backend_api/tests/voice_test.wav b/motion-gan-pipeline/motion_gan_backend_api/tests/voice_test.wav new file mode 100644 index 0000000..95ce3fe Binary files /dev/null and b/motion-gan-pipeline/motion_gan_backend_api/tests/voice_test.wav differ diff --git a/motion-gan-pipeline/postprocessing.py b/motion-gan-pipeline/postprocessing.py new file mode 100644 index 0000000..b49a342 --- /dev/null +++ b/motion-gan-pipeline/postprocessing.py @@ -0,0 +1,186 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see 
AUTHORS.txt for details + +import os +from os.path import join +import argparse +import numpy as np +from tqdm import tqdm +import soundfile as sf +import matplotlib.pyplot as plt +import subprocess +import librosa +from pathlib import Path +from PIL import Image +import cv2 +import shutil + +def write_video_with_audio(save_root, audio_path, output_path, h=512, w=512, fps=25): + font = cv2.FONT_HERSHEY_SIMPLEX + fontScale = 1.0 + fontColor = (255, 255, 255) + thickness = 1 + lineType = 2 + text = 'This video has been manipulated.' + + label_width, label_height = cv2.getTextSize(text, font, 1, 2)[0] + print(label_width, label_height) + bottomLeftCornerOfText = (int((w - label_width) / 2), int(h - label_height - 20)) + + fourcc = cv2.VideoWriter_fourcc(*'DIVX') + # fourcc = cv2.VideoWriter_fourcc('M', 'J', 'P', 'G') + video_tmp_path = os.path.join(save_root, 'tmp.avi') + num_images = int(len(os.listdir(save_root))) + + out = cv2.VideoWriter(video_tmp_path, fourcc, fps, (w, h)) + for j in tqdm(range(num_images), position=0, desc='writing video'): + img = cv2.imread(os.path.join(save_root, '%05d.png' % j)) + img = cv2.putText( + img, + text, + bottomLeftCornerOfText, + font, + fontScale, + fontColor, + thickness, + lineType, + ) + out.write(img) + + out.release() + + print("ffmpeg version:") + subprocess.call("ffmpeg -version", shell=True) + + # TODO: Set proper size of video: [-s {w}x{h}] + cmd = f'ffmpeg -y -i "{video_tmp_path}" -i "{audio_path}" -vcodec libx264 -c:a aac "{output_path}"' + # "-pix_fmt yuv420p -profile:v baseline -level 3" + + print(f"ffmpeg cmd: {cmd}") + return_code = subprocess.call(cmd, shell=True) + + if return_code > 0: + raise Exception(f"An error occurred when assembling the output video: ffmpeg return_code={return_code}") + + # try: + # os.remove(video_tmp_path) # remove the template video + + # except FileNotFoundError: + # return + +def write_video_with_audio_old(save_root, audio_path, output_path, h=512, w=512, fps=25): + + cmd = 'ffmpeg -y -r ' + str(fps) + ' -s ' + f'{w}x{h}' + ' -i "' + f'{save_root}/%05d.png' + \ + '" -i "' + audio_path + '" -vcodec libx264 -c:a aac "' + output_path + '"' + subprocess.call(cmd, shell=True) + + +if __name__ == '__main__': + + # load args + parser = argparse.ArgumentParser() + parser.add_argument('--dataroot', required=True) + parser.add_argument('--name_audio', required=True) + parser.add_argument('--out_dir', required=True) + parser.add_argument('--fps', required=True, type=np.int32) + parser.add_argument('--sr', required=True, type=np.int32) + parser.add_argument('--all_vids', required=False, action='store_true', help='Generate all videos?') + parser.add_argument('--enhance', required=False, action='store_true', help='Enhance generated video. 
(Takes longer).') + parser.add_argument('--clean', required=False, action='store_true', help='Delete everything but the videos?') + parser.add_argument('--move_to_one_folder', required=False, action='store_true', help='Put generated video into videos folder.') + + inopt = parser.parse_args() + + # make videos + # generate corresponding audio, reused for all results + audio_path = os.path.join(inopt.dataroot, 'audio', inopt.name_audio, inopt.name_audio + '.wav') + audio, _ = librosa.load(audio_path, sr=inopt.sr) + + # make generated video + generated_frames_path = os.path.join(inopt.out_dir, 'generated_frames') + edges_path = os.path.join(inopt.out_dir, 'edges') + render_path = os.path.join(inopt.out_dir, 'render') + + try: + nframe = len(os.listdir(generated_frames_path)) + w, h = Image.open(os.path.join(generated_frames_path, os.listdir(generated_frames_path)[0])).size + + except (IndexError, FileNotFoundError) as e: + nframe = len(os.listdir(render_path)) + w, h = Image.open(os.path.join(render_path, os.listdir(render_path)[0])).size + + print(f'Image size: height {h} width {w}') + + tmp_audio_path = os.path.join(inopt.out_dir, 'tmp.wav') + tmp_audio_clip = audio[: np.int32(nframe * inopt.sr / inopt.fps)] + sf.write(tmp_audio_path, tmp_audio_clip, inopt.sr) + + # TODO: when deploying make one or the other + final_video_path = os.path.join(inopt.out_dir, 'generated_video.mp4') + write_video_with_audio(generated_frames_path, tmp_audio_path, final_video_path, h, w, inopt.fps) + + if inopt.enhance: + superes_frames_path = os.path.join(inopt.out_dir, 'superes/generated_frames') + w, h = Image.open(os.path.join(superes_frames_path, os.listdir(superes_frames_path)[0])).size + # make video of edges for comparison + video_superes_path = os.path.join(inopt.out_dir, 'generated_superes.mp4') + write_video_with_audio(superes_frames_path, tmp_audio_path, video_superes_path, h, w, inopt.fps) + + if inopt.all_vids: + # make video of edges for comparison + video_edges_path = os.path.join(inopt.out_dir, 'generated_edges.mp4') + write_video_with_audio(edges_path, tmp_audio_path, video_edges_path, h, w, inopt.fps) + + # make video of render for comparison + video_render_path = os.path.join(inopt.out_dir, 'generated_render.mp4') + write_video_with_audio(render_path, tmp_audio_path, video_render_path, h, w, inopt.fps) + + # make side to side video: render 2 edges + combined_path = os.path.join(inopt.out_dir, 'render2edges.mp4') + cmd = 'ffmpeg -y -i "' + video_render_path + '" -i "' + \ + video_edges_path + '" -filter_complex hstack -codec:v libx264 -crf 0 -preset veryslow "' + combined_path + '"' + subprocess.call(cmd, shell=True) + + # make side to side video: edges 2 video + combined_path = os.path.join(inopt.out_dir, 'edges2video.mp4') + cmd = 'ffmpeg -y -i "' + video_edges_path + '" -i "' + \ + final_video_path + '" -filter_complex hstack -codec:v libx264 -crf 0 -preset veryslow "' + combined_path + '"' + subprocess.call(cmd, shell=True) + + # make side to side video: render 2 edges 2 video + combined_path = os.path.join(inopt.out_dir, 'render2edges2video.mp4') + + cmd = 'ffmpeg -y -i "' + video_render_path + '" -i "' + \ + video_edges_path + '" -i "' + final_video_path + \ + '" -filter_complex "[0:v][1:v][2:v]hstack=inputs=3[v]" -map "[v]" -map 1:a -codec:v libx264 -crf 0 -preset veryslow "' + combined_path + '"' + + subprocess.call(cmd, shell=True) + + if os.path.exists(tmp_audio_path): + os.remove(tmp_audio_path) + + #TODO: move generated video to /videos/audio_to_video.mp4 + if 
inopt.move_to_one_folder:
+        parent, name = os.path.split(inopt.out_dir)
+        shutil.move(final_video_path, os.path.join(parent, 'videos', name + '.mp4'))
+
+        if inopt.clean:
+            shutil.rmtree(inopt.out_dir)
+
+    # delete subfolders
+    else:
+        if inopt.clean:
+            # clean all subfolders
+            subfolders = [f.path for f in os.scandir(inopt.out_dir) if f.is_dir()]
+            for sub in subfolders:
+                shutil.rmtree(sub)
+
+            # remove headposes
+            try:
+                os.remove(os.path.join(inopt.out_dir, 'headposes.npy'))
+
+            except FileNotFoundError:
+                pass
+
+
+    print('Finished!')
diff --git a/motion-gan-pipeline/preprocessing/README.md b/motion-gan-pipeline/preprocessing/README.md
new file mode 100644
index 0000000..e3f8ecf
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/README.md
@@ -0,0 +1,158 @@
+# Video Preprocessing
+
+This is a Python repository for video processing. \
+This collection of techniques can be used to process any video file and extract numerous useful features. The preprocessing is project-independent and can be used for any computer vision application that requires video processing.
+
+## Installation
+
+Use the package manager [pip](https://pip.pypa.io/en/stable/) to install the requirements.
+
+```bash
+pip install -r requirements.txt
+```
+## Available Processing Techniques
+Each processing step can be run independently via its reference number:
+
+0. Deepspeech Features \
+Extract [Deepspeech](https://arxiv.org/abs/1412.5567) features from the original video's audio signal.
+
+1. Frames Extraction \
+Save all frames of the input video.
+
+2. Landmark Detection \
+Extract [FAN](https://github.com/1adrianb/face-alignment) 2D facial landmarks from all video frames.
+
+3. Head Pose Estimation \
+Run the [Deca](https://github.com/YadiraF/DECA) tracker on all frames to extract [FLAME](https://flame.is.tue.mpg.de) morphable model parameters. Then fit a rotation matrix and translation vector to express the face position in each frame.
+
+4. Audio Expressions \
+Generate DECA facial expressions from Deepspeech audio features.
+
+5. Background Matting \
+Run [RVM](https://github.com/PeterL1n/RobustVideoMatting) to remove the background from each frame.
+
+6. Extract Meshes \
+Create FLAME meshes for each frame of the video.
+
+7. Save Parameters \
+Create a JSON file with all transformation parameters. Used for NeRF training.
+
+8. Speech To Text \
+Create a text file with a transcript of the audio file.
+
+9. Body Tracking \
+Use [Mediapipe](https://google.github.io/mediapipe/solutions/pose.html) for body pose tracking.
+
+10. Emotion Detection \
+Run [EMOCA](https://github.com/radekd91/emoca) for per-frame emotion detection.
+
+11. Edge Detection \
+Create edge images from FAN facial landmarks and Mediapipe body landmarks. Used as input for generative models (GAN).
+
+12. Noise Reduction \
+Clean the audio file by removing noise.
+
+13. Optical Flow \
+Use [FlowNet](https://github.com/NVIDIA/flownet2-pytorch) to extract optical flow between consecutive frames.
+
+## Usage
+
+You can run each processing step with the following command:
+```bash
+python preprocessing.py --dataroot $DATAROOT \
+                        --name $NAME \
+                        --target_fps $TARGET_FPS \
+                        --preprocessing_type $TYPE \
+                        --step $STEP \
+                        --use_DECA
+```
+
+Where:
+- ```$DATAROOT```: path to the data folder.
+- ```$NAME```: name of the video.
+- ```$TARGET_FPS```: the video's frames per second (suggested: 25).
+- ```$TYPE```: preprocessing type: 'audio' or 'video'.
+- ```$STEP```: preprocessing step to run.
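+
+For instance, to run the first three steps in sequence on a video named `video_1` (names and paths here are illustrative):
+
+```bash
+for STEP in 0 1 2; do
+    python preprocessing.py --dataroot Data_folder/video/ \
+                            --name video_1 \
+                            --target_fps 25 \
+                            --preprocessing_type video \
+                            --step $STEP \
+                            --use_DECA
+done
+```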
+
+The data should be organised in the following structure:
+
+```bash
+Data_folder
+├── audio                   # Folder containing n audio files
+│   ├── audio_1
+│   │   └──audio_1.wav
+│   ...
+│   └── audio_n
+│       └──audio_n.wav
+└── video                   # Folder containing m video files
+    ├── video_1
+    │   └──video_1.mp4
+    ...
+    └── video_m
+        └──video_m.mp4
+
+```
+Example: \
+To run step 0 of the preprocessing on video_1, run the following command:
+```bash
+python preprocessing.py --dataroot Data_folder/video/ \
+                        --name video_1 \
+                        --target_fps 25 \
+                        --preprocessing_type video \
+                        --step 0 \
+                        --use_DECA
+```
+
+## Audio-Driven Video Synthesis Usage
+
+To run the preprocessing required by the Audio-Driven Video Synthesis project, use the following commands:
+
+Preprocessing for the input video:
+```bash
+bash process_video.sh $INPUTDATAPATH/video $NAME_VIDEO $FPS $SAMPLERATE
+```
+Preprocessing for the input audio:
+```bash
+bash process_audio.sh $INPUTDATAPATH/audio $NAME_AUDIO $FPS $SAMPLERATE
+```
+
+Where:
+- ```$INPUTDATAPATH```: path to the input data folder.
+- ```$NAME_VIDEO```: name of the video.
+- ```$NAME_AUDIO```: name of the audio.
+- ```$FPS```: the video's frames per second (suggested: 25).
+- ```$SAMPLERATE```: the audio's sample rate.
+
+These calls are executed automatically by the complete pipeline script.
+
+
+## Output Folder Structure
+Running the previous commands produces the following data structure:
+
+```bash
+Video_Name
+├── audio_expr          # Contains Audio Expressions: .npy
+├── audio_feature       # Contains Deepspeech Features: .npy
+├── body_pose           # Contains Mediapipe Landmarks: .npy
+├── cropped             # Contains cropped frames used for training: .png
+├── debug               # Debug folder
+│   ├── debug_mixed     # Overlay images, original + FLAME render: .jpg
+│   ├── debug_render    # FLAME render: .jpg
+│   ├── opticalflow     # Optical flow visualisation: .png
+│   ├── proj_landmarks  # 2D projected FAN landmarks: .lms
+│   └── proj_landmarks_img  # Visualisation of projected landmarks: .png
+├── deca_expr           # DECA expressions: .npy
+├── edges               # Edge Images: .png
+├── expr_masks          # DECA mask: .jpg
+├── frames              # Extracted frames: .jpg
+├── landmarks           # FAN landmarks: .lms
+├── matting             # Frames without background: .png
+├── opticalflow         # Optical flows: .flo
+├── Video_Name.mp4      # Original video file
+├── Video_Name.wav      # Extracted audio file
+├── mapping.npy         # Learned mapping deepspeech -> FLAME Expressions
+└── track_params.pt     # Fitted Pose information
+```
+
+## License
+[MIT](https://choosealicense.com/licenses/mit/)
\ No newline at end of file
diff --git a/motion-gan-pipeline/preprocessing/__init__.py b/motion-gan-pipeline/preprocessing/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/motion-gan-pipeline/preprocessing/autils/__init__.py b/motion-gan-pipeline/preprocessing/autils/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/motion-gan-pipeline/preprocessing/autils/camera_pose_visualizer.py b/motion-gan-pipeline/preprocessing/autils/camera_pose_visualizer.py
new file mode 100644
index 0000000..e796233
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/autils/camera_pose_visualizer.py
@@ -0,0 +1,55 @@
+import numpy as np
+import matplotlib as mpl
+import matplotlib.pyplot as plt
+from matplotlib.patches import Patch
+from mpl_toolkits.mplot3d.art3d import Poly3DCollection
+
+
+class CameraPoseVisualizer:
+    def __init__(self, xlim, ylim, zlim):
+        self.fig = plt.figure(figsize=(18, 7))
+        self.ax = self.fig.gca(projection='3d')
+        self.ax.set_aspect("auto")
+        self.ax.set_xlim(xlim)
+        self.ax.set_ylim(ylim)
+ self.ax.set_zlim(zlim) + self.ax.set_xlabel('x') + self.ax.set_ylabel('y') + self.ax.set_zlabel('z') + print('initialize camera pose visualizer') + + def extrinsic2pyramid(self, extrinsic, color='r', focal_len_scaled=5, aspect_ratio=0.3): + vertex_std = np.array([[0, 0, 0, 1], + [focal_len_scaled * aspect_ratio, -focal_len_scaled * aspect_ratio, focal_len_scaled, 1], + [focal_len_scaled * aspect_ratio, focal_len_scaled * aspect_ratio, focal_len_scaled, 1], + [-focal_len_scaled * aspect_ratio, focal_len_scaled * aspect_ratio, focal_len_scaled, 1], + [-focal_len_scaled * aspect_ratio, -focal_len_scaled * aspect_ratio, focal_len_scaled, 1]]) + vertex_transformed = vertex_std @ extrinsic.T + meshes = [[vertex_transformed[0, :-1], vertex_transformed[1][:-1], vertex_transformed[2, :-1]], + [vertex_transformed[0, :-1], vertex_transformed[2, :-1], vertex_transformed[3, :-1]], + [vertex_transformed[0, :-1], vertex_transformed[3, :-1], vertex_transformed[4, :-1]], + [vertex_transformed[0, :-1], vertex_transformed[4, :-1], vertex_transformed[1, :-1]], + [vertex_transformed[1, :-1], vertex_transformed[2, :-1], vertex_transformed[3, :-1], vertex_transformed[4, :-1]]] + self.ax.add_collection3d( + Poly3DCollection(meshes, facecolors=color, linewidths=0.3, edgecolors=color, alpha=0.35)) + + def customize_legend(self, list_label): + list_handle = [] + for idx, label in enumerate(list_label): + color = plt.cm.rainbow(idx / len(list_label)) + patch = Patch(color=color, label=label) + list_handle.append(patch) + plt.legend(loc='right', bbox_to_anchor=(1.8, 0.5), handles=list_handle) + + def colorbar(self, max_frame_length): + cmap = mpl.cm.rainbow + norm = mpl.colors.Normalize(vmin=0, vmax=max_frame_length) + self.fig.colorbar(mpl.cm.ScalarMappable(norm=norm, cmap=cmap), orientation='vertical', label='Frame Number') + + def show(self): + plt.title('Extrinsic Parameters') + plt.show() + + def save_img(self, name): + plt.title(f'Extrinsic Parameters {name}') + plt.savefig(fname=name) diff --git a/motion-gan-pipeline/preprocessing/autils/cfgnode.py b/motion-gan-pipeline/preprocessing/autils/cfgnode.py new file mode 100644 index 0000000..8895187 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/autils/cfgnode.py @@ -0,0 +1,507 @@ +""" +Define a class to hold configurations. + +Borrows and merges stuff from YACS, fvcore, and detectron2 +https://github.com/rbgirshick/yacs +https://github.com/facebookresearch/fvcore/ +https://github.com/facebookresearch/detectron2/ + +""" + +import copy +import importlib.util +import io +import logging +import os +from ast import literal_eval +from typing import Optional + +import yaml + +# File exts for yaml +_YAML_EXTS = {"", ".yml", ".yaml"} +# File exts for python +_PY_EXTS = {".py"} + +# CfgNodes can only contain a limited set of valid types +_VALID_TYPES = {tuple, list, str, int, float, bool} + +# Valid file object types +_FILE_TYPES = (io.IOBase,) + +# Logger +logger = logging.getLogger(__name__) + + +class CfgNode(dict): + r"""CfgNode is a `node` in the configuration `tree`. It's a simple wrapper around a `dict` and supports access to + `attributes` via `keys`. + """ + + IMMUTABLE = "__immutable__" + DEPRECATED_KEYS = "__deprecated_keys__" + RENAMED_KEYS = "__renamed_keys__" + NEW_ALLOWED = "__new_allowed__" + + def __init__( + self, + init_dict: Optional[dict] = None, + key_list: Optional[list] = None, + new_allowed: Optional[bool] = False, + ): + r""" + Args: + init_dict (dict): A dictionary to initialize the `CfgNode`. 
+ key_list (list[str]): A list of names that index this `CfgNode` from the root. Currently, only used for + logging. + new_allowed (bool): Whether adding a new key is allowed when merging with other `CfgNode` objects. + + """ + + # Recursively convert nested dictionaries in `init_dict` to config tree. + init_dict = {} if init_dict is None else init_dict + key_list = [] if key_list is None else key_list + init_dict = self._create_config_tree_from_dict(init_dict, key_list) + super(CfgNode, self).__init__(init_dict) + + # Control the immutability of the `CfgNode`. + self.__dict__[CfgNode.IMMUTABLE] = False + # Support for deprecated options. + # If you choose to remove support for an option in code, but don't want to change all of the config files + # (to allow for deprecated config files to run), you can add the full config key as a string to this set. + self.__dict__[CfgNode.DEPRECATED_KEYS] = set() + # Support for renamed options. + # If you rename an option, record the mapping from the old name to the new name in this dictionary. Optionally, + # if the type also changed, you can make this value a tuple that specifies two things: the renamed key, and the + # instructions to edit the config file. + self.__dict__[CfgNode.RENAMED_KEYS] = { + # 'EXAMPLE.OLD.KEY': 'EXAMPLE.NEW.KEY', # Dummy example + # 'EXAMPLE.OLD.KEY': ( # A more complex example + # 'EXAMPLE.NEW.KEY', + # "Also convert to a tuple, eg. 'foo' -> ('foo', ) or " + # + "'foo.bar' -> ('foo', 'bar')" + # ), + } + + # Allow new attributes after initialization. + self.__dict__[CfgNode.NEW_ALLOWED] = new_allowed + + @classmethod + def _create_config_tree_from_dict(cls, init_dict: dict, key_list: list): + r"""Create a configuration tree using the input dict. Any dict-like objects inside `init_dict` will be treated + as new `CfgNode` objects. + + Args: + init_dict (dict): Input dictionary, to create config tree from. + key_list (list): A list of names that index this `CfgNode` from the root. Currently only used for logging. 
+ + """ + + d = copy.deepcopy(init_dict) + for k, v in d.items(): + if isinstance(v, dict): + # Convert dictionary to CfgNode + d[k] = cls(v, key_list=key_list + [k]) + else: + # Check for valid leaf type or nested CfgNode + _assert_with_logging( + _valid_type(v, allow_cfg_node=False), + "Key {} with value {} is not a valid type; valid types: {}".format( + ".".join(key_list + [k]), type(v), _VALID_TYPES + ), + ) + return d + + def __getattr__(self, name: str): + if name in self: + return self[name] + else: + raise AttributeError(name) + + def __setattr__(self, name: str, value): + if self.is_frozen(): + raise AttributeError( + "Attempted to set {} to {}, but CfgNode is immutable".format( + name, value + ) + ) + + _assert_with_logging( + name not in self.__dict__, + "Invalid attempt to modify internal CfgNode state: {}".format(name), + ) + + _assert_with_logging( + _valid_type(value, allow_cfg_node=True), + "Invalid type {} for key {}; valid types = {}".format( + type(value), name, _VALID_TYPES + ), + ) + + self[name] = value + + def __str__(self): + def _indent(s_, num_spaces): + s = s_.split("\n") + if len(s) == 1: + return s_ + first = s.pop(0) + s = [(num_spaces * " ") + line for line in s] + s = "\n".join(s) + s = first + "\n" + s + return s + + r = "" + s = [] + for k, v in sorted(self.items()): + separator = "\n" if isinstance(v, CfgNode) else " " + attr_str = "{}:{}{}".format(str(k), separator, str(v)) + attr_str = _indent(attr_str, 2) + s.append(attr_str) + r += "\n".join(s) + return r + + def __repr__(self): + return "{}({})".format(self.__class__.__name__, super(CfgNode, self).__repr__()) + + def dump(self, **kwargs): + r"""Dump CfgNode to a string. + """ + + def _convert_to_dict(cfg_node, key_list): + if not isinstance(cfg_node, CfgNode): + _assert_with_logging( + _valid_type(cfg_node), + "Key {} with value {} is not a valid type; valid types: {}".format( + ".".join(key_list), type(cfg_node), _VALID_TYPES + ), + ) + return cfg_node + else: + cfg_dict = dict(cfg_node) + for k, v in cfg_dict.items(): + cfg_dict[k] = _convert_to_dict(v, key_list + [k]) + return cfg_dict + + self_as_dict = _convert_to_dict(self, []) + return yaml.safe_dump(self_as_dict, **kwargs) + + def merge_from_file(self, cfg_filename: str): + r"""Load a yaml config file and merge it with this CfgNode. + + Args: + cfg_filename (str): Config file path. + + """ + with open(cfg_filename, "r") as f: + cfg = self.load_cfg(f) + self.merge_from_other_cfg(cfg) + + def merge_from_other_cfg(self, cfg_other): + r"""Merge `cfg_other` into the current `CfgNode`. + + Args: + cfg_other + """ + _merge_a_into_b(cfg_other, self, self, []) + + def merge_from_list(self, cfg_list: list): + r"""Merge config (keys, values) in a list (eg. from commandline) into this `CfgNode`. + + Eg. `cfg_list = ['FOO.BAR', 0.5]`. 
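+
+        Example (illustrative; the keys must already exist in this config):
+            >>> cfg.merge_from_list(["FOO.BAR", 0.25])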
+ """ + _assert_with_logging( + len(cfg_list) % 2 == 0, + "Override list has odd lengths: {}; it must be a list of pairs".format( + cfg_list + ), + ) + root = self + for full_key, v in zip(cfg_list[0::2], cfg_list[1::2]): + if root.key_is_deprecated(full_key): + continue + if root.key_is_renamed(full_key): + root.raise_key_rename_error(full_key) + key_list = full_key.split(".") + d = self + for subkey in key_list[:-1]: + _assert_with_logging( + subkey in d, "Non-existent key: {}".format(full_key) + ) + d = d[subkey] + subkey = key_list[-1] + _assert_with_logging(subkey in d, "Non-existent key: {}".format(full_key)) + value = self._decode_cfg_value(v) + value = _check_and_coerce_cfg_value_type(value, d[subkey], subkey, full_key) + d[subkey] = value + + def freeze(self): + r"""Make this `CfgNode` and all of its children immutable. """ + self._immutable(True) + + def defrost(self): + r"""Make this `CfgNode` and all of its children mutable. """ + self._immutable(False) + + def is_frozen(self): + r"""Return mutability. """ + return self.__dict__[CfgNode.IMMUTABLE] + + def _immutable(self, is_immutable: bool): + r"""Set mutability and recursively apply to all nested `CfgNode` objects. + + Args: + is_immutable (bool): Whether or not the `CfgNode` and its children are immutable. + + """ + self.__dict__[CfgNode.IMMUTABLE] = is_immutable + # Recursively propagate state to all children. + for v in self.__dict__.values(): + if isinstance(v, CfgNode): + v._immutable(is_immutable) + for v in self.values(): + if isinstance(v, CfgNode): + v._immutable(is_immutable) + + def clone(self): + r"""Recursively copy this `CfgNode`. """ + return copy.deepcopy(self) + + def register_deprecated_key(self, key: str): + r"""Register key (eg. `FOO.BAR`) a deprecated option. When merging deprecated keys, a warning is generated and + the key is ignored. + """ + + _assert_with_logging( + key not in self.__dict__[CfgNode.DEPRECATED_KEYS], + "key {} is already registered as a deprecated key".format(key), + ) + self.__dict__[CfgNode.DEPRECATED_KEYS].add(key) + + def register_renamed_key( + self, old_name: str, new_name: str, message: Optional[str] = None + ): + r"""Register a key as having been renamed from `old_name` to `new_name`. When merging a renamed key, an + exception is thrown alerting the user to the fact that the key has been renamed. + """ + + _assert_with_logging( + old_name not in self.__dict__[CfgNode.RENAMED_KEYS], + "key {} is already registered as a renamed cfg key".format(old_name), + ) + value = new_name + if message: + value = (new_name, message) + self.__dict__[CfgNode.RENAMED_KEYS][old_name] = value + + def key_is_deprecated(self, full_key: str): + r"""Test if a key is deprecated. """ + if full_key in self.__dict__[CfgNode.DEPRECATED_KEYS]: + logger.warning("deprecated config key (ignoring): {}".format(full_key)) + return True + return False + + def key_is_renamed(self, full_key: str): + r"""Test if a key is renamed. """ + return full_key in self.__dict__[CfgNode.RENAMED_KEYS] + + def raise_key_rename_error(self, full_key: str): + new_key = self.__dict__[CfgNode.RENAMED_KEYS][full_key] + if isinstance(new_key, tuple): + msg = " Note: " + new_key[1] + new_key = new_key[0] + else: + msg = "" + raise KeyError( + "Key {} was renamed to {}; please update your config.{}".format( + full_key, new_key, msg + ) + ) + + def is_new_allowed(self): + return self.__dict__[CfgNode.NEW_ALLOWED] + + @classmethod + def load_cfg(cls, cfg_file_obj_or_str): + r"""Load a configuration into the `CfgNode`. 
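+
+        Example (illustrative):
+            >>> cfg = CfgNode.load_cfg("KEY: [1, 2]")
+            >>> cfg.KEY
+            [1, 2]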
+
+        Args:
+            cfg_file_obj_or_str (str or cfg compatible object): Supports loading from:
+                - A file object backed by a YAML file.
+                - A file object backed by a Python source file that exports an attribute "cfg" (dict or `CfgNode`).
+                - A string that can be parsed as valid YAML.
+
+        """
+        _assert_with_logging(
+            isinstance(cfg_file_obj_or_str, _FILE_TYPES + (str,)),
+            "Expected first argument to be of type {} or {}, but got {}".format(
+                _FILE_TYPES, str, type(cfg_file_obj_or_str)
+            ),
+        )
+        if isinstance(cfg_file_obj_or_str, str):
+            return cls._load_cfg_from_yaml_str(cfg_file_obj_or_str)
+        elif isinstance(cfg_file_obj_or_str, _FILE_TYPES):
+            return cls._load_cfg_from_file(cfg_file_obj_or_str)
+        else:
+            raise NotImplementedError("Impossible to reach here (unless there's a bug)")
+
+    @classmethod
+    def _load_cfg_from_file(cls, file_obj):
+        r"""Load a config from a YAML file or a Python source file. """
+        _, file_ext = os.path.splitext(file_obj.name)
+        if file_ext in _YAML_EXTS:
+            return cls._load_cfg_from_yaml_str(file_obj.read())
+        elif file_ext in _PY_EXTS:
+            return cls._load_cfg_py_source(file_obj.name)
+        else:
+            raise Exception(
+                "Attempt to load from an unsupported filetype {}; only {} supported".format(
+                    file_ext, _YAML_EXTS.union(_PY_EXTS)
+                )
+            )
+
+    @classmethod
+    def _load_cfg_from_yaml_str(cls, str_obj):
+        r"""Load a config from a YAML string encoding. """
+        cfg_as_dict = yaml.safe_load(str_obj)
+        return cls(cfg_as_dict)
+
+    @classmethod
+    def _load_cfg_py_source(cls, filename):
+        r"""Load a config from a Python source file. """
+        module = _load_module_from_file("yacs.config.override", filename)
+        _assert_with_logging(
+            hasattr(module, "cfg"),
+            "Python module from file {} must export a 'cfg' attribute".format(filename),
+        )
+        VALID_ATTR_TYPES = {dict, CfgNode}
+        _assert_with_logging(
+            type(module.cfg) in VALID_ATTR_TYPES,
+            "Import module 'cfg' attribute must be in {} but is {}".format(
+                VALID_ATTR_TYPES, type(module.cfg)
+            ),
+        )
+        return cls(module.cfg)
+
+    @classmethod
+    def _decode_cfg_value(cls, value):
+        r"""Decodes a raw config value (eg. from a yaml config file or commandline argument) into a Python object.
+
+        If `value` is a dict, it will be interpreted as a new `CfgNode`.
+        If `value` is a str, it will be evaluated as a literal.
+        Otherwise, it is returned as is.
+
+        """
+        # Configs parsed from raw yaml will contain dictionary keys that need to be converted to `CfgNode` objects.
+        if isinstance(value, dict):
+            return cls(value)
+        # All remaining processing is only applied to strings.
+        if not isinstance(value, str):
+            return value
+        # Try to interpret `value` as a: string, number, tuple, list, dict, bool, or None
+        try:
+            value = literal_eval(value)
+        # The following two excepts allow `value` to pass through when it represents a string.
+        # The type of `value` is always a string (before calling `literal_eval`), but sometimes it *represents* a
+        # string and other times a data structure, like a list. In the case that `value` represents a str, what we
+        # got back from the yaml parser is `foo` *without quotes* (so, not `"foo"`). `literal_eval` is ok with `"foo"`,
+        # but will raise a `ValueError` if given `foo`. In other cases, like paths (`val = 'foo/bar'`), `literal_eval`
+        # will raise a `SyntaxError`.
+        except ValueError:
+            pass
+        except SyntaxError:
+            pass
+        return value
+
+
+# Keep this function in global scope, for backward compatibility.
+load_cfg = CfgNode.load_cfg + + +def _valid_type(value, allow_cfg_node: Optional[bool] = False): + return (type(value) in _VALID_TYPES) or ( + allow_cfg_node and isinstance(value, CfgNode) + ) + + +def _merge_a_into_b(a: CfgNode, b: CfgNode, root: CfgNode, key_list: list): + r"""Merge `CfgNode` `a` into `CfgNode` `b`, clobbering the options in `b` wherever they are also specified in `a`. + """ + _assert_with_logging( + isinstance(a, CfgNode), + "`a` (cur type {}) must be an instance of {}".format(type(a), CfgNode), + ) + _assert_with_logging( + isinstance(b, CfgNode), + "`b` (cur type {}) must be an instance of {}".format(type(b), CfgNode), + ) + + for k, v_ in a.items(): + full_key = ".".join(key_list + [k]) + v = copy.deepcopy(v_) + v = b._decode_cfg_value(v) + + if k in b: + v = _check_and_coerce_cfg_value_type(v, b[k], k, full_key) + # Recursively merge dicts. + if isinstance(v, CfgNode): + try: + _merge_a_into_b(v, b[k], root, key_list + [k]) + except BaseException: + raise + else: + b[k] = v + elif b.is_new_allowed(): + b[k] = v + else: + if root.key_is_deprecated(full_key): + continue + elif root.key_is_renamed(full_key): + root.raise_key_rename_error(full_key) + else: + raise KeyError("Non-existent config key: {}".format(full_key)) + + +def _check_and_coerce_cfg_value_type(replacement, original, key, full_key): + r"""Checks that `replacement`, which is intended to replace `original` is of the right type. The type is correct if + it matches exactly or is one of a few cases in which the type can easily be coerced. + """ + + original_type = type(original) + replacement_type = type(replacement) + if replacement_type == original_type: + return replacement + + # If replacement and original types match, cast replacement from `from_type` to `to_type`. + def _conditional_cast(from_type, to_type): + if replacement_type == from_type and original_type == to_type: + return True, to_type(replacement) + else: + return False, None + + # Conditional casts. + # list <-> tuple + casts = [(tuple, list), (list, tuple)] + for (from_type, to_type) in casts: + converted, converted_value = _conditional_cast(from_type, to_type) + if converted: + return converted_value + + raise ValueError( + "Type mismatch ({} vs. {} with values ({} vs. {}) for config key: {}".format( + original_type, replacement_type, original, replacement, full_key + ) + ) + + +def _assert_with_logging(cond, msg): + if not cond: + logger.debug(msg) + assert cond, msg + + +def _load_module_from_file(name, filename): + spec = importlib.util.spec_from_file_location(name, filename) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + return module diff --git a/motion-gan-pipeline/preprocessing/autils/ops3D.py b/motion-gan-pipeline/preprocessing/autils/ops3D.py new file mode 100644 index 0000000..24cbbf1 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/autils/ops3D.py @@ -0,0 +1,147 @@ +import numpy as np +import open3d as o3d + +import argparse +import os +import numpy as np +import torch +from importlib import import_module +from plyfile import PlyData, PlyElement +from skimage import measure +from skimage.draw import ellipsoid + + +def export_obj(vertices, triangles, diffuse, normals, filename): + """ + Exports a mesh in the (.obj) format. 
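+
+    Assumed array shapes (illustrative, inferred from the writer below):
+    vertices (N, 3), triangles (M, 3) of zero-based indices, diffuse (N, 3)
+    colours in [0, 1], normals (N, 3).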
+    """
+    print('Writing to obj...')
+
+    with open(filename, "w") as fh:
+
+        for index, v in enumerate(vertices):
+            fh.write("v {} {} {}".format(*v))
+            if len(diffuse) > index:
+                fh.write(" {} {} {}".format(*diffuse[index]))
+
+            fh.write("\n")
+
+        for n in normals:
+            fh.write("vn {} {} {}\n".format(*n))
+
+        for f in triangles:
+            fh.write("f")
+            for index in f:
+                fh.write(" {}//{}".format(index + 1, index + 1))
+
+            fh.write("\n")
+
+    print(f"Finished writing to {filename} with {len(vertices)} vertices")
+
+
+def export_ply(vertices, diffuse, normals, filename):
+    names = 'x, y, z, nx, ny, nz, red, green, blue'
+    formats = 'f4, f4, f4, f4, f4, f4, u1, u1, u1'
+    arr = np.concatenate((vertices, normals, diffuse * 255), axis=-1)
+    vertices_s = np.core.records.fromarrays(arr.transpose(), names=names, formats=formats)
+
+    # Recreate the PlyElement instance
+    v = PlyElement.describe(vertices_s, 'vertex')
+
+    # Create the PlyData instance
+    p = PlyData([v], text=True)
+
+    p.write(filename)
+
+
+def export_point_cloud(it, ray_origins, ray_directions, depth_fine, dep_target):
+    vertices_output = (ray_origins + ray_directions * depth_fine[..., None]).view(-1, 3)
+    vertices_target = (ray_origins + ray_directions * dep_target[..., None]).view(-1, 3)
+    vertices = torch.cat((vertices_output, vertices_target), dim=0)
+    diffuse_output = torch.zeros_like(vertices_output)
+    diffuse_output[:, 0] = 1.0
+    diffuse_target = torch.zeros_like(vertices_target)
+    diffuse_target[:, 2] = 1.0
+    diffuse = torch.cat((diffuse_output, diffuse_target), dim=0)
+    normals = torch.cat((-ray_directions.view(-1, 3), -ray_directions.view(-1, 3)), dim=0)
+    export_obj(vertices, [], diffuse, normals, f"{it:04d}.obj")
+
+
+def create_point_cloud(ray_origins, ray_directions, depth, color, mask=None):
+
+    if mask is not None:
+        ray_directions, depth = ray_directions[mask], depth[mask]
+
+    vertices = (ray_origins + ray_directions * depth[..., None]).view(-1, 3)
+    diffuse = color.expand(vertices.shape)
+    normals = -ray_directions.view(-1, 3)
+
+    return vertices, diffuse, normals
+
+
+def extract_iso_level(density, iso_level):
+    # Density boundaries
+    min_a, max_a, std_a = density.min(), density.max(), density.std()
+
+    # Adaptive iso level
+    iso_value = min(max(iso_level, min_a + std_a), max_a - std_a)
+    print(f"Min density {min_a}, Max density: {max_a}, Mean density {density.mean()}")
+    print(f"Querying based on iso level: {iso_value}")
+
+    return iso_value
+
+
+def extract_geometry(radiance, pts, iso_level, limit, res):
+
+    radiance = torch.flatten(radiance, end_dim=1)
+    pts = torch.flatten(pts, end_dim=1)
+    print(radiance.size())
+    print(pts.size())
+
+    # Density grid
+    density = radiance[..., 3]
+
+    # Adaptive iso level
+    iso_value = extract_iso_level(density, iso_level)
+
+    # Extract the triangulated iso-surface; marching_cubes returns numpy
+    # arrays: vertices, faces, normals and values
+    results = measure.marching_cubes(density, iso_value)
+    vertices, triangles, normals, _ = results
+
+    # Use contiguous tensors
+    normals = torch.from_numpy(np.ascontiguousarray(normals))
+    vertices = torch.from_numpy(np.ascontiguousarray(vertices))
+    triangles = 
torch.from_numpy(np.ascontiguousarray(triangles)) + + # Normalize vertices, to the (-limit, limit) + vertices = limit * (vertices / (res / 2.) - 1.) + + return vertices, triangles, normals, density + + +def export_marching_cubes(radiance, pts, iso_level, limit, res, path): + + mesh_path = os.path.join(path + 'mesh.obj') + point_cloud_path = os.path.join(path + 'pointcloud.ply') + + vertices, triangles, normals, density = extract_geometry(radiance, pts, iso_level, limit, res) + diffuse = radiance[..., :3] + + # Export obj + export_obj(vertices, triangles, diffuse, normals, mesh_path) + + # Export ply + export_ply(vertices, diffuse, normals, point_cloud_path) + + diff --git a/motion-gan-pipeline/preprocessing/autils/options.py b/motion-gan-pipeline/preprocessing/autils/options.py new file mode 100644 index 0000000..e6d8b22 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/autils/options.py @@ -0,0 +1,74 @@ +import argparse + + +class Options(): + def __init__(self): + self.initialized = False + + def initialize(self, parser): + self.initialized = True + return parser + + def gather_options(self): + # initialize parser with basic options + if not self.initialized: + parser = argparse.ArgumentParser( + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser = self.initialize(parser) + + # get the basic options + opt, _ = parser.parse_known_args() + + self.parser = parser + + return parser.parse_args() + + def print_options(self, opt): + message = '' + message += '----------------- Options ---------------\n' + for k, v in sorted(vars(opt).items()): + comment = '' + default = self.parser.get_default(k) + if v != default: + comment = '\t[default: %s]' % str(default) + message += '{:>25}: {:<30}{}\n'.format(str(k), str(v), comment) + message += '----------------- End -------------------' + print(message) + + def parse(self): + + opt = self.gather_options() + + self.print_options(opt) + + self.opt = opt + return self.opt + + +class PreprocessingOptions(Options): + + def initialize(self, parser): + + parser = Options.initialize(self, parser) + parser.add_argument('--dataroot', required=True, default='/home/alberto/data/nerf-videosynth/', + help='path to data folder') + + parser.add_argument('--name', required=True, help='name of video/audio to process') + + parser.add_argument('--target_fps', type=int, default=25, help='Fps of target video') + + parser.add_argument('--preprocessing_type', type=str, default='video', help='Preprocessing for video or audio') + + parser.add_argument('--audioexpr', action='store_true', help='Extracts NVP audio-expressions') + + parser.add_argument('--step', type=str, default='0', help='Pre-processing step to take') + + parser.add_argument('--use_DECA', action='store_true', help='If true, use deca tracker for face model') + parser.add_argument('--use_FLAME', action='store_true', help='If true, use flame tracker for face model') + parser.add_argument('--use_BASEL', action='store_true', help='If true, use basel tracker for face model') + + parser.add_argument('--train_split', type=float, default=0.9, help='Percentage of data used for training') + parser.add_argument('--val_split', type=float, default=0.01, help='Percentage of data used for validation') + + return parser + diff --git a/motion-gan-pipeline/preprocessing/autils/rename_files.py b/motion-gan-pipeline/preprocessing/autils/rename_files.py new file mode 100644 index 0000000..cf748af --- /dev/null +++ b/motion-gan-pipeline/preprocessing/autils/rename_files.py @@ -0,0 +1,20 @@ +import os +from argparse 
import ArgumentParser
+from tqdm import tqdm
+
+if __name__ == "__main__":
+    parser = ArgumentParser()
+    parser.add_argument("--folder", type=str, help='Path to folder to rename.')
+
+    args = parser.parse_args()
+
+    path = args.folder
+    file_list = os.listdir(path)
+    num_files = len(file_list)
+    print(f'Total number of files: {num_files}')
+
+    for i, file_name in enumerate(tqdm(file_list)):
+        name, ext = os.path.splitext(file_name)
+        new_name = os.path.join(path, name, file_name)
+        os.makedirs(os.path.join(path, name), exist_ok=True)
+        os.rename(os.path.join(path, file_name), new_name)
diff --git a/motion-gan-pipeline/preprocessing/autils/scheduler.py b/motion-gan-pipeline/preprocessing/autils/scheduler.py
new file mode 100644
index 0000000..d2fe693
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/autils/scheduler.py
@@ -0,0 +1,178 @@
+import numpy as np
+import torchvision
+import os
+from torch.utils.tensorboard import SummaryWriter
+import torch
+from datetime import datetime
+import imageio
+from subprocess import call
+import trimesh
+import open3d as o3d
+from .ops3D import export_marching_cubes
+
+
+class Scheduler:
+
+    def __init__(self, opt):
+        self.opt = opt
+        self.total_step = 0
+        self.val_step = 0
+        self.epoch = 0
+
+    def initialize(self, mode='train', num_images=None, num_images_val=None, existing_dir=None):
+        self.mode = mode
+
+        if existing_dir is not None:
+            self.base_dir = os.path.join(existing_dir, 'test_results')
+
+        else:
+            date_time = datetime.now().strftime("%m%d%Y_%H%M%S")
+            self.base_dir = os.path.join(self.opt.experiment.outdir, date_time)
+
+        if self.mode == 'train':
+            # Setup logging and checkpoints.
+            self.num_images = num_images
+            self.num_images_val = num_images_val
+            self.logdir = os.path.join(self.base_dir, 'logs')
+            self.checkpointdir = os.path.join(self.base_dir, 'checkpoints')
+            os.makedirs(self.logdir, exist_ok=True)
+            os.makedirs(self.checkpointdir, exist_ok=True)
+            self.writer = SummaryWriter(self.logdir)
+
+            # Write out config parameters.
+            with open(os.path.join(self.base_dir, "config.yml"), "w") as f:
+                f.write(self.opt.dump())
+
+        elif self.mode == 'test':
+            # Setup images directory.
+            self.savedir = os.path.join(self.base_dir, 'images')
+            os.makedirs(self.savedir, exist_ok=True)
+            if self.opt.experiment.save_all_images:
+                self.disparitydir = os.path.join(self.base_dir, "disparity")
+                os.makedirs(self.disparitydir, exist_ok=True)
+                self.depthdir = os.path.join(self.base_dir, "depth")
+                os.makedirs(self.depthdir, exist_ok=True)
+                self.targetdir = os.path.join(self.base_dir, "target")
+                os.makedirs(self.targetdir, exist_ok=True)
+
+    def cast_to_image(self, img):
+        # Input tensor is (H, W, 3). Convert to (3, H, W).
+        img = img.permute(2, 0, 1)
+
+        # Convert to PIL Image and then np.array (output shape: (H, W, 3))
+        img = np.array(torchvision.transforms.ToPILImage()(img.detach().cpu()))
+        return img
+
+    def cast_one_channel_image(self, tensor):
+        img = (tensor - tensor.min()) / (tensor.max() - tensor.min())
+        img = img.clamp(0, 1) * 255
+        return img.detach().cpu().numpy().astype(np.uint8)
+
+    def write_losses(self, phase, losses, metrics):
+        for loss in losses.keys():
+            self.writer.add_scalar(f"{phase}/losses/{loss}", losses[loss].item(), self.total_step)
+
+        for metric in metrics.keys():
+            self.writer.add_scalar(f"{phase}/metrics/{metric}", metrics[metric], self.total_step)
+
+    def write_images(self, phase, images):
+        for nerf in images.keys():
+            for image in images[nerf].keys():
+                img = images[nerf][image].detach().cpu()
+                self.writer.add_image(f'{phase}/{nerf}/{image}', img, self.epoch, dataformats='HWC')
+
+    def save_images(self, rgb, disp, depth, target, image_id):
+        savefile = os.path.join(self.savedir, f"{image_id:04d}.png")
+        rgb_image = self.cast_to_image(rgb[..., :3])
+        imageio.imwrite(savefile, rgb_image)
+
+        if self.opt.experiment.save_all_images:
+            # save disp
+            savefile = os.path.join(self.disparitydir, f"{image_id:04d}.png")
+            disp_image = self.cast_one_channel_image(disp)
+            imageio.imwrite(savefile, disp_image)
+            # save depth
+            savefile = os.path.join(self.depthdir, f"{image_id:04d}.png")
+            depth_image = self.cast_one_channel_image(depth)
+            imageio.imwrite(savefile, depth_image)
+            # save target
+            savefile = os.path.join(self.targetdir, f"{image_id:04d}.png")
+            target_image = self.cast_to_image(target)
+            imageio.imwrite(savefile, target_image)
+
+    def save_video(self, audio_path=None):
+        target_fps = 25
+        if audio_path is None:
+            cmd = f"ffmpeg -framerate {target_fps} -i {self.savedir + '/%04d.png'} -vcodec h264 -ac 2 -channel_layout stereo -pix_fmt yuv420p video.mp4".split()
+        else:
+            cmd = f"ffmpeg -framerate {target_fps} -i {self.savedir + '/%04d.png'} -i {audio_path} -vcodec h264 -ac 2 -channel_layout stereo -pix_fmt yuv420p video.mp4".split()
+
+        call(cmd)
+
+    def check_validation(self):
+        if self.epoch % self.opt.scheduler.validate_every == 0 or \
+                self.epoch == self.opt.scheduler.epochs - 1:
+            print("[VAL] =======> Epoch: " + str(self.epoch))
+            return True
+
+        else:
+            return False
+
+    def check_save(self):
+        if self.epoch % self.opt.scheduler.save_every == 0 or \
+                self.epoch == self.opt.scheduler.epochs:
+            return True
+
+        else:
+            return False
+
+    def save_3d_scene(self, vertices, triangles):
+
+        print('Saving mesh ..')
+        mesh = trimesh.Trimesh(vertices, triangles)
+        trimesh.exchange.export.export_mesh(mesh, os.path.join(self.base_dir, 'mesh.obj'))
+
+    def save_pointcloud(self, vertices):
+
+        print('Saving pointcloud ..')
+        mesh = trimesh.Trimesh(vertices)
+        trimesh.exchange.export.export_mesh(mesh, os.path.join(self.base_dir, 'pointcloud.ply'))
+
+    def from_pointcloud_to_mesh(self):
+        print('Saving mesh from pointcloud ..')
+
+        pcd = o3d.io.read_point_cloud(os.path.join(self.base_dir, 
+        pcd.estimate_normals()
+
+        # estimate radius for rolling ball
+        distances = pcd.compute_nearest_neighbor_distance()
+        avg_dist = np.mean(distances)
+        radius = 4.0 * avg_dist
+
+        mesh = o3d.geometry.TriangleMesh.create_from_point_cloud_ball_pivoting(
+            pcd,
+            o3d.utility.DoubleVector([radius, radius * 2]))
+
+        # create the triangular mesh with the vertices and faces from open3d
+        tri_mesh = trimesh.Trimesh(np.asarray(mesh.vertices), np.asarray(mesh.triangles),
+                                   vertex_normals=np.asarray(mesh.vertex_normals))
+
+        trimesh.convex.is_convex(tri_mesh)
+        trimesh.exchange.export.export_mesh(tri_mesh, os.path.join(self.base_dir, 'mesh.ply'))
+
diff --git a/motion-gan-pipeline/preprocessing/combine_edges.py b/motion-gan-pipeline/preprocessing/combine_edges.py
new file mode 100644
index 0000000..ed313ec
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/combine_edges.py
@@ -0,0 +1,132 @@
+# SPDX-License-Identifier: MIT
+# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details
+
+import os
+from tqdm import tqdm
+import numpy as np
+import torch
+import torch.nn as nn
+from torchvision import transforms
+from scipy.spatial import KDTree
+import matplotlib
+matplotlib.use('agg')
+import matplotlib.pyplot as plt
+
+from PIL import Image
+import argparse
+from edge_creation.utils import get_edge_predicted, get_edge_image_mixed, get_crop_coords, convert_to_rgb
+from scipy.ndimage import gaussian_filter1d
+
+def combine_edges(inopt):
+    # load generated headposes
+    generated_hp_path = os.path.join(inopt.out_dir, 'headposes.npy')
+    generated_hp = np.load(generated_hp_path)
+
+    # load tracked headposes from target video
+    track_params_path = os.path.join(inopt.dataset_base, 'track_params.pt')
+    tracked_params = torch.load(track_params_path)
+    tracked_R = tracked_params['euler'].numpy()
+    tracked_t = tracked_params['trans'].numpy()
+    tracked_hp = np.concatenate((tracked_R, tracked_t), axis=1)
+
+    # Compute crop size
+    img = Image.open(os.path.join(inopt.dataset_base, 'matting', '%05d.png' % 0))
+    img = convert_to_rgb(img)
+
+    ldk_paths = os.listdir(os.path.join(inopt.out_dir, 'landmarks'))
+    points = np.mean([np.loadtxt(os.path.join(inopt.out_dir, 'landmarks', ldk))[:, :2] for ldk in ldk_paths], axis=0)
+    crop_coords = get_crop_coords(points, img.size)
+
+    for frame_num in tqdm(range(generated_hp.shape[0])):
+
+        # Nearest tracked pose to this generated pose (the distance itself is unused).
+        dist, index = KDTree(tracked_hp).query(generated_hp[frame_num], workers=-1)
+
+        img_path = os.path.join(inopt.dataset_base, 'matting', '%05d.png' % index)
+        generated_landmark_path = os.path.join(inopt.out_dir, 'landmarks', '%05d.lms' % frame_num)
+        tracked_landmark_path = os.path.join(inopt.dataset_base, 'landmarks', '%05d.lms' % index)
+
+        get_edge_image_mixed(inopt.out_dir, img_path, generated_landmark_path, tracked_landmark_path, crop_coords, frame_num)
+
+
+def predict_face2body(inopt):
+
+    # check device
+    device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
+
+    # load head2body model
+    checkpoint_path = os.path.join(inopt.checkpoint_dir, inopt.target_name, 'head2body.pkl')
+    if not os.path.isfile(checkpoint_path):
+        print('No checkpoint found.')
+        exit()
+
+    # Linear mapping from 77 face landmarks to 4 body (shoulder/hip) landmarks.
+    model = nn.Linear(77 * 2, 4 * 2).to(device)
+    model.load_state_dict(torch.load(checkpoint_path))
+    model.eval()
+
+    # landmarks
+    num_ldk = len(os.listdir(os.path.join(inopt.out_dir, 'landmarks')))
+
+    # transforms
+    trans = transforms.ToTensor()
+
+    # get img size
+    tmp_img = Image.open(os.path.join(inopt.out_dir,
'render', '00000.png')) + im_size = tmp_img.size + w, h = im_size + + print('Predicting body landmarks..') + output_ldks = [] + for idx in tqdm(range(num_ldk)): + landmark_name = os.path.join(inopt.out_dir, 'landmarks', '%05d.lms' % idx) + + # load and transform input + input_ldk = np.loadtxt(landmark_name)[:, :2] + input_ldk[:, 0] = input_ldk[:, 0] / w + input_ldk[:, 1] = input_ldk[:, 1] / h + input_ldk = trans(input_ldk).flatten().float().to(device).unsqueeze(0) + + # generate and scale output + output_ldk = model(input_ldk) + + # Transform + input_ldk = input_ldk.cpu().detach().reshape((77, 2)).numpy() + input_ldk[:, 0] = input_ldk[:, 0] * w + input_ldk[:, 1] = input_ldk[:, 1] * h + + output_ldk = output_ldk.cpu().detach().reshape((4, 2)).numpy() + output_ldk[:, 0] = output_ldk[:, 0] * w + output_ldk[:, 1] = output_ldk[:, 1] * h + + output_ldks.append(output_ldk) + + # Smooth predicted landmarks + smooth_sigma = 5 + output_ldks = np.array(output_ldks) + output_ldks = gaussian_filter1d(output_ldks.reshape(-1, 8), smooth_sigma, axis=0).reshape(-1, 4, 2) + + print('\nMaking edges..') + for idx in tqdm(range(num_ldk)): + landmark_name = os.path.join(inopt.out_dir, 'landmarks', '%05d.lms' % idx) + output_ldk = output_ldks[idx] + get_edge_predicted(idx, inopt.out_dir, landmark_name, output_ldk, im_size) + + + +if __name__=='__main__': + + # load args + parser = argparse.ArgumentParser() + parser.add_argument('--dataset_base', required=True) + parser.add_argument('--out_dir', required=True) + parser.add_argument('--target_name', required=True) + parser.add_argument('--checkpoint_dir', required=True) + + inopt = parser.parse_args() + + # call + # combine_edges(inopt) + + predict_face2body(inopt) + diff --git a/motion-gan-pipeline/preprocessing/deca_tracker.py b/motion-gan-pipeline/preprocessing/deca_tracker.py new file mode 100644 index 0000000..c3634b8 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/deca_tracker.py @@ -0,0 +1,43 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import os, sys +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../third/DECA/'))) +from decalib.deca import DECA +from decalib.datasets import datasets +from decalib.utils import util +from decalib.utils.config import cfg as deca_cfg + + +class DECA_tracker: + def __init__(self, video_path): + + # load test images + self.testdata = datasets.TestData(video_path, iscrop=True, face_detector='fan') + self.device = 'cuda' + + # run DECA + deca_cfg.model.use_tex = False + self.deca = DECA(config=deca_cfg, device=self.device) + + def get_flame_mesh(self, codedict): + + verts, landmarks2d, landmarks3d = self.deca.flame(shape_params=codedict['shape'], + expression_params=codedict['exp'], + pose_params=codedict['pose']) + + trans_verts = util.batch_orth_proj(verts, codedict['cam']) + trans_verts[:, :, 1:] = -trans_verts[:, :, 1:] + + return verts, trans_verts + + def save_obj(self, filename, vertices): + faces = self.deca.render.faces[0].cpu().numpy() + + util.write_obj(filename, vertices, faces) + + def __call__(self, images, tform=None): + + codedict = self.deca.encode(images) + + return codedict diff --git a/motion-gan-pipeline/preprocessing/deepspeech_features.py b/motion-gan-pipeline/preprocessing/deepspeech_features.py new file mode 100644 index 0000000..627cf0a --- /dev/null +++ b/motion-gan-pipeline/preprocessing/deepspeech_features.py @@ -0,0 +1,353 @@ +""" +20.10.14 +Code collection from VOCA: 
+https://github.com/TimoBolkart/voca +""" +import argparse +import re +import subprocess + +from scipy.io import wavfile +import os +import copy +import resampy +import numpy as np +import tensorflow as tf +from python_speech_features import mfcc +import moviepy.editor as mp +from pydub import AudioSegment +from subprocess import call + +class AudioHandler: + def __init__(self, config): + self.config = config + self.audio_feature_type = config['audio_feature_type'] + self.num_audio_features = config['num_audio_features'] + self.audio_window_size = config['audio_window_size'] + self.audio_window_stride = config['audio_window_stride'] + self.target_fps = config["target_fps"] + + def process(self, audio): + if self.audio_feature_type.lower() == "none": + return None + elif self.audio_feature_type.lower() == 'deepspeech': + return self.convert_to_deepspeech(audio) + else: + raise NotImplementedError("Audio features not supported") + + def convert_to_deepspeech(self, audio): + def audioToInputVector(audio, fs, numcep, numcontext): + # Get mfcc coefficients + features = mfcc(audio, samplerate=fs, numcep=numcep) + + # We only keep every second feature (BiRNN stride = 2) + features = features[::2] + + # One stride per time step in the input + num_strides = len(features) + + # Add empty initial and final contexts + empty_context = np.zeros((numcontext, numcep), dtype=features.dtype) + features = np.concatenate((empty_context, features, empty_context)) + + # Create a view into the array with overlapping strides of size + # numcontext (past) + 1 (present) + numcontext (future) + window_size = 2 * numcontext + 1 + train_inputs = np.lib.stride_tricks.as_strided( + features, + (num_strides, window_size, numcep), + (features.strides[0], features.strides[0], features.strides[1]), + writeable=False) + + # Flatten the second and third dimensions + train_inputs = np.reshape(train_inputs, [num_strides, -1]) + + train_inputs = np.copy(train_inputs) + train_inputs = (train_inputs - np.mean(train_inputs)) / np.std(train_inputs) + + # Return results + return train_inputs + + if type(audio) == dict: + pass + else: + raise ValueError('Wrong type for audio') + + # Load graph and place_holders + + with tf.io.gfile.GFile(self.config['deepspeech_graph_fname'], "rb") as f: + graph_def = tf.compat.v1.GraphDef() + graph_def.ParseFromString(f.read()) + + from tensorflow.python.framework.ops import get_default_graph + graph = get_default_graph() + tf.import_graph_def(graph_def, name="deepspeech") + input_tensor = graph.get_tensor_by_name('deepspeech/input_node:0') + # input_tensor = graph.get_tensor_by_name('input_node:0') + seq_length = graph.get_tensor_by_name('deepspeech/input_lengths:0') + layer_6 = graph.get_tensor_by_name('deepspeech/logits:0') + + n_input = 26 + n_context = 9 + + processed_audio = copy.deepcopy(audio) + with tf.compat.v1.Session(graph=graph) as sess: + for subj in audio.keys(): + for seq in audio[subj].keys(): + print('process audio: %s - %s' % (subj, seq)) + + audio_sample = audio[subj][seq]['audio'] + sample_rate = audio[subj][seq]['sample_rate'] + resampled_audio = resampy.resample(audio_sample.astype(float), sample_rate, 16000) + input_vector = audioToInputVector(resampled_audio.astype('int16'), 16000, n_input, n_context) + + network_output = sess.run(layer_6, feed_dict={input_tensor: input_vector[np.newaxis, ...], + seq_length: [input_vector.shape[0]]}) + + # Resample network output from 50 fps to 60 fps + audio_len_s = float(audio_sample.shape[0]) / sample_rate + num_frames = 
int(round(audio_len_s * self.target_fps)) + network_output = self.interpolate_features(network_output[:, 0], 50, self.target_fps, + output_len=num_frames) + + # Make windows + zero_pad = np.zeros((int(self.audio_window_size / 2), network_output.shape[1])) + network_output = np.concatenate((zero_pad, network_output, zero_pad), axis=0) + windows = [] + for window_index in range(0, network_output.shape[0] - self.audio_window_size, + self.audio_window_stride): + windows.append(network_output[window_index:window_index + self.audio_window_size]) + + processed_audio[subj][seq]['audio'] = np.array(windows) + return processed_audio + + def interpolate_features(self, features, input_rate, output_rate, output_len=None): + num_features = features.shape[1] + input_len = features.shape[0] + seq_len = input_len / float(input_rate) + if output_len is None: + output_len = int(seq_len * output_rate) + input_timestamps = np.arange(input_len) / float(input_rate) + output_timestamps = np.arange(output_len) / float(output_rate) + output_features = np.zeros((output_len, num_features)) + for feat in range(num_features): + output_features[:, feat] = np.interp(output_timestamps, + input_timestamps, + features[:, feat]) + return output_features + + +class AudioFeatures: + def __init__(self, config): + self.audio_handler = AudioHandler(config) + + def process_audio(self, audio, sample_rate): + tmp_audio = {'subj': {'seq': {'audio': audio, 'sample_rate': sample_rate}}} + return self.audio_handler.process(tmp_audio)['subj']['seq']['audio'] + + def inference_interpolate_styles(self, audio_fname): + # TODO: check if this works + # sample_rate, audio = wavfile.read(audio_fname) + import soundfile as sf + import librosa + import wavio + + x,_ = librosa.load(audio_fname, sr=16000) + sf.write(audio_fname, x, 16000) + print('Successfully converted.') + + wav = wavio.read(audio_fname) + audio, sample_rate = wav.data, wav.rate + + # audio, sample_rate = librosa.load(audio_fname) + + print("sample rate: ", sample_rate) + if audio.ndim != 1: + print('Audio has multiple channels, only first channel is considered') + audio = audio[:, 0] + + processed_audio = self.process_audio(audio, sample_rate) + return processed_audio + + def run(self, audio_fname): + features = self.inference_interpolate_styles(audio_fname) + return features + + +class FFMPEGMetaReader: + """ + Uses ffprobe to extract metadata from audio and video + """ + + def __init__(cls, fps): + cls.DEFAULTS = { + "samplerate": 48000, + "bitrate": 96, + "duration": "00", + "fps": fps + } + + cls.PATTERNS = { + "sample_and_bitrate": r'Audio:.*\s(\d+)\sHz.*\s(\d+)\skb/s', + "duration": r"Duration:\s(\d{2}):(\d{2}):(\d{2})\.(\d{2})", + "fps": r"Video:.*\s(\d+\.?\d*)\sfps" + } + + def _run_ffmpeg(cls, path): + process = subprocess.Popen(['ffprobe', '-i', path, '-hide_banner'], stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + stdout, stderr = process.communicate() + output = stderr.decode('utf-8') + return output + + def _sample_and_bitrate(cls, path, default_samplerate=None, default_bitrate=None, output=None): + if output is None: + output = cls._run_ffmpeg(path) + match = re.search(cls.PATTERNS["sample_and_bitrate"], output) + + samplerate = default_samplerate if default_samplerate else cls.DEFAULTS["samplerate"] + bitrate = default_bitrate if default_bitrate else cls.DEFAULTS["bitrate"] + + if match: + samplerate = match.group(1) if match.group(1) else samplerate + bitrate = match.group(2) if match.group(2) else bitrate + return samplerate, bitrate + + def _duration(cls, 
path, output=None): + if output is None: + output = cls._run_ffmpeg(path) + + duration = cls.DEFAULTS["duration"] + + match = re.search(cls.PATTERNS["duration"], output) + + if match and match.group(4): + duration = "{}:{}:{}.{}".format(*[match.group(i) if match.group(i) else duration for i in range(1, 5)]) + return duration + + def _fps(cls, video_path, output=None): + if output is None: + output = cls._run_ffmpeg(video_path) + + fps = cls.DEFAULTS["fps"] + + match = re.search(cls.PATTERNS["fps"], output) + + if match: + fps = match.group(1) if match.group(1) else fps + else: + raise Warning("No fps found.") + return fps + + def extract_meta(cls, path): + output = cls._run_ffmpeg(path) + samplerate, bitrate = cls._sample_and_bitrate(path, output=output) + duration = cls._duration(path, output=output) + fps = cls._fps(path, output=output) + return { + "samplerate": samplerate, + "bitrate": bitrate, + "duration": duration, + "fps": fps + } + + +def get_parser(): + parser = argparse.ArgumentParser() + parser.add_argument("--datasets", type=str, required=True) + parser.add_argument("--video_id", type=str, required=True) + return parser + + +def get_config(): + config = {} + config['deepspeech_graph_fname'] = os.path.join( + "third/DeepSpeech/models/", "output_graph.pb") + config['audio_feature_type'] = 'deepspeech' + config['num_audio_features'] = 29 + + config['audio_window_size'] = 16 + config['audio_window_stride'] = 1 + # config['target_fps'] = target_fps + return config + + +def mkdir(path): + if not os.path.exists(path): + os.makedirs(path, exist_ok=True) + + +def extract_ds(dataset_base, name, file_type, sample_rate=16000, target_fps=25.0): + + config = get_config() + path_wav = '{}/{}.wav'.format(dataset_base, name) + + # First convert mp4 to wav + if file_type == 'video' and not os.path.isfile(path_wav): + + path_video_in = '{}/{}.mp4'.format(dataset_base, name) + + if not os.path.isfile(path_video_in): + path_video_in = '{}/{}.avi'.format(dataset_base, name) + + # cmd = "ffmpeg -i input-video.avi -vn -acodec copy output-audio.aac" + + cmd = ('ffmpeg' + f' -i {path_video_in} -vn -acodec copy -ar {sample_rate} {path_wav} ').split() + call(cmd) + + else: + if not os.path.isfile(path_wav): + try: + path_mp3 = '{}/{}.mp3'.format(dataset_base, name) + sound = AudioSegment.from_mp3(path_mp3) + sound.export(path_wav, format="wav") + + except FileNotFoundError: + print('Audio file not found. 
File format should be mp3 or wav.')
+
+    config["target_fps"] = target_fps
+
+    # Compute features
+    features = AudioFeatures(config=config).run(path_wav)
+    folder_out = os.path.join(dataset_base, "audio_feature")
+
+    # Save features
+    mkdir(folder_out)
+    for i, feature in enumerate(features):
+        fn_out = "%05d.deepspeech.npy" % i
+        np.save(os.path.join(folder_out, fn_out), feature)
+    print("Written {} files to '{}'".format(features.shape[0], folder_out))
+
+
+if __name__ == "__main__":
+    args = get_parser().parse_args()
+    folder_videos = args.datasets  # the parser flag above is --datasets
+    file_id = args.video_id
+    folder_nvp = "TARGETS"
+
+    # First convert mp4 to wav
+    path_video_in = '{}/{}.mp4'.format(folder_videos, file_id)
+    path_wav = '{}/{}.wav'.format(folder_videos, file_id)
+    path_features = os.path.join(folder_videos, "deepspeech", "{}.npy".format(file_id))
+    clip = mp.VideoFileClip(path_video_in)
+    clip.audio.write_audiofile(path_wav)
+
+    # Get the meta data via ffmpeg. The reader needs a fallback fps for its
+    # defaults (25 assumed here); the value probed from the video takes precedence.
+    config = get_config()
+    metadata = FFMPEGMetaReader(25.0).extract_meta(path_video_in)
+    # ffprobe reports fps as a string; compare via float so values like "29.97"
+    # do not crash int().
+    fps = float(metadata["fps"])
+    if int(fps) != fps:
+        print("Careful: fps is not an integer ({})".format(metadata["fps"]))
+    config["target_fps"] = int(fps)
+
+    # Compute features
+    features = AudioFeatures(config=config).run(path_wav)
+    folder_out = os.path.join(
+        './NeuralVoicePuppetry/Audio2ExpressionNet/Inference/datasets/{}'.format(
+            folder_nvp), file_id, "audio_feature")
+
+    # Save features
+    mkdir(folder_out)
+    for i, feature in enumerate(features):
+        fn_out = "%05d.deepspeech.npy" % i
+        np.save(os.path.join(folder_out, fn_out), feature)
+    print("Written {} files to '{}'".format(features.shape[0], folder_out))
diff --git a/motion-gan-pipeline/preprocessing/edge_creation/__init__.py b/motion-gan-pipeline/preprocessing/edge_creation/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/motion-gan-pipeline/preprocessing/edge_creation/keypoint2image.py b/motion-gan-pipeline/preprocessing/edge_creation/keypoint2image.py
new file mode 100644
index 0000000..89e6188
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/edge_creation/keypoint2image.py
@@ -0,0 +1,196 @@
+import os.path
+from PIL import Image
+import numpy as np
+import json
+import glob
+from scipy.optimize import curve_fit
+import warnings
+
+def func(x, a, b, c):
+    return a * x**2 + b * x + c
+
+def linear(x, a, b):
+    return a * x + b
+
+def setColor(im, yy, xx, color):
+    if len(im.shape) == 3:
+        if (im[yy, xx] == 0).all():
+            im[yy, xx, 0], im[yy, xx, 1], im[yy, xx, 2] = color[0], color[1], color[2]
+        else:
+            im[yy, xx, 0] = ((im[yy, xx, 0].astype(float) + color[0]) / 2).astype(np.uint8)
+            im[yy, xx, 1] = ((im[yy, xx, 1].astype(float) + color[1]) / 2).astype(np.uint8)
+            im[yy, xx, 2] = ((im[yy, xx, 2].astype(float) + color[2]) / 2).astype(np.uint8)
+    else:
+        im[yy, xx] = color[0]
+
+def drawEdge(im, x, y, bw=1, color=(255,255,255), draw_end_points=False):
+    if x is not None and x.size:
+        h, w = im.shape[0], im.shape[1]
+        # edge
+        for i in range(-bw, bw):
+            for j in range(-bw, bw):
+                yy = np.maximum(0, np.minimum(h-1, y+i))
+                xx = np.maximum(0, np.minimum(w-1, x+j))
+                setColor(im, yy, xx, color)
+
+        # edge endpoints
+        if draw_end_points:
+            for i in range(-bw*2, bw*2):
+                for j in range(-bw*2, bw*2):
+                    if (i**2) + (j**2) < (4 * bw**2):
+                        yy = np.maximum(0, np.minimum(h-1, np.array([y[0], y[-1]])+i))
+                        xx = np.maximum(0, np.minimum(w-1, np.array([x[0], x[-1]])+j))
+                        setColor(im, yy, xx, color)
+
+def interpPoints(x, y):
+    if abs(x[:-1] - x[1:]).max() < abs(y[:-1] -
y[1:]).max(): + curve_y, curve_x = interpPoints(y, x) + if curve_y is None: + return None, None + else: + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + if len(x) < 3: + popt, _ = curve_fit(linear, x, y) + else: + popt, _ = curve_fit(func, x, y) + if abs(popt[0]) > 1: + return None, None + if x[0] > x[-1]: + x = list(reversed(x)) + y = list(reversed(y)) + curve_x = np.linspace(int(x[0]), int(x[-1]), int((x[-1]-x[0]))) + if len(x) < 3: + curve_y = linear(curve_x, *popt) + else: + curve_y = func(curve_x, *popt) + return curve_x.astype(int), curve_y.astype(int) + +def read_keypoints(json_input, size, random_drop_prob=0, remove_face_labels=False, basic_point_only=False): + with open(json_input, encoding='utf-8') as f: + keypoint_dicts = json.loads(f.read())["people"] + + edge_lists = define_edge_lists(basic_point_only) + w, h = size + pose_img = np.zeros((h, w, 3), np.uint8) + for keypoint_dict in keypoint_dicts: + pose_pts = np.array(keypoint_dict["pose_keypoints_2d"]).reshape(25, 3) + face_pts = np.array(keypoint_dict["face_keypoints_2d"]).reshape(70, 3) + hand_pts_l = np.array(keypoint_dict["hand_left_keypoints_2d"]).reshape(21, 3) + hand_pts_r = np.array(keypoint_dict["hand_right_keypoints_2d"]).reshape(21, 3) + pts = [extract_valid_keypoints(pts, edge_lists) for pts in [pose_pts, face_pts, hand_pts_l, hand_pts_r]] + pose_img += connect_keypoints(pts, edge_lists, size, random_drop_prob, remove_face_labels, basic_point_only) + return pose_img + +def extract_valid_keypoints(pts, edge_lists): + pose_edge_list, _, hand_edge_list, _, face_list = edge_lists + p = pts.shape[0] + thre = 0.1 if p == 70 else 0.01 + output = np.zeros((p, 2)) + + if p == 70: # face + for edge_list in face_list: + for edge in edge_list: + if (pts[edge, 2] > thre).all(): + output[edge, :] = pts[edge, :2] + elif p == 21: # hand + for edge in hand_edge_list: + if (pts[edge, 2] > thre).all(): + output[edge, :] = pts[edge, :2] + else: # pose + valid = (pts[:, 2] > thre) + output[valid, :] = pts[valid, :2] + + return output + +def connect_keypoints(pts, edge_lists, size, random_drop_prob, remove_face_labels, basic_point_only): + pose_pts, face_pts, hand_pts_l, hand_pts_r = pts + w, h = size + output_edges = np.zeros((h, w, 3), np.uint8) + pose_edge_list, pose_color_list, hand_edge_list, hand_color_list, face_list = edge_lists + + if random_drop_prob > 0 and remove_face_labels: + # add random noise to keypoints + pose_pts[[0,15,16,17,18], :] += 5 * np.random.randn(5,2) + face_pts[:,0] += 2 * np.random.randn() + face_pts[:,1] += 2 * np.random.randn() + + ### pose + for i, edge in enumerate(pose_edge_list): + x, y = pose_pts[edge, 0], pose_pts[edge, 1] + if (np.random.rand() > random_drop_prob) and (0 not in x): + curve_x, curve_y = interpPoints(x, y) + drawEdge(output_edges, curve_x, curve_y, bw=3, color=pose_color_list[i], draw_end_points=True) + + if not basic_point_only: + ### hand + for hand_pts in [hand_pts_l, hand_pts_r]: # for left and right hand + if np.random.rand() > random_drop_prob: + for i, edge in enumerate(hand_edge_list): # for each finger + for j in range(0, len(edge)-1): # for each part of the finger + sub_edge = edge[j:j+2] + x, y = hand_pts[sub_edge, 0], hand_pts[sub_edge, 1] + if 0 not in x: + line_x, line_y = interpPoints(x, y) + drawEdge(output_edges, line_x, line_y, bw=1, color=hand_color_list[i], draw_end_points=True) + + ### face + edge_len = 2 + if (np.random.rand() > random_drop_prob): + for edge_list in face_list: + for edge in edge_list: + for i in range(0, max(1, 
len(edge)-1), edge_len-1): + sub_edge = edge[i:i+edge_len] + x, y = face_pts[sub_edge, 0], face_pts[sub_edge, 1] + if 0 not in x: + curve_x, curve_y = interpPoints(x, y) + drawEdge(output_edges, curve_x, curve_y, draw_end_points=True) + + return output_edges + +def define_edge_lists(basic_point_only): + ### pose + pose_edge_list = [] + pose_color_list = [] + if not basic_point_only: + pose_edge_list += [[17, 15], [15, 0], [ 0, 16], [16, 18]] # head + pose_color_list += [[153, 0,153], [153, 0,102], [102, 0,153], [ 51, 0,153]] + + pose_edge_list += [ + [ 0, 1], [ 1, 8], # body + [ 1, 2], [ 2, 3], [ 3, 4], # right arm + [ 1, 5], [ 5, 6], [ 6, 7], # left arm + [ 8, 9], [ 9, 10], [10, 11], [11, 24], [11, 22], [22, 23], # right leg + [ 8, 12], [12, 13], [13, 14], [14, 21], [14, 19], [19, 20] # left leg + ] + pose_color_list += [ + [153, 0, 51], [153, 0, 0], + [153, 51, 0], [153,102, 0], [153,153, 0], + [102,153, 0], [ 51,153, 0], [ 0,153, 0], + [ 0,153, 51], [ 0,153,102], [ 0,153,153], [ 0,153,153], [ 0,153,153], [ 0,153,153], + [ 0,102,153], [ 0, 51,153], [ 0, 0,153], [ 0, 0,153], [ 0, 0,153], [ 0, 0,153] + ] + + ### hand + hand_edge_list = [ + [0, 1, 2, 3, 4], + [0, 5, 6, 7, 8], + [0, 9, 10, 11, 12], + [0, 13, 14, 15, 16], + [0, 17, 18, 19, 20] + ] + hand_color_list = [ + [204,0,0], [163,204,0], [0,204,82], [0,82,204], [163,0,204] + ] + + ### face + face_list = [ + #[range(0, 17)], # face + [range(17, 22)], # left eyebrow + [range(22, 27)], # right eyebrow + [range(27, 31), range(31, 36)], # nose + [[36,37,38,39], [39,40,41,36]], # left eye + [[42,43,44,45], [45,46,47,42]], # right eye + [range(48, 55), [54,55,56,57,58,59,48]], # mouth + ] + return pose_edge_list, pose_color_list, hand_edge_list, hand_color_list, face_list \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/edge_creation/utils.py b/motion-gan-pipeline/preprocessing/edge_creation/utils.py new file mode 100644 index 0000000..030e660 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/edge_creation/utils.py @@ -0,0 +1,455 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import numpy as np +import cv2 as cv2 +from matplotlib import pyplot as plt +from edge_creation.keypoint2image import interpPoints, drawEdge +import PIL +from PIL import Image, ImageDraw +import torch +from skimage import feature +from logging import raiseExceptions +import torchvision.transforms as transforms +import os +from scipy.ndimage import shift +from tqdm import tqdm +from math import floor + + +def read_keypoints(A_path, size): + # mapping from keypoints to face part + part_list = [[list(range(0, 17)) + list(range(68, 83)) + [0]], # face + [range(17, 22)], # right eyebrow + [range(22, 27)], # left eyebrow + [[28, 31], range(31, 36), [35, 28]], # nose + [[36,37,38,39], [39,40,41,36]], # right eye + [[42,43,44,45], [45,46,47,42]], # left eye + [range(48, 55), [54,55,56,57,58,59,48]], # mouth + [range(60, 65), [64,65,66,67,60]] # tongue + ] + label_list = [50, 100, 100, 125, 150, 150, 200, 250] # labeling for different facial parts + keypoints = np.loadtxt(A_path)[:,:2] + # keypoints = np.loadtxt(A_path, delimiter=' ') + + # add upper half face by symmetry + pts = keypoints[:17, :].astype(np.int32) + baseline_y = (pts[0,1] + pts[-1,1]) / 2 + upper_pts = pts[1:-1,:].copy() + upper_pts[:,1] = baseline_y + (baseline_y-upper_pts[:,1]) * 2 // 3 + keypoints = np.vstack((keypoints, upper_pts[::-1,:])) + + # label map for facial part + w, h = size + part_labels = 
np.zeros((h, w), np.uint8) + for p, edge_list in enumerate(part_list): + indices = [item for sublist in edge_list for item in sublist] + pts = keypoints[indices, :].astype(np.int32) + cv2.fillPoly(part_labels, pts=[pts], color=label_list[p]) + + return keypoints, part_list, part_labels + +def read_keypoints_forehead(A_path, size): + # mapping from keypoints to face part + part_list = [[list(range(0, 17)) + list(range(68, 77)) + [0]], # face + [range(17, 22)], # right eyebrow + [range(22, 27)], # left eyebrow + [[28, 31], range(31, 36), [35, 28]], # nose + [[36,37,38,39], [39,40,41,36]], # right eye + [[42,43,44,45], [45,46,47,42]], # left eye + [range(48, 55), [54,55,56,57,58,59,48]], # mouth + [range(60, 65), [64,65,66,67,60]] # tongue + ] + label_list = [50, 100, 100, 125, 150, 150, 200, 250] # labeling for different facial parts + keypoints = np.loadtxt(A_path)[:,:2] + + # label map for facial part + w, h = size + part_labels = np.zeros((h, w), np.uint8) + for p, edge_list in enumerate(part_list): + indices = [item for sublist in edge_list for item in sublist] + pts = keypoints[indices, :].astype(np.int32) + cv2.fillPoly(part_labels, pts=[pts], color=label_list[p]) + + return keypoints, part_list, part_labels + +def draw_face_edges(keypoints, part_list, size): + w, h = size + edge_len = 3 # interpolate 3 keypoints to form a curve when drawing edges + # edge map for face region from keypoints + im_edges = np.zeros((h, w), np.uint8) # edge map for all edges + dist_tensor = 0 + e = 1 + for edge_list in part_list: + for edge in edge_list: + for i in range(0, max(1, len(edge)-1), edge_len-1): # divide a long edge into multiple small edges when drawing + sub_edge = edge[i:i+edge_len] + x = keypoints[sub_edge, 0] + y = keypoints[sub_edge, 1] + + curve_x, curve_y = interpPoints(x, y) # interp keypoints to get the curve shape + drawEdge(im_edges, curve_x, curve_y) + + return im_edges, dist_tensor + +def draw_body_edges(keypoints, size): + w, h = size + body_edges = Image.fromarray(np.zeros((h, w), np.uint8)) # edge map for all edges + + # left to right shoulder + a = keypoints[11][:-2]# left shoulder + b = keypoints[12][:-2] # right shoulder + + # create line image + tmp1 = ImageDraw.Draw(body_edges) + shape = [a[0], a[1], b[0], b[1]] + tmp1.line(shape, fill=(255), width = 3) + + # mid to nose + c = keypoints[0][:-2] + d = (a + b) / 2 + shape = [c[0], c[1], d[0], d[1]] + tmp1.line(shape, fill=(255), width = 3) + + # left to right hips + e = keypoints[23][:-2]# left hip + f = keypoints[24][:-2] # right hip + + # create line image + shape = [a[0], a[1], e[0], e[1]] + tmp1.line(shape, fill=(255), width = 3) + shape = [b[0], b[1], f[0], f[1]] + tmp1.line(shape, fill=(255), width = 3) + + # Cross for content + shape = [a[0], a[1], f[0], f[1]] + tmp1.line(shape, fill=(255), width = 3) + shape = [b[0], b[1], e[0], e[1]] + tmp1.line(shape, fill=(255), width = 3) + + return body_edges + +def draw_keypoints(img_path, keypoints, color=(255, 0, 0)): + tmp = Image.open(img_path) + size = tmp.size + + for point in keypoints: + margin = (max(size) // 500) + 3 + ldmks = ([point[0] - margin, point[1] - margin, point[0] + margin, point[1] + margin]) + draw = ImageDraw.Draw(tmp) + draw.ellipse(ldmks, fill=color) + + return tmp + +def get_crop_coords(keypoints, size): + min_y, max_y = keypoints[:,1].min(), keypoints[:,1].max() + min_x, max_x = keypoints[:,0].min(), keypoints[:,0].max() + xc = (min_x + max_x) // 2 + yc = (min_y*3 + max_y) // 4 + h = w = (max_x - min_x) * 2.5 + xc = min(max(0, xc - w//2) + w, 
size[0]) - w//2
+    yc = min(max(0, yc - h//2) + h, size[1]) - h//2
+    min_x, max_x = xc - w//2, xc + w//2
+    min_y, max_y = yc - h//2, yc + h//2
+    return [int(min_y), int(max_y), int(min_x), int(max_x)]
+
+def crop(img, ext_values):
+    if isinstance(img, np.ndarray):
+        return img[ext_values[0]:ext_values[1], ext_values[2]:ext_values[3]]
+    else:
+        return img.crop((ext_values[2], ext_values[0], ext_values[3], ext_values[1]))
+
+def get_img_params(size, loadSize=320):
+    w, h = size
+    new_h, new_w = h, w
+
+    new_w = loadSize
+    new_h = loadSize * h // w
+
+    # Round both dimensions to multiples of 4.
+    new_w = int(round(new_w / 4)) * 4
+    new_h = int(round(new_h / 4)) * 4
+
+    return {'new_size': (new_w, new_h)}
+
+def get_transform(params, method=Image.BICUBIC, normalize=True, toTensor=True):
+    transform_list = []
+    ### resize input image
+    transform_list.append(transforms.Lambda(lambda img: __scale_image(img, params['new_size'], method)))
+
+    if toTensor:
+        transform_list += [transforms.ToTensor()]
+    if normalize:
+        transform_list += [transforms.Normalize((0.5, 0.5, 0.5),
+                                                (0.5, 0.5, 0.5))]
+    return transforms.Compose(transform_list)
+
+
+def __scale_image(img, size, method=Image.BICUBIC):
+    w, h = size
+    return img.resize((w, h), method)
+
+
+def convert_to_rgb(image):
+    if image.mode == 'RGBA':
+        image.load()
+        image_new = Image.new("RGB", image.size, (255, 255, 255))
+        image_new.paste(image, mask=image.split()[3])
+    elif image.mode == 'RGB':
+        image_new = image
+    else:
+        # logging.raiseExceptions is a bool, not callable; raise a real error.
+        raise ValueError('Non-compatible image format!')
+    return image_new
+
+def get_edge_predicted(img_num, out_dir, landmark_name, predicted_ldk, im_size):
+    # Used at inference time when combining generated landmarks and predicted shoulders
+
+    params = get_img_params(im_size, loadSize=np.max(im_size))
+    transform_scaleA = get_transform(params, method=Image.BILINEAR, normalize=False, toTensor=False)
+
+    # Draw face
+    # keypoints, part_list, part_labels = read_keypoints(landmark_name, im_size)
+    keypoints, part_list, part_labels = read_keypoints_forehead(landmark_name, im_size)
+    im_edges, dist_tensor = draw_face_edges(keypoints, part_list, im_size)
+
+    # body edges
+    w, h = im_size
+    body_edges = Image.fromarray(np.zeros((h, w), np.uint8))  # edge map for all edges
+
+    # left to right shoulder
+    a = predicted_ldk[0]  # left shoulder
+    b = predicted_ldk[1]  # right shoulder
+
+    # create line image
+    tmp1 = ImageDraw.Draw(body_edges)
+    shape = [a[0], a[1], b[0], b[1]]
+    tmp1.line(shape, fill=(255), width=3)
+
+    # mid to nose
+    c = keypoints[33][:2]
+    d = (a + b) / 2
+    shape = [c[0], c[1], d[0], d[1]]
+    tmp1.line(shape, fill=(255), width=3)
+
+    # left to right hips
+    e = predicted_ldk[2]  # left hip
+    f = predicted_ldk[3]  # right hip
+
+    # create line image
+    shape = [a[0], a[1], e[0], e[1]]
+    tmp1.line(shape, fill=(255), width=3)
+    shape = [b[0], b[1], f[0], f[1]]
+    tmp1.line(shape, fill=(255), width=3)
+
+    # Cross for content
+    shape = [a[0], a[1], f[0], f[1]]
+    tmp1.line(shape, fill=(255), width=3)
+    shape = [b[0], b[1], e[0], e[1]]
+    tmp1.line(shape, fill=(255), width=3)
+
+    # clean
+    body_edges = np.array(body_edges) * (part_labels == 0)  # remove edges within face
+    im_edges += body_edges
+
+    # Transform and save
+    edge_path = os.path.join(out_dir, 'edges')
+    os.makedirs(edge_path, exist_ok=True)
+    edge_image = transform_scaleA(Image.fromarray(im_edges))
+    edge_image.save(os.path.join(edge_path, '%05d.png' % img_num))
+
+def get_edge_image_mixed(out_dir, img_name, landmark_name, tr_landmark_name, crop_coords, img_num):
+    # Used at inference time when combining
generated landmarks and original frame's edges + + # Flag for cropping + crop_flag = False + + # Make edges from generated landmarks and closest image + img = Image.open(img_name) + img = convert_to_rgb(img) + img_size = img.size + points = np.loadtxt(landmark_name)[:,:2] + tr_points = np.loadtxt(tr_landmark_name, delimiter=' ') + face_dist_x = np.mean(tr_points[31:36, 0]-points[31:36, 0]) + face_dist_y = np.mean(tr_points[31:36, 1]-points[31:36, 1]) + + if crop_flag: + params = get_img_params(crop(img, crop_coords).size) + + else: + params = get_img_params(img.size, loadSize=np.max(img.size)) + + transform_scaleA = get_transform(params, method=Image.BILINEAR, normalize=False, toTensor=False) + transform_label = get_transform(params, method=Image.NEAREST, normalize=False, toTensor=False) + transform_scaleB = get_transform(params, normalize=False, toTensor=False) + + # keypoints, part_list, part_labels = read_keypoints(landmark_name, img_size) + keypoints, part_list, part_labels = read_keypoints_forehead(landmark_name, img_size) + + im_edges, dist_tensor = draw_face_edges(keypoints, part_list, img_size) + + edges = feature.canny(shift(np.array(img.convert('L')), (-face_dist_y, -face_dist_x), mode='nearest')) + # edges = feature.canny(np.array(img.convert('L'))) + edges = edges * (part_labels == 0) # remove edges within face + im_edges += (edges * 255).astype(np.uint8) + + if crop_flag: + edge_image = transform_scaleA(Image.fromarray(crop(im_edges, crop_coords))) + label_image = transform_label(Image.fromarray(crop(part_labels.astype(np.uint8), crop_coords))) + img_image = transform_scaleB(crop(img, crop_coords)) + + else: + edge_image = transform_scaleA(Image.fromarray(im_edges)) + label_image = transform_label(Image.fromarray(part_labels.astype(np.uint8))) + img_image = transform_scaleB(img) + + edge_path = os.path.join(out_dir, 'edges') + os.makedirs(edge_path, exist_ok=True) + edge_image.save(os.path.join(edge_path, '%05d.png' % img_num)) + +def get_edge_image(index, img_path, tracked_landmark_path, edges_dir, cropped_dir, im_size, crop_coords): + # Used in preprocessing when combining tracked landmarks with frame's edges + + # Flag for cropping + crop_flag = False + + # Load image + img = Image.open(img_path) + img = convert_to_rgb(img) + + # Make crop parameters + if crop_flag: + params = get_img_params(crop(img, crop_coords).size) + + else: + params = get_img_params(img.size, loadSize=np.max(img.size)) + + transform_scaleA = get_transform(params, method=Image.BILINEAR, normalize=False, toTensor=False) + transform_scaleB = get_transform(params, normalize=False, toTensor=False) + + # Draw face + # keypoints, part_list, part_labels = read_keypoints(tracked_landmark_path, im_size) + keypoints, part_list, part_labels = read_keypoints_forehead(tracked_landmark_path, im_size) + im_edges, dist_tensor = draw_face_edges(keypoints, part_list, im_size) + + # Other Edges + edges = feature.canny(np.array(img.convert('L'))) + edges = edges * (part_labels == 0) # remove edges within face + im_edges += (edges * 255).astype(np.uint8) + + # Transform + if crop_flag: + edge_image = transform_scaleA(Image.fromarray(crop(im_edges, crop_coords))) + img_image = transform_scaleB(crop(img, crop_coords)) + + + else: + edge_image = transform_scaleA(Image.fromarray(im_edges)) + img_image = transform_scaleB(img) + + # Save edges and cropped target for GAN training + edge_image.save(os.path.join(edges_dir, '%05d.png' % index)) + img_image.save(os.path.join(cropped_dir, '%05d.png' % index)) + +def 
get_edge_image_body(index, img_path, tracked_landmark_path, edges_dir, cropped_dir, im_size, crop_coords, body_landmark_path): + # Used in preprocessing when combining tracked landmarks with tracked body edges + + # Flag for cropping + crop_flag = False + + # Load image + img = Image.open(img_path) + img = convert_to_rgb(img) + + # Make crop parameters + if crop_flag: + params = get_img_params(crop(img, crop_coords).size) + + else: + params = get_img_params(img.size, loadSize=np.max(img.size)) + + transform_scaleA = get_transform(params, method=Image.BILINEAR, normalize=False, toTensor=False) + transform_scaleB = get_transform(params, normalize=False, toTensor=False) + + # Draw face + # keypoints, part_list, part_labels = read_keypoints(tracked_landmark_path, im_size) + keypoints, part_list, part_labels = read_keypoints_forehead(tracked_landmark_path, im_size) + im_edges, dist_tensor = draw_face_edges(keypoints, part_list, im_size) + + # body edges + body_keypoints = np.load(body_landmark_path) + body_edges = np.array(draw_body_edges(body_keypoints, im_size)) + body_edges = body_edges * (part_labels == 0) # remove edges within face + im_edges += body_edges + + # Save landmarks for DEBUGGING + debug = False + if debug: + debug_dir = os.path.join(edges_dir, 'debug') + os.makedirs(debug_dir, exist_ok=True) + debug_path = os.path.join(debug_dir, '%05d.png' % index) + tmp1 = draw_keypoints(img_path, keypoints, color=(255, 0, 0)) + Image.fromarray(np.array(tmp1)).save(debug_path) + tmp2 = draw_keypoints(debug_path, body_keypoints, color=(0, 0, 255)) + Image.fromarray(np.array(tmp2)).save(debug_path) + + # Transform + if crop_flag: + edge_image = transform_scaleA(Image.fromarray(crop(im_edges, crop_coords))) + img_image = transform_scaleB(crop(img, crop_coords)) + + + else: + edge_image = transform_scaleA(Image.fromarray(im_edges)) + img_image = transform_scaleB(img) + + # Save edges and cropped target for GAN training + edge_image.save(os.path.join(edges_dir, '%05d.png' % index)) + img_image.save(os.path.join(cropped_dir, '%05d.png' % index)) + +def make_edges(mattdir, landmarkdir, edges_dir, cropped_dir): + + # Compute crop size + img = Image.open(os.path.join(mattdir, '%05d.png' % 0)) + im_size = img.size + + points = np.mean([np.loadtxt(os.path.join(landmarkdir, ldk))[:,:2] for ldk in os.listdir(landmarkdir)], axis=0) + crop_coords = get_crop_coords(points, im_size) + + for index in tqdm(range(len(os.listdir(mattdir)))): + + img_path = os.path.join(mattdir, '%05d.png' % index) + tracked_landmark_path = os.path.join(landmarkdir, '%05d.lms' % index) + + get_edge_image(index, img_path, tracked_landmark_path, edges_dir, cropped_dir, im_size, crop_coords) + +def make_edges_body(mattdir, landmarkdir, edges_dir, cropped_dir, body_dir, split_val=0.0): + # Compute crop size + img = Image.open(os.path.join(mattdir, '%05d.png' % 0)) + im_size = img.size + + points = np.mean([np.loadtxt(os.path.join(landmarkdir, ldk))[:,:2] for ldk in os.listdir(landmarkdir)], axis=0) + crop_coords = get_crop_coords(points, im_size) + + num_frames = len(os.listdir(mattdir)) + num_train = floor(num_frames * (1.0 - split_val)) + + # train + for index in tqdm(range(num_train)): + + img_path = os.path.join(mattdir, '%05d.png' % index) + tracked_landmark_path = os.path.join(landmarkdir, '%05d.lms' % index) + body_landmark_path = os.path.join(body_dir, '%05d.npy' % index) + + get_edge_image_body(index, img_path, tracked_landmark_path, edges_dir, cropped_dir, im_size, crop_coords, body_landmark_path) + + # val + for index in 
tqdm(range(num_train, num_frames)): + + img_path = os.path.join(mattdir, '%05d.png' % index) + tracked_landmark_path = os.path.join(landmarkdir, '%05d.lms' % index) + body_landmark_path = os.path.join(body_dir, '%05d.npy' % index) + + os.makedirs(edges_dir + '_val', exist_ok=True) + os.makedirs(cropped_dir + '_val', exist_ok=True) + + get_edge_image_body(index, img_path, tracked_landmark_path, edges_dir + '_val', cropped_dir + '_val', im_size, crop_coords, body_landmark_path) + diff --git a/motion-gan-pipeline/preprocessing/emoca_tracker.py b/motion-gan-pipeline/preprocessing/emoca_tracker.py new file mode 100644 index 0000000..17cd733 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/emoca_tracker.py @@ -0,0 +1,106 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +from gdl_apps.EmotionRecognition.utils.io import load_model +from gdl.datasets.ImageTestDataset import TestData +import gdl +import numpy as np +import os +import torch +from pathlib import Path +import matplotlib.pyplot as plt +from torch.functional import F +from gdl.datasets.AffectNetDataModule import AffectNetExpressions +from gdl.utils.other import get_path_to_assets +from tqdm import tqdm + +def load_dir(lmspath, framepath, start, end): + lmss = [] + imgs_paths = [] + for i in range(start, end): + if os.path.isfile(os.path.join(lmspath, str(i) + '.lms')): + lms = np.loadtxt(os.path.join( + lmspath, str(i) + '.lms'), dtype=np.float32) + lmss.append(lms) + imgs_paths.append(os.path.join(framepath, str(i) + '.jpg')) + lmss = np.stack(lmss) + lmss = torch.as_tensor(lmss).cuda() + return imgs_paths + +class EMOCA_tracker: + def __init__(self): + + model_name = 'ResNet50' + path_to_models = get_path_to_assets() /"EmotionRecognition" + + path_to_models = path_to_models / "image_based_networks" + + self.model = load_model(Path(path_to_models) / model_name) + print(self.model) + self.model.cuda() + self.model.eval() + + def __call__(self, images, tform=None): + + codedict = self.model(images) + + return codedict + + def save_images(self, batch, predictions, output_folder): + # Save the images + + softmax = F.softmax(predictions["expr_classification"]) + top_expr = torch.argmax(softmax, dim=1) + for i in range(len(batch["image"])): + img = batch["image"][i].cpu().detach().numpy() + img = img.transpose(1, 2, 0) + img = img * 255 + img = img.astype(np.uint8) + + plt.figure() + # plot the image with matplotlib + plt.imshow(img) + # write valence and arousal to the image + expr = AffectNetExpressions(int(top_expr[i].item())) + text = "Predicted emotion:\n" + text += f'Arousal: {predictions["arousal"][i].item():.2f} \nValence: {predictions["valence"][i].item():.2f}' + text += f"\nExpression: {expr.name}, {softmax[i][expr.value].item()*100:.2f}%" + plt.title(text) + out_fname = Path(output_folder) / f"{batch['image_name'][i]}.png" + # save the image to the output folder + + # axis off + plt.axis('off') + plt.savefig(out_fname) + plt.close() + + +def emotion_detection(dataset_base, emotion_dir): + ''' + Face tracker using FLAME model. + Used to have geometry prior for nerf sampling. 
+    '''
+
+    id_dir = dataset_base
+    debug_emotions = os.path.join(id_dir, 'debug', 'emotions_imgs')
+    Path(debug_emotions).mkdir(parents=True, exist_ok=True)
+
+    emoca_tracker = EMOCA_tracker()
+
+    # Run deca on all frames
+    testdata = TestData(os.path.join(id_dir, 'frames'), face_detector="fan", max_detection=20)
+
+    for i, data in enumerate(tqdm(testdata)):
+        batch = testdata[i]
+        batch["image"] = batch["image"].cuda()
+        predictions = emoca_tracker(batch)
+        npy_pred = {k: v.cpu().detach().numpy() for k, v in predictions.items()}
+        # Zero-pad the frame index ('%5d' pads with spaces and breaks file ordering).
+        np.save(os.path.join(emotion_dir, '%05d.npy' % i), npy_pred)
+
+        emoca_tracker.save_images(batch, predictions, debug_emotions)
+
+if __name__ == '__main__':
+
+    dataset_base = '/media/apennino/EmotionDetection/Test/Greta/'
+    emotion_dir = '/media/apennino/EmotionDetection/Test/Greta/emotions/'
+    emotion_detection(dataset_base, emotion_dir)
diff --git a/motion-gan-pipeline/preprocessing/face_parsing/79999_iter.pth b/motion-gan-pipeline/preprocessing/face_parsing/79999_iter.pth
new file mode 100644
index 0000000..ca57f32
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/face_parsing/79999_iter.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:468e13ca13a9b43cc0881a9f99083a430e9c0a38abd935431d1c28ee94b26567
+size 53289463
diff --git a/motion-gan-pipeline/preprocessing/face_parsing/logger.py b/motion-gan-pipeline/preprocessing/face_parsing/logger.py
new file mode 100644
index 0000000..d3f9ddc
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/face_parsing/logger.py
@@ -0,0 +1,23 @@
+#!/usr/bin/python
+# -*- encoding: utf-8 -*-
+
+
+import os.path as osp
+import time
+import sys
+import logging
+
+import torch.distributed as dist
+
+
+def setup_logger(logpth):
+    logfile = 'BiSeNet-{}.log'.format(time.strftime('%Y-%m-%d-%H-%M-%S'))
+    logfile = osp.join(logpth, logfile)
+    FORMAT = '%(levelname)s %(filename)s(%(lineno)d): %(message)s'
+    log_level = logging.INFO
+    # Only rank 0 logs at INFO in distributed runs.
+    if dist.is_initialized() and not dist.get_rank() == 0:
+        log_level = logging.ERROR
+    logging.basicConfig(level=log_level, format=FORMAT, filename=logfile)
+    logging.root.addHandler(logging.StreamHandler())
diff --git a/motion-gan-pipeline/preprocessing/face_parsing/model.py b/motion-gan-pipeline/preprocessing/face_parsing/model.py
new file mode 100644
index 0000000..5119e75
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/face_parsing/model.py
@@ -0,0 +1,283 @@
+#!/usr/bin/python
+# -*- encoding: utf-8 -*-
+
+
+import torch
+import torch.nn as nn
+import torch.nn.functional as F
+import torchvision
+
+from .resnet import Resnet18
+# from modules.bn import InPlaceABNSync as BatchNorm2d
+
+
+class ConvBNReLU(nn.Module):
+    def __init__(self, in_chan, out_chan, ks=3, stride=1, padding=1, *args, **kwargs):
+        super(ConvBNReLU, self).__init__()
+        self.conv = nn.Conv2d(in_chan,
+                              out_chan,
+                              kernel_size=ks,
+                              stride=stride,
+                              padding=padding,
+                              bias=False)
+        self.bn = nn.BatchNorm2d(out_chan)
+        self.init_weight()
+
+    def forward(self, x):
+        x = self.conv(x)
+        x = F.relu(self.bn(x))
+        return x
+
+    def init_weight(self):
+        for ly in self.children():
+            if isinstance(ly, nn.Conv2d):
+                nn.init.kaiming_normal_(ly.weight, a=1)
+                if not ly.bias is None: nn.init.constant_(ly.bias, 0)
+
+class BiSeNetOutput(nn.Module):
+    def __init__(self, in_chan, mid_chan, n_classes, *args, **kwargs):
+        super(BiSeNetOutput, self).__init__()
+        self.conv = ConvBNReLU(in_chan, mid_chan, ks=3, stride=1, padding=1)
+        self.conv_out = nn.Conv2d(mid_chan, n_classes, kernel_size=1, bias=False)
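+        # (Explanatory note, shapes assumed from how this head is used below:
+        # a 3x3 conv refines features at mid_chan channels, then the 1x1 conv
+        # maps them to one logit map per class, e.g. BiSeNetOutput(256, 256, 19)
+        # turns (N, 256, h, w) features into (N, 19, h, w) logits, which
+        # BiSeNet.forward upsamples back to the input resolution.)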
self.init_weight() + + def forward(self, x): + x = self.conv(x) + x = self.conv_out(x) + return x + + def init_weight(self): + for ly in self.children(): + if isinstance(ly, nn.Conv2d): + nn.init.kaiming_normal_(ly.weight, a=1) + if not ly.bias is None: nn.init.constant_(ly.bias, 0) + + def get_params(self): + wd_params, nowd_params = [], [] + for name, module in self.named_modules(): + if isinstance(module, nn.Linear) or isinstance(module, nn.Conv2d): + wd_params.append(module.weight) + if not module.bias is None: + nowd_params.append(module.bias) + elif isinstance(module, nn.BatchNorm2d): + nowd_params += list(module.parameters()) + return wd_params, nowd_params + + +class AttentionRefinementModule(nn.Module): + def __init__(self, in_chan, out_chan, *args, **kwargs): + super(AttentionRefinementModule, self).__init__() + self.conv = ConvBNReLU(in_chan, out_chan, ks=3, stride=1, padding=1) + self.conv_atten = nn.Conv2d(out_chan, out_chan, kernel_size= 1, bias=False) + self.bn_atten = nn.BatchNorm2d(out_chan) + self.sigmoid_atten = nn.Sigmoid() + self.init_weight() + + def forward(self, x): + feat = self.conv(x) + atten = F.avg_pool2d(feat, feat.size()[2:]) + atten = self.conv_atten(atten) + atten = self.bn_atten(atten) + atten = self.sigmoid_atten(atten) + out = torch.mul(feat, atten) + return out + + def init_weight(self): + for ly in self.children(): + if isinstance(ly, nn.Conv2d): + nn.init.kaiming_normal_(ly.weight, a=1) + if not ly.bias is None: nn.init.constant_(ly.bias, 0) + + +class ContextPath(nn.Module): + def __init__(self, *args, **kwargs): + super(ContextPath, self).__init__() + self.resnet = Resnet18() + self.arm16 = AttentionRefinementModule(256, 128) + self.arm32 = AttentionRefinementModule(512, 128) + self.conv_head32 = ConvBNReLU(128, 128, ks=3, stride=1, padding=1) + self.conv_head16 = ConvBNReLU(128, 128, ks=3, stride=1, padding=1) + self.conv_avg = ConvBNReLU(512, 128, ks=1, stride=1, padding=0) + + self.init_weight() + + def forward(self, x): + H0, W0 = x.size()[2:] + feat8, feat16, feat32 = self.resnet(x) + H8, W8 = feat8.size()[2:] + H16, W16 = feat16.size()[2:] + H32, W32 = feat32.size()[2:] + + avg = F.avg_pool2d(feat32, feat32.size()[2:]) + avg = self.conv_avg(avg) + avg_up = F.interpolate(avg, (H32, W32), mode='nearest') + + feat32_arm = self.arm32(feat32) + feat32_sum = feat32_arm + avg_up + feat32_up = F.interpolate(feat32_sum, (H16, W16), mode='nearest') + feat32_up = self.conv_head32(feat32_up) + + feat16_arm = self.arm16(feat16) + feat16_sum = feat16_arm + feat32_up + feat16_up = F.interpolate(feat16_sum, (H8, W8), mode='nearest') + feat16_up = self.conv_head16(feat16_up) + + return feat8, feat16_up, feat32_up # x8, x8, x16 + + def init_weight(self): + for ly in self.children(): + if isinstance(ly, nn.Conv2d): + nn.init.kaiming_normal_(ly.weight, a=1) + if not ly.bias is None: nn.init.constant_(ly.bias, 0) + + def get_params(self): + wd_params, nowd_params = [], [] + for name, module in self.named_modules(): + if isinstance(module, (nn.Linear, nn.Conv2d)): + wd_params.append(module.weight) + if not module.bias is None: + nowd_params.append(module.bias) + elif isinstance(module, nn.BatchNorm2d): + nowd_params += list(module.parameters()) + return wd_params, nowd_params + + +### This is not used, since I replace this with the resnet feature with the same size +class SpatialPath(nn.Module): + def __init__(self, *args, **kwargs): + super(SpatialPath, self).__init__() + self.conv1 = ConvBNReLU(3, 64, ks=7, stride=2, padding=3) + self.conv2 = ConvBNReLU(64, 64, 
ks=3, stride=2, padding=1) + self.conv3 = ConvBNReLU(64, 64, ks=3, stride=2, padding=1) + self.conv_out = ConvBNReLU(64, 128, ks=1, stride=1, padding=0) + self.init_weight() + + def forward(self, x): + feat = self.conv1(x) + feat = self.conv2(feat) + feat = self.conv3(feat) + feat = self.conv_out(feat) + return feat + + def init_weight(self): + for ly in self.children(): + if isinstance(ly, nn.Conv2d): + nn.init.kaiming_normal_(ly.weight, a=1) + if not ly.bias is None: nn.init.constant_(ly.bias, 0) + + def get_params(self): + wd_params, nowd_params = [], [] + for name, module in self.named_modules(): + if isinstance(module, nn.Linear) or isinstance(module, nn.Conv2d): + wd_params.append(module.weight) + if not module.bias is None: + nowd_params.append(module.bias) + elif isinstance(module, nn.BatchNorm2d): + nowd_params += list(module.parameters()) + return wd_params, nowd_params + + +class FeatureFusionModule(nn.Module): + def __init__(self, in_chan, out_chan, *args, **kwargs): + super(FeatureFusionModule, self).__init__() + self.convblk = ConvBNReLU(in_chan, out_chan, ks=1, stride=1, padding=0) + self.conv1 = nn.Conv2d(out_chan, + out_chan//4, + kernel_size = 1, + stride = 1, + padding = 0, + bias = False) + self.conv2 = nn.Conv2d(out_chan//4, + out_chan, + kernel_size = 1, + stride = 1, + padding = 0, + bias = False) + self.relu = nn.ReLU(inplace=True) + self.sigmoid = nn.Sigmoid() + self.init_weight() + + def forward(self, fsp, fcp): + fcat = torch.cat([fsp, fcp], dim=1) + feat = self.convblk(fcat) + atten = F.avg_pool2d(feat, feat.size()[2:]) + atten = self.conv1(atten) + atten = self.relu(atten) + atten = self.conv2(atten) + atten = self.sigmoid(atten) + feat_atten = torch.mul(feat, atten) + feat_out = feat_atten + feat + return feat_out + + def init_weight(self): + for ly in self.children(): + if isinstance(ly, nn.Conv2d): + nn.init.kaiming_normal_(ly.weight, a=1) + if not ly.bias is None: nn.init.constant_(ly.bias, 0) + + def get_params(self): + wd_params, nowd_params = [], [] + for name, module in self.named_modules(): + if isinstance(module, nn.Linear) or isinstance(module, nn.Conv2d): + wd_params.append(module.weight) + if not module.bias is None: + nowd_params.append(module.bias) + elif isinstance(module, nn.BatchNorm2d): + nowd_params += list(module.parameters()) + return wd_params, nowd_params + + +class BiSeNet(nn.Module): + def __init__(self, n_classes, *args, **kwargs): + super(BiSeNet, self).__init__() + self.cp = ContextPath() + ## here self.sp is deleted + self.ffm = FeatureFusionModule(256, 256) + self.conv_out = BiSeNetOutput(256, 256, n_classes) + self.conv_out16 = BiSeNetOutput(128, 64, n_classes) + self.conv_out32 = BiSeNetOutput(128, 64, n_classes) + self.init_weight() + + def forward(self, x): + H, W = x.size()[2:] + feat_res8, feat_cp8, feat_cp16 = self.cp(x) # here return res3b1 feature + feat_sp = feat_res8 # use res3b1 feature to replace spatial path feature + feat_fuse = self.ffm(feat_sp, feat_cp8) + + feat_out = self.conv_out(feat_fuse) + feat_out16 = self.conv_out16(feat_cp8) + feat_out32 = self.conv_out32(feat_cp16) + + feat_out = F.interpolate(feat_out, (H, W), mode='bilinear', align_corners=True) + feat_out16 = F.interpolate(feat_out16, (H, W), mode='bilinear', align_corners=True) + feat_out32 = F.interpolate(feat_out32, (H, W), mode='bilinear', align_corners=True) + return feat_out, feat_out16, feat_out32 + + def init_weight(self): + for ly in self.children(): + if isinstance(ly, nn.Conv2d): + nn.init.kaiming_normal_(ly.weight, a=1) + if not ly.bias is 
None: nn.init.constant_(ly.bias, 0) + + def get_params(self): + wd_params, nowd_params, lr_mul_wd_params, lr_mul_nowd_params = [], [], [], [] + for name, child in self.named_children(): + child_wd_params, child_nowd_params = child.get_params() + if isinstance(child, FeatureFusionModule) or isinstance(child, BiSeNetOutput): + lr_mul_wd_params += child_wd_params + lr_mul_nowd_params += child_nowd_params + else: + wd_params += child_wd_params + nowd_params += child_nowd_params + return wd_params, nowd_params, lr_mul_wd_params, lr_mul_nowd_params + + +if __name__ == "__main__": + net = BiSeNet(19) + net.cuda() + net.eval() + in_ten = torch.randn(16, 3, 640, 480).cuda() + out, out16, out32 = net(in_ten) + print(out.shape) + + net.get_params() diff --git a/motion-gan-pipeline/preprocessing/face_parsing/resnet.py b/motion-gan-pipeline/preprocessing/face_parsing/resnet.py new file mode 100644 index 0000000..aa2bf95 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/face_parsing/resnet.py @@ -0,0 +1,109 @@ +#!/usr/bin/python +# -*- encoding: utf-8 -*- + +import torch +import torch.nn as nn +import torch.nn.functional as F +import torch.utils.model_zoo as modelzoo + +# from modules.bn import InPlaceABNSync as BatchNorm2d + +resnet18_url = 'https://download.pytorch.org/models/resnet18-5c106cde.pth' + + +def conv3x3(in_planes, out_planes, stride=1): + """3x3 convolution with padding""" + return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, + padding=1, bias=False) + + +class BasicBlock(nn.Module): + def __init__(self, in_chan, out_chan, stride=1): + super(BasicBlock, self).__init__() + self.conv1 = conv3x3(in_chan, out_chan, stride) + self.bn1 = nn.BatchNorm2d(out_chan) + self.conv2 = conv3x3(out_chan, out_chan) + self.bn2 = nn.BatchNorm2d(out_chan) + self.relu = nn.ReLU(inplace=True) + self.downsample = None + if in_chan != out_chan or stride != 1: + self.downsample = nn.Sequential( + nn.Conv2d(in_chan, out_chan, + kernel_size=1, stride=stride, bias=False), + nn.BatchNorm2d(out_chan), + ) + + def forward(self, x): + residual = self.conv1(x) + residual = F.relu(self.bn1(residual)) + residual = self.conv2(residual) + residual = self.bn2(residual) + + shortcut = x + if self.downsample is not None: + shortcut = self.downsample(x) + + out = shortcut + residual + out = self.relu(out) + return out + + +def create_layer_basic(in_chan, out_chan, bnum, stride=1): + layers = [BasicBlock(in_chan, out_chan, stride=stride)] + for i in range(bnum-1): + layers.append(BasicBlock(out_chan, out_chan, stride=1)) + return nn.Sequential(*layers) + + +class Resnet18(nn.Module): + def __init__(self): + super(Resnet18, self).__init__() + self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, + bias=False) + self.bn1 = nn.BatchNorm2d(64) + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + self.layer1 = create_layer_basic(64, 64, bnum=2, stride=1) + self.layer2 = create_layer_basic(64, 128, bnum=2, stride=2) + self.layer3 = create_layer_basic(128, 256, bnum=2, stride=2) + self.layer4 = create_layer_basic(256, 512, bnum=2, stride=2) + self.init_weight() + + def forward(self, x): + x = self.conv1(x) + x = F.relu(self.bn1(x)) + x = self.maxpool(x) + + x = self.layer1(x) + feat8 = self.layer2(x) # 1/8 + feat16 = self.layer3(feat8) # 1/16 + feat32 = self.layer4(feat16) # 1/32 + return feat8, feat16, feat32 + + def init_weight(self): + state_dict = modelzoo.load_url(resnet18_url) + self_state_dict = self.state_dict() + for k, v in state_dict.items(): + if 'fc' in k: continue + 
diff --git a/motion-gan-pipeline/preprocessing/face_parsing/test.py b/motion-gan-pipeline/preprocessing/face_parsing/test.py
new file mode 100644
index 0000000..de1b5d8
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/face_parsing/test.py
@@ -0,0 +1,94 @@
+#!/usr/bin/python
+# -*- encoding: utf-8 -*-
+import numpy as np
+from .logger import setup_logger
+from .model import BiSeNet
+
+import torch
+
+import os
+import os.path as osp
+
+from PIL import Image
+import torchvision.transforms as transforms
+import cv2
+from pathlib import Path
+from tqdm import tqdm
+
+
+def vis_parsing_maps(im, parsing_anno, stride, save_im=False, save_path='vis_results/parsing_map_on_im.jpg',
+                     img_size=(512, 512)):
+    im = np.array(im)
+    vis_im = im.copy().astype(np.uint8)
+    vis_parsing_anno = parsing_anno.copy().astype(np.uint8)
+    vis_parsing_anno = cv2.resize(
+        vis_parsing_anno, None, fx=stride, fy=stride, interpolation=cv2.INTER_NEAREST)
+    vis_parsing_anno_color = np.zeros(
+        (vis_parsing_anno.shape[0], vis_parsing_anno.shape[1], 3)) + np.array([255, 255, 255])  # + 255
+
+    num_of_class = np.max(vis_parsing_anno)
+    # print(num_of_class)
+    for pi in range(1, 14):
+        index = np.where(vis_parsing_anno == pi)
+        vis_parsing_anno_color[index[0], index[1], :] = np.array([255, 0, 0])
+
+    for pi in range(14, 16):
+        index = np.where(vis_parsing_anno == pi)
+        vis_parsing_anno_color[index[0], index[1], :] = np.array([0, 255, 0])
+    for pi in range(16, 17):
+        index = np.where(vis_parsing_anno == pi)
+        vis_parsing_anno_color[index[0], index[1], :] = np.array([0, 0, 255])
+    for pi in range(17, num_of_class+1):
+        index = np.where(vis_parsing_anno == pi)
+        vis_parsing_anno_color[index[0], index[1], :] = np.array([255, 0, 0])
+
+    vis_parsing_anno_color = vis_parsing_anno_color.astype(np.uint8)
+    index = np.where(vis_parsing_anno == num_of_class-1)
+    vis_im = cv2.resize(vis_parsing_anno_color, img_size,
+                        interpolation=cv2.INTER_NEAREST)
+    if save_im:
+        cv2.imwrite(save_path, vis_im)
+
+
+def evaluate(respth='./res/test_res', dspth='./data', cp='model_final_diss.pth'):
+
+    Path(respth).mkdir(parents=True, exist_ok=True)
+
+    n_classes = 19
+    net = BiSeNet(n_classes=n_classes)
+    net.cuda()
+    net.load_state_dict(torch.load(cp))
+    net.eval()
+
+    to_tensor = transforms.Compose([
+        transforms.ToTensor(),
+        transforms.Normalize((0.485, 0.456, 0.406), (0.229, 0.224, 0.225)),
+    ])
+    processed_num = 0
+    with torch.no_grad():
+        for image_path in tqdm(os.listdir(dspth)):
+            if image_path.endswith('.jpg') or image_path.endswith('.png'):
+                img = Image.open(osp.join(dspth, image_path))
+                ori_size = img.size
+                image = img.resize((512, 512), Image.BILINEAR)
+                image = image.convert("RGB")
+                img = to_tensor(image)
+                img = torch.unsqueeze(img, 0)
+                img = img.cuda()
+                out = net(img)[0]
+                parsing = out.squeeze(0).cpu().numpy().argmax(0)
+                image_path = int(image_path[:-4])
+                image_path = str(image_path) + '.png'
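+                # The int()/str() round-trip above assumes purely numeric frame
+                # basenames (e.g. "000123.jpg") and strips their leading zeros;
+                # vis_parsing_maps below writes the colour map resized to ori_size.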
vis_parsing_maps(image, parsing, stride=1, save_im=True, + save_path=osp.join(respth, image_path), img_size=ori_size) + processed_num = processed_num + 1 + # if processed_num % 100 == 0: + # print('processed parsing', processed_num) + + +def parse_face(outpath, framespath): + + modelpath='preprocessing/face_parsing/79999_iter.pth' + + evaluate(respth=outpath, dspth=framespath, cp=modelpath) diff --git a/motion-gan-pipeline/preprocessing/face_tracking/3DMM/01_MorphableModel.mat b/motion-gan-pipeline/preprocessing/face_tracking/3DMM/01_MorphableModel.mat new file mode 100644 index 0000000..f251485 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/face_tracking/3DMM/01_MorphableModel.mat @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:37b1f0742db356a3b1568a8365a06f5b0fe0ab687ac1c3068c803666cbd4d8e2 +size 240875364 diff --git a/motion-gan-pipeline/preprocessing/face_tracking/3DMM/3DMM_info.npy b/motion-gan-pipeline/preprocessing/face_tracking/3DMM/3DMM_info.npy new file mode 100644 index 0000000..5c27d59 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/face_tracking/3DMM/3DMM_info.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2c360b2dc7d8cf891bd468455451b6aa10eb762615a500b02da5079791bfc9be +size 199586817 diff --git a/motion-gan-pipeline/preprocessing/face_tracking/3DMM/exp_info.npy b/motion-gan-pipeline/preprocessing/face_tracking/3DMM/exp_info.npy new file mode 100644 index 0000000..31a84bc --- /dev/null +++ b/motion-gan-pipeline/preprocessing/face_tracking/3DMM/exp_info.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c3196029ed038eb9a461df6c782125fc4d1ec1545f2e5f361891471136b6cbb6 +size 33264853 diff --git a/motion-gan-pipeline/preprocessing/face_tracking/3DMM/keys_info.npy b/motion-gan-pipeline/preprocessing/face_tracking/3DMM/keys_info.npy new file mode 100644 index 0000000..1be7e71 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/face_tracking/3DMM/keys_info.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:028d3c383bae129c4bdcac880e22c551a5d2436ec7db7a26f5c57148d12469e6 +size 7375 diff --git a/motion-gan-pipeline/preprocessing/face_tracking/3DMM/sub_mesh.obj b/motion-gan-pipeline/preprocessing/face_tracking/3DMM/sub_mesh.obj new file mode 100644 index 0000000..cebb6be --- /dev/null +++ b/motion-gan-pipeline/preprocessing/face_tracking/3DMM/sub_mesh.obj @@ -0,0 +1,103396 @@ +v -0.573122 0.352377 0.812774 +v -0.572659 0.349678 0.813056 +v -0.572158 0.346983 0.81332 +v -0.571678 0.343587 0.813224 +v -0.57113 0.340195 0.813099 +v -0.570474 0.33619 0.81276 +v -0.569748 0.332201 0.812392 +v -0.569059 0.32794 0.811738 +v -0.568278 0.32371 0.811034 +v -0.567402 0.319212 0.810274 +v -0.566446 0.314741 0.809465 +v -0.565382 0.310056 0.808479 +v -0.564235 0.305402 0.807448 +v -0.562943 0.300554 0.806399 +v -0.561593 0.295734 0.805312 +v -0.560277 0.290874 0.804095 +v -0.558954 0.286047 0.802874 +v -0.557431 0.281129 0.801712 +v -0.555884 0.276214 0.800559 +v -0.554336 0.271239 0.799341 +v -0.552761 0.26631 0.79812 +v -0.550788 0.261428 0.797319 +v -0.548829 0.256546 0.796529 +v -0.547686 0.251802 0.79506 +v -0.546603 0.247035 0.793648 +v -0.547062 0.242245 0.790915 +v -0.547628 0.237398 0.788303 +v -0.54871 0.232352 0.785612 +v -0.549935 0.227263 0.783073 +v -0.551272 0.221861 0.780926 +v -0.552785 0.216424 0.778986 +v -0.554675 0.210518 0.777243 +v -0.556726 0.204621 0.775693 +v -0.558964 0.198414 0.774479 +v -0.561359 0.192234 0.773444 +v -0.563743 0.185866 0.772755 +v 
-0.566233 0.179529 0.772172 +v -0.568637 0.172974 0.771869 +v -0.571139 0.166449 0.771666 +v -0.573818 0.159762 0.77145 +v -0.576552 0.153108 0.771294 +v -0.579245 0.146226 0.771207 +v -0.58197 0.139387 0.771177 +v -0.584798 0.132236 0.771044 +v -0.58761 0.125083 0.770903 +v -0.59025 0.117732 0.770823 +v -0.592803 0.110369 0.77068 +v -0.595321 0.102716 0.770434 +v -0.597767 0.0950546 0.770152 +v -0.600173 0.0871051 0.769817 +v -0.602402 0.0791149 0.769366 +v -0.604489 0.0710618 0.768897 +v -0.606467 0.0629812 0.768355 +v -0.608283 0.054659 0.767727 +v -0.609899 0.046316 0.766984 +v -0.611463 0.0379287 0.766181 +v -0.612709 0.0294982 0.765187 +v -0.613953 0.020921 0.764174 +v -0.614931 0.0123513 0.763053 +v -0.615941 0.00375045 0.761859 +v -0.616594 -0.00485023 0.760489 +v -0.617311 -0.0134459 0.759033 +v -0.61773 -0.0220667 0.757371 +v -0.618094 -0.0307328 0.755725 +v -0.618273 -0.0393992 0.753988 +v -0.618415 -0.0480887 0.752317 +v -0.618289 -0.0567499 0.750524 +v -0.618158 -0.0654608 0.748775 +v -0.617794 -0.0741405 0.746925 +v -0.61738 -0.0828093 0.745178 +v -0.616812 -0.0914479 0.743369 +v -0.616221 -0.10003 0.741605 +v -0.61549 -0.108602 0.739756 +v -0.614754 -0.117146 0.737974 +v -0.613835 -0.125653 0.736114 +v -0.612953 -0.13404 0.734337 +v -0.611987 -0.142407 0.732528 +v -0.61105 -0.150596 0.730794 +v -0.609966 -0.1588 0.72896 +v -0.608833 -0.166908 0.727125 +v -0.607759 -0.175036 0.725314 +v -0.606717 -0.183034 0.723479 +v -0.605584 -0.191009 0.72162 +v -0.60446 -0.198835 0.71988 +v -0.603299 -0.20665 0.718119 +v -0.602209 -0.214247 0.716388 +v -0.601088 -0.221836 0.714654 +v -0.599989 -0.229255 0.712967 +v -0.598881 -0.236668 0.711274 +v -0.597741 -0.243936 0.709694 +v -0.596621 -0.251203 0.708131 +v -0.595509 -0.258364 0.70662 +v -0.594444 -0.265544 0.705121 +v -0.593347 -0.272587 0.703702 +v -0.592293 -0.279636 0.702299 +v -0.591242 -0.286539 0.70091 +v -0.590226 -0.293465 0.699568 +v -0.589138 -0.300222 0.698391 +v -0.588087 -0.306995 0.697242 +v -0.587001 -0.313535 0.696223 +v -0.585891 -0.320083 0.695185 +v -0.58478 -0.326454 0.694289 +v -0.583697 -0.332836 0.693405 +v -0.582493 -0.339102 0.692717 +v -0.581312 -0.345366 0.69204 +v -0.580108 -0.351373 0.691554 +v -0.578889 -0.357369 0.691054 +v -0.577669 -0.363304 0.690599 +v -0.576451 -0.36924 0.690133 +v -0.575152 -0.375058 0.689755 +v -0.573878 -0.380878 0.689411 +v -0.572537 -0.38655 0.68918 +v -0.571197 -0.392227 0.688946 +v -0.56985 -0.397613 0.688811 +v -0.56851 -0.403001 0.688667 +v -0.567106 -0.408094 0.688673 +v -0.565746 -0.413197 0.688709 +v -0.564315 -0.417961 0.688806 +v -0.562902 -0.422728 0.688921 +v -0.56149 -0.427175 0.689135 +v -0.560065 -0.431608 0.689344 +v -0.558612 -0.435584 0.689774 +v -0.557189 -0.439567 0.690225 +v -0.555706 -0.443431 0.690669 +v -0.55426 -0.447311 0.691133 +v -0.553135 -0.450369 0.691414 +v -0.552031 -0.453424 0.691697 +v -0.551398 -0.455613 0.691442 +v -0.550803 -0.457807 0.691205 +v -0.572328 0.353562 0.814889 +v -0.571452 0.35027 0.815873 +v -0.570501 0.346979 0.816822 +v -0.569731 0.34325 0.817174 +v -0.568835 0.339538 0.817461 +v -0.568008 0.335476 0.817314 +v -0.567093 0.331433 0.81712 +v -0.566122 0.327042 0.816708 +v -0.565062 0.322704 0.816244 +v -0.563969 0.318175 0.815592 +v -0.562791 0.313684 0.814888 +v -0.561493 0.308982 0.81401 +v -0.560126 0.304305 0.813086 +v -0.558615 0.299453 0.812091 +v -0.557053 0.294629 0.811062 +v -0.555484 0.289779 0.809922 +v -0.55389 0.284955 0.808766 +v -0.552205 0.280134 0.807636 +v -0.550503 0.275325 0.806489 +v -0.548703 0.270444 
0.805362 +v -0.54688 0.265577 0.804239 +v -0.544878 0.26081 0.803289 +v -0.542899 0.256034 0.802362 +v -0.541549 0.251387 0.800951 +v -0.540229 0.246733 0.799573 +v -0.540504 0.24202 0.796897 +v -0.540887 0.237253 0.794348 +v -0.54188 0.232146 0.791609 +v -0.543037 0.226994 0.789046 +v -0.544361 0.221556 0.78686 +v -0.545877 0.216089 0.784902 +v -0.547674 0.210217 0.783281 +v -0.549644 0.204339 0.781878 +v -0.551888 0.198117 0.780787 +v -0.554266 0.191916 0.779852 +v -0.556644 0.185545 0.779352 +v -0.559107 0.179199 0.778949 +v -0.561602 0.17266 0.778806 +v -0.564165 0.166131 0.778729 +v -0.566884 0.159452 0.778759 +v -0.569623 0.152781 0.778813 +v -0.572414 0.145855 0.778958 +v -0.575224 0.138939 0.779123 +v -0.578079 0.131832 0.779254 +v -0.580898 0.124723 0.779351 +v -0.583628 0.117406 0.77944 +v -0.586291 0.110064 0.779477 +v -0.588894 0.102432 0.77949 +v -0.591364 0.0947802 0.779435 +v -0.593828 0.0868665 0.779314 +v -0.596114 0.0788826 0.779045 +v -0.598297 0.0707856 0.778753 +v -0.60034 0.0626516 0.778353 +v -0.602295 0.0543624 0.777879 +v -0.603972 0.0460295 0.777229 +v -0.605613 0.037596 0.776579 +v -0.60696 0.0291255 0.775759 +v -0.608295 0.0205615 0.774882 +v -0.60939 0.0119865 0.773879 +v -0.610431 0.00334361 0.772792 +v -0.611184 -0.00529986 0.771555 +v -0.611871 -0.0139447 0.770155 +v -0.612239 -0.0225982 0.76855 +v -0.612711 -0.0313061 0.767008 +v -0.612937 -0.0400013 0.765336 +v -0.613136 -0.0487446 0.763724 +v -0.612999 -0.057441 0.761944 +v -0.61287 -0.0661884 0.760253 +v -0.612532 -0.0749082 0.758459 +v -0.612125 -0.0836019 0.756663 +v -0.611626 -0.0922965 0.754776 +v -0.611157 -0.100949 0.752997 +v -0.610419 -0.109545 0.7511 +v -0.609675 -0.118079 0.749266 +v -0.608874 -0.126587 0.747431 +v -0.608129 -0.135034 0.745638 +v -0.607231 -0.143454 0.743781 +v -0.606326 -0.151667 0.742031 +v -0.605265 -0.159888 0.740173 +v -0.604242 -0.168043 0.738337 +v -0.603153 -0.176176 0.736485 +v -0.602091 -0.184182 0.734586 +v -0.600986 -0.192172 0.732661 +v -0.599889 -0.200017 0.730861 +v -0.598682 -0.207839 0.729016 +v -0.597593 -0.215463 0.727223 +v -0.596526 -0.223093 0.725431 +v -0.595442 -0.230546 0.723664 +v -0.59434 -0.237997 0.721885 +v -0.593221 -0.24529 0.72024 +v -0.59215 -0.252601 0.718616 +v -0.591019 -0.259781 0.717 +v -0.589975 -0.266976 0.71543 +v -0.588917 -0.274012 0.713913 +v -0.587851 -0.281052 0.712384 +v -0.586824 -0.287981 0.710951 +v -0.585805 -0.294917 0.709519 +v -0.584679 -0.301636 0.708188 +v -0.583562 -0.308373 0.70693 +v -0.582498 -0.314972 0.705776 +v -0.581425 -0.321571 0.70464 +v -0.580298 -0.327928 0.703638 +v -0.579176 -0.334283 0.702631 +v -0.578067 -0.340534 0.701811 +v -0.576915 -0.346777 0.700973 +v -0.575727 -0.352849 0.700275 +v -0.57457 -0.358918 0.699586 +v -0.573383 -0.364831 0.698971 +v -0.57218 -0.37073 0.698345 +v -0.570978 -0.376529 0.697816 +v -0.569775 -0.382329 0.697285 +v -0.568526 -0.387983 0.69686 +v -0.567258 -0.393638 0.696432 +v -0.566014 -0.398993 0.696095 +v -0.56472 -0.404337 0.695727 +v -0.563434 -0.40942 0.695536 +v -0.562094 -0.414492 0.695312 +v -0.560807 -0.419267 0.695193 +v -0.559486 -0.424048 0.695054 +v -0.558165 -0.4285 0.695077 +v -0.556775 -0.432951 0.695067 +v -0.555494 -0.436931 0.695222 +v -0.55416 -0.440905 0.695348 +v -0.5529 -0.444659 0.695522 +v -0.551603 -0.448405 0.695654 +v -0.550722 -0.451542 0.695371 +v -0.549865 -0.454691 0.695092 +v -0.549548 -0.457093 0.694175 +v -0.549216 -0.459488 0.693258 +v -0.57148 0.354754 0.816984 +v -0.570207 0.350859 0.818678 +v -0.568801 0.346978 0.820308 +v -0.567702 
0.342926 0.821084 +v -0.566464 0.338898 0.821788 +v -0.565449 0.334786 0.821816 +v -0.564333 0.330688 0.821795 +v -0.563081 0.326188 0.821621 +v -0.561731 0.321738 0.821387 +v -0.560397 0.317194 0.820833 +v -0.559009 0.312679 0.820236 +v -0.557482 0.307953 0.819461 +v -0.555882 0.303261 0.818643 +v -0.554161 0.298408 0.817701 +v -0.552399 0.293587 0.81673 +v -0.550595 0.288736 0.815676 +v -0.548765 0.283903 0.814602 +v -0.546893 0.279195 0.813483 +v -0.545032 0.27449 0.812373 +v -0.542981 0.269687 0.811322 +v -0.540901 0.264901 0.810272 +v -0.538865 0.260253 0.809162 +v -0.536836 0.255604 0.808059 +v -0.535289 0.251046 0.806716 +v -0.533727 0.246507 0.805361 +v -0.533842 0.241846 0.802768 +v -0.534007 0.237166 0.800232 +v -0.534976 0.231979 0.797516 +v -0.536091 0.226744 0.794968 +v -0.537406 0.221275 0.792765 +v -0.538922 0.215766 0.79078 +v -0.540635 0.209927 0.789293 +v -0.542555 0.204074 0.788061 +v -0.544783 0.197823 0.787062 +v -0.547176 0.191597 0.786243 +v -0.549526 0.185219 0.785916 +v -0.551963 0.178862 0.785685 +v -0.554537 0.172325 0.785698 +v -0.557183 0.165815 0.78579 +v -0.559889 0.159115 0.786018 +v -0.562656 0.152437 0.786304 +v -0.565515 0.145446 0.786646 +v -0.568411 0.138483 0.787014 +v -0.571229 0.131401 0.787345 +v -0.574061 0.124331 0.787697 +v -0.576867 0.117031 0.787944 +v -0.579603 0.109709 0.788134 +v -0.582232 0.102076 0.788372 +v -0.584793 0.0944388 0.788586 +v -0.587262 0.0865636 0.788651 +v -0.589606 0.0786012 0.788578 +v -0.591823 0.0704535 0.788421 +v -0.593947 0.0622774 0.788184 +v -0.595958 0.0540016 0.787802 +v -0.59775 0.045704 0.787295 +v -0.599379 0.0372529 0.786767 +v -0.600798 0.0287433 0.786104 +v -0.602211 0.0201688 0.785317 +v -0.603315 0.0115877 0.784365 +v -0.604368 0.00292718 0.783396 +v -0.605127 -0.00573814 0.782268 +v -0.605895 -0.0144143 0.780999 +v -0.606327 -0.0231087 0.779495 +v -0.606748 -0.0318407 0.777988 +v -0.60698 -0.0405617 0.776376 +v -0.607112 -0.0493036 0.774774 +v -0.607061 -0.0580362 0.773059 +v -0.606914 -0.0668188 0.771403 +v -0.606614 -0.0756154 0.769603 +v -0.606336 -0.0843581 0.767783 +v -0.605854 -0.0930631 0.76587 +v -0.605304 -0.101719 0.764023 +v -0.604653 -0.110361 0.762142 +v -0.604014 -0.118898 0.760305 +v -0.603273 -0.127414 0.758429 +v -0.602545 -0.135914 0.756563 +v -0.601716 -0.144384 0.754647 +v -0.60082 -0.152645 0.752858 +v -0.599802 -0.160904 0.750985 +v -0.598806 -0.169052 0.749143 +v -0.597772 -0.177196 0.747289 +v -0.596768 -0.185203 0.745353 +v -0.595745 -0.193208 0.743406 +v -0.594712 -0.20106 0.741594 +v -0.593562 -0.208902 0.739701 +v -0.592437 -0.216543 0.737825 +v -0.591388 -0.224204 0.735981 +v -0.590376 -0.231712 0.734155 +v -0.589281 -0.239195 0.732293 +v -0.588231 -0.246535 0.730597 +v -0.587117 -0.253864 0.728864 +v -0.586005 -0.261057 0.727163 +v -0.584948 -0.268265 0.725489 +v -0.583908 -0.275304 0.723866 +v -0.58291 -0.282355 0.722258 +v -0.581818 -0.289294 0.720729 +v -0.580783 -0.296248 0.71923 +v -0.579742 -0.302973 0.717854 +v -0.578714 -0.309695 0.716486 +v -0.577626 -0.316323 0.715191 +v -0.576598 -0.322983 0.71397 +v -0.575528 -0.329331 0.712881 +v -0.574471 -0.335689 0.711792 +v -0.573359 -0.341907 0.710774 +v -0.572277 -0.34813 0.709789 +v -0.57114 -0.354269 0.70889 +v -0.570041 -0.360415 0.708016 +v -0.568903 -0.366309 0.707246 +v -0.567792 -0.3722 0.706495 +v -0.566631 -0.377974 0.705776 +v -0.565529 -0.383763 0.705084 +v -0.564337 -0.38939 0.704459 +v -0.563198 -0.395032 0.703859 +v -0.561995 -0.40035 0.703297 +v -0.560829 -0.405679 0.702749 +v -0.559615 -0.410734 0.702321 +v 
-0.558414 -0.415796 0.701902 +v -0.557209 -0.420573 0.701524 +v -0.556055 -0.425359 0.701177 +v -0.554777 -0.429833 0.701003 +v -0.553561 -0.434327 0.700854 +v -0.552381 -0.438291 0.700684 +v -0.551239 -0.442269 0.700526 +v -0.550109 -0.44588 0.700368 +v -0.549062 -0.449523 0.700253 +v -0.548401 -0.452743 0.699391 +v -0.547813 -0.455994 0.698566 +v -0.547713 -0.458578 0.696941 +v -0.547685 -0.461189 0.695347 +v -0.570355 0.356731 0.819883 +v -0.568875 0.352296 0.821961 +v -0.567174 0.347897 0.823938 +v -0.565843 0.34356 0.82494 +v -0.564368 0.339249 0.825872 +v -0.563034 0.334788 0.826207 +v -0.561562 0.330359 0.826464 +v -0.560091 0.325769 0.826377 +v -0.558533 0.321228 0.826229 +v -0.556968 0.316567 0.825795 +v -0.555337 0.311937 0.82532 +v -0.553531 0.307128 0.824639 +v -0.551655 0.302355 0.823911 +v -0.549752 0.297559 0.823006 +v -0.547824 0.292795 0.822098 +v -0.545791 0.288006 0.821112 +v -0.543734 0.283233 0.820105 +v -0.541704 0.278528 0.818973 +v -0.539663 0.273839 0.817841 +v -0.537499 0.269139 0.816772 +v -0.535294 0.264467 0.815686 +v -0.53287 0.259928 0.814797 +v -0.530447 0.255393 0.813919 +v -0.52842 0.250968 0.812761 +v -0.526422 0.246519 0.811637 +v -0.526714 0.241894 0.808818 +v -0.527093 0.237216 0.806096 +v -0.528139 0.2321 0.803244 +v -0.529347 0.226916 0.800582 +v -0.530539 0.221391 0.79855 +v -0.531895 0.215845 0.796713 +v -0.533626 0.209948 0.79527 +v -0.535535 0.20405 0.794036 +v -0.537712 0.197844 0.793217 +v -0.540027 0.191655 0.792557 +v -0.54247 0.1852 0.792254 +v -0.544995 0.17877 0.79204 +v -0.547608 0.172187 0.792193 +v -0.550271 0.165638 0.792425 +v -0.553003 0.158899 0.79287 +v -0.555755 0.152171 0.793322 +v -0.558646 0.145211 0.793846 +v -0.561538 0.138257 0.794375 +v -0.564407 0.131186 0.794925 +v -0.567275 0.12411 0.795474 +v -0.5701 0.116791 0.795986 +v -0.57285 0.109438 0.796438 +v -0.575541 0.101844 0.796911 +v -0.57814 0.0942131 0.797304 +v -0.580685 0.0863308 0.797569 +v -0.583066 0.078395 0.797705 +v -0.585359 0.070238 0.797733 +v -0.587503 0.0620407 0.797639 +v -0.58956 0.0537913 0.797466 +v -0.591359 0.0454979 0.797121 +v -0.593065 0.0370139 0.796715 +v -0.594595 0.0284454 0.79617 +v -0.596034 0.0198298 0.795545 +v -0.597209 0.0111932 0.794758 +v -0.59828 0.00252709 0.793905 +v -0.599083 -0.00614844 0.792891 +v -0.599857 -0.0148804 0.791671 +v -0.600369 -0.02362 0.790287 +v -0.60081 -0.0323809 0.78889 +v -0.60102 -0.0411336 0.78736 +v -0.60124 -0.0498991 0.78582 +v -0.601223 -0.0587 0.784079 +v -0.601133 -0.067533 0.782361 +v -0.600844 -0.0763288 0.780533 +v -0.600576 -0.0851147 0.778746 +v -0.600054 -0.0938575 0.776834 +v -0.599639 -0.102558 0.775041 +v -0.599036 -0.111222 0.773142 +v -0.59837 -0.119776 0.77126 +v -0.597642 -0.128317 0.769346 +v -0.596948 -0.136817 0.767475 +v -0.596149 -0.145291 0.765553 +v -0.59531 -0.153613 0.763704 +v -0.594367 -0.161916 0.761784 +v -0.593453 -0.170095 0.75991 +v -0.592479 -0.178254 0.757993 +v -0.591562 -0.18633 0.756075 +v -0.590547 -0.194362 0.754118 +v -0.589508 -0.202209 0.752246 +v -0.588415 -0.210053 0.750321 +v -0.587435 -0.217736 0.748457 +v -0.586343 -0.225415 0.746555 +v -0.585298 -0.232937 0.744686 +v -0.58419 -0.24044 0.742784 +v -0.583116 -0.247827 0.740958 +v -0.582044 -0.255214 0.739141 +v -0.581008 -0.26244 0.737395 +v -0.579942 -0.269659 0.735634 +v -0.578918 -0.27671 0.733918 +v -0.577918 -0.283778 0.732212 +v -0.576883 -0.290724 0.730606 +v -0.575801 -0.297655 0.728984 +v -0.57478 -0.304424 0.727483 +v -0.573799 -0.31121 0.72601 +v -0.57278 -0.317836 0.724615 +v -0.571799 -0.32448 
0.723253 +v -0.570717 -0.330857 0.722024 +v -0.569617 -0.337233 0.720795 +v -0.568585 -0.343498 0.719671 +v -0.56754 -0.34976 0.718541 +v -0.566499 -0.355869 0.717491 +v -0.565397 -0.361966 0.716418 +v -0.564313 -0.367923 0.715493 +v -0.563269 -0.373882 0.714583 +v -0.562232 -0.379666 0.713725 +v -0.561125 -0.385419 0.712823 +v -0.560006 -0.391026 0.712007 +v -0.558907 -0.396649 0.711196 +v -0.557859 -0.401984 0.710489 +v -0.556789 -0.40731 0.709743 +v -0.555767 -0.41239 0.70914 +v -0.554615 -0.417437 0.708469 +v -0.553512 -0.422239 0.70787 +v -0.552382 -0.427005 0.707282 +v -0.551316 -0.431491 0.706798 +v -0.550164 -0.435955 0.706275 +v -0.549143 -0.439919 0.705827 +v -0.548136 -0.443906 0.705378 +v -0.54725 -0.447625 0.704844 +v -0.546358 -0.451341 0.704306 +v -0.54588 -0.45466 0.703148 +v -0.545389 -0.457967 0.701987 +v -0.545459 -0.460754 0.699956 +v -0.545514 -0.463536 0.697916 +v -0.569264 0.358701 0.822794 +v -0.56754 0.353739 0.825243 +v -0.565516 0.348826 0.827553 +v -0.563972 0.344191 0.828797 +v -0.56225 0.339604 0.829946 +v -0.560576 0.334798 0.830568 +v -0.558772 0.330031 0.831121 +v -0.557089 0.325356 0.831116 +v -0.55531 0.320731 0.831051 +v -0.553481 0.315968 0.83072 +v -0.551591 0.311229 0.83035 +v -0.549506 0.306336 0.829761 +v -0.547361 0.301482 0.829134 +v -0.545269 0.29675 0.828264 +v -0.543164 0.29206 0.827394 +v -0.540914 0.287326 0.826479 +v -0.538653 0.282609 0.825544 +v -0.536461 0.2779 0.824402 +v -0.534206 0.273247 0.823231 +v -0.531923 0.268663 0.822139 +v -0.529592 0.264124 0.821013 +v -0.526784 0.259688 0.820348 +v -0.523925 0.255286 0.819635 +v -0.521469 0.250961 0.81871 +v -0.518999 0.246641 0.817775 +v -0.519537 0.241995 0.814795 +v -0.520089 0.237342 0.811841 +v -0.521271 0.232243 0.808921 +v -0.522564 0.227108 0.806143 +v -0.523634 0.221523 0.80429 +v -0.524875 0.215923 0.802631 +v -0.52663 0.209982 0.801255 +v -0.528576 0.20403 0.800075 +v -0.530677 0.197862 0.799401 +v -0.532896 0.191717 0.798866 +v -0.535428 0.185185 0.798582 +v -0.538051 0.178675 0.798387 +v -0.540666 0.172044 0.798676 +v -0.543349 0.165461 0.799043 +v -0.54606 0.158664 0.799641 +v -0.548794 0.151881 0.80027 +v -0.551673 0.144925 0.800943 +v -0.554578 0.137993 0.801655 +v -0.557497 0.130921 0.802415 +v -0.560418 0.123859 0.803185 +v -0.563222 0.116503 0.803934 +v -0.566011 0.10915 0.804684 +v -0.568723 0.101565 0.805337 +v -0.571367 0.0939534 0.805938 +v -0.573939 0.0860512 0.806362 +v -0.57638 0.0781068 0.80669 +v -0.578672 0.0699936 0.806905 +v -0.580871 0.0618148 0.807007 +v -0.582902 0.0535524 0.806968 +v -0.584777 0.0452606 0.80682 +v -0.586466 0.0367337 0.806471 +v -0.588066 0.0281142 0.806035 +v -0.589453 0.0194544 0.805518 +v -0.590658 0.0107779 0.80488 +v -0.591751 0.00211228 0.804148 +v -0.592571 -0.006556 0.803235 +v -0.593317 -0.0153379 0.802046 +v -0.593891 -0.0241103 0.800752 +v -0.594326 -0.0329094 0.79943 +v -0.594548 -0.0416663 0.798005 +v -0.594739 -0.0504304 0.796492 +v -0.59475 -0.0593101 0.794681 +v -0.594756 -0.0681514 0.792982 +v -0.594563 -0.076965 0.79118 +v -0.594259 -0.0857833 0.789401 +v -0.593859 -0.0945826 0.787569 +v -0.593394 -0.103283 0.78573 +v -0.592853 -0.111962 0.78385 +v -0.59228 -0.120559 0.781989 +v -0.591601 -0.129134 0.780071 +v -0.590902 -0.137625 0.77816 +v -0.590196 -0.146115 0.776258 +v -0.58933 -0.154468 0.77431 +v -0.588445 -0.162805 0.772357 +v -0.58758 -0.171002 0.770415 +v -0.58664 -0.179148 0.768455 +v -0.585697 -0.187255 0.766507 +v -0.584727 -0.195351 0.764551 +v -0.583758 -0.203234 0.762639 +v -0.582733 -0.211098 0.760692 +v 
-0.581746 -0.218781 0.758768 +v -0.580731 -0.226475 0.75684 +v -0.579703 -0.234028 0.754972 +v -0.578636 -0.241568 0.753081 +v -0.577604 -0.249011 0.751164 +v -0.576595 -0.256455 0.749261 +v -0.575528 -0.263692 0.747424 +v -0.574519 -0.270934 0.745619 +v -0.573499 -0.278008 0.743783 +v -0.57256 -0.285098 0.742002 +v -0.571566 -0.292046 0.7403 +v -0.570588 -0.299004 0.738625 +v -0.569582 -0.305826 0.737007 +v -0.568581 -0.312654 0.735403 +v -0.567594 -0.31928 0.733883 +v -0.566671 -0.325922 0.732406 +v -0.565674 -0.332333 0.731061 +v -0.564691 -0.338755 0.729767 +v -0.563641 -0.345049 0.728478 +v -0.562645 -0.351342 0.727231 +v -0.561661 -0.357419 0.726015 +v -0.560697 -0.363498 0.724814 +v -0.55965 -0.36952 0.723721 +v -0.558651 -0.375551 0.72265 +v -0.55764 -0.381307 0.721596 +v -0.55667 -0.387073 0.720558 +v -0.555613 -0.392667 0.71955 +v -0.554685 -0.398295 0.718594 +v -0.553709 -0.403626 0.717668 +v -0.552717 -0.408963 0.71672 +v -0.551697 -0.414003 0.715846 +v -0.550706 -0.419046 0.714978 +v -0.549709 -0.423855 0.714183 +v -0.548758 -0.428659 0.713428 +v -0.547795 -0.43313 0.712586 +v -0.546892 -0.437617 0.711779 +v -0.546017 -0.441594 0.711025 +v -0.545202 -0.445587 0.710298 +v -0.544437 -0.449378 0.709334 +v -0.543727 -0.453178 0.708389 +v -0.543347 -0.456561 0.706909 +v -0.542993 -0.459954 0.705424 +v -0.543198 -0.462927 0.702981 +v -0.543479 -0.465928 0.700561 +v -0.567898 0.360977 0.826266 +v -0.566036 0.355649 0.828899 +v -0.563851 0.350367 0.831386 +v -0.561963 0.345257 0.832972 +v -0.559877 0.34019 0.834456 +v -0.558036 0.335225 0.835161 +v -0.556036 0.330312 0.83577 +v -0.554067 0.325371 0.835909 +v -0.551985 0.320477 0.835983 +v -0.549877 0.315583 0.835698 +v -0.547691 0.310726 0.835358 +v -0.545432 0.305822 0.83475 +v -0.543124 0.300961 0.834097 +v -0.540788 0.296194 0.833291 +v -0.538425 0.291468 0.832463 +v -0.536029 0.286778 0.831544 +v -0.533626 0.282094 0.830618 +v -0.531164 0.277483 0.829595 +v -0.528676 0.272905 0.82857 +v -0.526094 0.268487 0.827537 +v -0.523491 0.264107 0.82648 +v -0.520412 0.259834 0.825799 +v -0.517326 0.255565 0.825115 +v -0.515 0.251304 0.823966 +v -0.512683 0.247025 0.822846 +v -0.512814 0.24248 0.820154 +v -0.513027 0.23789 0.817571 +v -0.514212 0.232692 0.814638 +v -0.515562 0.227424 0.811903 +v -0.516765 0.221862 0.809995 +v -0.518156 0.216259 0.8083 +v -0.519972 0.210331 0.80702 +v -0.521918 0.204407 0.805896 +v -0.523984 0.198175 0.805276 +v -0.526178 0.19196 0.804816 +v -0.528587 0.18533 0.804711 +v -0.531093 0.17873 0.804714 +v -0.533693 0.172007 0.805085 +v -0.536347 0.165333 0.805536 +v -0.539036 0.15853 0.806221 +v -0.541762 0.151743 0.806941 +v -0.544653 0.14475 0.807764 +v -0.547569 0.137784 0.808633 +v -0.55049 0.130703 0.809574 +v -0.553403 0.123617 0.810511 +v -0.556261 0.116254 0.811478 +v -0.559089 0.108896 0.812441 +v -0.561798 0.101364 0.813312 +v -0.564444 0.0937972 0.814118 +v -0.567027 0.0859295 0.814806 +v -0.569496 0.0780255 0.8154 +v -0.57178 0.0699021 0.815819 +v -0.573963 0.0617093 0.816111 +v -0.576057 0.0533902 0.816227 +v -0.577966 0.0450357 0.816208 +v -0.579735 0.0365041 0.816089 +v -0.581299 0.0278708 0.815777 +v -0.58274 0.0191967 0.81538 +v -0.583998 0.0104995 0.814842 +v -0.585108 0.0017618 0.814149 +v -0.585976 -0.00699421 0.813281 +v -0.586766 -0.0157705 0.812255 +v -0.587321 -0.024525 0.8111 +v -0.587803 -0.0333215 0.809903 +v -0.588017 -0.0420896 0.808533 +v -0.58832 -0.050931 0.807039 +v -0.588369 -0.0598504 0.805249 +v -0.588339 -0.0687587 0.803568 +v -0.588198 -0.0776304 0.801847 +v -0.587987 
-0.0864624 0.800071 +v -0.587554 -0.0952485 0.798178 +v -0.587173 -0.104001 0.796412 +v -0.586627 -0.112719 0.794565 +v -0.586043 -0.121346 0.792646 +v -0.585413 -0.129969 0.790711 +v -0.584856 -0.138495 0.788773 +v -0.584131 -0.146994 0.786749 +v -0.583278 -0.155385 0.784855 +v -0.582386 -0.163729 0.782941 +v -0.581531 -0.171956 0.780988 +v -0.580594 -0.180162 0.779003 +v -0.579768 -0.188312 0.77706 +v -0.578871 -0.196445 0.775068 +v -0.577979 -0.204354 0.773118 +v -0.577003 -0.212249 0.771128 +v -0.576045 -0.219997 0.769181 +v -0.575046 -0.227735 0.767212 +v -0.574018 -0.235303 0.765234 +v -0.572958 -0.242874 0.763233 +v -0.571998 -0.250349 0.761285 +v -0.571128 -0.257847 0.759379 +v -0.570159 -0.265124 0.757499 +v -0.569124 -0.272389 0.755614 +v -0.568155 -0.279483 0.753715 +v -0.56719 -0.286571 0.751817 +v -0.566179 -0.293584 0.750055 +v -0.56516 -0.300585 0.748265 +v -0.564223 -0.307411 0.74657 +v -0.563294 -0.314248 0.744904 +v -0.56236 -0.320935 0.743277 +v -0.561406 -0.327609 0.741642 +v -0.560447 -0.334028 0.740204 +v -0.559451 -0.340435 0.738741 +v -0.558488 -0.346766 0.737302 +v -0.557551 -0.353086 0.735888 +v -0.556635 -0.359185 0.734536 +v -0.555724 -0.365288 0.733179 +v -0.554831 -0.3713 0.731854 +v -0.553912 -0.377297 0.730525 +v -0.553037 -0.383078 0.729349 +v -0.552089 -0.388843 0.728125 +v -0.551158 -0.394432 0.72697 +v -0.550236 -0.400036 0.725809 +v -0.549332 -0.405439 0.724675 +v -0.548438 -0.41083 0.723544 +v -0.547606 -0.415891 0.722487 +v -0.546713 -0.42095 0.721399 +v -0.545877 -0.425755 0.720343 +v -0.544994 -0.430535 0.719269 +v -0.544201 -0.435034 0.718193 +v -0.543377 -0.439519 0.717096 +v -0.542692 -0.443622 0.716059 +v -0.541969 -0.447706 0.715007 +v -0.54139 -0.451555 0.713842 +v -0.540756 -0.455404 0.71264 +v -0.540534 -0.458946 0.710774 +v -0.540266 -0.462466 0.708898 +v -0.540596 -0.465636 0.706244 +v -0.540859 -0.46879 0.703554 +v -0.566515 0.363254 0.829727 +v -0.564506 0.357561 0.83255 +v -0.56216 0.351918 0.835204 +v -0.55994 0.346325 0.837145 +v -0.557459 0.340792 0.838944 +v -0.555439 0.335669 0.83972 +v -0.553258 0.330603 0.840401 +v -0.550959 0.325409 0.84066 +v -0.548563 0.320262 0.840854 +v -0.546173 0.315247 0.840606 +v -0.54374 0.310254 0.840328 +v -0.541305 0.305343 0.839692 +v -0.538858 0.300456 0.839033 +v -0.536246 0.295677 0.838262 +v -0.53362 0.29093 0.837468 +v -0.53109 0.286268 0.836556 +v -0.528537 0.28162 0.835622 +v -0.525803 0.277115 0.834726 +v -0.523022 0.272661 0.833792 +v -0.520181 0.268395 0.832843 +v -0.517259 0.264201 0.831808 +v -0.513971 0.260058 0.831161 +v -0.510642 0.255937 0.830501 +v -0.50847 0.251707 0.829157 +v -0.506313 0.247492 0.827824 +v -0.506062 0.243024 0.82544 +v -0.50586 0.238518 0.823156 +v -0.507106 0.233175 0.820271 +v -0.508466 0.227782 0.817548 +v -0.509875 0.222205 0.815657 +v -0.511464 0.216612 0.813988 +v -0.513298 0.210699 0.812775 +v -0.515278 0.204779 0.811725 +v -0.517335 0.198488 0.811197 +v -0.519485 0.19221 0.81079 +v -0.521761 0.18548 0.810849 +v -0.524136 0.178786 0.811014 +v -0.526712 0.171969 0.811485 +v -0.529338 0.165202 0.81201 +v -0.532 0.158379 0.812765 +v -0.534728 0.151594 0.813592 +v -0.537595 0.144553 0.814541 +v -0.540511 0.13754 0.815549 +v -0.543422 0.130442 0.816664 +v -0.546334 0.123344 0.817779 +v -0.549212 0.115963 0.818936 +v -0.552078 0.10861 0.820121 +v -0.554798 0.101148 0.821227 +v -0.557457 0.0936599 0.822276 +v -0.560016 0.0857934 0.823178 +v -0.562505 0.0779011 0.82402 +v -0.564775 0.0697573 0.824622 +v -0.566935 0.0615783 0.825126 +v -0.569005 0.0531872 0.82533 +v 
-0.570978 0.0447719 0.825454 +v -0.57267 0.0362081 0.82544 +v -0.574245 0.0275846 0.825309 +v -0.575698 0.0189042 0.825001 +v -0.576967 0.0102053 0.824552 +v -0.57812 0.00140588 0.82392 +v -0.57905 -0.00741028 0.82312 +v -0.579791 -0.0161391 0.822252 +v -0.580359 -0.0248728 0.821266 +v -0.580812 -0.0336838 0.820088 +v -0.581161 -0.0424928 0.81885 +v -0.581478 -0.051383 0.817322 +v -0.581555 -0.06032 0.815546 +v -0.581522 -0.0693073 0.813885 +v -0.581353 -0.0782306 0.812198 +v -0.581181 -0.0870471 0.810427 +v -0.580898 -0.0958556 0.808581 +v -0.580434 -0.104615 0.806805 +v -0.57992 -0.113361 0.805007 +v -0.57944 -0.122044 0.803109 +v -0.578861 -0.130711 0.80116 +v -0.57829 -0.139235 0.799103 +v -0.577694 -0.147767 0.797042 +v -0.576836 -0.156157 0.795179 +v -0.575973 -0.164544 0.793325 +v -0.575137 -0.172833 0.79136 +v -0.574291 -0.181115 0.789403 +v -0.573463 -0.18927 0.787388 +v -0.572599 -0.197424 0.785351 +v -0.571739 -0.205359 0.783349 +v -0.570844 -0.213283 0.781324 +v -0.569851 -0.221085 0.779326 +v -0.568921 -0.228896 0.777368 +v -0.568072 -0.236517 0.775354 +v -0.567119 -0.244128 0.773288 +v -0.566165 -0.251626 0.771271 +v -0.565277 -0.259133 0.769295 +v -0.564303 -0.266427 0.767339 +v -0.56331 -0.273721 0.765386 +v -0.562375 -0.28084 0.763402 +v -0.561506 -0.287972 0.761457 +v -0.560509 -0.295042 0.75962 +v -0.55958 -0.302126 0.757818 +v -0.558638 -0.308932 0.756007 +v -0.557721 -0.315767 0.754249 +v -0.556778 -0.32249 0.752484 +v -0.555942 -0.329249 0.750782 +v -0.554975 -0.335667 0.749211 +v -0.554102 -0.342112 0.747695 +v -0.553258 -0.34846 0.746084 +v -0.552394 -0.354794 0.744507 +v -0.551562 -0.360931 0.743016 +v -0.550695 -0.367052 0.741512 +v -0.549909 -0.373035 0.739936 +v -0.549161 -0.379024 0.738388 +v -0.548281 -0.384803 0.737005 +v -0.547451 -0.390593 0.735661 +v -0.546589 -0.39618 0.734327 +v -0.545818 -0.40179 0.73305 +v -0.545004 -0.407257 0.731716 +v -0.54424 -0.412724 0.730416 +v -0.54342 -0.41777 0.729101 +v -0.542699 -0.422846 0.727831 +v -0.541963 -0.427621 0.726469 +v -0.54128 -0.432425 0.72513 +v -0.540612 -0.436933 0.723786 +v -0.539986 -0.441461 0.722458 +v -0.539338 -0.445636 0.721073 +v -0.538797 -0.449845 0.719732 +v -0.538245 -0.453709 0.718298 +v -0.537772 -0.457627 0.716884 +v -0.537654 -0.461305 0.714645 +v -0.537624 -0.465014 0.712431 +v -0.537949 -0.468341 0.709499 +v -0.538358 -0.471695 0.706603 +v -0.564716 0.365532 0.834039 +v -0.562552 0.359519 0.836928 +v -0.560068 0.353559 0.839662 +v -0.55772 0.347742 0.841628 +v -0.555105 0.341974 0.843439 +v -0.552751 0.336438 0.844415 +v -0.550184 0.330982 0.845255 +v -0.547617 0.325607 0.845569 +v -0.544952 0.320283 0.845813 +v -0.542339 0.315129 0.845535 +v -0.539667 0.31001 0.845212 +v -0.536981 0.305011 0.844619 +v -0.534271 0.300037 0.844008 +v -0.531517 0.295223 0.843198 +v -0.528742 0.290429 0.84237 +v -0.525992 0.285811 0.841423 +v -0.523228 0.281208 0.840474 +v -0.52026 0.276841 0.839544 +v -0.51727 0.27252 0.838584 +v -0.514092 0.268425 0.837613 +v -0.510909 0.264303 0.836648 +v -0.507423 0.26029 0.835999 +v -0.503945 0.256268 0.835376 +v -0.501198 0.252255 0.834459 +v -0.498452 0.248225 0.833557 +v -0.498657 0.243698 0.830851 +v -0.498927 0.239112 0.828253 +v -0.500275 0.233817 0.825442 +v -0.501767 0.228461 0.822826 +v -0.503244 0.222864 0.82108 +v -0.50486 0.217241 0.819515 +v -0.50673 0.211264 0.818447 +v -0.508726 0.205289 0.817514 +v -0.510744 0.198914 0.81708 +v -0.512854 0.192551 0.816749 +v -0.515113 0.185776 0.816844 +v -0.517444 0.179032 0.817019 +v -0.519894 0.172138 0.817516 +v 
-0.522416 0.16529 0.818102 +v -0.524985 0.158366 0.81898 +v -0.527605 0.151473 0.819924 +v -0.530417 0.144411 0.821048 +v -0.53326 0.137369 0.822218 +v -0.536136 0.130234 0.823495 +v -0.539023 0.123111 0.824785 +v -0.54187 0.115824 0.826092 +v -0.544756 0.108576 0.827481 +v -0.547547 0.101085 0.828815 +v -0.550252 0.0935531 0.830066 +v -0.552803 0.085706 0.831194 +v -0.555283 0.0778155 0.832242 +v -0.557587 0.0696861 0.833046 +v -0.559731 0.0615103 0.833714 +v -0.561749 0.0531349 0.834153 +v -0.563659 0.0447037 0.834456 +v -0.565464 0.0360754 0.834565 +v -0.567077 0.0274115 0.834509 +v -0.568535 0.0186955 0.834315 +v -0.569787 0.00996883 0.833956 +v -0.570961 0.00117244 0.833422 +v -0.571928 -0.0076627 0.832737 +v -0.572706 -0.0164615 0.832022 +v -0.573259 -0.0252427 0.831154 +v -0.573785 -0.0340409 0.830115 +v -0.574106 -0.0428404 0.828924 +v -0.57446 -0.0518096 0.82748 +v -0.574608 -0.0607998 0.825837 +v -0.574564 -0.0697603 0.824186 +v -0.574415 -0.0786874 0.822492 +v -0.574324 -0.0875962 0.820767 +v -0.574067 -0.0964675 0.818955 +v -0.573731 -0.105287 0.817234 +v -0.573252 -0.114089 0.815422 +v -0.572767 -0.12281 0.813492 +v -0.572202 -0.131499 0.811526 +v -0.571668 -0.14005 0.809597 +v -0.571009 -0.148609 0.807528 +v -0.570247 -0.157048 0.805589 +v -0.569483 -0.165474 0.803649 +v -0.568713 -0.173797 0.801693 +v -0.567868 -0.182103 0.799696 +v -0.567094 -0.190253 0.797651 +v -0.566323 -0.198407 0.795614 +v -0.565503 -0.206426 0.793575 +v -0.564607 -0.214423 0.791489 +v -0.563717 -0.222282 0.78952 +v -0.56278 -0.230122 0.787511 +v -0.561963 -0.237795 0.78545 +v -0.561095 -0.245452 0.783367 +v -0.560197 -0.252941 0.781278 +v -0.559262 -0.260435 0.779172 +v -0.558332 -0.267822 0.777184 +v -0.557381 -0.275209 0.775176 +v -0.556501 -0.282373 0.773146 +v -0.555675 -0.289546 0.771134 +v -0.554745 -0.296602 0.769205 +v -0.553811 -0.303658 0.767254 +v -0.552962 -0.31053 0.765383 +v -0.552071 -0.317407 0.763535 +v -0.551233 -0.324136 0.76172 +v -0.550368 -0.330859 0.759888 +v -0.549503 -0.337368 0.758186 +v -0.54866 -0.343884 0.756486 +v -0.54792 -0.350218 0.754748 +v -0.547158 -0.35655 0.752989 +v -0.546366 -0.362722 0.751316 +v -0.545574 -0.368895 0.749675 +v -0.544907 -0.374911 0.747997 +v -0.544183 -0.380912 0.746277 +v -0.543389 -0.386703 0.744759 +v -0.542613 -0.392493 0.743233 +v -0.541894 -0.398151 0.741722 +v -0.541171 -0.403807 0.740204 +v -0.540475 -0.409237 0.738657 +v -0.53976 -0.41466 0.7371 +v -0.539131 -0.419773 0.735587 +v -0.538457 -0.424851 0.734072 +v -0.537842 -0.429737 0.732544 +v -0.537225 -0.434619 0.731001 +v -0.536735 -0.439215 0.729452 +v -0.536188 -0.443791 0.727873 +v -0.535741 -0.448056 0.726233 +v -0.535279 -0.452322 0.7246 +v -0.534925 -0.456292 0.72284 +v -0.534562 -0.46026 0.721073 +v -0.534639 -0.464055 0.718661 +v -0.534634 -0.467817 0.716214 +v -0.535086 -0.47138 0.713027 +v -0.535471 -0.474922 0.70982 +v -0.562825 0.367817 0.838291 +v -0.560582 0.361486 0.841297 +v -0.557966 0.355205 0.844113 +v -0.555465 0.34916 0.846079 +v -0.552717 0.343166 0.847911 +v -0.549993 0.337233 0.849061 +v -0.547066 0.331377 0.850081 +v -0.544224 0.325827 0.850445 +v -0.541299 0.320326 0.850744 +v -0.538455 0.31504 0.850425 +v -0.535557 0.309793 0.850063 +v -0.532618 0.304704 0.84951 +v -0.529645 0.299651 0.848936 +v -0.526719 0.294811 0.848077 +v -0.523765 0.289993 0.847192 +v -0.520834 0.285397 0.846249 +v -0.517878 0.280841 0.845284 +v -0.514679 0.276624 0.8443 +v -0.511413 0.272503 0.843239 +v -0.507954 0.268505 0.842319 +v -0.504463 0.264521 0.841358 +v -0.500847 0.260577 
0.84078 +v -0.497201 0.256664 0.8402 +v -0.493867 0.252846 0.839699 +v -0.490527 0.249021 0.839213 +v -0.491194 0.244427 0.836175 +v -0.491919 0.239769 0.833245 +v -0.49343 0.234484 0.830563 +v -0.495038 0.229164 0.828038 +v -0.496586 0.223525 0.826451 +v -0.498258 0.217876 0.825026 +v -0.500194 0.211842 0.824126 +v -0.502212 0.205806 0.823319 +v -0.504216 0.199352 0.822997 +v -0.506287 0.192909 0.822761 +v -0.508484 0.186081 0.822857 +v -0.510783 0.17928 0.823051 +v -0.513117 0.172313 0.823589 +v -0.515523 0.16539 0.824223 +v -0.517962 0.158347 0.825194 +v -0.520484 0.151346 0.826252 +v -0.523209 0.144242 0.827516 +v -0.525976 0.137165 0.828843 +v -0.528774 0.130003 0.830237 +v -0.531621 0.122869 0.831684 +v -0.534454 0.115658 0.833166 +v -0.537356 0.108498 0.834751 +v -0.540138 0.10094 0.83624 +v -0.542898 0.0933789 0.837708 +v -0.54545 0.0855568 0.839071 +v -0.547926 0.0776785 0.840341 +v -0.550224 0.0695611 0.841316 +v -0.552397 0.0613994 0.842181 +v -0.554319 0.0530335 0.842816 +v -0.556246 0.0446014 0.843374 +v -0.558039 0.0359198 0.843511 +v -0.559721 0.0272321 0.843558 +v -0.561102 0.0184906 0.84341 +v -0.562385 0.00974174 0.843187 +v -0.563477 0.00092397 0.842664 +v -0.564511 -0.0079156 0.842101 +v -0.565207 -0.0168024 0.841454 +v -0.565812 -0.0256737 0.84074 +v -0.566314 -0.0344376 0.83978 +v -0.56675 -0.0432217 0.838733 +v -0.567082 -0.0522204 0.837353 +v -0.567279 -0.0612271 0.835851 +v -0.567278 -0.0701648 0.834253 +v -0.567226 -0.079095 0.832623 +v -0.567155 -0.088085 0.8309 +v -0.566938 -0.0970358 0.82912 +v -0.56662 -0.105897 0.827388 +v -0.566155 -0.114708 0.825577 +v -0.565716 -0.123454 0.823672 +v -0.565215 -0.132188 0.821731 +v -0.564668 -0.140749 0.819852 +v -0.564063 -0.149368 0.817865 +v -0.563415 -0.157875 0.815836 +v -0.562709 -0.166336 0.813797 +v -0.561915 -0.174685 0.811811 +v -0.561119 -0.183027 0.809824 +v -0.560369 -0.191169 0.80774 +v -0.559628 -0.199306 0.805669 +v -0.55883 -0.207411 0.803584 +v -0.558034 -0.2155 0.801494 +v -0.557151 -0.22339 0.799489 +v -0.556259 -0.231275 0.797477 +v -0.555493 -0.239 0.795371 +v -0.554639 -0.246694 0.793228 +v -0.553838 -0.254195 0.791065 +v -0.553033 -0.261696 0.788924 +v -0.552139 -0.26917 0.786876 +v -0.551264 -0.276646 0.784831 +v -0.550371 -0.283833 0.78271 +v -0.549591 -0.291044 0.780664 +v -0.548685 -0.298087 0.778609 +v -0.547888 -0.305147 0.776613 +v -0.547018 -0.312051 0.774604 +v -0.546246 -0.319003 0.772739 +v -0.545415 -0.325711 0.770808 +v -0.544642 -0.332435 0.768908 +v -0.543809 -0.339004 0.767031 +v -0.543056 -0.345599 0.765212 +v -0.542348 -0.351917 0.763291 +v -0.54174 -0.358257 0.761414 +v -0.541046 -0.364476 0.75958 +v -0.540397 -0.370722 0.757799 +v -0.539749 -0.376758 0.755934 +v -0.539184 -0.382809 0.754135 +v -0.538447 -0.388594 0.752455 +v -0.537769 -0.394387 0.750813 +v -0.537069 -0.400073 0.749026 +v -0.536536 -0.405807 0.747336 +v -0.535913 -0.411199 0.74556 +v -0.535377 -0.416618 0.743835 +v -0.534849 -0.421769 0.742088 +v -0.534353 -0.426899 0.740385 +v -0.533801 -0.431866 0.738655 +v -0.533322 -0.43685 0.736955 +v -0.532879 -0.4415 0.735153 +v -0.532448 -0.446144 0.733343 +v -0.532105 -0.450474 0.731404 +v -0.531819 -0.45482 0.729488 +v -0.531582 -0.458856 0.727376 +v -0.531444 -0.462929 0.725303 +v -0.531541 -0.466774 0.722646 +v -0.531677 -0.470642 0.720011 +v -0.532148 -0.474394 0.716532 +v -0.532693 -0.478189 0.71308 +v -0.560412 0.370555 0.843644 +v -0.55808 0.363906 0.846637 +v -0.555373 0.357319 0.849426 +v -0.552733 0.350938 0.851412 +v -0.549785 0.344636 0.853201 +v -0.54684 
0.33847 0.854327 +v -0.5437 0.332381 0.855317 +v -0.540657 0.326622 0.855647 +v -0.537511 0.320919 0.8559 +v -0.534383 0.315387 0.855559 +v -0.531191 0.3099 0.855162 +v -0.527967 0.304588 0.854496 +v -0.524728 0.299336 0.853813 +v -0.521638 0.29446 0.852891 +v -0.518531 0.289613 0.851952 +v -0.515343 0.285092 0.850921 +v -0.512117 0.280629 0.84984 +v -0.508614 0.276522 0.848754 +v -0.505082 0.272487 0.847631 +v -0.501321 0.268542 0.846796 +v -0.497576 0.264573 0.845986 +v -0.493665 0.2607 0.845581 +v -0.489745 0.25682 0.845201 +v -0.486397 0.253309 0.844692 +v -0.483058 0.249789 0.84421 +v -0.483753 0.245348 0.841328 +v -0.484524 0.24084 0.838578 +v -0.4863 0.235594 0.835921 +v -0.488179 0.230309 0.833445 +v -0.489967 0.224568 0.831868 +v -0.491883 0.21882 0.830458 +v -0.493851 0.212705 0.82962 +v -0.495907 0.206586 0.828884 +v -0.497959 0.199981 0.828634 +v -0.500048 0.193383 0.828436 +v -0.50206 0.186516 0.828691 +v -0.504107 0.17968 0.829005 +v -0.506273 0.172607 0.829623 +v -0.508512 0.165574 0.830327 +v -0.510881 0.158485 0.831304 +v -0.513312 0.151418 0.832344 +v -0.515901 0.144266 0.833659 +v -0.51853 0.137144 0.835019 +v -0.521298 0.129908 0.836561 +v -0.524087 0.122681 0.838126 +v -0.526888 0.11546 0.83988 +v -0.529694 0.108247 0.841653 +v -0.532477 0.100719 0.843396 +v -0.535202 0.093158 0.845061 +v -0.537734 0.0853695 0.846635 +v -0.540177 0.0775232 0.848105 +v -0.542478 0.0694863 0.84929 +v -0.544651 0.0614009 0.850351 +v -0.546605 0.0529859 0.851182 +v -0.548458 0.0444761 0.851868 +v -0.55022 0.0357758 0.852218 +v -0.551819 0.0270411 0.852425 +v -0.553245 0.0183136 0.852398 +v -0.554529 0.00956763 0.852246 +v -0.555668 0.000743066 0.851901 +v -0.556622 -0.00809769 0.851404 +v -0.557415 -0.0170468 0.850841 +v -0.558036 -0.0259581 0.850137 +v -0.558638 -0.0347636 0.849336 +v -0.559076 -0.0436477 0.848282 +v -0.559487 -0.0526898 0.847035 +v -0.55968 -0.0617121 0.845612 +v -0.559805 -0.0706506 0.844148 +v -0.559809 -0.0795751 0.842604 +v -0.559772 -0.0886036 0.840985 +v -0.559589 -0.0975693 0.839342 +v -0.559355 -0.106432 0.837652 +v -0.558936 -0.115259 0.835828 +v -0.558543 -0.124043 0.833967 +v -0.558088 -0.132818 0.83208 +v -0.557595 -0.141453 0.830126 +v -0.557007 -0.150107 0.82807 +v -0.556384 -0.158699 0.826089 +v -0.555697 -0.167264 0.824087 +v -0.555025 -0.175679 0.822036 +v -0.554264 -0.184056 0.819928 +v -0.553549 -0.192249 0.817875 +v -0.552808 -0.20044 0.815793 +v -0.552104 -0.208545 0.813665 +v -0.551335 -0.216631 0.811505 +v -0.550508 -0.22459 0.809501 +v -0.549638 -0.232526 0.807464 +v -0.548893 -0.240294 0.805296 +v -0.548089 -0.248056 0.803078 +v -0.547341 -0.255648 0.800908 +v -0.546525 -0.263226 0.798703 +v -0.54575 -0.270689 0.796556 +v -0.544909 -0.278148 0.794411 +v -0.544088 -0.285421 0.792265 +v -0.543275 -0.292681 0.790114 +v -0.542525 -0.299761 0.787975 +v -0.541717 -0.306823 0.785803 +v -0.540936 -0.313786 0.783811 +v -0.540164 -0.320778 0.781869 +v -0.539503 -0.327488 0.779806 +v -0.538831 -0.33421 0.77774 +v -0.538048 -0.340819 0.775767 +v -0.537307 -0.347447 0.773806 +v -0.536683 -0.353831 0.771802 +v -0.536086 -0.360227 0.769818 +v -0.535509 -0.366466 0.767851 +v -0.53493 -0.372705 0.765894 +v -0.534349 -0.378774 0.763942 +v -0.533766 -0.384832 0.762015 +v -0.533194 -0.390688 0.760185 +v -0.53254 -0.396517 0.758323 +v -0.532056 -0.402218 0.756406 +v -0.531541 -0.407909 0.75449 +v -0.531123 -0.413397 0.752581 +v -0.530725 -0.418891 0.750695 +v -0.53028 -0.424075 0.748769 +v -0.529808 -0.429238 0.746846 +v -0.529444 -0.434221 0.744896 +v -0.52905 
-0.4392 0.742926 +v -0.528833 -0.443904 0.740873 +v -0.528544 -0.448581 0.738784 +v -0.528402 -0.453036 0.736551 +v -0.528214 -0.457472 0.734298 +v -0.528119 -0.461673 0.731953 +v -0.52796 -0.465854 0.729595 +v -0.528152 -0.469869 0.726796 +v -0.528315 -0.473873 0.723952 +v -0.528907 -0.47779 0.720393 +v -0.529385 -0.481667 0.716779 +v -0.557903 0.373293 0.848953 +v -0.555484 0.366338 0.851925 +v -0.552712 0.359445 0.854697 +v -0.54991 0.352747 0.856671 +v -0.546805 0.346119 0.858461 +v -0.543657 0.339713 0.859577 +v -0.540319 0.333393 0.860543 +v -0.537056 0.327434 0.860822 +v -0.533716 0.321527 0.86104 +v -0.530271 0.315768 0.860649 +v -0.526758 0.310054 0.860204 +v -0.523277 0.304512 0.859448 +v -0.519757 0.299061 0.858641 +v -0.51651 0.29415 0.857663 +v -0.513238 0.289287 0.856653 +v -0.509775 0.284872 0.855502 +v -0.506273 0.280504 0.854307 +v -0.502492 0.276497 0.85314 +v -0.498679 0.272555 0.851932 +v -0.494679 0.268623 0.851254 +v -0.490674 0.26466 0.850592 +v -0.486486 0.260835 0.850381 +v -0.482266 0.25702 0.850165 +v -0.478932 0.253803 0.849666 +v -0.475591 0.25058 0.849194 +v -0.476326 0.246291 0.846482 +v -0.477111 0.241948 0.843868 +v -0.479136 0.236736 0.841247 +v -0.481273 0.231497 0.838785 +v -0.483322 0.225644 0.837233 +v -0.485468 0.219782 0.835826 +v -0.487529 0.213578 0.835118 +v -0.489627 0.207372 0.834472 +v -0.491703 0.200613 0.834282 +v -0.493824 0.193856 0.834138 +v -0.495619 0.186955 0.83453 +v -0.497481 0.180083 0.835004 +v -0.499459 0.172904 0.835675 +v -0.501516 0.16576 0.836445 +v -0.503783 0.158609 0.83739 +v -0.506127 0.151512 0.838435 +v -0.50859 0.1443 0.839801 +v -0.511102 0.137113 0.841209 +v -0.513778 0.129774 0.842829 +v -0.516516 0.122473 0.844528 +v -0.519247 0.115215 0.846507 +v -0.521976 0.107958 0.848482 +v -0.524742 0.10045 0.85046 +v -0.52746 0.0929121 0.852366 +v -0.529936 0.0851373 0.854109 +v -0.532385 0.077344 0.855821 +v -0.53459 0.069383 0.857143 +v -0.536714 0.0613919 0.858376 +v -0.538678 0.0529156 0.859393 +v -0.54059 0.0443032 0.860262 +v -0.542246 0.0355665 0.860786 +v -0.543849 0.0268045 0.861224 +v -0.545231 0.0180944 0.861249 +v -0.546527 0.00936701 0.861185 +v -0.547594 0.000546431 0.860922 +v -0.548582 -0.00829079 0.860571 +v -0.549386 -0.017271 0.860016 +v -0.550148 -0.0262374 0.85943 +v -0.550671 -0.0350739 0.858648 +v -0.551176 -0.0440561 0.857645 +v -0.551575 -0.0531254 0.856459 +v -0.55188 -0.0621815 0.855199 +v -0.552061 -0.0711313 0.853821 +v -0.552138 -0.0800513 0.852399 +v -0.552015 -0.0890605 0.850855 +v -0.551899 -0.0980519 0.849331 +v -0.551707 -0.106903 0.847636 +v -0.551441 -0.115749 0.845878 +v -0.551068 -0.124566 0.844061 +v -0.550688 -0.133379 0.842251 +v -0.550168 -0.142062 0.840194 +v -0.549721 -0.150787 0.838148 +v -0.548985 -0.159439 0.836126 +v -0.548386 -0.168115 0.834196 +v -0.547786 -0.176574 0.832044 +v -0.547188 -0.185018 0.829901 +v -0.546453 -0.193251 0.827811 +v -0.545771 -0.201494 0.825768 +v -0.545071 -0.209593 0.823559 +v -0.544417 -0.217693 0.82138 +v -0.543593 -0.225705 0.819312 +v -0.542826 -0.233731 0.817285 +v -0.542065 -0.241542 0.815032 +v -0.541391 -0.249361 0.812829 +v -0.540569 -0.257021 0.810585 +v -0.539854 -0.264707 0.808418 +v -0.53915 -0.27217 0.806175 +v -0.538427 -0.279625 0.803916 +v -0.537612 -0.286945 0.801696 +v -0.536921 -0.294305 0.79957 +v -0.5362 -0.301387 0.797253 +v -0.5356 -0.308503 0.795033 +v -0.534778 -0.315492 0.792966 +v -0.533991 -0.322501 0.790948 +v -0.533396 -0.329209 0.788704 +v -0.532819 -0.335918 0.786483 +v -0.532151 -0.342595 0.784414 +v -0.531554 
-0.349297 0.782384 +v -0.530915 -0.355733 0.78027 +v -0.530415 -0.362213 0.77822 +v -0.52981 -0.368415 0.776037 +v -0.529455 -0.374695 0.774006 +v -0.528911 -0.380792 0.771947 +v -0.528441 -0.386895 0.769935 +v -0.527853 -0.392774 0.76786 +v -0.52744 -0.398703 0.765898 +v -0.527017 -0.404374 0.763789 +v -0.526686 -0.41006 0.761734 +v -0.526264 -0.415582 0.759568 +v -0.525967 -0.421135 0.757464 +v -0.525579 -0.426351 0.755346 +v -0.525321 -0.431614 0.753309 +v -0.525041 -0.436592 0.751081 +v -0.524872 -0.441601 0.74892 +v -0.524754 -0.446336 0.746567 +v -0.524668 -0.451082 0.744246 +v -0.524563 -0.455593 0.741648 +v -0.524639 -0.460157 0.739119 +v -0.52456 -0.464478 0.736522 +v -0.524578 -0.468845 0.733977 +v -0.524734 -0.472954 0.730929 +v -0.525034 -0.477151 0.727935 +v -0.525604 -0.481161 0.724223 +v -0.526239 -0.485202 0.720547 +v -0.55499 0.375916 0.854889 +v -0.552461 0.368648 0.857813 +v -0.549536 0.361459 0.860514 +v -0.546602 0.354408 0.862421 +v -0.543341 0.347427 0.864126 +v -0.540026 0.340842 0.865155 +v -0.536529 0.334337 0.866047 +v -0.533059 0.328173 0.86622 +v -0.529477 0.322084 0.866297 +v -0.525791 0.31616 0.865801 +v -0.52208 0.310229 0.865298 +v -0.518429 0.30457 0.86438 +v -0.514749 0.298978 0.863429 +v -0.511228 0.29404 0.862296 +v -0.507692 0.289172 0.861116 +v -0.503987 0.284885 0.859785 +v -0.500242 0.280652 0.858401 +v -0.496262 0.276584 0.857184 +v -0.492297 0.272524 0.856004 +v -0.487859 0.268386 0.855574 +v -0.483408 0.264222 0.855162 +v -0.478911 0.260657 0.855019 +v -0.474362 0.257131 0.854837 +v -0.471776 0.254241 0.8541 +v -0.469208 0.251323 0.853399 +v -0.469755 0.247573 0.851287 +v -0.470342 0.243792 0.84925 +v -0.472504 0.238526 0.846747 +v -0.474744 0.233216 0.844352 +v -0.476969 0.227191 0.842774 +v -0.479244 0.221187 0.841304 +v -0.481288 0.214748 0.840676 +v -0.483398 0.208304 0.84013 +v -0.485342 0.20142 0.840084 +v -0.487321 0.194526 0.84008 +v -0.489085 0.187475 0.840453 +v -0.490888 0.180443 0.840878 +v -0.492735 0.173235 0.841542 +v -0.494645 0.166044 0.842274 +v -0.496743 0.158805 0.843235 +v -0.498914 0.151625 0.844304 +v -0.501233 0.144373 0.845798 +v -0.503583 0.137109 0.847294 +v -0.50612 0.129766 0.848999 +v -0.508736 0.122472 0.850819 +v -0.511393 0.115139 0.852917 +v -0.514059 0.107811 0.855028 +v -0.516688 0.100274 0.857171 +v -0.519284 0.0927169 0.859278 +v -0.521749 0.0849948 0.861203 +v -0.524154 0.0772391 0.863061 +v -0.52636 0.0692878 0.864635 +v -0.528475 0.0612949 0.866108 +v -0.530491 0.0527636 0.86725 +v -0.532423 0.0441439 0.868246 +v -0.534082 0.0354503 0.868929 +v -0.535655 0.0267137 0.869456 +v -0.537057 0.0179435 0.869649 +v -0.538331 0.0091411 0.869728 +v -0.539339 0.000308013 0.869627 +v -0.540239 -0.00852994 0.869422 +v -0.541085 -0.0174475 0.868968 +v -0.541813 -0.0263709 0.868396 +v -0.542433 -0.0353723 0.867689 +v -0.542949 -0.0444193 0.866804 +v -0.543419 -0.0534625 0.865855 +v -0.543735 -0.0624911 0.86477 +v -0.544003 -0.0715023 0.86354 +v -0.544118 -0.0804967 0.862185 +v -0.544174 -0.0895417 0.860715 +v -0.544039 -0.0985416 0.85913 +v -0.543908 -0.10743 0.857543 +v -0.543616 -0.116284 0.855835 +v -0.543385 -0.125154 0.854087 +v -0.543027 -0.13399 0.852255 +v -0.542631 -0.142769 0.850198 +v -0.54221 -0.15155 0.848116 +v -0.541641 -0.160242 0.846179 +v -0.541013 -0.168926 0.844206 +v -0.540461 -0.177438 0.842129 +v -0.539843 -0.185916 0.84001 +v -0.539212 -0.194262 0.837896 +v -0.538502 -0.202616 0.835703 +v -0.537823 -0.210774 0.833512 +v -0.537178 -0.218933 0.831343 +v -0.536502 -0.226988 0.829229 +v -0.535758 
[diff continues with several thousand Wavefront OBJ vertex records — lines of the form `+v <x> <y> <z>` defining the raw 3D mesh geometry of an avatar asset; the coordinate data is omitted here for readability]
-0.399288 0.342016 0.969081 +v -0.394802 0.333741 0.966571 +v -0.390758 0.325487 0.964425 +v -0.386789 0.317404 0.962352 +v -0.38295 0.312077 0.96087 +v -0.379049 0.306713 0.959329 +v -0.375314 0.302249 0.9567 +v -0.371554 0.297789 0.954121 +v -0.368153 0.295999 0.947859 +v -0.362816 0.293831 0.938231 +v -0.360664 0.286789 0.940168 +v -0.358323 0.279673 0.941807 +v -0.356595 0.27292 0.942789 +v -0.354727 0.266199 0.943465 +v -0.353348 0.25973 0.943635 +v -0.351851 0.253255 0.943534 +v -0.350767 0.247086 0.94299 +v -0.349576 0.2409 0.942188 +v -0.348906 0.234992 0.940945 +v -0.348104 0.229001 0.939387 +v -0.347779 0.222808 0.937265 +v -0.347384 0.216749 0.934936 +v -0.348179 0.213358 0.937309 +v -0.349119 0.209868 0.940036 +v -0.348052 0.20233 0.941974 +v -0.346899 0.19473 0.943674 +v -0.345534 0.187211 0.942599 +v -0.344114 0.179741 0.941354 +v -0.342871 0.172494 0.939226 +v -0.341649 0.165269 0.937154 +v -0.340879 0.157682 0.935956 +v -0.340239 0.150003 0.935175 +v -0.339183 0.142142 0.936611 +v -0.338262 0.134328 0.938549 +v -0.337819 0.126131 0.941436 +v -0.3374 0.117955 0.94441 +v -0.337309 0.109662 0.947131 +v -0.337209 0.101355 0.949823 +v -0.337247 0.0930471 0.952279 +v -0.337283 0.0847433 0.954743 +v -0.337475 0.076414 0.957231 +v -0.337693 0.0681114 0.959826 +v -0.338088 0.0598879 0.962574 +v -0.338514 0.0516453 0.965385 +v -0.338757 0.0431943 0.968436 +v -0.339006 0.0347286 0.971506 +v -0.339472 0.0263619 0.974578 +v -0.339926 0.0179813 0.977623 +v -0.340526 0.00945541 0.980603 +v -0.341111 0.000862183 0.983485 +v -0.341706 -0.00776046 0.98626 +v -0.342257 -0.0164274 0.988911 +v -0.342816 -0.025307 0.991446 +v -0.343366 -0.0342167 0.993896 +v -0.34404 -0.0432619 0.996064 +v -0.344628 -0.0523427 0.998096 +v -0.345143 -0.0614922 0.999866 +v -0.3456 -0.070673 1.00152 +v -0.346189 -0.0800542 1.00283 +v -0.346731 -0.0894729 1.00397 +v -0.347199 -0.0990307 1.00484 +v -0.347636 -0.108608 1.00553 +v -0.348296 -0.11828 1.00574 +v -0.348879 -0.127952 1.00577 +v -0.349393 -0.137661 1.00565 +v -0.349823 -0.147362 1.00529 +v -0.350582 -0.157092 1.00468 +v -0.351241 -0.166791 1.00377 +v -0.351868 -0.176566 1.00278 +v -0.352442 -0.186324 1.00165 +v -0.353142 -0.195942 1.00041 +v -0.353841 -0.205504 0.999059 +v -0.354691 -0.215005 0.99755 +v -0.355522 -0.224472 0.99596 +v -0.356382 -0.233868 0.994455 +v -0.357201 -0.243258 0.992831 +v -0.358225 -0.252605 0.991198 +v -0.359205 -0.261932 0.98947 +v -0.360166 -0.271143 0.987812 +v -0.361124 -0.280329 0.98613 +v -0.361979 -0.289571 0.984481 +v -0.362844 -0.298813 0.982803 +v -0.364015 -0.307852 0.980818 +v -0.365036 -0.316979 0.978842 +v -0.366195 -0.326013 0.976691 +v -0.367293 -0.335044 0.974466 +v -0.368541 -0.343582 0.971958 +v -0.369758 -0.352103 0.969403 +v -0.371233 -0.360242 0.966558 +v -0.372637 -0.368423 0.963684 +v -0.374273 -0.376224 0.960524 +v -0.375869 -0.384022 0.957335 +v -0.377759 -0.391229 0.954008 +v -0.379643 -0.398402 0.950627 +v -0.381511 -0.40526 0.947282 +v -0.3834 -0.412118 0.943936 +v -0.385236 -0.418484 0.940614 +v -0.387077 -0.424824 0.937326 +v -0.388839 -0.431046 0.93405 +v -0.390589 -0.43726 0.930764 +v -0.392239 -0.44337 0.927284 +v -0.393885 -0.449472 0.923788 +v -0.395484 -0.455497 0.920198 +v -0.397052 -0.46151 0.916568 +v -0.398667 -0.467651 0.912597 +v -0.400271 -0.473782 0.908592 +v -0.401862 -0.479925 0.904235 +v -0.403487 -0.486078 0.899891 +v -0.404833 -0.492322 0.895418 +v -0.406173 -0.498552 0.890923 +v -0.407628 -0.50486 0.885977 +v -0.409108 -0.51116 0.881056 +v -0.41041 -0.517555 0.875986 +v -0.411779 
-0.523971 0.870963 +v -0.413201 -0.530361 0.865295 +v -0.414721 -0.536791 0.859693 +v -0.415953 -0.543299 0.854065 +v -0.417212 -0.549821 0.848457 +v -0.418584 -0.556485 0.842289 +v -0.420045 -0.563212 0.836122 +v -0.430698 0.424681 0.986495 +v -0.427095 0.417037 0.986071 +v -0.423445 0.409383 0.985542 +v -0.418435 0.398812 0.98448 +v -0.413359 0.388298 0.983303 +v -0.408521 0.378756 0.981634 +v -0.403665 0.369289 0.979843 +v -0.398954 0.360659 0.977787 +v -0.394169 0.351992 0.975661 +v -0.389621 0.343696 0.973211 +v -0.385111 0.335492 0.97072 +v -0.380925 0.3274 0.968509 +v -0.376775 0.319426 0.966366 +v -0.372749 0.314216 0.964762 +v -0.368659 0.308956 0.963108 +v -0.364463 0.30456 0.960623 +v -0.360265 0.300167 0.958152 +v -0.357197 0.298463 0.952057 +v -0.352498 0.296185 0.942779 +v -0.350633 0.28883 0.94453 +v -0.348598 0.281378 0.945958 +v -0.346881 0.274392 0.946882 +v -0.345041 0.267413 0.947497 +v -0.343569 0.260702 0.947646 +v -0.341989 0.254005 0.947506 +v -0.340819 0.24763 0.946936 +v -0.339548 0.241252 0.946097 +v -0.338702 0.235109 0.94486 +v -0.337739 0.228863 0.943233 +v -0.337155 0.22249 0.941188 +v -0.336506 0.216211 0.938874 +v -0.337024 0.213046 0.940949 +v -0.337742 0.209686 0.943599 +v -0.336686 0.202095 0.945042 +v -0.335558 0.19442 0.946322 +v -0.334193 0.186848 0.945094 +v -0.332823 0.179345 0.943742 +v -0.331724 0.172056 0.941616 +v -0.330671 0.164787 0.939592 +v -0.32995 0.157188 0.938621 +v -0.32935 0.149483 0.938115 +v -0.328389 0.141572 0.939794 +v -0.327542 0.133694 0.941922 +v -0.326983 0.125462 0.944811 +v -0.32643 0.117252 0.947716 +v -0.326273 0.108982 0.950314 +v -0.326097 0.100674 0.952858 +v -0.326044 0.0923394 0.955256 +v -0.326 0.0840132 0.957702 +v -0.326175 0.0757139 0.960272 +v -0.326382 0.0674529 0.962987 +v -0.326663 0.0592049 0.965958 +v -0.326978 0.0509581 0.968971 +v -0.32727 0.0425682 0.972139 +v -0.327574 0.0342014 0.975355 +v -0.328014 0.0258354 0.978518 +v -0.328452 0.0174639 0.981679 +v -0.328945 0.00890521 0.984741 +v -0.329415 0.000323872 0.987755 +v -0.329884 -0.00825758 0.990592 +v -0.33033 -0.016881 0.993344 +v -0.330831 -0.0257633 0.995925 +v -0.331305 -0.0346744 0.998392 +v -0.33175 -0.0437317 1.00062 +v -0.332137 -0.0528209 1.00271 +v -0.332628 -0.0619826 1.00451 +v -0.333041 -0.0711925 1.00609 +v -0.333545 -0.0805817 1.00744 +v -0.333993 -0.0899912 1.00863 +v -0.334491 -0.0995546 1.00949 +v -0.334928 -0.109147 1.01007 +v -0.33549 -0.118891 1.01035 +v -0.336007 -0.128637 1.01044 +v -0.336594 -0.138329 1.01031 +v -0.33711 -0.148016 1.00994 +v -0.33785 -0.157782 1.00931 +v -0.338539 -0.167533 1.00851 +v -0.339287 -0.177312 1.00755 +v -0.339997 -0.187046 1.00643 +v -0.34071 -0.196699 1.00521 +v -0.34141 -0.206332 1.00397 +v -0.342335 -0.215925 1.0026 +v -0.34327 -0.225488 1.00122 +v -0.344279 -0.234973 0.999894 +v -0.345284 -0.244448 0.998482 +v -0.346308 -0.253846 0.99713 +v -0.347341 -0.263266 0.995761 +v -0.348377 -0.272575 0.994356 +v -0.349358 -0.281902 0.99298 +v -0.350456 -0.291308 0.991569 +v -0.351476 -0.300678 0.990018 +v -0.352594 -0.30989 0.988318 +v -0.353608 -0.319153 0.986554 +v -0.354695 -0.32836 0.984493 +v -0.355712 -0.337572 0.982416 +v -0.357093 -0.346266 0.979945 +v -0.358436 -0.354946 0.97737 +v -0.360107 -0.363257 0.974402 +v -0.36165 -0.371563 0.971404 +v -0.363443 -0.37933 0.96828 +v -0.365209 -0.387073 0.965101 +v -0.36726 -0.394262 0.961672 +v -0.369306 -0.401409 0.958253 +v -0.371374 -0.408021 0.954888 +v -0.373451 -0.414634 0.951476 +v -0.375411 -0.42086 0.948192 +v -0.377352 -0.427073 0.944925 +v 
-0.379288 -0.433114 0.941707 +v -0.381189 -0.439148 0.938409 +v -0.382972 -0.445149 0.935086 +v -0.384679 -0.451123 0.931673 +v -0.386428 -0.457108 0.92816 +v -0.388099 -0.463061 0.924548 +v -0.389865 -0.469187 0.920629 +v -0.391573 -0.475269 0.916662 +v -0.393239 -0.481524 0.912487 +v -0.394812 -0.487737 0.908207 +v -0.396338 -0.494084 0.903735 +v -0.39778 -0.500391 0.899172 +v -0.399413 -0.506811 0.894338 +v -0.400925 -0.513192 0.88938 +v -0.402354 -0.519776 0.884278 +v -0.40371 -0.526313 0.87914 +v -0.405262 -0.5329 0.873604 +v -0.40674 -0.539456 0.868005 +v -0.408143 -0.546198 0.862278 +v -0.409459 -0.552903 0.856486 +v -0.410927 -0.559704 0.850321 +v -0.41237 -0.566484 0.84411 +v -0.421559 0.425676 0.992276 +v -0.417894 0.418144 0.991537 +v -0.414212 0.410636 0.990665 +v -0.409086 0.400159 0.989282 +v -0.403921 0.38971 0.987793 +v -0.398919 0.380226 0.985951 +v -0.393911 0.370809 0.983983 +v -0.389238 0.362247 0.981872 +v -0.384492 0.35363 0.979692 +v -0.379928 0.345407 0.977265 +v -0.375408 0.337258 0.974831 +v -0.371082 0.329311 0.972567 +v -0.366756 0.321491 0.970357 +v -0.362515 0.316349 0.968631 +v -0.358244 0.311182 0.966912 +v -0.353598 0.306895 0.96467 +v -0.348953 0.302614 0.962375 +v -0.346175 0.300836 0.956566 +v -0.341903 0.298395 0.946623 +v -0.340331 0.290764 0.948235 +v -0.338622 0.283039 0.949501 +v -0.33692 0.275845 0.950369 +v -0.33511 0.26866 0.950905 +v -0.333564 0.261737 0.951026 +v -0.331926 0.254839 0.950833 +v -0.330665 0.248296 0.950242 +v -0.329316 0.241739 0.949355 +v -0.32831 0.235393 0.948109 +v -0.32718 0.228994 0.946443 +v -0.326325 0.222419 0.944424 +v -0.325401 0.215966 0.942113 +v -0.325868 0.212723 0.944567 +v -0.326369 0.209501 0.947145 +v -0.325301 0.201825 0.948073 +v -0.324203 0.194151 0.948871 +v -0.322852 0.186559 0.947484 +v -0.321521 0.179035 0.946023 +v -0.320578 0.171694 0.943901 +v -0.319669 0.164367 0.941887 +v -0.318999 0.156716 0.941134 +v -0.318433 0.148976 0.940932 +v -0.317567 0.140995 0.942864 +v -0.316796 0.133034 0.945191 +v -0.316112 0.124778 0.948033 +v -0.315427 0.116516 0.950877 +v -0.315191 0.108248 0.95333 +v -0.314949 0.0999645 0.955738 +v -0.31482 0.0916117 0.95812 +v -0.314712 0.0832795 0.960602 +v -0.314885 0.0750256 0.963333 +v -0.31509 0.0667997 0.966199 +v -0.315273 0.0585513 0.969432 +v -0.315475 0.0503057 0.972687 +v -0.315822 0.0419967 0.975989 +v -0.316153 0.0336757 0.979259 +v -0.316581 0.0253258 0.982542 +v -0.31698 0.016932 0.985736 +v -0.317352 0.00837139 0.9889 +v -0.317702 -0.000222476 0.992002 +v -0.318058 -0.00877251 0.994906 +v -0.31838 -0.0173635 0.997704 +v -0.318824 -0.0262349 1.00034 +v -0.319192 -0.0351541 1.00282 +v -0.319434 -0.0442312 1.00508 +v -0.319624 -0.0533286 1.00719 +v -0.320046 -0.0625074 1.00898 +v -0.320416 -0.0717099 1.0106 +v -0.320783 -0.0811019 1.01191 +v -0.321098 -0.0905106 1.01301 +v -0.321617 -0.10008 1.01376 +v -0.322098 -0.109699 1.01426 +v -0.322559 -0.119508 1.01449 +v -0.322972 -0.129311 1.01455 +v -0.323589 -0.138972 1.01434 +v -0.324166 -0.14862 1.01392 +v -0.324892 -0.158402 1.01326 +v -0.325588 -0.168149 1.01243 +v -0.326467 -0.177905 1.01147 +v -0.327323 -0.187641 1.0104 +v -0.328047 -0.197347 1.00933 +v -0.32879 -0.207031 1.00824 +v -0.329773 -0.216726 1.00704 +v -0.330795 -0.226398 1.00584 +v -0.332018 -0.235977 1.00472 +v -0.333263 -0.245551 1.00362 +v -0.334318 -0.255017 1.00263 +v -0.33539 -0.264535 1.0017 +v -0.336486 -0.274004 1.00073 +v -0.337575 -0.28347 0.999709 +v -0.338864 -0.293028 0.998506 +v -0.340128 -0.302539 0.997147 +v -0.341149 -0.311911 0.995677 +v 
-0.342125 -0.3213 0.99415 +v -0.34316 -0.330703 0.992287 +v -0.344085 -0.340172 0.99037 +v -0.34558 -0.349041 0.987835 +v -0.347011 -0.357884 0.985132 +v -0.348817 -0.366285 0.982063 +v -0.350554 -0.374672 0.978947 +v -0.352482 -0.382367 0.975704 +v -0.354388 -0.390046 0.972441 +v -0.356583 -0.397252 0.968975 +v -0.358787 -0.404415 0.96539 +v -0.360993 -0.410761 0.961923 +v -0.363186 -0.417104 0.958455 +v -0.365238 -0.423185 0.955302 +v -0.367277 -0.429253 0.952075 +v -0.369345 -0.435075 0.948851 +v -0.371405 -0.440897 0.945584 +v -0.373253 -0.446789 0.942367 +v -0.375053 -0.452658 0.939054 +v -0.376932 -0.45858 0.935633 +v -0.378764 -0.464482 0.932116 +v -0.38067 -0.470563 0.928255 +v -0.382547 -0.476646 0.924351 +v -0.384179 -0.482972 0.920288 +v -0.385795 -0.489274 0.916178 +v -0.387503 -0.495721 0.911722 +v -0.389167 -0.502121 0.90721 +v -0.390872 -0.508625 0.902418 +v -0.392525 -0.515104 0.897546 +v -0.394025 -0.521825 0.892402 +v -0.39552 -0.52856 0.887243 +v -0.397085 -0.535305 0.881723 +v -0.398618 -0.542034 0.87618 +v -0.400117 -0.548975 0.870318 +v -0.401602 -0.555918 0.864442 +v -0.403119 -0.562847 0.858218 +v -0.404683 -0.569775 0.852043 +v -0.4116 0.426185 0.997657 +v -0.408121 0.418725 0.996502 +v -0.404601 0.411305 0.995223 +v -0.399289 0.40099 0.993499 +v -0.393957 0.390682 0.991697 +v -0.388929 0.381353 0.989641 +v -0.383896 0.372091 0.987474 +v -0.379188 0.363638 0.985234 +v -0.374419 0.355127 0.982945 +v -0.369833 0.346993 0.98045 +v -0.365282 0.33888 0.977951 +v -0.360809 0.330997 0.975565 +v -0.356337 0.323219 0.973239 +v -0.351904 0.318049 0.971281 +v -0.347425 0.312853 0.969313 +v -0.341898 0.308594 0.966732 +v -0.336361 0.304314 0.964164 +v -0.334512 0.302222 0.958442 +v -0.33172 0.299571 0.94953 +v -0.330145 0.292049 0.9512 +v -0.328556 0.284225 0.952364 +v -0.326786 0.277 0.953186 +v -0.324941 0.269801 0.953673 +v -0.323334 0.262847 0.953763 +v -0.321655 0.255916 0.953531 +v -0.320281 0.249226 0.952923 +v -0.318837 0.242543 0.952009 +v -0.317699 0.236097 0.950757 +v -0.316479 0.229623 0.949106 +v -0.315438 0.22301 0.947077 +v -0.314375 0.216501 0.94477 +v -0.314473 0.213691 0.946055 +v -0.314813 0.21048 0.948552 +v -0.313684 0.202533 0.949404 +v -0.312548 0.194596 0.950181 +v -0.311368 0.186848 0.948793 +v -0.310185 0.179179 0.947304 +v -0.309289 0.17163 0.945386 +v -0.308401 0.164137 0.943573 +v -0.30795 0.156051 0.94328 +v -0.307571 0.147936 0.943586 +v -0.306543 0.140144 0.945759 +v -0.305578 0.132374 0.948196 +v -0.304849 0.124116 0.950979 +v -0.304104 0.115812 0.953697 +v -0.303824 0.107501 0.956131 +v -0.30355 0.0991788 0.958498 +v -0.303462 0.0908693 0.96101 +v -0.303394 0.0825786 0.963638 +v -0.303559 0.0743937 0.966623 +v -0.303771 0.0662459 0.969746 +v -0.303986 0.0579875 0.973186 +v -0.30421 0.0497312 0.976637 +v -0.304532 0.041463 0.980037 +v -0.304851 0.0331792 0.983417 +v -0.305167 0.024815 0.986712 +v -0.305458 0.0164234 0.989941 +v -0.305712 0.00792115 0.993125 +v -0.305942 -0.000623252 0.996214 +v -0.306141 -0.00924958 0.999158 +v -0.306318 -0.0178976 1.00204 +v -0.306487 -0.026796 1.00464 +v -0.306624 -0.0357254 1.00714 +v -0.306721 -0.0447473 1.00937 +v -0.306768 -0.0538176 1.01145 +v -0.307009 -0.0630501 1.0132 +v -0.307193 -0.0723118 1.01476 +v -0.307565 -0.0816658 1.0159 +v -0.307904 -0.0910353 1.01688 +v -0.308296 -0.100616 1.01756 +v -0.308677 -0.110244 1.01798 +v -0.309163 -0.120039 1.01814 +v -0.309596 -0.129835 1.01808 +v -0.310237 -0.139522 1.01783 +v -0.310878 -0.149184 1.01735 +v -0.311733 -0.159004 1.01669 +v -0.312566 -0.168804 
1.01586 +v -0.313575 -0.17856 1.01493 +v -0.314553 -0.18828 1.01393 +v -0.315157 -0.198147 1.01307 +v -0.315789 -0.208013 1.01218 +v -0.316981 -0.21774 1.01123 +v -0.3182 -0.227482 1.01033 +v -0.319469 -0.237151 1.00955 +v -0.320766 -0.246817 1.0089 +v -0.322005 -0.256403 1.0083 +v -0.323258 -0.266022 1.00774 +v -0.324545 -0.275668 1.0071 +v -0.325778 -0.285331 1.00639 +v -0.326994 -0.295038 1.00548 +v -0.328123 -0.304786 1.00449 +v -0.329156 -0.314398 1.00325 +v -0.330084 -0.324002 1.00186 +v -0.330904 -0.33354 1.00026 +v -0.33155 -0.343102 0.998474 +v -0.333073 -0.352122 0.995814 +v -0.334519 -0.361126 0.993049 +v -0.336469 -0.369437 0.989801 +v -0.338296 -0.377819 0.986541 +v -0.34066 -0.385332 0.983004 +v -0.342891 -0.392885 0.979486 +v -0.345441 -0.399776 0.975711 +v -0.347981 -0.406654 0.971895 +v -0.350517 -0.413034 0.968348 +v -0.353069 -0.419332 0.96469 +v -0.355258 -0.425106 0.96147 +v -0.357432 -0.4309 0.958369 +v -0.35954 -0.436579 0.955338 +v -0.361629 -0.442232 0.952226 +v -0.363573 -0.447894 0.949113 +v -0.365471 -0.45352 0.945918 +v -0.367445 -0.45933 0.942603 +v -0.369358 -0.465129 0.939141 +v -0.371239 -0.471286 0.935521 +v -0.37306 -0.477407 0.931805 +v -0.374835 -0.483769 0.92786 +v -0.376549 -0.49011 0.923849 +v -0.37836 -0.496654 0.919542 +v -0.38009 -0.503132 0.915195 +v -0.381858 -0.509797 0.910514 +v -0.383524 -0.516416 0.905723 +v -0.385195 -0.52331 0.900652 +v -0.386803 -0.53017 0.895501 +v -0.388488 -0.537125 0.890058 +v -0.390105 -0.544061 0.884563 +v -0.391717 -0.551255 0.878712 +v -0.393232 -0.558426 0.872774 +v -0.394875 -0.565687 0.866458 +v -0.396467 -0.572918 0.860116 +v -0.401611 0.426745 1.00291 +v -0.398284 0.419361 1.00134 +v -0.394926 0.412035 0.999645 +v -0.389429 0.401875 0.997567 +v -0.383937 0.391704 0.995431 +v -0.378894 0.382535 0.993174 +v -0.37385 0.373403 0.990854 +v -0.369115 0.365055 0.988504 +v -0.364345 0.356633 0.986117 +v -0.359735 0.34859 0.98356 +v -0.355154 0.340609 0.981044 +v -0.350534 0.332687 0.978559 +v -0.345914 0.324961 0.9761 +v -0.341278 0.319756 0.973908 +v -0.336611 0.314527 0.971736 +v -0.330171 0.310308 0.968914 +v -0.323715 0.306073 0.9661 +v -0.32274 0.303745 0.96041 +v -0.321124 0.300879 0.951923 +v -0.319676 0.293271 0.953522 +v -0.318224 0.2854 0.954612 +v -0.316429 0.278173 0.955374 +v -0.314574 0.270974 0.955794 +v -0.312891 0.264013 0.955847 +v -0.31116 0.257082 0.955582 +v -0.309695 0.250276 0.954948 +v -0.308181 0.243487 0.953994 +v -0.306904 0.236977 0.952731 +v -0.305551 0.23046 0.951069 +v -0.304333 0.223842 0.949043 +v -0.303115 0.2173 0.946729 +v -0.303086 0.214646 0.947525 +v -0.303257 0.211444 0.949936 +v -0.302077 0.203232 0.950696 +v -0.300926 0.195052 0.95143 +v -0.299881 0.187176 0.949996 +v -0.298857 0.179349 0.94847 +v -0.297986 0.171619 0.946743 +v -0.297144 0.163896 0.945177 +v -0.29689 0.155389 0.945381 +v -0.296704 0.146889 0.94621 +v -0.295509 0.139282 0.948605 +v -0.294344 0.131703 0.951124 +v -0.293561 0.123421 0.953793 +v -0.292764 0.115106 0.956397 +v -0.29245 0.10676 0.958789 +v -0.29215 0.0984117 0.961206 +v -0.292115 0.0901474 0.963919 +v -0.292104 0.0819192 0.966742 +v -0.292274 0.0738342 0.970037 +v -0.292488 0.0657454 0.973393 +v -0.292737 0.0574848 0.977054 +v -0.292976 0.0491963 0.980679 +v -0.293264 0.0409362 0.984128 +v -0.293542 0.0326605 0.987552 +v -0.293755 0.024302 0.99089 +v -0.293939 0.0159113 0.994148 +v -0.294068 0.00745236 0.997316 +v -0.294166 -0.00105237 1.00038 +v -0.294219 -0.0097364 1.00339 +v -0.294236 -0.0184569 1.0063 +v -0.294156 -0.0273723 1.00888 +v 
-0.294043 -0.0363206 1.01136 +v -0.293968 -0.0453129 1.01353 +v -0.293869 -0.0543378 1.01556 +v -0.293904 -0.0636287 1.01718 +v -0.293897 -0.0729259 1.01864 +v -0.294316 -0.0822336 1.01964 +v -0.294697 -0.0915592 1.02045 +v -0.29499 -0.101178 1.02091 +v -0.295237 -0.110795 1.02112 +v -0.295688 -0.12055 1.02117 +v -0.296112 -0.130295 1.02106 +v -0.296814 -0.140003 1.02063 +v -0.297489 -0.149676 1.02008 +v -0.298423 -0.159527 1.01935 +v -0.299379 -0.169356 1.01853 +v -0.300515 -0.179097 1.01762 +v -0.301677 -0.188839 1.01674 +v -0.302186 -0.19887 1.01612 +v -0.302732 -0.208912 1.0156 +v -0.304094 -0.218725 1.01499 +v -0.305498 -0.228554 1.01455 +v -0.306852 -0.238339 1.01433 +v -0.308255 -0.248102 1.01416 +v -0.309701 -0.257808 1.01404 +v -0.311148 -0.267562 1.01391 +v -0.312556 -0.277441 1.01357 +v -0.313881 -0.287414 1.01319 +v -0.315003 -0.29733 1.01268 +v -0.31602 -0.307258 1.01199 +v -0.317051 -0.31704 1.01093 +v -0.317948 -0.326833 1.00964 +v -0.318467 -0.336486 1.00817 +v -0.318733 -0.346161 1.00647 +v -0.320204 -0.3553 1.00368 +v -0.321533 -0.364417 1.00071 +v -0.323538 -0.372628 0.997197 +v -0.325452 -0.380799 0.993529 +v -0.328062 -0.388182 0.989658 +v -0.330586 -0.395518 0.985708 +v -0.333577 -0.402103 0.981806 +v -0.336513 -0.408739 0.977852 +v -0.339438 -0.415035 0.973976 +v -0.342427 -0.421339 0.970298 +v -0.344794 -0.426834 0.967118 +v -0.34715 -0.432334 0.963955 +v -0.349306 -0.437854 0.960934 +v -0.351435 -0.443393 0.957988 +v -0.353479 -0.448819 0.955058 +v -0.355475 -0.454195 0.952078 +v -0.357529 -0.459916 0.948885 +v -0.359555 -0.465631 0.945633 +v -0.361403 -0.471794 0.942238 +v -0.363206 -0.477966 0.938774 +v -0.36508 -0.484366 0.934976 +v -0.366884 -0.490724 0.931035 +v -0.368781 -0.497356 0.926884 +v -0.37063 -0.503946 0.922706 +v -0.372485 -0.510798 0.918197 +v -0.374251 -0.517604 0.913578 +v -0.376077 -0.524656 0.908602 +v -0.377804 -0.53168 0.903485 +v -0.379574 -0.538822 0.898094 +v -0.381297 -0.545942 0.892656 +v -0.382982 -0.553375 0.886816 +v -0.384661 -0.560818 0.880958 +v -0.386411 -0.568416 0.874538 +v -0.388179 -0.576002 0.868133 +v -0.390918 0.426721 1.00776 +v -0.387729 0.419434 1.00565 +v -0.384562 0.41214 1.00348 +v -0.378986 0.402207 1.00107 +v -0.373406 0.392277 0.998569 +v -0.368336 0.383312 0.996074 +v -0.363269 0.374392 0.993538 +v -0.358504 0.366158 0.991049 +v -0.353734 0.357848 0.988526 +v -0.349203 0.35006 0.985806 +v -0.344693 0.342398 0.983123 +v -0.340231 0.334868 0.980482 +v -0.335766 0.327566 0.977937 +v -0.331128 0.321763 0.975436 +v -0.326472 0.315917 0.972958 +v -0.320503 0.311149 0.969649 +v -0.314511 0.306348 0.966326 +v -0.312909 0.303892 0.960581 +v -0.311123 0.301176 0.95372 +v -0.309604 0.293773 0.955175 +v -0.308087 0.286101 0.956179 +v -0.306219 0.279103 0.956863 +v -0.304316 0.272114 0.957215 +v -0.302543 0.265176 0.957234 +v -0.300741 0.25827 0.956925 +v -0.299164 0.251487 0.956276 +v -0.297565 0.244732 0.955307 +v -0.296067 0.238253 0.954053 +v -0.29454 0.231789 0.952486 +v -0.29288 0.225444 0.950483 +v -0.291271 0.219134 0.948259 +v -0.291315 0.216143 0.948093 +v -0.291463 0.212413 0.950127 +v -0.290395 0.204162 0.950936 +v -0.289322 0.195917 0.951696 +v -0.288352 0.187837 0.950377 +v -0.287374 0.179825 0.948968 +v -0.28653 0.171543 0.947632 +v -0.285708 0.163229 0.946527 +v -0.284909 0.155009 0.947407 +v -0.284147 0.146794 0.948805 +v -0.283322 0.138932 0.951272 +v -0.282515 0.131042 0.95379 +v -0.281939 0.122732 0.956398 +v -0.281347 0.114401 0.958888 +v -0.281085 0.106109 0.961402 +v -0.280849 0.0978204 0.964035 +v 
-0.280917 0.0896965 0.967057 +v -0.281027 0.0816323 0.970252 +v -0.2812 0.0735211 0.973883 +v -0.281393 0.0654172 0.97756 +v -0.281631 0.0572133 0.981282 +v -0.281864 0.0489627 0.984934 +v -0.282091 0.0406935 0.988414 +v -0.282296 0.0323949 0.991834 +v -0.282366 0.02399 0.995168 +v -0.28242 0.0155677 0.998451 +v -0.282408 0.00704504 1.00158 +v -0.282384 -0.00148776 1.00467 +v -0.28225 -0.0102022 1.00761 +v -0.282078 -0.0189521 1.01045 +v -0.281757 -0.0278376 1.01298 +v -0.281397 -0.0367645 1.0154 +v -0.281074 -0.0457908 1.01747 +v -0.280708 -0.0548255 1.01938 +v -0.28043 -0.0640517 1.02083 +v -0.280148 -0.0733039 1.02216 +v -0.280166 -0.0826803 1.02305 +v -0.280179 -0.0921184 1.02366 +v -0.280324 -0.101714 1.02391 +v -0.28044 -0.111301 1.024 +v -0.28091 -0.120979 1.02383 +v -0.281378 -0.130638 1.02347 +v -0.282177 -0.140378 1.0229 +v -0.282974 -0.150097 1.02224 +v -0.284071 -0.159894 1.02157 +v -0.285148 -0.169694 1.02085 +v -0.286447 -0.179555 1.02013 +v -0.287794 -0.189446 1.01949 +v -0.288412 -0.199582 1.01919 +v -0.289079 -0.209756 1.0191 +v -0.290641 -0.219708 1.01909 +v -0.292266 -0.229631 1.01921 +v -0.293768 -0.239607 1.0196 +v -0.295258 -0.249588 1.02001 +v -0.296908 -0.259563 1.02032 +v -0.298488 -0.269532 1.02051 +v -0.299818 -0.279623 1.02064 +v -0.301069 -0.289739 1.02057 +v -0.30226 -0.299778 1.02026 +v -0.303302 -0.309841 1.01968 +v -0.304277 -0.319845 1.01872 +v -0.305026 -0.329848 1.01741 +v -0.305527 -0.339664 1.01579 +v -0.305781 -0.349424 1.01375 +v -0.306856 -0.358663 1.01093 +v -0.307651 -0.36778 1.00764 +v -0.30959 -0.375735 1.00397 +v -0.311391 -0.383533 1.00001 +v -0.314279 -0.390374 0.995935 +v -0.317122 -0.397216 0.991921 +v -0.321388 -0.403441 0.987454 +v -0.325681 -0.409669 0.982989 +v -0.328925 -0.416248 0.978924 +v -0.332143 -0.422794 0.97499 +v -0.334608 -0.428025 0.971977 +v -0.337098 -0.433276 0.969011 +v -0.339238 -0.438639 0.966216 +v -0.341323 -0.443981 0.963334 +v -0.343292 -0.449264 0.960579 +v -0.345244 -0.454503 0.957807 +v -0.347282 -0.460126 0.954808 +v -0.349281 -0.465725 0.951846 +v -0.351188 -0.471817 0.948693 +v -0.353039 -0.47788 0.945406 +v -0.354988 -0.484217 0.941824 +v -0.356893 -0.490534 0.938164 +v -0.358921 -0.497322 0.934191 +v -0.360888 -0.504047 0.93009 +v -0.362817 -0.511095 0.925728 +v -0.364635 -0.518124 0.921259 +v -0.366539 -0.525397 0.916434 +v -0.368368 -0.532626 0.911508 +v -0.370226 -0.54008 0.906243 +v -0.372014 -0.547505 0.900876 +v -0.373816 -0.555206 0.895099 +v -0.375569 -0.562861 0.88927 +v -0.377467 -0.570714 0.882957 +v -0.379269 -0.578554 0.876548 +v -0.38015 0.426801 1.01244 +v -0.377148 0.419553 1.00983 +v -0.37415 0.412294 1.00719 +v -0.368497 0.402608 1.0044 +v -0.362842 0.392912 1.00156 +v -0.357752 0.384164 0.99887 +v -0.352664 0.375434 0.996124 +v -0.347896 0.367321 0.993531 +v -0.343122 0.359127 0.990929 +v -0.338667 0.3516 0.988064 +v -0.334229 0.344221 0.985236 +v -0.329913 0.337036 0.982461 +v -0.325614 0.330159 0.979793 +v -0.320985 0.323773 0.976951 +v -0.316332 0.317314 0.974168 +v -0.310821 0.311989 0.970391 +v -0.305308 0.306662 0.966605 +v -0.30307 0.304101 0.960713 +v -0.300846 0.301439 0.954893 +v -0.299283 0.294212 0.956226 +v -0.297718 0.286784 0.957137 +v -0.295776 0.280031 0.957719 +v -0.293816 0.27329 0.957999 +v -0.291982 0.266393 0.957965 +v -0.290141 0.259549 0.957613 +v -0.288478 0.252824 0.956936 +v -0.286805 0.246123 0.955943 +v -0.285094 0.239714 0.954679 +v -0.2834 0.233336 0.953107 +v -0.281386 0.227306 0.951208 +v -0.279417 0.221284 0.949044 +v -0.279583 0.217651 0.948703 +v 
-0.279721 0.213395 0.950369 +v -0.27873 0.205099 0.95119 +v -0.277746 0.196786 0.95192 +v -0.27682 0.188535 0.950694 +v -0.275896 0.180331 0.949399 +v -0.275082 0.171475 0.94847 +v -0.274272 0.162566 0.94788 +v -0.272913 0.154634 0.94945 +v -0.271585 0.146717 0.951345 +v -0.271132 0.138554 0.953846 +v -0.27068 0.13036 0.956346 +v -0.270307 0.122031 0.958839 +v -0.269938 0.11371 0.961348 +v -0.269749 0.105472 0.964103 +v -0.269591 0.0972846 0.966994 +v -0.269783 0.0893238 0.970394 +v -0.270011 0.08141 0.973941 +v -0.270179 0.0732774 0.977914 +v -0.270335 0.0651375 0.981869 +v -0.270562 0.0569629 0.985586 +v -0.270765 0.04875 0.989234 +v -0.27093 0.0404622 0.992735 +v -0.271061 0.0321355 0.996112 +v -0.270997 0.0236958 0.999452 +v -0.270906 0.0152341 1.00273 +v -0.270777 0.00667923 1.00589 +v -0.270608 -0.00191877 1.00894 +v -0.270281 -0.0106698 1.01181 +v -0.269922 -0.0194508 1.01458 +v -0.269361 -0.0282988 1.01705 +v -0.268758 -0.0371759 1.0194 +v -0.268199 -0.0462174 1.02134 +v -0.267574 -0.0552973 1.0231 +v -0.267007 -0.0644928 1.02437 +v -0.266395 -0.0737003 1.02547 +v -0.26598 -0.0831553 1.02609 +v -0.265581 -0.0926328 1.02651 +v -0.26563 -0.102196 1.0265 +v -0.26566 -0.11175 1.02636 +v -0.266154 -0.121344 1.02584 +v -0.266648 -0.130906 1.02522 +v -0.267534 -0.140635 1.0245 +v -0.268427 -0.15036 1.02382 +v -0.269666 -0.16017 1.02322 +v -0.270894 -0.170015 1.02266 +v -0.272353 -0.18005 1.02224 +v -0.273846 -0.190094 1.02204 +v -0.274637 -0.200348 1.02233 +v -0.275501 -0.210676 1.02295 +v -0.277314 -0.220806 1.02365 +v -0.279178 -0.230919 1.02446 +v -0.28078 -0.241052 1.0254 +v -0.282377 -0.25122 1.02633 +v -0.284191 -0.261393 1.02701 +v -0.285921 -0.271596 1.02749 +v -0.287134 -0.281892 1.02789 +v -0.288229 -0.29225 1.02805 +v -0.289416 -0.302383 1.02774 +v -0.290429 -0.312545 1.02716 +v -0.291238 -0.322733 1.02609 +v -0.291781 -0.332875 1.02457 +v -0.2922 -0.342742 1.02258 +v -0.292238 -0.352505 1.02001 +v -0.292837 -0.361585 1.01681 +v -0.293103 -0.370491 1.01297 +v -0.294871 -0.377902 1.00866 +v -0.296504 -0.385162 1.00414 +v -0.299622 -0.391399 0.999767 +v -0.302767 -0.397711 0.995431 +v -0.308488 -0.403757 0.990917 +v -0.314254 -0.409929 0.986433 +v -0.317799 -0.416789 0.98237 +v -0.321375 -0.423718 0.978494 +v -0.323981 -0.42876 0.975693 +v -0.326603 -0.433805 0.972936 +v -0.328682 -0.439026 0.970407 +v -0.33077 -0.444256 0.96793 +v -0.332652 -0.449427 0.965397 +v -0.334546 -0.4546 0.962889 +v -0.336532 -0.460037 0.960155 +v -0.338539 -0.465523 0.957361 +v -0.340539 -0.471571 0.954411 +v -0.342516 -0.477588 0.951367 +v -0.344564 -0.483916 0.948075 +v -0.346557 -0.490205 0.944677 +v -0.348718 -0.497072 0.940916 +v -0.350836 -0.503937 0.937006 +v -0.352803 -0.511256 0.932834 +v -0.354725 -0.518509 0.928591 +v -0.356707 -0.525967 0.923896 +v -0.358606 -0.533405 0.919147 +v -0.36056 -0.541164 0.914065 +v -0.362491 -0.548915 0.90886 +v -0.364454 -0.556909 0.903188 +v -0.366355 -0.564865 0.897452 +v -0.36832 -0.572933 0.891175 +v -0.370263 -0.581025 0.884833 +v -0.368532 0.426014 1.01656 +v -0.365754 0.418954 1.0134 +v -0.363016 0.411821 1.0103 +v -0.357267 0.402461 1.0072 +v -0.35151 0.39312 1.00407 +v -0.34643 0.384606 1.00112 +v -0.341377 0.376093 0.998152 +v -0.33677 0.368295 0.995331 +v -0.332157 0.360405 0.992525 +v -0.327885 0.353009 0.989437 +v -0.323628 0.345769 0.986365 +v -0.319498 0.33863 0.983263 +v -0.315427 0.331891 0.98027 +v -0.311217 0.325312 0.977186 +v -0.307008 0.318627 0.974183 +v -0.302308 0.312545 0.970253 +v -0.297604 0.306468 0.9663 +v -0.294207 0.303378 
0.960536 +v -0.290885 0.300331 0.955761 +v -0.28929 0.294116 0.956712 +v -0.287674 0.287805 0.957415 +v -0.285597 0.281173 0.957914 +v -0.283517 0.274506 0.958108 +v -0.281568 0.267518 0.958012 +v -0.279635 0.260569 0.957591 +v -0.277912 0.253873 0.956867 +v -0.276209 0.247238 0.955832 +v -0.274401 0.241096 0.954552 +v -0.272607 0.234987 0.952988 +v -0.270319 0.229313 0.951185 +v -0.268054 0.223632 0.949041 +v -0.268182 0.219775 0.9479 +v -0.268065 0.215116 0.949301 +v -0.267264 0.206485 0.950377 +v -0.266473 0.197833 0.951352 +v -0.265535 0.189412 0.950398 +v -0.264603 0.181016 0.9494 +v -0.263395 0.172114 0.949154 +v -0.262183 0.163158 0.949248 +v -0.261131 0.154924 0.951205 +v -0.260105 0.146676 0.95336 +v -0.259578 0.138355 0.955949 +v -0.25905 0.130023 0.958538 +v -0.258745 0.121721 0.96128 +v -0.258453 0.113425 0.9641 +v -0.258442 0.10533 0.967324 +v -0.258467 0.0972985 0.970682 +v -0.258689 0.0892962 0.974584 +v -0.258944 0.0813151 0.978538 +v -0.259054 0.0732223 0.982614 +v -0.259148 0.0651067 0.986634 +v -0.259295 0.0568904 0.990339 +v -0.25942 0.0486527 0.993994 +v -0.259446 0.0403185 0.997428 +v -0.259451 0.0319719 1.00081 +v -0.259282 0.0234935 1.0041 +v -0.2591 0.0150037 1.00736 +v -0.258783 0.00641464 1.01045 +v -0.258452 -0.00220377 1.01347 +v -0.257826 -0.0109906 1.01628 +v -0.257162 -0.0197985 1.01901 +v -0.256236 -0.0286396 1.02134 +v -0.255283 -0.0374817 1.02354 +v -0.254336 -0.046513 1.02522 +v -0.253347 -0.0555785 1.02675 +v -0.252572 -0.0647332 1.02767 +v -0.251787 -0.0739173 1.02836 +v -0.251207 -0.0833499 1.02852 +v -0.250609 -0.0927854 1.02857 +v -0.250255 -0.102392 1.02828 +v -0.249943 -0.112001 1.02784 +v -0.250669 -0.121529 1.02719 +v -0.251417 -0.131055 1.0265 +v -0.252468 -0.140788 1.02587 +v -0.253476 -0.150553 1.02529 +v -0.254895 -0.160521 1.02488 +v -0.256347 -0.170505 1.02472 +v -0.258115 -0.180678 1.02496 +v -0.259948 -0.190871 1.02542 +v -0.261026 -0.201297 1.02652 +v -0.262169 -0.211687 1.0278 +v -0.264159 -0.22195 1.02906 +v -0.266173 -0.232203 1.03037 +v -0.267983 -0.242589 1.03176 +v -0.269768 -0.252951 1.03302 +v -0.271518 -0.263359 1.03396 +v -0.273163 -0.27382 1.03471 +v -0.274294 -0.284378 1.03529 +v -0.275294 -0.294967 1.03556 +v -0.276173 -0.305312 1.0354 +v -0.276837 -0.315615 1.03481 +v -0.277635 -0.325915 1.0335 +v -0.278073 -0.336131 1.03159 +v -0.278218 -0.345994 1.0293 +v -0.27792 -0.355619 1.02618 +v -0.278369 -0.364418 1.0224 +v -0.278445 -0.372878 1.01792 +v -0.279242 -0.380111 1.01313 +v -0.279885 -0.387154 1.00809 +v -0.285781 -0.391996 1.00317 +v -0.291749 -0.397118 0.99861 +v -0.29755 -0.402795 0.994128 +v -0.303402 -0.408738 0.990153 +v -0.307233 -0.415629 0.986104 +v -0.311012 -0.422592 0.982455 +v -0.313439 -0.428354 0.979607 +v -0.31586 -0.434177 0.976892 +v -0.317865 -0.439096 0.974598 +v -0.319824 -0.44398 0.972305 +v -0.321741 -0.449024 0.97006 +v -0.323647 -0.454047 0.967783 +v -0.325608 -0.459497 0.965391 +v -0.327501 -0.464837 0.962919 +v -0.329535 -0.470569 0.960216 +v -0.331553 -0.476348 0.957432 +v -0.333693 -0.482751 0.954477 +v -0.33576 -0.489091 0.951343 +v -0.338016 -0.496145 0.947773 +v -0.340205 -0.503159 0.944054 +v -0.342342 -0.510676 0.940057 +v -0.344394 -0.518151 0.935923 +v -0.346517 -0.525941 0.931433 +v -0.348553 -0.533671 0.926805 +v -0.350612 -0.541752 0.921837 +v -0.352555 -0.549786 0.916726 +v -0.354625 -0.558104 0.911207 +v -0.356626 -0.566387 0.905601 +v -0.358713 -0.574828 0.899512 +v -0.360702 -0.583222 0.8933 +v -0.356814 0.425375 1.02043 +v -0.354325 0.418408 1.01684 +v -0.351855 0.41138 
1.01329 +v -0.346013 0.402366 1.0099 +v -0.340153 0.393368 1.00646 +v -0.3351 0.385071 1.00328 +v -0.330081 0.376782 1.00007 +v -0.325638 0.369278 0.997071 +v -0.321187 0.361694 0.994088 +v -0.317108 0.354404 0.990769 +v -0.313019 0.347312 0.987457 +v -0.309093 0.340284 0.984038 +v -0.305176 0.333665 0.980745 +v -0.301455 0.326824 0.977433 +v -0.297698 0.319922 0.974189 +v -0.293791 0.313116 0.970114 +v -0.289891 0.306315 0.966003 +v -0.285398 0.302586 0.960285 +v -0.28087 0.299009 0.955951 +v -0.279209 0.293922 0.956627 +v -0.277524 0.288785 0.95713 +v -0.275293 0.28227 0.957528 +v -0.273061 0.275719 0.957639 +v -0.271005 0.268643 0.957467 +v -0.268973 0.261628 0.956958 +v -0.267254 0.254978 0.956181 +v -0.265574 0.248432 0.955104 +v -0.263689 0.242606 0.953816 +v -0.261836 0.236784 0.952239 +v -0.259303 0.231522 0.950445 +v -0.256769 0.226199 0.948279 +v -0.256777 0.221852 0.947084 +v -0.256407 0.216806 0.948233 +v -0.255802 0.207853 0.949558 +v -0.255197 0.198859 0.950765 +v -0.254249 0.190273 0.950102 +v -0.253314 0.181711 0.949406 +v -0.25169 0.172732 0.949841 +v -0.250095 0.163748 0.950588 +v -0.249349 0.155178 0.952896 +v -0.248622 0.146612 0.955318 +v -0.248022 0.138153 0.958017 +v -0.247425 0.129691 0.960738 +v -0.247197 0.121427 0.963796 +v -0.246995 0.11317 0.966962 +v -0.247183 0.105244 0.970713 +v -0.247406 0.0973614 0.974555 +v -0.247663 0.0893302 0.978911 +v -0.24792 0.0812658 0.983228 +v -0.247965 0.0732046 0.98738 +v -0.247966 0.0650799 0.991405 +v -0.248041 0.0568367 0.995121 +v -0.248083 0.0485608 0.998736 +v -0.247994 0.0402178 1.0022 +v -0.247876 0.0318515 1.00559 +v -0.247618 0.023347 1.00888 +v -0.247328 0.0148064 1.0121 +v -0.246839 0.00617369 1.01512 +v -0.246327 -0.0024793 1.01806 +v -0.2454 -0.011297 1.02081 +v -0.244429 -0.0201223 1.02349 +v -0.24315 -0.0289616 1.02563 +v -0.241836 -0.0378195 1.02768 +v -0.2405 -0.0468384 1.02907 +v -0.23915 -0.05587 1.03029 +v -0.238173 -0.064999 1.0307 +v -0.237148 -0.0741218 1.03093 +v -0.236404 -0.0835315 1.03065 +v -0.235652 -0.0929385 1.0303 +v -0.234995 -0.102502 1.02967 +v -0.234364 -0.112084 1.02899 +v -0.235317 -0.121624 1.0283 +v -0.236233 -0.131191 1.02762 +v -0.237399 -0.140972 1.02724 +v -0.238569 -0.150762 1.02696 +v -0.240249 -0.160948 1.02724 +v -0.241968 -0.171128 1.02769 +v -0.244078 -0.181385 1.02855 +v -0.246234 -0.191694 1.02956 +v -0.247628 -0.202178 1.03134 +v -0.24906 -0.212643 1.03325 +v -0.251195 -0.223048 1.035 +v -0.253355 -0.233471 1.03672 +v -0.255361 -0.244067 1.03841 +v -0.257301 -0.2547 1.03995 +v -0.258967 -0.265409 1.04121 +v -0.260508 -0.276137 1.04214 +v -0.261532 -0.286884 1.04278 +v -0.262407 -0.297644 1.0431 +v -0.262926 -0.308128 1.04296 +v -0.263193 -0.318603 1.04232 +v -0.263943 -0.32899 1.04068 +v -0.264278 -0.339243 1.03835 +v -0.264095 -0.349062 1.03558 +v -0.263491 -0.358584 1.03202 +v -0.263717 -0.366993 1.02755 +v -0.263608 -0.375017 1.02244 +v -0.263439 -0.381956 1.01697 +v -0.263067 -0.388537 1.0111 +v -0.271802 -0.392127 1.00582 +v -0.28065 -0.396168 1.00102 +v -0.286478 -0.401305 0.996429 +v -0.292344 -0.406943 0.992482 +v -0.29635 -0.413732 0.988385 +v -0.300386 -0.42093 0.984985 +v -0.302572 -0.427431 0.982109 +v -0.304788 -0.434097 0.979621 +v -0.306672 -0.438695 0.977719 +v -0.30854 -0.443286 0.97588 +v -0.310441 -0.448171 0.973931 +v -0.312374 -0.4531 0.971917 +v -0.314277 -0.458511 0.969857 +v -0.316149 -0.463904 0.967694 +v -0.318292 -0.469603 0.965358 +v -0.320393 -0.475177 0.96298 +v -0.322511 -0.481427 0.960356 +v -0.324621 -0.487813 0.957564 +v -0.326977 -0.495042 
0.954356 +v -0.329216 -0.5022 0.95091 +v -0.331556 -0.50995 0.94711 +v -0.333774 -0.51761 0.943092 +v -0.336085 -0.525754 0.938708 +v -0.338228 -0.533824 0.934149 +v -0.340398 -0.54223 0.929309 +v -0.342461 -0.550575 0.924346 +v -0.34458 -0.559204 0.91896 +v -0.3466 -0.567801 0.913425 +v -0.348799 -0.576559 0.907524 +v -0.350899 -0.585299 0.901523 +v -0.343862 0.424133 1.02393 +v -0.341557 0.417191 1.01988 +v -0.339298 0.410185 1.01591 +v -0.333542 0.401613 1.01223 +v -0.327791 0.393059 1.00854 +v -0.323006 0.385201 1.00505 +v -0.318258 0.377358 1.00158 +v -0.314053 0.370057 0.99831 +v -0.309843 0.3627 0.995061 +v -0.305971 0.35568 0.991549 +v -0.302128 0.348699 0.987995 +v -0.298525 0.341867 0.984184 +v -0.294901 0.335433 0.980463 +v -0.29174 0.328541 0.977003 +v -0.288553 0.321623 0.973613 +v -0.286039 0.314003 0.969632 +v -0.283534 0.306383 0.965611 +v -0.278671 0.302977 0.960693 +v -0.2738 0.299577 0.955816 +v -0.271113 0.295507 0.955974 +v -0.268271 0.29141 0.956089 +v -0.26531 0.284084 0.956513 +v -0.262318 0.276689 0.956449 +v -0.260225 0.269248 0.956168 +v -0.258184 0.261868 0.95552 +v -0.256565 0.255265 0.954662 +v -0.255016 0.248787 0.95352 +v -0.253277 0.24333 0.952216 +v -0.251568 0.237932 0.950681 +v -0.24938 0.23319 0.948993 +v -0.247177 0.228346 0.946933 +v -0.247002 0.223986 0.946232 +v -0.246383 0.219009 0.947618 +v -0.24539 0.209471 0.948953 +v -0.244426 0.199903 0.950145 +v -0.243272 0.190953 0.949863 +v -0.24212 0.181997 0.949577 +v -0.240372 0.172919 0.950674 +v -0.238652 0.163843 0.952025 +v -0.237832 0.155198 0.954617 +v -0.237025 0.146555 0.957258 +v -0.236459 0.138051 0.960289 +v -0.235901 0.129568 0.963398 +v -0.235842 0.121455 0.967035 +v -0.235816 0.113358 0.9708 +v -0.236098 0.105478 0.975121 +v -0.236416 0.0976368 0.979497 +v -0.236705 0.0896332 0.983971 +v -0.236988 0.0815988 0.988421 +v -0.237054 0.0735133 0.992525 +v -0.237076 0.0653887 0.996516 +v -0.237075 0.0571302 1.0002 +v -0.237051 0.0488458 1.0038 +v -0.236822 0.0404288 1.0073 +v -0.236571 0.0319997 1.01077 +v -0.236125 0.0234583 1.01403 +v -0.235657 0.0148853 1.01722 +v -0.234884 0.00615837 1.02021 +v -0.234112 -0.00257529 1.02315 +v -0.232948 -0.0114277 1.0257 +v -0.231718 -0.0203034 1.02814 +v -0.229946 -0.0291609 1.02998 +v -0.228146 -0.038019 1.03173 +v -0.22635 -0.0469671 1.0326 +v -0.224518 -0.0559202 1.03333 +v -0.223188 -0.0649339 1.03325 +v -0.221876 -0.0739392 1.0331 +v -0.221201 -0.0832813 1.03252 +v -0.220559 -0.0925658 1.03189 +v -0.220202 -0.102243 1.03121 +v -0.219803 -0.111964 1.03052 +v -0.220215 -0.121524 1.03013 +v -0.220647 -0.1311 1.02985 +v -0.221765 -0.140978 1.03005 +v -0.222922 -0.150885 1.03044 +v -0.225134 -0.161177 1.03123 +v -0.227379 -0.171468 1.03219 +v -0.230132 -0.18194 1.03348 +v -0.232925 -0.192418 1.03487 +v -0.23472 -0.202946 1.03706 +v -0.236554 -0.213487 1.03931 +v -0.238889 -0.224164 1.04146 +v -0.241201 -0.234882 1.04355 +v -0.243221 -0.245795 1.04549 +v -0.245152 -0.256723 1.04725 +v -0.246625 -0.267699 1.04868 +v -0.248017 -0.278664 1.04984 +v -0.248988 -0.289639 1.0505 +v -0.249773 -0.300577 1.05077 +v -0.25021 -0.311222 1.05046 +v -0.250286 -0.321828 1.04958 +v -0.250584 -0.332256 1.04785 +v -0.250424 -0.342529 1.04543 +v -0.250303 -0.352246 1.04213 +v -0.249827 -0.361732 1.03824 +v -0.249792 -0.369939 1.0332 +v -0.249394 -0.377779 1.02766 +v -0.252418 -0.382682 1.0213 +v -0.25538 -0.387571 1.01494 +v -0.262956 -0.391013 1.0092 +v -0.270575 -0.394897 1.00392 +v -0.27621 -0.399833 0.998956 +v -0.28182 -0.405222 0.994687 +v -0.285703 -0.411876 0.990495 +v 
-0.289614 -0.419186 0.987517 +v -0.291952 -0.425495 0.985012 +v -0.294311 -0.431926 0.982816 +v -0.295786 -0.436569 0.981321 +v -0.297269 -0.441211 0.979851 +v -0.299017 -0.445927 0.978293 +v -0.30075 -0.45064 0.976697 +v -0.302654 -0.455898 0.974954 +v -0.304566 -0.461165 0.973117 +v -0.30674 -0.46699 0.970988 +v -0.30888 -0.472753 0.968802 +v -0.311146 -0.47938 0.966385 +v -0.313342 -0.485914 0.963831 +v -0.315707 -0.493188 0.960951 +v -0.318009 -0.500561 0.957849 +v -0.320396 -0.508551 0.954324 +v -0.322652 -0.51644 0.950581 +v -0.325091 -0.524877 0.946377 +v -0.327439 -0.533326 0.94192 +v -0.329705 -0.542121 0.937139 +v -0.331848 -0.550849 0.932219 +v -0.3341 -0.559868 0.926907 +v -0.336256 -0.568916 0.921445 +v -0.33858 -0.578037 0.915615 +v -0.340818 -0.587112 0.909714 +v -0.330862 0.422959 1.0273 +v -0.328773 0.416029 1.02285 +v -0.326722 0.40901 1.01846 +v -0.321069 0.400875 1.01453 +v -0.315421 0.392765 1.01056 +v -0.310906 0.385343 1.00681 +v -0.30642 0.377956 1.00306 +v -0.302454 0.370878 0.999507 +v -0.298485 0.363764 0.996003 +v -0.294844 0.356957 0.992275 +v -0.291228 0.350129 0.988508 +v -0.287963 0.343545 0.984313 +v -0.284619 0.337191 0.980211 +v -0.282032 0.330293 0.976553 +v -0.279396 0.323353 0.973025 +v -0.278291 0.314896 0.969127 +v -0.277194 0.306451 0.965198 +v -0.271954 0.303394 0.9611 +v -0.266707 0.300373 0.957023 +v -0.263275 0.296978 0.953336 +v -0.259039 0.294009 0.954665 +v -0.255213 0.285987 0.954789 +v -0.251412 0.277785 0.954432 +v -0.249336 0.269984 0.954053 +v -0.247323 0.262187 0.953324 +v -0.245868 0.255626 0.952401 +v -0.24449 0.249257 0.951226 +v -0.242952 0.244193 0.949952 +v -0.241451 0.239213 0.948482 +v -0.239629 0.234989 0.946941 +v -0.237791 0.230614 0.945038 +v -0.237237 0.226092 0.945371 +v -0.236355 0.221195 0.947012 +v -0.234984 0.211079 0.948351 +v -0.233648 0.200936 0.949523 +v -0.232289 0.191615 0.949628 +v -0.230927 0.182296 0.949837 +v -0.229053 0.173095 0.951544 +v -0.227208 0.163933 0.953479 +v -0.226313 0.155218 0.956324 +v -0.225428 0.146514 0.959201 +v -0.224908 0.137992 0.962634 +v -0.224408 0.129504 0.966178 +v -0.224541 0.121573 0.970474 +v -0.224707 0.113672 0.974867 +v -0.225093 0.10584 0.979718 +v -0.225475 0.0979869 0.984536 +v -0.225806 0.0900068 0.989135 +v -0.226093 0.0819852 0.99367 +v -0.226172 0.0738771 0.997693 +v -0.226215 0.065733 1.00165 +v -0.226156 0.0574625 1.00535 +v -0.226078 0.0491717 1.00898 +v -0.225714 0.0406917 1.01253 +v -0.225328 0.0321951 1.01604 +v -0.224726 0.0236134 1.0193 +v -0.22409 0.0150087 1.02249 +v -0.223041 0.00618891 1.02545 +v -0.221974 -0.00264224 1.02835 +v -0.220549 -0.0115352 1.03069 +v -0.219056 -0.0204493 1.03289 +v -0.216809 -0.0292997 1.03445 +v -0.214492 -0.0381662 1.03582 +v -0.212176 -0.0470686 1.03613 +v -0.2098 -0.0559706 1.03629 +v -0.208187 -0.0648492 1.03577 +v -0.206638 -0.0736496 1.03522 +v -0.206059 -0.0829215 1.03441 +v -0.205465 -0.0922039 1.03361 +v -0.205367 -0.102044 1.03299 +v -0.205263 -0.111889 1.03238 +v -0.205231 -0.121515 1.03246 +v -0.205224 -0.131202 1.03266 +v -0.206314 -0.141139 1.03348 +v -0.207423 -0.151057 1.03446 +v -0.210136 -0.161434 1.03579 +v -0.212903 -0.171794 1.03723 +v -0.21632 -0.18246 1.03887 +v -0.219771 -0.19314 1.04057 +v -0.221953 -0.203759 1.0431 +v -0.224148 -0.214362 1.04565 +v -0.226662 -0.225278 1.04811 +v -0.229125 -0.236236 1.05045 +v -0.231149 -0.247453 1.05266 +v -0.233067 -0.258746 1.05468 +v -0.234434 -0.269864 1.05631 +v -0.235645 -0.281049 1.05764 +v -0.236575 -0.292238 1.05836 +v -0.237246 -0.303456 1.05863 +v -0.237545 
-0.314299 1.05823 +v -0.237521 -0.325097 1.05728 +v -0.237421 -0.335605 1.05555 +v -0.236965 -0.346018 1.05331 +v -0.236999 -0.355739 1.04958 +v -0.236606 -0.365189 1.04522 +v -0.236215 -0.373159 1.0394 +v -0.23547 -0.380782 1.03318 +v -0.241603 -0.38374 1.026 +v -0.247724 -0.386839 1.01897 +v -0.254147 -0.390122 1.01273 +v -0.260595 -0.393713 1.00679 +v -0.265915 -0.39831 1.00137 +v -0.271268 -0.403402 0.99649 +v -0.275056 -0.409797 0.991925 +v -0.278749 -0.417068 0.988851 +v -0.281165 -0.423159 0.986784 +v -0.2836 -0.429396 0.985256 +v -0.284736 -0.434146 0.98406 +v -0.285813 -0.438878 0.983073 +v -0.287439 -0.443485 0.981988 +v -0.289015 -0.448069 0.980837 +v -0.290975 -0.453237 0.979417 +v -0.292914 -0.458409 0.97796 +v -0.295041 -0.464218 0.976312 +v -0.297156 -0.470048 0.974492 +v -0.29963 -0.476884 0.972378 +v -0.301957 -0.483701 0.970118 +v -0.304403 -0.491291 0.967546 +v -0.306707 -0.498844 0.964696 +v -0.309168 -0.507093 0.961478 +v -0.311474 -0.515267 0.958033 +v -0.314034 -0.523983 0.953971 +v -0.316478 -0.532713 0.949657 +v -0.318912 -0.541941 0.944982 +v -0.321177 -0.551066 0.940128 +v -0.323482 -0.56045 0.93484 +v -0.325741 -0.569873 0.929485 +v -0.328177 -0.579375 0.923707 +v -0.330581 -0.58886 0.917898 +v -0.316774 0.421266 1.03044 +v -0.314477 0.414134 1.02557 +v -0.312216 0.406978 1.02072 +v -0.307199 0.399509 1.01657 +v -0.302179 0.39208 1.01238 +v -0.29804 0.385155 1.00837 +v -0.293919 0.378267 1.00434 +v -0.290253 0.371542 1.00054 +v -0.2866 0.364772 0.996779 +v -0.283244 0.358175 0.992893 +v -0.27989 0.351592 0.988966 +v -0.276965 0.345234 0.984583 +v -0.273928 0.338878 0.980182 +v -0.271501 0.332273 0.976032 +v -0.269064 0.325699 0.971989 +v -0.267425 0.317794 0.968478 +v -0.265827 0.309831 0.964867 +v -0.262393 0.30518 0.961162 +v -0.258968 0.300537 0.957431 +v -0.254411 0.295968 0.952497 +v -0.249861 0.291379 0.947617 +v -0.243622 0.285142 0.952349 +v -0.238629 0.278299 0.951266 +v -0.23743 0.269516 0.950891 +v -0.236382 0.260749 0.950064 +v -0.235131 0.254097 0.948883 +v -0.233949 0.248122 0.947891 +v -0.232602 0.243559 0.946629 +v -0.23134 0.239108 0.945239 +v -0.230241 0.235733 0.944046 +v -0.229091 0.232234 0.942555 +v -0.22813 0.227894 0.944294 +v -0.227246 0.223591 0.945973 +v -0.225363 0.212712 0.947674 +v -0.223506 0.201796 0.949229 +v -0.221707 0.192122 0.949828 +v -0.219897 0.18246 0.950569 +v -0.217772 0.173175 0.95285 +v -0.21567 0.163956 0.955323 +v -0.214759 0.155292 0.958599 +v -0.213863 0.146645 0.961934 +v -0.213645 0.138343 0.966151 +v -0.213457 0.130079 0.970505 +v -0.213809 0.12231 0.975409 +v -0.214196 0.114559 0.980349 +v -0.214619 0.106729 0.985336 +v -0.215027 0.0988846 0.990292 +v -0.215339 0.0908807 0.994833 +v -0.21561 0.082844 0.999308 +v -0.215678 0.0746869 1.00341 +v -0.215706 0.0664883 1.00742 +v -0.215589 0.0581338 1.01118 +v -0.215453 0.0497692 1.01489 +v -0.215033 0.0412445 1.01852 +v -0.214575 0.0326849 1.02206 +v -0.213797 0.0240092 1.02528 +v -0.212985 0.0153235 1.02845 +v -0.211641 0.00651491 1.03129 +v -0.210231 -0.00231251 1.03403 +v -0.208738 -0.011252 1.036 +v -0.207138 -0.020216 1.03774 +v -0.204017 -0.0291479 1.03922 +v -0.200809 -0.0380938 1.04054 +v -0.197844 -0.0472612 1.04144 +v -0.19488 -0.0563933 1.04233 +v -0.193475 -0.0648569 1.04263 +v -0.192102 -0.0733033 1.04297 +v -0.192123 -0.0824901 1.04176 +v -0.192171 -0.0916856 1.04068 +v -0.192118 -0.101839 1.03936 +v -0.192109 -0.112078 1.03802 +v -0.191274 -0.121639 1.03825 +v -0.190437 -0.131191 1.0385 +v -0.190939 -0.140956 1.03923 +v -0.191475 -0.150718 1.04013 +v 
[... Wavefront OBJ vertex data continues: a long run of "+v x y z" lines (raw mesh geometry added by this diff) omitted for brevity ...]
+v -0.046752 0.337847 1.08045 +v -0.0472465 0.333012 1.07984 +v -0.0477326 0.328299 1.07945 +v -0.0482316 0.323584 1.07909 +v -0.0485723 0.319293 1.07902 +v -0.0489295 0.315004 1.07897 +v -0.0490314 0.31102 1.0793 +v -0.0491424 0.307047 1.07964 +v -0.0490585 0.303282 1.08032 +v -0.0489846 0.299518 1.08102 +v -0.0487443 0.295908 1.08207 +v -0.0485205 0.292298 1.08315 +v -0.0481167 0.288495 1.0847 +v -0.0477267 0.284688 1.08626 +v -0.0471819 0.280615 1.08839 +v -0.0466547 0.276551 1.09054 +v -0.0460478 0.272101 1.09331 +v -0.0454439 0.267652 1.09609 +v -0.0449306 0.262693 1.09957 +v -0.0444194 0.257737 1.10307 +v -0.0440289 0.252268 1.10723 +v -0.0436475 0.246805 1.11141 +v -0.043406 0.240787 1.1162 +v -0.0431741 0.234775 1.12101 +v -0.0430655 0.228145 1.12634 +v -0.0429618 0.221521 1.13168 +v -0.0429641 0.214255 1.13743 +v -0.0429729 0.206983 1.14318 +v -0.0430402 0.199116 1.14919 +v -0.0431079 0.191257 1.15521 +v -0.0432212 0.182788 1.16143 +v -0.0433373 0.174315 1.16766 +v -0.0434218 0.165153 1.17398 +v -0.0435075 0.155991 1.1803 +v -0.0436563 0.146349 1.18685 +v -0.0438035 0.136705 1.1934 +v -0.0441567 0.12647 1.20032 +v -0.0445161 0.116232 1.20726 +v -0.0452839 0.105393 1.21475 +v -0.0460511 0.0945453 1.22223 +v -0.0475794 0.0829526 1.23051 +v -0.0491197 0.0713578 1.2388 +v -0.0512212 0.0589483 1.24798 +v -0.0533327 0.0465437 1.25717 +v -0.0562294 0.0320026 1.26733 +v -0.0591235 0.0174501 1.27746 +v -0.0628997 -0.000147686 1.28626 +v -0.0666511 -0.0177517 1.29497 +v -0.0674179 -0.0368626 1.29856 +v -0.0679802 -0.0560369 1.30182 +v -0.0642615 -0.0750596 1.29526 +v -0.060202 -0.0938132 1.28841 +v -0.0573898 -0.105216 1.27686 +v -0.054576 -0.116563 1.26535 +v -0.0490274 -0.125015 1.25236 +v -0.0434966 -0.133451 1.23942 +v -0.0406824 -0.136499 1.22349 +v -0.0378803 -0.13958 1.20758 +v -0.0377874 -0.144812 1.18778 +v -0.0377596 -0.150101 1.16798 +v -0.0355258 -0.166988 1.1657 +v -0.0333016 -0.183793 1.16333 +v -0.0322853 -0.193801 1.16084 +v -0.0312357 -0.203828 1.15844 +v -0.0308042 -0.213995 1.15671 +v -0.0304072 -0.224078 1.15507 +v -0.0305699 -0.235235 1.15397 +v -0.0307275 -0.246437 1.1532 +v -0.0312075 -0.25828 1.1531 +v -0.0316307 -0.270117 1.15368 +v -0.0320559 -0.282572 1.15724 +v -0.0322857 -0.294626 1.16203 +v -0.0322852 -0.306897 1.16684 +v -0.0321695 -0.319756 1.169 +v -0.0329345 -0.32918 1.16783 +v -0.0333944 -0.338227 1.16483 +v -0.0338212 -0.346631 1.16064 +v -0.0340826 -0.354611 1.15567 +v -0.0344162 -0.361517 1.14987 +v -0.0346336 -0.367986 1.14356 +v -0.034719 -0.373177 1.13656 +v -0.0347563 -0.378138 1.12939 +v -0.0349301 -0.382831 1.1223 +v -0.0351339 -0.387801 1.11537 +v -0.0351301 -0.386917 1.10633 +v -0.0352015 -0.386031 1.09741 +v -0.0460058 -0.396299 1.11606 +v -0.0469013 -0.406237 1.12131 +v -0.0477789 -0.415651 1.12685 +v -0.0487549 -0.425893 1.13266 +v -0.0496427 -0.436455 1.13782 +v -0.0503988 -0.448497 1.14086 +v -0.0509954 -0.460789 1.14257 +v -0.0518331 -0.473884 1.1414 +v -0.051994 -0.486181 1.13615 +v -0.0511367 -0.498684 1.12438 +v -0.0502202 -0.51028 1.11181 +v -0.0496254 -0.522752 1.10089 +v -0.049009 -0.53621 1.09136 +v -0.0487883 -0.550165 1.0831 +v -0.0486573 -0.565302 1.07735 +v -0.0490509 -0.580937 1.073 +v -0.0502898 -0.597012 1.07582 +v -0.0520919 -0.612869 1.07955 +v -0.0539362 -0.628644 1.0838 +v -0.0321314 0.430792 1.09972 +v -0.0319963 0.422976 1.0985 +v -0.0318401 0.415072 1.09714 +v -0.031633 0.407751 1.09583 +v -0.0314223 0.400444 1.09446 +v -0.0313413 0.393878 1.0932 +v -0.0312635 0.387311 1.09196 +v -0.0312824 0.381188 1.09084 +v 
-0.031307 0.375093 1.08973 +v -0.0313924 0.369491 1.08885 +v -0.031483 0.363878 1.08799 +v -0.0317827 0.358678 1.0873 +v -0.0320887 0.353479 1.08665 +v -0.0323937 0.348506 1.0862 +v -0.0327112 0.343522 1.0858 +v -0.0330498 0.338855 1.0856 +v -0.0333977 0.334174 1.08543 +v -0.0337523 0.32961 1.0855 +v -0.0341197 0.325047 1.08562 +v -0.0344038 0.320982 1.08595 +v -0.0346968 0.316913 1.08631 +v -0.0348134 0.313111 1.08693 +v -0.034941 0.30932 1.08756 +v -0.0349388 0.305816 1.08839 +v -0.0349498 0.302315 1.08924 +v -0.0348194 0.298816 1.09037 +v -0.0346962 0.29532 1.09152 +v -0.034435 0.291609 1.09298 +v -0.0341832 0.287899 1.09446 +v -0.0338468 0.28391 1.09642 +v -0.0335196 0.279925 1.09841 +v -0.0331243 0.275526 1.10098 +v -0.0327282 0.271125 1.10356 +v -0.0323262 0.266196 1.10679 +v -0.0319327 0.261276 1.11005 +v -0.0316313 0.255854 1.11402 +v -0.0313338 0.250438 1.11801 +v -0.0311237 0.244448 1.12263 +v -0.0309185 0.238472 1.12727 +v -0.0308321 0.231888 1.13253 +v -0.0307467 0.225306 1.13779 +v -0.0307134 0.218037 1.1435 +v -0.0306829 0.210761 1.1492 +v -0.0307622 0.202938 1.15524 +v -0.0308392 0.195117 1.16128 +v -0.030953 0.186692 1.16761 +v -0.0310695 0.178259 1.17393 +v -0.0311609 0.169105 1.18039 +v -0.0312526 0.159949 1.18685 +v -0.0313394 0.150295 1.19352 +v -0.0314208 0.140629 1.20021 +v -0.0317269 0.130424 1.20737 +v -0.0320361 0.120213 1.21454 +v -0.0326364 0.10938 1.2223 +v -0.0332389 0.0985492 1.23007 +v -0.0344911 0.0868328 1.23871 +v -0.0357535 0.0751284 1.24736 +v -0.0372847 0.0625732 1.25709 +v -0.0388259 0.0500357 1.26684 +v -0.0411489 0.035267 1.27795 +v -0.0434647 0.0204876 1.28903 +v -0.0471501 0.00217469 1.29884 +v -0.0507872 -0.016167 1.30848 +v -0.0515973 -0.0363744 1.31226 +v -0.052186 -0.0566858 1.31557 +v -0.0488447 -0.0782349 1.30816 +v -0.0450025 -0.0993603 1.30001 +v -0.0426899 -0.113514 1.28348 +v -0.0403529 -0.127553 1.26698 +v -0.0361976 -0.136042 1.24909 +v -0.0320216 -0.144522 1.23136 +v -0.0298894 -0.149715 1.21706 +v -0.0277821 -0.154925 1.20276 +v -0.0267418 -0.16026 1.19101 +v -0.0257332 -0.165687 1.17936 +v -0.0249314 -0.174304 1.17272 +v -0.024197 -0.183053 1.16623 +v -0.0232683 -0.193404 1.16277 +v -0.0223107 -0.203772 1.15951 +v -0.0219497 -0.214002 1.15732 +v -0.0216179 -0.224116 1.15518 +v -0.0217671 -0.235306 1.15377 +v -0.0219152 -0.246544 1.15259 +v -0.0222366 -0.258719 1.15226 +v -0.0224882 -0.270905 1.15257 +v -0.0227181 -0.283805 1.15659 +v -0.0227432 -0.296219 1.16187 +v -0.0223193 -0.308924 1.16738 +v -0.0218191 -0.322394 1.17002 +v -0.0227645 -0.330965 1.16887 +v -0.0234898 -0.339291 1.16627 +v -0.0237712 -0.347585 1.16218 +v -0.0239279 -0.355417 1.1573 +v -0.0241256 -0.362184 1.15143 +v -0.0242465 -0.368561 1.14512 +v -0.0243288 -0.373705 1.13796 +v -0.0243793 -0.37859 1.13061 +v -0.0244802 -0.38323 1.12333 +v -0.0245087 -0.388302 1.11624 +v -0.0250401 -0.387166 1.10713 +v -0.0255589 -0.386018 1.0981 +v -0.031592 -0.397375 1.11808 +v -0.0323377 -0.407389 1.12325 +v -0.0330984 -0.416855 1.12884 +v -0.0337136 -0.4271 1.13457 +v -0.0343201 -0.437643 1.13956 +v -0.0347374 -0.449662 1.14253 +v -0.0350738 -0.461901 1.14425 +v -0.0356936 -0.474927 1.14305 +v -0.0359533 -0.487039 1.13767 +v -0.035269 -0.498935 1.12555 +v -0.0345251 -0.509916 1.11248 +v -0.0340648 -0.522351 1.10145 +v -0.033585 -0.535779 1.09202 +v -0.0333472 -0.549781 1.08412 +v -0.0331853 -0.564811 1.0784 +v -0.0334134 -0.581201 1.07487 +v -0.0342871 -0.597559 1.07863 +v -0.0354295 -0.613456 1.08292 +v -0.0365734 -0.629438 1.0873 +v -0.0187771 0.431277 1.10033 +v -0.0186682 
0.423266 1.0992 +v -0.0185576 0.415296 1.09794 +v -0.0183943 0.407983 1.09674 +v -0.0182254 0.400689 1.09547 +v -0.0180806 0.394096 1.09433 +v -0.0179321 0.387543 1.09319 +v -0.0179286 0.381398 1.09215 +v -0.0179306 0.375244 1.09115 +v -0.0179239 0.369645 1.09036 +v -0.01792 0.364047 1.08957 +v -0.0180121 0.358836 1.08899 +v -0.0181088 0.353625 1.08846 +v -0.0181565 0.348669 1.08813 +v -0.0182102 0.343706 1.08784 +v -0.0183181 0.339009 1.08775 +v -0.0184304 0.334311 1.0877 +v -0.0185479 0.329874 1.08787 +v -0.0186698 0.325438 1.08808 +v -0.0187628 0.321376 1.0885 +v -0.0188568 0.317319 1.08895 +v -0.0188509 0.31364 1.0896 +v -0.0188466 0.30998 1.09026 +v -0.0188206 0.306573 1.09109 +v -0.0188001 0.303158 1.09195 +v -0.0187325 0.299686 1.09306 +v -0.0186677 0.296219 1.09419 +v -0.0185416 0.292568 1.09561 +v -0.0184235 0.28892 1.09707 +v -0.0182608 0.284944 1.09897 +v -0.018084 0.280992 1.10088 +v -0.0178853 0.276598 1.10336 +v -0.0176923 0.272197 1.10586 +v -0.017494 0.267282 1.10901 +v -0.0172969 0.262374 1.11218 +v -0.017148 0.25695 1.11605 +v -0.0170023 0.251532 1.11994 +v -0.0168713 0.245557 1.12452 +v -0.0167444 0.239601 1.12911 +v -0.0166911 0.23302 1.13434 +v -0.0166501 0.226438 1.13955 +v -0.016626 0.219191 1.14525 +v -0.0166024 0.211943 1.15095 +v -0.0166375 0.20411 1.15697 +v -0.0166754 0.196264 1.16298 +v -0.0167235 0.187821 1.16931 +v -0.0167724 0.179373 1.17565 +v -0.016788 0.17023 1.18215 +v -0.0168032 0.161086 1.18865 +v -0.0168093 0.151444 1.19541 +v -0.0168138 0.141798 1.20218 +v -0.0169274 0.131639 1.20947 +v -0.0170413 0.121473 1.21676 +v -0.017291 0.110675 1.22469 +v -0.0175404 0.0998781 1.23262 +v -0.0181003 0.0882173 1.24154 +v -0.018664 0.076562 1.25046 +v -0.0193661 0.0639757 1.26047 +v -0.0200752 0.0513999 1.27049 +v -0.0212053 0.0366039 1.28192 +v -0.0223291 0.0217955 1.29333 +v -0.0242038 0.00381002 1.30375 +v -0.0260424 -0.01427 1.31395 +v -0.0265189 -0.0352911 1.31855 +v -0.0267903 -0.0566704 1.32222 +v -0.0252731 -0.0795227 1.31484 +v -0.0232532 -0.101326 1.30538 +v -0.0222102 -0.116074 1.28801 +v -0.0211517 -0.130595 1.27067 +v -0.0191653 -0.138863 1.25247 +v -0.0171893 -0.147148 1.23436 +v -0.0161995 -0.152095 1.21954 +v -0.0152014 -0.157055 1.20475 +v -0.014737 -0.162393 1.19269 +v -0.0142727 -0.167826 1.1806 +v -0.0138986 -0.175482 1.17401 +v -0.0135394 -0.18331 1.16753 +v -0.0130588 -0.19352 1.16351 +v -0.0125648 -0.203808 1.15979 +v -0.0122775 -0.214067 1.15732 +v -0.0120138 -0.224222 1.15492 +v -0.0120523 -0.235425 1.15323 +v -0.0120742 -0.246658 1.15186 +v -0.0121719 -0.258876 1.15135 +v -0.0122156 -0.271153 1.15167 +v -0.0125173 -0.284131 1.1557 +v -0.0126679 -0.296521 1.16124 +v -0.0124648 -0.309645 1.16737 +v -0.0122364 -0.323702 1.17064 +v -0.0127493 -0.331972 1.16973 +v -0.0132069 -0.340068 1.16731 +v -0.0134637 -0.348298 1.1633 +v -0.0136206 -0.355967 1.15835 +v -0.0137875 -0.362767 1.15234 +v -0.0139015 -0.369133 1.14583 +v -0.0140165 -0.374152 1.13857 +v -0.0140916 -0.37887 1.13114 +v -0.0141133 -0.383518 1.12381 +v -0.0141534 -0.388451 1.11656 +v -0.0143662 -0.387377 1.10772 +v -0.0147094 -0.386421 1.09883 +v -0.0179187 -0.398069 1.11894 +v -0.0181983 -0.408038 1.12399 +v -0.0184327 -0.417425 1.1294 +v -0.0187013 -0.427701 1.13494 +v -0.0189684 -0.438242 1.13985 +v -0.0190417 -0.450153 1.14301 +v -0.0190766 -0.462384 1.14488 +v -0.0192197 -0.475506 1.1439 +v -0.019198 -0.48775 1.13843 +v -0.0187872 -0.499309 1.12604 +v -0.0183407 -0.510016 1.11291 +v -0.0180561 -0.522435 1.10194 +v -0.0177857 -0.535876 1.0926 +v -0.0176341 -0.549801 1.0849 +v 
-0.0175531 -0.564689 1.07937 +v -0.0175448 -0.581175 1.07636 +v -0.0179097 -0.597478 1.08028 +v -0.0183522 -0.613549 1.08482 +v -0.0188066 -0.629784 1.08913 +v -0.00541389 0.431567 1.10059 +v -0.0053379 0.423557 1.09946 +v -0.00525297 0.415567 1.09821 +v -0.00511389 0.408276 1.0971 +v -0.00498326 0.400997 1.0959 +v -0.00477812 0.394368 1.09486 +v -0.00456903 0.387751 1.09384 +v -0.00453952 0.381512 1.09288 +v -0.00451575 0.375263 1.09198 +v -0.00440582 0.36969 1.09124 +v -0.00429805 0.364109 1.09058 +v -0.00417896 0.358876 1.09009 +v -0.00406368 0.353642 1.08967 +v -0.00384947 0.348702 1.08945 +v -0.00363759 0.343762 1.08928 +v -0.00350792 0.339055 1.08931 +v -0.00338186 0.334344 1.0894 +v -0.00325583 0.33003 1.08966 +v -0.00313025 0.325716 1.08997 +v -0.00301991 0.32165 1.09049 +v -0.00291356 0.317583 1.09105 +v -0.0027838 0.314026 1.09176 +v -0.00265322 0.310485 1.09248 +v -0.00260346 0.307156 1.09335 +v -0.00255404 0.303833 1.09424 +v -0.00254986 0.300395 1.09532 +v -0.00254595 0.296964 1.09644 +v -0.00255656 0.293384 1.09784 +v -0.00256619 0.289804 1.09928 +v -0.00257219 0.285857 1.10115 +v -0.00257853 0.281919 1.10303 +v -0.00259104 0.277531 1.10546 +v -0.00260448 0.273136 1.1079 +v -0.00260706 0.268231 1.111 +v -0.00260905 0.263335 1.11411 +v -0.002623 0.257904 1.11791 +v -0.002637 0.252485 1.12171 +v -0.00259402 0.246534 1.12626 +v -0.00255124 0.2406 1.13081 +v -0.00253578 0.234034 1.13599 +v -0.00252121 0.227468 1.14118 +v -0.00251226 0.220244 1.14686 +v -0.00249863 0.213021 1.15255 +v -0.00249363 0.205171 1.15855 +v -0.00249108 0.197311 1.16455 +v -0.00247045 0.18885 1.17088 +v -0.00245048 0.180392 1.17722 +v -0.00238601 0.171253 1.18375 +v -0.00231997 0.16211 1.1903 +v -0.00224224 0.152485 1.19713 +v -0.00216724 0.142853 1.20398 +v -0.00208502 0.132725 1.21137 +v -0.002002 0.122597 1.21877 +v -0.00189623 0.111824 1.22687 +v -0.00179068 0.101058 1.23497 +v -0.00165054 0.0894456 1.24415 +v -0.00151052 0.0778387 1.25334 +v -0.00138145 0.0651928 1.26359 +v -0.00125385 0.0525569 1.27386 +v -0.00118035 0.0377123 1.28558 +v -0.00110515 0.0228539 1.29727 +v -0.00110498 0.00511533 1.30809 +v -0.00110469 -0.0127176 1.31863 +v -0.00103974 -0.0345199 1.32331 +v -0.000974969 -0.0565558 1.32685 +v -0.00114867 -0.0799935 1.31937 +v -0.00130698 -0.102729 1.31003 +v -0.00161071 -0.118268 1.29226 +v -0.00190254 -0.133488 1.27433 +v -0.0021256 -0.141621 1.2558 +v -0.00234629 -0.149695 1.23737 +v -0.00247608 -0.154423 1.22202 +v -0.00260856 -0.159144 1.20674 +v -0.00269259 -0.164442 1.1942 +v -0.00278106 -0.169835 1.18175 +v -0.00282531 -0.176572 1.17516 +v -0.00286477 -0.183449 1.16868 +v -0.0028363 -0.193569 1.16416 +v -0.00279244 -0.203759 1.16001 +v -0.00259825 -0.214081 1.1573 +v -0.00241166 -0.224234 1.15469 +v -0.00231336 -0.23548 1.15285 +v -0.00221817 -0.246777 1.15135 +v -0.0021232 -0.259066 1.15078 +v -0.00203564 -0.27139 1.15108 +v -0.00227904 -0.284358 1.15513 +v -0.00254853 -0.29678 1.16067 +v -0.00258511 -0.310413 1.16733 +v -0.00264002 -0.325098 1.17079 +v -0.00279259 -0.332996 1.16987 +v -0.00291652 -0.340664 1.16761 +v -0.00308559 -0.348681 1.16377 +v -0.00324961 -0.356221 1.15904 +v -0.0033718 -0.363113 1.15292 +v -0.00349747 -0.369524 1.14633 +v -0.00363789 -0.374463 1.13907 +v -0.00379066 -0.379131 1.13163 +v -0.00377678 -0.383742 1.12426 +v -0.00373142 -0.388677 1.11697 +v -0.00378308 -0.387776 1.10828 +v -0.00381667 -0.386841 1.09965 +v -0.00421555 -0.398648 1.11977 +v -0.00401142 -0.408665 1.1245 +v -0.00372511 -0.418015 1.12987 +v -0.00366271 -0.428355 1.13525 +v -0.00360918 
-0.438907 1.14009 +v -0.00335173 -0.450788 1.14322 +v -0.0030684 -0.462926 1.14515 +v -0.00269213 -0.476014 1.14406 +v -0.00239078 -0.488169 1.13861 +v -0.00226203 -0.499536 1.12642 +v -0.00211109 -0.510061 1.11325 +v -0.00204216 -0.522446 1.10222 +v -0.00200632 -0.535807 1.09295 +v -0.00193268 -0.549645 1.08525 +v -0.00188649 -0.564404 1.07963 +v -0.00160831 -0.581148 1.07656 +v -0.00141053 -0.597682 1.08092 +v -0.00120213 -0.613949 1.0855 +v -0.000962824 -0.630383 1.08983 +v 0.00783903 0.431217 1.10031 +v 0.00781391 0.423218 1.09917 +v 0.00777363 0.415232 1.09795 +v 0.00784328 0.407923 1.09676 +v 0.0079163 0.400637 1.09549 +v 0.00813357 0.393992 1.09434 +v 0.00834605 0.387362 1.09322 +v 0.00845131 0.381179 1.09221 +v 0.00855705 0.375006 1.09122 +v 0.00881068 0.369363 1.09042 +v 0.00906762 0.363717 1.08965 +v 0.00935661 0.358448 1.08907 +v 0.00964704 0.353172 1.08855 +v 0.010063 0.348198 1.08817 +v 0.0104816 0.343225 1.08784 +v 0.0108828 0.33849 1.08771 +v 0.0112908 0.333751 1.08762 +v 0.0116851 0.329302 1.08772 +v 0.012085 0.324852 1.08787 +v 0.0124452 0.320769 1.08823 +v 0.0128134 0.316688 1.08863 +v 0.0130718 0.313014 1.08921 +v 0.0133406 0.309365 1.08981 +v 0.0134459 0.305976 1.09061 +v 0.0135527 0.302588 1.09141 +v 0.0135293 0.299155 1.09247 +v 0.0135089 0.29573 1.09356 +v 0.013377 0.292127 1.09498 +v 0.0132571 0.288524 1.09644 +v 0.0130592 0.284573 1.09836 +v 0.012866 0.280628 1.10029 +v 0.0126331 0.276268 1.10279 +v 0.0124024 0.271898 1.10531 +v 0.012189 0.267008 1.10847 +v 0.0119816 0.262121 1.11167 +v 0.0118313 0.256711 1.11555 +v 0.0116793 0.251311 1.11943 +v 0.0116165 0.245348 1.12402 +v 0.0115574 0.239409 1.12861 +v 0.0115623 0.232845 1.13381 +v 0.0115718 0.226283 1.13904 +v 0.0115932 0.219041 1.14472 +v 0.0116052 0.211796 1.15041 +v 0.0116606 0.203936 1.1564 +v 0.0117202 0.196062 1.1624 +v 0.0118137 0.187599 1.16869 +v 0.0119085 0.179137 1.17499 +v 0.0120267 0.170006 1.18149 +v 0.0121403 0.160862 1.18797 +v 0.0123053 0.151236 1.19474 +v 0.0124698 0.141603 1.20152 +v 0.0127378 0.131444 1.20878 +v 0.013009 0.121274 1.21606 +v 0.0134628 0.110472 1.22401 +v 0.0139227 0.099677 1.23196 +v 0.0147598 0.0880472 1.24091 +v 0.0156005 0.0764198 1.24986 +v 0.0165535 0.0638728 1.2599 +v 0.0175149 0.0513328 1.26995 +v 0.0188053 0.0365667 1.28138 +v 0.0200891 0.0217694 1.29279 +v 0.0220244 0.00387675 1.30325 +v 0.0238983 -0.0141846 1.31341 +v 0.0244973 -0.035041 1.31829 +v 0.0248382 -0.0563139 1.32199 +v 0.0228122 -0.0789698 1.31415 +v 0.0204974 -0.100754 1.30487 +v 0.0188588 -0.115532 1.28753 +v 0.0172533 -0.130259 1.27031 +v 0.0148002 -0.138717 1.25216 +v 0.012376 -0.1472 1.23412 +v 0.0110914 -0.152319 1.21937 +v 0.00980332 -0.157454 1.20465 +v 0.00914303 -0.162851 1.19266 +v 0.00848679 -0.168327 1.18065 +v 0.00804288 -0.175845 1.17405 +v 0.00761418 -0.183526 1.16755 +v 0.00721638 -0.193676 1.1635 +v 0.0068381 -0.2039 1.1597 +v 0.00673224 -0.214155 1.15718 +v 0.00664961 -0.224318 1.15471 +v 0.00674884 -0.235578 1.15298 +v 0.00682401 -0.246833 1.15158 +v 0.00694194 -0.259012 1.15105 +v 0.00705429 -0.271252 1.15137 +v 0.00717539 -0.28387 1.15536 +v 0.00681326 -0.296123 1.16065 +v 0.00665033 -0.309555 1.16701 +v 0.00649326 -0.324035 1.17053 +v 0.00679421 -0.332278 1.16978 +v 0.00698462 -0.340273 1.16747 +v 0.00707546 -0.348455 1.16344 +v 0.00706812 -0.356102 1.15851 +v 0.00706866 -0.362922 1.15242 +v 0.00701127 -0.369299 1.14586 +v 0.00694337 -0.374314 1.13866 +v 0.0068728 -0.379044 1.13122 +v 0.0068438 -0.383703 1.12385 +v 0.00680693 -0.38867 1.11656 +v 0.00693939 -0.387551 1.10763 +v 
0.00708363 -0.386409 1.09872 +v 0.00955814 -0.398277 1.11929 +v 0.0101535 -0.408264 1.12425 +v 0.0108059 -0.417603 1.12967 +v 0.0113913 -0.427802 1.1352 +v 0.0120354 -0.438289 1.14002 +v 0.0126671 -0.450304 1.14312 +v 0.0132864 -0.462639 1.14481 +v 0.0139618 -0.475789 1.14386 +v 0.0143913 -0.487921 1.1381 +v 0.0142494 -0.499435 1.12587 +v 0.0140704 -0.510132 1.11279 +v 0.0139963 -0.522348 1.1018 +v 0.0139142 -0.535705 1.0925 +v 0.0139802 -0.54952 1.08476 +v 0.0140948 -0.564333 1.07936 +v 0.0144063 -0.58097 1.07621 +v 0.0149812 -0.597394 1.08026 +v 0.0159305 -0.613486 1.08473 +v 0.0168757 -0.629737 1.08899 +v 0.0210802 0.430971 1.09972 +v 0.0209322 0.422997 1.09847 +v 0.020784 0.415026 1.09718 +v 0.0207854 0.407703 1.09586 +v 0.0207826 0.400418 1.09447 +v 0.0210008 0.393756 1.09322 +v 0.0212178 0.3871 1.09204 +v 0.0213895 0.380981 1.09089 +v 0.0215743 0.374852 1.08981 +v 0.0219723 0.369139 1.08887 +v 0.0223784 0.363418 1.08799 +v 0.0228209 0.358094 1.08728 +v 0.0232773 0.352767 1.08663 +v 0.0238839 0.34774 1.0861 +v 0.0245041 0.342709 1.08563 +v 0.0251755 0.337929 1.08532 +v 0.0258561 0.333146 1.08506 +v 0.0265085 0.32854 1.08499 +v 0.0271728 0.323932 1.08496 +v 0.0277764 0.319804 1.08516 +v 0.0283932 0.315674 1.0854 +v 0.0287847 0.311867 1.08591 +v 0.0291877 0.308081 1.08641 +v 0.0293454 0.30461 1.08715 +v 0.0295109 0.301142 1.08791 +v 0.0294626 0.297704 1.08897 +v 0.029422 0.29427 1.09005 +v 0.029191 0.290641 1.09155 +v 0.028971 0.287012 1.09307 +v 0.0286008 0.283066 1.0951 +v 0.028234 0.279124 1.09714 +v 0.0277869 0.274798 1.09975 +v 0.0273476 0.270467 1.10238 +v 0.0269265 0.265599 1.10565 +v 0.0265117 0.260743 1.10894 +v 0.0262179 0.25536 1.11293 +v 0.0259318 0.249985 1.11692 +v 0.0257873 0.244021 1.12156 +v 0.0256462 0.238081 1.1262 +v 0.0256383 0.23153 1.13146 +v 0.0256289 0.224979 1.13674 +v 0.0256546 0.21772 1.14243 +v 0.0256793 0.210461 1.14813 +v 0.0257928 0.202595 1.15411 +v 0.0259031 0.194722 1.16009 +v 0.0260723 0.186257 1.16635 +v 0.0262385 0.177789 1.17262 +v 0.0264009 0.168648 1.17905 +v 0.0265626 0.159502 1.18547 +v 0.026813 0.149866 1.19215 +v 0.0270552 0.14023 1.19885 +v 0.0275058 0.130017 1.20597 +v 0.0279634 0.1198 1.21311 +v 0.0287627 0.108956 1.2209 +v 0.0295682 0.0981204 1.2287 +v 0.0310947 0.0864423 1.23738 +v 0.032633 0.0747877 1.24609 +v 0.0344116 0.0623172 1.2559 +v 0.0361996 0.0498658 1.26573 +v 0.0386993 0.0351481 1.27684 +v 0.0411812 0.0204056 1.28791 +v 0.0449687 0.00220807 1.29772 +v 0.0486752 -0.0160593 1.30727 +v 0.049599 -0.0359651 1.31147 +v 0.0501827 -0.0560216 1.31504 +v 0.0464403 -0.0774637 1.30758 +v 0.0422283 -0.0985388 1.2994 +v 0.0393095 -0.112811 1.28285 +v 0.0363758 -0.126973 1.2663 +v 0.0317372 -0.135817 1.2485 +v 0.0270843 -0.14465 1.23087 +v 0.0246245 -0.150166 1.21672 +v 0.0221944 -0.155727 1.20248 +v 0.0209671 -0.16116 1.19093 +v 0.0197483 -0.166686 1.17946 +v 0.0188928 -0.175035 1.17282 +v 0.0180892 -0.183514 1.16633 +v 0.0172633 -0.193739 1.16273 +v 0.0164334 -0.204012 1.15934 +v 0.0160601 -0.214239 1.15703 +v 0.0157103 -0.224367 1.15481 +v 0.0157975 -0.235581 1.15328 +v 0.0158714 -0.246881 1.15204 +v 0.0160503 -0.258973 1.15161 +v 0.0161258 -0.271073 1.15199 +v 0.0163024 -0.283394 1.15573 +v 0.0160845 -0.295365 1.16075 +v 0.015863 -0.308734 1.16677 +v 0.0155712 -0.322932 1.16997 +v 0.0163235 -0.33143 1.16906 +v 0.016863 -0.339603 1.16655 +v 0.017154 -0.347848 1.16243 +v 0.0173054 -0.355649 1.15752 +v 0.0174542 -0.362465 1.15152 +v 0.0175035 -0.36886 1.14507 +v 0.0175248 -0.37398 1.13803 +v 0.0175132 -0.378799 1.1308 +v 0.017446 -0.383551 
1.12344 +v 0.0173755 -0.38871 1.11626 +v 0.017674 -0.387386 1.10703 +v 0.0179762 -0.38608 1.09788 +v 0.0233152 -0.39778 1.11893 +v 0.0242869 -0.407845 1.12382 +v 0.0253308 -0.417126 1.12946 +v 0.0264446 -0.427229 1.13514 +v 0.0277277 -0.43772 1.13984 +v 0.0287342 -0.449936 1.14263 +v 0.0296632 -0.462364 1.14407 +v 0.0306722 -0.475416 1.14277 +v 0.0312004 -0.487471 1.13707 +v 0.0307864 -0.499234 1.12512 +v 0.0302745 -0.510068 1.11215 +v 0.030027 -0.522226 1.1012 +v 0.0297875 -0.535435 1.09176 +v 0.0298795 -0.549228 1.08386 +v 0.0300548 -0.564147 1.07846 +v 0.0303658 -0.580766 1.07463 +v 0.0313027 -0.597401 1.07864 +v 0.032951 -0.613356 1.08273 +v 0.0346109 -0.62936 1.08693 +v 0.0342571 0.43014 1.09868 +v 0.034053 0.422129 1.09734 +v 0.0338305 0.414192 1.09583 +v 0.0337828 0.406909 1.09429 +v 0.0337347 0.39964 1.09271 +v 0.0338847 0.392995 1.09122 +v 0.0340459 0.386354 1.08975 +v 0.0342434 0.380249 1.0884 +v 0.0344445 0.374153 1.08707 +v 0.0348934 0.368475 1.08583 +v 0.0353547 0.362786 1.08464 +v 0.0359356 0.357415 1.08358 +v 0.0365192 0.352048 1.08255 +v 0.0372233 0.346873 1.08161 +v 0.0379406 0.341693 1.0807 +v 0.0387578 0.33674 1.07994 +v 0.0395805 0.331789 1.0792 +v 0.0403779 0.327023 1.07863 +v 0.0411932 0.322254 1.07811 +v 0.04185 0.317885 1.07789 +v 0.0425178 0.313513 1.0777 +v 0.0429304 0.309509 1.07788 +v 0.0433453 0.305507 1.07807 +v 0.0434573 0.301774 1.07866 +v 0.0435768 0.298039 1.07927 +v 0.0434354 0.294496 1.08031 +v 0.0432984 0.290949 1.08136 +v 0.0429106 0.28723 1.08295 +v 0.0425342 0.283515 1.08455 +v 0.0419869 0.279535 1.08675 +v 0.0414463 0.275559 1.08896 +v 0.040832 0.271225 1.0918 +v 0.0402198 0.266886 1.09465 +v 0.0396739 0.262014 1.09819 +v 0.0391315 0.25715 1.10173 +v 0.0387554 0.251749 1.10593 +v 0.0383849 0.246353 1.11013 +v 0.0382316 0.240371 1.11495 +v 0.0380818 0.234412 1.11978 +v 0.0380626 0.227839 1.12512 +v 0.0380491 0.221272 1.13048 +v 0.0381233 0.214041 1.13623 +v 0.0382086 0.206798 1.14198 +v 0.0383283 0.1989 1.14795 +v 0.0384447 0.191007 1.15391 +v 0.0386142 0.182509 1.16009 +v 0.0387892 0.174005 1.16626 +v 0.0389429 0.164846 1.17255 +v 0.0390966 0.155686 1.17883 +v 0.039398 0.146068 1.18537 +v 0.0396955 0.136443 1.19192 +v 0.040202 0.126215 1.19883 +v 0.0407197 0.11598 1.20575 +v 0.0416836 0.105155 1.21326 +v 0.0426564 0.0943341 1.22078 +v 0.0444265 0.0827996 1.22912 +v 0.0462176 0.0712672 1.23746 +v 0.0485522 0.0589348 1.24673 +v 0.0508992 0.0466125 1.25602 +v 0.0539981 0.0321475 1.26625 +v 0.057091 0.0176642 1.27644 +v 0.0609191 0.000154565 1.28526 +v 0.0647226 -0.0173688 1.29394 +v 0.065408 -0.0362956 1.29772 +v 0.0658776 -0.0552973 1.30117 +v 0.0618341 -0.0743102 1.29463 +v 0.0573512 -0.0930062 1.28771 +v 0.0540205 -0.104625 1.27615 +v 0.0506564 -0.116135 1.2646 +v 0.0446216 -0.124916 1.25165 +v 0.0386022 -0.13369 1.2388 +v 0.0351373 -0.136985 1.22289 +v 0.031723 -0.140321 1.20701 +v 0.0309653 -0.145541 1.18717 +v 0.0301989 -0.150739 1.1673 +v 0.0286705 -0.16759 1.16548 +v 0.0270504 -0.184271 1.16341 +v 0.0261971 -0.194156 1.16076 +v 0.0253277 -0.204067 1.15825 +v 0.0249405 -0.214207 1.15643 +v 0.0245868 -0.224295 1.15467 +v 0.0246666 -0.235449 1.15346 +v 0.0247402 -0.246644 1.15252 +v 0.0250329 -0.25846 1.15234 +v 0.025251 -0.270277 1.15291 +v 0.0255196 -0.282296 1.15626 +v 0.0255785 -0.293943 1.16088 +v 0.0258313 -0.306753 1.16617 +v 0.0259546 -0.320235 1.16892 +v 0.0266155 -0.329473 1.16803 +v 0.0268492 -0.338389 1.16516 +v 0.0271902 -0.346883 1.16081 +v 0.0273782 -0.354975 1.15577 +v 0.0276591 -0.361964 1.14986 +v 0.0278355 -0.368469 1.14339 +v 
0.027997 -0.373633 1.13658 +v 0.0281182 -0.378506 1.12954 +v 0.0281456 -0.383258 1.12249 +v 0.028186 -0.388241 1.11546 +v 0.0279132 -0.387142 1.10624 +v 0.0277413 -0.386009 1.09719 +v 0.0380559 -0.397008 1.11706 +v 0.0390847 -0.406896 1.12203 +v 0.0401747 -0.416154 1.12773 +v 0.0417341 -0.426157 1.13329 +v 0.0432786 -0.436582 1.13803 +v 0.0445792 -0.448654 1.14096 +v 0.0456624 -0.461044 1.14213 +v 0.0469259 -0.474245 1.14094 +v 0.0474019 -0.48657 1.13528 +v 0.0468688 -0.498942 1.12369 +v 0.0462107 -0.510303 1.11117 +v 0.0458917 -0.522435 1.10038 +v 0.0455131 -0.535585 1.09098 +v 0.0454897 -0.549405 1.08278 +v 0.0455398 -0.56435 1.07726 +v 0.0461334 -0.580476 1.07285 +v 0.0475699 -0.596997 1.07577 +v 0.0496636 -0.612806 1.07931 +v 0.0518058 -0.628556 1.0835 +v 0.0473948 0.429232 1.09737 +v 0.0471115 0.421303 1.09581 +v 0.0468057 0.413393 1.0941 +v 0.0467216 0.406146 1.09237 +v 0.0466251 0.398911 1.0906 +v 0.0467042 0.392305 1.08885 +v 0.0467802 0.385713 1.08707 +v 0.0469903 0.379628 1.08545 +v 0.0472109 0.373559 1.08385 +v 0.0477083 0.367907 1.08231 +v 0.048206 0.362254 1.08078 +v 0.048886 0.356853 1.07932 +v 0.0495705 0.351455 1.07785 +v 0.0503501 0.346129 1.07648 +v 0.0511438 0.340795 1.07514 +v 0.05209 0.335664 1.0739 +v 0.0530428 0.330532 1.07267 +v 0.0539938 0.32559 1.07166 +v 0.0549594 0.320643 1.07067 +v 0.0556883 0.315996 1.07009 +v 0.0564184 0.311369 1.06949 +v 0.0568409 0.307139 1.06939 +v 0.0572777 0.302903 1.0693 +v 0.0573544 0.298895 1.06978 +v 0.0574419 0.294888 1.07027 +v 0.0572079 0.291233 1.0713 +v 0.0569888 0.287575 1.07235 +v 0.0564634 0.283768 1.07405 +v 0.0559472 0.279959 1.07575 +v 0.0552357 0.27594 1.07815 +v 0.054531 0.271925 1.08056 +v 0.0537455 0.267575 1.08365 +v 0.0529665 0.263225 1.08675 +v 0.0523072 0.258339 1.09057 +v 0.0516559 0.253457 1.09441 +v 0.0511976 0.248047 1.09882 +v 0.0507458 0.242642 1.10323 +v 0.0505856 0.236656 1.10824 +v 0.0504245 0.230685 1.11325 +v 0.0504083 0.224089 1.11867 +v 0.0503976 0.217496 1.12411 +v 0.0505305 0.210272 1.12991 +v 0.0506611 0.203049 1.13572 +v 0.0507784 0.195125 1.14166 +v 0.0508929 0.187199 1.14761 +v 0.0510633 0.178659 1.15367 +v 0.0512308 0.170113 1.15974 +v 0.051365 0.160934 1.16587 +v 0.0515007 0.151756 1.17201 +v 0.0518371 0.142133 1.1784 +v 0.0521725 0.132509 1.18479 +v 0.0527396 0.12227 1.19147 +v 0.0533114 0.112022 1.19819 +v 0.0544482 0.101221 1.20544 +v 0.0555998 0.0904258 1.21269 +v 0.0576393 0.0790321 1.22069 +v 0.0596905 0.0676468 1.22871 +v 0.0625919 0.0554585 1.23746 +v 0.0654991 0.0432698 1.24621 +v 0.0692036 0.0290598 1.25555 +v 0.0728995 0.014847 1.26489 +v 0.0767911 -0.00193338 1.27272 +v 0.0806384 -0.0187268 1.28049 +v 0.0811306 -0.0366061 1.28391 +v 0.0813308 -0.0545162 1.28702 +v 0.0770561 -0.0710689 1.28164 +v 0.0724003 -0.0873871 1.27594 +v 0.0686798 -0.0963638 1.26942 +v 0.0649424 -0.105286 1.26291 +v 0.0574974 -0.114016 1.25486 +v 0.0500877 -0.122739 1.2469 +v 0.0456379 -0.123768 1.22923 +v 0.0412191 -0.124853 1.21168 +v 0.0409589 -0.129817 1.18343 +v 0.0406393 -0.134732 1.15523 +v 0.0384947 -0.160006 1.15799 +v 0.0359888 -0.184999 1.1604 +v 0.0351025 -0.194521 1.15866 +v 0.0342093 -0.204054 1.15698 +v 0.0338101 -0.214134 1.1556 +v 0.0334642 -0.224229 1.15431 +v 0.0335362 -0.235294 1.15347 +v 0.0336479 -0.246283 1.15291 +v 0.034018 -0.257917 1.15294 +v 0.0343614 -0.269458 1.1537 +v 0.0347904 -0.281152 1.15678 +v 0.035064 -0.292536 1.161 +v 0.0357805 -0.304768 1.16542 +v 0.0363195 -0.317495 1.16771 +v 0.0367887 -0.327486 1.16664 +v 0.03675 -0.337047 1.1635 +v 0.0371591 -0.345805 1.159 +v 
0.0374066 -0.354168 1.15387 +v 0.0378598 -0.361303 1.14807 +v 0.0381854 -0.367965 1.14174 +v 0.0384549 -0.373156 1.1351 +v 0.0386761 -0.378076 1.12823 +v 0.0388295 -0.382742 1.1214 +v 0.0389992 -0.387747 1.11472 +v 0.0381541 -0.386888 1.10546 +v 0.0373463 -0.385986 1.09626 +v 0.052852 -0.396109 1.11518 +v 0.0539531 -0.406074 1.12002 +v 0.0551331 -0.41528 1.12569 +v 0.0570562 -0.425239 1.13102 +v 0.0588655 -0.43554 1.1356 +v 0.060381 -0.447471 1.13817 +v 0.0615734 -0.45968 1.13914 +v 0.0629737 -0.472878 1.13776 +v 0.0634611 -0.485244 1.13261 +v 0.0629752 -0.498214 1.12187 +v 0.0621675 -0.510206 1.10992 +v 0.0617058 -0.522478 1.09941 +v 0.0612287 -0.535566 1.08991 +v 0.0610462 -0.549347 1.08132 +v 0.0609893 -0.564432 1.07541 +v 0.0618287 -0.580172 1.07016 +v 0.0636938 -0.596742 1.07226 +v 0.0660701 -0.612526 1.07488 +v 0.0686928 -0.628078 1.07881 +v 0.0603556 0.427596 1.09572 +v 0.0599648 0.419662 1.09388 +v 0.0595642 0.411789 1.09184 +v 0.059414 0.404625 1.08987 +v 0.0592504 0.397513 1.08774 +v 0.0592428 0.391004 1.08568 +v 0.0592258 0.384529 1.08358 +v 0.0594135 0.378569 1.0816 +v 0.0595968 0.372635 1.07956 +v 0.060037 0.367084 1.07756 +v 0.0604834 0.36154 1.07557 +v 0.0610883 0.35617 1.07358 +v 0.0616976 0.350804 1.0716 +v 0.0624923 0.345301 1.06952 +v 0.0632946 0.339797 1.06745 +v 0.0642033 0.334386 1.06549 +v 0.0651099 0.328972 1.06354 +v 0.0659404 0.323758 1.06183 +v 0.0667807 0.318541 1.06014 +v 0.0673054 0.313596 1.05888 +v 0.0678439 0.308651 1.05764 +v 0.0680216 0.304102 1.05709 +v 0.0682165 0.299549 1.05656 +v 0.0680278 0.295277 1.05678 +v 0.0678592 0.291006 1.05701 +v 0.0674026 0.287049 1.058 +v 0.0669681 0.283089 1.05901 +v 0.0662657 0.279117 1.06085 +v 0.0655815 0.275146 1.0627 +v 0.0647312 0.27101 1.06541 +v 0.0639019 0.266873 1.06813 +v 0.0630375 0.262416 1.07166 +v 0.0621886 0.257949 1.0752 +v 0.0615133 0.253012 1.07941 +v 0.0608486 0.248076 1.08363 +v 0.0604299 0.242624 1.08838 +v 0.060035 0.237178 1.09313 +v 0.0599423 0.231177 1.09835 +v 0.0598537 0.225193 1.10357 +v 0.0599305 0.2186 1.10916 +v 0.0600161 0.212004 1.11475 +v 0.0601881 0.204779 1.12059 +v 0.0603543 0.197551 1.12644 +v 0.0604961 0.189641 1.13237 +v 0.060639 0.181727 1.1383 +v 0.0607544 0.173183 1.14429 +v 0.0608668 0.164637 1.15028 +v 0.0609815 0.155502 1.15632 +v 0.0610981 0.146362 1.16236 +v 0.0614123 0.136763 1.16859 +v 0.0617269 0.127158 1.17483 +v 0.0623413 0.117022 1.18138 +v 0.0629675 0.106889 1.18793 +v 0.0642049 0.0962604 1.19498 +v 0.0654683 0.0856323 1.20203 +v 0.0676372 0.0744712 1.20971 +v 0.0698298 0.063318 1.21739 +v 0.07294 0.0513076 1.2255 +v 0.0760628 0.0392819 1.23359 +v 0.0797719 0.0255153 1.24213 +v 0.0834637 0.0117296 1.25063 +v 0.0873445 -0.00431223 1.25782 +v 0.0911481 -0.0204007 1.26495 +v 0.092146 -0.037767 1.26819 +v 0.0925503 -0.0552292 1.27099 +v 0.0881834 -0.070701 1.26726 +v 0.0834692 -0.0859462 1.26326 +v 0.0787104 -0.0944972 1.25905 +v 0.073953 -0.10302 1.25485 +v 0.0666616 -0.107537 1.24647 +v 0.0593646 -0.11202 1.23812 +v 0.0451826 -0.0897021 1.19957 +v 0.0308106 -0.0671463 1.16131 +v 0.0380571 -0.0998724 1.1545 +v 0.0452411 -0.132715 1.14782 +v 0.0448672 -0.158268 1.15257 +v 0.0438976 -0.183499 1.15672 +v 0.0431946 -0.193648 1.15554 +v 0.0425084 -0.203796 1.15439 +v 0.0422634 -0.21391 1.15359 +v 0.0420463 -0.224015 1.15291 +v 0.0421375 -0.235075 1.15264 +v 0.0422834 -0.246082 1.1525 +v 0.042668 -0.257491 1.15289 +v 0.0430592 -0.268871 1.15385 +v 0.0436006 -0.280377 1.15679 +v 0.0441187 -0.291644 1.16069 +v 0.0448825 -0.303448 1.16449 +v 0.0454036 -0.315723 1.16625 +v 
0.0460693 -0.326265 1.16481 +v 0.0461695 -0.336261 1.16132 +v 0.0468657 -0.345113 1.15678 +v 0.0473896 -0.353627 1.15161 +v 0.0479855 -0.3609 1.14585 +v 0.0484222 -0.367642 1.13944 +v 0.0487653 -0.372839 1.13284 +v 0.0490346 -0.37774 1.12601 +v 0.0492837 -0.382307 1.1193 +v 0.0495734 -0.387166 1.11256 +v 0.0494922 -0.386481 1.1033 +v 0.0494305 -0.385797 1.09407 +v 0.0663158 -0.395224 1.11089 +v 0.0679685 -0.405066 1.11575 +v 0.0697394 -0.414279 1.12131 +v 0.0719488 -0.423993 1.12663 +v 0.07401 -0.434072 1.13128 +v 0.0758679 -0.445803 1.13395 +v 0.0772164 -0.457854 1.13478 +v 0.0787853 -0.470812 1.13326 +v 0.0792001 -0.483142 1.12856 +v 0.0786309 -0.496889 1.11864 +v 0.0775648 -0.509683 1.10766 +v 0.076979 -0.522355 1.09768 +v 0.0765144 -0.535493 1.08837 +v 0.0765003 -0.549025 1.07978 +v 0.0767301 -0.56379 1.07372 +v 0.0775292 -0.578814 1.06768 +v 0.0793412 -0.595089 1.06786 +v 0.081894 -0.610683 1.0695 +v 0.0849545 -0.626001 1.07255 +v 0.0732828 0.42615 1.0938 +v 0.0727966 0.418202 1.09173 +v 0.072276 0.410335 1.08939 +v 0.0720554 0.403259 1.08708 +v 0.0718132 0.396226 1.08462 +v 0.0717041 0.389828 1.08221 +v 0.0715677 0.383463 1.07973 +v 0.0717088 0.377628 1.07734 +v 0.0718426 0.371808 1.07491 +v 0.0722276 0.366335 1.07249 +v 0.0725957 0.360912 1.07002 +v 0.0731108 0.355566 1.06751 +v 0.0736216 0.350243 1.065 +v 0.0744235 0.344565 1.06222 +v 0.0752318 0.338894 1.05944 +v 0.0760777 0.333217 1.05678 +v 0.07691 0.327557 1.05407 +v 0.0775963 0.322102 1.05165 +v 0.078286 0.316645 1.04925 +v 0.0786223 0.311348 1.04741 +v 0.0789643 0.306052 1.04557 +v 0.0788618 0.301174 1.04455 +v 0.0787743 0.296294 1.04353 +v 0.0783128 0.291747 1.04349 +v 0.077883 0.287194 1.04347 +v 0.0771966 0.282918 1.04445 +v 0.0765492 0.278638 1.04544 +v 0.0756919 0.274491 1.04744 +v 0.0748687 0.270347 1.04946 +v 0.073916 0.26607 1.0525 +v 0.0730108 0.261798 1.05556 +v 0.0720995 0.257213 1.05954 +v 0.071223 0.252634 1.06353 +v 0.0705599 0.24765 1.06815 +v 0.0698995 0.242654 1.07276 +v 0.0695489 0.237159 1.07785 +v 0.0692142 0.23167 1.08294 +v 0.069192 0.225653 1.08839 +v 0.0691813 0.219666 1.09384 +v 0.069365 0.213074 1.09958 +v 0.0695522 0.20648 1.10533 +v 0.0697616 0.199257 1.11123 +v 0.0699627 0.192024 1.11711 +v 0.0701398 0.184125 1.12303 +v 0.0703021 0.176224 1.12895 +v 0.0703619 0.167683 1.13486 +v 0.0704162 0.159133 1.14077 +v 0.0705082 0.150028 1.14671 +v 0.0706 0.140924 1.15264 +v 0.0708873 0.131348 1.15872 +v 0.0711812 0.121768 1.16482 +v 0.0718481 0.111732 1.17121 +v 0.0725315 0.101703 1.17762 +v 0.0738747 0.0912488 1.18447 +v 0.0752481 0.080802 1.19134 +v 0.0775573 0.0698733 1.19869 +v 0.0798998 0.0589585 1.20604 +v 0.0832197 0.0471114 1.21349 +v 0.0865495 0.0352605 1.22093 +v 0.0902516 0.0219207 1.22864 +v 0.0939293 0.0085728 1.23631 +v 0.0978015 -0.00673723 1.24284 +v 0.101495 -0.0221145 1.24928 +v 0.102945 -0.0389422 1.25231 +v 0.103583 -0.0558686 1.25481 +v 0.0992575 -0.0702535 1.25285 +v 0.0945507 -0.0844742 1.25062 +v 0.0887671 -0.0926112 1.24872 +v 0.0829811 -0.100728 1.24681 +v 0.0758307 -0.101044 1.23807 +v 0.0686981 -0.10132 1.2294 +v 0.0445695 -0.0552612 1.17019 +v 0.0203612 -0.0092539 1.1106 +v 0.0351008 -0.0698982 1.12551 +v 0.0498602 -0.130705 1.14043 +v 0.0512167 -0.156485 1.14709 +v 0.0517491 -0.181919 1.153 +v 0.051193 -0.192739 1.15228 +v 0.0506715 -0.203533 1.15163 +v 0.0505729 -0.21367 1.15136 +v 0.0505088 -0.223798 1.15117 +v 0.0506733 -0.234825 1.15134 +v 0.0509007 -0.245802 1.15161 +v 0.0513039 -0.25708 1.15232 +v 0.0517666 -0.26831 1.15351 +v 0.0524352 -0.279694 1.1564 +v 0.0531501 
-0.290861 1.16006 +v 0.0539775 -0.302214 1.16321 +v 0.0544355 -0.313913 1.16457 +v 0.0552912 -0.324968 1.16275 +v 0.055549 -0.335405 1.15903 +v 0.0565198 -0.344391 1.15438 +v 0.0573082 -0.353106 1.1492 +v 0.0580411 -0.360482 1.14343 +v 0.0585969 -0.367323 1.137 +v 0.0589945 -0.372473 1.13043 +v 0.0593031 -0.377321 1.12364 +v 0.059686 -0.381734 1.1169 +v 0.0601434 -0.386595 1.11039 +v 0.0608174 -0.386057 1.10115 +v 0.0615316 -0.385516 1.09193 +v 0.0798307 -0.394183 1.10646 +v 0.0818986 -0.404154 1.11125 +v 0.0841857 -0.413403 1.11666 +v 0.0866433 -0.422964 1.12172 +v 0.0888504 -0.432863 1.12613 +v 0.0909143 -0.444295 1.12829 +v 0.0923407 -0.45602 1.12891 +v 0.0939371 -0.468399 1.12719 +v 0.0944425 -0.48041 1.12324 +v 0.0939571 -0.494806 1.11445 +v 0.0926945 -0.508259 1.10424 +v 0.0921298 -0.521271 1.09468 +v 0.091617 -0.534543 1.08553 +v 0.0917235 -0.548083 1.07699 +v 0.0922386 -0.562763 1.07075 +v 0.0931381 -0.577479 1.06458 +v 0.0949676 -0.593429 1.06331 +v 0.0976806 -0.608851 1.06351 +v 0.101073 -0.623993 1.0656 +v 0.0859107 0.423841 1.09145 +v 0.0852834 0.415909 1.08903 +v 0.0846076 0.408079 1.08631 +v 0.0843212 0.40116 1.08361 +v 0.0840103 0.394307 1.08077 +v 0.0837926 0.388141 1.078 +v 0.0835545 0.382028 1.07514 +v 0.0836662 0.376403 1.07226 +v 0.08376 0.370798 1.06935 +v 0.0839665 0.365585 1.06645 +v 0.0841681 0.360377 1.06354 +v 0.0844347 0.355388 1.06068 +v 0.0846988 0.350408 1.05781 +v 0.0853646 0.344381 1.054 +v 0.0860296 0.338354 1.05019 +v 0.0866047 0.332445 1.04648 +v 0.0871895 0.326532 1.04279 +v 0.0874921 0.320709 1.03941 +v 0.0878133 0.314884 1.03603 +v 0.0877568 0.309226 1.03337 +v 0.0877066 0.303565 1.0307 +v 0.0872322 0.298268 1.0292 +v 0.0867954 0.292959 1.02773 +v 0.0860289 0.288066 1.0275 +v 0.0853216 0.283157 1.02731 +v 0.0844255 0.278586 1.02828 +v 0.0836118 0.273999 1.0293 +v 0.0825952 0.269639 1.03156 +v 0.0816546 0.265283 1.03385 +v 0.0805339 0.260869 1.0374 +v 0.0794933 0.256455 1.04099 +v 0.078466 0.251723 1.04555 +v 0.0775091 0.246998 1.05013 +v 0.0768263 0.24188 1.05525 +v 0.0761753 0.236764 1.06039 +v 0.0758887 0.231168 1.06583 +v 0.0756327 0.225576 1.07128 +v 0.0757167 0.219497 1.07696 +v 0.0758305 0.213452 1.08267 +v 0.0761528 0.206851 1.08854 +v 0.0764843 0.200246 1.09441 +v 0.0768744 0.193077 1.10038 +v 0.077248 0.185899 1.10635 +v 0.0775421 0.178077 1.11233 +v 0.0778147 0.170246 1.11829 +v 0.0779606 0.161786 1.1242 +v 0.0780969 0.153316 1.1301 +v 0.0782249 0.144293 1.13598 +v 0.0783468 0.135266 1.14185 +v 0.0786366 0.125798 1.14783 +v 0.0789423 0.116332 1.15383 +v 0.0796406 0.106441 1.16011 +v 0.0803693 0.0965611 1.1664 +v 0.0816642 0.0862977 1.17315 +v 0.0829992 0.0760367 1.1799 +v 0.0852429 0.0652545 1.18667 +v 0.0875461 0.0544818 1.19345 +v 0.0906023 0.0428489 1.2003 +v 0.0936673 0.0311977 1.20712 +v 0.0973475 0.0182431 1.2141 +v 0.101022 0.00527984 1.22109 +v 0.10514 -0.00918796 1.22715 +v 0.109095 -0.0237391 1.23317 +v 0.111063 -0.0393991 1.23667 +v 0.112231 -0.0552045 1.23974 +v 0.108654 -0.0698632 1.23928 +v 0.104775 -0.0843968 1.23858 +v 0.099336 -0.0932277 1.23593 +v 0.0939029 -0.102041 1.23326 +v 0.0869644 -0.101214 1.22553 +v 0.0800361 -0.100372 1.21782 +v 0.0629698 -0.0570254 1.16305 +v 0.045737 -0.0134602 1.10812 +v 0.052625 -0.0714365 1.12172 +v 0.0594637 -0.129305 1.13548 +v 0.0593811 -0.155189 1.14196 +v 0.0586915 -0.180905 1.14799 +v 0.0584797 -0.192081 1.14765 +v 0.0583269 -0.203281 1.14738 +v 0.0585214 -0.213528 1.14759 +v 0.0587523 -0.223746 1.14788 +v 0.0589995 -0.234743 1.14862 +v 0.0593194 -0.245669 1.14939 +v 0.0596024 -0.256971 
1.15057 +v 0.0599643 -0.268214 1.15204 +v 0.0604525 -0.279575 1.15498 +v 0.0610345 -0.290845 1.15856 +v 0.0614685 -0.302384 1.16161 +v 0.0614032 -0.314232 1.16277 +v 0.0631523 -0.325115 1.16039 +v 0.0643095 -0.335358 1.15632 +v 0.0656305 -0.344417 1.15159 +v 0.0667327 -0.353155 1.14623 +v 0.0676887 -0.360491 1.14047 +v 0.0684073 -0.367237 1.13397 +v 0.0688755 -0.372338 1.12734 +v 0.0692963 -0.37713 1.12043 +v 0.0697603 -0.381595 1.11353 +v 0.0702194 -0.386343 1.10678 +v 0.0712479 -0.386108 1.09776 +v 0.0724339 -0.385929 1.08835 +v 0.0923072 -0.394153 1.10023 +v 0.0949583 -0.403776 1.10492 +v 0.0978448 -0.412819 1.1099 +v 0.100514 -0.422055 1.1147 +v 0.102991 -0.431598 1.11897 +v 0.105026 -0.442647 1.12093 +v 0.106418 -0.453964 1.12178 +v 0.107793 -0.465829 1.12035 +v 0.108128 -0.477514 1.11723 +v 0.107978 -0.492452 1.10908 +v 0.106984 -0.506452 1.09968 +v 0.106598 -0.519872 1.0907 +v 0.106305 -0.533471 1.08206 +v 0.106554 -0.547179 1.07394 +v 0.107306 -0.561822 1.06778 +v 0.108448 -0.576229 1.06165 +v 0.110393 -0.591558 1.05888 +v 0.113013 -0.606622 1.05751 +v 0.116511 -0.621459 1.05847 +v 0.0984702 0.421669 1.08863 +v 0.0976996 0.413764 1.08586 +v 0.0968555 0.405991 1.08277 +v 0.0964944 0.399235 1.07974 +v 0.096079 0.392571 1.07651 +v 0.0957594 0.386642 1.07339 +v 0.0954018 0.380775 1.07014 +v 0.0954838 0.375337 1.06685 +v 0.0955295 0.369982 1.06345 +v 0.0955793 0.364955 1.06016 +v 0.0956029 0.35997 1.0568 +v 0.0956412 0.355302 1.05364 +v 0.0956662 0.350657 1.05045 +v 0.0961752 0.344267 1.04562 +v 0.0966617 0.337908 1.04076 +v 0.0969568 0.331771 1.03603 +v 0.097231 0.325641 1.0313 +v 0.0971055 0.319457 1.02692 +v 0.0969628 0.313282 1.02253 +v 0.0964288 0.307269 1.01901 +v 0.095924 0.301238 1.0155 +v 0.0950867 0.295518 1.01356 +v 0.0943099 0.289774 1.01165 +v 0.09331 0.284497 1.0113 +v 0.0923897 0.27921 1.01099 +v 0.0913395 0.274332 1.01201 +v 0.0904021 0.269439 1.01308 +v 0.0892307 0.264852 1.01559 +v 0.0881649 0.260261 1.01814 +v 0.0868878 0.255681 1.0222 +v 0.0857288 0.251105 1.02631 +v 0.0846259 0.246221 1.03147 +v 0.0836235 0.241363 1.03667 +v 0.0829085 0.236096 1.04231 +v 0.0822599 0.230837 1.04797 +v 0.0820831 0.225142 1.05377 +v 0.0819256 0.219458 1.05957 +v 0.0821337 0.213311 1.06549 +v 0.0823854 0.207208 1.07143 +v 0.0828579 0.200604 1.07747 +v 0.0833361 0.193996 1.08349 +v 0.0839125 0.186879 1.08956 +v 0.0844606 0.179753 1.09562 +v 0.084887 0.172007 1.10162 +v 0.085268 0.164246 1.1076 +v 0.085514 0.15587 1.1135 +v 0.0857288 0.147477 1.11938 +v 0.0858951 0.138539 1.12522 +v 0.0860418 0.129592 1.13104 +v 0.0863345 0.120228 1.13692 +v 0.0866551 0.110872 1.14282 +v 0.08736 0.101122 1.14897 +v 0.088118 0.0913903 1.15515 +v 0.0893571 0.0813008 1.16178 +v 0.0906521 0.0712199 1.16843 +v 0.0928242 0.0605681 1.17462 +v 0.0950317 0.0499326 1.18082 +v 0.0977932 0.0384903 1.18704 +v 0.100574 0.0270458 1.19323 +v 0.104149 0.0144571 1.19951 +v 0.107801 0.00187308 1.20576 +v 0.112165 -0.0117527 1.21139 +v 0.1164 -0.0254312 1.21697 +v 0.118894 -0.0399005 1.22091 +v 0.120736 -0.054522 1.22457 +v 0.118046 -0.0694577 1.22567 +v 0.115006 -0.0843077 1.22655 +v 0.109917 -0.0938439 1.22315 +v 0.104818 -0.103343 1.2197 +v 0.098093 -0.101359 1.21298 +v 0.0913646 -0.0993472 1.20625 +v 0.0812947 -0.0583299 1.1561 +v 0.0710767 -0.0173086 1.10578 +v 0.0699994 -0.0726359 1.11812 +v 0.0690314 -0.127882 1.13061 +v 0.0674983 -0.153946 1.13691 +v 0.0655826 -0.179867 1.14289 +v 0.065691 -0.191418 1.1429 +v 0.0658888 -0.202962 1.14293 +v 0.0663453 -0.213358 1.1436 +v 0.0668384 -0.223692 1.14433 +v 0.067167 
-0.234671 1.14554 +v 0.067578 -0.245567 1.14673 +v 0.0677789 -0.2569 1.14828 +v 0.0680432 -0.268216 1.15 +v 0.0683706 -0.279671 1.15309 +v 0.0688332 -0.290975 1.15664 +v 0.0688715 -0.302622 1.15959 +v 0.0683343 -0.314573 1.16075 +v 0.0709597 -0.325184 1.15785 +v 0.0730751 -0.335291 1.15354 +v 0.0747121 -0.344351 1.14857 +v 0.0761452 -0.353117 1.1431 +v 0.0773169 -0.360374 1.13733 +v 0.0782164 -0.367022 1.1308 +v 0.0788072 -0.372125 1.12409 +v 0.0793083 -0.376886 1.11713 +v 0.0798016 -0.381316 1.11009 +v 0.0803455 -0.386061 1.10309 +v 0.0817457 -0.386176 1.09427 +v 0.0831909 -0.386337 1.08538 +v 0.104609 -0.394123 1.09422 +v 0.107844 -0.403491 1.09831 +v 0.111298 -0.412418 1.10283 +v 0.114156 -0.421358 1.10723 +v 0.116722 -0.430566 1.11112 +v 0.118572 -0.44113 1.1127 +v 0.119895 -0.45184 1.11353 +v 0.121027 -0.463058 1.11245 +v 0.121323 -0.474208 1.11022 +v 0.121619 -0.489486 1.1029 +v 0.120845 -0.503931 1.0941 +v 0.120636 -0.517702 1.08555 +v 0.120543 -0.531642 1.07724 +v 0.121001 -0.545546 1.06954 +v 0.121945 -0.560287 1.06324 +v 0.123465 -0.574732 1.05744 +v 0.125636 -0.589721 1.05394 +v 0.128271 -0.604343 1.05134 +v 0.131786 -0.618922 1.05108 +v 0.110527 0.41788 1.08491 +v 0.109575 0.410317 1.08179 +v 0.108581 0.402888 1.07834 +v 0.108043 0.396568 1.07498 +v 0.107462 0.390334 1.07145 +v 0.107168 0.384806 1.06794 +v 0.106851 0.379332 1.06437 +v 0.106884 0.374295 1.06067 +v 0.106894 0.369298 1.05693 +v 0.10687 0.364497 1.05313 +v 0.106827 0.359729 1.0493 +v 0.106701 0.354865 1.04527 +v 0.106574 0.350017 1.04123 +v 0.106643 0.343992 1.03585 +v 0.106688 0.337985 1.03047 +v 0.106527 0.331698 1.02479 +v 0.106349 0.325419 1.01911 +v 0.105938 0.318665 1.01341 +v 0.105538 0.311887 1.00773 +v 0.105035 0.304995 1.00313 +v 0.104609 0.298049 0.998572 +v 0.103607 0.291879 0.996583 +v 0.102676 0.285689 0.994636 +v 0.101532 0.280166 0.994445 +v 0.10047 0.274625 0.9943 +v 0.0994844 0.269548 0.995244 +v 0.0985746 0.264458 0.996222 +v 0.0973519 0.259785 0.998695 +v 0.0962108 0.255133 1.00121 +v 0.0942974 0.250669 1.00612 +v 0.0925274 0.246218 1.01108 +v 0.0909506 0.241248 1.01715 +v 0.089531 0.236317 1.02327 +v 0.0887168 0.230913 1.02943 +v 0.0879997 0.225535 1.03562 +v 0.0878383 0.219701 1.04176 +v 0.0877442 0.213888 1.04791 +v 0.0880664 0.20766 1.05404 +v 0.0884292 0.201466 1.06019 +v 0.0890502 0.194857 1.06635 +v 0.0896613 0.188248 1.07251 +v 0.0903701 0.181167 1.07869 +v 0.0910653 0.174063 1.08485 +v 0.0916701 0.166406 1.0909 +v 0.0922073 0.158724 1.09692 +v 0.0926148 0.150456 1.10284 +v 0.0929451 0.142142 1.10872 +v 0.0931567 0.133295 1.11448 +v 0.0933219 0.124429 1.12022 +v 0.0935694 0.115159 1.12595 +v 0.0938435 0.105893 1.1317 +v 0.0944461 0.0962541 1.13764 +v 0.0951058 0.0866331 1.1436 +v 0.0961928 0.0766545 1.14985 +v 0.0973544 0.0666869 1.15612 +v 0.0991512 0.0561796 1.16192 +v 0.100978 0.045681 1.16773 +v 0.103317 0.0344 1.17345 +v 0.105767 0.0231384 1.1792 +v 0.109146 0.0109533 1.18499 +v 0.112823 -0.00114543 1.19082 +v 0.117467 -0.0140556 1.1964 +v 0.122018 -0.0270093 1.20195 +v 0.125117 -0.0408432 1.20639 +v 0.127624 -0.054847 1.21059 +v 0.126266 -0.0692382 1.21237 +v 0.124569 -0.0836255 1.21391 +v 0.119152 -0.0947602 1.20928 +v 0.113721 -0.105863 1.20459 +v 0.10653 -0.104457 1.19374 +v 0.0994726 -0.10284 1.18304 +v 0.0919731 -0.0812359 1.14858 +v 0.0848203 -0.0593791 1.11459 +v 0.0810628 -0.0953192 1.12116 +v 0.0774761 -0.131246 1.12769 +v 0.0751711 -0.155632 1.13193 +v 0.0726713 -0.179906 1.13601 +v 0.0729469 -0.191312 1.13636 +v 0.0733658 -0.202714 1.13682 +v 0.0740557 -0.213238 1.13808 
+v 0.07478 -0.223758 1.13939 +v 0.0751823 -0.234778 1.14116 +v 0.0756368 -0.245715 1.14287 +v 0.075908 -0.257185 1.14482 +v 0.0762178 -0.268632 1.14686 +v 0.0764367 -0.28029 1.1501 +v 0.0768074 -0.291801 1.15373 +v 0.0766285 -0.30372 1.15676 +v 0.075775 -0.315906 1.15801 +v 0.0789952 -0.326221 1.15474 +v 0.0816855 -0.336057 1.1502 +v 0.0836365 -0.344993 1.14515 +v 0.0853359 -0.353558 1.13955 +v 0.0867309 -0.360706 1.13361 +v 0.0878397 -0.367275 1.12692 +v 0.0885301 -0.372301 1.11997 +v 0.0891307 -0.377039 1.11284 +v 0.0895865 -0.381511 1.10566 +v 0.0901053 -0.386328 1.09858 +v 0.091376 -0.38681 1.09005 +v 0.0929396 -0.387374 1.08111 +v 0.118229 -0.395525 1.08593 +v 0.121144 -0.404194 1.08971 +v 0.12427 -0.412592 1.09388 +v 0.126897 -0.421092 1.09793 +v 0.129277 -0.429834 1.10162 +v 0.131043 -0.439674 1.10329 +v 0.132276 -0.449659 1.10444 +v 0.133335 -0.460105 1.10394 +v 0.13375 -0.470592 1.10252 +v 0.134461 -0.48628 1.0957 +v 0.134105 -0.501228 1.08762 +v 0.134161 -0.51541 1.07958 +v 0.134386 -0.529809 1.07185 +v 0.135127 -0.543995 1.06463 +v 0.136311 -0.55871 1.05855 +v 0.137901 -0.573082 1.05295 +v 0.140059 -0.587854 1.04883 +v 0.142843 -0.602433 1.04545 +v 0.146436 -0.617117 1.04402 +v 0.122491 0.414325 1.08069 +v 0.121382 0.407118 1.07721 +v 0.120198 0.400069 1.0734 +v 0.119498 0.394148 1.06978 +v 0.118737 0.388324 1.06599 +v 0.118492 0.383153 1.06222 +v 0.118201 0.378089 1.05833 +v 0.1182 0.373386 1.05432 +v 0.11817 0.368736 1.05026 +v 0.1181 0.364139 1.04597 +v 0.118003 0.359614 1.04165 +v 0.117703 0.354564 1.03673 +v 0.117378 0.349544 1.03179 +v 0.117017 0.343826 1.02594 +v 0.116638 0.338136 1.02007 +v 0.116035 0.331687 1.01346 +v 0.115418 0.325239 1.00684 +v 0.11478 0.317856 0.999898 +v 0.114193 0.310426 0.992983 +v 0.113828 0.302598 0.987357 +v 0.113541 0.294725 0.981784 +v 0.112433 0.288124 0.979777 +v 0.111387 0.281496 0.977814 +v 0.110171 0.275731 0.977811 +v 0.109003 0.269953 0.977841 +v 0.108133 0.26472 0.978715 +v 0.107312 0.259479 0.979618 +v 0.106098 0.254785 0.982077 +v 0.104932 0.250089 0.984567 +v 0.102367 0.245734 0.990303 +v 0.0998828 0.241401 0.996079 +v 0.09775 0.236349 1.00301 +v 0.0957543 0.231328 1.01 +v 0.0947822 0.225785 1.01665 +v 0.0939554 0.220284 1.02335 +v 0.0938311 0.214327 1.02981 +v 0.0937692 0.208397 1.03631 +v 0.0941555 0.202074 1.04263 +v 0.0945951 0.195779 1.04899 +v 0.0953252 0.18915 1.05528 +v 0.0960649 0.182534 1.06156 +v 0.0969208 0.175475 1.06782 +v 0.0977441 0.168398 1.07407 +v 0.0985065 0.160833 1.08019 +v 0.099204 0.153232 1.08628 +v 0.0997588 0.145064 1.09221 +v 0.10021 0.136852 1.0981 +v 0.100462 0.128099 1.1038 +v 0.100646 0.119314 1.10946 +v 0.100848 0.110127 1.11504 +v 0.101065 0.100957 1.12063 +v 0.101548 0.0914219 1.12634 +v 0.102086 0.0819024 1.13208 +v 0.103 0.0720202 1.13794 +v 0.103961 0.0621511 1.14381 +v 0.105372 0.0517744 1.14922 +v 0.106751 0.0413974 1.15463 +v 0.108665 0.0302568 1.15986 +v 0.110749 0.0191553 1.16513 +v 0.114004 0.00739213 1.17045 +v 0.117726 -0.00421809 1.17585 +v 0.122642 -0.0164086 1.18138 +v 0.127528 -0.0286071 1.18691 +v 0.131262 -0.0417929 1.19186 +v 0.134472 -0.0551389 1.19661 +v 0.134448 -0.0690104 1.19901 +v 0.134096 -0.0829129 1.20124 +v 0.128394 -0.0956764 1.19539 +v 0.122594 -0.108337 1.18948 +v 0.115033 -0.107389 1.17459 +v 0.107477 -0.106468 1.15975 +v 0.102921 -0.103997 1.14176 +v 0.0984619 -0.101571 1.12368 +v 0.092183 -0.118025 1.1242 +v 0.0859009 -0.134597 1.12477 +v 0.0828661 -0.157328 1.12699 +v 0.0797145 -0.17993 1.12909 +v 0.0801617 -0.191227 1.12977 +v 0.0807891 -0.202491 1.13066 +v 
0.0816979 -0.213153 1.13246
[… several thousand additional Wavefront OBJ vertex records ("+v x y z") omitted; the flattened coordinate stream contains no further recoverable content …]
+v 0.267165 -0.187894 1.01801 +v 0.268141 -0.198158 1.01851 +v 0.269191 -0.20841 1.01922 +v 0.270838 -0.218515 1.0201 +v 0.272509 -0.228629 1.0211 +v 0.27437 -0.238717 1.02213 +v 0.276223 -0.248784 1.02307 +v 0.277946 -0.25888 1.02386 +v 0.279569 -0.26904 1.0245 +v 0.281052 -0.279066 1.02488 +v 0.282399 -0.289139 1.02499 +v 0.283611 -0.299178 1.02471 +v 0.284614 -0.309275 1.02408 +v 0.285438 -0.319301 1.02303 +v 0.28603 -0.329302 1.02161 +v 0.286425 -0.339006 1.01961 +v 0.28653 -0.348602 1.0171 +v 0.287287 -0.357532 1.01402 +v 0.287714 -0.366284 1.01038 +v 0.289667 -0.373693 1.00615 +v 0.291496 -0.380967 1.00176 +v 0.294896 -0.38723 0.997186 +v 0.298325 -0.393613 0.992804 +v 0.303888 -0.399794 0.988064 +v 0.309411 -0.406122 0.983614 +v 0.313063 -0.413271 0.979372 +v 0.316678 -0.420529 0.975337 +v 0.319366 -0.42594 0.97235 +v 0.322043 -0.431385 0.969499 +v 0.324116 -0.436681 0.966918 +v 0.326194 -0.442014 0.964355 +v 0.328208 -0.447318 0.961843 +v 0.330208 -0.452629 0.959379 +v 0.332411 -0.458245 0.956567 +v 0.334588 -0.463862 0.953799 +v 0.336555 -0.469969 0.95087 +v 0.338473 -0.47605 0.947861 +v 0.340597 -0.482435 0.944517 +v 0.342652 -0.488799 0.941151 +v 0.344733 -0.495796 0.937387 +v 0.346749 -0.502757 0.933513 +v 0.348773 -0.510083 0.929409 +v 0.350696 -0.517334 0.925147 +v 0.352765 -0.524917 0.920497 +v 0.354758 -0.532472 0.915761 +v 0.356758 -0.54032 0.91066 +v 0.358658 -0.548139 0.905427 +v 0.360765 -0.556193 0.89984 +v 0.362804 -0.564185 0.894195 +v 0.364983 -0.572302 0.888193 +v 0.367056 -0.580414 0.882044 +v 0.382813 0.431858 1.00471 +v 0.37929 0.424383 1.00242 +v 0.37578 0.416872 1.00009 +v 0.370084 0.406722 0.997765 +v 0.36436 0.396605 0.995333 +v 0.35913 0.387416 0.992798 +v 0.353885 0.378253 0.990104 +v 0.348741 0.369765 0.98732 +v 0.3436 0.361149 0.984513 +v 0.338905 0.35306 0.981661 +v 0.334261 0.345003 0.978808 +v 0.329648 0.337353 0.976449 +v 0.325024 0.329808 0.974187 +v 0.320182 0.323614 0.971798 +v 0.315345 0.317389 0.969454 +v 0.309187 0.312335 0.966182 +v 0.303031 0.307325 0.962906 +v 0.301156 0.304698 0.956943 +v 0.299136 0.301794 0.949875 +v 0.297836 0.294234 0.951556 +v 0.296474 0.286506 0.952717 +v 0.294775 0.27949 0.953553 +v 0.293027 0.272387 0.95406 +v 0.291382 0.265377 0.954252 +v 0.289722 0.258367 0.954111 +v 0.288213 0.251633 0.953715 +v 0.286686 0.244893 0.952998 +v 0.285243 0.238441 0.952007 +v 0.283784 0.231964 0.950704 +v 0.282335 0.225608 0.949053 +v 0.280881 0.219174 0.947043 +v 0.28124 0.216149 0.946374 +v 0.281737 0.212308 0.948404 +v 0.281036 0.203879 0.949226 +v 0.280348 0.195462 0.94999 +v 0.27964 0.187057 0.948583 +v 0.278938 0.178703 0.947087 +v 0.278297 0.17018 0.945346 +v 0.277661 0.161635 0.943769 +v 0.277038 0.15328 0.944209 +v 0.276463 0.144919 0.945188 +v 0.275801 0.136981 0.947354 +v 0.275138 0.12904 0.949601 +v 0.274645 0.120719 0.951929 +v 0.274154 0.112396 0.954259 +v 0.273985 0.104102 0.956698 +v 0.27384 0.0958308 0.959284 +v 0.273949 0.087718 0.962384 +v 0.274109 0.0796522 0.965664 +v 0.274422 0.0716926 0.9693 +v 0.274754 0.0637567 0.972983 +v 0.275063 0.0557411 0.976597 +v 0.275367 0.0477055 0.980177 +v 0.275586 0.0395218 0.983645 +v 0.27578 0.0313073 0.987041 +v 0.275875 0.0230282 0.990249 +v 0.275952 0.0147293 0.993408 +v 0.275915 0.00631972 0.996427 +v 0.275862 -0.00210217 0.999395 +v 0.275695 -0.0106532 1.00221 +v 0.275507 -0.0192277 1.00494 +v 0.275214 -0.0279392 1.00743 +v 0.274868 -0.036699 1.00979 +v 0.274518 -0.0455742 1.01181 +v 0.27415 -0.0544611 1.01373 +v 0.273853 -0.0635348 1.01522 +v 0.273529 -0.0726263 1.01658 +v 
0.273552 -0.0818122 1.01755 +v 0.273558 -0.0910396 1.01833 +v 0.273608 -0.100489 1.0187 +v 0.273645 -0.109925 1.01897 +v 0.274131 -0.119458 1.0189 +v 0.274609 -0.128973 1.01867 +v 0.275393 -0.13859 1.0182 +v 0.276198 -0.148185 1.01766 +v 0.277379 -0.15791 1.01708 +v 0.278552 -0.167609 1.01644 +v 0.279943 -0.177426 1.01583 +v 0.281345 -0.187246 1.01529 +v 0.282103 -0.197308 1.01514 +v 0.282932 -0.20741 1.01515 +v 0.284377 -0.217373 1.0153 +v 0.285864 -0.22734 1.01564 +v 0.287492 -0.237216 1.01612 +v 0.289128 -0.247091 1.01658 +v 0.290692 -0.25702 1.01702 +v 0.292261 -0.266954 1.01741 +v 0.293808 -0.276785 1.01751 +v 0.295276 -0.286663 1.01746 +v 0.296594 -0.296543 1.0171 +v 0.297747 -0.306471 1.01646 +v 0.298676 -0.316334 1.01549 +v 0.29938 -0.326223 1.01419 +v 0.299897 -0.335922 1.01251 +v 0.300179 -0.345578 1.01051 +v 0.301342 -0.354649 1.0077 +v 0.302245 -0.363626 1.00455 +v 0.304252 -0.371565 1.0009 +v 0.306176 -0.379379 0.997081 +v 0.309235 -0.386293 0.992985 +v 0.312284 -0.393183 0.988874 +v 0.316447 -0.399564 0.984374 +v 0.320657 -0.405994 0.979896 +v 0.323993 -0.412793 0.975718 +v 0.327394 -0.419602 0.971649 +v 0.329986 -0.425156 0.968568 +v 0.332561 -0.430725 0.965484 +v 0.334782 -0.436143 0.962605 +v 0.336977 -0.441591 0.959828 +v 0.339 -0.447069 0.957095 +v 0.341044 -0.452528 0.954232 +v 0.343157 -0.458268 0.951209 +v 0.345261 -0.463994 0.94816 +v 0.347197 -0.470182 0.944965 +v 0.349101 -0.476303 0.941678 +v 0.351116 -0.482704 0.938157 +v 0.353079 -0.489087 0.934564 +v 0.355067 -0.495941 0.930638 +v 0.356991 -0.502759 0.926583 +v 0.358938 -0.509884 0.922241 +v 0.360796 -0.516951 0.917789 +v 0.362754 -0.524334 0.912982 +v 0.364617 -0.53167 0.90807 +v 0.366555 -0.539264 0.902853 +v 0.368372 -0.54681 0.897505 +v 0.370318 -0.554556 0.891832 +v 0.372175 -0.562246 0.886074 +v 0.374217 -0.570114 0.879983 +v 0.376151 -0.577981 0.873752 +v 0.393633 0.432213 0.999855 +v 0.389992 0.424596 0.998037 +v 0.386342 0.416956 0.996168 +v 0.380757 0.406555 0.994232 +v 0.375153 0.396168 0.992205 +v 0.369987 0.386773 0.989888 +v 0.364792 0.377397 0.987392 +v 0.359608 0.368704 0.984758 +v 0.354431 0.359855 0.982112 +v 0.349703 0.351536 0.979389 +v 0.345019 0.34325 0.976716 +v 0.340244 0.335176 0.974514 +v 0.335449 0.327204 0.972482 +v 0.330604 0.321572 0.970446 +v 0.32573 0.315861 0.968365 +v 0.319145 0.311371 0.965371 +v 0.312439 0.306742 0.962732 +v 0.311145 0.304247 0.956869 +v 0.309295 0.3011 0.948205 +v 0.308069 0.29346 0.950063 +v 0.306762 0.285564 0.951305 +v 0.30515 0.278336 0.952195 +v 0.303455 0.271164 0.952786 +v 0.301891 0.264199 0.953014 +v 0.300268 0.257278 0.952949 +v 0.29887 0.250522 0.952551 +v 0.297424 0.24379 0.951856 +v 0.296191 0.237356 0.950879 +v 0.294922 0.230816 0.949441 +v 0.293892 0.224076 0.947637 +v 0.292854 0.217365 0.94556 +v 0.293092 0.214636 0.945771 +v 0.293602 0.211227 0.948214 +v 0.292836 0.202877 0.94913 +v 0.292068 0.194571 0.949993 +v 0.291276 0.186375 0.948488 +v 0.290499 0.178212 0.946893 +v 0.289812 0.170289 0.944801 +v 0.289143 0.162363 0.942807 +v 0.289031 0.153685 0.942514 +v 0.289011 0.145065 0.942883 +v 0.288028 0.137388 0.944964 +v 0.287046 0.12972 0.947192 +v 0.286345 0.121454 0.949581 +v 0.285644 0.113133 0.951943 +v 0.285427 0.104819 0.954184 +v 0.285223 0.0965324 0.956465 +v 0.285207 0.0882293 0.95918 +v 0.285247 0.0799611 0.962099 +v 0.285526 0.0719303 0.965449 +v 0.285832 0.0639434 0.968903 +v 0.286158 0.0559397 0.972403 +v 0.286486 0.0479202 0.97591 +v 0.286778 0.0397571 0.979343 +v 0.287062 0.0315845 0.982758 +v 0.287281 0.0233473 0.985999 +v 
0.287474 0.0150848 0.989166 +v 0.287587 0.00669773 0.99224 +v 0.287676 -0.00173486 0.995214 +v 0.287701 -0.0102601 0.998048 +v 0.2877 -0.0188138 1.00079 +v 0.28765 -0.0275478 1.0033 +v 0.287568 -0.0363151 1.00573 +v 0.28745 -0.0451864 1.00785 +v 0.287308 -0.0540894 1.00985 +v 0.287332 -0.0631729 1.01152 +v 0.287329 -0.0722668 1.01306 +v 0.287774 -0.0814661 1.01415 +v 0.288202 -0.0906532 1.01509 +v 0.288423 -0.1001 1.01568 +v 0.288612 -0.109557 1.01609 +v 0.289071 -0.119137 1.01624 +v 0.289539 -0.128715 1.01621 +v 0.290256 -0.138309 1.01584 +v 0.290965 -0.147886 1.01541 +v 0.291999 -0.157571 1.0148 +v 0.293009 -0.16722 1.01409 +v 0.294268 -0.176901 1.01326 +v 0.295544 -0.186582 1.01251 +v 0.296116 -0.196523 1.01192 +v 0.296727 -0.206501 1.01145 +v 0.298022 -0.216291 1.01099 +v 0.299338 -0.226077 1.0106 +v 0.300727 -0.235786 1.01052 +v 0.302134 -0.245531 1.01053 +v 0.303572 -0.255238 1.01049 +v 0.304979 -0.264948 1.0104 +v 0.306575 -0.274588 1.0102 +v 0.308073 -0.284304 1.00982 +v 0.309427 -0.29402 1.00926 +v 0.310643 -0.303746 1.00848 +v 0.311624 -0.313434 1.00752 +v 0.312425 -0.323122 1.0063 +v 0.313055 -0.332703 1.00486 +v 0.313352 -0.342329 1.00311 +v 0.314837 -0.351446 1.00041 +v 0.31616 -0.36051 0.997459 +v 0.318188 -0.368771 0.994026 +v 0.320086 -0.376977 0.990452 +v 0.322793 -0.384361 0.98659 +v 0.32547 -0.391714 0.982632 +v 0.328457 -0.398498 0.978689 +v 0.331417 -0.405263 0.974786 +v 0.334613 -0.411712 0.970924 +v 0.337789 -0.418296 0.967085 +v 0.340252 -0.424008 0.963801 +v 0.342716 -0.429735 0.96057 +v 0.345052 -0.435329 0.957564 +v 0.347365 -0.440919 0.954522 +v 0.349441 -0.446555 0.951452 +v 0.351496 -0.452193 0.948373 +v 0.353558 -0.458068 0.945205 +v 0.355598 -0.463952 0.941965 +v 0.357485 -0.470194 0.938561 +v 0.359313 -0.476375 0.93509 +v 0.361253 -0.482802 0.93135 +v 0.363135 -0.489211 0.92753 +v 0.36504 -0.495907 0.923402 +v 0.366881 -0.502563 0.919148 +v 0.368772 -0.509502 0.914624 +v 0.370585 -0.516414 0.910004 +v 0.372426 -0.523587 0.905066 +v 0.374154 -0.53071 0.9 +v 0.37597 -0.538008 0.894638 +v 0.377704 -0.545267 0.889196 +v 0.379519 -0.552734 0.883462 +v 0.381228 -0.560147 0.877631 +v 0.383133 -0.567786 0.871454 +v 0.384954 -0.575396 0.865176 +v 0.40376 0.431751 0.994532 +v 0.39999 0.424061 0.99312 +v 0.396249 0.416329 0.991715 +v 0.390853 0.405791 0.99016 +v 0.385401 0.395299 0.988437 +v 0.38027 0.385732 0.986378 +v 0.375091 0.376235 0.984089 +v 0.369984 0.367382 0.981548 +v 0.364871 0.358407 0.978952 +v 0.360097 0.350009 0.97626 +v 0.355347 0.341559 0.97362 +v 0.350769 0.333453 0.971553 +v 0.346169 0.325402 0.96964 +v 0.34155 0.319763 0.967887 +v 0.336917 0.314084 0.966148 +v 0.331173 0.309453 0.963592 +v 0.32539 0.304797 0.960984 +v 0.323173 0.302379 0.95493 +v 0.320048 0.299643 0.946273 +v 0.318705 0.291973 0.947927 +v 0.317243 0.284147 0.94923 +v 0.315646 0.277002 0.950172 +v 0.313961 0.269925 0.950811 +v 0.312454 0.263052 0.951087 +v 0.310877 0.256206 0.951066 +v 0.309572 0.24963 0.950697 +v 0.308203 0.243056 0.950043 +v 0.307144 0.236641 0.949037 +v 0.306014 0.230172 0.947639 +v 0.305178 0.223421 0.945848 +v 0.304294 0.216717 0.943758 +v 0.304616 0.213795 0.944456 +v 0.30528 0.210305 0.946996 +v 0.304547 0.202186 0.947972 +v 0.303793 0.19411 0.948906 +v 0.302865 0.186106 0.947423 +v 0.301947 0.178131 0.945845 +v 0.301216 0.170391 0.943609 +v 0.300499 0.162665 0.941416 +v 0.300199 0.15438 0.940665 +v 0.299962 0.146165 0.940558 +v 0.29914 0.138312 0.942397 +v 0.298351 0.130459 0.944526 +v 0.297696 0.122212 0.947018 +v 0.297032 0.113942 0.949446 +v 0.296845 
0.105585 0.951643 +v 0.296674 0.0972474 0.953916 +v 0.296614 0.0889429 0.956376 +v 0.296591 0.0806456 0.95899 +v 0.296831 0.0724966 0.962014 +v 0.297113 0.064401 0.965203 +v 0.297394 0.0563559 0.968577 +v 0.297681 0.0483107 0.971972 +v 0.298036 0.0401856 0.97534 +v 0.298395 0.0320222 0.978672 +v 0.298713 0.0237847 0.9819 +v 0.299008 0.0155247 0.985055 +v 0.299258 0.00714852 0.988105 +v 0.299491 -0.00125721 0.991077 +v 0.299655 -0.00979166 0.993864 +v 0.299801 -0.0183432 0.996593 +v 0.299942 -0.0270625 0.999104 +v 0.30009 -0.0358226 1.00155 +v 0.300188 -0.0446922 1.00377 +v 0.300231 -0.0536059 1.00581 +v 0.300408 -0.0626761 1.00752 +v 0.300553 -0.0717582 1.00914 +v 0.300966 -0.0809937 1.01039 +v 0.301342 -0.0902449 1.01147 +v 0.301664 -0.0996727 1.01224 +v 0.301986 -0.109143 1.0128 +v 0.302549 -0.118724 1.01303 +v 0.303066 -0.128304 1.01309 +v 0.303792 -0.137882 1.01288 +v 0.304481 -0.147436 1.01248 +v 0.305381 -0.157072 1.01192 +v 0.306294 -0.166688 1.01125 +v 0.307362 -0.176373 1.01042 +v 0.308406 -0.186018 1.00948 +v 0.309046 -0.195819 1.00871 +v 0.309705 -0.205615 1.00795 +v 0.310866 -0.215277 1.00709 +v 0.312058 -0.224947 1.00628 +v 0.313247 -0.234557 1.0056 +v 0.314503 -0.244135 1.0051 +v 0.315857 -0.253648 1.00455 +v 0.317231 -0.263191 1.00405 +v 0.318696 -0.272697 1.0035 +v 0.320091 -0.282236 1.00283 +v 0.321437 -0.291801 1.00197 +v 0.322694 -0.301355 1.00092 +v 0.323795 -0.310835 0.99972 +v 0.324751 -0.320357 0.998407 +v 0.325651 -0.329802 0.996784 +v 0.326356 -0.339278 0.995017 +v 0.327831 -0.348306 0.992376 +v 0.329257 -0.357399 0.989731 +v 0.331069 -0.365809 0.986522 +v 0.332874 -0.374197 0.98336 +v 0.335306 -0.381735 0.97979 +v 0.33772 -0.389277 0.976178 +v 0.340441 -0.396232 0.972455 +v 0.343096 -0.403223 0.968708 +v 0.34581 -0.409821 0.965027 +v 0.34856 -0.416306 0.96132 +v 0.350889 -0.422257 0.958106 +v 0.353195 -0.428193 0.954837 +v 0.355404 -0.43396 0.951682 +v 0.3576 -0.43972 0.94853 +v 0.359633 -0.445566 0.945408 +v 0.361656 -0.451417 0.942195 +v 0.363642 -0.457426 0.938868 +v 0.365555 -0.463369 0.93548 +v 0.367406 -0.469597 0.931867 +v 0.369186 -0.475805 0.928177 +v 0.371086 -0.482227 0.924282 +v 0.372898 -0.488592 0.920275 +v 0.374738 -0.495152 0.915948 +v 0.376519 -0.501693 0.911556 +v 0.3783 -0.508503 0.906932 +v 0.37994 -0.515243 0.90218 +v 0.381673 -0.522227 0.897095 +v 0.383326 -0.529172 0.891953 +v 0.384976 -0.536239 0.88653 +v 0.386594 -0.543289 0.881082 +v 0.388319 -0.550495 0.875315 +v 0.389967 -0.557671 0.869474 +v 0.391644 -0.565044 0.863328 +v 0.393266 -0.572382 0.857136 +v 0.413778 0.431362 0.98899 +v 0.409966 0.423556 0.988139 +v 0.406113 0.41573 0.987147 +v 0.400871 0.405083 0.985899 +v 0.395598 0.394453 0.984513 +v 0.390495 0.38476 0.98271 +v 0.385336 0.375131 0.98064 +v 0.380334 0.366115 0.978215 +v 0.375299 0.35703 0.975714 +v 0.370477 0.348462 0.97308 +v 0.365666 0.339922 0.970461 +v 0.361256 0.331736 0.968518 +v 0.356834 0.323577 0.966761 +v 0.352477 0.31798 0.965363 +v 0.348085 0.312317 0.963929 +v 0.343222 0.307592 0.961596 +v 0.338338 0.302896 0.959202 +v 0.335147 0.300737 0.953197 +v 0.330552 0.298035 0.943428 +v 0.32908 0.290429 0.945149 +v 0.327466 0.282726 0.946541 +v 0.325902 0.275652 0.947519 +v 0.324243 0.268597 0.948184 +v 0.322801 0.261931 0.94851 +v 0.321271 0.255186 0.948529 +v 0.320077 0.248826 0.948185 +v 0.318819 0.242411 0.947546 +v 0.317832 0.236166 0.94652 +v 0.316886 0.229704 0.945142 +v 0.316226 0.222967 0.94333 +v 0.315511 0.216285 0.941242 +v 0.316139 0.212918 0.943109 +v 0.316956 0.209348 0.945738 +v 0.316269 0.20149 
0.946797 +v 0.315548 0.193651 0.947765 +v 0.314483 0.185845 0.946285 +v 0.313438 0.1781 0.944727 +v 0.312632 0.170526 0.94233 +v 0.311847 0.162987 0.939946 +v 0.311351 0.155074 0.938772 +v 0.310905 0.147245 0.93819 +v 0.310234 0.139208 0.939764 +v 0.309636 0.131179 0.941738 +v 0.309019 0.122943 0.9443 +v 0.308399 0.114703 0.946818 +v 0.308261 0.106358 0.949051 +v 0.30812 0.0979861 0.951285 +v 0.308036 0.0896763 0.953586 +v 0.307977 0.0814051 0.956023 +v 0.308197 0.073158 0.958774 +v 0.308446 0.0649491 0.961673 +v 0.30868 0.0568396 0.964896 +v 0.308917 0.0487475 0.968145 +v 0.30932 0.0406363 0.971435 +v 0.30975 0.0324789 0.974674 +v 0.310159 0.0242394 0.977843 +v 0.310551 0.0159734 0.980979 +v 0.310932 0.00759675 0.983968 +v 0.3113 -0.000794897 0.98692 +v 0.311616 -0.0093161 0.989705 +v 0.31191 -0.0178707 0.992408 +v 0.312273 -0.0265923 0.994908 +v 0.31264 -0.0353597 0.997317 +v 0.312903 -0.0442416 0.999545 +v 0.31312 -0.0531456 1.00166 +v 0.313447 -0.0622005 1.00337 +v 0.313728 -0.071274 1.00498 +v 0.314105 -0.0805514 1.0063 +v 0.314448 -0.0898419 1.00748 +v 0.31488 -0.0992675 1.00841 +v 0.315312 -0.108761 1.00903 +v 0.315911 -0.118319 1.00932 +v 0.316452 -0.127879 1.00942 +v 0.317179 -0.137423 1.00926 +v 0.317863 -0.146931 1.00891 +v 0.318672 -0.156506 1.00837 +v 0.31945 -0.166055 1.00766 +v 0.320335 -0.175742 1.00687 +v 0.321179 -0.185367 1.00591 +v 0.321895 -0.195043 1.00493 +v 0.322598 -0.2047 1.00389 +v 0.323677 -0.214256 1.00287 +v 0.324744 -0.223797 1.00176 +v 0.325788 -0.2333 1.00071 +v 0.326869 -0.242793 0.999697 +v 0.328159 -0.252184 0.998786 +v 0.329449 -0.261571 0.997831 +v 0.330823 -0.270918 0.996931 +v 0.332133 -0.280253 0.995897 +v 0.333404 -0.28971 0.994808 +v 0.334608 -0.299112 0.993434 +v 0.335811 -0.308437 0.992057 +v 0.336877 -0.317765 0.990521 +v 0.338047 -0.32703 0.988763 +v 0.339048 -0.336317 0.986812 +v 0.340526 -0.345316 0.984357 +v 0.341897 -0.354296 0.981795 +v 0.343648 -0.362759 0.978777 +v 0.345203 -0.371269 0.975828 +v 0.347294 -0.378994 0.97255 +v 0.349331 -0.386732 0.969194 +v 0.351784 -0.393952 0.965538 +v 0.354218 -0.401159 0.961851 +v 0.356517 -0.407641 0.958568 +v 0.358803 -0.414116 0.955256 +v 0.361033 -0.420328 0.951908 +v 0.36322 -0.426505 0.948493 +v 0.365311 -0.432425 0.945252 +v 0.367398 -0.438367 0.941978 +v 0.369432 -0.444445 0.938656 +v 0.371383 -0.450481 0.935244 +v 0.373249 -0.456542 0.931835 +v 0.375055 -0.462592 0.92834 +v 0.376902 -0.468833 0.924617 +v 0.378634 -0.475029 0.920759 +v 0.380448 -0.481422 0.916617 +v 0.382189 -0.487751 0.912408 +v 0.383985 -0.494187 0.90799 +v 0.38568 -0.500582 0.903467 +v 0.387317 -0.50722 0.898752 +v 0.388819 -0.513819 0.893875 +v 0.390463 -0.520641 0.888692 +v 0.392045 -0.527422 0.883452 +v 0.393622 -0.534302 0.878073 +v 0.395104 -0.541149 0.872606 +v 0.39675 -0.5481 0.866836 +v 0.398312 -0.555038 0.860963 +v 0.399875 -0.562177 0.85497 +v 0.40137 -0.569296 0.848922 +v 0.423132 0.430364 0.983233 +v 0.419326 0.422436 0.982733 +v 0.415474 0.414494 0.982102 +v 0.410365 0.4038 0.981154 +v 0.405206 0.39314 0.980038 +v 0.400192 0.383335 0.978403 +v 0.395118 0.373618 0.976494 +v 0.390126 0.364497 0.97417 +v 0.385084 0.355344 0.971735 +v 0.380285 0.346748 0.969062 +v 0.375507 0.338206 0.9664 +v 0.371266 0.3298 0.964506 +v 0.367062 0.321432 0.962781 +v 0.363014 0.315739 0.961658 +v 0.358916 0.309989 0.960497 +v 0.354417 0.30509 0.957979 +v 0.349922 0.300276 0.955415 +v 0.346289 0.298215 0.949283 +v 0.34132 0.295652 0.939819 +v 0.339518 0.288342 0.941666 +v 0.337556 0.280913 0.9432 +v 0.335966 0.274096 0.944221 +v 
0.334265 0.267289 0.944953 +v 0.332911 0.260925 0.945288 +v 0.331433 0.25447 0.945368 +v 0.330348 0.248321 0.945037 +v 0.329117 0.24228 0.944514 +v 0.328374 0.236008 0.943398 +v 0.327577 0.229794 0.94207 +v 0.327223 0.223216 0.940242 +v 0.326773 0.216696 0.938148 +v 0.327384 0.213289 0.939544 +v 0.328404 0.209472 0.942177 +v 0.327728 0.201717 0.943738 +v 0.327002 0.193948 0.945174 +v 0.325907 0.186147 0.943879 +v 0.324821 0.178413 0.942504 +v 0.323883 0.17087 0.940176 +v 0.322958 0.163395 0.937842 +v 0.322437 0.155646 0.93647 +v 0.321973 0.147926 0.935616 +v 0.321207 0.139908 0.936948 +v 0.320524 0.131892 0.938679 +v 0.320017 0.123637 0.941257 +v 0.319519 0.115387 0.943859 +v 0.31945 0.107123 0.946205 +v 0.319364 0.0988039 0.948506 +v 0.319363 0.0904929 0.950778 +v 0.319381 0.082215 0.953162 +v 0.319589 0.0739395 0.955741 +v 0.319828 0.0656909 0.958457 +v 0.320127 0.0574969 0.961449 +v 0.320448 0.0493524 0.964544 +v 0.320774 0.0411954 0.967728 +v 0.321117 0.0330146 0.970901 +v 0.321615 0.0247638 0.973942 +v 0.322106 0.0165024 0.976977 +v 0.322588 0.00811825 0.979936 +v 0.323056 -0.00029144 0.982834 +v 0.323495 -0.00878013 0.985611 +v 0.323904 -0.0173317 0.988243 +v 0.32437 -0.0260624 0.990685 +v 0.324828 -0.0348275 0.993044 +v 0.325282 -0.043732 0.995183 +v 0.325699 -0.0526638 0.99721 +v 0.326162 -0.0617064 0.998953 +v 0.326561 -0.0707746 1.00057 +v 0.327022 -0.0800704 1.00192 +v 0.327429 -0.0893893 1.00311 +v 0.3279 -0.0987964 1.00402 +v 0.32836 -0.108257 1.00466 +v 0.329005 -0.117802 1.00501 +v 0.329573 -0.127343 1.00516 +v 0.330301 -0.136885 1.00504 +v 0.330979 -0.146424 1.00476 +v 0.331701 -0.156004 1.00424 +v 0.332376 -0.165559 1.00352 +v 0.333214 -0.175163 1.00265 +v 0.334007 -0.184736 1.00163 +v 0.334677 -0.194339 1.00058 +v 0.335331 -0.203928 0.999438 +v 0.336261 -0.213419 0.998209 +v 0.337208 -0.222862 0.996929 +v 0.338205 -0.232265 0.995603 +v 0.339198 -0.241634 0.994231 +v 0.340314 -0.250889 0.992918 +v 0.341419 -0.260214 0.991611 +v 0.342586 -0.269492 0.990347 +v 0.343761 -0.27876 0.989106 +v 0.344966 -0.288008 0.987677 +v 0.346132 -0.297285 0.986189 +v 0.347333 -0.306433 0.984553 +v 0.348376 -0.315661 0.982924 +v 0.349594 -0.324786 0.980951 +v 0.350715 -0.333928 0.978878 +v 0.352106 -0.34277 0.976459 +v 0.353389 -0.351605 0.973962 +v 0.355011 -0.359906 0.971094 +v 0.35641 -0.368316 0.968301 +v 0.358377 -0.376114 0.965143 +v 0.360307 -0.383895 0.961859 +v 0.362511 -0.391108 0.958405 +v 0.364659 -0.398279 0.95509 +v 0.366903 -0.405001 0.951603 +v 0.369132 -0.411749 0.947983 +v 0.371182 -0.41806 0.944742 +v 0.373231 -0.424385 0.941445 +v 0.37524 -0.430543 0.938133 +v 0.377187 -0.436677 0.934743 +v 0.379105 -0.442795 0.931375 +v 0.380927 -0.4489 0.927966 +v 0.382754 -0.455056 0.924432 +v 0.384476 -0.461172 0.920775 +v 0.386211 -0.467429 0.916945 +v 0.387808 -0.473615 0.913017 +v 0.389561 -0.479936 0.908857 +v 0.391207 -0.486214 0.904592 +v 0.392841 -0.492572 0.900135 +v 0.39435 -0.498893 0.895533 +v 0.395951 -0.505419 0.890704 +v 0.397422 -0.511903 0.885761 +v 0.398929 -0.518533 0.88063 +v 0.400353 -0.525135 0.875434 +v 0.401839 -0.531861 0.869988 +v 0.403256 -0.538552 0.864504 +v 0.404736 -0.545349 0.858831 +v 0.40615 -0.552124 0.853083 +v 0.407698 -0.559088 0.847146 +v 0.409111 -0.565989 0.841116 +v 0.432396 0.429423 0.977307 +v 0.428603 0.421367 0.977175 +v 0.424769 0.413298 0.976918 +v 0.419798 0.402554 0.976281 +v 0.414761 0.391858 0.975441 +v 0.409838 0.381968 0.973952 +v 0.404846 0.372129 0.972248 +v 0.399858 0.362942 0.969994 +v 0.394814 0.353723 0.967654 +v 0.390059 
0.345069 0.964971 +v 0.385313 0.336545 0.962291 +v 0.381223 0.327861 0.960456 +v 0.377267 0.319287 0.958816 +v 0.373488 0.31348 0.957934 +v 0.36969 0.307658 0.957025 +v 0.365613 0.302633 0.954369 +v 0.361513 0.297652 0.951682 +v 0.357515 0.295627 0.945182 +v 0.351866 0.293083 0.935524 +v 0.349724 0.28611 0.937509 +v 0.347409 0.279076 0.939228 +v 0.345796 0.27252 0.940295 +v 0.344055 0.265992 0.9411 +v 0.342812 0.259837 0.941405 +v 0.341408 0.253765 0.941543 +v 0.340409 0.247901 0.941254 +v 0.339289 0.241926 0.940754 +v 0.338747 0.236004 0.939656 +v 0.338121 0.230055 0.938348 +v 0.338028 0.223668 0.93646 +v 0.337836 0.217356 0.934331 +v 0.338641 0.213659 0.935959 +v 0.339861 0.209602 0.938598 +v 0.339178 0.201933 0.940626 +v 0.338439 0.194255 0.942508 +v 0.337339 0.186492 0.941415 +v 0.336193 0.17877 0.940176 +v 0.335129 0.171288 0.937892 +v 0.334057 0.163919 0.935582 +v 0.333493 0.156275 0.934017 +v 0.333019 0.14862 0.932923 +v 0.332147 0.140602 0.933978 +v 0.331397 0.132574 0.935544 +v 0.330984 0.124304 0.938099 +v 0.330601 0.116041 0.940728 +v 0.330601 0.107836 0.943196 +v 0.33059 0.0996028 0.945617 +v 0.330684 0.0913026 0.947922 +v 0.330789 0.0830201 0.950282 +v 0.330997 0.0747234 0.952735 +v 0.331234 0.0664381 0.955319 +v 0.331613 0.058187 0.958118 +v 0.33202 0.0499864 0.961026 +v 0.332277 0.041769 0.964114 +v 0.332545 0.033568 0.96724 +v 0.33312 0.0253074 0.970153 +v 0.333688 0.0170331 0.973039 +v 0.334267 0.00861712 0.975899 +v 0.334821 0.000200195 0.978743 +v 0.335368 -0.00827911 0.981443 +v 0.33589 -0.0168048 0.984036 +v 0.336453 -0.0255442 0.986408 +v 0.336999 -0.0343247 0.988679 +v 0.337618 -0.0432366 0.990741 +v 0.33823 -0.0521794 0.992713 +v 0.338809 -0.0612316 0.994407 +v 0.339349 -0.0703047 0.995956 +v 0.339856 -0.0796224 0.997267 +v 0.340312 -0.0889574 0.998442 +v 0.340823 -0.098346 0.999314 +v 0.341297 -0.107769 0.99997 +v 0.341953 -0.117273 1.00027 +v 0.34256 -0.126778 1.00041 +v 0.343271 -0.136327 1.00028 +v 0.3439 -0.145866 0.999975 +v 0.344537 -0.155427 0.999438 +v 0.345093 -0.164976 0.998671 +v 0.345872 -0.17449 0.997717 +v 0.3466 -0.183981 0.996616 +v 0.347239 -0.193507 0.995492 +v 0.347846 -0.203012 0.994271 +v 0.348667 -0.212414 0.992901 +v 0.349454 -0.221792 0.99143 +v 0.350437 -0.231113 0.989946 +v 0.35141 -0.240416 0.988414 +v 0.352414 -0.249631 0.986846 +v 0.35341 -0.258838 0.985249 +v 0.354399 -0.268008 0.983673 +v 0.355383 -0.27717 0.982058 +v 0.356476 -0.286303 0.980372 +v 0.357569 -0.295429 0.978626 +v 0.358769 -0.304435 0.976787 +v 0.359791 -0.313515 0.974939 +v 0.36106 -0.322522 0.972808 +v 0.362202 -0.331526 0.970546 +v 0.363423 -0.340188 0.968195 +v 0.364548 -0.348846 0.965748 +v 0.365979 -0.357044 0.963019 +v 0.367257 -0.365332 0.960313 +v 0.3691 -0.373142 0.957127 +v 0.3709 -0.380946 0.953858 +v 0.372894 -0.388196 0.950519 +v 0.374874 -0.395441 0.947151 +v 0.37699 -0.402357 0.943709 +v 0.379066 -0.409248 0.940296 +v 0.380998 -0.415729 0.937101 +v 0.38288 -0.422207 0.933887 +v 0.384768 -0.428556 0.930457 +v 0.386538 -0.434895 0.927013 +v 0.388291 -0.441084 0.923546 +v 0.389986 -0.44724 0.920058 +v 0.391742 -0.453435 0.916397 +v 0.393387 -0.459599 0.912627 +v 0.394998 -0.465822 0.908774 +v 0.396517 -0.471996 0.904842 +v 0.398154 -0.47826 0.900585 +v 0.399692 -0.484497 0.896208 +v 0.401209 -0.490787 0.891739 +v 0.402597 -0.497035 0.887167 +v 0.404126 -0.50343 0.882246 +v 0.405561 -0.509792 0.877248 +v 0.406921 -0.51624 0.872187 +v 0.408251 -0.522675 0.86709 +v 0.409711 -0.529279 0.861631 +v 0.411132 -0.53584 0.856168 +v 0.4125 -0.542505 0.850637 +v 
0.413829 -0.549156 0.845076 +v 0.4153 -0.555905 0.83916 +v 0.4167 -0.56261 0.833209 +v 0.441069 0.427699 0.971245 +v 0.437285 0.419612 0.971356 +v 0.433436 0.411527 0.971372 +v 0.428629 0.40079 0.970975 +v 0.423714 0.390112 0.970378 +v 0.418865 0.380185 0.969085 +v 0.413913 0.370348 0.967538 +v 0.408991 0.361084 0.96535 +v 0.403998 0.351831 0.963042 +v 0.399271 0.343193 0.960308 +v 0.394533 0.334665 0.957518 +v 0.390534 0.32588 0.95566 +v 0.386683 0.31716 0.954012 +v 0.383095 0.31097 0.95319 +v 0.379454 0.304763 0.952314 +v 0.375284 0.299454 0.949265 +v 0.371082 0.29421 0.946132 +v 0.366851 0.292225 0.94046 +v 0.360583 0.289563 0.931445 +v 0.358661 0.283478 0.933312 +v 0.356445 0.277055 0.934975 +v 0.354914 0.271002 0.935972 +v 0.353285 0.264916 0.93679 +v 0.352128 0.259028 0.937099 +v 0.350824 0.25324 0.937255 +v 0.349886 0.247674 0.937092 +v 0.348948 0.24189 0.936478 +v 0.348539 0.23636 0.935466 +v 0.348038 0.230761 0.934217 +v 0.348372 0.22483 0.932239 +v 0.348611 0.218949 0.930053 +v 0.349016 0.215237 0.931211 +v 0.350328 0.210832 0.934229 +v 0.349846 0.203054 0.936471 +v 0.349314 0.195244 0.938561 +v 0.348253 0.187383 0.93773 +v 0.347113 0.179547 0.936749 +v 0.345997 0.171993 0.934586 +v 0.344885 0.164525 0.932397 +v 0.343902 0.156916 0.930938 +v 0.343066 0.149224 0.92994 +v 0.342389 0.141172 0.930804 +v 0.341868 0.133146 0.932213 +v 0.341587 0.124965 0.934668 +v 0.34135 0.1168 0.937263 +v 0.341416 0.108599 0.939859 +v 0.341476 0.100368 0.942431 +v 0.341673 0.0921594 0.94488 +v 0.34187 0.0839146 0.947315 +v 0.342157 0.0756372 0.949781 +v 0.342462 0.0673233 0.952277 +v 0.342862 0.0590184 0.954954 +v 0.343293 0.0507484 0.957748 +v 0.343677 0.042527 0.960635 +v 0.34407 0.0343033 0.963563 +v 0.344679 0.0260424 0.966389 +v 0.345294 0.0177354 0.969142 +v 0.345943 0.00929634 0.97185 +v 0.346576 0.000845137 0.974529 +v 0.347206 -0.00767873 0.977132 +v 0.347816 -0.0162282 0.979672 +v 0.348478 -0.0249912 0.982003 +v 0.349119 -0.0338226 0.984188 +v 0.349798 -0.0427211 0.986174 +v 0.350438 -0.0516474 0.988046 +v 0.351079 -0.0607367 0.989684 +v 0.351687 -0.0698416 0.991186 +v 0.35235 -0.0791259 0.992401 +v 0.352887 -0.0884303 0.99346 +v 0.35344 -0.0978151 0.994251 +v 0.353959 -0.10722 0.994872 +v 0.35462 -0.116702 0.995125 +v 0.355202 -0.126192 0.995194 +v 0.355876 -0.135737 0.995013 +v 0.356483 -0.145279 0.994681 +v 0.357142 -0.154811 0.994096 +v 0.357719 -0.164339 0.993335 +v 0.358386 -0.173833 0.992326 +v 0.358994 -0.183309 0.991178 +v 0.359633 -0.192797 0.989925 +v 0.360244 -0.202232 0.988583 +v 0.360968 -0.211577 0.987131 +v 0.361671 -0.220902 0.98562 +v 0.362463 -0.230133 0.983985 +v 0.363244 -0.239361 0.982257 +v 0.364142 -0.248531 0.980491 +v 0.365034 -0.257681 0.978688 +v 0.365924 -0.266716 0.976881 +v 0.366789 -0.275732 0.975033 +v 0.367767 -0.284744 0.973077 +v 0.368696 -0.293768 0.971094 +v 0.369704 -0.302656 0.969084 +v 0.370622 -0.311586 0.967062 +v 0.371767 -0.320464 0.964791 +v 0.372849 -0.329332 0.962464 +v 0.374051 -0.337839 0.960013 +v 0.375159 -0.346346 0.957496 +v 0.376506 -0.35448 0.954784 +v 0.377693 -0.362662 0.952033 +v 0.379355 -0.370438 0.948892 +v 0.381027 -0.378221 0.945762 +v 0.382943 -0.385511 0.942491 +v 0.384818 -0.392792 0.939262 +v 0.386799 -0.399766 0.935952 +v 0.388757 -0.406731 0.932551 +v 0.390545 -0.413317 0.929227 +v 0.392256 -0.419893 0.925873 +v 0.39398 -0.426317 0.922517 +v 0.395612 -0.43271 0.919073 +v 0.39728 -0.438979 0.91554 +v 0.398892 -0.445218 0.911975 +v 0.400513 -0.451394 0.908381 +v 0.401977 -0.457484 0.904703 +v 0.403493 -0.463699 0.900842 +v 
0.404869 -0.469867 0.896853 +v 0.406371 -0.476087 0.892623 +v 0.407735 -0.482261 0.888274 +v 0.409144 -0.488492 0.88375 +v 0.410449 -0.494693 0.879149 +v 0.411843 -0.501005 0.874334 +v 0.413142 -0.507278 0.869429 +v 0.414463 -0.513655 0.864424 +v 0.415711 -0.519997 0.859365 +v 0.417089 -0.526444 0.854046 +v 0.418414 -0.532878 0.848698 +v 0.41973 -0.539418 0.843202 +v 0.420987 -0.545936 0.837658 +v 0.422374 -0.552509 0.831928 +v 0.423714 -0.559064 0.826163 +v 0.449719 0.425984 0.965133 +v 0.445901 0.417892 0.96546 +v 0.442045 0.40978 0.965726 +v 0.437397 0.39906 0.965579 +v 0.432622 0.388392 0.965222 +v 0.427836 0.378453 0.964084 +v 0.422926 0.368605 0.962707 +v 0.418071 0.359279 0.960576 +v 0.413142 0.349967 0.958349 +v 0.40844 0.341311 0.955551 +v 0.403702 0.332823 0.952698 +v 0.39979 0.323956 0.950823 +v 0.396048 0.315073 0.94921 +v 0.39264 0.308515 0.948398 +v 0.38917 0.301904 0.947567 +v 0.384937 0.296247 0.944194 +v 0.380642 0.290723 0.940651 +v 0.376168 0.288777 0.935782 +v 0.369164 0.286547 0.926721 +v 0.367292 0.280805 0.928589 +v 0.365221 0.275038 0.930197 +v 0.363831 0.269432 0.931155 +v 0.362303 0.263831 0.931923 +v 0.361204 0.258266 0.93222 +v 0.360024 0.25266 0.932338 +v 0.359193 0.247435 0.932151 +v 0.358363 0.241967 0.931621 +v 0.358066 0.236819 0.930653 +v 0.357689 0.231636 0.929501 +v 0.358455 0.22621 0.927397 +v 0.359132 0.220807 0.925083 +v 0.359385 0.216829 0.926473 +v 0.360793 0.212091 0.92988 +v 0.360527 0.204188 0.932328 +v 0.36017 0.196252 0.934552 +v 0.359126 0.188296 0.933939 +v 0.357996 0.180378 0.933187 +v 0.356833 0.172774 0.931139 +v 0.355667 0.165224 0.929059 +v 0.354287 0.157568 0.927717 +v 0.353082 0.149805 0.926806 +v 0.35261 0.141733 0.927519 +v 0.352317 0.133689 0.928798 +v 0.352168 0.125591 0.931161 +v 0.352075 0.117523 0.93369 +v 0.352198 0.109321 0.93638 +v 0.352331 0.101099 0.939103 +v 0.352616 0.0929338 0.941649 +v 0.352904 0.0847641 0.944185 +v 0.353289 0.0765035 0.946681 +v 0.353683 0.068201 0.949189 +v 0.354116 0.0598513 0.951797 +v 0.354565 0.0515169 0.954477 +v 0.35509 0.0432794 0.957212 +v 0.355614 0.0350453 0.959948 +v 0.356256 0.0267743 0.962645 +v 0.356904 0.0184346 0.96524 +v 0.357634 0.0099668 0.967805 +v 0.358351 0.00148826 0.970339 +v 0.359071 -0.0070933 0.972827 +v 0.35976 -0.0156948 0.975243 +v 0.360502 -0.0244776 0.977525 +v 0.361214 -0.0333412 0.97962 +v 0.361936 -0.0422295 0.9815 +v 0.362606 -0.0511456 0.983254 +v 0.36329 -0.0602645 0.98481 +v 0.363941 -0.0694096 0.98622 +v 0.364696 -0.0786603 0.987325 +v 0.365373 -0.0879394 0.988249 +v 0.365968 -0.0973121 0.988948 +v 0.366479 -0.1067 0.989448 +v 0.367109 -0.116154 0.989605 +v 0.367668 -0.125606 0.98961 +v 0.36828 -0.135145 0.98934 +v 0.368811 -0.144679 0.988889 +v 0.369463 -0.154167 0.988207 +v 0.370068 -0.163633 0.987392 +v 0.370649 -0.1731 0.986389 +v 0.37114 -0.182546 0.98521 +v 0.371761 -0.191991 0.983821 +v 0.372396 -0.201363 0.982377 +v 0.373084 -0.210621 0.980889 +v 0.373672 -0.219878 0.97926 +v 0.374306 -0.229043 0.977524 +v 0.374904 -0.238199 0.97569 +v 0.37568 -0.247316 0.973701 +v 0.376455 -0.256427 0.971702 +v 0.377282 -0.265321 0.969727 +v 0.378083 -0.2742 0.967708 +v 0.378951 -0.283136 0.965606 +v 0.37977 -0.292097 0.963495 +v 0.380651 -0.300866 0.96135 +v 0.381463 -0.309635 0.959144 +v 0.382486 -0.318371 0.956747 +v 0.383442 -0.327106 0.954288 +v 0.384563 -0.335476 0.951713 +v 0.385623 -0.343839 0.949073 +v 0.386856 -0.351878 0.946324 +v 0.388001 -0.35996 0.943586 +v 0.38951 -0.367705 0.940566 +v 0.390988 -0.375448 0.937514 +v 0.392757 -0.382757 0.934285 +v 0.394449 
-0.39006 0.930991 +v 0.396288 -0.397096 0.927614 +v 0.398066 -0.404117 0.924157 +v 0.399693 -0.410823 0.920898 +v 0.401249 -0.417513 0.917598 +v 0.402831 -0.423977 0.914229 +v 0.404353 -0.430428 0.910801 +v 0.405942 -0.436779 0.90721 +v 0.407417 -0.443093 0.903542 +v 0.408826 -0.449203 0.899969 +v 0.410103 -0.455273 0.896294 +v 0.411488 -0.461452 0.892418 +v 0.412725 -0.467588 0.888429 +v 0.414106 -0.473751 0.884265 +v 0.415326 -0.479866 0.87998 +v 0.41669 -0.486081 0.875472 +v 0.417972 -0.492259 0.870859 +v 0.419281 -0.498494 0.866171 +v 0.420449 -0.504679 0.86139 +v 0.421717 -0.510968 0.856433 +v 0.422929 -0.51723 0.851409 +v 0.424216 -0.523511 0.846226 +v 0.425463 -0.529813 0.84107 +v 0.426788 -0.536258 0.835658 +v 0.42805 -0.542661 0.830182 +v 0.429313 -0.549056 0.824575 +v 0.430565 -0.555448 0.818881 +v 0.457805 0.423871 0.958882 +v 0.454036 0.415704 0.959389 +v 0.450229 0.407542 0.959817 +v 0.445658 0.39693 0.959869 +v 0.440966 0.386377 0.959742 +v 0.43633 0.376455 0.958675 +v 0.431573 0.366619 0.957408 +v 0.426657 0.357332 0.955369 +v 0.421656 0.34808 0.953214 +v 0.416903 0.339482 0.950412 +v 0.412092 0.331004 0.947506 +v 0.408138 0.322069 0.94547 +v 0.404403 0.313027 0.9437 +v 0.401068 0.306204 0.943068 +v 0.397682 0.299326 0.942386 +v 0.393228 0.293343 0.939203 +v 0.388684 0.287494 0.935866 +v 0.383913 0.285355 0.93136 +v 0.376092 0.282958 0.922474 +v 0.374888 0.278407 0.923829 +v 0.373388 0.273723 0.925095 +v 0.372117 0.268425 0.92602 +v 0.370712 0.263123 0.926792 +v 0.369642 0.257703 0.927081 +v 0.368463 0.252266 0.927199 +v 0.367812 0.247122 0.926842 +v 0.367073 0.24189 0.926362 +v 0.366829 0.237046 0.92542 +v 0.366504 0.232199 0.924344 +v 0.366897 0.227657 0.922731 +v 0.367213 0.223119 0.920934 +v 0.368001 0.219029 0.922326 +v 0.3699 0.214283 0.92544 +v 0.369815 0.206135 0.927929 +v 0.369626 0.197933 0.930192 +v 0.368838 0.189676 0.929574 +v 0.367969 0.181452 0.928766 +v 0.366668 0.173639 0.926957 +v 0.365314 0.165913 0.925078 +v 0.364173 0.158184 0.923801 +v 0.363227 0.15035 0.922941 +v 0.362715 0.142253 0.923713 +v 0.362407 0.134183 0.925048 +v 0.362351 0.126111 0.927401 +v 0.362353 0.118059 0.929926 +v 0.362596 0.109916 0.932664 +v 0.362851 0.101752 0.935454 +v 0.36328 0.0936495 0.938101 +v 0.363716 0.0855389 0.940737 +v 0.364241 0.0773465 0.943318 +v 0.364773 0.069127 0.945909 +v 0.365293 0.0607898 0.948498 +v 0.365811 0.0524539 0.951101 +v 0.366433 0.0441359 0.953718 +v 0.367049 0.0358241 0.956326 +v 0.367713 0.0274928 0.958869 +v 0.368419 0.0191607 0.96136 +v 0.369192 0.010673 0.963762 +v 0.369949 0.00216626 0.966143 +v 0.370752 -0.00645979 0.968459 +v 0.371526 -0.0150943 0.970751 +v 0.372356 -0.023851 0.972846 +v 0.373162 -0.0326322 0.974841 +v 0.373924 -0.0415874 0.976599 +v 0.37464 -0.0505782 0.978224 +v 0.375409 -0.0597289 0.979635 +v 0.376146 -0.0689034 0.980891 +v 0.376938 -0.0781459 0.981869 +v 0.377652 -0.0874037 0.982672 +v 0.378271 -0.0967403 0.983264 +v 0.378775 -0.106086 0.983701 +v 0.379381 -0.115527 0.983749 +v 0.379896 -0.124971 0.983587 +v 0.380453 -0.134487 0.983197 +v 0.380942 -0.144001 0.982668 +v 0.381516 -0.153473 0.981906 +v 0.382043 -0.162921 0.98102 +v 0.382631 -0.172335 0.97994 +v 0.383168 -0.181739 0.97876 +v 0.383708 -0.191132 0.977324 +v 0.38422 -0.200492 0.975815 +v 0.384779 -0.209687 0.974244 +v 0.385273 -0.218892 0.972546 +v 0.385874 -0.228001 0.970666 +v 0.386433 -0.237114 0.968761 +v 0.387073 -0.246168 0.966687 +v 0.387706 -0.255194 0.964572 +v 0.38841 -0.264011 0.962438 +v 0.389088 -0.272829 0.960281 +v 0.389854 -0.281612 0.958034 +v 
0.390513 -0.290462 0.955799 +v 0.391261 -0.299152 0.953508 +v 0.391964 -0.307834 0.951176 +v 0.392856 -0.316387 0.948702 +v 0.393707 -0.324929 0.946185 +v 0.394722 -0.333164 0.94352 +v 0.395672 -0.341393 0.94079 +v 0.396788 -0.349354 0.938012 +v 0.397868 -0.357337 0.935254 +v 0.399237 -0.365041 0.932259 +v 0.40056 -0.372747 0.929216 +v 0.40211 -0.380051 0.926026 +v 0.403642 -0.387352 0.922808 +v 0.405426 -0.39448 0.919351 +v 0.407121 -0.40159 0.915863 +v 0.408558 -0.408305 0.912645 +v 0.40992 -0.41501 0.909374 +v 0.411381 -0.4215 0.906029 +v 0.412734 -0.427968 0.902539 +v 0.414158 -0.43431 0.898937 +v 0.415449 -0.440604 0.89526 +v 0.41674 -0.44673 0.891649 +v 0.417946 -0.45282 0.88799 +v 0.41922 -0.458967 0.884149 +v 0.420384 -0.465073 0.880219 +v 0.421669 -0.471189 0.876124 +v 0.422808 -0.477269 0.871915 +v 0.424034 -0.483433 0.867517 +v 0.425169 -0.489556 0.863036 +v 0.426347 -0.495702 0.858401 +v 0.427458 -0.501818 0.853709 +v 0.428639 -0.508015 0.848831 +v 0.429773 -0.514194 0.843914 +v 0.431046 -0.520423 0.838899 +v 0.432194 -0.526611 0.833809 +v 0.433463 -0.532926 0.828569 +v 0.434701 -0.539206 0.823222 +v 0.436 -0.545511 0.817621 +v 0.437187 -0.551764 0.811938 +v 0.465806 0.4218 0.952501 +v 0.462105 0.413555 0.953202 +v 0.458329 0.405324 0.953788 +v 0.453854 0.39483 0.954074 +v 0.449245 0.384389 0.954158 +v 0.444786 0.374485 0.953183 +v 0.440171 0.364672 0.952009 +v 0.435218 0.355426 0.95009 +v 0.430169 0.346233 0.948091 +v 0.425345 0.337664 0.945214 +v 0.420461 0.329194 0.942292 +v 0.416452 0.320165 0.940027 +v 0.412722 0.310973 0.938158 +v 0.409474 0.303871 0.937683 +v 0.40616 0.296758 0.937158 +v 0.401502 0.290453 0.934184 +v 0.396758 0.284312 0.931058 +v 0.391673 0.281984 0.926936 +v 0.382777 0.279457 0.917795 +v 0.382163 0.275965 0.918682 +v 0.381209 0.272423 0.919551 +v 0.380079 0.26744 0.920429 +v 0.378821 0.26243 0.921161 +v 0.377809 0.257183 0.921464 +v 0.376694 0.251884 0.92158 +v 0.376115 0.246968 0.92117 +v 0.37547 0.241927 0.920645 +v 0.375269 0.237426 0.919748 +v 0.374998 0.232933 0.918752 +v 0.375021 0.229203 0.9176 +v 0.374986 0.225487 0.916345 +v 0.376622 0.2212 0.918136 +v 0.379011 0.216448 0.920961 +v 0.379098 0.20807 0.923506 +v 0.379046 0.199615 0.925764 +v 0.378515 0.191057 0.925087 +v 0.377872 0.182556 0.924244 +v 0.376418 0.174565 0.922622 +v 0.374905 0.166653 0.920971 +v 0.37401 0.15884 0.91975 +v 0.373306 0.150921 0.918931 +v 0.372785 0.142776 0.919813 +v 0.372439 0.134668 0.921152 +v 0.372479 0.126603 0.923508 +v 0.372587 0.11857 0.926047 +v 0.372943 0.110469 0.928816 +v 0.373322 0.10237 0.931676 +v 0.373898 0.0943271 0.934424 +v 0.374479 0.086283 0.937171 +v 0.375137 0.0781318 0.939811 +v 0.375794 0.0699811 0.942442 +v 0.37641 0.0616753 0.945075 +v 0.377007 0.0533468 0.947645 +v 0.377735 0.0449579 0.95013 +v 0.37845 0.0365712 0.952627 +v 0.37917 0.0282054 0.955058 +v 0.379907 0.0198399 0.957415 +v 0.38072 0.0113504 0.959668 +v 0.381513 0.00281904 0.961868 +v 0.382393 -0.00584773 0.964017 +v 0.383238 -0.0145336 0.96611 +v 0.384152 -0.023259 0.968004 +v 0.385036 -0.0320135 0.969811 +v 0.385839 -0.0410049 0.971508 +v 0.386583 -0.0500783 0.972998 +v 0.387429 -0.0592493 0.974246 +v 0.388242 -0.0684428 0.97533 +v 0.389063 -0.0776637 0.976134 +v 0.389804 -0.0869012 0.9768 +v 0.390379 -0.0961916 0.977263 +v 0.390887 -0.105498 0.977574 +v 0.391474 -0.114919 0.977507 +v 0.391955 -0.124336 0.977222 +v 0.392459 -0.133829 0.976725 +v 0.392887 -0.143321 0.976091 +v 0.393388 -0.15275 0.975265 +v 0.393833 -0.162174 0.974293 +v 0.394385 -0.17154 0.973096 +v 0.394886 
-0.180891 0.971802 +v 0.3954 -0.190225 0.970396 +v 0.395843 -0.199547 0.96888 +v 0.396262 -0.208728 0.96723 +v 0.396606 -0.217889 0.965451 +v 0.397164 -0.226953 0.963498 +v 0.397673 -0.236003 0.961467 +v 0.398227 -0.244966 0.959344 +v 0.398781 -0.253903 0.957191 +v 0.39938 -0.262652 0.954943 +v 0.399946 -0.271391 0.952649 +v 0.400592 -0.280065 0.950306 +v 0.40116 -0.288778 0.947977 +v 0.401806 -0.29739 0.945568 +v 0.402381 -0.305994 0.943088 +v 0.403158 -0.314352 0.94052 +v 0.403891 -0.322706 0.937912 +v 0.404808 -0.33082 0.935181 +v 0.405684 -0.338922 0.932411 +v 0.406654 -0.3468 0.929602 +v 0.407635 -0.354699 0.926823 +v 0.408848 -0.362357 0.92383 +v 0.410008 -0.370008 0.920797 +v 0.411354 -0.377308 0.917658 +v 0.412655 -0.384612 0.914534 +v 0.414338 -0.391822 0.911001 +v 0.41592 -0.399009 0.907334 +v 0.417128 -0.405734 0.904084 +v 0.418299 -0.412454 0.900786 +v 0.419578 -0.418952 0.897389 +v 0.420768 -0.425416 0.89395 +v 0.422053 -0.431745 0.89041 +v 0.423228 -0.43804 0.886781 +v 0.424425 -0.444196 0.883141 +v 0.42553 -0.4503 0.879474 +v 0.4267 -0.456406 0.875685 +v 0.427759 -0.462476 0.871808 +v 0.42891 -0.468534 0.867757 +v 0.429954 -0.474562 0.863626 +v 0.431067 -0.480669 0.859346 +v 0.432098 -0.486752 0.855015 +v 0.43323 -0.492831 0.850465 +v 0.434303 -0.498881 0.845854 +v 0.435441 -0.505008 0.84112 +v 0.436524 -0.511133 0.836381 +v 0.437728 -0.517277 0.831466 +v 0.438865 -0.523394 0.826512 +v 0.44006 -0.529551 0.821328 +v 0.441209 -0.535695 0.816087 +v 0.442498 -0.541882 0.810532 +v 0.443702 -0.548034 0.804926 +v 0.473288 0.419174 0.946147 +v 0.469508 0.410963 0.947005 +v 0.465659 0.402786 0.947769 +v 0.461329 0.392423 0.948231 +v 0.456838 0.382135 0.948448 +v 0.452427 0.372283 0.947563 +v 0.447862 0.362526 0.946474 +v 0.443042 0.353342 0.944589 +v 0.438087 0.34426 0.942612 +v 0.433209 0.335724 0.939757 +v 0.428258 0.327361 0.936779 +v 0.424081 0.318498 0.934375 +v 0.420215 0.309459 0.93235 +v 0.417102 0.302089 0.931892 +v 0.413929 0.294686 0.931397 +v 0.408693 0.288035 0.928106 +v 0.403309 0.281586 0.924597 +v 0.399226 0.279527 0.921241 +v 0.395167 0.277457 0.917912 +v 0.38968 0.275214 0.912624 +v 0.388943 0.273054 0.913347 +v 0.387632 0.267522 0.914468 +v 0.386151 0.261922 0.915438 +v 0.385204 0.256624 0.915703 +v 0.384143 0.251294 0.91581 +v 0.38372 0.246395 0.9153 +v 0.383197 0.241384 0.914623 +v 0.38312 0.236879 0.913577 +v 0.382955 0.232404 0.912437 +v 0.382729 0.229875 0.91181 +v 0.38291 0.227308 0.911652 +v 0.38446 0.223823 0.913533 +v 0.386037 0.220316 0.915427 +v 0.38694 0.210754 0.918349 +v 0.387657 0.201097 0.920917 +v 0.387136 0.192115 0.92025 +v 0.386482 0.183218 0.919415 +v 0.385287 0.175179 0.917795 +v 0.384008 0.167224 0.916186 +v 0.383571 0.159383 0.915036 +v 0.383327 0.151469 0.914261 +v 0.382651 0.143254 0.915402 +v 0.382138 0.135076 0.916907 +v 0.382248 0.127033 0.919329 +v 0.382415 0.119018 0.921911 +v 0.382927 0.110991 0.924712 +v 0.383471 0.10297 0.927588 +v 0.384156 0.0949265 0.930435 +v 0.384851 0.0868797 0.933296 +v 0.385662 0.0788033 0.936023 +v 0.386477 0.0707283 0.93873 +v 0.387221 0.0624905 0.941383 +v 0.387946 0.0541953 0.94397 +v 0.388779 0.0457854 0.946401 +v 0.389601 0.0373655 0.948809 +v 0.390429 0.0289824 0.951086 +v 0.391255 0.020598 0.953272 +v 0.392123 0.0120749 0.955425 +v 0.392955 0.00346273 0.957455 +v 0.393836 -0.00521645 0.959397 +v 0.394698 -0.0139131 0.961295 +v 0.395645 -0.0227228 0.963047 +v 0.396559 -0.0315554 0.964694 +v 0.397413 -0.0405125 0.96617 +v 0.398214 -0.0494933 0.96752 +v 0.399082 -0.0586483 0.968614 +v 0.399912 
-0.0678299 0.969554 +v 0.400692 -0.0770716 0.970212 +v 0.401389 -0.0863241 0.970723 +v 0.402036 -0.0956604 0.970994 +v 0.402598 -0.105005 0.971112 +v 0.403154 -0.11439 0.970879 +v 0.403637 -0.123773 0.970487 +v 0.404101 -0.133224 0.969904 +v 0.404486 -0.142667 0.969194 +v 0.404941 -0.15208 0.968259 +v 0.405349 -0.161476 0.967205 +v 0.4058 -0.170794 0.965933 +v 0.4062 -0.180102 0.964573 +v 0.406642 -0.189367 0.963083 +v 0.407023 -0.198631 0.961496 +v 0.407372 -0.207794 0.959775 +v 0.407676 -0.216906 0.957928 +v 0.408085 -0.225914 0.955919 +v 0.408473 -0.234913 0.953877 +v 0.408945 -0.243791 0.951701 +v 0.409403 -0.252661 0.949501 +v 0.40992 -0.261341 0.947214 +v 0.410418 -0.270014 0.9449 +v 0.410936 -0.278611 0.942527 +v 0.411412 -0.287204 0.940129 +v 0.411928 -0.295686 0.937683 +v 0.412384 -0.30416 0.935176 +v 0.413035 -0.312432 0.932533 +v 0.413665 -0.3207 0.929863 +v 0.414442 -0.32869 0.927079 +v 0.415177 -0.336679 0.924277 +v 0.416048 -0.344487 0.921439 +v 0.416878 -0.352288 0.91858 +v 0.417919 -0.35984 0.915592 +v 0.418885 -0.367383 0.912547 +v 0.420037 -0.374674 0.909447 +v 0.421171 -0.381962 0.906332 +v 0.4226 -0.389083 0.90291 +v 0.423902 -0.396168 0.899369 +v 0.425044 -0.402914 0.896103 +v 0.426087 -0.40964 0.89275 +v 0.427256 -0.416133 0.889396 +v 0.428348 -0.422614 0.885978 +v 0.429511 -0.428978 0.882488 +v 0.430523 -0.435306 0.878893 +v 0.43163 -0.441487 0.875265 +v 0.432629 -0.447613 0.871608 +v 0.433711 -0.453708 0.867846 +v 0.434672 -0.459774 0.863975 +v 0.435725 -0.465776 0.85998 +v 0.436701 -0.471748 0.855914 +v 0.437795 -0.477838 0.851606 +v 0.438824 -0.4839 0.847233 +v 0.439917 -0.489909 0.84285 +v 0.440891 -0.495878 0.838373 +v 0.442002 -0.501931 0.833751 +v 0.443038 -0.507982 0.829149 +v 0.444213 -0.514025 0.824388 +v 0.445306 -0.520047 0.81951 +v 0.446475 -0.526116 0.814394 +v 0.447575 -0.532157 0.809234 +v 0.448754 -0.538168 0.803789 +v 0.449951 -0.544156 0.798306 +v 0.480703 0.416551 0.939706 +v 0.476831 0.408404 0.940709 +v 0.472887 0.400266 0.941616 +v 0.468735 0.39005 0.942288 +v 0.464383 0.379901 0.942675 +v 0.460024 0.370114 0.941863 +v 0.455509 0.360408 0.940874 +v 0.450804 0.351299 0.938999 +v 0.445986 0.342261 0.937038 +v 0.44104 0.333792 0.93421 +v 0.436068 0.325539 0.931276 +v 0.431702 0.316845 0.928676 +v 0.427685 0.30793 0.926511 +v 0.424747 0.300292 0.926067 +v 0.421734 0.292649 0.925626 +v 0.415891 0.285633 0.921988 +v 0.409878 0.278907 0.91809 +v 0.406774 0.277088 0.915525 +v 0.403702 0.275321 0.91292 +v 0.401276 0.274448 0.91143 +v 0.396227 0.273672 0.906757 +v 0.394827 0.267621 0.908104 +v 0.39319 0.261428 0.909272 +v 0.392316 0.256124 0.90951 +v 0.391334 0.250906 0.909665 +v 0.391011 0.245906 0.908984 +v 0.390583 0.240932 0.908192 +v 0.390576 0.23644 0.907003 +v 0.390476 0.231995 0.905705 +v 0.391089 0.230321 0.906918 +v 0.391491 0.228672 0.907956 +v 0.392258 0.226399 0.908891 +v 0.39304 0.22415 0.909848 +v 0.394778 0.213416 0.913175 +v 0.396228 0.202579 0.915996 +v 0.395698 0.193193 0.91532 +v 0.39503 0.183908 0.914489 +v 0.394059 0.175824 0.912871 +v 0.393055 0.167808 0.911302 +v 0.393073 0.159947 0.910204 +v 0.393279 0.152034 0.909455 +v 0.392469 0.143729 0.910874 +v 0.391812 0.135481 0.912613 +v 0.391973 0.127442 0.915056 +v 0.392207 0.119449 0.917695 +v 0.392869 0.111482 0.920504 +v 0.39357 0.103527 0.9234 +v 0.394372 0.095492 0.926332 +v 0.395177 0.0874427 0.9293 +v 0.396146 0.0794338 0.932095 +v 0.397101 0.0714221 0.934849 +v 0.397971 0.0632524 0.937507 +v 0.398817 0.0549821 0.940073 +v 0.399772 0.0465648 0.942495 +v 0.400694 0.0381249 
0.944837 +v 0.401634 0.0297245 0.946948 +v 0.402552 0.0213078 0.948983 +v 0.403449 0.0126935 0.950964 +v 0.404314 0.00403731 0.952859 +v 0.4052 -0.00464737 0.954603 +v 0.406076 -0.013346 0.956328 +v 0.406991 -0.0222251 0.957956 +v 0.407929 -0.0311293 0.959439 +v 0.408883 -0.0400425 0.9607 +v 0.409782 -0.0489735 0.961855 +v 0.410642 -0.0580784 0.962809 +v 0.411461 -0.0672463 0.963543 +v 0.41221 -0.076517 0.96404 +v 0.412879 -0.085803 0.9644 +v 0.413588 -0.0951546 0.964491 +v 0.414169 -0.104516 0.964407 +v 0.414704 -0.113864 0.964038 +v 0.415174 -0.123204 0.963516 +v 0.415572 -0.132603 0.962818 +v 0.415872 -0.14201 0.962036 +v 0.416307 -0.1514 0.961003 +v 0.416672 -0.16077 0.959826 +v 0.41704 -0.170037 0.958506 +v 0.417332 -0.17929 0.957079 +v 0.417692 -0.188483 0.955491 +v 0.417971 -0.197678 0.953806 +v 0.418276 -0.206805 0.952038 +v 0.418533 -0.215878 0.950143 +v 0.418818 -0.224833 0.948111 +v 0.419083 -0.233767 0.946033 +v 0.419485 -0.242562 0.943813 +v 0.419848 -0.251349 0.94155 +v 0.420246 -0.259975 0.939221 +v 0.420563 -0.268602 0.936834 +v 0.420953 -0.277095 0.934388 +v 0.421345 -0.285587 0.93194 +v 0.421746 -0.293956 0.929479 +v 0.422098 -0.302312 0.926973 +v 0.422659 -0.310483 0.924291 +v 0.423179 -0.318648 0.921558 +v 0.423813 -0.326523 0.918745 +v 0.424394 -0.334415 0.915927 +v 0.425155 -0.342146 0.913046 +v 0.425892 -0.349869 0.910143 +v 0.426748 -0.357319 0.90714 +v 0.427574 -0.364773 0.904126 +v 0.428554 -0.372034 0.901035 +v 0.429462 -0.379278 0.89788 +v 0.430564 -0.386284 0.894564 +v 0.431591 -0.393271 0.891175 +v 0.432607 -0.400013 0.887863 +v 0.433579 -0.406735 0.884551 +v 0.434643 -0.413244 0.88119 +v 0.435654 -0.419743 0.877786 +v 0.436646 -0.426135 0.874329 +v 0.437525 -0.432495 0.870798 +v 0.43854 -0.438683 0.867198 +v 0.439458 -0.444829 0.863539 +v 0.440473 -0.450927 0.859797 +v 0.441365 -0.456993 0.855952 +v 0.442351 -0.462938 0.85201 +v 0.443301 -0.468872 0.848051 +v 0.444397 -0.474953 0.843721 +v 0.445445 -0.481017 0.83937 +v 0.446459 -0.486955 0.835131 +v 0.447385 -0.492859 0.830816 +v 0.448395 -0.498816 0.826299 +v 0.449426 -0.504799 0.821762 +v 0.450511 -0.510733 0.817086 +v 0.451498 -0.516627 0.812346 +v 0.452653 -0.522594 0.807325 +v 0.453727 -0.528542 0.802256 +v 0.454953 -0.534409 0.796964 +v 0.456135 -0.540257 0.791639 +v 0.48761 0.413839 0.933161 +v 0.483524 0.405658 0.934601 +v 0.479308 0.397533 0.935904 +v 0.4755 0.387413 0.936401 +v 0.471513 0.377358 0.936644 +v 0.46721 0.367718 0.935923 +v 0.462734 0.358182 0.934978 +v 0.458028 0.349173 0.933205 +v 0.453189 0.34025 0.931315 +v 0.448291 0.331877 0.928627 +v 0.443361 0.323803 0.925808 +v 0.438697 0.315421 0.922923 +v 0.434376 0.306812 0.920458 +v 0.431439 0.299304 0.919959 +v 0.428455 0.291746 0.919493 +v 0.423535 0.284957 0.916607 +v 0.418327 0.278397 0.913412 +v 0.414932 0.2758 0.910193 +v 0.411555 0.273237 0.906966 +v 0.407989 0.271842 0.904337 +v 0.402904 0.270657 0.899975 +v 0.401178 0.265568 0.901684 +v 0.399266 0.26031 0.903284 +v 0.398494 0.255267 0.903426 +v 0.397624 0.250433 0.903566 +v 0.397578 0.245014 0.902492 +v 0.397411 0.239655 0.901286 +v 0.396869 0.236585 0.900936 +v 0.397379 0.233301 0.901546 +v 0.39818 0.231664 0.902398 +v 0.398998 0.230044 0.90327 +v 0.40046 0.225982 0.905024 +v 0.401936 0.221857 0.906757 +v 0.403154 0.212042 0.908862 +v 0.404107 0.202241 0.910558 +v 0.403955 0.193261 0.909573 +v 0.403672 0.184381 0.908455 +v 0.402446 0.176186 0.907328 +v 0.401205 0.168027 0.906233 +v 0.401126 0.160076 0.905588 +v 0.401192 0.152087 0.905241 +v 0.400852 0.143885 0.906626 +v 0.400668 
[... Wavefront OBJ mesh data: several thousand added vertex lines of the form "v x y z" omitted ...]
0.800934 +v 0.551723 0.129007 0.801613 +v 0.554719 0.121881 0.802218 +v 0.557658 0.114533 0.802882 +v 0.560481 0.10717 0.803469 +v 0.563315 0.0996438 0.804009 +v 0.565979 0.0920547 0.804402 +v 0.56858 0.0841957 0.804763 +v 0.570988 0.0762827 0.804976 +v 0.573367 0.068275 0.805172 +v 0.575449 0.0601936 0.805145 +v 0.577457 0.0519745 0.805085 +v 0.579241 0.0436515 0.804782 +v 0.581065 0.035202 0.80438 +v 0.582505 0.0267 0.803723 +v 0.583996 0.0181276 0.803101 +v 0.585074 0.00953662 0.802215 +v 0.586214 0.000910122 0.80135 +v 0.586977 -0.00773391 0.800226 +v 0.587799 -0.0164046 0.799164 +v 0.588283 -0.0250588 0.797885 +v 0.588873 -0.033742 0.796605 +v 0.58903 -0.0423902 0.795063 +v 0.589313 -0.05117 0.793536 +v 0.589218 -0.059925 0.79178 +v 0.589182 -0.0686853 0.790076 +v 0.5889 -0.0774063 0.788234 +v 0.588653 -0.0860863 0.786488 +v 0.588162 -0.0947707 0.784531 +v 0.587776 -0.10344 0.782645 +v 0.587199 -0.112077 0.780644 +v 0.586633 -0.120636 0.778698 +v 0.585916 -0.12916 0.77667 +v 0.585262 -0.137613 0.774744 +v 0.584466 -0.146032 0.772743 +v 0.583619 -0.154304 0.770802 +v 0.582732 -0.162602 0.768801 +v 0.5819 -0.170794 0.766832 +v 0.580943 -0.178964 0.764799 +v 0.58011 -0.187016 0.762806 +v 0.579131 -0.195011 0.760746 +v 0.578206 -0.202871 0.758758 +v 0.577271 -0.210728 0.75677 +v 0.576344 -0.218397 0.754815 +v 0.575388 -0.226082 0.752842 +v 0.57442 -0.233606 0.750909 +v 0.57345 -0.241111 0.748952 +v 0.572481 -0.248486 0.747 +v 0.571537 -0.255873 0.74509 +v 0.570608 -0.263124 0.743294 +v 0.569582 -0.270342 0.741453 +v 0.568616 -0.277392 0.739691 +v 0.567707 -0.284455 0.737954 +v 0.56674 -0.291439 0.736259 +v 0.565786 -0.298474 0.734591 +v 0.564831 -0.305308 0.732995 +v 0.563862 -0.31214 0.73139 +v 0.56293 -0.318804 0.729916 +v 0.562023 -0.325471 0.728441 +v 0.561136 -0.331913 0.727047 +v 0.560253 -0.33836 0.725653 +v 0.559341 -0.344713 0.724426 +v 0.558381 -0.351052 0.723194 +v 0.557495 -0.357191 0.722102 +v 0.556513 -0.363305 0.720945 +v 0.555567 -0.369302 0.719844 +v 0.554653 -0.375296 0.718754 +v 0.553793 -0.381088 0.717748 +v 0.552831 -0.386854 0.716682 +v 0.551926 -0.392453 0.71572 +v 0.550973 -0.39803 0.714738 +v 0.550078 -0.403409 0.71385 +v 0.549087 -0.408763 0.712911 +v 0.548163 -0.413862 0.712081 +v 0.547208 -0.41895 0.711237 +v 0.546273 -0.423784 0.710512 +v 0.54529 -0.428617 0.709752 +v 0.544432 -0.433157 0.708992 +v 0.54349 -0.437672 0.70818 +v 0.542668 -0.441753 0.707418 +v 0.541818 -0.44583 0.706652 +v 0.541165 -0.449696 0.705702 +v 0.540495 -0.453556 0.704739 +v 0.540203 -0.457034 0.703315 +v 0.539863 -0.460491 0.701865 +v 0.540193 -0.463562 0.699476 +v 0.540455 -0.466615 0.69705 +v 0.562253 0.35815 0.818103 +v 0.560687 0.353775 0.820116 +v 0.55893 0.349439 0.822037 +v 0.557601 0.345127 0.823015 +v 0.556068 0.340862 0.823889 +v 0.55474 0.336431 0.824203 +v 0.553266 0.332032 0.824442 +v 0.551838 0.327428 0.824336 +v 0.550294 0.322874 0.82416 +v 0.548766 0.318176 0.823743 +v 0.547106 0.313516 0.82325 +v 0.545287 0.308628 0.822591 +v 0.543396 0.30378 0.821861 +v 0.541468 0.298898 0.821016 +v 0.539493 0.294038 0.820139 +v 0.537494 0.289135 0.819188 +v 0.535458 0.28427 0.8182 +v 0.533399 0.279345 0.817135 +v 0.531321 0.274433 0.816048 +v 0.529198 0.269645 0.815044 +v 0.52707 0.264889 0.814041 +v 0.524774 0.26015 0.813236 +v 0.522502 0.255376 0.812427 +v 0.520639 0.25076 0.811401 +v 0.518772 0.246143 0.810396 +v 0.519008 0.24126 0.807686 +v 0.519377 0.23631 0.805121 +v 0.520493 0.230985 0.802308 +v 0.521806 0.225609 0.799719 +v 0.523113 0.219759 0.797695 +v 0.52468 0.213871 
0.795958 +v 0.526493 0.2078 0.794609 +v 0.528518 0.201733 0.793516 +v 0.530706 0.195536 0.792796 +v 0.533028 0.189356 0.792233 +v 0.535586 0.182951 0.791982 +v 0.538236 0.176569 0.791828 +v 0.541004 0.17 0.791879 +v 0.543843 0.163456 0.792006 +v 0.546721 0.15676 0.792336 +v 0.549632 0.150075 0.792712 +v 0.55264 0.14322 0.793133 +v 0.555656 0.136367 0.793567 +v 0.558673 0.129254 0.794065 +v 0.56164 0.122117 0.794521 +v 0.564556 0.114792 0.794955 +v 0.567373 0.107452 0.79533 +v 0.570176 0.0998911 0.7957 +v 0.572807 0.0922856 0.795941 +v 0.575369 0.0844537 0.796111 +v 0.577749 0.0765691 0.796139 +v 0.580092 0.0685368 0.796142 +v 0.582171 0.0604428 0.795969 +v 0.584175 0.0521962 0.795706 +v 0.585983 0.043901 0.795285 +v 0.587773 0.0354668 0.794778 +v 0.589188 0.0269697 0.794041 +v 0.590624 0.0184259 0.793265 +v 0.591737 0.0098601 0.792288 +v 0.592805 0.00124821 0.79129 +v 0.593512 -0.00737261 0.790085 +v 0.594356 -0.0160148 0.788917 +v 0.594847 -0.0246551 0.787526 +v 0.595423 -0.0333348 0.786173 +v 0.595538 -0.0419835 0.784567 +v 0.595724 -0.0507117 0.782941 +v 0.595645 -0.0594409 0.781141 +v 0.595564 -0.0681959 0.779398 +v 0.595227 -0.0768886 0.777558 +v 0.594956 -0.0855712 0.775813 +v 0.594504 -0.0942356 0.773901 +v 0.593999 -0.102866 0.77199 +v 0.593429 -0.11147 0.770044 +v 0.592872 -0.120013 0.768136 +v 0.592104 -0.128516 0.766131 +v 0.591372 -0.136922 0.764176 +v 0.590549 -0.145319 0.76218 +v 0.589673 -0.15362 0.760238 +v 0.588678 -0.161895 0.758242 +v 0.587784 -0.170052 0.756269 +v 0.586876 -0.178207 0.754302 +v 0.585963 -0.186229 0.752314 +v 0.584983 -0.194198 0.750291 +v 0.584012 -0.20203 0.748344 +v 0.58301 -0.209852 0.746366 +v 0.581986 -0.217463 0.744448 +v 0.581029 -0.225097 0.742559 +v 0.580015 -0.232552 0.740664 +v 0.579099 -0.240049 0.73879 +v 0.578145 -0.247413 0.73699 +v 0.577088 -0.25476 0.735155 +v 0.576106 -0.261949 0.733398 +v 0.575081 -0.269145 0.731642 +v 0.57405 -0.276189 0.729933 +v 0.573129 -0.283253 0.728277 +v 0.572123 -0.290206 0.726666 +v 0.571154 -0.297226 0.725098 +v 0.570198 -0.304046 0.723602 +v 0.569209 -0.310852 0.722096 +v 0.568216 -0.317491 0.720715 +v 0.567267 -0.324147 0.719343 +v 0.566323 -0.330574 0.718063 +v 0.565372 -0.336994 0.716779 +v 0.564373 -0.343296 0.715658 +v 0.563394 -0.349594 0.714586 +v 0.562453 -0.355733 0.713617 +v 0.561502 -0.361877 0.712643 +v 0.560477 -0.367842 0.711708 +v 0.559456 -0.373804 0.710773 +v 0.55848 -0.379588 0.709942 +v 0.557432 -0.385358 0.709075 +v 0.556419 -0.390963 0.708287 +v 0.555424 -0.396564 0.70752 +v 0.554422 -0.401951 0.706824 +v 0.553329 -0.407323 0.706095 +v 0.552258 -0.412396 0.705479 +v 0.551173 -0.417469 0.704844 +v 0.55014 -0.422287 0.704339 +v 0.549044 -0.427086 0.703795 +v 0.548066 -0.431582 0.703289 +v 0.547042 -0.436077 0.702756 +v 0.546061 -0.440163 0.702295 +v 0.545028 -0.444222 0.701796 +v 0.544128 -0.448005 0.70125 +v 0.543247 -0.451791 0.700709 +v 0.542799 -0.45517 0.69961 +v 0.54233 -0.458535 0.698498 +v 0.54246 -0.461416 0.696517 +v 0.542551 -0.464282 0.694519 +v 0.563312 0.356098 0.815222 +v 0.562011 0.352234 0.816889 +v 0.560513 0.348411 0.818462 +v 0.559392 0.344379 0.819179 +v 0.55811 0.340384 0.819809 +v 0.557102 0.336299 0.81982 +v 0.555964 0.332246 0.819759 +v 0.554732 0.327742 0.819561 +v 0.553384 0.323289 0.819291 +v 0.55211 0.318688 0.818756 +v 0.550688 0.314147 0.81812 +v 0.549161 0.309302 0.817418 +v 0.547487 0.304516 0.816602 +v 0.545825 0.29964 0.8157 +v 0.544049 0.294814 0.814723 +v 0.542265 0.289887 0.813753 +v 0.540437 0.284992 0.812739 +v 0.538562 0.279969 0.811652 +v 
0.536603 0.274984 0.810497 +v 0.534662 0.270147 0.809557 +v 0.532643 0.265405 0.808545 +v 0.530699 0.26055 0.80758 +v 0.528704 0.255681 0.806563 +v 0.527314 0.25096 0.80534 +v 0.52586 0.246284 0.804074 +v 0.525997 0.241351 0.801602 +v 0.526142 0.236417 0.799152 +v 0.527315 0.230992 0.796451 +v 0.528548 0.225555 0.793808 +v 0.530018 0.219785 0.791774 +v 0.5316 0.213999 0.789864 +v 0.533446 0.207913 0.788554 +v 0.535383 0.201831 0.787363 +v 0.537681 0.195634 0.786609 +v 0.540013 0.189437 0.785889 +v 0.542613 0.183052 0.78558 +v 0.545218 0.176675 0.785288 +v 0.548028 0.170137 0.78525 +v 0.550842 0.163611 0.785207 +v 0.553704 0.156957 0.785405 +v 0.556528 0.150286 0.785595 +v 0.559552 0.143436 0.785854 +v 0.562524 0.136572 0.786068 +v 0.5655 0.129454 0.786406 +v 0.568394 0.122305 0.786673 +v 0.571312 0.115012 0.786907 +v 0.574075 0.107685 0.787034 +v 0.576824 0.100097 0.787232 +v 0.579385 0.0924449 0.787289 +v 0.581902 0.0846361 0.78727 +v 0.584198 0.0767799 0.787087 +v 0.586509 0.0687278 0.786901 +v 0.58852 0.0606062 0.7865 +v 0.590546 0.0523678 0.786091 +v 0.592262 0.0440851 0.785475 +v 0.593985 0.0356686 0.784851 +v 0.595302 0.0272022 0.783989 +v 0.596748 0.018696 0.783114 +v 0.597763 0.0101714 0.781986 +v 0.598807 0.00159634 0.780918 +v 0.599482 -0.00701096 0.779596 +v 0.600265 -0.0156525 0.778243 +v 0.600685 -0.024242 0.776712 +v 0.601223 -0.0328891 0.775304 +v 0.601292 -0.0414861 0.77364 +v 0.601432 -0.050166 0.771969 +v 0.601357 -0.0588705 0.770118 +v 0.601272 -0.0675963 0.768378 +v 0.600865 -0.0762602 0.766523 +v 0.600543 -0.0849101 0.76474 +v 0.600044 -0.0935524 0.762834 +v 0.599616 -0.102173 0.761002 +v 0.59891 -0.110732 0.759034 +v 0.598251 -0.119229 0.75713 +v 0.597553 -0.127714 0.755197 +v 0.596829 -0.136097 0.753269 +v 0.595947 -0.144503 0.751237 +v 0.595052 -0.152783 0.749331 +v 0.594045 -0.161035 0.747377 +v 0.59314 -0.169175 0.745449 +v 0.592104 -0.177288 0.743468 +v 0.591171 -0.185274 0.741516 +v 0.590146 -0.193215 0.739531 +v 0.589181 -0.201028 0.737639 +v 0.588146 -0.20882 0.735712 +v 0.587104 -0.216395 0.733867 +v 0.586124 -0.223984 0.732046 +v 0.585159 -0.231411 0.730223 +v 0.584147 -0.238878 0.728392 +v 0.583129 -0.246209 0.72667 +v 0.58205 -0.253523 0.724921 +v 0.581053 -0.26068 0.723245 +v 0.580071 -0.267843 0.721576 +v 0.579078 -0.274887 0.719976 +v 0.578112 -0.281949 0.718383 +v 0.577123 -0.288882 0.716882 +v 0.576126 -0.295879 0.715391 +v 0.575082 -0.302655 0.713963 +v 0.574131 -0.309451 0.712573 +v 0.573168 -0.316087 0.711315 +v 0.572185 -0.322724 0.710048 +v 0.571231 -0.329149 0.708903 +v 0.570221 -0.335559 0.70774 +v 0.569209 -0.341832 0.70678 +v 0.568159 -0.348073 0.705845 +v 0.56713 -0.354216 0.705003 +v 0.566063 -0.360352 0.70414 +v 0.564995 -0.366288 0.703378 +v 0.563958 -0.372223 0.702622 +v 0.562891 -0.378019 0.701987 +v 0.561762 -0.383792 0.701332 +v 0.560686 -0.389413 0.700739 +v 0.559541 -0.395014 0.700117 +v 0.558413 -0.400402 0.699628 +v 0.557261 -0.405783 0.699121 +v 0.556134 -0.41087 0.698756 +v 0.554939 -0.415931 0.698351 +v 0.553773 -0.420723 0.698043 +v 0.552571 -0.425505 0.697717 +v 0.551461 -0.429961 0.697459 +v 0.550307 -0.434409 0.697181 +v 0.549209 -0.438493 0.697029 +v 0.548064 -0.442569 0.696851 +v 0.547017 -0.446299 0.696754 +v 0.545905 -0.450007 0.696629 +v 0.545298 -0.453281 0.69585 +v 0.544682 -0.456549 0.695068 +v 0.54465 -0.459243 0.693525 +v 0.544591 -0.461929 0.691964 +v 0.564138 0.354834 0.813137 +v 0.563265 0.351563 0.81409 +v 0.562257 0.348309 0.814999 +v 0.561431 0.34459 0.81531 +v 0.560508 0.340892 0.815567 +v 0.559685 0.336847 
0.815411 +v 0.558754 0.33283 0.815196 +v 0.557801 0.328439 0.814781 +v 0.556737 0.324083 0.814309 +v 0.555663 0.319539 0.813638 +v 0.554472 0.315026 0.81292 +v 0.553176 0.310203 0.812103 +v 0.551781 0.305428 0.811223 +v 0.550299 0.300551 0.810256 +v 0.548741 0.295714 0.809243 +v 0.547175 0.290802 0.808189 +v 0.545601 0.285883 0.807131 +v 0.54395 0.280864 0.805975 +v 0.542275 0.275868 0.804792 +v 0.540489 0.270946 0.803755 +v 0.538695 0.266068 0.802708 +v 0.536768 0.26112 0.801828 +v 0.534857 0.256151 0.800972 +v 0.533632 0.251361 0.799617 +v 0.53244 0.24656 0.798308 +v 0.532729 0.241583 0.795711 +v 0.533138 0.23656 0.793261 +v 0.534262 0.231215 0.790456 +v 0.535629 0.225804 0.787901 +v 0.537034 0.220163 0.785778 +v 0.538686 0.214486 0.783922 +v 0.540542 0.208418 0.782405 +v 0.542611 0.202345 0.781122 +v 0.544876 0.196112 0.780166 +v 0.547269 0.189896 0.779364 +v 0.549823 0.183477 0.778856 +v 0.552477 0.177086 0.778467 +v 0.555179 0.170562 0.778283 +v 0.557943 0.164059 0.778161 +v 0.560762 0.157366 0.778138 +v 0.563619 0.150708 0.778163 +v 0.566546 0.143851 0.77823 +v 0.569456 0.136983 0.77829 +v 0.572358 0.129869 0.778369 +v 0.57523 0.122739 0.778421 +v 0.578041 0.115436 0.778433 +v 0.580771 0.108105 0.778388 +v 0.583462 0.100511 0.778364 +v 0.585982 0.0928622 0.778228 +v 0.588467 0.0850252 0.778058 +v 0.590717 0.0771219 0.77773 +v 0.592923 0.0690647 0.777322 +v 0.594969 0.0609405 0.776769 +v 0.596981 0.0526946 0.776255 +v 0.598607 0.0443981 0.775509 +v 0.600288 0.0360117 0.774765 +v 0.601617 0.0275826 0.773823 +v 0.60289 0.0190749 0.772819 +v 0.603816 0.0105617 0.771638 +v 0.60492 0.00199411 0.770478 +v 0.605588 -0.00660552 0.769042 +v 0.606329 -0.0152263 0.767572 +v 0.606694 -0.0238212 0.765911 +v 0.607161 -0.032446 0.764382 +v 0.607274 -0.0410284 0.762681 +v 0.607414 -0.0496665 0.761 +v 0.607259 -0.0583287 0.759103 +v 0.607136 -0.067013 0.757354 +v 0.606809 -0.0756668 0.75553 +v 0.606411 -0.0842929 0.753711 +v 0.605903 -0.0929198 0.751838 +v 0.605389 -0.10151 0.749992 +v 0.604655 -0.110055 0.748055 +v 0.603952 -0.118537 0.746175 +v 0.603139 -0.127008 0.744256 +v 0.602348 -0.135353 0.742373 +v 0.601412 -0.143749 0.740373 +v 0.600482 -0.151991 0.738497 +v 0.599451 -0.160227 0.736574 +v 0.598421 -0.168315 0.734647 +v 0.597428 -0.176409 0.732738 +v 0.596443 -0.184396 0.730847 +v 0.595433 -0.192339 0.728934 +v 0.594443 -0.200109 0.727076 +v 0.593405 -0.207877 0.725206 +v 0.592413 -0.215445 0.723414 +v 0.59143 -0.223016 0.721624 +v 0.590419 -0.230447 0.719882 +v 0.589326 -0.237867 0.718105 +v 0.588206 -0.24517 0.716433 +v 0.587128 -0.252463 0.714775 +v 0.586101 -0.259597 0.713185 +v 0.585125 -0.266743 0.711612 +v 0.58414 -0.273794 0.710103 +v 0.583098 -0.280823 0.708581 +v 0.58207 -0.287715 0.707131 +v 0.581138 -0.294705 0.705757 +v 0.580172 -0.301503 0.704448 +v 0.579204 -0.308289 0.703139 +v 0.57819 -0.314891 0.701952 +v 0.577202 -0.321494 0.700779 +v 0.57619 -0.32791 0.699752 +v 0.575131 -0.334321 0.698723 +v 0.573979 -0.340535 0.697868 +v 0.572928 -0.346769 0.697085 +v 0.571836 -0.352887 0.696409 +v 0.570773 -0.359004 0.695731 +v 0.569691 -0.364956 0.695156 +v 0.568538 -0.370897 0.694553 +v 0.567392 -0.37668 0.694086 +v 0.566183 -0.382447 0.69359 +v 0.565004 -0.3881 0.693177 +v 0.563738 -0.393736 0.692736 +v 0.562502 -0.399114 0.692425 +v 0.561286 -0.404505 0.692112 +v 0.560064 -0.409622 0.691984 +v 0.558751 -0.414721 0.691813 +v 0.557488 -0.419511 0.691741 +v 0.55614 -0.424291 0.691645 +v 0.5549 -0.428755 0.6916 +v 0.553654 -0.433213 0.691543 +v 0.552364 -0.437272 0.69171 +v 0.551038 
-0.441325 0.691847 +v 0.549827 -0.445101 0.692046 +v 0.548592 -0.448867 0.692237 +v 0.547735 -0.452093 0.69197 +v 0.546839 -0.455307 0.691689 +v 0.546509 -0.457803 0.69077 +v 0.546176 -0.460296 0.689847 +v -0.677561 -0.195507 0.207184 +v -0.675958 -0.201503 0.209603 +v -0.674342 -0.207497 0.212025 +v -0.673041 -0.212336 0.214369 +v -0.671729 -0.217174 0.216714 +v -0.670405 -0.221999 0.219218 +v -0.669103 -0.226828 0.22172 +v -0.667848 -0.231746 0.224158 +v -0.666599 -0.236665 0.226599 +v -0.665326 -0.24194 0.229048 +v -0.66408 -0.247221 0.231494 +v -0.662896 -0.252629 0.233575 +v -0.661731 -0.258043 0.235653 +v -0.660577 -0.263686 0.237655 +v -0.65943 -0.269331 0.239665 +v -0.658195 -0.275183 0.24156 +v -0.656944 -0.281044 0.243451 +v -0.655697 -0.287132 0.245588 +v -0.654444 -0.293219 0.247723 +v -0.65327 -0.299753 0.251255 +v -0.652054 -0.306267 0.254796 +v -0.650702 -0.312876 0.257837 +v -0.649333 -0.319482 0.260881 +v -0.647921 -0.326405 0.264173 +v -0.646514 -0.333305 0.267477 +v -0.645005 -0.340445 0.270916 +v -0.643475 -0.347563 0.274365 +v -0.641843 -0.354916 0.277836 +v -0.640234 -0.36227 0.28131 +v -0.638467 -0.369849 0.284624 +v -0.636676 -0.377443 0.28793 +v -0.634758 -0.385251 0.29113 +v -0.632835 -0.393092 0.294311 +v -0.630724 -0.401113 0.29737 +v -0.628593 -0.409126 0.300431 +v -0.62627 -0.417306 0.303409 +v -0.623868 -0.425482 0.306381 +v -0.621364 -0.433854 0.309273 +v -0.618745 -0.442184 0.312169 +v -0.615978 -0.450668 0.315033 +v -0.61309 -0.459083 0.317928 +v -0.681981 -0.177268 0.203734 +v -0.680352 -0.182528 0.207495 +v -0.678925 -0.188252 0.210414 +v -0.677494 -0.193976 0.213333 +v -0.676042 -0.199758 0.215595 +v -0.674616 -0.205549 0.217857 +v -0.673273 -0.210913 0.220387 +v -0.671941 -0.21628 0.222919 +v -0.670661 -0.221451 0.225432 +v -0.669391 -0.226626 0.227952 +v -0.668162 -0.231837 0.230378 +v -0.666936 -0.23705 0.232805 +v -0.665817 -0.242603 0.236204 +v -0.664725 -0.248163 0.239603 +v -0.66358 -0.253774 0.241954 +v -0.662474 -0.259397 0.244301 +v -0.661328 -0.265202 0.246621 +v -0.660178 -0.271008 0.248943 +v -0.658962 -0.27699 0.251104 +v -0.657741 -0.282993 0.253254 +v -0.65648 -0.289197 0.255606 +v -0.655211 -0.295399 0.257951 +v -0.653953 -0.301928 0.261159 +v -0.652672 -0.308454 0.264365 +v -0.651296 -0.315121 0.267395 +v -0.649892 -0.32178 0.270425 +v -0.648443 -0.328685 0.273647 +v -0.646985 -0.335554 0.276883 +v -0.645463 -0.342677 0.280231 +v -0.643898 -0.349785 0.283591 +v -0.642283 -0.357125 0.286952 +v -0.640629 -0.364456 0.290323 +v -0.638837 -0.372038 0.293545 +v -0.637044 -0.379622 0.29677 +v -0.635109 -0.387427 0.299887 +v -0.633134 -0.395228 0.302995 +v -0.631091 -0.403238 0.305995 +v -0.628896 -0.411205 0.309007 +v -0.62656 -0.419352 0.311883 +v -0.624137 -0.427484 0.314761 +v -0.621611 -0.43581 0.317692 +v -0.618951 -0.444092 0.320626 +v -0.616198 -0.452504 0.323557 +v -0.613327 -0.460882 0.326482 +v -0.610306 -0.469416 0.329378 +v -0.684946 -0.162537 0.203521 +v -0.683133 -0.169089 0.207583 +v -0.681877 -0.17409 0.210923 +v -0.680643 -0.179096 0.214273 +v -0.67911 -0.185787 0.216886 +v -0.677559 -0.192474 0.219499 +v -0.676263 -0.198047 0.221601 +v -0.674997 -0.203627 0.223699 +v -0.6736 -0.209512 0.226411 +v -0.672224 -0.215402 0.229124 +v -0.670985 -0.220921 0.231653 +v -0.669761 -0.226442 0.234185 +v -0.668543 -0.231945 0.236596 +v -0.667359 -0.237454 0.239005 +v -0.666361 -0.243277 0.243358 +v -0.665419 -0.249114 0.247707 +v -0.664315 -0.25493 0.250338 +v -0.663186 -0.26074 0.252993 +v -0.662053 -0.266712 0.255607 +v -0.660934 
-0.272687 0.25822 +v -0.659737 -0.278814 0.260643 +v -0.658517 -0.284942 0.263063 +v -0.657216 -0.291253 0.265628 +v -0.655896 -0.29756 0.268193 +v -0.654547 -0.304085 0.271076 +v -0.653193 -0.310608 0.27396 +v -0.651765 -0.317333 0.276965 +v -0.650369 -0.323999 0.28 +v -0.648906 -0.330879 0.283157 +v -0.64744 -0.337751 0.286322 +v -0.64588 -0.344877 0.289571 +v -0.644322 -0.352004 0.292818 +v -0.642652 -0.359316 0.296075 +v -0.64096 -0.366646 0.299324 +v -0.639169 -0.374218 0.302467 +v -0.637341 -0.381805 0.305604 +v -0.635391 -0.38959 0.308642 +v -0.633446 -0.397372 0.311685 +v -0.631315 -0.405326 0.314639 +v -0.629094 -0.413252 0.317597 +v -0.626731 -0.42136 0.320376 +v -0.624279 -0.429443 0.323153 +v -0.621737 -0.437736 0.326114 +v -0.619131 -0.445956 0.329121 +v -0.616408 -0.454335 0.332079 +v -0.613497 -0.462651 0.335043 +v -0.610453 -0.471121 0.337978 +v -0.607224 -0.479546 0.340913 +v -0.730308 0.445041 0.344805 +v -0.731301 0.436508 0.339605 +v -0.732303 0.428085 0.334409 +v -0.733316 0.419662 0.329214 +v -0.734363 0.411332 0.323981 +v -0.735403 0.403001 0.31874 +v -0.736485 0.394761 0.313436 +v -0.737572 0.386518 0.308139 +v -0.738736 0.378354 0.302799 +v -0.73988 0.370197 0.297465 +v -0.740968 0.362123 0.292057 +v -0.742052 0.354055 0.286654 +v -0.742971 0.346078 0.281087 +v -0.743877 0.338102 0.27552 +v -0.744606 0.330227 0.269817 +v -0.745342 0.322351 0.264118 +v -0.745812 0.314637 0.258289 +v -0.746292 0.306919 0.252462 +v -0.746485 0.299375 0.246455 +v -0.74669 0.291832 0.240454 +v -0.746692 0.284446 0.234302 +v -0.746683 0.27706 0.228148 +v -0.746407 0.269858 0.2219 +v -0.688881 -0.146683 0.203605 +v -0.686832 -0.153408 0.207397 +v -0.685217 -0.159761 0.211065 +v -0.683609 -0.166111 0.21475 +v -0.682238 -0.17204 0.21767 +v -0.680889 -0.177978 0.220596 +v -0.679415 -0.184479 0.223068 +v -0.677966 -0.190988 0.225545 +v -0.67661 -0.197079 0.228115 +v -0.675288 -0.203179 0.230679 +v -0.674064 -0.209201 0.234787 +v -0.672863 -0.215235 0.238896 +v -0.671689 -0.221027 0.241774 +v -0.670528 -0.226823 0.244649 +v -0.669386 -0.232585 0.247358 +v -0.668271 -0.238352 0.250062 +v -0.667254 -0.244295 0.253777 +v -0.666238 -0.25024 0.257516 +v -0.665095 -0.256189 0.260208 +v -0.663952 -0.262137 0.262901 +v -0.66279 -0.268207 0.265497 +v -0.6616 -0.274279 0.268092 +v -0.660326 -0.280488 0.270522 +v -0.659077 -0.286705 0.27295 +v -0.657771 -0.293079 0.27544 +v -0.65642 -0.299445 0.277927 +v -0.655046 -0.305998 0.280755 +v -0.653647 -0.312545 0.28358 +v -0.652218 -0.319242 0.286574 +v -0.650795 -0.325934 0.289574 +v -0.649314 -0.332836 0.292679 +v -0.647796 -0.339733 0.295787 +v -0.646213 -0.346854 0.298954 +v -0.64462 -0.353975 0.302123 +v -0.642979 -0.361295 0.305288 +v -0.641295 -0.368649 0.30844 +v -0.639488 -0.376217 0.3115 +v -0.637648 -0.383793 0.314556 +v -0.6357 -0.39156 0.317542 +v -0.633687 -0.399308 0.320534 +v -0.631509 -0.407217 0.323439 +v -0.629283 -0.415112 0.326354 +v -0.626931 -0.423187 0.329137 +v -0.624486 -0.431235 0.331929 +v -0.62196 -0.439446 0.334817 +v -0.619344 -0.447614 0.337721 +v -0.616566 -0.455929 0.340707 +v -0.613674 -0.464204 0.343699 +v -0.610582 -0.472617 0.346669 +v -0.607355 -0.480992 0.349628 +v -0.724304 0.466743 0.371787 +v -0.725268 0.458187 0.366589 +v -0.726225 0.449632 0.361389 +v -0.72718 0.44117 0.356201 +v -0.728132 0.432708 0.351011 +v -0.729126 0.424353 0.345837 +v -0.730103 0.415995 0.340663 +v -0.731086 0.407731 0.33545 +v -0.732092 0.399467 0.330242 +v -0.733148 0.391279 0.324988 +v -0.734219 0.383092 0.319735 +v -0.735341 0.374971 
0.314417 +v -0.736432 0.366856 0.309095 +v -0.737479 0.358835 0.303772 +v -0.738547 0.350809 0.298448 +v -0.739476 0.342869 0.292973 +v -0.740392 0.334933 0.287495 +v -0.741122 0.327087 0.281916 +v -0.741831 0.319245 0.276336 +v -0.742304 0.311543 0.270683 +v -0.742783 0.303843 0.26503 +v -0.742922 0.29631 0.259226 +v -0.743083 0.288776 0.253421 +v -0.743077 0.281359 0.24754 +v -0.743083 0.273943 0.241666 +v -0.742744 0.266681 0.235723 +v -0.742403 0.259418 0.229776 +v -0.741909 0.252248 0.223901 +v -0.741422 0.24508 0.218026 +v -0.694768 -0.122812 0.202473 +v -0.692586 -0.130139 0.205544 +v -0.690433 -0.137477 0.208624 +v -0.688697 -0.144155 0.21198 +v -0.687002 -0.150845 0.215347 +v -0.685579 -0.157 0.21863 +v -0.684178 -0.163147 0.221928 +v -0.682697 -0.17001 0.224423 +v -0.681188 -0.176871 0.226924 +v -0.679814 -0.183192 0.229259 +v -0.678452 -0.189515 0.231596 +v -0.677072 -0.196135 0.234629 +v -0.675718 -0.20276 0.23766 +v -0.674643 -0.20892 0.243162 +v -0.673615 -0.215088 0.248662 +v -0.672487 -0.221153 0.251886 +v -0.67138 -0.22722 0.255108 +v -0.670304 -0.233241 0.258113 +v -0.669209 -0.239257 0.261127 +v -0.668103 -0.245304 0.264238 +v -0.666995 -0.25135 0.267352 +v -0.665833 -0.257436 0.270088 +v -0.664661 -0.263521 0.272824 +v -0.663421 -0.269681 0.275401 +v -0.66215 -0.275853 0.27797 +v -0.660857 -0.28215 0.280408 +v -0.65955 -0.288438 0.282849 +v -0.658189 -0.294859 0.285267 +v -0.656826 -0.301278 0.287691 +v -0.655411 -0.307848 0.290462 +v -0.654011 -0.314383 0.293249 +v -0.652572 -0.321116 0.296203 +v -0.651118 -0.327837 0.299169 +v -0.649628 -0.334776 0.302215 +v -0.648111 -0.341705 0.305265 +v -0.64652 -0.34882 0.308344 +v -0.644923 -0.35594 0.311432 +v -0.643235 -0.363263 0.314506 +v -0.641489 -0.370636 0.317551 +v -0.639641 -0.378189 0.320539 +v -0.637804 -0.385746 0.323531 +v -0.635846 -0.39349 0.326458 +v -0.633775 -0.401201 0.329387 +v -0.631618 -0.40908 0.332252 +v -0.62937 -0.41694 0.335123 +v -0.627036 -0.42498 0.337919 +v -0.624601 -0.432977 0.340729 +v -0.622043 -0.441112 0.343525 +v -0.619419 -0.449225 0.346333 +v -0.616682 -0.457509 0.349341 +v -0.613738 -0.465722 0.352356 +v -0.610677 -0.474097 0.355359 +v -0.607457 -0.482426 0.358354 +v -0.720071 0.471131 0.388305 +v -0.720995 0.462552 0.383076 +v -0.721907 0.454076 0.377885 +v -0.722833 0.445599 0.372696 +v -0.723762 0.437216 0.367537 +v -0.724687 0.428836 0.362376 +v -0.725623 0.420552 0.357227 +v -0.726582 0.412262 0.352087 +v -0.727576 0.404059 0.346922 +v -0.728573 0.395857 0.341764 +v -0.729629 0.387725 0.336565 +v -0.730652 0.379595 0.331355 +v -0.731712 0.371542 0.3261 +v -0.732798 0.363481 0.320851 +v -0.73385 0.355491 0.315575 +v -0.7349 0.347504 0.31028 +v -0.735807 0.339596 0.304864 +v -0.736735 0.331686 0.299456 +v -0.737459 0.323859 0.293948 +v -0.738207 0.316029 0.288448 +v -0.738653 0.30837 0.28305 +v -0.739111 0.300711 0.277655 +v -0.739345 0.293161 0.272088 +v -0.739565 0.285613 0.266518 +v -0.739561 0.278176 0.260874 +v -0.739559 0.27074 0.255239 +v -0.739322 0.263403 0.249583 +v -0.739106 0.256063 0.243927 +v -0.738582 0.248808 0.238335 +v -0.73805 0.241552 0.232742 +v -0.737429 0.234268 0.22734 +v -0.736801 0.226984 0.221941 +v -0.735815 0.219654 0.216805 +v -0.702408 -0.0925665 0.20632 +v -0.699291 -0.103204 0.209081 +v -0.696927 -0.111614 0.211785 +v -0.694582 -0.12003 0.214487 +v -0.692648 -0.127635 0.217418 +v -0.690739 -0.135244 0.220356 +v -0.689105 -0.14222 0.223912 +v -0.687509 -0.1492 0.22747 +v -0.686126 -0.155765 0.230632 +v -0.684743 -0.162328 0.233791 +v -0.683326 -0.169226 
0.236384 +v -0.681915 -0.17613 0.238977 +v -0.68058 -0.182691 0.241516 +v -0.679272 -0.189258 0.24405 +v -0.677966 -0.195919 0.247147 +v -0.67668 -0.202579 0.250238 +v -0.675614 -0.208933 0.254746 +v -0.67455 -0.215289 0.259259 +v -0.673416 -0.221519 0.262421 +v -0.672274 -0.22774 0.265576 +v -0.671131 -0.2339 0.268548 +v -0.669971 -0.240052 0.271533 +v -0.668799 -0.246215 0.274489 +v -0.667602 -0.252376 0.277441 +v -0.666389 -0.258563 0.280146 +v -0.665168 -0.264745 0.28285 +v -0.663877 -0.270997 0.285384 +v -0.662574 -0.277255 0.287915 +v -0.661237 -0.28361 0.290335 +v -0.659917 -0.289967 0.292754 +v -0.658557 -0.296449 0.2951 +v -0.657143 -0.302894 0.297452 +v -0.655709 -0.309473 0.300204 +v -0.654314 -0.316056 0.302962 +v -0.652852 -0.322821 0.305889 +v -0.651404 -0.329588 0.308812 +v -0.649893 -0.33653 0.311791 +v -0.64833 -0.343462 0.31478 +v -0.646756 -0.350596 0.317785 +v -0.645149 -0.35772 0.320799 +v -0.643446 -0.36506 0.323783 +v -0.641654 -0.372426 0.326746 +v -0.639817 -0.37997 0.329672 +v -0.637961 -0.38751 0.332602 +v -0.635948 -0.395211 0.335477 +v -0.633874 -0.402892 0.338349 +v -0.631725 -0.410739 0.341164 +v -0.629475 -0.418569 0.343985 +v -0.627145 -0.426562 0.346753 +v -0.624736 -0.434504 0.349545 +v -0.622219 -0.442611 0.352282 +v -0.619561 -0.450672 0.355032 +v -0.616809 -0.458904 0.358064 +v -0.61383 -0.467053 0.361102 +v -0.610738 -0.475364 0.364136 +v -0.607521 -0.483645 0.367161 +v -0.713908 0.49254 0.415336 +v -0.714888 0.48394 0.410067 +v -0.715849 0.47534 0.404792 +v -0.716766 0.466847 0.399577 +v -0.717683 0.458355 0.394362 +v -0.718563 0.449959 0.389185 +v -0.719465 0.441563 0.384013 +v -0.720372 0.433261 0.378884 +v -0.721259 0.424963 0.373748 +v -0.722169 0.416747 0.368629 +v -0.723077 0.408532 0.363505 +v -0.724052 0.40039 0.358389 +v -0.72504 0.392253 0.353275 +v -0.726056 0.384179 0.348126 +v -0.727058 0.376108 0.34297 +v -0.728101 0.368112 0.337786 +v -0.729155 0.360109 0.332608 +v -0.730174 0.352158 0.327352 +v -0.731196 0.344207 0.322101 +v -0.732099 0.33633 0.316752 +v -0.733005 0.328448 0.311403 +v -0.733773 0.320635 0.305975 +v -0.734563 0.312816 0.300555 +v -0.734998 0.305196 0.295418 +v -0.735443 0.297574 0.290283 +v -0.735742 0.290014 0.284952 +v -0.736038 0.282454 0.279613 +v -0.736031 0.274995 0.274198 +v -0.736042 0.267536 0.268797 +v -0.735914 0.260126 0.263432 +v -0.735808 0.252709 0.258065 +v -0.735256 0.245368 0.252769 +v -0.734711 0.238025 0.24747 +v -0.734023 0.230634 0.24236 +v -0.733322 0.223247 0.23725 +v -0.732246 0.215778 0.232395 +v -0.731174 0.208307 0.22754 +v -0.72993 0.200608 0.223069 +v -0.728691 0.192907 0.218599 +v -0.72743 0.184793 0.214618 +v -0.726211 0.176673 0.210643 +v -0.724928 0.168132 0.207267 +v -0.723652 0.15959 0.20389 +v -0.722342 0.150629 0.201247 +v -0.721018 0.141672 0.198599 +v -0.719299 0.132258 0.19712 +v -0.717582 0.122846 0.195638 +v -0.716276 0.112584 0.194466 +v -0.715019 0.102315 0.193301 +v -0.714104 0.091502 0.193083 +v -0.713319 0.0806769 0.192879 +v -0.713394 0.069405 0.193219 +v -0.713637 0.0581346 0.19358 +v -0.714633 0.0464866 0.19379 +v -0.715737 0.0348464 0.194019 +v -0.716482 0.0231018 0.195847 +v -0.717173 0.0113506 0.197655 +v -0.716862 -0.000518804 0.199748 +v -0.716355 -0.0123927 0.201767 +v -0.714769 -0.0240601 0.203934 +v -0.713049 -0.0357225 0.206051 +v -0.710703 -0.0470145 0.208358 +v -0.708307 -0.0583002 0.210648 +v -0.705834 -0.0690636 0.213213 +v -0.703336 -0.0798225 0.215773 +v -0.700978 -0.0898142 0.218422 +v -0.698625 -0.0998059 0.221072 +v -0.696641 -0.108556 0.223808 +v 
-0.694641 -0.117303 0.226541 +v -0.692936 -0.125175 0.22932 +v -0.691236 -0.133048 0.232118 +v -0.689705 -0.14031 0.235861 +v -0.68822 -0.147583 0.239608 +v -0.686844 -0.154561 0.24263 +v -0.685503 -0.161549 0.24565 +v -0.684147 -0.168483 0.248339 +v -0.682787 -0.175416 0.251028 +v -0.681482 -0.182216 0.253768 +v -0.680196 -0.189017 0.256507 +v -0.67897 -0.195716 0.25966 +v -0.677763 -0.20242 0.262814 +v -0.676586 -0.208946 0.26634 +v -0.675401 -0.215471 0.269863 +v -0.674218 -0.221853 0.272962 +v -0.673042 -0.228232 0.27609 +v -0.671836 -0.234525 0.279022 +v -0.670596 -0.240811 0.281958 +v -0.66936 -0.247088 0.284755 +v -0.668106 -0.253362 0.287553 +v -0.666812 -0.25964 0.290223 +v -0.665554 -0.265924 0.292894 +v -0.664231 -0.272273 0.295384 +v -0.662883 -0.278623 0.297871 +v -0.661525 -0.285048 0.300269 +v -0.660171 -0.291469 0.302671 +v -0.658782 -0.297967 0.304955 +v -0.6574 -0.304456 0.307248 +v -0.655995 -0.31109 0.309958 +v -0.654559 -0.317708 0.312677 +v -0.65308 -0.32451 0.315574 +v -0.651571 -0.331303 0.31847 +v -0.650046 -0.338259 0.321388 +v -0.648496 -0.345209 0.324305 +v -0.646886 -0.352349 0.327239 +v -0.645259 -0.359504 0.330157 +v -0.643526 -0.366856 0.333052 +v -0.641739 -0.374203 0.335943 +v -0.639892 -0.381728 0.338804 +v -0.637976 -0.389236 0.341669 +v -0.636008 -0.396915 0.34449 +v -0.633916 -0.404557 0.347316 +v -0.6318 -0.412381 0.350087 +v -0.629538 -0.420177 0.352866 +v -0.627195 -0.428108 0.355611 +v -0.624751 -0.436002 0.358368 +v -0.622206 -0.444053 0.361053 +v -0.619523 -0.452058 0.363744 +v -0.616766 -0.460234 0.366796 +v -0.613804 -0.468341 0.369853 +v -0.610729 -0.476616 0.372912 +v -0.607525 -0.484842 0.375968 +v -0.709324 0.496508 0.431796 +v -0.710355 0.487899 0.426471 +v -0.711308 0.479394 0.421208 +v -0.712271 0.470888 0.415946 +v -0.713184 0.462486 0.41075 +v -0.714078 0.454087 0.405553 +v -0.714941 0.44578 0.400406 +v -0.715809 0.437469 0.39526 +v -0.716715 0.429246 0.39017 +v -0.717598 0.421026 0.385066 +v -0.718506 0.412881 0.379995 +v -0.719404 0.40474 0.374923 +v -0.720379 0.396664 0.369856 +v -0.721334 0.388594 0.364787 +v -0.722338 0.38058 0.359703 +v -0.723353 0.372566 0.354628 +v -0.724392 0.364617 0.349522 +v -0.72542 0.35666 0.344413 +v -0.726414 0.348764 0.339249 +v -0.727425 0.340864 0.334093 +v -0.728339 0.333023 0.328852 +v -0.729274 0.325179 0.323621 +v -0.730048 0.3174 0.318329 +v -0.730831 0.309624 0.313035 +v -0.731354 0.302007 0.308034 +v -0.731882 0.294387 0.303036 +v -0.732225 0.286825 0.297899 +v -0.732579 0.279259 0.292754 +v -0.732705 0.271764 0.287576 +v -0.73283 0.264274 0.282407 +v -0.732731 0.256821 0.277273 +v -0.732625 0.249367 0.27214 +v -0.732275 0.241935 0.267131 +v -0.731917 0.234507 0.26211 +v -0.731267 0.227035 0.257243 +v -0.730614 0.219567 0.252381 +v -0.72974 0.211991 0.24778 +v -0.728842 0.204411 0.243174 +v -0.727629 0.196624 0.23892 +v -0.726417 0.188838 0.23467 +v -0.725245 0.180688 0.230882 +v -0.724086 0.172539 0.227091 +v -0.722843 0.164001 0.223877 +v -0.721592 0.155463 0.220662 +v -0.720328 0.146524 0.218146 +v -0.719053 0.137586 0.215629 +v -0.717675 0.128177 0.214046 +v -0.716301 0.11877 0.212467 +v -0.715173 0.108697 0.211343 +v -0.714096 0.0986212 0.210226 +v -0.713316 0.0880229 0.209855 +v -0.712627 0.0774217 0.209485 +v -0.712477 0.0664111 0.209595 +v -0.71241 0.0554005 0.209706 +v -0.712822 0.0440622 0.209858 +v -0.713296 0.0327249 0.210009 +v -0.713731 0.0212151 0.21068 +v -0.714102 0.00970697 0.211339 +v -0.713731 -0.00176815 0.21317 +v -0.713222 -0.0132443 0.214975 +v -0.712018 -0.0245424 
0.216992 +v -0.710654 -0.0358334 0.21897 +v -0.708787 -0.0467394 0.221726 +v -0.706845 -0.0576392 0.224456 +v -0.704851 -0.0681003 0.226956 +v -0.702818 -0.0785523 0.229445 +v -0.700858 -0.088306 0.231988 +v -0.698873 -0.0980562 0.234528 +v -0.697067 -0.10685 0.237159 +v -0.695284 -0.115647 0.239788 +v -0.693646 -0.123659 0.242547 +v -0.692016 -0.131668 0.245317 +v -0.690537 -0.139145 0.248619 +v -0.6891 -0.146632 0.251925 +v -0.687725 -0.153815 0.254797 +v -0.686366 -0.161011 0.257678 +v -0.685014 -0.168071 0.260341 +v -0.68368 -0.175142 0.263006 +v -0.68236 -0.182058 0.265714 +v -0.681059 -0.188976 0.268409 +v -0.679808 -0.195764 0.271385 +v -0.678549 -0.202549 0.274354 +v -0.677288 -0.209164 0.277562 +v -0.676028 -0.215773 0.280776 +v -0.674813 -0.222259 0.28378 +v -0.673551 -0.228741 0.286777 +v -0.672273 -0.235138 0.289611 +v -0.671013 -0.241537 0.292436 +v -0.669723 -0.247901 0.295151 +v -0.668425 -0.254265 0.297864 +v -0.667135 -0.26064 0.300463 +v -0.665791 -0.267009 0.303051 +v -0.664436 -0.273433 0.305505 +v -0.663088 -0.279861 0.307954 +v -0.661748 -0.286343 0.310313 +v -0.660356 -0.292785 0.312694 +v -0.65896 -0.299328 0.314914 +v -0.657541 -0.305869 0.317132 +v -0.656115 -0.312548 0.319826 +v -0.654655 -0.319207 0.322523 +v -0.653182 -0.326029 0.325352 +v -0.651706 -0.332858 0.328176 +v -0.650145 -0.339825 0.331052 +v -0.648623 -0.346804 0.333931 +v -0.647003 -0.353958 0.336779 +v -0.6453 -0.36112 0.339618 +v -0.643584 -0.368477 0.342441 +v -0.641794 -0.375809 0.345264 +v -0.639937 -0.383313 0.348064 +v -0.638033 -0.390801 0.350875 +v -0.636039 -0.398442 0.353637 +v -0.633965 -0.406055 0.356416 +v -0.631757 -0.413818 0.359159 +v -0.629563 -0.421572 0.361913 +v -0.627225 -0.429461 0.364627 +v -0.624758 -0.437313 0.367344 +v -0.622264 -0.445326 0.370017 +v -0.619531 -0.453261 0.372698 +v -0.616747 -0.461375 0.37575 +v -0.613779 -0.469425 0.3788 +v -0.610721 -0.477642 0.38183 +v -0.607541 -0.485822 0.38484 +v -0.702398 0.517477 0.459058 +v -0.703552 0.508872 0.453659 +v -0.704704 0.500268 0.44826 +v -0.705736 0.491767 0.442936 +v -0.706753 0.483264 0.437613 +v -0.707694 0.474854 0.432364 +v -0.708669 0.466442 0.427127 +v -0.709565 0.458134 0.421945 +v -0.710455 0.449824 0.416763 +v -0.711341 0.441594 0.411647 +v -0.712192 0.433372 0.40653 +v -0.713056 0.425229 0.401457 +v -0.713922 0.417087 0.396387 +v -0.714841 0.409017 0.391365 +v -0.715748 0.400946 0.38634 +v -0.716705 0.392939 0.381329 +v -0.717641 0.384938 0.376306 +v -0.718613 0.376987 0.371289 +v -0.719611 0.369031 0.366278 +v -0.720627 0.361125 0.361245 +v -0.721655 0.353216 0.356213 +v -0.72267 0.345367 0.35115 +v -0.723684 0.337518 0.346089 +v -0.724611 0.329714 0.34096 +v -0.725542 0.321911 0.335838 +v -0.726334 0.314171 0.330689 +v -0.727098 0.306438 0.325525 +v -0.727715 0.298819 0.320658 +v -0.728327 0.2912 0.315795 +v -0.728727 0.283634 0.310868 +v -0.729136 0.276066 0.305919 +v -0.729372 0.268539 0.300964 +v -0.729642 0.261012 0.296028 +v -0.729575 0.253516 0.291126 +v -0.72949 0.246024 0.286229 +v -0.729322 0.238506 0.281489 +v -0.729161 0.230986 0.27675 +v -0.728535 0.223439 0.272132 +v -0.727914 0.215892 0.267514 +v -0.727219 0.208202 0.263161 +v -0.726521 0.200514 0.258809 +v -0.725354 0.192639 0.25478 +v -0.724185 0.184766 0.250749 +v -0.72312 0.176582 0.247156 +v -0.722056 0.168398 0.243564 +v -0.720864 0.159862 0.240514 +v -0.719676 0.151324 0.237467 +v -0.718451 0.142404 0.235075 +v -0.717251 0.133482 0.232684 +v -0.716213 0.124081 0.230997 +v -0.715191 0.114678 0.229311 +v -0.714265 0.104792 0.228238 +v 
-0.713354 0.0949084 0.227167 +v -0.712719 0.0845338 0.226633 +v -0.712114 0.0741621 0.226102 +v -0.711763 0.0634174 0.225981 +v -0.711482 0.05267 0.225862 +v -0.711398 0.0416503 0.225973 +v -0.71135 0.0306331 0.226083 +v -0.711468 0.0193609 0.225615 +v -0.711535 0.00808978 0.225142 +v -0.711135 -0.00300954 0.226741 +v -0.710622 -0.0141083 0.228321 +v -0.709673 -0.0250442 0.23016 +v -0.708635 -0.0359756 0.231984 +v -0.707177 -0.0465001 0.235158 +v -0.705683 -0.0570228 0.238318 +v -0.70411 -0.067175 0.240731 +v -0.702452 -0.07731 0.243146 +v -0.700839 -0.0868166 0.245574 +v -0.699203 -0.0963229 0.248002 +v -0.697557 -0.105139 0.250542 +v -0.695905 -0.113951 0.253091 +v -0.694375 -0.122122 0.25581 +v -0.692847 -0.13029 0.258535 +v -0.6914 -0.137973 0.261402 +v -0.689939 -0.145656 0.264268 +v -0.688563 -0.153049 0.266986 +v -0.687173 -0.160444 0.269707 +v -0.685825 -0.167644 0.272361 +v -0.684473 -0.174839 0.275014 +v -0.683112 -0.181867 0.277673 +v -0.681772 -0.1889 0.280325 +v -0.680465 -0.195766 0.28312 +v -0.679146 -0.202626 0.285933 +v -0.677853 -0.209332 0.28884 +v -0.676538 -0.216023 0.291728 +v -0.675216 -0.222616 0.294603 +v -0.673875 -0.229201 0.297481 +v -0.672567 -0.235714 0.300205 +v -0.671258 -0.242223 0.302921 +v -0.66995 -0.248684 0.305557 +v -0.668587 -0.255131 0.308187 +v -0.66725 -0.261593 0.310699 +v -0.665912 -0.268069 0.313207 +v -0.664526 -0.274564 0.315625 +v -0.66317 -0.281047 0.318051 +v -0.661811 -0.287557 0.320386 +v -0.660425 -0.294057 0.322725 +v -0.65903 -0.300666 0.324873 +v -0.657619 -0.307269 0.327022 +v -0.656198 -0.313994 0.3297 +v -0.654769 -0.320711 0.332374 +v -0.653254 -0.327554 0.335112 +v -0.651757 -0.334398 0.337856 +v -0.650245 -0.341394 0.340709 +v -0.648627 -0.348396 0.343546 +v -0.646987 -0.355563 0.346311 +v -0.645317 -0.362737 0.349082 +v -0.643552 -0.370069 0.351836 +v -0.641765 -0.377399 0.354581 +v -0.639897 -0.384872 0.357323 +v -0.637985 -0.392332 0.360069 +v -0.635981 -0.399943 0.362792 +v -0.633848 -0.407501 0.365518 +v -0.631712 -0.415228 0.368253 +v -0.629484 -0.422937 0.370968 +v -0.627134 -0.430786 0.373648 +v -0.624655 -0.438591 0.376325 +v -0.622144 -0.446545 0.378989 +v -0.619427 -0.454428 0.381657 +v -0.616652 -0.462485 0.38471 +v -0.613679 -0.470479 0.387757 +v -0.610605 -0.478633 0.390736 +v -0.60741 -0.486756 0.393676 +v -0.697305 0.520938 0.475503 +v -0.698602 0.512341 0.470016 +v -0.69974 0.503848 0.464625 +v -0.700855 0.495357 0.459227 +v -0.701894 0.486961 0.453928 +v -0.702926 0.478564 0.448626 +v -0.703887 0.470253 0.443409 +v -0.704825 0.461943 0.438187 +v -0.705708 0.453729 0.433038 +v -0.706594 0.445515 0.42789 +v -0.707468 0.437376 0.422808 +v -0.708356 0.429233 0.41773 +v -0.709257 0.421169 0.412721 +v -0.710147 0.413107 0.407703 +v -0.711076 0.405112 0.402736 +v -0.711997 0.397112 0.397764 +v -0.71295 0.389169 0.392801 +v -0.713913 0.381226 0.387861 +v -0.714919 0.373333 0.382925 +v -0.715919 0.365439 0.377987 +v -0.716967 0.35759 0.373053 +v -0.718 0.349746 0.368099 +v -0.719013 0.34194 0.363137 +v -0.720013 0.334139 0.358165 +v -0.720949 0.326374 0.353156 +v -0.721903 0.318608 0.348154 +v -0.722753 0.310902 0.34315 +v -0.723584 0.30319 0.338141 +v -0.724255 0.295583 0.33339 +v -0.724936 0.287975 0.328654 +v -0.725469 0.280406 0.323914 +v -0.725979 0.272838 0.319148 +v -0.726317 0.265306 0.314401 +v -0.726673 0.25777 0.309663 +v -0.726753 0.250239 0.304987 +v -0.726829 0.242705 0.300309 +v -0.72673 0.235143 0.295764 +v -0.726623 0.227579 0.291233 +v -0.726263 0.219963 0.286861 +v -0.72592 0.212345 0.282482 +v 
-0.7253 0.20459 0.278319 +v -0.724696 0.196834 0.274164 +v -0.72385 0.188897 0.270334 +v -0.723002 0.180956 0.266497 +v -0.722057 0.172747 0.263077 +v -0.721113 0.164536 0.259649 +v -0.720145 0.155985 0.256724 +v -0.719181 0.147441 0.253798 +v -0.718132 0.138541 0.251471 +v -0.717088 0.129639 0.249142 +v -0.716164 0.120311 0.247439 +v -0.715248 0.110981 0.245736 +v -0.714416 0.101228 0.244604 +v -0.713603 0.0914726 0.243473 +v -0.71298 0.0812928 0.242839 +v -0.712373 0.0711169 0.242206 +v -0.711939 0.0605973 0.241962 +v -0.711517 0.0500804 0.241718 +v -0.711195 0.0393302 0.241755 +v -0.710887 0.0285793 0.241788 +v -0.710672 0.0176398 0.241653 +v -0.710417 0.006705 0.241518 +v -0.709914 -0.004132 0.242761 +v -0.709341 -0.0149648 0.243984 +v -0.7085 -0.0256202 0.245649 +v -0.707581 -0.0362698 0.247309 +v -0.70638 -0.0465831 0.249875 +v -0.705067 -0.0568832 0.252435 +v -0.703687 -0.0668119 0.254657 +v -0.702241 -0.0767319 0.256881 +v -0.700748 -0.0861071 0.259183 +v -0.699247 -0.0954839 0.261482 +v -0.697712 -0.104266 0.263883 +v -0.696158 -0.113051 0.26629 +v -0.694665 -0.12129 0.268837 +v -0.693192 -0.12953 0.271382 +v -0.691816 -0.137333 0.274023 +v -0.690403 -0.145123 0.276664 +v -0.689008 -0.152622 0.279251 +v -0.687634 -0.160131 0.28184 +v -0.68624 -0.167425 0.284377 +v -0.684842 -0.174715 0.286904 +v -0.683493 -0.181832 0.289461 +v -0.682117 -0.188944 0.292028 +v -0.680755 -0.195876 0.294707 +v -0.679396 -0.202812 0.297379 +v -0.678082 -0.209592 0.300097 +v -0.676756 -0.216371 0.302822 +v -0.675386 -0.223048 0.305534 +v -0.674013 -0.229725 0.308249 +v -0.672683 -0.236319 0.310878 +v -0.671331 -0.242907 0.313494 +v -0.669951 -0.249442 0.316035 +v -0.668607 -0.255987 0.318579 +v -0.667231 -0.262511 0.321027 +v -0.665848 -0.269056 0.323468 +v -0.664504 -0.275601 0.325851 +v -0.663143 -0.282125 0.328235 +v -0.661754 -0.288691 0.330556 +v -0.660361 -0.295257 0.332874 +v -0.658981 -0.301917 0.335105 +v -0.657597 -0.308576 0.337332 +v -0.656169 -0.315333 0.339886 +v -0.654714 -0.322076 0.342444 +v -0.653255 -0.328954 0.34509 +v -0.651773 -0.335822 0.347741 +v -0.650168 -0.342828 0.350466 +v -0.648563 -0.349847 0.353194 +v -0.646906 -0.356997 0.355889 +v -0.645227 -0.364174 0.358568 +v -0.643514 -0.371514 0.361264 +v -0.641671 -0.378814 0.363951 +v -0.639816 -0.386265 0.366643 +v -0.637873 -0.39368 0.369327 +v -0.635921 -0.401261 0.372 +v -0.633846 -0.408801 0.374721 +v -0.631661 -0.416478 0.377412 +v -0.629345 -0.424121 0.380079 +v -0.627058 -0.431936 0.382733 +v -0.624534 -0.439682 0.385382 +v -0.621982 -0.447568 0.388018 +v -0.619323 -0.455425 0.390659 +v -0.616591 -0.46344 0.39371 +v -0.613634 -0.47136 0.396733 +v -0.610529 -0.47943 0.399718 +v -0.607316 -0.487472 0.402687 +v -0.689228 0.541338 0.503069 +v -0.690731 0.532742 0.497495 +v -0.692228 0.524143 0.491918 +v -0.693522 0.515665 0.486455 +v -0.694819 0.507193 0.480977 +v -0.695933 0.498816 0.475591 +v -0.697033 0.490439 0.470201 +v -0.69807 0.482151 0.464924 +v -0.699105 0.47386 0.459647 +v -0.70006 0.465651 0.454449 +v -0.701008 0.457438 0.449251 +v -0.70192 0.449319 0.444147 +v -0.702803 0.441201 0.439036 +v -0.703677 0.433148 0.433991 +v -0.704546 0.425094 0.428945 +v -0.70544 0.417111 0.423978 +v -0.706358 0.409127 0.419019 +v -0.707309 0.401201 0.414106 +v -0.708265 0.393278 0.409174 +v -0.709231 0.385395 0.404287 +v -0.71019 0.377516 0.399419 +v -0.711204 0.369681 0.394558 +v -0.712226 0.361846 0.3897 +v -0.713239 0.354061 0.38484 +v -0.714252 0.346275 0.379981 +v -0.715285 0.338521 0.375118 +v -0.716321 0.330761 0.370248 +v 
-0.717296 0.323034 0.365353 +v -0.718274 0.315307 0.360469 +v -0.719149 0.307632 0.355605 +v -0.720039 0.299952 0.350741 +v -0.720777 0.292352 0.346125 +v -0.721541 0.284752 0.341512 +v -0.722158 0.277191 0.336939 +v -0.722799 0.269624 0.332367 +v -0.723248 0.262074 0.327836 +v -0.723684 0.254529 0.323304 +v -0.723928 0.246961 0.31885 +v -0.724173 0.239393 0.314394 +v -0.724123 0.231781 0.310055 +v -0.724087 0.224171 0.305719 +v -0.724014 0.216487 0.301597 +v -0.723909 0.208807 0.297437 +v -0.723389 0.200983 0.293479 +v -0.722861 0.193159 0.28952 +v -0.722356 0.185152 0.285881 +v -0.721851 0.177146 0.282248 +v -0.721034 0.168909 0.279004 +v -0.720206 0.160671 0.27575 +v -0.719485 0.15211 0.272936 +v -0.718763 0.143554 0.270137 +v -0.717879 0.134675 0.267871 +v -0.716986 0.125796 0.265605 +v -0.716184 0.11654 0.263886 +v -0.715388 0.107285 0.262164 +v -0.714651 0.097662 0.260969 +v -0.713931 0.0880377 0.259774 +v -0.713312 0.0780517 0.259042 +v -0.712721 0.068068 0.25831 +v -0.712181 0.0577777 0.257942 +v -0.711636 0.0474902 0.257576 +v -0.711107 0.0370059 0.25754 +v -0.710594 0.0265217 0.257502 +v -0.710136 0.0159179 0.257716 +v -0.709649 0.00531654 0.257929 +v -0.709053 -0.00525494 0.258817 +v -0.708439 -0.0158308 0.259694 +v -0.707663 -0.0262124 0.261176 +v -0.706802 -0.0365871 0.262653 +v -0.705691 -0.0466623 0.264619 +v -0.704514 -0.0567161 0.266604 +v -0.703247 -0.066408 0.268641 +v -0.701962 -0.0760952 0.270681 +v -0.700601 -0.0853598 0.272829 +v -0.699176 -0.0946189 0.274982 +v -0.697758 -0.103377 0.277241 +v -0.696343 -0.112136 0.279498 +v -0.694936 -0.120454 0.281856 +v -0.693515 -0.128767 0.284215 +v -0.69209 -0.136661 0.286644 +v -0.690665 -0.144556 0.289069 +v -0.689229 -0.152157 0.291524 +v -0.687803 -0.159765 0.293982 +v -0.686434 -0.167163 0.2964 +v -0.685015 -0.174548 0.298828 +v -0.683623 -0.181741 0.3013 +v -0.682197 -0.188928 0.303775 +v -0.68082 -0.19594 0.3063 +v -0.679438 -0.202938 0.308801 +v -0.678092 -0.209803 0.311351 +v -0.676737 -0.216675 0.313908 +v -0.675375 -0.223445 0.316467 +v -0.673939 -0.2302 0.319025 +v -0.672525 -0.236863 0.321539 +v -0.671178 -0.24354 0.324054 +v -0.669841 -0.250169 0.326502 +v -0.66846 -0.256797 0.32896 +v -0.667082 -0.263403 0.331353 +v -0.665748 -0.26999 0.333758 +v -0.664363 -0.276584 0.336078 +v -0.662978 -0.283176 0.33841 +v -0.661618 -0.289814 0.340717 +v -0.660247 -0.296454 0.34302 +v -0.658851 -0.303155 0.345325 +v -0.657442 -0.309852 0.347636 +v -0.656053 -0.316648 0.350081 +v -0.654577 -0.323426 0.352522 +v -0.653077 -0.330308 0.355073 +v -0.65156 -0.337221 0.35761 +v -0.649984 -0.344247 0.360218 +v -0.648373 -0.351266 0.36283 +v -0.646712 -0.358409 0.365451 +v -0.645047 -0.365593 0.368067 +v -0.643261 -0.372896 0.370693 +v -0.641455 -0.380178 0.373323 +v -0.639632 -0.387606 0.375953 +v -0.637675 -0.394997 0.378585 +v -0.635678 -0.402524 0.381247 +v -0.63356 -0.410021 0.383917 +v -0.631408 -0.417668 0.386564 +v -0.629117 -0.425275 0.389192 +v -0.626767 -0.433022 0.391817 +v -0.624317 -0.440738 0.394445 +v -0.621795 -0.448581 0.397067 +v -0.619094 -0.456355 0.399662 +v -0.616279 -0.464261 0.402679 +v -0.613313 -0.472124 0.405693 +v -0.610274 -0.48014 0.408708 +v -0.607058 -0.488118 0.411702 +v -0.68524 0.535605 0.51379 +v -0.686758 0.527142 0.508227 +v -0.688238 0.518681 0.502657 +v -0.689516 0.510322 0.497203 +v -0.690798 0.501967 0.49174 +v -0.691914 0.493711 0.486383 +v -0.693032 0.485459 0.481023 +v -0.694102 0.477273 0.475788 +v -0.695147 0.469093 0.470544 +v -0.696112 0.460993 0.465386 +v -0.697078 0.45289 0.460232 +v 
[... Wavefront OBJ mesh data continues: a long run of `+v <x> <y> <z>` vertex lines (one vertex per diff line) elided for brevity; the values are raw mesh geometry added by this diff and carry no further editorial content ...]
0.52763 +v -0.678561 -0.0570664 0.526793 +v -0.677176 -0.0653412 0.526084 +v -0.675771 -0.0736332 0.525371 +v -0.674368 -0.0818544 0.524792 +v -0.672973 -0.0900856 0.524209 +v -0.671548 -0.0982215 0.52376 +v -0.670246 -0.106381 0.523337 +v -0.668947 -0.114425 0.523042 +v -0.667684 -0.122472 0.522763 +v -0.66641 -0.130423 0.52258 +v -0.665241 -0.138387 0.522404 +v -0.664038 -0.146235 0.522325 +v -0.662902 -0.154093 0.522269 +v -0.661708 -0.161847 0.522311 +v -0.660608 -0.169619 0.522368 +v -0.659556 -0.177283 0.522535 +v -0.658482 -0.18494 0.52269 +v -0.657489 -0.192494 0.522958 +v -0.656431 -0.200042 0.523223 +v -0.655382 -0.20749 0.52358 +v -0.654353 -0.214947 0.523911 +v -0.653311 -0.222299 0.524325 +v -0.65232 -0.229652 0.524747 +v -0.65129 -0.236896 0.525287 +v -0.650276 -0.244146 0.525826 +v -0.649273 -0.251301 0.526466 +v -0.648204 -0.258448 0.527104 +v -0.647114 -0.265514 0.527832 +v -0.646004 -0.272585 0.528557 +v -0.644942 -0.279575 0.529404 +v -0.643779 -0.286553 0.530235 +v -0.642612 -0.293466 0.531167 +v -0.64139 -0.300363 0.532081 +v -0.640202 -0.307204 0.533101 +v -0.638839 -0.313999 0.534054 +v -0.63754 -0.320733 0.535113 +v -0.636137 -0.327457 0.536156 +v -0.634748 -0.334133 0.537305 +v -0.633292 -0.340793 0.538433 +v -0.631783 -0.347391 0.539673 +v -0.630233 -0.353989 0.540886 +v -0.628651 -0.360526 0.542204 +v -0.626955 -0.367057 0.543499 +v -0.625276 -0.373559 0.544978 +v -0.623493 -0.380036 0.546418 +v -0.621769 -0.386487 0.54798 +v -0.619948 -0.392909 0.549514 +v -0.618115 -0.3993 0.55117 +v -0.616107 -0.405646 0.552773 +v -0.614091 -0.411967 0.554496 +v -0.611978 -0.418257 0.556183 +v -0.609904 -0.424541 0.558008 +v -0.607669 -0.430775 0.559796 +v -0.605435 -0.437001 0.561691 +v -0.60307 -0.443191 0.563543 +v -0.600792 -0.449401 0.565496 +v -0.598314 -0.455556 0.567388 +v -0.595595 -0.46171 0.570183 +v -0.592739 -0.46782 0.572939 +v -0.589878 -0.473968 0.57584 +v -0.586822 -0.480049 0.578685 +v -0.61298 0.427266 0.700649 +v -0.614883 0.42148 0.696679 +v -0.616725 0.415701 0.692684 +v -0.618477 0.409858 0.68895 +v -0.620191 0.404 0.685198 +v -0.621799 0.398086 0.681656 +v -0.623436 0.39217 0.67813 +v -0.625012 0.386183 0.674795 +v -0.626544 0.380179 0.671441 +v -0.628016 0.374119 0.668255 +v -0.629489 0.368096 0.665078 +v -0.630966 0.361994 0.662039 +v -0.632418 0.355891 0.658994 +v -0.633833 0.349714 0.656062 +v -0.635292 0.343539 0.653141 +v -0.636688 0.337304 0.650295 +v -0.638158 0.331067 0.647482 +v -0.639573 0.324768 0.644744 +v -0.64101 0.318491 0.642012 +v -0.642427 0.312185 0.639305 +v -0.643936 0.305878 0.636633 +v -0.64544 0.299505 0.633999 +v -0.64694 0.293117 0.631363 +v -0.648497 0.286654 0.628733 +v -0.650139 0.280199 0.626132 +v -0.651709 0.273708 0.623567 +v -0.653305 0.26723 0.621018 +v -0.654935 0.260686 0.618485 +v -0.656596 0.25415 0.615959 +v -0.65829 0.24752 0.613498 +v -0.660003 0.240887 0.611042 +v -0.661675 0.234154 0.608579 +v -0.663376 0.227424 0.606125 +v -0.665112 0.220543 0.603747 +v -0.666789 0.213649 0.601352 +v -0.668467 0.206633 0.59899 +v -0.670146 0.199635 0.596639 +v -0.671775 0.19248 0.594387 +v -0.673371 0.185327 0.592122 +v -0.674938 0.178017 0.589907 +v -0.676402 0.170705 0.587654 +v -0.677867 0.163228 0.585527 +v -0.679154 0.155714 0.583331 +v -0.68046 0.148039 0.581204 +v -0.681655 0.14036 0.579045 +v -0.682742 0.132521 0.576997 +v -0.683756 0.124685 0.574939 +v -0.684719 0.116696 0.572936 +v -0.68543 0.108696 0.570861 +v -0.686153 0.100564 0.568934 +v -0.686652 0.0924341 0.566952 +v -0.687093 0.0841772 0.565047 +v -0.687336 
0.0759346 0.563087 +v -0.687524 0.0676333 0.561291 +v -0.687521 0.059345 0.559455 +v -0.687404 0.0509817 0.557708 +v -0.687088 0.0426101 0.555901 +v -0.686753 0.0341705 0.554281 +v -0.686196 0.0257604 0.552613 +v -0.68554 0.017318 0.551077 +v -0.684834 0.00887931 0.549539 +v -0.684079 0.000415337 0.548182 +v -0.683074 -0.00800036 0.546785 +v -0.682051 -0.0164466 0.545487 +v -0.680982 -0.0248732 0.54418 +v -0.679815 -0.0332339 0.543151 +v -0.678563 -0.0415824 0.542119 +v -0.677255 -0.0498925 0.541209 +v -0.675911 -0.0581968 0.540298 +v -0.674588 -0.0664785 0.5395 +v -0.673174 -0.0747559 0.53867 +v -0.671784 -0.0829672 0.537986 +v -0.670372 -0.0911686 0.537296 +v -0.669011 -0.0993008 0.536741 +v -0.667741 -0.107436 0.536182 +v -0.666476 -0.115479 0.535798 +v -0.665182 -0.123512 0.535409 +v -0.663885 -0.131451 0.535117 +v -0.6627 -0.13941 0.534835 +v -0.661458 -0.147241 0.534642 +v -0.660378 -0.155097 0.53447 +v -0.659266 -0.162863 0.534414 +v -0.658165 -0.170631 0.534356 +v -0.657072 -0.178283 0.534396 +v -0.656033 -0.185942 0.534458 +v -0.654966 -0.193481 0.534618 +v -0.653952 -0.201028 0.53478 +v -0.652951 -0.208482 0.53502 +v -0.651951 -0.215948 0.535239 +v -0.650963 -0.223297 0.535548 +v -0.649934 -0.230636 0.535852 +v -0.648915 -0.237877 0.536284 +v -0.647888 -0.245117 0.536717 +v -0.646825 -0.252261 0.537235 +v -0.645801 -0.259417 0.537772 +v -0.644787 -0.266483 0.538422 +v -0.643651 -0.273546 0.539051 +v -0.642516 -0.280515 0.539783 +v -0.641401 -0.287485 0.540508 +v -0.64026 -0.294378 0.541341 +v -0.639026 -0.301266 0.542157 +v -0.637761 -0.308061 0.543068 +v -0.636484 -0.314848 0.543938 +v -0.635186 -0.321555 0.544911 +v -0.633797 -0.328248 0.545851 +v -0.632447 -0.334888 0.546905 +v -0.630924 -0.341497 0.547922 +v -0.629468 -0.348068 0.549086 +v -0.627898 -0.354619 0.550217 +v -0.626275 -0.361094 0.55143 +v -0.624648 -0.367582 0.552664 +v -0.622967 -0.374021 0.554071 +v -0.621262 -0.380458 0.55547 +v -0.619508 -0.386838 0.55698 +v -0.61764 -0.39319 0.55846 +v -0.615788 -0.399497 0.560054 +v -0.613792 -0.405764 0.561611 +v -0.611818 -0.411994 0.563286 +v -0.609722 -0.418191 0.564927 +v -0.607616 -0.424365 0.566694 +v -0.605394 -0.430507 0.568446 +v -0.603217 -0.436629 0.570311 +v -0.600898 -0.442709 0.572129 +v -0.598599 -0.448789 0.574047 +v -0.59612 -0.454819 0.575914 +v -0.593432 -0.460844 0.578666 +v -0.590607 -0.466817 0.581377 +v -0.587824 -0.472843 0.584251 +v -0.584744 -0.478768 0.587029 +v -0.609831 0.421934 0.709313 +v -0.611641 0.416277 0.705467 +v -0.613462 0.410638 0.701615 +v -0.615192 0.404926 0.698039 +v -0.616862 0.399211 0.694436 +v -0.618448 0.393423 0.691052 +v -0.620048 0.387613 0.687671 +v -0.621537 0.381727 0.684476 +v -0.622995 0.375845 0.681271 +v -0.624444 0.369904 0.678249 +v -0.625821 0.363981 0.675204 +v -0.627229 0.358001 0.67232 +v -0.628611 0.352002 0.669416 +v -0.62996 0.345928 0.666623 +v -0.631351 0.33985 0.663841 +v -0.6327 0.333706 0.661148 +v -0.634105 0.327567 0.658472 +v -0.635472 0.321385 0.655873 +v -0.636853 0.315197 0.653278 +v -0.638213 0.30897 0.65071 +v -0.639662 0.302754 0.648175 +v -0.641063 0.296452 0.645639 +v -0.642543 0.290135 0.643125 +v -0.644039 0.283749 0.640617 +v -0.645638 0.277369 0.638141 +v -0.647205 0.270985 0.635683 +v -0.648809 0.264606 0.633242 +v -0.650421 0.258131 0.630792 +v -0.652023 0.251703 0.628361 +v -0.653669 0.245143 0.625959 +v -0.65536 0.238591 0.623578 +v -0.657114 0.23194 0.621237 +v -0.658855 0.225287 0.61889 +v -0.66062 0.218494 0.616602 +v -0.662287 0.211682 0.614273 +v -0.66402 0.204753 0.612024 +v 
-0.665707 0.197811 0.609763 +v -0.667368 0.190711 0.607579 +v -0.669048 0.183616 0.605397 +v -0.670654 0.176352 0.603259 +v -0.672125 0.169074 0.601083 +v -0.673663 0.161624 0.599019 +v -0.675075 0.154144 0.596902 +v -0.676393 0.146501 0.594849 +v -0.677617 0.138855 0.592759 +v -0.678761 0.131041 0.590754 +v -0.679788 0.123229 0.58872 +v -0.680834 0.115255 0.586791 +v -0.681659 0.10727 0.584791 +v -0.682405 0.0991584 0.582884 +v -0.682971 0.0910407 0.580921 +v -0.68346 0.0828194 0.579062 +v -0.683731 0.0746316 0.577164 +v -0.683955 0.0663527 0.57537 +v -0.68398 0.0580828 0.573532 +v -0.684042 0.049723 0.57184 +v -0.683832 0.0413913 0.570085 +v -0.683499 0.0329676 0.568426 +v -0.683036 0.0245649 0.566748 +v -0.682518 0.0161105 0.565202 +v -0.681765 0.00770012 0.563614 +v -0.681068 -0.000755874 0.562198 +v -0.680124 -0.00917608 0.560727 +v -0.679206 -0.0176252 0.559418 +v -0.678187 -0.0260358 0.558071 +v -0.677037 -0.0343867 0.556928 +v -0.675805 -0.0427302 0.555767 +v -0.674594 -0.0510544 0.554753 +v -0.673271 -0.0593641 0.553718 +v -0.671885 -0.067627 0.552818 +v -0.6705 -0.0758974 0.551907 +v -0.669151 -0.0841203 0.551121 +v -0.667833 -0.0923182 0.55031 +v -0.666558 -0.100452 0.549645 +v -0.665266 -0.108578 0.54899 +v -0.663969 -0.11661 0.548509 +v -0.66272 -0.124654 0.548026 +v -0.661467 -0.132594 0.547637 +v -0.660262 -0.140543 0.547245 +v -0.659033 -0.148371 0.546946 +v -0.657952 -0.156226 0.546667 +v -0.656803 -0.163983 0.546494 +v -0.655722 -0.171746 0.546325 +v -0.65463 -0.179401 0.54626 +v -0.653611 -0.187069 0.546209 +v -0.652553 -0.194605 0.546265 +v -0.651525 -0.202142 0.546316 +v -0.650494 -0.209588 0.546434 +v -0.649526 -0.217041 0.546547 +v -0.648498 -0.224387 0.54674 +v -0.647502 -0.231722 0.546948 +v -0.646514 -0.238958 0.547272 +v -0.64552 -0.246192 0.547595 +v -0.644559 -0.253336 0.548027 +v -0.643491 -0.260478 0.548443 +v -0.642442 -0.267538 0.548984 +v -0.641336 -0.274595 0.549518 +v -0.640291 -0.281569 0.550154 +v -0.639155 -0.288526 0.55077 +v -0.638004 -0.295394 0.551479 +v -0.6368 -0.302259 0.552184 +v -0.635601 -0.30904 0.553002 +v -0.634296 -0.315788 0.553757 +v -0.633006 -0.322466 0.554626 +v -0.631593 -0.329124 0.555464 +v -0.630157 -0.335717 0.556422 +v -0.628653 -0.342288 0.557361 +v -0.627204 -0.348815 0.558439 +v -0.625664 -0.355325 0.55947 +v -0.624036 -0.361762 0.560615 +v -0.622394 -0.368189 0.561769 +v -0.620738 -0.374572 0.563108 +v -0.619023 -0.380929 0.564438 +v -0.617263 -0.387241 0.565882 +v -0.615396 -0.393525 0.567299 +v -0.613561 -0.399758 0.568851 +v -0.611556 -0.405948 0.570351 +v -0.609602 -0.412094 0.571993 +v -0.60749 -0.41819 0.573582 +v -0.605384 -0.424248 0.575316 +v -0.603184 -0.430287 0.577029 +v -0.60102 -0.436285 0.578846 +v -0.59876 -0.442234 0.580634 +v -0.596471 -0.448185 0.582534 +v -0.594022 -0.454079 0.584387 +v -0.591296 -0.459937 0.587091 +v -0.588497 -0.465778 0.589778 +v -0.585698 -0.47163 0.592584 +v -0.582709 -0.477433 0.595333 +v -0.606699 0.416602 0.717973 +v -0.608502 0.411084 0.714284 +v -0.61027 0.405572 0.71057 +v -0.611922 0.399994 0.707131 +v -0.613559 0.394398 0.703681 +v -0.615106 0.388714 0.700444 +v -0.616621 0.38303 0.697191 +v -0.618085 0.377274 0.694162 +v -0.619468 0.371519 0.691105 +v -0.620854 0.365692 0.68823 +v -0.622182 0.359878 0.685334 +v -0.623497 0.353991 0.682588 +v -0.624805 0.348105 0.679836 +v -0.62606 0.342138 0.677167 +v -0.62733 0.336158 0.674501 +v -0.628633 0.330108 0.671959 +v -0.629907 0.324083 0.669403 +v -0.63112 0.317999 0.6669 +v -0.632445 0.311903 0.66444 +v -0.633772 0.305744 
0.662025 +v -0.635101 0.299614 0.659606 +v -0.636445 0.29338 0.657179 +v -0.637867 0.28714 0.654777 +v -0.639341 0.280828 0.652395 +v -0.64081 0.27452 0.65001 +v -0.642325 0.268223 0.647631 +v -0.643914 0.261912 0.64527 +v -0.645503 0.255539 0.642932 +v -0.647097 0.249213 0.640617 +v -0.648737 0.242747 0.638294 +v -0.650432 0.236275 0.635992 +v -0.65212 0.229687 0.63372 +v -0.653864 0.223101 0.631472 +v -0.655573 0.216391 0.629232 +v -0.65736 0.209684 0.62702 +v -0.659097 0.202806 0.624864 +v -0.660845 0.195931 0.622719 +v -0.662533 0.188885 0.620599 +v -0.664225 0.181842 0.618479 +v -0.665893 0.174631 0.616444 +v -0.667468 0.167402 0.614373 +v -0.669 0.159987 0.612354 +v -0.670449 0.152542 0.610297 +v -0.671857 0.144933 0.608336 +v -0.673141 0.137316 0.606338 +v -0.674426 0.129542 0.604402 +v -0.675518 0.121753 0.602408 +v -0.676531 0.113794 0.600516 +v -0.677377 0.105835 0.59857 +v -0.67821 0.0977512 0.596699 +v -0.678858 0.0897022 0.594792 +v -0.679487 0.0815212 0.593015 +v -0.679814 0.0733502 0.591156 +v -0.680203 0.0650902 0.589403 +v -0.680268 0.0568354 0.587564 +v -0.680302 0.0484971 0.585875 +v -0.680152 0.0401776 0.584132 +v -0.67996 0.0317997 0.582499 +v -0.679551 0.0234086 0.580798 +v -0.679119 0.014963 0.579238 +v -0.678465 0.00653485 0.577628 +v -0.67777 -0.00190114 0.57615 +v -0.676898 -0.0103141 0.57463 +v -0.676062 -0.0187541 0.57328 +v -0.675019 -0.0271437 0.571892 +v -0.674 -0.0355117 0.570653 +v -0.672864 -0.0438616 0.569393 +v -0.671775 -0.052194 0.568262 +v -0.670465 -0.0605046 0.567091 +v -0.669202 -0.068786 0.566128 +v -0.66784 -0.0770409 0.565133 +v -0.666534 -0.0852437 0.564222 +v -0.665266 -0.0934561 0.563314 +v -0.663986 -0.101574 0.562548 +v -0.662693 -0.109696 0.561791 +v -0.661359 -0.117724 0.561193 +v -0.660118 -0.125767 0.560609 +v -0.658873 -0.133702 0.560109 +v -0.657672 -0.141643 0.559624 +v -0.656458 -0.14948 0.55922 +v -0.65533 -0.157332 0.558829 +v -0.654181 -0.165081 0.558534 +v -0.653044 -0.172841 0.558264 +v -0.651928 -0.180491 0.558087 +v -0.650901 -0.188155 0.557926 +v -0.649875 -0.195693 0.557863 +v -0.648861 -0.203232 0.557803 +v -0.647785 -0.210662 0.557791 +v -0.646841 -0.218117 0.55779 +v -0.645812 -0.225447 0.55787 +v -0.644843 -0.232787 0.557963 +v -0.643841 -0.240012 0.558168 +v -0.64286 -0.247237 0.558379 +v -0.641885 -0.254361 0.558693 +v -0.640834 -0.261498 0.559009 +v -0.639783 -0.268554 0.559436 +v -0.638711 -0.275607 0.559853 +v -0.637605 -0.282556 0.560362 +v -0.636487 -0.289498 0.560868 +v -0.635363 -0.296349 0.561486 +v -0.634172 -0.303189 0.562088 +v -0.632987 -0.309945 0.562816 +v -0.631671 -0.316664 0.563463 +v -0.630348 -0.323298 0.56423 +v -0.628991 -0.329934 0.564987 +v -0.627636 -0.3365 0.565879 +v -0.62619 -0.343054 0.566752 +v -0.624703 -0.349526 0.567707 +v -0.623111 -0.355983 0.568637 +v -0.621546 -0.362373 0.569725 +v -0.619861 -0.368743 0.570799 +v -0.618204 -0.375049 0.572072 +v -0.616456 -0.381339 0.57332 +v -0.614733 -0.387583 0.574704 +v -0.612873 -0.393795 0.576039 +v -0.610984 -0.399932 0.577518 +v -0.609034 -0.406053 0.57899 +v -0.607087 -0.412095 0.580583 +v -0.604986 -0.418099 0.582137 +v -0.602924 -0.424063 0.583848 +v -0.600721 -0.429984 0.585515 +v -0.598582 -0.435841 0.587304 +v -0.596278 -0.441649 0.589039 +v -0.594008 -0.447464 0.590915 +v -0.591572 -0.453226 0.592738 +v -0.588927 -0.458964 0.595438 +v -0.586159 -0.464659 0.598103 +v -0.583348 -0.470348 0.600851 +v -0.580449 -0.476006 0.603564 +v -0.603781 0.41133 0.726386 +v -0.605514 0.405982 0.722851 +v -0.607193 0.400597 0.719291 +v -0.608761 
0.395132 0.716 +v -0.610343 0.389666 0.712726 +v -0.611835 0.384109 0.709668 +v -0.613264 0.37855 0.706579 +v -0.614646 0.372913 0.703707 +v -0.61595 0.367271 0.700811 +v -0.617251 0.361559 0.698096 +v -0.618524 0.355857 0.695373 +v -0.619754 0.350078 0.692769 +v -0.620994 0.344296 0.690161 +v -0.62214 0.338439 0.687638 +v -0.623357 0.33258 0.685135 +v -0.624499 0.326645 0.682709 +v -0.625675 0.320708 0.680286 +v -0.626795 0.314718 0.677923 +v -0.628023 0.308719 0.675602 +v -0.629259 0.302643 0.673305 +v -0.630503 0.296583 0.671018 +v -0.631799 0.290425 0.668738 +v -0.633122 0.284256 0.666468 +v -0.634497 0.278011 0.664171 +v -0.635938 0.271789 0.661899 +v -0.637364 0.265595 0.659591 +v -0.638859 0.259405 0.657318 +v -0.64044 0.25314 0.655062 +v -0.642049 0.246907 0.652836 +v -0.643696 0.240536 0.650593 +v -0.645381 0.234163 0.648368 +v -0.647131 0.227676 0.646184 +v -0.648865 0.221189 0.643999 +v -0.650556 0.214546 0.641832 +v -0.652319 0.20791 0.639693 +v -0.654094 0.201108 0.637625 +v -0.655864 0.194278 0.635541 +v -0.657592 0.187286 0.633507 +v -0.659309 0.180304 0.63147 +v -0.661039 0.173125 0.629517 +v -0.662621 0.165936 0.627503 +v -0.664267 0.158573 0.625596 +v -0.665789 0.151198 0.623646 +v -0.667251 0.143616 0.621742 +v -0.668604 0.136025 0.6198 +v -0.670054 0.128276 0.617974 +v -0.671112 0.120492 0.616013 +v -0.672167 0.112558 0.614156 +v -0.673113 0.104631 0.612272 +v -0.674066 0.0966159 0.610507 +v -0.674807 0.0885732 0.608661 +v -0.675508 0.0804105 0.606907 +v -0.675943 0.0722539 0.60507 +v -0.676349 0.0639828 0.603336 +v -0.676513 0.0557262 0.601531 +v -0.676746 0.047386 0.599868 +v -0.676614 0.0390661 0.598114 +v -0.676516 0.0306824 0.596492 +v -0.676125 0.0223182 0.594788 +v -0.675804 0.0138603 0.59321 +v -0.675139 0.00543674 0.591543 +v -0.674564 -0.00300793 0.590043 +v -0.673757 -0.0114288 0.588484 +v -0.672892 -0.0198496 0.587059 +v -0.6719 -0.0282393 0.585593 +v -0.671069 -0.0366385 0.58431 +v -0.669954 -0.0449973 0.58296 +v -0.668862 -0.0533243 0.581739 +v -0.667671 -0.0616452 0.580495 +v -0.666497 -0.0699353 0.579403 +v -0.665236 -0.0782137 0.578287 +v -0.664019 -0.0864461 0.577284 +v -0.662748 -0.0946561 0.576275 +v -0.661498 -0.102787 0.575429 +v -0.660154 -0.1109 0.574569 +v -0.658831 -0.11893 0.573866 +v -0.657583 -0.126965 0.573179 +v -0.656288 -0.134888 0.572572 +v -0.655072 -0.14283 0.571975 +v -0.653855 -0.150668 0.571477 +v -0.652705 -0.158527 0.570968 +v -0.65152 -0.166287 0.570566 +v -0.650489 -0.174058 0.570194 +v -0.649384 -0.181701 0.569903 +v -0.648326 -0.189348 0.569624 +v -0.647259 -0.196873 0.569422 +v -0.646255 -0.204409 0.569245 +v -0.645265 -0.211849 0.569125 +v -0.644242 -0.219284 0.568998 +v -0.643259 -0.226609 0.568967 +v -0.642244 -0.23393 0.568931 +v -0.641196 -0.241143 0.569013 +v -0.640198 -0.24837 0.569107 +v -0.639295 -0.255514 0.56933 +v -0.638333 -0.26266 0.569554 +v -0.637244 -0.269694 0.569852 +v -0.636079 -0.27672 0.570136 +v -0.635055 -0.283663 0.570547 +v -0.633958 -0.290596 0.570947 +v -0.632833 -0.297419 0.571457 +v -0.631663 -0.304232 0.57196 +v -0.630482 -0.310965 0.572582 +v -0.629185 -0.317669 0.573126 +v -0.627915 -0.324278 0.573816 +v -0.626568 -0.330884 0.57448 +v -0.625172 -0.337399 0.575264 +v -0.623688 -0.343903 0.576011 +v -0.622257 -0.350339 0.57691 +v -0.620663 -0.356746 0.577771 +v -0.619081 -0.36307 0.57877 +v -0.617431 -0.369386 0.579766 +v -0.61575 -0.375634 0.580962 +v -0.613982 -0.381865 0.582147 +v -0.612218 -0.388024 0.583451 +v -0.610421 -0.394171 0.584752 +v -0.608603 -0.400236 0.586184 +v -0.606646 
-0.406262 0.587569 +v -0.604653 -0.412183 0.589099 +v -0.602594 -0.418089 0.590611 +v -0.60056 -0.423934 0.592304 +v -0.598361 -0.429736 0.593937 +v -0.596231 -0.435466 0.595711 +v -0.593917 -0.441148 0.597422 +v -0.591641 -0.446815 0.59929 +v -0.589244 -0.452449 0.601143 +v -0.586578 -0.458006 0.603774 +v -0.583865 -0.463545 0.606387 +v -0.581093 -0.469085 0.609089 +v -0.578212 -0.474598 0.611756 +v -0.600897 0.406061 0.734807 +v -0.602561 0.40086 0.731428 +v -0.604205 0.395624 0.728034 +v -0.605728 0.390292 0.724919 +v -0.60721 0.384956 0.721791 +v -0.608607 0.379532 0.718892 +v -0.609983 0.374104 0.715985 +v -0.611287 0.368575 0.713284 +v -0.612495 0.363043 0.710545 +v -0.6137 0.357452 0.707984 +v -0.614852 0.351859 0.70541 +v -0.615971 0.346184 0.702928 +v -0.617086 0.340508 0.700447 +v -0.61815 0.33475 0.698074 +v -0.619215 0.328995 0.695706 +v -0.620277 0.323125 0.693414 +v -0.6213 0.317276 0.691109 +v -0.622297 0.31138 0.688869 +v -0.623412 0.305471 0.686672 +v -0.624549 0.299483 0.684497 +v -0.625657 0.29353 0.682317 +v -0.626861 0.287453 0.68017 +v -0.628045 0.28136 0.678014 +v -0.629266 0.275184 0.675766 +v -0.630631 0.269037 0.673585 +v -0.63203 0.263001 0.671401 +v -0.633459 0.256944 0.66923 +v -0.634929 0.250779 0.667009 +v -0.636486 0.244636 0.66484 +v -0.638095 0.238329 0.662651 +v -0.63978 0.232035 0.660507 +v -0.641512 0.225621 0.658377 +v -0.643278 0.219219 0.656264 +v -0.645017 0.212646 0.654194 +v -0.646787 0.206083 0.652146 +v -0.648591 0.199326 0.650152 +v -0.650368 0.192568 0.648143 +v -0.65216 0.185637 0.646216 +v -0.65396 0.178709 0.644286 +v -0.655698 0.171569 0.642389 +v -0.657417 0.164429 0.640489 +v -0.659063 0.157103 0.638648 +v -0.660633 0.14977 0.636776 +v -0.662179 0.142258 0.634964 +v -0.663617 0.134699 0.63309 +v -0.66502 0.126959 0.631308 +v -0.666275 0.119221 0.629476 +v -0.667472 0.111358 0.627712 +v -0.668498 0.103491 0.625896 +v -0.669488 0.0954716 0.624172 +v -0.670278 0.087446 0.622376 +v -0.671032 0.0793075 0.620646 +v -0.671566 0.0711637 0.618844 +v -0.672129 0.062899 0.617168 +v -0.672427 0.0546351 0.615412 +v -0.672659 0.0463061 0.613736 +v -0.672676 0.0379896 0.611996 +v -0.672659 0.0296032 0.610373 +v -0.672433 0.0212353 0.608689 +v -0.672154 0.0128306 0.607097 +v -0.671684 0.00440977 0.605441 +v -0.671123 -0.00403503 0.603885 +v -0.670433 -0.0124598 0.602304 +v -0.669742 -0.0208924 0.600838 +v -0.668842 -0.0292881 0.599319 +v -0.667928 -0.0376818 0.597939 +v -0.666906 -0.0460543 0.596524 +v -0.665855 -0.0543993 0.595219 +v -0.664778 -0.0627498 0.593908 +v -0.663649 -0.0710786 0.592685 +v -0.662428 -0.0793769 0.591453 +v -0.6612 -0.0876 0.590349 +v -0.659931 -0.0958148 0.589247 +v -0.65861 -0.103939 0.588287 +v -0.657363 -0.112078 0.587365 +v -0.656083 -0.120109 0.586557 +v -0.654786 -0.128138 0.585753 +v -0.653535 -0.136077 0.585044 +v -0.652329 -0.144013 0.584356 +v -0.651177 -0.151833 0.583776 +v -0.64999 -0.15969 0.58312 +v -0.648861 -0.167465 0.582536 +v -0.647712 -0.175224 0.582031 +v -0.646565 -0.182848 0.581621 +v -0.645508 -0.190493 0.581225 +v -0.644425 -0.198019 0.580907 +v -0.643437 -0.205563 0.580616 +v -0.642423 -0.212987 0.580375 +v -0.641409 -0.220415 0.580127 +v -0.640408 -0.227736 0.579973 +v -0.639477 -0.235068 0.579834 +v -0.6385 -0.242287 0.579833 +v -0.637482 -0.249513 0.579821 +v -0.636466 -0.256638 0.579905 +v -0.635424 -0.263773 0.580004 +v -0.634377 -0.270797 0.5802 +v -0.63331 -0.277819 0.580386 +v -0.632249 -0.284737 0.580673 +v -0.631135 -0.29165 0.580952 +v -0.63004 -0.298453 0.581362 +v -0.628842 -0.305239 
0.581746 +v -0.627656 -0.311945 0.582252 +v -0.626382 -0.318625 0.582693 +v -0.6251 -0.325197 0.583286 +v -0.62372 -0.33176 0.583843 +v -0.622339 -0.33823 0.584523 +v -0.620874 -0.34469 0.585182 +v -0.619402 -0.351071 0.585998 +v -0.617786 -0.357424 0.586773 +v -0.616186 -0.363676 0.587684 +v -0.614535 -0.369931 0.588599 +v -0.612883 -0.376128 0.589746 +v -0.61117 -0.382311 0.590875 +v -0.609454 -0.388406 0.592123 +v -0.607641 -0.394478 0.593347 +v -0.60585 -0.400446 0.594714 +v -0.603937 -0.406383 0.596046 +v -0.602018 -0.41222 0.597551 +v -0.599966 -0.418021 0.599007 +v -0.597892 -0.42373 0.600658 +v -0.595745 -0.429421 0.602274 +v -0.593591 -0.435013 0.60401 +v -0.591359 -0.440577 0.605726 +v -0.589084 -0.446106 0.607607 +v -0.586676 -0.451585 0.60944 +v -0.584063 -0.456979 0.612021 +v -0.58139 -0.462356 0.614581 +v -0.578638 -0.46775 0.617241 +v -0.575807 -0.473124 0.619881 +v -0.598129 0.40088 0.742928 +v -0.599766 0.395813 0.739752 +v -0.601364 0.390734 0.736559 +v -0.602784 0.38555 0.733621 +v -0.604172 0.380363 0.730678 +v -0.605489 0.375063 0.72796 +v -0.606771 0.36977 0.725225 +v -0.607979 0.364363 0.722694 +v -0.609049 0.358969 0.720122 +v -0.610094 0.353485 0.717705 +v -0.611173 0.348003 0.715295 +v -0.612167 0.342437 0.712973 +v -0.613161 0.336861 0.710654 +v -0.614148 0.331193 0.708437 +v -0.615071 0.325529 0.706193 +v -0.615977 0.319763 0.704027 +v -0.616944 0.313999 0.701888 +v -0.617892 0.308171 0.699802 +v -0.618843 0.302343 0.697721 +v -0.619841 0.296444 0.695686 +v -0.62083 0.290582 0.69366 +v -0.621833 0.284613 0.691622 +v -0.622897 0.278611 0.689606 +v -0.624007 0.272584 0.687526 +v -0.625221 0.266571 0.685498 +v -0.626493 0.260613 0.683307 +v -0.627869 0.254654 0.681166 +v -0.629366 0.248613 0.678997 +v -0.630917 0.24258 0.676872 +v -0.632507 0.236372 0.674721 +v -0.634179 0.230169 0.672605 +v -0.635877 0.223808 0.670508 +v -0.637662 0.217483 0.668462 +v -0.639466 0.211001 0.66648 +v -0.641243 0.204516 0.66448 +v -0.643049 0.197821 0.66256 +v -0.644878 0.191121 0.660655 +v -0.646665 0.184249 0.658787 +v -0.648544 0.177375 0.656951 +v -0.650323 0.17028 0.655144 +v -0.652052 0.16318 0.653319 +v -0.6538 0.155882 0.651564 +v -0.655504 0.148589 0.649796 +v -0.657155 0.141107 0.648085 +v -0.658634 0.13361 0.646305 +v -0.660129 0.125934 0.644614 +v -0.661441 0.118254 0.642862 +v -0.662705 0.110402 0.641174 +v -0.663834 0.10254 0.639433 +v -0.664961 0.0945304 0.637767 +v -0.665805 0.0865007 0.636003 +v -0.666676 0.0783695 0.634337 +v -0.667347 0.0702189 0.632602 +v -0.667991 0.0619478 0.630948 +v -0.668353 0.0536814 0.629201 +v -0.668793 0.0453395 0.62758 +v -0.668851 0.037021 0.625848 +v -0.668915 0.0286318 0.624209 +v -0.668706 0.0202623 0.622496 +v -0.668643 0.0118364 0.620929 +v -0.668301 0.00343107 0.619286 +v -0.667889 -0.00502687 0.617717 +v -0.667201 -0.0134644 0.616067 +v -0.666585 -0.0219063 0.614544 +v -0.665784 -0.0303108 0.61298 +v -0.665021 -0.0387319 0.611538 +v -0.66401 -0.047128 0.610048 +v -0.662994 -0.0554784 0.608683 +v -0.661922 -0.0638601 0.60727 +v -0.660808 -0.0722133 0.605924 +v -0.659643 -0.0805513 0.60457 +v -0.658491 -0.088801 0.603408 +v -0.657243 -0.0970294 0.602221 +v -0.655995 -0.105171 0.601186 +v -0.654702 -0.113313 0.600164 +v -0.653406 -0.121355 0.59926 +v -0.652072 -0.129397 0.598341 +v -0.650807 -0.137323 0.597543 +v -0.649643 -0.145256 0.59677 +v -0.648443 -0.153076 0.596075 +v -0.647243 -0.160948 0.595296 +v -0.646095 -0.168712 0.594593 +v -0.644995 -0.176483 0.593895 +v -0.643848 -0.184118 0.593346 +v -0.642713 -0.191755 0.592816 +v 
-0.641607 -0.199265 0.592382 +v -0.64062 -0.206804 0.591953 +v -0.639622 -0.214231 0.591603 +v -0.638668 -0.221666 0.591259 +v -0.637646 -0.228976 0.590996 +v -0.636701 -0.236317 0.59076 +v -0.635679 -0.24354 0.59062 +v -0.63463 -0.25075 0.590484 +v -0.633611 -0.257858 0.590452 +v -0.632607 -0.264987 0.590436 +v -0.631592 -0.272001 0.590525 +v -0.630533 -0.279017 0.590595 +v -0.629471 -0.285923 0.590768 +v -0.628411 -0.292826 0.590943 +v -0.627294 -0.299602 0.591232 +v -0.626091 -0.306367 0.591483 +v -0.624895 -0.313033 0.591863 +v -0.623644 -0.319676 0.592204 +v -0.622326 -0.326207 0.592673 +v -0.620907 -0.332724 0.593097 +v -0.619504 -0.339151 0.593693 +v -0.618044 -0.345572 0.594271 +v -0.616598 -0.351906 0.595007 +v -0.615032 -0.358229 0.595717 +v -0.613496 -0.364439 0.596584 +v -0.61186 -0.370634 0.597431 +v -0.610203 -0.376767 0.598494 +v -0.608456 -0.382885 0.599534 +v -0.606794 -0.388909 0.600732 +v -0.605042 -0.394908 0.601889 +v -0.60328 -0.400793 0.603199 +v -0.601321 -0.406619 0.604433 +v -0.59943 -0.412365 0.605899 +v -0.597374 -0.418069 0.607311 +v -0.595313 -0.423654 0.608916 +v -0.593195 -0.429231 0.610491 +v -0.59105 -0.434673 0.612227 +v -0.588777 -0.440076 0.61391 +v -0.586557 -0.445419 0.615819 +v -0.584203 -0.45072 0.617687 +v -0.581632 -0.455953 0.620208 +v -0.578969 -0.46116 0.622697 +v -0.576289 -0.466406 0.625294 +v -0.57354 -0.471622 0.627857 +v -0.595584 0.395701 0.751126 +v -0.597104 0.39077 0.748119 +v -0.598544 0.385842 0.745086 +v -0.599915 0.380809 0.742352 +v -0.601201 0.375768 0.739585 +v -0.602414 0.370601 0.73704 +v -0.603531 0.36543 0.734457 +v -0.604625 0.360152 0.7321 +v -0.605599 0.354889 0.729703 +v -0.606529 0.349516 0.727434 +v -0.607444 0.344144 0.725163 +v -0.608299 0.338679 0.722991 +v -0.60911 0.333215 0.720806 +v -0.609917 0.327639 0.718701 +v -0.610729 0.322065 0.716599 +v -0.611487 0.316406 0.714559 +v -0.612303 0.310738 0.712541 +v -0.613079 0.304974 0.710554 +v -0.613867 0.299242 0.708582 +v -0.61464 0.293435 0.706641 +v -0.61547 0.287671 0.704742 +v -0.616318 0.281778 0.702838 +v -0.617256 0.275866 0.700961 +v -0.618284 0.269972 0.699054 +v -0.619292 0.264091 0.697137 +v -0.620486 0.258207 0.694961 +v -0.62187 0.252334 0.692888 +v -0.623309 0.246392 0.69073 +v -0.62485 0.240466 0.688632 +v -0.626379 0.234355 0.686501 +v -0.628029 0.228251 0.684431 +v -0.629706 0.221966 0.682374 +v -0.631512 0.21569 0.680388 +v -0.633288 0.209281 0.67844 +v -0.635146 0.202879 0.676535 +v -0.636968 0.196243 0.6747 +v -0.638829 0.189607 0.672895 +v -0.640688 0.182784 0.671123 +v -0.642529 0.175963 0.669336 +v -0.644376 0.168917 0.667639 +v -0.646216 0.161867 0.665938 +v -0.647985 0.154599 0.664245 +v -0.649752 0.147333 0.662547 +v -0.651498 0.139908 0.660963 +v -0.653051 0.132484 0.659302 +v -0.654636 0.124858 0.657692 +v -0.656044 0.117214 0.656016 +v -0.657467 0.109389 0.654438 +v -0.658658 0.101539 0.652773 +v -0.65994 0.0935488 0.651182 +v -0.660886 0.085533 0.649458 +v -0.661883 0.0773936 0.647869 +v -0.662621 0.069252 0.646186 +v -0.663356 0.0610054 0.644561 +v -0.663858 0.0527433 0.642849 +v -0.664366 0.044404 0.641245 +v -0.664593 0.0360743 0.639563 +v -0.664807 0.0276872 0.63794 +v -0.664785 0.0193053 0.636239 +v -0.664804 0.010871 0.634666 +v -0.664515 0.00246109 0.633014 +v -0.66425 -0.00598454 0.631433 +v -0.663678 -0.0144247 0.629757 +v -0.663183 -0.0228742 0.628203 +v -0.662388 -0.0312818 0.626573 +v -0.66174 -0.0397277 0.625086 +v -0.660749 -0.0481221 0.623519 +v -0.659843 -0.0565133 0.622075 +v -0.658778 -0.0649061 0.620562 +v -0.657747 
-0.0732954 0.61911 +v -0.656642 -0.0816653 0.617648 +v -0.655468 -0.0899421 0.616399 +v -0.654223 -0.098187 0.615136 +v -0.652949 -0.106336 0.613994 +v -0.651726 -0.11449 0.612906 +v -0.650488 -0.122563 0.611909 +v -0.649207 -0.130596 0.610927 +v -0.647914 -0.138516 0.610023 +v -0.646735 -0.14646 0.609142 +v -0.645514 -0.15428 0.608328 +v -0.644312 -0.162166 0.607423 +v -0.643148 -0.169931 0.606594 +v -0.64205 -0.177704 0.605782 +v -0.640909 -0.185353 0.60505 +v -0.639863 -0.193016 0.604391 +v -0.638724 -0.200515 0.603836 +v -0.637731 -0.208043 0.603288 +v -0.636671 -0.215457 0.602802 +v -0.63568 -0.22289 0.602331 +v -0.634601 -0.230205 0.601946 +v -0.633622 -0.237536 0.60159 +v -0.632585 -0.244746 0.601332 +v -0.631599 -0.251962 0.60109 +v -0.630597 -0.259066 0.600945 +v -0.629565 -0.266173 0.600812 +v -0.628503 -0.273173 0.600767 +v -0.627471 -0.280178 0.600718 +v -0.626393 -0.28706 0.600771 +v -0.625296 -0.293941 0.600819 +v -0.624199 -0.300693 0.60098 +v -0.623011 -0.307432 0.601109 +v -0.621848 -0.314064 0.601394 +v -0.620569 -0.320666 0.601599 +v -0.619246 -0.327158 0.601948 +v -0.617904 -0.333654 0.602288 +v -0.616496 -0.340036 0.602801 +v -0.615078 -0.346417 0.603314 +v -0.613645 -0.352718 0.60398 +v -0.612053 -0.358983 0.60459 +v -0.610508 -0.365129 0.605379 +v -0.608863 -0.371263 0.606162 +v -0.607237 -0.377348 0.60716 +v -0.605554 -0.383417 0.608133 +v -0.603878 -0.389352 0.609246 +v -0.602077 -0.395255 0.610323 +v -0.600292 -0.401033 0.61154 +v -0.598424 -0.406792 0.612719 +v -0.596542 -0.412436 0.614145 +v -0.594514 -0.418041 0.615517 +v -0.592478 -0.423511 0.617085 +v -0.590393 -0.428964 0.618628 +v -0.588292 -0.434269 0.620362 +v -0.586057 -0.439532 0.622057 +v -0.583818 -0.444673 0.623961 +v -0.581518 -0.449793 0.625846 +v -0.578986 -0.454867 0.628322 +v -0.576384 -0.459909 0.630749 +v -0.573777 -0.464989 0.633271 +v -0.57105 -0.470025 0.635738 +v -0.593108 0.390628 0.758947 +v -0.594564 0.385853 0.756159 +v -0.595877 0.38107 0.753331 +v -0.597059 0.376176 0.750786 +v -0.59825 0.371276 0.748244 +v -0.599333 0.36625 0.745905 +v -0.600365 0.361222 0.743549 +v -0.601291 0.356071 0.741352 +v -0.602185 0.350944 0.739145 +v -0.603026 0.345681 0.73707 +v -0.603771 0.340418 0.734957 +v -0.604488 0.335046 0.732922 +v -0.60518 0.329681 0.730881 +v -0.605842 0.324195 0.728915 +v -0.606446 0.318698 0.726939 +v -0.607017 0.313126 0.724999 +v -0.60763 0.307541 0.723068 +v -0.608203 0.30192 0.721207 +v -0.60886 0.296295 0.71939 +v -0.609478 0.290565 0.717573 +v -0.610159 0.28488 0.715804 +v -0.610809 0.279065 0.714053 +v -0.611553 0.273255 0.712353 +v -0.612346 0.267442 0.710606 +v -0.613272 0.261634 0.708921 +v -0.614438 0.255912 0.706693 +v -0.615755 0.250181 0.704556 +v -0.617162 0.244321 0.702342 +v -0.618717 0.238486 0.700223 +v -0.62029 0.232486 0.698156 +v -0.621918 0.226479 0.696129 +v -0.623517 0.220271 0.694073 +v -0.625278 0.214088 0.692116 +v -0.627073 0.207778 0.690239 +v -0.62895 0.201479 0.688404 +v -0.630824 0.194896 0.686631 +v -0.632718 0.188312 0.684868 +v -0.634632 0.181547 0.683222 +v -0.636583 0.17479 0.681586 +v -0.638476 0.167798 0.679965 +v -0.640328 0.160789 0.678314 +v -0.64222 0.153581 0.676784 +v -0.644005 0.146383 0.675216 +v -0.645798 0.139003 0.673698 +v -0.647483 0.13161 0.672131 +v -0.649159 0.124004 0.670623 +v -0.650668 0.116382 0.669041 +v -0.652222 0.108558 0.667546 +v -0.653552 0.100714 0.665947 +v -0.65486 0.0927082 0.664411 +v -0.656 0.0846921 0.662805 +v -0.657066 0.0765462 0.661248 +v -0.657974 0.0683951 0.659622 +v -0.658841 0.0601465 0.658061 
+v -0.659492 0.0519111 0.656435 +v -0.660039 0.0435619 0.654833 +v -0.660419 0.0352093 0.65317 +v -0.660848 0.0268015 0.651607 +v -0.660931 0.0184035 0.64993 +v -0.661058 0.00995309 0.648335 +v -0.660844 0.00153647 0.646652 +v -0.660687 -0.00691584 0.645057 +v -0.660216 -0.0153348 0.643366 +v -0.659786 -0.0237983 0.641773 +v -0.659175 -0.0322473 0.64013 +v -0.658503 -0.0406992 0.638566 +v -0.657651 -0.0491212 0.636957 +v -0.656825 -0.0575533 0.635442 +v -0.655837 -0.0659829 0.633846 +v -0.654811 -0.0744034 0.632292 +v -0.653776 -0.0828248 0.630743 +v -0.652627 -0.0911214 0.629402 +v -0.651412 -0.0994119 0.628044 +v -0.650122 -0.107584 0.626817 +v -0.648881 -0.115751 0.625637 +v -0.647628 -0.1238 0.624567 +v -0.646375 -0.131858 0.623492 +v -0.645156 -0.139807 0.622507 +v -0.643884 -0.147745 0.621501 +v -0.642608 -0.15558 0.620537 +v -0.641427 -0.16348 0.619532 +v -0.64029 -0.171266 0.618607 +v -0.639173 -0.17905 0.617696 +v -0.637994 -0.186687 0.616831 +v -0.636964 -0.194359 0.615984 +v -0.635916 -0.201877 0.615289 +v -0.63483 -0.209396 0.6146 +v -0.633731 -0.216809 0.614003 +v -0.632688 -0.22424 0.613418 +v -0.631638 -0.231546 0.612925 +v -0.630657 -0.238861 0.612446 +v -0.629632 -0.246073 0.612083 +v -0.628625 -0.253281 0.611716 +v -0.627631 -0.260382 0.611445 +v -0.626537 -0.267481 0.611157 +v -0.625445 -0.274467 0.610974 +v -0.624395 -0.281458 0.610802 +v -0.62338 -0.288317 0.61074 +v -0.622318 -0.295178 0.610671 +v -0.621253 -0.301913 0.61072 +v -0.620093 -0.30863 0.610744 +v -0.61891 -0.315213 0.610862 +v -0.617642 -0.321782 0.610943 +v -0.616412 -0.328259 0.611218 +v -0.615045 -0.33471 0.611457 +v -0.613583 -0.341047 0.611864 +v -0.612185 -0.347394 0.612281 +v -0.610744 -0.353632 0.612865 +v -0.60919 -0.35985 0.613422 +v -0.60764 -0.365951 0.614138 +v -0.605997 -0.372042 0.614842 +v -0.604388 -0.378062 0.615754 +v -0.602713 -0.384066 0.616637 +v -0.601028 -0.389923 0.61767 +v -0.599209 -0.395745 0.618645 +v -0.597431 -0.401448 0.619805 +v -0.595597 -0.407133 0.620939 +v -0.593739 -0.412678 0.622287 +v -0.591789 -0.418191 0.623607 +v -0.589754 -0.423515 0.625115 +v -0.587654 -0.428818 0.626593 +v -0.585613 -0.433978 0.628296 +v -0.583428 -0.439088 0.62996 +v -0.581177 -0.444062 0.631912 +v -0.578842 -0.449006 0.63382 +v -0.576395 -0.4539 0.63624 +v -0.57385 -0.458771 0.638638 +v -0.571275 -0.463638 0.640965 +v -0.5686 -0.468472 0.643282 +v -0.590701 0.385543 0.766785 +v -0.592019 0.38091 0.7642 +v -0.593192 0.376272 0.761565 +v -0.594255 0.371515 0.759233 +v -0.595279 0.366757 0.756891 +v -0.596232 0.361871 0.754762 +v -0.597048 0.356973 0.752581 +v -0.597861 0.351945 0.750556 +v -0.598579 0.346945 0.748506 +v -0.59925 0.341803 0.746586 +v -0.599815 0.336666 0.744626 +v -0.600347 0.331401 0.742713 +v -0.600886 0.326135 0.740802 +v -0.601381 0.32074 0.73899 +v -0.601812 0.315352 0.737148 +v -0.602258 0.309851 0.735311 +v -0.602687 0.3044 0.733479 +v -0.603077 0.298876 0.731753 +v -0.603488 0.293342 0.730043 +v -0.60388 0.287716 0.728323 +v -0.604365 0.28209 0.726652 +v -0.604786 0.276371 0.725023 +v -0.605309 0.270643 0.723449 +v -0.605853 0.264911 0.72184 +v -0.606526 0.259175 0.720302 +v -0.607648 0.253602 0.718015 +v -0.608947 0.248022 0.715837 +v -0.610328 0.24225 0.713572 +v -0.611875 0.236476 0.711411 +v -0.613366 0.230565 0.709341 +v -0.615019 0.224661 0.707377 +v -0.616702 0.218538 0.705413 +v -0.618492 0.212427 0.703515 +v -0.620274 0.20621 0.701685 +v -0.622155 0.200006 0.699922 +v -0.624092 0.193469 0.69821 +v -0.626055 0.186934 0.696519 +v -0.627993 0.180224 0.694975 +v 
-0.629908 0.173511 0.693416 +v -0.6319 0.166565 0.691907 +v -0.633858 0.159624 0.690382 +v -0.635765 0.15249 0.68899 +v -0.637668 0.14535 0.687587 +v -0.639538 0.138007 0.686148 +v -0.641339 0.130661 0.684682 +v -0.643076 0.123084 0.683277 +v -0.644733 0.115492 0.681833 +v -0.64642 0.107677 0.680416 +v -0.647842 0.0998438 0.678876 +v -0.64928 0.0918366 0.67744 +v -0.65052 0.0838171 0.675917 +v -0.651773 0.0756775 0.674441 +v -0.652757 0.0675266 0.672857 +v -0.653757 0.0592881 0.671367 +v -0.654489 0.0510525 0.66979 +v -0.655205 0.0427231 0.668247 +v -0.655658 0.0343601 0.666589 +v -0.656161 0.0259501 0.66505 +v -0.656359 0.0175387 0.663401 +v -0.656596 0.00908226 0.661801 +v -0.656562 0.00064356 0.660116 +v -0.656521 -0.0078083 0.658516 +v -0.656251 -0.0162361 0.656849 +v -0.655985 -0.0247011 0.655252 +v -0.655439 -0.0331541 0.653568 +v -0.654884 -0.0416335 0.651958 +v -0.654086 -0.0500734 0.650292 +v -0.653284 -0.058519 0.648697 +v -0.652359 -0.0669917 0.647017 +v -0.651452 -0.0754626 0.645379 +v -0.650397 -0.0839024 0.643719 +v -0.649336 -0.0922312 0.64231 +v -0.64818 -0.100536 0.640881 +v -0.646978 -0.108747 0.639591 +v -0.64575 -0.116925 0.638327 +v -0.644557 -0.125008 0.637173 +v -0.643261 -0.133065 0.635999 +v -0.641975 -0.141021 0.634887 +v -0.640793 -0.148996 0.633806 +v -0.639569 -0.156861 0.632738 +v -0.638336 -0.164766 0.631605 +v -0.637125 -0.172555 0.630551 +v -0.636015 -0.180361 0.629523 +v -0.634844 -0.187996 0.62853 +v -0.633827 -0.195659 0.627578 +v -0.632705 -0.203179 0.626685 +v -0.631645 -0.210725 0.625825 +v -0.630549 -0.218132 0.625119 +v -0.629472 -0.225544 0.624415 +v -0.628325 -0.232815 0.623776 +v -0.62734 -0.240119 0.623172 +v -0.62632 -0.247333 0.622689 +v -0.625265 -0.254536 0.6222 +v -0.624233 -0.261629 0.621797 +v -0.623216 -0.268731 0.621405 +v -0.622141 -0.275707 0.621097 +v -0.621102 -0.282689 0.6208 +v -0.62008 -0.289525 0.620608 +v -0.618982 -0.296349 0.620399 +v -0.61786 -0.303045 0.620291 +v -0.616733 -0.309739 0.620163 +v -0.615581 -0.316283 0.620163 +v -0.614356 -0.322824 0.620134 +v -0.613104 -0.329265 0.620325 +v -0.611735 -0.335682 0.620473 +v -0.610346 -0.341997 0.620805 +v -0.608936 -0.348303 0.621122 +v -0.607507 -0.354488 0.621651 +v -0.605957 -0.360643 0.622135 +v -0.604434 -0.366705 0.622766 +v -0.602828 -0.372756 0.6234 +v -0.601223 -0.378701 0.624225 +v -0.599567 -0.38464 0.625034 +v -0.59788 -0.390422 0.625981 +v -0.596153 -0.396199 0.626925 +v -0.594406 -0.401822 0.628035 +v -0.592525 -0.407409 0.629079 +v -0.590635 -0.412835 0.630323 +v -0.588719 -0.418253 0.63155 +v -0.58675 -0.423442 0.633026 +v -0.58474 -0.428622 0.634497 +v -0.582659 -0.433596 0.636126 +v -0.580548 -0.438556 0.637738 +v -0.57829 -0.443364 0.639731 +v -0.575993 -0.448152 0.641715 +v -0.57353 -0.452848 0.644067 +v -0.571061 -0.457539 0.646412 +v -0.568606 -0.462227 0.648615 +v -0.566052 -0.466881 0.650805 +v -0.58844 0.380563 0.774184 +v -0.589575 0.376087 0.771856 +v -0.590604 0.371611 0.769492 +v -0.591537 0.367006 0.767434 +v -0.592353 0.362384 0.765337 +v -0.593123 0.357628 0.763386 +v -0.593782 0.352873 0.761397 +v -0.594444 0.347997 0.759552 +v -0.595032 0.343147 0.757687 +v -0.595578 0.338139 0.755931 +v -0.596012 0.333135 0.754124 +v -0.596374 0.327964 0.752365 +v -0.596728 0.322786 0.750599 +v -0.597061 0.317481 0.748907 +v -0.597319 0.312181 0.747187 +v -0.597566 0.306794 0.745489 +v -0.597773 0.301405 0.743767 +v -0.597938 0.295958 0.742117 +v -0.598183 0.2905 0.740512 +v -0.598408 0.284958 0.738919 +v -0.598641 0.279425 0.737357 +v -0.59887 0.273798 
0.735799 +v -0.599168 0.268162 0.734291 +v -0.599527 0.26259 0.732694 +v -0.600043 0.257018 0.731194 +v -0.601043 0.251514 0.728972 +v -0.60225 0.246002 0.726871 +v -0.603552 0.240368 0.724586 +v -0.60499 0.234735 0.722416 +v -0.60648 0.228945 0.720294 +v -0.608121 0.223155 0.718279 +v -0.609787 0.217128 0.716354 +v -0.611597 0.21111 0.714533 +v -0.613423 0.204923 0.712747 +v -0.615333 0.198734 0.711014 +v -0.617289 0.192284 0.709406 +v -0.61932 0.18584 0.707851 +v -0.62129 0.179144 0.706351 +v -0.623297 0.172455 0.704881 +v -0.625352 0.165548 0.703524 +v -0.627398 0.158653 0.702171 +v -0.629431 0.151568 0.700895 +v -0.631407 0.144465 0.699581 +v -0.633408 0.137154 0.698295 +v -0.635287 0.129821 0.696945 +v -0.637148 0.122285 0.695646 +v -0.638915 0.114727 0.694289 +v -0.640718 0.106933 0.693004 +v -0.642255 0.0991369 0.69161 +v -0.643807 0.091148 0.690244 +v -0.645218 0.0831297 0.688808 +v -0.64663 0.0749733 0.687449 +v -0.647707 0.0667973 0.68596 +v -0.648828 0.0585471 0.684528 +v -0.649668 0.0502951 0.682989 +v -0.650488 0.0419448 0.681513 +v -0.651045 0.0336051 0.679944 +v -0.651728 0.0251715 0.678432 +v -0.651958 0.0167269 0.676756 +v -0.65245 0.00825186 0.675226 +v -0.652486 -0.000202568 0.673558 +v -0.652536 -0.00867848 0.671926 +v -0.652355 -0.0171186 0.670232 +v -0.652274 -0.0256153 0.668629 +v -0.651869 -0.0340702 0.666929 +v -0.651486 -0.0425729 0.665305 +v -0.650775 -0.0510509 0.663569 +v -0.650069 -0.059536 0.661938 +v -0.649166 -0.0680408 0.660191 +v -0.648332 -0.0765345 0.658525 +v -0.647306 -0.0850019 0.656819 +v -0.646263 -0.0933703 0.65529 +v -0.645106 -0.101714 0.653745 +v -0.644006 -0.109954 0.652402 +v -0.642735 -0.118175 0.651017 +v -0.641507 -0.126279 0.649758 +v -0.640312 -0.134388 0.64851 +v -0.639019 -0.142353 0.64731 +v -0.637748 -0.150323 0.646112 +v -0.636484 -0.158223 0.644902 +v -0.635291 -0.166153 0.643683 +v -0.634112 -0.173961 0.642522 +v -0.633013 -0.181778 0.641374 +v -0.631846 -0.189434 0.64026 +v -0.630679 -0.197108 0.639141 +v -0.629563 -0.20463 0.638129 +v -0.628493 -0.212163 0.63712 +v -0.627445 -0.21956 0.636249 +v -0.626394 -0.226975 0.635433 +v -0.625253 -0.234236 0.634686 +v -0.624162 -0.241518 0.633943 +v -0.623061 -0.248694 0.633307 +v -0.622095 -0.255897 0.632716 +v -0.621077 -0.263001 0.632199 +v -0.620042 -0.270095 0.63169 +v -0.618929 -0.277061 0.631243 +v -0.617918 -0.28404 0.630823 +v -0.616874 -0.290861 0.630508 +v -0.615707 -0.297645 0.630131 +v -0.614558 -0.304304 0.629863 +v -0.61347 -0.310975 0.629608 +v -0.612337 -0.317499 0.629524 +v -0.61111 -0.324005 0.629398 +v -0.609789 -0.330385 0.62941 +v -0.608496 -0.336762 0.629435 +v -0.607195 -0.343071 0.629724 +v -0.605768 -0.349344 0.629957 +v -0.604331 -0.355477 0.630385 +v -0.602822 -0.361599 0.630795 +v -0.601307 -0.367605 0.631365 +v -0.599702 -0.3736 0.631926 +v -0.598107 -0.379496 0.632662 +v -0.59645 -0.385391 0.633378 +v -0.594808 -0.391112 0.634245 +v -0.593091 -0.396815 0.635084 +v -0.591345 -0.402349 0.636075 +v -0.589527 -0.407867 0.637043 +v -0.587659 -0.413198 0.638205 +v -0.585777 -0.418533 0.639354 +v -0.583881 -0.42358 0.640727 +v -0.581863 -0.428591 0.642039 +v -0.579769 -0.433425 0.643633 +v -0.577644 -0.438253 0.645215 +v -0.575466 -0.442884 0.647231 +v -0.573184 -0.447478 0.64921 +v -0.570845 -0.452013 0.651351 +v -0.568496 -0.456538 0.653488 +v -0.56614 -0.461003 0.655625 +v -0.563676 -0.46544 0.657715 +v -0.586027 0.375577 0.781529 +v -0.586968 0.371261 0.77945 +v -0.587813 0.366935 0.777341 +v -0.588552 0.362466 0.775539 +v -0.589202 0.358004 0.773702 +v 
-0.589819 0.353386 0.771942 +v -0.590337 0.348771 0.770146 +v -0.590806 0.344053 0.768467 +v -0.591191 0.339357 0.766757 +v -0.591496 0.334495 0.765109 +v -0.591771 0.329637 0.763452 +v -0.591975 0.324548 0.761855 +v -0.592092 0.319465 0.760223 +v -0.592224 0.314254 0.758616 +v -0.592286 0.309075 0.756974 +v -0.592326 0.303752 0.755394 +v -0.592291 0.298446 0.753776 +v -0.592217 0.29309 0.752187 +v -0.592188 0.287718 0.750621 +v -0.592178 0.282287 0.749121 +v -0.592196 0.276851 0.74765 +v -0.592178 0.271319 0.746133 +v -0.592258 0.265775 0.74468 +v -0.592459 0.260354 0.743094 +v -0.592792 0.254919 0.741604 +v -0.593662 0.249476 0.739409 +v -0.594707 0.244025 0.737353 +v -0.595957 0.238515 0.735077 +v -0.597384 0.232992 0.732942 +v -0.598886 0.227286 0.730771 +v -0.600607 0.221582 0.728763 +v -0.602276 0.215644 0.726892 +v -0.604086 0.209715 0.725136 +v -0.605968 0.20354 0.723385 +v -0.607962 0.197375 0.721721 +v -0.609902 0.190998 0.720206 +v -0.611938 0.184642 0.718768 +v -0.613975 0.177969 0.717343 +v -0.616077 0.171304 0.715963 +v -0.618151 0.164417 0.714739 +v -0.620292 0.157572 0.713579 +v -0.622461 0.150536 0.712429 +v -0.624573 0.143491 0.711247 +v -0.62667 0.136208 0.710117 +v -0.628689 0.128907 0.708935 +v -0.630698 0.121399 0.707737 +v -0.632597 0.113901 0.706507 +v -0.634392 0.106132 0.705311 +v -0.63612 0.098358 0.704084 +v -0.637845 0.090418 0.702838 +v -0.639343 0.082421 0.701466 +v -0.640828 0.0742529 0.700203 +v -0.642049 0.0660615 0.698832 +v -0.643322 0.0578068 0.697466 +v -0.644301 0.0495404 0.695986 +v -0.645299 0.0411868 0.694615 +v -0.645918 0.032823 0.6931 +v -0.64665 0.0244112 0.691597 +v -0.647135 0.0159615 0.689991 +v -0.647628 0.00747004 0.68846 +v -0.647776 -0.00100911 0.686803 +v -0.648038 -0.00950324 0.685189 +v -0.647937 -0.0179682 0.683452 +v -0.647942 -0.0264648 0.681815 +v -0.647584 -0.0349178 0.680068 +v -0.64732 -0.0434313 0.678427 +v -0.646684 -0.0519134 0.676654 +v -0.646063 -0.0604362 0.67496 +v -0.645259 -0.0689633 0.673182 +v -0.644528 -0.0774788 0.671515 +v -0.643529 -0.0859483 0.669781 +v -0.64261 -0.0943864 0.668146 +v -0.641503 -0.102793 0.666468 +v -0.640446 -0.111064 0.665059 +v -0.639237 -0.119302 0.663616 +v -0.63806 -0.127472 0.662254 +v -0.636802 -0.135603 0.660894 +v -0.635574 -0.143592 0.659608 +v -0.634355 -0.151584 0.658336 +v -0.633165 -0.15953 0.657021 +v -0.631908 -0.167469 0.655684 +v -0.630668 -0.17529 0.654377 +v -0.629527 -0.183124 0.65309 +v -0.628309 -0.190798 0.65184 +v -0.627242 -0.198514 0.650622 +v -0.626136 -0.206029 0.649498 +v -0.625037 -0.213547 0.648368 +v -0.623925 -0.22093 0.64734 +v -0.622833 -0.228317 0.646326 +v -0.62172 -0.235579 0.645461 +v -0.620662 -0.242859 0.644615 +v -0.619572 -0.250021 0.64385 +v -0.618533 -0.257198 0.643109 +v -0.617466 -0.264283 0.642472 +v -0.616427 -0.271384 0.641851 +v -0.615348 -0.278344 0.641273 +v -0.6143 -0.285316 0.640698 +v -0.613212 -0.292104 0.64021 +v -0.612147 -0.298894 0.639734 +v -0.611055 -0.305533 0.639353 +v -0.609923 -0.312163 0.638956 +v -0.608727 -0.31864 0.638752 +v -0.607532 -0.325123 0.638543 +v -0.606294 -0.33146 0.638449 +v -0.605044 -0.337799 0.638344 +v -0.603714 -0.344069 0.638521 +v -0.602324 -0.350323 0.638675 +v -0.600884 -0.356416 0.639035 +v -0.59941 -0.362491 0.639366 +v -0.59791 -0.368446 0.639862 +v -0.596311 -0.374386 0.640355 +v -0.594712 -0.380235 0.641001 +v -0.593103 -0.386083 0.641635 +v -0.591476 -0.391735 0.642392 +v -0.589773 -0.39737 0.643126 +v -0.588037 -0.402814 0.644013 +v -0.586283 -0.408255 0.644892 +v -0.584483 -0.4135 0.645993 +v 
-0.582573 -0.418718 0.647036
[... several thousand additional OBJ vertex lines of the form `+v x y z` omitted for brevity; the mesh vertex data continues uninterrupted, covering the left-side (negative x) and right-side (positive x) vertex blocks ...]
+v 0.628772 -0.299085 0.568673
+v 0.62757 -0.305883 0.569114 +v 0.62644 -0.312603 0.569702 +v 0.625158 -0.319306 0.570248 +v 0.624049 -0.325935 0.570981 +v 0.622844 -0.332551 0.571691 +v 0.621454 -0.339053 0.572471 +v 0.620089 -0.345547 0.573274 +v 0.618739 -0.35198 0.574244 +v 0.617253 -0.358379 0.57516 +v 0.615699 -0.364679 0.57619 +v 0.614085 -0.370976 0.577216 +v 0.612537 -0.377207 0.578426 +v 0.610905 -0.383437 0.579612 +v 0.609227 -0.389574 0.580913 +v 0.607396 -0.395688 0.582204 +v 0.605596 -0.401726 0.583642 +v 0.603716 -0.407741 0.585063 +v 0.60184 -0.413671 0.586638 +v 0.599828 -0.419557 0.588156 +v 0.597756 -0.425375 0.589821 +v 0.595585 -0.431168 0.591453 +v 0.593477 -0.436905 0.593247 +v 0.591174 -0.442597 0.594971 +v 0.588999 -0.448269 0.596876 +v 0.586669 -0.453896 0.598726 +v 0.584077 -0.45947 0.601379 +v 0.581418 -0.465021 0.604011 +v 0.578654 -0.470561 0.606725 +v 0.575816 -0.476071 0.609424 +v 0.597337 0.416854 0.716658 +v 0.599151 0.411265 0.712982 +v 0.60093 0.405675 0.70929 +v 0.602591 0.400039 0.705862 +v 0.604228 0.394384 0.702427 +v 0.605771 0.388645 0.699199 +v 0.607306 0.382909 0.695968 +v 0.608737 0.37711 0.692939 +v 0.610152 0.371309 0.689905 +v 0.611513 0.36543 0.687027 +v 0.612876 0.359549 0.684154 +v 0.614184 0.353607 0.681403 +v 0.61552 0.347676 0.678658 +v 0.616826 0.341662 0.676026 +v 0.618093 0.335645 0.673378 +v 0.619341 0.329572 0.670832 +v 0.620648 0.3235 0.668303 +v 0.621939 0.317372 0.665835 +v 0.623236 0.311229 0.663371 +v 0.624554 0.305045 0.660958 +v 0.625926 0.298863 0.658563 +v 0.627364 0.292578 0.656181 +v 0.628762 0.286278 0.65379 +v 0.630179 0.27996 0.651417 +v 0.631696 0.273651 0.649073 +v 0.633183 0.267308 0.646695 +v 0.634765 0.260984 0.644352 +v 0.636387 0.254568 0.64202 +v 0.638049 0.248162 0.639711 +v 0.639796 0.241614 0.637408 +v 0.641563 0.235082 0.635117 +v 0.643304 0.22843 0.63284 +v 0.645051 0.22178 0.630558 +v 0.646825 0.214991 0.628304 +v 0.648632 0.208223 0.626057 +v 0.650442 0.201322 0.623895 +v 0.652224 0.194416 0.621725 +v 0.654036 0.187341 0.619612 +v 0.655811 0.180265 0.617486 +v 0.657526 0.173024 0.615434 +v 0.659168 0.165779 0.613362 +v 0.660805 0.158349 0.611354 +v 0.66232 0.150904 0.609308 +v 0.66378 0.143304 0.607354 +v 0.665179 0.135692 0.605382 +v 0.666483 0.127915 0.603432 +v 0.667663 0.120133 0.601443 +v 0.668696 0.112215 0.599543 +v 0.669656 0.104235 0.597599 +v 0.670567 0.0961278 0.595706 +v 0.671306 0.0880198 0.593759 +v 0.671956 0.0798068 0.591931 +v 0.672334 0.0716224 0.590046 +v 0.672646 0.063346 0.588235 +v 0.672788 0.0550707 0.586378 +v 0.672922 0.0467234 0.584667 +v 0.672794 0.0383827 0.58288 +v 0.672647 0.0299884 0.5812 +v 0.67229 0.0216106 0.579479 +v 0.671884 0.0131949 0.577894 +v 0.671307 0.00476694 0.576261 +v 0.670667 -0.0036747 0.574738 +v 0.669883 -0.0120992 0.573195 +v 0.669059 -0.0205293 0.571801 +v 0.668081 -0.0289357 0.570374 +v 0.667045 -0.0373308 0.569074 +v 0.665963 -0.0457186 0.567773 +v 0.664827 -0.0540709 0.566569 +v 0.663606 -0.0624126 0.565352 +v 0.662359 -0.0706951 0.56433 +v 0.661131 -0.0789858 0.563302 +v 0.659901 -0.0872247 0.562363 +v 0.658629 -0.0954586 0.561427 +v 0.657366 -0.103581 0.560643 +v 0.656211 -0.111668 0.559911 +v 0.654941 -0.119655 0.559299 +v 0.653783 -0.127678 0.558687 +v 0.652598 -0.135598 0.558161 +v 0.651516 -0.143541 0.557669 +v 0.650406 -0.151365 0.557261 +v 0.649351 -0.159199 0.556861 +v 0.64833 -0.16692 0.556557 +v 0.647313 -0.174646 0.556264 +v 0.646248 -0.182268 0.556026 +v 0.645348 -0.189946 0.555779 +v 0.644419 -0.197507 0.55563 +v 0.643506 -0.205074 0.555473 +v 0.642563 -0.212518 
0.555416 +v 0.641692 -0.219984 0.555399 +v 0.640818 -0.227335 0.555486 +v 0.639913 -0.23467 0.555561 +v 0.638961 -0.241896 0.55575 +v 0.6381 -0.24913 0.555957 +v 0.637205 -0.256246 0.556261 +v 0.636257 -0.263354 0.556532 +v 0.635333 -0.270388 0.556908 +v 0.634312 -0.277412 0.557256 +v 0.633259 -0.284342 0.557704 +v 0.632263 -0.291291 0.55819 +v 0.631201 -0.298125 0.558773 +v 0.630076 -0.304956 0.559352 +v 0.628959 -0.311709 0.560048 +v 0.627731 -0.318444 0.560719 +v 0.626523 -0.325095 0.561513 +v 0.625244 -0.331731 0.562297 +v 0.624027 -0.338294 0.563231 +v 0.622609 -0.344825 0.56411 +v 0.621196 -0.351283 0.565135 +v 0.619729 -0.357739 0.566142 +v 0.618233 -0.36411 0.567271 +v 0.616645 -0.370463 0.568373 +v 0.615075 -0.376759 0.569646 +v 0.613341 -0.38302 0.570877 +v 0.6117 -0.389247 0.572267 +v 0.60983 -0.395432 0.573618 +v 0.607994 -0.401545 0.575106 +v 0.606131 -0.407649 0.576586 +v 0.604247 -0.41368 0.5782 +v 0.602181 -0.419665 0.579778 +v 0.60016 -0.425611 0.58149 +v 0.598054 -0.431532 0.583174 +v 0.595876 -0.437392 0.584986 +v 0.593603 -0.443228 0.58675 +v 0.591307 -0.449011 0.588611 +v 0.58895 -0.454774 0.590457 +v 0.586372 -0.460521 0.593185 +v 0.583612 -0.466206 0.595839 +v 0.580875 -0.471911 0.598627 +v 0.57796 -0.477553 0.601356 +v 0.600247 0.422117 0.70802 +v 0.602117 0.416392 0.704204 +v 0.603933 0.410664 0.700361 +v 0.605611 0.404889 0.696766 +v 0.607262 0.399101 0.693165 +v 0.60886 0.393247 0.689792 +v 0.610462 0.387394 0.68642 +v 0.611966 0.381471 0.683241 +v 0.613461 0.375541 0.680064 +v 0.614901 0.36956 0.677051 +v 0.616375 0.363578 0.674049 +v 0.617734 0.357526 0.671144 +v 0.619141 0.351491 0.66826 +v 0.620525 0.34538 0.665496 +v 0.621873 0.339262 0.662717 +v 0.623221 0.333099 0.660034 +v 0.624618 0.32694 0.65737 +v 0.625949 0.320701 0.654769 +v 0.627396 0.314468 0.652206 +v 0.628806 0.308198 0.649666 +v 0.630266 0.301938 0.647143 +v 0.631691 0.295578 0.644629 +v 0.633164 0.28921 0.642137 +v 0.634729 0.282813 0.639671 +v 0.636256 0.276429 0.63719 +v 0.63778 0.269995 0.63472 +v 0.639385 0.263571 0.632279 +v 0.641061 0.257079 0.62986 +v 0.642828 0.250584 0.62747 +v 0.644544 0.243951 0.625068 +v 0.646243 0.237331 0.622659 +v 0.647995 0.230613 0.620291 +v 0.649755 0.223892 0.61792 +v 0.651458 0.217037 0.615579 +v 0.653253 0.210214 0.613272 +v 0.655098 0.203264 0.61104 +v 0.656817 0.196291 0.608755 +v 0.658554 0.189161 0.606556 +v 0.660299 0.182036 0.604366 +v 0.661994 0.174735 0.602218 +v 0.663619 0.167435 0.600056 +v 0.665193 0.159979 0.597983 +v 0.666663 0.152506 0.595879 +v 0.66807 0.144868 0.593843 +v 0.669407 0.137219 0.591787 +v 0.670658 0.129416 0.589792 +v 0.671713 0.121608 0.587737 +v 0.672767 0.113621 0.585775 +v 0.673695 0.105615 0.583768 +v 0.674488 0.0974933 0.581828 +v 0.675144 0.089359 0.579856 +v 0.675716 0.0811156 0.577976 +v 0.676027 0.072916 0.576052 +v 0.676282 0.0646172 0.574228 +v 0.676418 0.0563274 0.572384 +v 0.676381 0.0479295 0.570622 +v 0.676158 0.0395359 0.568787 +v 0.675963 0.0311291 0.567136 +v 0.675511 0.0227409 0.565434 +v 0.675074 0.0143111 0.563891 +v 0.674409 0.00588838 0.56228 +v 0.673696 -0.0025694 0.560822 +v 0.6729 -0.0110076 0.55934 +v 0.672015 -0.0194483 0.557979 +v 0.670988 -0.0278899 0.556577 +v 0.669895 -0.0362551 0.555393 +v 0.668737 -0.0446354 0.554202 +v 0.66754 -0.0529801 0.553116 +v 0.666254 -0.0613111 0.552013 +v 0.665059 -0.0696073 0.551103 +v 0.663819 -0.0779208 0.550154 +v 0.662557 -0.0861426 0.549318 +v 0.661256 -0.094381 0.548488 +v 0.659956 -0.102519 0.547789 +v 0.658688 -0.110659 0.547107 +v 0.657515 -0.118692 0.546588 
+v 0.656347 -0.126725 0.546091 +v 0.655119 -0.134639 0.545689 +v 0.654002 -0.14258 0.545288 +v 0.652891 -0.150413 0.544985 +v 0.651945 -0.158274 0.544723 +v 0.650876 -0.16598 0.544533 +v 0.649883 -0.173715 0.544364 +v 0.648859 -0.181326 0.544258 +v 0.647879 -0.188959 0.54415 +v 0.646928 -0.196519 0.544102 +v 0.646018 -0.204093 0.544073 +v 0.645073 -0.211544 0.544159 +v 0.64417 -0.218996 0.544258 +v 0.643286 -0.226358 0.544446 +v 0.642424 -0.233721 0.544661 +v 0.64153 -0.240952 0.54497 +v 0.640566 -0.248181 0.545264 +v 0.639669 -0.255304 0.545666 +v 0.638648 -0.262411 0.546036 +v 0.637734 -0.269453 0.546536 +v 0.636759 -0.276494 0.546999 +v 0.635807 -0.283455 0.547575 +v 0.634864 -0.290419 0.548176 +v 0.633774 -0.297282 0.548859 +v 0.632621 -0.304138 0.549534 +v 0.63151 -0.310922 0.55033 +v 0.630291 -0.317672 0.551095 +v 0.629049 -0.324353 0.551988 +v 0.627763 -0.331017 0.552864 +v 0.62647 -0.337614 0.553863 +v 0.625095 -0.344174 0.554821 +v 0.623778 -0.350698 0.555943 +v 0.622353 -0.357211 0.557047 +v 0.620839 -0.363631 0.558258 +v 0.61925 -0.370038 0.559443 +v 0.617613 -0.376393 0.560784 +v 0.615881 -0.382729 0.562102 +v 0.614265 -0.389036 0.563575 +v 0.61248 -0.395304 0.565 +v 0.610617 -0.401497 0.566549 +v 0.60864 -0.407659 0.568057 +v 0.606715 -0.413774 0.569705 +v 0.604669 -0.419867 0.571335 +v 0.602594 -0.42591 0.573082 +v 0.600412 -0.431929 0.574793 +v 0.598269 -0.43792 0.576639 +v 0.596039 -0.443891 0.57846 +v 0.593772 -0.449824 0.580363 +v 0.591316 -0.455706 0.582203 +v 0.588653 -0.46157 0.584938 +v 0.585876 -0.467397 0.587627 +v 0.583157 -0.473263 0.590485 +v 0.580223 -0.479053 0.593263 +v 0.603154 0.427385 0.699383 +v 0.605077 0.421518 0.695421 +v 0.606895 0.41565 0.691415 +v 0.608652 0.409732 0.68768 +v 0.610378 0.403815 0.683933 +v 0.612045 0.397835 0.680427 +v 0.613647 0.391854 0.676897 +v 0.615225 0.38582 0.673565 +v 0.616739 0.379779 0.670211 +v 0.61822 0.373692 0.667046 +v 0.619729 0.367602 0.663894 +v 0.621226 0.36145 0.660869 +v 0.622661 0.355305 0.657822 +v 0.624111 0.349086 0.65492 +v 0.62553 0.342871 0.652003 +v 0.626943 0.3366 0.649172 +v 0.628424 0.330336 0.646365 +v 0.629861 0.324013 0.643648 +v 0.631287 0.317679 0.640925 +v 0.632762 0.311316 0.63825 +v 0.634242 0.304962 0.635573 +v 0.635748 0.298533 0.632958 +v 0.637317 0.292099 0.63037 +v 0.638904 0.285634 0.627772 +v 0.64049 0.279169 0.625177 +v 0.642029 0.272638 0.622599 +v 0.643693 0.266124 0.620068 +v 0.645379 0.259552 0.617545 +v 0.647088 0.252978 0.615026 +v 0.648809 0.246275 0.612543 +v 0.650555 0.239573 0.610071 +v 0.652291 0.232784 0.607606 +v 0.654035 0.225996 0.605142 +v 0.655788 0.21908 0.602751 +v 0.657558 0.212167 0.600366 +v 0.659352 0.205149 0.59802 +v 0.661068 0.198127 0.595652 +v 0.662776 0.190945 0.593394 +v 0.66444 0.183758 0.591114 +v 0.666086 0.176393 0.588861 +v 0.667674 0.169043 0.586607 +v 0.669157 0.161552 0.584462 +v 0.670617 0.154055 0.582314 +v 0.672007 0.146392 0.580208 +v 0.673295 0.138723 0.578073 +v 0.674516 0.130896 0.576044 +v 0.675536 0.123037 0.573954 +v 0.676505 0.115042 0.571914 +v 0.677356 0.107028 0.569843 +v 0.678103 0.0988744 0.567886 +v 0.678684 0.0907211 0.565889 +v 0.679204 0.0824545 0.563964 +v 0.679486 0.0742366 0.562006 +v 0.679675 0.065926 0.560177 +v 0.679682 0.0575934 0.558301 +v 0.679598 0.0491829 0.556518 +v 0.679299 0.0407774 0.554675 +v 0.678947 0.0323129 0.553002 +v 0.678459 0.0238991 0.551327 +v 0.677886 0.0154649 0.549801 +v 0.677224 0.0070301 0.548247 +v 0.676478 -0.00143296 0.546853 +v 0.675586 -0.00987109 0.545429 +v 0.674657 -0.0183447 0.544081 +v 
0.673578 -0.026804 0.542695 +v 0.672448 -0.0351657 0.541643 +v 0.671237 -0.0435096 0.540582 +v 0.670002 -0.0518471 0.539628 +v 0.66872 -0.0601754 0.538665 +v 0.667452 -0.068469 0.537819 +v 0.666232 -0.0767834 0.536961 +v 0.664971 -0.0850226 0.536233 +v 0.66369 -0.093252 0.535508 +v 0.662417 -0.101415 0.534931 +v 0.66113 -0.109571 0.534363 +v 0.659821 -0.117616 0.533939 +v 0.658624 -0.125648 0.533525 +v 0.657432 -0.133595 0.533231 +v 0.656356 -0.141555 0.532942 +v 0.655258 -0.149397 0.532753 +v 0.654235 -0.157252 0.532572 +v 0.653175 -0.164984 0.532481 +v 0.652216 -0.172724 0.532414 +v 0.651231 -0.18037 0.532407 +v 0.650293 -0.187996 0.532455 +v 0.649334 -0.195521 0.532566 +v 0.648442 -0.203096 0.532684 +v 0.647513 -0.210559 0.532894 +v 0.646621 -0.218019 0.533101 +v 0.645689 -0.225378 0.533398 +v 0.644764 -0.232734 0.533696 +v 0.643805 -0.239967 0.534115 +v 0.642906 -0.247211 0.534535 +v 0.641976 -0.254348 0.535035 +v 0.641025 -0.261481 0.535529 +v 0.640108 -0.268524 0.536139 +v 0.639078 -0.275562 0.536715 +v 0.638069 -0.282528 0.537387 +v 0.637008 -0.28949 0.538054 +v 0.635955 -0.296384 0.538856 +v 0.634835 -0.303266 0.539637 +v 0.633734 -0.310076 0.540533 +v 0.632504 -0.316855 0.5414 +v 0.631298 -0.323564 0.542382 +v 0.629991 -0.330256 0.543352 +v 0.628736 -0.336895 0.544439 +v 0.627384 -0.343501 0.545493 +v 0.626026 -0.350051 0.546672 +v 0.624525 -0.356594 0.547831 +v 0.623026 -0.363073 0.549113 +v 0.621459 -0.369541 0.550388 +v 0.619889 -0.375967 0.551814 +v 0.618112 -0.382355 0.553183 +v 0.616351 -0.388695 0.554676 +v 0.614602 -0.395047 0.556201 +v 0.612797 -0.401336 0.557838 +v 0.610824 -0.407586 0.559431 +v 0.608914 -0.413815 0.561137 +v 0.606856 -0.419998 0.562801 +v 0.604805 -0.426159 0.564601 +v 0.602615 -0.432282 0.566365 +v 0.60044 -0.438379 0.56824 +v 0.598145 -0.444452 0.570068 +v 0.595837 -0.450516 0.57198 +v 0.593401 -0.456539 0.573862 +v 0.590748 -0.462557 0.576626 +v 0.587986 -0.468532 0.579358 +v 0.585158 -0.474515 0.582227 +v 0.582177 -0.480444 0.585055 +v 0.606285 0.432772 0.6905 +v 0.60816 0.426756 0.686379 +v 0.610034 0.420743 0.682254 +v 0.611836 0.414689 0.678397 +v 0.613606 0.408635 0.67453 +v 0.615268 0.402535 0.670865 +v 0.616901 0.396421 0.667192 +v 0.618473 0.39027 0.663717 +v 0.620085 0.38412 0.660257 +v 0.621627 0.377934 0.656945 +v 0.623158 0.371747 0.653635 +v 0.624651 0.365479 0.650464 +v 0.626215 0.359224 0.647327 +v 0.627696 0.352899 0.644274 +v 0.629205 0.346576 0.641226 +v 0.630656 0.340212 0.638275 +v 0.632172 0.333845 0.635349 +v 0.633682 0.327437 0.632499 +v 0.635191 0.321029 0.629644 +v 0.636687 0.314571 0.626845 +v 0.638236 0.30812 0.624064 +v 0.639797 0.301611 0.621325 +v 0.641337 0.295126 0.618574 +v 0.642946 0.288584 0.615872 +v 0.644601 0.282038 0.613186 +v 0.646208 0.275434 0.6105 +v 0.647921 0.268846 0.607856 +v 0.649573 0.262201 0.605224 +v 0.651261 0.255549 0.602612 +v 0.653024 0.248813 0.60004 +v 0.654796 0.242045 0.597457 +v 0.656511 0.235189 0.594908 +v 0.658238 0.22833 0.592358 +v 0.659961 0.221354 0.589854 +v 0.661679 0.21438 0.58735 +v 0.663413 0.207279 0.584919 +v 0.665059 0.200173 0.582465 +v 0.666706 0.19296 0.580088 +v 0.668376 0.185717 0.577696 +v 0.669976 0.178307 0.575384 +v 0.671508 0.170902 0.573065 +v 0.672968 0.163375 0.570838 +v 0.674369 0.15585 0.568595 +v 0.675751 0.148153 0.566456 +v 0.67695 0.140452 0.564269 +v 0.678137 0.132576 0.562164 +v 0.679184 0.124701 0.560027 +v 0.680117 0.116672 0.557965 +v 0.680912 0.108635 0.555877 +v 0.681623 0.100461 0.553874 +v 0.682169 0.0922794 0.551829 +v 0.682601 0.0839899 0.549879 
+v 0.682878 0.0757232 0.547915 +v 0.682968 0.0673643 0.546027 +v 0.682922 0.0590083 0.544117 +v 0.6828 0.0505586 0.542325 +v 0.682498 0.0421183 0.540489 +v 0.68214 0.0336228 0.538828 +v 0.681538 0.0251508 0.537113 +v 0.680836 0.0166798 0.535614 +v 0.680087 0.00823106 0.534118 +v 0.679221 -0.000234233 0.532778 +v 0.678292 -0.00870534 0.531385 +v 0.677253 -0.0171876 0.530133 +v 0.67613 -0.0256521 0.52887 +v 0.675034 -0.0340489 0.527893 +v 0.673823 -0.0424197 0.526919 +v 0.672563 -0.0507347 0.526082 +v 0.671294 -0.0590874 0.525222 +v 0.669995 -0.0673874 0.524475 +v 0.668649 -0.0756978 0.52372 +v 0.66738 -0.0839506 0.523099 +v 0.666112 -0.0922077 0.522478 +v 0.664818 -0.100378 0.522021 +v 0.663542 -0.108549 0.521561 +v 0.662249 -0.116609 0.521243 +v 0.661036 -0.1247 0.520952 +v 0.659986 -0.132668 0.520782 +v 0.658924 -0.140643 0.52062 +v 0.657728 -0.148474 0.520516 +v 0.656628 -0.156329 0.520434 +v 0.655536 -0.164063 0.520471 +v 0.654555 -0.171809 0.520519 +v 0.653598 -0.17944 0.52064 +v 0.652699 -0.187096 0.520787 +v 0.651767 -0.194608 0.521034 +v 0.650819 -0.202176 0.521252 +v 0.649843 -0.209646 0.52158 +v 0.648906 -0.21712 0.521892 +v 0.647958 -0.224479 0.522298 +v 0.647107 -0.231849 0.522718 +v 0.646174 -0.239102 0.523251 +v 0.645249 -0.246348 0.523779 +v 0.644349 -0.253488 0.524396 +v 0.64338 -0.26063 0.525 +v 0.642286 -0.267669 0.525673 +v 0.64132 -0.274718 0.526364 +v 0.640376 -0.281697 0.527156 +v 0.639345 -0.288673 0.527934 +v 0.638199 -0.295562 0.528815 +v 0.637029 -0.302456 0.529688 +v 0.635874 -0.309283 0.530673 +v 0.634679 -0.316094 0.531637 +v 0.633492 -0.322834 0.532706 +v 0.632207 -0.329564 0.533781 +v 0.631028 -0.336256 0.534962 +v 0.62967 -0.342902 0.536093 +v 0.628301 -0.349488 0.537348 +v 0.626869 -0.356068 0.538595 +v 0.625368 -0.362593 0.539939 +v 0.623777 -0.369122 0.541288 +v 0.622191 -0.375612 0.542774 +v 0.620428 -0.382061 0.544222 +v 0.618657 -0.38847 0.545787 +v 0.616889 -0.394878 0.547362 +v 0.615103 -0.401256 0.549056 +v 0.613146 -0.40759 0.550703 +v 0.611189 -0.413905 0.552463 +v 0.609134 -0.420184 0.554206 +v 0.607062 -0.426447 0.556058 +v 0.604866 -0.432672 0.557872 +v 0.602675 -0.438882 0.559786 +v 0.600376 -0.445072 0.561652 +v 0.598049 -0.451251 0.563575 +v 0.595612 -0.457391 0.565489 +v 0.592939 -0.46354 0.568294 +v 0.590105 -0.469633 0.57105 +v 0.587273 -0.47576 0.57397 +v 0.584259 -0.481813 0.576831 +v 0.609338 0.43816 0.681579 +v 0.611268 0.431996 0.677342 +v 0.613182 0.425832 0.673095 +v 0.615007 0.419646 0.669111 +v 0.616768 0.413459 0.665103 +v 0.61844 0.407225 0.66129 +v 0.620126 0.400988 0.657483 +v 0.621724 0.39472 0.653876 +v 0.623356 0.388457 0.650278 +v 0.624968 0.382154 0.646832 +v 0.626556 0.375852 0.643377 +v 0.628117 0.369494 0.640081 +v 0.629684 0.363121 0.636787 +v 0.631226 0.356696 0.633601 +v 0.632744 0.350272 0.630402 +v 0.634213 0.343807 0.627323 +v 0.635774 0.337348 0.624277 +v 0.637299 0.330855 0.621271 +v 0.638877 0.324361 0.618283 +v 0.640438 0.317819 0.615372 +v 0.642026 0.311279 0.612472 +v 0.643577 0.3047 0.609582 +v 0.645182 0.298137 0.60671 +v 0.646791 0.291517 0.603898 +v 0.648429 0.284889 0.601094 +v 0.650106 0.278195 0.598294 +v 0.651803 0.271545 0.595517 +v 0.653451 0.264814 0.592805 +v 0.655137 0.258081 0.590095 +v 0.656872 0.25127 0.587393 +v 0.658606 0.244457 0.584689 +v 0.660309 0.237531 0.582064 +v 0.662023 0.230612 0.579438 +v 0.663733 0.223579 0.576827 +v 0.665448 0.216549 0.574215 +v 0.667147 0.20939 0.571716 +v 0.668769 0.20222 0.569193 +v 0.670383 0.194939 0.566706 +v 0.672033 0.187662 0.564217 +v 0.673602 
0.180224 0.561855 +v 0.675103 0.172778 0.559475 +v 0.676556 0.165223 0.557172 +v 0.677868 0.157654 0.55483 +v 0.67917 0.149922 0.552637 +v 0.680403 0.142158 0.550401 +v 0.681538 0.134261 0.548223 +v 0.682538 0.126362 0.546019 +v 0.683408 0.118307 0.543936 +v 0.684157 0.110248 0.541821 +v 0.684836 0.10205 0.539776 +v 0.685356 0.0938509 0.537698 +v 0.685777 0.0855332 0.535746 +v 0.685943 0.0772191 0.533752 +v 0.686039 0.0688233 0.531834 +v 0.685943 0.0604392 0.529876 +v 0.685723 0.0519609 0.528076 +v 0.685393 0.0434858 0.526249 +v 0.68496 0.0349689 0.524574 +v 0.684333 0.0264767 0.522866 +v 0.683585 0.0179583 0.521409 +v 0.682761 0.00945613 0.519948 +v 0.681848 0.000960933 0.518654 +v 0.680867 -0.0075325 0.51733 +v 0.679812 -0.0160212 0.516188 +v 0.678622 -0.0244867 0.515033 +v 0.677383 -0.0329011 0.514098 +v 0.676176 -0.0412854 0.513202 +v 0.674905 -0.0496338 0.51246 +v 0.673496 -0.0579542 0.511703 +v 0.672177 -0.0662469 0.511078 +v 0.670887 -0.0745731 0.510434 +v 0.669623 -0.0828565 0.509936 +v 0.66833 -0.0911363 0.509437 +v 0.667 -0.0993127 0.509082 +v 0.665757 -0.107496 0.508731 +v 0.664489 -0.115585 0.50853 +v 0.663263 -0.123677 0.508335 +v 0.66202 -0.131657 0.508239 +v 0.660934 -0.139646 0.508184 +v 0.659808 -0.147496 0.508225 +v 0.658779 -0.155362 0.508266 +v 0.65777 -0.163122 0.508427 +v 0.656764 -0.170873 0.508584 +v 0.65582 -0.178516 0.508842 +v 0.654815 -0.186155 0.509075 +v 0.653852 -0.193673 0.509411 +v 0.652936 -0.201238 0.509766 +v 0.652001 -0.208712 0.510198 +v 0.651083 -0.216191 0.510636 +v 0.650154 -0.223551 0.511156 +v 0.649226 -0.230917 0.51169 +v 0.648334 -0.238178 0.512337 +v 0.647358 -0.245432 0.512957 +v 0.646421 -0.252583 0.513675 +v 0.645392 -0.259725 0.514379 +v 0.644386 -0.266786 0.515182 +v 0.643342 -0.273844 0.515964 +v 0.642361 -0.280826 0.516862 +v 0.641265 -0.287795 0.517728 +v 0.640187 -0.294708 0.518714 +v 0.639041 -0.301621 0.519699 +v 0.637915 -0.308477 0.520784 +v 0.636698 -0.315308 0.52184 +v 0.635498 -0.322078 0.523 +v 0.634255 -0.328847 0.524153 +v 0.633029 -0.335572 0.525413 +v 0.631614 -0.342243 0.526625 +v 0.63024 -0.34886 0.527953 +v 0.628779 -0.355469 0.529259 +v 0.627332 -0.362055 0.530686 +v 0.625732 -0.368635 0.532097 +v 0.624166 -0.37518 0.533647 +v 0.622439 -0.381693 0.535168 +v 0.620727 -0.388184 0.536814 +v 0.618908 -0.394658 0.538444 +v 0.617036 -0.401088 0.540165 +v 0.615141 -0.407515 0.54189 +v 0.613161 -0.41391 0.543699 +v 0.611097 -0.420287 0.545493 +v 0.608979 -0.426637 0.547373 +v 0.606827 -0.432973 0.549259 +v 0.604669 -0.439313 0.551231 +v 0.602295 -0.445589 0.553113 +v 0.599937 -0.451878 0.555077 +v 0.597479 -0.458136 0.557007 +v 0.594815 -0.464422 0.559863 +v 0.591957 -0.470645 0.562672 +v 0.589075 -0.476888 0.565617 +v 0.586046 -0.48308 0.568519 +v 0.612563 0.443646 0.672417 +v 0.614502 0.437345 0.668074 +v 0.616374 0.431036 0.663704 +v 0.618214 0.424712 0.65959 +v 0.619991 0.418383 0.655456 +v 0.6217 0.412024 0.651539 +v 0.623425 0.405667 0.647625 +v 0.625069 0.399278 0.643893 +v 0.626678 0.392884 0.640144 +v 0.628283 0.386467 0.636573 +v 0.62993 0.380046 0.633008 +v 0.631515 0.37358 0.629574 +v 0.633111 0.367114 0.626141 +v 0.63469 0.360594 0.622824 +v 0.636255 0.354069 0.619508 +v 0.637781 0.347518 0.616301 +v 0.639303 0.340959 0.613096 +v 0.640855 0.334371 0.609973 +v 0.64248 0.327788 0.60687 +v 0.644066 0.321164 0.603827 +v 0.645676 0.314544 0.60078 +v 0.647272 0.307912 0.597803 +v 0.648865 0.301275 0.594816 +v 0.650519 0.294581 0.591876 +v 0.652187 0.287872 0.588946 +v 0.65384 0.281131 0.586051 +v 0.655501 0.274401 
0.583171 +v 0.65717 0.267612 0.580329 +v 0.658896 0.260821 0.577506 +v 0.660565 0.253936 0.574706 +v 0.662246 0.247054 0.571908 +v 0.663965 0.240077 0.569165 +v 0.665654 0.233106 0.566408 +v 0.667356 0.226021 0.563729 +v 0.669059 0.218943 0.561046 +v 0.670655 0.211726 0.558409 +v 0.672266 0.204522 0.555776 +v 0.673824 0.197186 0.553226 +v 0.675328 0.189849 0.550659 +v 0.676897 0.182382 0.548214 +v 0.678376 0.17491 0.545742 +v 0.67977 0.167313 0.543389 +v 0.681102 0.159709 0.541012 +v 0.682316 0.151926 0.53871 +v 0.683504 0.144138 0.536402 +v 0.684629 0.136199 0.534185 +v 0.685622 0.128259 0.531943 +v 0.686481 0.120191 0.52981 +v 0.687248 0.112104 0.52766 +v 0.687879 0.103877 0.525598 +v 0.688412 0.0956456 0.52352 +v 0.688786 0.0873012 0.521517 +v 0.689007 0.0789647 0.519502 +v 0.68902 0.0705013 0.517578 +v 0.688812 0.0620708 0.515606 +v 0.688543 0.0535483 0.513737 +v 0.688193 0.0450347 0.511862 +v 0.687726 0.0364512 0.510125 +v 0.687046 0.027908 0.508359 +v 0.686275 0.0193718 0.507001 +v 0.685381 0.0108269 0.50563 +v 0.684432 0.00226421 0.504416 +v 0.683338 -0.00625929 0.503176 +v 0.682194 -0.0147678 0.502126 +v 0.680966 -0.0232617 0.50107 +v 0.67972 -0.0317159 0.500214 +v 0.678465 -0.0401405 0.49941 +v 0.677209 -0.0485228 0.498766 +v 0.675847 -0.0568858 0.498095 +v 0.674464 -0.0651874 0.497586 +v 0.673121 -0.0735211 0.497067 +v 0.671826 -0.0818341 0.496696 +v 0.670554 -0.0901341 0.496305 +v 0.669221 -0.0983316 0.496063 +v 0.667971 -0.106549 0.495832 +v 0.666702 -0.114641 0.495737 +v 0.66548 -0.122756 0.495659 +v 0.664277 -0.130753 0.495695 +v 0.663108 -0.138763 0.495741 +v 0.662003 -0.146637 0.495895 +v 0.660955 -0.15451 0.496065 +v 0.659859 -0.162265 0.496311 +v 0.658839 -0.170036 0.496598 +v 0.657861 -0.177668 0.496951 +v 0.656956 -0.185313 0.497337 +v 0.65597 -0.192826 0.497794 +v 0.655007 -0.200382 0.498249 +v 0.654104 -0.207864 0.498794 +v 0.653177 -0.215336 0.499349 +v 0.652307 -0.222705 0.500002 +v 0.651332 -0.230074 0.500631 +v 0.650385 -0.23733 0.501368 +v 0.649423 -0.244592 0.50211 +v 0.648407 -0.25174 0.502917 +v 0.647417 -0.258884 0.503727 +v 0.646469 -0.265964 0.504648 +v 0.6454 -0.273023 0.505535 +v 0.644378 -0.280015 0.506526 +v 0.643343 -0.286984 0.507503 +v 0.642301 -0.293924 0.508601 +v 0.641094 -0.300857 0.509681 +v 0.639916 -0.307726 0.510837 +v 0.638754 -0.31459 0.512 +v 0.637646 -0.321405 0.513276 +v 0.636354 -0.328195 0.514511 +v 0.635032 -0.334932 0.515821 +v 0.633696 -0.341655 0.517133 +v 0.632341 -0.348324 0.518529 +v 0.63085 -0.354974 0.519919 +v 0.629345 -0.361594 0.521403 +v 0.62771 -0.368195 0.522862 +v 0.626109 -0.374786 0.524475 +v 0.624441 -0.381367 0.526066 +v 0.622795 -0.387934 0.527782 +v 0.621057 -0.39449 0.529491 +v 0.619221 -0.401017 0.531286 +v 0.617186 -0.407487 0.533038 +v 0.615191 -0.41396 0.534899 +v 0.613115 -0.420415 0.536742 +v 0.611067 -0.426875 0.538688 +v 0.608881 -0.433299 0.540603 +v 0.606682 -0.439728 0.542593 +v 0.60431 -0.446117 0.544522 +v 0.601941 -0.452527 0.546528 +v 0.599417 -0.458893 0.548499 +v 0.596724 -0.465292 0.551402 +v 0.593852 -0.471634 0.554248 +v 0.590967 -0.478014 0.557237 +v 0.588008 -0.484364 0.560212 +v 0.615857 0.449143 0.663293 +v 0.617765 0.442692 0.658825 +v 0.619649 0.436248 0.654345 +v 0.621459 0.429782 0.650088 +v 0.623266 0.423311 0.645828 +v 0.624989 0.416833 0.641796 +v 0.6267 0.410346 0.637755 +v 0.628314 0.403833 0.63387 +v 0.630014 0.397319 0.630013 +v 0.631616 0.390774 0.62631 +v 0.633228 0.384231 0.622609 +v 0.634843 0.377666 0.619043 +v 0.63645 0.371091 0.615469 +v 0.638028 0.364469 0.612027 +v 
0.63962 0.357845 0.608589 +v 0.64119 0.351207 0.605227 +v 0.642776 0.344564 0.601889 +v 0.644327 0.337883 0.59864 +v 0.645923 0.331203 0.595402 +v 0.647565 0.3245 0.592226 +v 0.649204 0.3178 0.589048 +v 0.650816 0.31109 0.585958 +v 0.652375 0.304395 0.582864 +v 0.654015 0.297623 0.579784 +v 0.655701 0.290849 0.576722 +v 0.657321 0.284043 0.573733 +v 0.658969 0.277239 0.570756 +v 0.660653 0.270387 0.56779 +v 0.662326 0.263528 0.564821 +v 0.663981 0.256575 0.561935 +v 0.66566 0.249622 0.559053 +v 0.667327 0.242591 0.556176 +v 0.669016 0.235564 0.553304 +v 0.670686 0.228428 0.550543 +v 0.672277 0.221287 0.547759 +v 0.673903 0.214043 0.545025 +v 0.675543 0.206801 0.542291 +v 0.677075 0.199424 0.539682 +v 0.678566 0.192039 0.537068 +v 0.680008 0.184532 0.534515 +v 0.681475 0.177029 0.531967 +v 0.682799 0.169376 0.52955 +v 0.684067 0.161707 0.527117 +v 0.685284 0.153906 0.524748 +v 0.68647 0.146103 0.522373 +v 0.687584 0.138143 0.520134 +v 0.688551 0.130173 0.517865 +v 0.689434 0.122066 0.515661 +v 0.690183 0.113963 0.513457 +v 0.690739 0.105707 0.511377 +v 0.691253 0.0974545 0.509288 +v 0.69158 0.0890862 0.507251 +v 0.691786 0.0807154 0.505194 +v 0.691734 0.0722159 0.50325 +v 0.691553 0.0637237 0.501285 +v 0.691208 0.05515 0.499356 +v 0.690816 0.046601 0.497419 +v 0.690271 0.0379702 0.495635 +v 0.689623 0.0293519 0.49382 +v 0.6888 0.0207761 0.49256 +v 0.687869 0.0122141 0.491284 +v 0.686871 0.00363187 0.490187 +v 0.685699 -0.00493809 0.489036 +v 0.684515 -0.0134889 0.48807 +v 0.683275 -0.0220107 0.487126 +v 0.681999 -0.0304958 0.486355 +v 0.680648 -0.0389448 0.485615 +v 0.679337 -0.0473516 0.485051 +v 0.677995 -0.0557478 0.484487 +v 0.676634 -0.0640969 0.484086 +v 0.675305 -0.0724595 0.483692 +v 0.673959 -0.0807689 0.483425 +v 0.672634 -0.0890883 0.483146 +v 0.671311 -0.0973081 0.483022 +v 0.67004 -0.105548 0.4829 +v 0.668753 -0.113675 0.48293 +v 0.667533 -0.121798 0.482967 +v 0.666315 -0.129812 0.48312 +v 0.665208 -0.137837 0.483287 +v 0.664034 -0.145737 0.483554 +v 0.662965 -0.15364 0.483829 +v 0.661903 -0.161404 0.484201 +v 0.660888 -0.169176 0.484595 +v 0.659834 -0.1768 0.485058 +v 0.65887 -0.184439 0.485551 +v 0.657946 -0.191969 0.486132 +v 0.656967 -0.199514 0.486704 +v 0.655993 -0.206983 0.487361 +v 0.655045 -0.214459 0.488024 +v 0.654112 -0.221825 0.488759 +v 0.653169 -0.229197 0.489508 +v 0.652184 -0.236449 0.490356 +v 0.651235 -0.24371 0.49121 +v 0.650264 -0.250872 0.492129 +v 0.64926 -0.258031 0.493047 +v 0.648234 -0.265108 0.494052 +v 0.647186 -0.272176 0.495052 +v 0.646176 -0.279159 0.496155 +v 0.645128 -0.286134 0.497243 +v 0.644044 -0.293074 0.49843 +v 0.64292 -0.300019 0.499609 +v 0.641803 -0.306916 0.500873 +v 0.640538 -0.313792 0.502104 +v 0.639244 -0.320623 0.503426 +v 0.637984 -0.32746 0.504747 +v 0.636689 -0.334243 0.506152 +v 0.635318 -0.340984 0.507537 +v 0.633996 -0.347707 0.509026 +v 0.632556 -0.35441 0.510498 +v 0.631091 -0.36108 0.512046 +v 0.62953 -0.367733 0.513595 +v 0.627905 -0.374356 0.515257 +v 0.626218 -0.380985 0.516925 +v 0.624507 -0.387604 0.518688 +v 0.622684 -0.394206 0.52044 +v 0.620814 -0.400792 0.522283 +v 0.618945 -0.407381 0.524129 +v 0.616978 -0.413945 0.526037 +v 0.61492 -0.420482 0.527934 +v 0.61286 -0.427028 0.52992 +v 0.610628 -0.433536 0.531867 +v 0.608437 -0.440077 0.533899 +v 0.606035 -0.446561 0.535861 +v 0.603652 -0.453086 0.537905 +v 0.601133 -0.45957 0.539919 +v 0.598399 -0.466081 0.542845 +v 0.595503 -0.472532 0.545733 +v 0.592528 -0.47902 0.54876 +v 0.589516 -0.485494 0.551782 +v 0.619199 0.454734 0.653907 +v 0.621103 0.448139 0.649324 
+v 0.62299 0.441545 0.644733 +v 0.624835 0.434938 0.640383 +v 0.626602 0.42833 0.636005 +v 0.628313 0.421723 0.631843 +v 0.630026 0.415111 0.627682 +v 0.631673 0.408474 0.623699 +v 0.633334 0.40183 0.619722 +v 0.634933 0.395174 0.615887 +v 0.6365 0.388518 0.612042 +v 0.638107 0.381834 0.608364 +v 0.639738 0.375157 0.604689 +v 0.641343 0.368444 0.60112 +v 0.642943 0.361728 0.597546 +v 0.644495 0.354991 0.594066 +v 0.646077 0.348261 0.590596 +v 0.647672 0.341502 0.58721 +v 0.649314 0.334741 0.583844 +v 0.650908 0.327952 0.580547 +v 0.652546 0.321165 0.577262 +v 0.654125 0.314382 0.574027 +v 0.655705 0.30761 0.570801 +v 0.657318 0.300786 0.56763 +v 0.658971 0.293969 0.564473 +v 0.660606 0.287101 0.561358 +v 0.662278 0.280233 0.558251 +v 0.66389 0.273303 0.555182 +v 0.665524 0.266378 0.552113 +v 0.667176 0.259375 0.549092 +v 0.668847 0.252374 0.546077 +v 0.670519 0.245287 0.543129 +v 0.672171 0.238205 0.54017 +v 0.673782 0.231022 0.537275 +v 0.675356 0.223838 0.534374 +v 0.676937 0.216555 0.53157 +v 0.678478 0.209261 0.528754 +v 0.679969 0.201857 0.526039 +v 0.681459 0.194447 0.523328 +v 0.682862 0.1869 0.520727 +v 0.684285 0.179359 0.518128 +v 0.685589 0.171665 0.515619 +v 0.686852 0.163979 0.513111 +v 0.688041 0.156146 0.510705 +v 0.689172 0.148302 0.508291 +v 0.69023 0.14032 0.505987 +v 0.691241 0.132333 0.503665 +v 0.692117 0.124193 0.501448 +v 0.692882 0.116045 0.499206 +v 0.693441 0.107745 0.497077 +v 0.693921 0.099464 0.494943 +v 0.694243 0.091059 0.49291 +v 0.694428 0.0826568 0.490856 +v 0.694387 0.0741284 0.488869 +v 0.694155 0.0656017 0.486878 +v 0.693771 0.0568978 0.484726 +v 0.693306 0.0482133 0.482544 +v 0.692701 0.0395874 0.480969 +v 0.691952 0.030953 0.479355 +v 0.691056 0.0223232 0.478142 +v 0.690062 0.0137141 0.476928 +v 0.688962 0.00510077 0.475898 +v 0.687856 -0.00352965 0.474807 +v 0.686643 -0.012135 0.473909 +v 0.685413 -0.020742 0.473018 +v 0.68413 -0.0292151 0.472524 +v 0.682783 -0.0376662 0.472067 +v 0.681353 -0.0460921 0.471622 +v 0.680011 -0.0545359 0.47119 +v 0.678659 -0.0629266 0.470904 +v 0.677355 -0.0713317 0.470624 +v 0.675985 -0.079647 0.470465 +v 0.674632 -0.0880125 0.470333 +v 0.67336 -0.0962784 0.470303 +v 0.672103 -0.104564 0.470297 +v 0.670818 -0.112705 0.470442 +v 0.6696 -0.120858 0.470596 +v 0.668398 -0.128896 0.470842 +v 0.667204 -0.136934 0.471103 +v 0.666017 -0.144867 0.471484 +v 0.664898 -0.152795 0.471856 +v 0.663878 -0.160603 0.472334 +v 0.662809 -0.16837 0.472792 +v 0.661836 -0.176038 0.473268 +v 0.660835 -0.183702 0.473744 +v 0.659819 -0.19122 0.474429 +v 0.65884 -0.198746 0.475108 +v 0.65785 -0.206218 0.475887 +v 0.656861 -0.213689 0.47665 +v 0.655918 -0.221056 0.477512 +v 0.654998 -0.228422 0.478362 +v 0.653965 -0.235667 0.479318 +v 0.653017 -0.242928 0.480284 +v 0.652019 -0.250091 0.481314 +v 0.650943 -0.257236 0.482315 +v 0.650034 -0.264313 0.483438 +v 0.649096 -0.271389 0.484577 +v 0.648011 -0.278367 0.485757 +v 0.646851 -0.285347 0.486939 +v 0.645831 -0.292301 0.488224 +v 0.644723 -0.299241 0.489485 +v 0.643537 -0.306144 0.49084 +v 0.64235 -0.313052 0.492175 +v 0.641167 -0.319918 0.493606 +v 0.639836 -0.326754 0.494996 +v 0.638464 -0.333565 0.496476 +v 0.637142 -0.340357 0.497936 +v 0.635712 -0.347105 0.499491 +v 0.634292 -0.353856 0.501046 +v 0.632807 -0.360568 0.502673 +v 0.631251 -0.367271 0.504282 +v 0.629793 -0.37398 0.506032 +v 0.628123 -0.380648 0.507747 +v 0.626328 -0.387316 0.509568 +v 0.624494 -0.393974 0.511381 +v 0.622694 -0.400651 0.513293 +v 0.620774 -0.407301 0.51518 +v 0.618834 -0.413947 0.517151 +v 0.616765 -0.420554 
0.519089 +v 0.614605 -0.427176 0.521106 +v 0.612408 -0.433785 0.5231 +v 0.610189 -0.44041 0.525168 +v 0.6078 -0.446991 0.52717 +v 0.605413 -0.453625 0.529249 +v 0.60287 -0.460207 0.531293 +v 0.600132 -0.466829 0.534258 +v 0.597207 -0.473375 0.537169 +v 0.594254 -0.47999 0.540234 +v 0.591162 -0.486563 0.543273 +v 0.622565 0.460328 0.644524 +v 0.624467 0.453585 0.639828 +v 0.626357 0.446837 0.635127 +v 0.628135 0.4401 0.630651 +v 0.629906 0.433357 0.626173 +v 0.631591 0.426619 0.621875 +v 0.6333 0.419893 0.617588 +v 0.634906 0.413134 0.613482 +v 0.636549 0.406373 0.609391 +v 0.638193 0.399612 0.605445 +v 0.639807 0.392849 0.60149 +v 0.641393 0.386066 0.597695 +v 0.642987 0.379282 0.593899 +v 0.64457 0.372475 0.590186 +v 0.646156 0.365668 0.586471 +v 0.647724 0.358843 0.582885 +v 0.649278 0.352004 0.579296 +v 0.650891 0.345147 0.575772 +v 0.652516 0.338309 0.572255 +v 0.654079 0.331439 0.568843 +v 0.655711 0.324572 0.56545 +v 0.657326 0.317715 0.562086 +v 0.658919 0.310862 0.558723 +v 0.660498 0.303961 0.55544 +v 0.662101 0.297069 0.552183 +v 0.663751 0.290134 0.54894 +v 0.665401 0.2832 0.545693 +v 0.667048 0.276207 0.542541 +v 0.668654 0.26921 0.539378 +v 0.670269 0.262155 0.536229 +v 0.671915 0.255099 0.53308 +v 0.673491 0.247949 0.530033 +v 0.675084 0.240807 0.526988 +v 0.676677 0.233588 0.523971 +v 0.678256 0.226371 0.520955 +v 0.679846 0.219046 0.518094 +v 0.68137 0.211717 0.515214 +v 0.682802 0.204281 0.512391 +v 0.684272 0.196848 0.509579 +v 0.685653 0.189269 0.506926 +v 0.68701 0.181681 0.504263 +v 0.688291 0.173971 0.50168 +v 0.689527 0.166262 0.499087 +v 0.690681 0.158391 0.496644 +v 0.691795 0.150521 0.494196 +v 0.692827 0.142517 0.491832 +v 0.693855 0.134514 0.489461 +v 0.694645 0.12634 0.48721 +v 0.695347 0.118152 0.484943 +v 0.695918 0.109805 0.482766 +v 0.696343 0.101459 0.480568 +v 0.696572 0.0930226 0.47852 +v 0.696749 0.0845868 0.47647 +v 0.696701 0.0760277 0.474457 +v 0.6965 0.0674674 0.472435 +v 0.696124 0.0586574 0.470039 +v 0.695685 0.0498494 0.467632 +v 0.694967 0.0412046 0.46626 +v 0.694149 0.0325772 0.464861 +v 0.693231 0.0238989 0.463704 +v 0.692196 0.0152276 0.462535 +v 0.69107 0.00657294 0.461566 +v 0.689897 -0.00209987 0.460547 +v 0.688694 -0.0107616 0.459749 +v 0.687415 -0.0194019 0.458937 +v 0.686031 -0.0278855 0.458695 +v 0.684676 -0.0363412 0.45848 +v 0.683273 -0.0448125 0.458167 +v 0.681942 -0.0533004 0.45788 +v 0.680512 -0.0617186 0.457701 +v 0.679196 -0.0701542 0.457535 +v 0.677841 -0.0785291 0.457494 +v 0.676522 -0.0869332 0.457464 +v 0.67518 -0.095214 0.457569 +v 0.673907 -0.10351 0.457667 +v 0.672693 -0.111695 0.457919 +v 0.671462 -0.119877 0.458168 +v 0.670245 -0.127942 0.458535 +v 0.669068 -0.136015 0.458903 +v 0.667891 -0.14396 0.459387 +v 0.666744 -0.151906 0.45989 +v 0.665621 -0.159708 0.46045 +v 0.664591 -0.167507 0.461001 +v 0.663586 -0.175203 0.461464 +v 0.662583 -0.182905 0.461933 +v 0.661589 -0.190438 0.462726 +v 0.660582 -0.197964 0.463528 +v 0.659564 -0.205433 0.464396 +v 0.65856 -0.212902 0.465264 +v 0.657563 -0.220258 0.466223 +v 0.656589 -0.227617 0.46719 +v 0.655573 -0.234869 0.46826 +v 0.654649 -0.242127 0.469329 +v 0.653657 -0.249287 0.470458 +v 0.65264 -0.256428 0.471601 +v 0.651636 -0.263489 0.472823 +v 0.65054 -0.270538 0.474034 +v 0.649501 -0.277535 0.475322 +v 0.648445 -0.284531 0.476621 +v 0.64738 -0.291488 0.477992 +v 0.646222 -0.298435 0.479345 +v 0.645094 -0.305354 0.480783 +v 0.643835 -0.312259 0.482205 +v 0.642606 -0.319138 0.483701 +v 0.641331 -0.326004 0.485184 +v 0.64007 -0.33285 0.486757 +v 0.638666 -0.339657 0.488295 +v 
0.63732 -0.346451 0.489928 +v 0.635872 -0.353229 0.491547 +v 0.634368 -0.359994 0.493244 +v 0.632814 -0.366754 0.494933 +v 0.631196 -0.373488 0.496721 +v 0.629535 -0.380209 0.498502 +v 0.627862 -0.386956 0.500388 +v 0.626042 -0.393681 0.502269 +v 0.624194 -0.400415 0.504228 +v 0.622274 -0.407128 0.506171 +v 0.620344 -0.413856 0.508186 +v 0.618271 -0.420548 0.510181 +v 0.616152 -0.427268 0.512242 +v 0.613977 -0.433972 0.514294 +v 0.61177 -0.440696 0.516395 +v 0.609334 -0.447349 0.518438 +v 0.606915 -0.454076 0.520536 +v 0.604345 -0.460755 0.522606 +v 0.601581 -0.467475 0.525601 +v 0.598707 -0.474154 0.528569 +v 0.595741 -0.480886 0.531657 +v 0.592608 -0.487562 0.534711 +v 0.626045 0.465988 0.6349 +v 0.627933 0.459116 0.630114 +v 0.629781 0.45223 0.625309 +v 0.631512 0.445356 0.620711 +v 0.633256 0.438483 0.61612 +v 0.63492 0.43162 0.611718 +v 0.636587 0.424779 0.607316 +v 0.638209 0.417922 0.603108 +v 0.639873 0.411063 0.598913 +v 0.641467 0.404188 0.594859 +v 0.643052 0.397308 0.590803 +v 0.644625 0.390421 0.586881 +v 0.646188 0.383534 0.582958 +v 0.647738 0.376628 0.579137 +v 0.649285 0.369719 0.575317 +v 0.650845 0.362782 0.5716 +v 0.652395 0.355842 0.567881 +v 0.653965 0.348904 0.564247 +v 0.655555 0.341984 0.56062 +v 0.657133 0.335043 0.557068 +v 0.658745 0.328095 0.553536 +v 0.660349 0.32116 0.550082 +v 0.661931 0.314223 0.546613 +v 0.663527 0.307258 0.54321 +v 0.665095 0.300295 0.539809 +v 0.666689 0.293284 0.536463 +v 0.668296 0.286275 0.533115 +v 0.669919 0.279233 0.529823 +v 0.671523 0.272176 0.526524 +v 0.673112 0.265062 0.523283 +v 0.674686 0.257944 0.520036 +v 0.676265 0.250763 0.51686 +v 0.677843 0.24358 0.513681 +v 0.679394 0.236321 0.510592 +v 0.680922 0.229058 0.507499 +v 0.682448 0.221703 0.504515 +v 0.683969 0.214346 0.501528 +v 0.685404 0.206877 0.498653 +v 0.686825 0.199399 0.495772 +v 0.688156 0.191804 0.493027 +v 0.689496 0.184208 0.490289 +v 0.690711 0.176467 0.487662 +v 0.69193 0.168733 0.485033 +v 0.69307 0.16085 0.482522 +v 0.694168 0.152964 0.480003 +v 0.695148 0.144924 0.477605 +v 0.696099 0.136883 0.475187 +v 0.696856 0.128673 0.472878 +v 0.697579 0.120463 0.470566 +v 0.698092 0.112083 0.468378 +v 0.698529 0.103701 0.466171 +v 0.698753 0.0951973 0.464097 +v 0.698901 0.0867116 0.462023 +v 0.698823 0.0781151 0.460067 +v 0.698548 0.0695197 0.458104 +v 0.698109 0.0607248 0.456 +v 0.697615 0.0519361 0.45388 +v 0.69688 0.0432211 0.452465 +v 0.696032 0.0345158 0.451036 +v 0.695087 0.0257795 0.449883 +v 0.694064 0.0170533 0.448721 +v 0.692907 0.00833647 0.447813 +v 0.691713 -0.000377466 0.446877 +v 0.690487 -0.00907767 0.446158 +v 0.689222 -0.0177891 0.445448 +v 0.68789 -0.0263939 0.445171 +v 0.686505 -0.0349719 0.444915 +v 0.685069 -0.0435342 0.444661 +v 0.683721 -0.0521006 0.444465 +v 0.682343 -0.0605893 0.444394 +v 0.681005 -0.0690825 0.444327 +v 0.679638 -0.0775004 0.444401 +v 0.678353 -0.0859457 0.444484 +v 0.677066 -0.0942745 0.444708 +v 0.675784 -0.102615 0.444948 +v 0.674436 -0.110797 0.445291 +v 0.67322 -0.119028 0.445654 +v 0.672026 -0.127115 0.446145 +v 0.670847 -0.135215 0.446639 +v 0.669797 -0.143183 0.447262 +v 0.668711 -0.151144 0.44788 +v 0.667522 -0.158965 0.448561 +v 0.666418 -0.166772 0.44921 +v 0.665345 -0.174487 0.4499 +v 0.664319 -0.182175 0.450567 +v 0.663269 -0.1897 0.451442 +v 0.662294 -0.197233 0.452313 +v 0.661281 -0.204693 0.453283 +v 0.660294 -0.21216 0.454233 +v 0.659266 -0.21952 0.455296 +v 0.658199 -0.226882 0.456328 +v 0.657223 -0.234146 0.457491 +v 0.656167 -0.241398 0.458634 +v 0.655102 -0.248518 0.459853 +v 0.654105 -0.255662 
0.461105 +v 0.653121 -0.262725 0.462417 +v 0.652161 -0.269799 0.463746 +v 0.651097 -0.276789 0.465106 +v 0.649963 -0.283768 0.466469 +v 0.648835 -0.290724 0.46792 +v 0.647704 -0.297686 0.469357 +v 0.646584 -0.304601 0.470874 +v 0.645419 -0.311517 0.472381 +v 0.644171 -0.318407 0.473947 +v 0.642817 -0.325281 0.475492 +v 0.641534 -0.332145 0.477131 +v 0.640224 -0.338985 0.478745 +v 0.63881 -0.345803 0.480436 +v 0.6374 -0.352615 0.482131 +v 0.635943 -0.359423 0.483906 +v 0.634411 -0.366219 0.485657 +v 0.632736 -0.372987 0.487499 +v 0.63109 -0.379762 0.489325 +v 0.629416 -0.386573 0.491263 +v 0.627631 -0.393365 0.493203 +v 0.625826 -0.400179 0.495216 +v 0.623865 -0.406947 0.49721 +v 0.621867 -0.413745 0.499234 +v 0.619834 -0.420522 0.501248 +v 0.617794 -0.427347 0.503374 +v 0.615528 -0.434111 0.505462 +v 0.613296 -0.440925 0.507593 +v 0.610823 -0.447657 0.509671 +v 0.608439 -0.454484 0.511802 +v 0.605892 -0.461262 0.513909 +v 0.603102 -0.468079 0.516925 +v 0.600224 -0.474865 0.51992 +v 0.597231 -0.481711 0.523035 +v 0.59409 -0.488492 0.526118 +v 0.629482 0.471656 0.625255 +v 0.631324 0.464643 0.620367 +v 0.633127 0.457627 0.615461 +v 0.634883 0.45062 0.610767 +v 0.636614 0.443615 0.606067 +v 0.638266 0.436632 0.601564 +v 0.639917 0.429656 0.597059 +v 0.641514 0.42269 0.592731 +v 0.64309 0.415722 0.588396 +v 0.644645 0.408744 0.58424 +v 0.646196 0.401763 0.580081 +v 0.647742 0.394772 0.576038 +v 0.649294 0.387779 0.571991 +v 0.65083 0.380769 0.568072 +v 0.652332 0.373734 0.564134 +v 0.653866 0.366697 0.560283 +v 0.655424 0.359664 0.55644 +v 0.656987 0.35265 0.552708 +v 0.65856 0.34565 0.548983 +v 0.660137 0.338632 0.545292 +v 0.661717 0.331612 0.541608 +v 0.663247 0.324591 0.538034 +v 0.66482 0.31757 0.534471 +v 0.666378 0.310534 0.530924 +v 0.667948 0.303509 0.527397 +v 0.669486 0.296418 0.523946 +v 0.671046 0.289332 0.520498 +v 0.672654 0.282232 0.517069 +v 0.674219 0.275131 0.513629 +v 0.675787 0.267952 0.510299 +v 0.67736 0.260774 0.506965 +v 0.678942 0.25356 0.503666 +v 0.680518 0.246348 0.500363 +v 0.682031 0.239046 0.497194 +v 0.683544 0.231741 0.494025 +v 0.68502 0.224355 0.49092 +v 0.686486 0.216969 0.487815 +v 0.687882 0.209458 0.484874 +v 0.689247 0.201939 0.481925 +v 0.690562 0.194331 0.479107 +v 0.691891 0.186726 0.476292 +v 0.693069 0.178962 0.473624 +v 0.694257 0.171203 0.470954 +v 0.695377 0.163307 0.468383 +v 0.696459 0.155406 0.465805 +v 0.697404 0.14733 0.463363 +v 0.698315 0.13925 0.460904 +v 0.699027 0.131014 0.458539 +v 0.699685 0.122775 0.456168 +v 0.700133 0.114363 0.453955 +v 0.700516 0.105948 0.451733 +v 0.700689 0.0974084 0.449663 +v 0.700824 0.0888714 0.447585 +v 0.700679 0.0802077 0.445646 +v 0.700381 0.0715692 0.443727 +v 0.6999 0.0628034 0.441919 +v 0.699392 0.0540419 0.440107 +v 0.698604 0.0452507 0.438657 +v 0.697743 0.0364693 0.437188 +v 0.69677 0.0276808 0.436048 +v 0.695744 0.0189017 0.434888 +v 0.69462 0.0101261 0.434046 +v 0.693449 0.00135301 0.433196 +v 0.692262 -0.00741572 0.432588 +v 0.690972 -0.0161605 0.431979 +v 0.689567 -0.0248411 0.431668 +v 0.688202 -0.0335356 0.431362 +v 0.686794 -0.0421772 0.4312 +v 0.685433 -0.0508204 0.431055 +v 0.684143 -0.0593882 0.431105 +v 0.682735 -0.0679364 0.431137 +v 0.681344 -0.0763956 0.431323 +v 0.680026 -0.0848892 0.431509 +v 0.678703 -0.0932815 0.431846 +v 0.67741 -0.101678 0.432179 +v 0.676129 -0.109918 0.432651 +v 0.674917 -0.118164 0.433135 +v 0.673693 -0.126285 0.433746 +v 0.67253 -0.1344 0.434372 +v 0.67139 -0.142372 0.435103 +v 0.670256 -0.150344 0.435823 +v 0.669137 -0.158182 0.436641 +v 0.66802 -0.166016 
0.43743 +v 0.666939 -0.17371 0.438283 +v 0.665891 -0.181409 0.439148 +v 0.664892 -0.188955 0.440118 +v 0.66383 -0.196487 0.441077 +v 0.662788 -0.203923 0.442135 +v 0.661758 -0.211387 0.443185 +v 0.66067 -0.218745 0.444332 +v 0.659695 -0.22611 0.445485 +v 0.658668 -0.233362 0.446727 +v 0.657655 -0.240601 0.447975 +v 0.656613 -0.247735 0.449299 +v 0.655571 -0.254871 0.450618 +v 0.654495 -0.261941 0.452002 +v 0.653409 -0.268996 0.453386 +v 0.652356 -0.275995 0.454855 +v 0.651302 -0.282985 0.456307 +v 0.650221 -0.28994 0.457833 +v 0.649055 -0.296891 0.459352 +v 0.647886 -0.303814 0.460939 +v 0.64662 -0.310722 0.462505 +v 0.645406 -0.31762 0.464149 +v 0.644141 -0.324527 0.46578 +v 0.642823 -0.33141 0.467484 +v 0.641469 -0.338265 0.469165 +v 0.640136 -0.345118 0.470928 +v 0.63868 -0.351951 0.472681 +v 0.637206 -0.358787 0.474513 +v 0.63563 -0.365609 0.47633 +v 0.634014 -0.372434 0.478231 +v 0.632375 -0.379264 0.480133 +v 0.630696 -0.386119 0.482103 +v 0.628941 -0.392968 0.484083 +v 0.627192 -0.399852 0.486146 +v 0.625194 -0.406686 0.488182 +v 0.623245 -0.413574 0.490247 +v 0.621192 -0.420437 0.492288 +v 0.619067 -0.427322 0.494444 +v 0.616839 -0.434173 0.496584 +v 0.614616 -0.441085 0.498752 +v 0.61216 -0.447925 0.500881 +v 0.609703 -0.454816 0.503032 +v 0.607116 -0.461667 0.505157 +v 0.604401 -0.468612 0.508206 +v 0.601455 -0.475481 0.511219 +v 0.598447 -0.482421 0.514358 +v 0.59528 -0.489306 0.517468 +v 0.633037 0.47738 0.615409 +v 0.634823 0.47023 0.610417 +v 0.636617 0.463076 0.605424 +v 0.6383 0.45594 0.600629 +v 0.639991 0.448821 0.595834 +v 0.641611 0.441721 0.591231 +v 0.643221 0.434622 0.586621 +v 0.64476 0.427532 0.582192 +v 0.646335 0.420441 0.577777 +v 0.647854 0.413358 0.573506 +v 0.649336 0.406273 0.569219 +v 0.650819 0.399193 0.565073 +v 0.652343 0.392093 0.560929 +v 0.653839 0.384965 0.556881 +v 0.655344 0.377832 0.552834 +v 0.656846 0.370706 0.548887 +v 0.658378 0.36358 0.544951 +v 0.659905 0.356486 0.541093 +v 0.661419 0.349399 0.537237 +v 0.662931 0.342297 0.533446 +v 0.664435 0.335206 0.529648 +v 0.66599 0.328118 0.52595 +v 0.66753 0.321026 0.522256 +v 0.669062 0.313922 0.51861 +v 0.670587 0.30682 0.51498 +v 0.672134 0.299682 0.511402 +v 0.673675 0.292533 0.507811 +v 0.675223 0.285371 0.504269 +v 0.676772 0.27821 0.500726 +v 0.678324 0.270985 0.497264 +v 0.679862 0.26376 0.493797 +v 0.681376 0.256491 0.490395 +v 0.682912 0.249224 0.487 +v 0.684421 0.241888 0.483715 +v 0.685913 0.234555 0.480423 +v 0.687348 0.227131 0.477234 +v 0.68879 0.219709 0.474042 +v 0.690174 0.212179 0.470991 +v 0.691542 0.204645 0.467932 +v 0.692803 0.197021 0.465082 +v 0.694073 0.189391 0.462234 +v 0.695222 0.181616 0.459507 +v 0.696361 0.173846 0.45678 +v 0.697415 0.165926 0.454153 +v 0.698463 0.158009 0.451527 +v 0.699333 0.14992 0.449033 +v 0.700177 0.141825 0.446514 +v 0.700806 0.133562 0.444107 +v 0.701402 0.125304 0.441701 +v 0.701843 0.116872 0.439462 +v 0.702235 0.108431 0.437212 +v 0.702383 0.0998522 0.435134 +v 0.702454 0.0912773 0.433052 +v 0.702269 0.0825682 0.431136 +v 0.70204 0.0738645 0.429213 +v 0.701534 0.0650407 0.427484 +v 0.700961 0.0562354 0.425765 +v 0.700163 0.04738 0.424375 +v 0.699244 0.038542 0.422962 +v 0.698293 0.0296692 0.421883 +v 0.697261 0.020811 0.42079 +v 0.696122 0.011951 0.419994 +v 0.694982 0.00309546 0.419205 +v 0.693731 -0.0057324 0.418681 +v 0.692482 -0.014553 0.418153 +v 0.691115 -0.0233412 0.4179 +v 0.689788 -0.0321138 0.417649 +v 0.688414 -0.0408452 0.417604 +v 0.687061 -0.049567 0.417542 +v 0.685711 -0.0582327 0.417681 +v 0.684361 -0.0668751 0.417831 +v 
+[... several thousand Wavefront OBJ `v x y z` vertex records omitted: raw 3D mesh-coordinate data (mirrored positive-x and negative-x halves of the mesh) from a large avatar mesh asset added by this diff ...]
0.361747 0.816023 +v -0.677539 0.570088 0.536696 +v -0.673598 0.563804 0.547429 +v -0.66951 0.55743 0.557881 +v -0.665492 0.551058 0.568355 +v -0.661344 0.544648 0.578647 +v -0.657184 0.538236 0.588935 +v -0.65305 0.531789 0.598962 +v -0.64893 0.525369 0.608991 +v -0.644887 0.518979 0.618778 +v -0.640831 0.512582 0.628558 +v -0.636852 0.506204 0.638061 +v -0.632916 0.499824 0.647583 +v -0.629075 0.493515 0.656826 +v -0.625257 0.487181 0.666074 +v -0.621608 0.480931 0.675037 +v -0.617934 0.474678 0.683987 +v -0.614484 0.468528 0.692688 +v -0.610984 0.462381 0.701366 +v -0.607688 0.456305 0.709778 +v -0.604487 0.450234 0.718236 +v -0.601454 0.444248 0.726371 +v -0.598503 0.43824 0.734533 +v -0.595708 0.432291 0.742361 +v -0.5931 0.426366 0.750255 +v -0.590668 0.420504 0.757754 +v -0.588391 0.414664 0.765312 +v -0.586309 0.408891 0.772453 +v -0.584347 0.403101 0.77964 +v -0.582581 0.397403 0.786288 +v -0.580773 0.391705 0.792927 +v -0.579159 0.386125 0.798914 +v -0.5772 0.380511 0.804777 +v -0.5755 0.375003 0.809872 +v -0.573395 0.369493 0.814812 +v -0.571492 0.364099 0.818872 +v -0.675152 0.578869 0.542607 +v -0.671237 0.572431 0.553362 +v -0.667162 0.565904 0.563818 +v -0.663116 0.559384 0.574279 +v -0.659032 0.552811 0.584573 +v -0.654887 0.546239 0.594847 +v -0.650741 0.539626 0.604837 +v -0.646608 0.533026 0.614833 +v -0.642544 0.526454 0.624558 +v -0.638498 0.519886 0.634292 +v -0.634517 0.513339 0.643745 +v -0.630542 0.506803 0.65319 +v -0.626685 0.500333 0.662348 +v -0.622878 0.493854 0.671522 +v -0.619203 0.487448 0.680398 +v -0.615526 0.481027 0.689274 +v -0.611999 0.474725 0.697836 +v -0.608578 0.46842 0.706442 +v -0.605296 0.462174 0.714731 +v -0.602111 0.455932 0.723076 +v -0.599055 0.449769 0.731089 +v -0.596127 0.443578 0.739156 +v -0.593397 0.437458 0.746871 +v -0.590834 0.431349 0.754646 +v -0.588445 0.425292 0.762029 +v -0.586247 0.419274 0.769479 +v -0.584259 0.413295 0.776503 +v -0.582358 0.407312 0.783559 +v -0.580728 0.401403 0.790129 +v -0.579047 0.395494 0.796685 +v -0.577473 0.389673 0.802586 +v -0.575639 0.383832 0.808387 +v -0.574086 0.378082 0.813471 +v -0.57201 0.372309 0.818369 +v -0.570125 0.366638 0.822391 +v -0.672748 0.587652 0.548511 +v -0.668841 0.58106 0.559277 +v -0.664794 0.574377 0.569744 +v -0.660723 0.567704 0.580198 +v -0.656648 0.560971 0.590478 +v -0.652564 0.554237 0.60075 +v -0.648402 0.547459 0.610702 +v -0.64426 0.540686 0.620661 +v -0.640186 0.533938 0.630332 +v -0.636112 0.527196 0.640002 +v -0.632159 0.520473 0.649411 +v -0.628147 0.513779 0.658788 +v -0.624262 0.50715 0.667855 +v -0.620392 0.500512 0.676924 +v -0.616716 0.493938 0.685728 +v -0.613028 0.487373 0.694523 +v -0.609492 0.480916 0.702978 +v -0.606023 0.474453 0.711457 +v -0.602764 0.468047 0.719626 +v -0.59961 0.461629 0.727863 +v -0.596597 0.455275 0.735783 +v -0.593701 0.448914 0.743756 +v -0.591014 0.442616 0.751348 +v -0.588512 0.436333 0.759007 +v -0.586199 0.430087 0.766289 +v -0.584031 0.423875 0.773624 +v -0.582107 0.417689 0.780514 +v -0.580352 0.411519 0.787474 +v -0.57878 0.405396 0.79394 +v -0.577215 0.399273 0.800412 +v -0.575829 0.393209 0.806263 +v -0.574103 0.387124 0.812007 +v -0.572624 0.381147 0.817052 +v -0.570595 0.375119 0.821911 +v -0.568741 0.369177 0.825898 +v -0.669984 0.596546 0.554548 +v -0.666099 0.589803 0.56534 +v -0.662056 0.58296 0.575816 +v -0.658034 0.576125 0.586295 +v -0.653969 0.569223 0.596558 +v -0.64989 0.562322 0.606822 +v -0.645761 0.555382 0.616756 +v -0.641613 0.54844 0.626677 +v -0.637519 0.541492 0.636309 +v -0.633474 0.53457 
0.645953 +v -0.629506 0.527685 0.6553 +v -0.625488 0.520811 0.664618 +v -0.621593 0.514006 0.673622 +v -0.617734 0.507193 0.682638 +v -0.614013 0.500442 0.691343 +v -0.610313 0.493717 0.70006 +v -0.606793 0.487099 0.708424 +v -0.603352 0.480482 0.71682 +v -0.600121 0.47391 0.724906 +v -0.596965 0.467323 0.73304 +v -0.593949 0.460764 0.740844 +v -0.59106 0.454216 0.748697 +v -0.588404 0.447731 0.756195 +v -0.585954 0.441277 0.763773 +v -0.583719 0.434839 0.770966 +v -0.581648 0.428419 0.778211 +v -0.579757 0.422027 0.785016 +v -0.578132 0.415671 0.791926 +v -0.576678 0.409338 0.798339 +v -0.575138 0.403013 0.804722 +v -0.573849 0.396731 0.81057 +v -0.57228 0.39042 0.816328 +v -0.570793 0.384203 0.821336 +v -0.568843 0.377893 0.826168 +v -0.566992 0.3717 0.83019 +v -0.663335 0.598551 0.571396 +v -0.659317 0.591542 0.581884 +v -0.655294 0.584544 0.59237 +v -0.651246 0.577478 0.602631 +v -0.647187 0.570412 0.612879 +v -0.643044 0.563303 0.622776 +v -0.638914 0.556192 0.632674 +v -0.634835 0.549054 0.642273 +v -0.630767 0.541946 0.651874 +v -0.626778 0.534899 0.661154 +v -0.622759 0.527859 0.670419 +v -0.618871 0.520864 0.679366 +v -0.614986 0.513876 0.688316 +v -0.611254 0.506958 0.696939 +v -0.607543 0.500059 0.705569 +v -0.604087 0.493273 0.713867 +v -0.600614 0.486485 0.722154 +v -0.597358 0.479752 0.730133 +v -0.594174 0.472988 0.738154 +v -0.591205 0.466246 0.745857 +v -0.58837 0.45952 0.753615 +v -0.585762 0.45286 0.76103 +v -0.583326 0.446228 0.768512 +v -0.581119 0.43959 0.775588 +v -0.579136 0.432986 0.782739 +v -0.577348 0.426403 0.789481 +v -0.575792 0.419851 0.796322 +v -0.574436 0.413321 0.802675 +v -0.572999 0.406775 0.809005 +v -0.571727 0.400268 0.814816 +v -0.570302 0.393737 0.820584 +v -0.568929 0.387255 0.825579 +v -0.567027 0.380675 0.830383 +v -0.565173 0.374241 0.834449 +v -0.660189 0.607397 0.577583 +v -0.656182 0.600231 0.588088 +v -0.652163 0.593064 0.598585 +v -0.648164 0.585826 0.608853 +v -0.644125 0.578593 0.619097 +v -0.640006 0.571299 0.628984 +v -0.635884 0.564009 0.638867 +v -0.631846 0.556693 0.648445 +v -0.627792 0.549405 0.658018 +v -0.623794 0.542172 0.667258 +v -0.619807 0.534943 0.676504 +v -0.615924 0.52776 0.685399 +v -0.612043 0.52058 0.694292 +v -0.608278 0.513537 0.702898 +v -0.604532 0.506515 0.711505 +v -0.601022 0.499564 0.719779 +v -0.597534 0.492609 0.728066 +v -0.59425 0.485708 0.736008 +v -0.591078 0.478802 0.744008 +v -0.588063 0.4719 0.751649 +v -0.585235 0.464985 0.759365 +v -0.582635 0.458125 0.766736 +v -0.580218 0.451291 0.774189 +v -0.578095 0.444478 0.781247 +v -0.576131 0.437699 0.788353 +v -0.574438 0.430945 0.795078 +v -0.572909 0.424219 0.801877 +v -0.571572 0.417515 0.808182 +v -0.570266 0.410817 0.814508 +v -0.569119 0.40414 0.820306 +v -0.567795 0.397428 0.826031 +v -0.566497 0.390751 0.831026 +v -0.564704 0.383993 0.835856 +v -0.562806 0.377246 0.839863 +v -0.657008 0.616242 0.583758 +v -0.653032 0.608916 0.594286 +v -0.649023 0.601584 0.604798 +v -0.64502 0.594175 0.615052 +v -0.641004 0.586772 0.625287 +v -0.636912 0.5793 0.635162 +v -0.632829 0.571824 0.645042 +v -0.628796 0.56434 0.654596 +v -0.624765 0.55687 0.664155 +v -0.62077 0.549446 0.67336 +v -0.616778 0.542026 0.682556 +v -0.612875 0.534652 0.691383 +v -0.608984 0.527281 0.700213 +v -0.605216 0.520119 0.708809 +v -0.601462 0.512974 0.717414 +v -0.597901 0.505852 0.725673 +v -0.594376 0.498732 0.733939 +v -0.591114 0.491659 0.74187 +v -0.587869 0.484586 0.749812 +v -0.58489 0.477508 0.75742 +v -0.582047 0.470437 0.765089 +v -0.579429 0.463378 0.772409 +v -0.577062 
0.456348 0.779844 +v -0.574946 0.449352 0.786848 +v -0.573066 0.442396 0.79395 +v -0.571381 0.435469 0.80062 +v -0.569894 0.428569 0.807378 +v -0.568671 0.421708 0.813671 +v -0.567454 0.414845 0.819982 +v -0.56642 0.407996 0.825751 +v -0.565179 0.401107 0.831438 +v -0.56397 0.394248 0.836441 +v -0.562181 0.387301 0.841239 +v -0.560291 0.380285 0.845212 +v -0.649474 0.617706 0.600626 +v -0.645476 0.610202 0.611154 +v -0.641489 0.602614 0.621404 +v -0.637489 0.595031 0.631645 +v -0.633462 0.587371 0.64153 +v -0.629424 0.579711 0.651408 +v -0.625424 0.572049 0.660955 +v -0.621378 0.564385 0.670479 +v -0.617471 0.556758 0.679705 +v -0.613479 0.549145 0.688875 +v -0.609599 0.541586 0.697656 +v -0.605719 0.534034 0.706448 +v -0.601906 0.52672 0.715029 +v -0.598128 0.519423 0.723624 +v -0.594589 0.512141 0.731894 +v -0.591034 0.504856 0.740157 +v -0.587711 0.497583 0.748049 +v -0.584466 0.490313 0.755976 +v -0.581446 0.483058 0.76354 +v -0.578611 0.475809 0.771208 +v -0.576023 0.468586 0.778508 +v -0.573657 0.461381 0.785888 +v -0.571602 0.454206 0.792885 +v -0.569708 0.447065 0.799942 +v -0.568071 0.439956 0.806599 +v -0.566651 0.432882 0.81335 +v -0.56548 0.425851 0.819643 +v -0.564344 0.418812 0.82596 +v -0.563375 0.411761 0.831729 +v -0.562237 0.4047 0.837435 +v -0.561072 0.397636 0.842434 +v -0.559325 0.390467 0.847224 +v -0.557414 0.383194 0.851176 +v -0.641924 0.618818 0.61751 +v -0.637952 0.611054 0.627758 +v -0.633926 0.603287 0.637979 +v -0.62994 0.595438 0.647863 +v -0.625931 0.587606 0.657744 +v -0.621951 0.579753 0.667269 +v -0.617953 0.5719 0.676785 +v -0.614055 0.56407 0.686001 +v -0.610099 0.55626 0.695153 +v -0.606179 0.548519 0.703877 +v -0.602363 0.540782 0.712653 +v -0.598539 0.53332 0.721217 +v -0.594721 0.525868 0.729796 +v -0.591126 0.518425 0.738045 +v -0.587557 0.510982 0.746311 +v -0.584201 0.503504 0.754173 +v -0.580958 0.496035 0.762084 +v -0.577891 0.488605 0.769615 +v -0.575013 0.481157 0.777259 +v -0.572483 0.473755 0.784529 +v -0.570134 0.466378 0.791874 +v -0.568041 0.45903 0.798816 +v -0.566212 0.451708 0.805878 +v -0.564664 0.444429 0.812536 +v -0.563278 0.437175 0.819268 +v -0.562139 0.429953 0.825556 +v -0.561077 0.42273 0.831876 +v -0.560201 0.415512 0.837653 +v -0.559129 0.408261 0.843363 +v -0.558098 0.401011 0.848391 +v -0.556344 0.393638 0.853148 +v -0.554438 0.386102 0.8571 +v -0.637944 0.627532 0.624025 +v -0.634007 0.619583 0.634297 +v -0.630047 0.611634 0.644558 +v -0.626089 0.603596 0.654458 +v -0.622106 0.595577 0.664363 +v -0.618167 0.587534 0.673907 +v -0.614132 0.579484 0.683402 +v -0.610267 0.571449 0.692583 +v -0.606389 0.563442 0.70173 +v -0.602553 0.555501 0.710431 +v -0.59874 0.547556 0.719141 +v -0.594909 0.53993 0.727737 +v -0.591037 0.532297 0.736307 +v -0.587393 0.524672 0.744546 +v -0.583775 0.517037 0.752796 +v -0.580456 0.509376 0.760664 +v -0.57717 0.501716 0.768548 +v -0.574105 0.494102 0.77607 +v -0.571281 0.486481 0.783709 +v -0.568682 0.478876 0.790916 +v -0.566342 0.471304 0.798245 +v -0.564276 0.463789 0.805185 +v -0.562465 0.456308 0.812248 +v -0.560997 0.448863 0.818925 +v -0.559681 0.441432 0.825668 +v -0.558562 0.434015 0.831956 +v -0.557529 0.426617 0.838282 +v -0.5567 0.419224 0.844084 +v -0.555725 0.411818 0.849832 +v -0.554766 0.404334 0.854915 +v -0.553038 0.396751 0.859696 +v -0.551003 0.388981 0.863604 +v -0.633929 0.636235 0.630534 +v -0.630042 0.6281 0.640833 +v -0.626094 0.61996 0.651108 +v -0.622184 0.611736 0.661043 +v -0.618225 0.60353 0.670961 +v -0.61429 0.595296 0.680495 +v -0.610348 0.587061 0.690021 +v 
-0.606499 0.578836 0.699155 +v -0.602572 0.570617 0.708234 +v -0.598782 0.562468 0.716901 +v -0.594964 0.554324 0.725549 +v -0.591106 0.546521 0.734163 +v -0.587217 0.538717 0.742763 +v -0.58357 0.530903 0.751006 +v -0.579932 0.523089 0.759258 +v -0.576569 0.515234 0.767102 +v -0.57327 0.507392 0.774972 +v -0.570208 0.499593 0.782464 +v -0.567354 0.491794 0.790078 +v -0.564816 0.483993 0.797283 +v -0.562482 0.476224 0.804584 +v -0.560445 0.468529 0.811522 +v -0.558614 0.460866 0.818564 +v -0.55716 0.453255 0.82524 +v -0.555875 0.445632 0.831985 +v -0.554826 0.438047 0.838293 +v -0.553817 0.430461 0.844628 +v -0.553039 0.422908 0.850446 +v -0.552119 0.415322 0.856223 +v -0.55115 0.407655 0.86131 +v -0.54946 0.399857 0.866097 +v -0.547354 0.391876 0.869972 +v -0.62561 0.636694 0.647526 +v -0.621726 0.62837 0.657852 +v -0.617871 0.619964 0.667818 +v -0.61397 0.611573 0.677768 +v -0.610056 0.603146 0.687311 +v -0.606152 0.594722 0.696847 +v -0.60232 0.586301 0.70597 +v -0.598449 0.577881 0.715071 +v -0.594695 0.569515 0.723729 +v -0.590867 0.561148 0.732352 +v -0.586998 0.553169 0.740985 +v -0.583146 0.545188 0.749609 +v -0.57946 0.537164 0.757837 +v -0.575767 0.529152 0.766066 +v -0.572366 0.521093 0.773887 +v -0.569044 0.513054 0.781731 +v -0.565998 0.505064 0.789215 +v -0.563173 0.497084 0.796818 +v -0.56061 0.489082 0.804005 +v -0.558281 0.481106 0.81131 +v -0.55625 0.473213 0.818235 +v -0.554503 0.465374 0.825301 +v -0.55302 0.457561 0.831957 +v -0.55171 0.449741 0.838707 +v -0.550737 0.442004 0.845069 +v -0.549752 0.434264 0.851419 +v -0.549023 0.426546 0.857294 +v -0.548122 0.418829 0.863085 +v -0.547146 0.41096 0.86821 +v -0.545434 0.402973 0.872984 +v -0.543244 0.394783 0.876832 +v -0.621147 0.645283 0.654199 +v -0.617305 0.636776 0.664571 +v -0.613509 0.628191 0.67457 +v -0.609647 0.619613 0.684534 +v -0.605762 0.61099 0.694083 +v -0.601878 0.602367 0.703638 +v -0.598044 0.593748 0.712748 +v -0.594214 0.585136 0.721855 +v -0.590461 0.576553 0.730489 +v -0.586652 0.567961 0.739088 +v -0.582776 0.559805 0.747724 +v -0.578903 0.551651 0.756358 +v -0.575181 0.543417 0.764578 +v -0.571491 0.535218 0.772798 +v -0.568074 0.526962 0.780606 +v -0.564732 0.51871 0.788447 +v -0.561684 0.510521 0.795906 +v -0.558825 0.502351 0.803469 +v -0.556242 0.494132 0.810644 +v -0.553956 0.485962 0.817972 +v -0.551923 0.477869 0.824876 +v -0.550192 0.469833 0.831929 +v -0.548703 0.461825 0.838589 +v -0.547412 0.45381 0.845359 +v -0.546469 0.445918 0.85174 +v -0.545495 0.438018 0.858113 +v -0.544774 0.430154 0.864023 +v -0.543924 0.422276 0.869865 +v -0.542957 0.414263 0.875028 +v -0.541298 0.406102 0.879803 +v -0.539001 0.397694 0.883606 +v -0.616274 0.653971 0.661033 +v -0.612512 0.64528 0.671472 +v -0.608747 0.636494 0.681515 +v -0.604917 0.627677 0.691525 +v -0.601109 0.618862 0.701113 +v -0.597261 0.610038 0.710686 +v -0.593461 0.601231 0.7198 +v -0.589612 0.592431 0.728884 +v -0.585927 0.583665 0.737529 +v -0.582136 0.57489 0.746121 +v -0.578196 0.566524 0.754802 +v -0.574229 0.55816 0.763463 +v -0.570543 0.549727 0.771698 +v -0.566823 0.541322 0.779893 +v -0.563408 0.532852 0.787677 +v -0.560123 0.524396 0.79553 +v -0.55706 0.515977 0.802958 +v -0.554159 0.507584 0.810482 +v -0.551623 0.499159 0.817681 +v -0.549308 0.490773 0.824985 +v -0.547312 0.48246 0.831915 +v -0.545517 0.474183 0.838936 +v -0.544081 0.465985 0.845635 +v -0.542721 0.457746 0.85239 +v -0.541744 0.449685 0.858767 +v -0.540824 0.441643 0.865172 +v -0.540116 0.43367 0.871135 +v -0.539321 0.425678 0.877029 +v -0.538303 0.417495 
0.882205 +v -0.536562 0.409212 0.886957 +v -0.534224 0.400612 0.890741 +v -0.611358 0.662649 0.667833 +v -0.607647 0.653772 0.678332 +v -0.60392 0.644755 0.688423 +v -0.600115 0.635726 0.69848 +v -0.596335 0.626713 0.708083 +v -0.592517 0.617705 0.717661 +v -0.588768 0.608719 0.726788 +v -0.584991 0.599723 0.735898 +v -0.581271 0.590766 0.744506 +v -0.577476 0.581801 0.753073 +v -0.57347 0.573234 0.761789 +v -0.569494 0.564666 0.770521 +v -0.565762 0.556019 0.778724 +v -0.562018 0.547396 0.786909 +v -0.558628 0.538731 0.794679 +v -0.55533 0.530071 0.802496 +v -0.552247 0.521416 0.809911 +v -0.549374 0.512791 0.817438 +v -0.546804 0.504148 0.824613 +v -0.544476 0.495545 0.831912 +v -0.542514 0.487005 0.838845 +v -0.540739 0.478505 0.845891 +v -0.539273 0.470095 0.85259 +v -0.537909 0.46165 0.859356 +v -0.536908 0.453422 0.865732 +v -0.535996 0.445225 0.872153 +v -0.535319 0.437129 0.878144 +v -0.534487 0.429041 0.884078 +v -0.533442 0.420743 0.889272 +v -0.531727 0.412306 0.894053 +v -0.529289 0.40354 0.897795 +v -0.60603 0.671422 0.674787 +v -0.602322 0.662316 0.685357 +v -0.598659 0.653109 0.695521 +v -0.594912 0.643881 0.705645 +v -0.591196 0.634687 0.715297 +v -0.587417 0.625499 0.724913 +v -0.583705 0.616279 0.734063 +v -0.579891 0.607053 0.743156 +v -0.576206 0.597901 0.751782 +v -0.572421 0.588742 0.760341 +v -0.568396 0.579992 0.769157 +v -0.564318 0.571234 0.777932 +v -0.560586 0.562341 0.786106 +v -0.556928 0.553479 0.794314 +v -0.553509 0.54458 0.80208 +v -0.550134 0.535685 0.809865 +v -0.547144 0.526809 0.817289 +v -0.544275 0.517948 0.824777 +v -0.541722 0.509073 0.831937 +v -0.539348 0.500229 0.839197 +v -0.537332 0.491457 0.846119 +v -0.535531 0.482728 0.853162 +v -0.534042 0.47407 0.859887 +v -0.532667 0.465375 0.866681 +v -0.531637 0.456924 0.873083 +v -0.530701 0.4485 0.879526 +v -0.529968 0.440398 0.88551 +v -0.529173 0.432285 0.891448 +v -0.528052 0.423889 0.896625 +v -0.526353 0.415374 0.901427 +v -0.52368 0.40649 0.905061 +v -0.600658 0.680177 0.68171 +v -0.596977 0.670895 0.692361 +v -0.593326 0.661491 0.702565 +v -0.589644 0.652082 0.712759 +v -0.58595 0.642683 0.722444 +v -0.582191 0.633303 0.732092 +v -0.578493 0.623853 0.741255 +v -0.574661 0.614386 0.750342 +v -0.570994 0.605029 0.75897 +v -0.567268 0.595668 0.767559 +v -0.563154 0.586722 0.776425 +v -0.559043 0.577782 0.78529 +v -0.555287 0.568634 0.793426 +v -0.551652 0.559514 0.801621 +v -0.548157 0.55038 0.809352 +v -0.544817 0.541263 0.817165 +v -0.54181 0.532156 0.824532 +v -0.538916 0.523061 0.831966 +v -0.536367 0.513944 0.83911 +v -0.533998 0.504861 0.846362 +v -0.531954 0.495854 0.85329 +v -0.530149 0.486901 0.860355 +v -0.528682 0.477995 0.867115 +v -0.527294 0.469069 0.873931 +v -0.526207 0.460369 0.880339 +v -0.525213 0.451713 0.886805 +v -0.524516 0.443602 0.892797 +v -0.523698 0.435457 0.898728 +v -0.522573 0.427007 0.903933 +v -0.520829 0.418441 0.908689 +v -0.518021 0.40947 0.912251 +v -0.594882 0.689094 0.688748 +v -0.591167 0.679721 0.699697 +v -0.587507 0.670174 0.710013 +v -0.583822 0.660608 0.720318 +v -0.580149 0.651007 0.730108 +v -0.576372 0.64143 0.739832 +v -0.572623 0.631793 0.749065 +v -0.56881 0.622152 0.758262 +v -0.565037 0.612575 0.766991 +v -0.561222 0.602991 0.775691 +v -0.55727 0.593673 0.784394 +v -0.553238 0.584338 0.793047 +v -0.549565 0.574917 0.801165 +v -0.545881 0.565521 0.809264 +v -0.542464 0.556145 0.816969 +v -0.539091 0.546786 0.8247 +v -0.536079 0.537431 0.832056 +v -0.533149 0.528095 0.839466 +v -0.530604 0.518757 0.846601 +v -0.528206 0.509448 0.853829 +v 
-0.526203 0.5002 0.86078 +v -0.52435 0.491001 0.867822 +v -0.522852 0.481821 0.874611 +v -0.521409 0.472622 0.881433 +v -0.520285 0.463475 0.887975 +v -0.519218 0.454349 0.89455 +v -0.518455 0.446393 0.900385 +v -0.517646 0.438396 0.906185 +v -0.516492 0.429958 0.911298 +v -0.514737 0.421402 0.916001 +v -0.511734 0.412448 0.919369 +v -0.589099 0.698007 0.69578 +v -0.585285 0.688515 0.706997 +v -0.581657 0.678816 0.717447 +v -0.577918 0.669099 0.727837 +v -0.574201 0.6593 0.737693 +v -0.570423 0.649533 0.747501 +v -0.566668 0.639725 0.756827 +v -0.562756 0.629891 0.766057 +v -0.558949 0.620106 0.774938 +v -0.555054 0.61029 0.78376 +v -0.551198 0.600582 0.792256 +v -0.547306 0.590868 0.800731 +v -0.543653 0.581172 0.80879 +v -0.540012 0.571515 0.816843 +v -0.536583 0.56189 0.82447 +v -0.533199 0.552279 0.832127 +v -0.530121 0.542664 0.839444 +v -0.527217 0.533071 0.846872 +v -0.524652 0.52352 0.853975 +v -0.522245 0.513992 0.861193 +v -0.520242 0.504503 0.868144 +v -0.51837 0.495048 0.875183 +v -0.51683 0.485579 0.881983 +v -0.515372 0.476114 0.888846 +v -0.514191 0.466512 0.895502 +v -0.513097 0.456927 0.902224 +v -0.512292 0.44914 0.907891 +v -0.511467 0.441293 0.913561 +v -0.510258 0.43288 0.918568 +v -0.50855 0.424329 0.923235 +v -0.505331 0.415421 0.926436 +v -0.58266 0.707046 0.703467 +v -0.578937 0.697359 0.714674 +v -0.575325 0.68747 0.725196 +v -0.571566 0.677569 0.735631 +v -0.567856 0.66757 0.745551 +v -0.56407 0.657596 0.755418 +v -0.560298 0.647559 0.764784 +v -0.556434 0.637485 0.774096 +v -0.552644 0.627392 0.782979 +v -0.548738 0.617304 0.791797 +v -0.544915 0.607312 0.800264 +v -0.541039 0.597311 0.8087 +v -0.537316 0.587338 0.816676 +v -0.533595 0.577405 0.824637 +v -0.53015 0.567538 0.832215 +v -0.526798 0.557686 0.839851 +v -0.523726 0.547845 0.84714 +v -0.520781 0.538022 0.854515 +v -0.518235 0.528229 0.861591 +v -0.515828 0.518468 0.868762 +v -0.513791 0.508738 0.875672 +v -0.511908 0.499052 0.882697 +v -0.510336 0.489392 0.889525 +v -0.508803 0.479707 0.89638 +v -0.507587 0.469974 0.903028 +v -0.506415 0.460237 0.909717 +v -0.505635 0.452282 0.915315 +v -0.504828 0.444307 0.920905 +v -0.503538 0.435741 0.925817 +v -0.501762 0.42709 0.930353 +v -0.498506 0.418089 0.933299 +v -0.576213 0.716092 0.711149 +v -0.572575 0.706226 0.72234 +v -0.568913 0.69614 0.73289 +v -0.565141 0.686035 0.743382 +v -0.561409 0.675821 0.753355 +v -0.557593 0.665622 0.763265 +v -0.553794 0.655325 0.772661 +v -0.54993 0.645009 0.782013 +v -0.546145 0.634649 0.790902 +v -0.542223 0.62426 0.799704 +v -0.538402 0.613976 0.808129 +v -0.534503 0.603695 0.8165 +v -0.530768 0.593476 0.824423 +v -0.52707 0.583284 0.832362 +v -0.523612 0.573166 0.839882 +v -0.520264 0.563071 0.847485 +v -0.517137 0.552982 0.8547 +v -0.514179 0.542927 0.862049 +v -0.511605 0.532882 0.869052 +v -0.509238 0.522894 0.876204 +v -0.507207 0.51293 0.883119 +v -0.505307 0.503004 0.890127 +v -0.503668 0.493124 0.896946 +v -0.502083 0.483215 0.903808 +v -0.50083 0.473343 0.910451 +v -0.4996 0.463478 0.917115 +v -0.498861 0.455381 0.922651 +v -0.498099 0.447251 0.928187 +v -0.496731 0.438598 0.932997 +v -0.494863 0.429847 0.937395 +v -0.491664 0.420726 0.940144 +v -0.56928 0.725235 0.719074 +v -0.565687 0.715168 0.730283 +v -0.56205 0.704863 0.740906 +v -0.558273 0.694523 0.751447 +v -0.55454 0.684049 0.76145 +v -0.550689 0.673575 0.771373 +v -0.546916 0.663012 0.780807 +v -0.543034 0.652435 0.790172 +v -0.539257 0.641812 0.799063 +v -0.535307 0.631173 0.807854 +v -0.531414 0.620597 0.816208 +v -0.527487 0.610033 0.824526 +v 
-0.523808 0.599543 0.832406 +v -0.520119 0.589056 0.840269 +v -0.516633 0.578659 0.847733 +v -0.51323 0.568284 0.855252 +v -0.510128 0.55794 0.862436 +v -0.507119 0.547601 0.869682 +v -0.504589 0.537358 0.876684 +v -0.502171 0.527146 0.883759 +v -0.500154 0.517004 0.890671 +v -0.498216 0.506877 0.897648 +v -0.496566 0.496836 0.904443 +v -0.494918 0.486771 0.911245 +v -0.493599 0.476795 0.917829 +v -0.492316 0.466855 0.924435 +v -0.491564 0.458555 0.929927 +v -0.490757 0.450191 0.935397 +v -0.489331 0.441372 0.940069 +v -0.487521 0.432406 0.944385 +v -0.484239 0.423168 0.946918 +v -0.55878 0.724078 0.73822 +v -0.555109 0.713526 0.748882 +v -0.551321 0.702947 0.759475 +v -0.547575 0.692253 0.769492 +v -0.543746 0.681551 0.779453 +v -0.539942 0.670716 0.788898 +v -0.536015 0.659855 0.798257 +v -0.532202 0.64897 0.807131 +v -0.528207 0.638068 0.81588 +v -0.52426 0.62719 0.824163 +v -0.520323 0.616351 0.832451 +v -0.516626 0.605565 0.840228 +v -0.512934 0.59478 0.848014 +v -0.509424 0.5841 0.85542 +v -0.506031 0.57345 0.862917 +v -0.502901 0.562827 0.870013 +v -0.499923 0.552235 0.877227 +v -0.497353 0.541766 0.884148 +v -0.494968 0.531354 0.891215 +v -0.492939 0.521027 0.898103 +v -0.490998 0.5107 0.905073 +v -0.489296 0.500492 0.911813 +v -0.487608 0.490287 0.918562 +v -0.486263 0.480237 0.925114 +v -0.484931 0.470195 0.931677 +v -0.484158 0.461665 0.937124 +v -0.483355 0.453093 0.942541 +v -0.481856 0.444059 0.947049 +v -0.480056 0.434944 0.95126 +v -0.476742 0.425583 0.95362 +v -0.551294 0.733002 0.746351 +v -0.547656 0.722199 0.757061 +v -0.543905 0.711369 0.7677 +v -0.540143 0.700424 0.77775 +v -0.53629 0.689459 0.787746 +v -0.532483 0.678381 0.797214 +v -0.528521 0.667264 0.806578 +v -0.52464 0.656049 0.815396 +v -0.520596 0.644827 0.824088 +v -0.516704 0.633655 0.832383 +v -0.512685 0.62249 0.840581 +v -0.508982 0.611437 0.848283 +v -0.505239 0.600377 0.855934 +v -0.501755 0.589394 0.863317 +v -0.49828 0.578424 0.870702 +v -0.4952 0.567541 0.877782 +v -0.492198 0.556685 0.884928 +v -0.489642 0.546042 0.89185 +v -0.487183 0.535428 0.898852 +v -0.485179 0.524909 0.905698 +v -0.483217 0.514361 0.912587 +v -0.481545 0.504 0.919304 +v -0.479837 0.493629 0.925989 +v -0.478412 0.483509 0.932484 +v -0.477008 0.473388 0.938996 +v -0.476234 0.464613 0.944316 +v -0.47539 0.455775 0.949591 +v -0.473824 0.446545 0.953955 +v -0.471999 0.43721 0.958038 +v -0.468753 0.427783 0.9602 +v -0.543734 0.741907 0.754435 +v -0.540111 0.730851 0.76518 +v -0.536379 0.719783 0.775849 +v -0.532616 0.708577 0.785954 +v -0.528718 0.697339 0.795966 +v -0.524865 0.686008 0.805423 +v -0.520834 0.674629 0.814766 +v -0.516901 0.663098 0.823526 +v -0.512875 0.651556 0.832219 +v -0.508943 0.640061 0.840454 +v -0.504904 0.62861 0.848603 +v -0.501162 0.617281 0.856182 +v -0.497401 0.605939 0.863733 +v -0.493853 0.594638 0.871021 +v -0.490401 0.583359 0.878376 +v -0.48731 0.572208 0.885394 +v -0.484327 0.561094 0.892507 +v -0.48172 0.550245 0.899376 +v -0.479282 0.539451 0.9064 +v -0.47728 0.528724 0.913184 +v -0.475363 0.518 0.920043 +v -0.473644 0.507441 0.926666 +v -0.471952 0.496905 0.933312 +v -0.470454 0.486727 0.939756 +v -0.46898 0.476523 0.946226 +v -0.468179 0.4675 0.951404 +v -0.467335 0.458436 0.956547 +v -0.465705 0.448981 0.960754 +v -0.463841 0.439468 0.964686 +v -0.460731 0.429983 0.96672 +v -0.532033 0.739548 0.773483 +v -0.528278 0.728219 0.784151 +v -0.524513 0.716711 0.794281 +v -0.520622 0.705181 0.804316 +v -0.516724 0.693538 0.813755 +v -0.512641 0.681835 0.823059 +v -0.508689 0.670003 0.831811 +v 
-0.504615 0.658151 0.840473 +v -0.500634 0.64638 0.848641 +v -0.496574 0.63463 0.856729 +v -0.492792 0.622953 0.864239 +v -0.488992 0.611267 0.871736 +v -0.485459 0.59966 0.878977 +v -0.481965 0.588069 0.886231 +v -0.478905 0.576692 0.893193 +v -0.475916 0.565331 0.900209 +v -0.473378 0.554245 0.907078 +v -0.470935 0.54319 0.914034 +v -0.468948 0.532257 0.920779 +v -0.466978 0.521332 0.927536 +v -0.465336 0.510629 0.934079 +v -0.463669 0.499911 0.940598 +v -0.462193 0.489552 0.946925 +v -0.460728 0.479169 0.953272 +v -0.459794 0.470013 0.958339 +v -0.458784 0.460827 0.963322 +v -0.457071 0.451089 0.967418 +v -0.455137 0.441294 0.971261 +v -0.452 0.431686 0.973089 +v -0.520048 0.736611 0.792373 +v -0.516244 0.724794 0.802497 +v -0.512325 0.712959 0.812532 +v -0.508381 0.701006 0.821938 +v -0.504289 0.689009 0.831233 +v -0.500289 0.676872 0.839947 +v -0.496179 0.664717 0.848586 +v -0.492168 0.652639 0.856693 +v -0.488093 0.640589 0.864738 +v -0.48429 0.628581 0.872199 +v -0.480474 0.616558 0.879647 +v -0.476905 0.604643 0.886793 +v -0.473402 0.592749 0.893982 +v -0.470361 0.581131 0.900859 +v -0.467447 0.569554 0.907864 +v -0.464895 0.558196 0.914646 +v -0.462495 0.546887 0.921581 +v -0.460457 0.535734 0.928223 +v -0.458512 0.524614 0.934954 +v -0.45691 0.513772 0.941378 +v -0.455316 0.502901 0.947813 +v -0.453842 0.492337 0.954007 +v -0.452374 0.48176 0.960223 +v -0.451315 0.472482 0.965176 +v -0.450202 0.463184 0.970069 +v -0.44838 0.453179 0.974023 +v -0.4464 0.443127 0.977776 +v -0.443201 0.433408 0.979389 +v -0.511134 0.744865 0.800684 +v -0.507339 0.73276 0.810822 +v -0.503366 0.720606 0.820819 +v -0.499415 0.708341 0.830204 +v -0.495331 0.696037 0.839478 +v -0.491298 0.683608 0.848163 +v -0.487177 0.671158 0.856773 +v -0.48316 0.658766 0.864838 +v -0.479038 0.646377 0.872803 +v -0.475231 0.634055 0.880219 +v -0.471385 0.621717 0.887606 +v -0.467819 0.609518 0.894707 +v -0.46431 0.597339 0.901851 +v -0.461305 0.585447 0.908707 +v -0.458365 0.573579 0.915623 +v -0.455886 0.561921 0.92237 +v -0.453514 0.550289 0.929219 +v -0.451545 0.538909 0.935825 +v -0.449631 0.527554 0.942489 +v -0.448126 0.516479 0.94878 +v -0.446611 0.505346 0.955074 +v -0.445264 0.494494 0.961088 +v -0.44388 0.48361 0.967062 +v -0.442584 0.474327 0.971954 +v -0.441216 0.465001 0.976767 +v -0.439279 0.454724 0.980605 +v -0.43715 0.444362 0.984219 +v -0.43405 0.434496 0.985552 +v -0.502094 0.753066 0.80891 +v -0.498248 0.740658 0.819013 +v -0.494276 0.728227 0.829003 +v -0.490314 0.715644 0.838356 +v -0.486243 0.703032 0.847615 +v -0.482173 0.690302 0.856258 +v -0.478017 0.677569 0.864829 +v -0.47398 0.66485 0.872835 +v -0.469879 0.652155 0.880769 +v -0.466035 0.639501 0.888111 +v -0.462182 0.62684 0.895435 +v -0.458618 0.614358 0.902499 +v -0.455152 0.601902 0.909655 +v -0.452105 0.589711 0.916403 +v -0.449193 0.577564 0.923295 +v -0.446789 0.565609 0.930001 +v -0.444509 0.553689 0.936835 +v -0.442546 0.542049 0.943341 +v -0.44068 0.530466 0.949949 +v -0.439242 0.519139 0.956077 +v -0.437826 0.507752 0.962254 +v -0.436609 0.496599 0.968085 +v -0.435353 0.485435 0.973874 +v -0.433798 0.476136 0.978679 +v -0.4322 0.466803 0.983421 +v -0.430121 0.456244 0.987152 +v -0.427854 0.445595 0.990555 +v -0.424844 0.435566 0.991684 +v -0.492391 0.761246 0.817198 +v -0.488558 0.748532 0.827303 +v -0.48458 0.735778 0.837291 +v -0.480619 0.722843 0.846627 +v -0.476515 0.70986 0.85585 +v -0.472431 0.696796 0.864474 +v -0.468224 0.683722 0.872988 +v -0.464148 0.670718 0.880937 +v -0.460013 0.657723 0.888812 +v -0.456211 0.644777 
0.896121 +v -0.452376 0.631821 0.903391 +v -0.44888 0.618977 0.910381 +v -0.445439 0.606159 0.917433 +v -0.442438 0.59369 0.924258 +v -0.439513 0.581249 0.931156 +v -0.437192 0.568973 0.937829 +v -0.434905 0.556714 0.944541 +v -0.433019 0.54482 0.951004 +v -0.43115 0.53293 0.957481 +v -0.429804 0.521309 0.963531 +v -0.428462 0.509622 0.969609 +v -0.427394 0.498067 0.975234 +v -0.42626 0.486497 0.980776 +v -0.424463 0.477103 0.985537 +v -0.422571 0.467646 0.990174 +v -0.420486 0.456965 0.993617 +v -0.418202 0.446146 0.996712 +v -0.415008 0.436103 0.997426 +v -0.482604 0.769402 0.825416 +v -0.478775 0.756375 0.835523 +v -0.474808 0.743305 0.845517 +v -0.470809 0.729999 0.854808 +v -0.466715 0.71666 0.864016 +v -0.462543 0.703243 0.872553 +v -0.458311 0.689825 0.881036 +v -0.454196 0.676544 0.888918 +v -0.450094 0.663282 0.896802 +v -0.446244 0.650017 0.903988 +v -0.442467 0.636776 0.91124 +v -0.439035 0.623583 0.918154 +v -0.435717 0.61043 0.925192 +v -0.4327 0.597644 0.932029 +v -0.429795 0.584922 0.938969 +v -0.427476 0.57231 0.945521 +v -0.425253 0.559731 0.95218 +v -0.423378 0.547546 0.95853 +v -0.421562 0.535387 0.964952 +v -0.42029 0.523444 0.970905 +v -0.419033 0.51146 0.976888 +v -0.41812 0.499503 0.982304 +v -0.417117 0.487518 0.987593 +v -0.415053 0.478021 0.992293 +v -0.412893 0.468462 0.99684 +v -0.410834 0.457651 1.00003 +v -0.408473 0.446704 1.00272 +v -0.405209 0.43664 1.00313 +v -0.472242 0.777566 0.833805 +v -0.468407 0.764209 0.843915 +v -0.464441 0.750809 0.853915 +v -0.460412 0.737159 0.863156 +v -0.456249 0.723461 0.872271 +v -0.452096 0.709684 0.880786 +v -0.447798 0.695873 0.889153 +v -0.44367 0.682259 0.896952 +v -0.439514 0.668656 0.904707 +v -0.435735 0.655028 0.911958 +v -0.431925 0.641397 0.919181 +v -0.42853 0.627984 0.926146 +v -0.425185 0.614589 0.933163 +v -0.422287 0.601431 0.939981 +v -0.419439 0.588318 0.946839 +v -0.417144 0.575398 0.95343 +v -0.414871 0.562494 0.96005 +v -0.413034 0.549937 0.966371 +v -0.41121 0.537365 0.972713 +v -0.410068 0.525072 0.978516 +v -0.408903 0.512744 0.984294 +v -0.408299 0.500323 0.989322 +v -0.407594 0.487867 0.994225 +v -0.405217 0.478245 0.998874 +v -0.40275 0.468562 1.00337 +v -0.400423 0.457603 1.00636 +v -0.397898 0.446495 1.00892 +v -0.3946 0.436518 1.00876 +v -0.461832 0.785726 0.84215 +v -0.457983 0.772022 0.852264 +v -0.454008 0.758292 0.862254 +v -0.449957 0.744291 0.871446 +v -0.445777 0.730248 0.880517 +v -0.441582 0.7161 0.888947 +v -0.437263 0.70192 0.897249 +v -0.433076 0.687951 0.904914 +v -0.42892 0.674025 0.912591 +v -0.425118 0.660011 0.91982 +v -0.42135 0.646008 0.927084 +v -0.417947 0.632356 0.934046 +v -0.414589 0.618727 0.941046 +v -0.411784 0.60519 0.947832 +v -0.409052 0.591697 0.954677 +v -0.406738 0.578466 0.961258 +v -0.404469 0.565248 0.967885 +v -0.402616 0.55228 0.974123 +v -0.400799 0.5393 0.980421 +v -0.399756 0.526661 0.98602 +v -0.398691 0.513982 0.991583 +v -0.398398 0.501098 0.996245 +v -0.397989 0.488162 1.00072 +v -0.395297 0.478427 1.00531 +v -0.392503 0.468587 1.00972 +v -0.389981 0.457521 1.01259 +v -0.387192 0.446338 1.0149 +v -0.383838 0.436433 1.01411 +v -0.450911 0.793834 0.850651 +v -0.447081 0.779746 0.860767 +v -0.443104 0.765634 0.87073 +v -0.439002 0.7513 0.879863 +v -0.43478 0.736926 0.888874 +v -0.43054 0.722417 0.897215 +v -0.426174 0.707874 0.905416 +v -0.422015 0.693505 0.913014 +v -0.417836 0.679176 0.920576 +v -0.414011 0.66489 0.927765 +v -0.410192 0.650604 0.934965 +v -0.406831 0.636543 0.941936 +v -0.403507 0.622504 0.948954 +v -0.400709 0.608654 0.955815 +v 
-0.397937 0.594838 0.962704 +v -0.395619 0.581223 0.969296 +v -0.393317 0.567603 0.975914 +v -0.391514 0.554169 0.982084 +v -0.38971 0.54071 0.98826 +v -0.388597 0.527606 0.993771 +v -0.387466 0.514471 0.999251 +v -0.387118 0.501211 1.00366 +v -0.386624 0.487915 1.00784 +v -0.383835 0.477781 1.01213 +v -0.38098 0.467579 1.01631 +v -0.378298 0.456298 1.01869 +v -0.375253 0.444934 1.02027 +v -0.372032 0.435306 1.0188 +v -0.439938 0.801928 0.859105 +v -0.436097 0.787441 0.869196 +v -0.432134 0.772961 0.879146 +v -0.428006 0.758297 0.888236 +v -0.423773 0.743601 0.897218 +v -0.419456 0.728725 0.905428 +v -0.415058 0.713828 0.913547 +v -0.410852 0.699025 0.920999 +v -0.406673 0.6843 0.928468 +v -0.402815 0.669738 0.935596 +v -0.398975 0.655181 0.942762 +v -0.395627 0.640697 0.949727 +v -0.392392 0.626267 0.956819 +v -0.389534 0.612085 0.963684 +v -0.38675 0.597939 0.970631 +v -0.384423 0.58394 0.97724 +v -0.382101 0.569918 0.983872 +v -0.380326 0.556018 0.989932 +v -0.378546 0.542085 0.995991 +v -0.377377 0.528522 1.00143 +v -0.376169 0.514918 1.00679 +v -0.375755 0.501299 1.01093 +v -0.375199 0.487604 1.01485 +v -0.372328 0.477109 1.01888 +v -0.369364 0.46655 1.0227 +v -0.366568 0.455033 1.02473 +v -0.363237 0.443499 1.02551 +v -0.360178 0.434242 1.02338 +v -0.428294 0.809939 0.867608 +v -0.424451 0.795098 0.877779 +v -0.420482 0.780239 0.887804 +v -0.416361 0.765173 0.896822 +v -0.41212 0.750067 0.905715 +v -0.407803 0.734845 0.913813 +v -0.403365 0.719591 0.921773 +v -0.399113 0.704431 0.929162 +v -0.394852 0.689316 0.936534 +v -0.391019 0.674375 0.943571 +v -0.387211 0.659437 0.950646 +v -0.383857 0.644601 0.957635 +v -0.380554 0.629794 0.964672 +v -0.377758 0.615248 0.971623 +v -0.374995 0.600715 0.978617 +v -0.372643 0.58631 0.985244 +v -0.370275 0.571895 0.99185 +v -0.368406 0.557595 0.997895 +v -0.366505 0.543246 1.0039 +v -0.365112 0.529093 1.0093 +v -0.36366 0.514903 1.0146 +v -0.362949 0.500796 1.01859 +v -0.362093 0.486655 1.02233 +v -0.359314 0.475665 1.02596 +v -0.356431 0.464615 1.02937 +v -0.353629 0.453078 1.03045 +v -0.350272 0.441674 1.03032 +v -0.347167 0.432707 1.02748 +v -0.416591 0.817917 0.876059 +v -0.412741 0.802728 0.886298 +v -0.408778 0.787503 0.896417 +v -0.404649 0.772026 0.905338 +v -0.400412 0.756516 0.914142 +v -0.396085 0.740947 0.922117 +v -0.39164 0.725345 0.929954 +v -0.387312 0.709819 0.93725 +v -0.382989 0.694318 0.944548 +v -0.379155 0.678984 0.951457 +v -0.375383 0.663674 0.958436 +v -0.371993 0.648465 0.965406 +v -0.368658 0.633292 0.972441 +v -0.365907 0.61838 0.97946 +v -0.36318 0.603475 0.986514 +v -0.360794 0.588687 0.993144 +v -0.358388 0.573878 0.999744 +v -0.356397 0.559156 1.00572 +v -0.354365 0.544393 1.01164 +v -0.352751 0.529651 1.01701 +v -0.351052 0.514834 1.02226 +v -0.350053 0.500273 1.02609 +v -0.34889 0.485676 1.02958 +v -0.346215 0.474211 1.0329 +v -0.343361 0.462665 1.03579 +v -0.34054 0.451166 1.03603 +v -0.337202 0.43993 1.0348 +v -0.334132 0.431242 1.0314 +v -0.400325 0.810227 0.894706 +v -0.396383 0.794627 0.904827 +v -0.392267 0.778811 0.913782 +v -0.388016 0.762957 0.922579 +v -0.383714 0.746952 0.930466 +v -0.379284 0.730914 0.938202 +v -0.374929 0.71499 0.945342 +v -0.370582 0.699061 0.952494 +v -0.366708 0.683321 0.959346 +v -0.362867 0.667602 0.96625 +v -0.359477 0.652109 0.973206 +v -0.356148 0.636637 0.980249 +v -0.35338 0.621333 0.987295 +v -0.350636 0.606037 0.994386 +v -0.348193 0.590837 1.0011 +v -0.34569 0.575607 1.00772 +v -0.343511 0.560346 1.01371 +v -0.341265 0.545036 1.0196 +v -0.339306 0.52973 1.02492 +v 
-0.337233 0.51436 1.03005 +v -0.33564 0.499268 1.03382 +v -0.333878 0.484145 1.03723 +v -0.331565 0.472391 1.03985 +v -0.329084 0.460492 1.04208 +v -0.326179 0.449134 1.04135 +v -0.322881 0.438167 1.03921 +v -0.319885 0.429562 1.03504 +v -0.387814 0.817688 0.903019 +v -0.383924 0.801737 0.913155 +v -0.379786 0.785559 0.922118 +v -0.375544 0.769373 0.930914 +v -0.371249 0.752931 0.938702 +v -0.366856 0.73646 0.946358 +v -0.362474 0.720123 0.953347 +v -0.358105 0.703791 0.960341 +v -0.354163 0.687626 0.967111 +v -0.35029 0.671512 0.973967 +v -0.346872 0.655709 0.980878 +v -0.343543 0.639944 0.987916 +v -0.340754 0.624241 0.994992 +v -0.338033 0.608574 1.00217 +v -0.33551 0.592955 1.00893 +v -0.332941 0.577306 1.01561 +v -0.330549 0.561496 1.02158 +v -0.328073 0.5456 1.02743 +v -0.325767 0.529752 1.03267 +v -0.323318 0.513846 1.03765 +v -0.321132 0.498219 1.04134 +v -0.318725 0.482555 1.04456 +v -0.316818 0.470549 1.04664 +v -0.31459 0.458365 1.04791 +v -0.311733 0.447208 1.04626 +v -0.308458 0.436602 1.0432 +v -0.30563 0.427953 1.03865 +v -0.374396 0.824885 0.911088 +v -0.370586 0.808633 0.92133 +v -0.366524 0.792026 0.930272 +v -0.362348 0.775433 0.939039 +v -0.358077 0.758592 0.94678 +v -0.35368 0.741715 0.954349 +v -0.349365 0.724999 0.961192 +v -0.345031 0.708285 0.968013 +v -0.341091 0.69173 0.974662 +v -0.337201 0.675247 0.981383 +v -0.333776 0.659041 0.988282 +v -0.330448 0.642882 0.995352 +v -0.327673 0.626828 1.00249 +v -0.324939 0.610783 1.0097 +v -0.322338 0.59476 1.01658 +v -0.319646 0.57869 1.02331 +v -0.317026 0.562379 1.02931 +v -0.314279 0.545987 1.03511 +v -0.311524 0.529565 1.04029 +v -0.308584 0.513092 1.04509 +v -0.305407 0.496936 1.04877 +v -0.302008 0.480753 1.05194 +v -0.300749 0.468472 1.0532 +v -0.299137 0.455968 1.05358 +v -0.296478 0.445059 1.05089 +v -0.293474 0.434623 1.04717 +v -0.290593 0.425882 1.04189 +v -0.360886 0.832036 0.919048 +v -0.357168 0.815496 0.929407 +v -0.353157 0.798463 0.938306 +v -0.349061 0.781454 0.947049 +v -0.344804 0.764219 0.954716 +v -0.340434 0.746942 0.96222 +v -0.336152 0.729844 0.968891 +v -0.331861 0.712753 0.975531 +v -0.327914 0.695802 0.982033 +v -0.324026 0.678938 0.988641 +v -0.320593 0.662334 0.995552 +v -0.317246 0.645769 1.00261 +v -0.314477 0.629346 1.0098 +v -0.311748 0.612941 1.01707 +v -0.309061 0.596518 1.02407 +v -0.30626 0.580009 1.03087 +v -0.303389 0.563206 1.03686 +v -0.300364 0.546325 1.04256 +v -0.297154 0.529346 1.04763 +v -0.293704 0.512289 1.0522 +v -0.28954 0.495615 1.05586 +v -0.285142 0.478889 1.05897 +v -0.284551 0.46631 1.05956 +v -0.283491 0.453709 1.05876 +v -0.281111 0.442954 1.0554 +v -0.27845 0.432696 1.05095 +v -0.275579 0.42378 1.04521 +v -0.346409 0.83872 0.926448 +v -0.342826 0.821884 0.936909 +v -0.338915 0.804526 0.945903 +v -0.334898 0.787161 0.954711 +v -0.330738 0.76961 0.962287 +v -0.326453 0.752013 0.969671 +v -0.322222 0.734502 0.976261 +v -0.317959 0.716997 0.982804 +v -0.314008 0.699622 0.98921 +v -0.31013 0.682331 0.995732 +v -0.306766 0.665328 1.00251 +v -0.303532 0.648389 1.0095 +v -0.300707 0.631637 1.01675 +v -0.297939 0.6149 1.0241 +v -0.295217 0.598071 1.03115 +v -0.292366 0.581123 1.03799 +v -0.289365 0.563816 1.04402 +v -0.286154 0.546419 1.04964 +v -0.282548 0.528884 1.05449 +v -0.278675 0.511263 1.05876 +v -0.273991 0.494025 1.06212 +v -0.269096 0.476788 1.06489 +v -0.268685 0.464423 1.06503 +v -0.267828 0.451969 1.06383 +v -0.265597 0.441141 1.05979 +v -0.263085 0.430706 1.05488 +v -0.260218 0.421618 1.04882 +v -0.33185 0.845352 0.933722 +v -0.328386 0.828211 0.944273 +v 
-0.324559 0.810533 0.953332 +v -0.320631 0.79282 0.962213 +v -0.316552 0.774949 0.969673 +v -0.312363 0.757032 0.976934 +v -0.308176 0.739118 0.98345 +v -0.30393 0.721189 0.989864 +v -0.299974 0.703389 0.996152 +v -0.296082 0.685652 1.00257 +v -0.292828 0.668256 1.00925 +v -0.289705 0.65094 1.01619 +v -0.286797 0.633841 1.02344 +v -0.283978 0.616766 1.03084 +v -0.281227 0.599537 1.03797 +v -0.278346 0.582176 1.04484 +v -0.275183 0.564364 1.05086 +v -0.271765 0.54643 1.05632 +v -0.267774 0.528378 1.06101 +v -0.263472 0.510176 1.06491 +v -0.258291 0.492417 1.06794 +v -0.252873 0.474663 1.07027 +v -0.252677 0.462516 1.07016 +v -0.251979 0.450501 1.06827 +v -0.249988 0.439383 1.06412 +v -0.247709 0.428721 1.05882 +v -0.244893 0.419477 1.05245 +v -0.316342 0.851423 0.940179 +v -0.313036 0.834006 0.950853 +v -0.309373 0.816018 0.959968 +v -0.305596 0.797967 0.96889 +v -0.301627 0.779761 0.976395 +v -0.297552 0.761505 0.983673 +v -0.293439 0.743253 0.990119 +v -0.289263 0.724979 0.996447 +v -0.285409 0.706836 1.00262 +v -0.281603 0.688745 1.00888 +v -0.278398 0.670997 1.01547 +v -0.275318 0.653325 1.02235 +v -0.272471 0.63584 1.02963 +v -0.269691 0.618361 1.03707 +v -0.266876 0.600758 1.04425 +v -0.26392 0.583026 1.05116 +v -0.260718 0.564827 1.05712 +v -0.257252 0.546492 1.06244 +v -0.253154 0.52765 1.06684 +v -0.248758 0.50868 1.07041 +v -0.243542 0.490247 1.07293 +v -0.238053 0.471852 1.07463 +v -0.237532 0.460362 1.07416 +v -0.23671 0.448976 1.07238 +v -0.234632 0.437846 1.06807 +v -0.23228 0.427161 1.06282 +v -0.229307 0.417706 1.0564 +v -0.300751 0.85743 0.946501 +v -0.297599 0.839739 0.957293 +v -0.294082 0.821444 0.966433 +v -0.290449 0.803063 0.975355 +v -0.286581 0.784515 0.98287 +v -0.282591 0.765911 0.990123 +v -0.278552 0.747319 0.996491 +v -0.274435 0.728702 1.00271 +v -0.270686 0.710219 1.00876 +v -0.266968 0.691788 1.01488 +v -0.263842 0.67368 1.02143 +v -0.26082 0.655657 1.02824 +v -0.258012 0.637759 1.03552 +v -0.255241 0.619835 1.04294 +v -0.252368 0.601875 1.05022 +v -0.249331 0.583772 1.0571 +v -0.246099 0.565211 1.06301 +v -0.242571 0.546477 1.06814 +v -0.23842 0.526863 1.07236 +v -0.2339 0.507141 1.07548 +v -0.228649 0.488056 1.07751 +v -0.223102 0.469024 1.07849 +v -0.222369 0.458189 1.0778 +v -0.221346 0.447536 1.07583 +v -0.219221 0.436292 1.07195 +v -0.216811 0.425772 1.0666 +v -0.213695 0.416067 1.06025 +v -0.284321 0.862734 0.951927 +v -0.281336 0.844811 0.962804 +v -0.277956 0.826256 0.972047 +v -0.27447 0.80761 0.98105 +v -0.270811 0.788726 0.9886 +v -0.267042 0.769794 0.995866 +v -0.263169 0.750947 1.00219 +v -0.259232 0.732068 1.00834 +v -0.255568 0.713222 1.01426 +v -0.25198 0.694465 1.02036 +v -0.248873 0.676058 1.02683 +v -0.245885 0.65777 1.03363 +v -0.243129 0.639571 1.04081 +v -0.240437 0.621366 1.04821 +v -0.237608 0.603099 1.05547 +v -0.234634 0.584698 1.06236 +v -0.231491 0.565668 1.06824 +v -0.228023 0.546398 1.07326 +v -0.224033 0.52629 1.07723 +v -0.219665 0.506055 1.07991 +v -0.214399 0.485587 1.08143 +v -0.208727 0.465203 1.08157 +v -0.207964 0.455728 1.08077 +v -0.20698 0.446474 1.07893 +v -0.204649 0.435634 1.07526 +v -0.202095 0.42531 1.07043 +v -0.198897 0.415111 1.06416 +v -0.267832 0.867983 0.957235 +v -0.26501 0.849823 0.968184 +v -0.261752 0.831003 0.977478 +v -0.258398 0.812084 0.986527 +v -0.254928 0.792858 0.994064 +v -0.251354 0.773595 1.00129 +v -0.247645 0.754493 1.00752 +v -0.243873 0.735357 1.01358 +v -0.240323 0.716165 1.01942 +v -0.236836 0.697049 1.02545 +v -0.233769 0.678348 1.03189 +v -0.230814 0.659781 1.03868 +v -0.228126 
0.641281 1.04578 +v -0.225509 0.622791 1.05313 +v -0.222723 0.604221 1.06039 +v -0.219799 0.585522 1.0672 +v -0.216717 0.566001 1.07305 +v -0.213311 0.546248 1.07784 +v -0.209522 0.525687 1.08164 +v -0.205292 0.504972 1.08388 +v -0.200032 0.483092 1.08506 +v -0.194247 0.461378 1.084 +v -0.193453 0.453389 1.08325 +v -0.192551 0.445492 1.08174 +v -0.190049 0.434941 1.07856 +v -0.187384 0.424824 1.0742 +v -0.184067 0.41421 1.06799 +v -0.250681 0.872611 0.961667 +v -0.248002 0.854239 0.972681 +v -0.244938 0.835219 0.982019 +v -0.241782 0.816095 0.991115 +v -0.238435 0.796612 0.998709 +v -0.234994 0.777118 1.00598 +v -0.23155 0.757643 1.01226 +v -0.228045 0.738116 1.01831 +v -0.224739 0.718761 1.02413 +v -0.221466 0.699457 1.03004 +v -0.218462 0.680488 1.03641 +v -0.215572 0.661659 1.04312 +v -0.212951 0.642857 1.05021 +v -0.210377 0.624091 1.05746 +v -0.207717 0.605252 1.06463 +v -0.204943 0.586268 1.07144 +v -0.201922 0.566476 1.07733 +v -0.198575 0.546453 1.08209 +v -0.194855 0.525749 1.08576 +v -0.190698 0.504857 1.08773 +v -0.186612 0.486084 1.08847 +v -0.182192 0.467439 1.08752 +v -0.180779 0.45724 1.0865 +v -0.179242 0.447184 1.0847 +v -0.175205 0.436059 1.08209 +v -0.17096 0.425342 1.07825 +v -0.169523 0.413896 1.07197 +v -0.23349 0.877188 0.965998 +v -0.230934 0.858587 0.977053 +v -0.228062 0.839368 0.986407 +v -0.225097 0.820035 0.995496 +v -0.221854 0.8003 1.0031 +v -0.218544 0.780562 1.01038 +v -0.215344 0.760706 1.01663 +v -0.212093 0.74081 1.0227 +v -0.209023 0.721257 1.02843 +v -0.205987 0.701793 1.0343 +v -0.203036 0.68253 1.04052 +v -0.200221 0.663448 1.04723 +v -0.197643 0.644332 1.0542 +v -0.195138 0.625269 1.06144 +v -0.192596 0.606168 1.06852 +v -0.189982 0.586885 1.07528 +v -0.187001 0.566801 1.08117 +v -0.183697 0.54652 1.08582 +v -0.180042 0.525707 1.08933 +v -0.175985 0.504693 1.09097 +v -0.173118 0.489068 1.09139 +v -0.170036 0.473528 1.09061 +v -0.168052 0.461142 1.08952 +v -0.1659 0.448923 1.08733 +v -0.16027 0.437273 1.08533 +v -0.154486 0.425992 1.08208 +v -0.154976 0.413769 1.07574 +v -0.215751 0.881201 0.969672 +v -0.213353 0.862397 0.980687 +v -0.210668 0.842938 0.990063 +v -0.207911 0.823369 0.999186 +v -0.204924 0.803424 1.00681 +v -0.201859 0.783445 1.0141 +v -0.198829 0.76342 1.02043 +v -0.195716 0.743327 1.02649 +v -0.192837 0.72353 1.03221 +v -0.189968 0.703808 1.03801 +v -0.187264 0.684324 1.04419 +v -0.184637 0.664933 1.0507 +v -0.182216 0.645679 1.05764 +v -0.179833 0.626445 1.06476 +v -0.177444 0.607069 1.07183 +v -0.174972 0.587504 1.07857 +v -0.172201 0.567365 1.08445 +v -0.169134 0.54692 1.08894 +v -0.16594 0.52596 1.09227 +v -0.162414 0.504797 1.09374 +v -0.159894 0.490491 1.09409 +v -0.157182 0.476257 1.0934 +v -0.155217 0.46393 1.09241 +v -0.153107 0.451742 1.09051 +v -0.14932 0.440275 1.08842 +v -0.145401 0.429106 1.08525 +v -0.144954 0.417984 1.08053 +v -0.197987 0.885168 0.97326 +v -0.195737 0.866153 0.984205 +v -0.193243 0.846455 0.993595 +v -0.19067 0.826633 1.0027 +v -0.187927 0.806462 1.0103 +v -0.185116 0.78624 1.01759 +v -0.18222 0.766032 1.02394 +v -0.179254 0.745752 1.02997 +v -0.176557 0.725699 1.03563 +v -0.173857 0.705707 1.04134 +v -0.171392 0.68597 1.04749 +v -0.168983 0.666282 1.05388 +v -0.166702 0.646872 1.06072 +v -0.164459 0.627491 1.06775 +v -0.162204 0.607809 1.07475 +v -0.159871 0.588017 1.08145 +v -0.157271 0.567821 1.08713 +v -0.154456 0.547297 1.09149 +v -0.151758 0.526203 1.09476 +v -0.148761 0.504908 1.09602 +v -0.146602 0.491939 1.09635 +v -0.144324 0.479003 1.09582 +v -0.142345 0.466777 1.09496 +v -0.140263 0.454648 
+[... several thousand additional `v <x> <y> <z>` vertex records omitted: this span of the diff is the plain-text geometry data of a Wavefront OBJ mesh file added by this commit (vertex positions only; no faces, normals, or texture coordinates appear in this portion) ...]
0.664574 +v -0.540964 -0.524105 0.6575 +v -0.542642 -0.527238 0.650378 +v -0.544365 -0.530312 0.6429 +v -0.545955 -0.533328 0.635374 +v -0.547758 -0.536158 0.627679 +v -0.549341 -0.538893 0.619891 +v -0.551058 -0.541604 0.611909 +v -0.552501 -0.544179 0.603841 +v -0.554018 -0.546583 0.595567 +v -0.555352 -0.548904 0.58724 +v -0.556696 -0.551151 0.578809 +v -0.557781 -0.553275 0.57029 +v -0.559029 -0.555244 0.561675 +v -0.560019 -0.557063 0.552979 +v -0.561029 -0.558762 0.544173 +v -0.561858 -0.560357 0.535307 +v -0.562754 -0.561811 0.526302 +v -0.56338 -0.563118 0.517248 +v -0.564006 -0.564293 0.508212 +v -0.564456 -0.56537 0.499164 +v -0.564921 -0.566259 0.490043 +v -0.565208 -0.567046 0.480902 +v -0.565527 -0.567669 0.471743 +v -0.56573 -0.568164 0.462502 +v -0.56595 -0.568494 0.453243 +v -0.566041 -0.568751 0.443971 +v -0.566185 -0.568854 0.4347 +v -0.566151 -0.568835 0.425455 +v -0.523528 -0.49302 0.720149 +v -0.524363 -0.497012 0.715672 +v -0.52554 -0.500829 0.710622 +v -0.526629 -0.504616 0.70553 +v -0.528027 -0.508381 0.699867 +v -0.529314 -0.512113 0.694163 +v -0.530873 -0.515727 0.688008 +v -0.532241 -0.519272 0.68176 +v -0.53385 -0.522731 0.675056 +v -0.53534 -0.526144 0.668307 +v -0.537011 -0.529471 0.661271 +v -0.538585 -0.532759 0.654159 +v -0.540321 -0.536002 0.646736 +v -0.541895 -0.53917 0.639244 +v -0.543617 -0.542139 0.631564 +v -0.545164 -0.545024 0.623819 +v -0.546796 -0.547838 0.615824 +v -0.548189 -0.550537 0.60776 +v -0.549662 -0.553127 0.599539 +v -0.550927 -0.55559 0.591232 +v -0.552255 -0.557964 0.582771 +v -0.553311 -0.560199 0.574236 +v -0.55448 -0.56226 0.565597 +v -0.555409 -0.564184 0.556874 +v -0.556444 -0.566015 0.548059 +v -0.557217 -0.567713 0.539184 +v -0.558 -0.569239 0.530154 +v -0.558593 -0.570638 0.521078 +v -0.559211 -0.571861 0.511943 +v -0.559679 -0.573001 0.50279 +v -0.560121 -0.57396 0.493622 +v -0.560339 -0.574804 0.48445 +v -0.560602 -0.575507 0.475246 +v -0.560709 -0.576107 0.466026 +v -0.560873 -0.576491 0.456719 +v -0.560972 -0.576827 0.447411 +v -0.561072 -0.576979 0.438098 +v -0.561017 -0.577021 0.428803 +v -0.519898 -0.496942 0.724141 +v -0.520832 -0.501093 0.71966 +v -0.521949 -0.505035 0.714467 +v -0.523115 -0.509011 0.709292 +v -0.524393 -0.512905 0.703576 +v -0.525676 -0.516794 0.697845 +v -0.527094 -0.520557 0.691726 +v -0.528436 -0.524276 0.685554 +v -0.53004 -0.527882 0.678865 +v -0.531481 -0.531404 0.672097 +v -0.533025 -0.534855 0.665039 +v -0.534529 -0.538298 0.657957 +v -0.536181 -0.541671 0.650531 +v -0.537704 -0.544955 0.643065 +v -0.539352 -0.548059 0.635403 +v -0.540844 -0.551089 0.627686 +v -0.542403 -0.554008 0.619695 +v -0.543785 -0.556847 0.611646 +v -0.545167 -0.559581 0.603446 +v -0.54638 -0.56222 0.595181 +v -0.547683 -0.564714 0.586702 +v -0.548723 -0.567068 0.578145 +v -0.549775 -0.569199 0.569476 +v -0.550687 -0.571233 0.560743 +v -0.551614 -0.57314 0.551894 +v -0.552314 -0.574914 0.542988 +v -0.553055 -0.576554 0.53396 +v -0.553597 -0.578051 0.524867 +v -0.554213 -0.579372 0.51571 +v -0.554605 -0.580547 0.506516 +v -0.555028 -0.581558 0.497272 +v -0.555274 -0.582464 0.488007 +v -0.555501 -0.58324 0.478735 +v -0.55558 -0.583944 0.469476 +v -0.555681 -0.584416 0.460184 +v -0.555747 -0.584814 0.450835 +v -0.555808 -0.585019 0.441489 +v -0.555709 -0.58511 0.43215 +v -0.515932 -0.500957 0.728578 +v -0.516796 -0.505177 0.723966 +v -0.518057 -0.509355 0.718776 +v -0.519157 -0.513465 0.713497 +v -0.520453 -0.517541 0.707786 +v -0.521653 -0.521581 0.702007 +v -0.523105 -0.525493 0.695852 +v -0.524422 -0.529367 0.689622 
+v -0.525956 -0.533085 0.683008 +v -0.52732 -0.536734 0.676297 +v -0.528851 -0.540355 0.669256 +v -0.530258 -0.543918 0.662137 +v -0.53189 -0.547398 0.654729 +v -0.533345 -0.550806 0.647256 +v -0.534936 -0.554093 0.639591 +v -0.536312 -0.557277 0.631849 +v -0.537849 -0.560352 0.623861 +v -0.539146 -0.563308 0.615785 +v -0.540555 -0.566172 0.607586 +v -0.541753 -0.568935 0.599318 +v -0.542956 -0.571488 0.590815 +v -0.54394 -0.573914 0.58223 +v -0.545 -0.576216 0.57355 +v -0.545808 -0.578365 0.564786 +v -0.54666 -0.580355 0.555896 +v -0.547307 -0.582224 0.546961 +v -0.547993 -0.583944 0.537894 +v -0.548496 -0.585544 0.528796 +v -0.549019 -0.586935 0.51961 +v -0.549374 -0.588214 0.510399 +v -0.549724 -0.589293 0.501091 +v -0.549919 -0.590272 0.491764 +v -0.55014 -0.591099 0.482418 +v -0.550262 -0.59186 0.473066 +v -0.550352 -0.592403 0.463712 +v -0.55032 -0.592865 0.454351 +v -0.550297 -0.593096 0.444962 +v -0.511903 -0.504952 0.732955 +v -0.51289 -0.509336 0.728335 +v -0.514017 -0.513604 0.72301 +v -0.515212 -0.517916 0.717708 +v -0.516471 -0.522158 0.711976 +v -0.517656 -0.526366 0.706196 +v -0.519021 -0.530395 0.699944 +v -0.520384 -0.534423 0.693682 +v -0.521755 -0.538224 0.687085 +v -0.523129 -0.542033 0.680477 +v -0.524608 -0.545815 0.673433 +v -0.525971 -0.549536 0.666318 +v -0.52741 -0.553051 0.65885 +v -0.528913 -0.556605 0.65141 +v -0.530396 -0.560052 0.643722 +v -0.531701 -0.563408 0.635979 +v -0.533199 -0.566639 0.627983 +v -0.534443 -0.569738 0.619902 +v -0.53578 -0.572686 0.611668 +v -0.536914 -0.575532 0.603373 +v -0.5381 -0.57819 0.594879 +v -0.539082 -0.580716 0.586288 +v -0.540025 -0.583124 0.577568 +v -0.540765 -0.585401 0.568789 +v -0.541589 -0.587502 0.559868 +v -0.54218 -0.589466 0.550897 +v -0.542844 -0.591277 0.541808 +v -0.543256 -0.59293 0.532664 +v -0.543718 -0.594431 0.523491 +v -0.543987 -0.595781 0.514252 +v -0.544293 -0.596942 0.504894 +v -0.544436 -0.597997 0.495509 +v -0.544606 -0.59887 0.486136 +v -0.544667 -0.599669 0.476749 +v -0.54472 -0.600251 0.467305 +v -0.544673 -0.600768 0.457858 +v -0.507526 -0.509014 0.737772 +v -0.508456 -0.513519 0.732982 +v -0.509699 -0.517976 0.727683 +v -0.510866 -0.522393 0.722331 +v -0.512183 -0.526781 0.71655 +v -0.513354 -0.531103 0.710693 +v -0.514736 -0.535312 0.704455 +v -0.515949 -0.539442 0.698146 +v -0.517384 -0.543457 0.691565 +v -0.518653 -0.547398 0.684899 +v -0.52011 -0.551316 0.677856 +v -0.521454 -0.555187 0.670755 +v -0.522938 -0.558893 0.663315 +v -0.52425 -0.562516 0.655792 +v -0.525719 -0.566109 0.648101 +v -0.527009 -0.569617 0.640334 +v -0.528396 -0.57297 0.632331 +v -0.529547 -0.576194 0.62424 +v -0.530865 -0.579278 0.615981 +v -0.531946 -0.582221 0.607653 +v -0.533074 -0.585028 0.599148 +v -0.533981 -0.587705 0.590585 +v -0.534887 -0.59021 0.581825 +v -0.535586 -0.592581 0.57299 +v -0.536323 -0.594739 0.564003 +v -0.53688 -0.596793 0.554968 +v -0.537464 -0.598668 0.545826 +v -0.537869 -0.600425 0.536649 +v -0.538256 -0.601982 0.527416 +v -0.538473 -0.603439 0.518154 +v -0.538687 -0.604642 0.508756 +v -0.538801 -0.605777 0.499337 +v -0.538898 -0.606684 0.489904 +v -0.538922 -0.607539 0.480465 +v -0.538954 -0.608212 0.470983 +v -0.538872 -0.608803 0.461502 +v -0.503175 -0.513079 0.742579 +v -0.504172 -0.517766 0.737699 +v -0.505344 -0.52233 0.732309 +v -0.50653 -0.526917 0.726937 +v -0.507807 -0.53137 0.721078 +v -0.509062 -0.535858 0.715202 +v -0.510347 -0.540188 0.708942 +v -0.511621 -0.544519 0.702668 +v -0.512968 -0.548668 0.696036 +v -0.514253 -0.552793 0.68936 +v -0.515621 -0.556829 0.682296 +v 
-0.516943 -0.560837 0.675189 +v -0.518363 -0.564685 0.667726 +v -0.519653 -0.568466 0.660215 +v -0.520944 -0.572107 0.652441 +v -0.522167 -0.575736 0.644643 +v -0.523496 -0.579239 0.636636 +v -0.524633 -0.582627 0.628558 +v -0.525835 -0.585787 0.620256 +v -0.526878 -0.588853 0.611903 +v -0.527913 -0.591789 0.603387 +v -0.528776 -0.594622 0.594831 +v -0.529646 -0.597236 0.586016 +v -0.530266 -0.599664 0.577092 +v -0.53094 -0.601891 0.568035 +v -0.531466 -0.604033 0.558956 +v -0.531958 -0.605964 0.549772 +v -0.532297 -0.607792 0.540562 +v -0.532614 -0.609421 0.531281 +v -0.532762 -0.61094 0.521978 +v -0.532914 -0.612227 0.512584 +v -0.533004 -0.613447 0.503143 +v -0.533044 -0.6144 0.493659 +v -0.533007 -0.615292 0.484163 +v -0.532988 -0.616045 0.474642 +v -0.532845 -0.616687 0.465125 +v -0.498372 -0.51725 0.747872 +v -0.499354 -0.522043 0.742851 +v -0.500572 -0.526765 0.737422 +v -0.501702 -0.531443 0.731938 +v -0.50302 -0.536079 0.726089 +v -0.504274 -0.540689 0.720181 +v -0.505673 -0.545165 0.713941 +v -0.506885 -0.549582 0.707603 +v -0.508271 -0.553912 0.700927 +v -0.509483 -0.558156 0.694168 +v -0.510893 -0.562399 0.687093 +v -0.512161 -0.566588 0.679944 +v -0.513507 -0.570573 0.672508 +v -0.514703 -0.574474 0.664999 +v -0.515986 -0.578284 0.657192 +v -0.517091 -0.582006 0.649304 +v -0.518367 -0.585602 0.641259 +v -0.51948 -0.589109 0.633165 +v -0.520659 -0.592431 0.624835 +v -0.521641 -0.595649 0.616443 +v -0.522646 -0.598696 0.607901 +v -0.523453 -0.601633 0.599279 +v -0.524232 -0.604317 0.590379 +v -0.524806 -0.606867 0.581423 +v -0.525404 -0.609175 0.572318 +v -0.52585 -0.611385 0.563171 +v -0.526301 -0.613407 0.55392 +v -0.526568 -0.615318 0.544641 +v -0.526786 -0.616982 0.535303 +v -0.526853 -0.618539 0.525939 +v -0.526976 -0.619918 0.516512 +v -0.526992 -0.621222 0.507062 +v -0.52699 -0.622241 0.497528 +v -0.526851 -0.623155 0.487985 +v -0.5268 -0.623949 0.478398 +v -0.49356 -0.52142 0.753149 +v -0.494704 -0.526376 0.748088 +v -0.495793 -0.531181 0.742523 +v -0.497035 -0.536048 0.737036 +v -0.498248 -0.540784 0.731091 +v -0.499503 -0.545554 0.725155 +v -0.500843 -0.550102 0.718868 +v -0.502179 -0.554662 0.712553 +v -0.503514 -0.559106 0.705791 +v -0.50477 -0.563537 0.698989 +v -0.506022 -0.567911 0.691834 +v -0.507345 -0.57232 0.684688 +v -0.508647 -0.576439 0.677276 +v -0.5098 -0.580488 0.669804 +v -0.511001 -0.584444 0.661933 +v -0.512084 -0.58833 0.654026 +v -0.513275 -0.591987 0.645926 +v -0.514343 -0.595593 0.637794 +v -0.515428 -0.599041 0.629403 +v -0.516359 -0.6024 0.620964 +v -0.517287 -0.605552 0.612386 +v -0.518 -0.608562 0.603673 +v -0.51872 -0.611332 0.594722 +v -0.519247 -0.61397 0.585721 +v -0.519769 -0.616396 0.57659 +v -0.520127 -0.618673 0.567405 +v -0.52049 -0.62077 0.55811 +v -0.520633 -0.622717 0.54876 +v -0.520821 -0.624456 0.539345 +v -0.520849 -0.626076 0.529901 +v -0.520877 -0.627496 0.520411 +v -0.520823 -0.628854 0.510925 +v -0.520766 -0.629952 0.501386 +v -0.520627 -0.630956 0.491794 +v -0.488439 -0.525589 0.758806 +v -0.48949 -0.530638 0.753595 +v -0.490727 -0.535651 0.74801 +v -0.491828 -0.540608 0.742354 +v -0.493178 -0.545528 0.736444 +v -0.494344 -0.55038 0.730421 +v -0.495745 -0.555128 0.72417 +v -0.497018 -0.559822 0.717836 +v -0.498395 -0.564505 0.711086 +v -0.499576 -0.569082 0.704205 +v -0.500908 -0.573607 0.697045 +v -0.502095 -0.578071 0.68982 +v -0.503483 -0.582364 0.682364 +v -0.504629 -0.586532 0.674807 +v -0.505792 -0.590637 0.666927 +v -0.506811 -0.594675 0.658986 +v -0.507973 -0.598474 0.650849 +v -0.508948 -0.602181 0.642629 +v 
-0.50998 -0.605753 0.634206 +v -0.510815 -0.60921 0.625716 +v -0.511711 -0.612478 0.617079 +v -0.512316 -0.615554 0.608256 +v -0.513022 -0.618451 0.599275 +v -0.513484 -0.621195 0.59023 +v -0.513956 -0.623716 0.581044 +v -0.514242 -0.626095 0.571808 +v -0.514525 -0.628251 0.562467 +v -0.514582 -0.630257 0.553071 +v -0.514654 -0.63202 0.543598 +v -0.514618 -0.633711 0.534098 +v -0.514599 -0.635174 0.52453 +v -0.514511 -0.636587 0.514953 +v -0.514345 -0.637707 0.505312 +v -0.483249 -0.529742 0.764441 +v -0.484376 -0.53495 0.759152 +v -0.485554 -0.540075 0.753421 +v -0.486773 -0.545227 0.747737 +v -0.487999 -0.550235 0.741719 +v -0.489353 -0.555294 0.735762 +v -0.49056 -0.560143 0.729424 +v -0.491829 -0.565016 0.723103 +v -0.493206 -0.569863 0.716313 +v -0.494411 -0.574653 0.709431 +v -0.495784 -0.579311 0.702267 +v -0.497015 -0.583907 0.695033 +v -0.49834 -0.588286 0.687483 +v -0.499525 -0.592609 0.67985 +v -0.500635 -0.596861 0.671947 +v -0.501617 -0.601058 0.663988 +v -0.502678 -0.604973 0.655767 +v -0.503551 -0.608777 0.647479 +v -0.504468 -0.612424 0.638987 +v -0.505247 -0.615993 0.630466 +v -0.50606 -0.619355 0.62172 +v -0.506646 -0.622551 0.612834 +v -0.507229 -0.625512 0.603804 +v -0.5076 -0.628333 0.59471 +v -0.508004 -0.630923 0.585459 +v -0.508218 -0.633381 0.57617 +v -0.508419 -0.635633 0.566798 +v -0.508409 -0.637707 0.557357 +v -0.508398 -0.639532 0.547823 +v -0.508298 -0.641288 0.538279 +v -0.508209 -0.642795 0.52864 +v -0.508024 -0.64423 0.51899 +v -0.477723 -0.533875 0.770437 +v -0.478822 -0.539209 0.764978 +v -0.480075 -0.544523 0.75927 +v -0.4812 -0.549798 0.753474 +v -0.482575 -0.554974 0.747424 +v -0.483797 -0.560096 0.741324 +v -0.485142 -0.565197 0.734982 +v -0.486296 -0.570221 0.728548 +v -0.487705 -0.575196 0.721746 +v -0.48896 -0.580102 0.71484 +v -0.490372 -0.584894 0.707666 +v -0.491564 -0.589607 0.700365 +v -0.492871 -0.594244 0.69281 +v -0.493975 -0.598779 0.685154 +v -0.495169 -0.60321 0.67724 +v -0.496149 -0.607525 0.669221 +v -0.497196 -0.611572 0.660956 +v -0.497994 -0.615466 0.6526 +v -0.498814 -0.619245 0.644041 +v -0.49943 -0.622911 0.635414 +v -0.500182 -0.626339 0.626568 +v -0.500707 -0.629618 0.617613 +v -0.501249 -0.632705 0.60852 +v -0.50154 -0.635629 0.599355 +v -0.501812 -0.63828 0.590051 +v -0.501899 -0.640804 0.580696 +v -0.502064 -0.643139 0.571251 +v -0.501994 -0.645286 0.56174 +v -0.501926 -0.647164 0.552156 +v -0.501741 -0.648955 0.542552 +v -0.501563 -0.650502 0.532872 +v -0.501294 -0.651976 0.523179 +v -0.47215 -0.53804 0.776398 +v -0.473346 -0.543516 0.770864 +v -0.474544 -0.548959 0.76506 +v -0.475713 -0.554402 0.759239 +v -0.477027 -0.55966 0.753096 +v -0.478362 -0.564947 0.746933 +v -0.479614 -0.570204 0.740489 +v -0.48093 -0.575497 0.734072 +v -0.482231 -0.580543 0.727184 +v -0.483506 -0.585579 0.72025 +v -0.484809 -0.590438 0.71297 +v -0.486036 -0.595285 0.70565 +v -0.487243 -0.600133 0.698041 +v -0.488314 -0.604906 0.690367 +v -0.489489 -0.609434 0.682389 +v -0.490465 -0.613867 0.674313 +v -0.49146 -0.618015 0.665995 +v -0.492287 -0.622068 0.657616 +v -0.493029 -0.626006 0.649008 +v -0.493573 -0.629822 0.640346 +v -0.494237 -0.633309 0.631393 +v -0.494683 -0.636668 0.622359 +v -0.495091 -0.639833 0.613176 +v -0.495284 -0.642844 0.603931 +v -0.495514 -0.6456 0.594591 +v -0.495528 -0.648202 0.585204 +v -0.495607 -0.650562 0.575668 +v -0.495482 -0.652772 0.566088 +v -0.495323 -0.6547 0.556463 +v -0.495053 -0.656521 0.546801 +v -0.494777 -0.658098 0.537078 +v -0.494459 -0.659633 0.527354 +v -0.466253 -0.542208 0.782742 +v -0.467392 
-0.547739 0.777086 +v -0.468692 -0.553327 0.771235 +v -0.469886 -0.558866 0.765341 +v -0.4712 -0.56434 0.759165 +v -0.472422 -0.569775 0.752944 +v -0.47381 -0.575243 0.746404 +v -0.47505 -0.58065 0.739788 +v -0.476536 -0.585892 0.73288 +v -0.477818 -0.591057 0.725843 +v -0.479092 -0.596121 0.718601 +v -0.480137 -0.601078 0.711253 +v -0.481387 -0.606052 0.703634 +v -0.482405 -0.610934 0.695905 +v -0.483553 -0.615656 0.687831 +v -0.484452 -0.620243 0.67964 +v -0.485425 -0.624565 0.671291 +v -0.486215 -0.628801 0.662856 +v -0.486953 -0.63291 0.654182 +v -0.487459 -0.636879 0.645428 +v -0.488037 -0.640497 0.636468 +v -0.488419 -0.643943 0.627331 +v -0.488749 -0.64717 0.618071 +v -0.488862 -0.650238 0.608744 +v -0.488984 -0.653002 0.599299 +v -0.488944 -0.655664 0.58983 +v -0.488908 -0.658084 0.580249 +v -0.488689 -0.660365 0.570633 +v -0.488458 -0.662355 0.56095 +v -0.488121 -0.664229 0.55122 +v -0.487773 -0.665827 0.541424 +v -0.460299 -0.54635 0.789026 +v -0.461605 -0.552025 0.783399 +v -0.462796 -0.557687 0.77739 +v -0.464137 -0.563403 0.771461 +v -0.465319 -0.568992 0.765215 +v -0.466616 -0.574663 0.759005 +v -0.46793 -0.580246 0.752281 +v -0.469254 -0.585857 0.745564 +v -0.470687 -0.59118 0.738491 +v -0.47209 -0.596486 0.731394 +v -0.473294 -0.601744 0.724169 +v -0.474375 -0.606936 0.716877 +v -0.475484 -0.611966 0.709197 +v -0.476532 -0.616982 0.701469 +v -0.477563 -0.621845 0.693249 +v -0.478432 -0.626655 0.684971 +v -0.479379 -0.631177 0.676565 +v -0.480075 -0.635551 0.668066 +v -0.480722 -0.639742 0.659299 +v -0.481177 -0.643833 0.650463 +v -0.481706 -0.647578 0.641414 +v -0.482017 -0.651151 0.632275 +v -0.482259 -0.65446 0.622953 +v -0.482303 -0.657618 0.613574 +v -0.482337 -0.660423 0.604044 +v -0.482247 -0.663133 0.59449 +v -0.482084 -0.665567 0.584847 +v -0.481759 -0.667875 0.575173 +v -0.481487 -0.6699 0.565395 +v -0.481082 -0.67184 0.555611 +v -0.480661 -0.673466 0.545758 +v -0.454237 -0.550342 0.795519 +v -0.45539 -0.556154 0.789735 +v -0.456607 -0.562044 0.783819 +v -0.45775 -0.567904 0.777822 +v -0.459107 -0.573724 0.771526 +v -0.460402 -0.579501 0.765187 +v -0.461794 -0.585282 0.758519 +v -0.463002 -0.590993 0.751736 +v -0.464476 -0.596555 0.744672 +v -0.46577 -0.602057 0.7375 +v -0.467068 -0.607519 0.730171 +v -0.468179 -0.612882 0.722739 +v -0.469318 -0.618065 0.715055 +v -0.470236 -0.623168 0.707246 +v -0.471373 -0.628231 0.698992 +v -0.472191 -0.633126 0.690583 +v -0.473082 -0.637834 0.682094 +v -0.473695 -0.642382 0.673497 +v -0.474324 -0.646746 0.664663 +v -0.474712 -0.650961 0.655731 +v -0.475142 -0.654812 0.646627 +v -0.475358 -0.65851 0.637455 +v -0.47554 -0.661907 0.628078 +v -0.475489 -0.665104 0.618573 +v -0.475446 -0.667983 0.608958 +v -0.475235 -0.670733 0.599316 +v -0.474973 -0.673185 0.589603 +v -0.474569 -0.675519 0.579869 +v -0.474205 -0.677559 0.570003 +v -0.473746 -0.679524 0.560119 +v -0.473293 -0.68118 0.55019 +v -0.448104 -0.554288 0.801981 +v -0.449324 -0.560329 0.796163 +v -0.450407 -0.566401 0.790213 +v -0.451616 -0.572519 0.78433 +v -0.452908 -0.578443 0.777837 +v -0.454287 -0.584401 0.771407 +v -0.455552 -0.59025 0.764678 +v -0.456897 -0.596186 0.757969 +v -0.458207 -0.601912 0.750805 +v -0.459566 -0.607667 0.74366 +v -0.460782 -0.613245 0.736149 +v -0.462003 -0.618845 0.72862 +v -0.463129 -0.624219 0.720892 +v -0.46406 -0.629463 0.713065 +v -0.465081 -0.63454 0.704679 +v -0.465959 -0.639578 0.696196 +v -0.466726 -0.644449 0.687597 +v -0.467262 -0.649168 0.678905 +v -0.467832 -0.653678 0.66998 +v -0.468134 -0.658006 0.660957 +v -0.468464 -0.661968 
0.651809 +v -0.468559 -0.66576 0.642578 +v -0.468676 -0.669235 0.633125 +v -0.468588 -0.672519 0.623544 +v -0.468437 -0.675443 0.613827 +v -0.468149 -0.67825 0.604084 +v -0.467805 -0.680717 0.594295 +v -0.467322 -0.68306 0.584485 +v -0.466877 -0.685148 0.574546 +v -0.466313 -0.687094 0.56459 +v -0.441499 -0.558276 0.808967 +v -0.442649 -0.564485 0.803026 +v -0.443909 -0.570654 0.796938 +v -0.445144 -0.576807 0.790838 +v -0.446541 -0.582948 0.784358 +v -0.447838 -0.589059 0.777843 +v -0.449201 -0.595169 0.771063 +v -0.450429 -0.601217 0.76422 +v -0.451807 -0.607205 0.757125 +v -0.453018 -0.613119 0.749905 +v -0.454371 -0.619027 0.742408 +v -0.455444 -0.624791 0.734743 +v -0.456665 -0.630408 0.72691 +v -0.457574 -0.635848 0.718915 +v -0.458583 -0.64114 0.710549 +v -0.459298 -0.646284 0.702029 +v -0.460037 -0.651255 0.69334 +v -0.46057 -0.656108 0.684558 +v -0.4611 -0.660709 0.675536 +v -0.46136 -0.665119 0.666415 +v -0.461575 -0.669158 0.657124 +v -0.461579 -0.673035 0.647765 +v -0.461595 -0.676584 0.638236 +v -0.461388 -0.679963 0.628647 +v -0.461149 -0.682931 0.618849 +v -0.460795 -0.685795 0.609026 +v -0.460341 -0.688287 0.599126 +v -0.459782 -0.690685 0.589212 +v -0.459259 -0.692762 0.579185 +v -0.458644 -0.694733 0.569142 +v -0.43485 -0.562238 0.815871 +v -0.436042 -0.568685 0.809909 +v -0.437357 -0.574898 0.803628 +v -0.438714 -0.581125 0.797366 +v -0.440101 -0.587433 0.790827 +v -0.441446 -0.593718 0.78426 +v -0.442699 -0.600012 0.777362 +v -0.444011 -0.606318 0.770491 +v -0.445328 -0.612485 0.763376 +v -0.446615 -0.618682 0.75623 +v -0.447835 -0.624755 0.748604 +v -0.448977 -0.630788 0.740917 +v -0.450101 -0.636548 0.732869 +v -0.45108 -0.642234 0.724759 +v -0.451947 -0.647665 0.716343 +v -0.45263 -0.652983 0.70785 +v -0.453301 -0.658026 0.699059 +v -0.453785 -0.662988 0.690173 +v -0.454235 -0.667637 0.681025 +v -0.454453 -0.67212 0.671791 +v -0.454591 -0.676249 0.662378 +v -0.454526 -0.680233 0.652905 +v -0.454387 -0.683829 0.643301 +v -0.454105 -0.687309 0.633657 +v -0.453784 -0.690354 0.623853 +v -0.45332 -0.693239 0.613946 +v -0.452774 -0.695777 0.603944 +v -0.452156 -0.698224 0.593933 +v -0.451534 -0.700273 0.583802 +v -0.450857 -0.702261 0.57367 +v -0.42803 -0.566048 0.823022 +v -0.429145 -0.572562 0.817014 +v -0.430507 -0.579007 0.810696 +v -0.431849 -0.585423 0.804351 +v -0.433241 -0.591974 0.797737 +v -0.434519 -0.598472 0.791064 +v -0.43586 -0.605007 0.784153 +v -0.437112 -0.611477 0.777155 +v -0.438472 -0.617909 0.769951 +v -0.439672 -0.624293 0.762617 +v -0.441027 -0.630589 0.755006 +v -0.442104 -0.636751 0.747241 +v -0.443293 -0.642763 0.7392 +v -0.444141 -0.648586 0.730976 +v -0.445027 -0.654241 0.722501 +v -0.445612 -0.659722 0.713874 +v -0.446289 -0.664901 0.705014 +v -0.446698 -0.669928 0.695969 +v -0.447109 -0.674665 0.68673 +v -0.447251 -0.679224 0.67739 +v -0.44726 -0.683453 0.667869 +v -0.447077 -0.687517 0.658283 +v -0.446853 -0.69119 0.648572 +v -0.446467 -0.694732 0.638819 +v -0.446031 -0.697816 0.62896 +v -0.445478 -0.700756 0.618967 +v -0.444862 -0.703323 0.608878 +v -0.444144 -0.705757 0.598765 +v -0.443451 -0.707834 0.588572 +v -0.421109 -0.569797 0.830085 +v -0.422289 -0.576452 0.824153 +v -0.423585 -0.583056 0.817691 +v -0.425 -0.589728 0.811305 +v -0.426279 -0.596472 0.804593 +v -0.427667 -0.603254 0.797939 +v -0.428932 -0.60995 0.79088 +v -0.430271 -0.616657 0.783849 +v -0.431601 -0.623315 0.776488 +v -0.432931 -0.629987 0.769116 +v -0.434122 -0.636382 0.761369 +v -0.435282 -0.642744 0.753597 +v -0.436344 -0.648909 0.745439 +v -0.437234 -0.654963 
0.737187 +v -0.437998 -0.660776 0.728578 +v -0.438566 -0.666459 0.719864 +v -0.439234 -0.671807 0.710923 +v -0.439624 -0.676961 0.701803 +v -0.439884 -0.681745 0.692435 +v -0.439973 -0.686393 0.683005 +v -0.439889 -0.690699 0.673378 +v -0.439609 -0.694861 0.663689 +v -0.439233 -0.698538 0.653844 +v -0.438737 -0.702104 0.643977 +v -0.43823 -0.705224 0.634004 +v -0.437593 -0.708225 0.623981 +v -0.436869 -0.710771 0.613793 +v -0.43607 -0.713242 0.603589 +v -0.413674 -0.573353 0.837888 +v -0.41491 -0.580189 0.831601 +v -0.416316 -0.587083 0.825147 +v -0.417654 -0.593931 0.818651 +v -0.41905 -0.600979 0.811916 +v -0.420345 -0.607963 0.805101 +v -0.421753 -0.61485 0.798053 +v -0.423039 -0.621666 0.790916 +v -0.424428 -0.62862 0.783479 +v -0.425683 -0.635526 0.775955 +v -0.426902 -0.642219 0.768172 +v -0.427949 -0.648792 0.760239 +v -0.429069 -0.655188 0.752049 +v -0.42987 -0.661417 0.743686 +v -0.430691 -0.66744 0.735002 +v -0.431216 -0.673296 0.726199 +v -0.431804 -0.678745 0.71712 +v -0.432118 -0.684011 0.70792 +v -0.432323 -0.688956 0.698419 +v -0.432278 -0.693709 0.688821 +v -0.432113 -0.698079 0.679055 +v -0.431748 -0.702271 0.669226 +v -0.431296 -0.706018 0.659311 +v -0.430703 -0.709619 0.649327 +v -0.430069 -0.71276 0.639218 +v -0.429315 -0.71575 0.629072 +v -0.428497 -0.718309 0.618815 +v -0.427604 -0.720782 0.608531 +v -0.406096 -0.576837 0.845556 +v -0.407608 -0.583966 0.839119 +v -0.408904 -0.591037 0.832532 +v -0.410289 -0.598132 0.825985 +v -0.411657 -0.605412 0.819121 +v -0.41312 -0.612716 0.812322 +v -0.414469 -0.619692 0.805159 +v -0.41586 -0.626683 0.79802 +v -0.417191 -0.633893 0.790422 +v -0.418501 -0.641101 0.782829 +v -0.419656 -0.648017 0.774904 +v -0.42079 -0.654922 0.76696 +v -0.421735 -0.661461 0.758626 +v -0.422541 -0.667907 0.750193 +v -0.423243 -0.67403 0.741368 +v -0.423833 -0.680115 0.732525 +v -0.424286 -0.685645 0.723286 +v -0.424555 -0.69103 0.713969 +v -0.424671 -0.696103 0.704359 +v -0.42454 -0.700991 0.69461 +v -0.424272 -0.705373 0.684679 +v -0.423823 -0.709604 0.674684 +v -0.423255 -0.713372 0.664656 +v -0.422561 -0.717022 0.654595 +v -0.421797 -0.720153 0.644366 +v -0.420947 -0.723189 0.634116 +v -0.420034 -0.725783 0.623787 +v -0.419057 -0.728276 0.613406 +v -0.397994 -0.580199 0.853642 +v -0.399485 -0.587466 0.847137 +v -0.400943 -0.594849 0.840466 +v -0.402391 -0.602246 0.83376 +v -0.403854 -0.609741 0.82686 +v -0.405268 -0.617192 0.819928 +v -0.406751 -0.624509 0.812713 +v -0.4081 -0.631746 0.805391 +v -0.409498 -0.639204 0.797796 +v -0.410753 -0.64659 0.790103 +v -0.412057 -0.653835 0.782105 +v -0.413051 -0.660921 0.773939 +v -0.414115 -0.667695 0.765538 +v -0.414872 -0.674294 0.756971 +v -0.415619 -0.680688 0.748045 +v -0.416027 -0.686899 0.738968 +v -0.416409 -0.692661 0.729714 +v -0.416511 -0.69822 0.720319 +v -0.416531 -0.703339 0.710567 +v -0.416334 -0.708277 0.70063 +v -0.415953 -0.712711 0.690552 +v -0.415396 -0.716986 0.680421 +v -0.414744 -0.720793 0.670248 +v -0.413932 -0.724422 0.66002 +v -0.41308 -0.727568 0.649691 +v -0.412118 -0.730608 0.639336 +v -0.411157 -0.733246 0.628948 +v -0.389732 -0.583469 0.861608 +v -0.39137 -0.590958 0.855124 +v -0.392908 -0.59863 0.848298 +v -0.394509 -0.606378 0.841496 +v -0.395982 -0.614045 0.834533 +v -0.397483 -0.621698 0.82757 +v -0.398984 -0.629294 0.820222 +v -0.400466 -0.636875 0.812831 +v -0.401878 -0.644536 0.805179 +v -0.403169 -0.65215 0.797465 +v -0.404405 -0.65962 0.789265 +v -0.40556 -0.667045 0.78103 +v -0.406476 -0.67392 0.772452 +v -0.407264 -0.680703 0.763785 +v -0.407877 -0.687277 0.75466 
+v -0.40823 -0.693677 0.745409 +v -0.408459 -0.699602 0.736061 +v -0.408415 -0.70533 0.726595 +v -0.408314 -0.710487 0.716673 +v -0.408065 -0.71551 0.706615 +v -0.407567 -0.719986 0.696403 +v -0.406954 -0.724344 0.686152 +v -0.406154 -0.728115 0.675806 +v -0.405264 -0.731766 0.665434 +v -0.404274 -0.734901 0.654991 +v -0.403246 -0.737977 0.644547 +v -0.381073 -0.586426 0.869981 +v -0.382791 -0.594251 0.863373 +v -0.384482 -0.602231 0.856612 +v -0.38607 -0.610213 0.849742 +v -0.387701 -0.618178 0.842698 +v -0.389205 -0.626077 0.83555 +v -0.390777 -0.634039 0.828154 +v -0.392263 -0.641947 0.820684 +v -0.393702 -0.649917 0.812898 +v -0.395014 -0.657828 0.805013 +v -0.396327 -0.665528 0.796754 +v -0.397381 -0.673069 0.788342 +v -0.398347 -0.680294 0.779685 +v -0.399016 -0.68734 0.770842 +v -0.399616 -0.694091 0.76163 +v -0.39992 -0.700638 0.75226 +v -0.400093 -0.706669 0.742698 +v -0.399944 -0.712462 0.73299 +v -0.399709 -0.717724 0.722951 +v -0.399273 -0.722811 0.712797 +v -0.398682 -0.727359 0.702438 +v -0.397913 -0.731725 0.692019 +v -0.397033 -0.73549 0.681521 +v -0.39605 -0.739144 0.670995 +v -0.395008 -0.742285 0.660441 +v -0.37218 -0.589259 0.878175 +v -0.37409 -0.597484 0.871516 +v -0.375841 -0.60574 0.864742 +v -0.377627 -0.614052 0.858 +v -0.379338 -0.62228 0.8508 +v -0.381033 -0.630508 0.843589 +v -0.382622 -0.6388 0.836114 +v -0.384143 -0.647061 0.828577 +v -0.38559 -0.655309 0.820616 +v -0.386947 -0.663527 0.812591 +v -0.388227 -0.671385 0.804179 +v -0.389342 -0.679155 0.795681 +v -0.390189 -0.686625 0.786845 +v -0.390845 -0.693996 0.777879 +v -0.39129 -0.700841 0.768523 +v -0.391552 -0.707555 0.759067 +v -0.391613 -0.713646 0.749276 +v -0.391458 -0.719582 0.739379 +v -0.391042 -0.724911 0.729213 +v -0.390468 -0.730091 0.718975 +v -0.389723 -0.734656 0.708444 +v -0.388877 -0.73909 0.69788 +v -0.387878 -0.742833 0.687223 +v -0.386809 -0.7465 0.676552 +v -0.36282 -0.591794 0.886737 +v -0.364849 -0.600325 0.880092 +v -0.366793 -0.609025 0.873239 +v -0.368657 -0.61773 0.866319 +v -0.370549 -0.626387 0.859231 +v -0.372254 -0.634973 0.851999 +v -0.374002 -0.64369 0.844439 +v -0.375506 -0.65229 0.836684 +v -0.377089 -0.660843 0.828653 +v -0.378405 -0.669257 0.820435 +v -0.379687 -0.67741 0.811918 +v -0.380691 -0.685419 0.803205 +v -0.381572 -0.693146 0.794264 +v -0.382111 -0.700704 0.78516 +v -0.382561 -0.707767 0.775649 +v -0.382671 -0.714593 0.765963 +v -0.382664 -0.720794 0.756014 +v -0.382379 -0.726782 0.745941 +v -0.381914 -0.732199 0.735612 +v -0.381229 -0.737424 0.725184 +v -0.380392 -0.741987 0.714516 +v -0.379421 -0.746393 0.703795 +v -0.378343 -0.750144 0.692998 +v -0.377181 -0.753799 0.682183 +v -0.353195 -0.594222 0.89505 +v -0.355447 -0.60312 0.888523 +v -0.357596 -0.612268 0.881591 +v -0.359766 -0.621436 0.874673 +v -0.36175 -0.630487 0.867628 +v -0.363633 -0.639484 0.860488 +v -0.365463 -0.648595 0.852803 +v -0.367171 -0.657667 0.845018 +v -0.368588 -0.666377 0.836687 +v -0.37005 -0.675087 0.828395 +v -0.371234 -0.683484 0.81967 +v -0.372202 -0.691764 0.810811 +v -0.372951 -0.699676 0.801695 +v -0.373499 -0.707477 0.792473 +v -0.373771 -0.714655 0.782739 +v -0.373835 -0.721668 0.77288 +v -0.373677 -0.72792 0.762734 +v -0.373346 -0.734036 0.752496 +v -0.372746 -0.739487 0.741951 +v -0.372001 -0.744798 0.731322 +v -0.371033 -0.749358 0.720482 +v -0.369927 -0.753741 0.709581 +v -0.368779 -0.757481 0.698679 +v -0.367528 -0.761109 0.687748 +v -0.343294 -0.596456 0.903369 +v -0.345688 -0.605791 0.896908 +v -0.348049 -0.615387 0.890141 +v -0.350288 -0.624929 0.88328 +v -0.352432 
-0.634543 0.876211 +v -0.354435 -0.644055 0.869048 +v -0.356481 -0.653497 0.861341 +v -0.358255 -0.662817 0.853389 +v -0.359966 -0.671962 0.845081 +v -0.361378 -0.680969 0.836567 +v -0.36251 -0.68968 0.82775 +v -0.363306 -0.69823 0.818756 +v -0.364032 -0.706362 0.809449 +v -0.364403 -0.71427 0.799961 +v -0.364638 -0.72165 0.790064 +v -0.364563 -0.728798 0.779946 +v -0.364351 -0.7352 0.769589 +v -0.363891 -0.74139 0.759121 +v -0.363169 -0.746891 0.748372 +v -0.362266 -0.752222 0.737539 +v -0.36118 -0.756794 0.726572 +v -0.359976 -0.761191 0.715528 +v -0.358706 -0.764898 0.704439 +v -0.333254 -0.598629 0.911563 +v -0.335848 -0.60842 0.90521 +v -0.338377 -0.618445 0.898575 +v -0.340874 -0.62845 0.891908 +v -0.343106 -0.638545 0.884821 +v -0.345289 -0.648658 0.877652 +v -0.34755 -0.658423 0.869869 +v -0.349524 -0.668084 0.86186 +v -0.351298 -0.677548 0.85343 +v -0.35274 -0.686872 0.844747 +v -0.353751 -0.695867 0.835821 +v -0.354493 -0.704739 0.826737 +v -0.355075 -0.713019 0.817185 +v -0.355503 -0.721214 0.807532 +v -0.355561 -0.728716 0.797368 +v -0.355379 -0.736009 0.787027 +v -0.354987 -0.742448 0.776423 +v -0.354395 -0.748708 0.765722 +v -0.353506 -0.75423 0.75475 +v -0.352483 -0.759596 0.743733 +v -0.351294 -0.764195 0.732643 +v -0.349974 -0.768589 0.721464 +v -0.322655 -0.600694 0.920151 +v -0.325398 -0.61096 0.913855 +v -0.328138 -0.621526 0.907323 +v -0.330813 -0.632056 0.900716 +v -0.333417 -0.642661 0.893707 +v -0.33575 -0.6532 0.8864 +v -0.338158 -0.663481 0.878684 +v -0.340146 -0.673575 0.870687 +v -0.342062 -0.683431 0.862176 +v -0.343459 -0.693122 0.853326 +v -0.344653 -0.702365 0.84418 +v -0.345384 -0.711383 0.834757 +v -0.345951 -0.719981 0.825052 +v -0.346106 -0.728322 0.815125 +v -0.346111 -0.735985 0.804784 +v -0.345757 -0.743347 0.794201 +v -0.345226 -0.749887 0.783397 +v -0.344417 -0.756203 0.772437 +v -0.343427 -0.761714 0.76126 +v -0.342281 -0.767067 0.750023 +v -0.340999 -0.771623 0.738714 +v -0.339597 -0.775975 0.727311 +v -0.312075 -0.602761 0.928729 +v -0.31492 -0.613503 0.922618 +v -0.317836 -0.624558 0.916099 +v -0.320734 -0.635639 0.90962 +v -0.323618 -0.646743 0.90252 +v -0.326397 -0.657828 0.895319 +v -0.328789 -0.668534 0.887539 +v -0.330926 -0.679123 0.879588 +v -0.332836 -0.689375 0.870937 +v -0.334371 -0.699473 0.86202 +v -0.335557 -0.708893 0.852498 +v -0.336425 -0.718185 0.842813 +v -0.336776 -0.72695 0.832876 +v -0.336826 -0.73552 0.822782 +v -0.336609 -0.743213 0.812161 +v -0.336173 -0.75071 0.801397 +v -0.33538 -0.757269 0.790314 +v -0.334414 -0.763666 0.779144 +v -0.333294 -0.769141 0.767746 +v -0.332028 -0.77446 0.756301 +v -0.330646 -0.778968 0.744762 +v -0.329184 -0.783306 0.733155 +v -0.301146 -0.604911 0.937504 +v -0.304157 -0.616146 0.931447 +v -0.307305 -0.627771 0.925011 +v -0.31037 -0.639377 0.918572 +v -0.313456 -0.65098 0.911713 +v -0.316238 -0.662516 0.904568 +v -0.318969 -0.67377 0.896904 +v -0.321166 -0.684836 0.8888 +v -0.323265 -0.695558 0.880095 +v -0.324756 -0.706008 0.87095 +v -0.325957 -0.715755 0.861242 +v -0.326691 -0.725337 0.85128 +v -0.327014 -0.734383 0.841132 +v -0.326872 -0.743063 0.830693 +v -0.3266 -0.750777 0.819796 +v -0.326049 -0.758274 0.808731 +v -0.325109 -0.764843 0.797377 +v -0.323975 -0.771222 0.785947 +v -0.322748 -0.776678 0.774297 +v -0.321368 -0.78196 0.762597 +v -0.319884 -0.786414 0.750822 +v -0.290229 -0.607033 0.946331 +v -0.293443 -0.618839 0.940306 +v -0.296744 -0.63098 0.933932 +v -0.300051 -0.643123 0.927578 +v -0.303281 -0.655217 0.920896 +v -0.306277 -0.667277 0.91398 +v -0.309153 -0.679035 0.906303 
+v -0.31157 -0.690651 0.898225 +v -0.313739 -0.701772 0.889279 +v -0.315364 -0.712678 0.880027 +v -0.316437 -0.722662 0.870029 +v -0.317186 -0.732615 0.859871 +v -0.317233 -0.741796 0.849373 +v -0.317005 -0.75068 0.838644 +v -0.316593 -0.758369 0.827416 +v -0.315952 -0.76587 0.816041 +v -0.314804 -0.772393 0.804416 +v -0.313515 -0.778767 0.792739 +v -0.312132 -0.784142 0.780822 +v -0.310662 -0.789411 0.768873 +v -0.309078 -0.793818 0.756865 +v -0.27893 -0.60926 0.955352 +v -0.282263 -0.621572 0.949436 +v -0.285728 -0.634265 0.943253 +v -0.289172 -0.646948 0.937056 +v -0.292711 -0.659708 0.930451 +v -0.295882 -0.672398 0.923595 +v -0.299001 -0.684643 0.916052 +v -0.301477 -0.696697 0.907988 +v -0.303757 -0.708317 0.898907 +v -0.305296 -0.719681 0.889392 +v -0.306422 -0.730074 0.879252 +v -0.306949 -0.740326 0.868842 +v -0.307008 -0.749664 0.858054 +v -0.306599 -0.758565 0.846926 +v -0.306016 -0.766292 0.835323 +v -0.305132 -0.773795 0.823542 +v -0.303924 -0.780237 0.811515 +v -0.302545 -0.786494 0.799424 +v -0.301009 -0.791782 0.787283 +v -0.299397 -0.796973 0.775119 +v -0.297779 -0.801248 0.762875 +v -0.26761 -0.611477 0.96436 +v -0.27115 -0.624326 0.958629 +v -0.274788 -0.637518 0.952685 +v -0.278455 -0.65081 0.946704 +v -0.282191 -0.664243 0.940098 +v -0.285701 -0.677594 0.933413 +v -0.288896 -0.690265 0.925876 +v -0.291577 -0.702815 0.917914 +v -0.293842 -0.714918 0.908632 +v -0.295427 -0.726801 0.898928 +v -0.296369 -0.73758 0.888558 +v -0.296875 -0.748103 0.877881 +v -0.296748 -0.757485 0.866692 +v -0.296282 -0.766514 0.855256 +v -0.29541 -0.774223 0.843206 +v -0.29433 -0.781746 0.831044 +v -0.293007 -0.788047 0.818597 +v -0.291555 -0.794211 0.806098 +v -0.289846 -0.799399 0.793724 +v -0.288081 -0.804531 0.781349 +v -0.255922 -0.613748 0.973526 +v -0.2595 -0.62719 0.967981 +v -0.263347 -0.640998 0.962241 +v -0.267217 -0.654846 0.956513 +v -0.271255 -0.668901 0.950163 +v -0.274963 -0.682872 0.943546 +v -0.278389 -0.696255 0.936103 +v -0.281081 -0.709513 0.928038 +v -0.28343 -0.722175 0.918811 +v -0.284898 -0.734501 0.908995 +v -0.285842 -0.745577 0.898318 +v -0.286168 -0.75628 0.887247 +v -0.285876 -0.765648 0.875572 +v -0.285172 -0.77479 0.86375 +v -0.284081 -0.782518 0.851316 +v -0.282742 -0.790017 0.838774 +v -0.281224 -0.796197 0.825996 +v -0.279582 -0.802231 0.813166 +v -0.277818 -0.80729 0.800406 +v -0.244225 -0.616226 0.982575 +v -0.247899 -0.630085 0.977334 +v -0.251994 -0.644493 0.971913 +v -0.256105 -0.658926 0.966466 +v -0.260345 -0.673569 0.960245 +v -0.264358 -0.68817 0.953744 +v -0.267842 -0.702216 0.946227 +v -0.270815 -0.716275 0.938305 +v -0.273033 -0.72947 0.92902 +v -0.274556 -0.742296 0.919176 +v -0.275277 -0.753569 0.90806 +v -0.27557 -0.764547 0.896676 +v -0.275011 -0.77396 0.884566 +v -0.274078 -0.783089 0.872267 +v -0.272669 -0.790783 0.859454 +v -0.271081 -0.798288 0.846571 +v -0.269372 -0.804316 0.833428 +v -0.267555 -0.810207 0.820236 +v -0.265719 -0.815078 0.807056 +v -0.23208 -0.618781 0.991838 +v -0.235862 -0.632965 0.986842 +v -0.240179 -0.64799 0.981892 +v -0.244438 -0.663149 0.976848 +v -0.248834 -0.678348 0.970867 +v -0.252831 -0.69362 0.964484 +v -0.25649 -0.708395 0.957028 +v -0.259489 -0.723194 0.949047 +v -0.261832 -0.736917 0.93966 +v -0.263249 -0.750278 0.9296 +v -0.263899 -0.761969 0.918158 +v -0.263927 -0.773242 0.906352 +v -0.263154 -0.782636 0.893766 +v -0.261995 -0.791727 0.880981 +v -0.260389 -0.79916 0.867621 +v -0.258623 -0.806488 0.854276 +v -0.256725 -0.81235 0.840718 +v -0.254739 -0.818098 0.827118 +v -0.252777 -0.822649 0.813578 +v 
-0.21983 -0.621293 1.00095 +v -0.223857 -0.636033 0.996369 +v -0.228321 -0.651741 0.991779 +v -0.232735 -0.66752 0.987125 +v -0.237222 -0.683326 0.98137 +v -0.241396 -0.699075 0.975318 +v -0.245109 -0.714607 0.967786 +v -0.248284 -0.730144 0.959884 +v -0.250447 -0.744286 0.950193 +v -0.25189 -0.758109 0.939959 +v -0.252357 -0.770205 0.928171 +v -0.252241 -0.781883 0.916017 +v -0.251192 -0.79122 0.902913 +v -0.249857 -0.800311 0.889667 +v -0.248034 -0.807529 0.875836 +v -0.246081 -0.814589 0.861942 +v -0.244007 -0.820297 0.847979 +v -0.241842 -0.82577 0.833935 +v -0.207141 -0.624071 1.01005 +v -0.21129 -0.63931 1.00607 +v -0.215952 -0.65552 1.00191 +v -0.220526 -0.671865 0.997658 +v -0.224996 -0.688424 0.992318 +v -0.228983 -0.70486 0.986508 +v -0.232775 -0.720969 0.979084 +v -0.235838 -0.737172 0.971054 +v -0.237899 -0.751915 0.961174 +v -0.239145 -0.766306 0.950648 +v -0.239498 -0.77863 0.938363 +v -0.239223 -0.790573 0.925697 +v -0.238027 -0.799855 0.912031 +v -0.236548 -0.808857 0.8982 +v -0.234561 -0.815778 0.883845 +v -0.232458 -0.822541 0.869392 +v -0.230228 -0.827789 0.85493 +v -0.227923 -0.832929 0.840468 +v -0.194299 -0.626926 1.01885 +v -0.198657 -0.642623 1.01571 +v -0.203556 -0.659321 1.01202 +v -0.208315 -0.676111 1.00813 +v -0.212644 -0.693356 1.00309 +v -0.216571 -0.710488 0.997645 +v -0.220227 -0.727374 0.99017 +v -0.223298 -0.744209 0.982146 +v -0.225101 -0.759436 0.971964 +v -0.226269 -0.774409 0.961242 +v -0.226423 -0.786916 0.948421 +v -0.226097 -0.799069 0.935281 +v -0.224752 -0.808296 0.921071 +v -0.223144 -0.817187 0.906663 +v -0.221012 -0.8238 0.891777 +v -0.218755 -0.830234 0.876768 +v -0.21637 -0.835141 0.861905 +v -0.213946 -0.840002 0.847019 +v -0.180821 -0.62958 1.02749 +v -0.185247 -0.64544 1.02525 +v -0.190223 -0.662763 1.02234 +v -0.19491 -0.680137 1.01911 +v -0.199228 -0.698187 1.01439 +v -0.202994 -0.716255 1.00911 +v -0.206533 -0.733831 1.00154 +v -0.209277 -0.751265 0.993171 +v -0.210986 -0.766916 0.98261 +v -0.212026 -0.782248 0.971442 +v -0.211994 -0.794879 0.958153 +v -0.211495 -0.807115 0.944538 +v -0.210018 -0.816204 0.929692 +v -0.208297 -0.825045 0.914653 +v -0.206308 -0.831165 0.89918 +v -0.2042 -0.837083 0.88359 +v -0.201601 -0.841709 0.86827 +v -0.198977 -0.846323 0.852934 +v -0.16715 -0.632113 1.03578 +v -0.171712 -0.648232 1.03457 +v -0.176767 -0.66621 1.03252 +v -0.18143 -0.684286 1.02996 +v -0.185615 -0.703089 1.02547 +v -0.189313 -0.721938 1.02043 +v -0.19258 -0.740107 1.01263 +v -0.195163 -0.758068 1.00403 +v -0.196641 -0.774111 0.993009 +v -0.197516 -0.789788 0.981397 +v -0.197359 -0.80261 0.967752 +v -0.196732 -0.814957 0.953681 +v -0.195144 -0.823949 0.938223 +v -0.193342 -0.832656 0.922591 +v -0.191532 -0.838415 0.906549 +v -0.189587 -0.843826 0.890386 +v -0.18679 -0.848198 0.874619 +v -0.183965 -0.852559 0.858827 +v -0.152642 -0.634378 1.04431 +v -0.15718 -0.65091 1.04417 +v -0.16222 -0.669437 1.043 +v -0.166691 -0.688141 1.04104 +v -0.170841 -0.707746 1.03683 +v -0.174276 -0.727287 1.0317 +v -0.177197 -0.745951 1.02375 +v -0.179263 -0.76444 1.01475 +v -0.180498 -0.780759 1.00319 +v -0.181114 -0.796598 0.990919 +v -0.180859 -0.809417 0.976611 +v -0.180219 -0.821767 0.9619 +v -0.178785 -0.830501 0.945897 +v -0.17719 -0.838958 0.929762 +v -0.17491 -0.84468 0.913287 +v -0.172485 -0.849968 0.896663 +v -0.16979 -0.854045 0.880519 +v -0.167072 -0.858117 0.86435 +v -0.137984 -0.636628 1.05261 +v -0.142439 -0.653634 1.05347 +v -0.147421 -0.672716 1.05316 +v -0.151688 -0.691998 1.05176 +v -0.155784 -0.712356 1.04783 +v -0.158872 -0.732599 
1.04247 +v -0.161371 -0.7518 1.03435 +v -0.162996 -0.770552 1.02505 +v -0.164022 -0.787107 1.01299 +v -0.164461 -0.803105 1.00017 +v -0.164186 -0.815946 0.985294 +v -0.163551 -0.82835 0.970008 +v -0.1623 -0.836834 0.953503 +v -0.160937 -0.84504 0.936898 +v -0.158203 -0.85069 0.920029 +v -0.155321 -0.856024 0.902929 +v -0.152741 -0.859804 0.886401 +v -0.150137 -0.863589 0.869849 +v -0.122568 -0.639235 1.06095 +v -0.126696 -0.656453 1.06277 +v -0.131402 -0.67574 1.06318 +v -0.135377 -0.695197 1.06225 +v -0.139193 -0.716067 1.05864 +v -0.141791 -0.736802 1.05309 +v -0.143874 -0.756658 1.04458 +v -0.14498 -0.775929 1.03461 +v -0.145802 -0.792648 1.02196 +v -0.146017 -0.808642 1.00834 +v -0.146022 -0.821095 0.992978 +v -0.145701 -0.833214 0.977224 +v -0.144018 -0.841693 0.960056 +v -0.142275 -0.849915 0.942827 +v -0.139718 -0.855356 0.925574 +v -0.137046 -0.86053 0.90808 +v -0.134695 -0.864245 0.891152 +v -0.13235 -0.867881 0.874282 +v -0.107017 -0.641703 1.069 +v -0.110697 -0.659182 1.07156 +v -0.115018 -0.678689 1.07255 +v -0.118572 -0.698306 1.07181 +v -0.121984 -0.719607 1.06852 +v -0.124041 -0.740664 1.06263 +v -0.125779 -0.761167 1.05394 +v -0.126496 -0.780862 1.04343 +v -0.127167 -0.797721 1.03025 +v -0.127335 -0.813826 1.01615 +v -0.127649 -0.826021 1.0004 +v -0.127739 -0.837835 0.984287 +v -0.125706 -0.846381 0.966578 +v -0.123554 -0.85461 0.948691 +v -0.121196 -0.859894 0.931079 +v -0.118744 -0.864919 0.91321 +v -0.116629 -0.868528 0.89591 +v -0.114517 -0.871919 0.878673 +v -0.0905803 -0.643386 1.07642 +v -0.0937073 -0.660807 1.07947 +v -0.0974126 -0.680501 1.08118 +v -0.100172 -0.700462 1.08051 +v -0.103131 -0.72246 1.07741 +v -0.104753 -0.743995 1.07098 +v -0.106077 -0.764979 1.06177 +v -0.106482 -0.784862 1.05033 +v -0.106981 -0.80172 1.03662 +v -0.107024 -0.817708 1.02191 +v -0.107215 -0.829728 1.00577 +v -0.107317 -0.841378 0.989355 +v -0.105547 -0.849732 0.971161 +v -0.103658 -0.857812 0.952835 +v -0.101652 -0.862885 0.934951 +v -0.0995818 -0.867779 0.916828 +v -0.0978062 -0.871346 0.899339 +v -0.0960369 -0.87472 0.88192 +v -0.0737359 -0.645246 1.08296 +v -0.0762462 -0.662597 1.08598 +v -0.0789577 -0.68252 1.08785 +v -0.0808842 -0.702605 1.08695 +v -0.0832332 -0.724976 1.08391 +v -0.0846054 -0.746714 1.07703 +v -0.0856033 -0.768056 1.06779 +v -0.085969 -0.788121 1.05597 +v -0.0864196 -0.805047 1.04206 +v -0.0864319 -0.821238 1.02716 +v -0.086632 -0.833138 1.01082 +v -0.0867843 -0.844682 0.99419 +v -0.085318 -0.852917 0.975592 +v -0.0836939 -0.860911 0.956945 +v -0.0820792 -0.865748 0.938768 +v -0.0804024 -0.870554 0.920413 +v -0.0789653 -0.874029 0.902763 +v -0.0775355 -0.877425 0.885128 +v -0.0559165 -0.646401 1.08808 +v -0.057657 -0.664543 1.0911 +v -0.0597336 -0.684975 1.09307 +v -0.0611927 -0.705591 1.09159 +v -0.0628515 -0.728157 1.08832 +v -0.0638212 -0.749807 1.08066 +v -0.0643772 -0.770566 1.07131 +v -0.0645193 -0.789822 1.05928 +v -0.0648006 -0.80677 1.04509 +v -0.064871 -0.822884 1.02997 +v -0.0651043 -0.834813 1.01343 +v -0.0653355 -0.846304 0.996585 +v -0.0642935 -0.854429 0.977931 +v -0.0630983 -0.86234 0.95924 +v -0.0619356 -0.867241 0.940931 +v -0.0607362 -0.871981 0.922487 +v -0.0596765 -0.875487 0.904773 +v -0.0586184 -0.878925 0.88703 +v -0.0377548 -0.647872 1.09143 +v -0.038682 -0.666674 1.09399 +v -0.0400344 -0.687434 1.09523 +v -0.0411043 -0.708186 1.09308 +v -0.0420248 -0.730504 1.08936 +v -0.042715 -0.751875 1.08178 +v -0.0429321 -0.771849 1.0728 +v -0.042996 -0.790568 1.06141 +v -0.0432102 -0.80772 1.04746 +v -0.0433823 -0.823941 1.03244 +v -0.0436637 
-0.836021 1.01585 +v -0.0440022 -0.847667 0.998926 +v -0.0432921 -0.855688 0.980288 +v -0.0425528 -0.863533 0.961519 +v -0.0418397 -0.868519 0.943089 +v -0.0410579 -0.873303 0.924604 +v -0.040372 -0.876863 0.906792 +v -0.0396887 -0.880371 0.888958 +v -0.0193522 -0.648276 1.09322 +v -0.0197222 -0.667169 1.09533 +v -0.020349 -0.688171 1.09648 +v -0.0208755 -0.709091 1.09373 +v -0.0213102 -0.731264 1.08958 +v -0.0216829 -0.752487 1.08185 +v -0.0217661 -0.772415 1.07288 +v -0.0218518 -0.791139 1.06147 +v -0.0219054 -0.807725 1.04806 +v -0.0219819 -0.823439 1.03362 +v -0.0222453 -0.835833 1.01671 +v -0.022418 -0.847738 0.99948 +v -0.0220997 -0.855754 0.980874 +v -0.0217796 -0.863612 0.962176 +v -0.0214815 -0.868686 0.943724 +v -0.0210688 -0.873589 0.925254 +v -0.0207709 -0.877209 0.907444 +v -0.0204762 -0.880788 0.88959 +v -0.000836098 -0.64891 1.09363 +v -0.000723122 -0.667754 1.09562 +v -0.000654344 -0.68884 1.0964 +v -0.000556546 -0.709857 1.09345 +v -0.00057644 -0.731802 1.08917 +v -0.000589864 -0.752878 1.08151 +v -0.000643792 -0.772809 1.07259 +v -0.000687429 -0.791492 1.06125 +v -0.000637564 -0.807558 1.04849 +v -0.000587488 -0.822751 1.03466 +v -0.000717041 -0.835511 1.01751 +v -0.000829418 -0.847702 0.999967 +v -0.000904864 -0.855768 0.981423 +v -0.000997121 -0.863639 0.962798 +v -0.00105682 -0.868854 0.944373 +v -0.00108101 -0.873853 0.925892 +v -0.00116765 -0.877507 0.908071 +v -0.00126188 -0.881153 0.8902 +v 0.0176715 -0.648226 1.09301 +v 0.0182481 -0.667094 1.09519 +v 0.0190585 -0.688023 1.09638 +v 0.0197942 -0.7089 1.09358 +v 0.0201881 -0.731074 1.08965 +v 0.0205941 -0.752152 1.08186 +v 0.0206587 -0.772155 1.07299 +v 0.020653 -0.790818 1.06146 +v 0.0207446 -0.807485 1.0481 +v 0.0208124 -0.823129 1.03354 +v 0.0208981 -0.835534 1.0166 +v 0.0208155 -0.847382 0.999318 +v 0.0203037 -0.855394 0.980694 +v 0.0197739 -0.863239 0.962007 +v 0.0193498 -0.868383 0.943601 +v 0.018893 -0.873341 0.925169 +v 0.0184153 -0.876953 0.90737 +v 0.017946 -0.880554 0.889544 +v 0.035984 -0.647808 1.09101 +v 0.0371372 -0.666578 1.09353 +v 0.0387691 -0.687165 1.09489 +v 0.0400772 -0.707815 1.09274 +v 0.0410004 -0.729985 1.08924 +v 0.041707 -0.751185 1.08164 +v 0.0419237 -0.77127 1.07291 +v 0.0419747 -0.789929 1.06139 +v 0.0421223 -0.807113 1.04744 +v 0.0421947 -0.823266 1.0323 +v 0.042324 -0.835341 1.01566 +v 0.0424498 -0.846925 0.998667 +v 0.0415058 -0.8549 0.979995 +v 0.0406086 -0.862659 0.961219 +v 0.0397489 -0.867752 0.942825 +v 0.0388583 -0.872671 0.924438 +v 0.037992 -0.876192 0.906642 +v 0.0371318 -0.879723 0.888827 +v 0.0540863 -0.646236 1.08762 +v 0.0561175 -0.664316 1.09047 +v 0.0584384 -0.684578 1.09253 +v 0.0600969 -0.705082 1.09113 +v 0.0618236 -0.727462 1.08815 +v 0.0628529 -0.748911 1.0806 +v 0.0633856 -0.769728 1.07145 +v 0.0634634 -0.788878 1.05925 +v 0.0636843 -0.805869 1.04502 +v 0.0636458 -0.821908 1.02971 +v 0.0636765 -0.833759 1.01307 +v 0.0637396 -0.845231 0.996132 +v 0.0625397 -0.853223 0.977488 +v 0.0612555 -0.861008 0.958779 +v 0.059939 -0.866045 0.940485 +v 0.0585907 -0.870896 0.922155 +v 0.0573483 -0.874388 0.904488 +v 0.0561114 -0.877906 0.886809 +v 0.0717963 -0.645066 1.08242 +v 0.0746468 -0.662334 1.08514 +v 0.0775128 -0.682107 1.087 +v 0.0795926 -0.702123 1.08624 +v 0.0821295 -0.724229 1.08346 +v 0.0835746 -0.745645 1.0767 +v 0.0845418 -0.766963 1.06761 +v 0.0848042 -0.786848 1.05574 +v 0.0852396 -0.803843 1.04183 +v 0.0851783 -0.820018 1.02679 +v 0.0851658 -0.831823 1.01034 +v 0.0851456 -0.843254 0.993596 +v 0.0835508 -0.851321 0.975019 +v 0.0818504 -0.859194 0.956368 +v 
[… several thousand lines of Wavefront OBJ mesh data elided: vertex records of the form "v x y z" (positions in model space), followed by triangular face records of the form "f i j k" (1-indexed references into the vertex list) …]
+f 671 800 799 +f 671 672 801 +f 671 801 800 +f 672 673 801 +f 673 802 801 +f 673 674 803 +f 673 803 802 +f 674 675 803 +f 675 804 803 +f 675 676 805 +f 675 805 804 +f 676 677 805 +f 677 806 805 +f 677 678 807 +f 677 807 806 +f 678 679 807 +f 679 808 807 +f 679 680 809 +f 679 809 808 +f 680 681 809 +f 681 810 809 +f 681 682 811 +f 681 811 810 +f 682 683 811 +f 683 812 811 +f 683 684 813 +f 683 813 812 +f 684 685 813 +f 685 814 813 +f 685 686 815 +f 685 815 814 +f 686 687 815 +f 687 816 815 +f 687 688 817 +f 687 817 816 +f 688 689 817 +f 689 818 817 +f 689 690 819 +f 689 819 818 +f 690 691 819 +f 691 820 819 +f 691 692 821 +f 691 821 820 +f 692 693 821 +f 693 822 821 +f 693 694 823 +f 693 823 822 +f 694 695 823 +f 695 824 823 +f 695 696 825 +f 695 825 824 +f 696 697 825 +f 697 826 825 +f 697 698 827 +f 697 827 826 +f 698 699 827 +f 699 828 827 +f 699 700 829 +f 699 829 828 +f 700 701 829 +f 701 830 829 +f 701 702 831 +f 701 831 830 +f 702 703 831 +f 703 832 831 +f 703 704 833 +f 703 833 832 +f 704 705 833 +f 705 834 833 +f 705 706 835 +f 705 835 834 +f 706 707 835 +f 707 836 835 +f 707 708 837 +f 707 837 836 +f 708 709 837 +f 709 838 837 +f 709 710 839 +f 709 839 838 +f 710 711 839 +f 711 840 839 +f 711 712 841 +f 711 841 840 +f 712 713 841 +f 713 842 841 +f 713 714 843 +f 713 843 842 +f 714 715 843 +f 715 844 843 +f 715 716 845 +f 715 845 844 +f 716 717 845 +f 717 846 845 +f 717 718 847 +f 717 847 846 +f 718 719 847 +f 719 848 847 +f 719 720 849 +f 719 849 848 +f 720 721 849 +f 721 850 849 +f 721 722 851 +f 721 851 850 +f 722 723 851 +f 723 852 851 +f 723 724 853 +f 723 853 852 +f 724 725 853 +f 725 854 853 +f 725 726 855 +f 725 855 854 +f 726 727 855 +f 727 856 855 +f 727 728 857 +f 727 857 856 +f 728 729 857 +f 729 858 857 +f 729 730 859 +f 729 859 858 +f 730 731 859 +f 731 860 859 +f 731 732 861 +f 731 861 860 +f 732 733 861 +f 733 862 861 +f 733 734 863 +f 733 863 862 +f 734 735 863 +f 735 864 863 +f 735 736 865 +f 735 865 864 +f 736 737 865 +f 737 866 865 +f 737 738 867 +f 737 867 866 +f 738 739 867 +f 739 868 867 +f 739 740 869 +f 739 869 868 +f 740 741 869 +f 741 870 869 +f 741 742 871 +f 741 871 870 +f 742 743 871 +f 743 872 871 +f 743 744 873 +f 743 873 872 +f 744 745 873 +f 745 874 873 +f 745 746 875 +f 745 875 874 +f 746 747 875 +f 747 876 875 +f 747 748 877 +f 747 877 876 +f 748 749 877 +f 749 878 877 +f 749 750 879 +f 749 879 878 +f 750 751 879 +f 751 880 879 +f 751 752 881 +f 751 881 880 +f 752 753 881 +f 753 882 881 +f 753 754 883 +f 753 883 882 +f 754 755 883 +f 755 884 883 +f 755 756 885 +f 755 885 884 +f 756 757 885 +f 757 886 885 +f 757 758 887 +f 757 887 886 +f 758 759 887 +f 759 888 887 +f 759 760 889 +f 759 889 888 +f 760 761 889 +f 761 890 889 +f 761 762 891 +f 761 891 890 +f 762 763 891 +f 763 892 891 +f 763 764 893 +f 763 893 892 +f 764 765 893 +f 765 894 893 +f 765 766 895 +f 765 895 894 +f 766 767 895 +f 767 896 895 +f 767 768 897 +f 767 897 896 +f 768 769 897 +f 769 898 897 +f 769 770 899 +f 769 899 898 +f 770 771 899 +f 771 900 899 +f 771 772 901 +f 771 901 900 +f 772 773 901 +f 773 902 901 +f 773 774 903 +f 773 903 902 +f 775 776 905 +f 775 905 904 +f 776 777 905 +f 777 906 905 +f 777 778 907 +f 777 907 906 +f 778 779 907 +f 779 908 907 +f 779 780 909 +f 779 909 908 +f 780 781 909 +f 781 910 909 +f 781 782 911 +f 781 911 910 +f 782 783 911 +f 783 912 911 +f 783 784 913 +f 783 913 912 +f 784 785 913 +f 785 914 913 +f 785 786 915 +f 785 915 914 +f 786 787 915 +f 787 916 915 +f 787 788 917 +f 787 917 916 +f 788 789 917 +f 789 918 917 +f 789 790 919 +f 789 919 918 
+f 790 791 919 +f 791 920 919 +f 791 792 921 +f 791 921 920 +f 792 793 921 +f 793 922 921 +f 793 794 923 +f 793 923 922 +f 794 795 923 +f 795 924 923 +f 795 796 925 +f 795 925 924 +f 796 797 925 +f 797 926 925 +f 797 798 927 +f 797 927 926 +f 798 799 927 +f 799 928 927 +f 799 800 929 +f 799 929 928 +f 800 801 929 +f 801 930 929 +f 801 802 931 +f 801 931 930 +f 802 803 931 +f 803 932 931 +f 803 804 933 +f 803 933 932 +f 804 805 933 +f 805 934 933 +f 805 806 935 +f 805 935 934 +f 806 807 935 +f 807 936 935 +f 807 808 937 +f 807 937 936 +f 808 809 937 +f 809 938 937 +f 809 810 939 +f 809 939 938 +f 810 811 939 +f 811 940 939 +f 811 812 941 +f 811 941 940 +f 812 813 941 +f 813 942 941 +f 813 814 943 +f 813 943 942 +f 814 815 943 +f 815 944 943 +f 815 816 945 +f 815 945 944 +f 816 817 945 +f 817 946 945 +f 817 818 947 +f 817 947 946 +f 818 819 947 +f 819 948 947 +f 819 820 949 +f 819 949 948 +f 820 821 949 +f 821 950 949 +f 821 822 951 +f 821 951 950 +f 822 823 951 +f 823 952 951 +f 823 824 953 +f 823 953 952 +f 824 825 953 +f 825 954 953 +f 825 826 955 +f 825 955 954 +f 826 827 955 +f 827 956 955 +f 827 828 957 +f 827 957 956 +f 828 829 957 +f 829 958 957 +f 829 830 959 +f 829 959 958 +f 830 831 959 +f 831 960 959 +f 831 832 961 +f 831 961 960 +f 832 833 961 +f 833 962 961 +f 833 834 963 +f 833 963 962 +f 834 835 963 +f 835 964 963 +f 835 836 965 +f 835 965 964 +f 836 837 965 +f 837 966 965 +f 837 838 967 +f 837 967 966 +f 838 839 967 +f 839 968 967 +f 839 840 969 +f 839 969 968 +f 840 841 969 +f 841 970 969 +f 841 842 971 +f 841 971 970 +f 842 843 971 +f 843 972 971 +f 843 844 973 +f 843 973 972 +f 844 845 973 +f 845 974 973 +f 845 846 975 +f 845 975 974 +f 846 847 975 +f 847 976 975 +f 847 848 977 +f 847 977 976 +f 848 849 977 +f 849 978 977 +f 849 850 979 +f 849 979 978 +f 850 851 979 +f 851 980 979 +f 851 852 981 +f 851 981 980 +f 852 853 981 +f 853 982 981 +f 853 854 983 +f 853 983 982 +f 854 855 983 +f 855 984 983 +f 855 856 985 +f 855 985 984 +f 856 857 985 +f 857 986 985 +f 857 858 987 +f 857 987 986 +f 858 859 987 +f 859 988 987 +f 859 860 989 +f 859 989 988 +f 860 861 989 +f 861 990 989 +f 861 862 991 +f 861 991 990 +f 862 863 991 +f 863 992 991 +f 863 864 993 +f 863 993 992 +f 864 865 993 +f 865 994 993 +f 865 866 995 +f 865 995 994 +f 866 867 995 +f 867 996 995 +f 867 868 997 +f 867 997 996 +f 868 869 997 +f 869 998 997 +f 869 870 999 +f 869 999 998 +f 870 871 999 +f 871 1000 999 +f 871 872 1001 +f 871 1001 1000 +f 872 873 1001 +f 873 1002 1001 +f 873 874 1003 +f 873 1003 1002 +f 874 875 1003 +f 875 1004 1003 +f 875 876 1005 +f 875 1005 1004 +f 876 877 1005 +f 877 1006 1005 +f 877 878 1007 +f 877 1007 1006 +f 878 879 1007 +f 879 1008 1007 +f 879 880 1009 +f 879 1009 1008 +f 880 881 1009 +f 881 1010 1009 +f 881 882 1011 +f 881 1011 1010 +f 882 883 1011 +f 883 1012 1011 +f 883 884 1013 +f 883 1013 1012 +f 884 885 1013 +f 885 1014 1013 +f 885 886 1015 +f 885 1015 1014 +f 886 887 1015 +f 887 1016 1015 +f 887 888 1017 +f 887 1017 1016 +f 888 889 1017 +f 889 1018 1017 +f 889 890 1019 +f 889 1019 1018 +f 890 891 1019 +f 891 1020 1019 +f 891 892 1021 +f 891 1021 1020 +f 892 893 1021 +f 893 1022 1021 +f 893 894 1023 +f 893 1023 1022 +f 894 895 1023 +f 895 1024 1023 +f 895 896 1025 +f 895 1025 1024 +f 896 897 1025 +f 897 1026 1025 +f 897 898 1027 +f 897 1027 1026 +f 898 899 1027 +f 899 1028 1027 +f 899 900 1029 +f 899 1029 1028 +f 900 901 1029 +f 901 1030 1029 +f 901 902 1031 +f 901 1031 1030 +f 902 903 1031 +f 903 1032 1031 +f 904 905 1033 +f 905 1034 1033 +f 905 906 1035 +f 905 1035 1034 
+f 906 907 1035 +f 907 1036 1035 +f 907 908 1037 +f 907 1037 1036 +f 908 909 1037 +f 909 1038 1037 +f 909 910 1039 +f 909 1039 1038 +f 910 911 1039 +f 911 1040 1039 +f 911 912 1041 +f 911 1041 1040 +f 912 913 1041 +f 913 1042 1041 +f 913 914 1043 +f 913 1043 1042 +f 914 915 1043 +f 915 1044 1043 +f 915 916 1045 +f 915 1045 1044 +f 916 917 1045 +f 917 1046 1045 +f 917 918 1047 +f 917 1047 1046 +f 918 919 1047 +f 919 1048 1047 +f 919 920 1049 +f 919 1049 1048 +f 920 921 1049 +f 921 1050 1049 +f 921 922 1051 +f 921 1051 1050 +f 922 923 1051 +f 923 1052 1051 +f 923 924 1053 +f 923 1053 1052 +f 924 925 1053 +f 925 1054 1053 +f 925 926 1055 +f 925 1055 1054 +f 926 927 1055 +f 927 1056 1055 +f 927 928 1057 +f 927 1057 1056 +f 928 929 1057 +f 929 1058 1057 +f 929 930 1059 +f 929 1059 1058 +f 930 931 1059 +f 931 1060 1059 +f 931 932 1061 +f 931 1061 1060 +f 932 933 1061 +f 933 1062 1061 +f 933 934 1063 +f 933 1063 1062 +f 934 935 1063 +f 935 1064 1063 +f 935 936 1065 +f 935 1065 1064 +f 936 937 1065 +f 937 1066 1065 +f 937 938 1067 +f 937 1067 1066 +f 938 939 1067 +f 939 1068 1067 +f 939 940 1069 +f 939 1069 1068 +f 940 941 1069 +f 941 1070 1069 +f 941 942 1071 +f 941 1071 1070 +f 942 943 1071 +f 943 1072 1071 +f 943 944 1073 +f 943 1073 1072 +f 944 945 1073 +f 945 1074 1073 +f 945 946 1075 +f 945 1075 1074 +f 946 947 1075 +f 947 1076 1075 +f 947 948 1077 +f 947 1077 1076 +f 948 949 1077 +f 949 1078 1077 +f 949 950 1079 +f 949 1079 1078 +f 950 951 1079 +f 951 1080 1079 +f 951 952 1081 +f 951 1081 1080 +f 952 953 1081 +f 953 1082 1081 +f 953 954 1083 +f 953 1083 1082 +f 954 955 1083 +f 955 1084 1083 +f 955 956 1085 +f 955 1085 1084 +f 956 957 1085 +f 957 1086 1085 +f 957 958 1087 +f 957 1087 1086 +f 958 959 1087 +f 959 1088 1087 +f 959 960 1089 +f 959 1089 1088 +f 960 961 1089 +f 961 1090 1089 +f 961 962 1091 +f 961 1091 1090 +f 962 963 1091 +f 963 1092 1091 +f 963 964 1093 +f 963 1093 1092 +f 964 965 1093 +f 965 1094 1093 +f 965 966 1095 +f 965 1095 1094 +f 966 967 1095 +f 967 1096 1095 +f 967 968 1097 +f 967 1097 1096 +f 968 969 1097 +f 969 1098 1097 +f 969 970 1099 +f 969 1099 1098 +f 970 971 1099 +f 971 1100 1099 +f 971 972 1101 +f 971 1101 1100 +f 972 973 1101 +f 973 1102 1101 +f 973 974 1103 +f 973 1103 1102 +f 974 975 1103 +f 975 1104 1103 +f 975 976 1105 +f 975 1105 1104 +f 976 977 1105 +f 977 1106 1105 +f 977 978 1107 +f 977 1107 1106 +f 978 979 1107 +f 979 1108 1107 +f 979 980 1109 +f 979 1109 1108 +f 980 981 1109 +f 981 1110 1109 +f 981 982 1111 +f 981 1111 1110 +f 982 983 1111 +f 983 1112 1111 +f 983 984 1113 +f 983 1113 1112 +f 984 985 1113 +f 985 1114 1113 +f 985 986 1115 +f 985 1115 1114 +f 986 987 1115 +f 987 1116 1115 +f 987 988 1117 +f 987 1117 1116 +f 988 989 1117 +f 989 1118 1117 +f 989 990 1119 +f 989 1119 1118 +f 990 991 1119 +f 991 1120 1119 +f 991 992 1121 +f 991 1121 1120 +f 992 993 1121 +f 993 1122 1121 +f 993 994 1123 +f 993 1123 1122 +f 994 995 1123 +f 995 1124 1123 +f 995 996 1125 +f 995 1125 1124 +f 996 997 1125 +f 997 1126 1125 +f 997 998 1127 +f 997 1127 1126 +f 998 999 1127 +f 999 1128 1127 +f 999 1000 1129 +f 999 1129 1128 +f 1000 1001 1129 +f 1001 1130 1129 +f 1001 1002 1131 +f 1001 1131 1130 +f 1002 1003 1131 +f 1003 1132 1131 +f 1003 1004 1133 +f 1003 1133 1132 +f 1004 1005 1133 +f 1005 1134 1133 +f 1005 1006 1135 +f 1005 1135 1134 +f 1006 1007 1135 +f 1007 1136 1135 +f 1007 1008 1137 +f 1007 1137 1136 +f 1008 1009 1137 +f 1009 1138 1137 +f 1009 1010 1139 +f 1009 1139 1138 +f 1010 1011 1139 +f 1011 1140 1139 +f 1011 1012 1141 +f 1011 1141 1140 +f 1012 1013 1141 
+f 1013 1142 1141 +f 1013 1014 1143 +f 1013 1143 1142 +f 1014 1015 1143 +f 1015 1144 1143 +f 1015 1016 1145 +f 1015 1145 1144 +f 1016 1017 1145 +f 1017 1146 1145 +f 1017 1018 1147 +f 1017 1147 1146 +f 1018 1019 1147 +f 1019 1148 1147 +f 1019 1020 1149 +f 1019 1149 1148 +f 1020 1021 1149 +f 1021 1150 1149 +f 1021 1022 1151 +f 1021 1151 1150 +f 1022 1023 1151 +f 1023 1152 1151 +f 1023 1024 1153 +f 1023 1153 1152 +f 1024 1025 1153 +f 1025 1154 1153 +f 1025 1026 1155 +f 1025 1155 1154 +f 1026 1027 1155 +f 1027 1156 1155 +f 1027 1028 1157 +f 1027 1157 1156 +f 1028 1029 1157 +f 1029 1158 1157 +f 1029 1030 1159 +f 1029 1159 1158 +f 1030 1031 1159 +f 1031 1160 1159 +f 1031 1032 1161 +f 1031 1161 1160 +f 1033 1034 1163 +f 1033 1163 1162 +f 1034 1035 1163 +f 1035 1164 1163 +f 1035 1036 1165 +f 1035 1165 1164 +f 1036 1037 1165 +f 1037 1166 1165 +f 1037 1038 1167 +f 1037 1167 1166 +f 1038 1039 1167 +f 1039 1168 1167 +f 1039 1040 1169 +f 1039 1169 1168 +f 1040 1041 1169 +f 1041 1170 1169 +f 1041 1042 1171 +f 1041 1171 1170 +f 1042 1043 1171 +f 1043 1172 1171 +f 1043 1044 1173 +f 1043 1173 1172 +f 1044 1045 1173 +f 1045 1174 1173 +f 1045 1046 1175 +f 1045 1175 1174 +f 1046 1047 1175 +f 1047 1176 1175 +f 1047 1048 1177 +f 1047 1177 1176 +f 1048 1049 1177 +f 1049 1178 1177 +f 1049 1050 1179 +f 1049 1179 1178 +f 1050 1051 1179 +f 1051 1180 1179 +f 1051 1052 1181 +f 1051 1181 1180 +f 1052 1053 1181 +f 1053 1182 1181 +f 1053 1054 1183 +f 1053 1183 1182 +f 1054 1055 1183 +f 1055 1184 1183 +f 1055 1056 1185 +f 1055 1185 1184 +f 1056 1057 1185 +f 1057 1186 1185 +f 1057 1058 1187 +f 1057 1187 1186 +f 1058 1059 1187 +f 1059 1188 1187 +f 1059 1060 1189 +f 1059 1189 1188 +f 1060 1061 1189 +f 1061 1190 1189 +f 1061 1062 1191 +f 1061 1191 1190 +f 1062 1063 1191 +f 1063 1192 1191 +f 1063 1064 1193 +f 1063 1193 1192 +f 1064 1065 1193 +f 1065 1194 1193 +f 1065 1066 1195 +f 1065 1195 1194 +f 1066 1067 1195 +f 1067 1196 1195 +f 1067 1068 1197 +f 1067 1197 1196 +f 1068 1069 1197 +f 1069 1198 1197 +f 1069 1070 1199 +f 1069 1199 1198 +f 1070 1071 1199 +f 1071 1200 1199 +f 1071 1072 1201 +f 1071 1201 1200 +f 1072 1073 1201 +f 1073 1202 1201 +f 1073 1074 1203 +f 1073 1203 1202 +f 1074 1075 1203 +f 1075 1204 1203 +f 1075 1076 1205 +f 1075 1205 1204 +f 1076 1077 1205 +f 1077 1206 1205 +f 1077 1078 1207 +f 1077 1207 1206 +f 1078 1079 1207 +f 1079 1208 1207 +f 1079 1080 1209 +f 1079 1209 1208 +f 1080 1081 1209 +f 1081 1210 1209 +f 1081 1082 1211 +f 1081 1211 1210 +f 1082 1083 1211 +f 1083 1212 1211 +f 1083 1084 1213 +f 1083 1213 1212 +f 1084 1085 1213 +f 1085 1214 1213 +f 1085 1086 1215 +f 1085 1215 1214 +f 1086 1087 1215 +f 1087 1216 1215 +f 1087 1088 1217 +f 1087 1217 1216 +f 1088 1089 1217 +f 1089 1218 1217 +f 1089 1090 1219 +f 1089 1219 1218 +f 1090 1091 1219 +f 1091 1220 1219 +f 1091 1092 1221 +f 1091 1221 1220 +f 1092 1093 1221 +f 1093 1222 1221 +f 1093 1094 1223 +f 1093 1223 1222 +f 1094 1095 1223 +f 1095 1224 1223 +f 1095 1096 1225 +f 1095 1225 1224 +f 1096 1097 1225 +f 1097 1226 1225 +f 1097 1098 1227 +f 1097 1227 1226 +f 1098 1099 1227 +f 1099 1228 1227 +f 1099 1100 1229 +f 1099 1229 1228 +f 1100 1101 1229 +f 1101 1230 1229 +f 1101 1102 1231 +f 1101 1231 1230 +f 1102 1103 1231 +f 1103 1232 1231 +f 1103 1104 1233 +f 1103 1233 1232 +f 1104 1105 1233 +f 1105 1234 1233 +f 1105 1106 1235 +f 1105 1235 1234 +f 1106 1107 1235 +f 1107 1236 1235 +f 1107 1108 1237 +f 1107 1237 1236 +f 1108 1109 1237 +f 1109 1238 1237 +f 1109 1110 1239 +f 1109 1239 1238 +f 1110 1111 1239 +f 1111 1240 1239 +f 1111 1112 1241 +f 1111 1241 1240 +f 1112 
1113 1241 +f 1113 1242 1241 +f 1113 1114 1243 +f 1113 1243 1242 +f 1114 1115 1243 +f 1115 1244 1243 +f 1115 1116 1245 +f 1115 1245 1244 +f 1116 1117 1245 +f 1117 1246 1245 +f 1117 1118 1247 +f 1117 1247 1246 +f 1118 1119 1247 +f 1119 1248 1247 +f 1119 1120 1249 +f 1119 1249 1248 +f 1120 1121 1249 +f 1121 1250 1249 +f 1121 1122 1251 +f 1121 1251 1250 +f 1122 1123 1251 +f 1123 1252 1251 +f 1123 1124 1253 +f 1123 1253 1252 +f 1124 1125 1253 +f 1125 1254 1253 +f 1125 1126 1255 +f 1125 1255 1254 +f 1126 1127 1255 +f 1127 1256 1255 +f 1127 1128 1257 +f 1127 1257 1256 +f 1128 1129 1257 +f 1129 1258 1257 +f 1129 1130 1259 +f 1129 1259 1258 +f 1130 1131 1259 +f 1131 1260 1259 +f 1131 1132 1261 +f 1131 1261 1260 +f 1132 1133 1261 +f 1133 1262 1261 +f 1133 1134 1263 +f 1133 1263 1262 +f 1134 1135 1263 +f 1135 1264 1263 +f 1135 1136 1265 +f 1135 1265 1264 +f 1136 1137 1265 +f 1137 1266 1265 +f 1137 1138 1267 +f 1137 1267 1266 +f 1138 1139 1267 +f 1139 1268 1267 +f 1139 1140 1269 +f 1139 1269 1268 +f 1140 1141 1269 +f 1141 1270 1269 +f 1141 1142 1271 +f 1141 1271 1270 +f 1142 1143 1271 +f 1143 1272 1271 +f 1143 1144 1273 +f 1143 1273 1272 +f 1144 1145 1273 +f 1145 1274 1273 +f 1145 1146 1275 +f 1145 1275 1274 +f 1146 1147 1275 +f 1147 1276 1275 +f 1147 1148 1277 +f 1147 1277 1276 +f 1148 1149 1277 +f 1149 1278 1277 +f 1149 1150 1279 +f 1149 1279 1278 +f 1150 1151 1279 +f 1151 1280 1279 +f 1151 1152 1281 +f 1151 1281 1280 +f 1152 1153 1281 +f 1153 1282 1281 +f 1153 1154 1283 +f 1153 1283 1282 +f 1154 1155 1283 +f 1155 1284 1283 +f 1155 1156 1285 +f 1155 1285 1284 +f 1156 1157 1285 +f 1157 1286 1285 +f 1157 1158 1287 +f 1157 1287 1286 +f 1158 1159 1287 +f 1159 1288 1287 +f 1159 1160 1289 +f 1159 1289 1288 +f 1160 1161 1289 +f 1161 1290 1289 +f 1162 1163 1291 +f 1163 1292 1291 +f 1163 1164 1293 +f 1163 1293 1292 +f 1164 1165 1293 +f 1165 1294 1293 +f 1165 1166 1295 +f 1165 1295 1294 +f 1166 1167 1295 +f 1167 1296 1295 +f 1167 1168 1297 +f 1167 1297 1296 +f 1168 1169 1297 +f 1169 1298 1297 +f 1169 1170 1299 +f 1169 1299 1298 +f 1170 1171 1299 +f 1171 1300 1299 +f 1171 1172 1301 +f 1171 1301 1300 +f 1172 1173 1301 +f 1173 1302 1301 +f 1173 1174 1303 +f 1173 1303 1302 +f 1174 1175 1303 +f 1175 1304 1303 +f 1175 1176 1305 +f 1175 1305 1304 +f 1176 1177 1305 +f 1177 1306 1305 +f 1177 1178 1307 +f 1177 1307 1306 +f 1178 1179 1307 +f 1179 1308 1307 +f 1179 1180 1309 +f 1179 1309 1308 +f 1180 1181 1309 +f 1181 1310 1309 +f 1181 1182 1311 +f 1181 1311 1310 +f 1182 1183 1311 +f 1183 1312 1311 +f 1183 1184 1313 +f 1183 1313 1312 +f 1184 1185 1313 +f 1185 1314 1313 +f 1185 1186 1315 +f 1185 1315 1314 +f 1186 1187 1315 +f 1187 1316 1315 +f 1187 1188 1317 +f 1187 1317 1316 +f 1188 1189 1317 +f 1189 1318 1317 +f 1189 1190 1319 +f 1189 1319 1318 +f 1190 1191 1319 +f 1191 1320 1319 +f 1191 1192 1321 +f 1191 1321 1320 +f 1192 1193 1321 +f 1193 1322 1321 +f 1193 1194 1323 +f 1193 1323 1322 +f 1194 1195 1323 +f 1195 1324 1323 +f 1195 1196 1325 +f 1195 1325 1324 +f 1196 1197 1325 +f 1197 1326 1325 +f 1197 1198 1327 +f 1197 1327 1326 +f 1198 1199 1327 +f 1199 1328 1327 +f 1199 1200 1329 +f 1199 1329 1328 +f 1200 1201 1329 +f 1201 1330 1329 +f 1201 1202 1331 +f 1201 1331 1330 +f 1202 1203 1331 +f 1203 1332 1331 +f 1203 1204 1333 +f 1203 1333 1332 +f 1204 1205 1333 +f 1205 1334 1333 +f 1205 1206 1335 +f 1205 1335 1334 +f 1206 1207 1335 +f 1207 1336 1335 +f 1207 1208 1337 +f 1207 1337 1336 +f 1208 1209 1337 +f 1209 1338 1337 +f 1209 1210 1339 +f 1209 1339 1338 +f 1210 1211 1339 +f 1211 1340 1339 +f 1211 1212 1341 +f 1211 1341 
1340 +f 1212 1213 1341 +f 1213 1342 1341 +f 1213 1214 1343 +f 1213 1343 1342 +f 1214 1215 1343 +f 1215 1344 1343 +f 1215 1216 1345 +f 1215 1345 1344 +f 1216 1217 1345 +f 1217 1346 1345 +f 1217 1218 1347 +f 1217 1347 1346 +f 1218 1219 1347 +f 1219 1348 1347 +f 1219 1220 1349 +f 1219 1349 1348 +f 1220 1221 1349 +f 1221 1350 1349 +f 1221 1222 1351 +f 1221 1351 1350 +f 1222 1223 1351 +f 1223 1352 1351 +f 1223 1224 1353 +f 1223 1353 1352 +f 1224 1225 1353 +f 1225 1354 1353 +f 1225 1226 1355 +f 1225 1355 1354 +f 1226 1227 1355 +f 1227 1356 1355 +f 1227 1228 1357 +f 1227 1357 1356 +f 1228 1229 1357 +f 1229 1358 1357 +f 1229 1230 1359 +f 1229 1359 1358 +f 1230 1231 1359 +f 1231 1360 1359 +f 1231 1232 1361 +f 1231 1361 1360 +f 1232 1233 1361 +f 1233 1362 1361 +f 1233 1234 1363 +f 1233 1363 1362 +f 1234 1235 1363 +f 1235 1364 1363 +f 1235 1236 1365 +f 1235 1365 1364 +f 1236 1237 1365 +f 1237 1366 1365 +f 1237 1238 1367 +f 1237 1367 1366 +f 1238 1239 1367 +f 1239 1368 1367 +f 1239 1240 1369 +f 1239 1369 1368 +f 1240 1241 1369 +f 1241 1370 1369 +f 1241 1242 1371 +f 1241 1371 1370 +f 1242 1243 1371 +f 1243 1372 1371 +f 1243 1244 1373 +f 1243 1373 1372 +f 1244 1245 1373 +f 1245 1374 1373 +f 1245 1246 1375 +f 1245 1375 1374 +f 1246 1247 1375 +f 1247 1376 1375 +f 1247 1248 1377 +f 1247 1377 1376 +f 1248 1249 1377 +f 1249 1378 1377 +f 1249 1250 1379 +f 1249 1379 1378 +f 1250 1251 1379 +f 1251 1380 1379 +f 1251 1252 1381 +f 1251 1381 1380 +f 1252 1253 1381 +f 1253 1382 1381 +f 1253 1254 1383 +f 1253 1383 1382 +f 1254 1255 1383 +f 1255 1384 1383 +f 1255 1256 1385 +f 1255 1385 1384 +f 1256 1257 1385 +f 1257 1386 1385 +f 1257 1258 1387 +f 1257 1387 1386 +f 1258 1259 1387 +f 1259 1388 1387 +f 1259 1260 1389 +f 1259 1389 1388 +f 1260 1261 1389 +f 1261 1390 1389 +f 1261 1262 1391 +f 1261 1391 1390 +f 1262 1263 1391 +f 1263 1392 1391 +f 1263 1264 1393 +f 1263 1393 1392 +f 1264 1265 1393 +f 1265 1394 1393 +f 1265 1266 1395 +f 1265 1395 1394 +f 1266 1267 1395 +f 1267 1396 1395 +f 1267 1268 1397 +f 1267 1397 1396 +f 1268 1269 1397 +f 1269 1398 1397 +f 1269 1270 1399 +f 1269 1399 1398 +f 1270 1271 1399 +f 1271 1400 1399 +f 1271 1272 1401 +f 1271 1401 1400 +f 1272 1273 1401 +f 1273 1402 1401 +f 1273 1274 1403 +f 1273 1403 1402 +f 1274 1275 1403 +f 1275 1404 1403 +f 1275 1276 1405 +f 1275 1405 1404 +f 1276 1277 1405 +f 1277 1406 1405 +f 1277 1278 1407 +f 1277 1407 1406 +f 1278 1279 1407 +f 1279 1408 1407 +f 1279 1280 1409 +f 1279 1409 1408 +f 1280 1281 1409 +f 1281 1410 1409 +f 1281 1282 1411 +f 1281 1411 1410 +f 1282 1283 1411 +f 1283 1412 1411 +f 1283 1284 1413 +f 1283 1413 1412 +f 1284 1285 1413 +f 1285 1414 1413 +f 1285 1286 1415 +f 1285 1415 1414 +f 1286 1287 1415 +f 1287 1416 1415 +f 1287 1288 1417 +f 1287 1417 1416 +f 1288 1289 1417 +f 1289 1418 1417 +f 1289 1290 1419 +f 1289 1419 1418 +f 1291 1292 1421 +f 1291 1421 1420 +f 1292 1293 1421 +f 1293 1422 1421 +f 1293 1294 1423 +f 1293 1423 1422 +f 1294 1295 1423 +f 1295 1424 1423 +f 1295 1296 1425 +f 1295 1425 1424 +f 1296 1297 1425 +f 1297 1426 1425 +f 1297 1298 1427 +f 1297 1427 1426 +f 1298 1299 1427 +f 1299 1428 1427 +f 1299 1300 1429 +f 1299 1429 1428 +f 1300 1301 1429 +f 1301 1430 1429 +f 1301 1302 1431 +f 1301 1431 1430 +f 1302 1303 1431 +f 1303 1432 1431 +f 1303 1304 1433 +f 1303 1433 1432 +f 1304 1305 1433 +f 1305 1434 1433 +f 1305 1306 1435 +f 1305 1435 1434 +f 1306 1307 1435 +f 1307 1436 1435 +f 1307 1308 1437 +f 1307 1437 1436 +f 1308 1309 1437 +f 1309 1438 1437 +f 1309 1310 1439 +f 1309 1439 1438 +f 1310 1311 1439 +f 1311 1440 1439 +f 1311 1312 1441 +f 
1311 1441 1440 +f 1312 1313 1441 +f 1313 1442 1441 +f 1313 1314 1443 +f 1313 1443 1442 +f 1314 1315 1443 +f 1315 1444 1443 +f 1315 1316 1445 +f 1315 1445 1444 +f 1316 1317 1445 +f 1317 1446 1445 +f 1317 1318 1447 +f 1317 1447 1446 +f 1318 1319 1447 +f 1319 1448 1447 +f 1319 1320 1449 +f 1319 1449 1448 +f 1320 1321 1449 +f 1321 1450 1449 +f 1321 1322 1451 +f 1321 1451 1450 +f 1322 1323 1451 +f 1323 1452 1451 +f 1323 1324 1453 +f 1323 1453 1452 +f 1324 1325 1453 +f 1325 1454 1453 +f 1325 1326 1455 +f 1325 1455 1454 +f 1326 1327 1455 +f 1327 1456 1455 +f 1327 1328 1457 +f 1327 1457 1456 +f 1328 1329 1457 +f 1329 1458 1457 +f 1329 1330 1459 +f 1329 1459 1458 +f 1330 1331 1459 +f 1331 1460 1459 +f 1331 1332 1461 +f 1331 1461 1460 +f 1332 1333 1461 +f 1333 1462 1461 +f 1333 1334 1463 +f 1333 1463 1462 +f 1334 1335 1463 +f 1335 1464 1463 +f 1335 1336 1465 +f 1335 1465 1464 +f 1336 1337 1465 +f 1337 1466 1465 +f 1337 1338 1467 +f 1337 1467 1466 +f 1338 1339 1467 +f 1339 1468 1467 +f 1339 1340 1469 +f 1339 1469 1468 +f 1340 1341 1469 +f 1341 1470 1469 +f 1341 1342 1471 +f 1341 1471 1470 +f 1342 1343 1471 +f 1343 1472 1471 +f 1343 1344 1473 +f 1343 1473 1472 +f 1344 1345 1473 +f 1345 1474 1473 +f 1345 1346 1475 +f 1345 1475 1474 +f 1346 1347 1475 +f 1347 1476 1475 +f 1347 1348 1477 +f 1347 1477 1476 +f 1348 1349 1477 +f 1349 1478 1477 +f 1349 1350 1479 +f 1349 1479 1478 +f 1350 1351 1479 +f 1351 1480 1479 +f 1351 1352 1481 +f 1351 1481 1480 +f 1352 1353 1481 +f 1353 1482 1481 +f 1353 1354 1483 +f 1353 1483 1482 +f 1354 1355 1483 +f 1355 1484 1483 +f 1355 1356 1485 +f 1355 1485 1484 +f 1356 1357 1485 +f 1357 1486 1485 +f 1357 1358 1487 +f 1357 1487 1486 +f 1358 1359 1487 +f 1359 1488 1487 +f 1359 1360 1489 +f 1359 1489 1488 +f 1360 1361 1489 +f 1361 1490 1489 +f 1361 1362 1491 +f 1361 1491 1490 +f 1362 1363 1491 +f 1363 1492 1491 +f 1363 1364 1493 +f 1363 1493 1492 +f 1364 1365 1493 +f 1365 1494 1493 +f 1365 1366 1495 +f 1365 1495 1494 +f 1366 1367 1495 +f 1367 1496 1495 +f 1367 1368 1497 +f 1367 1497 1496 +f 1368 1369 1497 +f 1369 1498 1497 +f 1369 1370 1499 +f 1369 1499 1498 +f 1370 1371 1499 +f 1371 1500 1499 +f 1371 1372 1501 +f 1371 1501 1500 +f 1372 1373 1501 +f 1373 1502 1501 +f 1373 1374 1503 +f 1373 1503 1502 +f 1374 1375 1503 +f 1375 1504 1503 +f 1375 1376 1505 +f 1375 1505 1504 +f 1376 1377 1505 +f 1377 1506 1505 +f 1377 1378 1507 +f 1377 1507 1506 +f 1378 1379 1507 +f 1379 1508 1507 +f 1379 1380 1509 +f 1379 1509 1508 +f 1380 1381 1509 +f 1381 1510 1509 +f 1381 1382 1511 +f 1381 1511 1510 +f 1382 1383 1511 +f 1383 1512 1511 +f 1383 1384 1513 +f 1383 1513 1512 +f 1384 1385 1513 +f 1385 1514 1513 +f 1385 1386 1515 +f 1385 1515 1514 +f 1386 1387 1515 +f 1387 1516 1515 +f 1387 1388 1517 +f 1387 1517 1516 +f 1388 1389 1517 +f 1389 1518 1517 +f 1389 1390 1519 +f 1389 1519 1518 +f 1390 1391 1519 +f 1391 1520 1519 +f 1391 1392 1521 +f 1391 1521 1520 +f 1392 1393 1521 +f 1393 1522 1521 +f 1393 1394 1523 +f 1393 1523 1522 +f 1394 1395 1523 +f 1395 1524 1523 +f 1395 1396 1525 +f 1395 1525 1524 +f 1396 1397 1525 +f 1397 1526 1525 +f 1397 1398 1527 +f 1397 1527 1526 +f 1398 1399 1527 +f 1399 1528 1527 +f 1399 1400 1529 +f 1399 1529 1528 +f 1400 1401 1529 +f 1401 1530 1529 +f 1401 1402 1531 +f 1401 1531 1530 +f 1402 1403 1531 +f 1403 1532 1531 +f 1403 1404 1533 +f 1403 1533 1532 +f 1404 1405 1533 +f 1405 1534 1533 +f 1405 1406 1535 +f 1405 1535 1534 +f 1406 1407 1535 +f 1407 1536 1535 +f 1407 1408 1537 +f 1407 1537 1536 +f 1408 1409 1537 +f 1409 1538 1537 +f 1409 1410 1539 +f 1409 1539 1538 +f 1410 
1411 1539 +f 1411 1540 1539 +f 1411 1412 1541 +f 1411 1541 1540 +f 1412 1413 1541 +f 1413 1542 1541 +f 1413 1414 1543 +f 1413 1543 1542 +f 1414 1415 1543 +f 1415 1544 1543 +f 1415 1416 1545 +f 1415 1545 1544 +f 1416 1417 1545 +f 1417 1546 1545 +f 1417 1418 1547 +f 1417 1547 1546 +f 1418 1419 1547 +f 1419 1548 1547 +f 1420 1421 1549 +f 1421 1550 1549 +f 1421 1422 1551 +f 1421 1551 1550 +f 1422 1423 1551 +f 1423 1552 1551 +f 1423 1424 1553 +f 1423 1553 1552 +f 1424 1425 1553 +f 1425 1554 1553 +f 1425 1426 1555 +f 1425 1555 1554 +f 1426 1427 1555 +f 1427 1556 1555 +f 1427 1428 1557 +f 1427 1557 1556 +f 1428 1429 1557 +f 1429 1558 1557 +f 1429 1430 1559 +f 1429 1559 1558 +f 1430 1431 1559 +f 1431 1560 1559 +f 1431 1432 1561 +f 1431 1561 1560 +f 1432 1433 1561 +f 1433 1562 1561 +f 1433 1434 1563 +f 1433 1563 1562 +f 1434 1435 1563 +f 1435 1564 1563 +f 1435 1436 1565 +f 1435 1565 1564 +f 1436 1437 1565 +f 1437 1566 1565 +f 1437 1438 1567 +f 1437 1567 1566 +f 1438 1439 1567 +f 1439 1568 1567 +f 1439 1440 1569 +f 1439 1569 1568 +f 1440 1441 1569 +f 1441 1570 1569 +f 1441 1442 1571 +f 1441 1571 1570 +f 1442 1443 1571 +f 1443 1572 1571 +f 1443 1444 1573 +f 1443 1573 1572 +f 1444 1445 1573 +f 1445 1574 1573 +f 1445 1446 1575 +f 1445 1575 1574 +f 1446 1447 1575 +f 1447 1576 1575 +f 1447 1448 1577 +f 1447 1577 1576 +f 1448 1449 1577 +f 1449 1578 1577 +f 1449 1450 1579 +f 1449 1579 1578 +f 1450 1451 1579 +f 1451 1580 1579 +f 1451 1452 1581 +f 1451 1581 1580 +f 1452 1453 1581 +f 1453 1582 1581 +f 1453 1454 1583 +f 1453 1583 1582 +f 1454 1455 1583 +f 1455 1584 1583 +f 1455 1456 1585 +f 1455 1585 1584 +f 1456 1457 1585 +f 1457 1586 1585 +f 1457 1458 1587 +f 1457 1587 1586 +f 1458 1459 1587 +f 1459 1588 1587 +f 1459 1460 1589 +f 1459 1589 1588 +f 1460 1461 1589 +f 1461 1590 1589 +f 1461 1462 1591 +f 1461 1591 1590 +f 1462 1463 1591 +f 1463 1592 1591 +f 1463 1464 1593 +f 1463 1593 1592 +f 1464 1465 1593 +f 1465 1594 1593 +f 1465 1466 1595 +f 1465 1595 1594 +f 1466 1467 1595 +f 1467 1596 1595 +f 1467 1468 1597 +f 1467 1597 1596 +f 1468 1469 1597 +f 1469 1598 1597 +f 1469 1470 1599 +f 1469 1599 1598 +f 1470 1471 1599 +f 1471 1600 1599 +f 1471 1472 1601 +f 1471 1601 1600 +f 1472 1473 1601 +f 1473 1602 1601 +f 1473 1474 1603 +f 1473 1603 1602 +f 1474 1475 1603 +f 1475 1604 1603 +f 1475 1476 1605 +f 1475 1605 1604 +f 1476 1477 1605 +f 1477 1606 1605 +f 1477 1478 1607 +f 1477 1607 1606 +f 1478 1479 1607 +f 1479 1608 1607 +f 1479 1480 1609 +f 1479 1609 1608 +f 1480 1481 1609 +f 1481 1610 1609 +f 1481 1482 1611 +f 1481 1611 1610 +f 1482 1483 1611 +f 1483 1612 1611 +f 1483 1484 1613 +f 1483 1613 1612 +f 1484 1485 1613 +f 1485 1614 1613 +f 1485 1486 1615 +f 1485 1615 1614 +f 1486 1487 1615 +f 1487 1616 1615 +f 1487 1488 1617 +f 1487 1617 1616 +f 1488 1489 1617 +f 1489 1618 1617 +f 1489 1490 1619 +f 1489 1619 1618 +f 1490 1491 1619 +f 1491 1620 1619 +f 1491 1492 1621 +f 1491 1621 1620 +f 1492 1493 1621 +f 1493 1622 1621 +f 1493 1494 1623 +f 1493 1623 1622 +f 1494 1495 1623 +f 1495 1624 1623 +f 1495 1496 1625 +f 1495 1625 1624 +f 1496 1497 1625 +f 1497 1626 1625 +f 1497 1498 1627 +f 1497 1627 1626 +f 1498 1499 1627 +f 1499 1628 1627 +f 1499 1500 1629 +f 1499 1629 1628 +f 1500 1501 1629 +f 1501 1630 1629 +f 1501 1502 1631 +f 1501 1631 1630 +f 1502 1503 1631 +f 1503 1632 1631 +f 1503 1504 1633 +f 1503 1633 1632 +f 1504 1505 1633 +f 1505 1634 1633 +f 1505 1506 1635 +f 1505 1635 1634 +f 1506 1507 1635 +f 1507 1636 1635 +f 1507 1508 1637 +f 1507 1637 1636 +f 1508 1509 1637 +f 1509 1638 1637 +f 1509 1510 1639 +f 1509 1639 
1638 +f 1510 1511 1639 +f 1511 1640 1639 +f 1511 1512 1641 +f 1511 1641 1640 +f 1512 1513 1641 +f 1513 1642 1641 +f 1513 1514 1643 +f 1513 1643 1642 +f 1514 1515 1643 +f 1515 1644 1643 +f 1515 1516 1645 +f 1515 1645 1644 +f 1516 1517 1645 +f 1517 1646 1645 +f 1517 1518 1647 +f 1517 1647 1646 +f 1518 1519 1647 +f 1519 1648 1647 +f 1519 1520 1649 +f 1519 1649 1648 +f 1520 1521 1649 +f 1521 1650 1649 +f 1521 1522 1651 +f 1521 1651 1650 +f 1522 1523 1651 +f 1523 1652 1651 +f 1523 1524 1653 +f 1523 1653 1652 +f 1524 1525 1653 +f 1525 1654 1653 +f 1525 1526 1655 +f 1525 1655 1654 +f 1526 1527 1655 +f 1527 1656 1655 +f 1527 1528 1657 +f 1527 1657 1656 +f 1528 1529 1657 +f 1529 1658 1657 +f 1529 1530 1659 +f 1529 1659 1658 +f 1530 1531 1659 +f 1531 1660 1659 +f 1531 1532 1661 +f 1531 1661 1660 +f 1532 1533 1661 +f 1533 1662 1661 +f 1533 1534 1663 +f 1533 1663 1662 +f 1534 1535 1663 +f 1535 1664 1663 +f 1535 1536 1665 +f 1535 1665 1664 +f 1536 1537 1665 +f 1537 1666 1665 +f 1537 1538 1667 +f 1537 1667 1666 +f 1538 1539 1667 +f 1539 1668 1667 +f 1539 1540 1669 +f 1539 1669 1668 +f 1540 1541 1669 +f 1541 1670 1669 +f 1541 1542 1671 +f 1541 1671 1670 +f 1542 1543 1671 +f 1543 1672 1671 +f 1543 1544 1673 +f 1543 1673 1672 +f 1544 1545 1673 +f 1545 1674 1673 +f 1545 1546 1675 +f 1545 1675 1674 +f 1546 1547 1675 +f 1547 1676 1675 +f 1547 1548 1677 +f 1547 1677 1676 +f 1549 1550 1679 +f 1549 1679 1678 +f 1550 1551 1679 +f 1551 1680 1679 +f 1551 1552 1681 +f 1551 1681 1680 +f 1552 1553 1681 +f 1553 1682 1681 +f 1553 1554 1683 +f 1553 1683 1682 +f 1554 1555 1683 +f 1555 1684 1683 +f 1555 1556 1685 +f 1555 1685 1684 +f 1556 1557 1685 +f 1557 1686 1685 +f 1557 1558 1687 +f 1557 1687 1686 +f 1558 1559 1687 +f 1559 1688 1687 +f 1559 1560 1689 +f 1559 1689 1688 +f 1560 1561 1689 +f 1561 1690 1689 +f 1561 1562 1691 +f 1561 1691 1690 +f 1562 1563 1691 +f 1563 1692 1691 +f 1563 1564 1693 +f 1563 1693 1692 +f 1564 1565 1693 +f 1565 1694 1693 +f 1565 1566 1695 +f 1565 1695 1694 +f 1566 1567 1695 +f 1567 1696 1695 +f 1567 1568 1697 +f 1567 1697 1696 +f 1568 1569 1697 +f 1569 1698 1697 +f 1569 1570 1699 +f 1569 1699 1698 +f 1570 1571 1699 +f 1571 1700 1699 +f 1571 1572 1701 +f 1571 1701 1700 +f 1572 1573 1701 +f 1573 1702 1701 +f 1573 1574 1703 +f 1573 1703 1702 +f 1574 1575 1703 +f 1575 1704 1703 +f 1575 1576 1705 +f 1575 1705 1704 +f 1576 1577 1705 +f 1577 1706 1705 +f 1577 1578 1707 +f 1577 1707 1706 +f 1578 1579 1707 +f 1579 1708 1707 +f 1579 1580 1709 +f 1579 1709 1708 +f 1580 1581 1709 +f 1581 1710 1709 +f 1581 1582 1711 +f 1581 1711 1710 +f 1582 1583 1711 +f 1583 1712 1711 +f 1583 1584 1713 +f 1583 1713 1712 +f 1584 1585 1713 +f 1585 1714 1713 +f 1585 1586 1715 +f 1585 1715 1714 +f 1586 1587 1715 +f 1587 1716 1715 +f 1587 1588 1717 +f 1587 1717 1716 +f 1588 1589 1717 +f 1589 1718 1717 +f 1589 1590 1719 +f 1589 1719 1718 +f 1590 1591 1719 +f 1591 1720 1719 +f 1591 1592 1721 +f 1591 1721 1720 +f 1592 1593 1721 +f 1593 1722 1721 +f 1593 1594 1723 +f 1593 1723 1722 +f 1594 1595 1723 +f 1595 1724 1723 +f 1595 1596 1725 +f 1595 1725 1724 +f 1596 1597 1725 +f 1597 1726 1725 +f 1597 1598 1727 +f 1597 1727 1726 +f 1598 1599 1727 +f 1599 1728 1727 +f 1599 1600 1729 +f 1599 1729 1728 +f 1600 1601 1729 +f 1601 1730 1729 +f 1601 1602 1731 +f 1601 1731 1730 +f 1602 1603 1731 +f 1603 1732 1731 +f 1603 1604 1733 +f 1603 1733 1732 +f 1604 1605 1733 +f 1605 1734 1733 +f 1605 1606 1735 +f 1605 1735 1734 +f 1606 1607 1735 +f 1607 1736 1735 +f 1607 1608 1737 +f 1607 1737 1736 +f 1608 1609 1737 +f 1609 1738 1737 +f 1609 1610 1739 +f 
1609 1739 1738 +f 1610 1611 1739 +f 1611 1740 1739 +f 1611 1612 1741 +f 1611 1741 1740 +f 1612 1613 1741 +f 1613 1742 1741 +f 1613 1614 1743 +f 1613 1743 1742 +f 1614 1615 1743 +f 1615 1744 1743 +f 1615 1616 1745 +f 1615 1745 1744 +f 1616 1617 1745 +f 1617 1746 1745 +f 1617 1618 1747 +f 1617 1747 1746 +f 1618 1619 1747 +f 1619 1748 1747 +f 1619 1620 1749 +f 1619 1749 1748 +f 1620 1621 1749 +f 1621 1750 1749 +f 1621 1622 1751 +f 1621 1751 1750 +f 1622 1623 1751 +f 1623 1752 1751 +f 1623 1624 1753 +f 1623 1753 1752 +f 1624 1625 1753 +f 1625 1754 1753 +f 1625 1626 1755 +f 1625 1755 1754 +f 1626 1627 1755 +f 1627 1756 1755 +f 1627 1628 1757 +f 1627 1757 1756 +f 1628 1629 1757 +f 1629 1758 1757 +f 1629 1630 1759 +f 1629 1759 1758 +f 1630 1631 1759 +f 1631 1760 1759 +f 1631 1632 1761 +f 1631 1761 1760 +f 1632 1633 1761 +f 1633 1762 1761 +f 1633 1634 1763 +f 1633 1763 1762 +f 1634 1635 1763 +f 1635 1764 1763 +f 1635 1636 1765 +f 1635 1765 1764 +f 1636 1637 1765 +f 1637 1766 1765 +f 1637 1638 1767 +f 1637 1767 1766 +f 1638 1639 1767 +f 1639 1768 1767 +f 1639 1640 1769 +f 1639 1769 1768 +f 1640 1641 1769 +f 1641 1770 1769 +f 1641 1642 1771 +f 1641 1771 1770 +f 1642 1643 1771 +f 1643 1772 1771 +f 1643 1644 1773 +f 1643 1773 1772 +f 1644 1645 1773 +f 1645 1774 1773 +f 1645 1646 1775 +f 1645 1775 1774 +f 1646 1647 1775 +f 1647 1776 1775 +f 1647 1648 1777 +f 1647 1777 1776 +f 1648 1649 1777 +f 1649 1778 1777 +f 1649 1650 1779 +f 1649 1779 1778 +f 1650 1651 1779 +f 1651 1780 1779 +f 1651 1652 1781 +f 1651 1781 1780 +f 1652 1653 1781 +f 1653 1782 1781 +f 1653 1654 1783 +f 1653 1783 1782 +f 1654 1655 1783 +f 1655 1784 1783 +f 1655 1656 1785 +f 1655 1785 1784 +f 1656 1657 1785 +f 1657 1786 1785 +f 1657 1658 1787 +f 1657 1787 1786 +f 1658 1659 1787 +f 1659 1788 1787 +f 1659 1660 1789 +f 1659 1789 1788 +f 1660 1661 1789 +f 1661 1790 1789 +f 1661 1662 1791 +f 1661 1791 1790 +f 1662 1663 1791 +f 1663 1792 1791 +f 1663 1664 1793 +f 1663 1793 1792 +f 1664 1665 1793 +f 1665 1794 1793 +f 1665 1666 1795 +f 1665 1795 1794 +f 1666 1667 1795 +f 1667 1796 1795 +f 1667 1668 1797 +f 1667 1797 1796 +f 1668 1669 1797 +f 1669 1798 1797 +f 1669 1670 1799 +f 1669 1799 1798 +f 1670 1671 1799 +f 1671 1800 1799 +f 1671 1672 1801 +f 1671 1801 1800 +f 1672 1673 1801 +f 1673 1802 1801 +f 1673 1674 1803 +f 1673 1803 1802 +f 1674 1675 1803 +f 1675 1804 1803 +f 1675 1676 1805 +f 1675 1805 1804 +f 1676 1677 1805 +f 1677 1806 1805 +f 1678 1679 1807 +f 1679 1808 1807 +f 1679 1680 1809 +f 1679 1809 1808 +f 1680 1681 1809 +f 1681 1810 1809 +f 1681 1682 1811 +f 1681 1811 1810 +f 1682 1683 1811 +f 1683 1812 1811 +f 1683 1684 1813 +f 1683 1813 1812 +f 1684 1685 1813 +f 1685 1814 1813 +f 1685 1686 1815 +f 1685 1815 1814 +f 1686 1687 1815 +f 1687 1816 1815 +f 1687 1688 1817 +f 1687 1817 1816 +f 1688 1689 1817 +f 1689 1818 1817 +f 1689 1690 1819 +f 1689 1819 1818 +f 1690 1691 1819 +f 1691 1820 1819 +f 1691 1692 1821 +f 1691 1821 1820 +f 1692 1693 1821 +f 1693 1822 1821 +f 1693 1694 1823 +f 1693 1823 1822 +f 1694 1695 1823 +f 1695 1824 1823 +f 1695 1696 1825 +f 1695 1825 1824 +f 1696 1697 1825 +f 1697 1826 1825 +f 1697 1698 1827 +f 1697 1827 1826 +f 1698 1699 1827 +f 1699 1828 1827 +f 1699 1700 1829 +f 1699 1829 1828 +f 1700 1701 1829 +f 1701 1830 1829 +f 1701 1702 1831 +f 1701 1831 1830 +f 1702 1703 1831 +f 1703 1832 1831 +f 1703 1704 1833 +f 1703 1833 1832 +f 1704 1705 1833 +f 1705 1834 1833 +f 1705 1706 1835 +f 1705 1835 1834 +f 1706 1707 1835 +f 1707 1836 1835 +f 1707 1708 1837 +f 1707 1837 1836 +f 1708 1709 1837 +f 1709 1838 1837 +f 1709 
1710 1839 +f 1709 1839 1838 +f 1710 1711 1839 +f 1711 1840 1839 +f 1711 1712 1841 +f 1711 1841 1840 +f 1712 1713 1841 +f 1713 1842 1841 +f 1713 1714 1843 +f 1713 1843 1842 +f 1714 1715 1843 +f 1715 1844 1843 +f 1715 1716 1845 +f 1715 1845 1844 +f 1716 1717 1845 +f 1717 1846 1845 +f 1717 1718 1847 +f 1717 1847 1846 +f 1718 1719 1847 +f 1719 1848 1847 +f 1719 1720 1849 +f 1719 1849 1848 +f 1720 1721 1849 +f 1721 1850 1849 +f 1721 1722 1851 +f 1721 1851 1850 +f 1722 1723 1851 +f 1723 1852 1851 +f 1723 1724 1853 +f 1723 1853 1852 +f 1724 1725 1853 +f 1725 1854 1853 +f 1725 1726 1855 +f 1725 1855 1854 +f 1726 1727 1855 +f 1727 1856 1855 +f 1727 1728 1857 +f 1727 1857 1856 +f 1728 1729 1857 +f 1729 1858 1857 +f 1729 1730 1859 +f 1729 1859 1858 +f 1730 1731 1859 +f 1731 1860 1859 +f 1731 1732 1861 +f 1731 1861 1860 +f 1732 1733 1861 +f 1733 1862 1861 +f 1733 1734 1863 +f 1733 1863 1862 +f 1734 1735 1863 +f 1735 1864 1863 +f 1735 1736 1865 +f 1735 1865 1864 +f 1736 1737 1865 +f 1737 1866 1865 +f 1737 1738 1867 +f 1737 1867 1866 +f 1738 1739 1867 +f 1739 1868 1867 +f 1739 1740 1869 +f 1739 1869 1868 +f 1740 1741 1869 +f 1741 1870 1869 +f 1741 1742 1871 +f 1741 1871 1870 +f 1742 1743 1871 +f 1743 1872 1871 +f 1743 1744 1873 +f 1743 1873 1872 +f 1744 1745 1873 +f 1745 1874 1873 +f 1745 1746 1875 +f 1745 1875 1874 +f 1746 1747 1875 +f 1747 1876 1875 +f 1747 1748 1877 +f 1747 1877 1876 +f 1748 1749 1877 +f 1749 1878 1877 +f 1749 1750 1879 +f 1749 1879 1878 +f 1750 1751 1879 +f 1751 1880 1879 +f 1751 1752 1881 +f 1751 1881 1880 +f 1752 1753 1881 +f 1753 1882 1881 +f 1753 1754 1883 +f 1753 1883 1882 +f 1754 1755 1883 +f 1755 1884 1883 +f 1755 1756 1885 +f 1755 1885 1884 +f 1756 1757 1885 +f 1757 1886 1885 +f 1757 1758 1887 +f 1757 1887 1886 +f 1758 1759 1887 +f 1759 1888 1887 +f 1759 1760 1889 +f 1759 1889 1888 +f 1760 1761 1889 +f 1761 1890 1889 +f 1761 1762 1891 +f 1761 1891 1890 +f 1762 1763 1891 +f 1763 1892 1891 +f 1763 1764 1893 +f 1763 1893 1892 +f 1764 1765 1893 +f 1765 1894 1893 +f 1765 1766 1895 +f 1765 1895 1894 +f 1766 1767 1895 +f 1767 1896 1895 +f 1767 1768 1897 +f 1767 1897 1896 +f 1768 1769 1897 +f 1769 1898 1897 +f 1769 1770 1899 +f 1769 1899 1898 +f 1770 1771 1899 +f 1771 1900 1899 +f 1771 1772 1901 +f 1771 1901 1900 +f 1772 1773 1901 +f 1773 1902 1901 +f 1773 1774 1903 +f 1773 1903 1902 +f 1774 1775 1903 +f 1775 1904 1903 +f 1775 1776 1905 +f 1775 1905 1904 +f 1776 1777 1905 +f 1777 1906 1905 +f 1777 1778 1907 +f 1777 1907 1906 +f 1778 1779 1907 +f 1779 1908 1907 +f 1779 1780 1909 +f 1779 1909 1908 +f 1780 1781 1909 +f 1781 1910 1909 +f 1781 1782 1911 +f 1781 1911 1910 +f 1782 1783 1911 +f 1783 1912 1911 +f 1783 1784 1913 +f 1783 1913 1912 +f 1784 1785 1913 +f 1785 1914 1913 +f 1785 1786 1915 +f 1785 1915 1914 +f 1786 1787 1915 +f 1787 1916 1915 +f 1787 1788 1917 +f 1787 1917 1916 +f 1788 1789 1917 +f 1789 1918 1917 +f 1789 1790 1919 +f 1789 1919 1918 +f 1790 1791 1919 +f 1791 1920 1919 +f 1791 1792 1921 +f 1791 1921 1920 +f 1792 1793 1921 +f 1793 1922 1921 +f 1793 1794 1923 +f 1793 1923 1922 +f 1794 1795 1923 +f 1795 1924 1923 +f 1795 1796 1925 +f 1795 1925 1924 +f 1796 1797 1925 +f 1797 1926 1925 +f 1797 1798 1927 +f 1797 1927 1926 +f 1798 1799 1927 +f 1799 1928 1927 +f 1799 1800 1929 +f 1799 1929 1928 +f 1800 1801 1929 +f 1801 1930 1929 +f 1801 1802 1931 +f 1801 1931 1930 +f 1802 1803 1931 +f 1803 1932 1931 +f 1803 1804 1933 +f 1803 1933 1932 +f 1804 1805 1933 +f 1805 1934 1933 +f 1805 1806 1935 +f 1805 1935 1934 +f 1807 1808 1937 +f 1807 1937 1936 +f 1808 1809 1937 +f 1809 1938 
1937 +f 1809 1810 1939 +f 1809 1939 1938 +f 1810 1811 1939 +f 1811 1940 1939 +f 1811 1812 1941 +f 1811 1941 1940 +f 1812 1813 1941 +f 1813 1942 1941 +f 1813 1814 1943 +f 1813 1943 1942 +f 1814 1815 1943 +f 1815 1944 1943 +f 1815 1816 1945 +f 1815 1945 1944 +f 1816 1817 1945 +f 1817 1946 1945 +f 1817 1818 1947 +f 1817 1947 1946 +f 1818 1819 1947 +f 1819 1948 1947 +f 1819 1820 1949 +f 1819 1949 1948 +f 1820 1821 1949 +f 1821 1950 1949 +f 1821 1822 1951 +f 1821 1951 1950 +f 1822 1823 1951 +f 1823 1952 1951 +f 1823 1824 1953 +f 1823 1953 1952 +f 1824 1825 1953 +f 1825 1954 1953 +f 1825 1826 1955 +f 1825 1955 1954 +f 1826 1827 1955 +f 1827 1956 1955 +f 1827 1828 1957 +f 1827 1957 1956 +f 1828 1829 1957 +f 1829 1958 1957 +f 1829 1830 1959 +f 1829 1959 1958 +f 1830 1831 1959 +f 1831 1960 1959 +f 1831 1832 1961 +f 1831 1961 1960 +f 1832 1833 1961 +f 1833 1962 1961 +f 1833 1834 1963 +f 1833 1963 1962 +f 1834 1835 1963 +f 1835 1964 1963 +f 1835 1836 1965 +f 1835 1965 1964 +f 1836 1837 1965 +f 1837 1966 1965 +f 1837 1838 1967 +f 1837 1967 1966 +f 1838 1839 1967 +f 1839 1968 1967 +f 1839 1840 1969 +f 1839 1969 1968 +f 1840 1841 1969 +f 1841 1970 1969 +f 1841 1842 1971 +f 1841 1971 1970 +f 1842 1843 1971 +f 1843 1972 1971 +f 1843 1844 1973 +f 1843 1973 1972 +f 1844 1845 1973 +f 1845 1974 1973 +f 1845 1846 1975 +f 1845 1975 1974 +f 1846 1847 1975 +f 1847 1976 1975 +f 1847 1848 1977 +f 1847 1977 1976 +f 1848 1849 1977 +f 1849 1978 1977 +f 1849 1850 1979 +f 1849 1979 1978 +f 1850 1851 1979 +f 1851 1980 1979 +f 1851 1852 1981 +f 1851 1981 1980 +f 1852 1853 1981 +f 1853 1982 1981 +f 1853 1854 1983 +f 1853 1983 1982 +f 1854 1855 1983 +f 1855 1984 1983 +f 1855 1856 1985 +f 1855 1985 1984 +f 1856 1857 1985 +f 1857 1986 1985 +f 1857 1858 1987 +f 1857 1987 1986 +f 1858 1859 1987 +f 1859 1988 1987 +f 1859 1860 1989 +f 1859 1989 1988 +f 1860 1861 1989 +f 1861 1990 1989 +f 1861 1862 1991 +f 1861 1991 1990 +f 1862 1863 1991 +f 1863 1992 1991 +f 1863 1864 1993 +f 1863 1993 1992 +f 1864 1865 1993 +f 1865 1994 1993 +f 1865 1866 1995 +f 1865 1995 1994 +f 1866 1867 1995 +f 1867 1996 1995 +f 1867 1868 1997 +f 1867 1997 1996 +f 1868 1869 1997 +f 1869 1998 1997 +f 1869 1870 1999 +f 1869 1999 1998 +f 1870 1871 1999 +f 1871 2000 1999 +f 1871 1872 2001 +f 1871 2001 2000 +f 1872 1873 2001 +f 1873 2002 2001 +f 1873 1874 2003 +f 1873 2003 2002 +f 1874 1875 2003 +f 1875 2004 2003 +f 1875 1876 2005 +f 1875 2005 2004 +f 1876 1877 2005 +f 1877 2006 2005 +f 1877 1878 2007 +f 1877 2007 2006 +f 1878 1879 2007 +f 1879 2008 2007 +f 1879 1880 2009 +f 1879 2009 2008 +f 1880 1881 2009 +f 1881 2010 2009 +f 1881 1882 2011 +f 1881 2011 2010 +f 1882 1883 2011 +f 1883 2012 2011 +f 1883 1884 2013 +f 1883 2013 2012 +f 1884 1885 2013 +f 1885 2014 2013 +f 1885 1886 2015 +f 1885 2015 2014 +f 1886 1887 2015 +f 1887 2016 2015 +f 1887 1888 2017 +f 1887 2017 2016 +f 1888 1889 2017 +f 1889 2018 2017 +f 1889 1890 2019 +f 1889 2019 2018 +f 1890 1891 2019 +f 1891 2020 2019 +f 1891 1892 2021 +f 1891 2021 2020 +f 1892 1893 2021 +f 1893 2022 2021 +f 1893 1894 2023 +f 1893 2023 2022 +f 1894 1895 2023 +f 1895 2024 2023 +f 1895 1896 2025 +f 1895 2025 2024 +f 1896 1897 2025 +f 1897 2026 2025 +f 1897 1898 2027 +f 1897 2027 2026 +f 1898 1899 2027 +f 1899 2028 2027 +f 1899 1900 2029 +f 1899 2029 2028 +f 1900 1901 2029 +f 1901 2030 2029 +f 1901 1902 2031 +f 1901 2031 2030 +f 1902 1903 2031 +f 1903 2032 2031 +f 1903 1904 2033 +f 1903 2033 2032 +f 1904 1905 2033 +f 1905 2034 2033 +f 1905 1906 2035 +f 1905 2035 2034 +f 1906 1907 2035 +f 1907 2036 2035 +f 1907 1908 2037 +f 
1907 2037 2036 +f 1908 1909 2037 +f 1909 2038 2037 +f 1909 1910 2039 +f 1909 2039 2038 +f 1910 1911 2039 +f 1911 2040 2039 +f 1911 1912 2041 +f 1911 2041 2040 +f 1912 1913 2041 +f 1913 2042 2041 +f 1913 1914 2043 +f 1913 2043 2042 +f 1914 1915 2043 +f 1915 2044 2043 +f 1915 1916 2045 +f 1915 2045 2044 +f 1916 1917 2045 +f 1917 2046 2045 +f 1917 1918 2047 +f 1917 2047 2046 +f 1918 1919 2047 +f 1919 2048 2047 +f 1919 1920 2049 +f 1919 2049 2048 +f 1920 1921 2049 +f 1921 2050 2049 +f 1921 1922 2051 +f 1921 2051 2050 +f 1922 1923 2051 +f 1923 2052 2051 +f 1923 1924 2053 +f 1923 2053 2052 +f 1924 1925 2053 +f 1925 2054 2053 +f 1925 1926 2055 +f 1925 2055 2054 +f 1926 1927 2055 +f 1927 2056 2055 +f 1927 1928 2057 +f 1927 2057 2056 +f 1928 1929 2057 +f 1929 2058 2057 +f 1929 1930 2059 +f 1929 2059 2058 +f 1930 1931 2059 +f 1931 2060 2059 +f 1931 1932 2061 +f 1931 2061 2060 +f 1932 1933 2061 +f 1933 2062 2061 +f 1933 1934 2063 +f 1933 2063 2062 +f 1934 1935 2063 +f 1935 2064 2063 +f 1936 1937 2065 +f 1937 2066 2065 +f 1937 1938 2067 +f 1937 2067 2066 +f 1938 1939 2067 +f 1939 2068 2067 +f 1939 1940 2069 +f 1939 2069 2068 +f 1940 1941 2069 +f 1941 2070 2069 +f 1941 1942 2071 +f 1941 2071 2070 +f 1942 1943 2071 +f 1943 2072 2071 +f 1943 1944 2073 +f 1943 2073 2072 +f 1944 1945 2073 +f 1945 2074 2073 +f 1945 1946 2075 +f 1945 2075 2074 +f 1946 1947 2075 +f 1947 2076 2075 +f 1947 1948 2077 +f 1947 2077 2076 +f 1948 1949 2077 +f 1949 2078 2077 +f 1949 1950 2079 +f 1949 2079 2078 +f 1950 1951 2079 +f 1951 2080 2079 +f 1951 1952 2081 +f 1951 2081 2080 +f 1952 1953 2081 +f 1953 2082 2081 +f 1953 1954 2083 +f 1953 2083 2082 +f 1954 1955 2083 +f 1955 2084 2083 +f 1955 1956 2085 +f 1955 2085 2084 +f 1956 1957 2085 +f 1957 2086 2085 +f 1957 1958 2087 +f 1957 2087 2086 +f 1958 1959 2087 +f 1959 2088 2087 +f 1959 1960 2089 +f 1959 2089 2088 +f 1960 1961 2089 +f 1961 2090 2089 +f 1961 1962 2091 +f 1961 2091 2090 +f 1962 1963 2091 +f 1963 2092 2091 +f 1963 1964 2093 +f 1963 2093 2092 +f 1964 1965 2093 +f 1965 2094 2093 +f 1965 1966 2095 +f 1965 2095 2094 +f 1966 1967 2095 +f 1967 2096 2095 +f 1967 1968 2097 +f 1967 2097 2096 +f 1968 1969 2097 +f 1969 2098 2097 +f 1969 1970 2099 +f 1969 2099 2098 +f 1970 1971 2099 +f 1971 2100 2099 +f 1971 1972 2101 +f 1971 2101 2100 +f 1972 1973 2101 +f 1973 2102 2101 +f 1973 1974 2103 +f 1973 2103 2102 +f 1974 1975 2103 +f 1975 2104 2103 +f 1975 1976 2105 +f 1975 2105 2104 +f 1976 1977 2105 +f 1977 2106 2105 +f 1977 1978 2107 +f 1977 2107 2106 +f 1978 1979 2107 +f 1979 2108 2107 +f 1979 1980 2109 +f 1979 2109 2108 +f 1980 1981 2109 +f 1981 2110 2109 +f 1981 1982 2111 +f 1981 2111 2110 +f 1982 1983 2111 +f 1983 2112 2111 +f 1983 1984 2113 +f 1983 2113 2112 +f 1984 1985 2113 +f 1985 2114 2113 +f 1985 1986 2115 +f 1985 2115 2114 +f 1986 1987 2115 +f 1987 2116 2115 +f 1987 1988 2117 +f 1987 2117 2116 +f 1988 1989 2117 +f 1989 2118 2117 +f 1989 1990 2119 +f 1989 2119 2118 +f 1990 1991 2119 +f 1991 2120 2119 +f 1991 1992 2121 +f 1991 2121 2120 +f 1992 1993 2121 +f 1993 2122 2121 +f 1993 1994 2123 +f 1993 2123 2122 +f 1994 1995 2123 +f 1995 2124 2123 +f 1995 1996 2125 +f 1995 2125 2124 +f 1996 1997 2125 +f 1997 2126 2125 +f 1997 1998 2127 +f 1997 2127 2126 +f 1998 1999 2127 +f 1999 2128 2127 +f 1999 2000 2129 +f 1999 2129 2128 +f 2000 2001 2129 +f 2001 2130 2129 +f 2001 2002 2131 +f 2001 2131 2130 +f 2002 2003 2131 +f 2003 2132 2131 +f 2003 2004 2133 +f 2003 2133 2132 +f 2004 2005 2133 +f 2005 2134 2133 +f 2005 2006 2135 +f 2005 2135 2134 +f 2006 2007 2135 +f 2007 2136 2135 +f 2007 
2008 2137 +f 2007 2137 2136 +f 2008 2009 2137 +f 2009 2138 2137 +f 2009 2010 2139 +f 2009 2139 2138 +f 2010 2011 2139 +f 2011 2140 2139 +f 2011 2012 2141 +f 2011 2141 2140 +f 2012 2013 2141 +f 2013 2142 2141 +f 2013 2014 2143 +f 2013 2143 2142 +f 2014 2015 2143 +f 2015 2144 2143 +f 2015 2016 2145 +f 2015 2145 2144 +f 2016 2017 2145 +f 2017 2146 2145 +f 2017 2018 2147 +f 2017 2147 2146 +f 2018 2019 2147 +f 2019 2148 2147 +f 2019 2020 2149 +f 2019 2149 2148 +f 2020 2021 2149 +f 2021 2150 2149 +f 2021 2022 2151 +f 2021 2151 2150 +f 2022 2023 2151 +f 2023 2152 2151 +f 2023 2024 2153 +f 2023 2153 2152 +f 2024 2025 2153 +f 2025 2154 2153 +f 2025 2026 2155 +f 2025 2155 2154 +f 2026 2027 2155 +f 2027 2156 2155 +f 2027 2028 2157 +f 2027 2157 2156 +f 2028 2029 2157 +f 2029 2158 2157 +f 2029 2030 2159 +f 2029 2159 2158 +f 2030 2031 2159 +f 2031 2160 2159 +f 2031 2032 2161 +f 2031 2161 2160 +f 2032 2033 2161 +f 2033 2162 2161 +f 2033 2034 2163 +f 2033 2163 2162 +f 2034 2035 2163 +f 2035 2164 2163 +f 2035 2036 2165 +f 2035 2165 2164 +f 2036 2037 2165 +f 2037 2166 2165 +f 2037 2038 2167 +f 2037 2167 2166 +f 2038 2039 2167 +f 2039 2168 2167 +f 2039 2040 2169 +f 2039 2169 2168 +f 2040 2041 2169 +f 2041 2170 2169 +f 2041 2042 2171 +f 2041 2171 2170 +f 2042 2043 2171 +f 2043 2172 2171 +f 2043 2044 2173 +f 2043 2173 2172 +f 2044 2045 2173 +f 2045 2174 2173 +f 2045 2046 2175 +f 2045 2175 2174 +f 2046 2047 2175 +f 2047 2176 2175 +f 2047 2048 2177 +f 2047 2177 2176 +f 2048 2049 2177 +f 2049 2178 2177 +f 2049 2050 2179 +f 2049 2179 2178 +f 2050 2051 2179 +f 2051 2180 2179 +f 2051 2052 2181 +f 2051 2181 2180 +f 2052 2053 2181 +f 2053 2182 2181 +f 2053 2054 2183 +f 2053 2183 2182 +f 2054 2055 2183 +f 2055 2184 2183 +f 2055 2056 2185 +f 2055 2185 2184 +f 2056 2057 2185 +f 2057 2186 2185 +f 2057 2058 2187 +f 2057 2187 2186 +f 2058 2059 2187 +f 2059 2188 2187 +f 2059 2060 2189 +f 2059 2189 2188 +f 2060 2061 2189 +f 2061 2190 2189 +f 2061 2062 2191 +f 2061 2191 2190 +f 2062 2063 2191 +f 2063 2192 2191 +f 2063 2064 2193 +f 2063 2193 2192 +f 2065 2066 2195 +f 2065 2195 2194 +f 2066 2067 2195 +f 2067 2196 2195 +f 2067 2068 2197 +f 2067 2197 2196 +f 2068 2069 2197 +f 2069 2198 2197 +f 2069 2070 2199 +f 2069 2199 2198 +f 2070 2071 2199 +f 2071 2200 2199 +f 2071 2072 2201 +f 2071 2201 2200 +f 2072 2073 2201 +f 2073 2202 2201 +f 2073 2074 2203 +f 2073 2203 2202 +f 2074 2075 2203 +f 2075 2204 2203 +f 2075 2076 2205 +f 2075 2205 2204 +f 2076 2077 2205 +f 2077 2206 2205 +f 2077 2078 2207 +f 2077 2207 2206 +f 2078 2079 2207 +f 2079 2208 2207 +f 2079 2080 2209 +f 2079 2209 2208 +f 2080 2081 2209 +f 2081 2210 2209 +f 2081 2082 2211 +f 2081 2211 2210 +f 2082 2083 2211 +f 2083 2212 2211 +f 2083 2084 2213 +f 2083 2213 2212 +f 2084 2085 2213 +f 2085 2214 2213 +f 2085 2086 2215 +f 2085 2215 2214 +f 2086 2087 2215 +f 2087 2216 2215 +f 2087 2088 2217 +f 2087 2217 2216 +f 2088 2089 2217 +f 2089 2218 2217 +f 2089 2090 2219 +f 2089 2219 2218 +f 2090 2091 2219 +f 2091 2220 2219 +f 2091 2092 2221 +f 2091 2221 2220 +f 2092 2093 2221 +f 2093 2222 2221 +f 2093 2094 2223 +f 2093 2223 2222 +f 2094 2095 2223 +f 2095 2224 2223 +f 2095 2096 2225 +f 2095 2225 2224 +f 2096 2097 2225 +f 2097 2226 2225 +f 2097 2098 2227 +f 2097 2227 2226 +f 2098 2099 2227 +f 2099 2228 2227 +f 2099 2100 2229 +f 2099 2229 2228 +f 2100 2101 2229 +f 2101 2230 2229 +f 2101 2102 2231 +f 2101 2231 2230 +f 2102 2103 2231 +f 2103 2232 2231 +f 2103 2104 2233 +f 2103 2233 2232 +f 2104 2105 2233 +f 2105 2234 2233 +f 2105 2106 2235 +f 2105 2235 2234 +f 2106 2107 2235 +f 2107 2236 
[… diff hunk continues: several thousand more "+f v1 v2 v3" triangle-face records from the OBJ mesh file being added — a regular grid triangulation (row stride 129 in this span, with alternating diagonal splits) covering vertex indices roughly 2107 through 4324; the raw index data is elided here for readability …]
4323 +f 4195 4196 4325 +f 4195 4325 4324 +f 4196 4197 4325 +f 4197 4326 4325 +f 4197 4198 4327 +f 4197 4327 4326 +f 4198 4199 4327 +f 4199 4328 4327 +f 4199 4200 4329 +f 4199 4329 4328 +f 4200 4201 4329 +f 4201 4330 4329 +f 4201 4202 4331 +f 4201 4331 4330 +f 4202 4203 4331 +f 4203 4332 4331 +f 4203 4204 4333 +f 4203 4333 4332 +f 4204 4205 4333 +f 4205 4334 4333 +f 4205 4206 4335 +f 4205 4335 4334 +f 4206 4207 4335 +f 4207 4336 4335 +f 4207 4208 4337 +f 4207 4337 4336 +f 4208 4209 4337 +f 4209 4338 4337 +f 4209 4210 4339 +f 4209 4339 4338 +f 4210 4211 4339 +f 4211 4340 4339 +f 4211 4212 4341 +f 4211 4341 4340 +f 4212 4213 4341 +f 4213 4342 4341 +f 4213 4214 4343 +f 4213 4343 4342 +f 4214 4215 4343 +f 4215 4344 4343 +f 4215 4216 4345 +f 4215 4345 4344 +f 4216 4217 4345 +f 4217 4346 4345 +f 4217 4218 4347 +f 4217 4347 4346 +f 4218 4219 4347 +f 4219 4348 4347 +f 4219 4220 4349 +f 4219 4349 4348 +f 4220 4221 4349 +f 4221 4350 4349 +f 4221 4222 4351 +f 4221 4351 4350 +f 4222 4223 4351 +f 4223 4352 4351 +f 4223 4224 4353 +f 4223 4353 4352 +f 4224 4225 4353 +f 4225 4354 4353 +f 4225 4226 4355 +f 4225 4355 4354 +f 4226 4227 4355 +f 4227 4356 4355 +f 4227 4228 4357 +f 4227 4357 4356 +f 4228 4229 4357 +f 4229 4358 4357 +f 4229 4230 4359 +f 4229 4359 4358 +f 4230 4231 4359 +f 4231 4360 4359 +f 4231 4232 4361 +f 4231 4361 4360 +f 4232 4233 4361 +f 4233 4362 4361 +f 4233 4234 4363 +f 4233 4363 4362 +f 4234 4235 4363 +f 4235 4364 4363 +f 4235 4236 4365 +f 4235 4365 4364 +f 4236 4237 4365 +f 4237 4366 4365 +f 4237 4238 4367 +f 4237 4367 4366 +f 4238 4239 4367 +f 4239 4368 4367 +f 4239 4240 4369 +f 4239 4369 4368 +f 4240 4241 4369 +f 4241 4370 4369 +f 4241 4242 4371 +f 4241 4371 4370 +f 4242 4243 4371 +f 4243 4372 4371 +f 4243 4244 4373 +f 4243 4373 4372 +f 4244 4245 4373 +f 4245 4374 4373 +f 4245 4246 4375 +f 4245 4375 4374 +f 4246 4247 4375 +f 4247 4376 4375 +f 4247 4248 4377 +f 4247 4377 4376 +f 4248 4249 4377 +f 4249 4378 4377 +f 4249 4250 4379 +f 4249 4379 4378 +f 4250 4251 4379 +f 4251 4380 4379 +f 4251 4252 4381 +f 4251 4381 4380 +f 4252 4253 4381 +f 4253 4382 4381 +f 4253 4254 4383 +f 4253 4383 4382 +f 4254 4255 4383 +f 4255 4384 4383 +f 4255 4256 4385 +f 4255 4385 4384 +f 4256 4257 4385 +f 4257 4386 4385 +f 4258 4259 4387 +f 4259 4388 4387 +f 4259 4260 4389 +f 4259 4389 4388 +f 4260 4261 4389 +f 4261 4390 4389 +f 4261 4262 4391 +f 4261 4391 4390 +f 4262 4263 4391 +f 4263 4392 4391 +f 4263 4264 4393 +f 4263 4393 4392 +f 4264 4265 4393 +f 4265 4394 4393 +f 4265 4266 4395 +f 4265 4395 4394 +f 4266 4267 4395 +f 4267 4396 4395 +f 4267 4268 4397 +f 4267 4397 4396 +f 4268 4269 4397 +f 4269 4398 4397 +f 4269 4270 4399 +f 4269 4399 4398 +f 4270 4271 4399 +f 4271 4400 4399 +f 4271 4272 4401 +f 4271 4401 4400 +f 4272 4273 4401 +f 4273 4402 4401 +f 4273 4274 4403 +f 4273 4403 4402 +f 4274 4275 4403 +f 4275 4404 4403 +f 4275 4276 4405 +f 4275 4405 4404 +f 4276 4277 4405 +f 4277 4406 4405 +f 4277 4278 4407 +f 4277 4407 4406 +f 4278 4279 4407 +f 4279 4408 4407 +f 4279 4280 4409 +f 4279 4409 4408 +f 4280 4281 4409 +f 4281 4410 4409 +f 4281 4282 4411 +f 4281 4411 4410 +f 4282 4283 4411 +f 4283 4412 4411 +f 4283 4284 4413 +f 4283 4413 4412 +f 4284 4285 4413 +f 4285 4414 4413 +f 4285 4286 4415 +f 4285 4415 4414 +f 4286 4287 4415 +f 4287 4416 4415 +f 4287 4288 4417 +f 4287 4417 4416 +f 4288 4289 4417 +f 4289 4418 4417 +f 4289 4290 4419 +f 4289 4419 4418 +f 4290 4291 4419 +f 4291 4420 4419 +f 4291 4292 4421 +f 4291 4421 4420 +f 4292 4293 4421 +f 4293 4422 4421 +f 4293 4294 4423 +f 4293 4423 4422 +f 4294 4295 4423 +f 
4295 4424 4423 +f 4295 4296 4425 +f 4295 4425 4424 +f 4296 4297 4425 +f 4297 4426 4425 +f 4297 4298 4427 +f 4297 4427 4426 +f 4298 4299 4427 +f 4299 4428 4427 +f 4299 4300 4429 +f 4299 4429 4428 +f 4300 4301 4429 +f 4301 4430 4429 +f 4301 4302 4431 +f 4301 4431 4430 +f 4302 4303 4431 +f 4303 4432 4431 +f 4303 4304 4433 +f 4303 4433 4432 +f 4304 4305 4433 +f 4305 4434 4433 +f 4305 4306 4435 +f 4305 4435 4434 +f 4306 4307 4435 +f 4307 4436 4435 +f 4307 4308 4437 +f 4307 4437 4436 +f 4308 4309 4437 +f 4309 4438 4437 +f 4309 4310 4439 +f 4309 4439 4438 +f 4310 4311 4439 +f 4311 4440 4439 +f 4311 4312 4441 +f 4311 4441 4440 +f 4312 4313 4441 +f 4313 4442 4441 +f 4313 4314 4443 +f 4313 4443 4442 +f 4314 4315 4443 +f 4315 4444 4443 +f 4315 4316 4445 +f 4315 4445 4444 +f 4316 4317 4445 +f 4317 4446 4445 +f 4317 4318 4447 +f 4317 4447 4446 +f 4318 4319 4447 +f 4319 4448 4447 +f 4319 4320 4449 +f 4319 4449 4448 +f 4320 4321 4449 +f 4321 4450 4449 +f 4321 4322 4451 +f 4321 4451 4450 +f 4322 4323 4451 +f 4323 4452 4451 +f 4323 4324 4453 +f 4323 4453 4452 +f 4324 4325 4453 +f 4325 4454 4453 +f 4325 4326 4455 +f 4325 4455 4454 +f 4326 4327 4455 +f 4327 4456 4455 +f 4327 4328 4457 +f 4327 4457 4456 +f 4328 4329 4457 +f 4329 4458 4457 +f 4329 4330 4459 +f 4329 4459 4458 +f 4330 4331 4459 +f 4331 4460 4459 +f 4331 4332 4461 +f 4331 4461 4460 +f 4332 4333 4461 +f 4333 4462 4461 +f 4333 4334 4463 +f 4333 4463 4462 +f 4334 4335 4463 +f 4335 4464 4463 +f 4335 4336 4465 +f 4335 4465 4464 +f 4336 4337 4465 +f 4337 4466 4465 +f 4337 4338 4467 +f 4337 4467 4466 +f 4338 4339 4467 +f 4339 4468 4467 +f 4339 4340 4469 +f 4339 4469 4468 +f 4340 4341 4469 +f 4341 4470 4469 +f 4341 4342 4471 +f 4341 4471 4470 +f 4342 4343 4471 +f 4343 4472 4471 +f 4343 4344 4473 +f 4343 4473 4472 +f 4344 4345 4473 +f 4345 4474 4473 +f 4345 4346 4475 +f 4345 4475 4474 +f 4346 4347 4475 +f 4347 4476 4475 +f 4347 4348 4477 +f 4347 4477 4476 +f 4348 4349 4477 +f 4349 4478 4477 +f 4349 4350 4479 +f 4349 4479 4478 +f 4350 4351 4479 +f 4351 4480 4479 +f 4351 4352 4481 +f 4351 4481 4480 +f 4352 4353 4481 +f 4353 4482 4481 +f 4353 4354 4483 +f 4353 4483 4482 +f 4354 4355 4483 +f 4355 4484 4483 +f 4355 4356 4485 +f 4355 4485 4484 +f 4356 4357 4485 +f 4357 4486 4485 +f 4357 4358 4487 +f 4357 4487 4486 +f 4358 4359 4487 +f 4359 4488 4487 +f 4359 4360 4489 +f 4359 4489 4488 +f 4360 4361 4489 +f 4361 4490 4489 +f 4361 4362 4491 +f 4361 4491 4490 +f 4362 4363 4491 +f 4363 4492 4491 +f 4363 4364 4493 +f 4363 4493 4492 +f 4364 4365 4493 +f 4365 4494 4493 +f 4365 4366 4495 +f 4365 4495 4494 +f 4366 4367 4495 +f 4367 4496 4495 +f 4367 4368 4497 +f 4367 4497 4496 +f 4368 4369 4497 +f 4369 4498 4497 +f 4369 4370 4499 +f 4369 4499 4498 +f 4370 4371 4499 +f 4371 4500 4499 +f 4371 4372 4501 +f 4371 4501 4500 +f 4372 4373 4501 +f 4373 4502 4501 +f 4373 4374 4503 +f 4373 4503 4502 +f 4374 4375 4503 +f 4375 4504 4503 +f 4375 4376 4505 +f 4375 4505 4504 +f 4376 4377 4505 +f 4377 4506 4505 +f 4377 4378 4507 +f 4377 4507 4506 +f 4378 4379 4507 +f 4379 4508 4507 +f 4379 4380 4509 +f 4379 4509 4508 +f 4380 4381 4509 +f 4381 4510 4509 +f 4381 4382 4511 +f 4381 4511 4510 +f 4382 4383 4511 +f 4383 4512 4511 +f 4383 4384 4513 +f 4383 4513 4512 +f 4384 4385 4513 +f 4385 4514 4513 +f 4385 4386 4515 +f 4385 4515 4514 +f 4387 4388 4517 +f 4387 4517 4516 +f 4388 4389 4517 +f 4389 4518 4517 +f 4389 4390 4519 +f 4389 4519 4518 +f 4390 4391 4519 +f 4391 4520 4519 +f 4391 4392 4521 +f 4391 4521 4520 +f 4392 4393 4521 +f 4393 4522 4521 +f 4393 4394 4523 +f 4393 4523 4522 +f 4394 
4395 4523 +f 4395 4524 4523 +f 4395 4396 4525 +f 4395 4525 4524 +f 4396 4397 4525 +f 4397 4526 4525 +f 4397 4398 4527 +f 4397 4527 4526 +f 4398 4399 4527 +f 4399 4528 4527 +f 4399 4400 4529 +f 4399 4529 4528 +f 4400 4401 4529 +f 4401 4530 4529 +f 4401 4402 4531 +f 4401 4531 4530 +f 4402 4403 4531 +f 4403 4532 4531 +f 4403 4404 4533 +f 4403 4533 4532 +f 4404 4405 4533 +f 4405 4534 4533 +f 4405 4406 4535 +f 4405 4535 4534 +f 4406 4407 4535 +f 4407 4536 4535 +f 4407 4408 4537 +f 4407 4537 4536 +f 4408 4409 4537 +f 4409 4538 4537 +f 4409 4410 4539 +f 4409 4539 4538 +f 4410 4411 4539 +f 4411 4540 4539 +f 4411 4412 4541 +f 4411 4541 4540 +f 4412 4413 4541 +f 4413 4542 4541 +f 4413 4414 4543 +f 4413 4543 4542 +f 4414 4415 4543 +f 4415 4544 4543 +f 4415 4416 4545 +f 4415 4545 4544 +f 4416 4417 4545 +f 4417 4546 4545 +f 4417 4418 4547 +f 4417 4547 4546 +f 4418 4419 4547 +f 4419 4548 4547 +f 4419 4420 4549 +f 4419 4549 4548 +f 4420 4421 4549 +f 4421 4550 4549 +f 4421 4422 4551 +f 4421 4551 4550 +f 4422 4423 4551 +f 4423 4552 4551 +f 4423 4424 4553 +f 4423 4553 4552 +f 4424 4425 4553 +f 4425 4554 4553 +f 4425 4426 4555 +f 4425 4555 4554 +f 4426 4427 4555 +f 4427 4556 4555 +f 4427 4428 4557 +f 4427 4557 4556 +f 4428 4429 4557 +f 4429 4558 4557 +f 4429 4430 4559 +f 4429 4559 4558 +f 4430 4431 4559 +f 4431 4560 4559 +f 4431 4432 4561 +f 4431 4561 4560 +f 4432 4433 4561 +f 4433 4562 4561 +f 4433 4434 4563 +f 4433 4563 4562 +f 4434 4435 4563 +f 4435 4564 4563 +f 4435 4436 4565 +f 4435 4565 4564 +f 4436 4437 4565 +f 4437 4566 4565 +f 4437 4438 4567 +f 4437 4567 4566 +f 4438 4439 4567 +f 4439 4568 4567 +f 4439 4440 4569 +f 4439 4569 4568 +f 4440 4441 4569 +f 4441 4570 4569 +f 4441 4442 4571 +f 4441 4571 4570 +f 4442 4443 4571 +f 4443 4572 4571 +f 4443 4444 4573 +f 4443 4573 4572 +f 4444 4445 4573 +f 4445 4574 4573 +f 4445 4446 4575 +f 4445 4575 4574 +f 4446 4447 4575 +f 4447 4576 4575 +f 4447 4448 4577 +f 4447 4577 4576 +f 4448 4449 4577 +f 4449 4578 4577 +f 4449 4450 4579 +f 4449 4579 4578 +f 4450 4451 4579 +f 4451 4580 4579 +f 4451 4452 4581 +f 4451 4581 4580 +f 4452 4453 4581 +f 4453 4582 4581 +f 4453 4454 4583 +f 4453 4583 4582 +f 4454 4455 4583 +f 4455 4584 4583 +f 4455 4456 4585 +f 4455 4585 4584 +f 4456 4457 4585 +f 4457 4586 4585 +f 4457 4458 4587 +f 4457 4587 4586 +f 4458 4459 4587 +f 4459 4588 4587 +f 4459 4460 4589 +f 4459 4589 4588 +f 4460 4461 4589 +f 4461 4590 4589 +f 4461 4462 4591 +f 4461 4591 4590 +f 4462 4463 4591 +f 4463 4592 4591 +f 4463 4464 4593 +f 4463 4593 4592 +f 4464 4465 4593 +f 4465 4594 4593 +f 4465 4466 4595 +f 4465 4595 4594 +f 4466 4467 4595 +f 4467 4596 4595 +f 4467 4468 4597 +f 4467 4597 4596 +f 4468 4469 4597 +f 4469 4598 4597 +f 4469 4470 4599 +f 4469 4599 4598 +f 4470 4471 4599 +f 4471 4600 4599 +f 4471 4472 4601 +f 4471 4601 4600 +f 4472 4473 4601 +f 4473 4602 4601 +f 4473 4474 4603 +f 4473 4603 4602 +f 4474 4475 4603 +f 4475 4604 4603 +f 4475 4476 4605 +f 4475 4605 4604 +f 4476 4477 4605 +f 4477 4606 4605 +f 4477 4478 4607 +f 4477 4607 4606 +f 4478 4479 4607 +f 4479 4608 4607 +f 4479 4480 4609 +f 4479 4609 4608 +f 4480 4481 4609 +f 4481 4610 4609 +f 4481 4482 4611 +f 4481 4611 4610 +f 4482 4483 4611 +f 4483 4612 4611 +f 4483 4484 4613 +f 4483 4613 4612 +f 4484 4485 4613 +f 4485 4614 4613 +f 4485 4486 4615 +f 4485 4615 4614 +f 4486 4487 4615 +f 4487 4616 4615 +f 4487 4488 4617 +f 4487 4617 4616 +f 4488 4489 4617 +f 4489 4618 4617 +f 4489 4490 4619 +f 4489 4619 4618 +f 4490 4491 4619 +f 4491 4620 4619 +f 4491 4492 4621 +f 4491 4621 4620 +f 4492 4493 4621 +f 4493 4622 
4621 +f 4493 4494 4623 +f 4493 4623 4622 +f 4494 4495 4623 +f 4495 4624 4623 +f 4495 4496 4625 +f 4495 4625 4624 +f 4496 4497 4625 +f 4497 4626 4625 +f 4497 4498 4627 +f 4497 4627 4626 +f 4498 4499 4627 +f 4499 4628 4627 +f 4499 4500 4629 +f 4499 4629 4628 +f 4500 4501 4629 +f 4501 4630 4629 +f 4501 4502 4631 +f 4501 4631 4630 +f 4502 4503 4631 +f 4503 4632 4631 +f 4503 4504 4633 +f 4503 4633 4632 +f 4504 4505 4633 +f 4505 4634 4633 +f 4505 4506 4635 +f 4505 4635 4634 +f 4506 4507 4635 +f 4507 4636 4635 +f 4507 4508 4637 +f 4507 4637 4636 +f 4508 4509 4637 +f 4509 4638 4637 +f 4509 4510 4639 +f 4509 4639 4638 +f 4510 4511 4639 +f 4511 4640 4639 +f 4511 4512 4641 +f 4511 4641 4640 +f 4512 4513 4641 +f 4513 4642 4641 +f 4513 4514 4643 +f 4513 4643 4642 +f 4514 4515 4643 +f 4515 4644 4643 +f 4516 4517 4645 +f 4517 4646 4645 +f 4517 4518 4647 +f 4517 4647 4646 +f 4518 4519 4647 +f 4519 4648 4647 +f 4519 4520 4649 +f 4519 4649 4648 +f 4520 4521 4649 +f 4521 4650 4649 +f 4521 4522 4651 +f 4521 4651 4650 +f 4522 4523 4651 +f 4523 4652 4651 +f 4523 4524 4653 +f 4523 4653 4652 +f 4524 4525 4653 +f 4525 4654 4653 +f 4525 4526 4655 +f 4525 4655 4654 +f 4526 4527 4655 +f 4527 4656 4655 +f 4527 4528 4657 +f 4527 4657 4656 +f 4528 4529 4657 +f 4529 4658 4657 +f 4529 4530 4659 +f 4529 4659 4658 +f 4530 4531 4659 +f 4531 4660 4659 +f 4531 4532 4661 +f 4531 4661 4660 +f 4532 4533 4661 +f 4533 4662 4661 +f 4533 4534 4663 +f 4533 4663 4662 +f 4534 4535 4663 +f 4535 4664 4663 +f 4535 4536 4665 +f 4535 4665 4664 +f 4536 4537 4665 +f 4537 4666 4665 +f 4537 4538 4667 +f 4537 4667 4666 +f 4538 4539 4667 +f 4539 4668 4667 +f 4539 4540 4669 +f 4539 4669 4668 +f 4540 4541 4669 +f 4541 4670 4669 +f 4541 4542 4671 +f 4541 4671 4670 +f 4542 4543 4671 +f 4543 4672 4671 +f 4543 4544 4673 +f 4543 4673 4672 +f 4544 4545 4673 +f 4545 4674 4673 +f 4545 4546 4675 +f 4545 4675 4674 +f 4546 4547 4675 +f 4547 4676 4675 +f 4547 4548 4677 +f 4547 4677 4676 +f 4548 4549 4677 +f 4549 4678 4677 +f 4549 4550 4679 +f 4549 4679 4678 +f 4550 4551 4679 +f 4551 4680 4679 +f 4551 4552 4681 +f 4551 4681 4680 +f 4552 4553 4681 +f 4553 4682 4681 +f 4553 4554 4683 +f 4553 4683 4682 +f 4554 4555 4683 +f 4555 4684 4683 +f 4555 4556 4685 +f 4555 4685 4684 +f 4556 4557 4685 +f 4557 4686 4685 +f 4557 4558 4687 +f 4557 4687 4686 +f 4558 4559 4687 +f 4559 4688 4687 +f 4559 4560 4689 +f 4559 4689 4688 +f 4560 4561 4689 +f 4561 4690 4689 +f 4561 4562 4691 +f 4561 4691 4690 +f 4562 4563 4691 +f 4563 4692 4691 +f 4563 4564 4693 +f 4563 4693 4692 +f 4564 4565 4693 +f 4565 4694 4693 +f 4565 4566 4695 +f 4565 4695 4694 +f 4566 4567 4695 +f 4567 4696 4695 +f 4567 4568 4697 +f 4567 4697 4696 +f 4568 4569 4697 +f 4569 4698 4697 +f 4569 4570 4699 +f 4569 4699 4698 +f 4570 4571 4699 +f 4571 4700 4699 +f 4571 4572 4701 +f 4571 4701 4700 +f 4572 4573 4701 +f 4573 4702 4701 +f 4573 4574 4703 +f 4573 4703 4702 +f 4574 4575 4703 +f 4575 4704 4703 +f 4575 4576 4705 +f 4575 4705 4704 +f 4576 4577 4705 +f 4577 4706 4705 +f 4577 4578 4707 +f 4577 4707 4706 +f 4578 4579 4707 +f 4579 4708 4707 +f 4579 4580 4709 +f 4579 4709 4708 +f 4580 4581 4709 +f 4581 4710 4709 +f 4581 4582 4711 +f 4581 4711 4710 +f 4582 4583 4711 +f 4583 4712 4711 +f 4583 4584 4713 +f 4583 4713 4712 +f 4584 4585 4713 +f 4585 4714 4713 +f 4585 4586 4715 +f 4585 4715 4714 +f 4586 4587 4715 +f 4587 4716 4715 +f 4587 4588 4717 +f 4587 4717 4716 +f 4588 4589 4717 +f 4589 4718 4717 +f 4589 4590 4719 +f 4589 4719 4718 +f 4590 4591 4719 +f 4591 4720 4719 +f 4591 4592 4721 +f 4591 4721 4720 +f 4592 4593 4721 +f 
4593 4722 4721 +f 4593 4594 4723 +f 4593 4723 4722 +f 4594 4595 4723 +f 4595 4724 4723 +f 4595 4596 4725 +f 4595 4725 4724 +f 4596 4597 4725 +f 4597 4726 4725 +f 4597 4598 4727 +f 4597 4727 4726 +f 4598 4599 4727 +f 4599 4728 4727 +f 4599 4600 4729 +f 4599 4729 4728 +f 4600 4601 4729 +f 4601 4730 4729 +f 4601 4602 4731 +f 4601 4731 4730 +f 4602 4603 4731 +f 4603 4732 4731 +f 4603 4604 4733 +f 4603 4733 4732 +f 4604 4605 4733 +f 4605 4734 4733 +f 4605 4606 4735 +f 4605 4735 4734 +f 4606 4607 4735 +f 4607 4736 4735 +f 4607 4608 4737 +f 4607 4737 4736 +f 4608 4609 4737 +f 4609 4738 4737 +f 4609 4610 4739 +f 4609 4739 4738 +f 4610 4611 4739 +f 4611 4740 4739 +f 4611 4612 4741 +f 4611 4741 4740 +f 4612 4613 4741 +f 4613 4742 4741 +f 4613 4614 4743 +f 4613 4743 4742 +f 4614 4615 4743 +f 4615 4744 4743 +f 4615 4616 4745 +f 4615 4745 4744 +f 4616 4617 4745 +f 4617 4746 4745 +f 4617 4618 4747 +f 4617 4747 4746 +f 4618 4619 4747 +f 4619 4748 4747 +f 4619 4620 4749 +f 4619 4749 4748 +f 4620 4621 4749 +f 4621 4750 4749 +f 4621 4622 4751 +f 4621 4751 4750 +f 4622 4623 4751 +f 4623 4752 4751 +f 4623 4624 4753 +f 4623 4753 4752 +f 4624 4625 4753 +f 4625 4754 4753 +f 4625 4626 4755 +f 4625 4755 4754 +f 4626 4627 4755 +f 4627 4756 4755 +f 4627 4628 4757 +f 4627 4757 4756 +f 4628 4629 4757 +f 4629 4758 4757 +f 4629 4630 4759 +f 4629 4759 4758 +f 4630 4631 4759 +f 4631 4760 4759 +f 4631 4632 4761 +f 4631 4761 4760 +f 4632 4633 4761 +f 4633 4762 4761 +f 4633 4634 4763 +f 4633 4763 4762 +f 4634 4635 4763 +f 4635 4764 4763 +f 4635 4636 4765 +f 4635 4765 4764 +f 4636 4637 4765 +f 4637 4766 4765 +f 4637 4638 4767 +f 4637 4767 4766 +f 4638 4639 4767 +f 4639 4768 4767 +f 4639 4640 4769 +f 4639 4769 4768 +f 4640 4641 4769 +f 4641 4770 4769 +f 4641 4642 4771 +f 4641 4771 4770 +f 4642 4643 4771 +f 4643 4772 4771 +f 4643 4644 4773 +f 4643 4773 4772 +f 4645 4646 4775 +f 4645 4775 4774 +f 4646 4647 4775 +f 4647 4776 4775 +f 4647 4648 4777 +f 4647 4777 4776 +f 4648 4649 4777 +f 4649 4778 4777 +f 4649 4650 4779 +f 4649 4779 4778 +f 4650 4651 4779 +f 4651 4780 4779 +f 4651 4652 4781 +f 4651 4781 4780 +f 4652 4653 4781 +f 4653 4782 4781 +f 4653 4654 4783 +f 4653 4783 4782 +f 4654 4655 4783 +f 4655 4784 4783 +f 4655 4656 4785 +f 4655 4785 4784 +f 4656 4657 4785 +f 4657 4786 4785 +f 4657 4658 4787 +f 4657 4787 4786 +f 4658 4659 4787 +f 4659 4788 4787 +f 4659 4660 4789 +f 4659 4789 4788 +f 4660 4661 4789 +f 4661 4790 4789 +f 4661 4662 4791 +f 4661 4791 4790 +f 4662 4663 4791 +f 4663 4792 4791 +f 4663 4664 4793 +f 4663 4793 4792 +f 4664 4665 4793 +f 4665 4794 4793 +f 4665 4666 4795 +f 4665 4795 4794 +f 4666 4667 4795 +f 4667 4796 4795 +f 4667 4668 4797 +f 4667 4797 4796 +f 4668 4669 4797 +f 4669 4798 4797 +f 4669 4670 4799 +f 4669 4799 4798 +f 4670 4671 4799 +f 4671 4800 4799 +f 4671 4672 4801 +f 4671 4801 4800 +f 4672 4673 4801 +f 4673 4802 4801 +f 4673 4674 4803 +f 4673 4803 4802 +f 4674 4675 4803 +f 4675 4804 4803 +f 4675 4676 4805 +f 4675 4805 4804 +f 4676 4677 4805 +f 4677 4806 4805 +f 4677 4678 4807 +f 4677 4807 4806 +f 4678 4679 4807 +f 4679 4808 4807 +f 4679 4680 4809 +f 4679 4809 4808 +f 4680 4681 4809 +f 4681 4810 4809 +f 4681 4682 4811 +f 4681 4811 4810 +f 4682 4683 4811 +f 4683 4812 4811 +f 4683 4684 4813 +f 4683 4813 4812 +f 4684 4685 4813 +f 4685 4814 4813 +f 4685 4686 4815 +f 4685 4815 4814 +f 4686 4687 4815 +f 4687 4816 4815 +f 4687 4688 4817 +f 4687 4817 4816 +f 4688 4689 4817 +f 4689 4818 4817 +f 4689 4690 4819 +f 4689 4819 4818 +f 4690 4691 4819 +f 4691 4820 4819 +f 4691 4692 4821 +f 4691 4821 4820 +f 4692 
4693 4821 +f 4693 4822 4821 +f 4693 4694 4823 +f 4693 4823 4822 +f 4694 4695 4823 +f 4695 4824 4823 +f 4695 4696 4825 +f 4695 4825 4824 +f 4696 4697 4825 +f 4697 4826 4825 +f 4697 4698 4827 +f 4697 4827 4826 +f 4698 4699 4827 +f 4699 4828 4827 +f 4699 4700 4829 +f 4699 4829 4828 +f 4700 4701 4829 +f 4701 4830 4829 +f 4701 4702 4831 +f 4701 4831 4830 +f 4702 4703 4831 +f 4703 4832 4831 +f 4703 4704 4833 +f 4703 4833 4832 +f 4704 4705 4833 +f 4705 4834 4833 +f 4705 4706 4835 +f 4705 4835 4834 +f 4706 4707 4835 +f 4707 4836 4835 +f 4707 4708 4837 +f 4707 4837 4836 +f 4708 4709 4837 +f 4709 4838 4837 +f 4709 4710 4839 +f 4709 4839 4838 +f 4710 4711 4839 +f 4711 4840 4839 +f 4711 4712 4841 +f 4711 4841 4840 +f 4712 4713 4841 +f 4713 4842 4841 +f 4713 4714 4843 +f 4713 4843 4842 +f 4714 4715 4843 +f 4715 4844 4843 +f 4715 4716 4845 +f 4715 4845 4844 +f 4716 4717 4845 +f 4717 4846 4845 +f 4717 4718 4847 +f 4717 4847 4846 +f 4718 4719 4847 +f 4719 4848 4847 +f 4719 4720 4849 +f 4719 4849 4848 +f 4720 4721 4849 +f 4721 4850 4849 +f 4721 4722 4851 +f 4721 4851 4850 +f 4722 4723 4851 +f 4723 4852 4851 +f 4723 4724 4853 +f 4723 4853 4852 +f 4724 4725 4853 +f 4725 4854 4853 +f 4725 4726 4855 +f 4725 4855 4854 +f 4726 4727 4855 +f 4727 4856 4855 +f 4727 4728 4857 +f 4727 4857 4856 +f 4728 4729 4857 +f 4729 4858 4857 +f 4729 4730 4859 +f 4729 4859 4858 +f 4730 4731 4859 +f 4731 4860 4859 +f 4731 4732 4861 +f 4731 4861 4860 +f 4732 4733 4861 +f 4733 4862 4861 +f 4733 4734 4863 +f 4733 4863 4862 +f 4734 4735 4863 +f 4735 4864 4863 +f 4735 4736 4865 +f 4735 4865 4864 +f 4736 4737 4865 +f 4737 4866 4865 +f 4737 4738 4867 +f 4737 4867 4866 +f 4738 4739 4867 +f 4739 4868 4867 +f 4739 4740 4869 +f 4739 4869 4868 +f 4740 4741 4869 +f 4741 4870 4869 +f 4741 4742 4871 +f 4741 4871 4870 +f 4742 4743 4871 +f 4743 4872 4871 +f 4743 4744 4873 +f 4743 4873 4872 +f 4744 4745 4873 +f 4745 4874 4873 +f 4745 4746 4875 +f 4745 4875 4874 +f 4746 4747 4875 +f 4747 4876 4875 +f 4747 4748 4877 +f 4747 4877 4876 +f 4748 4749 4877 +f 4749 4878 4877 +f 4749 4750 4879 +f 4749 4879 4878 +f 4750 4751 4879 +f 4751 4880 4879 +f 4751 4752 4881 +f 4751 4881 4880 +f 4752 4753 4881 +f 4753 4882 4881 +f 4753 4754 4883 +f 4753 4883 4882 +f 4754 4755 4883 +f 4755 4884 4883 +f 4755 4756 4885 +f 4755 4885 4884 +f 4756 4757 4885 +f 4757 4886 4885 +f 4757 4758 4887 +f 4757 4887 4886 +f 4758 4759 4887 +f 4759 4888 4887 +f 4759 4760 4889 +f 4759 4889 4888 +f 4760 4761 4889 +f 4761 4890 4889 +f 4761 4762 4891 +f 4761 4891 4890 +f 4762 4763 4891 +f 4763 4892 4891 +f 4763 4764 4893 +f 4763 4893 4892 +f 4764 4765 4893 +f 4765 4894 4893 +f 4765 4766 4895 +f 4765 4895 4894 +f 4766 4767 4895 +f 4767 4896 4895 +f 4767 4768 4897 +f 4767 4897 4896 +f 4768 4769 4897 +f 4769 4898 4897 +f 4769 4770 4899 +f 4769 4899 4898 +f 4770 4771 4899 +f 4771 4900 4899 +f 4771 4772 4901 +f 4771 4901 4900 +f 4772 4773 4901 +f 4773 4902 4901 +f 4774 4775 4903 +f 4775 4904 4903 +f 4775 4776 4905 +f 4775 4905 4904 +f 4776 4777 4905 +f 4777 4906 4905 +f 4777 4778 4907 +f 4777 4907 4906 +f 4778 4779 4907 +f 4779 4908 4907 +f 4779 4780 4909 +f 4779 4909 4908 +f 4780 4781 4909 +f 4781 4910 4909 +f 4781 4782 4911 +f 4781 4911 4910 +f 4782 4783 4911 +f 4783 4912 4911 +f 4783 4784 4913 +f 4783 4913 4912 +f 4784 4785 4913 +f 4785 4914 4913 +f 4785 4786 4915 +f 4785 4915 4914 +f 4786 4787 4915 +f 4787 4916 4915 +f 4787 4788 4917 +f 4787 4917 4916 +f 4788 4789 4917 +f 4789 4918 4917 +f 4789 4790 4919 +f 4789 4919 4918 +f 4790 4791 4919 +f 4791 4920 4919 +f 4791 4792 4921 +f 4791 4921 
4920 +f 4792 4793 4921 +f 4793 4922 4921 +f 4793 4794 4923 +f 4793 4923 4922 +f 4794 4795 4923 +f 4795 4924 4923 +f 4795 4796 4925 +f 4795 4925 4924 +f 4796 4797 4925 +f 4797 4926 4925 +f 4797 4798 4927 +f 4797 4927 4926 +f 4798 4799 4927 +f 4799 4928 4927 +f 4799 4800 4929 +f 4799 4929 4928 +f 4800 4801 4929 +f 4801 4930 4929 +f 4801 4802 4931 +f 4801 4931 4930 +f 4802 4803 4931 +f 4803 4932 4931 +f 4803 4804 4933 +f 4803 4933 4932 +f 4804 4805 4933 +f 4805 4934 4933 +f 4805 4806 4935 +f 4805 4935 4934 +f 4806 4807 4935 +f 4807 4936 4935 +f 4807 4808 4937 +f 4807 4937 4936 +f 4808 4809 4937 +f 4809 4938 4937 +f 4809 4810 4939 +f 4809 4939 4938 +f 4810 4811 4939 +f 4811 4940 4939 +f 4811 4812 4941 +f 4811 4941 4940 +f 4812 4813 4941 +f 4813 4942 4941 +f 4813 4814 4943 +f 4813 4943 4942 +f 4814 4815 4943 +f 4815 4944 4943 +f 4815 4816 4945 +f 4815 4945 4944 +f 4816 4817 4945 +f 4817 4946 4945 +f 4817 4818 4947 +f 4817 4947 4946 +f 4818 4819 4947 +f 4819 4948 4947 +f 4819 4820 4949 +f 4819 4949 4948 +f 4820 4821 4949 +f 4821 4950 4949 +f 4821 4822 4951 +f 4821 4951 4950 +f 4822 4823 4951 +f 4823 4952 4951 +f 4823 4824 4953 +f 4823 4953 4952 +f 4824 4825 4953 +f 4825 4954 4953 +f 4825 4826 4955 +f 4825 4955 4954 +f 4826 4827 4955 +f 4827 4956 4955 +f 4827 4828 4957 +f 4827 4957 4956 +f 4828 4829 4957 +f 4829 4958 4957 +f 4829 4830 4959 +f 4829 4959 4958 +f 4830 4831 4959 +f 4831 4960 4959 +f 4831 4832 4961 +f 4831 4961 4960 +f 4832 4833 4961 +f 4833 4962 4961 +f 4833 4834 4963 +f 4833 4963 4962 +f 4834 4835 4963 +f 4835 4964 4963 +f 4835 4836 4965 +f 4835 4965 4964 +f 4836 4837 4965 +f 4837 4966 4965 +f 4837 4838 4967 +f 4837 4967 4966 +f 4838 4839 4967 +f 4839 4968 4967 +f 4839 4840 4969 +f 4839 4969 4968 +f 4840 4841 4969 +f 4841 4970 4969 +f 4841 4842 4971 +f 4841 4971 4970 +f 4842 4843 4971 +f 4843 4972 4971 +f 4843 4844 4973 +f 4843 4973 4972 +f 4844 4845 4973 +f 4845 4974 4973 +f 4845 4846 4975 +f 4845 4975 4974 +f 4846 4847 4975 +f 4847 4976 4975 +f 4847 4848 4977 +f 4847 4977 4976 +f 4848 4849 4977 +f 4849 4978 4977 +f 4849 4850 4979 +f 4849 4979 4978 +f 4850 4851 4979 +f 4851 4980 4979 +f 4851 4852 4981 +f 4851 4981 4980 +f 4852 4853 4981 +f 4853 4982 4981 +f 4853 4854 4983 +f 4853 4983 4982 +f 4854 4855 4983 +f 4855 4984 4983 +f 4855 4856 4985 +f 4855 4985 4984 +f 4856 4857 4985 +f 4857 4986 4985 +f 4857 4858 4987 +f 4857 4987 4986 +f 4858 4859 4987 +f 4859 4988 4987 +f 4859 4860 4989 +f 4859 4989 4988 +f 4860 4861 4989 +f 4861 4990 4989 +f 4861 4862 4991 +f 4861 4991 4990 +f 4862 4863 4991 +f 4863 4992 4991 +f 4863 4864 4993 +f 4863 4993 4992 +f 4864 4865 4993 +f 4865 4994 4993 +f 4865 4866 4995 +f 4865 4995 4994 +f 4866 4867 4995 +f 4867 4996 4995 +f 4867 4868 4997 +f 4867 4997 4996 +f 4868 4869 4997 +f 4869 4998 4997 +f 4869 4870 4999 +f 4869 4999 4998 +f 4870 4871 4999 +f 4871 5000 4999 +f 4871 4872 5001 +f 4871 5001 5000 +f 4872 4873 5001 +f 4873 5002 5001 +f 4873 4874 5003 +f 4873 5003 5002 +f 4874 4875 5003 +f 4875 5004 5003 +f 4875 4876 5005 +f 4875 5005 5004 +f 4876 4877 5005 +f 4877 5006 5005 +f 4877 4878 5007 +f 4877 5007 5006 +f 4878 4879 5007 +f 4879 5008 5007 +f 4879 4880 5009 +f 4879 5009 5008 +f 4880 4881 5009 +f 4881 5010 5009 +f 4881 4882 5011 +f 4881 5011 5010 +f 4882 4883 5011 +f 4883 5012 5011 +f 4883 4884 5013 +f 4883 5013 5012 +f 4884 4885 5013 +f 4885 5014 5013 +f 4885 4886 5015 +f 4885 5015 5014 +f 4886 4887 5015 +f 4887 5016 5015 +f 4887 4888 5017 +f 4887 5017 5016 +f 4888 4889 5017 +f 4889 5018 5017 +f 4889 4890 5019 +f 4889 5019 5018 +f 4890 4891 5019 +f 
4891 5020 5019 +f 4891 4892 5021 +f 4891 5021 5020 +f 4892 4893 5021 +f 4893 5022 5021 +f 4893 4894 5023 +f 4893 5023 5022 +f 4894 4895 5023 +f 4895 5024 5023 +f 4895 4896 5025 +f 4895 5025 5024 +f 4896 4897 5025 +f 4897 5026 5025 +f 4897 4898 5027 +f 4897 5027 5026 +f 4898 4899 5027 +f 4899 5028 5027 +f 4899 4900 5029 +f 4899 5029 5028 +f 4900 4901 5029 +f 4901 5030 5029 +f 4901 4902 5031 +f 4901 5031 5030 +f 4903 4904 5033 +f 4903 5033 5032 +f 4904 4905 5033 +f 4905 5034 5033 +f 4905 4906 5035 +f 4905 5035 5034 +f 4906 4907 5035 +f 4907 5036 5035 +f 4907 4908 5037 +f 4907 5037 5036 +f 4908 4909 5037 +f 4909 5038 5037 +f 4909 4910 5039 +f 4909 5039 5038 +f 4910 4911 5039 +f 4911 5040 5039 +f 4911 4912 5041 +f 4911 5041 5040 +f 4912 4913 5041 +f 4913 5042 5041 +f 4913 4914 5043 +f 4913 5043 5042 +f 4914 4915 5043 +f 4915 5044 5043 +f 4915 4916 5045 +f 4915 5045 5044 +f 4916 4917 5045 +f 4917 5046 5045 +f 4917 4918 5047 +f 4917 5047 5046 +f 4918 4919 5047 +f 4919 5048 5047 +f 4919 4920 5049 +f 4919 5049 5048 +f 4920 4921 5049 +f 4921 5050 5049 +f 4921 4922 5051 +f 4921 5051 5050 +f 4922 4923 5051 +f 4923 5052 5051 +f 4923 4924 5053 +f 4923 5053 5052 +f 4924 4925 5053 +f 4925 5054 5053 +f 4925 4926 5055 +f 4925 5055 5054 +f 4926 4927 5055 +f 4927 5056 5055 +f 4927 4928 5057 +f 4927 5057 5056 +f 4928 4929 5057 +f 4929 5058 5057 +f 4929 4930 5059 +f 4929 5059 5058 +f 4930 4931 5059 +f 4931 5060 5059 +f 4931 4932 5061 +f 4931 5061 5060 +f 4932 4933 5061 +f 4933 5062 5061 +f 4933 4934 5063 +f 4933 5063 5062 +f 4934 4935 5063 +f 4935 5064 5063 +f 4935 4936 5065 +f 4935 5065 5064 +f 4936 4937 5065 +f 4937 5066 5065 +f 4937 4938 5067 +f 4937 5067 5066 +f 4938 4939 5067 +f 4939 5068 5067 +f 4939 4940 5069 +f 4939 5069 5068 +f 4940 4941 5069 +f 4941 5070 5069 +f 4941 4942 5071 +f 4941 5071 5070 +f 4942 4943 5071 +f 4943 5072 5071 +f 4943 4944 5073 +f 4943 5073 5072 +f 4944 4945 5073 +f 4945 5074 5073 +f 4945 4946 5075 +f 4945 5075 5074 +f 4946 4947 5075 +f 4947 5076 5075 +f 4947 4948 5077 +f 4947 5077 5076 +f 4948 4949 5077 +f 4949 5078 5077 +f 4949 4950 5079 +f 4949 5079 5078 +f 4950 4951 5079 +f 4951 5080 5079 +f 4951 4952 5081 +f 4951 5081 5080 +f 4952 4953 5081 +f 4953 5082 5081 +f 4953 4954 5083 +f 4953 5083 5082 +f 4954 4955 5083 +f 4955 5084 5083 +f 4955 4956 5085 +f 4955 5085 5084 +f 4956 4957 5085 +f 4957 5086 5085 +f 4957 4958 5087 +f 4957 5087 5086 +f 4958 4959 5087 +f 4959 5088 5087 +f 4959 4960 5089 +f 4959 5089 5088 +f 4960 4961 5089 +f 4961 5090 5089 +f 4961 4962 5091 +f 4961 5091 5090 +f 4962 4963 5091 +f 4963 5092 5091 +f 4963 4964 5093 +f 4963 5093 5092 +f 4964 4965 5093 +f 4965 5094 5093 +f 4965 4966 5095 +f 4965 5095 5094 +f 4966 4967 5095 +f 4967 5096 5095 +f 4967 4968 5097 +f 4967 5097 5096 +f 4968 4969 5097 +f 4969 5098 5097 +f 4969 4970 5099 +f 4969 5099 5098 +f 4970 4971 5099 +f 4971 5100 5099 +f 4971 4972 5101 +f 4971 5101 5100 +f 4972 4973 5101 +f 4973 5102 5101 +f 4973 4974 5103 +f 4973 5103 5102 +f 4974 4975 5103 +f 4975 5104 5103 +f 4975 4976 5105 +f 4975 5105 5104 +f 4976 4977 5105 +f 4977 5106 5105 +f 4977 4978 5107 +f 4977 5107 5106 +f 4978 4979 5107 +f 4979 5108 5107 +f 4979 4980 5109 +f 4979 5109 5108 +f 4980 4981 5109 +f 4981 5110 5109 +f 4981 4982 5111 +f 4981 5111 5110 +f 4982 4983 5111 +f 4983 5112 5111 +f 4983 4984 5113 +f 4983 5113 5112 +f 4984 4985 5113 +f 4985 5114 5113 +f 4985 4986 5115 +f 4985 5115 5114 +f 4986 4987 5115 +f 4987 5116 5115 +f 4987 4988 5117 +f 4987 5117 5116 +f 4988 4989 5117 +f 4989 5118 5117 +f 4989 4990 5119 +f 4989 5119 5118 +f 4990 
4991 5119 +f 4991 5120 5119 +f 4991 4992 5121 +f 4991 5121 5120 +f 4992 4993 5121 +f 4993 5122 5121 +f 4993 4994 5123 +f 4993 5123 5122 +f 4994 4995 5123 +f 4995 5124 5123 +f 4995 4996 5125 +f 4995 5125 5124 +f 4996 4997 5125 +f 4997 5126 5125 +f 4997 4998 5127 +f 4997 5127 5126 +f 4998 4999 5127 +f 4999 5128 5127 +f 4999 5000 5129 +f 4999 5129 5128 +f 5000 5001 5129 +f 5001 5130 5129 +f 5001 5002 5131 +f 5001 5131 5130 +f 5002 5003 5131 +f 5003 5132 5131 +f 5003 5004 5133 +f 5003 5133 5132 +f 5004 5005 5133 +f 5005 5134 5133 +f 5005 5006 5135 +f 5005 5135 5134 +f 5006 5007 5135 +f 5007 5136 5135 +f 5007 5008 5137 +f 5007 5137 5136 +f 5008 5009 5137 +f 5009 5138 5137 +f 5009 5010 5139 +f 5009 5139 5138 +f 5010 5011 5139 +f 5011 5140 5139 +f 5011 5012 5141 +f 5011 5141 5140 +f 5012 5013 5141 +f 5013 5142 5141 +f 5013 5014 5143 +f 5013 5143 5142 +f 5014 5015 5143 +f 5015 5144 5143 +f 5015 5016 5145 +f 5015 5145 5144 +f 5016 5017 5145 +f 5017 5146 5145 +f 5017 5018 5147 +f 5017 5147 5146 +f 5018 5019 5147 +f 5019 5148 5147 +f 5019 5020 5149 +f 5019 5149 5148 +f 5020 5021 5149 +f 5021 5150 5149 +f 5021 5022 5151 +f 5021 5151 5150 +f 5022 5023 5151 +f 5023 5152 5151 +f 5023 5024 5153 +f 5023 5153 5152 +f 5024 5025 5153 +f 5025 5154 5153 +f 5025 5026 5155 +f 5025 5155 5154 +f 5026 5027 5155 +f 5027 5156 5155 +f 5027 5028 5157 +f 5027 5157 5156 +f 5028 5029 5157 +f 5029 5158 5157 +f 5029 5030 5159 +f 5029 5159 5158 +f 5030 5031 5159 +f 5031 5160 5159 +f 5032 5033 5161 +f 5033 5162 5161 +f 5033 5034 5163 +f 5033 5163 5162 +f 5034 5035 5163 +f 5035 5164 5163 +f 5035 5036 5165 +f 5035 5165 5164 +f 5036 5037 5165 +f 5037 5166 5165 +f 5037 5038 5167 +f 5037 5167 5166 +f 5038 5039 5167 +f 5039 5168 5167 +f 5039 5040 5169 +f 5039 5169 5168 +f 5040 5041 5169 +f 5041 5170 5169 +f 5041 5042 5171 +f 5041 5171 5170 +f 5042 5043 5171 +f 5043 5172 5171 +f 5043 5044 5173 +f 5043 5173 5172 +f 5044 5045 5173 +f 5045 5174 5173 +f 5045 5046 5175 +f 5045 5175 5174 +f 5046 5047 5175 +f 5047 5176 5175 +f 5047 5048 5177 +f 5047 5177 5176 +f 5048 5049 5177 +f 5049 5178 5177 +f 5049 5050 5179 +f 5049 5179 5178 +f 5050 5051 5179 +f 5051 5180 5179 +f 5051 5052 5181 +f 5051 5181 5180 +f 5052 5053 5181 +f 5053 5182 5181 +f 5053 5054 5183 +f 5053 5183 5182 +f 5054 5055 5183 +f 5055 5184 5183 +f 5055 5056 5185 +f 5055 5185 5184 +f 5056 5057 5185 +f 5057 5186 5185 +f 5057 5058 5187 +f 5057 5187 5186 +f 5058 5059 5187 +f 5059 5188 5187 +f 5059 5060 5189 +f 5059 5189 5188 +f 5060 5061 5189 +f 5061 5190 5189 +f 5061 5062 5191 +f 5061 5191 5190 +f 5062 5063 5191 +f 5063 5192 5191 +f 5063 5064 5193 +f 5063 5193 5192 +f 5064 5065 5193 +f 5065 5194 5193 +f 5065 5066 5195 +f 5065 5195 5194 +f 5066 5067 5195 +f 5067 5196 5195 +f 5067 5068 5197 +f 5067 5197 5196 +f 5068 5069 5197 +f 5069 5198 5197 +f 5069 5070 5199 +f 5069 5199 5198 +f 5070 5071 5199 +f 5071 5200 5199 +f 5071 5072 5201 +f 5071 5201 5200 +f 5072 5073 5201 +f 5073 5202 5201 +f 5073 5074 5203 +f 5073 5203 5202 +f 5074 5075 5203 +f 5075 5204 5203 +f 5075 5076 5205 +f 5075 5205 5204 +f 5076 5077 5205 +f 5077 5206 5205 +f 5077 5078 5207 +f 5077 5207 5206 +f 5078 5079 5207 +f 5079 5208 5207 +f 5079 5080 5209 +f 5079 5209 5208 +f 5080 5081 5209 +f 5081 5210 5209 +f 5081 5082 5211 +f 5081 5211 5210 +f 5082 5083 5211 +f 5083 5212 5211 +f 5083 5084 5213 +f 5083 5213 5212 +f 5084 5085 5213 +f 5085 5214 5213 +f 5085 5086 5215 +f 5085 5215 5214 +f 5086 5087 5215 +f 5087 5216 5215 +f 5087 5088 5217 +f 5087 5217 5216 +f 5088 5089 5217 +f 5089 5218 5217 +f 5089 5090 5219 +f 5089 5219 
5218 +f 5090 5091 5219 +f 5091 5220 5219 +f 5091 5092 5221 +f 5091 5221 5220 +f 5092 5093 5221 +f 5093 5222 5221 +f 5093 5094 5223 +f 5093 5223 5222 +f 5094 5095 5223 +f 5095 5224 5223 +f 5095 5096 5225 +f 5095 5225 5224 +f 5096 5097 5225 +f 5097 5226 5225 +f 5097 5098 5227 +f 5097 5227 5226 +f 5098 5099 5227 +f 5099 5228 5227 +f 5099 5100 5229 +f 5099 5229 5228 +f 5100 5101 5229 +f 5101 5230 5229 +f 5101 5102 5231 +f 5101 5231 5230 +f 5102 5103 5231 +f 5103 5232 5231 +f 5103 5104 5233 +f 5103 5233 5232 +f 5104 5105 5233 +f 5105 5234 5233 +f 5105 5106 5235 +f 5105 5235 5234 +f 5106 5107 5235 +f 5107 5236 5235 +f 5107 5108 5237 +f 5107 5237 5236 +f 5108 5109 5237 +f 5109 5238 5237 +f 5109 5110 5239 +f 5109 5239 5238 +f 5110 5111 5239 +f 5111 5240 5239 +f 5111 5112 5241 +f 5111 5241 5240 +f 5112 5113 5241 +f 5113 5242 5241 +f 5113 5114 5243 +f 5113 5243 5242 +f 5114 5115 5243 +f 5115 5244 5243 +f 5115 5116 5245 +f 5115 5245 5244 +f 5116 5117 5245 +f 5117 5246 5245 +f 5117 5118 5247 +f 5117 5247 5246 +f 5118 5119 5247 +f 5119 5248 5247 +f 5119 5120 5249 +f 5119 5249 5248 +f 5120 5121 5249 +f 5121 5250 5249 +f 5121 5122 5251 +f 5121 5251 5250 +f 5122 5123 5251 +f 5123 5252 5251 +f 5123 5124 5253 +f 5123 5253 5252 +f 5124 5125 5253 +f 5125 5254 5253 +f 5125 5126 5255 +f 5125 5255 5254 +f 5126 5127 5255 +f 5127 5256 5255 +f 5127 5128 5257 +f 5127 5257 5256 +f 5128 5129 5257 +f 5129 5258 5257 +f 5129 5130 5259 +f 5129 5259 5258 +f 5130 5131 5259 +f 5131 5260 5259 +f 5131 5132 5261 +f 5131 5261 5260 +f 5132 5133 5261 +f 5133 5262 5261 +f 5133 5134 5263 +f 5133 5263 5262 +f 5134 5135 5263 +f 5135 5264 5263 +f 5135 5136 5265 +f 5135 5265 5264 +f 5136 5137 5265 +f 5137 5266 5265 +f 5137 5138 5267 +f 5137 5267 5266 +f 5138 5139 5267 +f 5139 5268 5267 +f 5139 5140 5269 +f 5139 5269 5268 +f 5140 5141 5269 +f 5141 5270 5269 +f 5141 5142 5271 +f 5141 5271 5270 +f 5142 5143 5271 +f 5143 5272 5271 +f 5143 5144 5273 +f 5143 5273 5272 +f 5144 5145 5273 +f 5145 5274 5273 +f 5145 5146 5275 +f 5145 5275 5274 +f 5146 5147 5275 +f 5147 5276 5275 +f 5147 5148 5277 +f 5147 5277 5276 +f 5148 5149 5277 +f 5149 5278 5277 +f 5149 5150 5279 +f 5149 5279 5278 +f 5150 5151 5279 +f 5151 5280 5279 +f 5151 5152 5281 +f 5151 5281 5280 +f 5152 5153 5281 +f 5153 5282 5281 +f 5153 5154 5283 +f 5153 5283 5282 +f 5154 5155 5283 +f 5155 5284 5283 +f 5155 5156 5285 +f 5155 5285 5284 +f 5156 5157 5285 +f 5157 5286 5285 +f 5157 5158 5287 +f 5157 5287 5286 +f 5158 5159 5287 +f 5159 5288 5287 +f 5159 5160 5289 +f 5159 5289 5288 +f 5161 5162 5291 +f 5161 5291 5290 +f 5162 5163 5291 +f 5163 5292 5291 +f 5163 5164 5293 +f 5163 5293 5292 +f 5164 5165 5293 +f 5165 5294 5293 +f 5165 5166 5295 +f 5165 5295 5294 +f 5166 5167 5295 +f 5167 5296 5295 +f 5167 5168 5297 +f 5167 5297 5296 +f 5168 5169 5297 +f 5169 5298 5297 +f 5169 5170 5299 +f 5169 5299 5298 +f 5170 5171 5299 +f 5171 5300 5299 +f 5171 5172 5301 +f 5171 5301 5300 +f 5172 5173 5301 +f 5173 5302 5301 +f 5173 5174 5303 +f 5173 5303 5302 +f 5174 5175 5303 +f 5175 5304 5303 +f 5175 5176 5305 +f 5175 5305 5304 +f 5176 5177 5305 +f 5177 5306 5305 +f 5177 5178 5307 +f 5177 5307 5306 +f 5178 5179 5307 +f 5179 5308 5307 +f 5179 5180 5309 +f 5179 5309 5308 +f 5180 5181 5309 +f 5181 5310 5309 +f 5181 5182 5311 +f 5181 5311 5310 +f 5182 5183 5311 +f 5183 5312 5311 +f 5183 5184 5313 +f 5183 5313 5312 +f 5184 5185 5313 +f 5185 5314 5313 +f 5185 5186 5315 +f 5185 5315 5314 +f 5186 5187 5315 +f 5187 5316 5315 +f 5187 5188 5317 +f 5187 5317 5316 +f 5188 5189 5317 +f 5189 5318 5317 +f 5189 5190 5319 +f 
5189 5319 5318 +f 5190 5191 5319 +f 5191 5320 5319 +f 5191 5192 5321 +f 5191 5321 5320 +f 5192 5193 5321 +f 5193 5322 5321 +f 5193 5194 5323 +f 5193 5323 5322 +f 5194 5195 5323 +f 5195 5324 5323 +f 5195 5196 5325 +f 5195 5325 5324 +f 5196 5197 5325 +f 5197 5326 5325 +f 5197 5198 5327 +f 5197 5327 5326 +f 5198 5199 5327 +f 5199 5328 5327 +f 5199 5200 5329 +f 5199 5329 5328 +f 5200 5201 5329 +f 5201 5330 5329 +f 5201 5202 5331 +f 5201 5331 5330 +f 5202 5203 5331 +f 5203 5332 5331 +f 5203 5204 5333 +f 5203 5333 5332 +f 5204 5205 5333 +f 5205 5334 5333 +f 5205 5206 5335 +f 5205 5335 5334 +f 5206 5207 5335 +f 5207 5336 5335 +f 5207 5208 5337 +f 5207 5337 5336 +f 5208 5209 5337 +f 5209 5338 5337 +f 5209 5210 5339 +f 5209 5339 5338 +f 5210 5211 5339 +f 5211 5340 5339 +f 5211 5212 5341 +f 5211 5341 5340 +f 5212 5213 5341 +f 5213 5342 5341 +f 5213 5214 5343 +f 5213 5343 5342 +f 5214 5215 5343 +f 5215 5344 5343 +f 5215 5216 5345 +f 5215 5345 5344 +f 5216 5217 5345 +f 5217 5346 5345 +f 5217 5218 5347 +f 5217 5347 5346 +f 5218 5219 5347 +f 5219 5348 5347 +f 5219 5220 5349 +f 5219 5349 5348 +f 5220 5221 5349 +f 5221 5350 5349 +f 5221 5222 5351 +f 5221 5351 5350 +f 5222 5223 5351 +f 5223 5352 5351 +f 5223 5224 5353 +f 5223 5353 5352 +f 5224 5225 5353 +f 5225 5354 5353 +f 5225 5226 5355 +f 5225 5355 5354 +f 5226 5227 5355 +f 5227 5356 5355 +f 5227 5228 5357 +f 5227 5357 5356 +f 5228 5229 5357 +f 5229 5358 5357 +f 5229 5230 5359 +f 5229 5359 5358 +f 5230 5231 5359 +f 5231 5360 5359 +f 5231 5232 5361 +f 5231 5361 5360 +f 5232 5233 5361 +f 5233 5362 5361 +f 5233 5234 5363 +f 5233 5363 5362 +f 5234 5235 5363 +f 5235 5364 5363 +f 5235 5236 5365 +f 5235 5365 5364 +f 5236 5237 5365 +f 5237 5366 5365 +f 5237 5238 5367 +f 5237 5367 5366 +f 5238 5239 5367 +f 5239 5368 5367 +f 5239 5240 5369 +f 5239 5369 5368 +f 5240 5241 5369 +f 5241 5370 5369 +f 5241 5242 5371 +f 5241 5371 5370 +f 5242 5243 5371 +f 5243 5372 5371 +f 5243 5244 5373 +f 5243 5373 5372 +f 5244 5245 5373 +f 5245 5374 5373 +f 5245 5246 5375 +f 5245 5375 5374 +f 5246 5247 5375 +f 5247 5376 5375 +f 5247 5248 5377 +f 5247 5377 5376 +f 5248 5249 5377 +f 5249 5378 5377 +f 5249 5250 5379 +f 5249 5379 5378 +f 5250 5251 5379 +f 5251 5380 5379 +f 5251 5252 5381 +f 5251 5381 5380 +f 5252 5253 5381 +f 5253 5382 5381 +f 5253 5254 5383 +f 5253 5383 5382 +f 5254 5255 5383 +f 5255 5384 5383 +f 5255 5256 5385 +f 5255 5385 5384 +f 5256 5257 5385 +f 5257 5386 5385 +f 5257 5258 5387 +f 5257 5387 5386 +f 5258 5259 5387 +f 5259 5388 5387 +f 5259 5260 5389 +f 5259 5389 5388 +f 5260 5261 5389 +f 5261 5390 5389 +f 5261 5262 5391 +f 5261 5391 5390 +f 5262 5263 5391 +f 5263 5392 5391 +f 5263 5264 5393 +f 5263 5393 5392 +f 5264 5265 5393 +f 5265 5394 5393 +f 5265 5266 5395 +f 5265 5395 5394 +f 5266 5267 5395 +f 5267 5396 5395 +f 5267 5268 5397 +f 5267 5397 5396 +f 5268 5269 5397 +f 5269 5398 5397 +f 5269 5270 5399 +f 5269 5399 5398 +f 5270 5271 5399 +f 5271 5400 5399 +f 5271 5272 5401 +f 5271 5401 5400 +f 5272 5273 5401 +f 5273 5402 5401 +f 5273 5274 5403 +f 5273 5403 5402 +f 5274 5275 5403 +f 5275 5404 5403 +f 5275 5276 5405 +f 5275 5405 5404 +f 5276 5277 5405 +f 5277 5406 5405 +f 5277 5278 5407 +f 5277 5407 5406 +f 5278 5279 5407 +f 5279 5408 5407 +f 5279 5280 5409 +f 5279 5409 5408 +f 5280 5281 5409 +f 5281 5410 5409 +f 5281 5282 5411 +f 5281 5411 5410 +f 5282 5283 5411 +f 5283 5412 5411 +f 5283 5284 5413 +f 5283 5413 5412 +f 5284 5285 5413 +f 5285 5414 5413 +f 5285 5286 5415 +f 5285 5415 5414 +f 5286 5287 5415 +f 5287 5416 5415 +f 5287 5288 5417 +f 5287 5417 5416 +f 5288 
5289 5417 +f 5289 5418 5417 +f 5290 5291 5419 +f 5291 5420 5419 +f 5291 5292 5421 +f 5291 5421 5420 +f 5292 5293 5421 +f 5293 5422 5421 +f 5293 5294 5423 +f 5293 5423 5422 +f 5294 5295 5423 +f 5295 5424 5423 +f 5295 5296 5425 +f 5295 5425 5424 +f 5296 5297 5425 +f 5297 5426 5425 +f 5297 5298 5427 +f 5297 5427 5426 +f 5298 5299 5427 +f 5299 5428 5427 +f 5299 5300 5429 +f 5299 5429 5428 +f 5300 5301 5429 +f 5301 5430 5429 +f 5301 5302 5431 +f 5301 5431 5430 +f 5302 5303 5431 +f 5303 5432 5431 +f 5303 5304 5433 +f 5303 5433 5432 +f 5304 5305 5433 +f 5305 5434 5433 +f 5305 5306 5435 +f 5305 5435 5434 +f 5306 5307 5435 +f 5307 5436 5435 +f 5307 5308 5437 +f 5307 5437 5436 +f 5308 5309 5437 +f 5309 5438 5437 +f 5309 5310 5439 +f 5309 5439 5438 +f 5310 5311 5439 +f 5311 5440 5439 +f 5311 5312 5441 +f 5311 5441 5440 +f 5312 5313 5441 +f 5313 5442 5441 +f 5313 5314 5443 +f 5313 5443 5442 +f 5314 5315 5443 +f 5315 5444 5443 +f 5315 5316 5445 +f 5315 5445 5444 +f 5316 5317 5445 +f 5317 5446 5445 +f 5317 5318 5447 +f 5317 5447 5446 +f 5318 5319 5447 +f 5319 5448 5447 +f 5319 5320 5449 +f 5319 5449 5448 +f 5320 5321 5449 +f 5321 5450 5449 +f 5321 5322 5451 +f 5321 5451 5450 +f 5322 5323 5451 +f 5323 5452 5451 +f 5323 5324 5453 +f 5323 5453 5452 +f 5324 5325 5453 +f 5325 5454 5453 +f 5325 5326 5455 +f 5325 5455 5454 +f 5326 5327 5455 +f 5327 5456 5455 +f 5327 5328 5457 +f 5327 5457 5456 +f 5328 5329 5457 +f 5329 5458 5457 +f 5329 5330 5459 +f 5329 5459 5458 +f 5330 5331 5459 +f 5331 5460 5459 +f 5331 5332 5461 +f 5331 5461 5460 +f 5332 5333 5461 +f 5333 5462 5461 +f 5333 5334 5463 +f 5333 5463 5462 +f 5334 5335 5463 +f 5335 5464 5463 +f 5335 5336 5465 +f 5335 5465 5464 +f 5336 5337 5465 +f 5337 5466 5465 +f 5337 5338 5467 +f 5337 5467 5466 +f 5338 5339 5467 +f 5339 5468 5467 +f 5339 5340 5469 +f 5339 5469 5468 +f 5340 5341 5469 +f 5341 5470 5469 +f 5341 5342 5471 +f 5341 5471 5470 +f 5342 5343 5471 +f 5343 5472 5471 +f 5343 5344 5473 +f 5343 5473 5472 +f 5344 5345 5473 +f 5345 5474 5473 +f 5345 5346 5475 +f 5345 5475 5474 +f 5346 5347 5475 +f 5347 5476 5475 +f 5347 5348 5477 +f 5347 5477 5476 +f 5348 5349 5477 +f 5349 5478 5477 +f 5349 5350 5479 +f 5349 5479 5478 +f 5350 5351 5479 +f 5351 5480 5479 +f 5351 5352 5481 +f 5351 5481 5480 +f 5352 5353 5481 +f 5353 5482 5481 +f 5353 5354 5483 +f 5353 5483 5482 +f 5354 5355 5483 +f 5355 5484 5483 +f 5355 5356 5485 +f 5355 5485 5484 +f 5356 5357 5485 +f 5357 5486 5485 +f 5357 5358 5487 +f 5357 5487 5486 +f 5358 5359 5487 +f 5359 5488 5487 +f 5359 5360 5489 +f 5359 5489 5488 +f 5360 5361 5489 +f 5361 5490 5489 +f 5361 5362 5491 +f 5361 5491 5490 +f 5362 5363 5491 +f 5363 5492 5491 +f 5363 5364 5493 +f 5363 5493 5492 +f 5364 5365 5493 +f 5365 5494 5493 +f 5365 5366 5495 +f 5365 5495 5494 +f 5366 5367 5495 +f 5367 5496 5495 +f 5367 5368 5497 +f 5367 5497 5496 +f 5368 5369 5497 +f 5369 5498 5497 +f 5369 5370 5499 +f 5369 5499 5498 +f 5370 5371 5499 +f 5371 5500 5499 +f 5371 5372 5501 +f 5371 5501 5500 +f 5372 5373 5501 +f 5373 5502 5501 +f 5373 5374 5503 +f 5373 5503 5502 +f 5374 5375 5503 +f 5375 5504 5503 +f 5375 5376 5505 +f 5375 5505 5504 +f 5376 5377 5505 +f 5377 5506 5505 +f 5377 5378 5507 +f 5377 5507 5506 +f 5378 5379 5507 +f 5379 5508 5507 +f 5379 5380 5509 +f 5379 5509 5508 +f 5380 5381 5509 +f 5381 5510 5509 +f 5381 5382 5511 +f 5381 5511 5510 +f 5382 5383 5511 +f 5383 5512 5511 +f 5383 5384 5513 +f 5383 5513 5512 +f 5384 5385 5513 +f 5385 5514 5513 +f 5385 5386 5515 +f 5385 5515 5514 +f 5386 5387 5515 +f 5387 5516 5515 +f 5387 5388 5517 +f 5387 5517 
5516 +f 5388 5389 5517 +f 5389 5518 5517 +f 5389 5390 5519 +f 5389 5519 5518 +f 5390 5391 5519 +f 5391 5520 5519 +f 5391 5392 5521 +f 5391 5521 5520 +f 5392 5393 5521 +f 5393 5522 5521 +f 5393 5394 5523 +f 5393 5523 5522 +f 5394 5395 5523 +f 5395 5524 5523 +f 5395 5396 5525 +f 5395 5525 5524 +f 5396 5397 5525 +f 5397 5526 5525 +f 5397 5398 5527 +f 5397 5527 5526 +f 5398 5399 5527 +f 5399 5528 5527 +f 5399 5400 5529 +f 5399 5529 5528 +f 5400 5401 5529 +f 5401 5530 5529 +f 5401 5402 5531 +f 5401 5531 5530 +f 5402 5403 5531 +f 5403 5532 5531 +f 5403 5404 5533 +f 5403 5533 5532 +f 5404 5405 5533 +f 5405 5534 5533 +f 5405 5406 5535 +f 5405 5535 5534 +f 5406 5407 5535 +f 5407 5536 5535 +f 5407 5408 5537 +f 5407 5537 5536 +f 5408 5409 5537 +f 5409 5538 5537 +f 5409 5410 5539 +f 5409 5539 5538 +f 5410 5411 5539 +f 5411 5540 5539 +f 5411 5412 5541 +f 5411 5541 5540 +f 5412 5413 5541 +f 5413 5542 5541 +f 5413 5414 5543 +f 5413 5543 5542 +f 5414 5415 5543 +f 5415 5544 5543 +f 5415 5416 5545 +f 5415 5545 5544 +f 5416 5417 5545 +f 5417 5546 5545 +f 5417 5418 5547 +f 5417 5547 5546 +f 5419 5420 5549 +f 5419 5549 5548 +f 5420 5421 5549 +f 5421 5550 5549 +f 5421 5422 5551 +f 5421 5551 5550 +f 5422 5423 5551 +f 5423 5552 5551 +f 5423 5424 5553 +f 5423 5553 5552 +f 5424 5425 5553 +f 5425 5554 5553 +f 5425 5426 5555 +f 5425 5555 5554 +f 5426 5427 5555 +f 5427 5556 5555 +f 5427 5428 5557 +f 5427 5557 5556 +f 5428 5429 5557 +f 5429 5558 5557 +f 5429 5430 5559 +f 5429 5559 5558 +f 5430 5431 5559 +f 5431 5560 5559 +f 5431 5432 5561 +f 5431 5561 5560 +f 5432 5433 5561 +f 5433 5562 5561 +f 5433 5434 5563 +f 5433 5563 5562 +f 5434 5435 5563 +f 5435 5564 5563 +f 5435 5436 5565 +f 5435 5565 5564 +f 5436 5437 5565 +f 5437 5566 5565 +f 5437 5438 5567 +f 5437 5567 5566 +f 5438 5439 5567 +f 5439 5568 5567 +f 5439 5440 5569 +f 5439 5569 5568 +f 5440 5441 5569 +f 5441 5570 5569 +f 5441 5442 5571 +f 5441 5571 5570 +f 5442 5443 5571 +f 5443 5572 5571 +f 5443 5444 5573 +f 5443 5573 5572 +f 5444 5445 5573 +f 5445 5574 5573 +f 5445 5446 5575 +f 5445 5575 5574 +f 5446 5447 5575 +f 5447 5576 5575 +f 5447 5448 5577 +f 5447 5577 5576 +f 5448 5449 5577 +f 5449 5578 5577 +f 5449 5450 5579 +f 5449 5579 5578 +f 5450 5451 5579 +f 5451 5580 5579 +f 5451 5452 5581 +f 5451 5581 5580 +f 5452 5453 5581 +f 5453 5582 5581 +f 5453 5454 5583 +f 5453 5583 5582 +f 5454 5455 5583 +f 5455 5584 5583 +f 5455 5456 5585 +f 5455 5585 5584 +f 5456 5457 5585 +f 5457 5586 5585 +f 5457 5458 5587 +f 5457 5587 5586 +f 5458 5459 5587 +f 5459 5588 5587 +f 5459 5460 5589 +f 5459 5589 5588 +f 5460 5461 5589 +f 5461 5590 5589 +f 5461 5462 5591 +f 5461 5591 5590 +f 5462 5463 5591 +f 5463 5592 5591 +f 5463 5464 5593 +f 5463 5593 5592 +f 5464 5465 5593 +f 5465 5594 5593 +f 5465 5466 5595 +f 5465 5595 5594 +f 5466 5467 5595 +f 5467 5596 5595 +f 5467 5468 5597 +f 5467 5597 5596 +f 5468 5469 5597 +f 5469 5598 5597 +f 5469 5470 5599 +f 5469 5599 5598 +f 5470 5471 5599 +f 5471 5600 5599 +f 5471 5472 5601 +f 5471 5601 5600 +f 5472 5473 5601 +f 5473 5602 5601 +f 5473 5474 5603 +f 5473 5603 5602 +f 5474 5475 5603 +f 5475 5604 5603 +f 5475 5476 5605 +f 5475 5605 5604 +f 5476 5477 5605 +f 5477 5606 5605 +f 5477 5478 5607 +f 5477 5607 5606 +f 5478 5479 5607 +f 5479 5608 5607 +f 5479 5480 5609 +f 5479 5609 5608 +f 5480 5481 5609 +f 5481 5610 5609 +f 5481 5482 5611 +f 5481 5611 5610 +f 5482 5483 5611 +f 5483 5612 5611 +f 5483 5484 5613 +f 5483 5613 5612 +f 5484 5485 5613 +f 5485 5614 5613 +f 5485 5486 5615 +f 5485 5615 5614 +f 5486 5487 5615 +f 5487 5616 5615 +f 5487 5488 5617 +f 
5487 5617 5616
+[... mesh face list continues: several thousand further triangulated `f v1 v2 v3` face entries (vertex indices up to 7714), flattened by extraction, elided here ...]
7595 7715 7714 +f 7595 7596 7716 +f 7595 7716 7715 +f 7596 7597 7716 +f 7597 7717 7716 +f 7597 7598 7718 +f 7597 7718 7717 +f 7598 7599 7718 +f 7599 7719 7718 +f 7599 7600 7720 +f 7599 7720 7719 +f 7600 7601 7720 +f 7601 7721 7720 +f 7601 7602 7722 +f 7601 7722 7721 +f 7602 7603 7722 +f 7603 7723 7722 +f 7603 7604 7724 +f 7603 7724 7723 +f 7604 7605 7724 +f 7605 7725 7724 +f 7605 7606 7726 +f 7605 7726 7725 +f 7606 7607 7726 +f 7607 7727 7726 +f 7607 7608 7728 +f 7607 7728 7727 +f 7608 7609 7728 +f 7609 7729 7728 +f 7609 7610 7730 +f 7609 7730 7729 +f 7610 7611 7730 +f 7611 7731 7730 +f 7611 7612 7732 +f 7611 7732 7731 +f 7612 7613 7732 +f 7613 7733 7732 +f 7613 7614 7734 +f 7613 7734 7733 +f 7614 7615 7734 +f 7615 7735 7734 +f 7615 7616 7736 +f 7615 7736 7735 +f 7616 7617 7736 +f 7617 7737 7736 +f 7617 7618 7738 +f 7617 7738 7737 +f 7618 7619 7738 +f 7619 7739 7738 +f 7619 7620 7740 +f 7619 7740 7739 +f 7620 7621 7740 +f 7621 7741 7740 +f 7621 7622 7742 +f 7621 7742 7741 +f 7622 7623 7742 +f 7623 7743 7742 +f 7623 7624 7744 +f 7623 7744 7743 +f 7624 7625 7744 +f 7625 7745 7744 +f 7625 7626 7746 +f 7625 7746 7745 +f 7626 7627 7746 +f 7627 7747 7746 +f 7627 7628 7748 +f 7627 7748 7747 +f 7629 7630 7749 +f 7630 7750 7749 +f 7630 7631 7751 +f 7630 7751 7750 +f 7631 7632 7751 +f 7632 7752 7751 +f 7632 7633 7753 +f 7632 7753 7752 +f 7633 7634 7753 +f 7634 7754 7753 +f 7634 7635 7755 +f 7634 7755 7754 +f 7635 7636 7755 +f 7636 7756 7755 +f 7636 7637 7757 +f 7636 7757 7756 +f 7637 7638 7757 +f 7638 7758 7757 +f 7638 7639 7759 +f 7638 7759 7758 +f 7639 7640 7759 +f 7640 7760 7759 +f 7640 7641 7761 +f 7640 7761 7760 +f 7641 7642 7761 +f 7642 7762 7761 +f 7642 7643 7763 +f 7642 7763 7762 +f 7643 7644 7763 +f 7644 7764 7763 +f 7644 7645 7765 +f 7644 7765 7764 +f 7645 7646 7765 +f 7646 7766 7765 +f 7646 7647 7767 +f 7646 7767 7766 +f 7648 7649 7769 +f 7648 7769 7768 +f 7649 7650 7769 +f 7650 7770 7769 +f 7650 7651 7771 +f 7650 7771 7770 +f 7651 7652 7771 +f 7652 7772 7771 +f 7652 7653 7773 +f 7652 7773 7772 +f 7653 7654 7773 +f 7654 7774 7773 +f 7654 7655 7775 +f 7654 7775 7774 +f 7655 7656 7775 +f 7656 7776 7775 +f 7656 7657 7777 +f 7656 7777 7776 +f 7657 7658 7777 +f 7658 7778 7777 +f 7658 7659 7779 +f 7658 7779 7778 +f 7659 7660 7779 +f 7660 7780 7779 +f 7660 7661 7781 +f 7660 7781 7780 +f 7661 7662 7781 +f 7662 7782 7781 +f 7662 7663 7783 +f 7662 7783 7782 +f 7663 7664 7783 +f 7664 7784 7783 +f 7664 7665 7785 +f 7664 7785 7784 +f 7665 7666 7785 +f 7666 7786 7785 +f 7666 7667 7787 +f 7666 7787 7786 +f 7667 7668 7787 +f 7668 7788 7787 +f 7668 7669 7789 +f 7668 7789 7788 +f 7669 7670 7789 +f 7670 7790 7789 +f 7670 7671 7791 +f 7670 7791 7790 +f 7671 7672 7791 +f 7672 7792 7791 +f 7672 7673 7793 +f 7672 7793 7792 +f 7673 7674 7793 +f 7674 7794 7793 +f 7674 7675 7795 +f 7674 7795 7794 +f 7675 7676 7795 +f 7676 7796 7795 +f 7676 7677 7797 +f 7676 7797 7796 +f 7677 7678 7797 +f 7678 7798 7797 +f 7678 7679 7799 +f 7678 7799 7798 +f 7679 7680 7799 +f 7680 7800 7799 +f 7680 7681 7801 +f 7680 7801 7800 +f 7681 7682 7801 +f 7682 7802 7801 +f 7682 7683 7803 +f 7682 7803 7802 +f 7683 7684 7803 +f 7684 7804 7803 +f 7684 7685 7805 +f 7684 7805 7804 +f 7685 7686 7805 +f 7686 7806 7805 +f 7686 7687 7807 +f 7686 7807 7806 +f 7687 7688 7807 +f 7688 7808 7807 +f 7688 7689 7809 +f 7688 7809 7808 +f 7689 7690 7809 +f 7690 7810 7809 +f 7690 7691 7811 +f 7690 7811 7810 +f 7691 7692 7811 +f 7692 7812 7811 +f 7692 7693 7813 +f 7692 7813 7812 +f 7693 7694 7813 +f 7694 7814 7813 +f 7694 7695 7815 +f 7694 7815 7814 +f 7695 
7696 7815 +f 7696 7816 7815 +f 7696 7697 7817 +f 7696 7817 7816 +f 7697 7698 7817 +f 7698 7818 7817 +f 7698 7699 7819 +f 7698 7819 7818 +f 7699 7700 7819 +f 7700 7820 7819 +f 7700 7701 7821 +f 7700 7821 7820 +f 7701 7702 7821 +f 7702 7822 7821 +f 7702 7703 7823 +f 7702 7823 7822 +f 7703 7704 7823 +f 7704 7824 7823 +f 7704 7705 7825 +f 7704 7825 7824 +f 7705 7706 7825 +f 7706 7826 7825 +f 7706 7707 7827 +f 7706 7827 7826 +f 7707 7708 7827 +f 7708 7828 7827 +f 7708 7709 7829 +f 7708 7829 7828 +f 7709 7710 7829 +f 7710 7830 7829 +f 7710 7711 7831 +f 7710 7831 7830 +f 7711 7712 7831 +f 7712 7832 7831 +f 7712 7713 7833 +f 7712 7833 7832 +f 7713 7714 7833 +f 7714 7834 7833 +f 7714 7715 7835 +f 7714 7835 7834 +f 7715 7716 7835 +f 7716 7836 7835 +f 7716 7717 7837 +f 7716 7837 7836 +f 7717 7718 7837 +f 7718 7838 7837 +f 7718 7719 7839 +f 7718 7839 7838 +f 7719 7720 7839 +f 7720 7840 7839 +f 7720 7721 7841 +f 7720 7841 7840 +f 7721 7722 7841 +f 7722 7842 7841 +f 7722 7723 7843 +f 7722 7843 7842 +f 7723 7724 7843 +f 7724 7844 7843 +f 7724 7725 7845 +f 7724 7845 7844 +f 7725 7726 7845 +f 7726 7846 7845 +f 7726 7727 7847 +f 7726 7847 7846 +f 7727 7728 7847 +f 7728 7848 7847 +f 7728 7729 7849 +f 7728 7849 7848 +f 7729 7730 7849 +f 7730 7850 7849 +f 7730 7731 7851 +f 7730 7851 7850 +f 7731 7732 7851 +f 7732 7852 7851 +f 7732 7733 7853 +f 7732 7853 7852 +f 7733 7734 7853 +f 7734 7854 7853 +f 7734 7735 7855 +f 7734 7855 7854 +f 7735 7736 7855 +f 7736 7856 7855 +f 7736 7737 7857 +f 7736 7857 7856 +f 7737 7738 7857 +f 7738 7858 7857 +f 7738 7739 7859 +f 7738 7859 7858 +f 7739 7740 7859 +f 7740 7860 7859 +f 7740 7741 7861 +f 7740 7861 7860 +f 7741 7742 7861 +f 7742 7862 7861 +f 7742 7743 7863 +f 7742 7863 7862 +f 7743 7744 7863 +f 7744 7864 7863 +f 7744 7745 7865 +f 7744 7865 7864 +f 7745 7746 7865 +f 7746 7866 7865 +f 7746 7747 7867 +f 7746 7867 7866 +f 7747 7748 7867 +f 7748 7868 7867 +f 7749 7750 7870 +f 7749 7870 7869 +f 7750 7751 7870 +f 7751 7871 7870 +f 7751 7752 7872 +f 7751 7872 7871 +f 7752 7753 7872 +f 7753 7873 7872 +f 7753 7754 7874 +f 7753 7874 7873 +f 7754 7755 7874 +f 7755 7875 7874 +f 7755 7756 7876 +f 7755 7876 7875 +f 7756 7757 7876 +f 7757 7877 7876 +f 7757 7758 7878 +f 7757 7878 7877 +f 7758 7759 7878 +f 7759 7879 7878 +f 7759 7760 7880 +f 7759 7880 7879 +f 7760 7761 7880 +f 7761 7881 7880 +f 7761 7762 7882 +f 7761 7882 7881 +f 7762 7763 7882 +f 7763 7883 7882 +f 7763 7764 7884 +f 7763 7884 7883 +f 7764 7765 7884 +f 7765 7885 7884 +f 7765 7766 7886 +f 7765 7886 7885 +f 7766 7767 7886 +f 7767 7887 7886 +f 7768 7769 7888 +f 7769 7889 7888 +f 7769 7770 7890 +f 7769 7890 7889 +f 7770 7771 7890 +f 7771 7891 7890 +f 7771 7772 7892 +f 7771 7892 7891 +f 7772 7773 7892 +f 7773 7893 7892 +f 7773 7774 7894 +f 7773 7894 7893 +f 7774 7775 7894 +f 7775 7895 7894 +f 7775 7776 7896 +f 7775 7896 7895 +f 7776 7777 7896 +f 7777 7897 7896 +f 7777 7778 7898 +f 7777 7898 7897 +f 7778 7779 7898 +f 7779 7899 7898 +f 7779 7780 7900 +f 7779 7900 7899 +f 7780 7781 7900 +f 7781 7901 7900 +f 7781 7782 7902 +f 7781 7902 7901 +f 7782 7783 7902 +f 7783 7903 7902 +f 7783 7784 7904 +f 7783 7904 7903 +f 7784 7785 7904 +f 7785 7905 7904 +f 7785 7786 7906 +f 7785 7906 7905 +f 7786 7787 7906 +f 7787 7907 7906 +f 7787 7788 7908 +f 7787 7908 7907 +f 7788 7789 7908 +f 7789 7909 7908 +f 7789 7790 7910 +f 7789 7910 7909 +f 7790 7791 7910 +f 7791 7911 7910 +f 7791 7792 7912 +f 7791 7912 7911 +f 7792 7793 7912 +f 7793 7913 7912 +f 7793 7794 7914 +f 7793 7914 7913 +f 7794 7795 7914 +f 7795 7915 7914 +f 7795 7796 7916 +f 7795 7916 
7915 +f 7796 7797 7916 +f 7797 7917 7916 +f 7797 7798 7918 +f 7797 7918 7917 +f 7798 7799 7918 +f 7799 7919 7918 +f 7799 7800 7920 +f 7799 7920 7919 +f 7800 7801 7920 +f 7801 7921 7920 +f 7801 7802 7922 +f 7801 7922 7921 +f 7802 7803 7922 +f 7803 7923 7922 +f 7803 7804 7924 +f 7803 7924 7923 +f 7804 7805 7924 +f 7805 7925 7924 +f 7805 7806 7926 +f 7805 7926 7925 +f 7806 7807 7926 +f 7807 7927 7926 +f 7807 7808 7928 +f 7807 7928 7927 +f 7808 7809 7928 +f 7809 7929 7928 +f 7809 7810 7930 +f 7809 7930 7929 +f 7810 7811 7930 +f 7811 7931 7930 +f 7811 7812 7932 +f 7811 7932 7931 +f 7812 7813 7932 +f 7813 7933 7932 +f 7813 7814 7934 +f 7813 7934 7933 +f 7814 7815 7934 +f 7815 7935 7934 +f 7815 7816 7936 +f 7815 7936 7935 +f 7816 7817 7936 +f 7817 7937 7936 +f 7817 7818 7938 +f 7817 7938 7937 +f 7818 7819 7938 +f 7819 7939 7938 +f 7819 7820 7940 +f 7819 7940 7939 +f 7820 7821 7940 +f 7821 7941 7940 +f 7821 7822 7942 +f 7821 7942 7941 +f 7822 7823 7942 +f 7823 7943 7942 +f 7823 7824 7944 +f 7823 7944 7943 +f 7824 7825 7944 +f 7825 7945 7944 +f 7825 7826 7946 +f 7825 7946 7945 +f 7826 7827 7946 +f 7827 7947 7946 +f 7827 7828 7948 +f 7827 7948 7947 +f 7828 7829 7948 +f 7829 7949 7948 +f 7829 7830 7950 +f 7829 7950 7949 +f 7830 7831 7950 +f 7831 7951 7950 +f 7831 7832 7952 +f 7831 7952 7951 +f 7832 7833 7952 +f 7833 7953 7952 +f 7833 7834 7954 +f 7833 7954 7953 +f 7834 7835 7954 +f 7835 7955 7954 +f 7835 7836 7956 +f 7835 7956 7955 +f 7836 7837 7956 +f 7837 7957 7956 +f 7837 7838 7958 +f 7837 7958 7957 +f 7838 7839 7958 +f 7839 7959 7958 +f 7839 7840 7960 +f 7839 7960 7959 +f 7840 7841 7960 +f 7841 7961 7960 +f 7841 7842 7962 +f 7841 7962 7961 +f 7842 7843 7962 +f 7843 7963 7962 +f 7843 7844 7964 +f 7843 7964 7963 +f 7844 7845 7964 +f 7845 7965 7964 +f 7845 7846 7966 +f 7845 7966 7965 +f 7846 7847 7966 +f 7847 7967 7966 +f 7847 7848 7968 +f 7847 7968 7967 +f 7848 7849 7968 +f 7849 7969 7968 +f 7849 7850 7970 +f 7849 7970 7969 +f 7850 7851 7970 +f 7851 7971 7970 +f 7851 7852 7972 +f 7851 7972 7971 +f 7852 7853 7972 +f 7853 7973 7972 +f 7853 7854 7974 +f 7853 7974 7973 +f 7854 7855 7974 +f 7855 7975 7974 +f 7855 7856 7976 +f 7855 7976 7975 +f 7856 7857 7976 +f 7857 7977 7976 +f 7857 7858 7978 +f 7857 7978 7977 +f 7858 7859 7978 +f 7859 7979 7978 +f 7859 7860 7980 +f 7859 7980 7979 +f 7860 7861 7980 +f 7861 7981 7980 +f 7861 7862 7982 +f 7861 7982 7981 +f 7862 7863 7982 +f 7863 7983 7982 +f 7863 7864 7984 +f 7863 7984 7983 +f 7864 7865 7984 +f 7865 7985 7984 +f 7865 7866 7986 +f 7865 7986 7985 +f 7866 7867 7986 +f 7867 7987 7986 +f 7867 7868 7988 +f 7867 7988 7987 +f 7869 7870 7989 +f 7870 7990 7989 +f 7870 7871 7991 +f 7870 7991 7990 +f 7871 7872 7991 +f 7872 7992 7991 +f 7872 7873 7993 +f 7872 7993 7992 +f 7873 7874 7993 +f 7874 7994 7993 +f 7874 7875 7995 +f 7874 7995 7994 +f 7875 7876 7995 +f 7876 7996 7995 +f 7876 7877 7997 +f 7876 7997 7996 +f 7877 7878 7997 +f 7878 7998 7997 +f 7878 7879 7999 +f 7878 7999 7998 +f 7879 7880 7999 +f 7880 8000 7999 +f 7880 7881 8001 +f 7880 8001 8000 +f 7881 7882 8001 +f 7882 8002 8001 +f 7882 7883 8003 +f 7882 8003 8002 +f 7883 7884 8003 +f 7884 8004 8003 +f 7884 7885 8005 +f 7884 8005 8004 +f 7885 7886 8005 +f 7886 8006 8005 +f 7886 7887 8007 +f 7886 8007 8006 +f 7888 7889 8009 +f 7888 8009 8008 +f 7889 7890 8009 +f 7890 8010 8009 +f 7890 7891 8011 +f 7890 8011 8010 +f 7891 7892 8011 +f 7892 8012 8011 +f 7892 7893 8013 +f 7892 8013 8012 +f 7893 7894 8013 +f 7894 8014 8013 +f 7894 7895 8015 +f 7894 8015 8014 +f 7895 7896 8015 +f 7896 8016 8015 +f 7896 7897 8017 +f 
7896 8017 8016 +f 7897 7898 8017 +f 7898 8018 8017 +f 7898 7899 8019 +f 7898 8019 8018 +f 7899 7900 8019 +f 7900 8020 8019 +f 7900 7901 8021 +f 7900 8021 8020 +f 7901 7902 8021 +f 7902 8022 8021 +f 7902 7903 8023 +f 7902 8023 8022 +f 7903 7904 8023 +f 7904 8024 8023 +f 7904 7905 8025 +f 7904 8025 8024 +f 7905 7906 8025 +f 7906 8026 8025 +f 7906 7907 8027 +f 7906 8027 8026 +f 7907 7908 8027 +f 7908 8028 8027 +f 7908 7909 8029 +f 7908 8029 8028 +f 7909 7910 8029 +f 7910 8030 8029 +f 7910 7911 8031 +f 7910 8031 8030 +f 7911 7912 8031 +f 7912 8032 8031 +f 7912 7913 8033 +f 7912 8033 8032 +f 7913 7914 8033 +f 7914 8034 8033 +f 7914 7915 8035 +f 7914 8035 8034 +f 7915 7916 8035 +f 7916 8036 8035 +f 7916 7917 8037 +f 7916 8037 8036 +f 7917 7918 8037 +f 7918 8038 8037 +f 7918 7919 8039 +f 7918 8039 8038 +f 7919 7920 8039 +f 7920 8040 8039 +f 7920 7921 8041 +f 7920 8041 8040 +f 7921 7922 8041 +f 7922 8042 8041 +f 7922 7923 8043 +f 7922 8043 8042 +f 7923 7924 8043 +f 7924 8044 8043 +f 7924 7925 8045 +f 7924 8045 8044 +f 7925 7926 8045 +f 7926 8046 8045 +f 7926 7927 8047 +f 7926 8047 8046 +f 7927 7928 8047 +f 7928 8048 8047 +f 7928 7929 8049 +f 7928 8049 8048 +f 7929 7930 8049 +f 7930 8050 8049 +f 7930 7931 8051 +f 7930 8051 8050 +f 7931 7932 8051 +f 7932 8052 8051 +f 7932 7933 8053 +f 7932 8053 8052 +f 7933 7934 8053 +f 7934 8054 8053 +f 7934 7935 8055 +f 7934 8055 8054 +f 7935 7936 8055 +f 7936 8056 8055 +f 7936 7937 8057 +f 7936 8057 8056 +f 7937 7938 8057 +f 7938 8058 8057 +f 7938 7939 8059 +f 7938 8059 8058 +f 7939 7940 8059 +f 7940 8060 8059 +f 7940 7941 8061 +f 7940 8061 8060 +f 7941 7942 8061 +f 7942 8062 8061 +f 7942 7943 8063 +f 7942 8063 8062 +f 7943 7944 8063 +f 7944 8064 8063 +f 7944 7945 8065 +f 7944 8065 8064 +f 7945 7946 8065 +f 7946 8066 8065 +f 7946 7947 8067 +f 7946 8067 8066 +f 7947 7948 8067 +f 7948 8068 8067 +f 7948 7949 8069 +f 7948 8069 8068 +f 7949 7950 8069 +f 7950 8070 8069 +f 7950 7951 8071 +f 7950 8071 8070 +f 7951 7952 8071 +f 7952 8072 8071 +f 7952 7953 8073 +f 7952 8073 8072 +f 7953 7954 8073 +f 7954 8074 8073 +f 7954 7955 8075 +f 7954 8075 8074 +f 7955 7956 8075 +f 7956 8076 8075 +f 7956 7957 8077 +f 7956 8077 8076 +f 7957 7958 8077 +f 7958 8078 8077 +f 7958 7959 8079 +f 7958 8079 8078 +f 7959 7960 8079 +f 7960 8080 8079 +f 7960 7961 8081 +f 7960 8081 8080 +f 7961 7962 8081 +f 7962 8082 8081 +f 7962 7963 8083 +f 7962 8083 8082 +f 7963 7964 8083 +f 7964 8084 8083 +f 7964 7965 8085 +f 7964 8085 8084 +f 7965 7966 8085 +f 7966 8086 8085 +f 7966 7967 8087 +f 7966 8087 8086 +f 7967 7968 8087 +f 7968 8088 8087 +f 7968 7969 8089 +f 7968 8089 8088 +f 7969 7970 8089 +f 7970 8090 8089 +f 7970 7971 8091 +f 7970 8091 8090 +f 7971 7972 8091 +f 7972 8092 8091 +f 7972 7973 8093 +f 7972 8093 8092 +f 7973 7974 8093 +f 7974 8094 8093 +f 7974 7975 8095 +f 7974 8095 8094 +f 7975 7976 8095 +f 7976 8096 8095 +f 7976 7977 8097 +f 7976 8097 8096 +f 7977 7978 8097 +f 7978 8098 8097 +f 7978 7979 8099 +f 7978 8099 8098 +f 7979 7980 8099 +f 7980 8100 8099 +f 7980 7981 8101 +f 7980 8101 8100 +f 7981 7982 8101 +f 7982 8102 8101 +f 7982 7983 8103 +f 7982 8103 8102 +f 7983 7984 8103 +f 7984 8104 8103 +f 7984 7985 8105 +f 7984 8105 8104 +f 7985 7986 8105 +f 7986 8106 8105 +f 7986 7987 8107 +f 7986 8107 8106 +f 7987 7988 8107 +f 7988 8108 8107 +f 7989 7990 8110 +f 7989 8110 8109 +f 7990 7991 8110 +f 7991 8111 8110 +f 7991 7992 8112 +f 7991 8112 8111 +f 7992 7993 8112 +f 7993 8113 8112 +f 7993 7994 8114 +f 7993 8114 8113 +f 7994 7995 8114 +f 7995 8115 8114 +f 7995 7996 8116 +f 7995 8116 8115 +f 7996 
7997 8116 +f 7997 8117 8116 +f 7997 7998 8118 +f 7997 8118 8117 +f 7998 7999 8118 +f 7999 8119 8118 +f 7999 8000 8120 +f 7999 8120 8119 +f 8000 8001 8120 +f 8001 8121 8120 +f 8001 8002 8122 +f 8001 8122 8121 +f 8002 8003 8122 +f 8003 8123 8122 +f 8003 8004 8124 +f 8003 8124 8123 +f 8004 8005 8124 +f 8005 8125 8124 +f 8005 8006 8126 +f 8005 8126 8125 +f 8006 8007 8126 +f 8007 8127 8126 +f 8008 8009 8128 +f 8009 8129 8128 +f 8009 8010 8130 +f 8009 8130 8129 +f 8010 8011 8130 +f 8011 8131 8130 +f 8011 8012 8132 +f 8011 8132 8131 +f 8012 8013 8132 +f 8013 8133 8132 +f 8013 8014 8134 +f 8013 8134 8133 +f 8014 8015 8134 +f 8015 8135 8134 +f 8015 8016 8136 +f 8015 8136 8135 +f 8016 8017 8136 +f 8017 8137 8136 +f 8017 8018 8138 +f 8017 8138 8137 +f 8018 8019 8138 +f 8019 8139 8138 +f 8019 8020 8140 +f 8019 8140 8139 +f 8020 8021 8140 +f 8021 8141 8140 +f 8021 8022 8142 +f 8021 8142 8141 +f 8022 8023 8142 +f 8023 8143 8142 +f 8023 8024 8144 +f 8023 8144 8143 +f 8024 8025 8144 +f 8025 8145 8144 +f 8025 8026 8146 +f 8025 8146 8145 +f 8026 8027 8146 +f 8027 8147 8146 +f 8027 8028 8148 +f 8027 8148 8147 +f 8028 8029 8148 +f 8029 8149 8148 +f 8029 8030 8150 +f 8029 8150 8149 +f 8030 8031 8150 +f 8031 8151 8150 +f 8031 8032 8152 +f 8031 8152 8151 +f 8032 8033 8152 +f 8033 8153 8152 +f 8033 8034 8154 +f 8033 8154 8153 +f 8034 8035 8154 +f 8035 8155 8154 +f 8035 8036 8156 +f 8035 8156 8155 +f 8036 8037 8156 +f 8037 8157 8156 +f 8037 8038 8158 +f 8037 8158 8157 +f 8038 8039 8158 +f 8039 8159 8158 +f 8039 8040 8160 +f 8039 8160 8159 +f 8040 8041 8160 +f 8041 8161 8160 +f 8041 8042 8162 +f 8041 8162 8161 +f 8042 8043 8162 +f 8043 8163 8162 +f 8043 8044 8164 +f 8043 8164 8163 +f 8044 8045 8164 +f 8045 8165 8164 +f 8045 8046 8166 +f 8045 8166 8165 +f 8046 8047 8166 +f 8047 8167 8166 +f 8047 8048 8168 +f 8047 8168 8167 +f 8048 8049 8168 +f 8049 8169 8168 +f 8049 8050 8170 +f 8049 8170 8169 +f 8050 8051 8170 +f 8051 8171 8170 +f 8051 8052 8172 +f 8051 8172 8171 +f 8052 8053 8172 +f 8053 8173 8172 +f 8053 8054 8174 +f 8053 8174 8173 +f 8054 8055 8174 +f 8055 8175 8174 +f 8055 8056 8176 +f 8055 8176 8175 +f 8056 8057 8176 +f 8057 8177 8176 +f 8057 8058 8178 +f 8057 8178 8177 +f 8058 8059 8178 +f 8059 8179 8178 +f 8059 8060 8180 +f 8059 8180 8179 +f 8060 8061 8180 +f 8061 8181 8180 +f 8061 8062 8182 +f 8061 8182 8181 +f 8062 8063 8182 +f 8063 8183 8182 +f 8063 8064 8184 +f 8063 8184 8183 +f 8064 8065 8184 +f 8065 8185 8184 +f 8065 8066 8186 +f 8065 8186 8185 +f 8066 8067 8186 +f 8067 8187 8186 +f 8067 8068 8188 +f 8067 8188 8187 +f 8068 8069 8188 +f 8069 8189 8188 +f 8069 8070 8190 +f 8069 8190 8189 +f 8070 8071 8190 +f 8071 8191 8190 +f 8071 8072 8192 +f 8071 8192 8191 +f 8072 8073 8192 +f 8073 8193 8192 +f 8073 8074 8194 +f 8073 8194 8193 +f 8074 8075 8194 +f 8075 8195 8194 +f 8075 8076 8196 +f 8075 8196 8195 +f 8076 8077 8196 +f 8077 8197 8196 +f 8077 8078 8198 +f 8077 8198 8197 +f 8078 8079 8198 +f 8079 8199 8198 +f 8079 8080 8200 +f 8079 8200 8199 +f 8080 8081 8200 +f 8081 8201 8200 +f 8081 8082 8202 +f 8081 8202 8201 +f 8082 8083 8202 +f 8083 8203 8202 +f 8083 8084 8204 +f 8083 8204 8203 +f 8084 8085 8204 +f 8085 8205 8204 +f 8085 8086 8206 +f 8085 8206 8205 +f 8086 8087 8206 +f 8087 8207 8206 +f 8087 8088 8208 +f 8087 8208 8207 +f 8088 8089 8208 +f 8089 8209 8208 +f 8089 8090 8210 +f 8089 8210 8209 +f 8090 8091 8210 +f 8091 8211 8210 +f 8091 8092 8212 +f 8091 8212 8211 +f 8092 8093 8212 +f 8093 8213 8212 +f 8093 8094 8214 +f 8093 8214 8213 +f 8094 8095 8214 +f 8095 8215 8214 +f 8095 8096 8216 +f 8095 8216 
8215 +f 8096 8097 8216 +f 8097 8217 8216 +f 8097 8098 8218 +f 8097 8218 8217 +f 8098 8099 8218 +f 8099 8219 8218 +f 8099 8100 8220 +f 8099 8220 8219 +f 8100 8101 8220 +f 8101 8221 8220 +f 8101 8102 8222 +f 8101 8222 8221 +f 8102 8103 8222 +f 8103 8223 8222 +f 8103 8104 8224 +f 8103 8224 8223 +f 8104 8105 8224 +f 8105 8225 8224 +f 8105 8106 8226 +f 8105 8226 8225 +f 8106 8107 8226 +f 8107 8227 8226 +f 8107 8108 8228 +f 8107 8228 8227 +f 8109 8110 8229 +f 8110 8230 8229 +f 8110 8111 8231 +f 8110 8231 8230 +f 8111 8112 8231 +f 8112 8232 8231 +f 8112 8113 8233 +f 8112 8233 8232 +f 8113 8114 8233 +f 8114 8234 8233 +f 8114 8115 8235 +f 8114 8235 8234 +f 8115 8116 8235 +f 8116 8236 8235 +f 8116 8117 8237 +f 8116 8237 8236 +f 8117 8118 8237 +f 8118 8238 8237 +f 8118 8119 8239 +f 8118 8239 8238 +f 8119 8120 8239 +f 8120 8240 8239 +f 8120 8121 8241 +f 8120 8241 8240 +f 8121 8122 8241 +f 8122 8242 8241 +f 8122 8123 8243 +f 8122 8243 8242 +f 8123 8124 8243 +f 8124 8244 8243 +f 8124 8125 8245 +f 8124 8245 8244 +f 8125 8126 8245 +f 8126 8246 8245 +f 8126 8127 8247 +f 8126 8247 8246 +f 8128 8129 8249 +f 8128 8249 8248 +f 8129 8130 8249 +f 8130 8250 8249 +f 8130 8131 8251 +f 8130 8251 8250 +f 8131 8132 8251 +f 8132 8252 8251 +f 8132 8133 8253 +f 8132 8253 8252 +f 8133 8134 8253 +f 8134 8254 8253 +f 8134 8135 8255 +f 8134 8255 8254 +f 8135 8136 8255 +f 8136 8256 8255 +f 8136 8137 8257 +f 8136 8257 8256 +f 8137 8138 8257 +f 8138 8258 8257 +f 8138 8139 8259 +f 8138 8259 8258 +f 8139 8140 8259 +f 8140 8260 8259 +f 8140 8141 8261 +f 8140 8261 8260 +f 8141 8142 8261 +f 8142 8262 8261 +f 8142 8143 8263 +f 8142 8263 8262 +f 8143 8144 8263 +f 8144 8264 8263 +f 8144 8145 8265 +f 8144 8265 8264 +f 8145 8146 8265 +f 8146 8266 8265 +f 8146 8147 8267 +f 8146 8267 8266 +f 8147 8148 8267 +f 8148 8268 8267 +f 8148 8149 8269 +f 8148 8269 8268 +f 8149 8150 8269 +f 8150 8270 8269 +f 8150 8151 8271 +f 8150 8271 8270 +f 8151 8152 8271 +f 8152 8272 8271 +f 8152 8153 8273 +f 8152 8273 8272 +f 8153 8154 8273 +f 8154 8274 8273 +f 8154 8155 8275 +f 8154 8275 8274 +f 8155 8156 8275 +f 8156 8276 8275 +f 8156 8157 8277 +f 8156 8277 8276 +f 8157 8158 8277 +f 8158 8278 8277 +f 8158 8159 8279 +f 8158 8279 8278 +f 8159 8160 8279 +f 8160 8280 8279 +f 8160 8161 8281 +f 8160 8281 8280 +f 8161 8162 8281 +f 8162 8282 8281 +f 8162 8163 8283 +f 8162 8283 8282 +f 8163 8164 8283 +f 8164 8284 8283 +f 8164 8165 8285 +f 8164 8285 8284 +f 8165 8166 8285 +f 8166 8286 8285 +f 8166 8167 8287 +f 8166 8287 8286 +f 8167 8168 8287 +f 8168 8288 8287 +f 8168 8169 8289 +f 8168 8289 8288 +f 8169 8170 8289 +f 8170 8290 8289 +f 8170 8171 8291 +f 8170 8291 8290 +f 8171 8172 8291 +f 8172 8292 8291 +f 8172 8173 8293 +f 8172 8293 8292 +f 8173 8174 8293 +f 8174 8294 8293 +f 8174 8175 8295 +f 8174 8295 8294 +f 8175 8176 8295 +f 8176 8296 8295 +f 8176 8177 8297 +f 8176 8297 8296 +f 8177 8178 8297 +f 8178 8298 8297 +f 8178 8179 8299 +f 8178 8299 8298 +f 8179 8180 8299 +f 8180 8300 8299 +f 8180 8181 8301 +f 8180 8301 8300 +f 8181 8182 8301 +f 8182 8302 8301 +f 8182 8183 8303 +f 8182 8303 8302 +f 8183 8184 8303 +f 8184 8304 8303 +f 8184 8185 8305 +f 8184 8305 8304 +f 8185 8186 8305 +f 8186 8306 8305 +f 8186 8187 8307 +f 8186 8307 8306 +f 8187 8188 8307 +f 8188 8308 8307 +f 8188 8189 8309 +f 8188 8309 8308 +f 8189 8190 8309 +f 8190 8310 8309 +f 8190 8191 8311 +f 8190 8311 8310 +f 8191 8192 8311 +f 8192 8312 8311 +f 8192 8193 8313 +f 8192 8313 8312 +f 8193 8194 8313 +f 8194 8314 8313 +f 8194 8195 8315 +f 8194 8315 8314 +f 8195 8196 8315 +f 8196 8316 8315 +f 8196 8197 8317 +f 
8196 8317 8316 +f 8197 8198 8317 +f 8198 8318 8317 +f 8198 8199 8319 +f 8198 8319 8318 +f 8199 8200 8319 +f 8200 8320 8319 +f 8200 8201 8321 +f 8200 8321 8320 +f 8201 8202 8321 +f 8202 8322 8321 +f 8202 8203 8323 +f 8202 8323 8322 +f 8203 8204 8323 +f 8204 8324 8323 +f 8204 8205 8325 +f 8204 8325 8324 +f 8205 8206 8325 +f 8206 8326 8325 +f 8206 8207 8327 +f 8206 8327 8326 +f 8207 8208 8327 +f 8208 8328 8327 +f 8208 8209 8329 +f 8208 8329 8328 +f 8209 8210 8329 +f 8210 8330 8329 +f 8210 8211 8331 +f 8210 8331 8330 +f 8211 8212 8331 +f 8212 8332 8331 +f 8212 8213 8333 +f 8212 8333 8332 +f 8213 8214 8333 +f 8214 8334 8333 +f 8214 8215 8335 +f 8214 8335 8334 +f 8215 8216 8335 +f 8216 8336 8335 +f 8216 8217 8337 +f 8216 8337 8336 +f 8217 8218 8337 +f 8218 8338 8337 +f 8218 8219 8339 +f 8218 8339 8338 +f 8219 8220 8339 +f 8220 8340 8339 +f 8220 8221 8341 +f 8220 8341 8340 +f 8221 8222 8341 +f 8222 8342 8341 +f 8222 8223 8343 +f 8222 8343 8342 +f 8223 8224 8343 +f 8224 8344 8343 +f 8224 8225 8345 +f 8224 8345 8344 +f 8225 8226 8345 +f 8226 8346 8345 +f 8226 8227 8347 +f 8226 8347 8346 +f 8227 8228 8347 +f 8228 8348 8347 +f 8229 8230 8350 +f 8229 8350 8349 +f 8230 8231 8350 +f 8231 8351 8350 +f 8231 8232 8352 +f 8231 8352 8351 +f 8232 8233 8352 +f 8233 8353 8352 +f 8233 8234 8354 +f 8233 8354 8353 +f 8234 8235 8354 +f 8235 8355 8354 +f 8235 8236 8356 +f 8235 8356 8355 +f 8236 8237 8356 +f 8237 8357 8356 +f 8237 8238 8358 +f 8237 8358 8357 +f 8238 8239 8358 +f 8239 8359 8358 +f 8239 8240 8360 +f 8239 8360 8359 +f 8240 8241 8360 +f 8241 8361 8360 +f 8241 8242 8362 +f 8241 8362 8361 +f 8242 8243 8362 +f 8243 8363 8362 +f 8243 8244 8364 +f 8243 8364 8363 +f 8244 8245 8364 +f 8245 8365 8364 +f 8245 8246 8366 +f 8245 8366 8365 +f 8246 8247 8366 +f 8247 8367 8366 +f 8248 8249 8368 +f 8249 8369 8368 +f 8249 8250 8370 +f 8249 8370 8369 +f 8250 8251 8370 +f 8251 8371 8370 +f 8251 8252 8372 +f 8251 8372 8371 +f 8252 8253 8372 +f 8253 8373 8372 +f 8253 8254 8374 +f 8253 8374 8373 +f 8254 8255 8374 +f 8255 8375 8374 +f 8255 8256 8376 +f 8255 8376 8375 +f 8256 8257 8376 +f 8257 8377 8376 +f 8257 8258 8378 +f 8257 8378 8377 +f 8258 8259 8378 +f 8259 8379 8378 +f 8259 8260 8380 +f 8259 8380 8379 +f 8260 8261 8380 +f 8261 8381 8380 +f 8261 8262 8382 +f 8261 8382 8381 +f 8262 8263 8382 +f 8263 8383 8382 +f 8263 8264 8384 +f 8263 8384 8383 +f 8264 8265 8384 +f 8265 8385 8384 +f 8265 8266 8386 +f 8265 8386 8385 +f 8266 8267 8386 +f 8267 8387 8386 +f 8267 8268 8388 +f 8267 8388 8387 +f 8268 8269 8388 +f 8269 8389 8388 +f 8269 8270 8390 +f 8269 8390 8389 +f 8270 8271 8390 +f 8271 8391 8390 +f 8271 8272 8392 +f 8271 8392 8391 +f 8272 8273 8392 +f 8273 8393 8392 +f 8273 8274 8394 +f 8273 8394 8393 +f 8274 8275 8394 +f 8275 8395 8394 +f 8275 8276 8396 +f 8275 8396 8395 +f 8276 8277 8396 +f 8277 8397 8396 +f 8277 8278 8398 +f 8277 8398 8397 +f 8278 8279 8398 +f 8279 8399 8398 +f 8279 8280 8400 +f 8279 8400 8399 +f 8280 8281 8400 +f 8281 8401 8400 +f 8281 8282 8402 +f 8281 8402 8401 +f 8282 8283 8402 +f 8283 8403 8402 +f 8283 8284 8404 +f 8283 8404 8403 +f 8284 8285 8404 +f 8285 8405 8404 +f 8285 8286 8406 +f 8285 8406 8405 +f 8286 8287 8406 +f 8287 8407 8406 +f 8287 8288 8408 +f 8287 8408 8407 +f 8288 8289 8408 +f 8289 8409 8408 +f 8289 8290 8410 +f 8289 8410 8409 +f 8290 8291 8410 +f 8291 8411 8410 +f 8291 8292 8412 +f 8291 8412 8411 +f 8292 8293 8412 +f 8293 8413 8412 +f 8293 8294 8414 +f 8293 8414 8413 +f 8294 8295 8414 +f 8295 8415 8414 +f 8295 8296 8416 +f 8295 8416 8415 +f 8296 8297 8416 +f 8297 8417 8416 +f 8297 
8298 8418 +f 8297 8418 8417 +f 8298 8299 8418 +f 8299 8419 8418 +f 8299 8300 8420 +f 8299 8420 8419 +f 8300 8301 8420 +f 8301 8421 8420 +f 8301 8302 8422 +f 8301 8422 8421 +f 8302 8303 8422 +f 8303 8423 8422 +f 8303 8304 8424 +f 8303 8424 8423 +f 8304 8305 8424 +f 8305 8425 8424 +f 8305 8306 8426 +f 8305 8426 8425 +f 8306 8307 8426 +f 8307 8427 8426 +f 8307 8308 8428 +f 8307 8428 8427 +f 8308 8309 8428 +f 8309 8429 8428 +f 8309 8310 8430 +f 8309 8430 8429 +f 8310 8311 8430 +f 8311 8431 8430 +f 8311 8312 8432 +f 8311 8432 8431 +f 8312 8313 8432 +f 8313 8433 8432 +f 8313 8314 8434 +f 8313 8434 8433 +f 8314 8315 8434 +f 8315 8435 8434 +f 8315 8316 8436 +f 8315 8436 8435 +f 8316 8317 8436 +f 8317 8437 8436 +f 8317 8318 8438 +f 8317 8438 8437 +f 8318 8319 8438 +f 8319 8439 8438 +f 8319 8320 8440 +f 8319 8440 8439 +f 8320 8321 8440 +f 8321 8441 8440 +f 8321 8322 8442 +f 8321 8442 8441 +f 8322 8323 8442 +f 8323 8443 8442 +f 8323 8324 8444 +f 8323 8444 8443 +f 8324 8325 8444 +f 8325 8445 8444 +f 8325 8326 8446 +f 8325 8446 8445 +f 8326 8327 8446 +f 8327 8447 8446 +f 8327 8328 8448 +f 8327 8448 8447 +f 8328 8329 8448 +f 8329 8449 8448 +f 8329 8330 8450 +f 8329 8450 8449 +f 8330 8331 8450 +f 8331 8451 8450 +f 8331 8332 8452 +f 8331 8452 8451 +f 8332 8333 8452 +f 8333 8453 8452 +f 8333 8334 8454 +f 8333 8454 8453 +f 8334 8335 8454 +f 8335 8455 8454 +f 8335 8336 8456 +f 8335 8456 8455 +f 8336 8337 8456 +f 8337 8457 8456 +f 8337 8338 8458 +f 8337 8458 8457 +f 8338 8339 8458 +f 8339 8459 8458 +f 8339 8340 8460 +f 8339 8460 8459 +f 8340 8341 8460 +f 8341 8461 8460 +f 8341 8342 8462 +f 8341 8462 8461 +f 8342 8343 8462 +f 8343 8463 8462 +f 8343 8344 8464 +f 8343 8464 8463 +f 8344 8345 8464 +f 8345 8465 8464 +f 8345 8346 8466 +f 8345 8466 8465 +f 8346 8347 8466 +f 8347 8467 8466 +f 8347 8348 8468 +f 8347 8468 8467 +f 8349 8350 8469 +f 8350 8470 8469 +f 8350 8351 8471 +f 8350 8471 8470 +f 8351 8352 8471 +f 8352 8472 8471 +f 8352 8353 8473 +f 8352 8473 8472 +f 8353 8354 8473 +f 8354 8474 8473 +f 8354 8355 8475 +f 8354 8475 8474 +f 8355 8356 8475 +f 8356 8476 8475 +f 8356 8357 8477 +f 8356 8477 8476 +f 8357 8358 8477 +f 8358 8478 8477 +f 8358 8359 8479 +f 8358 8479 8478 +f 8359 8360 8479 +f 8360 8480 8479 +f 8360 8361 8481 +f 8360 8481 8480 +f 8361 8362 8481 +f 8362 8482 8481 +f 8362 8363 8483 +f 8362 8483 8482 +f 8363 8364 8483 +f 8364 8484 8483 +f 8364 8365 8485 +f 8364 8485 8484 +f 8365 8366 8485 +f 8366 8486 8485 +f 8366 8367 8487 +f 8366 8487 8486 +f 8368 8369 8489 +f 8368 8489 8488 +f 8369 8370 8489 +f 8370 8490 8489 +f 8370 8371 8491 +f 8370 8491 8490 +f 8371 8372 8491 +f 8372 8492 8491 +f 8372 8373 8493 +f 8372 8493 8492 +f 8373 8374 8493 +f 8374 8494 8493 +f 8374 8375 8495 +f 8374 8495 8494 +f 8375 8376 8495 +f 8376 8496 8495 +f 8376 8377 8497 +f 8376 8497 8496 +f 8377 8378 8497 +f 8378 8498 8497 +f 8378 8379 8499 +f 8378 8499 8498 +f 8379 8380 8499 +f 8380 8500 8499 +f 8380 8381 8501 +f 8380 8501 8500 +f 8381 8382 8501 +f 8382 8502 8501 +f 8382 8383 8503 +f 8382 8503 8502 +f 8383 8384 8503 +f 8384 8504 8503 +f 8384 8385 8505 +f 8384 8505 8504 +f 8385 8386 8505 +f 8386 8506 8505 +f 8386 8387 8507 +f 8386 8507 8506 +f 8387 8388 8507 +f 8388 8508 8507 +f 8388 8389 8509 +f 8388 8509 8508 +f 8389 8390 8509 +f 8390 8510 8509 +f 8390 8391 8511 +f 8390 8511 8510 +f 8391 8392 8511 +f 8392 8512 8511 +f 8392 8393 8513 +f 8392 8513 8512 +f 8393 8394 8513 +f 8394 8514 8513 +f 8394 8395 8515 +f 8394 8515 8514 +f 8395 8396 8515 +f 8396 8516 8515 +f 8396 8397 8517 +f 8396 8517 8516 +f 8397 8398 8517 +f 8398 8518 
8517 +f 8398 8399 8519 +f 8398 8519 8518 +f 8399 8400 8519 +f 8400 8520 8519 +f 8400 8401 8521 +f 8400 8521 8520 +f 8401 8402 8521 +f 8402 8522 8521 +f 8402 8403 8523 +f 8402 8523 8522 +f 8403 8404 8523 +f 8404 8524 8523 +f 8404 8405 8525 +f 8404 8525 8524 +f 8405 8406 8525 +f 8406 8526 8525 +f 8406 8407 8527 +f 8406 8527 8526 +f 8407 8408 8527 +f 8408 8528 8527 +f 8408 8409 8529 +f 8408 8529 8528 +f 8409 8410 8529 +f 8410 8530 8529 +f 8410 8411 8531 +f 8410 8531 8530 +f 8411 8412 8531 +f 8412 8532 8531 +f 8412 8413 8533 +f 8412 8533 8532 +f 8413 8414 8533 +f 8414 8534 8533 +f 8414 8415 8535 +f 8414 8535 8534 +f 8415 8416 8535 +f 8416 8536 8535 +f 8416 8417 8537 +f 8416 8537 8536 +f 8417 8418 8537 +f 8418 8538 8537 +f 8418 8419 8539 +f 8418 8539 8538 +f 8419 8420 8539 +f 8420 8540 8539 +f 8420 8421 8541 +f 8420 8541 8540 +f 8421 8422 8541 +f 8422 8542 8541 +f 8422 8423 8543 +f 8422 8543 8542 +f 8423 8424 8543 +f 8424 8544 8543 +f 8424 8425 8545 +f 8424 8545 8544 +f 8425 8426 8545 +f 8426 8546 8545 +f 8426 8427 8547 +f 8426 8547 8546 +f 8427 8428 8547 +f 8428 8548 8547 +f 8428 8429 8549 +f 8428 8549 8548 +f 8429 8430 8549 +f 8430 8550 8549 +f 8430 8431 8551 +f 8430 8551 8550 +f 8431 8432 8551 +f 8432 8552 8551 +f 8432 8433 8553 +f 8432 8553 8552 +f 8433 8434 8553 +f 8434 8554 8553 +f 8434 8435 8555 +f 8434 8555 8554 +f 8435 8436 8555 +f 8436 8556 8555 +f 8436 8437 8557 +f 8436 8557 8556 +f 8437 8438 8557 +f 8438 8558 8557 +f 8438 8439 8559 +f 8438 8559 8558 +f 8439 8440 8559 +f 8440 8560 8559 +f 8440 8441 8561 +f 8440 8561 8560 +f 8441 8442 8561 +f 8442 8562 8561 +f 8442 8443 8563 +f 8442 8563 8562 +f 8443 8444 8563 +f 8444 8564 8563 +f 8444 8445 8565 +f 8444 8565 8564 +f 8445 8446 8565 +f 8446 8566 8565 +f 8446 8447 8567 +f 8446 8567 8566 +f 8447 8448 8567 +f 8448 8568 8567 +f 8448 8449 8569 +f 8448 8569 8568 +f 8449 8450 8569 +f 8450 8570 8569 +f 8450 8451 8571 +f 8450 8571 8570 +f 8451 8452 8571 +f 8452 8572 8571 +f 8452 8453 8573 +f 8452 8573 8572 +f 8453 8454 8573 +f 8454 8574 8573 +f 8454 8455 8575 +f 8454 8575 8574 +f 8455 8456 8575 +f 8456 8576 8575 +f 8456 8457 8577 +f 8456 8577 8576 +f 8457 8458 8577 +f 8458 8578 8577 +f 8458 8459 8579 +f 8458 8579 8578 +f 8459 8460 8579 +f 8460 8580 8579 +f 8460 8461 8581 +f 8460 8581 8580 +f 8461 8462 8581 +f 8462 8582 8581 +f 8462 8463 8583 +f 8462 8583 8582 +f 8463 8464 8583 +f 8464 8584 8583 +f 8464 8465 8585 +f 8464 8585 8584 +f 8465 8466 8585 +f 8466 8586 8585 +f 8466 8467 8587 +f 8466 8587 8586 +f 8467 8468 8587 +f 8468 8588 8587 +f 8469 8470 8590 +f 8469 8590 8589 +f 8470 8471 8590 +f 8471 8591 8590 +f 8471 8472 8592 +f 8471 8592 8591 +f 8472 8473 8592 +f 8473 8593 8592 +f 8473 8474 8594 +f 8473 8594 8593 +f 8474 8475 8594 +f 8475 8595 8594 +f 8475 8476 8596 +f 8475 8596 8595 +f 8476 8477 8596 +f 8477 8597 8596 +f 8477 8478 8598 +f 8477 8598 8597 +f 8478 8479 8598 +f 8479 8599 8598 +f 8479 8480 8600 +f 8479 8600 8599 +f 8480 8481 8600 +f 8481 8601 8600 +f 8481 8482 8602 +f 8481 8602 8601 +f 8482 8483 8602 +f 8483 8603 8602 +f 8483 8484 8604 +f 8483 8604 8603 +f 8484 8485 8604 +f 8485 8605 8604 +f 8485 8486 8606 +f 8485 8606 8605 +f 8486 8487 8606 +f 8487 8607 8606 +f 8488 8489 8608 +f 8489 8609 8608 +f 8489 8490 8610 +f 8489 8610 8609 +f 8490 8491 8610 +f 8491 8611 8610 +f 8491 8492 8612 +f 8491 8612 8611 +f 8492 8493 8612 +f 8493 8613 8612 +f 8493 8494 8614 +f 8493 8614 8613 +f 8494 8495 8614 +f 8495 8615 8614 +f 8495 8496 8616 +f 8495 8616 8615 +f 8496 8497 8616 +f 8497 8617 8616 +f 8497 8498 8618 +f 8497 8618 8617 +f 8498 8499 8618 +f 
8499 8619 8618 +f 8499 8500 8620 +f 8499 8620 8619 +f 8500 8501 8620 +f 8501 8621 8620 +f 8501 8502 8622 +f 8501 8622 8621 +f 8502 8503 8622 +f 8503 8623 8622 +f 8503 8504 8624 +f 8503 8624 8623 +f 8504 8505 8624 +f 8505 8625 8624 +f 8505 8506 8626 +f 8505 8626 8625 +f 8506 8507 8626 +f 8507 8627 8626 +f 8507 8508 8628 +f 8507 8628 8627 +f 8508 8509 8628 +f 8509 8629 8628 +f 8509 8510 8630 +f 8509 8630 8629 +f 8510 8511 8630 +f 8511 8631 8630 +f 8511 8512 8632 +f 8511 8632 8631 +f 8512 8513 8632 +f 8513 8633 8632 +f 8513 8514 8634 +f 8513 8634 8633 +f 8514 8515 8634 +f 8515 8635 8634 +f 8515 8516 8636 +f 8515 8636 8635 +f 8516 8517 8636 +f 8517 8637 8636 +f 8517 8518 8638 +f 8517 8638 8637 +f 8518 8519 8638 +f 8519 8639 8638 +f 8519 8520 8640 +f 8519 8640 8639 +f 8520 8521 8640 +f 8521 8641 8640 +f 8521 8522 8642 +f 8521 8642 8641 +f 8522 8523 8642 +f 8523 8643 8642 +f 8523 8524 8644 +f 8523 8644 8643 +f 8524 8525 8644 +f 8525 8645 8644 +f 8525 8526 8646 +f 8525 8646 8645 +f 8526 8527 8646 +f 8527 8647 8646 +f 8527 8528 8648 +f 8527 8648 8647 +f 8528 8529 8648 +f 8529 8649 8648 +f 8529 8530 8650 +f 8529 8650 8649 +f 8530 8531 8650 +f 8531 8651 8650 +f 8531 8532 8652 +f 8531 8652 8651 +f 8532 8533 8652 +f 8533 8653 8652 +f 8533 8534 8654 +f 8533 8654 8653 +f 8534 8535 8654 +f 8535 8655 8654 +f 8535 8536 8656 +f 8535 8656 8655 +f 8536 8537 8656 +f 8537 8657 8656 +f 8537 8538 8658 +f 8537 8658 8657 +f 8538 8539 8658 +f 8539 8659 8658 +f 8539 8540 8660 +f 8539 8660 8659 +f 8540 8541 8660 +f 8541 8661 8660 +f 8541 8542 8662 +f 8541 8662 8661 +f 8542 8543 8662 +f 8543 8663 8662 +f 8543 8544 8664 +f 8543 8664 8663 +f 8544 8545 8664 +f 8545 8665 8664 +f 8545 8546 8666 +f 8545 8666 8665 +f 8546 8547 8666 +f 8547 8667 8666 +f 8547 8548 8668 +f 8547 8668 8667 +f 8548 8549 8668 +f 8549 8669 8668 +f 8549 8550 8670 +f 8549 8670 8669 +f 8550 8551 8670 +f 8551 8671 8670 +f 8551 8552 8672 +f 8551 8672 8671 +f 8552 8553 8672 +f 8553 8673 8672 +f 8553 8554 8674 +f 8553 8674 8673 +f 8554 8555 8674 +f 8555 8675 8674 +f 8555 8556 8676 +f 8555 8676 8675 +f 8556 8557 8676 +f 8557 8677 8676 +f 8557 8558 8678 +f 8557 8678 8677 +f 8558 8559 8678 +f 8559 8679 8678 +f 8559 8560 8680 +f 8559 8680 8679 +f 8560 8561 8680 +f 8561 8681 8680 +f 8561 8562 8682 +f 8561 8682 8681 +f 8562 8563 8682 +f 8563 8683 8682 +f 8563 8564 8684 +f 8563 8684 8683 +f 8564 8565 8684 +f 8565 8685 8684 +f 8565 8566 8686 +f 8565 8686 8685 +f 8566 8567 8686 +f 8567 8687 8686 +f 8567 8568 8688 +f 8567 8688 8687 +f 8568 8569 8688 +f 8569 8689 8688 +f 8569 8570 8690 +f 8569 8690 8689 +f 8570 8571 8690 +f 8571 8691 8690 +f 8571 8572 8692 +f 8571 8692 8691 +f 8572 8573 8692 +f 8573 8693 8692 +f 8573 8574 8694 +f 8573 8694 8693 +f 8574 8575 8694 +f 8575 8695 8694 +f 8575 8576 8696 +f 8575 8696 8695 +f 8576 8577 8696 +f 8577 8697 8696 +f 8577 8578 8698 +f 8577 8698 8697 +f 8578 8579 8698 +f 8579 8699 8698 +f 8579 8580 8700 +f 8579 8700 8699 +f 8580 8581 8700 +f 8581 8701 8700 +f 8581 8582 8702 +f 8581 8702 8701 +f 8582 8583 8702 +f 8583 8703 8702 +f 8583 8584 8704 +f 8583 8704 8703 +f 8584 8585 8704 +f 8585 8705 8704 +f 8585 8586 8706 +f 8585 8706 8705 +f 8586 8587 8706 +f 8587 8707 8706 +f 8587 8588 8708 +f 8587 8708 8707 +f 8589 8590 8709 +f 8590 8710 8709 +f 8590 8591 8711 +f 8590 8711 8710 +f 8591 8592 8711 +f 8592 8712 8711 +f 8592 8593 8713 +f 8592 8713 8712 +f 8593 8594 8713 +f 8594 8714 8713 +f 8594 8595 8715 +f 8594 8715 8714 +f 8595 8596 8715 +f 8596 8716 8715 +f 8596 8597 8717 +f 8596 8717 8716 +f 8597 8598 8717 +f 8598 8718 8717 +f 8598 
8599 8719 +f 8598 8719 8718 +f 8599 8600 8719 +f 8600 8720 8719 +f 8600 8601 8721 +f 8600 8721 8720 +f 8601 8602 8721 +f 8602 8722 8721 +f 8602 8603 8723 +f 8602 8723 8722 +f 8603 8604 8723 +f 8604 8724 8723 +f 8604 8605 8725 +f 8604 8725 8724 +f 8605 8606 8725 +f 8606 8726 8725 +f 8606 8607 8727 +f 8606 8727 8726 +f 8608 8609 8729 +f 8608 8729 8728 +f 8609 8610 8729 +f 8610 8730 8729 +f 8610 8611 8731 +f 8610 8731 8730 +f 8611 8612 8731 +f 8612 8732 8731 +f 8612 8613 8733 +f 8612 8733 8732 +f 8613 8614 8733 +f 8614 8734 8733 +f 8614 8615 8735 +f 8614 8735 8734 +f 8615 8616 8735 +f 8616 8736 8735 +f 8616 8617 8737 +f 8616 8737 8736 +f 8617 8618 8737 +f 8618 8738 8737 +f 8618 8619 8739 +f 8618 8739 8738 +f 8619 8620 8739 +f 8620 8740 8739 +f 8620 8621 8741 +f 8620 8741 8740 +f 8621 8622 8741 +f 8622 8742 8741 +f 8622 8623 8743 +f 8622 8743 8742 +f 8623 8624 8743 +f 8624 8744 8743 +f 8624 8625 8745 +f 8624 8745 8744 +f 8625 8626 8745 +f 8626 8746 8745 +f 8626 8627 8747 +f 8626 8747 8746 +f 8627 8628 8747 +f 8628 8748 8747 +f 8628 8629 8749 +f 8628 8749 8748 +f 8629 8630 8749 +f 8630 8750 8749 +f 8630 8631 8751 +f 8630 8751 8750 +f 8631 8632 8751 +f 8632 8752 8751 +f 8632 8633 8753 +f 8632 8753 8752 +f 8633 8634 8753 +f 8634 8754 8753 +f 8634 8635 8755 +f 8634 8755 8754 +f 8635 8636 8755 +f 8636 8756 8755 +f 8636 8637 8757 +f 8636 8757 8756 +f 8637 8638 8757 +f 8638 8758 8757 +f 8638 8639 8759 +f 8638 8759 8758 +f 8639 8640 8759 +f 8640 8760 8759 +f 8640 8641 8761 +f 8640 8761 8760 +f 8641 8642 8761 +f 8642 8762 8761 +f 8642 8643 8763 +f 8642 8763 8762 +f 8643 8644 8763 +f 8644 8764 8763 +f 8644 8645 8765 +f 8644 8765 8764 +f 8645 8646 8765 +f 8646 8766 8765 +f 8646 8647 8767 +f 8646 8767 8766 +f 8647 8648 8767 +f 8648 8768 8767 +f 8648 8649 8769 +f 8648 8769 8768 +f 8649 8650 8769 +f 8650 8770 8769 +f 8650 8651 8771 +f 8650 8771 8770 +f 8651 8652 8771 +f 8652 8772 8771 +f 8652 8653 8773 +f 8652 8773 8772 +f 8653 8654 8773 +f 8654 8774 8773 +f 8654 8655 8775 +f 8654 8775 8774 +f 8655 8656 8775 +f 8656 8776 8775 +f 8656 8657 8777 +f 8656 8777 8776 +f 8657 8658 8777 +f 8658 8778 8777 +f 8658 8659 8779 +f 8658 8779 8778 +f 8659 8660 8779 +f 8660 8780 8779 +f 8660 8661 8781 +f 8660 8781 8780 +f 8661 8662 8781 +f 8662 8782 8781 +f 8662 8663 8783 +f 8662 8783 8782 +f 8663 8664 8783 +f 8664 8784 8783 +f 8664 8665 8785 +f 8664 8785 8784 +f 8665 8666 8785 +f 8666 8786 8785 +f 8666 8667 8787 +f 8666 8787 8786 +f 8667 8668 8787 +f 8668 8788 8787 +f 8668 8669 8789 +f 8668 8789 8788 +f 8669 8670 8789 +f 8670 8790 8789 +f 8670 8671 8791 +f 8670 8791 8790 +f 8671 8672 8791 +f 8672 8792 8791 +f 8672 8673 8793 +f 8672 8793 8792 +f 8673 8674 8793 +f 8674 8794 8793 +f 8674 8675 8795 +f 8674 8795 8794 +f 8675 8676 8795 +f 8676 8796 8795 +f 8676 8677 8797 +f 8676 8797 8796 +f 8677 8678 8797 +f 8678 8798 8797 +f 8678 8679 8799 +f 8678 8799 8798 +f 8679 8680 8799 +f 8680 8800 8799 +f 8680 8681 8801 +f 8680 8801 8800 +f 8681 8682 8801 +f 8682 8802 8801 +f 8682 8683 8803 +f 8682 8803 8802 +f 8683 8684 8803 +f 8684 8804 8803 +f 8684 8685 8805 +f 8684 8805 8804 +f 8685 8686 8805 +f 8686 8806 8805 +f 8686 8687 8807 +f 8686 8807 8806 +f 8687 8688 8807 +f 8688 8808 8807 +f 8688 8689 8809 +f 8688 8809 8808 +f 8689 8690 8809 +f 8690 8810 8809 +f 8690 8691 8811 +f 8690 8811 8810 +f 8691 8692 8811 +f 8692 8812 8811 +f 8692 8693 8813 +f 8692 8813 8812 +f 8693 8694 8813 +f 8694 8814 8813 +f 8694 8695 8815 +f 8694 8815 8814 +f 8695 8696 8815 +f 8696 8816 8815 +f 8696 8697 8817 +f 8696 8817 8816 +f 8697 8698 8817 +f 8698 8818 
8817 +f 8698 8699 8819 +f 8698 8819 8818 +f 8699 8700 8819 +f 8700 8820 8819 +f 8700 8701 8821 +f 8700 8821 8820 +f 8701 8702 8821 +f 8702 8822 8821 +f 8702 8703 8823 +f 8702 8823 8822 +f 8703 8704 8823 +f 8704 8824 8823 +f 8704 8705 8825 +f 8704 8825 8824 +f 8705 8706 8825 +f 8706 8826 8825 +f 8706 8707 8827 +f 8706 8827 8826 +f 8707 8708 8827 +f 8708 8828 8827 +f 8709 8710 8830 +f 8709 8830 8829 +f 8710 8711 8830 +f 8711 8831 8830 +f 8711 8712 8832 +f 8711 8832 8831 +f 8712 8713 8832 +f 8713 8833 8832 +f 8713 8714 8834 +f 8713 8834 8833 +f 8714 8715 8834 +f 8715 8835 8834 +f 8715 8716 8836 +f 8715 8836 8835 +f 8716 8717 8836 +f 8717 8837 8836 +f 8717 8718 8838 +f 8717 8838 8837 +f 8718 8719 8838 +f 8719 8839 8838 +f 8719 8720 8840 +f 8719 8840 8839 +f 8720 8721 8840 +f 8721 8841 8840 +f 8721 8722 8842 +f 8721 8842 8841 +f 8722 8723 8842 +f 8723 8843 8842 +f 8723 8724 8844 +f 8723 8844 8843 +f 8724 8725 8844 +f 8725 8845 8844 +f 8725 8726 8846 +f 8725 8846 8845 +f 8726 8727 8846 +f 8727 8847 8846 +f 8728 8729 8848 +f 8729 8849 8848 +f 8729 8730 8850 +f 8729 8850 8849 +f 8730 8731 8850 +f 8731 8851 8850 +f 8731 8732 8852 +f 8731 8852 8851 +f 8732 8733 8852 +f 8733 8853 8852 +f 8733 8734 8854 +f 8733 8854 8853 +f 8734 8735 8854 +f 8735 8855 8854 +f 8735 8736 8856 +f 8735 8856 8855 +f 8736 8737 8856 +f 8737 8857 8856 +f 8737 8738 8858 +f 8737 8858 8857 +f 8738 8739 8858 +f 8739 8859 8858 +f 8739 8740 8860 +f 8739 8860 8859 +f 8740 8741 8860 +f 8741 8861 8860 +f 8741 8742 8862 +f 8741 8862 8861 +f 8742 8743 8862 +f 8743 8863 8862 +f 8743 8744 8864 +f 8743 8864 8863 +f 8744 8745 8864 +f 8745 8865 8864 +f 8745 8746 8866 +f 8745 8866 8865 +f 8746 8747 8866 +f 8747 8867 8866 +f 8747 8748 8868 +f 8747 8868 8867 +f 8748 8749 8868 +f 8749 8869 8868 +f 8749 8750 8870 +f 8749 8870 8869 +f 8750 8751 8870 +f 8751 8871 8870 +f 8751 8752 8872 +f 8751 8872 8871 +f 8752 8753 8872 +f 8753 8873 8872 +f 8753 8754 8874 +f 8753 8874 8873 +f 8754 8755 8874 +f 8755 8875 8874 +f 8755 8756 8876 +f 8755 8876 8875 +f 8756 8757 8876 +f 8757 8877 8876 +f 8757 8758 8878 +f 8757 8878 8877 +f 8758 8759 8878 +f 8759 8879 8878 +f 8759 8760 8880 +f 8759 8880 8879 +f 8760 8761 8880 +f 8761 8881 8880 +f 8761 8762 8882 +f 8761 8882 8881 +f 8762 8763 8882 +f 8763 8883 8882 +f 8763 8764 8884 +f 8763 8884 8883 +f 8764 8765 8884 +f 8765 8885 8884 +f 8765 8766 8886 +f 8765 8886 8885 +f 8766 8767 8886 +f 8767 8887 8886 +f 8767 8768 8888 +f 8767 8888 8887 +f 8768 8769 8888 +f 8769 8889 8888 +f 8769 8770 8890 +f 8769 8890 8889 +f 8770 8771 8890 +f 8771 8891 8890 +f 8771 8772 8892 +f 8771 8892 8891 +f 8772 8773 8892 +f 8773 8893 8892 +f 8773 8774 8894 +f 8773 8894 8893 +f 8774 8775 8894 +f 8775 8895 8894 +f 8775 8776 8896 +f 8775 8896 8895 +f 8776 8777 8896 +f 8777 8897 8896 +f 8777 8778 8898 +f 8777 8898 8897 +f 8778 8779 8898 +f 8779 8899 8898 +f 8779 8780 8900 +f 8779 8900 8899 +f 8780 8781 8900 +f 8781 8901 8900 +f 8781 8782 8902 +f 8781 8902 8901 +f 8782 8783 8902 +f 8783 8903 8902 +f 8783 8784 8904 +f 8783 8904 8903 +f 8784 8785 8904 +f 8785 8905 8904 +f 8785 8786 8906 +f 8785 8906 8905 +f 8786 8787 8906 +f 8787 8907 8906 +f 8787 8788 8908 +f 8787 8908 8907 +f 8788 8789 8908 +f 8789 8909 8908 +f 8789 8790 8910 +f 8789 8910 8909 +f 8790 8791 8910 +f 8791 8911 8910 +f 8791 8792 8912 +f 8791 8912 8911 +f 8792 8793 8912 +f 8793 8913 8912 +f 8793 8794 8914 +f 8793 8914 8913 +f 8794 8795 8914 +f 8795 8915 8914 +f 8795 8796 8916 +f 8795 8916 8915 +f 8796 8797 8916 +f 8797 8917 8916 +f 8797 8798 8918 +f 8797 8918 8917 +f 8798 8799 8918 +f 
8799 8919 8918 +f 8799 8800 8920 +f 8799 8920 8919 +f 8800 8801 8920 +f 8801 8921 8920 +f 8801 8802 8922 +f 8801 8922 8921 +f 8802 8803 8922 +f 8803 8923 8922 +f 8803 8804 8924 +f 8803 8924 8923 +f 8804 8805 8924 +f 8805 8925 8924 +f 8805 8806 8926 +f 8805 8926 8925 +f 8806 8807 8926 +f 8807 8927 8926 +f 8807 8808 8928 +f 8807 8928 8927 +f 8808 8809 8928 +f 8809 8929 8928 +f 8809 8810 8930 +f 8809 8930 8929 +f 8810 8811 8930 +f 8811 8931 8930 +f 8811 8812 8932 +f 8811 8932 8931 +f 8812 8813 8932 +f 8813 8933 8932 +f 8813 8814 8934 +f 8813 8934 8933 +f 8814 8815 8934 +f 8815 8935 8934 +f 8815 8816 8936 +f 8815 8936 8935 +f 8816 8817 8936 +f 8817 8937 8936 +f 8817 8818 8938 +f 8817 8938 8937 +f 8818 8819 8938 +f 8819 8939 8938 +f 8819 8820 8940 +f 8819 8940 8939 +f 8820 8821 8940 +f 8821 8941 8940 +f 8821 8822 8942 +f 8821 8942 8941 +f 8822 8823 8942 +f 8823 8943 8942 +f 8823 8824 8944 +f 8823 8944 8943 +f 8824 8825 8944 +f 8825 8945 8944 +f 8825 8826 8946 +f 8825 8946 8945 +f 8826 8827 8946 +f 8827 8947 8946 +f 8827 8828 8948 +f 8827 8948 8947 +f 8829 8830 8949 +f 8830 8950 8949 +f 8830 8831 8951 +f 8830 8951 8950 +f 8831 8832 8951 +f 8832 8952 8951 +f 8832 8833 8953 +f 8832 8953 8952 +f 8833 8834 8953 +f 8834 8954 8953 +f 8834 8835 8955 +f 8834 8955 8954 +f 8835 8836 8955 +f 8836 8956 8955 +f 8836 8837 8957 +f 8836 8957 8956 +f 8837 8838 8957 +f 8838 8958 8957 +f 8838 8839 8959 +f 8838 8959 8958 +f 8839 8840 8959 +f 8840 8960 8959 +f 8840 8841 8961 +f 8840 8961 8960 +f 8841 8842 8961 +f 8842 8962 8961 +f 8842 8843 8963 +f 8842 8963 8962 +f 8843 8844 8963 +f 8844 8964 8963 +f 8844 8845 8965 +f 8844 8965 8964 +f 8845 8846 8965 +f 8846 8966 8965 +f 8846 8847 8967 +f 8846 8967 8966 +f 8848 8849 8969 +f 8848 8969 8968 +f 8849 8850 8969 +f 8850 8970 8969 +f 8850 8851 8971 +f 8850 8971 8970 +f 8851 8852 8971 +f 8852 8972 8971 +f 8852 8853 8973 +f 8852 8973 8972 +f 8853 8854 8973 +f 8854 8974 8973 +f 8854 8855 8975 +f 8854 8975 8974 +f 8855 8856 8975 +f 8856 8976 8975 +f 8856 8857 8977 +f 8856 8977 8976 +f 8857 8858 8977 +f 8858 8978 8977 +f 8858 8859 8979 +f 8858 8979 8978 +f 8859 8860 8979 +f 8860 8980 8979 +f 8860 8861 8981 +f 8860 8981 8980 +f 8861 8862 8981 +f 8862 8982 8981 +f 8862 8863 8983 +f 8862 8983 8982 +f 8863 8864 8983 +f 8864 8984 8983 +f 8864 8865 8985 +f 8864 8985 8984 +f 8865 8866 8985 +f 8866 8986 8985 +f 8866 8867 8987 +f 8866 8987 8986 +f 8867 8868 8987 +f 8868 8988 8987 +f 8868 8869 8989 +f 8868 8989 8988 +f 8869 8870 8989 +f 8870 8990 8989 +f 8870 8871 8991 +f 8870 8991 8990 +f 8871 8872 8991 +f 8872 8992 8991 +f 8872 8873 8993 +f 8872 8993 8992 +f 8873 8874 8993 +f 8874 8994 8993 +f 8874 8875 8995 +f 8874 8995 8994 +f 8875 8876 8995 +f 8876 8996 8995 +f 8876 8877 8997 +f 8876 8997 8996 +f 8877 8878 8997 +f 8878 8998 8997 +f 8878 8879 8999 +f 8878 8999 8998 +f 8879 8880 8999 +f 8880 9000 8999 +f 8880 8881 9001 +f 8880 9001 9000 +f 8881 8882 9001 +f 8882 9002 9001 +f 8882 8883 9003 +f 8882 9003 9002 +f 8883 8884 9003 +f 8884 9004 9003 +f 8884 8885 9005 +f 8884 9005 9004 +f 8885 8886 9005 +f 8886 9006 9005 +f 8886 8887 9007 +f 8886 9007 9006 +f 8887 8888 9007 +f 8888 9008 9007 +f 8888 8889 9009 +f 8888 9009 9008 +f 8889 8890 9009 +f 8890 9010 9009 +f 8890 8891 9011 +f 8890 9011 9010 +f 8891 8892 9011 +f 8892 9012 9011 +f 8892 8893 9013 +f 8892 9013 9012 +f 8893 8894 9013 +f 8894 9014 9013 +f 8894 8895 9015 +f 8894 9015 9014 +f 8895 8896 9015 +f 8896 9016 9015 +f 8896 8897 9017 +f 8896 9017 9016 +f 8897 8898 9017 +f 8898 9018 9017 +f 8898 8899 9019 +f 8898 9019 9018 +f 8899 
8900 9019 +f 8900 9020 9019 +f 8900 8901 9021 +f 8900 9021 9020 +f 8901 8902 9021 +f 8902 9022 9021 +f 8902 8903 9023 +f 8902 9023 9022 +f 8903 8904 9023 +f 8904 9024 9023 +f 8904 8905 9025 +f 8904 9025 9024 +f 8905 8906 9025 +f 8906 9026 9025 +f 8906 8907 9027 +f 8906 9027 9026 +f 8907 8908 9027 +f 8908 9028 9027 +f 8908 8909 9029 +f 8908 9029 9028 +f 8909 8910 9029 +f 8910 9030 9029 +f 8910 8911 9031 +f 8910 9031 9030 +f 8911 8912 9031 +f 8912 9032 9031 +f 8912 8913 9033 +f 8912 9033 9032 +f 8913 8914 9033 +f 8914 9034 9033 +f 8914 8915 9035 +f 8914 9035 9034 +f 8915 8916 9035 +f 8916 9036 9035 +f 8916 8917 9037 +f 8916 9037 9036 +f 8917 8918 9037 +f 8918 9038 9037 +f 8918 8919 9039 +f 8918 9039 9038 +f 8919 8920 9039 +f 8920 9040 9039 +f 8920 8921 9041 +f 8920 9041 9040 +f 8921 8922 9041 +f 8922 9042 9041 +f 8922 8923 9043 +f 8922 9043 9042 +f 8923 8924 9043 +f 8924 9044 9043 +f 8924 8925 9045 +f 8924 9045 9044 +f 8925 8926 9045 +f 8926 9046 9045 +f 8926 8927 9047 +f 8926 9047 9046 +f 8927 8928 9047 +f 8928 9048 9047 +f 8928 8929 9049 +f 8928 9049 9048 +f 8929 8930 9049 +f 8930 9050 9049 +f 8930 8931 9051 +f 8930 9051 9050 +f 8931 8932 9051 +f 8932 9052 9051 +f 8932 8933 9053 +f 8932 9053 9052 +f 8933 8934 9053 +f 8934 9054 9053 +f 8934 8935 9055 +f 8934 9055 9054 +f 8935 8936 9055 +f 8936 9056 9055 +f 8936 8937 9057 +f 8936 9057 9056 +f 8937 8938 9057 +f 8938 9058 9057 +f 8938 8939 9059 +f 8938 9059 9058 +f 8939 8940 9059 +f 8940 9060 9059 +f 8940 8941 9061 +f 8940 9061 9060 +f 8941 8942 9061 +f 8942 9062 9061 +f 8942 8943 9063 +f 8942 9063 9062 +f 8943 8944 9063 +f 8944 9064 9063 +f 8944 8945 9065 +f 8944 9065 9064 +f 8945 8946 9065 +f 8946 9066 9065 +f 8946 8947 9067 +f 8946 9067 9066 +f 8947 8948 9067 +f 8948 9068 9067 +f 8949 8950 9070 +f 8949 9070 9069 +f 8950 8951 9070 +f 8951 9071 9070 +f 8951 8952 9072 +f 8951 9072 9071 +f 8952 8953 9072 +f 8953 9073 9072 +f 8953 8954 9074 +f 8953 9074 9073 +f 8954 8955 9074 +f 8955 9075 9074 +f 8955 8956 9076 +f 8955 9076 9075 +f 8956 8957 9076 +f 8957 9077 9076 +f 8957 8958 9078 +f 8957 9078 9077 +f 8958 8959 9078 +f 8959 9079 9078 +f 8959 8960 9080 +f 8959 9080 9079 +f 8960 8961 9080 +f 8961 9081 9080 +f 8961 8962 9082 +f 8961 9082 9081 +f 8962 8963 9082 +f 8963 9083 9082 +f 8963 8964 9084 +f 8963 9084 9083 +f 8964 8965 9084 +f 8965 9085 9084 +f 8965 8966 9086 +f 8965 9086 9085 +f 8966 8967 9086 +f 8967 9087 9086 +f 8968 8969 9088 +f 8969 9089 9088 +f 8969 8970 9090 +f 8969 9090 9089 +f 8970 8971 9090 +f 8971 9091 9090 +f 8971 8972 9092 +f 8971 9092 9091 +f 8972 8973 9092 +f 8973 9093 9092 +f 8973 8974 9094 +f 8973 9094 9093 +f 8974 8975 9094 +f 8975 9095 9094 +f 8975 8976 9096 +f 8975 9096 9095 +f 8976 8977 9096 +f 8977 9097 9096 +f 8977 8978 9098 +f 8977 9098 9097 +f 8978 8979 9098 +f 8979 9099 9098 +f 8979 8980 9100 +f 8979 9100 9099 +f 8980 8981 9100 +f 8981 9101 9100 +f 8981 8982 9102 +f 8981 9102 9101 +f 8982 8983 9102 +f 8983 9103 9102 +f 8983 8984 9104 +f 8983 9104 9103 +f 8984 8985 9104 +f 8985 9105 9104 +f 8985 8986 9106 +f 8985 9106 9105 +f 8986 8987 9106 +f 8987 9107 9106 +f 8987 8988 9108 +f 8987 9108 9107 +f 8988 8989 9108 +f 8989 9109 9108 +f 8989 8990 9110 +f 8989 9110 9109 +f 8990 8991 9110 +f 8991 9111 9110 +f 8991 8992 9112 +f 8991 9112 9111 +f 8992 8993 9112 +f 8993 9113 9112 +f 8993 8994 9114 +f 8993 9114 9113 +f 8994 8995 9114 +f 8995 9115 9114 +f 8995 8996 9116 +f 8995 9116 9115 +f 8996 8997 9116 +f 8997 9117 9116 +f 8997 8998 9118 +f 8997 9118 9117 +f 8998 8999 9118 +f 8999 9119 9118 +f 8999 9000 9120 +f 8999 9120 
+f 9000 9001 9120
+f 9001 9121 9120
[... roughly 4,000 further Wavefront OBJ face ("f v1 v2 v3") lines elided: a regular triangulated-grid mesh over vertex indices 9000-10890, part of a mesh asset added by this diff ...]
10890 +f 10762 10891 10890 +f 10762 10763 10892 +f 10762 10892 10891 +f 10763 10764 10892 +f 10764 10893 10892 +f 10764 10765 10894 +f 10764 10894 10893 +f 10765 10766 10894 +f 10766 10895 10894 +f 10766 10767 10896 +f 10766 10896 10895 +f 10767 10768 10896 +f 10768 10897 10896 +f 10768 10769 10898 +f 10768 10898 10897 +f 10769 10770 10898 +f 10770 10899 10898 +f 10770 10771 10900 +f 10770 10900 10899 +f 10771 10772 10900 +f 10772 10901 10900 +f 10772 10773 10902 +f 10772 10902 10901 +f 10773 10774 10902 +f 10774 10903 10902 +f 10774 10775 10904 +f 10774 10904 10903 +f 10775 10776 10904 +f 10776 10905 10904 +f 10776 10777 10906 +f 10776 10906 10905 +f 10777 10778 10906 +f 10778 10907 10906 +f 10778 10779 10908 +f 10778 10908 10907 +f 10779 10780 10908 +f 10780 10909 10908 +f 10780 10781 10910 +f 10780 10910 10909 +f 10781 10782 10910 +f 10782 10911 10910 +f 10782 10783 10912 +f 10782 10912 10911 +f 10783 10784 10912 +f 10784 10913 10912 +f 10784 10785 10914 +f 10784 10914 10913 +f 10785 10786 10914 +f 10786 10915 10914 +f 10786 10787 10916 +f 10786 10916 10915 +f 10787 10788 10916 +f 10788 10917 10916 +f 10788 10789 10918 +f 10788 10918 10917 +f 10789 10790 10918 +f 10790 10919 10918 +f 10790 10791 10920 +f 10790 10920 10919 +f 10791 10792 10920 +f 10792 10921 10920 +f 10792 10793 10922 +f 10792 10922 10921 +f 10793 10794 10922 +f 10794 10923 10922 +f 10794 10795 10924 +f 10794 10924 10923 +f 10795 10796 10924 +f 10796 10925 10924 +f 10796 10797 10926 +f 10796 10926 10925 +f 10797 10798 10926 +f 10798 10927 10926 +f 10798 10799 10928 +f 10798 10928 10927 +f 10799 10800 10928 +f 10800 10929 10928 +f 10800 10801 10930 +f 10800 10930 10929 +f 10801 10802 10930 +f 10802 10931 10930 +f 10802 10803 10932 +f 10802 10932 10931 +f 10803 10804 10932 +f 10804 10933 10932 +f 10804 10805 10934 +f 10804 10934 10933 +f 10805 10806 10934 +f 10806 10935 10934 +f 10806 10807 10936 +f 10806 10936 10935 +f 10807 10808 10936 +f 10808 10937 10936 +f 10808 10809 10938 +f 10808 10938 10937 +f 10809 10810 10938 +f 10810 10939 10938 +f 10810 10811 10940 +f 10810 10940 10939 +f 10811 10812 10940 +f 10812 10941 10940 +f 10812 10813 10942 +f 10812 10942 10941 +f 10813 10814 10942 +f 10814 10943 10942 +f 10814 10815 10944 +f 10814 10944 10943 +f 10815 10816 10944 +f 10816 10945 10944 +f 10816 10817 10946 +f 10816 10946 10945 +f 10817 10818 10946 +f 10818 10947 10946 +f 10818 10819 10948 +f 10818 10948 10947 +f 10820 10821 10950 +f 10820 10950 10949 +f 10821 10822 10950 +f 10822 10951 10950 +f 10822 10823 10952 +f 10822 10952 10951 +f 10823 10824 10952 +f 10824 10953 10952 +f 10824 10825 10954 +f 10824 10954 10953 +f 10825 10826 10954 +f 10826 10955 10954 +f 10826 10827 10956 +f 10826 10956 10955 +f 10827 10828 10956 +f 10828 10957 10956 +f 10828 10829 10958 +f 10828 10958 10957 +f 10829 10830 10958 +f 10830 10959 10958 +f 10830 10831 10960 +f 10830 10960 10959 +f 10831 10832 10960 +f 10832 10961 10960 +f 10832 10833 10962 +f 10832 10962 10961 +f 10833 10834 10962 +f 10834 10963 10962 +f 10834 10835 10964 +f 10834 10964 10963 +f 10835 10836 10964 +f 10836 10965 10964 +f 10836 10837 10966 +f 10836 10966 10965 +f 10837 10838 10966 +f 10838 10967 10966 +f 10838 10839 10968 +f 10838 10968 10967 +f 10839 10840 10968 +f 10840 10969 10968 +f 10840 10841 10970 +f 10840 10970 10969 +f 10841 10842 10970 +f 10842 10971 10970 +f 10842 10843 10972 +f 10842 10972 10971 +f 10843 10844 10972 +f 10844 10973 10972 +f 10844 10845 10974 +f 10844 10974 10973 +f 10845 10846 10974 +f 10846 10975 10974 +f 10846 10847 10976 +f 10846 10976 10975 
+f 10847 10848 10976 +f 10848 10977 10976 +f 10848 10849 10978 +f 10848 10978 10977 +f 10849 10850 10978 +f 10850 10979 10978 +f 10850 10851 10980 +f 10850 10980 10979 +f 10851 10852 10980 +f 10852 10981 10980 +f 10852 10853 10982 +f 10852 10982 10981 +f 10853 10854 10982 +f 10854 10983 10982 +f 10854 10855 10984 +f 10854 10984 10983 +f 10855 10856 10984 +f 10856 10985 10984 +f 10856 10857 10986 +f 10856 10986 10985 +f 10857 10858 10986 +f 10858 10987 10986 +f 10858 10859 10988 +f 10858 10988 10987 +f 10859 10860 10988 +f 10860 10989 10988 +f 10860 10861 10990 +f 10860 10990 10989 +f 10861 10862 10990 +f 10862 10991 10990 +f 10862 10863 10992 +f 10862 10992 10991 +f 10863 10864 10992 +f 10864 10993 10992 +f 10864 10865 10994 +f 10864 10994 10993 +f 10865 10866 10994 +f 10866 10995 10994 +f 10866 10867 10996 +f 10866 10996 10995 +f 10867 10868 10996 +f 10868 10997 10996 +f 10868 10869 10998 +f 10868 10998 10997 +f 10869 10870 10998 +f 10870 10999 10998 +f 10870 10871 11000 +f 10870 11000 10999 +f 10871 10872 11000 +f 10872 11001 11000 +f 10872 10873 11002 +f 10872 11002 11001 +f 10873 10874 11002 +f 10874 11003 11002 +f 10874 10875 11004 +f 10874 11004 11003 +f 10875 10876 11004 +f 10876 11005 11004 +f 10876 10877 11006 +f 10876 11006 11005 +f 10877 10878 11006 +f 10878 11007 11006 +f 10878 10879 11008 +f 10878 11008 11007 +f 10879 10880 11008 +f 10880 11009 11008 +f 10880 10881 11010 +f 10880 11010 11009 +f 10881 10882 11010 +f 10882 11011 11010 +f 10882 10883 11012 +f 10882 11012 11011 +f 10883 10884 11012 +f 10884 11013 11012 +f 10884 10885 11014 +f 10884 11014 11013 +f 10885 10886 11014 +f 10886 11015 11014 +f 10886 10887 11016 +f 10886 11016 11015 +f 10887 10888 11016 +f 10888 11017 11016 +f 10888 10889 11018 +f 10888 11018 11017 +f 10889 10890 11018 +f 10890 11019 11018 +f 10890 10891 11020 +f 10890 11020 11019 +f 10891 10892 11020 +f 10892 11021 11020 +f 10892 10893 11022 +f 10892 11022 11021 +f 10893 10894 11022 +f 10894 11023 11022 +f 10894 10895 11024 +f 10894 11024 11023 +f 10895 10896 11024 +f 10896 11025 11024 +f 10896 10897 11026 +f 10896 11026 11025 +f 10897 10898 11026 +f 10898 11027 11026 +f 10898 10899 11028 +f 10898 11028 11027 +f 10899 10900 11028 +f 10900 11029 11028 +f 10900 10901 11030 +f 10900 11030 11029 +f 10901 10902 11030 +f 10902 11031 11030 +f 10902 10903 11032 +f 10902 11032 11031 +f 10903 10904 11032 +f 10904 11033 11032 +f 10904 10905 11034 +f 10904 11034 11033 +f 10905 10906 11034 +f 10906 11035 11034 +f 10906 10907 11036 +f 10906 11036 11035 +f 10907 10908 11036 +f 10908 11037 11036 +f 10908 10909 11038 +f 10908 11038 11037 +f 10909 10910 11038 +f 10910 11039 11038 +f 10910 10911 11040 +f 10910 11040 11039 +f 10911 10912 11040 +f 10912 11041 11040 +f 10912 10913 11042 +f 10912 11042 11041 +f 10913 10914 11042 +f 10914 11043 11042 +f 10914 10915 11044 +f 10914 11044 11043 +f 10915 10916 11044 +f 10916 11045 11044 +f 10916 10917 11046 +f 10916 11046 11045 +f 10917 10918 11046 +f 10918 11047 11046 +f 10918 10919 11048 +f 10918 11048 11047 +f 10919 10920 11048 +f 10920 11049 11048 +f 10920 10921 11050 +f 10920 11050 11049 +f 10921 10922 11050 +f 10922 11051 11050 +f 10922 10923 11052 +f 10922 11052 11051 +f 10923 10924 11052 +f 10924 11053 11052 +f 10924 10925 11054 +f 10924 11054 11053 +f 10925 10926 11054 +f 10926 11055 11054 +f 10926 10927 11056 +f 10926 11056 11055 +f 10927 10928 11056 +f 10928 11057 11056 +f 10928 10929 11058 +f 10928 11058 11057 +f 10929 10930 11058 +f 10930 11059 11058 +f 10930 10931 11060 +f 10930 11060 11059 +f 10931 10932 11060 +f 
10932 11061 11060 +f 10932 10933 11062 +f 10932 11062 11061 +f 10933 10934 11062 +f 10934 11063 11062 +f 10934 10935 11064 +f 10934 11064 11063 +f 10935 10936 11064 +f 10936 11065 11064 +f 10936 10937 11066 +f 10936 11066 11065 +f 10937 10938 11066 +f 10938 11067 11066 +f 10938 10939 11068 +f 10938 11068 11067 +f 10939 10940 11068 +f 10940 11069 11068 +f 10940 10941 11070 +f 10940 11070 11069 +f 10941 10942 11070 +f 10942 11071 11070 +f 10942 10943 11072 +f 10942 11072 11071 +f 10943 10944 11072 +f 10944 11073 11072 +f 10944 10945 11074 +f 10944 11074 11073 +f 10945 10946 11074 +f 10946 11075 11074 +f 10946 10947 11076 +f 10946 11076 11075 +f 10947 10948 11076 +f 10948 11077 11076 +f 10949 10950 11078 +f 10950 11079 11078 +f 10950 10951 11080 +f 10950 11080 11079 +f 10951 10952 11080 +f 10952 11081 11080 +f 10952 10953 11082 +f 10952 11082 11081 +f 10953 10954 11082 +f 10954 11083 11082 +f 10954 10955 11084 +f 10954 11084 11083 +f 10955 10956 11084 +f 10956 11085 11084 +f 10956 10957 11086 +f 10956 11086 11085 +f 10957 10958 11086 +f 10958 11087 11086 +f 10958 10959 11088 +f 10958 11088 11087 +f 10959 10960 11088 +f 10960 11089 11088 +f 10960 10961 11090 +f 10960 11090 11089 +f 10961 10962 11090 +f 10962 11091 11090 +f 10962 10963 11092 +f 10962 11092 11091 +f 10963 10964 11092 +f 10964 11093 11092 +f 10964 10965 11094 +f 10964 11094 11093 +f 10965 10966 11094 +f 10966 11095 11094 +f 10966 10967 11096 +f 10966 11096 11095 +f 10967 10968 11096 +f 10968 11097 11096 +f 10968 10969 11098 +f 10968 11098 11097 +f 10969 10970 11098 +f 10970 11099 11098 +f 10970 10971 11100 +f 10970 11100 11099 +f 10971 10972 11100 +f 10972 11101 11100 +f 10972 10973 11102 +f 10972 11102 11101 +f 10973 10974 11102 +f 10974 11103 11102 +f 10974 10975 11104 +f 10974 11104 11103 +f 10975 10976 11104 +f 10976 11105 11104 +f 10976 10977 11106 +f 10976 11106 11105 +f 10977 10978 11106 +f 10978 11107 11106 +f 10978 10979 11108 +f 10978 11108 11107 +f 10979 10980 11108 +f 10980 11109 11108 +f 10980 10981 11110 +f 10980 11110 11109 +f 10981 10982 11110 +f 10982 11111 11110 +f 10982 10983 11112 +f 10982 11112 11111 +f 10983 10984 11112 +f 10984 11113 11112 +f 10984 10985 11114 +f 10984 11114 11113 +f 10985 10986 11114 +f 10986 11115 11114 +f 10986 10987 11116 +f 10986 11116 11115 +f 10987 10988 11116 +f 10988 11117 11116 +f 10988 10989 11118 +f 10988 11118 11117 +f 10989 10990 11118 +f 10990 11119 11118 +f 10990 10991 11120 +f 10990 11120 11119 +f 10991 10992 11120 +f 10992 11121 11120 +f 10992 10993 11122 +f 10992 11122 11121 +f 10993 10994 11122 +f 10994 11123 11122 +f 10994 10995 11124 +f 10994 11124 11123 +f 10995 10996 11124 +f 10996 11125 11124 +f 10996 10997 11126 +f 10996 11126 11125 +f 10997 10998 11126 +f 10998 11127 11126 +f 10998 10999 11128 +f 10998 11128 11127 +f 10999 11000 11128 +f 11000 11129 11128 +f 11000 11001 11130 +f 11000 11130 11129 +f 11001 11002 11130 +f 11002 11131 11130 +f 11002 11003 11132 +f 11002 11132 11131 +f 11003 11004 11132 +f 11004 11133 11132 +f 11004 11005 11134 +f 11004 11134 11133 +f 11005 11006 11134 +f 11006 11135 11134 +f 11006 11007 11136 +f 11006 11136 11135 +f 11007 11008 11136 +f 11008 11137 11136 +f 11008 11009 11138 +f 11008 11138 11137 +f 11009 11010 11138 +f 11010 11139 11138 +f 11010 11011 11140 +f 11010 11140 11139 +f 11011 11012 11140 +f 11012 11141 11140 +f 11012 11013 11142 +f 11012 11142 11141 +f 11013 11014 11142 +f 11014 11143 11142 +f 11014 11015 11144 +f 11014 11144 11143 +f 11015 11016 11144 +f 11016 11145 11144 +f 11016 11017 11146 +f 11016 11146 11145 +f 11017 
11018 11146 +f 11018 11147 11146 +f 11018 11019 11148 +f 11018 11148 11147 +f 11019 11020 11148 +f 11020 11149 11148 +f 11020 11021 11150 +f 11020 11150 11149 +f 11021 11022 11150 +f 11022 11151 11150 +f 11022 11023 11152 +f 11022 11152 11151 +f 11023 11024 11152 +f 11024 11153 11152 +f 11024 11025 11154 +f 11024 11154 11153 +f 11025 11026 11154 +f 11026 11155 11154 +f 11026 11027 11156 +f 11026 11156 11155 +f 11027 11028 11156 +f 11028 11157 11156 +f 11028 11029 11158 +f 11028 11158 11157 +f 11029 11030 11158 +f 11030 11159 11158 +f 11030 11031 11160 +f 11030 11160 11159 +f 11031 11032 11160 +f 11032 11161 11160 +f 11032 11033 11162 +f 11032 11162 11161 +f 11033 11034 11162 +f 11034 11163 11162 +f 11034 11035 11164 +f 11034 11164 11163 +f 11035 11036 11164 +f 11036 11165 11164 +f 11036 11037 11166 +f 11036 11166 11165 +f 11037 11038 11166 +f 11038 11167 11166 +f 11038 11039 11168 +f 11038 11168 11167 +f 11039 11040 11168 +f 11040 11169 11168 +f 11040 11041 11170 +f 11040 11170 11169 +f 11041 11042 11170 +f 11042 11171 11170 +f 11042 11043 11172 +f 11042 11172 11171 +f 11043 11044 11172 +f 11044 11173 11172 +f 11044 11045 11174 +f 11044 11174 11173 +f 11045 11046 11174 +f 11046 11175 11174 +f 11046 11047 11176 +f 11046 11176 11175 +f 11047 11048 11176 +f 11048 11177 11176 +f 11048 11049 11178 +f 11048 11178 11177 +f 11049 11050 11178 +f 11050 11179 11178 +f 11050 11051 11180 +f 11050 11180 11179 +f 11051 11052 11180 +f 11052 11181 11180 +f 11052 11053 11182 +f 11052 11182 11181 +f 11053 11054 11182 +f 11054 11183 11182 +f 11054 11055 11184 +f 11054 11184 11183 +f 11055 11056 11184 +f 11056 11185 11184 +f 11056 11057 11186 +f 11056 11186 11185 +f 11057 11058 11186 +f 11058 11187 11186 +f 11058 11059 11188 +f 11058 11188 11187 +f 11059 11060 11188 +f 11060 11189 11188 +f 11060 11061 11190 +f 11060 11190 11189 +f 11061 11062 11190 +f 11062 11191 11190 +f 11062 11063 11192 +f 11062 11192 11191 +f 11063 11064 11192 +f 11064 11193 11192 +f 11064 11065 11194 +f 11064 11194 11193 +f 11065 11066 11194 +f 11066 11195 11194 +f 11066 11067 11196 +f 11066 11196 11195 +f 11067 11068 11196 +f 11068 11197 11196 +f 11068 11069 11198 +f 11068 11198 11197 +f 11069 11070 11198 +f 11070 11199 11198 +f 11070 11071 11200 +f 11070 11200 11199 +f 11071 11072 11200 +f 11072 11201 11200 +f 11072 11073 11202 +f 11072 11202 11201 +f 11073 11074 11202 +f 11074 11203 11202 +f 11074 11075 11204 +f 11074 11204 11203 +f 11075 11076 11204 +f 11076 11205 11204 +f 11076 11077 11206 +f 11076 11206 11205 +f 11078 11079 11208 +f 11078 11208 11207 +f 11079 11080 11208 +f 11080 11209 11208 +f 11080 11081 11210 +f 11080 11210 11209 +f 11081 11082 11210 +f 11082 11211 11210 +f 11082 11083 11212 +f 11082 11212 11211 +f 11083 11084 11212 +f 11084 11213 11212 +f 11084 11085 11214 +f 11084 11214 11213 +f 11085 11086 11214 +f 11086 11215 11214 +f 11086 11087 11216 +f 11086 11216 11215 +f 11087 11088 11216 +f 11088 11217 11216 +f 11088 11089 11218 +f 11088 11218 11217 +f 11089 11090 11218 +f 11090 11219 11218 +f 11090 11091 11220 +f 11090 11220 11219 +f 11091 11092 11220 +f 11092 11221 11220 +f 11092 11093 11222 +f 11092 11222 11221 +f 11093 11094 11222 +f 11094 11223 11222 +f 11094 11095 11224 +f 11094 11224 11223 +f 11095 11096 11224 +f 11096 11225 11224 +f 11096 11097 11226 +f 11096 11226 11225 +f 11097 11098 11226 +f 11098 11227 11226 +f 11098 11099 11228 +f 11098 11228 11227 +f 11099 11100 11228 +f 11100 11229 11228 +f 11100 11101 11230 +f 11100 11230 11229 +f 11101 11102 11230 +f 11102 11231 11230 +f 11102 11103 11232 +f 11102 11232 
11231 +f 11103 11104 11232 +f 11104 11233 11232 +f 11104 11105 11234 +f 11104 11234 11233 +f 11105 11106 11234 +f 11106 11235 11234 +f 11106 11107 11236 +f 11106 11236 11235 +f 11107 11108 11236 +f 11108 11237 11236 +f 11108 11109 11238 +f 11108 11238 11237 +f 11109 11110 11238 +f 11110 11239 11238 +f 11110 11111 11240 +f 11110 11240 11239 +f 11111 11112 11240 +f 11112 11241 11240 +f 11112 11113 11242 +f 11112 11242 11241 +f 11113 11114 11242 +f 11114 11243 11242 +f 11114 11115 11244 +f 11114 11244 11243 +f 11115 11116 11244 +f 11116 11245 11244 +f 11116 11117 11246 +f 11116 11246 11245 +f 11117 11118 11246 +f 11118 11247 11246 +f 11118 11119 11248 +f 11118 11248 11247 +f 11119 11120 11248 +f 11120 11249 11248 +f 11120 11121 11250 +f 11120 11250 11249 +f 11121 11122 11250 +f 11122 11251 11250 +f 11122 11123 11252 +f 11122 11252 11251 +f 11123 11124 11252 +f 11124 11253 11252 +f 11124 11125 11254 +f 11124 11254 11253 +f 11125 11126 11254 +f 11126 11255 11254 +f 11126 11127 11256 +f 11126 11256 11255 +f 11127 11128 11256 +f 11128 11257 11256 +f 11128 11129 11258 +f 11128 11258 11257 +f 11129 11130 11258 +f 11130 11259 11258 +f 11130 11131 11260 +f 11130 11260 11259 +f 11131 11132 11260 +f 11132 11261 11260 +f 11132 11133 11262 +f 11132 11262 11261 +f 11133 11134 11262 +f 11134 11263 11262 +f 11134 11135 11264 +f 11134 11264 11263 +f 11135 11136 11264 +f 11136 11265 11264 +f 11136 11137 11266 +f 11136 11266 11265 +f 11137 11138 11266 +f 11138 11267 11266 +f 11138 11139 11268 +f 11138 11268 11267 +f 11139 11140 11268 +f 11140 11269 11268 +f 11140 11141 11270 +f 11140 11270 11269 +f 11141 11142 11270 +f 11142 11271 11270 +f 11142 11143 11272 +f 11142 11272 11271 +f 11143 11144 11272 +f 11144 11273 11272 +f 11144 11145 11274 +f 11144 11274 11273 +f 11145 11146 11274 +f 11146 11275 11274 +f 11146 11147 11276 +f 11146 11276 11275 +f 11147 11148 11276 +f 11148 11277 11276 +f 11148 11149 11278 +f 11148 11278 11277 +f 11149 11150 11278 +f 11150 11279 11278 +f 11150 11151 11280 +f 11150 11280 11279 +f 11151 11152 11280 +f 11152 11281 11280 +f 11152 11153 11282 +f 11152 11282 11281 +f 11153 11154 11282 +f 11154 11283 11282 +f 11154 11155 11284 +f 11154 11284 11283 +f 11155 11156 11284 +f 11156 11285 11284 +f 11156 11157 11286 +f 11156 11286 11285 +f 11157 11158 11286 +f 11158 11287 11286 +f 11158 11159 11288 +f 11158 11288 11287 +f 11159 11160 11288 +f 11160 11289 11288 +f 11160 11161 11290 +f 11160 11290 11289 +f 11161 11162 11290 +f 11162 11291 11290 +f 11162 11163 11292 +f 11162 11292 11291 +f 11163 11164 11292 +f 11164 11293 11292 +f 11164 11165 11294 +f 11164 11294 11293 +f 11165 11166 11294 +f 11166 11295 11294 +f 11166 11167 11296 +f 11166 11296 11295 +f 11167 11168 11296 +f 11168 11297 11296 +f 11168 11169 11298 +f 11168 11298 11297 +f 11169 11170 11298 +f 11170 11299 11298 +f 11170 11171 11300 +f 11170 11300 11299 +f 11171 11172 11300 +f 11172 11301 11300 +f 11172 11173 11302 +f 11172 11302 11301 +f 11173 11174 11302 +f 11174 11303 11302 +f 11174 11175 11304 +f 11174 11304 11303 +f 11175 11176 11304 +f 11176 11305 11304 +f 11176 11177 11306 +f 11176 11306 11305 +f 11177 11178 11306 +f 11178 11307 11306 +f 11178 11179 11308 +f 11178 11308 11307 +f 11179 11180 11308 +f 11180 11309 11308 +f 11180 11181 11310 +f 11180 11310 11309 +f 11181 11182 11310 +f 11182 11311 11310 +f 11182 11183 11312 +f 11182 11312 11311 +f 11183 11184 11312 +f 11184 11313 11312 +f 11184 11185 11314 +f 11184 11314 11313 +f 11185 11186 11314 +f 11186 11315 11314 +f 11186 11187 11316 +f 11186 11316 11315 +f 11187 11188 11316 
+f 11188 11317 11316 +f 11188 11189 11318 +f 11188 11318 11317 +f 11189 11190 11318 +f 11190 11319 11318 +f 11190 11191 11320 +f 11190 11320 11319 +f 11191 11192 11320 +f 11192 11321 11320 +f 11192 11193 11322 +f 11192 11322 11321 +f 11193 11194 11322 +f 11194 11323 11322 +f 11194 11195 11324 +f 11194 11324 11323 +f 11195 11196 11324 +f 11196 11325 11324 +f 11196 11197 11326 +f 11196 11326 11325 +f 11197 11198 11326 +f 11198 11327 11326 +f 11198 11199 11328 +f 11198 11328 11327 +f 11199 11200 11328 +f 11200 11329 11328 +f 11200 11201 11330 +f 11200 11330 11329 +f 11201 11202 11330 +f 11202 11331 11330 +f 11202 11203 11332 +f 11202 11332 11331 +f 11203 11204 11332 +f 11204 11333 11332 +f 11204 11205 11334 +f 11204 11334 11333 +f 11205 11206 11334 +f 11206 11335 11334 +f 11207 11208 11336 +f 11208 11337 11336 +f 11208 11209 11338 +f 11208 11338 11337 +f 11209 11210 11338 +f 11210 11339 11338 +f 11210 11211 11340 +f 11210 11340 11339 +f 11211 11212 11340 +f 11212 11341 11340 +f 11212 11213 11342 +f 11212 11342 11341 +f 11213 11214 11342 +f 11214 11343 11342 +f 11214 11215 11344 +f 11214 11344 11343 +f 11215 11216 11344 +f 11216 11345 11344 +f 11216 11217 11346 +f 11216 11346 11345 +f 11217 11218 11346 +f 11218 11347 11346 +f 11218 11219 11348 +f 11218 11348 11347 +f 11219 11220 11348 +f 11220 11349 11348 +f 11220 11221 11350 +f 11220 11350 11349 +f 11221 11222 11350 +f 11222 11351 11350 +f 11222 11223 11352 +f 11222 11352 11351 +f 11223 11224 11352 +f 11224 11353 11352 +f 11224 11225 11354 +f 11224 11354 11353 +f 11225 11226 11354 +f 11226 11355 11354 +f 11226 11227 11356 +f 11226 11356 11355 +f 11227 11228 11356 +f 11228 11357 11356 +f 11228 11229 11358 +f 11228 11358 11357 +f 11229 11230 11358 +f 11230 11359 11358 +f 11230 11231 11360 +f 11230 11360 11359 +f 11231 11232 11360 +f 11232 11361 11360 +f 11232 11233 11362 +f 11232 11362 11361 +f 11233 11234 11362 +f 11234 11363 11362 +f 11234 11235 11364 +f 11234 11364 11363 +f 11235 11236 11364 +f 11236 11365 11364 +f 11236 11237 11366 +f 11236 11366 11365 +f 11237 11238 11366 +f 11238 11367 11366 +f 11238 11239 11368 +f 11238 11368 11367 +f 11239 11240 11368 +f 11240 11369 11368 +f 11240 11241 11370 +f 11240 11370 11369 +f 11241 11242 11370 +f 11242 11371 11370 +f 11242 11243 11372 +f 11242 11372 11371 +f 11243 11244 11372 +f 11244 11373 11372 +f 11244 11245 11374 +f 11244 11374 11373 +f 11245 11246 11374 +f 11246 11375 11374 +f 11246 11247 11376 +f 11246 11376 11375 +f 11247 11248 11376 +f 11248 11377 11376 +f 11248 11249 11378 +f 11248 11378 11377 +f 11249 11250 11378 +f 11250 11379 11378 +f 11250 11251 11380 +f 11250 11380 11379 +f 11251 11252 11380 +f 11252 11381 11380 +f 11252 11253 11382 +f 11252 11382 11381 +f 11253 11254 11382 +f 11254 11383 11382 +f 11254 11255 11384 +f 11254 11384 11383 +f 11255 11256 11384 +f 11256 11385 11384 +f 11256 11257 11386 +f 11256 11386 11385 +f 11257 11258 11386 +f 11258 11387 11386 +f 11258 11259 11388 +f 11258 11388 11387 +f 11259 11260 11388 +f 11260 11389 11388 +f 11260 11261 11390 +f 11260 11390 11389 +f 11261 11262 11390 +f 11262 11391 11390 +f 11262 11263 11392 +f 11262 11392 11391 +f 11263 11264 11392 +f 11264 11393 11392 +f 11264 11265 11394 +f 11264 11394 11393 +f 11265 11266 11394 +f 11266 11395 11394 +f 11266 11267 11396 +f 11266 11396 11395 +f 11267 11268 11396 +f 11268 11397 11396 +f 11268 11269 11398 +f 11268 11398 11397 +f 11269 11270 11398 +f 11270 11399 11398 +f 11270 11271 11400 +f 11270 11400 11399 +f 11271 11272 11400 +f 11272 11401 11400 +f 11272 11273 11402 +f 11272 11402 11401 +f 
11273 11274 11402 +f 11274 11403 11402 +f 11274 11275 11404 +f 11274 11404 11403 +f 11275 11276 11404 +f 11276 11405 11404 +f 11276 11277 11406 +f 11276 11406 11405 +f 11277 11278 11406 +f 11278 11407 11406 +f 11278 11279 11408 +f 11278 11408 11407 +f 11279 11280 11408 +f 11280 11409 11408 +f 11280 11281 11410 +f 11280 11410 11409 +f 11281 11282 11410 +f 11282 11411 11410 +f 11282 11283 11412 +f 11282 11412 11411 +f 11283 11284 11412 +f 11284 11413 11412 +f 11284 11285 11414 +f 11284 11414 11413 +f 11285 11286 11414 +f 11286 11415 11414 +f 11286 11287 11416 +f 11286 11416 11415 +f 11287 11288 11416 +f 11288 11417 11416 +f 11288 11289 11418 +f 11288 11418 11417 +f 11289 11290 11418 +f 11290 11419 11418 +f 11290 11291 11420 +f 11290 11420 11419 +f 11291 11292 11420 +f 11292 11421 11420 +f 11292 11293 11422 +f 11292 11422 11421 +f 11293 11294 11422 +f 11294 11423 11422 +f 11294 11295 11424 +f 11294 11424 11423 +f 11295 11296 11424 +f 11296 11425 11424 +f 11296 11297 11426 +f 11296 11426 11425 +f 11297 11298 11426 +f 11298 11427 11426 +f 11298 11299 11428 +f 11298 11428 11427 +f 11299 11300 11428 +f 11300 11429 11428 +f 11300 11301 11430 +f 11300 11430 11429 +f 11301 11302 11430 +f 11302 11431 11430 +f 11302 11303 11432 +f 11302 11432 11431 +f 11303 11304 11432 +f 11304 11433 11432 +f 11304 11305 11434 +f 11304 11434 11433 +f 11305 11306 11434 +f 11306 11435 11434 +f 11306 11307 11436 +f 11306 11436 11435 +f 11307 11308 11436 +f 11308 11437 11436 +f 11308 11309 11438 +f 11308 11438 11437 +f 11309 11310 11438 +f 11310 11439 11438 +f 11310 11311 11440 +f 11310 11440 11439 +f 11311 11312 11440 +f 11312 11441 11440 +f 11312 11313 11442 +f 11312 11442 11441 +f 11313 11314 11442 +f 11314 11443 11442 +f 11314 11315 11444 +f 11314 11444 11443 +f 11315 11316 11444 +f 11316 11445 11444 +f 11316 11317 11446 +f 11316 11446 11445 +f 11317 11318 11446 +f 11318 11447 11446 +f 11318 11319 11448 +f 11318 11448 11447 +f 11319 11320 11448 +f 11320 11449 11448 +f 11320 11321 11450 +f 11320 11450 11449 +f 11321 11322 11450 +f 11322 11451 11450 +f 11322 11323 11452 +f 11322 11452 11451 +f 11323 11324 11452 +f 11324 11453 11452 +f 11324 11325 11454 +f 11324 11454 11453 +f 11325 11326 11454 +f 11326 11455 11454 +f 11326 11327 11456 +f 11326 11456 11455 +f 11327 11328 11456 +f 11328 11457 11456 +f 11328 11329 11458 +f 11328 11458 11457 +f 11329 11330 11458 +f 11330 11459 11458 +f 11330 11331 11460 +f 11330 11460 11459 +f 11331 11332 11460 +f 11332 11461 11460 +f 11332 11333 11462 +f 11332 11462 11461 +f 11333 11334 11462 +f 11334 11463 11462 +f 11334 11335 11464 +f 11334 11464 11463 +f 11336 11337 11466 +f 11336 11466 11465 +f 11337 11338 11466 +f 11338 11467 11466 +f 11338 11339 11468 +f 11338 11468 11467 +f 11339 11340 11468 +f 11340 11469 11468 +f 11340 11341 11470 +f 11340 11470 11469 +f 11341 11342 11470 +f 11342 11471 11470 +f 11342 11343 11472 +f 11342 11472 11471 +f 11343 11344 11472 +f 11344 11473 11472 +f 11344 11345 11474 +f 11344 11474 11473 +f 11345 11346 11474 +f 11346 11475 11474 +f 11346 11347 11476 +f 11346 11476 11475 +f 11347 11348 11476 +f 11348 11477 11476 +f 11348 11349 11478 +f 11348 11478 11477 +f 11349 11350 11478 +f 11350 11479 11478 +f 11350 11351 11480 +f 11350 11480 11479 +f 11351 11352 11480 +f 11352 11481 11480 +f 11352 11353 11482 +f 11352 11482 11481 +f 11353 11354 11482 +f 11354 11483 11482 +f 11354 11355 11484 +f 11354 11484 11483 +f 11355 11356 11484 +f 11356 11485 11484 +f 11356 11357 11486 +f 11356 11486 11485 +f 11357 11358 11486 +f 11358 11487 11486 +f 11358 11359 11488 +f 11358 
11488 11487 +f 11359 11360 11488 +f 11360 11489 11488 +f 11360 11361 11490 +f 11360 11490 11489 +f 11361 11362 11490 +f 11362 11491 11490 +f 11362 11363 11492 +f 11362 11492 11491 +f 11363 11364 11492 +f 11364 11493 11492 +f 11364 11365 11494 +f 11364 11494 11493 +f 11365 11366 11494 +f 11366 11495 11494 +f 11366 11367 11496 +f 11366 11496 11495 +f 11367 11368 11496 +f 11368 11497 11496 +f 11368 11369 11498 +f 11368 11498 11497 +f 11369 11370 11498 +f 11370 11499 11498 +f 11370 11371 11500 +f 11370 11500 11499 +f 11371 11372 11500 +f 11372 11501 11500 +f 11372 11373 11502 +f 11372 11502 11501 +f 11373 11374 11502 +f 11374 11503 11502 +f 11374 11375 11504 +f 11374 11504 11503 +f 11375 11376 11504 +f 11376 11505 11504 +f 11376 11377 11506 +f 11376 11506 11505 +f 11377 11378 11506 +f 11378 11507 11506 +f 11378 11379 11508 +f 11378 11508 11507 +f 11379 11380 11508 +f 11380 11509 11508 +f 11380 11381 11510 +f 11380 11510 11509 +f 11381 11382 11510 +f 11382 11511 11510 +f 11382 11383 11512 +f 11382 11512 11511 +f 11383 11384 11512 +f 11384 11513 11512 +f 11384 11385 11514 +f 11384 11514 11513 +f 11385 11386 11514 +f 11386 11515 11514 +f 11386 11387 11516 +f 11386 11516 11515 +f 11387 11388 11516 +f 11388 11517 11516 +f 11388 11389 11518 +f 11388 11518 11517 +f 11389 11390 11518 +f 11390 11519 11518 +f 11390 11391 11520 +f 11390 11520 11519 +f 11391 11392 11520 +f 11392 11521 11520 +f 11392 11393 11522 +f 11392 11522 11521 +f 11393 11394 11522 +f 11394 11523 11522 +f 11394 11395 11524 +f 11394 11524 11523 +f 11395 11396 11524 +f 11396 11525 11524 +f 11396 11397 11526 +f 11396 11526 11525 +f 11397 11398 11526 +f 11398 11527 11526 +f 11398 11399 11528 +f 11398 11528 11527 +f 11399 11400 11528 +f 11400 11529 11528 +f 11400 11401 11530 +f 11400 11530 11529 +f 11401 11402 11530 +f 11402 11531 11530 +f 11402 11403 11532 +f 11402 11532 11531 +f 11403 11404 11532 +f 11404 11533 11532 +f 11404 11405 11534 +f 11404 11534 11533 +f 11405 11406 11534 +f 11406 11535 11534 +f 11406 11407 11536 +f 11406 11536 11535 +f 11407 11408 11536 +f 11408 11537 11536 +f 11408 11409 11538 +f 11408 11538 11537 +f 11409 11410 11538 +f 11410 11539 11538 +f 11410 11411 11540 +f 11410 11540 11539 +f 11411 11412 11540 +f 11412 11541 11540 +f 11412 11413 11542 +f 11412 11542 11541 +f 11413 11414 11542 +f 11414 11543 11542 +f 11414 11415 11544 +f 11414 11544 11543 +f 11415 11416 11544 +f 11416 11545 11544 +f 11416 11417 11546 +f 11416 11546 11545 +f 11417 11418 11546 +f 11418 11547 11546 +f 11418 11419 11548 +f 11418 11548 11547 +f 11419 11420 11548 +f 11420 11549 11548 +f 11420 11421 11550 +f 11420 11550 11549 +f 11421 11422 11550 +f 11422 11551 11550 +f 11422 11423 11552 +f 11422 11552 11551 +f 11423 11424 11552 +f 11424 11553 11552 +f 11424 11425 11554 +f 11424 11554 11553 +f 11425 11426 11554 +f 11426 11555 11554 +f 11426 11427 11556 +f 11426 11556 11555 +f 11427 11428 11556 +f 11428 11557 11556 +f 11428 11429 11558 +f 11428 11558 11557 +f 11429 11430 11558 +f 11430 11559 11558 +f 11430 11431 11560 +f 11430 11560 11559 +f 11431 11432 11560 +f 11432 11561 11560 +f 11432 11433 11562 +f 11432 11562 11561 +f 11433 11434 11562 +f 11434 11563 11562 +f 11434 11435 11564 +f 11434 11564 11563 +f 11435 11436 11564 +f 11436 11565 11564 +f 11436 11437 11566 +f 11436 11566 11565 +f 11437 11438 11566 +f 11438 11567 11566 +f 11438 11439 11568 +f 11438 11568 11567 +f 11439 11440 11568 +f 11440 11569 11568 +f 11440 11441 11570 +f 11440 11570 11569 +f 11441 11442 11570 +f 11442 11571 11570 +f 11442 11443 11572 +f 11442 11572 11571 +f 11443 11444 
11572 +f 11444 11573 11572 +f 11444 11445 11574 +f 11444 11574 11573 +f 11445 11446 11574 +f 11446 11575 11574 +f 11446 11447 11576 +f 11446 11576 11575 +f 11447 11448 11576 +f 11448 11577 11576 +f 11448 11449 11578 +f 11448 11578 11577 +f 11449 11450 11578 +f 11450 11579 11578 +f 11450 11451 11580 +f 11450 11580 11579 +f 11451 11452 11580 +f 11452 11581 11580 +f 11452 11453 11582 +f 11452 11582 11581 +f 11453 11454 11582 +f 11454 11583 11582 +f 11454 11455 11584 +f 11454 11584 11583 +f 11455 11456 11584 +f 11456 11585 11584 +f 11456 11457 11586 +f 11456 11586 11585 +f 11457 11458 11586 +f 11458 11587 11586 +f 11458 11459 11588 +f 11458 11588 11587 +f 11459 11460 11588 +f 11460 11589 11588 +f 11460 11461 11590 +f 11460 11590 11589 +f 11461 11462 11590 +f 11462 11591 11590 +f 11462 11463 11592 +f 11462 11592 11591 +f 11463 11464 11592 +f 11464 11593 11592 +f 11465 11466 11594 +f 11466 11595 11594 +f 11466 11467 11596 +f 11466 11596 11595 +f 11467 11468 11596 +f 11468 11597 11596 +f 11468 11469 11598 +f 11468 11598 11597 +f 11469 11470 11598 +f 11470 11599 11598 +f 11470 11471 11600 +f 11470 11600 11599 +f 11471 11472 11600 +f 11472 11601 11600 +f 11472 11473 11602 +f 11472 11602 11601 +f 11473 11474 11602 +f 11474 11603 11602 +f 11474 11475 11604 +f 11474 11604 11603 +f 11475 11476 11604 +f 11476 11605 11604 +f 11476 11477 11606 +f 11476 11606 11605 +f 11477 11478 11606 +f 11478 11607 11606 +f 11478 11479 11608 +f 11478 11608 11607 +f 11479 11480 11608 +f 11480 11609 11608 +f 11480 11481 11610 +f 11480 11610 11609 +f 11481 11482 11610 +f 11482 11611 11610 +f 11482 11483 11612 +f 11482 11612 11611 +f 11483 11484 11612 +f 11484 11613 11612 +f 11484 11485 11614 +f 11484 11614 11613 +f 11485 11486 11614 +f 11486 11615 11614 +f 11486 11487 11616 +f 11486 11616 11615 +f 11487 11488 11616 +f 11488 11617 11616 +f 11488 11489 11618 +f 11488 11618 11617 +f 11489 11490 11618 +f 11490 11619 11618 +f 11490 11491 11620 +f 11490 11620 11619 +f 11491 11492 11620 +f 11492 11621 11620 +f 11492 11493 11622 +f 11492 11622 11621 +f 11493 11494 11622 +f 11494 11623 11622 +f 11494 11495 11624 +f 11494 11624 11623 +f 11495 11496 11624 +f 11496 11625 11624 +f 11496 11497 11626 +f 11496 11626 11625 +f 11497 11498 11626 +f 11498 11627 11626 +f 11498 11499 11628 +f 11498 11628 11627 +f 11499 11500 11628 +f 11500 11629 11628 +f 11500 11501 11630 +f 11500 11630 11629 +f 11501 11502 11630 +f 11502 11631 11630 +f 11502 11503 11632 +f 11502 11632 11631 +f 11503 11504 11632 +f 11504 11633 11632 +f 11504 11505 11634 +f 11504 11634 11633 +f 11505 11506 11634 +f 11506 11635 11634 +f 11506 11507 11636 +f 11506 11636 11635 +f 11507 11508 11636 +f 11508 11637 11636 +f 11508 11509 11638 +f 11508 11638 11637 +f 11509 11510 11638 +f 11510 11639 11638 +f 11510 11511 11640 +f 11510 11640 11639 +f 11511 11512 11640 +f 11512 11641 11640 +f 11512 11513 11642 +f 11512 11642 11641 +f 11513 11514 11642 +f 11514 11643 11642 +f 11514 11515 11644 +f 11514 11644 11643 +f 11515 11516 11644 +f 11516 11645 11644 +f 11516 11517 11646 +f 11516 11646 11645 +f 11517 11518 11646 +f 11518 11647 11646 +f 11518 11519 11648 +f 11518 11648 11647 +f 11519 11520 11648 +f 11520 11649 11648 +f 11520 11521 11650 +f 11520 11650 11649 +f 11521 11522 11650 +f 11522 11651 11650 +f 11522 11523 11652 +f 11522 11652 11651 +f 11523 11524 11652 +f 11524 11653 11652 +f 11524 11525 11654 +f 11524 11654 11653 +f 11525 11526 11654 +f 11526 11655 11654 +f 11526 11527 11656 +f 11526 11656 11655 +f 11527 11528 11656 +f 11528 11657 11656 +f 11528 11529 11658 +f 11528 11658 11657 
+f 11529 11530 11658 +f 11530 11659 11658 +f 11530 11531 11660 +f 11530 11660 11659 +f 11531 11532 11660 +f 11532 11661 11660 +f 11532 11533 11662 +f 11532 11662 11661 +f 11533 11534 11662 +f 11534 11663 11662 +f 11534 11535 11664 +f 11534 11664 11663 +f 11535 11536 11664 +f 11536 11665 11664 +f 11536 11537 11666 +f 11536 11666 11665 +f 11537 11538 11666 +f 11538 11667 11666 +f 11538 11539 11668 +f 11538 11668 11667 +f 11539 11540 11668 +f 11540 11669 11668 +f 11540 11541 11670 +f 11540 11670 11669 +f 11541 11542 11670 +f 11542 11671 11670 +f 11542 11543 11672 +f 11542 11672 11671 +f 11543 11544 11672 +f 11544 11673 11672 +f 11544 11545 11674 +f 11544 11674 11673 +f 11545 11546 11674 +f 11546 11675 11674 +f 11546 11547 11676 +f 11546 11676 11675 +f 11547 11548 11676 +f 11548 11677 11676 +f 11548 11549 11678 +f 11548 11678 11677 +f 11549 11550 11678 +f 11550 11679 11678 +f 11550 11551 11680 +f 11550 11680 11679 +f 11551 11552 11680 +f 11552 11681 11680 +f 11552 11553 11682 +f 11552 11682 11681 +f 11553 11554 11682 +f 11554 11683 11682 +f 11554 11555 11684 +f 11554 11684 11683 +f 11555 11556 11684 +f 11556 11685 11684 +f 11556 11557 11686 +f 11556 11686 11685 +f 11557 11558 11686 +f 11558 11687 11686 +f 11558 11559 11688 +f 11558 11688 11687 +f 11559 11560 11688 +f 11560 11689 11688 +f 11560 11561 11690 +f 11560 11690 11689 +f 11561 11562 11690 +f 11562 11691 11690 +f 11562 11563 11692 +f 11562 11692 11691 +f 11563 11564 11692 +f 11564 11693 11692 +f 11564 11565 11694 +f 11564 11694 11693 +f 11565 11566 11694 +f 11566 11695 11694 +f 11566 11567 11696 +f 11566 11696 11695 +f 11567 11568 11696 +f 11568 11697 11696 +f 11568 11569 11698 +f 11568 11698 11697 +f 11569 11570 11698 +f 11570 11699 11698 +f 11570 11571 11700 +f 11570 11700 11699 +f 11571 11572 11700 +f 11572 11701 11700 +f 11572 11573 11702 +f 11572 11702 11701 +f 11573 11574 11702 +f 11574 11703 11702 +f 11574 11575 11704 +f 11574 11704 11703 +f 11575 11576 11704 +f 11576 11705 11704 +f 11576 11577 11706 +f 11576 11706 11705 +f 11577 11578 11706 +f 11578 11707 11706 +f 11578 11579 11708 +f 11578 11708 11707 +f 11579 11580 11708 +f 11580 11709 11708 +f 11580 11581 11710 +f 11580 11710 11709 +f 11581 11582 11710 +f 11582 11711 11710 +f 11582 11583 11712 +f 11582 11712 11711 +f 11583 11584 11712 +f 11584 11713 11712 +f 11584 11585 11714 +f 11584 11714 11713 +f 11585 11586 11714 +f 11586 11715 11714 +f 11586 11587 11716 +f 11586 11716 11715 +f 11587 11588 11716 +f 11588 11717 11716 +f 11588 11589 11718 +f 11588 11718 11717 +f 11589 11590 11718 +f 11590 11719 11718 +f 11590 11591 11720 +f 11590 11720 11719 +f 11591 11592 11720 +f 11592 11721 11720 +f 11592 11593 11722 +f 11592 11722 11721 +f 11594 11595 11724 +f 11594 11724 11723 +f 11595 11596 11724 +f 11596 11725 11724 +f 11596 11597 11726 +f 11596 11726 11725 +f 11597 11598 11726 +f 11598 11727 11726 +f 11598 11599 11728 +f 11598 11728 11727 +f 11599 11600 11728 +f 11600 11729 11728 +f 11600 11601 11730 +f 11600 11730 11729 +f 11601 11602 11730 +f 11602 11731 11730 +f 11602 11603 11732 +f 11602 11732 11731 +f 11603 11604 11732 +f 11604 11733 11732 +f 11604 11605 11734 +f 11604 11734 11733 +f 11605 11606 11734 +f 11606 11735 11734 +f 11606 11607 11736 +f 11606 11736 11735 +f 11607 11608 11736 +f 11608 11737 11736 +f 11608 11609 11738 +f 11608 11738 11737 +f 11609 11610 11738 +f 11610 11739 11738 +f 11610 11611 11740 +f 11610 11740 11739 +f 11611 11612 11740 +f 11612 11741 11740 +f 11612 11613 11742 +f 11612 11742 11741 +f 11613 11614 11742 +f 11614 11743 11742 +f 11614 11615 11744 +f 
11614 11744 11743 +f 11615 11616 11744 +f 11616 11745 11744 +f 11616 11617 11746 +f 11616 11746 11745 +f 11617 11618 11746 +f 11618 11747 11746 +f 11618 11619 11748 +f 11618 11748 11747 +f 11619 11620 11748 +f 11620 11749 11748 +f 11620 11621 11750 +f 11620 11750 11749 +f 11621 11622 11750 +f 11622 11751 11750 +f 11622 11623 11752 +f 11622 11752 11751 +f 11623 11624 11752 +f 11624 11753 11752 +f 11624 11625 11754 +f 11624 11754 11753 +f 11625 11626 11754 +f 11626 11755 11754 +f 11626 11627 11756 +f 11626 11756 11755 +f 11627 11628 11756 +f 11628 11757 11756 +f 11628 11629 11758 +f 11628 11758 11757 +f 11629 11630 11758 +f 11630 11759 11758 +f 11630 11631 11760 +f 11630 11760 11759 +f 11631 11632 11760 +f 11632 11761 11760 +f 11632 11633 11762 +f 11632 11762 11761 +f 11633 11634 11762 +f 11634 11763 11762 +f 11634 11635 11764 +f 11634 11764 11763 +f 11635 11636 11764 +f 11636 11765 11764 +f 11636 11637 11766 +f 11636 11766 11765 +f 11637 11638 11766 +f 11638 11767 11766 +f 11638 11639 11768 +f 11638 11768 11767 +f 11639 11640 11768 +f 11640 11769 11768 +f 11640 11641 11770 +f 11640 11770 11769 +f 11641 11642 11770 +f 11642 11771 11770 +f 11642 11643 11772 +f 11642 11772 11771 +f 11643 11644 11772 +f 11644 11773 11772 +f 11644 11645 11774 +f 11644 11774 11773 +f 11645 11646 11774 +f 11646 11775 11774 +f 11646 11647 11776 +f 11646 11776 11775 +f 11647 11648 11776 +f 11648 11777 11776 +f 11648 11649 11778 +f 11648 11778 11777 +f 11649 11650 11778 +f 11650 11779 11778 +f 11650 11651 11780 +f 11650 11780 11779 +f 11651 11652 11780 +f 11652 11781 11780 +f 11652 11653 11782 +f 11652 11782 11781 +f 11653 11654 11782 +f 11654 11783 11782 +f 11654 11655 11784 +f 11654 11784 11783 +f 11655 11656 11784 +f 11656 11785 11784 +f 11656 11657 11786 +f 11656 11786 11785 +f 11657 11658 11786 +f 11658 11787 11786 +f 11658 11659 11788 +f 11658 11788 11787 +f 11659 11660 11788 +f 11660 11789 11788 +f 11660 11661 11790 +f 11660 11790 11789 +f 11661 11662 11790 +f 11662 11791 11790 +f 11662 11663 11792 +f 11662 11792 11791 +f 11663 11664 11792 +f 11664 11793 11792 +f 11664 11665 11794 +f 11664 11794 11793 +f 11665 11666 11794 +f 11666 11795 11794 +f 11666 11667 11796 +f 11666 11796 11795 +f 11667 11668 11796 +f 11668 11797 11796 +f 11668 11669 11798 +f 11668 11798 11797 +f 11669 11670 11798 +f 11670 11799 11798 +f 11670 11671 11800 +f 11670 11800 11799 +f 11671 11672 11800 +f 11672 11801 11800 +f 11672 11673 11802 +f 11672 11802 11801 +f 11673 11674 11802 +f 11674 11803 11802 +f 11674 11675 11804 +f 11674 11804 11803 +f 11675 11676 11804 +f 11676 11805 11804 +f 11676 11677 11806 +f 11676 11806 11805 +f 11677 11678 11806 +f 11678 11807 11806 +f 11678 11679 11808 +f 11678 11808 11807 +f 11679 11680 11808 +f 11680 11809 11808 +f 11680 11681 11810 +f 11680 11810 11809 +f 11681 11682 11810 +f 11682 11811 11810 +f 11682 11683 11812 +f 11682 11812 11811 +f 11683 11684 11812 +f 11684 11813 11812 +f 11684 11685 11814 +f 11684 11814 11813 +f 11685 11686 11814 +f 11686 11815 11814 +f 11686 11687 11816 +f 11686 11816 11815 +f 11687 11688 11816 +f 11688 11817 11816 +f 11688 11689 11818 +f 11688 11818 11817 +f 11689 11690 11818 +f 11690 11819 11818 +f 11690 11691 11820 +f 11690 11820 11819 +f 11691 11692 11820 +f 11692 11821 11820 +f 11692 11693 11822 +f 11692 11822 11821 +f 11693 11694 11822 +f 11694 11823 11822 +f 11694 11695 11824 +f 11694 11824 11823 +f 11695 11696 11824 +f 11696 11825 11824 +f 11696 11697 11826 +f 11696 11826 11825 +f 11697 11698 11826 +f 11698 11827 11826 +f 11698 11699 11828 +f 11698 11828 11827 +f 11699 
11700 11828 +f 11700 11829 11828 +f 11700 11701 11830 +f 11700 11830 11829 +f 11701 11702 11830 +f 11702 11831 11830 +f 11702 11703 11832 +f 11702 11832 11831 +f 11703 11704 11832 +f 11704 11833 11832 +f 11704 11705 11834 +f 11704 11834 11833 +f 11705 11706 11834 +f 11706 11835 11834 +f 11706 11707 11836 +f 11706 11836 11835 +f 11707 11708 11836 +f 11708 11837 11836 +f 11708 11709 11838 +f 11708 11838 11837 +f 11709 11710 11838 +f 11710 11839 11838 +f 11710 11711 11840 +f 11710 11840 11839 +f 11711 11712 11840 +f 11712 11841 11840 +f 11712 11713 11842 +f 11712 11842 11841 +f 11713 11714 11842 +f 11714 11843 11842 +f 11714 11715 11844 +f 11714 11844 11843 +f 11715 11716 11844 +f 11716 11845 11844 +f 11716 11717 11846 +f 11716 11846 11845 +f 11717 11718 11846 +f 11718 11847 11846 +f 11718 11719 11848 +f 11718 11848 11847 +f 11719 11720 11848 +f 11720 11849 11848 +f 11720 11721 11850 +f 11720 11850 11849 +f 11721 11722 11850 +f 11722 11851 11850 +f 11723 11724 11852 +f 11724 11853 11852 +f 11724 11725 11854 +f 11724 11854 11853 +f 11725 11726 11854 +f 11726 11855 11854 +f 11726 11727 11856 +f 11726 11856 11855 +f 11727 11728 11856 +f 11728 11857 11856 +f 11728 11729 11858 +f 11728 11858 11857 +f 11729 11730 11858 +f 11730 11859 11858 +f 11730 11731 11860 +f 11730 11860 11859 +f 11731 11732 11860 +f 11732 11861 11860 +f 11732 11733 11862 +f 11732 11862 11861 +f 11733 11734 11862 +f 11734 11863 11862 +f 11734 11735 11864 +f 11734 11864 11863 +f 11735 11736 11864 +f 11736 11865 11864 +f 11736 11737 11866 +f 11736 11866 11865 +f 11737 11738 11866 +f 11738 11867 11866 +f 11738 11739 11868 +f 11738 11868 11867 +f 11739 11740 11868 +f 11740 11869 11868 +f 11740 11741 11870 +f 11740 11870 11869 +f 11741 11742 11870 +f 11742 11871 11870 +f 11742 11743 11872 +f 11742 11872 11871 +f 11743 11744 11872 +f 11744 11873 11872 +f 11744 11745 11874 +f 11744 11874 11873 +f 11745 11746 11874 +f 11746 11875 11874 +f 11746 11747 11876 +f 11746 11876 11875 +f 11747 11748 11876 +f 11748 11877 11876 +f 11748 11749 11878 +f 11748 11878 11877 +f 11749 11750 11878 +f 11750 11879 11878 +f 11750 11751 11880 +f 11750 11880 11879 +f 11751 11752 11880 +f 11752 11881 11880 +f 11752 11753 11882 +f 11752 11882 11881 +f 11753 11754 11882 +f 11754 11883 11882 +f 11754 11755 11884 +f 11754 11884 11883 +f 11755 11756 11884 +f 11756 11885 11884 +f 11756 11757 11886 +f 11756 11886 11885 +f 11757 11758 11886 +f 11758 11887 11886 +f 11758 11759 11888 +f 11758 11888 11887 +f 11759 11760 11888 +f 11760 11889 11888 +f 11760 11761 11890 +f 11760 11890 11889 +f 11761 11762 11890 +f 11762 11891 11890 +f 11762 11763 11892 +f 11762 11892 11891 +f 11763 11764 11892 +f 11764 11893 11892 +f 11764 11765 11894 +f 11764 11894 11893 +f 11765 11766 11894 +f 11766 11895 11894 +f 11766 11767 11896 +f 11766 11896 11895 +f 11767 11768 11896 +f 11768 11897 11896 +f 11768 11769 11898 +f 11768 11898 11897 +f 11769 11770 11898 +f 11770 11899 11898 +f 11770 11771 11900 +f 11770 11900 11899 +f 11771 11772 11900 +f 11772 11901 11900 +f 11772 11773 11902 +f 11772 11902 11901 +f 11773 11774 11902 +f 11774 11903 11902 +f 11774 11775 11904 +f 11774 11904 11903 +f 11775 11776 11904 +f 11776 11905 11904 +f 11776 11777 11906 +f 11776 11906 11905 +f 11777 11778 11906 +f 11778 11907 11906 +f 11778 11779 11908 +f 11778 11908 11907 +f 11779 11780 11908 +f 11780 11909 11908 +f 11780 11781 11910 +f 11780 11910 11909 +f 11781 11782 11910 +f 11782 11911 11910 +f 11782 11783 11912 +f 11782 11912 11911 +f 11783 11784 11912 +f 11784 11913 11912 +f 11784 11785 11914 +f 11784 11914 
11913 +f 11785 11786 11914 +f 11786 11915 11914 +f 11786 11787 11916 +f 11786 11916 11915 +f 11787 11788 11916 +f 11788 11917 11916 +f 11788 11789 11918 +f 11788 11918 11917 +f 11789 11790 11918 +f 11790 11919 11918 +f 11790 11791 11920 +f 11790 11920 11919 +f 11791 11792 11920 +f 11792 11921 11920 +f 11792 11793 11922 +f 11792 11922 11921 +f 11793 11794 11922 +f 11794 11923 11922 +f 11794 11795 11924 +f 11794 11924 11923 +f 11795 11796 11924 +f 11796 11925 11924 +f 11796 11797 11926 +f 11796 11926 11925 +f 11797 11798 11926 +f 11798 11927 11926 +f 11798 11799 11928 +f 11798 11928 11927 +f 11799 11800 11928 +f 11800 11929 11928 +f 11800 11801 11930 +f 11800 11930 11929 +f 11801 11802 11930 +f 11802 11931 11930 +f 11802 11803 11932 +f 11802 11932 11931 +f 11803 11804 11932 +f 11804 11933 11932 +f 11804 11805 11934 +f 11804 11934 11933 +f 11805 11806 11934 +f 11806 11935 11934 +f 11806 11807 11936 +f 11806 11936 11935 +f 11807 11808 11936 +f 11808 11937 11936 +f 11808 11809 11938 +f 11808 11938 11937 +f 11809 11810 11938 +f 11810 11939 11938 +f 11810 11811 11940 +f 11810 11940 11939 +f 11811 11812 11940 +f 11812 11941 11940 +f 11812 11813 11942 +f 11812 11942 11941 +f 11813 11814 11942 +f 11814 11943 11942 +f 11814 11815 11944 +f 11814 11944 11943 +f 11815 11816 11944 +f 11816 11945 11944 +f 11816 11817 11946 +f 11816 11946 11945 +f 11817 11818 11946 +f 11818 11947 11946 +f 11818 11819 11948 +f 11818 11948 11947 +f 11819 11820 11948 +f 11820 11949 11948 +f 11820 11821 11950 +f 11820 11950 11949 +f 11821 11822 11950 +f 11822 11951 11950 +f 11822 11823 11952 +f 11822 11952 11951 +f 11823 11824 11952 +f 11824 11953 11952 +f 11824 11825 11954 +f 11824 11954 11953 +f 11825 11826 11954 +f 11826 11955 11954 +f 11826 11827 11956 +f 11826 11956 11955 +f 11827 11828 11956 +f 11828 11957 11956 +f 11828 11829 11958 +f 11828 11958 11957 +f 11829 11830 11958 +f 11830 11959 11958 +f 11830 11831 11960 +f 11830 11960 11959 +f 11831 11832 11960 +f 11832 11961 11960 +f 11832 11833 11962 +f 11832 11962 11961 +f 11833 11834 11962 +f 11834 11963 11962 +f 11834 11835 11964 +f 11834 11964 11963 +f 11835 11836 11964 +f 11836 11965 11964 +f 11836 11837 11966 +f 11836 11966 11965 +f 11837 11838 11966 +f 11838 11967 11966 +f 11838 11839 11968 +f 11838 11968 11967 +f 11839 11840 11968 +f 11840 11969 11968 +f 11840 11841 11970 +f 11840 11970 11969 +f 11841 11842 11970 +f 11842 11971 11970 +f 11842 11843 11972 +f 11842 11972 11971 +f 11843 11844 11972 +f 11844 11973 11972 +f 11844 11845 11974 +f 11844 11974 11973 +f 11845 11846 11974 +f 11846 11975 11974 +f 11846 11847 11976 +f 11846 11976 11975 +f 11847 11848 11976 +f 11848 11977 11976 +f 11848 11849 11978 +f 11848 11978 11977 +f 11849 11850 11978 +f 11850 11979 11978 +f 11850 11851 11980 +f 11850 11980 11979 +f 11852 11853 11982 +f 11852 11982 11981 +f 11853 11854 11982 +f 11854 11983 11982 +f 11854 11855 11984 +f 11854 11984 11983 +f 11855 11856 11984 +f 11856 11985 11984 +f 11856 11857 11986 +f 11856 11986 11985 +f 11857 11858 11986 +f 11858 11987 11986 +f 11858 11859 11988 +f 11858 11988 11987 +f 11859 11860 11988 +f 11860 11989 11988 +f 11860 11861 11990 +f 11860 11990 11989 +f 11861 11862 11990 +f 11862 11991 11990 +f 11862 11863 11992 +f 11862 11992 11991 +f 11863 11864 11992 +f 11864 11993 11992 +f 11864 11865 11994 +f 11864 11994 11993 +f 11865 11866 11994 +f 11866 11995 11994 +f 11866 11867 11996 +f 11866 11996 11995 +f 11867 11868 11996 +f 11868 11997 11996 +f 11868 11869 11998 +f 11868 11998 11997 +f 11869 11870 11998 +f 11870 11999 11998 +f 11870 11871 12000 
+f 11870 12000 11999 +f 11871 11872 12000 +f 11872 12001 12000 +f 11872 11873 12002 +f 11872 12002 12001 +f 11873 11874 12002 +f 11874 12003 12002 +f 11874 11875 12004 +f 11874 12004 12003 +f 11875 11876 12004 +f 11876 12005 12004 +f 11876 11877 12006 +f 11876 12006 12005 +f 11877 11878 12006 +f 11878 12007 12006 +f 11878 11879 12008 +f 11878 12008 12007 +f 11879 11880 12008 +f 11880 12009 12008 +f 11880 11881 12010 +f 11880 12010 12009 +f 11881 11882 12010 +f 11882 12011 12010 +f 11882 11883 12012 +f 11882 12012 12011 +f 11883 11884 12012 +f 11884 12013 12012 +f 11884 11885 12014 +f 11884 12014 12013 +f 11885 11886 12014 +f 11886 12015 12014 +f 11886 11887 12016 +f 11886 12016 12015 +f 11887 11888 12016 +f 11888 12017 12016 +f 11888 11889 12018 +f 11888 12018 12017 +f 11889 11890 12018 +f 11890 12019 12018 +f 11890 11891 12020 +f 11890 12020 12019 +f 11891 11892 12020 +f 11892 12021 12020 +f 11892 11893 12022 +f 11892 12022 12021 +f 11893 11894 12022 +f 11894 12023 12022 +f 11894 11895 12024 +f 11894 12024 12023 +f 11895 11896 12024 +f 11896 12025 12024 +f 11896 11897 12026 +f 11896 12026 12025 +f 11897 11898 12026 +f 11898 12027 12026 +f 11898 11899 12028 +f 11898 12028 12027 +f 11899 11900 12028 +f 11900 12029 12028 +f 11900 11901 12030 +f 11900 12030 12029 +f 11901 11902 12030 +f 11902 12031 12030 +f 11902 11903 12032 +f 11902 12032 12031 +f 11903 11904 12032 +f 11904 12033 12032 +f 11904 11905 12034 +f 11904 12034 12033 +f 11905 11906 12034 +f 11906 12035 12034 +f 11906 11907 12036 +f 11906 12036 12035 +f 11907 11908 12036 +f 11908 12037 12036 +f 11908 11909 12038 +f 11908 12038 12037 +f 11909 11910 12038 +f 11910 12039 12038 +f 11910 11911 12040 +f 11910 12040 12039 +f 11911 11912 12040 +f 11912 12041 12040 +f 11912 11913 12042 +f 11912 12042 12041 +f 11913 11914 12042 +f 11914 12043 12042 +f 11914 11915 12044 +f 11914 12044 12043 +f 11915 11916 12044 +f 11916 12045 12044 +f 11916 11917 12046 +f 11916 12046 12045 +f 11917 11918 12046 +f 11918 12047 12046 +f 11918 11919 12048 +f 11918 12048 12047 +f 11919 11920 12048 +f 11920 12049 12048 +f 11920 11921 12050 +f 11920 12050 12049 +f 11921 11922 12050 +f 11922 12051 12050 +f 11922 11923 12052 +f 11922 12052 12051 +f 11923 11924 12052 +f 11924 12053 12052 +f 11924 11925 12054 +f 11924 12054 12053 +f 11925 11926 12054 +f 11926 12055 12054 +f 11926 11927 12056 +f 11926 12056 12055 +f 11927 11928 12056 +f 11928 12057 12056 +f 11928 11929 12058 +f 11928 12058 12057 +f 11929 11930 12058 +f 11930 12059 12058 +f 11930 11931 12060 +f 11930 12060 12059 +f 11931 11932 12060 +f 11932 12061 12060 +f 11932 11933 12062 +f 11932 12062 12061 +f 11933 11934 12062 +f 11934 12063 12062 +f 11934 11935 12064 +f 11934 12064 12063 +f 11935 11936 12064 +f 11936 12065 12064 +f 11936 11937 12066 +f 11936 12066 12065 +f 11937 11938 12066 +f 11938 12067 12066 +f 11938 11939 12068 +f 11938 12068 12067 +f 11939 11940 12068 +f 11940 12069 12068 +f 11940 11941 12070 +f 11940 12070 12069 +f 11941 11942 12070 +f 11942 12071 12070 +f 11942 11943 12072 +f 11942 12072 12071 +f 11943 11944 12072 +f 11944 12073 12072 +f 11944 11945 12074 +f 11944 12074 12073 +f 11945 11946 12074 +f 11946 12075 12074 +f 11946 11947 12076 +f 11946 12076 12075 +f 11947 11948 12076 +f 11948 12077 12076 +f 11948 11949 12078 +f 11948 12078 12077 +f 11949 11950 12078 +f 11950 12079 12078 +f 11950 11951 12080 +f 11950 12080 12079 +f 11951 11952 12080 +f 11952 12081 12080 +f 11952 11953 12082 +f 11952 12082 12081 +f 11953 11954 12082 +f 11954 12083 12082 +f 11954 11955 12084 +f 11954 12084 12083 +f 
11955 11956 12084 +f 11956 12085 12084 +f 11956 11957 12086 +f 11956 12086 12085 +f 11957 11958 12086 +f 11958 12087 12086 +f 11958 11959 12088 +f 11958 12088 12087 +f 11959 11960 12088 +f 11960 12089 12088 +f 11960 11961 12090 +f 11960 12090 12089 +f 11961 11962 12090 +f 11962 12091 12090 +f 11962 11963 12092 +f 11962 12092 12091 +f 11963 11964 12092 +f 11964 12093 12092 +f 11964 11965 12094 +f 11964 12094 12093 +f 11965 11966 12094 +f 11966 12095 12094 +f 11966 11967 12096 +f 11966 12096 12095 +f 11967 11968 12096 +f 11968 12097 12096 +f 11968 11969 12098 +f 11968 12098 12097 +f 11969 11970 12098 +f 11970 12099 12098 +f 11970 11971 12100 +f 11970 12100 12099 +f 11971 11972 12100 +f 11972 12101 12100 +f 11972 11973 12102 +f 11972 12102 12101 +f 11973 11974 12102 +f 11974 12103 12102 +f 11974 11975 12104 +f 11974 12104 12103 +f 11975 11976 12104 +f 11976 12105 12104 +f 11976 11977 12106 +f 11976 12106 12105 +f 11977 11978 12106 +f 11978 12107 12106 +f 11978 11979 12108 +f 11978 12108 12107 +f 11979 11980 12108 +f 11980 12109 12108 +f 11981 11982 12110 +f 11982 12111 12110 +f 11982 11983 12112 +f 11982 12112 12111 +f 11983 11984 12112 +f 11984 12113 12112 +f 11984 11985 12114 +f 11984 12114 12113 +f 11985 11986 12114 +f 11986 12115 12114 +f 11986 11987 12116 +f 11986 12116 12115 +f 11987 11988 12116 +f 11988 12117 12116 +f 11988 11989 12118 +f 11988 12118 12117 +f 11989 11990 12118 +f 11990 12119 12118 +f 11990 11991 12120 +f 11990 12120 12119 +f 11991 11992 12120 +f 11992 12121 12120 +f 11992 11993 12122 +f 11992 12122 12121 +f 11993 11994 12122 +f 11994 12123 12122 +f 11994 11995 12124 +f 11994 12124 12123 +f 11995 11996 12124 +f 11996 12125 12124 +f 11996 11997 12126 +f 11996 12126 12125 +f 11997 11998 12126 +f 11998 12127 12126 +f 11998 11999 12128 +f 11998 12128 12127 +f 11999 12000 12128 +f 12000 12129 12128 +f 12000 12001 12130 +f 12000 12130 12129 +f 12001 12002 12130 +f 12002 12131 12130 +f 12002 12003 12132 +f 12002 12132 12131 +f 12003 12004 12132 +f 12004 12133 12132 +f 12004 12005 12134 +f 12004 12134 12133 +f 12005 12006 12134 +f 12006 12135 12134 +f 12006 12007 12136 +f 12006 12136 12135 +f 12007 12008 12136 +f 12008 12137 12136 +f 12008 12009 12138 +f 12008 12138 12137 +f 12009 12010 12138 +f 12010 12139 12138 +f 12010 12011 12140 +f 12010 12140 12139 +f 12011 12012 12140 +f 12012 12141 12140 +f 12012 12013 12142 +f 12012 12142 12141 +f 12013 12014 12142 +f 12014 12143 12142 +f 12014 12015 12144 +f 12014 12144 12143 +f 12015 12016 12144 +f 12016 12145 12144 +f 12016 12017 12146 +f 12016 12146 12145 +f 12017 12018 12146 +f 12018 12147 12146 +f 12018 12019 12148 +f 12018 12148 12147 +f 12019 12020 12148 +f 12020 12149 12148 +f 12020 12021 12150 +f 12020 12150 12149 +f 12021 12022 12150 +f 12022 12151 12150 +f 12022 12023 12152 +f 12022 12152 12151 +f 12023 12024 12152 +f 12024 12153 12152 +f 12024 12025 12154 +f 12024 12154 12153 +f 12025 12026 12154 +f 12026 12155 12154 +f 12026 12027 12156 +f 12026 12156 12155 +f 12027 12028 12156 +f 12028 12157 12156 +f 12028 12029 12158 +f 12028 12158 12157 +f 12029 12030 12158 +f 12030 12159 12158 +f 12030 12031 12160 +f 12030 12160 12159 +f 12031 12032 12160 +f 12032 12161 12160 +f 12032 12033 12162 +f 12032 12162 12161 +f 12033 12034 12162 +f 12034 12163 12162 +f 12034 12035 12164 +f 12034 12164 12163 +f 12035 12036 12164 +f 12036 12165 12164 +f 12036 12037 12166 +f 12036 12166 12165 +f 12037 12038 12166 +f 12038 12167 12166 +f 12038 12039 12168 +f 12038 12168 12167 +f 12039 12040 12168 +f 12040 12169 12168 +f 12040 12041 12170 +f 12040 
12170 12169 [ … several thousand additional OBJ triangle-face records of the form `+f v1 v2 v3`, spanning vertex indices 12041 through ~13875, elided for readability … ] +f 13746
13747 13876 +f 13746 13876 13875 +f 13747 13748 13876 +f 13748 13877 13876 +f 13748 13749 13878 +f 13748 13878 13877 +f 13749 13750 13878 +f 13750 13879 13878 +f 13750 13751 13880 +f 13750 13880 13879 +f 13751 13752 13880 +f 13752 13881 13880 +f 13752 13753 13882 +f 13752 13882 13881 +f 13753 13754 13882 +f 13754 13883 13882 +f 13754 13755 13884 +f 13754 13884 13883 +f 13755 13756 13884 +f 13756 13885 13884 +f 13756 13757 13886 +f 13756 13886 13885 +f 13757 13758 13886 +f 13758 13887 13886 +f 13758 13759 13888 +f 13758 13888 13887 +f 13759 13760 13888 +f 13760 13889 13888 +f 13760 13761 13890 +f 13760 13890 13889 +f 13761 13762 13890 +f 13762 13891 13890 +f 13762 13763 13892 +f 13762 13892 13891 +f 13763 13764 13892 +f 13764 13893 13892 +f 13764 13765 13894 +f 13764 13894 13893 +f 13765 13766 13894 +f 13766 13895 13894 +f 13766 13767 13896 +f 13766 13896 13895 +f 13767 13768 13896 +f 13768 13897 13896 +f 13768 13769 13898 +f 13768 13898 13897 +f 13769 13770 13898 +f 13770 13899 13898 +f 13770 13771 13900 +f 13770 13900 13899 +f 13771 13772 13900 +f 13772 13901 13900 +f 13772 13773 13902 +f 13772 13902 13901 +f 13773 13774 13902 +f 13774 13903 13902 +f 13774 13775 13904 +f 13774 13904 13903 +f 13775 13776 13904 +f 13776 13905 13904 +f 13776 13777 13906 +f 13776 13906 13905 +f 13777 13778 13906 +f 13778 13907 13906 +f 13778 13779 13908 +f 13778 13908 13907 +f 13779 13780 13908 +f 13780 13909 13908 +f 13780 13781 13910 +f 13780 13910 13909 +f 13781 13782 13910 +f 13782 13911 13910 +f 13782 13783 13912 +f 13782 13912 13911 +f 13783 13784 13912 +f 13784 13913 13912 +f 13784 13785 13914 +f 13784 13914 13913 +f 13785 13786 13914 +f 13786 13915 13914 +f 13787 13788 13916 +f 13788 13917 13916 +f 13788 13789 13918 +f 13788 13918 13917 +f 13789 13790 13918 +f 13790 13919 13918 +f 13790 13791 13920 +f 13790 13920 13919 +f 13791 13792 13920 +f 13792 13921 13920 +f 13792 13793 13922 +f 13792 13922 13921 +f 13793 13794 13922 +f 13794 13923 13922 +f 13794 13795 13924 +f 13794 13924 13923 +f 13795 13796 13924 +f 13796 13925 13924 +f 13796 13797 13926 +f 13796 13926 13925 +f 13797 13798 13926 +f 13798 13927 13926 +f 13798 13799 13928 +f 13798 13928 13927 +f 13799 13800 13928 +f 13800 13929 13928 +f 13800 13801 13930 +f 13800 13930 13929 +f 13801 13802 13930 +f 13802 13931 13930 +f 13802 13803 13932 +f 13802 13932 13931 +f 13803 13804 13932 +f 13804 13933 13932 +f 13804 13805 13934 +f 13804 13934 13933 +f 13805 13806 13934 +f 13806 13935 13934 +f 13806 13807 13936 +f 13806 13936 13935 +f 13807 13808 13936 +f 13808 13937 13936 +f 13808 13809 13938 +f 13808 13938 13937 +f 13809 13810 13938 +f 13810 13939 13938 +f 13810 13811 13940 +f 13810 13940 13939 +f 13811 13812 13940 +f 13812 13941 13940 +f 13812 13813 13942 +f 13812 13942 13941 +f 13813 13814 13942 +f 13814 13943 13942 +f 13814 13815 13944 +f 13814 13944 13943 +f 13815 13816 13944 +f 13816 13945 13944 +f 13816 13817 13946 +f 13816 13946 13945 +f 13817 13818 13946 +f 13818 13947 13946 +f 13818 13819 13948 +f 13818 13948 13947 +f 13819 13820 13948 +f 13820 13949 13948 +f 13820 13821 13950 +f 13820 13950 13949 +f 13821 13822 13950 +f 13822 13951 13950 +f 13822 13823 13952 +f 13822 13952 13951 +f 13823 13824 13952 +f 13824 13953 13952 +f 13824 13825 13954 +f 13824 13954 13953 +f 13825 13826 13954 +f 13826 13955 13954 +f 13826 13827 13956 +f 13826 13956 13955 +f 13827 13828 13956 +f 13828 13957 13956 +f 13828 13829 13958 +f 13828 13958 13957 +f 13829 13830 13958 +f 13830 13959 13958 +f 13830 13831 13960 +f 13830 13960 13959 +f 13831 13832 13960 +f 13832 13961 
13960 +f 13832 13833 13962 +f 13832 13962 13961 +f 13833 13834 13962 +f 13834 13963 13962 +f 13834 13835 13964 +f 13834 13964 13963 +f 13835 13836 13964 +f 13836 13965 13964 +f 13836 13837 13966 +f 13836 13966 13965 +f 13837 13838 13966 +f 13838 13967 13966 +f 13838 13839 13968 +f 13838 13968 13967 +f 13839 13840 13968 +f 13840 13969 13968 +f 13840 13841 13970 +f 13840 13970 13969 +f 13841 13842 13970 +f 13842 13971 13970 +f 13842 13843 13972 +f 13842 13972 13971 +f 13843 13844 13972 +f 13844 13973 13972 +f 13844 13845 13974 +f 13844 13974 13973 +f 13845 13846 13974 +f 13846 13975 13974 +f 13846 13847 13976 +f 13846 13976 13975 +f 13847 13848 13976 +f 13848 13977 13976 +f 13848 13849 13978 +f 13848 13978 13977 +f 13849 13850 13978 +f 13850 13979 13978 +f 13850 13851 13980 +f 13850 13980 13979 +f 13851 13852 13980 +f 13852 13981 13980 +f 13852 13853 13982 +f 13852 13982 13981 +f 13853 13854 13982 +f 13854 13983 13982 +f 13854 13855 13984 +f 13854 13984 13983 +f 13855 13856 13984 +f 13856 13985 13984 +f 13856 13857 13986 +f 13856 13986 13985 +f 13857 13858 13986 +f 13858 13987 13986 +f 13858 13859 13988 +f 13858 13988 13987 +f 13859 13860 13988 +f 13860 13989 13988 +f 13860 13861 13990 +f 13860 13990 13989 +f 13861 13862 13990 +f 13862 13991 13990 +f 13862 13863 13992 +f 13862 13992 13991 +f 13863 13864 13992 +f 13864 13993 13992 +f 13864 13865 13994 +f 13864 13994 13993 +f 13865 13866 13994 +f 13866 13995 13994 +f 13866 13867 13996 +f 13866 13996 13995 +f 13867 13868 13996 +f 13868 13997 13996 +f 13868 13869 13998 +f 13868 13998 13997 +f 13869 13870 13998 +f 13870 13999 13998 +f 13870 13871 14000 +f 13870 14000 13999 +f 13871 13872 14000 +f 13872 14001 14000 +f 13872 13873 14002 +f 13872 14002 14001 +f 13873 13874 14002 +f 13874 14003 14002 +f 13874 13875 14004 +f 13874 14004 14003 +f 13875 13876 14004 +f 13876 14005 14004 +f 13876 13877 14006 +f 13876 14006 14005 +f 13877 13878 14006 +f 13878 14007 14006 +f 13878 13879 14008 +f 13878 14008 14007 +f 13879 13880 14008 +f 13880 14009 14008 +f 13880 13881 14010 +f 13880 14010 14009 +f 13881 13882 14010 +f 13882 14011 14010 +f 13882 13883 14012 +f 13882 14012 14011 +f 13883 13884 14012 +f 13884 14013 14012 +f 13884 13885 14014 +f 13884 14014 14013 +f 13885 13886 14014 +f 13886 14015 14014 +f 13886 13887 14016 +f 13886 14016 14015 +f 13887 13888 14016 +f 13888 14017 14016 +f 13888 13889 14018 +f 13888 14018 14017 +f 13889 13890 14018 +f 13890 14019 14018 +f 13890 13891 14020 +f 13890 14020 14019 +f 13891 13892 14020 +f 13892 14021 14020 +f 13892 13893 14022 +f 13892 14022 14021 +f 13893 13894 14022 +f 13894 14023 14022 +f 13894 13895 14024 +f 13894 14024 14023 +f 13895 13896 14024 +f 13896 14025 14024 +f 13896 13897 14026 +f 13896 14026 14025 +f 13897 13898 14026 +f 13898 14027 14026 +f 13898 13899 14028 +f 13898 14028 14027 +f 13899 13900 14028 +f 13900 14029 14028 +f 13900 13901 14030 +f 13900 14030 14029 +f 13901 13902 14030 +f 13902 14031 14030 +f 13902 13903 14032 +f 13902 14032 14031 +f 13903 13904 14032 +f 13904 14033 14032 +f 13904 13905 14034 +f 13904 14034 14033 +f 13905 13906 14034 +f 13906 14035 14034 +f 13906 13907 14036 +f 13906 14036 14035 +f 13907 13908 14036 +f 13908 14037 14036 +f 13908 13909 14038 +f 13908 14038 14037 +f 13909 13910 14038 +f 13910 14039 14038 +f 13910 13911 14040 +f 13910 14040 14039 +f 13911 13912 14040 +f 13912 14041 14040 +f 13912 13913 14042 +f 13912 14042 14041 +f 13913 13914 14042 +f 13914 14043 14042 +f 13914 13915 14044 +f 13914 14044 14043 +f 13916 13917 14046 +f 13916 14046 14045 +f 13917 13918 14046 
+f 13918 14047 14046 +f 13918 13919 14048 +f 13918 14048 14047 +f 13919 13920 14048 +f 13920 14049 14048 +f 13920 13921 14050 +f 13920 14050 14049 +f 13921 13922 14050 +f 13922 14051 14050 +f 13922 13923 14052 +f 13922 14052 14051 +f 13923 13924 14052 +f 13924 14053 14052 +f 13924 13925 14054 +f 13924 14054 14053 +f 13925 13926 14054 +f 13926 14055 14054 +f 13926 13927 14056 +f 13926 14056 14055 +f 13927 13928 14056 +f 13928 14057 14056 +f 13928 13929 14058 +f 13928 14058 14057 +f 13929 13930 14058 +f 13930 14059 14058 +f 13930 13931 14060 +f 13930 14060 14059 +f 13931 13932 14060 +f 13932 14061 14060 +f 13932 13933 14062 +f 13932 14062 14061 +f 13933 13934 14062 +f 13934 14063 14062 +f 13934 13935 14064 +f 13934 14064 14063 +f 13935 13936 14064 +f 13936 14065 14064 +f 13936 13937 14066 +f 13936 14066 14065 +f 13937 13938 14066 +f 13938 14067 14066 +f 13938 13939 14068 +f 13938 14068 14067 +f 13939 13940 14068 +f 13940 14069 14068 +f 13940 13941 14070 +f 13940 14070 14069 +f 13941 13942 14070 +f 13942 14071 14070 +f 13942 13943 14072 +f 13942 14072 14071 +f 13943 13944 14072 +f 13944 14073 14072 +f 13944 13945 14074 +f 13944 14074 14073 +f 13945 13946 14074 +f 13946 14075 14074 +f 13946 13947 14076 +f 13946 14076 14075 +f 13947 13948 14076 +f 13948 14077 14076 +f 13948 13949 14078 +f 13948 14078 14077 +f 13949 13950 14078 +f 13950 14079 14078 +f 13950 13951 14080 +f 13950 14080 14079 +f 13951 13952 14080 +f 13952 14081 14080 +f 13952 13953 14082 +f 13952 14082 14081 +f 13953 13954 14082 +f 13954 14083 14082 +f 13954 13955 14084 +f 13954 14084 14083 +f 13955 13956 14084 +f 13956 14085 14084 +f 13956 13957 14086 +f 13956 14086 14085 +f 13957 13958 14086 +f 13958 14087 14086 +f 13958 13959 14088 +f 13958 14088 14087 +f 13959 13960 14088 +f 13960 14089 14088 +f 13960 13961 14090 +f 13960 14090 14089 +f 13961 13962 14090 +f 13962 14091 14090 +f 13962 13963 14092 +f 13962 14092 14091 +f 13963 13964 14092 +f 13964 14093 14092 +f 13964 13965 14094 +f 13964 14094 14093 +f 13965 13966 14094 +f 13966 14095 14094 +f 13966 13967 14096 +f 13966 14096 14095 +f 13967 13968 14096 +f 13968 14097 14096 +f 13968 13969 14098 +f 13968 14098 14097 +f 13969 13970 14098 +f 13970 14099 14098 +f 13970 13971 14100 +f 13970 14100 14099 +f 13971 13972 14100 +f 13972 14101 14100 +f 13972 13973 14102 +f 13972 14102 14101 +f 13973 13974 14102 +f 13974 14103 14102 +f 13974 13975 14104 +f 13974 14104 14103 +f 13975 13976 14104 +f 13976 14105 14104 +f 13976 13977 14106 +f 13976 14106 14105 +f 13977 13978 14106 +f 13978 14107 14106 +f 13978 13979 14108 +f 13978 14108 14107 +f 13979 13980 14108 +f 13980 14109 14108 +f 13980 13981 14110 +f 13980 14110 14109 +f 13981 13982 14110 +f 13982 14111 14110 +f 13982 13983 14112 +f 13982 14112 14111 +f 13983 13984 14112 +f 13984 14113 14112 +f 13984 13985 14114 +f 13984 14114 14113 +f 13985 13986 14114 +f 13986 14115 14114 +f 13986 13987 14116 +f 13986 14116 14115 +f 13987 13988 14116 +f 13988 14117 14116 +f 13988 13989 14118 +f 13988 14118 14117 +f 13989 13990 14118 +f 13990 14119 14118 +f 13990 13991 14120 +f 13990 14120 14119 +f 13991 13992 14120 +f 13992 14121 14120 +f 13992 13993 14122 +f 13992 14122 14121 +f 13993 13994 14122 +f 13994 14123 14122 +f 13994 13995 14124 +f 13994 14124 14123 +f 13995 13996 14124 +f 13996 14125 14124 +f 13996 13997 14126 +f 13996 14126 14125 +f 13997 13998 14126 +f 13998 14127 14126 +f 13998 13999 14128 +f 13998 14128 14127 +f 13999 14000 14128 +f 14000 14129 14128 +f 14000 14001 14130 +f 14000 14130 14129 +f 14001 14002 14130 +f 14002 14131 14130 +f 
14002 14003 14132 +f 14002 14132 14131 +f 14003 14004 14132 +f 14004 14133 14132 +f 14004 14005 14134 +f 14004 14134 14133 +f 14005 14006 14134 +f 14006 14135 14134 +f 14006 14007 14136 +f 14006 14136 14135 +f 14007 14008 14136 +f 14008 14137 14136 +f 14008 14009 14138 +f 14008 14138 14137 +f 14009 14010 14138 +f 14010 14139 14138 +f 14010 14011 14140 +f 14010 14140 14139 +f 14011 14012 14140 +f 14012 14141 14140 +f 14012 14013 14142 +f 14012 14142 14141 +f 14013 14014 14142 +f 14014 14143 14142 +f 14014 14015 14144 +f 14014 14144 14143 +f 14015 14016 14144 +f 14016 14145 14144 +f 14016 14017 14146 +f 14016 14146 14145 +f 14017 14018 14146 +f 14018 14147 14146 +f 14018 14019 14148 +f 14018 14148 14147 +f 14019 14020 14148 +f 14020 14149 14148 +f 14020 14021 14150 +f 14020 14150 14149 +f 14021 14022 14150 +f 14022 14151 14150 +f 14022 14023 14152 +f 14022 14152 14151 +f 14023 14024 14152 +f 14024 14153 14152 +f 14024 14025 14154 +f 14024 14154 14153 +f 14025 14026 14154 +f 14026 14155 14154 +f 14026 14027 14156 +f 14026 14156 14155 +f 14027 14028 14156 +f 14028 14157 14156 +f 14028 14029 14158 +f 14028 14158 14157 +f 14029 14030 14158 +f 14030 14159 14158 +f 14030 14031 14160 +f 14030 14160 14159 +f 14031 14032 14160 +f 14032 14161 14160 +f 14032 14033 14162 +f 14032 14162 14161 +f 14033 14034 14162 +f 14034 14163 14162 +f 14034 14035 14164 +f 14034 14164 14163 +f 14035 14036 14164 +f 14036 14165 14164 +f 14036 14037 14166 +f 14036 14166 14165 +f 14037 14038 14166 +f 14038 14167 14166 +f 14038 14039 14168 +f 14038 14168 14167 +f 14039 14040 14168 +f 14040 14169 14168 +f 14040 14041 14170 +f 14040 14170 14169 +f 14041 14042 14170 +f 14042 14171 14170 +f 14042 14043 14172 +f 14042 14172 14171 +f 14043 14044 14172 +f 14044 14173 14172 +f 14045 14046 14174 +f 14046 14175 14174 +f 14046 14047 14176 +f 14046 14176 14175 +f 14047 14048 14176 +f 14048 14177 14176 +f 14048 14049 14178 +f 14048 14178 14177 +f 14049 14050 14178 +f 14050 14179 14178 +f 14050 14051 14180 +f 14050 14180 14179 +f 14051 14052 14180 +f 14052 14181 14180 +f 14052 14053 14182 +f 14052 14182 14181 +f 14053 14054 14182 +f 14054 14183 14182 +f 14054 14055 14184 +f 14054 14184 14183 +f 14055 14056 14184 +f 14056 14185 14184 +f 14056 14057 14186 +f 14056 14186 14185 +f 14057 14058 14186 +f 14058 14187 14186 +f 14058 14059 14188 +f 14058 14188 14187 +f 14059 14060 14188 +f 14060 14189 14188 +f 14060 14061 14190 +f 14060 14190 14189 +f 14061 14062 14190 +f 14062 14191 14190 +f 14062 14063 14192 +f 14062 14192 14191 +f 14063 14064 14192 +f 14064 14193 14192 +f 14064 14065 14194 +f 14064 14194 14193 +f 14065 14066 14194 +f 14066 14195 14194 +f 14066 14067 14196 +f 14066 14196 14195 +f 14067 14068 14196 +f 14068 14197 14196 +f 14068 14069 14198 +f 14068 14198 14197 +f 14069 14070 14198 +f 14070 14199 14198 +f 14070 14071 14200 +f 14070 14200 14199 +f 14071 14072 14200 +f 14072 14201 14200 +f 14072 14073 14202 +f 14072 14202 14201 +f 14073 14074 14202 +f 14074 14203 14202 +f 14074 14075 14204 +f 14074 14204 14203 +f 14075 14076 14204 +f 14076 14205 14204 +f 14076 14077 14206 +f 14076 14206 14205 +f 14077 14078 14206 +f 14078 14207 14206 +f 14078 14079 14208 +f 14078 14208 14207 +f 14079 14080 14208 +f 14080 14209 14208 +f 14080 14081 14210 +f 14080 14210 14209 +f 14081 14082 14210 +f 14082 14211 14210 +f 14082 14083 14212 +f 14082 14212 14211 +f 14083 14084 14212 +f 14084 14213 14212 +f 14084 14085 14214 +f 14084 14214 14213 +f 14085 14086 14214 +f 14086 14215 14214 +f 14086 14087 14216 +f 14086 14216 14215 +f 14087 14088 14216 +f 14088 
14217 14216 +f 14088 14089 14218 +f 14088 14218 14217 +f 14089 14090 14218 +f 14090 14219 14218 +f 14090 14091 14220 +f 14090 14220 14219 +f 14091 14092 14220 +f 14092 14221 14220 +f 14092 14093 14222 +f 14092 14222 14221 +f 14093 14094 14222 +f 14094 14223 14222 +f 14094 14095 14224 +f 14094 14224 14223 +f 14095 14096 14224 +f 14096 14225 14224 +f 14096 14097 14226 +f 14096 14226 14225 +f 14097 14098 14226 +f 14098 14227 14226 +f 14098 14099 14228 +f 14098 14228 14227 +f 14099 14100 14228 +f 14100 14229 14228 +f 14100 14101 14230 +f 14100 14230 14229 +f 14101 14102 14230 +f 14102 14231 14230 +f 14102 14103 14232 +f 14102 14232 14231 +f 14103 14104 14232 +f 14104 14233 14232 +f 14104 14105 14234 +f 14104 14234 14233 +f 14105 14106 14234 +f 14106 14235 14234 +f 14106 14107 14236 +f 14106 14236 14235 +f 14107 14108 14236 +f 14108 14237 14236 +f 14108 14109 14238 +f 14108 14238 14237 +f 14109 14110 14238 +f 14110 14239 14238 +f 14110 14111 14240 +f 14110 14240 14239 +f 14111 14112 14240 +f 14112 14241 14240 +f 14112 14113 14242 +f 14112 14242 14241 +f 14113 14114 14242 +f 14114 14243 14242 +f 14114 14115 14244 +f 14114 14244 14243 +f 14115 14116 14244 +f 14116 14245 14244 +f 14116 14117 14246 +f 14116 14246 14245 +f 14117 14118 14246 +f 14118 14247 14246 +f 14118 14119 14248 +f 14118 14248 14247 +f 14119 14120 14248 +f 14120 14249 14248 +f 14120 14121 14250 +f 14120 14250 14249 +f 14121 14122 14250 +f 14122 14251 14250 +f 14122 14123 14252 +f 14122 14252 14251 +f 14123 14124 14252 +f 14124 14253 14252 +f 14124 14125 14254 +f 14124 14254 14253 +f 14125 14126 14254 +f 14126 14255 14254 +f 14126 14127 14256 +f 14126 14256 14255 +f 14127 14128 14256 +f 14128 14257 14256 +f 14128 14129 14258 +f 14128 14258 14257 +f 14129 14130 14258 +f 14130 14259 14258 +f 14130 14131 14260 +f 14130 14260 14259 +f 14131 14132 14260 +f 14132 14261 14260 +f 14132 14133 14262 +f 14132 14262 14261 +f 14133 14134 14262 +f 14134 14263 14262 +f 14134 14135 14264 +f 14134 14264 14263 +f 14135 14136 14264 +f 14136 14265 14264 +f 14136 14137 14266 +f 14136 14266 14265 +f 14137 14138 14266 +f 14138 14267 14266 +f 14138 14139 14268 +f 14138 14268 14267 +f 14139 14140 14268 +f 14140 14269 14268 +f 14140 14141 14270 +f 14140 14270 14269 +f 14141 14142 14270 +f 14142 14271 14270 +f 14142 14143 14272 +f 14142 14272 14271 +f 14143 14144 14272 +f 14144 14273 14272 +f 14144 14145 14274 +f 14144 14274 14273 +f 14145 14146 14274 +f 14146 14275 14274 +f 14146 14147 14276 +f 14146 14276 14275 +f 14147 14148 14276 +f 14148 14277 14276 +f 14148 14149 14278 +f 14148 14278 14277 +f 14149 14150 14278 +f 14150 14279 14278 +f 14150 14151 14280 +f 14150 14280 14279 +f 14151 14152 14280 +f 14152 14281 14280 +f 14152 14153 14282 +f 14152 14282 14281 +f 14153 14154 14282 +f 14154 14283 14282 +f 14154 14155 14284 +f 14154 14284 14283 +f 14155 14156 14284 +f 14156 14285 14284 +f 14156 14157 14286 +f 14156 14286 14285 +f 14157 14158 14286 +f 14158 14287 14286 +f 14158 14159 14288 +f 14158 14288 14287 +f 14159 14160 14288 +f 14160 14289 14288 +f 14160 14161 14290 +f 14160 14290 14289 +f 14161 14162 14290 +f 14162 14291 14290 +f 14162 14163 14292 +f 14162 14292 14291 +f 14163 14164 14292 +f 14164 14293 14292 +f 14164 14165 14294 +f 14164 14294 14293 +f 14165 14166 14294 +f 14166 14295 14294 +f 14166 14167 14296 +f 14166 14296 14295 +f 14167 14168 14296 +f 14168 14297 14296 +f 14168 14169 14298 +f 14168 14298 14297 +f 14169 14170 14298 +f 14170 14299 14298 +f 14170 14171 14300 +f 14170 14300 14299 +f 14171 14172 14300 +f 14172 14301 14300 +f 14172 14173 
14302 +f 14172 14302 14301 +f 14174 14175 14304 +f 14174 14304 14303 +f 14175 14176 14304 +f 14176 14305 14304 +f 14176 14177 14306 +f 14176 14306 14305 +f 14177 14178 14306 +f 14178 14307 14306 +f 14178 14179 14308 +f 14178 14308 14307 +f 14179 14180 14308 +f 14180 14309 14308 +f 14180 14181 14310 +f 14180 14310 14309 +f 14181 14182 14310 +f 14182 14311 14310 +f 14182 14183 14312 +f 14182 14312 14311 +f 14183 14184 14312 +f 14184 14313 14312 +f 14184 14185 14314 +f 14184 14314 14313 +f 14185 14186 14314 +f 14186 14315 14314 +f 14186 14187 14316 +f 14186 14316 14315 +f 14187 14188 14316 +f 14188 14317 14316 +f 14188 14189 14318 +f 14188 14318 14317 +f 14189 14190 14318 +f 14190 14319 14318 +f 14190 14191 14320 +f 14190 14320 14319 +f 14191 14192 14320 +f 14192 14321 14320 +f 14192 14193 14322 +f 14192 14322 14321 +f 14193 14194 14322 +f 14194 14323 14322 +f 14194 14195 14324 +f 14194 14324 14323 +f 14195 14196 14324 +f 14196 14325 14324 +f 14196 14197 14326 +f 14196 14326 14325 +f 14197 14198 14326 +f 14198 14327 14326 +f 14198 14199 14328 +f 14198 14328 14327 +f 14199 14200 14328 +f 14200 14329 14328 +f 14200 14201 14330 +f 14200 14330 14329 +f 14201 14202 14330 +f 14202 14331 14330 +f 14202 14203 14332 +f 14202 14332 14331 +f 14203 14204 14332 +f 14204 14333 14332 +f 14204 14205 14334 +f 14204 14334 14333 +f 14205 14206 14334 +f 14206 14335 14334 +f 14206 14207 14336 +f 14206 14336 14335 +f 14207 14208 14336 +f 14208 14337 14336 +f 14208 14209 14338 +f 14208 14338 14337 +f 14209 14210 14338 +f 14210 14339 14338 +f 14210 14211 14340 +f 14210 14340 14339 +f 14211 14212 14340 +f 14212 14341 14340 +f 14212 14213 14342 +f 14212 14342 14341 +f 14213 14214 14342 +f 14214 14343 14342 +f 14214 14215 14344 +f 14214 14344 14343 +f 14215 14216 14344 +f 14216 14345 14344 +f 14216 14217 14346 +f 14216 14346 14345 +f 14217 14218 14346 +f 14218 14347 14346 +f 14218 14219 14348 +f 14218 14348 14347 +f 14219 14220 14348 +f 14220 14349 14348 +f 14220 14221 14350 +f 14220 14350 14349 +f 14221 14222 14350 +f 14222 14351 14350 +f 14222 14223 14352 +f 14222 14352 14351 +f 14223 14224 14352 +f 14224 14353 14352 +f 14224 14225 14354 +f 14224 14354 14353 +f 14225 14226 14354 +f 14226 14355 14354 +f 14226 14227 14356 +f 14226 14356 14355 +f 14227 14228 14356 +f 14228 14357 14356 +f 14228 14229 14358 +f 14228 14358 14357 +f 14229 14230 14358 +f 14230 14359 14358 +f 14230 14231 14360 +f 14230 14360 14359 +f 14231 14232 14360 +f 14232 14361 14360 +f 14232 14233 14362 +f 14232 14362 14361 +f 14233 14234 14362 +f 14234 14363 14362 +f 14234 14235 14364 +f 14234 14364 14363 +f 14235 14236 14364 +f 14236 14365 14364 +f 14236 14237 14366 +f 14236 14366 14365 +f 14237 14238 14366 +f 14238 14367 14366 +f 14238 14239 14368 +f 14238 14368 14367 +f 14239 14240 14368 +f 14240 14369 14368 +f 14240 14241 14370 +f 14240 14370 14369 +f 14241 14242 14370 +f 14242 14371 14370 +f 14242 14243 14372 +f 14242 14372 14371 +f 14243 14244 14372 +f 14244 14373 14372 +f 14244 14245 14374 +f 14244 14374 14373 +f 14245 14246 14374 +f 14246 14375 14374 +f 14246 14247 14376 +f 14246 14376 14375 +f 14247 14248 14376 +f 14248 14377 14376 +f 14248 14249 14378 +f 14248 14378 14377 +f 14249 14250 14378 +f 14250 14379 14378 +f 14250 14251 14380 +f 14250 14380 14379 +f 14251 14252 14380 +f 14252 14381 14380 +f 14252 14253 14382 +f 14252 14382 14381 +f 14253 14254 14382 +f 14254 14383 14382 +f 14254 14255 14384 +f 14254 14384 14383 +f 14255 14256 14384 +f 14256 14385 14384 +f 14256 14257 14386 +f 14256 14386 14385 +f 14257 14258 14386 +f 14258 14387 14386 
+f 14258 14259 14388 +f 14258 14388 14387 +f 14259 14260 14388 +f 14260 14389 14388 +f 14260 14261 14390 +f 14260 14390 14389 +f 14261 14262 14390 +f 14262 14391 14390 +f 14262 14263 14392 +f 14262 14392 14391 +f 14263 14264 14392 +f 14264 14393 14392 +f 14264 14265 14394 +f 14264 14394 14393 +f 14265 14266 14394 +f 14266 14395 14394 +f 14266 14267 14396 +f 14266 14396 14395 +f 14267 14268 14396 +f 14268 14397 14396 +f 14268 14269 14398 +f 14268 14398 14397 +f 14269 14270 14398 +f 14270 14399 14398 +f 14270 14271 14400 +f 14270 14400 14399 +f 14271 14272 14400 +f 14272 14401 14400 +f 14272 14273 14402 +f 14272 14402 14401 +f 14273 14274 14402 +f 14274 14403 14402 +f 14274 14275 14404 +f 14274 14404 14403 +f 14275 14276 14404 +f 14276 14405 14404 +f 14276 14277 14406 +f 14276 14406 14405 +f 14277 14278 14406 +f 14278 14407 14406 +f 14278 14279 14408 +f 14278 14408 14407 +f 14279 14280 14408 +f 14280 14409 14408 +f 14280 14281 14410 +f 14280 14410 14409 +f 14281 14282 14410 +f 14282 14411 14410 +f 14282 14283 14412 +f 14282 14412 14411 +f 14283 14284 14412 +f 14284 14413 14412 +f 14284 14285 14414 +f 14284 14414 14413 +f 14285 14286 14414 +f 14286 14415 14414 +f 14286 14287 14416 +f 14286 14416 14415 +f 14287 14288 14416 +f 14288 14417 14416 +f 14288 14289 14418 +f 14288 14418 14417 +f 14289 14290 14418 +f 14290 14419 14418 +f 14290 14291 14420 +f 14290 14420 14419 +f 14291 14292 14420 +f 14292 14421 14420 +f 14292 14293 14422 +f 14292 14422 14421 +f 14293 14294 14422 +f 14294 14423 14422 +f 14294 14295 14424 +f 14294 14424 14423 +f 14295 14296 14424 +f 14296 14425 14424 +f 14296 14297 14426 +f 14296 14426 14425 +f 14297 14298 14426 +f 14298 14427 14426 +f 14298 14299 14428 +f 14298 14428 14427 +f 14299 14300 14428 +f 14300 14429 14428 +f 14300 14301 14430 +f 14300 14430 14429 +f 14301 14302 14430 +f 14302 14431 14430 +f 14303 14304 14432 +f 14304 14433 14432 +f 14304 14305 14434 +f 14304 14434 14433 +f 14305 14306 14434 +f 14306 14435 14434 +f 14306 14307 14436 +f 14306 14436 14435 +f 14307 14308 14436 +f 14308 14437 14436 +f 14308 14309 14438 +f 14308 14438 14437 +f 14309 14310 14438 +f 14310 14439 14438 +f 14310 14311 14440 +f 14310 14440 14439 +f 14311 14312 14440 +f 14312 14441 14440 +f 14312 14313 14442 +f 14312 14442 14441 +f 14313 14314 14442 +f 14314 14443 14442 +f 14314 14315 14444 +f 14314 14444 14443 +f 14315 14316 14444 +f 14316 14445 14444 +f 14316 14317 14446 +f 14316 14446 14445 +f 14317 14318 14446 +f 14318 14447 14446 +f 14318 14319 14448 +f 14318 14448 14447 +f 14319 14320 14448 +f 14320 14449 14448 +f 14320 14321 14450 +f 14320 14450 14449 +f 14321 14322 14450 +f 14322 14451 14450 +f 14322 14323 14452 +f 14322 14452 14451 +f 14323 14324 14452 +f 14324 14453 14452 +f 14324 14325 14454 +f 14324 14454 14453 +f 14325 14326 14454 +f 14326 14455 14454 +f 14326 14327 14456 +f 14326 14456 14455 +f 14327 14328 14456 +f 14328 14457 14456 +f 14328 14329 14458 +f 14328 14458 14457 +f 14329 14330 14458 +f 14330 14459 14458 +f 14330 14331 14460 +f 14330 14460 14459 +f 14331 14332 14460 +f 14332 14461 14460 +f 14332 14333 14462 +f 14332 14462 14461 +f 14333 14334 14462 +f 14334 14463 14462 +f 14334 14335 14464 +f 14334 14464 14463 +f 14335 14336 14464 +f 14336 14465 14464 +f 14336 14337 14466 +f 14336 14466 14465 +f 14337 14338 14466 +f 14338 14467 14466 +f 14338 14339 14468 +f 14338 14468 14467 +f 14339 14340 14468 +f 14340 14469 14468 +f 14340 14341 14470 +f 14340 14470 14469 +f 14341 14342 14470 +f 14342 14471 14470 +f 14342 14343 14472 +f 14342 14472 14471 +f 14343 14344 14472 +f 
14344 14473 14472 +f 14344 14345 14474 +f 14344 14474 14473 +f 14345 14346 14474 +f 14346 14475 14474 +f 14346 14347 14476 +f 14346 14476 14475 +f 14347 14348 14476 +f 14348 14477 14476 +f 14348 14349 14478 +f 14348 14478 14477 +f 14349 14350 14478 +f 14350 14479 14478 +f 14350 14351 14480 +f 14350 14480 14479 +f 14351 14352 14480 +f 14352 14481 14480 +f 14352 14353 14482 +f 14352 14482 14481 +f 14353 14354 14482 +f 14354 14483 14482 +f 14354 14355 14484 +f 14354 14484 14483 +f 14355 14356 14484 +f 14356 14485 14484 +f 14356 14357 14486 +f 14356 14486 14485 +f 14357 14358 14486 +f 14358 14487 14486 +f 14358 14359 14488 +f 14358 14488 14487 +f 14359 14360 14488 +f 14360 14489 14488 +f 14360 14361 14490 +f 14360 14490 14489 +f 14361 14362 14490 +f 14362 14491 14490 +f 14362 14363 14492 +f 14362 14492 14491 +f 14363 14364 14492 +f 14364 14493 14492 +f 14364 14365 14494 +f 14364 14494 14493 +f 14365 14366 14494 +f 14366 14495 14494 +f 14366 14367 14496 +f 14366 14496 14495 +f 14367 14368 14496 +f 14368 14497 14496 +f 14368 14369 14498 +f 14368 14498 14497 +f 14369 14370 14498 +f 14370 14499 14498 +f 14370 14371 14500 +f 14370 14500 14499 +f 14371 14372 14500 +f 14372 14501 14500 +f 14372 14373 14502 +f 14372 14502 14501 +f 14373 14374 14502 +f 14374 14503 14502 +f 14374 14375 14504 +f 14374 14504 14503 +f 14375 14376 14504 +f 14376 14505 14504 +f 14376 14377 14506 +f 14376 14506 14505 +f 14377 14378 14506 +f 14378 14507 14506 +f 14378 14379 14508 +f 14378 14508 14507 +f 14379 14380 14508 +f 14380 14509 14508 +f 14380 14381 14510 +f 14380 14510 14509 +f 14381 14382 14510 +f 14382 14511 14510 +f 14382 14383 14512 +f 14382 14512 14511 +f 14383 14384 14512 +f 14384 14513 14512 +f 14384 14385 14514 +f 14384 14514 14513 +f 14385 14386 14514 +f 14386 14515 14514 +f 14386 14387 14516 +f 14386 14516 14515 +f 14387 14388 14516 +f 14388 14517 14516 +f 14388 14389 14518 +f 14388 14518 14517 +f 14389 14390 14518 +f 14390 14519 14518 +f 14390 14391 14520 +f 14390 14520 14519 +f 14391 14392 14520 +f 14392 14521 14520 +f 14392 14393 14522 +f 14392 14522 14521 +f 14393 14394 14522 +f 14394 14523 14522 +f 14394 14395 14524 +f 14394 14524 14523 +f 14395 14396 14524 +f 14396 14525 14524 +f 14396 14397 14526 +f 14396 14526 14525 +f 14397 14398 14526 +f 14398 14527 14526 +f 14398 14399 14528 +f 14398 14528 14527 +f 14399 14400 14528 +f 14400 14529 14528 +f 14400 14401 14530 +f 14400 14530 14529 +f 14401 14402 14530 +f 14402 14531 14530 +f 14402 14403 14532 +f 14402 14532 14531 +f 14403 14404 14532 +f 14404 14533 14532 +f 14404 14405 14534 +f 14404 14534 14533 +f 14405 14406 14534 +f 14406 14535 14534 +f 14406 14407 14536 +f 14406 14536 14535 +f 14407 14408 14536 +f 14408 14537 14536 +f 14408 14409 14538 +f 14408 14538 14537 +f 14409 14410 14538 +f 14410 14539 14538 +f 14410 14411 14540 +f 14410 14540 14539 +f 14411 14412 14540 +f 14412 14541 14540 +f 14412 14413 14542 +f 14412 14542 14541 +f 14413 14414 14542 +f 14414 14543 14542 +f 14414 14415 14544 +f 14414 14544 14543 +f 14415 14416 14544 +f 14416 14545 14544 +f 14416 14417 14546 +f 14416 14546 14545 +f 14417 14418 14546 +f 14418 14547 14546 +f 14418 14419 14548 +f 14418 14548 14547 +f 14419 14420 14548 +f 14420 14549 14548 +f 14420 14421 14550 +f 14420 14550 14549 +f 14421 14422 14550 +f 14422 14551 14550 +f 14422 14423 14552 +f 14422 14552 14551 +f 14423 14424 14552 +f 14424 14553 14552 +f 14424 14425 14554 +f 14424 14554 14553 +f 14425 14426 14554 +f 14426 14555 14554 +f 14426 14427 14556 +f 14426 14556 14555 +f 14427 14428 14556 +f 14428 14557 14556 +f 14428 
14429 14558 +f 14428 14558 14557 +f 14429 14430 14558 +f 14430 14559 14558 +f 14430 14431 14560 +f 14430 14560 14559 +f 14432 14433 14562 +f 14432 14562 14561 +f 14433 14434 14562 +f 14434 14563 14562 +f 14434 14435 14564 +f 14434 14564 14563 +f 14435 14436 14564 +f 14436 14565 14564 +f 14436 14437 14566 +f 14436 14566 14565 +f 14437 14438 14566 +f 14438 14567 14566 +f 14438 14439 14568 +f 14438 14568 14567 +f 14439 14440 14568 +f 14440 14569 14568 +f 14440 14441 14570 +f 14440 14570 14569 +f 14441 14442 14570 +f 14442 14571 14570 +f 14442 14443 14572 +f 14442 14572 14571 +f 14443 14444 14572 +f 14444 14573 14572 +f 14444 14445 14574 +f 14444 14574 14573 +f 14445 14446 14574 +f 14446 14575 14574 +f 14446 14447 14576 +f 14446 14576 14575 +f 14447 14448 14576 +f 14448 14577 14576 +f 14448 14449 14578 +f 14448 14578 14577 +f 14449 14450 14578 +f 14450 14579 14578 +f 14450 14451 14580 +f 14450 14580 14579 +f 14451 14452 14580 +f 14452 14581 14580 +f 14452 14453 14582 +f 14452 14582 14581 +f 14453 14454 14582 +f 14454 14583 14582 +f 14454 14455 14584 +f 14454 14584 14583 +f 14455 14456 14584 +f 14456 14585 14584 +f 14456 14457 14586 +f 14456 14586 14585 +f 14457 14458 14586 +f 14458 14587 14586 +f 14458 14459 14588 +f 14458 14588 14587 +f 14459 14460 14588 +f 14460 14589 14588 +f 14460 14461 14590 +f 14460 14590 14589 +f 14461 14462 14590 +f 14462 14591 14590 +f 14462 14463 14592 +f 14462 14592 14591 +f 14463 14464 14592 +f 14464 14593 14592 +f 14464 14465 14594 +f 14464 14594 14593 +f 14465 14466 14594 +f 14466 14595 14594 +f 14466 14467 14596 +f 14466 14596 14595 +f 14467 14468 14596 +f 14468 14597 14596 +f 14468 14469 14598 +f 14468 14598 14597 +f 14469 14470 14598 +f 14470 14599 14598 +f 14470 14471 14600 +f 14470 14600 14599 +f 14471 14472 14600 +f 14472 14601 14600 +f 14472 14473 14602 +f 14472 14602 14601 +f 14473 14474 14602 +f 14474 14603 14602 +f 14474 14475 14604 +f 14474 14604 14603 +f 14475 14476 14604 +f 14476 14605 14604 +f 14476 14477 14606 +f 14476 14606 14605 +f 14477 14478 14606 +f 14478 14607 14606 +f 14478 14479 14608 +f 14478 14608 14607 +f 14479 14480 14608 +f 14480 14609 14608 +f 14480 14481 14610 +f 14480 14610 14609 +f 14481 14482 14610 +f 14482 14611 14610 +f 14482 14483 14612 +f 14482 14612 14611 +f 14483 14484 14612 +f 14484 14613 14612 +f 14484 14485 14614 +f 14484 14614 14613 +f 14485 14486 14614 +f 14486 14615 14614 +f 14486 14487 14616 +f 14486 14616 14615 +f 14487 14488 14616 +f 14488 14617 14616 +f 14488 14489 14618 +f 14488 14618 14617 +f 14489 14490 14618 +f 14490 14619 14618 +f 14490 14491 14620 +f 14490 14620 14619 +f 14491 14492 14620 +f 14492 14621 14620 +f 14492 14493 14622 +f 14492 14622 14621 +f 14493 14494 14622 +f 14494 14623 14622 +f 14494 14495 14624 +f 14494 14624 14623 +f 14495 14496 14624 +f 14496 14625 14624 +f 14496 14497 14626 +f 14496 14626 14625 +f 14497 14498 14626 +f 14498 14627 14626 +f 14498 14499 14628 +f 14498 14628 14627 +f 14499 14500 14628 +f 14500 14629 14628 +f 14500 14501 14630 +f 14500 14630 14629 +f 14501 14502 14630 +f 14502 14631 14630 +f 14502 14503 14632 +f 14502 14632 14631 +f 14503 14504 14632 +f 14504 14633 14632 +f 14504 14505 14634 +f 14504 14634 14633 +f 14505 14506 14634 +f 14506 14635 14634 +f 14506 14507 14636 +f 14506 14636 14635 +f 14507 14508 14636 +f 14508 14637 14636 +f 14508 14509 14638 +f 14508 14638 14637 +f 14509 14510 14638 +f 14510 14639 14638 +f 14510 14511 14640 +f 14510 14640 14639 +f 14511 14512 14640 +f 14512 14641 14640 +f 14512 14513 14642 +f 14512 14642 14641 +f 14513 14514 14642 +f 14514 14643 
14642 +f 14514 14515 14644 +f 14514 14644 14643 +f 14515 14516 14644 +f 14516 14645 14644 +f 14516 14517 14646 +f 14516 14646 14645 +f 14517 14518 14646 +f 14518 14647 14646 +f 14518 14519 14648 +f 14518 14648 14647 +f 14519 14520 14648 +f 14520 14649 14648 +f 14520 14521 14650 +f 14520 14650 14649 +f 14521 14522 14650 +f 14522 14651 14650 +f 14522 14523 14652 +f 14522 14652 14651 +f 14523 14524 14652 +f 14524 14653 14652 +f 14524 14525 14654 +f 14524 14654 14653 +f 14525 14526 14654 +f 14526 14655 14654 +f 14526 14527 14656 +f 14526 14656 14655 +f 14527 14528 14656 +f 14528 14657 14656 +f 14528 14529 14658 +f 14528 14658 14657 +f 14529 14530 14658 +f 14530 14659 14658 +f 14530 14531 14660 +f 14530 14660 14659 +f 14531 14532 14660 +f 14532 14661 14660 +f 14532 14533 14662 +f 14532 14662 14661 +f 14533 14534 14662 +f 14534 14663 14662 +f 14534 14535 14664 +f 14534 14664 14663 +f 14535 14536 14664 +f 14536 14665 14664 +f 14536 14537 14666 +f 14536 14666 14665 +f 14537 14538 14666 +f 14538 14667 14666 +f 14538 14539 14668 +f 14538 14668 14667 +f 14539 14540 14668 +f 14540 14669 14668 +f 14540 14541 14670 +f 14540 14670 14669 +f 14541 14542 14670 +f 14542 14671 14670 +f 14542 14543 14672 +f 14542 14672 14671 +f 14543 14544 14672 +f 14544 14673 14672 +f 14544 14545 14674 +f 14544 14674 14673 +f 14545 14546 14674 +f 14546 14675 14674 +f 14546 14547 14676 +f 14546 14676 14675 +f 14547 14548 14676 +f 14548 14677 14676 +f 14548 14549 14678 +f 14548 14678 14677 +f 14549 14550 14678 +f 14550 14679 14678 +f 14550 14551 14680 +f 14550 14680 14679 +f 14551 14552 14680 +f 14552 14681 14680 +f 14552 14553 14682 +f 14552 14682 14681 +f 14553 14554 14682 +f 14554 14683 14682 +f 14554 14555 14684 +f 14554 14684 14683 +f 14555 14556 14684 +f 14556 14685 14684 +f 14556 14557 14686 +f 14556 14686 14685 +f 14557 14558 14686 +f 14558 14687 14686 +f 14558 14559 14688 +f 14558 14688 14687 +f 14559 14560 14688 +f 14560 14689 14688 +f 14561 14562 14690 +f 14562 14691 14690 +f 14562 14563 14692 +f 14562 14692 14691 +f 14563 14564 14692 +f 14564 14693 14692 +f 14564 14565 14694 +f 14564 14694 14693 +f 14565 14566 14694 +f 14566 14695 14694 +f 14566 14567 14696 +f 14566 14696 14695 +f 14567 14568 14696 +f 14568 14697 14696 +f 14568 14569 14698 +f 14568 14698 14697 +f 14569 14570 14698 +f 14570 14699 14698 +f 14570 14571 14700 +f 14570 14700 14699 +f 14571 14572 14700 +f 14572 14701 14700 +f 14572 14573 14702 +f 14572 14702 14701 +f 14573 14574 14702 +f 14574 14703 14702 +f 14574 14575 14704 +f 14574 14704 14703 +f 14575 14576 14704 +f 14576 14705 14704 +f 14576 14577 14706 +f 14576 14706 14705 +f 14577 14578 14706 +f 14578 14707 14706 +f 14578 14579 14708 +f 14578 14708 14707 +f 14579 14580 14708 +f 14580 14709 14708 +f 14580 14581 14710 +f 14580 14710 14709 +f 14581 14582 14710 +f 14582 14711 14710 +f 14582 14583 14712 +f 14582 14712 14711 +f 14583 14584 14712 +f 14584 14713 14712 +f 14584 14585 14714 +f 14584 14714 14713 +f 14585 14586 14714 +f 14586 14715 14714 +f 14586 14587 14716 +f 14586 14716 14715 +f 14587 14588 14716 +f 14588 14717 14716 +f 14588 14589 14718 +f 14588 14718 14717 +f 14589 14590 14718 +f 14590 14719 14718 +f 14590 14591 14720 +f 14590 14720 14719 +f 14591 14592 14720 +f 14592 14721 14720 +f 14592 14593 14722 +f 14592 14722 14721 +f 14593 14594 14722 +f 14594 14723 14722 +f 14594 14595 14724 +f 14594 14724 14723 +f 14595 14596 14724 +f 14596 14725 14724 +f 14596 14597 14726 +f 14596 14726 14725 +f 14597 14598 14726 +f 14598 14727 14726 +f 14598 14599 14728 +f 14598 14728 14727 +f 14599 14600 14728 
+f 14600 14729 14728 +f 14600 14601 14730 +f 14600 14730 14729 +f 14601 14602 14730 +f 14602 14731 14730 +f 14602 14603 14732 +f 14602 14732 14731 +f 14603 14604 14732 +f 14604 14733 14732 +f 14604 14605 14734 +f 14604 14734 14733 +f 14605 14606 14734 +f 14606 14735 14734 +f 14606 14607 14736 +f 14606 14736 14735 +f 14607 14608 14736 +f 14608 14737 14736 +f 14608 14609 14738 +f 14608 14738 14737 +f 14609 14610 14738 +f 14610 14739 14738 +f 14610 14611 14740 +f 14610 14740 14739 +f 14611 14612 14740 +f 14612 14741 14740 +f 14612 14613 14742 +f 14612 14742 14741 +f 14613 14614 14742 +f 14614 14743 14742 +f 14614 14615 14744 +f 14614 14744 14743 +f 14615 14616 14744 +f 14616 14745 14744 +f 14616 14617 14746 +f 14616 14746 14745 +f 14617 14618 14746 +f 14618 14747 14746 +f 14618 14619 14748 +f 14618 14748 14747 +f 14619 14620 14748 +f 14620 14749 14748 +f 14620 14621 14750 +f 14620 14750 14749 +f 14621 14622 14750 +f 14622 14751 14750 +f 14622 14623 14752 +f 14622 14752 14751 +f 14623 14624 14752 +f 14624 14753 14752 +f 14624 14625 14754 +f 14624 14754 14753 +f 14625 14626 14754 +f 14626 14755 14754 +f 14626 14627 14756 +f 14626 14756 14755 +f 14627 14628 14756 +f 14628 14757 14756 +f 14628 14629 14758 +f 14628 14758 14757 +f 14629 14630 14758 +f 14630 14759 14758 +f 14630 14631 14760 +f 14630 14760 14759 +f 14631 14632 14760 +f 14632 14761 14760 +f 14632 14633 14762 +f 14632 14762 14761 +f 14633 14634 14762 +f 14634 14763 14762 +f 14634 14635 14764 +f 14634 14764 14763 +f 14635 14636 14764 +f 14636 14765 14764 +f 14636 14637 14766 +f 14636 14766 14765 +f 14637 14638 14766 +f 14638 14767 14766 +f 14638 14639 14768 +f 14638 14768 14767 +f 14639 14640 14768 +f 14640 14769 14768 +f 14640 14641 14770 +f 14640 14770 14769 +f 14641 14642 14770 +f 14642 14771 14770 +f 14642 14643 14772 +f 14642 14772 14771 +f 14643 14644 14772 +f 14644 14773 14772 +f 14644 14645 14774 +f 14644 14774 14773 +f 14645 14646 14774 +f 14646 14775 14774 +f 14646 14647 14776 +f 14646 14776 14775 +f 14647 14648 14776 +f 14648 14777 14776 +f 14648 14649 14778 +f 14648 14778 14777 +f 14649 14650 14778 +f 14650 14779 14778 +f 14650 14651 14780 +f 14650 14780 14779 +f 14651 14652 14780 +f 14652 14781 14780 +f 14652 14653 14782 +f 14652 14782 14781 +f 14653 14654 14782 +f 14654 14783 14782 +f 14654 14655 14784 +f 14654 14784 14783 +f 14655 14656 14784 +f 14656 14785 14784 +f 14656 14657 14786 +f 14656 14786 14785 +f 14657 14658 14786 +f 14658 14787 14786 +f 14658 14659 14788 +f 14658 14788 14787 +f 14659 14660 14788 +f 14660 14789 14788 +f 14660 14661 14790 +f 14660 14790 14789 +f 14661 14662 14790 +f 14662 14791 14790 +f 14662 14663 14792 +f 14662 14792 14791 +f 14663 14664 14792 +f 14664 14793 14792 +f 14664 14665 14794 +f 14664 14794 14793 +f 14665 14666 14794 +f 14666 14795 14794 +f 14666 14667 14796 +f 14666 14796 14795 +f 14667 14668 14796 +f 14668 14797 14796 +f 14668 14669 14798 +f 14668 14798 14797 +f 14669 14670 14798 +f 14670 14799 14798 +f 14670 14671 14800 +f 14670 14800 14799 +f 14671 14672 14800 +f 14672 14801 14800 +f 14672 14673 14802 +f 14672 14802 14801 +f 14673 14674 14802 +f 14674 14803 14802 +f 14674 14675 14804 +f 14674 14804 14803 +f 14675 14676 14804 +f 14676 14805 14804 +f 14676 14677 14806 +f 14676 14806 14805 +f 14677 14678 14806 +f 14678 14807 14806 +f 14678 14679 14808 +f 14678 14808 14807 +f 14679 14680 14808 +f 14680 14809 14808 +f 14680 14681 14810 +f 14680 14810 14809 +f 14681 14682 14810 +f 14682 14811 14810 +f 14682 14683 14812 +f 14682 14812 14811 +f 14683 14684 14812 +f 14684 14813 14812 +f 
14684 14685 14814 +f 14684 14814 14813 +f 14685 14686 14814 +f 14686 14815 14814 +f 14686 14687 14816 +f 14686 14816 14815 +f 14687 14688 14816 +f 14688 14817 14816 +f 14688 14689 14818 +f 14688 14818 14817 +f 14690 14691 14820 +f 14690 14820 14819 +f 14691 14692 14820 +f 14692 14821 14820 +f 14692 14693 14822 +f 14692 14822 14821 +f 14693 14694 14822 +f 14694 14823 14822 +f 14694 14695 14824 +f 14694 14824 14823 +f 14695 14696 14824 +f 14696 14825 14824 +f 14696 14697 14826 +f 14696 14826 14825 +f 14697 14698 14826 +f 14698 14827 14826 +f 14698 14699 14828 +f 14698 14828 14827 +f 14699 14700 14828 +f 14700 14829 14828 +f 14700 14701 14830 +f 14700 14830 14829 +f 14701 14702 14830 +f 14702 14831 14830 +f 14702 14703 14832 +f 14702 14832 14831 +f 14703 14704 14832 +f 14704 14833 14832 +f 14704 14705 14834 +f 14704 14834 14833 +f 14705 14706 14834 +f 14706 14835 14834 +f 14706 14707 14836 +f 14706 14836 14835 +f 14707 14708 14836 +f 14708 14837 14836 +f 14708 14709 14838 +f 14708 14838 14837 +f 14709 14710 14838 +f 14710 14839 14838 +f 14710 14711 14840 +f 14710 14840 14839 +f 14711 14712 14840 +f 14712 14841 14840 +f 14712 14713 14842 +f 14712 14842 14841 +f 14713 14714 14842 +f 14714 14843 14842 +f 14714 14715 14844 +f 14714 14844 14843 +f 14715 14716 14844 +f 14716 14845 14844 +f 14716 14717 14846 +f 14716 14846 14845 +f 14717 14718 14846 +f 14718 14847 14846 +f 14718 14719 14848 +f 14718 14848 14847 +f 14719 14720 14848 +f 14720 14849 14848 +f 14720 14721 14850 +f 14720 14850 14849 +f 14721 14722 14850 +f 14722 14851 14850 +f 14722 14723 14852 +f 14722 14852 14851 +f 14723 14724 14852 +f 14724 14853 14852 +f 14724 14725 14854 +f 14724 14854 14853 +f 14725 14726 14854 +f 14726 14855 14854 +f 14726 14727 14856 +f 14726 14856 14855 +f 14727 14728 14856 +f 14728 14857 14856 +f 14728 14729 14858 +f 14728 14858 14857 +f 14729 14730 14858 +f 14730 14859 14858 +f 14730 14731 14860 +f 14730 14860 14859 +f 14731 14732 14860 +f 14732 14861 14860 +f 14732 14733 14862 +f 14732 14862 14861 +f 14733 14734 14862 +f 14734 14863 14862 +f 14734 14735 14864 +f 14734 14864 14863 +f 14735 14736 14864 +f 14736 14865 14864 +f 14736 14737 14866 +f 14736 14866 14865 +f 14737 14738 14866 +f 14738 14867 14866 +f 14738 14739 14868 +f 14738 14868 14867 +f 14739 14740 14868 +f 14740 14869 14868 +f 14740 14741 14870 +f 14740 14870 14869 +f 14741 14742 14870 +f 14742 14871 14870 +f 14742 14743 14872 +f 14742 14872 14871 +f 14743 14744 14872 +f 14744 14873 14872 +f 14744 14745 14874 +f 14744 14874 14873 +f 14745 14746 14874 +f 14746 14875 14874 +f 14746 14747 14876 +f 14746 14876 14875 +f 14747 14748 14876 +f 14748 14877 14876 +f 14748 14749 14878 +f 14748 14878 14877 +f 14749 14750 14878 +f 14750 14879 14878 +f 14750 14751 14880 +f 14750 14880 14879 +f 14751 14752 14880 +f 14752 14881 14880 +f 14752 14753 14882 +f 14752 14882 14881 +f 14753 14754 14882 +f 14754 14883 14882 +f 14754 14755 14884 +f 14754 14884 14883 +f 14755 14756 14884 +f 14756 14885 14884 +f 14756 14757 14886 +f 14756 14886 14885 +f 14757 14758 14886 +f 14758 14887 14886 +f 14758 14759 14888 +f 14758 14888 14887 +f 14759 14760 14888 +f 14760 14889 14888 +f 14760 14761 14890 +f 14760 14890 14889 +f 14761 14762 14890 +f 14762 14891 14890 +f 14762 14763 14892 +f 14762 14892 14891 +f 14763 14764 14892 +f 14764 14893 14892 +f 14764 14765 14894 +f 14764 14894 14893 +f 14765 14766 14894 +f 14766 14895 14894 +f 14766 14767 14896 +f 14766 14896 14895 +f 14767 14768 14896 +f 14768 14897 14896 +f 14768 14769 14898 +f 14768 14898 14897 +f 14769 14770 14898 +f 14770 
14899 14898 +f 14770 14771 14900 +f 14770 14900 14899 +f 14771 14772 14900 +f 14772 14901 14900 +f 14772 14773 14902 +f 14772 14902 14901 +f 14773 14774 14902 +f 14774 14903 14902 +f 14774 14775 14904 +f 14774 14904 14903 +f 14775 14776 14904 +f 14776 14905 14904 +f 14776 14777 14906 +f 14776 14906 14905 +f 14777 14778 14906 +f 14778 14907 14906 +f 14778 14779 14908 +f 14778 14908 14907 +f 14779 14780 14908 +f 14780 14909 14908 +f 14780 14781 14910 +f 14780 14910 14909 +f 14781 14782 14910 +f 14782 14911 14910 +f 14782 14783 14912 +f 14782 14912 14911 +f 14783 14784 14912 +f 14784 14913 14912 +f 14784 14785 14914 +f 14784 14914 14913 +f 14785 14786 14914 +f 14786 14915 14914 +f 14786 14787 14916 +f 14786 14916 14915 +f 14787 14788 14916 +f 14788 14917 14916 +f 14788 14789 14918 +f 14788 14918 14917 +f 14789 14790 14918 +f 14790 14919 14918 +f 14790 14791 14920 +f 14790 14920 14919 +f 14791 14792 14920 +f 14792 14921 14920 +f 14792 14793 14922 +f 14792 14922 14921 +f 14793 14794 14922 +f 14794 14923 14922 +f 14794 14795 14924 +f 14794 14924 14923 +f 14795 14796 14924 +f 14796 14925 14924 +f 14796 14797 14926 +f 14796 14926 14925 +f 14797 14798 14926 +f 14798 14927 14926 +f 14798 14799 14928 +f 14798 14928 14927 +f 14799 14800 14928 +f 14800 14929 14928 +f 14800 14801 14930 +f 14800 14930 14929 +f 14801 14802 14930 +f 14802 14931 14930 +f 14802 14803 14932 +f 14802 14932 14931 +f 14803 14804 14932 +f 14804 14933 14932 +f 14804 14805 14934 +f 14804 14934 14933 +f 14805 14806 14934 +f 14806 14935 14934 +f 14806 14807 14936 +f 14806 14936 14935 +f 14807 14808 14936 +f 14808 14937 14936 +f 14808 14809 14938 +f 14808 14938 14937 +f 14809 14810 14938 +f 14810 14939 14938 +f 14810 14811 14940 +f 14810 14940 14939 +f 14811 14812 14940 +f 14812 14941 14940 +f 14812 14813 14942 +f 14812 14942 14941 +f 14813 14814 14942 +f 14814 14943 14942 +f 14814 14815 14944 +f 14814 14944 14943 +f 14815 14816 14944 +f 14816 14945 14944 +f 14816 14817 14946 +f 14816 14946 14945 +f 14817 14818 14946 +f 14818 14947 14946 +f 14819 14820 14948 +f 14820 14949 14948 +f 14820 14821 14950 +f 14820 14950 14949 +f 14821 14822 14950 +f 14822 14951 14950 +f 14822 14823 14952 +f 14822 14952 14951 +f 14823 14824 14952 +f 14824 14953 14952 +f 14824 14825 14954 +f 14824 14954 14953 +f 14825 14826 14954 +f 14826 14955 14954 +f 14826 14827 14956 +f 14826 14956 14955 +f 14827 14828 14956 +f 14828 14957 14956 +f 14828 14829 14958 +f 14828 14958 14957 +f 14829 14830 14958 +f 14830 14959 14958 +f 14830 14831 14960 +f 14830 14960 14959 +f 14831 14832 14960 +f 14832 14961 14960 +f 14832 14833 14962 +f 14832 14962 14961 +f 14833 14834 14962 +f 14834 14963 14962 +f 14834 14835 14964 +f 14834 14964 14963 +f 14835 14836 14964 +f 14836 14965 14964 +f 14836 14837 14966 +f 14836 14966 14965 +f 14837 14838 14966 +f 14838 14967 14966 +f 14838 14839 14968 +f 14838 14968 14967 +f 14839 14840 14968 +f 14840 14969 14968 +f 14840 14841 14970 +f 14840 14970 14969 +f 14841 14842 14970 +f 14842 14971 14970 +f 14842 14843 14972 +f 14842 14972 14971 +f 14843 14844 14972 +f 14844 14973 14972 +f 14844 14845 14974 +f 14844 14974 14973 +f 14845 14846 14974 +f 14846 14975 14974 +f 14846 14847 14976 +f 14846 14976 14975 +f 14847 14848 14976 +f 14848 14977 14976 +f 14848 14849 14978 +f 14848 14978 14977 +f 14849 14850 14978 +f 14850 14979 14978 +f 14850 14851 14980 +f 14850 14980 14979 +f 14851 14852 14980 +f 14852 14981 14980 +f 14852 14853 14982 +f 14852 14982 14981 +f 14853 14854 14982 +f 14854 14983 14982 +f 14854 14855 14984 +f 14854 14984 14983 +f 14855 14856 
[Wavefront OBJ mesh data: thousands of added triangle-face records of the form "+f v1 v2 v3" (vertex indices roughly 14856-21882) continue here; the verbatim face list is omitted for readability.]
16682 +f 16560 16682 16681 +f 16561 16562 16682 +f 16562 16683 16682 +f 16562 16563 16684 +f 16562 16684 16683 +f 16563 16564 16684 +f 16564 16685 16684 +f 16564 16565 16686 +f 16564 16686 16685 +f 16565 16566 16686 +f 16566 16687 16686 +f 16566 16567 16688 +f 16566 16688 16687 +f 16567 16568 16688 +f 16568 16689 16688 +f 16568 16569 16690 +f 16568 16690 16689 +f 16569 16570 16690 +f 16570 16691 16690 +f 16570 16571 16692 +f 16570 16692 16691 +f 16571 16572 16692 +f 16572 16693 16692 +f 16572 16573 16694 +f 16572 16694 16693 +f 16573 16574 16694 +f 16574 16695 16694 +f 16574 16575 16696 +f 16574 16696 16695 +f 16575 16576 16696 +f 16576 16697 16696 +f 16576 16577 16698 +f 16576 16698 16697 +f 16577 16578 16698 +f 16578 16699 16698 +f 16578 16579 16700 +f 16578 16700 16699 +f 16579 16580 16700 +f 16580 16701 16700 +f 16580 16581 16702 +f 16580 16702 16701 +f 16581 16582 16702 +f 16582 16703 16702 +f 16582 16583 16704 +f 16582 16704 16703 +f 16583 16584 16704 +f 16584 16705 16704 +f 16584 16585 16706 +f 16584 16706 16705 +f 16585 16586 16706 +f 16586 16707 16706 +f 16586 16587 16708 +f 16586 16708 16707 +f 16587 16588 16708 +f 16588 16709 16708 +f 16588 16589 16710 +f 16588 16710 16709 +f 16589 16590 16710 +f 16590 16711 16710 +f 16590 16591 16712 +f 16590 16712 16711 +f 16591 16592 16712 +f 16592 16713 16712 +f 16592 16593 16714 +f 16592 16714 16713 +f 16593 16594 16714 +f 16594 16715 16714 +f 16594 16595 16716 +f 16594 16716 16715 +f 16595 16596 16716 +f 16596 16717 16716 +f 16596 16597 16718 +f 16596 16718 16717 +f 16597 16598 16718 +f 16598 16719 16718 +f 16598 16599 16720 +f 16598 16720 16719 +f 16599 16600 16720 +f 16600 16721 16720 +f 16600 16601 16722 +f 16600 16722 16721 +f 16601 16602 16722 +f 16602 16723 16722 +f 16602 16603 16724 +f 16602 16724 16723 +f 16603 16604 16724 +f 16604 16725 16724 +f 16604 16605 16726 +f 16604 16726 16725 +f 16605 16606 16726 +f 16606 16727 16726 +f 16606 16607 16728 +f 16606 16728 16727 +f 16607 16608 16728 +f 16608 16729 16728 +f 16608 16609 16730 +f 16608 16730 16729 +f 16609 16610 16730 +f 16610 16731 16730 +f 16610 16611 16732 +f 16610 16732 16731 +f 16611 16612 16732 +f 16612 16733 16732 +f 16612 16613 16734 +f 16612 16734 16733 +f 16613 16614 16734 +f 16614 16735 16734 +f 16614 16615 16736 +f 16614 16736 16735 +f 16616 16738 16737 +f 16616 16617 16739 +f 16616 16739 16738 +f 16617 16618 16739 +f 16618 16740 16739 +f 16618 16619 16741 +f 16618 16741 16740 +f 16619 16620 16741 +f 16620 16742 16741 +f 16620 16621 16743 +f 16620 16743 16742 +f 16621 16622 16743 +f 16622 16744 16743 +f 16622 16623 16745 +f 16622 16745 16744 +f 16623 16624 16745 +f 16624 16746 16745 +f 16624 16625 16747 +f 16624 16747 16746 +f 16625 16626 16747 +f 16626 16748 16747 +f 16626 16627 16749 +f 16626 16749 16748 +f 16627 16628 16749 +f 16628 16750 16749 +f 16628 16629 16751 +f 16628 16751 16750 +f 16629 16630 16751 +f 16630 16752 16751 +f 16630 16631 16753 +f 16630 16753 16752 +f 16631 16632 16753 +f 16632 16754 16753 +f 16632 16633 16755 +f 16632 16755 16754 +f 16633 16634 16755 +f 16634 16756 16755 +f 16634 16635 16757 +f 16634 16757 16756 +f 16635 16636 16757 +f 16636 16758 16757 +f 16636 16637 16759 +f 16636 16759 16758 +f 16637 16638 16759 +f 16638 16760 16759 +f 16638 16639 16761 +f 16638 16761 16760 +f 16639 16640 16761 +f 16640 16762 16761 +f 16640 16641 16763 +f 16640 16763 16762 +f 16641 16642 16763 +f 16642 16764 16763 +f 16642 16643 16765 +f 16642 16765 16764 +f 16643 16644 16765 +f 16644 16766 16765 +f 16644 16645 16767 +f 16644 16767 16766 +f 16645 16646 16767 
+f 16646 16768 16767 +f 16646 16647 16769 +f 16646 16769 16768 +f 16647 16648 16769 +f 16648 16770 16769 +f 16648 16649 16771 +f 16648 16771 16770 +f 16649 16650 16771 +f 16650 16772 16771 +f 16650 16651 16773 +f 16650 16773 16772 +f 16651 16652 16773 +f 16652 16774 16773 +f 16652 16653 16775 +f 16652 16775 16774 +f 16653 16654 16775 +f 16654 16776 16775 +f 16654 16655 16777 +f 16654 16777 16776 +f 16655 16656 16777 +f 16656 16778 16777 +f 16656 16657 16779 +f 16656 16779 16778 +f 16657 16658 16779 +f 16658 16780 16779 +f 16658 16659 16781 +f 16658 16781 16780 +f 16659 16660 16781 +f 16660 16782 16781 +f 16660 16661 16783 +f 16660 16783 16782 +f 16661 16662 16783 +f 16662 16784 16783 +f 16662 16663 16785 +f 16662 16785 16784 +f 16663 16664 16785 +f 16664 16786 16785 +f 16664 16665 16787 +f 16664 16787 16786 +f 16665 16666 16787 +f 16666 16788 16787 +f 16666 16667 16789 +f 16666 16789 16788 +f 16667 16668 16789 +f 16668 16790 16789 +f 16668 16669 16791 +f 16668 16791 16790 +f 16669 16670 16791 +f 16670 16792 16791 +f 16670 16671 16793 +f 16670 16793 16792 +f 16671 16672 16793 +f 16672 16794 16793 +f 16672 16673 16795 +f 16672 16795 16794 +f 16673 16674 16795 +f 16674 16796 16795 +f 16674 16675 16797 +f 16674 16797 16796 +f 16675 16676 16797 +f 16676 16798 16797 +f 16676 16677 16799 +f 16676 16799 16798 +f 16677 16678 16799 +f 16678 16800 16799 +f 16678 16679 16801 +f 16678 16801 16800 +f 16679 16680 16801 +f 16680 16802 16801 +f 16680 16681 16803 +f 16680 16803 16802 +f 16681 16682 16803 +f 16682 16804 16803 +f 16682 16683 16805 +f 16682 16805 16804 +f 16683 16684 16805 +f 16684 16806 16805 +f 16684 16685 16807 +f 16684 16807 16806 +f 16685 16686 16807 +f 16686 16808 16807 +f 16686 16687 16809 +f 16686 16809 16808 +f 16687 16688 16809 +f 16688 16810 16809 +f 16688 16689 16811 +f 16688 16811 16810 +f 16689 16690 16811 +f 16690 16812 16811 +f 16690 16691 16813 +f 16690 16813 16812 +f 16691 16692 16813 +f 16692 16814 16813 +f 16692 16693 16815 +f 16692 16815 16814 +f 16693 16694 16815 +f 16694 16816 16815 +f 16694 16695 16817 +f 16694 16817 16816 +f 16695 16696 16817 +f 16696 16818 16817 +f 16696 16697 16819 +f 16696 16819 16818 +f 16697 16698 16819 +f 16698 16820 16819 +f 16698 16699 16821 +f 16698 16821 16820 +f 16699 16700 16821 +f 16700 16822 16821 +f 16700 16701 16823 +f 16700 16823 16822 +f 16701 16702 16823 +f 16702 16824 16823 +f 16702 16703 16825 +f 16702 16825 16824 +f 16703 16704 16825 +f 16704 16826 16825 +f 16704 16705 16827 +f 16704 16827 16826 +f 16705 16706 16827 +f 16706 16828 16827 +f 16706 16707 16829 +f 16706 16829 16828 +f 16707 16708 16829 +f 16708 16830 16829 +f 16708 16709 16831 +f 16708 16831 16830 +f 16709 16710 16831 +f 16710 16832 16831 +f 16710 16711 16833 +f 16710 16833 16832 +f 16711 16712 16833 +f 16712 16834 16833 +f 16712 16713 16835 +f 16712 16835 16834 +f 16713 16714 16835 +f 16714 16836 16835 +f 16714 16715 16837 +f 16714 16837 16836 +f 16715 16716 16837 +f 16716 16838 16837 +f 16716 16717 16839 +f 16716 16839 16838 +f 16717 16718 16839 +f 16718 16840 16839 +f 16718 16719 16841 +f 16718 16841 16840 +f 16719 16720 16841 +f 16720 16842 16841 +f 16720 16721 16843 +f 16720 16843 16842 +f 16721 16722 16843 +f 16722 16844 16843 +f 16722 16723 16845 +f 16722 16845 16844 +f 16723 16724 16845 +f 16724 16846 16845 +f 16724 16725 16847 +f 16724 16847 16846 +f 16725 16726 16847 +f 16726 16848 16847 +f 16726 16727 16849 +f 16726 16849 16848 +f 16727 16728 16849 +f 16728 16850 16849 +f 16728 16729 16851 +f 16728 16851 16850 +f 16729 16730 16851 +f 16730 16852 16851 +f 
16730 16731 16853 +f 16730 16853 16852 +f 16731 16732 16853 +f 16732 16854 16853 +f 16732 16733 16855 +f 16732 16855 16854 +f 16733 16734 16855 +f 16734 16856 16855 +f 16734 16735 16857 +f 16734 16857 16856 +f 16735 16736 16857 +f 16736 16858 16857 +f 16737 16862 16861 +f 16737 16738 16863 +f 16737 16863 16862 +f 16738 16739 16863 +f 16739 16864 16863 +f 16739 16740 16865 +f 16739 16865 16864 +f 16740 16741 16865 +f 16741 16866 16865 +f 16741 16742 16867 +f 16741 16867 16866 +f 16742 16743 16867 +f 16743 16868 16867 +f 16743 16744 16869 +f 16743 16869 16868 +f 16744 16745 16869 +f 16745 16870 16869 +f 16745 16746 16871 +f 16745 16871 16870 +f 16746 16747 16871 +f 16747 16872 16871 +f 16747 16748 16873 +f 16747 16873 16872 +f 16748 16749 16873 +f 16749 16874 16873 +f 16749 16750 16875 +f 16749 16875 16874 +f 16750 16751 16875 +f 16751 16876 16875 +f 16751 16752 16877 +f 16751 16877 16876 +f 16752 16753 16877 +f 16753 16878 16877 +f 16753 16754 16879 +f 16753 16879 16878 +f 16754 16755 16879 +f 16755 16880 16879 +f 16755 16756 16881 +f 16755 16881 16880 +f 16756 16757 16881 +f 16757 16882 16881 +f 16757 16758 16883 +f 16757 16883 16882 +f 16758 16759 16883 +f 16759 16884 16883 +f 16759 16760 16885 +f 16759 16885 16884 +f 16760 16761 16885 +f 16761 16886 16885 +f 16761 16762 16887 +f 16761 16887 16886 +f 16762 16763 16887 +f 16763 16888 16887 +f 16763 16764 16889 +f 16763 16889 16888 +f 16764 16765 16889 +f 16765 16890 16889 +f 16765 16766 16891 +f 16765 16891 16890 +f 16766 16767 16891 +f 16767 16892 16891 +f 16767 16768 16893 +f 16767 16893 16892 +f 16768 16769 16893 +f 16769 16894 16893 +f 16769 16770 16895 +f 16769 16895 16894 +f 16770 16771 16895 +f 16771 16896 16895 +f 16771 16772 16897 +f 16771 16897 16896 +f 16772 16773 16897 +f 16773 16898 16897 +f 16773 16774 16899 +f 16773 16899 16898 +f 16774 16775 16899 +f 16775 16900 16899 +f 16775 16776 16901 +f 16775 16901 16900 +f 16776 16777 16901 +f 16777 16902 16901 +f 16777 16778 16903 +f 16777 16903 16902 +f 16778 16779 16903 +f 16779 16904 16903 +f 16779 16780 16905 +f 16779 16905 16904 +f 16780 16781 16905 +f 16781 16906 16905 +f 16781 16782 16907 +f 16781 16907 16906 +f 16782 16783 16907 +f 16783 16908 16907 +f 16783 16784 16909 +f 16783 16909 16908 +f 16784 16785 16909 +f 16785 16910 16909 +f 16785 16786 16911 +f 16785 16911 16910 +f 16786 16787 16911 +f 16787 16912 16911 +f 16787 16788 16913 +f 16787 16913 16912 +f 16788 16789 16913 +f 16789 16914 16913 +f 16789 16790 16915 +f 16789 16915 16914 +f 16790 16791 16915 +f 16791 16916 16915 +f 16791 16792 16917 +f 16791 16917 16916 +f 16792 16793 16917 +f 16793 16918 16917 +f 16793 16794 16919 +f 16793 16919 16918 +f 16794 16795 16919 +f 16795 16920 16919 +f 16795 16796 16921 +f 16795 16921 16920 +f 16796 16797 16921 +f 16797 16922 16921 +f 16797 16798 16923 +f 16797 16923 16922 +f 16798 16799 16923 +f 16799 16924 16923 +f 16799 16800 16925 +f 16799 16925 16924 +f 16800 16801 16925 +f 16801 16926 16925 +f 16801 16802 16927 +f 16801 16927 16926 +f 16802 16803 16927 +f 16803 16928 16927 +f 16803 16804 16929 +f 16803 16929 16928 +f 16804 16805 16929 +f 16805 16930 16929 +f 16805 16806 16931 +f 16805 16931 16930 +f 16806 16807 16931 +f 16807 16932 16931 +f 16807 16808 16933 +f 16807 16933 16932 +f 16808 16809 16933 +f 16809 16934 16933 +f 16809 16810 16935 +f 16809 16935 16934 +f 16810 16811 16935 +f 16811 16936 16935 +f 16811 16812 16937 +f 16811 16937 16936 +f 16812 16813 16937 +f 16813 16938 16937 +f 16813 16814 16939 +f 16813 16939 16938 +f 16814 16815 16939 +f 16815 16940 16939 +f 16815 
16816 16941 +f 16815 16941 16940 +f 16816 16817 16941 +f 16817 16942 16941 +f 16817 16818 16943 +f 16817 16943 16942 +f 16818 16819 16943 +f 16819 16944 16943 +f 16819 16820 16945 +f 16819 16945 16944 +f 16820 16821 16945 +f 16821 16946 16945 +f 16821 16822 16947 +f 16821 16947 16946 +f 16822 16823 16947 +f 16823 16948 16947 +f 16823 16824 16949 +f 16823 16949 16948 +f 16824 16825 16949 +f 16825 16950 16949 +f 16825 16826 16951 +f 16825 16951 16950 +f 16826 16827 16951 +f 16827 16952 16951 +f 16827 16828 16953 +f 16827 16953 16952 +f 16828 16829 16953 +f 16829 16954 16953 +f 16829 16830 16955 +f 16829 16955 16954 +f 16830 16831 16955 +f 16831 16956 16955 +f 16831 16832 16957 +f 16831 16957 16956 +f 16832 16833 16957 +f 16833 16958 16957 +f 16833 16834 16959 +f 16833 16959 16958 +f 16834 16835 16959 +f 16835 16960 16959 +f 16835 16836 16961 +f 16835 16961 16960 +f 16836 16837 16961 +f 16837 16962 16961 +f 16837 16838 16963 +f 16837 16963 16962 +f 16838 16839 16963 +f 16839 16964 16963 +f 16839 16840 16965 +f 16839 16965 16964 +f 16840 16841 16965 +f 16841 16966 16965 +f 16841 16842 16967 +f 16841 16967 16966 +f 16842 16843 16967 +f 16843 16968 16967 +f 16843 16844 16969 +f 16843 16969 16968 +f 16844 16845 16969 +f 16845 16970 16969 +f 16845 16846 16971 +f 16845 16971 16970 +f 16846 16847 16971 +f 16847 16972 16971 +f 16847 16848 16973 +f 16847 16973 16972 +f 16848 16849 16973 +f 16849 16974 16973 +f 16849 16850 16975 +f 16849 16975 16974 +f 16850 16851 16975 +f 16851 16976 16975 +f 16851 16852 16977 +f 16851 16977 16976 +f 16852 16853 16977 +f 16853 16978 16977 +f 16853 16854 16979 +f 16853 16979 16978 +f 16854 16855 16979 +f 16855 16980 16979 +f 16855 16856 16981 +f 16855 16981 16980 +f 16856 16857 16981 +f 16857 16982 16981 +f 16857 16858 16983 +f 16857 16983 16982 +f 16859 16985 16984 +f 16859 16860 16986 +f 16859 16986 16985 +f 16860 16861 16986 +f 16861 16987 16986 +f 16861 16862 16988 +f 16861 16988 16987 +f 16862 16863 16988 +f 16863 16989 16988 +f 16863 16864 16990 +f 16863 16990 16989 +f 16864 16865 16990 +f 16865 16991 16990 +f 16865 16866 16992 +f 16865 16992 16991 +f 16866 16867 16992 +f 16867 16993 16992 +f 16867 16868 16994 +f 16867 16994 16993 +f 16868 16869 16994 +f 16869 16995 16994 +f 16869 16870 16996 +f 16869 16996 16995 +f 16870 16871 16996 +f 16871 16997 16996 +f 16871 16872 16998 +f 16871 16998 16997 +f 16872 16873 16998 +f 16873 16999 16998 +f 16873 16874 17000 +f 16873 17000 16999 +f 16874 16875 17000 +f 16875 17001 17000 +f 16875 16876 17002 +f 16875 17002 17001 +f 16876 16877 17002 +f 16877 17003 17002 +f 16877 16878 17004 +f 16877 17004 17003 +f 16878 16879 17004 +f 16879 17005 17004 +f 16879 16880 17006 +f 16879 17006 17005 +f 16880 16881 17006 +f 16881 17007 17006 +f 16881 16882 17008 +f 16881 17008 17007 +f 16882 16883 17008 +f 16883 17009 17008 +f 16883 16884 17010 +f 16883 17010 17009 +f 16884 16885 17010 +f 16885 17011 17010 +f 16885 16886 17012 +f 16885 17012 17011 +f 16886 16887 17012 +f 16887 17013 17012 +f 16887 16888 17014 +f 16887 17014 17013 +f 16888 16889 17014 +f 16889 17015 17014 +f 16889 16890 17016 +f 16889 17016 17015 +f 16890 16891 17016 +f 16891 17017 17016 +f 16891 16892 17018 +f 16891 17018 17017 +f 16892 16893 17018 +f 16893 17019 17018 +f 16893 16894 17020 +f 16893 17020 17019 +f 16894 16895 17020 +f 16895 17021 17020 +f 16895 16896 17022 +f 16895 17022 17021 +f 16896 16897 17022 +f 16897 17023 17022 +f 16897 16898 17024 +f 16897 17024 17023 +f 16898 16899 17024 +f 16899 17025 17024 +f 16899 16900 17026 +f 16899 17026 17025 +f 16900 16901 
17026 +f 16901 17027 17026 +f 16901 16902 17028 +f 16901 17028 17027 +f 16902 16903 17028 +f 16903 17029 17028 +f 16903 16904 17030 +f 16903 17030 17029 +f 16904 16905 17030 +f 16905 17031 17030 +f 16905 16906 17032 +f 16905 17032 17031 +f 16906 16907 17032 +f 16907 17033 17032 +f 16907 16908 17034 +f 16907 17034 17033 +f 16908 16909 17034 +f 16909 17035 17034 +f 16909 16910 17036 +f 16909 17036 17035 +f 16910 16911 17036 +f 16911 17037 17036 +f 16911 16912 17038 +f 16911 17038 17037 +f 16912 16913 17038 +f 16913 17039 17038 +f 16913 16914 17040 +f 16913 17040 17039 +f 16914 16915 17040 +f 16915 17041 17040 +f 16915 16916 17042 +f 16915 17042 17041 +f 16916 16917 17042 +f 16917 17043 17042 +f 16917 16918 17044 +f 16917 17044 17043 +f 16918 16919 17044 +f 16919 17045 17044 +f 16919 16920 17046 +f 16919 17046 17045 +f 16920 16921 17046 +f 16921 17047 17046 +f 16921 16922 17048 +f 16921 17048 17047 +f 16922 16923 17048 +f 16923 17049 17048 +f 16923 16924 17050 +f 16923 17050 17049 +f 16924 16925 17050 +f 16925 17051 17050 +f 16925 16926 17052 +f 16925 17052 17051 +f 16926 16927 17052 +f 16927 17053 17052 +f 16927 16928 17054 +f 16927 17054 17053 +f 16928 16929 17054 +f 16929 17055 17054 +f 16929 16930 17056 +f 16929 17056 17055 +f 16930 16931 17056 +f 16931 17057 17056 +f 16931 16932 17058 +f 16931 17058 17057 +f 16932 16933 17058 +f 16933 17059 17058 +f 16933 16934 17060 +f 16933 17060 17059 +f 16934 16935 17060 +f 16935 17061 17060 +f 16935 16936 17062 +f 16935 17062 17061 +f 16936 16937 17062 +f 16937 17063 17062 +f 16937 16938 17064 +f 16937 17064 17063 +f 16938 16939 17064 +f 16939 17065 17064 +f 16939 16940 17066 +f 16939 17066 17065 +f 16940 16941 17066 +f 16941 17067 17066 +f 16941 16942 17068 +f 16941 17068 17067 +f 16942 16943 17068 +f 16943 17069 17068 +f 16943 16944 17070 +f 16943 17070 17069 +f 16944 16945 17070 +f 16945 17071 17070 +f 16945 16946 17072 +f 16945 17072 17071 +f 16946 16947 17072 +f 16947 17073 17072 +f 16947 16948 17074 +f 16947 17074 17073 +f 16948 16949 17074 +f 16949 17075 17074 +f 16949 16950 17076 +f 16949 17076 17075 +f 16950 16951 17076 +f 16951 17077 17076 +f 16951 16952 17078 +f 16951 17078 17077 +f 16952 16953 17078 +f 16953 17079 17078 +f 16953 16954 17080 +f 16953 17080 17079 +f 16954 16955 17080 +f 16955 17081 17080 +f 16955 16956 17082 +f 16955 17082 17081 +f 16956 16957 17082 +f 16957 17083 17082 +f 16957 16958 17084 +f 16957 17084 17083 +f 16958 16959 17084 +f 16959 17085 17084 +f 16959 16960 17086 +f 16959 17086 17085 +f 16960 16961 17086 +f 16961 17087 17086 +f 16961 16962 17088 +f 16961 17088 17087 +f 16962 16963 17088 +f 16963 17089 17088 +f 16963 16964 17090 +f 16963 17090 17089 +f 16964 16965 17090 +f 16965 17091 17090 +f 16965 16966 17092 +f 16965 17092 17091 +f 16966 16967 17092 +f 16967 17093 17092 +f 16967 16968 17094 +f 16967 17094 17093 +f 16968 16969 17094 +f 16969 17095 17094 +f 16969 16970 17096 +f 16969 17096 17095 +f 16970 16971 17096 +f 16971 17097 17096 +f 16971 16972 17098 +f 16971 17098 17097 +f 16972 16973 17098 +f 16973 17099 17098 +f 16973 16974 17100 +f 16973 17100 17099 +f 16974 16975 17100 +f 16975 17101 17100 +f 16975 16976 17102 +f 16975 17102 17101 +f 16976 16977 17102 +f 16977 17103 17102 +f 16977 16978 17104 +f 16977 17104 17103 +f 16978 16979 17104 +f 16979 17105 17104 +f 16979 16980 17106 +f 16979 17106 17105 +f 16980 16981 17106 +f 16981 17107 17106 +f 16981 16982 17108 +f 16981 17108 17107 +f 16982 16983 17108 +f 16983 17109 17108 +f 16984 17113 17112 +f 16984 16985 17114 +f 16984 17114 17113 +f 16985 16986 17114 
+f 16986 17115 17114 +f 16986 16987 17116 +f 16986 17116 17115 +f 16987 16988 17116 +f 16988 17117 17116 +f 16988 16989 17118 +f 16988 17118 17117 +f 16989 16990 17118 +f 16990 17119 17118 +f 16990 16991 17120 +f 16990 17120 17119 +f 16991 16992 17120 +f 16992 17121 17120 +f 16992 16993 17122 +f 16992 17122 17121 +f 16993 16994 17122 +f 16994 17123 17122 +f 16994 16995 17124 +f 16994 17124 17123 +f 16995 16996 17124 +f 16996 17125 17124 +f 16996 16997 17126 +f 16996 17126 17125 +f 16997 16998 17126 +f 16998 17127 17126 +f 16998 16999 17128 +f 16998 17128 17127 +f 16999 17000 17128 +f 17000 17129 17128 +f 17000 17001 17130 +f 17000 17130 17129 +f 17001 17002 17130 +f 17002 17131 17130 +f 17002 17003 17132 +f 17002 17132 17131 +f 17003 17004 17132 +f 17004 17133 17132 +f 17004 17005 17134 +f 17004 17134 17133 +f 17005 17006 17134 +f 17006 17135 17134 +f 17006 17007 17136 +f 17006 17136 17135 +f 17007 17008 17136 +f 17008 17137 17136 +f 17008 17009 17138 +f 17008 17138 17137 +f 17009 17010 17138 +f 17010 17139 17138 +f 17010 17011 17140 +f 17010 17140 17139 +f 17011 17012 17140 +f 17012 17141 17140 +f 17012 17013 17142 +f 17012 17142 17141 +f 17013 17014 17142 +f 17014 17143 17142 +f 17014 17015 17144 +f 17014 17144 17143 +f 17015 17016 17144 +f 17016 17145 17144 +f 17016 17017 17146 +f 17016 17146 17145 +f 17017 17018 17146 +f 17018 17147 17146 +f 17018 17019 17148 +f 17018 17148 17147 +f 17019 17020 17148 +f 17020 17149 17148 +f 17020 17021 17150 +f 17020 17150 17149 +f 17021 17022 17150 +f 17022 17151 17150 +f 17022 17023 17152 +f 17022 17152 17151 +f 17023 17024 17152 +f 17024 17153 17152 +f 17024 17025 17154 +f 17024 17154 17153 +f 17025 17026 17154 +f 17026 17155 17154 +f 17026 17027 17156 +f 17026 17156 17155 +f 17027 17028 17156 +f 17028 17157 17156 +f 17028 17029 17158 +f 17028 17158 17157 +f 17029 17030 17158 +f 17030 17159 17158 +f 17030 17031 17160 +f 17030 17160 17159 +f 17031 17032 17160 +f 17032 17161 17160 +f 17032 17033 17162 +f 17032 17162 17161 +f 17033 17034 17162 +f 17034 17163 17162 +f 17034 17035 17164 +f 17034 17164 17163 +f 17035 17036 17164 +f 17036 17165 17164 +f 17036 17037 17166 +f 17036 17166 17165 +f 17037 17038 17166 +f 17038 17167 17166 +f 17038 17039 17168 +f 17038 17168 17167 +f 17039 17040 17168 +f 17040 17169 17168 +f 17040 17041 17170 +f 17040 17170 17169 +f 17041 17042 17170 +f 17042 17171 17170 +f 17042 17043 17172 +f 17042 17172 17171 +f 17043 17044 17172 +f 17044 17173 17172 +f 17044 17045 17174 +f 17044 17174 17173 +f 17045 17046 17174 +f 17046 17175 17174 +f 17046 17047 17176 +f 17046 17176 17175 +f 17047 17048 17176 +f 17048 17177 17176 +f 17048 17049 17178 +f 17048 17178 17177 +f 17049 17050 17178 +f 17050 17179 17178 +f 17050 17051 17180 +f 17050 17180 17179 +f 17051 17052 17180 +f 17052 17181 17180 +f 17052 17053 17182 +f 17052 17182 17181 +f 17053 17054 17182 +f 17054 17183 17182 +f 17054 17055 17184 +f 17054 17184 17183 +f 17055 17056 17184 +f 17056 17185 17184 +f 17056 17057 17186 +f 17056 17186 17185 +f 17057 17058 17186 +f 17058 17187 17186 +f 17058 17059 17188 +f 17058 17188 17187 +f 17059 17060 17188 +f 17060 17189 17188 +f 17060 17061 17190 +f 17060 17190 17189 +f 17061 17062 17190 +f 17062 17191 17190 +f 17062 17063 17192 +f 17062 17192 17191 +f 17063 17064 17192 +f 17064 17193 17192 +f 17064 17065 17194 +f 17064 17194 17193 +f 17065 17066 17194 +f 17066 17195 17194 +f 17066 17067 17196 +f 17066 17196 17195 +f 17067 17068 17196 +f 17068 17197 17196 +f 17068 17069 17198 +f 17068 17198 17197 +f 17069 17070 17198 +f 17070 17199 17198 +f 
17070 17071 17200 +f 17070 17200 17199 +f 17071 17072 17200 +f 17072 17201 17200 +f 17072 17073 17202 +f 17072 17202 17201 +f 17073 17074 17202 +f 17074 17203 17202 +f 17074 17075 17204 +f 17074 17204 17203 +f 17075 17076 17204 +f 17076 17205 17204 +f 17076 17077 17206 +f 17076 17206 17205 +f 17077 17078 17206 +f 17078 17207 17206 +f 17078 17079 17208 +f 17078 17208 17207 +f 17079 17080 17208 +f 17080 17209 17208 +f 17080 17081 17210 +f 17080 17210 17209 +f 17081 17082 17210 +f 17082 17211 17210 +f 17082 17083 17212 +f 17082 17212 17211 +f 17083 17084 17212 +f 17084 17213 17212 +f 17084 17085 17214 +f 17084 17214 17213 +f 17085 17086 17214 +f 17086 17215 17214 +f 17086 17087 17216 +f 17086 17216 17215 +f 17087 17088 17216 +f 17088 17217 17216 +f 17088 17089 17218 +f 17088 17218 17217 +f 17089 17090 17218 +f 17090 17219 17218 +f 17090 17091 17220 +f 17090 17220 17219 +f 17091 17092 17220 +f 17092 17221 17220 +f 17092 17093 17222 +f 17092 17222 17221 +f 17093 17094 17222 +f 17094 17223 17222 +f 17094 17095 17224 +f 17094 17224 17223 +f 17095 17096 17224 +f 17096 17225 17224 +f 17096 17097 17226 +f 17096 17226 17225 +f 17097 17098 17226 +f 17098 17227 17226 +f 17098 17099 17228 +f 17098 17228 17227 +f 17099 17100 17228 +f 17100 17229 17228 +f 17100 17101 17230 +f 17100 17230 17229 +f 17101 17102 17230 +f 17102 17231 17230 +f 17102 17103 17232 +f 17102 17232 17231 +f 17103 17104 17232 +f 17104 17233 17232 +f 17104 17105 17234 +f 17104 17234 17233 +f 17105 17106 17234 +f 17106 17235 17234 +f 17106 17107 17236 +f 17106 17236 17235 +f 17107 17108 17236 +f 17108 17237 17236 +f 17108 17109 17238 +f 17108 17238 17237 +f 17110 17111 17240 +f 17110 17240 17239 +f 17111 17112 17240 +f 17112 17241 17240 +f 17112 17113 17242 +f 17112 17242 17241 +f 17113 17114 17242 +f 17114 17243 17242 +f 17114 17115 17244 +f 17114 17244 17243 +f 17115 17116 17244 +f 17116 17245 17244 +f 17116 17117 17246 +f 17116 17246 17245 +f 17117 17118 17246 +f 17118 17247 17246 +f 17118 17119 17248 +f 17118 17248 17247 +f 17119 17120 17248 +f 17120 17249 17248 +f 17120 17121 17250 +f 17120 17250 17249 +f 17121 17122 17250 +f 17122 17251 17250 +f 17122 17123 17252 +f 17122 17252 17251 +f 17123 17124 17252 +f 17124 17253 17252 +f 17124 17125 17254 +f 17124 17254 17253 +f 17125 17126 17254 +f 17126 17255 17254 +f 17126 17127 17256 +f 17126 17256 17255 +f 17127 17128 17256 +f 17128 17257 17256 +f 17128 17129 17258 +f 17128 17258 17257 +f 17129 17130 17258 +f 17130 17259 17258 +f 17130 17131 17260 +f 17130 17260 17259 +f 17131 17132 17260 +f 17132 17261 17260 +f 17132 17133 17262 +f 17132 17262 17261 +f 17133 17134 17262 +f 17134 17263 17262 +f 17134 17135 17264 +f 17134 17264 17263 +f 17135 17136 17264 +f 17136 17265 17264 +f 17136 17137 17266 +f 17136 17266 17265 +f 17137 17138 17266 +f 17138 17267 17266 +f 17138 17139 17268 +f 17138 17268 17267 +f 17139 17140 17268 +f 17140 17269 17268 +f 17140 17141 17270 +f 17140 17270 17269 +f 17141 17142 17270 +f 17142 17271 17270 +f 17142 17143 17272 +f 17142 17272 17271 +f 17143 17144 17272 +f 17144 17273 17272 +f 17144 17145 17274 +f 17144 17274 17273 +f 17145 17146 17274 +f 17146 17275 17274 +f 17146 17147 17276 +f 17146 17276 17275 +f 17147 17148 17276 +f 17148 17277 17276 +f 17148 17149 17278 +f 17148 17278 17277 +f 17149 17150 17278 +f 17150 17279 17278 +f 17150 17151 17280 +f 17150 17280 17279 +f 17151 17152 17280 +f 17152 17281 17280 +f 17152 17153 17282 +f 17152 17282 17281 +f 17153 17154 17282 +f 17154 17283 17282 +f 17154 17155 17284 +f 17154 17284 17283 +f 17155 17156 17284 +f 17156 
17285 17284 +f 17156 17157 17286 +f 17156 17286 17285 +f 17157 17158 17286 +f 17158 17287 17286 +f 17158 17159 17288 +f 17158 17288 17287 +f 17159 17160 17288 +f 17160 17289 17288 +f 17160 17161 17290 +f 17160 17290 17289 +f 17161 17162 17290 +f 17162 17291 17290 +f 17162 17163 17292 +f 17162 17292 17291 +f 17163 17164 17292 +f 17164 17293 17292 +f 17164 17165 17294 +f 17164 17294 17293 +f 17165 17166 17294 +f 17166 17295 17294 +f 17166 17167 17296 +f 17166 17296 17295 +f 17167 17168 17296 +f 17168 17297 17296 +f 17168 17169 17298 +f 17168 17298 17297 +f 17169 17170 17298 +f 17170 17299 17298 +f 17170 17171 17300 +f 17170 17300 17299 +f 17171 17172 17300 +f 17172 17301 17300 +f 17172 17173 17302 +f 17172 17302 17301 +f 17173 17174 17302 +f 17174 17303 17302 +f 17174 17175 17304 +f 17174 17304 17303 +f 17175 17176 17304 +f 17176 17305 17304 +f 17176 17177 17306 +f 17176 17306 17305 +f 17177 17178 17306 +f 17178 17307 17306 +f 17178 17179 17308 +f 17178 17308 17307 +f 17179 17180 17308 +f 17180 17309 17308 +f 17180 17181 17310 +f 17180 17310 17309 +f 17181 17182 17310 +f 17182 17311 17310 +f 17182 17183 17312 +f 17182 17312 17311 +f 17183 17184 17312 +f 17184 17313 17312 +f 17184 17185 17314 +f 17184 17314 17313 +f 17185 17186 17314 +f 17186 17315 17314 +f 17186 17187 17316 +f 17186 17316 17315 +f 17187 17188 17316 +f 17188 17317 17316 +f 17188 17189 17318 +f 17188 17318 17317 +f 17189 17190 17318 +f 17190 17319 17318 +f 17190 17191 17320 +f 17190 17320 17319 +f 17191 17192 17320 +f 17192 17321 17320 +f 17192 17193 17322 +f 17192 17322 17321 +f 17193 17194 17322 +f 17194 17323 17322 +f 17194 17195 17324 +f 17194 17324 17323 +f 17195 17196 17324 +f 17196 17325 17324 +f 17196 17197 17326 +f 17196 17326 17325 +f 17197 17198 17326 +f 17198 17327 17326 +f 17198 17199 17328 +f 17198 17328 17327 +f 17199 17200 17328 +f 17200 17329 17328 +f 17200 17201 17330 +f 17200 17330 17329 +f 17201 17202 17330 +f 17202 17331 17330 +f 17202 17203 17332 +f 17202 17332 17331 +f 17203 17204 17332 +f 17204 17333 17332 +f 17204 17205 17334 +f 17204 17334 17333 +f 17205 17206 17334 +f 17206 17335 17334 +f 17206 17207 17336 +f 17206 17336 17335 +f 17207 17208 17336 +f 17208 17337 17336 +f 17208 17209 17338 +f 17208 17338 17337 +f 17209 17210 17338 +f 17210 17339 17338 +f 17210 17211 17340 +f 17210 17340 17339 +f 17211 17212 17340 +f 17212 17341 17340 +f 17212 17213 17342 +f 17212 17342 17341 +f 17213 17214 17342 +f 17214 17343 17342 +f 17214 17215 17344 +f 17214 17344 17343 +f 17215 17216 17344 +f 17216 17345 17344 +f 17216 17217 17346 +f 17216 17346 17345 +f 17217 17218 17346 +f 17218 17347 17346 +f 17218 17219 17348 +f 17218 17348 17347 +f 17219 17220 17348 +f 17220 17349 17348 +f 17220 17221 17350 +f 17220 17350 17349 +f 17221 17222 17350 +f 17222 17351 17350 +f 17222 17223 17352 +f 17222 17352 17351 +f 17223 17224 17352 +f 17224 17353 17352 +f 17224 17225 17354 +f 17224 17354 17353 +f 17225 17226 17354 +f 17226 17355 17354 +f 17226 17227 17356 +f 17226 17356 17355 +f 17227 17228 17356 +f 17228 17357 17356 +f 17228 17229 17358 +f 17228 17358 17357 +f 17229 17230 17358 +f 17230 17359 17358 +f 17230 17231 17360 +f 17230 17360 17359 +f 17231 17232 17360 +f 17232 17361 17360 +f 17232 17233 17362 +f 17232 17362 17361 +f 17233 17234 17362 +f 17234 17363 17362 +f 17234 17235 17364 +f 17234 17364 17363 +f 17235 17236 17364 +f 17236 17365 17364 +f 17236 17237 17366 +f 17236 17366 17365 +f 17237 17238 17366 +f 17238 17367 17366 +f 17239 17240 17368 +f 17240 17369 17368 +f 17240 17241 17370 +f 17240 17370 17369 +f 17241 17242 
17370 +f 17242 17371 17370 +f 17242 17243 17372 +f 17242 17372 17371 +f 17243 17244 17372 +f 17244 17373 17372 +f 17244 17245 17374 +f 17244 17374 17373 +f 17245 17246 17374 +f 17246 17375 17374 +f 17246 17247 17376 +f 17246 17376 17375 +f 17247 17248 17376 +f 17248 17377 17376 +f 17248 17249 17378 +f 17248 17378 17377 +f 17249 17250 17378 +f 17250 17379 17378 +f 17250 17251 17380 +f 17250 17380 17379 +f 17251 17252 17380 +f 17252 17381 17380 +f 17252 17253 17382 +f 17252 17382 17381 +f 17253 17254 17382 +f 17254 17383 17382 +f 17254 17255 17384 +f 17254 17384 17383 +f 17255 17256 17384 +f 17256 17385 17384 +f 17256 17257 17386 +f 17256 17386 17385 +f 17257 17258 17386 +f 17258 17387 17386 +f 17258 17259 17388 +f 17258 17388 17387 +f 17259 17260 17388 +f 17260 17389 17388 +f 17260 17261 17390 +f 17260 17390 17389 +f 17261 17262 17390 +f 17262 17391 17390 +f 17262 17263 17392 +f 17262 17392 17391 +f 17263 17264 17392 +f 17264 17393 17392 +f 17264 17265 17394 +f 17264 17394 17393 +f 17265 17266 17394 +f 17266 17395 17394 +f 17266 17267 17396 +f 17266 17396 17395 +f 17267 17268 17396 +f 17268 17397 17396 +f 17268 17269 17398 +f 17268 17398 17397 +f 17269 17270 17398 +f 17270 17399 17398 +f 17270 17271 17400 +f 17270 17400 17399 +f 17271 17272 17400 +f 17272 17401 17400 +f 17272 17273 17402 +f 17272 17402 17401 +f 17273 17274 17402 +f 17274 17403 17402 +f 17274 17275 17404 +f 17274 17404 17403 +f 17275 17276 17404 +f 17276 17405 17404 +f 17276 17277 17406 +f 17276 17406 17405 +f 17277 17278 17406 +f 17278 17407 17406 +f 17278 17279 17408 +f 17278 17408 17407 +f 17279 17280 17408 +f 17280 17409 17408 +f 17280 17281 17410 +f 17280 17410 17409 +f 17281 17282 17410 +f 17282 17411 17410 +f 17282 17283 17412 +f 17282 17412 17411 +f 17283 17284 17412 +f 17284 17413 17412 +f 17284 17285 17414 +f 17284 17414 17413 +f 17285 17286 17414 +f 17286 17415 17414 +f 17286 17287 17416 +f 17286 17416 17415 +f 17287 17288 17416 +f 17288 17417 17416 +f 17288 17289 17418 +f 17288 17418 17417 +f 17289 17290 17418 +f 17290 17419 17418 +f 17290 17291 17420 +f 17290 17420 17419 +f 17291 17292 17420 +f 17292 17421 17420 +f 17292 17293 17422 +f 17292 17422 17421 +f 17293 17294 17422 +f 17294 17423 17422 +f 17294 17295 17424 +f 17294 17424 17423 +f 17295 17296 17424 +f 17296 17425 17424 +f 17296 17297 17426 +f 17296 17426 17425 +f 17297 17298 17426 +f 17298 17427 17426 +f 17298 17299 17428 +f 17298 17428 17427 +f 17299 17300 17428 +f 17300 17429 17428 +f 17300 17301 17430 +f 17300 17430 17429 +f 17301 17302 17430 +f 17302 17431 17430 +f 17302 17303 17432 +f 17302 17432 17431 +f 17303 17304 17432 +f 17304 17433 17432 +f 17304 17305 17434 +f 17304 17434 17433 +f 17305 17306 17434 +f 17306 17435 17434 +f 17306 17307 17436 +f 17306 17436 17435 +f 17307 17308 17436 +f 17308 17437 17436 +f 17308 17309 17438 +f 17308 17438 17437 +f 17309 17310 17438 +f 17310 17439 17438 +f 17310 17311 17440 +f 17310 17440 17439 +f 17311 17312 17440 +f 17312 17441 17440 +f 17312 17313 17442 +f 17312 17442 17441 +f 17313 17314 17442 +f 17314 17443 17442 +f 17314 17315 17444 +f 17314 17444 17443 +f 17315 17316 17444 +f 17316 17445 17444 +f 17316 17317 17446 +f 17316 17446 17445 +f 17317 17318 17446 +f 17318 17447 17446 +f 17318 17319 17448 +f 17318 17448 17447 +f 17319 17320 17448 +f 17320 17449 17448 +f 17320 17321 17450 +f 17320 17450 17449 +f 17321 17322 17450 +f 17322 17451 17450 +f 17322 17323 17452 +f 17322 17452 17451 +f 17323 17324 17452 +f 17324 17453 17452 +f 17324 17325 17454 +f 17324 17454 17453 +f 17325 17326 17454 +f 17326 17455 17454 
+f 17326 17327 17456 +f 17326 17456 17455 +f 17327 17328 17456 +f 17328 17457 17456 +f 17328 17329 17458 +f 17328 17458 17457 +f 17329 17330 17458 +f 17330 17459 17458 +f 17330 17331 17460 +f 17330 17460 17459 +f 17331 17332 17460 +f 17332 17461 17460 +f 17332 17333 17462 +f 17332 17462 17461 +f 17333 17334 17462 +f 17334 17463 17462 +f 17334 17335 17464 +f 17334 17464 17463 +f 17335 17336 17464 +f 17336 17465 17464 +f 17336 17337 17466 +f 17336 17466 17465 +f 17337 17338 17466 +f 17338 17467 17466 +f 17338 17339 17468 +f 17338 17468 17467 +f 17339 17340 17468 +f 17340 17469 17468 +f 17340 17341 17470 +f 17340 17470 17469 +f 17341 17342 17470 +f 17342 17471 17470 +f 17342 17343 17472 +f 17342 17472 17471 +f 17343 17344 17472 +f 17344 17473 17472 +f 17344 17345 17474 +f 17344 17474 17473 +f 17345 17346 17474 +f 17346 17475 17474 +f 17346 17347 17476 +f 17346 17476 17475 +f 17347 17348 17476 +f 17348 17477 17476 +f 17348 17349 17478 +f 17348 17478 17477 +f 17349 17350 17478 +f 17350 17479 17478 +f 17350 17351 17480 +f 17350 17480 17479 +f 17351 17352 17480 +f 17352 17481 17480 +f 17352 17353 17482 +f 17352 17482 17481 +f 17353 17354 17482 +f 17354 17483 17482 +f 17354 17355 17484 +f 17354 17484 17483 +f 17355 17356 17484 +f 17356 17485 17484 +f 17356 17357 17486 +f 17356 17486 17485 +f 17357 17358 17486 +f 17358 17487 17486 +f 17358 17359 17488 +f 17358 17488 17487 +f 17359 17360 17488 +f 17360 17489 17488 +f 17360 17361 17490 +f 17360 17490 17489 +f 17361 17362 17490 +f 17362 17491 17490 +f 17362 17363 17492 +f 17362 17492 17491 +f 17363 17364 17492 +f 17364 17493 17492 +f 17364 17365 17494 +f 17364 17494 17493 +f 17365 17366 17494 +f 17366 17495 17494 +f 17366 17367 17496 +f 17366 17496 17495 +f 17368 17369 17498 +f 17368 17498 17497 +f 17369 17370 17498 +f 17370 17499 17498 +f 17370 17371 17500 +f 17370 17500 17499 +f 17371 17372 17500 +f 17372 17501 17500 +f 17372 17373 17502 +f 17372 17502 17501 +f 17373 17374 17502 +f 17374 17503 17502 +f 17374 17375 17504 +f 17374 17504 17503 +f 17375 17376 17504 +f 17376 17505 17504 +f 17376 17377 17506 +f 17376 17506 17505 +f 17377 17378 17506 +f 17378 17507 17506 +f 17378 17379 17508 +f 17378 17508 17507 +f 17379 17380 17508 +f 17380 17509 17508 +f 17380 17381 17510 +f 17380 17510 17509 +f 17381 17382 17510 +f 17382 17511 17510 +f 17382 17383 17512 +f 17382 17512 17511 +f 17383 17384 17512 +f 17384 17513 17512 +f 17384 17385 17514 +f 17384 17514 17513 +f 17385 17386 17514 +f 17386 17515 17514 +f 17386 17387 17516 +f 17386 17516 17515 +f 17387 17388 17516 +f 17388 17517 17516 +f 17388 17389 17518 +f 17388 17518 17517 +f 17389 17390 17518 +f 17390 17519 17518 +f 17390 17391 17520 +f 17390 17520 17519 +f 17391 17392 17520 +f 17392 17521 17520 +f 17392 17393 17522 +f 17392 17522 17521 +f 17393 17394 17522 +f 17394 17523 17522 +f 17394 17395 17524 +f 17394 17524 17523 +f 17395 17396 17524 +f 17396 17525 17524 +f 17396 17397 17526 +f 17396 17526 17525 +f 17397 17398 17526 +f 17398 17527 17526 +f 17398 17399 17528 +f 17398 17528 17527 +f 17399 17400 17528 +f 17400 17529 17528 +f 17400 17401 17530 +f 17400 17530 17529 +f 17401 17402 17530 +f 17402 17531 17530 +f 17402 17403 17532 +f 17402 17532 17531 +f 17403 17404 17532 +f 17404 17533 17532 +f 17404 17405 17534 +f 17404 17534 17533 +f 17405 17406 17534 +f 17406 17535 17534 +f 17406 17407 17536 +f 17406 17536 17535 +f 17407 17408 17536 +f 17408 17537 17536 +f 17408 17409 17538 +f 17408 17538 17537 +f 17409 17410 17538 +f 17410 17539 17538 +f 17410 17411 17540 +f 17410 17540 17539 +f 17411 17412 17540 +f 
17412 17541 17540 +f 17412 17413 17542 +f 17412 17542 17541 +f 17413 17414 17542 +f 17414 17543 17542 +f 17414 17415 17544 +f 17414 17544 17543 +f 17415 17416 17544 +f 17416 17545 17544 +f 17416 17417 17546 +f 17416 17546 17545 +f 17417 17418 17546 +f 17418 17547 17546 +f 17418 17419 17548 +f 17418 17548 17547 +f 17419 17420 17548 +f 17420 17549 17548 +f 17420 17421 17550 +f 17420 17550 17549 +f 17421 17422 17550 +f 17422 17551 17550 +f 17422 17423 17552 +f 17422 17552 17551 +f 17423 17424 17552 +f 17424 17553 17552 +f 17424 17425 17554 +f 17424 17554 17553 +f 17425 17426 17554 +f 17426 17555 17554 +f 17426 17427 17556 +f 17426 17556 17555 +f 17427 17428 17556 +f 17428 17557 17556 +f 17428 17429 17558 +f 17428 17558 17557 +f 17429 17430 17558 +f 17430 17559 17558 +f 17430 17431 17560 +f 17430 17560 17559 +f 17431 17432 17560 +f 17432 17561 17560 +f 17432 17433 17562 +f 17432 17562 17561 +f 17433 17434 17562 +f 17434 17563 17562 +f 17434 17435 17564 +f 17434 17564 17563 +f 17435 17436 17564 +f 17436 17565 17564 +f 17436 17437 17566 +f 17436 17566 17565 +f 17437 17438 17566 +f 17438 17567 17566 +f 17438 17439 17568 +f 17438 17568 17567 +f 17439 17440 17568 +f 17440 17569 17568 +f 17440 17441 17570 +f 17440 17570 17569 +f 17441 17442 17570 +f 17442 17571 17570 +f 17442 17443 17572 +f 17442 17572 17571 +f 17443 17444 17572 +f 17444 17573 17572 +f 17444 17445 17574 +f 17444 17574 17573 +f 17445 17446 17574 +f 17446 17575 17574 +f 17446 17447 17576 +f 17446 17576 17575 +f 17447 17448 17576 +f 17448 17577 17576 +f 17448 17449 17578 +f 17448 17578 17577 +f 17449 17450 17578 +f 17450 17579 17578 +f 17450 17451 17580 +f 17450 17580 17579 +f 17451 17452 17580 +f 17452 17581 17580 +f 17452 17453 17582 +f 17452 17582 17581 +f 17453 17454 17582 +f 17454 17583 17582 +f 17454 17455 17584 +f 17454 17584 17583 +f 17455 17456 17584 +f 17456 17585 17584 +f 17456 17457 17586 +f 17456 17586 17585 +f 17457 17458 17586 +f 17458 17587 17586 +f 17458 17459 17588 +f 17458 17588 17587 +f 17459 17460 17588 +f 17460 17589 17588 +f 17460 17461 17590 +f 17460 17590 17589 +f 17461 17462 17590 +f 17462 17591 17590 +f 17462 17463 17592 +f 17462 17592 17591 +f 17463 17464 17592 +f 17464 17593 17592 +f 17464 17465 17594 +f 17464 17594 17593 +f 17465 17466 17594 +f 17466 17595 17594 +f 17466 17467 17596 +f 17466 17596 17595 +f 17467 17468 17596 +f 17468 17597 17596 +f 17468 17469 17598 +f 17468 17598 17597 +f 17469 17470 17598 +f 17470 17599 17598 +f 17470 17471 17600 +f 17470 17600 17599 +f 17471 17472 17600 +f 17472 17601 17600 +f 17472 17473 17602 +f 17472 17602 17601 +f 17473 17474 17602 +f 17474 17603 17602 +f 17474 17475 17604 +f 17474 17604 17603 +f 17475 17476 17604 +f 17476 17605 17604 +f 17476 17477 17606 +f 17476 17606 17605 +f 17477 17478 17606 +f 17478 17607 17606 +f 17478 17479 17608 +f 17478 17608 17607 +f 17479 17480 17608 +f 17480 17609 17608 +f 17480 17481 17610 +f 17480 17610 17609 +f 17481 17482 17610 +f 17482 17611 17610 +f 17482 17483 17612 +f 17482 17612 17611 +f 17483 17484 17612 +f 17484 17613 17612 +f 17484 17485 17614 +f 17484 17614 17613 +f 17485 17486 17614 +f 17486 17615 17614 +f 17486 17487 17616 +f 17486 17616 17615 +f 17487 17488 17616 +f 17488 17617 17616 +f 17488 17489 17618 +f 17488 17618 17617 +f 17489 17490 17618 +f 17490 17619 17618 +f 17490 17491 17620 +f 17490 17620 17619 +f 17491 17492 17620 +f 17492 17621 17620 +f 17492 17493 17622 +f 17492 17622 17621 +f 17493 17494 17622 +f 17494 17623 17622 +f 17494 17495 17624 +f 17494 17624 17623 +f 17495 17496 17624 +f 17496 17625 17624 +f 17497 
17498 17626 +f 17498 17627 17626 +f 17498 17499 17628 +f 17498 17628 17627 +f 17499 17500 17628 +f 17500 17629 17628 +f 17500 17501 17630 +f 17500 17630 17629 +f 17501 17502 17630 +f 17502 17631 17630 +f 17502 17503 17632 +f 17502 17632 17631 +f 17503 17504 17632 +f 17504 17633 17632 +f 17504 17505 17634 +f 17504 17634 17633 +f 17505 17506 17634 +f 17506 17635 17634 +f 17506 17507 17636 +f 17506 17636 17635 +f 17507 17508 17636 +f 17508 17637 17636 +f 17508 17509 17638 +f 17508 17638 17637 +f 17509 17510 17638 +f 17510 17639 17638 +f 17510 17511 17640 +f 17510 17640 17639 +f 17511 17512 17640 +f 17512 17641 17640 +f 17512 17513 17642 +f 17512 17642 17641 +f 17513 17514 17642 +f 17514 17643 17642 +f 17514 17515 17644 +f 17514 17644 17643 +f 17515 17516 17644 +f 17516 17645 17644 +f 17516 17517 17646 +f 17516 17646 17645 +f 17517 17518 17646 +f 17518 17647 17646 +f 17518 17519 17648 +f 17518 17648 17647 +f 17519 17520 17648 +f 17520 17649 17648 +f 17520 17521 17650 +f 17520 17650 17649 +f 17521 17522 17650 +f 17522 17651 17650 +f 17522 17523 17652 +f 17522 17652 17651 +f 17523 17524 17652 +f 17524 17653 17652 +f 17524 17525 17654 +f 17524 17654 17653 +f 17525 17526 17654 +f 17526 17655 17654 +f 17526 17527 17656 +f 17526 17656 17655 +f 17527 17528 17656 +f 17528 17657 17656 +f 17528 17529 17658 +f 17528 17658 17657 +f 17529 17530 17658 +f 17530 17659 17658 +f 17530 17531 17660 +f 17530 17660 17659 +f 17531 17532 17660 +f 17532 17661 17660 +f 17532 17533 17662 +f 17532 17662 17661 +f 17533 17534 17662 +f 17534 17663 17662 +f 17534 17535 17664 +f 17534 17664 17663 +f 17535 17536 17664 +f 17536 17665 17664 +f 17536 17537 17666 +f 17536 17666 17665 +f 17537 17538 17666 +f 17538 17667 17666 +f 17538 17539 17668 +f 17538 17668 17667 +f 17539 17540 17668 +f 17540 17669 17668 +f 17540 17541 17670 +f 17540 17670 17669 +f 17541 17542 17670 +f 17542 17671 17670 +f 17542 17543 17672 +f 17542 17672 17671 +f 17543 17544 17672 +f 17544 17673 17672 +f 17544 17545 17674 +f 17544 17674 17673 +f 17545 17546 17674 +f 17546 17675 17674 +f 17546 17547 17676 +f 17546 17676 17675 +f 17547 17548 17676 +f 17548 17677 17676 +f 17548 17549 17678 +f 17548 17678 17677 +f 17549 17550 17678 +f 17550 17679 17678 +f 17550 17551 17680 +f 17550 17680 17679 +f 17551 17552 17680 +f 17552 17681 17680 +f 17552 17553 17682 +f 17552 17682 17681 +f 17553 17554 17682 +f 17554 17683 17682 +f 17554 17555 17684 +f 17554 17684 17683 +f 17555 17556 17684 +f 17556 17685 17684 +f 17556 17557 17686 +f 17556 17686 17685 +f 17557 17558 17686 +f 17558 17687 17686 +f 17558 17559 17688 +f 17558 17688 17687 +f 17559 17560 17688 +f 17560 17689 17688 +f 17560 17561 17690 +f 17560 17690 17689 +f 17561 17562 17690 +f 17562 17691 17690 +f 17562 17563 17692 +f 17562 17692 17691 +f 17563 17564 17692 +f 17564 17693 17692 +f 17564 17565 17694 +f 17564 17694 17693 +f 17565 17566 17694 +f 17566 17695 17694 +f 17566 17567 17696 +f 17566 17696 17695 +f 17567 17568 17696 +f 17568 17697 17696 +f 17568 17569 17698 +f 17568 17698 17697 +f 17569 17570 17698 +f 17570 17699 17698 +f 17570 17571 17700 +f 17570 17700 17699 +f 17571 17572 17700 +f 17572 17701 17700 +f 17572 17573 17702 +f 17572 17702 17701 +f 17573 17574 17702 +f 17574 17703 17702 +f 17574 17575 17704 +f 17574 17704 17703 +f 17575 17576 17704 +f 17576 17705 17704 +f 17576 17577 17706 +f 17576 17706 17705 +f 17577 17578 17706 +f 17578 17707 17706 +f 17578 17579 17708 +f 17578 17708 17707 +f 17579 17580 17708 +f 17580 17709 17708 +f 17580 17581 17710 +f 17580 17710 17709 +f 17581 17582 17710 +f 17582 17711 
17710 +f 17582 17583 17712 +f 17582 17712 17711 +f 17583 17584 17712 +f 17584 17713 17712 +f 17584 17585 17714 +f 17584 17714 17713 +f 17585 17586 17714 +f 17586 17715 17714 +f 17586 17587 17716 +f 17586 17716 17715 +f 17587 17588 17716 +f 17588 17717 17716 +f 17588 17589 17718 +f 17588 17718 17717 +f 17589 17590 17718 +f 17590 17719 17718 +f 17590 17591 17720 +f 17590 17720 17719 +f 17591 17592 17720 +f 17592 17721 17720 +f 17592 17593 17722 +f 17592 17722 17721 +f 17593 17594 17722 +f 17594 17723 17722 +f 17594 17595 17724 +f 17594 17724 17723 +f 17595 17596 17724 +f 17596 17725 17724 +f 17596 17597 17726 +f 17596 17726 17725 +f 17597 17598 17726 +f 17598 17727 17726 +f 17598 17599 17728 +f 17598 17728 17727 +f 17599 17600 17728 +f 17600 17729 17728 +f 17600 17601 17730 +f 17600 17730 17729 +f 17601 17602 17730 +f 17602 17731 17730 +f 17602 17603 17732 +f 17602 17732 17731 +f 17603 17604 17732 +f 17604 17733 17732 +f 17604 17605 17734 +f 17604 17734 17733 +f 17605 17606 17734 +f 17606 17735 17734 +f 17606 17607 17736 +f 17606 17736 17735 +f 17607 17608 17736 +f 17608 17737 17736 +f 17608 17609 17738 +f 17608 17738 17737 +f 17609 17610 17738 +f 17610 17739 17738 +f 17610 17611 17740 +f 17610 17740 17739 +f 17611 17612 17740 +f 17612 17741 17740 +f 17612 17613 17742 +f 17612 17742 17741 +f 17613 17614 17742 +f 17614 17743 17742 +f 17614 17615 17744 +f 17614 17744 17743 +f 17615 17616 17744 +f 17616 17745 17744 +f 17616 17617 17746 +f 17616 17746 17745 +f 17617 17618 17746 +f 17618 17747 17746 +f 17618 17619 17748 +f 17618 17748 17747 +f 17619 17620 17748 +f 17620 17749 17748 +f 17620 17621 17750 +f 17620 17750 17749 +f 17621 17622 17750 +f 17622 17751 17750 +f 17622 17623 17752 +f 17622 17752 17751 +f 17623 17624 17752 +f 17624 17753 17752 +f 17624 17625 17754 +f 17624 17754 17753 +f 17626 17627 17756 +f 17626 17756 17755 +f 17627 17628 17756 +f 17628 17757 17756 +f 17628 17629 17758 +f 17628 17758 17757 +f 17629 17630 17758 +f 17630 17759 17758 +f 17630 17631 17760 +f 17630 17760 17759 +f 17631 17632 17760 +f 17632 17761 17760 +f 17632 17633 17762 +f 17632 17762 17761 +f 17633 17634 17762 +f 17634 17763 17762 +f 17634 17635 17764 +f 17634 17764 17763 +f 17635 17636 17764 +f 17636 17765 17764 +f 17636 17637 17766 +f 17636 17766 17765 +f 17637 17638 17766 +f 17638 17767 17766 +f 17638 17639 17768 +f 17638 17768 17767 +f 17639 17640 17768 +f 17640 17769 17768 +f 17640 17641 17770 +f 17640 17770 17769 +f 17641 17642 17770 +f 17642 17771 17770 +f 17642 17643 17772 +f 17642 17772 17771 +f 17643 17644 17772 +f 17644 17773 17772 +f 17644 17645 17774 +f 17644 17774 17773 +f 17645 17646 17774 +f 17646 17775 17774 +f 17646 17647 17776 +f 17646 17776 17775 +f 17647 17648 17776 +f 17648 17777 17776 +f 17648 17649 17778 +f 17648 17778 17777 +f 17649 17650 17778 +f 17650 17779 17778 +f 17650 17651 17780 +f 17650 17780 17779 +f 17651 17652 17780 +f 17652 17781 17780 +f 17652 17653 17782 +f 17652 17782 17781 +f 17653 17654 17782 +f 17654 17783 17782 +f 17654 17655 17784 +f 17654 17784 17783 +f 17655 17656 17784 +f 17656 17785 17784 +f 17656 17657 17786 +f 17656 17786 17785 +f 17657 17658 17786 +f 17658 17787 17786 +f 17658 17659 17788 +f 17658 17788 17787 +f 17659 17660 17788 +f 17660 17789 17788 +f 17660 17661 17790 +f 17660 17790 17789 +f 17661 17662 17790 +f 17662 17791 17790 +f 17662 17663 17792 +f 17662 17792 17791 +f 17663 17664 17792 +f 17664 17793 17792 +f 17664 17665 17794 +f 17664 17794 17793 +f 17665 17666 17794 +f 17666 17795 17794 +f 17666 17667 17796 +f 17666 17796 17795 +f 17667 17668 17796 
+f 17668 17797 17796 +f 17668 17669 17798 +f 17668 17798 17797 +f 17669 17670 17798 +f 17670 17799 17798 +f 17670 17671 17800 +f 17670 17800 17799 +f 17671 17672 17800 +f 17672 17801 17800 +f 17672 17673 17802 +f 17672 17802 17801 +f 17673 17674 17802 +f 17674 17803 17802 +f 17674 17675 17804 +f 17674 17804 17803 +f 17675 17676 17804 +f 17676 17805 17804 +f 17676 17677 17806 +f 17676 17806 17805 +f 17677 17678 17806 +f 17678 17807 17806 +f 17678 17679 17808 +f 17678 17808 17807 +f 17679 17680 17808 +f 17680 17809 17808 +f 17680 17681 17810 +f 17680 17810 17809 +f 17681 17682 17810 +f 17682 17811 17810 +f 17682 17683 17812 +f 17682 17812 17811 +f 17683 17684 17812 +f 17684 17813 17812 +f 17684 17685 17814 +f 17684 17814 17813 +f 17685 17686 17814 +f 17686 17815 17814 +f 17686 17687 17816 +f 17686 17816 17815 +f 17687 17688 17816 +f 17688 17817 17816 +f 17688 17689 17818 +f 17688 17818 17817 +f 17689 17690 17818 +f 17690 17819 17818 +f 17690 17691 17820 +f 17690 17820 17819 +f 17691 17692 17820 +f 17692 17821 17820 +f 17692 17693 17822 +f 17692 17822 17821 +f 17693 17694 17822 +f 17694 17823 17822 +f 17694 17695 17824 +f 17694 17824 17823 +f 17695 17696 17824 +f 17696 17825 17824 +f 17696 17697 17826 +f 17696 17826 17825 +f 17697 17698 17826 +f 17698 17827 17826 +f 17698 17699 17828 +f 17698 17828 17827 +f 17699 17700 17828 +f 17700 17829 17828 +f 17700 17701 17830 +f 17700 17830 17829 +f 17701 17702 17830 +f 17702 17831 17830 +f 17702 17703 17832 +f 17702 17832 17831 +f 17703 17704 17832 +f 17704 17833 17832 +f 17704 17705 17834 +f 17704 17834 17833 +f 17705 17706 17834 +f 17706 17835 17834 +f 17706 17707 17836 +f 17706 17836 17835 +f 17707 17708 17836 +f 17708 17837 17836 +f 17708 17709 17838 +f 17708 17838 17837 +f 17709 17710 17838 +f 17710 17839 17838 +f 17710 17711 17840 +f 17710 17840 17839 +f 17711 17712 17840 +f 17712 17841 17840 +f 17712 17713 17842 +f 17712 17842 17841 +f 17713 17714 17842 +f 17714 17843 17842 +f 17714 17715 17844 +f 17714 17844 17843 +f 17715 17716 17844 +f 17716 17845 17844 +f 17716 17717 17846 +f 17716 17846 17845 +f 17717 17718 17846 +f 17718 17847 17846 +f 17718 17719 17848 +f 17718 17848 17847 +f 17719 17720 17848 +f 17720 17849 17848 +f 17720 17721 17850 +f 17720 17850 17849 +f 17721 17722 17850 +f 17722 17851 17850 +f 17722 17723 17852 +f 17722 17852 17851 +f 17723 17724 17852 +f 17724 17853 17852 +f 17724 17725 17854 +f 17724 17854 17853 +f 17725 17726 17854 +f 17726 17855 17854 +f 17726 17727 17856 +f 17726 17856 17855 +f 17727 17728 17856 +f 17728 17857 17856 +f 17728 17729 17858 +f 17728 17858 17857 +f 17729 17730 17858 +f 17730 17859 17858 +f 17730 17731 17860 +f 17730 17860 17859 +f 17731 17732 17860 +f 17732 17861 17860 +f 17732 17733 17862 +f 17732 17862 17861 +f 17733 17734 17862 +f 17734 17863 17862 +f 17734 17735 17864 +f 17734 17864 17863 +f 17735 17736 17864 +f 17736 17865 17864 +f 17736 17737 17866 +f 17736 17866 17865 +f 17737 17738 17866 +f 17738 17867 17866 +f 17738 17739 17868 +f 17738 17868 17867 +f 17739 17740 17868 +f 17740 17869 17868 +f 17740 17741 17870 +f 17740 17870 17869 +f 17741 17742 17870 +f 17742 17871 17870 +f 17742 17743 17872 +f 17742 17872 17871 +f 17743 17744 17872 +f 17744 17873 17872 +f 17744 17745 17874 +f 17744 17874 17873 +f 17745 17746 17874 +f 17746 17875 17874 +f 17746 17747 17876 +f 17746 17876 17875 +f 17747 17748 17876 +f 17748 17877 17876 +f 17748 17749 17878 +f 17748 17878 17877 +f 17749 17750 17878 +f 17750 17879 17878 +f 17750 17751 17880 +f 17750 17880 17879 +f 17751 17752 17880 +f 17752 17881 17880 +f 
17752 17753 17882 +f 17752 17882 17881 +f 17753 17754 17882 +f 17754 17883 17882 +f 17755 17756 17884 +f 17756 17885 17884 +f 17756 17757 17886 +f 17756 17886 17885 +f 17757 17758 17886 +f 17758 17887 17886 +f 17758 17759 17888 +f 17758 17888 17887 +f 17759 17760 17888 +f 17760 17889 17888 +f 17760 17761 17890 +f 17760 17890 17889 +f 17761 17762 17890 +f 17762 17891 17890 +f 17762 17763 17892 +f 17762 17892 17891 +f 17763 17764 17892 +f 17764 17893 17892 +f 17764 17765 17894 +f 17764 17894 17893 +f 17765 17766 17894 +f 17766 17895 17894 +f 17766 17767 17896 +f 17766 17896 17895 +f 17767 17768 17896 +f 17768 17897 17896 +f 17768 17769 17898 +f 17768 17898 17897 +f 17769 17770 17898 +f 17770 17899 17898 +f 17770 17771 17900 +f 17770 17900 17899 +f 17771 17772 17900 +f 17772 17901 17900 +f 17772 17773 17902 +f 17772 17902 17901 +f 17773 17774 17902 +f 17774 17903 17902 +f 17774 17775 17904 +f 17774 17904 17903 +f 17775 17776 17904 +f 17776 17905 17904 +f 17776 17777 17906 +f 17776 17906 17905 +f 17777 17778 17906 +f 17778 17907 17906 +f 17778 17779 17908 +f 17778 17908 17907 +f 17779 17780 17908 +f 17780 17909 17908 +f 17780 17781 17910 +f 17780 17910 17909 +f 17781 17782 17910 +f 17782 17911 17910 +f 17782 17783 17912 +f 17782 17912 17911 +f 17783 17784 17912 +f 17784 17913 17912 +f 17784 17785 17914 +f 17784 17914 17913 +f 17785 17786 17914 +f 17786 17915 17914 +f 17786 17787 17916 +f 17786 17916 17915 +f 17787 17788 17916 +f 17788 17917 17916 +f 17788 17789 17918 +f 17788 17918 17917 +f 17789 17790 17918 +f 17790 17919 17918 +f 17790 17791 17920 +f 17790 17920 17919 +f 17791 17792 17920 +f 17792 17921 17920 +f 17792 17793 17922 +f 17792 17922 17921 +f 17793 17794 17922 +f 17794 17923 17922 +f 17794 17795 17924 +f 17794 17924 17923 +f 17795 17796 17924 +f 17796 17925 17924 +f 17796 17797 17926 +f 17796 17926 17925 +f 17797 17798 17926 +f 17798 17927 17926 +f 17798 17799 17928 +f 17798 17928 17927 +f 17799 17800 17928 +f 17800 17929 17928 +f 17800 17801 17930 +f 17800 17930 17929 +f 17801 17802 17930 +f 17802 17931 17930 +f 17802 17803 17932 +f 17802 17932 17931 +f 17803 17804 17932 +f 17804 17933 17932 +f 17804 17805 17934 +f 17804 17934 17933 +f 17805 17806 17934 +f 17806 17935 17934 +f 17806 17807 17936 +f 17806 17936 17935 +f 17807 17808 17936 +f 17808 17937 17936 +f 17808 17809 17938 +f 17808 17938 17937 +f 17809 17810 17938 +f 17810 17939 17938 +f 17810 17811 17940 +f 17810 17940 17939 +f 17811 17812 17940 +f 17812 17941 17940 +f 17812 17813 17942 +f 17812 17942 17941 +f 17813 17814 17942 +f 17814 17943 17942 +f 17814 17815 17944 +f 17814 17944 17943 +f 17815 17816 17944 +f 17816 17945 17944 +f 17816 17817 17946 +f 17816 17946 17945 +f 17817 17818 17946 +f 17818 17947 17946 +f 17818 17819 17948 +f 17818 17948 17947 +f 17819 17820 17948 +f 17820 17949 17948 +f 17820 17821 17950 +f 17820 17950 17949 +f 17821 17822 17950 +f 17822 17951 17950 +f 17822 17823 17952 +f 17822 17952 17951 +f 17823 17824 17952 +f 17824 17953 17952 +f 17824 17825 17954 +f 17824 17954 17953 +f 17825 17826 17954 +f 17826 17955 17954 +f 17826 17827 17956 +f 17826 17956 17955 +f 17827 17828 17956 +f 17828 17957 17956 +f 17828 17829 17958 +f 17828 17958 17957 +f 17829 17830 17958 +f 17830 17959 17958 +f 17830 17831 17960 +f 17830 17960 17959 +f 17831 17832 17960 +f 17832 17961 17960 +f 17832 17833 17962 +f 17832 17962 17961 +f 17833 17834 17962 +f 17834 17963 17962 +f 17834 17835 17964 +f 17834 17964 17963 +f 17835 17836 17964 +f 17836 17965 17964 +f 17836 17837 17966 +f 17836 17966 17965 +f 17837 17838 17966 +f 17838 
17967 17966 +f 17838 17839 17968 +f 17838 17968 17967 +f 17839 17840 17968 +f 17840 17969 17968 +f 17840 17841 17970 +f 17840 17970 17969 +f 17841 17842 17970 +f 17842 17971 17970 +f 17842 17843 17972 +f 17842 17972 17971 +f 17843 17844 17972 +f 17844 17973 17972 +f 17844 17845 17974 +f 17844 17974 17973 +f 17845 17846 17974 +f 17846 17975 17974 +f 17846 17847 17976 +f 17846 17976 17975 +f 17847 17848 17976 +f 17848 17977 17976 +f 17848 17849 17978 +f 17848 17978 17977 +f 17849 17850 17978 +f 17850 17979 17978 +f 17850 17851 17980 +f 17850 17980 17979 +f 17851 17852 17980 +f 17852 17981 17980 +f 17852 17853 17982 +f 17852 17982 17981 +f 17853 17854 17982 +f 17854 17983 17982 +f 17854 17855 17984 +f 17854 17984 17983 +f 17855 17856 17984 +f 17856 17985 17984 +f 17856 17857 17986 +f 17856 17986 17985 +f 17857 17858 17986 +f 17858 17987 17986 +f 17858 17859 17988 +f 17858 17988 17987 +f 17859 17860 17988 +f 17860 17989 17988 +f 17860 17861 17990 +f 17860 17990 17989 +f 17861 17862 17990 +f 17862 17991 17990 +f 17862 17863 17992 +f 17862 17992 17991 +f 17863 17864 17992 +f 17864 17993 17992 +f 17864 17865 17994 +f 17864 17994 17993 +f 17865 17866 17994 +f 17866 17995 17994 +f 17866 17867 17996 +f 17866 17996 17995 +f 17867 17868 17996 +f 17868 17997 17996 +f 17868 17869 17998 +f 17868 17998 17997 +f 17869 17870 17998 +f 17870 17999 17998 +f 17870 17871 18000 +f 17870 18000 17999 +f 17871 17872 18000 +f 17872 18001 18000 +f 17872 17873 18002 +f 17872 18002 18001 +f 17873 17874 18002 +f 17874 18003 18002 +f 17874 17875 18004 +f 17874 18004 18003 +f 17875 17876 18004 +f 17876 18005 18004 +f 17876 17877 18006 +f 17876 18006 18005 +f 17877 17878 18006 +f 17878 18007 18006 +f 17878 17879 18008 +f 17878 18008 18007 +f 17879 17880 18008 +f 17880 18009 18008 +f 17880 17881 18010 +f 17880 18010 18009 +f 17881 17882 18010 +f 17882 18011 18010 +f 17882 17883 18012 +f 17882 18012 18011 +f 17884 17885 18014 +f 17884 18014 18013 +f 17885 17886 18014 +f 17886 18015 18014 +f 17886 17887 18016 +f 17886 18016 18015 +f 17887 17888 18016 +f 17888 18017 18016 +f 17888 17889 18018 +f 17888 18018 18017 +f 17889 17890 18018 +f 17890 18019 18018 +f 17890 17891 18020 +f 17890 18020 18019 +f 17891 17892 18020 +f 17892 18021 18020 +f 17892 17893 18022 +f 17892 18022 18021 +f 17893 17894 18022 +f 17894 18023 18022 +f 17894 17895 18024 +f 17894 18024 18023 +f 17895 17896 18024 +f 17896 18025 18024 +f 17896 17897 18026 +f 17896 18026 18025 +f 17897 17898 18026 +f 17898 18027 18026 +f 17898 17899 18028 +f 17898 18028 18027 +f 17899 17900 18028 +f 17900 18029 18028 +f 17900 17901 18030 +f 17900 18030 18029 +f 17901 17902 18030 +f 17902 18031 18030 +f 17902 17903 18032 +f 17902 18032 18031 +f 17903 17904 18032 +f 17904 18033 18032 +f 17904 17905 18034 +f 17904 18034 18033 +f 17905 17906 18034 +f 17906 18035 18034 +f 17906 17907 18036 +f 17906 18036 18035 +f 17907 17908 18036 +f 17908 18037 18036 +f 17908 17909 18038 +f 17908 18038 18037 +f 17909 17910 18038 +f 17910 18039 18038 +f 17910 17911 18040 +f 17910 18040 18039 +f 17911 17912 18040 +f 17912 18041 18040 +f 17912 17913 18042 +f 17912 18042 18041 +f 17913 17914 18042 +f 17914 18043 18042 +f 17914 17915 18044 +f 17914 18044 18043 +f 17915 17916 18044 +f 17916 18045 18044 +f 17916 17917 18046 +f 17916 18046 18045 +f 17917 17918 18046 +f 17918 18047 18046 +f 17918 17919 18048 +f 17918 18048 18047 +f 17919 17920 18048 +f 17920 18049 18048 +f 17920 17921 18050 +f 17920 18050 18049 +f 17921 17922 18050 +f 17922 18051 18050 +f 17922 17923 18052 +f 17922 18052 18051 +f 17923 17924 
18052 +f 17924 18053 18052 +f 17924 17925 18054 +f 17924 18054 18053 +f 17925 17926 18054 +f 17926 18055 18054 +f 17926 17927 18056 +f 17926 18056 18055 +f 17927 17928 18056 +f 17928 18057 18056 +f 17928 17929 18058 +f 17928 18058 18057 +f 17929 17930 18058 +f 17930 18059 18058 +f 17930 17931 18060 +f 17930 18060 18059 +f 17931 17932 18060 +f 17932 18061 18060 +f 17932 17933 18062 +f 17932 18062 18061 +f 17933 17934 18062 +f 17934 18063 18062 +f 17934 17935 18064 +f 17934 18064 18063 +f 17935 17936 18064 +f 17936 18065 18064 +f 17936 17937 18066 +f 17936 18066 18065 +f 17937 17938 18066 +f 17938 18067 18066 +f 17938 17939 18068 +f 17938 18068 18067 +f 17939 17940 18068 +f 17940 18069 18068 +f 17940 17941 18070 +f 17940 18070 18069 +f 17941 17942 18070 +f 17942 18071 18070 +f 17942 17943 18072 +f 17942 18072 18071 +f 17943 17944 18072 +f 17944 18073 18072 +f 17944 17945 18074 +f 17944 18074 18073 +f 17945 17946 18074 +f 17946 18075 18074 +f 17946 17947 18076 +f 17946 18076 18075 +f 17947 17948 18076 +f 17948 18077 18076 +f 17948 17949 18078 +f 17948 18078 18077 +f 17949 17950 18078 +f 17950 18079 18078 +f 17950 17951 18080 +f 17950 18080 18079 +f 17951 17952 18080 +f 17952 18081 18080 +f 17952 17953 18082 +f 17952 18082 18081 +f 17953 17954 18082 +f 17954 18083 18082 +f 17954 17955 18084 +f 17954 18084 18083 +f 17955 17956 18084 +f 17956 18085 18084 +f 17956 17957 18086 +f 17956 18086 18085 +f 17957 17958 18086 +f 17958 18087 18086 +f 17958 17959 18088 +f 17958 18088 18087 +f 17959 17960 18088 +f 17960 18089 18088 +f 17960 17961 18090 +f 17960 18090 18089 +f 17961 17962 18090 +f 17962 18091 18090 +f 17962 17963 18092 +f 17962 18092 18091 +f 17963 17964 18092 +f 17964 18093 18092 +f 17964 17965 18094 +f 17964 18094 18093 +f 17965 17966 18094 +f 17966 18095 18094 +f 17966 17967 18096 +f 17966 18096 18095 +f 17967 17968 18096 +f 17968 18097 18096 +f 17968 17969 18098 +f 17968 18098 18097 +f 17969 17970 18098 +f 17970 18099 18098 +f 17970 17971 18100 +f 17970 18100 18099 +f 17971 17972 18100 +f 17972 18101 18100 +f 17972 17973 18102 +f 17972 18102 18101 +f 17973 17974 18102 +f 17974 18103 18102 +f 17974 17975 18104 +f 17974 18104 18103 +f 17975 17976 18104 +f 17976 18105 18104 +f 17976 17977 18106 +f 17976 18106 18105 +f 17977 17978 18106 +f 17978 18107 18106 +f 17978 17979 18108 +f 17978 18108 18107 +f 17979 17980 18108 +f 17980 18109 18108 +f 17980 17981 18110 +f 17980 18110 18109 +f 17981 17982 18110 +f 17982 18111 18110 +f 17982 17983 18112 +f 17982 18112 18111 +f 17983 17984 18112 +f 17984 18113 18112 +f 17984 17985 18114 +f 17984 18114 18113 +f 17985 17986 18114 +f 17986 18115 18114 +f 17986 17987 18116 +f 17986 18116 18115 +f 17987 17988 18116 +f 17988 18117 18116 +f 17988 17989 18118 +f 17988 18118 18117 +f 17989 17990 18118 +f 17990 18119 18118 +f 17990 17991 18120 +f 17990 18120 18119 +f 17991 17992 18120 +f 17992 18121 18120 +f 17992 17993 18122 +f 17992 18122 18121 +f 17993 17994 18122 +f 17994 18123 18122 +f 17994 17995 18124 +f 17994 18124 18123 +f 17995 17996 18124 +f 17996 18125 18124 +f 17996 17997 18126 +f 17996 18126 18125 +f 17997 17998 18126 +f 17998 18127 18126 +f 17998 17999 18128 +f 17998 18128 18127 +f 17999 18000 18128 +f 18000 18129 18128 +f 18000 18001 18130 +f 18000 18130 18129 +f 18001 18002 18130 +f 18002 18131 18130 +f 18002 18003 18132 +f 18002 18132 18131 +f 18003 18004 18132 +f 18004 18133 18132 +f 18004 18005 18134 +f 18004 18134 18133 +f 18005 18006 18134 +f 18006 18135 18134 +f 18006 18007 18136 +f 18006 18136 18135 +f 18007 18008 18136 +f 18008 18137 18136 
+f 18008 18009 18138 +f 18008 18138 18137 +f 18009 18010 18138 +f 18010 18139 18138 +f 18010 18011 18140 +f 18010 18140 18139 +f 18011 18012 18140 +f 18012 18141 18140 +f 18013 18014 18142 +f 18014 18143 18142 +f 18014 18015 18144 +f 18014 18144 18143 +f 18015 18016 18144 +f 18016 18145 18144 +f 18016 18017 18146 +f 18016 18146 18145 +f 18017 18018 18146 +f 18018 18147 18146 +f 18018 18019 18148 +f 18018 18148 18147 +f 18019 18020 18148 +f 18020 18149 18148 +f 18020 18021 18150 +f 18020 18150 18149 +f 18021 18022 18150 +f 18022 18151 18150 +f 18022 18023 18152 +f 18022 18152 18151 +f 18023 18024 18152 +f 18024 18153 18152 +f 18024 18025 18154 +f 18024 18154 18153 +f 18025 18026 18154 +f 18026 18155 18154 +f 18026 18027 18156 +f 18026 18156 18155 +f 18027 18028 18156 +f 18028 18157 18156 +f 18028 18029 18158 +f 18028 18158 18157 +f 18029 18030 18158 +f 18030 18159 18158 +f 18030 18031 18160 +f 18030 18160 18159 +f 18031 18032 18160 +f 18032 18161 18160 +f 18032 18033 18162 +f 18032 18162 18161 +f 18033 18034 18162 +f 18034 18163 18162 +f 18034 18035 18164 +f 18034 18164 18163 +f 18035 18036 18164 +f 18036 18165 18164 +f 18036 18037 18166 +f 18036 18166 18165 +f 18037 18038 18166 +f 18038 18167 18166 +f 18038 18039 18168 +f 18038 18168 18167 +f 18039 18040 18168 +f 18040 18169 18168 +f 18040 18041 18170 +f 18040 18170 18169 +f 18041 18042 18170 +f 18042 18171 18170 +f 18042 18043 18172 +f 18042 18172 18171 +f 18043 18044 18172 +f 18044 18173 18172 +f 18044 18045 18174 +f 18044 18174 18173 +f 18045 18046 18174 +f 18046 18175 18174 +f 18046 18047 18176 +f 18046 18176 18175 +f 18047 18048 18176 +f 18048 18177 18176 +f 18048 18049 18178 +f 18048 18178 18177 +f 18049 18050 18178 +f 18050 18179 18178 +f 18050 18051 18180 +f 18050 18180 18179 +f 18051 18052 18180 +f 18052 18181 18180 +f 18052 18053 18182 +f 18052 18182 18181 +f 18053 18054 18182 +f 18054 18183 18182 +f 18054 18055 18184 +f 18054 18184 18183 +f 18055 18056 18184 +f 18056 18185 18184 +f 18056 18057 18186 +f 18056 18186 18185 +f 18057 18058 18186 +f 18058 18187 18186 +f 18058 18059 18188 +f 18058 18188 18187 +f 18059 18060 18188 +f 18060 18189 18188 +f 18060 18061 18190 +f 18060 18190 18189 +f 18061 18062 18190 +f 18062 18191 18190 +f 18062 18063 18192 +f 18062 18192 18191 +f 18063 18064 18192 +f 18064 18193 18192 +f 18064 18065 18194 +f 18064 18194 18193 +f 18065 18066 18194 +f 18066 18195 18194 +f 18066 18067 18196 +f 18066 18196 18195 +f 18067 18068 18196 +f 18068 18197 18196 +f 18068 18069 18198 +f 18068 18198 18197 +f 18069 18070 18198 +f 18070 18199 18198 +f 18070 18071 18200 +f 18070 18200 18199 +f 18071 18072 18200 +f 18072 18201 18200 +f 18072 18073 18202 +f 18072 18202 18201 +f 18073 18074 18202 +f 18074 18203 18202 +f 18074 18075 18204 +f 18074 18204 18203 +f 18075 18076 18204 +f 18076 18205 18204 +f 18076 18077 18206 +f 18076 18206 18205 +f 18077 18078 18206 +f 18078 18207 18206 +f 18078 18079 18208 +f 18078 18208 18207 +f 18079 18080 18208 +f 18080 18209 18208 +f 18080 18081 18210 +f 18080 18210 18209 +f 18081 18082 18210 +f 18082 18211 18210 +f 18082 18083 18212 +f 18082 18212 18211 +f 18083 18084 18212 +f 18084 18213 18212 +f 18084 18085 18214 +f 18084 18214 18213 +f 18085 18086 18214 +f 18086 18215 18214 +f 18086 18087 18216 +f 18086 18216 18215 +f 18087 18088 18216 +f 18088 18217 18216 +f 18088 18089 18218 +f 18088 18218 18217 +f 18089 18090 18218 +f 18090 18219 18218 +f 18090 18091 18220 +f 18090 18220 18219 +f 18091 18092 18220 +f 18092 18221 18220 +f 18092 18093 18222 +f 18092 18222 18221 +f 18093 18094 18222 +f 
18094 18223 18222 +f 18094 18095 18224 +f 18094 18224 18223 +f 18095 18096 18224 +f 18096 18225 18224 +f 18096 18097 18226 +f 18096 18226 18225 +f 18097 18098 18226 +f 18098 18227 18226 +f 18098 18099 18228 +f 18098 18228 18227 +f 18099 18100 18228 +f 18100 18229 18228 +f 18100 18101 18230 +f 18100 18230 18229 +f 18101 18102 18230 +f 18102 18231 18230 +f 18102 18103 18232 +f 18102 18232 18231 +f 18103 18104 18232 +f 18104 18233 18232 +f 18104 18105 18234 +f 18104 18234 18233 +f 18105 18106 18234 +f 18106 18235 18234 +f 18106 18107 18236 +f 18106 18236 18235 +f 18107 18108 18236 +f 18108 18237 18236 +f 18108 18109 18238 +f 18108 18238 18237 +f 18109 18110 18238 +f 18110 18239 18238 +f 18110 18111 18240 +f 18110 18240 18239 +f 18111 18112 18240 +f 18112 18241 18240 +f 18112 18113 18242 +f 18112 18242 18241 +f 18113 18114 18242 +f 18114 18243 18242 +f 18114 18115 18244 +f 18114 18244 18243 +f 18115 18116 18244 +f 18116 18245 18244 +f 18116 18117 18246 +f 18116 18246 18245 +f 18117 18118 18246 +f 18118 18247 18246 +f 18118 18119 18248 +f 18118 18248 18247 +f 18119 18120 18248 +f 18120 18249 18248 +f 18120 18121 18250 +f 18120 18250 18249 +f 18121 18122 18250 +f 18122 18251 18250 +f 18122 18123 18252 +f 18122 18252 18251 +f 18123 18124 18252 +f 18124 18253 18252 +f 18124 18125 18254 +f 18124 18254 18253 +f 18125 18126 18254 +f 18126 18255 18254 +f 18126 18127 18256 +f 18126 18256 18255 +f 18127 18128 18256 +f 18128 18257 18256 +f 18128 18129 18258 +f 18128 18258 18257 +f 18129 18130 18258 +f 18130 18259 18258 +f 18130 18131 18260 +f 18130 18260 18259 +f 18131 18132 18260 +f 18132 18261 18260 +f 18132 18133 18262 +f 18132 18262 18261 +f 18133 18134 18262 +f 18134 18263 18262 +f 18134 18135 18264 +f 18134 18264 18263 +f 18135 18136 18264 +f 18136 18265 18264 +f 18136 18137 18266 +f 18136 18266 18265 +f 18137 18138 18266 +f 18138 18267 18266 +f 18138 18139 18268 +f 18138 18268 18267 +f 18139 18140 18268 +f 18140 18269 18268 +f 18140 18141 18270 +f 18140 18270 18269 +f 18142 18143 18272 +f 18142 18272 18271 +f 18143 18144 18272 +f 18144 18273 18272 +f 18144 18145 18274 +f 18144 18274 18273 +f 18145 18146 18274 +f 18146 18275 18274 +f 18146 18147 18276 +f 18146 18276 18275 +f 18147 18148 18276 +f 18148 18277 18276 +f 18148 18149 18278 +f 18148 18278 18277 +f 18149 18150 18278 +f 18150 18279 18278 +f 18150 18151 18280 +f 18150 18280 18279 +f 18151 18152 18280 +f 18152 18281 18280 +f 18152 18153 18282 +f 18152 18282 18281 +f 18153 18154 18282 +f 18154 18283 18282 +f 18154 18155 18284 +f 18154 18284 18283 +f 18155 18156 18284 +f 18156 18285 18284 +f 18156 18157 18286 +f 18156 18286 18285 +f 18157 18158 18286 +f 18158 18287 18286 +f 18158 18159 18288 +f 18158 18288 18287 +f 18159 18160 18288 +f 18160 18289 18288 +f 18160 18161 18290 +f 18160 18290 18289 +f 18161 18162 18290 +f 18162 18291 18290 +f 18162 18163 18292 +f 18162 18292 18291 +f 18163 18164 18292 +f 18164 18293 18292 +f 18164 18165 18294 +f 18164 18294 18293 +f 18165 18166 18294 +f 18166 18295 18294 +f 18166 18167 18296 +f 18166 18296 18295 +f 18167 18168 18296 +f 18168 18297 18296 +f 18168 18169 18298 +f 18168 18298 18297 +f 18169 18170 18298 +f 18170 18299 18298 +f 18170 18171 18300 +f 18170 18300 18299 +f 18171 18172 18300 +f 18172 18301 18300 +f 18172 18173 18302 +f 18172 18302 18301 +f 18173 18174 18302 +f 18174 18303 18302 +f 18174 18175 18304 +f 18174 18304 18303 +f 18175 18176 18304 +f 18176 18305 18304 +f 18176 18177 18306 +f 18176 18306 18305 +f 18177 18178 18306 +f 18178 18307 18306 +f 18178 18179 18308 +f 18178 18308 18307 +f 18179 
18180 18308 +f 18180 18309 18308 +f 18180 18181 18310 +f 18180 18310 18309 +f 18181 18182 18310 +f 18182 18311 18310 +f 18182 18183 18312 +f 18182 18312 18311 +f 18183 18184 18312 +f 18184 18313 18312 +f 18184 18185 18314 +f 18184 18314 18313 +f 18185 18186 18314 +f 18186 18315 18314 +f 18186 18187 18316 +f 18186 18316 18315 +f 18187 18188 18316 +f 18188 18317 18316 +f 18188 18189 18318 +f 18188 18318 18317 +f 18189 18190 18318 +f 18190 18319 18318 +f 18190 18191 18320 +f 18190 18320 18319 +f 18191 18192 18320 +f 18192 18321 18320 +f 18192 18193 18322 +f 18192 18322 18321 +f 18193 18194 18322 +f 18194 18323 18322 +f 18194 18195 18324 +f 18194 18324 18323 +f 18195 18196 18324 +f 18196 18325 18324 +f 18196 18197 18326 +f 18196 18326 18325 +f 18197 18198 18326 +f 18198 18327 18326 +f 18198 18199 18328 +f 18198 18328 18327 +f 18199 18200 18328 +f 18200 18329 18328 +f 18200 18201 18330 +f 18200 18330 18329 +f 18201 18202 18330 +f 18202 18331 18330 +f 18202 18203 18332 +f 18202 18332 18331 +f 18203 18204 18332 +f 18204 18333 18332 +f 18204 18205 18334 +f 18204 18334 18333 +f 18205 18206 18334 +f 18206 18335 18334 +f 18206 18207 18336 +f 18206 18336 18335 +f 18207 18208 18336 +f 18208 18337 18336 +f 18208 18209 18338 +f 18208 18338 18337 +f 18209 18210 18338 +f 18210 18339 18338 +f 18210 18211 18340 +f 18210 18340 18339 +f 18211 18212 18340 +f 18212 18341 18340 +f 18212 18213 18342 +f 18212 18342 18341 +f 18213 18214 18342 +f 18214 18343 18342 +f 18214 18215 18344 +f 18214 18344 18343 +f 18215 18216 18344 +f 18216 18345 18344 +f 18216 18217 18346 +f 18216 18346 18345 +f 18217 18218 18346 +f 18218 18347 18346 +f 18218 18219 18348 +f 18218 18348 18347 +f 18219 18220 18348 +f 18220 18349 18348 +f 18220 18221 18350 +f 18220 18350 18349 +f 18221 18222 18350 +f 18222 18351 18350 +f 18222 18223 18352 +f 18222 18352 18351 +f 18223 18224 18352 +f 18224 18353 18352 +f 18224 18225 18354 +f 18224 18354 18353 +f 18225 18226 18354 +f 18226 18355 18354 +f 18226 18227 18356 +f 18226 18356 18355 +f 18227 18228 18356 +f 18228 18357 18356 +f 18228 18229 18358 +f 18228 18358 18357 +f 18229 18230 18358 +f 18230 18359 18358 +f 18230 18231 18360 +f 18230 18360 18359 +f 18231 18232 18360 +f 18232 18361 18360 +f 18232 18233 18362 +f 18232 18362 18361 +f 18233 18234 18362 +f 18234 18363 18362 +f 18234 18235 18364 +f 18234 18364 18363 +f 18235 18236 18364 +f 18236 18365 18364 +f 18236 18237 18366 +f 18236 18366 18365 +f 18237 18238 18366 +f 18238 18367 18366 +f 18238 18239 18368 +f 18238 18368 18367 +f 18239 18240 18368 +f 18240 18369 18368 +f 18240 18241 18370 +f 18240 18370 18369 +f 18241 18242 18370 +f 18242 18371 18370 +f 18242 18243 18372 +f 18242 18372 18371 +f 18243 18244 18372 +f 18244 18373 18372 +f 18244 18245 18374 +f 18244 18374 18373 +f 18245 18246 18374 +f 18246 18375 18374 +f 18246 18247 18376 +f 18246 18376 18375 +f 18247 18248 18376 +f 18248 18377 18376 +f 18248 18249 18378 +f 18248 18378 18377 +f 18249 18250 18378 +f 18250 18379 18378 +f 18250 18251 18380 +f 18250 18380 18379 +f 18251 18252 18380 +f 18252 18381 18380 +f 18252 18253 18382 +f 18252 18382 18381 +f 18253 18254 18382 +f 18254 18383 18382 +f 18254 18255 18384 +f 18254 18384 18383 +f 18255 18256 18384 +f 18256 18385 18384 +f 18256 18257 18386 +f 18256 18386 18385 +f 18257 18258 18386 +f 18258 18387 18386 +f 18258 18259 18388 +f 18258 18388 18387 +f 18259 18260 18388 +f 18260 18389 18388 +f 18260 18261 18390 +f 18260 18390 18389 +f 18261 18262 18390 +f 18262 18391 18390 +f 18262 18263 18392 +f 18262 18392 18391 +f 18263 18264 18392 +f 18264 18393 
18392 +f 18264 18265 18394 +f 18264 18394 18393 +f 18265 18266 18394 +f 18266 18395 18394 +f 18266 18267 18396 +f 18266 18396 18395 +f 18267 18268 18396 +f 18268 18397 18396 +f 18268 18269 18398 +f 18268 18398 18397 +f 18269 18270 18398 +f 18270 18399 18398 +f 18271 18272 18400 +f 18272 18401 18400 +f 18272 18273 18402 +f 18272 18402 18401 +f 18273 18274 18402 +f 18274 18403 18402 +f 18274 18275 18404 +f 18274 18404 18403 +f 18275 18276 18404 +f 18276 18405 18404 +f 18276 18277 18406 +f 18276 18406 18405 +f 18277 18278 18406 +f 18278 18407 18406 +f 18278 18279 18408 +f 18278 18408 18407 +f 18279 18280 18408 +f 18280 18409 18408 +f 18280 18281 18410 +f 18280 18410 18409 +f 18281 18282 18410 +f 18282 18411 18410 +f 18282 18283 18412 +f 18282 18412 18411 +f 18283 18284 18412 +f 18284 18413 18412 +f 18284 18285 18414 +f 18284 18414 18413 +f 18285 18286 18414 +f 18286 18415 18414 +f 18286 18287 18416 +f 18286 18416 18415 +f 18287 18288 18416 +f 18288 18417 18416 +f 18288 18289 18418 +f 18288 18418 18417 +f 18289 18290 18418 +f 18290 18419 18418 +f 18290 18291 18420 +f 18290 18420 18419 +f 18291 18292 18420 +f 18292 18421 18420 +f 18292 18293 18422 +f 18292 18422 18421 +f 18293 18294 18422 +f 18294 18423 18422 +f 18294 18295 18424 +f 18294 18424 18423 +f 18295 18296 18424 +f 18296 18425 18424 +f 18296 18297 18426 +f 18296 18426 18425 +f 18297 18298 18426 +f 18298 18427 18426 +f 18298 18299 18428 +f 18298 18428 18427 +f 18299 18300 18428 +f 18300 18429 18428 +f 18300 18301 18430 +f 18300 18430 18429 +f 18301 18302 18430 +f 18302 18431 18430 +f 18302 18303 18432 +f 18302 18432 18431 +f 18303 18304 18432 +f 18304 18433 18432 +f 18304 18305 18434 +f 18304 18434 18433 +f 18305 18306 18434 +f 18306 18435 18434 +f 18306 18307 18436 +f 18306 18436 18435 +f 18307 18308 18436 +f 18308 18437 18436 +f 18308 18309 18438 +f 18308 18438 18437 +f 18309 18310 18438 +f 18310 18439 18438 +f 18310 18311 18440 +f 18310 18440 18439 +f 18311 18312 18440 +f 18312 18441 18440 +f 18312 18313 18442 +f 18312 18442 18441 +f 18313 18314 18442 +f 18314 18443 18442 +f 18314 18315 18444 +f 18314 18444 18443 +f 18315 18316 18444 +f 18316 18445 18444 +f 18316 18317 18446 +f 18316 18446 18445 +f 18317 18318 18446 +f 18318 18447 18446 +f 18318 18319 18448 +f 18318 18448 18447 +f 18319 18320 18448 +f 18320 18449 18448 +f 18320 18321 18450 +f 18320 18450 18449 +f 18321 18322 18450 +f 18322 18451 18450 +f 18322 18323 18452 +f 18322 18452 18451 +f 18323 18324 18452 +f 18324 18453 18452 +f 18324 18325 18454 +f 18324 18454 18453 +f 18325 18326 18454 +f 18326 18455 18454 +f 18326 18327 18456 +f 18326 18456 18455 +f 18327 18328 18456 +f 18328 18457 18456 +f 18328 18329 18458 +f 18328 18458 18457 +f 18329 18330 18458 +f 18330 18459 18458 +f 18330 18331 18460 +f 18330 18460 18459 +f 18331 18332 18460 +f 18332 18461 18460 +f 18332 18333 18462 +f 18332 18462 18461 +f 18333 18334 18462 +f 18334 18463 18462 +f 18334 18335 18464 +f 18334 18464 18463 +f 18335 18336 18464 +f 18336 18465 18464 +f 18336 18337 18466 +f 18336 18466 18465 +f 18337 18338 18466 +f 18338 18467 18466 +f 18338 18339 18468 +f 18338 18468 18467 +f 18339 18340 18468 +f 18340 18469 18468 +f 18340 18341 18470 +f 18340 18470 18469 +f 18341 18342 18470 +f 18342 18471 18470 +f 18342 18343 18472 +f 18342 18472 18471 +f 18343 18344 18472 +f 18344 18473 18472 +f 18344 18345 18474 +f 18344 18474 18473 +f 18345 18346 18474 +f 18346 18475 18474 +f 18346 18347 18476 +f 18346 18476 18475 +f 18347 18348 18476 +f 18348 18477 18476 +f 18348 18349 18478 +f 18348 18478 18477 +f 18349 18350 18478 
+f 18350 18479 18478 +f 18350 18351 18480 +f 18350 18480 18479 +f 18351 18352 18480 +f 18352 18481 18480 +f 18352 18353 18482 +f 18352 18482 18481 +f 18353 18354 18482 +f 18354 18483 18482 +f 18354 18355 18484 +f 18354 18484 18483 +f 18355 18356 18484 +f 18356 18485 18484 +f 18356 18357 18486 +f 18356 18486 18485 +f 18357 18358 18486 +f 18358 18487 18486 +f 18358 18359 18488 +f 18358 18488 18487 +f 18359 18360 18488 +f 18360 18489 18488 +f 18360 18361 18490 +f 18360 18490 18489 +f 18361 18362 18490 +f 18362 18491 18490 +f 18362 18363 18492 +f 18362 18492 18491 +f 18363 18364 18492 +f 18364 18493 18492 +f 18364 18365 18494 +f 18364 18494 18493 +f 18365 18366 18494 +f 18366 18495 18494 +f 18366 18367 18496 +f 18366 18496 18495 +f 18367 18368 18496 +f 18368 18497 18496 +f 18368 18369 18498 +f 18368 18498 18497 +f 18369 18370 18498 +f 18370 18499 18498 +f 18370 18371 18500 +f 18370 18500 18499 +f 18371 18372 18500 +f 18372 18501 18500 +f 18372 18373 18502 +f 18372 18502 18501 +f 18373 18374 18502 +f 18374 18503 18502 +f 18374 18375 18504 +f 18374 18504 18503 +f 18375 18376 18504 +f 18376 18505 18504 +f 18376 18377 18506 +f 18376 18506 18505 +f 18377 18378 18506 +f 18378 18507 18506 +f 18378 18379 18508 +f 18378 18508 18507 +f 18379 18380 18508 +f 18380 18509 18508 +f 18380 18381 18510 +f 18380 18510 18509 +f 18381 18382 18510 +f 18382 18511 18510 +f 18382 18383 18512 +f 18382 18512 18511 +f 18383 18384 18512 +f 18384 18513 18512 +f 18384 18385 18514 +f 18384 18514 18513 +f 18385 18386 18514 +f 18386 18515 18514 +f 18386 18387 18516 +f 18386 18516 18515 +f 18387 18388 18516 +f 18388 18517 18516 +f 18388 18389 18518 +f 18388 18518 18517 +f 18389 18390 18518 +f 18390 18519 18518 +f 18390 18391 18520 +f 18390 18520 18519 +f 18391 18392 18520 +f 18392 18521 18520 +f 18392 18393 18522 +f 18392 18522 18521 +f 18393 18394 18522 +f 18394 18523 18522 +f 18394 18395 18524 +f 18394 18524 18523 +f 18395 18396 18524 +f 18396 18525 18524 +f 18396 18397 18526 +f 18396 18526 18525 +f 18397 18398 18526 +f 18398 18527 18526 +f 18398 18399 18528 +f 18398 18528 18527 +f 18400 18401 18530 +f 18400 18530 18529 +f 18401 18402 18530 +f 18402 18531 18530 +f 18402 18403 18532 +f 18402 18532 18531 +f 18403 18404 18532 +f 18404 18533 18532 +f 18404 18405 18534 +f 18404 18534 18533 +f 18405 18406 18534 +f 18406 18535 18534 +f 18406 18407 18536 +f 18406 18536 18535 +f 18407 18408 18536 +f 18408 18537 18536 +f 18408 18409 18538 +f 18408 18538 18537 +f 18409 18410 18538 +f 18410 18539 18538 +f 18410 18411 18540 +f 18410 18540 18539 +f 18411 18412 18540 +f 18412 18541 18540 +f 18412 18413 18542 +f 18412 18542 18541 +f 18413 18414 18542 +f 18414 18543 18542 +f 18414 18415 18544 +f 18414 18544 18543 +f 18415 18416 18544 +f 18416 18545 18544 +f 18416 18417 18546 +f 18416 18546 18545 +f 18417 18418 18546 +f 18418 18547 18546 +f 18418 18419 18548 +f 18418 18548 18547 +f 18419 18420 18548 +f 18420 18549 18548 +f 18420 18421 18550 +f 18420 18550 18549 +f 18421 18422 18550 +f 18422 18551 18550 +f 18422 18423 18552 +f 18422 18552 18551 +f 18423 18424 18552 +f 18424 18553 18552 +f 18424 18425 18554 +f 18424 18554 18553 +f 18425 18426 18554 +f 18426 18555 18554 +f 18426 18427 18556 +f 18426 18556 18555 +f 18427 18428 18556 +f 18428 18557 18556 +f 18428 18429 18558 +f 18428 18558 18557 +f 18429 18430 18558 +f 18430 18559 18558 +f 18430 18431 18560 +f 18430 18560 18559 +f 18431 18432 18560 +f 18432 18561 18560 +f 18432 18433 18562 +f 18432 18562 18561 +f 18433 18434 18562 +f 18434 18563 18562 +f 18434 18435 18564 +f 18434 18564 18563 +f 
18435 18436 18564 +f 18436 18565 18564 +f 18436 18437 18566 +f 18436 18566 18565 +f 18437 18438 18566 +f 18438 18567 18566 +f 18438 18439 18568 +f 18438 18568 18567 +f 18439 18440 18568 +f 18440 18569 18568 +f 18440 18441 18570 +f 18440 18570 18569 +f 18441 18442 18570 +f 18442 18571 18570 +f 18442 18443 18572 +f 18442 18572 18571 +f 18443 18444 18572 +f 18444 18573 18572 +f 18444 18445 18574 +f 18444 18574 18573 +f 18445 18446 18574 +f 18446 18575 18574 +f 18446 18447 18576 +f 18446 18576 18575 +f 18447 18448 18576 +f 18448 18577 18576 +f 18448 18449 18578 +f 18448 18578 18577 +f 18449 18450 18578 +f 18450 18579 18578 +f 18450 18451 18580 +f 18450 18580 18579 +f 18451 18452 18580 +f 18452 18581 18580 +f 18452 18453 18582 +f 18452 18582 18581 +f 18453 18454 18582 +f 18454 18583 18582 +f 18454 18455 18584 +f 18454 18584 18583 +f 18455 18456 18584 +f 18456 18585 18584 +f 18456 18457 18586 +f 18456 18586 18585 +f 18457 18458 18586 +f 18458 18587 18586 +f 18458 18459 18588 +f 18458 18588 18587 +f 18459 18460 18588 +f 18460 18589 18588 +f 18460 18461 18590 +f 18460 18590 18589 +f 18461 18462 18590 +f 18462 18591 18590 +f 18462 18463 18592 +f 18462 18592 18591 +f 18463 18464 18592 +f 18464 18593 18592 +f 18464 18465 18594 +f 18464 18594 18593 +f 18465 18466 18594 +f 18466 18595 18594 +f 18466 18467 18596 +f 18466 18596 18595 +f 18467 18468 18596 +f 18468 18597 18596 +f 18468 18469 18598 +f 18468 18598 18597 +f 18469 18470 18598 +f 18470 18599 18598 +f 18470 18471 18600 +f 18470 18600 18599 +f 18471 18472 18600 +f 18472 18601 18600 +f 18472 18473 18602 +f 18472 18602 18601 +f 18473 18474 18602 +f 18474 18603 18602 +f 18474 18475 18604 +f 18474 18604 18603 +f 18475 18476 18604 +f 18476 18605 18604 +f 18476 18477 18606 +f 18476 18606 18605 +f 18477 18478 18606 +f 18478 18607 18606 +f 18478 18479 18608 +f 18478 18608 18607 +f 18479 18480 18608 +f 18480 18609 18608 +f 18480 18481 18610 +f 18480 18610 18609 +f 18481 18482 18610 +f 18482 18611 18610 +f 18482 18483 18612 +f 18482 18612 18611 +f 18483 18484 18612 +f 18484 18613 18612 +f 18484 18485 18614 +f 18484 18614 18613 +f 18485 18486 18614 +f 18486 18615 18614 +f 18486 18487 18616 +f 18486 18616 18615 +f 18487 18488 18616 +f 18488 18617 18616 +f 18488 18489 18618 +f 18488 18618 18617 +f 18489 18490 18618 +f 18490 18619 18618 +f 18490 18491 18620 +f 18490 18620 18619 +f 18491 18492 18620 +f 18492 18621 18620 +f 18492 18493 18622 +f 18492 18622 18621 +f 18493 18494 18622 +f 18494 18623 18622 +f 18494 18495 18624 +f 18494 18624 18623 +f 18495 18496 18624 +f 18496 18625 18624 +f 18496 18497 18626 +f 18496 18626 18625 +f 18497 18498 18626 +f 18498 18627 18626 +f 18498 18499 18628 +f 18498 18628 18627 +f 18499 18500 18628 +f 18500 18629 18628 +f 18500 18501 18630 +f 18500 18630 18629 +f 18501 18502 18630 +f 18502 18631 18630 +f 18502 18503 18632 +f 18502 18632 18631 +f 18503 18504 18632 +f 18504 18633 18632 +f 18504 18505 18634 +f 18504 18634 18633 +f 18505 18506 18634 +f 18506 18635 18634 +f 18506 18507 18636 +f 18506 18636 18635 +f 18507 18508 18636 +f 18508 18637 18636 +f 18508 18509 18638 +f 18508 18638 18637 +f 18509 18510 18638 +f 18510 18639 18638 +f 18510 18511 18640 +f 18510 18640 18639 +f 18511 18512 18640 +f 18512 18641 18640 +f 18512 18513 18642 +f 18512 18642 18641 +f 18513 18514 18642 +f 18514 18643 18642 +f 18514 18515 18644 +f 18514 18644 18643 +f 18515 18516 18644 +f 18516 18645 18644 +f 18516 18517 18646 +f 18516 18646 18645 +f 18517 18518 18646 +f 18518 18647 18646 +f 18518 18519 18648 +f 18518 18648 18647 +f 18519 18520 18648 +f 18520 
18649 18648 +f 18520 18521 18650 +f 18520 18650 18649 +f 18521 18522 18650 +f 18522 18651 18650 +f 18522 18523 18652 +f 18522 18652 18651 +f 18523 18524 18652 +f 18524 18653 18652 +f 18524 18525 18654 +f 18524 18654 18653 +f 18525 18526 18654 +f 18526 18655 18654 +f 18526 18527 18656 +f 18526 18656 18655 +f 18527 18528 18656 +f 18528 18657 18656 +f 18529 18530 18658 +f 18530 18659 18658 +f 18530 18531 18660 +f 18530 18660 18659 +f 18531 18532 18660 +f 18532 18661 18660 +f 18532 18533 18662 +f 18532 18662 18661 +f 18533 18534 18662 +f 18534 18663 18662 +f 18534 18535 18664 +f 18534 18664 18663 +f 18535 18536 18664 +f 18536 18665 18664 +f 18536 18537 18666 +f 18536 18666 18665 +f 18537 18538 18666 +f 18538 18667 18666 +f 18538 18539 18668 +f 18538 18668 18667 +f 18539 18540 18668 +f 18540 18669 18668 +f 18540 18541 18670 +f 18540 18670 18669 +f 18541 18542 18670 +f 18542 18671 18670 +f 18542 18543 18672 +f 18542 18672 18671 +f 18543 18544 18672 +f 18544 18673 18672 +f 18544 18545 18674 +f 18544 18674 18673 +f 18545 18546 18674 +f 18546 18675 18674 +f 18546 18547 18676 +f 18546 18676 18675 +f 18547 18548 18676 +f 18548 18677 18676 +f 18548 18549 18678 +f 18548 18678 18677 +f 18549 18550 18678 +f 18550 18679 18678 +f 18550 18551 18680 +f 18550 18680 18679 +f 18551 18552 18680 +f 18552 18681 18680 +f 18552 18553 18682 +f 18552 18682 18681 +f 18553 18554 18682 +f 18554 18683 18682 +f 18554 18555 18684 +f 18554 18684 18683 +f 18555 18556 18684 +f 18556 18685 18684 +f 18556 18557 18686 +f 18556 18686 18685 +f 18557 18558 18686 +f 18558 18687 18686 +f 18558 18559 18688 +f 18558 18688 18687 +f 18559 18560 18688 +f 18560 18689 18688 +f 18560 18561 18690 +f 18560 18690 18689 +f 18561 18562 18690 +f 18562 18691 18690 +f 18562 18563 18692 +f 18562 18692 18691 +f 18563 18564 18692 +f 18564 18693 18692 +f 18564 18565 18694 +f 18564 18694 18693 +f 18565 18566 18694 +f 18566 18695 18694 +f 18566 18567 18696 +f 18566 18696 18695 +f 18567 18568 18696 +f 18568 18697 18696 +f 18568 18569 18698 +f 18568 18698 18697 +f 18569 18570 18698 +f 18570 18699 18698 +f 18570 18571 18700 +f 18570 18700 18699 +f 18571 18572 18700 +f 18572 18701 18700 +f 18572 18573 18702 +f 18572 18702 18701 +f 18573 18574 18702 +f 18574 18703 18702 +f 18574 18575 18704 +f 18574 18704 18703 +f 18575 18576 18704 +f 18576 18705 18704 +f 18576 18577 18706 +f 18576 18706 18705 +f 18577 18578 18706 +f 18578 18707 18706 +f 18578 18579 18708 +f 18578 18708 18707 +f 18579 18580 18708 +f 18580 18709 18708 +f 18580 18581 18710 +f 18580 18710 18709 +f 18581 18582 18710 +f 18582 18711 18710 +f 18582 18583 18712 +f 18582 18712 18711 +f 18583 18584 18712 +f 18584 18713 18712 +f 18584 18585 18714 +f 18584 18714 18713 +f 18585 18586 18714 +f 18586 18715 18714 +f 18586 18587 18716 +f 18586 18716 18715 +f 18587 18588 18716 +f 18588 18717 18716 +f 18588 18589 18718 +f 18588 18718 18717 +f 18589 18590 18718 +f 18590 18719 18718 +f 18590 18591 18720 +f 18590 18720 18719 +f 18591 18592 18720 +f 18592 18721 18720 +f 18592 18593 18722 +f 18592 18722 18721 +f 18593 18594 18722 +f 18594 18723 18722 +f 18594 18595 18724 +f 18594 18724 18723 +f 18595 18596 18724 +f 18596 18725 18724 +f 18596 18597 18726 +f 18596 18726 18725 +f 18597 18598 18726 +f 18598 18727 18726 +f 18598 18599 18728 +f 18598 18728 18727 +f 18599 18600 18728 +f 18600 18729 18728 +f 18600 18601 18730 +f 18600 18730 18729 +f 18601 18602 18730 +f 18602 18731 18730 +f 18602 18603 18732 +f 18602 18732 18731 +f 18603 18604 18732 +f 18604 18733 18732 +f 18604 18605 18734 +f 18604 18734 18733 +f 18605 18606 
18734 +f 18606 18735 18734 +f 18606 18607 18736 +f 18606 18736 18735 +f 18607 18608 18736 +f 18608 18737 18736 +f 18608 18609 18738 +f 18608 18738 18737 +f 18609 18610 18738 +f 18610 18739 18738 +f 18610 18611 18740 +f 18610 18740 18739 +f 18611 18612 18740 +f 18612 18741 18740 +f 18612 18613 18742 +f 18612 18742 18741 +f 18613 18614 18742 +f 18614 18743 18742 +f 18614 18615 18744 +f 18614 18744 18743 +f 18615 18616 18744 +f 18616 18745 18744 +f 18616 18617 18746 +f 18616 18746 18745 +f 18617 18618 18746 +f 18618 18747 18746 +f 18618 18619 18748 +f 18618 18748 18747 +f 18619 18620 18748 +f 18620 18749 18748 +f 18620 18621 18750 +f 18620 18750 18749 +f 18621 18622 18750 +f 18622 18751 18750 +f 18622 18623 18752 +f 18622 18752 18751 +f 18623 18624 18752 +f 18624 18753 18752 +f 18624 18625 18754 +f 18624 18754 18753 +f 18625 18626 18754 +f 18626 18755 18754 +f 18626 18627 18756 +f 18626 18756 18755 +f 18627 18628 18756 +f 18628 18757 18756 +f 18628 18629 18758 +f 18628 18758 18757 +f 18629 18630 18758 +f 18630 18759 18758 +f 18630 18631 18760 +f 18630 18760 18759 +f 18631 18632 18760 +f 18632 18761 18760 +f 18632 18633 18762 +f 18632 18762 18761 +f 18633 18634 18762 +f 18634 18763 18762 +f 18634 18635 18764 +f 18634 18764 18763 +f 18635 18636 18764 +f 18636 18765 18764 +f 18636 18637 18766 +f 18636 18766 18765 +f 18637 18638 18766 +f 18638 18767 18766 +f 18638 18639 18768 +f 18638 18768 18767 +f 18639 18640 18768 +f 18640 18769 18768 +f 18640 18641 18770 +f 18640 18770 18769 +f 18641 18642 18770 +f 18642 18771 18770 +f 18642 18643 18772 +f 18642 18772 18771 +f 18643 18644 18772 +f 18644 18773 18772 +f 18644 18645 18774 +f 18644 18774 18773 +f 18645 18646 18774 +f 18646 18775 18774 +f 18646 18647 18776 +f 18646 18776 18775 +f 18647 18648 18776 +f 18648 18777 18776 +f 18648 18649 18778 +f 18648 18778 18777 +f 18649 18650 18778 +f 18650 18779 18778 +f 18650 18651 18780 +f 18650 18780 18779 +f 18651 18652 18780 +f 18652 18781 18780 +f 18652 18653 18782 +f 18652 18782 18781 +f 18653 18654 18782 +f 18654 18783 18782 +f 18654 18655 18784 +f 18654 18784 18783 +f 18655 18656 18784 +f 18656 18785 18784 +f 18656 18657 18786 +f 18656 18786 18785 +f 18658 18659 18788 +f 18658 18788 18787 +f 18659 18660 18788 +f 18660 18789 18788 +f 18660 18661 18790 +f 18660 18790 18789 +f 18661 18662 18790 +f 18662 18791 18790 +f 18662 18663 18792 +f 18662 18792 18791 +f 18663 18664 18792 +f 18664 18793 18792 +f 18664 18665 18794 +f 18664 18794 18793 +f 18665 18666 18794 +f 18666 18795 18794 +f 18666 18667 18796 +f 18666 18796 18795 +f 18667 18668 18796 +f 18668 18797 18796 +f 18668 18669 18798 +f 18668 18798 18797 +f 18669 18670 18798 +f 18670 18799 18798 +f 18670 18671 18800 +f 18670 18800 18799 +f 18671 18672 18800 +f 18672 18801 18800 +f 18672 18673 18802 +f 18672 18802 18801 +f 18673 18674 18802 +f 18674 18803 18802 +f 18674 18675 18804 +f 18674 18804 18803 +f 18675 18676 18804 +f 18676 18805 18804 +f 18676 18677 18806 +f 18676 18806 18805 +f 18677 18678 18806 +f 18678 18807 18806 +f 18678 18679 18808 +f 18678 18808 18807 +f 18679 18680 18808 +f 18680 18809 18808 +f 18680 18681 18810 +f 18680 18810 18809 +f 18681 18682 18810 +f 18682 18811 18810 +f 18682 18683 18812 +f 18682 18812 18811 +f 18683 18684 18812 +f 18684 18813 18812 +f 18684 18685 18814 +f 18684 18814 18813 +f 18685 18686 18814 +f 18686 18815 18814 +f 18686 18687 18816 +f 18686 18816 18815 +f 18687 18688 18816 +f 18688 18817 18816 +f 18688 18689 18818 +f 18688 18818 18817 +f 18689 18690 18818 +f 18690 18819 18818 +f 18690 18691 18820 +f 18690 18820 18819 
+f 18691 18692 18820 +f 18692 18821 18820 +f 18692 18693 18822 +f 18692 18822 18821 +f 18693 18694 18822 +f 18694 18823 18822 +f 18694 18695 18824 +f 18694 18824 18823 +f 18695 18696 18824 +f 18696 18825 18824 +f 18696 18697 18826 +f 18696 18826 18825 +f 18697 18698 18826 +f 18698 18827 18826 +f 18698 18699 18828 +f 18698 18828 18827 +f 18699 18700 18828 +f 18700 18829 18828 +f 18700 18701 18830 +f 18700 18830 18829 +f 18701 18702 18830 +f 18702 18831 18830 +f 18702 18703 18832 +f 18702 18832 18831 +f 18703 18704 18832 +f 18704 18833 18832 +f 18704 18705 18834 +f 18704 18834 18833 +f 18705 18706 18834 +f 18706 18835 18834 +f 18706 18707 18836 +f 18706 18836 18835 +f 18707 18708 18836 +f 18708 18837 18836 +f 18708 18709 18838 +f 18708 18838 18837 +f 18709 18710 18838 +f 18710 18839 18838 +f 18710 18711 18840 +f 18710 18840 18839 +f 18711 18712 18840 +f 18712 18841 18840 +f 18712 18713 18842 +f 18712 18842 18841 +f 18713 18714 18842 +f 18714 18843 18842 +f 18714 18715 18844 +f 18714 18844 18843 +f 18715 18716 18844 +f 18716 18845 18844 +f 18716 18717 18846 +f 18716 18846 18845 +f 18717 18718 18846 +f 18718 18847 18846 +f 18718 18719 18848 +f 18718 18848 18847 +f 18719 18720 18848 +f 18720 18849 18848 +f 18720 18721 18850 +f 18720 18850 18849 +f 18721 18722 18850 +f 18722 18851 18850 +f 18722 18723 18852 +f 18722 18852 18851 +f 18723 18724 18852 +f 18724 18853 18852 +f 18724 18725 18854 +f 18724 18854 18853 +f 18725 18726 18854 +f 18726 18855 18854 +f 18726 18727 18856 +f 18726 18856 18855 +f 18727 18728 18856 +f 18728 18857 18856 +f 18728 18729 18858 +f 18728 18858 18857 +f 18729 18730 18858 +f 18730 18859 18858 +f 18730 18731 18860 +f 18730 18860 18859 +f 18731 18732 18860 +f 18732 18861 18860 +f 18732 18733 18862 +f 18732 18862 18861 +f 18733 18734 18862 +f 18734 18863 18862 +f 18734 18735 18864 +f 18734 18864 18863 +f 18735 18736 18864 +f 18736 18865 18864 +f 18736 18737 18866 +f 18736 18866 18865 +f 18737 18738 18866 +f 18738 18867 18866 +f 18738 18739 18868 +f 18738 18868 18867 +f 18739 18740 18868 +f 18740 18869 18868 +f 18740 18741 18870 +f 18740 18870 18869 +f 18741 18742 18870 +f 18742 18871 18870 +f 18742 18743 18872 +f 18742 18872 18871 +f 18743 18744 18872 +f 18744 18873 18872 +f 18744 18745 18874 +f 18744 18874 18873 +f 18745 18746 18874 +f 18746 18875 18874 +f 18746 18747 18876 +f 18746 18876 18875 +f 18747 18748 18876 +f 18748 18877 18876 +f 18748 18749 18878 +f 18748 18878 18877 +f 18749 18750 18878 +f 18750 18879 18878 +f 18750 18751 18880 +f 18750 18880 18879 +f 18751 18752 18880 +f 18752 18881 18880 +f 18752 18753 18882 +f 18752 18882 18881 +f 18753 18754 18882 +f 18754 18883 18882 +f 18754 18755 18884 +f 18754 18884 18883 +f 18755 18756 18884 +f 18756 18885 18884 +f 18756 18757 18886 +f 18756 18886 18885 +f 18757 18758 18886 +f 18758 18887 18886 +f 18758 18759 18888 +f 18758 18888 18887 +f 18759 18760 18888 +f 18760 18889 18888 +f 18760 18761 18890 +f 18760 18890 18889 +f 18761 18762 18890 +f 18762 18891 18890 +f 18762 18763 18892 +f 18762 18892 18891 +f 18763 18764 18892 +f 18764 18893 18892 +f 18764 18765 18894 +f 18764 18894 18893 +f 18765 18766 18894 +f 18766 18895 18894 +f 18766 18767 18896 +f 18766 18896 18895 +f 18767 18768 18896 +f 18768 18897 18896 +f 18768 18769 18898 +f 18768 18898 18897 +f 18769 18770 18898 +f 18770 18899 18898 +f 18770 18771 18900 +f 18770 18900 18899 +f 18771 18772 18900 +f 18772 18901 18900 +f 18772 18773 18902 +f 18772 18902 18901 +f 18773 18774 18902 +f 18774 18903 18902 +f 18774 18775 18904 +f 18774 18904 18903 +f 18775 18776 18904 +f 
18776 18905 18904 +f 18776 18777 18906 +f 18776 18906 18905 +f 18777 18778 18906 +f 18778 18907 18906 +f 18778 18779 18908 +f 18778 18908 18907 +f 18779 18780 18908 +f 18780 18909 18908 +f 18780 18781 18910 +f 18780 18910 18909 +f 18781 18782 18910 +f 18782 18911 18910 +f 18782 18783 18912 +f 18782 18912 18911 +f 18783 18784 18912 +f 18784 18913 18912 +f 18784 18785 18914 +f 18784 18914 18913 +f 18785 18786 18914 +f 18786 18915 18914 +f 18787 18788 18916 +f 18788 18917 18916 +f 18788 18789 18918 +f 18788 18918 18917 +f 18789 18790 18918 +f 18790 18919 18918 +f 18790 18791 18920 +f 18790 18920 18919 +f 18791 18792 18920 +f 18792 18921 18920 +f 18792 18793 18922 +f 18792 18922 18921 +f 18793 18794 18922 +f 18794 18923 18922 +f 18794 18795 18924 +f 18794 18924 18923 +f 18795 18796 18924 +f 18796 18925 18924 +f 18796 18797 18926 +f 18796 18926 18925 +f 18797 18798 18926 +f 18798 18927 18926 +f 18798 18799 18928 +f 18798 18928 18927 +f 18799 18800 18928 +f 18800 18929 18928 +f 18800 18801 18930 +f 18800 18930 18929 +f 18801 18802 18930 +f 18802 18931 18930 +f 18802 18803 18932 +f 18802 18932 18931 +f 18803 18804 18932 +f 18804 18933 18932 +f 18804 18805 18934 +f 18804 18934 18933 +f 18805 18806 18934 +f 18806 18935 18934 +f 18806 18807 18936 +f 18806 18936 18935 +f 18807 18808 18936 +f 18808 18937 18936 +f 18808 18809 18938 +f 18808 18938 18937 +f 18809 18810 18938 +f 18810 18939 18938 +f 18810 18811 18940 +f 18810 18940 18939 +f 18811 18812 18940 +f 18812 18941 18940 +f 18812 18813 18942 +f 18812 18942 18941 +f 18813 18814 18942 +f 18814 18943 18942 +f 18814 18815 18944 +f 18814 18944 18943 +f 18815 18816 18944 +f 18816 18945 18944 +f 18816 18817 18946 +f 18816 18946 18945 +f 18817 18818 18946 +f 18818 18947 18946 +f 18818 18819 18948 +f 18818 18948 18947 +f 18819 18820 18948 +f 18820 18949 18948 +f 18820 18821 18950 +f 18820 18950 18949 +f 18821 18822 18950 +f 18822 18951 18950 +f 18822 18823 18952 +f 18822 18952 18951 +f 18823 18824 18952 +f 18824 18953 18952 +f 18824 18825 18954 +f 18824 18954 18953 +f 18825 18826 18954 +f 18826 18955 18954 +f 18826 18827 18956 +f 18826 18956 18955 +f 18827 18828 18956 +f 18828 18957 18956 +f 18828 18829 18958 +f 18828 18958 18957 +f 18829 18830 18958 +f 18830 18959 18958 +f 18830 18831 18960 +f 18830 18960 18959 +f 18831 18832 18960 +f 18832 18961 18960 +f 18832 18833 18962 +f 18832 18962 18961 +f 18833 18834 18962 +f 18834 18963 18962 +f 18834 18835 18964 +f 18834 18964 18963 +f 18835 18836 18964 +f 18836 18965 18964 +f 18836 18837 18966 +f 18836 18966 18965 +f 18837 18838 18966 +f 18838 18967 18966 +f 18838 18839 18968 +f 18838 18968 18967 +f 18839 18840 18968 +f 18840 18969 18968 +f 18840 18841 18970 +f 18840 18970 18969 +f 18841 18842 18970 +f 18842 18971 18970 +f 18842 18843 18972 +f 18842 18972 18971 +f 18843 18844 18972 +f 18844 18973 18972 +f 18844 18845 18974 +f 18844 18974 18973 +f 18845 18846 18974 +f 18846 18975 18974 +f 18846 18847 18976 +f 18846 18976 18975 +f 18847 18848 18976 +f 18848 18977 18976 +f 18848 18849 18978 +f 18848 18978 18977 +f 18849 18850 18978 +f 18850 18979 18978 +f 18850 18851 18980 +f 18850 18980 18979 +f 18851 18852 18980 +f 18852 18981 18980 +f 18852 18853 18982 +f 18852 18982 18981 +f 18853 18854 18982 +f 18854 18983 18982 +f 18854 18855 18984 +f 18854 18984 18983 +f 18855 18856 18984 +f 18856 18985 18984 +f 18856 18857 18986 +f 18856 18986 18985 +f 18857 18858 18986 +f 18858 18987 18986 +f 18858 18859 18988 +f 18858 18988 18987 +f 18859 18860 18988 +f 18860 18989 18988 +f 18860 18861 18990 +f 18860 18990 18989 +f 18861 
18862 18990 +f 18862 18991 18990 +f 18862 18863 18992 +f 18862 18992 18991 +f 18863 18864 18992 +f 18864 18993 18992 +f 18864 18865 18994 +f 18864 18994 18993 +f 18865 18866 18994 +f 18866 18995 18994 +f 18866 18867 18996 +f 18866 18996 18995 +f 18867 18868 18996 +f 18868 18997 18996 +f 18868 18869 18998 +f 18868 18998 18997 +f 18869 18870 18998 +f 18870 18999 18998 +f 18870 18871 19000 +f 18870 19000 18999 +f 18871 18872 19000 +f 18872 19001 19000 +f 18872 18873 19002 +f 18872 19002 19001 +f 18873 18874 19002 +f 18874 19003 19002 +f 18874 18875 19004 +f 18874 19004 19003 +f 18875 18876 19004 +f 18876 19005 19004 +f 18876 18877 19006 +f 18876 19006 19005 +f 18877 18878 19006 +f 18878 19007 19006 +f 18878 18879 19008 +f 18878 19008 19007 +f 18879 18880 19008 +f 18880 19009 19008 +f 18880 18881 19010 +f 18880 19010 19009 +f 18881 18882 19010 +f 18882 19011 19010 +f 18882 18883 19012 +f 18882 19012 19011 +f 18883 18884 19012 +f 18884 19013 19012 +f 18884 18885 19014 +f 18884 19014 19013 +f 18885 18886 19014 +f 18886 19015 19014 +f 18886 18887 19016 +f 18886 19016 19015 +f 18887 18888 19016 +f 18888 19017 19016 +f 18888 18889 19018 +f 18888 19018 19017 +f 18889 18890 19018 +f 18890 19019 19018 +f 18890 18891 19020 +f 18890 19020 19019 +f 18891 18892 19020 +f 18892 19021 19020 +f 18892 18893 19022 +f 18892 19022 19021 +f 18893 18894 19022 +f 18894 19023 19022 +f 18894 18895 19024 +f 18894 19024 19023 +f 18895 18896 19024 +f 18896 19025 19024 +f 18896 18897 19026 +f 18896 19026 19025 +f 18897 18898 19026 +f 18898 19027 19026 +f 18898 18899 19028 +f 18898 19028 19027 +f 18899 18900 19028 +f 18900 19029 19028 +f 18900 18901 19030 +f 18900 19030 19029 +f 18901 18902 19030 +f 18902 19031 19030 +f 18902 18903 19032 +f 18902 19032 19031 +f 18903 18904 19032 +f 18904 19033 19032 +f 18904 18905 19034 +f 18904 19034 19033 +f 18905 18906 19034 +f 18906 19035 19034 +f 18906 18907 19036 +f 18906 19036 19035 +f 18907 18908 19036 +f 18908 19037 19036 +f 18908 18909 19038 +f 18908 19038 19037 +f 18909 18910 19038 +f 18910 19039 19038 +f 18910 18911 19040 +f 18910 19040 19039 +f 18911 18912 19040 +f 18912 19041 19040 +f 18912 18913 19042 +f 18912 19042 19041 +f 18913 18914 19042 +f 18914 19043 19042 +f 18914 18915 19044 +f 18914 19044 19043 +f 18916 18917 19046 +f 18916 19046 19045 +f 18917 18918 19046 +f 18918 19047 19046 +f 18918 18919 19048 +f 18918 19048 19047 +f 18919 18920 19048 +f 18920 19049 19048 +f 18920 18921 19050 +f 18920 19050 19049 +f 18921 18922 19050 +f 18922 19051 19050 +f 18922 18923 19052 +f 18922 19052 19051 +f 18923 18924 19052 +f 18924 19053 19052 +f 18924 18925 19054 +f 18924 19054 19053 +f 18925 18926 19054 +f 18926 19055 19054 +f 18926 18927 19056 +f 18926 19056 19055 +f 18927 18928 19056 +f 18928 19057 19056 +f 18928 18929 19058 +f 18928 19058 19057 +f 18929 18930 19058 +f 18930 19059 19058 +f 18930 18931 19060 +f 18930 19060 19059 +f 18931 18932 19060 +f 18932 19061 19060 +f 18932 18933 19062 +f 18932 19062 19061 +f 18933 18934 19062 +f 18934 19063 19062 +f 18934 18935 19064 +f 18934 19064 19063 +f 18935 18936 19064 +f 18936 19065 19064 +f 18936 18937 19066 +f 18936 19066 19065 +f 18937 18938 19066 +f 18938 19067 19066 +f 18938 18939 19068 +f 18938 19068 19067 +f 18939 18940 19068 +f 18940 19069 19068 +f 18940 18941 19070 +f 18940 19070 19069 +f 18941 18942 19070 +f 18942 19071 19070 +f 18942 18943 19072 +f 18942 19072 19071 +f 18943 18944 19072 +f 18944 19073 19072 +f 18944 18945 19074 +f 18944 19074 19073 +f 18945 18946 19074 +f 18946 19075 19074 +f 18946 18947 19076 +f 18946 19076 
19075 +f 18947 18948 19076 +f 18948 19077 19076 +f 18948 18949 19078 +f 18948 19078 19077 +f 18949 18950 19078 +f 18950 19079 19078 +f 18950 18951 19080 +f 18950 19080 19079 +f 18951 18952 19080 +f 18952 19081 19080 +f 18952 18953 19082 +f 18952 19082 19081 +f 18953 18954 19082 +f 18954 19083 19082 +f 18954 18955 19084 +f 18954 19084 19083 +f 18955 18956 19084 +f 18956 19085 19084 +f 18956 18957 19086 +f 18956 19086 19085 +f 18957 18958 19086 +f 18958 19087 19086 +f 18958 18959 19088 +f 18958 19088 19087 +f 18959 18960 19088 +f 18960 19089 19088 +f 18960 18961 19090 +f 18960 19090 19089 +f 18961 18962 19090 +f 18962 19091 19090 +f 18962 18963 19092 +f 18962 19092 19091 +f 18963 18964 19092 +f 18964 19093 19092 +f 18964 18965 19094 +f 18964 19094 19093 +f 18965 18966 19094 +f 18966 19095 19094 +f 18966 18967 19096 +f 18966 19096 19095 +f 18967 18968 19096 +f 18968 19097 19096 +f 18968 18969 19098 +f 18968 19098 19097 +f 18969 18970 19098 +f 18970 19099 19098 +f 18970 18971 19100 +f 18970 19100 19099 +f 18971 18972 19100 +f 18972 19101 19100 +f 18972 18973 19102 +f 18972 19102 19101 +f 18973 18974 19102 +f 18974 19103 19102 +f 18974 18975 19104 +f 18974 19104 19103 +f 18975 18976 19104 +f 18976 19105 19104 +f 18976 18977 19106 +f 18976 19106 19105 +f 18977 18978 19106 +f 18978 19107 19106 +f 18978 18979 19108 +f 18978 19108 19107 +f 18979 18980 19108 +f 18980 19109 19108 +f 18980 18981 19110 +f 18980 19110 19109 +f 18981 18982 19110 +f 18982 19111 19110 +f 18982 18983 19112 +f 18982 19112 19111 +f 18983 18984 19112 +f 18984 19113 19112 +f 18984 18985 19114 +f 18984 19114 19113 +f 18985 18986 19114 +f 18986 19115 19114 +f 18986 18987 19116 +f 18986 19116 19115 +f 18987 18988 19116 +f 18988 19117 19116 +f 18988 18989 19118 +f 18988 19118 19117 +f 18989 18990 19118 +f 18990 19119 19118 +f 18990 18991 19120 +f 18990 19120 19119 +f 18991 18992 19120 +f 18992 19121 19120 +f 18992 18993 19122 +f 18992 19122 19121 +f 18993 18994 19122 +f 18994 19123 19122 +f 18994 18995 19124 +f 18994 19124 19123 +f 18995 18996 19124 +f 18996 19125 19124 +f 18996 18997 19126 +f 18996 19126 19125 +f 18997 18998 19126 +f 18998 19127 19126 +f 18998 18999 19128 +f 18998 19128 19127 +f 18999 19000 19128 +f 19000 19129 19128 +f 19000 19001 19130 +f 19000 19130 19129 +f 19001 19002 19130 +f 19002 19131 19130 +f 19002 19003 19132 +f 19002 19132 19131 +f 19003 19004 19132 +f 19004 19133 19132 +f 19004 19005 19134 +f 19004 19134 19133 +f 19005 19006 19134 +f 19006 19135 19134 +f 19006 19007 19136 +f 19006 19136 19135 +f 19007 19008 19136 +f 19008 19137 19136 +f 19008 19009 19138 +f 19008 19138 19137 +f 19009 19010 19138 +f 19010 19139 19138 +f 19010 19011 19140 +f 19010 19140 19139 +f 19011 19012 19140 +f 19012 19141 19140 +f 19012 19013 19142 +f 19012 19142 19141 +f 19013 19014 19142 +f 19014 19143 19142 +f 19014 19015 19144 +f 19014 19144 19143 +f 19015 19016 19144 +f 19016 19145 19144 +f 19016 19017 19146 +f 19016 19146 19145 +f 19017 19018 19146 +f 19018 19147 19146 +f 19018 19019 19148 +f 19018 19148 19147 +f 19019 19020 19148 +f 19020 19149 19148 +f 19020 19021 19150 +f 19020 19150 19149 +f 19021 19022 19150 +f 19022 19151 19150 +f 19022 19023 19152 +f 19022 19152 19151 +f 19023 19024 19152 +f 19024 19153 19152 +f 19024 19025 19154 +f 19024 19154 19153 +f 19025 19026 19154 +f 19026 19155 19154 +f 19026 19027 19156 +f 19026 19156 19155 +f 19027 19028 19156 +f 19028 19157 19156 +f 19028 19029 19158 +f 19028 19158 19157 +f 19029 19030 19158 +f 19030 19159 19158 +f 19030 19031 19160 +f 19030 19160 19159 +f 19031 19032 19160 
+f 19032 19161 19160 +f 19032 19033 19162 +f 19032 19162 19161 +f 19033 19034 19162 +f 19034 19163 19162 +f 19034 19035 19164 +f 19034 19164 19163 +f 19035 19036 19164 +f 19036 19165 19164 +f 19036 19037 19166 +f 19036 19166 19165 +f 19037 19038 19166 +f 19038 19167 19166 +f 19038 19039 19168 +f 19038 19168 19167 +f 19039 19040 19168 +f 19040 19169 19168 +f 19040 19041 19170 +f 19040 19170 19169 +f 19041 19042 19170 +f 19042 19171 19170 +f 19042 19043 19172 +f 19042 19172 19171 +f 19043 19044 19172 +f 19044 19173 19172 +f 19045 19046 19174 +f 19046 19175 19174 +f 19046 19047 19176 +f 19046 19176 19175 +f 19047 19048 19176 +f 19048 19177 19176 +f 19048 19049 19178 +f 19048 19178 19177 +f 19049 19050 19178 +f 19050 19179 19178 +f 19050 19051 19180 +f 19050 19180 19179 +f 19051 19052 19180 +f 19052 19181 19180 +f 19052 19053 19182 +f 19052 19182 19181 +f 19053 19054 19182 +f 19054 19183 19182 +f 19054 19055 19184 +f 19054 19184 19183 +f 19055 19056 19184 +f 19056 19185 19184 +f 19056 19057 19186 +f 19056 19186 19185 +f 19057 19058 19186 +f 19058 19187 19186 +f 19058 19059 19188 +f 19058 19188 19187 +f 19059 19060 19188 +f 19060 19189 19188 +f 19060 19061 19190 +f 19060 19190 19189 +f 19061 19062 19190 +f 19062 19191 19190 +f 19062 19063 19192 +f 19062 19192 19191 +f 19063 19064 19192 +f 19064 19193 19192 +f 19064 19065 19194 +f 19064 19194 19193 +f 19065 19066 19194 +f 19066 19195 19194 +f 19066 19067 19196 +f 19066 19196 19195 +f 19067 19068 19196 +f 19068 19197 19196 +f 19068 19069 19198 +f 19068 19198 19197 +f 19069 19070 19198 +f 19070 19199 19198 +f 19070 19071 19200 +f 19070 19200 19199 +f 19071 19072 19200 +f 19072 19201 19200 +f 19072 19073 19202 +f 19072 19202 19201 +f 19073 19074 19202 +f 19074 19203 19202 +f 19074 19075 19204 +f 19074 19204 19203 +f 19075 19076 19204 +f 19076 19205 19204 +f 19076 19077 19206 +f 19076 19206 19205 +f 19077 19078 19206 +f 19078 19207 19206 +f 19078 19079 19208 +f 19078 19208 19207 +f 19079 19080 19208 +f 19080 19209 19208 +f 19080 19081 19210 +f 19080 19210 19209 +f 19081 19082 19210 +f 19082 19211 19210 +f 19082 19083 19212 +f 19082 19212 19211 +f 19083 19084 19212 +f 19084 19213 19212 +f 19084 19085 19214 +f 19084 19214 19213 +f 19085 19086 19214 +f 19086 19215 19214 +f 19086 19087 19216 +f 19086 19216 19215 +f 19087 19088 19216 +f 19088 19217 19216 +f 19088 19089 19218 +f 19088 19218 19217 +f 19089 19090 19218 +f 19090 19219 19218 +f 19090 19091 19220 +f 19090 19220 19219 +f 19091 19092 19220 +f 19092 19221 19220 +f 19092 19093 19222 +f 19092 19222 19221 +f 19093 19094 19222 +f 19094 19223 19222 +f 19094 19095 19224 +f 19094 19224 19223 +f 19095 19096 19224 +f 19096 19225 19224 +f 19096 19097 19226 +f 19096 19226 19225 +f 19097 19098 19226 +f 19098 19227 19226 +f 19098 19099 19228 +f 19098 19228 19227 +f 19099 19100 19228 +f 19100 19229 19228 +f 19100 19101 19230 +f 19100 19230 19229 +f 19101 19102 19230 +f 19102 19231 19230 +f 19102 19103 19232 +f 19102 19232 19231 +f 19103 19104 19232 +f 19104 19233 19232 +f 19104 19105 19234 +f 19104 19234 19233 +f 19105 19106 19234 +f 19106 19235 19234 +f 19106 19107 19236 +f 19106 19236 19235 +f 19107 19108 19236 +f 19108 19237 19236 +f 19108 19109 19238 +f 19108 19238 19237 +f 19109 19110 19238 +f 19110 19239 19238 +f 19110 19111 19240 +f 19110 19240 19239 +f 19111 19112 19240 +f 19112 19241 19240 +f 19112 19113 19242 +f 19112 19242 19241 +f 19113 19114 19242 +f 19114 19243 19242 +f 19114 19115 19244 +f 19114 19244 19243 +f 19115 19116 19244 +f 19116 19245 19244 +f 19116 19117 19246 +f 19116 19246 19245 +f 
19117 19118 19246 +f 19118 19247 19246 +f 19118 19119 19248 +f 19118 19248 19247 +f 19119 19120 19248 +f 19120 19249 19248 +f 19120 19121 19250 +f 19120 19250 19249 +f 19121 19122 19250 +f 19122 19251 19250 +f 19122 19123 19252 +f 19122 19252 19251 +f 19123 19124 19252 +f 19124 19253 19252 +f 19124 19125 19254 +f 19124 19254 19253 +f 19125 19126 19254 +f 19126 19255 19254 +f 19126 19127 19256 +f 19126 19256 19255 +f 19127 19128 19256 +f 19128 19257 19256 +f 19128 19129 19258 +f 19128 19258 19257 +f 19129 19130 19258 +f 19130 19259 19258 +f 19130 19131 19260 +f 19130 19260 19259 +f 19131 19132 19260 +f 19132 19261 19260 +f 19132 19133 19262 +f 19132 19262 19261 +f 19133 19134 19262 +f 19134 19263 19262 +f 19134 19135 19264 +f 19134 19264 19263 +f 19135 19136 19264 +f 19136 19265 19264 +f 19136 19137 19266 +f 19136 19266 19265 +f 19137 19138 19266 +f 19138 19267 19266 +f 19138 19139 19268 +f 19138 19268 19267 +f 19139 19140 19268 +f 19140 19269 19268 +f 19140 19141 19270 +f 19140 19270 19269 +f 19141 19142 19270 +f 19142 19271 19270 +f 19142 19143 19272 +f 19142 19272 19271 +f 19143 19144 19272 +f 19144 19273 19272 +f 19144 19145 19274 +f 19144 19274 19273 +f 19145 19146 19274 +f 19146 19275 19274 +f 19146 19147 19276 +f 19146 19276 19275 +f 19147 19148 19276 +f 19148 19277 19276 +f 19148 19149 19278 +f 19148 19278 19277 +f 19149 19150 19278 +f 19150 19279 19278 +f 19150 19151 19280 +f 19150 19280 19279 +f 19151 19152 19280 +f 19152 19281 19280 +f 19152 19153 19282 +f 19152 19282 19281 +f 19153 19154 19282 +f 19154 19283 19282 +f 19154 19155 19284 +f 19154 19284 19283 +f 19155 19156 19284 +f 19156 19285 19284 +f 19156 19157 19286 +f 19156 19286 19285 +f 19157 19158 19286 +f 19158 19287 19286 +f 19158 19159 19288 +f 19158 19288 19287 +f 19159 19160 19288 +f 19160 19289 19288 +f 19160 19161 19290 +f 19160 19290 19289 +f 19161 19162 19290 +f 19162 19291 19290 +f 19162 19163 19292 +f 19162 19292 19291 +f 19163 19164 19292 +f 19164 19293 19292 +f 19164 19165 19294 +f 19164 19294 19293 +f 19165 19166 19294 +f 19166 19295 19294 +f 19166 19167 19296 +f 19166 19296 19295 +f 19167 19168 19296 +f 19168 19297 19296 +f 19168 19169 19298 +f 19168 19298 19297 +f 19169 19170 19298 +f 19170 19299 19298 +f 19170 19171 19300 +f 19170 19300 19299 +f 19171 19172 19300 +f 19172 19301 19300 +f 19172 19173 19302 +f 19172 19302 19301 +f 19174 19175 19304 +f 19174 19304 19303 +f 19175 19176 19304 +f 19176 19305 19304 +f 19176 19177 19306 +f 19176 19306 19305 +f 19177 19178 19306 +f 19178 19307 19306 +f 19178 19179 19308 +f 19178 19308 19307 +f 19179 19180 19308 +f 19180 19309 19308 +f 19180 19181 19310 +f 19180 19310 19309 +f 19181 19182 19310 +f 19182 19311 19310 +f 19182 19183 19312 +f 19182 19312 19311 +f 19183 19184 19312 +f 19184 19313 19312 +f 19184 19185 19314 +f 19184 19314 19313 +f 19185 19186 19314 +f 19186 19315 19314 +f 19186 19187 19316 +f 19186 19316 19315 +f 19187 19188 19316 +f 19188 19317 19316 +f 19188 19189 19318 +f 19188 19318 19317 +f 19189 19190 19318 +f 19190 19319 19318 +f 19190 19191 19320 +f 19190 19320 19319 +f 19191 19192 19320 +f 19192 19321 19320 +f 19192 19193 19322 +f 19192 19322 19321 +f 19193 19194 19322 +f 19194 19323 19322 +f 19194 19195 19324 +f 19194 19324 19323 +f 19195 19196 19324 +f 19196 19325 19324 +f 19196 19197 19326 +f 19196 19326 19325 +f 19197 19198 19326 +f 19198 19327 19326 +f 19198 19199 19328 +f 19198 19328 19327 +f 19199 19200 19328 +f 19200 19329 19328 +f 19200 19201 19330 +f 19200 19330 19329 +f 19201 19202 19330 +f 19202 19331 19330 +f 19202 19203 19332 +f 19202 
19332 19331 +f 19203 19204 19332 +f 19204 19333 19332 +f 19204 19205 19334 +f 19204 19334 19333 +f 19205 19206 19334 +f 19206 19335 19334 +f 19206 19207 19336 +f 19206 19336 19335 +f 19207 19208 19336 +f 19208 19337 19336 +f 19208 19209 19338 +f 19208 19338 19337 +f 19209 19210 19338 +f 19210 19339 19338 +f 19210 19211 19340 +f 19210 19340 19339 +f 19211 19212 19340 +f 19212 19341 19340 +f 19212 19213 19342 +f 19212 19342 19341 +f 19213 19214 19342 +f 19214 19343 19342 +f 19214 19215 19344 +f 19214 19344 19343 +f 19215 19216 19344 +f 19216 19345 19344 +f 19216 19217 19346 +f 19216 19346 19345 +f 19217 19218 19346 +f 19218 19347 19346 +f 19218 19219 19348 +f 19218 19348 19347 +f 19219 19220 19348 +f 19220 19349 19348 +f 19220 19221 19350 +f 19220 19350 19349 +f 19221 19222 19350 +f 19222 19351 19350 +f 19222 19223 19352 +f 19222 19352 19351 +f 19223 19224 19352 +f 19224 19353 19352 +f 19224 19225 19354 +f 19224 19354 19353 +f 19225 19226 19354 +f 19226 19355 19354 +f 19226 19227 19356 +f 19226 19356 19355 +f 19227 19228 19356 +f 19228 19357 19356 +f 19228 19229 19358 +f 19228 19358 19357 +f 19229 19230 19358 +f 19230 19359 19358 +f 19230 19231 19360 +f 19230 19360 19359 +f 19231 19232 19360 +f 19232 19361 19360 +f 19232 19233 19362 +f 19232 19362 19361 +f 19233 19234 19362 +f 19234 19363 19362 +f 19234 19235 19364 +f 19234 19364 19363 +f 19235 19236 19364 +f 19236 19365 19364 +f 19236 19237 19366 +f 19236 19366 19365 +f 19237 19238 19366 +f 19238 19367 19366 +f 19238 19239 19368 +f 19238 19368 19367 +f 19239 19240 19368 +f 19240 19369 19368 +f 19240 19241 19370 +f 19240 19370 19369 +f 19241 19242 19370 +f 19242 19371 19370 +f 19242 19243 19372 +f 19242 19372 19371 +f 19243 19244 19372 +f 19244 19373 19372 +f 19244 19245 19374 +f 19244 19374 19373 +f 19245 19246 19374 +f 19246 19375 19374 +f 19246 19247 19376 +f 19246 19376 19375 +f 19247 19248 19376 +f 19248 19377 19376 +f 19248 19249 19378 +f 19248 19378 19377 +f 19249 19250 19378 +f 19250 19379 19378 +f 19250 19251 19380 +f 19250 19380 19379 +f 19251 19252 19380 +f 19252 19381 19380 +f 19252 19253 19382 +f 19252 19382 19381 +f 19253 19254 19382 +f 19254 19383 19382 +f 19254 19255 19384 +f 19254 19384 19383 +f 19255 19256 19384 +f 19256 19385 19384 +f 19256 19257 19386 +f 19256 19386 19385 +f 19257 19258 19386 +f 19258 19387 19386 +f 19258 19259 19388 +f 19258 19388 19387 +f 19259 19260 19388 +f 19260 19389 19388 +f 19260 19261 19390 +f 19260 19390 19389 +f 19261 19262 19390 +f 19262 19391 19390 +f 19262 19263 19392 +f 19262 19392 19391 +f 19263 19264 19392 +f 19264 19393 19392 +f 19264 19265 19394 +f 19264 19394 19393 +f 19265 19266 19394 +f 19266 19395 19394 +f 19266 19267 19396 +f 19266 19396 19395 +f 19267 19268 19396 +f 19268 19397 19396 +f 19268 19269 19398 +f 19268 19398 19397 +f 19269 19270 19398 +f 19270 19399 19398 +f 19270 19271 19400 +f 19270 19400 19399 +f 19271 19272 19400 +f 19272 19401 19400 +f 19272 19273 19402 +f 19272 19402 19401 +f 19273 19274 19402 +f 19274 19403 19402 +f 19274 19275 19404 +f 19274 19404 19403 +f 19275 19276 19404 +f 19276 19405 19404 +f 19276 19277 19406 +f 19276 19406 19405 +f 19277 19278 19406 +f 19278 19407 19406 +f 19278 19279 19408 +f 19278 19408 19407 +f 19279 19280 19408 +f 19280 19409 19408 +f 19280 19281 19410 +f 19280 19410 19409 +f 19281 19282 19410 +f 19282 19411 19410 +f 19282 19283 19412 +f 19282 19412 19411 +f 19283 19284 19412 +f 19284 19413 19412 +f 19284 19285 19414 +f 19284 19414 19413 +f 19285 19286 19414 +f 19286 19415 19414 +f 19286 19287 19416 +f 19286 19416 19415 +f 19287 19288 
19416
[... several thousand additional Wavefront OBJ triangle-face records ("+f v1 v2 v3"), spanning vertex indices 19288 through 21037 in the same regular two-triangles-per-quad grid pattern, omitted ...]
20909 21038 +f 20908 21038 21037 +f 20909 20910 21038 +f 20910 21039 21038 +f 20910 20911 21040 +f 20910 21040 21039 +f 20911 20912 21040 +f 20912 21041 21040 +f 20912 20913 21042 +f 20912 21042 21041 +f 20913 20914 21042 +f 20914 21043 21042 +f 20914 20915 21044 +f 20914 21044 21043 +f 20915 20916 21044 +f 20916 21045 21044 +f 20916 20917 21046 +f 20916 21046 21045 +f 20917 20918 21046 +f 20918 21047 21046 +f 20918 20919 21048 +f 20918 21048 21047 +f 20919 20920 21048 +f 20920 21049 21048 +f 20920 20921 21050 +f 20920 21050 21049 +f 20921 20922 21050 +f 20922 21051 21050 +f 20922 20923 21052 +f 20922 21052 21051 +f 20923 20924 21052 +f 20924 21053 21052 +f 20924 20925 21054 +f 20924 21054 21053 +f 20925 20926 21054 +f 20926 21055 21054 +f 20926 20927 21056 +f 20926 21056 21055 +f 20927 20928 21056 +f 20928 21057 21056 +f 20928 20929 21058 +f 20928 21058 21057 +f 20929 20930 21058 +f 20930 21059 21058 +f 20930 20931 21060 +f 20930 21060 21059 +f 20931 20932 21060 +f 20932 21061 21060 +f 20932 20933 21062 +f 20932 21062 21061 +f 20933 20934 21062 +f 20934 21063 21062 +f 20934 20935 21064 +f 20934 21064 21063 +f 20935 20936 21064 +f 20936 21065 21064 +f 20936 20937 21066 +f 20936 21066 21065 +f 20937 20938 21066 +f 20938 21067 21066 +f 20938 20939 21068 +f 20938 21068 21067 +f 20939 20940 21068 +f 20940 21069 21068 +f 20940 20941 21070 +f 20940 21070 21069 +f 20941 20942 21070 +f 20942 21071 21070 +f 20942 20943 21072 +f 20942 21072 21071 +f 20943 20944 21072 +f 20944 21073 21072 +f 20944 20945 21074 +f 20944 21074 21073 +f 20945 20946 21074 +f 20946 21075 21074 +f 20946 20947 21076 +f 20946 21076 21075 +f 20947 20948 21076 +f 20948 21077 21076 +f 20948 20949 21078 +f 20948 21078 21077 +f 20949 20950 21078 +f 20950 21079 21078 +f 20950 20951 21080 +f 20950 21080 21079 +f 20951 20952 21080 +f 20952 21081 21080 +f 20952 20953 21082 +f 20952 21082 21081 +f 20953 20954 21082 +f 20954 21083 21082 +f 20954 20955 21084 +f 20954 21084 21083 +f 20955 20956 21084 +f 20956 21085 21084 +f 20956 20957 21086 +f 20956 21086 21085 +f 20957 20958 21086 +f 20958 21087 21086 +f 20958 20959 21088 +f 20958 21088 21087 +f 20959 20960 21088 +f 20960 21089 21088 +f 20960 20961 21090 +f 20960 21090 21089 +f 20961 20962 21090 +f 20962 21091 21090 +f 20962 20963 21092 +f 20962 21092 21091 +f 20963 20964 21092 +f 20964 21093 21092 +f 20964 20965 21094 +f 20964 21094 21093 +f 20965 20966 21094 +f 20966 21095 21094 +f 20966 20967 21096 +f 20966 21096 21095 +f 20967 20968 21096 +f 20968 21097 21096 +f 20968 20969 21098 +f 20968 21098 21097 +f 20969 20970 21098 +f 20970 21099 21098 +f 20970 20971 21100 +f 20970 21100 21099 +f 20971 20972 21100 +f 20972 21101 21100 +f 20972 20973 21102 +f 20972 21102 21101 +f 20973 20974 21102 +f 20974 21103 21102 +f 20974 20975 21104 +f 20974 21104 21103 +f 20975 20976 21104 +f 20976 21105 21104 +f 20976 20977 21106 +f 20976 21106 21105 +f 20977 20978 21106 +f 20978 21107 21106 +f 20978 20979 21108 +f 20978 21108 21107 +f 20980 20981 21110 +f 20980 21110 21109 +f 20981 20982 21110 +f 20982 21111 21110 +f 20982 20983 21112 +f 20982 21112 21111 +f 20983 20984 21112 +f 20984 21113 21112 +f 20984 20985 21114 +f 20984 21114 21113 +f 20985 20986 21114 +f 20986 21115 21114 +f 20986 20987 21116 +f 20986 21116 21115 +f 20987 20988 21116 +f 20988 21117 21116 +f 20988 20989 21118 +f 20988 21118 21117 +f 20989 20990 21118 +f 20990 21119 21118 +f 20990 20991 21120 +f 20990 21120 21119 +f 20991 20992 21120 +f 20992 21121 21120 +f 20992 20993 21122 +f 20992 21122 21121 +f 20993 20994 21122 +f 20994 21123 
21122 +f 20994 20995 21124 +f 20994 21124 21123 +f 20995 20996 21124 +f 20996 21125 21124 +f 20996 20997 21126 +f 20996 21126 21125 +f 20997 20998 21126 +f 20998 21127 21126 +f 20998 20999 21128 +f 20998 21128 21127 +f 20999 21000 21128 +f 21000 21129 21128 +f 21000 21001 21130 +f 21000 21130 21129 +f 21001 21002 21130 +f 21002 21131 21130 +f 21002 21003 21132 +f 21002 21132 21131 +f 21003 21004 21132 +f 21004 21133 21132 +f 21004 21005 21134 +f 21004 21134 21133 +f 21005 21006 21134 +f 21006 21135 21134 +f 21006 21007 21136 +f 21006 21136 21135 +f 21007 21008 21136 +f 21008 21137 21136 +f 21008 21009 21138 +f 21008 21138 21137 +f 21009 21010 21138 +f 21010 21139 21138 +f 21010 21011 21140 +f 21010 21140 21139 +f 21011 21012 21140 +f 21012 21141 21140 +f 21012 21013 21142 +f 21012 21142 21141 +f 21013 21014 21142 +f 21014 21143 21142 +f 21014 21015 21144 +f 21014 21144 21143 +f 21015 21016 21144 +f 21016 21145 21144 +f 21016 21017 21146 +f 21016 21146 21145 +f 21017 21018 21146 +f 21018 21147 21146 +f 21018 21019 21148 +f 21018 21148 21147 +f 21019 21020 21148 +f 21020 21149 21148 +f 21020 21021 21150 +f 21020 21150 21149 +f 21021 21022 21150 +f 21022 21151 21150 +f 21022 21023 21152 +f 21022 21152 21151 +f 21023 21024 21152 +f 21024 21153 21152 +f 21024 21025 21154 +f 21024 21154 21153 +f 21025 21026 21154 +f 21026 21155 21154 +f 21026 21027 21156 +f 21026 21156 21155 +f 21027 21028 21156 +f 21028 21157 21156 +f 21028 21029 21158 +f 21028 21158 21157 +f 21029 21030 21158 +f 21030 21159 21158 +f 21030 21031 21160 +f 21030 21160 21159 +f 21031 21032 21160 +f 21032 21161 21160 +f 21032 21033 21162 +f 21032 21162 21161 +f 21033 21034 21162 +f 21034 21163 21162 +f 21034 21035 21164 +f 21034 21164 21163 +f 21035 21036 21164 +f 21036 21165 21164 +f 21036 21037 21166 +f 21036 21166 21165 +f 21037 21038 21166 +f 21038 21167 21166 +f 21038 21039 21168 +f 21038 21168 21167 +f 21039 21040 21168 +f 21040 21169 21168 +f 21040 21041 21170 +f 21040 21170 21169 +f 21041 21042 21170 +f 21042 21171 21170 +f 21042 21043 21172 +f 21042 21172 21171 +f 21043 21044 21172 +f 21044 21173 21172 +f 21044 21045 21174 +f 21044 21174 21173 +f 21045 21046 21174 +f 21046 21175 21174 +f 21046 21047 21176 +f 21046 21176 21175 +f 21047 21048 21176 +f 21048 21177 21176 +f 21048 21049 21178 +f 21048 21178 21177 +f 21049 21050 21178 +f 21050 21179 21178 +f 21050 21051 21180 +f 21050 21180 21179 +f 21051 21052 21180 +f 21052 21181 21180 +f 21052 21053 21182 +f 21052 21182 21181 +f 21053 21054 21182 +f 21054 21183 21182 +f 21054 21055 21184 +f 21054 21184 21183 +f 21055 21056 21184 +f 21056 21185 21184 +f 21056 21057 21186 +f 21056 21186 21185 +f 21057 21058 21186 +f 21058 21187 21186 +f 21058 21059 21188 +f 21058 21188 21187 +f 21059 21060 21188 +f 21060 21189 21188 +f 21060 21061 21190 +f 21060 21190 21189 +f 21061 21062 21190 +f 21062 21191 21190 +f 21062 21063 21192 +f 21062 21192 21191 +f 21063 21064 21192 +f 21064 21193 21192 +f 21064 21065 21194 +f 21064 21194 21193 +f 21065 21066 21194 +f 21066 21195 21194 +f 21066 21067 21196 +f 21066 21196 21195 +f 21067 21068 21196 +f 21068 21197 21196 +f 21068 21069 21198 +f 21068 21198 21197 +f 21069 21070 21198 +f 21070 21199 21198 +f 21070 21071 21200 +f 21070 21200 21199 +f 21071 21072 21200 +f 21072 21201 21200 +f 21072 21073 21202 +f 21072 21202 21201 +f 21073 21074 21202 +f 21074 21203 21202 +f 21074 21075 21204 +f 21074 21204 21203 +f 21075 21076 21204 +f 21076 21205 21204 +f 21076 21077 21206 +f 21076 21206 21205 +f 21077 21078 21206 +f 21078 21207 21206 +f 21078 21079 21208 
+f 21078 21208 21207 +f 21079 21080 21208 +f 21080 21209 21208 +f 21080 21081 21210 +f 21080 21210 21209 +f 21081 21082 21210 +f 21082 21211 21210 +f 21082 21083 21212 +f 21082 21212 21211 +f 21083 21084 21212 +f 21084 21213 21212 +f 21084 21085 21214 +f 21084 21214 21213 +f 21085 21086 21214 +f 21086 21215 21214 +f 21086 21087 21216 +f 21086 21216 21215 +f 21087 21088 21216 +f 21088 21217 21216 +f 21088 21089 21218 +f 21088 21218 21217 +f 21089 21090 21218 +f 21090 21219 21218 +f 21090 21091 21220 +f 21090 21220 21219 +f 21091 21092 21220 +f 21092 21221 21220 +f 21092 21093 21222 +f 21092 21222 21221 +f 21093 21094 21222 +f 21094 21223 21222 +f 21094 21095 21224 +f 21094 21224 21223 +f 21095 21096 21224 +f 21096 21225 21224 +f 21096 21097 21226 +f 21096 21226 21225 +f 21097 21098 21226 +f 21098 21227 21226 +f 21098 21099 21228 +f 21098 21228 21227 +f 21099 21100 21228 +f 21100 21229 21228 +f 21100 21101 21230 +f 21100 21230 21229 +f 21101 21102 21230 +f 21102 21231 21230 +f 21102 21103 21232 +f 21102 21232 21231 +f 21103 21104 21232 +f 21104 21233 21232 +f 21104 21105 21234 +f 21104 21234 21233 +f 21105 21106 21234 +f 21106 21235 21234 +f 21106 21107 21236 +f 21106 21236 21235 +f 21107 21108 21236 +f 21108 21237 21236 +f 21109 21110 21238 +f 21110 21239 21238 +f 21110 21111 21240 +f 21110 21240 21239 +f 21111 21112 21240 +f 21112 21241 21240 +f 21112 21113 21242 +f 21112 21242 21241 +f 21113 21114 21242 +f 21114 21243 21242 +f 21114 21115 21244 +f 21114 21244 21243 +f 21115 21116 21244 +f 21116 21245 21244 +f 21116 21117 21246 +f 21116 21246 21245 +f 21117 21118 21246 +f 21118 21247 21246 +f 21118 21119 21248 +f 21118 21248 21247 +f 21119 21120 21248 +f 21120 21249 21248 +f 21120 21121 21250 +f 21120 21250 21249 +f 21121 21122 21250 +f 21122 21251 21250 +f 21122 21123 21252 +f 21122 21252 21251 +f 21123 21124 21252 +f 21124 21253 21252 +f 21124 21125 21254 +f 21124 21254 21253 +f 21125 21126 21254 +f 21126 21255 21254 +f 21126 21127 21256 +f 21126 21256 21255 +f 21127 21128 21256 +f 21128 21257 21256 +f 21128 21129 21258 +f 21128 21258 21257 +f 21129 21130 21258 +f 21130 21259 21258 +f 21130 21131 21260 +f 21130 21260 21259 +f 21131 21132 21260 +f 21132 21261 21260 +f 21132 21133 21262 +f 21132 21262 21261 +f 21133 21134 21262 +f 21134 21263 21262 +f 21134 21135 21264 +f 21134 21264 21263 +f 21135 21136 21264 +f 21136 21265 21264 +f 21136 21137 21266 +f 21136 21266 21265 +f 21137 21138 21266 +f 21138 21267 21266 +f 21138 21139 21268 +f 21138 21268 21267 +f 21139 21140 21268 +f 21140 21269 21268 +f 21140 21141 21270 +f 21140 21270 21269 +f 21141 21142 21270 +f 21142 21271 21270 +f 21142 21143 21272 +f 21142 21272 21271 +f 21143 21144 21272 +f 21144 21273 21272 +f 21144 21145 21274 +f 21144 21274 21273 +f 21145 21146 21274 +f 21146 21275 21274 +f 21146 21147 21276 +f 21146 21276 21275 +f 21147 21148 21276 +f 21148 21277 21276 +f 21148 21149 21278 +f 21148 21278 21277 +f 21149 21150 21278 +f 21150 21279 21278 +f 21150 21151 21280 +f 21150 21280 21279 +f 21151 21152 21280 +f 21152 21281 21280 +f 21152 21153 21282 +f 21152 21282 21281 +f 21153 21154 21282 +f 21154 21283 21282 +f 21154 21155 21284 +f 21154 21284 21283 +f 21155 21156 21284 +f 21156 21285 21284 +f 21156 21157 21286 +f 21156 21286 21285 +f 21157 21158 21286 +f 21158 21287 21286 +f 21158 21159 21288 +f 21158 21288 21287 +f 21159 21160 21288 +f 21160 21289 21288 +f 21160 21161 21290 +f 21160 21290 21289 +f 21161 21162 21290 +f 21162 21291 21290 +f 21162 21163 21292 +f 21162 21292 21291 +f 21163 21164 21292 +f 21164 21293 21292 +f 
21164 21165 21294 +f 21164 21294 21293 +f 21165 21166 21294 +f 21166 21295 21294 +f 21166 21167 21296 +f 21166 21296 21295 +f 21167 21168 21296 +f 21168 21297 21296 +f 21168 21169 21298 +f 21168 21298 21297 +f 21169 21170 21298 +f 21170 21299 21298 +f 21170 21171 21300 +f 21170 21300 21299 +f 21171 21172 21300 +f 21172 21301 21300 +f 21172 21173 21302 +f 21172 21302 21301 +f 21173 21174 21302 +f 21174 21303 21302 +f 21174 21175 21304 +f 21174 21304 21303 +f 21175 21176 21304 +f 21176 21305 21304 +f 21176 21177 21306 +f 21176 21306 21305 +f 21177 21178 21306 +f 21178 21307 21306 +f 21178 21179 21308 +f 21178 21308 21307 +f 21179 21180 21308 +f 21180 21309 21308 +f 21180 21181 21310 +f 21180 21310 21309 +f 21181 21182 21310 +f 21182 21311 21310 +f 21182 21183 21312 +f 21182 21312 21311 +f 21183 21184 21312 +f 21184 21313 21312 +f 21184 21185 21314 +f 21184 21314 21313 +f 21185 21186 21314 +f 21186 21315 21314 +f 21186 21187 21316 +f 21186 21316 21315 +f 21187 21188 21316 +f 21188 21317 21316 +f 21188 21189 21318 +f 21188 21318 21317 +f 21189 21190 21318 +f 21190 21319 21318 +f 21190 21191 21320 +f 21190 21320 21319 +f 21191 21192 21320 +f 21192 21321 21320 +f 21192 21193 21322 +f 21192 21322 21321 +f 21193 21194 21322 +f 21194 21323 21322 +f 21194 21195 21324 +f 21194 21324 21323 +f 21195 21196 21324 +f 21196 21325 21324 +f 21196 21197 21326 +f 21196 21326 21325 +f 21197 21198 21326 +f 21198 21327 21326 +f 21198 21199 21328 +f 21198 21328 21327 +f 21199 21200 21328 +f 21200 21329 21328 +f 21200 21201 21330 +f 21200 21330 21329 +f 21201 21202 21330 +f 21202 21331 21330 +f 21202 21203 21332 +f 21202 21332 21331 +f 21203 21204 21332 +f 21204 21333 21332 +f 21204 21205 21334 +f 21204 21334 21333 +f 21205 21206 21334 +f 21206 21335 21334 +f 21206 21207 21336 +f 21206 21336 21335 +f 21207 21208 21336 +f 21208 21337 21336 +f 21208 21209 21338 +f 21208 21338 21337 +f 21209 21210 21338 +f 21210 21339 21338 +f 21210 21211 21340 +f 21210 21340 21339 +f 21211 21212 21340 +f 21212 21341 21340 +f 21212 21213 21342 +f 21212 21342 21341 +f 21213 21214 21342 +f 21214 21343 21342 +f 21214 21215 21344 +f 21214 21344 21343 +f 21215 21216 21344 +f 21216 21345 21344 +f 21216 21217 21346 +f 21216 21346 21345 +f 21217 21218 21346 +f 21218 21347 21346 +f 21218 21219 21348 +f 21218 21348 21347 +f 21219 21220 21348 +f 21220 21349 21348 +f 21220 21221 21350 +f 21220 21350 21349 +f 21221 21222 21350 +f 21222 21351 21350 +f 21222 21223 21352 +f 21222 21352 21351 +f 21223 21224 21352 +f 21224 21353 21352 +f 21224 21225 21354 +f 21224 21354 21353 +f 21225 21226 21354 +f 21226 21355 21354 +f 21226 21227 21356 +f 21226 21356 21355 +f 21227 21228 21356 +f 21228 21357 21356 +f 21228 21229 21358 +f 21228 21358 21357 +f 21229 21230 21358 +f 21230 21359 21358 +f 21230 21231 21360 +f 21230 21360 21359 +f 21231 21232 21360 +f 21232 21361 21360 +f 21232 21233 21362 +f 21232 21362 21361 +f 21233 21234 21362 +f 21234 21363 21362 +f 21234 21235 21364 +f 21234 21364 21363 +f 21235 21236 21364 +f 21236 21365 21364 +f 21236 21237 21366 +f 21236 21366 21365 +f 21238 21239 21368 +f 21238 21368 21367 +f 21239 21240 21368 +f 21240 21369 21368 +f 21240 21241 21370 +f 21240 21370 21369 +f 21241 21242 21370 +f 21242 21371 21370 +f 21242 21243 21372 +f 21242 21372 21371 +f 21243 21244 21372 +f 21244 21373 21372 +f 21244 21245 21374 +f 21244 21374 21373 +f 21245 21246 21374 +f 21246 21375 21374 +f 21246 21247 21376 +f 21246 21376 21375 +f 21247 21248 21376 +f 21248 21377 21376 +f 21248 21249 21378 +f 21248 21378 21377 +f 21249 21250 21378 +f 21250 
21379 21378 +f 21250 21251 21380 +f 21250 21380 21379 +f 21251 21252 21380 +f 21252 21381 21380 +f 21252 21253 21382 +f 21252 21382 21381 +f 21253 21254 21382 +f 21254 21383 21382 +f 21254 21255 21384 +f 21254 21384 21383 +f 21255 21256 21384 +f 21256 21385 21384 +f 21256 21257 21386 +f 21256 21386 21385 +f 21257 21258 21386 +f 21258 21387 21386 +f 21258 21259 21388 +f 21258 21388 21387 +f 21259 21260 21388 +f 21260 21389 21388 +f 21260 21261 21390 +f 21260 21390 21389 +f 21261 21262 21390 +f 21262 21391 21390 +f 21262 21263 21392 +f 21262 21392 21391 +f 21263 21264 21392 +f 21264 21393 21392 +f 21264 21265 21394 +f 21264 21394 21393 +f 21265 21266 21394 +f 21266 21395 21394 +f 21266 21267 21396 +f 21266 21396 21395 +f 21267 21268 21396 +f 21268 21397 21396 +f 21268 21269 21398 +f 21268 21398 21397 +f 21269 21270 21398 +f 21270 21399 21398 +f 21270 21271 21400 +f 21270 21400 21399 +f 21271 21272 21400 +f 21272 21401 21400 +f 21272 21273 21402 +f 21272 21402 21401 +f 21273 21274 21402 +f 21274 21403 21402 +f 21274 21275 21404 +f 21274 21404 21403 +f 21275 21276 21404 +f 21276 21405 21404 +f 21276 21277 21406 +f 21276 21406 21405 +f 21277 21278 21406 +f 21278 21407 21406 +f 21278 21279 21408 +f 21278 21408 21407 +f 21279 21280 21408 +f 21280 21409 21408 +f 21280 21281 21410 +f 21280 21410 21409 +f 21281 21282 21410 +f 21282 21411 21410 +f 21282 21283 21412 +f 21282 21412 21411 +f 21283 21284 21412 +f 21284 21413 21412 +f 21284 21285 21414 +f 21284 21414 21413 +f 21285 21286 21414 +f 21286 21415 21414 +f 21286 21287 21416 +f 21286 21416 21415 +f 21287 21288 21416 +f 21288 21417 21416 +f 21288 21289 21418 +f 21288 21418 21417 +f 21289 21290 21418 +f 21290 21419 21418 +f 21290 21291 21420 +f 21290 21420 21419 +f 21291 21292 21420 +f 21292 21421 21420 +f 21292 21293 21422 +f 21292 21422 21421 +f 21293 21294 21422 +f 21294 21423 21422 +f 21294 21295 21424 +f 21294 21424 21423 +f 21295 21296 21424 +f 21296 21425 21424 +f 21296 21297 21426 +f 21296 21426 21425 +f 21297 21298 21426 +f 21298 21427 21426 +f 21298 21299 21428 +f 21298 21428 21427 +f 21299 21300 21428 +f 21300 21429 21428 +f 21300 21301 21430 +f 21300 21430 21429 +f 21301 21302 21430 +f 21302 21431 21430 +f 21302 21303 21432 +f 21302 21432 21431 +f 21303 21304 21432 +f 21304 21433 21432 +f 21304 21305 21434 +f 21304 21434 21433 +f 21305 21306 21434 +f 21306 21435 21434 +f 21306 21307 21436 +f 21306 21436 21435 +f 21307 21308 21436 +f 21308 21437 21436 +f 21308 21309 21438 +f 21308 21438 21437 +f 21309 21310 21438 +f 21310 21439 21438 +f 21310 21311 21440 +f 21310 21440 21439 +f 21311 21312 21440 +f 21312 21441 21440 +f 21312 21313 21442 +f 21312 21442 21441 +f 21313 21314 21442 +f 21314 21443 21442 +f 21314 21315 21444 +f 21314 21444 21443 +f 21315 21316 21444 +f 21316 21445 21444 +f 21316 21317 21446 +f 21316 21446 21445 +f 21317 21318 21446 +f 21318 21447 21446 +f 21318 21319 21448 +f 21318 21448 21447 +f 21319 21320 21448 +f 21320 21449 21448 +f 21320 21321 21450 +f 21320 21450 21449 +f 21321 21322 21450 +f 21322 21451 21450 +f 21322 21323 21452 +f 21322 21452 21451 +f 21323 21324 21452 +f 21324 21453 21452 +f 21324 21325 21454 +f 21324 21454 21453 +f 21325 21326 21454 +f 21326 21455 21454 +f 21326 21327 21456 +f 21326 21456 21455 +f 21327 21328 21456 +f 21328 21457 21456 +f 21328 21329 21458 +f 21328 21458 21457 +f 21329 21330 21458 +f 21330 21459 21458 +f 21330 21331 21460 +f 21330 21460 21459 +f 21331 21332 21460 +f 21332 21461 21460 +f 21332 21333 21462 +f 21332 21462 21461 +f 21333 21334 21462 +f 21334 21463 21462 +f 21334 21335 
21464 +f 21334 21464 21463 +f 21335 21336 21464 +f 21336 21465 21464 +f 21336 21337 21466 +f 21336 21466 21465 +f 21337 21338 21466 +f 21338 21467 21466 +f 21338 21339 21468 +f 21338 21468 21467 +f 21339 21340 21468 +f 21340 21469 21468 +f 21340 21341 21470 +f 21340 21470 21469 +f 21341 21342 21470 +f 21342 21471 21470 +f 21342 21343 21472 +f 21342 21472 21471 +f 21343 21344 21472 +f 21344 21473 21472 +f 21344 21345 21474 +f 21344 21474 21473 +f 21345 21346 21474 +f 21346 21475 21474 +f 21346 21347 21476 +f 21346 21476 21475 +f 21347 21348 21476 +f 21348 21477 21476 +f 21348 21349 21478 +f 21348 21478 21477 +f 21349 21350 21478 +f 21350 21479 21478 +f 21350 21351 21480 +f 21350 21480 21479 +f 21351 21352 21480 +f 21352 21481 21480 +f 21352 21353 21482 +f 21352 21482 21481 +f 21353 21354 21482 +f 21354 21483 21482 +f 21354 21355 21484 +f 21354 21484 21483 +f 21355 21356 21484 +f 21356 21485 21484 +f 21356 21357 21486 +f 21356 21486 21485 +f 21357 21358 21486 +f 21358 21487 21486 +f 21358 21359 21488 +f 21358 21488 21487 +f 21359 21360 21488 +f 21360 21489 21488 +f 21360 21361 21490 +f 21360 21490 21489 +f 21361 21362 21490 +f 21362 21491 21490 +f 21362 21363 21492 +f 21362 21492 21491 +f 21363 21364 21492 +f 21364 21493 21492 +f 21364 21365 21494 +f 21364 21494 21493 +f 21365 21366 21494 +f 21366 21495 21494 +f 21367 21368 21496 +f 21368 21497 21496 +f 21368 21369 21498 +f 21368 21498 21497 +f 21369 21370 21498 +f 21370 21499 21498 +f 21370 21371 21500 +f 21370 21500 21499 +f 21371 21372 21500 +f 21372 21501 21500 +f 21372 21373 21502 +f 21372 21502 21501 +f 21373 21374 21502 +f 21374 21503 21502 +f 21374 21375 21504 +f 21374 21504 21503 +f 21375 21376 21504 +f 21376 21505 21504 +f 21376 21377 21506 +f 21376 21506 21505 +f 21377 21378 21506 +f 21378 21507 21506 +f 21378 21379 21508 +f 21378 21508 21507 +f 21379 21380 21508 +f 21380 21509 21508 +f 21380 21381 21510 +f 21380 21510 21509 +f 21381 21382 21510 +f 21382 21511 21510 +f 21382 21383 21512 +f 21382 21512 21511 +f 21383 21384 21512 +f 21384 21513 21512 +f 21384 21385 21514 +f 21384 21514 21513 +f 21385 21386 21514 +f 21386 21515 21514 +f 21386 21387 21516 +f 21386 21516 21515 +f 21387 21388 21516 +f 21388 21517 21516 +f 21388 21389 21518 +f 21388 21518 21517 +f 21389 21390 21518 +f 21390 21519 21518 +f 21390 21391 21520 +f 21390 21520 21519 +f 21391 21392 21520 +f 21392 21521 21520 +f 21392 21393 21522 +f 21392 21522 21521 +f 21393 21394 21522 +f 21394 21523 21522 +f 21394 21395 21524 +f 21394 21524 21523 +f 21395 21396 21524 +f 21396 21525 21524 +f 21396 21397 21526 +f 21396 21526 21525 +f 21397 21398 21526 +f 21398 21527 21526 +f 21398 21399 21528 +f 21398 21528 21527 +f 21399 21400 21528 +f 21400 21529 21528 +f 21400 21401 21530 +f 21400 21530 21529 +f 21401 21402 21530 +f 21402 21531 21530 +f 21402 21403 21532 +f 21402 21532 21531 +f 21403 21404 21532 +f 21404 21533 21532 +f 21404 21405 21534 +f 21404 21534 21533 +f 21405 21406 21534 +f 21406 21535 21534 +f 21406 21407 21536 +f 21406 21536 21535 +f 21407 21408 21536 +f 21408 21537 21536 +f 21408 21409 21538 +f 21408 21538 21537 +f 21409 21410 21538 +f 21410 21539 21538 +f 21410 21411 21540 +f 21410 21540 21539 +f 21411 21412 21540 +f 21412 21541 21540 +f 21412 21413 21542 +f 21412 21542 21541 +f 21413 21414 21542 +f 21414 21543 21542 +f 21414 21415 21544 +f 21414 21544 21543 +f 21415 21416 21544 +f 21416 21545 21544 +f 21416 21417 21546 +f 21416 21546 21545 +f 21417 21418 21546 +f 21418 21547 21546 +f 21418 21419 21548 +f 21418 21548 21547 +f 21419 21420 21548 +f 21420 21549 21548 
+f 21420 21421 21550 +f 21420 21550 21549 +f 21421 21422 21550 +f 21422 21551 21550 +f 21422 21423 21552 +f 21422 21552 21551 +f 21423 21424 21552 +f 21424 21553 21552 +f 21424 21425 21554 +f 21424 21554 21553 +f 21425 21426 21554 +f 21426 21555 21554 +f 21426 21427 21556 +f 21426 21556 21555 +f 21427 21428 21556 +f 21428 21557 21556 +f 21428 21429 21558 +f 21428 21558 21557 +f 21429 21430 21558 +f 21430 21559 21558 +f 21430 21431 21560 +f 21430 21560 21559 +f 21431 21432 21560 +f 21432 21561 21560 +f 21432 21433 21562 +f 21432 21562 21561 +f 21433 21434 21562 +f 21434 21563 21562 +f 21434 21435 21564 +f 21434 21564 21563 +f 21435 21436 21564 +f 21436 21565 21564 +f 21436 21437 21566 +f 21436 21566 21565 +f 21437 21438 21566 +f 21438 21567 21566 +f 21438 21439 21568 +f 21438 21568 21567 +f 21439 21440 21568 +f 21440 21569 21568 +f 21440 21441 21570 +f 21440 21570 21569 +f 21441 21442 21570 +f 21442 21571 21570 +f 21442 21443 21572 +f 21442 21572 21571 +f 21443 21444 21572 +f 21444 21573 21572 +f 21444 21445 21574 +f 21444 21574 21573 +f 21445 21446 21574 +f 21446 21575 21574 +f 21446 21447 21576 +f 21446 21576 21575 +f 21447 21448 21576 +f 21448 21577 21576 +f 21448 21449 21578 +f 21448 21578 21577 +f 21449 21450 21578 +f 21450 21579 21578 +f 21450 21451 21580 +f 21450 21580 21579 +f 21451 21452 21580 +f 21452 21581 21580 +f 21452 21453 21582 +f 21452 21582 21581 +f 21453 21454 21582 +f 21454 21583 21582 +f 21454 21455 21584 +f 21454 21584 21583 +f 21455 21456 21584 +f 21456 21585 21584 +f 21456 21457 21586 +f 21456 21586 21585 +f 21457 21458 21586 +f 21458 21587 21586 +f 21458 21459 21588 +f 21458 21588 21587 +f 21459 21460 21588 +f 21460 21589 21588 +f 21460 21461 21590 +f 21460 21590 21589 +f 21461 21462 21590 +f 21462 21591 21590 +f 21462 21463 21592 +f 21462 21592 21591 +f 21463 21464 21592 +f 21464 21593 21592 +f 21464 21465 21594 +f 21464 21594 21593 +f 21465 21466 21594 +f 21466 21595 21594 +f 21466 21467 21596 +f 21466 21596 21595 +f 21467 21468 21596 +f 21468 21597 21596 +f 21468 21469 21598 +f 21468 21598 21597 +f 21469 21470 21598 +f 21470 21599 21598 +f 21470 21471 21600 +f 21470 21600 21599 +f 21471 21472 21600 +f 21472 21601 21600 +f 21472 21473 21602 +f 21472 21602 21601 +f 21473 21474 21602 +f 21474 21603 21602 +f 21474 21475 21604 +f 21474 21604 21603 +f 21475 21476 21604 +f 21476 21605 21604 +f 21476 21477 21606 +f 21476 21606 21605 +f 21477 21478 21606 +f 21478 21607 21606 +f 21478 21479 21608 +f 21478 21608 21607 +f 21479 21480 21608 +f 21480 21609 21608 +f 21480 21481 21610 +f 21480 21610 21609 +f 21481 21482 21610 +f 21482 21611 21610 +f 21482 21483 21612 +f 21482 21612 21611 +f 21483 21484 21612 +f 21484 21613 21612 +f 21484 21485 21614 +f 21484 21614 21613 +f 21485 21486 21614 +f 21486 21615 21614 +f 21486 21487 21616 +f 21486 21616 21615 +f 21487 21488 21616 +f 21488 21617 21616 +f 21488 21489 21618 +f 21488 21618 21617 +f 21489 21490 21618 +f 21490 21619 21618 +f 21490 21491 21620 +f 21490 21620 21619 +f 21491 21492 21620 +f 21492 21621 21620 +f 21492 21493 21622 +f 21492 21622 21621 +f 21493 21494 21622 +f 21494 21623 21622 +f 21494 21495 21624 +f 21494 21624 21623 +f 21496 21497 21626 +f 21496 21626 21625 +f 21497 21498 21626 +f 21498 21627 21626 +f 21498 21499 21628 +f 21498 21628 21627 +f 21499 21500 21628 +f 21500 21629 21628 +f 21500 21501 21630 +f 21500 21630 21629 +f 21501 21502 21630 +f 21502 21631 21630 +f 21502 21503 21632 +f 21502 21632 21631 +f 21503 21504 21632 +f 21504 21633 21632 +f 21504 21505 21634 +f 21504 21634 21633 +f 21505 21506 21634 +f 
21506 21635 21634 +f 21506 21507 21636 +f 21506 21636 21635 +f 21507 21508 21636 +f 21508 21637 21636 +f 21508 21509 21638 +f 21508 21638 21637 +f 21509 21510 21638 +f 21510 21639 21638 +f 21510 21511 21640 +f 21510 21640 21639 +f 21511 21512 21640 +f 21512 21641 21640 +f 21512 21513 21642 +f 21512 21642 21641 +f 21513 21514 21642 +f 21514 21643 21642 +f 21514 21515 21644 +f 21514 21644 21643 +f 21515 21516 21644 +f 21516 21645 21644 +f 21516 21517 21646 +f 21516 21646 21645 +f 21517 21518 21646 +f 21518 21647 21646 +f 21518 21519 21648 +f 21518 21648 21647 +f 21519 21520 21648 +f 21520 21649 21648 +f 21520 21521 21650 +f 21520 21650 21649 +f 21521 21522 21650 +f 21522 21651 21650 +f 21522 21523 21652 +f 21522 21652 21651 +f 21523 21524 21652 +f 21524 21653 21652 +f 21524 21525 21654 +f 21524 21654 21653 +f 21525 21526 21654 +f 21526 21655 21654 +f 21526 21527 21656 +f 21526 21656 21655 +f 21527 21528 21656 +f 21528 21657 21656 +f 21528 21529 21658 +f 21528 21658 21657 +f 21529 21530 21658 +f 21530 21659 21658 +f 21530 21531 21660 +f 21530 21660 21659 +f 21531 21532 21660 +f 21532 21661 21660 +f 21532 21533 21662 +f 21532 21662 21661 +f 21533 21534 21662 +f 21534 21663 21662 +f 21534 21535 21664 +f 21534 21664 21663 +f 21535 21536 21664 +f 21536 21665 21664 +f 21536 21537 21666 +f 21536 21666 21665 +f 21537 21538 21666 +f 21538 21667 21666 +f 21538 21539 21668 +f 21538 21668 21667 +f 21539 21540 21668 +f 21540 21669 21668 +f 21540 21541 21670 +f 21540 21670 21669 +f 21541 21542 21670 +f 21542 21671 21670 +f 21542 21543 21672 +f 21542 21672 21671 +f 21543 21544 21672 +f 21544 21673 21672 +f 21544 21545 21674 +f 21544 21674 21673 +f 21545 21546 21674 +f 21546 21675 21674 +f 21546 21547 21676 +f 21546 21676 21675 +f 21547 21548 21676 +f 21548 21677 21676 +f 21548 21549 21678 +f 21548 21678 21677 +f 21549 21550 21678 +f 21550 21679 21678 +f 21550 21551 21680 +f 21550 21680 21679 +f 21551 21552 21680 +f 21552 21681 21680 +f 21552 21553 21682 +f 21552 21682 21681 +f 21553 21554 21682 +f 21554 21683 21682 +f 21554 21555 21684 +f 21554 21684 21683 +f 21555 21556 21684 +f 21556 21685 21684 +f 21556 21557 21686 +f 21556 21686 21685 +f 21557 21558 21686 +f 21558 21687 21686 +f 21558 21559 21688 +f 21558 21688 21687 +f 21559 21560 21688 +f 21560 21689 21688 +f 21560 21561 21690 +f 21560 21690 21689 +f 21561 21562 21690 +f 21562 21691 21690 +f 21562 21563 21692 +f 21562 21692 21691 +f 21563 21564 21692 +f 21564 21693 21692 +f 21564 21565 21694 +f 21564 21694 21693 +f 21565 21566 21694 +f 21566 21695 21694 +f 21566 21567 21696 +f 21566 21696 21695 +f 21567 21568 21696 +f 21568 21697 21696 +f 21568 21569 21698 +f 21568 21698 21697 +f 21569 21570 21698 +f 21570 21699 21698 +f 21570 21571 21700 +f 21570 21700 21699 +f 21571 21572 21700 +f 21572 21701 21700 +f 21572 21573 21702 +f 21572 21702 21701 +f 21573 21574 21702 +f 21574 21703 21702 +f 21574 21575 21704 +f 21574 21704 21703 +f 21575 21576 21704 +f 21576 21705 21704 +f 21576 21577 21706 +f 21576 21706 21705 +f 21577 21578 21706 +f 21578 21707 21706 +f 21578 21579 21708 +f 21578 21708 21707 +f 21579 21580 21708 +f 21580 21709 21708 +f 21580 21581 21710 +f 21580 21710 21709 +f 21581 21582 21710 +f 21582 21711 21710 +f 21582 21583 21712 +f 21582 21712 21711 +f 21583 21584 21712 +f 21584 21713 21712 +f 21584 21585 21714 +f 21584 21714 21713 +f 21585 21586 21714 +f 21586 21715 21714 +f 21586 21587 21716 +f 21586 21716 21715 +f 21587 21588 21716 +f 21588 21717 21716 +f 21588 21589 21718 +f 21588 21718 21717 +f 21589 21590 21718 +f 21590 21719 21718 +f 21590 
21591 21720 +f 21590 21720 21719 +f 21591 21592 21720 +f 21592 21721 21720 +f 21592 21593 21722 +f 21592 21722 21721 +f 21593 21594 21722 +f 21594 21723 21722 +f 21594 21595 21724 +f 21594 21724 21723 +f 21595 21596 21724 +f 21596 21725 21724 +f 21596 21597 21726 +f 21596 21726 21725 +f 21597 21598 21726 +f 21598 21727 21726 +f 21598 21599 21728 +f 21598 21728 21727 +f 21599 21600 21728 +f 21600 21729 21728 +f 21600 21601 21730 +f 21600 21730 21729 +f 21601 21602 21730 +f 21602 21731 21730 +f 21602 21603 21732 +f 21602 21732 21731 +f 21603 21604 21732 +f 21604 21733 21732 +f 21604 21605 21734 +f 21604 21734 21733 +f 21605 21606 21734 +f 21606 21735 21734 +f 21606 21607 21736 +f 21606 21736 21735 +f 21607 21608 21736 +f 21608 21737 21736 +f 21608 21609 21738 +f 21608 21738 21737 +f 21609 21610 21738 +f 21610 21739 21738 +f 21610 21611 21740 +f 21610 21740 21739 +f 21611 21612 21740 +f 21612 21741 21740 +f 21612 21613 21742 +f 21612 21742 21741 +f 21613 21614 21742 +f 21614 21743 21742 +f 21614 21615 21744 +f 21614 21744 21743 +f 21615 21616 21744 +f 21616 21745 21744 +f 21616 21617 21746 +f 21616 21746 21745 +f 21617 21618 21746 +f 21618 21747 21746 +f 21618 21619 21748 +f 21618 21748 21747 +f 21619 21620 21748 +f 21620 21749 21748 +f 21620 21621 21750 +f 21620 21750 21749 +f 21621 21622 21750 +f 21622 21751 21750 +f 21622 21623 21752 +f 21622 21752 21751 +f 21623 21624 21752 +f 21624 21753 21752 +f 21625 21626 1 +f 21626 2 1 +f 21626 21627 3 +f 21626 3 2 +f 21627 21628 3 +f 21628 4 3 +f 21628 21629 5 +f 21628 5 4 +f 21629 21630 5 +f 21630 6 5 +f 21630 21631 7 +f 21630 7 6 +f 21631 21632 7 +f 21632 8 7 +f 21632 21633 9 +f 21632 9 8 +f 21633 21634 9 +f 21634 10 9 +f 21634 21635 11 +f 21634 11 10 +f 21635 21636 11 +f 21636 12 11 +f 21636 21637 13 +f 21636 13 12 +f 21637 21638 13 +f 21638 14 13 +f 21638 21639 15 +f 21638 15 14 +f 21639 21640 15 +f 21640 16 15 +f 21640 21641 17 +f 21640 17 16 +f 21641 21642 17 +f 21642 18 17 +f 21642 21643 19 +f 21642 19 18 +f 21643 21644 19 +f 21644 20 19 +f 21644 21645 21 +f 21644 21 20 +f 21645 21646 21 +f 21646 22 21 +f 21646 21647 23 +f 21646 23 22 +f 21647 21648 23 +f 21648 24 23 +f 21648 21649 25 +f 21648 25 24 +f 21649 21650 25 +f 21650 26 25 +f 21650 21651 27 +f 21650 27 26 +f 21651 21652 27 +f 21652 28 27 +f 21652 21653 29 +f 21652 29 28 +f 21653 21654 29 +f 21654 30 29 +f 21654 21655 31 +f 21654 31 30 +f 21655 21656 31 +f 21656 32 31 +f 21656 21657 33 +f 21656 33 32 +f 21657 21658 33 +f 21658 34 33 +f 21658 21659 35 +f 21658 35 34 +f 21659 21660 35 +f 21660 36 35 +f 21660 21661 37 +f 21660 37 36 +f 21661 21662 37 +f 21662 38 37 +f 21662 21663 39 +f 21662 39 38 +f 21663 21664 39 +f 21664 40 39 +f 21664 21665 41 +f 21664 41 40 +f 21665 21666 41 +f 21666 42 41 +f 21666 21667 43 +f 21666 43 42 +f 21667 21668 43 +f 21668 44 43 +f 21668 21669 45 +f 21668 45 44 +f 21669 21670 45 +f 21670 46 45 +f 21670 21671 47 +f 21670 47 46 +f 21671 21672 47 +f 21672 48 47 +f 21672 21673 49 +f 21672 49 48 +f 21673 21674 49 +f 21674 50 49 +f 21674 21675 51 +f 21674 51 50 +f 21675 21676 51 +f 21676 52 51 +f 21676 21677 53 +f 21676 53 52 +f 21677 21678 53 +f 21678 54 53 +f 21678 21679 55 +f 21678 55 54 +f 21679 21680 55 +f 21680 56 55 +f 21680 21681 57 +f 21680 57 56 +f 21681 21682 57 +f 21682 58 57 +f 21682 21683 59 +f 21682 59 58 +f 21683 21684 59 +f 21684 60 59 +f 21684 21685 61 +f 21684 61 60 +f 21685 21686 61 +f 21686 62 61 +f 21686 21687 63 +f 21686 63 62 +f 21687 21688 63 +f 21688 64 63 +f 21688 21689 65 +f 21688 65 64 +f 21689 21690 65 +f 21690 66 65 +f 21690 21691 
67 +f 21690 67 66 +f 21691 21692 67 +f 21692 68 67 +f 21692 21693 69 +f 21692 69 68 +f 21693 21694 69 +f 21694 70 69 +f 21694 21695 71 +f 21694 71 70 +f 21695 21696 71 +f 21696 72 71 +f 21696 21697 73 +f 21696 73 72 +f 21697 21698 73 +f 21698 74 73 +f 21698 21699 75 +f 21698 75 74 +f 21699 21700 75 +f 21700 76 75 +f 21700 21701 77 +f 21700 77 76 +f 21701 21702 77 +f 21702 78 77 +f 21702 21703 79 +f 21702 79 78 +f 21703 21704 79 +f 21704 80 79 +f 21704 21705 81 +f 21704 81 80 +f 21705 21706 81 +f 21706 82 81 +f 21706 21707 83 +f 21706 83 82 +f 21707 21708 83 +f 21708 84 83 +f 21708 21709 85 +f 21708 85 84 +f 21709 21710 85 +f 21710 86 85 +f 21710 21711 87 +f 21710 87 86 +f 21711 21712 87 +f 21712 88 87 +f 21712 21713 89 +f 21712 89 88 +f 21713 21714 89 +f 21714 90 89 +f 21714 21715 91 +f 21714 91 90 +f 21715 21716 91 +f 21716 92 91 +f 21716 21717 93 +f 21716 93 92 +f 21717 21718 93 +f 21718 94 93 +f 21718 21719 95 +f 21718 95 94 +f 21719 21720 95 +f 21720 96 95 +f 21720 21721 97 +f 21720 97 96 +f 21721 21722 97 +f 21722 98 97 +f 21722 21723 99 +f 21722 99 98 +f 21723 21724 99 +f 21724 100 99 +f 21724 21725 101 +f 21724 101 100 +f 21725 21726 101 +f 21726 102 101 +f 21726 21727 103 +f 21726 103 102 +f 21727 21728 103 +f 21728 104 103 +f 21728 21729 105 +f 21728 105 104 +f 21729 21730 105 +f 21730 106 105 +f 21730 21731 107 +f 21730 107 106 +f 21731 21732 107 +f 21732 108 107 +f 21732 21733 109 +f 21732 109 108 +f 21733 21734 109 +f 21734 110 109 +f 21734 21735 111 +f 21734 111 110 +f 21735 21736 111 +f 21736 112 111 +f 21736 21737 113 +f 21736 113 112 +f 21737 21738 113 +f 21738 114 113 +f 21738 21739 115 +f 21738 115 114 +f 21739 21740 115 +f 21740 116 115 +f 21740 21741 117 +f 21740 117 116 +f 21741 21742 117 +f 21742 118 117 +f 21742 21743 119 +f 21742 119 118 +f 21743 21744 119 +f 21744 120 119 +f 21744 21745 121 +f 21744 121 120 +f 21745 21746 121 +f 21746 122 121 +f 21746 21747 123 +f 21746 123 122 +f 21747 21748 123 +f 21748 124 123 +f 21748 21749 125 +f 21748 125 124 +f 21749 21750 125 +f 21750 126 125 +f 21750 21751 127 +f 21750 127 126 +f 21751 21752 127 +f 21752 128 127 +f 21752 21753 129 +f 21752 129 128 +f 21754 21755 21884 +f 21754 21884 21883 +f 21755 21756 21884 +f 21756 21885 21884 +f 21756 21757 21886 +f 21756 21886 21885 +f 21757 21758 21886 +f 21758 21887 21886 +f 21758 21759 21888 +f 21758 21888 21887 +f 21759 21760 21888 +f 21760 21889 21888 +f 21760 21761 21890 +f 21760 21890 21889 +f 21761 21762 21890 +f 21762 21891 21890 +f 21762 21763 21892 +f 21762 21892 21891 +f 21763 21764 21892 +f 21764 21893 21892 +f 21764 21765 21894 +f 21764 21894 21893 +f 21765 21766 21894 +f 21766 21895 21894 +f 21766 21767 21896 +f 21766 21896 21895 +f 21767 21768 21896 +f 21768 21897 21896 +f 21768 21769 21898 +f 21768 21898 21897 +f 21769 21770 21898 +f 21770 21899 21898 +f 21770 21771 21900 +f 21770 21900 21899 +f 21771 21772 21900 +f 21772 21901 21900 +f 21772 21773 21902 +f 21772 21902 21901 +f 21773 21774 21902 +f 21774 21903 21902 +f 21774 21775 21904 +f 21774 21904 21903 +f 21775 21776 21904 +f 21776 21905 21904 +f 21776 21777 21906 +f 21776 21906 21905 +f 21777 21778 21906 +f 21778 21907 21906 +f 21778 21779 21908 +f 21778 21908 21907 +f 21779 21780 21908 +f 21780 21909 21908 +f 21780 21781 21910 +f 21780 21910 21909 +f 21781 21782 21910 +f 21782 21911 21910 +f 21782 21783 21912 +f 21782 21912 21911 +f 21783 21784 21912 +f 21784 21913 21912 +f 21784 21785 21914 +f 21784 21914 21913 +f 21785 21786 21914 +f 21786 21915 21914 +f 21786 21787 21916 +f 21786 21916 21915 +f 21787 21788 
21916 +f 21788 21917 21916 +f 21788 21789 21918 +f 21788 21918 21917 +f 21789 21790 21918 +f 21790 21919 21918 +f 21790 21791 21920 +f 21790 21920 21919 +f 21791 21792 21920 +f 21792 21921 21920 +f 21792 21793 21922 +f 21792 21922 21921 +f 21793 21794 21922 +f 21794 21923 21922 +f 21794 21795 21924 +f 21794 21924 21923 +f 21795 21796 21924 +f 21796 21925 21924 +f 21796 21797 21926 +f 21796 21926 21925 +f 21797 21798 21926 +f 21798 21927 21926 +f 21798 21799 21928 +f 21798 21928 21927 +f 21799 21800 21928 +f 21800 21929 21928 +f 21800 21801 21930 +f 21800 21930 21929 +f 21801 21802 21930 +f 21802 21931 21930 +f 21802 21803 21932 +f 21802 21932 21931 +f 21803 21804 21932 +f 21804 21933 21932 +f 21804 21805 21934 +f 21804 21934 21933 +f 21805 21806 21934 +f 21806 21935 21934 +f 21806 21807 21936 +f 21806 21936 21935 +f 21807 21808 21936 +f 21808 21937 21936 +f 21808 21809 21938 +f 21808 21938 21937 +f 21809 21810 21938 +f 21810 21939 21938 +f 21810 21811 21940 +f 21810 21940 21939 +f 21811 21812 21940 +f 21812 21941 21940 +f 21812 21813 21942 +f 21812 21942 21941 +f 21813 21814 21942 +f 21814 21943 21942 +f 21814 21815 21944 +f 21814 21944 21943 +f 21815 21816 21944 +f 21816 21945 21944 +f 21816 21817 21946 +f 21816 21946 21945 +f 21817 21818 21946 +f 21818 21947 21946 +f 21818 21819 21948 +f 21818 21948 21947 +f 21819 21820 21948 +f 21820 21949 21948 +f 21820 21821 21950 +f 21820 21950 21949 +f 21821 21822 21950 +f 21822 21951 21950 +f 21822 21823 21952 +f 21822 21952 21951 +f 21823 21824 21952 +f 21824 21953 21952 +f 21824 21825 21954 +f 21824 21954 21953 +f 21825 21826 21954 +f 21826 21955 21954 +f 21826 21827 21956 +f 21826 21956 21955 +f 21827 21828 21956 +f 21828 21957 21956 +f 21828 21829 21958 +f 21828 21958 21957 +f 21829 21830 21958 +f 21830 21959 21958 +f 21830 21831 21960 +f 21830 21960 21959 +f 21831 21832 21960 +f 21832 21961 21960 +f 21832 21833 21962 +f 21832 21962 21961 +f 21833 21834 21962 +f 21834 21963 21962 +f 21834 21835 21964 +f 21834 21964 21963 +f 21835 21836 21964 +f 21836 21965 21964 +f 21836 21837 21966 +f 21836 21966 21965 +f 21837 21838 21966 +f 21838 21967 21966 +f 21838 21839 21968 +f 21838 21968 21967 +f 21839 21840 21968 +f 21840 21969 21968 +f 21840 21841 21970 +f 21840 21970 21969 +f 21841 21842 21970 +f 21842 21971 21970 +f 21842 21843 21972 +f 21842 21972 21971 +f 21843 21844 21972 +f 21844 21973 21972 +f 21844 21845 21974 +f 21844 21974 21973 +f 21845 21846 21974 +f 21846 21975 21974 +f 21846 21847 21976 +f 21846 21976 21975 +f 21847 21848 21976 +f 21848 21977 21976 +f 21848 21849 21978 +f 21848 21978 21977 +f 21849 21850 21978 +f 21850 21979 21978 +f 21850 21851 21980 +f 21850 21980 21979 +f 21851 21852 21980 +f 21852 21981 21980 +f 21852 21853 21982 +f 21852 21982 21981 +f 21853 21854 21982 +f 21854 21983 21982 +f 21854 21855 21984 +f 21854 21984 21983 +f 21855 21856 21984 +f 21856 21985 21984 +f 21856 21857 21986 +f 21856 21986 21985 +f 21857 21858 21986 +f 21858 21987 21986 +f 21858 21859 21988 +f 21858 21988 21987 +f 21859 21860 21988 +f 21860 21989 21988 +f 21860 21861 21990 +f 21860 21990 21989 +f 21861 21862 21990 +f 21862 21991 21990 +f 21862 21863 21992 +f 21862 21992 21991 +f 21863 21864 21992 +f 21864 21993 21992 +f 21864 21865 21994 +f 21864 21994 21993 +f 21865 21866 21994 +f 21866 21995 21994 +f 21866 21867 21996 +f 21866 21996 21995 +f 21867 21868 21996 +f 21868 21997 21996 +f 21868 21869 21998 +f 21868 21998 21997 +f 21869 21870 21998 +f 21870 21999 21998 +f 21870 21871 22000 +f 21870 22000 21999 +f 21871 21872 22000 +f 21872 22001 22000 
+f 21872 21873 22002 +f 21872 22002 22001 +f 21873 21874 22002 +f 21874 22003 22002 +f 21874 21875 22004 +f 21874 22004 22003 +f 21875 21876 22004 +f 21876 22005 22004 +f 21876 21877 22006 +f 21876 22006 22005 +f 21877 21878 22006 +f 21878 22007 22006 +f 21878 21879 22008 +f 21878 22008 22007 +f 21879 21880 22008 +f 21880 22009 22008 +f 21880 21881 22010 +f 21880 22010 22009 +f 21881 21882 22010 +f 21882 22011 22010 +f 21883 21884 22012 +f 21884 22013 22012 +f 21884 21885 22014 +f 21884 22014 22013 +f 21885 21886 22014 +f 21886 22015 22014 +f 21886 21887 22016 +f 21886 22016 22015 +f 21887 21888 22016 +f 21888 22017 22016 +f 21888 21889 22018 +f 21888 22018 22017 +f 21889 21890 22018 +f 21890 22019 22018 +f 21890 21891 22020 +f 21890 22020 22019 +f 21891 21892 22020 +f 21892 22021 22020 +f 21892 21893 22022 +f 21892 22022 22021 +f 21893 21894 22022 +f 21894 22023 22022 +f 21894 21895 22024 +f 21894 22024 22023 +f 21895 21896 22024 +f 21896 22025 22024 +f 21896 21897 22026 +f 21896 22026 22025 +f 21897 21898 22026 +f 21898 22027 22026 +f 21898 21899 22028 +f 21898 22028 22027 +f 21899 21900 22028 +f 21900 22029 22028 +f 21900 21901 22030 +f 21900 22030 22029 +f 21901 21902 22030 +f 21902 22031 22030 +f 21902 21903 22032 +f 21902 22032 22031 +f 21903 21904 22032 +f 21904 22033 22032 +f 21904 21905 22034 +f 21904 22034 22033 +f 21905 21906 22034 +f 21906 22035 22034 +f 21906 21907 22036 +f 21906 22036 22035 +f 21907 21908 22036 +f 21908 22037 22036 +f 21908 21909 22038 +f 21908 22038 22037 +f 21909 21910 22038 +f 21910 22039 22038 +f 21910 21911 22040 +f 21910 22040 22039 +f 21911 21912 22040 +f 21912 22041 22040 +f 21912 21913 22042 +f 21912 22042 22041 +f 21913 21914 22042 +f 21914 22043 22042 +f 21914 21915 22044 +f 21914 22044 22043 +f 21915 21916 22044 +f 21916 22045 22044 +f 21916 21917 22046 +f 21916 22046 22045 +f 21917 21918 22046 +f 21918 22047 22046 +f 21918 21919 22048 +f 21918 22048 22047 +f 21919 21920 22048 +f 21920 22049 22048 +f 21920 21921 22050 +f 21920 22050 22049 +f 21921 21922 22050 +f 21922 22051 22050 +f 21922 21923 22052 +f 21922 22052 22051 +f 21923 21924 22052 +f 21924 22053 22052 +f 21924 21925 22054 +f 21924 22054 22053 +f 21925 21926 22054 +f 21926 22055 22054 +f 21926 21927 22056 +f 21926 22056 22055 +f 21927 21928 22056 +f 21928 22057 22056 +f 21928 21929 22058 +f 21928 22058 22057 +f 21929 21930 22058 +f 21930 22059 22058 +f 21930 21931 22060 +f 21930 22060 22059 +f 21931 21932 22060 +f 21932 22061 22060 +f 21932 21933 22062 +f 21932 22062 22061 +f 21933 21934 22062 +f 21934 22063 22062 +f 21934 21935 22064 +f 21934 22064 22063 +f 21935 21936 22064 +f 21936 22065 22064 +f 21936 21937 22066 +f 21936 22066 22065 +f 21937 21938 22066 +f 21938 22067 22066 +f 21938 21939 22068 +f 21938 22068 22067 +f 21939 21940 22068 +f 21940 22069 22068 +f 21940 21941 22070 +f 21940 22070 22069 +f 21941 21942 22070 +f 21942 22071 22070 +f 21942 21943 22072 +f 21942 22072 22071 +f 21943 21944 22072 +f 21944 22073 22072 +f 21944 21945 22074 +f 21944 22074 22073 +f 21945 21946 22074 +f 21946 22075 22074 +f 21946 21947 22076 +f 21946 22076 22075 +f 21947 21948 22076 +f 21948 22077 22076 +f 21948 21949 22078 +f 21948 22078 22077 +f 21949 21950 22078 +f 21950 22079 22078 +f 21950 21951 22080 +f 21950 22080 22079 +f 21951 21952 22080 +f 21952 22081 22080 +f 21952 21953 22082 +f 21952 22082 22081 +f 21953 21954 22082 +f 21954 22083 22082 +f 21954 21955 22084 +f 21954 22084 22083 +f 21955 21956 22084 +f 21956 22085 22084 +f 21956 21957 22086 +f 21956 22086 22085 +f 21957 21958 22086 +f 
21958 22087 22086 +f 21958 21959 22088 +f 21958 22088 22087 +f 21959 21960 22088 +f 21960 22089 22088 +f 21960 21961 22090 +f 21960 22090 22089 +f 21961 21962 22090 +f 21962 22091 22090 +f 21962 21963 22092 +f 21962 22092 22091 +f 21963 21964 22092 +f 21964 22093 22092 +f 21964 21965 22094 +f 21964 22094 22093 +f 21965 21966 22094 +f 21966 22095 22094 +f 21966 21967 22096 +f 21966 22096 22095 +f 21967 21968 22096 +f 21968 22097 22096 +f 21968 21969 22098 +f 21968 22098 22097 +f 21969 21970 22098 +f 21970 22099 22098 +f 21970 21971 22100 +f 21970 22100 22099 +f 21971 21972 22100 +f 21972 22101 22100 +f 21972 21973 22102 +f 21972 22102 22101 +f 21973 21974 22102 +f 21974 22103 22102 +f 21974 21975 22104 +f 21974 22104 22103 +f 21975 21976 22104 +f 21976 22105 22104 +f 21976 21977 22106 +f 21976 22106 22105 +f 21977 21978 22106 +f 21978 22107 22106 +f 21978 21979 22108 +f 21978 22108 22107 +f 21979 21980 22108 +f 21980 22109 22108 +f 21980 21981 22110 +f 21980 22110 22109 +f 21981 21982 22110 +f 21982 22111 22110 +f 21982 21983 22112 +f 21982 22112 22111 +f 21983 21984 22112 +f 21984 22113 22112 +f 21984 21985 22114 +f 21984 22114 22113 +f 21985 21986 22114 +f 21986 22115 22114 +f 21986 21987 22116 +f 21986 22116 22115 +f 21987 21988 22116 +f 21988 22117 22116 +f 21988 21989 22118 +f 21988 22118 22117 +f 21989 21990 22118 +f 21990 22119 22118 +f 21990 21991 22120 +f 21990 22120 22119 +f 21991 21992 22120 +f 21992 22121 22120 +f 21992 21993 22122 +f 21992 22122 22121 +f 21993 21994 22122 +f 21994 22123 22122 +f 21994 21995 22124 +f 21994 22124 22123 +f 21995 21996 22124 +f 21996 22125 22124 +f 21996 21997 22126 +f 21996 22126 22125 +f 21997 21998 22126 +f 21998 22127 22126 +f 21998 21999 22128 +f 21998 22128 22127 +f 21999 22000 22128 +f 22000 22129 22128 +f 22000 22001 22130 +f 22000 22130 22129 +f 22001 22002 22130 +f 22002 22131 22130 +f 22002 22003 22132 +f 22002 22132 22131 +f 22003 22004 22132 +f 22004 22133 22132 +f 22004 22005 22134 +f 22004 22134 22133 +f 22005 22006 22134 +f 22006 22135 22134 +f 22006 22007 22136 +f 22006 22136 22135 +f 22007 22008 22136 +f 22008 22137 22136 +f 22008 22009 22138 +f 22008 22138 22137 +f 22009 22010 22138 +f 22010 22139 22138 +f 22010 22011 22140 +f 22010 22140 22139 +f 22012 22013 22142 +f 22012 22142 22141 +f 22013 22014 22142 +f 22014 22143 22142 +f 22014 22015 22144 +f 22014 22144 22143 +f 22015 22016 22144 +f 22016 22145 22144 +f 22016 22017 22146 +f 22016 22146 22145 +f 22017 22018 22146 +f 22018 22147 22146 +f 22018 22019 22148 +f 22018 22148 22147 +f 22019 22020 22148 +f 22020 22149 22148 +f 22020 22021 22150 +f 22020 22150 22149 +f 22021 22022 22150 +f 22022 22151 22150 +f 22022 22023 22152 +f 22022 22152 22151 +f 22023 22024 22152 +f 22024 22153 22152 +f 22024 22025 22154 +f 22024 22154 22153 +f 22025 22026 22154 +f 22026 22155 22154 +f 22026 22027 22156 +f 22026 22156 22155 +f 22027 22028 22156 +f 22028 22157 22156 +f 22028 22029 22158 +f 22028 22158 22157 +f 22029 22030 22158 +f 22030 22159 22158 +f 22030 22031 22160 +f 22030 22160 22159 +f 22031 22032 22160 +f 22032 22161 22160 +f 22032 22033 22162 +f 22032 22162 22161 +f 22033 22034 22162 +f 22034 22163 22162 +f 22034 22035 22164 +f 22034 22164 22163 +f 22035 22036 22164 +f 22036 22165 22164 +f 22036 22037 22166 +f 22036 22166 22165 +f 22037 22038 22166 +f 22038 22167 22166 +f 22038 22039 22168 +f 22038 22168 22167 +f 22039 22040 22168 +f 22040 22169 22168 +f 22040 22041 22170 +f 22040 22170 22169 +f 22041 22042 22170 +f 22042 22171 22170 +f 22042 22043 22172 +f 22042 22172 22171 +f 22043 
22044 22172 +f 22044 22173 22172 +f 22044 22045 22174 +f 22044 22174 22173 +f 22045 22046 22174 +f 22046 22175 22174 +f 22046 22047 22176 +f 22046 22176 22175 +f 22047 22048 22176 +f 22048 22177 22176 +f 22048 22049 22178 +f 22048 22178 22177 +f 22049 22050 22178 +f 22050 22179 22178 +f 22050 22051 22180 +f 22050 22180 22179 +f 22051 22052 22180 +f 22052 22181 22180 +f 22052 22053 22182 +f 22052 22182 22181 +f 22053 22054 22182 +f 22054 22183 22182 +f 22054 22055 22184 +f 22054 22184 22183 +f 22055 22056 22184 +f 22056 22185 22184 +f 22056 22057 22186 +f 22056 22186 22185 +f 22057 22058 22186 +f 22058 22187 22186 +f 22058 22059 22188 +f 22058 22188 22187 +f 22059 22060 22188 +f 22060 22189 22188 +f 22060 22061 22190 +f 22060 22190 22189 +f 22061 22062 22190 +f 22062 22191 22190 +f 22062 22063 22192 +f 22062 22192 22191 +f 22063 22064 22192 +f 22064 22193 22192 +f 22064 22065 22194 +f 22064 22194 22193 +f 22065 22066 22194 +f 22066 22195 22194 +f 22066 22067 22196 +f 22066 22196 22195 +f 22067 22068 22196 +f 22068 22197 22196 +f 22068 22069 22198 +f 22068 22198 22197 +f 22069 22070 22198 +f 22070 22199 22198 +f 22070 22071 22200 +f 22070 22200 22199 +f 22071 22072 22200 +f 22072 22201 22200 +f 22072 22073 22202 +f 22072 22202 22201 +f 22073 22074 22202 +f 22074 22203 22202 +f 22074 22075 22204 +f 22074 22204 22203 +f 22075 22076 22204 +f 22076 22205 22204 +f 22076 22077 22206 +f 22076 22206 22205 +f 22077 22078 22206 +f 22078 22207 22206 +f 22078 22079 22208 +f 22078 22208 22207 +f 22079 22080 22208 +f 22080 22209 22208 +f 22080 22081 22210 +f 22080 22210 22209 +f 22081 22082 22210 +f 22082 22211 22210 +f 22082 22083 22212 +f 22082 22212 22211 +f 22083 22084 22212 +f 22084 22213 22212 +f 22084 22085 22214 +f 22084 22214 22213 +f 22085 22086 22214 +f 22086 22215 22214 +f 22086 22087 22216 +f 22086 22216 22215 +f 22087 22088 22216 +f 22088 22217 22216 +f 22088 22089 22218 +f 22088 22218 22217 +f 22089 22090 22218 +f 22090 22219 22218 +f 22090 22091 22220 +f 22090 22220 22219 +f 22091 22092 22220 +f 22092 22221 22220 +f 22092 22093 22222 +f 22092 22222 22221 +f 22093 22094 22222 +f 22094 22223 22222 +f 22094 22095 22224 +f 22094 22224 22223 +f 22095 22096 22224 +f 22096 22225 22224 +f 22096 22097 22226 +f 22096 22226 22225 +f 22097 22098 22226 +f 22098 22227 22226 +f 22098 22099 22228 +f 22098 22228 22227 +f 22099 22100 22228 +f 22100 22229 22228 +f 22100 22101 22230 +f 22100 22230 22229 +f 22101 22102 22230 +f 22102 22231 22230 +f 22102 22103 22232 +f 22102 22232 22231 +f 22103 22104 22232 +f 22104 22233 22232 +f 22104 22105 22234 +f 22104 22234 22233 +f 22105 22106 22234 +f 22106 22235 22234 +f 22106 22107 22236 +f 22106 22236 22235 +f 22107 22108 22236 +f 22108 22237 22236 +f 22108 22109 22238 +f 22108 22238 22237 +f 22109 22110 22238 +f 22110 22239 22238 +f 22110 22111 22240 +f 22110 22240 22239 +f 22111 22112 22240 +f 22112 22241 22240 +f 22112 22113 22242 +f 22112 22242 22241 +f 22113 22114 22242 +f 22114 22243 22242 +f 22114 22115 22244 +f 22114 22244 22243 +f 22115 22116 22244 +f 22116 22245 22244 +f 22116 22117 22246 +f 22116 22246 22245 +f 22117 22118 22246 +f 22118 22247 22246 +f 22118 22119 22248 +f 22118 22248 22247 +f 22119 22120 22248 +f 22120 22249 22248 +f 22120 22121 22250 +f 22120 22250 22249 +f 22121 22122 22250 +f 22122 22251 22250 +f 22122 22123 22252 +f 22122 22252 22251 +f 22123 22124 22252 +f 22124 22253 22252 +f 22124 22125 22254 +f 22124 22254 22253 +f 22125 22126 22254 +f 22126 22255 22254 +f 22126 22127 22256 +f 22126 22256 22255 +f 22127 22128 22256 +f 22128 22257 
[... several thousand further added "+f v1 v2 v3" face records omitted: Wavefront OBJ triangle connectivity for vertex indices 22128-23963, two triangles per grid quad, continuing the same pattern as above ...]
23964 +f 23834 23964 23963 +f 23835 23836 23964 +f 23836 23965 23964 +f 23836 23837 23966 +f 23836 23966 23965 +f 23837 23838 23966 +f 23838 23967 23966 +f 23838 23839 23968 +f 23838 23968 23967 +f 23839 23840 23968 +f 23840 23969 23968 +f 23840 23841 23970 +f 23840 23970 23969 +f 23841 23842 23970 +f 23842 23971 23970 +f 23842 23843 23972 +f 23842 23972 23971 +f 23843 23844 23972 +f 23844 23973 23972 +f 23844 23845 23974 +f 23844 23974 23973 +f 23845 23846 23974 +f 23846 23975 23974 +f 23846 23847 23976 +f 23846 23976 23975 +f 23847 23848 23976 +f 23848 23977 23976 +f 23848 23849 23978 +f 23848 23978 23977 +f 23849 23850 23978 +f 23850 23979 23978 +f 23850 23851 23980 +f 23850 23980 23979 +f 23851 23852 23980 +f 23852 23981 23980 +f 23852 23853 23982 +f 23852 23982 23981 +f 23853 23854 23982 +f 23854 23983 23982 +f 23854 23855 23984 +f 23854 23984 23983 +f 23855 23856 23984 +f 23856 23985 23984 +f 23856 23857 23986 +f 23856 23986 23985 +f 23857 23858 23986 +f 23858 23987 23986 +f 23858 23859 23988 +f 23858 23988 23987 +f 23859 23860 23988 +f 23860 23989 23988 +f 23860 23861 23990 +f 23860 23990 23989 +f 23861 23862 23990 +f 23862 23991 23990 +f 23862 23863 23992 +f 23862 23992 23991 +f 23863 23864 23992 +f 23864 23993 23992 +f 23864 23865 23994 +f 23864 23994 23993 +f 23865 23866 23994 +f 23866 23995 23994 +f 23866 23867 23996 +f 23866 23996 23995 +f 23867 23868 23996 +f 23868 23997 23996 +f 23868 23869 23998 +f 23868 23998 23997 +f 23869 23870 23998 +f 23870 23999 23998 +f 23870 23871 24000 +f 23870 24000 23999 +f 23871 23872 24000 +f 23872 24001 24000 +f 23872 23873 24002 +f 23872 24002 24001 +f 23873 23874 24002 +f 23874 24003 24002 +f 23874 23875 24004 +f 23874 24004 24003 +f 23875 23876 24004 +f 23876 24005 24004 +f 23876 23877 24006 +f 23876 24006 24005 +f 23877 23878 24006 +f 23878 24007 24006 +f 23878 23879 24008 +f 23878 24008 24007 +f 23879 23880 24008 +f 23880 24009 24008 +f 23880 23881 24010 +f 23880 24010 24009 +f 23881 23882 24010 +f 23882 24011 24010 +f 23882 23883 24012 +f 23882 24012 24011 +f 23883 23884 24012 +f 23884 24013 24012 +f 23884 23885 24014 +f 23884 24014 24013 +f 23885 23886 24014 +f 23886 24015 24014 +f 23886 23887 24016 +f 23886 24016 24015 +f 23887 23888 24016 +f 23888 24017 24016 +f 23888 23889 24018 +f 23888 24018 24017 +f 23889 23890 24018 +f 23890 24019 24018 +f 23890 23891 24020 +f 23890 24020 24019 +f 23891 23892 24020 +f 23892 24021 24020 +f 23892 23893 24022 +f 23892 24022 24021 +f 23893 23894 24022 +f 23894 24023 24022 +f 23894 23895 24024 +f 23894 24024 24023 +f 23895 23896 24024 +f 23896 24025 24024 +f 23896 23897 24026 +f 23896 24026 24025 +f 23897 23898 24026 +f 23898 24027 24026 +f 23898 23899 24028 +f 23898 24028 24027 +f 23899 23900 24028 +f 23900 24029 24028 +f 23900 23901 24030 +f 23900 24030 24029 +f 23901 23902 24030 +f 23902 24031 24030 +f 23902 23903 24032 +f 23902 24032 24031 +f 23903 23904 24032 +f 23904 24033 24032 +f 23904 23905 24034 +f 23904 24034 24033 +f 23905 23906 24034 +f 23906 24035 24034 +f 23906 23907 24036 +f 23906 24036 24035 +f 23907 23908 24036 +f 23908 24037 24036 +f 23908 23909 24038 +f 23908 24038 24037 +f 23909 23910 24038 +f 23910 24039 24038 +f 23910 23911 24040 +f 23910 24040 24039 +f 23911 23912 24040 +f 23912 24041 24040 +f 23912 23913 24042 +f 23912 24042 24041 +f 23913 23914 24042 +f 23914 24043 24042 +f 23914 23915 24044 +f 23914 24044 24043 +f 23915 23916 24044 +f 23916 24045 24044 +f 23916 23917 24046 +f 23916 24046 24045 +f 23917 23918 24046 +f 23918 24047 24046 +f 23918 23919 24048 +f 23918 24048 24047 
+f 23919 23920 24048 +f 23920 24049 24048 +f 23920 23921 24050 +f 23920 24050 24049 +f 23921 23922 24050 +f 23922 24051 24050 +f 23922 23923 24052 +f 23922 24052 24051 +f 23923 23924 24052 +f 23924 24053 24052 +f 23924 23925 24054 +f 23924 24054 24053 +f 23925 23926 24054 +f 23926 24055 24054 +f 23926 23927 24056 +f 23926 24056 24055 +f 23927 23928 24056 +f 23928 24057 24056 +f 23928 23929 24058 +f 23928 24058 24057 +f 23929 23930 24058 +f 23930 24059 24058 +f 23930 23931 24060 +f 23930 24060 24059 +f 23931 23932 24060 +f 23932 24061 24060 +f 23932 23933 24062 +f 23932 24062 24061 +f 23933 23934 24062 +f 23934 24063 24062 +f 23934 23935 24064 +f 23934 24064 24063 +f 23935 23936 24064 +f 23936 24065 24064 +f 23936 23937 24066 +f 23936 24066 24065 +f 23937 23938 24066 +f 23938 24067 24066 +f 23938 23939 24068 +f 23938 24068 24067 +f 23939 23940 24068 +f 23940 24069 24068 +f 23940 23941 24070 +f 23940 24070 24069 +f 23941 23942 24070 +f 23942 24071 24070 +f 23942 23943 24072 +f 23942 24072 24071 +f 23943 23944 24072 +f 23944 24073 24072 +f 23944 23945 24074 +f 23944 24074 24073 +f 23945 23946 24074 +f 23946 24075 24074 +f 23947 23948 24076 +f 23948 24077 24076 +f 23948 23949 24078 +f 23948 24078 24077 +f 23949 23950 24078 +f 23950 24079 24078 +f 23950 23951 24080 +f 23950 24080 24079 +f 23951 23952 24080 +f 23952 24081 24080 +f 23952 23953 24082 +f 23952 24082 24081 +f 23953 23954 24082 +f 23954 24083 24082 +f 23954 23955 24084 +f 23954 24084 24083 +f 23955 23956 24084 +f 23956 24085 24084 +f 23956 23957 24086 +f 23956 24086 24085 +f 23957 23958 24086 +f 23958 24087 24086 +f 23958 23959 24088 +f 23958 24088 24087 +f 23959 23960 24088 +f 23960 24089 24088 +f 23960 23961 24090 +f 23960 24090 24089 +f 23961 23962 24090 +f 23962 24091 24090 +f 23962 23963 24092 +f 23962 24092 24091 +f 23963 23964 24092 +f 23964 24093 24092 +f 23964 23965 24094 +f 23964 24094 24093 +f 23965 23966 24094 +f 23966 24095 24094 +f 23966 23967 24096 +f 23966 24096 24095 +f 23967 23968 24096 +f 23968 24097 24096 +f 23968 23969 24098 +f 23968 24098 24097 +f 23969 23970 24098 +f 23970 24099 24098 +f 23970 23971 24100 +f 23970 24100 24099 +f 23971 23972 24100 +f 23972 24101 24100 +f 23972 23973 24102 +f 23972 24102 24101 +f 23973 23974 24102 +f 23974 24103 24102 +f 23974 23975 24104 +f 23974 24104 24103 +f 23975 23976 24104 +f 23976 24105 24104 +f 23976 23977 24106 +f 23976 24106 24105 +f 23977 23978 24106 +f 23978 24107 24106 +f 23978 23979 24108 +f 23978 24108 24107 +f 23979 23980 24108 +f 23980 24109 24108 +f 23980 23981 24110 +f 23980 24110 24109 +f 23981 23982 24110 +f 23982 24111 24110 +f 23982 23983 24112 +f 23982 24112 24111 +f 23983 23984 24112 +f 23984 24113 24112 +f 23984 23985 24114 +f 23984 24114 24113 +f 23985 23986 24114 +f 23986 24115 24114 +f 23986 23987 24116 +f 23986 24116 24115 +f 23987 23988 24116 +f 23988 24117 24116 +f 23988 23989 24118 +f 23988 24118 24117 +f 23989 23990 24118 +f 23990 24119 24118 +f 23990 23991 24120 +f 23990 24120 24119 +f 23991 23992 24120 +f 23992 24121 24120 +f 23992 23993 24122 +f 23992 24122 24121 +f 23993 23994 24122 +f 23994 24123 24122 +f 23994 23995 24124 +f 23994 24124 24123 +f 23995 23996 24124 +f 23996 24125 24124 +f 23996 23997 24126 +f 23996 24126 24125 +f 23997 23998 24126 +f 23998 24127 24126 +f 23998 23999 24128 +f 23998 24128 24127 +f 23999 24000 24128 +f 24000 24129 24128 +f 24000 24001 24130 +f 24000 24130 24129 +f 24001 24002 24130 +f 24002 24131 24130 +f 24002 24003 24132 +f 24002 24132 24131 +f 24003 24004 24132 +f 24004 24133 24132 +f 24004 24005 24134 +f 
24004 24134 24133 +f 24005 24006 24134 +f 24006 24135 24134 +f 24006 24007 24136 +f 24006 24136 24135 +f 24007 24008 24136 +f 24008 24137 24136 +f 24008 24009 24138 +f 24008 24138 24137 +f 24009 24010 24138 +f 24010 24139 24138 +f 24010 24011 24140 +f 24010 24140 24139 +f 24011 24012 24140 +f 24012 24141 24140 +f 24012 24013 24142 +f 24012 24142 24141 +f 24013 24014 24142 +f 24014 24143 24142 +f 24014 24015 24144 +f 24014 24144 24143 +f 24015 24016 24144 +f 24016 24145 24144 +f 24016 24017 24146 +f 24016 24146 24145 +f 24017 24018 24146 +f 24018 24147 24146 +f 24018 24019 24148 +f 24018 24148 24147 +f 24019 24020 24148 +f 24020 24149 24148 +f 24020 24021 24150 +f 24020 24150 24149 +f 24021 24022 24150 +f 24022 24151 24150 +f 24022 24023 24152 +f 24022 24152 24151 +f 24023 24024 24152 +f 24024 24153 24152 +f 24024 24025 24154 +f 24024 24154 24153 +f 24025 24026 24154 +f 24026 24155 24154 +f 24026 24027 24156 +f 24026 24156 24155 +f 24027 24028 24156 +f 24028 24157 24156 +f 24028 24029 24158 +f 24028 24158 24157 +f 24029 24030 24158 +f 24030 24159 24158 +f 24030 24031 24160 +f 24030 24160 24159 +f 24031 24032 24160 +f 24032 24161 24160 +f 24032 24033 24162 +f 24032 24162 24161 +f 24033 24034 24162 +f 24034 24163 24162 +f 24034 24035 24164 +f 24034 24164 24163 +f 24035 24036 24164 +f 24036 24165 24164 +f 24036 24037 24166 +f 24036 24166 24165 +f 24037 24038 24166 +f 24038 24167 24166 +f 24038 24039 24168 +f 24038 24168 24167 +f 24039 24040 24168 +f 24040 24169 24168 +f 24040 24041 24170 +f 24040 24170 24169 +f 24041 24042 24170 +f 24042 24171 24170 +f 24042 24043 24172 +f 24042 24172 24171 +f 24043 24044 24172 +f 24044 24173 24172 +f 24044 24045 24174 +f 24044 24174 24173 +f 24045 24046 24174 +f 24046 24175 24174 +f 24046 24047 24176 +f 24046 24176 24175 +f 24047 24048 24176 +f 24048 24177 24176 +f 24048 24049 24178 +f 24048 24178 24177 +f 24049 24050 24178 +f 24050 24179 24178 +f 24050 24051 24180 +f 24050 24180 24179 +f 24051 24052 24180 +f 24052 24181 24180 +f 24052 24053 24182 +f 24052 24182 24181 +f 24053 24054 24182 +f 24054 24183 24182 +f 24054 24055 24184 +f 24054 24184 24183 +f 24055 24056 24184 +f 24056 24185 24184 +f 24056 24057 24186 +f 24056 24186 24185 +f 24057 24058 24186 +f 24058 24187 24186 +f 24058 24059 24188 +f 24058 24188 24187 +f 24059 24060 24188 +f 24060 24189 24188 +f 24060 24061 24190 +f 24060 24190 24189 +f 24061 24062 24190 +f 24062 24191 24190 +f 24062 24063 24192 +f 24062 24192 24191 +f 24063 24064 24192 +f 24064 24193 24192 +f 24064 24065 24194 +f 24064 24194 24193 +f 24065 24066 24194 +f 24066 24195 24194 +f 24066 24067 24196 +f 24066 24196 24195 +f 24067 24068 24196 +f 24068 24197 24196 +f 24068 24069 24198 +f 24068 24198 24197 +f 24069 24070 24198 +f 24070 24199 24198 +f 24070 24071 24200 +f 24070 24200 24199 +f 24071 24072 24200 +f 24072 24201 24200 +f 24072 24073 24202 +f 24072 24202 24201 +f 24073 24074 24202 +f 24074 24203 24202 +f 24074 24075 24204 +f 24074 24204 24203 +f 24076 24077 24206 +f 24076 24206 24205 +f 24077 24078 24206 +f 24078 24207 24206 +f 24078 24079 24208 +f 24078 24208 24207 +f 24079 24080 24208 +f 24080 24209 24208 +f 24080 24081 24210 +f 24080 24210 24209 +f 24081 24082 24210 +f 24082 24211 24210 +f 24082 24083 24212 +f 24082 24212 24211 +f 24083 24084 24212 +f 24084 24213 24212 +f 24084 24085 24214 +f 24084 24214 24213 +f 24085 24086 24214 +f 24086 24215 24214 +f 24086 24087 24216 +f 24086 24216 24215 +f 24087 24088 24216 +f 24088 24217 24216 +f 24088 24089 24218 +f 24088 24218 24217 +f 24089 24090 24218 +f 24090 24219 24218 +f 24090 
24091 24220 +f 24090 24220 24219 +f 24091 24092 24220 +f 24092 24221 24220 +f 24092 24093 24222 +f 24092 24222 24221 +f 24093 24094 24222 +f 24094 24223 24222 +f 24094 24095 24224 +f 24094 24224 24223 +f 24095 24096 24224 +f 24096 24225 24224 +f 24096 24097 24226 +f 24096 24226 24225 +f 24097 24098 24226 +f 24098 24227 24226 +f 24098 24099 24228 +f 24098 24228 24227 +f 24099 24100 24228 +f 24100 24229 24228 +f 24100 24101 24230 +f 24100 24230 24229 +f 24101 24102 24230 +f 24102 24231 24230 +f 24102 24103 24232 +f 24102 24232 24231 +f 24103 24104 24232 +f 24104 24233 24232 +f 24104 24105 24234 +f 24104 24234 24233 +f 24105 24106 24234 +f 24106 24235 24234 +f 24106 24107 24236 +f 24106 24236 24235 +f 24107 24108 24236 +f 24108 24237 24236 +f 24108 24109 24238 +f 24108 24238 24237 +f 24109 24110 24238 +f 24110 24239 24238 +f 24110 24111 24240 +f 24110 24240 24239 +f 24111 24112 24240 +f 24112 24241 24240 +f 24112 24113 24242 +f 24112 24242 24241 +f 24113 24114 24242 +f 24114 24243 24242 +f 24114 24115 24244 +f 24114 24244 24243 +f 24115 24116 24244 +f 24116 24245 24244 +f 24116 24117 24246 +f 24116 24246 24245 +f 24117 24118 24246 +f 24118 24247 24246 +f 24118 24119 24248 +f 24118 24248 24247 +f 24119 24120 24248 +f 24120 24249 24248 +f 24120 24121 24250 +f 24120 24250 24249 +f 24121 24122 24250 +f 24122 24251 24250 +f 24122 24123 24252 +f 24122 24252 24251 +f 24123 24124 24252 +f 24124 24253 24252 +f 24124 24125 24254 +f 24124 24254 24253 +f 24125 24126 24254 +f 24126 24255 24254 +f 24126 24127 24256 +f 24126 24256 24255 +f 24127 24128 24256 +f 24128 24257 24256 +f 24128 24129 24258 +f 24128 24258 24257 +f 24129 24130 24258 +f 24130 24259 24258 +f 24130 24131 24260 +f 24130 24260 24259 +f 24131 24132 24260 +f 24132 24261 24260 +f 24132 24133 24262 +f 24132 24262 24261 +f 24133 24134 24262 +f 24134 24263 24262 +f 24134 24135 24264 +f 24134 24264 24263 +f 24135 24136 24264 +f 24136 24265 24264 +f 24136 24137 24266 +f 24136 24266 24265 +f 24137 24138 24266 +f 24138 24267 24266 +f 24138 24139 24268 +f 24138 24268 24267 +f 24139 24140 24268 +f 24140 24269 24268 +f 24140 24141 24270 +f 24140 24270 24269 +f 24141 24142 24270 +f 24142 24271 24270 +f 24142 24143 24272 +f 24142 24272 24271 +f 24143 24144 24272 +f 24144 24273 24272 +f 24144 24145 24274 +f 24144 24274 24273 +f 24145 24146 24274 +f 24146 24275 24274 +f 24146 24147 24276 +f 24146 24276 24275 +f 24147 24148 24276 +f 24148 24277 24276 +f 24148 24149 24278 +f 24148 24278 24277 +f 24149 24150 24278 +f 24150 24279 24278 +f 24150 24151 24280 +f 24150 24280 24279 +f 24151 24152 24280 +f 24152 24281 24280 +f 24152 24153 24282 +f 24152 24282 24281 +f 24153 24154 24282 +f 24154 24283 24282 +f 24154 24155 24284 +f 24154 24284 24283 +f 24155 24156 24284 +f 24156 24285 24284 +f 24156 24157 24286 +f 24156 24286 24285 +f 24157 24158 24286 +f 24158 24287 24286 +f 24158 24159 24288 +f 24158 24288 24287 +f 24159 24160 24288 +f 24160 24289 24288 +f 24160 24161 24290 +f 24160 24290 24289 +f 24161 24162 24290 +f 24162 24291 24290 +f 24162 24163 24292 +f 24162 24292 24291 +f 24163 24164 24292 +f 24164 24293 24292 +f 24164 24165 24294 +f 24164 24294 24293 +f 24165 24166 24294 +f 24166 24295 24294 +f 24166 24167 24296 +f 24166 24296 24295 +f 24167 24168 24296 +f 24168 24297 24296 +f 24168 24169 24298 +f 24168 24298 24297 +f 24169 24170 24298 +f 24170 24299 24298 +f 24170 24171 24300 +f 24170 24300 24299 +f 24171 24172 24300 +f 24172 24301 24300 +f 24172 24173 24302 +f 24172 24302 24301 +f 24173 24174 24302 +f 24174 24303 24302 +f 24174 24175 24304 +f 24174 24304 
24303 +f 24175 24176 24304 +f 24176 24305 24304 +f 24176 24177 24306 +f 24176 24306 24305 +f 24177 24178 24306 +f 24178 24307 24306 +f 24178 24179 24308 +f 24178 24308 24307 +f 24179 24180 24308 +f 24180 24309 24308 +f 24180 24181 24310 +f 24180 24310 24309 +f 24181 24182 24310 +f 24182 24311 24310 +f 24182 24183 24312 +f 24182 24312 24311 +f 24183 24184 24312 +f 24184 24313 24312 +f 24184 24185 24314 +f 24184 24314 24313 +f 24185 24186 24314 +f 24186 24315 24314 +f 24186 24187 24316 +f 24186 24316 24315 +f 24187 24188 24316 +f 24188 24317 24316 +f 24188 24189 24318 +f 24188 24318 24317 +f 24189 24190 24318 +f 24190 24319 24318 +f 24190 24191 24320 +f 24190 24320 24319 +f 24191 24192 24320 +f 24192 24321 24320 +f 24192 24193 24322 +f 24192 24322 24321 +f 24193 24194 24322 +f 24194 24323 24322 +f 24194 24195 24324 +f 24194 24324 24323 +f 24195 24196 24324 +f 24196 24325 24324 +f 24196 24197 24326 +f 24196 24326 24325 +f 24197 24198 24326 +f 24198 24327 24326 +f 24198 24199 24328 +f 24198 24328 24327 +f 24199 24200 24328 +f 24200 24329 24328 +f 24200 24201 24330 +f 24200 24330 24329 +f 24201 24202 24330 +f 24202 24331 24330 +f 24202 24203 24332 +f 24202 24332 24331 +f 24203 24204 24332 +f 24204 24333 24332 +f 24205 24206 24334 +f 24206 24335 24334 +f 24206 24207 24336 +f 24206 24336 24335 +f 24207 24208 24336 +f 24208 24337 24336 +f 24208 24209 24338 +f 24208 24338 24337 +f 24209 24210 24338 +f 24210 24339 24338 +f 24210 24211 24340 +f 24210 24340 24339 +f 24211 24212 24340 +f 24212 24341 24340 +f 24212 24213 24342 +f 24212 24342 24341 +f 24213 24214 24342 +f 24214 24343 24342 +f 24214 24215 24344 +f 24214 24344 24343 +f 24215 24216 24344 +f 24216 24345 24344 +f 24216 24217 24346 +f 24216 24346 24345 +f 24217 24218 24346 +f 24218 24347 24346 +f 24218 24219 24348 +f 24218 24348 24347 +f 24219 24220 24348 +f 24220 24349 24348 +f 24220 24221 24350 +f 24220 24350 24349 +f 24221 24222 24350 +f 24222 24351 24350 +f 24222 24223 24352 +f 24222 24352 24351 +f 24223 24224 24352 +f 24224 24353 24352 +f 24224 24225 24354 +f 24224 24354 24353 +f 24225 24226 24354 +f 24226 24355 24354 +f 24226 24227 24356 +f 24226 24356 24355 +f 24227 24228 24356 +f 24228 24357 24356 +f 24228 24229 24358 +f 24228 24358 24357 +f 24229 24230 24358 +f 24230 24359 24358 +f 24230 24231 24360 +f 24230 24360 24359 +f 24231 24232 24360 +f 24232 24361 24360 +f 24232 24233 24362 +f 24232 24362 24361 +f 24233 24234 24362 +f 24234 24363 24362 +f 24234 24235 24364 +f 24234 24364 24363 +f 24235 24236 24364 +f 24236 24365 24364 +f 24236 24237 24366 +f 24236 24366 24365 +f 24237 24238 24366 +f 24238 24367 24366 +f 24238 24239 24368 +f 24238 24368 24367 +f 24239 24240 24368 +f 24240 24369 24368 +f 24240 24241 24370 +f 24240 24370 24369 +f 24241 24242 24370 +f 24242 24371 24370 +f 24242 24243 24372 +f 24242 24372 24371 +f 24243 24244 24372 +f 24244 24373 24372 +f 24244 24245 24374 +f 24244 24374 24373 +f 24245 24246 24374 +f 24246 24375 24374 +f 24246 24247 24376 +f 24246 24376 24375 +f 24247 24248 24376 +f 24248 24377 24376 +f 24248 24249 24378 +f 24248 24378 24377 +f 24249 24250 24378 +f 24250 24379 24378 +f 24250 24251 24380 +f 24250 24380 24379 +f 24251 24252 24380 +f 24252 24381 24380 +f 24252 24253 24382 +f 24252 24382 24381 +f 24253 24254 24382 +f 24254 24383 24382 +f 24254 24255 24384 +f 24254 24384 24383 +f 24255 24256 24384 +f 24256 24385 24384 +f 24256 24257 24386 +f 24256 24386 24385 +f 24257 24258 24386 +f 24258 24387 24386 +f 24258 24259 24388 +f 24258 24388 24387 +f 24259 24260 24388 +f 24260 24389 24388 +f 24260 24261 24390 
+f 24260 24390 24389 +f 24261 24262 24390 +f 24262 24391 24390 +f 24262 24263 24392 +f 24262 24392 24391 +f 24263 24264 24392 +f 24264 24393 24392 +f 24264 24265 24394 +f 24264 24394 24393 +f 24265 24266 24394 +f 24266 24395 24394 +f 24266 24267 24396 +f 24266 24396 24395 +f 24267 24268 24396 +f 24268 24397 24396 +f 24268 24269 24398 +f 24268 24398 24397 +f 24269 24270 24398 +f 24270 24399 24398 +f 24270 24271 24400 +f 24270 24400 24399 +f 24271 24272 24400 +f 24272 24401 24400 +f 24272 24273 24402 +f 24272 24402 24401 +f 24273 24274 24402 +f 24274 24403 24402 +f 24274 24275 24404 +f 24274 24404 24403 +f 24275 24276 24404 +f 24276 24405 24404 +f 24276 24277 24406 +f 24276 24406 24405 +f 24277 24278 24406 +f 24278 24407 24406 +f 24278 24279 24408 +f 24278 24408 24407 +f 24279 24280 24408 +f 24280 24409 24408 +f 24280 24281 24410 +f 24280 24410 24409 +f 24281 24282 24410 +f 24282 24411 24410 +f 24282 24283 24412 +f 24282 24412 24411 +f 24283 24284 24412 +f 24284 24413 24412 +f 24284 24285 24414 +f 24284 24414 24413 +f 24285 24286 24414 +f 24286 24415 24414 +f 24286 24287 24416 +f 24286 24416 24415 +f 24287 24288 24416 +f 24288 24417 24416 +f 24288 24289 24418 +f 24288 24418 24417 +f 24289 24290 24418 +f 24290 24419 24418 +f 24290 24291 24420 +f 24290 24420 24419 +f 24291 24292 24420 +f 24292 24421 24420 +f 24292 24293 24422 +f 24292 24422 24421 +f 24293 24294 24422 +f 24294 24423 24422 +f 24294 24295 24424 +f 24294 24424 24423 +f 24295 24296 24424 +f 24296 24425 24424 +f 24296 24297 24426 +f 24296 24426 24425 +f 24297 24298 24426 +f 24298 24427 24426 +f 24298 24299 24428 +f 24298 24428 24427 +f 24299 24300 24428 +f 24300 24429 24428 +f 24300 24301 24430 +f 24300 24430 24429 +f 24301 24302 24430 +f 24302 24431 24430 +f 24302 24303 24432 +f 24302 24432 24431 +f 24303 24304 24432 +f 24304 24433 24432 +f 24304 24305 24434 +f 24304 24434 24433 +f 24305 24306 24434 +f 24306 24435 24434 +f 24306 24307 24436 +f 24306 24436 24435 +f 24307 24308 24436 +f 24308 24437 24436 +f 24308 24309 24438 +f 24308 24438 24437 +f 24309 24310 24438 +f 24310 24439 24438 +f 24310 24311 24440 +f 24310 24440 24439 +f 24311 24312 24440 +f 24312 24441 24440 +f 24312 24313 24442 +f 24312 24442 24441 +f 24313 24314 24442 +f 24314 24443 24442 +f 24314 24315 24444 +f 24314 24444 24443 +f 24315 24316 24444 +f 24316 24445 24444 +f 24316 24317 24446 +f 24316 24446 24445 +f 24317 24318 24446 +f 24318 24447 24446 +f 24318 24319 24448 +f 24318 24448 24447 +f 24319 24320 24448 +f 24320 24449 24448 +f 24320 24321 24450 +f 24320 24450 24449 +f 24321 24322 24450 +f 24322 24451 24450 +f 24322 24323 24452 +f 24322 24452 24451 +f 24323 24324 24452 +f 24324 24453 24452 +f 24324 24325 24454 +f 24324 24454 24453 +f 24325 24326 24454 +f 24326 24455 24454 +f 24326 24327 24456 +f 24326 24456 24455 +f 24327 24328 24456 +f 24328 24457 24456 +f 24328 24329 24458 +f 24328 24458 24457 +f 24329 24330 24458 +f 24330 24459 24458 +f 24330 24331 24460 +f 24330 24460 24459 +f 24331 24332 24460 +f 24332 24461 24460 +f 24332 24333 24462 +f 24332 24462 24461 +f 24334 24335 24464 +f 24334 24464 24463 +f 24335 24336 24464 +f 24336 24465 24464 +f 24336 24337 24466 +f 24336 24466 24465 +f 24337 24338 24466 +f 24338 24467 24466 +f 24338 24339 24468 +f 24338 24468 24467 +f 24339 24340 24468 +f 24340 24469 24468 +f 24340 24341 24470 +f 24340 24470 24469 +f 24341 24342 24470 +f 24342 24471 24470 +f 24342 24343 24472 +f 24342 24472 24471 +f 24343 24344 24472 +f 24344 24473 24472 +f 24344 24345 24474 +f 24344 24474 24473 +f 24345 24346 24474 +f 24346 24475 24474 +f 
24346 24347 24476 +f 24346 24476 24475 +f 24347 24348 24476 +f 24348 24477 24476 +f 24348 24349 24478 +f 24348 24478 24477 +f 24349 24350 24478 +f 24350 24479 24478 +f 24350 24351 24480 +f 24350 24480 24479 +f 24351 24352 24480 +f 24352 24481 24480 +f 24352 24353 24482 +f 24352 24482 24481 +f 24353 24354 24482 +f 24354 24483 24482 +f 24354 24355 24484 +f 24354 24484 24483 +f 24355 24356 24484 +f 24356 24485 24484 +f 24356 24357 24486 +f 24356 24486 24485 +f 24357 24358 24486 +f 24358 24487 24486 +f 24358 24359 24488 +f 24358 24488 24487 +f 24359 24360 24488 +f 24360 24489 24488 +f 24360 24361 24490 +f 24360 24490 24489 +f 24361 24362 24490 +f 24362 24491 24490 +f 24362 24363 24492 +f 24362 24492 24491 +f 24363 24364 24492 +f 24364 24493 24492 +f 24364 24365 24494 +f 24364 24494 24493 +f 24365 24366 24494 +f 24366 24495 24494 +f 24366 24367 24496 +f 24366 24496 24495 +f 24367 24368 24496 +f 24368 24497 24496 +f 24368 24369 24498 +f 24368 24498 24497 +f 24369 24370 24498 +f 24370 24499 24498 +f 24370 24371 24500 +f 24370 24500 24499 +f 24371 24372 24500 +f 24372 24501 24500 +f 24372 24373 24502 +f 24372 24502 24501 +f 24373 24374 24502 +f 24374 24503 24502 +f 24374 24375 24504 +f 24374 24504 24503 +f 24375 24376 24504 +f 24376 24505 24504 +f 24376 24377 24506 +f 24376 24506 24505 +f 24377 24378 24506 +f 24378 24507 24506 +f 24378 24379 24508 +f 24378 24508 24507 +f 24379 24380 24508 +f 24380 24509 24508 +f 24380 24381 24510 +f 24380 24510 24509 +f 24381 24382 24510 +f 24382 24511 24510 +f 24382 24383 24512 +f 24382 24512 24511 +f 24383 24384 24512 +f 24384 24513 24512 +f 24384 24385 24514 +f 24384 24514 24513 +f 24385 24386 24514 +f 24386 24515 24514 +f 24386 24387 24516 +f 24386 24516 24515 +f 24387 24388 24516 +f 24388 24517 24516 +f 24388 24389 24518 +f 24388 24518 24517 +f 24389 24390 24518 +f 24390 24519 24518 +f 24390 24391 24520 +f 24390 24520 24519 +f 24391 24392 24520 +f 24392 24521 24520 +f 24392 24393 24522 +f 24392 24522 24521 +f 24393 24394 24522 +f 24394 24523 24522 +f 24394 24395 24524 +f 24394 24524 24523 +f 24395 24396 24524 +f 24396 24525 24524 +f 24396 24397 24526 +f 24396 24526 24525 +f 24397 24398 24526 +f 24398 24527 24526 +f 24398 24399 24528 +f 24398 24528 24527 +f 24399 24400 24528 +f 24400 24529 24528 +f 24400 24401 24530 +f 24400 24530 24529 +f 24401 24402 24530 +f 24402 24531 24530 +f 24402 24403 24532 +f 24402 24532 24531 +f 24403 24404 24532 +f 24404 24533 24532 +f 24404 24405 24534 +f 24404 24534 24533 +f 24405 24406 24534 +f 24406 24535 24534 +f 24406 24407 24536 +f 24406 24536 24535 +f 24407 24408 24536 +f 24408 24537 24536 +f 24408 24409 24538 +f 24408 24538 24537 +f 24409 24410 24538 +f 24410 24539 24538 +f 24410 24411 24540 +f 24410 24540 24539 +f 24411 24412 24540 +f 24412 24541 24540 +f 24412 24413 24542 +f 24412 24542 24541 +f 24413 24414 24542 +f 24414 24543 24542 +f 24414 24415 24544 +f 24414 24544 24543 +f 24415 24416 24544 +f 24416 24545 24544 +f 24416 24417 24546 +f 24416 24546 24545 +f 24417 24418 24546 +f 24418 24547 24546 +f 24418 24419 24548 +f 24418 24548 24547 +f 24419 24420 24548 +f 24420 24549 24548 +f 24420 24421 24550 +f 24420 24550 24549 +f 24421 24422 24550 +f 24422 24551 24550 +f 24422 24423 24552 +f 24422 24552 24551 +f 24423 24424 24552 +f 24424 24553 24552 +f 24424 24425 24554 +f 24424 24554 24553 +f 24425 24426 24554 +f 24426 24555 24554 +f 24426 24427 24556 +f 24426 24556 24555 +f 24427 24428 24556 +f 24428 24557 24556 +f 24428 24429 24558 +f 24428 24558 24557 +f 24429 24430 24558 +f 24430 24559 24558 +f 24430 24431 24560 +f 24430 
24560 24559 +f 24431 24432 24560 +f 24432 24561 24560 +f 24432 24433 24562 +f 24432 24562 24561 +f 24433 24434 24562 +f 24434 24563 24562 +f 24434 24435 24564 +f 24434 24564 24563 +f 24435 24436 24564 +f 24436 24565 24564 +f 24436 24437 24566 +f 24436 24566 24565 +f 24437 24438 24566 +f 24438 24567 24566 +f 24438 24439 24568 +f 24438 24568 24567 +f 24439 24440 24568 +f 24440 24569 24568 +f 24440 24441 24570 +f 24440 24570 24569 +f 24441 24442 24570 +f 24442 24571 24570 +f 24442 24443 24572 +f 24442 24572 24571 +f 24443 24444 24572 +f 24444 24573 24572 +f 24444 24445 24574 +f 24444 24574 24573 +f 24445 24446 24574 +f 24446 24575 24574 +f 24446 24447 24576 +f 24446 24576 24575 +f 24447 24448 24576 +f 24448 24577 24576 +f 24448 24449 24578 +f 24448 24578 24577 +f 24449 24450 24578 +f 24450 24579 24578 +f 24450 24451 24580 +f 24450 24580 24579 +f 24451 24452 24580 +f 24452 24581 24580 +f 24452 24453 24582 +f 24452 24582 24581 +f 24453 24454 24582 +f 24454 24583 24582 +f 24454 24455 24584 +f 24454 24584 24583 +f 24455 24456 24584 +f 24456 24585 24584 +f 24456 24457 24586 +f 24456 24586 24585 +f 24457 24458 24586 +f 24458 24587 24586 +f 24458 24459 24588 +f 24458 24588 24587 +f 24459 24460 24588 +f 24460 24589 24588 +f 24460 24461 24590 +f 24460 24590 24589 +f 24461 24462 24590 +f 24462 24591 24590 +f 24463 24464 24592 +f 24464 24593 24592 +f 24464 24465 24594 +f 24464 24594 24593 +f 24465 24466 24594 +f 24466 24595 24594 +f 24466 24467 24596 +f 24466 24596 24595 +f 24467 24468 24596 +f 24468 24597 24596 +f 24468 24469 24598 +f 24468 24598 24597 +f 24469 24470 24598 +f 24470 24599 24598 +f 24470 24471 24600 +f 24470 24600 24599 +f 24471 24472 24600 +f 24472 24601 24600 +f 24472 24473 24602 +f 24472 24602 24601 +f 24473 24474 24602 +f 24474 24603 24602 +f 24474 24475 24604 +f 24474 24604 24603 +f 24475 24476 24604 +f 24476 24605 24604 +f 24476 24477 24606 +f 24476 24606 24605 +f 24477 24478 24606 +f 24478 24607 24606 +f 24478 24479 24608 +f 24478 24608 24607 +f 24479 24480 24608 +f 24480 24609 24608 +f 24480 24481 24610 +f 24480 24610 24609 +f 24481 24482 24610 +f 24482 24611 24610 +f 24482 24483 24612 +f 24482 24612 24611 +f 24483 24484 24612 +f 24484 24613 24612 +f 24484 24485 24614 +f 24484 24614 24613 +f 24485 24486 24614 +f 24486 24615 24614 +f 24486 24487 24616 +f 24486 24616 24615 +f 24487 24488 24616 +f 24488 24617 24616 +f 24488 24489 24618 +f 24488 24618 24617 +f 24489 24490 24618 +f 24490 24619 24618 +f 24490 24491 24620 +f 24490 24620 24619 +f 24491 24492 24620 +f 24492 24621 24620 +f 24492 24493 24622 +f 24492 24622 24621 +f 24493 24494 24622 +f 24494 24623 24622 +f 24494 24495 24624 +f 24494 24624 24623 +f 24495 24496 24624 +f 24496 24625 24624 +f 24496 24497 24626 +f 24496 24626 24625 +f 24497 24498 24626 +f 24498 24627 24626 +f 24498 24499 24628 +f 24498 24628 24627 +f 24499 24500 24628 +f 24500 24629 24628 +f 24500 24501 24630 +f 24500 24630 24629 +f 24501 24502 24630 +f 24502 24631 24630 +f 24502 24503 24632 +f 24502 24632 24631 +f 24503 24504 24632 +f 24504 24633 24632 +f 24504 24505 24634 +f 24504 24634 24633 +f 24505 24506 24634 +f 24506 24635 24634 +f 24506 24507 24636 +f 24506 24636 24635 +f 24507 24508 24636 +f 24508 24637 24636 +f 24508 24509 24638 +f 24508 24638 24637 +f 24509 24510 24638 +f 24510 24639 24638 +f 24510 24511 24640 +f 24510 24640 24639 +f 24511 24512 24640 +f 24512 24641 24640 +f 24512 24513 24642 +f 24512 24642 24641 +f 24513 24514 24642 +f 24514 24643 24642 +f 24514 24515 24644 +f 24514 24644 24643 +f 24515 24516 24644 +f 24516 24645 24644 +f 24516 24517 
24646 +f 24516 24646 24645 +f 24517 24518 24646 +f 24518 24647 24646 +f 24518 24519 24648 +f 24518 24648 24647 +f 24519 24520 24648 +f 24520 24649 24648 +f 24520 24521 24650 +f 24520 24650 24649 +f 24521 24522 24650 +f 24522 24651 24650 +f 24522 24523 24652 +f 24522 24652 24651 +f 24523 24524 24652 +f 24524 24653 24652 +f 24524 24525 24654 +f 24524 24654 24653 +f 24525 24526 24654 +f 24526 24655 24654 +f 24526 24527 24656 +f 24526 24656 24655 +f 24527 24528 24656 +f 24528 24657 24656 +f 24528 24529 24658 +f 24528 24658 24657 +f 24529 24530 24658 +f 24530 24659 24658 +f 24530 24531 24660 +f 24530 24660 24659 +f 24531 24532 24660 +f 24532 24661 24660 +f 24532 24533 24662 +f 24532 24662 24661 +f 24533 24534 24662 +f 24534 24663 24662 +f 24534 24535 24664 +f 24534 24664 24663 +f 24535 24536 24664 +f 24536 24665 24664 +f 24536 24537 24666 +f 24536 24666 24665 +f 24537 24538 24666 +f 24538 24667 24666 +f 24538 24539 24668 +f 24538 24668 24667 +f 24539 24540 24668 +f 24540 24669 24668 +f 24540 24541 24670 +f 24540 24670 24669 +f 24541 24542 24670 +f 24542 24671 24670 +f 24542 24543 24672 +f 24542 24672 24671 +f 24543 24544 24672 +f 24544 24673 24672 +f 24544 24545 24674 +f 24544 24674 24673 +f 24545 24546 24674 +f 24546 24675 24674 +f 24546 24547 24676 +f 24546 24676 24675 +f 24547 24548 24676 +f 24548 24677 24676 +f 24548 24549 24678 +f 24548 24678 24677 +f 24549 24550 24678 +f 24550 24679 24678 +f 24550 24551 24680 +f 24550 24680 24679 +f 24551 24552 24680 +f 24552 24681 24680 +f 24552 24553 24682 +f 24552 24682 24681 +f 24553 24554 24682 +f 24554 24683 24682 +f 24554 24555 24684 +f 24554 24684 24683 +f 24555 24556 24684 +f 24556 24685 24684 +f 24556 24557 24686 +f 24556 24686 24685 +f 24557 24558 24686 +f 24558 24687 24686 +f 24558 24559 24688 +f 24558 24688 24687 +f 24559 24560 24688 +f 24560 24689 24688 +f 24560 24561 24690 +f 24560 24690 24689 +f 24561 24562 24690 +f 24562 24691 24690 +f 24562 24563 24692 +f 24562 24692 24691 +f 24563 24564 24692 +f 24564 24693 24692 +f 24564 24565 24694 +f 24564 24694 24693 +f 24565 24566 24694 +f 24566 24695 24694 +f 24566 24567 24696 +f 24566 24696 24695 +f 24567 24568 24696 +f 24568 24697 24696 +f 24568 24569 24698 +f 24568 24698 24697 +f 24569 24570 24698 +f 24570 24699 24698 +f 24570 24571 24700 +f 24570 24700 24699 +f 24571 24572 24700 +f 24572 24701 24700 +f 24572 24573 24702 +f 24572 24702 24701 +f 24573 24574 24702 +f 24574 24703 24702 +f 24574 24575 24704 +f 24574 24704 24703 +f 24575 24576 24704 +f 24576 24705 24704 +f 24576 24577 24706 +f 24576 24706 24705 +f 24577 24578 24706 +f 24578 24707 24706 +f 24578 24579 24708 +f 24578 24708 24707 +f 24579 24580 24708 +f 24580 24709 24708 +f 24580 24581 24710 +f 24580 24710 24709 +f 24581 24582 24710 +f 24582 24711 24710 +f 24582 24583 24712 +f 24582 24712 24711 +f 24583 24584 24712 +f 24584 24713 24712 +f 24584 24585 24714 +f 24584 24714 24713 +f 24585 24586 24714 +f 24586 24715 24714 +f 24586 24587 24716 +f 24586 24716 24715 +f 24587 24588 24716 +f 24588 24717 24716 +f 24588 24589 24718 +f 24588 24718 24717 +f 24589 24590 24718 +f 24590 24719 24718 +f 24590 24591 24720 +f 24590 24720 24719 +f 24592 24593 24722 +f 24592 24722 24721 +f 24593 24594 24722 +f 24594 24723 24722 +f 24594 24595 24724 +f 24594 24724 24723 +f 24595 24596 24724 +f 24596 24725 24724 +f 24596 24597 24726 +f 24596 24726 24725 +f 24597 24598 24726 +f 24598 24727 24726 +f 24598 24599 24728 +f 24598 24728 24727 +f 24599 24600 24728 +f 24600 24729 24728 +f 24600 24601 24730 +f 24600 24730 24729 +f 24601 24602 24730 +f 24602 24731 24730 
+f 24602 24603 24732 +f 24602 24732 24731 +f 24603 24604 24732 +f 24604 24733 24732 +f 24604 24605 24734 +f 24604 24734 24733 +f 24605 24606 24734 +f 24606 24735 24734 +f 24606 24607 24736 +f 24606 24736 24735 +f 24607 24608 24736 +f 24608 24737 24736 +f 24608 24609 24738 +f 24608 24738 24737 +f 24609 24610 24738 +f 24610 24739 24738 +f 24610 24611 24740 +f 24610 24740 24739 +f 24611 24612 24740 +f 24612 24741 24740 +f 24612 24613 24742 +f 24612 24742 24741 +f 24613 24614 24742 +f 24614 24743 24742 +f 24614 24615 24744 +f 24614 24744 24743 +f 24615 24616 24744 +f 24616 24745 24744 +f 24616 24617 24746 +f 24616 24746 24745 +f 24617 24618 24746 +f 24618 24747 24746 +f 24618 24619 24748 +f 24618 24748 24747 +f 24619 24620 24748 +f 24620 24749 24748 +f 24620 24621 24750 +f 24620 24750 24749 +f 24621 24622 24750 +f 24622 24751 24750 +f 24622 24623 24752 +f 24622 24752 24751 +f 24623 24624 24752 +f 24624 24753 24752 +f 24624 24625 24754 +f 24624 24754 24753 +f 24625 24626 24754 +f 24626 24755 24754 +f 24626 24627 24756 +f 24626 24756 24755 +f 24627 24628 24756 +f 24628 24757 24756 +f 24628 24629 24758 +f 24628 24758 24757 +f 24629 24630 24758 +f 24630 24759 24758 +f 24630 24631 24760 +f 24630 24760 24759 +f 24631 24632 24760 +f 24632 24761 24760 +f 24632 24633 24762 +f 24632 24762 24761 +f 24633 24634 24762 +f 24634 24763 24762 +f 24634 24635 24764 +f 24634 24764 24763 +f 24635 24636 24764 +f 24636 24765 24764 +f 24636 24637 24766 +f 24636 24766 24765 +f 24637 24638 24766 +f 24638 24767 24766 +f 24638 24639 24768 +f 24638 24768 24767 +f 24639 24640 24768 +f 24640 24769 24768 +f 24640 24641 24770 +f 24640 24770 24769 +f 24641 24642 24770 +f 24642 24771 24770 +f 24642 24643 24772 +f 24642 24772 24771 +f 24643 24644 24772 +f 24644 24773 24772 +f 24644 24645 24774 +f 24644 24774 24773 +f 24645 24646 24774 +f 24646 24775 24774 +f 24646 24647 24776 +f 24646 24776 24775 +f 24647 24648 24776 +f 24648 24777 24776 +f 24648 24649 24778 +f 24648 24778 24777 +f 24649 24650 24778 +f 24650 24779 24778 +f 24650 24651 24780 +f 24650 24780 24779 +f 24651 24652 24780 +f 24652 24781 24780 +f 24652 24653 24782 +f 24652 24782 24781 +f 24653 24654 24782 +f 24654 24783 24782 +f 24654 24655 24784 +f 24654 24784 24783 +f 24655 24656 24784 +f 24656 24785 24784 +f 24656 24657 24786 +f 24656 24786 24785 +f 24657 24658 24786 +f 24658 24787 24786 +f 24658 24659 24788 +f 24658 24788 24787 +f 24659 24660 24788 +f 24660 24789 24788 +f 24660 24661 24790 +f 24660 24790 24789 +f 24661 24662 24790 +f 24662 24791 24790 +f 24662 24663 24792 +f 24662 24792 24791 +f 24663 24664 24792 +f 24664 24793 24792 +f 24664 24665 24794 +f 24664 24794 24793 +f 24665 24666 24794 +f 24666 24795 24794 +f 24666 24667 24796 +f 24666 24796 24795 +f 24667 24668 24796 +f 24668 24797 24796 +f 24668 24669 24798 +f 24668 24798 24797 +f 24669 24670 24798 +f 24670 24799 24798 +f 24670 24671 24800 +f 24670 24800 24799 +f 24671 24672 24800 +f 24672 24801 24800 +f 24672 24673 24802 +f 24672 24802 24801 +f 24673 24674 24802 +f 24674 24803 24802 +f 24674 24675 24804 +f 24674 24804 24803 +f 24675 24676 24804 +f 24676 24805 24804 +f 24676 24677 24806 +f 24676 24806 24805 +f 24677 24678 24806 +f 24678 24807 24806 +f 24678 24679 24808 +f 24678 24808 24807 +f 24679 24680 24808 +f 24680 24809 24808 +f 24680 24681 24810 +f 24680 24810 24809 +f 24681 24682 24810 +f 24682 24811 24810 +f 24682 24683 24812 +f 24682 24812 24811 +f 24683 24684 24812 +f 24684 24813 24812 +f 24684 24685 24814 +f 24684 24814 24813 +f 24685 24686 24814 +f 24686 24815 24814 +f 24686 24687 24816 +f 
24686 24816 24815 +f 24687 24688 24816 +f 24688 24817 24816 +f 24688 24689 24818 +f 24688 24818 24817 +f 24689 24690 24818 +f 24690 24819 24818 +f 24690 24691 24820 +f 24690 24820 24819 +f 24691 24692 24820 +f 24692 24821 24820 +f 24692 24693 24822 +f 24692 24822 24821 +f 24693 24694 24822 +f 24694 24823 24822 +f 24694 24695 24824 +f 24694 24824 24823 +f 24695 24696 24824 +f 24696 24825 24824 +f 24696 24697 24826 +f 24696 24826 24825 +f 24697 24698 24826 +f 24698 24827 24826 +f 24698 24699 24828 +f 24698 24828 24827 +f 24699 24700 24828 +f 24700 24829 24828 +f 24700 24701 24830 +f 24700 24830 24829 +f 24701 24702 24830 +f 24702 24831 24830 +f 24702 24703 24832 +f 24702 24832 24831 +f 24703 24704 24832 +f 24704 24833 24832 +f 24704 24705 24834 +f 24704 24834 24833 +f 24705 24706 24834 +f 24706 24835 24834 +f 24706 24707 24836 +f 24706 24836 24835 +f 24707 24708 24836 +f 24708 24837 24836 +f 24708 24709 24838 +f 24708 24838 24837 +f 24709 24710 24838 +f 24710 24839 24838 +f 24710 24711 24840 +f 24710 24840 24839 +f 24711 24712 24840 +f 24712 24841 24840 +f 24712 24713 24842 +f 24712 24842 24841 +f 24713 24714 24842 +f 24714 24843 24842 +f 24714 24715 24844 +f 24714 24844 24843 +f 24715 24716 24844 +f 24716 24845 24844 +f 24716 24717 24846 +f 24716 24846 24845 +f 24717 24718 24846 +f 24718 24847 24846 +f 24718 24719 24848 +f 24718 24848 24847 +f 24719 24720 24848 +f 24720 24849 24848 +f 24721 24722 24850 +f 24722 24851 24850 +f 24722 24723 24852 +f 24722 24852 24851 +f 24723 24724 24852 +f 24724 24853 24852 +f 24724 24725 24854 +f 24724 24854 24853 +f 24725 24726 24854 +f 24726 24855 24854 +f 24726 24727 24856 +f 24726 24856 24855 +f 24727 24728 24856 +f 24728 24857 24856 +f 24728 24729 24858 +f 24728 24858 24857 +f 24729 24730 24858 +f 24730 24859 24858 +f 24730 24731 24860 +f 24730 24860 24859 +f 24731 24732 24860 +f 24732 24861 24860 +f 24732 24733 24862 +f 24732 24862 24861 +f 24733 24734 24862 +f 24734 24863 24862 +f 24734 24735 24864 +f 24734 24864 24863 +f 24735 24736 24864 +f 24736 24865 24864 +f 24736 24737 24866 +f 24736 24866 24865 +f 24737 24738 24866 +f 24738 24867 24866 +f 24738 24739 24868 +f 24738 24868 24867 +f 24739 24740 24868 +f 24740 24869 24868 +f 24740 24741 24870 +f 24740 24870 24869 +f 24741 24742 24870 +f 24742 24871 24870 +f 24742 24743 24872 +f 24742 24872 24871 +f 24743 24744 24872 +f 24744 24873 24872 +f 24744 24745 24874 +f 24744 24874 24873 +f 24745 24746 24874 +f 24746 24875 24874 +f 24746 24747 24876 +f 24746 24876 24875 +f 24747 24748 24876 +f 24748 24877 24876 +f 24748 24749 24878 +f 24748 24878 24877 +f 24749 24750 24878 +f 24750 24879 24878 +f 24750 24751 24880 +f 24750 24880 24879 +f 24751 24752 24880 +f 24752 24881 24880 +f 24752 24753 24882 +f 24752 24882 24881 +f 24753 24754 24882 +f 24754 24883 24882 +f 24754 24755 24884 +f 24754 24884 24883 +f 24755 24756 24884 +f 24756 24885 24884 +f 24756 24757 24886 +f 24756 24886 24885 +f 24757 24758 24886 +f 24758 24887 24886 +f 24758 24759 24888 +f 24758 24888 24887 +f 24759 24760 24888 +f 24760 24889 24888 +f 24760 24761 24890 +f 24760 24890 24889 +f 24761 24762 24890 +f 24762 24891 24890 +f 24762 24763 24892 +f 24762 24892 24891 +f 24763 24764 24892 +f 24764 24893 24892 +f 24764 24765 24894 +f 24764 24894 24893 +f 24765 24766 24894 +f 24766 24895 24894 +f 24766 24767 24896 +f 24766 24896 24895 +f 24767 24768 24896 +f 24768 24897 24896 +f 24768 24769 24898 +f 24768 24898 24897 +f 24769 24770 24898 +f 24770 24899 24898 +f 24770 24771 24900 +f 24770 24900 24899 +f 24771 24772 24900 +f 24772 24901 24900 +f 24772 
24773 24902 +f 24772 24902 24901 +f 24773 24774 24902 +f 24774 24903 24902 +f 24774 24775 24904 +f 24774 24904 24903 +f 24775 24776 24904 +f 24776 24905 24904 +f 24776 24777 24906 +f 24776 24906 24905 +f 24777 24778 24906 +f 24778 24907 24906 +f 24778 24779 24908 +f 24778 24908 24907 +f 24779 24780 24908 +f 24780 24909 24908 +f 24780 24781 24910 +f 24780 24910 24909 +f 24781 24782 24910 +f 24782 24911 24910 +f 24782 24783 24912 +f 24782 24912 24911 +f 24783 24784 24912 +f 24784 24913 24912 +f 24784 24785 24914 +f 24784 24914 24913 +f 24785 24786 24914 +f 24786 24915 24914 +f 24786 24787 24916 +f 24786 24916 24915 +f 24787 24788 24916 +f 24788 24917 24916 +f 24788 24789 24918 +f 24788 24918 24917 +f 24789 24790 24918 +f 24790 24919 24918 +f 24790 24791 24920 +f 24790 24920 24919 +f 24791 24792 24920 +f 24792 24921 24920 +f 24792 24793 24922 +f 24792 24922 24921 +f 24793 24794 24922 +f 24794 24923 24922 +f 24794 24795 24924 +f 24794 24924 24923 +f 24795 24796 24924 +f 24796 24925 24924 +f 24796 24797 24926 +f 24796 24926 24925 +f 24797 24798 24926 +f 24798 24927 24926 +f 24798 24799 24928 +f 24798 24928 24927 +f 24799 24800 24928 +f 24800 24929 24928 +f 24800 24801 24930 +f 24800 24930 24929 +f 24801 24802 24930 +f 24802 24931 24930 +f 24802 24803 24932 +f 24802 24932 24931 +f 24803 24804 24932 +f 24804 24933 24932 +f 24804 24805 24934 +f 24804 24934 24933 +f 24805 24806 24934 +f 24806 24935 24934 +f 24806 24807 24936 +f 24806 24936 24935 +f 24807 24808 24936 +f 24808 24937 24936 +f 24808 24809 24938 +f 24808 24938 24937 +f 24809 24810 24938 +f 24810 24939 24938 +f 24810 24811 24940 +f 24810 24940 24939 +f 24811 24812 24940 +f 24812 24941 24940 +f 24812 24813 24942 +f 24812 24942 24941 +f 24813 24814 24942 +f 24814 24943 24942 +f 24814 24815 24944 +f 24814 24944 24943 +f 24815 24816 24944 +f 24816 24945 24944 +f 24816 24817 24946 +f 24816 24946 24945 +f 24817 24818 24946 +f 24818 24947 24946 +f 24818 24819 24948 +f 24818 24948 24947 +f 24819 24820 24948 +f 24820 24949 24948 +f 24820 24821 24950 +f 24820 24950 24949 +f 24821 24822 24950 +f 24822 24951 24950 +f 24822 24823 24952 +f 24822 24952 24951 +f 24823 24824 24952 +f 24824 24953 24952 +f 24824 24825 24954 +f 24824 24954 24953 +f 24825 24826 24954 +f 24826 24955 24954 +f 24826 24827 24956 +f 24826 24956 24955 +f 24827 24828 24956 +f 24828 24957 24956 +f 24828 24829 24958 +f 24828 24958 24957 +f 24829 24830 24958 +f 24830 24959 24958 +f 24830 24831 24960 +f 24830 24960 24959 +f 24831 24832 24960 +f 24832 24961 24960 +f 24832 24833 24962 +f 24832 24962 24961 +f 24833 24834 24962 +f 24834 24963 24962 +f 24834 24835 24964 +f 24834 24964 24963 +f 24835 24836 24964 +f 24836 24965 24964 +f 24836 24837 24966 +f 24836 24966 24965 +f 24837 24838 24966 +f 24838 24967 24966 +f 24838 24839 24968 +f 24838 24968 24967 +f 24839 24840 24968 +f 24840 24969 24968 +f 24840 24841 24970 +f 24840 24970 24969 +f 24841 24842 24970 +f 24842 24971 24970 +f 24842 24843 24972 +f 24842 24972 24971 +f 24843 24844 24972 +f 24844 24973 24972 +f 24844 24845 24974 +f 24844 24974 24973 +f 24845 24846 24974 +f 24846 24975 24974 +f 24846 24847 24976 +f 24846 24976 24975 +f 24847 24848 24976 +f 24848 24977 24976 +f 24848 24849 24978 +f 24848 24978 24977 +f 24850 24851 24980 +f 24850 24980 24979 +f 24851 24852 24980 +f 24852 24981 24980 +f 24852 24853 24982 +f 24852 24982 24981 +f 24853 24854 24982 +f 24854 24983 24982 +f 24854 24855 24984 +f 24854 24984 24983 +f 24855 24856 24984 +f 24856 24985 24984 +f 24856 24857 24986 +f 24856 24986 24985 +f 24857 24858 24986 +f 24858 24987 
24986 +f 24858 24859 24988 +f 24858 24988 24987 +f 24859 24860 24988 +f 24860 24989 24988 +f 24860 24861 24990 +f 24860 24990 24989 +f 24861 24862 24990 +f 24862 24991 24990 +f 24862 24863 24992 +f 24862 24992 24991 +f 24863 24864 24992 +f 24864 24993 24992 +f 24864 24865 24994 +f 24864 24994 24993 +f 24865 24866 24994 +f 24866 24995 24994 +f 24866 24867 24996 +f 24866 24996 24995 +f 24867 24868 24996 +f 24868 24997 24996 +f 24868 24869 24998 +f 24868 24998 24997 +f 24869 24870 24998 +f 24870 24999 24998 +f 24870 24871 25000 +f 24870 25000 24999 +f 24871 24872 25000 +f 24872 25001 25000 +f 24872 24873 25002 +f 24872 25002 25001 +f 24873 24874 25002 +f 24874 25003 25002 +f 24874 24875 25004 +f 24874 25004 25003 +f 24875 24876 25004 +f 24876 25005 25004 +f 24876 24877 25006 +f 24876 25006 25005 +f 24877 24878 25006 +f 24878 25007 25006 +f 24878 24879 25008 +f 24878 25008 25007 +f 24879 24880 25008 +f 24880 25009 25008 +f 24880 24881 25010 +f 24880 25010 25009 +f 24881 24882 25010 +f 24882 25011 25010 +f 24882 24883 25012 +f 24882 25012 25011 +f 24883 24884 25012 +f 24884 25013 25012 +f 24884 24885 25014 +f 24884 25014 25013 +f 24885 24886 25014 +f 24886 25015 25014 +f 24886 24887 25016 +f 24886 25016 25015 +f 24887 24888 25016 +f 24888 25017 25016 +f 24888 24889 25018 +f 24888 25018 25017 +f 24889 24890 25018 +f 24890 25019 25018 +f 24890 24891 25020 +f 24890 25020 25019 +f 24891 24892 25020 +f 24892 25021 25020 +f 24892 24893 25022 +f 24892 25022 25021 +f 24893 24894 25022 +f 24894 25023 25022 +f 24894 24895 25024 +f 24894 25024 25023 +f 24895 24896 25024 +f 24896 25025 25024 +f 24896 24897 25026 +f 24896 25026 25025 +f 24897 24898 25026 +f 24898 25027 25026 +f 24898 24899 25028 +f 24898 25028 25027 +f 24899 24900 25028 +f 24900 25029 25028 +f 24900 24901 25030 +f 24900 25030 25029 +f 24901 24902 25030 +f 24902 25031 25030 +f 24902 24903 25032 +f 24902 25032 25031 +f 24903 24904 25032 +f 24904 25033 25032 +f 24904 24905 25034 +f 24904 25034 25033 +f 24905 24906 25034 +f 24906 25035 25034 +f 24906 24907 25036 +f 24906 25036 25035 +f 24907 24908 25036 +f 24908 25037 25036 +f 24908 24909 25038 +f 24908 25038 25037 +f 24909 24910 25038 +f 24910 25039 25038 +f 24910 24911 25040 +f 24910 25040 25039 +f 24911 24912 25040 +f 24912 25041 25040 +f 24912 24913 25042 +f 24912 25042 25041 +f 24913 24914 25042 +f 24914 25043 25042 +f 24914 24915 25044 +f 24914 25044 25043 +f 24915 24916 25044 +f 24916 25045 25044 +f 24916 24917 25046 +f 24916 25046 25045 +f 24917 24918 25046 +f 24918 25047 25046 +f 24918 24919 25048 +f 24918 25048 25047 +f 24919 24920 25048 +f 24920 25049 25048 +f 24920 24921 25050 +f 24920 25050 25049 +f 24921 24922 25050 +f 24922 25051 25050 +f 24922 24923 25052 +f 24922 25052 25051 +f 24923 24924 25052 +f 24924 25053 25052 +f 24924 24925 25054 +f 24924 25054 25053 +f 24925 24926 25054 +f 24926 25055 25054 +f 24926 24927 25056 +f 24926 25056 25055 +f 24927 24928 25056 +f 24928 25057 25056 +f 24928 24929 25058 +f 24928 25058 25057 +f 24929 24930 25058 +f 24930 25059 25058 +f 24930 24931 25060 +f 24930 25060 25059 +f 24931 24932 25060 +f 24932 25061 25060 +f 24932 24933 25062 +f 24932 25062 25061 +f 24933 24934 25062 +f 24934 25063 25062 +f 24934 24935 25064 +f 24934 25064 25063 +f 24935 24936 25064 +f 24936 25065 25064 +f 24936 24937 25066 +f 24936 25066 25065 +f 24937 24938 25066 +f 24938 25067 25066 +f 24938 24939 25068 +f 24938 25068 25067 +f 24939 24940 25068 +f 24940 25069 25068 +f 24940 24941 25070 +f 24940 25070 25069 +f 24941 24942 25070 +f 24942 25071 25070 +f 24942 24943 25072 
+f 24942 25072 25071 +f 24943 24944 25072 +f 24944 25073 25072 +f 24944 24945 25074 +f 24944 25074 25073 +f 24945 24946 25074 +f 24946 25075 25074 +f 24946 24947 25076 +f 24946 25076 25075 +f 24947 24948 25076 +f 24948 25077 25076 +f 24948 24949 25078 +f 24948 25078 25077 +f 24949 24950 25078 +f 24950 25079 25078 +f 24950 24951 25080 +f 24950 25080 25079 +f 24951 24952 25080 +f 24952 25081 25080 +f 24952 24953 25082 +f 24952 25082 25081 +f 24953 24954 25082 +f 24954 25083 25082 +f 24954 24955 25084 +f 24954 25084 25083 +f 24955 24956 25084 +f 24956 25085 25084 +f 24956 24957 25086 +f 24956 25086 25085 +f 24957 24958 25086 +f 24958 25087 25086 +f 24958 24959 25088 +f 24958 25088 25087 +f 24959 24960 25088 +f 24960 25089 25088 +f 24960 24961 25090 +f 24960 25090 25089 +f 24961 24962 25090 +f 24962 25091 25090 +f 24962 24963 25092 +f 24962 25092 25091 +f 24963 24964 25092 +f 24964 25093 25092 +f 24964 24965 25094 +f 24964 25094 25093 +f 24965 24966 25094 +f 24966 25095 25094 +f 24966 24967 25096 +f 24966 25096 25095 +f 24967 24968 25096 +f 24968 25097 25096 +f 24968 24969 25098 +f 24968 25098 25097 +f 24969 24970 25098 +f 24970 25099 25098 +f 24970 24971 25100 +f 24970 25100 25099 +f 24971 24972 25100 +f 24972 25101 25100 +f 24972 24973 25102 +f 24972 25102 25101 +f 24973 24974 25102 +f 24974 25103 25102 +f 24974 24975 25104 +f 24974 25104 25103 +f 24975 24976 25104 +f 24976 25105 25104 +f 24976 24977 25106 +f 24976 25106 25105 +f 24977 24978 25106 +f 24978 25107 25106 +f 24979 24980 25108 +f 24980 25109 25108 +f 24980 24981 25110 +f 24980 25110 25109 +f 24981 24982 25110 +f 24982 25111 25110 +f 24982 24983 25112 +f 24982 25112 25111 +f 24983 24984 25112 +f 24984 25113 25112 +f 24984 24985 25114 +f 24984 25114 25113 +f 24985 24986 25114 +f 24986 25115 25114 +f 24986 24987 25116 +f 24986 25116 25115 +f 24987 24988 25116 +f 24988 25117 25116 +f 24988 24989 25118 +f 24988 25118 25117 +f 24989 24990 25118 +f 24990 25119 25118 +f 24990 24991 25120 +f 24990 25120 25119 +f 24991 24992 25120 +f 24992 25121 25120 +f 24992 24993 25122 +f 24992 25122 25121 +f 24993 24994 25122 +f 24994 25123 25122 +f 24994 24995 25124 +f 24994 25124 25123 +f 24995 24996 25124 +f 24996 25125 25124 +f 24996 24997 25126 +f 24996 25126 25125 +f 24997 24998 25126 +f 24998 25127 25126 +f 24998 24999 25128 +f 24998 25128 25127 +f 24999 25000 25128 +f 25000 25129 25128 +f 25000 25001 25130 +f 25000 25130 25129 +f 25001 25002 25130 +f 25002 25131 25130 +f 25002 25003 25132 +f 25002 25132 25131 +f 25003 25004 25132 +f 25004 25133 25132 +f 25004 25005 25134 +f 25004 25134 25133 +f 25005 25006 25134 +f 25006 25135 25134 +f 25006 25007 25136 +f 25006 25136 25135 +f 25007 25008 25136 +f 25008 25137 25136 +f 25008 25009 25138 +f 25008 25138 25137 +f 25009 25010 25138 +f 25010 25139 25138 +f 25010 25011 25140 +f 25010 25140 25139 +f 25011 25012 25140 +f 25012 25141 25140 +f 25012 25013 25142 +f 25012 25142 25141 +f 25013 25014 25142 +f 25014 25143 25142 +f 25014 25015 25144 +f 25014 25144 25143 +f 25015 25016 25144 +f 25016 25145 25144 +f 25016 25017 25146 +f 25016 25146 25145 +f 25017 25018 25146 +f 25018 25147 25146 +f 25018 25019 25148 +f 25018 25148 25147 +f 25019 25020 25148 +f 25020 25149 25148 +f 25020 25021 25150 +f 25020 25150 25149 +f 25021 25022 25150 +f 25022 25151 25150 +f 25022 25023 25152 +f 25022 25152 25151 +f 25023 25024 25152 +f 25024 25153 25152 +f 25024 25025 25154 +f 25024 25154 25153 +f 25025 25026 25154 +f 25026 25155 25154 +f 25026 25027 25156 +f 25026 25156 25155 +f 25027 25028 25156 +f 25028 25157 25156 +f 
25028 25029 25158 +f 25028 25158 25157 +f 25029 25030 25158 +f 25030 25159 25158 +f 25030 25031 25160 +f 25030 25160 25159 +f 25031 25032 25160 +f 25032 25161 25160 +f 25032 25033 25162 +f 25032 25162 25161 +f 25033 25034 25162 +f 25034 25163 25162 +f 25034 25035 25164 +f 25034 25164 25163 +f 25035 25036 25164 +f 25036 25165 25164 +f 25036 25037 25166 +f 25036 25166 25165 +f 25037 25038 25166 +f 25038 25167 25166 +f 25038 25039 25168 +f 25038 25168 25167 +f 25039 25040 25168 +f 25040 25169 25168 +f 25040 25041 25170 +f 25040 25170 25169 +f 25041 25042 25170 +f 25042 25171 25170 +f 25042 25043 25172 +f 25042 25172 25171 +f 25043 25044 25172 +f 25044 25173 25172 +f 25044 25045 25174 +f 25044 25174 25173 +f 25045 25046 25174 +f 25046 25175 25174 +f 25046 25047 25176 +f 25046 25176 25175 +f 25047 25048 25176 +f 25048 25177 25176 +f 25048 25049 25178 +f 25048 25178 25177 +f 25049 25050 25178 +f 25050 25179 25178 +f 25050 25051 25180 +f 25050 25180 25179 +f 25051 25052 25180 +f 25052 25181 25180 +f 25052 25053 25182 +f 25052 25182 25181 +f 25053 25054 25182 +f 25054 25183 25182 +f 25054 25055 25184 +f 25054 25184 25183 +f 25055 25056 25184 +f 25056 25185 25184 +f 25056 25057 25186 +f 25056 25186 25185 +f 25057 25058 25186 +f 25058 25187 25186 +f 25058 25059 25188 +f 25058 25188 25187 +f 25059 25060 25188 +f 25060 25189 25188 +f 25060 25061 25190 +f 25060 25190 25189 +f 25061 25062 25190 +f 25062 25191 25190 +f 25062 25063 25192 +f 25062 25192 25191 +f 25063 25064 25192 +f 25064 25193 25192 +f 25064 25065 25194 +f 25064 25194 25193 +f 25065 25066 25194 +f 25066 25195 25194 +f 25066 25067 25196 +f 25066 25196 25195 +f 25067 25068 25196 +f 25068 25197 25196 +f 25068 25069 25198 +f 25068 25198 25197 +f 25069 25070 25198 +f 25070 25199 25198 +f 25070 25071 25200 +f 25070 25200 25199 +f 25071 25072 25200 +f 25072 25201 25200 +f 25072 25073 25202 +f 25072 25202 25201 +f 25073 25074 25202 +f 25074 25203 25202 +f 25074 25075 25204 +f 25074 25204 25203 +f 25075 25076 25204 +f 25076 25205 25204 +f 25076 25077 25206 +f 25076 25206 25205 +f 25077 25078 25206 +f 25078 25207 25206 +f 25078 25079 25208 +f 25078 25208 25207 +f 25079 25080 25208 +f 25080 25209 25208 +f 25080 25081 25210 +f 25080 25210 25209 +f 25081 25082 25210 +f 25082 25211 25210 +f 25082 25083 25212 +f 25082 25212 25211 +f 25083 25084 25212 +f 25084 25213 25212 +f 25084 25085 25214 +f 25084 25214 25213 +f 25085 25086 25214 +f 25086 25215 25214 +f 25086 25087 25216 +f 25086 25216 25215 +f 25087 25088 25216 +f 25088 25217 25216 +f 25088 25089 25218 +f 25088 25218 25217 +f 25089 25090 25218 +f 25090 25219 25218 +f 25090 25091 25220 +f 25090 25220 25219 +f 25091 25092 25220 +f 25092 25221 25220 +f 25092 25093 25222 +f 25092 25222 25221 +f 25093 25094 25222 +f 25094 25223 25222 +f 25094 25095 25224 +f 25094 25224 25223 +f 25095 25096 25224 +f 25096 25225 25224 +f 25096 25097 25226 +f 25096 25226 25225 +f 25097 25098 25226 +f 25098 25227 25226 +f 25098 25099 25228 +f 25098 25228 25227 +f 25099 25100 25228 +f 25100 25229 25228 +f 25100 25101 25230 +f 25100 25230 25229 +f 25101 25102 25230 +f 25102 25231 25230 +f 25102 25103 25232 +f 25102 25232 25231 +f 25103 25104 25232 +f 25104 25233 25232 +f 25104 25105 25234 +f 25104 25234 25233 +f 25105 25106 25234 +f 25106 25235 25234 +f 25106 25107 25236 +f 25106 25236 25235 +f 25108 25109 25238 +f 25108 25238 25237 +f 25109 25110 25238 +f 25110 25239 25238 +f 25110 25111 25240 +f 25110 25240 25239 +f 25111 25112 25240 +f 25112 25241 25240 +f 25112 25113 25242 +f 25112 25242 25241 +f 25113 25114 25242 +f 25114 
25243 25242
[… several thousand additional OBJ face-index lines omitted — triangulated mesh faces of the form "f v1 v2 v3" over vertex indices ~25114–26946 …]
+f 26825
26826 26946 +f 26826 26947 26946 +f 26826 26827 26948 +f 26826 26948 26947 +f 26827 26828 26948 +f 26828 26949 26948 +f 26828 26829 26950 +f 26828 26950 26949 +f 26829 26830 26950 +f 26830 26951 26950 +f 26830 26831 26952 +f 26830 26952 26951 +f 26831 26832 26952 +f 26832 26953 26952 +f 26832 26833 26954 +f 26832 26954 26953 +f 26833 26834 26954 +f 26834 26955 26954 +f 26834 26835 26956 +f 26834 26956 26955 +f 26835 26836 26956 +f 26836 26957 26956 +f 26836 26837 26958 +f 26836 26958 26957 +f 26837 26838 26958 +f 26838 26959 26958 +f 26838 26839 26960 +f 26838 26960 26959 +f 26839 26840 26960 +f 26840 26961 26960 +f 26840 26841 26962 +f 26840 26962 26961 +f 26841 26842 26962 +f 26842 26963 26962 +f 26842 26843 26964 +f 26842 26964 26963 +f 26843 26844 26964 +f 26844 26965 26964 +f 26844 26845 26966 +f 26844 26966 26965 +f 26845 26846 26966 +f 26846 26967 26966 +f 26846 26847 26968 +f 26846 26968 26967 +f 26847 26848 26968 +f 26848 26969 26968 +f 26848 26849 26970 +f 26848 26970 26969 +f 26849 26850 26970 +f 26850 26971 26970 +f 26850 26851 26972 +f 26850 26972 26971 +f 26851 26852 26972 +f 26852 26973 26972 +f 26852 26853 26974 +f 26852 26974 26973 +f 26853 26854 26974 +f 26854 26975 26974 +f 26854 26855 26976 +f 26854 26976 26975 +f 26855 26856 26976 +f 26856 26977 26976 +f 26856 26857 26978 +f 26856 26978 26977 +f 26857 26858 26978 +f 26858 26979 26978 +f 26858 26859 26980 +f 26858 26980 26979 +f 26859 26860 26980 +f 26860 26981 26980 +f 26860 26861 26982 +f 26860 26982 26981 +f 26861 26862 26982 +f 26862 26983 26982 +f 26862 26863 26984 +f 26862 26984 26983 +f 26863 26864 26984 +f 26864 26985 26984 +f 26864 26865 26986 +f 26864 26986 26985 +f 26865 26866 26986 +f 26866 26987 26986 +f 26866 26867 26988 +f 26866 26988 26987 +f 26867 26868 26988 +f 26868 26989 26988 +f 26868 26869 26990 +f 26868 26990 26989 +f 26869 26870 26990 +f 26870 26991 26990 +f 26870 26871 26992 +f 26870 26992 26991 +f 26871 26872 26992 +f 26872 26993 26992 +f 26872 26873 26994 +f 26872 26994 26993 +f 26873 26874 26994 +f 26874 26995 26994 +f 26874 26875 26996 +f 26874 26996 26995 +f 26875 26876 26996 +f 26876 26997 26996 +f 26876 26877 26998 +f 26876 26998 26997 +f 26877 26878 26998 +f 26878 26999 26998 +f 26878 26879 27000 +f 26878 27000 26999 +f 26879 26880 27000 +f 26880 27001 27000 +f 26880 26881 27002 +f 26880 27002 27001 +f 26881 26882 27002 +f 26882 27003 27002 +f 26882 26883 27004 +f 26882 27004 27003 +f 26883 26884 27004 +f 26884 27005 27004 +f 26884 26885 27006 +f 26884 27006 27005 +f 26885 26886 27006 +f 26886 27007 27006 +f 26886 26887 27008 +f 26886 27008 27007 +f 26887 26888 27008 +f 26888 27009 27008 +f 26888 26889 27010 +f 26888 27010 27009 +f 26889 26890 27010 +f 26890 27011 27010 +f 26890 26891 27012 +f 26890 27012 27011 +f 26891 26892 27012 +f 26892 27013 27012 +f 26892 26893 27014 +f 26892 27014 27013 +f 26893 26894 27014 +f 26894 27015 27014 +f 26894 26895 27016 +f 26894 27016 27015 +f 26895 26896 27016 +f 26896 27017 27016 +f 26896 26897 27018 +f 26896 27018 27017 +f 26897 26898 27018 +f 26898 27019 27018 +f 26898 26899 27020 +f 26898 27020 27019 +f 26902 26903 27021 +f 26903 26904 27021 +f 26904 27022 27021 +f 26904 26905 27023 +f 26904 27023 27022 +f 26905 26906 27023 +f 26906 27024 27023 +f 26906 26907 27025 +f 26906 27025 27024 +f 26907 26908 27025 +f 26908 27026 27025 +f 26908 26909 27027 +f 26908 27027 27026 +f 26909 26910 27027 +f 26910 27028 27027 +f 26910 26911 27029 +f 26910 27029 27028 +f 26911 26912 27029 +f 26912 27030 27029 +f 26912 26913 27031 +f 26912 27031 27030 +f 26913 26914 
27031 +f 26914 27032 27031 +f 26914 26915 27033 +f 26914 27033 27032 +f 26915 26916 27033 +f 26916 27034 27033 +f 26916 26917 27035 +f 26916 27035 27034 +f 26917 26918 27035 +f 26918 27036 27035 +f 26918 26919 27037 +f 26918 27037 27036 +f 26919 26920 27037 +f 26920 27038 27037 +f 26920 26921 27039 +f 26920 27039 27038 +f 26921 26922 27039 +f 26922 27040 27039 +f 26922 26923 27041 +f 26922 27041 27040 +f 26923 26924 27041 +f 26924 27042 27041 +f 26924 26925 27043 +f 26924 27043 27042 +f 26925 26926 27043 +f 26926 27044 27043 +f 26926 26927 27045 +f 26926 27045 27044 +f 26927 26928 27045 +f 26928 27046 27045 +f 26928 26929 27047 +f 26928 27047 27046 +f 26929 26930 27047 +f 26930 27048 27047 +f 26930 26931 27049 +f 26930 27049 27048 +f 26931 26932 27049 +f 26932 27050 27049 +f 26932 26933 27051 +f 26932 27051 27050 +f 26933 26934 27051 +f 26934 27052 27051 +f 26934 26935 27053 +f 26934 27053 27052 +f 26935 26936 27053 +f 26964 26965 27054 +f 26965 26966 27054 +f 26966 27055 27054 +f 26966 26967 27056 +f 26966 27056 27055 +f 26967 26968 27056 +f 26968 27057 27056 +f 26968 26969 27058 +f 26968 27058 27057 +f 26969 26970 27058 +f 26970 27059 27058 +f 26970 26971 27060 +f 26970 27060 27059 +f 26971 26972 27060 +f 26972 27061 27060 +f 26972 26973 27062 +f 26972 27062 27061 +f 26973 26974 27062 +f 26974 27063 27062 +f 26974 26975 27064 +f 26974 27064 27063 +f 26975 26976 27064 +f 26976 27065 27064 +f 26976 26977 27066 +f 26976 27066 27065 +f 26977 26978 27066 +f 26978 27067 27066 +f 26978 26979 27068 +f 26978 27068 27067 +f 26979 26980 27068 +f 26980 27069 27068 +f 26980 26981 27070 +f 26980 27070 27069 +f 26981 26982 27070 +f 26982 27071 27070 +f 26982 26983 27072 +f 26982 27072 27071 +f 26983 26984 27072 +f 26984 27073 27072 +f 26984 26985 27074 +f 26984 27074 27073 +f 26985 26986 27074 +f 26986 27075 27074 +f 26986 26987 27076 +f 26986 27076 27075 +f 26987 26988 27076 +f 26988 27077 27076 +f 26988 26989 27078 +f 26988 27078 27077 +f 26989 26990 27078 +f 26990 27079 27078 +f 26990 26991 27080 +f 26990 27080 27079 +f 26991 26992 27080 +f 26992 27081 27080 +f 26992 26993 27082 +f 26992 27082 27081 +f 26993 26994 27082 +f 26994 27083 27082 +f 26994 26995 27084 +f 26994 27084 27083 +f 26995 26996 27084 +f 26996 27085 27084 +f 26996 26997 27086 +f 26996 27086 27085 +f 26997 26998 27086 +f 26998 27087 27086 +f 26998 26999 27088 +f 26998 27088 27087 +f 26999 27000 27088 +f 27000 27089 27088 +f 27000 27001 27090 +f 27000 27090 27089 +f 27001 27002 27090 +f 27002 27091 27090 +f 27002 27003 27092 +f 27002 27092 27091 +f 27003 27004 27092 +f 27004 27093 27092 +f 27004 27005 27094 +f 27004 27094 27093 +f 27005 27006 27094 +f 27006 27095 27094 +f 27006 27007 27096 +f 27006 27096 27095 +f 27007 27008 27096 +f 27008 27097 27096 +f 27008 27009 27098 +f 27008 27098 27097 +f 27009 27010 27098 +f 27010 27099 27098 +f 27010 27011 27100 +f 27010 27100 27099 +f 27011 27012 27100 +f 27012 27101 27100 +f 27012 27013 27102 +f 27012 27102 27101 +f 27013 27014 27102 +f 27014 27103 27102 +f 27014 27015 27104 +f 27014 27104 27103 +f 27015 27016 27104 +f 27016 27105 27104 +f 27016 27017 27106 +f 27016 27106 27105 +f 27017 27018 27106 +f 27018 27107 27106 +f 27018 27019 27108 +f 27018 27108 27107 +f 27019 27020 27108 +f 27020 27109 27108 +f 27021 27022 27110 +f 27022 27023 27110 +f 27023 27111 27110 +f 27023 27024 27112 +f 27023 27112 27111 +f 27024 27025 27112 +f 27025 27113 27112 +f 27025 27026 27114 +f 27025 27114 27113 +f 27026 27027 27114 +f 27027 27115 27114 +f 27027 27028 27116 +f 27027 27116 27115 +f 27028 27029 27116 
+f 27029 27117 27116 +f 27029 27030 27118 +f 27029 27118 27117 +f 27030 27031 27118 +f 27031 27119 27118 +f 27031 27032 27120 +f 27031 27120 27119 +f 27032 27033 27120 +f 27033 27121 27120 +f 27033 27034 27122 +f 27033 27122 27121 +f 27034 27035 27122 +f 27035 27123 27122 +f 27035 27036 27124 +f 27035 27124 27123 +f 27036 27037 27124 +f 27037 27125 27124 +f 27037 27038 27126 +f 27037 27126 27125 +f 27038 27039 27126 +f 27039 27127 27126 +f 27039 27040 27128 +f 27039 27128 27127 +f 27040 27041 27128 +f 27041 27129 27128 +f 27041 27042 27130 +f 27041 27130 27129 +f 27042 27043 27130 +f 27043 27131 27130 +f 27043 27044 27132 +f 27043 27132 27131 +f 27044 27045 27132 +f 27045 27133 27132 +f 27045 27046 27134 +f 27045 27134 27133 +f 27046 27047 27134 +f 27047 27135 27134 +f 27047 27048 27136 +f 27047 27136 27135 +f 27048 27049 27136 +f 27049 27137 27136 +f 27049 27050 27138 +f 27049 27138 27137 +f 27050 27051 27138 +f 27056 27057 27139 +f 27057 27058 27139 +f 27058 27140 27139 +f 27058 27059 27141 +f 27058 27141 27140 +f 27059 27060 27141 +f 27060 27142 27141 +f 27060 27061 27143 +f 27060 27143 27142 +f 27061 27062 27143 +f 27062 27144 27143 +f 27062 27063 27145 +f 27062 27145 27144 +f 27063 27064 27145 +f 27064 27146 27145 +f 27064 27065 27147 +f 27064 27147 27146 +f 27065 27066 27147 +f 27066 27148 27147 +f 27066 27067 27149 +f 27066 27149 27148 +f 27067 27068 27149 +f 27068 27150 27149 +f 27068 27069 27151 +f 27068 27151 27150 +f 27069 27070 27151 +f 27070 27152 27151 +f 27070 27071 27153 +f 27070 27153 27152 +f 27071 27072 27153 +f 27072 27154 27153 +f 27072 27073 27155 +f 27072 27155 27154 +f 27073 27074 27155 +f 27074 27156 27155 +f 27074 27075 27157 +f 27074 27157 27156 +f 27075 27076 27157 +f 27076 27158 27157 +f 27076 27077 27159 +f 27076 27159 27158 +f 27077 27078 27159 +f 27078 27160 27159 +f 27078 27079 27161 +f 27078 27161 27160 +f 27079 27080 27161 +f 27080 27162 27161 +f 27080 27081 27163 +f 27080 27163 27162 +f 27081 27082 27163 +f 27082 27164 27163 +f 27082 27083 27165 +f 27082 27165 27164 +f 27083 27084 27165 +f 27084 27166 27165 +f 27084 27085 27167 +f 27084 27167 27166 +f 27085 27086 27167 +f 27086 27168 27167 +f 27086 27087 27169 +f 27086 27169 27168 +f 27087 27088 27169 +f 27088 27170 27169 +f 27088 27089 27171 +f 27088 27171 27170 +f 27089 27090 27171 +f 27090 27172 27171 +f 27090 27091 27173 +f 27090 27173 27172 +f 27091 27092 27173 +f 27092 27174 27173 +f 27092 27093 27175 +f 27092 27175 27174 +f 27093 27094 27175 +f 27094 27176 27175 +f 27094 27095 27177 +f 27094 27177 27176 +f 27095 27096 27177 +f 27096 27178 27177 +f 27096 27097 27179 +f 27096 27179 27178 +f 27097 27098 27179 +f 27098 27180 27179 +f 27098 27099 27181 +f 27098 27181 27180 +f 27099 27100 27181 +f 27100 27182 27181 +f 27100 27101 27183 +f 27100 27183 27182 +f 27101 27102 27183 +f 27102 27184 27183 +f 27102 27103 27185 +f 27102 27185 27184 +f 27103 27104 27185 +f 27104 27186 27185 +f 27104 27105 27187 +f 27104 27187 27186 +f 27105 27106 27187 +f 27106 27188 27187 +f 27106 27107 27189 +f 27106 27189 27188 +f 27107 27108 27189 +f 27108 27190 27189 +f 27108 27109 27191 +f 27108 27191 27190 +f 27112 27113 27192 +f 27113 27114 27192 +f 27114 27193 27192 +f 27114 27115 27194 +f 27114 27194 27193 +f 27115 27116 27194 +f 27116 27195 27194 +f 27116 27117 27196 +f 27116 27196 27195 +f 27117 27118 27196 +f 27118 27197 27196 +f 27118 27119 27198 +f 27118 27198 27197 +f 27119 27120 27198 +f 27120 27199 27198 +f 27120 27121 27200 +f 27120 27200 27199 +f 27121 27122 27200 +f 27122 27201 27200 +f 27122 27123 27202 +f 
27122 27202 27201 +f 27123 27124 27202 +f 27124 27203 27202 +f 27124 27125 27204 +f 27124 27204 27203 +f 27125 27126 27204 +f 27126 27205 27204 +f 27126 27127 27206 +f 27126 27206 27205 +f 27127 27128 27206 +f 27128 27207 27206 +f 27128 27129 27208 +f 27128 27208 27207 +f 27129 27130 27208 +f 27130 27209 27208 +f 27130 27131 27210 +f 27130 27210 27209 +f 27131 27132 27210 +f 27132 27211 27210 +f 27132 27133 27212 +f 27132 27212 27211 +f 27133 27134 27212 +f 27134 27213 27212 +f 27134 27135 27214 +f 27134 27214 27213 +f 27135 27136 27214 +f 27141 27142 27215 +f 27142 27143 27215 +f 27143 27216 27215 +f 27143 27144 27217 +f 27143 27217 27216 +f 27144 27145 27217 +f 27145 27218 27217 +f 27145 27146 27219 +f 27145 27219 27218 +f 27146 27147 27219 +f 27147 27220 27219 +f 27147 27148 27221 +f 27147 27221 27220 +f 27148 27149 27221 +f 27149 27222 27221 +f 27149 27150 27223 +f 27149 27223 27222 +f 27150 27151 27223 +f 27151 27224 27223 +f 27151 27152 27225 +f 27151 27225 27224 +f 27152 27153 27225 +f 27153 27226 27225 +f 27153 27154 27227 +f 27153 27227 27226 +f 27154 27155 27227 +f 27155 27228 27227 +f 27155 27156 27229 +f 27155 27229 27228 +f 27156 27157 27229 +f 27157 27230 27229 +f 27157 27158 27231 +f 27157 27231 27230 +f 27158 27159 27231 +f 27159 27232 27231 +f 27159 27160 27233 +f 27159 27233 27232 +f 27160 27161 27233 +f 27161 27234 27233 +f 27161 27162 27235 +f 27161 27235 27234 +f 27162 27163 27235 +f 27163 27236 27235 +f 27163 27164 27237 +f 27163 27237 27236 +f 27164 27165 27237 +f 27165 27238 27237 +f 27165 27166 27239 +f 27165 27239 27238 +f 27166 27167 27239 +f 27167 27240 27239 +f 27167 27168 27241 +f 27167 27241 27240 +f 27168 27169 27241 +f 27169 27242 27241 +f 27169 27170 27243 +f 27169 27243 27242 +f 27170 27171 27243 +f 27171 27244 27243 +f 27171 27172 27245 +f 27171 27245 27244 +f 27172 27173 27245 +f 27173 27246 27245 +f 27173 27174 27247 +f 27173 27247 27246 +f 27174 27175 27247 +f 27175 27248 27247 +f 27175 27176 27249 +f 27175 27249 27248 +f 27176 27177 27249 +f 27177 27250 27249 +f 27177 27178 27251 +f 27177 27251 27250 +f 27178 27179 27251 +f 27179 27252 27251 +f 27179 27180 27253 +f 27179 27253 27252 +f 27180 27181 27253 +f 27181 27254 27253 +f 27181 27182 27255 +f 27181 27255 27254 +f 27182 27183 27255 +f 27183 27256 27255 +f 27183 27184 27257 +f 27183 27257 27256 +f 27184 27185 27257 +f 27185 27258 27257 +f 27185 27186 27259 +f 27185 27259 27258 +f 27186 27187 27259 +f 27187 27260 27259 +f 27187 27188 27261 +f 27187 27261 27260 +f 27188 27189 27261 +f 27189 27262 27261 +f 27189 27190 27263 +f 27189 27263 27262 +f 27190 27191 27263 +f 27191 27264 27263 +f 27217 27218 27266 +f 27217 27266 27265 +f 27218 27219 27266 +f 27219 27267 27266 +f 27219 27220 27268 +f 27219 27268 27267 +f 27220 27221 27268 +f 27221 27269 27268 +f 27221 27222 27270 +f 27221 27270 27269 +f 27222 27223 27270 +f 27223 27271 27270 +f 27223 27224 27272 +f 27223 27272 27271 +f 27224 27225 27272 +f 27225 27273 27272 +f 27225 27226 27274 +f 27225 27274 27273 +f 27226 27227 27274 +f 27227 27275 27274 +f 27227 27228 27276 +f 27227 27276 27275 +f 27228 27229 27276 +f 27229 27277 27276 +f 27229 27230 27278 +f 27229 27278 27277 +f 27230 27231 27278 +f 27231 27279 27278 +f 27231 27232 27280 +f 27231 27280 27279 +f 27232 27233 27280 +f 27233 27281 27280 +f 27233 27234 27282 +f 27233 27282 27281 +f 27234 27235 27282 +f 27235 27283 27282 +f 27235 27236 27284 +f 27235 27284 27283 +f 27236 27237 27284 +f 27237 27285 27284 +f 27237 27238 27286 +f 27237 27286 27285 +f 27238 27239 27286 +f 27239 27287 27286 +f 27239 
27240 27288 +f 27239 27288 27287 +f 27240 27241 27288 +f 27241 27289 27288 +f 27241 27242 27290 +f 27241 27290 27289 +f 27242 27243 27290 +f 27243 27291 27290 +f 27243 27244 27292 +f 27243 27292 27291 +f 27244 27245 27292 +f 27245 27293 27292 +f 27245 27246 27294 +f 27245 27294 27293 +f 27246 27247 27294 +f 27247 27295 27294 +f 27247 27248 27296 +f 27247 27296 27295 +f 27248 27249 27296 +f 27249 27297 27296 +f 27249 27250 27298 +f 27249 27298 27297 +f 27250 27251 27298 +f 27251 27299 27298 +f 27251 27252 27300 +f 27251 27300 27299 +f 27252 27253 27300 +f 27253 27301 27300 +f 27253 27254 27302 +f 27253 27302 27301 +f 27254 27255 27302 +f 27255 27303 27302 +f 27255 27256 27304 +f 27255 27304 27303 +f 27256 27257 27304 +f 27257 27305 27304 +f 27257 27258 27306 +f 27257 27306 27305 +f 27258 27259 27306 +f 27259 27307 27306 +f 27259 27260 27308 +f 27259 27308 27307 +f 27260 27261 27308 +f 27261 27309 27308 +f 27261 27262 27310 +f 27261 27310 27309 +f 27262 27263 27310 +f 27263 27311 27310 +f 27263 27264 27312 +f 27263 27312 27311 +f 27266 27267 27313 +f 27267 27268 27313 +f 27268 27314 27313 +f 27268 27269 27315 +f 27268 27315 27314 +f 27269 27270 27315 +f 27270 27316 27315 +f 27270 27271 27317 +f 27270 27317 27316 +f 27271 27272 27317 +f 27272 27318 27317 +f 27272 27273 27319 +f 27272 27319 27318 +f 27273 27274 27319 +f 27274 27320 27319 +f 27274 27275 27321 +f 27274 27321 27320 +f 27275 27276 27321 +f 27276 27322 27321 +f 27276 27277 27323 +f 27276 27323 27322 +f 27277 27278 27323 +f 27278 27324 27323 +f 27278 27279 27325 +f 27278 27325 27324 +f 27279 27280 27325 +f 27280 27326 27325 +f 27280 27281 27327 +f 27280 27327 27326 +f 27281 27282 27327 +f 27282 27328 27327 +f 27282 27283 27329 +f 27282 27329 27328 +f 27283 27284 27329 +f 27284 27330 27329 +f 27284 27285 27331 +f 27284 27331 27330 +f 27285 27286 27331 +f 27286 27332 27331 +f 27286 27287 27333 +f 27286 27333 27332 +f 27287 27288 27333 +f 27288 27334 27333 +f 27288 27289 27335 +f 27288 27335 27334 +f 27289 27290 27335 +f 27290 27336 27335 +f 27290 27291 27337 +f 27290 27337 27336 +f 27291 27292 27337 +f 27292 27338 27337 +f 27292 27293 27339 +f 27292 27339 27338 +f 27293 27294 27339 +f 27294 27340 27339 +f 27294 27295 27341 +f 27294 27341 27340 +f 27295 27296 27341 +f 27296 27342 27341 +f 27296 27297 27343 +f 27296 27343 27342 +f 27297 27298 27343 +f 27298 27344 27343 +f 27298 27299 27345 +f 27298 27345 27344 +f 27299 27300 27345 +f 27300 27346 27345 +f 27300 27301 27347 +f 27300 27347 27346 +f 27301 27302 27347 +f 27302 27348 27347 +f 27302 27303 27349 +f 27302 27349 27348 +f 27303 27304 27349 +f 27304 27350 27349 +f 27304 27305 27351 +f 27304 27351 27350 +f 27305 27306 27351 +f 27306 27352 27351 +f 27306 27307 27353 +f 27306 27353 27352 +f 27307 27308 27353 +f 27308 27354 27353 +f 27308 27309 27355 +f 27308 27355 27354 +f 27309 27310 27355 +f 27310 27356 27355 +f 27310 27311 27357 +f 27310 27357 27356 +f 27311 27312 27357 +f 27315 27316 27358 +f 27316 27317 27358 +f 27317 27359 27358 +f 27317 27318 27360 +f 27317 27360 27359 +f 27318 27319 27360 +f 27319 27361 27360 +f 27319 27320 27362 +f 27319 27362 27361 +f 27320 27321 27362 +f 27321 27363 27362 +f 27321 27322 27364 +f 27321 27364 27363 +f 27322 27323 27364 +f 27323 27365 27364 +f 27323 27324 27366 +f 27323 27366 27365 +f 27324 27325 27366 +f 27325 27367 27366 +f 27325 27326 27368 +f 27325 27368 27367 +f 27326 27327 27368 +f 27327 27369 27368 +f 27327 27328 27370 +f 27327 27370 27369 +f 27328 27329 27370 +f 27329 27371 27370 +f 27329 27330 27372 +f 27329 27372 27371 +f 27330 27331 
27372 +f 27331 27373 27372 +f 27331 27332 27374 +f 27331 27374 27373 +f 27332 27333 27374 +f 27333 27375 27374 +f 27333 27334 27376 +f 27333 27376 27375 +f 27334 27335 27376 +f 27335 27377 27376 +f 27335 27336 27378 +f 27335 27378 27377 +f 27336 27337 27378 +f 27337 27379 27378 +f 27337 27338 27380 +f 27337 27380 27379 +f 27338 27339 27380 +f 27339 27381 27380 +f 27339 27340 27382 +f 27339 27382 27381 +f 27340 27341 27382 +f 27341 27383 27382 +f 27341 27342 27384 +f 27341 27384 27383 +f 27342 27343 27384 +f 27343 27385 27384 +f 27343 27344 27386 +f 27343 27386 27385 +f 27344 27345 27386 +f 27345 27387 27386 +f 27345 27346 27388 +f 27345 27388 27387 +f 27346 27347 27388 +f 27347 27389 27388 +f 27347 27348 27390 +f 27347 27390 27389 +f 27348 27349 27390 +f 27349 27391 27390 +f 27349 27350 27392 +f 27349 27392 27391 +f 27350 27351 27392 +f 27351 27393 27392 +f 27351 27352 27394 +f 27351 27394 27393 +f 27352 27353 27394 +f 27353 27395 27394 +f 27353 27354 27396 +f 27353 27396 27395 +f 27354 27355 27396 +f 27355 27397 27396 +f 27355 27356 27398 +f 27355 27398 27397 +f 27356 27357 27398 +f 17110 17239 27399 +f 17239 17368 27399 +f 17368 27400 27399 +f 17368 17497 27401 +f 17368 27401 27400 +f 17497 17626 27401 +f 17626 27402 27401 +f 17626 17755 27403 +f 17626 27403 27402 +f 17755 17884 27403 +f 17884 27404 27403 +f 17884 18013 27405 +f 17884 27405 27404 +f 18013 18142 27405 +f 18142 27406 27405 +f 18142 18271 27407 +f 18142 27407 27406 +f 18271 18400 27407 +f 18400 27408 27407 +f 18400 18529 27409 +f 18400 27409 27408 +f 18529 18658 27409 +f 18658 27410 27409 +f 18658 18787 27411 +f 18658 27411 27410 +f 18787 18916 27411 +f 18916 27412 27411 +f 18916 19045 27413 +f 18916 27413 27412 +f 19045 19174 27413 +f 19174 27414 27413 +f 19174 19303 27415 +f 19174 27415 27414 +f 19303 19432 27415 +f 19432 27416 27415 +f 19432 19561 27417 +f 19432 27417 27416 +f 19561 19690 27417 +f 19690 27418 27417 +f 19690 19819 27419 +f 19690 27419 27418 +f 19819 19948 27419 +f 19948 27420 27419 +f 19948 20077 27421 +f 19948 27421 27420 +f 20077 20206 27421 +f 20206 27422 27421 +f 20206 20335 27423 +f 20206 27423 27422 +f 20335 20464 27423 +f 20464 27424 27423 +f 20464 20593 27425 +f 20464 27425 27424 +f 20593 20722 27425 +f 20722 27426 27425 +f 20722 20851 27427 +f 20722 27427 27426 +f 20851 20980 27427 +f 20980 27428 27427 +f 20980 21109 27429 +f 20980 27429 27428 +f 21109 21238 27429 +f 21238 27430 27429 +f 21238 21367 27431 +f 21238 27431 27430 +f 21367 21496 27431 +f 21496 27432 27431 +f 21496 21625 27433 +f 21496 27433 27432 +f 21625 1 27433 +f 1 130 27433 +f 27399 27400 27435 +f 27399 27435 27434 +f 27400 27401 27435 +f 27401 27436 27435 +f 27401 27402 27437 +f 27401 27437 27436 +f 27402 27403 27437 +f 27403 27438 27437 +f 27403 27404 27439 +f 27403 27439 27438 +f 27404 27405 27439 +f 27405 27440 27439 +f 27405 27406 27441 +f 27405 27441 27440 +f 27406 27407 27441 +f 27407 27442 27441 +f 27407 27408 27443 +f 27407 27443 27442 +f 27408 27409 27443 +f 27409 27444 27443 +f 27409 27410 27445 +f 27409 27445 27444 +f 27410 27411 27445 +f 27411 27446 27445 +f 27411 27412 27447 +f 27411 27447 27446 +f 27412 27413 27447 +f 27413 27448 27447 +f 27413 27414 27449 +f 27413 27449 27448 +f 27414 27415 27449 +f 27415 27450 27449 +f 27415 27416 27451 +f 27415 27451 27450 +f 27416 27417 27451 +f 27417 27452 27451 +f 27417 27418 27453 +f 27417 27453 27452 +f 27418 27419 27453 +f 27419 27454 27453 +f 27419 27420 27455 +f 27419 27455 27454 +f 27420 27421 27455 +f 27421 27456 27455 +f 27421 27422 27457 +f 27421 27457 27456 +f 27422 
27423 27457 +f 27423 27458 27457 +f 27423 27424 27459 +f 27423 27459 27458 +f 27424 27425 27459 +f 27425 27460 27459 +f 27425 27426 27461 +f 27425 27461 27460 +f 27426 27427 27461 +f 27427 27462 27461 +f 27427 27428 27463 +f 27427 27463 27462 +f 27428 27429 27463 +f 27429 27464 27463 +f 27429 27430 27465 +f 27429 27465 27464 +f 27430 27431 27465 +f 27431 27466 27465 +f 27431 27432 27467 +f 27431 27467 27466 +f 27432 27433 27467 +f 27433 27468 27467 +f 27433 130 259 +f 27433 259 27468 +f 27434 27435 27469 +f 27435 27470 27469 +f 27435 27436 27471 +f 27435 27471 27470 +f 27436 27437 27471 +f 27437 27472 27471 +f 27437 27438 27473 +f 27437 27473 27472 +f 27438 27439 27473 +f 27439 27474 27473 +f 27439 27440 27475 +f 27439 27475 27474 +f 27440 27441 27475 +f 27441 27476 27475 +f 27441 27442 27477 +f 27441 27477 27476 +f 27442 27443 27477 +f 27443 27478 27477 +f 27443 27444 27479 +f 27443 27479 27478 +f 27444 27445 27479 +f 27445 27480 27479 +f 27445 27446 27481 +f 27445 27481 27480 +f 27446 27447 27481 +f 27447 27482 27481 +f 27447 27448 27483 +f 27447 27483 27482 +f 27448 27449 27483 +f 27449 27484 27483 +f 27449 27450 27485 +f 27449 27485 27484 +f 27450 27451 27485 +f 27451 27486 27485 +f 27451 27452 27487 +f 27451 27487 27486 +f 27452 27453 27487 +f 27453 27488 27487 +f 27453 27454 27489 +f 27453 27489 27488 +f 27454 27455 27489 +f 27455 27490 27489 +f 27455 27456 27491 +f 27455 27491 27490 +f 27456 27457 27491 +f 27457 27492 27491 +f 27457 27458 27493 +f 27457 27493 27492 +f 27458 27459 27493 +f 27459 27494 27493 +f 27459 27460 27495 +f 27459 27495 27494 +f 27460 27461 27495 +f 27461 27496 27495 +f 27461 27462 27497 +f 27461 27497 27496 +f 27462 27463 27497 +f 27463 27498 27497 +f 27463 27464 27499 +f 27463 27499 27498 +f 27464 27465 27499 +f 27465 27500 27499 +f 27465 27466 27501 +f 27465 27501 27500 +f 27466 27467 27501 +f 27467 27502 27501 +f 27467 27468 27503 +f 27467 27503 27502 +f 27468 259 27503 +f 259 388 27503 +f 27469 27470 27505 +f 27469 27505 27504 +f 27470 27471 27505 +f 27471 27506 27505 +f 27471 27472 27507 +f 27471 27507 27506 +f 27472 27473 27507 +f 27473 27508 27507 +f 27473 27474 27509 +f 27473 27509 27508 +f 27474 27475 27509 +f 27475 27510 27509 +f 27475 27476 27511 +f 27475 27511 27510 +f 27476 27477 27511 +f 27477 27512 27511 +f 27477 27478 27513 +f 27477 27513 27512 +f 27478 27479 27513 +f 27479 27514 27513 +f 27479 27480 27515 +f 27479 27515 27514 +f 27480 27481 27515 +f 27481 27516 27515 +f 27481 27482 27517 +f 27481 27517 27516 +f 27482 27483 27517 +f 27483 27518 27517 +f 27483 27484 27519 +f 27483 27519 27518 +f 27484 27485 27519 +f 27485 27520 27519 +f 27485 27486 27521 +f 27485 27521 27520 +f 27486 27487 27521 +f 27487 27522 27521 +f 27487 27488 27523 +f 27487 27523 27522 +f 27488 27489 27523 +f 27489 27524 27523 +f 27489 27490 27525 +f 27489 27525 27524 +f 27490 27491 27525 +f 27491 27526 27525 +f 27491 27492 27527 +f 27491 27527 27526 +f 27492 27493 27527 +f 27493 27528 27527 +f 27493 27494 27529 +f 27493 27529 27528 +f 27494 27495 27529 +f 27495 27530 27529 +f 27495 27496 27531 +f 27495 27531 27530 +f 27496 27497 27531 +f 27497 27532 27531 +f 27497 27498 27533 +f 27497 27533 27532 +f 27498 27499 27533 +f 27499 27534 27533 +f 27499 27500 27535 +f 27499 27535 27534 +f 27500 27501 27535 +f 27501 27536 27535 +f 27501 27502 27537 +f 27501 27537 27536 +f 27502 27503 27537 +f 27503 27538 27537 +f 27503 388 517 +f 27503 517 27538 +f 27504 27505 27539 +f 27505 27540 27539 +f 27505 27506 27541 +f 27505 27541 27540 +f 27506 27507 27541 +f 27507 27542 27541 +f 27507 
27508 27543 +f 27507 27543 27542 +f 27508 27509 27543 +f 27509 27544 27543 +f 27509 27510 27545 +f 27509 27545 27544 +f 27510 27511 27545 +f 27511 27546 27545 +f 27511 27512 27547 +f 27511 27547 27546 +f 27512 27513 27547 +f 27513 27548 27547 +f 27513 27514 27549 +f 27513 27549 27548 +f 27514 27515 27549 +f 27515 27550 27549 +f 27515 27516 27551 +f 27515 27551 27550 +f 27516 27517 27551 +f 27517 27552 27551 +f 27517 27518 27553 +f 27517 27553 27552 +f 27518 27519 27553 +f 27519 27554 27553 +f 27519 27520 27555 +f 27519 27555 27554 +f 27520 27521 27555 +f 27521 27556 27555 +f 27521 27522 27557 +f 27521 27557 27556 +f 27522 27523 27557 +f 27523 27558 27557 +f 27523 27524 27559 +f 27523 27559 27558 +f 27524 27525 27559 +f 27525 27560 27559 +f 27525 27526 27561 +f 27525 27561 27560 +f 27526 27527 27561 +f 27527 27562 27561 +f 27527 27528 27563 +f 27527 27563 27562 +f 27528 27529 27563 +f 27529 27564 27563 +f 27529 27530 27565 +f 27529 27565 27564 +f 27530 27531 27565 +f 27531 27566 27565 +f 27531 27532 27567 +f 27531 27567 27566 +f 27532 27533 27567 +f 27533 27568 27567 +f 27533 27534 27569 +f 27533 27569 27568 +f 27534 27535 27569 +f 27535 27570 27569 +f 27535 27536 27571 +f 27535 27571 27570 +f 27536 27537 27571 +f 27537 27572 27571 +f 27537 27538 27573 +f 27537 27573 27572 +f 27538 517 27573 +f 517 646 27573 +f 27539 27540 27575 +f 27539 27575 27574 +f 27540 27541 27575 +f 27541 27576 27575 +f 27541 27542 27577 +f 27541 27577 27576 +f 27542 27543 27577 +f 27543 27578 27577 +f 27543 27544 27579 +f 27543 27579 27578 +f 27544 27545 27579 +f 27545 27580 27579 +f 27545 27546 27581 +f 27545 27581 27580 +f 27546 27547 27581 +f 27547 27582 27581 +f 27547 27548 27583 +f 27547 27583 27582 +f 27548 27549 27583 +f 27549 27584 27583 +f 27549 27550 27585 +f 27549 27585 27584 +f 27550 27551 27585 +f 27551 27586 27585 +f 27551 27552 27587 +f 27551 27587 27586 +f 27552 27553 27587 +f 27553 27588 27587 +f 27553 27554 27589 +f 27553 27589 27588 +f 27554 27555 27589 +f 27555 27590 27589 +f 27555 27556 27591 +f 27555 27591 27590 +f 27556 27557 27591 +f 27557 27592 27591 +f 27557 27558 27593 +f 27557 27593 27592 +f 27558 27559 27593 +f 27559 27594 27593 +f 27559 27560 27595 +f 27559 27595 27594 +f 27560 27561 27595 +f 27561 27596 27595 +f 27561 27562 27597 +f 27561 27597 27596 +f 27562 27563 27597 +f 27563 27598 27597 +f 27563 27564 27599 +f 27563 27599 27598 +f 27564 27565 27599 +f 27565 27600 27599 +f 27565 27566 27601 +f 27565 27601 27600 +f 27566 27567 27601 +f 27567 27602 27601 +f 27567 27568 27603 +f 27567 27603 27602 +f 27568 27569 27603 +f 27569 27604 27603 +f 27569 27570 27605 +f 27569 27605 27604 +f 27570 27571 27605 +f 27571 27606 27605 +f 27571 27572 27607 +f 27571 27607 27606 +f 27572 27573 27607 +f 27573 27608 27607 +f 27573 646 775 +f 27573 775 27608 +f 27574 27575 27609 +f 27575 27610 27609 +f 27575 27576 27611 +f 27575 27611 27610 +f 27576 27577 27611 +f 27577 27612 27611 +f 27577 27578 27613 +f 27577 27613 27612 +f 27578 27579 27613 +f 27579 27614 27613 +f 27579 27580 27615 +f 27579 27615 27614 +f 27580 27581 27615 +f 27581 27616 27615 +f 27581 27582 27617 +f 27581 27617 27616 +f 27582 27583 27617 +f 27583 27618 27617 +f 27583 27584 27619 +f 27583 27619 27618 +f 27584 27585 27619 +f 27585 27620 27619 +f 27585 27586 27621 +f 27585 27621 27620 +f 27586 27587 27621 +f 27587 27622 27621 +f 27587 27588 27623 +f 27587 27623 27622 +f 27588 27589 27623 +f 27589 27624 27623 +f 27589 27590 27625 +f 27589 27625 27624 +f 27590 27591 27625 +f 27591 27626 27625 +f 27591 27592 27627 +f 27591 27627 27626 +f 
27592 27593 27627 +f 27593 27628 27627 +f 27593 27594 27629 +f 27593 27629 27628 +f 27594 27595 27629 +f 27595 27630 27629 +f 27595 27596 27631 +f 27595 27631 27630 +f 27596 27597 27631 +f 27597 27632 27631 +f 27597 27598 27633 +f 27597 27633 27632 +f 27598 27599 27633 +f 27599 27634 27633 +f 27599 27600 27635 +f 27599 27635 27634 +f 27600 27601 27635 +f 27601 27636 27635 +f 27601 27602 27637 +f 27601 27637 27636 +f 27602 27603 27637 +f 27603 27638 27637 +f 27603 27604 27639 +f 27603 27639 27638 +f 27604 27605 27639 +f 27605 27640 27639 +f 27605 27606 27641 +f 27605 27641 27640 +f 27606 27607 27641 +f 27607 27642 27641 +f 27607 27608 27643 +f 27607 27643 27642 +f 27608 775 27643 +f 775 904 27643 +f 27609 27610 27644 +f 27610 27611 27644 +f 27611 27645 27644 +f 27611 27612 27646 +f 27611 27646 27645 +f 27612 27613 27646 +f 27613 27647 27646 +f 27613 27614 27648 +f 27613 27648 27647 +f 27614 27615 27648 +f 27615 27649 27648 +f 27615 27616 27650 +f 27615 27650 27649 +f 27616 27617 27650 +f 27617 27651 27650 +f 27617 27618 27652 +f 27617 27652 27651 +f 27618 27619 27652 +f 27619 27653 27652 +f 27619 27620 27654 +f 27619 27654 27653 +f 27620 27621 27654 +f 27621 27655 27654 +f 27621 27622 27656 +f 27621 27656 27655 +f 27622 27623 27656 +f 27623 27657 27656 +f 27623 27624 27658 +f 27623 27658 27657 +f 27624 27625 27658 +f 27625 27659 27658 +f 27625 27626 27660 +f 27625 27660 27659 +f 27626 27627 27660 +f 27627 27661 27660 +f 27627 27628 27662 +f 27627 27662 27661 +f 27628 27629 27662 +f 27629 27663 27662 +f 27629 27630 27664 +f 27629 27664 27663 +f 27630 27631 27664 +f 27631 27665 27664 +f 27631 27632 27666 +f 27631 27666 27665 +f 27632 27633 27666 +f 27633 27667 27666 +f 27633 27634 27668 +f 27633 27668 27667 +f 27634 27635 27668 +f 27635 27669 27668 +f 27635 27636 27670 +f 27635 27670 27669 +f 27636 27637 27670 +f 27637 27671 27670 +f 27637 27638 27672 +f 27637 27672 27671 +f 27638 27639 27672 +f 27639 27673 27672 +f 27639 27640 27674 +f 27639 27674 27673 +f 27640 27641 27674 +f 27641 27675 27674 +f 27641 27642 27676 +f 27641 27676 27675 +f 27642 27643 27676 +f 27643 27677 27676 +f 27643 904 1033 +f 27643 1033 27677 +f 27644 27645 27679 +f 27644 27679 27678 +f 27645 27646 27679 +f 27646 27680 27679 +f 27646 27647 27681 +f 27646 27681 27680 +f 27647 27648 27681 +f 27648 27682 27681 +f 27648 27649 27683 +f 27648 27683 27682 +f 27649 27650 27683 +f 27650 27684 27683 +f 27650 27651 27685 +f 27650 27685 27684 +f 27651 27652 27685 +f 27652 27686 27685 +f 27652 27653 27687 +f 27652 27687 27686 +f 27653 27654 27687 +f 27654 27688 27687 +f 27654 27655 27689 +f 27654 27689 27688 +f 27655 27656 27689 +f 27656 27690 27689 +f 27656 27657 27691 +f 27656 27691 27690 +f 27657 27658 27691 +f 27658 27692 27691 +f 27658 27659 27693 +f 27658 27693 27692 +f 27659 27660 27693 +f 27660 27694 27693 +f 27660 27661 27695 +f 27660 27695 27694 +f 27661 27662 27695 +f 27662 27696 27695 +f 27662 27663 27697 +f 27662 27697 27696 +f 27663 27664 27697 +f 27664 27698 27697 +f 27664 27665 27699 +f 27664 27699 27698 +f 27665 27666 27699 +f 27666 27700 27699 +f 27666 27667 27701 +f 27666 27701 27700 +f 27667 27668 27701 +f 27668 27702 27701 +f 27668 27669 27703 +f 27668 27703 27702 +f 27669 27670 27703 +f 27670 27704 27703 +f 27670 27671 27705 +f 27670 27705 27704 +f 27671 27672 27705 +f 27672 27706 27705 +f 27672 27673 27707 +f 27672 27707 27706 +f 27673 27674 27707 +f 27674 27708 27707 +f 27674 27675 27709 +f 27674 27709 27708 +f 27675 27676 27709 +f 27676 27710 27709 +f 27676 27677 27711 +f 27676 27711 27710 +f 27677 1033 
27711 +f 1033 1162 27711 +f 27678 27679 27712 +f 27679 27713 27712 +f 27679 27680 27714 +f 27679 27714 27713 +f 27680 27681 27714 +f 27681 27715 27714 +f 27681 27682 27716 +f 27681 27716 27715 +f 27682 27683 27716 +f 27683 27717 27716 +f 27683 27684 27718 +f 27683 27718 27717 +f 27684 27685 27718 +f 27685 27719 27718 +f 27685 27686 27720 +f 27685 27720 27719 +f 27686 27687 27720 +f 27687 27721 27720 +f 27687 27688 27722 +f 27687 27722 27721 +f 27688 27689 27722 +f 27689 27723 27722 +f 27689 27690 27724 +f 27689 27724 27723 +f 27690 27691 27724 +f 27691 27725 27724 +f 27691 27692 27726 +f 27691 27726 27725 +f 27692 27693 27726 +f 27693 27727 27726 +f 27693 27694 27728 +f 27693 27728 27727 +f 27694 27695 27728 +f 27695 27729 27728 +f 27695 27696 27730 +f 27695 27730 27729 +f 27696 27697 27730 +f 27697 27731 27730 +f 27697 27698 27732 +f 27697 27732 27731 +f 27698 27699 27732 +f 27699 27733 27732 +f 27699 27700 27734 +f 27699 27734 27733 +f 27700 27701 27734 +f 27701 27735 27734 +f 27701 27702 27736 +f 27701 27736 27735 +f 27702 27703 27736 +f 27703 27737 27736 +f 27703 27704 27738 +f 27703 27738 27737 +f 27704 27705 27738 +f 27705 27739 27738 +f 27705 27706 27740 +f 27705 27740 27739 +f 27706 27707 27740 +f 27707 27741 27740 +f 27707 27708 27742 +f 27707 27742 27741 +f 27708 27709 27742 +f 27709 27743 27742 +f 27709 27710 27744 +f 27709 27744 27743 +f 27710 27711 27744 +f 27711 27745 27744 +f 27711 1162 1291 +f 27711 1291 27745 +f 27712 27713 27746 +f 27713 27714 27746 +f 27714 27747 27746 +f 27714 27715 27748 +f 27714 27748 27747 +f 27715 27716 27748 +f 27716 27749 27748 +f 27716 27717 27750 +f 27716 27750 27749 +f 27717 27718 27750 +f 27718 27751 27750 +f 27718 27719 27752 +f 27718 27752 27751 +f 27719 27720 27752 +f 27720 27753 27752 +f 27720 27721 27754 +f 27720 27754 27753 +f 27721 27722 27754 +f 27722 27755 27754 +f 27722 27723 27756 +f 27722 27756 27755 +f 27723 27724 27756 +f 27724 27757 27756 +f 27724 27725 27758 +f 27724 27758 27757 +f 27725 27726 27758 +f 27726 27759 27758 +f 27726 27727 27760 +f 27726 27760 27759 +f 27727 27728 27760 +f 27728 27761 27760 +f 27728 27729 27762 +f 27728 27762 27761 +f 27729 27730 27762 +f 27730 27763 27762 +f 27730 27731 27764 +f 27730 27764 27763 +f 27731 27732 27764 +f 27732 27765 27764 +f 27732 27733 27766 +f 27732 27766 27765 +f 27733 27734 27766 +f 27734 27767 27766 +f 27734 27735 27768 +f 27734 27768 27767 +f 27735 27736 27768 +f 27736 27769 27768 +f 27736 27737 27770 +f 27736 27770 27769 +f 27737 27738 27770 +f 27738 27771 27770 +f 27738 27739 27772 +f 27738 27772 27771 +f 27739 27740 27772 +f 27740 27773 27772 +f 27740 27741 27774 +f 27740 27774 27773 +f 27741 27742 27774 +f 27742 27775 27774 +f 27742 27743 27776 +f 27742 27776 27775 +f 27743 27744 27776 +f 27744 27777 27776 +f 27744 27745 27778 +f 27744 27778 27777 +f 27745 1291 27778 +f 1291 1420 27778 +f 27746 27747 27779 +f 27747 27748 27779 +f 27748 27780 27779 +f 27748 27749 27781 +f 27748 27781 27780 +f 27749 27750 27781 +f 27750 27782 27781 +f 27750 27751 27783 +f 27750 27783 27782 +f 27751 27752 27783 +f 27752 27784 27783 +f 27752 27753 27785 +f 27752 27785 27784 +f 27753 27754 27785 +f 27754 27786 27785 +f 27754 27755 27787 +f 27754 27787 27786 +f 27755 27756 27787 +f 27756 27788 27787 +f 27756 27757 27789 +f 27756 27789 27788 +f 27757 27758 27789 +f 27758 27790 27789 +f 27758 27759 27791 +f 27758 27791 27790 +f 27759 27760 27791 +f 27760 27792 27791 +f 27760 27761 27793 +f 27760 27793 27792 +f 27761 27762 27793 +f 27762 27794 27793 +f 27762 27763 27795 +f 27762 27795 27794 +f 
27763 27764 27795 +f 27764 27796 27795 +f 27764 27765 27797 +f 27764 27797 27796 +f 27765 27766 27797 +f 27766 27798 27797 +f 27766 27767 27799 +f 27766 27799 27798 +f 27767 27768 27799 +f 27768 27800 27799 +f 27768 27769 27801 +f 27768 27801 27800 +f 27769 27770 27801 +f 27770 27802 27801 +f 27770 27771 27803 +f 27770 27803 27802 +f 27771 27772 27803 +f 27772 27804 27803 +f 27772 27773 27805 +f 27772 27805 27804 +f 27773 27774 27805 +f 27774 27806 27805 +f 27774 27775 27807 +f 27774 27807 27806 +f 27775 27776 27807 +f 27776 27808 27807 +f 27776 27777 27809 +f 27776 27809 27808 +f 27777 27778 27809 +f 27778 27810 27809 +f 27778 1420 1549 +f 27778 1549 27810 +f 27779 27780 27812 +f 27779 27812 27811 +f 27780 27781 27812 +f 27781 27813 27812 +f 27781 27782 27814 +f 27781 27814 27813 +f 27782 27783 27814 +f 27783 27815 27814 +f 27783 27784 27816 +f 27783 27816 27815 +f 27784 27785 27816 +f 27785 27817 27816 +f 27785 27786 27818 +f 27785 27818 27817 +f 27786 27787 27818 +f 27787 27819 27818 +f 27787 27788 27820 +f 27787 27820 27819 +f 27788 27789 27820 +f 27789 27821 27820 +f 27789 27790 27822 +f 27789 27822 27821 +f 27790 27791 27822 +f 27791 27823 27822 +f 27791 27792 27824 +f 27791 27824 27823 +f 27792 27793 27824 +f 27793 27825 27824 +f 27793 27794 27826 +f 27793 27826 27825 +f 27794 27795 27826 +f 27795 27827 27826 +f 27795 27796 27828 +f 27795 27828 27827 +f 27796 27797 27828 +f 27797 27829 27828 +f 27797 27798 27830 +f 27797 27830 27829 +f 27798 27799 27830 +f 27799 27831 27830 +f 27799 27800 27832 +f 27799 27832 27831 +f 27800 27801 27832 +f 27801 27833 27832 +f 27801 27802 27834 +f 27801 27834 27833 +f 27802 27803 27834 +f 27803 27835 27834 +f 27803 27804 27836 +f 27803 27836 27835 +f 27804 27805 27836 +f 27805 27837 27836 +f 27805 27806 27838 +f 27805 27838 27837 +f 27806 27807 27838 +f 27807 27839 27838 +f 27807 27808 27840 +f 27807 27840 27839 +f 27808 27809 27840 +f 27809 27841 27840 +f 27809 27810 27842 +f 27809 27842 27841 +f 27810 1549 27842 +f 1549 1678 27842 +f 27811 27812 27843 +f 27812 27844 27843 +f 27812 27813 27845 +f 27812 27845 27844 +f 27813 27814 27845 +f 27814 27846 27845 +f 27814 27815 27847 +f 27814 27847 27846 +f 27815 27816 27847 +f 27816 27848 27847 +f 27816 27817 27849 +f 27816 27849 27848 +f 27817 27818 27849 +f 27818 27850 27849 +f 27818 27819 27851 +f 27818 27851 27850 +f 27819 27820 27851 +f 27820 27852 27851 +f 27820 27821 27853 +f 27820 27853 27852 +f 27821 27822 27853 +f 27822 27854 27853 +f 27822 27823 27855 +f 27822 27855 27854 +f 27823 27824 27855 +f 27824 27856 27855 +f 27824 27825 27857 +f 27824 27857 27856 +f 27825 27826 27857 +f 27826 27858 27857 +f 27826 27827 27859 +f 27826 27859 27858 +f 27827 27828 27859 +f 27828 27860 27859 +f 27828 27829 27861 +f 27828 27861 27860 +f 27829 27830 27861 +f 27830 27862 27861 +f 27830 27831 27863 +f 27830 27863 27862 +f 27831 27832 27863 +f 27832 27864 27863 +f 27832 27833 27865 +f 27832 27865 27864 +f 27833 27834 27865 +f 27834 27866 27865 +f 27834 27835 27867 +f 27834 27867 27866 +f 27835 27836 27867 +f 27836 27868 27867 +f 27836 27837 27869 +f 27836 27869 27868 +f 27837 27838 27869 +f 27838 27870 27869 +f 27838 27839 27871 +f 27838 27871 27870 +f 27839 27840 27871 +f 27840 27872 27871 +f 27840 27841 27873 +f 27840 27873 27872 +f 27841 27842 27873 +f 27842 27874 27873 +f 27842 1678 1807 +f 27842 1807 27874 +f 27843 27844 27875 +f 27844 27845 27875 +f 27845 27876 27875 +f 27845 27846 27877 +f 27845 27877 27876 +f 27846 27847 27877 +f 27847 27878 27877 +f 27847 27848 27879 +f 27847 27879 27878 +f 27848 27849 
27879 +f 27849 27880 27879 +f 27849 27850 27881 +f 27849 27881 27880 +f 27850 27851 27881 +f 27851 27882 27881 +f 27851 27852 27883 +f 27851 27883 27882 +f 27852 27853 27883 +f 27853 27884 27883 +f 27853 27854 27885 +f 27853 27885 27884 +f 27854 27855 27885 +f 27855 27886 27885 +f 27855 27856 27887 +f 27855 27887 27886 +f 27856 27857 27887 +f 27857 27888 27887 +f 27857 27858 27889 +f 27857 27889 27888 +f 27858 27859 27889 +f 27859 27890 27889 +f 27859 27860 27891 +f 27859 27891 27890 +f 27860 27861 27891 +f 27861 27892 27891 +f 27861 27862 27893 +f 27861 27893 27892 +f 27862 27863 27893 +f 27863 27894 27893 +f 27863 27864 27895 +f 27863 27895 27894 +f 27864 27865 27895 +f 27865 27896 27895 +f 27865 27866 27897 +f 27865 27897 27896 +f 27866 27867 27897 +f 27867 27898 27897 +f 27867 27868 27899 +f 27867 27899 27898 +f 27868 27869 27899 +f 27869 27900 27899 +f 27869 27870 27901 +f 27869 27901 27900 +f 27870 27871 27901 +f 27871 27902 27901 +f 27871 27872 27903 +f 27871 27903 27902 +f 27872 27873 27903 +f 27873 27904 27903 +f 27873 27874 27905 +f 27873 27905 27904 +f 27874 1807 27905 +f 1807 1936 27905 +f 27875 27876 27907 +f 27875 27907 27906 +f 27876 27877 27907 +f 27877 27908 27907 +f 27877 27878 27909 +f 27877 27909 27908 +f 27878 27879 27909 +f 27879 27910 27909 +f 27879 27880 27911 +f 27879 27911 27910 +f 27880 27881 27911 +f 27881 27912 27911 +f 27881 27882 27913 +f 27881 27913 27912 +f 27882 27883 27913 +f 27883 27914 27913 +f 27883 27884 27915 +f 27883 27915 27914 +f 27884 27885 27915 +f 27885 27916 27915 +f 27885 27886 27917 +f 27885 27917 27916 +f 27886 27887 27917 +f 27887 27918 27917 +f 27887 27888 27919 +f 27887 27919 27918 +f 27888 27889 27919 +f 27889 27920 27919 +f 27889 27890 27921 +f 27889 27921 27920 +f 27890 27891 27921 +f 27891 27922 27921 +f 27891 27892 27923 +f 27891 27923 27922 +f 27892 27893 27923 +f 27893 27924 27923 +f 27893 27894 27925 +f 27893 27925 27924 +f 27894 27895 27925 +f 27895 27926 27925 +f 27895 27896 27927 +f 27895 27927 27926 +f 27896 27897 27927 +f 27897 27928 27927 +f 27897 27898 27929 +f 27897 27929 27928 +f 27898 27899 27929 +f 27899 27930 27929 +f 27899 27900 27931 +f 27899 27931 27930 +f 27900 27901 27931 +f 27901 27932 27931 +f 27901 27902 27933 +f 27901 27933 27932 +f 27902 27903 27933 +f 27903 27934 27933 +f 27903 27904 27935 +f 27903 27935 27934 +f 27904 27905 27935 +f 27905 27936 27935 +f 27905 1936 2065 +f 27905 2065 27936 +f 27906 27907 27937 +f 27907 27938 27937 +f 27907 27908 27939 +f 27907 27939 27938 +f 27908 27909 27939 +f 27909 27940 27939 +f 27909 27910 27941 +f 27909 27941 27940 +f 27910 27911 27941 +f 27911 27942 27941 +f 27911 27912 27943 +f 27911 27943 27942 +f 27912 27913 27943 +f 27913 27944 27943 +f 27913 27914 27945 +f 27913 27945 27944 +f 27914 27915 27945 +f 27915 27946 27945 +f 27915 27916 27947 +f 27915 27947 27946 +f 27916 27917 27947 +f 27917 27948 27947 +f 27917 27918 27949 +f 27917 27949 27948 +f 27918 27919 27949 +f 27919 27950 27949 +f 27919 27920 27951 +f 27919 27951 27950 +f 27920 27921 27951 +f 27921 27952 27951 +f 27921 27922 27953 +f 27921 27953 27952 +f 27922 27923 27953 +f 27923 27954 27953 +f 27923 27924 27955 +f 27923 27955 27954 +f 27924 27925 27955 +f 27925 27956 27955 +f 27925 27926 27957 +f 27925 27957 27956 +f 27926 27927 27957 +f 27927 27958 27957 +f 27927 27928 27959 +f 27927 27959 27958 +f 27928 27929 27959 +f 27929 27960 27959 +f 27929 27930 27961 +f 27929 27961 27960 +f 27930 27931 27961 +f 27931 27962 27961 +f 27931 27932 27963 +f 27931 27963 27962 +f 27932 27933 27963 +f 27933 27964 27963 +f 
27933 27934 27965 +f 27933 27965 27964 +f 27934 27935 27965 +f 27935 27966 27965 +f 27935 27936 27967 +f 27935 27967 27966 +f 27936 2065 27967 +f 2065 2194 27967 +f 27937 27938 27969 +f 27937 27969 27968 +f 27938 27939 27969 +f 27939 27970 27969 +f 27939 27940 27971 +f 27939 27971 27970 +f 27940 27941 27971 +f 27941 27972 27971 +f 27941 27942 27973 +f 27941 27973 27972 +f 27942 27943 27973 +f 27943 27974 27973 +f 27943 27944 27975 +f 27943 27975 27974 +f 27944 27945 27975 +f 27945 27976 27975 +f 27945 27946 27977 +f 27945 27977 27976 +f 27946 27947 27977 +f 27947 27978 27977 +f 27947 27948 27979 +f 27947 27979 27978 +f 27948 27949 27979 +f 27949 27980 27979 +f 27949 27950 27981 +f 27949 27981 27980 +f 27950 27951 27981 +f 27951 27982 27981 +f 27951 27952 27983 +f 27951 27983 27982 +f 27952 27953 27983 +f 27953 27984 27983 +f 27953 27954 27985 +f 27953 27985 27984 +f 27954 27955 27985 +f 27955 27986 27985 +f 27955 27956 27987 +f 27955 27987 27986 +f 27956 27957 27987 +f 27957 27988 27987 +f 27957 27958 27989 +f 27957 27989 27988 +f 27958 27959 27989 +f 27959 27990 27989 +f 27959 27960 27991 +f 27959 27991 27990 +f 27960 27961 27991 +f 27961 27992 27991 +f 27961 27962 27993 +f 27961 27993 27992 +f 27962 27963 27993 +f 27963 27994 27993 +f 27963 27964 27995 +f 27963 27995 27994 +f 27964 27965 27995 +f 27965 27996 27995 +f 27965 27966 27997 +f 27965 27997 27996 +f 27966 27967 27997 +f 27967 27998 27997 +f 27967 2194 2323 +f 27967 2323 27998 +f 27968 27969 27999 +f 27969 28000 27999 +f 27969 27970 28001 +f 27969 28001 28000 +f 27970 27971 28001 +f 27971 28002 28001 +f 27971 27972 28003 +f 27971 28003 28002 +f 27972 27973 28003 +f 27973 28004 28003 +f 27973 27974 28005 +f 27973 28005 28004 +f 27974 27975 28005 +f 27975 28006 28005 +f 27975 27976 28007 +f 27975 28007 28006 +f 27976 27977 28007 +f 27977 28008 28007 +f 27977 27978 28009 +f 27977 28009 28008 +f 27978 27979 28009 +f 27979 28010 28009 +f 27979 27980 28011 +f 27979 28011 28010 +f 27980 27981 28011 +f 27981 28012 28011 +f 27981 27982 28013 +f 27981 28013 28012 +f 27982 27983 28013 +f 27983 28014 28013 +f 27983 27984 28015 +f 27983 28015 28014 +f 27984 27985 28015 +f 27985 28016 28015 +f 27985 27986 28017 +f 27985 28017 28016 +f 27986 27987 28017 +f 27987 28018 28017 +f 27987 27988 28019 +f 27987 28019 28018 +f 27988 27989 28019 +f 27989 28020 28019 +f 27989 27990 28021 +f 27989 28021 28020 +f 27990 27991 28021 +f 27991 28022 28021 +f 27991 27992 28023 +f 27991 28023 28022 +f 27992 27993 28023 +f 27993 28024 28023 +f 27993 27994 28025 +f 27993 28025 28024 +f 27994 27995 28025 +f 27995 28026 28025 +f 27995 27996 28027 +f 27995 28027 28026 +f 27996 27997 28027 +f 27997 28028 28027 +f 27997 27998 28029 +f 27997 28029 28028 +f 27998 2323 28029 +f 2323 2452 28029 +f 27999 28000 28031 +f 27999 28031 28030 +f 28000 28001 28031 +f 28001 28032 28031 +f 28001 28002 28033 +f 28001 28033 28032 +f 28002 28003 28033 +f 28003 28034 28033 +f 28003 28004 28035 +f 28003 28035 28034 +f 28004 28005 28035 +f 28005 28036 28035 +f 28005 28006 28037 +f 28005 28037 28036 +f 28006 28007 28037 +f 28007 28038 28037 +f 28007 28008 28039 +f 28007 28039 28038 +f 28008 28009 28039 +f 28009 28040 28039 +f 28009 28010 28041 +f 28009 28041 28040 +f 28010 28011 28041 +f 28011 28042 28041 +f 28011 28012 28043 +f 28011 28043 28042 +f 28012 28013 28043 +f 28013 28044 28043 +f 28013 28014 28045 +f 28013 28045 28044 +f 28014 28015 28045 +f 28015 28046 28045 +f 28015 28016 28047 +f 28015 28047 28046 +f 28016 28017 28047 +f 28017 28048 28047 +f 28017 28018 28049 +f 28017 28049 
28048 +f 28018 28019 28049 +f 28019 28050 28049 +f 28019 28020 28051 +f 28019 28051 28050 +f 28020 28021 28051 +f 28021 28052 28051 +f 28021 28022 28053 +f 28021 28053 28052 +f 28022 28023 28053 +f 28023 28054 28053 +f 28023 28024 28055 +f 28023 28055 28054 +f 28024 28025 28055 +f 28025 28056 28055 +f 28025 28026 28057 +f 28025 28057 28056 +f 28026 28027 28057 +f 28027 28058 28057 +f 28027 28028 28059 +f 28027 28059 28058 +f 28028 28029 28059 +f 28029 28060 28059 +f 28029 2452 2581 +f 28029 2581 28060 +f 28030 28031 28061 +f 28031 28062 28061 +f 28031 28032 28063 +f 28031 28063 28062 +f 28032 28033 28063 +f 28033 28064 28063 +f 28033 28034 28065 +f 28033 28065 28064 +f 28034 28035 28065 +f 28035 28066 28065 +f 28035 28036 28067 +f 28035 28067 28066 +f 28036 28037 28067 +f 28037 28068 28067 +f 28037 28038 28069 +f 28037 28069 28068 +f 28038 28039 28069 +f 28039 28070 28069 +f 28039 28040 28071 +f 28039 28071 28070 +f 28040 28041 28071 +f 28041 28072 28071 +f 28041 28042 28073 +f 28041 28073 28072 +f 28042 28043 28073 +f 28043 28074 28073 +f 28043 28044 28075 +f 28043 28075 28074 +f 28044 28045 28075 +f 28045 28076 28075 +f 28045 28046 28077 +f 28045 28077 28076 +f 28046 28047 28077 +f 28047 28078 28077 +f 28047 28048 28079 +f 28047 28079 28078 +f 28048 28049 28079 +f 28049 28080 28079 +f 28049 28050 28081 +f 28049 28081 28080 +f 28050 28051 28081 +f 28051 28082 28081 +f 28051 28052 28083 +f 28051 28083 28082 +f 28052 28053 28083 +f 28053 28084 28083 +f 28053 28054 28085 +f 28053 28085 28084 +f 28054 28055 28085 +f 28055 28086 28085 +f 28055 28056 28087 +f 28055 28087 28086 +f 28056 28057 28087 +f 28057 28088 28087 +f 28057 28058 28089 +f 28057 28089 28088 +f 28058 28059 28089 +f 28059 28090 28089 +f 28059 28060 28091 +f 28059 28091 28090 +f 28060 2581 28091 +f 2581 2710 28091 +f 28061 28062 28093 +f 28061 28093 28092 +f 28062 28063 28093 +f 28063 28094 28093 +f 28063 28064 28095 +f 28063 28095 28094 +f 28064 28065 28095 +f 28065 28096 28095 +f 28065 28066 28097 +f 28065 28097 28096 +f 28066 28067 28097 +f 28067 28098 28097 +f 28067 28068 28099 +f 28067 28099 28098 +f 28068 28069 28099 +f 28069 28100 28099 +f 28069 28070 28101 +f 28069 28101 28100 +f 28070 28071 28101 +f 28071 28102 28101 +f 28071 28072 28103 +f 28071 28103 28102 +f 28072 28073 28103 +f 28073 28104 28103 +f 28073 28074 28105 +f 28073 28105 28104 +f 28074 28075 28105 +f 28075 28106 28105 +f 28075 28076 28107 +f 28075 28107 28106 +f 28076 28077 28107 +f 28077 28108 28107 +f 28077 28078 28109 +f 28077 28109 28108 +f 28078 28079 28109 +f 28079 28110 28109 +f 28079 28080 28111 +f 28079 28111 28110 +f 28080 28081 28111 +f 28081 28112 28111 +f 28081 28082 28113 +f 28081 28113 28112 +f 28082 28083 28113 +f 28083 28114 28113 +f 28083 28084 28115 +f 28083 28115 28114 +f 28084 28085 28115 +f 28085 28116 28115 +f 28085 28086 28117 +f 28085 28117 28116 +f 28086 28087 28117 +f 28087 28118 28117 +f 28087 28088 28119 +f 28087 28119 28118 +f 28088 28089 28119 +f 28089 28120 28119 +f 28089 28090 28121 +f 28089 28121 28120 +f 28090 28091 28121 +f 28091 28122 28121 +f 28091 2710 2839 +f 28091 2839 28122 +f 28092 28093 28123 +f 28093 28124 28123 +f 28093 28094 28125 +f 28093 28125 28124 +f 28094 28095 28125 +f 28095 28126 28125 +f 28095 28096 28127 +f 28095 28127 28126 +f 28096 28097 28127 +f 28097 28128 28127 +f 28097 28098 28129 +f 28097 28129 28128 +f 28098 28099 28129 +f 28099 28130 28129 +f 28099 28100 28131 +f 28099 28131 28130 +f 28100 28101 28131 +f 28101 28132 28131 +f 28101 28102 28133 +f 28101 28133 28132 +f 28102 28103 28133 +f 28103 
28134 28133
+f 28103 28104 28135
+f 28103 28135 28134
[... several thousand further "+f v1 v2 v3" lines elided: machine-generated Wavefront OBJ triangle-face data added by this diff hunk, stitching quad strips into triangles over vertex indices from roughly 2839 up to 29744 ...]
+f 29716 29717 29744
+f
29716 29744 29743 +f 29717 29718 29744 +f 29718 29745 29744 +f 29718 29719 29746 +f 29718 29746 29745 +f 29719 29720 29746 +f 29720 29747 29746 +f 29720 29721 29748 +f 29720 29748 29747 +f 29721 29722 29748 +f 29722 29749 29748 +f 29722 29723 29750 +f 29722 29750 29749 +f 29723 29724 29750 +f 29724 29751 29750 +f 29724 29725 29752 +f 29724 29752 29751 +f 29725 29726 29752 +f 29726 29753 29752 +f 29726 29727 29754 +f 29726 29754 29753 +f 29727 29728 29754 +f 29728 29755 29754 +f 29728 29729 29756 +f 29728 29756 29755 +f 29729 29730 29756 +f 29730 29757 29756 +f 29730 29731 29758 +f 29730 29758 29757 +f 29731 29732 29758 +f 29732 29759 29758 +f 29732 10179 10305 +f 29732 10305 29759 +f 29733 29734 29760 +f 29734 29761 29760 +f 29734 29735 29762 +f 29734 29762 29761 +f 29735 29736 29762 +f 29736 29763 29762 +f 29736 29737 29764 +f 29736 29764 29763 +f 29737 29738 29764 +f 29738 29765 29764 +f 29738 29739 29766 +f 29738 29766 29765 +f 29739 29740 29766 +f 29740 29767 29766 +f 29740 29741 29768 +f 29740 29768 29767 +f 29741 29742 29768 +f 29742 29769 29768 +f 29742 29743 29770 +f 29742 29770 29769 +f 29743 29744 29770 +f 29744 29771 29770 +f 29744 29745 29772 +f 29744 29772 29771 +f 29745 29746 29772 +f 29746 29773 29772 +f 29746 29747 29774 +f 29746 29774 29773 +f 29747 29748 29774 +f 29748 29775 29774 +f 29748 29749 29776 +f 29748 29776 29775 +f 29749 29750 29776 +f 29750 29777 29776 +f 29750 29751 29778 +f 29750 29778 29777 +f 29751 29752 29778 +f 29752 29779 29778 +f 29752 29753 29780 +f 29752 29780 29779 +f 29753 29754 29780 +f 29754 29781 29780 +f 29754 29755 29782 +f 29754 29782 29781 +f 29755 29756 29782 +f 29756 29783 29782 +f 29756 29757 29784 +f 29756 29784 29783 +f 29757 29758 29784 +f 29758 29785 29784 +f 29758 29759 29786 +f 29758 29786 29785 +f 29759 10305 29786 +f 10305 10433 29786 +f 29760 29761 29788 +f 29760 29788 29787 +f 29761 29762 29788 +f 29762 29789 29788 +f 29762 29763 29790 +f 29762 29790 29789 +f 29763 29764 29790 +f 29764 29791 29790 +f 29764 29765 29792 +f 29764 29792 29791 +f 29765 29766 29792 +f 29766 29793 29792 +f 29766 29767 29794 +f 29766 29794 29793 +f 29767 29768 29794 +f 29768 29795 29794 +f 29768 29769 29796 +f 29768 29796 29795 +f 29769 29770 29796 +f 29770 29797 29796 +f 29770 29771 29798 +f 29770 29798 29797 +f 29771 29772 29798 +f 29772 29799 29798 +f 29772 29773 29800 +f 29772 29800 29799 +f 29773 29774 29800 +f 29774 29801 29800 +f 29774 29775 29802 +f 29774 29802 29801 +f 29775 29776 29802 +f 29776 29803 29802 +f 29776 29777 29804 +f 29776 29804 29803 +f 29777 29778 29804 +f 29778 29805 29804 +f 29778 29779 29806 +f 29778 29806 29805 +f 29779 29780 29806 +f 29780 29807 29806 +f 29780 29781 29808 +f 29780 29808 29807 +f 29781 29782 29808 +f 29782 29809 29808 +f 29782 29783 29810 +f 29782 29810 29809 +f 29783 29784 29810 +f 29784 29811 29810 +f 29784 29785 29812 +f 29784 29812 29811 +f 29785 29786 29812 +f 29786 29813 29812 +f 29786 10433 10562 +f 29786 10562 29813 +f 29787 29788 29814 +f 29788 29815 29814 +f 29788 29789 29816 +f 29788 29816 29815 +f 29789 29790 29816 +f 29790 29817 29816 +f 29790 29791 29818 +f 29790 29818 29817 +f 29791 29792 29818 +f 29792 29819 29818 +f 29792 29793 29820 +f 29792 29820 29819 +f 29793 29794 29820 +f 29794 29821 29820 +f 29794 29795 29822 +f 29794 29822 29821 +f 29795 29796 29822 +f 29796 29823 29822 +f 29796 29797 29824 +f 29796 29824 29823 +f 29797 29798 29824 +f 29798 29825 29824 +f 29798 29799 29826 +f 29798 29826 29825 +f 29799 29800 29826 +f 29800 29827 29826 +f 29800 29801 29828 +f 29800 29828 29827 +f 29801 
29802 29828 +f 29802 29829 29828 +f 29802 29803 29830 +f 29802 29830 29829 +f 29803 29804 29830 +f 29804 29831 29830 +f 29804 29805 29832 +f 29804 29832 29831 +f 29805 29806 29832 +f 29806 29833 29832 +f 29806 29807 29834 +f 29806 29834 29833 +f 29807 29808 29834 +f 29808 29835 29834 +f 29808 29809 29836 +f 29808 29836 29835 +f 29809 29810 29836 +f 29810 29837 29836 +f 29810 29811 29838 +f 29810 29838 29837 +f 29811 29812 29838 +f 29812 29839 29838 +f 29812 29813 29840 +f 29812 29840 29839 +f 29813 10562 29840 +f 10562 10691 29840 +f 29814 29815 29842 +f 29814 29842 29841 +f 29815 29816 29842 +f 29816 29843 29842 +f 29816 29817 29844 +f 29816 29844 29843 +f 29817 29818 29844 +f 29818 29845 29844 +f 29818 29819 29846 +f 29818 29846 29845 +f 29819 29820 29846 +f 29820 29847 29846 +f 29820 29821 29848 +f 29820 29848 29847 +f 29821 29822 29848 +f 29822 29849 29848 +f 29822 29823 29850 +f 29822 29850 29849 +f 29823 29824 29850 +f 29824 29851 29850 +f 29824 29825 29852 +f 29824 29852 29851 +f 29825 29826 29852 +f 29826 29853 29852 +f 29826 29827 29854 +f 29826 29854 29853 +f 29827 29828 29854 +f 29828 29855 29854 +f 29828 29829 29856 +f 29828 29856 29855 +f 29829 29830 29856 +f 29830 29857 29856 +f 29830 29831 29858 +f 29830 29858 29857 +f 29831 29832 29858 +f 29832 29859 29858 +f 29832 29833 29860 +f 29832 29860 29859 +f 29833 29834 29860 +f 29834 29861 29860 +f 29834 29835 29862 +f 29834 29862 29861 +f 29835 29836 29862 +f 29836 29863 29862 +f 29836 29837 29864 +f 29836 29864 29863 +f 29837 29838 29864 +f 29838 29865 29864 +f 29838 29839 29866 +f 29838 29866 29865 +f 29839 29840 29866 +f 29840 29867 29866 +f 29840 10691 10820 +f 29840 10820 29867 +f 29841 29842 29868 +f 29842 29869 29868 +f 29842 29843 29870 +f 29842 29870 29869 +f 29843 29844 29870 +f 29844 29871 29870 +f 29844 29845 29872 +f 29844 29872 29871 +f 29845 29846 29872 +f 29846 29873 29872 +f 29846 29847 29874 +f 29846 29874 29873 +f 29847 29848 29874 +f 29848 29875 29874 +f 29848 29849 29876 +f 29848 29876 29875 +f 29849 29850 29876 +f 29850 29877 29876 +f 29850 29851 29878 +f 29850 29878 29877 +f 29851 29852 29878 +f 29852 29879 29878 +f 29852 29853 29880 +f 29852 29880 29879 +f 29853 29854 29880 +f 29854 29881 29880 +f 29854 29855 29882 +f 29854 29882 29881 +f 29855 29856 29882 +f 29856 29883 29882 +f 29856 29857 29884 +f 29856 29884 29883 +f 29857 29858 29884 +f 29858 29885 29884 +f 29858 29859 29886 +f 29858 29886 29885 +f 29859 29860 29886 +f 29860 29887 29886 +f 29860 29861 29888 +f 29860 29888 29887 +f 29861 29862 29888 +f 29862 29889 29888 +f 29862 29863 29890 +f 29862 29890 29889 +f 29863 29864 29890 +f 29864 29891 29890 +f 29864 29865 29892 +f 29864 29892 29891 +f 29865 29866 29892 +f 29866 29893 29892 +f 29866 29867 29894 +f 29866 29894 29893 +f 29867 10820 29894 +f 10820 10949 29894 +f 29868 29896 29895 +f 29868 29869 29897 +f 29868 29897 29896 +f 29869 29870 29897 +f 29870 29898 29897 +f 29870 29871 29899 +f 29870 29899 29898 +f 29871 29872 29899 +f 29872 29900 29899 +f 29872 29873 29901 +f 29872 29901 29900 +f 29873 29874 29901 +f 29874 29902 29901 +f 29874 29875 29903 +f 29874 29903 29902 +f 29875 29876 29903 +f 29876 29904 29903 +f 29876 29877 29905 +f 29876 29905 29904 +f 29877 29878 29905 +f 29878 29906 29905 +f 29878 29879 29907 +f 29878 29907 29906 +f 29879 29880 29907 +f 29880 29908 29907 +f 29880 29881 29909 +f 29880 29909 29908 +f 29881 29882 29909 +f 29882 29910 29909 +f 29882 29883 29911 +f 29882 29911 29910 +f 29883 29884 29911 +f 29884 29912 29911 +f 29884 29885 29913 +f 29884 29913 29912 +f 29885 29886 
29913 +f 29886 29914 29913 +f 29886 29887 29915 +f 29886 29915 29914 +f 29887 29888 29915 +f 29888 29916 29915 +f 29888 29889 29917 +f 29888 29917 29916 +f 29889 29890 29917 +f 29890 29918 29917 +f 29890 29891 29919 +f 29890 29919 29918 +f 29891 29892 29919 +f 29892 29920 29919 +f 29892 29893 29921 +f 29892 29921 29920 +f 29893 29894 29921 +f 29894 29922 29921 +f 29894 10949 11078 +f 29894 11078 29922 +f 29895 29896 29924 +f 29895 29924 29923 +f 29896 29897 29924 +f 29897 29925 29924 +f 29897 29898 29926 +f 29897 29926 29925 +f 29898 29899 29926 +f 29899 29927 29926 +f 29899 29900 29928 +f 29899 29928 29927 +f 29900 29901 29928 +f 29901 29929 29928 +f 29901 29902 29930 +f 29901 29930 29929 +f 29902 29903 29930 +f 29903 29931 29930 +f 29903 29904 29932 +f 29903 29932 29931 +f 29904 29905 29932 +f 29905 29933 29932 +f 29905 29906 29934 +f 29905 29934 29933 +f 29906 29907 29934 +f 29907 29935 29934 +f 29907 29908 29936 +f 29907 29936 29935 +f 29908 29909 29936 +f 29909 29937 29936 +f 29909 29910 29938 +f 29909 29938 29937 +f 29910 29911 29938 +f 29911 29939 29938 +f 29911 29912 29940 +f 29911 29940 29939 +f 29912 29913 29940 +f 29913 29941 29940 +f 29913 29914 29942 +f 29913 29942 29941 +f 29914 29915 29942 +f 29915 29943 29942 +f 29915 29916 29944 +f 29915 29944 29943 +f 29916 29917 29944 +f 29917 29945 29944 +f 29917 29918 29946 +f 29917 29946 29945 +f 29918 29919 29946 +f 29919 29947 29946 +f 29919 29920 29948 +f 29919 29948 29947 +f 29920 29921 29948 +f 29921 29949 29948 +f 29921 29922 29950 +f 29921 29950 29949 +f 29922 11078 29950 +f 11078 11207 29950 +f 29923 29924 29951 +f 29924 29952 29951 +f 29924 29925 29953 +f 29924 29953 29952 +f 29925 29926 29953 +f 29926 29954 29953 +f 29926 29927 29955 +f 29926 29955 29954 +f 29927 29928 29955 +f 29928 29956 29955 +f 29928 29929 29957 +f 29928 29957 29956 +f 29929 29930 29957 +f 29930 29958 29957 +f 29930 29931 29959 +f 29930 29959 29958 +f 29931 29932 29959 +f 29932 29960 29959 +f 29932 29933 29961 +f 29932 29961 29960 +f 29933 29934 29961 +f 29934 29962 29961 +f 29934 29935 29963 +f 29934 29963 29962 +f 29935 29936 29963 +f 29936 29964 29963 +f 29936 29937 29965 +f 29936 29965 29964 +f 29937 29938 29965 +f 29938 29966 29965 +f 29938 29939 29967 +f 29938 29967 29966 +f 29939 29940 29967 +f 29940 29968 29967 +f 29940 29941 29969 +f 29940 29969 29968 +f 29941 29942 29969 +f 29942 29970 29969 +f 29942 29943 29971 +f 29942 29971 29970 +f 29943 29944 29971 +f 29944 29972 29971 +f 29944 29945 29973 +f 29944 29973 29972 +f 29945 29946 29973 +f 29946 29974 29973 +f 29946 29947 29975 +f 29946 29975 29974 +f 29947 29948 29975 +f 29948 29976 29975 +f 29948 29949 29977 +f 29948 29977 29976 +f 29949 29950 29977 +f 29950 29978 29977 +f 29950 11207 11336 +f 29950 11336 29978 +f 29951 29952 29980 +f 29951 29980 29979 +f 29952 29953 29980 +f 29953 29981 29980 +f 29953 29954 29982 +f 29953 29982 29981 +f 29954 29955 29982 +f 29955 29983 29982 +f 29955 29956 29984 +f 29955 29984 29983 +f 29956 29957 29984 +f 29957 29985 29984 +f 29957 29958 29986 +f 29957 29986 29985 +f 29958 29959 29986 +f 29959 29987 29986 +f 29959 29960 29988 +f 29959 29988 29987 +f 29960 29961 29988 +f 29961 29989 29988 +f 29961 29962 29990 +f 29961 29990 29989 +f 29962 29963 29990 +f 29963 29991 29990 +f 29963 29964 29992 +f 29963 29992 29991 +f 29964 29965 29992 +f 29965 29993 29992 +f 29965 29966 29994 +f 29965 29994 29993 +f 29966 29967 29994 +f 29967 29995 29994 +f 29967 29968 29996 +f 29967 29996 29995 +f 29968 29969 29996 +f 29969 29997 29996 +f 29969 29970 29998 +f 29969 29998 29997 
+f 29970 29971 29998 +f 29971 29999 29998 +f 29971 29972 30000 +f 29971 30000 29999 +f 29972 29973 30000 +f 29973 30001 30000 +f 29973 29974 30002 +f 29973 30002 30001 +f 29974 29975 30002 +f 29975 30003 30002 +f 29975 29976 30004 +f 29975 30004 30003 +f 29976 29977 30004 +f 29977 30005 30004 +f 29977 29978 30006 +f 29977 30006 30005 +f 29978 11336 30006 +f 11336 11465 30006 +f 29979 29980 30007 +f 29980 30008 30007 +f 29980 29981 30009 +f 29980 30009 30008 +f 29981 29982 30009 +f 29982 30010 30009 +f 29982 29983 30011 +f 29982 30011 30010 +f 29983 29984 30011 +f 29984 30012 30011 +f 29984 29985 30013 +f 29984 30013 30012 +f 29985 29986 30013 +f 29986 30014 30013 +f 29986 29987 30015 +f 29986 30015 30014 +f 29987 29988 30015 +f 29988 30016 30015 +f 29988 29989 30017 +f 29988 30017 30016 +f 29989 29990 30017 +f 29990 30018 30017 +f 29990 29991 30019 +f 29990 30019 30018 +f 29991 29992 30019 +f 29992 30020 30019 +f 29992 29993 30021 +f 29992 30021 30020 +f 29993 29994 30021 +f 29994 30022 30021 +f 29994 29995 30023 +f 29994 30023 30022 +f 29995 29996 30023 +f 29996 30024 30023 +f 29996 29997 30025 +f 29996 30025 30024 +f 29997 29998 30025 +f 29998 30026 30025 +f 29998 29999 30027 +f 29998 30027 30026 +f 29999 30000 30027 +f 30000 30028 30027 +f 30000 30001 30029 +f 30000 30029 30028 +f 30001 30002 30029 +f 30002 30030 30029 +f 30002 30003 30031 +f 30002 30031 30030 +f 30003 30004 30031 +f 30004 30032 30031 +f 30004 30005 30033 +f 30004 30033 30032 +f 30005 30006 30033 +f 30006 30034 30033 +f 30006 11465 11594 +f 30006 11594 30034 +f 30007 30008 30036 +f 30007 30036 30035 +f 30008 30009 30036 +f 30009 30037 30036 +f 30009 30010 30038 +f 30009 30038 30037 +f 30010 30011 30038 +f 30011 30039 30038 +f 30011 30012 30040 +f 30011 30040 30039 +f 30012 30013 30040 +f 30013 30041 30040 +f 30013 30014 30042 +f 30013 30042 30041 +f 30014 30015 30042 +f 30015 30043 30042 +f 30015 30016 30044 +f 30015 30044 30043 +f 30016 30017 30044 +f 30017 30045 30044 +f 30017 30018 30046 +f 30017 30046 30045 +f 30018 30019 30046 +f 30019 30047 30046 +f 30019 30020 30048 +f 30019 30048 30047 +f 30020 30021 30048 +f 30021 30049 30048 +f 30021 30022 30050 +f 30021 30050 30049 +f 30022 30023 30050 +f 30023 30051 30050 +f 30023 30024 30052 +f 30023 30052 30051 +f 30024 30025 30052 +f 30025 30053 30052 +f 30025 30026 30054 +f 30025 30054 30053 +f 30026 30027 30054 +f 30027 30055 30054 +f 30027 30028 30056 +f 30027 30056 30055 +f 30028 30029 30056 +f 30029 30057 30056 +f 30029 30030 30058 +f 30029 30058 30057 +f 30030 30031 30058 +f 30031 30059 30058 +f 30031 30032 30060 +f 30031 30060 30059 +f 30032 30033 30060 +f 30033 30061 30060 +f 30033 30034 30062 +f 30033 30062 30061 +f 30034 11594 30062 +f 11594 11723 30062 +f 30035 30036 30063 +f 30036 30064 30063 +f 30036 30037 30065 +f 30036 30065 30064 +f 30037 30038 30065 +f 30038 30066 30065 +f 30038 30039 30067 +f 30038 30067 30066 +f 30039 30040 30067 +f 30040 30068 30067 +f 30040 30041 30069 +f 30040 30069 30068 +f 30041 30042 30069 +f 30042 30070 30069 +f 30042 30043 30071 +f 30042 30071 30070 +f 30043 30044 30071 +f 30044 30072 30071 +f 30044 30045 30073 +f 30044 30073 30072 +f 30045 30046 30073 +f 30046 30074 30073 +f 30046 30047 30075 +f 30046 30075 30074 +f 30047 30048 30075 +f 30048 30076 30075 +f 30048 30049 30077 +f 30048 30077 30076 +f 30049 30050 30077 +f 30050 30078 30077 +f 30050 30051 30079 +f 30050 30079 30078 +f 30051 30052 30079 +f 30052 30080 30079 +f 30052 30053 30081 +f 30052 30081 30080 +f 30053 30054 30081 +f 30054 30082 30081 +f 30054 30055 30083 +f 
30054 30083 30082 +f 30055 30056 30083 +f 30056 30084 30083 +f 30056 30057 30085 +f 30056 30085 30084 +f 30057 30058 30085 +f 30058 30086 30085 +f 30058 30059 30087 +f 30058 30087 30086 +f 30059 30060 30087 +f 30060 30088 30087 +f 30060 30061 30089 +f 30060 30089 30088 +f 30061 30062 30089 +f 30062 30090 30089 +f 30062 11723 11852 +f 30062 11852 30090 +f 30063 30064 30092 +f 30063 30092 30091 +f 30064 30065 30092 +f 30065 30093 30092 +f 30065 30066 30094 +f 30065 30094 30093 +f 30066 30067 30094 +f 30067 30095 30094 +f 30067 30068 30096 +f 30067 30096 30095 +f 30068 30069 30096 +f 30069 30097 30096 +f 30069 30070 30098 +f 30069 30098 30097 +f 30070 30071 30098 +f 30071 30099 30098 +f 30071 30072 30100 +f 30071 30100 30099 +f 30072 30073 30100 +f 30073 30101 30100 +f 30073 30074 30102 +f 30073 30102 30101 +f 30074 30075 30102 +f 30075 30103 30102 +f 30075 30076 30104 +f 30075 30104 30103 +f 30076 30077 30104 +f 30077 30105 30104 +f 30077 30078 30106 +f 30077 30106 30105 +f 30078 30079 30106 +f 30079 30107 30106 +f 30079 30080 30108 +f 30079 30108 30107 +f 30080 30081 30108 +f 30081 30109 30108 +f 30081 30082 30110 +f 30081 30110 30109 +f 30082 30083 30110 +f 30083 30111 30110 +f 30083 30084 30112 +f 30083 30112 30111 +f 30084 30085 30112 +f 30085 30113 30112 +f 30085 30086 30114 +f 30085 30114 30113 +f 30086 30087 30114 +f 30087 30115 30114 +f 30087 30088 30116 +f 30087 30116 30115 +f 30088 30089 30116 +f 30089 30117 30116 +f 30089 30090 30118 +f 30089 30118 30117 +f 30090 11852 30118 +f 11852 11981 30118 +f 30091 30092 30119 +f 30092 30120 30119 +f 30092 30093 30121 +f 30092 30121 30120 +f 30093 30094 30121 +f 30094 30122 30121 +f 30094 30095 30123 +f 30094 30123 30122 +f 30095 30096 30123 +f 30096 30124 30123 +f 30096 30097 30125 +f 30096 30125 30124 +f 30097 30098 30125 +f 30098 30126 30125 +f 30098 30099 30127 +f 30098 30127 30126 +f 30099 30100 30127 +f 30100 30128 30127 +f 30100 30101 30129 +f 30100 30129 30128 +f 30101 30102 30129 +f 30102 30130 30129 +f 30102 30103 30131 +f 30102 30131 30130 +f 30103 30104 30131 +f 30104 30132 30131 +f 30104 30105 30133 +f 30104 30133 30132 +f 30105 30106 30133 +f 30106 30134 30133 +f 30106 30107 30135 +f 30106 30135 30134 +f 30107 30108 30135 +f 30108 30136 30135 +f 30108 30109 30137 +f 30108 30137 30136 +f 30109 30110 30137 +f 30110 30138 30137 +f 30110 30111 30139 +f 30110 30139 30138 +f 30111 30112 30139 +f 30112 30140 30139 +f 30112 30113 30141 +f 30112 30141 30140 +f 30113 30114 30141 +f 30114 30142 30141 +f 30114 30115 30143 +f 30114 30143 30142 +f 30115 30116 30143 +f 30116 30144 30143 +f 30116 30117 30145 +f 30116 30145 30144 +f 30117 30118 30145 +f 30118 30146 30145 +f 30118 11981 12110 +f 30118 12110 30146 +f 30119 30120 30148 +f 30119 30148 30147 +f 30120 30121 30148 +f 30121 30149 30148 +f 30121 30122 30150 +f 30121 30150 30149 +f 30122 30123 30150 +f 30123 30151 30150 +f 30123 30124 30152 +f 30123 30152 30151 +f 30124 30125 30152 +f 30125 30153 30152 +f 30125 30126 30154 +f 30125 30154 30153 +f 30126 30127 30154 +f 30127 30155 30154 +f 30127 30128 30156 +f 30127 30156 30155 +f 30128 30129 30156 +f 30129 30157 30156 +f 30129 30130 30158 +f 30129 30158 30157 +f 30130 30131 30158 +f 30131 30159 30158 +f 30131 30132 30160 +f 30131 30160 30159 +f 30132 30133 30160 +f 30133 30161 30160 +f 30133 30134 30162 +f 30133 30162 30161 +f 30134 30135 30162 +f 30135 30163 30162 +f 30135 30136 30164 +f 30135 30164 30163 +f 30136 30137 30164 +f 30137 30165 30164 +f 30137 30138 30166 +f 30137 30166 30165 +f 30138 30139 30166 +f 30139 30167 30166 +f 30139 
30140 30168 +f 30139 30168 30167 +f 30140 30141 30168 +f 30141 30169 30168 +f 30141 30142 30170 +f 30141 30170 30169 +f 30142 30143 30170 +f 30143 30171 30170 +f 30143 30144 30172 +f 30143 30172 30171 +f 30144 30145 30172 +f 30145 30173 30172 +f 30145 30146 30174 +f 30145 30174 30173 +f 30146 12110 30174 +f 12110 12239 30174 +f 30147 30148 30175 +f 30148 30176 30175 +f 30148 30149 30177 +f 30148 30177 30176 +f 30149 30150 30177 +f 30150 30178 30177 +f 30150 30151 30179 +f 30150 30179 30178 +f 30151 30152 30179 +f 30152 30180 30179 +f 30152 30153 30181 +f 30152 30181 30180 +f 30153 30154 30181 +f 30154 30182 30181 +f 30154 30155 30183 +f 30154 30183 30182 +f 30155 30156 30183 +f 30156 30184 30183 +f 30156 30157 30185 +f 30156 30185 30184 +f 30157 30158 30185 +f 30158 30186 30185 +f 30158 30159 30187 +f 30158 30187 30186 +f 30159 30160 30187 +f 30160 30188 30187 +f 30160 30161 30189 +f 30160 30189 30188 +f 30161 30162 30189 +f 30162 30190 30189 +f 30162 30163 30191 +f 30162 30191 30190 +f 30163 30164 30191 +f 30164 30192 30191 +f 30164 30165 30193 +f 30164 30193 30192 +f 30165 30166 30193 +f 30166 30194 30193 +f 30166 30167 30195 +f 30166 30195 30194 +f 30167 30168 30195 +f 30168 30196 30195 +f 30168 30169 30197 +f 30168 30197 30196 +f 30169 30170 30197 +f 30170 30198 30197 +f 30170 30171 30199 +f 30170 30199 30198 +f 30171 30172 30199 +f 30172 30200 30199 +f 30172 30173 30201 +f 30172 30201 30200 +f 30173 30174 30201 +f 30174 30202 30201 +f 30174 12239 12368 +f 30174 12368 30202 +f 30175 30204 30203 +f 30175 30176 30205 +f 30175 30205 30204 +f 30176 30177 30205 +f 30177 30206 30205 +f 30177 30178 30207 +f 30177 30207 30206 +f 30178 30179 30207 +f 30179 30208 30207 +f 30179 30180 30209 +f 30179 30209 30208 +f 30180 30181 30209 +f 30181 30210 30209 +f 30181 30182 30211 +f 30181 30211 30210 +f 30182 30183 30211 +f 30183 30212 30211 +f 30183 30184 30213 +f 30183 30213 30212 +f 30184 30185 30213 +f 30185 30214 30213 +f 30185 30186 30215 +f 30185 30215 30214 +f 30186 30187 30215 +f 30187 30216 30215 +f 30187 30188 30217 +f 30187 30217 30216 +f 30188 30189 30217 +f 30189 30218 30217 +f 30189 30190 30219 +f 30189 30219 30218 +f 30190 30191 30219 +f 30191 30220 30219 +f 30191 30192 30221 +f 30191 30221 30220 +f 30192 30193 30221 +f 30193 30222 30221 +f 30193 30194 30223 +f 30193 30223 30222 +f 30194 30195 30223 +f 30195 30224 30223 +f 30195 30196 30225 +f 30195 30225 30224 +f 30196 30197 30225 +f 30197 30226 30225 +f 30197 30198 30227 +f 30197 30227 30226 +f 30198 30199 30227 +f 30199 30228 30227 +f 30199 30200 30229 +f 30199 30229 30228 +f 30200 30201 30229 +f 30201 30230 30229 +f 30201 30202 30231 +f 30201 30231 30230 +f 30202 12368 30231 +f 12368 12497 30231 +f 30203 30233 30232 +f 30203 30204 30234 +f 30203 30234 30233 +f 30204 30205 30234 +f 30205 30235 30234 +f 30205 30206 30236 +f 30205 30236 30235 +f 30206 30207 30236 +f 30207 30237 30236 +f 30207 30208 30238 +f 30207 30238 30237 +f 30208 30209 30238 +f 30209 30239 30238 +f 30209 30210 30240 +f 30209 30240 30239 +f 30210 30211 30240 +f 30211 30241 30240 +f 30211 30212 30242 +f 30211 30242 30241 +f 30212 30213 30242 +f 30213 30243 30242 +f 30213 30214 30244 +f 30213 30244 30243 +f 30214 30215 30244 +f 30215 30245 30244 +f 30215 30216 30246 +f 30215 30246 30245 +f 30216 30217 30246 +f 30217 30247 30246 +f 30217 30218 30248 +f 30217 30248 30247 +f 30218 30219 30248 +f 30219 30249 30248 +f 30219 30220 30250 +f 30219 30250 30249 +f 30220 30221 30250 +f 30221 30251 30250 +f 30221 30222 30252 +f 30221 30252 30251 +f 30222 30223 30252 +f 30223 30253 
30252 +f 30223 30224 30254 +f 30223 30254 30253 +f 30224 30225 30254 +f 30225 30255 30254 +f 30225 30226 30256 +f 30225 30256 30255 +f 30226 30227 30256 +f 30227 30257 30256 +f 30227 30228 30258 +f 30227 30258 30257 +f 30228 30229 30258 +f 30229 30259 30258 +f 30229 30230 30260 +f 30229 30260 30259 +f 30230 30231 30260 +f 30231 30261 30260 +f 30231 12497 12626 +f 30231 12626 30261 +f 30232 30233 30263 +f 30232 30263 30262 +f 30233 30234 30263 +f 30234 30264 30263 +f 30234 30235 30265 +f 30234 30265 30264 +f 30235 30236 30265 +f 30236 30266 30265 +f 30236 30237 30267 +f 30236 30267 30266 +f 30237 30238 30267 +f 30238 30268 30267 +f 30238 30239 30269 +f 30238 30269 30268 +f 30239 30240 30269 +f 30240 30270 30269 +f 30240 30241 30271 +f 30240 30271 30270 +f 30241 30242 30271 +f 30242 30272 30271 +f 30242 30243 30273 +f 30242 30273 30272 +f 30243 30244 30273 +f 30244 30274 30273 +f 30244 30245 30275 +f 30244 30275 30274 +f 30245 30246 30275 +f 30246 30276 30275 +f 30246 30247 30277 +f 30246 30277 30276 +f 30247 30248 30277 +f 30248 30278 30277 +f 30248 30249 30279 +f 30248 30279 30278 +f 30249 30250 30279 +f 30250 30280 30279 +f 30250 30251 30281 +f 30250 30281 30280 +f 30251 30252 30281 +f 30252 30282 30281 +f 30252 30253 30283 +f 30252 30283 30282 +f 30253 30254 30283 +f 30254 30284 30283 +f 30254 30255 30285 +f 30254 30285 30284 +f 30255 30256 30285 +f 30256 30286 30285 +f 30256 30257 30287 +f 30256 30287 30286 +f 30257 30258 30287 +f 30258 30288 30287 +f 30258 30259 30289 +f 30258 30289 30288 +f 30259 30260 30289 +f 30260 30290 30289 +f 30260 30261 30291 +f 30260 30291 30290 +f 30261 12626 30291 +f 12626 12755 30291 +f 30262 30263 30292 +f 30263 30293 30292 +f 30263 30264 30294 +f 30263 30294 30293 +f 30264 30265 30294 +f 30265 30295 30294 +f 30265 30266 30296 +f 30265 30296 30295 +f 30266 30267 30296 +f 30267 30297 30296 +f 30267 30268 30298 +f 30267 30298 30297 +f 30268 30269 30298 +f 30269 30299 30298 +f 30269 30270 30300 +f 30269 30300 30299 +f 30270 30271 30300 +f 30271 30301 30300 +f 30271 30272 30302 +f 30271 30302 30301 +f 30272 30273 30302 +f 30273 30303 30302 +f 30273 30274 30304 +f 30273 30304 30303 +f 30274 30275 30304 +f 30275 30305 30304 +f 30275 30276 30306 +f 30275 30306 30305 +f 30276 30277 30306 +f 30277 30307 30306 +f 30277 30278 30308 +f 30277 30308 30307 +f 30278 30279 30308 +f 30279 30309 30308 +f 30279 30280 30310 +f 30279 30310 30309 +f 30280 30281 30310 +f 30281 30311 30310 +f 30281 30282 30312 +f 30281 30312 30311 +f 30282 30283 30312 +f 30283 30313 30312 +f 30283 30284 30314 +f 30283 30314 30313 +f 30284 30285 30314 +f 30285 30315 30314 +f 30285 30286 30316 +f 30285 30316 30315 +f 30286 30287 30316 +f 30287 30317 30316 +f 30287 30288 30318 +f 30287 30318 30317 +f 30288 30289 30318 +f 30289 30319 30318 +f 30289 30290 30320 +f 30289 30320 30319 +f 30290 30291 30320 +f 30291 30321 30320 +f 30291 12755 12884 +f 30291 12884 30321 +f 30292 30323 30322 +f 30292 30293 30324 +f 30292 30324 30323 +f 30293 30294 30324 +f 30294 30325 30324 +f 30294 30295 30326 +f 30294 30326 30325 +f 30295 30296 30326 +f 30296 30327 30326 +f 30296 30297 30328 +f 30296 30328 30327 +f 30297 30298 30328 +f 30298 30329 30328 +f 30298 30299 30330 +f 30298 30330 30329 +f 30299 30300 30330 +f 30300 30331 30330 +f 30300 30301 30332 +f 30300 30332 30331 +f 30301 30302 30332 +f 30302 30333 30332 +f 30302 30303 30334 +f 30302 30334 30333 +f 30303 30304 30334 +f 30304 30335 30334 +f 30304 30305 30336 +f 30304 30336 30335 +f 30305 30306 30336 +f 30306 30337 30336 +f 30306 30307 30338 +f 30306 30338 30337 
+f 30307 30308 30338 +f 30308 30339 30338 +f 30308 30309 30340 +f 30308 30340 30339 +f 30309 30310 30340 +f 30310 30341 30340 +f 30310 30311 30342 +f 30310 30342 30341 +f 30311 30312 30342 +f 30312 30343 30342 +f 30312 30313 30344 +f 30312 30344 30343 +f 30313 30314 30344 +f 30314 30345 30344 +f 30314 30315 30346 +f 30314 30346 30345 +f 30315 30316 30346 +f 30316 30347 30346 +f 30316 30317 30348 +f 30316 30348 30347 +f 30317 30318 30348 +f 30318 30349 30348 +f 30318 30319 30350 +f 30318 30350 30349 +f 30319 30320 30350 +f 30320 30351 30350 +f 30320 30321 30352 +f 30320 30352 30351 +f 30321 12884 30352 +f 12884 13013 30352 +f 30322 30323 30354 +f 30322 30354 30353 +f 30323 30324 30354 +f 30324 30355 30354 +f 30324 30325 30356 +f 30324 30356 30355 +f 30325 30326 30356 +f 30326 30357 30356 +f 30326 30327 30358 +f 30326 30358 30357 +f 30327 30328 30358 +f 30328 30359 30358 +f 30328 30329 30360 +f 30328 30360 30359 +f 30329 30330 30360 +f 30330 30361 30360 +f 30330 30331 30362 +f 30330 30362 30361 +f 30331 30332 30362 +f 30332 30363 30362 +f 30332 30333 30364 +f 30332 30364 30363 +f 30333 30334 30364 +f 30334 30365 30364 +f 30334 30335 30366 +f 30334 30366 30365 +f 30335 30336 30366 +f 30336 30367 30366 +f 30336 30337 30368 +f 30336 30368 30367 +f 30337 30338 30368 +f 30338 30369 30368 +f 30338 30339 30370 +f 30338 30370 30369 +f 30339 30340 30370 +f 30340 30371 30370 +f 30340 30341 30372 +f 30340 30372 30371 +f 30341 30342 30372 +f 30342 30373 30372 +f 30342 30343 30374 +f 30342 30374 30373 +f 30343 30344 30374 +f 30344 30375 30374 +f 30344 30345 30376 +f 30344 30376 30375 +f 30345 30346 30376 +f 30346 30377 30376 +f 30346 30347 30378 +f 30346 30378 30377 +f 30347 30348 30378 +f 30348 30379 30378 +f 30348 30349 30380 +f 30348 30380 30379 +f 30349 30350 30380 +f 30350 30381 30380 +f 30350 30351 30382 +f 30350 30382 30381 +f 30351 30352 30382 +f 30352 30383 30382 +f 30352 13013 13142 +f 30352 13142 30383 +f 30353 30354 30384 +f 30354 30385 30384 +f 30354 30355 30386 +f 30354 30386 30385 +f 30355 30356 30386 +f 30356 30387 30386 +f 30356 30357 30388 +f 30356 30388 30387 +f 30357 30358 30388 +f 30358 30389 30388 +f 30358 30359 30390 +f 30358 30390 30389 +f 30359 30360 30390 +f 30360 30391 30390 +f 30360 30361 30392 +f 30360 30392 30391 +f 30361 30362 30392 +f 30362 30393 30392 +f 30362 30363 30394 +f 30362 30394 30393 +f 30363 30364 30394 +f 30364 30395 30394 +f 30364 30365 30396 +f 30364 30396 30395 +f 30365 30366 30396 +f 30366 30397 30396 +f 30366 30367 30398 +f 30366 30398 30397 +f 30367 30368 30398 +f 30368 30399 30398 +f 30368 30369 30400 +f 30368 30400 30399 +f 30369 30370 30400 +f 30370 30401 30400 +f 30370 30371 30402 +f 30370 30402 30401 +f 30371 30372 30402 +f 30372 30403 30402 +f 30372 30373 30404 +f 30372 30404 30403 +f 30373 30374 30404 +f 30374 30405 30404 +f 30374 30375 30406 +f 30374 30406 30405 +f 30375 30376 30406 +f 30376 30407 30406 +f 30376 30377 30408 +f 30376 30408 30407 +f 30377 30378 30408 +f 30378 30409 30408 +f 30378 30379 30410 +f 30378 30410 30409 +f 30379 30380 30410 +f 30380 30411 30410 +f 30380 30381 30412 +f 30380 30412 30411 +f 30381 30382 30412 +f 30382 30413 30412 +f 30382 30383 30414 +f 30382 30414 30413 +f 30383 13142 30414 +f 13142 13271 30414 +f 30384 30385 30416 +f 30384 30416 30415 +f 30385 30386 30416 +f 30386 30417 30416 +f 30386 30387 30418 +f 30386 30418 30417 +f 30387 30388 30418 +f 30388 30419 30418 +f 30388 30389 30420 +f 30388 30420 30419 +f 30389 30390 30420 +f 30390 30421 30420 +f 30390 30391 30422 +f 30390 30422 30421 +f 30391 30392 30422 +f 
30392 30423 30422 +f 30392 30393 30424 +f 30392 30424 30423 +f 30393 30394 30424 +f 30394 30425 30424 +f 30394 30395 30426 +f 30394 30426 30425 +f 30395 30396 30426 +f 30396 30427 30426 +f 30396 30397 30428 +f 30396 30428 30427 +f 30397 30398 30428 +f 30398 30429 30428 +f 30398 30399 30430 +f 30398 30430 30429 +f 30399 30400 30430 +f 30400 30431 30430 +f 30400 30401 30432 +f 30400 30432 30431 +f 30401 30402 30432 +f 30402 30433 30432 +f 30402 30403 30434 +f 30402 30434 30433 +f 30403 30404 30434 +f 30404 30435 30434 +f 30404 30405 30436 +f 30404 30436 30435 +f 30405 30406 30436 +f 30406 30437 30436 +f 30406 30407 30438 +f 30406 30438 30437 +f 30407 30408 30438 +f 30408 30439 30438 +f 30408 30409 30440 +f 30408 30440 30439 +f 30409 30410 30440 +f 30410 30441 30440 +f 30410 30411 30442 +f 30410 30442 30441 +f 30411 30412 30442 +f 30412 30443 30442 +f 30412 30413 30444 +f 30412 30444 30443 +f 30413 30414 30444 +f 30414 30445 30444 +f 30414 13271 13400 +f 30414 13400 30445 +f 30415 30416 30446 +f 30416 30447 30446 +f 30416 30417 30448 +f 30416 30448 30447 +f 30417 30418 30448 +f 30418 30449 30448 +f 30418 30419 30450 +f 30418 30450 30449 +f 30419 30420 30450 +f 30420 30451 30450 +f 30420 30421 30452 +f 30420 30452 30451 +f 30421 30422 30452 +f 30422 30453 30452 +f 30422 30423 30454 +f 30422 30454 30453 +f 30423 30424 30454 +f 30424 30455 30454 +f 30424 30425 30456 +f 30424 30456 30455 +f 30425 30426 30456 +f 30426 30457 30456 +f 30426 30427 30458 +f 30426 30458 30457 +f 30427 30428 30458 +f 30428 30459 30458 +f 30428 30429 30460 +f 30428 30460 30459 +f 30429 30430 30460 +f 30430 30461 30460 +f 30430 30431 30462 +f 30430 30462 30461 +f 30431 30432 30462 +f 30432 30463 30462 +f 30432 30433 30464 +f 30432 30464 30463 +f 30433 30434 30464 +f 30434 30465 30464 +f 30434 30435 30466 +f 30434 30466 30465 +f 30435 30436 30466 +f 30436 30467 30466 +f 30436 30437 30468 +f 30436 30468 30467 +f 30437 30438 30468 +f 30438 30469 30468 +f 30438 30439 30470 +f 30438 30470 30469 +f 30439 30440 30470 +f 30440 30471 30470 +f 30440 30441 30472 +f 30440 30472 30471 +f 30441 30442 30472 +f 30442 30473 30472 +f 30442 30443 30474 +f 30442 30474 30473 +f 30443 30444 30474 +f 30444 30475 30474 +f 30444 30445 30476 +f 30444 30476 30475 +f 30445 13400 30476 +f 13400 13529 30476 +f 30446 30447 30478 +f 30446 30478 30477 +f 30447 30448 30478 +f 30448 30479 30478 +f 30448 30449 30480 +f 30448 30480 30479 +f 30449 30450 30480 +f 30450 30481 30480 +f 30450 30451 30482 +f 30450 30482 30481 +f 30451 30452 30482 +f 30452 30483 30482 +f 30452 30453 30484 +f 30452 30484 30483 +f 30453 30454 30484 +f 30454 30485 30484 +f 30454 30455 30486 +f 30454 30486 30485 +f 30455 30456 30486 +f 30456 30487 30486 +f 30456 30457 30488 +f 30456 30488 30487 +f 30457 30458 30488 +f 30458 30489 30488 +f 30458 30459 30490 +f 30458 30490 30489 +f 30459 30460 30490 +f 30460 30491 30490 +f 30460 30461 30492 +f 30460 30492 30491 +f 30461 30462 30492 +f 30462 30493 30492 +f 30462 30463 30494 +f 30462 30494 30493 +f 30463 30464 30494 +f 30464 30495 30494 +f 30464 30465 30496 +f 30464 30496 30495 +f 30465 30466 30496 +f 30466 30497 30496 +f 30466 30467 30498 +f 30466 30498 30497 +f 30467 30468 30498 +f 30468 30499 30498 +f 30468 30469 30500 +f 30468 30500 30499 +f 30469 30470 30500 +f 30470 30501 30500 +f 30470 30471 30502 +f 30470 30502 30501 +f 30471 30472 30502 +f 30472 30503 30502 +f 30472 30473 30504 +f 30472 30504 30503 +f 30473 30474 30504 +f 30474 30505 30504 +f 30474 30475 30506 +f 30474 30506 30505 +f 30475 30476 30506 +f 30476 30507 30506 +f 30476 
13529 13658 +f 30476 13658 30507 +f 30477 30478 30508 +f 30478 30509 30508 +f 30478 30479 30510 +f 30478 30510 30509 +f 30479 30480 30510 +f 30480 30511 30510 +f 30480 30481 30512 +f 30480 30512 30511 +f 30481 30482 30512 +f 30482 30513 30512 +f 30482 30483 30514 +f 30482 30514 30513 +f 30483 30484 30514 +f 30484 30515 30514 +f 30484 30485 30516 +f 30484 30516 30515 +f 30485 30486 30516 +f 30486 30517 30516 +f 30486 30487 30518 +f 30486 30518 30517 +f 30487 30488 30518 +f 30488 30519 30518 +f 30488 30489 30520 +f 30488 30520 30519 +f 30489 30490 30520 +f 30490 30521 30520 +f 30490 30491 30522 +f 30490 30522 30521 +f 30491 30492 30522 +f 30492 30523 30522 +f 30492 30493 30524 +f 30492 30524 30523 +f 30493 30494 30524 +f 30494 30525 30524 +f 30494 30495 30526 +f 30494 30526 30525 +f 30495 30496 30526 +f 30496 30527 30526 +f 30496 30497 30528 +f 30496 30528 30527 +f 30497 30498 30528 +f 30498 30529 30528 +f 30498 30499 30530 +f 30498 30530 30529 +f 30499 30500 30530 +f 30500 30531 30530 +f 30500 30501 30532 +f 30500 30532 30531 +f 30501 30502 30532 +f 30502 30533 30532 +f 30502 30503 30534 +f 30502 30534 30533 +f 30503 30504 30534 +f 30504 30535 30534 +f 30504 30505 30536 +f 30504 30536 30535 +f 30505 30506 30536 +f 30506 30537 30536 +f 30506 30507 30538 +f 30506 30538 30537 +f 30507 13658 30538 +f 13658 13787 30538 +f 30508 30509 30540 +f 30508 30540 30539 +f 30509 30510 30540 +f 30510 30541 30540 +f 30510 30511 30542 +f 30510 30542 30541 +f 30511 30512 30542 +f 30512 30543 30542 +f 30512 30513 30544 +f 30512 30544 30543 +f 30513 30514 30544 +f 30514 30545 30544 +f 30514 30515 30546 +f 30514 30546 30545 +f 30515 30516 30546 +f 30516 30547 30546 +f 30516 30517 30548 +f 30516 30548 30547 +f 30517 30518 30548 +f 30518 30549 30548 +f 30518 30519 30550 +f 30518 30550 30549 +f 30519 30520 30550 +f 30520 30551 30550 +f 30520 30521 30552 +f 30520 30552 30551 +f 30521 30522 30552 +f 30522 30553 30552 +f 30522 30523 30554 +f 30522 30554 30553 +f 30523 30524 30554 +f 30524 30555 30554 +f 30524 30525 30556 +f 30524 30556 30555 +f 30525 30526 30556 +f 30526 30557 30556 +f 30526 30527 30558 +f 30526 30558 30557 +f 30527 30528 30558 +f 30528 30559 30558 +f 30528 30529 30560 +f 30528 30560 30559 +f 30529 30530 30560 +f 30530 30561 30560 +f 30530 30531 30562 +f 30530 30562 30561 +f 30531 30532 30562 +f 30532 30563 30562 +f 30532 30533 30564 +f 30532 30564 30563 +f 30533 30534 30564 +f 30534 30565 30564 +f 30534 30535 30566 +f 30534 30566 30565 +f 30535 30536 30566 +f 30536 30567 30566 +f 30536 30537 30568 +f 30536 30568 30567 +f 30537 30538 30568 +f 30538 30569 30568 +f 30538 13787 13916 +f 30538 13916 30569 +f 30539 30540 30570 +f 30540 30571 30570 +f 30540 30541 30572 +f 30540 30572 30571 +f 30541 30542 30572 +f 30542 30573 30572 +f 30542 30543 30574 +f 30542 30574 30573 +f 30543 30544 30574 +f 30544 30575 30574 +f 30544 30545 30576 +f 30544 30576 30575 +f 30545 30546 30576 +f 30546 30577 30576 +f 30546 30547 30578 +f 30546 30578 30577 +f 30547 30548 30578 +f 30548 30579 30578 +f 30548 30549 30580 +f 30548 30580 30579 +f 30549 30550 30580 +f 30550 30581 30580 +f 30550 30551 30582 +f 30550 30582 30581 +f 30551 30552 30582 +f 30552 30583 30582 +f 30552 30553 30584 +f 30552 30584 30583 +f 30553 30554 30584 +f 30554 30585 30584 +f 30554 30555 30586 +f 30554 30586 30585 +f 30555 30556 30586 +f 30556 30587 30586 +f 30556 30557 30588 +f 30556 30588 30587 +f 30557 30558 30588 +f 30558 30589 30588 +f 30558 30559 30590 +f 30558 30590 30589 +f 30559 30560 30590 +f 30560 30591 30590 +f 30560 30561 30592 +f 30560 30592 
30591 +f 30561 30562 30592 +f 30562 30593 30592 +f 30562 30563 30594 +f 30562 30594 30593 +f 30563 30564 30594 +f 30564 30595 30594 +f 30564 30565 30596 +f 30564 30596 30595 +f 30565 30566 30596 +f 30566 30597 30596 +f 30566 30567 30598 +f 30566 30598 30597 +f 30567 30568 30598 +f 30568 30599 30598 +f 30568 30569 30600 +f 30568 30600 30599 +f 30569 13916 30600 +f 13916 14045 30600 +f 30570 30571 30602 +f 30570 30602 30601 +f 30571 30572 30602 +f 30572 30603 30602 +f 30572 30573 30604 +f 30572 30604 30603 +f 30573 30574 30604 +f 30574 30605 30604 +f 30574 30575 30606 +f 30574 30606 30605 +f 30575 30576 30606 +f 30576 30607 30606 +f 30576 30577 30608 +f 30576 30608 30607 +f 30577 30578 30608 +f 30578 30609 30608 +f 30578 30579 30610 +f 30578 30610 30609 +f 30579 30580 30610 +f 30580 30611 30610 +f 30580 30581 30612 +f 30580 30612 30611 +f 30581 30582 30612 +f 30582 30613 30612 +f 30582 30583 30614 +f 30582 30614 30613 +f 30583 30584 30614 +f 30584 30615 30614 +f 30584 30585 30616 +f 30584 30616 30615 +f 30585 30586 30616 +f 30586 30617 30616 +f 30586 30587 30618 +f 30586 30618 30617 +f 30587 30588 30618 +f 30588 30619 30618 +f 30588 30589 30620 +f 30588 30620 30619 +f 30589 30590 30620 +f 30590 30621 30620 +f 30590 30591 30622 +f 30590 30622 30621 +f 30591 30592 30622 +f 30592 30623 30622 +f 30592 30593 30624 +f 30592 30624 30623 +f 30593 30594 30624 +f 30594 30625 30624 +f 30594 30595 30626 +f 30594 30626 30625 +f 30595 30596 30626 +f 30596 30627 30626 +f 30596 30597 30628 +f 30596 30628 30627 +f 30597 30598 30628 +f 30598 30629 30628 +f 30598 30599 30630 +f 30598 30630 30629 +f 30599 30600 30630 +f 30600 30631 30630 +f 30600 14045 14174 +f 30600 14174 30631 +f 30601 30602 30632 +f 30602 30633 30632 +f 30602 30603 30634 +f 30602 30634 30633 +f 30603 30604 30634 +f 30604 30635 30634 +f 30604 30605 30636 +f 30604 30636 30635 +f 30605 30606 30636 +f 30606 30637 30636 +f 30606 30607 30638 +f 30606 30638 30637 +f 30607 30608 30638 +f 30608 30639 30638 +f 30608 30609 30640 +f 30608 30640 30639 +f 30609 30610 30640 +f 30610 30641 30640 +f 30610 30611 30642 +f 30610 30642 30641 +f 30611 30612 30642 +f 30612 30643 30642 +f 30612 30613 30644 +f 30612 30644 30643 +f 30613 30614 30644 +f 30614 30645 30644 +f 30614 30615 30646 +f 30614 30646 30645 +f 30615 30616 30646 +f 30616 30647 30646 +f 30616 30617 30648 +f 30616 30648 30647 +f 30617 30618 30648 +f 30618 30649 30648 +f 30618 30619 30650 +f 30618 30650 30649 +f 30619 30620 30650 +f 30620 30651 30650 +f 30620 30621 30652 +f 30620 30652 30651 +f 30621 30622 30652 +f 30622 30653 30652 +f 30622 30623 30654 +f 30622 30654 30653 +f 30623 30624 30654 +f 30624 30655 30654 +f 30624 30625 30656 +f 30624 30656 30655 +f 30625 30626 30656 +f 30626 30657 30656 +f 30626 30627 30658 +f 30626 30658 30657 +f 30627 30628 30658 +f 30628 30659 30658 +f 30628 30629 30660 +f 30628 30660 30659 +f 30629 30630 30660 +f 30630 30661 30660 +f 30630 30631 30662 +f 30630 30662 30661 +f 30631 14174 30662 +f 14174 14303 30662 +f 30632 30664 30663 +f 30632 30633 30665 +f 30632 30665 30664 +f 30633 30634 30665 +f 30634 30666 30665 +f 30634 30635 30667 +f 30634 30667 30666 +f 30635 30636 30667 +f 30636 30668 30667 +f 30636 30637 30669 +f 30636 30669 30668 +f 30637 30638 30669 +f 30638 30670 30669 +f 30638 30639 30671 +f 30638 30671 30670 +f 30639 30640 30671 +f 30640 30672 30671 +f 30640 30641 30673 +f 30640 30673 30672 +f 30641 30642 30673 +f 30642 30674 30673 +f 30642 30643 30675 +f 30642 30675 30674 +f 30643 30644 30675 +f 30644 30676 30675 +f 30644 30645 30677 +f 30644 30677 30676 
+f 30645 30646 30677 +f 30646 30678 30677 +f 30646 30647 30679 +f 30646 30679 30678 +f 30647 30648 30679 +f 30648 30680 30679 +f 30648 30649 30681 +f 30648 30681 30680 +f 30649 30650 30681 +f 30650 30682 30681 +f 30650 30651 30683 +f 30650 30683 30682 +f 30651 30652 30683 +f 30652 30684 30683 +f 30652 30653 30685 +f 30652 30685 30684 +f 30653 30654 30685 +f 30654 30686 30685 +f 30654 30655 30687 +f 30654 30687 30686 +f 30655 30656 30687 +f 30656 30688 30687 +f 30656 30657 30689 +f 30656 30689 30688 +f 30657 30658 30689 +f 30658 30690 30689 +f 30658 30659 30691 +f 30658 30691 30690 +f 30659 30660 30691 +f 30660 30692 30691 +f 30660 30661 30693 +f 30660 30693 30692 +f 30661 30662 30693 +f 30662 30694 30693 +f 30662 14303 14432 +f 30662 14432 30694 +f 30663 30664 30696 +f 30663 30696 30695 +f 30664 30665 30696 +f 30665 30697 30696 +f 30665 30666 30698 +f 30665 30698 30697 +f 30666 30667 30698 +f 30667 30699 30698 +f 30667 30668 30700 +f 30667 30700 30699 +f 30668 30669 30700 +f 30669 30701 30700 +f 30669 30670 30702 +f 30669 30702 30701 +f 30670 30671 30702 +f 30671 30703 30702 +f 30671 30672 30704 +f 30671 30704 30703 +f 30672 30673 30704 +f 30673 30705 30704 +f 30673 30674 30706 +f 30673 30706 30705 +f 30674 30675 30706 +f 30675 30707 30706 +f 30675 30676 30708 +f 30675 30708 30707 +f 30676 30677 30708 +f 30677 30709 30708 +f 30677 30678 30710 +f 30677 30710 30709 +f 30678 30679 30710 +f 30679 30711 30710 +f 30679 30680 30712 +f 30679 30712 30711 +f 30680 30681 30712 +f 30681 30713 30712 +f 30681 30682 30714 +f 30681 30714 30713 +f 30682 30683 30714 +f 30683 30715 30714 +f 30683 30684 30716 +f 30683 30716 30715 +f 30684 30685 30716 +f 30685 30717 30716 +f 30685 30686 30718 +f 30685 30718 30717 +f 30686 30687 30718 +f 30687 30719 30718 +f 30687 30688 30720 +f 30687 30720 30719 +f 30688 30689 30720 +f 30689 30721 30720 +f 30689 30690 30722 +f 30689 30722 30721 +f 30690 30691 30722 +f 30691 30723 30722 +f 30691 30692 30724 +f 30691 30724 30723 +f 30692 30693 30724 +f 30693 30725 30724 +f 30693 30694 30726 +f 30693 30726 30725 +f 30694 14432 30726 +f 14432 14561 30726 +f 30695 30696 30727 +f 30696 30728 30727 +f 30696 30697 30729 +f 30696 30729 30728 +f 30697 30698 30729 +f 30698 30730 30729 +f 30698 30699 30731 +f 30698 30731 30730 +f 30699 30700 30731 +f 30700 30732 30731 +f 30700 30701 30733 +f 30700 30733 30732 +f 30701 30702 30733 +f 30702 30734 30733 +f 30702 30703 30735 +f 30702 30735 30734 +f 30703 30704 30735 +f 30704 30736 30735 +f 30704 30705 30737 +f 30704 30737 30736 +f 30705 30706 30737 +f 30706 30738 30737 +f 30706 30707 30739 +f 30706 30739 30738 +f 30707 30708 30739 +f 30708 30740 30739 +f 30708 30709 30741 +f 30708 30741 30740 +f 30709 30710 30741 +f 30710 30742 30741 +f 30710 30711 30743 +f 30710 30743 30742 +f 30711 30712 30743 +f 30712 30744 30743 +f 30712 30713 30745 +f 30712 30745 30744 +f 30713 30714 30745 +f 30714 30746 30745 +f 30714 30715 30747 +f 30714 30747 30746 +f 30715 30716 30747 +f 30716 30748 30747 +f 30716 30717 30749 +f 30716 30749 30748 +f 30717 30718 30749 +f 30718 30750 30749 +f 30718 30719 30751 +f 30718 30751 30750 +f 30719 30720 30751 +f 30720 30752 30751 +f 30720 30721 30753 +f 30720 30753 30752 +f 30721 30722 30753 +f 30722 30754 30753 +f 30722 30723 30755 +f 30722 30755 30754 +f 30723 30724 30755 +f 30724 30756 30755 +f 30724 30725 30757 +f 30724 30757 30756 +f 30725 30726 30757 +f 30726 30758 30757 +f 30726 14561 14690 +f 30726 14690 30758 +f 30727 30760 30759 +f 30727 30728 30761 +f 30727 30761 30760 +f 30728 30729 30761 +f 30729 30762 30761 +f 
30729 30730 30763 +f 30729 30763 30762 +f 30730 30731 30763 +f 30731 30764 30763 +f 30731 30732 30765 +f 30731 30765 30764 +f 30732 30733 30765 +f 30733 30766 30765 +f 30733 30734 30767 +f 30733 30767 30766 +f 30734 30735 30767 +f 30735 30768 30767 +f 30735 30736 30769 +f 30735 30769 30768 +f 30736 30737 30769 +f 30737 30770 30769 +f 30737 30738 30771 +f 30737 30771 30770 +f 30738 30739 30771 +f 30739 30772 30771 +f 30739 30740 30773 +f 30739 30773 30772 +f 30740 30741 30773 +f 30741 30774 30773 +f 30741 30742 30775 +f 30741 30775 30774 +f 30742 30743 30775 +f 30743 30776 30775 +f 30743 30744 30777 +f 30743 30777 30776 +f 30744 30745 30777 +f 30745 30778 30777 +f 30745 30746 30779 +f 30745 30779 30778 +f 30746 30747 30779 +f 30747 30780 30779 +f 30747 30748 30781 +f 30747 30781 30780 +f 30748 30749 30781 +f 30749 30782 30781 +f 30749 30750 30783 +f 30749 30783 30782 +f 30750 30751 30783 +f 30751 30784 30783 +f 30751 30752 30785 +f 30751 30785 30784 +f 30752 30753 30785 +f 30753 30786 30785 +f 30753 30754 30787 +f 30753 30787 30786 +f 30754 30755 30787 +f 30755 30788 30787 +f 30755 30756 30789 +f 30755 30789 30788 +f 30756 30757 30789 +f 30757 30790 30789 +f 30757 30758 30791 +f 30757 30791 30790 +f 30758 14690 30791 +f 14690 14819 30791 +f 30759 30793 30792 +f 30759 30760 30794 +f 30759 30794 30793 +f 30760 30761 30794 +f 30761 30795 30794 +f 30761 30762 30796 +f 30761 30796 30795 +f 30762 30763 30796 +f 30763 30797 30796 +f 30763 30764 30798 +f 30763 30798 30797 +f 30764 30765 30798 +f 30765 30799 30798 +f 30765 30766 30800 +f 30765 30800 30799 +f 30766 30767 30800 +f 30767 30801 30800 +f 30767 30768 30802 +f 30767 30802 30801 +f 30768 30769 30802 +f 30769 30803 30802 +f 30769 30770 30804 +f 30769 30804 30803 +f 30770 30771 30804 +f 30771 30805 30804 +f 30771 30772 30806 +f 30771 30806 30805 +f 30772 30773 30806 +f 30773 30807 30806 +f 30773 30774 30808 +f 30773 30808 30807 +f 30774 30775 30808 +f 30775 30809 30808 +f 30775 30776 30810 +f 30775 30810 30809 +f 30776 30777 30810 +f 30777 30811 30810 +f 30777 30778 30812 +f 30777 30812 30811 +f 30778 30779 30812 +f 30779 30813 30812 +f 30779 30780 30814 +f 30779 30814 30813 +f 30780 30781 30814 +f 30781 30815 30814 +f 30781 30782 30816 +f 30781 30816 30815 +f 30782 30783 30816 +f 30783 30817 30816 +f 30783 30784 30818 +f 30783 30818 30817 +f 30784 30785 30818 +f 30785 30819 30818 +f 30785 30786 30820 +f 30785 30820 30819 +f 30786 30787 30820 +f 30787 30821 30820 +f 30787 30788 30822 +f 30787 30822 30821 +f 30788 30789 30822 +f 30789 30823 30822 +f 30789 30790 30824 +f 30789 30824 30823 +f 30790 30791 30824 +f 30791 30825 30824 +f 30791 14819 14948 +f 30791 14948 30825 +f 30792 30793 30827 +f 30792 30827 30826 +f 30793 30794 30827 +f 30794 30828 30827 +f 30794 30795 30829 +f 30794 30829 30828 +f 30795 30796 30829 +f 30796 30830 30829 +f 30796 30797 30831 +f 30796 30831 30830 +f 30797 30798 30831 +f 30798 30832 30831 +f 30798 30799 30833 +f 30798 30833 30832 +f 30799 30800 30833 +f 30800 30834 30833 +f 30800 30801 30835 +f 30800 30835 30834 +f 30801 30802 30835 +f 30802 30836 30835 +f 30802 30803 30837 +f 30802 30837 30836 +f 30803 30804 30837 +f 30804 30838 30837 +f 30804 30805 30839 +f 30804 30839 30838 +f 30805 30806 30839 +f 30806 30840 30839 +f 30806 30807 30841 +f 30806 30841 30840 +f 30807 30808 30841 +f 30808 30842 30841 +f 30808 30809 30843 +f 30808 30843 30842 +f 30809 30810 30843 +f 30810 30844 30843 +f 30810 30811 30845 +f 30810 30845 30844 +f 30811 30812 30845 +f 30812 30846 30845 +f 30812 30813 30847 +f 30812 30847 30846 +f 30813 
30814 30847 +f 30814 30848 30847 +f 30814 30815 30849 +f 30814 30849 30848 +f 30815 30816 30849 +f 30816 30850 30849 +f 30816 30817 30851 +f 30816 30851 30850 +f 30817 30818 30851 +f 30818 30852 30851 +f 30818 30819 30853 +f 30818 30853 30852 +f 30819 30820 30853 +f 30820 30854 30853 +f 30820 30821 30855 +f 30820 30855 30854 +f 30821 30822 30855 +f 30822 30856 30855 +f 30822 30823 30857 +f 30822 30857 30856 +f 30823 30824 30857 +f 30824 30858 30857 +f 30824 30825 30859 +f 30824 30859 30858 +f 30825 14948 30859 +f 14948 15077 30859 +f 30826 30827 30860 +f 30827 30861 30860 +f 30827 30828 30862 +f 30827 30862 30861 +f 30828 30829 30862 +f 30829 30863 30862 +f 30829 30830 30864 +f 30829 30864 30863 +f 30830 30831 30864 +f 30831 30865 30864 +f 30831 30832 30866 +f 30831 30866 30865 +f 30832 30833 30866 +f 30833 30867 30866 +f 30833 30834 30868 +f 30833 30868 30867 +f 30834 30835 30868 +f 30835 30869 30868 +f 30835 30836 30870 +f 30835 30870 30869 +f 30836 30837 30870 +f 30837 30871 30870 +f 30837 30838 30872 +f 30837 30872 30871 +f 30838 30839 30872 +f 30839 30873 30872 +f 30839 30840 30874 +f 30839 30874 30873 +f 30840 30841 30874 +f 30841 30875 30874 +f 30841 30842 30876 +f 30841 30876 30875 +f 30842 30843 30876 +f 30843 30877 30876 +f 30843 30844 30878 +f 30843 30878 30877 +f 30844 30845 30878 +f 30845 30879 30878 +f 30845 30846 30880 +f 30845 30880 30879 +f 30846 30847 30880 +f 30847 30881 30880 +f 30847 30848 30882 +f 30847 30882 30881 +f 30848 30849 30882 +f 30849 30883 30882 +f 30849 30850 30884 +f 30849 30884 30883 +f 30850 30851 30884 +f 30851 30885 30884 +f 30851 30852 30886 +f 30851 30886 30885 +f 30852 30853 30886 +f 30853 30887 30886 +f 30853 30854 30888 +f 30853 30888 30887 +f 30854 30855 30888 +f 30855 30889 30888 +f 30855 30856 30890 +f 30855 30890 30889 +f 30856 30857 30890 +f 30857 30891 30890 +f 30857 30858 30892 +f 30857 30892 30891 +f 30858 30859 30892 +f 30859 30893 30892 +f 30859 15077 15206 +f 30859 15206 30893 +f 30860 30895 30894 +f 30860 30861 30896 +f 30860 30896 30895 +f 30861 30862 30896 +f 30862 30897 30896 +f 30862 30863 30898 +f 30862 30898 30897 +f 30863 30864 30898 +f 30864 30899 30898 +f 30864 30865 30900 +f 30864 30900 30899 +f 30865 30866 30900 +f 30866 30901 30900 +f 30866 30867 30902 +f 30866 30902 30901 +f 30867 30868 30902 +f 30868 30903 30902 +f 30868 30869 30904 +f 30868 30904 30903 +f 30869 30870 30904 +f 30870 30905 30904 +f 30870 30871 30906 +f 30870 30906 30905 +f 30871 30872 30906 +f 30872 30907 30906 +f 30872 30873 30908 +f 30872 30908 30907 +f 30873 30874 30908 +f 30874 30909 30908 +f 30874 30875 30910 +f 30874 30910 30909 +f 30875 30876 30910 +f 30876 30911 30910 +f 30876 30877 30912 +f 30876 30912 30911 +f 30877 30878 30912 +f 30878 30913 30912 +f 30878 30879 30914 +f 30878 30914 30913 +f 30879 30880 30914 +f 30880 30915 30914 +f 30880 30881 30916 +f 30880 30916 30915 +f 30881 30882 30916 +f 30882 30917 30916 +f 30882 30883 30918 +f 30882 30918 30917 +f 30883 30884 30918 +f 30884 30919 30918 +f 30884 30885 30920 +f 30884 30920 30919 +f 30885 30886 30920 +f 30886 30921 30920 +f 30886 30887 30922 +f 30886 30922 30921 +f 30887 30888 30922 +f 30888 30923 30922 +f 30888 30889 30924 +f 30888 30924 30923 +f 30889 30890 30924 +f 30890 30925 30924 +f 30890 30891 30926 +f 30890 30926 30925 +f 30891 30892 30926 +f 30892 30927 30926 +f 30892 30893 30928 +f 30892 30928 30927 +f 30893 15206 30928 +f 15206 15335 30928 +f 30894 30895 30930 +f 30894 30930 30929 +f 30895 30896 30930 +f 30896 30931 30930 +f 30896 30897 30932 +f 30896 30932 30931 +f 30897 30898 
[... several thousand additional face records omitted: the remainder of this hunk is the triangle index list of the Wavefront OBJ mesh asset added in this diff, one `f v1 v2 v3` record per line, with 1-based vertex indices running up to roughly 32,600 ...]
32645 32664 32663 +f 32646 32647 32664 +f 32647 32665 32664 +f 32647 32648 32666 +f 32647 32666 32665 +f 32648 32649 32666 +f 32649 32667 32666 +f 32649 32650 32668 +f 32649 32668 32667 +f 32650 32651 32668 +f 32651 32669 32668 +f 6684 32652 6806 +f 32652 32670 6806 +f 32652 32653 32671 +f 32652 32671 32670 +f 32653 32654 32671 +f 32654 32672 32671 +f 32654 32655 32673 +f 32654 32673 32672 +f 32655 32656 32673 +f 32656 32674 32673 +f 32656 32657 32675 +f 32656 32675 32674 +f 32657 32658 32675 +f 32658 32676 32675 +f 32658 32659 32677 +f 32658 32677 32676 +f 32659 32660 32677 +f 32660 32678 32677 +f 32660 32661 32679 +f 32660 32679 32678 +f 32661 32662 32679 +f 32662 32680 32679 +f 32662 32663 32681 +f 32662 32681 32680 +f 32663 32664 32681 +f 32664 32682 32681 +f 32664 32665 32683 +f 32664 32683 32682 +f 32665 32666 32683 +f 32666 32684 32683 +f 32666 32667 32685 +f 32666 32685 32684 +f 32667 32668 32685 +f 32668 32686 32685 +f 32668 32669 32687 +f 32668 32687 32686 +f 6806 32670 32688 +f 6806 32688 6927 +f 32670 32671 32688 +f 32671 32689 32688 +f 32671 32672 32690 +f 32671 32690 32689 +f 32672 32673 32690 +f 32673 32691 32690 +f 32673 32674 32692 +f 32673 32692 32691 +f 32674 32675 32692 +f 32675 32693 32692 +f 32675 32676 32694 +f 32675 32694 32693 +f 32676 32677 32694 +f 32677 32695 32694 +f 32677 32678 32696 +f 32677 32696 32695 +f 32678 32679 32696 +f 32679 32697 32696 +f 32679 32680 32698 +f 32679 32698 32697 +f 32680 32681 32698 +f 32681 32699 32698 +f 32681 32682 32700 +f 32681 32700 32699 +f 32682 32683 32700 +f 32683 32701 32700 +f 32683 32684 32702 +f 32683 32702 32701 +f 32684 32685 32702 +f 32685 32703 32702 +f 32685 32686 32704 +f 32685 32704 32703 +f 32686 32687 32704 +f 32687 32705 32704 +f 6927 32688 7047 +f 32688 32706 7047 +f 32688 32689 32707 +f 32688 32707 32706 +f 32689 32690 32707 +f 32690 32708 32707 +f 32690 32691 32709 +f 32690 32709 32708 +f 32691 32692 32709 +f 32692 32710 32709 +f 32692 32693 32711 +f 32692 32711 32710 +f 32693 32694 32711 +f 32694 32712 32711 +f 32694 32695 32713 +f 32694 32713 32712 +f 32695 32696 32713 +f 32696 32714 32713 +f 32696 32697 32715 +f 32696 32715 32714 +f 32697 32698 32715 +f 32698 32716 32715 +f 32698 32699 32717 +f 32698 32717 32716 +f 32699 32700 32717 +f 32700 32718 32717 +f 32700 32701 32719 +f 32700 32719 32718 +f 32701 32702 32719 +f 32702 32720 32719 +f 32702 32703 32721 +f 32702 32721 32720 +f 32703 32704 32721 +f 32704 32722 32721 +f 32704 32705 32723 +f 32704 32723 32722 +f 7047 32706 32724 +f 7047 32724 7167 +f 32706 32707 32724 +f 32707 32725 32724 +f 32707 32708 32726 +f 32707 32726 32725 +f 32708 32709 32726 +f 32709 32727 32726 +f 32709 32710 32728 +f 32709 32728 32727 +f 32710 32711 32728 +f 32711 32729 32728 +f 32711 32712 32730 +f 32711 32730 32729 +f 32712 32713 32730 +f 32713 32731 32730 +f 32713 32714 32732 +f 32713 32732 32731 +f 32714 32715 32732 +f 32715 32733 32732 +f 32715 32716 32734 +f 32715 32734 32733 +f 32716 32717 32734 +f 32717 32735 32734 +f 32717 32718 32736 +f 32717 32736 32735 +f 32718 32719 32736 +f 32719 32737 32736 +f 32719 32720 32738 +f 32719 32738 32737 +f 32720 32721 32738 +f 32721 32739 32738 +f 32721 32722 32740 +f 32721 32740 32739 +f 32722 32723 32740 +f 32723 32741 32740 +f 7167 32724 7287 +f 32724 32742 7287 +f 32724 32725 32743 +f 32724 32743 32742 +f 32725 32726 32743 +f 32726 32744 32743 +f 32726 32727 32745 +f 32726 32745 32744 +f 32727 32728 32745 +f 32728 32746 32745 +f 32728 32729 32747 +f 32728 32747 32746 +f 32729 32730 32747 +f 32730 32748 32747 +f 32730 32731 32749 +f 
32730 32749 32748 +f 32731 32732 32749 +f 32732 32750 32749 +f 32732 32733 32751 +f 32732 32751 32750 +f 32733 32734 32751 +f 32734 32752 32751 +f 32734 32735 32753 +f 32734 32753 32752 +f 32735 32736 32753 +f 32736 32754 32753 +f 32736 32737 32755 +f 32736 32755 32754 +f 32737 32738 32755 +f 32738 32756 32755 +f 32738 32739 32757 +f 32738 32757 32756 +f 32739 32740 32757 +f 32740 32758 32757 +f 32740 32741 32759 +f 32740 32759 32758 +f 7287 32742 32760 +f 7287 32760 7407 +f 32742 32743 32760 +f 32743 32761 32760 +f 32743 32744 32762 +f 32743 32762 32761 +f 32744 32745 32762 +f 32745 32763 32762 +f 32745 32746 32764 +f 32745 32764 32763 +f 32746 32747 32764 +f 32747 32765 32764 +f 32747 32748 32766 +f 32747 32766 32765 +f 32748 32749 32766 +f 32749 32767 32766 +f 32749 32750 32768 +f 32749 32768 32767 +f 32750 32751 32768 +f 32751 32769 32768 +f 32751 32752 32770 +f 32751 32770 32769 +f 32752 32753 32770 +f 32753 32771 32770 +f 32753 32754 32772 +f 32753 32772 32771 +f 32754 32755 32772 +f 32755 32773 32772 +f 32755 32756 32774 +f 32755 32774 32773 +f 32756 32757 32774 +f 32757 32775 32774 +f 32757 32758 32776 +f 32757 32776 32775 +f 32758 32759 32776 +f 32759 32777 32776 +f 7407 32760 7527 +f 32760 32778 7527 +f 32760 32761 32779 +f 32760 32779 32778 +f 32761 32762 32779 +f 32762 32780 32779 +f 32762 32763 32781 +f 32762 32781 32780 +f 32763 32764 32781 +f 32764 32782 32781 +f 32764 32765 32783 +f 32764 32783 32782 +f 32765 32766 32783 +f 32766 32784 32783 +f 32766 32767 32785 +f 32766 32785 32784 +f 32767 32768 32785 +f 32768 32786 32785 +f 32768 32769 32787 +f 32768 32787 32786 +f 32769 32770 32787 +f 32770 32788 32787 +f 32770 32771 32789 +f 32770 32789 32788 +f 32771 32772 32789 +f 32772 32790 32789 +f 32772 32773 32791 +f 32772 32791 32790 +f 32773 32774 32791 +f 32774 32792 32791 +f 32774 32775 32793 +f 32774 32793 32792 +f 32775 32776 32793 +f 32776 32794 32793 +f 32776 32777 32795 +f 32776 32795 32794 +f 7527 32778 32796 +f 7527 32796 7647 +f 32778 32779 32796 +f 32779 32797 32796 +f 32779 32780 32798 +f 32779 32798 32797 +f 32780 32781 32798 +f 32781 32799 32798 +f 32781 32782 32800 +f 32781 32800 32799 +f 32782 32783 32800 +f 32783 32801 32800 +f 32783 32784 32802 +f 32783 32802 32801 +f 32784 32785 32802 +f 32785 32803 32802 +f 32785 32786 32804 +f 32785 32804 32803 +f 32786 32787 32804 +f 32787 32805 32804 +f 32787 32788 32806 +f 32787 32806 32805 +f 32788 32789 32806 +f 32789 32807 32806 +f 32789 32790 32808 +f 32789 32808 32807 +f 32790 32791 32808 +f 32791 32809 32808 +f 32791 32792 32810 +f 32791 32810 32809 +f 32792 32793 32810 +f 32793 32811 32810 +f 32793 32794 32812 +f 32793 32812 32811 +f 32794 32795 32812 +f 32795 32813 32812 +f 7647 32796 7767 +f 32796 32814 7767 +f 32796 32797 32815 +f 32796 32815 32814 +f 32797 32798 32815 +f 32798 32816 32815 +f 32798 32799 32817 +f 32798 32817 32816 +f 32799 32800 32817 +f 32800 32818 32817 +f 32800 32801 32819 +f 32800 32819 32818 +f 32801 32802 32819 +f 32802 32820 32819 +f 32802 32803 32821 +f 32802 32821 32820 +f 32803 32804 32821 +f 32804 32822 32821 +f 32804 32805 32823 +f 32804 32823 32822 +f 32805 32806 32823 +f 32806 32824 32823 +f 32806 32807 32825 +f 32806 32825 32824 +f 32807 32808 32825 +f 32808 32826 32825 +f 32808 32809 32827 +f 32808 32827 32826 +f 32809 32810 32827 +f 32810 32828 32827 +f 32810 32811 32829 +f 32810 32829 32828 +f 32811 32812 32829 +f 32812 32830 32829 +f 32812 32813 32831 +f 32812 32831 32830 +f 7767 32814 32832 +f 7767 32832 7887 +f 32814 32815 32832 +f 32815 32833 32832 +f 32815 32816 32834 +f 
32815 32834 32833 +f 32816 32817 32834 +f 32817 32835 32834 +f 32817 32818 32836 +f 32817 32836 32835 +f 32818 32819 32836 +f 32819 32837 32836 +f 32819 32820 32838 +f 32819 32838 32837 +f 32820 32821 32838 +f 32821 32839 32838 +f 32821 32822 32840 +f 32821 32840 32839 +f 32822 32823 32840 +f 32823 32841 32840 +f 32823 32824 32842 +f 32823 32842 32841 +f 32824 32825 32842 +f 32825 32843 32842 +f 32825 32826 32844 +f 32825 32844 32843 +f 32826 32827 32844 +f 32827 32845 32844 +f 32827 32828 32846 +f 32827 32846 32845 +f 32828 32829 32846 +f 32829 32847 32846 +f 32829 32830 32848 +f 32829 32848 32847 +f 32830 32831 32848 +f 32831 32849 32848 +f 7887 32832 8007 +f 32832 32850 8007 +f 32832 32833 32851 +f 32832 32851 32850 +f 32833 32834 32851 +f 32834 32852 32851 +f 32834 32835 32853 +f 32834 32853 32852 +f 32835 32836 32853 +f 32836 32854 32853 +f 32836 32837 32855 +f 32836 32855 32854 +f 32837 32838 32855 +f 32838 32856 32855 +f 32838 32839 32857 +f 32838 32857 32856 +f 32839 32840 32857 +f 32840 32858 32857 +f 32840 32841 32859 +f 32840 32859 32858 +f 32841 32842 32859 +f 32842 32860 32859 +f 32842 32843 32861 +f 32842 32861 32860 +f 32843 32844 32861 +f 32844 32862 32861 +f 32844 32845 32863 +f 32844 32863 32862 +f 32845 32846 32863 +f 32846 32864 32863 +f 32846 32847 32865 +f 32846 32865 32864 +f 32847 32848 32865 +f 32848 32866 32865 +f 32848 32849 32867 +f 32848 32867 32866 +f 8007 32850 32868 +f 8007 32868 8127 +f 32850 32851 32868 +f 32851 32869 32868 +f 32851 32852 32870 +f 32851 32870 32869 +f 32852 32853 32870 +f 32853 32871 32870 +f 32853 32854 32872 +f 32853 32872 32871 +f 32854 32855 32872 +f 32855 32873 32872 +f 32855 32856 32874 +f 32855 32874 32873 +f 32856 32857 32874 +f 32857 32875 32874 +f 32857 32858 32876 +f 32857 32876 32875 +f 32858 32859 32876 +f 32859 32877 32876 +f 32859 32860 32878 +f 32859 32878 32877 +f 32860 32861 32878 +f 32861 32879 32878 +f 32861 32862 32880 +f 32861 32880 32879 +f 32862 32863 32880 +f 32863 32881 32880 +f 32863 32864 32882 +f 32863 32882 32881 +f 32864 32865 32882 +f 32865 32883 32882 +f 32865 32866 32884 +f 32865 32884 32883 +f 32866 32867 32884 +f 32867 32885 32884 +f 8127 32868 8247 +f 32868 32886 8247 +f 32868 32869 32887 +f 32868 32887 32886 +f 32869 32870 32887 +f 32870 32888 32887 +f 32870 32871 32889 +f 32870 32889 32888 +f 32871 32872 32889 +f 32872 32890 32889 +f 32872 32873 32891 +f 32872 32891 32890 +f 32873 32874 32891 +f 32874 32892 32891 +f 32874 32875 32893 +f 32874 32893 32892 +f 32875 32876 32893 +f 32876 32894 32893 +f 32876 32877 32895 +f 32876 32895 32894 +f 32877 32878 32895 +f 32878 32896 32895 +f 32878 32879 32897 +f 32878 32897 32896 +f 32879 32880 32897 +f 32880 32898 32897 +f 32880 32881 32899 +f 32880 32899 32898 +f 32881 32882 32899 +f 32882 32900 32899 +f 32882 32883 32901 +f 32882 32901 32900 +f 32883 32884 32901 +f 32884 32902 32901 +f 32884 32885 32903 +f 32884 32903 32902 +f 8247 32886 32904 +f 8247 32904 8367 +f 32886 32887 32904 +f 32887 32905 32904 +f 32887 32888 32906 +f 32887 32906 32905 +f 32888 32889 32906 +f 32889 32907 32906 +f 32889 32890 32908 +f 32889 32908 32907 +f 32890 32891 32908 +f 32891 32909 32908 +f 32891 32892 32910 +f 32891 32910 32909 +f 32892 32893 32910 +f 32893 32911 32910 +f 32893 32894 32912 +f 32893 32912 32911 +f 32894 32895 32912 +f 32895 32913 32912 +f 32895 32896 32914 +f 32895 32914 32913 +f 32896 32897 32914 +f 32897 32915 32914 +f 32897 32898 32916 +f 32897 32916 32915 +f 32898 32899 32916 +f 32899 32917 32916 +f 32899 32900 32918 +f 32899 32918 32917 +f 32900 32901 32918 
+f 32901 32919 32918 +f 32901 32902 32920 +f 32901 32920 32919 +f 32902 32903 32920 +f 32903 32921 32920 +f 8367 32904 8487 +f 32904 32922 8487 +f 32904 32905 32923 +f 32904 32923 32922 +f 32905 32906 32923 +f 32906 32924 32923 +f 32906 32907 32925 +f 32906 32925 32924 +f 32907 32908 32925 +f 32908 32926 32925 +f 32908 32909 32927 +f 32908 32927 32926 +f 32909 32910 32927 +f 32910 32928 32927 +f 32910 32911 32929 +f 32910 32929 32928 +f 32911 32912 32929 +f 32912 32930 32929 +f 32912 32913 32931 +f 32912 32931 32930 +f 32913 32914 32931 +f 32914 32932 32931 +f 32914 32915 32933 +f 32914 32933 32932 +f 32915 32916 32933 +f 32916 32934 32933 +f 32916 32917 32935 +f 32916 32935 32934 +f 32917 32918 32935 +f 32918 32936 32935 +f 32918 32919 32937 +f 32918 32937 32936 +f 32919 32920 32937 +f 32920 32938 32937 +f 32920 32921 32939 +f 32920 32939 32938 +f 8487 32922 32940 +f 8487 32940 8607 +f 32922 32923 32940 +f 32923 32941 32940 +f 32923 32924 32942 +f 32923 32942 32941 +f 32924 32925 32942 +f 32925 32943 32942 +f 32925 32926 32944 +f 32925 32944 32943 +f 32926 32927 32944 +f 32927 32945 32944 +f 32927 32928 32946 +f 32927 32946 32945 +f 32928 32929 32946 +f 32929 32947 32946 +f 32929 32930 32948 +f 32929 32948 32947 +f 32930 32931 32948 +f 32931 32949 32948 +f 32931 32932 32950 +f 32931 32950 32949 +f 32932 32933 32950 +f 32933 32951 32950 +f 32933 32934 32952 +f 32933 32952 32951 +f 32934 32935 32952 +f 32935 32953 32952 +f 32935 32936 32954 +f 32935 32954 32953 +f 32936 32937 32954 +f 32937 32955 32954 +f 32937 32938 32956 +f 32937 32956 32955 +f 32938 32939 32956 +f 32939 32957 32956 +f 8607 32940 8727 +f 32940 32958 8727 +f 32940 32941 32959 +f 32940 32959 32958 +f 32941 32942 32959 +f 32942 32960 32959 +f 32942 32943 32961 +f 32942 32961 32960 +f 32943 32944 32961 +f 32944 32962 32961 +f 32944 32945 32963 +f 32944 32963 32962 +f 32945 32946 32963 +f 32946 32964 32963 +f 32946 32947 32965 +f 32946 32965 32964 +f 32947 32948 32965 +f 32948 32966 32965 +f 32948 32949 32967 +f 32948 32967 32966 +f 32949 32950 32967 +f 32950 32968 32967 +f 32950 32951 32969 +f 32950 32969 32968 +f 32951 32952 32969 +f 32952 32970 32969 +f 32952 32953 32971 +f 32952 32971 32970 +f 32953 32954 32971 +f 32954 32972 32971 +f 32954 32955 32973 +f 32954 32973 32972 +f 32955 32956 32973 +f 32956 32974 32973 +f 32956 32957 32975 +f 32956 32975 32974 +f 8727 32958 32976 +f 8727 32976 8847 +f 32958 32959 32976 +f 32959 32977 32976 +f 32959 32960 32978 +f 32959 32978 32977 +f 32960 32961 32978 +f 32961 32979 32978 +f 32961 32962 32980 +f 32961 32980 32979 +f 32962 32963 32980 +f 32963 32981 32980 +f 32963 32964 32982 +f 32963 32982 32981 +f 32964 32965 32982 +f 32965 32983 32982 +f 32965 32966 32984 +f 32965 32984 32983 +f 32966 32967 32984 +f 32967 32985 32984 +f 32967 32968 32986 +f 32967 32986 32985 +f 32968 32969 32986 +f 32969 32987 32986 +f 32969 32970 32988 +f 32969 32988 32987 +f 32970 32971 32988 +f 32971 32989 32988 +f 32971 32972 32990 +f 32971 32990 32989 +f 32972 32973 32990 +f 32973 32991 32990 +f 32973 32974 32992 +f 32973 32992 32991 +f 32974 32975 32992 +f 32975 32993 32992 +f 8847 32976 8967 +f 32976 32994 8967 +f 32976 32977 32995 +f 32976 32995 32994 +f 32977 32978 32995 +f 32978 32996 32995 +f 32978 32979 32997 +f 32978 32997 32996 +f 32979 32980 32997 +f 32980 32998 32997 +f 32980 32981 32999 +f 32980 32999 32998 +f 32981 32982 32999 +f 32982 33000 32999 +f 32982 32983 33001 +f 32982 33001 33000 +f 32983 32984 33001 +f 32984 33002 33001 +f 32984 32985 33003 +f 32984 33003 33002 +f 32985 32986 33003 
+f 32986 33004 33003 +f 32986 32987 33005 +f 32986 33005 33004 +f 32987 32988 33005 +f 32988 33006 33005 +f 32988 32989 33007 +f 32988 33007 33006 +f 32989 32990 33007 +f 32990 33008 33007 +f 32990 32991 33009 +f 32990 33009 33008 +f 32991 32992 33009 +f 32992 33010 33009 +f 32992 32993 33011 +f 32992 33011 33010 +f 8967 32994 33012 +f 8967 33012 9087 +f 32994 32995 33012 +f 32995 33013 33012 +f 32995 32996 33014 +f 32995 33014 33013 +f 32996 32997 33014 +f 32997 33015 33014 +f 32997 32998 33016 +f 32997 33016 33015 +f 32998 32999 33016 +f 32999 33017 33016 +f 32999 33000 33018 +f 32999 33018 33017 +f 33000 33001 33018 +f 33001 33019 33018 +f 33001 33002 33020 +f 33001 33020 33019 +f 33002 33003 33020 +f 33003 33021 33020 +f 33003 33004 33022 +f 33003 33022 33021 +f 33004 33005 33022 +f 33005 33023 33022 +f 33005 33006 33024 +f 33005 33024 33023 +f 33006 33007 33024 +f 33007 33025 33024 +f 33007 33008 33026 +f 33007 33026 33025 +f 33008 33009 33026 +f 33009 33027 33026 +f 33009 33010 33028 +f 33009 33028 33027 +f 33010 33011 33028 +f 33011 33029 33028 +f 9087 33012 9207 +f 33012 33030 9207 +f 33012 33013 33031 +f 33012 33031 33030 +f 33013 33014 33031 +f 33014 33032 33031 +f 33014 33015 33033 +f 33014 33033 33032 +f 33015 33016 33033 +f 33016 33034 33033 +f 33016 33017 33035 +f 33016 33035 33034 +f 33017 33018 33035 +f 33018 33036 33035 +f 33018 33019 33037 +f 33018 33037 33036 +f 33019 33020 33037 +f 33020 33038 33037 +f 33020 33021 33039 +f 33020 33039 33038 +f 33021 33022 33039 +f 33022 33040 33039 +f 33022 33023 33041 +f 33022 33041 33040 +f 33023 33024 33041 +f 33024 33042 33041 +f 33024 33025 33043 +f 33024 33043 33042 +f 33025 33026 33043 +f 33026 33044 33043 +f 33026 33027 33045 +f 33026 33045 33044 +f 33027 33028 33045 +f 33028 33046 33045 +f 33028 33029 33047 +f 33028 33047 33046 +f 9207 33030 33048 +f 9207 33048 9327 +f 33030 33031 33048 +f 33031 33049 33048 +f 33031 33032 33050 +f 33031 33050 33049 +f 33032 33033 33050 +f 33033 33051 33050 +f 33033 33034 33052 +f 33033 33052 33051 +f 33034 33035 33052 +f 33035 33053 33052 +f 33035 33036 33054 +f 33035 33054 33053 +f 33036 33037 33054 +f 33037 33055 33054 +f 33037 33038 33056 +f 33037 33056 33055 +f 33038 33039 33056 +f 33039 33057 33056 +f 33039 33040 33058 +f 33039 33058 33057 +f 33040 33041 33058 +f 33041 33059 33058 +f 33041 33042 33060 +f 33041 33060 33059 +f 33042 33043 33060 +f 33043 33061 33060 +f 33043 33044 33062 +f 33043 33062 33061 +f 33044 33045 33062 +f 33045 33063 33062 +f 33045 33046 33064 +f 33045 33064 33063 +f 33046 33047 33064 +f 33047 33065 33064 +f 9327 33048 9447 +f 33048 33066 9447 +f 33048 33049 33067 +f 33048 33067 33066 +f 33049 33050 33067 +f 33050 33068 33067 +f 33050 33051 33069 +f 33050 33069 33068 +f 33051 33052 33069 +f 33052 33070 33069 +f 33052 33053 33071 +f 33052 33071 33070 +f 33053 33054 33071 +f 33054 33072 33071 +f 33054 33055 33073 +f 33054 33073 33072 +f 33055 33056 33073 +f 33056 33074 33073 +f 33056 33057 33075 +f 33056 33075 33074 +f 33057 33058 33075 +f 33058 33076 33075 +f 33058 33059 33077 +f 33058 33077 33076 +f 33059 33060 33077 +f 33060 33078 33077 +f 33060 33061 33079 +f 33060 33079 33078 +f 33061 33062 33079 +f 33062 33080 33079 +f 33062 33063 33081 +f 33062 33081 33080 +f 33063 33064 33081 +f 33064 33082 33081 +f 33064 33065 33083 +f 33064 33083 33082 +f 9447 33066 33084 +f 9447 33084 9567 +f 33066 33067 33084 +f 33067 33085 33084 +f 33067 33068 33086 +f 33067 33086 33085 +f 33068 33069 33086 +f 33069 33087 33086 +f 33069 33070 33088 +f 33069 33088 33087 +f 33070 33071 33088 
+f 33071 33089 33088 +f 33071 33072 33090 +f 33071 33090 33089 +f 33072 33073 33090 +f 33073 33091 33090 +f 33073 33074 33092 +f 33073 33092 33091 +f 33074 33075 33092 +f 33075 33093 33092 +f 33075 33076 33094 +f 33075 33094 33093 +f 33076 33077 33094 +f 33077 33095 33094 +f 33077 33078 33096 +f 33077 33096 33095 +f 33078 33079 33096 +f 33079 33097 33096 +f 33079 33080 33098 +f 33079 33098 33097 +f 33080 33081 33098 +f 33081 33099 33098 +f 33081 33082 33100 +f 33081 33100 33099 +f 33082 33083 33100 +f 33083 33101 33100 +f 9567 33084 9687 +f 33084 33102 9687 +f 33084 33085 33103 +f 33084 33103 33102 +f 33085 33086 33103 +f 33086 33104 33103 +f 33086 33087 33105 +f 33086 33105 33104 +f 33087 33088 33105 +f 33088 33106 33105 +f 33088 33089 33107 +f 33088 33107 33106 +f 33089 33090 33107 +f 33090 33108 33107 +f 33090 33091 33109 +f 33090 33109 33108 +f 33091 33092 33109 +f 33092 33110 33109 +f 33092 33093 33111 +f 33092 33111 33110 +f 33093 33094 33111 +f 33094 33112 33111 +f 33094 33095 33113 +f 33094 33113 33112 +f 33095 33096 33113 +f 33096 33114 33113 +f 33096 33097 33115 +f 33096 33115 33114 +f 33097 33098 33115 +f 33098 33116 33115 +f 33098 33099 33117 +f 33098 33117 33116 +f 33099 33100 33117 +f 33100 33118 33117 +f 33100 33101 33119 +f 33100 33119 33118 +f 9687 33102 33120 +f 9687 33120 9808 +f 33102 33103 33120 +f 33103 33121 33120 +f 33103 33104 33122 +f 33103 33122 33121 +f 33104 33105 33122 +f 33105 33123 33122 +f 33105 33106 33124 +f 33105 33124 33123 +f 33106 33107 33124 +f 33107 33125 33124 +f 33107 33108 33126 +f 33107 33126 33125 +f 33108 33109 33126 +f 33109 33127 33126 +f 33109 33110 33128 +f 33109 33128 33127 +f 33110 33111 33128 +f 33111 33129 33128 +f 33111 33112 33130 +f 33111 33130 33129 +f 33112 33113 33130 +f 33113 33131 33130 +f 33113 33114 33132 +f 33113 33132 33131 +f 33114 33115 33132 +f 33115 33133 33132 +f 33115 33116 33134 +f 33115 33134 33133 +f 33116 33117 33134 +f 33117 33135 33134 +f 33117 33118 33136 +f 33117 33136 33135 +f 33118 33119 33136 +f 33119 33137 33136 +f 9808 33120 9930 +f 33120 33138 9930 +f 33120 33121 33139 +f 33120 33139 33138 +f 33121 33122 33139 +f 33122 33140 33139 +f 33122 33123 33141 +f 33122 33141 33140 +f 33123 33124 33141 +f 33124 33142 33141 +f 33124 33125 33143 +f 33124 33143 33142 +f 33125 33126 33143 +f 33126 33144 33143 +f 33126 33127 33145 +f 33126 33145 33144 +f 33127 33128 33145 +f 33128 33146 33145 +f 33128 33129 33147 +f 33128 33147 33146 +f 33129 33130 33147 +f 33130 33148 33147 +f 33130 33131 33149 +f 33130 33149 33148 +f 33131 33132 33149 +f 33132 33150 33149 +f 33132 33133 33151 +f 33132 33151 33150 +f 33133 33134 33151 +f 33134 33152 33151 +f 33134 33135 33153 +f 33134 33153 33152 +f 33135 33136 33153 +f 33136 33154 33153 +f 33136 33137 33155 +f 33136 33155 33154 +f 9930 33138 33156 +f 9930 33156 10053 +f 33138 33139 33156 +f 33139 33157 33156 +f 33139 33140 33158 +f 33139 33158 33157 +f 33140 33141 33158 +f 33141 33159 33158 +f 33141 33142 33160 +f 33141 33160 33159 +f 33142 33143 33160 +f 33143 33161 33160 +f 33143 33144 33162 +f 33143 33162 33161 +f 33144 33145 33162 +f 33145 33163 33162 +f 33145 33146 33164 +f 33145 33164 33163 +f 33146 33147 33164 +f 33147 33165 33164 +f 33147 33148 33166 +f 33147 33166 33165 +f 33148 33149 33166 +f 33149 33167 33166 +f 33149 33150 33168 +f 33149 33168 33167 +f 33150 33151 33168 +f 33151 33169 33168 +f 33151 33152 33170 +f 33151 33170 33169 +f 33152 33153 33170 +f 33153 33171 33170 +f 33153 33154 33172 +f 33153 33172 33171 +f 33154 33155 33172 +f 33155 33173 33172 +f 33155 33174 
33173 +f 10053 33156 10178 +f 33156 33175 10178 +f 33156 33157 33176 +f 33156 33176 33175 +f 33157 33158 33176 +f 33158 33177 33176 +f 33158 33159 33178 +f 33158 33178 33177 +f 33159 33160 33178 +f 33160 33179 33178 +f 33160 33161 33180 +f 33160 33180 33179 +f 33161 33162 33180 +f 33162 33181 33180 +f 33162 33163 33182 +f 33162 33182 33181 +f 33163 33164 33182 +f 33164 33183 33182 +f 33164 33165 33184 +f 33164 33184 33183 +f 33165 33166 33184 +f 33166 33185 33184 +f 33166 33167 33186 +f 33166 33186 33185 +f 33167 33168 33186 +f 33168 33187 33186 +f 33168 33169 33188 +f 33168 33188 33187 +f 33169 33170 33188 +f 33170 33189 33188 +f 33170 33171 33190 +f 33170 33190 33189 +f 33171 33172 33190 +f 33172 33191 33190 +f 33172 33173 33192 +f 33172 33192 33191 +f 33173 33174 33192 +f 33174 33193 33192 +f 10178 33175 33194 +f 10178 33194 10304 +f 33175 33176 33194 +f 33176 33195 33194 +f 33176 33177 33196 +f 33176 33196 33195 +f 33177 33178 33196 +f 33178 33197 33196 +f 33178 33179 33198 +f 33178 33198 33197 +f 33179 33180 33198 +f 33180 33199 33198 +f 33180 33181 33200 +f 33180 33200 33199 +f 33181 33182 33200 +f 33182 33201 33200 +f 33182 33183 33202 +f 33182 33202 33201 +f 33183 33184 33202 +f 33184 33203 33202 +f 33184 33185 33204 +f 33184 33204 33203 +f 33185 33186 33204 +f 33186 33205 33204 +f 33186 33187 33206 +f 33186 33206 33205 +f 33187 33188 33206 +f 33188 33207 33206 +f 33188 33189 33208 +f 33188 33208 33207 +f 33189 33190 33208 +f 33190 33209 33208 +f 33190 33191 33210 +f 33190 33210 33209 +f 33191 33192 33210 +f 33192 33211 33210 +f 33192 33193 33212 +f 33192 33212 33211 +f 10304 33194 10432 +f 33194 33213 10432 +f 33194 33195 33214 +f 33194 33214 33213 +f 33195 33196 33214 +f 33196 33215 33214 +f 33196 33197 33216 +f 33196 33216 33215 +f 33197 33198 33216 +f 33198 33217 33216 +f 33198 33199 33218 +f 33198 33218 33217 +f 33199 33200 33218 +f 33200 33219 33218 +f 33200 33201 33220 +f 33200 33220 33219 +f 33201 33202 33220 +f 33202 33221 33220 +f 33202 33203 33222 +f 33202 33222 33221 +f 33203 33204 33222 +f 33204 33223 33222 +f 33204 33205 33224 +f 33204 33224 33223 +f 33205 33206 33224 +f 33206 33225 33224 +f 33206 33207 33226 +f 33206 33226 33225 +f 33207 33208 33226 +f 33208 33227 33226 +f 33208 33209 33228 +f 33208 33228 33227 +f 33209 33210 33228 +f 33210 33229 33228 +f 33210 33211 33230 +f 33210 33230 33229 +f 33211 33212 33230 +f 33212 33231 33230 +f 33212 33232 33231 +f 10432 33213 33233 +f 10432 33233 10561 +f 33213 33214 33233 +f 33214 33234 33233 +f 33214 33215 33235 +f 33214 33235 33234 +f 33215 33216 33235 +f 33216 33236 33235 +f 33216 33217 33237 +f 33216 33237 33236 +f 33217 33218 33237 +f 33218 33238 33237 +f 33218 33219 33239 +f 33218 33239 33238 +f 33219 33220 33239 +f 33220 33240 33239 +f 33220 33221 33241 +f 33220 33241 33240 +f 33221 33222 33241 +f 33222 33242 33241 +f 33222 33223 33243 +f 33222 33243 33242 +f 33223 33224 33243 +f 33224 33244 33243 +f 33224 33225 33245 +f 33224 33245 33244 +f 33225 33226 33245 +f 33226 33246 33245 +f 33226 33227 33247 +f 33226 33247 33246 +f 33227 33228 33247 +f 33228 33248 33247 +f 33228 33229 33249 +f 33228 33249 33248 +f 33229 33230 33249 +f 33230 33250 33249 +f 33230 33231 33251 +f 33230 33251 33250 +f 33231 33232 33251 +f 33232 33252 33251 +f 33232 33253 33252 +f 10561 33233 10690 +f 33233 33254 10690 +f 33233 33234 33255 +f 33233 33255 33254 +f 33234 33235 33255 +f 33235 33256 33255 +f 33235 33236 33257 +f 33235 33257 33256 +f 33236 33237 33257 +f 33237 33258 33257 +f 33237 33238 33259 +f 33237 33259 33258 +f 33238 33239 33259 
+f 33239 33260 33259 +f 33239 33240 33261 +f 33239 33261 33260 +f 33240 33241 33261 +f 33241 33262 33261 +f 33241 33242 33263 +f 33241 33263 33262 +f 33242 33243 33263 +f 33243 33264 33263 +f 33243 33244 33265 +f 33243 33265 33264 +f 33244 33245 33265 +f 33245 33266 33265 +f 33245 33246 33267 +f 33245 33267 33266 +f 33246 33247 33267 +f 33247 33268 33267 +f 33247 33248 33269 +f 33247 33269 33268 +f 33248 33249 33269 +f 33249 33270 33269 +f 33249 33250 33271 +f 33249 33271 33270 +f 33250 33251 33271 +f 33251 33272 33271 +f 33251 33252 33273 +f 33251 33273 33272 +f 33252 33253 33273 +f 33253 33274 33273 +f 10690 33254 33275 +f 10690 33275 10819 +f 33254 33255 33275 +f 33255 33276 33275 +f 33255 33256 33277 +f 33255 33277 33276 +f 33256 33257 33277 +f 33257 33278 33277 +f 33257 33258 33279 +f 33257 33279 33278 +f 33258 33259 33279 +f 33259 33280 33279 +f 33259 33260 33281 +f 33259 33281 33280 +f 33260 33261 33281 +f 33261 33282 33281 +f 33261 33262 33283 +f 33261 33283 33282 +f 33262 33263 33283 +f 33263 33284 33283 +f 33263 33264 33285 +f 33263 33285 33284 +f 33264 33265 33285 +f 33265 33286 33285 +f 33265 33266 33287 +f 33265 33287 33286 +f 33266 33267 33287 +f 33267 33288 33287 +f 33267 33268 33289 +f 33267 33289 33288 +f 33268 33269 33289 +f 33269 33290 33289 +f 33269 33270 33291 +f 33269 33291 33290 +f 33270 33271 33291 +f 33271 33292 33291 +f 33271 33272 33293 +f 33271 33293 33292 +f 33272 33273 33293 +f 33273 33294 33293 +f 33273 33274 33295 +f 33273 33295 33294 +f 10819 33275 10948 +f 33275 33296 10948 +f 33275 33276 33297 +f 33275 33297 33296 +f 33276 33277 33297 +f 33277 33298 33297 +f 33277 33278 33299 +f 33277 33299 33298 +f 33278 33279 33299 +f 33279 33300 33299 +f 33279 33280 33301 +f 33279 33301 33300 +f 33280 33281 33301 +f 33281 33302 33301 +f 33281 33282 33303 +f 33281 33303 33302 +f 33282 33283 33303 +f 33283 33304 33303 +f 33283 33284 33305 +f 33283 33305 33304 +f 33284 33285 33305 +f 33285 33306 33305 +f 33285 33286 33307 +f 33285 33307 33306 +f 33286 33287 33307 +f 33287 33308 33307 +f 33287 33288 33309 +f 33287 33309 33308 +f 33288 33289 33309 +f 33289 33310 33309 +f 33289 33290 33311 +f 33289 33311 33310 +f 33290 33291 33311 +f 33291 33312 33311 +f 33291 33292 33313 +f 33291 33313 33312 +f 33292 33293 33313 +f 33293 33314 33313 +f 33293 33294 33315 +f 33293 33315 33314 +f 33294 33295 33315 +f 33295 33316 33315 +f 33295 33317 33316 +f 10948 33296 33318 +f 10948 33318 11077 +f 33296 33297 33318 +f 33297 33319 33318 +f 33297 33298 33320 +f 33297 33320 33319 +f 33298 33299 33320 +f 33299 33321 33320 +f 33299 33300 33322 +f 33299 33322 33321 +f 33300 33301 33322 +f 33301 33323 33322 +f 33301 33302 33324 +f 33301 33324 33323 +f 33302 33303 33324 +f 33303 33325 33324 +f 33303 33304 33326 +f 33303 33326 33325 +f 33304 33305 33326 +f 33305 33327 33326 +f 33305 33306 33328 +f 33305 33328 33327 +f 33306 33307 33328 +f 33307 33329 33328 +f 33307 33308 33330 +f 33307 33330 33329 +f 33308 33309 33330 +f 33309 33331 33330 +f 33309 33310 33332 +f 33309 33332 33331 +f 33310 33311 33332 +f 33311 33333 33332 +f 33311 33312 33334 +f 33311 33334 33333 +f 33312 33313 33334 +f 33313 33335 33334 +f 33313 33314 33336 +f 33313 33336 33335 +f 33314 33315 33336 +f 33315 33337 33336 +f 33315 33316 33338 +f 33315 33338 33337 +f 33316 33317 33338 +f 33317 33339 33338 +f 11077 33318 11206 +f 33318 33340 11206 +f 33318 33319 33341 +f 33318 33341 33340 +f 33319 33320 33341 +f 33320 33342 33341 +f 33320 33321 33343 +f 33320 33343 33342 +f 33321 33322 33343 +f 33322 33344 33343 +f 33322 33323 33345 +f 
33322 33345 33344 +f 33323 33324 33345 +f 33324 33346 33345 +f 33324 33325 33347 +f 33324 33347 33346 +f 33325 33326 33347 +f 33326 33348 33347 +f 33326 33327 33349 +f 33326 33349 33348 +f 33327 33328 33349 +f 33328 33350 33349 +f 33328 33329 33351 +f 33328 33351 33350 +f 33329 33330 33351 +f 33330 33352 33351 +f 33330 33331 33353 +f 33330 33353 33352 +f 33331 33332 33353 +f 33332 33354 33353 +f 33332 33333 33355 +f 33332 33355 33354 +f 33333 33334 33355 +f 33334 33356 33355 +f 33334 33335 33357 +f 33334 33357 33356 +f 33335 33336 33357 +f 33336 33358 33357 +f 33336 33337 33359 +f 33336 33359 33358 +f 33337 33338 33359 +f 33338 33360 33359 +f 33338 33339 33361 +f 33338 33361 33360 +f 11206 33340 33362 +f 11206 33362 11335 +f 33340 33341 33362 +f 33341 33363 33362 +f 33341 33342 33364 +f 33341 33364 33363 +f 33342 33343 33364 +f 33343 33365 33364 +f 33343 33344 33366 +f 33343 33366 33365 +f 33344 33345 33366 +f 33345 33367 33366 +f 33345 33346 33368 +f 33345 33368 33367 +f 33346 33347 33368 +f 33347 33369 33368 +f 33347 33348 33370 +f 33347 33370 33369 +f 33348 33349 33370 +f 33349 33371 33370 +f 33349 33350 33372 +f 33349 33372 33371 +f 33350 33351 33372 +f 33351 33373 33372 +f 33351 33352 33374 +f 33351 33374 33373 +f 33352 33353 33374 +f 33353 33375 33374 +f 33353 33354 33376 +f 33353 33376 33375 +f 33354 33355 33376 +f 33355 33377 33376 +f 33355 33356 33378 +f 33355 33378 33377 +f 33356 33357 33378 +f 33357 33379 33378 +f 33357 33358 33380 +f 33357 33380 33379 +f 33358 33359 33380 +f 33359 33381 33380 +f 33359 33360 33382 +f 33359 33382 33381 +f 33360 33361 33382 +f 33361 33383 33382 +f 33361 33384 33383 +f 11335 33362 11464 +f 33362 33385 11464 +f 33362 33363 33386 +f 33362 33386 33385 +f 33363 33364 33386 +f 33364 33387 33386 +f 33364 33365 33388 +f 33364 33388 33387 +f 33365 33366 33388 +f 33366 33389 33388 +f 33366 33367 33390 +f 33366 33390 33389 +f 33367 33368 33390 +f 33368 33391 33390 +f 33368 33369 33392 +f 33368 33392 33391 +f 33369 33370 33392 +f 33370 33393 33392 +f 33370 33371 33394 +f 33370 33394 33393 +f 33371 33372 33394 +f 33372 33395 33394 +f 33372 33373 33396 +f 33372 33396 33395 +f 33373 33374 33396 +f 33374 33397 33396 +f 33374 33375 33398 +f 33374 33398 33397 +f 33375 33376 33398 +f 33376 33399 33398 +f 33376 33377 33400 +f 33376 33400 33399 +f 33377 33378 33400 +f 33378 33401 33400 +f 33378 33379 33402 +f 33378 33402 33401 +f 33379 33380 33402 +f 33380 33403 33402 +f 33380 33381 33404 +f 33380 33404 33403 +f 33381 33382 33404 +f 33382 33405 33404 +f 33382 33383 33406 +f 33382 33406 33405 +f 33383 33384 33406 +f 33384 33407 33406 +f 33384 33408 33407 +f 11464 33385 33409 +f 11464 33409 11593 +f 33385 33386 33409 +f 33386 33410 33409 +f 33386 33387 33411 +f 33386 33411 33410 +f 33387 33388 33411 +f 33388 33412 33411 +f 33388 33389 33413 +f 33388 33413 33412 +f 33389 33390 33413 +f 33390 33414 33413 +f 33390 33391 33415 +f 33390 33415 33414 +f 33391 33392 33415 +f 33392 33416 33415 +f 33392 33393 33417 +f 33392 33417 33416 +f 33393 33394 33417 +f 33394 33418 33417 +f 33394 33395 33419 +f 33394 33419 33418 +f 33395 33396 33419 +f 33396 33420 33419 +f 33396 33397 33421 +f 33396 33421 33420 +f 33397 33398 33421 +f 33398 33422 33421 +f 33398 33399 33423 +f 33398 33423 33422 +f 33399 33400 33423 +f 33400 33424 33423 +f 33400 33401 33425 +f 33400 33425 33424 +f 33401 33402 33425 +f 33402 33426 33425 +f 33402 33403 33427 +f 33402 33427 33426 +f 33403 33404 33427 +f 33404 33428 33427 +f 33404 33405 33429 +f 33404 33429 33428 +f 33405 33406 33429 +f 33406 33430 33429 +f 33406 
33407 33431 +f 33406 33431 33430 +f 33407 33408 33431 +f 33408 33432 33431 +f 11593 33409 11722 +f 33409 33433 11722 +f 33409 33410 33434 +f 33409 33434 33433 +f 33410 33411 33434 +f 33411 33435 33434 +f 33411 33412 33436 +f 33411 33436 33435 +f 33412 33413 33436 +f 33413 33437 33436 +f 33413 33414 33438 +f 33413 33438 33437 +f 33414 33415 33438 +f 33415 33439 33438 +f 33415 33416 33440 +f 33415 33440 33439 +f 33416 33417 33440 +f 33417 33441 33440 +f 33417 33418 33442 +f 33417 33442 33441 +f 33418 33419 33442 +f 33419 33443 33442 +f 33419 33420 33444 +f 33419 33444 33443 +f 33420 33421 33444 +f 33421 33445 33444 +f 33421 33422 33446 +f 33421 33446 33445 +f 33422 33423 33446 +f 33423 33447 33446 +f 33423 33424 33448 +f 33423 33448 33447 +f 33424 33425 33448 +f 33425 33449 33448 +f 33425 33426 33450 +f 33425 33450 33449 +f 33426 33427 33450 +f 33427 33451 33450 +f 33427 33428 33452 +f 33427 33452 33451 +f 33428 33429 33452 +f 33429 33453 33452 +f 33429 33430 33454 +f 33429 33454 33453 +f 33430 33431 33454 +f 33431 33455 33454 +f 33431 33432 33456 +f 33431 33456 33455 +f 11722 33433 33457 +f 11722 33457 11851 +f 33433 33434 33457 +f 33434 33458 33457 +f 33434 33435 33459 +f 33434 33459 33458 +f 33435 33436 33459 +f 33436 33460 33459 +f 33436 33437 33461 +f 33436 33461 33460 +f 33437 33438 33461 +f 33438 33462 33461 +f 33438 33439 33463 +f 33438 33463 33462 +f 33439 33440 33463 +f 33440 33464 33463 +f 33440 33441 33465 +f 33440 33465 33464 +f 33441 33442 33465 +f 33442 33466 33465 +f 33442 33443 33467 +f 33442 33467 33466 +f 33443 33444 33467 +f 33444 33468 33467 +f 33444 33445 33469 +f 33444 33469 33468 +f 33445 33446 33469 +f 33446 33470 33469 +f 33446 33447 33471 +f 33446 33471 33470 +f 33447 33448 33471 +f 33448 33472 33471 +f 33448 33449 33473 +f 33448 33473 33472 +f 33449 33450 33473 +f 33450 33474 33473 +f 33450 33451 33475 +f 33450 33475 33474 +f 33451 33452 33475 +f 33452 33476 33475 +f 33452 33453 33477 +f 33452 33477 33476 +f 33453 33454 33477 +f 33454 33478 33477 +f 33454 33455 33479 +f 33454 33479 33478 +f 33455 33456 33479 +f 33456 33480 33479 +f 33456 33481 33480 +f 11851 33457 11980 +f 33457 33482 11980 +f 33457 33458 33483 +f 33457 33483 33482 +f 33458 33459 33483 +f 33459 33484 33483 +f 33459 33460 33485 +f 33459 33485 33484 +f 33460 33461 33485 +f 33461 33486 33485 +f 33461 33462 33487 +f 33461 33487 33486 +f 33462 33463 33487 +f 33463 33488 33487 +f 33463 33464 33489 +f 33463 33489 33488 +f 33464 33465 33489 +f 33465 33490 33489 +f 33465 33466 33491 +f 33465 33491 33490 +f 33466 33467 33491 +f 33467 33492 33491 +f 33467 33468 33493 +f 33467 33493 33492 +f 33468 33469 33493 +f 33469 33494 33493 +f 33469 33470 33495 +f 33469 33495 33494 +f 33470 33471 33495 +f 33471 33496 33495 +f 33471 33472 33497 +f 33471 33497 33496 +f 33472 33473 33497 +f 33473 33498 33497 +f 33473 33474 33499 +f 33473 33499 33498 +f 33474 33475 33499 +f 33475 33500 33499 +f 33475 33476 33501 +f 33475 33501 33500 +f 33476 33477 33501 +f 33477 33502 33501 +f 33477 33478 33503 +f 33477 33503 33502 +f 33478 33479 33503 +f 33479 33504 33503 +f 33479 33480 33505 +f 33479 33505 33504 +f 33480 33481 33505 +f 33481 33506 33505 +f 33481 33507 33506 +f 11980 33482 33508 +f 11980 33508 12109 +f 33482 33483 33508 +f 33483 33509 33508 +f 33483 33484 33510 +f 33483 33510 33509 +f 33484 33485 33510 +f 33485 33511 33510 +f 33485 33486 33512 +f 33485 33512 33511 +f 33486 33487 33512 +f 33487 33513 33512 +f 33487 33488 33514 +f 33487 33514 33513 +f 33488 33489 33514 +f 33489 33515 33514 +f 33489 33490 33516 +f 33489 33516 
33515 +f 33490 33491 33516 +f 33491 33517 33516 +f 33491 33492 33518 +f 33491 33518 33517 +f 33492 33493 33518 +f 33493 33519 33518 +f 33493 33494 33520 +f 33493 33520 33519 +f 33494 33495 33520 +f 33495 33521 33520 +f 33495 33496 33522 +f 33495 33522 33521 +f 33496 33497 33522 +f 33497 33523 33522 +f 33497 33498 33524 +f 33497 33524 33523 +f 33498 33499 33524 +f 33499 33525 33524 +f 33499 33500 33526 +f 33499 33526 33525 +f 33500 33501 33526 +f 33501 33527 33526 +f 33501 33502 33528 +f 33501 33528 33527 +f 33502 33503 33528 +f 33503 33529 33528 +f 33503 33504 33530 +f 33503 33530 33529 +f 33504 33505 33530 +f 33505 33531 33530 +f 33505 33506 33532 +f 33505 33532 33531 +f 33506 33507 33532 +f 33507 33533 33532 +f 33507 33534 33533 +f 12109 33508 12238 +f 33508 33535 12238 +f 33508 33509 33536 +f 33508 33536 33535 +f 33509 33510 33536 +f 33510 33537 33536 +f 33510 33511 33538 +f 33510 33538 33537 +f 33511 33512 33538 +f 33512 33539 33538 +f 33512 33513 33540 +f 33512 33540 33539 +f 33513 33514 33540 +f 33514 33541 33540 +f 33514 33515 33542 +f 33514 33542 33541 +f 33515 33516 33542 +f 33516 33543 33542 +f 33516 33517 33544 +f 33516 33544 33543 +f 33517 33518 33544 +f 33518 33545 33544 +f 33518 33519 33546 +f 33518 33546 33545 +f 33519 33520 33546 +f 33520 33547 33546 +f 33520 33521 33548 +f 33520 33548 33547 +f 33521 33522 33548 +f 33522 33549 33548 +f 33522 33523 33550 +f 33522 33550 33549 +f 33523 33524 33550 +f 33524 33551 33550 +f 33524 33525 33552 +f 33524 33552 33551 +f 33525 33526 33552 +f 33526 33553 33552 +f 33526 33527 33554 +f 33526 33554 33553 +f 33527 33528 33554 +f 33528 33555 33554 +f 33528 33529 33556 +f 33528 33556 33555 +f 33529 33530 33556 +f 33530 33557 33556 +f 33530 33531 33558 +f 33530 33558 33557 +f 33531 33532 33558 +f 33532 33559 33558 +f 33532 33533 33560 +f 33532 33560 33559 +f 33533 33534 33560 +f 33534 33561 33560 +f 33534 33562 33561 +f 12238 33535 33563 +f 12238 33563 12367 +f 33535 33536 33563 +f 33536 33564 33563 +f 33536 33537 33565 +f 33536 33565 33564 +f 33537 33538 33565 +f 33538 33566 33565 +f 33538 33539 33567 +f 33538 33567 33566 +f 33539 33540 33567 +f 33540 33568 33567 +f 33540 33541 33569 +f 33540 33569 33568 +f 33541 33542 33569 +f 33542 33570 33569 +f 33542 33543 33571 +f 33542 33571 33570 +f 33543 33544 33571 +f 33544 33572 33571 +f 33544 33545 33573 +f 33544 33573 33572 +f 33545 33546 33573 +f 33546 33574 33573 +f 33546 33547 33575 +f 33546 33575 33574 +f 33547 33548 33575 +f 33548 33576 33575 +f 33548 33549 33577 +f 33548 33577 33576 +f 33549 33550 33577 +f 33550 33578 33577 +f 33550 33551 33579 +f 33550 33579 33578 +f 33551 33552 33579 +f 33552 33580 33579 +f 33552 33553 33581 +f 33552 33581 33580 +f 33553 33554 33581 +f 33554 33582 33581 +f 33554 33555 33583 +f 33554 33583 33582 +f 33555 33556 33583 +f 33556 33584 33583 +f 33556 33557 33585 +f 33556 33585 33584 +f 33557 33558 33585 +f 33558 33586 33585 +f 33558 33559 33587 +f 33558 33587 33586 +f 33559 33560 33587 +f 33560 33588 33587 +f 33560 33561 33589 +f 33560 33589 33588 +f 33561 33562 33589 +f 33562 33590 33589 +f 12367 33563 12496 +f 33563 33591 12496 +f 33563 33564 33592 +f 33563 33592 33591 +f 33564 33565 33592 +f 33565 33593 33592 +f 33565 33566 33594 +f 33565 33594 33593 +f 33566 33567 33594 +f 33567 33595 33594 +f 33567 33568 33596 +f 33567 33596 33595 +f 33568 33569 33596 +f 33569 33597 33596 +f 33569 33570 33598 +f 33569 33598 33597 +f 33570 33571 33598 +f 33571 33599 33598 +f 33571 33572 33600 +f 33571 33600 33599 +f 33572 33573 33600 +f 33573 33601 33600 +f 33573 33574 33602 
+f 33573 33602 33601 +f 33574 33575 33602 +f 33575 33603 33602 +f 33575 33576 33604 +f 33575 33604 33603 +f 33576 33577 33604 +f 33577 33605 33604 +f 33577 33578 33606 +f 33577 33606 33605 +f 33578 33579 33606 +f 33579 33607 33606 +f 33579 33580 33608 +f 33579 33608 33607 +f 33580 33581 33608 +f 33581 33609 33608 +f 33581 33582 33610 +f 33581 33610 33609 +f 33582 33583 33610 +f 33583 33611 33610 +f 33583 33584 33612 +f 33583 33612 33611 +f 33584 33585 33612 +f 33585 33613 33612 +f 33585 33586 33614 +f 33585 33614 33613 +f 33586 33587 33614 +f 33587 33615 33614 +f 33587 33588 33616 +f 33587 33616 33615 +f 33588 33589 33616 +f 33589 33617 33616 +f 33589 33590 33618 +f 33589 33618 33617 +f 12496 33591 33619 +f 12496 33619 12625 +f 33591 33592 33619 +f 33592 33620 33619 +f 33592 33593 33621 +f 33592 33621 33620 +f 33593 33594 33621 +f 33594 33622 33621 +f 33594 33595 33623 +f 33594 33623 33622 +f 33595 33596 33623 +f 33596 33624 33623 +f 33596 33597 33625 +f 33596 33625 33624 +f 33597 33598 33625 +f 33598 33626 33625 +f 33598 33599 33627 +f 33598 33627 33626 +f 33599 33600 33627 +f 33600 33628 33627 +f 33600 33601 33629 +f 33600 33629 33628 +f 33601 33602 33629 +f 33602 33630 33629 +f 33602 33603 33631 +f 33602 33631 33630 +f 33603 33604 33631 +f 33604 33632 33631 +f 33604 33605 33633 +f 33604 33633 33632 +f 33605 33606 33633 +f 33606 33634 33633 +f 33606 33607 33635 +f 33606 33635 33634 +f 33607 33608 33635 +f 33608 33636 33635 +f 33608 33609 33637 +f 33608 33637 33636 +f 33609 33610 33637 +f 33610 33638 33637 +f 33610 33611 33639 +f 33610 33639 33638 +f 33611 33612 33639 +f 33612 33640 33639 +f 33612 33613 33641 +f 33612 33641 33640 +f 33613 33614 33641 +f 33614 33642 33641 +f 33614 33615 33643 +f 33614 33643 33642 +f 33615 33616 33643 +f 33616 33644 33643 +f 33616 33617 33645 +f 33616 33645 33644 +f 33617 33618 33645 +f 33618 33646 33645 +f 33618 33647 33646 +f 12625 33619 12754 +f 33619 33648 12754 +f 33619 33620 33649 +f 33619 33649 33648 +f 33620 33621 33649 +f 33621 33650 33649 +f 33621 33622 33651 +f 33621 33651 33650 +f 33622 33623 33651 +f 33623 33652 33651 +f 33623 33624 33653 +f 33623 33653 33652 +f 33624 33625 33653 +f 33625 33654 33653 +f 33625 33626 33655 +f 33625 33655 33654 +f 33626 33627 33655 +f 33627 33656 33655 +f 33627 33628 33657 +f 33627 33657 33656 +f 33628 33629 33657 +f 33629 33658 33657 +f 33629 33630 33659 +f 33629 33659 33658 +f 33630 33631 33659 +f 33631 33660 33659 +f 33631 33632 33661 +f 33631 33661 33660 +f 33632 33633 33661 +f 33633 33662 33661 +f 33633 33634 33663 +f 33633 33663 33662 +f 33634 33635 33663 +f 33635 33664 33663 +f 33635 33636 33665 +f 33635 33665 33664 +f 33636 33637 33665 +f 33637 33666 33665 +f 33637 33638 33667 +f 33637 33667 33666 +f 33638 33639 33667 +f 33639 33668 33667 +f 33639 33640 33669 +f 33639 33669 33668 +f 33640 33641 33669 +f 33641 33670 33669 +f 33641 33642 33671 +f 33641 33671 33670 +f 33642 33643 33671 +f 33643 33672 33671 +f 33643 33644 33673 +f 33643 33673 33672 +f 33644 33645 33673 +f 33645 33674 33673 +f 33645 33646 33675 +f 33645 33675 33674 +f 33646 33647 33675 +f 33647 33676 33675 +f 33647 33677 33676 +f 12754 33648 33678 +f 12754 33678 12883 +f 33648 33649 33678 +f 33649 33679 33678 +f 33649 33650 33680 +f 33649 33680 33679 +f 33650 33651 33680 +f 33651 33681 33680 +f 33651 33652 33682 +f 33651 33682 33681 +f 33652 33653 33682 +f 33653 33683 33682 +f 33653 33654 33684 +f 33653 33684 33683 +f 33654 33655 33684 +f 33655 33685 33684 +f 33655 33656 33686 +f 33655 33686 33685 +f 33656 33657 33686 +f 33657 33687 33686 +f 
33657 33658 33688 +f 33657 33688 33687 +f 33658 33659 33688 +f 33659 33689 33688 +f 33659 33660 33690 +f 33659 33690 33689 +f 33660 33661 33690 +f 33661 33691 33690 +f 33661 33662 33692 +f 33661 33692 33691 +f 33662 33663 33692 +f 33663 33693 33692 +f 33663 33664 33694 +f 33663 33694 33693 +f 33664 33665 33694 +f 33665 33695 33694 +f 33665 33666 33696 +f 33665 33696 33695 +f 33666 33667 33696 +f 33667 33697 33696 +f 33667 33668 33698 +f 33667 33698 33697 +f 33668 33669 33698 +f 33669 33699 33698 +f 33669 33670 33700 +f 33669 33700 33699 +f 33670 33671 33700 +f 33671 33701 33700 +f 33671 33672 33702 +f 33671 33702 33701 +f 33672 33673 33702 +f 33673 33703 33702 +f 33673 33674 33704 +f 33673 33704 33703 +f 33674 33675 33704 +f 33675 33705 33704 +f 33675 33676 33706 +f 33675 33706 33705 +f 33676 33677 33706 +f 33677 33707 33706 +f 12883 33678 13012 +f 33678 33708 13012 +f 33678 33679 33709 +f 33678 33709 33708 +f 33679 33680 33709 +f 33680 33710 33709 +f 33680 33681 33711 +f 33680 33711 33710 +f 33681 33682 33711 +f 33682 33712 33711 +f 33682 33683 33713 +f 33682 33713 33712 +f 33683 33684 33713 +f 33684 33714 33713 +f 33684 33685 33715 +f 33684 33715 33714 +f 33685 33686 33715 +f 33686 33716 33715 +f 33686 33687 33717 +f 33686 33717 33716 +f 33687 33688 33717 +f 33688 33718 33717 +f 33688 33689 33719 +f 33688 33719 33718 +f 33689 33690 33719 +f 33690 33720 33719 +f 33690 33691 33721 +f 33690 33721 33720 +f 33691 33692 33721 +f 33692 33722 33721 +f 33692 33693 33723 +f 33692 33723 33722 +f 33693 33694 33723 +f 33694 33724 33723 +f 33694 33695 33725 +f 33694 33725 33724 +f 33695 33696 33725 +f 33696 33726 33725 +f 33696 33697 33727 +f 33696 33727 33726 +f 33697 33698 33727 +f 33698 33728 33727 +f 33698 33699 33729 +f 33698 33729 33728 +f 33699 33700 33729 +f 33700 33730 33729 +f 33700 33701 33731 +f 33700 33731 33730 +f 33701 33702 33731 +f 33702 33732 33731 +f 33702 33703 33733 +f 33702 33733 33732 +f 33703 33704 33733 +f 33704 33734 33733 +f 33704 33705 33735 +f 33704 33735 33734 +f 33705 33706 33735 +f 33706 33736 33735 +f 33706 33707 33737 +f 33706 33737 33736 +f 13012 33708 33738 +f 13012 33738 13141 +f 33708 33709 33738 +f 33709 33739 33738 +f 33709 33710 33740 +f 33709 33740 33739 +f 33710 33711 33740 +f 33711 33741 33740 +f 33711 33712 33742 +f 33711 33742 33741 +f 33712 33713 33742 +f 33713 33743 33742 +f 33713 33714 33744 +f 33713 33744 33743 +f 33714 33715 33744 +f 33715 33745 33744 +f 33715 33716 33746 +f 33715 33746 33745 +f 33716 33717 33746 +f 33717 33747 33746 +f 33717 33718 33748 +f 33717 33748 33747 +f 33718 33719 33748 +f 33719 33749 33748 +f 33719 33720 33750 +f 33719 33750 33749 +f 33720 33721 33750 +f 33721 33751 33750 +f 33721 33722 33752 +f 33721 33752 33751 +f 33722 33723 33752 +f 33723 33753 33752 +f 33723 33724 33754 +f 33723 33754 33753 +f 33724 33725 33754 +f 33725 33755 33754 +f 33725 33726 33756 +f 33725 33756 33755 +f 33726 33727 33756 +f 33727 33757 33756 +f 33727 33728 33758 +f 33727 33758 33757 +f 33728 33729 33758 +f 33729 33759 33758 +f 33729 33730 33760 +f 33729 33760 33759 +f 33730 33731 33760 +f 33731 33761 33760 +f 33731 33732 33762 +f 33731 33762 33761 +f 33732 33733 33762 +f 33733 33763 33762 +f 33733 33734 33764 +f 33733 33764 33763 +f 33734 33735 33764 +f 33735 33765 33764 +f 33735 33736 33766 +f 33735 33766 33765 +f 33736 33737 33766 +f 33737 33767 33766 +f 33737 33768 33767 +f 13141 33738 13270 +f 33738 33769 13270 +f 33738 33739 33770 +f 33738 33770 33769 +f 33739 33740 33770 +f 33740 33771 33770 +f 33740 33741 33772 +f 33740 33772 33771 +f 33741 
33742 33772 +f 33742 33773 33772 +f 33742 33743 33774 +f 33742 33774 33773 +f 33743 33744 33774 +f 33744 33775 33774 +f 33744 33745 33776 +f 33744 33776 33775 +f 33745 33746 33776 +f 33746 33777 33776 +f 33746 33747 33778 +f 33746 33778 33777 +f 33747 33748 33778 +f 33748 33779 33778 +f 33748 33749 33780 +f 33748 33780 33779 +f 33749 33750 33780 +f 33750 33781 33780 +f 33750 33751 33782 +f 33750 33782 33781 +f 33751 33752 33782 +f 33752 33783 33782 +f 33752 33753 33784 +f 33752 33784 33783 +f 33753 33754 33784 +f 33754 33785 33784 +f 33754 33755 33786 +f 33754 33786 33785 +f 33755 33756 33786 +f 33756 33787 33786 +f 33756 33757 33788 +f 33756 33788 33787 +f 33757 33758 33788 +f 33758 33789 33788 +f 33758 33759 33790 +f 33758 33790 33789 +f 33759 33760 33790 +f 33760 33791 33790 +f 33760 33761 33792 +f 33760 33792 33791 +f 33761 33762 33792 +f 33762 33793 33792 +f 33762 33763 33794 +f 33762 33794 33793 +f 33763 33764 33794 +f 33764 33795 33794 +f 33764 33765 33796 +f 33764 33796 33795 +f 33765 33766 33796 +f 33766 33797 33796 +f 33766 33767 33798 +f 33766 33798 33797 +f 33767 33768 33798 +f 33768 33799 33798 +f 13270 33769 33800 +f 13270 33800 13399 +f 33769 33770 33800 +f 33770 33801 33800 +f 33770 33771 33802 +f 33770 33802 33801 +f 33771 33772 33802 +f 33772 33803 33802 +f 33772 33773 33804 +f 33772 33804 33803 +f 33773 33774 33804 +f 33774 33805 33804 +f 33774 33775 33806 +f 33774 33806 33805 +f 33775 33776 33806 +f 33776 33807 33806 +f 33776 33777 33808 +f 33776 33808 33807 +f 33777 33778 33808 +f 33778 33809 33808 +f 33778 33779 33810 +f 33778 33810 33809 +f 33779 33780 33810 +f 33780 33811 33810 +f 33780 33781 33812 +f 33780 33812 33811 +f 33781 33782 33812 +f 33782 33813 33812 +f 33782 33783 33814 +f 33782 33814 33813 +f 33783 33784 33814 +f 33784 33815 33814 +f 33784 33785 33816 +f 33784 33816 33815 +f 33785 33786 33816 +f 33786 33817 33816 +f 33786 33787 33818 +f 33786 33818 33817 +f 33787 33788 33818 +f 33788 33819 33818 +f 33788 33789 33820 +f 33788 33820 33819 +f 33789 33790 33820 +f 33790 33821 33820 +f 33790 33791 33822 +f 33790 33822 33821 +f 33791 33792 33822 +f 33792 33823 33822 +f 33792 33793 33824 +f 33792 33824 33823 +f 33793 33794 33824 +f 33794 33825 33824 +f 33794 33795 33826 +f 33794 33826 33825 +f 33795 33796 33826 +f 33796 33827 33826 +f 33796 33797 33828 +f 33796 33828 33827 +f 33797 33798 33828 +f 33798 33829 33828 +f 33798 33799 33830 +f 33798 33830 33829 +f 13399 33800 13528 +f 33800 33831 13528 +f 33800 33801 33832 +f 33800 33832 33831 +f 33801 33802 33832 +f 33802 33833 33832 +f 33802 33803 33834 +f 33802 33834 33833 +f 33803 33804 33834 +f 33804 33835 33834 +f 33804 33805 33836 +f 33804 33836 33835 +f 33805 33806 33836 +f 33806 33837 33836 +f 33806 33807 33838 +f 33806 33838 33837 +f 33807 33808 33838 +f 33808 33839 33838 +f 33808 33809 33840 +f 33808 33840 33839 +f 33809 33810 33840 +f 33810 33841 33840 +f 33810 33811 33842 +f 33810 33842 33841 +f 33811 33812 33842 +f 33812 33843 33842 +f 33812 33813 33844 +f 33812 33844 33843 +f 33813 33814 33844 +f 33814 33845 33844 +f 33814 33815 33846 +f 33814 33846 33845 +f 33815 33816 33846 +f 33816 33847 33846 +f 33816 33817 33848 +f 33816 33848 33847 +f 33817 33818 33848 +f 33818 33849 33848 +f 33818 33819 33850 +f 33818 33850 33849 +f 33819 33820 33850 +f 33820 33851 33850 +f 33820 33821 33852 +f 33820 33852 33851 +f 33821 33822 33852 +f 33822 33853 33852 +f 33822 33823 33854 +f 33822 33854 33853 +f 33823 33824 33854 +f 33824 33855 33854 +f 33824 33825 33856 +f 33824 33856 33855 +f 33825 33826 33856 +f 33826 33857 
[... the remainder of the template mesh's triangle list, several thousand more "f v1 v2 v3" face-index lines, is elided here for readability; it runs uninterrupted to the end of the file and carries only vertex connectivity ...]
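The block above is plain Wavefront-OBJ face data: each `f v1 v2 v3` record is one triangle of 1-based indices into the vertex list earlier in the file. As a minimal, hypothetical sketch (not part of this patch; the helper name is invented), such a face block can be read into a 0-based numpy index array:

    import numpy as np

    def load_obj_faces(path):
        """Collect the 'f v1 v2 v3' triangles of an OBJ file as an (F, 3) int array."""
        faces = []
        with open(path) as fh:
            for line in fh:
                if line.startswith('f '):
                    # OBJ indices are 1-based and may carry '/vt/vn' suffixes.
                    faces.append([int(tok.split('/')[0]) - 1 for tok in line.split()[1:4]])
        return np.asarray(faces, dtype=np.int64)
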
diff --git a/motion-gan-pipeline/preprocessing/face_tracking/3DMM/topology_info.npy b/motion-gan-pipeline/preprocessing/face_tracking/3DMM/topology_info.npy
new file mode 100644
index 0000000..854f184
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/face_tracking/3DMM/topology_info.npy
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2edff6a6ad574d2dddf0d0815e0beabfea9369d7c2d6e53e0ba81f809b81e963
+size 4145201
diff --git a/motion-gan-pipeline/preprocessing/face_tracking/FLAME/FLAME.py b/motion-gan-pipeline/preprocessing/face_tracking/FLAME/FLAME.py
new file mode 100755
index 0000000..4aaeda8
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/face_tracking/FLAME/FLAME.py
@@ -0,0 +1,332 @@
+# -*- coding: utf-8 -*-
+#
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# Using this computer program means that you agree to the terms
+# in the LICENSE file included with this software distribution.
+# Any use not explicitly granted by the LICENSE is prohibited.
+#
+# Copyright©2019 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# For comments or questions, please email us at deca@tue.mpg.de
+# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de
+
+import torch
+import torch.nn as nn
+import numpy as np
+import pickle
+import torch.nn.functional as F
+
+from .lbs import lbs, batch_rodrigues, vertices2landmarks, rot_mat_to_euler
+from ..util import *
+
+
+def to_tensor(array, dtype=torch.float32):
+    if 'torch.tensor' not in str(type(array)):
+        return torch.tensor(array, dtype=dtype)
+    return array  # pass tensors through unchanged instead of returning None
+
+
+def to_np(array, dtype=np.float32):
+    if 'scipy.sparse' in str(type(array)):
+        array = array.todense()
+    return np.array(array, dtype=dtype)
+
+
+class Struct(object):
+    def __init__(self, **kwargs):
+        for key, val in kwargs.items():
+            setattr(self, key, val)
+
+
+class FLAME(nn.Module):
+    """
+    borrowed from https://github.com/soubhiksanyal/FLAME_PyTorch/blob/master/FLAME.py
+    Given FLAME parameters, this class generates a differentiable FLAME function
+    which outputs a mesh and 2D/3D facial landmarks
+    """
+    def __init__(self, config):
+        super(FLAME, self).__init__()
+        print("creating the FLAME Decoder")
+        with open(config.flame_model_path, 'rb') as f:
+            ss = pickle.load(f, encoding='latin1')
+            flame_model = Struct(**ss)
+
+        self.scale_factor = torch.tensor(7.4254, dtype=torch.float).cuda()
+        # self.scale_factor = torch.tensor(1.0, dtype=torch.float).cuda()
+
+        # self.dtype = torch.float32
+        self.dtype = torch.double
+
+        self.register_buffer('faces_tensor', to_tensor(to_np(flame_model.f, dtype=np.int64), dtype=torch.long).cuda())
+        # The vertices of the template model
+        self.register_buffer('v_template', to_tensor(to_np(flame_model.v_template), dtype=self.dtype).cuda())
+        # The shape components and expression
+        shapedirs = to_tensor(to_np(flame_model.shapedirs), dtype=self.dtype)
+        shapedirs = torch.cat([shapedirs[:, :, :config.n_shape], shapedirs[:, :, 300:300 + config.n_exp]], 2).cuda()
+        self.register_buffer('shapedirs', shapedirs)
+        # The pose components
+        num_pose_basis = flame_model.posedirs.shape[-1]
+        posedirs = np.reshape(flame_model.posedirs, [-1, num_pose_basis]).T
+        self.register_buffer('posedirs',
+                             to_tensor(to_np(posedirs), dtype=self.dtype).cuda())
+        #
+        self.register_buffer('J_regressor', to_tensor(to_np(flame_model.J_regressor), dtype=self.dtype).cuda())
+        parents = to_tensor(to_np(flame_model.kintree_table[0])).long(); parents[0] = -1
+        self.register_buffer('parents', parents)
+        self.register_buffer('lbs_weights', to_tensor(to_np(flame_model.weights), dtype=self.dtype).cuda())
+
+        # Fixing Eyeball and neck rotation
+        default_eyball_pose = torch.zeros([1, 6], dtype=self.dtype, requires_grad=False).cuda()
+        self.register_parameter('eye_pose', nn.Parameter(default_eyball_pose,
+                                                         requires_grad=False))
+        default_neck_pose = torch.zeros([1, 3], dtype=self.dtype, requires_grad=False).cuda()
+        self.register_parameter('neck_pose', nn.Parameter(default_neck_pose,
+                                                          requires_grad=False))
+
+        default_pose = torch.zeros([1, config.n_pose], dtype=self.dtype, requires_grad=False).cuda()
+        self.register_parameter('base_pose', nn.Parameter(default_pose,
+                                                          requires_grad=False))
+
+        # Static and Dynamic Landmark embeddings for FLAME
+        lmk_embeddings = np.load(config.flame_lmk_embedding_path, allow_pickle=True, encoding='latin1')
+        lmk_embeddings = lmk_embeddings[()]
+        self.register_buffer('lmk_faces_idx', torch.from_numpy(lmk_embeddings['static_lmk_faces_idx']).long().cuda())
+        self.register_buffer('lmk_bary_coords', torch.from_numpy(lmk_embeddings['static_lmk_bary_coords']).to(self.dtype).cuda())
+        self.register_buffer('dynamic_lmk_faces_idx', lmk_embeddings['dynamic_lmk_faces_idx'].long())
+        self.register_buffer('dynamic_lmk_bary_coords', lmk_embeddings['dynamic_lmk_bary_coords'].to(self.dtype))
+        self.register_buffer('full_lmk_faces_idx', torch.from_numpy(lmk_embeddings['full_lmk_faces_idx']).long().cuda())
+        self.register_buffer('full_lmk_bary_coords', torch.from_numpy(lmk_embeddings['full_lmk_bary_coords']).to(self.dtype).cuda())
+
+        neck_kin_chain = []; NECK_IDX = 1
+        curr_idx = torch.tensor(NECK_IDX, dtype=torch.long)
+        while curr_idx != -1:
+            neck_kin_chain.append(curr_idx)
+            curr_idx = self.parents[curr_idx]
+        self.register_buffer('neck_kin_chain', torch.stack(neck_kin_chain))
+
+    def _find_dynamic_lmk_idx_and_bcoords(self, pose, dynamic_lmk_faces_idx,
+                                          dynamic_lmk_b_coords,
+                                          neck_kin_chain, dtype=torch.float32):
+        """
+        Selects the face contour depending on the relative position of the head
+        Input:
+            vertices: N X num_of_vertices X 3
+            pose: N X full pose
+            dynamic_lmk_faces_idx: The list of contour face indexes
+            dynamic_lmk_b_coords: The list of contour barycentric weights
+            neck_kin_chain: The tree to consider for the relative rotation
+            dtype: Data type
+        return:
+            The contour face indexes and the corresponding barycentric weights
+        """
+
+        batch_size = pose.shape[0]
+
+        aa_pose = torch.index_select(pose.view(batch_size, -1, 3), 1,
+                                     neck_kin_chain)
+        rot_mats = batch_rodrigues(
+            aa_pose.view(-1, 3), dtype=dtype).view(batch_size, -1, 3, 3)
+
+        rel_rot_mat = torch.eye(3, device=pose.device,
+                                dtype=dtype).unsqueeze_(dim=0).expand(batch_size, -1, -1)
+        for idx in range(len(neck_kin_chain)):
+            rel_rot_mat = torch.bmm(rot_mats[:, idx], rel_rot_mat)
+
+        y_rot_angle = torch.round(
+            torch.clamp(rot_mat_to_euler(rel_rot_mat) * 180.0 / np.pi,
+                        max=39)).to(dtype=torch.long)
+
+        neg_mask = y_rot_angle.lt(0).to(dtype=torch.long)
+        mask = y_rot_angle.lt(-39).to(dtype=torch.long)
+        neg_vals = mask * 78 + (1 - mask) * (39 - y_rot_angle)
+        y_rot_angle = (neg_mask * neg_vals +
+                       (1 - neg_mask) * y_rot_angle)
+
+        dyn_lmk_faces_idx = torch.index_select(dynamic_lmk_faces_idx,
+                                               0, y_rot_angle)
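+        # The same clamped yaw index selects the matching barycentric weights
+        # from the pre-computed look-up table below.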
+        dyn_lmk_b_coords = torch.index_select(dynamic_lmk_b_coords,
+                                              0, y_rot_angle)
+        return dyn_lmk_faces_idx, dyn_lmk_b_coords
+
+    def _vertices2landmarks(self, vertices, faces, lmk_faces_idx, lmk_bary_coords):
+        """
+        Calculates landmarks by barycentric interpolation
+        Input:
+            vertices: torch.tensor NxVx3, dtype = torch.float32
+                The tensor of input vertices
+            faces: torch.tensor (N*F)x3, dtype = torch.long
+                The faces of the mesh
+            lmk_faces_idx: torch.tensor N X L, dtype = torch.long
+                The tensor with the indices of the faces used to calculate the
+                landmarks.
+            lmk_bary_coords: torch.tensor N X L X 3, dtype = torch.float32
+                The tensor of barycentric coordinates that are used to interpolate
+                the landmarks
+
+        Returns:
+            landmarks: torch.tensor NxLx3, dtype = torch.float32
+                The coordinates of the landmarks for each mesh in the batch
+        """
+        # Extract the indices of the vertices for each face
+        # NxLx3
+        batch_size, num_verts = vertices.shape[:2]
+        lmk_faces = torch.index_select(faces, 0, lmk_faces_idx.view(-1)).view(
+            1, -1, 3).view(batch_size, lmk_faces_idx.shape[1], -1)
+
+        lmk_faces += torch.arange(batch_size, dtype=torch.long).view(-1, 1, 1).to(
+            device=vertices.device) * num_verts
+
+        lmk_vertices = vertices.view(-1, 3)[lmk_faces]
+        landmarks = torch.einsum('blfi,blf->bli', [lmk_vertices, lmk_bary_coords])
+        return landmarks
+
+    def seletec_3d68(self, vertices):
+        landmarks3d = vertices2landmarks(vertices, self.faces_tensor,
+                                         self.full_lmk_faces_idx.repeat(vertices.shape[0], 1),
+                                         self.full_lmk_bary_coords.repeat(vertices.shape[0], 1, 1))
+        return landmarks3d
+
+    def get_3dlandmarks(self, shape_params, expression_params, euler_angle, trans, focal_length, cxy):
+
+        vertices = self.forward_geo(shape_params, expression_params, euler_angle, trans, rot=True)
+
+        bz = vertices.shape[0]
+        landmarks3d = vertices2landmarks(vertices, self.faces_tensor,
+                                         self.full_lmk_faces_idx.repeat(bz, 1),
+                                         self.full_lmk_bary_coords.repeat(bz, 1, 1))
+
+        return landmarks3d.cuda()
+
+    def get_3dlandmarks_special(self, shape_params, expression_params, euler_angle, trans, idx):
+
+        vertices = self.forward_geo(shape_params, expression_params, euler_angle, trans, rot=True)
+
+        # New vertices
+        new_faces = np.array([idx])
+        device = shape_params.get_device()
+        new_faces_idx = torch.from_numpy(new_faces).unsqueeze(0).to(device)
+        new_bary_coord = torch.from_numpy(np.repeat([[0., 0., 1.]], new_faces.shape[0], axis=0)).unsqueeze(0).to(device)
+
+        ldk_faces_idx = torch.cat((self.full_lmk_faces_idx, new_faces_idx), dim=1)
+        ldk_bary_coord = torch.cat((self.full_lmk_bary_coords, new_bary_coord), dim=1)
+
+        bz = vertices.shape[0]
+        landmarks3d = vertices2landmarks(vertices, self.faces_tensor,
+                                         ldk_faces_idx.repeat(bz, 1),
+                                         ldk_bary_coord.repeat(bz, 1, 1))
+
+        return landmarks3d.cuda()
+
+    def get_3dlandmarks_forehead(self, shape_params, expression_params, euler_angle, trans, arg_focal, cxy):
+
+        vertices = self.forward_geo(shape_params, expression_params, euler_angle, trans, rot=True)
+
+        # New vertices
+        # new_faces = np.array([2883, 2875, 2878, 7926, 2865, 7970, 2909, 2912, 2933, 7473, 2411, 161, 3788, 1407, 1459, 2540, 1915, 1328, 1355, 1358, 1361])
+        # new_faces = np.array([2875, 7926, 7748, 1293, 1344, 3836, 1431, 1328, 1358])  # manually found in mesh
+        new_faces = np.array([1358, 1328, 1431, 3836, 1344, 1293, 7748, 7926, 2875])  # manually found in mesh
+
+        device = shape_params.get_device()
+        new_faces_idx = torch.from_numpy(new_faces).unsqueeze(0).to(device)
+        new_bary_coord = torch.from_numpy(np.repeat([[0., 0., 1.]], new_faces.shape[0], axis=0)).unsqueeze(0).to(device)
+
+        ldk_faces_idx = torch.cat((self.full_lmk_faces_idx, new_faces_idx), dim=1)
+        ldk_bary_coord = torch.cat((self.full_lmk_bary_coords, new_bary_coord), dim=1)
+
+        bz = vertices.shape[0]
+        landmarks3d = vertices2landmarks(vertices, self.faces_tensor,
+                                         ldk_faces_idx.repeat(bz, 1),
+                                         ldk_bary_coord.repeat(bz, 1, 1))
+
+        return landmarks3d.cuda()
+
+    def get_3dlandmarks_all(self, shape_params, expression_params, euler_angle, trans):
+
+        vertices = self.forward_geo(shape_params, expression_params, euler_angle, trans, rot=True)
+
+        # New vertices
+        device = shape_params.get_device()
+        new_faces_idx = torch.from_numpy(np.arange(0, self.faces_tensor.shape[0], step=1)).unsqueeze(0).to(device)
+        new_bary_coord = torch.from_numpy(np.repeat([[0., 0., 1.]], self.faces_tensor.shape[0], axis=0)).unsqueeze(0).to(device)
+
+        bz = vertices.shape[0]
+        landmarks3d = vertices2landmarks(vertices, self.faces_tensor,
+                                         new_faces_idx.repeat(bz, 1),
+                                         new_bary_coord.repeat(bz, 1, 1))
+
+        return landmarks3d.cuda()
+
+    def forward_geo(self, shape_params, expression_params, euler_angle, trans, rot=True):
+        batch_size = shape_params.shape[0]
+
+        eye_pose_params = self.eye_pose.expand(batch_size, -1).cuda()
+        betas = torch.cat([shape_params, expression_params[:, :50]], dim=1)
+
+        # TODO: fix FLAME jaw pose
+        full_pose = torch.cat(
+            [torch.zeros_like(euler_angle),
+             self.neck_pose.expand(batch_size, -1),
+             expression_params[:, 50:].expand(batch_size, -1),
+             eye_pose_params], dim=1)
+
+        template_vertices = self.v_template.unsqueeze(0).expand(batch_size, -1, -1).cuda()
+
+        vertices, _ = lbs(betas, full_pose, template_vertices,
+                          self.shapedirs, self.posedirs,
+                          self.J_regressor, self.parents,
+                          self.lbs_weights, dtype=self.dtype)
+
+        # Scale to Basel scale
+        scale = self.scale_factor.expand(1, 3)
+        vertices = torch.mul(vertices, scale).double()
+
+        # Rotate and Translate mesh
+        if rot:
+            vertices = forward_rott(vertices, euler_angle, trans)
+
+        return vertices.cuda()
+
+    def forward(self, shape_params=None, expression_params=None, pose_params=None, eye_pose_params=None):
+        """
+        Input:
+            shape_params: N X number of shape parameters
+            expression_params: N X number of expression parameters
+            pose_params: N X number of pose parameters (6)
+        return:
+            vertices: N X V X 3
+            landmarks: N X number of landmarks X 3
+        """
+        batch_size = shape_params.shape[0]
+        if eye_pose_params is None:
+            eye_pose_params = self.eye_pose.expand(batch_size, -1).cuda()
+        betas = torch.cat([shape_params, expression_params[:, :50]], dim=1)
+        full_pose = torch.cat([pose_params[:, :3], self.neck_pose.expand(batch_size, -1), pose_params[:, 3:], eye_pose_params], dim=1)
+        template_vertices = self.v_template.unsqueeze(0).expand(batch_size, -1, -1).cuda()
+
+        vertices, _ = lbs(betas, full_pose, template_vertices,
+                          self.shapedirs, self.posedirs,
+                          self.J_regressor, self.parents,
+                          self.lbs_weights, dtype=self.dtype)
+
+        scale = self.scale_factor.expand(1, 3)
+        vertices = torch.mul(vertices, scale).double()
+
+        lmk_faces_idx = self.lmk_faces_idx.unsqueeze(dim=0).expand(batch_size, -1)
+        lmk_bary_coords = self.lmk_bary_coords.unsqueeze(dim=0).expand(batch_size, -1, -1)
+
+        dyn_lmk_faces_idx, dyn_lmk_bary_coords = self._find_dynamic_lmk_idx_and_bcoords(
+            full_pose, self.dynamic_lmk_faces_idx.cuda(),
+            self.dynamic_lmk_bary_coords.cuda(),
+            self.neck_kin_chain.cuda(), dtype=self.dtype)
+        lmk_faces_idx = torch.cat([dyn_lmk_faces_idx, lmk_faces_idx], 1)
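+        # The pose-dependent contour landmarks are prepended to the static set
+        # (17 jaw-line plus 51 inner-face points in the usual 68-landmark layout).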
+        lmk_bary_coords = torch.cat([dyn_lmk_bary_coords, lmk_bary_coords], 1)
+
+        landmarks2d = vertices2landmarks(vertices, self.faces_tensor,
+                                         lmk_faces_idx,
+                                         lmk_bary_coords)
+        bz = vertices.shape[0]
+        landmarks3d = vertices2landmarks(vertices, self.faces_tensor,
+                                         self.full_lmk_faces_idx.repeat(bz, 1),
+                                         self.full_lmk_bary_coords.repeat(bz, 1, 1))
+        return vertices, landmarks2d, landmarks3d
diff --git a/motion-gan-pipeline/preprocessing/face_tracking/FLAME/__init__.py b/motion-gan-pipeline/preprocessing/face_tracking/FLAME/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/motion-gan-pipeline/preprocessing/face_tracking/FLAME/config.py b/motion-gan-pipeline/preprocessing/face_tracking/FLAME/config.py
new file mode 100644
index 0000000..7bd0827
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/face_tracking/FLAME/config.py
@@ -0,0 +1,80 @@
+'''
+Default config for DECA
+'''
+from yacs.config import CfgNode as CN
+import argparse
+import yaml
+import os
+
+cfg = CN()
+
+abs_deca_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..'))
+cfg.deca_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '../../third/DECA/'))
+
+cfg.device = 'cuda'
+cfg.device_id = '0'
+
+cfg.pretrained_modelpath = os.path.join(cfg.deca_dir, 'data', 'deca_model.tar')
+
+# ---------------------------------------------------------------------------- #
+# Options for Face model
+# ---------------------------------------------------------------------------- #
+cfg.model = CN()
+cfg.model.topology_path = os.path.join(cfg.deca_dir, 'data', 'head_template.obj')
+# texture data original from http://files.is.tue.mpg.de/tbolkart/FLAME/FLAME_texture_data.zip
+cfg.model.dense_template_path = os.path.join(cfg.deca_dir, 'data', 'texture_data_256.npy')
+cfg.model.fixed_displacement_path = os.path.join(cfg.deca_dir, 'data', 'fixed_displacement_256.npy')
+cfg.model.flame_model_path = os.path.join(cfg.deca_dir, 'data', 'generic_model.pkl')
+cfg.model.flame_lmk_embedding_path = os.path.join(cfg.deca_dir, 'data', 'landmark_embedding.npy')
+cfg.model.face_mask_path = os.path.join(cfg.deca_dir, 'data', 'uv_face_mask.png')
+cfg.model.face_eye_mask_path = os.path.join(cfg.deca_dir, 'data', 'uv_face_eye_mask.png')
+cfg.model.mean_tex_path = os.path.join(cfg.deca_dir, 'data', 'mean_texture.jpg')
+cfg.model.tex_path = os.path.join(cfg.deca_dir, 'data', 'FLAME_albedo_from_BFM.npz')
+cfg.model.tex_type = 'BFM'  # BFM, FLAME, albedoMM
+cfg.model.uv_size = 256
+cfg.model.param_list = ['shape', 'tex', 'exp', 'pose', 'cam', 'light']
+cfg.model.n_shape = 100
+cfg.model.n_tex = 50
+cfg.model.n_exp = 50
+cfg.model.n_cam = 3
+cfg.model.n_pose = 6
+cfg.model.n_light = 27
+cfg.model.use_tex = False
+cfg.model.jaw_type = 'aa'  # default use axis angle, another option: euler
+
+## details
+cfg.model.n_detail = 128
+cfg.model.max_z = 0.01
+
+# ---------------------------------------------------------------------------- #
+# Options for Dataset
+# ---------------------------------------------------------------------------- #
+cfg.dataset = CN()
+cfg.dataset.batch_size = 24
+cfg.dataset.num_workers = 2
+cfg.dataset.image_size = 224
+
+def get_cfg_defaults():
+    """Get a yacs CfgNode object with default values for my_project."""
+    # Return a clone so that the defaults will not be altered
+    # This is for the "local variable" use pattern
+    return cfg.clone()
+
+def update_cfg(cfg, cfg_file):
+    cfg.merge_from_file(cfg_file)
+    return cfg.clone()
+
+def parse_args():
+    parser = argparse.ArgumentParser()
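+    # --cfg optionally names a YAML file; update_cfg() merges its values over
+    # the defaults defined above and a merged clone is returned.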
+    parser.add_argument('--cfg', type=str, help='cfg file path')
+
+    args = parser.parse_args()
+    print(args, end='\n\n')
+
+    cfg = get_cfg_defaults()
+    if args.cfg is not None:
+        cfg_file = args.cfg
+        cfg = update_cfg(cfg, args.cfg)
+        cfg.cfg_file = cfg_file
+
+    return cfg
diff --git a/motion-gan-pipeline/preprocessing/face_tracking/FLAME/lbs.py b/motion-gan-pipeline/preprocessing/face_tracking/FLAME/lbs.py
new file mode 100755
index 0000000..9c0fd0a
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/face_tracking/FLAME/lbs.py
@@ -0,0 +1,378 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2019 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: ps-license@tuebingen.mpg.de
+
+from __future__ import absolute_import
+from __future__ import print_function
+from __future__ import division
+
+import numpy as np
+
+import torch
+import torch.nn.functional as F
+
+def rot_mat_to_euler(rot_mats):
+    # Calculates rotation matrix to euler angles
+    # Careful for extreme cases of euler angles like [0.0, pi, 0.0]
+
+    sy = torch.sqrt(rot_mats[:, 0, 0] * rot_mats[:, 0, 0] +
+                    rot_mats[:, 1, 0] * rot_mats[:, 1, 0])
+    return torch.atan2(-rot_mats[:, 2, 0], sy)
+
+def find_dynamic_lmk_idx_and_bcoords(vertices, pose, dynamic_lmk_faces_idx,
+                                     dynamic_lmk_b_coords,
+                                     neck_kin_chain, dtype=torch.float32):
+    ''' Compute the faces, barycentric coordinates for the dynamic landmarks
+
+
+    To do so, we first compute the rotation of the neck around the y-axis
+    and then use a pre-computed look-up table to find the faces and the
+    barycentric coordinates that will be used.
+
+    Special thanks to Soubhik Sanyal (soubhik.sanyal@tuebingen.mpg.de)
+    for providing the original TensorFlow implementation and for the LUT.
+
+    Parameters
+    ----------
+    vertices: torch.tensor BxVx3, dtype = torch.float32
+        The tensor of input vertices
+    pose: torch.tensor Bx(Jx3), dtype = torch.float32
+        The current pose of the body model
+    dynamic_lmk_faces_idx: torch.tensor L, dtype = torch.long
+        The look-up table from neck rotation to faces
+    dynamic_lmk_b_coords: torch.tensor Lx3, dtype = torch.float32
+        The look-up table from neck rotation to barycentric coordinates
+    neck_kin_chain: list
+        A python list that contains the indices of the joints that form the
+        kinematic chain of the neck.
+    dtype: torch.dtype, optional
+
+    Returns
+    -------
+    dyn_lmk_faces_idx: torch.tensor, dtype = torch.long
+        A tensor of size BxL that contains the indices of the faces that
+        will be used to compute the current dynamic landmarks.
+    dyn_lmk_b_coords: torch.tensor, dtype = torch.float32
+        A tensor of size BxLx3 that contains the barycentric coordinates that
+        will be used to compute the current dynamic landmarks.
+    '''
+
+    batch_size = vertices.shape[0]
+
+    aa_pose = torch.index_select(pose.view(batch_size, -1, 3), 1,
+                                 neck_kin_chain)
+    rot_mats = batch_rodrigues(
+        aa_pose.view(-1, 3), dtype=dtype).view(batch_size, -1, 3, 3)
+
+    rel_rot_mat = torch.eye(3, device=vertices.device,
+                            dtype=dtype).unsqueeze_(dim=0)
+    for idx in range(len(neck_kin_chain)):
+        rel_rot_mat = torch.bmm(rot_mats[:, idx], rel_rot_mat)
+
+    y_rot_angle = torch.round(
+        torch.clamp(-rot_mat_to_euler(rel_rot_mat) * 180.0 / np.pi,
+                    max=39)).to(dtype=torch.long)
+    neg_mask = y_rot_angle.lt(0).to(dtype=torch.long)
+    mask = y_rot_angle.lt(-39).to(dtype=torch.long)
+    neg_vals = mask * 78 + (1 - mask) * (39 - y_rot_angle)
+    y_rot_angle = (neg_mask * neg_vals +
+                   (1 - neg_mask) * y_rot_angle)
+
+    dyn_lmk_faces_idx = torch.index_select(dynamic_lmk_faces_idx,
+                                           0, y_rot_angle)
+    dyn_lmk_b_coords = torch.index_select(dynamic_lmk_b_coords,
+                                          0, y_rot_angle)
+
+    return dyn_lmk_faces_idx, dyn_lmk_b_coords
+
+
+def vertices2landmarks(vertices, faces, lmk_faces_idx, lmk_bary_coords):
+    ''' Calculates landmarks by barycentric interpolation
+
+    Parameters
+    ----------
+    vertices: torch.tensor BxVx3, dtype = torch.float32
+        The tensor of input vertices
+    faces: torch.tensor Fx3, dtype = torch.long
+        The faces of the mesh
+    lmk_faces_idx: torch.tensor L, dtype = torch.long
+        The tensor with the indices of the faces used to calculate the
+        landmarks.
+    lmk_bary_coords: torch.tensor Lx3, dtype = torch.float32
+        The tensor of barycentric coordinates that are used to interpolate
+        the landmarks
+
+    Returns
+    -------
+    landmarks: torch.tensor BxLx3, dtype = torch.float32
+        The coordinates of the landmarks for each mesh in the batch
+    '''
+    # Extract the indices of the vertices for each face
+    # BxLx3
+    batch_size, num_verts = vertices.shape[:2]
+    device = vertices.device
+
+    lmk_faces = torch.index_select(faces, 0, lmk_faces_idx.view(-1)).view(
+        batch_size, -1, 3)
+
+    lmk_faces += torch.arange(
+        batch_size, dtype=torch.long, device=device).view(-1, 1, 1) * num_verts
+
+    lmk_vertices = vertices.contiguous().view(-1, 3)[lmk_faces].view(
+        batch_size, -1, 3, 3)
+
+    landmarks = torch.einsum('blfi,blf->bli', [lmk_vertices, lmk_bary_coords])
+    return landmarks
+
+
+def lbs(betas, pose, v_template, shapedirs, posedirs, J_regressor, parents,
+        lbs_weights, pose2rot=True, dtype=torch.double):
+    ''' Performs Linear Blend Skinning with the given shape and pose parameters
+
+    Parameters
+    ----------
+    betas : torch.tensor BxNB
+        The tensor of shape parameters
+    pose : torch.tensor Bx(J + 1) * 3
+        The pose parameters in axis-angle format
+    v_template torch.tensor BxVx3
+        The template mesh that will be deformed
+    shapedirs : torch.tensor 1xNB
+        The tensor of PCA shape displacements
+    posedirs : torch.tensor Px(V * 3)
+        The pose PCA coefficients
+    J_regressor : torch.tensor JxV
+        The regressor array that is used to calculate the joints from
+        the position of the vertices
+    parents: torch.tensor J
+        The array that describes the kinematic tree for the model
+    lbs_weights: torch.tensor N x V x (J + 1)
+        The linear blend skinning weights that represent how much the
+        rotation matrix of each part affects each vertex
+    pose2rot: bool, optional
+        Flag on whether to convert the input pose tensor to rotation
+        matrices. The default value is True.
+        If False, then the pose tensor
+        should already contain rotation matrices and have a size of
+        Bx(J + 1)x9
+    dtype: torch.dtype, optional
+
+    Returns
+    -------
+    verts: torch.tensor BxVx3
+        The vertices of the mesh after applying the shape and pose
+        displacements.
+    joints: torch.tensor BxJx3
+        The joints of the model
+    '''
+
+    batch_size = max(betas.shape[0], pose.shape[0])
+    device = betas.device
+
+    # Add shape contribution
+    v_shaped = v_template + blend_shapes(betas, shapedirs)
+
+    # Get the joints
+    # NxJx3 array
+    J = vertices2joints(J_regressor.type(dtype), v_shaped.type(dtype))
+
+    # 3. Add pose blend shapes
+    # N x J x 3 x 3
+    ident = torch.eye(3, dtype=dtype, device=device)
+    if pose2rot:
+        rot_mats = batch_rodrigues(
+            pose.view(-1, 3), dtype=dtype).view([batch_size, -1, 3, 3])
+
+        pose_feature = (rot_mats[:, 1:, :, :] - ident).view([batch_size, -1])
+        # (N x P) x (P, V * 3) -> N x V x 3
+        pose_offsets = torch.matmul(pose_feature, posedirs) \
+            .view(batch_size, -1, 3)
+    else:
+        pose_feature = pose[:, 1:].view(batch_size, -1, 3, 3) - ident
+        rot_mats = pose.view(batch_size, -1, 3, 3)
+
+        pose_offsets = torch.matmul(pose_feature.view(batch_size, -1),
+                                    posedirs).view(batch_size, -1, 3)
+
+    v_posed = pose_offsets + v_shaped
+    # 4. Get the global joint location
+    J_transformed, A = batch_rigid_transform(rot_mats, J, parents, dtype=dtype)
+
+    # 5. Do skinning:
+    # W is N x V x (J + 1)
+    W = lbs_weights.unsqueeze(dim=0).expand([batch_size, -1, -1])
+    # (N x V x (J + 1)) x (N x (J + 1) x 16)
+    num_joints = J_regressor.shape[0]
+    T = torch.matmul(W, A.view(batch_size, num_joints, 16)) \
+        .view(batch_size, -1, 4, 4)
+
+    homogen_coord = torch.ones([batch_size, v_posed.shape[1], 1],
+                               dtype=dtype, device=device)
+    v_posed_homo = torch.cat([v_posed, homogen_coord], dim=2)
+    v_homo = torch.matmul(T, torch.unsqueeze(v_posed_homo, dim=-1))
+
+    verts = v_homo[:, :, :3, 0]
+
+    return verts, J_transformed
+
+
+def vertices2joints(J_regressor, vertices):
+    ''' Calculates the 3D joint locations from the vertices
+
+    Parameters
+    ----------
+    J_regressor : torch.tensor JxV
+        The regressor array that is used to calculate the joints from the
+        position of the vertices
+    vertices : torch.tensor BxVx3
+        The tensor of mesh vertices
+
+    Returns
+    -------
+    torch.tensor BxJx3
+        The location of the joints
+    '''
+
+    return torch.einsum('bik,ji->bjk', [vertices, J_regressor])
+
+
+def blend_shapes(betas, shape_disps):
+    ''' Calculates the per vertex displacement due to the blend shapes
+
+
+    Parameters
+    ----------
+    betas : torch.tensor Bx(num_betas)
+        Blend shape coefficients
+    shape_disps: torch.tensor Vx3x(num_betas)
+        Blend shapes
+
+    Returns
+    -------
+    torch.tensor BxVx3
+        The per-vertex displacement due to shape deformation
+    '''
+
+    # Displacement[b, m, k] = sum_{l} betas[b, l] * shape_disps[m, k, l]
+    # i.e. Multiply each shape displacement by its corresponding beta and
+    # then sum them.
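+    # Shapes: betas (B, num_betas) contracted with shape_disps (V, 3, num_betas)
+    # over the last axis yields blend_shape (B, V, 3).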
+    blend_shape = torch.einsum('bl,mkl->bmk', [betas.double(), shape_disps.double()])
+    return blend_shape
+
+
+def batch_rodrigues(rot_vecs, epsilon=1e-8, dtype=torch.float32):
+    ''' Calculates the rotation matrices for a batch of rotation vectors
+        Parameters
+        ----------
+        rot_vecs: torch.tensor Nx3
+            array of N axis-angle vectors
+        Returns
+        -------
+        R: torch.tensor Nx3x3
+            The rotation matrices for the given axis-angle parameters
+    '''
+
+    batch_size = rot_vecs.shape[0]
+    device = rot_vecs.device
+
+    # epsilon guards against division by zero for near-zero rotation vectors
+    angle = torch.norm(rot_vecs + epsilon, dim=1, keepdim=True)
+    rot_dir = rot_vecs / angle
+
+    cos = torch.unsqueeze(torch.cos(angle), dim=1)
+    sin = torch.unsqueeze(torch.sin(angle), dim=1)
+
+    # Bx1 arrays
+    rx, ry, rz = torch.split(rot_dir, 1, dim=1)
+
+    # Build the skew-symmetric cross-product matrix K of each rotation axis
+    zeros = torch.zeros((batch_size, 1), dtype=dtype, device=device)
+    K = torch.cat([zeros, -rz, ry, rz, zeros, -rx, -ry, rx, zeros], dim=1) \
+        .view((batch_size, 3, 3))
+
+    # Rodrigues' formula: R = I + sin(theta) * K + (1 - cos(theta)) * K^2
+    ident = torch.eye(3, dtype=dtype, device=device).unsqueeze(dim=0)
+    rot_mat = ident + sin * K + (1 - cos) * torch.bmm(K, K)
+    return rot_mat
+
+
+def transform_mat(R, t):
+    ''' Creates a batch of transformation matrices
+        Args:
+            - R: Bx3x3 array of a batch of rotation matrices
+            - t: Bx3x1 array of a batch of translation vectors
+        Returns:
+            - T: Bx4x4 Transformation matrix
+    '''
+    # No padding left or right, only add an extra row
+    return torch.cat([F.pad(R, [0, 0, 0, 1]),
+                      F.pad(t, [0, 0, 0, 1], value=1)], dim=2)
+
+
+def batch_rigid_transform(rot_mats, joints, parents, dtype=torch.float32):
+    """
+    Applies a batch of rigid transformations to the joints
+
+    Parameters
+    ----------
+    rot_mats : torch.tensor BxNx3x3
+        Tensor of rotation matrices
+    joints : torch.tensor BxNx3
+        Locations of joints
+    parents : torch.tensor BxN
+        The kinematic tree of each object
+    dtype : torch.dtype, optional:
+        The data type of the created tensors, the default is torch.float32
+
+    Returns
+    -------
+    posed_joints : torch.tensor BxNx3
+        The locations of the joints after applying the pose rotations
+    rel_transforms : torch.tensor BxNx4x4
+        The relative (with respect to the root joint) rigid transformations
+        for all the joints
+    """
+
+    joints = torch.unsqueeze(joints, dim=-1)
+
+    rel_joints = joints.clone()
+    rel_joints[:, 1:] -= joints[:, parents[1:]]
+
+    transforms_mat = transform_mat(
+        rot_mats.view(-1, 3, 3),
+        rel_joints.reshape(-1, 3, 1)).reshape(-1, joints.shape[1], 4, 4)
+
+    transform_chain = [transforms_mat[:, 0]]
+    for i in range(1, parents.shape[0]):
+        # Subtract the joint location at the rest pose
+        # No need for rotation, since it's identity when at rest
+        curr_res = torch.matmul(transform_chain[parents[i]],
+                                transforms_mat[:, i])
+        transform_chain.append(curr_res)
+
+    transforms = torch.stack(transform_chain, dim=1)
+
+    # The last column of the transformations contains the posed joints
+    posed_joints = transforms[:, :, :3, 3]
+
+    joints_homogen = F.pad(joints, [0, 0, 0, 1])
+
+    rel_transforms = transforms - F.pad(
+        torch.matmul(transforms, joints_homogen), [3, 0, 0, 0, 0, 0, 0, 0])
+
+    return posed_joints, rel_transforms
\ No newline at end of file
diff --git a/motion-gan-pipeline/preprocessing/face_tracking/__init__.py
b/motion-gan-pipeline/preprocessing/face_tracking/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/motion-gan-pipeline/preprocessing/face_tracking/convert_BFM.py b/motion-gan-pipeline/preprocessing/face_tracking/convert_BFM.py new file mode 100644 index 0000000..88bc010 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/face_tracking/convert_BFM.py @@ -0,0 +1,61 @@ +import numpy as np +from scipy.io import loadmat + +original_BFM = loadmat('3DMM/01_MorphableModel.mat') +sub_inds = np.load('3DMM/topology_info.npy', + allow_pickle=True).item()['sub_inds'] + +print(len(sub_inds)) + +shapePC = original_BFM['shapePC'] +shapeEV = original_BFM['shapeEV'] +shapeMU = original_BFM['shapeMU'] +texPC = original_BFM['texPC'] +texEV = original_BFM['texEV'] +texMU = original_BFM['texMU'] + +print('shapePC: ', shapePC.shape) +print('shapeMU: ', shapeMU.shape) +print('texPC: ', texPC.shape) +print('texMU: ', texMU.shape) + + +b_shape = shapePC.reshape(-1, 199).transpose(1, 0).reshape(199, -1, 3) +mu_shape = shapeMU.reshape(-1, 3) +print('\nshapePC -> b_shape : ', b_shape.shape) +print('shapeMU -> mu_shape : ', mu_shape.shape) + +b_shape = b_shape[:, sub_inds, :].reshape(199, -1) +mu_shape = mu_shape[sub_inds, :].reshape(-1) +print('b_shape -> b_shape sampled : ', b_shape.shape) +print('mu_shape -> mu_shape sampled: ', mu_shape.shape) + +b_tex = texPC.reshape(-1, 199).transpose(1, 0).reshape(199, -1, 3) +mu_tex = texMU.reshape(-1, 3) +print('\ntexPC -> b_tex : ', b_tex.shape) +print('texMU -> mu_tex : ', mu_tex.shape) +b_tex = b_tex[:, sub_inds, :].reshape(199, -1) +mu_tex = mu_tex[sub_inds, :].reshape(-1) +print('b_tex -> b_tex sampled : ', b_tex.shape) +print('mu_tex -> mu_tex sampled: ', mu_tex.shape) + +exp_info = np.load('3DMM/exp_info.npy', allow_pickle=True).item() +print('\nexp_info: ') +for key in exp_info.keys(): + print(f'{key} shape: {exp_info[key].shape}') + +info = {'mu_shape': mu_shape, + 'b_shape': b_shape, + 'sig_shape': shapeEV.reshape(-1), + 'mu_exp': exp_info['mu_exp'], + 'b_exp': exp_info['base_exp'], + 'sig_exp': exp_info['sig_exp'], + 'mu_tex': mu_tex, + 'b_tex': b_tex, + 'sig_tex': texEV.reshape(-1)} + +print('\nFinal: ') +for key in info.keys(): + print(f'{key} shape: {info[key].shape}') + +np.save('3DMM/3DMM_info.npy', info) diff --git a/motion-gan-pipeline/preprocessing/face_tracking/data_loader.py b/motion-gan-pipeline/preprocessing/face_tracking/data_loader.py new file mode 100644 index 0000000..64a292e --- /dev/null +++ b/motion-gan-pipeline/preprocessing/face_tracking/data_loader.py @@ -0,0 +1,18 @@ +import torch +import cv2 +import numpy as np +import os + + +def load_dir(lmspath, framepath, start, end): + lmss = [] + imgs_paths = [] + for i in range(start, end): + if os.path.isfile(os.path.join(lmspath, '%05d.lms' % i)): + lms = np.loadtxt(os.path.join( + lmspath, '%05d.lms' % i), dtype=np.float32) + lmss.append(lms) + imgs_paths.append(os.path.join(framepath, '%05d.jpg' % i)) + lmss = np.stack(lmss) + lmss = torch.as_tensor(lmss).cuda() + return lmss, imgs_paths diff --git a/motion-gan-pipeline/preprocessing/face_tracking/face_tracker.py b/motion-gan-pipeline/preprocessing/face_tracking/face_tracker.py new file mode 100644 index 0000000..4c2dd37 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/face_tracking/face_tracker.py @@ -0,0 +1,663 @@ +from numpy.core.numeric import require +from numpy.lib.function_base import quantile +import torch +import numpy as np +from .facemodel import Face_3DMM +from .data_loader import load_dir +from .util import * 
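+# Usage sketch for `load_dir` (imported above; paths are illustrative):
+#   lms, img_paths = load_dir('<id_dir>/landmarks', '<id_dir>/frames', 0, 300)
+# It pairs every frame index that has a '%05d.lms' landmark file with its
+# '%05d.jpg' frame path and returns the landmarks stacked as a single CUDA
+# tensor, so lms.shape[0] == len(img_paths).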
+from .render_3dmm import Render_3DMM, Render_FLAME +from .FLAME.FLAME import FLAME +from .FLAME.config import cfg +import os +import sys +# import openmesh +import cv2 +import argparse +from pathlib import Path +from tqdm import tqdm +import matplotlib.pyplot as plt +from PIL import Image + +from pytorch3d.io import IO, save_obj + +# def np2mesh(mesh, xnp, path): +# mesh.points()[:] = xnp +# openmesh.write_mesh(path, mesh, binary=True) + + +def track_face_FLAME(dataset_base, h, w, frame_num, out_path, decaexpr_dir, expr_masks_dir): + ''' + Face tracker using FLAME model. + Used to have geometry prior for nerf sampling. + ''' + + def set_requires_grad(tensor_list): + for tensor in tensor_list: + tensor.requires_grad = True + + start_id = 0 + end_id = frame_num + + id_dir = dataset_base + + debug_ldk_dir = os.path.join(id_dir, 'debug', 'debug_landmarks') + Path(debug_ldk_dir).mkdir(parents=True, exist_ok=True) + debug_render_dir = os.path.join(id_dir, 'debug', 'debug_render') + Path(debug_render_dir).mkdir(parents=True, exist_ok=True) + debug_meshes_dir = os.path.join(id_dir, 'debug', 'debug_meshes') + Path(debug_meshes_dir).mkdir(parents=True, exist_ok=True) + + lms, img_paths = load_dir(os.path.join(id_dir, 'landmarks'), os.path.join(id_dir, 'frames'), start_id, end_id) + + num_frames = lms.shape[0] + cxy = torch.tensor((w / 2.0, h / 2.0), dtype=torch.float).cuda() + # TODO: include jaw pose flame + id_dim, exp_dim, tex_dim = 100, 53, 50 + model_3dmm = FLAME(cfg.model) + + sel_ids = np.arange(0, num_frames, 40) + sel_num = sel_ids.shape[0] + arg_focal = 1600 + arg_landis = 1e5 + + for focal in tqdm(range(600, 1500, 100)): + id_para = lms.new_zeros((1, id_dim), requires_grad=True) + exp_para = lms.new_zeros((sel_num, exp_dim), requires_grad=True) + euler_angle = lms.new_zeros((sel_num, 3), requires_grad=True) + trans = lms.new_zeros((sel_num, 3), requires_grad=True) + # trans.data[:, 2] -= 1 # DIFFERENT + trans.data[:, 2] -= 7 # ORIGINAL + focal_length = lms.new_zeros(1, requires_grad=False) + focal_length.data += focal + set_requires_grad([id_para, exp_para, euler_angle, trans]) + + optimizer_idexp = torch.optim.Adam([id_para, exp_para], lr=.1) + optimizer_frame = torch.optim.Adam([euler_angle, trans], lr=.1) # Change + + for iter in range(2000): + id_para_batch = id_para.expand(sel_num, -1) + + geometry = model_3dmm.get_3dlandmarks( + id_para_batch, exp_para, euler_angle, trans, focal_length, cxy) + + proj_geo = proj_pts(geometry, focal_length, cxy) # Different: landmarks are already rotated here + + loss_lan = cal_lan_loss( + proj_geo[:, :, :2], lms[sel_ids].detach()) + loss = loss_lan + optimizer_frame.zero_grad() + loss.backward() + optimizer_frame.step() + if iter % 100 == 0 and False: + print(focal, 'pose', iter, loss.item()) + + for iter in range(2500): + id_para_batch = id_para.expand(sel_num, -1) + + geometry = model_3dmm.get_3dlandmarks( + id_para_batch, exp_para, euler_angle, trans, focal_length, cxy) + + proj_geo = proj_pts(geometry, focal_length, cxy) + + loss_lan = cal_lan_loss( + proj_geo[:, :, :2], lms[sel_ids].detach()) + loss_regid = torch.mean(id_para * id_para) + loss_regexp = torch.mean(exp_para * exp_para) + loss = loss_lan + loss_regid * 0.5 + loss_regexp * 0.4 + optimizer_idexp.zero_grad() + optimizer_frame.zero_grad() + loss.backward() + optimizer_idexp.step() + optimizer_frame.step() + if iter % 100 == 0 and False: + print(focal, 'poseidexp', iter, loss_lan.item(), + loss_regid.item(), loss_regexp.item()) + if iter % 1500 == 0 and iter >= 1500: + for param_group 
in optimizer_idexp.param_groups: + param_group['lr'] *= 0.2 + for param_group in optimizer_frame.param_groups: + param_group['lr'] *= 0.2 + # print(focal, loss_lan.item(), torch.mean(trans[:, 2]).item()) + + if loss_lan.item() < arg_landis: + arg_landis = loss_lan.item() + arg_focal = focal + + print('find best focal', arg_focal) + + id_para = lms.new_zeros((1, id_dim), requires_grad=True) + exp_para = lms.new_zeros((num_frames, exp_dim), requires_grad=True) + tex_para = lms.new_zeros((1, tex_dim), requires_grad=True) + euler_angle = lms.new_zeros((num_frames, 3), requires_grad=True) + trans = lms.new_zeros((num_frames, 3), requires_grad=True) + # trans.data[:, 2] -= 1 # DIFFERENT + trans.data[:, 2] -= 7 # ORIGINAL + + light_para = lms.new_zeros((num_frames, 27), requires_grad=True) + + focal_length = lms.new_zeros(1, requires_grad=True) + focal_length.data += arg_focal + + set_requires_grad([id_para, exp_para, tex_para, + euler_angle, trans, light_para]) + + # DIFFERENT + # optimizer_idexp = torch.optim.Adam([id_para, exp_para], lr=.05) + # optimizer_frame = torch.optim.Adam([euler_angle, trans], lr=.05) + + # ORIGINAL + optimizer_idexp = torch.optim.Adam([id_para, exp_para], lr=.1) + optimizer_frame = torch.optim.Adam([euler_angle, trans], lr=1) + + for iter in tqdm(range(1500)): + id_para_batch = id_para.expand(num_frames, -1) + + geometry = model_3dmm.get_3dlandmarks( + id_para_batch, exp_para, euler_angle, trans, focal_length, cxy) + + proj_geo = proj_pts(geometry, focal_length, cxy) # DIFFERENT + + loss_lan = cal_lan_loss( + proj_geo[:, :, :2], lms.detach()) + loss = loss_lan + optimizer_frame.zero_grad() + loss.backward() + optimizer_frame.step() + if iter == 1000: + for param_group in optimizer_frame.param_groups: + param_group['lr'] = 0.1 # ORIGINAL + # param_group['lr'] = 0.01 # DIFFERENT + if iter % 100 == 0 and True: + print('pose', iter, loss.item()) + + # Added save landmarks images for debug + img = Image.open(img_paths[0]) + colormap_blue = plt.cm.Blues + colormap_red = plt.cm.Reds + + # plt.imshow(img) + for num, lin in enumerate(np.linspace(0, 0.9, len(lms[0, :, 0]))): + plt.scatter(lms[0, num, 0].detach().cpu(), + lms[0, num, 1].detach().cpu(), + color=colormap_blue(lin), + s=10) + + plt.scatter(proj_geo[0, num, 0].detach().cpu(), + proj_geo[0, num, 1].detach().cpu(), + color=colormap_red(lin), + s=10) + + plt.savefig(os.path.join(debug_ldk_dir, f'ldk_1_{iter}.png')) + plt.close() + + for param_group in optimizer_frame.param_groups: + param_group['lr'] = 0.1 # ORIGINAL + # param_group['lr'] = 0.05 # DIFFERENT + + for iter in tqdm(range(2000)): + id_para_batch = id_para.expand(num_frames, -1) + + geometry = model_3dmm.get_3dlandmarks( + id_para_batch, exp_para, euler_angle, trans, focal_length, cxy) + + proj_geo = proj_pts(geometry, focal_length, cxy) # DIFFERENT + + loss_lan = cal_lan_loss( + proj_geo[:, :, :2], lms.detach()) + loss_regid = torch.mean(id_para * id_para) + loss_regexp = torch.mean(exp_para * exp_para) + loss = loss_lan + loss_regid * 0.5 + loss_regexp * 0.4 + optimizer_idexp.zero_grad() + optimizer_frame.zero_grad() + loss.backward() + optimizer_idexp.step() + optimizer_frame.step() + if iter % 100 == 0 and True: + print('poseidexp', iter, loss_lan.item(), + loss_regid.item(), loss_regexp.item()) + + img = Image.open(img_paths[0]) + colormap_blue = plt.cm.Blues + colormap_red = plt.cm.Reds + + # plt.imshow(img) + for num, lin in enumerate(np.linspace(0, 0.9, len(lms[0, :, 0]))): + plt.scatter(lms[0, num, 0].detach().cpu(), + lms[0, num, 1].detach().cpu(), 
+ color=colormap_blue(lin), + s=10) + + plt.scatter(proj_geo[0, num, 0].detach().cpu(), + proj_geo[0, num, 1].detach().cpu(), + color=colormap_red(lin), + s=10) + + plt.savefig(os.path.join(debug_ldk_dir, f'ldk_2_{iter}.png')) + plt.close() + + if iter % 1000 == 0 and iter >= 1000: + for param_group in optimizer_idexp.param_groups: + param_group['lr'] *= 0.2 + for param_group in optimizer_frame.param_groups: + param_group['lr'] *= 0.2 + + # THEY DO THIS + exp_para = exp_para.detach() + euler_angle = euler_angle.detach() + trans = trans.detach() + light_para = light_para.detach() + + batch_size = 10 + device_default = torch.device('cuda:0') + device_render = torch.device('cuda:0') + renderer = Render_FLAME(model_3dmm.faces_tensor, arg_focal, h, w, batch_size, device_render) + + for i in tqdm(range(int((num_frames-1)/batch_size+1))): + if (i + 1) * batch_size > num_frames: + sel_ids = np.arange(num_frames - batch_size, num_frames) + else: + sel_ids = np.arange(i * batch_size, i * batch_size + batch_size) + imgs = [] + for sel_id in sel_ids: + imgs.append(cv2.imread(img_paths[sel_id])[:, :, ::-1]) + imgs = np.stack(imgs) + sel_imgs = torch.as_tensor(imgs).cuda() + + sel_exp_para = exp_para.new_zeros( + (batch_size, exp_dim), requires_grad=False) + sel_exp_para.data = exp_para[sel_ids].clone() + sel_euler = euler_angle.new_zeros( + (batch_size, 3), requires_grad=False) + sel_euler.data = euler_angle[sel_ids].clone() + sel_trans = trans.new_zeros((batch_size, 3), requires_grad=False) + sel_trans.data = trans[sel_ids].clone() + sel_light = light_para.new_zeros( + (batch_size, 27), requires_grad=False) + sel_light.data = light_para[sel_ids].clone() + + sel_id_para = id_para.expand(batch_size, -1).detach() + + rott_geo = model_3dmm.forward_geo(sel_id_para, sel_exp_para, sel_euler, sel_trans) + + render_imgs = renderer(rott_geo.to(device_render), model_3dmm.faces_tensor.to(device_render)) + render_imgs = render_imgs.to(device_default) + + mask = (render_imgs[:, :, :, 3]).detach() > 0.0 + mask_img = mask.clone().cpu().numpy() * 255. + + render_proj = sel_imgs.clone() + render_proj[mask] = render_imgs[mask][..., :3].byte() + + for j in range(sel_ids.shape[0]): + img_arr = render_proj[j, :, :, :3].byte().detach().cpu().numpy()[ + :, :, ::-1] + cv2.imwrite(os.path.join(debug_render_dir, str(sel_ids[j]) + '.jpg'), + img_arr) + + cv2.imwrite(os.path.join(expr_masks_dir, str(sel_ids[j]) + '.jpg'), + mask_img[j]) + + # Save expr + np.save(os.path.join(decaexpr_dir, f'{sel_ids[j]}.npy'), sel_exp_para[j].cpu().numpy()) + + # renderer.get_and_save_mesh(rott_geo, os.path.join(debug_meshes_dir, str(sel_ids[j]) + '.obj')) + + print('about to save params..') + + torch.save({'id': id_para.detach().cpu(), 'exp': exp_para.detach().cpu(), + 'euler': euler_angle.detach().cpu(), 'trans': trans.detach().cpu(), + 'focal': focal_length.detach().cpu(), 'light': light_para.detach().cpu(), + 'text': tex_para.detach().cpu()}, + out_path) + + print('params saved') + + +def track_face(dataset_base, h, w, frame_num, out_path, expr_masks_dir): + ''' + Face tracker using partial Basel 2009 model with less vertices. 
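+    Runs three stages: a coarse focal-length sweep (600 to 1400 px, scored by
+    landmark reprojection error on a subset of frames), a landmark-only
+    optimization of identity, expression and pose over all frames, and a
+    final render-based (photometric) refinement processed in batches.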
+ ''' + + dir_path = os.path.dirname(os.path.realpath(__file__)) + + def set_requires_grad(tensor_list): + for tensor in tensor_list: + tensor.requires_grad = True + + start_id = 0 + end_id = frame_num + + id_dir = dataset_base + lms, img_paths = load_dir(os.path.join(id_dir, 'landmarks'), os.path.join(id_dir, 'frames'), start_id, end_id) + num_frames = lms.shape[0] + cxy = torch.tensor((w/2.0, h/2.0), dtype=torch.float).cuda() + id_dim, exp_dim, tex_dim, point_num = 100, 79, 100, 34650 + model_3dmm = Face_3DMM(os.path.join(dir_path, '3DMM'), + id_dim, exp_dim, tex_dim, point_num) + + sel_ids = np.arange(0, num_frames, 40) + sel_num = sel_ids.shape[0] + arg_focal = 1600 + arg_landis = 1e5 + + # FINDING BEST FOCAL + for focal in tqdm(range(600, 1500, 100)): + id_para = lms.new_zeros((1, id_dim), requires_grad=True) + exp_para = lms.new_zeros((sel_num, exp_dim), requires_grad=True) + euler_angle = lms.new_zeros((sel_num, 3), requires_grad=True) + trans = lms.new_zeros((sel_num, 3), requires_grad=True) + trans.data[:, 2] -= 7 + focal_length = lms.new_zeros(1, requires_grad=False) + focal_length.data += focal + set_requires_grad([id_para, exp_para, euler_angle, trans]) + + optimizer_idexp = torch.optim.Adam([id_para, exp_para], lr=.1) + optimizer_frame = torch.optim.Adam( + [euler_angle, trans], lr=.1) + + for iter in range(2000): + id_para_batch = id_para.expand(sel_num, -1) + geometry = model_3dmm.get_3dlandmarks( + id_para_batch, exp_para, euler_angle, trans, focal_length, cxy) + proj_geo = forward_transform( + geometry, euler_angle, trans, focal_length, cxy) + loss_lan = cal_lan_loss( + proj_geo[:, :, :2], lms[sel_ids].detach()) + loss = loss_lan + optimizer_frame.zero_grad() + loss.backward() + optimizer_frame.step() + if iter % 100 == 0 and False: + print(focal, 'pose', iter, loss.item()) + + for iter in range(2500): + id_para_batch = id_para.expand(sel_num, -1) + geometry = model_3dmm.get_3dlandmarks( + id_para_batch, exp_para, euler_angle, trans, focal_length, cxy) + proj_geo = forward_transform( + geometry, euler_angle, trans, focal_length, cxy) + loss_lan = cal_lan_loss( + proj_geo[:, :, :2], lms[sel_ids].detach()) + loss_regid = torch.mean(id_para*id_para) + loss_regexp = torch.mean(exp_para*exp_para) + loss = loss_lan + loss_regid*0.5 + loss_regexp*0.4 + optimizer_idexp.zero_grad() + optimizer_frame.zero_grad() + loss.backward() + optimizer_idexp.step() + optimizer_frame.step() + if iter % 100 == 0 and False: + print(focal, 'poseidexp', iter, loss_lan.item(), + loss_regid.item(), loss_regexp.item()) + if iter % 1500 == 0 and iter >= 1500: + for param_group in optimizer_idexp.param_groups: + param_group['lr'] *= 0.2 + for param_group in optimizer_frame.param_groups: + param_group['lr'] *= 0.2 + print(focal, loss_lan.item(), torch.mean(trans[:, 2]).item()) + + if loss_lan.item() < arg_landis: + arg_landis = loss_lan.item() + arg_focal = focal + + print('find best focal', arg_focal) + + id_para = lms.new_zeros((1, id_dim), requires_grad=True) + exp_para = lms.new_zeros((num_frames, exp_dim), requires_grad=True) + tex_para = lms.new_zeros((1, tex_dim), requires_grad=True) + euler_angle = lms.new_zeros((num_frames, 3), requires_grad=True) + trans = lms.new_zeros((num_frames, 3), requires_grad=True) + light_para = lms.new_zeros((num_frames, 27), requires_grad=True) + trans.data[:, 2] -= 7 + focal_length = lms.new_zeros(1, requires_grad=True) + focal_length.data += arg_focal + + set_requires_grad([id_para, exp_para, tex_para, + euler_angle, trans, light_para]) + + optimizer_idexp = 
torch.optim.Adam([id_para, exp_para], lr=.1) + optimizer_frame = torch.optim.Adam([euler_angle, trans], lr=1) + + # LANDMARK BASED OPTIMIZATION + for iter in tqdm(range(1500)): + id_para_batch = id_para.expand(num_frames, -1) + geometry = model_3dmm.get_3dlandmarks( + id_para_batch, exp_para, euler_angle, trans, focal_length, cxy) + proj_geo = forward_transform( + geometry, euler_angle, trans, focal_length, cxy) + loss_lan = cal_lan_loss( + proj_geo[:, :, :2], lms.detach()) + loss = loss_lan + optimizer_frame.zero_grad() + loss.backward() + optimizer_frame.step() + if iter == 1000: + for param_group in optimizer_frame.param_groups: + param_group['lr'] = 0.1 + if iter % 100 == 0 and False: + print('pose', iter, loss.item()) + + for param_group in optimizer_frame.param_groups: + param_group['lr'] = 0.1 + + img = Image.open(img_paths[0]) + # plt.imshow(img) + plt.scatter(lms[0, :, 0].detach().cpu(), lms[0, :, 1].detach().cpu(), c='r', s=10) + plt.scatter(proj_geo[0, :, 0].detach().cpu(), proj_geo[0, :, 1].detach().cpu(), c='b', s=10) + # plt.show() + + for iter in tqdm(range(2000)): + id_para_batch = id_para.expand(num_frames, -1) + geometry = model_3dmm.get_3dlandmarks( + id_para_batch, exp_para, euler_angle, trans, focal_length, cxy) + proj_geo = forward_transform( + geometry, euler_angle, trans, focal_length, cxy) + loss_lan = cal_lan_loss( + proj_geo[:, :, :2], lms.detach()) + loss_regid = torch.mean(id_para*id_para) + loss_regexp = torch.mean(exp_para*exp_para) + loss = loss_lan + loss_regid*0.5 + loss_regexp*0.4 + optimizer_idexp.zero_grad() + optimizer_frame.zero_grad() + loss.backward() + optimizer_idexp.step() + optimizer_frame.step() + if iter % 100 == 0 and False: + print('poseidexp', iter, loss_lan.item(), + loss_regid.item(), loss_regexp.item()) + if iter % 1000 == 0 and iter >= 1000: + for param_group in optimizer_idexp.param_groups: + param_group['lr'] *= 0.2 + for param_group in optimizer_frame.param_groups: + param_group['lr'] *= 0.2 + print(loss_lan.item(), torch.mean(trans[:, 2]).item()) + + img = Image.open(img_paths[0]) + # plt.imshow(img) + plt.scatter(lms[0, :, 0].detach().cpu(), lms[0, :, 1].detach().cpu(), c='r', s=10) + plt.scatter(proj_geo[0, :, 0].detach().cpu(), proj_geo[0, :, 1].detach().cpu(), c='b', s=10) + # plt.show() + + batch_size = 50 + + device_default = torch.device('cuda:0') + device_render = torch.device('cuda:0') + renderer = Render_3DMM(arg_focal, h, w, batch_size, device_render) + + sel_ids = np.arange(0, num_frames, int(num_frames/batch_size))[:batch_size] + imgs = [] + for sel_id in sel_ids: + imgs.append(cv2.imread(img_paths[sel_id])[:, :, ::-1]) + imgs = np.stack(imgs) + sel_imgs = torch.as_tensor(imgs).cuda() + sel_lms = lms[sel_ids] + sel_light = light_para.new_zeros((batch_size, 27), requires_grad=True) + set_requires_grad([sel_light]) + optimizer_tl = torch.optim.Adam([tex_para, sel_light], lr=.1) + optimizer_id_frame = torch.optim.Adam( + [euler_angle, trans, exp_para, id_para], lr=.01) + + # RENDERING BASED OPTIMIZATION + for iter in tqdm(range(71)): + sel_exp_para, sel_euler, sel_trans = exp_para[sel_ids], euler_angle[sel_ids], trans[sel_ids] + sel_id_para = id_para.expand(batch_size, -1) + geometry = model_3dmm.get_3dlandmarks( + sel_id_para, sel_exp_para, sel_euler, sel_trans, focal_length, cxy) + proj_geo = forward_transform( + geometry, sel_euler, sel_trans, focal_length, cxy) + loss_lan = cal_lan_loss(proj_geo[:, :, :2], sel_lms.detach()) + loss_regid = torch.mean(id_para*id_para) + loss_regexp = torch.mean(sel_exp_para*sel_exp_para) 
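+        # Photometric refinement: the block below textures and rasterizes the
+        # posed mesh and compares it against the video frames. After iteration
+        # 50 the landmark weight drops sharply (3 -> 0.05), shifting the
+        # optimization toward the rendered-color term (see `loss = ...` below).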
+ + sel_tex_para = tex_para.expand(batch_size, -1) + sel_texture = model_3dmm.forward_tex(sel_tex_para) + geometry = model_3dmm.forward_geo(sel_id_para, sel_exp_para) + rott_geo = forward_rott(geometry, sel_euler, sel_trans) + render_imgs = renderer(rott_geo.to(device_render), + sel_texture.to(device_render), + sel_light.to(device_render)) + render_imgs = render_imgs.to(device_default) + + mask = (render_imgs[:, :, :, 3]).detach() > 0.0 + render_proj = sel_imgs.clone() + render_proj[mask] = render_imgs[mask][..., :3].byte() + loss_col = cal_col_loss(render_imgs[:, :, :, :3], sel_imgs.float(), mask) + loss = loss_col + loss_lan*3 + loss_regid*2.0 + loss_regexp*1.0 + if iter > 50: + loss = loss_col + loss_lan*0.05 + loss_regid*1.0 + loss_regexp*0.8 + optimizer_tl.zero_grad() + optimizer_id_frame.zero_grad() + loss.backward() + optimizer_tl.step() + optimizer_id_frame.step() + if iter % 50 == 0 and iter >= 5: + for param_group in optimizer_id_frame.param_groups: + param_group['lr'] *= 0.2 + for param_group in optimizer_tl.param_groups: + param_group['lr'] *= 0.2 + #print(iter, loss_col.item(), loss_lan.item(), loss_regid.item(), loss_regexp.item()) + + # np2mesh(mesh, geometry[0, ...].detach().cpu().numpy( + # ), os.path.join(id_dir, 'debug', 'id.ply')) + + light_mean = torch.mean(sel_light, 0).unsqueeze(0).repeat(num_frames, 1) + light_para.data = light_mean + + exp_para = exp_para.detach() + euler_angle = euler_angle.detach() + trans = trans.detach() + light_para = light_para.detach() + + for i in range(int((num_frames-1)/batch_size+1)): + if (i+1)*batch_size > num_frames: + start_n = num_frames-batch_size + sel_ids = np.arange(num_frames-batch_size, num_frames) + else: + start_n = i*batch_size + sel_ids = np.arange(i*batch_size, i*batch_size+batch_size) + imgs = [] + for sel_id in sel_ids: + imgs.append(cv2.imread(img_paths[sel_id])[:, :, ::-1]) + imgs = np.stack(imgs) + sel_imgs = torch.as_tensor(imgs).cuda() + sel_lms = lms[sel_ids] + + sel_exp_para = exp_para.new_zeros( + (batch_size, exp_dim), requires_grad=True) + sel_exp_para.data = exp_para[sel_ids].clone() + sel_euler = euler_angle.new_zeros( + (batch_size, 3), requires_grad=True) + sel_euler.data = euler_angle[sel_ids].clone() + sel_trans = trans.new_zeros((batch_size, 3), requires_grad=True) + sel_trans.data = trans[sel_ids].clone() + sel_light = light_para.new_zeros( + (batch_size, 27), requires_grad=True) + sel_light.data = light_para[sel_ids].clone() + + set_requires_grad([sel_exp_para, sel_euler, sel_trans, sel_light]) + + optimizer_cur_batch = torch.optim.Adam( + [sel_exp_para, sel_euler, sel_trans, sel_light], lr=0.005) + + sel_id_para = id_para.expand(batch_size, -1).detach() + sel_tex_para = tex_para.expand(batch_size, -1).detach() + + pre_num = 5 + if i > 0: + pre_ids = np.arange( + start_n-pre_num, start_n) + + for iter in tqdm(range(50)): + geometry = model_3dmm.get_3dlandmarks( + sel_id_para, sel_exp_para, sel_euler, sel_trans, focal_length, cxy) + proj_geo = forward_transform( + geometry, sel_euler, sel_trans, focal_length, cxy) + loss_lan = cal_lan_loss(proj_geo[:, :, :2], sel_lms.detach()) + loss_regexp = torch.mean(sel_exp_para*sel_exp_para) + + sel_geometry = model_3dmm.forward_geo(sel_id_para, sel_exp_para) + sel_texture = model_3dmm.forward_tex(sel_tex_para) + geometry = model_3dmm.forward_geo(sel_id_para, sel_exp_para) + rott_geo = forward_rott(geometry, sel_euler, sel_trans) + render_imgs = renderer(rott_geo.to(device_render), + sel_texture.to(device_render), + sel_light.to(device_render)) + render_imgs = 
render_imgs.to(device_default) + + mask = (render_imgs[:, :, :, 3]).detach() > 0.0 + + loss_col = cal_col_loss( + render_imgs[:, :, :, :3], sel_imgs.float(), mask) + + if i > 0: + geometry_lap = model_3dmm.forward_geo_sub(id_para.expand( + batch_size+pre_num, -1).detach(), torch.cat((exp_para[pre_ids].detach(), sel_exp_para)), model_3dmm.rigid_ids) + rott_geo_lap = forward_rott(geometry_lap, torch.cat( + (euler_angle[pre_ids].detach(), sel_euler)), torch.cat((trans[pre_ids].detach(), sel_trans))) + + loss_lap = cal_lap_loss([rott_geo_lap.reshape(rott_geo_lap.shape[0], -1).permute(1, 0)], + [1.0]) + else: + geometry_lap = model_3dmm.forward_geo_sub( + id_para.expand(batch_size, -1).detach(), sel_exp_para, model_3dmm.rigid_ids) + rott_geo_lap = forward_rott(geometry_lap, sel_euler, sel_trans) + loss_lap = cal_lap_loss([rott_geo_lap.reshape(rott_geo_lap.shape[0], -1).permute(1, 0)], + [1.0]) + + loss = loss_col*0.5 + loss_lan*8 + loss_lap*100000 + loss_regexp*1.0 + if iter > 30: + loss = loss_col*0.5 + loss_lan*1.5 + loss_lap*100000 + loss_regexp*1.0 + optimizer_cur_batch.zero_grad() + loss.backward() + optimizer_cur_batch.step() + #print(i, iter, loss_col.item(), loss_lan.item(), loss_lap.item(), loss_regexp.item()) + print(str(i) + ' of ' + str(int((num_frames-1)/batch_size+1)) + ' done') + render_proj = sel_imgs.clone() + render_proj[mask] = render_imgs[mask][..., :3].byte() + mask_img = mask.clone().cpu().numpy() * 255. + debug_render_dir = os.path.join(id_dir, 'debug', 'debug_render') + debug_meshes_dir = os.path.join(id_dir, 'debug', 'debug_meshes') + Path(debug_render_dir).mkdir(parents=True, exist_ok=True) + # Path(debug_meshes_dir).mkdir(parents=True, exist_ok=True) + for j in range(sel_ids.shape[0]): + img_arr = render_proj[j, :, :, :3].byte().detach().cpu().numpy()[ + :, :, ::-1] + cv2.imwrite(os.path.join(debug_render_dir, str(sel_ids[j]) + '.jpg'), + img_arr) + + cv2.imwrite(os.path.join(expr_masks_dir, str(sel_ids[j]) + '.jpg'), + mask_img[j]) + + # renderer.get_and_save_mesh(rott_geo, os.path.join(debug_meshes_dir, str(sel_ids[j]) + '.obj')) + + exp_para[sel_ids] = sel_exp_para.clone() + euler_angle[sel_ids] = sel_euler.clone() + trans[sel_ids] = sel_trans.clone() + light_para[sel_ids] = sel_light.clone() + + torch.save({'id': id_para.detach().cpu(), 'exp': exp_para.detach().cpu(), + 'euler': euler_angle.detach().cpu(), 'trans': trans.detach().cpu(), + 'focal': focal_length.detach().cpu(), 'light': light_para.detach().cpu(), + 'text': tex_para.detach().cpu()} + , out_path) + print('params saved') diff --git a/motion-gan-pipeline/preprocessing/face_tracking/face_tracker_deca.py b/motion-gan-pipeline/preprocessing/face_tracking/face_tracker_deca.py new file mode 100644 index 0000000..294b456 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/face_tracking/face_tracker_deca.py @@ -0,0 +1,410 @@ +import os +import sys + +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../third/DECA/'))) + +from decalib.deca import DECA +from decalib.datasets import datasets +from decalib.utils import util +from decalib.utils.config import cfg as deca_cfg + +from skimage.io import imread, imsave + +import numpy as np +from .data_loader import load_dir +from .util import * +from .render_3dmm import Render_FLAME +from .FLAME.FLAME import FLAME +from .FLAME.config import cfg +from .FLAME.lbs import vertices2landmarks +import cv2 +from pathlib import Path +from tqdm import tqdm +import matplotlib.pyplot as plt +from PIL import Image, ImageDraw + + +class DECA_tracker: + 
def __init__(self): + + # run DECA + deca_cfg.model.use_tex = False + self.deca = DECA(config=deca_cfg, device='cuda') + + def __call__(self, images, tform=None): + + codedict = self.deca.encode(images) + + return codedict + + +def track_face_DECA(dataset_base, h, w, frame_num, out_path, decaexpr_dir, expr_masks_dir): + ''' + Face tracker using FLAME model. + Used to have geometry prior for nerf sampling. + ''' + + def set_requires_grad(tensor_list): + for tensor in tensor_list: + tensor.requires_grad = True + + start_id = 0 + end_id = frame_num + + id_dir = dataset_base + + debug_ldk_dir = os.path.join(id_dir, 'debug', 'debug_landmarks') + Path(debug_ldk_dir).mkdir(parents=True, exist_ok=True) + debug_render_dir = os.path.join(id_dir, 'debug', 'debug_render') + Path(debug_render_dir).mkdir(parents=True, exist_ok=True) + debug_mix_dir = os.path.join(id_dir, 'debug', 'debug_mixed') + Path(debug_mix_dir).mkdir(parents=True, exist_ok=True) + debug_land_dir = os.path.join(id_dir, 'debug', 'proj_landmarks') + Path(debug_land_dir).mkdir(parents=True, exist_ok=True) + debug_land_img_dir = os.path.join(id_dir, 'debug', 'proj_landmarks_img') + Path(debug_land_img_dir).mkdir(parents=True, exist_ok=True) + debug_meshes_dir = os.path.join(id_dir, 'debug', 'debug_meshes') + # Path(debug_meshes_dir).mkdir(parents=True, exist_ok=True) + + lms, img_paths = load_dir(os.path.join(id_dir, 'landmarks'), os.path.join(id_dir, 'frames'), start_id, end_id) + num_frames = lms.shape[0] + cxy = torch.tensor((w / 2.0, h / 2.0), dtype=torch.float).cuda() + id_dim, exp_dim, tex_dim = 100, 53, 50 + model_3dmm = FLAME(cfg.model) + + device_default = torch.device('cuda:0') + device_render = torch.device('cuda:0') + + deca_tracker = DECA_tracker() + arg_focal = 600 + # renderer = Render_FLAME(model_3dmm.faces_tensor, arg_focal, h, w, 1, torch.device('cuda:0')) + + # id_para = lms.new_zeros((1, id_dim), requires_grad=True) + id_para = lms.new_zeros((num_frames, id_dim), requires_grad=True) + exp_para = lms.new_zeros((num_frames, exp_dim), requires_grad=False) + + # Run deca on all frames + testdata = datasets.TestData(img_paths, iscrop=True, face_detector='fan') + for i, data in enumerate(tqdm(testdata)): + images = data['image'].cuda()[None, ...] + codedict = deca_tracker(images) + to_show = codedict['images'] + shape_params = codedict['shape'] + expression_params = codedict['exp'] + pose_params = codedict['pose'] + + id_para.data[i] = shape_params + exp_para.data[i] = torch.cat((expression_params.flatten(), pose_params[:, 3:].flatten()), dim=0) + np.save(os.path.join(decaexpr_dir, '%05d.npy' % i), exp_para.data[i].unsqueeze(0).cpu().numpy()) + + '''# TO TEST DECA + pose_params = codedict['pose'] + + opdict, visdict = deca_tracker.deca.decode(codedict) + shape_detail_images = visdict['shape_detail_images'][0].permute(1, 2, 0).cpu().numpy() + shape_detail_images = shape_detail_images * 255. 
+ shape_detail_images = shape_detail_images.astype(np.uint8) + img = Image.fromarray(shape_detail_images) + + euler_angle = torch.zeros((1, 3), dtype=torch.double).cuda() + trans = torch.from_numpy(np.array([[0., 0., -3.]])).double().cuda() + exp_para_i = torch.cat((expression_params.flatten(), pose_params[:, 3:].flatten()), dim=0).unsqueeze(0) + + rott_geo = model_3dmm.forward_geo(shape_params, exp_para_i, euler_angle, trans).float() + render_imgs = renderer(rott_geo.cuda(), model_3dmm.faces_tensor.cuda()) + render_imgs = render_imgs.cpu().detach().numpy()[0, :, :, :3] + render_imgs *= 255 + render_imgs = render_imgs.astype(np.uint8) + + # Save landmarks + im = Image.new('RGB', (w, h), (255, 255, 255)) + geometry = model_3dmm.get_3dlandmarks( + shape_params, exp_para_i, euler_angle, trans, arg_focal, cxy) + + proj_geo = proj_pts(geometry, arg_focal, cxy)[0] + for point in proj_geo: + margin = (max(h, w) // 500) + 1 + ldmks = ([point[0] - margin, point[1] - margin, point[0] + margin, point[1] + margin]) + draw = ImageDraw.Draw(im) + draw.ellipse(ldmks, fill=(255, 0, 0)) + + plt.imshow(im) + plt.show() + + if i % 500 == 0: + print(pose_params) + fig, axs = plt.subplots(1, 3) + axs[0].imshow(to_show[0].permute(1, 2, 0).cpu().numpy()) + axs[1].imshow(img) + axs[2].imshow(render_imgs) + plt.show() + # ''' + + # mean of shape + id_para = torch.mean(id_para, axis=0).unsqueeze(0) + + # Find best focal + arg_focal = 600 + arg_landis = 1e5 + sel_ids = np.arange(0, num_frames, 40) + sel_num = sel_ids.shape[0] + + for focal in tqdm(range(700, 1000, 100)): + + euler_angle = lms.new_zeros((sel_num, 3), dtype=torch.double, requires_grad=True) + trans = lms.new_zeros((sel_num, 3), dtype=torch.double, requires_grad=True) + trans.data[:, 2] -= 1 # DIFFERENT + # trans.data[:, 2] -= 7 # ORIGINAL + focal_length = lms.new_zeros(1, requires_grad=False) + focal_length.data += focal + set_requires_grad([euler_angle, trans]) + + optimizer_frame = torch.optim.Adam([euler_angle, trans], lr=.1) + + for iter in range(2000): + id_para_batch = id_para.expand(sel_num, -1) + exp_para_batch = exp_para[sel_ids] + geometry = model_3dmm.get_3dlandmarks( + id_para_batch, exp_para_batch, euler_angle, trans, focal_length, cxy) + + proj_geo = proj_pts(geometry, focal_length, cxy) + + loss_lan = cal_lan_loss( + proj_geo[:, :, :2], lms[sel_ids].detach()) + loss = loss_lan + optimizer_frame.zero_grad() + loss.backward(retain_graph=True) + optimizer_frame.step() + if iter % 100 == 0 and False: + print(focal, 'pose', iter, loss.item()) + + for iter in range(2500): + id_para_batch = id_para.expand(sel_num, -1) + exp_para_batch = exp_para[sel_ids] + + geometry = model_3dmm.get_3dlandmarks( + id_para_batch, exp_para_batch, euler_angle, trans, focal_length, cxy) + + proj_geo = proj_pts(geometry, focal_length, cxy) + + loss_lan = cal_lan_loss( + proj_geo[:, :, :2], lms[sel_ids].detach()) + loss = loss_lan + optimizer_frame.zero_grad() + loss.backward(retain_graph=True) + optimizer_frame.step() + if iter % 100 == 0 and False: + print(focal, 'poseidexp', iter, loss_lan.item(), + loss_regid.item(), loss_regexp.item()) + if iter % 1500 == 0 and iter >= 1500: + for param_group in optimizer_frame.param_groups: + param_group['lr'] *= 0.2 + # print(focal, loss_lan.item(), torch.mean(trans[:, 2]).item()) + + if loss_lan.item() < arg_landis: + arg_landis = loss_lan.item() + arg_focal = focal + #''' + print('find best focal', arg_focal) + + # Free up some memory + torch.cuda.empty_cache() + + euler_angle = lms.new_zeros((num_frames, 3), 
dtype=torch.double, requires_grad=True) + trans = lms.new_zeros((num_frames, 3), dtype=torch.double, requires_grad=True) + trans.data[:, 2] -= 1 # DIFFERENT + # trans.data[:, 2] -= 7 # ORIGINAL + light_para = lms.new_zeros((num_frames, 27), requires_grad=True) + + focal_length = lms.new_zeros(1, requires_grad=False) + focal_length.data += arg_focal + + set_requires_grad([euler_angle, trans, light_para]) + + # ORIGINAL + # optimizer_idexp = torch.optim.Adam([id_para, exp_para], lr=.1) + optimizer_frame = torch.optim.Adam([euler_angle, trans], lr=1) + + for iter in tqdm(range(1500)): + id_para_batch = id_para.expand(num_frames, -1) + + geometry = model_3dmm.get_3dlandmarks( + id_para_batch, exp_para, euler_angle, trans, focal_length, cxy) + + proj_geo = proj_pts(geometry, focal_length, cxy) + + loss_lan = cal_lan_loss( + proj_geo[:, :, :2], lms.detach()) + loss = loss_lan + optimizer_frame.zero_grad() + loss.backward(retain_graph=True) + optimizer_frame.step() + if iter == 1000: + for param_group in optimizer_frame.param_groups: + param_group['lr'] = 0.1 + if iter % 250 == 0 and False: + # print('pose', iter, loss.item()) + + img = Image.open(img_paths[0]) + colormap_blue = plt.cm.Blues + colormap_red = plt.cm.Reds + + plt.imshow(img) + for num, lin in enumerate(np.linspace(0, 0.9, len(lms[0, :, 0]))): + plt.scatter(lms[0, num, 0].detach().cpu(), + lms[0, num, 1].detach().cpu(), + color=colormap_blue(lin), + s=10) + + plt.scatter(proj_geo[0, num, 0].detach().cpu(), + proj_geo[0, num, 1].detach().cpu(), + color=colormap_red(lin), + s=10) + + plt.savefig(os.path.join(debug_ldk_dir, f'ldk_1_{iter}.png')) + plt.close() + + for param_group in optimizer_frame.param_groups: + param_group['lr'] = 0.1 # ORIGINAL + + for iter in tqdm(range(2000)): + id_para_batch = id_para.expand(num_frames, -1) + + geometry = model_3dmm.get_3dlandmarks( + id_para_batch, exp_para, euler_angle, trans, focal_length, cxy) + + proj_geo = proj_pts(geometry, focal_length, cxy) + + loss_lan = cal_lan_loss( + proj_geo[:, :, :2], lms.detach()) + loss = loss_lan + optimizer_frame.zero_grad() + loss.backward(retain_graph=True) + optimizer_frame.step() + + if iter % 100 == 0 and False: + # print('pose', iter, loss_lan.item()) + + img = Image.open(img_paths[0]) + colormap_blue = plt.cm.Blues + colormap_red = plt.cm.Reds + + plt.imshow(img) + for num, lin in enumerate(np.linspace(0, 0.9, len(lms[0, :, 0]))): + plt.scatter(lms[0, num, 0].detach().cpu(), + lms[0, num, 1].detach().cpu(), + color=colormap_blue(lin), + s=10) + + plt.scatter(proj_geo[0, num, 0].detach().cpu(), + proj_geo[0, num, 1].detach().cpu(), + color=colormap_red(lin), + s=10) + + plt.savefig(os.path.join(debug_ldk_dir, f'ldk_2_{iter}.png')) + plt.close() + + if iter % 1000 == 0 and iter >= 1000: + for param_group in optimizer_frame.param_groups: + param_group['lr'] *= 0.2 + + # THEY DO THIS + exp_para = exp_para.detach() + euler_angle = euler_angle.detach() + trans = trans.detach() + light_para = light_para.detach() + + batch_size = 10 + renderer = Render_FLAME(model_3dmm.faces_tensor, arg_focal, h, w, batch_size, device_render) + pts3D = [] + + for i in tqdm(range(int((num_frames - 1) / batch_size + 1))): + if (i + 1) * batch_size > num_frames: + sel_ids = np.arange(num_frames - batch_size, num_frames) + else: + sel_ids = np.arange(i * batch_size, i * batch_size + batch_size) + imgs = [] + for sel_id in sel_ids: + imgs.append(cv2.imread(img_paths[sel_id])[:, :, ::-1]) + imgs = np.stack(imgs) + sel_imgs = torch.as_tensor(imgs).cuda() + + sel_exp_para = 
exp_para.new_zeros( + (batch_size, exp_dim), requires_grad=False) + sel_exp_para.data = exp_para[sel_ids].clone() + sel_euler = euler_angle.new_zeros( + (batch_size, 3), requires_grad=False) + sel_euler.data = euler_angle[sel_ids].clone() + sel_trans = trans.new_zeros((batch_size, 3), requires_grad=False) + sel_trans.data = trans[sel_ids].clone() + sel_light = light_para.new_zeros( + (batch_size, 27), requires_grad=False) + sel_light.data = light_para[sel_ids].clone() + + sel_id_para = id_para.expand(batch_size, -1).detach() + + rott_geo = model_3dmm.forward_geo(sel_id_para, sel_exp_para, sel_euler, sel_trans).float() + bz = rott_geo.shape[0] + + sel_pts3D = vertices2landmarks(rott_geo, model_3dmm.faces_tensor, + model_3dmm.full_lmk_faces_idx.repeat(bz, 1), + model_3dmm.full_lmk_bary_coords.repeat(bz, 1, 1).float()) + pts3D.append(sel_pts3D) + + ldmks = model_3dmm.get_3dlandmarks_forehead( + sel_id_para, sel_exp_para, sel_euler, sel_trans, arg_focal, cxy) + proj_geos = proj_pts(ldmks, focal_length, cxy) + + render_imgs = renderer(rott_geo.to(device_render), model_3dmm.faces_tensor.to(device_render)) + render_imgs = render_imgs.to(device_default).detach() + og_imgs = sel_imgs.clone() + + for j in range(sel_ids.shape[0]): + # Save render + img_arr = render_imgs[j, :, :, :3].cpu().numpy() + img_arr *= 255 + img_arr = img_arr.astype(np.uint8) + im = Image.fromarray(img_arr) + im.save(os.path.join(debug_render_dir, '%05d.jpg' % sel_ids[j])) + + # Save mask + black_pixels_mask = np.all(img_arr != [255, 255, 255], axis=-1) + mask_img = np.zeros_like(img_arr) + mask_img[black_pixels_mask] = [255, 255, 255] + + im = Image.fromarray(mask_img) + im.save(os.path.join(expr_masks_dir, '%05d.jpg' % sel_ids[j])) + + # Save mixed + alpha_blend= 0.1 + og_img = og_imgs[j,:,:,:3].cpu().numpy() + og_img[black_pixels_mask] = og_img[black_pixels_mask] * alpha_blend + \ + img_arr[black_pixels_mask] * (1 - alpha_blend) + + og_img = og_img.astype(np.uint8) + im = Image.fromarray(og_img) + im.save(os.path.join(debug_mix_dir, '%05d.jpg' % sel_ids[j])) + + # Save landmarks + im = Image.new('RGB', (w, h), (255, 255, 255)) + proj_geo = proj_geos[j] + for point in proj_geo: + margin = (max(h, w) // 500) + 1 + ldmks = ([point[0] - margin, point[1] - margin, point[0] + margin, point[1] + margin]) + draw = ImageDraw.Draw(im) + draw.ellipse(ldmks, fill=(255, 0, 0)) + + im.save(os.path.join(debug_land_img_dir, '%05d.jpg' % sel_ids[j])) + np.savetxt(os.path.join(debug_land_dir, '%05d.lms' % sel_ids[j]), proj_geo.detach().cpu().numpy()) + + + pts3D = torch.cat(pts3D, dim=0) + + print('about to save params..') + + torch.save({'id': id_para.detach().cpu(), 'exp': exp_para.detach().cpu(), + 'euler': euler_angle.detach().cpu(), 'trans': trans.detach().cpu(), + 'focal': focal_length.detach().cpu(), 'light': light_para.detach().cpu(), + 'pts3D': pts3D.detach().cpu(), 'h': h, 'w': w}, out_path) + + print('params saved') \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/face_tracking/facemodel.py b/motion-gan-pipeline/preprocessing/face_tracking/facemodel.py new file mode 100644 index 0000000..e853748 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/face_tracking/facemodel.py @@ -0,0 +1,113 @@ +import torch +import torch.nn as nn +import numpy as np +import os +from .util import * + + +class Face_3DMM(nn.Module): + def __init__(self, modelpath, id_dim, exp_dim, tex_dim, point_num): + super(Face_3DMM, self).__init__() + # id_dim = 100 + # exp_dim = 79 + # tex_dim = 100 + self.point_num = point_num + 
DMM_info = np.load(os.path.join( + modelpath, '3DMM_info.npy'), allow_pickle=True).item() + base_id = DMM_info['b_shape'][:id_dim, :] + mu_id = DMM_info['mu_shape'] + base_exp = DMM_info['b_exp'][:exp_dim, :] + mu_exp = DMM_info['mu_exp'] + mu = mu_id + mu_exp + mu = mu.reshape(-1, 3) + for i in range(3): + mu[:, i] -= np.mean(mu[:, i]) + mu = mu.reshape(-1) + self.base_id = torch.as_tensor(base_id).cuda()/100000.0 + self.base_exp = torch.as_tensor(base_exp).cuda()/100000.0 + self.mu = torch.as_tensor(mu).cuda()/100000.0 + base_tex = DMM_info['b_tex'][:tex_dim, :] + mu_tex = DMM_info['mu_tex'] + self.base_tex = torch.as_tensor(base_tex).cuda() + self.mu_tex = torch.as_tensor(mu_tex).cuda() + sig_id = DMM_info['sig_shape'][:id_dim] + sig_tex = DMM_info['sig_tex'][:tex_dim] + sig_exp = DMM_info['sig_exp'][:exp_dim] + self.sig_id = torch.as_tensor(sig_id).cuda() + self.sig_tex = torch.as_tensor(sig_tex).cuda() + self.sig_exp = torch.as_tensor(sig_exp).cuda() + + keys_info = np.load(os.path.join( + modelpath, 'keys_info.npy'), allow_pickle=True).item() + self.keyinds = torch.as_tensor(keys_info['keyinds']).cuda() + self.left_contours = torch.as_tensor(keys_info['left_contour']).cuda() + self.right_contours = torch.as_tensor( + keys_info['right_contour']).cuda() + self.rigid_ids = torch.as_tensor(keys_info['rigid_ids']).cuda() + + def get_3dlandmarks(self, id_para, exp_para, euler_angle, trans, focal_length, cxy): + id_para = id_para*self.sig_id + exp_para = exp_para*self.sig_exp + batch_size = id_para.shape[0] + num_per_contour = self.left_contours.shape[1] + left_contours_flat = self.left_contours.reshape(-1) + right_contours_flat = self.right_contours.reshape(-1) + sel_index = torch.cat((3*left_contours_flat.unsqueeze(1), 3*left_contours_flat.unsqueeze(1)+1, + 3*left_contours_flat.unsqueeze(1)+2), dim=1).reshape(-1) + left_geometry = torch.mm(id_para, self.base_id[:, sel_index]) + \ + torch.mm(exp_para, self.base_exp[:, + sel_index]) + self.mu[sel_index] + left_geometry = left_geometry.view(batch_size, -1, 3) + proj_x = forward_transform( + left_geometry, euler_angle, trans, focal_length, cxy)[:, :, 0] + proj_x = proj_x.reshape(batch_size, 8, num_per_contour) + arg_min = proj_x.argmin(dim=2) + left_geometry = left_geometry.view(batch_size*8, num_per_contour, 3) + left_3dlands = left_geometry[torch.arange( + batch_size*8), arg_min.view(-1), :].view(batch_size, 8, 3) + + sel_index = torch.cat((3*right_contours_flat.unsqueeze(1), 3*right_contours_flat.unsqueeze(1)+1, + 3*right_contours_flat.unsqueeze(1)+2), dim=1).reshape(-1) + right_geometry = torch.mm(id_para, self.base_id[:, sel_index]) + \ + torch.mm(exp_para, self.base_exp[:, + sel_index]) + self.mu[sel_index] + right_geometry = right_geometry.view(batch_size, -1, 3) + proj_x = forward_transform( + right_geometry, euler_angle, trans, focal_length, cxy)[:, :, 0] + proj_x = proj_x.reshape(batch_size, 8, num_per_contour) + arg_max = proj_x.argmax(dim=2) + right_geometry = right_geometry.view(batch_size*8, num_per_contour, 3) + right_3dlands = right_geometry[torch.arange( + batch_size*8), arg_max.view(-1), :].view(batch_size, 8, 3) + + sel_index = torch.cat((3*self.keyinds.unsqueeze(1), 3*self.keyinds.unsqueeze(1)+1, + 3*self.keyinds.unsqueeze(1)+2), dim=1).reshape(-1) + geometry = torch.mm(id_para, self.base_id[:, sel_index]) + \ + torch.mm(exp_para, self.base_exp[:, + sel_index]) + self.mu[sel_index] + lands_3d = geometry.view(-1, self.keyinds.shape[0], 3) + lands_3d[:, :8, :] = left_3dlands + lands_3d[:, 9:17, :] = right_3dlands + return 
lands_3d + + def forward_geo_sub(self, id_para, exp_para, sub_index): + id_para = id_para*self.sig_id + exp_para = exp_para*self.sig_exp + sel_index = torch.cat((3*sub_index.unsqueeze(1), 3*sub_index.unsqueeze(1)+1, + 3*sub_index.unsqueeze(1)+2), dim=1).reshape(-1) + geometry = torch.mm(id_para, self.base_id[:, sel_index]) + \ + torch.mm(exp_para, self.base_exp[:, + sel_index]) + self.mu[sel_index] + return geometry.reshape(-1, sub_index.shape[0], 3) + + def forward_geo(self, id_para, exp_para): + id_para = id_para*self.sig_id + exp_para = exp_para*self.sig_exp + geometry = torch.mm(id_para, self.base_id) + \ + torch.mm(exp_para, self.base_exp) + self.mu + return geometry.reshape(-1, self.point_num, 3) + + def forward_tex(self, tex_para): + tex_para = tex_para*self.sig_tex + texture = torch.mm(tex_para, self.base_tex) + self.mu_tex + return texture.reshape(-1, self.point_num, 3) diff --git a/motion-gan-pipeline/preprocessing/face_tracking/geo_transform.py b/motion-gan-pipeline/preprocessing/face_tracking/geo_transform.py new file mode 100644 index 0000000..d9b2308 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/face_tracking/geo_transform.py @@ -0,0 +1,39 @@ +"""This module contains functions for geometry transform and camera projection""" +import torch +import torch.nn as nn +import numpy as np +from pytorch3d.transforms import euler_angles_to_matrix + +def euler2rot(euler_angle): + rot = torch.deg2rad(euler_angle) + rot = euler_angles_to_matrix(rot, convention='XYZ') + return rot + + +def rot_trans_geo(geometry, rot, trans): + rott_geo = torch.bmm(rot, geometry.permute(0, 2, 1)) + trans.view(-1, 3, 1) + return rott_geo.permute(0, 2, 1) + + +def euler_trans_geo(geometry, euler, trans): + rot = euler2rot(euler) + return rot_trans_geo(geometry, rot, trans) + + +def proj_geo(rott_geo, camera_para): + fx = camera_para[:, 0] + fy = camera_para[:, 0] + cx = camera_para[:, 1] + cy = camera_para[:, 2] + + X = rott_geo[:, :, 0] + Y = rott_geo[:, :, 1] + Z = rott_geo[:, :, 2] + + fxX = fx[:, None]*X + fyY = fy[:, None]*Y + + proj_x = -fxX/Z + cx[:, None] + proj_y = fyY/Z + cy[:, None] + + return torch.cat((proj_x[:, :, None], proj_y[:, :, None], Z[:, :, None]), 2) diff --git a/motion-gan-pipeline/preprocessing/face_tracking/render_3dmm.py b/motion-gan-pipeline/preprocessing/face_tracking/render_3dmm.py new file mode 100644 index 0000000..45ca741 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/face_tracking/render_3dmm.py @@ -0,0 +1,332 @@ +from matplotlib.pyplot import step +import torch +import torch.nn as nn +import numpy as np +import os +from pytorch3d.structures import Meshes +from pytorch3d.renderer import * + +import torch.nn.functional as F + +from pytorch3d.io import IO, save_obj + +from pytorch3d.ops import interpolate_face_attributes + +from pytorch3d.renderer.blending import ( + BlendParams, + hard_rgb_blend, + sigmoid_alpha_blend, + softmax_rgb_blend, +) + + +class SoftSimpleShader(nn.Module): + """ + Per pixel lighting - the lighting model is applied using the interpolated + coordinates and normals for each pixel. The blending function returns the + soft aggregated color using all the faces per pixel. + + To use the default values, simply initialize the shader with the desired + device e.g. 
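+        shader = SoftSimpleShader(device=torch.device("cuda:0"))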
+ + """ + + def __init__( + self, device="cpu", cameras=None, lights=None, materials=None, blend_params=None + ): + super().__init__() + self.lights = lights if lights is not None else PointLights( + device=device) + self.materials = ( + materials if materials is not None else Materials(device=device) + ) + self.cameras = cameras + self.blend_params = blend_params if blend_params is not None else BlendParams() + + def to(self, device): + # Manually move to device modules which are not subclasses of nn.Module + self.cameras = self.cameras.to(device) + self.materials = self.materials.to(device) + self.lights = self.lights.to(device) + return self + + def forward(self, fragments, meshes, **kwargs) -> torch.Tensor: + + texels = meshes.sample_textures(fragments) + blend_params = kwargs.get("blend_params", self.blend_params) + + cameras = kwargs.get("cameras", self.cameras) + if cameras is None: + msg = "Cameras must be specified either at initialization \ + or in the forward pass of SoftPhongShader" + raise ValueError(msg) + znear = kwargs.get("znear", getattr(cameras, "znear", 1.0)) + zfar = kwargs.get("zfar", getattr(cameras, "zfar", 100.0)) + images = softmax_rgb_blend( + texels, fragments, blend_params, znear=znear, zfar=zfar + ) + return images + + +class Render_3DMM(nn.Module): + def __init__(self, focal=1015, img_h=500, img_w=500, batch_size=1, device=torch.device('cuda:0')): + super(Render_3DMM, self).__init__() + + self.focal = focal + self.img_h = img_h + self.img_w = img_w + self.device = device + self.renderer = self.get_render(batch_size) + + dir_path = os.path.dirname(os.path.realpath(__file__)) + topo_info = np.load(os.path.join( + dir_path, '3DMM', 'topology_info.npy'), allow_pickle=True).item() + + self.tris = torch.as_tensor(topo_info['tris']).to(self.device) + self.vert_tris = torch.as_tensor(topo_info['vert_tris']).to(self.device) + + def compute_normal(self, geometry): + vert_1 = torch.index_select(geometry, 1, self.tris[:, 0]) + vert_2 = torch.index_select(geometry, 1, self.tris[:, 1]) + vert_3 = torch.index_select(geometry, 1, self.tris[:, 2]) + nnorm = torch.cross(vert_2-vert_1, vert_3-vert_1, 2) + tri_normal = nn.functional.normalize(nnorm) + v_norm = tri_normal[:, self.vert_tris, :].sum(2) + vert_normal = v_norm / v_norm.norm(dim=2).unsqueeze(2) + return vert_normal + + + def get_render(self, batch_size=1): + half_s = self.img_w * 0.5 + R, T = look_at_view_transform(10, 0, 0) + R = R.repeat(batch_size, 1, 1) + T = torch.zeros((batch_size, 3), dtype=torch.float32).to(self.device) + + cameras = FoVPerspectiveCameras(device=self.device, R=R, T=T, znear=0.01, zfar=20, + fov=2*np.arctan(self.img_w//2/self.focal)*180./np.pi) + lights = PointLights( + device=self.device, + location=[[0.0, 0.0, 1e5]], + ambient_color=[[1, 1, 1]], + specular_color=[[0., 0., 0.]], + diffuse_color=[[0., 0., 0.]] + ) + sigma = 1e-4 + raster_settings = RasterizationSettings( + image_size=(self.img_h, self.img_w), + blur_radius=np.log(1. 
/ 1e-4 - 1.)*sigma / 18.0, + faces_per_pixel=2, + perspective_correct=False, + ) + blend_params = blending.BlendParams(background_color=[0, 0, 0]) + renderer = MeshRenderer( + rasterizer=MeshRasterizer( + raster_settings=raster_settings, + cameras=cameras + ), + shader=SoftSimpleShader( + lights=lights, + blend_params=blend_params, + cameras=cameras + ), + ) + return renderer.to(self.device) + + @staticmethod + def Illumination_layer(face_texture, norm, gamma): + + n_b, num_vertex, _ = face_texture.size() + n_v_full = n_b * num_vertex + gamma = gamma.view(-1, 3, 9).clone() + gamma[:, :, 0] += 0.8 + + gamma = gamma.permute(0, 2, 1) + + a0 = np.pi + a1 = 2 * np.pi / np.sqrt(3.0) + a2 = 2 * np.pi / np.sqrt(8.0) + c0 = 1 / np.sqrt(4 * np.pi) + c1 = np.sqrt(3.0) / np.sqrt(4 * np.pi) + c2 = 3 * np.sqrt(5.0) / np.sqrt(12 * np.pi) + d0 = 0.5 / np.sqrt(3.0) + + Y0 = torch.ones(n_v_full).to(gamma.device).float() * a0 * c0 + norm = norm.view(-1, 3) + nx, ny, nz = norm[:, 0], norm[:, 1], norm[:, 2] + arrH = [] + + arrH.append(Y0) + arrH.append(-a1 * c1 * ny) + arrH.append(a1 * c1 * nz) + arrH.append(-a1 * c1 * nx) + arrH.append(a2 * c2 * nx * ny) + arrH.append(-a2 * c2 * ny * nz) + arrH.append(a2 * c2 * d0 * (3 * nz.pow(2) - 1)) + arrH.append(-a2 * c2 * nx * nz) + arrH.append(a2 * c2 * 0.5 * (nx.pow(2) - ny.pow(2))) + + H = torch.stack(arrH, 1) + Y = H.view(n_b, num_vertex, 9) + lighting = Y.bmm(gamma) + + face_color = face_texture * lighting + return face_color + + def get_mesh(self, rott_geometry): + mesh = Meshes(rott_geometry, self.tris.float().repeat( + rott_geometry.shape[0], 1, 1)) + + return mesh + + def get_and_save_mesh(self, rott_geometry, save_path): + + verts = rott_geometry.cpu()[0] + faces = self.tris.float() + + save_obj(save_path, verts=verts, faces=faces) + + def save_mesh(self, mesh, save_path): + saver = IO() + saver.save_mesh(mesh, save_path) + + def forward(self, rott_geometry, texture, diffuse_sh): + face_normal = self.compute_normal(rott_geometry) + + face_color = self.Illumination_layer(texture, face_normal, diffuse_sh) + face_color = TexturesVertex(face_color) + mesh = Meshes(rott_geometry, self.tris.float().repeat( + rott_geometry.shape[0], 1, 1), face_color) + + rendered_img = self.renderer(mesh) + rendered_img = torch.clamp(rendered_img, 0, 255) + + return rendered_img + + +class Render_FLAME(nn.Module): + def __init__(self, faces, focal=1015, img_h=500, img_w=500, batch_size=1, device=torch.device('cuda:0')): + super(Render_FLAME, self).__init__() + + self.focal = focal + self.img_h = img_h + self.img_w = img_w + self.device = device + self.renderer = self.get_render(batch_size) + + # dir_path = os.path.dirname(os.path.realpath(__file__)) + + self.tris = faces.to(self.device) + + def vertex_normals(self, vertices, faces): + """ + :param vertices: [batch size, number of vertices, 3] + :param faces: [batch size, number of faces, 3] + :return: [batch size, number of vertices, 3] + """ + assert (vertices.ndimension() == 3) + assert (faces.ndimension() == 3) + assert (vertices.shape[0] == faces.shape[0]) + assert (vertices.shape[2] == 3) + assert (faces.shape[2] == 3) + bs, nv = vertices.shape[:2] + bs, nf = faces.shape[:2] + device = vertices.device + normals = torch.zeros(bs * nv, 3).to(device) + + faces = faces + (torch.arange(bs, dtype=torch.int32).to(device) * nv)[:, None, None] # expanded faces + vertices_faces = vertices.reshape((bs * nv, 3))[faces.long()] + + faces = faces.reshape(-1, 3) + vertices_faces = vertices_faces.reshape(-1, 3, 3) + + normals.index_add_(0, 
faces[:, 1].long(), + torch.cross(vertices_faces[:, 2] - vertices_faces[:, 1], + vertices_faces[:, 0] - vertices_faces[:, 1])) + normals.index_add_(0, faces[:, 2].long(), + torch.cross(vertices_faces[:, 0] - vertices_faces[:, 2], + vertices_faces[:, 1] - vertices_faces[:, 2])) + normals.index_add_(0, faces[:, 0].long(), + torch.cross(vertices_faces[:, 1] - vertices_faces[:, 0], + vertices_faces[:, 2] - vertices_faces[:, 0])) + + normals = F.normalize(normals, eps=1e-6, dim=1) + normals = normals.reshape((bs, nv, 3)) + # pytorch only supports long and byte tensors for indexing + return normals + + def get_render(self, batch_size=1): + # half_s = self.img_w * 0.5 + R, T = look_at_view_transform(1, 0, 0) + R = R.repeat(batch_size, 1, 1) + T = torch.zeros((batch_size, 3), dtype=torch.float32).to(self.device) + + cxy = torch.tensor((self.img_w / 2.0, self.img_h / 2.0), dtype=torch.float).unsqueeze(0) + image_size=torch.tensor((self.img_h, self.img_w)).unsqueeze(0) + cameras = PerspectiveCameras(device=self.device, R=R, T=T, + focal_length=self.focal, + principal_point=cxy, + image_size=image_size, + in_ndc=False) + + lights = PointLights( + device=self.device, + location=[[0.0, 0.0, 0.0]], + ) + + # lights = DirectionalLights( + # device=self.device, + # direction= [[0.0, 0.0, 1.0]], + # ) + + # lights= AmbientLights( + # device=self.device + # ) + + raster_settings = RasterizationSettings( + image_size=(self.img_h, self.img_w), + blur_radius=0.0, + faces_per_pixel=1, + ) + + renderer = MeshRenderer( + rasterizer=MeshRasterizer( + raster_settings=raster_settings, + cameras=cameras + ), + shader = SoftPhongShader( + lights=lights, + cameras=cameras, + ), + ) + return renderer.to(self.device) + + def get_mesh(self, rott_geometry): + mesh = Meshes(rott_geometry, self.tris.float().repeat( + rott_geometry.shape[0], 1, 1)) + + return mesh + + def get_and_save_mesh(self, rott_geometry, save_path): + + verts = rott_geometry.cpu()[0] + faces = self.tris.float() + + save_obj(save_path, verts=verts, faces=faces) + + def save_mesh(self, mesh, save_path): + saver = IO() + saver.save_mesh(mesh, save_path) + + def forward(self, rott_geometry, faces): + + verts_rgb = (torch.ones_like(rott_geometry)) * 0.5 + + textures = Textures(verts_rgb=verts_rgb.cuda()) + + mesh = Meshes(verts=rott_geometry, + faces=faces.float().repeat(rott_geometry.shape[0], 1, 1), + textures=textures) + + rendered_img = self.renderer(mesh) + rendered_img = torch.clamp(rendered_img, 0, 255) + return rendered_img \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/face_tracking/render_land.py b/motion-gan-pipeline/preprocessing/face_tracking/render_land.py new file mode 100644 index 0000000..9dd324d --- /dev/null +++ b/motion-gan-pipeline/preprocessing/face_tracking/render_land.py @@ -0,0 +1,146 @@ +import torch +import torch.nn as nn +import render_util +import geo_transform +import numpy as np + + +def compute_tri_normal(geometry, tris): + geometry = geometry.permute(0, 2, 1) + tri_1 = tris[:, 0] + tri_2 = tris[:, 1] + tri_3 = tris[:, 2] + + vert_1 = torch.index_select(geometry, 2, tri_1) + vert_2 = torch.index_select(geometry, 2, tri_2) + vert_3 = torch.index_select(geometry, 2, tri_3) + + nnorm = torch.cross(vert_2-vert_1, vert_3-vert_1, 1) + normal = nn.functional.normalize(nnorm).permute(0, 2, 1) + return normal + + +class Compute_normal_base(torch.autograd.Function): + @staticmethod + def forward(ctx, normal): + normal_b, = render_util.normal_base_forward(normal) + ctx.save_for_backward(normal) + return 
normal_b + + @staticmethod + def backward(ctx, grad_normal_b): + normal, = ctx.saved_tensors + grad_normal, = render_util.normal_base_backward(grad_normal_b, normal) + return grad_normal + + +class Normal_Base(torch.nn.Module): + def __init__(self): + super(Normal_Base, self).__init__() + + def forward(self, normal): + return Compute_normal_base.apply(normal) + + +def preprocess_render(geometry, euler, trans, cam, tris, vert_tris, ori_img): + point_num = geometry.shape[1] + rott_geo = geo_transform.euler_trans_geo(geometry, euler, trans) + proj_geo = geo_transform.proj_geo(rott_geo, cam) + rot_tri_normal = compute_tri_normal(rott_geo, tris) + rot_vert_normal = torch.index_select(rot_tri_normal, 1, vert_tris) + is_visible = -torch.bmm(rot_vert_normal.reshape(-1, 1, 3), + nn.functional.normalize(rott_geo.reshape(-1, 3, 1))).reshape(-1, point_num) + is_visible[is_visible < 0.01] = -1 + pixel_valid = torch.zeros((ori_img.shape[0], ori_img.shape[1]*ori_img.shape[2]), + dtype=torch.float32, device=ori_img.device) + return rott_geo, proj_geo, rot_tri_normal, is_visible, pixel_valid + + +class Render_Face(torch.autograd.Function): + @staticmethod + def forward(ctx, proj_geo, texture, nbl, ori_img, is_visible, tri_inds, + pixel_valid): + batch_size, h, w, _ = ori_img.shape + ori_img = ori_img.view(batch_size, -1, 3) + ori_size = torch.cat((torch.ones((batch_size, 1), dtype=torch.int32, device=ori_img.device)*h, + torch.ones((batch_size, 1), dtype=torch.int32, device=ori_img.device)*w), + dim=1).view(-1) + tri_index, tri_coord, render, real = render_util.render_face_forward( + proj_geo, ori_img, ori_size, texture, nbl, is_visible, tri_inds, pixel_valid) + ctx.save_for_backward(ori_img, ori_size, proj_geo, texture, nbl, + tri_inds, tri_index, tri_coord) + return render, real + + @staticmethod + def backward(ctx, grad_render, grad_real): + ori_img, ori_size, proj_geo, texture, nbl, tri_inds, tri_index, tri_coord = \ + ctx.saved_tensors + grad_proj_geo, grad_texture, grad_nbl = render_util.render_face_backward( + grad_render, grad_real, ori_img, ori_size, proj_geo, texture, nbl, tri_inds, + tri_index, tri_coord) + return grad_proj_geo, grad_texture, grad_nbl, None, None, None, None + + +class Render_RGB(nn.Module): + def __init__(self): + super(Render_RGB, self).__init__() + + def forward(self, proj_geo, texture, nbl, ori_img, is_visible, tri_inds, pixel_valid): + return Render_Face.apply(proj_geo, texture, nbl, ori_img, is_visible, + tri_inds, pixel_valid) + + +def cal_land(proj_geo, is_visible, lands_info, land_num): + land_index, = render_util.update_contour( + lands_info, is_visible, land_num) + proj_land = torch.index_select( + proj_geo.reshape(-1, 3), 0, land_index)[:, :2].reshape(-1, land_num, 2) + return proj_land + + +class Render_Land(nn.Module): + def __init__(self): + super(Render_Land, self).__init__() + lands_info = np.loadtxt('../data/3DMM/lands_info.txt', dtype=np.int32) + self.lands_info = torch.as_tensor(lands_info).cuda() + tris = np.loadtxt('../data/3DMM/tris.txt', dtype=np.int64) + self.tris = torch.as_tensor(tris).cuda() - 1 + vert_tris = np.loadtxt('../data/3DMM/vert_tris.txt', dtype=np.int64) + self.vert_tris = torch.as_tensor(vert_tris).cuda() + self.normal_baser = Normal_Base().cuda() + self.renderer = Render_RGB().cuda() + + def render_mesh(self, geometry, euler, trans, cam, ori_img, light): + batch_size, h, w, _ = ori_img.shape + ori_img = ori_img.view(batch_size, -1, 3) + ori_size = torch.cat((torch.ones((batch_size, 1), dtype=torch.int32, device=ori_img.device)*h, + 
torch.ones((batch_size, 1), dtype=torch.int32, device=ori_img.device)*w), + dim=1).view(-1) + rott_geo, proj_geo, rot_tri_normal, _, _ = preprocess_render( + geometry, euler, trans, cam, self.tris, self.vert_tris, ori_img) + tri_nb = self.normal_baser(rot_tri_normal.contiguous()) + nbl = torch.bmm(tri_nb, (light.reshape(-1, 9, 3)) + [:, :, 0].unsqueeze(-1).repeat(1, 1, 3)) + texture = torch.ones_like(geometry) * 200 + render, = render_util.render_mesh( + proj_geo, ori_img, ori_size, texture, nbl, self.tris) + return render.view(batch_size, h, w, 3).byte() + + def cal_loss_rgb(self, geometry, euler, trans, cam, ori_img, light, texture, lands): + rott_geo, proj_geo, rot_tri_normal, is_visible, pixel_valid = \ + preprocess_render(geometry, euler, trans, cam, + self.tris, self.vert_tris, ori_img) + tri_nb = self.normal_baser(rot_tri_normal.contiguous()) + nbl = torch.bmm(tri_nb, light.reshape(-1, 9, 3)) + render, real = self.renderer( + proj_geo, texture, nbl, ori_img, is_visible, self.tris, pixel_valid) + proj_land = cal_land(proj_geo, is_visible, + self.lands_info, lands.shape[1]) + col_minus = torch.norm((render-real).reshape(-1, 3), + dim=1).reshape(ori_img.shape[0], -1) + col_dis = torch.mean(col_minus*pixel_valid) / \ + (torch.mean(pixel_valid)+0.00001) + land_dists = torch.norm( + (proj_land-lands).reshape(-1, 2), dim=1).reshape(ori_img.shape[0], -1) + lan_dis = torch.mean(land_dists) + return col_dis, lan_dis diff --git a/motion-gan-pipeline/preprocessing/face_tracking/util.py b/motion-gan-pipeline/preprocessing/face_tracking/util.py new file mode 100644 index 0000000..e04e226 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/face_tracking/util.py @@ -0,0 +1,66 @@ +import torch +import torch.nn as nn +import torch.nn.functional as F +from .geo_transform import euler2rot + + +def compute_tri_normal(geometry, tris): + tri_1 = tris[:, 0] + tri_2 = tris[:, 1] + tri_3 = tris[:, 2] + vert_1 = torch.index_select(geometry, 1, tri_1) + vert_2 = torch.index_select(geometry, 1, tri_2) + vert_3 = torch.index_select(geometry, 1, tri_3) + nnorm = torch.cross(vert_2-vert_1, vert_3-vert_1, 2) + normal = nn.functional.normalize(nnorm) + return normal + + +def rot_trans_pts(geometry, rot, trans): + rott_geo = torch.bmm(rot, geometry.permute(0, 2, 1)) + trans[:, :, None] + return rott_geo.permute(0, 2, 1) + + +def cal_lap_loss(tensor_list, weight_list): + lap_kernel = torch.Tensor((-0.5, 1.0, -0.5)).unsqueeze(0).unsqueeze(0).float().to(tensor_list[0].device) + loss_lap = 0 + for i in range(len(tensor_list)): + in_tensor = tensor_list[i] + in_tensor = in_tensor.view(-1, 1, in_tensor.shape[-1]) + out_tensor = F.conv1d(in_tensor, lap_kernel) + loss_lap += torch.mean(out_tensor**2)*weight_list[i] + return loss_lap + + +def proj_pts(rott_geo, focal_length, cxy): + cx, cy = cxy[0], cxy[1] + X = rott_geo[:, :, 0] + Y = rott_geo[:, :, 1] + Z = rott_geo[:, :, 2] + fxX = focal_length*X + fyY = focal_length*Y + proj_x = -fxX/Z + cx + proj_y = fyY/Z + cy + return torch.cat((proj_x[:, :, None], proj_y[:, :, None], 1/Z[:, :, None]), 2) + +def forward_rott(geometry, euler_angle, trans): + rot = euler2rot(euler_angle) + rott_geo = rot_trans_pts(geometry, rot, trans) + return rott_geo + +def forward_transform(geometry, euler_angle, trans, focal_length, cxy): + rot = euler2rot(euler_angle) + rott_geo = rot_trans_pts(geometry, rot, trans) + proj_geo = proj_pts(rott_geo, focal_length, cxy) + return proj_geo + + +def cal_lan_loss(proj_lan, gt_lan): + return torch.mean((proj_lan-gt_lan)**2) + +def cal_col_loss(pred_img, 
gt_img, img_mask):
+    pred_img = pred_img.float()
+    loss = torch.sqrt(torch.sum(torch.square(pred_img - gt_img), 3))*img_mask/255
+    loss = torch.sum(loss, dim=(1, 2)) / torch.sum(img_mask, dim=(1, 2))
+    loss = torch.mean(loss)
+    return loss
diff --git a/motion-gan-pipeline/preprocessing/facemesh_generator.py b/motion-gan-pipeline/preprocessing/facemesh_generator.py
new file mode 100644
index 0000000..55efb13
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/facemesh_generator.py
@@ -0,0 +1,68 @@
+# SPDX-License-Identifier: MIT
+# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details
+
+import os
+
+import torch
+
+from face_tracking.data_loader import load_dir
+from face_tracking.FLAME.FLAME import FLAME
+from face_tracking.FLAME.config import cfg
+from face_tracking.render_3dmm import Render_FLAME
+from face_tracking.util import *
+
+id_dim, exp_dim, tex_dim = 100, 50, 50
+
+
+class GeometryGenerator:
+
+    def __init__(self, dataset_base, h, w, frame_num, trackparamspath, mesh_dir):
+
+        self.dataset_base = dataset_base
+        self.mesh_dir = mesh_dir
+
+        # Load and unpack tracking params
+        params = torch.load(trackparamspath)
+        self.id_para = params['id']
+        self.exp_para = params['exp']
+        self.euler_angle = params['euler']
+        self.trans = params['trans']
+        self.focal_length = params['focal']
+        self.arg_focal = torch.max(self.focal_length).numpy()
+
+        self.light_para = params['light']
+        # self.text_para = params['text']
+
+        start_id = 0
+        end_id = frame_num
+        id_dir = dataset_base
+        self.lms, self.img_paths = load_dir(os.path.join(id_dir, 'landmarks'),
+                                            os.path.join(id_dir, 'frames'),
+                                            start_id, end_id)
+
+        self.num_frames = self.lms.shape[0]
+        self.batchsize = 1
+
+        # 3D model and Renderer
+        self.model_3dmm = FLAME(cfg.model)
+
+        self.device_render = 'cuda'
+        self.renderer = Render_FLAME(self.model_3dmm.faces_tensor,
+                                     self.arg_focal,
+                                     h, w,
+                                     self.batchsize,
+                                     self.device_render)
+
+    def generate_mesh(self, index):
+
+        id_para = self.id_para.expand(self.batchsize, -1).cuda()
+        exp_para = self.exp_para[index].expand(self.batchsize, -1).cuda()
+        euler = self.euler_angle[index].expand(self.batchsize, -1).cuda()
+        trans = self.trans[index].expand(self.batchsize, -1).cuda()
+
+        geometry = self.model_3dmm.forward_geo(id_para, exp_para, euler, trans, False)
+
+        # Zero-padded to match the '%05d.obj' mesh paths expected by save_params.
+        save_path = os.path.join(self.mesh_dir, '%05d.obj' % index)
+
+        self.renderer.get_and_save_mesh(geometry, save_path)
diff --git a/motion-gan-pipeline/preprocessing/get_densepose.py b/motion-gan-pipeline/preprocessing/get_densepose.py
new file mode 100644
index 0000000..35eabba
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/get_densepose.py
@@ -0,0 +1,116 @@
+# SPDX-License-Identifier: MIT
+# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details
+
+import os, sys
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), 'third/')))
+
+import cv2
+import numpy as np
+from typing import ClassVar, Dict
+import matplotlib.pyplot as plt
+from detectron2.config import get_cfg
+from detectron2.engine.defaults import DefaultPredictor
+from densepose import add_densepose_config
+from densepose.vis.base import CompoundVisualizer
+from densepose.vis.bounding_box import ScoredBoundingBoxVisualizer
+from densepose.vis.extractor import CompoundExtractor, create_extractor
+from densepose.vis.densepose_results import (
+    DensePoseResultsContourVisualizer,
+    DensePoseResultsFineSegmentationVisualizer,
+    DensePoseResultsUVisualizer,
+    DensePoseResultsVVisualizer,
+)
+from pathlib import Path
+
+
+def get_extractor_and_visualizer():
+
+    VISUALIZERS: ClassVar[Dict[str, object]] = {
+        "dp_contour": DensePoseResultsContourVisualizer,
+        "dp_segm": DensePoseResultsFineSegmentationVisualizer,
+        "dp_u": DensePoseResultsUVisualizer,
+        "dp_v": DensePoseResultsVVisualizer,
+        "bbox": ScoredBoundingBoxVisualizer,
+    }
+
+    vis_specs = ['dp_contour', 'bbox']
+    visualizers = []
+    extractors = []
+    for vis_spec in vis_specs:
+        vis = VISUALIZERS[vis_spec]()
+        visualizers.append(vis)
+        extractor = create_extractor(vis)
+        extractors.append(extractor)
+    visualizer = CompoundVisualizer(visualizers)
+    extractor = CompoundExtractor(extractors)
+
+    return extractor, visualizer
+
+
+def predict(img, cfg):
+    # NOTE: the predictor is rebuilt on every call; hoisting it out of the
+    # frame loop would be faster.
+    predictor = DefaultPredictor(cfg)
+    outputs = predictor(img)['instances']
+
+    # Visualize on a grayscale (3-channel) copy of the frame.
+    image = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
+    image = np.tile(image[:, :, np.newaxis], [1, 1, 3])
+
+    extractor, visualizer = get_extractor_and_visualizer()
+
+    data = extractor(outputs)
+    image_vis = visualizer.visualize(image, data)
+    return image_vis, data
+
+
+def extract_denseposes(video_name, input_path, output_path):
+
+    cfg = get_cfg()
+    add_densepose_config(cfg)
+    cfg.merge_from_file("third/densepose/densepose_rcnn_R_50_FPN_s1x.yaml")
+    cfg.MODEL.DEVICE = "cuda"
+    cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.5
+    cfg.MODEL.WEIGHTS = "third/densepose/model_final_162be9.pkl"
+
+    # process video
+    video_path = Path(input_path, video_name + '.mp4')
+    captura = cv2.VideoCapture(str(video_path))
+    i = 0
+    while captura.isOpened():
+
+        ret, img = captura.read()
+        if not ret:
+            break
+
+        image_vis, data = predict(img, cfg)
+
+        if data[0][0] is None:
+            print('No DensePose detection in frame %d, skipping.' % i)
+            # cv2.imwrite("image.png", image_vis)
+            continue
+
+        uv_map = data[0][0][0].uv * 255.0
+        uv_map = uv_map.cpu().numpy().transpose(1, 2, 0)
+        # Part labels are available here as well, but only the UV map is saved.
+        labels = data[0][0][0].labels.unsqueeze(2).cpu().numpy()
+
+        npy_file = str(output_path) + '/uv_%05d.npy' % i
+        np.save(npy_file, uv_map)
+        i += 1
+
+    captura.release()
+    # cv2.destroyAllWindows()
+
+
+if __name__ == "__main__":
+
+    # extract_denseposes appends '.mp4', so pass the stem only.
+    video_name = 'shortTED'
+    input_path = '../input_data/videos'
+    densepose_output_path = Path('../output_data', video_name, 'densepose')
+    extract_denseposes(video_name, input_path, densepose_output_path)
+
diff --git a/motion-gan-pipeline/preprocessing/get_mediapipe.py b/motion-gan-pipeline/preprocessing/get_mediapipe.py
new file mode 100644
index 0000000..128cb65
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/get_mediapipe.py
@@ -0,0 +1,82 @@
+# SPDX-License-Identifier: MIT
+# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details
+
+import mediapipe as mp
+import os
+from PIL import Image, ImageDraw
+import cv2
+import numpy as np
+from tqdm import tqdm
+
+def extract_mediapipe(input_path, output_path):
+
+    # process video
+    frames_path = os.path.join(input_path, 'frames')
+
+    mp_drawing = mp.solutions.drawing_utils
+    mp_drawing_styles = mp.solutions.drawing_styles
+    mp_pose = mp.solutions.pose
+
+    pose = mp_pose.Pose(
+        static_image_mode=True,
+        model_complexity=2,
+        enable_segmentation=True,
+        min_detection_confidence=0.5)
+
+    for idx, img_path in enumerate(tqdm([os.path.join(frames_path, f) for f in sorted(os.listdir(frames_path))])):
+        image = cv2.imread(img_path)
+        image_height, image_width, _ = image.shape
+        # Convert the BGR image to RGB before processing.
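+        # MediaPipe Pose returns 33 landmarks with x/y normalized to [0, 1];
+        # they are scaled to pixel coordinates below and saved per frame as a
+        # (33, 4) array of [x_px, y_px, z, visibility] named '%05d.npy'.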
+ results = pose.process(cv2.cvtColor(image, cv2.COLOR_BGR2RGB)) + + save_path = os.path.join(output_path, '%05d.npy' % idx) + + if not results.pose_landmarks: + ldk_poses = np.zeros(shape=(33, 4)) + + else: + ldk_poses = [] + # Save as numpy + for i in range(33): + # landmarks: x, y, z, visibility + ldk = results.pose_landmarks.landmark[i] + ldk_poses.append([ldk.x * image_width , ldk.y * image_height , ldk.z, ldk.visibility]) + + np.save(save_path, ldk_poses) + + # Save landmarks and edges for DEBUGGING + debug = False + if debug: + debug_path = os.path.join(output_path, 'debug') + os.makedirs(debug_path, exist_ok=True) + + annotated_image = image.copy() + condition = np.stack((results.segmentation_mask,) * 3, axis=-1) > 0.1 + bg_image = np.zeros(image.shape, dtype=np.uint8) + bg_image[:] = (192, 192, 192) # gray + annotated_image = np.where(condition, annotated_image, bg_image) + # Draw pose landmarks on the image. + mp_drawing.draw_landmarks( + annotated_image, + results.pose_landmarks, + mp_pose.POSE_CONNECTIONS, + landmark_drawing_spec=mp_drawing_styles.get_default_pose_landmarks_style()) + + cv2.imwrite(os.path.join(debug_path, str(idx) + '.png'), annotated_image) + + tmp = Image.open(img_path) + size = (image_height, image_width) + + for point in ldk_poses: + margin = (max(size) // 500) + 3 + ldmks = ([point[0] - margin, point[1] - margin, point[0] + margin, point[1] + margin]) + draw = ImageDraw.Draw(tmp) + draw.ellipse(ldmks, fill=(255)) + tmp.save(os.path.join(debug_path, str(idx) + '_landmarks.png')) + + +if __name__ == "__main__": + + input_path = '/home/alberto/motion-gan-pipeline/input_data/video/Clara' + output_path = '/home/alberto/motion-gan-pipeline/input_data/video/Clara/body_pose' + extract_mediapipe(input_path, output_path) diff --git a/motion-gan-pipeline/preprocessing/get_optical_flow.py b/motion-gan-pipeline/preprocessing/get_optical_flow.py new file mode 100644 index 0000000..59e56b6 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/get_optical_flow.py @@ -0,0 +1,52 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import numpy as np +from skimage.color import rgb2gray +from skimage.registration import optical_flow_tvl1 +from PIL import Image +from tqdm import tqdm +import os + +def compute_optical_flow_slow(frames, opticalflow_dir, debug_opticalflow_dir): + + for i in tqdm(range(len(frames)-1)): + out_name = os.path.join(opticalflow_dir,'%05d.npy' % i) + # --- Load the sequence + image0 = np.array(Image.open(frames[i])) + image1 = np.array(Image.open(frames[i+1])) + + # --- Convert the images to gray level: color is not supported. 
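+        # optical_flow_tvl1 returns the row/column displacement fields (v, u),
+        # each shaped like the input frame; they are stacked into a single
+        # (2, H, W) array and saved as '%05d.npy' below.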
+ image0 = rgb2gray(image0) + image1 = rgb2gray(image1) + + # --- Compute the optical flow + v, u = optical_flow_tvl1(image0, image1) + + of = np.stack((v,u),axis=0) + np.save(out_name, of) + + return + +def compute_optical_flow(frames, opticalflow_dir, debug_opticalflow_dir): + from mmflow.apis import inference_model, init_model + from mmflow.datasets import visualize_flow, write_flow + + import mmcv + + # Specify the path to model config and checkpoint file + config_file = 'third/mmflow/flownet2CS/flownet2cs_8x1_sfine_flyingthings3d_subset_384x768.py' + checkpoint_file = 'third/mmflow/flownet2CS/flownet2cs_8x1_sfine_flyingthings3d_subset_384x768.pth' + device = 'cuda:0' + + # init a model + model = init_model(config_file, checkpoint_file, device=device) + + # inference the demo image + for i in tqdm(range(len(frames)-1)): + out_name = os.path.join(opticalflow_dir,'%05d.flo' % i) + result = inference_model(model, frames[i] , frames[i+1]) + write_flow(result, flow_file=out_name) + # save the visualized flow map + debug_file = os.path.join(debug_opticalflow_dir, '%05d.png' % i) + flow_map = visualize_flow(result, save_file=debug_file) diff --git a/motion-gan-pipeline/preprocessing/get_scale_factor.py b/motion-gan-pipeline/preprocessing/get_scale_factor.py new file mode 100644 index 0000000..af2e1a2 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/get_scale_factor.py @@ -0,0 +1,118 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import numpy as np +import torch +import os +from preprocessing.face_tracking.FLAME.FLAME import FLAME, FLAMETex +from preprocessing.face_tracking.FLAME.config import cfg +from pytorch3d.io import IO, save_obj +import trimesh +from preprocessing.face_tracking.facemodel import Face_3DMM + +# Create FLAME model +sel_num = 1 +id_dim, exp_dim, tex_dim = 100, 50, 50 + +id_para = torch.zeros((1, id_dim), dtype=torch.float32).cuda() +id_para = id_para.expand(sel_num, -1) +exp_para = torch.zeros((sel_num, exp_dim), dtype=torch.float32).cuda() +euler_angle = torch.zeros((sel_num, 3), dtype=torch.float32).cuda() +trans = torch.zeros((sel_num, 3), dtype=torch.float32).cuda() +focal_length = torch.tensor([1100], dtype=torch.float32).cuda() +w, h = (512, 512) +cxy = torch.tensor((w/2.0, h/2.0), dtype=torch.float32).cuda() + +# Make model +FLAME_3dmm = FLAME(cfg.model) + +flame_landmarks = FLAME_3dmm.get_3dlandmarks(id_para, exp_para, euler_angle, trans, focal_length, cxy) +flame_geometry = FLAME_3dmm.forward_geo(id_para, exp_para, euler_angle, trans) + +print(flame_geometry.size()) +print(flame_landmarks.size()) + +# Save mesh +save_path= 'test_FLAME_mesh.obj' +verts = flame_geometry.cpu()[0] +faces = FLAME_3dmm.faces_tensor.cpu() +save_obj(save_path, verts=verts, faces=faces) + +# Save landmarks +save_path = 'test_FLAME_landmarks.obj' +flame_landmarks = flame_landmarks.cpu()[0] +pcl = trimesh.PointCloud(flame_landmarks) +pcl.export(save_path) + + +############################ + +# Create weird Basel model + +dir_path = os.path.dirname(os.path.realpath(__file__)) +sel_num = 1 +id_dim, exp_dim, tex_dim, point_num = 100, 79, 100, 34650 + +id_para = torch.zeros((1, id_dim), dtype=torch.float32).cuda() +id_para = id_para.expand(sel_num, -1) +exp_para = torch.zeros((sel_num, exp_dim), dtype=torch.float32).cuda() +euler_angle = torch.zeros((sel_num, 3), dtype=torch.float32).cuda() +trans = torch.zeros((sel_num, 3), dtype=torch.float32).cuda() +focal_length = torch.tensor([1100], dtype=torch.float32).cuda() +w, h = 
(512, 512) +cxy = torch.tensor((w/2.0, h/2.0), dtype=torch.float32).cuda() + +Basel_3dmm = Face_3DMM(os.path.join(dir_path, 'face_tracking/3DMM'), id_dim, exp_dim, tex_dim, point_num) + +basel_landmarks = Basel_3dmm.get_3dlandmarks(id_para, exp_para, euler_angle, trans, focal_length, cxy) +basel_geometry = Basel_3dmm.forward_geo(id_para, exp_para) + +print(basel_geometry.size()) +print(basel_landmarks.size()) + +# Save mesh +save_path= 'test_BASEL_mesh.obj' +verts = basel_geometry.cpu()[0] +topo_info = np.load(os.path.join( + dir_path, 'face_tracking/3DMM', 'topology_info.npy'), allow_pickle=True).item() +faces = torch.as_tensor(topo_info['tris']).cuda().float() + +save_obj(save_path, verts=verts, faces=faces) + +# Save landmarks +save_path= 'test_BASEL_landmarks.obj' +basel_landmarks = basel_landmarks.cpu()[0] +pcl = trimesh.PointCloud(basel_landmarks) +pcl.export(save_path) + +# find scale + +def cal_lan_loss(proj_lan, gt_lan): + return torch.mean((proj_lan-gt_lan)**2) + +scale = torch.ones((1, 1), requires_grad=True) +optimizer = torch.optim.Adam([scale], lr=.1) + +for i in range(200): + scale_batch = scale.expand(68, 3) + scaled_landmark_FLAME = torch.mul(flame_landmarks, scale_batch) + loss_lan = cal_lan_loss(scaled_landmark_FLAME, basel_landmarks) + + loss = loss_lan + optimizer.zero_grad() + loss.backward() + optimizer.step() + + if i == 100: + for param_group in optimizer.param_groups: + param_group['lr'] = 0.01 + + +print('Final scale: ', scale) + +# Save mesh +save_path= 'test_FLAME_scaled_mesh.obj' +scale = scale.expand(1, 3) +verts = torch.mul(flame_geometry[0].cpu(), scale) +faces = FLAME_3dmm.faces_tensor.cpu() +save_obj(save_path, verts=verts, faces=faces) diff --git a/motion-gan-pipeline/preprocessing/preprocessing.py b/motion-gan-pipeline/preprocessing/preprocessing.py new file mode 100644 index 0000000..a468a0a --- /dev/null +++ b/motion-gan-pipeline/preprocessing/preprocessing.py @@ -0,0 +1,592 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +import os +import cv2 +from skimage import io +import numpy as np +from tqdm import tqdm +import torch +import json + +from autils.options import PreprocessingOptions + + + +class Preprocessor: + + def __init__(self, opt): + + self.opt = opt + + self.step = opt.step + + self.dataroot = opt.dataroot + self.name = opt.name + self.target_fps = opt.target_fps + self.type = opt.preprocessing_type + self.train_split = opt.train_split + self.val_split = opt.val_split + + self.dataset_base = os.path.join(self.dataroot, self.name) + + self.mapp = { + '0': lambda: self.deepspeech_preprocessing(), + '1': lambda: self.extract_images(), + '2': lambda: self.landmark_detection(), + '3': lambda: self.head_pose_estimation(), + '4': lambda: self.audioexpression_features(), + '5': lambda: self.face_matting(), + '6': lambda: self.extract_meshes(), + '7': lambda: self.save_params(), + '8': lambda: self.speech_to_text(), + '9': lambda: self.body_tracking(), + '10': lambda: self.emotion_detection(), + '11': lambda: self.edge_detection(), + '12': lambda: self.audio_noise_reduction(), + '13': lambda: self.optical_flow(), + } + + def initialize(self): + + self.audiofeature_dir = os.path.join(self.dataset_base, "audio_feature") + + self.audioexpr_dir = os.path.join(self.dataset_base, 'audio_expr') + + self.decaexpr_dir = os.path.join(self.dataset_base, 'deca_expr') + + self.framesdir = os.path.join(self.dataset_base, 'frames') + + self.landmarkdir = os.path.join(self.dataset_base, 'landmarks') 
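+        # Every step in self.mapp writes into one of the directories defined
+        # here (all rooted at <dataroot>/<name>) and skips itself on re-runs
+        # when its output directory is already populated.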
+ + self.projectedlankmarkdir = os.path.join(self.dataset_base, 'debug', 'proj_landmarks') + + self.trackparamspath = os.path.join(self.dataset_base, 'track_params.pt') + self.textpath = os.path.join(self.dataset_base, 'transcript.txt') + + self.mattdir = os.path.join(self.dataset_base, 'matting') + + self.debug_dir = os.path.join(self.dataset_base, 'debug') + + self.mesh_dir = os.path.join(self.dataset_base, 'meshes') + + self.expr_masks_dir = os.path.join(self.dataset_base, 'expr_masks') + + self.body_dir = os.path.join(self.dataset_base, 'body_pose') + + self.emotion_dir = os.path.join(self.dataset_base, 'emotions') + + self.edges_dir = os.path.join(self.dataset_base, 'edges') + + self.cropped_dir = os.path.join(self.dataset_base, 'cropped') + + self.APC_path = os.path.join(self.dataset_base, 'APC_feat.npy') + + self.opticalflow_dir = os.path.join(self.dataset_base, 'opticalflow') + self.debug_opticalflow_dir = os.path.join(self.dataset_base, 'debug', 'opticalflow') + + def get_valid_frames(self): + + max_frame_num = len(os.listdir(self.framesdir)) + + valid_img_ids = [] + for i in range(max_frame_num): + if os.path.isfile(os.path.join(self.landmarkdir, '%05d.lms' % i)): + valid_img_ids.append(i) + + valid_img_num = len(valid_img_ids) + tmp_img = cv2.imread(os.path.join(self.framesdir, '%05d.jpg' % valid_img_ids[0])) + # tmp_img = cv2.imread(os.path.join(self.framesdir, str(valid_img_ids[0]) + '.png')) + + h, w = tmp_img.shape[0], tmp_img.shape[1] + + self.max_frame_num, self.valid_img_ids, self.valid_img_num, self.h, self.w = \ + max_frame_num, valid_img_ids, valid_img_num, h, w + + def deepspeech_preprocessing(self): + from deepspeech_features import extract_ds + + print(f'\n\n--- Step 0: Extracting DeepSpeech features ---\n\n') + + os.makedirs(self.audiofeature_dir, exist_ok=True) + + if len(os.listdir(self.audiofeature_dir)) != 0: + print('Already done.\n\n') + + return + + extract_ds(self.dataset_base, self.name, self.type, self.target_fps) + + def extract_images(self): + print(f'\n\n--- Step 1: Extracting images from video ---\n\n') + + os.makedirs(self.framesdir, exist_ok=True) + + if len(os.listdir(self.framesdir)) != 0: + print('Already done.\n\n') + + return + + def image_resize(image, width = None, height = None, inter = cv2.INTER_AREA): + # initialize the dimensions of the image to be resized and + # grab the image size + dim = None + (h, w) = image.shape[:2] + + # if both the width and height are None, then return the + # original image + if width is None and height is None: + return image + + # check to see if the width is None + if width is None: + # calculate the ratio of the height and construct the + # dimensions + r = height / float(h) + dim = (int(w * r), height) + + # otherwise, the height is None + else: + # calculate the ratio of the width and construct the + # dimensions + r = width / float(w) + dim = (width, int(h * r)) + + # resize the image + resized = cv2.resize(image, dim, interpolation = inter) + + # return the resized image + return resized + + vid_file = os.path.join(self.dataset_base, self.name + '.mp4') + + cap = cv2.VideoCapture(vid_file) + frame_num = 0 + while (True): + _, frame = cap.read() + if frame is None: + break + # resize frame to be have max dimention = 720 + (h, w) = frame.shape[:2] + if w > h: + frame = image_resize(frame, width=720) + else: + frame = image_resize(frame, height=720) + + cv2.imwrite(os.path.join(self.framesdir, '%05d.jpg' % frame_num), frame) + frame_num = frame_num + 1 + cap.release() + + def landmark_detection(self): + 
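+        # Runs face_alignment's 2D detector on every frame and writes one
+        # '.lms' text file of 68 (x, y) landmark rows per image; frames where
+        # detection fails are skipped.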
import face_alignment + + print('\n\n--- Step 2: Detect Landmarks ---\n\n') + + os.makedirs(self.landmarkdir, exist_ok=True) + + if len(os.listdir(self.landmarkdir)) != 0: + print('Already done.\n\n') + return + + fa = face_alignment.FaceAlignment(face_alignment.LandmarksType._2D, flip_input=False) + for image_path in tqdm(os.listdir(self.framesdir)): + if image_path.endswith('.jpg') or image_path.endswith('.png'): + input = io.imread(os.path.join(self.framesdir, image_path))[:, :, :3] + preds = fa.get_landmarks(input) + try: + if len(preds) > 0: + lands = preds[0].reshape(-1, 2)[:, :2] + np.savetxt(os.path.join(self.landmarkdir, image_path[:-3] + 'lms'), lands, '%f') + + except TypeError: + pass + + def head_pose_estimation(self): + from face_tracking.face_tracker import track_face_FLAME, track_face + from face_tracking.face_tracker_deca import track_face_DECA + + print('\n\n--- Step 3: Estimate Head Pose ---\n\n') + self.get_valid_frames() + + os.makedirs(self.debug_dir, exist_ok=True) + os.makedirs(self.expr_masks_dir, exist_ok=True) + + if os.path.isfile(self.trackparamspath): + print('Already done.\n\n') + return + + if self.opt.use_DECA: + os.makedirs(self.decaexpr_dir, exist_ok=True) + + print('Using DECA tracking..\n') + track_face_DECA(self.dataset_base, self.h, self.w, + self.max_frame_num, + self.trackparamspath, + self.decaexpr_dir, + self.expr_masks_dir) + + elif self.opt.use_FLAME: + print('Using FLAME tracking..\n') + track_face_FLAME(self.dataset_base, self.h, self.w, + self.max_frame_num, + self.trackparamspath, + self.decaexpr_dir, + self.expr_masks_dir) + + elif self.opt.use_BASEL: + print('Using BASEL tracking..\n') + track_face(self.dataset_base, + self.h, + self.w, + self.max_frame_num, + self.trackparamspath, + self.expr_masks_dir) + + def audioexpression_features(self): + from third.Audio2ExpressionNet.get_audioexpr import get_audioexpr + + print('\n\n--- Step 4: Extract Audio-Expressions ---\n\n') + + os.makedirs(self.audioexpr_dir, exist_ok=True) + + if len(os.listdir(self.audioexpr_dir)) != 0: + print('Already done.\n\n') + + return + + get_audioexpr(self.name, self.dataset_base, self.audioexpr_dir) + + def face_matting(self): + from third.RobustVideoMatting.model import MattingNetwork + from third.RobustVideoMatting.inference import convert_video + + print('\n\n--- Step 5: Face Matting ---\n\n') + + os.makedirs(self.mattdir, exist_ok=True) + + if len(os.listdir(self.mattdir)) != 0: + print('Already done.\n\n') + return + + matting_path = './third/RobustVideoMatting/' + + checkpoint_path = matting_path + 'checkpoints/' + # vid_file = os.path.join(self.dataset_base, self.name + '.mp4') + + # if not os.path.isfile(vid_file): + vid_file = self.framesdir + + model = MattingNetwork('mobilenetv3').eval().cuda() # or "resnet50" + model.load_state_dict(torch.load(checkpoint_path + 'rvm_mobilenetv3.pth')) + + convert_video( + model, # The model, can be on any device (cpu or cuda). + input_source=vid_file, # A video file or an image sequence directory. + output_type='png_sequence', # Choose "video" or "png_sequence" + output_composition=self.mattdir, # File path if video; directory path if png sequence. + output_video_mbps=4, # Output video mbps. Not needed for png sequence. + downsample_ratio=None, # A hyperparameter to adjust or use None for auto. + seq_chunk=12, # Process n frames at once for better parallelism. 
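+            # The matted PNGs written to output_composition (one per frame)
+            # are read back later by the edge-detection and optical-flow steps.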
+ ) + + def extract_meshes(self): + from facemesh_generator import GeometryGenerator + + print('\n\n--- Step 6: Extract Meshes ---\n\n') + self.get_valid_frames() + + os.makedirs(self.mesh_dir, exist_ok=True) + + if len(os.listdir(self.mesh_dir)) != 0: + print('Already done.\n\n') + return + + generator = GeometryGenerator(self.dataset_base, + self.h, self.w, + self.max_frame_num, + self.trackparamspath, + self.mesh_dir) + + for i in tqdm(range(self.valid_img_num)): + generator.generate_mesh(i) + + def save_params(self): + from face_tracking.geo_transform import euler2rot + + print('\n\n--- Step 7: Save Transform Param ---\n\n') + self.get_valid_frames() + + params_dict = torch.load(self.trackparamspath) + w, h, valid_img_ids = self.w, self.h, self.valid_img_ids + focal_len = params_dict['focal'] + euler_angle = params_dict['euler'] + trans = params_dict['trans'] / 10.0 # TODO: why? + valid_num = euler_angle.shape[0] - 1 # Last element not valid + print(valid_num, self.valid_img_num) + + train_split = int(self.train_split) + val_split = int(self.val_split) + train_split + + train_ids = torch.arange(0, train_split) + val_ids = torch.arange(train_split, val_split) + test_ids = torch.arange(val_split, valid_num) + + rot = euler2rot(euler_angle) + rot_inv = rot.permute(0, 2, 1) + trans_inv = -torch.bmm(rot_inv, trans.unsqueeze(2)) + + pose = torch.eye(4, dtype=torch.float32) + save_ids = ['train', 'val', 'test'] + train_val_ids = [train_ids, val_ids, test_ids] + mean_z = float(torch.mean(trans_inv[:, 2, 0]).item()) + for i in range(3): + transform_dict = dict() + transform_dict['focal_len'] = float(focal_len[0]) + transform_dict['camera_angle_x'] = float(w / 2.0) + transform_dict['camera_angle_y'] = float(h / 2.0) + transform_dict['frames'] = [] + ids = train_val_ids[i] + save_id = save_ids[i] + + for i in tqdm(ids): + i = i.item() + frame_dict = dict() + + # image and audio id + frame_dict['img_id'] = int(valid_img_ids[i]) + frame_dict['aud_id'] = int(valid_img_ids[i]) + + # add audio-expression + try: + # add audio_feature + frame_dict['audio_feature'] = np.load(os.path.join(self.audiofeature_dir, + '%05d.deepspeech.npy' % int(valid_img_ids[i])) + ).tolist() + + except FileNotFoundError: + print('Missing: %05d.deepspeech.npy' % int(valid_img_ids[i])) + pass + + # add audio-expression + try: + frame_dict['audio_expr'] = np.load(os.path.join(self.audioexpr_dir, + 'audio_expression_%05d.npy' % int(valid_img_ids[i])) + ).tolist() + except FileNotFoundError: + print('Missing: audio_expression_%5d.npy' % int(valid_img_ids[i])) + pass + + # add deca-expression + try: + frame_dict['deca_expr'] = np.load(os.path.join(self.decaexpr_dir, + '%05d.npy' % i) + ).tolist() + except FileNotFoundError: + print(f'Missing: %05d.npy' % i) + pass + + # add mesh path + try: + frame_dict['mesh_path'] = os.path.join(self.mesh_dir, '%05d.obj' % i) + + except FileNotFoundError: + print('Missing: ', os.path.join(self.mesh_dir, '%05d.obj' % i)) + pass + + # add expression mask + try: + frame_dict['expr_mask'] = os.path.join(self.expr_masks_dir, '%05d.jpg' % i) + + except FileNotFoundError: + print('Missing: ', os.path.join(self.expr_masks_dir, '%05d.jpg' % i)) + pass + + pose[:3, :3] = rot_inv[i] + pose[:3, 3] = trans_inv[i, :, 0] + frame_dict['transform_matrix'] = pose.numpy().tolist() + + dir_pose = torch.eye(4, dtype=torch.float32) + dir_pose[:3, :3] = rot[i] + dir_pose[:3, 3] = trans[i] + frame_dict['direct_transform'] = dir_pose.numpy().tolist() + + lms = np.loadtxt(os.path.join(self.landmarkdir, '%05d.lms' % 
valid_img_ids[i])) + min_x, max_x = np.min(lms, 0)[0], np.max(lms, 0)[0] + + cx = int((min_x + max_x) / 2.0) + cy = int(lms[27, 1]) + + h_w = int((max_x - cx) * 1.5) + h_h = int((lms[8, 1] - cy) * 1.15) + + rect_x = cx - h_w + rect_y = cy - h_h + if rect_x < 0: + rect_x = 0 + if rect_y < 0: + rect_y = 0 + rect_w = min(w - 1 - rect_x, 2 * h_w) + rect_h = min(h - 1 - rect_y, 2 * h_h) + + rect = np.array((rect_x, rect_y, rect_w, rect_h), dtype=np.int32) + frame_dict['bbox'] = rect.tolist() + + # add dict + transform_dict['frames'].append(frame_dict) + + with open(os.path.join(self.dataset_base, 'transforms_' + save_id + '.json'), 'w') as fp: + json.dump(transform_dict, fp, indent=2, separators=(',', ': ')) + + near_far = {'near': str(mean_z - 0.2), + 'far': str(mean_z + 0.4)} + + near_far_json = os.path.join(self.dataset_base, 'near-far.json') + with open(near_far_json, 'w') as fp: + json.dump(near_far, fp, indent=2, separators=(',', ': ')) + + def speech_to_text(self): + from preprocessing.speech_to_text import speech_to_text + print('\n\n--- Step 8: Speech to text ---\n\n') + + if os.path.isfile(self.textpath): + print('Already done.\n\n') + return + + speech_to_text(self.name, self.dataset_base, self.textpath) + + def body_tracking(self): + # from get_densepose import extract_denseposes + from get_mediapipe import extract_mediapipe + + print('\n\n--- Step 9: Body tracking ---\n\n') + + os.makedirs(self.body_dir, exist_ok=True) + + if len(os.listdir(self.body_dir)) != 0: + print('Already done.\n\n') + return + + extract_mediapipe(self.dataset_base, self.body_dir) + + def emotion_detection(self): + from emoca_tracker import emotion_detection + + print('\n\n--- Step 10: Emotion Detection ---\n\n') + self.get_valid_frames() + + os.makedirs(self.emotion_dir, exist_ok=True) + + if len(os.listdir(self.emotion_dir)) != 0: + print('Already done.\n\n') + return + + print('Using EMOCA emotion tracking..\n') + emotion_detection(self.dataset_base, + self.emotion_dir) + + return + + def edge_detection(self): + from edge_creation.utils import make_edges, make_edges_body + + print('\n\n--- Step 11: Edge Detection ---\n\n') + self.get_valid_frames() + + os.makedirs(self.edges_dir, exist_ok=True) + os.makedirs(self.cropped_dir, exist_ok=True) + + if len(os.listdir(self.edges_dir)) != 0: + print('Already done.\n\n') + return + + ## Switch here for body edges or tracked body pose edges + tracked = True + + if not tracked: + print('Making body edges using extracted edges..') + make_edges(self.mattdir, + self.projectedlankmarkdir, + self.edges_dir, + self.cropped_dir) + + else: + # TODO: REMOVE split_val + print('Making body edges using tracked body poses..') + make_edges_body(self.mattdir, + self.projectedlankmarkdir, + self.edges_dir, + self.cropped_dir, + self.body_dir, + split_val=0.1) + + return + + def audio_noise_reduction(self): + from scipy.io import wavfile + import noisereduce as nr + print('\n\n--- Step 12: Noise Reduction ---\n\n') + print('Skipping for now') + return + audio_path = os.path.join(self.dataset_base, self.name + '.wav') + # load data + rate, audio = wavfile.read(audio_path) + # perform noise reduction + reduced_noise = nr.reduce_noise(y=audio, sr=rate) + wavfile.write(audio_path, rate, reduced_noise) + return self + + def optical_flow(self): + from get_optical_flow import compute_optical_flow + + print('\n\n--- Step 13: Optical Flow ---\n\n') + self.get_valid_frames() + + os.makedirs(self.opticalflow_dir, exist_ok=True) + os.makedirs(self.debug_opticalflow_dir, exist_ok=True) + + if 
len(os.listdir(self.opticalflow_dir)) != 0: + print('Already done.\n\n') + return + + frames = [os.path.join(self.mattdir, f) for f in sorted(os.listdir(self.mattdir))] + + compute_optical_flow(frames, self.opticalflow_dir, self.debug_opticalflow_dir) + + return + + def preprocess_video(self): + + print('Preprocessing video: ', self.name) + + self.mapp[self.step]() + + def preprocess_audio(self): + + print('Preprocessing audio: ', self.name) + + self.mapp[self.step]() + + def __call__(self): + if self.type == 'audio': + self.preprocess_audio() + + else: + self.preprocess_video() + + +if __name__ == '__main__': + + opt = PreprocessingOptions().parse() + from pathlib import Path + opt.name = Path(opt.name).stem + preprocessor = Preprocessor(opt) + preprocessor.initialize() + preprocessor() + + + + + + + + diff --git a/motion-gan-pipeline/preprocessing/process_audio.sh b/motion-gan-pipeline/preprocessing/process_audio.sh new file mode 100644 index 0000000..a2674d9 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/process_audio.sh @@ -0,0 +1,32 @@ + +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details +eval "$(conda shell.bash hook)" +conda activate deepspeech + +DATAROOT=$1 +NAME=$2 + +TARGET_FPS=$3 +SAMPLERATE=$4 + +TYPE=audio + +tracker=--use_DECA + +STEPS=(12 0) + +for STEP in ${STEPS[@]} +do + python -W ignore preprocessing.py --dataroot $DATAROOT \ + --name $NAME \ + --target_fps $TARGET_FPS \ + --preprocessing_type $TYPE \ + --step $STEP \ + $tracker + +done + +conda deactivate + + diff --git a/motion-gan-pipeline/preprocessing/process_audio_dataset.sh b/motion-gan-pipeline/preprocessing/process_audio_dataset.sh new file mode 100644 index 0000000..f750d99 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/process_audio_dataset.sh @@ -0,0 +1,39 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +# DATAROOT=/media/apennino/TED384-v2 +# DATAROOT=/media/apennino/MTC +# DATASETS=("Test") +DATAROOT=$1 +DATASETS=$2 + +# TARGET_FPS=25 +# SAMPLERATE=16000 +TARGET_FPS=$3 +SAMPLERATE=$4 + +TYPE=audio + +tracker=--use_DECA + +STEPS=(0) + +for DATASET in ${DATASETS[@]} +do + for entry in "$DATAROOT/$DATASET"/* + do + for STEP in ${STEPS[@]} + do + + python preprocessing.py --dataroot $DATAROOT/$DATASET \ + --name $entry \ + --target_fps $TARGET_FPS \ + --preprocessing_type $TYPE \ + --step $STEP \ + $tracker + + done + done +done + + diff --git a/motion-gan-pipeline/preprocessing/process_video.sh b/motion-gan-pipeline/preprocessing/process_video.sh new file mode 100644 index 0000000..3662e63 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/process_video.sh @@ -0,0 +1,50 @@ +# ''' +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details +# ''' +eval "$(conda shell.bash hook)" +conda activate deepspeech +DATAROOT=$1 +NAME=$2 + +TARGET_FPS=$3 +SAMPLERATE=$4 + +TYPE=video + +tracker=--use_DECA + +# STEPS=(12 0 1 2 3 4 5 9 11 13) +STEPS=(12 0 1 2 3 4 5 9 11) + +process=$true + +if $process; +then + # # ffmpeg -i "$entry/$tmp.mp4" -r $TARGET_FPS -y "$entry/$tmp.tmp.mp4" + # mkvmerge --default-duration 0:${TARGET_FPS}fps --fix-bitstream-timing-information 0 "$DATAROOT/$NAME/$NAME.mp4" -o "$DATAROOT/$NAME/$NAME.tmp.mkv" + # ffmpeg -i "$DATAROOT/$NAME/$NAME.tmp.mkv" -c:v copy -y "$DATAROOT/$NAME/$NAME.mp4" + # rm "$DATAROOT/$NAME/$NAME.tmp.mkv" + + ffmpeg -i "$DATAROOT/$NAME/$NAME.mp4" -ar $SAMPLERATE -y 
"$DATAROOT/$NAME/$NAME.wav" + +fi + +for STEP in ${STEPS[@]} +do + + if [ $STEP = "0" ]; + then + conda activate deepspeech + else + conda activate pyenv + fi + python -W ignore preprocessing.py --dataroot $DATAROOT \ + --name $NAME \ + --target_fps $TARGET_FPS \ + --preprocessing_type $TYPE \ + --step $STEP \ + $tracker +done + +conda deactivate \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/process_video_dataset.sh b/motion-gan-pipeline/preprocessing/process_video_dataset.sh new file mode 100644 index 0000000..a582b64 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/process_video_dataset.sh @@ -0,0 +1,69 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +#!/bin/bash + +eval "$(conda shell.bash hook)" + +# DATAROOT=/media/apennino/TED384-v2 +# DATAROOT=/media/apennino/MTC +# DATAROOT=/media/apennino/EmotionDetection +# DATAROOT=/media/apennino/TMP_AVSpeech +# DATASETS=("Test") +# DATAROOT=/media/apennino/ +# DATASETS=("AI+Art") +DATAROOT=$1 +DATASETS=$2 + +# TARGET_FPS=25 +# SAMPLERATE=16000 +TARGET_FPS=$3 +SAMPLERATE=$4 + +TYPE=video +# + +tracker=--use_DECA + +STEPS=(0 2 3 4 5 1 11) +# STEPS=(0 2 3 4 1) +process=$true + +for DATASET in ${DATASETS[@]} +do + for entry in "$DATAROOT/$DATASET"/* + do + if $process; + then + tmp=$(basename "$entry") + # ffmpeg -i "$entry/$tmp.mp4" -r $TARGET_FPS -y "$entry/$tmp.tmp.mp4" + mkvmerge --default-duration 0:${TARGET_FPS}fps --fix-bitstream-timing-information 0 "$entry/$tmp.mp4" -o "$entry/$tmp.tmp.mkv" + ffmpeg -i "$entry/$tmp.tmp.mkv" -c:v copy -y "$entry/$tmp.mp4" + rm "$entry/$tmp.tmp.mkv" + + ffmpeg -i "$entry/$tmp.mp4" -ar $SAMPLERATE -y "$entry/$tmp.wav" + + fi + + tmp=$(basename "$entry") + + for STEP in ${STEPS[@]} + do + + if [ $STEP = "10" ]; + then + conda activate work36_cu11 + else + conda activate adnerf + fi + + python preprocessing.py --dataroot $DATAROOT/$DATASET \ + --name $tmp \ + --target_fps $TARGET_FPS \ + --preprocessing_type $TYPE \ + --step $STEP \ + $tracker + + done + done +done diff --git a/motion-gan-pipeline/preprocessing/speech_to_text.py b/motion-gan-pipeline/preprocessing/speech_to_text.py new file mode 100644 index 0000000..71ca159 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/speech_to_text.py @@ -0,0 +1,34 @@ +# SPDX-License-Identifier: MIT +# © 2020-2022 ETH Zurich and other contributors, see AUTHORS.txt for details + +from transformers import Speech2TextProcessor, Speech2TextForConditionalGeneration +import soundfile as sf +import librosa +import os +import torch + + +def map_to_array(batch): + speech, _ = sf.read(batch["file"]) + batch["speech"] = speech + return batch + + +def speech_to_text(name, dataset_base, textpath): + data, samplerate = librosa.load(os.path.join(dataset_base, name +'.wav'), sr=16000) + data = torch.from_numpy(data) + + model = Speech2TextForConditionalGeneration.from_pretrained("facebook/s2t-small-librispeech-asr") + processor = Speech2TextProcessor.from_pretrained("facebook/s2t-small-librispeech-asr") + + inputs = processor(data, sampling_rate=samplerate, return_tensors="pt") + generated_ids = model.generate(inputs["input_features"], attention_mask=inputs["attention_mask"]) + + transcription = processor.batch_decode(generated_ids) + + textfile = open(textpath, "w") + for element in transcription: + textfile.write(element + "\n") + textfile.close() + + diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/BaselModel/__init__.py 
b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/BaselModel/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/BaselModel/basel_model.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/BaselModel/basel_model.py new file mode 100644 index 0000000..26a50c0 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/BaselModel/basel_model.py @@ -0,0 +1,94 @@ +import os +import torch +import torch.nn as nn +import torchvision.transforms as transforms +import numpy as np + +######################### +N_EXPRESSIONS=76 # <<<<<< NEEDS TO BE SPECIFIED ACCORDING TO THE USED FACE MODEL +# N_EXPRESSIONS=299 +######################### + +#import soft_renderer as sr +# +#class MorphableModel(nn.Module): +# def __init__(self, filename_average=''): +# super(MorphableModel, self).__init__() +# +# print('Load Morphable Model (Basel)') +# +# #filename_mesh = os.path.join(opt.dataroot, opt.phase + '/average_model.obj') +# filename_mesh = filename_average +# if filename_average=='': +# print('use default identity') +# filename_mesh = './BaselModel/average.obj' +# mesh = sr.Mesh.from_obj(filename_mesh, normalization=False, load_texture=True) +# self.average_vertices = mesh.vertices[0] +# self.faces = mesh.faces[0] +# self.average_vertices = self.average_vertices[None, :, :] # [num_vertices, XYZ] -> [batch_size=1, num_vertices, XYZ] +# self.faces = self.faces[None, :, :] # [num_faces, 3] -> [batch_size=1, num_faces, 3] +# self.textures = mesh.textures +# +# self.num_vertices = self.average_vertices.shape[1] +# self.num_faces = self.faces.shape[1] +# print('vertices:', self.average_vertices.shape) +# print('faces:', self.faces.shape) +# +# ## basis function +# self.expression_basis = np.memmap('./BaselModel/ExpressionBasis.matrix', dtype='float32', mode='r').__array__()[1:] # first entry is the size +# self.expression_basis = np.resize(self.expression_basis, (N_EXPRESSIONS, self.num_vertices, 4))[:,:,0:3] +# self.expression_basis = torch.tensor(self.expression_basis.astype(np.float32)).cuda() # N_EXPRESSIONS x num_vertices x 3 +# self.expression_basis = torch.transpose(self.expression_basis,0,2) # transpose for matmul +# print('expression_basis', self.expression_basis.shape) +# +# # default expression +# zeroExpr = torch.zeros(1, N_EXPRESSIONS, dtype=torch.float32).cuda() +# self.morph(zeroExpr) +# +# +# def save_model_to_obj_file(self, filename, mask=None): +# faces_cpu = self.faces.detach().cpu().numpy() +# vertices_cpu = self.vertices.detach().cpu().numpy() +# +# mask_cpu = None +# if not type(mask) == type(None): +# mask_cpu = mask.detach().cpu().numpy() +# +# f = open(filename, 'w') +# if type(mask) == type(None): +# for i in range(0, self.num_vertices): +# f.write('v ' + str(vertices_cpu[0, i, 0]) + ' ' + str(vertices_cpu[0, i, 1]) + ' ' + str(vertices_cpu[0, i, 2]) + '\n') +# else: +# for i in range(0, self.num_vertices): +# f.write('v ' + str(vertices_cpu[0, i, 0]) + ' ' + str(vertices_cpu[0, i, 1]) + ' ' + str(vertices_cpu[0, i, 2]) + ' ' + str(mask_cpu[i]) + ' ' + str(mask_cpu[i]) + ' ' + str(mask_cpu[i]) + ' 1'+ '\n') +# +# for i in range(0, self.num_faces): +# f.write('f ' + str(faces_cpu[0, i, 0]+1) + '// ' + str(faces_cpu[0, i, 1]+1) + '// ' + str(faces_cpu[0, i, 2]+1) + '//\n') +# +# f.close() +# +# def compute_expression_delta(self, expressions): +# return torch.transpose(torch.matmul(self.expression_basis, torch.transpose(expressions, 0,1)), 0, 2) # note that matmul wants 
to have this order: (a x b x c) x (c x m) => (a x b x m)
+#
+# def morph(self, expressions):
+#    self.vertices = self.average_vertices + self.compute_expression_delta(expressions)
+#    return self.vertices
+#
+#
+#
+# def LoadMask(self, filename=''):
+#    if filename=='':
+#        print('use default mask')
+#        filename = './BaselModel/mask/defaultMask_mouth.obj'
+#
+#    mask = np.zeros(self.num_vertices)
+#    file = open(filename, 'r')
+#    i=0
+#    for line in file:
+#        if line[0] == 'v':
+#            floats = [float(x) for x in line[1:].split()]
+#            if floats[3] == 1.0 and floats[4] == 0.0 and floats[5] == 0.0:
+#                mask[i] = 1.0
+#            i += 1
+#    file.close()
+#    return torch.tensor(mask.astype(np.float32)).cuda()
\ No newline at end of file
diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/ReadMe.txt b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/ReadMe.txt
new file mode 100644
index 0000000..9f5dd53
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/ReadMe.txt
@@ -0,0 +1,21 @@
+This code runs inference given a source audio clip and a target video that has already been tracked.
+The process is started with "transfer.sh" (where you can also specify the target sequences -> TARGET_ACTOR_LIST).
+In "transfer.py" you can specify the source sequences (search for "source_actors").
+Make sure to specify the dimensions of your blendshape model in "BaselModel/basel_model.py" (-> N_EXPRESSIONS).
+
+Note that you have to extract the deepspeech features in a preprocessing step.
+The datasets folder contains an example of how the data should look.
+The deepspeech features are provided as npy files; for the target sequence you also have to provide
+ the expressions (visually tracked blendshape coefficients).
+If you have a different data format, you need to adapt the data loader (data/face_dataset.py).
+
+Once the data is prepared you can run the script.
+It optimizes a mapping from the audio-expression space to your blendshape model space.
+The mapping is stored in the "mappings" folder (note that mappings are cached there and reused on the
+ next run; if you change something, you need to delete this cache).
+The final output is stored in the "datasets/TRANSFERS" folder as a list of estimated expressions based
+ on the source audio features.
+
+Given these expressions, you need to generate new uv maps for the target video using its rigid pose
+ (only the expressions are replaced).
+These can then be used in the deferred neural rendering framework.
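+
+For illustration, applying a cached mapping to one raw audio-expression vector amounts to the
+ following minimal sketch (the mapping file name is a placeholder; the 10.0 scale matches
+ audio2expr_with_map.py below):
+
+    import numpy as np
+    import torch
+
+    # placeholder path; the real name depends on the target actor
+    mapping = torch.tensor(np.load('mappings/target_actor.npy').astype(np.float32))
+    # stand-in for the network output model.fake_expressions.data[0, :, 0]
+    audio_expression = torch.zeros(mapping.shape[1])
+    # blendshape coefficients for one frame
+    expression = 10.0 * torch.matmul(mapping, audio_expression)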
\ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/audio2expr_with_map.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/audio2expr_with_map.py new file mode 100644 index 0000000..fa4d4c1 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/audio2expr_with_map.py @@ -0,0 +1,107 @@ +import os +import os.path +from options.transfer_options import Audio2ExprOptions +from data import CreateDataLoader +from data.face_dataset import FaceDataset +from data.audio_dataset import AudioDataset +from models import create_model +from autil.visualizer import save_images +from autil import html +import torch +import torch.nn as nn +import torchvision.transforms as transforms +import numpy as np +from PIL import Image +import time +import random +import copy +from shutil import copyfile +from tqdm import tqdm + +from BaselModel.basel_model import * + + +def load_model(opt): + opt.output_audio_expressions = True + opt.nTrainObjects = 116 + + print('#train objects = %d' % opt.nTrainObjects) + + print('>>> create model <<<') + model = create_model(opt) + print('>>> setup model <<<') + model.setup(opt) + + return model + +def load_source_sequence(opt): + opt_source = copy.copy(opt) # create a clone + opt_source.dataroot = opt.source_actor # overwrite root directory + print(opt_source.dataroot) + opt_source.dataset_mode = 'audio' + opt_source.phase = 'train' + + dataset_source = AudioDataset() + + dataset_source.initialize(opt_source) + + dataloader = torch.utils.data.DataLoader( + dataset_source, + batch_size=opt.batch_size, + shuffle=not opt.serial_batches, + num_workers=int(opt.num_threads)) + + return dataset_source, dataloader + + +if __name__ == '__main__': + # read options + opt = Audio2ExprOptions().parse() + + # load model + model = load_model(opt) + print('model version:', opt.name) + + if opt.use_mapping: + # read mapping + mapping_fn = opt.mapping_path + + # load mapping from file + map_cpu = np.load(mapping_fn + '.npy') + mapping = torch.tensor(map_cpu.astype(np.float32)).cuda() + print('loaded mapping from file', mapping.shape) + + # make outdir + source_name = opt.source_actor.split("/")[-1] + out_dir = opt.out_dir + source_name + os.makedirs(out_dir, exist_ok=True) + audio2expr_dir = os.path.join(out_dir, 'audio_expr') + os.makedirs(audio2expr_dir, exist_ok=True) + # if opt.use_mapping: + # expr_dir = os.path.join(out_dir, 'expr') + # os.makedirs(expr_dir, exist_ok=True) + + # read source sequence + dataset_source, data_loader_source = load_source_sequence(opt) + dataset_source_size = len(dataset_source) + print('#source_actor frames = %d' % dataset_source_size) + + expression_multiplier = 1.0 # default + + # run over data + for i, data in tqdm(enumerate(data_loader_source)): + model.set_input(data) + model.test() + audio_expression = model.fake_expressions.data[0, :, 0] + + if opt.use_mapping: + expression = expression_multiplier * 10.0 * torch.matmul(mapping, audio_expression) + expression = expression[None, :] + # np.save(os.path.join(audio2expr_dir, f'expr_{i}.npy'), expression.cpu().numpy()) + np.save(os.path.join(audio2expr_dir, 'audio_expression_%05d.npy' % i), expression.cpu().numpy()) + + + # audio_expression = audio_expression[None, :] + # np.save(os.path.join(audio2expr_dir, f'audioexpr_{i}.npy'), audio_expression.cpu().numpy()) + + exit() \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/autil/__init__.py 
b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/autil/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/autil/get_data.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/autil/get_data.py new file mode 100644 index 0000000..c2da238 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/autil/get_data.py @@ -0,0 +1,115 @@ +from __future__ import print_function +import os +import tarfile +import requests +from warnings import warn +from zipfile import ZipFile +from bs4 import BeautifulSoup +from os.path import abspath, isdir, join, basename + + +class GetData(object): + """ + + Download CycleGAN or Pix2Pix data. + + Args: + technique : str + One of: 'cyclegan' or 'pix2pix'. + verbose : bool + If True, print additional information. + + Examples: + >>> from autil.get_data import GetData + >>> gd = GetData(technique='cyclegan') + >>> new_data_path = gd.get(save_path='./datasets') # options will be displayed. + + """ + + def __init__(self, technique='cyclegan', verbose=True): + url_dict = { + 'pix2pix': 'https://people.eecs.berkeley.edu/~tinghuiz/projects/pix2pix/datasets', + 'cyclegan': 'https://people.eecs.berkeley.edu/~taesung_park/CycleGAN/datasets' + } + self.url = url_dict.get(technique.lower()) + self._verbose = verbose + + def _print(self, text): + if self._verbose: + print(text) + + @staticmethod + def _get_options(r): + soup = BeautifulSoup(r.text, 'lxml') + options = [h.text for h in soup.find_all('a', href=True) + if h.text.endswith(('.zip', 'tar.gz'))] + return options + + def _present_options(self): + r = requests.get(self.url) + options = self._get_options(r) + print('Options:\n') + for i, o in enumerate(options): + print("{0}: {1}".format(i, o)) + choice = input("\nPlease enter the number of the " + "dataset above you wish to download: ") + return options[int(choice)] + + def _download_data(self, dataset_url, save_path): + if not isdir(save_path): + os.makedirs(save_path) + + base = basename(dataset_url) + temp_save_path = join(save_path, base) + + with open(temp_save_path, "wb") as f: + r = requests.get(dataset_url) + f.write(r.content) + + if base.endswith('.tar.gz'): + obj = tarfile.open(temp_save_path) + elif base.endswith('.zip'): + obj = ZipFile(temp_save_path, 'r') + else: + raise ValueError("Unknown File Type: {0}.".format(base)) + + self._print("Unpacking Data...") + obj.extractall(save_path) + obj.close() + os.remove(temp_save_path) + + def get(self, save_path, dataset=None): + """ + + Download a dataset. + + Args: + save_path : str + A directory to save the data to. + dataset : str, optional + A specific dataset to download. + Note: this must include the file extension. + If None, options will be presented for you + to choose from. + + Returns: + save_path_full : str + The absolute path to the downloaded data. + + """ + if dataset is None: + selected_dataset = self._present_options() + else: + selected_dataset = dataset + + save_path_full = join(save_path, selected_dataset.split('.')[0]) + + if isdir(save_path_full): + warn("\n'{0}' already exists. 
Skipping download.".format( + save_path_full)) + else: + self._print('Downloading Data...') + url = "{0}/{1}".format(self.url, selected_dataset) + self._download_data(url, save_path=save_path) + + return abspath(save_path_full) diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/autil/html.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/autil/html.py new file mode 100644 index 0000000..1e7aab9 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/autil/html.py @@ -0,0 +1,64 @@ +import dominate +from dominate.tags import meta, h3, table, tr, td, p, a, img, br +import os + + +class HTML: + def __init__(self, web_dir, title, refresh=0): + self.title = title + self.web_dir = web_dir + self.img_dir = os.path.join(self.web_dir, 'images') + if not os.path.exists(self.web_dir): + os.makedirs(self.web_dir) + if not os.path.exists(self.img_dir): + os.makedirs(self.img_dir) + + self.doc = dominate.document(title=title) + if refresh > 0: + with self.doc.head: + meta(http_equiv="refresh", content=str(refresh)) + + def get_image_dir(self): + return self.img_dir + + def add_header(self, text): + with self.doc: + h3(text) + + def add_table(self, border=1): + self.t = table(border=border, style="table-layout: fixed;") + self.doc.add(self.t) + + def add_images(self, ims, txts, links, width=400): + self.add_table() + with self.t: + with tr(): + for im, txt, link in zip(ims, txts, links): + with td(style="word-wrap: break-word;", halign="center", valign="top"): + with p(): + with a(href=os.path.join('images', link)): + img(style="width:%dpx" % width, src=os.path.join('images', im)) + br() + p(txt) + + def save(self): + html_file = '%s/index.html' % self.web_dir + f = open(html_file, 'wt') + f.write(self.doc.render()) + f.close() + + +if __name__ == '__main__': + html = HTML('web/', 'test_html') + html.add_header('hello world') + + ims = [] + txts = [] + links = [] + for n in range(4): + ims.append('image_%d.png' % n) + txts.append('text_%d' % n) + links.append('image_%d.png' % n) + html.add_images(ims, txts, links) + html.save() diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/autil/image_pool.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/autil/image_pool.py new file mode 100644 index 0000000..52413e0 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/autil/image_pool.py @@ -0,0 +1,32 @@ +import random +import torch + + +class ImagePool(): + # history buffer of previously generated images; query() returns a mix of new + # and historical samples to stabilize discriminator training + def __init__(self, pool_size): + self.pool_size = pool_size + if self.pool_size > 0: + self.num_imgs = 0 + self.images = [] + + def query(self, images): + if self.pool_size == 0: + return images + return_images = [] + for image in images: + image = torch.unsqueeze(image.data, 0) + if self.num_imgs < self.pool_size: + self.num_imgs = self.num_imgs + 1 + self.images.append(image) + return_images.append(image) + else: + p = random.uniform(0, 1) + if p > 0.5: + # with probability 0.5, return a stored image and replace it with the new one + random_id = random.randint(0, self.pool_size - 1) # randint is inclusive + tmp = self.images[random_id].clone() + self.images[random_id] = image + return_images.append(tmp) + else: + return_images.append(image) + return_images = torch.cat(return_images, 0) + return return_images diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/autil/util.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/autil/util.py new file mode 100644 index 0000000..df72b66 --- /dev/null +++
b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/autil/util.py @@ -0,0 +1,82 @@ +from __future__ import print_function +import torch +import numpy as np +from PIL import Image +import os +import sys +import array +# import OpenEXR +# import Imath + +'''def load_exr(image_path): + # Open the input file + file = OpenEXR.InputFile(image_path) + + # Compute the size + dw = file.header()['dataWindow'] + w, h = (dw.max.x - dw.min.x + 1, dw.max.y - dw.min.y + 1) + + # Read the three color channels as 32-bit floats + FLOAT = Imath.PixelType(Imath.PixelType.FLOAT) + #(R,G,B) = [np.array(array.array('f', file.channel(Chan, FLOAT)).tolist()).reshape((w, h, 1)) for Chan in ("R", "G", "B") ] + + (r, g, b) = file.channels("RGB") + R = np.array(array.array('f', r).tolist()).reshape((w, h, 1)) + G = np.array(array.array('f', g).tolist()).reshape((w, h, 1)) + B = np.array(array.array('f', b).tolist()).reshape((w, h, 1)) + + return np.concatenate((R, G, B), axis=2)''' + +# Converts a Tensor into an image array (numpy) +# |imtype|: the desired type of the converted numpy array +def tensor2im(input_image, imtype=np.uint8): + if isinstance(input_image, torch.Tensor): + input_image = torch.clamp(input_image, -1.0, 1.0) + image_tensor = input_image.data + else: + return input_image + image_numpy = image_tensor[0].cpu().float().numpy() + if image_numpy.shape[0] == 1: + image_numpy = np.tile(image_numpy, (3, 1, 1)) + image_numpy = (np.transpose(image_numpy, (1, 2, 0)) + 1) / 2.0 * 255.0 + return image_numpy.astype(imtype) + + +def diagnose_network(net, name='network'): + mean = 0.0 + count = 0 + for param in net.parameters(): + if param.grad is not None: + mean += torch.mean(torch.abs(param.grad.data)) + count += 1 + if count > 0: + mean = mean / count + print(name) + print(mean) + + +def save_image(image_numpy, image_path): + image_pil = Image.fromarray(image_numpy) + image_pil.save(image_path) + +def print_numpy(x, val=True, shp=False): + x = x.astype(np.float64) + if shp: + print('shape,', x.shape) + if val: + x = x.flatten() + print('mean = %3.3f, min = %3.3f, max = %3.3f, median = %3.3f, std=%3.3f' % ( + np.mean(x), np.min(x), np.max(x), np.median(x), np.std(x))) + + +def mkdirs(paths): + if isinstance(paths, list) and not isinstance(paths, str): + for path in paths: + mkdir(path) + else: + mkdir(paths) + + +def mkdir(path): + if not os.path.exists(path): + os.makedirs(path) diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/autil/visualizer.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/autil/visualizer.py new file mode 100644 index 0000000..7cb7750 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/autil/visualizer.py @@ -0,0 +1,207 @@ +import numpy as np +import os +import sys +import ntpath +import time +from . import util +from . 
import html +#from scipy.misc import imresize +import cv2 + +if sys.version_info[0] == 2: + VisdomExceptionBase = Exception +else: + VisdomExceptionBase = ConnectionError + + +# save image to the disk +def save_images(webpage, visuals, image_path, aspect_ratio=1.0, width=256): + image_dir = webpage.get_image_dir() + short_path = ntpath.basename(image_path[0]) + name = os.path.splitext(short_path)[0] + + webpage.add_header(name) + ims, txts, links = [], [], [] + + for label, im_data in visuals.items(): + im = util.tensor2im(im_data) + image_name = '%s_%s.png' % (name, label) + save_path = os.path.join(image_dir, image_name) + h, w, _ = im.shape + + height = int(width * h / float(w)) + # note: cv2.resize expects dsize as (width, height) + im = cv2.resize(src=im, dsize=(width, height), interpolation=cv2.INTER_CUBIC) + + #if aspect_ratio > 1.0: + # im = imresize(im, (h, int(w * aspect_ratio)), interp='bicubic') + #if aspect_ratio < 1.0: + # im = imresize(im, (int(h / aspect_ratio), w), interp='bicubic') + + util.save_image(im, save_path) + + ims.append(image_name) + txts.append(label) + links.append(image_name) + webpage.add_images(ims, txts, links, width=width) + + +class Visualizer(): + def __init__(self, opt): + self.display_id = opt.display_id + self.use_html = opt.isTrain and not opt.no_html + self.win_size = opt.display_winsize + self.name = opt.name + self.opt = opt + self.saved = False + if self.display_id > 0: + import visdom + self.ncols = opt.display_ncols + self.vis = visdom.Visdom(server=opt.display_server, port=opt.display_port, env=opt.display_env, raise_exceptions=True) + + if self.use_html: + self.web_dir = os.path.join(opt.checkpoints_dir, opt.name, 'web') + self.img_dir = os.path.join(self.web_dir, 'images') + print('create web directory %s...' % self.web_dir) + util.mkdirs([self.web_dir, self.img_dir]) + self.log_name = os.path.join(opt.checkpoints_dir, opt.name, 'loss_log.txt') + with open(self.log_name, "a") as log_file: + now = time.strftime("%c") + log_file.write('================ Training Loss (%s) ================\n' % now) + + def reset(self): + self.saved = False + + def throw_visdom_connection_error(self): + print('\n\nCould not connect to Visdom server (https://github.com/facebookresearch/visdom) for displaying training progress.\nYou can suppress connection to Visdom using the option --display_id -1.
To install visdom, run \n$ pip install visdom\n, and start the server with \n$ python -m visdom.server.\n\n') + exit(1) + + # |visuals|: dictionary of images to display or save + def display_current_results(self, visuals, epoch, save_result, aspect_ratio=1.0, width=256): + if self.display_id > 0: # show images in the browser + ncols = self.ncols + if ncols > 0: + ncols = min(ncols, len(visuals)) + h, w = next(iter(visuals.values())).shape[2:4] + height = int(width * h / float(w)) + h = height + w = width + table_css = """<style> table {border-collapse: separate; border-spacing: 4px; white-space: nowrap; text-align: center} table td {width: %dpx; height: %dpx; padding: 4px; outline: 4px solid black} </style>""" % (w, h) + title = self.name + label_html = '' + label_html_row = '' + images = [] + idx = 0 + for label, image in visuals.items(): + image_numpy = util.tensor2im(image) + # note: cv2.resize expects dsize as (width, height) + image_numpy = cv2.resize(src=image_numpy, dsize=(w, h), interpolation=cv2.INTER_CUBIC) + image_numpy = image_numpy.transpose([2, 0, 1]) + label_html_row += '<td>%s</td>' % label + images.append(image_numpy) + idx += 1 + if idx % ncols == 0: + label_html += '<tr>%s</tr>' % label_html_row + label_html_row = '' + white_image = np.ones_like(image_numpy) * 255 + while idx % ncols != 0: + images.append(white_image) + label_html_row += '<td></td>' + idx += 1 + if label_html_row != '': + label_html += '<tr>%s</tr>' % label_html_row + # pane col = image row + try: + self.vis.images(images, nrow=ncols, win=self.display_id + 1, padding=2, opts=dict(title=title + ' images'))
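+ # wrap the per-image <td>/<tr> labels built above in a single <table> and render + # them in a separate visdom text pane directly below the image grid: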
label_html = '<table>%s</table>' % label_html + self.vis.text(table_css + label_html, win=self.display_id + 2, + opts=dict(title=title + ' labels')) + except VisdomExceptionBase: + self.throw_visdom_connection_error() + + else: + idx = 1 + for label, image in visuals.items(): + image_numpy = util.tensor2im(image) + self.vis.image(image_numpy.transpose([2, 0, 1]), opts=dict(title=label), + win=self.display_id + idx) + idx += 1 + + if self.use_html and (save_result or not self.saved): # save images to a html file + self.saved = True + for label, image in visuals.items(): + image_numpy = util.tensor2im(image) + img_path = os.path.join(self.img_dir, 'epoch%.3d_%s.png' % (epoch, label)) + util.save_image(image_numpy, img_path) + # update website + webpage = html.HTML(self.web_dir, 'Experiment name = %s' % self.name, refresh=1) + for n in range(epoch, 0, -1): + webpage.add_header('epoch [%d]' % n) + ims, txts, links = [], [], [] + + for label, image_numpy in visuals.items(): + image_numpy = util.tensor2im(image_numpy) + img_path = 'epoch%.3d_%s.png' % (n, label) + ims.append(img_path) + txts.append(label) + links.append(img_path) + webpage.add_images(ims, txts, links, width=self.win_size) + webpage.save() + + # losses: dictionary of error labels and values + def plot_current_losses(self, epoch, counter_ratio, opt, losses): + if not hasattr(self, 'plot_data'): + self.plot_data = {'X': [], 'Y': [], 'legend': list(losses.keys())} + self.plot_data['X'].append(epoch + counter_ratio) + self.plot_data['Y'].append([losses[k] for k in self.plot_data['legend']]) + try: + self.vis.line( + X=np.stack([np.array(self.plot_data['X'])] * len(self.plot_data['legend']), 1), + Y=np.array(self.plot_data['Y']), + opts={ + 'title': self.name + ' loss over time', + 'legend': self.plot_data['legend'], + 'xlabel': 'epoch', + 'ylabel': 'loss'}, + win=self.display_id) + except VisdomExceptionBase: + self.throw_visdom_connection_error() + + # losses: same format as |losses| of plot_current_losses + def print_current_losses(self, epoch, i, losses, t, t_data): + message = '(epoch: %d, iters: %d, time: %.3f, data: %.3f) ' % (epoch, i, t, t_data) + for k, v in losses.items(): + message += '%s: %.3f ' % (k, v) + + print(message) + with open(self.log_name, "a") as log_file: + log_file.write('%s\n' % message) + + # losses: dictionary of error labels and values + def plot_current_validation_error(self, epoch, counter_ratio, losses): + if not hasattr(self, 'plot_validation_data'): + self.plot_validation_data = {'X': [], 'Y': [], 'legend': list(losses.keys())} + self.plot_validation_data['X'].append(epoch + counter_ratio) + self.plot_validation_data['Y'].append([losses[k] for k in self.plot_validation_data['legend']]) + try: + self.vis.line( + X=np.stack([np.array(self.plot_validation_data['X'])] * len(self.plot_validation_data['legend']), 1), + Y=np.array(self.plot_validation_data['Y']), + opts={ + 'title': self.name + ' validation error over time', + 'legend': self.plot_validation_data['legend'], + 'xlabel': 'epoch', + 'ylabel': 'error'}, + win=self.display_id+1) + except VisdomExceptionBase: + self.throw_visdom_connection_error() \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/10_netG.pth
b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/10_netG.pth new file mode 100644 index 0000000..86f917f --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/10_netG.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:450ac758c0b1c86078f6c22a3971d1ada8b4f7b8bec59e0facb34a1d34b36623 +size 1314132 diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/15_netG.pth b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/15_netG.pth new file mode 100644 index 0000000..867717f --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/15_netG.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e64c2e037305352fd15233ca3b7db7ed5dd205f2c70e14ccff24db0e644858d5 +size 1314132 diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/20_netG.pth b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/20_netG.pth new file mode 100644 index 0000000..4759659 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/20_netG.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:40ebd6414acdede17d4b329af1e784876da3940b62a63c4560c047351fd04b25 +size 1314132 diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/25_netG.pth b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/25_netG.pth new file mode 100644 index 0000000..0cee87c --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/25_netG.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3d65789804c76602e57d25c200a1b4f0758ac4cc2f85c190a989fda9fda7fb30 +size 1314132 diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/30_netG.pth 
b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/30_netG.pth new file mode 100644 index 0000000..c1cf871 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/30_netG.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:31d87402f3adc96d14b20645935be68f0f38fd1acc0797f035114c098001bd15 +size 1314132 diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/35_netG.pth b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/35_netG.pth new file mode 100644 index 0000000..f8eafcc --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/35_netG.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1a59642e0450ed198d4107a4a31b688bdfa03bcd586f4b3ad8b0dfc78c889521 +size 1314132 diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/40_netG.pth b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/40_netG.pth new file mode 100644 index 0000000..5ae6c21 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/40_netG.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:6040b71bf8abea82dc58fc1b36a3a8af9fa4a88f6bfb5676a90cf0987612376a +size 1314132 diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/45_netG.pth b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/45_netG.pth new file mode 100644 index 0000000..0d98d7c --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/45_netG.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:ba76a02cb27239dab8c2a1e231d0c6728c5283b17c2ab4c811be13a20a8115bc +size 1314132 diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/50_netG.pth 
b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/50_netG.pth new file mode 100644 index 0000000..d2866a5 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/50_netG.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c143f554a882250cc74b80649c5a8d4499b4962d71bbf124dbcac8b888d21914 +size 1314132 diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/5_netG.pth b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/5_netG.pth new file mode 100644 index 0000000..5cf58e6 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/5_netG.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:58b3419dd86e8643bcee26409d5329d58e6dd08ce596e43bb85c18885d421b3e +size 1314132 diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/latest_netG.pth b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/latest_netG.pth new file mode 100644 index 0000000..159e954 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/latest_netG.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:64b408c2a7d19300c7d1c5a828a0780e1cb1748d16105f6f3338ce9f01d411e2 +size 1314132 diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/loss_log.txt b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/loss_log.txt new file mode 100644 index 0000000..7431bb2 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/loss_log.txt @@ -0,0 +1,7569 @@ +================ Training Loss (Tue Nov 5 11:55:26 2019) ================ +(epoch: 1, iters: 2000, time: 0.527, data: 0.543) G_L1: 20.533 G_L1_ABSOLUTE: 6.121 G_L1_RELATIVE: 14.412 G_Regularizer: 0.000 validation_error: 26.195 +(epoch: 1, iters: 4000, time: 0.519, data: 0.000) G_L1: 22.297 G_L1_ABSOLUTE: 7.213 G_L1_RELATIVE: 15.085 G_Regularizer: 0.000 validation_error: 26.295 +(epoch: 1, iters: 6000, time: 0.523, data: 
0.000) G_L1: 26.460 G_L1_ABSOLUTE: 7.193 G_L1_RELATIVE: 19.267 G_Regularizer: 0.000 validation_error: 26.111 +(epoch: 1, iters: 8000, time: 0.525, data: 0.000) G_L1: 25.699 G_L1_ABSOLUTE: 6.721 G_L1_RELATIVE: 18.978 G_Regularizer: 0.000 validation_error: 25.961 +(epoch: 1, iters: 10000, time: 0.522, data: 0.000) G_L1: 25.790 G_L1_ABSOLUTE: 6.689 G_L1_RELATIVE: 19.102 G_Regularizer: 0.000 validation_error: 26.259 +(epoch: 1, iters: 12000, time: 0.524, data: 0.000) G_L1: 21.361 G_L1_ABSOLUTE: 6.775 G_L1_RELATIVE: 14.586 G_Regularizer: 0.000 validation_error: 25.803 +(epoch: 1, iters: 14000, time: 0.524, data: 0.000) G_L1: 26.366 G_L1_ABSOLUTE: 6.800 G_L1_RELATIVE: 19.566 G_Regularizer: 0.000 validation_error: 26.035 +(epoch: 1, iters: 16000, time: 0.521, data: 0.000) G_L1: 22.563 G_L1_ABSOLUTE: 6.146 G_L1_RELATIVE: 16.416 G_Regularizer: 0.000 validation_error: 25.958 +(epoch: 1, iters: 18000, time: 0.521, data: 0.000) G_L1: 23.015 G_L1_ABSOLUTE: 6.598 G_L1_RELATIVE: 16.417 G_Regularizer: 0.000 validation_error: 26.145 +(epoch: 1, iters: 20000, time: 0.521, data: 0.000) G_L1: 27.604 G_L1_ABSOLUTE: 6.411 G_L1_RELATIVE: 21.193 G_Regularizer: 0.000 validation_error: 26.835 +(epoch: 1, iters: 22000, time: 0.524, data: 0.000) G_L1: 23.692 G_L1_ABSOLUTE: 6.645 G_L1_RELATIVE: 17.046 G_Regularizer: 0.000 validation_error: 26.400 +(epoch: 1, iters: 24000, time: 0.525, data: 0.000) G_L1: 31.754 G_L1_ABSOLUTE: 6.221 G_L1_RELATIVE: 25.533 G_Regularizer: 0.000 validation_error: 25.967 +(epoch: 1, iters: 26000, time: 0.534, data: 0.000) G_L1: 20.770 G_L1_ABSOLUTE: 6.981 G_L1_RELATIVE: 13.789 G_Regularizer: 0.000 validation_error: 26.519 +(epoch: 1, iters: 28000, time: 0.521, data: 0.000) G_L1: 21.386 G_L1_ABSOLUTE: 5.921 G_L1_RELATIVE: 15.465 G_Regularizer: 0.000 validation_error: 26.218 +(epoch: 1, iters: 30000, time: 0.527, data: 0.000) G_L1: 20.580 G_L1_ABSOLUTE: 5.905 G_L1_RELATIVE: 14.675 G_Regularizer: 0.000 validation_error: 26.542 +(epoch: 1, iters: 32000, time: 0.528, data: 0.000) G_L1: 22.306 G_L1_ABSOLUTE: 5.767 G_L1_RELATIVE: 16.538 G_Regularizer: 0.000 validation_error: 25.869 +(epoch: 1, iters: 34000, time: 0.530, data: 0.000) G_L1: 27.841 G_L1_ABSOLUTE: 6.845 G_L1_RELATIVE: 20.997 G_Regularizer: 0.000 validation_error: 25.692 +(epoch: 1, iters: 36000, time: 0.528, data: 0.000) G_L1: 24.150 G_L1_ABSOLUTE: 6.119 G_L1_RELATIVE: 18.030 G_Regularizer: 0.000 validation_error: 25.631 +(epoch: 1, iters: 38000, time: 0.530, data: 0.000) G_L1: 28.152 G_L1_ABSOLUTE: 5.918 G_L1_RELATIVE: 22.234 G_Regularizer: 0.000 validation_error: 26.156 +(epoch: 1, iters: 40000, time: 0.525, data: 0.000) G_L1: 25.895 G_L1_ABSOLUTE: 5.195 G_L1_RELATIVE: 20.700 G_Regularizer: 0.000 validation_error: 25.479 +(epoch: 1, iters: 42000, time: 0.530, data: 0.000) G_L1: 23.754 G_L1_ABSOLUTE: 5.785 G_L1_RELATIVE: 17.969 G_Regularizer: 0.000 validation_error: 25.697 +(epoch: 1, iters: 44000, time: 0.524, data: 0.000) G_L1: 23.181 G_L1_ABSOLUTE: 5.283 G_L1_RELATIVE: 17.897 G_Regularizer: 0.000 validation_error: 25.307 +(epoch: 1, iters: 46000, time: 0.528, data: 0.000) G_L1: 23.469 G_L1_ABSOLUTE: 6.536 G_L1_RELATIVE: 16.933 G_Regularizer: 0.000 validation_error: 25.563 +(epoch: 1, iters: 48000, time: 0.532, data: 0.000) G_L1: 24.980 G_L1_ABSOLUTE: 6.330 G_L1_RELATIVE: 18.649 G_Regularizer: 0.000 validation_error: 25.355 +(epoch: 1, iters: 50000, time: 0.528, data: 0.000) G_L1: 22.681 G_L1_ABSOLUTE: 5.501 G_L1_RELATIVE: 17.180 G_Regularizer: 0.000 validation_error: 25.428 +(epoch: 1, iters: 52000, time: 0.528, data: 0.000) G_L1: 
18.113 G_L1_ABSOLUTE: 5.679 G_L1_RELATIVE: 12.434 G_Regularizer: 0.000 validation_error: 25.698 +(epoch: 1, iters: 54000, time: 0.527, data: 0.000) G_L1: 19.100 G_L1_ABSOLUTE: 5.162 G_L1_RELATIVE: 13.939 G_Regularizer: 0.000 validation_error: 25.779 +(epoch: 1, iters: 56000, time: 0.522, data: 0.000) G_L1: 24.206 G_L1_ABSOLUTE: 5.879 G_L1_RELATIVE: 18.327 G_Regularizer: 0.000 validation_error: 26.205 +(epoch: 1, iters: 58000, time: 0.532, data: 0.000) G_L1: 27.497 G_L1_ABSOLUTE: 5.718 G_L1_RELATIVE: 21.779 G_Regularizer: 0.000 validation_error: 25.873 +(epoch: 1, iters: 60000, time: 0.528, data: 0.000) G_L1: 23.159 G_L1_ABSOLUTE: 6.134 G_L1_RELATIVE: 17.025 G_Regularizer: 0.000 validation_error: 26.379 +(epoch: 1, iters: 62000, time: 0.533, data: 0.000) G_L1: 19.434 G_L1_ABSOLUTE: 4.600 G_L1_RELATIVE: 14.834 G_Regularizer: 0.000 validation_error: 25.891 +(epoch: 1, iters: 64000, time: 0.525, data: 0.001) G_L1: 23.563 G_L1_ABSOLUTE: 5.571 G_L1_RELATIVE: 17.992 G_Regularizer: 0.000 validation_error: 26.009 +(epoch: 1, iters: 66000, time: 0.531, data: 0.000) G_L1: 20.647 G_L1_ABSOLUTE: 5.063 G_L1_RELATIVE: 15.585 G_Regularizer: 0.000 validation_error: 25.800 +(epoch: 1, iters: 68000, time: 0.532, data: 0.000) G_L1: 24.282 G_L1_ABSOLUTE: 5.969 G_L1_RELATIVE: 18.312 G_Regularizer: 0.000 validation_error: 25.929 +(epoch: 1, iters: 70000, time: 0.530, data: 0.000) G_L1: 23.425 G_L1_ABSOLUTE: 5.420 G_L1_RELATIVE: 18.005 G_Regularizer: 0.000 validation_error: 26.189 +(epoch: 1, iters: 72000, time: 0.523, data: 0.000) G_L1: 19.160 G_L1_ABSOLUTE: 5.065 G_L1_RELATIVE: 14.095 G_Regularizer: 0.000 validation_error: 25.471 +(epoch: 1, iters: 74000, time: 0.526, data: 0.000) G_L1: 21.785 G_L1_ABSOLUTE: 5.365 G_L1_RELATIVE: 16.420 G_Regularizer: 0.000 validation_error: 25.427 +(epoch: 1, iters: 76000, time: 0.531, data: 0.000) G_L1: 24.412 G_L1_ABSOLUTE: 4.703 G_L1_RELATIVE: 19.709 G_Regularizer: 0.000 validation_error: 25.933 +(epoch: 1, iters: 78000, time: 0.523, data: 0.000) G_L1: 20.388 G_L1_ABSOLUTE: 4.958 G_L1_RELATIVE: 15.430 G_Regularizer: 0.000 validation_error: 25.459 +(epoch: 1, iters: 80000, time: 0.527, data: 0.000) G_L1: 24.715 G_L1_ABSOLUTE: 4.889 G_L1_RELATIVE: 19.826 G_Regularizer: 0.000 validation_error: 25.478 +(epoch: 1, iters: 82000, time: 0.531, data: 0.000) G_L1: 17.904 G_L1_ABSOLUTE: 4.281 G_L1_RELATIVE: 13.622 G_Regularizer: 0.000 validation_error: 25.462 +(epoch: 1, iters: 84000, time: 0.525, data: 0.000) G_L1: 23.077 G_L1_ABSOLUTE: 4.637 G_L1_RELATIVE: 18.440 G_Regularizer: 0.000 validation_error: 25.672 +(epoch: 1, iters: 86000, time: 0.538, data: 0.000) G_L1: 25.415 G_L1_ABSOLUTE: 5.170 G_L1_RELATIVE: 20.246 G_Regularizer: 0.000 validation_error: 26.016 +(epoch: 1, iters: 88000, time: 0.525, data: 0.000) G_L1: 21.505 G_L1_ABSOLUTE: 4.944 G_L1_RELATIVE: 16.561 G_Regularizer: 0.000 validation_error: 25.608 +(epoch: 1, iters: 90000, time: 0.522, data: 0.000) G_L1: 22.340 G_L1_ABSOLUTE: 4.808 G_L1_RELATIVE: 17.533 G_Regularizer: 0.000 validation_error: 25.641 +(epoch: 1, iters: 92000, time: 0.528, data: 0.000) G_L1: 17.184 G_L1_ABSOLUTE: 5.004 G_L1_RELATIVE: 12.179 G_Regularizer: 0.000 validation_error: 25.229 +(epoch: 1, iters: 94000, time: 0.526, data: 0.000) G_L1: 22.259 G_L1_ABSOLUTE: 4.649 G_L1_RELATIVE: 17.610 G_Regularizer: 0.000 validation_error: 25.663 +(epoch: 1, iters: 96000, time: 0.531, data: 0.000) G_L1: 20.482 G_L1_ABSOLUTE: 4.829 G_L1_RELATIVE: 15.653 G_Regularizer: 0.000 validation_error: 25.542 +(epoch: 1, iters: 98000, time: 0.526, data: 0.000) G_L1: 20.044 
G_L1_ABSOLUTE: 4.605 G_L1_RELATIVE: 15.439 G_Regularizer: 0.000 validation_error: 25.474 +(epoch: 1, iters: 100000, time: 0.524, data: 0.000) G_L1: 24.498 G_L1_ABSOLUTE: 4.666 G_L1_RELATIVE: 19.831 G_Regularizer: 0.000 validation_error: 25.764 +(epoch: 1, iters: 102000, time: 0.529, data: 0.000) G_L1: 28.585 G_L1_ABSOLUTE: 5.306 G_L1_RELATIVE: 23.278 G_Regularizer: 0.000 validation_error: 25.472 +(epoch: 1, iters: 104000, time: 0.525, data: 0.000) G_L1: 22.490 G_L1_ABSOLUTE: 4.104 G_L1_RELATIVE: 18.385 G_Regularizer: 0.000 validation_error: 25.019 +(epoch: 1, iters: 106000, time: 0.530, data: 0.000) G_L1: 23.004 G_L1_ABSOLUTE: 5.039 G_L1_RELATIVE: 17.965 G_Regularizer: 0.000 validation_error: 25.303 +(epoch: 1, iters: 108000, time: 0.530, data: 0.000) G_L1: 27.354 G_L1_ABSOLUTE: 4.710 G_L1_RELATIVE: 22.644 G_Regularizer: 0.000 validation_error: 25.447 +(epoch: 1, iters: 110000, time: 0.523, data: 0.000) G_L1: 21.328 G_L1_ABSOLUTE: 4.957 G_L1_RELATIVE: 16.371 G_Regularizer: 0.000 validation_error: 25.714 +(epoch: 1, iters: 112000, time: 0.525, data: 0.000) G_L1: 19.985 G_L1_ABSOLUTE: 4.154 G_L1_RELATIVE: 15.831 G_Regularizer: 0.000 validation_error: 25.122 +(epoch: 1, iters: 114000, time: 0.529, data: 0.000) G_L1: 22.220 G_L1_ABSOLUTE: 4.470 G_L1_RELATIVE: 17.750 G_Regularizer: 0.000 validation_error: 25.690 +(epoch: 1, iters: 116000, time: 0.527, data: 0.000) G_L1: 20.264 G_L1_ABSOLUTE: 3.744 G_L1_RELATIVE: 16.520 G_Regularizer: 0.000 validation_error: 25.616 +(epoch: 1, iters: 118000, time: 0.524, data: 0.000) G_L1: 23.164 G_L1_ABSOLUTE: 4.250 G_L1_RELATIVE: 18.915 G_Regularizer: 0.000 validation_error: 25.461 +(epoch: 1, iters: 120000, time: 0.535, data: 0.000) G_L1: 24.064 G_L1_ABSOLUTE: 4.503 G_L1_RELATIVE: 19.561 G_Regularizer: 0.000 validation_error: 25.242 +(epoch: 1, iters: 122000, time: 0.524, data: 0.000) G_L1: 20.198 G_L1_ABSOLUTE: 3.989 G_L1_RELATIVE: 16.209 G_Regularizer: 0.000 validation_error: 25.856 +(epoch: 1, iters: 124000, time: 0.535, data: 0.000) G_L1: 22.604 G_L1_ABSOLUTE: 4.396 G_L1_RELATIVE: 18.208 G_Regularizer: 0.000 validation_error: 25.615 +(epoch: 1, iters: 126000, time: 0.532, data: 0.000) G_L1: 17.857 G_L1_ABSOLUTE: 3.804 G_L1_RELATIVE: 14.053 G_Regularizer: 0.000 validation_error: 24.820 +(epoch: 1, iters: 128000, time: 0.527, data: 0.000) G_L1: 22.076 G_L1_ABSOLUTE: 4.793 G_L1_RELATIVE: 17.284 G_Regularizer: 0.000 validation_error: 25.246 +(epoch: 1, iters: 130000, time: 0.526, data: 0.000) G_L1: 22.058 G_L1_ABSOLUTE: 4.431 G_L1_RELATIVE: 17.628 G_Regularizer: 0.000 validation_error: 24.596 +(epoch: 1, iters: 132000, time: 0.524, data: 0.000) G_L1: 25.289 G_L1_ABSOLUTE: 4.253 G_L1_RELATIVE: 21.036 G_Regularizer: 0.000 validation_error: 25.387 +(epoch: 1, iters: 134000, time: 0.527, data: 0.000) G_L1: 21.595 G_L1_ABSOLUTE: 3.987 G_L1_RELATIVE: 17.607 G_Regularizer: 0.000 validation_error: 25.077 +(epoch: 1, iters: 136000, time: 0.525, data: 0.000) G_L1: 21.235 G_L1_ABSOLUTE: 4.392 G_L1_RELATIVE: 16.843 G_Regularizer: 0.000 validation_error: 24.855 +(epoch: 1, iters: 138000, time: 0.534, data: 0.000) G_L1: 22.488 G_L1_ABSOLUTE: 4.404 G_L1_RELATIVE: 18.084 G_Regularizer: 0.000 validation_error: 25.866 +(epoch: 1, iters: 140000, time: 0.528, data: 0.000) G_L1: 23.761 G_L1_ABSOLUTE: 4.339 G_L1_RELATIVE: 19.422 G_Regularizer: 0.000 validation_error: 25.059 +(epoch: 1, iters: 142000, time: 0.527, data: 0.001) G_L1: 18.897 G_L1_ABSOLUTE: 4.298 G_L1_RELATIVE: 14.599 G_Regularizer: 0.000 validation_error: 25.060 +(epoch: 1, iters: 144000, time: 0.529, data: 0.000) 
G_L1: 18.984 G_L1_ABSOLUTE: 4.345 G_L1_RELATIVE: 14.639 G_Regularizer: 0.000 validation_error: 25.088 +(epoch: 1, iters: 146000, time: 0.531, data: 0.000) G_L1: 17.956 G_L1_ABSOLUTE: 4.423 G_L1_RELATIVE: 13.533 G_Regularizer: 0.000 validation_error: 25.111 +(epoch: 1, iters: 148000, time: 0.530, data: 0.000) G_L1: 19.417 G_L1_ABSOLUTE: 4.514 G_L1_RELATIVE: 14.903 G_Regularizer: 0.000 validation_error: 24.421 +(epoch: 1, iters: 150000, time: 0.525, data: 0.000) G_L1: 19.327 G_L1_ABSOLUTE: 4.618 G_L1_RELATIVE: 14.709 G_Regularizer: 0.000 validation_error: 24.444 +(epoch: 1, iters: 152000, time: 0.529, data: 0.000) G_L1: 22.617 G_L1_ABSOLUTE: 4.578 G_L1_RELATIVE: 18.040 G_Regularizer: 0.000 validation_error: 24.705 +(epoch: 1, iters: 154000, time: 0.524, data: 0.000) G_L1: 24.957 G_L1_ABSOLUTE: 3.963 G_L1_RELATIVE: 20.994 G_Regularizer: 0.000 validation_error: 24.430 +(epoch: 1, iters: 156000, time: 0.521, data: 0.000) G_L1: 20.307 G_L1_ABSOLUTE: 3.911 G_L1_RELATIVE: 16.396 G_Regularizer: 0.000 validation_error: 24.632 +(epoch: 1, iters: 158000, time: 0.527, data: 0.000) G_L1: 18.463 G_L1_ABSOLUTE: 4.156 G_L1_RELATIVE: 14.307 G_Regularizer: 0.000 validation_error: 24.343 +(epoch: 1, iters: 160000, time: 0.531, data: 0.000) G_L1: 20.475 G_L1_ABSOLUTE: 4.195 G_L1_RELATIVE: 16.280 G_Regularizer: 0.000 validation_error: 24.677 +(epoch: 1, iters: 162000, time: 0.525, data: 0.000) G_L1: 20.776 G_L1_ABSOLUTE: 4.367 G_L1_RELATIVE: 16.409 G_Regularizer: 0.000 validation_error: 24.303 +(epoch: 1, iters: 164000, time: 0.531, data: 0.000) G_L1: 18.067 G_L1_ABSOLUTE: 4.091 G_L1_RELATIVE: 13.976 G_Regularizer: 0.000 validation_error: 24.560 +(epoch: 1, iters: 166000, time: 0.526, data: 0.000) G_L1: 22.843 G_L1_ABSOLUTE: 4.290 G_L1_RELATIVE: 18.553 G_Regularizer: 0.000 validation_error: 24.721 +(epoch: 1, iters: 168000, time: 0.524, data: 0.000) G_L1: 20.924 G_L1_ABSOLUTE: 3.742 G_L1_RELATIVE: 17.182 G_Regularizer: 0.000 validation_error: 24.209 +(epoch: 1, iters: 170000, time: 0.526, data: 0.000) G_L1: 17.669 G_L1_ABSOLUTE: 4.038 G_L1_RELATIVE: 13.631 G_Regularizer: 0.000 validation_error: 24.377 +(epoch: 1, iters: 172000, time: 0.526, data: 0.000) G_L1: 20.277 G_L1_ABSOLUTE: 4.201 G_L1_RELATIVE: 16.076 G_Regularizer: 0.000 validation_error: 24.538 +(epoch: 1, iters: 174000, time: 0.527, data: 0.000) G_L1: 22.542 G_L1_ABSOLUTE: 3.949 G_L1_RELATIVE: 18.593 G_Regularizer: 0.000 validation_error: 24.260 +(epoch: 1, iters: 176000, time: 0.529, data: 0.000) G_L1: 21.529 G_L1_ABSOLUTE: 3.585 G_L1_RELATIVE: 17.944 G_Regularizer: 0.000 validation_error: 24.700 +(epoch: 1, iters: 178000, time: 0.529, data: 0.000) G_L1: 21.695 G_L1_ABSOLUTE: 4.256 G_L1_RELATIVE: 17.439 G_Regularizer: 0.000 validation_error: 24.722 +(epoch: 1, iters: 180000, time: 0.528, data: 0.000) G_L1: 18.981 G_L1_ABSOLUTE: 3.781 G_L1_RELATIVE: 15.200 G_Regularizer: 0.000 validation_error: 24.588 +(epoch: 1, iters: 182000, time: 0.531, data: 0.000) G_L1: 25.291 G_L1_ABSOLUTE: 4.210 G_L1_RELATIVE: 21.081 G_Regularizer: 0.000 validation_error: 24.406 +(epoch: 1, iters: 184000, time: 0.522, data: 0.000) G_L1: 19.536 G_L1_ABSOLUTE: 3.747 G_L1_RELATIVE: 15.789 G_Regularizer: 0.000 validation_error: 24.321 +(epoch: 1, iters: 186000, time: 0.525, data: 0.000) G_L1: 20.605 G_L1_ABSOLUTE: 4.199 G_L1_RELATIVE: 16.406 G_Regularizer: 0.000 validation_error: 24.567 +(epoch: 1, iters: 188000, time: 0.524, data: 0.000) G_L1: 18.809 G_L1_ABSOLUTE: 3.932 G_L1_RELATIVE: 14.877 G_Regularizer: 0.000 validation_error: 24.504 +(epoch: 1, iters: 190000, time: 0.527, 
data: 0.000) G_L1: 18.435 G_L1_ABSOLUTE: 4.072 G_L1_RELATIVE: 14.363 G_Regularizer: 0.000 validation_error: 24.471 +(epoch: 1, iters: 192000, time: 0.527, data: 0.000) G_L1: 24.383 G_L1_ABSOLUTE: 3.989 G_L1_RELATIVE: 20.394 G_Regularizer: 0.000 validation_error: 24.320 +(epoch: 1, iters: 194000, time: 0.524, data: 0.000) G_L1: 18.557 G_L1_ABSOLUTE: 4.491 G_L1_RELATIVE: 14.066 G_Regularizer: 0.000 validation_error: 24.349 +(epoch: 1, iters: 196000, time: 0.527, data: 0.000) G_L1: 22.087 G_L1_ABSOLUTE: 4.181 G_L1_RELATIVE: 17.906 G_Regularizer: 0.000 validation_error: 24.555 +(epoch: 1, iters: 198000, time: 0.532, data: 0.001) G_L1: 21.366 G_L1_ABSOLUTE: 3.752 G_L1_RELATIVE: 17.614 G_Regularizer: 0.000 validation_error: 24.559 +(epoch: 1, iters: 200000, time: 0.524, data: 0.000) G_L1: 19.948 G_L1_ABSOLUTE: 4.048 G_L1_RELATIVE: 15.900 G_Regularizer: 0.000 validation_error: 24.242 +(epoch: 1, iters: 202000, time: 0.530, data: 0.000) G_L1: 20.864 G_L1_ABSOLUTE: 3.799 G_L1_RELATIVE: 17.066 G_Regularizer: 0.000 validation_error: 24.322 +(epoch: 1, iters: 204000, time: 0.531, data: 0.000) G_L1: 20.734 G_L1_ABSOLUTE: 3.939 G_L1_RELATIVE: 16.795 G_Regularizer: 0.000 validation_error: 23.832 +(epoch: 1, iters: 206000, time: 0.527, data: 0.000) G_L1: 17.837 G_L1_ABSOLUTE: 4.024 G_L1_RELATIVE: 13.813 G_Regularizer: 0.000 validation_error: 24.326 +(epoch: 1, iters: 208000, time: 0.523, data: 0.000) G_L1: 21.147 G_L1_ABSOLUTE: 3.896 G_L1_RELATIVE: 17.251 G_Regularizer: 0.000 validation_error: 24.181 +(epoch: 1, iters: 210000, time: 0.527, data: 0.000) G_L1: 18.972 G_L1_ABSOLUTE: 3.557 G_L1_RELATIVE: 15.415 G_Regularizer: 0.000 validation_error: 24.457 +(epoch: 1, iters: 212000, time: 0.524, data: 0.000) G_L1: 21.846 G_L1_ABSOLUTE: 3.925 G_L1_RELATIVE: 17.921 G_Regularizer: 0.000 validation_error: 24.408 +(epoch: 1, iters: 214000, time: 0.528, data: 0.000) G_L1: 19.201 G_L1_ABSOLUTE: 3.623 G_L1_RELATIVE: 15.577 G_Regularizer: 0.000 validation_error: 24.239 +(epoch: 1, iters: 216000, time: 0.531, data: 0.000) G_L1: 17.034 G_L1_ABSOLUTE: 4.041 G_L1_RELATIVE: 12.993 G_Regularizer: 0.000 validation_error: 24.544 +(epoch: 1, iters: 218000, time: 0.525, data: 0.000) G_L1: 22.975 G_L1_ABSOLUTE: 3.899 G_L1_RELATIVE: 19.076 G_Regularizer: 0.000 validation_error: 23.697 +(epoch: 1, iters: 220000, time: 0.523, data: 0.000) G_L1: 18.786 G_L1_ABSOLUTE: 3.943 G_L1_RELATIVE: 14.844 G_Regularizer: 0.000 validation_error: 24.047 +(epoch: 1, iters: 222000, time: 0.527, data: 0.000) G_L1: 16.579 G_L1_ABSOLUTE: 4.034 G_L1_RELATIVE: 12.545 G_Regularizer: 0.000 validation_error: 23.986 +(epoch: 1, iters: 224000, time: 0.530, data: 0.000) G_L1: 18.551 G_L1_ABSOLUTE: 3.978 G_L1_RELATIVE: 14.573 G_Regularizer: 0.000 validation_error: 24.706 +(epoch: 1, iters: 226000, time: 0.526, data: 0.000) G_L1: 21.904 G_L1_ABSOLUTE: 3.952 G_L1_RELATIVE: 17.952 G_Regularizer: 0.000 validation_error: 24.628 +(epoch: 1, iters: 228000, time: 0.531, data: 0.000) G_L1: 18.624 G_L1_ABSOLUTE: 3.560 G_L1_RELATIVE: 15.064 G_Regularizer: 0.000 validation_error: 23.511 +(epoch: 1, iters: 230000, time: 0.524, data: 0.000) G_L1: 17.772 G_L1_ABSOLUTE: 3.938 G_L1_RELATIVE: 13.834 G_Regularizer: 0.000 validation_error: 23.734 +(epoch: 1, iters: 232000, time: 0.531, data: 0.000) G_L1: 20.067 G_L1_ABSOLUTE: 4.079 G_L1_RELATIVE: 15.988 G_Regularizer: 0.000 validation_error: 24.111 +(epoch: 1, iters: 234000, time: 0.529, data: 0.000) G_L1: 17.543 G_L1_ABSOLUTE: 4.035 G_L1_RELATIVE: 13.507 G_Regularizer: 0.000 validation_error: 24.288 +(epoch: 1, iters: 236000, 
time: 0.527, data: 0.000) G_L1: 23.722 G_L1_ABSOLUTE: 3.983 G_L1_RELATIVE: 19.739 G_Regularizer: 0.000 validation_error: 23.954 +(epoch: 1, iters: 238000, time: 0.526, data: 0.000) G_L1: 19.177 G_L1_ABSOLUTE: 3.632 G_L1_RELATIVE: 15.546 G_Regularizer: 0.000 validation_error: 24.831 +(epoch: 1, iters: 240000, time: 0.531, data: 0.000) G_L1: 18.756 G_L1_ABSOLUTE: 3.548 G_L1_RELATIVE: 15.207 G_Regularizer: 0.000 validation_error: 24.057 +(epoch: 1, iters: 242000, time: 0.526, data: 0.000) G_L1: 15.468 G_L1_ABSOLUTE: 3.767 G_L1_RELATIVE: 11.701 G_Regularizer: 0.000 validation_error: 24.242 +(epoch: 1, iters: 244000, time: 0.529, data: 0.000) G_L1: 18.866 G_L1_ABSOLUTE: 3.760 G_L1_RELATIVE: 15.105 G_Regularizer: 0.000 validation_error: 24.002 +(epoch: 1, iters: 246000, time: 0.522, data: 0.001) G_L1: 18.390 G_L1_ABSOLUTE: 3.824 G_L1_RELATIVE: 14.566 G_Regularizer: 0.000 validation_error: 23.662 +(epoch: 1, iters: 248000, time: 0.524, data: 0.000) G_L1: 19.327 G_L1_ABSOLUTE: 3.222 G_L1_RELATIVE: 16.105 G_Regularizer: 0.000 validation_error: 23.933 +(epoch: 1, iters: 250000, time: 0.525, data: 0.000) G_L1: 16.488 G_L1_ABSOLUTE: 4.071 G_L1_RELATIVE: 12.417 G_Regularizer: 0.000 validation_error: 23.823 +(epoch: 1, iters: 252000, time: 0.529, data: 0.001) G_L1: 19.932 G_L1_ABSOLUTE: 3.844 G_L1_RELATIVE: 16.088 G_Regularizer: 0.000 validation_error: 24.064 +(epoch: 1, iters: 254000, time: 0.528, data: 0.000) G_L1: 21.046 G_L1_ABSOLUTE: 3.934 G_L1_RELATIVE: 17.112 G_Regularizer: 0.000 validation_error: 23.750 +(epoch: 1, iters: 256000, time: 0.523, data: 0.000) G_L1: 20.878 G_L1_ABSOLUTE: 4.014 G_L1_RELATIVE: 16.864 G_Regularizer: 0.000 validation_error: 24.444 +(epoch: 1, iters: 258000, time: 0.525, data: 0.000) G_L1: 15.624 G_L1_ABSOLUTE: 3.351 G_L1_RELATIVE: 12.272 G_Regularizer: 0.000 validation_error: 23.646 +(epoch: 1, iters: 260000, time: 0.522, data: 0.000) G_L1: 21.103 G_L1_ABSOLUTE: 3.355 G_L1_RELATIVE: 17.748 G_Regularizer: 0.000 validation_error: 24.173 +(epoch: 1, iters: 262000, time: 0.528, data: 0.000) G_L1: 15.254 G_L1_ABSOLUTE: 3.802 G_L1_RELATIVE: 11.452 G_Regularizer: 0.000 validation_error: 24.097 +(epoch: 1, iters: 264000, time: 0.529, data: 0.000) G_L1: 17.711 G_L1_ABSOLUTE: 3.441 G_L1_RELATIVE: 14.270 G_Regularizer: 0.000 validation_error: 23.639 +(epoch: 1, iters: 266000, time: 0.531, data: 0.000) G_L1: 22.043 G_L1_ABSOLUTE: 3.804 G_L1_RELATIVE: 18.239 G_Regularizer: 0.000 validation_error: 24.509 +(epoch: 1, iters: 268000, time: 0.529, data: 0.000) G_L1: 15.759 G_L1_ABSOLUTE: 3.534 G_L1_RELATIVE: 12.226 G_Regularizer: 0.000 validation_error: 23.430 +(epoch: 1, iters: 270000, time: 0.527, data: 0.000) G_L1: 19.133 G_L1_ABSOLUTE: 4.115 G_L1_RELATIVE: 15.018 G_Regularizer: 0.000 validation_error: 24.018 +(epoch: 1, iters: 272000, time: 0.529, data: 0.000) G_L1: 15.650 G_L1_ABSOLUTE: 3.156 G_L1_RELATIVE: 12.494 G_Regularizer: 0.000 validation_error: 24.029 +(epoch: 1, iters: 274000, time: 0.532, data: 0.000) G_L1: 22.627 G_L1_ABSOLUTE: 3.961 G_L1_RELATIVE: 18.666 G_Regularizer: 0.000 validation_error: 24.322 +(epoch: 1, iters: 276000, time: 0.530, data: 0.000) G_L1: 21.083 G_L1_ABSOLUTE: 4.061 G_L1_RELATIVE: 17.022 G_Regularizer: 0.000 validation_error: 23.667 +(epoch: 1, iters: 278000, time: 0.531, data: 0.000) G_L1: 20.596 G_L1_ABSOLUTE: 3.878 G_L1_RELATIVE: 16.718 G_Regularizer: 0.000 validation_error: 24.293 +(epoch: 1, iters: 280000, time: 0.534, data: 0.001) G_L1: 22.605 G_L1_ABSOLUTE: 3.707 G_L1_RELATIVE: 18.898 G_Regularizer: 0.000 validation_error: 23.754 +(epoch: 1, 
iters: 282000, time: 0.527, data: 0.000) G_L1: 15.923 G_L1_ABSOLUTE: 3.235 G_L1_RELATIVE: 12.688 G_Regularizer: 0.000 validation_error: 23.480 +(epoch: 1, iters: 284000, time: 0.531, data: 0.000) G_L1: 20.187 G_L1_ABSOLUTE: 4.283 G_L1_RELATIVE: 15.904 G_Regularizer: 0.000 validation_error: 23.692 +(epoch: 1, iters: 286000, time: 0.528, data: 0.000) G_L1: 21.816 G_L1_ABSOLUTE: 3.407 G_L1_RELATIVE: 18.409 G_Regularizer: 0.000 validation_error: 23.527 +(epoch: 1, iters: 288000, time: 0.527, data: 0.000) G_L1: 19.219 G_L1_ABSOLUTE: 3.813 G_L1_RELATIVE: 15.405 G_Regularizer: 0.000 validation_error: 23.934 +(epoch: 1, iters: 290000, time: 0.531, data: 0.000) G_L1: 15.926 G_L1_ABSOLUTE: 3.455 G_L1_RELATIVE: 12.470 G_Regularizer: 0.000 validation_error: 23.804 +(epoch: 1, iters: 292000, time: 0.525, data: 0.000) G_L1: 23.940 G_L1_ABSOLUTE: 3.763 G_L1_RELATIVE: 20.177 G_Regularizer: 0.000 validation_error: 23.613 +(epoch: 1, iters: 294000, time: 0.532, data: 0.000) G_L1: 19.805 G_L1_ABSOLUTE: 3.906 G_L1_RELATIVE: 15.899 G_Regularizer: 0.000 validation_error: 23.512 +(epoch: 1, iters: 296000, time: 0.526, data: 0.000) G_L1: 18.564 G_L1_ABSOLUTE: 3.842 G_L1_RELATIVE: 14.722 G_Regularizer: 0.000 validation_error: 24.080 +(epoch: 1, iters: 298000, time: 0.523, data: 0.000) G_L1: 21.355 G_L1_ABSOLUTE: 4.216 G_L1_RELATIVE: 17.139 G_Regularizer: 0.000 validation_error: 23.654 +(epoch: 1, iters: 300000, time: 0.524, data: 0.000) G_L1: 16.615 G_L1_ABSOLUTE: 3.394 G_L1_RELATIVE: 13.222 G_Regularizer: 0.000 validation_error: 23.533 +(epoch: 1, iters: 302000, time: 0.531, data: 0.000) G_L1: 22.100 G_L1_ABSOLUTE: 4.024 G_L1_RELATIVE: 18.076 G_Regularizer: 0.000 validation_error: 23.328 +(epoch: 2, iters: 1248, time: 0.531, data: 0.000) G_L1: 17.071 G_L1_ABSOLUTE: 3.598 G_L1_RELATIVE: 13.473 G_Regularizer: 0.000 validation_error: 23.080 +(epoch: 2, iters: 3248, time: 0.527, data: 0.000) G_L1: 20.515 G_L1_ABSOLUTE: 3.594 G_L1_RELATIVE: 16.921 G_Regularizer: 0.000 validation_error: 23.704 +(epoch: 2, iters: 5248, time: 0.527, data: 0.000) G_L1: 16.561 G_L1_ABSOLUTE: 3.182 G_L1_RELATIVE: 13.379 G_Regularizer: 0.000 validation_error: 23.516 +(epoch: 2, iters: 7248, time: 0.531, data: 0.000) G_L1: 15.340 G_L1_ABSOLUTE: 3.116 G_L1_RELATIVE: 12.225 G_Regularizer: 0.000 validation_error: 23.368 +(epoch: 2, iters: 9248, time: 0.531, data: 0.000) G_L1: 21.704 G_L1_ABSOLUTE: 3.737 G_L1_RELATIVE: 17.967 G_Regularizer: 0.000 validation_error: 23.156 +(epoch: 2, iters: 11248, time: 0.531, data: 0.000) G_L1: 16.057 G_L1_ABSOLUTE: 3.588 G_L1_RELATIVE: 12.469 G_Regularizer: 0.000 validation_error: 23.250 +(epoch: 2, iters: 13248, time: 0.528, data: 0.000) G_L1: 21.139 G_L1_ABSOLUTE: 3.705 G_L1_RELATIVE: 17.434 G_Regularizer: 0.000 validation_error: 23.455 +(epoch: 2, iters: 15248, time: 0.527, data: 0.000) G_L1: 17.701 G_L1_ABSOLUTE: 3.750 G_L1_RELATIVE: 13.951 G_Regularizer: 0.000 validation_error: 23.150 +(epoch: 2, iters: 17248, time: 0.521, data: 0.000) G_L1: 18.372 G_L1_ABSOLUTE: 3.927 G_L1_RELATIVE: 14.445 G_Regularizer: 0.000 validation_error: 22.785 +(epoch: 2, iters: 19248, time: 0.531, data: 0.000) G_L1: 22.116 G_L1_ABSOLUTE: 3.609 G_L1_RELATIVE: 18.507 G_Regularizer: 0.000 validation_error: 23.102 +(epoch: 2, iters: 21248, time: 0.531, data: 0.000) G_L1: 21.331 G_L1_ABSOLUTE: 4.043 G_L1_RELATIVE: 17.288 G_Regularizer: 0.000 validation_error: 23.610 +(epoch: 2, iters: 23248, time: 0.532, data: 0.000) G_L1: 20.874 G_L1_ABSOLUTE: 3.990 G_L1_RELATIVE: 16.884 G_Regularizer: 0.000 validation_error: 23.080 +(epoch: 2, iters: 
25248, time: 0.530, data: 0.000) G_L1: 20.500 G_L1_ABSOLUTE: 4.043 G_L1_RELATIVE: 16.458 G_Regularizer: 0.000 validation_error: 22.868
+[~500 further loss-log entries condensed: epochs 2-5, one entry every 2000 iterations in the form "(epoch: E, iters: I, time: T, data: D) G_L1: ... G_L1_ABSOLUTE: ... G_L1_RELATIVE: ... G_Regularizer: 0.000 validation_error: ...". G_L1 is the sum of its ABSOLUTE and RELATIVE components, G_Regularizer stays at 0.000 throughout, per-iteration time sits mostly between 0.52 s and 0.95 s, and validation_error drifts down from roughly 23.4 in epoch 2 to roughly 21.0 by epoch 5.]
+(epoch: 5, iters: 80992, time: 0.530, data: 0.000)
G_L1: 15.138 G_L1_ABSOLUTE: 2.687 G_L1_RELATIVE: 12.451 G_Regularizer: 0.000 validation_error: 20.853 +(epoch: 5, iters: 82992, time: 0.532, data: 0.000) G_L1: 16.424 G_L1_ABSOLUTE: 3.520 G_L1_RELATIVE: 12.904 G_Regularizer: 0.000 validation_error: 21.014 +(epoch: 5, iters: 84992, time: 0.527, data: 0.000) G_L1: 13.955 G_L1_ABSOLUTE: 2.840 G_L1_RELATIVE: 11.115 G_Regularizer: 0.000 validation_error: 20.625 +(epoch: 5, iters: 86992, time: 0.534, data: 0.000) G_L1: 19.030 G_L1_ABSOLUTE: 3.118 G_L1_RELATIVE: 15.912 G_Regularizer: 0.000 validation_error: 21.587 +(epoch: 5, iters: 88992, time: 0.528, data: 0.000) G_L1: 14.833 G_L1_ABSOLUTE: 2.819 G_L1_RELATIVE: 12.014 G_Regularizer: 0.000 validation_error: 20.833 +(epoch: 5, iters: 90992, time: 0.532, data: 0.000) G_L1: 17.091 G_L1_ABSOLUTE: 2.781 G_L1_RELATIVE: 14.310 G_Regularizer: 0.000 validation_error: 20.106 +(epoch: 5, iters: 92992, time: 0.530, data: 0.000) G_L1: 17.745 G_L1_ABSOLUTE: 2.801 G_L1_RELATIVE: 14.944 G_Regularizer: 0.000 validation_error: 21.000 +(epoch: 5, iters: 94992, time: 0.531, data: 0.000) G_L1: 17.968 G_L1_ABSOLUTE: 3.460 G_L1_RELATIVE: 14.508 G_Regularizer: 0.000 validation_error: 21.294 +(epoch: 5, iters: 96992, time: 0.516, data: 0.000) G_L1: 18.130 G_L1_ABSOLUTE: 3.123 G_L1_RELATIVE: 15.007 G_Regularizer: 0.000 validation_error: 20.783 +(epoch: 5, iters: 98992, time: 0.520, data: 0.000) G_L1: 17.821 G_L1_ABSOLUTE: 2.968 G_L1_RELATIVE: 14.853 G_Regularizer: 0.000 validation_error: 21.699 +(epoch: 5, iters: 100992, time: 0.517, data: 0.000) G_L1: 15.793 G_L1_ABSOLUTE: 2.487 G_L1_RELATIVE: 13.306 G_Regularizer: 0.000 validation_error: 21.338 +(epoch: 5, iters: 102992, time: 0.516, data: 0.000) G_L1: 18.522 G_L1_ABSOLUTE: 2.822 G_L1_RELATIVE: 15.700 G_Regularizer: 0.000 validation_error: 20.696 +(epoch: 5, iters: 104992, time: 0.536, data: 0.000) G_L1: 18.343 G_L1_ABSOLUTE: 3.039 G_L1_RELATIVE: 15.304 G_Regularizer: 0.000 validation_error: 21.680 +(epoch: 5, iters: 106992, time: 0.525, data: 0.000) G_L1: 16.912 G_L1_ABSOLUTE: 2.909 G_L1_RELATIVE: 14.002 G_Regularizer: 0.000 validation_error: 21.150 +(epoch: 5, iters: 108992, time: 0.524, data: 0.000) G_L1: 19.879 G_L1_ABSOLUTE: 3.014 G_L1_RELATIVE: 16.865 G_Regularizer: 0.000 validation_error: 21.211 +(epoch: 5, iters: 110992, time: 0.521, data: 0.000) G_L1: 17.852 G_L1_ABSOLUTE: 3.022 G_L1_RELATIVE: 14.829 G_Regularizer: 0.000 validation_error: 20.677 +(epoch: 5, iters: 112992, time: 0.524, data: 0.000) G_L1: 15.363 G_L1_ABSOLUTE: 3.192 G_L1_RELATIVE: 12.171 G_Regularizer: 0.000 validation_error: 20.461 +(epoch: 5, iters: 114992, time: 0.521, data: 0.000) G_L1: 14.951 G_L1_ABSOLUTE: 2.564 G_L1_RELATIVE: 12.386 G_Regularizer: 0.000 validation_error: 20.140 +(epoch: 5, iters: 116992, time: 0.528, data: 0.000) G_L1: 15.852 G_L1_ABSOLUTE: 3.393 G_L1_RELATIVE: 12.459 G_Regularizer: 0.000 validation_error: 20.762 +(epoch: 5, iters: 118992, time: 0.526, data: 0.000) G_L1: 15.606 G_L1_ABSOLUTE: 2.689 G_L1_RELATIVE: 12.917 G_Regularizer: 0.000 validation_error: 21.023 +(epoch: 5, iters: 120992, time: 0.527, data: 0.000) G_L1: 17.720 G_L1_ABSOLUTE: 2.948 G_L1_RELATIVE: 14.772 G_Regularizer: 0.000 validation_error: 21.456 +(epoch: 5, iters: 122992, time: 0.537, data: 0.000) G_L1: 14.697 G_L1_ABSOLUTE: 2.932 G_L1_RELATIVE: 11.765 G_Regularizer: 0.000 validation_error: 20.899 +(epoch: 5, iters: 124992, time: 0.535, data: 0.000) G_L1: 13.932 G_L1_ABSOLUTE: 2.952 G_L1_RELATIVE: 10.980 G_Regularizer: 0.000 validation_error: 21.008 +(epoch: 5, iters: 126992, time: 0.522, data: 
0.000) G_L1: 16.197 G_L1_ABSOLUTE: 3.104 G_L1_RELATIVE: 13.093 G_Regularizer: 0.000 validation_error: 20.928 +(epoch: 5, iters: 128992, time: 0.527, data: 0.000) G_L1: 14.652 G_L1_ABSOLUTE: 2.826 G_L1_RELATIVE: 11.826 G_Regularizer: 0.000 validation_error: 21.165 +(epoch: 5, iters: 130992, time: 0.527, data: 0.000) G_L1: 17.918 G_L1_ABSOLUTE: 3.116 G_L1_RELATIVE: 14.802 G_Regularizer: 0.000 validation_error: 20.602 +(epoch: 5, iters: 132992, time: 0.525, data: 0.000) G_L1: 17.076 G_L1_ABSOLUTE: 2.908 G_L1_RELATIVE: 14.167 G_Regularizer: 0.000 validation_error: 20.011 +(epoch: 5, iters: 134992, time: 0.529, data: 0.000) G_L1: 16.228 G_L1_ABSOLUTE: 2.605 G_L1_RELATIVE: 13.623 G_Regularizer: 0.000 validation_error: 20.966 +(epoch: 5, iters: 136992, time: 0.526, data: 0.000) G_L1: 15.628 G_L1_ABSOLUTE: 3.199 G_L1_RELATIVE: 12.429 G_Regularizer: 0.000 validation_error: 21.596 +(epoch: 5, iters: 138992, time: 0.528, data: 0.000) G_L1: 16.223 G_L1_ABSOLUTE: 3.144 G_L1_RELATIVE: 13.080 G_Regularizer: 0.000 validation_error: 20.500 +(epoch: 5, iters: 140992, time: 0.532, data: 0.000) G_L1: 19.719 G_L1_ABSOLUTE: 2.660 G_L1_RELATIVE: 17.058 G_Regularizer: 0.000 validation_error: 21.485 +(epoch: 5, iters: 142992, time: 0.532, data: 0.000) G_L1: 18.376 G_L1_ABSOLUTE: 2.734 G_L1_RELATIVE: 15.642 G_Regularizer: 0.000 validation_error: 21.511 +(epoch: 5, iters: 144992, time: 0.528, data: 0.000) G_L1: 16.820 G_L1_ABSOLUTE: 2.649 G_L1_RELATIVE: 14.171 G_Regularizer: 0.000 validation_error: 20.644 +(epoch: 5, iters: 146992, time: 0.528, data: 0.000) G_L1: 15.625 G_L1_ABSOLUTE: 3.514 G_L1_RELATIVE: 12.111 G_Regularizer: 0.000 validation_error: 20.464 +(epoch: 5, iters: 148992, time: 0.534, data: 0.000) G_L1: 15.255 G_L1_ABSOLUTE: 2.739 G_L1_RELATIVE: 12.516 G_Regularizer: 0.000 validation_error: 20.891 +(epoch: 5, iters: 150992, time: 0.525, data: 0.000) G_L1: 15.172 G_L1_ABSOLUTE: 2.842 G_L1_RELATIVE: 12.329 G_Regularizer: 0.000 validation_error: 20.742 +(epoch: 5, iters: 152992, time: 0.525, data: 0.000) G_L1: 17.636 G_L1_ABSOLUTE: 3.128 G_L1_RELATIVE: 14.509 G_Regularizer: 0.000 validation_error: 20.232 +(epoch: 5, iters: 154992, time: 0.529, data: 0.000) G_L1: 19.175 G_L1_ABSOLUTE: 3.514 G_L1_RELATIVE: 15.661 G_Regularizer: 0.000 validation_error: 20.722 +(epoch: 5, iters: 156992, time: 0.529, data: 0.000) G_L1: 16.192 G_L1_ABSOLUTE: 2.670 G_L1_RELATIVE: 13.521 G_Regularizer: 0.000 validation_error: 21.077 +(epoch: 5, iters: 158992, time: 0.533, data: 0.000) G_L1: 16.512 G_L1_ABSOLUTE: 2.416 G_L1_RELATIVE: 14.096 G_Regularizer: 0.000 validation_error: 21.003 +(epoch: 5, iters: 160992, time: 0.538, data: 0.000) G_L1: 17.621 G_L1_ABSOLUTE: 2.973 G_L1_RELATIVE: 14.649 G_Regularizer: 0.000 validation_error: 20.191 +(epoch: 5, iters: 162992, time: 0.527, data: 0.000) G_L1: 18.010 G_L1_ABSOLUTE: 3.079 G_L1_RELATIVE: 14.931 G_Regularizer: 0.000 validation_error: 20.685 +(epoch: 5, iters: 164992, time: 0.532, data: 0.000) G_L1: 12.928 G_L1_ABSOLUTE: 2.614 G_L1_RELATIVE: 10.315 G_Regularizer: 0.000 validation_error: 21.217 +(epoch: 5, iters: 166992, time: 0.527, data: 0.000) G_L1: 15.709 G_L1_ABSOLUTE: 3.197 G_L1_RELATIVE: 12.512 G_Regularizer: 0.000 validation_error: 21.032 +(epoch: 5, iters: 168992, time: 0.530, data: 0.000) G_L1: 14.896 G_L1_ABSOLUTE: 2.696 G_L1_RELATIVE: 12.200 G_Regularizer: 0.000 validation_error: 20.744 +(epoch: 5, iters: 170992, time: 0.531, data: 0.000) G_L1: 17.794 G_L1_ABSOLUTE: 2.842 G_L1_RELATIVE: 14.951 G_Regularizer: 0.000 validation_error: 20.515 +(epoch: 5, iters: 172992, time: 
0.535, data: 0.000) G_L1: 16.485 G_L1_ABSOLUTE: 2.786 G_L1_RELATIVE: 13.700 G_Regularizer: 0.000 validation_error: 21.376 +(epoch: 5, iters: 174992, time: 0.530, data: 0.000) G_L1: 16.218 G_L1_ABSOLUTE: 3.070 G_L1_RELATIVE: 13.148 G_Regularizer: 0.000 validation_error: 20.978 +(epoch: 5, iters: 176992, time: 0.528, data: 0.000) G_L1: 17.142 G_L1_ABSOLUTE: 2.713 G_L1_RELATIVE: 14.428 G_Regularizer: 0.000 validation_error: 20.700 +(epoch: 5, iters: 178992, time: 0.526, data: 0.000) G_L1: 14.390 G_L1_ABSOLUTE: 2.877 G_L1_RELATIVE: 11.513 G_Regularizer: 0.000 validation_error: 20.437 +(epoch: 5, iters: 180992, time: 0.528, data: 0.000) G_L1: 17.505 G_L1_ABSOLUTE: 2.642 G_L1_RELATIVE: 14.864 G_Regularizer: 0.000 validation_error: 20.833 +(epoch: 5, iters: 182992, time: 0.530, data: 0.000) G_L1: 16.481 G_L1_ABSOLUTE: 2.732 G_L1_RELATIVE: 13.749 G_Regularizer: 0.000 validation_error: 21.315 +(epoch: 5, iters: 184992, time: 0.530, data: 0.000) G_L1: 15.490 G_L1_ABSOLUTE: 3.180 G_L1_RELATIVE: 12.310 G_Regularizer: 0.000 validation_error: 20.652 +(epoch: 5, iters: 186992, time: 0.523, data: 0.000) G_L1: 19.083 G_L1_ABSOLUTE: 2.669 G_L1_RELATIVE: 16.414 G_Regularizer: 0.000 validation_error: 21.482 +(epoch: 5, iters: 188992, time: 0.522, data: 0.000) G_L1: 12.842 G_L1_ABSOLUTE: 2.722 G_L1_RELATIVE: 10.120 G_Regularizer: 0.000 validation_error: 21.589 +(epoch: 5, iters: 190992, time: 0.523, data: 0.000) G_L1: 17.215 G_L1_ABSOLUTE: 3.183 G_L1_RELATIVE: 14.031 G_Regularizer: 0.000 validation_error: 20.766 +(epoch: 5, iters: 192992, time: 0.532, data: 0.000) G_L1: 17.388 G_L1_ABSOLUTE: 3.215 G_L1_RELATIVE: 14.173 G_Regularizer: 0.000 validation_error: 20.514 +(epoch: 5, iters: 194992, time: 0.523, data: 0.000) G_L1: 15.192 G_L1_ABSOLUTE: 3.067 G_L1_RELATIVE: 12.125 G_Regularizer: 0.000 validation_error: 21.642 +(epoch: 5, iters: 196992, time: 0.524, data: 0.000) G_L1: 13.670 G_L1_ABSOLUTE: 2.589 G_L1_RELATIVE: 11.081 G_Regularizer: 0.000 validation_error: 20.876 +(epoch: 5, iters: 198992, time: 0.531, data: 0.000) G_L1: 13.167 G_L1_ABSOLUTE: 2.790 G_L1_RELATIVE: 10.378 G_Regularizer: 0.000 validation_error: 20.930 +(epoch: 5, iters: 200992, time: 0.549, data: 0.000) G_L1: 17.505 G_L1_ABSOLUTE: 3.022 G_L1_RELATIVE: 14.482 G_Regularizer: 0.000 validation_error: 21.435 +(epoch: 5, iters: 202992, time: 0.543, data: 0.000) G_L1: 16.052 G_L1_ABSOLUTE: 2.844 G_L1_RELATIVE: 13.207 G_Regularizer: 0.000 validation_error: 19.793 +(epoch: 5, iters: 204992, time: 0.549, data: 0.000) G_L1: 16.025 G_L1_ABSOLUTE: 3.137 G_L1_RELATIVE: 12.888 G_Regularizer: 0.000 validation_error: 20.918 +(epoch: 5, iters: 206992, time: 0.545, data: 0.001) G_L1: 15.921 G_L1_ABSOLUTE: 3.198 G_L1_RELATIVE: 12.723 G_Regularizer: 0.000 validation_error: 21.422 +(epoch: 5, iters: 208992, time: 0.550, data: 0.000) G_L1: 17.209 G_L1_ABSOLUTE: 3.196 G_L1_RELATIVE: 14.013 G_Regularizer: 0.000 validation_error: 20.998 +(epoch: 5, iters: 210992, time: 0.544, data: 0.000) G_L1: 15.802 G_L1_ABSOLUTE: 2.725 G_L1_RELATIVE: 13.077 G_Regularizer: 0.000 validation_error: 20.975 +(epoch: 5, iters: 212992, time: 0.551, data: 0.000) G_L1: 20.727 G_L1_ABSOLUTE: 3.232 G_L1_RELATIVE: 17.495 G_Regularizer: 0.000 validation_error: 21.001 +(epoch: 5, iters: 214992, time: 0.546, data: 0.000) G_L1: 15.088 G_L1_ABSOLUTE: 2.792 G_L1_RELATIVE: 12.297 G_Regularizer: 0.000 validation_error: 21.220 +(epoch: 5, iters: 216992, time: 0.542, data: 0.000) G_L1: 19.945 G_L1_ABSOLUTE: 2.997 G_L1_RELATIVE: 16.947 G_Regularizer: 0.000 validation_error: 21.522 +(epoch: 5, iters: 
218992, time: 0.541, data: 0.000) G_L1: 16.690 G_L1_ABSOLUTE: 3.029 G_L1_RELATIVE: 13.661 G_Regularizer: 0.000 validation_error: 20.675 +(epoch: 5, iters: 220992, time: 0.542, data: 0.000) G_L1: 15.623 G_L1_ABSOLUTE: 2.757 G_L1_RELATIVE: 12.866 G_Regularizer: 0.000 validation_error: 20.291 +(epoch: 5, iters: 222992, time: 0.549, data: 0.000) G_L1: 14.722 G_L1_ABSOLUTE: 3.237 G_L1_RELATIVE: 11.485 G_Regularizer: 0.000 validation_error: 21.328 +(epoch: 5, iters: 224992, time: 0.543, data: 0.000) G_L1: 15.755 G_L1_ABSOLUTE: 2.394 G_L1_RELATIVE: 13.361 G_Regularizer: 0.000 validation_error: 21.384 +(epoch: 5, iters: 226992, time: 0.541, data: 0.000) G_L1: 15.782 G_L1_ABSOLUTE: 3.088 G_L1_RELATIVE: 12.694 G_Regularizer: 0.000 validation_error: 21.200 +(epoch: 5, iters: 228992, time: 0.542, data: 0.000) G_L1: 14.813 G_L1_ABSOLUTE: 2.869 G_L1_RELATIVE: 11.944 G_Regularizer: 0.000 validation_error: 21.240 +(epoch: 5, iters: 230992, time: 0.545, data: 0.000) G_L1: 17.611 G_L1_ABSOLUTE: 2.796 G_L1_RELATIVE: 14.816 G_Regularizer: 0.000 validation_error: 21.975 +(epoch: 5, iters: 232992, time: 0.549, data: 0.000) G_L1: 14.335 G_L1_ABSOLUTE: 2.866 G_L1_RELATIVE: 11.470 G_Regularizer: 0.000 validation_error: 21.296 +(epoch: 5, iters: 234992, time: 0.543, data: 0.000) G_L1: 16.163 G_L1_ABSOLUTE: 3.306 G_L1_RELATIVE: 12.857 G_Regularizer: 0.000 validation_error: 21.280 +(epoch: 5, iters: 236992, time: 0.544, data: 0.000) G_L1: 13.168 G_L1_ABSOLUTE: 2.510 G_L1_RELATIVE: 10.658 G_Regularizer: 0.000 validation_error: 21.811 +(epoch: 5, iters: 238992, time: 0.544, data: 0.000) G_L1: 15.478 G_L1_ABSOLUTE: 2.908 G_L1_RELATIVE: 12.570 G_Regularizer: 0.000 validation_error: 21.095 +(epoch: 5, iters: 240992, time: 0.537, data: 0.000) G_L1: 16.343 G_L1_ABSOLUTE: 2.980 G_L1_RELATIVE: 13.363 G_Regularizer: 0.000 validation_error: 21.650 +(epoch: 5, iters: 242992, time: 0.543, data: 0.000) G_L1: 17.232 G_L1_ABSOLUTE: 2.674 G_L1_RELATIVE: 14.557 G_Regularizer: 0.000 validation_error: 21.806 +(epoch: 5, iters: 244992, time: 0.541, data: 0.000) G_L1: 12.887 G_L1_ABSOLUTE: 2.793 G_L1_RELATIVE: 10.095 G_Regularizer: 0.000 validation_error: 21.543 +(epoch: 5, iters: 246992, time: 0.551, data: 0.000) G_L1: 18.945 G_L1_ABSOLUTE: 2.895 G_L1_RELATIVE: 16.051 G_Regularizer: 0.000 validation_error: 21.097 +(epoch: 5, iters: 248992, time: 0.543, data: 0.000) G_L1: 17.340 G_L1_ABSOLUTE: 2.822 G_L1_RELATIVE: 14.518 G_Regularizer: 0.000 validation_error: 20.655 +(epoch: 5, iters: 250992, time: 0.546, data: 0.000) G_L1: 13.879 G_L1_ABSOLUTE: 2.608 G_L1_RELATIVE: 11.272 G_Regularizer: 0.000 validation_error: 21.294 +(epoch: 5, iters: 252992, time: 0.548, data: 0.000) G_L1: 16.900 G_L1_ABSOLUTE: 2.870 G_L1_RELATIVE: 14.029 G_Regularizer: 0.000 validation_error: 21.443 +(epoch: 5, iters: 254992, time: 0.545, data: 0.001) G_L1: 15.761 G_L1_ABSOLUTE: 2.905 G_L1_RELATIVE: 12.856 G_Regularizer: 0.000 validation_error: 21.263 +(epoch: 5, iters: 256992, time: 0.547, data: 0.000) G_L1: 15.379 G_L1_ABSOLUTE: 2.989 G_L1_RELATIVE: 12.390 G_Regularizer: 0.000 validation_error: 21.476 +(epoch: 5, iters: 258992, time: 0.549, data: 0.000) G_L1: 18.406 G_L1_ABSOLUTE: 3.294 G_L1_RELATIVE: 15.112 G_Regularizer: 0.000 validation_error: 20.858 +(epoch: 5, iters: 260992, time: 0.554, data: 0.000) G_L1: 15.002 G_L1_ABSOLUTE: 2.748 G_L1_RELATIVE: 12.255 G_Regularizer: 0.000 validation_error: 21.839 +(epoch: 5, iters: 262992, time: 0.541, data: 0.000) G_L1: 18.592 G_L1_ABSOLUTE: 2.828 G_L1_RELATIVE: 15.764 G_Regularizer: 0.000 validation_error: 21.559 +(epoch: 
5, iters: 264992, time: 0.548, data: 0.000) G_L1: 16.400 G_L1_ABSOLUTE: 2.888 G_L1_RELATIVE: 13.512 G_Regularizer: 0.000 validation_error: 21.228 +(epoch: 5, iters: 266992, time: 0.539, data: 0.000) G_L1: 16.922 G_L1_ABSOLUTE: 3.311 G_L1_RELATIVE: 13.611 G_Regularizer: 0.000 validation_error: 20.668 +(epoch: 5, iters: 268992, time: 0.547, data: 0.000) G_L1: 16.334 G_L1_ABSOLUTE: 3.304 G_L1_RELATIVE: 13.030 G_Regularizer: 0.000 validation_error: 21.511 +(epoch: 5, iters: 270992, time: 0.545, data: 0.000) G_L1: 16.556 G_L1_ABSOLUTE: 2.924 G_L1_RELATIVE: 13.632 G_Regularizer: 0.000 validation_error: 20.518 +(epoch: 5, iters: 272992, time: 0.555, data: 0.000) G_L1: 20.760 G_L1_ABSOLUTE: 3.002 G_L1_RELATIVE: 17.758 G_Regularizer: 0.000 validation_error: 20.601 +(epoch: 5, iters: 274992, time: 0.545, data: 0.000) G_L1: 17.560 G_L1_ABSOLUTE: 3.227 G_L1_RELATIVE: 14.333 G_Regularizer: 0.000 validation_error: 21.247 +(epoch: 5, iters: 276992, time: 0.540, data: 0.000) G_L1: 14.889 G_L1_ABSOLUTE: 2.448 G_L1_RELATIVE: 12.441 G_Regularizer: 0.000 validation_error: 21.005 +(epoch: 5, iters: 278992, time: 0.544, data: 0.000) G_L1: 13.757 G_L1_ABSOLUTE: 2.810 G_L1_RELATIVE: 10.947 G_Regularizer: 0.000 validation_error: 21.299 +(epoch: 5, iters: 280992, time: 0.533, data: 0.000) G_L1: 18.719 G_L1_ABSOLUTE: 2.892 G_L1_RELATIVE: 15.828 G_Regularizer: 0.000 validation_error: 20.717 +(epoch: 5, iters: 282992, time: 0.544, data: 0.000) G_L1: 19.028 G_L1_ABSOLUTE: 3.058 G_L1_RELATIVE: 15.970 G_Regularizer: 0.000 validation_error: 20.760 +(epoch: 5, iters: 284992, time: 0.540, data: 0.000) G_L1: 16.437 G_L1_ABSOLUTE: 3.398 G_L1_RELATIVE: 13.039 G_Regularizer: 0.000 validation_error: 21.061 +(epoch: 5, iters: 286992, time: 0.544, data: 0.000) G_L1: 16.851 G_L1_ABSOLUTE: 2.656 G_L1_RELATIVE: 14.196 G_Regularizer: 0.000 validation_error: 20.939 +(epoch: 5, iters: 288992, time: 0.544, data: 0.000) G_L1: 17.301 G_L1_ABSOLUTE: 2.647 G_L1_RELATIVE: 14.654 G_Regularizer: 0.000 validation_error: 20.711 +(epoch: 5, iters: 290992, time: 0.538, data: 0.000) G_L1: 18.381 G_L1_ABSOLUTE: 3.082 G_L1_RELATIVE: 15.299 G_Regularizer: 0.000 validation_error: 21.061 +(epoch: 5, iters: 292992, time: 0.539, data: 0.000) G_L1: 19.436 G_L1_ABSOLUTE: 3.374 G_L1_RELATIVE: 16.062 G_Regularizer: 0.000 validation_error: 21.706 +(epoch: 5, iters: 294992, time: 0.548, data: 0.000) G_L1: 15.186 G_L1_ABSOLUTE: 2.892 G_L1_RELATIVE: 12.294 G_Regularizer: 0.000 validation_error: 20.577 +(epoch: 5, iters: 296992, time: 0.535, data: 0.000) G_L1: 16.207 G_L1_ABSOLUTE: 2.579 G_L1_RELATIVE: 13.628 G_Regularizer: 0.000 validation_error: 21.000 +(epoch: 5, iters: 298992, time: 0.543, data: 0.000) G_L1: 15.561 G_L1_ABSOLUTE: 3.015 G_L1_RELATIVE: 12.546 G_Regularizer: 0.000 validation_error: 20.748 +(epoch: 5, iters: 300992, time: 0.545, data: 0.000) G_L1: 17.729 G_L1_ABSOLUTE: 2.823 G_L1_RELATIVE: 14.906 G_Regularizer: 0.000 validation_error: 20.768 +(epoch: 6, iters: 240, time: 0.542, data: 0.000) G_L1: 16.193 G_L1_ABSOLUTE: 2.686 G_L1_RELATIVE: 13.507 G_Regularizer: 0.000 validation_error: 21.195 +(epoch: 6, iters: 2240, time: 0.549, data: 0.000) G_L1: 15.056 G_L1_ABSOLUTE: 2.957 G_L1_RELATIVE: 12.099 G_Regularizer: 0.000 validation_error: 20.636 +(epoch: 6, iters: 4240, time: 0.548, data: 0.000) G_L1: 17.886 G_L1_ABSOLUTE: 2.863 G_L1_RELATIVE: 15.023 G_Regularizer: 0.000 validation_error: 21.398 +(epoch: 6, iters: 6240, time: 0.543, data: 0.000) G_L1: 18.168 G_L1_ABSOLUTE: 2.751 G_L1_RELATIVE: 15.416 G_Regularizer: 0.000 validation_error: 20.790 
+(epoch: 6, iters: 8240, time: 0.547, data: 0.000) G_L1: 15.935 G_L1_ABSOLUTE: 3.023 G_L1_RELATIVE: 12.912 G_Regularizer: 0.000 validation_error: 20.934 +(epoch: 6, iters: 10240, time: 0.540, data: 0.000) G_L1: 19.362 G_L1_ABSOLUTE: 3.176 G_L1_RELATIVE: 16.186 G_Regularizer: 0.000 validation_error: 22.686 +(epoch: 6, iters: 12240, time: 0.546, data: 0.000) G_L1: 14.796 G_L1_ABSOLUTE: 3.008 G_L1_RELATIVE: 11.788 G_Regularizer: 0.000 validation_error: 20.897 +(epoch: 6, iters: 14240, time: 0.540, data: 0.001) G_L1: 16.099 G_L1_ABSOLUTE: 2.815 G_L1_RELATIVE: 13.284 G_Regularizer: 0.000 validation_error: 21.051 +(epoch: 6, iters: 16240, time: 0.543, data: 0.000) G_L1: 16.948 G_L1_ABSOLUTE: 2.767 G_L1_RELATIVE: 14.180 G_Regularizer: 0.000 validation_error: 21.257 +(epoch: 6, iters: 18240, time: 0.550, data: 0.000) G_L1: 16.655 G_L1_ABSOLUTE: 3.055 G_L1_RELATIVE: 13.601 G_Regularizer: 0.000 validation_error: 20.445 +(epoch: 6, iters: 20240, time: 0.542, data: 0.000) G_L1: 15.364 G_L1_ABSOLUTE: 2.522 G_L1_RELATIVE: 12.842 G_Regularizer: 0.000 validation_error: 21.024 +(epoch: 6, iters: 22240, time: 0.543, data: 0.000) G_L1: 17.821 G_L1_ABSOLUTE: 2.992 G_L1_RELATIVE: 14.829 G_Regularizer: 0.000 validation_error: 20.789 +(epoch: 6, iters: 24240, time: 0.543, data: 0.000) G_L1: 14.774 G_L1_ABSOLUTE: 2.638 G_L1_RELATIVE: 12.135 G_Regularizer: 0.000 validation_error: 20.975 +(epoch: 6, iters: 26240, time: 0.542, data: 0.000) G_L1: 21.330 G_L1_ABSOLUTE: 3.396 G_L1_RELATIVE: 17.934 G_Regularizer: 0.000 validation_error: 21.287 +(epoch: 6, iters: 28240, time: 0.539, data: 0.000) G_L1: 17.168 G_L1_ABSOLUTE: 3.573 G_L1_RELATIVE: 13.594 G_Regularizer: 0.000 validation_error: 20.952 +(epoch: 6, iters: 30240, time: 0.534, data: 0.000) G_L1: 13.083 G_L1_ABSOLUTE: 2.807 G_L1_RELATIVE: 10.276 G_Regularizer: 0.000 validation_error: 20.281 +(epoch: 6, iters: 32240, time: 0.546, data: 0.000) G_L1: 14.334 G_L1_ABSOLUTE: 2.988 G_L1_RELATIVE: 11.347 G_Regularizer: 0.000 validation_error: 21.197 +(epoch: 6, iters: 34240, time: 0.545, data: 0.000) G_L1: 15.807 G_L1_ABSOLUTE: 2.894 G_L1_RELATIVE: 12.913 G_Regularizer: 0.000 validation_error: 21.105 +(epoch: 6, iters: 36240, time: 0.540, data: 0.000) G_L1: 16.104 G_L1_ABSOLUTE: 3.217 G_L1_RELATIVE: 12.887 G_Regularizer: 0.000 validation_error: 20.790 +(epoch: 6, iters: 38240, time: 0.540, data: 0.000) G_L1: 13.744 G_L1_ABSOLUTE: 2.733 G_L1_RELATIVE: 11.011 G_Regularizer: 0.000 validation_error: 21.162 +(epoch: 6, iters: 40240, time: 0.546, data: 0.000) G_L1: 16.080 G_L1_ABSOLUTE: 3.224 G_L1_RELATIVE: 12.856 G_Regularizer: 0.000 validation_error: 20.700 +(epoch: 6, iters: 42240, time: 0.544, data: 0.000) G_L1: 13.914 G_L1_ABSOLUTE: 2.249 G_L1_RELATIVE: 11.665 G_Regularizer: 0.000 validation_error: 20.292 +(epoch: 6, iters: 44240, time: 0.546, data: 0.000) G_L1: 17.629 G_L1_ABSOLUTE: 2.979 G_L1_RELATIVE: 14.650 G_Regularizer: 0.000 validation_error: 20.891 +(epoch: 6, iters: 46240, time: 0.547, data: 0.000) G_L1: 17.605 G_L1_ABSOLUTE: 2.720 G_L1_RELATIVE: 14.884 G_Regularizer: 0.000 validation_error: 21.015 +(epoch: 6, iters: 48240, time: 0.546, data: 0.000) G_L1: 15.357 G_L1_ABSOLUTE: 3.188 G_L1_RELATIVE: 12.169 G_Regularizer: 0.000 validation_error: 21.207 +(epoch: 6, iters: 50240, time: 0.549, data: 0.000) G_L1: 16.898 G_L1_ABSOLUTE: 3.118 G_L1_RELATIVE: 13.780 G_Regularizer: 0.000 validation_error: 21.300 +(epoch: 6, iters: 52240, time: 0.541, data: 0.000) G_L1: 19.373 G_L1_ABSOLUTE: 3.246 G_L1_RELATIVE: 16.127 G_Regularizer: 0.000 validation_error: 20.442 +(epoch: 6, 
iters: 54240, time: 0.547, data: 0.000) G_L1: 18.230 G_L1_ABSOLUTE: 2.957 G_L1_RELATIVE: 15.272 G_Regularizer: 0.000 validation_error: 21.316 +(epoch: 6, iters: 56240, time: 0.543, data: 0.000) G_L1: 14.731 G_L1_ABSOLUTE: 2.554 G_L1_RELATIVE: 12.177 G_Regularizer: 0.000 validation_error: 20.653 +(epoch: 6, iters: 58240, time: 0.549, data: 0.000) G_L1: 17.763 G_L1_ABSOLUTE: 3.005 G_L1_RELATIVE: 14.758 G_Regularizer: 0.000 validation_error: 20.795 +(epoch: 6, iters: 60240, time: 0.539, data: 0.000) G_L1: 14.467 G_L1_ABSOLUTE: 2.906 G_L1_RELATIVE: 11.561 G_Regularizer: 0.000 validation_error: 21.407 +(epoch: 6, iters: 62240, time: 0.547, data: 0.000) G_L1: 16.870 G_L1_ABSOLUTE: 2.657 G_L1_RELATIVE: 14.213 G_Regularizer: 0.000 validation_error: 20.559 +(epoch: 6, iters: 64240, time: 0.546, data: 0.000) G_L1: 18.308 G_L1_ABSOLUTE: 2.776 G_L1_RELATIVE: 15.531 G_Regularizer: 0.000 validation_error: 20.864 +(epoch: 6, iters: 66240, time: 0.541, data: 0.000) G_L1: 16.301 G_L1_ABSOLUTE: 2.725 G_L1_RELATIVE: 13.576 G_Regularizer: 0.000 validation_error: 21.027 +(epoch: 6, iters: 68240, time: 0.547, data: 0.000) G_L1: 17.460 G_L1_ABSOLUTE: 2.747 G_L1_RELATIVE: 14.712 G_Regularizer: 0.000 validation_error: 21.079 +(epoch: 6, iters: 70240, time: 0.549, data: 0.000) G_L1: 20.230 G_L1_ABSOLUTE: 2.877 G_L1_RELATIVE: 17.353 G_Regularizer: 0.000 validation_error: 21.018 +(epoch: 6, iters: 72240, time: 0.547, data: 0.000) G_L1: 18.004 G_L1_ABSOLUTE: 3.023 G_L1_RELATIVE: 14.981 G_Regularizer: 0.000 validation_error: 20.934 +(epoch: 6, iters: 74240, time: 0.548, data: 0.000) G_L1: 15.911 G_L1_ABSOLUTE: 2.925 G_L1_RELATIVE: 12.986 G_Regularizer: 0.000 validation_error: 20.854 +(epoch: 6, iters: 76240, time: 0.542, data: 0.000) G_L1: 16.619 G_L1_ABSOLUTE: 2.946 G_L1_RELATIVE: 13.673 G_Regularizer: 0.000 validation_error: 20.799 +(epoch: 6, iters: 78240, time: 0.546, data: 0.000) G_L1: 15.331 G_L1_ABSOLUTE: 2.588 G_L1_RELATIVE: 12.743 G_Regularizer: 0.000 validation_error: 21.296 +(epoch: 6, iters: 80240, time: 0.542, data: 0.000) G_L1: 14.614 G_L1_ABSOLUTE: 2.569 G_L1_RELATIVE: 12.046 G_Regularizer: 0.000 validation_error: 21.117 +(epoch: 6, iters: 82240, time: 0.547, data: 0.000) G_L1: 15.960 G_L1_ABSOLUTE: 2.543 G_L1_RELATIVE: 13.417 G_Regularizer: 0.000 validation_error: 20.625 +(epoch: 6, iters: 84240, time: 0.550, data: 0.000) G_L1: 15.596 G_L1_ABSOLUTE: 2.662 G_L1_RELATIVE: 12.934 G_Regularizer: 0.000 validation_error: 21.694 +(epoch: 6, iters: 86240, time: 0.542, data: 0.000) G_L1: 16.010 G_L1_ABSOLUTE: 2.647 G_L1_RELATIVE: 13.362 G_Regularizer: 0.000 validation_error: 21.115 +(epoch: 6, iters: 88240, time: 0.544, data: 0.000) G_L1: 18.031 G_L1_ABSOLUTE: 3.014 G_L1_RELATIVE: 15.016 G_Regularizer: 0.000 validation_error: 21.035 +(epoch: 6, iters: 90240, time: 0.546, data: 0.001) G_L1: 17.389 G_L1_ABSOLUTE: 2.650 G_L1_RELATIVE: 14.739 G_Regularizer: 0.000 validation_error: 20.744 +(epoch: 6, iters: 92240, time: 0.542, data: 0.000) G_L1: 14.932 G_L1_ABSOLUTE: 2.692 G_L1_RELATIVE: 12.240 G_Regularizer: 0.000 validation_error: 20.896 +(epoch: 6, iters: 94240, time: 0.548, data: 0.000) G_L1: 15.241 G_L1_ABSOLUTE: 3.093 G_L1_RELATIVE: 12.147 G_Regularizer: 0.000 validation_error: 20.566 +(epoch: 6, iters: 96240, time: 0.547, data: 0.000) G_L1: 18.588 G_L1_ABSOLUTE: 2.930 G_L1_RELATIVE: 15.658 G_Regularizer: 0.000 validation_error: 21.328 +(epoch: 6, iters: 98240, time: 0.544, data: 0.000) G_L1: 18.605 G_L1_ABSOLUTE: 2.857 G_L1_RELATIVE: 15.748 G_Regularizer: 0.000 validation_error: 20.860 +(epoch: 6, iters: 
100240, time: 0.543, data: 0.000) G_L1: 15.938 G_L1_ABSOLUTE: 2.886 G_L1_RELATIVE: 13.053 G_Regularizer: 0.000 validation_error: 20.752 +(epoch: 6, iters: 102240, time: 0.542, data: 0.000) G_L1: 15.017 G_L1_ABSOLUTE: 2.691 G_L1_RELATIVE: 12.326 G_Regularizer: 0.000 validation_error: 21.923 +(epoch: 6, iters: 104240, time: 0.555, data: 0.000) G_L1: 17.065 G_L1_ABSOLUTE: 3.598 G_L1_RELATIVE: 13.467 G_Regularizer: 0.000 validation_error: 21.277 +(epoch: 6, iters: 106240, time: 0.542, data: 0.000) G_L1: 16.373 G_L1_ABSOLUTE: 2.733 G_L1_RELATIVE: 13.639 G_Regularizer: 0.000 validation_error: 20.512 +(epoch: 6, iters: 108240, time: 0.547, data: 0.000) G_L1: 17.522 G_L1_ABSOLUTE: 2.490 G_L1_RELATIVE: 15.031 G_Regularizer: 0.000 validation_error: 20.971 +(epoch: 6, iters: 110240, time: 0.548, data: 0.000) G_L1: 16.326 G_L1_ABSOLUTE: 3.191 G_L1_RELATIVE: 13.135 G_Regularizer: 0.000 validation_error: 21.113 +(epoch: 6, iters: 112240, time: 0.542, data: 0.000) G_L1: 15.629 G_L1_ABSOLUTE: 3.486 G_L1_RELATIVE: 12.143 G_Regularizer: 0.000 validation_error: 21.058 +(epoch: 6, iters: 114240, time: 0.544, data: 0.000) G_L1: 18.839 G_L1_ABSOLUTE: 2.812 G_L1_RELATIVE: 16.026 G_Regularizer: 0.000 validation_error: 21.161 +(epoch: 6, iters: 116240, time: 0.541, data: 0.000) G_L1: 13.580 G_L1_ABSOLUTE: 2.425 G_L1_RELATIVE: 11.155 G_Regularizer: 0.000 validation_error: 21.138 +(epoch: 6, iters: 118240, time: 0.546, data: 0.000) G_L1: 16.520 G_L1_ABSOLUTE: 2.534 G_L1_RELATIVE: 13.986 G_Regularizer: 0.000 validation_error: 21.329 +(epoch: 6, iters: 120240, time: 0.549, data: 0.000) G_L1: 16.249 G_L1_ABSOLUTE: 2.978 G_L1_RELATIVE: 13.270 G_Regularizer: 0.000 validation_error: 20.401 +(epoch: 6, iters: 122240, time: 0.544, data: 0.000) G_L1: 14.663 G_L1_ABSOLUTE: 2.624 G_L1_RELATIVE: 12.039 G_Regularizer: 0.000 validation_error: 20.699 +(epoch: 6, iters: 124240, time: 0.536, data: 0.000) G_L1: 15.925 G_L1_ABSOLUTE: 3.021 G_L1_RELATIVE: 12.903 G_Regularizer: 0.000 validation_error: 21.166 +(epoch: 6, iters: 126240, time: 0.539, data: 0.000) G_L1: 21.046 G_L1_ABSOLUTE: 2.758 G_L1_RELATIVE: 18.287 G_Regularizer: 0.000 validation_error: 21.553 +(epoch: 6, iters: 128240, time: 0.547, data: 0.000) G_L1: 16.882 G_L1_ABSOLUTE: 3.099 G_L1_RELATIVE: 13.783 G_Regularizer: 0.000 validation_error: 20.784 +(epoch: 6, iters: 130240, time: 0.555, data: 0.000) G_L1: 16.164 G_L1_ABSOLUTE: 2.908 G_L1_RELATIVE: 13.256 G_Regularizer: 0.000 validation_error: 20.737 +(epoch: 6, iters: 132240, time: 0.549, data: 0.000) G_L1: 14.700 G_L1_ABSOLUTE: 2.571 G_L1_RELATIVE: 12.128 G_Regularizer: 0.000 validation_error: 21.349 +(epoch: 6, iters: 134240, time: 0.541, data: 0.000) G_L1: 15.702 G_L1_ABSOLUTE: 2.670 G_L1_RELATIVE: 13.032 G_Regularizer: 0.000 validation_error: 21.009 +(epoch: 6, iters: 136240, time: 0.553, data: 0.000) G_L1: 21.830 G_L1_ABSOLUTE: 3.493 G_L1_RELATIVE: 18.337 G_Regularizer: 0.000 validation_error: 20.923 +(epoch: 6, iters: 138240, time: 0.549, data: 0.000) G_L1: 16.293 G_L1_ABSOLUTE: 3.088 G_L1_RELATIVE: 13.205 G_Regularizer: 0.000 validation_error: 21.205 +(epoch: 6, iters: 140240, time: 0.542, data: 0.000) G_L1: 13.667 G_L1_ABSOLUTE: 2.286 G_L1_RELATIVE: 11.381 G_Regularizer: 0.000 validation_error: 21.096 +(epoch: 6, iters: 142240, time: 0.552, data: 0.000) G_L1: 21.931 G_L1_ABSOLUTE: 2.770 G_L1_RELATIVE: 19.161 G_Regularizer: 0.000 validation_error: 20.819 +(epoch: 6, iters: 144240, time: 0.543, data: 0.000) G_L1: 16.510 G_L1_ABSOLUTE: 2.922 G_L1_RELATIVE: 13.588 G_Regularizer: 0.000 validation_error: 20.791 +(epoch: 
6, iters: 146240, time: 0.550, data: 0.000) G_L1: 15.841 G_L1_ABSOLUTE: 2.599 G_L1_RELATIVE: 13.241 G_Regularizer: 0.000 validation_error: 21.473 +(epoch: 6, iters: 148240, time: 0.541, data: 0.000) G_L1: 16.553 G_L1_ABSOLUTE: 2.738 G_L1_RELATIVE: 13.816 G_Regularizer: 0.000 validation_error: 21.066 +(epoch: 6, iters: 150240, time: 0.542, data: 0.000) G_L1: 15.483 G_L1_ABSOLUTE: 2.921 G_L1_RELATIVE: 12.562 G_Regularizer: 0.000 validation_error: 22.378 +(epoch: 6, iters: 152240, time: 0.547, data: 0.000) G_L1: 16.069 G_L1_ABSOLUTE: 3.265 G_L1_RELATIVE: 12.803 G_Regularizer: 0.000 validation_error: 21.070 +(epoch: 6, iters: 154240, time: 0.539, data: 0.000) G_L1: 16.541 G_L1_ABSOLUTE: 2.372 G_L1_RELATIVE: 14.169 G_Regularizer: 0.000 validation_error: 21.590 +(epoch: 6, iters: 156240, time: 0.548, data: 0.000) G_L1: 17.000 G_L1_ABSOLUTE: 2.788 G_L1_RELATIVE: 14.212 G_Regularizer: 0.000 validation_error: 21.530 +(epoch: 6, iters: 158240, time: 0.535, data: 0.000) G_L1: 13.460 G_L1_ABSOLUTE: 2.373 G_L1_RELATIVE: 11.087 G_Regularizer: 0.000 validation_error: 20.780 +(epoch: 6, iters: 160240, time: 0.543, data: 0.001) G_L1: 14.255 G_L1_ABSOLUTE: 2.447 G_L1_RELATIVE: 11.808 G_Regularizer: 0.000 validation_error: 21.350 +(epoch: 6, iters: 162240, time: 0.545, data: 0.000) G_L1: 17.160 G_L1_ABSOLUTE: 2.724 G_L1_RELATIVE: 14.436 G_Regularizer: 0.000 validation_error: 21.780 +(epoch: 6, iters: 164240, time: 0.540, data: 0.000) G_L1: 17.760 G_L1_ABSOLUTE: 2.646 G_L1_RELATIVE: 15.114 G_Regularizer: 0.000 validation_error: 20.557 +(epoch: 6, iters: 166240, time: 0.542, data: 0.000) G_L1: 15.203 G_L1_ABSOLUTE: 2.727 G_L1_RELATIVE: 12.476 G_Regularizer: 0.000 validation_error: 20.299 +(epoch: 6, iters: 168240, time: 0.542, data: 0.000) G_L1: 17.053 G_L1_ABSOLUTE: 3.043 G_L1_RELATIVE: 14.010 G_Regularizer: 0.000 validation_error: 21.099 +(epoch: 6, iters: 170240, time: 0.548, data: 0.000) G_L1: 17.217 G_L1_ABSOLUTE: 2.785 G_L1_RELATIVE: 14.432 G_Regularizer: 0.000 validation_error: 20.801 +(epoch: 6, iters: 172240, time: 0.537, data: 0.000) G_L1: 25.567 G_L1_ABSOLUTE: 2.867 G_L1_RELATIVE: 22.699 G_Regularizer: 0.000 validation_error: 20.464 +(epoch: 6, iters: 174240, time: 0.547, data: 0.000) G_L1: 14.403 G_L1_ABSOLUTE: 2.586 G_L1_RELATIVE: 11.817 G_Regularizer: 0.000 validation_error: 20.336 +(epoch: 6, iters: 176240, time: 0.549, data: 0.000) G_L1: 15.679 G_L1_ABSOLUTE: 2.913 G_L1_RELATIVE: 12.766 G_Regularizer: 0.000 validation_error: 20.993 +(epoch: 6, iters: 178240, time: 0.551, data: 0.000) G_L1: 15.552 G_L1_ABSOLUTE: 2.854 G_L1_RELATIVE: 12.698 G_Regularizer: 0.000 validation_error: 21.157 +(epoch: 6, iters: 180240, time: 0.550, data: 0.000) G_L1: 17.262 G_L1_ABSOLUTE: 3.042 G_L1_RELATIVE: 14.220 G_Regularizer: 0.000 validation_error: 20.596 +(epoch: 6, iters: 182240, time: 0.546, data: 0.000) G_L1: 16.092 G_L1_ABSOLUTE: 2.614 G_L1_RELATIVE: 13.478 G_Regularizer: 0.000 validation_error: 21.095 +(epoch: 6, iters: 184240, time: 0.553, data: 0.000) G_L1: 16.253 G_L1_ABSOLUTE: 3.051 G_L1_RELATIVE: 13.202 G_Regularizer: 0.000 validation_error: 21.042 +(epoch: 6, iters: 186240, time: 0.548, data: 0.000) G_L1: 17.756 G_L1_ABSOLUTE: 2.643 G_L1_RELATIVE: 15.113 G_Regularizer: 0.000 validation_error: 21.175 +(epoch: 6, iters: 188240, time: 0.536, data: 0.000) G_L1: 15.022 G_L1_ABSOLUTE: 2.514 G_L1_RELATIVE: 12.509 G_Regularizer: 0.000 validation_error: 21.060 +(epoch: 6, iters: 190240, time: 0.544, data: 0.000) G_L1: 15.218 G_L1_ABSOLUTE: 2.563 G_L1_RELATIVE: 12.655 G_Regularizer: 0.000 validation_error: 
21.270 +(epoch: 6, iters: 192240, time: 0.546, data: 0.000) G_L1: 16.688 G_L1_ABSOLUTE: 2.937 G_L1_RELATIVE: 13.751 G_Regularizer: 0.000 validation_error: 20.366 +(epoch: 6, iters: 194240, time: 0.546, data: 0.000) G_L1: 16.584 G_L1_ABSOLUTE: 2.699 G_L1_RELATIVE: 13.886 G_Regularizer: 0.000 validation_error: 21.354 +(epoch: 6, iters: 196240, time: 0.545, data: 0.000) G_L1: 15.798 G_L1_ABSOLUTE: 2.738 G_L1_RELATIVE: 13.059 G_Regularizer: 0.000 validation_error: 21.148 +(epoch: 6, iters: 198240, time: 0.541, data: 0.000) G_L1: 16.513 G_L1_ABSOLUTE: 2.519 G_L1_RELATIVE: 13.994 G_Regularizer: 0.000 validation_error: 21.365 +(epoch: 6, iters: 200240, time: 0.548, data: 0.000) G_L1: 16.807 G_L1_ABSOLUTE: 3.509 G_L1_RELATIVE: 13.298 G_Regularizer: 0.000 validation_error: 20.643 +(epoch: 6, iters: 202240, time: 0.544, data: 0.000) G_L1: 16.552 G_L1_ABSOLUTE: 2.548 G_L1_RELATIVE: 14.004 G_Regularizer: 0.000 validation_error: 21.044 +(epoch: 6, iters: 204240, time: 0.544, data: 0.000) G_L1: 14.405 G_L1_ABSOLUTE: 2.824 G_L1_RELATIVE: 11.581 G_Regularizer: 0.000 validation_error: 21.549 +(epoch: 6, iters: 206240, time: 0.540, data: 0.000) G_L1: 16.689 G_L1_ABSOLUTE: 2.982 G_L1_RELATIVE: 13.707 G_Regularizer: 0.000 validation_error: 20.910 +(epoch: 6, iters: 208240, time: 0.544, data: 0.001) G_L1: 15.751 G_L1_ABSOLUTE: 2.970 G_L1_RELATIVE: 12.781 G_Regularizer: 0.000 validation_error: 20.422 +(epoch: 6, iters: 210240, time: 0.544, data: 0.000) G_L1: 13.601 G_L1_ABSOLUTE: 2.441 G_L1_RELATIVE: 11.160 G_Regularizer: 0.000 validation_error: 20.590 +(epoch: 6, iters: 212240, time: 0.549, data: 0.000) G_L1: 15.976 G_L1_ABSOLUTE: 2.613 G_L1_RELATIVE: 13.363 G_Regularizer: 0.000 validation_error: 20.545 +(epoch: 6, iters: 214240, time: 0.544, data: 0.000) G_L1: 15.391 G_L1_ABSOLUTE: 2.708 G_L1_RELATIVE: 12.683 G_Regularizer: 0.000 validation_error: 20.906 +(epoch: 6, iters: 216240, time: 0.546, data: 0.000) G_L1: 17.680 G_L1_ABSOLUTE: 2.797 G_L1_RELATIVE: 14.884 G_Regularizer: 0.000 validation_error: 20.636 +(epoch: 6, iters: 218240, time: 0.543, data: 0.000) G_L1: 11.809 G_L1_ABSOLUTE: 2.778 G_L1_RELATIVE: 9.031 G_Regularizer: 0.000 validation_error: 20.642 +(epoch: 6, iters: 220240, time: 0.544, data: 0.000) G_L1: 15.956 G_L1_ABSOLUTE: 3.297 G_L1_RELATIVE: 12.659 G_Regularizer: 0.000 validation_error: 21.823 +(epoch: 6, iters: 222240, time: 0.542, data: 0.000) G_L1: 16.555 G_L1_ABSOLUTE: 3.246 G_L1_RELATIVE: 13.308 G_Regularizer: 0.000 validation_error: 20.933 +(epoch: 6, iters: 224240, time: 0.548, data: 0.000) G_L1: 14.384 G_L1_ABSOLUTE: 2.859 G_L1_RELATIVE: 11.525 G_Regularizer: 0.000 validation_error: 21.103 +(epoch: 6, iters: 226240, time: 0.546, data: 0.000) G_L1: 14.149 G_L1_ABSOLUTE: 2.693 G_L1_RELATIVE: 11.456 G_Regularizer: 0.000 validation_error: 20.217 +(epoch: 6, iters: 228240, time: 0.542, data: 0.000) G_L1: 17.708 G_L1_ABSOLUTE: 2.590 G_L1_RELATIVE: 15.118 G_Regularizer: 0.000 validation_error: 20.540 +(epoch: 6, iters: 230240, time: 0.544, data: 0.000) G_L1: 19.234 G_L1_ABSOLUTE: 2.976 G_L1_RELATIVE: 16.258 G_Regularizer: 0.000 validation_error: 21.519 +(epoch: 6, iters: 232240, time: 0.542, data: 0.000) G_L1: 15.313 G_L1_ABSOLUTE: 2.770 G_L1_RELATIVE: 12.543 G_Regularizer: 0.000 validation_error: 20.979 +(epoch: 6, iters: 234240, time: 0.540, data: 0.000) G_L1: 16.593 G_L1_ABSOLUTE: 2.762 G_L1_RELATIVE: 13.832 G_Regularizer: 0.000 validation_error: 21.124 +(epoch: 6, iters: 236240, time: 0.546, data: 0.000) G_L1: 15.655 G_L1_ABSOLUTE: 3.120 G_L1_RELATIVE: 12.535 G_Regularizer: 0.000 
validation_error: 20.913 +(epoch: 6, iters: 238240, time: 0.549, data: 0.000) G_L1: 23.141 G_L1_ABSOLUTE: 3.225 G_L1_RELATIVE: 19.915 G_Regularizer: 0.000 validation_error: 21.092 +(epoch: 6, iters: 240240, time: 0.546, data: 0.000) G_L1: 16.912 G_L1_ABSOLUTE: 2.991 G_L1_RELATIVE: 13.921 G_Regularizer: 0.000 validation_error: 20.274 +(epoch: 6, iters: 242240, time: 0.547, data: 0.000) G_L1: 15.885 G_L1_ABSOLUTE: 2.699 G_L1_RELATIVE: 13.186 G_Regularizer: 0.000 validation_error: 21.735 +(epoch: 6, iters: 244240, time: 0.548, data: 0.000) G_L1: 17.707 G_L1_ABSOLUTE: 2.800 G_L1_RELATIVE: 14.907 G_Regularizer: 0.000 validation_error: 20.557 +(epoch: 6, iters: 246240, time: 0.543, data: 0.000) G_L1: 19.591 G_L1_ABSOLUTE: 2.865 G_L1_RELATIVE: 16.726 G_Regularizer: 0.000 validation_error: 21.019 +(epoch: 6, iters: 248240, time: 0.552, data: 0.000) G_L1: 18.637 G_L1_ABSOLUTE: 2.540 G_L1_RELATIVE: 16.097 G_Regularizer: 0.000 validation_error: 21.345 +(epoch: 6, iters: 250240, time: 0.547, data: 0.000) G_L1: 16.012 G_L1_ABSOLUTE: 2.777 G_L1_RELATIVE: 13.235 G_Regularizer: 0.000 validation_error: 20.373 +(epoch: 6, iters: 252240, time: 0.545, data: 0.000) G_L1: 17.392 G_L1_ABSOLUTE: 2.731 G_L1_RELATIVE: 14.661 G_Regularizer: 0.000 validation_error: 20.751 +(epoch: 6, iters: 254240, time: 0.544, data: 0.000) G_L1: 15.459 G_L1_ABSOLUTE: 2.811 G_L1_RELATIVE: 12.648 G_Regularizer: 0.000 validation_error: 20.846 +(epoch: 6, iters: 256240, time: 0.548, data: 0.000) G_L1: 15.094 G_L1_ABSOLUTE: 2.984 G_L1_RELATIVE: 12.110 G_Regularizer: 0.000 validation_error: 20.507 +(epoch: 6, iters: 258240, time: 0.548, data: 0.000) G_L1: 16.904 G_L1_ABSOLUTE: 2.526 G_L1_RELATIVE: 14.378 G_Regularizer: 0.000 validation_error: 20.608 +(epoch: 6, iters: 260240, time: 0.544, data: 0.001) G_L1: 13.811 G_L1_ABSOLUTE: 2.348 G_L1_RELATIVE: 11.463 G_Regularizer: 0.000 validation_error: 20.306 +(epoch: 6, iters: 262240, time: 0.535, data: 0.000) G_L1: 12.540 G_L1_ABSOLUTE: 2.617 G_L1_RELATIVE: 9.923 G_Regularizer: 0.000 validation_error: 20.725 +(epoch: 6, iters: 264240, time: 0.546, data: 0.000) G_L1: 14.167 G_L1_ABSOLUTE: 2.710 G_L1_RELATIVE: 11.457 G_Regularizer: 0.000 validation_error: 21.008 +(epoch: 6, iters: 266240, time: 0.544, data: 0.000) G_L1: 18.652 G_L1_ABSOLUTE: 2.944 G_L1_RELATIVE: 15.707 G_Regularizer: 0.000 validation_error: 20.880 +(epoch: 6, iters: 268240, time: 0.540, data: 0.000) G_L1: 19.862 G_L1_ABSOLUTE: 2.538 G_L1_RELATIVE: 17.324 G_Regularizer: 0.000 validation_error: 21.056 +(epoch: 6, iters: 270240, time: 0.540, data: 0.000) G_L1: 13.911 G_L1_ABSOLUTE: 3.011 G_L1_RELATIVE: 10.901 G_Regularizer: 0.000 validation_error: 20.904 +(epoch: 6, iters: 272240, time: 0.543, data: 0.000) G_L1: 15.678 G_L1_ABSOLUTE: 2.334 G_L1_RELATIVE: 13.343 G_Regularizer: 0.000 validation_error: 21.303 +(epoch: 6, iters: 274240, time: 0.541, data: 0.000) G_L1: 15.646 G_L1_ABSOLUTE: 2.601 G_L1_RELATIVE: 13.046 G_Regularizer: 0.000 validation_error: 20.612 +(epoch: 6, iters: 276240, time: 0.547, data: 0.000) G_L1: 15.791 G_L1_ABSOLUTE: 2.863 G_L1_RELATIVE: 12.928 G_Regularizer: 0.000 validation_error: 21.014 +(epoch: 6, iters: 278240, time: 0.542, data: 0.000) G_L1: 16.174 G_L1_ABSOLUTE: 3.083 G_L1_RELATIVE: 13.091 G_Regularizer: 0.000 validation_error: 20.386 +(epoch: 6, iters: 280240, time: 0.549, data: 0.000) G_L1: 19.565 G_L1_ABSOLUTE: 3.290 G_L1_RELATIVE: 16.276 G_Regularizer: 0.000 validation_error: 20.907 +(epoch: 6, iters: 282240, time: 0.543, data: 0.000) G_L1: 12.985 G_L1_ABSOLUTE: 3.002 G_L1_RELATIVE: 9.982 
G_Regularizer: 0.000 validation_error: 21.824 +(epoch: 6, iters: 284240, time: 0.542, data: 0.000) G_L1: 13.479 G_L1_ABSOLUTE: 2.392 G_L1_RELATIVE: 11.087 G_Regularizer: 0.000 validation_error: 20.821 +(epoch: 6, iters: 286240, time: 0.548, data: 0.000) G_L1: 14.722 G_L1_ABSOLUTE: 2.695 G_L1_RELATIVE: 12.026 G_Regularizer: 0.000 validation_error: 20.806 +(epoch: 6, iters: 288240, time: 0.544, data: 0.000) G_L1: 15.923 G_L1_ABSOLUTE: 2.603 G_L1_RELATIVE: 13.320 G_Regularizer: 0.000 validation_error: 20.170 +(epoch: 6, iters: 290240, time: 0.544, data: 0.000) G_L1: 14.956 G_L1_ABSOLUTE: 3.179 G_L1_RELATIVE: 11.777 G_Regularizer: 0.000 validation_error: 20.532 +(epoch: 6, iters: 292240, time: 0.546, data: 0.000) G_L1: 19.723 G_L1_ABSOLUTE: 3.184 G_L1_RELATIVE: 16.539 G_Regularizer: 0.000 validation_error: 20.377 +(epoch: 6, iters: 294240, time: 0.549, data: 0.000) G_L1: 16.031 G_L1_ABSOLUTE: 2.880 G_L1_RELATIVE: 13.151 G_Regularizer: 0.000 validation_error: 21.341 +(epoch: 6, iters: 296240, time: 0.542, data: 0.000) G_L1: 18.362 G_L1_ABSOLUTE: 3.022 G_L1_RELATIVE: 15.340 G_Regularizer: 0.000 validation_error: 21.096 +(epoch: 6, iters: 298240, time: 0.546, data: 0.000) G_L1: 20.405 G_L1_ABSOLUTE: 3.140 G_L1_RELATIVE: 17.264 G_Regularizer: 0.000 validation_error: 21.096 +(epoch: 6, iters: 300240, time: 0.544, data: 0.000) G_L1: 19.462 G_L1_ABSOLUTE: 3.233 G_L1_RELATIVE: 16.229 G_Regularizer: 0.000 validation_error: 20.799 +(epoch: 6, iters: 302240, time: 0.545, data: 0.000) G_L1: 18.690 G_L1_ABSOLUTE: 3.216 G_L1_RELATIVE: 15.474 G_Regularizer: 0.000 validation_error: 21.379 +(epoch: 7, iters: 1488, time: 0.539, data: 0.000) G_L1: 15.882 G_L1_ABSOLUTE: 2.817 G_L1_RELATIVE: 13.064 G_Regularizer: 0.000 validation_error: 20.868 +(epoch: 7, iters: 3488, time: 0.543, data: 0.000) G_L1: 16.438 G_L1_ABSOLUTE: 2.997 G_L1_RELATIVE: 13.440 G_Regularizer: 0.000 validation_error: 20.629 +(epoch: 7, iters: 5488, time: 0.549, data: 0.000) G_L1: 15.449 G_L1_ABSOLUTE: 2.615 G_L1_RELATIVE: 12.833 G_Regularizer: 0.000 validation_error: 20.333 +(epoch: 7, iters: 7488, time: 0.542, data: 0.000) G_L1: 16.196 G_L1_ABSOLUTE: 2.796 G_L1_RELATIVE: 13.400 G_Regularizer: 0.000 validation_error: 20.108 +(epoch: 7, iters: 9488, time: 0.540, data: 0.000) G_L1: 14.910 G_L1_ABSOLUTE: 2.872 G_L1_RELATIVE: 12.038 G_Regularizer: 0.000 validation_error: 20.521 +(epoch: 7, iters: 11488, time: 0.547, data: 0.000) G_L1: 17.306 G_L1_ABSOLUTE: 2.950 G_L1_RELATIVE: 14.356 G_Regularizer: 0.000 validation_error: 21.329 +(epoch: 7, iters: 13488, time: 0.540, data: 0.000) G_L1: 16.909 G_L1_ABSOLUTE: 2.621 G_L1_RELATIVE: 14.289 G_Regularizer: 0.000 validation_error: 21.121 +(epoch: 7, iters: 15488, time: 0.546, data: 0.000) G_L1: 15.133 G_L1_ABSOLUTE: 2.609 G_L1_RELATIVE: 12.525 G_Regularizer: 0.000 validation_error: 20.688 +(epoch: 7, iters: 17488, time: 0.546, data: 0.000) G_L1: 16.227 G_L1_ABSOLUTE: 2.381 G_L1_RELATIVE: 13.846 G_Regularizer: 0.000 validation_error: 20.901 +(epoch: 7, iters: 19488, time: 0.548, data: 0.000) G_L1: 13.446 G_L1_ABSOLUTE: 2.415 G_L1_RELATIVE: 11.031 G_Regularizer: 0.000 validation_error: 20.445 +(epoch: 7, iters: 21488, time: 0.546, data: 0.000) G_L1: 16.821 G_L1_ABSOLUTE: 2.778 G_L1_RELATIVE: 14.043 G_Regularizer: 0.000 validation_error: 20.879 +(epoch: 7, iters: 23488, time: 0.547, data: 0.000) G_L1: 16.599 G_L1_ABSOLUTE: 2.463 G_L1_RELATIVE: 14.136 G_Regularizer: 0.000 validation_error: 20.679 +(epoch: 7, iters: 25488, time: 0.547, data: 0.000) G_L1: 13.995 G_L1_ABSOLUTE: 2.537 G_L1_RELATIVE: 11.458 
G_Regularizer: 0.000 validation_error: 20.340 +(epoch: 7, iters: 27488, time: 0.542, data: 0.000) G_L1: 18.691 G_L1_ABSOLUTE: 2.950 G_L1_RELATIVE: 15.741 G_Regularizer: 0.000 validation_error: 20.634 +(epoch: 7, iters: 29488, time: 0.543, data: 0.000) G_L1: 17.314 G_L1_ABSOLUTE: 2.854 G_L1_RELATIVE: 14.460 G_Regularizer: 0.000 validation_error: 20.812 +(epoch: 7, iters: 31488, time: 0.552, data: 0.000) G_L1: 15.181 G_L1_ABSOLUTE: 2.743 G_L1_RELATIVE: 12.438 G_Regularizer: 0.000 validation_error: 21.165 +(epoch: 7, iters: 33488, time: 0.543, data: 0.000) G_L1: 15.187 G_L1_ABSOLUTE: 2.926 G_L1_RELATIVE: 12.261 G_Regularizer: 0.000 validation_error: 20.877 +(epoch: 7, iters: 35488, time: 0.545, data: 0.001) G_L1: 14.766 G_L1_ABSOLUTE: 2.881 G_L1_RELATIVE: 11.885 G_Regularizer: 0.000 validation_error: 21.656 +(epoch: 7, iters: 37488, time: 0.547, data: 0.000) G_L1: 15.962 G_L1_ABSOLUTE: 2.683 G_L1_RELATIVE: 13.278 G_Regularizer: 0.000 validation_error: 20.946 +(epoch: 7, iters: 39488, time: 0.547, data: 0.000) G_L1: 18.470 G_L1_ABSOLUTE: 3.467 G_L1_RELATIVE: 15.002 G_Regularizer: 0.000 validation_error: 21.208 +(epoch: 7, iters: 41488, time: 0.545, data: 0.000) G_L1: 14.587 G_L1_ABSOLUTE: 3.014 G_L1_RELATIVE: 11.572 G_Regularizer: 0.000 validation_error: 21.597 +(epoch: 7, iters: 43488, time: 0.541, data: 0.000) G_L1: 18.191 G_L1_ABSOLUTE: 3.081 G_L1_RELATIVE: 15.110 G_Regularizer: 0.000 validation_error: 20.726 +(epoch: 7, iters: 45488, time: 0.550, data: 0.000) G_L1: 16.190 G_L1_ABSOLUTE: 2.544 G_L1_RELATIVE: 13.646 G_Regularizer: 0.000 validation_error: 21.036 +(epoch: 7, iters: 47488, time: 0.545, data: 0.000) G_L1: 19.757 G_L1_ABSOLUTE: 3.506 G_L1_RELATIVE: 16.251 G_Regularizer: 0.000 validation_error: 21.215 +(epoch: 7, iters: 49488, time: 0.550, data: 0.000) G_L1: 16.511 G_L1_ABSOLUTE: 2.932 G_L1_RELATIVE: 13.579 G_Regularizer: 0.000 validation_error: 20.791 +(epoch: 7, iters: 51488, time: 0.545, data: 0.000) G_L1: 16.738 G_L1_ABSOLUTE: 3.006 G_L1_RELATIVE: 13.733 G_Regularizer: 0.000 validation_error: 20.469 +(epoch: 7, iters: 53488, time: 0.545, data: 0.000) G_L1: 18.144 G_L1_ABSOLUTE: 3.038 G_L1_RELATIVE: 15.106 G_Regularizer: 0.000 validation_error: 21.288 +(epoch: 7, iters: 55488, time: 0.549, data: 0.000) G_L1: 15.120 G_L1_ABSOLUTE: 2.495 G_L1_RELATIVE: 12.625 G_Regularizer: 0.000 validation_error: 21.472 +(epoch: 7, iters: 57488, time: 0.552, data: 0.000) G_L1: 14.521 G_L1_ABSOLUTE: 2.823 G_L1_RELATIVE: 11.698 G_Regularizer: 0.000 validation_error: 20.431 +(epoch: 7, iters: 59488, time: 0.545, data: 0.000) G_L1: 16.929 G_L1_ABSOLUTE: 2.814 G_L1_RELATIVE: 14.115 G_Regularizer: 0.000 validation_error: 21.118 +(epoch: 7, iters: 61488, time: 0.559, data: 0.000) G_L1: 15.309 G_L1_ABSOLUTE: 2.562 G_L1_RELATIVE: 12.746 G_Regularizer: 0.000 validation_error: 21.073 +(epoch: 7, iters: 63488, time: 0.551, data: 0.000) G_L1: 17.289 G_L1_ABSOLUTE: 2.449 G_L1_RELATIVE: 14.841 G_Regularizer: 0.000 validation_error: 20.844 +(epoch: 7, iters: 65488, time: 0.554, data: 0.000) G_L1: 16.505 G_L1_ABSOLUTE: 2.874 G_L1_RELATIVE: 13.630 G_Regularizer: 0.000 validation_error: 21.042 +(epoch: 7, iters: 67488, time: 0.552, data: 0.000) G_L1: 13.462 G_L1_ABSOLUTE: 2.853 G_L1_RELATIVE: 10.609 G_Regularizer: 0.000 validation_error: 21.350 +(epoch: 7, iters: 69488, time: 0.550, data: 0.000) G_L1: 19.528 G_L1_ABSOLUTE: 3.199 G_L1_RELATIVE: 16.329 G_Regularizer: 0.000 validation_error: 20.908 +(epoch: 7, iters: 71488, time: 0.546, data: 0.000) G_L1: 17.029 G_L1_ABSOLUTE: 2.748 G_L1_RELATIVE: 14.281 
G_Regularizer: 0.000 validation_error: 21.144
+(epoch: 7, iters: 73488, time: 0.546, data: 0.000) G_L1: 16.107 G_L1_ABSOLUTE: 2.811 G_L1_RELATIVE: 13.296 G_Regularizer: 0.000 validation_error: 21.319
+(epoch: 7, iters: 75488, time: 0.554, data: 0.000) G_L1: 15.360 G_L1_ABSOLUTE: 2.868 G_L1_RELATIVE: 12.492 G_Regularizer: 0.000 validation_error: 20.891
+[... loss log continues with one entry every 2000 iterations through the ends of epochs 7, 8, and 9 and into epoch 10 (~300,000 iterations per epoch), all with the same fields: G_L1 fluctuates roughly between 10.7 and 22.4 and matches G_L1_ABSOLUTE + G_L1_RELATIVE throughout (up to rounding), G_Regularizer stays at 0.000, validation_error hovers between roughly 19.4 and 22.0, and per-iteration time alternates between stretches of ~0.54 s and ~0.96 s ...]
+(epoch: 10, iters: 125232, time: 0.543, data: 0.000) G_L1: 16.408 G_L1_ABSOLUTE: 2.657 G_L1_RELATIVE: 13.750 G_Regularizer: 0.000 validation_error: 20.888
+(epoch: 10, iters: 127232, time: 0.549, data: 0.000) G_L1: 15.866 G_L1_ABSOLUTE:
2.544 G_L1_RELATIVE: 13.322 G_Regularizer: 0.000 validation_error: 20.803 +(epoch: 10, iters: 129232, time: 0.545, data: 0.000) G_L1: 14.576 G_L1_ABSOLUTE: 2.877 G_L1_RELATIVE: 11.699 G_Regularizer: 0.000 validation_error: 20.377 +(epoch: 10, iters: 131232, time: 0.552, data: 0.000) G_L1: 18.244 G_L1_ABSOLUTE: 2.518 G_L1_RELATIVE: 15.726 G_Regularizer: 0.000 validation_error: 21.169 +(epoch: 10, iters: 133232, time: 0.543, data: 0.000) G_L1: 14.349 G_L1_ABSOLUTE: 2.637 G_L1_RELATIVE: 11.711 G_Regularizer: 0.000 validation_error: 20.050 +(epoch: 10, iters: 135232, time: 0.546, data: 0.000) G_L1: 14.903 G_L1_ABSOLUTE: 2.811 G_L1_RELATIVE: 12.092 G_Regularizer: 0.000 validation_error: 20.898 +(epoch: 10, iters: 137232, time: 0.554, data: 0.000) G_L1: 17.455 G_L1_ABSOLUTE: 2.619 G_L1_RELATIVE: 14.836 G_Regularizer: 0.000 validation_error: 20.345 +(epoch: 10, iters: 139232, time: 0.547, data: 0.000) G_L1: 14.852 G_L1_ABSOLUTE: 2.513 G_L1_RELATIVE: 12.339 G_Regularizer: 0.000 validation_error: 20.586 +(epoch: 10, iters: 141232, time: 0.545, data: 0.000) G_L1: 14.591 G_L1_ABSOLUTE: 2.423 G_L1_RELATIVE: 12.168 G_Regularizer: 0.000 validation_error: 20.676 +(epoch: 10, iters: 143232, time: 0.542, data: 0.001) G_L1: 12.710 G_L1_ABSOLUTE: 2.120 G_L1_RELATIVE: 10.590 G_Regularizer: 0.000 validation_error: 20.434 +(epoch: 10, iters: 145232, time: 0.546, data: 0.000) G_L1: 12.996 G_L1_ABSOLUTE: 2.380 G_L1_RELATIVE: 10.617 G_Regularizer: 0.000 validation_error: 21.059 +(epoch: 10, iters: 147232, time: 0.550, data: 0.000) G_L1: 16.103 G_L1_ABSOLUTE: 2.660 G_L1_RELATIVE: 13.443 G_Regularizer: 0.000 validation_error: 20.179 +(epoch: 10, iters: 149232, time: 0.545, data: 0.000) G_L1: 15.736 G_L1_ABSOLUTE: 2.459 G_L1_RELATIVE: 13.277 G_Regularizer: 0.000 validation_error: 20.741 +(epoch: 10, iters: 151232, time: 0.548, data: 0.000) G_L1: 15.790 G_L1_ABSOLUTE: 2.614 G_L1_RELATIVE: 13.176 G_Regularizer: 0.000 validation_error: 20.842 +(epoch: 10, iters: 153232, time: 0.548, data: 0.000) G_L1: 19.871 G_L1_ABSOLUTE: 3.389 G_L1_RELATIVE: 16.481 G_Regularizer: 0.000 validation_error: 20.670 +(epoch: 10, iters: 155232, time: 0.546, data: 0.000) G_L1: 14.880 G_L1_ABSOLUTE: 2.458 G_L1_RELATIVE: 12.421 G_Regularizer: 0.000 validation_error: 20.503 +(epoch: 10, iters: 157232, time: 0.544, data: 0.000) G_L1: 16.626 G_L1_ABSOLUTE: 2.906 G_L1_RELATIVE: 13.720 G_Regularizer: 0.000 validation_error: 20.956 +(epoch: 10, iters: 159232, time: 0.547, data: 0.000) G_L1: 14.700 G_L1_ABSOLUTE: 2.836 G_L1_RELATIVE: 11.864 G_Regularizer: 0.000 validation_error: 21.093 +(epoch: 10, iters: 161232, time: 0.541, data: 0.000) G_L1: 16.012 G_L1_ABSOLUTE: 2.472 G_L1_RELATIVE: 13.540 G_Regularizer: 0.000 validation_error: 20.968 +(epoch: 10, iters: 163232, time: 0.540, data: 0.000) G_L1: 16.534 G_L1_ABSOLUTE: 3.063 G_L1_RELATIVE: 13.471 G_Regularizer: 0.000 validation_error: 20.047 +(epoch: 10, iters: 165232, time: 0.555, data: 0.001) G_L1: 17.346 G_L1_ABSOLUTE: 3.091 G_L1_RELATIVE: 14.255 G_Regularizer: 0.000 validation_error: 20.836 +(epoch: 10, iters: 167232, time: 0.553, data: 0.000) G_L1: 14.109 G_L1_ABSOLUTE: 2.554 G_L1_RELATIVE: 11.556 G_Regularizer: 0.000 validation_error: 20.547 +(epoch: 10, iters: 169232, time: 0.544, data: 0.000) G_L1: 14.122 G_L1_ABSOLUTE: 2.731 G_L1_RELATIVE: 11.391 G_Regularizer: 0.000 validation_error: 20.928 +(epoch: 10, iters: 171232, time: 0.545, data: 0.000) G_L1: 12.755 G_L1_ABSOLUTE: 2.328 G_L1_RELATIVE: 10.427 G_Regularizer: 0.000 validation_error: 20.380 +(epoch: 10, iters: 173232, time: 0.549, data: 
0.000) G_L1: 17.591 G_L1_ABSOLUTE: 2.674 G_L1_RELATIVE: 14.918 G_Regularizer: 0.000 validation_error: 20.791 +(epoch: 10, iters: 175232, time: 0.545, data: 0.000) G_L1: 16.113 G_L1_ABSOLUTE: 2.831 G_L1_RELATIVE: 13.282 G_Regularizer: 0.000 validation_error: 20.408 +(epoch: 10, iters: 177232, time: 0.549, data: 0.000) G_L1: 13.938 G_L1_ABSOLUTE: 2.756 G_L1_RELATIVE: 11.182 G_Regularizer: 0.000 validation_error: 21.442 +(epoch: 10, iters: 179232, time: 0.544, data: 0.000) G_L1: 12.992 G_L1_ABSOLUTE: 2.402 G_L1_RELATIVE: 10.590 G_Regularizer: 0.000 validation_error: 20.125 +(epoch: 10, iters: 181232, time: 0.544, data: 0.000) G_L1: 14.906 G_L1_ABSOLUTE: 3.289 G_L1_RELATIVE: 11.617 G_Regularizer: 0.000 validation_error: 21.198 +(epoch: 10, iters: 183232, time: 0.543, data: 0.000) G_L1: 14.732 G_L1_ABSOLUTE: 2.762 G_L1_RELATIVE: 11.970 G_Regularizer: 0.000 validation_error: 20.368 +(epoch: 10, iters: 185232, time: 0.547, data: 0.000) G_L1: 14.420 G_L1_ABSOLUTE: 3.417 G_L1_RELATIVE: 11.002 G_Regularizer: 0.000 validation_error: 20.052 +(epoch: 10, iters: 187232, time: 0.547, data: 0.000) G_L1: 14.213 G_L1_ABSOLUTE: 2.445 G_L1_RELATIVE: 11.768 G_Regularizer: 0.000 validation_error: 20.608 +(epoch: 10, iters: 189232, time: 0.549, data: 0.000) G_L1: 13.537 G_L1_ABSOLUTE: 2.356 G_L1_RELATIVE: 11.181 G_Regularizer: 0.000 validation_error: 20.941 +(epoch: 10, iters: 191232, time: 0.545, data: 0.000) G_L1: 18.029 G_L1_ABSOLUTE: 2.506 G_L1_RELATIVE: 15.523 G_Regularizer: 0.000 validation_error: 20.999 +(epoch: 10, iters: 193232, time: 0.546, data: 0.000) G_L1: 12.655 G_L1_ABSOLUTE: 2.248 G_L1_RELATIVE: 10.407 G_Regularizer: 0.000 validation_error: 21.423 +(epoch: 10, iters: 195232, time: 0.549, data: 0.000) G_L1: 14.996 G_L1_ABSOLUTE: 2.760 G_L1_RELATIVE: 12.236 G_Regularizer: 0.000 validation_error: 20.178 +(epoch: 10, iters: 197232, time: 0.548, data: 0.000) G_L1: 14.274 G_L1_ABSOLUTE: 3.032 G_L1_RELATIVE: 11.242 G_Regularizer: 0.000 validation_error: 20.695 +(epoch: 10, iters: 199232, time: 0.545, data: 0.000) G_L1: 13.755 G_L1_ABSOLUTE: 2.391 G_L1_RELATIVE: 11.365 G_Regularizer: 0.000 validation_error: 20.429 +(epoch: 10, iters: 201232, time: 0.551, data: 0.000) G_L1: 14.799 G_L1_ABSOLUTE: 2.541 G_L1_RELATIVE: 12.258 G_Regularizer: 0.000 validation_error: 20.774 +(epoch: 10, iters: 203232, time: 0.545, data: 0.000) G_L1: 13.486 G_L1_ABSOLUTE: 2.614 G_L1_RELATIVE: 10.871 G_Regularizer: 0.000 validation_error: 20.114 +(epoch: 10, iters: 205232, time: 0.546, data: 0.000) G_L1: 15.124 G_L1_ABSOLUTE: 2.929 G_L1_RELATIVE: 12.194 G_Regularizer: 0.000 validation_error: 20.145 +(epoch: 10, iters: 207232, time: 0.553, data: 0.000) G_L1: 18.527 G_L1_ABSOLUTE: 3.160 G_L1_RELATIVE: 15.367 G_Regularizer: 0.000 validation_error: 20.649 +(epoch: 10, iters: 209232, time: 0.547, data: 0.000) G_L1: 14.778 G_L1_ABSOLUTE: 2.298 G_L1_RELATIVE: 12.479 G_Regularizer: 0.000 validation_error: 20.730 +(epoch: 10, iters: 211232, time: 0.547, data: 0.001) G_L1: 15.017 G_L1_ABSOLUTE: 2.509 G_L1_RELATIVE: 12.508 G_Regularizer: 0.000 validation_error: 20.552 +(epoch: 10, iters: 213232, time: 0.554, data: 0.000) G_L1: 17.296 G_L1_ABSOLUTE: 2.635 G_L1_RELATIVE: 14.661 G_Regularizer: 0.000 validation_error: 20.806 +(epoch: 10, iters: 215232, time: 0.552, data: 0.000) G_L1: 15.949 G_L1_ABSOLUTE: 2.191 G_L1_RELATIVE: 13.758 G_Regularizer: 0.000 validation_error: 20.483 +(epoch: 10, iters: 217232, time: 0.551, data: 0.000) G_L1: 14.761 G_L1_ABSOLUTE: 2.649 G_L1_RELATIVE: 12.112 G_Regularizer: 0.000 validation_error: 21.312 +(epoch: 10, 
iters: 219232, time: 0.553, data: 0.001) G_L1: 12.814 G_L1_ABSOLUTE: 2.382 G_L1_RELATIVE: 10.433 G_Regularizer: 0.000 validation_error: 20.617 +(epoch: 10, iters: 221232, time: 0.556, data: 0.000) G_L1: 15.400 G_L1_ABSOLUTE: 2.484 G_L1_RELATIVE: 12.916 G_Regularizer: 0.000 validation_error: 20.883 +(epoch: 10, iters: 223232, time: 0.549, data: 0.000) G_L1: 15.091 G_L1_ABSOLUTE: 2.339 G_L1_RELATIVE: 12.752 G_Regularizer: 0.000 validation_error: 21.217 +(epoch: 10, iters: 225232, time: 0.565, data: 0.000) G_L1: 17.512 G_L1_ABSOLUTE: 2.936 G_L1_RELATIVE: 14.576 G_Regularizer: 0.000 validation_error: 20.700 +(epoch: 10, iters: 227232, time: 0.553, data: 0.000) G_L1: 16.970 G_L1_ABSOLUTE: 2.738 G_L1_RELATIVE: 14.232 G_Regularizer: 0.000 validation_error: 20.806 +(epoch: 10, iters: 229232, time: 0.554, data: 0.000) G_L1: 17.336 G_L1_ABSOLUTE: 3.443 G_L1_RELATIVE: 13.893 G_Regularizer: 0.000 validation_error: 20.342 +(epoch: 10, iters: 231232, time: 0.554, data: 0.000) G_L1: 14.998 G_L1_ABSOLUTE: 2.436 G_L1_RELATIVE: 12.562 G_Regularizer: 0.000 validation_error: 20.045 +(epoch: 10, iters: 233232, time: 0.551, data: 0.000) G_L1: 16.179 G_L1_ABSOLUTE: 2.798 G_L1_RELATIVE: 13.381 G_Regularizer: 0.000 validation_error: 20.145 +(epoch: 10, iters: 235232, time: 0.552, data: 0.000) G_L1: 14.076 G_L1_ABSOLUTE: 2.573 G_L1_RELATIVE: 11.503 G_Regularizer: 0.000 validation_error: 20.510 +(epoch: 10, iters: 237232, time: 0.545, data: 0.000) G_L1: 16.731 G_L1_ABSOLUTE: 2.811 G_L1_RELATIVE: 13.920 G_Regularizer: 0.000 validation_error: 20.528 +(epoch: 10, iters: 239232, time: 0.553, data: 0.000) G_L1: 14.125 G_L1_ABSOLUTE: 2.992 G_L1_RELATIVE: 11.134 G_Regularizer: 0.000 validation_error: 20.737 +(epoch: 10, iters: 241232, time: 0.549, data: 0.000) G_L1: 15.174 G_L1_ABSOLUTE: 3.535 G_L1_RELATIVE: 11.639 G_Regularizer: 0.000 validation_error: 20.625 +(epoch: 10, iters: 243232, time: 0.545, data: 0.000) G_L1: 16.767 G_L1_ABSOLUTE: 2.897 G_L1_RELATIVE: 13.869 G_Regularizer: 0.000 validation_error: 20.551 +(epoch: 10, iters: 245232, time: 0.553, data: 0.000) G_L1: 23.020 G_L1_ABSOLUTE: 2.900 G_L1_RELATIVE: 20.120 G_Regularizer: 0.000 validation_error: 19.363 +(epoch: 10, iters: 247232, time: 0.552, data: 0.000) G_L1: 17.410 G_L1_ABSOLUTE: 2.860 G_L1_RELATIVE: 14.550 G_Regularizer: 0.000 validation_error: 20.652 +(epoch: 10, iters: 249232, time: 0.545, data: 0.000) G_L1: 14.205 G_L1_ABSOLUTE: 2.802 G_L1_RELATIVE: 11.403 G_Regularizer: 0.000 validation_error: 19.963 +(epoch: 10, iters: 251232, time: 0.548, data: 0.000) G_L1: 17.507 G_L1_ABSOLUTE: 2.754 G_L1_RELATIVE: 14.753 G_Regularizer: 0.000 validation_error: 19.682 +(epoch: 10, iters: 253232, time: 0.555, data: 0.000) G_L1: 14.642 G_L1_ABSOLUTE: 2.669 G_L1_RELATIVE: 11.974 G_Regularizer: 0.000 validation_error: 20.507 +(epoch: 10, iters: 255232, time: 0.552, data: 0.000) G_L1: 14.878 G_L1_ABSOLUTE: 3.030 G_L1_RELATIVE: 11.847 G_Regularizer: 0.000 validation_error: 20.699 +(epoch: 10, iters: 257232, time: 0.547, data: 0.000) G_L1: 13.846 G_L1_ABSOLUTE: 2.307 G_L1_RELATIVE: 11.538 G_Regularizer: 0.000 validation_error: 21.463 +(epoch: 10, iters: 259232, time: 0.545, data: 0.000) G_L1: 11.461 G_L1_ABSOLUTE: 2.550 G_L1_RELATIVE: 8.911 G_Regularizer: 0.000 validation_error: 20.281 +(epoch: 10, iters: 261232, time: 0.545, data: 0.000) G_L1: 16.568 G_L1_ABSOLUTE: 2.540 G_L1_RELATIVE: 14.028 G_Regularizer: 0.000 validation_error: 19.805 +(epoch: 10, iters: 263232, time: 0.550, data: 0.000) G_L1: 14.315 G_L1_ABSOLUTE: 2.639 G_L1_RELATIVE: 11.676 G_Regularizer: 0.000 
validation_error: 20.715 +(epoch: 10, iters: 265232, time: 0.541, data: 0.000) G_L1: 15.470 G_L1_ABSOLUTE: 2.918 G_L1_RELATIVE: 12.551 G_Regularizer: 0.000 validation_error: 20.637 +(epoch: 10, iters: 267232, time: 0.551, data: 0.000) G_L1: 15.038 G_L1_ABSOLUTE: 2.760 G_L1_RELATIVE: 12.278 G_Regularizer: 0.000 validation_error: 21.138 +(epoch: 10, iters: 269232, time: 0.549, data: 0.000) G_L1: 15.097 G_L1_ABSOLUTE: 2.815 G_L1_RELATIVE: 12.282 G_Regularizer: 0.000 validation_error: 20.063 +(epoch: 10, iters: 271232, time: 0.547, data: 0.000) G_L1: 16.487 G_L1_ABSOLUTE: 2.666 G_L1_RELATIVE: 13.820 G_Regularizer: 0.000 validation_error: 20.431 +(epoch: 10, iters: 273232, time: 0.545, data: 0.000) G_L1: 13.480 G_L1_ABSOLUTE: 2.707 G_L1_RELATIVE: 10.773 G_Regularizer: 0.000 validation_error: 20.835 +(epoch: 10, iters: 275232, time: 0.545, data: 0.000) G_L1: 17.855 G_L1_ABSOLUTE: 2.457 G_L1_RELATIVE: 15.399 G_Regularizer: 0.000 validation_error: 21.204 +(epoch: 10, iters: 277232, time: 0.547, data: 0.000) G_L1: 13.515 G_L1_ABSOLUTE: 2.416 G_L1_RELATIVE: 11.099 G_Regularizer: 0.000 validation_error: 20.197 +(epoch: 10, iters: 279232, time: 0.538, data: 0.000) G_L1: 18.028 G_L1_ABSOLUTE: 2.765 G_L1_RELATIVE: 15.263 G_Regularizer: 0.000 validation_error: 20.173 +(epoch: 10, iters: 281232, time: 0.553, data: 0.000) G_L1: 15.266 G_L1_ABSOLUTE: 2.705 G_L1_RELATIVE: 12.561 G_Regularizer: 0.000 validation_error: 20.950 +(epoch: 10, iters: 283232, time: 0.551, data: 0.000) G_L1: 15.726 G_L1_ABSOLUTE: 2.646 G_L1_RELATIVE: 13.080 G_Regularizer: 0.000 validation_error: 20.813 +(epoch: 10, iters: 285232, time: 0.549, data: 0.000) G_L1: 17.751 G_L1_ABSOLUTE: 3.109 G_L1_RELATIVE: 14.642 G_Regularizer: 0.000 validation_error: 20.437 +(epoch: 10, iters: 287232, time: 0.552, data: 0.001) G_L1: 14.509 G_L1_ABSOLUTE: 2.669 G_L1_RELATIVE: 11.840 G_Regularizer: 0.000 validation_error: 20.600 +(epoch: 10, iters: 289232, time: 0.536, data: 0.000) G_L1: 17.136 G_L1_ABSOLUTE: 2.856 G_L1_RELATIVE: 14.279 G_Regularizer: 0.000 validation_error: 21.193 +(epoch: 10, iters: 291232, time: 0.545, data: 0.000) G_L1: 17.186 G_L1_ABSOLUTE: 2.701 G_L1_RELATIVE: 14.485 G_Regularizer: 0.000 validation_error: 21.151 +(epoch: 10, iters: 293232, time: 0.547, data: 0.000) G_L1: 14.177 G_L1_ABSOLUTE: 3.148 G_L1_RELATIVE: 11.029 G_Regularizer: 0.000 validation_error: 20.417 +(epoch: 10, iters: 295232, time: 0.550, data: 0.000) G_L1: 13.360 G_L1_ABSOLUTE: 2.162 G_L1_RELATIVE: 11.198 G_Regularizer: 0.000 validation_error: 19.833 +(epoch: 10, iters: 297232, time: 0.545, data: 0.000) G_L1: 13.799 G_L1_ABSOLUTE: 2.319 G_L1_RELATIVE: 11.479 G_Regularizer: 0.000 validation_error: 20.525 +(epoch: 10, iters: 299232, time: 0.546, data: 0.000) G_L1: 14.907 G_L1_ABSOLUTE: 2.691 G_L1_RELATIVE: 12.217 G_Regularizer: 0.000 validation_error: 20.635 +(epoch: 10, iters: 301232, time: 0.544, data: 0.000) G_L1: 13.607 G_L1_ABSOLUTE: 2.609 G_L1_RELATIVE: 10.997 G_Regularizer: 0.000 validation_error: 20.512 +(epoch: 11, iters: 480, time: 0.549, data: 0.000) G_L1: 16.376 G_L1_ABSOLUTE: 2.669 G_L1_RELATIVE: 13.707 G_Regularizer: 0.000 validation_error: 20.742 +(epoch: 11, iters: 2480, time: 0.546, data: 0.000) G_L1: 13.902 G_L1_ABSOLUTE: 2.501 G_L1_RELATIVE: 11.401 G_Regularizer: 0.000 validation_error: 21.817 +(epoch: 11, iters: 4480, time: 0.542, data: 0.000) G_L1: 18.020 G_L1_ABSOLUTE: 3.025 G_L1_RELATIVE: 14.994 G_Regularizer: 0.000 validation_error: 20.607 +(epoch: 11, iters: 6480, time: 0.547, data: 0.000) G_L1: 13.064 G_L1_ABSOLUTE: 2.252 G_L1_RELATIVE: 
10.812 G_Regularizer: 0.000 validation_error: 20.303 +(epoch: 11, iters: 8480, time: 0.548, data: 0.000) G_L1: 17.276 G_L1_ABSOLUTE: 2.620 G_L1_RELATIVE: 14.656 G_Regularizer: 0.000 validation_error: 20.267 +(epoch: 11, iters: 10480, time: 0.549, data: 0.000) G_L1: 17.090 G_L1_ABSOLUTE: 2.688 G_L1_RELATIVE: 14.402 G_Regularizer: 0.000 validation_error: 21.273 +(epoch: 11, iters: 12480, time: 0.555, data: 0.000) G_L1: 14.087 G_L1_ABSOLUTE: 2.709 G_L1_RELATIVE: 11.378 G_Regularizer: 0.000 validation_error: 21.315 +(epoch: 11, iters: 14480, time: 0.554, data: 0.000) G_L1: 14.267 G_L1_ABSOLUTE: 2.474 G_L1_RELATIVE: 11.792 G_Regularizer: 0.000 validation_error: 20.320 +(epoch: 11, iters: 16480, time: 0.546, data: 0.001) G_L1: 15.473 G_L1_ABSOLUTE: 2.939 G_L1_RELATIVE: 12.534 G_Regularizer: 0.000 validation_error: 19.640 +(epoch: 11, iters: 18480, time: 0.540, data: 0.000) G_L1: 17.398 G_L1_ABSOLUTE: 2.633 G_L1_RELATIVE: 14.765 G_Regularizer: 0.000 validation_error: 20.853 +(epoch: 11, iters: 20480, time: 0.543, data: 0.000) G_L1: 14.396 G_L1_ABSOLUTE: 2.630 G_L1_RELATIVE: 11.765 G_Regularizer: 0.000 validation_error: 20.981 +(epoch: 11, iters: 22480, time: 0.546, data: 0.000) G_L1: 14.633 G_L1_ABSOLUTE: 2.623 G_L1_RELATIVE: 12.010 G_Regularizer: 0.000 validation_error: 22.118 +(epoch: 11, iters: 24480, time: 0.546, data: 0.000) G_L1: 13.481 G_L1_ABSOLUTE: 2.800 G_L1_RELATIVE: 10.681 G_Regularizer: 0.000 validation_error: 21.266 +(epoch: 11, iters: 26480, time: 0.553, data: 0.000) G_L1: 13.165 G_L1_ABSOLUTE: 2.745 G_L1_RELATIVE: 10.420 G_Regularizer: 0.000 validation_error: 20.287 +(epoch: 11, iters: 28480, time: 0.547, data: 0.001) G_L1: 14.899 G_L1_ABSOLUTE: 2.477 G_L1_RELATIVE: 12.422 G_Regularizer: 0.000 validation_error: 20.734 +(epoch: 11, iters: 30480, time: 0.543, data: 0.000) G_L1: 18.560 G_L1_ABSOLUTE: 3.533 G_L1_RELATIVE: 15.027 G_Regularizer: 0.000 validation_error: 20.532 +(epoch: 11, iters: 32480, time: 0.542, data: 0.000) G_L1: 15.031 G_L1_ABSOLUTE: 2.764 G_L1_RELATIVE: 12.267 G_Regularizer: 0.000 validation_error: 20.572 +(epoch: 11, iters: 34480, time: 0.545, data: 0.000) G_L1: 17.692 G_L1_ABSOLUTE: 2.758 G_L1_RELATIVE: 14.934 G_Regularizer: 0.000 validation_error: 20.625 +(epoch: 11, iters: 36480, time: 0.549, data: 0.000) G_L1: 16.194 G_L1_ABSOLUTE: 2.515 G_L1_RELATIVE: 13.679 G_Regularizer: 0.000 validation_error: 20.206 +(epoch: 11, iters: 38480, time: 0.546, data: 0.000) G_L1: 19.133 G_L1_ABSOLUTE: 2.913 G_L1_RELATIVE: 16.220 G_Regularizer: 0.000 validation_error: 20.541 +(epoch: 11, iters: 40480, time: 0.543, data: 0.000) G_L1: 16.560 G_L1_ABSOLUTE: 2.455 G_L1_RELATIVE: 14.104 G_Regularizer: 0.000 validation_error: 20.879 +(epoch: 11, iters: 42480, time: 0.550, data: 0.000) G_L1: 15.410 G_L1_ABSOLUTE: 2.205 G_L1_RELATIVE: 13.205 G_Regularizer: 0.000 validation_error: 20.208 +(epoch: 11, iters: 44480, time: 0.543, data: 0.000) G_L1: 18.799 G_L1_ABSOLUTE: 3.002 G_L1_RELATIVE: 15.797 G_Regularizer: 0.000 validation_error: 20.021 +(epoch: 11, iters: 46480, time: 0.549, data: 0.000) G_L1: 16.024 G_L1_ABSOLUTE: 2.487 G_L1_RELATIVE: 13.537 G_Regularizer: 0.000 validation_error: 21.482 +(epoch: 11, iters: 48480, time: 0.546, data: 0.000) G_L1: 16.181 G_L1_ABSOLUTE: 2.806 G_L1_RELATIVE: 13.375 G_Regularizer: 0.000 validation_error: 20.675 +(epoch: 11, iters: 50480, time: 0.547, data: 0.000) G_L1: 15.226 G_L1_ABSOLUTE: 2.403 G_L1_RELATIVE: 12.823 G_Regularizer: 0.000 validation_error: 20.059 +(epoch: 11, iters: 52480, time: 0.551, data: 0.000) G_L1: 14.939 G_L1_ABSOLUTE: 2.454 
G_L1_RELATIVE: 12.485 G_Regularizer: 0.000 validation_error: 20.214 +(epoch: 11, iters: 54480, time: 0.547, data: 0.000) G_L1: 18.968 G_L1_ABSOLUTE: 2.351 G_L1_RELATIVE: 16.616 G_Regularizer: 0.000 validation_error: 20.430 +(epoch: 11, iters: 56480, time: 0.543, data: 0.000) G_L1: 16.062 G_L1_ABSOLUTE: 2.405 G_L1_RELATIVE: 13.657 G_Regularizer: 0.000 validation_error: 20.357 +(epoch: 11, iters: 58480, time: 0.547, data: 0.001) G_L1: 14.969 G_L1_ABSOLUTE: 2.798 G_L1_RELATIVE: 12.172 G_Regularizer: 0.000 validation_error: 20.682 +(epoch: 11, iters: 60480, time: 0.547, data: 0.000) G_L1: 16.544 G_L1_ABSOLUTE: 2.696 G_L1_RELATIVE: 13.848 G_Regularizer: 0.000 validation_error: 20.776 +(epoch: 11, iters: 62480, time: 0.544, data: 0.000) G_L1: 15.426 G_L1_ABSOLUTE: 2.494 G_L1_RELATIVE: 12.932 G_Regularizer: 0.000 validation_error: 20.785 +(epoch: 11, iters: 64480, time: 0.549, data: 0.000) G_L1: 13.320 G_L1_ABSOLUTE: 2.551 G_L1_RELATIVE: 10.769 G_Regularizer: 0.000 validation_error: 20.560 +(epoch: 11, iters: 66480, time: 0.548, data: 0.000) G_L1: 15.972 G_L1_ABSOLUTE: 2.857 G_L1_RELATIVE: 13.115 G_Regularizer: 0.000 validation_error: 20.819 +(epoch: 11, iters: 68480, time: 0.545, data: 0.000) G_L1: 13.796 G_L1_ABSOLUTE: 2.442 G_L1_RELATIVE: 11.354 G_Regularizer: 0.000 validation_error: 20.698 +(epoch: 11, iters: 70480, time: 0.550, data: 0.000) G_L1: 14.291 G_L1_ABSOLUTE: 2.381 G_L1_RELATIVE: 11.909 G_Regularizer: 0.000 validation_error: 21.171 +(epoch: 11, iters: 72480, time: 0.542, data: 0.000) G_L1: 15.998 G_L1_ABSOLUTE: 2.923 G_L1_RELATIVE: 13.075 G_Regularizer: 0.000 validation_error: 21.129 +(epoch: 11, iters: 74480, time: 0.560, data: 0.000) G_L1: 13.909 G_L1_ABSOLUTE: 2.626 G_L1_RELATIVE: 11.284 G_Regularizer: 0.000 validation_error: 20.157 +(epoch: 11, iters: 76480, time: 0.547, data: 0.000) G_L1: 13.925 G_L1_ABSOLUTE: 2.875 G_L1_RELATIVE: 11.050 G_Regularizer: 0.000 validation_error: 20.952 +(epoch: 11, iters: 78480, time: 0.550, data: 0.000) G_L1: 16.667 G_L1_ABSOLUTE: 2.733 G_L1_RELATIVE: 13.935 G_Regularizer: 0.000 validation_error: 20.937 +(epoch: 11, iters: 80480, time: 0.548, data: 0.000) G_L1: 18.635 G_L1_ABSOLUTE: 3.035 G_L1_RELATIVE: 15.600 G_Regularizer: 0.000 validation_error: 20.701 +(epoch: 11, iters: 82480, time: 0.539, data: 0.000) G_L1: 14.653 G_L1_ABSOLUTE: 2.347 G_L1_RELATIVE: 12.306 G_Regularizer: 0.000 validation_error: 21.124 +(epoch: 11, iters: 84480, time: 0.536, data: 0.000) G_L1: 15.658 G_L1_ABSOLUTE: 2.220 G_L1_RELATIVE: 13.438 G_Regularizer: 0.000 validation_error: 20.509 +(epoch: 11, iters: 86480, time: 0.548, data: 0.000) G_L1: 14.230 G_L1_ABSOLUTE: 2.821 G_L1_RELATIVE: 11.409 G_Regularizer: 0.000 validation_error: 20.718 +(epoch: 11, iters: 88480, time: 0.544, data: 0.000) G_L1: 15.662 G_L1_ABSOLUTE: 2.550 G_L1_RELATIVE: 13.112 G_Regularizer: 0.000 validation_error: 20.400 +(epoch: 11, iters: 90480, time: 0.544, data: 0.000) G_L1: 13.886 G_L1_ABSOLUTE: 2.798 G_L1_RELATIVE: 11.087 G_Regularizer: 0.000 validation_error: 19.992 +(epoch: 11, iters: 92480, time: 0.533, data: 0.000) G_L1: 12.838 G_L1_ABSOLUTE: 2.654 G_L1_RELATIVE: 10.183 G_Regularizer: 0.000 validation_error: 20.913 +(epoch: 11, iters: 94480, time: 0.546, data: 0.000) G_L1: 17.600 G_L1_ABSOLUTE: 3.057 G_L1_RELATIVE: 14.543 G_Regularizer: 0.000 validation_error: 20.485 +(epoch: 11, iters: 96480, time: 0.535, data: 0.000) G_L1: 14.893 G_L1_ABSOLUTE: 2.754 G_L1_RELATIVE: 12.139 G_Regularizer: 0.000 validation_error: 21.407 +(epoch: 11, iters: 98480, time: 0.530, data: 0.000) G_L1: 14.051 
G_L1_ABSOLUTE: 3.233 G_L1_RELATIVE: 10.818 G_Regularizer: 0.000 validation_error: 21.251 +(epoch: 11, iters: 100480, time: 0.534, data: 0.000) G_L1: 13.616 G_L1_ABSOLUTE: 2.597 G_L1_RELATIVE: 11.019 G_Regularizer: 0.000 validation_error: 20.767 +(epoch: 11, iters: 102480, time: 0.536, data: 0.000) G_L1: 15.465 G_L1_ABSOLUTE: 2.761 G_L1_RELATIVE: 12.704 G_Regularizer: 0.000 validation_error: 20.061 +(epoch: 11, iters: 104480, time: 0.529, data: 0.000) G_L1: 16.854 G_L1_ABSOLUTE: 2.210 G_L1_RELATIVE: 14.644 G_Regularizer: 0.000 validation_error: 20.299 +(epoch: 11, iters: 106480, time: 0.544, data: 0.000) G_L1: 14.665 G_L1_ABSOLUTE: 2.601 G_L1_RELATIVE: 12.064 G_Regularizer: 0.000 validation_error: 20.782 +(epoch: 11, iters: 108480, time: 0.534, data: 0.000) G_L1: 14.821 G_L1_ABSOLUTE: 3.057 G_L1_RELATIVE: 11.764 G_Regularizer: 0.000 validation_error: 20.255 +(epoch: 11, iters: 110480, time: 0.526, data: 0.000) G_L1: 15.236 G_L1_ABSOLUTE: 2.433 G_L1_RELATIVE: 12.803 G_Regularizer: 0.000 validation_error: 20.619 +(epoch: 11, iters: 112480, time: 0.548, data: 0.000) G_L1: 14.774 G_L1_ABSOLUTE: 2.563 G_L1_RELATIVE: 12.212 G_Regularizer: 0.000 validation_error: 20.590 +(epoch: 11, iters: 114480, time: 0.539, data: 0.000) G_L1: 13.771 G_L1_ABSOLUTE: 2.478 G_L1_RELATIVE: 11.293 G_Regularizer: 0.000 validation_error: 19.755 +(epoch: 11, iters: 116480, time: 0.535, data: 0.000) G_L1: 16.205 G_L1_ABSOLUTE: 2.886 G_L1_RELATIVE: 13.318 G_Regularizer: 0.000 validation_error: 20.530 +(epoch: 11, iters: 118480, time: 0.522, data: 0.000) G_L1: 12.835 G_L1_ABSOLUTE: 2.276 G_L1_RELATIVE: 10.559 G_Regularizer: 0.000 validation_error: 20.497 +(epoch: 11, iters: 120480, time: 0.544, data: 0.000) G_L1: 16.972 G_L1_ABSOLUTE: 2.464 G_L1_RELATIVE: 14.508 G_Regularizer: 0.000 validation_error: 20.915 +(epoch: 11, iters: 122480, time: 0.542, data: 0.000) G_L1: 13.825 G_L1_ABSOLUTE: 3.029 G_L1_RELATIVE: 10.796 G_Regularizer: 0.000 validation_error: 21.800 +(epoch: 11, iters: 124480, time: 0.531, data: 0.000) G_L1: 16.790 G_L1_ABSOLUTE: 2.346 G_L1_RELATIVE: 14.443 G_Regularizer: 0.000 validation_error: 20.686 +(epoch: 11, iters: 126480, time: 0.536, data: 0.000) G_L1: 15.606 G_L1_ABSOLUTE: 2.775 G_L1_RELATIVE: 12.831 G_Regularizer: 0.000 validation_error: 21.010 +(epoch: 11, iters: 128480, time: 0.544, data: 0.000) G_L1: 13.453 G_L1_ABSOLUTE: 2.277 G_L1_RELATIVE: 11.176 G_Regularizer: 0.000 validation_error: 20.572 +(epoch: 11, iters: 130480, time: 0.540, data: 0.000) G_L1: 16.980 G_L1_ABSOLUTE: 2.972 G_L1_RELATIVE: 14.008 G_Regularizer: 0.000 validation_error: 21.207 +(epoch: 11, iters: 132480, time: 0.905, data: 0.000) G_L1: 13.569 G_L1_ABSOLUTE: 2.375 G_L1_RELATIVE: 11.194 G_Regularizer: 0.000 validation_error: 20.845 +(epoch: 11, iters: 134480, time: 0.918, data: 0.000) G_L1: 16.084 G_L1_ABSOLUTE: 2.718 G_L1_RELATIVE: 13.366 G_Regularizer: 0.000 validation_error: 20.218 +(epoch: 11, iters: 136480, time: 0.920, data: 0.000) G_L1: 14.828 G_L1_ABSOLUTE: 3.328 G_L1_RELATIVE: 11.500 G_Regularizer: 0.000 validation_error: 20.898 +(epoch: 11, iters: 138480, time: 0.920, data: 0.000) G_L1: 20.499 G_L1_ABSOLUTE: 2.876 G_L1_RELATIVE: 17.623 G_Regularizer: 0.000 validation_error: 20.491 +(epoch: 11, iters: 140480, time: 0.920, data: 0.000) G_L1: 14.632 G_L1_ABSOLUTE: 2.632 G_L1_RELATIVE: 12.000 G_Regularizer: 0.000 validation_error: 20.408 +(epoch: 11, iters: 142480, time: 0.919, data: 0.000) G_L1: 16.573 G_L1_ABSOLUTE: 2.894 G_L1_RELATIVE: 13.679 G_Regularizer: 0.000 validation_error: 20.700 +(epoch: 11, iters: 144480, time: 
0.918, data: 0.000) G_L1: 16.041 G_L1_ABSOLUTE: 2.957 G_L1_RELATIVE: 13.084 G_Regularizer: 0.000 validation_error: 20.929 +(epoch: 11, iters: 146480, time: 0.925, data: 0.001) G_L1: 14.816 G_L1_ABSOLUTE: 2.283 G_L1_RELATIVE: 12.533 G_Regularizer: 0.000 validation_error: 21.000 +(epoch: 11, iters: 148480, time: 0.918, data: 0.000) G_L1: 17.811 G_L1_ABSOLUTE: 2.959 G_L1_RELATIVE: 14.853 G_Regularizer: 0.000 validation_error: 21.003 +(epoch: 11, iters: 150480, time: 0.919, data: 0.000) G_L1: 13.809 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 11.355 G_Regularizer: 0.000 validation_error: 20.796 +(epoch: 11, iters: 152480, time: 0.919, data: 0.000) G_L1: 13.772 G_L1_ABSOLUTE: 2.502 G_L1_RELATIVE: 11.270 G_Regularizer: 0.000 validation_error: 20.652 +(epoch: 11, iters: 154480, time: 0.923, data: 0.000) G_L1: 15.495 G_L1_ABSOLUTE: 2.855 G_L1_RELATIVE: 12.640 G_Regularizer: 0.000 validation_error: 20.562 +(epoch: 11, iters: 156480, time: 0.927, data: 0.000) G_L1: 14.256 G_L1_ABSOLUTE: 2.375 G_L1_RELATIVE: 11.881 G_Regularizer: 0.000 validation_error: 19.980 +(epoch: 11, iters: 158480, time: 0.920, data: 0.000) G_L1: 16.756 G_L1_ABSOLUTE: 2.562 G_L1_RELATIVE: 14.194 G_Regularizer: 0.000 validation_error: 20.596 +(epoch: 11, iters: 160480, time: 0.925, data: 0.000) G_L1: 17.864 G_L1_ABSOLUTE: 2.916 G_L1_RELATIVE: 14.947 G_Regularizer: 0.000 validation_error: 20.221 +(epoch: 11, iters: 162480, time: 0.917, data: 0.000) G_L1: 15.868 G_L1_ABSOLUTE: 2.927 G_L1_RELATIVE: 12.941 G_Regularizer: 0.000 validation_error: 20.288 +(epoch: 11, iters: 164480, time: 0.919, data: 0.000) G_L1: 12.488 G_L1_ABSOLUTE: 2.594 G_L1_RELATIVE: 9.894 G_Regularizer: 0.000 validation_error: 20.631 +(epoch: 11, iters: 166480, time: 0.925, data: 0.000) G_L1: 18.878 G_L1_ABSOLUTE: 2.629 G_L1_RELATIVE: 16.249 G_Regularizer: 0.000 validation_error: 20.800 +(epoch: 11, iters: 168480, time: 0.923, data: 0.000) G_L1: 15.654 G_L1_ABSOLUTE: 2.598 G_L1_RELATIVE: 13.055 G_Regularizer: 0.000 validation_error: 20.369 +(epoch: 11, iters: 170480, time: 0.923, data: 0.000) G_L1: 15.099 G_L1_ABSOLUTE: 2.821 G_L1_RELATIVE: 12.279 G_Regularizer: 0.000 validation_error: 20.030 +(epoch: 11, iters: 172480, time: 0.922, data: 0.000) G_L1: 16.376 G_L1_ABSOLUTE: 2.560 G_L1_RELATIVE: 13.815 G_Regularizer: 0.000 validation_error: 20.749 +(epoch: 11, iters: 174480, time: 0.924, data: 0.000) G_L1: 13.603 G_L1_ABSOLUTE: 2.692 G_L1_RELATIVE: 10.911 G_Regularizer: 0.000 validation_error: 19.996 +(epoch: 11, iters: 176480, time: 0.930, data: 0.000) G_L1: 16.873 G_L1_ABSOLUTE: 2.692 G_L1_RELATIVE: 14.181 G_Regularizer: 0.000 validation_error: 20.608 +(epoch: 11, iters: 178480, time: 0.923, data: 0.000) G_L1: 19.851 G_L1_ABSOLUTE: 3.333 G_L1_RELATIVE: 16.518 G_Regularizer: 0.000 validation_error: 20.615 +(epoch: 11, iters: 180480, time: 0.928, data: 0.000) G_L1: 17.791 G_L1_ABSOLUTE: 2.742 G_L1_RELATIVE: 15.049 G_Regularizer: 0.000 validation_error: 20.285 +(epoch: 11, iters: 182480, time: 0.919, data: 0.000) G_L1: 19.061 G_L1_ABSOLUTE: 3.048 G_L1_RELATIVE: 16.013 G_Regularizer: 0.000 validation_error: 21.129 +(epoch: 11, iters: 184480, time: 0.915, data: 0.000) G_L1: 15.264 G_L1_ABSOLUTE: 2.627 G_L1_RELATIVE: 12.638 G_Regularizer: 0.000 validation_error: 20.644 +(epoch: 11, iters: 186480, time: 0.920, data: 0.000) G_L1: 13.892 G_L1_ABSOLUTE: 2.336 G_L1_RELATIVE: 11.555 G_Regularizer: 0.000 validation_error: 21.263 +(epoch: 11, iters: 188480, time: 0.921, data: 0.000) G_L1: 15.035 G_L1_ABSOLUTE: 2.827 G_L1_RELATIVE: 12.209 G_Regularizer: 0.000 validation_error: 20.722 
+(epoch: 11, iters: 190480, time: 0.925, data: 0.000) G_L1: 15.177 G_L1_ABSOLUTE: 2.464 G_L1_RELATIVE: 12.713 G_Regularizer: 0.000 validation_error: 21.315 +(epoch: 11, iters: 192480, time: 0.924, data: 0.000) G_L1: 15.585 G_L1_ABSOLUTE: 2.265 G_L1_RELATIVE: 13.320 G_Regularizer: 0.000 validation_error: 21.604 +(epoch: 11, iters: 194480, time: 0.926, data: 0.000) G_L1: 12.338 G_L1_ABSOLUTE: 2.569 G_L1_RELATIVE: 9.768 G_Regularizer: 0.000 validation_error: 21.072 +(epoch: 11, iters: 196480, time: 0.921, data: 0.000) G_L1: 18.636 G_L1_ABSOLUTE: 2.819 G_L1_RELATIVE: 15.817 G_Regularizer: 0.000 validation_error: 20.552 +(epoch: 11, iters: 198480, time: 0.925, data: 0.000) G_L1: 14.545 G_L1_ABSOLUTE: 2.524 G_L1_RELATIVE: 12.021 G_Regularizer: 0.000 validation_error: 20.528 +(epoch: 11, iters: 200480, time: 0.920, data: 0.000) G_L1: 20.975 G_L1_ABSOLUTE: 3.117 G_L1_RELATIVE: 17.858 G_Regularizer: 0.000 validation_error: 20.569 +(epoch: 11, iters: 202480, time: 0.928, data: 0.000) G_L1: 13.589 G_L1_ABSOLUTE: 2.457 G_L1_RELATIVE: 11.132 G_Regularizer: 0.000 validation_error: 21.420 +(epoch: 11, iters: 204480, time: 0.921, data: 0.000) G_L1: 13.505 G_L1_ABSOLUTE: 2.495 G_L1_RELATIVE: 11.010 G_Regularizer: 0.000 validation_error: 20.684 +(epoch: 11, iters: 206480, time: 0.926, data: 0.000) G_L1: 16.471 G_L1_ABSOLUTE: 2.922 G_L1_RELATIVE: 13.549 G_Regularizer: 0.000 validation_error: 20.504 +(epoch: 11, iters: 208480, time: 0.921, data: 0.000) G_L1: 16.531 G_L1_ABSOLUTE: 2.666 G_L1_RELATIVE: 13.865 G_Regularizer: 0.000 validation_error: 20.447 +(epoch: 11, iters: 210480, time: 0.886, data: 0.000) G_L1: 15.495 G_L1_ABSOLUTE: 2.795 G_L1_RELATIVE: 12.700 G_Regularizer: 0.000 validation_error: 21.027 +(epoch: 11, iters: 212480, time: 0.557, data: 0.000) G_L1: 15.242 G_L1_ABSOLUTE: 3.111 G_L1_RELATIVE: 12.131 G_Regularizer: 0.000 validation_error: 21.078 +(epoch: 11, iters: 214480, time: 0.533, data: 0.000) G_L1: 17.768 G_L1_ABSOLUTE: 2.340 G_L1_RELATIVE: 15.428 G_Regularizer: 0.000 validation_error: 21.018 +(epoch: 11, iters: 216480, time: 0.532, data: 0.001) G_L1: 17.826 G_L1_ABSOLUTE: 2.813 G_L1_RELATIVE: 15.012 G_Regularizer: 0.000 validation_error: 20.897 +(epoch: 11, iters: 218480, time: 0.529, data: 0.001) G_L1: 26.968 G_L1_ABSOLUTE: 3.524 G_L1_RELATIVE: 23.444 G_Regularizer: 0.000 validation_error: 20.216 +(epoch: 11, iters: 220480, time: 0.526, data: 0.000) G_L1: 16.722 G_L1_ABSOLUTE: 2.494 G_L1_RELATIVE: 14.228 G_Regularizer: 0.000 validation_error: 19.986 +(epoch: 11, iters: 222480, time: 0.529, data: 0.000) G_L1: 15.934 G_L1_ABSOLUTE: 2.429 G_L1_RELATIVE: 13.505 G_Regularizer: 0.000 validation_error: 20.684 +(epoch: 11, iters: 224480, time: 0.528, data: 0.000) G_L1: 16.382 G_L1_ABSOLUTE: 2.853 G_L1_RELATIVE: 13.529 G_Regularizer: 0.000 validation_error: 20.736 +(epoch: 11, iters: 226480, time: 0.530, data: 0.001) G_L1: 15.439 G_L1_ABSOLUTE: 2.626 G_L1_RELATIVE: 12.813 G_Regularizer: 0.000 validation_error: 21.076 +(epoch: 11, iters: 228480, time: 0.533, data: 0.000) G_L1: 14.611 G_L1_ABSOLUTE: 2.051 G_L1_RELATIVE: 12.560 G_Regularizer: 0.000 validation_error: 20.713 +(epoch: 11, iters: 230480, time: 0.528, data: 0.001) G_L1: 15.223 G_L1_ABSOLUTE: 2.678 G_L1_RELATIVE: 12.546 G_Regularizer: 0.000 validation_error: 20.168 +(epoch: 11, iters: 232480, time: 0.521, data: 0.000) G_L1: 18.622 G_L1_ABSOLUTE: 2.998 G_L1_RELATIVE: 15.624 G_Regularizer: 0.000 validation_error: 20.415 +(epoch: 11, iters: 234480, time: 0.530, data: 0.000) G_L1: 17.019 G_L1_ABSOLUTE: 2.676 G_L1_RELATIVE: 14.343 
G_Regularizer: 0.000 validation_error: 19.730 +(epoch: 11, iters: 236480, time: 0.525, data: 0.000) G_L1: 11.582 G_L1_ABSOLUTE: 2.836 G_L1_RELATIVE: 8.746 G_Regularizer: 0.000 validation_error: 20.749 +(epoch: 11, iters: 238480, time: 0.528, data: 0.000) G_L1: 17.524 G_L1_ABSOLUTE: 3.012 G_L1_RELATIVE: 14.511 G_Regularizer: 0.000 validation_error: 20.177 +(epoch: 11, iters: 240480, time: 0.530, data: 0.000) G_L1: 16.383 G_L1_ABSOLUTE: 2.977 G_L1_RELATIVE: 13.406 G_Regularizer: 0.000 validation_error: 21.028 +(epoch: 11, iters: 242480, time: 0.531, data: 0.000) G_L1: 12.936 G_L1_ABSOLUTE: 3.339 G_L1_RELATIVE: 9.596 G_Regularizer: 0.000 validation_error: 20.097 +(epoch: 11, iters: 244480, time: 0.526, data: 0.000) G_L1: 16.490 G_L1_ABSOLUTE: 2.345 G_L1_RELATIVE: 14.145 G_Regularizer: 0.000 validation_error: 21.038 +(epoch: 11, iters: 246480, time: 0.526, data: 0.000) G_L1: 19.234 G_L1_ABSOLUTE: 2.781 G_L1_RELATIVE: 16.453 G_Regularizer: 0.000 validation_error: 21.165 +(epoch: 11, iters: 248480, time: 0.528, data: 0.000) G_L1: 13.478 G_L1_ABSOLUTE: 2.899 G_L1_RELATIVE: 10.579 G_Regularizer: 0.000 validation_error: 19.934 +(epoch: 11, iters: 250480, time: 0.530, data: 0.000) G_L1: 14.532 G_L1_ABSOLUTE: 2.582 G_L1_RELATIVE: 11.949 G_Regularizer: 0.000 validation_error: 20.152 +(epoch: 11, iters: 252480, time: 0.528, data: 0.000) G_L1: 12.507 G_L1_ABSOLUTE: 2.492 G_L1_RELATIVE: 10.015 G_Regularizer: 0.000 validation_error: 20.710 +(epoch: 11, iters: 254480, time: 0.526, data: 0.000) G_L1: 13.786 G_L1_ABSOLUTE: 2.626 G_L1_RELATIVE: 11.160 G_Regularizer: 0.000 validation_error: 20.327 +(epoch: 11, iters: 256480, time: 0.527, data: 0.000) G_L1: 14.052 G_L1_ABSOLUTE: 2.538 G_L1_RELATIVE: 11.514 G_Regularizer: 0.000 validation_error: 20.498 +(epoch: 11, iters: 258480, time: 0.530, data: 0.000) G_L1: 14.081 G_L1_ABSOLUTE: 2.356 G_L1_RELATIVE: 11.725 G_Regularizer: 0.000 validation_error: 20.433 +(epoch: 11, iters: 260480, time: 0.524, data: 0.000) G_L1: 15.754 G_L1_ABSOLUTE: 2.822 G_L1_RELATIVE: 12.932 G_Regularizer: 0.000 validation_error: 20.470 +(epoch: 11, iters: 262480, time: 0.526, data: 0.000) G_L1: 16.718 G_L1_ABSOLUTE: 3.036 G_L1_RELATIVE: 13.682 G_Regularizer: 0.000 validation_error: 20.841 +(epoch: 11, iters: 264480, time: 0.530, data: 0.000) G_L1: 13.801 G_L1_ABSOLUTE: 2.472 G_L1_RELATIVE: 11.329 G_Regularizer: 0.000 validation_error: 20.918 +(epoch: 11, iters: 266480, time: 0.527, data: 0.000) G_L1: 14.400 G_L1_ABSOLUTE: 2.536 G_L1_RELATIVE: 11.864 G_Regularizer: 0.000 validation_error: 20.235 +(epoch: 11, iters: 268480, time: 0.533, data: 0.000) G_L1: 13.516 G_L1_ABSOLUTE: 2.510 G_L1_RELATIVE: 11.005 G_Regularizer: 0.000 validation_error: 20.992 +(epoch: 11, iters: 270480, time: 0.528, data: 0.000) G_L1: 16.431 G_L1_ABSOLUTE: 2.880 G_L1_RELATIVE: 13.550 G_Regularizer: 0.000 validation_error: 20.257 +(epoch: 11, iters: 272480, time: 0.526, data: 0.000) G_L1: 16.999 G_L1_ABSOLUTE: 3.061 G_L1_RELATIVE: 13.938 G_Regularizer: 0.000 validation_error: 21.031 +(epoch: 11, iters: 274480, time: 0.527, data: 0.000) G_L1: 14.973 G_L1_ABSOLUTE: 2.738 G_L1_RELATIVE: 12.235 G_Regularizer: 0.000 validation_error: 20.146 +(epoch: 11, iters: 276480, time: 0.528, data: 0.000) G_L1: 20.296 G_L1_ABSOLUTE: 2.959 G_L1_RELATIVE: 17.337 G_Regularizer: 0.000 validation_error: 21.501 +(epoch: 11, iters: 278480, time: 0.522, data: 0.000) G_L1: 13.399 G_L1_ABSOLUTE: 3.155 G_L1_RELATIVE: 10.243 G_Regularizer: 0.000 validation_error: 20.727 +(epoch: 11, iters: 280480, time: 0.529, data: 0.000) G_L1: 15.377 
G_L1_ABSOLUTE: 2.767 G_L1_RELATIVE: 12.610 G_Regularizer: 0.000 validation_error: 21.688 +(epoch: 11, iters: 282480, time: 0.530, data: 0.000) G_L1: 17.750 G_L1_ABSOLUTE: 2.470 G_L1_RELATIVE: 15.281 G_Regularizer: 0.000 validation_error: 20.327 +(epoch: 11, iters: 284480, time: 0.531, data: 0.000) G_L1: 17.927 G_L1_ABSOLUTE: 2.972 G_L1_RELATIVE: 14.955 G_Regularizer: 0.000 validation_error: 20.068 +(epoch: 11, iters: 286480, time: 0.523, data: 0.000) G_L1: 13.003 G_L1_ABSOLUTE: 2.220 G_L1_RELATIVE: 10.782 G_Regularizer: 0.000 validation_error: 20.509 +(epoch: 11, iters: 288480, time: 0.530, data: 0.000) G_L1: 14.933 G_L1_ABSOLUTE: 2.707 G_L1_RELATIVE: 12.226 G_Regularizer: 0.000 validation_error: 21.162 +(epoch: 11, iters: 290480, time: 0.536, data: 0.000) G_L1: 17.147 G_L1_ABSOLUTE: 3.229 G_L1_RELATIVE: 13.917 G_Regularizer: 0.000 validation_error: 21.011 +(epoch: 11, iters: 292480, time: 0.525, data: 0.000) G_L1: 13.382 G_L1_ABSOLUTE: 2.441 G_L1_RELATIVE: 10.942 G_Regularizer: 0.000 validation_error: 20.436 +(epoch: 11, iters: 294480, time: 0.524, data: 0.000) G_L1: 19.117 G_L1_ABSOLUTE: 2.744 G_L1_RELATIVE: 16.372 G_Regularizer: 0.000 validation_error: 20.534 +(epoch: 11, iters: 296480, time: 0.529, data: 0.000) G_L1: 14.440 G_L1_ABSOLUTE: 3.084 G_L1_RELATIVE: 11.356 G_Regularizer: 0.000 validation_error: 20.279 +(epoch: 11, iters: 298480, time: 0.531, data: 0.000) G_L1: 14.145 G_L1_ABSOLUTE: 2.807 G_L1_RELATIVE: 11.338 G_Regularizer: 0.000 validation_error: 21.221 +(epoch: 11, iters: 300480, time: 0.530, data: 0.000) G_L1: 16.273 G_L1_ABSOLUTE: 2.911 G_L1_RELATIVE: 13.362 G_Regularizer: 0.000 validation_error: 20.766 +(epoch: 11, iters: 302480, time: 0.534, data: 0.000) G_L1: 15.915 G_L1_ABSOLUTE: 2.633 G_L1_RELATIVE: 13.282 G_Regularizer: 0.000 validation_error: 20.481 +(epoch: 12, iters: 1728, time: 0.522, data: 0.000) G_L1: 15.724 G_L1_ABSOLUTE: 2.513 G_L1_RELATIVE: 13.210 G_Regularizer: 0.000 validation_error: 21.135 +(epoch: 12, iters: 3728, time: 0.532, data: 0.000) G_L1: 17.741 G_L1_ABSOLUTE: 2.722 G_L1_RELATIVE: 15.019 G_Regularizer: 0.000 validation_error: 19.812 +(epoch: 12, iters: 5728, time: 0.535, data: 0.000) G_L1: 12.924 G_L1_ABSOLUTE: 2.351 G_L1_RELATIVE: 10.573 G_Regularizer: 0.000 validation_error: 20.099 +(epoch: 12, iters: 7728, time: 0.525, data: 0.000) G_L1: 14.210 G_L1_ABSOLUTE: 2.361 G_L1_RELATIVE: 11.850 G_Regularizer: 0.000 validation_error: 20.962 +(epoch: 12, iters: 9728, time: 0.527, data: 0.000) G_L1: 16.350 G_L1_ABSOLUTE: 2.480 G_L1_RELATIVE: 13.870 G_Regularizer: 0.000 validation_error: 21.565 +(epoch: 12, iters: 11728, time: 0.529, data: 0.000) G_L1: 13.257 G_L1_ABSOLUTE: 2.637 G_L1_RELATIVE: 10.620 G_Regularizer: 0.000 validation_error: 20.657 +(epoch: 12, iters: 13728, time: 0.533, data: 0.000) G_L1: 15.167 G_L1_ABSOLUTE: 2.471 G_L1_RELATIVE: 12.696 G_Regularizer: 0.000 validation_error: 22.384 +(epoch: 12, iters: 15728, time: 0.530, data: 0.000) G_L1: 13.293 G_L1_ABSOLUTE: 2.632 G_L1_RELATIVE: 10.660 G_Regularizer: 0.000 validation_error: 20.582 +(epoch: 12, iters: 17728, time: 0.526, data: 0.000) G_L1: 14.108 G_L1_ABSOLUTE: 2.909 G_L1_RELATIVE: 11.200 G_Regularizer: 0.000 validation_error: 20.206 +(epoch: 12, iters: 19728, time: 0.521, data: 0.000) G_L1: 15.960 G_L1_ABSOLUTE: 2.685 G_L1_RELATIVE: 13.274 G_Regularizer: 0.000 validation_error: 20.835 +(epoch: 12, iters: 21728, time: 0.523, data: 0.000) G_L1: 13.859 G_L1_ABSOLUTE: 2.388 G_L1_RELATIVE: 11.471 G_Regularizer: 0.000 validation_error: 20.943 +(epoch: 12, iters: 23728, time: 0.527, data: 
0.000) G_L1: 14.965 G_L1_ABSOLUTE: 2.251 G_L1_RELATIVE: 12.714 G_Regularizer: 0.000 validation_error: 20.772 +(epoch: 12, iters: 25728, time: 0.532, data: 0.000) G_L1: 13.731 G_L1_ABSOLUTE: 2.560 G_L1_RELATIVE: 11.171 G_Regularizer: 0.000 validation_error: 20.535 +(epoch: 12, iters: 27728, time: 0.522, data: 0.000) G_L1: 15.238 G_L1_ABSOLUTE: 2.664 G_L1_RELATIVE: 12.575 G_Regularizer: 0.000 validation_error: 20.522 +(epoch: 12, iters: 29728, time: 0.523, data: 0.000) G_L1: 16.831 G_L1_ABSOLUTE: 2.886 G_L1_RELATIVE: 13.945 G_Regularizer: 0.000 validation_error: 20.735 +(epoch: 12, iters: 31728, time: 0.529, data: 0.000) G_L1: 14.177 G_L1_ABSOLUTE: 2.996 G_L1_RELATIVE: 11.181 G_Regularizer: 0.000 validation_error: 20.897 +(epoch: 12, iters: 33728, time: 0.527, data: 0.000) G_L1: 18.937 G_L1_ABSOLUTE: 2.977 G_L1_RELATIVE: 15.960 G_Regularizer: 0.000 validation_error: 21.125 +(epoch: 12, iters: 35728, time: 0.527, data: 0.000) G_L1: 14.674 G_L1_ABSOLUTE: 2.533 G_L1_RELATIVE: 12.141 G_Regularizer: 0.000 validation_error: 21.017 +(epoch: 12, iters: 37728, time: 0.522, data: 0.000) G_L1: 17.610 G_L1_ABSOLUTE: 2.907 G_L1_RELATIVE: 14.703 G_Regularizer: 0.000 validation_error: 20.417 +(epoch: 12, iters: 39728, time: 0.525, data: 0.001) G_L1: 15.945 G_L1_ABSOLUTE: 2.330 G_L1_RELATIVE: 13.615 G_Regularizer: 0.000 validation_error: 19.909 +(epoch: 12, iters: 41728, time: 0.523, data: 0.000) G_L1: 17.535 G_L1_ABSOLUTE: 2.929 G_L1_RELATIVE: 14.606 G_Regularizer: 0.000 validation_error: 20.520 +(epoch: 12, iters: 43728, time: 0.529, data: 0.000) G_L1: 16.762 G_L1_ABSOLUTE: 2.672 G_L1_RELATIVE: 14.090 G_Regularizer: 0.000 validation_error: 21.000 +(epoch: 12, iters: 45728, time: 0.533, data: 0.000) G_L1: 16.834 G_L1_ABSOLUTE: 2.582 G_L1_RELATIVE: 14.252 G_Regularizer: 0.000 validation_error: 20.778 +(epoch: 12, iters: 47728, time: 0.530, data: 0.000) G_L1: 17.534 G_L1_ABSOLUTE: 2.719 G_L1_RELATIVE: 14.815 G_Regularizer: 0.000 validation_error: 20.067 +(epoch: 12, iters: 49728, time: 0.529, data: 0.000) G_L1: 15.151 G_L1_ABSOLUTE: 2.385 G_L1_RELATIVE: 12.765 G_Regularizer: 0.000 validation_error: 20.490 +(epoch: 12, iters: 51728, time: 0.526, data: 0.000) G_L1: 14.129 G_L1_ABSOLUTE: 2.848 G_L1_RELATIVE: 11.281 G_Regularizer: 0.000 validation_error: 20.452 +(epoch: 12, iters: 53728, time: 0.528, data: 0.000) G_L1: 14.174 G_L1_ABSOLUTE: 2.601 G_L1_RELATIVE: 11.572 G_Regularizer: 0.000 validation_error: 20.471 +(epoch: 12, iters: 55728, time: 0.528, data: 0.000) G_L1: 17.472 G_L1_ABSOLUTE: 2.875 G_L1_RELATIVE: 14.597 G_Regularizer: 0.000 validation_error: 21.149 +(epoch: 12, iters: 57728, time: 0.533, data: 0.000) G_L1: 16.179 G_L1_ABSOLUTE: 2.851 G_L1_RELATIVE: 13.328 G_Regularizer: 0.000 validation_error: 21.284 +(epoch: 12, iters: 59728, time: 0.534, data: 0.000) G_L1: 18.631 G_L1_ABSOLUTE: 2.775 G_L1_RELATIVE: 15.856 G_Regularizer: 0.000 validation_error: 20.998 +(epoch: 12, iters: 61728, time: 0.528, data: 0.000) G_L1: 14.799 G_L1_ABSOLUTE: 3.099 G_L1_RELATIVE: 11.700 G_Regularizer: 0.000 validation_error: 19.772 +(epoch: 12, iters: 63728, time: 0.525, data: 0.000) G_L1: 14.629 G_L1_ABSOLUTE: 2.606 G_L1_RELATIVE: 12.023 G_Regularizer: 0.000 validation_error: 21.674 +(epoch: 12, iters: 65728, time: 0.526, data: 0.001) G_L1: 15.409 G_L1_ABSOLUTE: 2.612 G_L1_RELATIVE: 12.797 G_Regularizer: 0.000 validation_error: 20.369 +(epoch: 12, iters: 67728, time: 0.528, data: 0.000) G_L1: 13.566 G_L1_ABSOLUTE: 2.931 G_L1_RELATIVE: 10.635 G_Regularizer: 0.000 validation_error: 20.993 +(epoch: 12, iters: 69728, time: 
0.530, data: 0.000) G_L1: 15.074 G_L1_ABSOLUTE: 3.131 G_L1_RELATIVE: 11.943 G_Regularizer: 0.000 validation_error: 20.041 +(epoch: 12, iters: 71728, time: 0.526, data: 0.000) G_L1: 13.059 G_L1_ABSOLUTE: 2.664 G_L1_RELATIVE: 10.395 G_Regularizer: 0.000 validation_error: 20.370 +(epoch: 12, iters: 73728, time: 0.524, data: 0.000) G_L1: 12.749 G_L1_ABSOLUTE: 2.412 G_L1_RELATIVE: 10.337 G_Regularizer: 0.000 validation_error: 20.185 +(epoch: 12, iters: 75728, time: 0.527, data: 0.000) G_L1: 15.351 G_L1_ABSOLUTE: 2.779 G_L1_RELATIVE: 12.572 G_Regularizer: 0.000 validation_error: 20.555 +(epoch: 12, iters: 77728, time: 0.527, data: 0.000) G_L1: 13.469 G_L1_ABSOLUTE: 2.444 G_L1_RELATIVE: 11.025 G_Regularizer: 0.000 validation_error: 20.747 +(epoch: 12, iters: 79728, time: 0.530, data: 0.001) G_L1: 17.070 G_L1_ABSOLUTE: 2.727 G_L1_RELATIVE: 14.342 G_Regularizer: 0.000 validation_error: 20.950 +(epoch: 12, iters: 81728, time: 0.527, data: 0.000) G_L1: 17.607 G_L1_ABSOLUTE: 2.362 G_L1_RELATIVE: 15.244 G_Regularizer: 0.000 validation_error: 20.839 +(epoch: 12, iters: 83728, time: 0.527, data: 0.000) G_L1: 16.949 G_L1_ABSOLUTE: 2.584 G_L1_RELATIVE: 14.365 G_Regularizer: 0.000 validation_error: 20.403 +(epoch: 12, iters: 85728, time: 0.531, data: 0.000) G_L1: 15.851 G_L1_ABSOLUTE: 2.624 G_L1_RELATIVE: 13.227 G_Regularizer: 0.000 validation_error: 21.052 +(epoch: 12, iters: 87728, time: 0.527, data: 0.000) G_L1: 16.581 G_L1_ABSOLUTE: 2.746 G_L1_RELATIVE: 13.835 G_Regularizer: 0.000 validation_error: 20.655 +(epoch: 12, iters: 89728, time: 0.529, data: 0.000) G_L1: 13.167 G_L1_ABSOLUTE: 2.706 G_L1_RELATIVE: 10.461 G_Regularizer: 0.000 validation_error: 20.096 +(epoch: 12, iters: 91728, time: 0.532, data: 0.000) G_L1: 15.076 G_L1_ABSOLUTE: 2.414 G_L1_RELATIVE: 12.662 G_Regularizer: 0.000 validation_error: 21.315 +(epoch: 12, iters: 93728, time: 0.526, data: 0.000) G_L1: 17.040 G_L1_ABSOLUTE: 2.646 G_L1_RELATIVE: 14.394 G_Regularizer: 0.000 validation_error: 21.336 +(epoch: 12, iters: 95728, time: 0.526, data: 0.000) G_L1: 12.777 G_L1_ABSOLUTE: 2.482 G_L1_RELATIVE: 10.295 G_Regularizer: 0.000 validation_error: 20.283 +(epoch: 12, iters: 97728, time: 0.529, data: 0.000) G_L1: 19.675 G_L1_ABSOLUTE: 3.133 G_L1_RELATIVE: 16.542 G_Regularizer: 0.000 validation_error: 20.522 +(epoch: 12, iters: 99728, time: 0.531, data: 0.000) G_L1: 18.210 G_L1_ABSOLUTE: 3.050 G_L1_RELATIVE: 15.160 G_Regularizer: 0.000 validation_error: 21.378 +(epoch: 12, iters: 101728, time: 0.528, data: 0.000) G_L1: 13.621 G_L1_ABSOLUTE: 2.500 G_L1_RELATIVE: 11.121 G_Regularizer: 0.000 validation_error: 20.751 +(epoch: 12, iters: 103728, time: 0.527, data: 0.000) G_L1: 14.532 G_L1_ABSOLUTE: 2.985 G_L1_RELATIVE: 11.547 G_Regularizer: 0.000 validation_error: 20.692 +(epoch: 12, iters: 105728, time: 0.526, data: 0.000) G_L1: 16.584 G_L1_ABSOLUTE: 2.626 G_L1_RELATIVE: 13.958 G_Regularizer: 0.000 validation_error: 21.020 +(epoch: 12, iters: 107728, time: 0.526, data: 0.000) G_L1: 15.125 G_L1_ABSOLUTE: 2.628 G_L1_RELATIVE: 12.497 G_Regularizer: 0.000 validation_error: 21.064 +(epoch: 12, iters: 109728, time: 0.526, data: 0.000) G_L1: 13.830 G_L1_ABSOLUTE: 2.643 G_L1_RELATIVE: 11.187 G_Regularizer: 0.000 validation_error: 20.117 +(epoch: 12, iters: 111728, time: 0.528, data: 0.000) G_L1: 14.338 G_L1_ABSOLUTE: 2.573 G_L1_RELATIVE: 11.765 G_Regularizer: 0.000 validation_error: 19.777 +(epoch: 12, iters: 113728, time: 0.529, data: 0.000) G_L1: 15.024 G_L1_ABSOLUTE: 2.434 G_L1_RELATIVE: 12.590 G_Regularizer: 0.000 validation_error: 20.720 +(epoch: 12, 
iters: 115728, time: 0.523, data: 0.000) G_L1: 14.143 G_L1_ABSOLUTE: 2.887 G_L1_RELATIVE: 11.256 G_Regularizer: 0.000 validation_error: 20.357
+[... several hundred similar entries omitted: the loss log continues with one record every 2000 iterations through epochs 12-15. G_L1 decomposes as G_L1_ABSOLUTE + G_L1_RELATIVE (G_Regularizer stays at 0.000 throughout) and mostly ranges between 12 and 20, with occasional spikes near 30; validation_error hovers around 20-21 with no clear downward trend; per-iteration time stays near 0.52-0.59 s and data-loading time near 0.000 s. A parsing sketch follows. ...]
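Since every record uses the same "(epoch: E, iters: I, time: T, data: D) name: value ..." layout, the log can be pulled into structured form with a few lines of Python. The helper below is a hypothetical sketch, not part of this repository; it assumes only the field layout visible in the sample entry above, and the final assert illustrates the G_L1 = G_L1_ABSOLUTE + G_L1_RELATIVE decomposition noted in the summary.

import re

# Matches the fixed "(epoch: E, iters: I, time: T, data: D)" header and
# captures the trailing "name: value" loss section.
LINE_RE = re.compile(
    r"\(epoch: (?P<epoch>\d+), iters: (?P<iters>\d+), "
    r"time: (?P<time>[\d.]+), data: (?P<data>[\d.]+)\) (?P<losses>.*)"
)

def parse_log_line(line: str) -> dict:
    """Return epoch/iters/timings plus each named loss as a float ({} if malformed)."""
    # Tolerate a leading diff "+" marker when reading lines out of this patch.
    m = LINE_RE.match(line.strip().lstrip("+"))
    if not m:
        return {}
    entry = {
        "epoch": int(m["epoch"]), "iters": int(m["iters"]),
        "time": float(m["time"]), "data": float(m["data"]),
    }
    # The loss section is whitespace-separated "NAME: value" pairs.
    for name, value in re.findall(r"(\w+): ([\d.]+)", m["losses"]):
        entry[name] = float(value)
    return entry

sample = ("(epoch: 12, iters: 115728, time: 0.523, data: 0.000) "
          "G_L1: 14.143 G_L1_ABSOLUTE: 2.887 G_L1_RELATIVE: 11.256 "
          "G_Regularizer: 0.000 validation_error: 20.357")
e = parse_log_line(sample)
# The components sum to the total up to rounding: 2.887 + 11.256 == 14.143.
assert abs(e["G_L1"] - (e["G_L1_ABSOLUTE"] + e["G_L1_RELATIVE"])) < 1e-6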
G_L1_ABSOLUTE: 2.731 G_L1_RELATIVE: 12.957 G_Regularizer: 0.000 validation_error: 20.923 +(epoch: 15, iters: 75472, time: 0.560, data: 0.000) G_L1: 16.235 G_L1_ABSOLUTE: 3.098 G_L1_RELATIVE: 13.137 G_Regularizer: 0.000 validation_error: 20.801 +(epoch: 15, iters: 77472, time: 0.548, data: 0.000) G_L1: 16.214 G_L1_ABSOLUTE: 2.678 G_L1_RELATIVE: 13.536 G_Regularizer: 0.000 validation_error: 20.682 +(epoch: 15, iters: 79472, time: 0.559, data: 0.000) G_L1: 16.152 G_L1_ABSOLUTE: 2.631 G_L1_RELATIVE: 13.521 G_Regularizer: 0.000 validation_error: 20.774 +(epoch: 15, iters: 81472, time: 0.541, data: 0.000) G_L1: 13.228 G_L1_ABSOLUTE: 2.473 G_L1_RELATIVE: 10.755 G_Regularizer: 0.000 validation_error: 21.006 +(epoch: 15, iters: 83472, time: 0.547, data: 0.000) G_L1: 17.289 G_L1_ABSOLUTE: 2.813 G_L1_RELATIVE: 14.476 G_Regularizer: 0.000 validation_error: 20.583 +(epoch: 15, iters: 85472, time: 0.565, data: 0.000) G_L1: 14.868 G_L1_ABSOLUTE: 2.388 G_L1_RELATIVE: 12.480 G_Regularizer: 0.000 validation_error: 20.909 +(epoch: 15, iters: 87472, time: 0.559, data: 0.000) G_L1: 15.980 G_L1_ABSOLUTE: 2.333 G_L1_RELATIVE: 13.648 G_Regularizer: 0.000 validation_error: 20.804 +(epoch: 15, iters: 89472, time: 0.544, data: 0.000) G_L1: 13.769 G_L1_ABSOLUTE: 2.308 G_L1_RELATIVE: 11.461 G_Regularizer: 0.000 validation_error: 20.474 +(epoch: 15, iters: 91472, time: 0.539, data: 0.000) G_L1: 14.901 G_L1_ABSOLUTE: 2.777 G_L1_RELATIVE: 12.125 G_Regularizer: 0.000 validation_error: 20.739 +(epoch: 15, iters: 93472, time: 0.553, data: 0.000) G_L1: 14.701 G_L1_ABSOLUTE: 2.352 G_L1_RELATIVE: 12.349 G_Regularizer: 0.000 validation_error: 20.351 +(epoch: 15, iters: 95472, time: 0.547, data: 0.000) G_L1: 16.253 G_L1_ABSOLUTE: 2.931 G_L1_RELATIVE: 13.322 G_Regularizer: 0.000 validation_error: 20.452 +(epoch: 15, iters: 97472, time: 0.543, data: 0.000) G_L1: 13.488 G_L1_ABSOLUTE: 2.337 G_L1_RELATIVE: 11.152 G_Regularizer: 0.000 validation_error: 20.618 +(epoch: 15, iters: 99472, time: 0.550, data: 0.000) G_L1: 15.606 G_L1_ABSOLUTE: 2.812 G_L1_RELATIVE: 12.795 G_Regularizer: 0.000 validation_error: 20.326 +(epoch: 15, iters: 101472, time: 0.551, data: 0.000) G_L1: 14.492 G_L1_ABSOLUTE: 2.654 G_L1_RELATIVE: 11.838 G_Regularizer: 0.000 validation_error: 20.931 +(epoch: 15, iters: 103472, time: 0.559, data: 0.000) G_L1: 13.631 G_L1_ABSOLUTE: 3.128 G_L1_RELATIVE: 10.503 G_Regularizer: 0.000 validation_error: 20.949 +(epoch: 15, iters: 105472, time: 0.554, data: 0.000) G_L1: 14.674 G_L1_ABSOLUTE: 3.176 G_L1_RELATIVE: 11.498 G_Regularizer: 0.000 validation_error: 20.610 +(epoch: 15, iters: 107472, time: 0.535, data: 0.000) G_L1: 15.979 G_L1_ABSOLUTE: 3.190 G_L1_RELATIVE: 12.789 G_Regularizer: 0.000 validation_error: 20.396 +(epoch: 15, iters: 109472, time: 0.547, data: 0.000) G_L1: 15.654 G_L1_ABSOLUTE: 2.904 G_L1_RELATIVE: 12.750 G_Regularizer: 0.000 validation_error: 21.086 +(epoch: 15, iters: 111472, time: 0.544, data: 0.000) G_L1: 14.869 G_L1_ABSOLUTE: 2.645 G_L1_RELATIVE: 12.224 G_Regularizer: 0.000 validation_error: 20.581 +(epoch: 15, iters: 113472, time: 0.546, data: 0.000) G_L1: 15.339 G_L1_ABSOLUTE: 2.876 G_L1_RELATIVE: 12.463 G_Regularizer: 0.000 validation_error: 20.944 +(epoch: 15, iters: 115472, time: 0.531, data: 0.000) G_L1: 13.937 G_L1_ABSOLUTE: 2.463 G_L1_RELATIVE: 11.474 G_Regularizer: 0.000 validation_error: 19.658 +(epoch: 15, iters: 117472, time: 0.557, data: 0.000) G_L1: 17.793 G_L1_ABSOLUTE: 3.152 G_L1_RELATIVE: 14.641 G_Regularizer: 0.000 validation_error: 20.536 +(epoch: 15, iters: 119472, time: 0.555, data: 
0.000) G_L1: 15.269 G_L1_ABSOLUTE: 2.487 G_L1_RELATIVE: 12.781 G_Regularizer: 0.000 validation_error: 21.558 +(epoch: 15, iters: 121472, time: 0.542, data: 0.000) G_L1: 15.236 G_L1_ABSOLUTE: 2.637 G_L1_RELATIVE: 12.599 G_Regularizer: 0.000 validation_error: 19.879 +(epoch: 15, iters: 123472, time: 0.549, data: 0.000) G_L1: 15.470 G_L1_ABSOLUTE: 2.739 G_L1_RELATIVE: 12.732 G_Regularizer: 0.000 validation_error: 20.528 +(epoch: 15, iters: 125472, time: 0.563, data: 0.000) G_L1: 14.923 G_L1_ABSOLUTE: 3.223 G_L1_RELATIVE: 11.701 G_Regularizer: 0.000 validation_error: 20.444 +(epoch: 15, iters: 127472, time: 0.546, data: 0.000) G_L1: 26.215 G_L1_ABSOLUTE: 2.356 G_L1_RELATIVE: 23.859 G_Regularizer: 0.000 validation_error: 20.420 +(epoch: 15, iters: 129472, time: 0.558, data: 0.000) G_L1: 17.561 G_L1_ABSOLUTE: 3.160 G_L1_RELATIVE: 14.401 G_Regularizer: 0.000 validation_error: 20.373 +(epoch: 15, iters: 131472, time: 0.542, data: 0.000) G_L1: 14.723 G_L1_ABSOLUTE: 2.630 G_L1_RELATIVE: 12.093 G_Regularizer: 0.000 validation_error: 19.950 +(epoch: 15, iters: 133472, time: 0.560, data: 0.000) G_L1: 12.727 G_L1_ABSOLUTE: 2.635 G_L1_RELATIVE: 10.092 G_Regularizer: 0.000 validation_error: 19.751 +(epoch: 15, iters: 135472, time: 0.560, data: 0.000) G_L1: 15.908 G_L1_ABSOLUTE: 2.844 G_L1_RELATIVE: 13.065 G_Regularizer: 0.000 validation_error: 20.890 +(epoch: 15, iters: 137472, time: 0.558, data: 0.000) G_L1: 14.417 G_L1_ABSOLUTE: 2.652 G_L1_RELATIVE: 11.764 G_Regularizer: 0.000 validation_error: 20.797 +(epoch: 15, iters: 139472, time: 0.542, data: 0.000) G_L1: 13.710 G_L1_ABSOLUTE: 2.526 G_L1_RELATIVE: 11.184 G_Regularizer: 0.000 validation_error: 19.684 +(epoch: 15, iters: 141472, time: 0.562, data: 0.000) G_L1: 15.427 G_L1_ABSOLUTE: 2.435 G_L1_RELATIVE: 12.992 G_Regularizer: 0.000 validation_error: 20.875 +(epoch: 15, iters: 143472, time: 0.559, data: 0.000) G_L1: 14.828 G_L1_ABSOLUTE: 2.765 G_L1_RELATIVE: 12.063 G_Regularizer: 0.000 validation_error: 19.963 +(epoch: 15, iters: 145472, time: 0.559, data: 0.000) G_L1: 15.792 G_L1_ABSOLUTE: 2.601 G_L1_RELATIVE: 13.192 G_Regularizer: 0.000 validation_error: 20.550 +(epoch: 15, iters: 147472, time: 0.545, data: 0.000) G_L1: 17.714 G_L1_ABSOLUTE: 2.748 G_L1_RELATIVE: 14.966 G_Regularizer: 0.000 validation_error: 20.238 +(epoch: 15, iters: 149472, time: 0.534, data: 0.000) G_L1: 15.828 G_L1_ABSOLUTE: 2.770 G_L1_RELATIVE: 13.058 G_Regularizer: 0.000 validation_error: 20.618 +(epoch: 15, iters: 151472, time: 0.533, data: 0.000) G_L1: 14.684 G_L1_ABSOLUTE: 2.658 G_L1_RELATIVE: 12.027 G_Regularizer: 0.000 validation_error: 20.303 +(epoch: 15, iters: 153472, time: 0.556, data: 0.000) G_L1: 17.120 G_L1_ABSOLUTE: 2.836 G_L1_RELATIVE: 14.284 G_Regularizer: 0.000 validation_error: 21.206 +(epoch: 15, iters: 155472, time: 0.541, data: 0.000) G_L1: 15.430 G_L1_ABSOLUTE: 2.479 G_L1_RELATIVE: 12.951 G_Regularizer: 0.000 validation_error: 20.744 +(epoch: 15, iters: 157472, time: 0.534, data: 0.000) G_L1: 15.051 G_L1_ABSOLUTE: 2.990 G_L1_RELATIVE: 12.061 G_Regularizer: 0.000 validation_error: 20.867 +(epoch: 15, iters: 159472, time: 0.560, data: 0.000) G_L1: 14.118 G_L1_ABSOLUTE: 2.863 G_L1_RELATIVE: 11.255 G_Regularizer: 0.000 validation_error: 20.757 +(epoch: 15, iters: 161472, time: 0.557, data: 0.000) G_L1: 15.586 G_L1_ABSOLUTE: 2.910 G_L1_RELATIVE: 12.675 G_Regularizer: 0.000 validation_error: 20.439 +(epoch: 15, iters: 163472, time: 0.566, data: 0.000) G_L1: 18.545 G_L1_ABSOLUTE: 3.261 G_L1_RELATIVE: 15.285 G_Regularizer: 0.000 validation_error: 20.809 +(epoch: 15, 
iters: 165472, time: 0.535, data: 0.000) G_L1: 14.515 G_L1_ABSOLUTE: 2.846 G_L1_RELATIVE: 11.669 G_Regularizer: 0.000 validation_error: 20.972 +(epoch: 15, iters: 167472, time: 0.539, data: 0.000) G_L1: 13.231 G_L1_ABSOLUTE: 2.396 G_L1_RELATIVE: 10.834 G_Regularizer: 0.000 validation_error: 20.966 +(epoch: 15, iters: 169472, time: 0.554, data: 0.000) G_L1: 14.182 G_L1_ABSOLUTE: 2.567 G_L1_RELATIVE: 11.615 G_Regularizer: 0.000 validation_error: 20.655 +(epoch: 15, iters: 171472, time: 0.564, data: 0.000) G_L1: 17.483 G_L1_ABSOLUTE: 3.067 G_L1_RELATIVE: 14.417 G_Regularizer: 0.000 validation_error: 20.777 +(epoch: 15, iters: 173472, time: 0.533, data: 0.001) G_L1: 16.094 G_L1_ABSOLUTE: 2.831 G_L1_RELATIVE: 13.263 G_Regularizer: 0.000 validation_error: 20.656 +(epoch: 15, iters: 175472, time: 0.550, data: 0.000) G_L1: 13.102 G_L1_ABSOLUTE: 2.331 G_L1_RELATIVE: 10.772 G_Regularizer: 0.000 validation_error: 20.769 +(epoch: 15, iters: 177472, time: 0.554, data: 0.000) G_L1: 16.650 G_L1_ABSOLUTE: 2.933 G_L1_RELATIVE: 13.716 G_Regularizer: 0.000 validation_error: 20.180 +(epoch: 15, iters: 179472, time: 0.550, data: 0.000) G_L1: 16.171 G_L1_ABSOLUTE: 2.977 G_L1_RELATIVE: 13.194 G_Regularizer: 0.000 validation_error: 20.300 +(epoch: 15, iters: 181472, time: 0.542, data: 0.000) G_L1: 14.661 G_L1_ABSOLUTE: 2.769 G_L1_RELATIVE: 11.892 G_Regularizer: 0.000 validation_error: 20.507 +(epoch: 15, iters: 183472, time: 0.555, data: 0.000) G_L1: 18.622 G_L1_ABSOLUTE: 2.817 G_L1_RELATIVE: 15.805 G_Regularizer: 0.000 validation_error: 20.513 +(epoch: 15, iters: 185472, time: 0.543, data: 0.000) G_L1: 13.532 G_L1_ABSOLUTE: 2.262 G_L1_RELATIVE: 11.270 G_Regularizer: 0.000 validation_error: 20.405 +(epoch: 15, iters: 187472, time: 0.559, data: 0.000) G_L1: 14.766 G_L1_ABSOLUTE: 2.778 G_L1_RELATIVE: 11.988 G_Regularizer: 0.000 validation_error: 20.122 +(epoch: 15, iters: 189472, time: 0.552, data: 0.000) G_L1: 16.124 G_L1_ABSOLUTE: 2.826 G_L1_RELATIVE: 13.297 G_Regularizer: 0.000 validation_error: 20.821 +(epoch: 15, iters: 191472, time: 0.559, data: 0.000) G_L1: 15.698 G_L1_ABSOLUTE: 2.067 G_L1_RELATIVE: 13.631 G_Regularizer: 0.000 validation_error: 20.789 +(epoch: 15, iters: 193472, time: 0.557, data: 0.000) G_L1: 12.478 G_L1_ABSOLUTE: 2.662 G_L1_RELATIVE: 9.817 G_Regularizer: 0.000 validation_error: 20.756 +(epoch: 15, iters: 195472, time: 0.558, data: 0.000) G_L1: 14.350 G_L1_ABSOLUTE: 2.147 G_L1_RELATIVE: 12.203 G_Regularizer: 0.000 validation_error: 20.709 +(epoch: 15, iters: 197472, time: 0.550, data: 0.000) G_L1: 17.161 G_L1_ABSOLUTE: 2.412 G_L1_RELATIVE: 14.749 G_Regularizer: 0.000 validation_error: 20.725 +(epoch: 15, iters: 199472, time: 0.536, data: 0.000) G_L1: 17.677 G_L1_ABSOLUTE: 2.654 G_L1_RELATIVE: 15.023 G_Regularizer: 0.000 validation_error: 21.571 +(epoch: 15, iters: 201472, time: 0.560, data: 0.000) G_L1: 14.986 G_L1_ABSOLUTE: 3.197 G_L1_RELATIVE: 11.789 G_Regularizer: 0.000 validation_error: 20.925 +(epoch: 15, iters: 203472, time: 0.559, data: 0.000) G_L1: 15.386 G_L1_ABSOLUTE: 3.146 G_L1_RELATIVE: 12.241 G_Regularizer: 0.000 validation_error: 20.990 +(epoch: 15, iters: 205472, time: 0.556, data: 0.000) G_L1: 15.276 G_L1_ABSOLUTE: 2.823 G_L1_RELATIVE: 12.453 G_Regularizer: 0.000 validation_error: 20.298 +(epoch: 15, iters: 207472, time: 0.549, data: 0.000) G_L1: 14.406 G_L1_ABSOLUTE: 2.617 G_L1_RELATIVE: 11.789 G_Regularizer: 0.000 validation_error: 20.629 +(epoch: 15, iters: 209472, time: 0.556, data: 0.000) G_L1: 14.142 G_L1_ABSOLUTE: 2.166 G_L1_RELATIVE: 11.976 G_Regularizer: 0.000 
validation_error: 21.290 +(epoch: 15, iters: 211472, time: 0.541, data: 0.000) G_L1: 14.233 G_L1_ABSOLUTE: 2.392 G_L1_RELATIVE: 11.841 G_Regularizer: 0.000 validation_error: 21.752 +(epoch: 15, iters: 213472, time: 0.550, data: 0.001) G_L1: 14.178 G_L1_ABSOLUTE: 2.880 G_L1_RELATIVE: 11.298 G_Regularizer: 0.000 validation_error: 20.306 +(epoch: 15, iters: 215472, time: 0.547, data: 0.000) G_L1: 16.368 G_L1_ABSOLUTE: 2.580 G_L1_RELATIVE: 13.787 G_Regularizer: 0.000 validation_error: 20.656 +(epoch: 15, iters: 217472, time: 0.535, data: 0.000) G_L1: 12.199 G_L1_ABSOLUTE: 2.433 G_L1_RELATIVE: 9.766 G_Regularizer: 0.000 validation_error: 20.855 +(epoch: 15, iters: 219472, time: 0.532, data: 0.001) G_L1: 14.260 G_L1_ABSOLUTE: 2.973 G_L1_RELATIVE: 11.286 G_Regularizer: 0.000 validation_error: 21.014 +(epoch: 15, iters: 221472, time: 0.555, data: 0.000) G_L1: 14.878 G_L1_ABSOLUTE: 2.754 G_L1_RELATIVE: 12.124 G_Regularizer: 0.000 validation_error: 20.326 +(epoch: 15, iters: 223472, time: 0.545, data: 0.001) G_L1: 17.159 G_L1_ABSOLUTE: 3.035 G_L1_RELATIVE: 14.124 G_Regularizer: 0.000 validation_error: 20.866 +(epoch: 15, iters: 225472, time: 0.560, data: 0.000) G_L1: 15.612 G_L1_ABSOLUTE: 2.610 G_L1_RELATIVE: 13.001 G_Regularizer: 0.000 validation_error: 20.145 +(epoch: 15, iters: 227472, time: 0.561, data: 0.000) G_L1: 14.660 G_L1_ABSOLUTE: 2.556 G_L1_RELATIVE: 12.104 G_Regularizer: 0.000 validation_error: 20.721 +(epoch: 15, iters: 229472, time: 0.538, data: 0.000) G_L1: 14.824 G_L1_ABSOLUTE: 2.365 G_L1_RELATIVE: 12.459 G_Regularizer: 0.000 validation_error: 19.850 +(epoch: 15, iters: 231472, time: 0.534, data: 0.000) G_L1: 17.321 G_L1_ABSOLUTE: 3.581 G_L1_RELATIVE: 13.740 G_Regularizer: 0.000 validation_error: 20.945 +(epoch: 15, iters: 233472, time: 0.532, data: 0.000) G_L1: 14.237 G_L1_ABSOLUTE: 2.415 G_L1_RELATIVE: 11.822 G_Regularizer: 0.000 validation_error: 21.171 +(epoch: 15, iters: 235472, time: 0.541, data: 0.000) G_L1: 16.495 G_L1_ABSOLUTE: 2.913 G_L1_RELATIVE: 13.582 G_Regularizer: 0.000 validation_error: 20.304 +(epoch: 15, iters: 237472, time: 0.537, data: 0.000) G_L1: 13.676 G_L1_ABSOLUTE: 2.441 G_L1_RELATIVE: 11.235 G_Regularizer: 0.000 validation_error: 20.770 +(epoch: 15, iters: 239472, time: 0.556, data: 0.000) G_L1: 13.043 G_L1_ABSOLUTE: 2.216 G_L1_RELATIVE: 10.827 G_Regularizer: 0.000 validation_error: 20.675 +(epoch: 15, iters: 241472, time: 0.550, data: 0.000) G_L1: 14.686 G_L1_ABSOLUTE: 2.617 G_L1_RELATIVE: 12.069 G_Regularizer: 0.000 validation_error: 20.284 +(epoch: 15, iters: 243472, time: 0.565, data: 0.000) G_L1: 15.374 G_L1_ABSOLUTE: 2.370 G_L1_RELATIVE: 13.004 G_Regularizer: 0.000 validation_error: 20.375 +(epoch: 15, iters: 245472, time: 0.549, data: 0.000) G_L1: 13.655 G_L1_ABSOLUTE: 2.772 G_L1_RELATIVE: 10.883 G_Regularizer: 0.000 validation_error: 20.598 +(epoch: 15, iters: 247472, time: 0.564, data: 0.000) G_L1: 12.804 G_L1_ABSOLUTE: 2.347 G_L1_RELATIVE: 10.457 G_Regularizer: 0.000 validation_error: 20.868 +(epoch: 15, iters: 249472, time: 0.537, data: 0.000) G_L1: 13.787 G_L1_ABSOLUTE: 2.727 G_L1_RELATIVE: 11.060 G_Regularizer: 0.000 validation_error: 20.352 +(epoch: 15, iters: 251472, time: 0.559, data: 0.000) G_L1: 17.749 G_L1_ABSOLUTE: 2.786 G_L1_RELATIVE: 14.963 G_Regularizer: 0.000 validation_error: 21.260 +(epoch: 15, iters: 253472, time: 0.566, data: 0.000) G_L1: 15.926 G_L1_ABSOLUTE: 2.411 G_L1_RELATIVE: 13.515 G_Regularizer: 0.000 validation_error: 19.741 +(epoch: 15, iters: 255472, time: 0.558, data: 0.000) G_L1: 13.374 G_L1_ABSOLUTE: 2.396 
G_L1_RELATIVE: 10.978 G_Regularizer: 0.000 validation_error: 20.073 +(epoch: 15, iters: 257472, time: 0.543, data: 0.000) G_L1: 16.039 G_L1_ABSOLUTE: 2.526 G_L1_RELATIVE: 13.513 G_Regularizer: 0.000 validation_error: 20.683 +(epoch: 15, iters: 259472, time: 0.560, data: 0.000) G_L1: 15.081 G_L1_ABSOLUTE: 3.131 G_L1_RELATIVE: 11.950 G_Regularizer: 0.000 validation_error: 20.759 +(epoch: 15, iters: 261472, time: 0.556, data: 0.000) G_L1: 12.990 G_L1_ABSOLUTE: 2.283 G_L1_RELATIVE: 10.707 G_Regularizer: 0.000 validation_error: 20.537 +(epoch: 15, iters: 263472, time: 0.540, data: 0.000) G_L1: 13.021 G_L1_ABSOLUTE: 2.313 G_L1_RELATIVE: 10.708 G_Regularizer: 0.000 validation_error: 20.941 +(epoch: 15, iters: 265472, time: 0.543, data: 0.001) G_L1: 14.745 G_L1_ABSOLUTE: 2.317 G_L1_RELATIVE: 12.428 G_Regularizer: 0.000 validation_error: 20.096 +(epoch: 15, iters: 267472, time: 0.559, data: 0.000) G_L1: 15.903 G_L1_ABSOLUTE: 2.576 G_L1_RELATIVE: 13.327 G_Regularizer: 0.000 validation_error: 20.914 +(epoch: 15, iters: 269472, time: 0.549, data: 0.000) G_L1: 16.743 G_L1_ABSOLUTE: 3.025 G_L1_RELATIVE: 13.718 G_Regularizer: 0.000 validation_error: 20.451 +(epoch: 15, iters: 271472, time: 0.541, data: 0.000) G_L1: 20.410 G_L1_ABSOLUTE: 2.658 G_L1_RELATIVE: 17.752 G_Regularizer: 0.000 validation_error: 20.430 +(epoch: 15, iters: 273472, time: 0.536, data: 0.000) G_L1: 13.005 G_L1_ABSOLUTE: 2.450 G_L1_RELATIVE: 10.555 G_Regularizer: 0.000 validation_error: 20.445 +(epoch: 15, iters: 275472, time: 0.553, data: 0.000) G_L1: 13.376 G_L1_ABSOLUTE: 2.345 G_L1_RELATIVE: 11.032 G_Regularizer: 0.000 validation_error: 20.873 +(epoch: 15, iters: 277472, time: 0.544, data: 0.000) G_L1: 15.083 G_L1_ABSOLUTE: 3.236 G_L1_RELATIVE: 11.846 G_Regularizer: 0.000 validation_error: 19.986 +(epoch: 15, iters: 279472, time: 0.560, data: 0.000) G_L1: 17.279 G_L1_ABSOLUTE: 2.514 G_L1_RELATIVE: 14.764 G_Regularizer: 0.000 validation_error: 20.893 +(epoch: 15, iters: 281472, time: 0.550, data: 0.000) G_L1: 17.204 G_L1_ABSOLUTE: 3.145 G_L1_RELATIVE: 14.059 G_Regularizer: 0.000 validation_error: 19.612 +(epoch: 15, iters: 283472, time: 0.554, data: 0.001) G_L1: 15.606 G_L1_ABSOLUTE: 2.927 G_L1_RELATIVE: 12.679 G_Regularizer: 0.000 validation_error: 20.637 +(epoch: 15, iters: 285472, time: 0.558, data: 0.000) G_L1: 14.598 G_L1_ABSOLUTE: 2.674 G_L1_RELATIVE: 11.925 G_Regularizer: 0.000 validation_error: 21.206 +(epoch: 15, iters: 287472, time: 0.549, data: 0.000) G_L1: 17.151 G_L1_ABSOLUTE: 3.517 G_L1_RELATIVE: 13.634 G_Regularizer: 0.000 validation_error: 20.366 +(epoch: 15, iters: 289472, time: 0.558, data: 0.001) G_L1: 14.643 G_L1_ABSOLUTE: 2.461 G_L1_RELATIVE: 12.182 G_Regularizer: 0.000 validation_error: 20.107 +(epoch: 15, iters: 291472, time: 0.533, data: 0.000) G_L1: 15.612 G_L1_ABSOLUTE: 2.245 G_L1_RELATIVE: 13.367 G_Regularizer: 0.000 validation_error: 20.902 +(epoch: 15, iters: 293472, time: 0.549, data: 0.000) G_L1: 15.987 G_L1_ABSOLUTE: 2.311 G_L1_RELATIVE: 13.675 G_Regularizer: 0.000 validation_error: 19.834 +(epoch: 15, iters: 295472, time: 0.547, data: 0.000) G_L1: 16.113 G_L1_ABSOLUTE: 2.685 G_L1_RELATIVE: 13.428 G_Regularizer: 0.000 validation_error: 20.233 +(epoch: 15, iters: 297472, time: 0.554, data: 0.000) G_L1: 14.056 G_L1_ABSOLUTE: 2.381 G_L1_RELATIVE: 11.675 G_Regularizer: 0.000 validation_error: 20.371 +(epoch: 15, iters: 299472, time: 0.539, data: 0.000) G_L1: 16.233 G_L1_ABSOLUTE: 2.467 G_L1_RELATIVE: 13.765 G_Regularizer: 0.000 validation_error: 20.800 +(epoch: 15, iters: 301472, time: 0.559, data: 0.000) 
G_L1: 12.279 G_L1_ABSOLUTE: 3.032 G_L1_RELATIVE: 9.247 G_Regularizer: 0.000 validation_error: 20.330 +(epoch: 16, iters: 720, time: 0.529, data: 0.000) G_L1: 16.885 G_L1_ABSOLUTE: 2.612 G_L1_RELATIVE: 14.273 G_Regularizer: 0.000 validation_error: 20.362 +(epoch: 16, iters: 2720, time: 0.560, data: 0.000) G_L1: 11.384 G_L1_ABSOLUTE: 2.176 G_L1_RELATIVE: 9.208 G_Regularizer: 0.000 validation_error: 20.468 +(epoch: 16, iters: 4720, time: 0.543, data: 0.000) G_L1: 17.008 G_L1_ABSOLUTE: 2.445 G_L1_RELATIVE: 14.563 G_Regularizer: 0.000 validation_error: 21.485 +(epoch: 16, iters: 6720, time: 0.554, data: 0.000) G_L1: 14.577 G_L1_ABSOLUTE: 3.018 G_L1_RELATIVE: 11.559 G_Regularizer: 0.000 validation_error: 20.296 +(epoch: 16, iters: 8720, time: 0.558, data: 0.000) G_L1: 17.480 G_L1_ABSOLUTE: 2.384 G_L1_RELATIVE: 15.096 G_Regularizer: 0.000 validation_error: 21.332 +(epoch: 16, iters: 10720, time: 0.554, data: 0.000) G_L1: 15.768 G_L1_ABSOLUTE: 2.519 G_L1_RELATIVE: 13.249 G_Regularizer: 0.000 validation_error: 20.545 +(epoch: 16, iters: 12720, time: 0.538, data: 0.000) G_L1: 15.892 G_L1_ABSOLUTE: 2.488 G_L1_RELATIVE: 13.403 G_Regularizer: 0.000 validation_error: 21.166 +(epoch: 16, iters: 14720, time: 0.555, data: 0.000) G_L1: 19.379 G_L1_ABSOLUTE: 2.667 G_L1_RELATIVE: 16.712 G_Regularizer: 0.000 validation_error: 20.631 +(epoch: 16, iters: 16720, time: 0.550, data: 0.000) G_L1: 17.897 G_L1_ABSOLUTE: 3.307 G_L1_RELATIVE: 14.590 G_Regularizer: 0.000 validation_error: 21.294 +(epoch: 16, iters: 18720, time: 0.551, data: 0.000) G_L1: 15.584 G_L1_ABSOLUTE: 2.989 G_L1_RELATIVE: 12.595 G_Regularizer: 0.000 validation_error: 20.881 +(epoch: 16, iters: 20720, time: 0.553, data: 0.000) G_L1: 14.203 G_L1_ABSOLUTE: 2.741 G_L1_RELATIVE: 11.462 G_Regularizer: 0.000 validation_error: 21.029 +(epoch: 16, iters: 22720, time: 0.539, data: 0.000) G_L1: 14.078 G_L1_ABSOLUTE: 2.732 G_L1_RELATIVE: 11.347 G_Regularizer: 0.000 validation_error: 20.881 +(epoch: 16, iters: 24720, time: 0.558, data: 0.000) G_L1: 16.643 G_L1_ABSOLUTE: 3.107 G_L1_RELATIVE: 13.536 G_Regularizer: 0.000 validation_error: 20.228 +(epoch: 16, iters: 26720, time: 0.563, data: 0.000) G_L1: 13.955 G_L1_ABSOLUTE: 2.099 G_L1_RELATIVE: 11.856 G_Regularizer: 0.000 validation_error: 20.996 +(epoch: 16, iters: 28720, time: 0.557, data: 0.000) G_L1: 14.943 G_L1_ABSOLUTE: 2.349 G_L1_RELATIVE: 12.594 G_Regularizer: 0.000 validation_error: 21.253 +(epoch: 16, iters: 30720, time: 0.559, data: 0.000) G_L1: 15.303 G_L1_ABSOLUTE: 2.576 G_L1_RELATIVE: 12.728 G_Regularizer: 0.000 validation_error: 21.008 +(epoch: 16, iters: 32720, time: 0.568, data: 0.000) G_L1: 16.298 G_L1_ABSOLUTE: 2.573 G_L1_RELATIVE: 13.725 G_Regularizer: 0.000 validation_error: 20.816 +(epoch: 16, iters: 34720, time: 0.556, data: 0.000) G_L1: 21.052 G_L1_ABSOLUTE: 3.165 G_L1_RELATIVE: 17.887 G_Regularizer: 0.000 validation_error: 20.725 +(epoch: 16, iters: 36720, time: 0.562, data: 0.000) G_L1: 15.074 G_L1_ABSOLUTE: 2.814 G_L1_RELATIVE: 12.260 G_Regularizer: 0.000 validation_error: 20.764 +(epoch: 16, iters: 38720, time: 0.535, data: 0.000) G_L1: 14.326 G_L1_ABSOLUTE: 2.669 G_L1_RELATIVE: 11.658 G_Regularizer: 0.000 validation_error: 20.344 +(epoch: 16, iters: 40720, time: 0.564, data: 0.000) G_L1: 17.524 G_L1_ABSOLUTE: 3.234 G_L1_RELATIVE: 14.290 G_Regularizer: 0.000 validation_error: 21.422 +(epoch: 16, iters: 42720, time: 0.564, data: 0.000) G_L1: 14.633 G_L1_ABSOLUTE: 2.408 G_L1_RELATIVE: 12.225 G_Regularizer: 0.000 validation_error: 20.699 +(epoch: 16, iters: 44720, time: 0.559, data: 
0.000) G_L1: 17.483 G_L1_ABSOLUTE: 2.863 G_L1_RELATIVE: 14.620 G_Regularizer: 0.000 validation_error: 19.984 +(epoch: 16, iters: 46720, time: 0.531, data: 0.000) G_L1: 15.632 G_L1_ABSOLUTE: 2.662 G_L1_RELATIVE: 12.970 G_Regularizer: 0.000 validation_error: 20.311 +(epoch: 16, iters: 48720, time: 0.536, data: 0.000) G_L1: 16.441 G_L1_ABSOLUTE: 2.882 G_L1_RELATIVE: 13.559 G_Regularizer: 0.000 validation_error: 20.579 +(epoch: 16, iters: 50720, time: 0.551, data: 0.000) G_L1: 14.654 G_L1_ABSOLUTE: 2.463 G_L1_RELATIVE: 12.191 G_Regularizer: 0.000 validation_error: 20.602 +(epoch: 16, iters: 52720, time: 0.551, data: 0.000) G_L1: 13.702 G_L1_ABSOLUTE: 2.484 G_L1_RELATIVE: 11.218 G_Regularizer: 0.000 validation_error: 20.571 +(epoch: 16, iters: 54720, time: 0.544, data: 0.000) G_L1: 15.200 G_L1_ABSOLUTE: 2.389 G_L1_RELATIVE: 12.811 G_Regularizer: 0.000 validation_error: 20.317 +(epoch: 16, iters: 56720, time: 0.542, data: 0.000) G_L1: 13.088 G_L1_ABSOLUTE: 2.363 G_L1_RELATIVE: 10.725 G_Regularizer: 0.000 validation_error: 20.112 +(epoch: 16, iters: 58720, time: 0.567, data: 0.000) G_L1: 15.605 G_L1_ABSOLUTE: 2.504 G_L1_RELATIVE: 13.101 G_Regularizer: 0.000 validation_error: 20.572 +(epoch: 16, iters: 60720, time: 0.558, data: 0.000) G_L1: 15.511 G_L1_ABSOLUTE: 2.431 G_L1_RELATIVE: 13.080 G_Regularizer: 0.000 validation_error: 20.713 +(epoch: 16, iters: 62720, time: 0.548, data: 0.000) G_L1: 16.407 G_L1_ABSOLUTE: 2.917 G_L1_RELATIVE: 13.489 G_Regularizer: 0.000 validation_error: 20.244 +(epoch: 16, iters: 64720, time: 0.553, data: 0.000) G_L1: 15.366 G_L1_ABSOLUTE: 2.460 G_L1_RELATIVE: 12.907 G_Regularizer: 0.000 validation_error: 20.309 +(epoch: 16, iters: 66720, time: 0.557, data: 0.000) G_L1: 18.665 G_L1_ABSOLUTE: 2.695 G_L1_RELATIVE: 15.969 G_Regularizer: 0.000 validation_error: 20.790 +(epoch: 16, iters: 68720, time: 0.557, data: 0.000) G_L1: 13.910 G_L1_ABSOLUTE: 2.293 G_L1_RELATIVE: 11.617 G_Regularizer: 0.000 validation_error: 21.205 +(epoch: 16, iters: 70720, time: 0.553, data: 0.000) G_L1: 14.873 G_L1_ABSOLUTE: 2.424 G_L1_RELATIVE: 12.449 G_Regularizer: 0.000 validation_error: 20.823 +(epoch: 16, iters: 72720, time: 0.543, data: 0.000) G_L1: 13.579 G_L1_ABSOLUTE: 2.212 G_L1_RELATIVE: 11.367 G_Regularizer: 0.000 validation_error: 20.463 +(epoch: 16, iters: 74720, time: 0.537, data: 0.000) G_L1: 15.235 G_L1_ABSOLUTE: 2.624 G_L1_RELATIVE: 12.611 G_Regularizer: 0.000 validation_error: 20.706 +(epoch: 16, iters: 76720, time: 0.550, data: 0.000) G_L1: 14.233 G_L1_ABSOLUTE: 2.180 G_L1_RELATIVE: 12.053 G_Regularizer: 0.000 validation_error: 19.776 +(epoch: 16, iters: 78720, time: 0.564, data: 0.000) G_L1: 13.733 G_L1_ABSOLUTE: 2.492 G_L1_RELATIVE: 11.241 G_Regularizer: 0.000 validation_error: 20.120 +(epoch: 16, iters: 80720, time: 0.533, data: 0.000) G_L1: 14.295 G_L1_ABSOLUTE: 2.713 G_L1_RELATIVE: 11.582 G_Regularizer: 0.000 validation_error: 21.179 +(epoch: 16, iters: 82720, time: 0.551, data: 0.000) G_L1: 14.630 G_L1_ABSOLUTE: 2.799 G_L1_RELATIVE: 11.831 G_Regularizer: 0.000 validation_error: 20.673 +(epoch: 16, iters: 84720, time: 0.559, data: 0.000) G_L1: 23.292 G_L1_ABSOLUTE: 2.741 G_L1_RELATIVE: 20.550 G_Regularizer: 0.000 validation_error: 19.963 +(epoch: 16, iters: 86720, time: 0.557, data: 0.000) G_L1: 16.564 G_L1_ABSOLUTE: 2.852 G_L1_RELATIVE: 13.713 G_Regularizer: 0.000 validation_error: 20.823 +(epoch: 16, iters: 88720, time: 0.532, data: 0.000) G_L1: 17.371 G_L1_ABSOLUTE: 2.845 G_L1_RELATIVE: 14.526 G_Regularizer: 0.000 validation_error: 20.331 +(epoch: 16, iters: 90720, time: 
0.561, data: 0.001) G_L1: 16.049 G_L1_ABSOLUTE: 2.719 G_L1_RELATIVE: 13.330 G_Regularizer: 0.000 validation_error: 20.813 +(epoch: 16, iters: 92720, time: 0.558, data: 0.003) G_L1: 13.792 G_L1_ABSOLUTE: 2.335 G_L1_RELATIVE: 11.457 G_Regularizer: 0.000 validation_error: 20.620 +(epoch: 16, iters: 94720, time: 0.558, data: 0.000) G_L1: 16.870 G_L1_ABSOLUTE: 2.993 G_L1_RELATIVE: 13.877 G_Regularizer: 0.000 validation_error: 21.130 +(epoch: 16, iters: 96720, time: 0.547, data: 0.000) G_L1: 14.726 G_L1_ABSOLUTE: 2.751 G_L1_RELATIVE: 11.975 G_Regularizer: 0.000 validation_error: 20.240 +(epoch: 16, iters: 98720, time: 0.558, data: 0.000) G_L1: 17.494 G_L1_ABSOLUTE: 3.166 G_L1_RELATIVE: 14.327 G_Regularizer: 0.000 validation_error: 20.665 +(epoch: 16, iters: 100720, time: 0.561, data: 0.000) G_L1: 18.075 G_L1_ABSOLUTE: 3.031 G_L1_RELATIVE: 15.044 G_Regularizer: 0.000 validation_error: 20.292 +(epoch: 16, iters: 102720, time: 0.558, data: 0.000) G_L1: 16.573 G_L1_ABSOLUTE: 2.590 G_L1_RELATIVE: 13.983 G_Regularizer: 0.000 validation_error: 20.221 +(epoch: 16, iters: 104720, time: 0.541, data: 0.000) G_L1: 15.126 G_L1_ABSOLUTE: 2.436 G_L1_RELATIVE: 12.690 G_Regularizer: 0.000 validation_error: 20.883 +(epoch: 16, iters: 106720, time: 0.535, data: 0.000) G_L1: 15.484 G_L1_ABSOLUTE: 2.385 G_L1_RELATIVE: 13.099 G_Regularizer: 0.000 validation_error: 20.765 +(epoch: 16, iters: 108720, time: 0.548, data: 0.000) G_L1: 17.463 G_L1_ABSOLUTE: 2.813 G_L1_RELATIVE: 14.650 G_Regularizer: 0.000 validation_error: 21.038 +(epoch: 16, iters: 110720, time: 0.556, data: 0.000) G_L1: 13.720 G_L1_ABSOLUTE: 3.238 G_L1_RELATIVE: 10.482 G_Regularizer: 0.000 validation_error: 20.202 +(epoch: 16, iters: 112720, time: 0.544, data: 0.000) G_L1: 15.258 G_L1_ABSOLUTE: 2.496 G_L1_RELATIVE: 12.762 G_Regularizer: 0.000 validation_error: 20.320 +(epoch: 16, iters: 114720, time: 0.544, data: 0.000) G_L1: 15.533 G_L1_ABSOLUTE: 2.505 G_L1_RELATIVE: 13.028 G_Regularizer: 0.000 validation_error: 21.620 +(epoch: 16, iters: 116720, time: 0.545, data: 0.000) G_L1: 14.462 G_L1_ABSOLUTE: 2.633 G_L1_RELATIVE: 11.830 G_Regularizer: 0.000 validation_error: 20.455 +(epoch: 16, iters: 118720, time: 0.560, data: 0.000) G_L1: 13.131 G_L1_ABSOLUTE: 2.765 G_L1_RELATIVE: 10.367 G_Regularizer: 0.000 validation_error: 20.187 +(epoch: 16, iters: 120720, time: 0.548, data: 0.000) G_L1: 15.572 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 13.118 G_Regularizer: 0.000 validation_error: 20.012 +(epoch: 16, iters: 122720, time: 0.534, data: 0.000) G_L1: 16.460 G_L1_ABSOLUTE: 2.468 G_L1_RELATIVE: 13.993 G_Regularizer: 0.000 validation_error: 20.167 +(epoch: 16, iters: 124720, time: 0.550, data: 0.000) G_L1: 14.460 G_L1_ABSOLUTE: 2.809 G_L1_RELATIVE: 11.651 G_Regularizer: 0.000 validation_error: 19.905 +(epoch: 16, iters: 126720, time: 0.555, data: 0.001) G_L1: 13.489 G_L1_ABSOLUTE: 2.465 G_L1_RELATIVE: 11.024 G_Regularizer: 0.000 validation_error: 20.026 +(epoch: 16, iters: 128720, time: 0.559, data: 0.000) G_L1: 14.814 G_L1_ABSOLUTE: 2.905 G_L1_RELATIVE: 11.909 G_Regularizer: 0.000 validation_error: 20.606 +(epoch: 16, iters: 130720, time: 0.553, data: 0.000) G_L1: 16.321 G_L1_ABSOLUTE: 2.321 G_L1_RELATIVE: 13.999 G_Regularizer: 0.000 validation_error: 20.522 +(epoch: 16, iters: 132720, time: 0.554, data: 0.000) G_L1: 15.255 G_L1_ABSOLUTE: 2.898 G_L1_RELATIVE: 12.357 G_Regularizer: 0.000 validation_error: 20.939 +(epoch: 16, iters: 134720, time: 0.537, data: 0.000) G_L1: 17.156 G_L1_ABSOLUTE: 3.070 G_L1_RELATIVE: 14.086 G_Regularizer: 0.000 validation_error: 20.517 
+(epoch: 16, iters: 136720, time: 0.544, data: 0.000) G_L1: 12.925 G_L1_ABSOLUTE: 2.388 G_L1_RELATIVE: 10.536 G_Regularizer: 0.000 validation_error: 20.453 +(epoch: 16, iters: 138720, time: 0.536, data: 0.000) G_L1: 16.760 G_L1_ABSOLUTE: 3.422 G_L1_RELATIVE: 13.338 G_Regularizer: 0.000 validation_error: 20.411 +(epoch: 16, iters: 140720, time: 0.556, data: 0.000) G_L1: 13.775 G_L1_ABSOLUTE: 2.383 G_L1_RELATIVE: 11.392 G_Regularizer: 0.000 validation_error: 20.108 +(epoch: 16, iters: 142720, time: 0.529, data: 0.000) G_L1: 15.551 G_L1_ABSOLUTE: 2.437 G_L1_RELATIVE: 13.114 G_Regularizer: 0.000 validation_error: 20.792 +(epoch: 16, iters: 144720, time: 0.537, data: 0.000) G_L1: 13.227 G_L1_ABSOLUTE: 2.934 G_L1_RELATIVE: 10.293 G_Regularizer: 0.000 validation_error: 21.079 +(epoch: 16, iters: 146720, time: 0.555, data: 0.000) G_L1: 14.019 G_L1_ABSOLUTE: 2.980 G_L1_RELATIVE: 11.039 G_Regularizer: 0.000 validation_error: 20.804 +(epoch: 16, iters: 148720, time: 0.553, data: 0.000) G_L1: 17.428 G_L1_ABSOLUTE: 3.080 G_L1_RELATIVE: 14.348 G_Regularizer: 0.000 validation_error: 21.541 +(epoch: 16, iters: 150720, time: 0.536, data: 0.000) G_L1: 11.972 G_L1_ABSOLUTE: 2.114 G_L1_RELATIVE: 9.858 G_Regularizer: 0.000 validation_error: 20.913 +(epoch: 16, iters: 152720, time: 0.548, data: 0.000) G_L1: 15.307 G_L1_ABSOLUTE: 2.746 G_L1_RELATIVE: 12.560 G_Regularizer: 0.000 validation_error: 20.811 +(epoch: 16, iters: 154720, time: 0.543, data: 0.000) G_L1: 14.417 G_L1_ABSOLUTE: 2.629 G_L1_RELATIVE: 11.788 G_Regularizer: 0.000 validation_error: 20.534 +(epoch: 16, iters: 156720, time: 0.546, data: 0.000) G_L1: 16.045 G_L1_ABSOLUTE: 2.702 G_L1_RELATIVE: 13.343 G_Regularizer: 0.000 validation_error: 20.342 +(epoch: 16, iters: 158720, time: 0.558, data: 0.000) G_L1: 16.841 G_L1_ABSOLUTE: 2.627 G_L1_RELATIVE: 14.213 G_Regularizer: 0.000 validation_error: 20.771 +(epoch: 16, iters: 160720, time: 0.547, data: 0.000) G_L1: 15.947 G_L1_ABSOLUTE: 2.038 G_L1_RELATIVE: 13.909 G_Regularizer: 0.000 validation_error: 19.876 +(epoch: 16, iters: 162720, time: 0.537, data: 0.000) G_L1: 16.784 G_L1_ABSOLUTE: 2.912 G_L1_RELATIVE: 13.872 G_Regularizer: 0.000 validation_error: 20.312 +(epoch: 16, iters: 164720, time: 0.539, data: 0.000) G_L1: 18.464 G_L1_ABSOLUTE: 2.804 G_L1_RELATIVE: 15.660 G_Regularizer: 0.000 validation_error: 21.184 +(epoch: 16, iters: 166720, time: 0.560, data: 0.000) G_L1: 15.393 G_L1_ABSOLUTE: 2.394 G_L1_RELATIVE: 12.999 G_Regularizer: 0.000 validation_error: 21.163 +(epoch: 16, iters: 168720, time: 0.558, data: 0.000) G_L1: 14.255 G_L1_ABSOLUTE: 2.561 G_L1_RELATIVE: 11.694 G_Regularizer: 0.000 validation_error: 20.915 +(epoch: 16, iters: 170720, time: 0.553, data: 0.000) G_L1: 17.315 G_L1_ABSOLUTE: 2.553 G_L1_RELATIVE: 14.761 G_Regularizer: 0.000 validation_error: 20.720 +(epoch: 16, iters: 172720, time: 0.532, data: 0.000) G_L1: 15.116 G_L1_ABSOLUTE: 2.186 G_L1_RELATIVE: 12.930 G_Regularizer: 0.000 validation_error: 21.568 +(epoch: 16, iters: 174720, time: 0.533, data: 0.000) G_L1: 16.115 G_L1_ABSOLUTE: 2.731 G_L1_RELATIVE: 13.384 G_Regularizer: 0.000 validation_error: 20.969 +(epoch: 16, iters: 176720, time: 0.537, data: 0.000) G_L1: 14.471 G_L1_ABSOLUTE: 2.824 G_L1_RELATIVE: 11.647 G_Regularizer: 0.000 validation_error: 20.380 +(epoch: 16, iters: 178720, time: 0.556, data: 0.000) G_L1: 14.840 G_L1_ABSOLUTE: 2.513 G_L1_RELATIVE: 12.327 G_Regularizer: 0.000 validation_error: 20.935 +(epoch: 16, iters: 180720, time: 0.548, data: 0.000) G_L1: 15.483 G_L1_ABSOLUTE: 2.539 G_L1_RELATIVE: 12.944 
G_Regularizer: 0.000 validation_error: 20.739 +(epoch: 16, iters: 182720, time: 0.561, data: 0.000) G_L1: 13.437 G_L1_ABSOLUTE: 2.957 G_L1_RELATIVE: 10.480 G_Regularizer: 0.000 validation_error: 20.717 +(epoch: 16, iters: 184720, time: 0.557, data: 0.000) G_L1: 13.706 G_L1_ABSOLUTE: 2.864 G_L1_RELATIVE: 10.842 G_Regularizer: 0.000 validation_error: 20.704 +(epoch: 16, iters: 186720, time: 0.543, data: 0.000) G_L1: 13.478 G_L1_ABSOLUTE: 3.309 G_L1_RELATIVE: 10.168 G_Regularizer: 0.000 validation_error: 20.468 +(epoch: 16, iters: 188720, time: 0.546, data: 0.000) G_L1: 14.468 G_L1_ABSOLUTE: 2.547 G_L1_RELATIVE: 11.921 G_Regularizer: 0.000 validation_error: 20.320 +(epoch: 16, iters: 190720, time: 0.560, data: 0.000) G_L1: 13.578 G_L1_ABSOLUTE: 2.673 G_L1_RELATIVE: 10.905 G_Regularizer: 0.000 validation_error: 20.743 +(epoch: 16, iters: 192720, time: 0.555, data: 0.000) G_L1: 15.430 G_L1_ABSOLUTE: 2.612 G_L1_RELATIVE: 12.818 G_Regularizer: 0.000 validation_error: 20.691 +(epoch: 16, iters: 194720, time: 0.552, data: 0.000) G_L1: 17.654 G_L1_ABSOLUTE: 2.565 G_L1_RELATIVE: 15.089 G_Regularizer: 0.000 validation_error: 20.685 +(epoch: 16, iters: 196720, time: 0.546, data: 0.000) G_L1: 12.817 G_L1_ABSOLUTE: 2.246 G_L1_RELATIVE: 10.571 G_Regularizer: 0.000 validation_error: 19.939 +(epoch: 16, iters: 198720, time: 0.536, data: 0.000) G_L1: 16.361 G_L1_ABSOLUTE: 2.800 G_L1_RELATIVE: 13.561 G_Regularizer: 0.000 validation_error: 21.146 +(epoch: 16, iters: 200720, time: 0.532, data: 0.000) G_L1: 13.658 G_L1_ABSOLUTE: 2.606 G_L1_RELATIVE: 11.052 G_Regularizer: 0.000 validation_error: 21.031 +(epoch: 16, iters: 202720, time: 0.552, data: 0.000) G_L1: 16.023 G_L1_ABSOLUTE: 2.360 G_L1_RELATIVE: 13.663 G_Regularizer: 0.000 validation_error: 21.128 +(epoch: 16, iters: 204720, time: 0.561, data: 0.000) G_L1: 15.265 G_L1_ABSOLUTE: 3.236 G_L1_RELATIVE: 12.028 G_Regularizer: 0.000 validation_error: 20.401 +(epoch: 16, iters: 206720, time: 0.528, data: 0.000) G_L1: 16.003 G_L1_ABSOLUTE: 2.492 G_L1_RELATIVE: 13.511 G_Regularizer: 0.000 validation_error: 20.587 +(epoch: 16, iters: 208720, time: 0.539, data: 0.000) G_L1: 14.003 G_L1_ABSOLUTE: 2.453 G_L1_RELATIVE: 11.550 G_Regularizer: 0.000 validation_error: 19.942 +(epoch: 16, iters: 210720, time: 0.548, data: 0.000) G_L1: 16.765 G_L1_ABSOLUTE: 3.080 G_L1_RELATIVE: 13.686 G_Regularizer: 0.000 validation_error: 20.114 +(epoch: 16, iters: 212720, time: 0.536, data: 0.000) G_L1: 13.262 G_L1_ABSOLUTE: 2.813 G_L1_RELATIVE: 10.449 G_Regularizer: 0.000 validation_error: 20.501 +(epoch: 16, iters: 214720, time: 0.533, data: 0.000) G_L1: 16.756 G_L1_ABSOLUTE: 2.891 G_L1_RELATIVE: 13.865 G_Regularizer: 0.000 validation_error: 20.385 +(epoch: 16, iters: 216720, time: 0.555, data: 0.000) G_L1: 15.796 G_L1_ABSOLUTE: 2.802 G_L1_RELATIVE: 12.994 G_Regularizer: 0.000 validation_error: 20.688 +(epoch: 16, iters: 218720, time: 0.552, data: 0.000) G_L1: 14.234 G_L1_ABSOLUTE: 2.696 G_L1_RELATIVE: 11.538 G_Regularizer: 0.000 validation_error: 20.677 +(epoch: 16, iters: 220720, time: 0.559, data: 0.000) G_L1: 19.184 G_L1_ABSOLUTE: 3.437 G_L1_RELATIVE: 15.747 G_Regularizer: 0.000 validation_error: 20.367 +(epoch: 16, iters: 222720, time: 0.545, data: 0.000) G_L1: 14.050 G_L1_ABSOLUTE: 2.625 G_L1_RELATIVE: 11.424 G_Regularizer: 0.000 validation_error: 20.554 +(epoch: 16, iters: 224720, time: 0.559, data: 0.000) G_L1: 14.392 G_L1_ABSOLUTE: 2.542 G_L1_RELATIVE: 11.850 G_Regularizer: 0.000 validation_error: 20.261 +(epoch: 16, iters: 226720, time: 0.560, data: 0.000) G_L1: 16.596 
G_L1_ABSOLUTE: 2.899 G_L1_RELATIVE: 13.697 G_Regularizer: 0.000 validation_error: 20.415 +(epoch: 16, iters: 228720, time: 0.563, data: 0.000) G_L1: 13.605 G_L1_ABSOLUTE: 2.530 G_L1_RELATIVE: 11.075 G_Regularizer: 0.000 validation_error: 20.021 +(epoch: 16, iters: 230720, time: 0.550, data: 0.000) G_L1: 15.686 G_L1_ABSOLUTE: 2.408 G_L1_RELATIVE: 13.278 G_Regularizer: 0.000 validation_error: 21.021 +(epoch: 16, iters: 232720, time: 0.558, data: 0.000) G_L1: 14.688 G_L1_ABSOLUTE: 2.489 G_L1_RELATIVE: 12.199 G_Regularizer: 0.000 validation_error: 19.775 +(epoch: 16, iters: 234720, time: 0.563, data: 0.002) G_L1: 15.675 G_L1_ABSOLUTE: 2.515 G_L1_RELATIVE: 13.160 G_Regularizer: 0.000 validation_error: 20.201 +(epoch: 16, iters: 236720, time: 0.544, data: 0.000) G_L1: 14.422 G_L1_ABSOLUTE: 2.540 G_L1_RELATIVE: 11.882 G_Regularizer: 0.000 validation_error: 20.182 +(epoch: 16, iters: 238720, time: 0.554, data: 0.000) G_L1: 15.008 G_L1_ABSOLUTE: 2.493 G_L1_RELATIVE: 12.515 G_Regularizer: 0.000 validation_error: 20.468 +(epoch: 16, iters: 240720, time: 0.540, data: 0.000) G_L1: 14.674 G_L1_ABSOLUTE: 2.837 G_L1_RELATIVE: 11.837 G_Regularizer: 0.000 validation_error: 20.745 +(epoch: 16, iters: 242720, time: 0.548, data: 0.000) G_L1: 14.527 G_L1_ABSOLUTE: 2.804 G_L1_RELATIVE: 11.723 G_Regularizer: 0.000 validation_error: 21.132 +(epoch: 16, iters: 244720, time: 0.545, data: 0.000) G_L1: 12.441 G_L1_ABSOLUTE: 2.408 G_L1_RELATIVE: 10.033 G_Regularizer: 0.000 validation_error: 20.225 +(epoch: 16, iters: 246720, time: 0.560, data: 0.001) G_L1: 13.330 G_L1_ABSOLUTE: 2.256 G_L1_RELATIVE: 11.074 G_Regularizer: 0.000 validation_error: 20.863 +(epoch: 16, iters: 248720, time: 0.559, data: 0.000) G_L1: 19.203 G_L1_ABSOLUTE: 3.099 G_L1_RELATIVE: 16.104 G_Regularizer: 0.000 validation_error: 20.244 +(epoch: 16, iters: 250720, time: 0.553, data: 0.000) G_L1: 15.022 G_L1_ABSOLUTE: 2.514 G_L1_RELATIVE: 12.508 G_Regularizer: 0.000 validation_error: 20.288 +(epoch: 16, iters: 252720, time: 0.529, data: 0.000) G_L1: 16.017 G_L1_ABSOLUTE: 2.756 G_L1_RELATIVE: 13.261 G_Regularizer: 0.000 validation_error: 20.707 +(epoch: 16, iters: 254720, time: 0.548, data: 0.000) G_L1: 13.318 G_L1_ABSOLUTE: 2.232 G_L1_RELATIVE: 11.086 G_Regularizer: 0.000 validation_error: 20.183 +(epoch: 16, iters: 256720, time: 0.541, data: 0.000) G_L1: 14.801 G_L1_ABSOLUTE: 3.037 G_L1_RELATIVE: 11.764 G_Regularizer: 0.000 validation_error: 20.579 +(epoch: 16, iters: 258720, time: 0.563, data: 0.000) G_L1: 13.756 G_L1_ABSOLUTE: 2.582 G_L1_RELATIVE: 11.174 G_Regularizer: 0.000 validation_error: 19.838 +(epoch: 16, iters: 260720, time: 0.558, data: 0.000) G_L1: 17.510 G_L1_ABSOLUTE: 2.605 G_L1_RELATIVE: 14.905 G_Regularizer: 0.000 validation_error: 19.520 +(epoch: 16, iters: 262720, time: 0.555, data: 0.001) G_L1: 13.722 G_L1_ABSOLUTE: 2.824 G_L1_RELATIVE: 10.898 G_Regularizer: 0.000 validation_error: 20.350 +(epoch: 16, iters: 264720, time: 0.534, data: 0.000) G_L1: 15.876 G_L1_ABSOLUTE: 2.638 G_L1_RELATIVE: 13.237 G_Regularizer: 0.000 validation_error: 20.914 +(epoch: 16, iters: 266720, time: 0.559, data: 0.000) G_L1: 15.286 G_L1_ABSOLUTE: 2.774 G_L1_RELATIVE: 12.513 G_Regularizer: 0.000 validation_error: 19.742 +(epoch: 16, iters: 268720, time: 0.545, data: 0.000) G_L1: 14.128 G_L1_ABSOLUTE: 2.776 G_L1_RELATIVE: 11.353 G_Regularizer: 0.000 validation_error: 21.087 +(epoch: 16, iters: 270720, time: 0.555, data: 0.000) G_L1: 17.456 G_L1_ABSOLUTE: 3.059 G_L1_RELATIVE: 14.397 G_Regularizer: 0.000 validation_error: 19.679 +(epoch: 16, iters: 272720, time: 
0.540, data: 0.000) G_L1: 15.453 G_L1_ABSOLUTE: 2.924 G_L1_RELATIVE: 12.529 G_Regularizer: 0.000 validation_error: 20.869 +(epoch: 16, iters: 274720, time: 0.555, data: 0.000) G_L1: 17.890 G_L1_ABSOLUTE: 3.044 G_L1_RELATIVE: 14.845 G_Regularizer: 0.000 validation_error: 20.695 +(epoch: 16, iters: 276720, time: 0.549, data: 0.000) G_L1: 16.135 G_L1_ABSOLUTE: 2.881 G_L1_RELATIVE: 13.254 G_Regularizer: 0.000 validation_error: 20.979 +(epoch: 16, iters: 278720, time: 0.556, data: 0.000) G_L1: 15.921 G_L1_ABSOLUTE: 2.814 G_L1_RELATIVE: 13.107 G_Regularizer: 0.000 validation_error: 20.116 +(epoch: 16, iters: 280720, time: 0.553, data: 0.000) G_L1: 13.633 G_L1_ABSOLUTE: 3.046 G_L1_RELATIVE: 10.587 G_Regularizer: 0.000 validation_error: 20.333 +(epoch: 16, iters: 282720, time: 0.564, data: 0.000) G_L1: 13.566 G_L1_ABSOLUTE: 2.402 G_L1_RELATIVE: 11.164 G_Regularizer: 0.000 validation_error: 21.004 +(epoch: 16, iters: 284720, time: 0.552, data: 0.000) G_L1: 15.336 G_L1_ABSOLUTE: 2.825 G_L1_RELATIVE: 12.511 G_Regularizer: 0.000 validation_error: 20.648 +(epoch: 16, iters: 286720, time: 0.560, data: 0.000) G_L1: 13.536 G_L1_ABSOLUTE: 2.856 G_L1_RELATIVE: 10.680 G_Regularizer: 0.000 validation_error: 20.155 +(epoch: 16, iters: 288720, time: 0.538, data: 0.000) G_L1: 23.352 G_L1_ABSOLUTE: 2.919 G_L1_RELATIVE: 20.433 G_Regularizer: 0.000 validation_error: 20.100 +(epoch: 16, iters: 290720, time: 0.537, data: 0.000) G_L1: 13.752 G_L1_ABSOLUTE: 2.619 G_L1_RELATIVE: 11.133 G_Regularizer: 0.000 validation_error: 20.518 +(epoch: 16, iters: 292720, time: 0.546, data: 0.000) G_L1: 14.433 G_L1_ABSOLUTE: 2.574 G_L1_RELATIVE: 11.859 G_Regularizer: 0.000 validation_error: 20.688 +(epoch: 16, iters: 294720, time: 0.563, data: 0.000) G_L1: 15.785 G_L1_ABSOLUTE: 2.950 G_L1_RELATIVE: 12.834 G_Regularizer: 0.000 validation_error: 20.279 +(epoch: 16, iters: 296720, time: 0.559, data: 0.001) G_L1: 14.784 G_L1_ABSOLUTE: 2.977 G_L1_RELATIVE: 11.807 G_Regularizer: 0.000 validation_error: 20.326 +(epoch: 16, iters: 298720, time: 0.528, data: 0.000) G_L1: 12.828 G_L1_ABSOLUTE: 2.411 G_L1_RELATIVE: 10.417 G_Regularizer: 0.000 validation_error: 20.903 +(epoch: 16, iters: 300720, time: 0.553, data: 0.000) G_L1: 17.238 G_L1_ABSOLUTE: 3.026 G_L1_RELATIVE: 14.212 G_Regularizer: 0.000 validation_error: 20.386 +(epoch: 16, iters: 302720, time: 0.557, data: 0.000) G_L1: 15.511 G_L1_ABSOLUTE: 2.908 G_L1_RELATIVE: 12.603 G_Regularizer: 0.000 validation_error: 20.427 +(epoch: 17, iters: 1968, time: 0.557, data: 0.000) G_L1: 11.920 G_L1_ABSOLUTE: 2.824 G_L1_RELATIVE: 9.096 G_Regularizer: 0.000 validation_error: 19.892 +(epoch: 17, iters: 3968, time: 0.546, data: 0.000) G_L1: 19.525 G_L1_ABSOLUTE: 2.653 G_L1_RELATIVE: 16.872 G_Regularizer: 0.000 validation_error: 19.894 +(epoch: 17, iters: 5968, time: 0.560, data: 0.000) G_L1: 16.621 G_L1_ABSOLUTE: 2.632 G_L1_RELATIVE: 13.989 G_Regularizer: 0.000 validation_error: 20.372 +(epoch: 17, iters: 7968, time: 0.554, data: 0.000) G_L1: 15.289 G_L1_ABSOLUTE: 2.769 G_L1_RELATIVE: 12.519 G_Regularizer: 0.000 validation_error: 20.667 +(epoch: 17, iters: 9968, time: 0.558, data: 0.000) G_L1: 15.042 G_L1_ABSOLUTE: 2.259 G_L1_RELATIVE: 12.783 G_Regularizer: 0.000 validation_error: 20.298 +(epoch: 17, iters: 11968, time: 0.537, data: 0.000) G_L1: 14.694 G_L1_ABSOLUTE: 2.651 G_L1_RELATIVE: 12.043 G_Regularizer: 0.000 validation_error: 20.377 +(epoch: 17, iters: 13968, time: 0.563, data: 0.000) G_L1: 15.037 G_L1_ABSOLUTE: 2.314 G_L1_RELATIVE: 12.722 G_Regularizer: 0.000 validation_error: 20.276 +(epoch: 17, 
iters: 15968, time: 0.552, data: 0.000) G_L1: 13.194 G_L1_ABSOLUTE: 2.869 G_L1_RELATIVE: 10.325 G_Regularizer: 0.000 validation_error: 21.020 +(epoch: 17, iters: 17968, time: 0.559, data: 0.000) G_L1: 18.673 G_L1_ABSOLUTE: 2.781 G_L1_RELATIVE: 15.892 G_Regularizer: 0.000 validation_error: 20.261 +(epoch: 17, iters: 19968, time: 0.549, data: 0.000) G_L1: 15.147 G_L1_ABSOLUTE: 3.294 G_L1_RELATIVE: 11.852 G_Regularizer: 0.000 validation_error: 20.494 +(epoch: 17, iters: 21968, time: 0.534, data: 0.000) G_L1: 12.430 G_L1_ABSOLUTE: 2.341 G_L1_RELATIVE: 10.090 G_Regularizer: 0.000 validation_error: 20.157 +(epoch: 17, iters: 23968, time: 0.538, data: 0.000) G_L1: 15.988 G_L1_ABSOLUTE: 2.412 G_L1_RELATIVE: 13.576 G_Regularizer: 0.000 validation_error: 20.552 +(epoch: 17, iters: 25968, time: 0.540, data: 0.000) G_L1: 16.052 G_L1_ABSOLUTE: 3.089 G_L1_RELATIVE: 12.962 G_Regularizer: 0.000 validation_error: 20.218 +(epoch: 17, iters: 27968, time: 0.554, data: 0.000) G_L1: 15.197 G_L1_ABSOLUTE: 2.666 G_L1_RELATIVE: 12.532 G_Regularizer: 0.000 validation_error: 20.827 +(epoch: 17, iters: 29968, time: 0.541, data: 0.000) G_L1: 13.195 G_L1_ABSOLUTE: 2.249 G_L1_RELATIVE: 10.946 G_Regularizer: 0.000 validation_error: 20.962 +(epoch: 17, iters: 31968, time: 0.543, data: 0.000) G_L1: 16.461 G_L1_ABSOLUTE: 2.689 G_L1_RELATIVE: 13.772 G_Regularizer: 0.000 validation_error: 20.102 +(epoch: 17, iters: 33968, time: 0.540, data: 0.000) G_L1: 14.395 G_L1_ABSOLUTE: 2.799 G_L1_RELATIVE: 11.596 G_Regularizer: 0.000 validation_error: 20.578 +(epoch: 17, iters: 35968, time: 0.548, data: 0.000) G_L1: 12.853 G_L1_ABSOLUTE: 2.238 G_L1_RELATIVE: 10.615 G_Regularizer: 0.000 validation_error: 20.708 +(epoch: 17, iters: 37968, time: 0.538, data: 0.000) G_L1: 14.160 G_L1_ABSOLUTE: 2.908 G_L1_RELATIVE: 11.252 G_Regularizer: 0.000 validation_error: 20.349 +(epoch: 17, iters: 39968, time: 0.556, data: 0.000) G_L1: 12.741 G_L1_ABSOLUTE: 2.282 G_L1_RELATIVE: 10.459 G_Regularizer: 0.000 validation_error: 20.655 +(epoch: 17, iters: 41968, time: 0.540, data: 0.000) G_L1: 15.529 G_L1_ABSOLUTE: 3.307 G_L1_RELATIVE: 12.222 G_Regularizer: 0.000 validation_error: 20.812 +(epoch: 17, iters: 43968, time: 0.554, data: 0.000) G_L1: 18.339 G_L1_ABSOLUTE: 3.094 G_L1_RELATIVE: 15.245 G_Regularizer: 0.000 validation_error: 19.930 +(epoch: 17, iters: 45968, time: 0.537, data: 0.000) G_L1: 17.984 G_L1_ABSOLUTE: 2.835 G_L1_RELATIVE: 15.150 G_Regularizer: 0.000 validation_error: 20.168 +(epoch: 17, iters: 47968, time: 0.531, data: 0.000) G_L1: 18.809 G_L1_ABSOLUTE: 2.300 G_L1_RELATIVE: 16.509 G_Regularizer: 0.000 validation_error: 21.123 +(epoch: 17, iters: 49968, time: 0.552, data: 0.000) G_L1: 16.118 G_L1_ABSOLUTE: 2.522 G_L1_RELATIVE: 13.596 G_Regularizer: 0.000 validation_error: 21.257 +(epoch: 17, iters: 51968, time: 0.553, data: 0.000) G_L1: 17.296 G_L1_ABSOLUTE: 2.688 G_L1_RELATIVE: 14.608 G_Regularizer: 0.000 validation_error: 20.086 +(epoch: 17, iters: 53968, time: 0.539, data: 0.000) G_L1: 13.852 G_L1_ABSOLUTE: 2.478 G_L1_RELATIVE: 11.374 G_Regularizer: 0.000 validation_error: 20.639 +(epoch: 17, iters: 55968, time: 0.560, data: 0.000) G_L1: 13.626 G_L1_ABSOLUTE: 2.204 G_L1_RELATIVE: 11.421 G_Regularizer: 0.000 validation_error: 20.791 +(epoch: 17, iters: 57968, time: 0.561, data: 0.000) G_L1: 15.592 G_L1_ABSOLUTE: 2.800 G_L1_RELATIVE: 12.792 G_Regularizer: 0.000 validation_error: 20.227 +(epoch: 17, iters: 59968, time: 0.541, data: 0.000) G_L1: 13.263 G_L1_ABSOLUTE: 2.355 G_L1_RELATIVE: 10.909 G_Regularizer: 0.000 validation_error: 20.966 
+(epoch: 17, iters: 61968, time: 0.537, data: 0.000) G_L1: 14.913 G_L1_ABSOLUTE: 3.380 G_L1_RELATIVE: 11.533 G_Regularizer: 0.000 validation_error: 20.181 +(epoch: 17, iters: 63968, time: 0.544, data: 0.000) G_L1: 14.263 G_L1_ABSOLUTE: 2.642 G_L1_RELATIVE: 11.622 G_Regularizer: 0.000 validation_error: 20.114 +(epoch: 17, iters: 65968, time: 0.555, data: 0.000) G_L1: 14.417 G_L1_ABSOLUTE: 2.564 G_L1_RELATIVE: 11.853 G_Regularizer: 0.000 validation_error: 19.570 +(epoch: 17, iters: 67968, time: 0.557, data: 0.000) G_L1: 14.157 G_L1_ABSOLUTE: 2.634 G_L1_RELATIVE: 11.523 G_Regularizer: 0.000 validation_error: 20.097 +(epoch: 17, iters: 69968, time: 0.553, data: 0.000) G_L1: 17.874 G_L1_ABSOLUTE: 2.673 G_L1_RELATIVE: 15.201 G_Regularizer: 0.000 validation_error: 20.745 +(epoch: 17, iters: 71968, time: 0.551, data: 0.000) G_L1: 14.719 G_L1_ABSOLUTE: 2.800 G_L1_RELATIVE: 11.919 G_Regularizer: 0.000 validation_error: 20.586 +(epoch: 17, iters: 73968, time: 0.552, data: 0.000) G_L1: 15.799 G_L1_ABSOLUTE: 3.008 G_L1_RELATIVE: 12.791 G_Regularizer: 0.000 validation_error: 19.934 +(epoch: 17, iters: 75968, time: 0.559, data: 0.000) G_L1: 13.372 G_L1_ABSOLUTE: 2.763 G_L1_RELATIVE: 10.609 G_Regularizer: 0.000 validation_error: 20.629 +(epoch: 17, iters: 77968, time: 0.566, data: 0.000) G_L1: 15.469 G_L1_ABSOLUTE: 3.039 G_L1_RELATIVE: 12.430 G_Regularizer: 0.000 validation_error: 20.494 +(epoch: 17, iters: 79968, time: 0.540, data: 0.001) G_L1: 11.991 G_L1_ABSOLUTE: 1.882 G_L1_RELATIVE: 10.109 G_Regularizer: 0.000 validation_error: 20.113 +(epoch: 17, iters: 81968, time: 0.555, data: 0.000) G_L1: 14.806 G_L1_ABSOLUTE: 2.586 G_L1_RELATIVE: 12.220 G_Regularizer: 0.000 validation_error: 20.285 +(epoch: 17, iters: 83968, time: 0.540, data: 0.000) G_L1: 14.778 G_L1_ABSOLUTE: 2.442 G_L1_RELATIVE: 12.336 G_Regularizer: 0.000 validation_error: 20.633 +(epoch: 17, iters: 85968, time: 0.563, data: 0.000) G_L1: 17.656 G_L1_ABSOLUTE: 2.452 G_L1_RELATIVE: 15.204 G_Regularizer: 0.000 validation_error: 20.713 +(epoch: 17, iters: 87968, time: 0.535, data: 0.000) G_L1: 14.340 G_L1_ABSOLUTE: 2.618 G_L1_RELATIVE: 11.722 G_Regularizer: 0.000 validation_error: 20.706 +(epoch: 17, iters: 89968, time: 0.536, data: 0.000) G_L1: 15.230 G_L1_ABSOLUTE: 2.320 G_L1_RELATIVE: 12.910 G_Regularizer: 0.000 validation_error: 20.491 +(epoch: 17, iters: 91968, time: 0.553, data: 0.000) G_L1: 12.511 G_L1_ABSOLUTE: 2.207 G_L1_RELATIVE: 10.304 G_Regularizer: 0.000 validation_error: 20.627 +(epoch: 17, iters: 93968, time: 0.559, data: 0.001) G_L1: 12.857 G_L1_ABSOLUTE: 2.523 G_L1_RELATIVE: 10.334 G_Regularizer: 0.000 validation_error: 19.803 +(epoch: 17, iters: 95968, time: 0.538, data: 0.000) G_L1: 14.705 G_L1_ABSOLUTE: 2.834 G_L1_RELATIVE: 11.870 G_Regularizer: 0.000 validation_error: 20.515 +(epoch: 17, iters: 97968, time: 0.532, data: 0.000) G_L1: 12.738 G_L1_ABSOLUTE: 2.408 G_L1_RELATIVE: 10.330 G_Regularizer: 0.000 validation_error: 19.981 +(epoch: 17, iters: 99968, time: 0.531, data: 0.000) G_L1: 16.750 G_L1_ABSOLUTE: 3.009 G_L1_RELATIVE: 13.741 G_Regularizer: 0.000 validation_error: 19.988 +(epoch: 17, iters: 101968, time: 0.534, data: 0.000) G_L1: 18.399 G_L1_ABSOLUTE: 2.724 G_L1_RELATIVE: 15.676 G_Regularizer: 0.000 validation_error: 20.515 +(epoch: 17, iters: 103968, time: 0.529, data: 0.000) G_L1: 15.688 G_L1_ABSOLUTE: 3.399 G_L1_RELATIVE: 12.289 G_Regularizer: 0.000 validation_error: 20.812 +(epoch: 17, iters: 105968, time: 0.534, data: 0.000) G_L1: 16.067 G_L1_ABSOLUTE: 2.895 G_L1_RELATIVE: 13.172 G_Regularizer: 0.000 
validation_error: 19.969 +(epoch: 17, iters: 107968, time: 0.532, data: 0.001) G_L1: 15.416 G_L1_ABSOLUTE: 2.585 G_L1_RELATIVE: 12.831 G_Regularizer: 0.000 validation_error: 21.027 +(epoch: 17, iters: 109968, time: 0.556, data: 0.000) G_L1: 16.782 G_L1_ABSOLUTE: 2.972 G_L1_RELATIVE: 13.810 G_Regularizer: 0.000 validation_error: 20.361 +(epoch: 17, iters: 111968, time: 0.563, data: 0.000) G_L1: 13.588 G_L1_ABSOLUTE: 2.569 G_L1_RELATIVE: 11.019 G_Regularizer: 0.000 validation_error: 20.868 +(epoch: 17, iters: 113968, time: 0.534, data: 0.000) G_L1: 14.154 G_L1_ABSOLUTE: 3.342 G_L1_RELATIVE: 10.812 G_Regularizer: 0.000 validation_error: 20.355 +(epoch: 17, iters: 115968, time: 0.543, data: 0.000) G_L1: 15.585 G_L1_ABSOLUTE: 2.549 G_L1_RELATIVE: 13.036 G_Regularizer: 0.000 validation_error: 20.813 +(epoch: 17, iters: 117968, time: 0.566, data: 0.000) G_L1: 13.967 G_L1_ABSOLUTE: 2.611 G_L1_RELATIVE: 11.356 G_Regularizer: 0.000 validation_error: 20.739 +(epoch: 17, iters: 119968, time: 0.551, data: 0.000) G_L1: 16.432 G_L1_ABSOLUTE: 2.507 G_L1_RELATIVE: 13.926 G_Regularizer: 0.000 validation_error: 20.480 +(epoch: 17, iters: 121968, time: 0.542, data: 0.000) G_L1: 16.600 G_L1_ABSOLUTE: 2.420 G_L1_RELATIVE: 14.180 G_Regularizer: 0.000 validation_error: 20.882 +(epoch: 17, iters: 123968, time: 0.559, data: 0.000) G_L1: 13.620 G_L1_ABSOLUTE: 2.716 G_L1_RELATIVE: 10.904 G_Regularizer: 0.000 validation_error: 20.785 +(epoch: 17, iters: 125968, time: 0.547, data: 0.000) G_L1: 13.021 G_L1_ABSOLUTE: 2.077 G_L1_RELATIVE: 10.944 G_Regularizer: 0.000 validation_error: 20.736 +(epoch: 17, iters: 127968, time: 0.554, data: 0.000) G_L1: 13.997 G_L1_ABSOLUTE: 2.674 G_L1_RELATIVE: 11.323 G_Regularizer: 0.000 validation_error: 21.351 +(epoch: 17, iters: 129968, time: 0.550, data: 0.000) G_L1: 15.293 G_L1_ABSOLUTE: 2.119 G_L1_RELATIVE: 13.174 G_Regularizer: 0.000 validation_error: 20.310 +(epoch: 17, iters: 131968, time: 0.556, data: 0.000) G_L1: 15.260 G_L1_ABSOLUTE: 3.054 G_L1_RELATIVE: 12.206 G_Regularizer: 0.000 validation_error: 20.449 +(epoch: 17, iters: 133968, time: 0.565, data: 0.000) G_L1: 16.629 G_L1_ABSOLUTE: 2.715 G_L1_RELATIVE: 13.914 G_Regularizer: 0.000 validation_error: 21.062 +(epoch: 17, iters: 135968, time: 0.554, data: 0.001) G_L1: 15.744 G_L1_ABSOLUTE: 2.753 G_L1_RELATIVE: 12.991 G_Regularizer: 0.000 validation_error: 21.411 +(epoch: 17, iters: 137968, time: 0.545, data: 0.001) G_L1: 15.926 G_L1_ABSOLUTE: 2.805 G_L1_RELATIVE: 13.121 G_Regularizer: 0.000 validation_error: 20.982 +(epoch: 17, iters: 139968, time: 0.552, data: 0.000) G_L1: 15.891 G_L1_ABSOLUTE: 2.971 G_L1_RELATIVE: 12.919 G_Regularizer: 0.000 validation_error: 20.543 +(epoch: 17, iters: 141968, time: 0.562, data: 0.000) G_L1: 13.408 G_L1_ABSOLUTE: 2.770 G_L1_RELATIVE: 10.638 G_Regularizer: 0.000 validation_error: 20.457 +(epoch: 17, iters: 143968, time: 0.564, data: 0.000) G_L1: 14.026 G_L1_ABSOLUTE: 2.425 G_L1_RELATIVE: 11.601 G_Regularizer: 0.000 validation_error: 20.320 +(epoch: 17, iters: 145968, time: 0.548, data: 0.000) G_L1: 16.352 G_L1_ABSOLUTE: 2.402 G_L1_RELATIVE: 13.951 G_Regularizer: 0.000 validation_error: 20.968 +(epoch: 17, iters: 147968, time: 0.555, data: 0.000) G_L1: 13.706 G_L1_ABSOLUTE: 2.541 G_L1_RELATIVE: 11.165 G_Regularizer: 0.000 validation_error: 20.780 +(epoch: 17, iters: 149968, time: 0.548, data: 0.000) G_L1: 14.768 G_L1_ABSOLUTE: 2.574 G_L1_RELATIVE: 12.194 G_Regularizer: 0.000 validation_error: 20.698 +(epoch: 17, iters: 151968, time: 0.551, data: 0.000) G_L1: 14.084 G_L1_ABSOLUTE: 2.679 
G_L1_RELATIVE: 11.405 G_Regularizer: 0.000 validation_error: 20.028
+[... loss_log continues in the same one-entry-per-line format for epochs 17-20 (one entry every 2000 iters, ~0.53-0.57 s/iter, data time ~0.000-0.002 s): each entry reports G_L1 (total), G_L1_ABSOLUTE, G_L1_RELATIVE, G_Regularizer (0.000 throughout) and validation_error; G_L1 stays mostly in the 11-20 range with occasional spikes to ~30, while validation_error hovers around 20-21 ...]
+(epoch: 20, iters: 201712, time: 0.549, data: 0.001) G_L1: 13.922
G_L1_ABSOLUTE: 2.597 G_L1_RELATIVE: 11.325 G_Regularizer: 0.000 validation_error: 19.985 +(epoch: 20, iters: 203712, time: 0.538, data: 0.000) G_L1: 14.822 G_L1_ABSOLUTE: 2.637 G_L1_RELATIVE: 12.184 G_Regularizer: 0.000 validation_error: 21.059 +(epoch: 20, iters: 205712, time: 0.561, data: 0.000) G_L1: 15.803 G_L1_ABSOLUTE: 2.672 G_L1_RELATIVE: 13.131 G_Regularizer: 0.000 validation_error: 20.279 +(epoch: 20, iters: 207712, time: 0.538, data: 0.000) G_L1: 12.743 G_L1_ABSOLUTE: 2.417 G_L1_RELATIVE: 10.325 G_Regularizer: 0.000 validation_error: 20.137 +(epoch: 20, iters: 209712, time: 0.537, data: 0.000) G_L1: 14.386 G_L1_ABSOLUTE: 2.458 G_L1_RELATIVE: 11.928 G_Regularizer: 0.000 validation_error: 20.241 +(epoch: 20, iters: 211712, time: 0.559, data: 0.001) G_L1: 17.438 G_L1_ABSOLUTE: 3.186 G_L1_RELATIVE: 14.252 G_Regularizer: 0.000 validation_error: 20.799 +(epoch: 20, iters: 213712, time: 0.546, data: 0.001) G_L1: 13.378 G_L1_ABSOLUTE: 2.334 G_L1_RELATIVE: 11.044 G_Regularizer: 0.000 validation_error: 20.792 +(epoch: 20, iters: 215712, time: 0.552, data: 0.001) G_L1: 15.050 G_L1_ABSOLUTE: 3.258 G_L1_RELATIVE: 11.792 G_Regularizer: 0.000 validation_error: 20.281 +(epoch: 20, iters: 217712, time: 0.551, data: 0.000) G_L1: 15.921 G_L1_ABSOLUTE: 2.715 G_L1_RELATIVE: 13.205 G_Regularizer: 0.000 validation_error: 21.289 +(epoch: 20, iters: 219712, time: 0.556, data: 0.000) G_L1: 14.646 G_L1_ABSOLUTE: 2.983 G_L1_RELATIVE: 11.663 G_Regularizer: 0.000 validation_error: 20.942 +(epoch: 20, iters: 221712, time: 0.549, data: 0.000) G_L1: 15.619 G_L1_ABSOLUTE: 2.394 G_L1_RELATIVE: 13.224 G_Regularizer: 0.000 validation_error: 20.747 +(epoch: 20, iters: 223712, time: 0.536, data: 0.000) G_L1: 14.936 G_L1_ABSOLUTE: 2.705 G_L1_RELATIVE: 12.231 G_Regularizer: 0.000 validation_error: 20.821 +(epoch: 20, iters: 225712, time: 0.559, data: 0.000) G_L1: 13.936 G_L1_ABSOLUTE: 2.552 G_L1_RELATIVE: 11.384 G_Regularizer: 0.000 validation_error: 20.641 +(epoch: 20, iters: 227712, time: 0.537, data: 0.000) G_L1: 14.771 G_L1_ABSOLUTE: 2.595 G_L1_RELATIVE: 12.176 G_Regularizer: 0.000 validation_error: 20.533 +(epoch: 20, iters: 229712, time: 0.549, data: 0.000) G_L1: 16.245 G_L1_ABSOLUTE: 2.682 G_L1_RELATIVE: 13.563 G_Regularizer: 0.000 validation_error: 21.293 +(epoch: 20, iters: 231712, time: 0.548, data: 0.000) G_L1: 13.700 G_L1_ABSOLUTE: 2.238 G_L1_RELATIVE: 11.462 G_Regularizer: 0.000 validation_error: 20.562 +(epoch: 20, iters: 233712, time: 0.566, data: 0.000) G_L1: 13.444 G_L1_ABSOLUTE: 2.836 G_L1_RELATIVE: 10.608 G_Regularizer: 0.000 validation_error: 20.183 +(epoch: 20, iters: 235712, time: 0.533, data: 0.000) G_L1: 16.799 G_L1_ABSOLUTE: 2.652 G_L1_RELATIVE: 14.148 G_Regularizer: 0.000 validation_error: 20.530 +(epoch: 20, iters: 237712, time: 0.557, data: 0.001) G_L1: 14.784 G_L1_ABSOLUTE: 2.300 G_L1_RELATIVE: 12.484 G_Regularizer: 0.000 validation_error: 20.871 +(epoch: 20, iters: 239712, time: 0.560, data: 0.000) G_L1: 12.940 G_L1_ABSOLUTE: 2.768 G_L1_RELATIVE: 10.172 G_Regularizer: 0.000 validation_error: 20.643 +(epoch: 20, iters: 241712, time: 0.557, data: 0.001) G_L1: 14.575 G_L1_ABSOLUTE: 2.424 G_L1_RELATIVE: 12.151 G_Regularizer: 0.000 validation_error: 21.450 +(epoch: 20, iters: 243712, time: 0.546, data: 0.000) G_L1: 13.789 G_L1_ABSOLUTE: 2.593 G_L1_RELATIVE: 11.196 G_Regularizer: 0.000 validation_error: 20.985 +(epoch: 20, iters: 245712, time: 0.561, data: 0.000) G_L1: 15.898 G_L1_ABSOLUTE: 2.166 G_L1_RELATIVE: 13.732 G_Regularizer: 0.000 validation_error: 20.343 +(epoch: 20, iters: 247712, time: 
0.560, data: 0.000) G_L1: 15.946 G_L1_ABSOLUTE: 2.625 G_L1_RELATIVE: 13.320 G_Regularizer: 0.000 validation_error: 20.712 +(epoch: 20, iters: 249712, time: 0.556, data: 0.000) G_L1: 16.146 G_L1_ABSOLUTE: 2.601 G_L1_RELATIVE: 13.545 G_Regularizer: 0.000 validation_error: 20.516 +(epoch: 20, iters: 251712, time: 0.544, data: 0.001) G_L1: 15.664 G_L1_ABSOLUTE: 2.624 G_L1_RELATIVE: 13.040 G_Regularizer: 0.000 validation_error: 20.531 +(epoch: 20, iters: 253712, time: 0.565, data: 0.001) G_L1: 15.903 G_L1_ABSOLUTE: 2.459 G_L1_RELATIVE: 13.444 G_Regularizer: 0.000 validation_error: 20.772 +(epoch: 20, iters: 255712, time: 0.561, data: 0.000) G_L1: 16.753 G_L1_ABSOLUTE: 2.716 G_L1_RELATIVE: 14.036 G_Regularizer: 0.000 validation_error: 20.679 +(epoch: 20, iters: 257712, time: 0.561, data: 0.000) G_L1: 14.228 G_L1_ABSOLUTE: 2.848 G_L1_RELATIVE: 11.380 G_Regularizer: 0.000 validation_error: 20.257 +(epoch: 20, iters: 259712, time: 0.546, data: 0.000) G_L1: 15.523 G_L1_ABSOLUTE: 2.558 G_L1_RELATIVE: 12.964 G_Regularizer: 0.000 validation_error: 20.487 +(epoch: 20, iters: 261712, time: 0.535, data: 0.000) G_L1: 12.375 G_L1_ABSOLUTE: 2.586 G_L1_RELATIVE: 9.789 G_Regularizer: 0.000 validation_error: 21.205 +(epoch: 20, iters: 263712, time: 0.563, data: 0.000) G_L1: 16.328 G_L1_ABSOLUTE: 2.399 G_L1_RELATIVE: 13.928 G_Regularizer: 0.000 validation_error: 20.196 +(epoch: 20, iters: 265712, time: 0.564, data: 0.000) G_L1: 19.714 G_L1_ABSOLUTE: 2.554 G_L1_RELATIVE: 17.160 G_Regularizer: 0.000 validation_error: 20.498 +(epoch: 20, iters: 267712, time: 0.551, data: 0.000) G_L1: 17.411 G_L1_ABSOLUTE: 2.267 G_L1_RELATIVE: 15.144 G_Regularizer: 0.000 validation_error: 21.019 +(epoch: 20, iters: 269712, time: 0.554, data: 0.000) G_L1: 18.151 G_L1_ABSOLUTE: 2.980 G_L1_RELATIVE: 15.172 G_Regularizer: 0.000 validation_error: 20.222 +(epoch: 20, iters: 271712, time: 0.559, data: 0.000) G_L1: 15.165 G_L1_ABSOLUTE: 2.232 G_L1_RELATIVE: 12.934 G_Regularizer: 0.000 validation_error: 20.573 +(epoch: 20, iters: 273712, time: 0.556, data: 0.001) G_L1: 13.962 G_L1_ABSOLUTE: 2.608 G_L1_RELATIVE: 11.354 G_Regularizer: 0.000 validation_error: 20.923 +(epoch: 20, iters: 275712, time: 0.560, data: 0.000) G_L1: 14.854 G_L1_ABSOLUTE: 2.655 G_L1_RELATIVE: 12.199 G_Regularizer: 0.000 validation_error: 19.767 +(epoch: 20, iters: 277712, time: 0.537, data: 0.000) G_L1: 14.725 G_L1_ABSOLUTE: 2.736 G_L1_RELATIVE: 11.988 G_Regularizer: 0.000 validation_error: 21.058 +(epoch: 20, iters: 279712, time: 0.556, data: 0.000) G_L1: 12.802 G_L1_ABSOLUTE: 2.879 G_L1_RELATIVE: 9.923 G_Regularizer: 0.000 validation_error: 20.366 +(epoch: 20, iters: 281712, time: 0.549, data: 0.000) G_L1: 15.423 G_L1_ABSOLUTE: 2.904 G_L1_RELATIVE: 12.519 G_Regularizer: 0.000 validation_error: 20.397 +(epoch: 20, iters: 283712, time: 0.560, data: 0.000) G_L1: 15.268 G_L1_ABSOLUTE: 2.375 G_L1_RELATIVE: 12.893 G_Regularizer: 0.000 validation_error: 19.939 +(epoch: 20, iters: 285712, time: 0.553, data: 0.000) G_L1: 14.921 G_L1_ABSOLUTE: 2.649 G_L1_RELATIVE: 12.272 G_Regularizer: 0.000 validation_error: 20.387 +(epoch: 20, iters: 287712, time: 0.556, data: 0.000) G_L1: 13.322 G_L1_ABSOLUTE: 2.287 G_L1_RELATIVE: 11.035 G_Regularizer: 0.000 validation_error: 20.784 +(epoch: 20, iters: 289712, time: 0.548, data: 0.000) G_L1: 15.256 G_L1_ABSOLUTE: 2.556 G_L1_RELATIVE: 12.699 G_Regularizer: 0.000 validation_error: 20.276 +(epoch: 20, iters: 291712, time: 0.557, data: 0.000) G_L1: 12.531 G_L1_ABSOLUTE: 2.640 G_L1_RELATIVE: 9.891 G_Regularizer: 0.000 validation_error: 21.067 
+(epoch: 20, iters: 293712, time: 0.560, data: 0.000) G_L1: 14.645 G_L1_ABSOLUTE: 2.492 G_L1_RELATIVE: 12.153 G_Regularizer: 0.000 validation_error: 20.753 +(epoch: 20, iters: 295712, time: 0.551, data: 0.000) G_L1: 14.970 G_L1_ABSOLUTE: 2.644 G_L1_RELATIVE: 12.326 G_Regularizer: 0.000 validation_error: 20.363 +(epoch: 20, iters: 297712, time: 0.559, data: 0.000) G_L1: 13.845 G_L1_ABSOLUTE: 2.421 G_L1_RELATIVE: 11.424 G_Regularizer: 0.000 validation_error: 20.858 +(epoch: 20, iters: 299712, time: 0.555, data: 0.000) G_L1: 15.202 G_L1_ABSOLUTE: 2.744 G_L1_RELATIVE: 12.458 G_Regularizer: 0.000 validation_error: 20.319 +(epoch: 20, iters: 301712, time: 0.559, data: 0.000) G_L1: 14.606 G_L1_ABSOLUTE: 2.515 G_L1_RELATIVE: 12.091 G_Regularizer: 0.000 validation_error: 20.307 +(epoch: 21, iters: 960, time: 0.549, data: 0.000) G_L1: 14.657 G_L1_ABSOLUTE: 2.723 G_L1_RELATIVE: 11.933 G_Regularizer: 0.000 validation_error: 20.526 +(epoch: 21, iters: 2960, time: 0.561, data: 0.000) G_L1: 16.028 G_L1_ABSOLUTE: 2.656 G_L1_RELATIVE: 13.372 G_Regularizer: 0.000 validation_error: 21.096 +(epoch: 21, iters: 4960, time: 0.554, data: 0.000) G_L1: 15.241 G_L1_ABSOLUTE: 2.679 G_L1_RELATIVE: 12.562 G_Regularizer: 0.000 validation_error: 20.206 +(epoch: 21, iters: 6960, time: 0.566, data: 0.000) G_L1: 15.466 G_L1_ABSOLUTE: 3.025 G_L1_RELATIVE: 12.441 G_Regularizer: 0.000 validation_error: 21.060 +(epoch: 21, iters: 8960, time: 0.554, data: 0.000) G_L1: 14.840 G_L1_ABSOLUTE: 2.728 G_L1_RELATIVE: 12.113 G_Regularizer: 0.000 validation_error: 21.051 +(epoch: 21, iters: 10960, time: 0.560, data: 0.000) G_L1: 14.623 G_L1_ABSOLUTE: 3.103 G_L1_RELATIVE: 11.520 G_Regularizer: 0.000 validation_error: 20.194 +(epoch: 21, iters: 12960, time: 0.552, data: 0.000) G_L1: 14.090 G_L1_ABSOLUTE: 2.409 G_L1_RELATIVE: 11.681 G_Regularizer: 0.000 validation_error: 20.493 +(epoch: 21, iters: 14960, time: 0.568, data: 0.000) G_L1: 13.330 G_L1_ABSOLUTE: 2.419 G_L1_RELATIVE: 10.911 G_Regularizer: 0.000 validation_error: 20.316 +(epoch: 21, iters: 16960, time: 0.553, data: 0.000) G_L1: 16.091 G_L1_ABSOLUTE: 2.732 G_L1_RELATIVE: 13.359 G_Regularizer: 0.000 validation_error: 20.184 +(epoch: 21, iters: 18960, time: 0.552, data: 0.000) G_L1: 17.419 G_L1_ABSOLUTE: 2.989 G_L1_RELATIVE: 14.430 G_Regularizer: 0.000 validation_error: 20.262 +(epoch: 21, iters: 20960, time: 0.553, data: 0.000) G_L1: 13.049 G_L1_ABSOLUTE: 2.079 G_L1_RELATIVE: 10.970 G_Regularizer: 0.000 validation_error: 20.441 +(epoch: 21, iters: 22960, time: 0.555, data: 0.000) G_L1: 14.094 G_L1_ABSOLUTE: 2.250 G_L1_RELATIVE: 11.843 G_Regularizer: 0.000 validation_error: 20.347 +(epoch: 21, iters: 24960, time: 0.556, data: 0.000) G_L1: 15.238 G_L1_ABSOLUTE: 3.027 G_L1_RELATIVE: 12.211 G_Regularizer: 0.000 validation_error: 20.641 +(epoch: 21, iters: 26960, time: 0.550, data: 0.000) G_L1: 14.844 G_L1_ABSOLUTE: 2.532 G_L1_RELATIVE: 12.312 G_Regularizer: 0.000 validation_error: 20.778 +(epoch: 21, iters: 28960, time: 0.556, data: 0.000) G_L1: 18.415 G_L1_ABSOLUTE: 3.177 G_L1_RELATIVE: 15.238 G_Regularizer: 0.000 validation_error: 20.818 +(epoch: 21, iters: 30960, time: 0.561, data: 0.000) G_L1: 16.895 G_L1_ABSOLUTE: 3.039 G_L1_RELATIVE: 13.856 G_Regularizer: 0.000 validation_error: 20.507 +(epoch: 21, iters: 32960, time: 0.565, data: 0.000) G_L1: 14.095 G_L1_ABSOLUTE: 2.658 G_L1_RELATIVE: 11.436 G_Regularizer: 0.000 validation_error: 20.609 +(epoch: 21, iters: 34960, time: 0.573, data: 0.000) G_L1: 14.357 G_L1_ABSOLUTE: 2.381 G_L1_RELATIVE: 11.975 G_Regularizer: 0.000 
validation_error: 20.307 +(epoch: 21, iters: 36960, time: 0.572, data: 0.000) G_L1: 15.365 G_L1_ABSOLUTE: 2.819 G_L1_RELATIVE: 12.545 G_Regularizer: 0.000 validation_error: 20.405 +(epoch: 21, iters: 38960, time: 0.584, data: 0.000) G_L1: 11.983 G_L1_ABSOLUTE: 2.173 G_L1_RELATIVE: 9.810 G_Regularizer: 0.000 validation_error: 20.400 +(epoch: 21, iters: 40960, time: 0.584, data: 0.000) G_L1: 15.344 G_L1_ABSOLUTE: 2.513 G_L1_RELATIVE: 12.831 G_Regularizer: 0.000 validation_error: 20.407 +(epoch: 21, iters: 42960, time: 0.574, data: 0.000) G_L1: 16.427 G_L1_ABSOLUTE: 2.460 G_L1_RELATIVE: 13.967 G_Regularizer: 0.000 validation_error: 20.146 +(epoch: 21, iters: 44960, time: 0.574, data: 0.000) G_L1: 14.184 G_L1_ABSOLUTE: 2.732 G_L1_RELATIVE: 11.452 G_Regularizer: 0.000 validation_error: 20.276 +(epoch: 21, iters: 46960, time: 0.581, data: 0.000) G_L1: 15.763 G_L1_ABSOLUTE: 2.824 G_L1_RELATIVE: 12.939 G_Regularizer: 0.000 validation_error: 20.420 +(epoch: 21, iters: 48960, time: 0.575, data: 0.000) G_L1: 15.683 G_L1_ABSOLUTE: 2.683 G_L1_RELATIVE: 13.000 G_Regularizer: 0.000 validation_error: 20.908 +(epoch: 21, iters: 50960, time: 0.575, data: 0.000) G_L1: 15.563 G_L1_ABSOLUTE: 2.381 G_L1_RELATIVE: 13.182 G_Regularizer: 0.000 validation_error: 20.892 +(epoch: 21, iters: 52960, time: 0.583, data: 0.000) G_L1: 16.392 G_L1_ABSOLUTE: 2.700 G_L1_RELATIVE: 13.692 G_Regularizer: 0.000 validation_error: 19.954 +(epoch: 21, iters: 54960, time: 0.575, data: 0.000) G_L1: 14.211 G_L1_ABSOLUTE: 2.510 G_L1_RELATIVE: 11.701 G_Regularizer: 0.000 validation_error: 19.679 +(epoch: 21, iters: 56960, time: 0.573, data: 0.000) G_L1: 14.834 G_L1_ABSOLUTE: 2.737 G_L1_RELATIVE: 12.098 G_Regularizer: 0.000 validation_error: 20.677 +(epoch: 21, iters: 58960, time: 0.573, data: 0.000) G_L1: 14.887 G_L1_ABSOLUTE: 2.629 G_L1_RELATIVE: 12.258 G_Regularizer: 0.000 validation_error: 20.214 +(epoch: 21, iters: 60960, time: 0.573, data: 0.001) G_L1: 15.122 G_L1_ABSOLUTE: 2.972 G_L1_RELATIVE: 12.150 G_Regularizer: 0.000 validation_error: 21.343 +(epoch: 21, iters: 62960, time: 0.593, data: 0.000) G_L1: 11.192 G_L1_ABSOLUTE: 2.365 G_L1_RELATIVE: 8.827 G_Regularizer: 0.000 validation_error: 20.569 +(epoch: 21, iters: 64960, time: 0.574, data: 0.000) G_L1: 14.716 G_L1_ABSOLUTE: 2.740 G_L1_RELATIVE: 11.975 G_Regularizer: 0.000 validation_error: 19.854 +(epoch: 21, iters: 66960, time: 0.580, data: 0.000) G_L1: 13.322 G_L1_ABSOLUTE: 2.639 G_L1_RELATIVE: 10.683 G_Regularizer: 0.000 validation_error: 20.418 +(epoch: 21, iters: 68960, time: 0.569, data: 0.000) G_L1: 13.937 G_L1_ABSOLUTE: 2.520 G_L1_RELATIVE: 11.417 G_Regularizer: 0.000 validation_error: 20.374 +(epoch: 21, iters: 70960, time: 0.573, data: 0.000) G_L1: 16.676 G_L1_ABSOLUTE: 2.565 G_L1_RELATIVE: 14.111 G_Regularizer: 0.000 validation_error: 20.457 +(epoch: 21, iters: 72960, time: 0.572, data: 0.000) G_L1: 18.013 G_L1_ABSOLUTE: 2.655 G_L1_RELATIVE: 15.358 G_Regularizer: 0.000 validation_error: 20.543 +(epoch: 21, iters: 74960, time: 0.576, data: 0.000) G_L1: 18.551 G_L1_ABSOLUTE: 2.508 G_L1_RELATIVE: 16.043 G_Regularizer: 0.000 validation_error: 20.871 +(epoch: 21, iters: 76960, time: 0.581, data: 0.000) G_L1: 15.418 G_L1_ABSOLUTE: 2.324 G_L1_RELATIVE: 13.093 G_Regularizer: 0.000 validation_error: 20.630 +(epoch: 21, iters: 78960, time: 0.583, data: 0.000) G_L1: 15.758 G_L1_ABSOLUTE: 2.590 G_L1_RELATIVE: 13.168 G_Regularizer: 0.000 validation_error: 20.877 +(epoch: 21, iters: 80960, time: 0.570, data: 0.000) G_L1: 12.358 G_L1_ABSOLUTE: 2.305 G_L1_RELATIVE: 10.054 
G_Regularizer: 0.000 validation_error: 20.656 +(epoch: 21, iters: 82960, time: 0.573, data: 0.000) G_L1: 17.076 G_L1_ABSOLUTE: 2.724 G_L1_RELATIVE: 14.352 G_Regularizer: 0.000 validation_error: 20.784 +(epoch: 21, iters: 84960, time: 0.576, data: 0.000) G_L1: 15.612 G_L1_ABSOLUTE: 2.412 G_L1_RELATIVE: 13.200 G_Regularizer: 0.000 validation_error: 19.960 +(epoch: 21, iters: 86960, time: 0.567, data: 0.000) G_L1: 16.900 G_L1_ABSOLUTE: 2.802 G_L1_RELATIVE: 14.098 G_Regularizer: 0.000 validation_error: 19.428 +(epoch: 21, iters: 88960, time: 0.577, data: 0.000) G_L1: 14.592 G_L1_ABSOLUTE: 2.972 G_L1_RELATIVE: 11.620 G_Regularizer: 0.000 validation_error: 20.434 +(epoch: 21, iters: 90960, time: 0.571, data: 0.000) G_L1: 16.350 G_L1_ABSOLUTE: 2.649 G_L1_RELATIVE: 13.701 G_Regularizer: 0.000 validation_error: 20.541 +(epoch: 21, iters: 92960, time: 0.569, data: 0.000) G_L1: 17.157 G_L1_ABSOLUTE: 2.541 G_L1_RELATIVE: 14.616 G_Regularizer: 0.000 validation_error: 20.439 +(epoch: 21, iters: 94960, time: 0.585, data: 0.000) G_L1: 12.284 G_L1_ABSOLUTE: 2.191 G_L1_RELATIVE: 10.094 G_Regularizer: 0.000 validation_error: 20.511 +(epoch: 21, iters: 96960, time: 0.576, data: 0.000) G_L1: 14.595 G_L1_ABSOLUTE: 2.737 G_L1_RELATIVE: 11.858 G_Regularizer: 0.000 validation_error: 20.673 +(epoch: 21, iters: 98960, time: 0.567, data: 0.000) G_L1: 14.613 G_L1_ABSOLUTE: 2.274 G_L1_RELATIVE: 12.339 G_Regularizer: 0.000 validation_error: 19.728 +(epoch: 21, iters: 100960, time: 0.573, data: 0.000) G_L1: 14.365 G_L1_ABSOLUTE: 2.528 G_L1_RELATIVE: 11.837 G_Regularizer: 0.000 validation_error: 20.873 +(epoch: 21, iters: 102960, time: 0.581, data: 0.000) G_L1: 12.860 G_L1_ABSOLUTE: 2.460 G_L1_RELATIVE: 10.401 G_Regularizer: 0.000 validation_error: 19.775 +(epoch: 21, iters: 104960, time: 0.573, data: 0.001) G_L1: 15.804 G_L1_ABSOLUTE: 2.474 G_L1_RELATIVE: 13.330 G_Regularizer: 0.000 validation_error: 20.370 +(epoch: 21, iters: 106960, time: 0.573, data: 0.000) G_L1: 14.789 G_L1_ABSOLUTE: 2.869 G_L1_RELATIVE: 11.920 G_Regularizer: 0.000 validation_error: 20.846 +(epoch: 21, iters: 108960, time: 0.576, data: 0.000) G_L1: 14.283 G_L1_ABSOLUTE: 2.393 G_L1_RELATIVE: 11.890 G_Regularizer: 0.000 validation_error: 20.018 +(epoch: 21, iters: 110960, time: 0.574, data: 0.000) G_L1: 14.779 G_L1_ABSOLUTE: 2.448 G_L1_RELATIVE: 12.331 G_Regularizer: 0.000 validation_error: 20.261 +(epoch: 21, iters: 112960, time: 0.583, data: 0.000) G_L1: 12.205 G_L1_ABSOLUTE: 2.304 G_L1_RELATIVE: 9.900 G_Regularizer: 0.000 validation_error: 20.604 +(epoch: 21, iters: 114960, time: 0.570, data: 0.000) G_L1: 15.022 G_L1_ABSOLUTE: 3.463 G_L1_RELATIVE: 11.560 G_Regularizer: 0.000 validation_error: 19.131 +(epoch: 21, iters: 116960, time: 0.571, data: 0.000) G_L1: 14.601 G_L1_ABSOLUTE: 2.691 G_L1_RELATIVE: 11.910 G_Regularizer: 0.000 validation_error: 20.317 +(epoch: 21, iters: 118960, time: 0.572, data: 0.000) G_L1: 16.187 G_L1_ABSOLUTE: 3.353 G_L1_RELATIVE: 12.834 G_Regularizer: 0.000 validation_error: 20.050 +(epoch: 21, iters: 120960, time: 0.578, data: 0.000) G_L1: 15.491 G_L1_ABSOLUTE: 2.652 G_L1_RELATIVE: 12.839 G_Regularizer: 0.000 validation_error: 20.246 +(epoch: 21, iters: 122960, time: 0.571, data: 0.000) G_L1: 15.616 G_L1_ABSOLUTE: 2.739 G_L1_RELATIVE: 12.878 G_Regularizer: 0.000 validation_error: 20.259 +(epoch: 21, iters: 124960, time: 0.577, data: 0.000) G_L1: 15.792 G_L1_ABSOLUTE: 2.527 G_L1_RELATIVE: 13.265 G_Regularizer: 0.000 validation_error: 20.676 +(epoch: 21, iters: 126960, time: 0.574, data: 0.000) G_L1: 15.560 G_L1_ABSOLUTE: 
2.924 G_L1_RELATIVE: 12.637 G_Regularizer: 0.000 validation_error: 20.368 +(epoch: 21, iters: 128960, time: 0.585, data: 0.000) G_L1: 13.490 G_L1_ABSOLUTE: 2.575 G_L1_RELATIVE: 10.916 G_Regularizer: 0.000 validation_error: 20.185 +(epoch: 21, iters: 130960, time: 0.568, data: 0.000) G_L1: 15.531 G_L1_ABSOLUTE: 2.616 G_L1_RELATIVE: 12.915 G_Regularizer: 0.000 validation_error: 20.054 +(epoch: 21, iters: 132960, time: 0.573, data: 0.000) G_L1: 15.307 G_L1_ABSOLUTE: 3.090 G_L1_RELATIVE: 12.217 G_Regularizer: 0.000 validation_error: 20.727 +(epoch: 21, iters: 134960, time: 0.576, data: 0.000) G_L1: 15.118 G_L1_ABSOLUTE: 2.737 G_L1_RELATIVE: 12.381 G_Regularizer: 0.000 validation_error: 20.300 +(epoch: 21, iters: 136960, time: 0.570, data: 0.000) G_L1: 14.705 G_L1_ABSOLUTE: 2.354 G_L1_RELATIVE: 12.350 G_Regularizer: 0.000 validation_error: 20.870 +(epoch: 21, iters: 138960, time: 0.580, data: 0.000) G_L1: 14.849 G_L1_ABSOLUTE: 2.621 G_L1_RELATIVE: 12.229 G_Regularizer: 0.000 validation_error: 20.674 +(epoch: 21, iters: 140960, time: 0.575, data: 0.000) G_L1: 16.150 G_L1_ABSOLUTE: 2.689 G_L1_RELATIVE: 13.461 G_Regularizer: 0.000 validation_error: 20.828 +(epoch: 21, iters: 142960, time: 0.578, data: 0.000) G_L1: 13.754 G_L1_ABSOLUTE: 2.607 G_L1_RELATIVE: 11.147 G_Regularizer: 0.000 validation_error: 20.607 +(epoch: 21, iters: 144960, time: 0.575, data: 0.000) G_L1: 14.477 G_L1_ABSOLUTE: 2.414 G_L1_RELATIVE: 12.063 G_Regularizer: 0.000 validation_error: 20.368 +(epoch: 21, iters: 146960, time: 0.570, data: 0.000) G_L1: 14.357 G_L1_ABSOLUTE: 3.352 G_L1_RELATIVE: 11.005 G_Regularizer: 0.000 validation_error: 20.315 +(epoch: 21, iters: 148960, time: 0.576, data: 0.000) G_L1: 15.390 G_L1_ABSOLUTE: 2.485 G_L1_RELATIVE: 12.905 G_Regularizer: 0.000 validation_error: 20.788 +(epoch: 21, iters: 150960, time: 0.585, data: 0.000) G_L1: 14.044 G_L1_ABSOLUTE: 2.811 G_L1_RELATIVE: 11.233 G_Regularizer: 0.000 validation_error: 20.618 +(epoch: 21, iters: 152960, time: 0.572, data: 0.000) G_L1: 16.000 G_L1_ABSOLUTE: 2.786 G_L1_RELATIVE: 13.214 G_Regularizer: 0.000 validation_error: 20.680 +(epoch: 21, iters: 154960, time: 0.569, data: 0.000) G_L1: 16.367 G_L1_ABSOLUTE: 2.938 G_L1_RELATIVE: 13.429 G_Regularizer: 0.000 validation_error: 20.716 +(epoch: 21, iters: 156960, time: 0.574, data: 0.000) G_L1: 16.136 G_L1_ABSOLUTE: 3.180 G_L1_RELATIVE: 12.956 G_Regularizer: 0.000 validation_error: 20.413 +(epoch: 21, iters: 158960, time: 0.569, data: 0.000) G_L1: 13.863 G_L1_ABSOLUTE: 2.702 G_L1_RELATIVE: 11.161 G_Regularizer: 0.000 validation_error: 20.649 +(epoch: 21, iters: 160960, time: 0.579, data: 0.000) G_L1: 16.223 G_L1_ABSOLUTE: 2.522 G_L1_RELATIVE: 13.700 G_Regularizer: 0.000 validation_error: 20.490 +(epoch: 21, iters: 162960, time: 0.582, data: 0.000) G_L1: 12.864 G_L1_ABSOLUTE: 2.487 G_L1_RELATIVE: 10.377 G_Regularizer: 0.000 validation_error: 20.460 +(epoch: 21, iters: 164960, time: 0.583, data: 0.000) G_L1: 16.661 G_L1_ABSOLUTE: 2.348 G_L1_RELATIVE: 14.313 G_Regularizer: 0.000 validation_error: 20.349 +(epoch: 21, iters: 166960, time: 0.572, data: 0.000) G_L1: 16.378 G_L1_ABSOLUTE: 2.653 G_L1_RELATIVE: 13.725 G_Regularizer: 0.000 validation_error: 20.399 +(epoch: 21, iters: 168960, time: 0.574, data: 0.000) G_L1: 16.458 G_L1_ABSOLUTE: 3.036 G_L1_RELATIVE: 13.422 G_Regularizer: 0.000 validation_error: 20.674 +(epoch: 21, iters: 170960, time: 0.576, data: 0.000) G_L1: 19.124 G_L1_ABSOLUTE: 2.769 G_L1_RELATIVE: 16.355 G_Regularizer: 0.000 validation_error: 20.751 +(epoch: 21, iters: 172960, time: 0.572, data: 
0.000) G_L1: 16.306 G_L1_ABSOLUTE: 3.212 G_L1_RELATIVE: 13.094 G_Regularizer: 0.000 validation_error: 20.693 +(epoch: 21, iters: 174960, time: 0.577, data: 0.000) G_L1: 14.540 G_L1_ABSOLUTE: 2.986 G_L1_RELATIVE: 11.554 G_Regularizer: 0.000 validation_error: 20.260 +(epoch: 21, iters: 176960, time: 0.578, data: 0.000) G_L1: 13.875 G_L1_ABSOLUTE: 2.653 G_L1_RELATIVE: 11.222 G_Regularizer: 0.000 validation_error: 20.875 +(epoch: 21, iters: 178960, time: 0.589, data: 0.000) G_L1: 16.822 G_L1_ABSOLUTE: 2.731 G_L1_RELATIVE: 14.090 G_Regularizer: 0.000 validation_error: 20.178 +(epoch: 21, iters: 180960, time: 0.576, data: 0.000) G_L1: 12.724 G_L1_ABSOLUTE: 2.287 G_L1_RELATIVE: 10.437 G_Regularizer: 0.000 validation_error: 20.268 +(epoch: 21, iters: 182960, time: 0.578, data: 0.000) G_L1: 13.672 G_L1_ABSOLUTE: 2.544 G_L1_RELATIVE: 11.128 G_Regularizer: 0.000 validation_error: 20.251 +(epoch: 21, iters: 184960, time: 0.571, data: 0.000) G_L1: 15.223 G_L1_ABSOLUTE: 2.945 G_L1_RELATIVE: 12.278 G_Regularizer: 0.000 validation_error: 20.635 +(epoch: 21, iters: 186960, time: 0.575, data: 0.000) G_L1: 17.398 G_L1_ABSOLUTE: 2.994 G_L1_RELATIVE: 14.404 G_Regularizer: 0.000 validation_error: 20.079 +(epoch: 21, iters: 188960, time: 0.574, data: 0.000) G_L1: 15.853 G_L1_ABSOLUTE: 2.808 G_L1_RELATIVE: 13.044 G_Regularizer: 0.000 validation_error: 20.587 +(epoch: 21, iters: 190960, time: 0.574, data: 0.000) G_L1: 18.669 G_L1_ABSOLUTE: 3.504 G_L1_RELATIVE: 15.165 G_Regularizer: 0.000 validation_error: 20.719 +(epoch: 21, iters: 192960, time: 0.579, data: 0.000) G_L1: 14.501 G_L1_ABSOLUTE: 2.889 G_L1_RELATIVE: 11.612 G_Regularizer: 0.000 validation_error: 20.570 +(epoch: 21, iters: 194960, time: 0.598, data: 0.000) G_L1: 16.251 G_L1_ABSOLUTE: 2.990 G_L1_RELATIVE: 13.261 G_Regularizer: 0.000 validation_error: 20.710 +(epoch: 21, iters: 196960, time: 0.585, data: 0.000) G_L1: 18.594 G_L1_ABSOLUTE: 2.790 G_L1_RELATIVE: 15.804 G_Regularizer: 0.000 validation_error: 20.412 +(epoch: 21, iters: 198960, time: 0.576, data: 0.000) G_L1: 17.582 G_L1_ABSOLUTE: 2.653 G_L1_RELATIVE: 14.930 G_Regularizer: 0.000 validation_error: 21.273 +(epoch: 21, iters: 200960, time: 0.570, data: 0.000) G_L1: 12.575 G_L1_ABSOLUTE: 2.618 G_L1_RELATIVE: 9.958 G_Regularizer: 0.000 validation_error: 21.299 +(epoch: 21, iters: 202960, time: 0.572, data: 0.000) G_L1: 19.067 G_L1_ABSOLUTE: 3.030 G_L1_RELATIVE: 16.037 G_Regularizer: 0.000 validation_error: 20.835 +(epoch: 21, iters: 204960, time: 0.577, data: 0.000) G_L1: 16.324 G_L1_ABSOLUTE: 2.564 G_L1_RELATIVE: 13.759 G_Regularizer: 0.000 validation_error: 20.823 +(epoch: 21, iters: 206960, time: 0.573, data: 0.001) G_L1: 16.264 G_L1_ABSOLUTE: 2.816 G_L1_RELATIVE: 13.448 G_Regularizer: 0.000 validation_error: 20.947 +(epoch: 21, iters: 208960, time: 0.566, data: 0.000) G_L1: 16.862 G_L1_ABSOLUTE: 2.275 G_L1_RELATIVE: 14.588 G_Regularizer: 0.000 validation_error: 21.278 +(epoch: 21, iters: 210960, time: 0.567, data: 0.000) G_L1: 14.952 G_L1_ABSOLUTE: 2.829 G_L1_RELATIVE: 12.122 G_Regularizer: 0.000 validation_error: 20.732 +(epoch: 21, iters: 212960, time: 0.600, data: 0.000) G_L1: 15.201 G_L1_ABSOLUTE: 2.709 G_L1_RELATIVE: 12.492 G_Regularizer: 0.000 validation_error: 20.765 +(epoch: 21, iters: 214960, time: 0.579, data: 0.001) G_L1: 16.019 G_L1_ABSOLUTE: 2.670 G_L1_RELATIVE: 13.349 G_Regularizer: 0.000 validation_error: 20.580 +(epoch: 21, iters: 216960, time: 0.570, data: 0.000) G_L1: 12.810 G_L1_ABSOLUTE: 2.681 G_L1_RELATIVE: 10.129 G_Regularizer: 0.000 validation_error: 20.490 +(epoch: 21, 
iters: 218960, time: 0.577, data: 0.000) G_L1: 14.482 G_L1_ABSOLUTE: 1.966 G_L1_RELATIVE: 12.515 G_Regularizer: 0.000 validation_error: 20.016 +(epoch: 21, iters: 220960, time: 0.586, data: 0.000) G_L1: 13.491 G_L1_ABSOLUTE: 2.797 G_L1_RELATIVE: 10.694 G_Regularizer: 0.000 validation_error: 20.304 +(epoch: 21, iters: 222960, time: 0.570, data: 0.000) G_L1: 16.252 G_L1_ABSOLUTE: 2.716 G_L1_RELATIVE: 13.536 G_Regularizer: 0.000 validation_error: 20.205 +(epoch: 21, iters: 224960, time: 0.568, data: 0.000) G_L1: 14.137 G_L1_ABSOLUTE: 2.885 G_L1_RELATIVE: 11.252 G_Regularizer: 0.000 validation_error: 20.273 +(epoch: 21, iters: 226960, time: 0.568, data: 0.000) G_L1: 12.263 G_L1_ABSOLUTE: 2.509 G_L1_RELATIVE: 9.754 G_Regularizer: 0.000 validation_error: 21.229 +(epoch: 21, iters: 228960, time: 0.593, data: 0.000) G_L1: 14.767 G_L1_ABSOLUTE: 2.902 G_L1_RELATIVE: 11.865 G_Regularizer: 0.000 validation_error: 21.277 +(epoch: 21, iters: 230960, time: 0.579, data: 0.000) G_L1: 14.950 G_L1_ABSOLUTE: 2.629 G_L1_RELATIVE: 12.321 G_Regularizer: 0.000 validation_error: 20.281 +(epoch: 21, iters: 232960, time: 0.565, data: 0.000) G_L1: 17.562 G_L1_ABSOLUTE: 2.786 G_L1_RELATIVE: 14.776 G_Regularizer: 0.000 validation_error: 20.580 +(epoch: 21, iters: 234960, time: 0.577, data: 0.000) G_L1: 17.497 G_L1_ABSOLUTE: 2.633 G_L1_RELATIVE: 14.864 G_Regularizer: 0.000 validation_error: 19.722 +(epoch: 21, iters: 236960, time: 0.576, data: 0.000) G_L1: 15.994 G_L1_ABSOLUTE: 2.723 G_L1_RELATIVE: 13.271 G_Regularizer: 0.000 validation_error: 20.190 +(epoch: 21, iters: 238960, time: 0.572, data: 0.001) G_L1: 16.431 G_L1_ABSOLUTE: 3.000 G_L1_RELATIVE: 13.431 G_Regularizer: 0.000 validation_error: 20.234 +(epoch: 21, iters: 240960, time: 0.569, data: 0.000) G_L1: 13.958 G_L1_ABSOLUTE: 2.183 G_L1_RELATIVE: 11.775 G_Regularizer: 0.000 validation_error: 20.941 +(epoch: 21, iters: 242960, time: 0.569, data: 0.000) G_L1: 13.054 G_L1_ABSOLUTE: 2.392 G_L1_RELATIVE: 10.662 G_Regularizer: 0.000 validation_error: 21.270 +(epoch: 21, iters: 244960, time: 0.841, data: 0.000) G_L1: 14.084 G_L1_ABSOLUTE: 2.735 G_L1_RELATIVE: 11.348 G_Regularizer: 0.000 validation_error: 20.360 +(epoch: 21, iters: 246960, time: 0.570, data: 0.000) G_L1: 12.456 G_L1_ABSOLUTE: 2.193 G_L1_RELATIVE: 10.264 G_Regularizer: 0.000 validation_error: 20.157 +(epoch: 21, iters: 248960, time: 0.575, data: 0.000) G_L1: 13.866 G_L1_ABSOLUTE: 2.522 G_L1_RELATIVE: 11.344 G_Regularizer: 0.000 validation_error: 20.831 +(epoch: 21, iters: 250960, time: 0.575, data: 0.000) G_L1: 16.931 G_L1_ABSOLUTE: 2.707 G_L1_RELATIVE: 14.224 G_Regularizer: 0.000 validation_error: 20.745 +(epoch: 21, iters: 252960, time: 0.568, data: 0.000) G_L1: 21.860 G_L1_ABSOLUTE: 2.435 G_L1_RELATIVE: 19.425 G_Regularizer: 0.000 validation_error: 20.662 +(epoch: 21, iters: 254960, time: 0.573, data: 0.000) G_L1: 17.362 G_L1_ABSOLUTE: 2.973 G_L1_RELATIVE: 14.389 G_Regularizer: 0.000 validation_error: 20.383 +(epoch: 21, iters: 256960, time: 0.570, data: 0.000) G_L1: 12.647 G_L1_ABSOLUTE: 2.076 G_L1_RELATIVE: 10.570 G_Regularizer: 0.000 validation_error: 20.578 +(epoch: 21, iters: 258960, time: 0.577, data: 0.000) G_L1: 15.212 G_L1_ABSOLUTE: 2.941 G_L1_RELATIVE: 12.271 G_Regularizer: 0.000 validation_error: 20.739 +(epoch: 21, iters: 260960, time: 0.579, data: 0.000) G_L1: 13.113 G_L1_ABSOLUTE: 2.308 G_L1_RELATIVE: 10.805 G_Regularizer: 0.000 validation_error: 20.557 +(epoch: 21, iters: 262960, time: 0.567, data: 0.000) G_L1: 14.773 G_L1_ABSOLUTE: 2.379 G_L1_RELATIVE: 12.394 G_Regularizer: 0.000 
validation_error: 21.484 +(epoch: 21, iters: 264960, time: 0.573, data: 0.000) G_L1: 13.611 G_L1_ABSOLUTE: 3.133 G_L1_RELATIVE: 10.478 G_Regularizer: 0.000 validation_error: 21.320 +(epoch: 21, iters: 266960, time: 0.568, data: 0.000) G_L1: 15.283 G_L1_ABSOLUTE: 2.424 G_L1_RELATIVE: 12.859 G_Regularizer: 0.000 validation_error: 20.300 +(epoch: 21, iters: 268960, time: 0.566, data: 0.000) G_L1: 14.007 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 11.553 G_Regularizer: 0.000 validation_error: 20.614 +(epoch: 21, iters: 270960, time: 0.571, data: 0.000) G_L1: 12.995 G_L1_ABSOLUTE: 2.482 G_L1_RELATIVE: 10.513 G_Regularizer: 0.000 validation_error: 20.728 +(epoch: 21, iters: 272960, time: 0.588, data: 0.000) G_L1: 12.453 G_L1_ABSOLUTE: 2.577 G_L1_RELATIVE: 9.876 G_Regularizer: 0.000 validation_error: 21.083 +(epoch: 21, iters: 274960, time: 0.569, data: 0.000) G_L1: 12.002 G_L1_ABSOLUTE: 2.331 G_L1_RELATIVE: 9.671 G_Regularizer: 0.000 validation_error: 20.515 +(epoch: 21, iters: 276960, time: 0.568, data: 0.000) G_L1: 13.820 G_L1_ABSOLUTE: 2.197 G_L1_RELATIVE: 11.623 G_Regularizer: 0.000 validation_error: 20.532 +(epoch: 21, iters: 278960, time: 0.597, data: 0.000) G_L1: 14.299 G_L1_ABSOLUTE: 2.903 G_L1_RELATIVE: 11.396 G_Regularizer: 0.000 validation_error: 21.026 +(epoch: 21, iters: 280960, time: 0.580, data: 0.000) G_L1: 16.800 G_L1_ABSOLUTE: 2.595 G_L1_RELATIVE: 14.205 G_Regularizer: 0.000 validation_error: 20.584 +(epoch: 21, iters: 282960, time: 0.567, data: 0.000) G_L1: 16.754 G_L1_ABSOLUTE: 3.119 G_L1_RELATIVE: 13.635 G_Regularizer: 0.000 validation_error: 20.914 +(epoch: 21, iters: 284960, time: 0.573, data: 0.000) G_L1: 17.310 G_L1_ABSOLUTE: 2.849 G_L1_RELATIVE: 14.461 G_Regularizer: 0.000 validation_error: 21.149 +(epoch: 21, iters: 286960, time: 0.575, data: 0.000) G_L1: 16.583 G_L1_ABSOLUTE: 3.059 G_L1_RELATIVE: 13.524 G_Regularizer: 0.000 validation_error: 20.423 +(epoch: 21, iters: 288960, time: 0.569, data: 0.000) G_L1: 16.779 G_L1_ABSOLUTE: 2.341 G_L1_RELATIVE: 14.438 G_Regularizer: 0.000 validation_error: 20.482 +(epoch: 21, iters: 290960, time: 0.570, data: 0.000) G_L1: 22.240 G_L1_ABSOLUTE: 3.778 G_L1_RELATIVE: 18.461 G_Regularizer: 0.000 validation_error: 20.423 +(epoch: 21, iters: 292960, time: 0.574, data: 0.000) G_L1: 14.951 G_L1_ABSOLUTE: 2.291 G_L1_RELATIVE: 12.660 G_Regularizer: 0.000 validation_error: 20.524 +(epoch: 21, iters: 294960, time: 0.597, data: 0.000) G_L1: 13.017 G_L1_ABSOLUTE: 2.669 G_L1_RELATIVE: 10.347 G_Regularizer: 0.000 validation_error: 21.043 +(epoch: 21, iters: 296960, time: 0.578, data: 0.000) G_L1: 15.471 G_L1_ABSOLUTE: 2.530 G_L1_RELATIVE: 12.940 G_Regularizer: 0.000 validation_error: 21.172 +(epoch: 21, iters: 298960, time: 0.570, data: 0.000) G_L1: 17.117 G_L1_ABSOLUTE: 2.660 G_L1_RELATIVE: 14.457 G_Regularizer: 0.000 validation_error: 20.197 +(epoch: 21, iters: 300960, time: 0.572, data: 0.000) G_L1: 15.888 G_L1_ABSOLUTE: 2.776 G_L1_RELATIVE: 13.111 G_Regularizer: 0.000 validation_error: 20.555 +(epoch: 22, iters: 208, time: 0.581, data: 0.000) G_L1: 13.767 G_L1_ABSOLUTE: 2.683 G_L1_RELATIVE: 11.084 G_Regularizer: 0.000 validation_error: 20.404 +(epoch: 22, iters: 2208, time: 0.578, data: 0.000) G_L1: 11.624 G_L1_ABSOLUTE: 2.418 G_L1_RELATIVE: 9.206 G_Regularizer: 0.000 validation_error: 20.377 +(epoch: 22, iters: 4208, time: 0.570, data: 0.000) G_L1: 13.907 G_L1_ABSOLUTE: 2.508 G_L1_RELATIVE: 11.399 G_Regularizer: 0.000 validation_error: 20.513 +(epoch: 22, iters: 6208, time: 0.577, data: 0.000) G_L1: 15.842 G_L1_ABSOLUTE: 2.559 G_L1_RELATIVE: 13.282 
G_Regularizer: 0.000 validation_error: 20.711 +(epoch: 22, iters: 8208, time: 0.586, data: 0.001) G_L1: 13.831 G_L1_ABSOLUTE: 2.578 G_L1_RELATIVE: 11.253 G_Regularizer: 0.000 validation_error: 20.089 +(epoch: 22, iters: 10208, time: 0.569, data: 0.000) G_L1: 13.905 G_L1_ABSOLUTE: 2.771 G_L1_RELATIVE: 11.134 G_Regularizer: 0.000 validation_error: 20.664 +(epoch: 22, iters: 12208, time: 0.571, data: 0.000) G_L1: 14.782 G_L1_ABSOLUTE: 2.690 G_L1_RELATIVE: 12.092 G_Regularizer: 0.000 validation_error: 20.198 +(epoch: 22, iters: 14208, time: 0.574, data: 0.000) G_L1: 17.022 G_L1_ABSOLUTE: 2.935 G_L1_RELATIVE: 14.087 G_Regularizer: 0.000 validation_error: 20.714 +(epoch: 22, iters: 16208, time: 0.574, data: 0.000) G_L1: 12.987 G_L1_ABSOLUTE: 2.539 G_L1_RELATIVE: 10.448 G_Regularizer: 0.000 validation_error: 20.663 +(epoch: 22, iters: 18208, time: 0.570, data: 0.000) G_L1: 15.535 G_L1_ABSOLUTE: 3.347 G_L1_RELATIVE: 12.188 G_Regularizer: 0.000 validation_error: 20.937 +(epoch: 22, iters: 20208, time: 0.568, data: 0.000) G_L1: 14.662 G_L1_ABSOLUTE: 2.718 G_L1_RELATIVE: 11.944 G_Regularizer: 0.000 validation_error: 20.849 +(epoch: 22, iters: 22208, time: 0.571, data: 0.000) G_L1: 14.410 G_L1_ABSOLUTE: 2.670 G_L1_RELATIVE: 11.740 G_Regularizer: 0.000 validation_error: 20.125 +(epoch: 22, iters: 24208, time: 0.571, data: 0.000) G_L1: 14.574 G_L1_ABSOLUTE: 2.081 G_L1_RELATIVE: 12.493 G_Regularizer: 0.000 validation_error: 20.535 +(epoch: 22, iters: 26208, time: 0.572, data: 0.000) G_L1: 13.919 G_L1_ABSOLUTE: 2.250 G_L1_RELATIVE: 11.669 G_Regularizer: 0.000 validation_error: 21.064 +(epoch: 22, iters: 28208, time: 0.579, data: 0.000) G_L1: 16.660 G_L1_ABSOLUTE: 2.847 G_L1_RELATIVE: 13.813 G_Regularizer: 0.000 validation_error: 20.916 +(epoch: 22, iters: 30208, time: 0.581, data: 0.000) G_L1: 13.343 G_L1_ABSOLUTE: 2.414 G_L1_RELATIVE: 10.929 G_Regularizer: 0.000 validation_error: 20.825 +(epoch: 22, iters: 32208, time: 0.569, data: 0.000) G_L1: 16.373 G_L1_ABSOLUTE: 2.701 G_L1_RELATIVE: 13.673 G_Regularizer: 0.000 validation_error: 20.521 +(epoch: 22, iters: 34208, time: 0.572, data: 0.000) G_L1: 15.778 G_L1_ABSOLUTE: 2.646 G_L1_RELATIVE: 13.132 G_Regularizer: 0.000 validation_error: 20.795 +(epoch: 22, iters: 36208, time: 0.573, data: 0.000) G_L1: 18.115 G_L1_ABSOLUTE: 2.737 G_L1_RELATIVE: 15.378 G_Regularizer: 0.000 validation_error: 20.577 +(epoch: 22, iters: 38208, time: 0.577, data: 0.000) G_L1: 13.812 G_L1_ABSOLUTE: 2.750 G_L1_RELATIVE: 11.062 G_Regularizer: 0.000 validation_error: 21.010 +(epoch: 22, iters: 40208, time: 0.578, data: 0.000) G_L1: 15.650 G_L1_ABSOLUTE: 2.636 G_L1_RELATIVE: 13.014 G_Regularizer: 0.000 validation_error: 20.773 +(epoch: 22, iters: 42208, time: 0.596, data: 0.000) G_L1: 15.177 G_L1_ABSOLUTE: 2.784 G_L1_RELATIVE: 12.393 G_Regularizer: 0.000 validation_error: 20.303 +(epoch: 22, iters: 44208, time: 0.577, data: 0.000) G_L1: 13.245 G_L1_ABSOLUTE: 2.937 G_L1_RELATIVE: 10.308 G_Regularizer: 0.000 validation_error: 20.586 +(epoch: 22, iters: 46208, time: 0.565, data: 0.000) G_L1: 17.448 G_L1_ABSOLUTE: 2.736 G_L1_RELATIVE: 14.712 G_Regularizer: 0.000 validation_error: 20.407 +(epoch: 22, iters: 48208, time: 0.575, data: 0.000) G_L1: 14.781 G_L1_ABSOLUTE: 2.410 G_L1_RELATIVE: 12.371 G_Regularizer: 0.000 validation_error: 20.546 +(epoch: 22, iters: 50208, time: 0.570, data: 0.000) G_L1: 17.763 G_L1_ABSOLUTE: 2.660 G_L1_RELATIVE: 15.102 G_Regularizer: 0.000 validation_error: 20.562 +(epoch: 22, iters: 52208, time: 0.571, data: 0.000) G_L1: 14.533 G_L1_ABSOLUTE: 2.934 
G_L1_RELATIVE: 11.599 G_Regularizer: 0.000 validation_error: 20.583 +(epoch: 22, iters: 54208, time: 0.569, data: 0.000) G_L1: 16.015 G_L1_ABSOLUTE: 2.146 G_L1_RELATIVE: 13.869 G_Regularizer: 0.000 validation_error: 21.094 +(epoch: 22, iters: 56208, time: 0.585, data: 0.000) G_L1: 15.299 G_L1_ABSOLUTE: 2.734 G_L1_RELATIVE: 12.566 G_Regularizer: 0.000 validation_error: 20.790 +(epoch: 22, iters: 58208, time: 0.596, data: 0.000) G_L1: 16.195 G_L1_ABSOLUTE: 2.526 G_L1_RELATIVE: 13.669 G_Regularizer: 0.000 validation_error: 21.326 +(epoch: 22, iters: 60208, time: 0.578, data: 0.000) G_L1: 13.140 G_L1_ABSOLUTE: 2.685 G_L1_RELATIVE: 10.454 G_Regularizer: 0.000 validation_error: 20.581 +(epoch: 22, iters: 62208, time: 0.578, data: 0.000) G_L1: 15.618 G_L1_ABSOLUTE: 2.733 G_L1_RELATIVE: 12.885 G_Regularizer: 0.000 validation_error: 20.423 +(epoch: 22, iters: 64208, time: 0.564, data: 0.000) G_L1: 16.610 G_L1_ABSOLUTE: 2.599 G_L1_RELATIVE: 14.011 G_Regularizer: 0.000 validation_error: 20.450 +(epoch: 22, iters: 66208, time: 0.581, data: 0.000) G_L1: 13.698 G_L1_ABSOLUTE: 2.666 G_L1_RELATIVE: 11.032 G_Regularizer: 0.000 validation_error: 20.949 +(epoch: 22, iters: 68208, time: 0.579, data: 0.000) G_L1: 14.884 G_L1_ABSOLUTE: 3.019 G_L1_RELATIVE: 11.865 G_Regularizer: 0.000 validation_error: 20.385 +(epoch: 22, iters: 70208, time: 0.573, data: 0.000) G_L1: 16.106 G_L1_ABSOLUTE: 2.789 G_L1_RELATIVE: 13.316 G_Regularizer: 0.000 validation_error: 20.435 +(epoch: 22, iters: 72208, time: 0.580, data: 0.000) G_L1: 13.456 G_L1_ABSOLUTE: 2.715 G_L1_RELATIVE: 10.741 G_Regularizer: 0.000 validation_error: 21.499 +(epoch: 22, iters: 74208, time: 0.569, data: 0.000) G_L1: 14.068 G_L1_ABSOLUTE: 2.314 G_L1_RELATIVE: 11.754 G_Regularizer: 0.000 validation_error: 20.775 +(epoch: 22, iters: 76208, time: 0.571, data: 0.000) G_L1: 17.691 G_L1_ABSOLUTE: 2.982 G_L1_RELATIVE: 14.709 G_Regularizer: 0.000 validation_error: 21.433 +(epoch: 22, iters: 78208, time: 0.568, data: 0.000) G_L1: 14.402 G_L1_ABSOLUTE: 2.501 G_L1_RELATIVE: 11.901 G_Regularizer: 0.000 validation_error: 20.704 +(epoch: 22, iters: 80208, time: 0.576, data: 0.000) G_L1: 21.391 G_L1_ABSOLUTE: 3.044 G_L1_RELATIVE: 18.347 G_Regularizer: 0.000 validation_error: 21.178 +(epoch: 22, iters: 82208, time: 0.573, data: 0.000) G_L1: 15.181 G_L1_ABSOLUTE: 2.443 G_L1_RELATIVE: 12.738 G_Regularizer: 0.000 validation_error: 20.655 +(epoch: 22, iters: 84208, time: 0.568, data: 0.000) G_L1: 13.031 G_L1_ABSOLUTE: 2.240 G_L1_RELATIVE: 10.791 G_Regularizer: 0.000 validation_error: 20.645 +(epoch: 22, iters: 86208, time: 0.575, data: 0.000) G_L1: 15.170 G_L1_ABSOLUTE: 2.789 G_L1_RELATIVE: 12.381 G_Regularizer: 0.000 validation_error: 21.205 +(epoch: 22, iters: 88208, time: 0.569, data: 0.000) G_L1: 19.579 G_L1_ABSOLUTE: 2.938 G_L1_RELATIVE: 16.641 G_Regularizer: 0.000 validation_error: 21.381 +(epoch: 22, iters: 90208, time: 0.571, data: 0.000) G_L1: 16.322 G_L1_ABSOLUTE: 2.562 G_L1_RELATIVE: 13.760 G_Regularizer: 0.000 validation_error: 20.183 +(epoch: 22, iters: 92208, time: 0.598, data: 0.000) G_L1: 14.492 G_L1_ABSOLUTE: 2.829 G_L1_RELATIVE: 11.663 G_Regularizer: 0.000 validation_error: 20.870 +(epoch: 22, iters: 94208, time: 0.574, data: 0.000) G_L1: 16.454 G_L1_ABSOLUTE: 2.588 G_L1_RELATIVE: 13.866 G_Regularizer: 0.000 validation_error: 20.422 +(epoch: 22, iters: 96208, time: 0.563, data: 0.000) G_L1: 16.045 G_L1_ABSOLUTE: 2.707 G_L1_RELATIVE: 13.338 G_Regularizer: 0.000 validation_error: 21.268 +(epoch: 22, iters: 98208, time: 0.580, data: 0.000) G_L1: 15.223 
G_L1_ABSOLUTE: 2.745 G_L1_RELATIVE: 12.478 G_Regularizer: 0.000 validation_error: 21.412 +(epoch: 22, iters: 100208, time: 0.570, data: 0.000) G_L1: 16.753 G_L1_ABSOLUTE: 2.861 G_L1_RELATIVE: 13.892 G_Regularizer: 0.000 validation_error: 21.237 +(epoch: 22, iters: 102208, time: 0.571, data: 0.000) G_L1: 15.389 G_L1_ABSOLUTE: 2.870 G_L1_RELATIVE: 12.519 G_Regularizer: 0.000 validation_error: 21.167 +(epoch: 22, iters: 104208, time: 0.573, data: 0.000) G_L1: 14.735 G_L1_ABSOLUTE: 2.510 G_L1_RELATIVE: 12.225 G_Regularizer: 0.000 validation_error: 20.261 +(epoch: 22, iters: 106208, time: 0.576, data: 0.001) G_L1: 17.569 G_L1_ABSOLUTE: 2.708 G_L1_RELATIVE: 14.861 G_Regularizer: 0.000 validation_error: 20.466 +(epoch: 22, iters: 108208, time: 0.590, data: 0.000) G_L1: 14.956 G_L1_ABSOLUTE: 2.952 G_L1_RELATIVE: 12.004 G_Regularizer: 0.000 validation_error: 21.253 +(epoch: 22, iters: 110208, time: 0.563, data: 0.000) G_L1: 13.005 G_L1_ABSOLUTE: 2.647 G_L1_RELATIVE: 10.358 G_Regularizer: 0.000 validation_error: 20.804 +(epoch: 22, iters: 112208, time: 0.572, data: 0.000) G_L1: 14.331 G_L1_ABSOLUTE: 2.609 G_L1_RELATIVE: 11.722 G_Regularizer: 0.000 validation_error: 20.977 +(epoch: 22, iters: 114208, time: 0.568, data: 0.000) G_L1: 12.098 G_L1_ABSOLUTE: 2.984 G_L1_RELATIVE: 9.114 G_Regularizer: 0.000 validation_error: 21.206 +(epoch: 22, iters: 116208, time: 0.581, data: 0.000) G_L1: 14.154 G_L1_ABSOLUTE: 2.712 G_L1_RELATIVE: 11.442 G_Regularizer: 0.000 validation_error: 20.237 +(epoch: 22, iters: 118208, time: 0.576, data: 0.000) G_L1: 14.685 G_L1_ABSOLUTE: 2.569 G_L1_RELATIVE: 12.116 G_Regularizer: 0.000 validation_error: 20.396 +(epoch: 22, iters: 120208, time: 0.583, data: 0.000) G_L1: 15.159 G_L1_ABSOLUTE: 2.500 G_L1_RELATIVE: 12.659 G_Regularizer: 0.000 validation_error: 20.914 +(epoch: 22, iters: 122208, time: 0.570, data: 0.000) G_L1: 14.941 G_L1_ABSOLUTE: 2.271 G_L1_RELATIVE: 12.671 G_Regularizer: 0.000 validation_error: 20.482 +(epoch: 22, iters: 124208, time: 0.585, data: 0.001) G_L1: 14.165 G_L1_ABSOLUTE: 2.507 G_L1_RELATIVE: 11.658 G_Regularizer: 0.000 validation_error: 20.360 +(epoch: 22, iters: 126208, time: 0.570, data: 0.000) G_L1: 15.853 G_L1_ABSOLUTE: 3.241 G_L1_RELATIVE: 12.612 G_Regularizer: 0.000 validation_error: 20.387 +(epoch: 22, iters: 128208, time: 0.567, data: 0.000) G_L1: 15.820 G_L1_ABSOLUTE: 3.012 G_L1_RELATIVE: 12.808 G_Regularizer: 0.000 validation_error: 20.973 +(epoch: 22, iters: 130208, time: 0.579, data: 0.000) G_L1: 14.438 G_L1_ABSOLUTE: 2.584 G_L1_RELATIVE: 11.854 G_Regularizer: 0.000 validation_error: 20.220 +(epoch: 22, iters: 132208, time: 0.576, data: 0.000) G_L1: 15.887 G_L1_ABSOLUTE: 2.634 G_L1_RELATIVE: 13.252 G_Regularizer: 0.000 validation_error: 20.624 +(epoch: 22, iters: 134208, time: 0.570, data: 0.000) G_L1: 17.251 G_L1_ABSOLUTE: 2.603 G_L1_RELATIVE: 14.648 G_Regularizer: 0.000 validation_error: 20.650 +(epoch: 22, iters: 136208, time: 0.577, data: 0.000) G_L1: 15.337 G_L1_ABSOLUTE: 2.549 G_L1_RELATIVE: 12.788 G_Regularizer: 0.000 validation_error: 20.529 +(epoch: 22, iters: 138208, time: 0.575, data: 0.000) G_L1: 13.904 G_L1_ABSOLUTE: 2.670 G_L1_RELATIVE: 11.234 G_Regularizer: 0.000 validation_error: 21.091 +(epoch: 22, iters: 140208, time: 0.569, data: 0.000) G_L1: 13.335 G_L1_ABSOLUTE: 2.437 G_L1_RELATIVE: 10.898 G_Regularizer: 0.000 validation_error: 21.438 +(epoch: 22, iters: 142208, time: 0.574, data: 0.000) G_L1: 14.563 G_L1_ABSOLUTE: 2.714 G_L1_RELATIVE: 11.849 G_Regularizer: 0.000 validation_error: 20.052 +(epoch: 22, iters: 144208, time: 
0.573, data: 0.000) G_L1: 17.985 G_L1_ABSOLUTE: 2.514 G_L1_RELATIVE: 15.471 G_Regularizer: 0.000 validation_error: 20.891 +(epoch: 22, iters: 146208, time: 0.573, data: 0.000) G_L1: 13.857 G_L1_ABSOLUTE: 2.500 G_L1_RELATIVE: 11.357 G_Regularizer: 0.000 validation_error: 20.888 +(epoch: 22, iters: 148208, time: 0.570, data: 0.000) G_L1: 15.106 G_L1_ABSOLUTE: 2.674 G_L1_RELATIVE: 12.432 G_Regularizer: 0.000 validation_error: 19.957 +(epoch: 22, iters: 150208, time: 0.573, data: 0.000) G_L1: 14.317 G_L1_ABSOLUTE: 3.207 G_L1_RELATIVE: 11.110 G_Regularizer: 0.000 validation_error: 20.544 +(epoch: 22, iters: 152208, time: 0.572, data: 0.000) G_L1: 16.559 G_L1_ABSOLUTE: 3.074 G_L1_RELATIVE: 13.485 G_Regularizer: 0.000 validation_error: 20.841 +(epoch: 22, iters: 154208, time: 0.574, data: 0.000) G_L1: 15.326 G_L1_ABSOLUTE: 2.722 G_L1_RELATIVE: 12.605 G_Regularizer: 0.000 validation_error: 20.054 +(epoch: 22, iters: 156208, time: 0.569, data: 0.000) G_L1: 13.580 G_L1_ABSOLUTE: 2.671 G_L1_RELATIVE: 10.909 G_Regularizer: 0.000 validation_error: 20.284 +(epoch: 22, iters: 158208, time: 0.599, data: 0.000) G_L1: 15.024 G_L1_ABSOLUTE: 2.529 G_L1_RELATIVE: 12.495 G_Regularizer: 0.000 validation_error: 20.696 +(epoch: 22, iters: 160208, time: 0.576, data: 0.000) G_L1: 17.291 G_L1_ABSOLUTE: 2.872 G_L1_RELATIVE: 14.419 G_Regularizer: 0.000 validation_error: 20.231 +(epoch: 22, iters: 162208, time: 0.571, data: 0.000) G_L1: 14.548 G_L1_ABSOLUTE: 2.500 G_L1_RELATIVE: 12.047 G_Regularizer: 0.000 validation_error: 21.089 +(epoch: 22, iters: 164208, time: 0.567, data: 0.001) G_L1: 14.304 G_L1_ABSOLUTE: 2.458 G_L1_RELATIVE: 11.845 G_Regularizer: 0.000 validation_error: 20.469 +(epoch: 22, iters: 166208, time: 0.585, data: 0.000) G_L1: 13.681 G_L1_ABSOLUTE: 2.648 G_L1_RELATIVE: 11.033 G_Regularizer: 0.000 validation_error: 20.275 +(epoch: 22, iters: 168208, time: 0.578, data: 0.000) G_L1: 17.491 G_L1_ABSOLUTE: 3.211 G_L1_RELATIVE: 14.280 G_Regularizer: 0.000 validation_error: 20.382 +(epoch: 22, iters: 170208, time: 0.568, data: 0.000) G_L1: 16.265 G_L1_ABSOLUTE: 2.737 G_L1_RELATIVE: 13.528 G_Regularizer: 0.000 validation_error: 20.855 +(epoch: 22, iters: 172208, time: 0.572, data: 0.000) G_L1: 15.112 G_L1_ABSOLUTE: 2.228 G_L1_RELATIVE: 12.884 G_Regularizer: 0.000 validation_error: 20.878 +(epoch: 22, iters: 174208, time: 0.595, data: 0.000) G_L1: 15.892 G_L1_ABSOLUTE: 2.274 G_L1_RELATIVE: 13.619 G_Regularizer: 0.000 validation_error: 20.315 +(epoch: 22, iters: 176208, time: 0.571, data: 0.000) G_L1: 15.324 G_L1_ABSOLUTE: 2.517 G_L1_RELATIVE: 12.807 G_Regularizer: 0.000 validation_error: 20.197 +(epoch: 22, iters: 178208, time: 0.575, data: 0.000) G_L1: 14.730 G_L1_ABSOLUTE: 2.616 G_L1_RELATIVE: 12.115 G_Regularizer: 0.000 validation_error: 21.161 +(epoch: 22, iters: 180208, time: 0.566, data: 0.000) G_L1: 14.645 G_L1_ABSOLUTE: 2.510 G_L1_RELATIVE: 12.135 G_Regularizer: 0.000 validation_error: 21.628 +(epoch: 22, iters: 182208, time: 0.585, data: 0.000) G_L1: 14.469 G_L1_ABSOLUTE: 2.841 G_L1_RELATIVE: 11.628 G_Regularizer: 0.000 validation_error: 20.338 +(epoch: 22, iters: 184208, time: 0.573, data: 0.000) G_L1: 15.051 G_L1_ABSOLUTE: 2.596 G_L1_RELATIVE: 12.455 G_Regularizer: 0.000 validation_error: 20.761 +(epoch: 22, iters: 186208, time: 0.577, data: 0.001) G_L1: 14.330 G_L1_ABSOLUTE: 2.601 G_L1_RELATIVE: 11.729 G_Regularizer: 0.000 validation_error: 21.162 +(epoch: 22, iters: 188208, time: 0.571, data: 0.000) G_L1: 16.372 G_L1_ABSOLUTE: 3.071 G_L1_RELATIVE: 13.301 G_Regularizer: 0.000 validation_error: 20.983 
+(epoch: 22, iters: 190208, time: 0.585, data: 0.000) G_L1: 17.225 G_L1_ABSOLUTE: 2.806 G_L1_RELATIVE: 14.418 G_Regularizer: 0.000 validation_error: 20.821
[~450 further loss_log records elided: one record every 2000 iters through epochs 22–25 (~302k iters per epoch), each of the form above; across this span G_L1 fluctuates between ~11.0 and ~23.8 with no clear downward trend, G_L1_ABSOLUTE stays within ~1.9–3.6, G_L1_RELATIVE within ~8.6–21.2, G_Regularizer is 0.000 throughout, validation_error oscillates between ~19.8 and ~22.1, and per-iteration time holds at ~0.54–0.59 s]
+(epoch: 25, iters: 191952, time: 0.545, data: 0.000) G_L1: 16.370 G_L1_ABSOLUTE: 2.539 G_L1_RELATIVE: 13.831 G_Regularizer: 0.000 validation_error: 20.635
+(epoch: 25, iters: 193952, time:
0.544, data: 0.000) G_L1: 12.403 G_L1_ABSOLUTE: 2.183 G_L1_RELATIVE: 10.220 G_Regularizer: 0.000 validation_error: 20.006 +(epoch: 25, iters: 195952, time: 0.544, data: 0.000) G_L1: 12.550 G_L1_ABSOLUTE: 2.285 G_L1_RELATIVE: 10.265 G_Regularizer: 0.000 validation_error: 20.427 +(epoch: 25, iters: 197952, time: 0.541, data: 0.000) G_L1: 15.400 G_L1_ABSOLUTE: 2.590 G_L1_RELATIVE: 12.810 G_Regularizer: 0.000 validation_error: 20.943 +(epoch: 25, iters: 199952, time: 0.548, data: 0.000) G_L1: 13.333 G_L1_ABSOLUTE: 2.686 G_L1_RELATIVE: 10.647 G_Regularizer: 0.000 validation_error: 20.598 +(epoch: 25, iters: 201952, time: 0.545, data: 0.000) G_L1: 12.835 G_L1_ABSOLUTE: 2.763 G_L1_RELATIVE: 10.072 G_Regularizer: 0.000 validation_error: 21.254 +(epoch: 25, iters: 203952, time: 0.543, data: 0.000) G_L1: 15.029 G_L1_ABSOLUTE: 2.823 G_L1_RELATIVE: 12.206 G_Regularizer: 0.000 validation_error: 20.956 +(epoch: 25, iters: 205952, time: 0.546, data: 0.000) G_L1: 15.837 G_L1_ABSOLUTE: 2.481 G_L1_RELATIVE: 13.356 G_Regularizer: 0.000 validation_error: 20.686 +(epoch: 25, iters: 207952, time: 0.549, data: 0.000) G_L1: 19.813 G_L1_ABSOLUTE: 2.868 G_L1_RELATIVE: 16.946 G_Regularizer: 0.000 validation_error: 20.865 +(epoch: 25, iters: 209952, time: 0.545, data: 0.000) G_L1: 15.073 G_L1_ABSOLUTE: 2.894 G_L1_RELATIVE: 12.178 G_Regularizer: 0.000 validation_error: 20.342 +(epoch: 25, iters: 211952, time: 0.553, data: 0.000) G_L1: 16.697 G_L1_ABSOLUTE: 2.776 G_L1_RELATIVE: 13.921 G_Regularizer: 0.000 validation_error: 21.111 +(epoch: 25, iters: 213952, time: 0.553, data: 0.000) G_L1: 12.403 G_L1_ABSOLUTE: 2.352 G_L1_RELATIVE: 10.051 G_Regularizer: 0.000 validation_error: 20.069 +(epoch: 25, iters: 215952, time: 0.550, data: 0.000) G_L1: 13.453 G_L1_ABSOLUTE: 2.459 G_L1_RELATIVE: 10.994 G_Regularizer: 0.000 validation_error: 21.216 +(epoch: 25, iters: 217952, time: 0.546, data: 0.000) G_L1: 13.543 G_L1_ABSOLUTE: 2.566 G_L1_RELATIVE: 10.977 G_Regularizer: 0.000 validation_error: 20.594 +(epoch: 25, iters: 219952, time: 0.541, data: 0.000) G_L1: 14.260 G_L1_ABSOLUTE: 2.128 G_L1_RELATIVE: 12.132 G_Regularizer: 0.000 validation_error: 21.248 +(epoch: 25, iters: 221952, time: 0.543, data: 0.000) G_L1: 15.988 G_L1_ABSOLUTE: 2.613 G_L1_RELATIVE: 13.375 G_Regularizer: 0.000 validation_error: 20.606 +(epoch: 25, iters: 223952, time: 0.539, data: 0.000) G_L1: 13.488 G_L1_ABSOLUTE: 2.625 G_L1_RELATIVE: 10.863 G_Regularizer: 0.000 validation_error: 20.608 +(epoch: 25, iters: 225952, time: 0.548, data: 0.000) G_L1: 14.501 G_L1_ABSOLUTE: 2.778 G_L1_RELATIVE: 11.723 G_Regularizer: 0.000 validation_error: 20.549 +(epoch: 25, iters: 227952, time: 0.542, data: 0.000) G_L1: 14.266 G_L1_ABSOLUTE: 2.698 G_L1_RELATIVE: 11.567 G_Regularizer: 0.000 validation_error: 20.583 +(epoch: 25, iters: 229952, time: 0.544, data: 0.000) G_L1: 15.227 G_L1_ABSOLUTE: 2.186 G_L1_RELATIVE: 13.041 G_Regularizer: 0.000 validation_error: 20.238 +(epoch: 25, iters: 231952, time: 0.550, data: 0.000) G_L1: 14.301 G_L1_ABSOLUTE: 2.693 G_L1_RELATIVE: 11.608 G_Regularizer: 0.000 validation_error: 20.903 +(epoch: 25, iters: 233952, time: 0.537, data: 0.000) G_L1: 19.735 G_L1_ABSOLUTE: 2.748 G_L1_RELATIVE: 16.987 G_Regularizer: 0.000 validation_error: 21.341 +(epoch: 25, iters: 235952, time: 0.542, data: 0.000) G_L1: 12.368 G_L1_ABSOLUTE: 2.306 G_L1_RELATIVE: 10.062 G_Regularizer: 0.000 validation_error: 20.566 +(epoch: 25, iters: 237952, time: 0.548, data: 0.001) G_L1: 15.184 G_L1_ABSOLUTE: 2.595 G_L1_RELATIVE: 12.589 G_Regularizer: 0.000 validation_error: 20.610 
+(epoch: 25, iters: 239952, time: 0.541, data: 0.000) G_L1: 14.809 G_L1_ABSOLUTE: 2.765 G_L1_RELATIVE: 12.044 G_Regularizer: 0.000 validation_error: 21.258 +(epoch: 25, iters: 241952, time: 0.539, data: 0.000) G_L1: 13.245 G_L1_ABSOLUTE: 2.532 G_L1_RELATIVE: 10.713 G_Regularizer: 0.000 validation_error: 20.976 +(epoch: 25, iters: 243952, time: 0.548, data: 0.000) G_L1: 14.554 G_L1_ABSOLUTE: 2.661 G_L1_RELATIVE: 11.893 G_Regularizer: 0.000 validation_error: 20.855 +(epoch: 25, iters: 245952, time: 0.548, data: 0.000) G_L1: 12.945 G_L1_ABSOLUTE: 2.155 G_L1_RELATIVE: 10.790 G_Regularizer: 0.000 validation_error: 20.764 +(epoch: 25, iters: 247952, time: 0.541, data: 0.000) G_L1: 10.626 G_L1_ABSOLUTE: 2.282 G_L1_RELATIVE: 8.344 G_Regularizer: 0.000 validation_error: 20.536 +(epoch: 25, iters: 249952, time: 0.548, data: 0.000) G_L1: 13.284 G_L1_ABSOLUTE: 2.019 G_L1_RELATIVE: 11.265 G_Regularizer: 0.000 validation_error: 20.339 +(epoch: 25, iters: 251952, time: 0.540, data: 0.000) G_L1: 14.619 G_L1_ABSOLUTE: 2.805 G_L1_RELATIVE: 11.814 G_Regularizer: 0.000 validation_error: 21.218 +(epoch: 25, iters: 253952, time: 0.548, data: 0.000) G_L1: 11.994 G_L1_ABSOLUTE: 2.052 G_L1_RELATIVE: 9.942 G_Regularizer: 0.000 validation_error: 20.401 +(epoch: 25, iters: 255952, time: 0.545, data: 0.000) G_L1: 15.365 G_L1_ABSOLUTE: 2.722 G_L1_RELATIVE: 12.643 G_Regularizer: 0.000 validation_error: 20.283 +(epoch: 25, iters: 257952, time: 0.537, data: 0.000) G_L1: 14.425 G_L1_ABSOLUTE: 3.009 G_L1_RELATIVE: 11.416 G_Regularizer: 0.000 validation_error: 20.942 +(epoch: 25, iters: 259952, time: 0.546, data: 0.000) G_L1: 13.229 G_L1_ABSOLUTE: 2.098 G_L1_RELATIVE: 11.131 G_Regularizer: 0.000 validation_error: 20.186 +(epoch: 25, iters: 261952, time: 0.543, data: 0.000) G_L1: 13.845 G_L1_ABSOLUTE: 2.481 G_L1_RELATIVE: 11.364 G_Regularizer: 0.000 validation_error: 20.837 +(epoch: 25, iters: 263952, time: 0.547, data: 0.000) G_L1: 14.886 G_L1_ABSOLUTE: 2.498 G_L1_RELATIVE: 12.388 G_Regularizer: 0.000 validation_error: 20.093 +(epoch: 25, iters: 265952, time: 0.549, data: 0.000) G_L1: 14.481 G_L1_ABSOLUTE: 2.403 G_L1_RELATIVE: 12.078 G_Regularizer: 0.000 validation_error: 20.432 +(epoch: 25, iters: 267952, time: 0.537, data: 0.000) G_L1: 16.035 G_L1_ABSOLUTE: 2.696 G_L1_RELATIVE: 13.340 G_Regularizer: 0.000 validation_error: 20.377 +(epoch: 25, iters: 269952, time: 0.537, data: 0.000) G_L1: 17.378 G_L1_ABSOLUTE: 2.839 G_L1_RELATIVE: 14.539 G_Regularizer: 0.000 validation_error: 19.787 +(epoch: 25, iters: 271952, time: 0.545, data: 0.000) G_L1: 14.124 G_L1_ABSOLUTE: 2.466 G_L1_RELATIVE: 11.658 G_Regularizer: 0.000 validation_error: 20.638 +(epoch: 25, iters: 273952, time: 0.557, data: 0.000) G_L1: 13.374 G_L1_ABSOLUTE: 2.975 G_L1_RELATIVE: 10.399 G_Regularizer: 0.000 validation_error: 21.318 +(epoch: 25, iters: 275952, time: 0.550, data: 0.000) G_L1: 12.762 G_L1_ABSOLUTE: 2.244 G_L1_RELATIVE: 10.518 G_Regularizer: 0.000 validation_error: 20.660 +(epoch: 25, iters: 277952, time: 0.547, data: 0.000) G_L1: 16.992 G_L1_ABSOLUTE: 2.624 G_L1_RELATIVE: 14.367 G_Regularizer: 0.000 validation_error: 20.382 +(epoch: 25, iters: 279952, time: 0.545, data: 0.000) G_L1: 13.341 G_L1_ABSOLUTE: 2.505 G_L1_RELATIVE: 10.835 G_Regularizer: 0.000 validation_error: 20.961 +(epoch: 25, iters: 281952, time: 0.543, data: 0.000) G_L1: 14.834 G_L1_ABSOLUTE: 2.468 G_L1_RELATIVE: 12.366 G_Regularizer: 0.000 validation_error: 20.883 +(epoch: 25, iters: 283952, time: 0.543, data: 0.000) G_L1: 17.367 G_L1_ABSOLUTE: 2.491 G_L1_RELATIVE: 14.876 G_Regularizer: 
0.000 validation_error: 20.432 +(epoch: 25, iters: 285952, time: 0.546, data: 0.000) G_L1: 17.617 G_L1_ABSOLUTE: 2.546 G_L1_RELATIVE: 15.071 G_Regularizer: 0.000 validation_error: 20.355 +(epoch: 25, iters: 287952, time: 0.550, data: 0.000) G_L1: 14.524 G_L1_ABSOLUTE: 2.574 G_L1_RELATIVE: 11.950 G_Regularizer: 0.000 validation_error: 21.399 +(epoch: 25, iters: 289952, time: 0.538, data: 0.000) G_L1: 16.944 G_L1_ABSOLUTE: 2.900 G_L1_RELATIVE: 14.044 G_Regularizer: 0.000 validation_error: 21.237 +(epoch: 25, iters: 291952, time: 0.551, data: 0.000) G_L1: 13.965 G_L1_ABSOLUTE: 2.981 G_L1_RELATIVE: 10.984 G_Regularizer: 0.000 validation_error: 20.345 +(epoch: 25, iters: 293952, time: 0.546, data: 0.000) G_L1: 16.071 G_L1_ABSOLUTE: 2.879 G_L1_RELATIVE: 13.193 G_Regularizer: 0.000 validation_error: 21.338 +(epoch: 25, iters: 295952, time: 0.538, data: 0.000) G_L1: 14.207 G_L1_ABSOLUTE: 2.905 G_L1_RELATIVE: 11.302 G_Regularizer: 0.000 validation_error: 20.900 +(epoch: 25, iters: 297952, time: 0.550, data: 0.000) G_L1: 14.575 G_L1_ABSOLUTE: 2.334 G_L1_RELATIVE: 12.241 G_Regularizer: 0.000 validation_error: 21.365 +(epoch: 25, iters: 299952, time: 0.542, data: 0.000) G_L1: 15.937 G_L1_ABSOLUTE: 3.362 G_L1_RELATIVE: 12.575 G_Regularizer: 0.000 validation_error: 20.360 +(epoch: 25, iters: 301952, time: 0.541, data: 0.000) G_L1: 11.934 G_L1_ABSOLUTE: 2.216 G_L1_RELATIVE: 9.718 G_Regularizer: 0.000 validation_error: 20.271 +(epoch: 26, iters: 1200, time: 0.547, data: 0.001) G_L1: 17.731 G_L1_ABSOLUTE: 2.459 G_L1_RELATIVE: 15.272 G_Regularizer: 0.000 validation_error: 20.519 +(epoch: 26, iters: 3200, time: 0.543, data: 0.000) G_L1: 13.515 G_L1_ABSOLUTE: 2.788 G_L1_RELATIVE: 10.728 G_Regularizer: 0.000 validation_error: 20.033 +(epoch: 26, iters: 5200, time: 0.551, data: 0.000) G_L1: 15.812 G_L1_ABSOLUTE: 2.442 G_L1_RELATIVE: 13.369 G_Regularizer: 0.000 validation_error: 20.323 +(epoch: 26, iters: 7200, time: 0.546, data: 0.000) G_L1: 11.655 G_L1_ABSOLUTE: 2.727 G_L1_RELATIVE: 8.928 G_Regularizer: 0.000 validation_error: 20.559 +(epoch: 26, iters: 9200, time: 0.546, data: 0.000) G_L1: 12.563 G_L1_ABSOLUTE: 2.284 G_L1_RELATIVE: 10.279 G_Regularizer: 0.000 validation_error: 20.309 +(epoch: 26, iters: 11200, time: 0.542, data: 0.000) G_L1: 14.576 G_L1_ABSOLUTE: 2.379 G_L1_RELATIVE: 12.197 G_Regularizer: 0.000 validation_error: 20.699 +(epoch: 26, iters: 13200, time: 0.545, data: 0.000) G_L1: 14.302 G_L1_ABSOLUTE: 2.661 G_L1_RELATIVE: 11.641 G_Regularizer: 0.000 validation_error: 20.191 +(epoch: 26, iters: 15200, time: 0.541, data: 0.000) G_L1: 14.633 G_L1_ABSOLUTE: 2.885 G_L1_RELATIVE: 11.747 G_Regularizer: 0.000 validation_error: 20.184 +(epoch: 26, iters: 17200, time: 0.540, data: 0.000) G_L1: 13.829 G_L1_ABSOLUTE: 2.584 G_L1_RELATIVE: 11.245 G_Regularizer: 0.000 validation_error: 20.689 +(epoch: 26, iters: 19200, time: 0.548, data: 0.000) G_L1: 16.028 G_L1_ABSOLUTE: 3.242 G_L1_RELATIVE: 12.785 G_Regularizer: 0.000 validation_error: 20.394 +(epoch: 26, iters: 21200, time: 0.544, data: 0.000) G_L1: 11.484 G_L1_ABSOLUTE: 2.245 G_L1_RELATIVE: 9.239 G_Regularizer: 0.000 validation_error: 20.894 +(epoch: 26, iters: 23200, time: 0.552, data: 0.000) G_L1: 11.320 G_L1_ABSOLUTE: 2.621 G_L1_RELATIVE: 8.698 G_Regularizer: 0.000 validation_error: 21.237 +(epoch: 26, iters: 25200, time: 0.541, data: 0.000) G_L1: 15.500 G_L1_ABSOLUTE: 2.915 G_L1_RELATIVE: 12.586 G_Regularizer: 0.000 validation_error: 20.941 +(epoch: 26, iters: 27200, time: 0.541, data: 0.000) G_L1: 13.222 G_L1_ABSOLUTE: 2.308 G_L1_RELATIVE: 10.914 
G_Regularizer: 0.000 validation_error: 21.151 +(epoch: 26, iters: 29200, time: 0.546, data: 0.000) G_L1: 14.034 G_L1_ABSOLUTE: 2.018 G_L1_RELATIVE: 12.016 G_Regularizer: 0.000 validation_error: 20.792 +(epoch: 26, iters: 31200, time: 0.555, data: 0.000) G_L1: 15.462 G_L1_ABSOLUTE: 2.550 G_L1_RELATIVE: 12.912 G_Regularizer: 0.000 validation_error: 20.811 +(epoch: 26, iters: 33200, time: 0.541, data: 0.000) G_L1: 16.336 G_L1_ABSOLUTE: 2.834 G_L1_RELATIVE: 13.502 G_Regularizer: 0.000 validation_error: 20.698 +(epoch: 26, iters: 35200, time: 0.546, data: 0.000) G_L1: 15.006 G_L1_ABSOLUTE: 2.756 G_L1_RELATIVE: 12.250 G_Regularizer: 0.000 validation_error: 20.639 +(epoch: 26, iters: 37200, time: 0.551, data: 0.000) G_L1: 15.101 G_L1_ABSOLUTE: 2.329 G_L1_RELATIVE: 12.773 G_Regularizer: 0.000 validation_error: 20.779 +(epoch: 26, iters: 39200, time: 0.550, data: 0.000) G_L1: 12.957 G_L1_ABSOLUTE: 2.266 G_L1_RELATIVE: 10.691 G_Regularizer: 0.000 validation_error: 19.895 +(epoch: 26, iters: 41200, time: 0.545, data: 0.000) G_L1: 14.701 G_L1_ABSOLUTE: 2.525 G_L1_RELATIVE: 12.176 G_Regularizer: 0.000 validation_error: 20.608 +(epoch: 26, iters: 43200, time: 0.540, data: 0.000) G_L1: 18.566 G_L1_ABSOLUTE: 2.664 G_L1_RELATIVE: 15.902 G_Regularizer: 0.000 validation_error: 21.293 +(epoch: 26, iters: 45200, time: 0.547, data: 0.000) G_L1: 12.798 G_L1_ABSOLUTE: 2.291 G_L1_RELATIVE: 10.506 G_Regularizer: 0.000 validation_error: 20.735 +(epoch: 26, iters: 47200, time: 0.538, data: 0.000) G_L1: 14.607 G_L1_ABSOLUTE: 2.481 G_L1_RELATIVE: 12.126 G_Regularizer: 0.000 validation_error: 21.059 +(epoch: 26, iters: 49200, time: 0.550, data: 0.000) G_L1: 15.208 G_L1_ABSOLUTE: 2.167 G_L1_RELATIVE: 13.041 G_Regularizer: 0.000 validation_error: 21.204 +(epoch: 26, iters: 51200, time: 0.542, data: 0.000) G_L1: 16.370 G_L1_ABSOLUTE: 2.374 G_L1_RELATIVE: 13.996 G_Regularizer: 0.000 validation_error: 20.775 +(epoch: 26, iters: 53200, time: 0.550, data: 0.000) G_L1: 15.528 G_L1_ABSOLUTE: 2.841 G_L1_RELATIVE: 12.687 G_Regularizer: 0.000 validation_error: 20.594 +(epoch: 26, iters: 55200, time: 0.555, data: 0.000) G_L1: 14.653 G_L1_ABSOLUTE: 2.296 G_L1_RELATIVE: 12.357 G_Regularizer: 0.000 validation_error: 20.162 +(epoch: 26, iters: 57200, time: 0.547, data: 0.000) G_L1: 15.623 G_L1_ABSOLUTE: 2.430 G_L1_RELATIVE: 13.193 G_Regularizer: 0.000 validation_error: 20.562 +(epoch: 26, iters: 59200, time: 0.544, data: 0.000) G_L1: 12.558 G_L1_ABSOLUTE: 2.286 G_L1_RELATIVE: 10.271 G_Regularizer: 0.000 validation_error: 20.936 +(epoch: 26, iters: 61200, time: 0.540, data: 0.000) G_L1: 15.960 G_L1_ABSOLUTE: 2.491 G_L1_RELATIVE: 13.469 G_Regularizer: 0.000 validation_error: 20.636 +(epoch: 26, iters: 63200, time: 0.547, data: 0.000) G_L1: 15.280 G_L1_ABSOLUTE: 2.254 G_L1_RELATIVE: 13.026 G_Regularizer: 0.000 validation_error: 20.751 +(epoch: 26, iters: 65200, time: 0.540, data: 0.000) G_L1: 12.284 G_L1_ABSOLUTE: 2.630 G_L1_RELATIVE: 9.655 G_Regularizer: 0.000 validation_error: 20.516 +(epoch: 26, iters: 67200, time: 0.540, data: 0.000) G_L1: 13.738 G_L1_ABSOLUTE: 2.277 G_L1_RELATIVE: 11.461 G_Regularizer: 0.000 validation_error: 21.355 +(epoch: 26, iters: 69200, time: 0.545, data: 0.000) G_L1: 14.205 G_L1_ABSOLUTE: 2.992 G_L1_RELATIVE: 11.213 G_Regularizer: 0.000 validation_error: 20.600 +(epoch: 26, iters: 71200, time: 0.550, data: 0.000) G_L1: 14.195 G_L1_ABSOLUTE: 2.489 G_L1_RELATIVE: 11.706 G_Regularizer: 0.000 validation_error: 20.331 +(epoch: 26, iters: 73200, time: 0.542, data: 0.000) G_L1: 15.752 G_L1_ABSOLUTE: 2.697 
G_L1_RELATIVE: 13.055 G_Regularizer: 0.000 validation_error: 20.967 +(epoch: 26, iters: 75200, time: 0.556, data: 0.000) G_L1: 16.121 G_L1_ABSOLUTE: 2.603 G_L1_RELATIVE: 13.518 G_Regularizer: 0.000 validation_error: 20.241 +(epoch: 26, iters: 77200, time: 0.538, data: 0.000) G_L1: 14.030 G_L1_ABSOLUTE: 2.775 G_L1_RELATIVE: 11.256 G_Regularizer: 0.000 validation_error: 20.872 +(epoch: 26, iters: 79200, time: 0.540, data: 0.000) G_L1: 16.021 G_L1_ABSOLUTE: 2.665 G_L1_RELATIVE: 13.357 G_Regularizer: 0.000 validation_error: 20.457 +(epoch: 26, iters: 81200, time: 0.551, data: 0.000) G_L1: 14.119 G_L1_ABSOLUTE: 2.679 G_L1_RELATIVE: 11.440 G_Regularizer: 0.000 validation_error: 20.575 +(epoch: 26, iters: 83200, time: 0.550, data: 0.000) G_L1: 15.805 G_L1_ABSOLUTE: 2.720 G_L1_RELATIVE: 13.086 G_Regularizer: 0.000 validation_error: 20.421 +(epoch: 26, iters: 85200, time: 0.546, data: 0.000) G_L1: 11.093 G_L1_ABSOLUTE: 2.556 G_L1_RELATIVE: 8.537 G_Regularizer: 0.000 validation_error: 20.825 +(epoch: 26, iters: 87200, time: 0.542, data: 0.000) G_L1: 15.693 G_L1_ABSOLUTE: 2.469 G_L1_RELATIVE: 13.224 G_Regularizer: 0.000 validation_error: 20.338 +(epoch: 26, iters: 89200, time: 0.548, data: 0.000) G_L1: 17.452 G_L1_ABSOLUTE: 2.710 G_L1_RELATIVE: 14.742 G_Regularizer: 0.000 validation_error: 20.403 +(epoch: 26, iters: 91200, time: 0.550, data: 0.000) G_L1: 15.264 G_L1_ABSOLUTE: 2.215 G_L1_RELATIVE: 13.049 G_Regularizer: 0.000 validation_error: 20.443 +(epoch: 26, iters: 93200, time: 0.543, data: 0.000) G_L1: 14.834 G_L1_ABSOLUTE: 2.507 G_L1_RELATIVE: 12.327 G_Regularizer: 0.000 validation_error: 20.858 +(epoch: 26, iters: 95200, time: 0.549, data: 0.001) G_L1: 14.425 G_L1_ABSOLUTE: 2.700 G_L1_RELATIVE: 11.725 G_Regularizer: 0.000 validation_error: 21.058 +(epoch: 26, iters: 97200, time: 0.543, data: 0.000) G_L1: 16.007 G_L1_ABSOLUTE: 2.644 G_L1_RELATIVE: 13.363 G_Regularizer: 0.000 validation_error: 20.438 +(epoch: 26, iters: 99200, time: 0.546, data: 0.000) G_L1: 14.259 G_L1_ABSOLUTE: 2.282 G_L1_RELATIVE: 11.978 G_Regularizer: 0.000 validation_error: 20.168 +(epoch: 26, iters: 101200, time: 0.555, data: 0.001) G_L1: 14.692 G_L1_ABSOLUTE: 2.402 G_L1_RELATIVE: 12.290 G_Regularizer: 0.000 validation_error: 20.213 +(epoch: 26, iters: 103200, time: 0.544, data: 0.000) G_L1: 17.673 G_L1_ABSOLUTE: 2.554 G_L1_RELATIVE: 15.119 G_Regularizer: 0.000 validation_error: 20.747 +(epoch: 26, iters: 105200, time: 0.545, data: 0.000) G_L1: 14.042 G_L1_ABSOLUTE: 2.457 G_L1_RELATIVE: 11.585 G_Regularizer: 0.000 validation_error: 21.099 +(epoch: 26, iters: 107200, time: 0.544, data: 0.000) G_L1: 13.302 G_L1_ABSOLUTE: 2.744 G_L1_RELATIVE: 10.558 G_Regularizer: 0.000 validation_error: 20.619 +(epoch: 26, iters: 109200, time: 0.551, data: 0.000) G_L1: 14.729 G_L1_ABSOLUTE: 2.422 G_L1_RELATIVE: 12.307 G_Regularizer: 0.000 validation_error: 20.875 +(epoch: 26, iters: 111200, time: 0.544, data: 0.000) G_L1: 15.444 G_L1_ABSOLUTE: 2.850 G_L1_RELATIVE: 12.594 G_Regularizer: 0.000 validation_error: 20.637 +(epoch: 26, iters: 113200, time: 0.547, data: 0.000) G_L1: 14.586 G_L1_ABSOLUTE: 3.393 G_L1_RELATIVE: 11.193 G_Regularizer: 0.000 validation_error: 20.563 +(epoch: 26, iters: 115200, time: 0.556, data: 0.000) G_L1: 14.339 G_L1_ABSOLUTE: 2.791 G_L1_RELATIVE: 11.549 G_Regularizer: 0.000 validation_error: 20.696 +(epoch: 26, iters: 117200, time: 0.539, data: 0.000) G_L1: 12.470 G_L1_ABSOLUTE: 2.096 G_L1_RELATIVE: 10.374 G_Regularizer: 0.000 validation_error: 20.576 +(epoch: 26, iters: 119200, time: 0.539, data: 0.000) G_L1: 28.228 
G_L1_ABSOLUTE: 2.835 G_L1_RELATIVE: 25.394 G_Regularizer: 0.000 validation_error: 20.784 +(epoch: 26, iters: 121200, time: 0.546, data: 0.000) G_L1: 14.364 G_L1_ABSOLUTE: 2.700 G_L1_RELATIVE: 11.664 G_Regularizer: 0.000 validation_error: 20.858 +(epoch: 26, iters: 123200, time: 0.553, data: 0.000) G_L1: 13.052 G_L1_ABSOLUTE: 2.493 G_L1_RELATIVE: 10.559 G_Regularizer: 0.000 validation_error: 20.460 +(epoch: 26, iters: 125200, time: 0.548, data: 0.000) G_L1: 15.790 G_L1_ABSOLUTE: 2.815 G_L1_RELATIVE: 12.975 G_Regularizer: 0.000 validation_error: 20.774 +(epoch: 26, iters: 127200, time: 0.541, data: 0.000) G_L1: 26.302 G_L1_ABSOLUTE: 2.775 G_L1_RELATIVE: 23.527 G_Regularizer: 0.000 validation_error: 21.077 +(epoch: 26, iters: 129200, time: 0.542, data: 0.000) G_L1: 15.641 G_L1_ABSOLUTE: 2.612 G_L1_RELATIVE: 13.029 G_Regularizer: 0.000 validation_error: 21.323 +(epoch: 26, iters: 131200, time: 0.545, data: 0.000) G_L1: 15.445 G_L1_ABSOLUTE: 2.667 G_L1_RELATIVE: 12.778 G_Regularizer: 0.000 validation_error: 21.085 +(epoch: 26, iters: 133200, time: 0.541, data: 0.000) G_L1: 15.923 G_L1_ABSOLUTE: 2.757 G_L1_RELATIVE: 13.166 G_Regularizer: 0.000 validation_error: 20.881 +(epoch: 26, iters: 135200, time: 0.543, data: 0.000) G_L1: 13.783 G_L1_ABSOLUTE: 2.726 G_L1_RELATIVE: 11.057 G_Regularizer: 0.000 validation_error: 20.563 +(epoch: 26, iters: 137200, time: 0.543, data: 0.000) G_L1: 12.564 G_L1_ABSOLUTE: 2.559 G_L1_RELATIVE: 10.005 G_Regularizer: 0.000 validation_error: 21.004 +(epoch: 26, iters: 139200, time: 0.546, data: 0.000) G_L1: 14.751 G_L1_ABSOLUTE: 2.817 G_L1_RELATIVE: 11.934 G_Regularizer: 0.000 validation_error: 21.395 +(epoch: 26, iters: 141200, time: 0.563, data: 0.001) G_L1: 16.133 G_L1_ABSOLUTE: 2.716 G_L1_RELATIVE: 13.417 G_Regularizer: 0.000 validation_error: 20.400 +(epoch: 26, iters: 143200, time: 0.548, data: 0.000) G_L1: 12.502 G_L1_ABSOLUTE: 2.246 G_L1_RELATIVE: 10.256 G_Regularizer: 0.000 validation_error: 20.304 +(epoch: 26, iters: 145200, time: 0.546, data: 0.000) G_L1: 15.244 G_L1_ABSOLUTE: 2.679 G_L1_RELATIVE: 12.565 G_Regularizer: 0.000 validation_error: 20.720 +(epoch: 26, iters: 147200, time: 0.543, data: 0.000) G_L1: 16.972 G_L1_ABSOLUTE: 2.478 G_L1_RELATIVE: 14.494 G_Regularizer: 0.000 validation_error: 21.227 +(epoch: 26, iters: 149200, time: 0.543, data: 0.000) G_L1: 15.682 G_L1_ABSOLUTE: 2.760 G_L1_RELATIVE: 12.923 G_Regularizer: 0.000 validation_error: 21.039 +(epoch: 26, iters: 151200, time: 0.546, data: 0.000) G_L1: 14.424 G_L1_ABSOLUTE: 2.538 G_L1_RELATIVE: 11.886 G_Regularizer: 0.000 validation_error: 20.537 +(epoch: 26, iters: 153200, time: 0.546, data: 0.000) G_L1: 14.488 G_L1_ABSOLUTE: 2.496 G_L1_RELATIVE: 11.992 G_Regularizer: 0.000 validation_error: 20.279 +(epoch: 26, iters: 155200, time: 0.541, data: 0.000) G_L1: 13.382 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 10.928 G_Regularizer: 0.000 validation_error: 20.689 +(epoch: 26, iters: 157200, time: 0.551, data: 0.000) G_L1: 13.709 G_L1_ABSOLUTE: 2.100 G_L1_RELATIVE: 11.609 G_Regularizer: 0.000 validation_error: 21.339 +(epoch: 26, iters: 159200, time: 0.544, data: 0.000) G_L1: 14.290 G_L1_ABSOLUTE: 2.497 G_L1_RELATIVE: 11.793 G_Regularizer: 0.000 validation_error: 20.520 +(epoch: 26, iters: 161200, time: 0.546, data: 0.000) G_L1: 13.713 G_L1_ABSOLUTE: 2.239 G_L1_RELATIVE: 11.474 G_Regularizer: 0.000 validation_error: 20.503 +(epoch: 26, iters: 163200, time: 0.543, data: 0.000) G_L1: 15.597 G_L1_ABSOLUTE: 2.141 G_L1_RELATIVE: 13.455 G_Regularizer: 0.000 validation_error: 20.752 +(epoch: 26, iters: 165200, time: 
0.540, data: 0.000) G_L1: 15.178 G_L1_ABSOLUTE: 2.988 G_L1_RELATIVE: 12.190 G_Regularizer: 0.000 validation_error: 21.046 +(epoch: 26, iters: 167200, time: 0.544, data: 0.000) G_L1: 16.894 G_L1_ABSOLUTE: 2.779 G_L1_RELATIVE: 14.115 G_Regularizer: 0.000 validation_error: 20.355 +(epoch: 26, iters: 169200, time: 0.542, data: 0.000) G_L1: 13.856 G_L1_ABSOLUTE: 2.576 G_L1_RELATIVE: 11.280 G_Regularizer: 0.000 validation_error: 20.572 +(epoch: 26, iters: 171200, time: 0.554, data: 0.000) G_L1: 14.541 G_L1_ABSOLUTE: 2.451 G_L1_RELATIVE: 12.090 G_Regularizer: 0.000 validation_error: 20.840 +(epoch: 26, iters: 173200, time: 0.540, data: 0.000) G_L1: 13.012 G_L1_ABSOLUTE: 2.233 G_L1_RELATIVE: 10.779 G_Regularizer: 0.000 validation_error: 20.748 +(epoch: 26, iters: 175200, time: 0.550, data: 0.000) G_L1: 11.372 G_L1_ABSOLUTE: 2.093 G_L1_RELATIVE: 9.279 G_Regularizer: 0.000 validation_error: 20.608 +(epoch: 26, iters: 177200, time: 0.550, data: 0.000) G_L1: 13.548 G_L1_ABSOLUTE: 2.230 G_L1_RELATIVE: 11.318 G_Regularizer: 0.000 validation_error: 20.579 +(epoch: 26, iters: 179200, time: 0.545, data: 0.000) G_L1: 16.508 G_L1_ABSOLUTE: 2.417 G_L1_RELATIVE: 14.091 G_Regularizer: 0.000 validation_error: 20.758 +(epoch: 26, iters: 181200, time: 0.545, data: 0.000) G_L1: 13.808 G_L1_ABSOLUTE: 2.535 G_L1_RELATIVE: 11.273 G_Regularizer: 0.000 validation_error: 20.642 +(epoch: 26, iters: 183200, time: 0.548, data: 0.000) G_L1: 16.096 G_L1_ABSOLUTE: 2.576 G_L1_RELATIVE: 13.520 G_Regularizer: 0.000 validation_error: 20.480 +(epoch: 26, iters: 185200, time: 0.545, data: 0.001) G_L1: 16.210 G_L1_ABSOLUTE: 2.484 G_L1_RELATIVE: 13.726 G_Regularizer: 0.000 validation_error: 20.782 +(epoch: 26, iters: 187200, time: 0.543, data: 0.001) G_L1: 15.265 G_L1_ABSOLUTE: 2.542 G_L1_RELATIVE: 12.723 G_Regularizer: 0.000 validation_error: 20.580 +(epoch: 26, iters: 189200, time: 0.548, data: 0.000) G_L1: 16.285 G_L1_ABSOLUTE: 2.953 G_L1_RELATIVE: 13.332 G_Regularizer: 0.000 validation_error: 21.345 +(epoch: 26, iters: 191200, time: 0.554, data: 0.000) G_L1: 17.032 G_L1_ABSOLUTE: 2.709 G_L1_RELATIVE: 14.323 G_Regularizer: 0.000 validation_error: 20.581 +(epoch: 26, iters: 193200, time: 0.545, data: 0.000) G_L1: 15.819 G_L1_ABSOLUTE: 2.477 G_L1_RELATIVE: 13.342 G_Regularizer: 0.000 validation_error: 21.332 +(epoch: 26, iters: 195200, time: 0.542, data: 0.000) G_L1: 14.127 G_L1_ABSOLUTE: 2.973 G_L1_RELATIVE: 11.154 G_Regularizer: 0.000 validation_error: 20.365 +(epoch: 26, iters: 197200, time: 0.545, data: 0.000) G_L1: 15.976 G_L1_ABSOLUTE: 2.557 G_L1_RELATIVE: 13.419 G_Regularizer: 0.000 validation_error: 20.785 +(epoch: 26, iters: 199200, time: 0.540, data: 0.000) G_L1: 15.561 G_L1_ABSOLUTE: 2.324 G_L1_RELATIVE: 13.237 G_Regularizer: 0.000 validation_error: 20.559 +(epoch: 26, iters: 201200, time: 0.557, data: 0.000) G_L1: 15.706 G_L1_ABSOLUTE: 2.564 G_L1_RELATIVE: 13.142 G_Regularizer: 0.000 validation_error: 20.562 +(epoch: 26, iters: 203200, time: 0.546, data: 0.000) G_L1: 15.360 G_L1_ABSOLUTE: 2.339 G_L1_RELATIVE: 13.021 G_Regularizer: 0.000 validation_error: 21.071 +(epoch: 26, iters: 205200, time: 0.545, data: 0.000) G_L1: 14.373 G_L1_ABSOLUTE: 2.748 G_L1_RELATIVE: 11.625 G_Regularizer: 0.000 validation_error: 20.449 +(epoch: 26, iters: 207200, time: 0.536, data: 0.000) G_L1: 13.921 G_L1_ABSOLUTE: 2.426 G_L1_RELATIVE: 11.495 G_Regularizer: 0.000 validation_error: 20.919 +(epoch: 26, iters: 209200, time: 0.547, data: 0.000) G_L1: 12.059 G_L1_ABSOLUTE: 2.074 G_L1_RELATIVE: 9.985 G_Regularizer: 0.000 validation_error: 21.062 
+(epoch: 26, iters: 211200, time: 0.544, data: 0.000) G_L1: 16.363 G_L1_ABSOLUTE: 2.923 G_L1_RELATIVE: 13.440 G_Regularizer: 0.000 validation_error: 20.607 +(epoch: 26, iters: 213200, time: 0.545, data: 0.000) G_L1: 15.442 G_L1_ABSOLUTE: 2.813 G_L1_RELATIVE: 12.629 G_Regularizer: 0.000 validation_error: 21.784 +(epoch: 26, iters: 215200, time: 0.544, data: 0.000) G_L1: 16.341 G_L1_ABSOLUTE: 2.946 G_L1_RELATIVE: 13.395 G_Regularizer: 0.000 validation_error: 20.121 +(epoch: 26, iters: 217200, time: 0.542, data: 0.000) G_L1: 16.771 G_L1_ABSOLUTE: 2.759 G_L1_RELATIVE: 14.012 G_Regularizer: 0.000 validation_error: 20.322 +(epoch: 26, iters: 219200, time: 0.549, data: 0.000) G_L1: 15.739 G_L1_ABSOLUTE: 2.793 G_L1_RELATIVE: 12.947 G_Regularizer: 0.000 validation_error: 20.436 +(epoch: 26, iters: 221200, time: 0.539, data: 0.000) G_L1: 13.654 G_L1_ABSOLUTE: 2.399 G_L1_RELATIVE: 11.255 G_Regularizer: 0.000 validation_error: 21.284 +(epoch: 26, iters: 223200, time: 0.543, data: 0.001) G_L1: 14.085 G_L1_ABSOLUTE: 2.518 G_L1_RELATIVE: 11.567 G_Regularizer: 0.000 validation_error: 21.489 +(epoch: 26, iters: 225200, time: 0.547, data: 0.000) G_L1: 15.109 G_L1_ABSOLUTE: 2.505 G_L1_RELATIVE: 12.605 G_Regularizer: 0.000 validation_error: 21.141 +(epoch: 26, iters: 227200, time: 0.544, data: 0.000) G_L1: 14.358 G_L1_ABSOLUTE: 2.298 G_L1_RELATIVE: 12.060 G_Regularizer: 0.000 validation_error: 21.064 +(epoch: 26, iters: 229200, time: 0.542, data: 0.000) G_L1: 13.022 G_L1_ABSOLUTE: 2.776 G_L1_RELATIVE: 10.246 G_Regularizer: 0.000 validation_error: 20.584 +(epoch: 26, iters: 231200, time: 0.546, data: 0.000) G_L1: 14.675 G_L1_ABSOLUTE: 3.004 G_L1_RELATIVE: 11.671 G_Regularizer: 0.000 validation_error: 20.701 +(epoch: 26, iters: 233200, time: 0.549, data: 0.001) G_L1: 12.705 G_L1_ABSOLUTE: 2.505 G_L1_RELATIVE: 10.200 G_Regularizer: 0.000 validation_error: 20.456 +(epoch: 26, iters: 235200, time: 0.609, data: 0.000) G_L1: 11.844 G_L1_ABSOLUTE: 2.426 G_L1_RELATIVE: 9.418 G_Regularizer: 0.000 validation_error: 21.056 +(epoch: 26, iters: 237200, time: 0.532, data: 0.000) G_L1: 16.622 G_L1_ABSOLUTE: 2.649 G_L1_RELATIVE: 13.973 G_Regularizer: 0.000 validation_error: 20.418 +(epoch: 26, iters: 239200, time: 0.540, data: 0.000) G_L1: 14.724 G_L1_ABSOLUTE: 2.595 G_L1_RELATIVE: 12.129 G_Regularizer: 0.000 validation_error: 21.137 +(epoch: 26, iters: 241200, time: 0.545, data: 0.000) G_L1: 14.158 G_L1_ABSOLUTE: 2.497 G_L1_RELATIVE: 11.661 G_Regularizer: 0.000 validation_error: 21.256 +(epoch: 26, iters: 243200, time: 0.548, data: 0.000) G_L1: 13.462 G_L1_ABSOLUTE: 2.436 G_L1_RELATIVE: 11.027 G_Regularizer: 0.000 validation_error: 20.860 +(epoch: 26, iters: 245200, time: 0.542, data: 0.000) G_L1: 11.069 G_L1_ABSOLUTE: 2.338 G_L1_RELATIVE: 8.731 G_Regularizer: 0.000 validation_error: 20.263 +(epoch: 26, iters: 247200, time: 0.547, data: 0.000) G_L1: 15.064 G_L1_ABSOLUTE: 2.388 G_L1_RELATIVE: 12.677 G_Regularizer: 0.000 validation_error: 20.828 +(epoch: 26, iters: 249200, time: 0.541, data: 0.000) G_L1: 15.840 G_L1_ABSOLUTE: 2.530 G_L1_RELATIVE: 13.310 G_Regularizer: 0.000 validation_error: 21.022 +(epoch: 26, iters: 251200, time: 0.545, data: 0.000) G_L1: 12.541 G_L1_ABSOLUTE: 2.564 G_L1_RELATIVE: 9.977 G_Regularizer: 0.000 validation_error: 20.947 +(epoch: 26, iters: 253200, time: 0.544, data: 0.000) G_L1: 16.257 G_L1_ABSOLUTE: 2.701 G_L1_RELATIVE: 13.556 G_Regularizer: 0.000 validation_error: 20.731 +(epoch: 26, iters: 255200, time: 0.545, data: 0.000) G_L1: 15.291 G_L1_ABSOLUTE: 2.469 G_L1_RELATIVE: 12.822 G_Regularizer: 
0.000 validation_error: 20.431 +(epoch: 26, iters: 257200, time: 0.535, data: 0.000) G_L1: 15.446 G_L1_ABSOLUTE: 2.595 G_L1_RELATIVE: 12.852 G_Regularizer: 0.000 validation_error: 20.284 +(epoch: 26, iters: 259200, time: 0.541, data: 0.000) G_L1: 14.829 G_L1_ABSOLUTE: 2.463 G_L1_RELATIVE: 12.365 G_Regularizer: 0.000 validation_error: 20.654 +(epoch: 26, iters: 261200, time: 0.549, data: 0.000) G_L1: 13.339 G_L1_ABSOLUTE: 2.486 G_L1_RELATIVE: 10.853 G_Regularizer: 0.000 validation_error: 20.519 +(epoch: 26, iters: 263200, time: 0.544, data: 0.000) G_L1: 14.754 G_L1_ABSOLUTE: 2.354 G_L1_RELATIVE: 12.400 G_Regularizer: 0.000 validation_error: 20.775 +(epoch: 26, iters: 265200, time: 0.549, data: 0.000) G_L1: 14.671 G_L1_ABSOLUTE: 2.620 G_L1_RELATIVE: 12.051 G_Regularizer: 0.000 validation_error: 20.693 +(epoch: 26, iters: 267200, time: 0.548, data: 0.000) G_L1: 12.848 G_L1_ABSOLUTE: 2.368 G_L1_RELATIVE: 10.479 G_Regularizer: 0.000 validation_error: 20.415 +(epoch: 26, iters: 269200, time: 0.549, data: 0.000) G_L1: 16.247 G_L1_ABSOLUTE: 2.856 G_L1_RELATIVE: 13.391 G_Regularizer: 0.000 validation_error: 20.542 +(epoch: 26, iters: 271200, time: 0.538, data: 0.000) G_L1: 12.614 G_L1_ABSOLUTE: 2.428 G_L1_RELATIVE: 10.186 G_Regularizer: 0.000 validation_error: 20.286 +(epoch: 26, iters: 273200, time: 0.551, data: 0.000) G_L1: 14.505 G_L1_ABSOLUTE: 2.637 G_L1_RELATIVE: 11.868 G_Regularizer: 0.000 validation_error: 20.811 +(epoch: 26, iters: 275200, time: 0.544, data: 0.000) G_L1: 14.805 G_L1_ABSOLUTE: 2.635 G_L1_RELATIVE: 12.170 G_Regularizer: 0.000 validation_error: 20.176 +(epoch: 26, iters: 277200, time: 0.546, data: 0.000) G_L1: 16.320 G_L1_ABSOLUTE: 2.851 G_L1_RELATIVE: 13.469 G_Regularizer: 0.000 validation_error: 20.379 +(epoch: 26, iters: 279200, time: 0.543, data: 0.000) G_L1: 15.443 G_L1_ABSOLUTE: 2.418 G_L1_RELATIVE: 13.025 G_Regularizer: 0.000 validation_error: 20.417 +(epoch: 26, iters: 281200, time: 0.548, data: 0.000) G_L1: 16.069 G_L1_ABSOLUTE: 2.728 G_L1_RELATIVE: 13.340 G_Regularizer: 0.000 validation_error: 21.047 +(epoch: 26, iters: 283200, time: 0.542, data: 0.001) G_L1: 13.604 G_L1_ABSOLUTE: 2.060 G_L1_RELATIVE: 11.544 G_Regularizer: 0.000 validation_error: 20.666 +(epoch: 26, iters: 285200, time: 0.551, data: 0.000) G_L1: 15.449 G_L1_ABSOLUTE: 2.343 G_L1_RELATIVE: 13.106 G_Regularizer: 0.000 validation_error: 20.523 +(epoch: 26, iters: 287200, time: 0.555, data: 0.000) G_L1: 13.753 G_L1_ABSOLUTE: 2.181 G_L1_RELATIVE: 11.572 G_Regularizer: 0.000 validation_error: 20.648 +(epoch: 26, iters: 289200, time: 0.539, data: 0.000) G_L1: 14.601 G_L1_ABSOLUTE: 2.907 G_L1_RELATIVE: 11.694 G_Regularizer: 0.000 validation_error: 20.216 +(epoch: 26, iters: 291200, time: 0.544, data: 0.000) G_L1: 14.594 G_L1_ABSOLUTE: 2.464 G_L1_RELATIVE: 12.130 G_Regularizer: 0.000 validation_error: 20.734 +(epoch: 26, iters: 293200, time: 0.543, data: 0.000) G_L1: 14.140 G_L1_ABSOLUTE: 2.543 G_L1_RELATIVE: 11.597 G_Regularizer: 0.000 validation_error: 20.466 +(epoch: 26, iters: 295200, time: 0.546, data: 0.000) G_L1: 13.739 G_L1_ABSOLUTE: 2.262 G_L1_RELATIVE: 11.476 G_Regularizer: 0.000 validation_error: 21.508 +(epoch: 26, iters: 297200, time: 0.542, data: 0.000) G_L1: 17.267 G_L1_ABSOLUTE: 2.548 G_L1_RELATIVE: 14.719 G_Regularizer: 0.000 validation_error: 21.108 +(epoch: 26, iters: 299200, time: 0.549, data: 0.000) G_L1: 11.134 G_L1_ABSOLUTE: 1.970 G_L1_RELATIVE: 9.164 G_Regularizer: 0.000 validation_error: 20.826 +(epoch: 26, iters: 301200, time: 0.541, data: 0.000) G_L1: 13.578 G_L1_ABSOLUTE: 2.735 
G_L1_RELATIVE: 10.843 G_Regularizer: 0.000 validation_error: 20.855 +(epoch: 27, iters: 448, time: 0.539, data: 0.000) G_L1: 16.217 G_L1_ABSOLUTE: 2.543 G_L1_RELATIVE: 13.674 G_Regularizer: 0.000 validation_error: 20.703 +(epoch: 27, iters: 2448, time: 0.546, data: 0.000) G_L1: 14.076 G_L1_ABSOLUTE: 2.515 G_L1_RELATIVE: 11.562 G_Regularizer: 0.000 validation_error: 20.490 +(epoch: 27, iters: 4448, time: 0.546, data: 0.000) G_L1: 16.323 G_L1_ABSOLUTE: 2.947 G_L1_RELATIVE: 13.376 G_Regularizer: 0.000 validation_error: 19.992 +(epoch: 27, iters: 6448, time: 0.544, data: 0.000) G_L1: 13.348 G_L1_ABSOLUTE: 2.483 G_L1_RELATIVE: 10.865 G_Regularizer: 0.000 validation_error: 20.803 +(epoch: 27, iters: 8448, time: 0.551, data: 0.001) G_L1: 12.721 G_L1_ABSOLUTE: 2.478 G_L1_RELATIVE: 10.244 G_Regularizer: 0.000 validation_error: 20.151 +(epoch: 27, iters: 10448, time: 0.540, data: 0.000) G_L1: 12.463 G_L1_ABSOLUTE: 2.497 G_L1_RELATIVE: 9.966 G_Regularizer: 0.000 validation_error: 20.823 +(epoch: 27, iters: 12448, time: 0.545, data: 0.000) G_L1: 15.999 G_L1_ABSOLUTE: 2.828 G_L1_RELATIVE: 13.171 G_Regularizer: 0.000 validation_error: 21.436 +(epoch: 27, iters: 14448, time: 0.545, data: 0.000) G_L1: 11.687 G_L1_ABSOLUTE: 1.926 G_L1_RELATIVE: 9.761 G_Regularizer: 0.000 validation_error: 20.122 +(epoch: 27, iters: 16448, time: 0.535, data: 0.000) G_L1: 16.158 G_L1_ABSOLUTE: 2.216 G_L1_RELATIVE: 13.942 G_Regularizer: 0.000 validation_error: 20.792 +(epoch: 27, iters: 18448, time: 0.550, data: 0.000) G_L1: 15.476 G_L1_ABSOLUTE: 2.868 G_L1_RELATIVE: 12.608 G_Regularizer: 0.000 validation_error: 20.794 +(epoch: 27, iters: 20448, time: 0.549, data: 0.000) G_L1: 11.684 G_L1_ABSOLUTE: 2.369 G_L1_RELATIVE: 9.315 G_Regularizer: 0.000 validation_error: 20.621 +(epoch: 27, iters: 22448, time: 0.542, data: 0.000) G_L1: 13.060 G_L1_ABSOLUTE: 2.110 G_L1_RELATIVE: 10.950 G_Regularizer: 0.000 validation_error: 20.548 +(epoch: 27, iters: 24448, time: 0.546, data: 0.000) G_L1: 12.763 G_L1_ABSOLUTE: 2.509 G_L1_RELATIVE: 10.254 G_Regularizer: 0.000 validation_error: 20.136 +(epoch: 27, iters: 26448, time: 0.551, data: 0.000) G_L1: 14.683 G_L1_ABSOLUTE: 2.455 G_L1_RELATIVE: 12.229 G_Regularizer: 0.000 validation_error: 20.745 +(epoch: 27, iters: 28448, time: 0.539, data: 0.000) G_L1: 13.817 G_L1_ABSOLUTE: 2.625 G_L1_RELATIVE: 11.192 G_Regularizer: 0.000 validation_error: 19.994 +(epoch: 27, iters: 30448, time: 0.544, data: 0.000) G_L1: 15.856 G_L1_ABSOLUTE: 2.407 G_L1_RELATIVE: 13.449 G_Regularizer: 0.000 validation_error: 20.194 +(epoch: 27, iters: 32448, time: 0.542, data: 0.000) G_L1: 14.978 G_L1_ABSOLUTE: 2.373 G_L1_RELATIVE: 12.605 G_Regularizer: 0.000 validation_error: 21.392 +(epoch: 27, iters: 34448, time: 0.541, data: 0.000) G_L1: 15.978 G_L1_ABSOLUTE: 2.520 G_L1_RELATIVE: 13.458 G_Regularizer: 0.000 validation_error: 20.195 +(epoch: 27, iters: 36448, time: 0.543, data: 0.000) G_L1: 14.425 G_L1_ABSOLUTE: 2.383 G_L1_RELATIVE: 12.043 G_Regularizer: 0.000 validation_error: 20.527 +(epoch: 27, iters: 38448, time: 0.547, data: 0.000) G_L1: 14.608 G_L1_ABSOLUTE: 2.194 G_L1_RELATIVE: 12.414 G_Regularizer: 0.000 validation_error: 20.581 +(epoch: 27, iters: 40448, time: 0.543, data: 0.000) G_L1: 14.977 G_L1_ABSOLUTE: 2.547 G_L1_RELATIVE: 12.430 G_Regularizer: 0.000 validation_error: 20.850 +(epoch: 27, iters: 42448, time: 0.543, data: 0.000) G_L1: 17.086 G_L1_ABSOLUTE: 2.649 G_L1_RELATIVE: 14.437 G_Regularizer: 0.000 validation_error: 21.025 +(epoch: 27, iters: 44448, time: 0.548, data: 0.001) G_L1: 13.298 G_L1_ABSOLUTE: 
2.625 G_L1_RELATIVE: 10.674 G_Regularizer: 0.000 validation_error: 20.642 +(epoch: 27, iters: 46448, time: 0.551, data: 0.000) G_L1: 15.198 G_L1_ABSOLUTE: 2.722 G_L1_RELATIVE: 12.475 G_Regularizer: 0.000 validation_error: 20.078 +(epoch: 27, iters: 48448, time: 0.544, data: 0.000) G_L1: 14.075 G_L1_ABSOLUTE: 2.719 G_L1_RELATIVE: 11.355 G_Regularizer: 0.000 validation_error: 20.344 +(epoch: 27, iters: 50448, time: 0.542, data: 0.000) G_L1: 15.117 G_L1_ABSOLUTE: 2.380 G_L1_RELATIVE: 12.737 G_Regularizer: 0.000 validation_error: 20.406 +(epoch: 27, iters: 52448, time: 0.546, data: 0.000) G_L1: 14.462 G_L1_ABSOLUTE: 2.358 G_L1_RELATIVE: 12.105 G_Regularizer: 0.000 validation_error: 21.061 +(epoch: 27, iters: 54448, time: 0.551, data: 0.000) G_L1: 13.081 G_L1_ABSOLUTE: 2.272 G_L1_RELATIVE: 10.809 G_Regularizer: 0.000 validation_error: 20.461 +(epoch: 27, iters: 56448, time: 0.546, data: 0.000) G_L1: 13.450 G_L1_ABSOLUTE: 2.303 G_L1_RELATIVE: 11.148 G_Regularizer: 0.000 validation_error: 20.996 +(epoch: 27, iters: 58448, time: 0.540, data: 0.000) G_L1: 15.751 G_L1_ABSOLUTE: 2.621 G_L1_RELATIVE: 13.130 G_Regularizer: 0.000 validation_error: 20.584 +(epoch: 27, iters: 60448, time: 0.547, data: 0.000) G_L1: 12.128 G_L1_ABSOLUTE: 2.412 G_L1_RELATIVE: 9.716 G_Regularizer: 0.000 validation_error: 21.781 +(epoch: 27, iters: 62448, time: 0.541, data: 0.000) G_L1: 13.082 G_L1_ABSOLUTE: 2.055 G_L1_RELATIVE: 11.027 G_Regularizer: 0.000 validation_error: 21.784 +(epoch: 27, iters: 64448, time: 0.542, data: 0.000) G_L1: 13.702 G_L1_ABSOLUTE: 2.717 G_L1_RELATIVE: 10.985 G_Regularizer: 0.000 validation_error: 20.671 +(epoch: 27, iters: 66448, time: 0.537, data: 0.000) G_L1: 15.008 G_L1_ABSOLUTE: 2.366 G_L1_RELATIVE: 12.641 G_Regularizer: 0.000 validation_error: 20.945 +(epoch: 27, iters: 68448, time: 0.542, data: 0.001) G_L1: 14.308 G_L1_ABSOLUTE: 2.567 G_L1_RELATIVE: 11.741 G_Regularizer: 0.000 validation_error: 21.469 +(epoch: 27, iters: 70448, time: 0.557, data: 0.000) G_L1: 13.099 G_L1_ABSOLUTE: 2.483 G_L1_RELATIVE: 10.616 G_Regularizer: 0.000 validation_error: 20.783 +(epoch: 27, iters: 72448, time: 0.536, data: 0.000) G_L1: 13.134 G_L1_ABSOLUTE: 3.055 G_L1_RELATIVE: 10.079 G_Regularizer: 0.000 validation_error: 20.421 +(epoch: 27, iters: 74448, time: 0.542, data: 0.000) G_L1: 16.029 G_L1_ABSOLUTE: 2.510 G_L1_RELATIVE: 13.519 G_Regularizer: 0.000 validation_error: 21.231 +(epoch: 27, iters: 76448, time: 0.541, data: 0.000) G_L1: 16.189 G_L1_ABSOLUTE: 2.880 G_L1_RELATIVE: 13.309 G_Regularizer: 0.000 validation_error: 20.681 +(epoch: 27, iters: 78448, time: 0.548, data: 0.000) G_L1: 12.902 G_L1_ABSOLUTE: 2.636 G_L1_RELATIVE: 10.265 G_Regularizer: 0.000 validation_error: 20.964 +(epoch: 27, iters: 80448, time: 0.547, data: 0.000) G_L1: 13.733 G_L1_ABSOLUTE: 2.437 G_L1_RELATIVE: 11.296 G_Regularizer: 0.000 validation_error: 20.359 +(epoch: 27, iters: 82448, time: 0.544, data: 0.000) G_L1: 13.420 G_L1_ABSOLUTE: 2.634 G_L1_RELATIVE: 10.785 G_Regularizer: 0.000 validation_error: 20.685 +(epoch: 27, iters: 84448, time: 0.543, data: 0.000) G_L1: 13.639 G_L1_ABSOLUTE: 2.274 G_L1_RELATIVE: 11.366 G_Regularizer: 0.000 validation_error: 20.247 +(epoch: 27, iters: 86448, time: 0.548, data: 0.000) G_L1: 15.903 G_L1_ABSOLUTE: 2.572 G_L1_RELATIVE: 13.332 G_Regularizer: 0.000 validation_error: 20.195 +(epoch: 27, iters: 88448, time: 0.546, data: 0.000) G_L1: 13.779 G_L1_ABSOLUTE: 2.286 G_L1_RELATIVE: 11.492 G_Regularizer: 0.000 validation_error: 20.875 +(epoch: 27, iters: 90448, time: 0.548, data: 0.000) G_L1: 18.633 
G_L1_ABSOLUTE: 2.461 G_L1_RELATIVE: 16.172 G_Regularizer: 0.000 validation_error: 21.077 +(epoch: 27, iters: 92448, time: 0.546, data: 0.000) G_L1: 14.887 G_L1_ABSOLUTE: 2.575 G_L1_RELATIVE: 12.311 G_Regularizer: 0.000 validation_error: 21.125 +(epoch: 27, iters: 94448, time: 0.539, data: 0.000) G_L1: 13.500 G_L1_ABSOLUTE: 2.536 G_L1_RELATIVE: 10.964 G_Regularizer: 0.000 validation_error: 20.855 +(epoch: 27, iters: 96448, time: 0.539, data: 0.000) G_L1: 12.768 G_L1_ABSOLUTE: 2.055 G_L1_RELATIVE: 10.713 G_Regularizer: 0.000 validation_error: 21.382 +(epoch: 27, iters: 98448, time: 0.544, data: 0.000) G_L1: 13.590 G_L1_ABSOLUTE: 2.278 G_L1_RELATIVE: 11.312 G_Regularizer: 0.000 validation_error: 21.358 +(epoch: 27, iters: 100448, time: 0.541, data: 0.000) G_L1: 13.560 G_L1_ABSOLUTE: 2.331 G_L1_RELATIVE: 11.229 G_Regularizer: 0.000 validation_error: 20.890 +(epoch: 27, iters: 102448, time: 0.550, data: 0.000) G_L1: 13.789 G_L1_ABSOLUTE: 2.655 G_L1_RELATIVE: 11.134 G_Regularizer: 0.000 validation_error: 20.481 +(epoch: 27, iters: 104448, time: 0.546, data: 0.000) G_L1: 13.524 G_L1_ABSOLUTE: 2.303 G_L1_RELATIVE: 11.221 G_Regularizer: 0.000 validation_error: 20.538 +(epoch: 27, iters: 106448, time: 0.546, data: 0.000) G_L1: 15.241 G_L1_ABSOLUTE: 2.222 G_L1_RELATIVE: 13.019 G_Regularizer: 0.000 validation_error: 20.686 +(epoch: 27, iters: 108448, time: 0.551, data: 0.000) G_L1: 12.794 G_L1_ABSOLUTE: 2.408 G_L1_RELATIVE: 10.385 G_Regularizer: 0.000 validation_error: 20.714 +(epoch: 27, iters: 110448, time: 0.551, data: 0.000) G_L1: 14.867 G_L1_ABSOLUTE: 2.630 G_L1_RELATIVE: 12.237 G_Regularizer: 0.000 validation_error: 20.429 +(epoch: 27, iters: 112448, time: 0.547, data: 0.000) G_L1: 14.473 G_L1_ABSOLUTE: 2.843 G_L1_RELATIVE: 11.630 G_Regularizer: 0.000 validation_error: 20.548 +(epoch: 27, iters: 114448, time: 0.550, data: 0.000) G_L1: 14.742 G_L1_ABSOLUTE: 2.952 G_L1_RELATIVE: 11.791 G_Regularizer: 0.000 validation_error: 20.549 +(epoch: 27, iters: 116448, time: 0.547, data: 0.000) G_L1: 14.700 G_L1_ABSOLUTE: 2.437 G_L1_RELATIVE: 12.264 G_Regularizer: 0.000 validation_error: 19.920 +(epoch: 27, iters: 118448, time: 0.544, data: 0.000) G_L1: 13.651 G_L1_ABSOLUTE: 2.607 G_L1_RELATIVE: 11.044 G_Regularizer: 0.000 validation_error: 20.933 +(epoch: 27, iters: 120448, time: 0.539, data: 0.000) G_L1: 16.959 G_L1_ABSOLUTE: 2.534 G_L1_RELATIVE: 14.425 G_Regularizer: 0.000 validation_error: 20.741 +(epoch: 27, iters: 122448, time: 0.542, data: 0.000) G_L1: 15.101 G_L1_ABSOLUTE: 2.649 G_L1_RELATIVE: 12.452 G_Regularizer: 0.000 validation_error: 20.263 +(epoch: 27, iters: 124448, time: 0.542, data: 0.000) G_L1: 16.394 G_L1_ABSOLUTE: 2.804 G_L1_RELATIVE: 13.590 G_Regularizer: 0.000 validation_error: 20.773 +(epoch: 27, iters: 126448, time: 0.543, data: 0.001) G_L1: 14.281 G_L1_ABSOLUTE: 2.653 G_L1_RELATIVE: 11.628 G_Regularizer: 0.000 validation_error: 21.049 +(epoch: 27, iters: 128448, time: 0.542, data: 0.000) G_L1: 13.161 G_L1_ABSOLUTE: 2.202 G_L1_RELATIVE: 10.959 G_Regularizer: 0.000 validation_error: 20.634 +(epoch: 27, iters: 130448, time: 0.552, data: 0.000) G_L1: 15.239 G_L1_ABSOLUTE: 3.082 G_L1_RELATIVE: 12.157 G_Regularizer: 0.000 validation_error: 21.066 +(epoch: 27, iters: 132448, time: 0.543, data: 0.000) G_L1: 14.367 G_L1_ABSOLUTE: 2.712 G_L1_RELATIVE: 11.654 G_Regularizer: 0.000 validation_error: 21.046 +(epoch: 27, iters: 134448, time: 0.547, data: 0.000) G_L1: 13.851 G_L1_ABSOLUTE: 2.921 G_L1_RELATIVE: 10.930 G_Regularizer: 0.000 validation_error: 21.254 +(epoch: 27, iters: 136448, time: 
0.547, data: 0.000) G_L1: 14.202 G_L1_ABSOLUTE: 2.512 G_L1_RELATIVE: 11.689 G_Regularizer: 0.000 validation_error: 20.645 +(epoch: 27, iters: 138448, time: 0.548, data: 0.000) G_L1: 13.871 G_L1_ABSOLUTE: 1.988 G_L1_RELATIVE: 11.883 G_Regularizer: 0.000 validation_error: 20.762 +(epoch: 27, iters: 140448, time: 0.542, data: 0.000) G_L1: 14.385 G_L1_ABSOLUTE: 3.218 G_L1_RELATIVE: 11.166 G_Regularizer: 0.000 validation_error: 20.273 +(epoch: 27, iters: 142448, time: 0.548, data: 0.000) G_L1: 13.903 G_L1_ABSOLUTE: 2.239 G_L1_RELATIVE: 11.664 G_Regularizer: 0.000 validation_error: 20.251 +(epoch: 27, iters: 144448, time: 0.546, data: 0.000) G_L1: 14.992 G_L1_ABSOLUTE: 2.710 G_L1_RELATIVE: 12.282 G_Regularizer: 0.000 validation_error: 20.589 +(epoch: 27, iters: 146448, time: 0.541, data: 0.000) G_L1: 15.290 G_L1_ABSOLUTE: 2.370 G_L1_RELATIVE: 12.920 G_Regularizer: 0.000 validation_error: 20.842 +(epoch: 27, iters: 148448, time: 0.544, data: 0.000) G_L1: 14.233 G_L1_ABSOLUTE: 2.385 G_L1_RELATIVE: 11.847 G_Regularizer: 0.000 validation_error: 20.617 +(epoch: 27, iters: 150448, time: 0.554, data: 0.000) G_L1: 13.004 G_L1_ABSOLUTE: 2.207 G_L1_RELATIVE: 10.797 G_Regularizer: 0.000 validation_error: 20.605 +(epoch: 27, iters: 152448, time: 0.549, data: 0.000) G_L1: 13.517 G_L1_ABSOLUTE: 1.930 G_L1_RELATIVE: 11.588 G_Regularizer: 0.000 validation_error: 21.092 +(epoch: 27, iters: 154448, time: 0.543, data: 0.000) G_L1: 14.781 G_L1_ABSOLUTE: 2.882 G_L1_RELATIVE: 11.899 G_Regularizer: 0.000 validation_error: 20.828 +(epoch: 27, iters: 156448, time: 0.543, data: 0.000) G_L1: 13.633 G_L1_ABSOLUTE: 2.445 G_L1_RELATIVE: 11.187 G_Regularizer: 0.000 validation_error: 20.843 +(epoch: 27, iters: 158448, time: 0.550, data: 0.000) G_L1: 14.732 G_L1_ABSOLUTE: 2.480 G_L1_RELATIVE: 12.251 G_Regularizer: 0.000 validation_error: 20.989 +(epoch: 27, iters: 160448, time: 0.538, data: 0.000) G_L1: 13.907 G_L1_ABSOLUTE: 2.482 G_L1_RELATIVE: 11.424 G_Regularizer: 0.000 validation_error: 20.616 +(epoch: 27, iters: 162448, time: 0.544, data: 0.000) G_L1: 14.725 G_L1_ABSOLUTE: 2.118 G_L1_RELATIVE: 12.607 G_Regularizer: 0.000 validation_error: 20.928 +(epoch: 27, iters: 164448, time: 0.545, data: 0.000) G_L1: 13.966 G_L1_ABSOLUTE: 2.592 G_L1_RELATIVE: 11.374 G_Regularizer: 0.000 validation_error: 20.835 +(epoch: 27, iters: 166448, time: 0.545, data: 0.000) G_L1: 15.247 G_L1_ABSOLUTE: 2.805 G_L1_RELATIVE: 12.443 G_Regularizer: 0.000 validation_error: 20.427 +(epoch: 27, iters: 168448, time: 0.547, data: 0.001) G_L1: 14.234 G_L1_ABSOLUTE: 2.726 G_L1_RELATIVE: 11.509 G_Regularizer: 0.000 validation_error: 21.210 +(epoch: 27, iters: 170448, time: 0.545, data: 0.000) G_L1: 14.903 G_L1_ABSOLUTE: 2.548 G_L1_RELATIVE: 12.355 G_Regularizer: 0.000 validation_error: 21.277 +(epoch: 27, iters: 172448, time: 0.548, data: 0.000) G_L1: 15.826 G_L1_ABSOLUTE: 3.004 G_L1_RELATIVE: 12.823 G_Regularizer: 0.000 validation_error: 20.780 +(epoch: 27, iters: 174448, time: 0.544, data: 0.000) G_L1: 13.223 G_L1_ABSOLUTE: 2.516 G_L1_RELATIVE: 10.707 G_Regularizer: 0.000 validation_error: 20.918 +(epoch: 27, iters: 176448, time: 0.546, data: 0.000) G_L1: 13.849 G_L1_ABSOLUTE: 2.571 G_L1_RELATIVE: 11.277 G_Regularizer: 0.000 validation_error: 20.594 +(epoch: 27, iters: 178448, time: 0.545, data: 0.000) G_L1: 13.122 G_L1_ABSOLUTE: 2.342 G_L1_RELATIVE: 10.780 G_Regularizer: 0.000 validation_error: 20.940 +(epoch: 27, iters: 180448, time: 0.542, data: 0.000) G_L1: 14.672 G_L1_ABSOLUTE: 2.358 G_L1_RELATIVE: 12.315 G_Regularizer: 0.000 validation_error: 20.516 
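The entries follow a fixed "(epoch: E, iters: I, time: T, data: D) name: value ..." pattern, so the log is easy to post-process. Below is a minimal parsing sketch, assuming one entry per line; the file name loss_log.txt and the helper names are illustrative, not taken from the repo. It extracts each entry into a dict and reports the mean validation_error per epoch, which makes the plateau visible at a glance:

import re
from collections import defaultdict

# Header of every entry: "(epoch: E, iters: I, time: T, data: D)"
HEADER = re.compile(r"\(epoch: (\d+), iters: (\d+), time: ([\d.]+), data: ([\d.]+)\)")
# Trailing "name: value" loss pairs, e.g. "G_L1: 12.949" or "validation_error: 20.633"
PAIR = re.compile(r"(\w+): ([\d.]+)")

def parse_entry(line):
    """Return one log entry as a dict, or None for lines that are not entries."""
    body = line.strip().lstrip("+")  # tolerate a diff-style "+" prefix
    m = HEADER.match(body)
    if m is None:
        return None
    entry = {"epoch": int(m.group(1)), "iters": int(m.group(2)),
             "time": float(m.group(3)), "data": float(m.group(4))}
    # Everything after the closing ")" is a run of "name: value" loss pairs.
    entry.update({name: float(val) for name, val in PAIR.findall(body[m.end():])})
    return entry

def mean_validation_error_per_epoch(path):
    total, count = defaultdict(float), defaultdict(int)
    with open(path) as fh:
        for line in fh:
            entry = parse_entry(line)
            if entry and "validation_error" in entry:
                total[entry["epoch"]] += entry["validation_error"]
                count[entry["epoch"]] += 1
    return {epoch: total[epoch] / count[epoch] for epoch in sorted(total)}

if __name__ == "__main__":
    # "loss_log.txt" is an assumed name; point this at the actual log file.
    for epoch, err in mean_validation_error_per_epoch("loss_log.txt").items():
        print(f"epoch {epoch:3d}: mean validation_error = {err:.3f}")

Run against the excerpt here, every epoch would come out near 20.6, confirming that validation_error is flat across epochs 25-28.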
+[... the log continues in the same format from epoch 27 (iters: 182448) through epoch 28 (up to iters: 105696), with unchanged behaviour: G_Regularizer remains 0.000 and validation_error keeps oscillating around 20-21, peaking at 22.036 (epoch 27, iters: 244448) ...]
+(epoch: 28, iters: 107696, time: 0.552,
data: 0.000) G_L1: 14.152 G_L1_ABSOLUTE: 2.450 G_L1_RELATIVE: 11.703 G_Regularizer: 0.000 validation_error: 20.851 +(epoch: 28, iters: 109696, time: 0.544, data: 0.000) G_L1: 12.903 G_L1_ABSOLUTE: 2.608 G_L1_RELATIVE: 10.295 G_Regularizer: 0.000 validation_error: 20.711 +(epoch: 28, iters: 111696, time: 0.555, data: 0.000) G_L1: 16.142 G_L1_ABSOLUTE: 2.652 G_L1_RELATIVE: 13.489 G_Regularizer: 0.000 validation_error: 21.031 +(epoch: 28, iters: 113696, time: 0.543, data: 0.000) G_L1: 15.277 G_L1_ABSOLUTE: 2.449 G_L1_RELATIVE: 12.828 G_Regularizer: 0.000 validation_error: 20.679 +(epoch: 28, iters: 115696, time: 0.546, data: 0.000) G_L1: 16.657 G_L1_ABSOLUTE: 2.753 G_L1_RELATIVE: 13.904 G_Regularizer: 0.000 validation_error: 21.056 +(epoch: 28, iters: 117696, time: 0.543, data: 0.000) G_L1: 16.223 G_L1_ABSOLUTE: 2.495 G_L1_RELATIVE: 13.728 G_Regularizer: 0.000 validation_error: 20.284 +(epoch: 28, iters: 119696, time: 0.546, data: 0.000) G_L1: 16.767 G_L1_ABSOLUTE: 2.844 G_L1_RELATIVE: 13.923 G_Regularizer: 0.000 validation_error: 21.318 +(epoch: 28, iters: 121696, time: 0.545, data: 0.000) G_L1: 18.802 G_L1_ABSOLUTE: 3.270 G_L1_RELATIVE: 15.531 G_Regularizer: 0.000 validation_error: 20.844 +(epoch: 28, iters: 123696, time: 0.550, data: 0.001) G_L1: 12.158 G_L1_ABSOLUTE: 2.338 G_L1_RELATIVE: 9.820 G_Regularizer: 0.000 validation_error: 20.888 +(epoch: 28, iters: 125696, time: 0.543, data: 0.000) G_L1: 14.648 G_L1_ABSOLUTE: 2.831 G_L1_RELATIVE: 11.817 G_Regularizer: 0.000 validation_error: 20.965 +(epoch: 28, iters: 127696, time: 0.543, data: 0.000) G_L1: 13.391 G_L1_ABSOLUTE: 2.109 G_L1_RELATIVE: 11.282 G_Regularizer: 0.000 validation_error: 20.107 +(epoch: 28, iters: 129696, time: 0.544, data: 0.000) G_L1: 12.770 G_L1_ABSOLUTE: 2.372 G_L1_RELATIVE: 10.397 G_Regularizer: 0.000 validation_error: 21.369 +(epoch: 28, iters: 131696, time: 0.540, data: 0.000) G_L1: 13.142 G_L1_ABSOLUTE: 2.480 G_L1_RELATIVE: 10.662 G_Regularizer: 0.000 validation_error: 20.485 +(epoch: 28, iters: 133696, time: 0.541, data: 0.000) G_L1: 16.728 G_L1_ABSOLUTE: 2.791 G_L1_RELATIVE: 13.937 G_Regularizer: 0.000 validation_error: 20.931 +(epoch: 28, iters: 135696, time: 0.542, data: 0.000) G_L1: 16.139 G_L1_ABSOLUTE: 2.568 G_L1_RELATIVE: 13.571 G_Regularizer: 0.000 validation_error: 21.212 +(epoch: 28, iters: 137696, time: 0.547, data: 0.000) G_L1: 14.723 G_L1_ABSOLUTE: 2.853 G_L1_RELATIVE: 11.869 G_Regularizer: 0.000 validation_error: 20.898 +(epoch: 28, iters: 139696, time: 0.543, data: 0.000) G_L1: 15.737 G_L1_ABSOLUTE: 2.973 G_L1_RELATIVE: 12.763 G_Regularizer: 0.000 validation_error: 20.598 +(epoch: 28, iters: 141696, time: 0.547, data: 0.000) G_L1: 13.670 G_L1_ABSOLUTE: 2.783 G_L1_RELATIVE: 10.887 G_Regularizer: 0.000 validation_error: 20.596 +(epoch: 28, iters: 143696, time: 0.537, data: 0.000) G_L1: 15.923 G_L1_ABSOLUTE: 2.633 G_L1_RELATIVE: 13.290 G_Regularizer: 0.000 validation_error: 20.572 +(epoch: 28, iters: 145696, time: 0.546, data: 0.000) G_L1: 12.602 G_L1_ABSOLUTE: 2.260 G_L1_RELATIVE: 10.342 G_Regularizer: 0.000 validation_error: 20.853 +(epoch: 28, iters: 147696, time: 0.538, data: 0.000) G_L1: 18.570 G_L1_ABSOLUTE: 3.407 G_L1_RELATIVE: 15.163 G_Regularizer: 0.000 validation_error: 20.364 +(epoch: 28, iters: 149696, time: 0.539, data: 0.000) G_L1: 13.627 G_L1_ABSOLUTE: 2.449 G_L1_RELATIVE: 11.178 G_Regularizer: 0.000 validation_error: 20.043 +(epoch: 28, iters: 151696, time: 0.547, data: 0.000) G_L1: 14.241 G_L1_ABSOLUTE: 2.629 G_L1_RELATIVE: 11.612 G_Regularizer: 0.000 validation_error: 19.906 +(epoch: 
28, iters: 153696, time: 0.549, data: 0.000) G_L1: 14.662 G_L1_ABSOLUTE: 2.474 G_L1_RELATIVE: 12.188 G_Regularizer: 0.000 validation_error: 21.253 +(epoch: 28, iters: 155696, time: 0.541, data: 0.001) G_L1: 13.812 G_L1_ABSOLUTE: 2.473 G_L1_RELATIVE: 11.339 G_Regularizer: 0.000 validation_error: 21.256 +(epoch: 28, iters: 157696, time: 0.541, data: 0.000) G_L1: 15.984 G_L1_ABSOLUTE: 2.547 G_L1_RELATIVE: 13.437 G_Regularizer: 0.000 validation_error: 20.676 +(epoch: 28, iters: 159696, time: 0.542, data: 0.001) G_L1: 15.607 G_L1_ABSOLUTE: 2.677 G_L1_RELATIVE: 12.930 G_Regularizer: 0.000 validation_error: 21.162 +(epoch: 28, iters: 161696, time: 0.546, data: 0.000) G_L1: 16.120 G_L1_ABSOLUTE: 2.577 G_L1_RELATIVE: 13.543 G_Regularizer: 0.000 validation_error: 20.749 +(epoch: 28, iters: 163696, time: 0.549, data: 0.001) G_L1: 15.788 G_L1_ABSOLUTE: 2.628 G_L1_RELATIVE: 13.160 G_Regularizer: 0.000 validation_error: 20.257 +(epoch: 28, iters: 165696, time: 0.544, data: 0.000) G_L1: 13.577 G_L1_ABSOLUTE: 2.396 G_L1_RELATIVE: 11.180 G_Regularizer: 0.000 validation_error: 21.215 +(epoch: 28, iters: 167696, time: 0.546, data: 0.000) G_L1: 17.069 G_L1_ABSOLUTE: 3.256 G_L1_RELATIVE: 13.813 G_Regularizer: 0.000 validation_error: 20.437 +(epoch: 28, iters: 169696, time: 0.542, data: 0.000) G_L1: 16.515 G_L1_ABSOLUTE: 2.658 G_L1_RELATIVE: 13.857 G_Regularizer: 0.000 validation_error: 21.392 +(epoch: 28, iters: 171696, time: 0.549, data: 0.000) G_L1: 14.691 G_L1_ABSOLUTE: 2.545 G_L1_RELATIVE: 12.146 G_Regularizer: 0.000 validation_error: 21.302 +(epoch: 28, iters: 173696, time: 0.543, data: 0.000) G_L1: 12.286 G_L1_ABSOLUTE: 2.025 G_L1_RELATIVE: 10.260 G_Regularizer: 0.000 validation_error: 20.610 +(epoch: 28, iters: 175696, time: 0.545, data: 0.000) G_L1: 13.660 G_L1_ABSOLUTE: 2.754 G_L1_RELATIVE: 10.906 G_Regularizer: 0.000 validation_error: 20.891 +(epoch: 28, iters: 177696, time: 0.540, data: 0.000) G_L1: 15.050 G_L1_ABSOLUTE: 2.944 G_L1_RELATIVE: 12.106 G_Regularizer: 0.000 validation_error: 21.179 +(epoch: 28, iters: 179696, time: 0.539, data: 0.000) G_L1: 10.556 G_L1_ABSOLUTE: 1.943 G_L1_RELATIVE: 8.614 G_Regularizer: 0.000 validation_error: 20.422 +(epoch: 28, iters: 181696, time: 0.539, data: 0.000) G_L1: 13.187 G_L1_ABSOLUTE: 2.520 G_L1_RELATIVE: 10.667 G_Regularizer: 0.000 validation_error: 20.857 +(epoch: 28, iters: 183696, time: 0.540, data: 0.000) G_L1: 12.725 G_L1_ABSOLUTE: 2.459 G_L1_RELATIVE: 10.266 G_Regularizer: 0.000 validation_error: 19.901 +(epoch: 28, iters: 185696, time: 0.545, data: 0.000) G_L1: 15.240 G_L1_ABSOLUTE: 2.315 G_L1_RELATIVE: 12.925 G_Regularizer: 0.000 validation_error: 20.545 +(epoch: 28, iters: 187696, time: 0.547, data: 0.000) G_L1: 14.390 G_L1_ABSOLUTE: 2.604 G_L1_RELATIVE: 11.787 G_Regularizer: 0.000 validation_error: 20.530 +(epoch: 28, iters: 189696, time: 0.543, data: 0.000) G_L1: 18.476 G_L1_ABSOLUTE: 2.205 G_L1_RELATIVE: 16.271 G_Regularizer: 0.000 validation_error: 20.297 +(epoch: 28, iters: 191696, time: 0.539, data: 0.000) G_L1: 12.897 G_L1_ABSOLUTE: 2.257 G_L1_RELATIVE: 10.640 G_Regularizer: 0.000 validation_error: 20.538 +(epoch: 28, iters: 193696, time: 0.548, data: 0.000) G_L1: 12.322 G_L1_ABSOLUTE: 2.430 G_L1_RELATIVE: 9.892 G_Regularizer: 0.000 validation_error: 20.819 +(epoch: 28, iters: 195696, time: 0.537, data: 0.000) G_L1: 16.990 G_L1_ABSOLUTE: 2.434 G_L1_RELATIVE: 14.556 G_Regularizer: 0.000 validation_error: 20.455 +(epoch: 28, iters: 197696, time: 0.552, data: 0.000) G_L1: 16.008 G_L1_ABSOLUTE: 2.495 G_L1_RELATIVE: 13.513 G_Regularizer: 0.000 
validation_error: 20.299 +(epoch: 28, iters: 199696, time: 0.547, data: 0.000) G_L1: 14.336 G_L1_ABSOLUTE: 2.393 G_L1_RELATIVE: 11.943 G_Regularizer: 0.000 validation_error: 20.653 +(epoch: 28, iters: 201696, time: 0.549, data: 0.000) G_L1: 12.775 G_L1_ABSOLUTE: 2.163 G_L1_RELATIVE: 10.612 G_Regularizer: 0.000 validation_error: 20.864 +(epoch: 28, iters: 203696, time: 0.536, data: 0.000) G_L1: 17.169 G_L1_ABSOLUTE: 3.224 G_L1_RELATIVE: 13.945 G_Regularizer: 0.000 validation_error: 20.959 +(epoch: 28, iters: 205696, time: 0.548, data: 0.000) G_L1: 13.948 G_L1_ABSOLUTE: 2.376 G_L1_RELATIVE: 11.571 G_Regularizer: 0.000 validation_error: 20.287 +(epoch: 28, iters: 207696, time: 0.542, data: 0.000) G_L1: 13.001 G_L1_ABSOLUTE: 2.416 G_L1_RELATIVE: 10.585 G_Regularizer: 0.000 validation_error: 20.037 +(epoch: 28, iters: 209696, time: 0.541, data: 0.000) G_L1: 15.556 G_L1_ABSOLUTE: 2.369 G_L1_RELATIVE: 13.187 G_Regularizer: 0.000 validation_error: 20.703 +(epoch: 28, iters: 211696, time: 0.545, data: 0.000) G_L1: 16.728 G_L1_ABSOLUTE: 2.497 G_L1_RELATIVE: 14.231 G_Regularizer: 0.000 validation_error: 20.442 +(epoch: 28, iters: 213696, time: 0.540, data: 0.000) G_L1: 17.605 G_L1_ABSOLUTE: 2.516 G_L1_RELATIVE: 15.090 G_Regularizer: 0.000 validation_error: 20.871 +(epoch: 28, iters: 215696, time: 0.537, data: 0.000) G_L1: 14.490 G_L1_ABSOLUTE: 2.338 G_L1_RELATIVE: 12.152 G_Regularizer: 0.000 validation_error: 20.379 +(epoch: 28, iters: 217696, time: 0.547, data: 0.000) G_L1: 16.151 G_L1_ABSOLUTE: 2.725 G_L1_RELATIVE: 13.426 G_Regularizer: 0.000 validation_error: 20.759 +(epoch: 28, iters: 219696, time: 0.553, data: 0.000) G_L1: 14.544 G_L1_ABSOLUTE: 2.905 G_L1_RELATIVE: 11.639 G_Regularizer: 0.000 validation_error: 20.343 +(epoch: 28, iters: 221696, time: 0.548, data: 0.000) G_L1: 15.586 G_L1_ABSOLUTE: 2.798 G_L1_RELATIVE: 12.788 G_Regularizer: 0.000 validation_error: 20.168 +(epoch: 28, iters: 223696, time: 0.550, data: 0.000) G_L1: 13.016 G_L1_ABSOLUTE: 2.277 G_L1_RELATIVE: 10.738 G_Regularizer: 0.000 validation_error: 21.314 +(epoch: 28, iters: 225696, time: 0.547, data: 0.000) G_L1: 16.028 G_L1_ABSOLUTE: 2.266 G_L1_RELATIVE: 13.763 G_Regularizer: 0.000 validation_error: 20.992 +(epoch: 28, iters: 227696, time: 0.550, data: 0.000) G_L1: 13.567 G_L1_ABSOLUTE: 2.718 G_L1_RELATIVE: 10.849 G_Regularizer: 0.000 validation_error: 21.062 +(epoch: 28, iters: 229696, time: 0.541, data: 0.000) G_L1: 16.846 G_L1_ABSOLUTE: 3.201 G_L1_RELATIVE: 13.645 G_Regularizer: 0.000 validation_error: 20.825 +(epoch: 28, iters: 231696, time: 0.554, data: 0.000) G_L1: 14.361 G_L1_ABSOLUTE: 2.632 G_L1_RELATIVE: 11.730 G_Regularizer: 0.000 validation_error: 20.692 +(epoch: 28, iters: 233696, time: 0.552, data: 0.000) G_L1: 14.936 G_L1_ABSOLUTE: 2.510 G_L1_RELATIVE: 12.426 G_Regularizer: 0.000 validation_error: 20.567 +(epoch: 28, iters: 235696, time: 0.540, data: 0.000) G_L1: 17.507 G_L1_ABSOLUTE: 2.825 G_L1_RELATIVE: 14.682 G_Regularizer: 0.000 validation_error: 20.866 +(epoch: 28, iters: 237696, time: 0.540, data: 0.000) G_L1: 14.675 G_L1_ABSOLUTE: 2.500 G_L1_RELATIVE: 12.175 G_Regularizer: 0.000 validation_error: 20.389 +(epoch: 28, iters: 239696, time: 0.548, data: 0.000) G_L1: 14.594 G_L1_ABSOLUTE: 2.638 G_L1_RELATIVE: 11.957 G_Regularizer: 0.000 validation_error: 20.475 +(epoch: 28, iters: 241696, time: 0.542, data: 0.000) G_L1: 12.876 G_L1_ABSOLUTE: 2.159 G_L1_RELATIVE: 10.718 G_Regularizer: 0.000 validation_error: 20.781 +(epoch: 28, iters: 243696, time: 0.545, data: 0.000) G_L1: 16.207 G_L1_ABSOLUTE: 2.997 
G_L1_RELATIVE: 13.210 G_Regularizer: 0.000 validation_error: 20.389 +(epoch: 28, iters: 245696, time: 0.559, data: 0.000) G_L1: 15.270 G_L1_ABSOLUTE: 2.656 G_L1_RELATIVE: 12.614 G_Regularizer: 0.000 validation_error: 20.234 +(epoch: 28, iters: 247696, time: 0.541, data: 0.000) G_L1: 13.788 G_L1_ABSOLUTE: 2.706 G_L1_RELATIVE: 11.082 G_Regularizer: 0.000 validation_error: 20.507 +(epoch: 28, iters: 249696, time: 0.546, data: 0.000) G_L1: 14.391 G_L1_ABSOLUTE: 2.584 G_L1_RELATIVE: 11.806 G_Regularizer: 0.000 validation_error: 20.532 +(epoch: 28, iters: 251696, time: 0.548, data: 0.000) G_L1: 15.266 G_L1_ABSOLUTE: 2.308 G_L1_RELATIVE: 12.958 G_Regularizer: 0.000 validation_error: 20.312 +(epoch: 28, iters: 253696, time: 0.556, data: 0.000) G_L1: 13.227 G_L1_ABSOLUTE: 2.366 G_L1_RELATIVE: 10.861 G_Regularizer: 0.000 validation_error: 21.064 +(epoch: 28, iters: 255696, time: 0.546, data: 0.000) G_L1: 13.636 G_L1_ABSOLUTE: 2.574 G_L1_RELATIVE: 11.062 G_Regularizer: 0.000 validation_error: 20.653 +(epoch: 28, iters: 257696, time: 0.547, data: 0.001) G_L1: 16.406 G_L1_ABSOLUTE: 2.750 G_L1_RELATIVE: 13.656 G_Regularizer: 0.000 validation_error: 20.421 +(epoch: 28, iters: 259696, time: 0.547, data: 0.000) G_L1: 12.322 G_L1_ABSOLUTE: 2.228 G_L1_RELATIVE: 10.094 G_Regularizer: 0.000 validation_error: 20.397 +(epoch: 28, iters: 261696, time: 0.543, data: 0.000) G_L1: 14.052 G_L1_ABSOLUTE: 2.234 G_L1_RELATIVE: 11.818 G_Regularizer: 0.000 validation_error: 21.172 +(epoch: 28, iters: 263696, time: 0.544, data: 0.000) G_L1: 15.915 G_L1_ABSOLUTE: 2.879 G_L1_RELATIVE: 13.036 G_Regularizer: 0.000 validation_error: 20.868 +(epoch: 28, iters: 265696, time: 0.559, data: 0.000) G_L1: 14.742 G_L1_ABSOLUTE: 3.041 G_L1_RELATIVE: 11.701 G_Regularizer: 0.000 validation_error: 21.300 +(epoch: 28, iters: 267696, time: 0.542, data: 0.000) G_L1: 17.566 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 15.112 G_Regularizer: 0.000 validation_error: 20.634 +(epoch: 28, iters: 269696, time: 0.541, data: 0.000) G_L1: 16.793 G_L1_ABSOLUTE: 2.987 G_L1_RELATIVE: 13.806 G_Regularizer: 0.000 validation_error: 20.529 +(epoch: 28, iters: 271696, time: 0.540, data: 0.000) G_L1: 11.008 G_L1_ABSOLUTE: 2.054 G_L1_RELATIVE: 8.955 G_Regularizer: 0.000 validation_error: 20.748 +(epoch: 28, iters: 273696, time: 0.550, data: 0.001) G_L1: 14.266 G_L1_ABSOLUTE: 2.293 G_L1_RELATIVE: 11.974 G_Regularizer: 0.000 validation_error: 21.194 +(epoch: 28, iters: 275696, time: 0.547, data: 0.000) G_L1: 11.475 G_L1_ABSOLUTE: 2.141 G_L1_RELATIVE: 9.334 G_Regularizer: 0.000 validation_error: 20.854 +(epoch: 28, iters: 277696, time: 0.539, data: 0.000) G_L1: 14.498 G_L1_ABSOLUTE: 2.345 G_L1_RELATIVE: 12.153 G_Regularizer: 0.000 validation_error: 20.576 +(epoch: 28, iters: 279696, time: 0.558, data: 0.000) G_L1: 15.444 G_L1_ABSOLUTE: 2.525 G_L1_RELATIVE: 12.919 G_Regularizer: 0.000 validation_error: 21.060 +(epoch: 28, iters: 281696, time: 0.538, data: 0.001) G_L1: 18.425 G_L1_ABSOLUTE: 2.313 G_L1_RELATIVE: 16.113 G_Regularizer: 0.000 validation_error: 20.793 +(epoch: 28, iters: 283696, time: 0.549, data: 0.000) G_L1: 15.748 G_L1_ABSOLUTE: 2.484 G_L1_RELATIVE: 13.264 G_Regularizer: 0.000 validation_error: 21.097 +(epoch: 28, iters: 285696, time: 0.541, data: 0.000) G_L1: 13.062 G_L1_ABSOLUTE: 2.600 G_L1_RELATIVE: 10.463 G_Regularizer: 0.000 validation_error: 20.927 +(epoch: 28, iters: 287696, time: 0.552, data: 0.001) G_L1: 13.333 G_L1_ABSOLUTE: 2.738 G_L1_RELATIVE: 10.596 G_Regularizer: 0.000 validation_error: 20.768 +(epoch: 28, iters: 289696, time: 0.542, data: 0.000) 
G_L1: 14.396 G_L1_ABSOLUTE: 2.584 G_L1_RELATIVE: 11.813 G_Regularizer: 0.000 validation_error: 20.292 +(epoch: 28, iters: 291696, time: 0.548, data: 0.000) G_L1: 15.197 G_L1_ABSOLUTE: 2.581 G_L1_RELATIVE: 12.616 G_Regularizer: 0.000 validation_error: 20.672 +(epoch: 28, iters: 293696, time: 0.542, data: 0.000) G_L1: 15.412 G_L1_ABSOLUTE: 2.270 G_L1_RELATIVE: 13.142 G_Regularizer: 0.000 validation_error: 20.288 +(epoch: 28, iters: 295696, time: 0.546, data: 0.000) G_L1: 17.772 G_L1_ABSOLUTE: 3.231 G_L1_RELATIVE: 14.542 G_Regularizer: 0.000 validation_error: 20.325 +(epoch: 28, iters: 297696, time: 0.545, data: 0.000) G_L1: 15.716 G_L1_ABSOLUTE: 2.556 G_L1_RELATIVE: 13.160 G_Regularizer: 0.000 validation_error: 20.643 +(epoch: 28, iters: 299696, time: 0.549, data: 0.000) G_L1: 14.640 G_L1_ABSOLUTE: 2.205 G_L1_RELATIVE: 12.435 G_Regularizer: 0.000 validation_error: 20.207 +(epoch: 28, iters: 301696, time: 0.538, data: 0.000) G_L1: 15.122 G_L1_ABSOLUTE: 3.334 G_L1_RELATIVE: 11.788 G_Regularizer: 0.000 validation_error: 20.309 +(epoch: 29, iters: 944, time: 0.538, data: 0.000) G_L1: 15.783 G_L1_ABSOLUTE: 3.073 G_L1_RELATIVE: 12.710 G_Regularizer: 0.000 validation_error: 21.086 +(epoch: 29, iters: 2944, time: 0.544, data: 0.000) G_L1: 13.689 G_L1_ABSOLUTE: 2.536 G_L1_RELATIVE: 11.153 G_Regularizer: 0.000 validation_error: 21.223 +(epoch: 29, iters: 4944, time: 0.548, data: 0.000) G_L1: 14.162 G_L1_ABSOLUTE: 2.917 G_L1_RELATIVE: 11.245 G_Regularizer: 0.000 validation_error: 19.976 +(epoch: 29, iters: 6944, time: 0.545, data: 0.001) G_L1: 14.831 G_L1_ABSOLUTE: 3.379 G_L1_RELATIVE: 11.452 G_Regularizer: 0.000 validation_error: 21.496 +(epoch: 29, iters: 8944, time: 0.543, data: 0.000) G_L1: 14.772 G_L1_ABSOLUTE: 2.529 G_L1_RELATIVE: 12.243 G_Regularizer: 0.000 validation_error: 20.530 +(epoch: 29, iters: 10944, time: 0.549, data: 0.000) G_L1: 14.717 G_L1_ABSOLUTE: 2.571 G_L1_RELATIVE: 12.146 G_Regularizer: 0.000 validation_error: 20.939 +(epoch: 29, iters: 12944, time: 0.541, data: 0.000) G_L1: 12.274 G_L1_ABSOLUTE: 2.660 G_L1_RELATIVE: 9.614 G_Regularizer: 0.000 validation_error: 20.661 +(epoch: 29, iters: 14944, time: 0.551, data: 0.000) G_L1: 13.604 G_L1_ABSOLUTE: 2.359 G_L1_RELATIVE: 11.245 G_Regularizer: 0.000 validation_error: 20.429 +(epoch: 29, iters: 16944, time: 0.541, data: 0.000) G_L1: 12.651 G_L1_ABSOLUTE: 2.541 G_L1_RELATIVE: 10.110 G_Regularizer: 0.000 validation_error: 21.190 +(epoch: 29, iters: 18944, time: 0.546, data: 0.001) G_L1: 14.415 G_L1_ABSOLUTE: 2.626 G_L1_RELATIVE: 11.789 G_Regularizer: 0.000 validation_error: 20.413 +(epoch: 29, iters: 20944, time: 0.537, data: 0.000) G_L1: 14.138 G_L1_ABSOLUTE: 2.465 G_L1_RELATIVE: 11.673 G_Regularizer: 0.000 validation_error: 20.475 +(epoch: 29, iters: 22944, time: 0.545, data: 0.000) G_L1: 17.081 G_L1_ABSOLUTE: 2.906 G_L1_RELATIVE: 14.175 G_Regularizer: 0.000 validation_error: 20.827 +(epoch: 29, iters: 24944, time: 0.551, data: 0.000) G_L1: 15.952 G_L1_ABSOLUTE: 2.722 G_L1_RELATIVE: 13.230 G_Regularizer: 0.000 validation_error: 21.090 +(epoch: 29, iters: 26944, time: 0.546, data: 0.000) G_L1: 24.085 G_L1_ABSOLUTE: 2.497 G_L1_RELATIVE: 21.588 G_Regularizer: 0.000 validation_error: 20.798 +(epoch: 29, iters: 28944, time: 0.544, data: 0.000) G_L1: 14.598 G_L1_ABSOLUTE: 2.324 G_L1_RELATIVE: 12.274 G_Regularizer: 0.000 validation_error: 20.920 +(epoch: 29, iters: 30944, time: 0.534, data: 0.000) G_L1: 16.065 G_L1_ABSOLUTE: 2.812 G_L1_RELATIVE: 13.253 G_Regularizer: 0.000 validation_error: 20.915 +(epoch: 29, iters: 32944, time: 0.559, 
data: 0.000) G_L1: 13.906 G_L1_ABSOLUTE: 2.862 G_L1_RELATIVE: 11.044 G_Regularizer: 0.000 validation_error: 21.089 +(epoch: 29, iters: 34944, time: 0.541, data: 0.001) G_L1: 12.031 G_L1_ABSOLUTE: 2.347 G_L1_RELATIVE: 9.684 G_Regularizer: 0.000 validation_error: 21.318 +(epoch: 29, iters: 36944, time: 0.539, data: 0.000) G_L1: 12.281 G_L1_ABSOLUTE: 2.623 G_L1_RELATIVE: 9.658 G_Regularizer: 0.000 validation_error: 20.602 +(epoch: 29, iters: 38944, time: 0.549, data: 0.000) G_L1: 14.355 G_L1_ABSOLUTE: 2.330 G_L1_RELATIVE: 12.025 G_Regularizer: 0.000 validation_error: 20.459 +(epoch: 29, iters: 40944, time: 0.547, data: 0.000) G_L1: 15.190 G_L1_ABSOLUTE: 2.479 G_L1_RELATIVE: 12.711 G_Regularizer: 0.000 validation_error: 20.674 +(epoch: 29, iters: 42944, time: 0.544, data: 0.000) G_L1: 17.793 G_L1_ABSOLUTE: 3.034 G_L1_RELATIVE: 14.759 G_Regularizer: 0.000 validation_error: 20.339 +(epoch: 29, iters: 44944, time: 0.548, data: 0.000) G_L1: 13.982 G_L1_ABSOLUTE: 2.896 G_L1_RELATIVE: 11.086 G_Regularizer: 0.000 validation_error: 20.673 +(epoch: 29, iters: 46944, time: 0.551, data: 0.000) G_L1: 16.979 G_L1_ABSOLUTE: 2.934 G_L1_RELATIVE: 14.046 G_Regularizer: 0.000 validation_error: 20.854 +(epoch: 29, iters: 48944, time: 0.547, data: 0.000) G_L1: 14.975 G_L1_ABSOLUTE: 3.475 G_L1_RELATIVE: 11.500 G_Regularizer: 0.000 validation_error: 20.206 +(epoch: 29, iters: 50944, time: 0.544, data: 0.000) G_L1: 14.249 G_L1_ABSOLUTE: 2.163 G_L1_RELATIVE: 12.086 G_Regularizer: 0.000 validation_error: 20.958 +(epoch: 29, iters: 52944, time: 0.548, data: 0.000) G_L1: 14.180 G_L1_ABSOLUTE: 2.477 G_L1_RELATIVE: 11.703 G_Regularizer: 0.000 validation_error: 20.597 +(epoch: 29, iters: 54944, time: 0.547, data: 0.000) G_L1: 15.496 G_L1_ABSOLUTE: 2.495 G_L1_RELATIVE: 13.001 G_Regularizer: 0.000 validation_error: 21.246 +(epoch: 29, iters: 56944, time: 0.543, data: 0.000) G_L1: 14.402 G_L1_ABSOLUTE: 2.654 G_L1_RELATIVE: 11.748 G_Regularizer: 0.000 validation_error: 20.891 +(epoch: 29, iters: 58944, time: 0.543, data: 0.000) G_L1: 16.604 G_L1_ABSOLUTE: 2.797 G_L1_RELATIVE: 13.806 G_Regularizer: 0.000 validation_error: 20.258 +(epoch: 29, iters: 60944, time: 0.544, data: 0.000) G_L1: 13.965 G_L1_ABSOLUTE: 2.766 G_L1_RELATIVE: 11.199 G_Regularizer: 0.000 validation_error: 21.063 +(epoch: 29, iters: 62944, time: 0.549, data: 0.000) G_L1: 13.309 G_L1_ABSOLUTE: 2.634 G_L1_RELATIVE: 10.674 G_Regularizer: 0.000 validation_error: 20.424 +(epoch: 29, iters: 64944, time: 0.539, data: 0.000) G_L1: 11.475 G_L1_ABSOLUTE: 2.254 G_L1_RELATIVE: 9.221 G_Regularizer: 0.000 validation_error: 20.989 +(epoch: 29, iters: 66944, time: 0.538, data: 0.000) G_L1: 18.099 G_L1_ABSOLUTE: 2.493 G_L1_RELATIVE: 15.606 G_Regularizer: 0.000 validation_error: 20.943 +(epoch: 29, iters: 68944, time: 0.541, data: 0.000) G_L1: 16.482 G_L1_ABSOLUTE: 2.690 G_L1_RELATIVE: 13.793 G_Regularizer: 0.000 validation_error: 20.681 +(epoch: 29, iters: 70944, time: 0.546, data: 0.000) G_L1: 13.784 G_L1_ABSOLUTE: 2.608 G_L1_RELATIVE: 11.176 G_Regularizer: 0.000 validation_error: 20.380 +(epoch: 29, iters: 72944, time: 0.541, data: 0.000) G_L1: 13.097 G_L1_ABSOLUTE: 2.109 G_L1_RELATIVE: 10.988 G_Regularizer: 0.000 validation_error: 21.043 +(epoch: 29, iters: 74944, time: 0.559, data: 0.001) G_L1: 17.087 G_L1_ABSOLUTE: 2.419 G_L1_RELATIVE: 14.669 G_Regularizer: 0.000 validation_error: 20.383 +(epoch: 29, iters: 76944, time: 0.539, data: 0.000) G_L1: 14.200 G_L1_ABSOLUTE: 2.532 G_L1_RELATIVE: 11.667 G_Regularizer: 0.000 validation_error: 20.374 +(epoch: 29, iters: 78944, time: 
0.541, data: 0.000) G_L1: 14.180 G_L1_ABSOLUTE: 3.126 G_L1_RELATIVE: 11.053 G_Regularizer: 0.000 validation_error: 20.989 +(epoch: 29, iters: 80944, time: 0.553, data: 0.000) G_L1: 15.635 G_L1_ABSOLUTE: 2.621 G_L1_RELATIVE: 13.014 G_Regularizer: 0.000 validation_error: 20.649 +(epoch: 29, iters: 82944, time: 0.543, data: 0.000) G_L1: 14.037 G_L1_ABSOLUTE: 2.649 G_L1_RELATIVE: 11.388 G_Regularizer: 0.000 validation_error: 20.553 +(epoch: 29, iters: 84944, time: 0.539, data: 0.000) G_L1: 11.372 G_L1_ABSOLUTE: 2.244 G_L1_RELATIVE: 9.129 G_Regularizer: 0.000 validation_error: 20.674 +(epoch: 29, iters: 86944, time: 0.542, data: 0.000) G_L1: 12.872 G_L1_ABSOLUTE: 2.556 G_L1_RELATIVE: 10.316 G_Regularizer: 0.000 validation_error: 20.368 +(epoch: 29, iters: 88944, time: 0.551, data: 0.000) G_L1: 12.699 G_L1_ABSOLUTE: 2.588 G_L1_RELATIVE: 10.110 G_Regularizer: 0.000 validation_error: 20.846 +(epoch: 29, iters: 90944, time: 0.548, data: 0.000) G_L1: 13.365 G_L1_ABSOLUTE: 2.744 G_L1_RELATIVE: 10.621 G_Regularizer: 0.000 validation_error: 20.527 +(epoch: 29, iters: 92944, time: 0.545, data: 0.000) G_L1: 14.232 G_L1_ABSOLUTE: 2.637 G_L1_RELATIVE: 11.595 G_Regularizer: 0.000 validation_error: 20.229 +(epoch: 29, iters: 94944, time: 0.545, data: 0.000) G_L1: 20.259 G_L1_ABSOLUTE: 2.669 G_L1_RELATIVE: 17.590 G_Regularizer: 0.000 validation_error: 20.850 +(epoch: 29, iters: 96944, time: 0.544, data: 0.000) G_L1: 15.416 G_L1_ABSOLUTE: 2.761 G_L1_RELATIVE: 12.655 G_Regularizer: 0.000 validation_error: 20.038 +(epoch: 29, iters: 98944, time: 0.537, data: 0.000) G_L1: 13.431 G_L1_ABSOLUTE: 2.708 G_L1_RELATIVE: 10.723 G_Regularizer: 0.000 validation_error: 21.228 +(epoch: 29, iters: 100944, time: 0.545, data: 0.000) G_L1: 14.055 G_L1_ABSOLUTE: 2.427 G_L1_RELATIVE: 11.627 G_Regularizer: 0.000 validation_error: 20.605 +(epoch: 29, iters: 102944, time: 0.547, data: 0.001) G_L1: 15.922 G_L1_ABSOLUTE: 2.526 G_L1_RELATIVE: 13.396 G_Regularizer: 0.000 validation_error: 20.323 +(epoch: 29, iters: 104944, time: 0.548, data: 0.000) G_L1: 16.599 G_L1_ABSOLUTE: 2.506 G_L1_RELATIVE: 14.093 G_Regularizer: 0.000 validation_error: 20.027 +(epoch: 29, iters: 106944, time: 0.542, data: 0.000) G_L1: 16.517 G_L1_ABSOLUTE: 2.755 G_L1_RELATIVE: 13.763 G_Regularizer: 0.000 validation_error: 20.421 +(epoch: 29, iters: 108944, time: 0.551, data: 0.000) G_L1: 14.025 G_L1_ABSOLUTE: 2.267 G_L1_RELATIVE: 11.758 G_Regularizer: 0.000 validation_error: 21.238 +(epoch: 29, iters: 110944, time: 0.540, data: 0.000) G_L1: 16.654 G_L1_ABSOLUTE: 2.540 G_L1_RELATIVE: 14.114 G_Regularizer: 0.000 validation_error: 19.846 +(epoch: 29, iters: 112944, time: 0.549, data: 0.000) G_L1: 12.593 G_L1_ABSOLUTE: 2.354 G_L1_RELATIVE: 10.239 G_Regularizer: 0.000 validation_error: 20.408 +(epoch: 29, iters: 114944, time: 0.549, data: 0.000) G_L1: 12.488 G_L1_ABSOLUTE: 2.353 G_L1_RELATIVE: 10.135 G_Regularizer: 0.000 validation_error: 21.152 +(epoch: 29, iters: 116944, time: 0.545, data: 0.000) G_L1: 16.239 G_L1_ABSOLUTE: 2.614 G_L1_RELATIVE: 13.625 G_Regularizer: 0.000 validation_error: 20.584 +(epoch: 29, iters: 118944, time: 0.547, data: 0.000) G_L1: 13.338 G_L1_ABSOLUTE: 2.710 G_L1_RELATIVE: 10.629 G_Regularizer: 0.000 validation_error: 21.249 +(epoch: 29, iters: 120944, time: 0.548, data: 0.001) G_L1: 13.866 G_L1_ABSOLUTE: 2.735 G_L1_RELATIVE: 11.131 G_Regularizer: 0.000 validation_error: 20.988 +(epoch: 29, iters: 122944, time: 0.554, data: 0.000) G_L1: 13.125 G_L1_ABSOLUTE: 2.204 G_L1_RELATIVE: 10.921 G_Regularizer: 0.000 validation_error: 20.622 +(epoch: 
29, iters: 124944, time: 0.542, data: 0.000) G_L1: 12.550 G_L1_ABSOLUTE: 2.686 G_L1_RELATIVE: 9.864 G_Regularizer: 0.000 validation_error: 20.743 +(epoch: 29, iters: 126944, time: 0.554, data: 0.000) G_L1: 15.320 G_L1_ABSOLUTE: 2.575 G_L1_RELATIVE: 12.745 G_Regularizer: 0.000 validation_error: 20.972 +(epoch: 29, iters: 128944, time: 0.544, data: 0.000) G_L1: 11.659 G_L1_ABSOLUTE: 2.812 G_L1_RELATIVE: 8.847 G_Regularizer: 0.000 validation_error: 21.224 +(epoch: 29, iters: 130944, time: 0.545, data: 0.000) G_L1: 13.934 G_L1_ABSOLUTE: 2.400 G_L1_RELATIVE: 11.534 G_Regularizer: 0.000 validation_error: 21.205 +(epoch: 29, iters: 132944, time: 0.538, data: 0.000) G_L1: 14.750 G_L1_ABSOLUTE: 2.401 G_L1_RELATIVE: 12.349 G_Regularizer: 0.000 validation_error: 20.697 +(epoch: 29, iters: 134944, time: 0.539, data: 0.000) G_L1: 14.704 G_L1_ABSOLUTE: 2.714 G_L1_RELATIVE: 11.990 G_Regularizer: 0.000 validation_error: 20.915 +(epoch: 29, iters: 136944, time: 0.544, data: 0.000) G_L1: 14.151 G_L1_ABSOLUTE: 2.241 G_L1_RELATIVE: 11.910 G_Regularizer: 0.000 validation_error: 20.357 +(epoch: 29, iters: 138944, time: 0.549, data: 0.000) G_L1: 16.096 G_L1_ABSOLUTE: 2.628 G_L1_RELATIVE: 13.467 G_Regularizer: 0.000 validation_error: 20.364 +(epoch: 29, iters: 140944, time: 0.549, data: 0.000) G_L1: 13.122 G_L1_ABSOLUTE: 2.155 G_L1_RELATIVE: 10.967 G_Regularizer: 0.000 validation_error: 20.773 +(epoch: 29, iters: 142944, time: 0.543, data: 0.000) G_L1: 13.117 G_L1_ABSOLUTE: 2.667 G_L1_RELATIVE: 10.450 G_Regularizer: 0.000 validation_error: 20.813 +(epoch: 29, iters: 144944, time: 0.543, data: 0.000) G_L1: 13.839 G_L1_ABSOLUTE: 2.704 G_L1_RELATIVE: 11.135 G_Regularizer: 0.000 validation_error: 20.747 +(epoch: 29, iters: 146944, time: 0.542, data: 0.000) G_L1: 14.990 G_L1_ABSOLUTE: 2.468 G_L1_RELATIVE: 12.522 G_Regularizer: 0.000 validation_error: 21.075 +(epoch: 29, iters: 148944, time: 0.552, data: 0.000) G_L1: 15.735 G_L1_ABSOLUTE: 2.742 G_L1_RELATIVE: 12.993 G_Regularizer: 0.000 validation_error: 20.867 +(epoch: 29, iters: 150944, time: 0.538, data: 0.000) G_L1: 15.098 G_L1_ABSOLUTE: 2.711 G_L1_RELATIVE: 12.387 G_Regularizer: 0.000 validation_error: 21.192 +(epoch: 29, iters: 152944, time: 0.542, data: 0.000) G_L1: 14.173 G_L1_ABSOLUTE: 2.258 G_L1_RELATIVE: 11.915 G_Regularizer: 0.000 validation_error: 20.855 +(epoch: 29, iters: 154944, time: 0.538, data: 0.000) G_L1: 14.265 G_L1_ABSOLUTE: 2.617 G_L1_RELATIVE: 11.649 G_Regularizer: 0.000 validation_error: 20.730 +(epoch: 29, iters: 156944, time: 0.569, data: 0.000) G_L1: 13.822 G_L1_ABSOLUTE: 2.170 G_L1_RELATIVE: 11.651 G_Regularizer: 0.000 validation_error: 20.981 +(epoch: 29, iters: 158944, time: 0.539, data: 0.000) G_L1: 12.739 G_L1_ABSOLUTE: 2.467 G_L1_RELATIVE: 10.272 G_Regularizer: 0.000 validation_error: 20.793 +(epoch: 29, iters: 160944, time: 0.549, data: 0.001) G_L1: 17.458 G_L1_ABSOLUTE: 2.625 G_L1_RELATIVE: 14.833 G_Regularizer: 0.000 validation_error: 21.370 +(epoch: 29, iters: 162944, time: 0.541, data: 0.000) G_L1: 12.440 G_L1_ABSOLUTE: 2.258 G_L1_RELATIVE: 10.182 G_Regularizer: 0.000 validation_error: 20.377 +(epoch: 29, iters: 164944, time: 0.547, data: 0.000) G_L1: 16.039 G_L1_ABSOLUTE: 2.683 G_L1_RELATIVE: 13.356 G_Regularizer: 0.000 validation_error: 20.830 +(epoch: 29, iters: 166944, time: 0.553, data: 0.000) G_L1: 14.918 G_L1_ABSOLUTE: 2.632 G_L1_RELATIVE: 12.286 G_Regularizer: 0.000 validation_error: 20.835 +(epoch: 29, iters: 168944, time: 0.546, data: 0.000) G_L1: 14.001 G_L1_ABSOLUTE: 2.389 G_L1_RELATIVE: 11.612 G_Regularizer: 0.000 
validation_error: 21.360 +(epoch: 29, iters: 170944, time: 0.546, data: 0.000) G_L1: 12.809 G_L1_ABSOLUTE: 2.608 G_L1_RELATIVE: 10.202 G_Regularizer: 0.000 validation_error: 20.626 +(epoch: 29, iters: 172944, time: 0.547, data: 0.000) G_L1: 14.471 G_L1_ABSOLUTE: 2.335 G_L1_RELATIVE: 12.136 G_Regularizer: 0.000 validation_error: 20.508 +(epoch: 29, iters: 174944, time: 0.549, data: 0.000) G_L1: 15.480 G_L1_ABSOLUTE: 3.167 G_L1_RELATIVE: 12.313 G_Regularizer: 0.000 validation_error: 20.181 +(epoch: 29, iters: 176944, time: 0.552, data: 0.000) G_L1: 13.173 G_L1_ABSOLUTE: 2.341 G_L1_RELATIVE: 10.832 G_Regularizer: 0.000 validation_error: 20.915 +(epoch: 29, iters: 178944, time: 0.540, data: 0.000) G_L1: 14.595 G_L1_ABSOLUTE: 2.693 G_L1_RELATIVE: 11.902 G_Regularizer: 0.000 validation_error: 20.968 +(epoch: 29, iters: 180944, time: 0.555, data: 0.000) G_L1: 13.217 G_L1_ABSOLUTE: 2.460 G_L1_RELATIVE: 10.757 G_Regularizer: 0.000 validation_error: 20.630 +(epoch: 29, iters: 182944, time: 0.546, data: 0.000) G_L1: 13.267 G_L1_ABSOLUTE: 2.334 G_L1_RELATIVE: 10.932 G_Regularizer: 0.000 validation_error: 20.617 +(epoch: 29, iters: 184944, time: 0.550, data: 0.000) G_L1: 13.577 G_L1_ABSOLUTE: 2.570 G_L1_RELATIVE: 11.006 G_Regularizer: 0.000 validation_error: 20.813 +(epoch: 29, iters: 186944, time: 0.541, data: 0.000) G_L1: 13.685 G_L1_ABSOLUTE: 2.218 G_L1_RELATIVE: 11.467 G_Regularizer: 0.000 validation_error: 20.883 +(epoch: 29, iters: 188944, time: 0.540, data: 0.000) G_L1: 16.126 G_L1_ABSOLUTE: 2.730 G_L1_RELATIVE: 13.396 G_Regularizer: 0.000 validation_error: 20.788 +(epoch: 29, iters: 190944, time: 0.543, data: 0.000) G_L1: 15.471 G_L1_ABSOLUTE: 2.942 G_L1_RELATIVE: 12.530 G_Regularizer: 0.000 validation_error: 20.708 +(epoch: 29, iters: 192944, time: 0.538, data: 0.000) G_L1: 14.545 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 12.091 G_Regularizer: 0.000 validation_error: 20.161 +(epoch: 29, iters: 194944, time: 0.555, data: 0.000) G_L1: 12.990 G_L1_ABSOLUTE: 2.089 G_L1_RELATIVE: 10.901 G_Regularizer: 0.000 validation_error: 20.503 +(epoch: 29, iters: 196944, time: 0.545, data: 0.000) G_L1: 13.842 G_L1_ABSOLUTE: 2.788 G_L1_RELATIVE: 11.054 G_Regularizer: 0.000 validation_error: 20.507 +(epoch: 29, iters: 198944, time: 0.539, data: 0.000) G_L1: 16.051 G_L1_ABSOLUTE: 2.507 G_L1_RELATIVE: 13.544 G_Regularizer: 0.000 validation_error: 20.455 +(epoch: 29, iters: 200944, time: 0.545, data: 0.000) G_L1: 14.118 G_L1_ABSOLUTE: 2.710 G_L1_RELATIVE: 11.408 G_Regularizer: 0.000 validation_error: 21.144 +(epoch: 29, iters: 202944, time: 0.546, data: 0.000) G_L1: 13.683 G_L1_ABSOLUTE: 2.746 G_L1_RELATIVE: 10.937 G_Regularizer: 0.000 validation_error: 20.515 +(epoch: 29, iters: 204944, time: 0.538, data: 0.001) G_L1: 14.252 G_L1_ABSOLUTE: 2.943 G_L1_RELATIVE: 11.310 G_Regularizer: 0.000 validation_error: 21.027 +(epoch: 29, iters: 206944, time: 0.542, data: 0.000) G_L1: 14.931 G_L1_ABSOLUTE: 2.274 G_L1_RELATIVE: 12.657 G_Regularizer: 0.000 validation_error: 20.502 +(epoch: 29, iters: 208944, time: 0.547, data: 0.000) G_L1: 13.522 G_L1_ABSOLUTE: 2.211 G_L1_RELATIVE: 11.310 G_Regularizer: 0.000 validation_error: 20.513 +(epoch: 29, iters: 210944, time: 0.535, data: 0.001) G_L1: 27.837 G_L1_ABSOLUTE: 2.755 G_L1_RELATIVE: 25.083 G_Regularizer: 0.000 validation_error: 20.395 +(epoch: 29, iters: 212944, time: 0.554, data: 0.000) G_L1: 13.080 G_L1_ABSOLUTE: 2.435 G_L1_RELATIVE: 10.645 G_Regularizer: 0.000 validation_error: 20.368 +(epoch: 29, iters: 214944, time: 0.545, data: 0.000) G_L1: 13.408 G_L1_ABSOLUTE: 2.653 
G_L1_RELATIVE: 10.754 G_Regularizer: 0.000 validation_error: 19.890 +(epoch: 29, iters: 216944, time: 0.538, data: 0.001) G_L1: 13.051 G_L1_ABSOLUTE: 2.651 G_L1_RELATIVE: 10.400 G_Regularizer: 0.000 validation_error: 20.712 +(epoch: 29, iters: 218944, time: 0.538, data: 0.000) G_L1: 14.443 G_L1_ABSOLUTE: 2.618 G_L1_RELATIVE: 11.826 G_Regularizer: 0.000 validation_error: 20.941 +(epoch: 29, iters: 220944, time: 0.555, data: 0.000) G_L1: 12.196 G_L1_ABSOLUTE: 2.671 G_L1_RELATIVE: 9.524 G_Regularizer: 0.000 validation_error: 21.364 +(epoch: 29, iters: 222944, time: 0.542, data: 0.000) G_L1: 17.012 G_L1_ABSOLUTE: 2.552 G_L1_RELATIVE: 14.460 G_Regularizer: 0.000 validation_error: 20.860 +(epoch: 29, iters: 224944, time: 0.547, data: 0.000) G_L1: 13.753 G_L1_ABSOLUTE: 2.283 G_L1_RELATIVE: 11.470 G_Regularizer: 0.000 validation_error: 20.975 +(epoch: 29, iters: 226944, time: 0.549, data: 0.000) G_L1: 14.797 G_L1_ABSOLUTE: 2.993 G_L1_RELATIVE: 11.804 G_Regularizer: 0.000 validation_error: 20.610 +(epoch: 29, iters: 228944, time: 0.547, data: 0.000) G_L1: 14.709 G_L1_ABSOLUTE: 2.619 G_L1_RELATIVE: 12.090 G_Regularizer: 0.000 validation_error: 20.716 +(epoch: 29, iters: 230944, time: 0.543, data: 0.000) G_L1: 15.298 G_L1_ABSOLUTE: 2.323 G_L1_RELATIVE: 12.975 G_Regularizer: 0.000 validation_error: 20.867 +(epoch: 29, iters: 232944, time: 0.542, data: 0.000) G_L1: 10.488 G_L1_ABSOLUTE: 2.380 G_L1_RELATIVE: 8.108 G_Regularizer: 0.000 validation_error: 20.584 +(epoch: 29, iters: 234944, time: 0.554, data: 0.000) G_L1: 15.791 G_L1_ABSOLUTE: 2.588 G_L1_RELATIVE: 13.203 G_Regularizer: 0.000 validation_error: 20.675 +(epoch: 29, iters: 236944, time: 0.540, data: 0.000) G_L1: 15.349 G_L1_ABSOLUTE: 2.640 G_L1_RELATIVE: 12.709 G_Regularizer: 0.000 validation_error: 20.478 +(epoch: 29, iters: 238944, time: 0.544, data: 0.000) G_L1: 13.044 G_L1_ABSOLUTE: 2.265 G_L1_RELATIVE: 10.778 G_Regularizer: 0.000 validation_error: 20.951 +(epoch: 29, iters: 240944, time: 0.539, data: 0.000) G_L1: 15.243 G_L1_ABSOLUTE: 2.403 G_L1_RELATIVE: 12.840 G_Regularizer: 0.000 validation_error: 21.164 +(epoch: 29, iters: 242944, time: 0.553, data: 0.000) G_L1: 14.975 G_L1_ABSOLUTE: 2.536 G_L1_RELATIVE: 12.439 G_Regularizer: 0.000 validation_error: 20.564 +(epoch: 29, iters: 244944, time: 0.544, data: 0.000) G_L1: 15.308 G_L1_ABSOLUTE: 2.371 G_L1_RELATIVE: 12.937 G_Regularizer: 0.000 validation_error: 20.533 +(epoch: 29, iters: 246944, time: 0.557, data: 0.000) G_L1: 15.104 G_L1_ABSOLUTE: 2.848 G_L1_RELATIVE: 12.256 G_Regularizer: 0.000 validation_error: 20.611 +(epoch: 29, iters: 248944, time: 0.546, data: 0.000) G_L1: 14.972 G_L1_ABSOLUTE: 2.338 G_L1_RELATIVE: 12.634 G_Regularizer: 0.000 validation_error: 21.062 +(epoch: 29, iters: 250944, time: 0.546, data: 0.000) G_L1: 11.544 G_L1_ABSOLUTE: 2.744 G_L1_RELATIVE: 8.800 G_Regularizer: 0.000 validation_error: 20.619 +(epoch: 29, iters: 252944, time: 0.542, data: 0.000) G_L1: 15.329 G_L1_ABSOLUTE: 2.378 G_L1_RELATIVE: 12.951 G_Regularizer: 0.000 validation_error: 21.042 +(epoch: 29, iters: 254944, time: 0.551, data: 0.000) G_L1: 13.078 G_L1_ABSOLUTE: 2.406 G_L1_RELATIVE: 10.671 G_Regularizer: 0.000 validation_error: 21.055 +(epoch: 29, iters: 256944, time: 0.539, data: 0.000) G_L1: 13.231 G_L1_ABSOLUTE: 2.655 G_L1_RELATIVE: 10.577 G_Regularizer: 0.000 validation_error: 20.305 +(epoch: 29, iters: 258944, time: 0.543, data: 0.000) G_L1: 13.720 G_L1_ABSOLUTE: 2.748 G_L1_RELATIVE: 10.972 G_Regularizer: 0.000 validation_error: 21.093 +(epoch: 29, iters: 260944, time: 0.553, data: 0.000) 
G_L1: 15.331 G_L1_ABSOLUTE: 2.640 G_L1_RELATIVE: 12.692 G_Regularizer: 0.000 validation_error: 20.538 +(epoch: 29, iters: 262944, time: 0.541, data: 0.000) G_L1: 14.216 G_L1_ABSOLUTE: 2.383 G_L1_RELATIVE: 11.833 G_Regularizer: 0.000 validation_error: 21.278 +(epoch: 29, iters: 264944, time: 0.545, data: 0.000) G_L1: 13.856 G_L1_ABSOLUTE: 2.511 G_L1_RELATIVE: 11.345 G_Regularizer: 0.000 validation_error: 20.865 +(epoch: 29, iters: 266944, time: 0.542, data: 0.000) G_L1: 15.001 G_L1_ABSOLUTE: 2.962 G_L1_RELATIVE: 12.039 G_Regularizer: 0.000 validation_error: 20.971 +(epoch: 29, iters: 268944, time: 0.548, data: 0.000) G_L1: 14.325 G_L1_ABSOLUTE: 2.700 G_L1_RELATIVE: 11.625 G_Regularizer: 0.000 validation_error: 20.774 +(epoch: 29, iters: 270944, time: 0.534, data: 0.000) G_L1: 18.325 G_L1_ABSOLUTE: 3.427 G_L1_RELATIVE: 14.899 G_Regularizer: 0.000 validation_error: 21.179 +(epoch: 29, iters: 272944, time: 0.547, data: 0.000) G_L1: 12.054 G_L1_ABSOLUTE: 2.513 G_L1_RELATIVE: 9.541 G_Regularizer: 0.000 validation_error: 20.529 +(epoch: 29, iters: 274944, time: 0.547, data: 0.000) G_L1: 13.416 G_L1_ABSOLUTE: 2.371 G_L1_RELATIVE: 11.046 G_Regularizer: 0.000 validation_error: 21.097 +(epoch: 29, iters: 276944, time: 0.545, data: 0.000) G_L1: 15.214 G_L1_ABSOLUTE: 2.619 G_L1_RELATIVE: 12.594 G_Regularizer: 0.000 validation_error: 20.799 +(epoch: 29, iters: 278944, time: 0.549, data: 0.000) G_L1: 14.631 G_L1_ABSOLUTE: 2.523 G_L1_RELATIVE: 12.107 G_Regularizer: 0.000 validation_error: 21.150 +(epoch: 29, iters: 280944, time: 0.548, data: 0.000) G_L1: 14.953 G_L1_ABSOLUTE: 2.784 G_L1_RELATIVE: 12.169 G_Regularizer: 0.000 validation_error: 20.950 +(epoch: 29, iters: 282944, time: 0.547, data: 0.000) G_L1: 11.592 G_L1_ABSOLUTE: 2.420 G_L1_RELATIVE: 9.172 G_Regularizer: 0.000 validation_error: 20.692 +(epoch: 29, iters: 284944, time: 0.546, data: 0.000) G_L1: 15.213 G_L1_ABSOLUTE: 2.173 G_L1_RELATIVE: 13.039 G_Regularizer: 0.000 validation_error: 20.695 +(epoch: 29, iters: 286944, time: 0.550, data: 0.000) G_L1: 13.939 G_L1_ABSOLUTE: 2.279 G_L1_RELATIVE: 11.660 G_Regularizer: 0.000 validation_error: 20.259 +(epoch: 29, iters: 288944, time: 0.547, data: 0.000) G_L1: 15.641 G_L1_ABSOLUTE: 2.805 G_L1_RELATIVE: 12.837 G_Regularizer: 0.000 validation_error: 20.986 +(epoch: 29, iters: 290944, time: 0.548, data: 0.000) G_L1: 13.848 G_L1_ABSOLUTE: 3.074 G_L1_RELATIVE: 10.774 G_Regularizer: 0.000 validation_error: 20.316 +(epoch: 29, iters: 292944, time: 0.548, data: 0.000) G_L1: 14.348 G_L1_ABSOLUTE: 2.650 G_L1_RELATIVE: 11.698 G_Regularizer: 0.000 validation_error: 20.608 +(epoch: 29, iters: 294944, time: 0.544, data: 0.000) G_L1: 13.913 G_L1_ABSOLUTE: 2.411 G_L1_RELATIVE: 11.502 G_Regularizer: 0.000 validation_error: 20.531 +(epoch: 29, iters: 296944, time: 0.545, data: 0.000) G_L1: 12.746 G_L1_ABSOLUTE: 2.254 G_L1_RELATIVE: 10.491 G_Regularizer: 0.000 validation_error: 20.696 +(epoch: 29, iters: 298944, time: 0.547, data: 0.000) G_L1: 17.247 G_L1_ABSOLUTE: 2.664 G_L1_RELATIVE: 14.583 G_Regularizer: 0.000 validation_error: 20.342 +(epoch: 29, iters: 300944, time: 0.545, data: 0.000) G_L1: 15.133 G_L1_ABSOLUTE: 2.485 G_L1_RELATIVE: 12.647 G_Regularizer: 0.000 validation_error: 20.925 +(epoch: 30, iters: 192, time: 0.544, data: 0.000) G_L1: 15.310 G_L1_ABSOLUTE: 2.481 G_L1_RELATIVE: 12.828 G_Regularizer: 0.000 validation_error: 20.912 +(epoch: 30, iters: 2192, time: 0.544, data: 0.000) G_L1: 14.362 G_L1_ABSOLUTE: 2.494 G_L1_RELATIVE: 11.867 G_Regularizer: 0.000 validation_error: 20.445 +(epoch: 30, iters: 4192, 
time: 0.550, data: 0.000) G_L1: 11.628 G_L1_ABSOLUTE: 2.353 G_L1_RELATIVE: 9.276 G_Regularizer: 0.000 validation_error: 20.581 +(epoch: 30, iters: 6192, time: 0.541, data: 0.000) G_L1: 23.910 G_L1_ABSOLUTE: 2.935 G_L1_RELATIVE: 20.975 G_Regularizer: 0.000 validation_error: 20.758 +(epoch: 30, iters: 8192, time: 0.542, data: 0.000) G_L1: 16.729 G_L1_ABSOLUTE: 2.746 G_L1_RELATIVE: 13.983 G_Regularizer: 0.000 validation_error: 21.262 +(epoch: 30, iters: 10192, time: 0.543, data: 0.000) G_L1: 15.147 G_L1_ABSOLUTE: 3.024 G_L1_RELATIVE: 12.124 G_Regularizer: 0.000 validation_error: 20.783 +(epoch: 30, iters: 12192, time: 0.558, data: 0.000) G_L1: 12.274 G_L1_ABSOLUTE: 2.421 G_L1_RELATIVE: 9.853 G_Regularizer: 0.000 validation_error: 20.861 +(epoch: 30, iters: 14192, time: 0.549, data: 0.000) G_L1: 14.458 G_L1_ABSOLUTE: 2.483 G_L1_RELATIVE: 11.975 G_Regularizer: 0.000 validation_error: 20.652 +(epoch: 30, iters: 16192, time: 0.548, data: 0.000) G_L1: 14.216 G_L1_ABSOLUTE: 2.343 G_L1_RELATIVE: 11.874 G_Regularizer: 0.000 validation_error: 20.645 +(epoch: 30, iters: 18192, time: 0.547, data: 0.000) G_L1: 14.198 G_L1_ABSOLUTE: 2.470 G_L1_RELATIVE: 11.728 G_Regularizer: 0.000 validation_error: 20.879 +(epoch: 30, iters: 20192, time: 0.548, data: 0.000) G_L1: 13.245 G_L1_ABSOLUTE: 2.696 G_L1_RELATIVE: 10.549 G_Regularizer: 0.000 validation_error: 21.215 +(epoch: 30, iters: 22192, time: 0.544, data: 0.000) G_L1: 13.640 G_L1_ABSOLUTE: 2.677 G_L1_RELATIVE: 10.963 G_Regularizer: 0.000 validation_error: 20.810 +(epoch: 30, iters: 24192, time: 0.542, data: 0.000) G_L1: 15.972 G_L1_ABSOLUTE: 3.341 G_L1_RELATIVE: 12.630 G_Regularizer: 0.000 validation_error: 20.790 +(epoch: 30, iters: 26192, time: 0.545, data: 0.000) G_L1: 11.734 G_L1_ABSOLUTE: 1.850 G_L1_RELATIVE: 9.884 G_Regularizer: 0.000 validation_error: 20.687 +(epoch: 30, iters: 28192, time: 0.545, data: 0.000) G_L1: 17.166 G_L1_ABSOLUTE: 2.424 G_L1_RELATIVE: 14.743 G_Regularizer: 0.000 validation_error: 21.116 +(epoch: 30, iters: 30192, time: 0.546, data: 0.000) G_L1: 12.942 G_L1_ABSOLUTE: 2.597 G_L1_RELATIVE: 10.345 G_Regularizer: 0.000 validation_error: 20.595 +(epoch: 30, iters: 32192, time: 0.549, data: 0.000) G_L1: 15.928 G_L1_ABSOLUTE: 2.880 G_L1_RELATIVE: 13.049 G_Regularizer: 0.000 validation_error: 20.463 +(epoch: 30, iters: 34192, time: 0.544, data: 0.000) G_L1: 14.299 G_L1_ABSOLUTE: 2.695 G_L1_RELATIVE: 11.604 G_Regularizer: 0.000 validation_error: 20.027 +(epoch: 30, iters: 36192, time: 0.541, data: 0.000) G_L1: 13.821 G_L1_ABSOLUTE: 2.255 G_L1_RELATIVE: 11.566 G_Regularizer: 0.000 validation_error: 20.491 +(epoch: 30, iters: 38192, time: 0.547, data: 0.000) G_L1: 14.256 G_L1_ABSOLUTE: 2.541 G_L1_RELATIVE: 11.715 G_Regularizer: 0.000 validation_error: 21.206 +(epoch: 30, iters: 40192, time: 0.550, data: 0.000) G_L1: 12.104 G_L1_ABSOLUTE: 2.325 G_L1_RELATIVE: 9.779 G_Regularizer: 0.000 validation_error: 21.229 +(epoch: 30, iters: 42192, time: 0.547, data: 0.000) G_L1: 12.979 G_L1_ABSOLUTE: 2.156 G_L1_RELATIVE: 10.822 G_Regularizer: 0.000 validation_error: 21.224 +(epoch: 30, iters: 44192, time: 0.547, data: 0.000) G_L1: 14.828 G_L1_ABSOLUTE: 2.433 G_L1_RELATIVE: 12.395 G_Regularizer: 0.000 validation_error: 20.838 +(epoch: 30, iters: 46192, time: 0.546, data: 0.000) G_L1: 14.724 G_L1_ABSOLUTE: 2.576 G_L1_RELATIVE: 12.147 G_Regularizer: 0.000 validation_error: 20.617 +(epoch: 30, iters: 48192, time: 0.539, data: 0.000) G_L1: 15.215 G_L1_ABSOLUTE: 2.243 G_L1_RELATIVE: 12.972 G_Regularizer: 0.000 validation_error: 20.752 +(epoch: 30, iters: 
50192, time: 0.540, data: 0.001) G_L1: 13.391 G_L1_ABSOLUTE: 2.980 G_L1_RELATIVE: 10.411 G_Regularizer: 0.000 validation_error: 20.684 +(epoch: 30, iters: 52192, time: 0.549, data: 0.000) G_L1: 13.780 G_L1_ABSOLUTE: 2.566 G_L1_RELATIVE: 11.214 G_Regularizer: 0.000 validation_error: 20.659 +(epoch: 30, iters: 54192, time: 0.538, data: 0.000) G_L1: 16.405 G_L1_ABSOLUTE: 3.036 G_L1_RELATIVE: 13.369 G_Regularizer: 0.000 validation_error: 21.271 +(epoch: 30, iters: 56192, time: 0.548, data: 0.000) G_L1: 14.091 G_L1_ABSOLUTE: 3.002 G_L1_RELATIVE: 11.090 G_Regularizer: 0.000 validation_error: 20.887 +(epoch: 30, iters: 58192, time: 0.547, data: 0.000) G_L1: 14.600 G_L1_ABSOLUTE: 2.448 G_L1_RELATIVE: 12.152 G_Regularizer: 0.000 validation_error: 20.169 +(epoch: 30, iters: 60192, time: 0.542, data: 0.000) G_L1: 14.532 G_L1_ABSOLUTE: 2.130 G_L1_RELATIVE: 12.402 G_Regularizer: 0.000 validation_error: 21.194 +(epoch: 30, iters: 62192, time: 0.550, data: 0.001) G_L1: 15.906 G_L1_ABSOLUTE: 2.881 G_L1_RELATIVE: 13.025 G_Regularizer: 0.000 validation_error: 20.722 +(epoch: 30, iters: 64192, time: 0.549, data: 0.000) G_L1: 14.577 G_L1_ABSOLUTE: 2.736 G_L1_RELATIVE: 11.841 G_Regularizer: 0.000 validation_error: 20.695 +(epoch: 30, iters: 66192, time: 0.540, data: 0.001) G_L1: 13.675 G_L1_ABSOLUTE: 2.776 G_L1_RELATIVE: 10.898 G_Regularizer: 0.000 validation_error: 20.957 +(epoch: 30, iters: 68192, time: 0.545, data: 0.001) G_L1: 15.177 G_L1_ABSOLUTE: 2.242 G_L1_RELATIVE: 12.935 G_Regularizer: 0.000 validation_error: 20.752 +(epoch: 30, iters: 70192, time: 0.564, data: 0.001) G_L1: 18.073 G_L1_ABSOLUTE: 2.750 G_L1_RELATIVE: 15.323 G_Regularizer: 0.000 validation_error: 20.901 +(epoch: 30, iters: 72192, time: 0.546, data: 0.000) G_L1: 14.435 G_L1_ABSOLUTE: 3.039 G_L1_RELATIVE: 11.396 G_Regularizer: 0.000 validation_error: 20.659 +(epoch: 30, iters: 74192, time: 0.544, data: 0.000) G_L1: 15.635 G_L1_ABSOLUTE: 2.414 G_L1_RELATIVE: 13.220 G_Regularizer: 0.000 validation_error: 20.562 +(epoch: 30, iters: 76192, time: 0.544, data: 0.000) G_L1: 11.577 G_L1_ABSOLUTE: 2.485 G_L1_RELATIVE: 9.091 G_Regularizer: 0.000 validation_error: 21.064 +(epoch: 30, iters: 78192, time: 0.541, data: 0.000) G_L1: 17.850 G_L1_ABSOLUTE: 2.776 G_L1_RELATIVE: 15.074 G_Regularizer: 0.000 validation_error: 21.188 +(epoch: 30, iters: 80192, time: 0.545, data: 0.000) G_L1: 29.229 G_L1_ABSOLUTE: 3.360 G_L1_RELATIVE: 25.869 G_Regularizer: 0.000 validation_error: 20.590 +(epoch: 30, iters: 82192, time: 0.546, data: 0.000) G_L1: 17.498 G_L1_ABSOLUTE: 2.635 G_L1_RELATIVE: 14.862 G_Regularizer: 0.000 validation_error: 20.145 +(epoch: 30, iters: 84192, time: 0.540, data: 0.000) G_L1: 14.515 G_L1_ABSOLUTE: 3.195 G_L1_RELATIVE: 11.319 G_Regularizer: 0.000 validation_error: 20.327 +(epoch: 30, iters: 86192, time: 0.551, data: 0.000) G_L1: 13.770 G_L1_ABSOLUTE: 2.391 G_L1_RELATIVE: 11.379 G_Regularizer: 0.000 validation_error: 20.901 +(epoch: 30, iters: 88192, time: 0.543, data: 0.000) G_L1: 12.906 G_L1_ABSOLUTE: 2.146 G_L1_RELATIVE: 10.760 G_Regularizer: 0.000 validation_error: 20.541 +(epoch: 30, iters: 90192, time: 0.547, data: 0.000) G_L1: 13.634 G_L1_ABSOLUTE: 2.588 G_L1_RELATIVE: 11.046 G_Regularizer: 0.000 validation_error: 20.699 +(epoch: 30, iters: 92192, time: 0.550, data: 0.000) G_L1: 16.852 G_L1_ABSOLUTE: 2.975 G_L1_RELATIVE: 13.877 G_Regularizer: 0.000 validation_error: 20.751 +(epoch: 30, iters: 94192, time: 0.550, data: 0.000) G_L1: 13.199 G_L1_ABSOLUTE: 2.342 G_L1_RELATIVE: 10.857 G_Regularizer: 0.000 validation_error: 20.624 +(epoch: 
30, iters: 96192, time: 0.542, data: 0.000) G_L1: 13.920 G_L1_ABSOLUTE: 2.320 G_L1_RELATIVE: 11.600 G_Regularizer: 0.000 validation_error: 20.977
+(epoch: 30, iters: 98192, time: 0.553, data: 0.000) G_L1: 14.092 G_L1_ABSOLUTE: 2.421 G_L1_RELATIVE: 11.671 G_Regularizer: 0.000 validation_error: 20.600
+(epoch: 30, iters: 100192, time: 0.548, data: 0.000) G_L1: 11.938 G_L1_ABSOLUTE: 2.640 G_L1_RELATIVE: 9.298 G_Regularizer: 0.000 validation_error: 20.681
+(epoch: 30, iters: 102192, time: 0.542, data: 0.000) G_L1: 14.652 G_L1_ABSOLUTE: 2.521 G_L1_RELATIVE: 12.131 G_Regularizer: 0.000 validation_error: 20.613
+(epoch: 30, iters: 104192, time: 0.550, data: 0.000) G_L1: 14.278 G_L1_ABSOLUTE: 2.259 G_L1_RELATIVE: 12.019 G_Regularizer: 0.000 validation_error: 20.982
+(epoch: 30, iters: 106192, time: 0.541, data: 0.000) G_L1: 13.384 G_L1_ABSOLUTE: 2.311 G_L1_RELATIVE: 11.073 G_Regularizer: 0.000 validation_error: 21.059
+(epoch: 30, iters: 108192, time: 0.538, data: 0.001) G_L1: 13.742 G_L1_ABSOLUTE: 2.767 G_L1_RELATIVE: 10.975 G_Regularizer: 0.000 validation_error: 20.550
+(epoch: 30, iters: 110192, time: 0.546, data: 0.000) G_L1: 14.492 G_L1_ABSOLUTE: 2.681 G_L1_RELATIVE: 11.811 G_Regularizer: 0.000 validation_error: 20.833
+(epoch: 30, iters: 112192, time: 0.548, data: 0.000) G_L1: 15.726 G_L1_ABSOLUTE: 2.587 G_L1_RELATIVE: 13.139 G_Regularizer: 0.000 validation_error: 20.560
+(epoch: 30, iters: 114192, time: 0.542, data: 0.000) G_L1: 13.824 G_L1_ABSOLUTE: 2.682 G_L1_RELATIVE: 11.142 G_Regularizer: 0.000 validation_error: 20.676
+(epoch: 30, iters: 116192, time: 0.547, data: 0.000) G_L1: 14.053 G_L1_ABSOLUTE: 2.374 G_L1_RELATIVE: 11.679 G_Regularizer: 0.000 validation_error: 20.653
+(epoch: 30, iters: 118192, time: 0.543, data: 0.000) G_L1: 13.528 G_L1_ABSOLUTE: 2.464 G_L1_RELATIVE: 11.064 G_Regularizer: 0.000 validation_error: 20.509
+(epoch: 30, iters: 120192, time: 0.546, data: 0.000) G_L1: 12.066 G_L1_ABSOLUTE: 2.384 G_L1_RELATIVE: 9.682 G_Regularizer: 0.000 validation_error: 21.084
+(epoch: 30, iters: 122192, time: 0.550, data: 0.000) G_L1: 14.032 G_L1_ABSOLUTE: 2.476 G_L1_RELATIVE: 11.557 G_Regularizer: 0.000 validation_error: 20.767
+(epoch: 30, iters: 124192, time: 0.544, data: 0.000) G_L1: 12.804 G_L1_ABSOLUTE: 2.436 G_L1_RELATIVE: 10.368 G_Regularizer: 0.000 validation_error: 20.689
+(epoch: 30, iters: 126192, time: 0.547, data: 0.000) G_L1: 15.183 G_L1_ABSOLUTE: 2.451 G_L1_RELATIVE: 12.732 G_Regularizer: 0.000 validation_error: 21.035
+(epoch: 30, iters: 128192, time: 0.536, data: 0.000) G_L1: 16.607 G_L1_ABSOLUTE: 2.832 G_L1_RELATIVE: 13.776 G_Regularizer: 0.000 validation_error: 20.960
+(epoch: 30, iters: 130192, time: 0.548, data: 0.000) G_L1: 13.462 G_L1_ABSOLUTE: 2.642 G_L1_RELATIVE: 10.820 G_Regularizer: 0.000 validation_error: 20.655
+(epoch: 30, iters: 132192, time: 0.542, data: 0.000) G_L1: 12.991 G_L1_ABSOLUTE: 2.154 G_L1_RELATIVE: 10.837 G_Regularizer: 0.000 validation_error: 20.496
+(epoch: 30, iters: 134192, time: 0.545, data: 0.000) G_L1: 12.391 G_L1_ABSOLUTE: 2.381 G_L1_RELATIVE: 10.010 G_Regularizer: 0.000 validation_error: 20.431
+(epoch: 30, iters: 136192, time: 0.548, data: 0.000) G_L1: 15.861 G_L1_ABSOLUTE: 2.715 G_L1_RELATIVE: 13.146 G_Regularizer: 0.000 validation_error: 20.853
+(epoch: 30, iters: 138192, time: 0.552, data: 0.000) G_L1: 14.146 G_L1_ABSOLUTE: 2.463 G_L1_RELATIVE: 11.683 G_Regularizer: 0.000 validation_error: 20.613
+(epoch: 30, iters: 140192, time: 0.542, data: 0.000) G_L1: 13.303 G_L1_ABSOLUTE: 2.702 G_L1_RELATIVE: 10.601 G_Regularizer: 0.000 validation_error: 21.349
+(epoch: 30, iters: 142192, time: 0.549, data: 0.001) G_L1: 14.247 G_L1_ABSOLUTE: 2.544 G_L1_RELATIVE: 11.703 G_Regularizer: 0.000 validation_error: 21.032
+(epoch: 30, iters: 144192, time: 0.547, data: 0.000) G_L1: 11.657 G_L1_ABSOLUTE: 2.312 G_L1_RELATIVE: 9.345 G_Regularizer: 0.000 validation_error: 21.345
+(epoch: 30, iters: 146192, time: 0.542, data: 0.000) G_L1: 15.758 G_L1_ABSOLUTE: 2.295 G_L1_RELATIVE: 13.463 G_Regularizer: 0.000 validation_error: 20.412
+(epoch: 30, iters: 148192, time: 0.543, data: 0.000) G_L1: 15.643 G_L1_ABSOLUTE: 2.901 G_L1_RELATIVE: 12.742 G_Regularizer: 0.000 validation_error: 21.291
+(epoch: 30, iters: 150192, time: 0.544, data: 0.000) G_L1: 12.995 G_L1_ABSOLUTE: 2.597 G_L1_RELATIVE: 10.398 G_Regularizer: 0.000 validation_error: 21.241
+(epoch: 30, iters: 152192, time: 0.541, data: 0.000) G_L1: 14.608 G_L1_ABSOLUTE: 2.648 G_L1_RELATIVE: 11.960 G_Regularizer: 0.000 validation_error: 20.891
+(epoch: 30, iters: 154192, time: 0.554, data: 0.000) G_L1: 24.356 G_L1_ABSOLUTE: 2.663 G_L1_RELATIVE: 21.693 G_Regularizer: 0.000 validation_error: 20.610
+(epoch: 30, iters: 156192, time: 0.544, data: 0.000) G_L1: 15.083 G_L1_ABSOLUTE: 2.622 G_L1_RELATIVE: 12.461 G_Regularizer: 0.000 validation_error: 20.724
+(epoch: 30, iters: 158192, time: 0.544, data: 0.001) G_L1: 14.519 G_L1_ABSOLUTE: 2.156 G_L1_RELATIVE: 12.363 G_Regularizer: 0.000 validation_error: 20.919
+(epoch: 30, iters: 160192, time: 0.540, data: 0.001) G_L1: 13.068 G_L1_ABSOLUTE: 2.825 G_L1_RELATIVE: 10.243 G_Regularizer: 0.000 validation_error: 21.023
+(epoch: 30, iters: 162192, time: 0.542, data: 0.000) G_L1: 15.199 G_L1_ABSOLUTE: 2.965 G_L1_RELATIVE: 12.234 G_Regularizer: 0.000 validation_error: 20.321
+(epoch: 30, iters: 164192, time: 0.541, data: 0.000) G_L1: 17.267 G_L1_ABSOLUTE: 2.569 G_L1_RELATIVE: 14.698 G_Regularizer: 0.000 validation_error: 20.736
+(epoch: 30, iters: 166192, time: 0.541, data: 0.000) G_L1: 13.261 G_L1_ABSOLUTE: 2.568 G_L1_RELATIVE: 10.694 G_Regularizer: 0.000 validation_error: 21.061
+(epoch: 30, iters: 168192, time: 0.544, data: 0.000) G_L1: 13.988 G_L1_ABSOLUTE: 2.409 G_L1_RELATIVE: 11.579 G_Regularizer: 0.000 validation_error: 20.620
+(epoch: 30, iters: 170192, time: 0.545, data: 0.000) G_L1: 15.456 G_L1_ABSOLUTE: 2.824 G_L1_RELATIVE: 12.631 G_Regularizer: 0.000 validation_error: 21.144
+(epoch: 30, iters: 172192, time: 0.548, data: 0.000) G_L1: 12.206 G_L1_ABSOLUTE: 2.307 G_L1_RELATIVE: 9.899 G_Regularizer: 0.000 validation_error: 20.612
+(epoch: 30, iters: 174192, time: 0.547, data: 0.000) G_L1: 13.960 G_L1_ABSOLUTE: 2.417 G_L1_RELATIVE: 11.544 G_Regularizer: 0.000 validation_error: 20.673
+(epoch: 30, iters: 176192, time: 0.543, data: 0.000) G_L1: 16.451 G_L1_ABSOLUTE: 2.393 G_L1_RELATIVE: 14.057 G_Regularizer: 0.000 validation_error: 21.047
+(epoch: 30, iters: 178192, time: 0.545, data: 0.000) G_L1: 14.124 G_L1_ABSOLUTE: 2.734 G_L1_RELATIVE: 11.390 G_Regularizer: 0.000 validation_error: 20.538
+(epoch: 30, iters: 180192, time: 0.549, data: 0.000) G_L1: 15.308 G_L1_ABSOLUTE: 2.930 G_L1_RELATIVE: 12.378 G_Regularizer: 0.000 validation_error: 20.644
+(epoch: 30, iters: 182192, time: 0.548, data: 0.001) G_L1: 16.418 G_L1_ABSOLUTE: 2.946 G_L1_RELATIVE: 13.472 G_Regularizer: 0.000 validation_error: 20.613
+(epoch: 30, iters: 184192, time: 0.550, data: 0.001) G_L1: 14.245 G_L1_ABSOLUTE: 2.519 G_L1_RELATIVE: 11.725 G_Regularizer: 0.000 validation_error: 20.332
+(epoch: 30, iters: 186192, time: 0.546, data: 0.000) G_L1: 14.132 G_L1_ABSOLUTE: 2.385 G_L1_RELATIVE: 11.747 G_Regularizer: 0.000 validation_error: 20.764
+(epoch: 30, iters: 188192, time: 0.552, data: 0.000) G_L1: 17.100 G_L1_ABSOLUTE: 3.022 G_L1_RELATIVE: 14.078 G_Regularizer: 0.000 validation_error: 20.570
+(epoch: 30, iters: 190192, time: 0.542, data: 0.000) G_L1: 13.068 G_L1_ABSOLUTE: 2.231 G_L1_RELATIVE: 10.837 G_Regularizer: 0.000 validation_error: 20.932
+(epoch: 30, iters: 192192, time: 0.550, data: 0.000) G_L1: 12.071 G_L1_ABSOLUTE: 2.302 G_L1_RELATIVE: 9.769 G_Regularizer: 0.000 validation_error: 20.702
+(epoch: 30, iters: 194192, time: 0.549, data: 0.000) G_L1: 15.276 G_L1_ABSOLUTE: 2.676 G_L1_RELATIVE: 12.600 G_Regularizer: 0.000 validation_error: 20.901
+(epoch: 30, iters: 196192, time: 0.546, data: 0.000) G_L1: 15.834 G_L1_ABSOLUTE: 2.276 G_L1_RELATIVE: 13.558 G_Regularizer: 0.000 validation_error: 20.344
+(epoch: 30, iters: 198192, time: 0.548, data: 0.000) G_L1: 14.914 G_L1_ABSOLUTE: 2.189 G_L1_RELATIVE: 12.725 G_Regularizer: 0.000 validation_error: 20.560
+(epoch: 30, iters: 200192, time: 0.538, data: 0.000) G_L1: 14.316 G_L1_ABSOLUTE: 2.786 G_L1_RELATIVE: 11.531 G_Regularizer: 0.000 validation_error: 20.727
+(epoch: 30, iters: 202192, time: 0.548, data: 0.000) G_L1: 14.288 G_L1_ABSOLUTE: 2.416 G_L1_RELATIVE: 11.872 G_Regularizer: 0.000 validation_error: 21.105
+(epoch: 30, iters: 204192, time: 0.541, data: 0.001) G_L1: 16.212 G_L1_ABSOLUTE: 2.215 G_L1_RELATIVE: 13.997 G_Regularizer: 0.000 validation_error: 20.931
+(epoch: 30, iters: 206192, time: 0.541, data: 0.000) G_L1: 14.991 G_L1_ABSOLUTE: 2.636 G_L1_RELATIVE: 12.355 G_Regularizer: 0.000 validation_error: 21.086
+(epoch: 30, iters: 208192, time: 0.544, data: 0.000) G_L1: 15.000 G_L1_ABSOLUTE: 2.177 G_L1_RELATIVE: 12.823 G_Regularizer: 0.000 validation_error: 20.567
+(epoch: 30, iters: 210192, time: 0.546, data: 0.000) G_L1: 13.916 G_L1_ABSOLUTE: 2.541 G_L1_RELATIVE: 11.374 G_Regularizer: 0.000 validation_error: 20.452
+(epoch: 30, iters: 212192, time: 0.543, data: 0.001) G_L1: 13.664 G_L1_ABSOLUTE: 2.686 G_L1_RELATIVE: 10.978 G_Regularizer: 0.000 validation_error: 20.899
+(epoch: 30, iters: 214192, time: 0.549, data: 0.000) G_L1: 14.181 G_L1_ABSOLUTE: 2.361 G_L1_RELATIVE: 11.820 G_Regularizer: 0.000 validation_error: 20.813
+(epoch: 30, iters: 216192, time: 0.542, data: 0.000) G_L1: 13.448 G_L1_ABSOLUTE: 2.329 G_L1_RELATIVE: 11.119 G_Regularizer: 0.000 validation_error: 20.528
+(epoch: 30, iters: 218192, time: 0.554, data: 0.000) G_L1: 15.203 G_L1_ABSOLUTE: 2.783 G_L1_RELATIVE: 12.421 G_Regularizer: 0.000 validation_error: 21.239
+(epoch: 30, iters: 220192, time: 0.542, data: 0.000) G_L1: 12.132 G_L1_ABSOLUTE: 2.869 G_L1_RELATIVE: 9.264 G_Regularizer: 0.000 validation_error: 21.286
+(epoch: 30, iters: 222192, time: 0.545, data: 0.000) G_L1: 16.070 G_L1_ABSOLUTE: 2.572 G_L1_RELATIVE: 13.498 G_Regularizer: 0.000 validation_error: 21.268
+(epoch: 30, iters: 224192, time: 0.555, data: 0.000) G_L1: 16.334 G_L1_ABSOLUTE: 2.970 G_L1_RELATIVE: 13.365 G_Regularizer: 0.000 validation_error: 20.755
+(epoch: 30, iters: 226192, time: 0.539, data: 0.000) G_L1: 12.728 G_L1_ABSOLUTE: 2.525 G_L1_RELATIVE: 10.203 G_Regularizer: 0.000 validation_error: 20.991
+(epoch: 30, iters: 228192, time: 0.558, data: 0.000) G_L1: 14.609 G_L1_ABSOLUTE: 2.271 G_L1_RELATIVE: 12.338 G_Regularizer: 0.000 validation_error: 21.066
+(epoch: 30, iters: 230192, time: 0.551, data: 0.000) G_L1: 13.980 G_L1_ABSOLUTE: 3.189 G_L1_RELATIVE: 10.791 G_Regularizer: 0.000 validation_error: 20.977
+(epoch: 30, iters: 232192, time: 0.553, data: 0.000) G_L1: 13.544 G_L1_ABSOLUTE: 2.247 G_L1_RELATIVE: 11.297 G_Regularizer: 0.000 validation_error: 20.285
+(epoch: 30, iters: 234192, time: 0.539, data: 0.001) G_L1: 13.285 G_L1_ABSOLUTE: 2.228 G_L1_RELATIVE: 11.057 G_Regularizer: 0.000 validation_error: 20.276
+(epoch: 30, iters: 236192, time: 0.548, data: 0.000) G_L1: 14.005 G_L1_ABSOLUTE: 2.630 G_L1_RELATIVE: 11.375 G_Regularizer: 0.000 validation_error: 20.623
+(epoch: 30, iters: 238192, time: 0.544, data: 0.000) G_L1: 13.833 G_L1_ABSOLUTE: 2.208 G_L1_RELATIVE: 11.625 G_Regularizer: 0.000 validation_error: 20.741
+(epoch: 30, iters: 240192, time: 0.540, data: 0.000) G_L1: 14.473 G_L1_ABSOLUTE: 2.637 G_L1_RELATIVE: 11.837 G_Regularizer: 0.000 validation_error: 20.483
+(epoch: 30, iters: 242192, time: 0.545, data: 0.000) G_L1: 13.537 G_L1_ABSOLUTE: 2.571 G_L1_RELATIVE: 10.965 G_Regularizer: 0.000 validation_error: 20.704
+(epoch: 30, iters: 244192, time: 0.549, data: 0.000) G_L1: 15.545 G_L1_ABSOLUTE: 2.417 G_L1_RELATIVE: 13.128 G_Regularizer: 0.000 validation_error: 20.660
+(epoch: 30, iters: 246192, time: 0.539, data: 0.000) G_L1: 14.572 G_L1_ABSOLUTE: 2.612 G_L1_RELATIVE: 11.960 G_Regularizer: 0.000 validation_error: 20.585
+(epoch: 30, iters: 248192, time: 0.546, data: 0.000) G_L1: 15.845 G_L1_ABSOLUTE: 2.579 G_L1_RELATIVE: 13.267 G_Regularizer: 0.000 validation_error: 20.987
+(epoch: 30, iters: 250192, time: 0.544, data: 0.000) G_L1: 15.287 G_L1_ABSOLUTE: 2.680 G_L1_RELATIVE: 12.607 G_Regularizer: 0.000 validation_error: 20.768
+(epoch: 30, iters: 252192, time: 0.545, data: 0.000) G_L1: 13.651 G_L1_ABSOLUTE: 2.493 G_L1_RELATIVE: 11.158 G_Regularizer: 0.000 validation_error: 20.363
+(epoch: 30, iters: 254192, time: 0.540, data: 0.000) G_L1: 12.626 G_L1_ABSOLUTE: 2.204 G_L1_RELATIVE: 10.422 G_Regularizer: 0.000 validation_error: 20.203
+(epoch: 30, iters: 256192, time: 0.550, data: 0.000) G_L1: 16.060 G_L1_ABSOLUTE: 2.317 G_L1_RELATIVE: 13.743 G_Regularizer: 0.000 validation_error: 20.820
+(epoch: 30, iters: 258192, time: 0.540, data: 0.000) G_L1: 13.435 G_L1_ABSOLUTE: 2.157 G_L1_RELATIVE: 11.278 G_Regularizer: 0.000 validation_error: 20.589
+(epoch: 30, iters: 260192, time: 0.539, data: 0.000) G_L1: 14.946 G_L1_ABSOLUTE: 2.712 G_L1_RELATIVE: 12.234 G_Regularizer: 0.000 validation_error: 20.916
+(epoch: 30, iters: 262192, time: 0.547, data: 0.001) G_L1: 14.251 G_L1_ABSOLUTE: 2.674 G_L1_RELATIVE: 11.577 G_Regularizer: 0.000 validation_error: 20.989
+(epoch: 30, iters: 264192, time: 0.542, data: 0.000) G_L1: 13.953 G_L1_ABSOLUTE: 2.346 G_L1_RELATIVE: 11.607 G_Regularizer: 0.000 validation_error: 20.214
+(epoch: 30, iters: 266192, time: 0.544, data: 0.000) G_L1: 15.665 G_L1_ABSOLUTE: 2.493 G_L1_RELATIVE: 13.172 G_Regularizer: 0.000 validation_error: 20.245
+(epoch: 30, iters: 268192, time: 0.551, data: 0.000) G_L1: 14.015 G_L1_ABSOLUTE: 2.436 G_L1_RELATIVE: 11.580 G_Regularizer: 0.000 validation_error: 20.156
+(epoch: 30, iters: 270192, time: 0.560, data: 0.000) G_L1: 13.780 G_L1_ABSOLUTE: 2.683 G_L1_RELATIVE: 11.097 G_Regularizer: 0.000 validation_error: 19.974
+(epoch: 30, iters: 272192, time: 0.540, data: 0.000) G_L1: 13.332 G_L1_ABSOLUTE: 2.342 G_L1_RELATIVE: 10.990 G_Regularizer: 0.000 validation_error: 20.414
+(epoch: 30, iters: 274192, time: 0.546, data: 0.000) G_L1: 12.581 G_L1_ABSOLUTE: 2.061 G_L1_RELATIVE: 10.520 G_Regularizer: 0.000 validation_error: 20.755
+(epoch: 30, iters: 276192, time: 0.552, data: 0.000) G_L1: 13.195 G_L1_ABSOLUTE: 2.634 G_L1_RELATIVE: 10.561 G_Regularizer: 0.000 validation_error: 20.978
+(epoch: 30, iters: 278192, time: 0.540, data: 0.000) G_L1: 13.686 G_L1_ABSOLUTE: 2.332 G_L1_RELATIVE: 11.354 G_Regularizer: 0.000 validation_error: 20.805
+(epoch: 30, iters: 280192, time: 0.540, data: 0.000) G_L1: 12.855 G_L1_ABSOLUTE: 2.698 G_L1_RELATIVE: 10.157 G_Regularizer: 0.000 validation_error: 21.163
+(epoch: 30, iters: 282192, time: 0.552, data: 0.001) G_L1: 14.369 G_L1_ABSOLUTE: 2.496 G_L1_RELATIVE: 11.873 G_Regularizer: 0.000 validation_error: 20.367
+(epoch: 30, iters: 284192, time: 0.556, data: 0.001) G_L1: 13.699 G_L1_ABSOLUTE: 2.264 G_L1_RELATIVE: 11.435 G_Regularizer: 0.000 validation_error: 20.773
+(epoch: 30, iters: 286192, time: 0.540, data: 0.000) G_L1: 14.483 G_L1_ABSOLUTE: 3.027 G_L1_RELATIVE: 11.456 G_Regularizer: 0.000 validation_error: 20.855
+(epoch: 30, iters: 288192, time: 0.546, data: 0.000) G_L1: 16.015 G_L1_ABSOLUTE: 2.755 G_L1_RELATIVE: 13.260 G_Regularizer: 0.000 validation_error: 20.633
+(epoch: 30, iters: 290192, time: 0.546, data: 0.000) G_L1: 14.025 G_L1_ABSOLUTE: 2.548 G_L1_RELATIVE: 11.477 G_Regularizer: 0.000 validation_error: 21.372
+(epoch: 30, iters: 292192, time: 0.546, data: 0.000) G_L1: 17.001 G_L1_ABSOLUTE: 2.625 G_L1_RELATIVE: 14.375 G_Regularizer: 0.000 validation_error: 20.575
+(epoch: 30, iters: 294192, time: 0.545, data: 0.000) G_L1: 15.014 G_L1_ABSOLUTE: 2.756 G_L1_RELATIVE: 12.258 G_Regularizer: 0.000 validation_error: 20.430
+(epoch: 30, iters: 296192, time: 0.553, data: 0.000) G_L1: 13.765 G_L1_ABSOLUTE: 2.332 G_L1_RELATIVE: 11.433 G_Regularizer: 0.000 validation_error: 20.697
+(epoch: 30, iters: 298192, time: 0.543, data: 0.000) G_L1: 13.902 G_L1_ABSOLUTE: 2.321 G_L1_RELATIVE: 11.581 G_Regularizer: 0.000 validation_error: 20.219
+(epoch: 30, iters: 300192, time: 0.548, data: 0.000) G_L1: 13.175 G_L1_ABSOLUTE: 2.282 G_L1_RELATIVE: 10.893 G_Regularizer: 0.000 validation_error: 21.028
+(epoch: 30, iters: 302192, time: 0.542, data: 0.000) G_L1: 12.268 G_L1_ABSOLUTE: 2.378 G_L1_RELATIVE: 9.890 G_Regularizer: 0.000 validation_error: 20.694
+(epoch: 31, iters: 1440, time: 0.553, data: 0.000) G_L1: 15.434 G_L1_ABSOLUTE: 2.569 G_L1_RELATIVE: 12.865 G_Regularizer: 0.000 validation_error: 21.134
+(epoch: 31, iters: 3440, time: 0.546, data: 0.000) G_L1: 16.577 G_L1_ABSOLUTE: 2.512 G_L1_RELATIVE: 14.065 G_Regularizer: 0.000 validation_error: 20.754
+(epoch: 31, iters: 5440, time: 0.551, data: 0.001) G_L1: 19.288 G_L1_ABSOLUTE: 3.330 G_L1_RELATIVE: 15.958 G_Regularizer: 0.000 validation_error: 20.264
+(epoch: 31, iters: 7440, time: 0.545, data: 0.000) G_L1: 13.993 G_L1_ABSOLUTE: 2.339 G_L1_RELATIVE: 11.653 G_Regularizer: 0.000 validation_error: 20.330
+(epoch: 31, iters: 9440, time: 0.542, data: 0.000) G_L1: 12.465 G_L1_ABSOLUTE: 2.335 G_L1_RELATIVE: 10.130 G_Regularizer: 0.000 validation_error: 20.677
+(epoch: 31, iters: 11440, time: 0.547, data: 0.000) G_L1: 13.523 G_L1_ABSOLUTE: 2.839 G_L1_RELATIVE: 10.684 G_Regularizer: 0.000 validation_error: 21.027
+(epoch: 31, iters: 13440, time: 0.541, data: 0.000) G_L1: 17.036 G_L1_ABSOLUTE: 2.886 G_L1_RELATIVE: 14.150 G_Regularizer: 0.000 validation_error: 20.242
+(epoch: 31, iters: 15440, time: 0.546, data: 0.000) G_L1: 14.027 G_L1_ABSOLUTE: 2.259 G_L1_RELATIVE: 11.768 G_Regularizer: 0.000 validation_error: 20.603
+(epoch: 31, iters: 17440, time: 0.540, data: 0.000) G_L1: 18.270 G_L1_ABSOLUTE: 2.637 G_L1_RELATIVE: 15.633 G_Regularizer: 0.000 validation_error: 20.864
+(epoch: 31, iters: 19440, time: 0.548, data: 0.000) G_L1: 22.899 G_L1_ABSOLUTE: 2.258 G_L1_RELATIVE: 20.640 G_Regularizer: 0.000 validation_error: 20.400
+(epoch: 31, iters: 21440, time: 0.548, data: 0.000) G_L1: 12.428 G_L1_ABSOLUTE: 2.336 G_L1_RELATIVE: 10.092 G_Regularizer: 0.000 validation_error: 20.429
+(epoch: 31, iters: 23440, time: 0.555, data: 0.000) G_L1: 14.527 G_L1_ABSOLUTE: 2.924 G_L1_RELATIVE: 11.603 G_Regularizer: 0.000 validation_error: 20.473
+(epoch: 31, iters: 25440, time: 0.546, data: 0.000) G_L1: 15.899 G_L1_ABSOLUTE: 2.877 G_L1_RELATIVE: 13.022 G_Regularizer: 0.000 validation_error: 20.548
+(epoch: 31, iters: 27440, time: 0.543, data: 0.000) G_L1: 15.554 G_L1_ABSOLUTE: 2.689 G_L1_RELATIVE: 12.865 G_Regularizer: 0.000 validation_error: 20.793
+(epoch: 31, iters: 29440, time: 0.537, data: 0.000) G_L1: 18.762 G_L1_ABSOLUTE: 2.737 G_L1_RELATIVE: 16.025 G_Regularizer: 0.000 validation_error: 20.354
+(epoch: 31, iters: 31440, time: 0.542, data: 0.000) G_L1: 21.914 G_L1_ABSOLUTE: 2.936 G_L1_RELATIVE: 18.978 G_Regularizer: 0.000 validation_error: 20.271
+(epoch: 31, iters: 33440, time: 0.541, data: 0.001) G_L1: 14.733 G_L1_ABSOLUTE: 2.414 G_L1_RELATIVE: 12.320 G_Regularizer: 0.000 validation_error: 20.574
+(epoch: 31, iters: 35440, time: 0.545, data: 0.000) G_L1: 15.080 G_L1_ABSOLUTE: 2.464 G_L1_RELATIVE: 12.616 G_Regularizer: 0.000 validation_error: 20.811
+(epoch: 31, iters: 37440, time: 0.556, data: 0.000) G_L1: 14.983 G_L1_ABSOLUTE: 2.559 G_L1_RELATIVE: 12.424 G_Regularizer: 0.000 validation_error: 20.948
+(epoch: 31, iters: 39440, time: 0.544, data: 0.000) G_L1: 16.416 G_L1_ABSOLUTE: 2.578 G_L1_RELATIVE: 13.838 G_Regularizer: 0.000 validation_error: 20.311
+(epoch: 31, iters: 41440, time: 0.545, data: 0.000) G_L1: 16.003 G_L1_ABSOLUTE: 3.008 G_L1_RELATIVE: 12.995 G_Regularizer: 0.000 validation_error: 20.784
+(epoch: 31, iters: 43440, time: 0.535, data: 0.000) G_L1: 14.084 G_L1_ABSOLUTE: 2.774 G_L1_RELATIVE: 11.310 G_Regularizer: 0.000 validation_error: 20.508
+(epoch: 31, iters: 45440, time: 0.543, data: 0.000) G_L1: 14.728 G_L1_ABSOLUTE: 2.470 G_L1_RELATIVE: 12.258 G_Regularizer: 0.000 validation_error: 20.829
+(epoch: 31, iters: 47440, time: 0.549, data: 0.000) G_L1: 15.850 G_L1_ABSOLUTE: 2.645 G_L1_RELATIVE: 13.205 G_Regularizer: 0.000 validation_error: 20.983
+(epoch: 31, iters: 49440, time: 0.542, data: 0.001) G_L1: 13.453 G_L1_ABSOLUTE: 2.059 G_L1_RELATIVE: 11.393 G_Regularizer: 0.000 validation_error: 20.757
+(epoch: 31, iters: 51440, time: 0.545, data: 0.000) G_L1: 14.042 G_L1_ABSOLUTE: 2.746 G_L1_RELATIVE: 11.296 G_Regularizer: 0.000 validation_error: 20.920
+(epoch: 31, iters: 53440, time: 0.547, data: 0.000) G_L1: 14.859 G_L1_ABSOLUTE: 2.786 G_L1_RELATIVE: 12.072 G_Regularizer: 0.000 validation_error: 21.458
+(epoch: 31, iters: 55440, time: 0.545, data: 0.000) G_L1: 13.159 G_L1_ABSOLUTE: 2.479 G_L1_RELATIVE: 10.680 G_Regularizer: 0.000 validation_error: 20.672
+(epoch: 31, iters: 57440, time: 0.540, data: 0.000) G_L1: 14.141 G_L1_ABSOLUTE: 2.037 G_L1_RELATIVE: 12.104 G_Regularizer: 0.000 validation_error: 20.708
+(epoch: 31, iters: 59440, time: 0.551, data: 0.000) G_L1: 15.777 G_L1_ABSOLUTE: 2.857 G_L1_RELATIVE: 12.921 G_Regularizer: 0.000 validation_error: 21.281
+(epoch: 31, iters: 61440, time: 0.540, data: 0.000) G_L1: 13.814 G_L1_ABSOLUTE: 2.286 G_L1_RELATIVE: 11.528 G_Regularizer: 0.000 validation_error: 20.463
+(epoch: 31, iters: 63440, time: 0.543, data: 0.000) G_L1: 15.590 G_L1_ABSOLUTE: 2.699 G_L1_RELATIVE: 12.891 G_Regularizer: 0.000 validation_error: 21.075
+(epoch: 31, iters: 65440, time: 0.546, data: 0.000) G_L1: 14.573 G_L1_ABSOLUTE: 2.827 G_L1_RELATIVE: 11.746 G_Regularizer: 0.000 validation_error: 21.196
+(epoch: 31, iters: 67440, time: 0.548, data: 0.000) G_L1: 15.223 G_L1_ABSOLUTE: 2.638 G_L1_RELATIVE: 12.585 G_Regularizer: 0.000 validation_error: 20.310
+(epoch: 31, iters: 69440, time: 0.540, data: 0.000) G_L1: 24.533 G_L1_ABSOLUTE: 2.542 G_L1_RELATIVE: 21.991 G_Regularizer: 0.000 validation_error: 20.324
+(epoch: 31, iters: 71440, time: 0.549, data: 0.000) G_L1: 13.004 G_L1_ABSOLUTE: 2.143 G_L1_RELATIVE: 10.861 G_Regularizer: 0.000 validation_error: 20.616
+(epoch: 31, iters: 73440, time: 0.541, data: 0.000) G_L1: 14.813 G_L1_ABSOLUTE: 2.426 G_L1_RELATIVE: 12.386 G_Regularizer: 0.000 validation_error: 20.624
+(epoch: 31, iters: 75440, time: 0.543, data: 0.001) G_L1: 11.469 G_L1_ABSOLUTE: 2.281 G_L1_RELATIVE: 9.188 G_Regularizer: 0.000 validation_error: 20.450
+(epoch: 31, iters: 77440, time: 0.537, data: 0.000) G_L1: 15.981 G_L1_ABSOLUTE: 3.171 G_L1_RELATIVE: 12.810 G_Regularizer: 0.000 validation_error: 21.421
+(epoch: 31, iters: 79440, time: 0.549, data: 0.000) G_L1: 15.608 G_L1_ABSOLUTE: 2.995 G_L1_RELATIVE: 12.613 G_Regularizer: 0.000 validation_error: 20.701
+(epoch: 31, iters: 81440, time: 0.540, data: 0.000) G_L1: 15.966 G_L1_ABSOLUTE: 3.113 G_L1_RELATIVE: 12.853 G_Regularizer: 0.000 validation_error: 20.746
+(epoch: 31, iters: 83440, time: 0.542, data: 0.000) G_L1: 14.199 G_L1_ABSOLUTE: 2.359 G_L1_RELATIVE: 11.840 G_Regularizer: 0.000 validation_error: 20.980
+(epoch: 31, iters: 85440, time: 0.545, data: 0.000) G_L1: 11.848 G_L1_ABSOLUTE: 2.477 G_L1_RELATIVE: 9.371 G_Regularizer: 0.000 validation_error: 20.646
+(epoch: 31, iters: 87440, time: 0.546, data: 0.001) G_L1: 15.115 G_L1_ABSOLUTE: 2.576 G_L1_RELATIVE: 12.540 G_Regularizer: 0.000 validation_error: 20.885
+(epoch: 31, iters: 89440, time: 0.549, data: 0.000) G_L1: 14.289 G_L1_ABSOLUTE: 2.438 G_L1_RELATIVE: 11.851 G_Regularizer: 0.000 validation_error: 20.749
+(epoch: 31, iters: 91440, time: 0.553, data: 0.000) G_L1: 13.378 G_L1_ABSOLUTE: 2.610 G_L1_RELATIVE: 10.768 G_Regularizer: 0.000 validation_error: 20.640
+(epoch: 31, iters: 93440, time: 0.546, data: 0.000) G_L1: 13.935 G_L1_ABSOLUTE: 2.341 G_L1_RELATIVE: 11.594 G_Regularizer: 0.000 validation_error: 20.710
+(epoch: 31, iters: 95440, time: 0.538, data: 0.001) G_L1: 13.643 G_L1_ABSOLUTE: 2.198 G_L1_RELATIVE: 11.445 G_Regularizer: 0.000 validation_error: 20.703
+(epoch: 31, iters: 97440, time: 0.546, data: 0.000) G_L1: 14.737 G_L1_ABSOLUTE: 2.440 G_L1_RELATIVE: 12.296 G_Regularizer: 0.000 validation_error: 20.899
+(epoch: 31, iters: 99440, time: 0.558, data: 0.000) G_L1: 13.818 G_L1_ABSOLUTE: 2.416 G_L1_RELATIVE: 11.402 G_Regularizer: 0.000 validation_error: 21.150
+(epoch: 31, iters: 101440, time: 0.543, data: 0.000) G_L1: 14.930 G_L1_ABSOLUTE: 2.657 G_L1_RELATIVE: 12.273 G_Regularizer: 0.000 validation_error: 21.045
+(epoch: 31, iters: 103440, time: 0.544, data: 0.000) G_L1: 16.166 G_L1_ABSOLUTE: 2.889 G_L1_RELATIVE: 13.278 G_Regularizer: 0.000 validation_error: 20.248
+(epoch: 31, iters: 105440, time: 0.555, data: 0.000) G_L1: 13.641 G_L1_ABSOLUTE: 2.683 G_L1_RELATIVE: 10.959 G_Regularizer: 0.000 validation_error: 20.787
+(epoch: 31, iters: 107440, time: 0.541, data: 0.000) G_L1: 16.349 G_L1_ABSOLUTE: 2.113 G_L1_RELATIVE: 14.236 G_Regularizer: 0.000 validation_error: 20.651
+(epoch: 31, iters: 109440, time: 0.543, data: 0.000) G_L1: 14.956 G_L1_ABSOLUTE: 2.443 G_L1_RELATIVE: 12.513 G_Regularizer: 0.000 validation_error: 20.656
+(epoch: 31, iters: 111440, time: 0.547, data: 0.000) G_L1: 14.041 G_L1_ABSOLUTE: 2.533 G_L1_RELATIVE: 11.508 G_Regularizer: 0.000 validation_error: 21.146
+(epoch: 31, iters: 113440, time: 0.547, data: 0.000) G_L1: 14.198 G_L1_ABSOLUTE: 2.375 G_L1_RELATIVE: 11.822 G_Regularizer: 0.000 validation_error: 20.951
+(epoch: 31, iters: 115440, time: 0.543, data: 0.000) G_L1: 12.266 G_L1_ABSOLUTE: 2.366 G_L1_RELATIVE: 9.900 G_Regularizer: 0.000 validation_error: 20.530
+(epoch: 31, iters: 117440, time: 0.546, data: 0.001) G_L1: 14.075 G_L1_ABSOLUTE: 2.622 G_L1_RELATIVE: 11.453 G_Regularizer: 0.000 validation_error: 20.860
+(epoch: 31, iters: 119440, time: 0.537, data: 0.000) G_L1: 13.390 G_L1_ABSOLUTE: 2.410 G_L1_RELATIVE: 10.980 G_Regularizer: 0.000 validation_error: 21.235
+(epoch: 31, iters: 121440, time: 0.541, data: 0.000) G_L1: 13.559 G_L1_ABSOLUTE: 2.615 G_L1_RELATIVE: 10.944 G_Regularizer: 0.000 validation_error: 20.978
+(epoch: 31, iters: 123440, time: 0.556, data: 0.000) G_L1: 16.467 G_L1_ABSOLUTE: 2.628 G_L1_RELATIVE: 13.839 G_Regularizer: 0.000 validation_error: 21.761
+(epoch: 31, iters: 125440, time: 0.542, data: 0.000) G_L1: 16.186 G_L1_ABSOLUTE: 2.771 G_L1_RELATIVE: 13.416 G_Regularizer: 0.000 validation_error: 20.315
+(epoch: 31, iters: 127440, time: 0.542, data: 0.001) G_L1: 15.462 G_L1_ABSOLUTE: 2.292 G_L1_RELATIVE: 13.169 G_Regularizer: 0.000 validation_error: 20.475
+(epoch: 31, iters: 129440, time: 0.548, data: 0.000) G_L1: 15.405 G_L1_ABSOLUTE: 2.987 G_L1_RELATIVE: 12.418 G_Regularizer: 0.000 validation_error: 20.633
+(epoch: 31, iters: 131440, time: 0.554, data: 0.000) G_L1: 14.183 G_L1_ABSOLUTE: 3.031 G_L1_RELATIVE: 11.151 G_Regularizer: 0.000 validation_error: 20.661
+(epoch: 31, iters: 133440, time: 0.548, data: 0.000) G_L1: 13.904 G_L1_ABSOLUTE: 2.668 G_L1_RELATIVE: 11.236 G_Regularizer: 0.000 validation_error: 21.061
+(epoch: 31, iters: 135440, time: 0.542, data: 0.001) G_L1: 13.359 G_L1_ABSOLUTE: 2.166 G_L1_RELATIVE: 11.193 G_Regularizer: 0.000 validation_error: 21.101
+(epoch: 31, iters: 137440, time: 0.556, data: 0.000) G_L1: 17.182 G_L1_ABSOLUTE: 2.755 G_L1_RELATIVE: 14.427 G_Regularizer: 0.000 validation_error: 20.327
+(epoch: 31, iters: 139440, time: 0.551, data: 0.001) G_L1: 13.251 G_L1_ABSOLUTE: 2.982 G_L1_RELATIVE: 10.268 G_Regularizer: 0.000 validation_error: 21.214
+(epoch: 31, iters: 141440, time: 0.539, data: 0.000) G_L1: 14.122 G_L1_ABSOLUTE: 3.042 G_L1_RELATIVE: 11.080 G_Regularizer: 0.000 validation_error: 20.270
+(epoch: 31, iters: 143440, time: 0.547, data: 0.000) G_L1: 15.564 G_L1_ABSOLUTE: 2.857 G_L1_RELATIVE: 12.707 G_Regularizer: 0.000 validation_error: 21.031
+(epoch: 31, iters: 145440, time: 0.541, data: 0.000) G_L1: 13.296 G_L1_ABSOLUTE: 2.644 G_L1_RELATIVE: 10.652 G_Regularizer: 0.000 validation_error: 20.546
+(epoch: 31, iters: 147440, time: 0.547, data: 0.001) G_L1: 16.899 G_L1_ABSOLUTE: 2.778 G_L1_RELATIVE: 14.120 G_Regularizer: 0.000 validation_error: 20.679
+(epoch: 31, iters: 149440, time: 0.541, data: 0.000) G_L1: 12.817 G_L1_ABSOLUTE: 2.260 G_L1_RELATIVE: 10.557 G_Regularizer: 0.000 validation_error: 20.783
+(epoch: 31, iters: 151440, time: 0.550, data: 0.000) G_L1: 13.392 G_L1_ABSOLUTE: 2.559 G_L1_RELATIVE: 10.833 G_Regularizer: 0.000 validation_error: 20.748
+(epoch: 31, iters: 153440, time: 0.550, data: 0.000) G_L1: 15.198 G_L1_ABSOLUTE: 2.565 G_L1_RELATIVE: 12.633 G_Regularizer: 0.000 validation_error: 20.963
+(epoch: 31, iters: 155440, time: 0.544, data: 0.000) G_L1: 13.105 G_L1_ABSOLUTE: 2.252 G_L1_RELATIVE: 10.853 G_Regularizer: 0.000 validation_error: 20.981
+(epoch: 31, iters: 157440, time: 0.545, data: 0.000) G_L1: 15.714 G_L1_ABSOLUTE: 2.470 G_L1_RELATIVE: 13.244 G_Regularizer: 0.000 validation_error: 20.867
+(epoch: 31, iters: 159440, time: 0.549, data: 0.000) G_L1: 13.129 G_L1_ABSOLUTE: 2.201 G_L1_RELATIVE: 10.928 G_Regularizer: 0.000 validation_error: 20.617
+(epoch: 31, iters: 161440, time: 0.540, data: 0.000) G_L1: 14.800 G_L1_ABSOLUTE: 2.977 G_L1_RELATIVE: 11.823 G_Regularizer: 0.000 validation_error: 20.711
+(epoch: 31, iters: 163440, time: 0.544, data: 0.000) G_L1: 12.636 G_L1_ABSOLUTE: 2.625 G_L1_RELATIVE: 10.010 G_Regularizer: 0.000 validation_error: 21.075
+(epoch: 31, iters: 165440, time: 0.545, data: 0.000) G_L1: 13.435 G_L1_ABSOLUTE: 2.490 G_L1_RELATIVE: 10.945 G_Regularizer: 0.000 validation_error: 20.706
+(epoch: 31, iters: 167440, time: 0.548, data: 0.000) G_L1: 17.262 G_L1_ABSOLUTE: 2.528 G_L1_RELATIVE: 14.734 G_Regularizer: 0.000 validation_error: 21.028
+(epoch: 31, iters: 169440, time: 0.546, data: 0.001) G_L1: 13.210 G_L1_ABSOLUTE: 2.383 G_L1_RELATIVE: 10.827 G_Regularizer: 0.000 validation_error: 20.729
+(epoch: 31, iters: 171440, time: 0.548, data: 0.000) G_L1: 14.519 G_L1_ABSOLUTE: 2.188 G_L1_RELATIVE: 12.331 G_Regularizer: 0.000 validation_error: 21.162
+(epoch: 31, iters: 173440, time: 0.548, data: 0.000) G_L1: 15.137 G_L1_ABSOLUTE: 2.477 G_L1_RELATIVE: 12.660 G_Regularizer: 0.000 validation_error: 20.883
+(epoch: 31, iters: 175440, time: 0.538, data: 0.000) G_L1: 14.146 G_L1_ABSOLUTE: 2.710 G_L1_RELATIVE: 11.436 G_Regularizer: 0.000 validation_error: 21.497
+(epoch: 31, iters: 177440, time: 0.554, data: 0.000) G_L1: 12.884 G_L1_ABSOLUTE: 2.176 G_L1_RELATIVE: 10.708 G_Regularizer: 0.000 validation_error: 20.789
+(epoch: 31, iters: 179440, time: 0.568, data: 0.000) G_L1: 12.651 G_L1_ABSOLUTE: 2.380 G_L1_RELATIVE: 10.271 G_Regularizer: 0.000 validation_error: 21.057
+(epoch: 31, iters: 181440, time: 0.542, data: 0.000) G_L1: 18.021 G_L1_ABSOLUTE: 2.844 G_L1_RELATIVE: 15.177 G_Regularizer: 0.000 validation_error: 21.086
+(epoch: 31, iters: 183440, time: 0.550, data: 0.000) G_L1: 14.459 G_L1_ABSOLUTE: 3.212 G_L1_RELATIVE: 11.246 G_Regularizer: 0.000 validation_error: 20.968
+(epoch: 31, iters: 185440, time: 0.548, data: 0.000) G_L1: 15.211 G_L1_ABSOLUTE: 2.266 G_L1_RELATIVE: 12.945 G_Regularizer: 0.000 validation_error: 21.377
+(epoch: 31, iters: 187440, time: 0.544, data: 0.000) G_L1: 12.814 G_L1_ABSOLUTE: 2.476 G_L1_RELATIVE: 10.338 G_Regularizer: 0.000 validation_error: 20.534
+(epoch: 31, iters: 189440, time: 0.540, data: 0.000) G_L1: 13.989 G_L1_ABSOLUTE: 2.297 G_L1_RELATIVE: 11.692 G_Regularizer: 0.000 validation_error: 20.994
+(epoch: 31, iters: 191440, time: 0.547, data: 0.000) G_L1: 15.546 G_L1_ABSOLUTE: 3.495 G_L1_RELATIVE: 12.051 G_Regularizer: 0.000 validation_error: 21.090
+(epoch: 31, iters: 193440, time: 0.539, data: 0.000) G_L1: 14.442 G_L1_ABSOLUTE: 2.172 G_L1_RELATIVE: 12.270 G_Regularizer: 0.000 validation_error: 20.969
+(epoch: 31, iters: 195440, time: 0.544, data: 0.000) G_L1: 14.517 G_L1_ABSOLUTE: 2.561 G_L1_RELATIVE: 11.956 G_Regularizer: 0.000 validation_error: 20.997
+(epoch: 31, iters: 197440, time: 0.553, data: 0.000) G_L1: 10.857 G_L1_ABSOLUTE: 2.291 G_L1_RELATIVE: 8.565 G_Regularizer: 0.000 validation_error: 20.224
+(epoch: 31, iters: 199440, time: 0.546, data: 0.000) G_L1: 14.373 G_L1_ABSOLUTE: 2.287 G_L1_RELATIVE: 12.086 G_Regularizer: 0.000 validation_error: 20.794
+(epoch: 31, iters: 201440, time: 0.551, data: 0.000) G_L1: 14.260 G_L1_ABSOLUTE: 2.904 G_L1_RELATIVE: 11.356 G_Regularizer: 0.000 validation_error: 20.927
+(epoch: 31, iters: 203440, time: 0.537, data: 0.001) G_L1: 14.945 G_L1_ABSOLUTE: 2.415 G_L1_RELATIVE: 12.529 G_Regularizer: 0.000 validation_error: 21.655
+(epoch: 31, iters: 205440, time: 0.546, data: 0.001) G_L1: 15.469 G_L1_ABSOLUTE: 2.319 G_L1_RELATIVE: 13.150 G_Regularizer: 0.000 validation_error: 20.643
+(epoch: 31, iters: 207440, time: 0.535, data: 0.000) G_L1: 15.561 G_L1_ABSOLUTE: 2.235 G_L1_RELATIVE: 13.326 G_Regularizer: 0.000 validation_error: 20.562
+(epoch: 31, iters: 209440, time: 0.550, data: 0.001) G_L1: 15.538 G_L1_ABSOLUTE: 2.632 G_L1_RELATIVE: 12.906 G_Regularizer: 0.000 validation_error: 20.682
+(epoch: 31, iters: 211440, time: 0.547, data: 0.000) G_L1: 13.534 G_L1_ABSOLUTE: 2.568 G_L1_RELATIVE: 10.966 G_Regularizer: 0.000 validation_error: 20.772
+(epoch: 31, iters: 213440, time: 0.547, data: 0.000) G_L1: 15.084 G_L1_ABSOLUTE: 2.163 G_L1_RELATIVE: 12.920 G_Regularizer: 0.000 validation_error: 20.763
+(epoch: 31, iters: 215440, time: 0.542, data: 0.000) G_L1: 14.579 G_L1_ABSOLUTE: 2.845 G_L1_RELATIVE: 11.734 G_Regularizer: 0.000 validation_error: 20.028
+(epoch: 31, iters: 217440, time: 0.547, data: 0.000) G_L1: 14.830 G_L1_ABSOLUTE: 2.272 G_L1_RELATIVE: 12.558 G_Regularizer: 0.000 validation_error: 20.591
+(epoch: 31, iters: 219440, time: 0.539, data: 0.000) G_L1: 13.070 G_L1_ABSOLUTE: 2.528 G_L1_RELATIVE: 10.542 G_Regularizer: 0.000 validation_error: 20.143
+(epoch: 31, iters: 221440, time: 0.541, data: 0.000) G_L1: 15.771 G_L1_ABSOLUTE: 3.115 G_L1_RELATIVE: 12.656 G_Regularizer: 0.000 validation_error: 20.918
+(epoch: 31, iters: 223440, time: 0.543, data: 0.000) G_L1: 13.083 G_L1_ABSOLUTE: 2.425 G_L1_RELATIVE: 10.659 G_Regularizer: 0.000 validation_error: 20.312
+(epoch: 31, iters: 225440, time: 0.546, data: 0.000) G_L1: 12.079 G_L1_ABSOLUTE: 2.498 G_L1_RELATIVE: 9.581 G_Regularizer: 0.000 validation_error: 20.226
+(epoch: 31, iters: 227440, time: 0.537, data: 0.001) G_L1: 11.687 G_L1_ABSOLUTE: 2.061 G_L1_RELATIVE: 9.627 G_Regularizer: 0.000 validation_error: 21.388
+(epoch: 31, iters: 229440, time: 0.552, data: 0.000) G_L1: 12.275 G_L1_ABSOLUTE: 2.540 G_L1_RELATIVE: 9.735 G_Regularizer: 0.000 validation_error: 20.608
+(epoch: 31, iters: 231440, time: 0.548, data: 0.001) G_L1: 15.704 G_L1_ABSOLUTE: 2.721 G_L1_RELATIVE: 12.982 G_Regularizer: 0.000 validation_error: 21.033
+(epoch: 31, iters: 233440, time: 0.543, data: 0.000) G_L1: 11.945 G_L1_ABSOLUTE: 2.431 G_L1_RELATIVE: 9.514 G_Regularizer: 0.000 validation_error: 20.773
+(epoch: 31, iters: 235440, time: 0.537, data: 0.000) G_L1: 17.086 G_L1_ABSOLUTE: 2.569 G_L1_RELATIVE: 14.517 G_Regularizer: 0.000 validation_error: 20.633
+(epoch: 31, iters: 237440, time: 0.550, data: 0.000) G_L1: 14.511 G_L1_ABSOLUTE: 2.625 G_L1_RELATIVE: 11.886 G_Regularizer: 0.000 validation_error: 20.835
+(epoch: 31, iters: 239440, time: 0.544, data: 0.000) G_L1: 12.547 G_L1_ABSOLUTE: 2.638 G_L1_RELATIVE: 9.909 G_Regularizer: 0.000 validation_error: 20.339
+(epoch: 31, iters: 241440, time: 0.546, data: 0.000) G_L1: 17.247 G_L1_ABSOLUTE: 2.216 G_L1_RELATIVE: 15.031 G_Regularizer: 0.000 validation_error: 20.079
+(epoch: 31, iters: 243440, time: 0.553, data: 0.000) G_L1: 15.518 G_L1_ABSOLUTE: 2.994 G_L1_RELATIVE: 12.524 G_Regularizer: 0.000 validation_error: 20.546
+(epoch: 31, iters: 245440, time: 0.549, data: 0.000) G_L1: 13.652 G_L1_ABSOLUTE: 2.328 G_L1_RELATIVE: 11.324 G_Regularizer: 0.000 validation_error: 20.553
+(epoch: 31, iters: 247440, time: 0.548, data: 0.000) G_L1: 23.853 G_L1_ABSOLUTE: 2.724 G_L1_RELATIVE: 21.129 G_Regularizer: 0.000 validation_error: 20.857
+(epoch: 31, iters: 249440, time: 0.538, data: 0.000) G_L1: 14.342 G_L1_ABSOLUTE: 2.797 G_L1_RELATIVE: 11.545 G_Regularizer: 0.000 validation_error: 20.405
+(epoch: 31, iters: 251440, time: 0.555, data: 0.001) G_L1: 13.841 G_L1_ABSOLUTE: 2.279 G_L1_RELATIVE: 11.562 G_Regularizer: 0.000 validation_error: 21.196
+(epoch: 31, iters: 253440, time: 0.542, data: 0.000) G_L1: 13.652 G_L1_ABSOLUTE: 2.569 G_L1_RELATIVE: 11.082 G_Regularizer: 0.000 validation_error: 20.872
+(epoch: 31, iters: 255440, time: 0.542, data: 0.000) G_L1: 25.598 G_L1_ABSOLUTE: 2.970 G_L1_RELATIVE: 22.629 G_Regularizer: 0.000 validation_error: 21.666
+(epoch: 31, iters: 257440, time: 0.559, data: 0.000) G_L1: 12.904 G_L1_ABSOLUTE: 2.732 G_L1_RELATIVE: 10.173 G_Regularizer: 0.000 validation_error: 20.641
+(epoch: 31, iters: 259440, time: 0.552, data: 0.000) G_L1: 13.977 G_L1_ABSOLUTE: 2.188 G_L1_RELATIVE: 11.790 G_Regularizer: 0.000 validation_error: 20.500
+(epoch: 31, iters: 261440, time: 0.546, data: 0.000) G_L1: 13.721 G_L1_ABSOLUTE: 2.772 G_L1_RELATIVE: 10.949 G_Regularizer: 0.000 validation_error: 20.716
+(epoch: 31, iters: 263440, time: 0.537, data: 0.000) G_L1: 13.652 G_L1_ABSOLUTE: 2.488 G_L1_RELATIVE: 11.165 G_Regularizer: 0.000 validation_error: 21.073
+(epoch: 31, iters: 265440, time: 0.547, data: 0.000) G_L1: 11.899 G_L1_ABSOLUTE: 2.366 G_L1_RELATIVE: 9.533 G_Regularizer: 0.000 validation_error: 20.620
+(epoch: 31, iters: 267440, time: 0.544, data: 0.000) G_L1: 14.270 G_L1_ABSOLUTE: 2.599 G_L1_RELATIVE: 11.671 G_Regularizer: 0.000 validation_error: 21.065
+(epoch: 31, iters: 269440, time: 0.550, data: 0.000) G_L1: 14.495 G_L1_ABSOLUTE: 2.292 G_L1_RELATIVE: 12.202 G_Regularizer: 0.000 validation_error: 21.185
+(epoch: 31, iters: 271440, time: 0.545, data: 0.000) G_L1: 15.408 G_L1_ABSOLUTE: 2.682 G_L1_RELATIVE: 12.726 G_Regularizer: 0.000 validation_error: 20.914
+(epoch: 31, iters: 273440, time: 0.554, data: 0.000) G_L1: 12.439 G_L1_ABSOLUTE: 2.343 G_L1_RELATIVE: 10.096 G_Regularizer: 0.000 validation_error: 20.175
+(epoch: 31, iters: 275440, time: 0.542, data: 0.000) G_L1: 12.176 G_L1_ABSOLUTE: 2.282 G_L1_RELATIVE: 9.894 G_Regularizer: 0.000 validation_error: 20.119
+(epoch: 31, iters: 277440, time: 0.541, data: 0.000) G_L1: 15.300 G_L1_ABSOLUTE: 2.989 G_L1_RELATIVE: 12.311 G_Regularizer: 0.000 validation_error: 20.907
+(epoch: 31, iters: 279440, time: 0.541, data: 0.000) G_L1: 15.100 G_L1_ABSOLUTE: 2.878 G_L1_RELATIVE: 12.222 G_Regularizer: 0.000 validation_error: 20.320
+(epoch: 31, iters: 281440, time: 0.549, data: 0.000) G_L1: 15.950 G_L1_ABSOLUTE: 2.802 G_L1_RELATIVE: 13.148 G_Regularizer: 0.000 validation_error: 20.985
+(epoch: 31, iters: 283440, time: 0.551, data: 0.000) G_L1: 14.677 G_L1_ABSOLUTE: 2.186 G_L1_RELATIVE: 12.491 G_Regularizer: 0.000 validation_error: 20.302
+(epoch: 31, iters: 285440, time: 0.550, data: 0.000) G_L1: 13.018 G_L1_ABSOLUTE: 2.593 G_L1_RELATIVE: 10.425 G_Regularizer: 0.000 validation_error: 20.572
+(epoch: 31, iters: 287440, time: 0.547, data: 0.000) G_L1: 14.301 G_L1_ABSOLUTE: 2.817 G_L1_RELATIVE: 11.484 G_Regularizer: 0.000 validation_error: 20.856
+(epoch: 31, iters: 289440, time: 0.545, data: 0.000) G_L1: 15.117 G_L1_ABSOLUTE: 2.465 G_L1_RELATIVE: 12.652 G_Regularizer: 0.000 validation_error: 20.679
+(epoch: 31, iters: 291440, time: 0.548, data: 0.000) G_L1: 16.981 G_L1_ABSOLUTE: 2.708 G_L1_RELATIVE: 14.273 G_Regularizer: 0.000 validation_error: 20.592
+(epoch: 31, iters: 293440, time: 0.545, data: 0.001) G_L1: 17.295 G_L1_ABSOLUTE: 2.576 G_L1_RELATIVE: 14.719 G_Regularizer: 0.000 validation_error: 20.565
+(epoch: 31, iters: 295440, time: 0.542, data: 0.001) G_L1: 13.315 G_L1_ABSOLUTE: 2.137 G_L1_RELATIVE: 11.178 G_Regularizer: 0.000 validation_error: 20.590
+(epoch: 31, iters: 297440, time: 0.552, data: 0.000) G_L1: 13.286 G_L1_ABSOLUTE: 2.069 G_L1_RELATIVE: 11.217 G_Regularizer: 0.000 validation_error: 20.517
+(epoch: 31, iters: 299440, time: 0.559, data: 0.000) G_L1: 12.168 G_L1_ABSOLUTE: 2.586 G_L1_RELATIVE: 9.582 G_Regularizer: 0.000 validation_error: 21.457
+(epoch: 31, iters: 301440, time: 0.545, data: 0.000) G_L1: 14.315 G_L1_ABSOLUTE: 2.333 G_L1_RELATIVE: 11.983 G_Regularizer: 0.000 validation_error: 21.026
+(epoch: 32, iters: 688, time: 0.559, data: 0.000) G_L1: 13.400 G_L1_ABSOLUTE: 2.254 G_L1_RELATIVE: 11.146 G_Regularizer: 0.000 validation_error: 20.670
+(epoch: 32, iters: 2688, time: 0.548, data: 0.000) G_L1: 14.849 G_L1_ABSOLUTE: 2.871 G_L1_RELATIVE: 11.978 G_Regularizer: 0.000 validation_error: 20.638
+(epoch: 32, iters: 4688, time: 0.545, data: 0.001) G_L1: 13.882 G_L1_ABSOLUTE: 2.422 G_L1_RELATIVE: 11.461 G_Regularizer: 0.000 validation_error: 20.134
+(epoch: 32, iters: 6688, time: 0.542, data: 0.000) G_L1: 14.857 G_L1_ABSOLUTE: 2.450 G_L1_RELATIVE: 12.407 G_Regularizer: 0.000 validation_error: 20.553
+(epoch: 32, iters: 8688, time: 0.550, data: 0.000) G_L1: 14.870 G_L1_ABSOLUTE: 2.863 G_L1_RELATIVE: 12.007 G_Regularizer: 0.000 validation_error: 20.688
+(epoch: 32, iters: 10688, time: 0.550, data: 0.000) G_L1: 12.436 G_L1_ABSOLUTE: 2.261 G_L1_RELATIVE: 10.175 G_Regularizer: 0.000 validation_error: 20.972
+(epoch: 32, iters: 12688, time: 0.540, data: 0.000) G_L1: 15.911 G_L1_ABSOLUTE: 2.663 G_L1_RELATIVE: 13.248 G_Regularizer: 0.000 validation_error: 20.687
+(epoch: 32, iters: 14688, time: 0.542, data: 0.000) G_L1: 14.270 G_L1_ABSOLUTE: 2.219 G_L1_RELATIVE: 12.051 G_Regularizer: 0.000 validation_error: 20.642
+(epoch: 32, iters: 16688, time: 0.553, data: 0.000) G_L1: 13.647 G_L1_ABSOLUTE: 2.856 G_L1_RELATIVE: 10.791 G_Regularizer: 0.000 validation_error: 20.535
+(epoch: 32, iters: 18688, time: 0.552, data: 0.000) G_L1: 14.976 G_L1_ABSOLUTE: 2.911 G_L1_RELATIVE: 12.065 G_Regularizer: 0.000 validation_error: 20.320
+(epoch: 32, iters: 20688, time: 0.551, data: 0.000) G_L1: 15.050 G_L1_ABSOLUTE: 2.674 G_L1_RELATIVE: 12.376 G_Regularizer: 0.000 validation_error: 20.777
+(epoch: 32, iters: 22688, time: 0.545, data: 0.000) G_L1: 15.020 G_L1_ABSOLUTE: 2.709 G_L1_RELATIVE: 12.312 G_Regularizer: 0.000 validation_error: 20.682
+(epoch: 32, iters: 24688, time: 0.545, data: 0.000) G_L1: 12.586 G_L1_ABSOLUTE: 2.531 G_L1_RELATIVE: 10.054 G_Regularizer: 0.000 validation_error: 20.946
+(epoch: 32, iters: 26688, time: 0.540, data: 0.000) G_L1: 14.275 G_L1_ABSOLUTE: 2.783 G_L1_RELATIVE: 11.492 G_Regularizer: 0.000 validation_error: 20.754
+(epoch: 32, iters: 28688, time: 0.539, data: 0.000) G_L1: 14.102 G_L1_ABSOLUTE: 2.299 G_L1_RELATIVE: 11.804 G_Regularizer: 0.000 validation_error: 20.788
+(epoch: 32, iters: 30688, time: 0.550, data: 0.000) G_L1: 13.777 G_L1_ABSOLUTE: 2.428 G_L1_RELATIVE: 11.350 G_Regularizer: 0.000 validation_error: 20.560
+(epoch: 32, iters: 32688, time: 0.537, data: 0.000) G_L1: 16.939 G_L1_ABSOLUTE: 2.934 G_L1_RELATIVE: 14.006 G_Regularizer: 0.000 validation_error: 20.899
+(epoch: 32, iters: 34688, time: 0.549, data: 0.000) G_L1: 15.492 G_L1_ABSOLUTE: 2.505 G_L1_RELATIVE: 12.988 G_Regularizer: 0.000 validation_error: 21.394
+(epoch: 32, iters: 36688, time: 0.537, data: 0.000) G_L1: 14.049 G_L1_ABSOLUTE: 3.010 G_L1_RELATIVE: 11.039 G_Regularizer: 0.000 validation_error: 20.743
+(epoch: 32, iters: 38688, time: 0.546, data: 0.000) G_L1: 16.644 G_L1_ABSOLUTE: 3.173 G_L1_RELATIVE: 13.472 G_Regularizer: 0.000 validation_error: 20.934
+(epoch: 32, iters: 40688, time: 0.545, data: 0.000) G_L1: 18.708 G_L1_ABSOLUTE: 3.011 G_L1_RELATIVE: 15.697 G_Regularizer: 0.000 validation_error: 20.656
+(epoch: 32, iters: 42688, time: 0.547, data: 0.000) G_L1: 12.636 G_L1_ABSOLUTE: 2.497 G_L1_RELATIVE: 10.139 G_Regularizer: 0.000 validation_error: 20.666
+(epoch: 32, iters: 44688, time: 0.544, data: 0.000) G_L1: 14.435 G_L1_ABSOLUTE: 2.714 G_L1_RELATIVE: 11.721 G_Regularizer: 0.000 validation_error: 20.834
+(epoch: 32, iters: 46688, time: 0.545, data: 0.000) G_L1: 14.676 G_L1_ABSOLUTE: 2.401 G_L1_RELATIVE: 12.275 G_Regularizer: 0.000 validation_error: 20.797
+(epoch: 32, iters: 48688, time: 0.549, data: 0.000) G_L1: 16.271 G_L1_ABSOLUTE: 2.894 G_L1_RELATIVE: 13.377 G_Regularizer: 0.000 validation_error: 20.746
+(epoch: 32, iters: 50688, time: 0.545, data: 0.000) G_L1: 18.568 G_L1_ABSOLUTE: 3.174 G_L1_RELATIVE: 15.394 G_Regularizer: 0.000 validation_error: 20.523
+(epoch: 32, iters: 52688, time: 0.547, data: 0.000) G_L1: 13.803 G_L1_ABSOLUTE: 2.751 G_L1_RELATIVE: 11.052 G_Regularizer: 0.000 validation_error: 21.169
+(epoch: 32, iters: 54688, time: 0.545, data: 0.000) G_L1: 14.388 G_L1_ABSOLUTE: 2.679 G_L1_RELATIVE: 11.710 G_Regularizer: 0.000 validation_error: 20.542
+(epoch: 32, iters: 56688, time: 0.550, data: 0.000) G_L1: 13.372 G_L1_ABSOLUTE: 2.801 G_L1_RELATIVE: 10.572 G_Regularizer: 0.000 validation_error: 21.048
+(epoch: 32, iters: 58688, time: 0.547, data: 0.000) G_L1: 15.227 G_L1_ABSOLUTE: 2.189 G_L1_RELATIVE: 13.038 G_Regularizer: 0.000 validation_error: 20.842
+(epoch: 32, iters: 60688, time: 0.550, data: 0.000) G_L1: 12.307 G_L1_ABSOLUTE: 2.447 G_L1_RELATIVE: 9.860 G_Regularizer: 0.000 validation_error: 20.811
+(epoch: 32, iters: 62688, time: 0.547, data: 0.000) G_L1: 14.540 G_L1_ABSOLUTE: 2.787 G_L1_RELATIVE: 11.753 G_Regularizer: 0.000 validation_error: 21.120
+(epoch: 32, iters: 64688, time: 0.544, data: 0.000) G_L1: 13.082 G_L1_ABSOLUTE: 2.530 G_L1_RELATIVE: 10.552 G_Regularizer: 0.000 validation_error: 21.010
+(epoch: 32, iters: 66688, time: 0.547, data: 0.000) G_L1: 16.768 G_L1_ABSOLUTE: 2.991 G_L1_RELATIVE: 13.777 G_Regularizer: 0.000 validation_error: 20.972
+(epoch: 32, iters: 68688, time: 0.548, data: 0.000) G_L1: 16.751 G_L1_ABSOLUTE: 3.600 G_L1_RELATIVE: 13.151 G_Regularizer: 0.000 validation_error: 20.077
+(epoch: 32, iters: 70688, time: 0.539, data: 0.000) G_L1: 13.511 G_L1_ABSOLUTE: 2.549 G_L1_RELATIVE: 10.962 G_Regularizer: 0.000 validation_error: 20.831
+(epoch: 32, iters: 72688, time: 0.549, data: 0.000) G_L1: 11.979 G_L1_ABSOLUTE: 2.412 G_L1_RELATIVE: 9.567 G_Regularizer: 0.000 validation_error: 20.677
+(epoch: 32, iters: 74688, time: 0.542, data: 0.000) G_L1: 12.890 G_L1_ABSOLUTE: 2.548 G_L1_RELATIVE: 10.342 G_Regularizer: 0.000 validation_error: 20.369
+(epoch: 32, iters: 76688, time: 0.539, data: 0.000) G_L1: 12.758 G_L1_ABSOLUTE: 2.276 G_L1_RELATIVE: 10.482 G_Regularizer: 0.000 validation_error: 20.691
+(epoch: 32, iters: 78688, time: 0.542, data: 0.000) G_L1: 12.683 G_L1_ABSOLUTE: 1.921 G_L1_RELATIVE: 10.761 G_Regularizer: 0.000 validation_error: 20.423
+(epoch: 32, iters: 80688, time: 0.551, data: 0.000) G_L1: 13.188 G_L1_ABSOLUTE: 2.390 G_L1_RELATIVE: 10.799 G_Regularizer: 0.000 validation_error: 20.713
+(epoch: 32, iters: 82688, time: 0.543, data: 0.000) G_L1: 14.750 G_L1_ABSOLUTE: 2.546 G_L1_RELATIVE: 12.204 G_Regularizer: 0.000 validation_error: 20.860
+(epoch: 32, iters: 84688, time: 0.538, data: 0.000) G_L1: 14.953 G_L1_ABSOLUTE: 2.591 G_L1_RELATIVE: 12.362 G_Regularizer: 0.000 validation_error: 20.589
+(epoch: 32, iters: 86688, time: 0.551, data: 0.000) G_L1: 12.258 G_L1_ABSOLUTE: 2.216 G_L1_RELATIVE: 10.042 G_Regularizer: 0.000 validation_error: 20.584
+(epoch: 32, iters: 88688, time: 0.543, data: 0.001) G_L1: 13.914 G_L1_ABSOLUTE: 2.144 G_L1_RELATIVE: 11.770 G_Regularizer: 0.000 validation_error: 21.074
+(epoch: 32, iters: 90688, time: 0.548, data: 0.000) G_L1: 13.129 G_L1_ABSOLUTE: 1.969 G_L1_RELATIVE: 11.159 G_Regularizer: 0.000 validation_error: 21.156
+(epoch: 32, iters: 92688, time: 0.546, data: 0.000) G_L1: 13.964 G_L1_ABSOLUTE: 2.524 G_L1_RELATIVE: 11.440 G_Regularizer: 0.000 validation_error: 20.936
+(epoch: 32, iters: 94688, time: 0.551, data: 0.000) G_L1: 16.338 G_L1_ABSOLUTE: 3.060 G_L1_RELATIVE: 13.278 G_Regularizer: 0.000 validation_error: 21.013
+(epoch: 32, iters: 96688, time: 0.541, data: 0.001) G_L1: 15.931 G_L1_ABSOLUTE: 2.959 G_L1_RELATIVE: 12.972 G_Regularizer: 0.000 validation_error: 21.036
+(epoch: 32, iters: 98688, time: 0.545, data: 0.001) G_L1: 14.770 G_L1_ABSOLUTE: 2.522 G_L1_RELATIVE: 12.248 G_Regularizer: 0.000 validation_error: 20.725
+(epoch: 32, iters: 100688, time: 0.558, data: 0.000) G_L1: 11.763 G_L1_ABSOLUTE: 2.242 G_L1_RELATIVE: 9.521 G_Regularizer: 0.000 validation_error: 20.115
+(epoch: 32, iters: 102688, time: 0.550, data: 0.000) G_L1: 12.021 G_L1_ABSOLUTE: 2.474 G_L1_RELATIVE: 9.547 G_Regularizer: 0.000 validation_error: 20.764
+(epoch: 32, iters: 104688, time: 0.538, data: 0.000) G_L1: 13.468 G_L1_ABSOLUTE: 2.095 G_L1_RELATIVE: 11.373 G_Regularizer: 0.000 validation_error: 20.751
+(epoch: 32, iters: 106688, time: 0.541, data: 0.000) G_L1: 14.633 G_L1_ABSOLUTE: 2.881 G_L1_RELATIVE: 11.751 G_Regularizer: 0.000 validation_error: 20.950
+(epoch: 32, iters: 108688, time: 0.552, data: 0.000) G_L1: 15.795 G_L1_ABSOLUTE: 2.866 G_L1_RELATIVE: 12.928 G_Regularizer: 0.000 validation_error: 20.688
+(epoch: 32, iters: 110688, time: 0.546, data: 0.000) G_L1: 13.843 G_L1_ABSOLUTE: 2.451 G_L1_RELATIVE: 11.392 G_Regularizer: 0.000 validation_error: 20.463
+(epoch: 32, iters: 112688, time: 0.538, data: 0.000) G_L1: 13.016 G_L1_ABSOLUTE: 2.444 G_L1_RELATIVE: 10.572 G_Regularizer: 0.000 validation_error: 20.744
+(epoch: 32, iters: 114688, time: 0.550, data: 0.000) G_L1: 14.111 G_L1_ABSOLUTE: 2.098 G_L1_RELATIVE: 12.013 G_Regularizer: 0.000 validation_error: 20.604
+(epoch: 32, iters: 116688, time: 0.555, data: 0.000) G_L1: 15.896 G_L1_ABSOLUTE: 2.707 G_L1_RELATIVE: 13.189 G_Regularizer: 0.000 validation_error: 20.374
+(epoch: 32, iters: 118688, time: 0.542, data: 0.000) G_L1: 15.725 G_L1_ABSOLUTE: 2.832 G_L1_RELATIVE: 12.893 G_Regularizer: 0.000 validation_error: 20.384
+(epoch: 32, iters: 120688, time: 0.544, data: 0.000) G_L1: 16.500 G_L1_ABSOLUTE: 3.475 G_L1_RELATIVE: 13.025 G_Regularizer: 0.000 validation_error: 20.210
+(epoch: 32, iters: 122688, time: 0.547, data: 0.000) G_L1: 16.913 G_L1_ABSOLUTE: 2.604 G_L1_RELATIVE: 14.309 G_Regularizer: 0.000 validation_error: 21.131
+(epoch: 32, iters: 124688, time: 0.542, data: 0.000) G_L1: 13.215 G_L1_ABSOLUTE: 2.656 G_L1_RELATIVE: 10.559 G_Regularizer: 0.000 validation_error: 20.655
+(epoch: 32, iters: 126688, time: 0.540, data: 0.000) G_L1: 14.580 G_L1_ABSOLUTE: 2.750 G_L1_RELATIVE: 11.830 G_Regularizer: 0.000 validation_error: 20.310
+(epoch: 32, iters: 128688, time: 0.546, data: 0.000) G_L1: 16.992 G_L1_ABSOLUTE: 3.004 G_L1_RELATIVE: 13.989 G_Regularizer: 0.000 validation_error: 20.734
+(epoch: 32, iters: 130688, time: 0.545, data: 0.000) G_L1: 13.562 G_L1_ABSOLUTE: 2.239 G_L1_RELATIVE: 11.323 G_Regularizer: 0.000 validation_error: 20.318
+(epoch: 32, iters: 132688, time: 0.544, data: 0.000) G_L1: 15.200 G_L1_ABSOLUTE: 2.238 G_L1_RELATIVE: 12.962 G_Regularizer: 0.000 validation_error: 20.931
+(epoch: 32, iters: 134688, time: 0.552, data: 0.001) G_L1: 15.393 G_L1_ABSOLUTE: 3.627 G_L1_RELATIVE: 11.766 G_Regularizer: 0.000 validation_error: 20.595
+(epoch: 32, iters: 136688, time: 0.543, data: 0.000) G_L1: 13.503 G_L1_ABSOLUTE: 2.301 G_L1_RELATIVE: 11.201 G_Regularizer: 0.000 validation_error: 20.340
+(epoch: 32, iters: 138688, time: 0.549, data: 0.000) G_L1: 13.169 G_L1_ABSOLUTE: 2.844 G_L1_RELATIVE: 10.325 G_Regularizer: 0.000 validation_error: 21.112
+(epoch: 32, iters: 140688, time: 0.544, data: 0.000) G_L1: 14.620 G_L1_ABSOLUTE: 2.606 G_L1_RELATIVE: 12.014 G_Regularizer: 0.000 validation_error: 20.777
+(epoch: 32, iters: 142688, time: 0.549, data: 0.000) G_L1: 15.324 G_L1_ABSOLUTE: 2.469 G_L1_RELATIVE: 12.854 G_Regularizer: 0.000 validation_error: 20.846
+(epoch: 32, iters: 144688, time: 0.545, data: 0.000) G_L1: 15.071 G_L1_ABSOLUTE: 2.684 G_L1_RELATIVE: 12.387 G_Regularizer: 0.000 validation_error: 20.593
+(epoch: 32, iters: 146688, time: 0.542, data: 0.001) G_L1: 15.299 G_L1_ABSOLUTE: 2.461 G_L1_RELATIVE: 12.839 G_Regularizer: 0.000 validation_error: 20.597
+(epoch: 32, iters: 148688, time: 0.548, data: 0.000) G_L1: 14.960 G_L1_ABSOLUTE: 2.355 G_L1_RELATIVE: 12.605 G_Regularizer: 0.000 validation_error: 20.721
+(epoch: 32, iters: 150688, time: 0.545, data: 0.000) G_L1: 13.038 G_L1_ABSOLUTE: 2.268 G_L1_RELATIVE: 10.770 G_Regularizer: 0.000 validation_error: 20.662
+(epoch: 32, iters: 152688, time: 0.544, data: 0.000) G_L1: 15.606 G_L1_ABSOLUTE: 3.091 G_L1_RELATIVE: 12.514 G_Regularizer: 0.000 validation_error: 20.346
+(epoch: 32, iters: 154688, time: 0.548, data: 0.000) G_L1: 13.577 G_L1_ABSOLUTE: 2.753 G_L1_RELATIVE: 10.824 G_Regularizer: 0.000 validation_error: 20.490
+(epoch: 32, iters: 156688, time: 0.546, data: 0.000) G_L1: 14.621 G_L1_ABSOLUTE: 2.501 G_L1_RELATIVE: 12.119 G_Regularizer: 0.000 validation_error: 20.722
+(epoch: 32, iters: 158688, time: 0.558, data: 0.000) G_L1: 16.494 G_L1_ABSOLUTE: 2.203 G_L1_RELATIVE: 14.292 G_Regularizer: 0.000 validation_error: 20.234
+(epoch: 32, iters: 160688, time: 0.544, data: 0.000) G_L1: 15.431 G_L1_ABSOLUTE: 2.854 G_L1_RELATIVE: 12.577 G_Regularizer: 0.000 validation_error: 20.337
+(epoch: 32, iters: 162688, time: 0.546, data: 0.000) G_L1: 13.904 G_L1_ABSOLUTE: 2.302 G_L1_RELATIVE: 11.603 G_Regularizer: 0.000 validation_error: 20.626
+(epoch: 32, iters: 164688, time: 0.541, data: 0.001) G_L1: 13.935 G_L1_ABSOLUTE: 2.104 G_L1_RELATIVE: 11.831 G_Regularizer: 0.000 validation_error: 20.935
+(epoch: 32, iters: 166688, time: 0.544, data: 0.000) G_L1: 17.123 G_L1_ABSOLUTE: 3.043 G_L1_RELATIVE: 14.080 G_Regularizer: 0.000 validation_error: 20.858
+(epoch: 32, iters: 168688, time: 0.546, data: 0.000) G_L1: 12.744 G_L1_ABSOLUTE: 2.414 G_L1_RELATIVE: 10.330 G_Regularizer: 0.000 validation_error: 20.809
+(epoch: 32, iters: 170688, time: 0.541, data: 0.000) G_L1: 15.111 G_L1_ABSOLUTE: 2.790 G_L1_RELATIVE: 12.320 G_Regularizer: 0.000 validation_error: 20.465
+(epoch: 32, iters: 172688, time: 0.550, data: 0.000) G_L1: 15.355 G_L1_ABSOLUTE: 2.719 G_L1_RELATIVE: 12.636 G_Regularizer: 0.000 validation_error: 20.280
+(epoch: 32, iters: 174688, time: 0.546, data: 0.000) G_L1: 12.118 G_L1_ABSOLUTE: 2.332 G_L1_RELATIVE: 9.786 G_Regularizer: 0.000 validation_error: 20.684
+(epoch: 32, iters: 176688, time: 0.551, data: 0.000) G_L1: 15.378 G_L1_ABSOLUTE: 2.964 G_L1_RELATIVE: 12.415 G_Regularizer: 0.000 validation_error: 21.288
+(epoch: 32, iters: 178688, time: 0.543, data: 0.000) G_L1: 15.049 G_L1_ABSOLUTE: 2.241 G_L1_RELATIVE: 12.808 G_Regularizer: 0.000 validation_error: 20.892
+(epoch: 32, iters: 180688, time: 0.544, data: 0.000) G_L1: 16.692 G_L1_ABSOLUTE: 2.476 G_L1_RELATIVE: 14.216 G_Regularizer: 0.000 validation_error: 20.440
+(epoch: 32, iters: 182688, time: 0.549, data: 0.000) G_L1: 13.916 G_L1_ABSOLUTE: 3.170 G_L1_RELATIVE: 10.746 G_Regularizer: 0.000 validation_error: 20.662
+(epoch: 32, iters: 184688, time: 0.549, data: 0.000) G_L1: 13.190 G_L1_ABSOLUTE: 2.387 G_L1_RELATIVE: 10.803 G_Regularizer: 0.000 validation_error: 20.149
+(epoch: 32, iters: 186688, time: 0.551, data: 0.000) G_L1: 12.744 G_L1_ABSOLUTE: 2.536 G_L1_RELATIVE: 10.208 G_Regularizer: 0.000 validation_error: 20.457
+(epoch: 32, iters: 188688, time: 0.552, data: 0.001) G_L1: 15.539 G_L1_ABSOLUTE: 2.898 G_L1_RELATIVE: 12.641 G_Regularizer: 0.000 validation_error: 20.392
+(epoch: 32, iters: 190688, time: 0.545, data: 0.000) G_L1: 15.924 G_L1_ABSOLUTE: 2.659 G_L1_RELATIVE: 13.265 G_Regularizer: 0.000 validation_error: 20.080
+(epoch: 32, iters: 192688, time: 0.545, data: 0.000) G_L1: 16.039 G_L1_ABSOLUTE: 3.031 G_L1_RELATIVE: 13.008 G_Regularizer: 0.000 validation_error: 21.080
+(epoch: 32, iters: 194688, time: 0.540, data: 0.000) G_L1: 13.209 G_L1_ABSOLUTE: 2.309 G_L1_RELATIVE: 10.900 G_Regularizer: 0.000 validation_error: 19.919
+(epoch: 32, iters: 196688, time: 0.552, data: 0.000) G_L1: 14.111 G_L1_ABSOLUTE: 2.037 G_L1_RELATIVE: 12.074 G_Regularizer: 0.000 validation_error: 20.351
+(epoch: 32, iters: 198688, time: 0.540, data: 0.001) G_L1: 13.787 G_L1_ABSOLUTE: 2.302 G_L1_RELATIVE: 11.485 G_Regularizer: 0.000 validation_error: 20.858
+(epoch: 32, iters: 200688, time: 0.540, data: 0.000) G_L1: 16.914 G_L1_ABSOLUTE: 3.042 G_L1_RELATIVE: 13.872 G_Regularizer: 0.000 validation_error: 20.594
+(epoch: 32, iters: 202688, time: 0.557, data: 0.000) G_L1: 13.284 G_L1_ABSOLUTE: 2.478 G_L1_RELATIVE: 10.806 G_Regularizer: 0.000 validation_error: 20.559
+(epoch: 32, iters: 204688, time: 0.544, data: 0.000) G_L1: 14.461 G_L1_ABSOLUTE: 2.492 G_L1_RELATIVE: 11.968 G_Regularizer: 0.000 validation_error: 20.536
+(epoch: 32, iters: 206688, time: 0.547, data: 0.000) G_L1: 12.262 G_L1_ABSOLUTE: 2.528 G_L1_RELATIVE: 9.734 G_Regularizer: 0.000 validation_error: 20.905
+(epoch: 32, iters: 208688, time: 0.544, data: 0.000) G_L1: 13.697 G_L1_ABSOLUTE: 2.333 G_L1_RELATIVE: 11.365 G_Regularizer: 0.000 validation_error: 21.244
+(epoch: 32, iters: 210688, time: 0.544, data: 0.000) G_L1: 15.230 G_L1_ABSOLUTE: 3.028 G_L1_RELATIVE: 12.202 G_Regularizer: 0.000 validation_error: 20.524
+(epoch: 32, iters: 212688, time: 0.544, data: 0.000) G_L1: 16.565 G_L1_ABSOLUTE: 2.479 G_L1_RELATIVE: 14.086 G_Regularizer: 0.000 validation_error: 20.650
+(epoch: 32, iters: 214688, time: 0.544, data: 0.001) G_L1: 13.447 G_L1_ABSOLUTE: 2.104 G_L1_RELATIVE: 11.343 G_Regularizer: 0.000 validation_error: 20.896
+(epoch: 32, iters: 216688, time: 0.544, data: 0.000) G_L1: 16.394 G_L1_ABSOLUTE: 2.576 G_L1_RELATIVE: 13.818 G_Regularizer: 0.000 validation_error: 20.612
+(epoch: 32, iters: 218688, time: 0.542, data: 0.000) G_L1: 15.679 G_L1_ABSOLUTE: 2.361 G_L1_RELATIVE: 13.318 G_Regularizer: 0.000 validation_error: 20.983
+(epoch: 32, iters: 220688, time: 0.545, data: 0.000) G_L1: 13.574 G_L1_ABSOLUTE: 2.254 G_L1_RELATIVE: 11.320 G_Regularizer: 0.000 validation_error: 20.820
+(epoch: 32, iters: 222688, time: 0.550, data: 0.000) G_L1: 15.023 G_L1_ABSOLUTE: 2.485 G_L1_RELATIVE: 12.538 G_Regularizer: 0.000 validation_error: 21.031
+(epoch: 32, iters: 224688, time: 0.544, data: 0.000) G_L1: 14.424 G_L1_ABSOLUTE: 2.545 G_L1_RELATIVE: 11.878 G_Regularizer: 0.000 validation_error: 20.659
+(epoch: 32, iters: 226688, time: 0.544, data: 0.000) G_L1: 12.131 G_L1_ABSOLUTE: 2.021 G_L1_RELATIVE: 10.110 G_Regularizer: 0.000 validation_error: 21.043
+(epoch: 32, iters: 228688, time: 0.540, data: 0.000) G_L1: 13.827 G_L1_ABSOLUTE: 2.357 G_L1_RELATIVE: 11.470 G_Regularizer: 0.000 validation_error: 20.767
+(epoch: 32, iters: 230688, time: 0.549, data: 0.001) G_L1: 15.114 G_L1_ABSOLUTE: 2.554 G_L1_RELATIVE: 12.560 G_Regularizer: 0.000 validation_error: 21.387
+(epoch: 32, iters: 232688, time: 0.555, data: 0.000) G_L1: 13.571 G_L1_ABSOLUTE: 2.296 G_L1_RELATIVE: 11.275 G_Regularizer: 0.000 validation_error: 21.091
+(epoch: 32, iters: 234688, time: 0.546, data: 0.000) G_L1: 12.361 G_L1_ABSOLUTE: 2.519 G_L1_RELATIVE: 9.842 G_Regularizer: 0.000 validation_error: 20.831
+(epoch: 32, iters: 236688, time: 0.550, data: 0.001) G_L1: 14.613 G_L1_ABSOLUTE: 2.368 G_L1_RELATIVE: 12.245 G_Regularizer: 0.000 validation_error: 21.153
+(epoch: 32, iters: 238688, time: 0.542, data: 0.000) G_L1: 17.433 G_L1_ABSOLUTE: 2.306 G_L1_RELATIVE: 15.126 G_Regularizer: 0.000 validation_error: 21.147
+(epoch: 32, iters: 240688, time: 0.554, data: 0.000) G_L1: 13.187 G_L1_ABSOLUTE: 2.725 G_L1_RELATIVE: 10.462 G_Regularizer: 0.000 validation_error: 21.063
+(epoch: 32, iters: 242688, time: 0.541, data: 0.000) G_L1: 12.326 G_L1_ABSOLUTE: 2.554 G_L1_RELATIVE: 9.773 G_Regularizer: 0.000 validation_error: 20.958
+(epoch: 32, iters: 244688, time: 0.544, data: 0.000) G_L1: 13.364 G_L1_ABSOLUTE: 2.566 G_L1_RELATIVE: 10.798 G_Regularizer: 0.000 validation_error: 21.338
+(epoch: 32, iters: 246688, time: 0.547, data: 0.000) G_L1: 11.285 G_L1_ABSOLUTE: 2.042 G_L1_RELATIVE: 9.243 G_Regularizer: 0.000 validation_error: 20.714
+(epoch: 32, iters: 248688, time: 0.555, data: 0.000) G_L1: 17.801 G_L1_ABSOLUTE: 2.968 G_L1_RELATIVE: 14.833 G_Regularizer: 0.000 validation_error: 20.783
+(epoch: 32, iters: 250688, time: 0.539, data: 0.000) G_L1: 17.351 G_L1_ABSOLUTE: 2.420 G_L1_RELATIVE: 14.931 G_Regularizer: 0.000 validation_error: 20.701
+(epoch: 32, iters: 252688, time: 0.547, data: 0.000) G_L1: 13.652 G_L1_ABSOLUTE: 2.135 G_L1_RELATIVE: 11.517 G_Regularizer: 0.000 validation_error: 20.911
+(epoch: 32, iters: 254688, time: 0.547, data: 0.000) G_L1: 15.311 G_L1_ABSOLUTE: 2.953 G_L1_RELATIVE: 12.358 G_Regularizer: 0.000 validation_error: 21.229
+(epoch: 32, iters: 256688, time: 0.541, data: 0.000) G_L1: 15.517 G_L1_ABSOLUTE: 2.499 G_L1_RELATIVE: 13.018 G_Regularizer: 0.000 validation_error: 20.504
+(epoch: 32, iters: 258688, time: 0.539, data: 0.000) G_L1: 16.318 G_L1_ABSOLUTE: 2.645 G_L1_RELATIVE: 13.673 G_Regularizer: 0.000 validation_error: 20.908
+(epoch: 32, iters: 260688, time: 0.542, data: 0.000) G_L1: 12.333 G_L1_ABSOLUTE: 2.387 G_L1_RELATIVE: 9.946 G_Regularizer: 0.000 validation_error: 20.305
+(epoch: 32, iters: 262688, time: 0.536, data: 0.000) G_L1: 11.829 G_L1_ABSOLUTE: 2.377 G_L1_RELATIVE: 9.452 G_Regularizer: 0.000 validation_error: 20.405
+(epoch: 32, iters: 264688, time: 0.543, data: 0.000) G_L1: 12.642 G_L1_ABSOLUTE: 2.167 G_L1_RELATIVE: 10.474 G_Regularizer: 0.000 validation_error: 20.689
+(epoch: 32, iters: 266688, time: 0.548, data: 0.000) G_L1: 13.674 G_L1_ABSOLUTE: 2.241 G_L1_RELATIVE: 11.433 G_Regularizer: 0.000 validation_error: 20.621
+(epoch: 32, iters: 268688, time: 0.553, data: 0.000) G_L1: 15.524 G_L1_ABSOLUTE: 2.504 G_L1_RELATIVE: 13.021 G_Regularizer: 0.000 validation_error: 20.496
+(epoch: 32, iters: 270688, time: 0.540, data: 0.000) G_L1: 13.685 G_L1_ABSOLUTE: 2.714 G_L1_RELATIVE: 10.971 G_Regularizer: 0.000 validation_error: 20.748
+(epoch: 32, iters: 272688, time: 0.549, data: 0.000) G_L1: 16.412 G_L1_ABSOLUTE: 3.004 G_L1_RELATIVE: 13.408 G_Regularizer: 0.000 validation_error: 20.673
+(epoch: 32, iters: 274688, time: 0.552, data: 0.000) G_L1: 17.846 G_L1_ABSOLUTE: 3.076 G_L1_RELATIVE: 14.770 G_Regularizer: 0.000 validation_error: 20.763
+(epoch: 32, iters: 276688, time: 0.542, data: 0.000) G_L1: 15.899 G_L1_ABSOLUTE: 2.741 G_L1_RELATIVE: 13.158 G_Regularizer: 0.000 validation_error: 21.252
+(epoch: 32, iters: 278688, time: 0.542, data: 0.000) G_L1: 12.479 G_L1_ABSOLUTE: 2.373 G_L1_RELATIVE: 10.106 G_Regularizer: 0.000 validation_error: 20.841
+(epoch: 32, iters: 280688, time: 0.549, data: 0.000) G_L1: 14.473 G_L1_ABSOLUTE: 2.430 G_L1_RELATIVE: 12.042 G_Regularizer: 0.000 validation_error: 20.848
+(epoch: 32, iters: 282688, time: 0.549, data: 0.000) G_L1: 16.495 G_L1_ABSOLUTE: 2.655 G_L1_RELATIVE: 13.840 G_Regularizer: 0.000 validation_error: 20.730
+(epoch: 32, iters: 284688, time: 0.545, data: 0.000) G_L1: 15.645 G_L1_ABSOLUTE: 2.620 G_L1_RELATIVE: 13.025 G_Regularizer: 0.000 validation_error: 20.976
+(epoch: 32, iters: 286688, time: 0.551, data: 0.000) G_L1: 14.766 G_L1_ABSOLUTE: 2.251 G_L1_RELATIVE: 12.515 G_Regularizer: 0.000 validation_error: 20.485
+(epoch: 32, iters: 288688, time: 0.553, data: 0.000) G_L1: 14.682 G_L1_ABSOLUTE: 2.593 G_L1_RELATIVE: 12.090 G_Regularizer: 0.000 validation_error: 21.310
+(epoch: 32, iters: 290688, time: 0.544, data: 0.000) G_L1: 14.097 G_L1_ABSOLUTE: 2.503 G_L1_RELATIVE: 11.595 G_Regularizer: 0.000 validation_error: 21.100
+(epoch: 32, iters: 292688, time: 0.550, data: 0.000) G_L1: 15.115 G_L1_ABSOLUTE: 2.806 G_L1_RELATIVE: 12.309 G_Regularizer: 0.000 validation_error: 20.581
+(epoch: 32, iters: 294688, time: 0.548, data: 0.000) G_L1: 14.533 G_L1_ABSOLUTE: 2.757 G_L1_RELATIVE: 11.776 G_Regularizer: 0.000 validation_error: 20.845
+(epoch: 32, iters: 296688, time: 0.545, data: 0.000) G_L1: 12.034 G_L1_ABSOLUTE: 2.400 G_L1_RELATIVE: 9.635 G_Regularizer: 0.000 validation_error: 20.831
+(epoch: 32, iters: 298688, time: 0.550, data: 0.000) G_L1: 14.842 G_L1_ABSOLUTE: 2.447 G_L1_RELATIVE: 12.395 G_Regularizer: 0.000 validation_error: 20.706
+(epoch: 32, iters: 300688, time: 0.554, data: 0.000) G_L1: 18.641 G_L1_ABSOLUTE: 2.774 G_L1_RELATIVE: 15.867 G_Regularizer: 0.000 validation_error: 21.033
+(epoch: 32, iters: 302688, time: 0.542, data: 0.001) G_L1: 13.871 G_L1_ABSOLUTE: 2.088 G_L1_RELATIVE: 11.783 G_Regularizer: 0.000 validation_error: 20.503
+(epoch: 33, iters: 1936, time: 0.546, data: 0.000) G_L1: 13.409 G_L1_ABSOLUTE: 2.122 G_L1_RELATIVE: 11.287 G_Regularizer: 0.000 validation_error: 20.866
+(epoch: 33, iters: 3936, time: 0.547, data: 0.000) G_L1: 15.309 G_L1_ABSOLUTE: 2.929 G_L1_RELATIVE: 12.379 G_Regularizer: 0.000 validation_error: 20.475
+(epoch: 33, iters: 5936, time: 0.551, data: 0.000) G_L1: 13.525 G_L1_ABSOLUTE: 2.148 G_L1_RELATIVE: 11.377 G_Regularizer: 0.000 validation_error: 20.178
+(epoch: 33, iters: 7936, time: 0.541, data: 0.000) G_L1: 14.559 G_L1_ABSOLUTE: 2.296 G_L1_RELATIVE: 12.263 G_Regularizer: 0.000 validation_error: 21.116
+(epoch: 33, iters: 9936, time: 0.545, data: 0.000) G_L1: 15.649 G_L1_ABSOLUTE: 2.904 G_L1_RELATIVE: 12.745 G_Regularizer: 0.000 validation_error: 20.965
+(epoch: 33, iters: 11936, time: 0.545, data: 0.000) G_L1: 12.280 G_L1_ABSOLUTE: 2.120 G_L1_RELATIVE: 10.159 G_Regularizer: 0.000 validation_error: 20.880
+(epoch: 33, iters: 13936, time: 0.543, data: 0.000) G_L1: 14.211 G_L1_ABSOLUTE: 2.932 G_L1_RELATIVE: 11.279 G_Regularizer: 0.000 validation_error: 20.371
+(epoch: 33, iters: 15936, time: 0.552, data: 0.000) G_L1: 14.902 G_L1_ABSOLUTE: 2.506 G_L1_RELATIVE: 12.396 G_Regularizer: 0.000 validation_error: 20.809
+(epoch: 33, iters: 17936, time: 0.544, data: 0.000) G_L1: 15.253 G_L1_ABSOLUTE: 2.207 G_L1_RELATIVE: 13.046 G_Regularizer: 0.000 validation_error: 20.782
+(epoch: 33, iters: 19936, time: 0.552, data: 0.000) G_L1: 16.224 G_L1_ABSOLUTE: 3.257 G_L1_RELATIVE: 12.967 G_Regularizer: 0.000 validation_error: 20.950
+(epoch: 33, iters: 21936, time: 0.547, data: 0.000) G_L1: 13.255 G_L1_ABSOLUTE: 2.884 G_L1_RELATIVE: 10.371 G_Regularizer: 0.000 validation_error: 20.807
+(epoch: 33, iters: 23936, time: 0.553, data: 0.000) G_L1: 15.811 G_L1_ABSOLUTE: 2.209 G_L1_RELATIVE: 13.601 G_Regularizer: 0.000 validation_error: 20.809
+(epoch: 33, iters: 25936, time: 0.544, data: 0.000) G_L1: 17.588 G_L1_ABSOLUTE: 2.400 G_L1_RELATIVE: 15.189 G_Regularizer: 0.000 validation_error: 21.231
+(epoch: 33, iters: 27936, time: 0.553, data: 0.000) G_L1: 12.939 G_L1_ABSOLUTE: 2.721 G_L1_RELATIVE: 10.218 G_Regularizer: 0.000 validation_error: 20.705
+(epoch: 33, iters: 29936, time: 0.556, data: 0.000) G_L1: 24.473 G_L1_ABSOLUTE: 2.506 G_L1_RELATIVE: 21.968 G_Regularizer: 0.000 validation_error: 20.922
+(epoch: 33, iters: 31936, time: 0.554, data: 0.000) G_L1: 19.335 G_L1_ABSOLUTE: 3.288 G_L1_RELATIVE: 16.048 G_Regularizer: 0.000 validation_error: 21.261
+(epoch: 33, iters: 33936, time: 0.541, data: 0.000) G_L1: 15.943 G_L1_ABSOLUTE: 2.469 G_L1_RELATIVE: 13.474 G_Regularizer: 0.000 validation_error: 20.788
+(epoch: 33, iters: 35936, time: 0.545, data: 0.000) G_L1: 17.470 G_L1_ABSOLUTE: 2.192 G_L1_RELATIVE: 15.279 G_Regularizer: 0.000 validation_error: 21.097
+(epoch: 33, iters: 37936, time: 0.546, data: 0.000) G_L1: 14.971 G_L1_ABSOLUTE: 2.586 G_L1_RELATIVE: 12.385 G_Regularizer: 0.000 validation_error: 21.008
+(epoch: 33, iters: 39936, time: 0.544, data: 0.000) G_L1: 17.201 G_L1_ABSOLUTE: 2.417 G_L1_RELATIVE: 14.784 G_Regularizer: 0.000 validation_error: 20.776
+(epoch: 33, iters: 41936, time: 0.550, data: 0.000) G_L1: 12.462 G_L1_ABSOLUTE: 2.370 G_L1_RELATIVE: 10.092 G_Regularizer: 0.000 validation_error: 21.161
+(epoch: 33, iters: 43936, time: 0.543, data: 0.000) G_L1: 15.364 G_L1_ABSOLUTE: 2.592 G_L1_RELATIVE: 12.773 G_Regularizer: 0.000 validation_error: 21.223
+(epoch: 33, iters: 45936, time: 0.547, data: 0.000) G_L1: 16.958 G_L1_ABSOLUTE: 3.378 G_L1_RELATIVE: 13.580 G_Regularizer: 0.000 validation_error: 21.549
+(epoch: 33, iters: 47936, time: 0.546, data: 0.000) G_L1: 14.058 G_L1_ABSOLUTE: 2.596 G_L1_RELATIVE: 11.461 G_Regularizer: 0.000 validation_error: 20.539
+(epoch: 33, iters: 49936, time: 0.546, data: 0.000) G_L1: 18.969 G_L1_ABSOLUTE: 2.619 G_L1_RELATIVE: 16.350 G_Regularizer: 0.000 validation_error: 20.480
+(epoch: 33, iters: 51936, time: 0.546, data: 0.000) G_L1: 16.446 G_L1_ABSOLUTE: 2.781 G_L1_RELATIVE: 13.664 G_Regularizer: 0.000 validation_error: 20.637
+(epoch: 33, iters: 53936, time: 0.546, data: 0.000) G_L1: 16.173 G_L1_ABSOLUTE: 2.419 G_L1_RELATIVE: 13.754 G_Regularizer: 0.000 validation_error: 20.461
+(epoch: 33, iters: 55936, time: 0.545, data: 0.000) G_L1: 12.979 G_L1_ABSOLUTE: 2.533 G_L1_RELATIVE: 10.446 G_Regularizer: 0.000 validation_error: 21.299
+(epoch: 33, iters: 57936, time: 0.552, data: 0.000) G_L1: 15.505 G_L1_ABSOLUTE: 2.717 G_L1_RELATIVE: 12.788 G_Regularizer: 0.000 validation_error: 21.066
+(epoch: 33, iters: 59936, time: 0.544, data: 0.000) G_L1: 11.402 G_L1_ABSOLUTE: 2.333 G_L1_RELATIVE: 9.069 G_Regularizer: 0.000 validation_error: 21.067
+(epoch: 33, iters: 61936, time: 0.537, data: 0.000) G_L1: 15.358 G_L1_ABSOLUTE: 2.590 G_L1_RELATIVE: 12.768 G_Regularizer: 0.000 validation_error: 20.327
+(epoch: 33, iters: 63936, time: 0.553, data: 0.000) G_L1: 13.175 G_L1_ABSOLUTE: 2.568 G_L1_RELATIVE: 10.607 G_Regularizer: 0.000 validation_error: 20.918
+(epoch: 33, iters: 65936, time: 0.550, data: 0.000) G_L1: 13.051 G_L1_ABSOLUTE: 2.435 G_L1_RELATIVE: 10.616 G_Regularizer: 0.000 validation_error: 20.650
+(epoch: 33, iters: 67936, time: 0.543, data: 0.000) G_L1: 14.593 G_L1_ABSOLUTE: 2.239 G_L1_RELATIVE: 12.354 G_Regularizer: 0.000 validation_error: 20.772
+(epoch: 33, iters: 69936, time: 0.546, data: 0.000) G_L1: 14.735 G_L1_ABSOLUTE: 2.039 G_L1_RELATIVE: 12.696 G_Regularizer: 0.000 validation_error: 20.400
+(epoch: 33, iters: 71936, time: 0.551, data: 0.000) G_L1: 13.781 G_L1_ABSOLUTE: 2.526 G_L1_RELATIVE: 11.255 G_Regularizer: 0.000 validation_error: 21.063
+(epoch: 33, iters: 73936, time: 0.543, data: 0.000) G_L1: 13.518 G_L1_ABSOLUTE: 2.291 G_L1_RELATIVE: 11.227 G_Regularizer: 0.000 validation_error: 20.804
+(epoch: 33, iters: 75936, time: 0.554, data: 0.000) G_L1: 14.428 G_L1_ABSOLUTE: 2.331 G_L1_RELATIVE: 12.098 G_Regularizer: 0.000 validation_error: 20.703
+(epoch: 33, iters: 77936, time: 0.541, data: 0.000) G_L1: 14.429 G_L1_ABSOLUTE: 2.239 G_L1_RELATIVE: 12.190 G_Regularizer: 0.000 validation_error: 20.545
+(epoch: 33, iters: 79936, time: 0.549, data: 0.000) G_L1: 15.632 G_L1_ABSOLUTE: 2.880 G_L1_RELATIVE: 12.752 G_Regularizer: 0.000 validation_error: 21.076
+(epoch: 33, iters: 81936, time: 0.550, data: 0.000) G_L1: 15.446 G_L1_ABSOLUTE: 3.144 G_L1_RELATIVE: 12.302 G_Regularizer: 0.000 validation_error: 20.878
+(epoch: 33, iters: 83936, time: 0.546, data: 0.000) G_L1: 15.278 G_L1_ABSOLUTE: 2.415 G_L1_RELATIVE: 12.863 G_Regularizer: 0.000 validation_error: 20.957
+(epoch: 33, iters: 85936, time: 0.539, data: 0.000) G_L1: 13.841 G_L1_ABSOLUTE: 2.265 G_L1_RELATIVE: 11.576 G_Regularizer: 0.000 validation_error: 21.012
+(epoch: 33, iters: 87936, time: 0.545, data: 0.000) G_L1: 17.099 G_L1_ABSOLUTE: 2.781 G_L1_RELATIVE: 14.318 G_Regularizer: 0.000 validation_error: 21.172
+(epoch: 33, iters: 89936, time: 0.560, data: 0.000) G_L1: 13.707 G_L1_ABSOLUTE: 2.424 G_L1_RELATIVE: 11.282 G_Regularizer: 0.000 validation_error: 20.949
+(epoch: 33, iters: 91936, time: 0.558, data: 0.000) G_L1: 14.734 G_L1_ABSOLUTE: 2.415 G_L1_RELATIVE: 12.319 G_Regularizer: 0.000 validation_error: 21.062
+(epoch: 33, iters: 93936, time: 0.547, data: 0.000) G_L1: 13.566 G_L1_ABSOLUTE: 2.524 G_L1_RELATIVE: 11.041 G_Regularizer: 0.000 validation_error: 21.175
+(epoch: 33, iters: 95936, time: 0.547, data: 0.000) G_L1: 13.512 G_L1_ABSOLUTE: 2.634 G_L1_RELATIVE: 10.877 G_Regularizer: 0.000 validation_error: 20.773
+(epoch: 33, iters: 97936, time: 0.547, data: 0.000) G_L1: 12.153 G_L1_ABSOLUTE: 2.462 G_L1_RELATIVE: 9.691 G_Regularizer: 0.000 validation_error: 20.497
+(epoch: 33, iters: 99936, time: 0.539, data: 0.000) G_L1: 15.071 G_L1_ABSOLUTE: 2.651 G_L1_RELATIVE: 12.420 G_Regularizer: 0.000 validation_error: 20.688
+(epoch: 33, iters: 101936, time: 0.542, data: 0.000) G_L1: 15.074 G_L1_ABSOLUTE: 3.011 G_L1_RELATIVE: 12.063 G_Regularizer: 0.000 validation_error: 20.456
+(epoch: 33, iters: 103936, time: 0.549, data: 0.000) G_L1: 14.412 G_L1_ABSOLUTE: 2.628 G_L1_RELATIVE: 11.784 G_Regularizer: 0.000 validation_error: 20.985
+(epoch: 33, iters: 105936, time: 0.547, data: 0.000) G_L1: 14.999 G_L1_ABSOLUTE: 2.567 G_L1_RELATIVE: 12.431 G_Regularizer: 0.000 validation_error: 20.996
+(epoch: 33, iters: 107936, time: 0.544, data: 0.000) G_L1: 15.014 G_L1_ABSOLUTE: 2.379 G_L1_RELATIVE: 12.635 G_Regularizer: 0.000 validation_error: 20.601
+(epoch: 33, iters: 109936, time: 0.550, data: 0.000) G_L1: 14.181 G_L1_ABSOLUTE: 2.618 G_L1_RELATIVE: 11.564 G_Regularizer: 0.000 validation_error: 20.711
+(epoch: 33, iters: 111936, time: 0.546, data: 0.001) G_L1: 13.038 G_L1_ABSOLUTE: 2.409 G_L1_RELATIVE: 10.629 G_Regularizer: 0.000 validation_error: 20.660
+(epoch: 33, iters: 113936, time: 0.542, data: 0.000) G_L1: 14.570 G_L1_ABSOLUTE: 2.834 G_L1_RELATIVE: 11.736 G_Regularizer: 0.000 validation_error: 21.018
+(epoch: 33, iters: 115936, time: 0.546, data: 0.000) G_L1: 13.842 G_L1_ABSOLUTE: 2.340 G_L1_RELATIVE: 11.502 G_Regularizer: 0.000 validation_error: 20.886
+(epoch: 33, iters: 117936, time: 0.556, data: 0.000) G_L1: 13.724 G_L1_ABSOLUTE: 2.497 G_L1_RELATIVE: 11.228 G_Regularizer: 0.000 validation_error: 20.815
+(epoch: 33, iters: 119936, time: 0.547, data: 0.000) G_L1: 14.401 G_L1_ABSOLUTE: 2.771 G_L1_RELATIVE: 11.630 G_Regularizer: 0.000 validation_error: 20.812
+(epoch: 33, iters: 121936, time: 0.539, data: 0.000) G_L1: 15.212 G_L1_ABSOLUTE: 2.654 G_L1_RELATIVE: 12.558 G_Regularizer: 0.000 validation_error: 20.507
+(epoch: 33, iters: 123936, time: 0.545, data: 0.000) G_L1: 13.879 G_L1_ABSOLUTE: 2.723 G_L1_RELATIVE: 11.156 G_Regularizer: 0.000 validation_error: 20.503
+(epoch: 33, iters: 125936, time: 0.550, data: 0.000) G_L1: 14.953 G_L1_ABSOLUTE: 2.741 G_L1_RELATIVE: 12.212 G_Regularizer: 0.000 validation_error: 20.975
+(epoch: 33, iters: 127936, time: 0.545, data: 0.000) G_L1: 16.142 G_L1_ABSOLUTE: 2.385 G_L1_RELATIVE: 13.757 G_Regularizer: 0.000 validation_error: 20.790
+(epoch: 33, iters: 129936, time: 0.547, data: 0.000) G_L1: 14.769 G_L1_ABSOLUTE: 2.714 G_L1_RELATIVE: 12.055 G_Regularizer: 0.000 validation_error: 20.349
+(epoch: 33, iters: 131936, time: 0.549, data: 0.000) G_L1: 16.240 G_L1_ABSOLUTE: 2.823 G_L1_RELATIVE: 13.417 G_Regularizer: 0.000 validation_error: 21.006
+(epoch: 33, iters: 133936, time: 0.542, data: 0.000) G_L1: 12.864 G_L1_ABSOLUTE: 2.258 G_L1_RELATIVE: 10.605 G_Regularizer: 0.000 validation_error: 20.890
+(epoch: 33, iters: 135936, time: 0.548, data: 0.000) G_L1: 13.053 G_L1_ABSOLUTE: 2.286 G_L1_RELATIVE: 10.768 G_Regularizer: 0.000 validation_error: 20.183
+(epoch: 33, iters: 137936, time: 0.546, data: 0.001) G_L1: 11.928 G_L1_ABSOLUTE: 2.198 G_L1_RELATIVE: 9.730 G_Regularizer: 0.000 validation_error: 20.681
+(epoch: 33, iters: 139936, time: 0.541, data: 0.000) G_L1: 15.216 G_L1_ABSOLUTE: 2.871 G_L1_RELATIVE: 12.345 G_Regularizer: 0.000 validation_error: 20.998
+(epoch: 33, iters: 141936, time: 0.537, data: 0.000) G_L1: 13.808 G_L1_ABSOLUTE: 2.652 G_L1_RELATIVE: 11.156 G_Regularizer: 0.000 validation_error: 20.828
+(epoch: 33, iters: 143936, time: 0.545, data: 0.000) G_L1: 13.653 G_L1_ABSOLUTE: 2.846 G_L1_RELATIVE: 10.808 G_Regularizer: 0.000 validation_error: 20.879
+(epoch:
33, iters: 145936, time: 0.542, data: 0.000) G_L1: 14.972 G_L1_ABSOLUTE: 2.465 G_L1_RELATIVE: 12.506 G_Regularizer: 0.000 validation_error: 20.934 +(epoch: 33, iters: 147936, time: 0.553, data: 0.000) G_L1: 10.777 G_L1_ABSOLUTE: 1.930 G_L1_RELATIVE: 8.848 G_Regularizer: 0.000 validation_error: 20.377 +(epoch: 33, iters: 149936, time: 0.544, data: 0.000) G_L1: 14.503 G_L1_ABSOLUTE: 2.182 G_L1_RELATIVE: 12.321 G_Regularizer: 0.000 validation_error: 20.391 +(epoch: 33, iters: 151936, time: 0.544, data: 0.000) G_L1: 16.129 G_L1_ABSOLUTE: 2.560 G_L1_RELATIVE: 13.569 G_Regularizer: 0.000 validation_error: 21.069 +(epoch: 33, iters: 153936, time: 0.547, data: 0.001) G_L1: 16.473 G_L1_ABSOLUTE: 2.450 G_L1_RELATIVE: 14.024 G_Regularizer: 0.000 validation_error: 20.519 +(epoch: 33, iters: 155936, time: 0.541, data: 0.000) G_L1: 15.486 G_L1_ABSOLUTE: 2.412 G_L1_RELATIVE: 13.075 G_Regularizer: 0.000 validation_error: 21.350 +(epoch: 33, iters: 157936, time: 0.550, data: 0.000) G_L1: 12.938 G_L1_ABSOLUTE: 2.128 G_L1_RELATIVE: 10.810 G_Regularizer: 0.000 validation_error: 20.957 +(epoch: 33, iters: 159936, time: 0.547, data: 0.000) G_L1: 13.856 G_L1_ABSOLUTE: 2.402 G_L1_RELATIVE: 11.454 G_Regularizer: 0.000 validation_error: 20.527 +(epoch: 33, iters: 161936, time: 0.563, data: 0.000) G_L1: 13.801 G_L1_ABSOLUTE: 2.691 G_L1_RELATIVE: 11.110 G_Regularizer: 0.000 validation_error: 20.780 +(epoch: 33, iters: 163936, time: 0.545, data: 0.000) G_L1: 15.292 G_L1_ABSOLUTE: 2.314 G_L1_RELATIVE: 12.978 G_Regularizer: 0.000 validation_error: 20.854 +(epoch: 33, iters: 165936, time: 0.546, data: 0.000) G_L1: 12.393 G_L1_ABSOLUTE: 2.320 G_L1_RELATIVE: 10.073 G_Regularizer: 0.000 validation_error: 20.827 +(epoch: 33, iters: 167936, time: 0.543, data: 0.000) G_L1: 14.948 G_L1_ABSOLUTE: 2.944 G_L1_RELATIVE: 12.004 G_Regularizer: 0.000 validation_error: 20.858 +(epoch: 33, iters: 169936, time: 0.547, data: 0.000) G_L1: 16.082 G_L1_ABSOLUTE: 3.403 G_L1_RELATIVE: 12.679 G_Regularizer: 0.000 validation_error: 20.945 +(epoch: 33, iters: 171936, time: 0.544, data: 0.000) G_L1: 13.112 G_L1_ABSOLUTE: 2.369 G_L1_RELATIVE: 10.744 G_Regularizer: 0.000 validation_error: 20.815 +(epoch: 33, iters: 173936, time: 0.546, data: 0.000) G_L1: 14.250 G_L1_ABSOLUTE: 2.368 G_L1_RELATIVE: 11.883 G_Regularizer: 0.000 validation_error: 21.099 +(epoch: 33, iters: 175936, time: 0.547, data: 0.000) G_L1: 15.091 G_L1_ABSOLUTE: 2.539 G_L1_RELATIVE: 12.553 G_Regularizer: 0.000 validation_error: 20.785 +(epoch: 33, iters: 177936, time: 0.554, data: 0.000) G_L1: 15.981 G_L1_ABSOLUTE: 2.838 G_L1_RELATIVE: 13.143 G_Regularizer: 0.000 validation_error: 21.251 +(epoch: 33, iters: 179936, time: 0.541, data: 0.000) G_L1: 15.346 G_L1_ABSOLUTE: 2.533 G_L1_RELATIVE: 12.812 G_Regularizer: 0.000 validation_error: 21.156 +(epoch: 33, iters: 181936, time: 0.546, data: 0.000) G_L1: 15.805 G_L1_ABSOLUTE: 2.912 G_L1_RELATIVE: 12.893 G_Regularizer: 0.000 validation_error: 20.565 +(epoch: 33, iters: 183936, time: 0.546, data: 0.000) G_L1: 13.610 G_L1_ABSOLUTE: 2.287 G_L1_RELATIVE: 11.323 G_Regularizer: 0.000 validation_error: 21.227 +(epoch: 33, iters: 185936, time: 0.544, data: 0.000) G_L1: 17.934 G_L1_ABSOLUTE: 2.626 G_L1_RELATIVE: 15.308 G_Regularizer: 0.000 validation_error: 20.973 +(epoch: 33, iters: 187936, time: 0.549, data: 0.000) G_L1: 14.328 G_L1_ABSOLUTE: 2.276 G_L1_RELATIVE: 12.051 G_Regularizer: 0.000 validation_error: 20.671 +(epoch: 33, iters: 189936, time: 0.548, data: 0.001) G_L1: 15.319 G_L1_ABSOLUTE: 2.678 G_L1_RELATIVE: 12.641 G_Regularizer: 0.000 
validation_error: 20.847 +(epoch: 33, iters: 191936, time: 0.547, data: 0.000) G_L1: 12.611 G_L1_ABSOLUTE: 2.571 G_L1_RELATIVE: 10.040 G_Regularizer: 0.000 validation_error: 20.551 +(epoch: 33, iters: 193936, time: 0.549, data: 0.000) G_L1: 9.905 G_L1_ABSOLUTE: 2.347 G_L1_RELATIVE: 7.558 G_Regularizer: 0.000 validation_error: 21.098 +(epoch: 33, iters: 195936, time: 0.544, data: 0.000) G_L1: 15.420 G_L1_ABSOLUTE: 2.518 G_L1_RELATIVE: 12.902 G_Regularizer: 0.000 validation_error: 20.569 +(epoch: 33, iters: 197936, time: 0.545, data: 0.000) G_L1: 14.987 G_L1_ABSOLUTE: 2.732 G_L1_RELATIVE: 12.256 G_Regularizer: 0.000 validation_error: 20.461 +(epoch: 33, iters: 199936, time: 0.540, data: 0.000) G_L1: 15.013 G_L1_ABSOLUTE: 2.484 G_L1_RELATIVE: 12.530 G_Regularizer: 0.000 validation_error: 21.404 +(epoch: 33, iters: 201936, time: 0.550, data: 0.000) G_L1: 12.909 G_L1_ABSOLUTE: 2.763 G_L1_RELATIVE: 10.145 G_Regularizer: 0.000 validation_error: 20.532 +(epoch: 33, iters: 203936, time: 0.546, data: 0.000) G_L1: 14.679 G_L1_ABSOLUTE: 2.249 G_L1_RELATIVE: 12.430 G_Regularizer: 0.000 validation_error: 20.864 +(epoch: 33, iters: 205936, time: 0.551, data: 0.000) G_L1: 14.088 G_L1_ABSOLUTE: 2.493 G_L1_RELATIVE: 11.596 G_Regularizer: 0.000 validation_error: 20.743 +(epoch: 33, iters: 207936, time: 0.549, data: 0.000) G_L1: 14.133 G_L1_ABSOLUTE: 2.317 G_L1_RELATIVE: 11.816 G_Regularizer: 0.000 validation_error: 20.555 +(epoch: 33, iters: 209936, time: 0.550, data: 0.000) G_L1: 14.676 G_L1_ABSOLUTE: 2.340 G_L1_RELATIVE: 12.336 G_Regularizer: 0.000 validation_error: 20.860 +(epoch: 33, iters: 211936, time: 0.546, data: 0.000) G_L1: 17.943 G_L1_ABSOLUTE: 2.398 G_L1_RELATIVE: 15.544 G_Regularizer: 0.000 validation_error: 21.004 +(epoch: 33, iters: 213936, time: 0.538, data: 0.000) G_L1: 14.725 G_L1_ABSOLUTE: 2.539 G_L1_RELATIVE: 12.186 G_Regularizer: 0.000 validation_error: 21.211 +(epoch: 33, iters: 215936, time: 0.545, data: 0.000) G_L1: 13.449 G_L1_ABSOLUTE: 2.239 G_L1_RELATIVE: 11.210 G_Regularizer: 0.000 validation_error: 20.598 +(epoch: 33, iters: 217936, time: 0.540, data: 0.000) G_L1: 14.112 G_L1_ABSOLUTE: 2.272 G_L1_RELATIVE: 11.839 G_Regularizer: 0.000 validation_error: 21.074 +(epoch: 33, iters: 219936, time: 0.547, data: 0.000) G_L1: 12.961 G_L1_ABSOLUTE: 2.393 G_L1_RELATIVE: 10.568 G_Regularizer: 0.000 validation_error: 21.010 +(epoch: 33, iters: 221936, time: 0.553, data: 0.000) G_L1: 12.151 G_L1_ABSOLUTE: 1.813 G_L1_RELATIVE: 10.338 G_Regularizer: 0.000 validation_error: 20.610 +(epoch: 33, iters: 223936, time: 0.548, data: 0.000) G_L1: 14.776 G_L1_ABSOLUTE: 2.899 G_L1_RELATIVE: 11.877 G_Regularizer: 0.000 validation_error: 20.877 +(epoch: 33, iters: 225936, time: 0.540, data: 0.000) G_L1: 13.661 G_L1_ABSOLUTE: 2.449 G_L1_RELATIVE: 11.212 G_Regularizer: 0.000 validation_error: 20.868 +(epoch: 33, iters: 227936, time: 0.549, data: 0.000) G_L1: 17.337 G_L1_ABSOLUTE: 2.937 G_L1_RELATIVE: 14.400 G_Regularizer: 0.000 validation_error: 20.762 +(epoch: 33, iters: 229936, time: 0.549, data: 0.001) G_L1: 14.612 G_L1_ABSOLUTE: 2.632 G_L1_RELATIVE: 11.980 G_Regularizer: 0.000 validation_error: 20.713 +(epoch: 33, iters: 231936, time: 0.543, data: 0.000) G_L1: 13.629 G_L1_ABSOLUTE: 2.407 G_L1_RELATIVE: 11.222 G_Regularizer: 0.000 validation_error: 20.381 +(epoch: 33, iters: 233936, time: 0.543, data: 0.000) G_L1: 16.069 G_L1_ABSOLUTE: 2.805 G_L1_RELATIVE: 13.264 G_Regularizer: 0.000 validation_error: 20.574 +(epoch: 33, iters: 235936, time: 0.543, data: 0.000) G_L1: 14.173 G_L1_ABSOLUTE: 2.430 
G_L1_RELATIVE: 11.743 G_Regularizer: 0.000 validation_error: 19.987 +(epoch: 33, iters: 237936, time: 0.547, data: 0.000) G_L1: 14.772 G_L1_ABSOLUTE: 2.175 G_L1_RELATIVE: 12.597 G_Regularizer: 0.000 validation_error: 20.548 +(epoch: 33, iters: 239936, time: 0.543, data: 0.001) G_L1: 12.691 G_L1_ABSOLUTE: 2.241 G_L1_RELATIVE: 10.450 G_Regularizer: 0.000 validation_error: 20.628 +(epoch: 33, iters: 241936, time: 0.549, data: 0.000) G_L1: 16.423 G_L1_ABSOLUTE: 2.598 G_L1_RELATIVE: 13.824 G_Regularizer: 0.000 validation_error: 20.484 +(epoch: 33, iters: 243936, time: 0.542, data: 0.000) G_L1: 15.230 G_L1_ABSOLUTE: 2.197 G_L1_RELATIVE: 13.033 G_Regularizer: 0.000 validation_error: 20.605 +(epoch: 33, iters: 245936, time: 0.548, data: 0.000) G_L1: 12.223 G_L1_ABSOLUTE: 2.254 G_L1_RELATIVE: 9.969 G_Regularizer: 0.000 validation_error: 21.022 +(epoch: 33, iters: 247936, time: 0.546, data: 0.000) G_L1: 13.160 G_L1_ABSOLUTE: 2.338 G_L1_RELATIVE: 10.822 G_Regularizer: 0.000 validation_error: 19.880 +(epoch: 33, iters: 249936, time: 0.543, data: 0.000) G_L1: 15.686 G_L1_ABSOLUTE: 2.428 G_L1_RELATIVE: 13.258 G_Regularizer: 0.000 validation_error: 20.715 +(epoch: 33, iters: 251936, time: 0.548, data: 0.000) G_L1: 14.987 G_L1_ABSOLUTE: 2.749 G_L1_RELATIVE: 12.239 G_Regularizer: 0.000 validation_error: 20.648 +(epoch: 33, iters: 253936, time: 0.545, data: 0.000) G_L1: 16.745 G_L1_ABSOLUTE: 2.497 G_L1_RELATIVE: 14.249 G_Regularizer: 0.000 validation_error: 20.515 +(epoch: 33, iters: 255936, time: 0.546, data: 0.000) G_L1: 12.479 G_L1_ABSOLUTE: 2.385 G_L1_RELATIVE: 10.094 G_Regularizer: 0.000 validation_error: 20.843 +(epoch: 33, iters: 257936, time: 0.550, data: 0.000) G_L1: 14.069 G_L1_ABSOLUTE: 2.627 G_L1_RELATIVE: 11.442 G_Regularizer: 0.000 validation_error: 20.298 +(epoch: 33, iters: 259936, time: 0.540, data: 0.001) G_L1: 14.659 G_L1_ABSOLUTE: 2.442 G_L1_RELATIVE: 12.217 G_Regularizer: 0.000 validation_error: 20.893 +(epoch: 33, iters: 261936, time: 0.559, data: 0.001) G_L1: 14.342 G_L1_ABSOLUTE: 3.172 G_L1_RELATIVE: 11.170 G_Regularizer: 0.000 validation_error: 20.637 +(epoch: 33, iters: 263936, time: 0.546, data: 0.000) G_L1: 13.462 G_L1_ABSOLUTE: 2.732 G_L1_RELATIVE: 10.730 G_Regularizer: 0.000 validation_error: 21.515 +(epoch: 33, iters: 265936, time: 0.544, data: 0.000) G_L1: 14.168 G_L1_ABSOLUTE: 2.290 G_L1_RELATIVE: 11.877 G_Regularizer: 0.000 validation_error: 20.901 +(epoch: 33, iters: 267936, time: 0.553, data: 0.000) G_L1: 15.194 G_L1_ABSOLUTE: 2.806 G_L1_RELATIVE: 12.389 G_Regularizer: 0.000 validation_error: 20.408 +(epoch: 33, iters: 269936, time: 0.551, data: 0.000) G_L1: 15.737 G_L1_ABSOLUTE: 2.642 G_L1_RELATIVE: 13.095 G_Regularizer: 0.000 validation_error: 20.986 +(epoch: 33, iters: 271936, time: 0.543, data: 0.000) G_L1: 13.815 G_L1_ABSOLUTE: 2.195 G_L1_RELATIVE: 11.620 G_Regularizer: 0.000 validation_error: 20.741 +(epoch: 33, iters: 273936, time: 0.538, data: 0.000) G_L1: 16.555 G_L1_ABSOLUTE: 2.521 G_L1_RELATIVE: 14.033 G_Regularizer: 0.000 validation_error: 20.767 +(epoch: 33, iters: 275936, time: 0.548, data: 0.000) G_L1: 15.634 G_L1_ABSOLUTE: 2.574 G_L1_RELATIVE: 13.060 G_Regularizer: 0.000 validation_error: 20.625 +(epoch: 33, iters: 277936, time: 0.542, data: 0.000) G_L1: 13.981 G_L1_ABSOLUTE: 2.491 G_L1_RELATIVE: 11.490 G_Regularizer: 0.000 validation_error: 21.032 +(epoch: 33, iters: 279936, time: 0.540, data: 0.000) G_L1: 16.058 G_L1_ABSOLUTE: 2.686 G_L1_RELATIVE: 13.372 G_Regularizer: 0.000 validation_error: 20.660 +(epoch: 33, iters: 281936, time: 0.552, data: 0.000) 
G_L1: 14.263 G_L1_ABSOLUTE: 2.555 G_L1_RELATIVE: 11.707 G_Regularizer: 0.000 validation_error: 20.355 +(epoch: 33, iters: 283936, time: 0.545, data: 0.000) G_L1: 17.923 G_L1_ABSOLUTE: 2.480 G_L1_RELATIVE: 15.443 G_Regularizer: 0.000 validation_error: 20.141 +(epoch: 33, iters: 285936, time: 0.551, data: 0.001) G_L1: 11.887 G_L1_ABSOLUTE: 1.917 G_L1_RELATIVE: 9.970 G_Regularizer: 0.000 validation_error: 20.588 +(epoch: 33, iters: 287936, time: 0.550, data: 0.000) G_L1: 12.751 G_L1_ABSOLUTE: 2.320 G_L1_RELATIVE: 10.431 G_Regularizer: 0.000 validation_error: 20.350 +(epoch: 33, iters: 289936, time: 0.553, data: 0.000) G_L1: 13.549 G_L1_ABSOLUTE: 2.508 G_L1_RELATIVE: 11.041 G_Regularizer: 0.000 validation_error: 20.995 +(epoch: 33, iters: 291936, time: 0.542, data: 0.000) G_L1: 14.457 G_L1_ABSOLUTE: 2.215 G_L1_RELATIVE: 12.242 G_Regularizer: 0.000 validation_error: 20.367 +(epoch: 33, iters: 293936, time: 0.544, data: 0.000) G_L1: 16.487 G_L1_ABSOLUTE: 2.092 G_L1_RELATIVE: 14.394 G_Regularizer: 0.000 validation_error: 20.445 +(epoch: 33, iters: 295936, time: 0.548, data: 0.001) G_L1: 13.280 G_L1_ABSOLUTE: 2.634 G_L1_RELATIVE: 10.646 G_Regularizer: 0.000 validation_error: 20.554 +(epoch: 33, iters: 297936, time: 0.546, data: 0.000) G_L1: 15.739 G_L1_ABSOLUTE: 2.655 G_L1_RELATIVE: 13.083 G_Regularizer: 0.000 validation_error: 20.799 +(epoch: 33, iters: 299936, time: 0.546, data: 0.000) G_L1: 14.956 G_L1_ABSOLUTE: 2.533 G_L1_RELATIVE: 12.422 G_Regularizer: 0.000 validation_error: 20.673 +(epoch: 33, iters: 301936, time: 0.546, data: 0.000) G_L1: 12.875 G_L1_ABSOLUTE: 2.111 G_L1_RELATIVE: 10.764 G_Regularizer: 0.000 validation_error: 21.353 +(epoch: 34, iters: 1184, time: 0.548, data: 0.000) G_L1: 15.059 G_L1_ABSOLUTE: 2.510 G_L1_RELATIVE: 12.549 G_Regularizer: 0.000 validation_error: 20.452 +(epoch: 34, iters: 3184, time: 0.548, data: 0.000) G_L1: 15.093 G_L1_ABSOLUTE: 2.269 G_L1_RELATIVE: 12.825 G_Regularizer: 0.000 validation_error: 20.701 +(epoch: 34, iters: 5184, time: 0.546, data: 0.000) G_L1: 15.700 G_L1_ABSOLUTE: 2.730 G_L1_RELATIVE: 12.969 G_Regularizer: 0.000 validation_error: 20.665 +(epoch: 34, iters: 7184, time: 0.544, data: 0.000) G_L1: 14.791 G_L1_ABSOLUTE: 2.761 G_L1_RELATIVE: 12.030 G_Regularizer: 0.000 validation_error: 20.731 +(epoch: 34, iters: 9184, time: 0.548, data: 0.000) G_L1: 17.464 G_L1_ABSOLUTE: 2.725 G_L1_RELATIVE: 14.739 G_Regularizer: 0.000 validation_error: 20.687 +(epoch: 34, iters: 11184, time: 0.545, data: 0.000) G_L1: 16.011 G_L1_ABSOLUTE: 2.457 G_L1_RELATIVE: 13.554 G_Regularizer: 0.000 validation_error: 20.906 +(epoch: 34, iters: 13184, time: 0.549, data: 0.000) G_L1: 17.306 G_L1_ABSOLUTE: 2.631 G_L1_RELATIVE: 14.675 G_Regularizer: 0.000 validation_error: 20.656 +(epoch: 34, iters: 15184, time: 0.547, data: 0.000) G_L1: 16.258 G_L1_ABSOLUTE: 2.596 G_L1_RELATIVE: 13.662 G_Regularizer: 0.000 validation_error: 20.587 +(epoch: 34, iters: 17184, time: 0.542, data: 0.000) G_L1: 15.093 G_L1_ABSOLUTE: 2.516 G_L1_RELATIVE: 12.576 G_Regularizer: 0.000 validation_error: 21.060 +(epoch: 34, iters: 19184, time: 0.547, data: 0.000) G_L1: 12.898 G_L1_ABSOLUTE: 2.379 G_L1_RELATIVE: 10.519 G_Regularizer: 0.000 validation_error: 20.657 +(epoch: 34, iters: 21184, time: 0.547, data: 0.000) G_L1: 14.618 G_L1_ABSOLUTE: 2.439 G_L1_RELATIVE: 12.179 G_Regularizer: 0.000 validation_error: 21.031 +(epoch: 34, iters: 23184, time: 0.551, data: 0.000) G_L1: 14.822 G_L1_ABSOLUTE: 2.358 G_L1_RELATIVE: 12.464 G_Regularizer: 0.000 validation_error: 21.133 +(epoch: 34, iters: 25184, time: 
0.544, data: 0.000) G_L1: 15.176 G_L1_ABSOLUTE: 2.995 G_L1_RELATIVE: 12.181 G_Regularizer: 0.000 validation_error: 20.549 +(epoch: 34, iters: 27184, time: 0.547, data: 0.000) G_L1: 16.341 G_L1_ABSOLUTE: 2.851 G_L1_RELATIVE: 13.490 G_Regularizer: 0.000 validation_error: 20.652 +(epoch: 34, iters: 29184, time: 0.552, data: 0.000) G_L1: 12.301 G_L1_ABSOLUTE: 2.320 G_L1_RELATIVE: 9.981 G_Regularizer: 0.000 validation_error: 20.261 +(epoch: 34, iters: 31184, time: 0.547, data: 0.000) G_L1: 14.375 G_L1_ABSOLUTE: 2.729 G_L1_RELATIVE: 11.646 G_Regularizer: 0.000 validation_error: 20.669 +(epoch: 34, iters: 33184, time: 0.547, data: 0.000) G_L1: 15.799 G_L1_ABSOLUTE: 2.453 G_L1_RELATIVE: 13.346 G_Regularizer: 0.000 validation_error: 20.635 +(epoch: 34, iters: 35184, time: 0.546, data: 0.000) G_L1: 14.437 G_L1_ABSOLUTE: 2.631 G_L1_RELATIVE: 11.806 G_Regularizer: 0.000 validation_error: 20.594 +(epoch: 34, iters: 37184, time: 0.536, data: 0.000) G_L1: 11.891 G_L1_ABSOLUTE: 2.225 G_L1_RELATIVE: 9.666 G_Regularizer: 0.000 validation_error: 20.839 +(epoch: 34, iters: 39184, time: 0.550, data: 0.000) G_L1: 14.314 G_L1_ABSOLUTE: 2.474 G_L1_RELATIVE: 11.840 G_Regularizer: 0.000 validation_error: 20.521 +(epoch: 34, iters: 41184, time: 0.537, data: 0.000) G_L1: 15.686 G_L1_ABSOLUTE: 2.901 G_L1_RELATIVE: 12.785 G_Regularizer: 0.000 validation_error: 20.693 +(epoch: 34, iters: 43184, time: 0.551, data: 0.000) G_L1: 15.393 G_L1_ABSOLUTE: 2.878 G_L1_RELATIVE: 12.514 G_Regularizer: 0.000 validation_error: 20.347 +(epoch: 34, iters: 45184, time: 0.544, data: 0.000) G_L1: 15.652 G_L1_ABSOLUTE: 2.504 G_L1_RELATIVE: 13.148 G_Regularizer: 0.000 validation_error: 20.462 +(epoch: 34, iters: 47184, time: 0.546, data: 0.000) G_L1: 13.669 G_L1_ABSOLUTE: 2.572 G_L1_RELATIVE: 11.097 G_Regularizer: 0.000 validation_error: 20.546 +(epoch: 34, iters: 49184, time: 0.548, data: 0.000) G_L1: 11.164 G_L1_ABSOLUTE: 2.088 G_L1_RELATIVE: 9.076 G_Regularizer: 0.000 validation_error: 20.449 +(epoch: 34, iters: 51184, time: 0.538, data: 0.000) G_L1: 12.824 G_L1_ABSOLUTE: 2.247 G_L1_RELATIVE: 10.577 G_Regularizer: 0.000 validation_error: 20.624 +(epoch: 34, iters: 53184, time: 0.551, data: 0.000) G_L1: 18.164 G_L1_ABSOLUTE: 2.530 G_L1_RELATIVE: 15.634 G_Regularizer: 0.000 validation_error: 20.475 +(epoch: 34, iters: 55184, time: 0.544, data: 0.000) G_L1: 16.480 G_L1_ABSOLUTE: 2.774 G_L1_RELATIVE: 13.706 G_Regularizer: 0.000 validation_error: 20.626 +(epoch: 34, iters: 57184, time: 0.550, data: 0.000) G_L1: 17.145 G_L1_ABSOLUTE: 2.526 G_L1_RELATIVE: 14.619 G_Regularizer: 0.000 validation_error: 20.431 +(epoch: 34, iters: 59184, time: 0.547, data: 0.000) G_L1: 13.581 G_L1_ABSOLUTE: 2.575 G_L1_RELATIVE: 11.006 G_Regularizer: 0.000 validation_error: 20.176 +(epoch: 34, iters: 61184, time: 0.543, data: 0.000) G_L1: 13.479 G_L1_ABSOLUTE: 2.418 G_L1_RELATIVE: 11.061 G_Regularizer: 0.000 validation_error: 21.035 +(epoch: 34, iters: 63184, time: 0.543, data: 0.001) G_L1: 14.007 G_L1_ABSOLUTE: 2.587 G_L1_RELATIVE: 11.420 G_Regularizer: 0.000 validation_error: 20.566 +(epoch: 34, iters: 65184, time: 0.545, data: 0.000) G_L1: 11.177 G_L1_ABSOLUTE: 1.836 G_L1_RELATIVE: 9.341 G_Regularizer: 0.000 validation_error: 20.820 +(epoch: 34, iters: 67184, time: 0.549, data: 0.000) G_L1: 13.192 G_L1_ABSOLUTE: 2.545 G_L1_RELATIVE: 10.647 G_Regularizer: 0.000 validation_error: 20.567 +(epoch: 34, iters: 69184, time: 0.552, data: 0.000) G_L1: 13.467 G_L1_ABSOLUTE: 2.301 G_L1_RELATIVE: 11.166 G_Regularizer: 0.000 validation_error: 21.206 +(epoch: 34, iters: 71184, 
time: 0.538, data: 0.000) G_L1: 22.214 G_L1_ABSOLUTE: 2.170 G_L1_RELATIVE: 20.045 G_Regularizer: 0.000 validation_error: 20.506 +(epoch: 34, iters: 73184, time: 0.551, data: 0.000) G_L1: 15.430 G_L1_ABSOLUTE: 2.222 G_L1_RELATIVE: 13.208 G_Regularizer: 0.000 validation_error: 20.887 +(epoch: 34, iters: 75184, time: 0.543, data: 0.001) G_L1: 11.367 G_L1_ABSOLUTE: 2.394 G_L1_RELATIVE: 8.973 G_Regularizer: 0.000 validation_error: 19.999 +(epoch: 34, iters: 77184, time: 0.543, data: 0.001) G_L1: 13.287 G_L1_ABSOLUTE: 2.277 G_L1_RELATIVE: 11.010 G_Regularizer: 0.000 validation_error: 20.755 +(epoch: 34, iters: 79184, time: 0.556, data: 0.000) G_L1: 15.083 G_L1_ABSOLUTE: 2.538 G_L1_RELATIVE: 12.545 G_Regularizer: 0.000 validation_error: 20.891 +(epoch: 34, iters: 81184, time: 0.549, data: 0.000) G_L1: 14.232 G_L1_ABSOLUTE: 2.496 G_L1_RELATIVE: 11.736 G_Regularizer: 0.000 validation_error: 20.913 +(epoch: 34, iters: 83184, time: 0.545, data: 0.000) G_L1: 12.045 G_L1_ABSOLUTE: 2.357 G_L1_RELATIVE: 9.688 G_Regularizer: 0.000 validation_error: 20.740 +(epoch: 34, iters: 85184, time: 0.543, data: 0.000) G_L1: 11.533 G_L1_ABSOLUTE: 2.659 G_L1_RELATIVE: 8.874 G_Regularizer: 0.000 validation_error: 20.309 +(epoch: 34, iters: 87184, time: 0.550, data: 0.000) G_L1: 16.227 G_L1_ABSOLUTE: 2.760 G_L1_RELATIVE: 13.467 G_Regularizer: 0.000 validation_error: 20.386 +(epoch: 34, iters: 89184, time: 0.550, data: 0.000) G_L1: 13.429 G_L1_ABSOLUTE: 2.688 G_L1_RELATIVE: 10.741 G_Regularizer: 0.000 validation_error: 20.672 +(epoch: 34, iters: 91184, time: 0.556, data: 0.000) G_L1: 16.072 G_L1_ABSOLUTE: 2.416 G_L1_RELATIVE: 13.657 G_Regularizer: 0.000 validation_error: 20.612 +(epoch: 34, iters: 93184, time: 0.542, data: 0.000) G_L1: 14.348 G_L1_ABSOLUTE: 2.814 G_L1_RELATIVE: 11.534 G_Regularizer: 0.000 validation_error: 20.359 +(epoch: 34, iters: 95184, time: 0.545, data: 0.000) G_L1: 13.312 G_L1_ABSOLUTE: 2.171 G_L1_RELATIVE: 11.140 G_Regularizer: 0.000 validation_error: 20.706 +(epoch: 34, iters: 97184, time: 0.545, data: 0.000) G_L1: 16.660 G_L1_ABSOLUTE: 2.660 G_L1_RELATIVE: 14.000 G_Regularizer: 0.000 validation_error: 20.793 +(epoch: 34, iters: 99184, time: 0.559, data: 0.000) G_L1: 13.822 G_L1_ABSOLUTE: 2.540 G_L1_RELATIVE: 11.282 G_Regularizer: 0.000 validation_error: 20.460 +(epoch: 34, iters: 101184, time: 0.550, data: 0.000) G_L1: 13.258 G_L1_ABSOLUTE: 2.350 G_L1_RELATIVE: 10.908 G_Regularizer: 0.000 validation_error: 20.412 +(epoch: 34, iters: 103184, time: 0.553, data: 0.000) G_L1: 13.720 G_L1_ABSOLUTE: 2.506 G_L1_RELATIVE: 11.214 G_Regularizer: 0.000 validation_error: 21.352 +(epoch: 34, iters: 105184, time: 0.539, data: 0.000) G_L1: 14.298 G_L1_ABSOLUTE: 2.367 G_L1_RELATIVE: 11.931 G_Regularizer: 0.000 validation_error: 20.267 +(epoch: 34, iters: 107184, time: 0.563, data: 0.000) G_L1: 17.242 G_L1_ABSOLUTE: 3.000 G_L1_RELATIVE: 14.242 G_Regularizer: 0.000 validation_error: 20.878 +(epoch: 34, iters: 109184, time: 0.547, data: 0.001) G_L1: 14.901 G_L1_ABSOLUTE: 3.010 G_L1_RELATIVE: 11.891 G_Regularizer: 0.000 validation_error: 20.586 +(epoch: 34, iters: 111184, time: 0.539, data: 0.000) G_L1: 13.142 G_L1_ABSOLUTE: 2.692 G_L1_RELATIVE: 10.449 G_Regularizer: 0.000 validation_error: 20.900 +(epoch: 34, iters: 113184, time: 0.545, data: 0.000) G_L1: 11.338 G_L1_ABSOLUTE: 2.367 G_L1_RELATIVE: 8.971 G_Regularizer: 0.000 validation_error: 20.786 +(epoch: 34, iters: 115184, time: 0.553, data: 0.000) G_L1: 13.905 G_L1_ABSOLUTE: 2.282 G_L1_RELATIVE: 11.622 G_Regularizer: 0.000 validation_error: 20.718 +(epoch: 34, 
iters: 117184, time: 0.540, data: 0.000) G_L1: 15.692 G_L1_ABSOLUTE: 2.255 G_L1_RELATIVE: 13.437 G_Regularizer: 0.000 validation_error: 20.455 +(epoch: 34, iters: 119184, time: 0.548, data: 0.000) G_L1: 12.918 G_L1_ABSOLUTE: 2.523 G_L1_RELATIVE: 10.395 G_Regularizer: 0.000 validation_error: 20.601 +(epoch: 34, iters: 121184, time: 0.547, data: 0.000) G_L1: 15.171 G_L1_ABSOLUTE: 2.926 G_L1_RELATIVE: 12.245 G_Regularizer: 0.000 validation_error: 20.844 +(epoch: 34, iters: 123184, time: 0.541, data: 0.000) G_L1: 10.889 G_L1_ABSOLUTE: 1.943 G_L1_RELATIVE: 8.945 G_Regularizer: 0.000 validation_error: 20.569 +(epoch: 34, iters: 125184, time: 0.541, data: 0.000) G_L1: 15.342 G_L1_ABSOLUTE: 2.738 G_L1_RELATIVE: 12.603 G_Regularizer: 0.000 validation_error: 20.602 +(epoch: 34, iters: 127184, time: 0.543, data: 0.000) G_L1: 14.748 G_L1_ABSOLUTE: 2.621 G_L1_RELATIVE: 12.128 G_Regularizer: 0.000 validation_error: 20.888 +(epoch: 34, iters: 129184, time: 0.548, data: 0.000) G_L1: 15.600 G_L1_ABSOLUTE: 2.733 G_L1_RELATIVE: 12.867 G_Regularizer: 0.000 validation_error: 21.193 +(epoch: 34, iters: 131184, time: 0.548, data: 0.000) G_L1: 15.209 G_L1_ABSOLUTE: 2.145 G_L1_RELATIVE: 13.064 G_Regularizer: 0.000 validation_error: 20.852 +(epoch: 34, iters: 133184, time: 0.553, data: 0.000) G_L1: 14.867 G_L1_ABSOLUTE: 2.668 G_L1_RELATIVE: 12.199 G_Regularizer: 0.000 validation_error: 20.539 +(epoch: 34, iters: 135184, time: 0.542, data: 0.000) G_L1: 13.998 G_L1_ABSOLUTE: 2.219 G_L1_RELATIVE: 11.779 G_Regularizer: 0.000 validation_error: 20.718 +(epoch: 34, iters: 137184, time: 0.543, data: 0.000) G_L1: 15.199 G_L1_ABSOLUTE: 2.369 G_L1_RELATIVE: 12.830 G_Regularizer: 0.000 validation_error: 20.399 +(epoch: 34, iters: 139184, time: 0.549, data: 0.000) G_L1: 15.387 G_L1_ABSOLUTE: 2.467 G_L1_RELATIVE: 12.920 G_Regularizer: 0.000 validation_error: 20.858 +(epoch: 34, iters: 141184, time: 0.554, data: 0.000) G_L1: 12.215 G_L1_ABSOLUTE: 2.471 G_L1_RELATIVE: 9.744 G_Regularizer: 0.000 validation_error: 20.514 +(epoch: 34, iters: 143184, time: 0.544, data: 0.000) G_L1: 13.411 G_L1_ABSOLUTE: 2.694 G_L1_RELATIVE: 10.717 G_Regularizer: 0.000 validation_error: 20.515 +(epoch: 34, iters: 145184, time: 0.545, data: 0.000) G_L1: 11.331 G_L1_ABSOLUTE: 2.312 G_L1_RELATIVE: 9.018 G_Regularizer: 0.000 validation_error: 20.903 +(epoch: 34, iters: 147184, time: 0.547, data: 0.000) G_L1: 14.001 G_L1_ABSOLUTE: 2.502 G_L1_RELATIVE: 11.499 G_Regularizer: 0.000 validation_error: 20.455 +(epoch: 34, iters: 149184, time: 0.541, data: 0.000) G_L1: 15.105 G_L1_ABSOLUTE: 2.318 G_L1_RELATIVE: 12.787 G_Regularizer: 0.000 validation_error: 20.884 +(epoch: 34, iters: 151184, time: 0.544, data: 0.000) G_L1: 12.803 G_L1_ABSOLUTE: 2.196 G_L1_RELATIVE: 10.607 G_Regularizer: 0.000 validation_error: 20.567 +(epoch: 34, iters: 153184, time: 0.548, data: 0.000) G_L1: 15.405 G_L1_ABSOLUTE: 2.761 G_L1_RELATIVE: 12.644 G_Regularizer: 0.000 validation_error: 21.112 +(epoch: 34, iters: 155184, time: 0.547, data: 0.000) G_L1: 13.326 G_L1_ABSOLUTE: 2.528 G_L1_RELATIVE: 10.798 G_Regularizer: 0.000 validation_error: 21.130 +(epoch: 34, iters: 157184, time: 0.547, data: 0.000) G_L1: 13.622 G_L1_ABSOLUTE: 2.266 G_L1_RELATIVE: 11.356 G_Regularizer: 0.000 validation_error: 20.214 +(epoch: 34, iters: 159184, time: 0.552, data: 0.000) G_L1: 12.488 G_L1_ABSOLUTE: 2.462 G_L1_RELATIVE: 10.027 G_Regularizer: 0.000 validation_error: 20.626 +(epoch: 34, iters: 161184, time: 0.544, data: 0.001) G_L1: 13.222 G_L1_ABSOLUTE: 2.221 G_L1_RELATIVE: 11.000 G_Regularizer: 0.000 
validation_error: 20.814 +(epoch: 34, iters: 163184, time: 0.549, data: 0.000) G_L1: 14.559 G_L1_ABSOLUTE: 2.209 G_L1_RELATIVE: 12.350 G_Regularizer: 0.000 validation_error: 20.851 +(epoch: 34, iters: 165184, time: 0.544, data: 0.001) G_L1: 15.095 G_L1_ABSOLUTE: 2.360 G_L1_RELATIVE: 12.735 G_Regularizer: 0.000 validation_error: 20.625 +(epoch: 34, iters: 167184, time: 0.550, data: 0.001) G_L1: 16.368 G_L1_ABSOLUTE: 2.757 G_L1_RELATIVE: 13.611 G_Regularizer: 0.000 validation_error: 21.242 +(epoch: 34, iters: 169184, time: 0.545, data: 0.000) G_L1: 11.490 G_L1_ABSOLUTE: 2.431 G_L1_RELATIVE: 9.059 G_Regularizer: 0.000 validation_error: 20.427 +(epoch: 34, iters: 171184, time: 0.550, data: 0.000) G_L1: 14.567 G_L1_ABSOLUTE: 2.702 G_L1_RELATIVE: 11.865 G_Regularizer: 0.000 validation_error: 20.485 +(epoch: 34, iters: 173184, time: 0.559, data: 0.000) G_L1: 16.130 G_L1_ABSOLUTE: 2.526 G_L1_RELATIVE: 13.604 G_Regularizer: 0.000 validation_error: 20.821 +(epoch: 34, iters: 175184, time: 0.544, data: 0.000) G_L1: 12.206 G_L1_ABSOLUTE: 2.402 G_L1_RELATIVE: 9.803 G_Regularizer: 0.000 validation_error: 21.013 +(epoch: 34, iters: 177184, time: 0.554, data: 0.000) G_L1: 14.025 G_L1_ABSOLUTE: 2.808 G_L1_RELATIVE: 11.217 G_Regularizer: 0.000 validation_error: 21.048 +(epoch: 34, iters: 179184, time: 0.541, data: 0.000) G_L1: 12.317 G_L1_ABSOLUTE: 2.238 G_L1_RELATIVE: 10.079 G_Regularizer: 0.000 validation_error: 20.844 +(epoch: 34, iters: 181184, time: 0.549, data: 0.000) G_L1: 12.714 G_L1_ABSOLUTE: 2.368 G_L1_RELATIVE: 10.346 G_Regularizer: 0.000 validation_error: 20.701 +(epoch: 34, iters: 183184, time: 0.543, data: 0.000) G_L1: 14.825 G_L1_ABSOLUTE: 2.609 G_L1_RELATIVE: 12.216 G_Regularizer: 0.000 validation_error: 21.106 +(epoch: 34, iters: 185184, time: 0.539, data: 0.000) G_L1: 17.890 G_L1_ABSOLUTE: 2.836 G_L1_RELATIVE: 15.053 G_Regularizer: 0.000 validation_error: 20.726 +(epoch: 34, iters: 187184, time: 0.543, data: 0.000) G_L1: 14.988 G_L1_ABSOLUTE: 2.621 G_L1_RELATIVE: 12.366 G_Regularizer: 0.000 validation_error: 20.665 +(epoch: 34, iters: 189184, time: 0.550, data: 0.000) G_L1: 14.471 G_L1_ABSOLUTE: 2.711 G_L1_RELATIVE: 11.760 G_Regularizer: 0.000 validation_error: 20.363 +(epoch: 34, iters: 191184, time: 0.544, data: 0.000) G_L1: 13.362 G_L1_ABSOLUTE: 3.077 G_L1_RELATIVE: 10.284 G_Regularizer: 0.000 validation_error: 20.799 +(epoch: 34, iters: 193184, time: 0.547, data: 0.001) G_L1: 14.773 G_L1_ABSOLUTE: 3.029 G_L1_RELATIVE: 11.744 G_Regularizer: 0.000 validation_error: 21.097 +(epoch: 34, iters: 195184, time: 0.554, data: 0.000) G_L1: 16.561 G_L1_ABSOLUTE: 2.692 G_L1_RELATIVE: 13.869 G_Regularizer: 0.000 validation_error: 20.945 +(epoch: 34, iters: 197184, time: 0.542, data: 0.000) G_L1: 14.922 G_L1_ABSOLUTE: 2.223 G_L1_RELATIVE: 12.699 G_Regularizer: 0.000 validation_error: 20.464 +(epoch: 34, iters: 199184, time: 0.550, data: 0.000) G_L1: 15.048 G_L1_ABSOLUTE: 3.136 G_L1_RELATIVE: 11.912 G_Regularizer: 0.000 validation_error: 21.065 +(epoch: 34, iters: 201184, time: 0.546, data: 0.000) G_L1: 16.401 G_L1_ABSOLUTE: 2.343 G_L1_RELATIVE: 14.058 G_Regularizer: 0.000 validation_error: 20.811 +(epoch: 34, iters: 203184, time: 0.543, data: 0.000) G_L1: 15.534 G_L1_ABSOLUTE: 3.041 G_L1_RELATIVE: 12.493 G_Regularizer: 0.000 validation_error: 20.836 +(epoch: 34, iters: 205184, time: 0.551, data: 0.000) G_L1: 13.815 G_L1_ABSOLUTE: 2.003 G_L1_RELATIVE: 11.812 G_Regularizer: 0.000 validation_error: 20.949 +(epoch: 34, iters: 207184, time: 0.556, data: 0.000) G_L1: 15.092 G_L1_ABSOLUTE: 2.551 
G_L1_RELATIVE: 12.542 G_Regularizer: 0.000 validation_error: 20.656 +(epoch: 34, iters: 209184, time: 0.543, data: 0.000) G_L1: 13.630 G_L1_ABSOLUTE: 2.329 G_L1_RELATIVE: 11.301 G_Regularizer: 0.000 validation_error: 20.981 +(epoch: 34, iters: 211184, time: 0.542, data: 0.000) G_L1: 14.107 G_L1_ABSOLUTE: 2.200 G_L1_RELATIVE: 11.907 G_Regularizer: 0.000 validation_error: 20.936 +(epoch: 34, iters: 213184, time: 0.541, data: 0.000) G_L1: 16.203 G_L1_ABSOLUTE: 2.777 G_L1_RELATIVE: 13.426 G_Regularizer: 0.000 validation_error: 20.789 +(epoch: 34, iters: 215184, time: 0.555, data: 0.000) G_L1: 12.858 G_L1_ABSOLUTE: 1.922 G_L1_RELATIVE: 10.935 G_Regularizer: 0.000 validation_error: 20.816 +(epoch: 34, iters: 217184, time: 0.547, data: 0.000) G_L1: 15.288 G_L1_ABSOLUTE: 3.170 G_L1_RELATIVE: 12.118 G_Regularizer: 0.000 validation_error: 21.492 +(epoch: 34, iters: 219184, time: 0.560, data: 0.000) G_L1: 13.499 G_L1_ABSOLUTE: 2.332 G_L1_RELATIVE: 11.167 G_Regularizer: 0.000 validation_error: 20.552 +(epoch: 34, iters: 221184, time: 0.541, data: 0.000) G_L1: 13.337 G_L1_ABSOLUTE: 2.437 G_L1_RELATIVE: 10.900 G_Regularizer: 0.000 validation_error: 21.290 +(epoch: 34, iters: 223184, time: 0.550, data: 0.000) G_L1: 15.404 G_L1_ABSOLUTE: 2.694 G_L1_RELATIVE: 12.710 G_Regularizer: 0.000 validation_error: 20.922 +(epoch: 34, iters: 225184, time: 0.558, data: 0.000) G_L1: 14.200 G_L1_ABSOLUTE: 2.462 G_L1_RELATIVE: 11.737 G_Regularizer: 0.000 validation_error: 20.498 +(epoch: 34, iters: 227184, time: 0.556, data: 0.000) G_L1: 14.652 G_L1_ABSOLUTE: 2.451 G_L1_RELATIVE: 12.201 G_Regularizer: 0.000 validation_error: 20.614 +(epoch: 34, iters: 229184, time: 0.546, data: 0.000) G_L1: 12.858 G_L1_ABSOLUTE: 2.863 G_L1_RELATIVE: 9.995 G_Regularizer: 0.000 validation_error: 21.277 +(epoch: 34, iters: 231184, time: 0.543, data: 0.000) G_L1: 13.539 G_L1_ABSOLUTE: 2.303 G_L1_RELATIVE: 11.236 G_Regularizer: 0.000 validation_error: 20.814 +(epoch: 34, iters: 233184, time: 0.547, data: 0.000) G_L1: 14.691 G_L1_ABSOLUTE: 2.533 G_L1_RELATIVE: 12.158 G_Regularizer: 0.000 validation_error: 20.492 +(epoch: 34, iters: 235184, time: 0.555, data: 0.000) G_L1: 13.630 G_L1_ABSOLUTE: 2.436 G_L1_RELATIVE: 11.194 G_Regularizer: 0.000 validation_error: 20.702 +(epoch: 34, iters: 237184, time: 0.546, data: 0.000) G_L1: 16.811 G_L1_ABSOLUTE: 2.526 G_L1_RELATIVE: 14.285 G_Regularizer: 0.000 validation_error: 19.916 +(epoch: 34, iters: 239184, time: 0.548, data: 0.000) G_L1: 14.660 G_L1_ABSOLUTE: 2.886 G_L1_RELATIVE: 11.774 G_Regularizer: 0.000 validation_error: 20.624 +(epoch: 34, iters: 241184, time: 0.554, data: 0.000) G_L1: 13.469 G_L1_ABSOLUTE: 2.448 G_L1_RELATIVE: 11.020 G_Regularizer: 0.000 validation_error: 20.342 +(epoch: 34, iters: 243184, time: 0.547, data: 0.000) G_L1: 15.489 G_L1_ABSOLUTE: 2.095 G_L1_RELATIVE: 13.393 G_Regularizer: 0.000 validation_error: 20.657 +(epoch: 34, iters: 245184, time: 0.550, data: 0.000) G_L1: 16.412 G_L1_ABSOLUTE: 2.721 G_L1_RELATIVE: 13.691 G_Regularizer: 0.000 validation_error: 20.266 +(epoch: 34, iters: 247184, time: 0.543, data: 0.000) G_L1: 14.881 G_L1_ABSOLUTE: 2.560 G_L1_RELATIVE: 12.321 G_Regularizer: 0.000 validation_error: 20.669 +(epoch: 34, iters: 249184, time: 0.542, data: 0.000) G_L1: 14.717 G_L1_ABSOLUTE: 2.585 G_L1_RELATIVE: 12.132 G_Regularizer: 0.000 validation_error: 20.744 +(epoch: 34, iters: 251184, time: 0.549, data: 0.000) G_L1: 13.605 G_L1_ABSOLUTE: 2.846 G_L1_RELATIVE: 10.759 G_Regularizer: 0.000 validation_error: 20.798 +(epoch: 34, iters: 253184, time: 0.554, data: 0.001) 
G_L1: 10.945 G_L1_ABSOLUTE: 2.246 G_L1_RELATIVE: 8.699 G_Regularizer: 0.000 validation_error: 21.468 +(epoch: 34, iters: 255184, time: 0.546, data: 0.000) G_L1: 15.271 G_L1_ABSOLUTE: 2.281 G_L1_RELATIVE: 12.990 G_Regularizer: 0.000 validation_error: 20.926 +(epoch: 34, iters: 257184, time: 0.552, data: 0.000) G_L1: 23.669 G_L1_ABSOLUTE: 2.943 G_L1_RELATIVE: 20.726 G_Regularizer: 0.000 validation_error: 20.570 +(epoch: 34, iters: 259184, time: 0.543, data: 0.000) G_L1: 13.679 G_L1_ABSOLUTE: 2.386 G_L1_RELATIVE: 11.293 G_Regularizer: 0.000 validation_error: 21.213 +(epoch: 34, iters: 261184, time: 0.548, data: 0.000) G_L1: 12.633 G_L1_ABSOLUTE: 2.180 G_L1_RELATIVE: 10.453 G_Regularizer: 0.000 validation_error: 20.658 +(epoch: 34, iters: 263184, time: 0.556, data: 0.000) G_L1: 14.253 G_L1_ABSOLUTE: 2.620 G_L1_RELATIVE: 11.633 G_Regularizer: 0.000 validation_error: 20.734 +(epoch: 34, iters: 265184, time: 0.543, data: 0.000) G_L1: 13.934 G_L1_ABSOLUTE: 2.947 G_L1_RELATIVE: 10.987 G_Regularizer: 0.000 validation_error: 20.575 +(epoch: 34, iters: 267184, time: 0.553, data: 0.000) G_L1: 13.701 G_L1_ABSOLUTE: 2.384 G_L1_RELATIVE: 11.316 G_Regularizer: 0.000 validation_error: 20.695 +(epoch: 34, iters: 269184, time: 0.551, data: 0.000) G_L1: 16.795 G_L1_ABSOLUTE: 2.637 G_L1_RELATIVE: 14.158 G_Regularizer: 0.000 validation_error: 20.981 +(epoch: 34, iters: 271184, time: 0.556, data: 0.000) G_L1: 15.317 G_L1_ABSOLUTE: 2.699 G_L1_RELATIVE: 12.618 G_Regularizer: 0.000 validation_error: 21.102 +(epoch: 34, iters: 273184, time: 0.543, data: 0.000) G_L1: 13.312 G_L1_ABSOLUTE: 2.158 G_L1_RELATIVE: 11.155 G_Regularizer: 0.000 validation_error: 20.828 +(epoch: 34, iters: 275184, time: 0.555, data: 0.000) G_L1: 12.181 G_L1_ABSOLUTE: 2.371 G_L1_RELATIVE: 9.811 G_Regularizer: 0.000 validation_error: 20.963 +(epoch: 34, iters: 277184, time: 0.541, data: 0.000) G_L1: 13.430 G_L1_ABSOLUTE: 2.235 G_L1_RELATIVE: 11.195 G_Regularizer: 0.000 validation_error: 20.590 +(epoch: 34, iters: 279184, time: 0.549, data: 0.000) G_L1: 15.151 G_L1_ABSOLUTE: 2.319 G_L1_RELATIVE: 12.831 G_Regularizer: 0.000 validation_error: 20.652 +(epoch: 34, iters: 281184, time: 0.550, data: 0.000) G_L1: 14.077 G_L1_ABSOLUTE: 2.580 G_L1_RELATIVE: 11.497 G_Regularizer: 0.000 validation_error: 20.948 +(epoch: 34, iters: 283184, time: 0.550, data: 0.000) G_L1: 13.556 G_L1_ABSOLUTE: 2.839 G_L1_RELATIVE: 10.718 G_Regularizer: 0.000 validation_error: 20.394 +(epoch: 34, iters: 285184, time: 0.540, data: 0.000) G_L1: 16.008 G_L1_ABSOLUTE: 3.011 G_L1_RELATIVE: 12.997 G_Regularizer: 0.000 validation_error: 20.404 +(epoch: 34, iters: 287184, time: 0.540, data: 0.000) G_L1: 13.901 G_L1_ABSOLUTE: 2.459 G_L1_RELATIVE: 11.441 G_Regularizer: 0.000 validation_error: 20.586 +(epoch: 34, iters: 289184, time: 0.545, data: 0.000) G_L1: 16.710 G_L1_ABSOLUTE: 3.333 G_L1_RELATIVE: 13.377 G_Regularizer: 0.000 validation_error: 20.471 +(epoch: 34, iters: 291184, time: 0.544, data: 0.000) G_L1: 14.125 G_L1_ABSOLUTE: 2.494 G_L1_RELATIVE: 11.631 G_Regularizer: 0.000 validation_error: 20.720 +(epoch: 34, iters: 293184, time: 0.550, data: 0.000) G_L1: 16.259 G_L1_ABSOLUTE: 2.385 G_L1_RELATIVE: 13.874 G_Regularizer: 0.000 validation_error: 20.761 +(epoch: 34, iters: 295184, time: 0.548, data: 0.000) G_L1: 13.856 G_L1_ABSOLUTE: 2.487 G_L1_RELATIVE: 11.368 G_Regularizer: 0.000 validation_error: 21.087 +(epoch: 34, iters: 297184, time: 0.539, data: 0.000) G_L1: 16.735 G_L1_ABSOLUTE: 2.597 G_L1_RELATIVE: 14.138 G_Regularizer: 0.000 validation_error: 20.966 +(epoch: 34, iters: 
299184, time: 0.551, data: 0.000) G_L1: 14.549 G_L1_ABSOLUTE: 2.741 G_L1_RELATIVE: 11.808 G_Regularizer: 0.000 validation_error: 20.287 +(epoch: 34, iters: 301184, time: 0.546, data: 0.000) G_L1: 14.377 G_L1_ABSOLUTE: 2.778 G_L1_RELATIVE: 11.599 G_Regularizer: 0.000 validation_error: 20.341 +(epoch: 35, iters: 432, time: 0.561, data: 0.000) G_L1: 15.108 G_L1_ABSOLUTE: 2.605 G_L1_RELATIVE: 12.504 G_Regularizer: 0.000 validation_error: 21.294 +(epoch: 35, iters: 2432, time: 0.544, data: 0.000) G_L1: 13.208 G_L1_ABSOLUTE: 2.476 G_L1_RELATIVE: 10.732 G_Regularizer: 0.000 validation_error: 21.066 +(epoch: 35, iters: 4432, time: 0.547, data: 0.000) G_L1: 13.142 G_L1_ABSOLUTE: 2.357 G_L1_RELATIVE: 10.785 G_Regularizer: 0.000 validation_error: 20.497 +(epoch: 35, iters: 6432, time: 0.547, data: 0.000) G_L1: 13.918 G_L1_ABSOLUTE: 2.319 G_L1_RELATIVE: 11.599 G_Regularizer: 0.000 validation_error: 20.547 +(epoch: 35, iters: 8432, time: 0.543, data: 0.000) G_L1: 14.711 G_L1_ABSOLUTE: 2.553 G_L1_RELATIVE: 12.158 G_Regularizer: 0.000 validation_error: 20.705 +(epoch: 35, iters: 10432, time: 0.540, data: 0.000) G_L1: 12.538 G_L1_ABSOLUTE: 2.367 G_L1_RELATIVE: 10.172 G_Regularizer: 0.000 validation_error: 20.777 +(epoch: 35, iters: 12432, time: 0.550, data: 0.000) G_L1: 14.281 G_L1_ABSOLUTE: 2.600 G_L1_RELATIVE: 11.681 G_Regularizer: 0.000 validation_error: 20.645 +(epoch: 35, iters: 14432, time: 0.553, data: 0.000) G_L1: 15.156 G_L1_ABSOLUTE: 2.651 G_L1_RELATIVE: 12.505 G_Regularizer: 0.000 validation_error: 20.821 +(epoch: 35, iters: 16432, time: 0.541, data: 0.000) G_L1: 14.692 G_L1_ABSOLUTE: 2.721 G_L1_RELATIVE: 11.971 G_Regularizer: 0.000 validation_error: 20.552 +(epoch: 35, iters: 18432, time: 0.548, data: 0.000) G_L1: 14.659 G_L1_ABSOLUTE: 2.811 G_L1_RELATIVE: 11.848 G_Regularizer: 0.000 validation_error: 20.774 +(epoch: 35, iters: 20432, time: 0.546, data: 0.000) G_L1: 27.341 G_L1_ABSOLUTE: 2.481 G_L1_RELATIVE: 24.860 G_Regularizer: 0.000 validation_error: 21.140 +(epoch: 35, iters: 22432, time: 0.544, data: 0.000) G_L1: 17.350 G_L1_ABSOLUTE: 2.470 G_L1_RELATIVE: 14.880 G_Regularizer: 0.000 validation_error: 21.065 +(epoch: 35, iters: 24432, time: 0.551, data: 0.000) G_L1: 10.966 G_L1_ABSOLUTE: 2.247 G_L1_RELATIVE: 8.719 G_Regularizer: 0.000 validation_error: 21.127 +(epoch: 35, iters: 26432, time: 0.547, data: 0.000) G_L1: 13.850 G_L1_ABSOLUTE: 2.404 G_L1_RELATIVE: 11.446 G_Regularizer: 0.000 validation_error: 21.013 +(epoch: 35, iters: 28432, time: 0.545, data: 0.000) G_L1: 13.254 G_L1_ABSOLUTE: 2.413 G_L1_RELATIVE: 10.841 G_Regularizer: 0.000 validation_error: 20.601 +(epoch: 35, iters: 30432, time: 0.546, data: 0.000) G_L1: 14.489 G_L1_ABSOLUTE: 2.955 G_L1_RELATIVE: 11.534 G_Regularizer: 0.000 validation_error: 20.693 +(epoch: 35, iters: 32432, time: 0.556, data: 0.000) G_L1: 17.096 G_L1_ABSOLUTE: 2.813 G_L1_RELATIVE: 14.283 G_Regularizer: 0.000 validation_error: 21.144 +(epoch: 35, iters: 34432, time: 0.553, data: 0.000) G_L1: 13.948 G_L1_ABSOLUTE: 2.369 G_L1_RELATIVE: 11.579 G_Regularizer: 0.000 validation_error: 20.898 +(epoch: 35, iters: 36432, time: 0.547, data: 0.001) G_L1: 13.091 G_L1_ABSOLUTE: 2.458 G_L1_RELATIVE: 10.633 G_Regularizer: 0.000 validation_error: 20.591 +(epoch: 35, iters: 38432, time: 0.546, data: 0.000) G_L1: 14.237 G_L1_ABSOLUTE: 2.345 G_L1_RELATIVE: 11.892 G_Regularizer: 0.000 validation_error: 20.686 +(epoch: 35, iters: 40432, time: 0.548, data: 0.000) G_L1: 12.291 G_L1_ABSOLUTE: 2.087 G_L1_RELATIVE: 10.205 G_Regularizer: 0.000 validation_error: 21.088 +(epoch: 35, 
iters: 42432, time: 0.538, data: 0.000) G_L1: 14.414 G_L1_ABSOLUTE: 2.453 G_L1_RELATIVE: 11.961 G_Regularizer: 0.000 validation_error: 20.570 +(epoch: 35, iters: 44432, time: 0.547, data: 0.000) G_L1: 15.494 G_L1_ABSOLUTE: 2.312 G_L1_RELATIVE: 13.182 G_Regularizer: 0.000 validation_error: 21.045 +(epoch: 35, iters: 46432, time: 0.543, data: 0.000) G_L1: 18.387 G_L1_ABSOLUTE: 2.674 G_L1_RELATIVE: 15.713 G_Regularizer: 0.000 validation_error: 20.824 +(epoch: 35, iters: 48432, time: 0.546, data: 0.001) G_L1: 12.920 G_L1_ABSOLUTE: 2.596 G_L1_RELATIVE: 10.324 G_Regularizer: 0.000 validation_error: 20.383 +(epoch: 35, iters: 50432, time: 0.546, data: 0.000) G_L1: 13.810 G_L1_ABSOLUTE: 2.526 G_L1_RELATIVE: 11.284 G_Regularizer: 0.000 validation_error: 21.008 +(epoch: 35, iters: 52432, time: 0.548, data: 0.000) G_L1: 13.354 G_L1_ABSOLUTE: 2.762 G_L1_RELATIVE: 10.592 G_Regularizer: 0.000 validation_error: 21.333 +(epoch: 35, iters: 54432, time: 0.547, data: 0.001) G_L1: 10.727 G_L1_ABSOLUTE: 2.441 G_L1_RELATIVE: 8.286 G_Regularizer: 0.000 validation_error: 20.831 +(epoch: 35, iters: 56432, time: 0.547, data: 0.000) G_L1: 14.973 G_L1_ABSOLUTE: 2.604 G_L1_RELATIVE: 12.369 G_Regularizer: 0.000 validation_error: 21.081 +(epoch: 35, iters: 58432, time: 0.546, data: 0.000) G_L1: 15.700 G_L1_ABSOLUTE: 3.142 G_L1_RELATIVE: 12.557 G_Regularizer: 0.000 validation_error: 20.710 +(epoch: 35, iters: 60432, time: 0.550, data: 0.000) G_L1: 12.650 G_L1_ABSOLUTE: 2.194 G_L1_RELATIVE: 10.456 G_Regularizer: 0.000 validation_error: 21.641 +(epoch: 35, iters: 62432, time: 0.545, data: 0.000) G_L1: 14.885 G_L1_ABSOLUTE: 2.809 G_L1_RELATIVE: 12.076 G_Regularizer: 0.000 validation_error: 21.093 +(epoch: 35, iters: 64432, time: 0.539, data: 0.000) G_L1: 12.725 G_L1_ABSOLUTE: 2.610 G_L1_RELATIVE: 10.115 G_Regularizer: 0.000 validation_error: 21.083 +(epoch: 35, iters: 66432, time: 0.548, data: 0.000) G_L1: 15.069 G_L1_ABSOLUTE: 2.363 G_L1_RELATIVE: 12.706 G_Regularizer: 0.000 validation_error: 20.792 +(epoch: 35, iters: 68432, time: 0.544, data: 0.000) G_L1: 12.469 G_L1_ABSOLUTE: 2.458 G_L1_RELATIVE: 10.012 G_Regularizer: 0.000 validation_error: 20.547 +(epoch: 35, iters: 70432, time: 0.544, data: 0.001) G_L1: 13.441 G_L1_ABSOLUTE: 2.595 G_L1_RELATIVE: 10.845 G_Regularizer: 0.000 validation_error: 20.430 +(epoch: 35, iters: 72432, time: 0.542, data: 0.001) G_L1: 14.156 G_L1_ABSOLUTE: 2.480 G_L1_RELATIVE: 11.676 G_Regularizer: 0.000 validation_error: 20.778 +(epoch: 35, iters: 74432, time: 0.548, data: 0.000) G_L1: 14.899 G_L1_ABSOLUTE: 2.365 G_L1_RELATIVE: 12.535 G_Regularizer: 0.000 validation_error: 20.467 +(epoch: 35, iters: 76432, time: 0.550, data: 0.000) G_L1: 11.970 G_L1_ABSOLUTE: 2.282 G_L1_RELATIVE: 9.688 G_Regularizer: 0.000 validation_error: 21.268 +(epoch: 35, iters: 78432, time: 0.550, data: 0.000) G_L1: 12.159 G_L1_ABSOLUTE: 2.187 G_L1_RELATIVE: 9.972 G_Regularizer: 0.000 validation_error: 20.809 +(epoch: 35, iters: 80432, time: 0.546, data: 0.001) G_L1: 14.338 G_L1_ABSOLUTE: 2.484 G_L1_RELATIVE: 11.854 G_Regularizer: 0.000 validation_error: 20.900 +(epoch: 35, iters: 82432, time: 0.543, data: 0.000) G_L1: 14.446 G_L1_ABSOLUTE: 2.981 G_L1_RELATIVE: 11.465 G_Regularizer: 0.000 validation_error: 21.268 +(epoch: 35, iters: 84432, time: 0.541, data: 0.000) G_L1: 14.229 G_L1_ABSOLUTE: 2.573 G_L1_RELATIVE: 11.657 G_Regularizer: 0.000 validation_error: 21.146 +(epoch: 35, iters: 86432, time: 0.545, data: 0.000) G_L1: 14.126 G_L1_ABSOLUTE: 2.386 G_L1_RELATIVE: 11.739 G_Regularizer: 0.000 validation_error: 20.595 
+(epoch: 35, iters: 88432, time: 0.546, data: 0.000) G_L1: 13.250 G_L1_ABSOLUTE: 2.412 G_L1_RELATIVE: 10.837 G_Regularizer: 0.000 validation_error: 21.387 +(epoch: 35, iters: 90432, time: 0.542, data: 0.000) G_L1: 13.639 G_L1_ABSOLUTE: 2.810 G_L1_RELATIVE: 10.829 G_Regularizer: 0.000 validation_error: 20.779 +(epoch: 35, iters: 92432, time: 0.542, data: 0.000) G_L1: 14.748 G_L1_ABSOLUTE: 3.043 G_L1_RELATIVE: 11.705 G_Regularizer: 0.000 validation_error: 20.630 +(epoch: 35, iters: 94432, time: 0.541, data: 0.000) G_L1: 12.252 G_L1_ABSOLUTE: 2.424 G_L1_RELATIVE: 9.828 G_Regularizer: 0.000 validation_error: 20.540 +(epoch: 35, iters: 96432, time: 0.547, data: 0.000) G_L1: 15.842 G_L1_ABSOLUTE: 2.269 G_L1_RELATIVE: 13.573 G_Regularizer: 0.000 validation_error: 20.732 +(epoch: 35, iters: 98432, time: 0.546, data: 0.000) G_L1: 15.805 G_L1_ABSOLUTE: 2.629 G_L1_RELATIVE: 13.176 G_Regularizer: 0.000 validation_error: 20.674 +(epoch: 35, iters: 100432, time: 0.546, data: 0.000) G_L1: 13.318 G_L1_ABSOLUTE: 2.207 G_L1_RELATIVE: 11.111 G_Regularizer: 0.000 validation_error: 20.403 +(epoch: 35, iters: 102432, time: 0.547, data: 0.000) G_L1: 17.161 G_L1_ABSOLUTE: 2.749 G_L1_RELATIVE: 14.412 G_Regularizer: 0.000 validation_error: 20.682 +(epoch: 35, iters: 104432, time: 0.546, data: 0.000) G_L1: 13.643 G_L1_ABSOLUTE: 2.815 G_L1_RELATIVE: 10.828 G_Regularizer: 0.000 validation_error: 21.214 +(epoch: 35, iters: 106432, time: 0.552, data: 0.000) G_L1: 16.303 G_L1_ABSOLUTE: 2.220 G_L1_RELATIVE: 14.083 G_Regularizer: 0.000 validation_error: 20.864 +(epoch: 35, iters: 108432, time: 0.549, data: 0.000) G_L1: 14.148 G_L1_ABSOLUTE: 1.935 G_L1_RELATIVE: 12.214 G_Regularizer: 0.000 validation_error: 20.570 +(epoch: 35, iters: 110432, time: 0.548, data: 0.000) G_L1: 14.823 G_L1_ABSOLUTE: 2.469 G_L1_RELATIVE: 12.354 G_Regularizer: 0.000 validation_error: 20.725 +(epoch: 35, iters: 112432, time: 0.546, data: 0.000) G_L1: 14.131 G_L1_ABSOLUTE: 2.472 G_L1_RELATIVE: 11.659 G_Regularizer: 0.000 validation_error: 20.744 +(epoch: 35, iters: 114432, time: 0.544, data: 0.000) G_L1: 13.438 G_L1_ABSOLUTE: 2.529 G_L1_RELATIVE: 10.909 G_Regularizer: 0.000 validation_error: 20.763 +(epoch: 35, iters: 116432, time: 0.545, data: 0.000) G_L1: 13.549 G_L1_ABSOLUTE: 2.186 G_L1_RELATIVE: 11.362 G_Regularizer: 0.000 validation_error: 20.763 +(epoch: 35, iters: 118432, time: 0.549, data: 0.001) G_L1: 14.729 G_L1_ABSOLUTE: 2.798 G_L1_RELATIVE: 11.930 G_Regularizer: 0.000 validation_error: 20.456 +(epoch: 35, iters: 120432, time: 0.544, data: 0.000) G_L1: 13.506 G_L1_ABSOLUTE: 2.479 G_L1_RELATIVE: 11.027 G_Regularizer: 0.000 validation_error: 21.247 +(epoch: 35, iters: 122432, time: 0.542, data: 0.000) G_L1: 15.021 G_L1_ABSOLUTE: 2.555 G_L1_RELATIVE: 12.467 G_Regularizer: 0.000 validation_error: 20.918 +(epoch: 35, iters: 124432, time: 0.547, data: 0.000) G_L1: 12.166 G_L1_ABSOLUTE: 2.228 G_L1_RELATIVE: 9.938 G_Regularizer: 0.000 validation_error: 20.850 +(epoch: 35, iters: 126432, time: 0.543, data: 0.000) G_L1: 13.504 G_L1_ABSOLUTE: 2.254 G_L1_RELATIVE: 11.251 G_Regularizer: 0.000 validation_error: 21.091 +(epoch: 35, iters: 128432, time: 0.561, data: 0.000) G_L1: 13.455 G_L1_ABSOLUTE: 2.271 G_L1_RELATIVE: 11.184 G_Regularizer: 0.000 validation_error: 20.906 +(epoch: 35, iters: 130432, time: 0.550, data: 0.000) G_L1: 15.314 G_L1_ABSOLUTE: 2.663 G_L1_RELATIVE: 12.651 G_Regularizer: 0.000 validation_error: 20.845 +(epoch: 35, iters: 132432, time: 0.545, data: 0.000) G_L1: 15.193 G_L1_ABSOLUTE: 2.568 G_L1_RELATIVE: 12.625 G_Regularizer: 0.000 
+[... loss_log.txt continues: auto-generated generator training records for epochs 35-38, one entry every 2,000 iterations, all in the fixed format
+"(epoch: E, iters: I, time: T, data: D) G_L1: x G_L1_ABSOLUTE: x G_L1_RELATIVE: x G_Regularizer: 0.000 validation_error: x".
+Across this span G_L1 mostly stays between 10 and 20 (occasional spikes up to ~26), G_Regularizer is always 0.000, and validation_error hovers around 20-21.]
+(epoch: 38, iters: 184176, time: 0.532, data: 0.000) G_L1: 13.878 G_L1_ABSOLUTE: 2.601 G_L1_RELATIVE: 11.277 G_Regularizer: 0.000 validation_error: 20.737 +(epoch: 38, iters: 186176, time: 0.539, data: 0.000) G_L1: 12.997 G_L1_ABSOLUTE: 2.258 G_L1_RELATIVE: 10.739 G_Regularizer: 0.000 validation_error: 21.015 +(epoch: 38, iters: 188176, time: 0.538, data: 0.000) G_L1: 14.409 G_L1_ABSOLUTE: 2.284 G_L1_RELATIVE: 12.125 G_Regularizer: 0.000 validation_error: 20.923 +(epoch: 38, iters: 190176, time: 0.544, data: 0.000) G_L1: 14.019 G_L1_ABSOLUTE: 2.574 G_L1_RELATIVE: 11.444 G_Regularizer: 0.000 validation_error: 20.952 +(epoch: 38, iters: 192176, time: 0.536, data: 0.001) G_L1: 13.911 G_L1_ABSOLUTE: 2.259 G_L1_RELATIVE: 11.653 G_Regularizer: 0.000 validation_error: 21.366 +(epoch: 38, iters: 194176, time: 0.542, data: 0.000) G_L1: 15.196 G_L1_ABSOLUTE: 2.908 G_L1_RELATIVE: 12.288 G_Regularizer: 0.000 validation_error: 20.830 +(epoch: 38, iters: 196176, time: 0.537, data: 0.000) G_L1: 20.692 G_L1_ABSOLUTE: 2.904 G_L1_RELATIVE: 17.788 G_Regularizer: 0.000 validation_error: 20.936 +(epoch: 38, iters: 198176, time: 0.547, data: 0.000) G_L1: 12.780 G_L1_ABSOLUTE: 2.524 G_L1_RELATIVE: 10.256 G_Regularizer: 0.000 validation_error: 20.985 +(epoch: 38, iters: 200176, time: 0.537, data: 0.000) G_L1: 13.877 G_L1_ABSOLUTE: 2.513 G_L1_RELATIVE: 11.364 G_Regularizer: 0.000 validation_error: 20.700 +(epoch: 38, iters: 202176, time: 0.541, data: 0.000) G_L1: 14.712 G_L1_ABSOLUTE: 2.688 G_L1_RELATIVE: 12.024 G_Regularizer: 0.000 validation_error: 21.329 +(epoch: 38, iters: 204176, time: 0.540, data: 0.000) G_L1: 14.898 G_L1_ABSOLUTE: 2.442 G_L1_RELATIVE: 12.456 G_Regularizer: 0.000 validation_error: 20.877 +(epoch: 38, iters: 206176, time: 0.543, data: 0.000) G_L1: 12.426 G_L1_ABSOLUTE: 2.502 G_L1_RELATIVE: 9.924 G_Regularizer: 0.000 validation_error: 20.964 +(epoch: 38, iters: 208176, time: 0.539, data: 0.000) G_L1: 13.448 G_L1_ABSOLUTE: 2.503 G_L1_RELATIVE: 10.946 G_Regularizer: 0.000 validation_error: 20.979 +(epoch: 38, iters: 210176, time: 0.530, data: 0.000) G_L1: 13.728 G_L1_ABSOLUTE: 2.368 G_L1_RELATIVE: 11.361 G_Regularizer: 0.000 validation_error: 20.651 +(epoch: 38, iters: 212176, time: 0.543, data: 0.000) G_L1: 13.261 G_L1_ABSOLUTE: 2.193 G_L1_RELATIVE: 11.068 G_Regularizer: 0.000 validation_error: 20.801 +(epoch: 38, iters: 214176, time: 0.538, data: 0.001) G_L1: 16.541 G_L1_ABSOLUTE: 2.826 G_L1_RELATIVE: 13.715 G_Regularizer: 0.000 validation_error: 21.139 +(epoch: 38, iters: 216176, time: 0.539, data: 0.000) G_L1: 16.186 G_L1_ABSOLUTE: 2.818 G_L1_RELATIVE: 13.367 G_Regularizer: 0.000 validation_error: 20.649 +(epoch: 38, iters: 218176, time: 0.538, data: 0.000) G_L1: 14.750 G_L1_ABSOLUTE: 2.226 G_L1_RELATIVE: 12.523 G_Regularizer: 0.000 validation_error: 20.657 +(epoch: 38, iters: 220176, time: 0.539, data: 0.000) G_L1: 14.231 G_L1_ABSOLUTE: 2.224 G_L1_RELATIVE: 12.008 G_Regularizer: 0.000 validation_error: 20.469 +(epoch: 38, iters: 222176, time: 0.539, data: 0.000) G_L1: 13.746 G_L1_ABSOLUTE: 3.117 G_L1_RELATIVE: 10.629 G_Regularizer: 0.000 validation_error: 21.262 +(epoch: 38, iters: 224176, time: 0.542, data: 0.000) G_L1: 15.530 G_L1_ABSOLUTE: 2.473 G_L1_RELATIVE: 13.057 G_Regularizer: 0.000 validation_error: 20.800 +(epoch: 38, iters: 226176, time: 0.535, data: 0.000) G_L1: 15.602 G_L1_ABSOLUTE: 2.447 G_L1_RELATIVE: 13.155 G_Regularizer: 0.000 validation_error: 20.651 +(epoch: 38, iters: 228176, time: 0.541, data: 0.000) G_L1: 20.649 G_L1_ABSOLUTE: 2.326 G_L1_RELATIVE: 18.323 
G_Regularizer: 0.000 validation_error: 20.685 +(epoch: 38, iters: 230176, time: 0.542, data: 0.000) G_L1: 15.236 G_L1_ABSOLUTE: 2.586 G_L1_RELATIVE: 12.649 G_Regularizer: 0.000 validation_error: 20.386 +(epoch: 38, iters: 232176, time: 0.539, data: 0.000) G_L1: 15.066 G_L1_ABSOLUTE: 2.473 G_L1_RELATIVE: 12.592 G_Regularizer: 0.000 validation_error: 20.581 +(epoch: 38, iters: 234176, time: 0.539, data: 0.001) G_L1: 14.797 G_L1_ABSOLUTE: 2.434 G_L1_RELATIVE: 12.363 G_Regularizer: 0.000 validation_error: 21.187 +(epoch: 38, iters: 236176, time: 0.545, data: 0.000) G_L1: 12.323 G_L1_ABSOLUTE: 2.594 G_L1_RELATIVE: 9.729 G_Regularizer: 0.000 validation_error: 20.940 +(epoch: 38, iters: 238176, time: 0.541, data: 0.000) G_L1: 13.334 G_L1_ABSOLUTE: 2.436 G_L1_RELATIVE: 10.898 G_Regularizer: 0.000 validation_error: 20.373 +(epoch: 38, iters: 240176, time: 0.546, data: 0.000) G_L1: 13.343 G_L1_ABSOLUTE: 2.408 G_L1_RELATIVE: 10.935 G_Regularizer: 0.000 validation_error: 20.674 +(epoch: 38, iters: 242176, time: 0.541, data: 0.000) G_L1: 14.764 G_L1_ABSOLUTE: 2.568 G_L1_RELATIVE: 12.196 G_Regularizer: 0.000 validation_error: 21.050 +(epoch: 38, iters: 244176, time: 0.547, data: 0.000) G_L1: 15.758 G_L1_ABSOLUTE: 2.386 G_L1_RELATIVE: 13.371 G_Regularizer: 0.000 validation_error: 20.384 +(epoch: 38, iters: 246176, time: 0.546, data: 0.000) G_L1: 13.677 G_L1_ABSOLUTE: 2.165 G_L1_RELATIVE: 11.512 G_Regularizer: 0.000 validation_error: 20.889 +(epoch: 38, iters: 248176, time: 0.541, data: 0.000) G_L1: 12.901 G_L1_ABSOLUTE: 2.298 G_L1_RELATIVE: 10.603 G_Regularizer: 0.000 validation_error: 20.969 +(epoch: 38, iters: 250176, time: 0.547, data: 0.000) G_L1: 16.396 G_L1_ABSOLUTE: 2.352 G_L1_RELATIVE: 14.044 G_Regularizer: 0.000 validation_error: 20.816 +(epoch: 38, iters: 252176, time: 0.542, data: 0.000) G_L1: 14.126 G_L1_ABSOLUTE: 2.738 G_L1_RELATIVE: 11.388 G_Regularizer: 0.000 validation_error: 20.640 +(epoch: 38, iters: 254176, time: 0.539, data: 0.000) G_L1: 15.742 G_L1_ABSOLUTE: 2.522 G_L1_RELATIVE: 13.220 G_Regularizer: 0.000 validation_error: 20.652 +(epoch: 38, iters: 256176, time: 0.539, data: 0.000) G_L1: 14.959 G_L1_ABSOLUTE: 2.574 G_L1_RELATIVE: 12.385 G_Regularizer: 0.000 validation_error: 20.692 +(epoch: 38, iters: 258176, time: 0.542, data: 0.000) G_L1: 14.760 G_L1_ABSOLUTE: 2.733 G_L1_RELATIVE: 12.027 G_Regularizer: 0.000 validation_error: 20.957 +(epoch: 38, iters: 260176, time: 0.543, data: 0.000) G_L1: 14.052 G_L1_ABSOLUTE: 2.584 G_L1_RELATIVE: 11.468 G_Regularizer: 0.000 validation_error: 20.870 +(epoch: 38, iters: 262176, time: 0.541, data: 0.000) G_L1: 15.193 G_L1_ABSOLUTE: 2.811 G_L1_RELATIVE: 12.382 G_Regularizer: 0.000 validation_error: 20.782 +(epoch: 38, iters: 264176, time: 0.541, data: 0.000) G_L1: 13.122 G_L1_ABSOLUTE: 2.188 G_L1_RELATIVE: 10.934 G_Regularizer: 0.000 validation_error: 20.849 +(epoch: 38, iters: 266176, time: 0.538, data: 0.000) G_L1: 13.409 G_L1_ABSOLUTE: 2.139 G_L1_RELATIVE: 11.270 G_Regularizer: 0.000 validation_error: 21.042 +(epoch: 38, iters: 268176, time: 0.546, data: 0.001) G_L1: 13.196 G_L1_ABSOLUTE: 2.338 G_L1_RELATIVE: 10.858 G_Regularizer: 0.000 validation_error: 20.970 +(epoch: 38, iters: 270176, time: 0.534, data: 0.000) G_L1: 15.325 G_L1_ABSOLUTE: 2.526 G_L1_RELATIVE: 12.800 G_Regularizer: 0.000 validation_error: 20.736 +(epoch: 38, iters: 272176, time: 0.543, data: 0.000) G_L1: 15.162 G_L1_ABSOLUTE: 2.823 G_L1_RELATIVE: 12.339 G_Regularizer: 0.000 validation_error: 21.010 +(epoch: 38, iters: 274176, time: 0.546, data: 0.000) G_L1: 11.841 
G_L1_ABSOLUTE: 2.259 G_L1_RELATIVE: 9.581 G_Regularizer: 0.000 validation_error: 21.324 +(epoch: 38, iters: 276176, time: 0.549, data: 0.000) G_L1: 13.307 G_L1_ABSOLUTE: 2.098 G_L1_RELATIVE: 11.208 G_Regularizer: 0.000 validation_error: 20.700 +(epoch: 38, iters: 278176, time: 0.524, data: 0.000) G_L1: 13.719 G_L1_ABSOLUTE: 2.780 G_L1_RELATIVE: 10.939 G_Regularizer: 0.000 validation_error: 20.843 +(epoch: 38, iters: 280176, time: 0.534, data: 0.000) G_L1: 14.010 G_L1_ABSOLUTE: 3.018 G_L1_RELATIVE: 10.992 G_Regularizer: 0.000 validation_error: 20.643 +(epoch: 38, iters: 282176, time: 0.538, data: 0.000) G_L1: 15.020 G_L1_ABSOLUTE: 2.414 G_L1_RELATIVE: 12.606 G_Regularizer: 0.000 validation_error: 20.707 +(epoch: 38, iters: 284176, time: 0.541, data: 0.000) G_L1: 13.529 G_L1_ABSOLUTE: 2.504 G_L1_RELATIVE: 11.026 G_Regularizer: 0.000 validation_error: 20.613 +(epoch: 38, iters: 286176, time: 0.542, data: 0.000) G_L1: 15.800 G_L1_ABSOLUTE: 2.616 G_L1_RELATIVE: 13.184 G_Regularizer: 0.000 validation_error: 20.520 +(epoch: 38, iters: 288176, time: 0.544, data: 0.000) G_L1: 12.757 G_L1_ABSOLUTE: 2.453 G_L1_RELATIVE: 10.304 G_Regularizer: 0.000 validation_error: 21.168 +(epoch: 38, iters: 290176, time: 0.551, data: 0.000) G_L1: 13.296 G_L1_ABSOLUTE: 2.371 G_L1_RELATIVE: 10.925 G_Regularizer: 0.000 validation_error: 20.865 +(epoch: 38, iters: 292176, time: 0.536, data: 0.000) G_L1: 14.027 G_L1_ABSOLUTE: 2.939 G_L1_RELATIVE: 11.089 G_Regularizer: 0.000 validation_error: 20.893 +(epoch: 38, iters: 294176, time: 0.534, data: 0.000) G_L1: 11.779 G_L1_ABSOLUTE: 1.975 G_L1_RELATIVE: 9.804 G_Regularizer: 0.000 validation_error: 20.676 +(epoch: 38, iters: 296176, time: 0.541, data: 0.000) G_L1: 15.051 G_L1_ABSOLUTE: 2.427 G_L1_RELATIVE: 12.624 G_Regularizer: 0.000 validation_error: 20.644 +(epoch: 38, iters: 298176, time: 0.544, data: 0.000) G_L1: 12.588 G_L1_ABSOLUTE: 2.134 G_L1_RELATIVE: 10.455 G_Regularizer: 0.000 validation_error: 20.460 +(epoch: 38, iters: 300176, time: 0.545, data: 0.000) G_L1: 14.091 G_L1_ABSOLUTE: 2.384 G_L1_RELATIVE: 11.708 G_Regularizer: 0.000 validation_error: 21.418 +(epoch: 38, iters: 302176, time: 0.545, data: 0.000) G_L1: 14.047 G_L1_ABSOLUTE: 2.150 G_L1_RELATIVE: 11.897 G_Regularizer: 0.000 validation_error: 20.683 +(epoch: 39, iters: 1424, time: 0.532, data: 0.000) G_L1: 14.844 G_L1_ABSOLUTE: 2.183 G_L1_RELATIVE: 12.661 G_Regularizer: 0.000 validation_error: 20.911 +(epoch: 39, iters: 3424, time: 0.548, data: 0.000) G_L1: 12.174 G_L1_ABSOLUTE: 2.487 G_L1_RELATIVE: 9.687 G_Regularizer: 0.000 validation_error: 20.792 +(epoch: 39, iters: 5424, time: 0.542, data: 0.000) G_L1: 11.743 G_L1_ABSOLUTE: 1.980 G_L1_RELATIVE: 9.763 G_Regularizer: 0.000 validation_error: 20.802 +(epoch: 39, iters: 7424, time: 0.548, data: 0.000) G_L1: 14.694 G_L1_ABSOLUTE: 2.125 G_L1_RELATIVE: 12.569 G_Regularizer: 0.000 validation_error: 20.925 +(epoch: 39, iters: 9424, time: 0.538, data: 0.001) G_L1: 14.174 G_L1_ABSOLUTE: 2.285 G_L1_RELATIVE: 11.889 G_Regularizer: 0.000 validation_error: 21.087 +(epoch: 39, iters: 11424, time: 0.543, data: 0.001) G_L1: 14.830 G_L1_ABSOLUTE: 2.478 G_L1_RELATIVE: 12.352 G_Regularizer: 0.000 validation_error: 20.884 +(epoch: 39, iters: 13424, time: 0.533, data: 0.000) G_L1: 13.771 G_L1_ABSOLUTE: 2.865 G_L1_RELATIVE: 10.906 G_Regularizer: 0.000 validation_error: 20.962 +(epoch: 39, iters: 15424, time: 0.546, data: 0.000) G_L1: 14.888 G_L1_ABSOLUTE: 2.388 G_L1_RELATIVE: 12.499 G_Regularizer: 0.000 validation_error: 20.576 +(epoch: 39, iters: 17424, time: 0.547, data: 
0.000) G_L1: 13.508 G_L1_ABSOLUTE: 2.982 G_L1_RELATIVE: 10.526 G_Regularizer: 0.000 validation_error: 20.941 +(epoch: 39, iters: 19424, time: 0.537, data: 0.000) G_L1: 13.126 G_L1_ABSOLUTE: 2.513 G_L1_RELATIVE: 10.613 G_Regularizer: 0.000 validation_error: 20.712 +(epoch: 39, iters: 21424, time: 0.545, data: 0.000) G_L1: 14.570 G_L1_ABSOLUTE: 3.111 G_L1_RELATIVE: 11.459 G_Regularizer: 0.000 validation_error: 20.592 +(epoch: 39, iters: 23424, time: 0.540, data: 0.000) G_L1: 13.792 G_L1_ABSOLUTE: 2.380 G_L1_RELATIVE: 11.412 G_Regularizer: 0.000 validation_error: 21.383 +(epoch: 39, iters: 25424, time: 0.546, data: 0.000) G_L1: 14.204 G_L1_ABSOLUTE: 2.680 G_L1_RELATIVE: 11.524 G_Regularizer: 0.000 validation_error: 20.432 +(epoch: 39, iters: 27424, time: 0.541, data: 0.000) G_L1: 13.481 G_L1_ABSOLUTE: 2.737 G_L1_RELATIVE: 10.744 G_Regularizer: 0.000 validation_error: 21.035 +(epoch: 39, iters: 29424, time: 0.541, data: 0.000) G_L1: 14.124 G_L1_ABSOLUTE: 2.770 G_L1_RELATIVE: 11.354 G_Regularizer: 0.000 validation_error: 20.827 +(epoch: 39, iters: 31424, time: 0.541, data: 0.000) G_L1: 14.669 G_L1_ABSOLUTE: 2.580 G_L1_RELATIVE: 12.089 G_Regularizer: 0.000 validation_error: 20.729 +(epoch: 39, iters: 33424, time: 0.542, data: 0.000) G_L1: 15.470 G_L1_ABSOLUTE: 2.955 G_L1_RELATIVE: 12.515 G_Regularizer: 0.000 validation_error: 21.068 +(epoch: 39, iters: 35424, time: 0.541, data: 0.000) G_L1: 13.683 G_L1_ABSOLUTE: 2.937 G_L1_RELATIVE: 10.746 G_Regularizer: 0.000 validation_error: 20.987 +(epoch: 39, iters: 37424, time: 0.544, data: 0.000) G_L1: 13.217 G_L1_ABSOLUTE: 2.403 G_L1_RELATIVE: 10.815 G_Regularizer: 0.000 validation_error: 20.899 +(epoch: 39, iters: 39424, time: 0.538, data: 0.000) G_L1: 24.063 G_L1_ABSOLUTE: 2.740 G_L1_RELATIVE: 21.323 G_Regularizer: 0.000 validation_error: 20.666 +(epoch: 39, iters: 41424, time: 0.541, data: 0.000) G_L1: 16.581 G_L1_ABSOLUTE: 2.452 G_L1_RELATIVE: 14.129 G_Regularizer: 0.000 validation_error: 20.890 +(epoch: 39, iters: 43424, time: 0.542, data: 0.001) G_L1: 14.113 G_L1_ABSOLUTE: 2.447 G_L1_RELATIVE: 11.666 G_Regularizer: 0.000 validation_error: 21.416 +(epoch: 39, iters: 45424, time: 0.545, data: 0.000) G_L1: 11.419 G_L1_ABSOLUTE: 2.373 G_L1_RELATIVE: 9.046 G_Regularizer: 0.000 validation_error: 20.668 +(epoch: 39, iters: 47424, time: 0.538, data: 0.001) G_L1: 12.732 G_L1_ABSOLUTE: 2.255 G_L1_RELATIVE: 10.476 G_Regularizer: 0.000 validation_error: 21.097 +(epoch: 39, iters: 49424, time: 0.541, data: 0.000) G_L1: 15.876 G_L1_ABSOLUTE: 2.573 G_L1_RELATIVE: 13.302 G_Regularizer: 0.000 validation_error: 21.171 +(epoch: 39, iters: 51424, time: 0.541, data: 0.000) G_L1: 14.098 G_L1_ABSOLUTE: 2.911 G_L1_RELATIVE: 11.186 G_Regularizer: 0.000 validation_error: 20.933 +(epoch: 39, iters: 53424, time: 0.544, data: 0.000) G_L1: 13.286 G_L1_ABSOLUTE: 2.540 G_L1_RELATIVE: 10.746 G_Regularizer: 0.000 validation_error: 20.466 +(epoch: 39, iters: 55424, time: 0.542, data: 0.000) G_L1: 12.995 G_L1_ABSOLUTE: 2.659 G_L1_RELATIVE: 10.336 G_Regularizer: 0.000 validation_error: 21.227 +(epoch: 39, iters: 57424, time: 0.542, data: 0.001) G_L1: 17.724 G_L1_ABSOLUTE: 2.370 G_L1_RELATIVE: 15.354 G_Regularizer: 0.000 validation_error: 20.386 +(epoch: 39, iters: 59424, time: 0.540, data: 0.000) G_L1: 15.137 G_L1_ABSOLUTE: 2.376 G_L1_RELATIVE: 12.761 G_Regularizer: 0.000 validation_error: 20.851 +(epoch: 39, iters: 61424, time: 0.537, data: 0.000) G_L1: 14.749 G_L1_ABSOLUTE: 2.437 G_L1_RELATIVE: 12.311 G_Regularizer: 0.000 validation_error: 20.721 +(epoch: 39, iters: 63424, time: 
0.543, data: 0.001) G_L1: 13.935 G_L1_ABSOLUTE: 2.259 G_L1_RELATIVE: 11.676 G_Regularizer: 0.000 validation_error: 21.064 +(epoch: 39, iters: 65424, time: 0.543, data: 0.000) G_L1: 12.096 G_L1_ABSOLUTE: 2.153 G_L1_RELATIVE: 9.943 G_Regularizer: 0.000 validation_error: 20.509 +(epoch: 39, iters: 67424, time: 0.545, data: 0.000) G_L1: 14.398 G_L1_ABSOLUTE: 2.332 G_L1_RELATIVE: 12.065 G_Regularizer: 0.000 validation_error: 20.826 +(epoch: 39, iters: 69424, time: 0.537, data: 0.001) G_L1: 14.968 G_L1_ABSOLUTE: 3.076 G_L1_RELATIVE: 11.892 G_Regularizer: 0.000 validation_error: 20.800 +(epoch: 39, iters: 71424, time: 0.536, data: 0.001) G_L1: 14.318 G_L1_ABSOLUTE: 2.327 G_L1_RELATIVE: 11.991 G_Regularizer: 0.000 validation_error: 21.104 +(epoch: 39, iters: 73424, time: 0.542, data: 0.000) G_L1: 13.797 G_L1_ABSOLUTE: 2.821 G_L1_RELATIVE: 10.976 G_Regularizer: 0.000 validation_error: 20.900 +(epoch: 39, iters: 75424, time: 0.544, data: 0.001) G_L1: 15.121 G_L1_ABSOLUTE: 3.140 G_L1_RELATIVE: 11.981 G_Regularizer: 0.000 validation_error: 21.007 +(epoch: 39, iters: 77424, time: 0.540, data: 0.000) G_L1: 13.510 G_L1_ABSOLUTE: 2.519 G_L1_RELATIVE: 10.991 G_Regularizer: 0.000 validation_error: 20.778 +(epoch: 39, iters: 79424, time: 0.544, data: 0.000) G_L1: 14.367 G_L1_ABSOLUTE: 2.323 G_L1_RELATIVE: 12.044 G_Regularizer: 0.000 validation_error: 20.743 +(epoch: 39, iters: 81424, time: 0.540, data: 0.000) G_L1: 15.942 G_L1_ABSOLUTE: 2.936 G_L1_RELATIVE: 13.006 G_Regularizer: 0.000 validation_error: 20.634 +(epoch: 39, iters: 83424, time: 0.542, data: 0.000) G_L1: 14.078 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 11.624 G_Regularizer: 0.000 validation_error: 20.850 +(epoch: 39, iters: 85424, time: 0.542, data: 0.000) G_L1: 14.920 G_L1_ABSOLUTE: 2.122 G_L1_RELATIVE: 12.797 G_Regularizer: 0.000 validation_error: 20.996 +(epoch: 39, iters: 87424, time: 0.540, data: 0.000) G_L1: 14.167 G_L1_ABSOLUTE: 2.752 G_L1_RELATIVE: 11.415 G_Regularizer: 0.000 validation_error: 20.539 +(epoch: 39, iters: 89424, time: 0.545, data: 0.000) G_L1: 11.949 G_L1_ABSOLUTE: 2.503 G_L1_RELATIVE: 9.446 G_Regularizer: 0.000 validation_error: 20.946 +(epoch: 39, iters: 91424, time: 0.537, data: 0.000) G_L1: 11.065 G_L1_ABSOLUTE: 2.293 G_L1_RELATIVE: 8.772 G_Regularizer: 0.000 validation_error: 21.169 +(epoch: 39, iters: 93424, time: 0.545, data: 0.000) G_L1: 11.189 G_L1_ABSOLUTE: 2.569 G_L1_RELATIVE: 8.620 G_Regularizer: 0.000 validation_error: 21.112 +(epoch: 39, iters: 95424, time: 0.549, data: 0.001) G_L1: 15.107 G_L1_ABSOLUTE: 2.877 G_L1_RELATIVE: 12.230 G_Regularizer: 0.000 validation_error: 20.553 +(epoch: 39, iters: 97424, time: 0.542, data: 0.000) G_L1: 15.318 G_L1_ABSOLUTE: 2.518 G_L1_RELATIVE: 12.800 G_Regularizer: 0.000 validation_error: 20.927 +(epoch: 39, iters: 99424, time: 0.543, data: 0.000) G_L1: 13.345 G_L1_ABSOLUTE: 2.138 G_L1_RELATIVE: 11.207 G_Regularizer: 0.000 validation_error: 21.212 +(epoch: 39, iters: 101424, time: 0.543, data: 0.001) G_L1: 13.989 G_L1_ABSOLUTE: 2.742 G_L1_RELATIVE: 11.247 G_Regularizer: 0.000 validation_error: 21.026 +(epoch: 39, iters: 103424, time: 0.540, data: 0.000) G_L1: 14.894 G_L1_ABSOLUTE: 3.144 G_L1_RELATIVE: 11.750 G_Regularizer: 0.000 validation_error: 20.530 +(epoch: 39, iters: 105424, time: 0.539, data: 0.000) G_L1: 13.646 G_L1_ABSOLUTE: 2.516 G_L1_RELATIVE: 11.131 G_Regularizer: 0.000 validation_error: 20.566 +(epoch: 39, iters: 107424, time: 0.542, data: 0.000) G_L1: 12.621 G_L1_ABSOLUTE: 2.228 G_L1_RELATIVE: 10.393 G_Regularizer: 0.000 validation_error: 21.103 +(epoch: 39, iters: 
109424, time: 0.538, data: 0.000) G_L1: 14.827 G_L1_ABSOLUTE: 2.721 G_L1_RELATIVE: 12.107 G_Regularizer: 0.000 validation_error: 20.616 +(epoch: 39, iters: 111424, time: 0.542, data: 0.000) G_L1: 14.047 G_L1_ABSOLUTE: 2.810 G_L1_RELATIVE: 11.237 G_Regularizer: 0.000 validation_error: 21.190 +(epoch: 39, iters: 113424, time: 0.524, data: 0.001) G_L1: 14.037 G_L1_ABSOLUTE: 2.321 G_L1_RELATIVE: 11.716 G_Regularizer: 0.000 validation_error: 20.892 +(epoch: 39, iters: 115424, time: 0.540, data: 0.000) G_L1: 15.471 G_L1_ABSOLUTE: 2.827 G_L1_RELATIVE: 12.644 G_Regularizer: 0.000 validation_error: 21.159 +(epoch: 39, iters: 117424, time: 0.544, data: 0.000) G_L1: 15.030 G_L1_ABSOLUTE: 2.504 G_L1_RELATIVE: 12.525 G_Regularizer: 0.000 validation_error: 20.924 +(epoch: 39, iters: 119424, time: 0.542, data: 0.000) G_L1: 15.885 G_L1_ABSOLUTE: 2.369 G_L1_RELATIVE: 13.516 G_Regularizer: 0.000 validation_error: 21.244 +(epoch: 39, iters: 121424, time: 0.535, data: 0.000) G_L1: 15.028 G_L1_ABSOLUTE: 2.390 G_L1_RELATIVE: 12.639 G_Regularizer: 0.000 validation_error: 20.652 +(epoch: 39, iters: 123424, time: 0.541, data: 0.000) G_L1: 14.175 G_L1_ABSOLUTE: 2.421 G_L1_RELATIVE: 11.754 G_Regularizer: 0.000 validation_error: 21.165 +(epoch: 39, iters: 125424, time: 0.541, data: 0.000) G_L1: 12.162 G_L1_ABSOLUTE: 2.291 G_L1_RELATIVE: 9.870 G_Regularizer: 0.000 validation_error: 21.172 +(epoch: 39, iters: 127424, time: 0.543, data: 0.000) G_L1: 16.847 G_L1_ABSOLUTE: 3.025 G_L1_RELATIVE: 13.822 G_Regularizer: 0.000 validation_error: 20.511 +(epoch: 39, iters: 129424, time: 0.542, data: 0.000) G_L1: 12.637 G_L1_ABSOLUTE: 2.076 G_L1_RELATIVE: 10.561 G_Regularizer: 0.000 validation_error: 20.976 +(epoch: 39, iters: 131424, time: 0.536, data: 0.000) G_L1: 13.061 G_L1_ABSOLUTE: 2.519 G_L1_RELATIVE: 10.543 G_Regularizer: 0.000 validation_error: 21.331 +(epoch: 39, iters: 133424, time: 0.543, data: 0.000) G_L1: 14.440 G_L1_ABSOLUTE: 2.567 G_L1_RELATIVE: 11.873 G_Regularizer: 0.000 validation_error: 20.919 +(epoch: 39, iters: 135424, time: 0.541, data: 0.000) G_L1: 15.554 G_L1_ABSOLUTE: 2.368 G_L1_RELATIVE: 13.186 G_Regularizer: 0.000 validation_error: 20.914 +(epoch: 39, iters: 137424, time: 0.543, data: 0.000) G_L1: 13.646 G_L1_ABSOLUTE: 2.584 G_L1_RELATIVE: 11.062 G_Regularizer: 0.000 validation_error: 20.576 +(epoch: 39, iters: 139424, time: 0.535, data: 0.000) G_L1: 12.292 G_L1_ABSOLUTE: 2.402 G_L1_RELATIVE: 9.890 G_Regularizer: 0.000 validation_error: 21.135 +(epoch: 39, iters: 141424, time: 0.538, data: 0.000) G_L1: 13.164 G_L1_ABSOLUTE: 2.659 G_L1_RELATIVE: 10.505 G_Regularizer: 0.000 validation_error: 20.527 +(epoch: 39, iters: 143424, time: 0.542, data: 0.000) G_L1: 14.892 G_L1_ABSOLUTE: 2.322 G_L1_RELATIVE: 12.571 G_Regularizer: 0.000 validation_error: 21.454 +(epoch: 39, iters: 145424, time: 0.541, data: 0.000) G_L1: 12.512 G_L1_ABSOLUTE: 2.360 G_L1_RELATIVE: 10.151 G_Regularizer: 0.000 validation_error: 20.518 +(epoch: 39, iters: 147424, time: 0.537, data: 0.000) G_L1: 15.870 G_L1_ABSOLUTE: 2.426 G_L1_RELATIVE: 13.444 G_Regularizer: 0.000 validation_error: 20.766 +(epoch: 39, iters: 149424, time: 0.540, data: 0.000) G_L1: 15.958 G_L1_ABSOLUTE: 2.425 G_L1_RELATIVE: 13.533 G_Regularizer: 0.000 validation_error: 20.188 +(epoch: 39, iters: 151424, time: 0.544, data: 0.000) G_L1: 16.256 G_L1_ABSOLUTE: 2.642 G_L1_RELATIVE: 13.614 G_Regularizer: 0.000 validation_error: 20.715 +(epoch: 39, iters: 153424, time: 0.542, data: 0.000) G_L1: 14.178 G_L1_ABSOLUTE: 2.736 G_L1_RELATIVE: 11.442 G_Regularizer: 0.000 
validation_error: 21.381 +(epoch: 39, iters: 155424, time: 0.539, data: 0.000) G_L1: 12.463 G_L1_ABSOLUTE: 2.547 G_L1_RELATIVE: 9.916 G_Regularizer: 0.000 validation_error: 20.877 +(epoch: 39, iters: 157424, time: 0.530, data: 0.000) G_L1: 13.798 G_L1_ABSOLUTE: 2.397 G_L1_RELATIVE: 11.401 G_Regularizer: 0.000 validation_error: 20.627 +(epoch: 39, iters: 159424, time: 0.538, data: 0.000) G_L1: 14.811 G_L1_ABSOLUTE: 2.334 G_L1_RELATIVE: 12.477 G_Regularizer: 0.000 validation_error: 20.909 +(epoch: 39, iters: 161424, time: 0.539, data: 0.000) G_L1: 14.122 G_L1_ABSOLUTE: 2.425 G_L1_RELATIVE: 11.697 G_Regularizer: 0.000 validation_error: 20.701 +(epoch: 39, iters: 163424, time: 0.541, data: 0.000) G_L1: 13.110 G_L1_ABSOLUTE: 2.624 G_L1_RELATIVE: 10.486 G_Regularizer: 0.000 validation_error: 20.894 +(epoch: 39, iters: 165424, time: 0.537, data: 0.000) G_L1: 14.945 G_L1_ABSOLUTE: 2.279 G_L1_RELATIVE: 12.666 G_Regularizer: 0.000 validation_error: 21.020 +(epoch: 39, iters: 167424, time: 0.544, data: 0.000) G_L1: 12.862 G_L1_ABSOLUTE: 2.391 G_L1_RELATIVE: 10.472 G_Regularizer: 0.000 validation_error: 20.842 +(epoch: 39, iters: 169424, time: 0.543, data: 0.000) G_L1: 11.935 G_L1_ABSOLUTE: 2.635 G_L1_RELATIVE: 9.300 G_Regularizer: 0.000 validation_error: 21.083 +(epoch: 39, iters: 171424, time: 0.544, data: 0.000) G_L1: 14.485 G_L1_ABSOLUTE: 2.476 G_L1_RELATIVE: 12.008 G_Regularizer: 0.000 validation_error: 20.601 +(epoch: 39, iters: 173424, time: 0.539, data: 0.001) G_L1: 14.638 G_L1_ABSOLUTE: 2.559 G_L1_RELATIVE: 12.079 G_Regularizer: 0.000 validation_error: 20.932 +(epoch: 39, iters: 175424, time: 0.537, data: 0.000) G_L1: 15.872 G_L1_ABSOLUTE: 2.561 G_L1_RELATIVE: 13.311 G_Regularizer: 0.000 validation_error: 20.554 +(epoch: 39, iters: 177424, time: 0.549, data: 0.000) G_L1: 11.961 G_L1_ABSOLUTE: 2.120 G_L1_RELATIVE: 9.841 G_Regularizer: 0.000 validation_error: 20.623 +(epoch: 39, iters: 179424, time: 0.538, data: 0.000) G_L1: 15.014 G_L1_ABSOLUTE: 2.561 G_L1_RELATIVE: 12.453 G_Regularizer: 0.000 validation_error: 20.888 +(epoch: 39, iters: 181424, time: 0.535, data: 0.000) G_L1: 15.708 G_L1_ABSOLUTE: 2.773 G_L1_RELATIVE: 12.935 G_Regularizer: 0.000 validation_error: 20.790 +(epoch: 39, iters: 183424, time: 0.537, data: 0.001) G_L1: 12.920 G_L1_ABSOLUTE: 2.462 G_L1_RELATIVE: 10.458 G_Regularizer: 0.000 validation_error: 20.887 +(epoch: 39, iters: 185424, time: 0.539, data: 0.000) G_L1: 13.314 G_L1_ABSOLUTE: 2.636 G_L1_RELATIVE: 10.678 G_Regularizer: 0.000 validation_error: 20.872 +(epoch: 39, iters: 187424, time: 0.540, data: 0.001) G_L1: 12.708 G_L1_ABSOLUTE: 2.051 G_L1_RELATIVE: 10.657 G_Regularizer: 0.000 validation_error: 20.636 +(epoch: 39, iters: 189424, time: 0.546, data: 0.000) G_L1: 16.990 G_L1_ABSOLUTE: 2.686 G_L1_RELATIVE: 14.304 G_Regularizer: 0.000 validation_error: 20.905 +(epoch: 39, iters: 191424, time: 0.536, data: 0.000) G_L1: 13.874 G_L1_ABSOLUTE: 2.781 G_L1_RELATIVE: 11.093 G_Regularizer: 0.000 validation_error: 20.304 +(epoch: 39, iters: 193424, time: 0.539, data: 0.000) G_L1: 14.722 G_L1_ABSOLUTE: 2.543 G_L1_RELATIVE: 12.179 G_Regularizer: 0.000 validation_error: 20.487 +(epoch: 39, iters: 195424, time: 0.543, data: 0.000) G_L1: 15.065 G_L1_ABSOLUTE: 2.550 G_L1_RELATIVE: 12.515 G_Regularizer: 0.000 validation_error: 20.906 +(epoch: 39, iters: 197424, time: 0.547, data: 0.000) G_L1: 11.881 G_L1_ABSOLUTE: 2.171 G_L1_RELATIVE: 9.711 G_Regularizer: 0.000 validation_error: 20.955 +(epoch: 39, iters: 199424, time: 0.539, data: 0.000) G_L1: 13.134 G_L1_ABSOLUTE: 2.774 
G_L1_RELATIVE: 10.359 G_Regularizer: 0.000 validation_error: 21.057 +(epoch: 39, iters: 201424, time: 0.544, data: 0.000) G_L1: 15.879 G_L1_ABSOLUTE: 2.406 G_L1_RELATIVE: 13.473 G_Regularizer: 0.000 validation_error: 20.651 +(epoch: 39, iters: 203424, time: 0.545, data: 0.000) G_L1: 14.215 G_L1_ABSOLUTE: 2.685 G_L1_RELATIVE: 11.530 G_Regularizer: 0.000 validation_error: 21.004 +(epoch: 39, iters: 205424, time: 0.539, data: 0.000) G_L1: 15.453 G_L1_ABSOLUTE: 2.174 G_L1_RELATIVE: 13.279 G_Regularizer: 0.000 validation_error: 20.961 +(epoch: 39, iters: 207424, time: 0.538, data: 0.000) G_L1: 15.155 G_L1_ABSOLUTE: 2.450 G_L1_RELATIVE: 12.705 G_Regularizer: 0.000 validation_error: 20.873 +(epoch: 39, iters: 209424, time: 0.538, data: 0.000) G_L1: 14.244 G_L1_ABSOLUTE: 2.443 G_L1_RELATIVE: 11.801 G_Regularizer: 0.000 validation_error: 20.584 +(epoch: 39, iters: 211424, time: 0.547, data: 0.001) G_L1: 13.553 G_L1_ABSOLUTE: 2.480 G_L1_RELATIVE: 11.073 G_Regularizer: 0.000 validation_error: 20.709 +(epoch: 39, iters: 213424, time: 0.546, data: 0.000) G_L1: 12.236 G_L1_ABSOLUTE: 2.089 G_L1_RELATIVE: 10.148 G_Regularizer: 0.000 validation_error: 20.642 +(epoch: 39, iters: 215424, time: 0.544, data: 0.000) G_L1: 14.512 G_L1_ABSOLUTE: 2.095 G_L1_RELATIVE: 12.417 G_Regularizer: 0.000 validation_error: 20.438 +(epoch: 39, iters: 217424, time: 0.537, data: 0.000) G_L1: 12.221 G_L1_ABSOLUTE: 2.572 G_L1_RELATIVE: 9.649 G_Regularizer: 0.000 validation_error: 20.855 +(epoch: 39, iters: 219424, time: 0.550, data: 0.000) G_L1: 14.180 G_L1_ABSOLUTE: 2.905 G_L1_RELATIVE: 11.275 G_Regularizer: 0.000 validation_error: 20.983 +(epoch: 39, iters: 221424, time: 0.558, data: 0.000) G_L1: 14.195 G_L1_ABSOLUTE: 2.742 G_L1_RELATIVE: 11.453 G_Regularizer: 0.000 validation_error: 20.826 +(epoch: 39, iters: 223424, time: 0.556, data: 0.001) G_L1: 13.123 G_L1_ABSOLUTE: 2.378 G_L1_RELATIVE: 10.745 G_Regularizer: 0.000 validation_error: 20.433 +(epoch: 39, iters: 225424, time: 0.538, data: 0.000) G_L1: 15.493 G_L1_ABSOLUTE: 2.954 G_L1_RELATIVE: 12.539 G_Regularizer: 0.000 validation_error: 20.701 +(epoch: 39, iters: 227424, time: 0.554, data: 0.000) G_L1: 14.385 G_L1_ABSOLUTE: 2.381 G_L1_RELATIVE: 12.004 G_Regularizer: 0.000 validation_error: 20.602 +(epoch: 39, iters: 229424, time: 0.543, data: 0.000) G_L1: 14.897 G_L1_ABSOLUTE: 2.407 G_L1_RELATIVE: 12.490 G_Regularizer: 0.000 validation_error: 21.317 +(epoch: 39, iters: 231424, time: 0.556, data: 0.000) G_L1: 13.115 G_L1_ABSOLUTE: 2.210 G_L1_RELATIVE: 10.905 G_Regularizer: 0.000 validation_error: 20.676 +(epoch: 39, iters: 233424, time: 0.548, data: 0.000) G_L1: 13.395 G_L1_ABSOLUTE: 2.605 G_L1_RELATIVE: 10.790 G_Regularizer: 0.000 validation_error: 20.487 +(epoch: 39, iters: 235424, time: 0.551, data: 0.001) G_L1: 13.899 G_L1_ABSOLUTE: 2.452 G_L1_RELATIVE: 11.446 G_Regularizer: 0.000 validation_error: 20.531 +(epoch: 39, iters: 237424, time: 0.551, data: 0.000) G_L1: 13.591 G_L1_ABSOLUTE: 2.766 G_L1_RELATIVE: 10.825 G_Regularizer: 0.000 validation_error: 20.647 +(epoch: 39, iters: 239424, time: 0.551, data: 0.000) G_L1: 14.379 G_L1_ABSOLUTE: 2.382 G_L1_RELATIVE: 11.997 G_Regularizer: 0.000 validation_error: 20.565 +(epoch: 39, iters: 241424, time: 0.547, data: 0.000) G_L1: 15.929 G_L1_ABSOLUTE: 2.936 G_L1_RELATIVE: 12.993 G_Regularizer: 0.000 validation_error: 20.898 +(epoch: 39, iters: 243424, time: 0.556, data: 0.000) G_L1: 12.747 G_L1_ABSOLUTE: 2.742 G_L1_RELATIVE: 10.005 G_Regularizer: 0.000 validation_error: 20.890 +(epoch: 39, iters: 245424, time: 0.555, data: 0.000) 
G_L1: 12.735 G_L1_ABSOLUTE: 2.261 G_L1_RELATIVE: 10.474 G_Regularizer: 0.000 validation_error: 21.088 +(epoch: 39, iters: 247424, time: 0.554, data: 0.000) G_L1: 13.421 G_L1_ABSOLUTE: 2.934 G_L1_RELATIVE: 10.488 G_Regularizer: 0.000 validation_error: 21.091 +(epoch: 39, iters: 249424, time: 0.555, data: 0.000) G_L1: 14.009 G_L1_ABSOLUTE: 2.316 G_L1_RELATIVE: 11.693 G_Regularizer: 0.000 validation_error: 20.810 +(epoch: 39, iters: 251424, time: 0.563, data: 0.000) G_L1: 17.074 G_L1_ABSOLUTE: 2.921 G_L1_RELATIVE: 14.154 G_Regularizer: 0.000 validation_error: 20.656 +(epoch: 39, iters: 253424, time: 0.551, data: 0.000) G_L1: 15.411 G_L1_ABSOLUTE: 2.546 G_L1_RELATIVE: 12.864 G_Regularizer: 0.000 validation_error: 20.711 +(epoch: 39, iters: 255424, time: 0.555, data: 0.000) G_L1: 13.196 G_L1_ABSOLUTE: 2.400 G_L1_RELATIVE: 10.796 G_Regularizer: 0.000 validation_error: 20.738 +(epoch: 39, iters: 257424, time: 0.554, data: 0.000) G_L1: 14.129 G_L1_ABSOLUTE: 2.794 G_L1_RELATIVE: 11.335 G_Regularizer: 0.000 validation_error: 21.329 +(epoch: 39, iters: 259424, time: 0.548, data: 0.000) G_L1: 14.082 G_L1_ABSOLUTE: 2.616 G_L1_RELATIVE: 11.466 G_Regularizer: 0.000 validation_error: 20.817 +(epoch: 39, iters: 261424, time: 0.548, data: 0.000) G_L1: 15.934 G_L1_ABSOLUTE: 2.841 G_L1_RELATIVE: 13.093 G_Regularizer: 0.000 validation_error: 21.010 +(epoch: 39, iters: 263424, time: 0.550, data: 0.000) G_L1: 15.937 G_L1_ABSOLUTE: 2.938 G_L1_RELATIVE: 12.999 G_Regularizer: 0.000 validation_error: 20.652 +(epoch: 39, iters: 265424, time: 0.555, data: 0.000) G_L1: 15.392 G_L1_ABSOLUTE: 2.669 G_L1_RELATIVE: 12.723 G_Regularizer: 0.000 validation_error: 20.996 +(epoch: 39, iters: 267424, time: 0.550, data: 0.000) G_L1: 12.420 G_L1_ABSOLUTE: 2.322 G_L1_RELATIVE: 10.098 G_Regularizer: 0.000 validation_error: 20.970 +(epoch: 39, iters: 269424, time: 0.553, data: 0.000) G_L1: 15.722 G_L1_ABSOLUTE: 2.528 G_L1_RELATIVE: 13.194 G_Regularizer: 0.000 validation_error: 20.483 +(epoch: 39, iters: 271424, time: 0.556, data: 0.000) G_L1: 13.586 G_L1_ABSOLUTE: 2.402 G_L1_RELATIVE: 11.184 G_Regularizer: 0.000 validation_error: 20.636 +(epoch: 39, iters: 273424, time: 0.541, data: 0.000) G_L1: 14.064 G_L1_ABSOLUTE: 2.439 G_L1_RELATIVE: 11.625 G_Regularizer: 0.000 validation_error: 20.721 +(epoch: 39, iters: 275424, time: 0.556, data: 0.000) G_L1: 15.616 G_L1_ABSOLUTE: 2.130 G_L1_RELATIVE: 13.486 G_Regularizer: 0.000 validation_error: 20.670 +(epoch: 39, iters: 277424, time: 0.549, data: 0.000) G_L1: 12.700 G_L1_ABSOLUTE: 2.359 G_L1_RELATIVE: 10.342 G_Regularizer: 0.000 validation_error: 20.523 +(epoch: 39, iters: 279424, time: 0.555, data: 0.000) G_L1: 13.904 G_L1_ABSOLUTE: 2.601 G_L1_RELATIVE: 11.303 G_Regularizer: 0.000 validation_error: 20.694 +(epoch: 39, iters: 281424, time: 0.558, data: 0.000) G_L1: 12.649 G_L1_ABSOLUTE: 2.434 G_L1_RELATIVE: 10.214 G_Regularizer: 0.000 validation_error: 20.998 +(epoch: 39, iters: 283424, time: 0.547, data: 0.000) G_L1: 13.580 G_L1_ABSOLUTE: 2.217 G_L1_RELATIVE: 11.362 G_Regularizer: 0.000 validation_error: 20.658 +(epoch: 39, iters: 285424, time: 0.549, data: 0.000) G_L1: 13.243 G_L1_ABSOLUTE: 2.195 G_L1_RELATIVE: 11.048 G_Regularizer: 0.000 validation_error: 20.981 +(epoch: 39, iters: 287424, time: 0.561, data: 0.001) G_L1: 17.081 G_L1_ABSOLUTE: 2.482 G_L1_RELATIVE: 14.599 G_Regularizer: 0.000 validation_error: 20.767 +(epoch: 39, iters: 289424, time: 0.652, data: 0.000) G_L1: 14.443 G_L1_ABSOLUTE: 2.716 G_L1_RELATIVE: 11.727 G_Regularizer: 0.000 validation_error: 20.579 +(epoch: 39, iters: 
291424, time: 0.647, data: 0.000) G_L1: 12.757 G_L1_ABSOLUTE: 2.921 G_L1_RELATIVE: 9.836 G_Regularizer: 0.000 validation_error: 21.050 +(epoch: 39, iters: 293424, time: 0.592, data: 0.000) G_L1: 14.040 G_L1_ABSOLUTE: 2.755 G_L1_RELATIVE: 11.286 G_Regularizer: 0.000 validation_error: 20.725 +(epoch: 39, iters: 295424, time: 0.614, data: 0.000) G_L1: 14.847 G_L1_ABSOLUTE: 2.515 G_L1_RELATIVE: 12.332 G_Regularizer: 0.000 validation_error: 20.660 +(epoch: 39, iters: 297424, time: 0.563, data: 0.000) G_L1: 12.949 G_L1_ABSOLUTE: 2.536 G_L1_RELATIVE: 10.413 G_Regularizer: 0.000 validation_error: 20.735 +(epoch: 39, iters: 299424, time: 0.619, data: 0.000) G_L1: 13.104 G_L1_ABSOLUTE: 2.890 G_L1_RELATIVE: 10.214 G_Regularizer: 0.000 validation_error: 20.746 +(epoch: 39, iters: 301424, time: 0.606, data: 0.000) G_L1: 11.411 G_L1_ABSOLUTE: 2.484 G_L1_RELATIVE: 8.928 G_Regularizer: 0.000 validation_error: 20.500 +(epoch: 40, iters: 672, time: 0.619, data: 0.000) G_L1: 13.078 G_L1_ABSOLUTE: 2.410 G_L1_RELATIVE: 10.667 G_Regularizer: 0.000 validation_error: 20.737 +(epoch: 40, iters: 2672, time: 0.603, data: 0.000) G_L1: 16.802 G_L1_ABSOLUTE: 2.754 G_L1_RELATIVE: 14.047 G_Regularizer: 0.000 validation_error: 20.812 +(epoch: 40, iters: 4672, time: 0.614, data: 0.000) G_L1: 13.812 G_L1_ABSOLUTE: 2.429 G_L1_RELATIVE: 11.383 G_Regularizer: 0.000 validation_error: 20.909 +(epoch: 40, iters: 6672, time: 0.612, data: 0.000) G_L1: 14.037 G_L1_ABSOLUTE: 2.805 G_L1_RELATIVE: 11.233 G_Regularizer: 0.000 validation_error: 20.957 +(epoch: 40, iters: 8672, time: 0.611, data: 0.000) G_L1: 12.319 G_L1_ABSOLUTE: 2.634 G_L1_RELATIVE: 9.685 G_Regularizer: 0.000 validation_error: 20.668 +(epoch: 40, iters: 10672, time: 0.575, data: 0.000) G_L1: 12.714 G_L1_ABSOLUTE: 2.422 G_L1_RELATIVE: 10.292 G_Regularizer: 0.000 validation_error: 20.798 +(epoch: 40, iters: 12672, time: 0.605, data: 0.000) G_L1: 13.719 G_L1_ABSOLUTE: 2.304 G_L1_RELATIVE: 11.415 G_Regularizer: 0.000 validation_error: 20.610 +(epoch: 40, iters: 14672, time: 0.610, data: 0.000) G_L1: 13.007 G_L1_ABSOLUTE: 2.563 G_L1_RELATIVE: 10.444 G_Regularizer: 0.000 validation_error: 21.015 +(epoch: 40, iters: 16672, time: 0.610, data: 0.000) G_L1: 13.234 G_L1_ABSOLUTE: 2.254 G_L1_RELATIVE: 10.980 G_Regularizer: 0.000 validation_error: 20.843 +(epoch: 40, iters: 18672, time: 0.600, data: 0.000) G_L1: 13.697 G_L1_ABSOLUTE: 2.343 G_L1_RELATIVE: 11.354 G_Regularizer: 0.000 validation_error: 21.176 +(epoch: 40, iters: 20672, time: 0.581, data: 0.000) G_L1: 13.538 G_L1_ABSOLUTE: 2.391 G_L1_RELATIVE: 11.147 G_Regularizer: 0.000 validation_error: 20.493 +(epoch: 40, iters: 22672, time: 0.601, data: 0.000) G_L1: 12.109 G_L1_ABSOLUTE: 2.192 G_L1_RELATIVE: 9.917 G_Regularizer: 0.000 validation_error: 20.897 +(epoch: 40, iters: 24672, time: 0.576, data: 0.000) G_L1: 14.281 G_L1_ABSOLUTE: 2.115 G_L1_RELATIVE: 12.166 G_Regularizer: 0.000 validation_error: 20.974 +(epoch: 40, iters: 26672, time: 0.614, data: 0.000) G_L1: 13.699 G_L1_ABSOLUTE: 2.651 G_L1_RELATIVE: 11.047 G_Regularizer: 0.000 validation_error: 20.784 +(epoch: 40, iters: 28672, time: 0.618, data: 0.000) G_L1: 16.170 G_L1_ABSOLUTE: 3.070 G_L1_RELATIVE: 13.100 G_Regularizer: 0.000 validation_error: 20.391 +(epoch: 40, iters: 30672, time: 0.611, data: 0.000) G_L1: 15.237 G_L1_ABSOLUTE: 2.194 G_L1_RELATIVE: 13.043 G_Regularizer: 0.000 validation_error: 21.147 +(epoch: 40, iters: 32672, time: 0.603, data: 0.000) G_L1: 14.975 G_L1_ABSOLUTE: 2.369 G_L1_RELATIVE: 12.606 G_Regularizer: 0.000 validation_error: 21.032 +(epoch: 40, 
iters: 34672, time: 0.579, data: 0.000) G_L1: 14.283 G_L1_ABSOLUTE: 2.943 G_L1_RELATIVE: 11.340 G_Regularizer: 0.000 validation_error: 20.688 +(epoch: 40, iters: 36672, time: 0.619, data: 0.000) G_L1: 14.336 G_L1_ABSOLUTE: 2.618 G_L1_RELATIVE: 11.718 G_Regularizer: 0.000 validation_error: 21.384 +(epoch: 40, iters: 38672, time: 0.604, data: 0.000) G_L1: 11.601 G_L1_ABSOLUTE: 2.046 G_L1_RELATIVE: 9.555 G_Regularizer: 0.000 validation_error: 20.885 +(epoch: 40, iters: 40672, time: 0.620, data: 0.000) G_L1: 14.937 G_L1_ABSOLUTE: 2.649 G_L1_RELATIVE: 12.288 G_Regularizer: 0.000 validation_error: 20.961 +(epoch: 40, iters: 42672, time: 0.593, data: 0.000) G_L1: 14.351 G_L1_ABSOLUTE: 2.741 G_L1_RELATIVE: 11.610 G_Regularizer: 0.000 validation_error: 21.124 +(epoch: 40, iters: 44672, time: 0.615, data: 0.000) G_L1: 17.192 G_L1_ABSOLUTE: 2.689 G_L1_RELATIVE: 14.503 G_Regularizer: 0.000 validation_error: 20.587 +(epoch: 40, iters: 46672, time: 0.611, data: 0.000) G_L1: 14.012 G_L1_ABSOLUTE: 3.186 G_L1_RELATIVE: 10.826 G_Regularizer: 0.000 validation_error: 20.939 +(epoch: 40, iters: 48672, time: 0.597, data: 0.000) G_L1: 15.074 G_L1_ABSOLUTE: 2.732 G_L1_RELATIVE: 12.341 G_Regularizer: 0.000 validation_error: 20.858 +(epoch: 40, iters: 50672, time: 0.529, data: 0.000) G_L1: 14.695 G_L1_ABSOLUTE: 2.835 G_L1_RELATIVE: 11.859 G_Regularizer: 0.000 validation_error: 20.762 +(epoch: 40, iters: 52672, time: 0.537, data: 0.000) G_L1: 12.661 G_L1_ABSOLUTE: 2.759 G_L1_RELATIVE: 9.902 G_Regularizer: 0.000 validation_error: 20.823 +(epoch: 40, iters: 54672, time: 0.545, data: 0.000) G_L1: 12.950 G_L1_ABSOLUTE: 2.466 G_L1_RELATIVE: 10.484 G_Regularizer: 0.000 validation_error: 21.123 +(epoch: 40, iters: 56672, time: 0.539, data: 0.000) G_L1: 15.742 G_L1_ABSOLUTE: 2.654 G_L1_RELATIVE: 13.089 G_Regularizer: 0.000 validation_error: 20.958 +(epoch: 40, iters: 58672, time: 0.540, data: 0.001) G_L1: 15.573 G_L1_ABSOLUTE: 2.784 G_L1_RELATIVE: 12.789 G_Regularizer: 0.000 validation_error: 20.952 +(epoch: 40, iters: 60672, time: 0.540, data: 0.000) G_L1: 13.603 G_L1_ABSOLUTE: 2.635 G_L1_RELATIVE: 10.968 G_Regularizer: 0.000 validation_error: 20.793 +(epoch: 40, iters: 62672, time: 0.531, data: 0.000) G_L1: 14.146 G_L1_ABSOLUTE: 2.317 G_L1_RELATIVE: 11.829 G_Regularizer: 0.000 validation_error: 20.672 +(epoch: 40, iters: 64672, time: 0.538, data: 0.000) G_L1: 16.657 G_L1_ABSOLUTE: 2.562 G_L1_RELATIVE: 14.094 G_Regularizer: 0.000 validation_error: 20.753 +(epoch: 40, iters: 66672, time: 0.550, data: 0.000) G_L1: 15.610 G_L1_ABSOLUTE: 2.224 G_L1_RELATIVE: 13.386 G_Regularizer: 0.000 validation_error: 21.139 +(epoch: 40, iters: 68672, time: 0.543, data: 0.001) G_L1: 13.831 G_L1_ABSOLUTE: 2.748 G_L1_RELATIVE: 11.082 G_Regularizer: 0.000 validation_error: 20.678 +(epoch: 40, iters: 70672, time: 0.528, data: 0.000) G_L1: 12.770 G_L1_ABSOLUTE: 2.141 G_L1_RELATIVE: 10.630 G_Regularizer: 0.000 validation_error: 20.957 +(epoch: 40, iters: 72672, time: 0.545, data: 0.000) G_L1: 13.850 G_L1_ABSOLUTE: 2.395 G_L1_RELATIVE: 11.454 G_Regularizer: 0.000 validation_error: 20.949 +(epoch: 40, iters: 74672, time: 0.542, data: 0.000) G_L1: 13.191 G_L1_ABSOLUTE: 2.313 G_L1_RELATIVE: 10.878 G_Regularizer: 0.000 validation_error: 20.920 +(epoch: 40, iters: 76672, time: 0.538, data: 0.000) G_L1: 15.712 G_L1_ABSOLUTE: 2.755 G_L1_RELATIVE: 12.957 G_Regularizer: 0.000 validation_error: 20.996 +(epoch: 40, iters: 78672, time: 0.537, data: 0.000) G_L1: 14.055 G_L1_ABSOLUTE: 2.440 G_L1_RELATIVE: 11.615 G_Regularizer: 0.000 validation_error: 20.979 
+(epoch: 40, iters: 80672, time: 0.542, data: 0.000) G_L1: 15.825 G_L1_ABSOLUTE: 2.470 G_L1_RELATIVE: 13.355 G_Regularizer: 0.000 validation_error: 21.083 +(epoch: 40, iters: 82672, time: 0.539, data: 0.000) G_L1: 14.311 G_L1_ABSOLUTE: 2.486 G_L1_RELATIVE: 11.825 G_Regularizer: 0.000 validation_error: 20.988 +(epoch: 40, iters: 84672, time: 0.543, data: 0.000) G_L1: 12.777 G_L1_ABSOLUTE: 2.236 G_L1_RELATIVE: 10.541 G_Regularizer: 0.000 validation_error: 20.581 +(epoch: 40, iters: 86672, time: 0.546, data: 0.000) G_L1: 12.341 G_L1_ABSOLUTE: 2.767 G_L1_RELATIVE: 9.574 G_Regularizer: 0.000 validation_error: 20.757 +(epoch: 40, iters: 88672, time: 0.548, data: 0.000) G_L1: 13.642 G_L1_ABSOLUTE: 2.673 G_L1_RELATIVE: 10.969 G_Regularizer: 0.000 validation_error: 20.780 +(epoch: 40, iters: 90672, time: 0.538, data: 0.000) G_L1: 16.614 G_L1_ABSOLUTE: 2.944 G_L1_RELATIVE: 13.670 G_Regularizer: 0.000 validation_error: 21.009 +(epoch: 40, iters: 92672, time: 0.539, data: 0.000) G_L1: 15.413 G_L1_ABSOLUTE: 2.399 G_L1_RELATIVE: 13.014 G_Regularizer: 0.000 validation_error: 21.162 +(epoch: 40, iters: 94672, time: 0.538, data: 0.000) G_L1: 14.949 G_L1_ABSOLUTE: 2.417 G_L1_RELATIVE: 12.532 G_Regularizer: 0.000 validation_error: 21.158 +(epoch: 40, iters: 96672, time: 0.549, data: 0.000) G_L1: 14.418 G_L1_ABSOLUTE: 2.789 G_L1_RELATIVE: 11.629 G_Regularizer: 0.000 validation_error: 20.830 +(epoch: 40, iters: 98672, time: 0.548, data: 0.000) G_L1: 15.354 G_L1_ABSOLUTE: 2.512 G_L1_RELATIVE: 12.842 G_Regularizer: 0.000 validation_error: 21.181 +(epoch: 40, iters: 100672, time: 0.542, data: 0.000) G_L1: 13.249 G_L1_ABSOLUTE: 2.329 G_L1_RELATIVE: 10.920 G_Regularizer: 0.000 validation_error: 21.020 +(epoch: 40, iters: 102672, time: 0.548, data: 0.000) G_L1: 14.609 G_L1_ABSOLUTE: 2.167 G_L1_RELATIVE: 12.442 G_Regularizer: 0.000 validation_error: 20.653 +(epoch: 40, iters: 104672, time: 0.544, data: 0.001) G_L1: 12.020 G_L1_ABSOLUTE: 2.183 G_L1_RELATIVE: 9.837 G_Regularizer: 0.000 validation_error: 21.367 +(epoch: 40, iters: 106672, time: 0.541, data: 0.000) G_L1: 14.735 G_L1_ABSOLUTE: 3.030 G_L1_RELATIVE: 11.705 G_Regularizer: 0.000 validation_error: 20.710 +(epoch: 40, iters: 108672, time: 0.542, data: 0.000) G_L1: 17.364 G_L1_ABSOLUTE: 2.400 G_L1_RELATIVE: 14.964 G_Regularizer: 0.000 validation_error: 20.902 +(epoch: 40, iters: 110672, time: 0.548, data: 0.000) G_L1: 13.443 G_L1_ABSOLUTE: 2.898 G_L1_RELATIVE: 10.545 G_Regularizer: 0.000 validation_error: 21.269 +(epoch: 40, iters: 112672, time: 0.543, data: 0.000) G_L1: 13.810 G_L1_ABSOLUTE: 2.238 G_L1_RELATIVE: 11.572 G_Regularizer: 0.000 validation_error: 21.411 +(epoch: 40, iters: 114672, time: 0.552, data: 0.000) G_L1: 12.922 G_L1_ABSOLUTE: 2.191 G_L1_RELATIVE: 10.731 G_Regularizer: 0.000 validation_error: 21.044 +(epoch: 40, iters: 116672, time: 0.545, data: 0.000) G_L1: 15.168 G_L1_ABSOLUTE: 2.331 G_L1_RELATIVE: 12.837 G_Regularizer: 0.000 validation_error: 20.801 +(epoch: 40, iters: 118672, time: 0.545, data: 0.001) G_L1: 14.672 G_L1_ABSOLUTE: 2.811 G_L1_RELATIVE: 11.861 G_Regularizer: 0.000 validation_error: 20.812 +(epoch: 40, iters: 120672, time: 0.536, data: 0.000) G_L1: 13.127 G_L1_ABSOLUTE: 2.198 G_L1_RELATIVE: 10.929 G_Regularizer: 0.000 validation_error: 20.922 +(epoch: 40, iters: 122672, time: 0.534, data: 0.000) G_L1: 12.248 G_L1_ABSOLUTE: 2.144 G_L1_RELATIVE: 10.105 G_Regularizer: 0.000 validation_error: 20.431 +(epoch: 40, iters: 124672, time: 0.547, data: 0.000) G_L1: 11.208 G_L1_ABSOLUTE: 2.328 G_L1_RELATIVE: 8.881 G_Regularizer: 0.000 
validation_error: 21.215 +(epoch: 40, iters: 126672, time: 0.542, data: 0.000) G_L1: 17.507 G_L1_ABSOLUTE: 2.695 G_L1_RELATIVE: 14.812 G_Regularizer: 0.000 validation_error: 21.008 +(epoch: 40, iters: 128672, time: 0.549, data: 0.000) G_L1: 14.393 G_L1_ABSOLUTE: 2.539 G_L1_RELATIVE: 11.854 G_Regularizer: 0.000 validation_error: 20.710 +(epoch: 40, iters: 130672, time: 0.550, data: 0.001) G_L1: 14.427 G_L1_ABSOLUTE: 2.316 G_L1_RELATIVE: 12.111 G_Regularizer: 0.000 validation_error: 20.815 +(epoch: 40, iters: 132672, time: 0.539, data: 0.000) G_L1: 13.453 G_L1_ABSOLUTE: 2.589 G_L1_RELATIVE: 10.864 G_Regularizer: 0.000 validation_error: 20.755 +(epoch: 40, iters: 134672, time: 0.544, data: 0.000) G_L1: 13.472 G_L1_ABSOLUTE: 2.077 G_L1_RELATIVE: 11.394 G_Regularizer: 0.000 validation_error: 20.781 +(epoch: 40, iters: 136672, time: 0.542, data: 0.001) G_L1: 13.380 G_L1_ABSOLUTE: 2.859 G_L1_RELATIVE: 10.521 G_Regularizer: 0.000 validation_error: 21.014 +(epoch: 40, iters: 138672, time: 0.537, data: 0.000) G_L1: 11.557 G_L1_ABSOLUTE: 2.163 G_L1_RELATIVE: 9.394 G_Regularizer: 0.000 validation_error: 21.187 +(epoch: 40, iters: 140672, time: 0.538, data: 0.000) G_L1: 12.548 G_L1_ABSOLUTE: 2.412 G_L1_RELATIVE: 10.137 G_Regularizer: 0.000 validation_error: 20.837 +(epoch: 40, iters: 142672, time: 0.541, data: 0.000) G_L1: 12.271 G_L1_ABSOLUTE: 2.212 G_L1_RELATIVE: 10.058 G_Regularizer: 0.000 validation_error: 20.815 +(epoch: 40, iters: 144672, time: 0.547, data: 0.000) G_L1: 13.786 G_L1_ABSOLUTE: 2.563 G_L1_RELATIVE: 11.223 G_Regularizer: 0.000 validation_error: 20.367 +(epoch: 40, iters: 146672, time: 0.543, data: 0.000) G_L1: 13.371 G_L1_ABSOLUTE: 2.360 G_L1_RELATIVE: 11.011 G_Regularizer: 0.000 validation_error: 20.734 +(epoch: 40, iters: 148672, time: 0.541, data: 0.000) G_L1: 14.804 G_L1_ABSOLUTE: 2.627 G_L1_RELATIVE: 12.177 G_Regularizer: 0.000 validation_error: 20.700 +(epoch: 40, iters: 150672, time: 0.539, data: 0.000) G_L1: 17.266 G_L1_ABSOLUTE: 2.642 G_L1_RELATIVE: 14.624 G_Regularizer: 0.000 validation_error: 20.918 +(epoch: 40, iters: 152672, time: 0.544, data: 0.000) G_L1: 13.602 G_L1_ABSOLUTE: 1.813 G_L1_RELATIVE: 11.789 G_Regularizer: 0.000 validation_error: 20.726 +(epoch: 40, iters: 154672, time: 0.544, data: 0.000) G_L1: 14.020 G_L1_ABSOLUTE: 2.707 G_L1_RELATIVE: 11.313 G_Regularizer: 0.000 validation_error: 20.752 +(epoch: 40, iters: 156672, time: 0.530, data: 0.001) G_L1: 13.597 G_L1_ABSOLUTE: 2.676 G_L1_RELATIVE: 10.921 G_Regularizer: 0.000 validation_error: 20.748 +(epoch: 40, iters: 158672, time: 0.540, data: 0.000) G_L1: 14.663 G_L1_ABSOLUTE: 2.511 G_L1_RELATIVE: 12.152 G_Regularizer: 0.000 validation_error: 20.959 +(epoch: 40, iters: 160672, time: 0.537, data: 0.000) G_L1: 18.136 G_L1_ABSOLUTE: 3.774 G_L1_RELATIVE: 14.362 G_Regularizer: 0.000 validation_error: 20.522 +(epoch: 40, iters: 162672, time: 0.543, data: 0.000) G_L1: 11.588 G_L1_ABSOLUTE: 2.505 G_L1_RELATIVE: 9.083 G_Regularizer: 0.000 validation_error: 20.674 +(epoch: 40, iters: 164672, time: 0.538, data: 0.000) G_L1: 12.127 G_L1_ABSOLUTE: 2.460 G_L1_RELATIVE: 9.668 G_Regularizer: 0.000 validation_error: 20.725 +(epoch: 40, iters: 166672, time: 0.543, data: 0.000) G_L1: 13.542 G_L1_ABSOLUTE: 2.139 G_L1_RELATIVE: 11.403 G_Regularizer: 0.000 validation_error: 21.041 +(epoch: 40, iters: 168672, time: 0.545, data: 0.000) G_L1: 15.490 G_L1_ABSOLUTE: 2.401 G_L1_RELATIVE: 13.089 G_Regularizer: 0.000 validation_error: 20.516 +(epoch: 40, iters: 170672, time: 0.538, data: 0.000) G_L1: 15.693 G_L1_ABSOLUTE: 2.652 
G_L1_RELATIVE: 13.041 G_Regularizer: 0.000 validation_error: 21.042 +(epoch: 40, iters: 172672, time: 0.543, data: 0.000) G_L1: 14.590 G_L1_ABSOLUTE: 2.598 G_L1_RELATIVE: 11.992 G_Regularizer: 0.000 validation_error: 20.740 +(epoch: 40, iters: 174672, time: 0.539, data: 0.000) G_L1: 13.389 G_L1_ABSOLUTE: 2.208 G_L1_RELATIVE: 11.180 G_Regularizer: 0.000 validation_error: 20.573 +(epoch: 40, iters: 176672, time: 0.546, data: 0.000) G_L1: 13.479 G_L1_ABSOLUTE: 2.329 G_L1_RELATIVE: 11.150 G_Regularizer: 0.000 validation_error: 20.793 +(epoch: 40, iters: 178672, time: 0.544, data: 0.000) G_L1: 14.187 G_L1_ABSOLUTE: 2.435 G_L1_RELATIVE: 11.752 G_Regularizer: 0.000 validation_error: 20.777 +(epoch: 40, iters: 180672, time: 0.543, data: 0.000) G_L1: 14.545 G_L1_ABSOLUTE: 2.892 G_L1_RELATIVE: 11.653 G_Regularizer: 0.000 validation_error: 21.057 +(epoch: 40, iters: 182672, time: 0.541, data: 0.000) G_L1: 12.671 G_L1_ABSOLUTE: 2.576 G_L1_RELATIVE: 10.095 G_Regularizer: 0.000 validation_error: 20.176 +(epoch: 40, iters: 184672, time: 0.537, data: 0.000) G_L1: 13.153 G_L1_ABSOLUTE: 2.064 G_L1_RELATIVE: 11.088 G_Regularizer: 0.000 validation_error: 21.239 +(epoch: 40, iters: 186672, time: 0.537, data: 0.000) G_L1: 14.732 G_L1_ABSOLUTE: 2.470 G_L1_RELATIVE: 12.262 G_Regularizer: 0.000 validation_error: 20.284 +(epoch: 40, iters: 188672, time: 0.550, data: 0.000) G_L1: 15.211 G_L1_ABSOLUTE: 2.514 G_L1_RELATIVE: 12.697 G_Regularizer: 0.000 validation_error: 20.796 +(epoch: 40, iters: 190672, time: 0.548, data: 0.000) G_L1: 11.419 G_L1_ABSOLUTE: 2.291 G_L1_RELATIVE: 9.128 G_Regularizer: 0.000 validation_error: 21.177 +(epoch: 40, iters: 192672, time: 0.547, data: 0.000) G_L1: 13.893 G_L1_ABSOLUTE: 2.324 G_L1_RELATIVE: 11.569 G_Regularizer: 0.000 validation_error: 20.715 +(epoch: 40, iters: 194672, time: 0.544, data: 0.000) G_L1: 12.214 G_L1_ABSOLUTE: 2.067 G_L1_RELATIVE: 10.147 G_Regularizer: 0.000 validation_error: 20.994 +(epoch: 40, iters: 196672, time: 0.539, data: 0.001) G_L1: 15.905 G_L1_ABSOLUTE: 2.409 G_L1_RELATIVE: 13.496 G_Regularizer: 0.000 validation_error: 21.317 +(epoch: 40, iters: 198672, time: 0.544, data: 0.001) G_L1: 11.825 G_L1_ABSOLUTE: 2.354 G_L1_RELATIVE: 9.471 G_Regularizer: 0.000 validation_error: 20.759 +(epoch: 40, iters: 200672, time: 0.545, data: 0.000) G_L1: 13.308 G_L1_ABSOLUTE: 2.302 G_L1_RELATIVE: 11.007 G_Regularizer: 0.000 validation_error: 21.083 +(epoch: 40, iters: 202672, time: 0.540, data: 0.000) G_L1: 16.156 G_L1_ABSOLUTE: 2.920 G_L1_RELATIVE: 13.236 G_Regularizer: 0.000 validation_error: 20.905 +(epoch: 40, iters: 204672, time: 0.539, data: 0.000) G_L1: 14.554 G_L1_ABSOLUTE: 2.446 G_L1_RELATIVE: 12.108 G_Regularizer: 0.000 validation_error: 20.764 +(epoch: 40, iters: 206672, time: 0.554, data: 0.001) G_L1: 14.706 G_L1_ABSOLUTE: 2.231 G_L1_RELATIVE: 12.475 G_Regularizer: 0.000 validation_error: 20.710 +(epoch: 40, iters: 208672, time: 0.538, data: 0.000) G_L1: 11.998 G_L1_ABSOLUTE: 2.351 G_L1_RELATIVE: 9.646 G_Regularizer: 0.000 validation_error: 20.898 +(epoch: 40, iters: 210672, time: 0.540, data: 0.000) G_L1: 13.910 G_L1_ABSOLUTE: 2.301 G_L1_RELATIVE: 11.609 G_Regularizer: 0.000 validation_error: 20.627 +(epoch: 40, iters: 212672, time: 0.533, data: 0.000) G_L1: 12.840 G_L1_ABSOLUTE: 2.173 G_L1_RELATIVE: 10.667 G_Regularizer: 0.000 validation_error: 20.881 +(epoch: 40, iters: 214672, time: 0.542, data: 0.000) G_L1: 15.175 G_L1_ABSOLUTE: 2.748 G_L1_RELATIVE: 12.427 G_Regularizer: 0.000 validation_error: 20.575 +(epoch: 40, iters: 216672, time: 0.537, data: 0.000) 
G_L1: 12.762 G_L1_ABSOLUTE: 2.530 G_L1_RELATIVE: 10.232 G_Regularizer: 0.000 validation_error: 20.701 +(epoch: 40, iters: 218672, time: 0.544, data: 0.000) G_L1: 13.906 G_L1_ABSOLUTE: 2.252 G_L1_RELATIVE: 11.655 G_Regularizer: 0.000 validation_error: 21.040 +(epoch: 40, iters: 220672, time: 0.544, data: 0.000) G_L1: 14.043 G_L1_ABSOLUTE: 2.255 G_L1_RELATIVE: 11.788 G_Regularizer: 0.000 validation_error: 20.538 +(epoch: 40, iters: 222672, time: 0.541, data: 0.000) G_L1: 11.845 G_L1_ABSOLUTE: 1.986 G_L1_RELATIVE: 9.859 G_Regularizer: 0.000 validation_error: 20.758 +(epoch: 40, iters: 224672, time: 0.532, data: 0.000) G_L1: 13.444 G_L1_ABSOLUTE: 2.394 G_L1_RELATIVE: 11.050 G_Regularizer: 0.000 validation_error: 20.874 +(epoch: 40, iters: 226672, time: 0.546, data: 0.000) G_L1: 12.270 G_L1_ABSOLUTE: 2.637 G_L1_RELATIVE: 9.633 G_Regularizer: 0.000 validation_error: 20.882 +(epoch: 40, iters: 228672, time: 0.541, data: 0.001) G_L1: 11.422 G_L1_ABSOLUTE: 2.458 G_L1_RELATIVE: 8.964 G_Regularizer: 0.000 validation_error: 20.953 +(epoch: 40, iters: 230672, time: 0.541, data: 0.000) G_L1: 16.320 G_L1_ABSOLUTE: 2.982 G_L1_RELATIVE: 13.339 G_Regularizer: 0.000 validation_error: 21.021 +(epoch: 40, iters: 232672, time: 0.550, data: 0.001) G_L1: 13.957 G_L1_ABSOLUTE: 2.147 G_L1_RELATIVE: 11.810 G_Regularizer: 0.000 validation_error: 20.812 +(epoch: 40, iters: 234672, time: 0.539, data: 0.000) G_L1: 14.591 G_L1_ABSOLUTE: 2.640 G_L1_RELATIVE: 11.951 G_Regularizer: 0.000 validation_error: 21.000 +(epoch: 40, iters: 236672, time: 0.546, data: 0.000) G_L1: 12.607 G_L1_ABSOLUTE: 2.228 G_L1_RELATIVE: 10.379 G_Regularizer: 0.000 validation_error: 20.884 +(epoch: 40, iters: 238672, time: 0.537, data: 0.000) G_L1: 13.054 G_L1_ABSOLUTE: 2.198 G_L1_RELATIVE: 10.856 G_Regularizer: 0.000 validation_error: 20.992 +(epoch: 40, iters: 240672, time: 0.545, data: 0.000) G_L1: 17.009 G_L1_ABSOLUTE: 2.141 G_L1_RELATIVE: 14.868 G_Regularizer: 0.000 validation_error: 20.990 +(epoch: 40, iters: 242672, time: 0.528, data: 0.000) G_L1: 13.186 G_L1_ABSOLUTE: 2.741 G_L1_RELATIVE: 10.445 G_Regularizer: 0.000 validation_error: 21.145 +(epoch: 40, iters: 244672, time: 0.539, data: 0.000) G_L1: 14.491 G_L1_ABSOLUTE: 2.609 G_L1_RELATIVE: 11.882 G_Regularizer: 0.000 validation_error: 21.205 +(epoch: 40, iters: 246672, time: 0.543, data: 0.001) G_L1: 15.506 G_L1_ABSOLUTE: 2.285 G_L1_RELATIVE: 13.220 G_Regularizer: 0.000 validation_error: 20.753 +(epoch: 40, iters: 248672, time: 0.542, data: 0.000) G_L1: 15.389 G_L1_ABSOLUTE: 2.875 G_L1_RELATIVE: 12.515 G_Regularizer: 0.000 validation_error: 21.071 +(epoch: 40, iters: 250672, time: 0.542, data: 0.000) G_L1: 14.497 G_L1_ABSOLUTE: 2.666 G_L1_RELATIVE: 11.831 G_Regularizer: 0.000 validation_error: 20.545 +(epoch: 40, iters: 252672, time: 0.538, data: 0.000) G_L1: 13.287 G_L1_ABSOLUTE: 2.378 G_L1_RELATIVE: 10.910 G_Regularizer: 0.000 validation_error: 20.472 +(epoch: 40, iters: 254672, time: 0.543, data: 0.000) G_L1: 12.912 G_L1_ABSOLUTE: 2.517 G_L1_RELATIVE: 10.395 G_Regularizer: 0.000 validation_error: 20.498 +(epoch: 40, iters: 256672, time: 0.547, data: 0.000) G_L1: 12.527 G_L1_ABSOLUTE: 2.082 G_L1_RELATIVE: 10.445 G_Regularizer: 0.000 validation_error: 20.510 +(epoch: 40, iters: 258672, time: 0.544, data: 0.000) G_L1: 14.253 G_L1_ABSOLUTE: 3.104 G_L1_RELATIVE: 11.148 G_Regularizer: 0.000 validation_error: 21.270 +(epoch: 40, iters: 260672, time: 0.543, data: 0.000) G_L1: 14.204 G_L1_ABSOLUTE: 2.434 G_L1_RELATIVE: 11.769 G_Regularizer: 0.000 validation_error: 20.915 +(epoch: 40, iters: 
262672, time: 0.545, data: 0.001) G_L1: 12.295 G_L1_ABSOLUTE: 2.261 G_L1_RELATIVE: 10.034 G_Regularizer: 0.000 validation_error: 20.487 +(epoch: 40, iters: 264672, time: 0.542, data: 0.000) G_L1: 12.326 G_L1_ABSOLUTE: 2.399 G_L1_RELATIVE: 9.928 G_Regularizer: 0.000 validation_error: 20.840 +(epoch: 40, iters: 266672, time: 0.543, data: 0.000) G_L1: 16.281 G_L1_ABSOLUTE: 2.497 G_L1_RELATIVE: 13.784 G_Regularizer: 0.000 validation_error: 20.484 +(epoch: 40, iters: 268672, time: 0.526, data: 0.000) G_L1: 15.234 G_L1_ABSOLUTE: 2.525 G_L1_RELATIVE: 12.709 G_Regularizer: 0.000 validation_error: 20.772 +(epoch: 40, iters: 270672, time: 0.546, data: 0.000) G_L1: 13.575 G_L1_ABSOLUTE: 2.276 G_L1_RELATIVE: 11.299 G_Regularizer: 0.000 validation_error: 20.425 +(epoch: 40, iters: 272672, time: 0.544, data: 0.000) G_L1: 12.616 G_L1_ABSOLUTE: 2.759 G_L1_RELATIVE: 9.858 G_Regularizer: 0.000 validation_error: 20.838 +(epoch: 40, iters: 274672, time: 0.547, data: 0.000) G_L1: 15.339 G_L1_ABSOLUTE: 2.598 G_L1_RELATIVE: 12.741 G_Regularizer: 0.000 validation_error: 20.866 +(epoch: 40, iters: 276672, time: 0.537, data: 0.000) G_L1: 15.659 G_L1_ABSOLUTE: 2.887 G_L1_RELATIVE: 12.772 G_Regularizer: 0.000 validation_error: 20.846 +(epoch: 40, iters: 278672, time: 0.547, data: 0.000) G_L1: 11.873 G_L1_ABSOLUTE: 2.636 G_L1_RELATIVE: 9.237 G_Regularizer: 0.000 validation_error: 20.778 +(epoch: 40, iters: 280672, time: 0.547, data: 0.000) G_L1: 14.515 G_L1_ABSOLUTE: 2.237 G_L1_RELATIVE: 12.278 G_Regularizer: 0.000 validation_error: 20.531 +(epoch: 40, iters: 282672, time: 0.545, data: 0.000) G_L1: 13.618 G_L1_ABSOLUTE: 2.633 G_L1_RELATIVE: 10.985 G_Regularizer: 0.000 validation_error: 20.891 +(epoch: 40, iters: 284672, time: 0.544, data: 0.000) G_L1: 13.044 G_L1_ABSOLUTE: 2.959 G_L1_RELATIVE: 10.085 G_Regularizer: 0.000 validation_error: 20.799 +(epoch: 40, iters: 286672, time: 0.540, data: 0.000) G_L1: 15.020 G_L1_ABSOLUTE: 2.983 G_L1_RELATIVE: 12.037 G_Regularizer: 0.000 validation_error: 20.667 +(epoch: 40, iters: 288672, time: 0.543, data: 0.000) G_L1: 15.352 G_L1_ABSOLUTE: 2.187 G_L1_RELATIVE: 13.165 G_Regularizer: 0.000 validation_error: 21.306 +(epoch: 40, iters: 290672, time: 0.543, data: 0.000) G_L1: 15.308 G_L1_ABSOLUTE: 2.506 G_L1_RELATIVE: 12.802 G_Regularizer: 0.000 validation_error: 20.716 +(epoch: 40, iters: 292672, time: 0.541, data: 0.000) G_L1: 13.123 G_L1_ABSOLUTE: 2.323 G_L1_RELATIVE: 10.800 G_Regularizer: 0.000 validation_error: 20.827 +(epoch: 40, iters: 294672, time: 0.540, data: 0.000) G_L1: 15.378 G_L1_ABSOLUTE: 2.410 G_L1_RELATIVE: 12.967 G_Regularizer: 0.000 validation_error: 20.975 +(epoch: 40, iters: 296672, time: 0.548, data: 0.000) G_L1: 15.537 G_L1_ABSOLUTE: 2.519 G_L1_RELATIVE: 13.017 G_Regularizer: 0.000 validation_error: 20.813 +(epoch: 40, iters: 298672, time: 0.540, data: 0.000) G_L1: 13.935 G_L1_ABSOLUTE: 2.483 G_L1_RELATIVE: 11.452 G_Regularizer: 0.000 validation_error: 20.384 +(epoch: 40, iters: 300672, time: 0.546, data: 0.000) G_L1: 14.812 G_L1_ABSOLUTE: 2.608 G_L1_RELATIVE: 12.204 G_Regularizer: 0.000 validation_error: 20.659 +(epoch: 40, iters: 302672, time: 0.531, data: 0.000) G_L1: 13.715 G_L1_ABSOLUTE: 2.541 G_L1_RELATIVE: 11.174 G_Regularizer: 0.000 validation_error: 20.809 +(epoch: 41, iters: 1920, time: 0.546, data: 0.000) G_L1: 15.368 G_L1_ABSOLUTE: 2.668 G_L1_RELATIVE: 12.701 G_Regularizer: 0.000 validation_error: 20.638 +(epoch: 41, iters: 3920, time: 0.539, data: 0.000) G_L1: 14.953 G_L1_ABSOLUTE: 2.552 G_L1_RELATIVE: 12.402 G_Regularizer: 0.000 validation_error: 
20.904 +(epoch: 41, iters: 5920, time: 0.545, data: 0.000) G_L1: 14.879 G_L1_ABSOLUTE: 2.790 G_L1_RELATIVE: 12.090 G_Regularizer: 0.000 validation_error: 20.423 +(epoch: 41, iters: 7920, time: 0.539, data: 0.000) G_L1: 13.578 G_L1_ABSOLUTE: 2.475 G_L1_RELATIVE: 11.103 G_Regularizer: 0.000 validation_error: 20.697 +(epoch: 41, iters: 9920, time: 0.545, data: 0.000) G_L1: 12.208 G_L1_ABSOLUTE: 2.343 G_L1_RELATIVE: 9.864 G_Regularizer: 0.000 validation_error: 20.894 +(epoch: 41, iters: 11920, time: 0.540, data: 0.000) G_L1: 15.097 G_L1_ABSOLUTE: 2.601 G_L1_RELATIVE: 12.496 G_Regularizer: 0.000 validation_error: 20.888 +(epoch: 41, iters: 13920, time: 0.542, data: 0.000) G_L1: 13.298 G_L1_ABSOLUTE: 2.372 G_L1_RELATIVE: 10.927 G_Regularizer: 0.000 validation_error: 20.474 +(epoch: 41, iters: 15920, time: 0.546, data: 0.000) G_L1: 12.808 G_L1_ABSOLUTE: 2.524 G_L1_RELATIVE: 10.285 G_Regularizer: 0.000 validation_error: 20.903 +(epoch: 41, iters: 17920, time: 0.539, data: 0.000) G_L1: 13.554 G_L1_ABSOLUTE: 2.361 G_L1_RELATIVE: 11.192 G_Regularizer: 0.000 validation_error: 20.824 +(epoch: 41, iters: 19920, time: 0.541, data: 0.000) G_L1: 13.445 G_L1_ABSOLUTE: 2.018 G_L1_RELATIVE: 11.428 G_Regularizer: 0.000 validation_error: 20.846 +(epoch: 41, iters: 21920, time: 0.540, data: 0.000) G_L1: 14.210 G_L1_ABSOLUTE: 2.352 G_L1_RELATIVE: 11.858 G_Regularizer: 0.000 validation_error: 21.121 +(epoch: 41, iters: 23920, time: 0.539, data: 0.000) G_L1: 13.030 G_L1_ABSOLUTE: 1.957 G_L1_RELATIVE: 11.073 G_Regularizer: 0.000 validation_error: 20.562 +(epoch: 41, iters: 25920, time: 0.536, data: 0.000) G_L1: 13.076 G_L1_ABSOLUTE: 2.304 G_L1_RELATIVE: 10.773 G_Regularizer: 0.000 validation_error: 20.987 +(epoch: 41, iters: 27920, time: 0.545, data: 0.000) G_L1: 13.921 G_L1_ABSOLUTE: 2.284 G_L1_RELATIVE: 11.638 G_Regularizer: 0.000 validation_error: 20.648 +(epoch: 41, iters: 29920, time: 0.547, data: 0.001) G_L1: 15.518 G_L1_ABSOLUTE: 2.599 G_L1_RELATIVE: 12.920 G_Regularizer: 0.000 validation_error: 20.678 +(epoch: 41, iters: 31920, time: 0.538, data: 0.001) G_L1: 11.288 G_L1_ABSOLUTE: 2.333 G_L1_RELATIVE: 8.955 G_Regularizer: 0.000 validation_error: 20.720 +(epoch: 41, iters: 33920, time: 0.534, data: 0.000) G_L1: 13.922 G_L1_ABSOLUTE: 2.318 G_L1_RELATIVE: 11.604 G_Regularizer: 0.000 validation_error: 20.954 +(epoch: 41, iters: 35920, time: 0.544, data: 0.000) G_L1: 14.840 G_L1_ABSOLUTE: 2.489 G_L1_RELATIVE: 12.352 G_Regularizer: 0.000 validation_error: 20.980 +(epoch: 41, iters: 37920, time: 0.542, data: 0.001) G_L1: 12.792 G_L1_ABSOLUTE: 2.536 G_L1_RELATIVE: 10.256 G_Regularizer: 0.000 validation_error: 21.192 +(epoch: 41, iters: 39920, time: 0.545, data: 0.000) G_L1: 12.906 G_L1_ABSOLUTE: 2.685 G_L1_RELATIVE: 10.220 G_Regularizer: 0.000 validation_error: 20.895 +(epoch: 41, iters: 41920, time: 0.537, data: 0.000) G_L1: 14.645 G_L1_ABSOLUTE: 2.596 G_L1_RELATIVE: 12.049 G_Regularizer: 0.000 validation_error: 20.856 +(epoch: 41, iters: 43920, time: 0.546, data: 0.000) G_L1: 24.490 G_L1_ABSOLUTE: 2.920 G_L1_RELATIVE: 21.570 G_Regularizer: 0.000 validation_error: 20.166 +(epoch: 41, iters: 45920, time: 0.541, data: 0.000) G_L1: 13.116 G_L1_ABSOLUTE: 2.418 G_L1_RELATIVE: 10.699 G_Regularizer: 0.000 validation_error: 21.015 +(epoch: 41, iters: 47920, time: 0.546, data: 0.000) G_L1: 14.506 G_L1_ABSOLUTE: 3.201 G_L1_RELATIVE: 11.305 G_Regularizer: 0.000 validation_error: 21.069 +(epoch: 41, iters: 49920, time: 0.542, data: 0.001) G_L1: 13.297 G_L1_ABSOLUTE: 2.431 G_L1_RELATIVE: 10.866 G_Regularizer: 0.000 
validation_error: 20.848 +(epoch: 41, iters: 51920, time: 0.528, data: 0.000) G_L1: 13.154 G_L1_ABSOLUTE: 2.795 G_L1_RELATIVE: 10.359 G_Regularizer: 0.000 validation_error: 20.620 +(epoch: 41, iters: 53920, time: 0.543, data: 0.000) G_L1: 12.910 G_L1_ABSOLUTE: 2.192 G_L1_RELATIVE: 10.719 G_Regularizer: 0.000 validation_error: 20.772 +(epoch: 41, iters: 55920, time: 0.546, data: 0.000) G_L1: 14.887 G_L1_ABSOLUTE: 2.379 G_L1_RELATIVE: 12.508 G_Regularizer: 0.000 validation_error: 21.139 +(epoch: 41, iters: 57920, time: 0.535, data: 0.000) G_L1: 13.194 G_L1_ABSOLUTE: 2.249 G_L1_RELATIVE: 10.945 G_Regularizer: 0.000 validation_error: 21.099 +(epoch: 41, iters: 59920, time: 0.532, data: 0.000) G_L1: 14.837 G_L1_ABSOLUTE: 3.009 G_L1_RELATIVE: 11.828 G_Regularizer: 0.000 validation_error: 21.175 +(epoch: 41, iters: 61920, time: 0.540, data: 0.000) G_L1: 14.714 G_L1_ABSOLUTE: 2.491 G_L1_RELATIVE: 12.223 G_Regularizer: 0.000 validation_error: 20.840 +(epoch: 41, iters: 63920, time: 0.532, data: 0.000) G_L1: 13.255 G_L1_ABSOLUTE: 2.515 G_L1_RELATIVE: 10.739 G_Regularizer: 0.000 validation_error: 20.677 +(epoch: 41, iters: 65920, time: 0.546, data: 0.000) G_L1: 14.609 G_L1_ABSOLUTE: 2.171 G_L1_RELATIVE: 12.439 G_Regularizer: 0.000 validation_error: 20.621 +(epoch: 41, iters: 67920, time: 0.535, data: 0.000) G_L1: 14.591 G_L1_ABSOLUTE: 2.468 G_L1_RELATIVE: 12.124 G_Regularizer: 0.000 validation_error: 20.740 +(epoch: 41, iters: 69920, time: 0.542, data: 0.000) G_L1: 13.949 G_L1_ABSOLUTE: 2.343 G_L1_RELATIVE: 11.606 G_Regularizer: 0.000 validation_error: 20.591 +(epoch: 41, iters: 71920, time: 0.547, data: 0.001) G_L1: 12.103 G_L1_ABSOLUTE: 2.557 G_L1_RELATIVE: 9.546 G_Regularizer: 0.000 validation_error: 20.997 +(epoch: 41, iters: 73920, time: 0.545, data: 0.000) G_L1: 12.799 G_L1_ABSOLUTE: 2.468 G_L1_RELATIVE: 10.331 G_Regularizer: 0.000 validation_error: 20.815 +(epoch: 41, iters: 75920, time: 0.544, data: 0.000) G_L1: 16.727 G_L1_ABSOLUTE: 2.726 G_L1_RELATIVE: 14.001 G_Regularizer: 0.000 validation_error: 20.832 +(epoch: 41, iters: 77920, time: 0.538, data: 0.000) G_L1: 13.721 G_L1_ABSOLUTE: 2.541 G_L1_RELATIVE: 11.180 G_Regularizer: 0.000 validation_error: 20.975 +(epoch: 41, iters: 79920, time: 0.551, data: 0.000) G_L1: 13.400 G_L1_ABSOLUTE: 2.433 G_L1_RELATIVE: 10.968 G_Regularizer: 0.000 validation_error: 20.853 +(epoch: 41, iters: 81920, time: 0.541, data: 0.000) G_L1: 12.837 G_L1_ABSOLUTE: 2.221 G_L1_RELATIVE: 10.616 G_Regularizer: 0.000 validation_error: 21.051 +(epoch: 41, iters: 83920, time: 0.547, data: 0.000) G_L1: 13.586 G_L1_ABSOLUTE: 2.349 G_L1_RELATIVE: 11.237 G_Regularizer: 0.000 validation_error: 21.085 +(epoch: 41, iters: 85920, time: 0.540, data: 0.000) G_L1: 12.719 G_L1_ABSOLUTE: 2.080 G_L1_RELATIVE: 10.639 G_Regularizer: 0.000 validation_error: 21.063 +(epoch: 41, iters: 87920, time: 0.545, data: 0.000) G_L1: 13.034 G_L1_ABSOLUTE: 2.061 G_L1_RELATIVE: 10.973 G_Regularizer: 0.000 validation_error: 20.951 +(epoch: 41, iters: 89920, time: 0.542, data: 0.000) G_L1: 13.021 G_L1_ABSOLUTE: 2.285 G_L1_RELATIVE: 10.736 G_Regularizer: 0.000 validation_error: 20.670 +(epoch: 41, iters: 91920, time: 0.550, data: 0.000) G_L1: 12.626 G_L1_ABSOLUTE: 2.369 G_L1_RELATIVE: 10.256 G_Regularizer: 0.000 validation_error: 21.345 +(epoch: 41, iters: 93920, time: 0.543, data: 0.000) G_L1: 13.590 G_L1_ABSOLUTE: 2.696 G_L1_RELATIVE: 10.894 G_Regularizer: 0.000 validation_error: 21.147 +(epoch: 41, iters: 95920, time: 0.541, data: 0.000) G_L1: 13.292 G_L1_ABSOLUTE: 2.247 G_L1_RELATIVE: 11.045 
G_Regularizer: 0.000 validation_error: 20.942 +(epoch: 41, iters: 97920, time: 0.546, data: 0.000) G_L1: 14.344 G_L1_ABSOLUTE: 2.431 G_L1_RELATIVE: 11.913 G_Regularizer: 0.000 validation_error: 20.630 +(epoch: 41, iters: 99920, time: 0.542, data: 0.000) G_L1: 16.477 G_L1_ABSOLUTE: 2.156 G_L1_RELATIVE: 14.321 G_Regularizer: 0.000 validation_error: 20.626 +(epoch: 41, iters: 101920, time: 0.544, data: 0.000) G_L1: 13.518 G_L1_ABSOLUTE: 2.693 G_L1_RELATIVE: 10.825 G_Regularizer: 0.000 validation_error: 20.939 +(epoch: 41, iters: 103920, time: 0.538, data: 0.001) G_L1: 14.605 G_L1_ABSOLUTE: 1.966 G_L1_RELATIVE: 12.639 G_Regularizer: 0.000 validation_error: 21.306 +(epoch: 41, iters: 105920, time: 0.546, data: 0.000) G_L1: 13.908 G_L1_ABSOLUTE: 2.015 G_L1_RELATIVE: 11.893 G_Regularizer: 0.000 validation_error: 20.950 +(epoch: 41, iters: 107920, time: 0.547, data: 0.001) G_L1: 14.094 G_L1_ABSOLUTE: 2.450 G_L1_RELATIVE: 11.643 G_Regularizer: 0.000 validation_error: 20.709 +(epoch: 41, iters: 109920, time: 0.536, data: 0.000) G_L1: 16.367 G_L1_ABSOLUTE: 3.152 G_L1_RELATIVE: 13.216 G_Regularizer: 0.000 validation_error: 21.013 +(epoch: 41, iters: 111920, time: 0.541, data: 0.000) G_L1: 12.847 G_L1_ABSOLUTE: 2.349 G_L1_RELATIVE: 10.498 G_Regularizer: 0.000 validation_error: 20.709 +(epoch: 41, iters: 113920, time: 0.542, data: 0.000) G_L1: 14.352 G_L1_ABSOLUTE: 2.395 G_L1_RELATIVE: 11.957 G_Regularizer: 0.000 validation_error: 20.966 +(epoch: 41, iters: 115920, time: 0.541, data: 0.000) G_L1: 15.487 G_L1_ABSOLUTE: 2.558 G_L1_RELATIVE: 12.929 G_Regularizer: 0.000 validation_error: 20.606 +(epoch: 41, iters: 117920, time: 0.542, data: 0.001) G_L1: 14.679 G_L1_ABSOLUTE: 2.434 G_L1_RELATIVE: 12.245 G_Regularizer: 0.000 validation_error: 21.258 +(epoch: 41, iters: 119920, time: 0.534, data: 0.000) G_L1: 14.116 G_L1_ABSOLUTE: 2.723 G_L1_RELATIVE: 11.394 G_Regularizer: 0.000 validation_error: 21.093 +(epoch: 41, iters: 121920, time: 0.550, data: 0.001) G_L1: 13.570 G_L1_ABSOLUTE: 2.396 G_L1_RELATIVE: 11.174 G_Regularizer: 0.000 validation_error: 21.086 +(epoch: 41, iters: 123920, time: 0.549, data: 0.000) G_L1: 12.472 G_L1_ABSOLUTE: 2.378 G_L1_RELATIVE: 10.093 G_Regularizer: 0.000 validation_error: 20.858 +(epoch: 41, iters: 125920, time: 0.545, data: 0.000) G_L1: 12.734 G_L1_ABSOLUTE: 2.725 G_L1_RELATIVE: 10.010 G_Regularizer: 0.000 validation_error: 20.888 +(epoch: 41, iters: 127920, time: 0.538, data: 0.001) G_L1: 14.107 G_L1_ABSOLUTE: 2.609 G_L1_RELATIVE: 11.498 G_Regularizer: 0.000 validation_error: 21.108 +(epoch: 41, iters: 129920, time: 0.533, data: 0.001) G_L1: 13.977 G_L1_ABSOLUTE: 2.252 G_L1_RELATIVE: 11.725 G_Regularizer: 0.000 validation_error: 20.764 +(epoch: 41, iters: 131920, time: 0.537, data: 0.000) G_L1: 14.809 G_L1_ABSOLUTE: 2.817 G_L1_RELATIVE: 11.993 G_Regularizer: 0.000 validation_error: 20.915 +(epoch: 41, iters: 133920, time: 0.544, data: 0.001) G_L1: 13.871 G_L1_ABSOLUTE: 2.355 G_L1_RELATIVE: 11.517 G_Regularizer: 0.000 validation_error: 21.073 +(epoch: 41, iters: 135920, time: 0.541, data: 0.000) G_L1: 15.046 G_L1_ABSOLUTE: 2.563 G_L1_RELATIVE: 12.483 G_Regularizer: 0.000 validation_error: 20.761 +(epoch: 41, iters: 137920, time: 0.539, data: 0.000) G_L1: 13.278 G_L1_ABSOLUTE: 2.595 G_L1_RELATIVE: 10.683 G_Regularizer: 0.000 validation_error: 20.890 +(epoch: 41, iters: 139920, time: 0.541, data: 0.000) G_L1: 14.789 G_L1_ABSOLUTE: 2.509 G_L1_RELATIVE: 12.281 G_Regularizer: 0.000 validation_error: 21.143 +(epoch: 41, iters: 141920, time: 0.546, data: 0.000) G_L1: 14.524 
G_L1_ABSOLUTE: 2.325 G_L1_RELATIVE: 12.199 G_Regularizer: 0.000 validation_error: 20.468 +(epoch: 41, iters: 143920, time: 0.539, data: 0.000) G_L1: 14.327 G_L1_ABSOLUTE: 2.805 G_L1_RELATIVE: 11.522 G_Regularizer: 0.000 validation_error: 21.061 +(epoch: 41, iters: 145920, time: 0.541, data: 0.001) G_L1: 12.166 G_L1_ABSOLUTE: 2.512 G_L1_RELATIVE: 9.653 G_Regularizer: 0.000 validation_error: 20.534 +(epoch: 41, iters: 147920, time: 0.540, data: 0.001) G_L1: 13.152 G_L1_ABSOLUTE: 2.506 G_L1_RELATIVE: 10.646 G_Regularizer: 0.000 validation_error: 20.662 +(epoch: 41, iters: 149920, time: 0.539, data: 0.000) G_L1: 13.092 G_L1_ABSOLUTE: 2.489 G_L1_RELATIVE: 10.603 G_Regularizer: 0.000 validation_error: 20.882 +(epoch: 41, iters: 151920, time: 0.544, data: 0.000) G_L1: 14.673 G_L1_ABSOLUTE: 2.445 G_L1_RELATIVE: 12.228 G_Regularizer: 0.000 validation_error: 21.000 +(epoch: 41, iters: 153920, time: 0.538, data: 0.000) G_L1: 13.313 G_L1_ABSOLUTE: 2.423 G_L1_RELATIVE: 10.891 G_Regularizer: 0.000 validation_error: 20.577 +(epoch: 41, iters: 155920, time: 0.548, data: 0.000) G_L1: 12.737 G_L1_ABSOLUTE: 2.336 G_L1_RELATIVE: 10.402 G_Regularizer: 0.000 validation_error: 20.679 +(epoch: 41, iters: 157920, time: 0.537, data: 0.000) G_L1: 13.158 G_L1_ABSOLUTE: 2.168 G_L1_RELATIVE: 10.990 G_Regularizer: 0.000 validation_error: 20.602 +(epoch: 41, iters: 159920, time: 0.541, data: 0.000) G_L1: 10.480 G_L1_ABSOLUTE: 2.154 G_L1_RELATIVE: 8.326 G_Regularizer: 0.000 validation_error: 20.770 +(epoch: 41, iters: 161920, time: 0.540, data: 0.000) G_L1: 15.453 G_L1_ABSOLUTE: 2.885 G_L1_RELATIVE: 12.568 G_Regularizer: 0.000 validation_error: 20.809 +(epoch: 41, iters: 163920, time: 0.543, data: 0.000) G_L1: 14.635 G_L1_ABSOLUTE: 2.249 G_L1_RELATIVE: 12.386 G_Regularizer: 0.000 validation_error: 20.865 +(epoch: 41, iters: 165920, time: 0.540, data: 0.000) G_L1: 16.296 G_L1_ABSOLUTE: 2.389 G_L1_RELATIVE: 13.907 G_Regularizer: 0.000 validation_error: 20.604 +(epoch: 41, iters: 167920, time: 0.544, data: 0.000) G_L1: 12.955 G_L1_ABSOLUTE: 2.604 G_L1_RELATIVE: 10.351 G_Regularizer: 0.000 validation_error: 20.777 +(epoch: 41, iters: 169920, time: 0.542, data: 0.000) G_L1: 14.811 G_L1_ABSOLUTE: 2.987 G_L1_RELATIVE: 11.824 G_Regularizer: 0.000 validation_error: 20.996 +(epoch: 41, iters: 171920, time: 0.532, data: 0.001) G_L1: 13.791 G_L1_ABSOLUTE: 2.773 G_L1_RELATIVE: 11.018 G_Regularizer: 0.000 validation_error: 20.930 +(epoch: 41, iters: 173920, time: 0.546, data: 0.000) G_L1: 14.836 G_L1_ABSOLUTE: 2.892 G_L1_RELATIVE: 11.944 G_Regularizer: 0.000 validation_error: 21.214 +(epoch: 41, iters: 175920, time: 0.546, data: 0.000) G_L1: 14.719 G_L1_ABSOLUTE: 2.535 G_L1_RELATIVE: 12.184 G_Regularizer: 0.000 validation_error: 20.965 +(epoch: 41, iters: 177920, time: 0.543, data: 0.000) G_L1: 15.604 G_L1_ABSOLUTE: 2.841 G_L1_RELATIVE: 12.763 G_Regularizer: 0.000 validation_error: 21.019 +(epoch: 41, iters: 179920, time: 0.539, data: 0.000) G_L1: 15.723 G_L1_ABSOLUTE: 2.433 G_L1_RELATIVE: 13.291 G_Regularizer: 0.000 validation_error: 20.985 +(epoch: 41, iters: 181920, time: 0.543, data: 0.000) G_L1: 14.724 G_L1_ABSOLUTE: 2.385 G_L1_RELATIVE: 12.339 G_Regularizer: 0.000 validation_error: 21.010 +(epoch: 41, iters: 183920, time: 0.541, data: 0.000) G_L1: 14.425 G_L1_ABSOLUTE: 2.274 G_L1_RELATIVE: 12.151 G_Regularizer: 0.000 validation_error: 20.587 +(epoch: 41, iters: 185920, time: 0.547, data: 0.000) G_L1: 15.123 G_L1_ABSOLUTE: 2.515 G_L1_RELATIVE: 12.608 G_Regularizer: 0.000 validation_error: 20.639 +(epoch: 41, iters: 187920, time: 
0.541, data: 0.000) G_L1: 16.249 G_L1_ABSOLUTE: 2.143 G_L1_RELATIVE: 14.106 G_Regularizer: 0.000 validation_error: 21.005 +(epoch: 41, iters: 189920, time: 0.545, data: 0.000) G_L1: 14.550 G_L1_ABSOLUTE: 2.359 G_L1_RELATIVE: 12.192 G_Regularizer: 0.000 validation_error: 21.313 +(epoch: 41, iters: 191920, time: 0.545, data: 0.000) G_L1: 13.311 G_L1_ABSOLUTE: 2.207 G_L1_RELATIVE: 11.104 G_Regularizer: 0.000 validation_error: 21.030 +(epoch: 41, iters: 193920, time: 0.538, data: 0.000) G_L1: 16.444 G_L1_ABSOLUTE: 2.292 G_L1_RELATIVE: 14.153 G_Regularizer: 0.000 validation_error: 20.825 +(epoch: 41, iters: 195920, time: 0.541, data: 0.000) G_L1: 13.255 G_L1_ABSOLUTE: 2.377 G_L1_RELATIVE: 10.877 G_Regularizer: 0.000 validation_error: 20.568 +(epoch: 41, iters: 197920, time: 0.544, data: 0.000) G_L1: 16.000 G_L1_ABSOLUTE: 2.616 G_L1_RELATIVE: 13.385 G_Regularizer: 0.000 validation_error: 20.832 +(epoch: 41, iters: 199920, time: 0.543, data: 0.001) G_L1: 12.830 G_L1_ABSOLUTE: 2.635 G_L1_RELATIVE: 10.194 G_Regularizer: 0.000 validation_error: 20.808 +(epoch: 41, iters: 201920, time: 0.537, data: 0.000) G_L1: 11.620 G_L1_ABSOLUTE: 2.116 G_L1_RELATIVE: 9.504 G_Regularizer: 0.000 validation_error: 20.923 +(epoch: 41, iters: 203920, time: 0.545, data: 0.000) G_L1: 14.841 G_L1_ABSOLUTE: 2.767 G_L1_RELATIVE: 12.074 G_Regularizer: 0.000 validation_error: 20.740 +(epoch: 41, iters: 205920, time: 0.538, data: 0.001) G_L1: 15.624 G_L1_ABSOLUTE: 2.320 G_L1_RELATIVE: 13.305 G_Regularizer: 0.000 validation_error: 21.363 +(epoch: 41, iters: 207920, time: 0.538, data: 0.000) G_L1: 16.028 G_L1_ABSOLUTE: 2.486 G_L1_RELATIVE: 13.542 G_Regularizer: 0.000 validation_error: 20.617 +(epoch: 41, iters: 209920, time: 0.543, data: 0.000) G_L1: 13.495 G_L1_ABSOLUTE: 2.493 G_L1_RELATIVE: 11.002 G_Regularizer: 0.000 validation_error: 21.293 +(epoch: 41, iters: 211920, time: 0.548, data: 0.000) G_L1: 15.630 G_L1_ABSOLUTE: 2.469 G_L1_RELATIVE: 13.162 G_Regularizer: 0.000 validation_error: 20.874 +(epoch: 41, iters: 213920, time: 0.536, data: 0.000) G_L1: 14.036 G_L1_ABSOLUTE: 2.114 G_L1_RELATIVE: 11.922 G_Regularizer: 0.000 validation_error: 20.402 +(epoch: 41, iters: 215920, time: 0.538, data: 0.001) G_L1: 14.125 G_L1_ABSOLUTE: 2.827 G_L1_RELATIVE: 11.298 G_Regularizer: 0.000 validation_error: 21.380 +(epoch: 41, iters: 217920, time: 0.537, data: 0.000) G_L1: 11.927 G_L1_ABSOLUTE: 2.317 G_L1_RELATIVE: 9.610 G_Regularizer: 0.000 validation_error: 20.697 +(epoch: 41, iters: 219920, time: 0.540, data: 0.000) G_L1: 14.071 G_L1_ABSOLUTE: 2.184 G_L1_RELATIVE: 11.887 G_Regularizer: 0.000 validation_error: 20.640 +(epoch: 41, iters: 221920, time: 0.547, data: 0.000) G_L1: 16.713 G_L1_ABSOLUTE: 3.001 G_L1_RELATIVE: 13.712 G_Regularizer: 0.000 validation_error: 20.948 +(epoch: 41, iters: 223920, time: 0.538, data: 0.000) G_L1: 14.712 G_L1_ABSOLUTE: 2.756 G_L1_RELATIVE: 11.956 G_Regularizer: 0.000 validation_error: 20.969 +(epoch: 41, iters: 225920, time: 0.539, data: 0.000) G_L1: 15.197 G_L1_ABSOLUTE: 2.752 G_L1_RELATIVE: 12.445 G_Regularizer: 0.000 validation_error: 21.130 +(epoch: 41, iters: 227920, time: 0.533, data: 0.000) G_L1: 13.016 G_L1_ABSOLUTE: 2.374 G_L1_RELATIVE: 10.642 G_Regularizer: 0.000 validation_error: 20.963 +(epoch: 41, iters: 229920, time: 0.535, data: 0.000) G_L1: 14.113 G_L1_ABSOLUTE: 2.476 G_L1_RELATIVE: 11.637 G_Regularizer: 0.000 validation_error: 21.294 +(epoch: 41, iters: 231920, time: 0.532, data: 0.000) G_L1: 12.419 G_L1_ABSOLUTE: 2.596 G_L1_RELATIVE: 9.824 G_Regularizer: 0.000 validation_error: 20.974 
+(epoch: 41, iters: 233920, time: 0.547, data: 0.000) G_L1: 12.612 G_L1_ABSOLUTE: 2.595 G_L1_RELATIVE: 10.017 G_Regularizer: 0.000 validation_error: 20.909 +(epoch: 41, iters: 235920, time: 0.542, data: 0.000) G_L1: 12.278 G_L1_ABSOLUTE: 2.330 G_L1_RELATIVE: 9.948 G_Regularizer: 0.000 validation_error: 20.801 +(epoch: 41, iters: 237920, time: 0.548, data: 0.000) G_L1: 16.593 G_L1_ABSOLUTE: 2.698 G_L1_RELATIVE: 13.895 G_Regularizer: 0.000 validation_error: 20.644 +(epoch: 41, iters: 239920, time: 0.543, data: 0.000) G_L1: 15.663 G_L1_ABSOLUTE: 2.962 G_L1_RELATIVE: 12.701 G_Regularizer: 0.000 validation_error: 20.776 +(epoch: 41, iters: 241920, time: 0.541, data: 0.000) G_L1: 13.155 G_L1_ABSOLUTE: 2.092 G_L1_RELATIVE: 11.063 G_Regularizer: 0.000 validation_error: 20.825 +(epoch: 41, iters: 243920, time: 0.544, data: 0.000) G_L1: 17.743 G_L1_ABSOLUTE: 2.744 G_L1_RELATIVE: 14.999 G_Regularizer: 0.000 validation_error: 20.726 +(epoch: 41, iters: 245920, time: 0.540, data: 0.000) G_L1: 14.676 G_L1_ABSOLUTE: 3.065 G_L1_RELATIVE: 11.610 G_Regularizer: 0.000 validation_error: 20.962 +(epoch: 41, iters: 247920, time: 0.531, data: 0.000) G_L1: 15.601 G_L1_ABSOLUTE: 2.466 G_L1_RELATIVE: 13.135 G_Regularizer: 0.000 validation_error: 20.779 +(epoch: 41, iters: 249920, time: 0.535, data: 0.001) G_L1: 11.738 G_L1_ABSOLUTE: 2.413 G_L1_RELATIVE: 9.325 G_Regularizer: 0.000 validation_error: 20.633 +(epoch: 41, iters: 251920, time: 0.546, data: 0.000) G_L1: 13.309 G_L1_ABSOLUTE: 2.403 G_L1_RELATIVE: 10.906 G_Regularizer: 0.000 validation_error: 20.736 +(epoch: 41, iters: 253920, time: 0.547, data: 0.000) G_L1: 13.060 G_L1_ABSOLUTE: 2.286 G_L1_RELATIVE: 10.774 G_Regularizer: 0.000 validation_error: 20.934 +(epoch: 41, iters: 255920, time: 0.547, data: 0.000) G_L1: 12.577 G_L1_ABSOLUTE: 2.263 G_L1_RELATIVE: 10.313 G_Regularizer: 0.000 validation_error: 20.806 +(epoch: 41, iters: 257920, time: 0.538, data: 0.000) G_L1: 12.586 G_L1_ABSOLUTE: 2.592 G_L1_RELATIVE: 9.994 G_Regularizer: 0.000 validation_error: 20.743 +(epoch: 41, iters: 259920, time: 0.545, data: 0.000) G_L1: 14.196 G_L1_ABSOLUTE: 2.590 G_L1_RELATIVE: 11.606 G_Regularizer: 0.000 validation_error: 20.918 +(epoch: 41, iters: 261920, time: 0.546, data: 0.001) G_L1: 13.981 G_L1_ABSOLUTE: 2.486 G_L1_RELATIVE: 11.494 G_Regularizer: 0.000 validation_error: 20.965 +(epoch: 41, iters: 263920, time: 0.541, data: 0.000) G_L1: 15.267 G_L1_ABSOLUTE: 2.216 G_L1_RELATIVE: 13.050 G_Regularizer: 0.000 validation_error: 20.783 +(epoch: 41, iters: 265920, time: 0.535, data: 0.000) G_L1: 13.087 G_L1_ABSOLUTE: 2.185 G_L1_RELATIVE: 10.902 G_Regularizer: 0.000 validation_error: 20.737 +(epoch: 41, iters: 267920, time: 0.548, data: 0.000) G_L1: 15.162 G_L1_ABSOLUTE: 2.510 G_L1_RELATIVE: 12.652 G_Regularizer: 0.000 validation_error: 20.601 +(epoch: 41, iters: 269920, time: 0.545, data: 0.000) G_L1: 14.791 G_L1_ABSOLUTE: 2.368 G_L1_RELATIVE: 12.424 G_Regularizer: 0.000 validation_error: 20.777 +(epoch: 41, iters: 271920, time: 0.543, data: 0.000) G_L1: 14.261 G_L1_ABSOLUTE: 2.489 G_L1_RELATIVE: 11.773 G_Regularizer: 0.000 validation_error: 20.947 +(epoch: 41, iters: 273920, time: 0.540, data: 0.000) G_L1: 13.169 G_L1_ABSOLUTE: 2.254 G_L1_RELATIVE: 10.915 G_Regularizer: 0.000 validation_error: 20.943 +(epoch: 41, iters: 275920, time: 0.546, data: 0.000) G_L1: 15.720 G_L1_ABSOLUTE: 2.861 G_L1_RELATIVE: 12.859 G_Regularizer: 0.000 validation_error: 20.641 +(epoch: 41, iters: 277920, time: 0.544, data: 0.001) G_L1: 13.598 G_L1_ABSOLUTE: 2.333 G_L1_RELATIVE: 11.265 G_Regularizer: 
0.000 validation_error: 21.320 +(epoch: 41, iters: 279920, time: 0.546, data: 0.000) G_L1: 13.220 G_L1_ABSOLUTE: 2.482 G_L1_RELATIVE: 10.738 G_Regularizer: 0.000 validation_error: 21.036 +(epoch: 41, iters: 281920, time: 0.553, data: 0.000) G_L1: 11.818 G_L1_ABSOLUTE: 2.428 G_L1_RELATIVE: 9.391 G_Regularizer: 0.000 validation_error: 21.010 +(epoch: 41, iters: 283920, time: 0.543, data: 0.000) G_L1: 13.965 G_L1_ABSOLUTE: 2.436 G_L1_RELATIVE: 11.529 G_Regularizer: 0.000 validation_error: 20.989 +(epoch: 41, iters: 285920, time: 0.543, data: 0.000) G_L1: 14.259 G_L1_ABSOLUTE: 2.816 G_L1_RELATIVE: 11.443 G_Regularizer: 0.000 validation_error: 20.210 +(epoch: 41, iters: 287920, time: 0.542, data: 0.001) G_L1: 15.088 G_L1_ABSOLUTE: 2.583 G_L1_RELATIVE: 12.505 G_Regularizer: 0.000 validation_error: 20.523 +(epoch: 41, iters: 289920, time: 0.538, data: 0.001) G_L1: 14.560 G_L1_ABSOLUTE: 2.550 G_L1_RELATIVE: 12.010 G_Regularizer: 0.000 validation_error: 20.776 +(epoch: 41, iters: 291920, time: 0.530, data: 0.000) G_L1: 12.680 G_L1_ABSOLUTE: 2.665 G_L1_RELATIVE: 10.015 G_Regularizer: 0.000 validation_error: 21.195 +(epoch: 41, iters: 293920, time: 0.529, data: 0.000) G_L1: 14.786 G_L1_ABSOLUTE: 2.143 G_L1_RELATIVE: 12.643 G_Regularizer: 0.000 validation_error: 20.592 +(epoch: 41, iters: 295920, time: 0.528, data: 0.000) G_L1: 12.972 G_L1_ABSOLUTE: 2.631 G_L1_RELATIVE: 10.342 G_Regularizer: 0.000 validation_error: 20.831 +(epoch: 41, iters: 297920, time: 0.538, data: 0.001) G_L1: 12.231 G_L1_ABSOLUTE: 2.615 G_L1_RELATIVE: 9.616 G_Regularizer: 0.000 validation_error: 20.778 +(epoch: 41, iters: 299920, time: 0.531, data: 0.001) G_L1: 15.068 G_L1_ABSOLUTE: 3.068 G_L1_RELATIVE: 12.000 G_Regularizer: 0.000 validation_error: 20.842 +(epoch: 41, iters: 301920, time: 0.538, data: 0.000) G_L1: 12.908 G_L1_ABSOLUTE: 2.222 G_L1_RELATIVE: 10.686 G_Regularizer: 0.000 validation_error: 21.135 +(epoch: 42, iters: 1168, time: 0.531, data: 0.000) G_L1: 14.898 G_L1_ABSOLUTE: 3.260 G_L1_RELATIVE: 11.639 G_Regularizer: 0.000 validation_error: 21.053 +(epoch: 42, iters: 3168, time: 0.531, data: 0.001) G_L1: 15.221 G_L1_ABSOLUTE: 2.762 G_L1_RELATIVE: 12.459 G_Regularizer: 0.000 validation_error: 20.907 +(epoch: 42, iters: 5168, time: 0.533, data: 0.001) G_L1: 13.587 G_L1_ABSOLUTE: 2.573 G_L1_RELATIVE: 11.015 G_Regularizer: 0.000 validation_error: 20.834 +(epoch: 42, iters: 7168, time: 0.531, data: 0.001) G_L1: 13.430 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 10.976 G_Regularizer: 0.000 validation_error: 20.492 +(epoch: 42, iters: 9168, time: 0.537, data: 0.000) G_L1: 13.689 G_L1_ABSOLUTE: 2.412 G_L1_RELATIVE: 11.277 G_Regularizer: 0.000 validation_error: 21.145 +(epoch: 42, iters: 11168, time: 0.531, data: 0.000) G_L1: 13.307 G_L1_ABSOLUTE: 2.495 G_L1_RELATIVE: 10.811 G_Regularizer: 0.000 validation_error: 20.508 +(epoch: 42, iters: 13168, time: 0.531, data: 0.000) G_L1: 14.809 G_L1_ABSOLUTE: 2.673 G_L1_RELATIVE: 12.135 G_Regularizer: 0.000 validation_error: 20.649 +(epoch: 42, iters: 15168, time: 0.544, data: 0.000) G_L1: 12.463 G_L1_ABSOLUTE: 2.347 G_L1_RELATIVE: 10.116 G_Regularizer: 0.000 validation_error: 21.055 +(epoch: 42, iters: 17168, time: 0.532, data: 0.001) G_L1: 16.653 G_L1_ABSOLUTE: 2.919 G_L1_RELATIVE: 13.734 G_Regularizer: 0.000 validation_error: 20.716 +(epoch: 42, iters: 19168, time: 0.531, data: 0.001) G_L1: 13.070 G_L1_ABSOLUTE: 2.505 G_L1_RELATIVE: 10.565 G_Regularizer: 0.000 validation_error: 20.881 +(epoch: 42, iters: 21168, time: 0.529, data: 0.000) G_L1: 14.784 G_L1_ABSOLUTE: 2.793 G_L1_RELATIVE: 11.991 
G_Regularizer: 0.000 validation_error: 21.078 +(epoch: 42, iters: 23168, time: 0.527, data: 0.000) G_L1: 14.689 G_L1_ABSOLUTE: 2.784 G_L1_RELATIVE: 11.905 G_Regularizer: 0.000 validation_error: 20.825 +(epoch: 42, iters: 25168, time: 0.540, data: 0.000) G_L1: 15.421 G_L1_ABSOLUTE: 2.785 G_L1_RELATIVE: 12.636 G_Regularizer: 0.000 validation_error: 20.679 +(epoch: 42, iters: 27168, time: 0.533, data: 0.000) G_L1: 12.748 G_L1_ABSOLUTE: 2.213 G_L1_RELATIVE: 10.535 G_Regularizer: 0.000 validation_error: 20.655 +(epoch: 42, iters: 29168, time: 0.532, data: 0.000) G_L1: 12.323 G_L1_ABSOLUTE: 2.462 G_L1_RELATIVE: 9.861 G_Regularizer: 0.000 validation_error: 20.561 +(epoch: 42, iters: 31168, time: 0.541, data: 0.000) G_L1: 16.265 G_L1_ABSOLUTE: 2.633 G_L1_RELATIVE: 13.632 G_Regularizer: 0.000 validation_error: 20.547 +(epoch: 42, iters: 33168, time: 0.536, data: 0.000) G_L1: 15.132 G_L1_ABSOLUTE: 2.547 G_L1_RELATIVE: 12.585 G_Regularizer: 0.000 validation_error: 20.740 +(epoch: 42, iters: 35168, time: 0.535, data: 0.000) G_L1: 13.779 G_L1_ABSOLUTE: 2.440 G_L1_RELATIVE: 11.339 G_Regularizer: 0.000 validation_error: 21.196 +(epoch: 42, iters: 37168, time: 0.540, data: 0.001) G_L1: 14.525 G_L1_ABSOLUTE: 2.646 G_L1_RELATIVE: 11.879 G_Regularizer: 0.000 validation_error: 20.928 +(epoch: 42, iters: 39168, time: 0.526, data: 0.000) G_L1: 13.796 G_L1_ABSOLUTE: 2.499 G_L1_RELATIVE: 11.297 G_Regularizer: 0.000 validation_error: 20.857 +(epoch: 42, iters: 41168, time: 0.534, data: 0.000) G_L1: 14.898 G_L1_ABSOLUTE: 2.029 G_L1_RELATIVE: 12.869 G_Regularizer: 0.000 validation_error: 20.560 +(epoch: 42, iters: 43168, time: 0.532, data: 0.001) G_L1: 11.622 G_L1_ABSOLUTE: 2.598 G_L1_RELATIVE: 9.024 G_Regularizer: 0.000 validation_error: 20.952 +(epoch: 42, iters: 45168, time: 0.534, data: 0.000) G_L1: 12.213 G_L1_ABSOLUTE: 2.163 G_L1_RELATIVE: 10.050 G_Regularizer: 0.000 validation_error: 21.198 +(epoch: 42, iters: 47168, time: 0.529, data: 0.000) G_L1: 15.624 G_L1_ABSOLUTE: 2.482 G_L1_RELATIVE: 13.142 G_Regularizer: 0.000 validation_error: 20.831 +(epoch: 42, iters: 49168, time: 0.531, data: 0.000) G_L1: 16.454 G_L1_ABSOLUTE: 2.238 G_L1_RELATIVE: 14.216 G_Regularizer: 0.000 validation_error: 20.464 +(epoch: 42, iters: 51168, time: 0.537, data: 0.000) G_L1: 24.276 G_L1_ABSOLUTE: 2.892 G_L1_RELATIVE: 21.384 G_Regularizer: 0.000 validation_error: 20.800 +(epoch: 42, iters: 53168, time: 0.532, data: 0.000) G_L1: 15.067 G_L1_ABSOLUTE: 2.696 G_L1_RELATIVE: 12.371 G_Regularizer: 0.000 validation_error: 20.690 +(epoch: 42, iters: 55168, time: 0.536, data: 0.000) G_L1: 16.208 G_L1_ABSOLUTE: 2.264 G_L1_RELATIVE: 13.944 G_Regularizer: 0.000 validation_error: 20.967 +(epoch: 42, iters: 57168, time: 0.532, data: 0.000) G_L1: 14.603 G_L1_ABSOLUTE: 2.669 G_L1_RELATIVE: 11.933 G_Regularizer: 0.000 validation_error: 21.144 +(epoch: 42, iters: 59168, time: 0.531, data: 0.000) G_L1: 17.747 G_L1_ABSOLUTE: 2.259 G_L1_RELATIVE: 15.488 G_Regularizer: 0.000 validation_error: 20.725 +(epoch: 42, iters: 61168, time: 0.534, data: 0.001) G_L1: 12.610 G_L1_ABSOLUTE: 2.378 G_L1_RELATIVE: 10.232 G_Regularizer: 0.000 validation_error: 20.825 +(epoch: 42, iters: 63168, time: 0.531, data: 0.000) G_L1: 15.618 G_L1_ABSOLUTE: 2.413 G_L1_RELATIVE: 13.205 G_Regularizer: 0.000 validation_error: 20.886 +(epoch: 42, iters: 65168, time: 0.534, data: 0.000) G_L1: 13.746 G_L1_ABSOLUTE: 2.204 G_L1_RELATIVE: 11.542 G_Regularizer: 0.000 validation_error: 20.819 +(epoch: 42, iters: 67168, time: 0.533, data: 0.000) G_L1: 13.690 G_L1_ABSOLUTE: 2.490 
G_L1_RELATIVE: 11.201 G_Regularizer: 0.000 validation_error: 20.837 +(epoch: 42, iters: 69168, time: 0.532, data: 0.000) G_L1: 14.227 G_L1_ABSOLUTE: 2.462 G_L1_RELATIVE: 11.764 G_Regularizer: 0.000 validation_error: 20.709 +(epoch: 42, iters: 71168, time: 0.530, data: 0.000) G_L1: 11.916 G_L1_ABSOLUTE: 2.613 G_L1_RELATIVE: 9.303 G_Regularizer: 0.000 validation_error: 20.973 +(epoch: 42, iters: 73168, time: 0.543, data: 0.000) G_L1: 14.582 G_L1_ABSOLUTE: 2.620 G_L1_RELATIVE: 11.962 G_Regularizer: 0.000 validation_error: 21.435 +(epoch: 42, iters: 75168, time: 0.535, data: 0.000) G_L1: 11.818 G_L1_ABSOLUTE: 2.541 G_L1_RELATIVE: 9.277 G_Regularizer: 0.000 validation_error: 20.875 +(epoch: 42, iters: 77168, time: 0.530, data: 0.000) G_L1: 13.539 G_L1_ABSOLUTE: 2.305 G_L1_RELATIVE: 11.234 G_Regularizer: 0.000 validation_error: 20.995 +(epoch: 42, iters: 79168, time: 0.533, data: 0.000) G_L1: 13.848 G_L1_ABSOLUTE: 2.305 G_L1_RELATIVE: 11.543 G_Regularizer: 0.000 validation_error: 20.962 +(epoch: 42, iters: 81168, time: 0.530, data: 0.000) G_L1: 13.983 G_L1_ABSOLUTE: 2.908 G_L1_RELATIVE: 11.074 G_Regularizer: 0.000 validation_error: 20.759 +(epoch: 42, iters: 83168, time: 0.531, data: 0.000) G_L1: 14.572 G_L1_ABSOLUTE: 2.507 G_L1_RELATIVE: 12.066 G_Regularizer: 0.000 validation_error: 20.920 +(epoch: 42, iters: 85168, time: 0.534, data: 0.000) G_L1: 12.953 G_L1_ABSOLUTE: 2.362 G_L1_RELATIVE: 10.591 G_Regularizer: 0.000 validation_error: 20.667 +(epoch: 42, iters: 87168, time: 0.537, data: 0.001) G_L1: 17.711 G_L1_ABSOLUTE: 2.592 G_L1_RELATIVE: 15.119 G_Regularizer: 0.000 validation_error: 20.995 +(epoch: 42, iters: 89168, time: 0.532, data: 0.000) G_L1: 13.802 G_L1_ABSOLUTE: 2.610 G_L1_RELATIVE: 11.192 G_Regularizer: 0.000 validation_error: 20.903 +(epoch: 42, iters: 91168, time: 0.535, data: 0.000) G_L1: 11.532 G_L1_ABSOLUTE: 2.116 G_L1_RELATIVE: 9.416 G_Regularizer: 0.000 validation_error: 21.163 +(epoch: 42, iters: 93168, time: 0.539, data: 0.000) G_L1: 15.567 G_L1_ABSOLUTE: 2.933 G_L1_RELATIVE: 12.634 G_Regularizer: 0.000 validation_error: 20.611 +(epoch: 42, iters: 95168, time: 0.531, data: 0.001) G_L1: 15.912 G_L1_ABSOLUTE: 2.703 G_L1_RELATIVE: 13.210 G_Regularizer: 0.000 validation_error: 20.876 +(epoch: 42, iters: 97168, time: 0.535, data: 0.000) G_L1: 11.685 G_L1_ABSOLUTE: 2.273 G_L1_RELATIVE: 9.412 G_Regularizer: 0.000 validation_error: 20.996 +(epoch: 42, iters: 99168, time: 0.531, data: 0.000) G_L1: 15.493 G_L1_ABSOLUTE: 3.470 G_L1_RELATIVE: 12.023 G_Regularizer: 0.000 validation_error: 20.968 +(epoch: 42, iters: 101168, time: 0.528, data: 0.000) G_L1: 14.381 G_L1_ABSOLUTE: 2.266 G_L1_RELATIVE: 12.115 G_Regularizer: 0.000 validation_error: 20.898 +(epoch: 42, iters: 103168, time: 0.537, data: 0.000) G_L1: 11.017 G_L1_ABSOLUTE: 2.265 G_L1_RELATIVE: 8.752 G_Regularizer: 0.000 validation_error: 20.761 +(epoch: 42, iters: 105168, time: 0.532, data: 0.000) G_L1: 12.457 G_L1_ABSOLUTE: 2.651 G_L1_RELATIVE: 9.805 G_Regularizer: 0.000 validation_error: 21.221 +(epoch: 42, iters: 107168, time: 0.530, data: 0.000) G_L1: 13.843 G_L1_ABSOLUTE: 2.045 G_L1_RELATIVE: 11.798 G_Regularizer: 0.000 validation_error: 21.109 +(epoch: 42, iters: 109168, time: 0.529, data: 0.000) G_L1: 12.759 G_L1_ABSOLUTE: 2.396 G_L1_RELATIVE: 10.363 G_Regularizer: 0.000 validation_error: 20.935 +(epoch: 42, iters: 111168, time: 0.536, data: 0.001) G_L1: 13.539 G_L1_ABSOLUTE: 2.504 G_L1_RELATIVE: 11.035 G_Regularizer: 0.000 validation_error: 21.256 +(epoch: 42, iters: 113168, time: 0.543, data: 0.001) G_L1: 16.596 
G_L1_ABSOLUTE: 2.692 G_L1_RELATIVE: 13.904 G_Regularizer: 0.000 validation_error: 20.728 +(epoch: 42, iters: 115168, time: 0.535, data: 0.000) G_L1: 13.268 G_L1_ABSOLUTE: 2.834 G_L1_RELATIVE: 10.434 G_Regularizer: 0.000 validation_error: 21.127 +(epoch: 42, iters: 117168, time: 0.534, data: 0.000) G_L1: 15.633 G_L1_ABSOLUTE: 2.717 G_L1_RELATIVE: 12.916 G_Regularizer: 0.000 validation_error: 21.236 +(epoch: 42, iters: 119168, time: 0.536, data: 0.000) G_L1: 14.495 G_L1_ABSOLUTE: 2.701 G_L1_RELATIVE: 11.794 G_Regularizer: 0.000 validation_error: 20.746 +(epoch: 42, iters: 121168, time: 0.539, data: 0.000) G_L1: 14.744 G_L1_ABSOLUTE: 2.457 G_L1_RELATIVE: 12.287 G_Regularizer: 0.000 validation_error: 20.786 +(epoch: 42, iters: 123168, time: 0.536, data: 0.000) G_L1: 13.618 G_L1_ABSOLUTE: 2.255 G_L1_RELATIVE: 11.363 G_Regularizer: 0.000 validation_error: 20.840 +(epoch: 42, iters: 125168, time: 0.537, data: 0.000) G_L1: 15.296 G_L1_ABSOLUTE: 2.450 G_L1_RELATIVE: 12.846 G_Regularizer: 0.000 validation_error: 20.784 +(epoch: 42, iters: 127168, time: 0.539, data: 0.000) G_L1: 14.226 G_L1_ABSOLUTE: 3.105 G_L1_RELATIVE: 11.121 G_Regularizer: 0.000 validation_error: 21.070 +(epoch: 42, iters: 129168, time: 0.535, data: 0.000) G_L1: 12.424 G_L1_ABSOLUTE: 2.550 G_L1_RELATIVE: 9.873 G_Regularizer: 0.000 validation_error: 21.070 +(epoch: 42, iters: 131168, time: 0.535, data: 0.000) G_L1: 15.546 G_L1_ABSOLUTE: 2.198 G_L1_RELATIVE: 13.347 G_Regularizer: 0.000 validation_error: 20.971 +(epoch: 42, iters: 133168, time: 0.534, data: 0.000) G_L1: 11.841 G_L1_ABSOLUTE: 2.184 G_L1_RELATIVE: 9.657 G_Regularizer: 0.000 validation_error: 20.994 +(epoch: 42, iters: 135168, time: 0.541, data: 0.000) G_L1: 14.553 G_L1_ABSOLUTE: 2.785 G_L1_RELATIVE: 11.768 G_Regularizer: 0.000 validation_error: 20.985 +(epoch: 42, iters: 137168, time: 0.532, data: 0.000) G_L1: 12.794 G_L1_ABSOLUTE: 2.530 G_L1_RELATIVE: 10.265 G_Regularizer: 0.000 validation_error: 20.961 +(epoch: 42, iters: 139168, time: 0.532, data: 0.000) G_L1: 12.897 G_L1_ABSOLUTE: 2.615 G_L1_RELATIVE: 10.282 G_Regularizer: 0.000 validation_error: 20.951 +(epoch: 42, iters: 141168, time: 0.551, data: 0.000) G_L1: 16.315 G_L1_ABSOLUTE: 2.897 G_L1_RELATIVE: 13.418 G_Regularizer: 0.000 validation_error: 21.085 +(epoch: 42, iters: 143168, time: 0.580, data: 0.000) G_L1: 14.513 G_L1_ABSOLUTE: 2.161 G_L1_RELATIVE: 12.353 G_Regularizer: 0.000 validation_error: 21.194 +(epoch: 42, iters: 145168, time: 0.595, data: 0.000) G_L1: 14.164 G_L1_ABSOLUTE: 2.879 G_L1_RELATIVE: 11.285 G_Regularizer: 0.000 validation_error: 21.031 +(epoch: 42, iters: 147168, time: 0.580, data: 0.000) G_L1: 13.181 G_L1_ABSOLUTE: 2.047 G_L1_RELATIVE: 11.133 G_Regularizer: 0.000 validation_error: 20.861 +(epoch: 42, iters: 149168, time: 0.551, data: 0.000) G_L1: 15.500 G_L1_ABSOLUTE: 2.612 G_L1_RELATIVE: 12.888 G_Regularizer: 0.000 validation_error: 20.760 +(epoch: 42, iters: 151168, time: 0.565, data: 0.000) G_L1: 13.768 G_L1_ABSOLUTE: 2.517 G_L1_RELATIVE: 11.250 G_Regularizer: 0.000 validation_error: 20.880 +(epoch: 42, iters: 153168, time: 0.589, data: 0.000) G_L1: 14.585 G_L1_ABSOLUTE: 2.925 G_L1_RELATIVE: 11.660 G_Regularizer: 0.000 validation_error: 21.302 +(epoch: 42, iters: 155168, time: 0.567, data: 0.000) G_L1: 14.202 G_L1_ABSOLUTE: 2.384 G_L1_RELATIVE: 11.818 G_Regularizer: 0.000 validation_error: 20.825 +(epoch: 42, iters: 157168, time: 0.581, data: 0.000) G_L1: 14.411 G_L1_ABSOLUTE: 2.216 G_L1_RELATIVE: 12.195 G_Regularizer: 0.000 validation_error: 20.796 +(epoch: 42, iters: 159168, time: 
0.531, data: 0.000) G_L1: 14.604 G_L1_ABSOLUTE: 2.630 G_L1_RELATIVE: 11.974 G_Regularizer: 0.000 validation_error: 20.835 +(epoch: 42, iters: 161168, time: 0.527, data: 0.000) G_L1: 16.121 G_L1_ABSOLUTE: 2.871 G_L1_RELATIVE: 13.250 G_Regularizer: 0.000 validation_error: 20.929 +(epoch: 42, iters: 163168, time: 0.539, data: 0.000) G_L1: 15.832 G_L1_ABSOLUTE: 2.862 G_L1_RELATIVE: 12.969 G_Regularizer: 0.000 validation_error: 20.508 +(epoch: 42, iters: 165168, time: 0.534, data: 0.000) G_L1: 14.615 G_L1_ABSOLUTE: 2.085 G_L1_RELATIVE: 12.530 G_Regularizer: 0.000 validation_error: 21.125 +(epoch: 42, iters: 167168, time: 0.540, data: 0.000) G_L1: 13.989 G_L1_ABSOLUTE: 2.442 G_L1_RELATIVE: 11.546 G_Regularizer: 0.000 validation_error: 20.973 +(epoch: 42, iters: 169168, time: 0.533, data: 0.000) G_L1: 15.605 G_L1_ABSOLUTE: 2.485 G_L1_RELATIVE: 13.120 G_Regularizer: 0.000 validation_error: 20.446 +(epoch: 42, iters: 171168, time: 0.534, data: 0.000) G_L1: 13.519 G_L1_ABSOLUTE: 2.569 G_L1_RELATIVE: 10.950 G_Regularizer: 0.000 validation_error: 20.813 +(epoch: 42, iters: 173168, time: 0.536, data: 0.001) G_L1: 15.572 G_L1_ABSOLUTE: 2.829 G_L1_RELATIVE: 12.744 G_Regularizer: 0.000 validation_error: 20.903 +(epoch: 42, iters: 175168, time: 0.528, data: 0.000) G_L1: 12.501 G_L1_ABSOLUTE: 2.428 G_L1_RELATIVE: 10.073 G_Regularizer: 0.000 validation_error: 20.999 +(epoch: 42, iters: 177168, time: 0.539, data: 0.000) G_L1: 14.300 G_L1_ABSOLUTE: 2.469 G_L1_RELATIVE: 11.831 G_Regularizer: 0.000 validation_error: 20.817 +(epoch: 42, iters: 179168, time: 0.527, data: 0.001) G_L1: 14.476 G_L1_ABSOLUTE: 2.175 G_L1_RELATIVE: 12.301 G_Regularizer: 0.000 validation_error: 21.133 +(epoch: 42, iters: 181168, time: 0.539, data: 0.000) G_L1: 13.121 G_L1_ABSOLUTE: 2.039 G_L1_RELATIVE: 11.082 G_Regularizer: 0.000 validation_error: 20.722 +(epoch: 42, iters: 183168, time: 0.533, data: 0.001) G_L1: 12.156 G_L1_ABSOLUTE: 2.192 G_L1_RELATIVE: 9.964 G_Regularizer: 0.000 validation_error: 21.007 +(epoch: 42, iters: 185168, time: 0.537, data: 0.001) G_L1: 13.700 G_L1_ABSOLUTE: 2.700 G_L1_RELATIVE: 11.000 G_Regularizer: 0.000 validation_error: 21.193 +(epoch: 42, iters: 187168, time: 0.538, data: 0.000) G_L1: 13.698 G_L1_ABSOLUTE: 2.582 G_L1_RELATIVE: 11.115 G_Regularizer: 0.000 validation_error: 20.727 +(epoch: 42, iters: 189168, time: 0.535, data: 0.000) G_L1: 13.041 G_L1_ABSOLUTE: 2.527 G_L1_RELATIVE: 10.513 G_Regularizer: 0.000 validation_error: 21.148 +(epoch: 42, iters: 191168, time: 0.534, data: 0.000) G_L1: 14.259 G_L1_ABSOLUTE: 2.240 G_L1_RELATIVE: 12.018 G_Regularizer: 0.000 validation_error: 20.808 +(epoch: 42, iters: 193168, time: 0.530, data: 0.000) G_L1: 13.404 G_L1_ABSOLUTE: 2.283 G_L1_RELATIVE: 11.121 G_Regularizer: 0.000 validation_error: 20.780 +(epoch: 42, iters: 195168, time: 0.531, data: 0.000) G_L1: 15.108 G_L1_ABSOLUTE: 3.216 G_L1_RELATIVE: 11.892 G_Regularizer: 0.000 validation_error: 20.852 +(epoch: 42, iters: 197168, time: 0.532, data: 0.001) G_L1: 17.183 G_L1_ABSOLUTE: 2.516 G_L1_RELATIVE: 14.668 G_Regularizer: 0.000 validation_error: 20.949 +(epoch: 42, iters: 199168, time: 0.536, data: 0.000) G_L1: 13.602 G_L1_ABSOLUTE: 2.657 G_L1_RELATIVE: 10.945 G_Regularizer: 0.000 validation_error: 20.884 +(epoch: 42, iters: 201168, time: 0.531, data: 0.000) G_L1: 13.978 G_L1_ABSOLUTE: 2.383 G_L1_RELATIVE: 11.595 G_Regularizer: 0.000 validation_error: 20.651 +(epoch: 42, iters: 203168, time: 0.530, data: 0.000) G_L1: 17.911 G_L1_ABSOLUTE: 2.929 G_L1_RELATIVE: 14.982 G_Regularizer: 0.000 validation_error: 20.984 
+(epoch: 42, iters: 205168, time: 0.533, data: 0.000) G_L1: 14.804 G_L1_ABSOLUTE: 2.345 G_L1_RELATIVE: 12.459 G_Regularizer: 0.000 validation_error: 21.128 +(epoch: 42, iters: 207168, time: 0.538, data: 0.000) G_L1: 13.170 G_L1_ABSOLUTE: 2.279 G_L1_RELATIVE: 10.891 G_Regularizer: 0.000 validation_error: 20.812 +(epoch: 42, iters: 209168, time: 0.537, data: 0.000) G_L1: 13.825 G_L1_ABSOLUTE: 2.822 G_L1_RELATIVE: 11.003 G_Regularizer: 0.000 validation_error: 20.953 +(epoch: 42, iters: 211168, time: 0.533, data: 0.000) G_L1: 15.966 G_L1_ABSOLUTE: 2.185 G_L1_RELATIVE: 13.781 G_Regularizer: 0.000 validation_error: 20.722 +(epoch: 42, iters: 213168, time: 0.531, data: 0.000) G_L1: 14.473 G_L1_ABSOLUTE: 3.027 G_L1_RELATIVE: 11.446 G_Regularizer: 0.000 validation_error: 20.755 +(epoch: 42, iters: 215168, time: 0.536, data: 0.001) G_L1: 14.682 G_L1_ABSOLUTE: 2.321 G_L1_RELATIVE: 12.360 G_Regularizer: 0.000 validation_error: 20.570 +(epoch: 42, iters: 217168, time: 0.531, data: 0.001) G_L1: 14.106 G_L1_ABSOLUTE: 2.549 G_L1_RELATIVE: 11.556 G_Regularizer: 0.000 validation_error: 21.158 +(epoch: 42, iters: 219168, time: 0.538, data: 0.001) G_L1: 13.832 G_L1_ABSOLUTE: 2.192 G_L1_RELATIVE: 11.641 G_Regularizer: 0.000 validation_error: 20.942 +(epoch: 42, iters: 221168, time: 0.539, data: 0.000) G_L1: 13.313 G_L1_ABSOLUTE: 2.314 G_L1_RELATIVE: 10.999 G_Regularizer: 0.000 validation_error: 20.904 +(epoch: 42, iters: 223168, time: 0.530, data: 0.001) G_L1: 15.922 G_L1_ABSOLUTE: 2.615 G_L1_RELATIVE: 13.308 G_Regularizer: 0.000 validation_error: 20.721 +(epoch: 42, iters: 225168, time: 0.536, data: 0.000) G_L1: 14.113 G_L1_ABSOLUTE: 2.474 G_L1_RELATIVE: 11.639 G_Regularizer: 0.000 validation_error: 20.898 +(epoch: 42, iters: 227168, time: 0.528, data: 0.001) G_L1: 12.425 G_L1_ABSOLUTE: 2.467 G_L1_RELATIVE: 9.958 G_Regularizer: 0.000 validation_error: 21.524 +(epoch: 42, iters: 229168, time: 0.542, data: 0.000) G_L1: 12.262 G_L1_ABSOLUTE: 2.890 G_L1_RELATIVE: 9.372 G_Regularizer: 0.000 validation_error: 21.311 +(epoch: 42, iters: 231168, time: 0.530, data: 0.000) G_L1: 13.269 G_L1_ABSOLUTE: 2.230 G_L1_RELATIVE: 11.039 G_Regularizer: 0.000 validation_error: 20.703 +(epoch: 42, iters: 233168, time: 0.542, data: 0.000) G_L1: 12.916 G_L1_ABSOLUTE: 1.891 G_L1_RELATIVE: 11.025 G_Regularizer: 0.000 validation_error: 20.719 +(epoch: 42, iters: 235168, time: 0.536, data: 0.000) G_L1: 11.145 G_L1_ABSOLUTE: 2.540 G_L1_RELATIVE: 8.606 G_Regularizer: 0.000 validation_error: 21.202 +(epoch: 42, iters: 237168, time: 0.539, data: 0.001) G_L1: 14.368 G_L1_ABSOLUTE: 2.235 G_L1_RELATIVE: 12.134 G_Regularizer: 0.000 validation_error: 21.124 +(epoch: 42, iters: 239168, time: 0.536, data: 0.000) G_L1: 18.056 G_L1_ABSOLUTE: 2.500 G_L1_RELATIVE: 15.556 G_Regularizer: 0.000 validation_error: 21.165 +(epoch: 42, iters: 241168, time: 0.534, data: 0.001) G_L1: 15.834 G_L1_ABSOLUTE: 2.541 G_L1_RELATIVE: 13.292 G_Regularizer: 0.000 validation_error: 20.954 +(epoch: 42, iters: 243168, time: 0.550, data: 0.000) G_L1: 12.678 G_L1_ABSOLUTE: 2.139 G_L1_RELATIVE: 10.538 G_Regularizer: 0.000 validation_error: 20.696 +(epoch: 42, iters: 245168, time: 0.553, data: 0.000) G_L1: 11.597 G_L1_ABSOLUTE: 2.504 G_L1_RELATIVE: 9.093 G_Regularizer: 0.000 validation_error: 20.772 +(epoch: 42, iters: 247168, time: 0.556, data: 0.000) G_L1: 13.666 G_L1_ABSOLUTE: 2.666 G_L1_RELATIVE: 10.999 G_Regularizer: 0.000 validation_error: 21.202 +(epoch: 42, iters: 249168, time: 0.536, data: 0.000) G_L1: 12.560 G_L1_ABSOLUTE: 2.262 G_L1_RELATIVE: 10.298 G_Regularizer: 
0.000 validation_error: 20.747 +(epoch: 42, iters: 251168, time: 0.552, data: 0.000) G_L1: 17.271 G_L1_ABSOLUTE: 2.838 G_L1_RELATIVE: 14.433 G_Regularizer: 0.000 validation_error: 21.006 +(epoch: 42, iters: 253168, time: 0.555, data: 0.000) G_L1: 13.828 G_L1_ABSOLUTE: 2.237 G_L1_RELATIVE: 11.591 G_Regularizer: 0.000 validation_error: 21.268 +(epoch: 42, iters: 255168, time: 0.552, data: 0.000) G_L1: 14.657 G_L1_ABSOLUTE: 2.264 G_L1_RELATIVE: 12.393 G_Regularizer: 0.000 validation_error: 21.155 +(epoch: 42, iters: 257168, time: 0.558, data: 0.000) G_L1: 12.840 G_L1_ABSOLUTE: 2.229 G_L1_RELATIVE: 10.611 G_Regularizer: 0.000 validation_error: 21.064 +(epoch: 42, iters: 259168, time: 0.550, data: 0.000) G_L1: 14.897 G_L1_ABSOLUTE: 2.657 G_L1_RELATIVE: 12.239 G_Regularizer: 0.000 validation_error: 21.428 +(epoch: 42, iters: 261168, time: 0.547, data: 0.000) G_L1: 12.898 G_L1_ABSOLUTE: 2.483 G_L1_RELATIVE: 10.415 G_Regularizer: 0.000 validation_error: 21.192 +(epoch: 42, iters: 263168, time: 0.539, data: 0.000) G_L1: 17.170 G_L1_ABSOLUTE: 3.259 G_L1_RELATIVE: 13.911 G_Regularizer: 0.000 validation_error: 20.830 +(epoch: 42, iters: 265168, time: 0.540, data: 0.000) G_L1: 16.443 G_L1_ABSOLUTE: 2.354 G_L1_RELATIVE: 14.088 G_Regularizer: 0.000 validation_error: 21.057 +(epoch: 42, iters: 267168, time: 0.546, data: 0.000) G_L1: 12.907 G_L1_ABSOLUTE: 2.676 G_L1_RELATIVE: 10.230 G_Regularizer: 0.000 validation_error: 20.960 +(epoch: 42, iters: 269168, time: 0.547, data: 0.000) G_L1: 12.705 G_L1_ABSOLUTE: 2.260 G_L1_RELATIVE: 10.446 G_Regularizer: 0.000 validation_error: 20.736 +(epoch: 42, iters: 271168, time: 0.551, data: 0.000) G_L1: 15.396 G_L1_ABSOLUTE: 2.577 G_L1_RELATIVE: 12.819 G_Regularizer: 0.000 validation_error: 21.166 +(epoch: 42, iters: 273168, time: 0.545, data: 0.000) G_L1: 14.342 G_L1_ABSOLUTE: 2.358 G_L1_RELATIVE: 11.984 G_Regularizer: 0.000 validation_error: 20.950 +(epoch: 42, iters: 275168, time: 0.540, data: 0.000) G_L1: 14.749 G_L1_ABSOLUTE: 2.658 G_L1_RELATIVE: 12.092 G_Regularizer: 0.000 validation_error: 21.155 +(epoch: 42, iters: 277168, time: 0.547, data: 0.000) G_L1: 11.636 G_L1_ABSOLUTE: 2.691 G_L1_RELATIVE: 8.945 G_Regularizer: 0.000 validation_error: 20.805 +(epoch: 42, iters: 279168, time: 0.553, data: 0.000) G_L1: 12.489 G_L1_ABSOLUTE: 2.148 G_L1_RELATIVE: 10.341 G_Regularizer: 0.000 validation_error: 20.813 +(epoch: 42, iters: 281168, time: 0.555, data: 0.000) G_L1: 11.346 G_L1_ABSOLUTE: 2.217 G_L1_RELATIVE: 9.129 G_Regularizer: 0.000 validation_error: 20.908 +(epoch: 42, iters: 283168, time: 0.542, data: 0.000) G_L1: 13.733 G_L1_ABSOLUTE: 2.235 G_L1_RELATIVE: 11.498 G_Regularizer: 0.000 validation_error: 20.760 +(epoch: 42, iters: 285168, time: 0.534, data: 0.000) G_L1: 13.730 G_L1_ABSOLUTE: 2.563 G_L1_RELATIVE: 11.167 G_Regularizer: 0.000 validation_error: 20.671 +(epoch: 42, iters: 287168, time: 0.552, data: 0.000) G_L1: 12.243 G_L1_ABSOLUTE: 2.290 G_L1_RELATIVE: 9.954 G_Regularizer: 0.000 validation_error: 21.153 +(epoch: 42, iters: 289168, time: 0.561, data: 0.001) G_L1: 16.455 G_L1_ABSOLUTE: 3.102 G_L1_RELATIVE: 13.352 G_Regularizer: 0.000 validation_error: 20.975 +(epoch: 42, iters: 291168, time: 0.548, data: 0.000) G_L1: 14.505 G_L1_ABSOLUTE: 2.083 G_L1_RELATIVE: 12.422 G_Regularizer: 0.000 validation_error: 20.984 +(epoch: 42, iters: 293168, time: 0.548, data: 0.000) G_L1: 12.925 G_L1_ABSOLUTE: 1.966 G_L1_RELATIVE: 10.960 G_Regularizer: 0.000 validation_error: 20.884 +(epoch: 42, iters: 295168, time: 0.553, data: 0.000) G_L1: 14.089 G_L1_ABSOLUTE: 2.601 
G_L1_RELATIVE: 11.488 G_Regularizer: 0.000 validation_error: 21.310 +(epoch: 42, iters: 297168, time: 0.550, data: 0.000) G_L1: 14.222 G_L1_ABSOLUTE: 2.442 G_L1_RELATIVE: 11.780 G_Regularizer: 0.000 validation_error: 20.751 +(epoch: 42, iters: 299168, time: 0.536, data: 0.000) G_L1: 16.529 G_L1_ABSOLUTE: 2.592 G_L1_RELATIVE: 13.937 G_Regularizer: 0.000 validation_error: 20.706 +(epoch: 42, iters: 301168, time: 0.521, data: 0.000) G_L1: 14.893 G_L1_ABSOLUTE: 3.190 G_L1_RELATIVE: 11.703 G_Regularizer: 0.000 validation_error: 20.916 +(epoch: 43, iters: 416, time: 0.552, data: 0.000) G_L1: 15.790 G_L1_ABSOLUTE: 2.907 G_L1_RELATIVE: 12.883 G_Regularizer: 0.000 validation_error: 21.137 +(epoch: 43, iters: 2416, time: 0.550, data: 0.000) G_L1: 13.883 G_L1_ABSOLUTE: 2.260 G_L1_RELATIVE: 11.623 G_Regularizer: 0.000 validation_error: 21.040 +(epoch: 43, iters: 4416, time: 0.557, data: 0.000) G_L1: 14.368 G_L1_ABSOLUTE: 2.677 G_L1_RELATIVE: 11.691 G_Regularizer: 0.000 validation_error: 21.077 +(epoch: 43, iters: 6416, time: 0.536, data: 0.001) G_L1: 15.613 G_L1_ABSOLUTE: 2.744 G_L1_RELATIVE: 12.869 G_Regularizer: 0.000 validation_error: 21.251 +(epoch: 43, iters: 8416, time: 0.544, data: 0.000) G_L1: 12.993 G_L1_ABSOLUTE: 2.180 G_L1_RELATIVE: 10.812 G_Regularizer: 0.000 validation_error: 20.759 +(epoch: 43, iters: 10416, time: 0.552, data: 0.000) G_L1: 15.451 G_L1_ABSOLUTE: 2.749 G_L1_RELATIVE: 12.702 G_Regularizer: 0.000 validation_error: 21.103 +(epoch: 43, iters: 12416, time: 0.546, data: 0.001) G_L1: 13.129 G_L1_ABSOLUTE: 2.516 G_L1_RELATIVE: 10.612 G_Regularizer: 0.000 validation_error: 21.085 +(epoch: 43, iters: 14416, time: 0.557, data: 0.000) G_L1: 12.447 G_L1_ABSOLUTE: 2.219 G_L1_RELATIVE: 10.227 G_Regularizer: 0.000 validation_error: 20.818 +(epoch: 43, iters: 16416, time: 0.545, data: 0.000) G_L1: 14.548 G_L1_ABSOLUTE: 2.508 G_L1_RELATIVE: 12.040 G_Regularizer: 0.000 validation_error: 21.148 +(epoch: 43, iters: 18416, time: 0.551, data: 0.000) G_L1: 14.733 G_L1_ABSOLUTE: 2.343 G_L1_RELATIVE: 12.390 G_Regularizer: 0.000 validation_error: 20.817 +(epoch: 43, iters: 20416, time: 0.551, data: 0.000) G_L1: 12.578 G_L1_ABSOLUTE: 2.553 G_L1_RELATIVE: 10.025 G_Regularizer: 0.000 validation_error: 20.785 +(epoch: 43, iters: 22416, time: 0.560, data: 0.000) G_L1: 13.393 G_L1_ABSOLUTE: 2.407 G_L1_RELATIVE: 10.986 G_Regularizer: 0.000 validation_error: 20.797 +(epoch: 43, iters: 24416, time: 0.553, data: 0.000) G_L1: 14.071 G_L1_ABSOLUTE: 2.461 G_L1_RELATIVE: 11.610 G_Regularizer: 0.000 validation_error: 20.981 +(epoch: 43, iters: 26416, time: 0.551, data: 0.000) G_L1: 14.614 G_L1_ABSOLUTE: 2.235 G_L1_RELATIVE: 12.379 G_Regularizer: 0.000 validation_error: 20.756 +(epoch: 43, iters: 28416, time: 0.544, data: 0.000) G_L1: 11.688 G_L1_ABSOLUTE: 2.794 G_L1_RELATIVE: 8.894 G_Regularizer: 0.000 validation_error: 20.865 +(epoch: 43, iters: 30416, time: 0.554, data: 0.000) G_L1: 13.910 G_L1_ABSOLUTE: 2.938 G_L1_RELATIVE: 10.972 G_Regularizer: 0.000 validation_error: 20.905 +(epoch: 43, iters: 32416, time: 0.557, data: 0.000) G_L1: 11.773 G_L1_ABSOLUTE: 2.521 G_L1_RELATIVE: 9.252 G_Regularizer: 0.000 validation_error: 20.981 +(epoch: 43, iters: 34416, time: 0.549, data: 0.000) G_L1: 14.544 G_L1_ABSOLUTE: 2.487 G_L1_RELATIVE: 12.057 G_Regularizer: 0.000 validation_error: 20.952 +(epoch: 43, iters: 36416, time: 0.553, data: 0.000) G_L1: 13.890 G_L1_ABSOLUTE: 2.285 G_L1_RELATIVE: 11.605 G_Regularizer: 0.000 validation_error: 20.773 +(epoch: 43, iters: 38416, time: 0.556, data: 0.000) G_L1: 13.093 G_L1_ABSOLUTE: 
2.136 G_L1_RELATIVE: 10.957 G_Regularizer: 0.000 validation_error: 20.991
+(epoch: 43, iters: 40416, time: 0.567, data: 0.001) G_L1: 10.620 G_L1_ABSOLUTE: 2.076 G_L1_RELATIVE: 8.545 G_Regularizer: 0.000 validation_error: 20.801
+(epoch: 43, iters: 42416, time: 0.548, data: 0.000) G_L1: 11.518 G_L1_ABSOLUTE: 2.117 G_L1_RELATIVE: 9.401 G_Regularizer: 0.000 validation_error: 20.633
+(epoch: 43, iters: 44416, time: 0.549, data: 0.000) G_L1: 15.369 G_L1_ABSOLUTE: 2.811 G_L1_RELATIVE: 12.558 G_Regularizer: 0.000 validation_error: 20.964
+(epoch: 43, iters: 46416, time: 0.553, data: 0.000) G_L1: 11.842 G_L1_ABSOLUTE: 2.211 G_L1_RELATIVE: 9.631 G_Regularizer: 0.000 validation_error: 21.389
+(epoch: 43, iters: 48416, time: 0.565, data: 0.000) G_L1: 19.041 G_L1_ABSOLUTE: 2.572 G_L1_RELATIVE: 16.469 G_Regularizer: 0.000 validation_error: 20.498
+(epoch: 43, iters: 50416, time: 0.552, data: 0.000) G_L1: 15.270 G_L1_ABSOLUTE: 2.345 G_L1_RELATIVE: 12.925 G_Regularizer: 0.000 validation_error: 20.683
+(epoch: 43, iters: 52416, time: 0.550, data: 0.000) G_L1: 13.026 G_L1_ABSOLUTE: 2.055 G_L1_RELATIVE: 10.971 G_Regularizer: 0.000 validation_error: 20.644
+(epoch: 43, iters: 54416, time: 0.551, data: 0.000) G_L1: 12.111 G_L1_ABSOLUTE: 2.397 G_L1_RELATIVE: 9.714 G_Regularizer: 0.000 validation_error: 20.754
+(epoch: 43, iters: 56416, time: 0.554, data: 0.000) G_L1: 12.857 G_L1_ABSOLUTE: 2.255 G_L1_RELATIVE: 10.603 G_Regularizer: 0.000 validation_error: 20.571
+(epoch: 43, iters: 58416, time: 0.549, data: 0.000) G_L1: 15.180 G_L1_ABSOLUTE: 2.244 G_L1_RELATIVE: 12.935 G_Regularizer: 0.000 validation_error: 21.345
+(epoch: 43, iters: 60416, time: 0.541, data: 0.000) G_L1: 13.603 G_L1_ABSOLUTE: 2.349 G_L1_RELATIVE: 11.254 G_Regularizer: 0.000 validation_error: 21.151
+(epoch: 43, iters: 62416, time: 0.541, data: 0.000) G_L1: 12.914 G_L1_ABSOLUTE: 2.824 G_L1_RELATIVE: 10.089 G_Regularizer: 0.000 validation_error: 21.068
+(epoch: 43, iters: 64416, time: 0.545, data: 0.000) G_L1: 14.504 G_L1_ABSOLUTE: 2.713 G_L1_RELATIVE: 11.791 G_Regularizer: 0.000 validation_error: 21.090
+(epoch: 43, iters: 66416, time: 0.537, data: 0.000) G_L1: 13.085 G_L1_ABSOLUTE: 2.727 G_L1_RELATIVE: 10.359 G_Regularizer: 0.000 validation_error: 20.964
+(epoch: 43, iters: 68416, time: 0.559, data: 0.000) G_L1: 15.026 G_L1_ABSOLUTE: 2.297 G_L1_RELATIVE: 12.729 G_Regularizer: 0.000 validation_error: 20.887
+(epoch: 43, iters: 70416, time: 0.563, data: 0.000) G_L1: 12.017 G_L1_ABSOLUTE: 2.708 G_L1_RELATIVE: 9.309 G_Regularizer: 0.000 validation_error: 20.901
+(epoch: 43, iters: 72416, time: 0.558, data: 0.000) G_L1: 16.321 G_L1_ABSOLUTE: 2.968 G_L1_RELATIVE: 13.353 G_Regularizer: 0.000 validation_error: 20.992
+(epoch: 43, iters: 74416, time: 0.554, data: 0.000) G_L1: 13.189 G_L1_ABSOLUTE: 2.459 G_L1_RELATIVE: 10.730 G_Regularizer: 0.000 validation_error: 21.219
+(epoch: 43, iters: 76416, time: 0.553, data: 0.000) G_L1: 13.080 G_L1_ABSOLUTE: 2.137 G_L1_RELATIVE: 10.943 G_Regularizer: 0.000 validation_error: 20.780
+(epoch: 43, iters: 78416, time: 0.561, data: 0.000) G_L1: 12.084 G_L1_ABSOLUTE: 2.191 G_L1_RELATIVE: 9.893 G_Regularizer: 0.000 validation_error: 20.711
+(epoch: 43, iters: 80416, time: 0.556, data: 0.000) G_L1: 13.273 G_L1_ABSOLUTE: 2.293 G_L1_RELATIVE: 10.980 G_Regularizer: 0.000 validation_error: 20.906
+(epoch: 43, iters: 82416, time: 0.544, data: 0.000) G_L1: 12.943 G_L1_ABSOLUTE: 2.702 G_L1_RELATIVE: 10.241 G_Regularizer: 0.000 validation_error: 20.673
+(epoch: 43, iters: 84416, time: 0.562, data: 0.000) G_L1: 16.056 G_L1_ABSOLUTE: 2.885 G_L1_RELATIVE: 13.171 G_Regularizer: 0.000 validation_error: 20.980
+(epoch: 43, iters: 86416, time: 0.539, data: 0.000) G_L1: 14.271 G_L1_ABSOLUTE: 2.492 G_L1_RELATIVE: 11.778 G_Regularizer: 0.000 validation_error: 20.962
+(epoch: 43, iters: 88416, time: 0.564, data: 0.000) G_L1: 12.287 G_L1_ABSOLUTE: 2.452 G_L1_RELATIVE: 9.835 G_Regularizer: 0.000 validation_error: 20.588
+(epoch: 43, iters: 90416, time: 0.552, data: 0.001) G_L1: 14.875 G_L1_ABSOLUTE: 2.335 G_L1_RELATIVE: 12.540 G_Regularizer: 0.000 validation_error: 20.921
+(epoch: 43, iters: 92416, time: 0.544, data: 0.000) G_L1: 13.125 G_L1_ABSOLUTE: 3.086 G_L1_RELATIVE: 10.040 G_Regularizer: 0.000 validation_error: 21.058
+(epoch: 43, iters: 94416, time: 0.559, data: 0.000) G_L1: 14.287 G_L1_ABSOLUTE: 2.791 G_L1_RELATIVE: 11.496 G_Regularizer: 0.000 validation_error: 21.029
+(epoch: 43, iters: 96416, time: 0.565, data: 0.001) G_L1: 14.310 G_L1_ABSOLUTE: 2.790 G_L1_RELATIVE: 11.520 G_Regularizer: 0.000 validation_error: 21.033
+(epoch: 43, iters: 98416, time: 0.569, data: 0.000) G_L1: 12.250 G_L1_ABSOLUTE: 2.536 G_L1_RELATIVE: 9.714 G_Regularizer: 0.000 validation_error: 21.041
+(epoch: 43, iters: 100416, time: 0.533, data: 0.000) G_L1: 16.172 G_L1_ABSOLUTE: 3.402 G_L1_RELATIVE: 12.770 G_Regularizer: 0.000 validation_error: 20.808
+(epoch: 43, iters: 102416, time: 0.556, data: 0.000) G_L1: 12.235 G_L1_ABSOLUTE: 2.153 G_L1_RELATIVE: 10.082 G_Regularizer: 0.000 validation_error: 20.840
+(epoch: 43, iters: 104416, time: 0.543, data: 0.000) G_L1: 14.404 G_L1_ABSOLUTE: 2.691 G_L1_RELATIVE: 11.713 G_Regularizer: 0.000 validation_error: 20.984
+(epoch: 43, iters: 106416, time: 0.543, data: 0.000) G_L1: 11.852 G_L1_ABSOLUTE: 2.326 G_L1_RELATIVE: 9.525 G_Regularizer: 0.000 validation_error: 20.929
+(epoch: 43, iters: 108416, time: 0.541, data: 0.000) G_L1: 14.888 G_L1_ABSOLUTE: 2.910 G_L1_RELATIVE: 11.977 G_Regularizer: 0.000 validation_error: 20.951
+(epoch: 43, iters: 110416, time: 0.550, data: 0.000) G_L1: 16.410 G_L1_ABSOLUTE: 3.048 G_L1_RELATIVE: 13.362 G_Regularizer: 0.000 validation_error: 20.909
+(epoch: 43, iters: 112416, time: 0.555, data: 0.000) G_L1: 14.724 G_L1_ABSOLUTE: 2.294 G_L1_RELATIVE: 12.431 G_Regularizer: 0.000 validation_error: 20.681
+(epoch: 43, iters: 114416, time: 0.555, data: 0.000) G_L1: 16.235 G_L1_ABSOLUTE: 2.478 G_L1_RELATIVE: 13.758 G_Regularizer: 0.000 validation_error: 20.458
+(epoch: 43, iters: 116416, time: 0.552, data: 0.000) G_L1: 13.759 G_L1_ABSOLUTE: 2.541 G_L1_RELATIVE: 11.218 G_Regularizer: 0.000 validation_error: 20.622
+(epoch: 43, iters: 118416, time: 0.558, data: 0.000) G_L1: 15.833 G_L1_ABSOLUTE: 2.535 G_L1_RELATIVE: 13.298 G_Regularizer: 0.000 validation_error: 20.682
+(epoch: 43, iters: 120416, time: 0.533, data: 0.000) G_L1: 13.304 G_L1_ABSOLUTE: 2.747 G_L1_RELATIVE: 10.557 G_Regularizer: 0.000 validation_error: 20.838
+(epoch: 43, iters: 122416, time: 0.537, data: 0.000) G_L1: 12.839 G_L1_ABSOLUTE: 2.341 G_L1_RELATIVE: 10.498 G_Regularizer: 0.000 validation_error: 21.108
+(epoch: 43, iters: 124416, time: 0.543, data: 0.001) G_L1: 16.667 G_L1_ABSOLUTE: 2.819 G_L1_RELATIVE: 13.849 G_Regularizer: 0.000 validation_error: 20.796
+(epoch: 43, iters: 126416, time: 0.557, data: 0.000) G_L1: 14.240 G_L1_ABSOLUTE: 2.450 G_L1_RELATIVE: 11.790 G_Regularizer: 0.000 validation_error: 21.075
+(epoch: 43, iters: 128416, time: 0.549, data: 0.000) G_L1: 13.758 G_L1_ABSOLUTE: 2.298 G_L1_RELATIVE: 11.460 G_Regularizer: 0.000 validation_error: 20.925
+(epoch: 43, iters: 130416, time: 0.563, data: 0.000) G_L1: 15.255 G_L1_ABSOLUTE: 2.949 G_L1_RELATIVE: 12.306 G_Regularizer: 0.000 validation_error: 20.807
+(epoch: 43, iters: 132416, time: 0.559, data: 0.000) G_L1: 13.908 G_L1_ABSOLUTE: 2.458 G_L1_RELATIVE: 11.451 G_Regularizer: 0.000 validation_error: 20.869
+(epoch: 43, iters: 134416, time: 0.540, data: 0.000) G_L1: 13.171 G_L1_ABSOLUTE: 2.905 G_L1_RELATIVE: 10.266 G_Regularizer: 0.000 validation_error: 21.120
+(epoch: 43, iters: 136416, time: 0.537, data: 0.000) G_L1: 14.653 G_L1_ABSOLUTE: 2.624 G_L1_RELATIVE: 12.028 G_Regularizer: 0.000 validation_error: 21.074
+(epoch: 43, iters: 138416, time: 0.561, data: 0.001) G_L1: 12.042 G_L1_ABSOLUTE: 3.019 G_L1_RELATIVE: 9.024 G_Regularizer: 0.000 validation_error: 20.813
+(epoch: 43, iters: 140416, time: 0.556, data: 0.000) G_L1: 13.130 G_L1_ABSOLUTE: 2.571 G_L1_RELATIVE: 10.559 G_Regularizer: 0.000 validation_error: 21.137
+(epoch: 43, iters: 142416, time: 0.540, data: 0.002) G_L1: 12.754 G_L1_ABSOLUTE: 2.730 G_L1_RELATIVE: 10.024 G_Regularizer: 0.000 validation_error: 20.952
+(epoch: 43, iters: 144416, time: 0.560, data: 0.000) G_L1: 15.613 G_L1_ABSOLUTE: 2.687 G_L1_RELATIVE: 12.926 G_Regularizer: 0.000 validation_error: 21.459
+(epoch: 43, iters: 146416, time: 0.553, data: 0.000) G_L1: 13.586 G_L1_ABSOLUTE: 2.374 G_L1_RELATIVE: 11.212 G_Regularizer: 0.000 validation_error: 20.901
+(epoch: 43, iters: 148416, time: 0.536, data: 0.000) G_L1: 14.672 G_L1_ABSOLUTE: 2.345 G_L1_RELATIVE: 12.328 G_Regularizer: 0.000 validation_error: 20.682
+(epoch: 43, iters: 150416, time: 0.544, data: 0.000) G_L1: 12.208 G_L1_ABSOLUTE: 2.268 G_L1_RELATIVE: 9.940 G_Regularizer: 0.000 validation_error: 20.897
+(epoch: 43, iters: 152416, time: 0.548, data: 0.000) G_L1: 13.934 G_L1_ABSOLUTE: 2.629 G_L1_RELATIVE: 11.305 G_Regularizer: 0.000 validation_error: 20.903
+(epoch: 43, iters: 154416, time: 0.556, data: 0.000) G_L1: 12.636 G_L1_ABSOLUTE: 2.581 G_L1_RELATIVE: 10.055 G_Regularizer: 0.000 validation_error: 20.872
+(epoch: 43, iters: 156416, time: 0.547, data: 0.001) G_L1: 14.918 G_L1_ABSOLUTE: 2.337 G_L1_RELATIVE: 12.581 G_Regularizer: 0.000 validation_error: 20.945
+(epoch: 43, iters: 158416, time: 0.536, data: 0.000) G_L1: 14.234 G_L1_ABSOLUTE: 2.624 G_L1_RELATIVE: 11.610 G_Regularizer: 0.000 validation_error: 20.819
+(epoch: 43, iters: 160416, time: 0.553, data: 0.000) G_L1: 16.868 G_L1_ABSOLUTE: 2.972 G_L1_RELATIVE: 13.897 G_Regularizer: 0.000 validation_error: 20.535
+(epoch: 43, iters: 162416, time: 0.565, data: 0.000) G_L1: 14.377 G_L1_ABSOLUTE: 2.263 G_L1_RELATIVE: 12.114 G_Regularizer: 0.000 validation_error: 20.753
+(epoch: 43, iters: 164416, time: 0.562, data: 0.000) G_L1: 12.621 G_L1_ABSOLUTE: 2.375 G_L1_RELATIVE: 10.246 G_Regularizer: 0.000 validation_error: 20.700
+(epoch: 43, iters: 166416, time: 0.559, data: 0.000) G_L1: 11.947 G_L1_ABSOLUTE: 2.235 G_L1_RELATIVE: 9.711 G_Regularizer: 0.000 validation_error: 20.972
+(epoch: 43, iters: 168416, time: 0.540, data: 0.000) G_L1: 15.267 G_L1_ABSOLUTE: 2.385 G_L1_RELATIVE: 12.883 G_Regularizer: 0.000 validation_error: 20.902
+(epoch: 43, iters: 170416, time: 0.543, data: 0.000) G_L1: 12.812 G_L1_ABSOLUTE: 2.280 G_L1_RELATIVE: 10.531 G_Regularizer: 0.000 validation_error: 20.706
+(epoch: 43, iters: 172416, time: 0.550, data: 0.000) G_L1: 12.279 G_L1_ABSOLUTE: 2.421 G_L1_RELATIVE: 9.858 G_Regularizer: 0.000 validation_error: 20.942
+(epoch: 43, iters: 174416, time: 0.556, data: 0.000) G_L1: 11.145 G_L1_ABSOLUTE: 2.413 G_L1_RELATIVE: 8.731 G_Regularizer: 0.000 validation_error: 21.222
+(epoch: 43, iters: 176416, time: 0.556, data: 0.000) G_L1: 16.240 G_L1_ABSOLUTE: 2.583 G_L1_RELATIVE: 13.658 G_Regularizer: 0.000 validation_error: 21.119
+(epoch: 43, iters: 178416, time: 0.555, data: 0.000) G_L1: 12.569 G_L1_ABSOLUTE: 2.460 G_L1_RELATIVE: 10.109 G_Regularizer: 0.000 validation_error: 20.795
+(epoch: 43, iters: 180416, time: 0.559, data: 0.001) G_L1: 13.105 G_L1_ABSOLUTE: 2.409 G_L1_RELATIVE: 10.695 G_Regularizer: 0.000 validation_error: 21.095
+(epoch: 43, iters: 182416, time: 0.556, data: 0.001) G_L1: 12.657 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 10.202 G_Regularizer: 0.000 validation_error: 20.941
+(epoch: 43, iters: 184416, time: 0.544, data: 0.000) G_L1: 14.547 G_L1_ABSOLUTE: 2.526 G_L1_RELATIVE: 12.021 G_Regularizer: 0.000 validation_error: 21.023
+(epoch: 43, iters: 186416, time: 0.558, data: 0.000) G_L1: 12.185 G_L1_ABSOLUTE: 2.096 G_L1_RELATIVE: 10.089 G_Regularizer: 0.000 validation_error: 20.836
+(epoch: 43, iters: 188416, time: 0.564, data: 0.000) G_L1: 15.334 G_L1_ABSOLUTE: 2.528 G_L1_RELATIVE: 12.806 G_Regularizer: 0.000 validation_error: 21.031
+(epoch: 43, iters: 190416, time: 0.564, data: 0.001) G_L1: 15.090 G_L1_ABSOLUTE: 2.277 G_L1_RELATIVE: 12.813 G_Regularizer: 0.000 validation_error: 20.721
+(epoch: 43, iters: 192416, time: 0.559, data: 0.000) G_L1: 11.732 G_L1_ABSOLUTE: 2.283 G_L1_RELATIVE: 9.449 G_Regularizer: 0.000 validation_error: 20.685
+(epoch: 43, iters: 194416, time: 0.555, data: 0.001) G_L1: 21.905 G_L1_ABSOLUTE: 2.881 G_L1_RELATIVE: 19.024 G_Regularizer: 0.000 validation_error: 20.628
+(epoch: 43, iters: 196416, time: 0.549, data: 0.000) G_L1: 16.290 G_L1_ABSOLUTE: 2.650 G_L1_RELATIVE: 13.640 G_Regularizer: 0.000 validation_error: 20.893
+(epoch: 43, iters: 198416, time: 0.540, data: 0.000) G_L1: 13.504 G_L1_ABSOLUTE: 2.438 G_L1_RELATIVE: 11.066 G_Regularizer: 0.000 validation_error: 20.837
+(epoch: 43, iters: 200416, time: 0.570, data: 0.000) G_L1: 15.594 G_L1_ABSOLUTE: 2.200 G_L1_RELATIVE: 13.394 G_Regularizer: 0.000 validation_error: 20.803
+(epoch: 43, iters: 202416, time: 0.545, data: 0.000) G_L1: 15.267 G_L1_ABSOLUTE: 2.370 G_L1_RELATIVE: 12.897 G_Regularizer: 0.000 validation_error: 20.837
+(epoch: 43, iters: 204416, time: 0.556, data: 0.000) G_L1: 14.213 G_L1_ABSOLUTE: 2.280 G_L1_RELATIVE: 11.933 G_Regularizer: 0.000 validation_error: 20.892
+(epoch: 43, iters: 206416, time: 0.562, data: 0.000) G_L1: 13.420 G_L1_ABSOLUTE: 2.453 G_L1_RELATIVE: 10.968 G_Regularizer: 0.000 validation_error: 21.029
+(epoch: 43, iters: 208416, time: 0.563, data: 0.000) G_L1: 13.414 G_L1_ABSOLUTE: 2.691 G_L1_RELATIVE: 10.723 G_Regularizer: 0.000 validation_error: 20.890
+(epoch: 43, iters: 210416, time: 0.541, data: 0.000) G_L1: 15.183 G_L1_ABSOLUTE: 2.092 G_L1_RELATIVE: 13.091 G_Regularizer: 0.000 validation_error: 21.076
+(epoch: 43, iters: 212416, time: 0.545, data: 0.000) G_L1: 11.589 G_L1_ABSOLUTE: 2.292 G_L1_RELATIVE: 9.297 G_Regularizer: 0.000 validation_error: 20.788
+(epoch: 43, iters: 214416, time: 0.553, data: 0.000) G_L1: 13.032 G_L1_ABSOLUTE: 2.138 G_L1_RELATIVE: 10.893 G_Regularizer: 0.000 validation_error: 20.799
+(epoch: 43, iters: 216416, time: 0.556, data: 0.000) G_L1: 16.056 G_L1_ABSOLUTE: 2.517 G_L1_RELATIVE: 13.539 G_Regularizer: 0.000 validation_error: 20.916
+(epoch: 43, iters: 218416, time: 0.549, data: 0.000) G_L1: 13.361 G_L1_ABSOLUTE: 2.623 G_L1_RELATIVE: 10.739 G_Regularizer: 0.000 validation_error: 20.752
+(epoch: 43, iters: 220416, time: 0.548, data: 0.000) G_L1: 13.177 G_L1_ABSOLUTE: 3.134 G_L1_RELATIVE: 10.043 G_Regularizer: 0.000 validation_error: 20.938
+(epoch: 43, iters: 222416, time: 0.565, data: 0.000) G_L1: 12.793 G_L1_ABSOLUTE: 2.032 G_L1_RELATIVE: 10.761 G_Regularizer: 0.000 validation_error: 20.805
+(epoch: 43, iters: 224416, time: 0.561, data: 0.000) G_L1: 13.973 G_L1_ABSOLUTE: 2.743 G_L1_RELATIVE: 11.230 G_Regularizer: 0.000 validation_error: 20.823
+(epoch: 43, iters: 226416, time: 0.540, data: 0.000) G_L1: 14.540 G_L1_ABSOLUTE: 2.383 G_L1_RELATIVE: 12.157 G_Regularizer: 0.000 validation_error: 20.914
+(epoch: 43, iters: 228416, time: 0.545, data: 0.000) G_L1: 13.696 G_L1_ABSOLUTE: 2.294 G_L1_RELATIVE: 11.402 G_Regularizer: 0.000 validation_error: 20.873
+(epoch: 43, iters: 230416, time: 0.555, data: 0.000) G_L1: 12.669 G_L1_ABSOLUTE: 2.303 G_L1_RELATIVE: 10.366 G_Regularizer: 0.000 validation_error: 20.901
+(epoch: 43, iters: 232416, time: 0.553, data: 0.000) G_L1: 14.723 G_L1_ABSOLUTE: 2.316 G_L1_RELATIVE: 12.407 G_Regularizer: 0.000 validation_error: 20.542
+(epoch: 43, iters: 234416, time: 0.562, data: 0.000) G_L1: 17.991 G_L1_ABSOLUTE: 2.915 G_L1_RELATIVE: 15.076 G_Regularizer: 0.000 validation_error: 20.916
+(epoch: 43, iters: 236416, time: 0.534, data: 0.000) G_L1: 15.218 G_L1_ABSOLUTE: 2.193 G_L1_RELATIVE: 13.026 G_Regularizer: 0.000 validation_error: 20.751
+(epoch: 43, iters: 238416, time: 0.559, data: 0.000) G_L1: 15.558 G_L1_ABSOLUTE: 2.447 G_L1_RELATIVE: 13.111 G_Regularizer: 0.000 validation_error: 21.423
+(epoch: 43, iters: 240416, time: 0.564, data: 0.000) G_L1: 13.123 G_L1_ABSOLUTE: 2.257 G_L1_RELATIVE: 10.865 G_Regularizer: 0.000 validation_error: 20.799
+(epoch: 43, iters: 242416, time: 0.553, data: 0.000) G_L1: 15.231 G_L1_ABSOLUTE: 2.587 G_L1_RELATIVE: 12.645 G_Regularizer: 0.000 validation_error: 21.022
+(epoch: 43, iters: 244416, time: 0.558, data: 0.000) G_L1: 16.928 G_L1_ABSOLUTE: 2.482 G_L1_RELATIVE: 14.446 G_Regularizer: 0.000 validation_error: 20.973
+(epoch: 43, iters: 246416, time: 0.562, data: 0.000) G_L1: 12.564 G_L1_ABSOLUTE: 2.142 G_L1_RELATIVE: 10.422 G_Regularizer: 0.000 validation_error: 20.485
+(epoch: 43, iters: 248416, time: 0.562, data: 0.000) G_L1: 14.410 G_L1_ABSOLUTE: 2.613 G_L1_RELATIVE: 11.797 G_Regularizer: 0.000 validation_error: 20.883
+(epoch: 43, iters: 250416, time: 0.550, data: 0.000) G_L1: 14.934 G_L1_ABSOLUTE: 2.980 G_L1_RELATIVE: 11.954 G_Regularizer: 0.000 validation_error: 20.898
+(epoch: 43, iters: 252416, time: 0.563, data: 0.000) G_L1: 14.875 G_L1_ABSOLUTE: 2.685 G_L1_RELATIVE: 12.190 G_Regularizer: 0.000 validation_error: 20.645
+(epoch: 43, iters: 254416, time: 0.535, data: 0.000) G_L1: 13.767 G_L1_ABSOLUTE: 2.373 G_L1_RELATIVE: 11.394 G_Regularizer: 0.000 validation_error: 20.729
+(epoch: 43, iters: 256416, time: 0.558, data: 0.000) G_L1: 11.073 G_L1_ABSOLUTE: 1.991 G_L1_RELATIVE: 9.082 G_Regularizer: 0.000 validation_error: 20.643
+(epoch: 43, iters: 258416, time: 0.553, data: 0.000) G_L1: 13.420 G_L1_ABSOLUTE: 2.234 G_L1_RELATIVE: 11.185 G_Regularizer: 0.000 validation_error: 20.873
+(epoch: 43, iters: 260416, time: 0.561, data: 0.000) G_L1: 15.477 G_L1_ABSOLUTE: 2.732 G_L1_RELATIVE: 12.745 G_Regularizer: 0.000 validation_error: 20.928
+(epoch: 43, iters: 262416, time: 0.544, data: 0.000) G_L1: 13.963 G_L1_ABSOLUTE: 2.443 G_L1_RELATIVE: 11.520 G_Regularizer: 0.000 validation_error: 20.848
+(epoch: 43, iters: 264416, time: 0.535, data: 0.000) G_L1: 13.710 G_L1_ABSOLUTE: 2.510 G_L1_RELATIVE: 11.199 G_Regularizer: 0.000 validation_error: 21.015
+(epoch: 43, iters: 266416, time: 0.557, data: 0.000) G_L1: 14.061 G_L1_ABSOLUTE: 2.742 G_L1_RELATIVE: 11.319 G_Regularizer: 0.000 validation_error: 21.073
+(epoch: 43, iters: 268416, time: 0.551, data: 0.000) G_L1: 14.393 G_L1_ABSOLUTE: 2.399 G_L1_RELATIVE: 11.994 G_Regularizer: 0.000 validation_error: 20.533
+(epoch: 43, iters: 270416, time: 0.548, data: 0.000) G_L1: 13.754 G_L1_ABSOLUTE: 2.320 G_L1_RELATIVE: 11.434 G_Regularizer: 0.000 validation_error: 20.410
+(epoch: 43, iters: 272416, time: 0.551, data: 0.000) G_L1: 16.454 G_L1_ABSOLUTE: 2.499 G_L1_RELATIVE: 13.954 G_Regularizer: 0.000 validation_error: 21.116
+(epoch: 43, iters: 274416, time: 0.549, data: 0.000) G_L1: 15.649 G_L1_ABSOLUTE: 2.878 G_L1_RELATIVE: 12.771 G_Regularizer: 0.000 validation_error: 20.669
+(epoch: 43, iters: 276416, time: 0.541, data: 0.000) G_L1: 14.685 G_L1_ABSOLUTE: 2.655 G_L1_RELATIVE: 12.030 G_Regularizer: 0.000 validation_error: 20.994
+(epoch: 43, iters: 278416, time: 0.533, data: 0.001) G_L1: 17.176 G_L1_ABSOLUTE: 2.684 G_L1_RELATIVE: 14.492 G_Regularizer: 0.000 validation_error: 21.146
+(epoch: 43, iters: 280416, time: 0.558, data: 0.000) G_L1: 13.515 G_L1_ABSOLUTE: 2.805 G_L1_RELATIVE: 10.710 G_Regularizer: 0.000 validation_error: 20.881
+(epoch: 43, iters: 282416, time: 0.557, data: 0.000) G_L1: 15.595 G_L1_ABSOLUTE: 2.313 G_L1_RELATIVE: 13.282 G_Regularizer: 0.000 validation_error: 20.870
+(epoch: 43, iters: 284416, time: 0.562, data: 0.000) G_L1: 15.134 G_L1_ABSOLUTE: 2.568 G_L1_RELATIVE: 12.566 G_Regularizer: 0.000 validation_error: 20.968
+(epoch: 43, iters: 286416, time: 0.544, data: 0.000) G_L1: 15.731 G_L1_ABSOLUTE: 2.597 G_L1_RELATIVE: 13.134 G_Regularizer: 0.000 validation_error: 20.452
+(epoch: 43, iters: 288416, time: 0.529, data: 0.000) G_L1: 16.617 G_L1_ABSOLUTE: 2.234 G_L1_RELATIVE: 14.383 G_Regularizer: 0.000 validation_error: 20.881
+(epoch: 43, iters: 290416, time: 0.561, data: 0.000) G_L1: 13.604 G_L1_ABSOLUTE: 3.127 G_L1_RELATIVE: 10.477 G_Regularizer: 0.000 validation_error: 20.829
+(epoch: 43, iters: 292416, time: 0.547, data: 0.000) G_L1: 14.011 G_L1_ABSOLUTE: 2.224 G_L1_RELATIVE: 11.786 G_Regularizer: 0.000 validation_error: 20.875
+(epoch: 43, iters: 294416, time: 0.542, data: 0.000) G_L1: 17.116 G_L1_ABSOLUTE: 2.988 G_L1_RELATIVE: 14.128 G_Regularizer: 0.000 validation_error: 20.821
+(epoch: 43, iters: 296416, time: 0.563, data: 0.000) G_L1: 14.414 G_L1_ABSOLUTE: 2.388 G_L1_RELATIVE: 12.026 G_Regularizer: 0.000 validation_error: 20.645
+(epoch: 43, iters: 298416, time: 0.553, data: 0.000) G_L1: 13.677 G_L1_ABSOLUTE: 2.679 G_L1_RELATIVE: 10.998 G_Regularizer: 0.000 validation_error: 21.028
+(epoch: 43, iters: 300416, time: 0.551, data: 0.000) G_L1: 13.808 G_L1_ABSOLUTE: 2.503 G_L1_RELATIVE: 11.305 G_Regularizer: 0.000 validation_error: 20.631
+(epoch: 43, iters: 302416, time: 0.545, data: 0.000) G_L1: 18.610 G_L1_ABSOLUTE: 2.981 G_L1_RELATIVE: 15.629 G_Regularizer: 0.000 validation_error: 20.696
+(epoch: 44, iters: 1664, time: 0.556, data: 0.000) G_L1: 12.910 G_L1_ABSOLUTE: 2.291 G_L1_RELATIVE: 10.619 G_Regularizer: 0.000 validation_error: 20.470
+(epoch: 44, iters: 3664, time: 0.543, data: 0.000) G_L1: 14.913 G_L1_ABSOLUTE: 2.377 G_L1_RELATIVE: 12.536 G_Regularizer: 0.000 validation_error: 20.813
+(epoch: 44, iters: 5664, time: 0.553, data: 0.000) G_L1: 11.004 G_L1_ABSOLUTE: 2.122 G_L1_RELATIVE: 8.883 G_Regularizer: 0.000 validation_error: 20.738
+(epoch: 44, iters: 7664, time: 0.560, data: 0.000) G_L1: 13.712 G_L1_ABSOLUTE: 2.388 G_L1_RELATIVE: 11.324 G_Regularizer: 0.000 validation_error: 20.981
+(epoch: 44, iters: 9664, time: 0.531, data: 0.000) G_L1: 13.678 G_L1_ABSOLUTE: 3.108 G_L1_RELATIVE: 10.570 G_Regularizer: 0.000 validation_error: 20.685
+(epoch: 44, iters: 11664, time: 0.563, data: 0.000) G_L1: 15.823 G_L1_ABSOLUTE: 2.953 G_L1_RELATIVE: 12.870 G_Regularizer: 0.000 validation_error: 20.853
+(epoch: 44, iters: 13664, time: 0.549, data: 0.000) G_L1: 13.809 G_L1_ABSOLUTE: 2.633 G_L1_RELATIVE: 11.176 G_Regularizer: 0.000 validation_error: 20.690
+(epoch: 44, iters: 15664, time: 0.559, data: 0.000) G_L1: 14.197 G_L1_ABSOLUTE: 2.301 G_L1_RELATIVE: 11.896 G_Regularizer: 0.000 validation_error: 20.705
+(epoch: 44, iters: 17664, time: 0.540, data: 0.000) G_L1: 13.534 G_L1_ABSOLUTE: 2.659 G_L1_RELATIVE: 10.875 G_Regularizer: 0.000 validation_error: 20.818
+(epoch: 44, iters: 19664, time: 0.556, data: 0.000) G_L1: 14.900 G_L1_ABSOLUTE: 2.473 G_L1_RELATIVE: 12.427 G_Regularizer: 0.000 validation_error: 20.876
+(epoch: 44, iters: 21664, time: 0.553, data: 0.000) G_L1: 13.066 G_L1_ABSOLUTE: 2.359 G_L1_RELATIVE: 10.707 G_Regularizer: 0.000 validation_error: 20.767
+(epoch: 44, iters: 23664, time: 0.560, data: 0.000) G_L1: 14.756 G_L1_ABSOLUTE: 2.604 G_L1_RELATIVE: 12.152 G_Regularizer: 0.000 validation_error: 20.726
+(epoch: 44, iters: 25664, time: 0.555, data: 0.000) G_L1: 16.557 G_L1_ABSOLUTE: 2.578 G_L1_RELATIVE: 13.979 G_Regularizer: 0.000 validation_error: 21.288
+(epoch: 44, iters: 27664, time: 0.550, data: 0.000) G_L1: 16.587 G_L1_ABSOLUTE: 2.599 G_L1_RELATIVE: 13.987 G_Regularizer: 0.000 validation_error: 20.775
+(epoch: 44, iters: 29664, time: 0.555, data: 0.000) G_L1: 14.444 G_L1_ABSOLUTE: 3.279 G_L1_RELATIVE: 11.165 G_Regularizer: 0.000 validation_error: 20.811
+(epoch: 44, iters: 31664, time: 0.556, data: 0.000) G_L1: 15.145 G_L1_ABSOLUTE: 2.371 G_L1_RELATIVE: 12.774 G_Regularizer: 0.000 validation_error: 21.114
+(epoch: 44, iters: 33664, time: 0.535, data: 0.000) G_L1: 14.363 G_L1_ABSOLUTE: 2.371 G_L1_RELATIVE: 11.992 G_Regularizer: 0.000 validation_error: 20.884
+(epoch: 44, iters: 35664, time: 0.553, data: 0.000) G_L1: 14.682 G_L1_ABSOLUTE: 2.288 G_L1_RELATIVE: 12.394 G_Regularizer: 0.000 validation_error: 20.416
+(epoch: 44, iters: 37664, time: 0.537, data: 0.000) G_L1: 13.645 G_L1_ABSOLUTE: 2.280 G_L1_RELATIVE: 11.365 G_Regularizer: 0.000 validation_error: 21.031
+(epoch: 44, iters: 39664, time: 0.539, data: 0.000) G_L1: 13.659 G_L1_ABSOLUTE: 2.466 G_L1_RELATIVE: 11.193 G_Regularizer: 0.000 validation_error: 20.875
+(epoch: 44, iters: 41664, time: 0.544, data: 0.000) G_L1: 13.728 G_L1_ABSOLUTE: 2.347 G_L1_RELATIVE: 11.382 G_Regularizer: 0.000 validation_error: 21.115
+(epoch: 44, iters: 43664, time: 0.534, data: 0.000) G_L1: 15.018 G_L1_ABSOLUTE: 2.593 G_L1_RELATIVE: 12.425 G_Regularizer: 0.000 validation_error: 20.850
+(epoch: 44, iters: 45664, time: 0.538, data: 0.000) G_L1: 13.819 G_L1_ABSOLUTE: 2.854 G_L1_RELATIVE: 10.965 G_Regularizer: 0.000 validation_error: 20.800
+(epoch: 44, iters: 47664, time: 0.557, data: 0.000) G_L1: 12.634 G_L1_ABSOLUTE: 2.184 G_L1_RELATIVE: 10.450 G_Regularizer: 0.000 validation_error: 21.017
+(epoch: 44, iters: 49664, time: 0.538, data: 0.000) G_L1: 15.692 G_L1_ABSOLUTE: 2.445 G_L1_RELATIVE: 13.246 G_Regularizer: 0.000 validation_error: 20.772
+(epoch: 44, iters: 51664, time: 0.539, data: 0.000) G_L1: 13.981 G_L1_ABSOLUTE: 3.000 G_L1_RELATIVE: 10.981 G_Regularizer: 0.000 validation_error: 20.696
+(epoch: 44, iters: 53664, time: 0.531, data: 0.000) G_L1: 12.315 G_L1_ABSOLUTE: 2.066 G_L1_RELATIVE: 10.249 G_Regularizer: 0.000 validation_error: 20.701
+(epoch: 44, iters: 55664, time: 0.563, data: 0.000) G_L1: 11.918 G_L1_ABSOLUTE: 2.505 G_L1_RELATIVE: 9.413 G_Regularizer: 0.000 validation_error: 20.765
+(epoch: 44, iters: 57664, time: 0.545, data: 0.000) G_L1: 13.255 G_L1_ABSOLUTE: 2.537 G_L1_RELATIVE: 10.718 G_Regularizer: 0.000 validation_error: 20.807
+(epoch: 44, iters: 59664, time: 0.531, data: 0.000) G_L1: 12.950 G_L1_ABSOLUTE: 2.207 G_L1_RELATIVE: 10.743 G_Regularizer: 0.000 validation_error: 20.465
+(epoch: 44, iters: 61664, time: 0.551, data: 0.000) G_L1: 15.299 G_L1_ABSOLUTE: 2.724 G_L1_RELATIVE: 12.575 G_Regularizer: 0.000 validation_error: 20.513
+(epoch: 44, iters: 63664, time: 0.556, data: 0.000) G_L1: 15.545 G_L1_ABSOLUTE: 2.627 G_L1_RELATIVE: 12.919 G_Regularizer: 0.000 validation_error: 20.824
+(epoch: 44, iters: 65664, time: 0.557, data: 0.000) G_L1: 11.876 G_L1_ABSOLUTE: 2.373 G_L1_RELATIVE: 9.504 G_Regularizer: 0.000 validation_error: 20.783
+(epoch: 44, iters: 67664, time: 0.539, data: 0.000) G_L1: 16.032 G_L1_ABSOLUTE: 2.653 G_L1_RELATIVE: 13.379 G_Regularizer: 0.000 validation_error: 20.962
+(epoch: 44, iters: 69664, time: 0.540, data: 0.000) G_L1: 14.275 G_L1_ABSOLUTE: 2.371 G_L1_RELATIVE: 11.904 G_Regularizer: 0.000 validation_error: 20.680
+(epoch: 44, iters: 71664, time: 0.551, data: 0.000) G_L1: 13.500 G_L1_ABSOLUTE: 2.878 G_L1_RELATIVE: 10.622 G_Regularizer: 0.000 validation_error: 20.714
+(epoch: 44, iters: 73664, time: 0.549, data: 0.000) G_L1: 14.199 G_L1_ABSOLUTE: 2.793 G_L1_RELATIVE: 11.406 G_Regularizer: 0.000 validation_error: 20.520
+(epoch: 44, iters: 75664, time: 0.525, data: 0.000) G_L1: 14.983 G_L1_ABSOLUTE: 2.435 G_L1_RELATIVE: 12.548 G_Regularizer: 0.000 validation_error: 21.081
+(epoch: 44, iters: 77664, time: 0.535, data: 0.000) G_L1: 13.512 G_L1_ABSOLUTE: 2.605 G_L1_RELATIVE: 10.907 G_Regularizer: 0.000 validation_error: 20.728
+(epoch: 44, iters: 79664, time: 0.550, data: 0.000) G_L1: 13.438 G_L1_ABSOLUTE: 2.100 G_L1_RELATIVE: 11.338 G_Regularizer: 0.000 validation_error: 20.715
+(epoch: 44, iters: 81664, time: 0.553, data: 0.000) G_L1: 14.166 G_L1_ABSOLUTE: 2.460 G_L1_RELATIVE: 11.706 G_Regularizer: 0.000 validation_error: 20.348
+(epoch: 44, iters: 83664, time: 0.538, data: 0.000) G_L1: 13.733 G_L1_ABSOLUTE: 2.475 G_L1_RELATIVE: 11.258 G_Regularizer: 0.000 validation_error: 20.804
+(epoch: 44, iters: 85664, time: 0.668, data: 0.000) G_L1: 14.807 G_L1_ABSOLUTE: 2.762 G_L1_RELATIVE: 12.045 G_Regularizer: 0.000 validation_error: 20.660
+(epoch: 44, iters: 87664, time: 0.680, data: 0.000) G_L1: 10.322 G_L1_ABSOLUTE: 2.159 G_L1_RELATIVE: 8.163 G_Regularizer: 0.000 validation_error: 21.139
+(epoch: 44, iters: 89664, time: 0.668, data: 0.000) G_L1: 11.637 G_L1_ABSOLUTE: 2.560 G_L1_RELATIVE: 9.077 G_Regularizer: 0.000 validation_error: 20.720
+(epoch: 44, iters: 91664, time: 0.650, data: 0.000) G_L1: 13.373 G_L1_ABSOLUTE: 2.381 G_L1_RELATIVE: 10.992 G_Regularizer: 0.000 validation_error: 20.794
+(epoch: 44, iters: 93664, time: 0.638, data: 0.000) G_L1: 15.732 G_L1_ABSOLUTE: 2.781 G_L1_RELATIVE: 12.951 G_Regularizer: 0.000 validation_error: 20.856
+(epoch: 44, iters: 95664, time: 0.658, data: 0.000) G_L1: 14.252 G_L1_ABSOLUTE: 2.286 G_L1_RELATIVE: 11.965 G_Regularizer: 0.000 validation_error: 20.884
+(epoch: 44, iters: 97664, time: 0.687, data: 0.000) G_L1: 12.953 G_L1_ABSOLUTE: 2.538 G_L1_RELATIVE: 10.416 G_Regularizer: 0.000 validation_error: 20.694
+(epoch: 44, iters: 99664, time: 0.688, data: 0.000) G_L1: 12.121 G_L1_ABSOLUTE: 2.465 G_L1_RELATIVE: 9.657 G_Regularizer: 0.000 validation_error: 20.677
+(epoch: 44, iters: 101664, time: 0.672, data: 0.000) G_L1: 12.312 G_L1_ABSOLUTE: 2.031 G_L1_RELATIVE: 10.281 G_Regularizer: 0.000 validation_error: 20.578
+(epoch: 44, iters: 103664, time: 0.643, data: 0.000) G_L1: 12.856 G_L1_ABSOLUTE: 2.375 G_L1_RELATIVE: 10.481 G_Regularizer: 0.000 validation_error: 20.701
+(epoch: 44, iters: 105664, time: 0.673, data: 0.000) G_L1: 16.290 G_L1_ABSOLUTE: 2.743 G_L1_RELATIVE: 13.548 G_Regularizer: 0.000 validation_error: 21.179
+(epoch: 44, iters: 107664, time: 0.679, data: 0.000) G_L1: 17.360 G_L1_ABSOLUTE: 2.903 G_L1_RELATIVE: 14.457 G_Regularizer: 0.000 validation_error: 20.733
+(epoch: 44, iters: 109664, time: 0.675, data: 0.000) G_L1: 13.873 G_L1_ABSOLUTE: 2.912 G_L1_RELATIVE: 10.962 G_Regularizer: 0.000 validation_error: 21.051
+(epoch: 44, iters: 111664, time: 0.663, data: 0.001) G_L1: 15.268 G_L1_ABSOLUTE: 2.633 G_L1_RELATIVE: 12.635 G_Regularizer: 0.000 validation_error: 20.677
+(epoch: 44, iters: 113664, time: 0.686, data: 0.000) G_L1: 14.383 G_L1_ABSOLUTE: 2.593 G_L1_RELATIVE: 11.790 G_Regularizer: 0.000 validation_error: 20.986
+(epoch: 44, iters: 115664, time: 0.671, data: 0.000) G_L1: 12.953 G_L1_ABSOLUTE: 2.065 G_L1_RELATIVE: 10.888 G_Regularizer: 0.000 validation_error: 20.799
+(epoch: 44, iters: 117664, time: 0.674, data: 0.000) G_L1: 15.540 G_L1_ABSOLUTE: 2.948 G_L1_RELATIVE: 12.592 G_Regularizer: 0.000 validation_error: 20.829
+(epoch: 44, iters: 119664, time: 0.565, data: 0.000) G_L1: 15.911 G_L1_ABSOLUTE: 2.321 G_L1_RELATIVE: 13.590 G_Regularizer: 0.000 validation_error: 21.038
+(epoch: 44, iters: 121664, time: 0.554, data: 0.000) G_L1: 18.023 G_L1_ABSOLUTE: 2.736 G_L1_RELATIVE: 15.288 G_Regularizer: 0.000 validation_error: 20.626
+(epoch: 44, iters: 123664, time: 0.531, data: 0.000) G_L1: 14.718 G_L1_ABSOLUTE: 2.477 G_L1_RELATIVE: 12.241 G_Regularizer: 0.000 validation_error: 20.830
+(epoch: 44, iters: 125664, time: 0.559, data: 0.000) G_L1: 12.288 G_L1_ABSOLUTE: 2.603 G_L1_RELATIVE: 9.684 G_Regularizer: 0.000 validation_error: 20.841
+(epoch: 44, iters: 127664, time: 0.553, data: 0.001) G_L1: 14.185 G_L1_ABSOLUTE: 2.243 G_L1_RELATIVE: 11.942 G_Regularizer: 0.000 validation_error: 20.274
+(epoch: 44, iters: 129664, time: 0.552, data: 0.001) G_L1: 12.771 G_L1_ABSOLUTE: 2.596 G_L1_RELATIVE: 10.175 G_Regularizer: 0.000 validation_error: 20.678
+(epoch: 44, iters: 131664, time: 0.539, data: 0.000) G_L1: 13.106 G_L1_ABSOLUTE: 2.366 G_L1_RELATIVE: 10.741 G_Regularizer: 0.000 validation_error: 21.270
+(epoch: 44, iters: 133664, time: 0.547, data: 0.000) G_L1: 14.906 G_L1_ABSOLUTE: 2.430 G_L1_RELATIVE: 12.476 G_Regularizer: 0.000 validation_error: 20.678
+(epoch: 44, iters: 135664, time: 0.543, data: 0.001) G_L1: 13.279 G_L1_ABSOLUTE: 2.483 G_L1_RELATIVE: 10.796 G_Regularizer: 0.000 validation_error: 21.028
+(epoch: 44, iters: 137664, time: 0.548, data: 0.000) G_L1: 15.095 G_L1_ABSOLUTE: 2.437 G_L1_RELATIVE: 12.658 G_Regularizer: 0.000 validation_error: 21.094
+(epoch: 44, iters: 139664, time: 0.555, data: 0.000) G_L1: 13.736 G_L1_ABSOLUTE: 2.825 G_L1_RELATIVE: 10.911 G_Regularizer: 0.000 validation_error: 20.907
+(epoch: 44, iters: 141664, time: 0.546, data: 0.000) G_L1: 12.980 G_L1_ABSOLUTE: 2.701 G_L1_RELATIVE: 10.279 G_Regularizer: 0.000 validation_error: 20.919
+(epoch: 44, iters: 143664, time: 0.553, data: 0.000) G_L1: 13.982 G_L1_ABSOLUTE: 2.721 G_L1_RELATIVE: 11.261 G_Regularizer: 0.000 validation_error: 21.071
+(epoch: 44, iters: 145664, time: 0.556, data: 0.000) G_L1: 12.555 G_L1_ABSOLUTE: 2.098 G_L1_RELATIVE: 10.457 G_Regularizer: 0.000 validation_error: 21.142
+(epoch: 44, iters: 147664, time: 0.557, data: 0.000) G_L1: 13.550 G_L1_ABSOLUTE: 2.435 G_L1_RELATIVE: 11.115 G_Regularizer: 0.000 validation_error: 20.916
+(epoch: 44, iters: 149664, time: 0.530, data: 0.001) G_L1: 15.160 G_L1_ABSOLUTE: 3.024 G_L1_RELATIVE: 12.135 G_Regularizer: 0.000 validation_error: 20.752
+(epoch: 44, iters: 151664, time: 0.557, data: 0.000) G_L1: 13.735 G_L1_ABSOLUTE: 2.411 G_L1_RELATIVE: 11.325 G_Regularizer: 0.000 validation_error: 21.004
+(epoch: 44, iters: 153664, time: 0.540, data: 0.000) G_L1: 11.744 G_L1_ABSOLUTE: 1.894 G_L1_RELATIVE: 9.850 G_Regularizer: 0.000 validation_error: 20.855
+(epoch: 44, iters: 155664, time: 0.539, data: 0.000) G_L1: 14.952 G_L1_ABSOLUTE: 2.399 G_L1_RELATIVE: 12.554 G_Regularizer: 0.000 validation_error: 21.082
+(epoch: 44, iters: 157664, time: 0.547, data: 0.001) G_L1: 14.598 G_L1_ABSOLUTE: 2.437 G_L1_RELATIVE: 12.161 G_Regularizer: 0.000 validation_error: 20.722
+(epoch: 44, iters: 159664, time: 0.549, data: 0.001) G_L1: 14.374 G_L1_ABSOLUTE: 2.318 G_L1_RELATIVE: 12.056 G_Regularizer: 0.000 validation_error: 20.958
+(epoch: 44, iters: 161664, time: 0.533, data: 0.000) G_L1: 14.563 G_L1_ABSOLUTE: 2.682 G_L1_RELATIVE: 11.881 G_Regularizer: 0.000 validation_error: 21.153
+(epoch: 44, iters: 163664, time: 0.550, data: 0.002) G_L1: 14.036 G_L1_ABSOLUTE: 2.142 G_L1_RELATIVE: 11.895 G_Regularizer: 0.000 validation_error: 20.697
+(epoch: 44, iters: 165664, time: 0.539, data: 0.001) G_L1: 14.653 G_L1_ABSOLUTE: 2.767 G_L1_RELATIVE: 11.886 G_Regularizer: 0.000 validation_error: 20.900
+(epoch: 44, iters: 167664, time: 0.530, data: 0.000) G_L1: 24.341 G_L1_ABSOLUTE: 2.673 G_L1_RELATIVE: 21.668 G_Regularizer: 0.000 validation_error: 21.029
+(epoch: 44, iters: 169664, time: 0.532, data: 0.000) G_L1: 14.539 G_L1_ABSOLUTE: 2.498 G_L1_RELATIVE: 12.041 G_Regularizer: 0.000 validation_error: 21.235
+(epoch: 44, iters: 171664, time: 0.539, data: 0.000) G_L1: 12.021 G_L1_ABSOLUTE: 2.353 G_L1_RELATIVE: 9.668 G_Regularizer: 0.000 validation_error: 20.547
+(epoch: 44, iters: 173664, time: 0.545, data: 0.000) G_L1: 13.551 G_L1_ABSOLUTE: 2.697 G_L1_RELATIVE: 10.854 G_Regularizer: 0.000 validation_error: 20.807
+(epoch: 44, iters: 175664, time: 0.548, data: 0.000) G_L1: 13.379 G_L1_ABSOLUTE: 2.390 G_L1_RELATIVE: 10.989 G_Regularizer: 0.000 validation_error: 20.913
+(epoch: 44, iters: 177664, time: 0.545, data: 0.000) G_L1: 12.556 G_L1_ABSOLUTE: 2.232 G_L1_RELATIVE: 10.323 G_Regularizer: 0.000 validation_error: 21.121
+(epoch: 44, iters: 179664, time: 0.538, data: 0.000) G_L1: 12.779 G_L1_ABSOLUTE: 2.558 G_L1_RELATIVE: 10.221 G_Regularizer: 0.000 validation_error: 21.146
+(epoch: 44, iters: 181664, time: 0.525, data: 0.001) G_L1: 14.219 G_L1_ABSOLUTE: 2.516 G_L1_RELATIVE: 11.704 G_Regularizer: 0.000 validation_error: 21.173
+(epoch: 44, iters: 183664, time: 0.549, data: 0.000) G_L1: 15.731 G_L1_ABSOLUTE: 2.722 G_L1_RELATIVE: 13.009 G_Regularizer: 0.000 validation_error: 21.138
+(epoch: 44, iters: 185664, time: 0.558, data: 0.000) G_L1: 15.280 G_L1_ABSOLUTE: 2.570 G_L1_RELATIVE: 12.710 G_Regularizer: 0.000 validation_error: 21.023
+(epoch: 44, iters: 187664, time: 0.559, data: 0.000) G_L1: 14.145 G_L1_ABSOLUTE: 2.721 G_L1_RELATIVE: 11.423 G_Regularizer: 0.000 validation_error: 20.850
+(epoch: 44, iters: 189664, time: 0.552, data: 0.000) G_L1: 15.451 G_L1_ABSOLUTE: 2.625 G_L1_RELATIVE: 12.826 G_Regularizer: 0.000 validation_error: 20.619
+(epoch: 44, iters: 191664, time: 0.547, data: 0.000) G_L1: 15.158 G_L1_ABSOLUTE: 2.800 G_L1_RELATIVE: 12.357 G_Regularizer: 0.000 validation_error: 20.566
+(epoch: 44, iters: 193664, time: 0.559, data: 0.000) G_L1: 14.323 G_L1_ABSOLUTE: 2.616 G_L1_RELATIVE: 11.706 G_Regularizer: 0.000 validation_error: 20.992
+(epoch: 44, iters: 195664, time: 0.555, data: 0.000) G_L1: 13.411 G_L1_ABSOLUTE: 2.409 G_L1_RELATIVE: 11.002 G_Regularizer: 0.000 validation_error: 20.619
+(epoch: 44, iters: 197664, time: 0.562, data: 0.000) G_L1: 13.375 G_L1_ABSOLUTE: 2.515 G_L1_RELATIVE: 10.861 G_Regularizer: 0.000 validation_error: 21.196
+(epoch: 44, iters: 199664, time: 0.545, data: 0.000) G_L1: 13.807 G_L1_ABSOLUTE: 2.677 G_L1_RELATIVE: 11.130 G_Regularizer: 0.000 validation_error: 21.006
+(epoch: 44, iters: 201664, time: 0.546, data: 0.000) G_L1: 15.030 G_L1_ABSOLUTE: 2.521 G_L1_RELATIVE: 12.510 G_Regularizer: 0.000 validation_error: 20.689
+(epoch: 44, iters: 203664, time: 0.531, data: 0.000) G_L1: 12.952 G_L1_ABSOLUTE: 2.495 G_L1_RELATIVE: 10.456 G_Regularizer: 0.000 validation_error: 21.170
+(epoch: 44, iters: 205664, time: 0.546, data: 0.000) G_L1: 13.989 G_L1_ABSOLUTE: 2.260 G_L1_RELATIVE: 11.729 G_Regularizer: 0.000 validation_error: 21.033
+(epoch: 44, iters: 207664, time: 0.538, data: 0.000) G_L1: 14.157 G_L1_ABSOLUTE: 2.654 G_L1_RELATIVE: 11.503 G_Regularizer: 0.000 validation_error: 20.822
+(epoch: 44, iters: 209664, time: 0.549, data: 0.000) G_L1: 13.158 G_L1_ABSOLUTE: 2.278 G_L1_RELATIVE: 10.880 G_Regularizer: 0.000 validation_error: 21.007
+(epoch: 44, iters: 211664, time: 0.525, data: 0.000) G_L1: 14.107 G_L1_ABSOLUTE: 2.406 G_L1_RELATIVE: 11.702 G_Regularizer: 0.000 validation_error: 20.967
+(epoch: 44, iters: 213664, time: 0.542, data: 0.001) G_L1: 12.167 G_L1_ABSOLUTE: 2.286 G_L1_RELATIVE: 9.881 G_Regularizer: 0.000 validation_error: 21.026
+(epoch: 44, iters: 215664, time: 0.550, data: 0.000) G_L1: 15.549 G_L1_ABSOLUTE: 2.455 G_L1_RELATIVE: 13.094 G_Regularizer: 0.000 validation_error: 21.116
+(epoch: 44, iters: 217664, time: 0.538, data: 0.000) G_L1: 12.672 G_L1_ABSOLUTE: 2.357 G_L1_RELATIVE: 10.315 G_Regularizer: 0.000 validation_error: 21.268
+(epoch: 44, iters: 219664, time: 0.553, data: 0.000) G_L1: 13.563 G_L1_ABSOLUTE: 2.775 G_L1_RELATIVE: 10.788 G_Regularizer: 0.000 validation_error: 20.698
+(epoch: 44, iters: 221664, time: 0.552, data: 0.000) G_L1: 13.774 G_L1_ABSOLUTE: 2.388 G_L1_RELATIVE: 11.386 G_Regularizer: 0.000 validation_error: 21.132
+(epoch: 44, iters: 223664, time: 0.553, data: 0.000) G_L1: 12.195 G_L1_ABSOLUTE: 2.522 G_L1_RELATIVE: 9.672 G_Regularizer: 0.000 validation_error: 20.629
+(epoch: 44, iters: 225664, time: 0.552, data: 0.000) G_L1: 14.490 G_L1_ABSOLUTE: 2.523 G_L1_RELATIVE: 11.967 G_Regularizer: 0.000 validation_error: 20.726
+(epoch: 44, iters: 227664, time: 0.554, data: 0.000) G_L1: 13.406 G_L1_ABSOLUTE: 2.284 G_L1_RELATIVE: 11.121 G_Regularizer: 0.000 validation_error: 20.890
+(epoch: 44, iters: 229664, time: 0.553, data: 0.000) G_L1: 14.638 G_L1_ABSOLUTE: 2.662 G_L1_RELATIVE: 11.977 G_Regularizer: 0.000 validation_error: 21.064
+(epoch: 44, iters: 231664, time: 0.532, data: 0.000) G_L1: 13.355 G_L1_ABSOLUTE: 2.302 G_L1_RELATIVE: 11.053 G_Regularizer: 0.000 validation_error: 20.694
+(epoch: 44, iters: 233664, time: 0.551, data: 0.000) G_L1: 12.180 G_L1_ABSOLUTE: 1.990 G_L1_RELATIVE: 10.190 G_Regularizer: 0.000 validation_error: 20.590
+(epoch: 44, iters: 235664, time: 0.548, data: 0.000) G_L1: 14.377 G_L1_ABSOLUTE: 2.772 G_L1_RELATIVE: 11.605 G_Regularizer: 0.000 validation_error: 20.683
+(epoch: 44, iters: 237664, time: 0.539, data: 0.000) G_L1: 16.523 G_L1_ABSOLUTE: 2.152 G_L1_RELATIVE: 14.371 G_Regularizer: 0.000 validation_error: 20.938
+(epoch: 44, iters: 239664, time: 0.558, data: 0.000) G_L1: 16.653 G_L1_ABSOLUTE: 2.648 G_L1_RELATIVE: 14.004 G_Regularizer: 0.000 validation_error: 21.054
+(epoch: 44, iters: 241664, time: 0.558, data: 0.000) G_L1: 11.480 G_L1_ABSOLUTE: 2.412 G_L1_RELATIVE: 9.068 G_Regularizer: 0.000 validation_error: 20.886
+(epoch: 44, iters: 243664, time: 0.532, data: 0.000) G_L1: 20.372 G_L1_ABSOLUTE: 2.788 G_L1_RELATIVE: 17.583 G_Regularizer: 0.000 validation_error: 20.590
+(epoch: 44, iters: 245664, time: 0.533, data: 0.000) G_L1: 11.691 G_L1_ABSOLUTE: 2.137 G_L1_RELATIVE: 9.554 G_Regularizer: 0.000 validation_error: 20.818
+(epoch: 44, iters: 247664, time: 0.551, data: 0.000) G_L1: 12.576 G_L1_ABSOLUTE: 2.505 G_L1_RELATIVE: 10.071 G_Regularizer: 0.000 validation_error: 20.704
+(epoch: 44, iters: 249664, time: 0.559, data: 0.000) G_L1: 13.963 G_L1_ABSOLUTE: 2.514 G_L1_RELATIVE: 11.449 G_Regularizer: 0.000 validation_error: 20.793
+(epoch: 44, iters: 251664, time: 0.545, data: 0.001) G_L1: 11.396 G_L1_ABSOLUTE: 2.430 G_L1_RELATIVE: 8.967 G_Regularizer: 0.000 validation_error: 20.606
+(epoch: 44, iters: 253664, time: 0.538, data: 0.000) G_L1: 13.734 G_L1_ABSOLUTE: 2.646 G_L1_RELATIVE: 11.088 G_Regularizer: 0.000 validation_error: 20.759
+(epoch: 44, iters: 255664, time: 0.557, data: 0.000) G_L1: 17.421 G_L1_ABSOLUTE: 2.253 G_L1_RELATIVE: 15.169 G_Regularizer: 0.000 validation_error: 20.778
+(epoch: 44, iters: 257664, time: 0.561, data: 0.000) G_L1: 13.732 G_L1_ABSOLUTE: 2.471 G_L1_RELATIVE: 11.261 G_Regularizer: 0.000 validation_error: 20.874
+(epoch: 44, iters: 259664, time: 0.554, data: 0.000) G_L1: 12.591 G_L1_ABSOLUTE: 2.148 G_L1_RELATIVE: 10.443 G_Regularizer: 0.000 validation_error: 20.911
+(epoch: 44, iters: 261664, time: 0.543, data: 0.000) G_L1: 12.428 G_L1_ABSOLUTE: 2.257 G_L1_RELATIVE: 10.170 G_Regularizer: 0.000 validation_error: 20.460
+(epoch: 44, iters: 263664, time: 0.534, data: 0.000) G_L1: 14.386 G_L1_ABSOLUTE: 2.515 G_L1_RELATIVE: 11.871 G_Regularizer: 0.000 validation_error: 21.080
+(epoch: 44, iters: 265664, time: 0.552, data: 0.001) G_L1: 10.812 G_L1_ABSOLUTE: 2.486 G_L1_RELATIVE: 8.326 G_Regularizer: 0.000 validation_error: 20.970
+(epoch: 44, iters: 267664, time: 0.539, data: 0.000) G_L1: 13.236 G_L1_ABSOLUTE: 2.262 G_L1_RELATIVE: 10.974 G_Regularizer: 0.000 validation_error: 20.567
+(epoch: 44, iters: 269664, time: 0.536, data: 0.000) G_L1: 14.057 G_L1_ABSOLUTE: 2.447 G_L1_RELATIVE: 11.610 G_Regularizer: 0.000 validation_error: 21.106
+(epoch: 44, iters: 271664, time: 0.554, data: 0.000) G_L1: 14.849 G_L1_ABSOLUTE: 2.198 G_L1_RELATIVE: 12.651 G_Regularizer: 0.000 validation_error: 20.949
+(epoch: 44, iters: 273664, time: 0.562, data: 0.001) G_L1: 14.149 G_L1_ABSOLUTE: 2.570 G_L1_RELATIVE: 11.579 G_Regularizer: 0.000 validation_error: 20.860
+(epoch: 44, iters: 275664, time: 0.557, data: 0.000) G_L1: 13.989 G_L1_ABSOLUTE: 2.746 G_L1_RELATIVE: 11.243 G_Regularizer: 0.000 validation_error: 21.130
+(epoch: 44, iters: 277664, time: 0.539, data: 0.000) G_L1: 14.709 G_L1_ABSOLUTE: 2.548 G_L1_RELATIVE: 12.161 G_Regularizer: 0.000 validation_error: 20.728
+(epoch: 44, iters: 279664, time: 0.550, data: 0.000) G_L1: 14.411 G_L1_ABSOLUTE: 2.591 G_L1_RELATIVE: 11.820 G_Regularizer: 0.000 validation_error: 20.711
+(epoch: 44, iters: 281664, time: 0.551, data: 0.001) G_L1: 14.643 G_L1_ABSOLUTE: 2.032 G_L1_RELATIVE: 12.611 G_Regularizer: 0.000 validation_error: 20.769
+(epoch: 44, iters: 283664, time: 0.540, data: 0.000) G_L1: 15.154 G_L1_ABSOLUTE: 2.661 G_L1_RELATIVE: 12.494 G_Regularizer: 0.000 validation_error: 20.791
+(epoch: 44, iters: 285664, time: 0.542, data: 0.000) G_L1: 15.561 G_L1_ABSOLUTE: 3.076 G_L1_RELATIVE: 12.484 G_Regularizer: 0.000 validation_error: 20.727
+(epoch: 44, iters: 287664, time: 0.531, data: 0.000) G_L1: 14.634 G_L1_ABSOLUTE: 2.469 G_L1_RELATIVE: 12.165 G_Regularizer: 0.000 validation_error: 20.815
+(epoch: 44, iters: 289664, time: 0.536, data: 0.000) G_L1: 16.457 G_L1_ABSOLUTE: 3.252 G_L1_RELATIVE: 13.206 G_Regularizer: 0.000 validation_error: 20.778
+(epoch: 44, iters: 291664, time: 0.550, data: 0.000) G_L1: 14.604 G_L1_ABSOLUTE: 2.355 G_L1_RELATIVE: 12.248 G_Regularizer: 0.000 validation_error: 20.715
+(epoch: 44, iters: 293664, time: 0.538, data: 0.000) G_L1: 16.174 G_L1_ABSOLUTE: 2.493 G_L1_RELATIVE: 13.681 G_Regularizer: 0.000 validation_error: 21.114
+(epoch: 44, iters: 295664, time: 0.527, data: 0.000) G_L1: 13.417 G_L1_ABSOLUTE: 2.434 G_L1_RELATIVE: 10.983 G_Regularizer: 0.000 validation_error: 20.762
+(epoch: 44, iters: 297664, time: 0.544, data: 0.000) G_L1: 11.399 G_L1_ABSOLUTE: 2.457 G_L1_RELATIVE: 8.942 G_Regularizer: 0.000 validation_error: 20.991
+(epoch: 44, iters: 299664, time: 0.535, data: 0.000) G_L1: 15.657 G_L1_ABSOLUTE: 2.371 G_L1_RELATIVE: 13.287 G_Regularizer: 0.000 validation_error: 20.648
+(epoch: 44, iters: 301664, time: 0.538, data: 0.000) G_L1: 15.529 G_L1_ABSOLUTE: 2.408 G_L1_RELATIVE: 13.121 G_Regularizer: 0.000 validation_error: 20.610
+(epoch: 45, iters: 912, time: 0.531, data: 0.000) G_L1: 12.325 G_L1_ABSOLUTE: 2.274 G_L1_RELATIVE: 10.051 G_Regularizer: 0.000 validation_error: 20.821
+(epoch: 45, iters: 2912, time: 0.556, data: 0.000) G_L1: 11.568 G_L1_ABSOLUTE: 2.163 G_L1_RELATIVE: 9.405 G_Regularizer: 0.000 validation_error: 20.577
+(epoch: 45, iters: 4912, time: 0.530, data: 0.000) G_L1: 14.774 G_L1_ABSOLUTE: 2.354 G_L1_RELATIVE: 12.420 G_Regularizer: 0.000 validation_error: 20.964
+(epoch: 45, iters: 6912, time: 0.549, data: 0.000) G_L1: 19.406 G_L1_ABSOLUTE: 2.531 G_L1_RELATIVE: 16.875 G_Regularizer: 0.000 validation_error: 20.987
+(epoch: 45, iters: 8912, time: 0.554, data: 0.000) G_L1: 13.827 G_L1_ABSOLUTE: 1.972 G_L1_RELATIVE: 11.855 G_Regularizer: 0.000 validation_error: 20.774
+(epoch: 45, iters: 10912, time: 0.554, data: 0.000) G_L1: 13.889 G_L1_ABSOLUTE: 2.963 G_L1_RELATIVE: 10.925 G_Regularizer: 0.000 validation_error: 20.657
+(epoch: 45, iters: 12912, time: 0.546, data: 0.000) G_L1: 13.482 G_L1_ABSOLUTE: 2.667 G_L1_RELATIVE: 10.815 G_Regularizer: 0.000 validation_error: 20.981
+(epoch: 45, iters: 14912, time: 0.561, data: 0.000) G_L1: 13.473 G_L1_ABSOLUTE: 2.786 G_L1_RELATIVE: 10.687 G_Regularizer: 0.000 validation_error: 20.685
+(epoch: 45, iters: 16912, time: 0.554, data: 0.000) G_L1: 13.540 G_L1_ABSOLUTE: 2.548 G_L1_RELATIVE: 10.992 G_Regularizer: 0.000 validation_error: 20.878
+(epoch: 45, iters: 18912, time: 0.560, data: 0.000) G_L1: 14.248 G_L1_ABSOLUTE: 2.421 G_L1_RELATIVE: 11.827 G_Regularizer: 0.000 validation_error: 20.614
+(epoch: 45, iters: 20912, time: 0.554, data: 0.000) G_L1: 13.981 G_L1_ABSOLUTE: 2.785 G_L1_RELATIVE: 11.196 G_Regularizer: 0.000 validation_error: 21.024
+(epoch: 45, iters: 22912, time: 0.541, data: 0.000) G_L1: 11.274 G_L1_ABSOLUTE: 2.188 G_L1_RELATIVE: 9.085 G_Regularizer: 0.000 validation_error: 20.856
+(epoch: 45, iters: 24912, time: 0.545, data: 0.000) G_L1: 14.602 G_L1_ABSOLUTE: 2.906 G_L1_RELATIVE: 11.696 G_Regularizer: 0.000 validation_error: 20.755
+(epoch: 45, iters: 26912, time: 0.544, data: 0.000) G_L1: 12.710 G_L1_ABSOLUTE: 2.588 G_L1_RELATIVE: 10.122 G_Regularizer: 0.000 validation_error: 20.753
+(epoch: 45, iters: 28912, time: 0.549, data: 0.000) G_L1: 13.804 G_L1_ABSOLUTE: 2.506 G_L1_RELATIVE: 11.298 G_Regularizer: 0.000 validation_error: 20.762
+(epoch: 45, iters: 30912, time: 0.543, data: 0.001) G_L1: 13.549 G_L1_ABSOLUTE: 2.305 G_L1_RELATIVE: 11.245 G_Regularizer: 0.000 validation_error: 20.798
+(epoch: 45, iters: 32912, time: 0.547, data: 0.000) G_L1: 13.908 G_L1_ABSOLUTE: 2.382 G_L1_RELATIVE: 11.527 G_Regularizer: 0.000 validation_error: 20.593
+(epoch: 45, iters: 34912, time: 0.544, data: 0.000) G_L1: 15.413 G_L1_ABSOLUTE: 2.145 G_L1_RELATIVE: 13.268 G_Regularizer: 0.000 validation_error: 20.965
+(epoch: 45, iters: 36912, time: 0.555, data: 0.000) G_L1: 12.771 G_L1_ABSOLUTE: 2.385 G_L1_RELATIVE: 10.387 G_Regularizer: 0.000 validation_error: 20.926
+(epoch: 45, iters: 38912, time: 0.555, data: 0.000) G_L1: 12.948 G_L1_ABSOLUTE: 2.337 G_L1_RELATIVE: 10.611 G_Regularizer: 0.000 validation_error: 20.931
+(epoch: 45, iters: 40912, time: 0.547, data: 0.000) G_L1: 12.964 G_L1_ABSOLUTE: 2.937 G_L1_RELATIVE: 10.027 G_Regularizer: 0.000 validation_error: 20.865
+(epoch: 45, iters: 42912, time: 0.546, data: 0.000) G_L1: 16.626 G_L1_ABSOLUTE: 2.406 G_L1_RELATIVE: 14.220 G_Regularizer: 0.000 validation_error: 20.958
+(epoch: 45, iters: 44912, time: 0.553, data: 0.000) G_L1: 12.110 G_L1_ABSOLUTE: 2.104 G_L1_RELATIVE: 10.006 G_Regularizer: 0.000 validation_error: 20.777
+(epoch: 45, iters: 46912, time: 0.555, data: 0.000) G_L1: 12.978 G_L1_ABSOLUTE: 2.400 G_L1_RELATIVE: 10.578 G_Regularizer: 0.000 validation_error: 20.880
+(epoch: 45, iters: 48912, time: 0.561, data: 0.000) G_L1: 16.011 G_L1_ABSOLUTE: 2.767 G_L1_RELATIVE: 13.244 G_Regularizer: 0.000 validation_error: 20.700
+(epoch: 45, iters: 50912, time: 0.533, data: 0.000) G_L1: 12.741 G_L1_ABSOLUTE: 2.497 G_L1_RELATIVE: 10.244 G_Regularizer: 0.000 validation_error: 20.859
+(epoch: 45, iters: 52912, time: 0.561, data: 0.000) G_L1: 14.835 G_L1_ABSOLUTE: 2.635 G_L1_RELATIVE: 12.200 G_Regularizer: 0.000 validation_error: 20.841
+(epoch: 45, iters: 54912, time: 0.552, data: 0.000) G_L1: 14.650 G_L1_ABSOLUTE: 2.474 G_L1_RELATIVE: 12.176 G_Regularizer: 0.000 validation_error: 20.682
+(epoch: 45, iters: 56912, time: 0.560, data: 0.000) G_L1: 11.943 G_L1_ABSOLUTE: 2.317 G_L1_RELATIVE: 9.626 G_Regularizer: 0.000 validation_error: 20.590
+(epoch: 45, iters: 58912, time: 0.551, data: 0.000) G_L1: 16.132 G_L1_ABSOLUTE: 2.781 G_L1_RELATIVE: 13.352 G_Regularizer: 0.000 validation_error: 20.994
+(epoch: 45, iters: 60912, time: 0.548, data: 0.000) G_L1: 11.620 G_L1_ABSOLUTE: 2.331 G_L1_RELATIVE: 9.289 G_Regularizer: 0.000 validation_error: 20.855
+(epoch: 45, iters: 62912, time: 0.558, data: 0.000) G_L1: 15.430 G_L1_ABSOLUTE: 2.139 G_L1_RELATIVE: 13.292 G_Regularizer: 0.000 validation_error: 20.952
+(epoch: 45, iters: 64912, time: 0.555, data: 0.000) G_L1: 14.492 G_L1_ABSOLUTE: 2.475 G_L1_RELATIVE: 12.018 G_Regularizer: 0.000 validation_error: 20.725
+(epoch: 45, iters: 66912, time: 0.555, data: 0.000) G_L1: 12.844 G_L1_ABSOLUTE: 2.657 G_L1_RELATIVE: 10.188 G_Regularizer: 0.000 validation_error: 20.675
+(epoch: 45, iters: 68912, time: 0.530, data: 0.000) G_L1: 14.981 G_L1_ABSOLUTE: 2.326 G_L1_RELATIVE: 12.654 G_Regularizer: 0.000 validation_error: 21.107
+(epoch: 45, iters: 70912, time: 0.548, data: 0.000) G_L1: 14.608 G_L1_ABSOLUTE: 2.257 G_L1_RELATIVE: 12.351 G_Regularizer: 0.000 validation_error: 20.592
+(epoch: 45, iters: 72912, time: 0.559, data: 0.000) G_L1: 15.087 G_L1_ABSOLUTE: 3.155 G_L1_RELATIVE: 11.932 G_Regularizer: 0.000 validation_error: 20.953
+(epoch: 45, iters: 74912, time: 0.532, data: 0.000) G_L1: 14.655 G_L1_ABSOLUTE: 2.620 G_L1_RELATIVE: 12.035 G_Regularizer: 0.000 validation_error: 20.534
+(epoch: 45, iters: 76912, time: 0.535, data: 0.001) G_L1: 13.630 G_L1_ABSOLUTE: 2.451 G_L1_RELATIVE: 11.180 G_Regularizer: 0.000 validation_error: 20.680
+(epoch: 45, iters: 78912, time: 0.558, data: 0.000) G_L1: 12.824 G_L1_ABSOLUTE: 2.123 G_L1_RELATIVE: 10.701 G_Regularizer: 0.000 validation_error: 20.908
+(epoch: 45, iters: 80912, time: 0.551, data: 0.000) G_L1: 12.364 G_L1_ABSOLUTE: 2.199 G_L1_RELATIVE: 10.165 G_Regularizer: 0.000 validation_error: 20.617
+(epoch: 45, iters: 82912, time: 0.544, data: 0.001) G_L1: 15.620 G_L1_ABSOLUTE: 2.698 G_L1_RELATIVE: 12.922 G_Regularizer: 0.000 validation_error: 20.693
+(epoch: 45, iters: 84912, time: 0.537, data: 0.000) G_L1: 13.472 G_L1_ABSOLUTE: 2.099 G_L1_RELATIVE: 11.373 G_Regularizer: 0.000 validation_error: 20.865
+(epoch: 45, iters: 86912, time: 0.561, data: 0.000) G_L1: 12.495 G_L1_ABSOLUTE: 2.284 G_L1_RELATIVE: 10.211 G_Regularizer: 0.000 validation_error: 20.590
+(epoch: 45, iters: 88912, time: 0.559, data: 0.001) G_L1: 13.591 G_L1_ABSOLUTE: 2.377 G_L1_RELATIVE: 11.214 G_Regularizer: 0.000 validation_error: 20.669
+(epoch: 45, iters: 90912, time: 0.547, data: 0.000) G_L1: 13.515 G_L1_ABSOLUTE: 2.684 G_L1_RELATIVE: 10.831 G_Regularizer: 0.000 validation_error: 20.810
+(epoch: 45, iters: 92912, time: 0.548, data: 0.000) G_L1: 15.287 G_L1_ABSOLUTE: 2.778 G_L1_RELATIVE: 12.509 G_Regularizer: 0.000 validation_error: 20.913
+(epoch: 45, iters: 94912, time: 0.530, data: 0.000) G_L1: 12.667 G_L1_ABSOLUTE: 2.622 G_L1_RELATIVE: 10.045 G_Regularizer: 0.000 validation_error: 20.826
+(epoch: 45, iters: 96912, time: 0.533, data: 0.000) G_L1: 14.379 G_L1_ABSOLUTE: 2.642 G_L1_RELATIVE: 11.737 G_Regularizer: 0.000 validation_error: 20.982
+(epoch: 45, iters: 98912, time: 0.551, data: 0.000) G_L1: 13.414 G_L1_ABSOLUTE: 2.805 G_L1_RELATIVE: 10.609 G_Regularizer: 0.000 validation_error: 20.811
+(epoch: 45, iters: 100912, time: 0.553, data: 0.000) G_L1: 12.269 G_L1_ABSOLUTE: 2.406 G_L1_RELATIVE: 9.863 G_Regularizer: 0.000 validation_error: 20.875
+(epoch: 45, iters: 102912, time: 0.542, data: 0.000) G_L1: 15.669 G_L1_ABSOLUTE: 2.459 G_L1_RELATIVE: 13.210 G_Regularizer: 0.000 validation_error: 20.966
+(epoch: 45, iters: 104912, time: 0.558, data: 0.000) G_L1: 13.533 G_L1_ABSOLUTE: 2.749 G_L1_RELATIVE: 10.784 G_Regularizer: 0.000 validation_error: 20.756
+(epoch: 45, iters: 106912, time: 0.553, data: 0.000) G_L1: 14.889 G_L1_ABSOLUTE: 2.401 G_L1_RELATIVE: 12.487 G_Regularizer: 0.000 validation_error: 20.472
+(epoch: 45, iters: 108912, time: 0.539, data: 0.000) G_L1: 13.449 G_L1_ABSOLUTE: 2.688 G_L1_RELATIVE: 10.761 G_Regularizer: 0.000 validation_error: 20.834
+(epoch: 45, iters: 110912, time: 0.547, data: 0.000) G_L1: 14.344 G_L1_ABSOLUTE: 2.374 G_L1_RELATIVE: 11.970 G_Regularizer: 0.000 validation_error: 20.590
+(epoch: 45, iters: 112912, time: 0.558, data: 0.000) G_L1: 13.522 G_L1_ABSOLUTE: 2.413 G_L1_RELATIVE: 11.109 G_Regularizer: 0.000 validation_error: 20.691
+(epoch: 45, iters: 114912, time: 0.559, data: 0.000) G_L1: 12.916 G_L1_ABSOLUTE: 2.146 G_L1_RELATIVE: 10.771 G_Regularizer: 0.000 validation_error: 21.095
+(epoch: 45, iters: 116912, time: 0.536, data: 0.000) G_L1: 13.311 G_L1_ABSOLUTE: 2.674 G_L1_RELATIVE: 10.638 G_Regularizer: 0.000 validation_error: 20.857
+(epoch: 45, iters: 118912, time: 0.548, data: 0.000) G_L1: 12.679 G_L1_ABSOLUTE: 2.025 G_L1_RELATIVE: 10.654 G_Regularizer: 0.000 validation_error: 20.635
+(epoch: 45, iters: 120912, time: 0.553, data: 0.000) G_L1: 15.030 G_L1_ABSOLUTE: 2.394 G_L1_RELATIVE: 12.636 G_Regularizer: 0.000 validation_error: 20.605
+(epoch: 45, iters: 122912, time: 0.539, data: 0.000) G_L1: 13.833 G_L1_ABSOLUTE: 2.588 G_L1_RELATIVE: 11.245 G_Regularizer: 0.000 validation_error: 20.581
+(epoch: 45, iters: 124912, time: 0.543, data: 0.000) G_L1: 16.347 G_L1_ABSOLUTE: 2.600 G_L1_RELATIVE: 13.747 G_Regularizer: 0.000 validation_error: 20.610
+(epoch: 45, iters: 126912, time: 0.562, data: 0.000) G_L1: 13.368 G_L1_ABSOLUTE: 2.613 G_L1_RELATIVE: 10.755 G_Regularizer: 0.000 validation_error: 20.693
+(epoch: 45, iters: 128912, time: 0.539, data: 0.000) G_L1: 13.526 G_L1_ABSOLUTE: 2.205 G_L1_RELATIVE: 11.321 G_Regularizer: 0.000 validation_error: 20.879
+(epoch: 45, iters: 130912, time: 0.556, data: 0.000) G_L1: 13.877 G_L1_ABSOLUTE: 2.410 G_L1_RELATIVE: 11.467 G_Regularizer: 0.000 validation_error: 20.727
+(epoch: 45, iters: 132912, time: 0.540, data: 0.000) G_L1: 13.944 G_L1_ABSOLUTE: 2.277 G_L1_RELATIVE: 11.667 G_Regularizer: 0.000 validation_error: 20.996
+(epoch: 45, iters: 134912, time: 0.547, data: 0.000) G_L1: 13.832 G_L1_ABSOLUTE: 2.549 G_L1_RELATIVE: 11.283 G_Regularizer: 0.000 validation_error: 20.598
+(epoch: 45, iters: 136912, time: 0.528, data: 0.000) G_L1: 15.013 G_L1_ABSOLUTE: 2.090 G_L1_RELATIVE: 12.923 G_Regularizer: 0.000 validation_error: 20.462
+(epoch: 45, iters: 138912, time: 0.538, data: 0.000) G_L1: 18.113 G_L1_ABSOLUTE: 2.523 G_L1_RELATIVE: 15.590 G_Regularizer: 0.000 validation_error: 20.817
+(epoch: 45, iters: 140912, time: 0.536, data: 0.000) G_L1: 14.581 G_L1_ABSOLUTE: 2.593 G_L1_RELATIVE: 11.988 G_Regularizer: 0.000 validation_error: 20.630
+(epoch: 45, iters: 142912, time: 0.551, data: 0.000) G_L1: 12.635 G_L1_ABSOLUTE: 2.174 G_L1_RELATIVE: 10.461 G_Regularizer: 0.000 validation_error: 20.862
+(epoch: 45, iters: 144912, time: 0.540, data: 0.000) G_L1: 15.563 G_L1_ABSOLUTE: 2.298 G_L1_RELATIVE: 13.265 G_Regularizer: 0.000 validation_error: 20.602
+(epoch: 45, iters: 146912, time: 0.544, data: 0.000) G_L1: 14.898 G_L1_ABSOLUTE: 2.199 G_L1_RELATIVE: 12.699 G_Regularizer: 0.000 validation_error: 21.024
+(epoch: 45, iters: 148912, time: 0.549, data: 0.000) G_L1: 14.120 G_L1_ABSOLUTE: 2.265 G_L1_RELATIVE: 11.854 G_Regularizer: 0.000 validation_error: 20.747
+(epoch: 45, iters: 150912, time: 0.552, data: 0.000) G_L1: 15.065 G_L1_ABSOLUTE: 2.779 G_L1_RELATIVE: 12.287 G_Regularizer: 0.000 validation_error: 20.665
+(epoch: 45, iters: 152912, time: 0.550, data: 0.000) G_L1: 14.803 G_L1_ABSOLUTE: 2.484 G_L1_RELATIVE: 12.319 G_Regularizer: 0.000 validation_error: 20.858
+(epoch: 45, iters: 154912, time: 0.533, data: 0.000) G_L1: 14.263 G_L1_ABSOLUTE: 2.186 G_L1_RELATIVE: 12.077 G_Regularizer: 0.000 validation_error: 20.942
+(epoch: 45, iters: 156912, time: 0.537, data: 0.000) G_L1: 13.356 G_L1_ABSOLUTE: 2.812 G_L1_RELATIVE: 10.543 G_Regularizer: 0.000 validation_error: 20.686
+(epoch: 45, iters: 158912, time: 0.531, data: 0.000) G_L1: 13.681 G_L1_ABSOLUTE: 2.130 G_L1_RELATIVE: 11.551 G_Regularizer: 0.000 validation_error: 20.796
+(epoch: 45, iters: 160912, time: 0.558, data: 0.000) G_L1: 13.346 G_L1_ABSOLUTE: 2.693 G_L1_RELATIVE: 10.653 G_Regularizer: 0.000 validation_error: 20.642
+(epoch: 45, iters: 162912, time: 0.547, data: 0.000) G_L1: 11.734 G_L1_ABSOLUTE: 2.258 G_L1_RELATIVE: 9.476 G_Regularizer: 0.000 validation_error: 21.017
+(epoch: 45, iters: 164912, time: 0.551, data: 0.001) G_L1: 14.742 G_L1_ABSOLUTE: 2.375 G_L1_RELATIVE: 12.367 G_Regularizer: 0.000 validation_error: 20.613
+(epoch: 45, iters: 166912, time: 0.541, data: 0.000) G_L1: 12.975 G_L1_ABSOLUTE: 2.362 G_L1_RELATIVE: 10.614 G_Regularizer: 0.000 validation_error: 20.971
+(epoch: 45, iters: 168912, time: 0.560, data: 0.000) G_L1: 14.345 G_L1_ABSOLUTE: 2.890 G_L1_RELATIVE: 11.455 G_Regularizer: 0.000 validation_error: 20.845
+(epoch: 45, iters: 170912, time: 0.551, data: 0.000) G_L1: 15.889 G_L1_ABSOLUTE: 2.743 G_L1_RELATIVE: 13.146 G_Regularizer: 0.000 validation_error: 20.757
+(epoch: 45, iters: 172912, time: 0.543, data: 0.000) G_L1: 13.414 G_L1_ABSOLUTE: 2.691 G_L1_RELATIVE: 10.723 G_Regularizer: 0.000 validation_error: 20.743
+(epoch: 45, iters: 174912, time: 0.564, data: 0.000) G_L1: 13.832 G_L1_ABSOLUTE: 2.501 G_L1_RELATIVE: 11.331 G_Regularizer: 0.000 validation_error: 20.961
+(epoch: 45, iters: 176912, time: 0.557, data: 0.001) G_L1: 13.623 G_L1_ABSOLUTE: 1.989 G_L1_RELATIVE: 11.633 G_Regularizer: 0.000 validation_error: 21.026
+(epoch: 45, iters: 178912, time: 0.547, data: 0.000) G_L1: 12.384 G_L1_ABSOLUTE: 2.284 G_L1_RELATIVE: 10.101 G_Regularizer: 0.000 validation_error: 20.589
+(epoch: 45, iters: 180912, time: 0.554, data: 0.000) G_L1: 12.701 G_L1_ABSOLUTE: 2.572 G_L1_RELATIVE: 10.129 G_Regularizer: 0.000 validation_error: 20.859
+(epoch: 45, iters: 182912, time: 0.545, data: 0.000) G_L1: 14.088 G_L1_ABSOLUTE: 2.388 G_L1_RELATIVE: 11.700 G_Regularizer: 0.000 validation_error: 21.223
+(epoch: 45, iters: 184912, time: 0.541, data: 0.000) G_L1: 13.657 G_L1_ABSOLUTE: 2.469 G_L1_RELATIVE: 11.188 G_Regularizer: 0.000 validation_error: 21.324
+(epoch: 45, iters: 186912, time: 0.554, data: 0.000) G_L1: 11.856 G_L1_ABSOLUTE: 2.314 G_L1_RELATIVE: 9.541 G_Regularizer: 0.000 validation_error: 21.297
+(epoch: 45, iters: 188912, time: 0.545, data: 0.000) G_L1: 14.098 G_L1_ABSOLUTE: 2.920 G_L1_RELATIVE: 11.178 G_Regularizer: 0.000 validation_error: 20.911
+(epoch: 45, iters: 190912, time: 0.529, data: 0.000) G_L1: 14.718 G_L1_ABSOLUTE: 2.954 G_L1_RELATIVE: 11.764 G_Regularizer: 0.000 validation_error: 20.850
+(epoch: 45, iters: 192912, time: 0.541, data: 0.000) G_L1: 15.457 G_L1_ABSOLUTE: 2.716 G_L1_RELATIVE: 12.741 G_Regularizer: 0.000 validation_error: 21.023
+(epoch: 45, iters: 194912, time: 0.559, data: 0.000) G_L1: 13.891 G_L1_ABSOLUTE: 2.592 G_L1_RELATIVE: 11.298 G_Regularizer: 0.000 validation_error: 20.662
+(epoch: 45, iters: 196912, time: 0.544, data: 0.000) G_L1: 12.764 G_L1_ABSOLUTE: 2.551 G_L1_RELATIVE: 10.213 G_Regularizer: 0.000 validation_error: 21.146
+(epoch: 45, iters: 198912, time: 0.552, data: 0.000) G_L1: 13.252 G_L1_ABSOLUTE: 2.681 G_L1_RELATIVE: 10.571 G_Regularizer: 0.000 validation_error: 20.953
+(epoch: 45, iters: 200912, time: 0.555, data: 0.000) G_L1: 13.043 G_L1_ABSOLUTE: 2.666 G_L1_RELATIVE: 10.376 G_Regularizer: 0.000 validation_error: 21.076
+(epoch: 45, iters: 202912, time: 0.565, data: 0.000) G_L1: 14.355 G_L1_ABSOLUTE: 2.178 G_L1_RELATIVE: 12.177 G_Regularizer: 0.000 validation_error: 20.969
+(epoch: 45, iters: 204912, time: 0.537, data: 0.000) G_L1: 14.718 G_L1_ABSOLUTE: 2.462 G_L1_RELATIVE: 12.256 G_Regularizer: 0.000 validation_error: 20.837
+(epoch: 45, iters: 206912, time: 0.558, data: 0.000) G_L1: 12.417 G_L1_ABSOLUTE: 2.207 G_L1_RELATIVE: 10.210 G_Regularizer: 0.000 validation_error: 21.011
+(epoch: 45, iters: 208912, time: 0.535, data: 0.000) G_L1: 13.597 G_L1_ABSOLUTE: 2.475 G_L1_RELATIVE: 11.122 G_Regularizer: 0.000 validation_error: 20.974
+(epoch: 45, iters: 210912, time: 0.543, data: 0.000) G_L1: 13.592 G_L1_ABSOLUTE: 2.345 G_L1_RELATIVE: 11.247 G_Regularizer: 0.000 validation_error: 20.889
+(epoch: 45, iters: 212912, time: 0.538, data: 0.000) G_L1: 13.934 G_L1_ABSOLUTE: 2.375 G_L1_RELATIVE: 11.559 G_Regularizer: 0.000 validation_error: 21.025
+(epoch: 45, iters: 214912, time: 0.560, data: 0.000) G_L1: 14.406 G_L1_ABSOLUTE: 2.197 G_L1_RELATIVE: 12.209 G_Regularizer: 0.000 validation_error: 20.963
+(epoch: 45, iters: 216912, time: 0.546, data: 0.000) G_L1: 14.902 G_L1_ABSOLUTE: 2.740 G_L1_RELATIVE: 12.162 G_Regularizer: 0.000 validation_error: 21.038
+(epoch: 45, iters: 218912, time: 0.552, data: 0.000) G_L1: 13.231 G_L1_ABSOLUTE: 2.238 G_L1_RELATIVE: 10.993 G_Regularizer: 0.000 validation_error: 20.905
+(epoch: 45, iters: 220912, time: 0.555, data: 0.000) G_L1: 14.574 G_L1_ABSOLUTE: 2.387 G_L1_RELATIVE: 12.188 G_Regularizer: 0.000 validation_error: 20.837
+(epoch: 45, iters: 222912, time: 0.558, data: 0.000) G_L1: 13.484 G_L1_ABSOLUTE: 2.647 G_L1_RELATIVE: 10.837 G_Regularizer: 0.000 validation_error: 20.931
+(epoch: 45, iters: 224912, time: 0.538, data: 0.001) G_L1: 13.402 G_L1_ABSOLUTE: 2.285 G_L1_RELATIVE: 11.116 G_Regularizer: 0.000 validation_error: 21.026
+(epoch: 45, iters: 226912, time: 0.536, data: 0.000) G_L1: 13.634 G_L1_ABSOLUTE: 2.454 G_L1_RELATIVE: 11.179 G_Regularizer: 0.000 validation_error: 20.888
+(epoch: 45, iters: 228912, time: 0.558, data: 0.000) G_L1: 14.295 G_L1_ABSOLUTE: 2.634 G_L1_RELATIVE: 11.661 G_Regularizer: 0.000 validation_error: 20.833
+(epoch: 45, iters: 230912, time: 0.542, data: 0.000) G_L1: 15.519 G_L1_ABSOLUTE: 2.535 G_L1_RELATIVE: 12.984 G_Regularizer: 0.000 validation_error: 20.767
+(epoch: 45, iters: 232912, time: 0.544, data: 0.000) G_L1: 13.650 G_L1_ABSOLUTE: 2.282 G_L1_RELATIVE: 11.368 G_Regularizer: 0.000 validation_error: 21.155
+(epoch: 45, iters: 234912, time: 0.549, data: 0.000) G_L1: 16.477 G_L1_ABSOLUTE: 3.072 G_L1_RELATIVE: 13.405 G_Regularizer: 0.000 validation_error: 20.811
+(epoch: 45, iters: 236912, time: 0.542, data: 0.000) G_L1: 15.186 G_L1_ABSOLUTE: 2.572 G_L1_RELATIVE: 12.614 G_Regularizer: 0.000 validation_error: 20.755
+(epoch: 45, iters: 238912, time: 0.540, data: 0.000) G_L1: 12.063 G_L1_ABSOLUTE: 2.262 G_L1_RELATIVE: 9.800 G_Regularizer: 0.000 validation_error: 21.243
+(epoch: 45, iters: 240912, time: 0.544, data: 0.000) G_L1: 13.855 G_L1_ABSOLUTE: 2.261 G_L1_RELATIVE: 11.594 G_Regularizer: 0.000 validation_error: 20.679
+(epoch: 45, iters: 242912, time: 0.549, data: 0.000) G_L1: 14.809 G_L1_ABSOLUTE: 2.801 G_L1_RELATIVE: 12.008 G_Regularizer: 0.000 validation_error: 20.826
+(epoch: 45, iters: 244912, time: 0.544, data: 0.000) G_L1: 15.088 G_L1_ABSOLUTE: 2.434 G_L1_RELATIVE: 12.654 G_Regularizer: 0.000 validation_error: 20.604
+(epoch: 45, iters: 246912, time: 0.544, data: 0.000) G_L1: 13.458 G_L1_ABSOLUTE: 2.312 G_L1_RELATIVE: 11.146 G_Regularizer: 0.000 validation_error: 20.959
+(epoch: 45, iters: 248912, time: 0.527, data: 0.000) G_L1: 12.984 G_L1_ABSOLUTE: 2.859 G_L1_RELATIVE: 10.126 G_Regularizer: 0.000 validation_error: 20.954
+(epoch: 45, iters: 250912, time: 0.541, data: 0.000) G_L1: 14.836 G_L1_ABSOLUTE: 2.780 G_L1_RELATIVE: 12.056 G_Regularizer: 0.000 validation_error: 20.647
+(epoch: 45, iters: 252912, time: 0.526, data: 0.000) G_L1: 15.372 G_L1_ABSOLUTE: 2.773 G_L1_RELATIVE: 12.599 G_Regularizer: 0.000 validation_error: 21.062
+(epoch: 45, iters: 254912, time: 0.551, data: 0.000) G_L1: 15.427 G_L1_ABSOLUTE: 3.341 G_L1_RELATIVE: 12.086 G_Regularizer: 0.000 validation_error: 20.957
+(epoch: 45, iters: 256912, time: 0.552, data: 0.000) G_L1: 14.768 G_L1_ABSOLUTE: 2.920 G_L1_RELATIVE: 11.848 G_Regularizer: 0.000 validation_error: 20.996
+(epoch: 45, iters: 258912, time: 0.536, data: 0.000) G_L1: 15.252 G_L1_ABSOLUTE: 2.477 G_L1_RELATIVE: 12.775 G_Regularizer: 0.000 validation_error: 20.969
+(epoch: 45, iters: 260912, time: 0.553, data: 0.000) G_L1: 11.715 G_L1_ABSOLUTE: 2.337 G_L1_RELATIVE: 9.379 G_Regularizer: 0.000 validation_error: 20.698
+(epoch: 45, iters: 262912, time: 0.556, data: 0.000) G_L1: 14.512 G_L1_ABSOLUTE: 3.017 G_L1_RELATIVE: 11.495 G_Regularizer: 0.000 validation_error: 20.700
+(epoch: 45, iters: 264912, time: 0.535, data: 0.000) G_L1: 13.746 G_L1_ABSOLUTE: 2.006 G_L1_RELATIVE: 11.740 G_Regularizer: 0.000 validation_error: 20.944
+(epoch: 45, iters: 266912, time: 0.548, data: 0.000) G_L1: 16.370 G_L1_ABSOLUTE: 2.886 G_L1_RELATIVE: 13.484 G_Regularizer: 0.000 validation_error: 20.845
+(epoch: 45, iters: 268912, time: 0.555, data: 0.001) G_L1: 14.656 G_L1_ABSOLUTE: 2.702 G_L1_RELATIVE: 11.954 G_Regularizer: 0.000 validation_error: 20.656
+(epoch: 45, iters: 270912, time: 0.549, data: 0.000) G_L1: 10.988 G_L1_ABSOLUTE: 2.283 G_L1_RELATIVE: 8.705 G_Regularizer: 0.000 validation_error: 21.072
+(epoch: 45, iters: 272912, time: 0.538, data: 0.000) G_L1: 14.963 G_L1_ABSOLUTE: 2.398 G_L1_RELATIVE: 12.566 G_Regularizer: 0.000 validation_error: 20.968
+(epoch: 45, iters: 274912, time: 0.545, data: 0.000) G_L1: 16.807 G_L1_ABSOLUTE: 2.512 G_L1_RELATIVE: 14.295 G_Regularizer: 0.000 validation_error: 21.297
+(epoch: 45, iters: 276912, time: 0.538, data: 0.000) G_L1: 13.994 G_L1_ABSOLUTE: 2.622 G_L1_RELATIVE: 11.372 G_Regularizer: 0.000 validation_error: 21.251
+(epoch: 45, iters: 278912, time: 0.559, data: 0.000) G_L1: 17.085 G_L1_ABSOLUTE: 2.708 G_L1_RELATIVE: 14.377 G_Regularizer: 0.000 validation_error: 21.353
+(epoch: 45, iters: 280912, time: 0.555, data: 0.001) G_L1: 14.177 G_L1_ABSOLUTE: 2.142 G_L1_RELATIVE: 12.034 G_Regularizer: 0.000 validation_error: 20.624
+(epoch: 45, iters: 282912, time: 0.541, data: 0.000) G_L1: 12.332 G_L1_ABSOLUTE: 2.509 G_L1_RELATIVE: 9.823 G_Regularizer: 0.000 validation_error: 21.077
+(epoch: 45, iters: 284912, time: 0.534, data: 0.000) G_L1: 14.091 G_L1_ABSOLUTE: 2.139 G_L1_RELATIVE: 11.951 G_Regularizer: 0.000 validation_error: 21.042
+(epoch: 45, iters: 286912, time: 0.548, data: 0.000) G_L1: 14.436 G_L1_ABSOLUTE: 2.676 G_L1_RELATIVE: 11.761 G_Regularizer: 0.000 validation_error: 20.703
+(epoch: 45, iters: 288912, time: 0.559, data: 0.001) G_L1: 14.049 G_L1_ABSOLUTE: 2.724 G_L1_RELATIVE: 11.325 G_Regularizer: 0.000 validation_error: 20.906
+(epoch: 45, iters: 290912, time: 0.536, data: 0.000) G_L1: 14.785 G_L1_ABSOLUTE: 2.681 G_L1_RELATIVE: 12.104 G_Regularizer: 0.000 validation_error: 21.153
+(epoch: 45, iters: 292912, time: 0.560, data: 0.000) G_L1: 14.719 G_L1_ABSOLUTE: 2.355 G_L1_RELATIVE: 12.364 G_Regularizer: 0.000 validation_error: 20.808
+(epoch: 45, iters: 294912, time: 0.552, data: 0.000) G_L1: 13.824 G_L1_ABSOLUTE: 2.706 G_L1_RELATIVE: 11.118 G_Regularizer: 0.000 validation_error: 21.014
+(epoch: 45, iters: 296912, time: 0.557, data: 0.000) G_L1: 13.322 G_L1_ABSOLUTE: 2.711 G_L1_RELATIVE: 10.610 G_Regularizer: 0.000 validation_error: 20.630
+(epoch: 45, iters: 298912, time: 0.533, data: 0.000) G_L1: 12.308 G_L1_ABSOLUTE: 2.373 G_L1_RELATIVE: 9.935 G_Regularizer: 0.000 validation_error: 20.860
+(epoch: 45, iters: 300912, time: 0.548, data: 0.000) G_L1: 13.233 G_L1_ABSOLUTE: 2.283 G_L1_RELATIVE: 10.950 G_Regularizer: 0.000 validation_error: 20.959
+(epoch: 46, iters: 160, time: 0.546, data: 0.000) G_L1: 12.205 G_L1_ABSOLUTE: 2.495 G_L1_RELATIVE: 9.709 G_Regularizer: 0.000 validation_error: 20.883
+(epoch: 46, iters: 2160, time: 0.553, data: 0.000) G_L1: 15.929 G_L1_ABSOLUTE: 2.362 G_L1_RELATIVE: 13.567 G_Regularizer: 0.000 validation_error: 20.650
+(epoch: 46, iters: 4160, time: 0.535, data: 0.000) G_L1: 11.617 G_L1_ABSOLUTE: 2.253 G_L1_RELATIVE: 9.364 G_Regularizer: 0.000 validation_error: 20.588
+(epoch: 46, iters: 6160, time: 0.556, data: 0.001) G_L1: 14.480 G_L1_ABSOLUTE: 3.309 G_L1_RELATIVE: 11.171 G_Regularizer: 0.000 validation_error: 21.056
+(epoch: 46, iters: 8160, time: 0.545, data: 0.000) G_L1: 10.192 G_L1_ABSOLUTE: 2.131 G_L1_RELATIVE: 8.061 G_Regularizer: 0.000 validation_error: 21.174
+(epoch: 46, iters: 10160, time: 0.558, data: 0.000) G_L1: 14.256 G_L1_ABSOLUTE: 2.127 G_L1_RELATIVE: 12.130 G_Regularizer: 0.000 validation_error: 20.474
+(epoch: 46, iters: 12160, time: 0.547, data: 0.000) G_L1: 14.516 G_L1_ABSOLUTE: 2.773 G_L1_RELATIVE: 11.743 G_Regularizer: 0.000 validation_error: 20.864
+(epoch: 46, iters: 14160, time: 0.548, data: 0.000) G_L1: 15.049 G_L1_ABSOLUTE: 2.365 G_L1_RELATIVE: 12.684 G_Regularizer: 0.000 validation_error: 20.625
+(epoch: 46, iters: 16160, time: 0.547, data: 0.001) G_L1: 13.996 G_L1_ABSOLUTE: 2.256 G_L1_RELATIVE: 11.739 G_Regularizer: 0.000 validation_error: 21.088
+(epoch: 46, iters: 18160, time: 0.555, data: 0.000) G_L1: 12.049 G_L1_ABSOLUTE: 2.536 G_L1_RELATIVE: 9.513 G_Regularizer: 0.000 validation_error: 20.817
+(epoch: 46, iters: 20160, time: 0.565, data: 0.000) G_L1: 14.241 G_L1_ABSOLUTE: 2.366 G_L1_RELATIVE: 11.875 G_Regularizer: 0.000 validation_error: 21.029
+(epoch: 46, iters: 22160, time: 0.533, data: 0.000) G_L1: 14.137 G_L1_ABSOLUTE: 2.444 G_L1_RELATIVE: 11.692 G_Regularizer: 0.000 validation_error: 20.766
+(epoch: 46, iters: 24160, time: 0.556, data: 0.000) G_L1: 14.732 G_L1_ABSOLUTE: 2.414 G_L1_RELATIVE: 12.318 G_Regularizer: 0.000 validation_error: 20.957
+(epoch: 46, iters: 26160, time: 0.556, data: 0.000) G_L1: 13.747 G_L1_ABSOLUTE: 2.566 G_L1_RELATIVE: 11.180 G_Regularizer: 0.000 validation_error: 20.637
+(epoch: 46, iters: 28160, time: 0.566, data: 0.000) G_L1: 15.750 G_L1_ABSOLUTE: 2.433 G_L1_RELATIVE: 13.317 G_Regularizer: 0.000 validation_error: 20.781
+(epoch: 46, iters: 30160, time: 0.532, data: 0.000) G_L1: 25.271 G_L1_ABSOLUTE: 2.881 G_L1_RELATIVE: 22.390 G_Regularizer: 0.000 validation_error: 20.710
+(epoch: 46, iters: 32160, time: 0.556, data: 0.000) G_L1: 14.843 G_L1_ABSOLUTE: 2.430 G_L1_RELATIVE: 12.413 G_Regularizer: 0.000 validation_error: 21.285
+(epoch: 46, iters: 34160, time: 0.555, data: 0.000) G_L1: 13.749 G_L1_ABSOLUTE: 2.953 G_L1_RELATIVE: 10.797 G_Regularizer: 0.000 validation_error: 20.968
+(epoch: 46, iters: 36160, time: 0.544, data: 0.000) G_L1: 14.151 G_L1_ABSOLUTE: 2.894 G_L1_RELATIVE: 11.258 G_Regularizer: 0.000 validation_error: 20.933
+(epoch: 46, iters: 38160, time: 0.552, data: 0.000) G_L1: 14.118 G_L1_ABSOLUTE: 2.383 G_L1_RELATIVE: 11.735 G_Regularizer: 0.000 validation_error: 20.854
+(epoch: 46, iters: 40160, time: 0.559, data: 0.000) G_L1: 16.428 G_L1_ABSOLUTE: 2.354 G_L1_RELATIVE: 14.075 G_Regularizer: 0.000 validation_error: 20.967
+(epoch: 46, iters: 42160, time: 0.550, data: 0.000) G_L1: 14.367 G_L1_ABSOLUTE: 2.628 G_L1_RELATIVE: 11.739 G_Regularizer: 0.000 validation_error: 20.955
+(epoch: 46, iters: 44160, time: 0.534, data: 0.000) G_L1: 14.633 G_L1_ABSOLUTE: 2.413 G_L1_RELATIVE: 12.220 G_Regularizer: 0.000 validation_error: 20.939
+(epoch: 46, iters: 46160, time: 0.534, data: 0.000) G_L1: 14.090 G_L1_ABSOLUTE: 2.537 G_L1_RELATIVE: 11.553 G_Regularizer: 0.000 validation_error: 20.788
+(epoch: 46, iters: 48160, time: 0.544, data: 0.000) G_L1: 11.956 G_L1_ABSOLUTE: 2.245 G_L1_RELATIVE: 9.711 G_Regularizer: 0.000 validation_error: 20.656
+(epoch: 46, iters: 50160, time: 0.552, data: 0.000) G_L1: 13.873 G_L1_ABSOLUTE: 2.403 G_L1_RELATIVE: 11.470 G_Regularizer: 0.000 validation_error: 20.763
+(epoch: 46, iters: 52160, time: 0.542, data: 0.000) G_L1: 11.227 G_L1_ABSOLUTE: 2.296 G_L1_RELATIVE: 8.930 G_Regularizer: 0.000 validation_error: 20.846
+(epoch: 46, iters: 54160, time: 0.549, data: 0.000) G_L1: 13.779 G_L1_ABSOLUTE: 2.202 G_L1_RELATIVE: 11.577 G_Regularizer: 0.000 validation_error: 20.968
+(epoch: 46, iters: 56160, time: 0.554, data: 0.000) G_L1: 13.107 G_L1_ABSOLUTE: 2.196 G_L1_RELATIVE: 10.911 G_Regularizer: 0.000 validation_error: 20.868
+(epoch: 46, iters: 58160, time: 0.553, data: 0.000) G_L1: 14.125 G_L1_ABSOLUTE: 2.971 G_L1_RELATIVE: 11.153 G_Regularizer: 0.000 validation_error: 20.899
+(epoch: 46, iters: 60160, time: 0.532, data: 0.000) G_L1: 17.177 G_L1_ABSOLUTE: 2.618 G_L1_RELATIVE: 14.559 G_Regularizer: 0.000 validation_error: 20.812
+(epoch: 46, iters: 62160, time: 0.550, data: 0.000) G_L1: 13.997 G_L1_ABSOLUTE: 2.146 G_L1_RELATIVE: 11.852 G_Regularizer: 0.000 validation_error: 20.569
+(epoch: 46, iters: 64160, time: 0.541, data: 0.000) G_L1: 13.882 G_L1_ABSOLUTE: 2.179 G_L1_RELATIVE: 11.703 G_Regularizer: 0.000 validation_error: 20.878
+(epoch: 46, iters: 66160, time: 0.553, data: 0.001) G_L1: 15.299 G_L1_ABSOLUTE: 2.868 G_L1_RELATIVE: 12.431 G_Regularizer: 0.000 validation_error: 20.820
+(epoch: 46, iters: 68160, time: 0.551, data: 0.000) G_L1: 12.648 G_L1_ABSOLUTE: 2.501 G_L1_RELATIVE: 10.147 G_Regularizer: 0.000 validation_error: 20.932
+(epoch: 46, iters: 70160, time: 0.554, data: 0.000) G_L1: 14.140 G_L1_ABSOLUTE: 2.184 G_L1_RELATIVE: 11.956 G_Regularizer: 0.000 validation_error: 20.836
+(epoch: 46, iters: 72160, time: 0.550, data: 0.000) G_L1: 15.911 G_L1_ABSOLUTE: 2.952 G_L1_RELATIVE: 12.960 G_Regularizer: 0.000 validation_error: 20.775
+(epoch: 46, iters: 74160, time: 0.558, data: 0.000) G_L1: 14.518 G_L1_ABSOLUTE: 2.647 G_L1_RELATIVE: 11.871 G_Regularizer: 0.000 validation_error: 21.132
+(epoch: 46, iters: 76160, time: 0.555, data: 0.000) G_L1: 14.460 G_L1_ABSOLUTE: 2.728 G_L1_RELATIVE: 11.733 G_Regularizer: 0.000 validation_error: 20.789
+(epoch: 46, iters: 78160, time: 0.549, data: 0.000) G_L1: 13.665 G_L1_ABSOLUTE: 2.350 G_L1_RELATIVE: 11.314 G_Regularizer: 0.000 validation_error: 20.926
+(epoch: 46, iters: 80160, time: 0.534, data: 0.000) G_L1: 14.599 G_L1_ABSOLUTE: 2.837 G_L1_RELATIVE: 11.762 G_Regularizer: 0.000 validation_error: 20.742
+(epoch: 46, iters: 82160, time: 0.562, data: 0.000) G_L1: 13.851 G_L1_ABSOLUTE: 2.481 G_L1_RELATIVE: 11.370 G_Regularizer: 0.000 validation_error: 20.887
+(epoch: 46, iters: 84160, time: 0.554, data: 0.000) G_L1: 16.545 G_L1_ABSOLUTE: 2.398 G_L1_RELATIVE: 14.147 G_Regularizer: 0.000 validation_error: 21.086
+(epoch: 46, iters: 86160, time: 0.540, data: 0.000) G_L1: 14.633 G_L1_ABSOLUTE: 2.339 G_L1_RELATIVE: 12.294 G_Regularizer: 0.000 validation_error: 20.669
+(epoch: 46, iters: 88160, time: 0.541, data: 0.000) G_L1: 17.945 G_L1_ABSOLUTE: 3.102 G_L1_RELATIVE: 14.843 G_Regularizer: 0.000 validation_error: 20.847
+(epoch: 46, iters: 90160, time: 0.549, data: 0.000) G_L1: 14.786 G_L1_ABSOLUTE: 2.467 G_L1_RELATIVE: 12.318 G_Regularizer: 0.000 validation_error: 20.767 +(epoch: 46, iters: 92160, time: 0.553, data: 0.000) G_L1: 13.286 G_L1_ABSOLUTE: 2.315 G_L1_RELATIVE: 10.971 G_Regularizer: 0.000 validation_error: 21.193 +(epoch: 46, iters: 94160, time: 0.552, data: 0.001) G_L1: 15.157 G_L1_ABSOLUTE: 2.667 G_L1_RELATIVE: 12.490 G_Regularizer: 0.000 validation_error: 21.021 +(epoch: 46, iters: 96160, time: 0.534, data: 0.000) G_L1: 13.560 G_L1_ABSOLUTE: 2.905 G_L1_RELATIVE: 10.655 G_Regularizer: 0.000 validation_error: 21.010 +(epoch: 46, iters: 98160, time: 0.546, data: 0.000) G_L1: 13.281 G_L1_ABSOLUTE: 1.972 G_L1_RELATIVE: 11.309 G_Regularizer: 0.000 validation_error: 21.015 +(epoch: 46, iters: 100160, time: 0.562, data: 0.000) G_L1: 14.769 G_L1_ABSOLUTE: 2.134 G_L1_RELATIVE: 12.635 G_Regularizer: 0.000 validation_error: 20.974 +(epoch: 46, iters: 102160, time: 0.546, data: 0.000) G_L1: 14.546 G_L1_ABSOLUTE: 2.841 G_L1_RELATIVE: 11.705 G_Regularizer: 0.000 validation_error: 20.829 +(epoch: 46, iters: 104160, time: 0.537, data: 0.000) G_L1: 13.381 G_L1_ABSOLUTE: 2.561 G_L1_RELATIVE: 10.820 G_Regularizer: 0.000 validation_error: 20.961 +(epoch: 46, iters: 106160, time: 0.534, data: 0.000) G_L1: 14.442 G_L1_ABSOLUTE: 2.432 G_L1_RELATIVE: 12.010 G_Regularizer: 0.000 validation_error: 21.104 +(epoch: 46, iters: 108160, time: 0.557, data: 0.000) G_L1: 14.541 G_L1_ABSOLUTE: 2.403 G_L1_RELATIVE: 12.138 G_Regularizer: 0.000 validation_error: 21.040 +(epoch: 46, iters: 110160, time: 0.558, data: 0.000) G_L1: 13.826 G_L1_ABSOLUTE: 2.876 G_L1_RELATIVE: 10.949 G_Regularizer: 0.000 validation_error: 20.863 +(epoch: 46, iters: 112160, time: 0.549, data: 0.001) G_L1: 13.467 G_L1_ABSOLUTE: 2.501 G_L1_RELATIVE: 10.966 G_Regularizer: 0.000 validation_error: 21.108 +(epoch: 46, iters: 114160, time: 0.545, data: 0.000) G_L1: 15.713 G_L1_ABSOLUTE: 2.229 G_L1_RELATIVE: 13.484 G_Regularizer: 0.000 validation_error: 20.711 +(epoch: 46, iters: 116160, time: 0.531, data: 0.000) G_L1: 14.656 G_L1_ABSOLUTE: 2.739 G_L1_RELATIVE: 11.917 G_Regularizer: 0.000 validation_error: 21.011 +(epoch: 46, iters: 118160, time: 0.548, data: 0.000) G_L1: 14.223 G_L1_ABSOLUTE: 2.705 G_L1_RELATIVE: 11.518 G_Regularizer: 0.000 validation_error: 20.834 +(epoch: 46, iters: 120160, time: 0.556, data: 0.000) G_L1: 13.219 G_L1_ABSOLUTE: 2.332 G_L1_RELATIVE: 10.887 G_Regularizer: 0.000 validation_error: 20.862 +(epoch: 46, iters: 122160, time: 0.555, data: 0.000) G_L1: 14.154 G_L1_ABSOLUTE: 2.537 G_L1_RELATIVE: 11.617 G_Regularizer: 0.000 validation_error: 20.845 +(epoch: 46, iters: 124160, time: 0.543, data: 0.000) G_L1: 16.266 G_L1_ABSOLUTE: 2.377 G_L1_RELATIVE: 13.889 G_Regularizer: 0.000 validation_error: 20.906 +(epoch: 46, iters: 126160, time: 0.561, data: 0.000) G_L1: 12.310 G_L1_ABSOLUTE: 2.092 G_L1_RELATIVE: 10.219 G_Regularizer: 0.000 validation_error: 20.818 +(epoch: 46, iters: 128160, time: 0.551, data: 0.000) G_L1: 13.216 G_L1_ABSOLUTE: 2.218 G_L1_RELATIVE: 10.998 G_Regularizer: 0.000 validation_error: 20.884 +(epoch: 46, iters: 130160, time: 0.541, data: 0.001) G_L1: 14.190 G_L1_ABSOLUTE: 3.018 G_L1_RELATIVE: 11.171 G_Regularizer: 0.000 validation_error: 20.936 +(epoch: 46, iters: 132160, time: 0.528, data: 0.001) G_L1: 13.241 G_L1_ABSOLUTE: 2.424 G_L1_RELATIVE: 10.817 G_Regularizer: 0.000 validation_error: 20.885 +(epoch: 46, iters: 134160, time: 0.565, data: 0.001) G_L1: 15.186 G_L1_ABSOLUTE: 2.175 G_L1_RELATIVE: 13.011 G_Regularizer: 
0.000 validation_error: 21.184 +(epoch: 46, iters: 136160, time: 0.562, data: 0.000) G_L1: 13.719 G_L1_ABSOLUTE: 2.700 G_L1_RELATIVE: 11.019 G_Regularizer: 0.000 validation_error: 21.035 +(epoch: 46, iters: 138160, time: 0.541, data: 0.000) G_L1: 12.734 G_L1_ABSOLUTE: 2.716 G_L1_RELATIVE: 10.019 G_Regularizer: 0.000 validation_error: 20.759 +(epoch: 46, iters: 140160, time: 0.544, data: 0.000) G_L1: 15.116 G_L1_ABSOLUTE: 2.363 G_L1_RELATIVE: 12.753 G_Regularizer: 0.000 validation_error: 20.928 +(epoch: 46, iters: 142160, time: 0.539, data: 0.000) G_L1: 13.739 G_L1_ABSOLUTE: 2.695 G_L1_RELATIVE: 11.044 G_Regularizer: 0.000 validation_error: 20.889 +(epoch: 46, iters: 144160, time: 0.548, data: 0.001) G_L1: 15.581 G_L1_ABSOLUTE: 2.391 G_L1_RELATIVE: 13.190 G_Regularizer: 0.000 validation_error: 20.764 +(epoch: 46, iters: 146160, time: 0.539, data: 0.000) G_L1: 14.450 G_L1_ABSOLUTE: 2.364 G_L1_RELATIVE: 12.086 G_Regularizer: 0.000 validation_error: 20.796 +(epoch: 46, iters: 148160, time: 0.542, data: 0.000) G_L1: 15.531 G_L1_ABSOLUTE: 2.241 G_L1_RELATIVE: 13.290 G_Regularizer: 0.000 validation_error: 20.739 +(epoch: 46, iters: 150160, time: 0.538, data: 0.000) G_L1: 14.299 G_L1_ABSOLUTE: 2.581 G_L1_RELATIVE: 11.719 G_Regularizer: 0.000 validation_error: 20.934 +(epoch: 46, iters: 152160, time: 0.537, data: 0.000) G_L1: 14.310 G_L1_ABSOLUTE: 2.494 G_L1_RELATIVE: 11.816 G_Regularizer: 0.000 validation_error: 20.883 +(epoch: 46, iters: 154160, time: 0.538, data: 0.000) G_L1: 16.373 G_L1_ABSOLUTE: 3.089 G_L1_RELATIVE: 13.284 G_Regularizer: 0.000 validation_error: 20.934 +(epoch: 46, iters: 156160, time: 0.554, data: 0.000) G_L1: 13.353 G_L1_ABSOLUTE: 2.432 G_L1_RELATIVE: 10.922 G_Regularizer: 0.000 validation_error: 21.252 +(epoch: 46, iters: 158160, time: 0.543, data: 0.001) G_L1: 14.793 G_L1_ABSOLUTE: 2.087 G_L1_RELATIVE: 12.706 G_Regularizer: 0.000 validation_error: 20.867 +(epoch: 46, iters: 160160, time: 0.549, data: 0.000) G_L1: 14.896 G_L1_ABSOLUTE: 2.366 G_L1_RELATIVE: 12.529 G_Regularizer: 0.000 validation_error: 20.796 +(epoch: 46, iters: 162160, time: 0.546, data: 0.000) G_L1: 12.189 G_L1_ABSOLUTE: 2.496 G_L1_RELATIVE: 9.693 G_Regularizer: 0.000 validation_error: 21.266 +(epoch: 46, iters: 164160, time: 0.551, data: 0.000) G_L1: 17.667 G_L1_ABSOLUTE: 2.486 G_L1_RELATIVE: 15.181 G_Regularizer: 0.000 validation_error: 20.800 +(epoch: 46, iters: 166160, time: 0.537, data: 0.000) G_L1: 14.345 G_L1_ABSOLUTE: 2.521 G_L1_RELATIVE: 11.825 G_Regularizer: 0.000 validation_error: 21.061 +(epoch: 46, iters: 168160, time: 0.555, data: 0.000) G_L1: 14.146 G_L1_ABSOLUTE: 2.367 G_L1_RELATIVE: 11.779 G_Regularizer: 0.000 validation_error: 21.224 +(epoch: 46, iters: 170160, time: 0.558, data: 0.000) G_L1: 16.307 G_L1_ABSOLUTE: 2.694 G_L1_RELATIVE: 13.613 G_Regularizer: 0.000 validation_error: 20.980 +(epoch: 46, iters: 172160, time: 0.549, data: 0.001) G_L1: 16.617 G_L1_ABSOLUTE: 2.468 G_L1_RELATIVE: 14.148 G_Regularizer: 0.000 validation_error: 21.002 +(epoch: 46, iters: 174160, time: 0.541, data: 0.001) G_L1: 14.651 G_L1_ABSOLUTE: 2.484 G_L1_RELATIVE: 12.167 G_Regularizer: 0.000 validation_error: 20.741 +(epoch: 46, iters: 176160, time: 0.562, data: 0.001) G_L1: 14.125 G_L1_ABSOLUTE: 2.553 G_L1_RELATIVE: 11.573 G_Regularizer: 0.000 validation_error: 20.583 +(epoch: 46, iters: 178160, time: 0.558, data: 0.000) G_L1: 13.338 G_L1_ABSOLUTE: 2.928 G_L1_RELATIVE: 10.410 G_Regularizer: 0.000 validation_error: 20.872 +(epoch: 46, iters: 180160, time: 0.535, data: 0.000) G_L1: 17.832 G_L1_ABSOLUTE: 2.859 
G_L1_RELATIVE: 14.973 G_Regularizer: 0.000 validation_error: 20.949 +(epoch: 46, iters: 182160, time: 0.537, data: 0.000) G_L1: 12.465 G_L1_ABSOLUTE: 1.986 G_L1_RELATIVE: 10.479 G_Regularizer: 0.000 validation_error: 20.926 +(epoch: 46, iters: 184160, time: 0.551, data: 0.000) G_L1: 14.832 G_L1_ABSOLUTE: 3.165 G_L1_RELATIVE: 11.667 G_Regularizer: 0.000 validation_error: 20.987 +(epoch: 46, iters: 186160, time: 0.561, data: 0.000) G_L1: 15.628 G_L1_ABSOLUTE: 2.987 G_L1_RELATIVE: 12.640 G_Regularizer: 0.000 validation_error: 20.814 +(epoch: 46, iters: 188160, time: 0.530, data: 0.000) G_L1: 16.298 G_L1_ABSOLUTE: 2.880 G_L1_RELATIVE: 13.418 G_Regularizer: 0.000 validation_error: 20.774 +(epoch: 46, iters: 190160, time: 0.543, data: 0.000) G_L1: 13.556 G_L1_ABSOLUTE: 2.482 G_L1_RELATIVE: 11.074 G_Regularizer: 0.000 validation_error: 20.660 +(epoch: 46, iters: 192160, time: 0.533, data: 0.000) G_L1: 15.481 G_L1_ABSOLUTE: 2.325 G_L1_RELATIVE: 13.156 G_Regularizer: 0.000 validation_error: 20.974 +(epoch: 46, iters: 194160, time: 0.552, data: 0.000) G_L1: 11.629 G_L1_ABSOLUTE: 2.094 G_L1_RELATIVE: 9.535 G_Regularizer: 0.000 validation_error: 20.934 +(epoch: 46, iters: 196160, time: 0.535, data: 0.000) G_L1: 13.976 G_L1_ABSOLUTE: 2.792 G_L1_RELATIVE: 11.184 G_Regularizer: 0.000 validation_error: 20.909 +(epoch: 46, iters: 198160, time: 0.557, data: 0.000) G_L1: 13.653 G_L1_ABSOLUTE: 2.084 G_L1_RELATIVE: 11.569 G_Regularizer: 0.000 validation_error: 20.878 +(epoch: 46, iters: 200160, time: 0.535, data: 0.000) G_L1: 17.706 G_L1_ABSOLUTE: 3.464 G_L1_RELATIVE: 14.242 G_Regularizer: 0.000 validation_error: 20.887 +(epoch: 46, iters: 202160, time: 0.550, data: 0.000) G_L1: 14.049 G_L1_ABSOLUTE: 2.198 G_L1_RELATIVE: 11.850 G_Regularizer: 0.000 validation_error: 20.837 +(epoch: 46, iters: 204160, time: 0.557, data: 0.001) G_L1: 11.970 G_L1_ABSOLUTE: 2.198 G_L1_RELATIVE: 9.772 G_Regularizer: 0.000 validation_error: 20.909 +(epoch: 46, iters: 206160, time: 0.561, data: 0.000) G_L1: 11.703 G_L1_ABSOLUTE: 2.385 G_L1_RELATIVE: 9.318 G_Regularizer: 0.000 validation_error: 21.151 +(epoch: 46, iters: 208160, time: 0.553, data: 0.000) G_L1: 12.781 G_L1_ABSOLUTE: 2.301 G_L1_RELATIVE: 10.480 G_Regularizer: 0.000 validation_error: 20.872 +(epoch: 46, iters: 210160, time: 0.532, data: 0.000) G_L1: 13.413 G_L1_ABSOLUTE: 2.385 G_L1_RELATIVE: 11.029 G_Regularizer: 0.000 validation_error: 21.278 +(epoch: 46, iters: 212160, time: 0.548, data: 0.000) G_L1: 11.529 G_L1_ABSOLUTE: 2.538 G_L1_RELATIVE: 8.991 G_Regularizer: 0.000 validation_error: 20.657 +(epoch: 46, iters: 214160, time: 0.554, data: 0.000) G_L1: 13.389 G_L1_ABSOLUTE: 2.427 G_L1_RELATIVE: 10.962 G_Regularizer: 0.000 validation_error: 20.976 +(epoch: 46, iters: 216160, time: 0.560, data: 0.000) G_L1: 13.063 G_L1_ABSOLUTE: 2.418 G_L1_RELATIVE: 10.646 G_Regularizer: 0.000 validation_error: 20.571 +(epoch: 46, iters: 218160, time: 0.547, data: 0.000) G_L1: 15.455 G_L1_ABSOLUTE: 2.289 G_L1_RELATIVE: 13.167 G_Regularizer: 0.000 validation_error: 21.003 +(epoch: 46, iters: 220160, time: 0.555, data: 0.000) G_L1: 14.340 G_L1_ABSOLUTE: 2.357 G_L1_RELATIVE: 11.984 G_Regularizer: 0.000 validation_error: 20.940 +(epoch: 46, iters: 222160, time: 0.557, data: 0.000) G_L1: 13.434 G_L1_ABSOLUTE: 2.537 G_L1_RELATIVE: 10.896 G_Regularizer: 0.000 validation_error: 20.832 +(epoch: 46, iters: 224160, time: 0.551, data: 0.000) G_L1: 12.900 G_L1_ABSOLUTE: 2.189 G_L1_RELATIVE: 10.711 G_Regularizer: 0.000 validation_error: 20.905 +(epoch: 46, iters: 226160, time: 0.541, data: 0.000) 
G_L1: 16.123 G_L1_ABSOLUTE: 2.524 G_L1_RELATIVE: 13.599 G_Regularizer: 0.000 validation_error: 21.003 +(epoch: 46, iters: 228160, time: 0.557, data: 0.001) G_L1: 13.802 G_L1_ABSOLUTE: 2.938 G_L1_RELATIVE: 10.864 G_Regularizer: 0.000 validation_error: 21.177 +(epoch: 46, iters: 230160, time: 0.532, data: 0.000) G_L1: 17.382 G_L1_ABSOLUTE: 2.641 G_L1_RELATIVE: 14.741 G_Regularizer: 0.000 validation_error: 20.930 +(epoch: 46, iters: 232160, time: 0.555, data: 0.001) G_L1: 13.207 G_L1_ABSOLUTE: 2.587 G_L1_RELATIVE: 10.620 G_Regularizer: 0.000 validation_error: 20.881 +(epoch: 46, iters: 234160, time: 0.542, data: 0.000) G_L1: 12.219 G_L1_ABSOLUTE: 2.216 G_L1_RELATIVE: 10.003 G_Regularizer: 0.000 validation_error: 20.818 +(epoch: 46, iters: 236160, time: 0.545, data: 0.000) G_L1: 13.487 G_L1_ABSOLUTE: 2.839 G_L1_RELATIVE: 10.649 G_Regularizer: 0.000 validation_error: 21.078 +(epoch: 46, iters: 238160, time: 0.533, data: 0.001) G_L1: 12.732 G_L1_ABSOLUTE: 2.509 G_L1_RELATIVE: 10.224 G_Regularizer: 0.000 validation_error: 20.840 +(epoch: 46, iters: 240160, time: 0.548, data: 0.000) G_L1: 14.951 G_L1_ABSOLUTE: 3.240 G_L1_RELATIVE: 11.710 G_Regularizer: 0.000 validation_error: 20.503 +(epoch: 46, iters: 242160, time: 0.557, data: 0.000) G_L1: 14.910 G_L1_ABSOLUTE: 2.601 G_L1_RELATIVE: 12.309 G_Regularizer: 0.000 validation_error: 20.888 +(epoch: 46, iters: 244160, time: 0.560, data: 0.000) G_L1: 16.740 G_L1_ABSOLUTE: 2.667 G_L1_RELATIVE: 14.073 G_Regularizer: 0.000 validation_error: 21.231 +(epoch: 46, iters: 246160, time: 0.539, data: 0.000) G_L1: 12.884 G_L1_ABSOLUTE: 2.280 G_L1_RELATIVE: 10.604 G_Regularizer: 0.000 validation_error: 20.706 +(epoch: 46, iters: 248160, time: 0.563, data: 0.000) G_L1: 12.877 G_L1_ABSOLUTE: 2.182 G_L1_RELATIVE: 10.696 G_Regularizer: 0.000 validation_error: 20.676 +(epoch: 46, iters: 250160, time: 0.557, data: 0.000) G_L1: 11.678 G_L1_ABSOLUTE: 2.133 G_L1_RELATIVE: 9.545 G_Regularizer: 0.000 validation_error: 21.010 +(epoch: 46, iters: 252160, time: 0.533, data: 0.000) G_L1: 15.731 G_L1_ABSOLUTE: 2.355 G_L1_RELATIVE: 13.376 G_Regularizer: 0.000 validation_error: 20.932 +(epoch: 46, iters: 254160, time: 0.541, data: 0.000) G_L1: 14.257 G_L1_ABSOLUTE: 2.421 G_L1_RELATIVE: 11.837 G_Regularizer: 0.000 validation_error: 21.001 +(epoch: 46, iters: 256160, time: 0.561, data: 0.000) G_L1: 13.311 G_L1_ABSOLUTE: 2.333 G_L1_RELATIVE: 10.978 G_Regularizer: 0.000 validation_error: 20.886 +(epoch: 46, iters: 258160, time: 0.556, data: 0.000) G_L1: 12.909 G_L1_ABSOLUTE: 2.710 G_L1_RELATIVE: 10.199 G_Regularizer: 0.000 validation_error: 20.865 +(epoch: 46, iters: 260160, time: 0.545, data: 0.001) G_L1: 14.195 G_L1_ABSOLUTE: 2.399 G_L1_RELATIVE: 11.797 G_Regularizer: 0.000 validation_error: 20.604 +(epoch: 46, iters: 262160, time: 0.543, data: 0.000) G_L1: 19.543 G_L1_ABSOLUTE: 2.997 G_L1_RELATIVE: 16.546 G_Regularizer: 0.000 validation_error: 20.672 +(epoch: 46, iters: 264160, time: 0.526, data: 0.000) G_L1: 12.245 G_L1_ABSOLUTE: 2.226 G_L1_RELATIVE: 10.019 G_Regularizer: 0.000 validation_error: 21.087 +(epoch: 46, iters: 266160, time: 0.533, data: 0.000) G_L1: 13.813 G_L1_ABSOLUTE: 2.365 G_L1_RELATIVE: 11.448 G_Regularizer: 0.000 validation_error: 20.860 +(epoch: 46, iters: 268160, time: 0.547, data: 0.001) G_L1: 12.655 G_L1_ABSOLUTE: 2.151 G_L1_RELATIVE: 10.504 G_Regularizer: 0.000 validation_error: 20.675 +(epoch: 46, iters: 270160, time: 0.525, data: 0.000) G_L1: 14.083 G_L1_ABSOLUTE: 2.580 G_L1_RELATIVE: 11.503 G_Regularizer: 0.000 validation_error: 21.072 +(epoch: 46, iters: 
272160, time: 0.554, data: 0.000) G_L1: 11.637 G_L1_ABSOLUTE: 2.529 G_L1_RELATIVE: 9.109 G_Regularizer: 0.000 validation_error: 20.900 +(epoch: 46, iters: 274160, time: 0.552, data: 0.000) G_L1: 14.829 G_L1_ABSOLUTE: 2.682 G_L1_RELATIVE: 12.147 G_Regularizer: 0.000 validation_error: 20.881 +(epoch: 46, iters: 276160, time: 0.540, data: 0.000) G_L1: 14.370 G_L1_ABSOLUTE: 2.481 G_L1_RELATIVE: 11.890 G_Regularizer: 0.000 validation_error: 20.979 +(epoch: 46, iters: 278160, time: 0.531, data: 0.000) G_L1: 15.222 G_L1_ABSOLUTE: 2.584 G_L1_RELATIVE: 12.638 G_Regularizer: 0.000 validation_error: 20.747 +(epoch: 46, iters: 280160, time: 0.535, data: 0.000) G_L1: 12.378 G_L1_ABSOLUTE: 2.154 G_L1_RELATIVE: 10.223 G_Regularizer: 0.000 validation_error: 20.916 +(epoch: 46, iters: 282160, time: 0.554, data: 0.000) G_L1: 16.971 G_L1_ABSOLUTE: 2.186 G_L1_RELATIVE: 14.784 G_Regularizer: 0.000 validation_error: 21.177 +(epoch: 46, iters: 284160, time: 0.547, data: 0.001) G_L1: 11.308 G_L1_ABSOLUTE: 2.242 G_L1_RELATIVE: 9.066 G_Regularizer: 0.000 validation_error: 20.738 +(epoch: 46, iters: 286160, time: 0.545, data: 0.000) G_L1: 14.867 G_L1_ABSOLUTE: 2.600 G_L1_RELATIVE: 12.267 G_Regularizer: 0.000 validation_error: 20.828 +(epoch: 46, iters: 288160, time: 0.543, data: 0.000) G_L1: 15.869 G_L1_ABSOLUTE: 2.365 G_L1_RELATIVE: 13.505 G_Regularizer: 0.000 validation_error: 21.118 +(epoch: 46, iters: 290160, time: 0.552, data: 0.000) G_L1: 13.980 G_L1_ABSOLUTE: 2.664 G_L1_RELATIVE: 11.315 G_Regularizer: 0.000 validation_error: 20.773 +(epoch: 46, iters: 292160, time: 0.558, data: 0.000) G_L1: 14.178 G_L1_ABSOLUTE: 2.456 G_L1_RELATIVE: 11.722 G_Regularizer: 0.000 validation_error: 21.035 +(epoch: 46, iters: 294160, time: 0.555, data: 0.000) G_L1: 13.050 G_L1_ABSOLUTE: 2.416 G_L1_RELATIVE: 10.633 G_Regularizer: 0.000 validation_error: 20.986 +(epoch: 46, iters: 296160, time: 0.542, data: 0.000) G_L1: 13.002 G_L1_ABSOLUTE: 2.295 G_L1_RELATIVE: 10.707 G_Regularizer: 0.000 validation_error: 20.811 +(epoch: 46, iters: 298160, time: 0.557, data: 0.000) G_L1: 13.162 G_L1_ABSOLUTE: 2.383 G_L1_RELATIVE: 10.778 G_Regularizer: 0.000 validation_error: 20.795 +(epoch: 46, iters: 300160, time: 0.557, data: 0.000) G_L1: 17.280 G_L1_ABSOLUTE: 2.757 G_L1_RELATIVE: 14.523 G_Regularizer: 0.000 validation_error: 20.776 +(epoch: 46, iters: 302160, time: 0.538, data: 0.001) G_L1: 13.382 G_L1_ABSOLUTE: 2.413 G_L1_RELATIVE: 10.968 G_Regularizer: 0.000 validation_error: 21.015 +(epoch: 47, iters: 1408, time: 0.532, data: 0.000) G_L1: 14.104 G_L1_ABSOLUTE: 2.306 G_L1_RELATIVE: 11.798 G_Regularizer: 0.000 validation_error: 20.891 +(epoch: 47, iters: 3408, time: 0.559, data: 0.001) G_L1: 16.565 G_L1_ABSOLUTE: 2.892 G_L1_RELATIVE: 13.672 G_Regularizer: 0.000 validation_error: 21.022 +(epoch: 47, iters: 5408, time: 0.552, data: 0.000) G_L1: 11.763 G_L1_ABSOLUTE: 1.965 G_L1_RELATIVE: 9.797 G_Regularizer: 0.000 validation_error: 20.668 +(epoch: 47, iters: 7408, time: 0.545, data: 0.000) G_L1: 14.375 G_L1_ABSOLUTE: 2.851 G_L1_RELATIVE: 11.524 G_Regularizer: 0.000 validation_error: 20.879 +(epoch: 47, iters: 9408, time: 0.545, data: 0.000) G_L1: 17.191 G_L1_ABSOLUTE: 2.702 G_L1_RELATIVE: 14.489 G_Regularizer: 0.000 validation_error: 20.860 +(epoch: 47, iters: 11408, time: 0.532, data: 0.000) G_L1: 13.082 G_L1_ABSOLUTE: 2.263 G_L1_RELATIVE: 10.819 G_Regularizer: 0.000 validation_error: 20.829 +(epoch: 47, iters: 13408, time: 0.533, data: 0.000) G_L1: 14.603 G_L1_ABSOLUTE: 2.560 G_L1_RELATIVE: 12.043 G_Regularizer: 0.000 validation_error: 21.083 
+(epoch: 47, iters: 15408, time: 0.553, data: 0.000) G_L1: 12.431 G_L1_ABSOLUTE: 2.249 G_L1_RELATIVE: 10.182 G_Regularizer: 0.000 validation_error: 20.953 +(epoch: 47, iters: 17408, time: 0.553, data: 0.000) G_L1: 13.540 G_L1_ABSOLUTE: 2.430 G_L1_RELATIVE: 11.110 G_Regularizer: 0.000 validation_error: 20.836 +(epoch: 47, iters: 19408, time: 0.535, data: 0.000) G_L1: 13.446 G_L1_ABSOLUTE: 2.476 G_L1_RELATIVE: 10.970 G_Regularizer: 0.000 validation_error: 21.010 +(epoch: 47, iters: 21408, time: 0.548, data: 0.000) G_L1: 16.318 G_L1_ABSOLUTE: 2.630 G_L1_RELATIVE: 13.688 G_Regularizer: 0.000 validation_error: 20.886 +(epoch: 47, iters: 23408, time: 0.555, data: 0.000) G_L1: 14.793 G_L1_ABSOLUTE: 2.883 G_L1_RELATIVE: 11.909 G_Regularizer: 0.000 validation_error: 20.976 +(epoch: 47, iters: 25408, time: 0.535, data: 0.000) G_L1: 15.210 G_L1_ABSOLUTE: 2.455 G_L1_RELATIVE: 12.755 G_Regularizer: 0.000 validation_error: 20.927 +(epoch: 47, iters: 27408, time: 0.534, data: 0.000) G_L1: 12.666 G_L1_ABSOLUTE: 2.704 G_L1_RELATIVE: 9.962 G_Regularizer: 0.000 validation_error: 21.033 +(epoch: 47, iters: 29408, time: 0.548, data: 0.000) G_L1: 12.741 G_L1_ABSOLUTE: 2.327 G_L1_RELATIVE: 10.414 G_Regularizer: 0.000 validation_error: 20.667 +(epoch: 47, iters: 31408, time: 0.555, data: 0.000) G_L1: 14.120 G_L1_ABSOLUTE: 2.753 G_L1_RELATIVE: 11.367 G_Regularizer: 0.000 validation_error: 20.983 +(epoch: 47, iters: 33408, time: 0.548, data: 0.000) G_L1: 14.030 G_L1_ABSOLUTE: 2.397 G_L1_RELATIVE: 11.633 G_Regularizer: 0.000 validation_error: 20.762 +(epoch: 47, iters: 35408, time: 0.571, data: 0.000) G_L1: 11.744 G_L1_ABSOLUTE: 2.369 G_L1_RELATIVE: 9.375 G_Regularizer: 0.000 validation_error: 20.800 +(epoch: 47, iters: 37408, time: 0.557, data: 0.000) G_L1: 14.338 G_L1_ABSOLUTE: 3.048 G_L1_RELATIVE: 11.290 G_Regularizer: 0.000 validation_error: 20.922 +(epoch: 47, iters: 39408, time: 0.551, data: 0.000) G_L1: 14.526 G_L1_ABSOLUTE: 2.318 G_L1_RELATIVE: 12.208 G_Regularizer: 0.000 validation_error: 21.129 +(epoch: 47, iters: 41408, time: 0.559, data: 0.000) G_L1: 12.454 G_L1_ABSOLUTE: 2.343 G_L1_RELATIVE: 10.111 G_Regularizer: 0.000 validation_error: 20.961 +(epoch: 47, iters: 43408, time: 0.538, data: 0.001) G_L1: 12.883 G_L1_ABSOLUTE: 1.771 G_L1_RELATIVE: 11.111 G_Regularizer: 0.000 validation_error: 20.852 +(epoch: 47, iters: 45408, time: 0.551, data: 0.000) G_L1: 15.913 G_L1_ABSOLUTE: 2.632 G_L1_RELATIVE: 13.281 G_Regularizer: 0.000 validation_error: 21.107 +(epoch: 47, iters: 47408, time: 0.555, data: 0.000) G_L1: 15.051 G_L1_ABSOLUTE: 2.609 G_L1_RELATIVE: 12.442 G_Regularizer: 0.000 validation_error: 20.951 +(epoch: 47, iters: 49408, time: 0.546, data: 0.000) G_L1: 14.884 G_L1_ABSOLUTE: 2.665 G_L1_RELATIVE: 12.219 G_Regularizer: 0.000 validation_error: 20.933 +(epoch: 47, iters: 51408, time: 0.550, data: 0.000) G_L1: 13.978 G_L1_ABSOLUTE: 2.826 G_L1_RELATIVE: 11.151 G_Regularizer: 0.000 validation_error: 20.844 +(epoch: 47, iters: 53408, time: 0.536, data: 0.000) G_L1: 12.330 G_L1_ABSOLUTE: 2.164 G_L1_RELATIVE: 10.166 G_Regularizer: 0.000 validation_error: 20.773 +(epoch: 47, iters: 55408, time: 0.559, data: 0.000) G_L1: 12.410 G_L1_ABSOLUTE: 1.940 G_L1_RELATIVE: 10.470 G_Regularizer: 0.000 validation_error: 20.863 +(epoch: 47, iters: 57408, time: 0.534, data: 0.000) G_L1: 13.164 G_L1_ABSOLUTE: 2.632 G_L1_RELATIVE: 10.532 G_Regularizer: 0.000 validation_error: 20.908 +(epoch: 47, iters: 59408, time: 0.539, data: 0.001) G_L1: 15.502 G_L1_ABSOLUTE: 3.134 G_L1_RELATIVE: 12.368 G_Regularizer: 0.000 
validation_error: 21.133 +(epoch: 47, iters: 61408, time: 0.547, data: 0.000) G_L1: 12.568 G_L1_ABSOLUTE: 2.426 G_L1_RELATIVE: 10.142 G_Regularizer: 0.000 validation_error: 21.083 +(epoch: 47, iters: 63408, time: 0.553, data: 0.000) G_L1: 11.820 G_L1_ABSOLUTE: 2.179 G_L1_RELATIVE: 9.641 G_Regularizer: 0.000 validation_error: 20.942 +(epoch: 47, iters: 65408, time: 0.556, data: 0.000) G_L1: 12.960 G_L1_ABSOLUTE: 2.482 G_L1_RELATIVE: 10.478 G_Regularizer: 0.000 validation_error: 20.723 +(epoch: 47, iters: 67408, time: 0.550, data: 0.000) G_L1: 12.496 G_L1_ABSOLUTE: 2.078 G_L1_RELATIVE: 10.418 G_Regularizer: 0.000 validation_error: 20.696 +(epoch: 47, iters: 69408, time: 0.554, data: 0.001) G_L1: 13.477 G_L1_ABSOLUTE: 2.433 G_L1_RELATIVE: 11.044 G_Regularizer: 0.000 validation_error: 20.746 +(epoch: 47, iters: 71408, time: 0.541, data: 0.001) G_L1: 13.205 G_L1_ABSOLUTE: 2.069 G_L1_RELATIVE: 11.136 G_Regularizer: 0.000 validation_error: 20.914 +(epoch: 47, iters: 73408, time: 0.557, data: 0.000) G_L1: 15.519 G_L1_ABSOLUTE: 2.488 G_L1_RELATIVE: 13.032 G_Regularizer: 0.000 validation_error: 20.805 +(epoch: 47, iters: 75408, time: 0.574, data: 0.001) G_L1: 16.120 G_L1_ABSOLUTE: 2.933 G_L1_RELATIVE: 13.187 G_Regularizer: 0.000 validation_error: 21.053 +(epoch: 47, iters: 77408, time: 0.550, data: 0.000) G_L1: 13.096 G_L1_ABSOLUTE: 2.284 G_L1_RELATIVE: 10.812 G_Regularizer: 0.000 validation_error: 20.917 +(epoch: 47, iters: 79408, time: 0.551, data: 0.001) G_L1: 13.960 G_L1_ABSOLUTE: 2.556 G_L1_RELATIVE: 11.405 G_Regularizer: 0.000 validation_error: 20.940 +(epoch: 47, iters: 81408, time: 0.548, data: 0.000) G_L1: 14.104 G_L1_ABSOLUTE: 2.587 G_L1_RELATIVE: 11.517 G_Regularizer: 0.000 validation_error: 20.754 +(epoch: 47, iters: 83408, time: 0.551, data: 0.000) G_L1: 14.406 G_L1_ABSOLUTE: 2.400 G_L1_RELATIVE: 12.006 G_Regularizer: 0.000 validation_error: 20.836 +(epoch: 47, iters: 85408, time: 0.537, data: 0.001) G_L1: 13.791 G_L1_ABSOLUTE: 2.565 G_L1_RELATIVE: 11.226 G_Regularizer: 0.000 validation_error: 21.014 +(epoch: 47, iters: 87408, time: 0.554, data: 0.000) G_L1: 13.983 G_L1_ABSOLUTE: 2.356 G_L1_RELATIVE: 11.628 G_Regularizer: 0.000 validation_error: 20.841 +(epoch: 47, iters: 89408, time: 0.550, data: 0.000) G_L1: 12.670 G_L1_ABSOLUTE: 2.426 G_L1_RELATIVE: 10.244 G_Regularizer: 0.000 validation_error: 20.840 +(epoch: 47, iters: 91408, time: 0.553, data: 0.000) G_L1: 11.894 G_L1_ABSOLUTE: 2.374 G_L1_RELATIVE: 9.520 G_Regularizer: 0.000 validation_error: 20.807 +(epoch: 47, iters: 93408, time: 0.533, data: 0.000) G_L1: 16.572 G_L1_ABSOLUTE: 2.815 G_L1_RELATIVE: 13.757 G_Regularizer: 0.000 validation_error: 20.982 +(epoch: 47, iters: 95408, time: 0.541, data: 0.000) G_L1: 13.587 G_L1_ABSOLUTE: 2.822 G_L1_RELATIVE: 10.766 G_Regularizer: 0.000 validation_error: 20.878 +(epoch: 47, iters: 97408, time: 0.538, data: 0.000) G_L1: 14.780 G_L1_ABSOLUTE: 2.896 G_L1_RELATIVE: 11.883 G_Regularizer: 0.000 validation_error: 20.678 +(epoch: 47, iters: 99408, time: 0.548, data: 0.000) G_L1: 14.149 G_L1_ABSOLUTE: 2.896 G_L1_RELATIVE: 11.252 G_Regularizer: 0.000 validation_error: 20.726 +(epoch: 47, iters: 101408, time: 0.540, data: 0.001) G_L1: 12.224 G_L1_ABSOLUTE: 2.414 G_L1_RELATIVE: 9.809 G_Regularizer: 0.000 validation_error: 20.555 +(epoch: 47, iters: 103408, time: 0.543, data: 0.000) G_L1: 12.564 G_L1_ABSOLUTE: 2.255 G_L1_RELATIVE: 10.309 G_Regularizer: 0.000 validation_error: 21.009 +(epoch: 47, iters: 105408, time: 0.545, data: 0.000) G_L1: 13.255 G_L1_ABSOLUTE: 2.357 G_L1_RELATIVE: 10.898 
G_Regularizer: 0.000 validation_error: 20.789 +(epoch: 47, iters: 107408, time: 0.553, data: 0.000) G_L1: 15.414 G_L1_ABSOLUTE: 2.688 G_L1_RELATIVE: 12.727 G_Regularizer: 0.000 validation_error: 20.649 +(epoch: 47, iters: 109408, time: 0.552, data: 0.000) G_L1: 12.213 G_L1_ABSOLUTE: 2.370 G_L1_RELATIVE: 9.843 G_Regularizer: 0.000 validation_error: 20.833 +(epoch: 47, iters: 111408, time: 0.529, data: 0.000) G_L1: 14.098 G_L1_ABSOLUTE: 2.876 G_L1_RELATIVE: 11.222 G_Regularizer: 0.000 validation_error: 20.828 +(epoch: 47, iters: 113408, time: 0.546, data: 0.000) G_L1: 13.347 G_L1_ABSOLUTE: 2.702 G_L1_RELATIVE: 10.645 G_Regularizer: 0.000 validation_error: 20.376 +(epoch: 47, iters: 115408, time: 0.557, data: 0.000) G_L1: 15.296 G_L1_ABSOLUTE: 2.452 G_L1_RELATIVE: 12.844 G_Regularizer: 0.000 validation_error: 20.962 +(epoch: 47, iters: 117408, time: 0.549, data: 0.000) G_L1: 12.658 G_L1_ABSOLUTE: 2.486 G_L1_RELATIVE: 10.172 G_Regularizer: 0.000 validation_error: 20.875 +(epoch: 47, iters: 119408, time: 0.540, data: 0.000) G_L1: 13.671 G_L1_ABSOLUTE: 2.249 G_L1_RELATIVE: 11.422 G_Regularizer: 0.000 validation_error: 20.993 +(epoch: 47, iters: 121408, time: 0.556, data: 0.000) G_L1: 14.940 G_L1_ABSOLUTE: 2.508 G_L1_RELATIVE: 12.431 G_Regularizer: 0.000 validation_error: 21.183 +(epoch: 47, iters: 123408, time: 0.558, data: 0.000) G_L1: 12.359 G_L1_ABSOLUTE: 2.606 G_L1_RELATIVE: 9.752 G_Regularizer: 0.000 validation_error: 20.996 +(epoch: 47, iters: 125408, time: 0.549, data: 0.000) G_L1: 11.674 G_L1_ABSOLUTE: 2.459 G_L1_RELATIVE: 9.216 G_Regularizer: 0.000 validation_error: 20.695 +(epoch: 47, iters: 127408, time: 0.540, data: 0.000) G_L1: 13.552 G_L1_ABSOLUTE: 2.420 G_L1_RELATIVE: 11.132 G_Regularizer: 0.000 validation_error: 20.981 +(epoch: 47, iters: 129408, time: 0.563, data: 0.000) G_L1: 14.697 G_L1_ABSOLUTE: 2.450 G_L1_RELATIVE: 12.247 G_Regularizer: 0.000 validation_error: 20.953 +(epoch: 47, iters: 131408, time: 0.558, data: 0.000) G_L1: 13.827 G_L1_ABSOLUTE: 2.598 G_L1_RELATIVE: 11.228 G_Regularizer: 0.000 validation_error: 20.777 +(epoch: 47, iters: 133408, time: 0.562, data: 0.000) G_L1: 11.654 G_L1_ABSOLUTE: 2.198 G_L1_RELATIVE: 9.455 G_Regularizer: 0.000 validation_error: 20.774 +(epoch: 47, iters: 135408, time: 0.534, data: 0.000) G_L1: 14.003 G_L1_ABSOLUTE: 2.298 G_L1_RELATIVE: 11.705 G_Regularizer: 0.000 validation_error: 21.049 +(epoch: 47, iters: 137408, time: 0.571, data: 0.000) G_L1: 12.175 G_L1_ABSOLUTE: 2.192 G_L1_RELATIVE: 9.984 G_Regularizer: 0.000 validation_error: 20.626 +(epoch: 47, iters: 139408, time: 0.537, data: 0.000) G_L1: 16.023 G_L1_ABSOLUTE: 2.206 G_L1_RELATIVE: 13.817 G_Regularizer: 0.000 validation_error: 20.589 +(epoch: 47, iters: 141408, time: 0.550, data: 0.000) G_L1: 14.130 G_L1_ABSOLUTE: 2.198 G_L1_RELATIVE: 11.931 G_Regularizer: 0.000 validation_error: 21.031 +(epoch: 47, iters: 143408, time: 0.546, data: 0.000) G_L1: 12.628 G_L1_ABSOLUTE: 2.525 G_L1_RELATIVE: 10.104 G_Regularizer: 0.000 validation_error: 20.943 +(epoch: 47, iters: 145408, time: 0.542, data: 0.000) G_L1: 14.340 G_L1_ABSOLUTE: 2.133 G_L1_RELATIVE: 12.208 G_Regularizer: 0.000 validation_error: 20.536 +(epoch: 47, iters: 147408, time: 0.556, data: 0.000) G_L1: 12.750 G_L1_ABSOLUTE: 2.630 G_L1_RELATIVE: 10.120 G_Regularizer: 0.000 validation_error: 20.969 +(epoch: 47, iters: 149408, time: 0.560, data: 0.000) G_L1: 15.616 G_L1_ABSOLUTE: 2.586 G_L1_RELATIVE: 13.030 G_Regularizer: 0.000 validation_error: 21.173 +(epoch: 47, iters: 151408, time: 0.532, data: 0.001) G_L1: 13.719 G_L1_ABSOLUTE: 
2.321 G_L1_RELATIVE: 11.398 G_Regularizer: 0.000 validation_error: 21.127 +(epoch: 47, iters: 153408, time: 0.536, data: 0.000) G_L1: 15.255 G_L1_ABSOLUTE: 2.518 G_L1_RELATIVE: 12.737 G_Regularizer: 0.000 validation_error: 21.000 +(epoch: 47, iters: 155408, time: 0.548, data: 0.000) G_L1: 12.808 G_L1_ABSOLUTE: 2.530 G_L1_RELATIVE: 10.278 G_Regularizer: 0.000 validation_error: 20.800 +(epoch: 47, iters: 157408, time: 0.549, data: 0.001) G_L1: 15.417 G_L1_ABSOLUTE: 2.334 G_L1_RELATIVE: 13.083 G_Regularizer: 0.000 validation_error: 20.931 +(epoch: 47, iters: 159408, time: 0.545, data: 0.001) G_L1: 20.813 G_L1_ABSOLUTE: 2.850 G_L1_RELATIVE: 17.963 G_Regularizer: 0.000 validation_error: 20.958 +(epoch: 47, iters: 161408, time: 0.541, data: 0.001) G_L1: 13.836 G_L1_ABSOLUTE: 2.956 G_L1_RELATIVE: 10.880 G_Regularizer: 0.000 validation_error: 21.045 +(epoch: 47, iters: 163408, time: 0.555, data: 0.000) G_L1: 12.773 G_L1_ABSOLUTE: 2.361 G_L1_RELATIVE: 10.413 G_Regularizer: 0.000 validation_error: 20.807 +(epoch: 47, iters: 165408, time: 0.554, data: 0.000) G_L1: 13.057 G_L1_ABSOLUTE: 2.618 G_L1_RELATIVE: 10.439 G_Regularizer: 0.000 validation_error: 21.013 +(epoch: 47, iters: 167408, time: 0.547, data: 0.000) G_L1: 12.377 G_L1_ABSOLUTE: 2.324 G_L1_RELATIVE: 10.052 G_Regularizer: 0.000 validation_error: 20.937 +(epoch: 47, iters: 169408, time: 0.551, data: 0.000) G_L1: 11.914 G_L1_ABSOLUTE: 2.110 G_L1_RELATIVE: 9.804 G_Regularizer: 0.000 validation_error: 20.855 +(epoch: 47, iters: 171408, time: 0.553, data: 0.000) G_L1: 13.415 G_L1_ABSOLUTE: 2.844 G_L1_RELATIVE: 10.572 G_Regularizer: 0.000 validation_error: 20.860 +(epoch: 47, iters: 173408, time: 0.551, data: 0.000) G_L1: 14.099 G_L1_ABSOLUTE: 2.115 G_L1_RELATIVE: 11.984 G_Regularizer: 0.000 validation_error: 21.054 +(epoch: 47, iters: 175408, time: 0.549, data: 0.000) G_L1: 14.728 G_L1_ABSOLUTE: 2.429 G_L1_RELATIVE: 12.299 G_Regularizer: 0.000 validation_error: 20.989 +(epoch: 47, iters: 177408, time: 0.558, data: 0.000) G_L1: 15.051 G_L1_ABSOLUTE: 2.777 G_L1_RELATIVE: 12.274 G_Regularizer: 0.000 validation_error: 20.934 +(epoch: 47, iters: 179408, time: 0.532, data: 0.000) G_L1: 14.385 G_L1_ABSOLUTE: 2.437 G_L1_RELATIVE: 11.948 G_Regularizer: 0.000 validation_error: 20.942 +(epoch: 47, iters: 181408, time: 0.533, data: 0.000) G_L1: 13.918 G_L1_ABSOLUTE: 2.723 G_L1_RELATIVE: 11.195 G_Regularizer: 0.000 validation_error: 20.779 +(epoch: 47, iters: 183408, time: 0.548, data: 0.000) G_L1: 13.733 G_L1_ABSOLUTE: 2.030 G_L1_RELATIVE: 11.703 G_Regularizer: 0.000 validation_error: 20.960 +(epoch: 47, iters: 185408, time: 0.557, data: 0.000) G_L1: 14.363 G_L1_ABSOLUTE: 2.577 G_L1_RELATIVE: 11.786 G_Regularizer: 0.000 validation_error: 20.903 +(epoch: 47, iters: 187408, time: 0.529, data: 0.000) G_L1: 15.495 G_L1_ABSOLUTE: 2.452 G_L1_RELATIVE: 13.043 G_Regularizer: 0.000 validation_error: 20.924 +(epoch: 47, iters: 189408, time: 0.556, data: 0.000) G_L1: 14.321 G_L1_ABSOLUTE: 2.530 G_L1_RELATIVE: 11.791 G_Regularizer: 0.000 validation_error: 20.923 +(epoch: 47, iters: 191408, time: 0.559, data: 0.000) G_L1: 15.200 G_L1_ABSOLUTE: 2.378 G_L1_RELATIVE: 12.822 G_Regularizer: 0.000 validation_error: 21.050 +(epoch: 47, iters: 193408, time: 0.547, data: 0.000) G_L1: 12.572 G_L1_ABSOLUTE: 2.004 G_L1_RELATIVE: 10.568 G_Regularizer: 0.000 validation_error: 21.073 +(epoch: 47, iters: 195408, time: 0.540, data: 0.000) G_L1: 14.585 G_L1_ABSOLUTE: 2.486 G_L1_RELATIVE: 12.099 G_Regularizer: 0.000 validation_error: 20.880 +(epoch: 47, iters: 197408, time: 0.560, data: 
0.000) G_L1: 13.173 G_L1_ABSOLUTE: 2.349 G_L1_RELATIVE: 10.823 G_Regularizer: 0.000 validation_error: 20.892 +(epoch: 47, iters: 199408, time: 0.539, data: 0.000) G_L1: 13.287 G_L1_ABSOLUTE: 2.536 G_L1_RELATIVE: 10.752 G_Regularizer: 0.000 validation_error: 20.982 +(epoch: 47, iters: 201408, time: 0.542, data: 0.000) G_L1: 14.310 G_L1_ABSOLUTE: 2.767 G_L1_RELATIVE: 11.544 G_Regularizer: 0.000 validation_error: 20.911 +(epoch: 47, iters: 203408, time: 0.556, data: 0.000) G_L1: 13.023 G_L1_ABSOLUTE: 2.206 G_L1_RELATIVE: 10.817 G_Regularizer: 0.000 validation_error: 20.956 +(epoch: 47, iters: 205408, time: 0.545, data: 0.000) G_L1: 15.485 G_L1_ABSOLUTE: 2.784 G_L1_RELATIVE: 12.701 G_Regularizer: 0.000 validation_error: 21.019 +(epoch: 47, iters: 207408, time: 0.531, data: 0.000) G_L1: 12.024 G_L1_ABSOLUTE: 2.406 G_L1_RELATIVE: 9.618 G_Regularizer: 0.000 validation_error: 20.872 +(epoch: 47, iters: 209408, time: 0.537, data: 0.000) G_L1: 14.493 G_L1_ABSOLUTE: 2.610 G_L1_RELATIVE: 11.883 G_Regularizer: 0.000 validation_error: 21.189 +(epoch: 47, iters: 211408, time: 0.548, data: 0.000) G_L1: 11.333 G_L1_ABSOLUTE: 2.074 G_L1_RELATIVE: 9.259 G_Regularizer: 0.000 validation_error: 21.061 +(epoch: 47, iters: 213408, time: 0.541, data: 0.000) G_L1: 13.648 G_L1_ABSOLUTE: 2.559 G_L1_RELATIVE: 11.089 G_Regularizer: 0.000 validation_error: 20.660 +(epoch: 47, iters: 215408, time: 0.556, data: 0.000) G_L1: 13.757 G_L1_ABSOLUTE: 2.630 G_L1_RELATIVE: 11.126 G_Regularizer: 0.000 validation_error: 20.700 +(epoch: 47, iters: 217408, time: 0.555, data: 0.000) G_L1: 14.165 G_L1_ABSOLUTE: 2.524 G_L1_RELATIVE: 11.641 G_Regularizer: 0.000 validation_error: 20.925 +(epoch: 47, iters: 219408, time: 0.558, data: 0.000) G_L1: 15.206 G_L1_ABSOLUTE: 2.777 G_L1_RELATIVE: 12.429 G_Regularizer: 0.000 validation_error: 21.037 +(epoch: 47, iters: 221408, time: 0.539, data: 0.000) G_L1: 15.954 G_L1_ABSOLUTE: 2.395 G_L1_RELATIVE: 13.559 G_Regularizer: 0.000 validation_error: 20.830 +(epoch: 47, iters: 223408, time: 0.531, data: 0.000) G_L1: 13.023 G_L1_ABSOLUTE: 2.195 G_L1_RELATIVE: 10.828 G_Regularizer: 0.000 validation_error: 20.878 +(epoch: 47, iters: 225408, time: 0.556, data: 0.000) G_L1: 15.072 G_L1_ABSOLUTE: 2.780 G_L1_RELATIVE: 12.292 G_Regularizer: 0.000 validation_error: 21.050 +(epoch: 47, iters: 227408, time: 0.538, data: 0.000) G_L1: 13.501 G_L1_ABSOLUTE: 2.213 G_L1_RELATIVE: 11.288 G_Regularizer: 0.000 validation_error: 20.796 +(epoch: 47, iters: 229408, time: 0.545, data: 0.000) G_L1: 14.215 G_L1_ABSOLUTE: 2.389 G_L1_RELATIVE: 11.826 G_Regularizer: 0.000 validation_error: 21.189 +(epoch: 47, iters: 231408, time: 0.537, data: 0.000) G_L1: 13.624 G_L1_ABSOLUTE: 2.542 G_L1_RELATIVE: 11.082 G_Regularizer: 0.000 validation_error: 20.709 +(epoch: 47, iters: 233408, time: 0.534, data: 0.000) G_L1: 14.455 G_L1_ABSOLUTE: 2.457 G_L1_RELATIVE: 11.997 G_Regularizer: 0.000 validation_error: 21.004 +(epoch: 47, iters: 235408, time: 0.540, data: 0.000) G_L1: 15.438 G_L1_ABSOLUTE: 2.409 G_L1_RELATIVE: 13.029 G_Regularizer: 0.000 validation_error: 20.933 +(epoch: 47, iters: 237408, time: 0.546, data: 0.000) G_L1: 23.324 G_L1_ABSOLUTE: 2.531 G_L1_RELATIVE: 20.792 G_Regularizer: 0.000 validation_error: 20.930 +(epoch: 47, iters: 239408, time: 0.547, data: 0.000) G_L1: 15.218 G_L1_ABSOLUTE: 2.663 G_L1_RELATIVE: 12.554 G_Regularizer: 0.000 validation_error: 20.713 +(epoch: 47, iters: 241408, time: 0.559, data: 0.000) G_L1: 14.062 G_L1_ABSOLUTE: 1.943 G_L1_RELATIVE: 12.118 G_Regularizer: 0.000 validation_error: 20.983 +(epoch: 47, 
iters: 243408, time: 0.536, data: 0.000) G_L1: 16.495 G_L1_ABSOLUTE: 2.600 G_L1_RELATIVE: 13.894 G_Regularizer: 0.000 validation_error: 20.801 +(epoch: 47, iters: 245408, time: 0.554, data: 0.001) G_L1: 12.721 G_L1_ABSOLUTE: 2.538 G_L1_RELATIVE: 10.184 G_Regularizer: 0.000 validation_error: 21.130 +(epoch: 47, iters: 247408, time: 0.558, data: 0.000) G_L1: 13.595 G_L1_ABSOLUTE: 2.284 G_L1_RELATIVE: 11.311 G_Regularizer: 0.000 validation_error: 20.810 +(epoch: 47, iters: 249408, time: 0.559, data: 0.000) G_L1: 15.289 G_L1_ABSOLUTE: 2.756 G_L1_RELATIVE: 12.533 G_Regularizer: 0.000 validation_error: 20.871 +(epoch: 47, iters: 251408, time: 0.551, data: 0.000) G_L1: 12.865 G_L1_ABSOLUTE: 2.209 G_L1_RELATIVE: 10.656 G_Regularizer: 0.000 validation_error: 20.786 +(epoch: 47, iters: 253408, time: 0.555, data: 0.000) G_L1: 18.581 G_L1_ABSOLUTE: 2.139 G_L1_RELATIVE: 16.442 G_Regularizer: 0.000 validation_error: 20.783 +(epoch: 47, iters: 255408, time: 0.539, data: 0.000) G_L1: 13.438 G_L1_ABSOLUTE: 2.606 G_L1_RELATIVE: 10.832 G_Regularizer: 0.000 validation_error: 20.835 +(epoch: 47, iters: 257408, time: 0.552, data: 0.000) G_L1: 14.660 G_L1_ABSOLUTE: 2.760 G_L1_RELATIVE: 11.900 G_Regularizer: 0.000 validation_error: 20.613 +(epoch: 47, iters: 259408, time: 0.548, data: 0.000) G_L1: 11.916 G_L1_ABSOLUTE: 2.167 G_L1_RELATIVE: 9.748 G_Regularizer: 0.000 validation_error: 20.720 +(epoch: 47, iters: 261408, time: 0.556, data: 0.000) G_L1: 13.628 G_L1_ABSOLUTE: 2.704 G_L1_RELATIVE: 10.924 G_Regularizer: 0.000 validation_error: 20.659 +(epoch: 47, iters: 263408, time: 0.554, data: 0.000) G_L1: 14.608 G_L1_ABSOLUTE: 2.298 G_L1_RELATIVE: 12.310 G_Regularizer: 0.000 validation_error: 20.756 +(epoch: 47, iters: 265408, time: 0.544, data: 0.000) G_L1: 15.293 G_L1_ABSOLUTE: 2.631 G_L1_RELATIVE: 12.662 G_Regularizer: 0.000 validation_error: 21.075 +(epoch: 47, iters: 267408, time: 0.554, data: 0.000) G_L1: 14.477 G_L1_ABSOLUTE: 2.585 G_L1_RELATIVE: 11.892 G_Regularizer: 0.000 validation_error: 21.014 +(epoch: 47, iters: 269408, time: 0.550, data: 0.000) G_L1: 11.432 G_L1_ABSOLUTE: 2.428 G_L1_RELATIVE: 9.004 G_Regularizer: 0.000 validation_error: 20.715 +(epoch: 47, iters: 271408, time: 0.565, data: 0.000) G_L1: 12.547 G_L1_ABSOLUTE: 2.026 G_L1_RELATIVE: 10.522 G_Regularizer: 0.000 validation_error: 20.908 +(epoch: 47, iters: 273408, time: 0.552, data: 0.000) G_L1: 13.879 G_L1_ABSOLUTE: 2.474 G_L1_RELATIVE: 11.405 G_Regularizer: 0.000 validation_error: 21.027 +(epoch: 47, iters: 275408, time: 0.554, data: 0.000) G_L1: 16.228 G_L1_ABSOLUTE: 2.467 G_L1_RELATIVE: 13.761 G_Regularizer: 0.000 validation_error: 20.823 +(epoch: 47, iters: 277408, time: 0.543, data: 0.000) G_L1: 16.246 G_L1_ABSOLUTE: 2.433 G_L1_RELATIVE: 13.812 G_Regularizer: 0.000 validation_error: 20.840 +(epoch: 47, iters: 279408, time: 0.557, data: 0.000) G_L1: 13.866 G_L1_ABSOLUTE: 2.631 G_L1_RELATIVE: 11.235 G_Regularizer: 0.000 validation_error: 20.581 +(epoch: 47, iters: 281408, time: 0.530, data: 0.000) G_L1: 12.102 G_L1_ABSOLUTE: 2.365 G_L1_RELATIVE: 9.737 G_Regularizer: 0.000 validation_error: 20.751 +(epoch: 47, iters: 283408, time: 0.563, data: 0.000) G_L1: 13.211 G_L1_ABSOLUTE: 2.485 G_L1_RELATIVE: 10.727 G_Regularizer: 0.000 validation_error: 21.058 +(epoch: 47, iters: 285408, time: 0.541, data: 0.000) G_L1: 15.288 G_L1_ABSOLUTE: 2.219 G_L1_RELATIVE: 13.069 G_Regularizer: 0.000 validation_error: 20.467 +(epoch: 47, iters: 287408, time: 0.553, data: 0.000) G_L1: 12.115 G_L1_ABSOLUTE: 2.367 G_L1_RELATIVE: 9.748 G_Regularizer: 0.000 
validation_error: 20.849 +(epoch: 47, iters: 289408, time: 0.542, data: 0.000) G_L1: 15.578 G_L1_ABSOLUTE: 2.404 G_L1_RELATIVE: 13.174 G_Regularizer: 0.000 validation_error: 20.720 +(epoch: 47, iters: 291408, time: 0.562, data: 0.000) G_L1: 13.177 G_L1_ABSOLUTE: 2.438 G_L1_RELATIVE: 10.739 G_Regularizer: 0.000 validation_error: 20.727 +(epoch: 47, iters: 293408, time: 0.530, data: 0.000) G_L1: 14.891 G_L1_ABSOLUTE: 2.414 G_L1_RELATIVE: 12.477 G_Regularizer: 0.000 validation_error: 20.988 +(epoch: 47, iters: 295408, time: 0.559, data: 0.000) G_L1: 15.205 G_L1_ABSOLUTE: 2.773 G_L1_RELATIVE: 12.432 G_Regularizer: 0.000 validation_error: 20.740 +(epoch: 47, iters: 297408, time: 0.557, data: 0.000) G_L1: 13.349 G_L1_ABSOLUTE: 2.425 G_L1_RELATIVE: 10.924 G_Regularizer: 0.000 validation_error: 20.847 +(epoch: 47, iters: 299408, time: 0.560, data: 0.000) G_L1: 13.582 G_L1_ABSOLUTE: 2.339 G_L1_RELATIVE: 11.243 G_Regularizer: 0.000 validation_error: 20.836 +(epoch: 47, iters: 301408, time: 0.539, data: 0.000) G_L1: 15.124 G_L1_ABSOLUTE: 2.984 G_L1_RELATIVE: 12.140 G_Regularizer: 0.000 validation_error: 20.888 +(epoch: 48, iters: 656, time: 0.542, data: 0.000) G_L1: 13.665 G_L1_ABSOLUTE: 2.657 G_L1_RELATIVE: 11.008 G_Regularizer: 0.000 validation_error: 21.029 +(epoch: 48, iters: 2656, time: 0.552, data: 0.000) G_L1: 14.486 G_L1_ABSOLUTE: 2.595 G_L1_RELATIVE: 11.891 G_Regularizer: 0.000 validation_error: 20.826 +(epoch: 48, iters: 4656, time: 0.535, data: 0.000) G_L1: 12.929 G_L1_ABSOLUTE: 2.516 G_L1_RELATIVE: 10.413 G_Regularizer: 0.000 validation_error: 20.881 +(epoch: 48, iters: 6656, time: 0.539, data: 0.000) G_L1: 14.075 G_L1_ABSOLUTE: 2.552 G_L1_RELATIVE: 11.523 G_Regularizer: 0.000 validation_error: 20.820 +(epoch: 48, iters: 8656, time: 0.552, data: 0.000) G_L1: 15.876 G_L1_ABSOLUTE: 2.564 G_L1_RELATIVE: 13.312 G_Regularizer: 0.000 validation_error: 21.023 +(epoch: 48, iters: 10656, time: 0.569, data: 0.000) G_L1: 14.472 G_L1_ABSOLUTE: 2.506 G_L1_RELATIVE: 11.966 G_Regularizer: 0.000 validation_error: 20.668 +(epoch: 48, iters: 12656, time: 0.535, data: 0.000) G_L1: 13.844 G_L1_ABSOLUTE: 3.055 G_L1_RELATIVE: 10.790 G_Regularizer: 0.000 validation_error: 20.643 +(epoch: 48, iters: 14656, time: 0.536, data: 0.000) G_L1: 14.081 G_L1_ABSOLUTE: 3.025 G_L1_RELATIVE: 11.057 G_Regularizer: 0.000 validation_error: 21.133 +(epoch: 48, iters: 16656, time: 0.536, data: 0.000) G_L1: 13.959 G_L1_ABSOLUTE: 2.289 G_L1_RELATIVE: 11.670 G_Regularizer: 0.000 validation_error: 20.814 +(epoch: 48, iters: 18656, time: 0.540, data: 0.000) G_L1: 16.811 G_L1_ABSOLUTE: 2.577 G_L1_RELATIVE: 14.233 G_Regularizer: 0.000 validation_error: 20.966 +(epoch: 48, iters: 20656, time: 0.541, data: 0.000) G_L1: 12.896 G_L1_ABSOLUTE: 2.340 G_L1_RELATIVE: 10.556 G_Regularizer: 0.000 validation_error: 21.105 +(epoch: 48, iters: 22656, time: 0.543, data: 0.000) G_L1: 12.357 G_L1_ABSOLUTE: 2.450 G_L1_RELATIVE: 9.907 G_Regularizer: 0.000 validation_error: 20.786 +(epoch: 48, iters: 24656, time: 0.548, data: 0.001) G_L1: 12.884 G_L1_ABSOLUTE: 2.435 G_L1_RELATIVE: 10.449 G_Regularizer: 0.000 validation_error: 20.656 +(epoch: 48, iters: 26656, time: 0.556, data: 0.001) G_L1: 14.310 G_L1_ABSOLUTE: 2.870 G_L1_RELATIVE: 11.440 G_Regularizer: 0.000 validation_error: 21.019 +(epoch: 48, iters: 28656, time: 0.550, data: 0.000) G_L1: 16.712 G_L1_ABSOLUTE: 2.623 G_L1_RELATIVE: 14.089 G_Regularizer: 0.000 validation_error: 20.851 +(epoch: 48, iters: 30656, time: 0.534, data: 0.000) G_L1: 15.419 G_L1_ABSOLUTE: 2.733 G_L1_RELATIVE: 12.686 
G_Regularizer: 0.000 validation_error: 20.655 +(epoch: 48, iters: 32656, time: 0.528, data: 0.000) G_L1: 16.417 G_L1_ABSOLUTE: 2.491 G_L1_RELATIVE: 13.926 G_Regularizer: 0.000 validation_error: 20.797 +(epoch: 48, iters: 34656, time: 0.565, data: 0.000) G_L1: 15.450 G_L1_ABSOLUTE: 2.731 G_L1_RELATIVE: 12.720 G_Regularizer: 0.000 validation_error: 20.898 +(epoch: 48, iters: 36656, time: 0.541, data: 0.000) G_L1: 15.602 G_L1_ABSOLUTE: 2.378 G_L1_RELATIVE: 13.224 G_Regularizer: 0.000 validation_error: 20.942 +(epoch: 48, iters: 38656, time: 0.532, data: 0.000) G_L1: 14.467 G_L1_ABSOLUTE: 2.750 G_L1_RELATIVE: 11.717 G_Regularizer: 0.000 validation_error: 21.106 +(epoch: 48, iters: 40656, time: 0.539, data: 0.000) G_L1: 14.569 G_L1_ABSOLUTE: 2.705 G_L1_RELATIVE: 11.864 G_Regularizer: 0.000 validation_error: 20.830 +(epoch: 48, iters: 42656, time: 0.559, data: 0.000) G_L1: 15.759 G_L1_ABSOLUTE: 2.733 G_L1_RELATIVE: 13.026 G_Regularizer: 0.000 validation_error: 20.866 +(epoch: 48, iters: 44656, time: 0.561, data: 0.000) G_L1: 14.941 G_L1_ABSOLUTE: 3.087 G_L1_RELATIVE: 11.854 G_Regularizer: 0.000 validation_error: 20.760 +(epoch: 48, iters: 46656, time: 0.548, data: 0.001) G_L1: 15.772 G_L1_ABSOLUTE: 2.622 G_L1_RELATIVE: 13.150 G_Regularizer: 0.000 validation_error: 20.986 +(epoch: 48, iters: 48656, time: 0.537, data: 0.000) G_L1: 13.331 G_L1_ABSOLUTE: 2.755 G_L1_RELATIVE: 10.576 G_Regularizer: 0.000 validation_error: 20.769 +(epoch: 48, iters: 50656, time: 0.533, data: 0.001) G_L1: 12.280 G_L1_ABSOLUTE: 2.521 G_L1_RELATIVE: 9.759 G_Regularizer: 0.000 validation_error: 20.721 +(epoch: 48, iters: 52656, time: 0.545, data: 0.000) G_L1: 12.602 G_L1_ABSOLUTE: 2.363 G_L1_RELATIVE: 10.240 G_Regularizer: 0.000 validation_error: 20.895 +(epoch: 48, iters: 54656, time: 0.549, data: 0.000) G_L1: 14.524 G_L1_ABSOLUTE: 2.561 G_L1_RELATIVE: 11.964 G_Regularizer: 0.000 validation_error: 20.792 +(epoch: 48, iters: 56656, time: 0.535, data: 0.000) G_L1: 13.512 G_L1_ABSOLUTE: 2.404 G_L1_RELATIVE: 11.108 G_Regularizer: 0.000 validation_error: 21.071 +(epoch: 48, iters: 58656, time: 0.535, data: 0.000) G_L1: 11.933 G_L1_ABSOLUTE: 2.247 G_L1_RELATIVE: 9.686 G_Regularizer: 0.000 validation_error: 20.873 +(epoch: 48, iters: 60656, time: 0.553, data: 0.000) G_L1: 12.891 G_L1_ABSOLUTE: 3.088 G_L1_RELATIVE: 9.803 G_Regularizer: 0.000 validation_error: 20.749 +(epoch: 48, iters: 62656, time: 0.559, data: 0.000) G_L1: 13.460 G_L1_ABSOLUTE: 2.355 G_L1_RELATIVE: 11.106 G_Regularizer: 0.000 validation_error: 20.830 +(epoch: 48, iters: 64656, time: 0.557, data: 0.000) G_L1: 15.884 G_L1_ABSOLUTE: 2.293 G_L1_RELATIVE: 13.591 G_Regularizer: 0.000 validation_error: 21.039 +(epoch: 48, iters: 66656, time: 0.527, data: 0.000) G_L1: 12.567 G_L1_ABSOLUTE: 2.310 G_L1_RELATIVE: 10.258 G_Regularizer: 0.000 validation_error: 20.890 +(epoch: 48, iters: 68656, time: 0.557, data: 0.000) G_L1: 13.751 G_L1_ABSOLUTE: 2.445 G_L1_RELATIVE: 11.306 G_Regularizer: 0.000 validation_error: 20.876 +(epoch: 48, iters: 70656, time: 0.556, data: 0.000) G_L1: 14.940 G_L1_ABSOLUTE: 2.364 G_L1_RELATIVE: 12.576 G_Regularizer: 0.000 validation_error: 20.935 +(epoch: 48, iters: 72656, time: 0.542, data: 0.000) G_L1: 15.313 G_L1_ABSOLUTE: 2.768 G_L1_RELATIVE: 12.545 G_Regularizer: 0.000 validation_error: 20.950 +(epoch: 48, iters: 74656, time: 0.554, data: 0.000) G_L1: 13.672 G_L1_ABSOLUTE: 2.387 G_L1_RELATIVE: 11.284 G_Regularizer: 0.000 validation_error: 20.764 +(epoch: 48, iters: 76656, time: 0.556, data: 0.000) G_L1: 16.601 G_L1_ABSOLUTE: 2.398 G_L1_RELATIVE: 
+[... per-iteration loss log continues through epochs 48-50 (one entry every 2000 iters, ~0.53-0.57 s/iter): G_L1 = G_L1_ABSOLUTE + G_L1_RELATIVE, with G_L1 ~10-27, G_L1_ABSOLUTE ~1.9-3.5, G_Regularizer 0.000, validation_error ~20.6-21.3 throughout ...]
+(epoch: 50, iters: 301152, time: 0.535, data: 0.000) G_L1: 15.200 G_L1_ABSOLUTE: 2.283 G_L1_RELATIVE: 12.917 G_Regularizer: 0.000 validation_error: 20.902 
diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/opt.txt b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/opt.txt
new file mode 100644
index 0000000..d05388c
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/opt.txt
@@ -0,0 +1,62 @@
+----------------- Options ---------------
+             aspect_ratio: 1.0
+        audio_window_size: 16
+               batch_size: 1
+            cached_images: False
+          checkpoints_dir: ./checkpoints
+                 dataroot: /ARD_ZDF [default: None]
+             dataset_mode: multi_face_audio_eq_tmp_cached [default: aligned]
+                direction: AtoB
+          display_winsize: 512 [default: 256]
+                    epoch: latest
+            erosionFactor: 1.0
+                     eval: False
+                 fineSize: 512
+             fix_renderer: False
+                  gpu_ids: 0
+          hierarchicalTex: False
+                init_gain: 0.02
+                init_type: xavier
+                 input_nc: 3
+                  isTrain: False [default: None]
+                 loadSize: 512
+                load_iter: 0 [default: 0]
+               look_ahead: True [default: False]
+                 lossType: RMS [default: L1]
+             mapping_path: /home/alberto/NeuralVoicePuppetry/mappings/audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead/mapping_Severin_videos_SC [default: ]
+         max_dataset_size: inf
+                    model: audio2ExpressionsAttentionTMP4 [default: test]
+               n_layers_D: 3
+                     name: audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead [default: experiment_name]
+                      ndf: 64
+                     netD: basic
+                     netG: unet_256
+                      ngf: 64
+          no_augmentation: False
+               no_dropout: False
+                     norm: instance
+                    ntest: inf
+                 num_test: 50
+              num_threads: 4
+                  out_dir: /home/alberto/data/dave_fxfy/audio2exprNVP/ [default: ]
+ output_audio_expressions: False
+                output_nc: 3
+                    phase: test
+                 renderer: no_renderer
+             rendererType: estimatorAttention [default: UNET_5_level]
+           resize_or_crop: resize_and_crop
+              results_dir: ./results/
+                  seq_len: 8 [default: 1]
+           serial_batches: False
+             source_actor: /home/alberto/NeuralVoicePuppetry/datasets/External/Severin_videos_transformers_lecture [default: ]
+               source_dir: ./datasets/
+                   suffix: 
+             target_actor: 
+                  tex_dim: 256
+             tex_features: 16
+tex_features_intermediate: 16
+             textureModel: DynamicNeuralTextureAudio
+              use_mapping: True [default: False]
+                  verbose: False
+          write_no_images: True [default: False]
+----------------- End -------------------
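Both opt.txt dumps in this patch follow the pix2pix-style layout: one right-aligned `key: value` pair per line between the Options/End banners, with a trailing `[default: ...]` annotation on any option that was overridden on the command line. A minimal sketch of reading such a dump back into a plain dict; the helper name is hypothetical and not a file in this diff:

# parse_opt_txt is a hypothetical helper, not part of this repository.
# It assumes the "key: value [default: x]" layout shown above.
def parse_opt_txt(path):
    opts = {}
    with open(path) as f:
        for line in f:
            line = line.strip()
            # Skip blank lines and the "----- Options/End -----" banners.
            if not line or line.startswith('-----'):
                continue
            key, _, value = line.partition(':')
            value = value.strip()
            # Drop the "[default: ...]" annotation on overridden options.
            if '[default:' in value:
                value = value[:value.index('[default:')].rstrip()
            opts[key.strip()] = value
    return opts

All values come back as strings (e.g. opts['seq_len'] == '8'); any typing beyond that is up to the caller, since the dump itself carries no type information.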
a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4/opt.txt b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4/opt.txt new file mode 100644 index 0000000..caf26b3 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/checkpoints/audio2ExpressionsAttentionTMP4/opt.txt @@ -0,0 +1,59 @@ +----------------- Options --------------- + aspect_ratio: 1.0 + audio_window_size: 16 + batch_size: 1 + cached_images: False + checkpoints_dir: ./checkpoints + dataroot: /ARD_ZDF [default: None] + dataset_mode: multi_face_audio_eq_tmp_cached [default: aligned] + direction: AtoB + display_winsize: 512 [default: 256] + epoch: latest + erosionFactor: 1.0 + eval: False + fineSize: 512 + fix_renderer: False + gpu_ids: 0 + hierarchicalTex: False + init_gain: 0.02 + init_type: xavier + input_nc: 3 + isTrain: False [default: None] + loadSize: 512 + load_iter: 0 [default: 0] + look_ahead: True [default: False] + lossType: RMS [default: L1] + max_dataset_size: inf + model: audio2ExpressionsAttentionTMP4 [default: test] + n_layers_D: 3 + name: audio2ExpressionsAttentionTMP4 [default: experiment_name] + ndf: 64 + netD: basic + netG: unet_256 + ngf: 64 + no_augmentation: False + no_dropout: False + norm: instance + ntest: inf + num_test: 50 + num_threads: 4 + output_audio_expressions: False + output_nc: 3 + phase: test + renderer: no_renderer + rendererType: estimatorAttention [default: UNET_5_level] + resize_or_crop: resize_and_crop + results_dir: ./results/ + seq_len: 8 [default: 1] + serial_batches: False + source_actor: /home/alberto/NeuralVoicePuppetry/datasets//External/Sekunden_Wissen_01 [default: ] + source_dir: ./datasets/ + suffix: + target_actor: /home/alberto/NeuralVoicePuppetry/datasets//External/Russian_guy [default: ] + tex_dim: 256 + tex_features: 16 +tex_features_intermediate: 16 + textureModel: DynamicNeuralTextureAudio + verbose: False + write_no_images: True [default: False] +----------------- End ------------------- diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/__init__.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/__init__.py new file mode 100644 index 0000000..a42be17 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/__init__.py @@ -0,0 +1,90 @@ +import importlib +import torch.utils.data +from .base_data_loader import BaseDataLoader +from .base_dataset import BaseDataset + + +def find_dataset_using_name(dataset_name): + # Given the option --dataset_mode [datasetname], + # the file "data/datasetname_dataset.py" + # will be imported. + dataset_filename = "data." + dataset_name + "_dataset" + datasetlib = importlib.import_module(dataset_filename) + + # In the file, the class called DatasetNameDataset() will + # be instantiated. It has to be a subclass of BaseDataset, + # and it is case-insensitive. + dataset = None + target_dataset_name = dataset_name.replace('_', '') + 'datasets' + for name, cls in datasetlib.__dict__.items(): + if name.lower() == target_dataset_name.lower() \ + and issubclass(cls, BaseDataset): + dataset = cls + + if dataset is None: + print("In %s.py, there should be a subclass of BaseDataset with class name that matches %s in lowercase." 
% (dataset_filename, target_dataset_name)) + exit(0) + + return dataset + + +def get_option_setter(dataset_name): + dataset_class = find_dataset_using_name(dataset_name) + return dataset_class.modify_commandline_options + + +def create_dataset(opt): + dataset = find_dataset_using_name(opt.dataset_mode) + instance = dataset() + instance.initialize(opt) + print("datasets [%s] was created" % (instance.name())) + return instance + + +def CreateDataLoader(opt): + data_loader = CustomDatasetDataLoader() + data_loader.initialize(opt) + return data_loader + + +# Wrapper class of Dataset class that performs +# multi-threaded data loading +class CustomDatasetDataLoader(BaseDataLoader): + def name(self): + return 'CustomDatasetDataLoader' + + def initialize(self, opt): + BaseDataLoader.initialize(self, opt) + self.dataset = create_dataset(opt) + if opt.serial_batches: + self.dataloader = torch.utils.data.DataLoader( + self.dataset, + batch_size=opt.batch_size, + shuffle=not opt.serial_batches, + num_workers=int(opt.num_threads)) + else: + #weights = make_weights_for_balanced_classes(dataset_train.imgs, len(dataset_train.classes)) + weights = self.dataset.getSampleWeights() + weights = torch.DoubleTensor(weights) + sampler = torch.utils.data.sampler.WeightedRandomSampler(weights, len(weights)) + + self.dataloader = torch.utils.data.DataLoader( + self.dataset, + batch_size=opt.batch_size, + #shuffle=True, + sampler=sampler, + pin_memory=True, + num_workers=int(opt.num_threads)) + + + def load_data(self): + return self + + def __len__(self): + return min(len(self.dataset), self.opt.max_dataset_size) + + def __iter__(self): + for i, data in enumerate(self.dataloader): + if i * self.opt.batch_size >= self.opt.max_dataset_size: + break + yield data diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/audio.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/audio.py new file mode 100644 index 0000000..4932c94 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/audio.py @@ -0,0 +1,77 @@ +import time +import random +import math + +import numpy as np + +import torch +import torch.nn as nn +import torch.nn.functional as F +import torchvision.transforms as transforms + +import torchaudio +import torchaudio.transforms + +import librosa +import scipy.signal +import librosa.display +import matplotlib.pyplot as plt + + +class Audio(): + def name(self): + return 'Audio' + + def __init__(self, filename, write_mel_spectogram = False): + self.n_mels=128 + self.fmax=8000 + self.hop_length_ms = 20 + + sound, sample_rate = librosa.load(filename)#torchaudio.load(filename) + self.raw_audio = sound + self.sample_rate = sample_rate + print('sample_rate = %d' % self.sample_rate) + self.n_samples = sound.shape[0] + self.time_total = self.n_samples / self.sample_rate + print('length = %ds' % self.time_total) + + print('compute mel spectrogram...') + self.hop_length = int(sample_rate / 1000.0 * self.hop_length_ms) + print('hop_length: ', self.hop_length) + self.mel_spectrogram = librosa.feature.melspectrogram(y=self.raw_audio, sr=self.sample_rate, hop_length=self.hop_length, n_mels=self.n_mels, fmax=self.fmax) + + + if write_mel_spectogram: + print('write spectrogram to file') + plt.figure(figsize=(100, 15)) + librosa.display.specshow(librosa.power_to_db(self.mel_spectrogram, ref=np.max), y_axis='mel', fmax=self.fmax, x_axis='time') + plt.colorbar(format='%+2.0f dB') + plt.title('Mel spectrogram') + plt.tight_layout() + plt.savefig('mel_features.png', 
dpi=None, facecolor='w', edgecolor='w', orientation='portrait', papertype=None, format=None, transparent=False, bbox_inches=None, pad_inches=0.1, frameon=None, metadata=None) + + print('mel: ', self.mel_spectrogram.shape) # (128, 18441) + self.n_mel_frames = self.mel_spectrogram.shape[1] + self.mel_sample_rate = self.mel_spectrogram.shape[1] / self.time_total + print('n_mel_frames: ', self.n_mel_frames) + print('mel_sample_rate: ', self.mel_sample_rate) + + # convert to torch + self.mel_spectrogram = torch.FloatTensor(self.mel_spectrogram) + + def getWindow(self, mel_frame_idx, window_size): + # get audio mel sample window + audio_start = mel_frame_idx - (window_size//2) + audio_end = mel_frame_idx + (window_size//2) + if audio_start < 0: + audio_input = self.mel_spectrogram[0:self.n_mels, 0:audio_end] + zeros = torch.zeros((self.n_mels,-audio_start)) + audio_input = torch.cat([zeros, audio_input], 1) + elif audio_end >= self.n_mel_frames: + audio_input = self.mel_spectrogram[:, audio_start:-1] + zeros = torch.zeros((self.n_mels,audio_end-self.n_mel_frames + 1)) + audio_input = torch.cat([audio_input, zeros], 1) + else: + audio_input = self.mel_spectrogram[:, audio_start:audio_end] + + return torch.reshape(audio_input, (1, 1, self.n_mels, window_size)) \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/audio_dataset.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/audio_dataset.py new file mode 100644 index 0000000..519cbe1 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/audio_dataset.py @@ -0,0 +1,150 @@ +import os.path +import random +import torchvision.transforms as transforms +import torch +import numpy as np +from .base_dataset import BaseDataset +from .audio import Audio +#from data.image_folder import make_dataset +from PIL import Image +import h5py + +def make_dataset(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.deepspeech.npy']): + id_str = fname[:-15] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + + for id in ids: + fname=str(id)+'.deepspeech.npy' + path = os.path.join(root, fname) + images.append(path) + return images + +def make_ids(paths, root_dir): + ids = [] + + for fname in paths: + l = fname.rfind('/') + #id_str = fname[l+1:-4] + id_str = fname[l+1:-15] + i = int(id_str) + #print(fname, ': ', i) + ids.append(i) + return ids + +class AudioDataset(BaseDataset): + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + self.opt = opt + self.root = opt.dataroot + self.audiofeat_path = os.path.join(opt.dataroot, 'audio_feature') + + self.audiofeat_list = sorted(os.listdir(self.audiofeat_path)) + + print('\taudio_feat_path:', self.audiofeat_path) + + opt.nObjects = 1 + opt.nTrainObjects = 116 # TODO + opt.nTestObjects = 1 + opt.test_sequence_names = [[opt.dataroot.split("/")[-1], 'test']] + assert(opt.resize_or_crop == 'resize_and_crop') + + if opt.isTrain: + print('ERROR: audio_dataset only allowed for test') + exit() + + def getSampleWeights(self): + weights = np.ones((len(self.frame_paths))) + return weights + + def getAudioFilename(self): + return os.path.join(self.root, 'audio.wav') + + def getAudioFeatureFilename(self, idx): + return self.frame_paths[idx % len(self.frame_paths)] + + def load_npy(self, index): + + file_path 
= os.path.join(self.audiofeat_path, "%05d.deepspeech.npy" % index) + + feature_array = np.load(file_path) + + return feature_array + + def __getitem__(self, index): + + # load deepspeech feature + feature_array = self.load_npy(index) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf = transforms.ToTensor()(dsf_np.astype(np.float32)) + + # load sequence data if necessary + if self.opt.look_ahead:# use prev and following frame infos + r = self.opt.seq_len//2 + for i in range(1,r): # prev frames + index_seq = index - i + if index_seq < 0: index_seq = 0 + + feature_array = self.load_npy(index_seq) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + for i in range(1,self.opt.seq_len - r + 1): # following frames + index_seq = index + i + max_idx = len(self.audiofeat_list)-1 + if index_seq > max_idx: index_seq = max_idx + + feature_array = self.load_npy(index_seq) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf, dsf_seq], 0) # seq_len x 16 x 29 + # note the ordering [old ... current ... future] + else: + for i in range(1, self.opt.seq_len): + index_seq = index - i + if index_seq < 0: index_seq = 0 + + feature_array = self.load_npy(index_seq) + + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + + ################################# + zeroIdentity = torch.zeros(100) + zeroExpressions = torch.zeros(76) + + target_id = -1 + internal_sequence_id = 0 + + weight = 1.0 / self.__len__() + + return {'paths': '', + 'expressions': zeroExpressions, + 'identity': zeroIdentity, + 'intrinsics': np.zeros((4)), + 'extrinsics': np.zeros((4,4)), + 'audio_deepspeech': dsf, # deepspeech feature + 'target_id':target_id, + 'internal_id':internal_sequence_id, + 'weight': np.array([weight]).astype(np.float32)} + + def __len__(self): + return len(self.audiofeat_list) + + def name(self): + return 'AudioDataset' diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/base_data_loader.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/base_data_loader.py new file mode 100644 index 0000000..ae5a168 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/base_data_loader.py @@ -0,0 +1,10 @@ +class BaseDataLoader(): + def __init__(self): + pass + + def initialize(self, opt): + self.opt = opt + pass + + def load_data(): + return None diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/base_dataset.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/base_dataset.py new file mode 100644 index 0000000..25c7c8c --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/base_dataset.py @@ -0,0 +1,105 @@ +import torch.utils.data as data +from PIL import Image +import torchvision.transforms as transforms + + +class BaseDataset(data.Dataset): + def __init__(self): + super(BaseDataset, self).__init__() + + def name(self): + return 'BaseDataset' + + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + pass + + def getSampleWeights(self): + return torch.ones((len(self))) + + def __len__(self): + return 0 + + 
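For reference, the look-ahead branch in audio_dataset.py above (and its twins in face_dataset.py and get_audioexpr.py later in the patch) all assemble the same structure: a (seq_len, 16, 29) stack of DeepSpeech feature windows ordered [old ... current ... future], with indices clamped at the sequence edges. A minimal, self-contained sketch of that pattern follows; the helper and its argument names are illustrative only, not part of this patch:

import numpy as np
import torch

def stack_lookahead_window(load_frame, n_frames, index, seq_len=8):
    # load_frame(i) -> np.ndarray reshapeable to (16, 29); n_frames = total frame count.
    r = seq_len // 2
    # Offsets -(r-1) .. +(seq_len-r) give (r-1) past frames, the current frame,
    # and (seq_len-r) future frames: seq_len entries in total, oldest first.
    offsets = range(-(r - 1), seq_len - r + 1)
    clamped = [min(max(index + k, 0), n_frames - 1) for k in offsets]
    frames = [torch.from_numpy(np.resize(load_frame(i), (16, 29)).astype(np.float32))
              for i in clamped]
    return torch.stack(frames, dim=0)  # (seq_len, 16, 29)

The datasets above reach the same layout through repeated torch.cat calls on ToTensor outputs; stacking once over a clamped index list is equivalent and easier to audit.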
+def get_transform(opt): + transform_list = [] + if opt.resize_or_crop == 'resize_and_crop': + osize = [opt.loadSize, opt.loadSize] + transform_list.append(transforms.Resize(osize, Image.BICUBIC)) + transform_list.append(transforms.RandomCrop(opt.fineSize)) + elif opt.resize_or_crop == 'crop': + transform_list.append(transforms.RandomCrop(opt.fineSize)) + elif opt.resize_or_crop == 'scale_width': + transform_list.append(transforms.Lambda( + lambda img: __scale_width(img, opt.fineSize))) + elif opt.resize_or_crop == 'scale_width_and_crop': + transform_list.append(transforms.Lambda( + lambda img: __scale_width(img, opt.loadSize))) + transform_list.append(transforms.RandomCrop(opt.fineSize)) + elif opt.resize_or_crop == 'none': + transform_list.append(transforms.Lambda( + lambda img: __adjust(img))) + else: + raise ValueError('--resize_or_crop %s is not a valid option.' % opt.resize_or_crop) + + if opt.isTrain and not opt.no_flip: + transform_list.append(transforms.RandomHorizontalFlip()) + + transform_list += [transforms.ToTensor(), + transforms.Normalize((0.5, 0.5, 0.5), + (0.5, 0.5, 0.5))] + return transforms.Compose(transform_list) + + +# just modify the width and height to be multiple of 4 +def __adjust(img): + ow, oh = img.size + + # the size needs to be a multiple of this number, + # because going through generator network may change img size + # and eventually cause size mismatch error + mult = 4 + if ow % mult == 0 and oh % mult == 0: + return img + w = (ow - 1) // mult + w = (w + 1) * mult + h = (oh - 1) // mult + h = (h + 1) * mult + + if ow != w or oh != h: + __print_size_warning(ow, oh, w, h) + + return img.resize((w, h), Image.BICUBIC) + + +def __scale_width(img, target_width): + ow, oh = img.size + + # the size needs to be a multiple of this number, + # because going through generator network may change img size + # and eventually cause size mismatch error + mult = 4 + assert target_width % mult == 0, "the target width needs to be multiple of %d." % mult + if (ow == target_width and oh % mult == 0): + return img + w = target_width + target_height = int(target_width * oh / ow) + m = (target_height - 1) // mult + h = (m + 1) * mult + + if target_height != h: + __print_size_warning(target_width, target_height, w, h) + + return img.resize((w, h), Image.BICUBIC) + + +def __print_size_warning(ow, oh, w, h): + if not hasattr(__print_size_warning, 'has_printed'): + print("The image size needs to be a multiple of 4. " + "The loaded image size was (%d, %d), so it was adjusted to " + "(%d, %d). 
This adjustment will be done to all images " + "whose sizes are not multiples of 4" % (ow, oh, w, h)) + __print_size_warning.has_printed = True diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/face_dataset.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/face_dataset.py new file mode 100644 index 0000000..6e46108 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/face_dataset.py @@ -0,0 +1,178 @@ +import os.path +import random +import torchvision.transforms as transforms +import torch +import numpy as np +from .base_dataset import BaseDataset +from .audio import Audio +from PIL import Image +import h5py + + +def make_dataset(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.npy', '.NPY']): + #.deepspeech.npy + id_str = fname[:-15] #4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + + for id in ids: + fname=str(id)+'.deepspeech.npy' + path = os.path.join(root, fname) + images.append(path) + return images + +def make_expr_dataset(dir): + expressions = [] + num_expr = len([name for name in os.listdir(dir) if os.path.isfile(name)]) + for i in range(num_expr): + fname = f'expr_{i}.npy' + path = os.path.join(dir, fname) + expressions.append(path) + return expressions + +def make_ids(paths, root_dir): + ids = [] + + for fname in paths: + l = fname.rfind('/') + id_str = fname[l+1:-15]#4] + i = int(id_str) + #print(fname, ': ', i) + ids.append(i) + return ids + + +# def load_expressions(input_dir): +# file = open(input_dir+"/expressions/", "r") +# expressions = [[float(x) for x in line.split()] for line in file] +# file.close() +# return expressions + +class FaceDataset(BaseDataset): + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + self.opt = opt + + # directories + self.dataroot = opt.dataroot + self.h5py_path = os.path.join(opt.dataroot, opt.dataroot.split("/")[-1]+'.h5') + + # debug print + print('load sequence:', self.dataroot) + print('\th5py_path:', self.h5py_path) + + self.data = h5py.File(self.h5py_path, 'r') + + # set data + self.n_frames_total = min(len(self.data["dsf"]), len(self.data["ep"])) + + print('\tnum frames:', self.n_frames_total) + + + opt.nTrainObjects = 1 + opt.nValObjects = 1 + opt.nTestObjects = 1 + + opt.test_sequence_names = [[opt.dataroot.split("/")[-1], 'train']] + assert(opt.resize_or_crop == 'resize_and_crop') + + def getSampleWeights(self): + weights = np.ones((self.n_frames_total)) + return weights + + + def getAudioFilename(self): + return os.path.join(self.dataroot, 'audio.wav') + + def getAudioFeatureFilename(self, idx): + #return self.frame_paths[idx % len(self.frame_paths)] + audio_id = self.audio_ids[idx] + return os.path.join(self.audio_feature_dir, str(audio_id) + '.deepspeech.npy') + + + def __getitem__(self, global_index): + # select frame from sequence + index = global_index + + # intrinsics and extrinsics + intrinsics = np.zeros((4)) # not used + extrinsics = np.zeros((4,4))# not used + + # expressions + expressions = self.data["ep"][index] + expressions = torch.tensor(expressions) + + # identity + identity = torch.zeros(100) # not used + + # load deepspeech feature + feature_array = self.data["dsf"][index] + dsf_np = np.resize(feature_array, (16,29,1)) + dsf = transforms.ToTensor()(dsf_np.astype(np.float32)) + + + # load 
sequence data if necessary + if self.opt.look_ahead:# use prev and following frame infos + r = self.opt.seq_len//2 + for i in range(1,r): # prev frames + index_seq = index - i + if index_seq < 0: index_seq = 0 + + feature_array = self.data["dsf"][index_seq] + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + for i in range(1,self.opt.seq_len - r + 1): # following frames + index_seq = index + i + max_idx = len(self.data["dsf"])-1 + if index_seq > max_idx: index_seq = max_idx + + feature_array = self.data["dsf"][index_seq] + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf, dsf_seq], 0) # seq_len x 16 x 29 + # note the ordering [old ... current ... future] + else: + last_valid_idx = audio_id + for i in range(1,self.opt.seq_len): + index_seq = index - i + if index_seq < 0: index_seq = 0 + + feature_array = self.data["dsf"][index_seq] + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + + ################################# + weight = 1.0 / self.n_frames_total + + return {#'TARGET': TARGET, 'UV': UV, + 'paths': '', #img_path, + 'intrinsics': np.array(intrinsics), + 'extrinsics': np.array(extrinsics), + 'expressions': expressions, + 'identity': identity, + 'audio_deepspeech': dsf, # deepspeech feature + 'target_id': -1, + 'internal_id': 0, + 'weight': np.array([weight]).astype(np.float32)} + + def __len__(self): + return self.n_frames_total + + def name(self): + return 'FaceDataset' diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/multi_face_audio_eq_tmp_cached_dataset.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/multi_face_audio_eq_tmp_cached_dataset.py new file mode 100644 index 0000000..99535be --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/data/multi_face_audio_eq_tmp_cached_dataset.py @@ -0,0 +1,458 @@ +import os.path +import random +import torchvision.transforms as transforms +import torch +import numpy as np +from data.base_dataset import BaseDataset +from data.audio import Audio +#from data.image_folder import make_dataset +from PIL import Image +import progressbar + +#def make_dataset(dir): +# images = [] +# assert os.path.isdir(dir), '%s is not a valid directory' % dir +# for root, _, fnames in sorted(os.walk(dir)): +# for fname in fnames: +# if any(fname.endswith(extension) for extension in ['.bin', '.BIN']): +# path = os.path.join(root, fname) +# images.append(path) +# return sorted(images) + +def make_dataset(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.npy', '.NPY']): + #.deepspeech.npy + id_str = fname[:-15] #4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + + for id in ids: + fname=str(id)+'.deepspeech.npy' + path = os.path.join(root, fname) + images.append(path) + return images + +def make_ids(paths, root_dir): + ids = [] + + for fname in paths: + l = fname.rfind('/') + id_str = fname[l+1:-15]#4] + i = int(id_str) + #print(fname, ': ', i) + ids.append(i) + return ids + +def 
make_dataset_ids_png(dir): + images = [] + ids = [] + assert os.path.isdir(dir), '%s is not a valid directory' % dir + for root, _, fnames in sorted(os.walk(dir)): + for fname in fnames: + if any(fname.endswith(extension) for extension in ['.png', '.png']): + id_str = fname[:-4] + i = int(id_str) + ids.append(i) + ids = sorted(ids) + return ids + +def load_intrinsics(input_dir): + file = open(input_dir+"/intrinsics.txt", "r") + intrinsics = [[(float(x) for x in line.split())] for line in file] + file.close() + intrinsics = list(intrinsics[0][0]) + return intrinsics + +def load_rigids(input_dir): + file = open(input_dir+"/rigid.txt", "r") + rigid_floats = [[float(x) for x in line.split()] for line in file] # note that it stores 5 lines per matrix (blank line) + file.close() + all_rigids = [ [rigid_floats[4*idx + 0],rigid_floats[4*idx + 1],rigid_floats[4*idx + 2],rigid_floats[4*idx + 3]] for idx in range(0, len(rigid_floats)//4) ] + return all_rigids + +def load_expressions(input_dir): + file = open(input_dir+"/expression.txt", "r") + expressions = [[float(x) for x in line.split()] for line in file] + file.close() + return expressions + +def load_identity(input_dir): + file = open(input_dir+"/identities.txt", "r") + identities = [[float(x) for x in line.split()] for line in file] + file.close() + return identities + + +class MultiFaceAudioEQTmpCachedDataset(BaseDataset): + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + self.opt = opt + self.root = opt.dataroot + + # read datasets file that contains the filenames for the train, val and test lists + file = open(self.root+"/datasets.txt", "r") + self.filename_train_list, self.filename_val_list, self.filename_test_list = [str(line) for line in file] + file.close() + if self.filename_train_list[-1] == '\n': self.filename_train_list = self.filename_train_list[:-1] + if self.filename_val_list[-1] == '\n': self.filename_val_list = self.filename_val_list[:-1] + if self.filename_test_list[-1] == '\n': self.filename_test_list = self.filename_test_list[:-1] + + + + # get list of train sequences + file = open(self.root+"/" + self.filename_train_list, "r") + self.train_sequence_names = [str(line) for line in file] + file.close() + for i in range(0,len(self.train_sequence_names)): + if self.train_sequence_names[i][-1] == '\n': + self.train_sequence_names[i] = self.train_sequence_names[i][:-1] + + # get list of val sequences + file = open(self.root+"/" + self.filename_val_list, "r") + self.val_sequence_names = [[str(w) for w in line.split()] for line in file] + file.close() + for i in range(0,len(self.val_sequence_names)): + if self.val_sequence_names[i][0][-1] == '\n': self.val_sequence_names[i][0] = self.val_sequence_names[i][0][:-1] + if self.val_sequence_names[i][1][-1] == '\n': self.val_sequence_names[i][1] = self.val_sequence_names[i][1][:-1] + + # get list of test sequences + file = open(self.root+"/" + self.filename_test_list, "r") + self.test_sequence_names = [[str(w) for w in line.split()] for line in file] + if opt.output_audio_expressions: self.test_sequence_names = self.test_sequence_names[0:1] + file.close() + for i in range(0,len(self.test_sequence_names)): + if self.test_sequence_names[i][0][-1] == '\n': self.test_sequence_names[i][0] = self.test_sequence_names[i][0][:-1] + if self.test_sequence_names[i][1][-1] == '\n': self.test_sequence_names[i][1] = self.test_sequence_names[i][1][:-1] + + # print some stats + print('filename_train_list:', self.filename_train_list) + 
print('\tnum_seq:', len(self.train_sequence_names)) + print('filename_val_list: ', self.filename_val_list) + print('\tnum_seq:', len(self.val_sequence_names)) + print('filename_test_list: ', self.filename_test_list) + print('\tnum_seq:', len(self.test_sequence_names)) + + opt.train_sequence_names = self.train_sequence_names + opt.val_sequence_names = self.val_sequence_names + opt.test_sequence_names = self.test_sequence_names + + # search mapping from val, test to train sequences that are used as targets + self.val_sequence_targets = [] + for i in range(0,len(self.val_sequence_names)): + target_name = self.val_sequence_names[i][1] + target_id = -1 + for j in range(0,len(self.train_sequence_names)): + if self.train_sequence_names[j] == target_name: + target_id = j + break + if target_id == -1: + print('Target sequence not in train set! ', target_name) + exit() + self.val_sequence_targets.append(target_id) + + self.test_sequence_targets = [] + for i in range(0,len(self.test_sequence_names)): + target_name = self.test_sequence_names[i][1] + target_id = -1 + for j in range(0,len(self.train_sequence_names)): + if self.train_sequence_names[j] == target_name: + target_id = j + break + if target_id == -1: + print('Target sequence not in train set! ', target_name) + exit() + self.test_sequence_targets.append(target_id) + print('test: ', self.test_sequence_names[i]) + print('\t target:', target_id) + + # store len values + opt.nTrainObjects = len(self.train_sequence_names) + opt.nValObjects = len(self.val_sequence_names) + opt.nTestObjects = len(self.test_sequence_names) + + ################################################ + ################################################ + ################################################ + + # prepare dataloader paths / data + self.audio_feature_dir = [] + self.image_dir = [] + self.uvs_dir = [] + self.audio_ids = [] + self.image_ids = [] + self.intrinsics = [] + self.extrinsics = [] + self.expressions = [] + self.identities = [] + self.target_id = [] + self.n_frames_total = 0 + + if opt.phase == 'train': + self.sequence_names = self.train_sequence_names + for i in range(0,len(self.train_sequence_names)): + dataroot = os.path.join(opt.dataroot, self.train_sequence_names[i]) + audio_feature_dir = os.path.join(opt.dataroot, self.train_sequence_names[i], 'audio_feature') + image_dir = os.path.join(opt.dataroot, self.train_sequence_names[i], 'images') + uvs_dir = os.path.join(opt.dataroot, self.train_sequence_names[i], 'uvs') + print('load train sequence:', self.train_sequence_names[i]) + print('\tidentity_dir:', dataroot) + print('\taudio_feature_dir:', audio_feature_dir) + print('\timage_dir:', image_dir) + print('\tuvs_dir:', uvs_dir) + + audio_ids = make_ids(make_dataset(audio_feature_dir), dataroot) + image_ids = make_dataset_ids_png(image_dir) # [-1] * len(audio_ids) #make_ids(make_dataset(image_dir), dataroot) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(dataroot) + identity = load_identity(dataroot) + + min_len = min(len(audio_ids), len(image_ids), len(extrinsics), len(expressions)) + + self.audio_feature_dir.append(audio_feature_dir) + self.image_dir.append(image_dir) + self.uvs_dir.append(uvs_dir) + self.audio_ids.append(audio_ids[:min_len]) + self.image_ids.append(image_ids[:min_len]) + self.intrinsics.append(intrinsics) + self.extrinsics.append(extrinsics[:min_len]) + self.expressions.append(expressions[:min_len]) + self.identities.append(identity[:min_len]) + self.target_id.append(i) + + 
self.n_frames_total += min_len + elif opt.phase == 'val': + for i in range(0,len(self.val_sequence_names)): + target_id = self.val_sequence_targets[i] + dataroot = os.path.join(opt.dataroot, self.train_sequence_names[target_id]) + audio_feature_dir = os.path.join(opt.dataroot, self.val_sequence_names[i][0], 'audio_feature') + image_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'images') + uvs_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'uvs') + print('load val sequence:', self.val_sequence_names[i]) + print('\tidentity_dir:', dataroot) + print('\taudio_feature_dir:', audio_feature_dir) + print('\timage_dir:', image_dir) + print('\tuvs_dir:', uvs_dir) + audio_ids = make_ids(make_dataset(audio_feature_dir), os.path.join(opt.dataroot, self.val_sequence_names[i][0])) + image_ids = make_dataset_ids_png(image_dir) # [-1] * len(audio_ids) #make_ids(make_dataset(image_dir), dataroot) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(os.path.join(opt.dataroot, self.val_sequence_names[i][0])) + identity = load_identity(dataroot) + + min_len = min(len(audio_ids), len(image_ids), len(extrinsics), len(expressions)) + + self.audio_feature_dir.append(audio_feature_dir) + self.image_dir.append(image_dir) + self.uvs_dir.append(uvs_dir) + self.audio_ids.append(audio_ids[:min_len]) + self.image_ids.append(image_ids[:min_len]) + self.intrinsics.append(intrinsics) + self.extrinsics.append(extrinsics[:min_len]) + self.expressions.append(expressions[:min_len]) + self.identities.append(identity[:min_len]) + self.target_id.append(target_id) + + self.n_frames_total += min_len + else: # test + for i in range(0,len(self.test_sequence_names)): + target_id = self.test_sequence_targets[i] + dataroot = os.path.join(opt.dataroot, self.train_sequence_names[target_id]) + audio_feature_dir = os.path.join(opt.dataroot, self.test_sequence_names[i][0], 'audio_feature') + image_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'images') + uvs_dir = os.path.join(opt.dataroot, self.train_sequence_names[target_id], 'uvs') + print('load test sequence:', self.test_sequence_names[i]) + print('\tidentity_dir:', dataroot) + print('\taudio_feature_dir:', audio_feature_dir) + print('\timage_dir:', image_dir) + print('\tuvs_dir:', uvs_dir) + audio_ids = make_ids(make_dataset(audio_feature_dir), os.path.join(opt.dataroot, self.test_sequence_names[i][0])) + image_ids = make_dataset_ids_png(image_dir) # [-1] * len(audio_ids) #make_ids(make_dataset(image_dir), dataroot) + intrinsics = load_intrinsics(dataroot) + extrinsics = load_rigids(dataroot) + expressions = load_expressions(os.path.join(opt.dataroot, self.test_sequence_names[i][0])) + identity = load_identity(dataroot) + + min_len = min(len(audio_ids), len(image_ids), len(extrinsics), len(expressions)) + + self.audio_feature_dir.append(audio_feature_dir) + self.image_dir.append(image_dir) + self.uvs_dir.append(uvs_dir) + self.audio_ids.append(audio_ids[:min_len]) + self.image_ids.append(image_ids[:min_len]) + self.intrinsics.append(intrinsics) + self.extrinsics.append(extrinsics[:min_len]) + self.expressions.append(expressions[:min_len]) + self.identities.append(identity[:min_len]) + self.target_id.append(target_id) + + self.n_frames_total += min_len + + + print('frames_total:', self.n_frames_total) + + + #global_target_ids = [] + #for i in range(0,len(self.audio_ids)): + # for j in range(0,len(self.audio_ids[i])): + # 
global_target_ids.append(self.target_id[i]) + #global_target_ids=np.array(global_target_ids) + #self.weights = np.where(global_target_ids==2, 1.0 * np.ones((self.n_frames_total)), 0.01 * np.ones((self.n_frames_total)) ) + self.weights = [] + for i in range(0,len(self.audio_ids)): + l = len(self.audio_ids[i]) + for j in range(0,l): + self.weights.append(1.0 / l) + self.weights = np.array(self.weights) + + assert(opt.resize_or_crop == 'resize_and_crop') + + # mapping global to internal + self.mapping_global2internal = [] + self.mapping_global2internal_offset = [] + self.dsf = [] + offset = 0 + with progressbar.ProgressBar(max_value=self.n_frames_total) as bar: + for i in range(0,len(self.audio_ids)): + l = len(self.audio_ids[i]) + dsf_seq = [] + for k in range(0,l): + self.mapping_global2internal.append(i) + self.mapping_global2internal_offset.append(offset) + dsf_fname = os.path.join(self.audio_feature_dir[i], str(self.audio_ids[i][k]) + '.deepspeech.npy') + feature_array = np.load(dsf_fname) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq.append(dsf_np.astype(np.float32)) + bar.update(offset + k) + self.dsf.append(dsf_seq) + offset += l + + + def getSampleWeights(self): + return self.weights + + def getitem(self, global_index): + + # select sequence + internal_sequence_id = self.mapping_global2internal[global_index] + sum_frames = self.mapping_global2internal_offset[global_index] + + # select frame from sequence + index = (global_index-sum_frames) % len(self.audio_ids[internal_sequence_id]) + + # get data ids + audio_id = self.audio_ids[internal_sequence_id][index] + image_id = self.image_ids[internal_sequence_id][index] + + #print('GET ITEM: ', index) + #img_path = self.frame_paths[sequence_id][index] + + # intrinsics and extrinsics + intrinsics = self.intrinsics[internal_sequence_id] + extrinsics = self.extrinsics[internal_sequence_id][image_id] + + # expressions + expressions = np.asarray(self.expressions[internal_sequence_id][audio_id], dtype=np.float32) + #print('expressions:', expressions.shape) + expressions[32] *= 0.0 # remove eye brow movements + expressions[41] *= 0.0 # remove eye brow movements + expressions[71:75] *= 0.0 # remove eye brow movements + expressions = torch.tensor(expressions) + + # identity + identity = torch.tensor(self.identities[internal_sequence_id][image_id]) + target_id = self.target_id[internal_sequence_id] # sequence id refers to the target sequence (of the training corpus) + + # load deepspeech feature + #print('audio_id', audio_id) + dsf_fname = os.path.join(self.audio_feature_dir[internal_sequence_id], str(audio_id) + '.deepspeech.npy') + + dsf_np = self.dsf[internal_sequence_id][index] + dsf = transforms.ToTensor()(dsf_np) + + # load sequence data if necessary + if self.opt.look_ahead:# use prev and following frame infos + r = self.opt.seq_len//2 + for i in range(1,r): # prev frames + index_seq = index - i + if index_seq < 0: index_seq = 0 + dsf_np = self.dsf[internal_sequence_id][index_seq] + dsf_seq = transforms.ToTensor()(dsf_np) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + for i in range(1,self.opt.seq_len - r + 1): # following frames + index_seq = index + i + max_idx = len(self.audio_ids[internal_sequence_id])-1 + if index_seq > max_idx: index_seq = max_idx + dsf_np = self.dsf[internal_sequence_id][index_seq] + dsf_seq = transforms.ToTensor()(dsf_np) # 1 x 16 x 29 + dsf = torch.cat([dsf, dsf_seq], 0) # seq_len x 16 x 29 + # note the ordering [old ... current ... 
future] + else: + for i in range(1,self.opt.seq_len): + index_seq = index - i + if index_seq < 0: index_seq = 0 + dsf_np = self.dsf[internal_sequence_id][index_seq] + dsf_seq = transforms.ToTensor()(dsf_np) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + #weight = 1.0 / len(self.audio_feature_dir[internal_sequence_id]) + weight = self.weights[global_index] + + return {'paths': dsf_fname, #img_path, + 'intrinsics': np.array(intrinsics), + 'extrinsics': np.array(extrinsics), + 'expressions': expressions, + 'identity': identity, + 'audio_deepspeech': dsf, # deepspeech feature + 'target_id':target_id, + 'internal_id':internal_sequence_id, + + 'weight': np.array([weight]).astype(np.float32)} + + + def __getitem__(self, global_index): + # select frame from sequence + index = global_index + current = self.getitem(index) + prv = self.getitem(max(index-1, 0)) + nxt = self.getitem(min(index+1, self.n_frames_total-1)) + + return { + 'paths': current['paths'], #img_path, + 'target_id': current['target_id'], + 'internal_id': current['internal_id'], + 'weight': current['weight'], + 'identity': current['identity'], + 'intrinsics': current['intrinsics'], + + 'extrinsics': current['extrinsics'], + 'expressions': current['expressions'], + 'audio_deepspeech': current['audio_deepspeech'], + + 'extrinsics_prv': prv['extrinsics'], + 'expressions_prv': prv['expressions'], + 'audio_deepspeech_prv': prv['audio_deepspeech'], + + 'extrinsics_nxt': nxt['extrinsics'], + 'expressions_nxt': nxt['expressions'], + 'audio_deepspeech_nxt': nxt['audio_deepspeech'], + } + + + def __len__(self): + return self.n_frames_total #len(self.frame_paths[0]) + + def name(self): + return 'MultiFaceAudioEQTmpCachedDataset' diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/get_audioexpr.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/get_audioexpr.py new file mode 100644 index 0000000..4fb7501 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/get_audioexpr.py @@ -0,0 +1,383 @@ +import sys +import os +import random +import torchvision.transforms as transforms +import argparse + + +sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '.'))) + +from data.audio_dataset import AudioDataset +from data.base_dataset import BaseDataset + +from models import create_model +import torch +import numpy as np +import copy +from tqdm import tqdm + +class FaceDataset(BaseDataset): + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def initialize(self, opt): + self.opt = opt + + # directories + self.dataroot = opt.dataroot + self.expr_path = os.path.join(opt.dataroot, 'deca_expr') + self.audio_feature_path = os.path.join(opt.dataroot, 'audio_feature') + + # debug print + print('load sequence:', self.dataroot) + + self.n_frames_total = len(os.listdir(self.expr_path)) - 1 + + opt.nTrainObjects = 1 + opt.nValObjects = 1 + opt.nTestObjects = 1 + + opt.test_sequence_names = [[opt.dataroot.split("/")[-1], 'train']] + assert(opt.resize_or_crop == 'resize_and_crop') + + def getSampleWeights(self): + weights = np.ones((self.n_frames_total)) + return weights + + + def getAudioFilename(self): + return os.path.join(self.dataroot, 'audio.wav') + + def getAudioFeatureFilename(self, idx): + return os.path.join(self.audio_feature_path, str(idx) + '.deepspeech.npy') + + + def __getitem__(self, global_index): + # select frame from sequence + index = global_index + + # expressions + 
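# Note: unlike the h5py-backed FaceDataset earlier in this patch, this variant
# addresses a frame purely by its zero-padded index on disk:
# deca_expr/%05d.npy holds the DECA expression vector and
# audio_feature/%05d.deepspeech.npy the matching DeepSpeech window.
# A cheap sanity check one could add here (an assumption, not part of the
# patch):
#
#     assert index < self.n_frames_total, 'frame index out of range'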
expressions = np.load(os.path.join(self.expr_path, '%05d.npy' % index)) + expressions = torch.from_numpy(expressions)[0] + + # identity + identity = torch.zeros(100) # not used + + # load deepspeech feature + feature_array = np.load(os.path.join(self.audio_feature_path, '%05d.deepspeech.npy' % index)) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf = transforms.ToTensor()(dsf_np.astype(np.float32)) + + + # load sequence data if necessary + if self.opt.look_ahead:# use prev and following frame infos + r = self.opt.seq_len//2 + for i in range(1,r): # prev frames + index_seq = index - i + if index_seq < 0: index_seq = 0 + + feature_array = np.load(os.path.join(self.audio_feature_path, '%05d.deepspeech.npy' % index_seq)) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... current] + + for i in range(1,self.opt.seq_len - r + 1): # following frames + index_seq = index + i + max_idx = self.n_frames_total-1 + if index_seq > max_idx: index_seq = max_idx + + feature_array = np.load(os.path.join(self.audio_feature_path, '%05d.deepspeech.npy' % index_seq)) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf, dsf_seq], 0) # seq_len x 16 x 29 + # note the ordering [old ... current ... future] + else: + last_valid_idx = audio_id + for i in range(1,self.opt.seq_len): + index_seq = index - i + if index_seq < 0: index_seq = 0 + + feature_array = np.load(os.path.join(self.audio_feature_path, '%05d.deepspeech.npy' % index_seq)) + dsf_np = np.resize(feature_array, (16,29,1)) + dsf_seq = transforms.ToTensor()(dsf_np.astype(np.float32)) # 1 x 16 x 29 + dsf = torch.cat([dsf_seq, dsf], 0) # seq_len x 16 x 29 + # note the ordering [old ... 
current] + + + ################################# + weight = 1.0 / self.n_frames_total + + return {#'TARGET': TARGET, 'UV': UV, + 'paths': '', #img_path, + 'expressions': expressions, + 'identity': identity, + 'audio_deepspeech': dsf, # deepspeech feature + 'target_id': -1, + 'internal_id': 0, + 'weight': np.array([weight]).astype(np.float32)} + + def __len__(self): + return self.n_frames_total + + def name(self): + return 'FaceDataset' + + + + self.expressions = input['expressions'].cuda() + self.audio_features = input['audio_deepspeech'].cuda() # b x seq_len x 16 x 29 + self.target_id = input['target_id'].cuda() + + +def make_opts(dataset_base): + class Struct: + def __init__(self, **entries): + self.__dict__.update(entries) + + opt = {} + opt['dataroot'] = dataset_base + opt['batch_size'] = 1 + opt['seq_len'] = 8 + opt['fineSize'] = 512 + opt['display_winsize'] = 512 + opt['input_nc'] = 3 + opt['output_nc'] = 3 + opt['ngf'] = 64 + opt['ndf'] = 64 + opt['netD'] = 'basic' + opt['netG'] = 'unet_256' + opt['n_layers_D'] = 3 + opt['gpu_ids'] = '0' + opt['fineSize'] = 256 + + opt['name'] = 'audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead' + opt['renderer'] = 'no_renderer' + opt['fix_renderer'] = True + opt['dataset_mode']='multi_face_audio_eq_tmp_cached' + opt['model'] = 'audio2ExpressionsAttentionTMP4' + opt['direction'] = 'AtoB' + opt['epoch'] = 'latest' + opt['load_iter'] = 0 + opt['num_threads'] = 4 + opt['checkpoints_dir'] = os.getcwd() + '/third/Audio2ExpressionNet/checkpoints' + opt['norm'] = 'instance' + opt['serial_batches'] = False + opt['no_dropout'] = False + opt['max_dataset_size'] = float("inf") + + opt['resize_or_crop'] = 'resize_and_crop' + opt['no_augmentation'] = False + opt['init_type'] = 'xavier' + opt['init_gain'] = 0.02 + + opt['verbose'] = False + opt['suffix'] = '' + opt['tex_dim'] = 256 + opt['tex_features_intermediate'] = 16 + opt['tex_features'] = 16 + opt['textureModel'] = 'DynamicNeuralTextureAudio' + opt['rendererType'] = 'estimatorAttention' + opt['lossType'] = 'RMS' + + opt['hierarchicalTex'] = False + opt['output_audio_expressions'] = False + opt['erosionFactor'] = 1.0 + opt['audio_window_size'] = 16 + opt['look_ahead'] = True + opt['cached_images'] = False + opt['ntest'] = float("inf") + opt['results_dir'] = './results/' + opt['aspect_ratio'] = 1.0, + opt['phase'] = 'test' + opt['eval'] = False + opt['num_test'] = 50 + opt['write_no_images'] = True + + # Important + opt['source_dir'] = './datasets/' + opt['source_actor'] = dataset_base + + # extra + opt['isTrain'] = False + + s = Struct(**opt) + + return s + + +def load_model(opt): + opt.output_audio_expressions = True + opt.nTrainObjects = 116 + + print('#train objects = %d' % opt.nTrainObjects) + + print('>>> create model <<<') + model = create_model(opt) + print('>>> setup model <<<') + model.setup(opt) + + return model + + +def load_source_sequence(opt): + opt_source = copy.copy(opt) # create a clone + opt_source.dataroot = opt.source_actor # overwrite root directory + print(opt_source.dataroot) + opt_source.dataset_mode = 'audio' + opt_source.phase = 'train' + + dataset_source = AudioDataset() + + dataset_source.initialize(opt_source) + + dataloader = torch.utils.data.DataLoader( + dataset_source, + batch_size=opt.batch_size, + shuffle=not opt.serial_batches, + num_workers=int(opt.num_threads)) + + return dataset_source, dataloader + + +def load_target_sequence(opt): + opt_target = copy.copy(opt) # create a clone + # 
opt_target.dataroot = opt.target_actor # overwrite root directory + opt_target.dataset_mode = 'face' + opt_target.phase = 'train' + dataset_target = FaceDataset() + dataset_target.initialize(opt_target) + + dataloader = torch.utils.data.DataLoader( + dataset_target, + batch_size=opt.batch_size, + shuffle=not opt.serial_batches, + num_workers=int(opt.num_threads)) + + return dataset_target, dataloader + + +def get_audioexpr(name, dataset_base, out_dir, mapping_path=None): + + # read options + print('Base: ', dataset_base) + opt = make_opts(dataset_base) + + # hard-code some parameters for test + opt.num_threads = 1 # test code only supports num_threads = 1 + opt.batch_size = 1 # test code only supports batch_size = 1 + opt.serial_batches = True # no shuffle + opt.no_augmentation = True # no flip + opt.display_id = -1 # no visdom display + opt.source_actor = dataset_base + + # load model + model = load_model(opt) + + # Make mapping + if mapping_path is None: + mapping_fn = os.path.join(dataset_base, 'mapping.npy') + + else: + mapping_fn = mapping_path + + not_exists = not os.path.exists(mapping_fn) + + if not_exists: + # read target sequence + dataset_target, data_loader_target = load_target_sequence(opt) + + # collect data + print('collect data') + audio_expressions = None + gt_expressions = None + for i, data in tqdm(enumerate(data_loader_target)): + model.set_input(data) + model.test() + + ae = model.fake_expressions.data[:,:,0] + if type(audio_expressions) == type(None): + audio_expressions = ae + e = model.expressions.data + gt_expressions = e + else: + audio_expressions = torch.cat([audio_expressions,ae],dim=0) + e = model.expressions.data + gt_expressions = torch.cat([gt_expressions,e],dim=0) + + # solve for mapping + print('solve for mapping') + optimize_in_parameter_space = True #False + if optimize_in_parameter_space: +# A = audio_expressions +# B = gt_expressions +# # solve lstsq ||AX - B|| +# X, _ = torch.gels(B, A, out=None) +# #X, _ = torch.lstsq(B, A) # requires pytorch 1.2 +# X = X[0:A.shape[1],:] +# mapping = X.t() + + # use gradient descent method + n = audio_expressions.shape[0] + subspace_dim = 32 + + # TODO: patch + n_expr = 53 + + X = torch.nn.Parameter(torch.randn(n_expr, subspace_dim, requires_grad=True).cuda()) + optimizer = torch.optim.Adam([X], lr=0.01) + lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=30, gamma=0.1) + num_epochs = 90 + random_range = [k for k in range(0,n)] + + for ep in tqdm(range(0,num_epochs)): + random.shuffle(random_range) + for j in random_range: + expressions = gt_expressions[j] + fake_expressions = torch.matmul(X, audio_expressions[j]) + diff_expression = fake_expressions - expressions + loss = torch.mean(diff_expression * diff_expression) # L2 + optimizer.zero_grad() + loss.backward() + optimizer.step() + lr_scheduler.step() + + mapping = X.data + + map_cpu = mapping.data.cpu().numpy() + np.save(mapping_fn, map_cpu) + + else: + # load mapping from file + map_cpu = np.load(mapping_fn) + mapping = torch.tensor(map_cpu.astype(np.float32)).cuda() + print('loaded mapping from file', mapping.shape) + + + # read source sequence + dataset_source, data_loader_source = load_source_sequence(opt) + + expression_multiplier = 1.0 + + print(f'Extracting audio-features of {name}..') + for i, data in enumerate(tqdm(data_loader_source)): + model.set_input(data) + model.test() + audio_expression = model.fake_expressions.data[0, :, 0] + expression = expression_multiplier * torch.matmul(mapping, audio_expression) + expression = 
expression[None, :] + np.save(os.path.join(out_dir, '%05d.npy' % i), expression.cpu().numpy()) + +if __name__ == '__main__': + parser = argparse.ArgumentParser() + parser.add_argument('--name', required=True) + parser.add_argument('--dataset_base', required=True) + parser.add_argument('--out_dir', required=True) + parser.add_argument('--mapping_path', required=True) + inopt = parser.parse_args() + + out_dir = os.path.join(inopt.out_dir, 'audio_expr') + os.makedirs(out_dir, exist_ok=True) + + get_audioexpr(inopt.name, inopt.dataset_base, out_dir, inopt.mapping_path) diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/models/__init__.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/models/__init__.py new file mode 100644 index 0000000..eeddb5a --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/models/__init__.py @@ -0,0 +1,42 @@ +import importlib +from .base_model import BaseModel + + +def find_model_using_name(model_name): + # Given the option --model [modelname], + # the file "models/modelname_model.py" + # will be imported. + model_filename = "." + model_name + "_model" + print(model_filename) + modellib = importlib.import_module(model_filename, package="models") + print(modellib) + + # In the file, the class called ModelNameModel() will + # be instantiated. It has to be a subclass of BaseModel, + # and it is case-insensitive. + model = None + target_model_name = model_name.replace('_', '') + 'model' + print(target_model_name) + for name, cls in modellib.__dict__.items(): + if name.lower() == target_model_name.lower() \ + and issubclass(cls, BaseModel): + model = cls + + if model is None: + print("In %s.py, there should be a subclass of BaseModel with class name that matches %s in lowercase." % (model_filename, target_model_name)) + exit(0) + + return model + + +def get_option_setter(model_name): + model_class = find_model_using_name(model_name) + return model_class.modify_commandline_options + + +def create_model(opt): + model = find_model_using_name(opt.model) + instance = model() + instance.initialize(opt) + print("model [%s] was created" % (instance.name())) + return instance diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/models/audio2ExpressionsAttentionTMP4_model.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/models/audio2ExpressionsAttentionTMP4_model.py new file mode 100644 index 0000000..654d99e --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/models/audio2ExpressionsAttentionTMP4_model.py @@ -0,0 +1,348 @@ +import os +import torch +import torch.nn as nn +import torchvision.transforms as transforms +from autil.image_pool import ImagePool +from .base_model import BaseModel +from . 
import networks +import numpy as np +import functools + +INVALID_UV = -1.0 +N_EXPRESSIONS=76 + +from torchvision import models +from collections import namedtuple + + +class ExpressionEstimator_Attention(nn.Module): + def __init__(self, n_output_expressions, nIdentities, seq_len, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False): + super(ExpressionEstimator_Attention, self).__init__() + print('Estimator Attention') + ################################# + ######## audio net ########## + ################################# + self.seq_len = seq_len + + dropout_rate = 0.0 + if use_dropout == True: + #dropout_rate = 0.5 + dropout_rate = 0.25 + + self.convNet = nn.Sequential( + nn.Conv2d(29, 32, kernel_size=(3,1), stride=(2,1), padding=(1,0), bias=True), # 29 x 16 x 1 => 32 x 8 x 1 + nn.LeakyReLU(0.02, True), + nn.Conv2d(32, 32, kernel_size=(3,1), stride=(2,1), padding=(1,0), bias=True), # 32 x 8 x 1 => 32 x 4 x 1 + nn.LeakyReLU(0.02, True), + nn.Conv2d(32, 64, kernel_size=(3,1), stride=(2,1), padding=(1,0), bias=True), # 32 x 4 x 1 => 64 x 2 x 1 + nn.LeakyReLU(0.2, True), + nn.Conv2d(64, 64, kernel_size=(3,1), stride=(2,1), padding=(1,0), bias=True), # 64 x 2 x 1 => 64 x 1 x 1 + nn.LeakyReLU(0.2, True), + ) + + fullNet_input_size = 64 + + self.subspace_dim = 32 # number of audio expressions + print('fullNet_input_size: ', fullNet_input_size) + self.fullNet = nn.Sequential( + nn.Linear(in_features = fullNet_input_size, out_features=128, bias = True), + nn.LeakyReLU(0.02), + + nn.Linear(in_features = 128, out_features=64, bias = True), + nn.LeakyReLU(0.02), + + nn.Linear(in_features = 64, out_features=self.subspace_dim, bias = True), + nn.Tanh() + ) + + + # # mapping from subspace to full expression space + self.register_parameter('mapping', torch.nn.Parameter(torch.randn(1, nIdentities, N_EXPRESSIONS, self.subspace_dim, requires_grad=True))) + + # attention + self.attentionConvNet = nn.Sequential( # b x subspace_dim x seq_len + nn.Conv1d(self.subspace_dim, 16, kernel_size=3, stride=1, padding=1, bias=True), + nn.LeakyReLU(0.02, True), + nn.Conv1d(16, 8, kernel_size=3, stride=1, padding=1, bias=True), + nn.LeakyReLU(0.02, True), + nn.Conv1d(8, 4, kernel_size=3, stride=1, padding=1, bias=True), + nn.LeakyReLU(0.02, True), + nn.Conv1d(4, 2, kernel_size=3, stride=1, padding=1, bias=True), + nn.LeakyReLU(0.02, True), + nn.Conv1d(2, 1, kernel_size=3, stride=1, padding=1, bias=True), + nn.LeakyReLU(0.02, True) + ) + self.attentionNet = nn.Sequential( + nn.Linear(in_features = self.seq_len, out_features=self.seq_len, bias = True), + nn.Softmax(dim=1) + ) + #self.hidden2subspace = nn.Linear(self.subspace_dim,self.subspace_dim) + + def forward_internal(self, audio_features_sequence, identity_id): + result_subspace, intermediate_expression = self.getAudioExpressions_internal(audio_features_sequence) + mapping = torch.index_select(self.mapping[0], dim = 0, index = identity_id) + result = 10.0 * torch.bmm(mapping, result_subspace)[:,:,0] + result_intermediate = 10.0 * torch.bmm(mapping, intermediate_expression)[:,:,0] + #exit(-1) + return result, result_intermediate + + def forward(self, audio_features_sequence, identity_id): + result_subspace = self.getAudioExpressions(audio_features_sequence) + mapping = torch.index_select(self.mapping[0], dim = 0, index = identity_id) + result = torch.bmm(mapping, result_subspace)[:,:,0] + #exit(-1) + return 10.0 * result + + def getAudioExpressions_internal(self, audio_features_sequence): + # audio_features_sequence: b x seq_len x 16 x 29 + b = 
audio_features_sequence.shape[0] # batchsize + audio_features_sequence = audio_features_sequence.view(b * self.seq_len, 1, 16, 29) # b * seq_len x 1 x 16 x 29 + audio_features_sequence = torch.transpose(audio_features_sequence, 1, 3) # b* seq_len x 29 x 16 x 1 + conv_res = self.convNet( audio_features_sequence ) + conv_res = torch.reshape( conv_res, (b * self.seq_len, 1, -1)) + result_subspace = self.fullNet(conv_res)[:,0,:] # b * seq_len x subspace_dim + result_subspace = result_subspace.view(b, self.seq_len, self.subspace_dim)# b x seq_len x subspace_dim + + ################# + ### attention ### + ################# + result_subspace_T = torch.transpose(result_subspace, 1, 2) # b x subspace_dim x seq_len + intermediate_expression = result_subspace_T[:,:,(self.seq_len // 2):(self.seq_len // 2) + 1] + att_conv_res = self.attentionConvNet(result_subspace_T) + #print('att_conv_res', att_conv_res.shape) + attention = self.attentionNet(att_conv_res.view(b, self.seq_len)).view(b, self.seq_len, 1) # b x seq_len x 1 + #print('attention', attention.shape) + # pooling along the sequence dimension + result_subspace = torch.bmm(result_subspace_T, attention) + #print('result_subspace', result_subspace.shape) + ### + + return result_subspace.view(b, self.subspace_dim, 1), intermediate_expression + + def getAudioExpressions(self, audio_features_sequence): + expr, _ = self.getAudioExpressions_internal(audio_features_sequence) + return expr + + def regularizer(self): + #reg = torch.norm(self.mapping) + reg_mapping = torch.mean(torch.abs(self.mapping)) + + # one could also enforce orthogonality here + + # s_browExpressions[] = { 32, 41, 71, 72, 73, 74, 75 }; + reg_eye_brow = torch.mean(torch.abs( self.mapping[0,:,[32, 41, 71, 72, 73, 74, 75],:] )) + #return 0.01 * reg_mapping + 1.0 * reg_eye_brow + return 0.0 * reg_mapping + + + +def define_ExpressionEstimator(estimatorType='estimatorDefault', nIdentities=1, seq_len=1, norm='batch', use_dropout=False, init_type='normal', init_gain=0.02, gpu_ids=[]): + net = None + print('EstimatorType: ', estimatorType) + if estimatorType=='estimatorAttention': net = ExpressionEstimator_Attention(N_EXPRESSIONS,nIdentities, seq_len) + + return networks.init_net(net, init_type, init_gain, gpu_ids) + + +class Audio2ExpressionsAttentionTMP4Model(BaseModel): + def name(self): + return 'Audio2ExpressionsAttentionTMP4Model' + + @staticmethod + def modify_commandline_options(parser, is_train=True): + + # changing the default values to match the pix2pix paper + # (https://phillipi.github.io/pix2pix/) + #parser.set_defaults(norm='batch', netG='unet_256') + parser.set_defaults(norm='instance', netG='unet_256') + parser.set_defaults(dataset_mode='aligned') + if is_train: + parser.set_defaults(pool_size=0, no_lsgan=True) + parser.add_argument('--lambda_L1', type=float, default=100.0, help='weight for L1 loss') + + return parser + + def initialize(self, opt): + BaseModel.initialize(self, opt) + self.isTrain = opt.isTrain + + self.trainRenderer = not opt.fix_renderer + + # specify the training losses you want to print out. The program will call base_model.get_current_losses + self.loss_names = ['G_L1','G_L1_ABSOLUTE','G_L1_RELATIVE', 'G_Regularizer'] + + # specify the images you want to save/display. The program will call base_model.get_current_visuals + #self.visual_names = ['input_uv', 'fake', 'target'] + self.visual_names = ['zeros'] + self.zeros = torch.zeros(1,3,2,2) + + # specify the models you want to save to the disk. 
The program will call base_model.save_networks and base_model.load_networks + if self.isTrain: + self.model_names = ['netG'] + else: # during test time, only load Gs + self.model_names = ['netG'] + + self.fake_expressions = None + self.fake_expressions_prv = None + self.fake_expressions_nxt = None + + if self.isTrain: + self.morphable_model = MorphableModel() + self.mask = self.morphable_model.LoadMask() + + nIdentities=opt.nTrainObjects + + # load/define networks + self.netG = define_ExpressionEstimator(estimatorType=opt.rendererType, nIdentities=nIdentities, seq_len=opt.seq_len, gpu_ids=self.gpu_ids) + + if self.isTrain: + use_sigmoid = opt.no_lsgan + self.fake_AB_pool = ImagePool(opt.pool_size) + + # define loss functions + self.criterionL1 = torch.nn.L1Loss() + self.criterionL1Smooth = torch.nn.SmoothL1Loss() + self.criterionL2 = torch.nn.MSELoss() + + # initialize optimizers + self.optimizers = [] + #self.optimizer_G = torch.optim.Adam(self.netG.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999), weight_decay=0.1 )#10.0) + self.optimizer_G = torch.optim.Adam(self.netG.parameters(), lr=opt.lr, betas=(opt.beta1, 0.999), weight_decay=0.0 )#10.0) + self.optimizers.append(self.optimizer_G) + + + def set_input(self, input): + #self.image_paths = input['paths'] + + self.expressions = input['expressions'].cuda() + self.audio_features = input['audio_deepspeech'].cuda() # b x seq_len x 16 x 29 + + if self.isTrain: + self.expressions_prv = input['expressions_prv'].cuda() + self.audio_features_prv = input['audio_deepspeech_prv'].cuda() # b x seq_len x 16 x 29 + + self.expressions_nxt = input['expressions_nxt'].cuda() + self.audio_features_nxt = input['audio_deepspeech_nxt'].cuda() # b x seq_len x 16 x 29 + + self.target_id = input['target_id'].cuda() + + + def forward(self): + # estimate expressions + if self.opt.output_audio_expressions: #self.opt.dataset_mode=='audio': + self.fake_expressions = self.netG.getAudioExpressions(self.audio_features) + if self.isTrain: + self.fake_expressions_prv = self.netG.getAudioExpressions(self.audio_features_prv) + self.fake_expressions_nxt = self.netG.getAudioExpressions(self.audio_features_nxt) + else: + self.fake_expressions, self.fake_expressions_intermediate = self.netG.forward_internal(self.audio_features, self.target_id) + if self.isTrain: + self.fake_expressions_prv = self.netG(self.audio_features_prv, self.target_id) + self.fake_expressions_nxt = self.netG(self.audio_features_nxt, self.target_id) + + + + def backward_G(self, epoch): + + # Second, G(A) = B + #self.loss_G_L1 = self.criterionL1(self.fake_expressions, self.expressions) + + # difference in vertex space + mask = torch.cat([self.mask[:,None],self.mask[:,None],self.mask[:,None]], 1) + mask = mask + 0.1 * torch.ones_like(mask) # priority for the mask region, but other region should also be constrained + + # absolute (single timesteps) + diff_expression = self.fake_expressions - self.expressions + diff_vertices = self.morphable_model.compute_expression_delta(diff_expression) + + + diff_expression_intermediate = self.fake_expressions_intermediate - self.expressions + diff_vertices_intermediate = self.morphable_model.compute_expression_delta(diff_expression_intermediate) + + + diff_expression_prv = self.fake_expressions_prv - self.expressions_prv + diff_vertices_prv = self.morphable_model.compute_expression_delta(diff_expression_prv) + + diff_expression_nxt = self.fake_expressions_nxt - self.expressions_nxt + diff_vertices_nxt = self.morphable_model.compute_expression_delta(diff_expression_nxt) + + 
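# compute_expression_delta maps expression-coefficient differences through the morphable model's expression basis, so each loss term here is measured as per-vertex displacements, weighted by `mask`. + # The relative terms below compare temporal differences between neighbouring frames (cur-nxt, prv-cur, nxt-prv), encouraging temporally smooth predicted expressions. +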
# relative (temporal 1 timestep) cur - nxt and prv - cur + diff_expression_tmp_cur_nxt = (self.fake_expressions - self.fake_expressions_nxt) - (self.expressions - self.expressions_nxt) + diff_vertices_tmp_cur_nxt = self.morphable_model.compute_expression_delta(diff_expression_tmp_cur_nxt) + diff_expression_tmp_prv_cur = (self.fake_expressions_prv - self.fake_expressions) - (self.expressions_prv - self.expressions) + diff_vertices_tmp_prv_cur = self.morphable_model.compute_expression_delta(diff_expression_tmp_prv_cur) + + # relative (temporal 2 timesteps) nxt - prv + diff_expression_tmp_nxt_prv = (self.fake_expressions_nxt - self.fake_expressions_prv) - (self.expressions_nxt - self.expressions_prv) + diff_vertices_tmp_nxt_prv = self.morphable_model.compute_expression_delta(diff_expression_tmp_nxt_prv) + + #print('mask: ', mask.shape) + #print('diff_vertices: ', diff_vertices.shape) + + self.loss_G_L1_ABSOLUTE = 0.0 + self.loss_G_L1_RELATIVE = 0.0 + if self.opt.lossType == 'L1': + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * torch.abs(diff_vertices)) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * torch.abs(diff_vertices_prv)) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * torch.abs(diff_vertices_nxt)) + + self.loss_G_L1_ABSOLUTE += 3000.0 * torch.mean(mask * torch.abs(diff_vertices_intermediate)) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * torch.abs(diff_vertices_tmp_cur_nxt)) + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * torch.abs(diff_vertices_tmp_prv_cur)) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * torch.abs(diff_vertices_tmp_nxt_prv)) + + elif self.opt.lossType == 'L2': + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * diff_vertices * diff_vertices) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * diff_vertices_prv * diff_vertices_prv) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * diff_vertices_nxt * diff_vertices_nxt) + + self.loss_G_L1_ABSOLUTE += 3000.0 * torch.mean(mask * diff_vertices_intermediate * diff_vertices_intermediate) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * diff_vertices_tmp_cur_nxt * diff_vertices_tmp_cur_nxt) + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * diff_vertices_tmp_prv_cur * diff_vertices_tmp_prv_cur) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * diff_vertices_tmp_nxt_prv * diff_vertices_tmp_nxt_prv) + + elif self.opt.lossType == 'RMS': + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.sqrt(torch.mean(mask * diff_vertices * diff_vertices)) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.sqrt(torch.mean(mask * diff_vertices_prv * diff_vertices_prv)) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.sqrt(torch.mean(mask * diff_vertices_nxt * diff_vertices_nxt)) + + self.loss_G_L1_ABSOLUTE += 3000.0 * torch.sqrt(torch.mean(mask * diff_vertices_intermediate * diff_vertices_intermediate)) + + + self.loss_G_L1_RELATIVE += 20000.0 * torch.sqrt(torch.mean(mask * diff_vertices_tmp_cur_nxt * diff_vertices_tmp_cur_nxt)) + self.loss_G_L1_RELATIVE += 20000.0 * torch.sqrt(torch.mean(mask * diff_vertices_tmp_prv_cur * diff_vertices_tmp_prv_cur)) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.sqrt(torch.mean(mask * diff_vertices_tmp_nxt_prv * diff_vertices_tmp_nxt_prv)) + + else: # L1 + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * torch.abs(diff_vertices)) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * torch.abs(diff_vertices_prv)) + self.loss_G_L1_ABSOLUTE += 1000.0 * torch.mean(mask * torch.abs(diff_vertices_nxt)) + + self.loss_G_L1_ABSOLUTE += 
3000.0 * torch.mean(mask * torch.abs(diff_vertices_intermediate)) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * torch.abs(diff_vertices_tmp_cur_nxt)) + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * torch.abs(diff_vertices_tmp_prv_cur)) + + self.loss_G_L1_RELATIVE += 20000.0 * torch.mean(mask * torch.abs(diff_vertices_tmp_nxt_prv)) + + self.loss_G_L1 = self.loss_G_L1_ABSOLUTE + self.loss_G_L1_RELATIVE + self.loss_G_Regularizer = self.netG.regularizer() + + self.loss_G = self.loss_G_L1 + self.loss_G_Regularizer #self.loss_G_GAN + self.loss_G_L1 + self.regularizerTex + + + self.loss_G.backward() + + def optimize_parameters(self, epoch_iter): + self.forward() + + # update Generator + self.optimizer_G.zero_grad() + self.backward_G(epoch_iter) + self.optimizer_G.step() + diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/models/base_model.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/models/base_model.py new file mode 100644 index 0000000..2a76496 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/models/base_model.py @@ -0,0 +1,223 @@ +import os +import torch +import torch.nn as nn +from collections import OrderedDict +from . import networks +import numpy as np +from PIL import Image + +def save_tensor_image(input_image, image_path): + if isinstance(input_image, torch.Tensor): + image_tensor = input_image.data + image_numpy = image_tensor[0].cpu().float().numpy() + if image_numpy.shape[0] == 1: + image_numpy = np.tile(image_numpy, (3, 1, 1)) + image_numpy = (np.transpose(image_numpy, (1, 2, 0)) + 1) / 2.0 * 255.0 + else: + image_numpy = input_image + image_numpy = image_numpy.astype(np.uint8) + image_pil = Image.fromarray(image_numpy) + image_pil.save(image_path) + +class BaseModel(): + + # modify parser to add command line options, + # and also change the default values if needed + @staticmethod + def modify_commandline_options(parser, is_train): + return parser + + def name(self): + return 'BaseModel' + + def initialize(self, opt): + self.opt = opt + self.gpu_ids = opt.gpu_ids + self.isTrain = opt.isTrain + self.device = torch.device('cuda:{}'.format(self.gpu_ids[0])) if self.gpu_ids else torch.device('cpu') + self.load_dir = os.path.join(opt.checkpoints_dir, opt.name) + self.save_dir = os.path.join(opt.checkpoints_dir, opt.name) + if opt.resize_or_crop != 'scale_width': + torch.backends.cudnn.benchmark = True + self.loss_names = [] + self.model_names = [] + self.visual_names = [] + self.image_paths = [] + + def set_input(self, input): + pass + + def forward(self): + pass + + # load and print networks; create schedulers + def setup(self, opt, parser=None): + if self.isTrain: + self.schedulers = [networks.get_scheduler(optimizer, opt) for optimizer in self.optimizers] + if not self.isTrain or opt.continue_train: + load_suffix = 'iter_%d' % opt.load_iter if opt.load_iter > 0 else opt.epoch + self.load_networks(load_suffix) + self.print_networks(opt.verbose) + + + + # load specific modules + def loadModules(self, opt, model_name, module_names): + for name in module_names: + if isinstance(name, str): + load_dir = os.path.join(opt.checkpoints_dir, model_name) + load_filename = 'latest_%s.pth' % (name) + load_path = os.path.join(load_dir, load_filename) + net = getattr(self, name) + if isinstance(net, torch.Tensor): + print('loading the tensor from %s' % load_path) + net_loaded = torch.load(load_path, map_location=str(self.device)) + net.copy_(net_loaded) + else: + # if isinstance(net, 
torch.nn.DataParallel): + # net = net.module + print('loading the module from %s' % load_path) + # if you are using PyTorch newer than 0.4 (e.g., built from + # GitHub source), you can remove str() on self.device + state_dict = torch.load(load_path, map_location=str(self.device)) + if hasattr(state_dict, '_metadata'): + del state_dict._metadata + + # patch InstanceNorm checkpoints prior to 0.4 + for key in list(state_dict.keys()): # need to copy keys here because we mutate in loop + self.__patch_instance_norm_state_dict(state_dict, net, key.split('.')) + net.load_state_dict(state_dict) + + + + + # switch models to eval mode during test time + def eval(self): + for name in self.model_names: + if isinstance(name, str): + net = getattr(self, name) + net.eval() + + # used in test time, wrapping `forward` in no_grad() so we don't save + # intermediate steps for backprop + def test(self): + with torch.no_grad(): + self.forward() + + # get image paths + def get_image_paths(self): + return self.image_paths + + def optimize_parameters(self): + pass + + # update learning rate (called once every epoch) + def update_learning_rate(self): + for scheduler in self.schedulers: + scheduler.step() + lr = self.optimizers[0].param_groups[0]['lr'] + print('learning rate = %.7f' % lr) + + # return visualization images. train.py will display these images, and save them to an HTML file + def get_current_visuals(self): + visual_ret = OrderedDict() + for name in self.visual_names: + if isinstance(name, str): + visual_ret[name] = getattr(self, name) + return visual_ret + + # return training losses/errors. train.py will print out these errors as debugging information + def get_current_losses(self): + errors_ret = OrderedDict() + for name in self.loss_names: + if isinstance(name, str): + # float(...) 
works for both scalar tensor and float number + errors_ret[name] = float(getattr(self, 'loss_' + name)) + return errors_ret + + # save models to the disk + def save_networks(self, epoch): + for name in self.model_names: + if isinstance(name, str): + save_filename = '%s_%s.pth' % (epoch, name) + save_path = os.path.join(self.save_dir, save_filename) + net = getattr(self, name) + + if isinstance(net, torch.Tensor): + #torch.save(net.state_dict(), save_path) + torch.save(net, save_path) + for i in range(0, list(net.size())[0]): + save_tensor_image(net[i:i+1,0:3,:,:], save_path+str(i)+'.png') + else: + if len(self.gpu_ids) > 0 and torch.cuda.is_available(): + #torch.save(net.module.cpu().state_dict(), save_path) # << original + torch.save(net.cpu().state_dict(), save_path) + net.cuda(self.gpu_ids[0]) + else: + torch.save(net.cpu().state_dict(), save_path) + + def __patch_instance_norm_state_dict(self, state_dict, module, keys, i=0): + key = keys[i] + if i + 1 == len(keys): # at the end, pointing to a parameter/buffer + if module.__class__.__name__.startswith('InstanceNorm') and \ + (key == 'running_mean' or key == 'running_var'): + if getattr(module, key) is None: + state_dict.pop('.'.join(keys)) + if module.__class__.__name__.startswith('InstanceNorm') and \ + (key == 'num_batches_tracked'): + state_dict.pop('.'.join(keys)) + else: + self.__patch_instance_norm_state_dict(state_dict, getattr(module, key), keys, i + 1) + + # load models from the disk + def load_networks(self, epoch): + for name in self.model_names: + if isinstance(name, str): + load_filename = '%s_%s.pth' % (epoch, name) + load_path = os.path.join(self.load_dir, load_filename) + net = getattr(self, name) + if isinstance(net, torch.Tensor): + print('loading the tensor from %s' % load_path) + net_loaded = torch.load(load_path, map_location=str(self.device)) + net.copy_(net_loaded) + else: + # if isinstance(net, torch.nn.DataParallel): + # net = net.module + print('loading the module from %s' % load_path) + # if you are using PyTorch newer than 0.4 (e.g., built from + # GitHub source), you can remove str() on self.device + state_dict = torch.load(load_path, map_location=str(self.device)) + if hasattr(state_dict, '_metadata'): + del state_dict._metadata + + # patch InstanceNorm checkpoints prior to 0.4 + for key in list(state_dict.keys()): # need to copy keys here because we mutate in loop + self.__patch_instance_norm_state_dict(state_dict, net, key.split('.')) + net.load_state_dict(state_dict) + + # print network information + def print_networks(self, verbose): + print('---------- Networks initialized -------------') + for name in self.model_names: + if isinstance(name, str): + net = getattr(self, name) + if isinstance(net, torch.Tensor): + num_params = net.numel() + print('[Tensor %s] Total number of parameters : %.3f M' % (name, num_params / 1e6)) + else: + num_params = 0 + for param in net.parameters(): + num_params += param.numel() + if verbose: + print(net) + print('[Network %s] Total number of parameters : %.3f M' % (name, num_params / 1e6)) + print('-----------------------------------------------') + + # set requires_grad=False to avoid computation + def set_requires_grad(self, nets, requires_grad=False): + if not isinstance(nets, list): + nets = [nets] + for net in nets: + if net is not None: + for param in net.parameters(): + param.requires_grad = requires_grad diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/models/networks.py 
b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/models/networks.py new file mode 100644 index 0000000..f5495e1 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/models/networks.py @@ -0,0 +1,383 @@ +import torch +import torch.nn as nn +from torch.nn import init +import functools +from torch.optim import lr_scheduler + +############################################################################### +# Helper Functions +############################################################################### + + +def get_norm_layer(norm_type='instance'): + if norm_type == 'batch': + norm_layer = functools.partial(nn.BatchNorm2d, affine=True) + elif norm_type == 'instance': + norm_layer = functools.partial(nn.InstanceNorm2d, affine=False, track_running_stats=False) + elif norm_type == 'none': + norm_layer = None + else: + raise NotImplementedError('normalization layer [%s] is not found' % norm_type) + return norm_layer + + +def get_scheduler(optimizer, opt): + if opt.lr_policy == 'lambda': + def lambda_rule(epoch): + lr_l = 1.0 - max(0, epoch + opt.epoch_count - opt.niter) / float(opt.niter_decay + 1) + return lr_l + scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda_rule) + elif opt.lr_policy == 'step': + scheduler = lr_scheduler.StepLR(optimizer, step_size=opt.lr_decay_iters, gamma=0.1) + elif opt.lr_policy == 'plateau': + scheduler = lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.2, threshold=0.01, patience=5) + elif opt.lr_policy == 'cosine': + scheduler = lr_scheduler.CosineAnnealingLR(optimizer, T_max=opt.niter, eta_min=0) + else: + raise NotImplementedError('learning rate policy [%s] is not implemented' % opt.lr_policy) + return scheduler + + +def init_weights(net, init_type='normal', gain=0.02): + def init_func(m): + classname = m.__class__.__name__ + if hasattr(m, 'weight') and (classname.find('Conv') != -1 or classname.find('Linear') != -1): + if init_type == 'normal': + init.normal_(m.weight.data, 0.0, gain) + elif init_type == 'xavier': + init.xavier_normal_(m.weight.data, gain=gain) + elif init_type == 'kaiming': + init.kaiming_normal_(m.weight.data, a=0, mode='fan_in') + elif init_type == 'orthogonal': + init.orthogonal_(m.weight.data, gain=gain) + else: + raise NotImplementedError('initialization method [%s] is not implemented' % init_type) + if hasattr(m, 'bias') and m.bias is not None: + init.constant_(m.bias.data, 0.0) + elif classname.find('BatchNorm2d') != -1: + init.normal_(m.weight.data, 1.0, gain) + init.constant_(m.bias.data, 0.0) + + print('initialize network with %s' % init_type) + net.apply(init_func) + + +def init_net(net, init_type='normal', init_gain=0.02, gpu_ids=[]): + if len(gpu_ids) > 0: + assert(torch.cuda.is_available()) + net.to(int(gpu_ids[0])) + #net = torch.nn.DataParallel(net, gpu_ids) + init_weights(net, init_type, gain=init_gain) + return net + + +def define_G(input_nc, output_nc, ngf, netG, norm='batch', use_dropout=False, init_type='normal', init_gain=0.02, gpu_ids=[]): + net = None + norm_layer = get_norm_layer(norm_type=norm) + + if netG == 'resnet_9blocks': + net = ResnetGenerator(input_nc, output_nc, ngf, norm_layer=norm_layer, use_dropout=use_dropout, n_blocks=9) + elif netG == 'resnet_6blocks': + net = ResnetGenerator(input_nc, output_nc, ngf, norm_layer=norm_layer, use_dropout=use_dropout, n_blocks=6) + elif netG == 'unet_128': + net = UnetGenerator(input_nc, output_nc, 7, ngf, norm_layer=norm_layer, use_dropout=use_dropout) + elif netG == 'unet_256': + net = 
UnetGenerator(input_nc, output_nc, 8, ngf, norm_layer=norm_layer, use_dropout=use_dropout) + else: + raise NotImplementedError('Generator model name [%s] is not recognized' % netG) + return init_net(net, init_type, init_gain, gpu_ids) + + +def define_D(input_nc, ndf, netD, + n_layers_D=3, norm='batch', use_sigmoid=False, init_type='normal', init_gain=0.02, gpu_ids=[]): + net = None + norm_layer = get_norm_layer(norm_type=norm) + + if netD == 'basic': + net = NLayerDiscriminator(input_nc, ndf, n_layers=3, norm_layer=norm_layer, use_sigmoid=use_sigmoid) + elif netD == 'n_layers': + net = NLayerDiscriminator(input_nc, ndf, n_layers_D, norm_layer=norm_layer, use_sigmoid=use_sigmoid) + elif netD == 'pixel': + net = PixelDiscriminator(input_nc, ndf, norm_layer=norm_layer, use_sigmoid=use_sigmoid) + else: + raise NotImplementedError('Discriminator model name [%s] is not recognized' % netD) + return init_net(net, init_type, init_gain, gpu_ids) + + +############################################################################## +# Classes +############################################################################## + + +# Defines the GAN loss which uses either LSGAN or the regular GAN. +# When LSGAN is used, it is basically the same as MSELoss, +# but it abstracts away the need to create the target label tensor +# that has the same size as the input +class GANLoss(nn.Module): + def __init__(self, use_lsgan=True, target_real_label=1.0, target_fake_label=0.0): + super(GANLoss, self).__init__() + self.register_buffer('real_label', torch.tensor(target_real_label)) + self.register_buffer('fake_label', torch.tensor(target_fake_label)) + if use_lsgan: + self.loss = nn.MSELoss() + else: + self.loss = nn.BCELoss() + + def get_target_tensor(self, input, target_is_real): + if target_is_real: + target_tensor = self.real_label + else: + target_tensor = self.fake_label + return target_tensor.expand_as(input) + + def __call__(self, input, target_is_real): + target_tensor = self.get_target_tensor(input, target_is_real) + return self.loss(input, target_tensor) + + +# Defines the generator that consists of Resnet blocks between a few +# downsampling/upsampling operations. +# Code and idea originally from Justin Johnson's architecture.
+# https://github.com/jcjohnson/fast-neural-style/ +class ResnetGenerator(nn.Module): + def __init__(self, input_nc, output_nc, ngf=64, norm_layer=nn.BatchNorm2d, use_dropout=False, n_blocks=6, padding_type='reflect'): + assert(n_blocks >= 0) + super(ResnetGenerator, self).__init__() + self.input_nc = input_nc + self.output_nc = output_nc + self.ngf = ngf + if type(norm_layer) == functools.partial: + use_bias = norm_layer.func == nn.InstanceNorm2d + else: + use_bias = norm_layer == nn.InstanceNorm2d + + model = [nn.ReflectionPad2d(3), + nn.Conv2d(input_nc, ngf, kernel_size=7, padding=0, + bias=use_bias), + norm_layer(ngf), + nn.ReLU(True)] + + n_downsampling = 2 + for i in range(n_downsampling): + mult = 2**i + model += [nn.Conv2d(ngf * mult, ngf * mult * 2, kernel_size=3, + stride=2, padding=1, bias=use_bias), + norm_layer(ngf * mult * 2), + nn.ReLU(True)] + + mult = 2**n_downsampling + for i in range(n_blocks): + model += [ResnetBlock(ngf * mult, padding_type=padding_type, norm_layer=norm_layer, use_dropout=use_dropout, use_bias=use_bias)] + + for i in range(n_downsampling): + mult = 2**(n_downsampling - i) + model += [nn.ConvTranspose2d(ngf * mult, int(ngf * mult / 2), + kernel_size=3, stride=2, + padding=1, output_padding=1, + bias=use_bias), + norm_layer(int(ngf * mult / 2)), + nn.ReLU(True)] + model += [nn.ReflectionPad2d(3)] + model += [nn.Conv2d(ngf, output_nc, kernel_size=7, padding=0)] + model += [nn.Tanh()] + + self.model = nn.Sequential(*model) + + def forward(self, input): + return self.model(input) + + +# Define a resnet block +class ResnetBlock(nn.Module): + def __init__(self, dim, padding_type, norm_layer, use_dropout, use_bias): + super(ResnetBlock, self).__init__() + self.conv_block = self.build_conv_block(dim, padding_type, norm_layer, use_dropout, use_bias) + + def build_conv_block(self, dim, padding_type, norm_layer, use_dropout, use_bias): + conv_block = [] + p = 0 + if padding_type == 'reflect': + conv_block += [nn.ReflectionPad2d(1)] + elif padding_type == 'replicate': + conv_block += [nn.ReplicationPad2d(1)] + elif padding_type == 'zero': + p = 1 + else: + raise NotImplementedError('padding [%s] is not implemented' % padding_type) + + conv_block += [nn.Conv2d(dim, dim, kernel_size=3, padding=p, bias=use_bias), + norm_layer(dim), + nn.ReLU(True)] + if use_dropout: + conv_block += [nn.Dropout(0.5)] + + p = 0 + if padding_type == 'reflect': + conv_block += [nn.ReflectionPad2d(1)] + elif padding_type == 'replicate': + conv_block += [nn.ReplicationPad2d(1)] + elif padding_type == 'zero': + p = 1 + else: + raise NotImplementedError('padding [%s] is not implemented' % padding_type) + conv_block += [nn.Conv2d(dim, dim, kernel_size=3, padding=p, bias=use_bias), + norm_layer(dim)] + + return nn.Sequential(*conv_block) + + def forward(self, x): + out = x + self.conv_block(x) + return out + + +# Defines the Unet generator. +# |num_downs|: number of downsamplings in UNet. 
For example, +# if |num_downs| == 7, image of size 128x128 will become of size 1x1 +# at the bottleneck +class UnetGenerator(nn.Module): + def __init__(self, input_nc, output_nc, num_downs, ngf=64, + norm_layer=nn.BatchNorm2d, use_dropout=False): + super(UnetGenerator, self).__init__() + + # construct unet structure + unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=None, norm_layer=norm_layer, innermost=True) + for i in range(num_downs - 5): + unet_block = UnetSkipConnectionBlock(ngf * 8, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer, use_dropout=use_dropout) + unet_block = UnetSkipConnectionBlock(ngf * 4, ngf * 8, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(ngf * 2, ngf * 4, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(ngf, ngf * 2, input_nc=None, submodule=unet_block, norm_layer=norm_layer) + unet_block = UnetSkipConnectionBlock(output_nc, ngf, input_nc=input_nc, submodule=unet_block, outermost=True, norm_layer=norm_layer) + + self.model = unet_block + + def forward(self, input): + return self.model(input) + + +# Defines the submodule with skip connection. +# X -------------------identity---------------------- X +# |-- downsampling -- |submodule| -- upsampling --| +class UnetSkipConnectionBlock(nn.Module): + def __init__(self, outer_nc, inner_nc, input_nc=None, + submodule=None, outermost=False, innermost=False, norm_layer=nn.BatchNorm2d, use_dropout=False): + super(UnetSkipConnectionBlock, self).__init__() + self.outermost = outermost + if type(norm_layer) == functools.partial: + use_bias = norm_layer.func == nn.InstanceNorm2d + else: + use_bias = norm_layer == nn.InstanceNorm2d + if input_nc is None: + input_nc = outer_nc + downconv = nn.Conv2d(input_nc, inner_nc, kernel_size=4, + stride=2, padding=1, bias=use_bias) + downrelu = nn.LeakyReLU(0.2, True) + downnorm = norm_layer(inner_nc) + uprelu = nn.ReLU(True) + upnorm = norm_layer(outer_nc) + + if outermost: + upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, + kernel_size=4, stride=2, + padding=1) + down = [downconv] + up = [uprelu, upconv, nn.Tanh()] + model = down + [submodule] + up + elif innermost: + upconv = nn.ConvTranspose2d(inner_nc, outer_nc, + kernel_size=4, stride=2, + padding=1, bias=use_bias) + down = [downrelu, downconv] + up = [uprelu, upconv, upnorm] + model = down + up + else: + upconv = nn.ConvTranspose2d(inner_nc * 2, outer_nc, + kernel_size=4, stride=2, + padding=1, bias=use_bias) + down = [downrelu, downconv, downnorm] + up = [uprelu, upconv, upnorm] + + if use_dropout: + model = down + [submodule] + up + [nn.Dropout(0.5)] + else: + model = down + [submodule] + up + + self.model = nn.Sequential(*model) + + def forward(self, x): + if self.outermost: + return self.model(x) + else: + return torch.cat([x, self.model(x)], 1) + + +# Defines the PatchGAN discriminator with the specified arguments. 
+class NLayerDiscriminator(nn.Module): + def __init__(self, input_nc, ndf=64, n_layers=3, norm_layer=nn.BatchNorm2d, use_sigmoid=False): + super(NLayerDiscriminator, self).__init__() + if type(norm_layer) == functools.partial: + use_bias = norm_layer.func == nn.InstanceNorm2d + else: + use_bias = norm_layer == nn.InstanceNorm2d + + kw = 4 + padw = 1 + sequence = [ + nn.Conv2d(input_nc, ndf, kernel_size=kw, stride=2, padding=padw), + nn.LeakyReLU(0.2, True) + ] + + nf_mult = 1 + nf_mult_prev = 1 + for n in range(1, n_layers): + nf_mult_prev = nf_mult + nf_mult = min(2**n, 8) + sequence += [ + nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult, + kernel_size=kw, stride=2, padding=padw, bias=use_bias), + norm_layer(ndf * nf_mult), + nn.LeakyReLU(0.2, True) + ] + + nf_mult_prev = nf_mult + nf_mult = min(2**n_layers, 8) + sequence += [ + nn.Conv2d(ndf * nf_mult_prev, ndf * nf_mult, + kernel_size=kw, stride=1, padding=padw, bias=use_bias), + norm_layer(ndf * nf_mult), + nn.LeakyReLU(0.2, True) + ] + + sequence += [nn.Conv2d(ndf * nf_mult, 1, kernel_size=kw, stride=1, padding=padw)] + + if use_sigmoid: + sequence += [nn.Sigmoid()] + + self.model = nn.Sequential(*sequence) + + def forward(self, input): + return self.model(input) + + +class PixelDiscriminator(nn.Module): + def __init__(self, input_nc, ndf=64, norm_layer=nn.BatchNorm2d, use_sigmoid=False): + super(PixelDiscriminator, self).__init__() + if type(norm_layer) == functools.partial: + use_bias = norm_layer.func == nn.InstanceNorm2d + else: + use_bias = norm_layer == nn.InstanceNorm2d + + self.net = [ + nn.Conv2d(input_nc, ndf, kernel_size=1, stride=1, padding=0), + nn.LeakyReLU(0.2, True), + nn.Conv2d(ndf, ndf * 2, kernel_size=1, stride=1, padding=0, bias=use_bias), + norm_layer(ndf * 2), + nn.LeakyReLU(0.2, True), + nn.Conv2d(ndf * 2, 1, kernel_size=1, stride=1, padding=0, bias=use_bias)] + + if use_sigmoid: + self.net.append(nn.Sigmoid()) + + self.net = nn.Sequential(*self.net) + + def forward(self, input): + return self.net(input) diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/options/__init__.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/options/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/options/base_options.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/options/base_options.py new file mode 100644 index 0000000..94801a6 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/options/base_options.py @@ -0,0 +1,138 @@ +import argparse +import os +from autil import util +import torch +import models +import data + + +class BaseOptions(): + def __init__(self): + self.initialized = False + + def initialize(self, parser): + parser.add_argument('--dataroot', required=True, help='path to images (should have subfolders trainA, trainB, valA, valB, etc)') + parser.add_argument('--batch_size', type=int, default=1, help='input batch size') + parser.add_argument('--seq_len', type=int, default=1, help='sequence length (if applicable)') + parser.add_argument('--fineSize', type=int, default=512, help='then crop to this size') + parser.add_argument('--display_winsize', type=int, default=256, help='display window size for both visdom and HTML') + parser.add_argument('--input_nc', type=int, default=3, help='# of input image channels') + parser.add_argument('--output_nc', type=int, default=3, help='# of output image channels') + parser.add_argument('--ngf', type=int, default=64, help='# of 
gen filters in first conv layer') + parser.add_argument('--ndf', type=int, default=64, help='# of discrim filters in first conv layer') + parser.add_argument('--netD', type=str, default='basic', help='selects model to use for netD') + parser.add_argument('--netG', type=str, default='resnet_9blocks', help='selects model to use for netG') + parser.add_argument('--n_layers_D', type=int, default=3, help='only used if netD==n_layers') + parser.add_argument('--gpu_ids', type=str, default='0', help='gpu ids: e.g. 0 0,1,2, 0,2. use -1 for CPU') + parser.add_argument('--name', type=str, default='experiment_name', help='name of the experiment. It decides where to store samples and models') + parser.add_argument('--renderer', type=str, default='no_renderer', help='name of the renderer to load the models from') + parser.add_argument('--fix_renderer', action='store_true', help='renderer is fixed') + parser.add_argument('--dataset_mode', type=str, default='aligned', help='chooses how datasets are loaded. [aligned | multi]') + parser.add_argument('--model', type=str, default='cycle_gan', help='chooses which model to use. cycle_gan, pix2pix, test') + parser.add_argument('--direction', type=str, default='AtoB', help='AtoB or BtoA') + parser.add_argument('--epoch', type=str, default='latest', help='which epoch to load? set to latest to use latest cached model') + parser.add_argument('--load_iter', type=int, default=0, help='which iteration to load? if load_iter > 0, the code will load models by iter_[load_iter]; otherwise, the code will load models by [epoch]') + parser.add_argument('--num_threads', default=4, type=int, help='# threads for loading data') + parser.add_argument('--checkpoints_dir', type=str, default='./checkpoints', help='models are saved here') + parser.add_argument('--norm', type=str, default='instance', help='instance normalization or batch normalization') + parser.add_argument('--serial_batches', action='store_true', help='if true, takes images in order to make batches, otherwise takes them randomly') + parser.add_argument('--no_dropout', action='store_true', help='no dropout for the generator') + parser.add_argument('--max_dataset_size', type=int, default=float("inf"), help='Maximum number of samples allowed per dataset. 
If the dataset directory contains more than max_dataset_size, only a subset is loaded.') + parser.add_argument('--resize_or_crop', type=str, default='resize_and_crop', help='scaling and cropping of images at load time [resize_and_crop|crop|scale_width|scale_width_and_crop|none]') + parser.add_argument('--no_augmentation', action='store_true', help='if specified, no data augmentation') + #parser.add_argument('--init_type', type=str, default='normal', help='network initialization [normal|xavier|kaiming|orthogonal]') + parser.add_argument('--init_type', type=str, default='xavier', help='network initialization [normal|xavier|kaiming|orthogonal]') + parser.add_argument('--init_gain', type=float, default=0.02, help='scaling factor for normal, xavier and orthogonal.') + parser.add_argument('--verbose', action='store_true', help='if specified, print more debugging information') + parser.add_argument('--suffix', default='', type=str, help='customized suffix: opt.name = opt.name + suffix: e.g., {model}_{netG}_size{loadSize}') + parser.add_argument('--tex_dim', type=int, default=256, help='neural texture dimensions') + parser.add_argument('--tex_features_intermediate', type=int, default=16, help='# intermediate neural texture features when using dynamic textures') + parser.add_argument('--tex_features', type=int, default=16, help='# neural texture features') + parser.add_argument('--textureModel', type=str, default='DynamicNeuralTextureAudio', help='texture model') + parser.add_argument('--rendererType', type=str, default='UNET_5_level', help='neural renderer network') + parser.add_argument('--lossType', type=str, default='L1', help='loss type for the final output') + + parser.add_argument('--hierarchicalTex', action='store_true', help='if specified, hierarchical neural textures are used') + + parser.add_argument('--output_audio_expressions', action='store_true', help='if specified, no sh layers are used') + + parser.add_argument('--erosionFactor', type=float, default=1.0, help='scaling factor for erosion of the background.') + + parser.add_argument('--audio_window_size', type=float, default=16, help='audio window size = #mel feature bins') + + parser.add_argument('--look_ahead', action='store_true', help='use audio look-ahead (future frames) in the input sequence') + + parser.add_argument('--cached_images', action='store_true', help='cache images in numpy format') + + self.initialized = True + return parser + + def gather_options(self): + # initialize parser with basic options + if not self.initialized: + parser = argparse.ArgumentParser( + formatter_class=argparse.ArgumentDefaultsHelpFormatter) + parser = self.initialize(parser) + + # get the basic options + opt, _ = parser.parse_known_args() + + # modify model-related parser options + model_name = opt.model + model_option_setter = models.get_option_setter(model_name) + parser = model_option_setter(parser, self.isTrain) + opt, _ = parser.parse_known_args() # parse again with the new defaults + + # modify datasets-related parser options + dataset_name = opt.dataset_mode + dataset_option_setter = data.get_option_setter(dataset_name) + parser = dataset_option_setter(parser, self.isTrain) + + self.parser = parser + + return parser.parse_args() + + def print_options(self, opt): + message = '' + message += '----------------- Options ---------------\n' + for k, v in sorted(vars(opt).items()): + comment = '' + default = self.parser.get_default(k) + if v != default: + comment = '\t[default: %s]' % str(default) + message += '{:>25}: {:<30}{}\n'.format(str(k), str(v), comment) + message 
+= '----------------- End -------------------' + print(message) + + # save to the disk + expr_dir = os.path.join(opt.checkpoints_dir, opt.name) + util.mkdirs(expr_dir) + file_name = os.path.join(expr_dir, 'opt.txt') + with open(file_name, 'wt') as opt_file: + opt_file.write(message) + opt_file.write('\n') + + def parse(self): + + opt = self.gather_options() + opt.isTrain = self.isTrain # train or test + + # process opt.suffix + if opt.suffix: + suffix = ('_' + opt.suffix.format(**vars(opt))) if opt.suffix != '' else '' + opt.name = opt.name + suffix + + self.print_options(opt) + + # set gpu ids + str_ids = opt.gpu_ids.split(',') + opt.gpu_ids = [] + for str_id in str_ids: + id = int(str_id) + if id >= 0: + opt.gpu_ids.append(id) + if len(opt.gpu_ids) > 0: + torch.cuda.set_device(opt.gpu_ids[0]) + + self.opt = opt + return self.opt diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/options/test_options.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/options/test_options.py new file mode 100644 index 0000000..d21da35 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/options/test_options.py @@ -0,0 +1,27 @@ +from .base_options import BaseOptions + + +class TestOptions(BaseOptions): + def initialize(self, parser): + parser = BaseOptions.initialize(self, parser) + parser.add_argument('--ntest', type=int, default=float("inf"), help='# of test examples.') + parser.add_argument('--results_dir', type=str, default='./results/', help='saves results here.') + parser.add_argument('--aspect_ratio', type=float, default=1.0, help='aspect ratio of result images') + parser.add_argument('--phase', type=str, default='test', help='train, val, test, etc') + # Dropout and BatchNorm have different behavior during training and test.
+ parser.add_argument('--eval', action='store_true', help='use eval mode during test time.') + parser.add_argument('--num_test', type=int, default=50, help='how many test images to run') + + parser.add_argument('--write_no_images', action='store_true', help='do not write output images') + + parser.add_argument('--write_video', action='store_true', help='write video') + parser.add_argument('--video_fps', type=float, default=25.0, help='video fps') + + + parser.add_argument('--source_dir', type=str, default='./datasets/', help='loads source files (expressions, audio, uvs).') + + parser.set_defaults(model='test') + # To avoid cropping, the loadSize should be the same as fineSize + parser.set_defaults(loadSize=parser.get_default('fineSize')) + self.isTrain = False + return parser diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/options/train_options.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/options/train_options.py new file mode 100644 index 0000000..c91d50d --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/options/train_options.py @@ -0,0 +1,37 @@ +from .base_options import BaseOptions + + +class TrainOptions(BaseOptions): + def initialize(self, parser): + parser = BaseOptions.initialize(self, parser) + parser.add_argument('--display_freq', type=int, default=400, help='frequency of showing training results on screen') + parser.add_argument('--display_ncols', type=int, default=4, help='if positive, display all images in a single visdom web panel with certain number of images per row.') + parser.add_argument('--display_id', type=int, default=1, help='window id of the web display') + parser.add_argument('--display_server', type=str, default="http://localhost", help='visdom server of the web display') + parser.add_argument('--display_env', type=str, default='main', help='visdom display environment name (default is "main")') + parser.add_argument('--display_port', type=int, default=8097, help='visdom port of the web display') + + parser.add_argument('--compute_val', action='store_true', help='compute validation') + parser.add_argument('--input_noise_augmentation', action='store_true', help='add input noise') + + parser.add_argument('--update_html_freq', type=int, default=1000, help='frequency of saving training results to html') + parser.add_argument('--print_freq', type=int, default=500, help='frequency of showing training results on console') + parser.add_argument('--save_latest_freq', type=int, default=5000, help='frequency of saving the latest results') + parser.add_argument('--save_epoch_freq', type=int, default=5, help='frequency of saving checkpoints at the end of epochs') + parser.add_argument('--save_by_iter', action='store_true', help='whether to save the model by iteration') + + parser.add_argument('--continue_train', action='store_true', help='continue training: load the latest model') + parser.add_argument('--epoch_count', type=int, default=1, help='the starting epoch count, we save the model by <epoch_count>, <epoch_count>+<save_latest_freq>, ...') + parser.add_argument('--phase', type=str, default='train', help='train, val, test, etc') + parser.add_argument('--niter', type=int, default=100, help='# of iter at starting learning rate') + parser.add_argument('--niter_decay', type=int, default=100, help='# of iter to linearly decay learning rate to zero') + parser.add_argument('--beta1', type=float, default=0.5, help='momentum term of adam') + parser.add_argument('--lr', type=float, default=0.0002, help='initial learning rate for adam') + 
parser.add_argument('--no_lsgan', action='store_true', help='do *not* use least square GAN, if false, use vanilla GAN') + parser.add_argument('--pool_size', type=int, default=50, help='the size of image buffer that stores previously generated images') + parser.add_argument('--no_html', action='store_true', help='do not save intermediate training results to [opt.checkpoints_dir]/[opt.name]/web/') + parser.add_argument('--lr_policy', type=str, default='lambda', help='learning rate policy: lambda|step|plateau|cosine') + parser.add_argument('--lr_decay_iters', type=int, default=50, help='multiply by a gamma every lr_decay_iters iterations') + + self.isTrain = True + return parser diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/options/transfer_options.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/options/transfer_options.py new file mode 100644 index 0000000..13b78a0 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/options/transfer_options.py @@ -0,0 +1,42 @@ +from .base_options import BaseOptions + + +class TransferOptions(BaseOptions): + def initialize(self, parser): + parser = BaseOptions.initialize(self, parser) + parser.add_argument('--ntest', type=int, default=float("inf"), help='# of test examples.') + parser.add_argument('--results_dir', type=str, default='./results/', help='saves results here.') + parser.add_argument('--aspect_ratio', type=float, default=1.0, help='aspect ratio of result images') + parser.add_argument('--phase', type=str, default='test', help='train, val, test, etc') + # Dropout and BatchNorm have different behavior during training and test. + parser.add_argument('--eval', action='store_true', help='use eval mode during test time.') + parser.add_argument('--num_test', type=int, default=50, help='how many test images to run') + + parser.add_argument('--write_no_images', action='store_true', help='do not write output images') + + + parser.add_argument('--source_dir', type=str, default='./datasets/', help='loads source files (expressions, audio, uvs).') + + + + parser.add_argument('--source_actor', type=str, default='', help='source actor directory') + parser.add_argument('--target_actor', type=str, default='', help='target actor directory') + + + parser.set_defaults(model='test') + # To avoid cropping, the loadSize should be the same as fineSize + parser.set_defaults(loadSize=parser.get_default('fineSize')) + self.isTrain = False + return parser + +class Audio2ExprOptions(TransferOptions): + def initialize(self, parser): + parser = TransferOptions.initialize(self, parser) + parser.add_argument('--use_mapping', action='store_true', help='use mapping function.') + parser.add_argument('--mapping_path', type=str, default='', help='path to mapping function.') + parser.add_argument('--out_dir', type=str, default='', help='path to output directory.') + parser.set_defaults(model='test') + # To avoid cropping, the loadSize should be the same as fineSize + parser.set_defaults(loadSize=parser.get_default('fineSize')) + self.isTrain = False + return parser \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/transfer.py b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/transfer.py new file mode 100644 index 0000000..99c3711 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/transfer.py @@ -0,0 +1,257 @@ +import os +import os.path +from options.transfer_options import TransferOptions +from data import CreateDataLoader +from 
data.face_dataset import FaceDataset +from data.audio_dataset import AudioDataset +from models import create_model +from util.visualizer import save_images +from util import html +import torch +import torch.nn as nn +import torchvision.transforms as transforms +import numpy as np +from PIL import Image +import time +import random +import progressbar +import copy +from shutil import copyfile + +from BaselModel.basel_model import * + + +def load_model(opt): + opt.output_audio_expressions = True + opt.nTrainObjects = 116 + + print('#train objects = %d' % opt.nTrainObjects) + + print('>>> create model <<<') + model = create_model(opt) + print('>>> setup model <<<') + model.setup(opt) + + return model + +def load_target_sequence(opt): + opt_target = copy.copy(opt) # create a clone + opt_target.dataroot = opt.target_actor # overwrite root directory + opt_target.dataset_mode = 'face' + opt_target.phase = 'train' + dataset_target = FaceDataset() + dataset_target.initialize(opt_target) + + dataloader = torch.utils.data.DataLoader( + dataset_target, + batch_size=opt.batch_size, + shuffle=not opt.serial_batches, + num_workers=int(opt.num_threads)) + + return dataset_target, dataloader + + +def load_source_sequence(opt): + opt_source = copy.copy(opt) # create a clone + opt_source.dataroot = opt.source_actor # overwrite root directory + print(opt_source.dataroot) + opt_source.dataset_mode = 'audio' + opt_source.phase = 'train' + + dataset_source = AudioDataset() + + dataset_source.initialize(opt_source) + + dataloader = torch.utils.data.DataLoader( + dataset_source, + batch_size=opt.batch_size, + shuffle=not opt.serial_batches, + num_workers=int(opt.num_threads)) + + return dataset_source, dataloader + + +if __name__ == '__main__': + # read options + opt = TransferOptions().parse() + + target_name = opt.target_actor.split("/")[-1] + + # hard-code some parameters for test + opt.num_threads = 1 # test code only supports num_threads = 1 + opt.batch_size = 1 # test code only supports batch_size = 1 + opt.serial_batches = True # no shuffle + opt.no_augmentation = True # no flip + opt.display_id = -1 # no visdom display + + + # load model + model = load_model(opt) + print('model version:', opt.name) + + + # # load face model + # morphable_model = MorphableModel() + # mask = morphable_model.LoadMask() + # mask = mask + 0.1 * torch.ones_like(mask) + + + # read target sequence + dataset_target, data_loader_target = load_target_sequence(opt) + dataset_target_size = len(dataset_target) + print('#target_actor frames = %d' % dataset_target_size) + + ################################## + ####### create mapping ####### + ################################## + base_path = '../..' 
#'/home/alberto/NeuralVoicePuppetry' + dataset_path = '../../datasets' + #dataset_path = '/home/alberto/NeuralVoicePuppetry/datasets' + os.makedirs(base_path+'/mappings/'+opt.name, exist_ok=True) + mapping_fn = base_path+'/mappings/'+opt.name+'/'+'mapping_'+target_name + print(mapping_fn) + #not_exists = True + not_exists = not os.path.exists(mapping_fn+'.npy') + if not_exists: + # collect data + print('collect data') + audio_expressions = None + gt_expressions = None + with progressbar.ProgressBar(max_value=len(dataset_target)) as bar: + for i, data in enumerate(data_loader_target): + bar.update(i) + model.set_input(data) + model.test() + + ae = model.fake_expressions.data[:,:,0] + if type(audio_expressions) == type(None): + audio_expressions = ae + e = model.expressions.data + gt_expressions = e + else: + audio_expressions = torch.cat([audio_expressions,ae],dim=0) + e = model.expressions.data + gt_expressions = torch.cat([gt_expressions,e],dim=0) + + # solve for mapping + print('solve for mapping') + optimize_in_parameter_space = True #False + if optimize_in_parameter_space: +# A = audio_expressions +# B = gt_expressions +# # solve lstsq ||AX - B|| +# X, _ = torch.gels(B, A, out=None) +# #X, _ = torch.lstsq(B, A) # requires pytorch 1.2 +# X = X[0:A.shape[1],:] +# mapping = X.t() + + # use gradient descent method + n = audio_expressions.shape[0] + print(n) + subspace_dim = 32 + + # TODO: patch + n_expr = 53 + + X = torch.nn.Parameter(torch.randn(n_expr, subspace_dim, requires_grad=True).cuda()) + optimizer = torch.optim.Adam([X], lr=0.01) + lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=30, gamma=0.1) + num_epochs = 90 + random_range = [k for k in range(0,n)] + with progressbar.ProgressBar(max_value=num_epochs) as bar: + for ep in range(0,num_epochs): + bar.update(ep) + random.shuffle(random_range) + for j in random_range: + expressions = gt_expressions[j] + fake_expressions = 10.0 * torch.matmul(X, audio_expressions[j]) + diff_expression = fake_expressions - expressions + loss = torch.mean(diff_expression * diff_expression) # L2 + optimizer.zero_grad() + loss.backward() + optimizer.step() + lr_scheduler.step() + mapping = X.data + +# else: # optimize in vertex space +# # use gradient descent method +# n = audio_expressions.shape[0] +# subspace_dim = 32 +# X = torch.nn.Parameter(torch.randn(N_EXPRESSIONS, subspace_dim, requires_grad=True).cuda()) +# optimizer = torch.optim.Adam([X], lr=0.01) +# lr_scheduler = torch.optim.lr_scheduler.StepLR(optimizer, step_size=30, gamma=0.1) +# num_epochs = 90 +# random_range = [k for k in range(0,n)] +# with progressbar.ProgressBar(max_value=num_epochs) as bar: +# for ep in range(0,num_epochs): +# bar.update(ep) +# random.shuffle(random_range) +# for j in random_range: +# expressions = gt_expressions[j] +# fake_expressions = 10.0 * torch.matmul(X, audio_expressions[j]) +# diff_expression = fake_expressions - expressions +# diff_vertices = torch.matmul(morphable_model.expression_basis, diff_expression) +# #loss = torch.sqrt(torch.mean(mask * diff_vertices * diff_vertices)) # RMS +# loss = torch.mean(mask * diff_vertices * diff_vertices) # L2 +# # +# optimizer.zero_grad() +# loss.backward() +# optimizer.step() +# lr_scheduler.step() +# +# mapping = X.data + + map_cpu = mapping.data.cpu().numpy() + print(map_cpu.shape) + file_out=open(mapping_fn+'.txt', 'w') + np.savetxt(file_out, map_cpu, delimiter=' ') + file_out.close() + np.save(mapping_fn+'.npy', map_cpu) + else: + # load mapping from file + map_cpu = np.load(mapping_fn+'.npy') + 
mapping = torch.tensor(map_cpu.astype(np.float32)).cuda() + print('loaded mapping from file', mapping.shape) + + # ############# + # map_cpu = np.load(base_path + '/mappings/' + opt.name + '/' + 'mapping_Close_355_9105.npy') + # mapping = torch.tensor(map_cpu.astype(np.float32)).cuda() + # print('loaded mapping from file', mapping.shape) + # ############# + + # process source sequence (opt.source_actor) + source_actors = [opt.source_actor] + + os.makedirs(dataset_path+'/TRANSFERS/'+opt.name, exist_ok=True) + list_transfer = open(dataset_path+'/TRANSFERS/'+opt.name+'/list_transfer.txt', "a") + + target_actor_offset = 0 # default + expression_multiplier = 1.0 # default + + if target_actor_offset != 0.0: + target_name = target_name + '--offset' + if expression_multiplier != 1.0: + target_name = target_name + '-X' + + for source_actor in source_actors: + opt.source_actor = source_actor + source_name = opt.source_actor.split("/")[-1] + # read source sequence + dataset_source, data_loader_source = load_source_sequence(opt) + dataset_source_size = len(dataset_source) + print('#source_actor frames = %d' % dataset_source_size) + list_transfer.write(source_name+'--'+target_name+'\n') + out_dir = dataset_path+'/TRANSFERS/'+opt.name+'/'+source_name+'--'+target_name+'/expressions' + os.makedirs(out_dir, exist_ok=True) + with progressbar.ProgressBar(max_value=len(dataset_source)) as bar: + for i, data in enumerate(data_loader_source): + bar.update(i) + model.set_input(data) + model.test() + audio_expression = model.fake_expressions.data[0,:,0] + expression = expression_multiplier * 10.0 * torch.matmul(mapping, audio_expression) + expression = expression[None,:] + np.save(os.path.join(out_dir, f'expr_{i}.npy'), expression.cpu().numpy()) + + list_transfer.close() + exit() diff --git a/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/transfer.sh b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/transfer.sh new file mode 100644 index 0000000..2726e84 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/Audio2ExpressionNet/transfer.sh @@ -0,0 +1,57 @@ +set -ex +# . 
transfer.sh & +GPUID=0 + +###################################################### +################## SPECIFY MODEL ################## +###################################################### + +## audio2ExpressionsAttentionTMP4-estimatorAttention-SL8-BS16-ARD_ZDF-multi_face_audio_eq_tmp_cached-RMS-20191105-115332-look_ahead # <<<<<<<<<<<<< +DATASETS_DIR=/home/alberto/NeuralVoicePuppetry/datasets +OBJECT=ARD_ZDF +LR=0.00001 +N_ITER=150 +N_ITER_LR_DECAY=50 +RENDERER=$OBJECT +EROSION=1.0 +BATCH_SIZE=16 +MODEL=audio2ExpressionsAttentionTMP4 +RENDERER_TYPE=estimatorAttention +DATASET_MODE=multi_face_audio_eq_tmp_cached +LOSS=RMS +SEQ_LEN=8 +DATE_WITH_TIME=20191105-115332 +NAME=$MODEL-$RENDERER_TYPE-SL$SEQ_LEN-BS$BATCH_SIZE-$OBJECT-$DATASET_MODE-$LOSS-$DATE_WITH_TIME-look_ahead + +# --look_ahead +EPOCH=latest + +############################################################### +###################### SPECIFY TARGET ###################### +############################################################### + +# target actors +#SOURCE_ACTOR_LIST=(/home/alberto/NeuralVoicePuppetry/datasets/External/Alberto_videos_uno /home/alberto/NeuralVoicePuppetry/datasets/External/Alberto_videos_due /home/alberto/NeuralVoicePuppetry/datasets/External/Alberto_videos_tre) +SOURCE_ACTOR_LIST=(/home/alberto/NeuralVoicePuppetry/datasets/External/Clara_audios_one /home/alberto/NeuralVoicePuppetry/datasets/External/Clara_audios_two /home/alberto/NeuralVoicePuppetry/datasets/External/Clara_audios_three) + +#TARGET_ACTOR_LIST[1]=/home/alberto/NeuralVoicePuppetry/datasets/External/Youtube_Russian_guy +TARGET_ACTOR_LIST[1]=/home/alberto/NeuralVoicePuppetry/datasets/SRF_anchor_short/Halbtotale_355_9415 + + +rm -f ./datasets/TRANSFERS/$NAME/list_transfer.txt +for TARGET_ACTOR in "${TARGET_ACTOR_LIST[@]}" +do + echo $TARGET_ACTOR + + for SOURCE_ACTOR in "${SOURCE_ACTOR_LIST[@]}" + do + echo $SOURCE_ACTOR + # --look_ahead + python transfer.py --look_ahead --seq_len $SEQ_LEN --source_actor $SOURCE_ACTOR --target_actor $TARGET_ACTOR --write_no_images --name $NAME --erosionFactor $EROSION --epoch $EPOCH --display_winsize 512 --rendererType $RENDERER_TYPE --lossType $LOSS --dataroot $DATASETS_DIR/$OBJECT --model $MODEL --netG unet_256 --dataset_mode $DATASET_MODE --norm instance --gpu_ids $GPUID + done +done + + +############################################################### +############################################################### +############################################################### \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/AutoregressivePredictiveCoding/bs32-rhl3-rhs512-rd0-adam-res-ts3.pt b/motion-gan-pipeline/preprocessing/third/AutoregressivePredictiveCoding/bs32-rhl3-rhs512-rd0-adam-res-ts3.pt new file mode 100644 index 0000000..244d060 Binary files /dev/null and b/motion-gan-pipeline/preprocessing/third/AutoregressivePredictiveCoding/bs32-rhl3-rhs512-rd0-adam-res-ts3.pt differ diff --git a/motion-gan-pipeline/preprocessing/third/DECA/Doc/images/DECA_evaluation_github.png b/motion-gan-pipeline/preprocessing/third/DECA/Doc/images/DECA_evaluation_github.png new file mode 100644 index 0000000..a690d88 Binary files /dev/null and b/motion-gan-pipeline/preprocessing/third/DECA/Doc/images/DECA_evaluation_github.png differ diff --git a/motion-gan-pipeline/preprocessing/third/DECA/Doc/images/DECA_performance.png b/motion-gan-pipeline/preprocessing/third/DECA/Doc/images/DECA_performance.png new file mode 100755 index 0000000..1d483c6 Binary files /dev/null and 
b/motion-gan-pipeline/preprocessing/third/DECA/Doc/images/DECA_performance.png differ diff --git a/motion-gan-pipeline/preprocessing/third/DECA/Doc/images/soubhik.gif b/motion-gan-pipeline/preprocessing/third/DECA/Doc/images/soubhik.gif new file mode 100644 index 0000000..9cee13f Binary files /dev/null and b/motion-gan-pipeline/preprocessing/third/DECA/Doc/images/soubhik.gif differ diff --git a/motion-gan-pipeline/preprocessing/third/DECA/README.md b/motion-gan-pipeline/preprocessing/third/DECA/README.md new file mode 100755 index 0000000..359dce6 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/DECA/README.md @@ -0,0 +1,125 @@ +# DECA: Detailed Expression Capture and Animation + +

+<p align="center">[teaser images: input image, aligned reconstruction, animation with various poses & expressions]</p>
+ +This is the official PyTorch implementation of DECA. + +DECA reconstructs a 3D head model with detailed facial geometry from a single input image. The resulting 3D head model can be easily animated. Please refer to the [arXiv paper](https://arxiv.org/abs/2012.04012) for more details. + +The main features: + +* **Reconstruction:** produces head pose, shape, detailed face geometry, and lighting information from a single image. +* **Animation:** animates the face with realistic wrinkle deformations. +* **Robustness:** tested on facial images in unconstrained conditions. Our method is robust to various poses, illuminations and occlusions. +* **Accuracy:** state-of-the-art 3D face shape reconstruction on the [NoW Challenge](https://ringnet.is.tue.mpg.de/challenge) benchmark dataset. + +## Getting Started +Clone the repo: + ```bash + git clone https://github.com/YadiraF/DECA + cd DECA + ``` + +### Requirements +* Python 3.7 (numpy, skimage, scipy, opencv) +* PyTorch >= 1.6 (pytorch3d) +* face-alignment (optional, for face detection) + You can run + ```bash + pip install -r requirements.txt + ``` + or use a virtual environment by running + ```bash + bash install_pip.sh + ``` + Then follow the instructions to install [pytorch3d](https://github.com/facebookresearch/pytorch3d/blob/master/INSTALL.md). + +### Usage +1. Prepare data + a. download the [FLAME model](https://flame.is.tue.mpg.de/downloads), choose **FLAME 2020** and unzip it, copy 'generic_model.pkl' into ./data + b. download the [DECA trained model](https://drive.google.com/file/d/1rp8kdyLPvErw2dTmqtjISRVvQLj6Yzje/view?usp=sharing), and put it in ./data (**no unzip required**) + c. (Optional) follow the instructions for the [Albedo model](https://github.com/TimoBolkart/BFM_to_FLAME) to get 'FLAME_albedo_from_BFM.npz', put it into ./data + +2. Run demos + a. **reconstruction** + ```bash + python demos/demo_reconstruct.py -i TestSamples/examples --saveDepth True --saveObj True + ``` + to visualize the predicted 2D landmarks, 3D landmarks (red means non-visible points), coarse geometry, detailed geometry, and depth. +


 You can also generate an obj file (which can be opened with Meshlab) that includes the texture extracted from the input image. + + Please run `python demos/demo_reconstruct.py --help` for more details. + + b. **expression transfer** + ```bash + python demos/demo_transfer.py + ``` + Given an image, you can reconstruct its 3D face, then animate it by transferring expressions from other images. + Opening the detailed mesh obj file in Meshlab, you will see something like this: +


 (Thanks to Soubhik for allowing me to use his face ^_^) + + Note that you need to set '--useTex True' to get the full texture. + + c. **reposing and animation** (used to produce the [teaser gif](https://github.com/YadiraF/DECA/results/teaser.gif)) + ```bash + python demos/demo_teaser.py + ``` + + More demos and training code coming soon. + +## Evaluation +DECA (ours) achieves a 9% lower mean shape reconstruction error on the [NoW Challenge](https://ringnet.is.tue.mpg.de/challenge) dataset compared to the previous state-of-the-art method. +The left figure compares the cumulative error of our approach and other recent methods (RingNet and Deng et al. have nearly identical performance, so their curves overlap each other). Here we use point-to-surface distance as the error metric, following the NoW Challenge. +

+<p align="center">[figure: cumulative point-to-surface error comparison on the NoW Challenge]</p>
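+
+Point-to-surface distance is simple to compute once a predicted mesh and a ground-truth scan have been rigidly aligned (as the NoW protocol requires). The snippet below is only an illustrative sketch, not the official DECA or NoW evaluation code; `point_to_surface_errors`, the file name, and the threshold range are placeholder assumptions, and it relies on the `trimesh` library:
+
+```python
+import numpy as np
+import trimesh
+
+def point_to_surface_errors(pred_mesh_path, scan_points):
+    """Distance from each ground-truth scan point to the predicted mesh surface."""
+    mesh = trimesh.load(pred_mesh_path, process=False)
+    # For each query point, closest_point returns the nearest point lying on any
+    # triangle of the mesh, the Euclidean distance to it, and the triangle id.
+    _, distances, _ = trimesh.proximity.closest_point(mesh, scan_points)
+    return distances
+
+# Cumulative error curve: fraction of scan points within each threshold,
+# e.g. thresholds from 0 to 7 in the scan's units (here assumed millimetres):
+# errors = point_to_surface_errors('predicted_head.obj', scan_points)
+# curve = [(t, float((errors <= t).mean())) for t in np.linspace(0.0, 7.0, 50)]
+```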

+ +For more details of the evaluation, please check our [arXiv paper](https://arxiv.org/abs/2012.04012). + +## Citation +If you find our work useful in your research, please consider citing: +``` +@inproceedings{deca2020, + title={Learning an Animatable Detailed {3D} Face Model from In-The-Wild Images}, + author={Feng, Yao and Feng, Haiwen and Black, Michael J. and Bolkart, Timo}, + booktitle = {arxiv}, + month = {Dec}, + year = {2020} +} +``` + + + +## License +This code and model are available for non-commercial scientific research purposes as defined in the [LICENSE](https://github.com/YadiraF/DECA/blob/master/LICENSE) file. +By downloading and using the code and model you agree to the terms in the [LICENSE](https://github.com/YadiraF/DECA/blob/master/LICENSE). + +## Acknowledgements +For functions or scripts that are based on external sources, we acknowledge the origin individually in each file. +Here are some great resources we benefit from: +- [FLAME_PyTorch](https://github.com/soubhiksanyal/FLAME_PyTorch) and [TF_FLAME](https://github.com/TimoBolkart/TF_FLAME) for the FLAME model +- [Pytorch3D](https://pytorch3d.org/), [neural_renderer](https://github.com/daniilidis-group/neural_renderer), [SoftRas](https://github.com/ShichenLiu/SoftRas) for rendering +- [kornia](https://github.com/kornia/kornia) for image/rotation processing +- [face-alignment](https://github.com/1adrianb/face-alignment) for cropping + +We would also like to thank other recent public 3D face reconstruction works that allow us to easily perform quantitative and qualitative comparisons :) +[RingNet](https://github.com/soubhiksanyal/RingNet), +[Deep3DFaceReconstruction](https://github.com/microsoft/Deep3DFaceReconstruction/blob/master/renderer/rasterize_triangles.py), +[Nonlinear_Face_3DMM](https://github.com/tranluan/Nonlinear_Face_3DMM), +[3DDFA-v2](https://github.com/cleardusk/3DDFA_V2), +[extreme_3d_faces](https://github.com/anhttran/extreme_3d_faces), +[facescape](https://github.com/zhuhao-nju/facescape) + diff --git a/motion-gan-pipeline/preprocessing/third/DECA/data/FLAME_albedo_from_BFM.npz b/motion-gan-pipeline/preprocessing/third/DECA/data/FLAME_albedo_from_BFM.npz new file mode 100644 index 0000000..72066bf --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/DECA/data/FLAME_albedo_from_BFM.npz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:07cefceafab36150b458f138dd9fc001488454be889fa78ce08df5de7f5753b4 +size 1258291694 diff --git a/motion-gan-pipeline/preprocessing/third/DECA/data/deca_model.tar b/motion-gan-pipeline/preprocessing/third/DECA/data/deca_model.tar new file mode 100644 index 0000000..c04160f --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/DECA/data/deca_model.tar @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e714ed293054cba5eea9c96bd3b6b57880074cd84b3fd00d606cbaf0bee7c5c2 +size 434142943 diff --git a/motion-gan-pipeline/preprocessing/third/DECA/data/fixed_displacement_256.npy b/motion-gan-pipeline/preprocessing/third/DECA/data/fixed_displacement_256.npy new file mode 100755 index 0000000..a13befd --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/DECA/data/fixed_displacement_256.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:296d01113d67bdaace6f6fe741f7d855e58dc0707f0bb113758520ffa5d8cb93 +size 524416 diff --git a/motion-gan-pipeline/preprocessing/third/DECA/data/generic_model.pkl b/motion-gan-pipeline/preprocessing/third/DECA/data/generic_model.pkl new file mode 100644 index
0000000..a1a71b0 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/DECA/data/generic_model.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:efcd14cc4a69f3a3d9af8ded80146b5b6b50df3bd74cf69108213b144eba725b +size 53023716 diff --git a/motion-gan-pipeline/preprocessing/third/DECA/data/head_template.obj b/motion-gan-pipeline/preprocessing/third/DECA/data/head_template.obj new file mode 100755 index 0000000..49c83e8 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/DECA/data/head_template.obj @@ -0,0 +1,20122 @@ +# Blender v2.80 (sub 75) OBJ File: 'flame_new_uv_map.blend' +# www.blender.org +mtllib template.mtl +v 0.061998 1.493503 -0.027215 +v 0.066005 1.492495 -0.026312 +v 0.066500 1.493900 -0.026200 +v 0.062307 1.494799 -0.027008 +v 0.075800 1.494613 -0.035711 +v 0.075177 1.497973 -0.032283 +v 0.074438 1.500713 -0.032211 +v 0.074800 1.500600 -0.034800 +v 0.028307 1.453590 -0.092080 +v 0.013146 1.453750 -0.096146 +v 0.013086 1.443515 -0.093653 +v 0.026940 1.441069 -0.088483 +v 0.044034 1.418873 -0.070397 +v 0.039796 1.426214 -0.075181 +v 0.036540 1.415001 -0.078651 +v 0.044437 1.411710 -0.070237 +v 0.039024 1.550958 0.053657 +v 0.046783 1.549456 0.048912 +v 0.044954 1.543563 0.051419 +v -0.061998 1.493503 -0.027215 +v -0.066005 1.492495 -0.026312 +v -0.066500 1.493900 -0.026200 +v -0.062307 1.494799 -0.027008 +v -0.075800 1.494613 -0.035711 +v -0.075177 1.497973 -0.032283 +v -0.074438 1.500713 -0.032211 +v -0.074800 1.500600 -0.034800 +v 0.037682 1.544673 0.054847 +v 0.092919 1.516004 -0.048998 +v 0.093182 1.513297 -0.049973 +v 0.093400 1.511900 -0.048200 +v 0.093071 1.514299 -0.047313 +v 0.089794 1.497809 -0.046670 +v 0.089310 1.505389 -0.048436 +v 0.090352 1.508011 -0.049430 +v 0.090705 1.505796 -0.048534 +v 0.090287 1.486997 -0.043418 +v 0.089308 1.487702 -0.042889 +v 0.090700 1.492300 -0.044900 +v 0.091687 1.491903 -0.045704 +v 0.075298 1.468043 -0.021340 +v 0.078140 1.470212 -0.022933 +v 0.078493 1.467790 -0.024400 +v 0.073913 1.466336 -0.023090 +v 0.070914 1.467491 -0.024294 +v 0.075432 1.466828 -0.028178 +v 0.075074 1.469686 -0.029129 +v 0.071489 1.469689 -0.026106 +v 0.086489 1.526078 -0.035111 +v 0.086600 1.523100 -0.034700 +v 0.084675 1.522306 -0.032321 +v 0.084516 1.525303 -0.032395 +v 0.076847 1.490511 -0.045292 +v 0.075461 1.484031 -0.043370 +v 0.081300 1.489074 -0.045823 +v 0.081298 1.491995 -0.046116 +v 0.085506 1.516107 -0.041696 +v 0.086614 1.516897 -0.045188 +v 0.087200 1.514700 -0.046300 +v 0.086210 1.513201 -0.043498 +v 0.089406 1.517594 -0.050088 +v 0.089835 1.514599 -0.051186 +v 0.073155 1.476985 -0.019715 +v 0.071768 1.480608 -0.019054 +v 0.072296 1.482197 -0.020109 +v 0.073056 1.481128 -0.021346 +v 0.073953 1.494303 -0.032675 +v 0.075096 1.498497 -0.031088 +v 0.086495 1.476758 -0.038243 +v 0.086462 1.478693 -0.037171 +v 0.088511 1.482595 -0.040600 +v 0.088684 1.481204 -0.041805 +v 0.083800 1.481300 -0.034800 +v 0.086100 1.483800 -0.038300 +v 0.087015 1.484403 -0.040188 +v 0.085989 1.482300 -0.038105 +v 0.088031 1.519317 -0.041792 +v 0.085861 1.521559 -0.037113 +v 0.086699 1.521126 -0.037101 +v 0.088900 1.518700 -0.041800 +v 0.082000 1.504600 -0.045500 +v 0.080323 1.509694 -0.043474 +v 0.079604 1.508792 -0.043485 +v 0.081297 1.504301 -0.045314 +v 0.089100 1.515800 -0.048100 +v 0.089639 1.513104 -0.049003 +v 0.087992 1.512000 -0.047407 +v 0.075500 1.513617 -0.029983 +v 0.076118 1.514593 -0.031298 +v 0.076609 1.513005 -0.031696 +v 0.076371 1.511694 -0.030520 +v 0.078164 1.466576 -0.027814 +v 0.073096 1.475727 -0.036776 +v 
0.074801 1.476562 -0.038030 +v 0.077596 1.480667 -0.040850 +v 0.076407 1.481515 -0.041493 +v 0.070618 1.469515 -0.019925 +v 0.073800 1.469900 -0.019600 +v 0.074701 1.468685 -0.020488 +v 0.071600 1.467919 -0.021209 +v 0.078425 1.518098 -0.031667 +v 0.080957 1.517503 -0.033837 +v 0.079092 1.516506 -0.033011 +v 0.077368 1.516811 -0.031317 +v 0.088500 1.503500 -0.046500 +v 0.087000 1.510300 -0.044900 +v 0.089148 1.494204 -0.040272 +v 0.089000 1.494300 -0.043500 +v 0.088538 1.490188 -0.041897 +v -0.028307 1.453590 -0.092080 +v -0.013146 1.453750 -0.096146 +v -0.013086 1.443515 -0.093653 +v -0.026940 1.441069 -0.088483 +v 0.088564 1.490214 -0.038795 +v 0.088334 1.502483 -0.040508 +v 0.088447 1.502416 -0.044192 +v 0.090983 1.511396 -0.046991 +v 0.091428 1.505198 -0.047205 +v 0.090384 1.512000 -0.047798 +v 0.077544 1.512600 -0.041093 +v 0.077400 1.515300 -0.044900 +v 0.077651 1.508587 -0.046651 +v 0.078383 1.510012 -0.042574 +v 0.072487 1.486189 -0.020399 +v 0.073652 1.488310 -0.021518 +v 0.072785 1.486082 -0.022770 +v 0.072332 1.483812 -0.022359 +v 0.078096 1.478882 -0.039421 +v 0.075400 1.475700 -0.037100 +v 0.075785 1.475417 -0.036252 +v 0.078507 1.477345 -0.037814 +v 0.083100 1.518700 -0.036200 +v 0.083855 1.513511 -0.036581 +v 0.082100 1.513505 -0.035117 +v 0.079400 1.482900 -0.027100 +v 0.080206 1.484210 -0.028695 +v 0.081500 1.481100 -0.030500 +v 0.080687 1.479097 -0.029207 +v 0.082571 1.470333 -0.032585 +v 0.080240 1.469896 -0.033585 +v 0.077900 1.467889 -0.030714 +v 0.080399 1.467916 -0.030093 +v 0.072675 1.482711 -0.021801 +v 0.072327 1.483599 -0.020303 +v 0.083926 1.505195 -0.047073 +v 0.082566 1.511308 -0.044839 +v 0.086087 1.511596 -0.040800 +v 0.086768 1.509089 -0.042098 +v 0.074730 1.498708 -0.025229 +v 0.072484 1.499298 -0.026263 +v 0.072424 1.498498 -0.026271 +v 0.075274 1.497098 -0.025153 +v 0.074986 1.498595 -0.024097 +v 0.075514 1.497004 -0.023998 +v 0.075085 1.496797 -0.022906 +v 0.074710 1.497703 -0.022785 +v 0.071792 1.499615 -0.026608 +v 0.072000 1.499300 -0.026400 +v 0.072388 1.499850 -0.026639 +v 0.077294 1.502724 -0.037763 +v 0.078993 1.494880 -0.039129 +v 0.077413 1.494601 -0.037981 +v 0.075700 1.501500 -0.036400 +v 0.089216 1.495402 -0.045388 +v 0.080994 1.521612 -0.028301 +v 0.079582 1.523421 -0.028204 +v 0.082400 1.526875 -0.032516 +v 0.074387 1.485551 -0.030792 +v 0.077111 1.486378 -0.028293 +v 0.075401 1.486174 -0.026902 +v 0.073776 1.485212 -0.029312 +v 0.072203 1.490294 -0.025077 +v 0.070416 1.486997 -0.024430 +v 0.074220 1.488702 -0.023891 +v 0.074537 1.504508 -0.024103 +v 0.073632 1.500887 -0.026200 +v 0.074301 1.499691 -0.025068 +v 0.074473 1.500004 -0.024007 +v 0.090300 1.486200 -0.044700 +v 0.091516 1.491394 -0.047212 +v 0.089989 1.491103 -0.048594 +v 0.088900 1.485600 -0.046100 +v 0.071174 1.473291 -0.018315 +v 0.067815 1.470535 -0.021913 +v 0.069856 1.476490 -0.018061 +v 0.091000 1.522800 -0.044100 +v 0.087700 1.524800 -0.044500 +v 0.088398 1.522717 -0.046819 +v 0.091894 1.520791 -0.046191 +v 0.072913 1.506097 -0.031592 +v 0.073018 1.504001 -0.031087 +v 0.072919 1.505397 -0.029993 +v 0.072693 1.507397 -0.030290 +v 0.053586 1.467412 -0.075433 +v 0.062458 1.468076 -0.057171 +v 0.066552 1.481198 -0.065315 +v 0.058792 1.480269 -0.080652 +v 0.083582 1.473183 -0.031719 +v 0.082493 1.471092 -0.029401 +v 0.075389 1.512005 -0.023502 +v 0.074722 1.513599 -0.022600 +v 0.075513 1.518799 -0.024222 +v 0.076219 1.515923 -0.024158 +v 0.090682 1.506499 -0.052109 +v 0.093415 1.505906 -0.050612 +v 0.077712 1.518490 -0.030205 +v 0.081200 1.521900 -0.033400 +v 0.079308 
1.519492 -0.032198 +v 0.077480 1.518012 -0.030407 +v 0.077190 1.509801 -0.027990 +v 0.077694 1.511105 -0.027778 +v 0.076001 1.512096 -0.027915 +v 0.075200 1.510700 -0.027900 +v 0.074370 1.517897 -0.021713 +v 0.075297 1.522474 -0.025117 +v 0.071415 1.475105 -0.036388 +v 0.077115 1.511100 -0.025590 +v 0.077700 1.513600 -0.025600 +v 0.078380 1.512711 -0.027699 +v -0.044034 1.418873 -0.070397 +v -0.039796 1.426214 -0.075181 +v -0.036540 1.415001 -0.078651 +v -0.044437 1.411710 -0.070237 +v 0.070513 1.481494 -0.017518 +v 0.071608 1.483198 -0.019075 +v 0.076812 1.517093 -0.030192 +v 0.076143 1.515882 -0.030085 +v 0.074901 1.512051 -0.029070 +v 0.075697 1.510116 -0.030317 +v 0.083394 1.503779 -0.036850 +v 0.085900 1.495120 -0.037669 +v 0.070665 1.482116 -0.051701 +v 0.066781 1.468771 -0.037011 +v 0.074900 1.512500 -0.028100 +v 0.074900 1.512900 -0.028300 +v 0.074300 1.512000 -0.028500 +v 0.074200 1.511400 -0.028200 +v 0.081180 1.469406 -0.027314 +v 0.081392 1.510102 -0.034728 +v 0.078509 1.508234 -0.032619 +v 0.078690 1.510401 -0.032211 +v 0.091593 1.496901 -0.045991 +v 0.092481 1.504997 -0.047033 +v 0.093707 1.505301 -0.048582 +v 0.092700 1.496700 -0.047300 +v 0.069138 1.468847 -0.022896 +v 0.073401 1.503294 -0.031788 +v 0.074390 1.501599 -0.031214 +v 0.073320 1.508785 -0.030771 +v 0.073362 1.510351 -0.029068 +v 0.071539 1.486197 -0.018780 +v 0.087705 1.519915 -0.045409 +v 0.088484 1.518000 -0.046810 +v 0.086125 1.518595 -0.043989 +v 0.085717 1.519801 -0.042398 +v 0.084872 1.521507 -0.039310 +v 0.084900 1.522959 -0.038902 +v 0.086800 1.521600 -0.043400 +v 0.076910 1.509099 -0.031384 +v 0.077488 1.510807 -0.031221 +v 0.078757 1.481692 -0.026054 +v 0.079651 1.475700 -0.026960 +v 0.087515 1.486694 -0.039481 +v 0.087178 1.487520 -0.036411 +v 0.087200 1.496700 -0.049600 +v 0.086626 1.505995 -0.050271 +v 0.085270 1.505500 -0.048424 +v 0.086200 1.496700 -0.048500 +v 0.080890 1.497910 -0.046193 +v 0.077697 1.498497 -0.046178 +v 0.074100 1.498700 -0.022500 +v 0.073200 1.502400 -0.020900 +v 0.073000 1.508100 -0.030600 +v 0.073112 1.509696 -0.028998 +v 0.082101 1.481590 -0.041807 +v 0.077895 1.474733 -0.034283 +v 0.082003 1.479694 -0.039813 +v 0.073816 1.473945 -0.032524 +v 0.073304 1.473187 -0.030203 +v 0.075893 1.473216 -0.031599 +v 0.084062 1.483308 -0.033637 +v 0.085978 1.484417 -0.036390 +v 0.071500 1.499400 -0.027100 +v 0.070090 1.497912 -0.028215 +v 0.070510 1.499177 -0.026601 +v 0.086793 1.502906 -0.038129 +v 0.088239 1.494694 -0.038349 +v 0.077317 1.513394 -0.028185 +v 0.076101 1.503386 -0.027988 +v 0.076739 1.506787 -0.026397 +v 0.080088 1.516707 -0.028296 +v 0.078608 1.515193 -0.028702 +v 0.078171 1.514217 -0.028369 +v 0.079119 1.514587 -0.027733 +v 0.064970 1.489977 -0.026859 +v 0.068207 1.488286 -0.025432 +v 0.069600 1.491400 -0.025400 +v 0.083936 1.522604 -0.037179 +v 0.084376 1.520602 -0.038312 +v 0.083800 1.512300 -0.046400 +v 0.077005 1.514888 -0.032296 +v 0.077607 1.513397 -0.033092 +v 0.077512 1.523697 -0.039600 +v 0.077134 1.522214 -0.036306 +v 0.077024 1.522596 -0.035096 +v 0.077000 1.526200 -0.035400 +v 0.072194 1.470894 -0.027111 +v 0.075000 1.471200 -0.029600 +v 0.074913 1.472222 -0.030080 +v 0.072794 1.472090 -0.028308 +v 0.074614 1.485867 -0.026314 +v 0.073392 1.484991 -0.028510 +v 0.088313 1.487893 -0.042607 +v 0.087791 1.487097 -0.041406 +v 0.088800 1.491100 -0.043700 +v 0.089407 1.492300 -0.044808 +v 0.074023 1.506963 -0.033952 +v 0.073613 1.505802 -0.033402 +v 0.078264 1.516972 -0.037457 +v 0.080300 1.519500 -0.038900 +v 0.078936 1.522279 -0.035468 +v 0.077375 1.520039 
-0.035263 +v 0.085184 1.513304 -0.048435 +v 0.077900 1.512000 -0.032500 +v 0.078690 1.512194 -0.033514 +v 0.077290 1.512393 -0.032109 +v 0.074693 1.509798 -0.027983 +v 0.076690 1.508602 -0.028290 +v 0.079600 1.517115 -0.029581 +v 0.083299 1.520672 -0.032822 +v 0.082605 1.520723 -0.032873 +v 0.079196 1.517891 -0.029805 +v 0.075486 1.514507 -0.028998 +v 0.014577 1.465024 -0.102244 +v 0.016400 1.474400 -0.109200 +v -0.039024 1.550958 0.053657 +v -0.046783 1.549456 0.048912 +v -0.044954 1.543563 0.051419 +v -0.037682 1.544673 0.054847 +v 0.077508 1.523187 -0.028108 +v 0.078384 1.524716 -0.032003 +v 0.079493 1.525831 -0.032370 +v 0.078124 1.524043 -0.029015 +v 0.085510 1.518598 -0.041793 +v 0.089900 1.514600 -0.047000 +v 0.075224 1.500049 -0.029836 +v 0.072310 1.497196 -0.029602 +v 0.062100 1.496100 -0.028400 +v 0.062400 1.497300 -0.028400 +v 0.062996 1.496896 -0.029588 +v 0.062700 1.495600 -0.029800 +v 0.065798 1.490998 -0.030077 +v 0.068687 1.489987 -0.028824 +v 0.067795 1.488059 -0.027571 +v 0.065087 1.488901 -0.029508 +v 0.061700 1.493000 -0.028800 +v 0.061900 1.494600 -0.028500 +v 0.062600 1.493904 -0.030024 +v 0.062510 1.492089 -0.030514 +v 0.083251 1.522548 -0.035046 +v 0.085047 1.522467 -0.036875 +v 0.091507 1.513492 -0.045983 +v 0.092000 1.511200 -0.046800 +v 0.088778 1.518688 -0.044296 +v 0.089400 1.516800 -0.045700 +v 0.073810 1.490899 -0.021494 +v 0.074915 1.491096 -0.023006 +v 0.074587 1.489510 -0.022989 +v 0.092426 1.516497 -0.046084 +v 0.092491 1.518401 -0.047705 +v 0.074023 1.473610 -0.019287 +v 0.072900 1.472100 -0.018700 +v 0.040539 1.402694 -0.008784 +v 0.048074 1.407914 -0.020517 +v 0.047013 1.398104 -0.021095 +v 0.039392 1.393104 -0.010006 +v 0.079593 1.519097 -0.027011 +v 0.078310 1.521306 -0.026782 +v 0.061832 1.491418 -0.027890 +v 0.061767 1.490564 -0.029280 +v 0.064532 1.489040 -0.028225 +v 0.066803 1.495395 -0.026325 +v 0.062692 1.496102 -0.026989 +v 0.066004 1.492689 -0.029996 +v 0.066600 1.497700 -0.029000 +v 0.066899 1.498785 -0.027594 +v 0.069870 1.484581 -0.025568 +v 0.071628 1.484108 -0.027495 +v 0.073099 1.484030 -0.024480 +v 0.062493 1.490133 -0.030449 +v 0.073605 1.507995 -0.032192 +v 0.069294 1.493493 -0.029380 +v 0.072510 1.491006 -0.030991 +v 0.072108 1.488909 -0.030290 +v 0.069088 1.491598 -0.029288 +v 0.082186 1.480509 -0.032108 +v 0.082500 1.482389 -0.031806 +v 0.075207 1.486410 -0.032571 +v 0.078600 1.486500 -0.030726 +v 0.074610 1.503003 -0.030429 +v 0.075710 1.506076 -0.029052 +v 0.072018 1.499535 -0.027245 +v 0.069600 1.495600 -0.029000 +v 0.072429 1.493400 -0.030779 +v 0.066996 1.496709 -0.026563 +v 0.070701 1.496697 -0.025915 +v 0.070496 1.497807 -0.026089 +v 0.067002 1.497703 -0.026813 +v 0.080210 1.513496 -0.034376 +v 0.077161 1.519075 -0.025362 +v 0.078600 1.516400 -0.026100 +v 0.078956 1.526993 -0.051414 +v 0.077945 1.518913 -0.056083 +v 0.077665 1.520494 -0.043294 +v 0.066104 1.494500 -0.029818 +v 0.063008 1.498217 -0.028195 +v 0.063488 1.497694 -0.027293 +v 0.087700 1.483800 -0.040300 +v 0.073996 1.483585 -0.024303 +v 0.073597 1.507916 -0.028454 +v 0.087091 1.491399 -0.048005 +v 0.086200 1.486200 -0.045800 +v 0.073000 1.509200 -0.028900 +v 0.073613 1.510299 -0.028418 +v 0.072679 1.484436 -0.028099 +v 0.074011 1.484984 -0.025493 +v 0.073800 1.510700 -0.028300 +v 0.074759 1.490612 -0.024153 +v 0.085521 1.513187 -0.039739 +v 0.078886 1.472181 -0.034028 +v 0.076900 1.470700 -0.031400 +v 0.063107 1.497087 -0.027034 +v 0.067048 1.488263 -0.026903 +v 0.066401 1.496204 -0.029511 +v 0.073083 1.494401 -0.025488 +v 0.072718 1.492295 -0.025142 +v 
0.075021 1.492793 -0.024235 +v 0.080194 1.487032 -0.032232 +v 0.076200 1.488129 -0.034638 +v 0.077499 1.489024 -0.036690 +v 0.082102 1.487860 -0.033716 +v 0.084805 1.481507 -0.043287 +v 0.083016 1.477319 -0.039971 +v 0.085043 1.475936 -0.039699 +v 0.087297 1.480394 -0.043209 +v 0.079918 1.541308 -0.059991 +v 0.078910 1.532799 -0.044899 +v 0.080121 1.548101 -0.049100 +v 0.083314 1.521389 -0.044086 +v 0.083806 1.518997 -0.045690 +v 0.088991 1.520200 -0.048605 +v 0.076340 1.525030 -0.030066 +v 0.076931 1.522731 -0.033547 +v 0.077407 1.523933 -0.031651 +v 0.090305 1.496596 -0.050608 +v 0.074900 1.508300 -0.023500 +v 0.076811 1.508900 -0.025803 +v 0.081302 1.482541 -0.042839 +v 0.082741 1.485784 -0.031869 +v 0.085088 1.486284 -0.033841 +v 0.088312 1.522894 -0.037393 +v 0.085094 1.521385 -0.034799 +v 0.008841 1.394244 0.015789 +v 0.008800 1.386900 0.011600 +v 0.008589 1.498484 0.073786 +v 0.010030 1.498324 0.072305 +v 0.008757 1.500242 0.071290 +v 0.008117 1.500249 0.072013 +v 0.008946 1.489300 0.072430 +v 0.008334 1.490922 0.071314 +v 0.009315 1.490643 0.070347 +v 0.010188 1.489265 0.071512 +v 0.005681 1.499910 0.071178 +v 0.004213 1.497290 0.073791 +v 0.005785 1.498187 0.075196 +v 0.006796 1.500441 0.071916 +v 0.088400 1.525800 -0.038200 +v 0.012342 1.488981 0.072095 +v 0.011326 1.490885 0.070919 +v 0.012050 1.491341 0.072767 +v 0.013716 1.489773 0.073790 +v 0.004958 1.489999 0.080211 +v 0.006080 1.492103 0.080389 +v 0.004980 1.491307 0.078099 +v 0.004935 1.489324 0.077714 +v 0.007888 1.492561 0.079779 +v 0.009808 1.492021 0.081514 +v 0.011583 1.491985 0.079889 +v 0.009400 1.492428 0.078300 +v 0.004716 1.493621 0.073204 +v 0.006063 1.491670 0.074584 +v 0.003964 1.493789 0.076796 +v 0.012266 1.493714 0.070902 +v 0.010928 1.492691 0.069478 +v 0.009809 1.496498 0.068938 +v 0.011062 1.497282 0.069812 +v 0.045276 1.418919 -0.006497 +v 0.051316 1.421097 -0.020493 +v 0.034601 1.509504 0.051197 +v 0.033968 1.506854 0.052483 +v 0.030002 1.508004 0.053112 +v 0.030801 1.510322 0.051979 +v 0.075600 1.495000 -0.023600 +v 0.074983 1.495799 -0.022522 +v 0.084383 1.516205 -0.047117 +v 0.087769 1.520281 -0.042209 +v 0.082911 1.523192 -0.041995 +v 0.081484 1.525319 -0.037312 +v 0.080076 1.523708 -0.036314 +v 0.081315 1.521297 -0.040291 +v 0.090800 1.520100 -0.042800 +v 0.075382 1.482188 -0.024203 +v 0.084040 1.509298 -0.036335 +v 0.080484 1.525822 -0.034804 +v 0.083919 1.527606 -0.035488 +v 0.090591 1.497096 -0.045921 +v 0.091688 1.518505 -0.044615 +v 0.089812 1.517295 -0.043486 +v 0.081321 1.518998 -0.029088 +v 0.078895 1.491603 -0.038661 +v 0.085402 1.491588 -0.036554 +v 0.084007 1.489086 -0.035109 +v -0.092919 1.516004 -0.048998 +v -0.093182 1.513297 -0.049973 +v -0.093400 1.511900 -0.048200 +v -0.093071 1.514299 -0.047313 +v -0.089794 1.497809 -0.046670 +v -0.089310 1.505389 -0.048436 +v -0.090352 1.508011 -0.049430 +v -0.090705 1.505796 -0.048534 +v 0.078399 1.489817 -0.037774 +v 0.073444 1.504694 -0.032820 +v 0.071111 1.486897 -0.016909 +v 0.081578 1.475997 -0.030012 +v -0.090287 1.486997 -0.043418 +v -0.089308 1.487702 -0.042889 +v -0.090700 1.492300 -0.044900 +v -0.091687 1.491903 -0.045704 +v 0.082714 1.477004 -0.032189 +v 0.031157 1.465431 -0.096619 +v 0.034883 1.475714 -0.102887 +v 0.092400 1.496600 -0.049200 +v 0.083202 1.522130 -0.035985 +v 0.077509 1.481801 -0.025290 +v 0.077217 1.476111 -0.024483 +v 0.075527 1.476613 -0.022791 +v 0.078213 1.512608 -0.033384 +v 0.080792 1.483473 -0.043245 +v 0.081006 1.485539 -0.044522 +v 0.063300 1.497600 -0.029200 +v 0.082300 1.478600 -0.039700 +v 0.078492 
1.473495 -0.034110 +v 0.054580 1.424205 -0.033907 +v 0.053617 1.426598 -0.048816 +v -0.075298 1.468043 -0.021340 +v -0.078140 1.470212 -0.022933 +v -0.078493 1.467790 -0.024400 +v -0.073913 1.466336 -0.023090 +v 0.052512 1.415702 -0.047314 +v 0.052297 1.412202 -0.033512 +v 0.072400 1.500500 -0.017900 +v 0.072720 1.493501 -0.019884 +v 0.072078 1.492403 -0.018405 +v 0.022133 1.402373 0.012551 +v 0.018991 1.390401 0.007792 +v 0.071080 1.487414 -0.013506 +v 0.070164 1.486779 -0.003273 +v 0.071773 1.502108 -0.003201 +v 0.072151 1.496898 -0.012896 +v 0.059961 1.533198 0.030682 +v 0.061666 1.533495 0.026588 +v 0.061933 1.524304 0.026724 +v 0.059859 1.525661 0.030102 +v 0.020105 1.589077 0.046472 +v 0.009713 1.590238 0.048897 +v 0.009944 1.597050 0.042825 +v 0.020552 1.595691 0.040151 +v 0.036783 1.483221 -0.106562 +v 0.038502 1.492891 -0.110712 +v 0.018483 1.490152 -0.117701 +v 0.017417 1.481271 -0.113053 +v 0.020717 1.610835 0.018436 +v 0.010055 1.612720 0.020935 +v 0.010178 1.617029 0.011196 +v -0.070914 1.467491 -0.024294 +v -0.075432 1.466828 -0.028178 +v -0.075074 1.469686 -0.029129 +v -0.071489 1.469689 -0.026106 +v 0.021004 1.614887 0.009079 +v 0.051849 1.506984 0.042450 +v 0.053308 1.500902 0.042309 +v 0.049500 1.500600 0.045600 +v 0.048611 1.506682 0.045109 +v 0.077083 1.471595 -0.021914 +v 0.075873 1.472489 -0.020821 +v -0.086489 1.526078 -0.035111 +v -0.086600 1.523100 -0.034700 +v -0.084675 1.522306 -0.032321 +v -0.084516 1.525303 -0.032395 +v -0.076847 1.490511 -0.045292 +v -0.075461 1.484031 -0.043370 +v -0.081300 1.489074 -0.045823 +v -0.081298 1.491995 -0.046116 +v 0.010363 1.620560 -0.000194 +v 0.021444 1.618288 -0.001890 +v 0.013719 1.517008 0.059295 +v -0.085506 1.516107 -0.041696 +v -0.086614 1.516897 -0.045188 +v -0.087200 1.514700 -0.046300 +v -0.086210 1.513201 -0.043498 +v -0.089406 1.517594 -0.050088 +v -0.089835 1.514599 -0.051186 +v -0.073155 1.476985 -0.019715 +v -0.071768 1.480608 -0.019054 +v -0.072296 1.482197 -0.020109 +v -0.073056 1.481128 -0.021346 +v -0.073953 1.494303 -0.032675 +v -0.075096 1.498497 -0.031088 +v -0.086495 1.476758 -0.038243 +v -0.086462 1.478693 -0.037171 +v -0.088511 1.482595 -0.040600 +v -0.088684 1.481204 -0.041805 +v 0.014127 1.518765 0.056494 +v 0.017673 1.515705 0.055114 +v 0.017351 1.513871 0.057445 +v 0.007369 1.564131 0.060202 +v 0.008331 1.571458 0.057659 +v 0.017301 1.571015 0.055837 +v 0.015753 1.563817 0.058794 +v 0.021739 1.513091 0.053661 +v 0.021012 1.511308 0.055406 +v 0.010005 1.602932 0.036161 +v 0.020698 1.601402 0.033405 +v 0.020840 1.621971 -0.050662 +v -0.083800 1.481300 -0.034800 +v -0.086100 1.483800 -0.038300 +v -0.087015 1.484403 -0.040188 +v -0.085989 1.482300 -0.038105 +v 0.021361 1.622750 -0.042579 +v 0.009988 1.625236 -0.042999 +v -0.088031 1.519317 -0.041792 +v -0.085861 1.521559 -0.037113 +v -0.086699 1.521126 -0.037101 +v -0.088900 1.518700 -0.041800 +v 0.011010 1.623798 -0.055177 +v 0.071022 1.498660 -0.072359 +v 0.074708 1.498310 -0.057287 +v 0.076813 1.509999 -0.058491 +v 0.074411 1.514719 -0.074802 +v 0.033028 1.407859 0.005141 +v 0.030039 1.395778 0.000929 +v -0.082000 1.504600 -0.045500 +v -0.080323 1.509694 -0.043474 +v -0.079604 1.508792 -0.043485 +v -0.081297 1.504301 -0.045314 +v -0.089100 1.515800 -0.048100 +v -0.089639 1.513104 -0.049003 +v -0.087992 1.512000 -0.047407 +v -0.075500 1.513617 -0.029983 +v -0.076118 1.514593 -0.031298 +v -0.076609 1.513005 -0.031696 +v -0.076371 1.511694 -0.030520 +v -0.078164 1.466576 -0.027814 +v -0.073096 1.475727 -0.036776 +v -0.074801 1.476562 -0.038030 +v 
-0.077596 1.480667 -0.040850 +v -0.076407 1.481515 -0.041493 +v 0.053520 1.554444 0.037880 +v 0.055167 1.559187 0.033299 +v 0.058439 1.552305 0.030019 +v 0.056797 1.548534 0.034888 +v 0.005887 1.552131 0.063546 +v 0.013242 1.551916 0.062647 +v 0.011428 1.545963 0.063453 +v 0.005203 1.545936 0.064108 +v -0.070618 1.469515 -0.019925 +v -0.073800 1.469900 -0.019600 +v -0.074701 1.468685 -0.020488 +v -0.071600 1.467919 -0.021209 +v 0.059440 1.518629 0.030726 +v 0.062023 1.515611 0.028312 +v 0.060821 1.508001 0.032915 +v 0.058003 1.511984 0.034409 +v 0.054598 1.507965 0.039780 +v -0.078425 1.518098 -0.031667 +v -0.080957 1.517503 -0.033837 +v -0.079092 1.516506 -0.033011 +v -0.077368 1.516811 -0.031317 +v 0.056600 1.502200 0.039300 +v -0.088500 1.503500 -0.046500 +v -0.087000 1.510300 -0.044900 +v 0.018904 1.579964 0.052345 +v 0.009112 1.580880 0.054387 +v 0.060600 1.543300 0.027500 +v 0.058985 1.541133 0.032384 +v 0.010699 1.519994 0.061739 +v 0.011514 1.521322 0.058249 +v 0.007996 1.525146 0.059987 +v -0.089148 1.494204 -0.040272 +v -0.089000 1.494300 -0.043500 +v -0.088538 1.490188 -0.041897 +v -0.088564 1.490214 -0.038795 +v 0.003912 1.524971 0.065569 +v 0.003648 1.528717 0.063754 +v 0.007046 1.528482 0.060248 +v 0.033993 1.562933 0.052199 +v 0.035634 1.569459 0.048664 +v 0.043291 1.567819 0.043458 +v 0.041735 1.561705 0.047287 +v 0.020698 1.606310 0.026348 +v -0.088334 1.502483 -0.040508 +v -0.088447 1.502416 -0.044192 +v 0.010005 1.608005 0.029016 +v 0.004139 1.537948 0.062980 +v 0.008537 1.537917 0.061632 +v 0.007473 1.534421 0.060282 +v -0.090983 1.511396 -0.046991 +v -0.091428 1.505198 -0.047205 +v -0.090384 1.512000 -0.047798 +v -0.077544 1.512600 -0.041093 +v -0.077400 1.515300 -0.044900 +v -0.077651 1.508587 -0.046651 +v -0.078383 1.510012 -0.042574 +v 0.003950 1.534628 0.062399 +v 0.025500 1.509400 0.054200 +v 0.026221 1.511402 0.052919 +v 0.068924 1.470783 -0.026772 +v 0.070063 1.470235 -0.025719 +v 0.073354 1.517300 -0.015507 +v 0.072714 1.507101 -0.013696 +v 0.071736 1.514897 -0.004110 +v 0.071742 1.527400 -0.006595 +v 0.073300 1.538600 -0.010500 +v 0.074599 1.546926 -0.016268 +v 0.075720 1.531749 -0.022655 +v 0.074613 1.525899 -0.018098 +v 0.049669 1.418009 -0.059856 +v 0.048608 1.427595 -0.062745 +v 0.004078 1.522301 0.068552 +v 0.004027 1.519664 0.071163 +v 0.020495 1.529703 -0.128086 +v -0.072487 1.486189 -0.020399 +v -0.073652 1.488310 -0.021518 +v -0.072785 1.486082 -0.022770 +v -0.072332 1.483812 -0.022359 +v 0.019992 1.516906 -0.126383 +v 0.040609 1.520993 -0.118122 +v 0.040804 1.533198 -0.119411 +v 0.068436 1.488481 -0.068879 +v -0.078096 1.478882 -0.039421 +v -0.075400 1.475700 -0.037100 +v -0.075785 1.475417 -0.036252 +v -0.078507 1.477345 -0.037814 +v 0.072751 1.489694 -0.054914 +v 0.010892 1.495095 0.074194 +v 0.007594 1.499713 0.069461 +v 0.007789 1.496199 0.068918 +v 0.007417 1.498889 0.069265 +v 0.010944 1.492053 0.076099 +v 0.013247 1.491279 0.077394 +v 0.008600 1.492389 0.069127 +v 0.007700 1.492600 0.069700 +v 0.006998 1.496497 0.069211 +v 0.008729 1.500115 0.070107 +v 0.001944 1.488386 0.082348 +v 0.002499 1.489875 0.085125 +v 0.006506 1.490990 0.084097 +v 0.005006 1.489613 0.082093 +v 0.005458 1.499077 0.070745 +v 0.004491 1.496811 0.071589 +v 0.005310 1.494685 0.078792 +v 0.007206 1.498428 0.074823 +v 0.007494 1.500437 0.072081 +v 0.007200 1.495300 0.078000 +v 0.006200 1.489300 0.075200 +v 0.009008 1.495404 0.076405 +v -0.083100 1.518700 -0.036200 +v -0.083855 1.513511 -0.036581 +v -0.082100 1.513505 -0.035117 +v -0.079400 1.482900 -0.027100 +v -0.080206 
1.484210 -0.028695 +v -0.081500 1.481100 -0.030500 +v -0.080687 1.479097 -0.029207 +v 0.081106 1.485086 -0.030299 +v 0.055353 1.438827 -0.014305 +v 0.061802 1.452633 -0.021411 +v 0.061800 1.451269 -0.026789 +v -0.082571 1.470333 -0.032585 +v -0.080240 1.469896 -0.033585 +v -0.077900 1.467889 -0.030714 +v -0.080399 1.467916 -0.030093 +v -0.072675 1.482711 -0.021801 +v -0.072327 1.483599 -0.020303 +v -0.083926 1.505195 -0.047073 +v -0.082566 1.511308 -0.044839 +v 0.056475 1.436705 -0.023388 +v 0.080800 1.511900 -0.034700 +v 0.066012 1.558296 0.012561 +v 0.067698 1.568962 0.002878 +v 0.072355 1.557904 -0.008158 +v -0.086087 1.511596 -0.040800 +v -0.086768 1.509089 -0.042098 +v 0.071371 1.547902 -0.002509 +v 0.068379 1.577790 -0.004909 +v 0.073269 1.567598 -0.015308 +v 0.076018 1.556001 -0.024296 +v 0.077911 1.536200 -0.036897 +v 0.042165 1.521251 0.045613 +v 0.042412 1.521112 0.045815 +v 0.040900 1.520800 0.047100 +v -0.074730 1.498708 -0.025229 +v -0.072484 1.499298 -0.026263 +v -0.072424 1.498498 -0.026271 +v -0.075274 1.497098 -0.025153 +v 0.040900 1.520900 0.046900 +v 0.041684 1.521291 0.044982 +v 0.040343 1.520792 0.046058 +v -0.074986 1.498595 -0.024097 +v -0.075514 1.497004 -0.023998 +v -0.075085 1.496797 -0.022906 +v -0.074710 1.497703 -0.022785 +v 0.042542 1.521718 0.044010 +v 0.043095 1.521563 0.044535 +v 0.043365 1.521386 0.044713 +v 0.024933 1.521899 0.048906 +v 0.025588 1.522170 0.048171 +v 0.028191 1.521207 0.048430 +v 0.027547 1.521253 0.049572 +v 0.024720 1.521709 0.049378 +v 0.027503 1.521018 0.050005 +v -0.071792 1.499615 -0.026608 +v -0.072000 1.499300 -0.026400 +v -0.072388 1.499850 -0.026639 +v -0.077294 1.502724 -0.037763 +v -0.078993 1.494880 -0.039129 +v -0.077413 1.494601 -0.037981 +v -0.075700 1.501500 -0.036400 +v 0.028678 1.523900 0.042557 +v 0.028204 1.524236 0.042492 +v -0.089216 1.495402 -0.045388 +v 0.029466 1.523482 0.042681 +v 0.023235 1.523973 0.046674 +v 0.023291 1.524974 0.046778 +v -0.080994 1.521612 -0.028301 +v -0.079582 1.523421 -0.028204 +v -0.082400 1.526875 -0.032516 +v 0.027982 1.525037 0.042609 +v 0.027969 1.524510 0.042490 +v 0.023469 1.523441 0.046801 +v -0.074387 1.485551 -0.030792 +v -0.077111 1.486378 -0.028293 +v -0.075401 1.486174 -0.026902 +v -0.073776 1.485212 -0.029312 +v -0.072203 1.490294 -0.025077 +v -0.070416 1.486997 -0.024430 +v -0.074220 1.488702 -0.023891 +v -0.074537 1.504508 -0.024103 +v -0.073632 1.500887 -0.026200 +v -0.074301 1.499691 -0.025068 +v -0.074473 1.500004 -0.024007 +v -0.090300 1.486200 -0.044700 +v -0.091516 1.491394 -0.047212 +v -0.089989 1.491103 -0.048594 +v -0.088900 1.485600 -0.046100 +v 0.021958 1.523277 0.046966 +v 0.021129 1.523661 0.046653 +v -0.071174 1.473291 -0.018315 +v -0.067815 1.470535 -0.021913 +v -0.069856 1.476490 -0.018061 +v -0.091000 1.522800 -0.044100 +v -0.087700 1.524800 -0.044500 +v -0.088398 1.522717 -0.046819 +v -0.091894 1.520791 -0.046191 +v -0.072913 1.506097 -0.031592 +v -0.073018 1.504001 -0.031087 +v -0.072919 1.505397 -0.029993 +v -0.072693 1.507397 -0.030290 +v 0.021224 1.524972 0.046884 +v 0.032700 1.520100 0.050100 +v 0.032897 1.519399 0.050184 +v 0.028200 1.519704 0.050417 +v 0.027801 1.520500 0.050288 +v 0.032498 1.520485 0.049893 +v 0.035900 1.520600 0.049000 +v 0.035996 1.520197 0.049188 +v 0.036104 1.519501 0.049213 +v -0.053586 1.467412 -0.075433 +v -0.062458 1.468076 -0.057171 +v -0.066552 1.481198 -0.065315 +v -0.058792 1.480269 -0.080652 +v -0.083582 1.473183 -0.031719 +v -0.082493 1.471092 -0.029401 +v -0.075389 1.512005 -0.023502 +v -0.074722 1.513599 -0.022600 +v 
-0.075513 1.518799 -0.024222 +v -0.076219 1.515923 -0.024158 +v 0.024901 1.520591 0.050179 +v 0.024799 1.521400 0.049815 +v 0.036100 1.518900 0.049200 +v 0.033100 1.518800 0.050100 +v -0.090682 1.506499 -0.052109 +v -0.093415 1.505906 -0.050612 +v 0.028400 1.519100 0.050400 +v 0.024900 1.520200 0.050300 +v 0.022180 1.521429 0.049697 +v 0.023262 1.521579 0.049651 +v 0.033098 1.518102 0.049889 +v 0.033002 1.517298 0.049711 +v 0.028299 1.517601 0.050288 +v 0.028401 1.518399 0.050319 +v -0.077712 1.518490 -0.030205 +v -0.081200 1.521900 -0.033400 +v -0.079308 1.519492 -0.032198 +v -0.077480 1.518012 -0.030407 +v -0.077190 1.509801 -0.027990 +v -0.077694 1.511105 -0.027778 +v -0.076001 1.512096 -0.027915 +v -0.075200 1.510700 -0.027900 +v 0.036100 1.518200 0.048900 +v 0.036000 1.517400 0.048700 +v -0.074370 1.517897 -0.021713 +v -0.075297 1.522474 -0.025117 +v 0.044439 1.521538 0.043123 +v 0.044900 1.522000 0.042200 +v 0.044889 1.521497 0.042391 +v -0.071415 1.475105 -0.036388 +v 0.044613 1.521100 0.042908 +v 0.047300 1.519000 0.040400 +v 0.048300 1.520100 0.039500 +v 0.050100 1.519300 0.038700 +v -0.077115 1.511100 -0.025590 +v -0.077700 1.513600 -0.025600 +v -0.078380 1.512711 -0.027699 +v -0.070513 1.481494 -0.017518 +v -0.071608 1.483198 -0.019075 +v -0.076812 1.517093 -0.030192 +v -0.076143 1.515882 -0.030085 +v -0.074901 1.512051 -0.029070 +v -0.075697 1.510116 -0.030317 +v 0.048907 1.518104 0.039809 +v -0.083394 1.503779 -0.036850 +v -0.085900 1.495120 -0.037669 +v 0.056681 1.509389 0.037056 +v 0.051190 1.515294 0.039686 +v 0.052600 1.516500 0.037900 +v 0.044000 1.521300 0.043900 +v 0.043400 1.521000 0.044800 +v 0.043900 1.521600 0.043800 +v 0.044314 1.521924 0.042872 +v 0.044184 1.520701 0.043691 +v 0.043510 1.520399 0.044707 +v 0.042500 1.520800 0.045900 +v 0.042589 1.520103 0.045890 +v 0.041000 1.519800 0.047200 +v 0.040900 1.520500 0.047200 +v 0.038827 1.520294 0.048207 +v -0.070665 1.482116 -0.051701 +v -0.066781 1.468771 -0.037011 +v 0.039096 1.520691 0.047990 +v 0.038924 1.519591 0.048198 +v 0.041100 1.519200 0.047000 +v 0.039015 1.518991 0.048107 +v 0.042600 1.519500 0.045600 +v 0.043590 1.519902 0.044393 +v 0.044314 1.520298 0.043307 +v 0.044900 1.520700 0.042500 +v 0.045300 1.521300 0.041900 +v 0.044600 1.519800 0.042800 +v 0.045200 1.519100 0.042100 +v 0.044000 1.518600 0.043400 +v 0.043727 1.519374 0.043986 +v 0.045400 1.520300 0.041900 +v 0.046200 1.519700 0.041100 +v -0.074900 1.512500 -0.028100 +v -0.074900 1.512900 -0.028300 +v -0.074300 1.512000 -0.028500 +v -0.074200 1.511400 -0.028200 +v 0.042700 1.519000 0.045200 +v 0.042700 1.518300 0.044800 +v 0.041000 1.517900 0.046300 +v 0.041092 1.518606 0.046690 +v -0.081180 1.469406 -0.027314 +v -0.081392 1.510102 -0.034728 +v -0.078509 1.508234 -0.032619 +v -0.078690 1.510401 -0.032211 +v 0.039015 1.518382 0.047807 +v 0.039024 1.517589 0.047472 +v -0.091593 1.496901 -0.045991 +v -0.092481 1.504997 -0.047033 +v -0.093707 1.505301 -0.048582 +v -0.092700 1.496700 -0.047300 +v 0.032398 1.520499 0.049611 +v 0.032070 1.520462 0.048411 +v 0.035568 1.520277 0.047815 +v 0.035900 1.520600 0.048700 +v 0.039100 1.520700 0.047800 +v -0.069138 1.468847 -0.022896 +v -0.073401 1.503294 -0.031788 +v -0.074390 1.501599 -0.031214 +v 0.038514 1.520381 0.046986 +v 0.035403 1.520177 0.046662 +v 0.033667 1.522493 0.042517 +v 0.035009 1.522519 0.042154 +v 0.038077 1.520272 0.045916 +v -0.073320 1.508785 -0.030771 +v -0.073362 1.510351 -0.029068 +v 0.032250 1.520456 0.047158 +v 0.032125 1.522658 0.042749 +v 0.028946 1.521295 0.047200 +v -0.071539 
1.486197 -0.018780 +v 0.030633 1.523025 0.042787 +v 0.026318 1.522169 0.047391 +v 0.033945 1.532789 0.049154 +v 0.033987 1.533023 0.049380 +v 0.038500 1.532400 0.048800 +v 0.038100 1.532100 0.048400 +v 0.051553 1.496317 -0.100345 +v 0.052901 1.510673 -0.105774 +v 0.040195 1.505800 -0.115183 +v 0.040900 1.529700 0.047500 +v 0.040908 1.530602 0.047110 +v 0.043300 1.529300 0.045900 +v 0.043109 1.528498 0.046011 +v 0.040898 1.528786 0.047906 +v -0.087705 1.519915 -0.045409 +v -0.088484 1.518000 -0.046810 +v -0.086125 1.518595 -0.043989 +v -0.085717 1.519801 -0.042398 +v -0.084872 1.521507 -0.039310 +v -0.084900 1.522959 -0.038902 +v -0.086800 1.521600 -0.043400 +v -0.076910 1.509099 -0.031384 +v -0.077488 1.510807 -0.031221 +v 0.042890 1.527512 0.046186 +v 0.038200 1.530000 0.049500 +v -0.078757 1.481692 -0.026054 +v -0.079651 1.475700 -0.026960 +v -0.087515 1.486694 -0.039481 +v -0.087178 1.487520 -0.036411 +v -0.087200 1.496700 -0.049600 +v -0.086626 1.505995 -0.050271 +v -0.085270 1.505500 -0.048424 +v -0.086200 1.496700 -0.048500 +v 0.038200 1.531000 0.049000 +v 0.038100 1.531800 0.048500 +v 0.042119 1.526072 0.042035 +v 0.041362 1.527066 0.042705 +v -0.080890 1.497910 -0.046193 +v -0.077697 1.498497 -0.046178 +v -0.074100 1.498700 -0.022500 +v -0.073200 1.502400 -0.020900 +v -0.073000 1.508100 -0.030600 +v -0.073112 1.509696 -0.028998 +v -0.082101 1.481590 -0.041807 +v -0.077895 1.474733 -0.034283 +v -0.082003 1.479694 -0.039813 +v -0.073816 1.473945 -0.032524 +v -0.073304 1.473187 -0.030203 +v -0.075893 1.473216 -0.031599 +v 0.041916 1.526935 0.044139 +v 0.042956 1.526131 0.042937 +v 0.042757 1.525369 0.041438 +v 0.043602 1.525370 0.042054 +v 0.037542 1.525216 0.039867 +v 0.037198 1.525606 0.040151 +v 0.036660 1.526085 0.040544 +v -0.084062 1.483308 -0.033637 +v -0.085978 1.484417 -0.036390 +v 0.037714 1.524891 0.039697 +v -0.071500 1.499400 -0.027100 +v -0.070090 1.497912 -0.028215 +v -0.070510 1.499177 -0.026601 +v -0.086793 1.502906 -0.038129 +v -0.088239 1.494694 -0.038349 +v -0.077317 1.513394 -0.028185 +v 0.037779 1.524597 0.039632 +v -0.076101 1.503386 -0.027988 +v -0.076739 1.506787 -0.026397 +v -0.080088 1.516707 -0.028296 +v -0.078608 1.515193 -0.028702 +v -0.078171 1.514217 -0.028369 +v -0.079119 1.514587 -0.027733 +v -0.064970 1.489977 -0.026859 +v -0.068207 1.488286 -0.025432 +v -0.069600 1.491400 -0.025400 +v 0.037758 1.524316 0.039661 +v 0.037669 1.524040 0.039811 +v 0.058518 1.493799 0.036810 +v 0.059081 1.504398 0.036285 +v -0.083936 1.522604 -0.037179 +v -0.084376 1.520602 -0.038312 +v -0.083800 1.512300 -0.046400 +v 0.061300 1.496500 0.033300 +v 0.043599 1.521847 0.043556 +v -0.077005 1.514888 -0.032296 +v -0.077607 1.513397 -0.033092 +v 0.044055 1.522221 0.042659 +v -0.077512 1.523697 -0.039600 +v -0.077134 1.522214 -0.036306 +v -0.077024 1.522596 -0.035096 +v -0.077000 1.526200 -0.035400 +v 0.043015 1.522082 0.043129 +v -0.072194 1.470894 -0.027111 +v -0.075000 1.471200 -0.029600 +v -0.074913 1.472222 -0.030080 +v -0.072794 1.472090 -0.028308 +v -0.074614 1.485867 -0.026314 +v -0.073392 1.484991 -0.028510 +v 0.043462 1.522488 0.042348 +v -0.088313 1.487893 -0.042607 +v -0.087791 1.487097 -0.041406 +v -0.088800 1.491100 -0.043700 +v -0.089407 1.492300 -0.044808 +v 0.044553 1.522761 0.041803 +v 0.044927 1.522554 0.041844 +v 0.041056 1.521240 0.044204 +v 0.036673 1.522964 0.041275 +v 0.037083 1.523262 0.040834 +v -0.074023 1.506963 -0.033952 +v -0.073613 1.505802 -0.033402 +v -0.078264 1.516972 -0.037457 +v -0.080300 1.519500 -0.038900 +v -0.078936 1.522279 -0.035468 +v 
-0.077375 1.520039 -0.035263 +v -0.085184 1.513304 -0.048435 +v 0.041746 1.521873 0.043317 +v 0.039788 1.520688 0.045084 +v 0.035983 1.522699 0.041727 +v 0.037308 1.523509 0.040446 +v 0.037510 1.523762 0.040093 +v -0.077900 1.512000 -0.032500 +v -0.078690 1.512194 -0.033514 +v 0.042057 1.522240 0.042582 +v -0.077290 1.512393 -0.032109 +v 0.042503 1.522667 0.041859 +v 0.043826 1.523087 0.041769 +v -0.074693 1.509798 -0.027983 +v -0.076690 1.508602 -0.028290 +v -0.079600 1.517115 -0.029581 +v -0.083299 1.520672 -0.032822 +v -0.082605 1.520723 -0.032873 +v -0.079196 1.517891 -0.029805 +v 0.043025 1.523723 0.040992 +v 0.043098 1.524246 0.040972 +v 0.044011 1.523655 0.041586 +v -0.075486 1.514507 -0.028998 +v 0.044072 1.524168 0.041487 +v 0.042854 1.523208 0.041263 +v -0.014577 1.465024 -0.102244 +v -0.016400 1.474400 -0.109200 +v 0.043014 1.524789 0.041098 +v 0.043930 1.524704 0.041661 +v 0.045170 1.522128 0.041830 +v 0.045172 1.522916 0.041508 +v 0.045689 1.523909 0.041025 +v 0.045611 1.522137 0.041396 +v -0.077508 1.523187 -0.028108 +v -0.078384 1.524716 -0.032003 +v -0.079493 1.525831 -0.032370 +v -0.078124 1.524043 -0.029015 +v -0.085510 1.518598 -0.041793 +v 0.046307 1.522104 0.040817 +v 0.045900 1.521000 0.041200 +v 0.046700 1.524488 0.040798 +v 0.048114 1.524818 0.040328 +v -0.089900 1.514600 -0.047000 +v -0.075224 1.500049 -0.029836 +v -0.072310 1.497196 -0.029602 +v 0.047605 1.521996 0.040014 +v 0.046900 1.520700 0.040400 +v 0.044824 1.523362 0.041444 +v -0.062100 1.496100 -0.028400 +v -0.062400 1.497300 -0.028400 +v -0.062996 1.496896 -0.029588 +v -0.062700 1.495600 -0.029800 +v 0.044987 1.523980 0.041208 +v 0.046091 1.525512 0.041885 +v -0.065798 1.490998 -0.030077 +v -0.068687 1.489987 -0.028824 +v -0.067795 1.488059 -0.027571 +v -0.065087 1.488901 -0.029508 +v 0.046900 1.526300 0.041900 +v 0.045406 1.524990 0.041911 +v 0.044897 1.525716 0.043091 +v 0.045400 1.526400 0.043200 +v -0.061700 1.493000 -0.028800 +v -0.061900 1.494600 -0.028500 +v -0.062600 1.493904 -0.030024 +v -0.062510 1.492089 -0.030514 +v 0.046000 1.527100 0.043200 +v 0.044489 1.527303 0.044590 +v -0.083251 1.522548 -0.035046 +v -0.085047 1.522467 -0.036875 +v 0.044900 1.528100 0.044600 +v 0.044112 1.526492 0.044515 +v 0.044792 1.524756 0.041778 +v -0.091507 1.513492 -0.045983 +v -0.092000 1.511200 -0.046800 +v -0.088778 1.518688 -0.044296 +v -0.089400 1.516800 -0.045700 +v 0.044314 1.525441 0.042693 +v 0.043559 1.526182 0.043934 +v 0.042506 1.526800 0.045709 +v -0.073810 1.490899 -0.021494 +v -0.074915 1.491096 -0.023006 +v -0.074587 1.489510 -0.022989 +v -0.092426 1.516497 -0.046084 +v -0.092491 1.518401 -0.047705 +v -0.074023 1.473610 -0.019287 +v -0.072900 1.472100 -0.018700 +v -0.040539 1.402694 -0.008784 +v -0.048074 1.407914 -0.020517 +v -0.047013 1.398104 -0.021095 +v -0.039392 1.393104 -0.010006 +v -0.079593 1.519097 -0.027011 +v -0.078310 1.521306 -0.026782 +v 0.040690 1.527910 0.047582 +v 0.039941 1.527889 0.045733 +v 0.038000 1.529200 0.049200 +v -0.061832 1.491418 -0.027890 +v -0.061767 1.490564 -0.029280 +v -0.064532 1.489040 -0.028225 +v -0.066803 1.495395 -0.026325 +v -0.062692 1.496102 -0.026989 +v 0.037203 1.528804 0.047236 +v 0.034542 1.533709 0.050445 +v 0.039499 1.533215 0.049975 +v 0.036807 1.528788 0.045465 +v 0.034347 1.526891 0.041805 +v 0.032772 1.526987 0.042325 +v 0.033546 1.528876 0.046426 +v -0.066004 1.492689 -0.029996 +v 0.023838 1.525682 0.047159 +v 0.024615 1.526505 0.047491 +v -0.066600 1.497700 -0.029000 +v -0.066899 1.498785 -0.027594 +v -0.069870 1.484581 -0.025568 +v -0.071628 
1.484108 -0.027495 +v -0.073099 1.484030 -0.024480 +v -0.062493 1.490133 -0.030449 +v -0.073605 1.507995 -0.032192 +v 0.028529 1.525780 0.042811 +v 0.028182 1.525373 0.042703 +v 0.022456 1.525747 0.047856 +v 0.023742 1.526545 0.048365 +v -0.069294 1.493493 -0.029380 +v -0.072510 1.491006 -0.030991 +v -0.072108 1.488909 -0.030290 +v -0.069088 1.491598 -0.029288 +v -0.082186 1.480509 -0.032108 +v -0.082500 1.482389 -0.031806 +v -0.075207 1.486410 -0.032571 +v -0.078600 1.486500 -0.030726 +v -0.074610 1.503003 -0.030429 +v -0.075710 1.506076 -0.029052 +v -0.072018 1.499535 -0.027245 +v -0.069600 1.495600 -0.029000 +v -0.072429 1.493400 -0.030779 +v -0.066996 1.496709 -0.026563 +v -0.070701 1.496697 -0.025915 +v -0.070496 1.497807 -0.026089 +v -0.067002 1.497703 -0.026813 +v -0.080210 1.513496 -0.034376 +v -0.077161 1.519075 -0.025362 +v -0.078600 1.516400 -0.026100 +v -0.078956 1.526993 -0.051414 +v -0.077945 1.518913 -0.056083 +v -0.077665 1.520494 -0.043294 +v -0.066104 1.494500 -0.029818 +v -0.063008 1.498217 -0.028195 +v -0.063488 1.497694 -0.027293 +v -0.087700 1.483800 -0.040300 +v -0.073996 1.483585 -0.024303 +v -0.073597 1.507916 -0.028454 +v -0.087091 1.491399 -0.048005 +v -0.086200 1.486200 -0.045800 +v -0.073000 1.509200 -0.028900 +v -0.073613 1.510299 -0.028418 +v -0.072679 1.484436 -0.028099 +v -0.074011 1.484984 -0.025493 +v -0.073800 1.510700 -0.028300 +v -0.074759 1.490612 -0.024153 +v -0.085521 1.513187 -0.039739 +v 0.027850 1.528251 0.047514 +v 0.029964 1.526652 0.042905 +v -0.078886 1.472181 -0.034028 +v -0.076900 1.470700 -0.031400 +v 0.029085 1.526235 0.042905 +v -0.063107 1.497087 -0.027034 +v 0.026190 1.527374 0.047403 +v -0.067048 1.488263 -0.026903 +v -0.066401 1.496204 -0.029511 +v 0.027281 1.528690 0.049325 +v 0.025103 1.527452 0.048740 +v 0.033744 1.529320 0.048332 +v -0.073083 1.494401 -0.025488 +v -0.072718 1.492295 -0.025142 +v -0.075021 1.492793 -0.024235 +v 0.021800 1.564300 -0.125112 +v 0.021390 1.549196 -0.128268 +v -0.080194 1.487032 -0.032232 +v -0.076200 1.488129 -0.034638 +v -0.077499 1.489024 -0.036690 +v -0.082102 1.487860 -0.033716 +v -0.084805 1.481507 -0.043287 +v -0.083016 1.477319 -0.039971 +v -0.085043 1.475936 -0.039699 +v -0.087297 1.480394 -0.043209 +v 0.041100 1.551200 -0.119600 +v 0.041000 1.564300 -0.117100 +v -0.079918 1.541308 -0.059991 +v -0.078910 1.532799 -0.044899 +v -0.080121 1.548101 -0.049100 +v -0.083314 1.521389 -0.044086 +v -0.083806 1.518997 -0.045690 +v -0.088991 1.520200 -0.048605 +v -0.076340 1.525030 -0.030066 +v -0.076931 1.522731 -0.033547 +v -0.077407 1.523933 -0.031651 +v -0.090305 1.496596 -0.050608 +v 0.039290 1.528051 0.044190 +v 0.035655 1.526552 0.041178 +v 0.051007 1.565829 -0.109698 +v 0.050153 1.574170 -0.107041 +v 0.041409 1.575703 -0.112304 +v 0.037050 1.439676 -0.079925 +v 0.026375 1.429290 -0.086093 +v -0.074900 1.508300 -0.023500 +v -0.076811 1.508900 -0.025803 +v 0.020443 1.525555 0.049023 +v -0.081302 1.482541 -0.042839 +v 0.022090 1.526874 0.049265 +v -0.082741 1.485784 -0.031869 +v -0.085088 1.486284 -0.033841 +v -0.088312 1.522894 -0.037393 +v -0.085094 1.521385 -0.034799 +v 0.019381 1.524425 0.048883 +v 0.018804 1.524338 0.049175 +v 0.019820 1.525679 0.049223 +v 0.021236 1.527039 0.049389 +v 0.024082 1.528063 0.049571 +v 0.023449 1.528355 0.049996 +v 0.026775 1.529813 0.050788 +v 0.026860 1.529186 0.050504 +v 0.034030 1.529944 0.050131 +v 0.034105 1.530682 0.050431 +v 0.026377 1.530509 0.050284 +v 0.022991 1.528811 0.049615 +v 0.025891 1.531275 0.049667 +v 0.022800 1.529700 0.049200 +v 0.033943 
[… several thousand Wavefront OBJ vertex records elided: this stretch of the diff continues the added head-mesh .obj asset, each record a "+v x y z" line; the list enumerates the +x half of the face, the matching vertices mirrored at -x, and a run of x = 0.000000 midline vertices …]
-0.057500 1.538800 0.036350 +v -0.058100 1.532550 0.034450 +v -0.032263 1.619648 -0.030163 +v -0.031406 1.512072 0.051113 +v -0.057750 1.526700 0.033300 +v -0.015698 1.620824 -0.062572 +v 0.011548 1.479216 0.070929 +v -0.052650 1.512350 0.039800 +v 0.007644 1.479541 0.073277 +v 0.003728 1.479322 0.074674 +v -0.032093 1.556898 0.055351 +v -0.035048 1.511533 0.050181 +v -0.009930 1.528293 0.056852 +v -0.017992 1.516836 0.052905 +v -0.014937 1.519669 0.053809 +v -0.031324 1.604084 0.022698 +v -0.012601 1.522311 0.054996 +v -0.010574 1.525230 0.056359 +v -0.026755 1.512927 0.051954 +v -0.028743 1.578935 0.049280 +v -0.031801 1.618118 -0.016442 +v -0.044633 1.510707 0.046380 +v 0.000000 1.622074 -0.065518 +v 0.000000 1.557883 0.062415 +v -0.011454 1.523753 0.055896 +v -0.015953 1.541608 0.060927 +v -0.023334 1.557278 0.058912 +v 0.000000 1.447907 0.071016 +v -0.026419 1.465952 0.063157 +v -0.027870 1.463869 0.061808 +v -0.027772 1.457800 0.060516 +v -0.019323 1.471062 0.068309 +v -0.023570 1.468321 0.065257 +v -0.018205 1.450607 0.066524 +v -0.028489 1.461876 0.060872 +v -0.012701 1.448969 0.069362 +v -0.028519 1.459779 0.060404 +v -0.006830 1.448110 0.070648 +v 0.000000 1.474497 0.074690 +v -0.009267 1.474752 0.073658 +v -0.004619 1.474962 0.074975 +v -0.014224 1.473445 0.071283 +v -0.026055 1.455533 0.061457 +v -0.023175 1.453006 0.063253 +v 0.004500 1.490438 0.084613 +v 0.003472 1.488993 0.082224 +v 0.016500 1.524050 0.049200 +v 0.008669 1.487703 0.072770 +v 0.007120 1.508654 0.076839 +v 0.015400 1.524250 0.049950 +v 0.010950 1.487850 0.071400 +v 0.007135 1.515470 0.071771 +v 0.009449 1.523427 0.059362 +v 0.006967 1.512062 0.074392 +v 0.016213 1.490290 0.071850 +v 0.014561 1.487764 0.070635 +v 0.007012 1.495925 0.086718 +v 0.007912 1.498518 0.085593 +v 0.007950 1.492950 0.085200 +v 0.005075 1.491535 0.085592 +v 0.017849 1.523713 0.049317 +v 0.027909 1.524762 0.042535 +v 0.006251 1.494112 0.086673 +v 0.003263 1.492383 0.087243 +v 0.020870 1.524355 0.046670 +v 0.017300 1.523850 0.049250 +v 0.012558 1.494457 0.080516 +v 0.007666 1.522431 0.063361 +v 0.015100 1.492888 0.076170 +v 0.018624 1.523754 0.049075 +v 0.019148 1.523864 0.048784 +v 0.007841 1.505078 0.079909 +v 0.023224 1.524491 0.046688 +v 0.003145 1.487404 0.077579 +v 0.010700 1.493800 0.082850 +v 0.009226 1.493297 0.084222 +v 0.003762 1.486831 0.075684 +v 0.013400 1.524050 0.052550 +v 0.007290 1.520479 0.066899 +v 0.007075 1.518086 0.069666 +v 0.003068 1.488157 0.079873 +v 0.008204 1.501659 0.083269 +v 0.006298 1.487370 0.074241 +v 0.005607 1.492667 0.086221 +v 0.029820 1.538011 0.053479 +v 0.029975 1.535460 0.051854 +v 0.022655 1.541291 0.058383 +v 0.004631 1.541666 0.063468 +v 0.009806 1.541712 0.062495 +v 0.030165 1.531058 0.050149 +v 0.030253 1.529212 0.048923 +v 0.029840 1.533259 0.050285 +v 0.030360 1.529572 0.050362 +v 0.030372 1.530239 0.050612 +v 0.029637 1.532503 0.049387 +v 0.029693 1.532256 0.049246 +v 0.027671 1.540519 0.055546 +v 0.029842 1.531837 0.049551 +v 0.030415 1.528777 0.047185 +v 0.031235 1.526917 0.042713 +v 0.021294 1.551703 0.061045 +v 0.052438 1.549986 0.041694 +v 0.055550 1.544750 0.039000 +v 0.047669 1.510613 0.044309 +v 0.050350 1.511400 0.042050 +v 0.018586 1.545772 0.062254 +v 0.026633 1.570326 0.053151 +v 0.031183 1.612253 0.006230 +v 0.025033 1.563535 0.056327 +v 0.039899 1.555892 0.051097 +v 0.031322 1.608404 0.015054 +v 0.047573 1.554241 0.045738 +v 0.055410 1.515906 0.035556 +v 0.006407 1.557738 0.062139 +v 0.057000 1.521250 0.033100 +v 0.014452 1.557484 0.060981 +v 0.027655 1.619202 -0.054491 +v 
0.013492 1.537749 0.059645 +v 0.038491 1.511269 0.049270 +v 0.041415 1.510904 0.048099 +v 0.031626 1.619606 -0.043643 +v 0.054250 1.513750 0.037600 +v 0.030425 1.587622 0.043038 +v 0.009981 1.531318 0.057230 +v 0.011297 1.534252 0.057544 +v 0.031087 1.593873 0.036455 +v 0.031324 1.599313 0.029619 +v 0.031546 1.615607 -0.004075 +v 0.022319 1.514517 0.052251 +v 0.057500 1.538800 0.036350 +v 0.058100 1.532550 0.034450 +v 0.032263 1.619648 -0.030163 +v 0.031406 1.512072 0.051113 +v 0.057750 1.526700 0.033300 +v 0.015698 1.620824 -0.062572 +v 0.052650 1.512350 0.039800 +v 0.032093 1.556898 0.055351 +v 0.035048 1.511533 0.050181 +v 0.009930 1.528293 0.056852 +v 0.017992 1.516836 0.052905 +v 0.014937 1.519669 0.053809 +v 0.031324 1.604084 0.022698 +v 0.012601 1.522311 0.054996 +v 0.010574 1.525230 0.056359 +v 0.026755 1.512927 0.051954 +v 0.028743 1.578935 0.049280 +v 0.031801 1.618118 -0.016442 +v 0.044633 1.510707 0.046380 +v 0.011454 1.523753 0.055896 +v 0.015953 1.541608 0.060927 +v 0.023334 1.557278 0.058912 +v 0.026419 1.465952 0.063157 +v 0.027870 1.463869 0.061808 +v 0.027772 1.457800 0.060516 +v 0.019323 1.471062 0.068309 +v 0.023570 1.468321 0.065257 +v 0.018205 1.450607 0.066524 +v 0.028489 1.461876 0.060872 +v 0.012701 1.448969 0.069362 +v 0.028519 1.459779 0.060404 +v 0.006830 1.448110 0.070648 +v 0.009267 1.474752 0.073658 +v 0.004619 1.474962 0.074975 +v 0.014224 1.473445 0.071283 +v 0.026055 1.455533 0.061457 +v 0.023175 1.453006 0.063253 +v 0.031451 1.525118 0.037401 +v -0.031451 1.525118 0.037401 +v 0.029824 1.525118 0.050617 +v 0.028260 1.525118 0.050142 +v 0.026818 1.525118 0.049372 +v 0.025609 1.525118 0.048376 +v 0.029856 1.525435 0.050617 +v 0.028321 1.525741 0.050142 +v 0.026907 1.526022 0.049372 +v 0.025722 1.526258 0.048376 +v 0.029948 1.525741 0.050617 +v 0.028503 1.526339 0.050142 +v 0.027171 1.526891 0.049372 +v 0.026054 1.527353 0.048376 +v 0.030098 1.526022 0.050617 +v 0.028798 1.526891 0.050142 +v 0.027599 1.527692 0.049372 +v 0.026594 1.528363 0.048376 +v 0.030301 1.526268 0.050617 +v 0.029195 1.527374 0.050142 +v 0.028175 1.528394 0.049372 +v 0.027320 1.529249 0.048376 +v 0.030547 1.526471 0.050617 +v 0.029678 1.527771 0.050142 +v 0.028877 1.528970 0.049372 +v 0.028206 1.529975 0.048376 +v 0.030828 1.526621 0.050617 +v 0.030230 1.528066 0.050142 +v 0.029678 1.529398 0.049372 +v 0.029216 1.530515 0.048376 +v 0.031134 1.526714 0.050617 +v 0.030828 1.528248 0.050142 +v 0.030547 1.529662 0.049372 +v 0.030311 1.530847 0.048376 +v 0.031451 1.526745 0.050617 +v 0.031451 1.528309 0.050142 +v 0.031451 1.529751 0.049372 +v 0.031451 1.530960 0.048376 +v 0.031768 1.526714 0.050617 +v 0.032074 1.528248 0.050142 +v 0.032355 1.529662 0.049372 +v 0.032591 1.530847 0.048376 +v 0.032074 1.526621 0.050617 +v 0.032672 1.528066 0.050142 +v 0.033224 1.529398 0.049372 +v 0.033686 1.530515 0.048376 +v 0.032355 1.526471 0.050617 +v 0.033224 1.527771 0.050142 +v 0.034025 1.528970 0.049372 +v 0.034696 1.529975 0.048376 +v 0.032601 1.526268 0.050617 +v 0.033707 1.527374 0.050142 +v 0.034727 1.528394 0.049372 +v 0.035582 1.529249 0.048376 +v 0.032804 1.526022 0.050617 +v 0.034104 1.526891 0.050142 +v 0.035303 1.527692 0.049372 +v 0.036308 1.528363 0.048376 +v 0.032954 1.525741 0.050617 +v 0.034399 1.526339 0.050142 +v 0.035731 1.526891 0.049372 +v 0.036848 1.527353 0.048376 +v 0.033047 1.525435 0.050617 +v 0.034581 1.525741 0.050142 +v 0.035995 1.526022 0.049372 +v 0.037180 1.526258 0.048376 +v 0.033078 1.525118 0.050617 +v 0.034642 1.525118 0.050142 +v 0.036084 1.525118 0.049372 +v 0.037293 
1.525118 0.048376 +v 0.033047 1.524801 0.050617 +v 0.034581 1.524495 0.050142 +v 0.035995 1.524214 0.049372 +v 0.037180 1.523978 0.048376 +v 0.032954 1.524495 0.050617 +v 0.034399 1.523897 0.050142 +v 0.035731 1.523345 0.049372 +v 0.036848 1.522883 0.048376 +v 0.032804 1.524214 0.050617 +v 0.034104 1.523345 0.050142 +v 0.035303 1.522544 0.049372 +v 0.036308 1.521873 0.048376 +v 0.032601 1.523968 0.050617 +v 0.033707 1.522862 0.050142 +v 0.034727 1.521842 0.049372 +v 0.035582 1.520987 0.048376 +v 0.032355 1.523765 0.050617 +v 0.033224 1.522465 0.050142 +v 0.034025 1.521266 0.049372 +v 0.034696 1.520261 0.048376 +v 0.032074 1.523615 0.050617 +v 0.032672 1.522170 0.050142 +v 0.033224 1.520838 0.049372 +v 0.033686 1.519721 0.048376 +v 0.031768 1.523522 0.050617 +v 0.032074 1.521988 0.050142 +v 0.032355 1.520574 0.049372 +v 0.032591 1.519389 0.048376 +v 0.031451 1.523491 0.050617 +v 0.031451 1.521927 0.050142 +v 0.031451 1.520485 0.049372 +v 0.031451 1.519276 0.048376 +v 0.031134 1.523522 0.050617 +v 0.030828 1.521988 0.050142 +v 0.030547 1.520574 0.049372 +v 0.030311 1.519389 0.048376 +v 0.030828 1.523615 0.050617 +v 0.030230 1.522170 0.050142 +v 0.029678 1.520838 0.049372 +v 0.029216 1.519721 0.048376 +v 0.030547 1.523765 0.050617 +v 0.029678 1.522465 0.050142 +v 0.028877 1.521266 0.049372 +v 0.028206 1.520261 0.048376 +v 0.030301 1.523968 0.050617 +v 0.029195 1.522862 0.050142 +v 0.028175 1.521842 0.049372 +v 0.027320 1.520987 0.048376 +v 0.030098 1.524214 0.050617 +v 0.028798 1.523345 0.050142 +v 0.027599 1.522544 0.049372 +v 0.026594 1.521873 0.048376 +v 0.031451 1.525118 0.050777 +v 0.029948 1.524495 0.050617 +v 0.028503 1.523897 0.050142 +v 0.027171 1.523345 0.049372 +v 0.026054 1.522883 0.048376 +v 0.029856 1.524801 0.050617 +v 0.028321 1.524495 0.050142 +v 0.026907 1.524214 0.049372 +v 0.025722 1.523978 0.048376 +v 0.024430 1.525118 0.047521 +v 0.022607 1.525118 0.046071 +v 0.021051 1.525118 0.044176 +v 0.019895 1.525118 0.042014 +v 0.019184 1.525118 0.039667 +v 0.018943 1.525118 0.037227 +v 0.019184 1.525118 0.034787 +v 0.019895 1.525118 0.032441 +v 0.021051 1.525118 0.030278 +v 0.022607 1.525118 0.028383 +v 0.024502 1.525118 0.026827 +v 0.026665 1.525118 0.025671 +v 0.029011 1.525118 0.024960 +v 0.024565 1.526488 0.047521 +v 0.022777 1.526843 0.046071 +v 0.021251 1.527147 0.044176 +v 0.020117 1.527372 0.042014 +v 0.019419 1.527511 0.039667 +v 0.019184 1.527558 0.037227 +v 0.019419 1.527511 0.034787 +v 0.020117 1.527372 0.032441 +v 0.021251 1.527147 0.030278 +v 0.022777 1.526843 0.028383 +v 0.024636 1.526474 0.026827 +v 0.026756 1.526052 0.025671 +v 0.029058 1.525594 0.024960 +v 0.024965 1.527805 0.047521 +v 0.023280 1.528503 0.046071 +v 0.021843 1.529098 0.044176 +v 0.020775 1.529540 0.042013 +v 0.020117 1.529813 0.039667 +v 0.019895 1.529905 0.037227 +v 0.020117 1.529813 0.034787 +v 0.020775 1.529540 0.032441 +v 0.021843 1.529098 0.030278 +v 0.023280 1.528503 0.028383 +v 0.025031 1.527777 0.026827 +v 0.027029 1.526950 0.025671 +v 0.029197 1.526052 0.024960 +v 0.025613 1.529019 0.047521 +v 0.024097 1.530032 0.046071 +v 0.022804 1.530896 0.044176 +v 0.021843 1.531538 0.042013 +v 0.021251 1.531933 0.039667 +v 0.021051 1.532067 0.037227 +v 0.021251 1.531933 0.034787 +v 0.021843 1.531538 0.032441 +v 0.022804 1.530896 0.030278 +v 0.024097 1.530032 0.028383 +v 0.025673 1.528979 0.026827 +v 0.027471 1.527777 0.025671 +v 0.029422 1.526474 0.024960 +v 0.026486 1.530083 0.047521 +v 0.025197 1.531372 0.046071 +v 0.024097 1.532472 0.044176 +v 0.023280 1.533289 0.042013 +v 0.022777 1.533792 
0.039667 +v 0.022607 1.533962 0.037227 +v 0.022777 1.533792 0.034787 +v 0.023280 1.533289 0.032441 +v 0.024097 1.532472 0.030278 +v 0.025197 1.531372 0.028383 +v 0.026537 1.530032 0.026827 +v 0.028066 1.528503 0.025671 +v 0.029726 1.526843 0.024960 +v 0.027550 1.530956 0.047521 +v 0.026537 1.532472 0.046071 +v 0.025673 1.533765 0.044176 +v 0.025031 1.534726 0.042013 +v 0.024636 1.535318 0.039667 +v 0.024502 1.535518 0.037227 +v 0.024636 1.535318 0.034787 +v 0.025031 1.534726 0.032441 +v 0.025673 1.533765 0.030278 +v 0.026537 1.532472 0.028383 +v 0.027590 1.530896 0.026827 +v 0.028792 1.529098 0.025671 +v 0.030095 1.527147 0.024960 +v 0.028764 1.531604 0.047521 +v 0.028066 1.533289 0.046071 +v 0.027471 1.534726 0.044176 +v 0.027029 1.535794 0.042013 +v 0.026756 1.536452 0.039667 +v 0.026665 1.536674 0.037227 +v 0.026756 1.536452 0.034787 +v 0.027029 1.535794 0.032441 +v 0.027471 1.534726 0.030278 +v 0.028066 1.533289 0.028383 +v 0.028792 1.531538 0.026827 +v 0.029619 1.529540 0.025671 +v 0.030517 1.527372 0.024960 +v 0.030081 1.532004 0.047521 +v 0.029726 1.533792 0.046071 +v 0.029422 1.535318 0.044176 +v 0.029197 1.536452 0.042013 +v 0.029058 1.537150 0.039667 +v 0.029011 1.537385 0.037227 +v 0.029058 1.537150 0.034787 +v 0.029197 1.536452 0.032441 +v 0.029422 1.535318 0.030278 +v 0.029726 1.533792 0.028383 +v 0.030095 1.531933 0.026827 +v 0.030517 1.529813 0.025671 +v 0.030975 1.527511 0.024960 +v 0.031451 1.532139 0.047521 +v 0.031451 1.533962 0.046071 +v 0.031451 1.535518 0.044176 +v 0.031451 1.536674 0.042013 +v 0.031451 1.537385 0.039667 +v 0.031451 1.537626 0.037227 +v 0.031451 1.537385 0.034787 +v 0.031451 1.536674 0.032441 +v 0.031451 1.535518 0.030278 +v 0.031451 1.533962 0.028383 +v 0.031451 1.532067 0.026827 +v 0.031451 1.529905 0.025671 +v 0.031451 1.527558 0.024960 +v 0.032821 1.532004 0.047521 +v 0.033176 1.533792 0.046071 +v 0.033480 1.535318 0.044176 +v 0.033705 1.536452 0.042013 +v 0.033844 1.537150 0.039667 +v 0.033891 1.537385 0.037227 +v 0.033844 1.537150 0.034787 +v 0.033705 1.536452 0.032441 +v 0.033480 1.535318 0.030278 +v 0.033176 1.533792 0.028383 +v 0.032807 1.531933 0.026827 +v 0.032385 1.529813 0.025671 +v 0.031927 1.527511 0.024960 +v 0.034138 1.531604 0.047521 +v 0.034836 1.533289 0.046071 +v 0.035431 1.534726 0.044176 +v 0.035873 1.535794 0.042013 +v 0.036146 1.536452 0.039667 +v 0.036238 1.536674 0.037227 +v 0.036146 1.536452 0.034787 +v 0.035873 1.535794 0.032441 +v 0.035431 1.534726 0.030278 +v 0.034836 1.533289 0.028383 +v 0.034110 1.531538 0.026827 +v 0.033283 1.529540 0.025671 +v 0.032385 1.527372 0.024960 +v 0.035352 1.530956 0.047521 +v 0.036365 1.532472 0.046071 +v 0.037229 1.533765 0.044176 +v 0.037871 1.534726 0.042013 +v 0.038266 1.535318 0.039667 +v 0.038400 1.535518 0.037227 +v 0.038266 1.535318 0.034787 +v 0.037871 1.534726 0.032441 +v 0.037229 1.533765 0.030278 +v 0.036365 1.532472 0.028383 +v 0.035312 1.530896 0.026827 +v 0.034110 1.529098 0.025671 +v 0.032807 1.527147 0.024960 +v 0.036416 1.530083 0.047521 +v 0.037705 1.531372 0.046071 +v 0.038805 1.532472 0.044176 +v 0.039622 1.533289 0.042013 +v 0.040125 1.533792 0.039667 +v 0.040295 1.533962 0.037227 +v 0.040125 1.533792 0.034787 +v 0.039622 1.533289 0.032441 +v 0.038805 1.532472 0.030278 +v 0.037705 1.531372 0.028383 +v 0.036365 1.530032 0.026827 +v 0.034836 1.528503 0.025671 +v 0.033176 1.526843 0.024960 +v 0.037289 1.529019 0.047521 +v 0.038805 1.530032 0.046071 +v 0.040098 1.530896 0.044176 +v 0.041059 1.531538 0.042013 +v 0.041651 1.531933 0.039667 +v 0.041851 1.532067 0.037227 +v 
0.041651 1.531933 0.034787 +v 0.041059 1.531538 0.032441 +v 0.040098 1.530896 0.030278 +v 0.038805 1.530032 0.028383 +v 0.037229 1.528979 0.026827 +v 0.035431 1.527777 0.025671 +v 0.033480 1.526474 0.024960 +v 0.037937 1.527805 0.047521 +v 0.039622 1.528503 0.046071 +v 0.041059 1.529098 0.044176 +v 0.042127 1.529540 0.042013 +v 0.042785 1.529813 0.039667 +v 0.043007 1.529904 0.037227 +v 0.042785 1.529813 0.034787 +v 0.042127 1.529540 0.032441 +v 0.041059 1.529098 0.030278 +v 0.039622 1.528503 0.028383 +v 0.037871 1.527777 0.026827 +v 0.035873 1.526950 0.025671 +v 0.033705 1.526052 0.024960 +v 0.038337 1.526488 0.047521 +v 0.040125 1.526843 0.046071 +v 0.041651 1.527147 0.044176 +v 0.042785 1.527372 0.042014 +v 0.043483 1.527511 0.039667 +v 0.043718 1.527558 0.037227 +v 0.043483 1.527511 0.034787 +v 0.042785 1.527372 0.032441 +v 0.041651 1.527147 0.030278 +v 0.040125 1.526843 0.028383 +v 0.038266 1.526474 0.026827 +v 0.036146 1.526052 0.025671 +v 0.033844 1.525594 0.024960 +v 0.038472 1.525118 0.047521 +v 0.040295 1.525118 0.046071 +v 0.041851 1.525118 0.044176 +v 0.043007 1.525118 0.042014 +v 0.043718 1.525118 0.039667 +v 0.043959 1.525118 0.037227 +v 0.043718 1.525118 0.034787 +v 0.043007 1.525118 0.032441 +v 0.041851 1.525118 0.030278 +v 0.040295 1.525118 0.028383 +v 0.038400 1.525118 0.026827 +v 0.036238 1.525118 0.025671 +v 0.033891 1.525118 0.024960 +v 0.038337 1.523748 0.047521 +v 0.040125 1.523393 0.046071 +v 0.041651 1.523089 0.044176 +v 0.042785 1.522864 0.042014 +v 0.043483 1.522725 0.039667 +v 0.043718 1.522678 0.037227 +v 0.043483 1.522725 0.034787 +v 0.042785 1.522864 0.032441 +v 0.041651 1.523089 0.030278 +v 0.040125 1.523393 0.028383 +v 0.038266 1.523762 0.026827 +v 0.036146 1.524184 0.025671 +v 0.033844 1.524642 0.024960 +v 0.037937 1.522431 0.047521 +v 0.039622 1.521733 0.046071 +v 0.041059 1.521138 0.044176 +v 0.042127 1.520696 0.042014 +v 0.042785 1.520423 0.039667 +v 0.043007 1.520331 0.037227 +v 0.042785 1.520423 0.034787 +v 0.042127 1.520696 0.032441 +v 0.041059 1.521138 0.030278 +v 0.039622 1.521733 0.028383 +v 0.037871 1.522459 0.026827 +v 0.035873 1.523286 0.025671 +v 0.033705 1.524184 0.024960 +v 0.037289 1.521217 0.047521 +v 0.038805 1.520204 0.046071 +v 0.040098 1.519340 0.044176 +v 0.041059 1.518698 0.042014 +v 0.041651 1.518303 0.039667 +v 0.041851 1.518169 0.037227 +v 0.041651 1.518303 0.034787 +v 0.041059 1.518698 0.032441 +v 0.040098 1.519340 0.030278 +v 0.038805 1.520204 0.028383 +v 0.037229 1.521257 0.026827 +v 0.035431 1.522459 0.025671 +v 0.033480 1.523762 0.024960 +v 0.036416 1.520153 0.047521 +v 0.037705 1.518864 0.046071 +v 0.038805 1.517764 0.044176 +v 0.039622 1.516947 0.042014 +v 0.040125 1.516444 0.039667 +v 0.040295 1.516274 0.037227 +v 0.040125 1.516444 0.034787 +v 0.039622 1.516947 0.032441 +v 0.038805 1.517764 0.030278 +v 0.037705 1.518864 0.028383 +v 0.036365 1.520204 0.026827 +v 0.034836 1.521733 0.025671 +v 0.033176 1.523393 0.024960 +v 0.035352 1.519280 0.047521 +v 0.036365 1.517764 0.046071 +v 0.037229 1.516471 0.044176 +v 0.037871 1.515510 0.042014 +v 0.038266 1.514918 0.039667 +v 0.038400 1.514718 0.037227 +v 0.038266 1.514918 0.034787 +v 0.037871 1.515510 0.032441 +v 0.037229 1.516471 0.030278 +v 0.036365 1.517764 0.028383 +v 0.035312 1.519340 0.026827 +v 0.034110 1.521138 0.025671 +v 0.032807 1.523089 0.024960 +v 0.031451 1.525118 0.024719 +v 0.034138 1.518632 0.047521 +v 0.034836 1.516947 0.046071 +v 0.035431 1.515510 0.044176 +v 0.035873 1.514442 0.042014 +v 0.036146 1.513784 0.039667 +v 0.036238 1.513562 0.037227 +v 0.036146 
1.513784 0.034787 +v 0.035873 1.514442 0.032441 +v 0.035431 1.515510 0.030278 +v 0.034836 1.516947 0.028383 +v 0.034110 1.518698 0.026827 +v 0.033283 1.520696 0.025671 +v 0.032385 1.522864 0.024960 +v 0.032821 1.518232 0.047521 +v 0.033176 1.516444 0.046071 +v 0.033480 1.514918 0.044176 +v 0.033705 1.513784 0.042014 +v 0.033844 1.513086 0.039667 +v 0.033891 1.512851 0.037227 +v 0.033844 1.513086 0.034787 +v 0.033705 1.513784 0.032441 +v 0.033480 1.514918 0.030278 +v 0.033176 1.516444 0.028383 +v 0.032807 1.518303 0.026827 +v 0.032385 1.520423 0.025671 +v 0.031927 1.522725 0.024960 +v 0.031451 1.518097 0.047521 +v 0.031451 1.516274 0.046071 +v 0.031451 1.514718 0.044176 +v 0.031451 1.513562 0.042014 +v 0.031451 1.512851 0.039667 +v 0.031451 1.512610 0.037227 +v 0.031451 1.512851 0.034787 +v 0.031451 1.513562 0.032441 +v 0.031451 1.514718 0.030278 +v 0.031451 1.516274 0.028383 +v 0.031451 1.518169 0.026827 +v 0.031451 1.520332 0.025671 +v 0.031451 1.522678 0.024960 +v 0.030081 1.518232 0.047521 +v 0.029726 1.516444 0.046071 +v 0.029422 1.514918 0.044176 +v 0.029197 1.513784 0.042014 +v 0.029058 1.513086 0.039667 +v 0.029011 1.512851 0.037227 +v 0.029058 1.513086 0.034787 +v 0.029197 1.513784 0.032441 +v 0.029422 1.514918 0.030278 +v 0.029726 1.516444 0.028383 +v 0.030095 1.518303 0.026827 +v 0.030517 1.520423 0.025671 +v 0.030975 1.522725 0.024960 +v 0.028764 1.518632 0.047521 +v 0.028066 1.516947 0.046071 +v 0.027471 1.515510 0.044176 +v 0.027029 1.514442 0.042014 +v 0.026756 1.513784 0.039667 +v 0.026665 1.513562 0.037227 +v 0.026756 1.513784 0.034787 +v 0.027029 1.514442 0.032441 +v 0.027471 1.515510 0.030278 +v 0.028066 1.516947 0.028383 +v 0.028792 1.518698 0.026827 +v 0.029619 1.520696 0.025671 +v 0.030517 1.522864 0.024960 +v 0.027550 1.519280 0.047521 +v 0.026537 1.517764 0.046071 +v 0.025673 1.516471 0.044176 +v 0.025031 1.515510 0.042014 +v 0.024636 1.514918 0.039667 +v 0.024502 1.514718 0.037227 +v 0.024636 1.514918 0.034787 +v 0.025031 1.515510 0.032441 +v 0.025673 1.516471 0.030278 +v 0.026537 1.517764 0.028383 +v 0.027590 1.519340 0.026827 +v 0.028792 1.521138 0.025671 +v 0.030095 1.523089 0.024960 +v 0.026486 1.520153 0.047521 +v 0.025197 1.518864 0.046071 +v 0.024097 1.517764 0.044176 +v 0.023280 1.516947 0.042014 +v 0.022777 1.516444 0.039667 +v 0.022607 1.516274 0.037227 +v 0.022777 1.516444 0.034787 +v 0.023280 1.516947 0.032441 +v 0.024097 1.517764 0.030278 +v 0.025197 1.518864 0.028383 +v 0.026537 1.520204 0.026827 +v 0.028066 1.521733 0.025671 +v 0.029726 1.523393 0.024960 +v 0.025613 1.521217 0.047521 +v 0.024097 1.520204 0.046071 +v 0.022804 1.519340 0.044176 +v 0.021843 1.518698 0.042014 +v 0.021251 1.518303 0.039667 +v 0.021051 1.518169 0.037227 +v 0.021251 1.518303 0.034787 +v 0.021843 1.518698 0.032441 +v 0.022804 1.519340 0.030278 +v 0.024097 1.520204 0.028383 +v 0.025673 1.521257 0.026827 +v 0.027471 1.522459 0.025671 +v 0.029422 1.523762 0.024960 +v 0.024965 1.522431 0.047521 +v 0.023280 1.521733 0.046071 +v 0.021843 1.521138 0.044176 +v 0.020775 1.520696 0.042014 +v 0.020117 1.520423 0.039667 +v 0.019895 1.520332 0.037227 +v 0.020117 1.520423 0.034787 +v 0.020775 1.520696 0.032441 +v 0.021843 1.521138 0.030278 +v 0.023280 1.521733 0.028383 +v 0.025031 1.522459 0.026827 +v 0.027029 1.523286 0.025671 +v 0.029197 1.524184 0.024960 +v 0.024565 1.523748 0.047521 +v 0.022777 1.523393 0.046071 +v 0.021251 1.523089 0.044176 +v 0.020117 1.522864 0.042014 +v 0.019419 1.522725 0.039667 +v 0.019184 1.522678 0.037227 +v 0.019419 1.522725 0.034787 +v 0.020117 1.522864 
0.032441 +v 0.021251 1.523089 0.030278 +v 0.022777 1.523393 0.028383 +v 0.024636 1.523762 0.026827 +v 0.026756 1.524184 0.025671 +v 0.029058 1.524642 0.024960 +v -0.033078 1.525118 0.050617 +v -0.034642 1.525118 0.050142 +v -0.036084 1.525118 0.049372 +v -0.037293 1.525118 0.048376 +v -0.033047 1.525435 0.050617 +v -0.034581 1.525741 0.050142 +v -0.035995 1.526022 0.049372 +v -0.037180 1.526258 0.048376 +v -0.032954 1.525741 0.050617 +v -0.034399 1.526339 0.050142 +v -0.035731 1.526891 0.049372 +v -0.036848 1.527353 0.048376 +v -0.032804 1.526022 0.050617 +v -0.034104 1.526891 0.050142 +v -0.035303 1.527692 0.049372 +v -0.036308 1.528363 0.048376 +v -0.032601 1.526268 0.050617 +v -0.033707 1.527374 0.050142 +v -0.034727 1.528394 0.049372 +v -0.035582 1.529249 0.048376 +v -0.032355 1.526471 0.050617 +v -0.033224 1.527771 0.050142 +v -0.034025 1.528970 0.049372 +v -0.034696 1.529975 0.048376 +v -0.032074 1.526621 0.050617 +v -0.032672 1.528066 0.050142 +v -0.033224 1.529398 0.049372 +v -0.033686 1.530515 0.048376 +v -0.031768 1.526714 0.050617 +v -0.032074 1.528248 0.050142 +v -0.032355 1.529662 0.049372 +v -0.032591 1.530847 0.048376 +v -0.031451 1.526745 0.050617 +v -0.031451 1.528309 0.050142 +v -0.031451 1.529751 0.049372 +v -0.031451 1.530960 0.048376 +v -0.031134 1.526714 0.050617 +v -0.030828 1.528248 0.050142 +v -0.030547 1.529662 0.049372 +v -0.030311 1.530847 0.048376 +v -0.030828 1.526621 0.050617 +v -0.030230 1.528066 0.050142 +v -0.029678 1.529398 0.049372 +v -0.029216 1.530515 0.048376 +v -0.030547 1.526471 0.050617 +v -0.029678 1.527771 0.050142 +v -0.028877 1.528970 0.049372 +v -0.028206 1.529975 0.048376 +v -0.030301 1.526268 0.050617 +v -0.029195 1.527374 0.050142 +v -0.028175 1.528394 0.049372 +v -0.027320 1.529249 0.048376 +v -0.030098 1.526022 0.050617 +v -0.028798 1.526891 0.050142 +v -0.027599 1.527692 0.049372 +v -0.026594 1.528363 0.048376 +v -0.029948 1.525741 0.050617 +v -0.028503 1.526339 0.050142 +v -0.027171 1.526891 0.049372 +v -0.026054 1.527353 0.048376 +v -0.029856 1.525435 0.050617 +v -0.028321 1.525741 0.050142 +v -0.026907 1.526022 0.049372 +v -0.025722 1.526258 0.048376 +v -0.029824 1.525118 0.050617 +v -0.028260 1.525118 0.050142 +v -0.026818 1.525118 0.049372 +v -0.025609 1.525118 0.048376 +v -0.029856 1.524801 0.050617 +v -0.028321 1.524495 0.050142 +v -0.026907 1.524214 0.049372 +v -0.025722 1.523978 0.048376 +v -0.029948 1.524495 0.050617 +v -0.028503 1.523897 0.050142 +v -0.027171 1.523345 0.049372 +v -0.026054 1.522883 0.048376 +v -0.030098 1.524214 0.050617 +v -0.028798 1.523345 0.050142 +v -0.027599 1.522544 0.049372 +v -0.026594 1.521873 0.048376 +v -0.030301 1.523968 0.050617 +v -0.029195 1.522862 0.050142 +v -0.028175 1.521842 0.049372 +v -0.027320 1.520987 0.048376 +v -0.030547 1.523765 0.050617 +v -0.029678 1.522465 0.050142 +v -0.028877 1.521266 0.049372 +v -0.028206 1.520261 0.048376 +v -0.030828 1.523615 0.050617 +v -0.030230 1.522170 0.050142 +v -0.029678 1.520838 0.049372 +v -0.029216 1.519721 0.048376 +v -0.031134 1.523522 0.050617 +v -0.030828 1.521988 0.050142 +v -0.030547 1.520574 0.049372 +v -0.030311 1.519389 0.048376 +v -0.031451 1.523491 0.050617 +v -0.031451 1.521927 0.050142 +v -0.031451 1.520485 0.049372 +v -0.031451 1.519276 0.048376 +v -0.031768 1.523522 0.050617 +v -0.032074 1.521988 0.050142 +v -0.032355 1.520574 0.049372 +v -0.032591 1.519389 0.048376 +v -0.032074 1.523615 0.050617 +v -0.032672 1.522170 0.050142 +v -0.033224 1.520838 0.049372 +v -0.033686 1.519721 0.048376 +v -0.032355 1.523765 0.050617 +v -0.033224 
1.522465 0.050142 +v -0.034025 1.521266 0.049372 +v -0.034696 1.520261 0.048376 +v -0.032601 1.523968 0.050617 +v -0.033707 1.522862 0.050142 +v -0.034727 1.521842 0.049372 +v -0.035582 1.520987 0.048376 +v -0.032804 1.524214 0.050617 +v -0.034104 1.523345 0.050142 +v -0.035303 1.522544 0.049372 +v -0.036308 1.521873 0.048376 +v -0.031451 1.525118 0.050777 +v -0.032954 1.524495 0.050617 +v -0.034399 1.523897 0.050142 +v -0.035731 1.523345 0.049372 +v -0.036848 1.522883 0.048376 +v -0.033047 1.524801 0.050617 +v -0.034581 1.524495 0.050142 +v -0.035995 1.524214 0.049372 +v -0.037180 1.523978 0.048376 +v -0.038472 1.525118 0.047521 +v -0.040295 1.525118 0.046071 +v -0.041851 1.525118 0.044176 +v -0.043007 1.525118 0.042014 +v -0.043718 1.525118 0.039667 +v -0.043959 1.525118 0.037227 +v -0.043718 1.525118 0.034787 +v -0.043007 1.525118 0.032441 +v -0.041851 1.525118 0.030278 +v -0.040295 1.525118 0.028383 +v -0.038400 1.525118 0.026827 +v -0.036238 1.525118 0.025671 +v -0.033891 1.525118 0.024960 +v -0.038337 1.526488 0.047521 +v -0.040125 1.526843 0.046071 +v -0.041651 1.527147 0.044176 +v -0.042785 1.527372 0.042014 +v -0.043483 1.527511 0.039667 +v -0.043718 1.527558 0.037227 +v -0.043483 1.527511 0.034787 +v -0.042785 1.527372 0.032441 +v -0.041651 1.527147 0.030278 +v -0.040125 1.526843 0.028383 +v -0.038266 1.526474 0.026827 +v -0.036146 1.526052 0.025671 +v -0.033844 1.525594 0.024960 +v -0.037937 1.527805 0.047521 +v -0.039622 1.528503 0.046071 +v -0.041059 1.529098 0.044176 +v -0.042127 1.529540 0.042013 +v -0.042785 1.529813 0.039667 +v -0.043007 1.529904 0.037227 +v -0.042785 1.529813 0.034787 +v -0.042127 1.529540 0.032441 +v -0.041059 1.529098 0.030278 +v -0.039622 1.528503 0.028383 +v -0.037871 1.527777 0.026827 +v -0.035873 1.526950 0.025671 +v -0.033705 1.526052 0.024960 +v -0.037289 1.529019 0.047521 +v -0.038805 1.530032 0.046071 +v -0.040098 1.530896 0.044176 +v -0.041059 1.531538 0.042013 +v -0.041651 1.531933 0.039667 +v -0.041851 1.532067 0.037227 +v -0.041651 1.531933 0.034787 +v -0.041059 1.531538 0.032441 +v -0.040098 1.530896 0.030278 +v -0.038805 1.530032 0.028383 +v -0.037229 1.528979 0.026827 +v -0.035431 1.527777 0.025671 +v -0.033480 1.526474 0.024960 +v -0.036416 1.530083 0.047521 +v -0.037705 1.531372 0.046071 +v -0.038805 1.532472 0.044176 +v -0.039622 1.533289 0.042013 +v -0.040125 1.533792 0.039667 +v -0.040295 1.533962 0.037227 +v -0.040125 1.533792 0.034787 +v -0.039622 1.533289 0.032441 +v -0.038805 1.532472 0.030278 +v -0.037705 1.531372 0.028383 +v -0.036365 1.530032 0.026827 +v -0.034836 1.528503 0.025671 +v -0.033176 1.526843 0.024960 +v -0.035352 1.530956 0.047521 +v -0.036365 1.532472 0.046071 +v -0.037229 1.533765 0.044176 +v -0.037871 1.534726 0.042013 +v -0.038266 1.535318 0.039667 +v -0.038400 1.535518 0.037227 +v -0.038266 1.535318 0.034787 +v -0.037871 1.534726 0.032441 +v -0.037229 1.533765 0.030278 +v -0.036365 1.532472 0.028383 +v -0.035312 1.530896 0.026827 +v -0.034110 1.529098 0.025671 +v -0.032807 1.527147 0.024960 +v -0.034138 1.531604 0.047521 +v -0.034836 1.533289 0.046071 +v -0.035431 1.534726 0.044176 +v -0.035873 1.535794 0.042013 +v -0.036146 1.536452 0.039667 +v -0.036238 1.536674 0.037227 +v -0.036146 1.536452 0.034787 +v -0.035873 1.535794 0.032441 +v -0.035431 1.534726 0.030278 +v -0.034836 1.533289 0.028383 +v -0.034110 1.531538 0.026827 +v -0.033283 1.529540 0.025671 +v -0.032385 1.527372 0.024960 +v -0.032821 1.532004 0.047521 +v -0.033176 1.533792 0.046071 +v -0.033480 1.535318 0.044176 +v -0.033705 1.536452 0.042013 +v 
-0.033844 1.537150 0.039667 +v -0.033891 1.537385 0.037227 +v -0.033844 1.537150 0.034787 +v -0.033705 1.536452 0.032441 +v -0.033480 1.535318 0.030278 +v -0.033176 1.533792 0.028383 +v -0.032807 1.531933 0.026827 +v -0.032385 1.529813 0.025671 +v -0.031927 1.527511 0.024960 +v -0.031451 1.532139 0.047521 +v -0.031451 1.533962 0.046071 +v -0.031451 1.535518 0.044176 +v -0.031451 1.536674 0.042013 +v -0.031451 1.537385 0.039667 +v -0.031451 1.537626 0.037227 +v -0.031451 1.537385 0.034787 +v -0.031451 1.536674 0.032441 +v -0.031451 1.535518 0.030278 +v -0.031451 1.533962 0.028383 +v -0.031451 1.532067 0.026827 +v -0.031451 1.529905 0.025671 +v -0.031451 1.527558 0.024960 +v -0.030081 1.532004 0.047521 +v -0.029726 1.533792 0.046071 +v -0.029422 1.535318 0.044176 +v -0.029197 1.536452 0.042013 +v -0.029058 1.537150 0.039667 +v -0.029011 1.537385 0.037227 +v -0.029058 1.537150 0.034787 +v -0.029197 1.536452 0.032441 +v -0.029422 1.535318 0.030278 +v -0.029726 1.533792 0.028383 +v -0.030095 1.531933 0.026827 +v -0.030517 1.529813 0.025671 +v -0.030975 1.527511 0.024960 +v -0.028764 1.531604 0.047521 +v -0.028066 1.533289 0.046071 +v -0.027471 1.534726 0.044176 +v -0.027029 1.535794 0.042013 +v -0.026756 1.536452 0.039667 +v -0.026665 1.536674 0.037227 +v -0.026756 1.536452 0.034787 +v -0.027029 1.535794 0.032441 +v -0.027471 1.534726 0.030278 +v -0.028066 1.533289 0.028383 +v -0.028792 1.531538 0.026827 +v -0.029619 1.529540 0.025671 +v -0.030517 1.527372 0.024960 +v -0.027550 1.530956 0.047521 +v -0.026537 1.532472 0.046071 +v -0.025673 1.533765 0.044176 +v -0.025031 1.534726 0.042013 +v -0.024636 1.535318 0.039667 +v -0.024502 1.535518 0.037227 +v -0.024636 1.535318 0.034787 +v -0.025031 1.534726 0.032441 +v -0.025673 1.533765 0.030278 +v -0.026537 1.532472 0.028383 +v -0.027590 1.530896 0.026827 +v -0.028792 1.529098 0.025671 +v -0.030095 1.527147 0.024960 +v -0.026486 1.530083 0.047521 +v -0.025197 1.531372 0.046071 +v -0.024097 1.532472 0.044176 +v -0.023280 1.533289 0.042013 +v -0.022777 1.533792 0.039667 +v -0.022607 1.533962 0.037227 +v -0.022777 1.533792 0.034787 +v -0.023280 1.533289 0.032441 +v -0.024097 1.532472 0.030278 +v -0.025197 1.531372 0.028383 +v -0.026537 1.530032 0.026827 +v -0.028066 1.528503 0.025671 +v -0.029726 1.526843 0.024960 +v -0.025613 1.529019 0.047521 +v -0.024097 1.530032 0.046071 +v -0.022804 1.530896 0.044176 +v -0.021843 1.531538 0.042013 +v -0.021251 1.531933 0.039667 +v -0.021051 1.532067 0.037227 +v -0.021251 1.531933 0.034787 +v -0.021843 1.531538 0.032441 +v -0.022804 1.530896 0.030278 +v -0.024097 1.530032 0.028383 +v -0.025673 1.528979 0.026827 +v -0.027471 1.527777 0.025671 +v -0.029422 1.526474 0.024960 +v -0.024965 1.527805 0.047521 +v -0.023280 1.528503 0.046071 +v -0.021843 1.529098 0.044176 +v -0.020775 1.529540 0.042013 +v -0.020117 1.529813 0.039667 +v -0.019895 1.529905 0.037227 +v -0.020117 1.529813 0.034787 +v -0.020775 1.529540 0.032441 +v -0.021843 1.529098 0.030278 +v -0.023280 1.528503 0.028383 +v -0.025031 1.527777 0.026827 +v -0.027029 1.526950 0.025671 +v -0.029197 1.526052 0.024960 +v -0.024565 1.526488 0.047521 +v -0.022777 1.526843 0.046071 +v -0.021251 1.527147 0.044176 +v -0.020117 1.527372 0.042014 +v -0.019419 1.527511 0.039667 +v -0.019184 1.527558 0.037227 +v -0.019419 1.527511 0.034787 +v -0.020117 1.527372 0.032441 +v -0.021251 1.527147 0.030278 +v -0.022777 1.526843 0.028383 +v -0.024636 1.526474 0.026827 +v -0.026756 1.526052 0.025671 +v -0.029058 1.525594 0.024960 +v -0.024430 1.525118 0.047521 +v -0.022607 1.525118 
0.046071 +v -0.021051 1.525118 0.044176 +v -0.019895 1.525118 0.042014 +v -0.019184 1.525118 0.039667 +v -0.018943 1.525118 0.037227 +v -0.019184 1.525118 0.034787 +v -0.019895 1.525118 0.032441 +v -0.021051 1.525118 0.030278 +v -0.022607 1.525118 0.028383 +v -0.024502 1.525118 0.026827 +v -0.026665 1.525118 0.025671 +v -0.029011 1.525118 0.024960 +v -0.024565 1.523748 0.047521 +v -0.022777 1.523393 0.046071 +v -0.021251 1.523089 0.044176 +v -0.020117 1.522864 0.042014 +v -0.019419 1.522725 0.039667 +v -0.019184 1.522678 0.037227 +v -0.019419 1.522725 0.034787 +v -0.020117 1.522864 0.032441 +v -0.021251 1.523089 0.030278 +v -0.022777 1.523393 0.028383 +v -0.024636 1.523762 0.026827 +v -0.026756 1.524184 0.025671 +v -0.029058 1.524642 0.024960 +v -0.024965 1.522431 0.047521 +v -0.023280 1.521733 0.046071 +v -0.021843 1.521138 0.044176 +v -0.020775 1.520696 0.042014 +v -0.020117 1.520423 0.039667 +v -0.019895 1.520332 0.037227 +v -0.020117 1.520423 0.034787 +v -0.020775 1.520696 0.032441 +v -0.021843 1.521138 0.030278 +v -0.023280 1.521733 0.028383 +v -0.025031 1.522459 0.026827 +v -0.027029 1.523286 0.025671 +v -0.029197 1.524184 0.024960 +v -0.025613 1.521217 0.047521 +v -0.024097 1.520204 0.046071 +v -0.022804 1.519340 0.044176 +v -0.021843 1.518698 0.042014 +v -0.021251 1.518303 0.039667 +v -0.021051 1.518169 0.037227 +v -0.021251 1.518303 0.034787 +v -0.021843 1.518698 0.032441 +v -0.022804 1.519340 0.030278 +v -0.024097 1.520204 0.028383 +v -0.025673 1.521257 0.026827 +v -0.027471 1.522459 0.025671 +v -0.029422 1.523762 0.024960 +v -0.026486 1.520153 0.047521 +v -0.025197 1.518864 0.046071 +v -0.024097 1.517764 0.044176 +v -0.023280 1.516947 0.042014 +v -0.022777 1.516444 0.039667 +v -0.022607 1.516274 0.037227 +v -0.022777 1.516444 0.034787 +v -0.023280 1.516947 0.032441 +v -0.024097 1.517764 0.030278 +v -0.025197 1.518864 0.028383 +v -0.026537 1.520204 0.026827 +v -0.028066 1.521733 0.025671 +v -0.029726 1.523393 0.024960 +v -0.027550 1.519280 0.047521 +v -0.026537 1.517764 0.046071 +v -0.025673 1.516471 0.044176 +v -0.025031 1.515510 0.042014 +v -0.024636 1.514918 0.039667 +v -0.024502 1.514718 0.037227 +v -0.024636 1.514918 0.034787 +v -0.025031 1.515510 0.032441 +v -0.025673 1.516471 0.030278 +v -0.026537 1.517764 0.028383 +v -0.027590 1.519340 0.026827 +v -0.028792 1.521138 0.025671 +v -0.030095 1.523089 0.024960 +v -0.031451 1.525118 0.024719 +v -0.028764 1.518632 0.047521 +v -0.028066 1.516947 0.046071 +v -0.027471 1.515510 0.044176 +v -0.027029 1.514442 0.042014 +v -0.026756 1.513784 0.039667 +v -0.026665 1.513562 0.037227 +v -0.026756 1.513784 0.034787 +v -0.027029 1.514442 0.032441 +v -0.027471 1.515510 0.030278 +v -0.028066 1.516947 0.028383 +v -0.028792 1.518698 0.026827 +v -0.029619 1.520696 0.025671 +v -0.030517 1.522864 0.024960 +v -0.030081 1.518232 0.047521 +v -0.029726 1.516444 0.046071 +v -0.029422 1.514918 0.044176 +v -0.029197 1.513784 0.042014 +v -0.029058 1.513086 0.039667 +v -0.029011 1.512851 0.037227 +v -0.029058 1.513086 0.034787 +v -0.029197 1.513784 0.032441 +v -0.029422 1.514918 0.030278 +v -0.029726 1.516444 0.028383 +v -0.030095 1.518303 0.026827 +v -0.030517 1.520423 0.025671 +v -0.030975 1.522725 0.024960 +v -0.031451 1.518097 0.047521 +v -0.031451 1.516274 0.046071 +v -0.031451 1.514718 0.044176 +v -0.031451 1.513562 0.042014 +v -0.031451 1.512851 0.039667 +v -0.031451 1.512610 0.037227 +v -0.031451 1.512851 0.034787 +v -0.031451 1.513562 0.032441 +v -0.031451 1.514718 0.030278 +v -0.031451 1.516274 0.028383 +v -0.031451 1.518169 0.026827 +v 
-0.031451 1.520332 0.025671 +v -0.031451 1.522678 0.024960 +v -0.032821 1.518232 0.047521 +v -0.033176 1.516444 0.046071 +v -0.033480 1.514918 0.044176 +v -0.033705 1.513784 0.042014 +v -0.033844 1.513086 0.039667 +v -0.033891 1.512851 0.037227 +v -0.033844 1.513086 0.034787 +v -0.033705 1.513784 0.032441 +v -0.033480 1.514918 0.030278 +v -0.033176 1.516444 0.028383 +v -0.032807 1.518303 0.026827 +v -0.032385 1.520423 0.025671 +v -0.031927 1.522725 0.024960 +v -0.034138 1.518632 0.047521 +v -0.034836 1.516947 0.046071 +v -0.035431 1.515510 0.044176 +v -0.035873 1.514442 0.042014 +v -0.036146 1.513784 0.039667 +v -0.036238 1.513562 0.037227 +v -0.036146 1.513784 0.034787 +v -0.035873 1.514442 0.032441 +v -0.035431 1.515510 0.030278 +v -0.034836 1.516947 0.028383 +v -0.034110 1.518698 0.026827 +v -0.033283 1.520696 0.025671 +v -0.032385 1.522864 0.024960 +v -0.035352 1.519280 0.047521 +v -0.036365 1.517764 0.046071 +v -0.037229 1.516471 0.044176 +v -0.037871 1.515510 0.042014 +v -0.038266 1.514918 0.039667 +v -0.038400 1.514718 0.037227 +v -0.038266 1.514918 0.034787 +v -0.037871 1.515510 0.032441 +v -0.037229 1.516471 0.030278 +v -0.036365 1.517764 0.028383 +v -0.035312 1.519340 0.026827 +v -0.034110 1.521138 0.025671 +v -0.032807 1.523089 0.024960 +v -0.036416 1.520153 0.047521 +v -0.037705 1.518864 0.046071 +v -0.038805 1.517764 0.044176 +v -0.039622 1.516947 0.042014 +v -0.040125 1.516444 0.039667 +v -0.040295 1.516274 0.037227 +v -0.040125 1.516444 0.034787 +v -0.039622 1.516947 0.032441 +v -0.038805 1.517764 0.030278 +v -0.037705 1.518864 0.028383 +v -0.036365 1.520204 0.026827 +v -0.034836 1.521733 0.025671 +v -0.033176 1.523393 0.024960 +v -0.037289 1.521217 0.047521 +v -0.038805 1.520204 0.046071 +v -0.040098 1.519340 0.044176 +v -0.041059 1.518698 0.042014 +v -0.041651 1.518303 0.039667 +v -0.041851 1.518169 0.037227 +v -0.041651 1.518303 0.034787 +v -0.041059 1.518698 0.032441 +v -0.040098 1.519340 0.030278 +v -0.038805 1.520204 0.028383 +v -0.037229 1.521257 0.026827 +v -0.035431 1.522459 0.025671 +v -0.033480 1.523762 0.024960 +v -0.037937 1.522431 0.047521 +v -0.039622 1.521733 0.046071 +v -0.041059 1.521138 0.044176 +v -0.042127 1.520696 0.042014 +v -0.042785 1.520423 0.039667 +v -0.043007 1.520331 0.037227 +v -0.042785 1.520423 0.034787 +v -0.042127 1.520696 0.032441 +v -0.041059 1.521138 0.030278 +v -0.039622 1.521733 0.028383 +v -0.037871 1.522459 0.026827 +v -0.035873 1.523286 0.025671 +v -0.033705 1.524184 0.024960 +v -0.038337 1.523748 0.047521 +v -0.040125 1.523393 0.046071 +v -0.041651 1.523089 0.044176 +v -0.042785 1.522864 0.042014 +v -0.043483 1.522725 0.039667 +v -0.043718 1.522678 0.037227 +v -0.043483 1.522725 0.034787 +v -0.042785 1.522864 0.032441 +v -0.041651 1.523089 0.030278 +v -0.040125 1.523393 0.028383 +v -0.038266 1.523762 0.026827 +v -0.036146 1.524184 0.025671 +v -0.033844 1.524642 0.024960 +usemtl material_1 +vt 0.827841 0.456444 +vt 0.826865 0.455402 +vt 0.827907 0.456096 +vt 0.842058 0.471775 +vt 0.837976 0.467212 +vt 0.842847 0.461572 +vt 0.870015 0.218946 +vt 0.894946 0.209010 +vt 0.891536 0.243748 +vt 0.626067 0.713548 +vt 0.646852 0.690397 +vt 0.655533 0.703344 +vt 0.873927 0.494550 +vt 0.876257 0.493740 +vt 0.875382 0.496354 +vt 0.872019 0.483542 +vt 0.870940 0.482536 +vt 0.871472 0.470778 +vt 0.871509 0.449954 +vt 0.872148 0.460940 +vt 0.869884 0.450896 +vt 0.165800 0.455529 +vt 0.164845 0.456567 +vt 0.164776 0.456219 +vt 0.154869 0.467361 +vt 0.150856 0.471940 +vt 0.149956 0.461721 +vt 0.838315 0.395768 +vt 0.844026 0.400208 +vt 0.840380 
0.402876 +vt 0.846600 0.382012 +vt 0.849120 0.393760 +vt 0.841435 0.384948 +vt 0.862553 0.515661 +vt 0.863353 0.509890 +vt 0.865905 0.512859 +vt 0.903197 0.443541 +vt 0.890997 0.445717 +vt 0.896335 0.420583 +vt 0.864481 0.490056 +vt 0.865943 0.493748 +vt 0.862936 0.493077 +vt 0.878909 0.496392 +vt 0.877836 0.499308 +vt 0.823238 0.423976 +vt 0.816272 0.418672 +vt 0.824230 0.409451 +vt 0.838080 0.460496 +vt 0.836168 0.468022 +vt 0.870187 0.437303 +vt 0.863523 0.429405 +vt 0.865693 0.426083 +vt 0.863829 0.437613 +vt 0.863830 0.440753 +vt 0.857959 0.433341 +vt 0.867019 0.499853 +vt 0.864036 0.505455 +vt 0.863152 0.504591 +vt 0.897221 0.488439 +vt 0.899057 0.503323 +vt 0.896823 0.503983 +vt 0.866905 0.491780 +vt 0.869888 0.490883 +vt 0.868763 0.493053 +vt 0.844697 0.494535 +vt 0.847181 0.492958 +vt 0.846961 0.495396 +vt 0.888614 0.415091 +vt 0.878802 0.397244 +vt 0.876764 0.389051 +vt 0.836173 0.388504 +vt 0.834026 0.395287 +vt 0.831825 0.387866 +vt 0.850583 0.499869 +vt 0.851510 0.496872 +vt 0.854041 0.497539 +vt 0.866125 0.487052 +vt 0.868225 0.489363 +vt 0.868953 0.463043 +vt 0.862904 0.475192 +vt 0.864392 0.461427 +vt 0.872170 0.490753 +vt 0.873166 0.482859 +vt 0.814715 0.439052 +vt 0.820247 0.442227 +vt 0.815386 0.446010 +vt 0.874814 0.407608 +vt 0.875562 0.396903 +vt 0.879540 0.411076 +vt 0.857822 0.497888 +vt 0.854636 0.491664 +vt 0.857133 0.491101 +vt 0.852684 0.402568 +vt 0.859029 0.403657 +vt 0.857200 0.408243 +vt 0.863556 0.485000 +vt 0.861944 0.487965 +vt 0.817191 0.430877 +vt 0.822995 0.435592 +vt 0.820870 0.466724 +vt 0.824843 0.464314 +vt 0.825851 0.465959 +vt 0.815771 0.465966 +vt 0.817856 0.463741 +vt 0.818798 0.467103 +vt 0.826902 0.466657 +vt 0.826344 0.465087 +vt 0.826963 0.465058 +vt 0.867416 0.476409 +vt 0.870879 0.466015 +vt 0.869476 0.479087 +vt 0.835489 0.444680 +vt 0.839104 0.439991 +vt 0.837491 0.446209 +vt 0.818719 0.449140 +vt 0.824589 0.446916 +vt 0.821894 0.452148 +vt 0.873450 0.448904 +vt 0.878909 0.459689 +vt 0.875985 0.460354 +vt 0.872027 0.503703 +vt 0.875232 0.504611 +vt 0.873706 0.507233 +vt 0.913942 0.372248 +vt 0.910810 0.410742 +vt 0.892795 0.368057 +vt 0.851839 0.409438 +vt 0.855720 0.414436 +vt 0.849343 0.501995 +vt 0.852223 0.501391 +vt 0.855562 0.503813 +vt 0.838856 0.492407 +vt 0.836210 0.497003 +vt 0.835143 0.493577 +vt 0.833641 0.524315 +vt 0.825758 0.523817 +vt 0.824494 0.509354 +vt 0.876011 0.382443 +vt 0.848993 0.398547 +vt 0.841142 0.391893 +vt 0.836186 0.380433 +vt 0.840694 0.489359 +vt 0.844147 0.488818 +vt 0.842631 0.492279 +vt 0.866551 0.496353 +vt 0.867676 0.494849 +vt 0.865297 0.497855 +vt 0.862330 0.499569 +vt 0.864020 0.496699 +vt 0.846809 0.489395 +vt 0.845915 0.486975 +vt 0.052020 0.330165 +vt 0.031772 0.301144 +vt 0.058399 0.292781 +vt 0.863402 0.453967 +vt 0.865473 0.447501 +vt 0.868216 0.455204 +vt 0.885552 0.469190 +vt 0.889406 0.487571 +vt 0.886308 0.487255 +vt 0.893600 0.452797 +vt 0.906806 0.470475 +vt 0.898399 0.469472 +vt 0.840313 0.488493 +vt 0.841474 0.486048 +vt 0.814942 0.400565 +vt 0.861999 0.391729 +vt 0.865396 0.386821 +vt 0.867006 0.399398 +vt 0.827310 0.462459 +vt 0.827830 0.464426 +vt 0.840662 0.499750 +vt 0.839852 0.495550 +vt 0.853694 0.398236 +vt 0.831159 0.479268 +vt 0.822201 0.482616 +vt 0.825611 0.471616 +vt 0.834363 0.443763 +vt 0.835726 0.439241 +vt 0.848779 0.405351 +vt 0.842170 0.480592 +vt 0.846995 0.474293 +vt 0.843139 0.481763 +vt 0.854377 0.382884 +vt 0.856214 0.389598 +vt 0.850473 0.382115 +vt 0.848576 0.488354 +vt 0.850112 0.490893 +vt 0.848590 0.490914 +vt 0.848141 0.485157 +vt 0.853582 0.390914 +vt 
0.870296 0.457783 +vt 0.870133 0.493228 +vt 0.871296 0.490781 +vt 0.828116 0.456781 +vt 0.828338 0.457060 +vt 0.828099 0.457058 +vt 0.828269 0.456009 +vt 0.828574 0.456299 +vt 0.828169 0.456406 +vt 0.828739 0.454554 +vt 0.830826 0.453572 +vt 0.829438 0.455268 +vt 0.873008 0.490889 +vt 0.872106 0.493568 +vt 0.867798 0.497175 +vt 0.869023 0.495378 +vt 0.813626 0.451266 +vt 0.817093 0.450008 +vt 0.816442 0.452822 +vt 0.824350 0.395864 +vt 0.829501 0.402016 +vt 0.844427 0.472826 +vt 0.848408 0.462167 +vt 0.848950 0.500912 +vt 0.847276 0.525033 +vt 0.853080 0.519142 +vt 0.854777 0.524826 +vt 0.828156 0.455564 +vt 0.828349 0.454453 +vt 0.828385 0.455468 +vt 0.827833 0.456816 +vt 0.826692 0.456246 +vt 0.829235 0.461455 +vt 0.827896 0.458828 +vt 0.828578 0.458422 +vt 0.827866 0.444228 +vt 0.828832 0.437216 +vt 0.831484 0.443654 +vt 0.842333 0.484020 +vt 0.851557 0.433941 +vt 0.852536 0.429870 +vt 0.827043 0.458047 +vt 0.826006 0.461257 +vt 0.825145 0.460133 +vt 0.831353 0.514392 +vt 0.839442 0.512336 +vt 0.839093 0.521955 +vt 0.901202 0.540443 +vt 0.906030 0.547063 +vt 0.893380 0.565617 +vt 0.876358 0.421251 +vt 0.880120 0.426829 +vt 0.865041 0.495361 +vt 0.829413 0.457025 +vt 0.828438 0.456741 +vt 0.827875 0.457305 +vt 0.827333 0.458555 +vt 0.834400 0.484499 +vt 0.838510 0.482213 +vt 0.838422 0.486832 +vt 0.829764 0.434392 +vt 0.822729 0.430085 +vt 0.883518 0.458053 +vt 0.876382 0.447390 +vt 0.880796 0.445447 +vt 0.849181 0.493012 +vt 0.852394 0.492252 +vt 0.818204 0.452463 +vt 0.828647 0.455809 +vt 0.884507 0.499233 +vt 0.858140 0.395432 +vt 0.863034 0.401494 +vt 0.830765 0.459446 +vt 0.829107 0.457866 +vt 0.818032 0.456219 +vt 0.821361 0.455110 +vt 0.821563 0.458333 +vt 0.846693 0.444503 +vt 0.845698 0.453704 +vt 0.842916 0.451400 +vt 0.880686 0.507364 +vt 0.878487 0.510394 +vt 0.858323 0.545975 +vt 0.863625 0.537467 +vt 0.872414 0.538309 +vt 0.809760 0.419093 +vt 0.813500 0.427367 +vt 0.840782 0.491898 +vt 0.840360 0.490638 +vt 0.883175 0.428313 +vt 0.855501 0.437323 +vt 0.855438 0.443606 +vt 0.850535 0.440915 +vt 0.866251 0.507358 +vt 0.860721 0.441452 +vt 0.512697 0.504195 +vt 0.513067 0.504456 +vt 0.512684 0.504669 +vt 0.828563 0.455379 +vt 0.505058 0.505642 +vt 0.507875 0.503852 +vt 0.507972 0.507293 +vt 0.512335 0.508938 +vt 0.510077 0.512472 +vt 0.510052 0.508717 +vt 0.514515 0.503892 +vt 0.513301 0.503601 +vt 0.514308 0.502955 +vt 0.515702 0.499885 +vt 0.513324 0.500964 +vt 0.513730 0.498908 +vt 0.607350 0.540611 +vt 0.618975 0.526944 +vt 0.620706 0.532580 +vt 0.124014 0.218952 +vt 0.099625 0.209027 +vt 0.132266 0.193780 +vt 0.831926 0.429504 +vt 0.869474 0.498017 +vt 0.870133 0.501385 +vt 0.867880 0.500126 +vt 0.856221 0.485346 +vt 0.854964 0.476579 +vt 0.859501 0.475270 +vt 0.877449 0.485093 +vt 0.874834 0.491952 +vt 0.875665 0.483738 +vt 0.873360 0.501225 +vt 0.876614 0.501958 +vt 0.841095 0.479381 +vt 0.805414 0.436792 +vt 0.810321 0.436713 +vt 0.892196 0.594266 +vt 0.881916 0.571862 +vt 0.968926 0.403403 +vt 0.946795 0.365227 +vt 0.967802 0.369636 +vt 0.836877 0.428859 +vt 0.833086 0.413826 +vt 0.837725 0.414975 +vt 0.848004 0.507703 +vt 0.845494 0.509822 +vt 0.843622 0.504405 +vt 0.844002 0.416461 +vt 0.828052 0.457252 +vt 0.828261 0.457238 +vt 0.838708 0.479561 +vt 0.839042 0.477802 +vt 0.873821 0.482824 +vt 0.841574 0.492499 +vt 0.953351 0.429482 +vt 0.971103 0.462460 +vt 0.954995 0.465773 +vt 0.735380 0.595059 +vt 0.746723 0.622955 +vt 0.734142 0.625525 +vt 0.806992 0.474518 +vt 0.809147 0.456029 +vt 0.813424 0.478681 +vt 0.688497 0.515273 +vt 0.675381 0.493409 +vt 0.689271 
0.492812
+vt 0.834603 0.403046
+vt 0.837606 0.403441
[… several thousand additional `+vt` texture-coordinate (UV) records of this Wavefront OBJ mesh omitted; the run continues below …]
0.369141 0.009766 +vt 0.400391 0.009766 +vt 0.435378 0.026938 +vt 0.302734 0.009766 +vt 0.173828 0.009766 +vt 0.177087 0.028918 +vt 0.128543 0.036149 +vt 0.205078 0.009766 +vt 0.009766 0.076172 +vt 0.460241 0.112334 +vt 0.467660 0.060997 +vt 0.468609 0.042710 +vt 0.468420 0.025858 +vt 0.431641 0.009766 +vt 0.466797 0.009766 +vt 0.009766 0.173828 +vt 0.009766 0.107422 +vt 0.457647 0.127332 +vt 0.009766 0.205078 +vt 0.479905 0.503716 +vt 0.480080 0.503713 +vt 0.215125 0.356904 +vt 0.247322 0.353606 +vt 0.227821 0.383498 +vt 0.236813 0.323229 +vt 0.265541 0.329070 +vt 0.264912 0.294536 +vt 0.289198 0.309626 +vt 0.296495 0.275909 +vt 0.314873 0.300921 +vt 0.391272 0.279638 +vt 0.426107 0.269884 +vt 0.426046 0.280686 +vt 0.451009 0.272836 +vt 0.450635 0.281221 +vt 0.472324 0.273753 +vt 0.472894 0.281011 +vt 0.472603 0.292088 +vt 0.451402 0.293692 +vt 0.472793 0.287021 +vt 0.428717 0.297797 +vt 0.450899 0.287115 +vt 0.397442 0.305936 +vt 0.426918 0.289338 +vt 0.373231 0.318135 +vt 0.394530 0.293699 +vt 0.472613 0.262115 +vt 0.451186 0.260281 +vt 0.427743 0.256829 +vt 0.393908 0.264180 +vt 0.401007 0.247943 +vt 0.370266 0.243378 +vt 0.357202 0.269822 +vt 0.338000 0.245915 +vt 0.326601 0.270650 +vt 0.303410 0.249544 +vt 0.264180 0.268285 +vt 0.228758 0.299610 +vt 0.203345 0.336971 +vt 0.201091 0.381689 +vt 0.472651 0.301136 +vt 0.429942 0.307744 +vt 0.431207 0.318796 +vt 0.403000 0.317113 +vt 0.364229 0.366424 +vt 0.364297 0.388796 +vt 0.335975 0.394453 +vt 0.355419 0.415571 +vt 0.341860 0.424331 +vt 0.321812 0.404713 +vt 0.329083 0.438060 +vt 0.368239 0.411319 +vt 0.472784 0.250121 +vt 0.451996 0.247747 +vt 0.431014 0.241495 +vt 0.410108 0.228594 +vt 0.384584 0.220182 +vt 0.354075 0.215753 +vt 0.315946 0.221317 +vt 0.269875 0.240599 +vt 0.223519 0.281046 +vt 0.193935 0.322179 +vt 0.408859 0.192719 +vt 0.428500 0.204157 +vt 0.444635 0.216644 +vt 0.457824 0.226027 +vt 0.473998 0.224270 +vt 0.473666 0.198541 +vt 0.462324 0.185465 +vt 0.454543 0.173634 +vt 0.357133 0.337690 +vt 0.361672 0.305451 +vt 0.334646 0.362347 +vt 0.324835 0.329646 +vt 0.342120 0.331196 +vt 0.383019 0.329430 +vt 0.369962 0.346110 +vt 0.350233 0.361984 +vt 0.350618 0.389030 +vt 0.318519 0.367693 +vt 0.303869 0.336495 +vt 0.302139 0.378228 +vt 0.308496 0.419101 +vt 0.317621 0.459099 +vt 0.284535 0.349513 +vt 0.288220 0.391825 +vt 0.295169 0.428876 +vt 0.281687 0.438356 +vt 0.275391 0.403912 +vt 0.270097 0.366184 +vt 0.346710 0.293462 +vt 0.255367 0.383025 +vt 0.268074 0.448682 +vt 0.261764 0.416750 +vt 0.252617 0.463959 +vt 0.244656 0.433256 +vt 0.236384 0.406375 +vt 0.209922 0.409028 +vt 0.495526 0.300642 +vt 0.451586 0.303307 +vt 0.892414 0.516296 +vt 0.889327 0.508270 +vt 0.515950 0.544835 +vt 0.647184 0.627875 +vt 0.879292 0.432912 +vt 0.509662 0.347234 +vt 0.506382 0.546280 +vt 0.139545 0.355116 +vt 0.419030 0.455810 +vt 0.413049 0.440439 +vt 0.374813 0.572274 +vt 0.380806 0.596052 +vt 0.403693 0.456273 +vt 0.357213 0.539484 +vt 0.435582 0.467684 +vt 0.509918 0.518874 +vt 0.485259 0.483363 +vt 0.487266 0.488061 +vt 0.480690 0.495424 +vt 0.453858 0.474274 +vt 0.449813 0.465922 +vt 0.493519 0.507528 +vt 0.496358 0.512502 +vt 0.493248 0.512751 +vt 0.009766 0.400391 +vt 0.009766 0.369141 +vt 0.009766 0.431641 +vt 0.491874 0.521214 +vt 0.491306 0.524432 +vt 0.293074 0.949356 +vt 0.542160 0.391215 +vt 0.482736 0.616041 +vt 0.492312 0.497887 +vt 0.496280 0.434234 +vt 0.021308 0.727217 +vt 0.009766 0.693359 +vt 0.009766 0.662109 +vt 0.496207 0.472215 +vt 0.481546 0.472459 +vt 0.496370 0.515409 +vt 0.492883 0.515420 +vt 0.496383 
0.518175 +vt 0.492483 0.517941 +vt 0.058819 0.784646 +vt 0.040518 0.744383 +vt 0.009766 0.529297 +vt 0.009766 0.498047 +vt 0.009766 0.564453 +vt 0.009766 0.626953 +vt 0.009766 0.595703 +vt 0.986328 0.271484 +vt 0.009766 0.466797 +vt 0.488448 0.376491 +vt 0.495600 0.369416 +vt 0.495625 0.345422 +vt 0.488233 0.536215 +vt 0.395560 0.942036 +vt 0.447917 0.616560 +vt 0.464292 0.565604 +vt 0.724404 0.448669 +vt 0.487337 0.500140 +vt 0.542674 0.466048 +vt 0.703020 0.949356 +vt 0.986328 0.529297 +vt 0.986328 0.564453 +vt 0.496280 0.497402 +vt 0.986328 0.693359 +vt 0.496188 0.468222 +vt 0.986328 0.400391 +vt 0.512115 0.492980 +vt 0.410289 0.708217 +vt 0.464328 0.689261 +vt 0.389303 0.613284 +vt 0.986328 0.466797 +vt 0.986328 0.369141 +vt 0.986328 0.595703 +vt 0.986328 0.662109 +vt 0.986328 0.498047 +vt 0.586890 0.654870 +vt 0.634697 0.526689 +vt 0.318047 0.687163 +vt 0.443995 0.600998 +vt 0.135684 0.887132 +vt 0.469267 0.476833 +vt 0.483490 0.477694 +vt 0.496229 0.477187 +vt 0.403101 0.380735 +vt 0.510880 0.472508 +vt 0.511591 0.463957 +vt 0.608332 0.606220 +vt 0.608067 0.614100 +vt 0.384974 0.606128 +vt 0.385361 0.613987 +vt 0.379271 0.599484 +vt 0.911391 0.942787 +vt 0.918162 0.939607 +vt 0.915141 0.945119 +vt 0.912275 0.950252 +vt 0.907647 0.947216 +vt 0.920464 0.932110 +vt 0.915739 0.937662 +vt 0.917282 0.952478 +vt 0.923343 0.933687 +vt 0.921093 0.940637 +vt 0.919172 0.946966 +vt 0.922711 0.953703 +vt 0.924951 0.934090 +vt 0.924140 0.941299 +vt 0.923462 0.947645 +vt 0.927800 0.947989 +vt 0.928305 0.953722 +vt 0.927258 0.941370 +vt 0.933810 0.952709 +vt 0.928245 0.933727 +vt 0.930297 0.940862 +vt 0.932046 0.946914 +vt 0.936157 0.945427 +vt 0.938899 0.950537 +vt 0.929791 0.933155 +vt 0.933110 0.939595 +vt 0.939784 0.942950 +vt 0.943383 0.947433 +vt 0.931207 0.932288 +vt 0.935736 0.937939 +vt 0.947467 0.943782 +vt 0.937943 0.935764 +vt 0.942992 0.939978 +vt 0.950441 0.939172 +vt 0.933308 0.929765 +vt 0.939688 0.933225 +vt 0.945289 0.936286 +vt 0.952644 0.934207 +vt 0.940878 0.930403 +vt 0.947106 0.932328 +vt 0.953886 0.928861 +vt 0.934305 0.926633 +vt 0.941513 0.927410 +vt 0.947847 0.928103 +vt 0.948179 0.923826 +vt 0.953879 0.923346 +vt 0.941578 0.924364 +vt 0.952889 0.917903 +vt 0.934034 0.923376 +vt 0.941015 0.921365 +vt 0.947101 0.919647 +vt 0.945669 0.915607 +vt 0.950756 0.912842 +vt 0.939857 0.918521 +vt 0.943168 0.911999 +vt 0.947723 0.908346 +vt 0.932524 0.920462 +vt 0.938212 0.915938 +vt 0.940278 0.908729 +vt 0.943961 0.904368 +vt 0.936060 0.913750 +vt 0.939399 0.901353 +vt 0.930047 0.918336 +vt 0.933537 0.912021 +vt 0.936568 0.906464 +vt 0.934449 0.899133 +vt 0.930696 0.910810 +vt 0.932609 0.904623 +vt 0.929108 0.897870 +vt 0.926910 0.917334 +vt 0.927694 0.910155 +vt 0.928357 0.903871 +vt 0.924019 0.903512 +vt 0.923519 0.897817 +vt 0.924629 0.910061 +vt 0.918038 0.898760 +vt 0.923633 0.917587 +vt 0.921608 0.910624 +vt 0.919825 0.904561 +vt 0.915733 0.905954 +vt 0.912942 0.900861 +vt 0.918730 0.911738 +vt 0.912127 0.908428 +vt 0.908391 0.903850 +vt 0.920699 0.919086 +vt 0.916117 0.913403 +vt 0.908842 0.911299 +vt 0.904297 0.907520 +vt 0.913879 0.915531 +vt 0.906413 0.914935 +vt 0.901320 0.912151 +vt 0.918547 0.921567 +vt 0.912092 0.918039 +vt 0.899011 0.917085 +vt 0.910875 0.920877 +vt 0.904579 0.918930 +vt 0.897697 0.922479 +vt 0.917529 0.924700 +vt 0.910211 0.923890 +vt 0.903816 0.923194 +vt 0.903467 0.927529 +vt 0.897632 0.928043 +vt 0.910152 0.926954 +vt 0.898667 0.933501 +vt 0.917767 0.927992 +vt 0.910765 0.929984 +vt 0.904552 0.931737 +vt 0.906005 0.935807 +vt 0.900805 0.938636 +vt 
0.911934 0.932865 +vt 0.908462 0.939480 +vt 0.903844 0.943193 +vt 0.918429 0.929508 +vt 0.913444 0.935583 +vt 0.925956 0.925720 +vt 0.921852 0.932984 +vt 0.926612 0.933980 +vt 0.932315 0.931072 +vt 0.933892 0.928225 +vt 0.934247 0.924992 +vt 0.933340 0.921889 +vt 0.931341 0.919329 +vt 0.928500 0.917764 +vt 0.925265 0.917384 +vt 0.922140 0.918276 +vt 0.919561 0.920271 +vt 0.917955 0.923106 +vt 0.917564 0.926350 +vt 0.919365 0.930873 +vt 0.972934 0.847573 +vt 0.973280 0.844993 +vt 0.974223 0.846712 +vt 0.968500 0.842226 +vt 0.972219 0.843005 +vt 0.970244 0.844334 +vt 0.886838 0.972284 +vt 0.899797 0.973007 +vt 0.896691 0.978922 +vt 0.895318 0.961904 +vt 0.906283 0.961080 +vt 0.903074 0.967135 +vt 0.975113 0.850181 +vt 0.976566 0.851027 +vt 0.976095 0.851353 +vt 0.975058 0.848269 +vt 0.974078 0.848945 +vt 0.971699 0.846088 +vt 0.882558 0.977579 +vt 0.893864 0.984919 +vt 0.891152 0.966991 +vt 0.904179 0.951342 +vt 0.899845 0.956471 +vt 0.977273 0.852317 +vt 0.977038 0.852478 +vt 0.975837 0.849694 +vt 0.909604 0.977175 +vt 0.909635 0.955134 +vt 0.913569 0.964263 +vt 0.977095 0.850805 +vt 0.977535 0.852206 +vt 0.976638 0.849348 +vt 0.975053 0.844231 +vt 0.975643 0.846122 +vt 0.970974 0.840625 +vt 0.974407 0.842107 +vt 0.907789 0.983419 +vt 0.911695 0.970779 +vt 0.976156 0.847801 +vt 0.906134 0.989822 +vt 0.920129 0.979218 +vt 0.922156 0.959114 +vt 0.921459 0.965760 +vt 0.977813 0.852147 +vt 0.977494 0.849165 +vt 0.976943 0.843861 +vt 0.977143 0.845832 +vt 0.973658 0.839559 +vt 0.976712 0.841635 +vt 0.919448 0.985762 +vt 0.920808 0.972387 +vt 0.977655 0.850686 +vt 0.977322 0.847556 +vt 0.918974 0.992319 +vt 0.930070 0.972651 +vt 0.930728 0.979168 +vt 0.929466 0.965888 +vt 0.978228 0.850679 +vt 0.978097 0.852145 +vt 0.978511 0.847553 +vt 0.978367 0.849162 +vt 0.978664 0.845814 +vt 0.976465 0.839022 +vt 0.979071 0.841607 +vt 0.931409 0.985915 +vt 0.978857 0.843874 +vt 0.931992 0.992319 +vt 0.941090 0.977383 +vt 0.935329 0.957910 +vt 0.937242 0.964271 +vt 0.978377 0.852197 +vt 0.979677 0.847779 +vt 0.979222 0.849329 +vt 0.980166 0.846097 +vt 0.982150 0.839518 +vt 0.981376 0.842098 +vt 0.943069 0.983682 +vt 0.939117 0.970836 +vt 0.978791 0.850789 +vt 0.980732 0.844226 +vt 0.945049 0.990147 +vt 0.947709 0.967544 +vt 0.950962 0.973439 +vt 0.944690 0.961437 +vt 0.979320 0.851006 +vt 0.978640 0.852304 +vt 0.980776 0.848228 +vt 0.980028 0.849660 +vt 0.981586 0.846669 +vt 0.983547 0.842984 +vt 0.953987 0.979197 +vt 0.982515 0.844936 +vt 0.957232 0.984939 +vt 0.955465 0.962407 +vt 0.959791 0.967526 +vt 0.946844 0.951792 +vt 0.951235 0.957014 +vt 0.978878 0.852459 +vt 0.981767 0.848883 +vt 0.980750 0.850141 +vt 0.982868 0.847511 +vt 0.984799 0.840658 +vt 0.985518 0.844261 +vt 0.964002 0.972662 +vt 0.979799 0.851318 +vt 0.984104 0.846007 +vt 0.968023 0.977689 +vt 0.962121 0.955953 +vt 0.967173 0.960114 +vt 0.951551 0.947189 +vt 0.956688 0.951452 +vt 0.979081 0.852658 +vt 0.981372 0.850746 +vt 0.983967 0.848587 +vt 0.989255 0.844209 +vt 0.987175 0.845930 +vt 0.972446 0.964404 +vt 0.980205 0.851720 +vt 0.982604 0.849720 +vt 0.985440 0.847366 +vt 0.977718 0.968664 +vt 0.973152 0.951548 +vt 0.961234 0.945134 +vt 0.979241 0.852893 +vt 0.983274 0.850695 +vt 0.981855 0.851467 +vt 0.986523 0.848933 +vt 0.984820 0.849866 +vt 0.988490 0.847883 +vt 0.979030 0.954662 +vt 0.967303 0.948297 +vt 0.980528 0.852190 +vt 0.984998 0.957471 +vt 0.977271 0.941841 +vt 0.955314 0.941777 +vt 0.964390 0.937905 +vt 0.980749 0.852716 +vt 0.979352 0.853154 +vt 0.982197 0.852262 +vt 0.987278 0.850690 +vt 0.985406 0.851276 +vt 0.990840 
0.846649 +vt 0.989380 0.850047 +vt 0.983485 0.943672 +vt 0.970882 0.939787 +vt 0.983739 0.851786 +vt 0.989866 0.945310 +vt 0.979325 0.931427 +vt 0.959269 0.929402 +vt 0.965895 0.930093 +vt 0.979411 0.853432 +vt 0.982377 0.853112 +vt 0.987646 0.852563 +vt 0.985697 0.852766 +vt 0.991900 0.849301 +vt 0.989852 0.852329 +vt 0.985839 0.932119 +vt 0.972515 0.930766 +vt 0.980867 0.853274 +vt 0.983983 0.852946 +vt 0.992346 0.932575 +vt 0.972758 0.921605 +vt 0.979238 0.920953 +vt 0.965994 0.922185 +vt 0.980875 0.853845 +vt 0.979413 0.853715 +vt 0.982386 0.853979 +vt 0.985722 0.854282 +vt 0.992439 0.852077 +vt 0.989886 0.854666 +vt 0.985966 0.920244 +vt 0.983986 0.854130 +vt 0.987634 0.854458 +vt 0.992346 0.919666 +vt 0.977479 0.910701 +vt 0.958068 0.916378 +vt 0.964409 0.914480 +vt 0.979361 0.853995 +vt 0.982226 0.854833 +vt 0.987298 0.856322 +vt 0.985424 0.855768 +vt 0.991969 0.857713 +vt 0.989408 0.856954 +vt 0.983760 0.908726 +vt 0.970944 0.912638 +vt 0.980767 0.854406 +vt 0.983760 0.855288 +vt 0.990191 0.906725 +vt 0.967703 0.904144 +vt 0.973579 0.900930 +vt 0.961608 0.907121 +vt 0.980551 0.854934 +vt 0.979255 0.854257 +vt 0.983318 0.856380 +vt 0.981896 0.855637 +vt 0.984867 0.857179 +vt 0.988535 0.859112 +vt 0.979319 0.897929 +vt 0.986594 0.858095 +vt 0.985030 0.894658 +vt 0.962669 0.896463 +vt 0.967725 0.892175 +vt 0.952046 0.904942 +vt 0.957268 0.900647 +vt 0.979100 0.854495 +vt 0.982670 0.857366 +vt 0.981418 0.856358 +vt 0.984037 0.858454 +vt 0.990846 0.860347 +vt 0.987274 0.861071 +vt 0.972845 0.888023 +vt 0.980241 0.855412 +vt 0.985536 0.859676 +vt 0.977852 0.883989 +vt 0.956215 0.889890 +vt 0.960381 0.884872 +vt 0.947411 0.900307 +vt 0.951699 0.895235 +vt 0.978902 0.854699 +vt 0.980817 0.856980 +vt 0.982975 0.859550 +vt 0.987336 0.864773 +vt 0.985624 0.862719 +vt 0.964680 0.879648 +vt 0.979844 0.855820 +vt 0.981842 0.858202 +vt 0.984194 0.861007 +vt 0.968953 0.874430 +vt 0.951934 0.878950 +vt 0.945417 0.890704 +vt 0.979373 0.856141 +vt 0.978668 0.854858 +vt 0.980876 0.858874 +vt 0.980101 0.857464 +vt 0.982645 0.862088 +vt 0.981709 0.860404 +vt 0.983697 0.864030 +vt 0.955085 0.873149 +vt 0.948633 0.884716 +vt 0.957954 0.867251 +vt 0.942301 0.874833 +vt 0.942053 0.896536 +vt 0.938222 0.887524 +vt 0.978848 0.856363 +vt 0.978407 0.854970 +vt 0.979308 0.857809 +vt 0.980909 0.862847 +vt 0.980313 0.860996 +vt 0.984927 0.866348 +vt 0.981560 0.864924 +vt 0.944255 0.868716 +vt 0.940161 0.881085 +vt 0.979792 0.859344 +vt 0.946065 0.862443 +vt 0.931985 0.872731 +vt 0.929668 0.892545 +vt 0.930449 0.886009 +vt 0.978129 0.855029 +vt 0.978460 0.857993 +vt 0.979057 0.863226 +vt 0.978832 0.861309 +vt 0.982313 0.867406 +vt 0.979310 0.865409 +vt 0.932846 0.866264 +vt 0.931176 0.879294 +vt 0.978292 0.856484 +vt 0.978638 0.859579 +vt 0.933719 0.859936 +vt 0.921583 0.872552 +vt 0.922496 0.885722 +vt 0.977721 0.856494 +vt 0.977846 0.855032 +vt 0.977461 0.859606 +vt 0.977594 0.858000 +vt 0.977178 0.863245 +vt 0.977321 0.861324 +vt 0.979580 0.867953 +vt 0.977004 0.865474 +vt 0.921160 0.865861 +vt 0.921957 0.879170 +vt 0.921051 0.859362 +vt 0.911245 0.874087 +vt 0.916591 0.893575 +vt 0.914715 0.887234 +vt 0.977567 0.854981 +vt 0.976741 0.857849 +vt 0.975317 0.862937 +vt 0.975840 0.861052 +vt 0.974073 0.867545 +vt 0.974727 0.865059 +vt 0.909613 0.867735 +vt 0.912928 0.880697 +vt 0.977160 0.856388 +vt 0.976301 0.859389 +vt 0.908283 0.861316 +vt 0.904347 0.883753 +vt 0.901311 0.877755 +vt 0.907257 0.889911 +vt 0.976631 0.856175 +vt 0.977304 0.854875 +vt 0.975203 0.858957 +vt 0.975935 0.857525 +vt 0.974423 0.860517 +vt 
0.971430 0.866587 +vt 0.972563 0.864228 +vt 0.898596 0.871851 +vt 0.973535 0.862264 +vt 0.896051 0.865748 +vt 0.896524 0.888693 +vt 0.892381 0.883386 +vt 0.904975 0.899477 +vt 0.900655 0.894179 +vt 0.977065 0.854720 +vt 0.974208 0.858317 +vt 0.975209 0.857051 +vt 0.973131 0.859704 +vt 0.968979 0.865126 +vt 0.970572 0.863002 +vt 0.888408 0.878007 +vt 0.976150 0.855867 +vt 0.971929 0.861231 +vt 0.884747 0.872494 +vt 0.889750 0.895063 +vt 0.884845 0.890678 +vt 0.900221 0.904055 +vt 0.895115 0.899693 +vt 0.975742 0.855467 +vt 0.976862 0.854522 +vt 0.974581 0.856452 +vt 0.972011 0.858648 +vt 0.966813 0.863171 +vt 0.968853 0.861385 +vt 0.879787 0.886102 +vt 0.973360 0.857492 +vt 0.970559 0.859903 +vt 0.874777 0.881512 +vt 0.878642 0.899218 +vt 0.890453 0.905995 +vt 0.976700 0.854288 +vt 0.972675 0.856524 +vt 0.974090 0.855734 +vt 0.969434 0.858354 +vt 0.971131 0.857380 +vt 0.967466 0.859470 +vt 0.872908 0.895855 +vt 0.884404 0.902685 +vt 0.975415 0.854998 +vt 0.867347 0.892604 +vt 0.874199 0.909012 +vt 0.896411 0.909459 +vt 0.887180 0.913209 +vt 0.975191 0.854472 +vt 0.976588 0.854027 +vt 0.973738 0.854938 +vt 0.968637 0.856597 +vt 0.970515 0.855969 +vt 0.963991 0.858169 +vt 0.966520 0.857299 +vt 0.868018 0.906913 +vt 0.880601 0.911235 +vt 0.972196 0.855433 +vt 0.861836 0.904544 +vt 0.871913 0.919521 +vt 0.892270 0.921874 +vt 0.885618 0.921070 +vt 0.976529 0.853748 +vt 0.973549 0.854084 +vt 0.968235 0.854703 +vt 0.970192 0.854463 +vt 0.963275 0.855326 +vt 0.966000 0.854982 +vt 0.865320 0.918588 +vt 0.878790 0.920288 +vt 0.975068 0.853913 +vt 0.971942 0.854269 +vt 0.858584 0.917614 +vt 0.878578 0.929570 +vt 0.871924 0.930104 +vt 0.885348 0.929060 +vt 0.975058 0.853338 +vt 0.976526 0.853464 +vt 0.971918 0.853074 +vt 0.973541 0.853210 +vt 0.968212 0.852774 +vt 0.970175 0.852925 +vt 0.965908 0.852587 +vt 0.865014 0.930643 +vt 0.858540 0.931134 +vt 0.873611 0.940541 +vt 0.892280 0.928512 +vt 0.886848 0.936862 +vt 0.976577 0.853184 +vt 0.973695 0.852349 +vt 0.968541 0.850865 +vt 0.970457 0.851414 +vt 0.963733 0.849459 +vt 0.966378 0.850231 +vt 0.867256 0.942484 +vt 0.880261 0.938685 +vt 0.975165 0.852774 +vt 0.972147 0.851902 +vt 0.860755 0.944480 +vt 0.883564 0.947337 +vt 0.877612 0.950556 +vt 0.893457 0.935024 +vt 0.889718 0.944358 +vt 0.975380 0.852243 +vt 0.976684 0.852920 +vt 0.972578 0.850789 +vt 0.974024 0.851537 +vt 0.971017 0.849981 +vt 0.967265 0.848017 +vt 0.871822 0.953597 +vt 0.969252 0.849053 +vt 0.866065 0.956870 +vt 0.888742 0.955199 +vt 0.883613 0.959523 +vt 0.899459 0.946639 +vt 0.894166 0.950992 +vt 0.976839 0.852682 +vt 0.973237 0.849790 +vt 0.974506 0.850809 +vt 0.971854 0.848686 +vt 0.964893 0.846750 +vt 0.968554 0.846014 +vt 0.878454 0.963724 +vt 0.975691 0.851761 +vt 0.970328 0.847441 +vt 0.873428 0.967793 +vt 0.977971 0.853589 +vt 0.895966 0.941083 +vt 0.893847 0.915474 +vt 0.910534 0.896052 +vt 0.936024 0.894040 +vt 0.955518 0.910411 +vt 0.957780 0.935741 +vt 0.941356 0.955315 +vt 0.915729 0.957624 +vt 0.057212 0.938803 +vt 0.064416 0.936718 +vt 0.060460 0.941688 +vt 0.056792 0.946268 +vt 0.052834 0.942489 +vt 0.062248 0.934559 +vt 0.068917 0.931031 +vt 0.061268 0.949297 +vt 0.066991 0.938387 +vt 0.064029 0.944172 +vt 0.066315 0.951424 +vt 0.071804 0.932545 +vt 0.069829 0.939531 +vt 0.068072 0.945628 +vt 0.072254 0.946698 +vt 0.071766 0.952417 +vt 0.072830 0.940101 +vt 0.077314 0.952428 +vt 0.075065 0.932822 +vt 0.075882 0.940040 +vt 0.076546 0.946375 +vt 0.080780 0.945655 +vt 0.082647 0.951185 +vt 0.078883 0.939407 +vt 0.084743 0.943837 +vt 0.087629 0.949008 +vt 0.078185 
0.931830 +vt 0.081698 0.938239 +vt 0.088436 0.941549 +vt 0.092251 0.946052 +vt 0.084238 0.936507 +vt 0.091425 0.938379 +vt 0.095908 0.942002 +vt 0.080712 0.929742 +vt 0.086398 0.934307 +vt 0.098984 0.937514 +vt 0.088019 0.931679 +vt 0.093863 0.934739 +vt 0.101151 0.932447 +vt 0.081567 0.928315 +vt 0.089296 0.928881 +vt 0.095363 0.930645 +vt 0.096416 0.926416 +vt 0.102144 0.926929 +vt 0.082158 0.926775 +vt 0.089829 0.925839 +vt 0.102198 0.921354 +vt 0.082507 0.923503 +vt 0.089733 0.922743 +vt 0.096107 0.922098 +vt 0.095403 0.917834 +vt 0.100966 0.915951 +vt 0.089068 0.919716 +vt 0.093604 0.913803 +vt 0.098749 0.910947 +vt 0.082105 0.921915 +vt 0.088061 0.916794 +vt 0.091293 0.910052 +vt 0.095733 0.906312 +vt 0.080540 0.919045 +vt 0.086156 0.914360 +vt 0.091750 0.902504 +vt 0.084101 0.912048 +vt 0.088036 0.907099 +vt 0.087219 0.899454 +vt 0.077978 0.917045 +vt 0.081424 0.910537 +vt 0.084396 0.904636 +vt 0.082136 0.897299 +vt 0.076492 0.916365 +vt 0.078557 0.909365 +vt 0.080342 0.903182 +vt 0.076138 0.902099 +vt 0.076685 0.896267 +vt 0.075545 0.908749 +vt 0.071121 0.896315 +vt 0.073219 0.916065 +vt 0.072473 0.908778 +vt 0.071815 0.902401 +vt 0.067548 0.903139 +vt 0.065748 0.897585 +vt 0.069444 0.909419 +vt 0.063557 0.904961 +vt 0.060803 0.899889 +vt 0.070099 0.917092 +vt 0.066611 0.910620 +vt 0.056179 0.902822 +vt 0.064088 0.912405 +vt 0.059915 0.907371 +vt 0.052483 0.906883 +vt 0.067592 0.919215 +vt 0.061953 0.914621 +vt 0.057037 0.910627 +vt 0.049498 0.911423 +vt 0.060316 0.917238 +vt 0.054576 0.914244 +vt 0.047369 0.916533 +vt 0.066111 0.922153 +vt 0.059173 0.920093 +vt 0.053136 0.918317 +vt 0.052098 0.922535 +vt 0.046422 0.922010 +vt 0.058616 0.923118 +vt 0.046462 0.927573 +vt 0.065834 0.925421 +vt 0.058695 0.926185 +vt 0.052430 0.926843 +vt 0.053159 0.931097 +vt 0.047673 0.932945 +vt 0.059326 0.929196 +vt 0.054974 0.935078 +vt 0.049845 0.937897 +vt 0.066815 0.928536 +vt 0.060506 0.932018 +vt 0.067814 0.929831 +vt 0.074142 0.924466 +vt 0.070330 0.931857 +vt 0.073428 0.932757 +vt 0.076648 0.932412 +vt 0.079512 0.930858 +vt 0.082410 0.925145 +vt 0.081416 0.920430 +vt 0.079307 0.917984 +vt 0.074865 0.916130 +vt 0.071630 0.916494 +vt 0.068783 0.918085 +vt 0.066775 0.920652 +vt 0.065897 0.923774 +vt 0.066260 0.927001 +vt 0.027180 0.846482 +vt 0.029743 0.846354 +vt 0.028244 0.847578 +vt 0.031538 0.841251 +vt 0.031480 0.844956 +vt 0.029828 0.843308 +vt 0.027873 0.963038 +vt 0.040438 0.966189 +vt 0.036215 0.971273 +vt 0.038214 0.954650 +vt 0.049014 0.955773 +vt 0.044769 0.961115 +vt 0.024052 0.850215 +vt 0.025626 0.849676 +vt 0.024449 0.850624 +vt 0.026880 0.848668 +vt 0.026049 0.847832 +vt 0.028398 0.845023 +vt 0.022589 0.967292 +vt 0.032163 0.976257 +vt 0.033146 0.958778 +vt 0.048745 0.945921 +vt 0.043629 0.950167 +vt 0.023110 0.851337 +vt 0.023308 0.851540 +vt 0.025024 0.849055 +vt 0.049202 0.972078 +vt 0.053364 0.950584 +vt 0.055497 0.960163 +vt 0.024759 0.851102 +vt 0.023463 0.851778 +vt 0.026104 0.850398 +vt 0.030802 0.847935 +vt 0.029072 0.848852 +vt 0.035056 0.845676 +vt 0.032744 0.846915 +vt 0.046148 0.977789 +vt 0.052462 0.966234 +vt 0.027532 0.849654 +vt 0.042831 0.983485 +vt 0.058996 0.975966 +vt 0.064789 0.956603 +vt 0.062860 0.962984 +vt 0.023569 0.852041 +vt 0.026432 0.851202 +vt 0.031508 0.849709 +vt 0.029634 0.850263 +vt 0.036185 0.848313 +vt 0.033620 0.849075 +vt 0.056991 0.982274 +vt 0.060989 0.969547 +vt 0.024974 0.851629 +vt 0.027963 0.850752 +vt 0.054896 0.988707 +vt 0.069226 0.977870 +vt 0.070581 0.964558 +vt 0.025083 0.852190 +vt 0.023621 0.852320 +vt 0.028192 0.851909 +vt 
0.026589 0.852055 +vt 0.029932 0.851753 +vt 0.034100 0.851367 +vt 0.068499 0.984579 +vt 0.070028 0.971270 +vt 0.031845 0.851576 +vt 0.067859 0.990937 +vt 0.079728 0.977947 +vt 0.077848 0.957818 +vt 0.078551 0.964454 +vt 0.023619 0.852604 +vt 0.026588 0.852922 +vt 0.031858 0.853474 +vt 0.029907 0.853270 +vt 0.036657 0.853961 +vt 0.034066 0.853707 +vt 0.080377 0.984483 +vt 0.079146 0.971097 +vt 0.025075 0.852762 +vt 0.028190 0.853092 +vt 0.080823 0.990998 +vt 0.088168 0.969538 +vt 0.090207 0.975938 +vt 0.086355 0.962980 +vt 0.024957 0.853319 +vt 0.023560 0.852881 +vt 0.027946 0.854250 +vt 0.026407 0.853773 +vt 0.029615 0.854761 +vt 0.036115 0.856741 +vt 0.033593 0.855992 +vt 0.091997 0.982159 +vt 0.031489 0.855348 +vt 0.093625 0.988551 +vt 0.096698 0.965930 +vt 0.099930 0.971821 +vt 0.090202 0.953903 +vt 0.093527 0.959843 +vt 0.023449 0.853143 +vt 0.027482 0.855342 +vt 0.026064 0.854569 +vt 0.029028 0.856171 +vt 0.035052 0.859395 +vt 0.032700 0.858158 +vt 0.103046 0.977714 +vt 0.024736 0.853846 +vt 0.030733 0.857106 +vt 0.105849 0.983691 +vt 0.104377 0.960793 +vt 0.108533 0.965857 +vt 0.095638 0.950195 +vt 0.099896 0.955350 +vt 0.024413 0.854316 +vt 0.023289 0.853378 +vt 0.025581 0.855290 +vt 0.028175 0.857451 +vt 0.033463 0.861835 +vt 0.031383 0.860111 +vt 0.112842 0.971129 +vt 0.026812 0.856316 +vt 0.029649 0.858673 +vt 0.117098 0.976401 +vt 0.116005 0.958439 +vt 0.105479 0.949915 +vt 0.024007 0.854718 +vt 0.023086 0.853576 +vt 0.025975 0.857153 +vt 0.024958 0.855896 +vt 0.028311 0.860031 +vt 0.027075 0.858526 +vt 0.029724 0.861780 +vt 0.121144 0.962654 +vt 0.110875 0.954122 +vt 0.126159 0.966682 +vt 0.121911 0.949590 +vt 0.100238 0.945498 +vt 0.109876 0.943332 +vt 0.023528 0.855030 +vt 0.022848 0.853731 +vt 0.024236 0.856376 +vt 0.026721 0.861101 +vt 0.025793 0.859368 +vt 0.029001 0.865382 +vt 0.027752 0.863055 +vt 0.127676 0.952626 +vt 0.116003 0.946348 +vt 0.024984 0.857808 +vt 0.133435 0.955869 +vt 0.125860 0.939715 +vt 0.106314 0.933951 +vt 0.112686 0.935902 +vt 0.022585 0.853839 +vt 0.023430 0.856707 +vt 0.024937 0.861810 +vt 0.024372 0.859939 +vt 0.026350 0.866519 +vt 0.025579 0.863938 +vt 0.132177 0.941679 +vt 0.119272 0.937783 +vt 0.022999 0.855247 +vt 0.023884 0.858257 +vt 0.138657 0.943663 +vt 0.121122 0.928734 +vt 0.127653 0.929356 +vt 0.114328 0.928123 +vt 0.022437 0.855356 +vt 0.022305 0.853890 +vt 0.022575 0.856873 +vt 0.023062 0.862160 +vt 0.022871 0.860221 +vt 0.023275 0.864426 +vt 0.134419 0.930035 +vt 0.022718 0.858482 +vt 0.140835 0.930585 +vt 0.127739 0.918780 +vt 0.107614 0.920817 +vt 0.114257 0.920138 +vt 0.022021 0.853889 +vt 0.021702 0.856871 +vt 0.021149 0.862172 +vt 0.021350 0.860202 +vt 0.020667 0.867007 +vt 0.020917 0.864397 +vt 0.134278 0.918067 +vt 0.120903 0.919479 +vt 0.021863 0.855349 +vt 0.021530 0.858479 +vt 0.140835 0.917556 +vt 0.119309 0.910340 +vt 0.125699 0.908226 +vt 0.112774 0.912220 +vt 0.021303 0.855230 +vt 0.021743 0.853830 +vt 0.020365 0.858234 +vt 0.020846 0.856688 +vt 0.019851 0.859912 +vt 0.017862 0.866467 +vt 0.018614 0.863923 +vt 0.131936 0.906379 +vt 0.019260 0.861801 +vt 0.138336 0.904707 +vt 0.115645 0.901724 +vt 0.121520 0.898430 +vt 0.103634 0.908324 +vt 0.109576 0.904936 +vt 0.021481 0.853719 +vt 0.019267 0.857766 +vt 0.020046 0.856341 +vt 0.018432 0.859321 +vt 0.015183 0.865400 +vt 0.016428 0.863024 +vt 0.127431 0.895286 +vt 0.020775 0.855008 +vt 0.017489 0.861039 +vt 0.133432 0.892437 +vt 0.110462 0.893965 +vt 0.115522 0.889765 +vt 0.099874 0.902849 +vt 0.105022 0.898523 +vt 0.020303 0.854683 +vt 0.021246 0.853558 +vt 0.019322 
0.855854 +vt 0.017144 0.858461 +vt 0.012716 0.863800 +vt 0.014456 0.861696 +vt 0.120807 0.885419 +vt 0.018288 0.857090 +vt 0.015909 0.859945 +vt 0.126094 0.881135 +vt 0.108072 0.882252 +vt 0.099555 0.892839 +vt 0.021047 0.853355 +vt 0.017448 0.856246 +vt 0.018715 0.855227 +vt 0.014540 0.858593 +vt 0.016063 0.857350 +vt 0.012770 0.860019 +vt 0.112282 0.877084 +vt 0.103757 0.887397 +vt 0.019900 0.854275 +vt 0.116315 0.872014 +vt 0.099157 0.876244 +vt 0.095199 0.898112 +vt 0.092953 0.888356 +vt 0.019589 0.853793 +vt 0.020892 0.853116 +vt 0.018232 0.854499 +vt 0.013465 0.856983 +vt 0.015221 0.856055 +vt 0.009117 0.859287 +vt 0.011483 0.858020 +vt 0.102173 0.870442 +vt 0.095946 0.882186 +vt 0.016793 0.855247 +vt 0.105407 0.864661 +vt 0.089155 0.872257 +vt 0.083677 0.892093 +vt 0.085489 0.885488 +vt 0.020785 0.852852 +vt 0.017901 0.853688 +vt 0.012759 0.855176 +vt 0.014657 0.854620 +vt 0.007956 0.856588 +vt 0.010596 0.855812 +vt 0.091085 0.865894 +vt 0.087314 0.878891 +vt 0.019373 0.853262 +vt 0.016349 0.854140 +vt 0.093039 0.859361 +vt 0.078243 0.877204 +vt 0.078733 0.870532 +vt 0.077153 0.890914 +vt 0.077720 0.883973 +vt 0.019266 0.852698 +vt 0.020734 0.852572 +vt 0.016126 0.852966 +vt 0.017745 0.852826 +vt 0.012419 0.853273 +vt 0.014395 0.853110 +vt 0.010120 0.853461 +vt 0.079263 0.863626 +vt 0.079726 0.857150 +vt 0.068207 0.870503 +vt 0.070534 0.890891 +vt 0.069762 0.884234 +vt 0.020737 0.852287 +vt 0.016149 0.851772 +vt 0.017761 0.851952 +vt 0.014404 0.851577 +vt 0.007481 0.850726 +vt 0.010208 0.851065 +vt 0.067247 0.863923 +vt 0.069013 0.877381 +vt 0.019277 0.852124 +vt 0.012443 0.851341 +vt 0.066247 0.857190 +vt 0.059988 0.879162 +vt 0.057726 0.872776 +vt 0.061940 0.885730 +vt 0.019399 0.851565 +vt 0.020796 0.852010 +vt 0.016404 0.850606 +vt 0.017948 0.851099 +vt 0.014724 0.850072 +vt 0.010725 0.848746 +vt 0.055606 0.866595 +vt 0.012844 0.849446 +vt 0.053223 0.860424 +vt 0.051467 0.882941 +vt 0.048005 0.877181 +vt 0.058161 0.894961 +vt 0.054769 0.888990 +vt 0.020908 0.851748 +vt 0.016882 0.849515 +vt 0.018299 0.850304 +vt 0.015339 0.848660 +vt 0.008192 0.847879 +vt 0.011670 0.846573 +vt 0.044624 0.871452 +vt 0.019623 0.851039 +vt 0.013640 0.847687 +vt 0.041333 0.865901 +vt 0.043826 0.888251 +vt 0.039455 0.883345 +vt 0.052767 0.898728 +vt 0.048441 0.893607 +vt 0.019950 0.850570 +vt 0.021069 0.851514 +vt 0.018789 0.849586 +vt 0.016217 0.847391 +vt 0.011013 0.842870 +vt 0.013056 0.844655 +vt 0.034889 0.878286 +vt 0.017567 0.848546 +vt 0.014764 0.846136 +vt 0.030298 0.873276 +vt 0.032189 0.890830 +vt 0.042918 0.899096 +vt 0.021273 0.851316 +vt 0.018414 0.847720 +vt 0.019417 0.848987 +vt 0.016133 0.844807 +vt 0.017337 0.846334 +vt 0.014775 0.843037 +vt 0.026830 0.886838 +vt 0.037473 0.894971 +vt 0.020358 0.850170 +vt 0.021321 0.883155 +vt 0.026550 0.899707 +vt 0.048138 0.903424 +vt 0.038616 0.905704 +vt 0.020838 0.849861 +vt 0.021511 0.851161 +vt 0.020142 0.848513 +vt 0.017739 0.843774 +vt 0.018628 0.845520 +vt 0.013181 0.840912 +vt 0.016766 0.841809 +vt 0.020676 0.896936 +vt 0.032516 0.902765 +vt 0.019411 0.847079 +vt 0.014599 0.894342 +vt 0.022862 0.909613 +vt 0.042214 0.915024 +vt 0.035854 0.913164 +vt 0.021774 0.851055 +vt 0.020947 0.848191 +vt 0.019520 0.843099 +vt 0.020045 0.844983 +vt 0.015633 0.839451 +vt 0.018930 0.840978 +vt 0.016530 0.907880 +vt 0.029460 0.911391 +vt 0.021367 0.849648 +vt 0.020509 0.846645 +vt 0.010159 0.906416 +vt 0.027740 0.920329 +vt 0.021155 0.919908 +vt 0.034395 0.920993 +vt 0.021928 0.849542 +vt 0.022053 0.851004 +vt 0.021799 0.848031 +vt 0.021526 0.844711 +vt 
0.018275 0.838493 +vt 0.021205 0.840562 +vt 0.014498 0.919393 +vt 0.021669 0.846427 +vt 0.021381 0.842790 +vt 0.008135 0.918962 +vt 0.027802 0.929537 +vt 0.021185 0.930314 +vt 0.041125 0.928158 +vt 0.034557 0.928894 +vt 0.022337 0.851007 +vt 0.022667 0.848040 +vt 0.023034 0.844726 +vt 0.023778 0.838082 +vt 0.023510 0.840625 +vt 0.014676 0.931069 +vt 0.022499 0.849552 +vt 0.022848 0.846449 +vt 0.023259 0.842808 +vt 0.008162 0.931635 +vt 0.029533 0.938533 +vt 0.023194 0.940613 +vt 0.036012 0.936678 +vt 0.023056 0.849672 +vt 0.022614 0.851066 +vt 0.023999 0.846696 +vt 0.023515 0.848225 +vt 0.024516 0.845035 +vt 0.026509 0.838625 +vt 0.025760 0.841108 +vt 0.016991 0.942492 +vt 0.025111 0.843185 +vt 0.010609 0.944182 +vt 0.033117 0.947005 +vt 0.027275 0.950255 +vt 0.045025 0.940497 +vt 0.039148 0.943843 +vt 0.022875 0.851177 +vt 0.025083 0.847162 +vt 0.024308 0.848571 +vt 0.025914 0.845628 +vt 0.029125 0.839679 +vt 0.027898 0.841999 +vt 0.021387 0.953362 +vt 0.023580 0.849894 +vt 0.026848 0.843943 +vt 0.015406 0.956203 +vt 0.022179 0.852447 +vt 0.042583 0.934489 +vt 0.044732 0.908981 +vt 0.064153 0.892427 +vt 0.089692 0.894632 +vt 0.106146 0.914403 +vt 0.103741 0.939990 +vt 0.084203 0.956330 +vt 0.058833 0.954045 +vt 0.986328 0.333984 +vt 0.483537 0.409555 +vt 0.986328 0.009766 +vt 0.107422 0.009766 +vt 0.928788 0.958957 +vt 0.979307 0.839033 +vt 0.987145 0.842241 +vt 0.959088 0.922854 +vt 0.992439 0.854891 +vt 0.989281 0.862679 +vt 0.923091 0.892611 +vt 0.976815 0.867973 +vt 0.965194 0.860763 +vt 0.963256 0.852378 +vt 0.966503 0.844363 +vt 0.033487 0.843344 +vt 0.071271 0.957636 +vt 0.036657 0.851141 +vt 0.031349 0.863802 +vt 0.107376 0.927434 +vt 0.023507 0.867001 +vt 0.010724 0.861667 +vt 0.007470 0.853676 +vt 0.009395 0.845280 +vt 0.041240 0.921575 +vt 0.021015 0.838064 +s off +f 4/1 2/2 1/3 +f 8/4 6/5 5/6 +f 13/7 15/8 14/9 +f 17/10 19/11 18/12 +f 32/13 30/14 29/15 +f 36/16 34/17 33/18 +f 37/19 39/20 38/21 +f 21/22 23/23 20/24 +f 25/25 27/26 24/27 +f 41/28 43/29 42/30 +f 48/31 46/32 45/33 +f 52/34 50/35 49/36 +f 53/37 55/38 54/39 +f 60/40 58/41 57/42 +f 29/15 62/43 61/44 +f 66/45 64/46 63/47 +f 67/48 6/5 68/49 +f 72/50 70/51 69/52 +f 76/53 74/54 73/55 +f 77/56 79/57 78/58 +f 81/59 83/60 82/61 +f 59/62 86/63 85/64 +f 88/65 90/66 89/67 +f 96/68 94/69 93/70 +f 100/71 98/72 97/73 +f 101/74 103/75 102/76 +f 106/77 34/17 87/78 +f 108/79 115/80 107/81 +f 117/82 36/16 118/83 +f 124/84 126/85 125/86 +f 131/87 129/88 128/89 +f 132/90 134/91 133/92 +f 142/93 140/94 139/95 +f 148/96 60/40 147/97 +f 144/98 127/99 124/84 +f 149/100 151/101 150/102 +f 156/103 154/104 153/105 +f 159/106 158/107 157/108 +f 116/109 164/110 105/111 +f 171/112 169/113 168/114 +f 174/115 173/116 172/117 +f 179/118 181/119 180/120 +f 186/121 188/122 187/123 +f 194/124 196/125 195/126 +f 199/127 139/95 198/128 +f 206/129 208/130 207/131 +f 213/132 211/133 210/134 +f 202/135 214/136 201/137 +f 96/68 216/138 54/39 +f 92/139 43/29 44/140 +f 97/73 246/141 100/71 +f 250/142 229/143 228/144 +f 252/145 58/41 253/146 +f 258/147 256/148 255/149 +f 260/150 229/143 259/151 +f 110/152 112/153 113/154 +f 114/155 263/156 109/157 +f 265/158 267/159 266/160 +f 56/161 270/162 269/163 +f 274/164 249/165 250/142 +f 185/166 63/47 64/46 +f 280/167 278/168 276/169 +f 285/170 283/171 157/108 +f 288/172 211/133 212/173 +f 141/174 92/139 46/32 +f 289/175 175/176 176/177 +f 148/96 105/111 106/77 +f 312/178 170/179 171/112 +f 238/180 42/30 43/29 +f 318/181 160/182 317/183 +f 310/184 308/185 307/186 +f 241/187 325/188 324/189 +f 160/182 240/190 317/183 +f 
47/191 307/186 308/185 +f 108/79 315/192 164/110 +f 345/193 86/63 119/194 +f 348/195 350/196 349/197 +f 356/198 358/199 357/200 +f 355/201 353/202 352/203 +f 363/204 32/13 362/205 +f 149/100 150/102 176/177 +f 364/206 253/146 365/207 +f 366/208 368/209 367/210 +f 183/211 371/212 63/47 +f 163/213 161/214 160/182 +f 209/215 101/74 208/130 +f 378/216 165/217 166/218 +f 379/219 381/220 380/221 +f 383/222 3/223 4/1 +f 284/224 386/225 385/226 +f 387/227 389/228 388/229 +f 391/230 229/143 249/165 +f 73/55 397/231 396/232 +f 405/233 407/234 406/235 +f 203/236 411/237 410/238 +f 414/239 413/240 412/241 +f 1/3 357/200 4/1 +f 277/242 131/87 275/243 +f 254/244 57/42 58/41 +f 358/199 415/245 351/246 +f 417/247 386/225 408/248 +f 401/249 192/250 420/251 +f 419/252 127/99 143/253 +f 421/254 182/255 422/256 +f 103/75 302/257 409/258 +f 428/259 368/209 174/115 +f 380/221 359/260 356/198 +f 62/43 266/160 323/261 +f 140/94 431/262 430/263 +f 357/200 351/246 348/195 +f 356/198 379/219 380/221 +f 415/245 403/264 434/265 +f 437/266 436/267 435/268 +f 438/269 440/270 439/271 +f 188/122 450/272 449/273 +f 452/274 454/275 453/276 +f 265/158 181/119 421/254 +f 224/277 64/46 225/278 +f 237/279 213/132 427/280 +f 275/243 128/89 458/281 +f 281/282 460/283 459/284 +f 50/35 79/57 461/285 +f 73/55 282/286 281/282 +f 468/287 466/288 465/289 +f 390/290 352/203 359/260 +f 482/291 484/292 483/293 +f 489/294 487/295 486/296 +f 493/297 495/298 494/299 +f 472/300 470/301 469/302 +f 502/303 500/304 499/305 +f 220/306 222/307 223/308 +f 512/309 143/253 66/45 +f 518/310 511/311 80/312 +f 513/313 230/314 286/315 +f 205/316 31/317 244/318 +f 189/319 451/320 188/122 +f 163/213 532/321 8/4 +f 533/322 225/278 251/323 +f 446/324 447/325 412/241 +f 335/326 540/327 334/328 +f 544/329 546/330 545/331 +f 329/332 291/333 292/334 +f 199/127 262/335 238/180 +f 385/226 416/336 550/337 +f 532/321 191/338 247/339 +f 363/204 118/83 243/340 +f 236/341 237/279 250/142 +f 578/342 580/343 579/344 +f 573/345 571/346 570/347 +f 561/348 562/349 272/350 +f 241/187 259/151 240/190 +f 590/351 592/352 591/353 +f 545/331 595/354 594/355 +f 503/356 155/357 504/358 +f 606/359 624/360 623/361 +f 478/362 480/363 479/364 +f 646/365 648/366 647/367 +f 590/351 689/368 684/369 +f 564/370 651/371 650/372 +f 728/373 184/374 727/375 +f 714/376 716/377 715/378 +f 692/379 575/380 693/381 +f 541/382 581/383 578/342 +f 346/384 248/385 400/386 +f 161/214 230/314 160/182 +f 128/89 94/69 95/387 +f 698/388 704/389 703/390 +f 574/391 576/392 575/380 +f 290/393 401/249 328/394 +f 630/395 725/396 726/397 +f 733/398 735/399 734/400 +f 755/401 496/402 493/297 +f 759/403 481/404 760/405 +f 757/406 762/407 761/408 +f 764/409 466/288 467/410 +f 768/411 3808/412 3809/413 +f 759/403 488/414 489/294 +f 492/415 475/416 771/417 +f 474/418 769/419 473/420 +f 773/421 465/289 772/422 +f 771/417 486/296 483/293 +f 484/292 775/423 491/424 +f 774/425 489/294 486/296 +f 495/298 764/409 756/426 +f 495/298 761/408 494/299 +f 137/427 784/428 136/429 +f 325/188 797/430 547/431 +f 165/217 51/432 52/34 +f 369/433 29/15 370/434 +f 730/435 568/436 569/437 +f 90/66 301/438 89/67 +f 243/340 516/439 242/440 +f 800/441 806/442 805/443 +f 817/444 808/445 816/446 +f 816/446 823/447 822/448 +f 306/449 447/325 807/450 +f 815/451 809/452 808/445 +f 829/453 828/454 825/455 +f 726/397 501/456 502/303 +f 274/164 424/457 423/458 +f 879/459 881/460 880/461 +f 884/462 879/459 885/463 +f 219/464 217/465 211/133 +f 906/466 904/467 897/468 +f 455/469 205/316 542/470 +f 898/471 830/472 829/453 +f 903/473 880/461 881/460 +f 
906/466 897/468 898/471 +f 377/474 519/475 165/217 +f 923/476 948/477 946/478 +f 946/478 951/479 950/480 +f 3519/481 739/482 3517/483 +f 900/484 886/485 880/461 +f 225/278 65/486 144/98 +f 182/255 442/487 422/256 +f 809/452 947/488 824/489 +f 954/490 962/491 961/492 +f 927/493 965/494 966/495 +f 885/463 880/461 886/485 +f 925/496 966/495 967/497 +f 343/498 341/499 340/500 +f 965/494 971/501 968/502 +f 968/502 970/503 969/504 +f 972/505 969/504 973/506 +f 963/507 981/508 978/509 +f 336/510 338/511 339/512 +f 992/513 827/514 828/454 +f 810/515 996/516 959/517 +f 1007/518 1002/519 1008/520 +f 1012/521 827/514 1009/522 +f 1011/523 1007/518 1008/520 +f 294/524 292/334 291/333 +f 907/525 920/526 919/527 +f 1017/528 1019/529 1018/530 +f 1020/531 1022/532 1021/533 +f 1035/534 1020/531 1044/535 +f 1061/536 1047/537 1046/538 +f 1063/539 1046/538 1062/540 +f 1044/535 1021/533 1045/541 +f 1102/542 1097/543 1109/544 +f 1116/545 1118/546 1117/547 +f 1016/548 1014/549 1013/550 +f 881/460 904/467 903/473 +f 816/446 1127/551 817/444 +f 1094/552 949/553 1097/543 +f 514/554 167/555 515/556 +f 146/557 81/59 82/61 +f 1133/558 1130/559 1129/560 +f 1102/542 1126/561 822/448 +f 822/448 1116/545 816/446 +f 1145/562 1148/563 1136/564 +f 1076/565 1151/566 1069/567 +f 1148/563 1086/568 1087/569 +f 1109/544 1114/570 1136/564 +f 1154/571 1156/572 1155/573 +f 1153/574 967/497 1156/572 +f 3464/575 463/576 3714/577 +f 1144/578 1152/579 1151/566 +f 1156/572 1163/580 1162/581 +f 1164/582 1169/583 1165/584 +f 1145/562 1176/585 1147/586 +f 1114/570 1154/571 1171/587 +f 1183/588 1164/582 1177/589 +f 1184/590 1177/589 1185/591 +f 1069/567 1062/540 1064/592 +f 1023/593 1194/594 1022/532 +f 1162/581 1170/595 1169/583 +f 1195/596 1185/591 1191/597 +f 1183/588 1176/585 1155/573 +f 1201/598 1152/579 1196/599 +f 1061/536 1201/598 1202/600 +f 3825/601 843/602 3836/603 +f 1203/604 1195/596 1034/605 +f 1202/600 1184/590 1195/596 +f 1218/606 1203/604 1217/607 +f 1060/608 1202/600 1203/604 +f 1225/609 1217/607 1219/610 +f 671/611 694/612 670/613 +f 1219/610 1024/614 1035/534 +f 1217/607 1034/605 1024/614 +f 1226/615 1015/616 1227/617 +f 1243/618 1234/619 1233/620 +f 1231/621 1229/622 1228/623 +f 847/624 1233/620 843/602 +f 843/602 1244/625 878/626 +f 1233/620 1245/627 1244/625 +f 1923/628 233/629 216/138 +f 2100/630 499/305 500/304 +f 3914/631 2135/632 3900/633 +f 61/44 323/261 505/634 +f 210/134 217/465 457/635 +f 1299/636 1309/637 1300/638 +f 3863/639 1231/621 3862/640 +f 1066/641 1321/642 1322/643 +f 1322/643 1228/623 1229/622 +f 1060/608 1321/642 1047/537 +f 1325/644 1323/645 1310/646 +f 1244/625 1332/647 1330/648 +f 1290/649 1242/650 1288/651 +f 1337/652 1339/653 1338/654 +f 1330/648 1340/655 1339/653 +f 1341/656 1245/627 1294/657 +f 1342/658 1332/647 1341/656 +f 1343/659 1341/656 1344/660 +f 1344/660 1294/657 1293/661 +f 3857/662 1345/663 1346/664 +f 1347/665 1342/658 1343/659 +f 3856/666 1295/667 1345/663 +f 1349/668 1348/669 1347/665 +f 3861/670 1347/665 3853/671 +f 3853/671 1343/659 3857/662 +f 906/466 829/453 1353/672 +f 905/673 1353/672 1354/674 +f 865/675 1355/676 1357/677 +f 1355/676 829/453 825/455 +f 1357/677 825/455 826/678 +f 1356/679 1353/672 1355/676 +f 3834/680 1338/654 3833/681 +f 866/682 1356/679 865/675 +f 1358/683 1354/674 1356/679 +f 3828/684 1337/652 3834/680 +f 838/685 1012/521 841/686 +f 1363/687 1338/654 1339/653 +f 1368/688 1339/653 1340/655 +f 1348/669 1340/655 1342/658 +f 1371/689 3824/690 3829/691 +f 524/692 526/693 523/694 +f 528/695 530/696 527/697 +f 1372/698 1369/699 1371/689 +f 535/700 537/701 538/702 
+f 1367/703 1348/669 1350/704 +f 1370/705 1359/706 1369/699 +f 1369/699 3833/681 3824/690 +f 1373/707 903/473 904/467 +f 555/708 557/709 558/710 +f 1374/711 910/712 1373/707 +f 1373/707 905/673 1375/713 +f 1374/711 1375/713 1376/714 +f 1375/713 1354/674 1370/705 +f 1378/715 908/716 1377/717 +f 376/718 651/371 1387/719 +f 1388/720 973/506 969/504 +f 586/721 588/722 585/723 +f 1390/724 1388/720 1391/725 +f 1391/725 969/504 970/503 +f 597/726 599/727 596/728 +f 600/729 602/730 603/731 +f 1392/732 970/503 979/733 +f 608/734 610/735 607/736 +f 612/737 523/694 611/738 +f 614/739 616/740 613/741 +f 617/742 25/25 24/27 +f 620/743 622/744 619/745 +f 1397/746 1391/725 1392/732 +f 1398/747 1392/732 1399/748 +f 1400/749 920/526 1378/715 +f 1402/750 1165/584 1169/583 +f 636/751 638/752 635/753 +f 930/754 1402/750 929/755 +f 641/756 643/757 644/758 +f 929/755 1169/583 1170/595 +f 1404/759 1403/760 1402/750 +f 1411/761 1365/762 1406/763 +f 1412/764 1415/765 1414/766 +f 652/767 654/768 655/769 +f 609/770 657/771 658/772 +f 659/773 661/774 662/775 +f 558/710 586/721 585/723 +f 665/776 667/777 664/778 +f 1413/779 1406/763 1412/764 +f 1418/780 3810/781 3813/782 +f 1417/783 1371/689 1416/784 +f 1416/784 3829/691 3810/781 +f 1419/785 1416/784 1418/780 +f 1386/786 1374/711 1420/787 +f 677/788 679/789 676/790 +f 1385/791 1420/787 1421/792 +f 1420/787 1376/714 1422/793 +f 685/794 687/795 688/796 +f 1421/792 1422/793 1423/797 +f 528/695 691/798 658/772 +f 1422/793 1372/698 1417/783 +f 1018/530 747/799 1425/800 +f 699/801 701/802 702/803 +f 1424/804 579/344 1017/528 +f 3663/805 1767/806 3660/807 +f 700/808 711/809 712/810 +f 1093/811 3754/812 1571/813 +f 717/814 530/696 719/815 +f 721/816 723/817 720/818 +f 704/389 3543/819 3554/820 +f 19/11 1426/821 1430/822 +f 1429/823 1427/824 1426/821 +f 1658/825 3542/826 3544/827 +f 1439/828 1434/829 1438/830 +f 1437/831 1435/832 1434/829 +f 1426/821 1433/833 1432/834 +f 1442/835 1432/834 1440/836 +f 742/837 744/838 745/839 +f 1432/834 1441/840 1440/836 +f 751/841 753/842 750/843 +f 1438/830 1403/760 1405/844 +f 1446/845 1444/846 1443/847 +f 1447/848 1446/845 1443/847 +f 3860/849 1443/847 3848/850 +f 1415/765 1445/851 1446/845 +f 1414/766 1446/845 1448/852 +f 2173/853 469/302 470/301 +f 777/854 779/855 686/856 +f 780/857 782/858 783/859 +f 1676/860 1806/861 1675/862 +f 3859/863 1349/668 3861/670 +f 789/864 791/865 788/866 +f 745/839 793/867 742/837 +f 652/767 795/868 653/869 +f 1455/870 1457/871 1456/872 +f 1454/873 1350/704 1452/874 +f 610/735 802/875 801/876 +f 3831/877 698/388 703/390 +f 1352/878 1045/541 1351/879 +f 1015/616 1459/880 1460/881 +f 811/882 813/883 814/884 +f 2118/885 3817/886 2117/887 +f 819/888 821/889 818/890 +f 1466/891 1194/594 1190/892 +f 1467/893 1465/894 1466/891 +f 1182/895 1468/896 1467/893 +f 832/897 833/898 831/899 +f 835/900 837/901 834/902 +f 1467/893 1190/892 1182/895 +f 1469/903 1444/846 1445/851 +f 840/904 712/810 690/905 +f 1453/906 1469/903 1454/873 +f 599/727 845/907 846/908 +f 1226/615 3849/909 3855/910 +f 851/911 853/912 850/913 +f 855/914 856/915 854/916 +f 858/917 860/918 857/919 +f 861/920 863/921 864/922 +f 1014/549 3855/910 3858/923 +f 867/924 868/925 869/926 +f 870/927 872/928 873/929 +f 874/930 876/931 877/932 +f 1310/646 1471/933 1309/637 +f 1476/934 1427/824 1475/935 +f 1461/936 1475/935 1460/881 +f 887/937 889/938 890/939 +f 788/866 892/940 891/941 +f 894/942 896/943 893/944 +f 1460/881 1227/617 1015/616 +f 901/945 524/692 612/737 +f 1465/894 1477/946 1464/947 +f 1865/948 3500/949 1864/950 +f 911/951 913/952 914/953 +f 916/954 
918/955 915/956 +f 1850/957 3507/958 1851/959 +f 921/960 895/961 894/942 +f 1479/962 1436/963 1478/964 +f 926/965 667/777 601/966 +f 1481/967 563/968 1480/969 +f 269/163 84/970 1482/971 +f 931/972 933/973 916/954 +f 934/974 614/739 869/926 +f 936/975 688/796 937/976 +f 938/977 662/775 939/978 +f 1484/979 122/980 123/981 +f 941/982 835/900 834/902 +f 320/983 509/984 510/985 +f 1849/986 3505/987 1850/957 +f 84/970 1484/979 83/60 +f 1487/988 1488/989 539/990 +f 1757/991 3502/992 3509/993 +f 365/207 1491/994 364/206 +f 461/285 80/312 511/311 +f 117/82 362/205 1496/995 +f 1494/996 541/382 1495/997 +f 926/965 957/998 958/999 +f 1477/946 1433/833 1476/934 +f 1709/1000 1585/1001 1700/1002 +f 119/194 1496/995 345/193 +f 271/1003 504/358 156/103 +f 511/311 189/319 186/121 +f 345/193 1492/1004 365/207 +f 255/149 344/1005 254/244 +f 57/42 299/1006 132/90 +f 1501/1007 268/1008 1500/1009 +f 267/159 1499/1010 145/1011 +f 974/1012 976/1013 977/1014 +f 1678/1015 1693/1016 1676/860 +f 3846/1017 1807/1018 3840/1019 +f 557/709 663/1020 558/710 +f 983/1021 984/1022 985/1023 +f 1443/847 3849/909 3848/850 +f 421/254 268/1008 265/158 +f 989/1024 991/1025 988/1026 +f 1490/1027 418/1028 1504/1029 +f 279/1030 309/1031 310/184 +f 997/1032 676/790 679/789 +f 998/1033 999/1034 875/1035 +f 1504/1029 38/21 1505/1036 +f 1506/1037 1507/1038 1486/1039 +f 360/1040 1508/1041 1506/1037 +f 1506/1037 207/131 360/1040 +f 939/978 1006/1042 938/977 +f 270/162 1483/1043 269/163 +f 66/45 546/330 512/309 +f 1010/1044 793/867 935/1045 +f 367/210 437/266 1509/1046 +f 244/318 363/204 243/340 +f 1670/1047 3534/1048 1669/1049 +f 209/215 1510/1050 226/1051 +f 235/1052 1512/1053 1511/1054 +f 1511/1054 234/1055 235/1052 +f 388/229 426/1056 425/1057 +f 427/280 250/142 237/279 +f 161/214 521/1058 231/1059 +f 100/71 45/33 44/140 +f 1025/1060 608/734 1027/1061 +f 1029/1062 1031/1063 1028/1064 +f 939/978 1033/1065 1032/1066 +f 41/28 594/355 99/1067 +f 1036/1068 783/859 1037/1069 +f 702/803 1038/1070 1039/1071 +f 1040/1072 1042/1073 1043/1074 +f 362/205 369/433 1493/1075 +f 187/123 477/1076 186/121 +f 70/51 539/990 198/128 +f 1511/1054 1521/1077 234/1055 +f 1049/1078 603/731 1048/1079 +f 613/741 869/926 614/739 +f 860/918 1051/1080 857/919 +f 1005/1081 1053/1082 1006/1042 +f 753/842 1056/1083 1054/1084 +f 1057/1085 1059/1086 1055/1087 +f 293/1088 1522/1089 292/334 +f 288/172 1521/1077 1522/1089 +f 292/334 1523/1090 329/332 +f 331/1091 1508/1041 1524/1092 +f 306/449 453/276 305/1093 +f 100/71 41/28 99/1067 +f 462/1094 78/58 79/57 +f 330/1095 1524/1092 462/1094 +f 635/753 1068/1096 636/751 +f 1485/1097 1526/1098 1528/1099 +f 206/129 1528/1099 1510/1050 +f 1070/1100 1072/1101 831/899 +f 942/1102 1073/1103 1074/1104 +f 916/954 1075/1105 917/1106 +f 440/270 1530/1107 439/271 +f 157/108 1514/1108 285/170 +f 1529/1109 5/6 1530/1107 +f 371/212 546/330 63/47 +f 790/1110 663/1020 791/865 +f 1077/1111 857/919 1078/1112 +f 1403/760 1435/832 1165/584 +f 1080/1113 1082/1114 1079/1115 +f 531/1116 1529/1109 440/270 +f 21/22 1084/1117 1085/1118 +f 272/350 1531/1119 561/348 +f 137/427 396/232 397/231 +f 690/905 802/875 691/798 +f 335/326 3567/1120 581/383 +f 1090/1121 1029/1062 1091/1122 +f 1092/1123 794/1124 1042/1073 +f 1533/1125 137/427 138/1126 +f 661/774 1095/1127 1096/1128 +f 69/52 445/1129 72/50 +f 1099/1130 1101/1131 1098/1132 +f 38/21 1536/1133 1505/1036 +f 1104/1134 1106/1135 1103/1136 +f 852/1137 1108/1138 853/912 +f 1377/717 909/1139 1386/786 +f 291/333 411/237 294/524 +f 316/1140 516/439 33/18 +f 539/990 1533/1125 534/1141 +f 1505/1036 316/1140 313/1142 
+f 1110/1143 1112/1144 1113/1145 +f 1539/1146 460/283 264/1147 +f 522/1148 1537/1149 1539/1146 +f 834/902 1120/1150 1119/1151 +f 1122/1152 1124/1153 1121/1154 +f 612/737 1041/1155 901/945 +f 935/1045 615/1156 614/739 +f 276/169 130/1157 131/87 +f 1686/1158 3522/1159 1685/1160 +f 193/1161 1541/1162 273/1163 +f 231/1059 1540/1164 287/1165 +f 556/1166 982/1167 557/709 +f 365/207 85/64 345/193 +f 190/1168 318/181 1541/1162 +f 1810/1169 3574/1170 3528/1171 +f 3924/1172 3512/1173 3791/1174 +f 1132/1175 985/1023 1131/1176 +f 208/130 102/76 132/90 +f 1134/1177 662/775 661/774 +f 1491/994 80/312 77/56 +f 429/1178 1542/1179 147/97 +f 1137/1180 915/956 918/955 +f 172/117 428/259 174/115 +f 1139/1181 1141/1182 1142/1183 +f 549/1184 54/39 55/38 +f 1517/1185 1518/1186 296/1187 +f 976/1013 1146/1188 938/977 +f 834/902 984/1022 941/982 +f 1543/1189 286/315 115/80 +f 1544/1190 239/1191 513/313 +f 133/92 1544/1190 1542/1179 +f 506/1192 257/1193 258/147 +f 420/251 328/394 401/249 +f 1158/1194 1160/1195 1157/1196 +f 587/1197 1103/1136 588/722 +f 1513/1198 1514/1108 151/101 +f 607/736 1027/1061 608/734 +f 304/1199 1545/1200 303/1201 +f 364/206 77/56 506/1192 +f 69/52 198/128 139/95 +f 1006/1042 976/1013 938/977 +f 700/808 1112/1144 701/802 +f 3877/1202 3525/1203 3787/1204 +f 1166/1205 657/771 656/1206 +f 1167/1207 858/917 1077/1111 +f 1300/638 3524/1208 1299/636 +f 1172/1209 1174/1210 1175/1211 +f 218/1212 200/1213 217/465 +f 1179/1214 1181/1215 1178/1216 +f 372/1217 97/73 98/72 +f 89/67 227/1218 88/65 +f 1186/1219 1188/1220 1189/1221 +f 172/117 1517/1185 1549/1222 +f 1482/971 81/59 1498/1223 +f 1090/1121 1193/1224 1030/1225 +f 394/1226 353/202 1551/1227 +f 526/693 1198/1228 1197/1229 +f 811/882 858/917 812/1230 +f 1199/1231 1026/1232 1025/1060 +f 606/359 697/1233 696/1234 +f 1204/1235 1206/1236 743/1237 +f 1207/1238 523/694 526/693 +f 1209/1239 867/924 613/741 +f 914/953 685/794 936/975 +f 1478/964 1441/840 1477/946 +f 1029/1062 1161/1240 1091/1122 +f 1211/1241 1213/1242 1214/1243 +f 3858/923 1451/1244 3859/863 +f 844/1245 1216/1246 845/907 +f 423/458 273/1163 274/164 +f 1220/1247 1222/1248 1083/1249 +f 22/1250 1224/1251 23/23 +f 882/1252 897/468 881/460 +f 1519/1253 173/116 387/227 +f 1189/1221 1232/1254 1178/1216 +f 349/197 550/337 416/336 +f 1071/1255 1236/1256 1072/1101 +f 1237/1257 1239/1258 745/839 +f 1221/1259 1189/1221 1240/1260 +f 1510/1050 1512/1053 333/1261 +f 939/978 1241/1262 1005/1081 +f 1556/1263 5/6 67/48 +f 1553/1264 394/1226 1551/1227 +f 404/1265 67/48 347/1266 +f 1247/1267 1249/1268 1246/1269 +f 142/93 238/180 92/139 +f 1251/1270 635/753 1250/1271 +f 850/913 1253/1272 851/911 +f 1254/1273 1077/1111 1255/1274 +f 57/42 133/92 429/1178 +f 443/1275 552/1276 551/1277 +f 1187/1278 1175/1211 1188/1220 +f 1256/1279 831/899 833/898 +f 432/1280 382/1281 383/222 +f 741/1282 747/799 746/1283 +f 434/265 351/246 415/245 +f 349/197 383/222 348/195 +f 251/323 1481/967 1480/969 +f 395/1284 352/203 353/202 +f 1518/1186 1550/1285 1551/1227 +f 1558/1286 378/216 340/500 +f 109/157 314/1287 315/192 +f 285/170 1513/1198 407/234 +f 395/1284 393/1288 392/1289 +f 202/135 410/238 1558/1286 +f 417/247 405/233 432/1280 +f 1246/1269 1258/1290 1247/1267 +f 3/223 297/1291 2/2 +f 1259/1292 1261/1293 1262/1294 +f 403/264 347/1266 284/224 +f 955/1295 953/1296 952/1297 +f 462/1094 51/432 330/1095 +f 1096/1128 687/795 1263/1298 +f 429/1178 60/40 57/42 +f 896/943 1265/1299 932/1300 +f 1267/1301 1268/1302 1266/1303 +f 423/458 327/1304 420/251 +f 37/19 180/120 40/1305 +f 20/24 1187/1278 1186/1219 +f 1256/1279 1071/1255 1070/1100 
+f 155/357 156/103 504/358 +f 1188/1220 1269/1306 1232/1254 +f 1236/1256 1271/1307 1262/1294 +f 1272/1308 535/700 621/1309 +f 565/1310 463/576 464/1311 +f 1273/1312 745/839 1239/1258 +f 876/931 1255/1274 1274/1313 +f 316/1140 164/110 315/192 +f 437/266 503/356 1509/1046 +f 194/124 1495/997 197/1314 +f 1275/1315 864/922 863/921 +f 624/360 631/1316 630/395 +f 1277/1317 1137/1180 1278/1318 +f 569/437 567/1319 566/1320 +f 1481/967 125/86 366/208 +f 324/189 547/431 326/1321 +f 1279/1322 1107/1323 1280/1324 +f 1525/1325 329/332 1523/1090 +f 1281/1326 1137/1180 918/955 +f 171/112 1553/1264 1552/1327 +f 1488/989 396/232 1533/1125 +f 1557/1328 1562/1329 1559/1330 +f 1282/1331 1206/1236 1205/1332 +f 1053/1082 1278/1318 1281/1326 +f 778/1333 607/736 1283/1334 +f 713/1335 3566/1336 3569/1337 +f 251/323 144/98 124/84 +f 4/1 348/195 383/222 +f 709/1338 707/1339 706/1340 +f 728/373 45/33 246/141 +f 261/1341 1563/1342 544/329 +f 789/864 1287/1343 790/1110 +f 74/54 263/156 282/286 +f 403/264 385/226 434/265 +f 1178/1216 1240/1260 1189/1221 +f 1223/1344 1289/1345 1224/1251 +f 158/107 150/102 151/101 +f 1220/1247 1186/1219 1221/1259 +f 296/1187 354/1346 433/1347 +f 784/428 1538/1348 1566/1349 +f 1181/1215 1291/1350 1222/1248 +f 1269/1306 1257/1351 1246/1269 +f 1566/1349 438/269 399/1352 +f 136/429 1566/1349 1564/1353 +f 1563/1342 169/113 1565/1354 +f 107/81 109/157 108/79 +f 1297/1355 1298/1356 1296/1357 +f 1464/947 1476/934 1461/936 +f 1520/1358 507/1359 508/1360 +f 1301/1361 1303/1362 1304/1363 +f 1306/1364 1308/1365 1305/1366 +f 175/176 1532/1367 272/350 +f 1532/1367 200/1213 201/137 +f 1311/1368 1312/1369 1313/1370 +f 872/928 1315/1371 1316/1372 +f 1317/1373 1318/1374 1319/1375 +f 3836/603 878/626 3828/684 +f 1305/1366 864/922 1276/1376 +f 1040/1072 863/921 1320/1377 +f 1564/1353 399/1352 169/113 +f 592/352 1575/1378 591/353 +f 591/353 1088/1379 689/368 +f 1581/1380 1583/1381 1582/1382 +f 1196/599 1147/586 1176/585 +f 710/1383 583/1384 713/1335 +f 285/170 408/248 386/225 +f 953/1296 961/492 963/507 +f 1594/1385 1589/1386 1593/1387 +f 1585/1001 1587/1388 1586/1389 +f 1592/1390 1590/1391 1589/1386 +f 3885/1392 944/1393 3899/1394 +f 1600/1395 1598/1396 1597/1397 +f 1604/1398 1602/1399 1601/1400 +f 1399/748 979/733 980/1401 +f 1589/1386 1596/1402 1595/1403 +f 1610/1404 1608/1405 1607/1406 +f 298/1407 361/1408 360/1040 +f 551/1277 276/169 277/242 +f 1627/1409 3814/1410 3819/1411 +f 859/1412 818/890 860/918 +f 1078/1112 1328/1413 1329/1414 +f 1555/1415 1530/1107 1556/1263 +f 572/1416 680/1417 681/1418 +f 772/422 776/1419 774/425 +f 977/1014 918/955 917/1106 +f 404/1265 392/1289 393/1288 +f 1054/1084 750/843 753/842 +f 1644/1420 1643/1421 1642/1422 +f 932/1300 1082/1114 933/973 +f 1067/1423 1334/1424 1068/1096 +f 1540/1164 264/1147 114/155 +f 597/726 643/757 1336/1425 +f 471/1426 762/407 470/301 +f 1642/1422 3831/877 3842/1427 +f 1644/1420 3842/1427 3843/1428 +f 923/476 950/480 927/493 +f 96/68 548/1429 95/387 +f 520/1430 522/1148 521/1058 +f 445/1129 443/1275 442/487 +f 849/1431 848/1432 842/1433 +f 480/363 755/401 493/297 +f 1646/1434 481/404 1639/1435 +f 311/1436 1567/1437 170/179 +f 464/1311 3497/1438 3495/1439 +f 3497/1438 1361/1440 3495/1439 +f 234/1055 236/341 235/1052 +f 190/1168 192/250 191/338 +f 153/105 152/1441 149/100 +f 231/1059 286/315 230/314 +f 1552/1327 1551/1227 1550/1285 +f 760/405 1648/1442 488/414 +f 3832/1443 1649/1444 3830/1445 +f 828/454 826/678 825/455 +f 52/34 166/218 165/217 +f 1663/1446 1661/1447 1660/1448 +f 287/1165 115/80 286/315 +f 668/1449 670/613 669/1450 +f 634/1451 640/1452 
639/1453 +f 553/1454 559/1455 554/1456 +f 118/83 33/18 516/439 +f 492/415 491/424 490/1457 +f 3552/1458 1651/1459 3553/1460 +f 582/1461 584/1462 583/1384 +f 384/1463 392/1289 415/245 +f 1381/1464 1383/1465 1380/1466 +f 1562/1329 435/268 436/267 +f 952/1297 951/479 947/488 +f 3529/1467 583/1384 584/1462 +f 1660/1448 1667/1468 1666/1469 +f 1394/1470 1396/1471 1393/1472 +f 1660/1448 1664/1473 1663/1446 +f 1671/1474 1669/1049 1668/1475 +f 1624/1476 1673/1477 1672/1478 +f 1407/1479 1409/1480 1410/1481 +f 1677/1482 1675/862 1674/1483 +f 1613/1484 1677/1482 1625/1485 +f 1679/1486 1676/860 1677/1482 +f 1626/1487 3811/1488 3814/1410 +f 534/1141 198/128 539/990 +f 176/177 150/102 159/106 +f 803/1489 734/400 800/441 +f 400/386 191/338 192/250 +f 65/486 143/253 144/98 +f 3526/1490 1656/1491 3540/1492 +f 402/1493 157/108 283/171 +f 629/1494 627/1495 626/1496 +f 461/285 49/36 50/35 +f 758/1497 495/298 756/426 +f 796/1498 786/1499 785/1500 +f 406/235 1513/1198 1560/1501 +f 342/1502 166/218 167/555 +f 1694/1503 1658/825 1695/1504 +f 1692/1505 1694/1503 1696/1506 +f 3557/1507 1652/1508 3552/1458 +f 3921/1509 1703/1510 3923/1511 +f 326/1321 302/257 90/66 +f 3916/1512 1702/1513 1706/1514 +f 3919/1515 1698/1516 1701/1517 +f 1699/1518 3927/1519 3918/1520 +f 1557/1328 1560/1501 1561/1521 +f 1127/551 1117/547 1128/1522 +f 400/386 289/175 346/384 +f 1706/1514 1707/1523 1708/1524 +f 1707/1523 1599/1525 1600/1395 +f 1709/1000 1708/1524 1588/1526 +f 1714/1527 1711/1528 1713/1529 +f 3919/1515 1709/1000 1700/1002 +f 1704/1530 1577/1531 1703/1510 +f 1716/1532 1713/1529 1715/1533 +f 1559/1330 1549/1222 297/1291 +f 1722/1534 1720/1535 1719/1536 +f 1335/1537 596/728 1489/1538 +f 410/238 377/474 378/216 +f 973/506 1163/580 972/505 +f 168/114 399/1352 398/1539 +f 1571/813 1089/1540 1093/811 +f 1572/1541 682/1542 1571/813 +f 749/1543 647/367 754/1544 +f 1730/1545 1723/1546 1729/1547 +f 1594/1385 1729/1547 1592/1390 +f 1729/1547 1591/1548 1592/1390 +f 1731/1549 1730/1545 1583/1381 +f 1384/1550 1377/717 1379/1551 +f 1732/1552 1712/1553 1714/1527 +f 1724/1554 1731/1549 1732/1552 +f 1712/1553 1731/1549 1580/1555 +f 1722/1534 1732/1552 1733/1556 +f 1721/1557 1733/1556 1734/1558 +f 1734/1558 1716/1532 1717/1559 +f 1735/1560 1736/1561 1697/1562 +f 1733/1556 1714/1527 1716/1532 +f 1663/1446 1741/1563 1668/1475 +f 1742/1564 1740/1565 1743/1566 +f 1738/1567 1737/1568 1735/1560 +f 1665/1569 1666/1469 1740/1565 +f 1665/1569 1744/1570 1664/1473 +f 1725/1571 1746/1572 1745/1573 +f 1591/1548 1745/1573 1590/1391 +f 1590/1391 1747/1574 1596/1402 +f 1745/1573 1748/1575 1747/1574 +f 1659/1576 3538/1577 3542/826 +f 3287/1578 3545/1579 3282/1580 +f 1185/591 1182/895 1190/892 +f 1664/1473 1749/1581 1741/1563 +f 1741/1563 1739/1582 1738/1567 +f 1746/1572 1743/1566 1748/1575 +f 1726/1583 1742/1564 1746/1572 +f 1153/574 924/1584 925/496 +f 1115/1585 1153/574 1154/571 +f 1654/1586 3575/1587 1655/1588 +f 1896/1589 3549/1590 1686/1158 +f 1720/1535 1744/1570 1726/1583 +f 1721/1557 1749/1581 1720/1535 +f 1750/1591 1701/1517 1698/1516 +f 1697/1562 1750/1591 1696/1506 +f 1151/566 1063/539 1062/540 +f 1751/1592 1670/1047 1671/1474 +f 1695/1504 1752/1593 1751/1592 +f 961/492 986/1594 981/508 +f 1770/1595 1768/1596 1767/806 +f 1766/1597 1593/1387 1765/1598 +f 1731/1549 1579/1599 1580/1555 +f 1579/1599 1774/1600 1578/1601 +f 3928/1602 1771/1603 3929/1604 +f 1764/1605 3928/1602 3917/1606 +f 1777/1607 1775/1608 1774/1600 +f 3762/1609 1699/1518 1700/1002 +f 1004/1610 1128/1522 1003/1611 +f 1774/1600 1771/1603 1772/1612 +f 987/1613 1399/748 980/1401 +f 1578/1601 
1772/1612 1764/1605 +f 1783/1614 1781/1615 1778/1616 +f 1597/1397 1770/1595 1767/806 +f 639/1453 3895/1617 3884/1618 +f 3928/1602 1769/1619 1770/1595 +f 1793/1620 1789/1621 1788/1622 +f 3929/1604 1798/1623 1769/1619 +f 3508/1624 3773/1625 1806/861 +f 1802/1626 3924/1172 3922/1627 +f 1788/1622 1794/1628 1793/1620 +f 1793/1620 1795/1629 1776/1630 +f 3773/1625 1675/862 1806/861 +f 1584/1631 1783/1614 1789/1621 +f 1771/1603 3920/1632 3929/1604 +f 1801/1633 1798/1623 1800/1634 +f 1768/1596 1798/1623 1799/1635 +f 1775/1608 1797/1636 1771/1603 +f 508/1360 515/556 1520/1358 +f 1796/1637 1802/1626 1797/1636 +f 1800/1634 3920/1632 3922/1627 +f 1794/1628 1804/1638 1795/1629 +f 3532/1639 1752/1593 3547/1640 +f 485/1641 1807/1018 775/423 +f 3809/413 1640/1642 768/411 +f 3837/1643 1810/1169 1653/1644 +f 485/1641 1640/1642 1808/1645 +f 1690/1646 3770/1647 3772/1648 +f 487/295 1648/1442 1811/1649 +f 3839/1650 1816/1651 3822/1652 +f 3838/1653 1648/1442 3830/1445 +f 3839/1650 1811/1649 3838/1653 +f 1812/1654 487/295 1811/1649 +f 1641/1655 767/1656 768/411 +f 465/289 755/401 776/1419 +f 767/1656 3823/1657 3808/412 +f 3822/1652 3826/1658 3847/1659 +f 1809/1660 1815/1661 767/1656 +f 3837/1643 1651/1459 3840/1019 +f 675/1662 3541/1663 3496/1664 +f 1814/1665 1822/1666 1821/1667 +f 1821/1667 3821/1668 3820/1669 +f 1824/1670 1649/1444 1825/1671 +f 1827/1672 1850/957 1805/1673 +f 1668/1475 1662/1674 1663/1446 +f 1779/1675 1848/1676 1828/1677 +f 1790/1678 1805/1673 1794/1628 +f 1833/1679 1853/1680 1852/1681 +f 3521/1682 693/381 575/380 +f 1614/1683 1616/1684 1617/1685 +f 1852/1681 3498/1686 3501/1687 +f 1620/1688 1622/1689 1623/1690 +f 1855/1691 1574/1692 1861/1693 +f 1847/1694 3501/1687 3513/1695 +f 1766/1597 1855/1691 1782/1696 +f 1629/1697 1631/1698 1628/1699 +f 1633/1700 1634/1701 1632/1702 +f 1635/1703 1637/1704 1638/1705 +f 1848/1676 1852/1681 1847/1694 +f 1855/1691 1863/1706 1862/1707 +f 1803/1708 1867/1709 1802/1626 +f 1862/1707 1831/1710 1832/1711 +f 1782/1696 1862/1707 1781/1615 +f 3922/1627 1865/948 1800/1634 +f 1873/1712 1870/1713 1869/1714 +f 1874/1715 799/1716 804/1717 +f 2045/1718 3897/1719 2044/1720 +f 1219/610 1295/667 1225/609 +f 1615/1721 1396/1471 1395/1722 +f 3884/1618 1877/1723 1876/1724 +f 645/1725 3786/1726 3559/1727 +f 1345/663 1035/534 1346/664 +f 634/1451 3884/1618 3880/1728 +f 15/8 1327/1729 14/9 +f 3910/1730 1878/1731 1893/1732 +f 1894/1733 605/1734 1893/1732 +f 3506/1735 627/1495 693/381 +f 3560/1736 645/1725 3559/1727 +f 340/500 454/275 1558/1286 +f 1897/1737 195/126 1898/1738 +f 1889/1739 1877/1723 1890/1740 +f 1901/1741 129/88 1900/1742 +f 196/125 754/1544 232/1743 +f 1705/1744 3921/1509 3916/1512 +f 340/500 166/218 343/498 +f 1570/1745 563/968 561/348 +f 1904/1746 194/124 1897/1737 +f 1781/1615 1832/1711 1780/1747 +f 1905/1748 55/38 1906/1749 +f 1000/1750 995/1751 996/516 +f 986/1594 920/526 987/1613 +f 1191/597 1190/892 1194/594 +f 2094/1752 2075/1753 2093/1754 +f 1920/1755 1919/1756 1915/1757 +f 453/276 1912/1758 1911/1759 +f 1915/1757 1918/1760 1916/1761 +f 3882/1762 2106/1763 3901/1764 +f 1655/1588 3531/1765 1868/1766 +f 1916/1761 1926/1767 1925/1768 +f 704/389 3561/1769 703/390 +f 278/168 1925/1768 1900/1742 +f 3565/1770 1818/1771 1820/1772 +f 1097/543 1115/1585 1114/570 +f 1927/1773 727/375 1923/628 +f 3578/1774 3654/1775 3758/1776 +f 1920/1755 307/186 1929/1777 +f 1929/1777 48/31 728/373 +f 503/356 1497/1778 1509/1046 +f 1902/1779 787/1780 796/1498 +f 1816/1651 3820/1669 3826/1658 +f 1925/1768 1923/628 1924/1781 +f 1535/1782 1568/1783 1534/1784 +f 254/244 258/147 255/149 +f 
1509/1046 366/208 367/210 +f 119/194 35/1785 36/16 +f 219/464 293/1088 294/524 +f 141/174 47/191 431/262 +f 542/470 244/318 245/1786 +f 1940/1787 430/263 431/262 +f 1779/1675 1827/1672 1790/1678 +f 1701/1517 3915/1788 3919/1515 +f 1661/1447 3515/1789 1667/1468 +f 270/162 754/1544 647/367 +f 197/1314 749/1543 196/125 +f 1727/1790 1212/1791 1211/1241 +f 53/37 232/1743 754/1544 +f 1465/894 1479/962 1478/964 +f 1284/1792 3854/1793 1293/661 +f 1951/1794 412/241 413/240 +f 168/114 1554/1795 1553/1264 +f 1486/1039 206/129 207/131 +f 506/1192 78/58 361/1408 +f 994/1796 1007/518 993/1797 +f 951/479 963/507 964/1798 +f 1013/550 3861/670 1351/879 +f 3826/1658 1820/1772 1818/1771 +f 1498/1223 1954/1799 1482/971 +f 299/1006 543/1800 132/90 +f 1754/1801 1756/1802 1753/1803 +f 1681/1804 1683/1805 1682/1806 +f 1696/1506 1695/1504 1697/1562 +f 13/7 738/1807 737/1808 +f 473/420 475/416 474/418 +f 482/291 768/411 1640/1642 +f 204/1809 30/14 205/316 +f 1761/1810 820/1811 819/888 +f 611/738 1125/1812 612/737 +f 1501/1007 1908/1813 1909/1814 +f 1074/1104 711/809 699/801 +f 1960/1815 82/61 83/60 +f 120/1816 1960/1815 123/981 +f 35/1785 87/78 34/17 +f 1773/1817 1025/1060 1031/1063 +f 1965/1818 1569/1819 1535/1782 +f 494/299 471/1426 479/364 +f 1966/1820 1969/1821 1968/1822 +f 446/324 1971/1823 1967/1824 +f 174/115 125/86 126/85 +f 1580/1555 1578/1601 1577/1531 +f 1785/1825 1787/1826 1784/1827 +f 1972/1828 304/1199 305/1093 +f 1254/1273 875/1035 999/1034 +f 1973/1829 342/1502 514/554 +f 321/1830 1973/1829 509/984 +f 1910/1831 275/243 1907/1832 +f 509/984 514/554 508/1360 +f 1486/1039 1527/1833 1485/1097 +f 1974/1834 804/1717 1975/1835 +f 1977/1836 1974/1834 1978/1837 +f 1980/1838 1966/1820 1981/1839 +f 379/219 2/2 295/1840 +f 1718/1841 1710/1842 1701/1517 +f 884/462 992/513 883/1843 +f 1680/1844 1678/1015 1679/1486 +f 902/1845 525/1846 524/692 +f 344/1005 256/148 299/1006 +f 135/1847 137/427 136/429 +f 236/341 228/144 333/1261 +f 448/1848 1967/1824 1966/1820 +f 589/1849 604/1850 584/1462 +f 910/712 908/716 907/525 +f 1981/1839 1968/1822 1987/1851 +f 1982/1852 1987/1851 1979/1853 +f 1988/1854 1987/1851 1989/1855 +f 1990/1856 1989/1855 1991/1857 +f 1991/1857 1993/1858 1992/1859 +f 322/1860 320/983 319/1861 +f 992/513 830/472 883/1843 +f 1144/578 1086/568 1143/1862 +f 1995/1863 1978/1837 1994/1864 +f 1133/558 1118/546 1126/561 +f 1997/1865 1994/1864 1996/1866 +f 870/927 1335/1537 1489/1538 +f 805/443 1982/1852 1998/1867 +f 727/375 2000/1868 1999/1869 +f 1996/1866 1988/1854 1990/1856 +f 1994/1864 1979/1853 1988/1854 +f 1829/1870 792/1871 1273/1312 +f 1830/1872 941/982 983/1021 +f 798/1873 800/441 799/1716 +f 729/1874 731/1875 730/435 +f 2011/1876 2023/1877 2022/1878 +f 1834/1879 846/908 1159/1880 +f 402/1493 284/224 347/1266 +f 2022/1878 2024/1881 2025/1882 +f 1838/1883 989/1024 988/1026 +f 810/515 952/1297 809/452 +f 1823/1884 1840/1885 644/758 +f 1430/822 1432/834 1431/1886 +f 1215/1887 1841/1888 1079/1115 +f 149/100 176/177 177/1889 +f 1316/1372 873/929 872/928 +f 2112/1890 1384/1550 2106/1763 +f 1842/1891 1844/1892 1845/1893 +f 1159/1880 845/907 1160/1195 +f 837/901 1846/1894 1120/1150 +f 1971/1823 2030/1895 2027/1896 +f 517/1897 370/434 189/319 +f 858/917 833/898 812/1230 +f 1471/933 2027/1896 1888/1898 +f 3551/1899 626/1496 627/1495 +f 2174/1900 770/1901 490/1457 +f 935/1045 1854/1902 1010/1044 +f 1613/1484 1611/1903 1618/1904 +f 1231/621 1225/609 1295/667 +f 1856/1905 891/941 892/940 +f 1300/638 748/1906 741/1282 +f 1150/1907 1858/1908 1859/1909 +f 1643/1421 696/1234 1642/1422 +f 1215/1887 1264/1910 1216/1246 
+f 1320/1377 902/1845 901/945 +f 1956/1911 1944/1912 1960/1815 +f 2031/1913 669/1450 1474/1914 +f 2034/1915 1310/646 1299/636 +f 1879/1916 1656/1491 1657/1917 +f 40/1305 242/440 39/20 +f 1992/1859 2039/1918 1456/872 +f 888/1919 957/998 889/938 +f 1882/1920 1996/1866 1990/1856 +f 1078/1112 1255/1274 1077/1111 +f 1871/1921 1192/1922 1090/1121 +f 294/524 218/1212 219/464 +f 1323/645 2026/1923 1471/933 +f 123/981 83/60 1484/979 +f 1324/1924 2041/1925 1323/645 +f 384/1463 359/260 352/203 +f 1324/1924 2038/1926 2036/1927 +f 1883/1928 1885/1929 1829/1870 +f 287/1165 114/155 107/81 +f 1096/1128 1134/1177 661/774 +f 915/956 931/972 916/954 +f 806/442 1981/1839 1982/1852 +f 3533/1930 584/1462 604/1850 +f 1425/800 2042/1931 1018/530 +f 1018/530 1948/1932 1017/528 +f 3562/1933 740/1934 3519/481 +f 1948/1932 649/1935 646/365 +f 1881/1936 1990/1856 1991/1857 +f 1900/1742 130/1157 278/168 +f 992/513 994/1796 993/1797 +f 923/476 925/496 924/1584 +f 3878/1937 2047/1938 3876/1939 +f 1643/1421 625/1940 606/359 +f 2050/1941 2051/1942 2049/1943 +f 2045/1718 1404/759 2047/1938 +f 3876/1939 945/1944 3885/1392 +f 1139/1181 1079/1115 1841/1888 +f 1037/1069 892/940 982/1167 +f 2055/1945 2057/1946 2056/1947 +f 1198/1228 718/1948 717/814 +f 1619/1949 1612/1950 1611/1903 +f 2058/1951 1606/1952 2059/1953 +f 2059/1953 1607/1406 2058/1951 +f 2058/1951 1624/1476 1605/1954 +f 71/1955 179/118 37/19 +f 405/233 1557/1328 382/1281 +f 976/1013 1006/1042 977/1014 +f 322/1860 1972/1828 1911/1759 +f 2061/1956 2063/1957 2062/1958 +f 2066/1959 2067/1960 1825/1671 +f 2052/1961 2064/1962 2051/1942 +f 1609/1963 2066/1959 1647/1964 +f 1515/1965 152/1441 1516/1966 +f 1009/522 993/1797 1007/518 +f 1644/1420 3815/1967 2118/885 +f 279/1030 1916/1761 278/168 +f 1285/1968 3862/640 1284/1792 +f 1647/1964 1825/1671 1649/1444 +f 2056/1947 2067/1960 2065/1969 +f 319/1861 1953/1970 1952/1971 +f 667/777 1913/1972 1914/1973 +f 2070/1974 625/1940 2048/1975 +f 2052/1961 2055/1945 2063/1957 +f 2038/1926 1655/1588 1458/1976 +f 1606/1952 2073/1977 2072/1978 +f 1235/1979 1270/1980 1236/1256 +f 2072/1978 2075/1753 2074/1981 +f 2059/1953 2072/1978 2062/1958 +f 2074/1981 2077/1982 2076/1983 +f 1601/1400 2078/1984 2073/1977 +f 2049/1943 1643/1421 1645/1985 +f 1000/1750 1001/1986 994/1796 +f 1089/1540 684/369 689/368 +f 1931/1987 1306/1364 1930/1988 +f 1462/1989 1021/533 1022/532 +f 1932/1990 1934/1991 1935/1992 +f 875/1035 1846/1894 998/1033 +f 1936/1993 1937/1994 1938/1995 +f 44/140 46/32 92/139 +f 1534/1784 1956/1911 120/1816 +f 1541/1162 317/183 391/230 +f 2081/1996 2084/1997 2083/1998 +f 146/557 1959/1999 300/2000 +f 1360/2001 1942/2002 1361/1440 +f 985/1023 1032/1066 1033/1065 +f 1995/1863 1892/2003 2043/2004 +f 2079/2005 2082/2006 2081/1996 +f 2095/2007 2076/1983 2077/1982 +f 2081/1996 2078/1984 2079/2005 +f 2055/1945 2054/2008 2053/2009 +f 501/456 2095/2007 500/304 +f 2097/2010 2099/2011 2098/2012 +f 2098/2012 2094/1752 2083/1998 +f 2140/2013 3893/2014 3866/2015 +f 2141/2016 3894/2017 3893/2014 +f 2051/1942 2048/1975 2049/1943 +f 592/352 2101/2018 1576/2019 +f 2071/2020 2076/1983 2096/2021 +f 2102/2022 2098/2012 2101/2018 +f 593/2023 2102/2022 592/352 +f 1019/529 746/1283 747/799 +f 1950/2024 12/2025 1327/1729 +f 725/396 2096/2021 501/456 +f 1385/791 2104/2026 1379/1551 +f 1379/1551 2106/1763 1384/1550 +f 2107/2027 2100/630 2097/2010 +f 3908/2028 502/303 3896/2029 +f 3896/2029 499/305 3901/1764 +f 940/2030 2109/2031 944/1393 +f 928/2032 1170/595 973/506 +f 3868/2033 684/369 3899/1394 +f 1962/2034 1964/2035 1961/2036 +f 3878/1937 573/345 3897/1719 +f 
2039/1918 1968/1822 1969/1821 +f 1389/2037 2110/2038 2109/2031 +f 1398/747 2113/2039 1397/746 +f 3757/2040 3753/2041 3743/2042 +f 3911/2043 2107/2027 2103/2044 +f 949/553 924/1584 1115/1585 +f 260/150 324/189 326/1321 +f 1390/724 2113/2039 2110/2038 +f 2070/1974 2064/1962 2071/2020 +f 3867/2045 2103/2044 593/2023 +f 2105/2046 1421/792 2114/2047 +f 2114/2047 1423/797 2115/2048 +f 3892/2049 2115/2048 3903/2050 +f 3908/2028 2114/2047 3892/2049 +f 554/1456 1902/1779 553/1454 +f 972/505 967/497 966/495 +f 1765/1598 1573/2051 1574/1692 +f 1984/2052 1986/2053 1983/2054 +f 2053/2009 2069/2055 2057/1946 +f 3573/2056 724/2057 714/376 +f 1625/1485 1674/1483 1673/1477 +f 3881/2058 2119/2059 2122/2060 +f 3903/2050 2123/2061 3904/2062 +f 56/161 1906/1749 55/38 +f 626/1496 3879/2063 629/1494 +f 2001/2064 2003/2065 2004/2066 +f 3912/2067 2127/2068 3907/2069 +f 2006/2070 2008/2071 2009/2072 +f 2127/2068 1412/764 2128/2073 +f 2014/2074 2016/2075 2017/2076 +f 2018/2077 2020/2078 2021/2079 +f 1495/997 1947/2080 197/1314 +f 1379/1551 1386/786 1385/791 +f 3814/1410 1680/1844 1618/1904 +f 2029/2081 1884/2082 2028/2083 +f 3902/2084 2120/2085 3887/2086 +f 629/1494 3914/631 3872/2087 +f 682/1542 943/2088 1089/1540 +f 724/2057 2121/2089 716/377 +f 1757/991 3821/1668 3845/2090 +f 765/2091 3844/2092 3809/413 +f 3851/2093 715/378 3852/2094 +f 706/1340 3870/2095 3872/2087 +f 981/508 987/1613 980/1401 +f 1457/871 1656/1491 1880/2096 +f 2017/2076 2032/2097 2033/2098 +f 1458/1976 1868/1766 1457/871 +f 1776/1630 1792/2099 1793/1620 +f 1959/1999 1946/2100 1958/2101 +f 3663/805 3725/2102 3661/2103 +f 3913/2104 2122/2060 3850/2105 +f 1093/811 689/368 1088/1379 +f 966/495 968/502 972/505 +f 3852/2094 3881/2058 3913/2104 +f 2172/2106 2170/2107 2171/2108 +f 670/613 2137/2109 669/1450 +f 3761/2110 1903/2111 233/629 +f 1429/823 28/2112 1449/2113 +f 964/1798 978/509 971/501 +f 3841/2114 1413/779 2127/2068 +f 1473/2115 669/1450 2137/2109 +f 3834/680 1359/706 1358/683 +f 2138/2116 1870/1713 2137/2109 +f 1872/2117 2137/2109 1870/1713 +f 715/378 3888/2118 3881/2058 +f 1881/1936 1992/1859 1880/2096 +f 3843/1428 739/482 740/1934 +f 1837/2119 3513/1695 3514/2120 +f 253/146 59/62 85/64 +f 1808/1645 3840/1019 1807/1018 +f 1647/1964 3818/2121 1609/1963 +f 3865/2122 671/611 668/1449 +f 2112/1890 3883/2123 2111/2124 +f 1405/844 2044/1720 1438/830 +f 3829/691 1364/2125 1366/2126 +f 273/1163 391/230 249/165 +f 576/392 3520/2127 3555/2128 +f 3875/2129 668/1449 2031/1913 +f 1430/822 2140/2013 2139/2130 +f 2136/2131 3860/849 28/2112 +f 1430/822 18/12 19/11 +f 3893/2014 570/347 695/2132 +f 1442/835 2046/2133 2141/2016 +f 1431/1886 2141/2016 2140/2013 +f 3869/2134 3850/2105 2136/2131 +f 3808/412 1817/2135 766/2136 +f 3866/2015 695/2132 671/611 +f 2142/2137 798/1873 2138/2116 +f 571/346 2142/2137 694/612 +f 3819/1411 1618/1904 1611/1903 +f 2143/2138 2025/1882 2142/2137 +f 694/612 2138/2116 670/613 +f 681/1418 2144/2139 572/1416 +f 2144/2139 2022/1878 2143/2138 +f 328/394 457/635 290/393 +f 1572/1541 2011/1876 2144/2139 +f 630/395 3903/2050 624/360 +f 2085/2140 2087/2141 2088/2142 +f 2090/2143 2092/2144 2089/2145 +f 572/1416 2143/2138 571/346 +f 1875/2146 2153/2147 2149/2148 +f 1873/1712 2149/2148 2154/2149 +f 2103/2044 2097/2010 2102/2022 +f 2157/2150 2155/2151 2156/2152 +f 2128/2073 1414/766 2120/2085 +f 3874/2153 589/1849 582/1461 +f 3872/2087 628/2154 629/1494 +f 2149/2148 2158/2155 2159/2156 +f 2159/2156 3905/2157 3890/2158 +f 698/388 3902/2084 705/2159 +f 3823/1657 3827/2160 1817/2135 +f 2158/2155 3874/2153 3905/2157 +f 1438/830 2046/2133 1439/828 
+f 2035/2161 2160/2162 2161/2163 +f 2161/2163 3889/2164 3886/2165 +f 3871/2166 605/1734 589/1849 +f 3873/2167 18/12 3875/2129 +f 370/434 61/44 451/320 +f 649/1935 413/240 648/366 +f 2153/2147 2156/2152 2158/2155 +f 1428/2168 1449/2113 1450/2169 +f 1490/1027 73/55 1488/989 +f 3890/2158 710/1383 633/2170 +f 2155/2151 3891/2171 3871/2166 +f 1456/872 1880/2096 1992/1859 +f 2125/2172 2087/2141 2124/2173 +f 2160/2162 3890/2158 3889/2164 +f 300/2000 505/634 323/261 +f 1064/592 1046/538 1065/2174 +f 2043/2004 1891/2175 2155/2151 +f 2129/2176 2004/2066 2003/2065 +f 2131/2177 2133/2178 2134/2179 +f 3848/850 1450/2169 1449/2113 +f 1998/1867 804/1717 805/443 +f 3879/2063 3864/2180 3914/631 +f 474/418 490/1457 770/1901 +f 1164/582 1182/895 1177/589 +f 707/1339 3909/2181 3870/2095 +f 708/2182 2162/2183 707/1339 +f 2162/2183 3886/2165 3909/2181 +f 548/1429 1905/1748 458/281 +f 1145/562 1114/570 1171/587 +f 705/2159 1650/2184 704/389 +f 2043/2004 1977/1836 1995/1863 +f 737/1808 554/1456 559/1455 +f 1034/605 1191/597 1023/593 +f 458/281 1907/1832 275/243 +f 195/126 232/1743 233/629 +f 2040/2185 1969/1821 1970/2186 +f 2145/2187 2147/2188 2148/2189 +f 2054/2008 2049/1943 1645/1985 +f 3832/1443 1639/1435 3818/2121 +f 9/2190 11/2191 10/2192 +f 252/145 506/1192 258/147 +f 472/300 479/364 471/1426 +f 1865/948 3791/1174 3503/2193 +f 900/484 910/712 907/525 +f 1941/2194 2151/2195 1942/2002 +f 1921/2196 1927/1773 1919/1756 +f 2163/2197 1898/1738 2116/2198 +f 271/1003 153/105 178/2199 +f 1904/1746 12/2025 9/2190 +f 282/286 264/1147 460/283 +f 1872/2117 2154/2149 2035/2161 +f 1326/2200 1897/1737 2163/2197 +f 628/2154 693/381 627/1495 +f 1724/1554 1719/1536 1723/1546 +f 1549/1222 436/267 172/117 +f 1582/1382 1730/1545 1729/1547 +f 1921/2196 1929/1777 1928/2201 +f 3563/2202 10/2192 11/2191 +f 2152/2203 1950/2024 1949/2204 +f 475/416 774/425 771/417 +f 769/419 2171/2108 473/420 +f 2171/2108 476/2205 473/420 +f 476/2205 772/422 475/416 +f 1791/2206 464/1311 1760/2207 +f 2166/2208 2168/2209 2169/2210 +f 595/354 372/1217 98/72 +f 483/293 1641/1655 482/291 +f 762/407 2173/853 470/301 +f 771/417 484/292 492/415 +f 486/296 1809/1660 483/293 +f 297/1291 1517/1185 296/1187 +f 763/2211 2174/1900 762/407 +f 776/1419 759/403 489/294 +f 184/374 2176/2212 2000/1868 +f 412/241 303/1201 414/239 +f 2005/2213 185/166 224/277 +f 3811/1488 1682/1806 3846/1017 +f 2164/2214 756/426 764/409 +f 1472/2215 2161/2163 2162/2183 +f 950/480 964/1798 965/494 +f 2177/2216 2179/2217 2180/2218 +f 227/1218 1510/1050 333/1261 +f 764/409 467/410 2164/2214 +f 2182/2219 2184/2220 2181/2221 +f 2186/2222 2018/2077 2185/2223 +f 1985/2224 2181/2221 1986/2053 +f 214/136 729/1874 1531/1119 +f 1545/1200 1546/2225 1569/1819 +f 865/675 842/1433 866/682 +f 849/1431 1357/677 1362/2226 +f 452/274 1558/1286 454/275 +f 559/1455 3188/2227 737/1808 +f 16/2228 737/1808 3188/2227 +f 560/2229 498/2230 374/2231 +f 3193/2232 3195/2233 3194/2234 +f 3200/2235 3198/2236 3199/2237 +f 3211/2238 3213/2239 3212/2240 +f 3215/2241 3217/2242 3216/2243 +f 2187/2244 2002/2245 2001/2064 +f 2169/2210 2189/2246 2190/2247 +f 2192/2248 2085/2140 2191/2249 +f 2193/2250 2195/2251 2196/2252 +f 2198/2253 2200/2254 2197/2255 +f 1998/1867 1979/1853 1975/1835 +f 3664/2256 3668/2257 3671/2258 +f 2116/2198 1903/2111 1902/1779 +f 1859/1909 2009/2072 1150/1907 +f 3904/2062 2126/2259 3906/2260 +f 1135/2261 1087/569 1130/559 +f 2201/2262 2015/2263 2014/2074 +f 1149/2264 110/152 1858/1908 +f 3221/2265 3218/2266 3215/2241 +f 2203/2267 2205/2268 2206/2269 +f 579/344 1895/2270 1019/529 +f 1755/2271 2208/2272 
1756/1802 +f 3891/2171 1893/1732 605/1734 +f 1211/1241 2150/2273 1727/1790 +f 868/925 2210/2274 2209/2275 +f 944/1393 930/754 940/2030 +f 2130/2276 2202/2277 2201/2262 +f 233/629 1898/1738 195/126 +f 665/776 750/843 666/2278 +f 1167/1207 999/1034 618/2279 +f 2211/2280 2213/2281 2214/2282 +f 2215/2283 2217/2284 2218/2285 +f 3867/2045 590/351 3868/2033 +f 2220/2286 220/306 2219/2287 +f 823/447 948/477 1094/552 +f 3225/2288 3227/2289 3226/2290 +f 266/160 455/469 265/158 +f 3882/1762 2107/2027 3883/2123 +f 2221/2291 3519/481 3517/483 +f 2223/2292 2225/2293 2226/2294 +f 2207/2295 2124/2173 2208/2272 +f 673/2296 3869/2134 3864/2180 +f 817/444 996/516 815/451 +f 2227/2297 2146/2298 2145/2187 +f 1947/2080 646/365 749/1543 +f 3900/633 17/10 3873/2167 +f 2229/2299 1638/1705 1381/1464 +f 1679/1486 1618/1904 1680/1844 +f 1637/1704 2232/2300 2230/2301 +f 2233/2302 1617/1685 1616/1684 +f 2231/2303 2236/2304 2237/2305 +f 1381/1464 2238/2306 1382/2307 +f 3586/2308 2242/2309 3587/2310 +f 1630/2311 2233/2302 1631/1698 +f 1681/1804 472/300 469/302 +f 1408/2312 2243/2313 2244/2314 +f 3236/2315 3238/2316 3237/2317 +f 3232/2318 3240/2319 3235/2320 +f 1637/1704 2238/2306 1638/1705 +f 3237/2317 3233/2321 3234/2322 +f 1409/1480 1634/1701 2245/2323 +f 1380/1466 2247/2324 2246/2325 +f 1628/1699 2245/2323 1621/2326 +f 2249/2327 1622/1689 1633/1700 +f 1632/1702 1408/2312 2244/2314 +f 2248/2328 1631/1698 2250/2329 +f 2235/2330 1637/1704 1636/2331 +f 3245/2332 3199/2237 3198/2236 +f 631/1316 2071/2020 725/396 +f 515/556 52/34 49/36 +f 782/858 2251/2333 1251/1270 +f 2253/2334 2255/2335 2252/2336 +f 397/231 459/284 784/428 +f 1456/872 2040/2185 1455/870 +f 2040/2185 2036/1927 1455/870 +f 1401/2337 2111/2124 1398/747 +f 2041/1925 1970/2186 2026/1923 +f 1535/1782 414/239 1965/1818 +f 1586/1389 2080/2338 2079/2005 +f 2256/2339 1132/1175 1886/2340 +f 1700/1002 3918/1520 3919/1515 +f 3815/1967 1686/1158 3817/886 +f 1263/1298 1886/2340 1096/1128 +f 598/2341 844/1245 599/727 +f 2257/2342 2259/2343 2260/2344 +f 2258/2345 2262/2346 2259/2343 +f 2259/2343 2263/2347 2216/2348 +f 2260/2344 2216/2348 2215/2283 +f 1967/1824 1970/2186 1969/1821 +f 2212/2349 1963/2350 2213/2281 +f 922/2351 2218/2285 2217/2284 +f 1312/1369 1101/1131 2264/2352 +f 1307/2353 1286/2354 789/864 +f 1548/2355 1546/2225 1972/1828 +f 2266/2356 2268/2357 2265/2358 +f 2265/2358 2270/2359 2269/2360 +f 2272/2361 2269/2360 2271/2362 +f 2273/2363 2265/2358 2272/2361 +f 2275/2364 2277/2365 2274/2366 +f 2278/2367 2277/2365 2279/2368 +f 2284/2369 3617/2370 3639/2371 +f 2287/2372 2286/2373 2281/2374 +f 2288/2375 2283/2376 2287/2372 +f 2290/2377 3639/2371 3620/2378 +f 2291/2379 2293/2380 2294/2381 +f 2295/2382 2294/2381 2279/2368 +f 2296/2383 2291/2379 2295/2382 +f 2297/2384 2292/2385 2291/2379 +f 2294/2381 2299/2386 2300/2387 +f 2279/2368 2300/2387 2278/2367 +f 2298/2388 2302/2389 2292/2385 +f 2292/2385 2303/2390 2293/2380 +f 2293/2380 2304/2391 2299/2386 +f 2304/2391 2306/2392 2299/2386 +f 2306/2392 2300/2387 2299/2386 +f 2308/2393 2310/2394 2307/2395 +f 2302/2389 2310/2394 2303/2390 +f 2301/2396 2307/2395 2302/2389 +f 2312/2397 2307/2395 2311/2398 +f 2313/2399 2315/2400 2316/2401 +f 2318/2402 2320/2403 2317/2404 +f 3748/2405 2322/2406 2323/2407 +f 2324/2408 2273/2363 2326/2409 +f 2313/2399 2326/2409 2327/2410 +f 2328/2411 2313/2399 2316/2401 +f 2329/2412 2324/2408 2328/2411 +f 2331/2413 2333/2414 2330/2415 +f 2330/2415 2267/2416 2266/2356 +f 2325/2417 2266/2356 2273/2363 +f 2329/2412 2330/2415 2325/2417 +f 2333/2414 2335/2418 2267/2416 +f 2336/2419 2333/2414 2332/2420 +f 
2338/2421 2332/2420 2337/2422 +f 2331/2413 2337/2422 2332/2420 +f 2329/2412 2339/2423 2331/2413 +f 2328/2411 2340/2424 2329/2412 +f 2316/2401 2341/2425 2328/2411 +f 2342/2426 2315/2400 2343/2427 +f 2344/2428 2346/2429 2347/2430 +f 2341/2425 2347/2430 2340/2424 +f 2342/2426 2344/2428 2341/2425 +f 2348/2431 2345/2432 2344/2428 +f 2350/2433 2352/2434 2353/2435 +f 2339/2423 2353/2435 2337/2422 +f 2340/2424 2350/2433 2339/2423 +f 2347/2430 2351/2436 2350/2433 +f 2337/2422 2354/2437 2338/2421 +f 2353/2435 2355/2438 2354/2437 +f 2356/2439 2276/2440 2357/2441 +f 2356/2439 2358/2442 2359/2443 +f 2356/2439 2296/2383 2295/2382 +f 2279/2368 2356/2439 2295/2382 +f 2360/2444 2270/2359 2268/2357 +f 2267/2416 2360/2444 2268/2357 +f 2362/2445 2364/2446 2365/2447 +f 2362/2445 2361/2448 2358/2442 +f 2366/2449 2358/2442 2357/2441 +f 2366/2449 2363/2450 2362/2445 +f 2357/2441 2368/2451 2366/2449 +f 2366/2449 2369/2452 2367/2453 +f 2276/2440 2370/2454 2368/2451 +f 2368/2451 2282/2455 2369/2452 +f 2372/2456 2374/2457 2371/2458 +f 2375/2459 2377/2460 2007/2461 +f 2378/2462 2380/2463 2381/2464 +f 2382/2465 2381/2464 2383/2466 +f 2384/2467 2378/2462 2382/2465 +f 2385/2468 2379/2469 2378/2462 +f 2388/2470 2390/2471 2387/2472 +f 2387/2472 2392/2473 2391/2474 +f 2393/2475 2387/2472 2391/2474 +f 2394/2476 2388/2470 2387/2472 +f 2186/2222 2402/2477 2400/2478 +f 2403/2479 2327/2410 2326/2409 +f 2272/2361 2326/2409 2273/2363 +f 2271/2362 2403/2479 2272/2361 +f 2405/2480 2404/2481 2403/2479 +f 2404/2481 2408/2482 2327/2410 +f 2409/2483 2411/2484 2412/2485 +f 2409/2483 2271/2362 2269/2360 +f 2413/2486 2269/2360 2270/2359 +f 2413/2486 2410/2487 2409/2483 +f 2417/2488 2416/2489 2418/2490 +f 2417/2488 2406/2491 2405/2480 +f 2412/2485 2405/2480 2271/2362 +f 2411/2484 2417/2488 2412/2485 +f 2406/2491 2407/2492 2404/2481 +f 3569/1337 632/2493 713/1335 +f 2420/2494 2397/2495 2421/2496 +f 2420/2494 2423/2497 2422/2498 +f 2424/2499 2422/2498 2419/2500 +f 2424/2499 2398/2501 2420/2494 +f 2425/2502 2392/2473 2426/2503 +f 2421/2496 2426/2503 2423/2497 +f 2397/2495 2425/2502 2421/2496 +f 2396/2504 2391/2474 2425/2502 +f 2427/2505 2315/2400 2314/2506 +f 2427/2505 2408/2482 2428/2507 +f 2430/2508 2428/2507 2429/2509 +f 2343/2427 2427/2505 2430/2508 +f 2432/2510 2430/2508 2431/2511 +f 2431/2511 2429/2509 2433/2512 +f 2435/2513 2433/2512 2434/2514 +f 2436/2515 2431/2511 2435/2513 +f 2428/2507 2407/2492 2437/2516 +f 2422/2498 2407/2492 2419/2500 +f 2438/2517 2422/2498 2423/2497 +f 2429/2509 2437/2516 2438/2517 +f 2440/2518 2433/2512 2439/2519 +f 2441/2520 2433/2512 2429/2509 +f 2442/2521 2439/2519 2441/2520 +f 2443/2522 2440/2518 2439/2519 +f 2445/2523 2444/2524 2443/2522 +f 2447/2525 2443/2522 2442/2521 +f 2383/2466 2445/2523 2447/2525 +f 2381/2464 2446/2526 2445/2523 +f 2448/2527 2423/2497 2426/2503 +f 2449/2528 2426/2503 2392/2473 +f 2441/2520 2449/2528 2442/2521 +f 2441/2520 2438/2517 2448/2527 +f 2449/2528 2390/2471 2450/2529 +f 2450/2529 2389/2530 2451/2531 +f 2447/2525 2451/2531 2383/2466 +f 2442/2521 2450/2529 2447/2525 +f 2451/2531 2453/2532 2452/2533 +f 2452/2533 2455/2534 2454/2535 +f 2382/2465 2454/2535 2384/2467 +f 2383/2466 2452/2533 2382/2465 +f 2456/2536 2373/2537 2372/2456 +f 2459/2538 2461/2539 2458/2540 +f 2463/2541 2465/2542 2462/2543 +f 2462/2543 2285/2544 2284/2369 +f 2466/2545 2284/2369 2290/2377 +f 2467/2546 2462/2543 2466/2545 +f 2461/2539 3703/2547 3701/2548 +f 2468/2549 2470/2550 2471/2551 +f 2472/2552 2471/2551 2473/2553 +f 2474/2554 3701/2548 3688/2555 +f 2475/2556 2477/2557 2478/2558 +f 2458/2540 2453/2532 
2479/2559 +f 2479/2559 2389/2530 2388/2470 +f 2395/2560 2479/2559 2388/2470 +f 2480/2561 2458/2540 2479/2559 +f 2481/2562 2483/2563 2478/2558 +f 2484/2564 2485/2565 221/2566 +f 2471/2551 2464/2567 2463/2541 +f 2473/2553 2463/2541 2467/2546 +f 2487/2568 2466/2545 2486/2569 +f 2486/2569 2290/2377 2488/2570 +f 2490/2571 2488/2570 2489/2572 +f 2491/2573 2486/2569 2490/2571 +f 2492/2574 2467/2546 2487/2568 +f 2493/2575 2487/2568 2491/2573 +f 2494/2576 2492/2574 2493/2575 +f 2495/2577 2473/2553 2492/2574 +f 3690/2578 2474/2554 3688/2555 +f 3691/2579 2496/2580 3690/2578 +f 2498/2581 2493/2575 2499/2582 +f 2500/2583 2499/2582 2501/2584 +f 3700/2585 2498/2581 2500/2583 +f 3687/2586 2494/2576 2498/2581 +f 2306/2392 2278/2367 2300/2387 +f 2305/2587 2504/2588 2306/2392 +f 2504/2588 2507/2589 2506/2590 +f 2506/2590 2288/2375 2508/2591 +f 2274/2366 2508/2591 2275/2364 +f 2278/2367 2506/2590 2274/2366 +f 3633/2592 2489/2572 2488/2570 +f 3620/2378 2488/2570 2290/2377 +f 2507/2589 2289/2593 2288/2375 +f 2505/2594 2509/2595 2507/2589 +f 2508/2591 2287/2372 2511/2596 +f 2511/2596 2281/2374 2280/2597 +f 2370/2454 2280/2597 2282/2455 +f 2275/2364 2511/2596 2370/2454 +f 2512/2598 2489/2572 2513/2599 +f 2514/2600 2513/2599 2515/2601 +f 2517/2602 2514/2600 2516/2603 +f 2517/2602 2490/2571 2512/2598 +f 2499/2582 2491/2573 2517/2602 +f 2499/2582 2516/2603 2501/2584 +f 2510/2604 2519/2605 2518/2606 +f 2518/2606 2521/2607 2520/2608 +f 3616/2609 2520/2608 3621/2610 +f 3632/2611 2518/2606 3616/2609 +f 2522/2612 2303/2390 2310/2394 +f 2523/2613 2310/2394 2309/2614 +f 2305/2587 2522/2612 2524/2615 +f 2524/2615 2523/2613 2525/2616 +f 2519/2605 2525/2616 2521/2607 +f 2505/2594 2524/2615 2519/2605 +f 2308/2393 2527/2617 2526/2618 +f 2526/2618 2529/2619 2528/2620 +f 2531/2621 2528/2620 2530/2622 +f 2309/2614 2526/2618 2531/2621 +f 1214/1243 2151/2195 1211/1241 +f 2419/2500 2418/2490 2424/2499 +f 2533/2623 2349/2624 2317/2404 +f 2534/2625 2317/2404 2320/2403 +f 2535/2626 2533/2623 2534/2625 +f 2536/2627 2345/2432 2533/2623 +f 2537/2628 2346/2429 2536/2627 +f 2538/2629 2536/2627 2535/2626 +f 2539/2630 2537/2628 2538/2629 +f 2540/2631 2351/2436 2537/2628 +f 2312/2397 2541/2632 2527/2617 +f 2527/2617 2542/2633 2529/2619 +f 2543/2634 2434/2514 2544/2635 +f 2545/2636 2544/2635 2546/2637 +f 2319/2638 2543/2634 2545/2636 +f 2318/2402 2435/2513 2543/2634 +f 2514/2600 2548/2639 2547/2640 +f 2547/2640 2550/2641 2549/2642 +f 2552/2643 2549/2642 2551/2644 +f 2552/2643 2514/2600 2547/2640 +f 2520/2608 2554/2645 2553/2646 +f 2553/2646 2556/2647 2555/2648 +f 3590/2649 2555/2648 3593/2650 +f 3621/2610 2553/2646 3590/2649 +f 2523/2613 2531/2621 2557/2651 +f 2557/2651 2530/2622 2558/2652 +f 2525/2616 2557/2651 2559/2653 +f 2559/2653 2558/2652 2560/2654 +f 2554/2645 2560/2654 2556/2647 +f 2521/2607 2559/2653 2554/2645 +f 2561/2655 2007/2461 2006/2070 +f 2376/2656 2225/2293 2377/2460 +f 2564/2657 2566/2658 2563/2659 +f 2563/2659 338/511 2567/2660 +f 2569/2661 2567/2660 2568/2662 +f 2570/2663 2563/2659 2569/2661 +f 2572/2664 2574/2665 2571/2666 +f 2571/2666 2576/2667 2575/2668 +f 2544/2635 2575/2668 2546/2637 +f 2544/2635 2572/2664 2571/2666 +f 2578/2669 2569/2661 2577/2670 +f 2579/2671 2569/2661 2568/2662 +f 2574/2665 2579/2671 2576/2667 +f 2573/2672 2577/2670 2574/2665 +f 2581/2673 2583/2674 2580/2675 +f 2584/2676 2583/2674 2585/2677 +f 3697/2678 2580/2675 2584/2676 +f 3676/2679 2580/2675 3675/2680 +f 2582/2681 2552/2643 2583/2674 +f 2583/2674 2551/2644 2585/2677 +f 2500/2583 2589/2682 2588/2683 +f 2590/2684 2589/2682 2591/2685 +f 3693/2686 
2588/2683 2590/2684 +f 3695/2687 2500/2583 2588/2683 +f 2501/2584 2591/2685 2589/2682 +f 2592/2688 2594/2689 2595/2690 +f 2386/2691 2503/2692 2502/2693 +f 2596/2694 2386/2691 2374/2457 +f 2596/2694 2373/2537 2597/2695 +f 2599/2696 2597/2695 2598/2697 +f 2599/2696 2379/2469 2596/2694 +f 2600/2698 2380/2463 2599/2696 +f 2600/2698 2598/2697 2601/2699 +f 2603/2700 2601/2699 2602/2701 +f 2603/2700 2446/2526 2600/2698 +f 2604/2702 2602/2701 2605/2703 +f 2440/2518 2605/2703 2434/2514 +f 2444/2524 2604/2702 2440/2518 +f 2606/2704 2581/2673 2607/2705 +f 2606/2704 2590/2684 2591/2685 +f 2591/2685 2582/2681 2606/2704 +f 3676/2679 2456/2536 3689/2706 +f 3689/2706 2372/2456 3693/2686 +f 2478/2558 2608/2707 2481/2562 +f 2609/2708 2611/2709 2199/2710 +f 2564/2657 2613/2711 2612/2712 +f 2612/2712 2598/2697 2597/2695 +f 2457/2713 2597/2695 2373/2537 +f 2565/2714 2612/2712 2457/2713 +f 2614/2715 2570/2663 2578/2669 +f 2615/2716 2578/2669 2573/2672 +f 2602/2701 2614/2715 2615/2716 +f 2601/2699 2613/2711 2614/2715 +f 2572/2664 2605/2703 2616/2717 +f 2616/2717 2602/2701 2615/2716 +f 2573/2672 2616/2717 2615/2716 +f 1459/880 1045/541 1021/533 +f 1265/1299 1079/1115 1082/1114 +f 1612/1950 1609/1963 1611/1903 +f 2067/1960 2069/2055 2068/2718 +f 1135/2261 1136/564 1148/563 +f 1938/1995 2618/2719 2617/2720 +f 95/387 458/281 128/89 +f 2058/1951 1608/1405 1612/1950 +f 1454/873 1445/851 1367/703 +f 655/769 1048/1079 2619/2721 +f 722/2722 2621/2723 723/817 +f 1131/1176 1033/1065 1134/1177 +f 562/349 366/208 1497/1778 +f 1723/1546 1725/1571 1591/1548 +f 1786/2724 1122/1152 1787/1826 +f 267/159 323/261 266/160 +f 1325/644 1654/1586 2038/1926 +f 1468/896 1435/832 1479/962 +f 658/772 529/2725 528/695 +f 8/4 247/339 7/2726 +f 382/1281 1559/1330 3/223 +f 788/866 1307/2353 789/864 +f 940/2030 929/755 928/2032 +f 911/951 2623/2727 912/2728 +f 2621/2723 655/769 654/768 +f 2624/2729 2625/2730 2626/2731 +f 998/1033 27/26 26/2732 +f 635/753 2626/2731 2625/2730 +f 2627/2733 644/758 1840/1885 +f 2628/2734 1840/1885 2629/2735 +f 1200/2736 2627/2733 2628/2734 +f 1199/1231 641/756 2627/2733 +f 1520/1358 49/36 477/1076 +f 1452/874 1349/668 1451/1244 +f 2630/2737 1859/1909 1858/1908 +f 644/758 1335/1537 1823/1884 +f 1197/1229 717/814 2632/2738 +f 2632/2738 719/815 1166/1205 +f 2628/2734 1166/1205 1200/2736 +f 2629/2735 2632/2738 2628/2734 +f 1762/2739 1050/2740 821/889 +f 1823/1884 873/929 1839/2741 +f 2634/2742 794/1124 652/767 +f 1028/1064 1027/1061 1161/1240 +f 1042/1073 2635/2743 1043/1074 +f 93/70 1901/1741 1924/1781 +f 2036/1927 1458/1976 1455/870 +f 1527/1833 1525/1325 1526/1098 +f 877/932 1274/1313 1277/1317 +f 134/91 797/430 1544/1190 +f 1328/1413 931/972 1329/1414 +f 607/736 1091/1122 1161/1240 +f 1043/1074 2637/2744 2636/2745 +f 2636/2745 2639/2746 2638/2747 +f 1275/1315 2638/2747 1276/1376 +f 1275/1315 1043/1074 2636/2745 +f 1272/1308 2626/2731 2640/2748 +f 2640/2748 638/752 637/2749 +f 1110/1143 2640/2748 637/2749 +f 536/2750 2640/2748 2641/2751 +f 1472/2215 1474/1914 1473/2115 +f 2042/1931 1951/1794 649/1935 +f 1058/2752 1105/2753 1059/1086 +f 2642/2754 2643/2755 2644/2756 +f 1192/1922 2644/2756 1193/1224 +f 912/2728 2642/2754 1192/1922 +f 3898/2757 3526/1490 3786/1726 +f 1049/1078 2620/2758 2621/2723 +f 616/740 1885/1929 613/741 +f 1331/2759 666/2278 750/843 +f 990/2760 1198/1228 525/1846 +f 1911/1759 321/1830 322/1860 +f 1298/1356 1205/1332 2645/2761 +f 76/53 1504/1029 75/2762 +f 1502/2763 421/254 422/256 +f 610/735 658/772 691/798 +f 1146/1188 659/773 938/977 +f 2646/2764 914/953 936/975 +f 1280/1324 1238/2765 1279/1322 
+f 975/2766 2647/2767 2648/2768 +f 2647/2767 975/2766 974/1012 +f 1453/906 1452/874 1451/1244 +f 1514/1108 158/107 151/101 +f 835/900 1843/2769 1842/1891 +f 1271/1307 1173/2770 1289/1345 +f 2649/2771 813/883 2650/2772 +f 1893/1732 1826/2773 1894/1733 +f 1050/2740 1937/1994 1051/1080 +f 2651/2774 814/884 813/883 +f 1006/1042 1281/1326 977/1014 +f 2653/2775 2654/2776 2655/2777 +f 1001/1986 1003/1611 1002/519 +f 585/723 679/789 558/710 +f 2624/2729 621/1309 620/743 +f 555/708 2028/2083 556/1166 +f 1207/1238 1197/1229 2629/2735 +f 1626/1487 478/362 472/300 +f 426/1056 419/252 1567/1437 +f 1030/1225 1773/1817 1031/1063 +f 3898/2757 634/1451 3880/1728 +f 534/1141 138/1126 262/335 +f 1125/1812 1042/1073 1041/1155 +f 333/1261 88/65 227/1218 +f 569/437 561/348 730/435 +f 1489/1538 871/2778 870/927 +f 1857/2779 620/743 891/941 +f 1673/1477 3770/1647 3763/2780 +f 622/744 864/922 1308/1365 +f 2647/2767 974/1012 2657/2781 +f 619/745 891/941 620/743 +f 261/1341 545/331 262/335 +f 393/1288 1556/1263 404/1265 +f 2658/2782 2647/2767 2657/2781 +f 1075/1105 2657/2781 917/1106 +f 1081/2783 2658/2782 1075/1105 +f 1080/1113 2659/2784 2658/2782 +f 2644/2756 1141/1182 2660/2785 +f 2660/2785 1140/2786 1336/1425 +f 642/2787 1336/1425 643/757 +f 1193/1224 2660/2785 642/2787 +f 679/789 555/708 558/710 +f 1503/2788 1500/1009 1502/2763 +f 1318/1374 1101/1131 1100/2789 +f 2661/2790 1139/1181 1142/1183 +f 2661/2790 2663/2791 2662/2792 +f 2662/2792 2659/2784 2661/2790 +f 2662/2792 2622/2793 2664/2794 +f 2664/2794 911/951 2646/2764 +f 2648/2768 2646/2764 1146/1188 +f 2648/2768 2662/2792 2664/2794 +f 2083/1998 2093/1754 2081/1996 +f 1442/835 1437/831 1439/828 +f 519/475 330/1095 51/432 +f 1885/1929 1209/1239 613/741 +f 799/1716 805/443 804/1717 +f 1946/2100 1952/1971 1953/1970 +f 1302/2795 1253/1272 1252/2796 +f 566/1320 224/277 533/322 +f 1389/2037 928/2032 1388/720 +f 683/2797 681/1418 680/1417 +f 836/2798 1842/1891 2665/2799 +f 2665/2799 1845/1893 1303/1362 +f 2666/2800 1303/1362 1302/2795 +f 24/27 2665/2799 2666/2800 +f 831/899 2650/2772 832/897 +f 1845/1893 1304/1363 1303/1362 +f 2010/2801 2012/2802 2011/1876 +f 1051/1080 2667/2803 2668/2804 +f 1522/1089 1511/1054 1523/1090 +f 531/1116 441/2805 522/1148 +f 782/858 1251/1270 1250/1271 +f 1250/1271 2625/2730 2669/2806 +f 2669/2806 1857/2779 1856/1905 +f 783/859 1856/1905 1037/1069 +f 782/858 2669/2806 783/859 +f 720/818 2671/2807 721/816 +f 1055/1087 1930/1988 1056/1083 +f 1333/2808 1251/1270 2251/2333 +f 619/745 1308/1365 1307/2353 +f 652/767 2619/2721 2634/2742 +f 521/1058 1539/1146 1540/1164 +f 1838/1883 537/701 2672/2809 +f 2672/2809 536/2750 2641/2751 +f 1113/1145 2641/2751 1110/1143 +f 1113/1145 1838/1883 2672/2809 +f 1016/548 1351/879 1045/541 +f 3897/1719 570/347 3894/2017 +f 226/1051 104/2810 101/74 +f 3564/2811 3827/2160 1818/1771 +f 1036/1068 1884/2082 1883/1928 +f 2674/2812 1304/1363 2673/2813 +f 2673/2813 1844/1892 2675/2814 +f 1334/1424 2675/2814 1039/1071 +f 1333/2808 2673/2813 1334/1424 +f 2676/2815 1039/1071 2675/2814 +f 1843/2769 2675/2814 1844/1892 +f 942/1102 2676/2815 1843/2769 +f 1074/1104 702/803 2676/2815 +f 811/882 859/1412 858/917 +f 752/2816 1055/1087 753/842 +f 1200/2736 656/1206 1026/1232 +f 1120/1150 874/930 2677/2817 +f 2677/2817 877/932 1052/2818 +f 1241/1262 1052/2818 1005/1081 +f 1119/1151 2677/2817 1241/1262 +f 1560/1501 151/101 1515/1965 +f 1208/2819 611/738 523/694 +f 215/2820 736/2821 214/136 +f 1460/881 1462/1989 1461/936 +f 1208/2819 1839/2741 873/929 +f 854/916 1282/1331 1297/1355 +f 72/50 182/255 179/118 +f 544/329 1565/1354 
512/309 +f 217/465 456/2822 457/635 +f 2047/1938 930/754 945/1944 +f 1204/1235 1937/1994 2633/2823 +f 913/952 686/856 685/794 +f 840/904 1113/1145 1112/1144 +f 1531/1119 730/435 561/348 +f 2678/2824 1283/1334 801/876 +f 601/966 1914/1973 602/730 +f 1537/1149 459/284 460/283 +f 1819/2825 3509/993 3527/2826 +f 991/1025 862/2827 538/702 +f 2679/2828 1073/1103 1830/1872 +f 2680/2829 983/1021 2256/2339 +f 779/855 2256/2339 1263/1298 +f 778/1333 2680/2829 779/855 +f 2678/2824 1830/1872 2680/2829 +f 180/120 245/1786 40/1305 +f 1886/2340 1131/1176 1134/1177 +f 1138/2830 1274/1313 1255/1274 +f 1841/1888 1140/2786 1139/1181 +f 75/2762 314/1287 74/54 +f 193/1161 420/251 192/250 +f 2681/2831 1099/1130 1098/1132 +f 170/179 1565/1354 169/113 +f 349/197 417/247 432/1280 +f 1121/1154 2684/2832 2683/2833 +f 547/431 409/258 302/257 +f 123/981 121/2834 120/1816 +f 2643/2755 1142/1183 1141/1182 +f 1542/1179 513/313 1543/1189 +f 899/2835 907/525 919/527 +f 1210/2836 676/790 867/924 +f 791/865 982/1167 892/940 +f 24/27 837/901 836/2798 +f 893/944 932/1300 931/972 +f 1860/2837 863/921 862/2827 +f 535/700 862/2827 861/920 +f 832/897 813/883 812/1230 +f 2650/2772 813/883 832/897 +f 1569/1819 1547/2838 1568/1783 +f 181/119 542/470 180/120 +f 660/2839 937/976 688/796 +f 2013/2840 568/436 2012/2802 +f 887/937 2631/2841 2630/2737 +f 1524/1092 361/1408 78/58 +f 520/1430 162/2842 1529/1109 +f 2653/2775 854/916 2685/2843 +f 1238/2765 2655/2777 2686/2844 +f 718/1948 527/697 530/696 +f 1492/1004 518/310 1491/994 +f 792/1871 615/1156 793/867 +f 599/727 1835/2845 596/728 +f 290/393 456/2822 175/176 +f 312/178 1552/1327 425/1057 +f 1248/2846 1179/1214 1249/1268 +f 153/105 177/1889 178/2199 +f 1240/1260 1222/1248 1221/1259 +f 1257/1351 1235/1979 1071/1255 +f 1291/1350 1083/1249 1222/1248 +f 1052/2818 1277/1317 1053/1082 +f 743/1237 2618/2719 1204/1235 +f 444/2847 430/263 443/1275 +f 23/23 1172/1209 1187/1278 +f 2687/2848 1180/2849 1179/1214 +f 102/76 409/258 134/91 +f 1283/1334 610/735 801/876 +f 373/2850 650/372 651/371 +f 1180/2849 1084/1117 1291/1350 +f 855/914 2655/2777 1237/1257 +f 1173/2770 1917/2851 1174/1210 +f 2689/2852 853/912 2688/2853 +f 2688/2853 1108/1138 1279/1322 +f 2686/2844 1279/1322 1238/2765 +f 2687/2848 2688/2853 2686/2844 +f 2691/2854 1252/2796 2690/2855 +f 2690/2855 850/913 2689/2852 +f 1248/2846 2689/2852 2687/2848 +f 1248/2846 2691/2854 2690/2855 +f 2692/2856 24/27 2666/2800 +f 2691/2854 2666/2800 1302/2795 +f 1247/1267 2692/2856 2691/2854 +f 1258/1290 617/742 2692/2856 +f 394/1226 1555/1415 393/1288 +f 2693/2857 1259/1292 1223/1344 +f 1072/1101 1262/1294 1261/1293 +f 1257/1351 1168/2858 1258/1290 +f 398/1539 1555/1415 1554/1795 +f 871/2778 1314/2859 872/928 +f 1178/1216 1249/1268 1179/1214 +f 1224/1251 1173/2770 1172/1209 +f 1010/1044 2618/2719 742/837 +f 1175/1211 1292/2860 1269/1306 +f 2654/2776 2686/2844 2655/2777 +f 2694/2861 1216/1246 1264/1910 +f 71/1955 1487/988 70/51 +f 701/802 1111/2862 1038/1070 +f 1072/1101 2649/2771 2650/2772 +f 1085/1118 22/1250 21/22 +f 895/961 1264/1910 896/943 +f 1601/1400 1605/1954 1604/1398 +f 1259/1292 1271/1307 1289/1345 +f 21/22 1220/1247 1083/1249 +f 988/1026 538/702 537/701 +f 598/2341 1336/1425 1140/2786 +f 820/1811 1762/2739 821/889 +f 1050/2740 818/890 821/889 +f 1298/1356 1761/1810 2652/2863 +f 1168/2858 618/2279 617/742 +f 858/917 1256/1279 833/898 +f 39/20 516/439 1536/1133 +f 2652/2863 819/888 814/884 +f 313/1142 315/192 314/1287 +f 2026/1923 1971/1823 2027/1896 +f 2656/2864 596/728 1835/2845 +f 814/884 818/890 811/882 +f 2696/2865 813/883 2649/2771 +f 
1260/2866 2649/2771 1261/1293 +f 2693/2857 2696/2865 1260/2866 +f 2697/2867 2651/2774 2696/2865 +f 381/220 390/290 380/221 +f 3818/2121 1611/1903 1609/1963 +f 1475/935 1428/2168 1227/617 +f 1292/2860 1917/2851 1235/1979 +f 1551/1227 354/1346 1518/1186 +f 1526/1098 1523/1090 1511/1054 +f 1593/1387 1595/1403 1573/2051 +f 1232/1254 1246/1269 1249/1268 +f 2695/2868 1223/1344 22/1250 +f 2698/2869 1296/1357 2697/2867 +f 2698/2869 2693/2857 2695/2868 +f 2685/2843 2695/2868 1085/1118 +f 2685/2843 1297/1355 2698/2869 +f 562/349 271/1003 272/350 +f 2652/2863 1296/1357 1298/1356 +f 1365/762 1364/2125 1363/687 +f 1958/2101 1953/1970 1957/2870 +f 585/723 2210/2274 997/1032 +f 1561/1521 1515/1965 435/268 +f 975/2766 1146/1188 976/1013 +f 347/1266 68/49 346/384 +f 1038/1070 636/751 1068/1096 +f 2700/2871 780/857 2699/2872 +f 2699/2872 1036/1068 1883/1928 +f 2701/2873 1883/1928 1829/1870 +f 851/911 2699/2872 2701/2873 +f 2674/2812 2251/2333 2702/2874 +f 2702/2874 781/2875 2700/2871 +f 1253/1272 2700/2871 851/911 +f 1301/1361 2702/2874 1253/1272 +f 1519/1253 388/229 1550/1285 +f 1273/1312 1280/1324 2703/2876 +f 2703/2876 1107/1323 852/1137 +f 2701/2873 852/1137 851/911 +f 1829/1870 2703/2876 2701/2873 +f 91/2877 326/1321 90/66 +f 1784/1827 2656/2864 1785/1825 +f 2683/2833 2705/2878 2704/2879 +f 604/1850 3499/2880 3533/1930 +f 1669/1049 3548/2881 1662/1674 +f 2062/1958 2060/2882 2059/1953 +f 621/1309 861/920 622/744 +f 2023/1877 732/2883 2024/1881 +f 686/856 1263/1298 687/795 +f 857/919 2668/2804 1328/1413 +f 883/1843 882/1252 879/459 +f 637/2749 1111/2862 1110/1143 +f 2668/2804 893/944 1328/1413 +f 2667/2803 894/942 2668/2804 +f 1039/1071 1068/1096 1334/1424 +f 313/1142 1504/1029 1505/1036 +f 1911/1759 305/1093 453/276 +f 1936/1993 1964/2035 2212/2349 +f 2707/2884 2401/2885 2183/2886 +f 2708/2887 2183/2886 2182/2219 +f 738/1807 2116/2198 554/1456 +f 2349/2624 2432/2510 2436/2515 +f 2711/2888 2020/2078 2019/2889 +f 2400/2478 2019/2889 2186/2222 +f 2714/2890 2716/2891 2713/2892 +f 2717/2893 2719/2894 2720/2895 +f 2721/2896 2723/2897 2724/2898 +f 2726/2899 2728/2900 2725/2901 +f 2725/2901 2730/2902 2729/2903 +f 2731/2904 2729/2903 2709/2905 +f 2732/2906 2725/2901 2731/2904 +f 2734/2907 2736/2908 2733/2909 +f 2738/2910 2740/2911 2737/2912 +f 2737/2912 2741/2913 2742/2914 +f 2744/2915 2746/2916 2743/2917 +f 2745/2918 2748/2919 2747/2920 +f 2749/2921 2747/2920 2751/2922 +f 2753/2923 2751/2922 2752/2924 +f 2250/2329 2246/2325 2248/2328 +f 2754/2925 1614/1683 2755/2926 +f 3598/2927 2755/2926 3609/2928 +f 3608/2929 2747/2920 3609/2928 +f 1617/1685 2755/2926 1614/1683 +f 2242/2309 1620/1688 2757/2930 +f 2191/2249 2760/2931 2759/2932 +f 2761/2933 2760/2931 2762/2934 +f 3648/2935 2761/2933 3649/2936 +f 3623/2937 2759/2932 3648/2935 +f 3209/2938 3223/2939 3224/2940 +f 1395/1722 2236/2304 2235/2330 +f 1395/1722 1636/2331 1615/1721 +f 1616/1684 2229/2299 2233/2302 +f 1617/1685 2763/2941 2756/2942 +f 2756/2942 3628/2943 3608/2929 +f 2765/2944 2234/2945 1630/2311 +f 2766/2946 3628/2943 3622/2947 +f 3202/2948 3246/2949 3247/2950 +f 1516/1966 435/268 1515/1965 +f 2768/2951 3552/1458 3553/1460 +f 1495/997 578/342 1424/804 +f 2135/632 28/2112 17/10 +f 3526/2952 2773/2953 2774/2954 +f 1690/1646 3926/2955 3925/2956 +f 2778/2957 2780/2958 2777/2959 +f 2777/2959 2781/2960 2782/2961 +f 2784/2962 2777/2959 2783/2963 +f 3555/2128 575/380 576/392 +f 1494/996 9/2190 540/327 +f 3529/1467 2015/2263 3566/1336 +f 451/320 505/634 450/272 +f 2786/2964 2788/2965 2785/2966 +f 2779/2967 2785/2966 2780/2958 +f 2789/2968 2741/2913 2740/2911 +f 
2790/2969 2752/2924 2789/2968 +f 2792/2970 2794/2971 2791/2972 +f 2793/2973 2796/2974 2794/2971 +f 2796/2974 2750/2975 2749/2921 +f 2794/2971 2749/2921 2753/2923 +f 2798/2976 1396/1471 2754/2925 +f 3591/2977 2754/2925 3598/2927 +f 2798/2976 2800/2978 1393/1472 +f 3591/2977 2799/2979 2798/2976 +f 2797/2980 2795/2981 2801/2982 +f 2791/2972 2753/2923 2790/2969 +f 216/138 232/1743 54/39 +f 3804/2983 2807/2984 3803/2985 +f 3805/2986 2804/2987 3803/2985 +f 2769/2988 3557/1507 3552/1458 +f 183/211 184/374 97/73 +f 2811/2989 2775/2990 2776/2991 +f 2804/2987 2776/2991 2805/2992 +f 2809/2993 2811/2989 2804/2987 +f 2812/2994 2813/2995 2814/2996 +f 3805/2986 2816/2997 3795/2998 +f 3796/2999 2815/3000 3795/2998 +f 3798/3001 2820/3002 2821/3003 +f 3793/3004 2819/3005 3798/3001 +f 2824/3006 2735/3007 2819/3005 +f 2823/3008 2824/3006 2819/3005 +f 2825/3009 2826/3010 2724/2898 +f 3793/3004 2827/3011 3792/3012 +f 3796/2999 2826/3010 3792/3012 +f 2713/2892 2829/3013 2828/3014 +f 2828/3014 2831/3015 2830/3016 +f 2822/3017 2828/3014 2830/3016 +f 2821/3003 2713/2892 2828/3014 +f 2832/3018 2822/3017 2830/3016 +f 2833/3019 2830/3016 2831/3015 +f 2834/3020 2832/3018 2833/3019 +f 2835/3021 2827/3011 2832/3018 +f 2837/3022 2839/3023 2836/3024 +f 2836/3024 2841/3025 2840/3026 +f 2842/3027 2840/3026 2727/3028 +f 2843/3029 2836/3024 2842/3027 +f 2840/3026 2845/3030 2844/3031 +f 2718/3032 2845/3030 2719/2894 +f 2730/2902 2844/3031 2718/3032 +f 2727/3028 2844/3031 2728/2900 +f 2846/3033 2719/2894 2845/3030 +f 2846/3033 2841/3025 2847/3034 +f 2829/3013 2847/3034 2831/3015 +f 2829/3013 2716/2891 2846/3033 +f 2847/3034 2839/3023 2848/3035 +f 2848/3035 2838/3036 2849/3037 +f 2833/3019 2849/3037 2834/3020 +f 2831/3015 2848/3035 2833/3019 +f 2850/3038 2851/3039 2852/3040 +f 2853/3041 2852/3040 2854/3042 +f 2783/2963 2782/2961 2855/3043 +f 2785/2966 2853/3041 2856/3044 +f 2780/2958 2856/3044 2781/2960 +f 2857/3045 2855/3043 2782/2961 +f 2859/3046 2782/2961 2781/2960 +f 2861/3047 2842/3027 2860/3048 +f 2860/3048 2727/3028 2726/2899 +f 2862/3049 2726/2899 2732/2906 +f 2863/3050 2860/3048 2862/3049 +f 2861/3047 2858/3051 2857/3045 +f 2843/3029 2857/3045 2859/3046 +f 2856/3044 2854/3042 2864/3052 +f 2781/2960 2864/3052 2859/3046 +f 2837/3022 2859/3046 2864/3052 +f 2838/3036 2864/3052 2854/3042 +f 2852/3040 2834/3020 2849/3037 +f 2854/3042 2849/3037 2838/3036 +f 2865/3053 2818/3054 2835/3021 +f 2851/3039 2835/3021 2834/3020 +f 2813/2995 2815/3000 2865/3053 +f 2814/2996 2865/3053 2851/3039 +f 2866/3055 2814/2996 2850/3038 +f 2788/2965 2850/3038 2853/3041 +f 2866/3055 2787/3056 2867/3057 +f 2812/2994 2867/3057 2775/2990 +f 1892/2003 1890/1740 1891/2175 +f 2720/2895 2846/3033 2870/3058 +f 298/1407 256/148 257/1193 +f 2872/3059 2713/2892 2820/3002 +f 3794/3060 2735/3007 2734/2907 +f 2729/2903 2874/3061 2873/3062 +f 2846/3033 2715/3063 2870/3058 +f 2876/3064 2878/3065 2875/3066 +f 2733/2909 2878/3065 2734/2907 +f 3806/3067 2879/3068 2880/3069 +f 3806/3067 2872/3059 3794/3060 +f 2882/3070 2884/3071 2881/3072 +f 2881/3072 2715/3063 2714/2890 +f 2880/3069 2714/2890 2872/3059 +f 2879/3068 2881/3072 2880/3069 +f 2885/3073 2887/3074 2888/3075 +f 2888/3075 2890/3076 2885/3073 +f 2885/3073 2892/3077 2891/3078 +f 2893/3079 2885/3073 2891/3078 +f 1942/2002 2532/3080 2894/3081 +f 2895/3082 2720/2895 2870/3058 +f 2884/3071 2870/3058 2715/3063 +f 2883/3083 2895/3082 2884/3071 +f 2896/3084 2892/3077 2895/3082 +f 2889/3085 2717/2893 2890/3076 +f 2890/3076 2720/2895 2892/3077 +f 2897/3086 2891/3078 2896/3084 +f 2898/3087 2896/3084 2883/3083 +f 2899/3088 
2883/3083 2882/3070 +f 2900/3089 2882/3070 2879/3068 +f 3797/3090 2879/3068 3807/3091 +f 2876/3064 2901/3092 2877/3093 +f 2901/3092 2904/3094 2903/3095 +f 1785/1825 1835/2845 1834/1879 +f 2905/3096 2899/3088 2900/3089 +f 2907/3097 2897/3086 2898/3087 +f 3797/3090 2903/3095 3799/3098 +f 632/2493 577/3099 633/2170 +f 3532/1639 2867/3057 2787/3056 +f 434/265 550/337 350/196 +f 2768/2951 3640/3100 3643/3101 +f 2910/3102 1623/1690 2249/2327 +f 2241/3103 2758/3104 2242/2309 +f 3640/3100 2913/3105 3652/3106 +f 1623/1690 2757/2930 1620/1688 +f 3587/2310 2757/2930 3652/3106 +f 2229/2299 1380/1466 2250/2329 +f 2765/2944 1629/1697 2914/3107 +f 2915/3108 1629/1697 2912/3109 +f 3642/3110 2914/3107 2915/3108 +f 3641/3111 2765/2944 2914/3107 +f 2919/3112 3642/3110 3613/3113 +f 2918/3114 2912/3109 2241/3103 +f 3614/3115 2241/3103 3586/2308 +f 3618/3116 3613/3113 3670/3117 +f 3541/1663 2180/2218 3496/1664 +f 3521/1682 2188/3118 3506/1735 +f 3705/3119 714/376 3851/2093 +f 2925/3120 2917/3121 2924/3122 +f 2924/3122 2916/3123 2919/3112 +f 3610/3124 2919/3112 3618/3116 +f 3611/3125 2924/3122 3610/3124 +f 2926/3126 2766/2946 2925/3120 +f 3658/3127 2925/3120 3611/3125 +f 1715/1533 1705/1744 1710/1842 +f 785/1500 498/2230 796/1498 +f 1387/719 565/1310 1791/2206 +f 374/2231 1899/3128 560/2229 +f 3573/2056 2206/2269 3556/3129 +f 2929/3130 2886/3131 2893/3079 +f 2932/3132 2934/3133 2931/3134 +f 1905/1748 1908/1813 1907/1832 +f 2930/3135 2936/3136 2937/3137 +f 2938/3138 2930/3135 2929/3130 +f 1493/1075 517/1897 518/310 +f 1603/3139 1586/1389 1602/1399 +f 2939/3140 2929/3130 2908/3141 +f 2940/3142 2908/3141 2907/3097 +f 3665/3143 1600/1395 3661/2103 +f 2886/3131 2937/3137 2887/3074 +f 2906/3144 2898/3087 2899/3088 +f 2908/3141 2893/3079 2897/3086 +f 2942/3145 2933/3146 2941/3147 +f 2941/3147 2937/3137 2936/3136 +f 3501/1687 2936/3136 3513/1695 +f 3498/1686 2941/3147 3501/1687 +f 2943/3148 2710/3149 2873/3062 +f 2943/3148 2874/3061 2889/3085 +f 2945/3150 2889/3085 2888/3075 +f 2946/3151 2943/3148 2945/3150 +f 2932/3132 2888/3075 2887/3074 +f 2931/3134 2945/3150 2932/3132 +f 727/375 1929/1777 728/373 +f 2903/3095 2947/3152 2948/3153 +f 545/331 42/30 262/335 +f 1480/969 533/322 251/323 +f 2907/3097 2949/3154 2940/3142 +f 2906/3144 2950/3155 2949/3154 +f 1558/1286 215/2820 202/135 +f 3799/3098 2948/3153 3801/3156 +f 2054/2008 1644/1420 2118/885 +f 3705/3119 2203/2267 3573/2056 +f 1828/1677 1849/986 1827/1672 +f 2953/3157 2955/3158 2952/3159 +f 2952/3159 2957/3160 2956/3161 +f 2956/3161 2258/2345 2952/3159 +f 2953/3157 2258/2345 2257/2342 +f 2454/2535 2474/2554 2496/2580 +f 3747/3162 2959/3163 3766/3164 +f 3747/3162 2131/2177 3737/3165 +f 2961/3166 2963/3167 2958/3168 +f 2958/3168 2964/3169 2959/3163 +f 222/307 2485/2565 2965/3170 +f 2477/2557 2966/3171 2608/2707 +f 2631/2841 2006/2070 1859/1909 +f 2384/2467 2496/2580 2497/3172 +f 2967/3173 2959/3163 2964/3169 +f 2968/3174 2970/3175 2969/3176 +f 3759/3177 2971/3178 3784/3179 +f 3784/3179 2960/3180 3766/3164 +f 2972/3181 2033/2098 2032/2097 +f 2007/2461 2973/3182 2008/2071 +f 3560/3183 2134/2179 2133/2178 +f 1836/3184 1832/1711 1831/1710 +f 3506/1735 2090/2143 3551/1899 +f 955/1295 959/517 956/3185 +f 672/3186 674/3187 673/2296 +f 633/2170 713/1335 632/2493 +f 2024/1881 733/398 803/1489 +f 2025/1882 803/1489 798/1873 +f 425/1057 1550/1285 388/229 +f 2975/3188 888/1919 887/937 +f 1319/1375 1157/1196 2694/2861 +f 1212/1791 2977/3189 1213/1242 +f 889/938 2228/3190 2227/2297 +f 1978/1837 1975/1835 1979/1853 +f 1138/2830 1329/1414 915/956 +f 751/841 2979/3191 2978/3192 +f 
+f 1619/1949 1625/1485 1624/1476
[... thousands of further Wavefront OBJ face records of the same form, "f v/vt v/vt v/vt", omitted: in the source diff each record sits on its own "+f" line, but they were flattened together here during extraction. This hunk adds the triangle list of a large mesh, each face referencing three vertex-index/texture-coordinate-index pairs, and the listing continues beyond the end of this excerpt ...]
4631/4669 4643/4686 4644/4684 +f 4628/4657 4642/4693 4629/4670 +f 4626/4659 4640/4687 4627/4656 +f 4624/4663 4638/4694 4625/4674 +f 4635/4692 4649/4695 4636/4681 +f 4634/4683 4646/4696 4647/4697 +f 4644/4684 4658/4698 4645/4685 +f 4642/4693 4656/4699 4643/4686 +f 4641/4688 4653/4700 4654/4701 +f 4639/4690 4651/4702 4652/4703 +f 4636/4681 4650/4704 4637/4691 +f 4635/4692 4647/4697 4648/4705 +f 4643/4686 4657/4706 4644/4684 +f 4642/4693 4654/4701 4655/4707 +f 4640/4687 4652/4703 4653/4700 +f 4637/4691 4651/4708 4638/4694 +f 4648/4705 4662/4709 4649/4695 +f 4646/4696 4660/4710 4647/4697 +f 4658/4698 4670/4711 4671/4712 +f 4656/4699 4668/4713 4669/4714 +f 4653/4700 4667/4715 4654/4701 +f 4651/4702 4665/4716 4652/4703 +f 4649/4695 4663/4717 4650/4704 +f 4648/4705 4660/4710 4661/4718 +f 4656/4699 4670/4711 4657/4706 +f 4654/4701 4668/4713 4655/4707 +f 4653/4700 4665/4716 4666/4719 +f 4651/4708 4663/4717 4664/4720 +f 4661/4718 4675/4721 4662/4709 +f 4660/4710 4672/4722 4673/4723 +f 4670/4711 4684/4724 4671/4712 +f 4668/4713 4682/4725 4669/4714 +f 4667/4715 4679/4726 4680/4727 +f 4665/4716 4677/4728 4678/4729 +f 4663/4717 4675/4721 4676/4730 +f 4661/4718 4673/4723 4674/4731 +f 4670/4711 4682/4725 4683/4732 +f 4667/4715 4681/4733 4668/4713 +f 4665/4716 4679/4726 4666/4719 +f 4663/4717 4677/4734 4664/4720 +f 4675/4721 4687/4735 4688/4736 +f 4672/4722 4686/4737 4673/4723 +f 4684/4724 4696/4738 4697/4739 +f 4682/4725 4694/4740 4695/4741 +f 4679/4726 4693/4742 4680/4727 +f 4678/4729 4690/4743 4691/4744 +f 4676/4730 4688/4736 4689/4745 +f 4673/4723 4687/4735 4674/4731 +f 4683/4732 4695/4741 4696/4738 +f 4681/4733 4693/4742 4694/4740 +f 4678/4729 4692/4746 4679/4726 +f 4676/4730 4690/4747 4677/4734 +f 4688/4736 4700/4748 4701/4749 +f 4686/4737 4698/4750 4699/4751 +f 4696/4738 4710/4752 4697/4739 +f 4695/4741 4707/4753 4708/4754 +f 4692/4746 4706/4755 4693/4742 +f 4691/4744 4703/4756 4704/4757 +f 4688/4736 4702/4758 4689/4745 +f 4686/4737 4700/4748 4687/4735 +f 4695/4741 4709/4759 4696/4738 +f 4694/4740 4706/4755 4707/4753 +f 4692/4746 4704/4757 4705/4760 +f 4689/4745 4703/4761 4690/4747 +f 4701/4749 4713/4762 4714/4763 +f 4699/4751 4711/4764 4712/4765 +f 4710/4752 4722/4766 4723/4767 +f 4707/4753 4721/4768 4708/4754 +f 4705/4760 4719/4769 4706/4755 +f 4704/4757 4716/4770 4717/4771 +f 4701/4749 4715/4772 4702/4758 +f 4699/4751 4713/4762 4700/4748 +f 4709/4759 4721/4768 4722/4766 +f 4707/4753 4719/4769 4720/4773 +f 4705/4760 4717/4771 4718/4774 +f 4702/4758 4716/4775 4703/4761 +f 4713/4762 4727/4776 4714/4763 +f 4711/4764 4725/4777 4712/4765 +f 4723/4767 4735/4778 4736/4779 +f 4721/4768 4733/4780 4734/4781 +f 4719/4769 4731/4782 4732/4783 +f 4716/4770 4730/4784 4717/4771 +f 4715/4772 4727/4776 4728/4785 +f 4713/4762 4725/4777 4726/4786 +f 4721/4768 4735/4778 4722/4766 +f 4719/4769 4733/4780 4720/4773 +f 4717/4771 4731/4782 4718/4774 +f 4716/4775 4728/4785 4729/4787 +f 4726/4786 4740/4788 4727/4776 +f 4724/4789 4738/4790 4725/4777 +f 4736/4779 4748/4791 4749/4792 +f 4733/4780 4747/4793 4734/4781 +f 4732/4783 4744/4794 4745/4795 +f 4730/4784 4742/4796 4743/4797 +f 4728/4785 4740/4788 4741/4798 +f 4726/4786 4738/4790 4739/4799 +f 4735/4778 4747/4793 4748/4791 +f 4732/4783 4746/4800 4733/4780 +f 4730/4784 4744/4794 4731/4782 +f 4728/4785 4742/4801 4729/4787 +f 4739/4799 4753/4802 4740/4788 +f 4738/4790 4750/4803 4751/4804 +f 4748/4791 4762/4805 4749/4792 +f 4746/4800 4760/4806 4747/4793 +f 4745/4795 4757/4807 4758/4808 +f 4743/4797 4755/4809 4756/4810 +f 4740/4788 4754/4811 4741/4798 +f 4739/4799 4751/4804 
4752/4812 +f 4747/4793 4761/4813 4748/4791 +f 4745/4795 4759/4814 4746/4800 +f 4744/4794 4756/4810 4757/4807 +f 4741/4798 4755/4815 4742/4801 +f 4753/4802 4765/4816 4766/4817 +f 4750/4803 4764/4818 4751/4804 +f 4762/4805 4774/4819 4775/4820 +f 4759/4814 4773/4821 4760/4806 +f 4758/4808 4770/4822 4771/4823 +f 4755/4809 4769/4824 4756/4810 +f 4753/4802 4767/4825 4754/4811 +f 4752/4812 4764/4818 4765/4816 +f 4760/4806 4774/4819 4761/4813 +f 4758/4808 4772/4826 4759/4814 +f 4757/4807 4769/4824 4770/4822 +f 4755/4815 4767/4825 4768/4827 +f 4765/4816 4779/4828 4766/4817 +f 4764/4818 4776/4829 4777/4830 +f 4774/4819 4788/4831 4775/4820 +f 4772/4826 4786/4832 4773/4821 +f 4771/4823 4783/4833 4784/4834 +f 4769/4824 4781/4835 4782/4836 +f 4767/4825 4779/4828 4780/4837 +f 4765/4816 4777/4830 4778/4838 +f 4774/4819 4786/4832 4787/4839 +f 4771/4823 4785/4840 4772/4826 +f 4769/4824 4783/4833 4770/4822 +f 4767/4825 4781/4841 4768/4827 +f 4779/4828 4791/4842 4792/4843 +f 4776/4829 4790/4844 4777/4830 +f 4788/4831 4800/4845 4801/4846 +f 4786/4832 4798/4847 4799/4848 +f 4783/4833 4797/4849 4784/4834 +f 4782/4836 4794/4850 4795/4851 +f 4780/4837 4792/4843 4793/4852 +f 4777/4830 4791/4842 4778/4838 +f 4787/4839 4799/4848 4800/4845 +f 4785/4840 4797/4849 4798/4847 +f 4782/4836 4796/4853 4783/4833 +f 4780/4837 4794/4854 4781/4841 +f 4792/4843 4804/4855 4805/4856 +f 4790/4844 4802/4857 4803/4858 +f 4800/4845 4814/4859 4801/4846 +f 4799/4848 4811/4860 4812/4861 +f 4796/4853 4810/4862 4797/4849 +f 4795/4851 4807/4863 4808/4864 +f 4792/4843 4806/4865 4793/4852 +f 4790/4844 4804/4855 4791/4842 +f 4799/4848 4813/4866 4800/4845 +f 4798/4847 4810/4862 4811/4860 +f 4796/4853 4808/4864 4809/4867 +f 4793/4852 4807/4868 4794/4854 +f 4805/4856 4817/4869 4818/4870 +f 4803/4858 4815/4871 4816/4872 +f 4814/4859 4826/4873 4827/4874 +f 4811/4860 4825/4875 4812/4861 +f 4809/4867 4823/4876 4810/4862 +f 4808/4864 4820/4877 4821/4878 +f 4805/4856 4819/4879 4806/4865 +f 4803/4858 4817/4869 4804/4855 +f 4813/4866 4825/4875 4826/4873 +f 4811/4860 4823/4876 4824/4880 +f 4809/4867 4821/4878 4822/4881 +f 4806/4865 4820/4882 4807/4868 +f 4817/4869 4831/4883 4818/4870 +f 4815/4871 4829/4884 4816/4872 +f 4826/4873 4840/4885 4827/4874 +f 4825/4875 4837/4886 4838/4887 +f 4823/4876 4835/4888 4836/4889 +f 4820/4877 4834/4890 4821/4878 +f 4819/4879 4831/4883 4832/4891 +f 4817/4869 4829/4884 4830/4892 +f 4825/4875 4839/4893 4826/4873 +f 4823/4876 4837/4886 4824/4880 +f 4821/4878 4835/4888 4822/4881 +f 4820/4882 4832/4891 4833/4894 +f 4830/4892 4844/4895 4831/4883 +f 4828/4896 4842/4897 4829/4884 +f 4840/4885 4852/4898 4853/4899 +f 4837/4886 4851/4900 4838/4887 +f 4836/4889 4848/4901 4849/4902 +f 4834/4890 4846/4903 4847/4904 +f 4832/4891 4844/4895 4845/4905 +f 4830/4892 4842/4897 4843/4906 +f 4839/4893 4851/4900 4852/4898 +f 4836/4889 4850/4907 4837/4886 +f 4834/4890 4848/4901 4835/4888 +f 4832/4891 4846/4908 4833/4894 +f 4843/4906 4857/4909 4844/4895 +f 4842/4897 4854/4910 4855/4911 +f 4852/4898 4866/4912 4853/4899 +f 4850/4907 4864/4913 4851/4900 +f 4849/4902 4861/4914 4862/4915 +f 4847/4904 4859/4916 4860/4917 +f 4844/4895 4858/4918 4845/4905 +f 4843/4906 4855/4911 4856/4919 +f 4851/4900 4865/4920 4852/4898 +f 4849/4902 4863/4921 4850/4907 +f 4848/4901 4860/4917 4861/4914 +f 4845/4905 4859/4922 4846/4908 +f 4857/4909 4869/4923 4870/4924 +f 4855/4911 4867/4925 4868/4926 +f 4866/4912 4878/4927 4879/4928 +f 4864/4913 4876/4929 4877/4930 +f 4862/4915 4874/4931 4875/4932 +f 4859/4916 4873/4933 4860/4917 +f 4857/4909 4871/4934 4858/4918 +f 4856/4919 
4868/4926 4869/4923 +f 4864/4913 4878/4927 4865/4920 +f 4862/4915 4876/4929 4863/4921 +f 4861/4914 4873/4933 4874/4931 +f 4859/4922 4871/4934 4872/4935 +f 4869/4923 4883/4936 4870/4924 +f 4868/4926 4880/4937 4881/4938 +f 4878/4927 4892/4939 4879/4928 +f 4877/4930 4889/4940 4890/4941 +f 4874/4931 4888/4942 4875/4932 +f 4873/4933 4885/4943 4886/4944 +f 4871/4934 4883/4936 4884/4945 +f 4868/4926 4882/4946 4869/4923 +f 4878/4927 4890/4941 4891/4947 +f 4876/4929 4888/4942 4889/4940 +f 4873/4933 4887/4948 4874/4931 +f 4871/4934 4885/4949 4872/4935 +f 4883/4936 4896/4950 4897/4951 +f 4880/4937 4895/4952 4881/4938 +f 4892/4939 4905/4953 4906/4954 +f 4890/4941 4903/4955 4904/4956 +f 4887/4948 4902/4957 4888/4942 +f 4885/4943 4900/4958 4886/4944 +f 4884/4945 4897/4951 4898/4959 +f 4881/4938 4896/4950 4882/4946 +f 4891/4947 4904/4956 4905/4953 +f 4889/4940 4902/4957 4903/4955 +f 4886/4944 4901/4960 4887/4948 +f 4885/4949 4898/4959 4899/4961 +f 4897/4951 4909/4962 4910/4963 +f 4895/4952 4907/4964 4908/4965 +f 4905/4953 4919/4966 4906/4954 +f 4904/4956 4916/4967 4917/4968 +f 4901/4960 4915/4969 4902/4957 +f 4899/4970 4913/4971 4900/4958 +f 4897/4951 4911/4972 4898/4959 +f 4895/4952 4909/4962 4896/4950 +f 4904/4956 4918/4973 4905/4953 +f 4903/4955 4915/4969 4916/4967 +f 4901/4960 4913/4971 4914/4974 +f 4899/4961 4911/4972 4912/4975 +f 4910/4963 4922/4976 4923/4977 +f 4908/4965 4920/4978 4921/4979 +f 4919/4966 4931/4980 4932/4981 +f 4916/4967 4930/4982 4917/4968 +f 4914/4974 4928/4983 4915/4969 +f 4913/4971 4925/4984 4926/4985 +f 4910/4963 4924/4986 4911/4972 +f 4908/4965 4922/4976 4909/4962 +f 4918/4973 4930/4982 4931/4980 +f 4916/4967 4928/4983 4929/4987 +f 4914/4974 4926/4985 4927/4988 +f 4911/4972 4925/4989 4912/4975 +f 4922/4976 4936/4990 4923/4977 +f 4920/4978 4934/4991 4921/4979 +f 4931/4980 4945/4992 4932/4981 +f 4930/4982 4942/4993 4943/4994 +f 4928/4983 4940/4995 4941/4996 +f 4925/4984 4939/4997 4926/4985 +f 4924/4986 4936/4990 4937/4998 +f 4922/4976 4934/4991 4935/4999 +f 4930/4982 4944/5000 4931/4980 +f 4928/4983 4942/4993 4929/4987 +f 4926/4985 4940/4995 4927/4988 +f 4925/4989 4937/4998 4938/5001 +f 4935/4999 4949/5002 4936/4990 +f 4933/5003 4947/5004 4934/4991 +f 4945/4992 4957/5005 4958/5006 +f 4942/4993 4956/5007 4943/4994 +f 4941/4996 4953/5008 4954/5009 +f 4938/5010 4952/5011 4939/4997 +f 4937/4998 4949/5002 4950/5012 +f 4935/4999 4947/5004 4948/5013 +f 4944/5000 4956/5007 4957/5005 +f 4941/4996 4955/5014 4942/4993 +f 4939/4997 4953/5008 4940/4995 +f 4938/5001 4950/5012 4951/5015 +f 4948/5013 4962/5016 4949/5002 +f 4947/5004 4959/5017 4960/5018 +f 4957/5005 4971/5019 4958/5006 +f 4955/5014 4969/5020 4956/5007 +f 4954/5009 4966/5021 4967/5022 +f 4951/5023 4965/5024 4952/5011 +f 4949/5002 4963/5025 4950/5012 +f 4948/5013 4960/5018 4961/5026 +f 4956/5007 4970/5027 4957/5005 +f 4954/5009 4968/5028 4955/5014 +f 4953/5008 4965/5024 4966/5021 +f 4951/5015 4963/5025 4964/5029 +f 4962/5016 4974/5030 4975/5031 +f 4959/5017 4973/5032 4960/5018 +f 4971/5019 4983/5033 4984/5034 +f 4968/5028 4982/5035 4969/5020 +f 4966/5021 4980/5036 4967/5022 +f 4964/5037 4978/5038 4965/5024 +f 4962/5016 4976/5039 4963/5025 +f 4960/5018 4974/5030 4961/5026 +f 4969/5020 4983/5033 4970/5027 +f 4967/5022 4981/5040 4968/5028 +f 4966/5021 4978/5038 4979/5041 +f 4964/5029 4976/5039 4977/5042 +f 4975/5031 4987/5043 4988/5044 +f 4973/5032 4985/5045 4986/5046 +f 4983/5033 4997/5047 4984/5034 +f 4981/5040 4995/5048 4982/5035 +f 4979/5041 4993/5049 4980/5036 +f 4978/5038 4990/5050 4991/5051 +f 4976/5039 4988/5044 4989/5052 +f 
4973/5032 4987/5043 4974/5030 +f 4983/5033 4995/5048 4996/5053 +f 4981/5040 4993/5049 4994/5054 +f 4978/5038 4992/5055 4979/5041 +f 4976/5039 4990/5056 4977/5042 +f 4988/5044 5000/5057 5001/5058 +f 4985/5045 4999/5059 4986/5046 +f 4997/5047 5009/5060 5010/5061 +f 4995/5048 5007/5062 5008/5063 +f 4992/5055 5006/5064 4993/5049 +f 4991/5051 5003/5065 5004/5066 +f 4989/5052 5001/5058 5002/5067 +f 4986/5046 5000/5057 4987/5043 +f 4996/5053 5008/5063 5009/5060 +f 4994/5054 5006/5064 5007/5062 +f 4991/5051 5005/5068 4992/5055 +f 4989/5052 5003/5069 4990/5056 +f 5001/5058 5013/5070 5014/5071 +f 4999/5059 5011/5072 5012/5073 +f 5009/5060 5023/5074 5010/5061 +f 5008/5063 5020/5075 5021/5076 +f 5005/5068 5019/5077 5006/5064 +f 5004/5066 5016/5078 5017/5079 +f 5001/5058 5015/5080 5002/5067 +f 4999/5059 5013/5070 5000/5057 +f 5008/5063 5022/5081 5009/5060 +f 5007/5062 5019/5077 5020/5075 +f 5005/5068 5017/5079 5018/5082 +f 5002/5067 5016/5083 5003/5069 +f 4893/5084 4619/4678 4632/4679 +f 4893/5084 4632/4679 4645/4685 +f 4893/5084 4645/4685 4658/4698 +f 4893/5084 4658/4698 4671/4712 +f 4893/5084 4671/4712 4684/4724 +f 4893/5084 4684/4724 4697/4739 +f 4893/5084 4697/4739 4710/4752 +f 4893/5084 4710/4752 4723/4767 +f 4893/5084 4723/4767 4736/4779 +f 4893/5084 4736/4779 4749/4792 +f 4893/5084 4749/4792 4762/4805 +f 4893/5084 4762/4805 4775/4820 +f 4893/5084 4775/4820 4788/4831 +f 4893/5084 4788/4831 4801/4846 +f 4893/5084 4801/4846 4814/4859 +f 4893/5084 4814/4859 4827/4874 +f 4893/5084 4827/4874 4840/4885 +f 4893/5084 4840/4885 4853/4899 +f 4893/5084 4853/4899 4866/4912 +f 4893/5084 4866/4912 4879/4928 +f 4893/5084 4879/4928 4892/4939 +f 4893/5084 4892/4939 4906/4954 +f 4893/5084 4906/4954 4919/4966 +f 4893/5084 4919/4966 4932/4981 +f 4893/5084 4932/4981 4945/4992 +f 4893/5084 4945/4992 4958/5006 +f 4893/5084 4958/5006 4971/5019 +f 4893/5084 4971/5019 4984/5034 +f 4893/5084 4984/5034 4997/5047 +f 4893/5084 4997/5047 5010/5061 +f 4893/5084 5010/5061 5023/5074 +f 5014/5071 4609/4664 4610/4675 +f 5012/5073 4607/4676 4608/4677 +f 5022/5081 4619/4678 5023/5074 +f 5020/5075 4617/4680 5021/5076 +f 5018/5082 4615/4655 5019/5077 +f 5017/5079 4612/4658 4613/4660 +f 5014/5071 4611/4661 5015/5080 +f 5012/5073 4609/4664 5013/5070 +f 4893/5084 5023/5074 4619/4678 +f 5021/5076 4618/4667 5022/5081 +f 5020/5075 4615/4655 4616/4671 +f 5018/5082 4613/4660 4614/4672 +f 5015/5080 4612/4673 5016/5083 +f 4481/4530 5011/5072 4606/4637 +f 5011/5072 4602/4634 4606/4637 +f 4998/5085 4597/4629 4602/4634 +f 4985/5045 4593/4627 4597/4629 +f 4593/4627 4959/5017 4589/4622 +f 4589/4622 4946/5086 4585/4619 +f 4585/4619 4933/5003 4581/4615 +f 4933/5003 4577/4612 4581/4615 +f 4577/4612 4907/4964 4573/4609 +f 4907/4964 4569/4606 4573/4609 +f 4894/5087 4565/4601 4569/4606 +f 4880/4937 4561/4599 4565/4601 +f 4561/4599 4854/4910 4557/4594 +f 4557/4594 4841/5088 4553/4590 +f 4553/4590 4828/4896 4549/4587 +f 4828/4896 4545/4584 4549/4587 +f 4545/4584 4802/4857 4541/4580 +f 4802/4857 4537/4577 4541/4580 +f 4789/5089 4533/4572 4537/4577 +f 4776/4829 4529/4569 4533/4572 +f 4529/4569 4750/4803 4525/4564 +f 4525/4564 4737/5090 4521/4561 +f 4521/4561 4724/4789 4517/4558 +f 4724/4789 4513/4555 4517/4558 +f 4513/4555 4698/4750 4509/4551 +f 4698/4750 4505/4548 4509/4551 +f 4685/5091 4501/4543 4505/4548 +f 4672/4722 4497/4541 4501/4543 +f 4497/4541 4646/4696 4493/4536 +f 4493/4536 4633/5092 4489/4533 +f 4489/4533 4620/4682 4485/4529 +f 4620/4682 4481/4530 4485/4529 +f 4/1 3/223 2/2 +f 8/4 7/2726 6/5 +f 13/7 16/2228 15/8 +f 17/10 28/2112 19/11 +f 32/13 
31/317 30/14 +f 36/16 35/1785 34/17 +f 37/19 40/1305 39/20 +f 21/22 22/1250 23/23 +f 25/25 26/2732 27/26 +f 41/28 44/140 43/29 +f 48/31 47/191 46/32 +f 52/34 51/432 50/35 +f 53/37 56/161 55/38 +f 60/40 59/62 58/41 +f 29/15 30/14 62/43 +f 66/45 65/486 64/46 +f 67/48 5/6 6/5 +f 72/50 71/1955 70/51 +f 76/53 75/2762 74/54 +f 77/56 80/312 79/57 +f 81/59 84/970 83/60 +f 59/62 87/78 86/63 +f 88/65 91/2877 90/66 +f 96/68 95/387 94/69 +f 100/71 99/1067 98/72 +f 101/74 104/2810 103/75 +f 106/77 105/111 34/17 +f 108/79 116/109 115/80 +f 117/82 119/194 36/16 +f 124/84 127/99 126/85 +f 131/87 130/1157 129/88 +f 132/90 102/76 134/91 +f 142/93 141/174 140/94 +f 148/96 106/77 60/40 +f 144/98 143/253 127/99 +f 149/100 152/1441 151/101 +f 156/103 155/357 154/104 +f 159/106 150/102 158/107 +f 116/109 108/79 164/110 +f 171/112 170/179 169/113 +f 174/115 126/85 173/116 +f 179/118 182/255 181/119 +f 186/121 189/319 188/122 +f 194/124 197/1314 196/125 +f 199/127 142/93 139/95 +f 206/129 209/215 208/130 +f 213/132 212/173 211/133 +f 202/135 215/2820 214/136 +f 96/68 93/70 216/138 +f 92/139 238/180 43/29 +f 97/73 184/374 246/141 +f 250/142 249/165 229/143 +f 252/145 254/244 58/41 +f 258/147 257/1193 256/148 +f 260/150 91/2877 229/143 +f 110/152 111/3492 112/153 +f 114/155 264/1147 263/156 +f 265/158 268/1008 267/159 +f 56/161 53/37 270/162 +f 274/164 273/1163 249/165 +f 185/166 183/211 63/47 +f 280/167 279/1030 278/168 +f 285/170 284/224 283/171 +f 288/172 219/464 211/133 +f 141/174 142/93 92/139 +f 289/175 290/393 175/176 +f 148/96 116/109 105/111 +f 312/178 311/1436 170/179 +f 238/180 262/335 42/30 +f 318/181 163/213 160/182 +f 310/184 309/1031 308/185 +f 241/187 239/1191 325/188 +f 160/182 230/314 240/190 +f 47/191 48/31 307/186 +f 108/79 109/157 315/192 +f 345/193 85/64 86/63 +f 348/195 351/246 350/196 +f 356/198 359/260 358/199 +f 355/201 354/1346 353/202 +f 363/204 31/317 32/13 +f 364/206 252/145 253/146 +f 366/208 125/86 368/209 +f 183/211 372/1217 371/212 +f 163/213 162/2842 161/214 +f 209/215 226/1051 101/74 +f 378/216 377/474 165/217 +f 379/219 295/1840 381/220 +f 383/222 382/1281 3/223 +f 284/224 285/170 386/225 +f 387/227 127/99 389/228 +f 391/230 259/151 229/143 +f 73/55 281/282 397/231 +f 405/233 408/248 407/234 +f 203/236 218/1212 411/237 +f 414/239 121/2834 413/240 +f 1/3 356/198 357/200 +f 277/242 276/169 131/87 +f 254/244 344/1005 57/42 +f 358/199 384/1463 415/245 +f 417/247 416/336 386/225 +f 401/249 400/386 192/250 +f 419/252 389/228 127/99 +f 421/254 181/119 182/255 +f 103/75 301/438 302/257 +f 428/259 367/210 368/209 +f 380/221 390/290 359/260 +f 62/43 204/1809 266/160 +f 140/94 141/174 431/262 +f 357/200 358/199 351/246 +f 356/198 1/3 379/219 +f 415/245 392/1289 403/264 +f 437/266 428/259 436/267 +f 438/269 441/2805 440/270 +f 188/122 451/320 450/272 +f 265/158 455/469 181/119 +f 224/277 185/166 64/46 +f 237/279 212/173 213/132 +f 275/243 131/87 128/89 +f 281/282 282/286 460/283 +f 50/35 462/1094 79/57 +f 73/55 74/54 282/286 +f 468/287 467/410 466/288 +f 390/290 355/201 352/203 +f 482/291 485/1641 484/292 +f 489/294 488/414 487/295 +f 493/297 496/402 495/298 +f 472/300 471/1426 470/301 +f 502/303 501/456 500/304 +f 220/306 221/2566 222/307 +f 512/309 419/252 143/253 +f 518/310 517/1897 511/311 +f 513/313 239/1191 230/314 +f 205/316 30/14 31/317 +f 189/319 370/434 451/320 +f 163/213 318/181 532/321 +f 533/322 224/277 225/278 +f 446/324 448/1848 447/325 +f 335/326 541/382 540/327 +f 544/329 512/309 546/330 +f 329/332 519/475 291/333 +f 199/127 534/1141 262/335 +f 385/226 386/225 416/336 +f 
532/321 190/1168 191/338 +f 363/204 117/82 118/83 +f 578/342 581/383 580/343 +f 573/345 572/1416 571/346 +f 561/348 563/968 562/349 +f 241/187 260/150 259/151 +f 590/351 593/2023 592/352 +f 545/331 546/330 595/354 +f 503/356 154/104 155/357 +f 606/359 625/1940 624/360 +f 478/362 481/404 480/363 +f 646/365 649/1935 648/366 +f 590/351 591/353 689/368 +f 564/370 565/1310 651/371 +f 728/373 246/141 184/374 +f 714/376 724/2057 716/377 +f 692/379 574/391 575/380 +f 541/382 335/326 581/383 +f 346/384 68/49 248/385 +f 161/214 231/1059 230/314 +f 128/89 129/88 94/69 +f 698/388 705/2159 704/389 +f 574/391 577/3099 576/392 +f 290/393 289/175 401/249 +f 630/395 631/1316 725/396 +f 733/398 736/2821 735/399 +f 755/401 466/288 496/402 +f 759/403 480/363 481/404 +f 757/406 763/2211 762/407 +f 764/409 496/402 466/288 +f 768/411 767/1656 3808/412 +f 759/403 760/405 488/414 +f 492/415 474/418 475/416 +f 474/418 770/1901 769/419 +f 773/421 468/287 465/289 +f 771/417 774/425 486/296 +f 484/292 485/1641 775/423 +f 774/425 776/1419 489/294 +f 495/298 496/402 764/409 +f 495/298 757/406 761/408 +f 137/427 397/231 784/428 +f 325/188 239/1191 797/430 +f 165/217 519/475 51/432 +f 369/433 32/13 29/15 +f 730/435 731/1875 568/436 +f 90/66 302/257 301/438 +f 243/340 118/83 516/439 +f 800/441 734/400 806/442 +f 817/444 815/451 808/445 +f 816/446 808/445 823/447 +f 306/449 303/1201 447/325 +f 815/451 810/515 809/452 +f 829/453 830/472 828/454 +f 726/397 725/396 501/456 +f 274/164 427/280 424/457 +f 879/459 882/1252 881/460 +f 884/462 883/1843 879/459 +f 219/464 218/1212 217/465 +f 906/466 905/673 904/467 +f 455/469 204/1809 205/316 +f 898/471 882/1252 830/472 +f 903/473 900/484 880/461 +f 377/474 291/333 519/475 +f 923/476 949/553 948/477 +f 946/478 947/488 951/479 +f 3519/481 740/1934 739/482 +f 900/484 899/2835 886/485 +f 225/278 64/46 65/486 +f 182/255 445/1129 442/487 +f 809/452 952/1297 947/488 +f 954/490 960/3494 962/491 +f 927/493 950/480 965/494 +f 885/463 879/459 880/461 +f 925/496 927/493 966/495 +f 343/498 342/1502 341/499 +f 965/494 964/1798 971/501 +f 968/502 971/501 970/503 +f 972/505 968/502 969/504 +f 963/507 961/492 981/508 +f 336/510 337/3449 338/511 +f 992/513 993/1797 827/514 +f 810/515 815/451 996/516 +f 1007/518 1001/1986 1002/519 +f 1012/521 826/678 827/514 +f 1011/523 1009/522 1007/518 +f 294/524 293/1088 292/334 +f 907/525 908/716 920/526 +f 1017/528 579/344 1019/529 +f 1020/531 1023/593 1022/532 +f 1035/534 1024/614 1020/531 +f 1061/536 1060/608 1047/537 +f 1063/539 1061/536 1046/538 +f 1044/535 1020/531 1021/533 +f 1102/542 1094/552 1097/543 +f 1116/545 1126/561 1118/546 +f 1016/548 1015/616 1014/549 +f 881/460 897/468 904/467 +f 816/446 1116/545 1127/551 +f 1094/552 948/477 949/553 +f 514/554 342/1502 167/555 +f 146/557 145/1011 81/59 +f 1133/558 1135/2261 1130/559 +f 1102/542 1133/558 1126/561 +f 822/448 1126/561 1116/545 +f 1145/562 1143/1862 1148/563 +f 1076/565 1144/578 1151/566 +f 1148/563 1143/1862 1086/568 +f 1109/544 1097/543 1114/570 +f 1154/571 1153/574 1156/572 +f 1153/574 925/496 967/497 +f 3464/575 3497/1438 463/576 +f 1144/578 1147/586 1152/579 +f 1156/572 967/497 1163/580 +f 1164/582 1162/581 1169/583 +f 1145/562 1171/587 1176/585 +f 1114/570 1115/1585 1154/571 +f 1183/588 1155/573 1164/582 +f 1184/590 1183/588 1177/589 +f 1069/567 1151/566 1062/540 +f 1023/593 1191/597 1194/594 +f 1162/581 1163/580 1170/595 +f 1195/596 1184/590 1185/591 +f 1183/588 1196/599 1176/585 +f 1201/598 1063/539 1152/579 +f 1061/536 1063/539 1201/598 +f 3825/601 847/624 843/602 +f 1203/604 1202/600 
1195/596 +f 1202/600 1201/598 1184/590 +f 1218/606 1060/608 1203/604 +f 1060/608 1061/536 1202/600 +f 1225/609 1218/606 1217/607 +f 671/611 695/2132 694/612 +f 1219/610 1217/607 1024/614 +f 1217/607 1203/604 1034/605 +f 1226/615 1014/549 1015/616 +f 1243/618 1242/650 1234/619 +f 1231/621 1230/3955 1229/622 +f 847/624 1243/618 1233/620 +f 843/602 1233/620 1244/625 +f 1233/620 1234/619 1245/627 +f 1923/628 3761/2110 233/629 +f 2100/630 2108/3944 499/305 +f 3914/631 3864/2180 2135/632 +f 61/44 62/43 323/261 +f 210/134 211/133 217/465 +f 1299/636 1310/646 1309/637 +f 3863/639 1230/3955 1231/621 +f 1066/641 1047/537 1321/642 +f 1322/643 1321/642 1228/623 +f 1060/608 1218/606 1321/642 +f 1325/644 1324/1924 1323/645 +f 1244/625 1245/627 1332/647 +f 1290/649 1234/619 1242/650 +f 1337/652 1330/648 1339/653 +f 1330/648 1332/647 1340/655 +f 1341/656 1332/647 1245/627 +f 1342/658 1340/655 1332/647 +f 1343/659 1342/658 1341/656 +f 1344/660 1341/656 1294/657 +f 3857/662 3856/666 1345/663 +f 1347/665 1348/669 1342/658 +f 3856/666 3854/1793 1295/667 +f 1349/668 1350/704 1348/669 +f 3861/670 1349/668 1347/665 +f 3853/671 1347/665 1343/659 +f 906/466 898/471 829/453 +f 905/673 906/466 1353/672 +f 865/675 1356/679 1355/676 +f 1355/676 1353/672 829/453 +f 1357/677 1355/676 825/455 +f 1356/679 1354/674 1353/672 +f 3834/680 1337/652 1338/654 +f 866/682 1358/683 1356/679 +f 1358/683 1359/706 1354/674 +f 3828/684 878/626 1337/652 +f 838/685 1362/2226 1012/521 +f 1363/687 1364/2125 1338/654 +f 1368/688 1363/687 1339/653 +f 1348/669 1368/688 1340/655 +f 1371/689 1369/699 3824/690 +f 524/692 525/1846 526/693 +f 528/695 529/2725 530/696 +f 1372/698 1370/705 1369/699 +f 535/700 536/2750 537/701 +f 1367/703 1368/688 1348/669 +f 1370/705 1354/674 1359/706 +f 1369/699 1359/706 3833/681 +f 1373/707 910/712 903/473 +f 555/708 556/1166 557/709 +f 1374/711 909/1139 910/712 +f 1373/707 904/467 905/673 +f 1374/711 1373/707 1375/713 +f 1375/713 905/673 1354/674 +f 1378/715 920/526 908/716 +f 376/718 373/2850 651/371 +f 1388/720 928/2032 973/506 +f 586/721 587/1197 588/722 +f 1390/724 1389/2037 1388/720 +f 1391/725 1388/720 969/504 +f 597/726 598/2341 599/727 +f 600/729 601/966 602/730 +f 1392/732 1391/725 970/503 +f 608/734 609/770 610/735 +f 612/737 524/692 523/694 +f 614/739 615/1156 616/740 +f 617/742 618/2279 25/25 +f 620/743 621/1309 622/744 +f 1397/746 1390/724 1391/725 +f 1398/747 1397/746 1392/732 +f 1400/749 987/1613 920/526 +f 1402/750 1403/760 1165/584 +f 636/751 637/2749 638/752 +f 930/754 1404/759 1402/750 +f 641/756 642/2787 643/757 +f 929/755 1402/750 1169/583 +f 1404/759 1405/844 1403/760 +f 1411/761 1366/2126 1365/762 +f 1412/764 1406/763 1415/765 +f 652/767 653/869 654/768 +f 609/770 656/1206 657/771 +f 659/773 660/2839 661/774 +f 558/710 663/1020 586/721 +f 665/776 666/2278 667/777 +f 1413/779 1411/761 1406/763 +f 1418/780 1416/784 3810/781 +f 1417/783 1372/698 1371/689 +f 1416/784 1371/689 3829/691 +f 1419/785 1417/783 1416/784 +f 1386/786 909/1139 1374/711 +f 677/788 678/3199 679/789 +f 1385/791 1386/786 1420/787 +f 1420/787 1374/711 1376/714 +f 685/794 686/856 687/795 +f 1421/792 1420/787 1422/793 +f 528/695 690/905 691/798 +f 1422/793 1376/714 1372/698 +f 1018/530 1019/529 747/799 +f 699/801 700/808 701/802 +f 1424/804 578/342 579/344 +f 3663/805 1597/1397 1767/806 +f 700/808 699/801 711/809 +f 1093/811 3746/3924 3754/812 +f 717/814 718/1948 530/696 +f 721/816 722/2722 723/817 +f 704/389 1650/2184 3543/819 +f 19/11 1429/823 1426/821 +f 1429/823 1428/2168 1427/824 +f 1658/825 1659/1576 3542/826 +f 1439/828 
1437/831 1434/829 +f 1437/831 1436/963 1435/832 +f 1426/821 1427/824 1433/833 +f 1442/835 1431/1886 1432/834 +f 742/837 743/1237 744/838 +f 1432/834 1433/833 1441/840 +f 751/841 752/2816 753/842 +f 1438/830 1434/829 1403/760 +f 1446/845 1445/851 1444/846 +f 1447/848 1448/852 1446/845 +f 3860/849 1447/848 1443/847 +f 1415/765 1367/703 1445/851 +f 1414/766 1415/765 1446/845 +f 2173/853 1683/1805 469/302 +f 777/854 778/1333 779/855 +f 780/857 781/2875 782/858 +f 1676/860 1693/1016 1806/861 +f 3859/863 1451/1244 1349/668 +f 789/864 790/1110 791/865 +f 745/839 792/1871 793/867 +f 652/767 794/1124 795/868 +f 1455/870 1458/1976 1457/871 +f 1454/873 1367/703 1350/704 +f 610/735 691/798 802/875 +f 3831/877 3816/3662 698/388 +f 1352/878 1044/535 1045/541 +f 1015/616 1016/548 1459/880 +f 811/882 812/1230 813/883 +f 2118/885 3815/1967 3817/886 +f 819/888 820/1811 821/889 +f 1466/891 1463/3868 1194/594 +f 1467/893 1468/896 1465/894 +f 1182/895 1165/584 1468/896 +f 832/897 812/1230 833/898 +f 835/900 836/2798 837/901 +f 1467/893 1466/891 1190/892 +f 1469/903 1470/3943 1444/846 +f 840/904 700/808 712/810 +f 1453/906 1470/3943 1469/903 +f 599/727 844/1245 845/907 +f 1226/615 1450/2169 3849/909 +f 851/911 852/1137 853/912 +f 855/914 744/838 856/915 +f 858/917 859/1412 860/918 +f 861/920 862/2827 863/921 +f 1014/549 1226/615 3855/910 +f 867/924 676/790 868/925 +f 870/927 871/2778 872/928 +f 874/930 875/1035 876/931 +f 1310/646 1323/645 1471/933 +f 1476/934 1433/833 1427/824 +f 1461/936 1476/934 1475/935 +f 887/937 888/1919 889/938 +f 788/866 791/865 892/940 +f 894/942 895/961 896/943 +f 1460/881 1475/935 1227/617 +f 901/945 902/1845 524/692 +f 1465/894 1478/964 1477/946 +f 1865/948 3503/2193 3500/949 +f 911/951 912/2728 913/952 +f 916/954 917/1106 918/955 +f 1850/957 3505/987 3507/958 +f 921/960 922/2351 895/961 +f 1479/962 1435/832 1436/963 +f 926/965 664/778 667/777 +f 1481/967 562/349 563/968 +f 269/163 1483/1043 84/970 +f 931/972 932/1300 933/973 +f 934/974 935/1045 614/739 +f 936/975 685/794 688/796 +f 938/977 659/773 662/775 +f 1484/979 270/162 122/980 +f 941/982 942/1102 835/900 +f 320/983 321/1830 509/984 +f 1849/986 3510/3918 3505/987 +f 84/970 1483/1043 1484/979 +f 1487/988 1490/1027 1488/989 +f 1757/991 1758/3871 3502/992 +f 365/207 1492/1004 1491/994 +f 461/285 79/57 80/312 +f 117/82 363/204 362/205 +f 1494/996 540/327 541/382 +f 926/965 601/966 957/998 +f 1477/946 1441/840 1433/833 +f 1709/1000 1588/1526 1585/1001 +f 119/194 117/82 1496/995 +f 271/1003 1497/1778 504/358 +f 511/311 517/1897 189/319 +f 345/193 1496/995 1492/1004 +f 57/42 344/1005 299/1006 +f 1501/1007 1499/1010 268/1008 +f 267/159 268/1008 1499/1010 +f 974/1012 975/2766 976/1013 +f 1678/1015 1652/1508 1693/1016 +f 3846/1017 1682/1806 1807/1018 +f 557/709 982/1167 663/1020 +f 983/1021 941/982 984/1022 +f 1443/847 1444/846 3849/909 +f 421/254 1500/1009 268/1008 +f 989/1024 990/2760 991/1025 +f 1490/1027 1487/988 418/1028 +f 279/1030 280/167 309/1031 +f 997/1032 868/925 676/790 +f 998/1033 26/2732 999/1034 +f 1504/1029 418/1028 38/21 +f 1506/1037 1508/1041 1507/1038 +f 360/1040 361/1408 1508/1041 +f 1506/1037 1486/1039 207/131 +f 939/978 1005/1081 1006/1042 +f 270/162 1484/979 1483/1043 +f 66/45 63/47 546/330 +f 1010/1044 742/837 793/867 +f 367/210 428/259 437/266 +f 244/318 31/317 363/204 +f 1670/1047 3532/1639 3534/1048 +f 209/215 206/129 1510/1050 +f 388/229 389/228 426/1056 +f 427/280 274/164 250/142 +f 161/214 520/1430 521/1058 +f 100/71 246/141 45/33 +f 1025/1060 1026/1232 608/734 +f 1029/1062 1030/1225 1031/1063 +f 939/978 
662/775 1033/1065 +f 41/28 42/30 594/355 +f 1036/1068 780/857 783/859 +f 702/803 701/802 1038/1070 +f 1040/1072 1041/1155 1042/1073 +f 362/205 32/13 369/433 +f 187/123 1520/1358 477/1076 +f 70/51 1487/988 539/990 +f 1049/1078 600/729 603/731 +f 613/741 867/924 869/926 +f 860/918 1050/2740 1051/1080 +f 1005/1081 1052/2818 1053/1082 +f 753/842 1055/1087 1056/1083 +f 1057/1085 1058/2752 1059/1086 +f 293/1088 288/172 1522/1089 +f 288/172 212/173 1521/1077 +f 292/334 1522/1089 1523/1090 +f 331/1091 1507/1038 1508/1041 +f 306/449 452/274 453/276 +f 100/71 44/140 41/28 +f 462/1094 1524/1092 78/58 +f 330/1095 331/1091 1524/1092 +f 635/753 1067/1423 1068/1096 +f 1485/1097 1527/1833 1526/1098 +f 206/129 1485/1097 1528/1099 +f 1070/1100 1071/1255 1072/1101 +f 942/1102 941/982 1073/1103 +f 916/954 933/973 1075/1105 +f 440/270 1529/1109 1530/1107 +f 157/108 158/107 1514/1108 +f 1529/1109 162/2842 5/6 +f 371/212 595/354 546/330 +f 790/1110 586/721 663/1020 +f 1077/1111 858/917 857/919 +f 1403/760 1434/829 1435/832 +f 1080/1113 1081/2783 1082/1114 +f 531/1116 520/1430 1529/1109 +f 21/22 1083/1249 1084/1117 +f 272/350 1532/1367 1531/1119 +f 690/905 712/810 802/875 +f 335/326 3571/3933 3567/1120 +f 1090/1121 1030/1225 1029/1062 +f 1092/1123 795/868 794/1124 +f 1533/1125 396/232 137/427 +f 661/774 660/2839 1095/1127 +f 69/52 444/2847 445/1129 +f 1099/1130 1100/2789 1101/1131 +f 38/21 39/20 1536/1133 +f 1104/1134 1105/2753 1106/1135 +f 852/1137 1107/1323 1108/1138 +f 1377/717 908/716 909/1139 +f 291/333 377/474 411/237 +f 316/1140 1536/1133 516/439 +f 539/990 1488/989 1533/1125 +f 1505/1036 1536/1133 316/1140 +f 1110/1143 1111/2862 1112/1144 +f 1539/1146 1537/1149 460/283 +f 522/1148 441/2805 1537/1149 +f 834/902 837/901 1120/1150 +f 1122/1152 1123/3253 1124/1153 +f 612/737 1125/1812 1041/1155 +f 935/1045 793/867 615/1156 +f 276/169 278/168 130/1157 +f 1686/1158 3549/1590 3522/1159 +f 193/1161 190/1168 1541/1162 +f 231/1059 521/1058 1540/1164 +f 556/1166 1037/1069 982/1167 +f 365/207 253/146 85/64 +f 190/1168 532/321 318/181 +f 1810/1169 765/2091 3574/1170 +f 3924/1172 1867/1709 3512/1173 +f 1132/1175 983/1021 985/1023 +f 208/130 101/74 102/76 +f 1134/1177 1033/1065 662/775 +f 1491/994 518/310 80/312 +f 429/1178 133/92 1542/1179 +f 1137/1180 1138/2830 915/956 +f 172/117 436/267 428/259 +f 1139/1181 1140/2786 1141/1182 +f 549/1184 96/68 54/39 +f 1517/1185 1519/1253 1518/1186 +f 834/902 1119/1151 984/1022 +f 1543/1189 513/313 286/315 +f 1544/1190 797/430 239/1191 +f 133/92 134/91 1544/1190 +f 506/1192 361/1408 257/1193 +f 420/251 327/1304 328/394 +f 1158/1194 1159/1880 1160/1195 +f 587/1197 1104/1134 1103/1136 +f 607/736 1161/1240 1027/1061 +f 304/1199 1546/2225 1545/1200 +f 364/206 1491/994 77/56 +f 69/52 70/51 198/128 +f 700/808 840/904 1112/1144 +f 3877/1202 626/1496 3525/1203 +f 1166/1205 719/815 657/771 +f 1167/1207 1168/2858 858/917 +f 1300/638 3523/3940 3524/1208 +f 1172/1209 1173/2770 1174/1210 +f 218/1212 203/236 200/1213 +f 1179/1214 1180/2849 1181/1215 +f 372/1217 183/211 97/73 +f 89/67 104/2810 227/1218 +f 1186/1219 1187/1278 1188/1220 +f 172/117 173/116 1517/1185 +f 1482/971 84/970 81/59 +f 1090/1121 1192/1922 1193/1224 +f 394/1226 395/1284 353/202 +f 526/693 525/1846 1198/1228 +f 1199/1231 1200/2736 1026/1232 +f 606/359 623/361 697/1233 +f 1204/1235 1205/1332 1206/1236 +f 1207/1238 1208/2819 523/694 +f 1209/1239 1210/2836 867/924 +f 914/953 913/952 685/794 +f 1478/964 1436/963 1441/840 +f 1029/1062 1028/1064 1161/1240 +f 1211/1241 1212/1791 1213/1242 +f 3858/923 1453/906 1451/1244 +f 844/1245 
1215/1887 1216/1246 +f 423/458 193/1161 273/1163 +f 1220/1247 1221/1259 1222/1248 +f 22/1250 1223/1344 1224/1251 +f 882/1252 898/471 897/468 +f 1519/1253 1517/1185 173/116 +f 1189/1221 1188/1220 1232/1254 +f 349/197 350/196 550/337 +f 1071/1255 1235/1979 1236/1256 +f 1237/1257 1238/2765 1239/1258 +f 1221/1259 1186/1219 1189/1221 +f 1510/1050 1528/1099 1512/1053 +f 939/978 1032/1066 1241/1262 +f 1556/1263 1530/1107 5/6 +f 1553/1264 1554/1795 394/1226 +f 404/1265 1556/1263 67/48 +f 1247/1267 1248/2846 1249/1268 +f 142/93 199/127 238/180 +f 1251/1270 1067/1423 635/753 +f 850/913 1252/2796 1253/1272 +f 1254/1273 1167/1207 1077/1111 +f 57/42 132/90 133/92 +f 443/1275 430/263 552/1276 +f 1187/1278 1172/1209 1175/1211 +f 1256/1279 1070/1100 831/899 +f 432/1280 405/233 382/1281 +f 741/1282 748/1906 747/799 +f 434/265 350/196 351/246 +f 349/197 432/1280 383/222 +f 251/323 124/84 1481/967 +f 395/1284 384/1463 352/203 +f 1518/1186 1519/1253 1550/1285 +f 1558/1286 410/238 378/216 +f 109/157 263/156 314/1287 +f 285/170 1514/1108 1513/1198 +f 395/1284 394/1226 393/1288 +f 202/135 203/236 410/238 +f 417/247 408/248 405/233 +f 1246/1269 1257/1351 1258/1290 +f 3/223 1559/1330 297/1291 +f 1259/1292 1260/2866 1261/1293 +f 403/264 404/1265 347/1266 +f 955/1295 954/490 953/1296 +f 462/1094 50/35 51/432 +f 1096/1128 1095/1127 687/795 +f 429/1178 147/97 60/40 +f 896/943 1264/1910 1265/1299 +f 1267/1301 721/816 1268/1302 +f 423/458 424/457 327/1304 +f 37/19 179/118 180/120 +f 20/24 23/23 1187/1278 +f 1256/1279 1168/2858 1071/1255 +f 1188/1220 1175/1211 1269/1306 +f 1236/1256 1270/1980 1271/1307 +f 1272/1308 536/2750 535/700 +f 565/1310 564/370 463/576 +f 1273/1312 792/1871 745/839 +f 876/931 1254/1273 1255/1274 +f 316/1140 33/18 164/110 +f 437/266 1516/1966 503/356 +f 194/124 1494/996 1495/997 +f 1275/1315 1276/1376 864/922 +f 624/360 625/1940 631/1316 +f 1277/1317 1274/1313 1137/1180 +f 569/437 568/436 567/1319 +f 1481/967 124/84 125/86 +f 324/189 325/188 547/431 +f 1279/1322 1108/1138 1107/1323 +f 1525/1325 332/3222 329/332 +f 1281/1326 1278/1318 1137/1180 +f 171/112 168/114 1553/1264 +f 1488/989 73/55 396/232 +f 1557/1328 1561/1521 1562/1329 +f 1282/1331 856/915 1206/1236 +f 1053/1082 1277/1317 1278/1318 +f 778/1333 777/854 607/736 +f 713/1335 583/1384 3566/1336 +f 251/323 225/278 144/98 +f 4/1 357/200 348/195 +f 709/1338 708/2182 707/1339 +f 728/373 48/31 45/33 +f 261/1341 135/1847 1563/1342 +f 789/864 1286/2354 1287/1343 +f 74/54 314/1287 263/156 +f 403/264 284/224 385/226 +f 1178/1216 1181/1215 1240/1260 +f 1223/1344 1259/1292 1289/1345 +f 1220/1247 20/24 1186/1219 +f 296/1187 1518/1186 354/1346 +f 784/428 459/284 1538/1348 +f 1181/1215 1180/2849 1291/1350 +f 1269/1306 1292/2860 1257/1351 +f 1566/1349 1538/1348 438/269 +f 136/429 784/428 1566/1349 +f 1563/1342 1564/1353 169/113 +f 107/81 114/155 109/157 +f 1297/1355 1282/1331 1298/1356 +f 1464/947 1477/946 1476/934 +f 1520/1358 187/123 507/1359 +f 1301/1361 1302/2795 1303/1362 +f 1306/1364 1307/2353 1308/1365 +f 175/176 456/2822 1532/1367 +f 1532/1367 456/2822 200/1213 +f 1311/1368 1266/1303 1312/1369 +f 872/928 1314/2859 1315/1371 +f 3836/603 843/602 878/626 +f 1305/1366 1308/1365 864/922 +f 1040/1072 1275/1315 863/921 +f 1564/1353 1566/1349 399/1352 +f 592/352 1576/2019 1575/1378 +f 591/353 1575/1378 1088/1379 +f 1581/1380 1584/1631 1583/1381 +f 1196/599 1152/579 1147/586 +f 710/1383 582/1461 583/1384 +f 285/170 407/234 408/248 +f 953/1296 954/490 961/492 +f 1594/1385 1592/1390 1589/1386 +f 1585/1001 1588/1526 1587/1388 +f 1592/1390 1591/1548 1590/1391 +f 
3885/1392 945/1944 944/1393 +f 1600/1395 1599/1525 1598/1396 +f 1604/1398 1603/3139 1602/1399 +f 1399/748 1392/732 979/733 +f 1589/1386 1590/1391 1596/1402 +f 1610/1404 1609/1963 1608/1405 +f 298/1407 257/1193 361/1408 +f 551/1277 552/1276 276/169 +f 1627/1409 1626/1487 3814/1410 +f 859/1412 811/882 818/890 +f 1078/1112 857/919 1328/1413 +f 1555/1415 439/271 1530/1107 +f 572/1416 573/345 680/1417 +f 772/422 465/289 776/1419 +f 977/1014 1281/1326 918/955 +f 404/1265 403/264 392/1289 +f 1054/1084 1331/2759 750/843 +f 1644/1420 1645/1985 1643/1421 +f 932/1300 1265/1299 1082/1114 +f 1067/1423 1333/2808 1334/1424 +f 1540/1164 1539/1146 264/1147 +f 597/726 1335/1537 643/757 +f 471/1426 761/408 762/407 +f 1642/1422 696/1234 3831/877 +f 1644/1420 1642/1422 3842/1427 +f 923/476 946/478 950/480 +f 96/68 549/1184 548/1429 +f 520/1430 531/1116 522/1148 +f 445/1129 444/2847 443/1275 +f 849/1431 839/3661 848/1432 +f 480/363 759/403 755/401 +f 1646/1434 760/405 481/404 +f 311/1436 426/1056 1567/1437 +f 464/1311 463/576 3497/1438 +f 3497/1438 1360/2001 1361/1440 +f 234/1055 237/279 236/341 +f 190/1168 193/1161 192/250 +f 153/105 154/104 152/1441 +f 231/1059 287/1165 286/315 +f 1552/1327 1553/1264 1551/1227 +f 760/405 1646/1434 1648/1442 +f 3832/1443 1647/1964 1649/1444 +f 828/454 827/514 826/678 +f 52/34 167/555 166/218 +f 1663/1446 1662/1674 1661/1447 +f 287/1165 107/81 115/80 +f 668/1449 671/611 670/613 +f 634/1451 645/1725 640/1452 +f 553/1454 560/2229 559/1455 +f 118/83 36/16 33/18 +f 492/415 484/292 491/424 +f 3552/1458 1652/1508 1651/1459 +f 582/1461 589/1849 584/1462 +f 384/1463 395/1284 392/1289 +f 1381/1464 1382/2307 1383/1465 +f 1562/1329 1561/1521 435/268 +f 952/1297 953/1296 951/479 +f 3529/1467 3566/1336 583/1384 +f 1660/1448 1661/1447 1667/1468 +f 1394/1470 1395/1722 1396/1471 +f 1660/1448 1665/1569 1664/1473 +f 1671/1474 1670/1047 1669/1049 +f 1624/1476 1625/1485 1673/1477 +f 1407/1479 1408/2312 1409/1480 +f 1677/1482 1676/860 1675/862 +f 1613/1484 1679/1486 1677/1482 +f 1679/1486 1678/1015 1676/860 +f 1626/1487 1681/1804 3811/1488 +f 534/1141 199/127 198/128 +f 803/1489 733/398 734/400 +f 400/386 248/385 191/338 +f 65/486 66/45 143/253 +f 3526/1490 1657/1917 1656/1491 +f 402/1493 159/106 157/108 +f 629/1494 628/2154 627/1495 +f 461/285 477/1076 49/36 +f 758/1497 757/406 495/298 +f 796/1498 787/1780 786/1499 +f 406/235 407/234 1513/1198 +f 342/1502 343/498 166/218 +f 1694/1503 1659/1576 1658/825 +f 1692/1505 1687/3508 1694/1503 +f 3557/1507 1693/1016 1652/1508 +f 3921/1509 1704/1530 1703/1510 +f 326/1321 547/431 302/257 +f 3916/1512 3921/1509 1702/1513 +f 3919/1515 3918/1520 1698/1516 +f 1699/1518 1691/3926 3927/1519 +f 1557/1328 406/235 1560/1501 +f 1127/551 1116/545 1117/547 +f 400/386 401/249 289/175 +f 1706/1514 1702/1513 1707/1523 +f 1707/1523 1702/1513 1599/1525 +f 1709/1000 1706/1514 1708/1524 +f 1714/1527 1712/1553 1711/1528 +f 3919/1515 3915/1788 1709/1000 +f 1704/1530 1711/1528 1577/1531 +f 1716/1532 1714/1527 1713/1529 +f 1559/1330 1562/1329 1549/1222 +f 1722/1534 1721/1557 1720/1535 +f 1335/1537 597/726 596/728 +f 410/238 411/237 377/474 +f 973/506 1170/595 1163/580 +f 168/114 169/113 399/1352 +f 1571/813 682/1542 1089/1540 +f 1572/1541 681/1418 682/1542 +f 749/1543 646/365 647/367 +f 1730/1545 1724/1554 1723/1546 +f 1594/1385 1582/1382 1729/1547 +f 1729/1547 1723/1546 1591/1548 +f 1384/1550 1378/715 1377/717 +f 1732/1552 1731/1549 1712/1553 +f 1724/1554 1730/1545 1731/1549 +f 1722/1534 1724/1554 1732/1552 +f 1721/1557 1722/1534 1733/1556 +f 1734/1558 1733/1556 1716/1532 +f 
1735/1560 1737/1568 1736/1561 +f 1733/1556 1732/1552 1714/1527 +f 1663/1446 1664/1473 1741/1563 +f 1742/1564 1665/1569 1740/1565 +f 1738/1567 1739/1582 1737/1568 +f 1665/1569 1660/1448 1666/1469 +f 1665/1569 1742/1564 1744/1570 +f 1725/1571 1726/1583 1746/1572 +f 1591/1548 1725/1571 1745/1573 +f 1590/1391 1745/1573 1747/1574 +f 1745/1573 1746/1572 1748/1575 +f 1659/1576 1688/3953 3538/1577 +f 3287/1578 1760/2207 3545/1579 +f 1185/591 1177/589 1182/895 +f 1664/1473 1744/1570 1749/1581 +f 1741/1563 1749/1581 1739/1582 +f 1746/1572 1742/1564 1743/1566 +f 1726/1583 1744/1570 1742/1564 +f 1115/1585 924/1584 1153/574 +f 1654/1586 3546/3931 3575/1587 +f 1896/1589 3562/1933 3549/1590 +f 1720/1535 1749/1581 1744/1570 +f 1721/1557 1739/1582 1749/1581 +f 1750/1591 1718/1841 1701/1517 +f 1697/1562 1736/1561 1750/1591 +f 1151/566 1152/579 1063/539 +f 1751/1592 1752/1593 1670/1047 +f 1695/1504 1658/825 1752/1593 +f 961/492 962/491 986/1594 +f 1770/1595 1769/1619 1768/1596 +f 1766/1597 1594/1385 1593/1387 +f 1731/1549 1759/3895 1579/1599 +f 1579/1599 1777/1607 1774/1600 +f 3928/1602 1772/1612 1771/1603 +f 1764/1605 1772/1612 3928/1602 +f 1777/1607 1776/1630 1775/1608 +f 3762/1609 3763/2780 1699/1518 +f 1004/1610 1127/551 1128/1522 +f 1774/1600 1775/1608 1771/1603 +f 987/1613 1400/749 1399/748 +f 1578/1601 1774/1600 1772/1612 +f 1783/1614 1782/1696 1781/1615 +f 1597/1397 1598/1396 1770/1595 +f 639/1453 1878/1731 3895/1617 +f 3928/1602 3929/1604 1769/1619 +f 1793/1620 1792/2099 1789/1621 +f 3929/1604 3920/1632 1798/1623 +f 3508/1624 3516/3950 3773/1625 +f 1802/1626 1867/1709 3924/1172 +f 1788/1622 1790/1678 1794/1628 +f 1793/1620 1794/1628 1795/1629 +f 3773/1625 3772/1648 1675/862 +f 1584/1631 1581/1380 1783/1614 +f 1771/1603 1797/1636 3920/1632 +f 1801/1633 1799/1635 1798/1623 +f 1768/1596 1769/1619 1798/1623 +f 1775/1608 1796/1637 1797/1636 +f 508/1360 514/554 515/556 +f 1796/1637 1803/1708 1802/1626 +f 1800/1634 1798/1623 3920/1632 +f 1794/1628 1805/1673 1804/1638 +f 3532/1639 1670/1047 1752/1593 +f 485/1641 1808/1645 1807/1018 +f 3809/413 3844/2092 1640/1642 +f 3837/1643 3844/2092 1810/1169 +f 485/1641 482/291 1640/1642 +f 1690/1646 1691/3926 3770/1647 +f 487/295 488/414 1648/1442 +f 3839/1650 1813/3880 1816/1651 +f 3838/1653 1811/1649 1648/1442 +f 3839/1650 1812/1654 1811/1649 +f 1812/1654 1809/1660 487/295 +f 1641/1655 1809/1660 767/1656 +f 465/289 466/288 755/401 +f 767/1656 1815/1661 3823/1657 +f 3822/1652 1816/1651 3826/1658 +f 1809/1660 1812/1654 1815/1661 +f 3837/1643 1653/1644 1651/1459 +f 675/1662 672/3186 3541/1663 +f 1814/1665 1649/1444 1822/1666 +f 1821/1667 1822/1666 3821/1668 +f 1824/1670 1822/1666 1649/1444 +f 1827/1672 1849/986 1850/957 +f 1668/1475 1669/1049 1662/1674 +f 1779/1675 1780/1747 1848/1676 +f 1790/1678 1827/1672 1805/1673 +f 1833/1679 1836/3184 1853/1680 +f 3521/1682 3506/1735 693/381 +f 1614/1683 1615/1721 1616/1684 +f 1852/1681 1853/1680 3498/1686 +f 1620/1688 1621/2326 1622/1689 +f 1855/1691 1765/1598 1574/1692 +f 1847/1694 1852/1681 3501/1687 +f 1766/1597 1765/1598 1855/1691 +f 1629/1697 1630/2311 1631/1698 +f 1633/1700 1622/1689 1634/1701 +f 1635/1703 1636/2331 1637/1704 +f 1848/1676 1833/1679 1852/1681 +f 1855/1691 1861/1693 1863/1706 +f 1803/1708 1866/3870 1867/1709 +f 1862/1707 1863/1706 1831/1710 +f 1782/1696 1855/1691 1862/1707 +f 3922/1627 3924/1172 1865/948 +f 1873/1712 1872/2117 1870/1713 +f 1874/1715 1869/1714 799/1716 +f 2045/1718 3878/1937 3897/1719 +f 1219/610 1345/663 1295/667 +f 1615/1721 1614/1683 1396/1471 +f 3884/1618 3895/1617 1877/1723 +f 645/1725 
3898/2757 3786/1726 +f 1345/663 1219/610 1035/534 +f 634/1451 639/1453 3884/1618 +f 15/8 1887/3468 1327/1729 +f 3910/1730 3895/1617 1878/1731 +f 1894/1733 604/1850 605/1734 +f 3506/1735 3551/1899 627/1495 +f 3560/1736 640/1452 645/1725 +f 340/500 341/499 454/275 +f 1897/1737 194/124 195/126 +f 1889/1739 1882/1920 1877/1723 +f 1901/1741 94/69 129/88 +f 196/125 749/1543 754/1544 +f 1705/1744 1704/1530 3921/1509 +f 340/500 378/216 166/218 +f 1570/1745 1480/969 563/968 +f 1904/1746 1494/996 194/124 +f 1781/1615 1862/1707 1832/1711 +f 1905/1748 549/1184 55/38 +f 1000/1750 994/1796 995/1751 +f 986/1594 919/527 920/526 +f 1191/597 1185/591 1190/892 +f 2094/1752 2077/1982 2075/1753 +f 1920/1755 1921/2196 1919/1756 +f 453/276 454/275 1912/1758 +f 1915/1757 1919/1756 1918/1760 +f 3882/1762 2112/1890 2106/1763 +f 1655/1588 3575/1587 3531/1765 +f 1916/1761 1918/1760 1926/1767 +f 704/389 3554/820 3561/1769 +f 278/168 1916/1761 1925/1768 +f 3565/1770 3564/2811 1818/1771 +f 1097/543 949/553 1115/1585 +f 1927/1773 1928/2201 727/375 +f 3578/1774 3579/3584 3654/1775 +f 1920/1755 310/184 307/186 +f 1929/1777 307/186 48/31 +f 503/356 504/358 1497/1778 +f 1902/1779 1903/2111 787/1780 +f 1816/1651 1821/1667 3820/1669 +f 1925/1768 1926/1767 1923/628 +f 1535/1782 1569/1819 1568/1783 +f 254/244 252/145 258/147 +f 1509/1046 1497/1778 366/208 +f 119/194 86/63 35/1785 +f 219/464 288/172 293/1088 +f 141/174 46/32 47/191 +f 542/470 205/316 244/318 +f 1940/1787 552/1276 430/263 +f 1779/1675 1828/1677 1827/1672 +f 1701/1517 1710/1842 3915/1788 +f 1661/1447 3550/3898 3515/1789 +f 270/162 53/37 754/1544 +f 197/1314 1947/2080 749/1543 +f 1727/1790 1728/3193 1212/1791 +f 53/37 54/39 232/1743 +f 1465/894 1468/896 1479/962 +f 1284/1792 3862/640 3854/1793 +f 1951/1794 446/324 412/241 +f 168/114 398/1539 1554/1795 +f 1486/1039 1485/1097 206/129 +f 506/1192 77/56 78/58 +f 994/1796 1001/1986 1007/518 +f 951/479 953/1296 963/507 +f 1013/550 3859/863 3861/670 +f 3826/1658 3820/1669 1820/1772 +f 1498/1223 1909/1814 1954/1799 +f 299/1006 298/1407 543/1800 +f 1754/1801 1755/2271 1756/1802 +f 1681/1804 469/302 1683/1805 +f 1696/1506 1694/1503 1695/1504 +f 13/7 14/9 738/1807 +f 473/420 476/2205 475/416 +f 482/291 1641/1655 768/411 +f 204/1809 62/43 30/14 +f 1761/1810 1762/2739 820/1811 +f 611/738 1763/3241 1125/1812 +f 1501/1007 1503/2788 1908/1813 +f 1074/1104 1073/1103 711/809 +f 1960/1815 1944/1912 82/61 +f 120/1816 1956/1911 1960/1815 +f 35/1785 86/63 87/78 +f 1773/1817 1199/1231 1025/1060 +f 1965/1818 1545/1200 1569/1819 +f 494/299 761/408 471/1426 +f 1966/1820 1967/1824 1969/1821 +f 446/324 1951/1794 1971/1823 +f 174/115 368/209 125/86 +f 1580/1555 1579/1599 1578/1601 +f 1785/1825 1786/2724 1787/1826 +f 1972/1828 1546/2225 304/1199 +f 1254/1273 876/931 875/1035 +f 1973/1829 341/499 342/1502 +f 321/1830 1912/1758 1973/1829 +f 1910/1831 277/242 275/243 +f 509/984 1973/1829 514/554 +f 1486/1039 1507/1038 1527/1833 +f 1974/1834 1874/1715 804/1717 +f 1977/1836 1976/3511 1974/1834 +f 1980/1838 448/1848 1966/1820 +f 379/219 1/3 2/2 +f 1718/1841 1715/1533 1710/1842 +f 884/462 995/1751 992/513 +f 1680/1844 1684/3934 1678/1015 +f 902/1845 990/2760 525/1846 +f 344/1005 255/149 256/148 +f 135/1847 138/1126 137/427 +f 448/1848 446/324 1967/1824 +f 589/1849 605/1734 604/1850 +f 910/712 909/1139 908/716 +f 1981/1839 1966/1820 1968/1822 +f 1982/1852 1981/1839 1987/1851 +f 1988/1854 1979/1853 1987/1851 +f 1990/1856 1988/1854 1989/1855 +f 1991/1857 1989/1855 1993/1858 +f 322/1860 321/1830 320/983 +f 992/513 828/454 830/472 +f 1144/578 1076/565 
1086/568 +f 1995/1863 1977/1836 1978/1837 +f 1133/558 1129/560 1118/546 +f 1997/1865 1995/1863 1994/1864 +f 870/927 1823/1884 1335/1537 +f 805/443 806/442 1982/1852 +f 727/375 184/374 2000/1868 +f 1996/1866 1994/1864 1988/1854 +f 1994/1864 1978/1837 1979/1853 +f 1829/1870 616/740 792/1871 +f 1830/1872 1073/1103 941/982 +f 798/1873 803/1489 800/441 +f 729/1874 732/2883 731/1875 +f 2011/1876 2012/2802 2023/1877 +f 1834/1879 1835/2845 846/908 +f 402/1493 283/171 284/224 +f 2022/1878 2023/1877 2024/1881 +f 1838/1883 718/1948 989/1024 +f 810/515 955/1295 952/1297 +f 1823/1884 1839/2741 1840/1885 +f 1430/822 1426/821 1432/834 +f 1215/1887 844/1245 1841/1888 +f 1316/1372 1208/2819 873/929 +f 2112/1890 1401/2337 1384/1550 +f 1842/1891 1843/2769 1844/1892 +f 1159/1880 846/908 845/907 +f 837/901 27/26 1846/1894 +f 1971/1823 1951/1794 2030/1895 +f 517/1897 369/433 370/434 +f 1471/933 2026/1923 2027/1896 +f 3551/1899 3525/1203 626/1496 +f 2174/1900 2175/3580 770/1901 +f 935/1045 934/974 1854/1902 +f 1613/1484 1619/1949 1611/1903 +f 1231/621 1228/623 1225/609 +f 1856/1905 1857/2779 891/941 +f 1300/638 1309/637 748/1906 +f 1150/1907 1149/2264 1858/1908 +f 1643/1421 606/359 696/1234 +f 1215/1887 1265/1299 1264/1910 +f 1320/1377 1860/2837 902/1845 +f 1956/1911 1943/3865 1944/1912 +f 2031/1913 668/1449 669/1450 +f 2034/1915 1325/644 1310/646 +f 1879/1916 1880/2096 1656/1491 +f 40/1305 245/1786 242/440 +f 1992/1859 1993/1858 2039/1918 +f 888/1919 958/999 957/998 +f 1882/1920 1889/1739 1996/1866 +f 1078/1112 1138/2830 1255/1274 +f 1871/1921 912/2728 1192/1922 +f 294/524 411/237 218/1212 +f 1323/645 2041/1925 2026/1923 +f 123/981 1960/1815 83/60 +f 1324/1924 2036/1927 2041/1925 +f 384/1463 358/199 359/260 +f 1324/1924 1325/644 2038/1926 +f 1883/1928 1884/2082 1885/1929 +f 287/1165 1540/1164 114/155 +f 1096/1128 1886/2340 1134/1177 +f 915/956 1329/1414 931/972 +f 806/442 1980/1838 1981/1839 +f 3533/1930 3529/1467 584/1462 +f 1425/800 2030/1895 2042/1931 +f 1018/530 2042/1931 1948/1932 +f 3562/1933 1896/1589 740/1934 +f 1948/1932 2042/1931 649/1935 +f 1881/1936 1882/1920 1990/1856 +f 1900/1742 129/88 130/1157 +f 992/513 995/1751 994/1796 +f 923/476 927/493 925/496 +f 3878/1937 2045/1718 2047/1938 +f 1643/1421 2048/1975 625/1940 +f 2050/1941 2052/1961 2051/1942 +f 2045/1718 1405/844 1404/759 +f 3876/1939 2047/1938 945/1944 +f 1139/1181 1080/1113 1079/1115 +f 1037/1069 1856/1905 892/940 +f 2055/1945 2053/2009 2057/1946 +f 1198/1228 989/1024 718/1948 +f 1619/1949 2058/1951 1612/1950 +f 2058/1951 1605/1954 1606/1952 +f 2059/1953 2060/2882 1607/1406 +f 2058/1951 1619/1949 1624/1476 +f 71/1955 72/50 179/118 +f 405/233 406/235 1557/1328 +f 322/1860 1548/2355 1972/1828 +f 2061/1956 2052/1961 2063/1957 +f 2066/1959 2065/1969 2067/1960 +f 2052/1961 2061/1956 2064/1962 +f 1609/1963 1610/1404 2066/1959 +f 1515/1965 151/101 152/1441 +f 1009/522 827/514 993/1797 +f 1644/1420 3843/1428 3815/1967 +f 279/1030 1915/1757 1916/1761 +f 1285/1968 3863/639 3862/640 +f 1647/1964 2066/1959 1825/1671 +f 2056/1947 2057/1946 2067/1960 +f 319/1861 320/983 1953/1970 +f 667/777 666/2278 1913/1972 +f 2070/1974 631/1316 625/1940 +f 2052/1961 2050/1941 2055/1945 +f 2038/1926 1654/1586 1655/1588 +f 1606/1952 1601/1400 2073/1977 +f 1235/1979 1917/2851 1270/1980 +f 2072/1978 2073/1977 2075/1753 +f 2059/1953 1606/1952 2072/1978 +f 2074/1981 2075/1753 2077/1982 +f 1601/1400 1602/1399 2078/1984 +f 2049/1943 2048/1975 1643/1421 +f 1000/1750 1004/1610 1001/1986 +f 1089/1540 943/2088 684/369 +f 1931/1987 1286/2354 1306/1364 +f 1462/1989 1459/880 
+[Several thousand Wavefront OBJ face records added in this diff, each of the form "+f v/vt v/vt v/vt": triangle faces given as 1-based vertex/texture-coordinate index pairs (vertex indices up to ~3900, texture indices up to ~5000). Raw mesh geometry data, collapsed here for readability.]
+f 2240/3888 3572/3887 3535/3904 +f 2920/3905 3535/3904 3669/3906 +f 3548/2881 3534/1048 2786/2964 +f 1805/1673 1850/957 1851/959 +f 3534/1048 3532/1639 2787/3056 +f 2951/3438 2773/2953 3540/3908 +f 2772/3325 2951/3438 3531/3909 +f 2973/3182 2224/3382 3530/3910 +f 2224/3382 2223/2292 3537/3912 +f 1957/2870 1953/1970 320/983 +f 306/449 305/1093 304/1199 +f 2770/3897 3576/3930 3528/1171 +f 2913/3105 3528/1171 3574/1170 +f 2923/3893 3565/1770 3527/2826 +f 308/185 1940/1787 431/262 +f 207/131 208/130 132/90 +f 1879/1916 1876/1724 1881/1936 +f 3733/3490 3787/1204 3525/1203 +f 2475/2556 3070/3298 3558/3901 +f 2476/3290 2475/2556 3524/3913 +f 2223/2292 2476/3290 3523/3914 +f 3522/1159 3549/1590 2803/3412 +f 2037/3915 3563/2202 11/2191 +f 1959/1999 1958/2101 450/272 +f 2003/2065 3555/2128 3520/2127 +f 9/2190 10/2192 334/328 +f 2009/2072 2008/2071 3518/3916 +f 2008/2071 2973/3182 3536/3911 +f 3877/1202 672/3186 673/2296 +f 3515/1789 3550/3898 2778/2957 +f 2935/3917 3514/2120 3513/1695 +f 111/3492 1149/2264 3570/3890 +f 3510/3918 3514/2120 2935/3917 +f 683/2797 3876/1939 3885/1392 +f 3508/1624 3557/1507 2810/3882 +f 736/2821 733/398 732/2883 +f 1795/1629 1804/1638 1803/1708 +f 3505/987 3510/3918 2938/3138 +f 3507/958 3505/987 2939/3140 +f 2950/3155 3512/1173 3504/3919 +f 2940/3142 2949/3154 3504/3919 +f 3801/3156 3791/1174 3512/1173 +f 2868/3920 3509/993 3502/992 +f 2869/3349 3502/992 3522/1159 +f 1024/614 1034/605 1023/593 +f 3500/949 3503/2193 2948/3153 +f 551/1277 1939/3869 442/487 +f 2972/3181 3499/3921 3568/3894 +f 2032/2097 3533/1930 3499/3921 +f 216/138 93/70 1924/1781 +f 203/236 202/135 201/137 +f 1828/1677 1848/1676 1847/1694 +f 1958/2101 1957/2870 449/273 +f 272/350 271/1003 178/2199 +f 2871/3762 3545/1579 3495/1439 +f 3619/3907 3669/3906 3564/2811 +f 2801/2982 2768/2951 3643/3101 +f 2005/2213 3758/1776 3654/1775 +f 960/3494 886/485 899/2835 +f 3657/3655 3655/3512 3612/3514 +f 3780/3922 3788/3427 3646/3424 +f 3151/3426 3646/3424 3593/2650 +f 2193/2250 3604/3421 3623/2937 +f 3607/3409 3603/3410 3145/3411 +f 3603/3410 3649/2936 2761/2933 +f 3592/3350 3607/3409 3144/3923 +f 2926/3126 3658/3127 3634/3351 +f 3634/3351 3592/3350 3104/3352 +f 3755/3310 3754/812 3746/3924 +f 566/1320 567/1319 3758/1776 +f 3488/3863 3656/3605 1864/950 +f 1602/1399 1586/1389 2079/2005 +f 2868/3920 3658/3127 3611/3125 +f 2922/3293 3611/3125 3610/3124 +f 2923/3893 3610/3124 3618/3116 +f 3670/3117 3613/3113 2918/3114 +f 3619/3907 3670/3117 3614/3115 +f 2921/3892 3618/3116 3670/3117 +f 2920/3905 3614/3115 3586/2308 +f 3613/3113 3642/3110 2915/3108 +f 2917/3121 2766/2946 3622/2947 +f 2916/3123 2917/3121 3641/3111 +f 2239/3886 3587/2310 3652/3106 +f 2911/3925 3640/3100 3652/3106 +f 3643/3101 3640/3100 2911/3925 +f 2797/2980 2801/2982 3662/3883 +f 2797/2980 3591/2977 3598/2927 +f 3622/2947 3628/2943 2763/2941 +f 3608/2929 3628/2943 2764/3347 +f 2194/3896 3623/2937 3648/2935 +f 2221/2291 3648/2935 3649/2936 +f 2756/2942 3608/2929 3609/2928 +f 2750/2975 3598/2927 3609/2928 +f 3669/3906 3535/3904 1817/2135 +f 3763/2780 3770/1647 1691/3926 +f 2515/2601 3621/2610 3590/2649 +f 2548/2639 3590/2649 3593/2650 +f 2513/2599 2489/2572 3632/2611 +f 2513/2599 3616/2609 3621/2610 +f 2509/2595 3633/2592 3620/2378 +f 2510/2604 3632/2611 3633/2592 +f 2289/2593 3620/2378 3639/2371 +f 2283/2376 3639/2371 3617/2370 +f 2240/3888 3586/2308 3587/2310 +f 3659/3609 3657/3655 3624/3664 +f 716/377 2121/2089 3887/2086 +f 727/375 1999/1869 3761/2110 +f 2165/3515 2171/2108 2170/2107 +f 3660/807 3659/3609 3626/3591 +f 3743/2042 3581/3583 3578/1774 +f 
2175/3580 2172/2106 769/419 +f 2100/630 500/304 2095/2007 +f 3758/1776 567/1319 3757/2040 +f 640/1452 3560/1736 3568/3927 +f 1653/1644 3576/3930 3553/1460 +f 1895/2270 3536/3942 3530/3928 +f 746/1283 3530/3928 3537/3929 +f 1653/1644 1810/1169 3528/1171 +f 3762/1609 1603/3139 1604/1398 +f 2093/1754 2075/1753 2073/1977 +f 959/517 996/516 995/1751 +f 962/491 899/2835 919/527 +f 1400/749 1401/2337 1398/747 +f 1000/1750 817/444 1127/551 +f 956/3185 959/517 884/462 +f 1662/1674 3548/2881 3550/3898 +f 2034/1915 3558/3941 3546/3931 +f 3926/2955 1689/3952 3539/3902 +f 3535/3904 3572/3887 766/2136 +f 3572/3887 3574/1170 765/2091 +f 334/328 3570/3939 3571/3933 +f 1868/1766 3531/1765 3540/1492 +f 1684/3934 1651/1459 1652/1508 +f 1682/1806 1683/1805 775/423 +f 1585/1001 1603/3139 3762/1609 +f 3763/2780 3762/1609 1672/1478 +f 2173/853 2174/1900 490/1457 +f 1658/825 3544/827 3547/1640 +f 3556/3129 3543/819 1650/2184 +f 1571/813 3754/812 2010/2801 +f 1587/1388 3664/2256 3671/2258 +f 2099/2011 2095/2007 2077/1982 +f 787/1780 3761/2110 1999/1869 +f 2000/1868 3694/3622 786/1499 +f 3155/3935 339/512 3697/2678 +f 3713/3439 3155/3935 3683/3448 +f 2180/2218 2179/2217 3685/3936 +f 3496/1664 2180/2218 3684/3434 +f 1684/3934 3846/1017 3840/1019 +f 1760/2207 464/1311 3495/1439 +f 2607/2705 3689/2706 3693/2686 +f 2581/2673 3676/2679 3689/2706 +f 2371/2458 2502/2693 3700/2585 +f 2371/2458 3695/2687 3693/2686 +f 2587/3314 3676/2679 3675/2680 +f 2586/3313 3675/2680 3697/2678 +f 2497/3172 3691/2579 3687/2586 +f 2502/2693 2503/2692 3687/2586 +f 2494/2576 3691/2579 3690/2578 +f 3690/2578 3688/2555 2472/2552 +f 3688/2555 3701/2548 2468/2549 +f 3701/2548 3703/2547 2469/3937 +f 726/397 3908/2028 3892/2049 +f 581/383 3567/1120 3518/3938 +f 10/2192 3511/5093 3570/3939 +f 1837/2119 3514/2120 3510/3918 +f 741/1282 3537/3929 3523/3940 +f 1299/636 3524/1208 3558/3941 +f 1851/959 3507/958 3504/3919 +f 3527/2826 3565/1770 1820/1772 +f 580/343 3518/3938 3536/3942 +f 1758/3871 1685/1160 3522/1159 +f 1693/1016 3557/1507 3508/1624 +f 3504/3919 3512/1173 1867/1709 +f 1894/1733 1826/2773 3568/3927 +f 2126/2259 1418/780 3813/782 +f 3906/2260 2126/2259 3841/2114 +f 2113/2039 3911/2043 3867/2045 +f 2179/2217 3713/3439 3789/3447 +f 624/360 3903/2050 3904/2062 +f 2192/2248 3604/3421 3788/3427 +f 2177/2216 3541/1663 3787/1204 +f 1364/2125 3824/690 3833/681 +f 3858/923 3855/910 1470/3943 +f 3135/3389 1753/1803 3775/3387 +f 2069/2055 3812/3465 3835/3867 +f 1351/879 3861/670 3853/671 +f 2108/3944 3882/1762 3901/1764 +f 2089/2145 2092/2144 3736/3442 +f 3828/684 3834/680 1358/683 +f 2001/2064 3749/3483 3783/3485 +f 2187/2244 3783/3485 3719/3436 +f 2130/2276 3756/3478 3752/3482 +f 2004/2066 3752/3482 3749/3483 +f 2014/2074 3724/3473 3779/3477 +f 2201/2262 3779/3477 3756/3478 +f 3167/3470 3720/3474 3759/3177 +f 3168/3472 3724/3473 3720/3474 +f 3159/3454 3709/3453 3764/3456 +f 3160/3457 3764/3456 3765/3321 +f 3730/3455 3707/3452 3158/3945 +f 3158/3945 3707/3452 3709/3453 +f 2200/2254 3069/3294 3730/3455 +f 3685/3936 3789/3447 3742/3417 +f 3722/3445 3774/3444 2197/2255 +f 3790/3443 3736/3442 2092/2144 +f 3774/3444 3790/3443 3721/3437 +f 3154/3446 3706/3441 3790/3443 +f 3155/3935 3713/3439 3706/3441 +f 2091/3489 3719/3436 3721/3437 +f 2196/2252 3148/3432 3750/3416 +f 2193/2250 2196/2252 3776/3431 +f 3604/3421 2193/2250 3781/3430 +f 2086/3946 2192/2248 3780/3922 +f 2087/2141 2086/3946 3778/3425 +f 2204/3435 3742/3417 3751/3415 +f 2205/2268 3751/3415 3750/3416 +f 3909/2181 3886/2165 574/391 +f 2124/2173 2087/2141 3777/3422 +f 2208/2272 2124/2173 3760/3403 
+f 3711/3390 3785/3398 3140/3399 +f 3785/3398 3745/3397 3138/3395 +f 3745/3397 3744/3396 3139/3878 +f 3771/3394 3712/3391 3136/3393 +f 3712/3391 3711/3390 3137/3392 +f 3775/3387 3767/3384 3131/3385 +f 3767/3384 3782/3383 3132/3400 +f 1813/3880 3839/1650 3838/1653 +f 1293/661 3854/1793 3856/666 +f 2184/2220 2321/3355 3748/2405 +f 2181/2221 2184/2220 3732/3324 +f 3765/3321 3768/3318 3079/3319 +f 3768/3318 3734/3317 3080/3323 +f 709/1338 3873/2167 3875/2129 +f 2774/2954 2961/3166 3737/3165 +f 3786/3292 3769/3291 2134/2179 +f 3809/413 3808/412 766/2136 +f 2968/3174 3784/3179 3766/3164 +f 2969/3176 3759/3177 3784/3179 +f 2958/3168 3747/3162 3737/3165 +f 2132/3433 3747/3162 3766/3164 +f 2321/3355 2185/2223 3771/3394 +f 3860/849 3848/850 1449/2113 +f 3905/2157 3874/2153 582/1461 +f 3886/2165 3889/2164 577/3099 +f 3847/1659 3826/1658 1818/1771 +f 705/2159 3902/2084 3887/2086 +f 3816/3662 3912/2067 3907/2069 +f 2111/2124 3883/2123 3911/2043 +f 3810/781 3829/691 1366/2126 +f 1812/1654 3839/1650 3822/1652 +f 675/1662 3851/2093 3852/2094 +f 3496/1664 3705/3119 3851/2093 +f 2110/2038 3867/2045 3868/2033 +f 2104/2026 3896/2029 3901/1764 +f 2109/2031 3868/2033 3899/1394 +f 3842/1427 3831/877 703/390 +f 1680/1844 3811/1488 3846/1017 +f 1639/1435 1627/1409 3819/1411 +f 623/361 3904/2062 3906/2260 +f 2139/2130 2140/2013 3866/2015 +f 1808/1645 1640/1642 3844/2092 +f 3820/1669 3821/1668 1819/2825 +f 3872/2087 3914/631 3900/633 +f 1646/1434 3832/1443 3830/1445 +f 3889/2164 3890/2158 633/2170 +f 2139/2130 3865/2122 3875/2129 +f 2156/2152 2155/2151 3871/2166 +f 674/3187 3852/2094 3913/2104 +f 2117/887 3817/886 3812/3465 +f 1815/1661 3822/1652 3847/1659 +f 3855/910 3849/909 1444/846 +f 697/1233 3906/2260 3912/2067 +f 866/682 842/1433 3836/603 +f 3864/2180 3869/2134 2136/2131 +f 3585/3654 3731/3651 3726/3650 +f 3583/3581 3740/3663 3731/3651 +f 3753/2041 3746/3924 3738/3656 +f 3740/3663 3583/3581 3581/3583 +f 1088/1379 3738/3656 3746/3924 +f 3739/3666 3740/3663 3743/2042 +f 2791/2972 3492/3884 3493/3948 +f 3492/3884 2808/3885 2807/2984 +f 2792/2970 3493/3948 3494/3949 +f 3493/3948 2807/2984 2806/3903 +f 3508/1624 2909/3881 3494/3949 +f 3516/3950 3494/3949 2806/3903 +f 2948/3153 3503/2193 3791/1174 +f 2105/2046 3908/2028 3896/2029 +f 3538/1577 3802/3932 3804/2983 +f 680/1417 3878/1937 3876/1939 +f 1891/2175 1890/1740 3910/1730 +f 2905/3096 3799/3098 3801/3156 +f 2900/3089 3797/3090 3799/3098 +f 2901/3092 3797/3090 3807/3091 +f 2734/2907 2878/3065 3806/3067 +f 2877/3093 3807/3091 3806/3067 +f 2820/3002 3800/3951 3794/3060 +f 2818/3054 3796/2999 3792/3012 +f 2823/3008 3793/3004 3792/3012 +f 2822/3017 3793/3004 3798/3001 +f 2819/3005 2735/3007 3800/3951 +f 2817/3874 3796/2999 3795/2998 +f 2809/2993 3805/2986 3795/2998 +f 2808/3885 3805/2986 3803/2985 +f 2805/2992 3804/2983 3803/2985 +f 1702/1513 3921/1509 3923/1511 +f 3922/1627 3920/1632 1797/1636 +f 3915/1788 3916/1512 1706/1514 +f 3918/1520 3927/1519 1692/1505 +f 1657/1917 3898/2757 3880/1728 +f 1890/1740 1877/1723 3895/1617 +f 3772/1648 3770/1647 1674/1483 +f 1689/3952 1690/1646 3772/1648 +f 3926/2955 3802/3932 3538/1577 +f 3516/3950 3539/3902 1689/3952 +f 3917/1606 3923/1511 1703/1510 +f 1598/1396 3917/1606 3928/1602 +f 3880/1728 3884/1618 1876/1724 +f 1691/3926 1690/1646 3925/2956 +f 3925/2956 3926/2955 1688/3953 +f 3934/3959 3933/3965 3937/3960 +f 3934/3959 3938/3961 3939/3962 +f 3932/3964 3936/4074 3937/3960 +f 3938/3961 3942/3969 3943/3966 +f 3937/3960 3936/4074 3940/3967 +f 3937/3960 3941/3968 3942/3969 +f 3942/3969 3946/3973 3947/3970 +f 3941/3968 3940/3967 
3944/3971 +f 3942/3969 3941/3968 3945/3972 +f 3947/3970 3946/3973 3950/3974 +f 3944/3971 3948/4075 3949/3976 +f 3945/3972 3949/3976 3950/3974 +f 3950/3974 3954/3980 3955/3977 +f 3949/3976 3948/4075 3952/3978 +f 3950/3974 3949/3976 3953/3979 +f 3955/3977 3954/3980 3958/3981 +f 3953/3979 3952/3978 3956/3983 +f 3953/3979 3957/3984 3958/3981 +f 3959/3982 3958/3981 3962/3985 +f 3957/3984 3956/3983 3960/3987 +f 3958/3981 3957/3984 3961/3988 +f 3962/3985 3966/3991 3967/3989 +f 3960/3987 3964/4076 3965/3990 +f 3961/3988 3965/3990 3966/3991 +f 3966/3991 3970/3995 3971/3992 +f 3965/3990 3964/4076 3968/3993 +f 3966/3991 3965/3990 3969/3994 +f 3970/3995 3974/3998 3975/3996 +f 3968/3993 3972/4077 3973/3997 +f 3969/3994 3973/3997 3974/3998 +f 3974/3998 3978/4002 3979/3999 +f 3973/3997 3972/4077 3976/4000 +f 3974/3998 3973/3997 3977/4001 +f 3979/3999 3978/4002 3982/4003 +f 3976/4000 3980/4078 3981/4005 +f 3977/4001 3981/4005 3982/4003 +f 3982/4003 3986/4009 3987/4006 +f 3981/4005 3980/4078 3984/4007 +f 3982/4003 3981/4005 3985/4008 +f 3987/4006 3986/4009 3990/4010 +f 3984/4007 3988/4079 3989/4012 +f 3985/4008 3989/4012 3990/4010 +f 3991/4011 3990/4010 3994/4013 +f 3989/4012 3988/4079 3992/4015 +f 3990/4010 3989/4012 3993/4016 +f 3995/4014 3994/4013 3998/4017 +f 3992/4015 3996/4080 3997/4019 +f 3993/4016 3997/4019 3998/4017 +f 3998/4017 4002/4023 4003/4020 +f 3997/4019 3996/4080 4000/4021 +f 3998/4017 3997/4019 4001/4022 +f 4002/4023 4006/4026 4007/4024 +f 4000/4021 4004/4081 4005/4025 +f 4001/4022 4005/4025 4006/4026 +f 4006/4026 4010/4030 4011/4027 +f 4005/4025 4004/4081 4008/4028 +f 4006/4026 4005/4025 4009/4029 +f 4011/4027 4010/4030 4014/4031 +f 4008/4028 4012/4082 4013/4033 +f 4009/4029 4013/4033 4014/4031 +f 4014/4031 4018/4037 4019/4034 +f 4013/4033 4012/4082 4016/4035 +f 4014/4031 4013/4033 4017/4036 +f 4019/4034 4018/4037 4022/4038 +f 4016/4035 4020/4083 4021/4040 +f 4017/4036 4021/4040 4022/4038 +f 4023/4039 4022/4038 4026/4041 +f 4021/4040 4020/4083 4024/4043 +f 4022/4038 4021/4040 4025/4044 +f 4027/4042 4026/4041 4030/4045 +f 4024/4043 4028/4084 4029/4047 +f 4025/4044 4029/4047 4030/4045 +f 4031/4046 4030/4045 4034/4048 +f 4029/4047 4028/4084 4032/4050 +f 4030/4045 4029/4047 4033/4051 +f 4034/4048 4038/4054 4039/4052 +f 4032/4050 4036/4085 4037/4053 +f 4033/4051 4037/4053 4038/4054 +f 4038/4054 4042/4058 4043/4055 +f 4037/4053 4036/4085 4040/4056 +f 4038/4054 4037/4053 4041/4057 +f 4043/4055 4042/4058 4046/4059 +f 4040/4056 4044/4086 4045/4061 +f 4041/4057 4045/4061 4046/4059 +f 4046/4059 4050/4065 4051/4062 +f 4045/4061 4044/4086 4048/4063 +f 4046/4059 4045/4061 4049/4064 +f 4051/4062 4050/4065 4055/4066 +f 4048/4063 4053/4071 4054/4068 +f 4049/4064 4054/4068 4055/4066 +f 4056/4067 4055/4066 4059/4069 +f 4053/4071 4057/4087 4058/4072 +f 4055/4066 4054/4068 4058/4072 +f 4060/4070 4059/4069 3934/3959 +f 4058/4072 4057/4087 3932/3964 +f 4058/4072 3933/3965 3934/3959 +f 4069/4088 4068/4105 4081/4089 +f 4066/4091 4079/4122 4080/4092 +f 4065/4094 4064/4108 4077/4095 +f 4063/4097 4062/4110 4075/4098 +f 4071/4100 4084/4113 4085/4101 +f 4069/4088 4082/4090 4083/4103 +f 4067/4093 4080/4092 4081/4089 +f 4066/4106 4065/4094 4078/4096 +f 4063/4097 4076/4099 4077/4095 +f 4061/4109 4074/4115 4075/4098 +f 4072/4102 4085/4101 4086/4111 +f 4071/4100 4070/4104 4083/4103 +f 4076/4099 4089/4125 4090/4114 +f 4074/4115 4087/4525 4088/4116 +f 4086/4111 4085/4101 4098/4117 +f 4083/4103 4096/4126 4097/4119 +f 4082/4090 4081/4089 4094/4120 +f 4079/4122 4092/4135 4093/4123 +f 4078/4096 4077/4095 4090/4114 +f 4076/4099 
4075/4098 4088/4116 +f 4085/4101 4084/4113 4097/4119 +f 4082/4090 4095/4121 4096/4126 +f 4080/4092 4093/4123 4094/4120 +f 4079/4107 4078/4096 4091/4124 +f 4089/4125 4102/4138 4103/4128 +f 4088/4116 4087/4525 4100/4129 +f 4098/4117 4111/4139 4112/4131 +f 4096/4126 4109/4140 4110/4132 +f 4095/4121 4094/4120 4107/4133 +f 4092/4135 4105/4150 4106/4136 +f 4090/4114 4103/4128 4104/4137 +f 4089/4125 4088/4116 4101/4130 +f 4097/4119 4110/4132 4111/4139 +f 4095/4121 4108/4134 4109/4140 +f 4094/4120 4093/4123 4106/4136 +f 4092/4127 4091/4124 4104/4137 +f 4103/4128 4102/4138 4115/4142 +f 4100/4129 4113/5097 4114/4144 +f 4112/4131 4111/4139 4124/4145 +f 4110/4132 4109/4140 4122/4147 +f 4107/4133 4120/4153 4121/4149 +f 4105/4150 4118/5098 4119/4151 +f 4103/4128 4116/4143 4117/4152 +f 4101/4130 4114/4144 4115/4142 +f 4110/4132 4123/4148 4124/4145 +f 4109/4140 4108/4134 4121/4149 +f 4107/4133 4106/4136 4119/4151 +f 4105/4141 4104/4137 4117/4152 +f 4115/4142 4128/4165 4129/4155 +f 4114/4144 4113/5097 4126/4156 +f 4124/4145 4137/4166 4138/4158 +f 4123/4148 4122/4147 4135/4159 +f 4120/4153 4133/4167 4134/4161 +f 4119/4151 4118/5098 4131/4162 +f 4117/4152 4116/4143 4129/4155 +f 4114/4144 4127/4157 4128/4165 +f 4124/4145 4123/4148 4136/4160 +f 4122/4147 4121/4149 4134/4161 +f 4119/4151 4132/4163 4133/4167 +f 4117/4152 4130/4164 4131/4168 +f 4129/4155 4128/4165 4141/4169 +f 4126/4156 4139/4524 4140/4171 +f 4138/4158 4137/4166 4150/4172 +f 4136/4160 4135/4159 4148/4174 +f 4133/4167 4146/4179 4147/4176 +f 4131/4162 4144/4189 4145/4177 +f 4130/4164 4129/4155 4142/4170 +f 4127/4157 4140/4171 4141/4169 +f 4137/4166 4136/4160 4149/4175 +f 4135/4159 4134/4161 4147/4176 +f 4132/4163 4145/4177 4146/4179 +f 4131/4168 4130/4164 4143/4178 +f 4142/4170 4141/4169 4154/4181 +f 4140/4171 4139/4524 4152/4183 +f 4150/4172 4163/4192 4164/4185 +f 4149/4175 4148/4174 4161/4186 +f 4146/4179 4159/4193 4160/4188 +f 4144/4189 4157/5099 4158/4190 +f 4142/4170 4155/4182 4156/4191 +f 4140/4171 4153/4184 4154/4181 +f 4149/4175 4162/4187 4163/4192 +f 4148/4174 4147/4176 4160/4188 +f 4146/4179 4145/4177 4158/4190 +f 4144/4180 4143/4178 4156/4191 +f 4155/4182 4154/4181 4167/4195 +f 4153/4184 4152/4183 4165/4197 +f 4163/4192 4176/4205 4177/4199 +f 4161/4186 4174/4206 4175/4200 +f 4159/4193 4172/4207 4173/4201 +f 4158/4190 4157/5099 4170/4202 +f 4155/4182 4168/4196 4169/4204 +f 4153/4184 4166/4198 4167/4195 +f 4163/4192 4162/4187 4175/4200 +f 4161/4186 4160/4188 4173/4201 +f 4159/4193 4158/4190 4171/4203 +f 4156/4191 4169/4204 4170/4208 +f 4167/4195 4180/4218 4181/4209 +f 4165/4197 4178/4222 4179/4210 +f 4176/4205 4189/4219 4190/4211 +f 4175/4200 4174/4206 4187/4212 +f 4173/4201 4172/4207 4185/4214 +f 4170/4202 4183/4229 4184/4216 +f 4169/4204 4168/4196 4181/4209 +f 4167/4195 4166/4198 4179/4210 +f 4175/4200 4188/4213 4189/4219 +f 4173/4201 4186/4215 4187/4212 +f 4171/4203 4184/4216 4185/4214 +f 4170/4208 4169/4204 4182/4217 +f 4180/4218 4193/4232 4194/4221 +f 4178/4222 4191/4523 4192/4223 +f 4190/4211 4189/4219 4202/4224 +f 4187/4212 4200/4233 4201/4226 +f 4186/4215 4185/4214 4198/4227 +f 4183/4229 4196/4242 4197/4230 +f 4182/4217 4181/4209 4194/4221 +f 4180/4218 4179/4210 4192/4223 +f 4189/4219 4188/4213 4201/4226 +f 4186/4215 4199/4228 4200/4233 +f 4184/4216 4197/4230 4198/4227 +f 4183/4220 4182/4217 4195/4231 +f 4193/4232 4206/4245 4207/4235 +f 4192/4223 4191/4523 4204/4236 +f 4202/4224 4215/4246 4216/4238 +f 4200/4233 4213/4247 4214/4239 +f 4199/4228 4198/4227 4211/4240 +f 4196/4242 4209/4256 4210/4243 +f 4194/4221 4207/4235 4208/4244 +f 
4193/4232 4192/4223 4205/4237 +f 4201/4226 4214/4239 4215/4246 +f 4199/4228 4212/4241 4213/4247 +f 4198/4227 4197/4230 4210/4243 +f 4196/4234 4195/4231 4208/4244 +f 4207/4235 4206/4245 4219/4249 +f 4204/4236 4217/5100 4218/4251 +f 4216/4238 4215/4246 4228/4252 +f 4213/4247 4226/4259 4227/4254 +f 4211/4240 4224/4260 4225/4255 +f 4209/4256 4222/5101 4223/4257 +f 4207/4235 4220/4250 4221/4258 +f 4205/4237 4218/4251 4219/4249 +f 4214/4239 4227/4254 4228/4252 +f 4213/4247 4212/4241 4225/4255 +f 4211/4240 4210/4243 4223/4257 +f 4209/4248 4208/4244 4221/4258 +f 4219/4249 4232/4272 4233/4262 +f 4218/4251 4217/5100 4230/4263 +f 4228/4252 4241/4273 4242/4265 +f 4226/4259 4239/4274 4240/4266 +f 4225/4255 4224/4260 4237/4267 +f 4223/4257 4222/5101 4235/4269 +f 4221/4258 4220/4250 4233/4262 +f 4218/4251 4231/4264 4232/4272 +f 4228/4252 4227/4254 4240/4266 +f 4226/4259 4225/4255 4238/4268 +f 4223/4257 4236/4270 4237/4267 +f 4221/4258 4234/4271 4235/4275 +f 4233/4262 4232/4272 4245/4276 +f 4230/4263 4243/4522 4244/4278 +f 4242/4265 4241/4273 4254/4279 +f 4240/4266 4239/4274 4252/4281 +f 4237/4267 4250/4286 4251/4283 +f 4235/4269 4248/4296 4249/4284 +f 4234/4271 4233/4262 4246/4277 +f 4231/4264 4244/4278 4245/4276 +f 4241/4273 4240/4266 4253/4282 +f 4239/4274 4238/4268 4251/4283 +f 4236/4270 4249/4284 4250/4286 +f 4235/4275 4234/4271 4247/4285 +f 4246/4277 4245/4276 4258/4288 +f 4244/4278 4243/4522 4256/4290 +f 4254/4279 4267/4299 4268/4292 +f 4253/4282 4252/4281 4265/4293 +f 4250/4286 4263/4300 4264/4295 +f 4248/4296 4261/5102 4262/4297 +f 4246/4277 4259/4289 4260/4298 +f 4244/4278 4257/4291 4258/4288 +f 4253/4282 4266/4294 4267/4299 +f 4252/4281 4251/4283 4264/4295 +f 4250/4286 4249/4284 4262/4297 +f 4248/4287 4247/4285 4260/4298 +f 4259/4289 4258/4288 4271/4302 +f 4257/4291 4256/4290 4269/4304 +f 4267/4299 4280/4312 4281/4306 +f 4265/4293 4278/4313 4279/4307 +f 4263/4300 4276/4314 4277/4308 +f 4262/4297 4261/5102 4274/4309 +f 4259/4289 4272/4303 4273/4311 +f 4257/4291 4270/4305 4271/4302 +f 4266/4294 4279/4307 4280/4312 +f 4265/4293 4264/4295 4277/4308 +f 4263/4300 4262/4297 4275/4310 +f 4260/4298 4273/4311 4274/4315 +f 4271/4302 4284/4326 4285/4316 +f 4269/4304 4282/4329 4283/4317 +f 4281/4306 4280/4312 4293/4318 +f 4279/4307 4278/4313 4291/4320 +f 4277/4308 4276/4314 4289/4322 +f 4274/4309 4287/4336 4288/4324 +f 4273/4311 4272/4303 4285/4316 +f 4271/4302 4270/4305 4283/4317 +f 4280/4312 4279/4307 4292/4321 +f 4277/4308 4290/4323 4291/4320 +f 4275/4310 4288/4324 4289/4322 +f 4274/4315 4273/4311 4286/4325 +f 4284/4326 4297/4339 4298/4328 +f 4282/4329 4295/4521 4296/4330 +f 4294/4319 4293/4318 4306/4331 +f 4291/4320 4304/4340 4305/4333 +f 4290/4323 4289/4322 4302/4334 +f 4287/4336 4300/4349 4301/4337 +f 4286/4325 4285/4316 4298/4328 +f 4284/4326 4283/4317 4296/4330 +f 4293/4318 4292/4321 4305/4333 +f 4290/4323 4303/4335 4304/4340 +f 4288/4324 4301/4337 4302/4334 +f 4287/4327 4286/4325 4299/4338 +f 4297/4339 4310/4352 4311/4342 +f 4296/4330 4295/4521 4308/4343 +f 4306/4331 4319/4353 4320/4345 +f 4304/4340 4317/4354 4318/4346 +f 4303/4335 4302/4334 4315/4347 +f 4300/4349 4313/4364 4314/4350 +f 4298/4328 4311/4342 4312/4351 +f 4297/4339 4296/4330 4309/4344 +f 4305/4333 4318/4346 4319/4353 +f 4304/4340 4303/4335 4316/4348 +f 4302/4334 4301/4337 4314/4350 +f 4300/4341 4299/4338 4312/4351 +f 4310/4352 4323/4367 4324/4356 +f 4308/4343 4321/5103 4322/4357 +f 4320/4345 4319/4353 4332/4358 +f 4318/4346 4317/4354 4330/4360 +f 4316/4348 4315/4347 4328/4362 +f 4313/4364 4326/5104 4327/4365 +f 4311/4342 4324/4356 
4325/4366 +f 4310/4352 4309/4344 4322/4357 +f 4318/4346 4331/4361 4332/4358 +f 4316/4348 4329/4363 4330/4360 +f 4315/4347 4314/4350 4327/4365 +f 4312/4351 4325/4366 4326/4368 +f 4323/4367 4336/4379 4337/4369 +f 4322/4357 4321/5103 4334/4370 +f 4332/4358 4345/4380 4346/4372 +f 4330/4360 4343/4381 4344/4373 +f 4329/4363 4328/4362 4341/4374 +f 4327/4365 4326/5104 4339/4376 +f 4325/4366 4324/4356 4337/4369 +f 4322/4357 4335/4371 4336/4379 +f 4332/4358 4331/4361 4344/4373 +f 4330/4360 4329/4363 4342/4375 +f 4327/4365 4340/4377 4341/4374 +f 4325/4366 4338/4378 4339/4382 +f 4337/4369 4336/4379 4350/4383 +f 4334/4370 4348/4520 4349/4385 +f 4346/4372 4345/4380 4359/4386 +f 4344/4373 4343/4381 4357/4388 +f 4341/4374 4355/4394 4356/4390 +f 4340/4377 4339/4376 4353/4391 +f 4338/4378 4337/4369 4351/4384 +f 4335/4371 4349/4385 4350/4383 +f 4345/4380 4344/4373 4358/4389 +f 4343/4381 4342/4375 4356/4390 +f 4340/4377 4354/4392 4355/4394 +f 4338/4378 4352/4393 4353/4395 +f 4351/4384 4350/4383 4363/4396 +f 4349/4385 4348/4520 4361/4398 +f 4359/4386 4372/4407 4373/4400 +f 4358/4389 4357/4388 4370/4401 +f 4355/4394 4368/4408 4369/4403 +f 4354/4392 4353/4391 4366/4404 +f 4351/4384 4364/4397 4365/4406 +f 4349/4385 4362/4399 4363/4396 +f 4358/4389 4371/4402 4372/4407 +f 4357/4388 4356/4390 4369/4403 +f 4355/4394 4354/4392 4367/4405 +f 4352/4393 4365/4406 4366/4409 +f 4364/4397 4363/4396 4376/4410 +f 4362/4399 4361/4398 4374/4412 +f 4373/4400 4372/4407 4385/4414 +f 4370/4401 4383/4421 4384/4416 +f 4368/4408 4381/4422 4382/4417 +f 4367/4405 4366/4404 4379/4418 +f 4364/4397 4377/4411 4378/4420 +f 4362/4399 4375/4413 4376/4410 +f 4372/4407 4371/4402 4384/4416 +f 4370/4401 4369/4403 4382/4417 +f 4368/4408 4367/4405 4380/4419 +f 4365/4406 4378/4420 4379/4423 +f 4376/4410 4389/4433 4390/4424 +f 4374/4412 4387/4437 4388/4425 +f 4385/4414 4398/4434 4399/4426 +f 4384/4416 4383/4421 4396/4427 +f 4382/4417 4381/4422 4394/4429 +f 4379/4418 4392/5105 4393/4431 +f 4378/4420 4377/4411 4390/4424 +f 4376/4410 4375/4413 4388/4425 +f 4384/4416 4397/4428 4398/4434 +f 4382/4417 4395/4430 4396/4427 +f 4380/4419 4393/4431 4394/4429 +f 4379/4423 4378/4420 4391/4432 +f 4389/4433 4402/4447 4403/4436 +f 4387/4437 4400/4519 4401/4438 +f 4399/4426 4398/4434 4411/4439 +f 4396/4427 4409/4448 4410/4441 +f 4395/4430 4394/4429 4407/4442 +f 4393/4431 4392/5105 4405/4444 +f 4391/4432 4390/4424 4403/4436 +f 4389/4433 4388/4425 4401/4438 +f 4398/4434 4397/4428 4410/4441 +f 4395/4430 4408/4443 4409/4448 +f 4393/4431 4406/4445 4407/4442 +f 4391/4432 4404/4446 4405/4449 +f 4402/4447 4415/4460 4416/4450 +f 4401/4438 4400/4519 4413/4451 +f 4411/4439 4424/4461 4425/4453 +f 4409/4448 4422/4462 4423/4454 +f 4408/4443 4407/4442 4420/4455 +f 4406/4445 4405/4444 4418/4457 +f 4403/4436 4416/4450 4417/4459 +f 4402/4447 4401/4438 4414/4452 +f 4410/4441 4423/4454 4424/4461 +f 4408/4443 4421/4456 4422/4462 +f 4407/4442 4406/4445 4419/4458 +f 4404/4446 4417/4459 4418/4463 +f 4416/4450 4415/4460 4428/4464 +f 4413/4451 4426/4477 4427/4466 +f 4425/4453 4424/4461 4437/4467 +f 4423/4454 4422/4462 4435/4469 +f 4421/4456 4420/4455 4433/4471 +f 4418/4457 4431/5106 4432/4473 +f 4416/4450 4429/4465 4430/4474 +f 4415/4460 4414/4452 4427/4466 +f 4423/4454 4436/4470 4437/4467 +f 4421/4456 4434/4472 4435/4469 +f 4420/4455 4419/4458 4432/4473 +f 4418/4463 4417/4459 4430/4474 +f 4428/4464 4441/4486 4442/4476 +f 4426/4477 4439/4492 4440/4478 +f 4437/4467 4450/4487 4451/4479 +f 4435/4469 4448/4488 4449/4480 +f 4434/4472 4433/4471 4446/4481 +f 4432/4473 4431/5106 4444/4483 +f 4430/4474 
4429/4465 4442/4476 +f 4427/4466 4440/4478 4441/4486 +f 4437/4467 4436/4470 4449/4480 +f 4435/4469 4434/4472 4447/4482 +f 4432/4473 4445/4484 4446/4481 +f 4430/4474 4443/4485 4444/4489 +f 4442/4476 4441/4486 4454/4490 +f 4439/4492 4452/4518 4453/4493 +f 4451/4479 4450/4487 4463/4494 +f 4449/4480 4448/4488 4461/4496 +f 4446/4481 4459/4501 4460/4498 +f 4444/4483 4457/4511 4458/4499 +f 4443/4485 4442/4476 4455/4491 +f 4440/4478 4453/4493 4454/4490 +f 4450/4487 4449/4480 4462/4497 +f 4447/4482 4460/4498 4461/4496 +f 4445/4484 4458/4499 4459/4501 +f 4444/4489 4443/4485 4456/4500 +f 4455/4491 4454/4490 4467/4503 +f 4453/4493 4452/4518 4465/4505 +f 4463/4494 4476/4514 4477/4507 +f 4462/4497 4461/4496 4474/4508 +f 4459/4501 4472/4515 4473/4510 +f 4457/4511 4470/5107 4471/4512 +f 4455/4491 4468/4504 4469/4513 +f 4453/4493 4466/4506 4467/4503 +f 4462/4497 4475/4509 4476/4514 +f 4461/4496 4460/4498 4473/4510 +f 4459/4501 4458/4499 4471/4512 +f 4457/4502 4456/4500 4469/4513 +f 4468/4504 4467/4503 4063/4097 +f 4466/4506 4465/4505 4061/4109 +f 4477/4507 4476/4514 4072/4102 +f 4474/4508 4070/4104 4071/4100 +f 4472/4515 4068/4105 4069/4088 +f 4471/4512 4470/5107 4066/4091 +f 4468/4504 4064/4108 4065/4094 +f 4466/4506 4062/4110 4063/4097 +f 4476/4514 4475/4509 4071/4100 +f 4474/4508 4473/4510 4069/4088 +f 4472/4515 4471/4512 4067/4093 +f 4469/4513 4065/4094 4066/4106 +f 3935/3963 4061/4109 4465/4505 +f 4465/4505 4452/4518 4056/4067 +f 4452/4518 4439/4492 4051/4062 +f 4439/4492 4426/4477 4047/4060 +f 4047/4060 4426/4477 4413/4451 +f 4043/4055 4413/4451 4400/4519 +f 4039/4052 4400/4519 4387/4437 +f 4387/4437 4374/4412 4031/4046 +f 4031/4046 4374/4412 4361/4398 +f 4361/4398 4348/4520 4023/4039 +f 4348/4520 4334/4370 4019/4034 +f 4334/4370 4321/5103 4015/4032 +f 4015/4032 4321/5103 4308/4343 +f 4011/4027 4308/4343 4295/4521 +f 4007/4024 4295/4521 4282/4329 +f 4282/4329 4269/4304 3999/4018 +f 3999/4018 4269/4304 4256/4290 +f 4256/4290 4243/4522 3991/4011 +f 4243/4522 4230/4263 3987/4006 +f 4230/4263 4217/5100 3983/4004 +f 3983/4004 4217/5100 4204/4236 +f 3979/3999 4204/4236 4191/4523 +f 3975/3996 4191/4523 4178/4222 +f 4178/4222 4165/4197 3967/3989 +f 3967/3989 4165/4197 4152/4183 +f 4152/4183 4139/4524 3959/3982 +f 4139/4524 4126/4156 3955/3977 +f 4126/4156 4113/5097 3951/3975 +f 3951/3975 4113/5097 4100/4129 +f 3947/3970 4100/4129 4087/4525 +f 3943/3966 4087/4525 4074/4115 +f 4074/4115 4061/4109 3935/3963 +f 4480/4526 4479/4531 4483/4527 +f 4480/4526 4484/4528 4485/4529 +f 4479/4531 4478/4640 4482/4532 +f 4484/4528 4488/4535 4489/4533 +f 4482/4532 4486/4642 4487/4534 +f 4483/4527 4487/4534 4488/4535 +f 4488/4535 4492/4539 4493/4536 +f 4487/4534 4486/4642 4490/4537 +f 4488/4535 4487/4534 4491/4538 +f 4493/4536 4492/4539 4496/4540 +f 4490/4537 4494/4643 4495/4542 +f 4491/4538 4495/4542 4496/4540 +f 4496/4540 4500/4546 4501/4543 +f 4495/4542 4494/4643 4498/4544 +f 4496/4540 4495/4542 4499/4545 +f 4501/4543 4500/4546 4504/4547 +f 4498/4544 4502/4644 4503/4549 +f 4499/4545 4503/4549 4504/4547 +f 4505/4548 4504/4547 4508/4550 +f 4503/4549 4502/4644 4506/4552 +f 4504/4547 4503/4549 4507/4553 +f 4509/4551 4508/4550 4512/4554 +f 4506/4552 4510/4645 4511/4556 +f 4507/4553 4511/4556 4512/4554 +f 4513/4555 4512/4554 4516/4557 +f 4511/4556 4510/4645 4514/4559 +f 4512/4554 4511/4556 4515/4560 +f 4516/4557 4520/4563 4521/4561 +f 4514/4559 4518/4565 4519/4562 +f 4515/4560 4519/4562 4520/4563 +f 4520/4563 4524/4567 4525/4564 +f 4518/4565 4522/4570 4523/4566 +f 4520/4563 4519/4562 4523/4566 +f 4525/4564 4524/4567 4528/4568 +f 
4522/4570 4526/4646 4527/4571 +f 4523/4566 4527/4571 4528/4568 +f 4528/4568 4532/4575 4533/4572 +f 4527/4571 4526/4646 4530/4573 +f 4528/4568 4527/4571 4531/4574 +f 4533/4572 4532/4575 4536/4576 +f 4530/4573 4534/4581 4535/4578 +f 4531/4574 4535/4578 4536/4576 +f 4537/4577 4536/4576 4540/4579 +f 4534/4581 4538/4647 4539/4582 +f 4536/4576 4535/4578 4539/4582 +f 4541/4580 4540/4579 4544/4583 +f 4539/4582 4538/4647 4542/4585 +f 4539/4582 4543/4586 4544/4583 +f 4544/4583 4548/4589 4549/4587 +f 4542/4585 4546/4648 4547/4588 +f 4544/4583 4543/4586 4547/4588 +f 4548/4589 4552/4593 4553/4590 +f 4547/4588 4546/4648 4550/4591 +f 4547/4588 4551/4592 4552/4593 +f 4552/4593 4556/4597 4557/4594 +f 4551/4592 4550/4591 4554/4595 +f 4552/4593 4551/4592 4555/4596 +f 4557/4594 4556/4597 4560/4598 +f 4554/4595 4558/4649 4559/4600 +f 4555/4596 4559/4600 4560/4598 +f 4560/4598 4564/4604 4565/4601 +f 4559/4600 4558/4649 4562/4602 +f 4560/4598 4559/4600 4563/4603 +f 4565/4601 4564/4604 4568/4605 +f 4562/4602 4566/4650 4567/4607 +f 4563/4603 4567/4607 4568/4605 +f 4569/4606 4568/4605 4572/4608 +f 4567/4607 4566/4650 4570/4610 +f 4568/4605 4567/4607 4571/4611 +f 4572/4608 4576/4614 4577/4612 +f 4570/4610 4574/4651 4575/4613 +f 4571/4611 4575/4613 4576/4614 +f 4576/4614 4580/4618 4581/4615 +f 4575/4613 4574/4651 4578/4616 +f 4576/4614 4575/4613 4579/4617 +f 4580/4618 4584/4621 4585/4619 +f 4578/4616 4582/4652 4583/4620 +f 4579/4617 4583/4620 4584/4621 +f 4584/4621 4588/4625 4589/4622 +f 4583/4620 4582/4652 4586/4623 +f 4584/4621 4583/4620 4587/4624 +f 4589/4622 4588/4625 4592/4626 +f 4586/4623 4590/4653 4591/4628 +f 4587/4624 4591/4628 4592/4626 +f 4592/4626 4596/4632 4597/4629 +f 4591/4628 4590/4653 4594/4630 +f 4592/4626 4591/4628 4595/4631 +f 4597/4629 4596/4632 4601/4633 +f 4594/4630 4599/4654 4600/4635 +f 4595/4631 4600/4635 4601/4633 +f 4602/4634 4601/4633 4605/4636 +f 4600/4635 4599/4654 4603/4638 +f 4601/4633 4600/4635 4604/4639 +f 4606/4637 4605/4636 4480/4526 +f 4603/4638 4478/4640 4479/4531 +f 4604/4639 4479/4531 4480/4526 +f 4615/4655 4614/4672 4627/4656 +f 4612/4658 4625/5108 4626/4659 +f 4611/4661 4610/4675 4623/4662 +f 4609/4664 4608/4677 4621/4665 +f 4618/4667 4617/4680 4630/4668 +f 4615/4655 4628/4657 4629/4670 +f 4613/4660 4626/4659 4627/4656 +f 4612/4673 4611/4661 4624/4663 +f 4609/4664 4622/4666 4623/4662 +f 4607/4676 4620/4682 4621/4665 +f 4619/4678 4618/4667 4631/4669 +f 4617/4680 4616/4671 4629/4670 +f 4622/4666 4635/4692 4636/4681 +f 4620/4682 4633/5092 4634/4683 +f 4632/4679 4631/4669 4644/4684 +f 4629/4670 4642/4693 4643/4686 +f 4628/4657 4627/4656 4640/4687 +f 4626/4659 4625/5108 4638/4689 +f 4624/4663 4623/4662 4636/4681 +f 4622/4666 4621/4665 4634/4683 +f 4631/4669 4630/4668 4643/4686 +f 4628/4657 4641/4688 4642/4693 +f 4626/4659 4639/4690 4640/4687 +f 4624/4663 4637/4691 4638/4694 +f 4635/4692 4648/4705 4649/4695 +f 4634/4683 4633/5092 4646/4696 +f 4644/4684 4657/4706 4658/4698 +f 4642/4693 4655/4707 4656/4699 +f 4641/4688 4640/4687 4653/4700 +f 4639/4690 4638/4689 4651/4702 +f 4636/4681 4649/4695 4650/4704 +f 4635/4692 4634/4683 4647/4697 +f 4643/4686 4656/4699 4657/4706 +f 4642/4693 4641/4688 4654/4701 +f 4640/4687 4639/4690 4652/4703 +f 4637/4691 4650/4704 4651/4708 +f 4648/4705 4661/4718 4662/4709 +f 4646/4696 4659/5109 4660/4710 +f 4658/4698 4657/4706 4670/4711 +f 4656/4699 4655/4707 4668/4713 +f 4653/4700 4666/4719 4667/4715 +f 4651/4702 4664/5110 4665/4716 +f 4649/4695 4662/4709 4663/4717 +f 4648/4705 4647/4697 4660/4710 +f 4656/4699 4669/4714 4670/4711 +f 4654/4701 4667/4715 
4668/4713 +f 4653/4700 4652/4703 4665/4716 +f 4651/4708 4650/4704 4663/4717 +f 4661/4718 4674/4731 4675/4721 +f 4660/4710 4659/5109 4672/4722 +f 4670/4711 4683/4732 4684/4724 +f 4668/4713 4681/4733 4682/4725 +f 4667/4715 4666/4719 4679/4726 +f 4665/4716 4664/5110 4677/4728 +f 4663/4717 4662/4709 4675/4721 +f 4661/4718 4660/4710 4673/4723 +f 4670/4711 4669/4714 4682/4725 +f 4667/4715 4680/4727 4681/4733 +f 4665/4716 4678/4729 4679/4726 +f 4663/4717 4676/4730 4677/4734 +f 4675/4721 4674/4731 4687/4735 +f 4672/4722 4685/5091 4686/4737 +f 4684/4724 4683/4732 4696/4738 +f 4682/4725 4681/4733 4694/4740 +f 4679/4726 4692/4746 4693/4742 +f 4678/4729 4677/4728 4690/4743 +f 4676/4730 4675/4721 4688/4736 +f 4673/4723 4686/4737 4687/4735 +f 4683/4732 4682/4725 4695/4741 +f 4681/4733 4680/4727 4693/4742 +f 4678/4729 4691/4744 4692/4746 +f 4676/4730 4689/4745 4690/4747 +f 4688/4736 4687/4735 4700/4748 +f 4686/4737 4685/5091 4698/4750 +f 4696/4738 4709/4759 4710/4752 +f 4695/4741 4694/4740 4707/4753 +f 4692/4746 4705/4760 4706/4755 +f 4691/4744 4690/4743 4703/4756 +f 4688/4736 4701/4749 4702/4758 +f 4686/4737 4699/4751 4700/4748 +f 4695/4741 4708/4754 4709/4759 +f 4694/4740 4693/4742 4706/4755 +f 4692/4746 4691/4744 4704/4757 +f 4689/4745 4702/4758 4703/4761 +f 4701/4749 4700/4748 4713/4762 +f 4699/4751 4698/4750 4711/4764 +f 4710/4752 4709/4759 4722/4766 +f 4707/4753 4720/4773 4721/4768 +f 4705/4760 4718/4774 4719/4769 +f 4704/4757 4703/4756 4716/4770 +f 4701/4749 4714/4763 4715/4772 +f 4699/4751 4712/4765 4713/4762 +f 4709/4759 4708/4754 4721/4768 +f 4707/4753 4706/4755 4719/4769 +f 4705/4760 4704/4757 4717/4771 +f 4702/4758 4715/4772 4716/4775 +f 4713/4762 4726/4786 4727/4776 +f 4711/4764 4724/4789 4725/4777 +f 4723/4767 4722/4766 4735/4778 +f 4721/4768 4720/4773 4733/4780 +f 4719/4769 4718/4774 4731/4782 +f 4716/4770 4729/5111 4730/4784 +f 4715/4772 4714/4763 4727/4776 +f 4713/4762 4712/4765 4725/4777 +f 4721/4768 4734/4781 4735/4778 +f 4719/4769 4732/4783 4733/4780 +f 4717/4771 4730/4784 4731/4782 +f 4716/4775 4715/4772 4728/4785 +f 4726/4786 4739/4799 4740/4788 +f 4724/4789 4737/5090 4738/4790 +f 4736/4779 4735/4778 4748/4791 +f 4733/4780 4746/4800 4747/4793 +f 4732/4783 4731/4782 4744/4794 +f 4730/4784 4729/5111 4742/4796 +f 4728/4785 4727/4776 4740/4788 +f 4726/4786 4725/4777 4738/4790 +f 4735/4778 4734/4781 4747/4793 +f 4732/4783 4745/4795 4746/4800 +f 4730/4784 4743/4797 4744/4794 +f 4728/4785 4741/4798 4742/4801 +f 4739/4799 4752/4812 4753/4802 +f 4738/4790 4737/5090 4750/4803 +f 4748/4791 4761/4813 4762/4805 +f 4746/4800 4759/4814 4760/4806 +f 4745/4795 4744/4794 4757/4807 +f 4743/4797 4742/4796 4755/4809 +f 4740/4788 4753/4802 4754/4811 +f 4739/4799 4738/4790 4751/4804 +f 4747/4793 4760/4806 4761/4813 +f 4745/4795 4758/4808 4759/4814 +f 4744/4794 4743/4797 4756/4810 +f 4741/4798 4754/4811 4755/4815 +f 4753/4802 4752/4812 4765/4816 +f 4750/4803 4763/5112 4764/4818 +f 4762/4805 4761/4813 4774/4819 +f 4759/4814 4772/4826 4773/4821 +f 4758/4808 4757/4807 4770/4822 +f 4755/4809 4768/5113 4769/4824 +f 4753/4802 4766/4817 4767/4825 +f 4752/4812 4751/4804 4764/4818 +f 4760/4806 4773/4821 4774/4819 +f 4758/4808 4771/4823 4772/4826 +f 4757/4807 4756/4810 4769/4824 +f 4755/4815 4754/4811 4767/4825 +f 4765/4816 4778/4838 4779/4828 +f 4764/4818 4763/5112 4776/4829 +f 4774/4819 4787/4839 4788/4831 +f 4772/4826 4785/4840 4786/4832 +f 4771/4823 4770/4822 4783/4833 +f 4769/4824 4768/5113 4781/4835 +f 4767/4825 4766/4817 4779/4828 +f 4765/4816 4764/4818 4777/4830 +f 4774/4819 4773/4821 4786/4832 +f 4771/4823 
4784/4834 4785/4840 +f 4769/4824 4782/4836 4783/4833 +f 4767/4825 4780/4837 4781/4841 +f 4779/4828 4778/4838 4791/4842 +f 4776/4829 4789/5089 4790/4844 +f 4788/4831 4787/4839 4800/4845 +f 4786/4832 4785/4840 4798/4847 +f 4783/4833 4796/4853 4797/4849 +f 4782/4836 4781/4835 4794/4850 +f 4780/4837 4779/4828 4792/4843 +f 4777/4830 4790/4844 4791/4842 +f 4787/4839 4786/4832 4799/4848 +f 4785/4840 4784/4834 4797/4849 +f 4782/4836 4795/4851 4796/4853 +f 4780/4837 4793/4852 4794/4854 +f 4792/4843 4791/4842 4804/4855 +f 4790/4844 4789/5089 4802/4857 +f 4800/4845 4813/4866 4814/4859 +f 4799/4848 4798/4847 4811/4860 +f 4796/4853 4809/4867 4810/4862 +f 4795/4851 4794/4850 4807/4863 +f 4792/4843 4805/4856 4806/4865 +f 4790/4844 4803/4858 4804/4855 +f 4799/4848 4812/4861 4813/4866 +f 4798/4847 4797/4849 4810/4862 +f 4796/4853 4795/4851 4808/4864 +f 4793/4852 4806/4865 4807/4868 +f 4805/4856 4804/4855 4817/4869 +f 4803/4858 4802/4857 4815/4871 +f 4814/4859 4813/4866 4826/4873 +f 4811/4860 4824/4880 4825/4875 +f 4809/4867 4822/4881 4823/4876 +f 4808/4864 4807/4863 4820/4877 +f 4805/4856 4818/4870 4819/4879 +f 4803/4858 4816/4872 4817/4869 +f 4813/4866 4812/4861 4825/4875 +f 4811/4860 4810/4862 4823/4876 +f 4809/4867 4808/4864 4821/4878 +f 4806/4865 4819/4879 4820/4882 +f 4817/4869 4830/4892 4831/4883 +f 4815/4871 4828/4896 4829/4884 +f 4826/4873 4839/4893 4840/4885 +f 4825/4875 4824/4880 4837/4886 +f 4823/4876 4822/4881 4835/4888 +f 4820/4877 4833/5114 4834/4890 +f 4819/4879 4818/4870 4831/4883 +f 4817/4869 4816/4872 4829/4884 +f 4825/4875 4838/4887 4839/4893 +f 4823/4876 4836/4889 4837/4886 +f 4821/4878 4834/4890 4835/4888 +f 4820/4882 4819/4879 4832/4891 +f 4830/4892 4843/4906 4844/4895 +f 4828/4896 4841/5088 4842/4897 +f 4840/4885 4839/4893 4852/4898 +f 4837/4886 4850/4907 4851/4900 +f 4836/4889 4835/4888 4848/4901 +f 4834/4890 4833/5114 4846/4903 +f 4832/4891 4831/4883 4844/4895 +f 4830/4892 4829/4884 4842/4897 +f 4839/4893 4838/4887 4851/4900 +f 4836/4889 4849/4902 4850/4907 +f 4834/4890 4847/4904 4848/4901 +f 4832/4891 4845/4905 4846/4908 +f 4843/4906 4856/4919 4857/4909 +f 4842/4897 4841/5088 4854/4910 +f 4852/4898 4865/4920 4866/4912 +f 4850/4907 4863/4921 4864/4913 +f 4849/4902 4848/4901 4861/4914 +f 4847/4904 4846/4903 4859/4916 +f 4844/4895 4857/4909 4858/4918 +f 4843/4906 4842/4897 4855/4911 +f 4851/4900 4864/4913 4865/4920 +f 4849/4902 4862/4915 4863/4921 +f 4848/4901 4847/4904 4860/4917 +f 4845/4905 4858/4918 4859/4922 +f 4857/4909 4856/4919 4869/4923 +f 4855/4911 4854/4910 4867/4925 +f 4866/4912 4865/4920 4878/4927 +f 4864/4913 4863/4921 4876/4929 +f 4862/4915 4861/4914 4874/4931 +f 4859/4916 4872/5115 4873/4933 +f 4857/4909 4870/4924 4871/4934 +f 4856/4919 4855/4911 4868/4926 +f 4864/4913 4877/4930 4878/4927 +f 4862/4915 4875/4932 4876/4929 +f 4861/4914 4860/4917 4873/4933 +f 4859/4922 4858/4918 4871/4934 +f 4869/4923 4882/4946 4883/4936 +f 4868/4926 4867/4925 4880/4937 +f 4878/4927 4891/4947 4892/4939 +f 4877/4930 4876/4929 4889/4940 +f 4874/4931 4887/4948 4888/4942 +f 4873/4933 4872/5115 4885/4943 +f 4871/4934 4870/4924 4883/4936 +f 4868/4926 4881/4938 4882/4946 +f 4878/4927 4877/4930 4890/4941 +f 4876/4929 4875/4932 4888/4942 +f 4873/4933 4886/4944 4887/4948 +f 4871/4934 4884/4945 4885/4949 +f 4883/4936 4882/4946 4896/4950 +f 4880/4937 4894/5087 4895/4952 +f 4892/4939 4891/4947 4905/4953 +f 4890/4941 4889/4940 4903/4955 +f 4887/4948 4901/4960 4902/4957 +f 4885/4943 4899/4970 4900/4958 +f 4884/4945 4883/4936 4897/4951 +f 4881/4938 4895/4952 4896/4950 +f 4891/4947 4890/4941 4904/4956 +f 
4889/4940 4888/4942 4902/4957 +f 4886/4944 4900/4958 4901/4960 +f 4885/4949 4884/4945 4898/4959 +f 4897/4951 4896/4950 4909/4962 +f 4895/4952 4894/5087 4907/4964 +f 4905/4953 4918/4973 4919/4966 +f 4904/4956 4903/4955 4916/4967 +f 4901/4960 4914/4974 4915/4969 +f 4899/4970 4912/5116 4913/4971 +f 4897/4951 4910/4963 4911/4972 +f 4895/4952 4908/4965 4909/4962 +f 4904/4956 4917/4968 4918/4973 +f 4903/4955 4902/4957 4915/4969 +f 4901/4960 4900/4958 4913/4971 +f 4899/4961 4898/4959 4911/4972 +f 4910/4963 4909/4962 4922/4976 +f 4908/4965 4907/4964 4920/4978 +f 4919/4966 4918/4973 4931/4980 +f 4916/4967 4929/4987 4930/4982 +f 4914/4974 4927/4988 4928/4983 +f 4913/4971 4912/5116 4925/4984 +f 4910/4963 4923/4977 4924/4986 +f 4908/4965 4921/4979 4922/4976 +f 4918/4973 4917/4968 4930/4982 +f 4916/4967 4915/4969 4928/4983 +f 4914/4974 4913/4971 4926/4985 +f 4911/4972 4924/4986 4925/4989 +f 4922/4976 4935/4999 4936/4990 +f 4920/4978 4933/5003 4934/4991 +f 4931/4980 4944/5000 4945/4992 +f 4930/4982 4929/4987 4942/4993 +f 4928/4983 4927/4988 4940/4995 +f 4925/4984 4938/5010 4939/4997 +f 4924/4986 4923/4977 4936/4990 +f 4922/4976 4921/4979 4934/4991 +f 4930/4982 4943/4994 4944/5000 +f 4928/4983 4941/4996 4942/4993 +f 4926/4985 4939/4997 4940/4995 +f 4925/4989 4924/4986 4937/4998 +f 4935/4999 4948/5013 4949/5002 +f 4933/5003 4946/5086 4947/5004 +f 4945/4992 4944/5000 4957/5005 +f 4942/4993 4955/5014 4956/5007 +f 4941/4996 4940/4995 4953/5008 +f 4938/5010 4951/5023 4952/5011 +f 4937/4998 4936/4990 4949/5002 +f 4935/4999 4934/4991 4947/5004 +f 4944/5000 4943/4994 4956/5007 +f 4941/4996 4954/5009 4955/5014 +f 4939/4997 4952/5011 4953/5008 +f 4938/5001 4937/4998 4950/5012 +f 4948/5013 4961/5026 4962/5016 +f 4947/5004 4946/5086 4959/5017 +f 4957/5005 4970/5027 4971/5019 +f 4955/5014 4968/5028 4969/5020 +f 4954/5009 4953/5008 4966/5021 +f 4951/5023 4964/5037 4965/5024 +f 4949/5002 4962/5016 4963/5025 +f 4948/5013 4947/5004 4960/5018 +f 4956/5007 4969/5020 4970/5027 +f 4954/5009 4967/5022 4968/5028 +f 4953/5008 4952/5011 4965/5024 +f 4951/5015 4950/5012 4963/5025 +f 4962/5016 4961/5026 4974/5030 +f 4959/5017 4972/5117 4973/5032 +f 4971/5019 4970/5027 4983/5033 +f 4968/5028 4981/5040 4982/5035 +f 4966/5021 4979/5041 4980/5036 +f 4964/5037 4977/5118 4978/5038 +f 4962/5016 4975/5031 4976/5039 +f 4960/5018 4973/5032 4974/5030 +f 4969/5020 4982/5035 4983/5033 +f 4967/5022 4980/5036 4981/5040 +f 4966/5021 4965/5024 4978/5038 +f 4964/5029 4963/5025 4976/5039 +f 4975/5031 4974/5030 4987/5043 +f 4973/5032 4972/5117 4985/5045 +f 4983/5033 4996/5053 4997/5047 +f 4981/5040 4994/5054 4995/5048 +f 4979/5041 4992/5055 4993/5049 +f 4978/5038 4977/5118 4990/5050 +f 4976/5039 4975/5031 4988/5044 +f 4973/5032 4986/5046 4987/5043 +f 4983/5033 4982/5035 4995/5048 +f 4981/5040 4980/5036 4993/5049 +f 4978/5038 4991/5051 4992/5055 +f 4976/5039 4989/5052 4990/5056 +f 4988/5044 4987/5043 5000/5057 +f 4985/5045 4998/5085 4999/5059 +f 4997/5047 4996/5053 5009/5060 +f 4995/5048 4994/5054 5007/5062 +f 4992/5055 5005/5068 5006/5064 +f 4991/5051 4990/5050 5003/5065 +f 4989/5052 4988/5044 5001/5058 +f 4986/5046 4999/5059 5000/5057 +f 4996/5053 4995/5048 5008/5063 +f 4994/5054 4993/5049 5006/5064 +f 4991/5051 5004/5066 5005/5068 +f 4989/5052 5002/5067 5003/5069 +f 5001/5058 5000/5057 5013/5070 +f 4999/5059 4998/5085 5011/5072 +f 5009/5060 5022/5081 5023/5074 +f 5008/5063 5007/5062 5020/5075 +f 5005/5068 5018/5082 5019/5077 +f 5004/5066 5003/5065 5016/5078 +f 5001/5058 5014/5071 5015/5080 +f 4999/5059 5012/5073 5013/5070 +f 5008/5063 5021/5076 
5022/5081 +f 5007/5062 5006/5064 5019/5077 +f 5005/5068 5004/5066 5017/5079 +f 5002/5067 5015/5080 5016/5083 +f 5014/5071 5013/5070 4609/4664 +f 5012/5073 5011/5072 4607/4676 +f 5022/5081 4618/4667 4619/4678 +f 5020/5075 4616/4671 4617/4680 +f 5018/5082 4614/4672 4615/4655 +f 5017/5079 5016/5078 4612/4658 +f 5014/5071 4610/4675 4611/4661 +f 5012/5073 4608/4677 4609/4664 +f 5021/5076 4617/4680 4618/4667 +f 5020/5075 5019/5077 4615/4655 +f 5018/5082 5017/5079 4613/4660 +f 5015/5080 4611/4661 4612/4673 +f 4481/4530 4607/4676 5011/5072 +f 5011/5072 4998/5085 4602/4634 +f 4998/5085 4985/5045 4597/4629 +f 4985/5045 4972/5117 4593/4627 +f 4593/4627 4972/5117 4959/5017 +f 4589/4622 4959/5017 4946/5086 +f 4585/4619 4946/5086 4933/5003 +f 4933/5003 4920/4978 4577/4612 +f 4577/4612 4920/4978 4907/4964 +f 4907/4964 4894/5087 4569/4606 +f 4894/5087 4880/4937 4565/4601 +f 4880/4937 4867/4925 4561/4599 +f 4561/4599 4867/4925 4854/4910 +f 4557/4594 4854/4910 4841/5088 +f 4553/4590 4841/5088 4828/4896 +f 4828/4896 4815/4871 4545/4584 +f 4545/4584 4815/4871 4802/4857 +f 4802/4857 4789/5089 4537/4577 +f 4789/5089 4776/4829 4533/4572 +f 4776/4829 4763/5112 4529/4569 +f 4529/4569 4763/5112 4750/4803 +f 4525/4564 4750/4803 4737/5090 +f 4521/4561 4737/5090 4724/4789 +f 4724/4789 4711/4764 4513/4555 +f 4513/4555 4711/4764 4698/4750 +f 4698/4750 4685/5091 4505/4548 +f 4685/5091 4672/4722 4501/4543 +f 4672/4722 4659/5109 4497/4541 +f 4497/4541 4659/5109 4646/4696 +f 4493/4536 4646/4696 4633/5092 +f 4489/4533 4633/5092 4620/4682 +f 4620/4682 4607/4676 4481/4530 diff --git a/motion-gan-pipeline/preprocessing/third/DECA/data/landmark_embedding.npy b/motion-gan-pipeline/preprocessing/third/DECA/data/landmark_embedding.npy new file mode 100755 index 0000000..046c64f --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/DECA/data/landmark_embedding.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8095348eeafce5a02f6bd8765146307f9567a3f03b316d788a2e47336d667954 +size 31292 diff --git a/motion-gan-pipeline/preprocessing/third/DECA/data/mask_2.png b/motion-gan-pipeline/preprocessing/third/DECA/data/mask_2.png new file mode 100644 index 0000000..7a63ded Binary files /dev/null and b/motion-gan-pipeline/preprocessing/third/DECA/data/mask_2.png differ diff --git a/motion-gan-pipeline/preprocessing/third/DECA/data/mask_3.png b/motion-gan-pipeline/preprocessing/third/DECA/data/mask_3.png new file mode 100644 index 0000000..30b8407 Binary files /dev/null and b/motion-gan-pipeline/preprocessing/third/DECA/data/mask_3.png differ diff --git a/motion-gan-pipeline/preprocessing/third/DECA/data/mask_face.png b/motion-gan-pipeline/preprocessing/third/DECA/data/mask_face.png new file mode 100644 index 0000000..69e2d94 Binary files /dev/null and b/motion-gan-pipeline/preprocessing/third/DECA/data/mask_face.png differ diff --git a/motion-gan-pipeline/preprocessing/third/DECA/data/mask_inpainting.npz b/motion-gan-pipeline/preprocessing/third/DECA/data/mask_inpainting.npz new file mode 100644 index 0000000..3c39c8a --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/DECA/data/mask_inpainting.npz @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:098d89790c8178dfdee7b11f21cecf8eb8af8cfd28950d7d32db4a231f4a9bfd +size 78643654 diff --git a/motion-gan-pipeline/preprocessing/third/DECA/data/mask_mouth.png b/motion-gan-pipeline/preprocessing/third/DECA/data/mask_mouth.png new file mode 100644 index 0000000..b77ae40 Binary files /dev/null and 
b/motion-gan-pipeline/preprocessing/third/DECA/data/mask_mouth.png differ diff --git a/motion-gan-pipeline/preprocessing/third/DECA/data/mask_mouth_2.png b/motion-gan-pipeline/preprocessing/third/DECA/data/mask_mouth_2.png new file mode 100644 index 0000000..e033c78 Binary files /dev/null and b/motion-gan-pipeline/preprocessing/third/DECA/data/mask_mouth_2.png differ diff --git a/motion-gan-pipeline/preprocessing/third/DECA/data/mean_texture.jpg b/motion-gan-pipeline/preprocessing/third/DECA/data/mean_texture.jpg new file mode 100644 index 0000000..a139edd --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/DECA/data/mean_texture.jpg @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:34364c02dec9dfb30580d1575b375553c6cd8d5da5caa6ac5a611ce65b91f529 +size 53722 diff --git a/motion-gan-pipeline/preprocessing/third/DECA/data/texture_data_256.npy b/motion-gan-pipeline/preprocessing/third/DECA/data/texture_data_256.npy new file mode 100644 index 0000000..46c3295 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/DECA/data/texture_data_256.npy @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8563919927a2bc2ea7343d0ceef35254105d21a1947f00e5ca39e01d6ed1f9f1 +size 6677908 diff --git a/motion-gan-pipeline/preprocessing/third/DECA/data/uv_face_eye_mask.png b/motion-gan-pipeline/preprocessing/third/DECA/data/uv_face_eye_mask.png new file mode 100755 index 0000000..86c768c Binary files /dev/null and b/motion-gan-pipeline/preprocessing/third/DECA/data/uv_face_eye_mask.png differ diff --git a/motion-gan-pipeline/preprocessing/third/DECA/data/uv_face_mask.png b/motion-gan-pipeline/preprocessing/third/DECA/data/uv_face_mask.png new file mode 100755 index 0000000..7baaceb Binary files /dev/null and b/motion-gan-pipeline/preprocessing/third/DECA/data/uv_face_mask.png differ diff --git a/motion-gan-pipeline/preprocessing/third/DECA/decalib/__init__.py b/motion-gan-pipeline/preprocessing/third/DECA/decalib/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/motion-gan-pipeline/preprocessing/third/DECA/decalib/datasets/datasets.py b/motion-gan-pipeline/preprocessing/third/DECA/decalib/datasets/datasets.py new file mode 100755 index 0000000..c949524 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/DECA/decalib/datasets/datasets.py @@ -0,0 +1,169 @@
+#-*- coding: utf-8 -*-
+#
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# Using this computer program means that you agree to the terms
+# in the LICENSE file included with this software distribution.
+# Any use not explicitly granted by the LICENSE is prohibited.
+#
+# Copyright©2019 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# For comments or questions, please email us at deca@tue.mpg.de
+# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de
+
+import os, sys
+import torch
+from torch.utils.data import Dataset, DataLoader
+import torchvision.transforms as transforms
+import numpy as np
+import cv2
+import scipy
+from skimage.io import imread, imsave
+from skimage.transform import estimate_transform, warp, resize, rescale
+from glob import glob
+import scipy.io
+import matplotlib.pyplot as plt
+from subprocess import call
+
+from . import detectors
+
+
+def check_mkdir(path):
+    if not os.path.exists(path):
+        print('creating %s' % path)
+        os.makedirs(path)
+
+def video2sequence(video_path):
+    videofolder = video_path.split('.')[0]
+    check_mkdir(videofolder)
+    video_name = video_path.split('/')[-1].split('.')[0]
+    vidcap = cv2.VideoCapture(video_path)
+    success, image = vidcap.read()
+    count = 0
+    imagepath_list = []
+    while success:
+        imagepath = '{}/{}_frame{:04d}.png'.format(videofolder, video_name, count)
+        cv2.imwrite(imagepath, image)  # save frame as PNG file
+        success, image = vidcap.read()
+        count += 1
+        imagepath_list.append(imagepath)
+    print('video frames are stored in {}'.format(videofolder))
+    return imagepath_list
+
+def video2sequence_lossless(video_path):
+    videofolder = video_path.split('.')[0]
+    check_mkdir(videofolder)
+    video_name = video_path.split('/')[-1].split('.')[0]
+    cmd = (f'ffmpeg -i {video_path} -vf fps=25 {videofolder}/{video_name}_frame%04d.png').split()
+    call(cmd)
+    imagepath_list = [os.path.join(videofolder, f) for f in os.listdir(videofolder)]
+    print('video frames are stored in {}'.format(videofolder))
+    return imagepath_list
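+# For reference, the command assembled above is equivalent to running, e.g.:
+#   ffmpeg -i talking_head.mp4 -vf fps=25 talking_head/talking_head_frame%04d.png
+# (illustrative file names; the fps=25 rate and %04d frame pattern are hard-coded here,
+# and ffmpeg is assumed to be on PATH)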
+
+class TestData(Dataset):
+    def __init__(self, testpath, iscrop=True, crop_size=224, scale=1.25, face_detector='fan'):  # default was 'mtcnn', whose branch below is commented out and would hit exit()
+        '''
+        testpath: folder, imagepath_list, image path, video path
+        '''
+        # print('testpath: ', testpath)
+        if isinstance(testpath, list):
+            self.imagepath_list = testpath
+        elif os.path.isdir(testpath):
+            self.imagepath_list = glob(testpath + '/*.jpg') + glob(testpath + '/*.png') + glob(testpath + '/*.bmp')
+        elif os.path.isfile(testpath) and (testpath[-3:] in ['jpg', 'png', 'bmp']):
+            self.imagepath_list = [testpath]
+        elif os.path.isfile(testpath) and (testpath[-3:] in ['mp4', 'csv', 'vid', 'ebm']):
+            self.imagepath_list = video2sequence_lossless(testpath)
+        else:
+            print(f'please check the test path: {testpath}')
+            exit()
+        print('total {} images'.format(len(self.imagepath_list)))
+        self.imagepath_list = sorted(self.imagepath_list)
+        self.crop_size = crop_size
+        self.scale = scale
+        self.iscrop = iscrop
+        self.resolution_inp = crop_size
+        if face_detector == 'fan':
+            self.face_detector = detectors.FAN()
+        # elif face_detector == 'mtcnn':
+        #     self.face_detector = detectors.MTCNN()
+        else:
+            print(f'please check the detector: {face_detector}')
+            exit()
+
+    def __len__(self):
+        return len(self.imagepath_list)
+
+    def bbox2point(self, left, right, top, bottom, type='bbox'):
+        ''' bbox from detector and landmarks are different
+        '''
+        if type=='kpt68':
+            old_size = (right - left + bottom - top)/2*1.1
+            center = np.array([right - (right - left) / 2.0, bottom - (bottom - top) / 2.0 ])
+        elif type=='bbox':
+            old_size = (right - left + bottom - top)/2
+            center = np.array([right - (right - left) / 2.0, bottom - (bottom - top) / 2.0 + old_size*0.12])
+        else:
+            raise NotImplementedError
+        return old_size, center
+
+    def __getitem__(self, index):
+        imagepath = self.imagepath_list[index]
+        imagename = imagepath.split('/')[-1].split('.')[0]
+
+        image = np.array(imread(imagepath))
+        if len(image.shape) == 2:
+            image = np.tile(image[:, :, None], (1, 1, 3))  # grayscale -> 3 channels; ndarray.repeat takes (repeats, axis), so the old .repeat(1,1,3) raised TypeError
+        if len(image.shape) == 3 and image.shape[2] > 3:
+            image = image[:,:,:3]
+
+        h, w, _ = image.shape
+        if self.iscrop:
+            # provide kpt as txt file, or mat file (for AFLW2000)
+            kpt_matpath = imagepath.replace('.jpg', '.mat').replace('.png', '.mat')
+            kpt_txtpath = imagepath.replace('.jpg', '.txt').replace('.png', '.txt')
+            if os.path.exists(kpt_matpath):
+                kpt = scipy.io.loadmat(kpt_matpath)['pt3d_68'].T
+                left = np.min(kpt[:,0]); right = np.max(kpt[:,0]);
+                top = np.min(kpt[:,1]); bottom = np.max(kpt[:,1])
+                old_size, center = self.bbox2point(left, right, top, bottom, type='kpt68')
+            elif os.path.exists(kpt_txtpath):
+                kpt = np.loadtxt(kpt_txtpath)  # was `kptpath`, an undefined name (NameError)
+                left = np.min(kpt[:,0]); right = np.max(kpt[:,0]);
+                top = np.min(kpt[:,1]); bottom = np.max(kpt[:,1])
+                old_size, center = self.bbox2point(left, right, top, bottom, type='kpt68')
+            else:
+                try:
+                    bbox, bbox_type = self.face_detector.run(image)
+                except ValueError:
+                    # FAN returns a bare [0] when no face is found, which makes the unpacking above raise
+                    bbox = self.face_detector.run(image)
+                    bbox_type = 'kpt68'
+
+                if len(bbox) < 4:
+                    print('no face detected! run original image')
+                    left = 0; right = w-1; top = 0; bottom = h-1  # x range is width, y range is height (was swapped)
+                else:
+                    left = bbox[0]; right = bbox[2]
+                    top = bbox[1]; bottom = bbox[3]
+                old_size, center = self.bbox2point(left, right, top, bottom, type=bbox_type)
+
+            size = int(old_size*self.scale)
+            src_pts = np.array([[center[0]-size/2, center[1]-size/2], [center[0] - size/2, center[1]+size/2], [center[0]+size/2, center[1]-size/2]])
+        else:
+            src_pts = np.array([[0, 0], [0, h-1], [w-1, 0]])
+
+        DST_PTS = np.array([[0,0], [0,self.resolution_inp - 1], [self.resolution_inp - 1, 0]])
+        tform = estimate_transform('similarity', src_pts, DST_PTS)
+
+        image = image/255.
+        dst_image = warp(image, tform.inverse, output_shape=(self.resolution_inp, self.resolution_inp))
+        dst_image = dst_image.transpose(2,0,1)
+        return {'image': torch.tensor(dst_image).float(),
+                'imagename': imagename,
+                'tform': tform,
+                'original_image': torch.tensor(image.transpose(2,0,1)).float(),
+                }
\ No newline at end of file
diff --git a/motion-gan-pipeline/preprocessing/third/DECA/decalib/datasets/detectors.py b/motion-gan-pipeline/preprocessing/third/DECA/decalib/datasets/detectors.py
new file mode 100755
index 0000000..09d6688
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/DECA/decalib/datasets/detectors.py
@@ -0,0 +1,60 @@
+# -*- coding: utf-8 -*-
+#
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# Using this computer program means that you agree to the terms
+# in the LICENSE file included with this software distribution.
+# Any use not explicitly granted by the LICENSE is prohibited.
+#
+# Copyright©2019 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+# +# For comments or questions, please email us at deca@tue.mpg.de +# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de + +import numpy as np +import torch + +class FAN(object): + def __init__(self): + import face_alignment + self.model = face_alignment.FaceAlignment(face_alignment.LandmarksType._2D, flip_input=False) + + def run(self, image): + ''' + image: 0-255, uint8, rgb, [h, w, 3] + return: detected box list + ''' + out = self.model.get_landmarks(image) + if out is None: + return [0] + else: + kpt = out[0].squeeze() + left = np.min(kpt[:,0]); right = np.max(kpt[:,0]); + top = np.min(kpt[:,1]); bottom = np.max(kpt[:,1]) + bbox = [left,top, right, bottom] + return bbox, 'kpt68' + +class MTCNN(object): + def __init__(self, device = 'cpu'): + ''' + https://github.com/timesler/facenet-pytorch/blob/master/examples/infer.ipynb + ''' + from facenet_pytorch import MTCNN as mtcnn + self.device = device + self.model = mtcnn(keep_all=True) + def run(self, input): + ''' + image: 0-255, uint8, rgb, [h, w, 3] + return: detected box + ''' + out = self.model.detect(input[None,...]) + if out[0][0] is None: + return [0] + else: + bbox = out[0][0].squeeze() + return bbox, 'bbox' + + + diff --git a/motion-gan-pipeline/preprocessing/third/DECA/decalib/deca.py b/motion-gan-pipeline/preprocessing/third/DECA/decalib/deca.py new file mode 100644 index 0000000..8eec61c --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/DECA/decalib/deca.py @@ -0,0 +1,473 @@ +# -*- coding: utf-8 -*- +# +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# Using this computer program means that you agree to the terms +# in the LICENSE file included with this software distribution. +# Any use not explicitly granted by the LICENSE is prohibited. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+# +# For comments or questions, please email us at deca@tue.mpg.de +# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de + +import os, sys +import torch +import torchvision +from torchvision import transforms +from PIL import Image +import torch.nn.functional as F +import torch.nn as nn +import pdb +import matplotlib.pyplot as plt +from skimage.transform import warp +import numpy as np +from time import time +from skimage.io import imread + +import cv2 +import pickle +from .utils.renderer import SRenderY +from .models.encoders import ResnetEncoder +from .models.FLAME import FLAME, FLAMETex +from .models.decoders import Generator +from .utils import util +from .utils.rotation_converter import batch_euler2axis +from .datasets import datasets +from .utils.config import cfg +torch.backends.cudnn.benchmark = True + +class DECA(object): + def __init__(self, config=None, device='cuda'): + if config is None: + self.cfg = cfg + else: + self.cfg = config + self.device = device + self.image_size = self.cfg.dataset.image_size + self.uv_size = self.cfg.model.uv_size + + self._create_model(self.cfg.model) + self._setup_renderer(self.cfg.model) + + def _setup_renderer(self, model_cfg): + self.render = SRenderY(self.image_size, obj_filename=model_cfg.topology_path, uv_size=model_cfg.uv_size).to(self.device) + # face mask for rendering details + mask = imread(model_cfg.face_eye_mask_path).astype(np.float32)/255.; mask = torch.from_numpy(mask[:,:,0])[None,None,:,:].contiguous() + self.uv_face_eye_mask = F.interpolate(mask, [model_cfg.uv_size, model_cfg.uv_size]).to(self.device) + mask = imread(model_cfg.face_mask_path).astype(np.float32)/255.; mask = torch.from_numpy(mask[:,:,0])[None,None,:,:].contiguous() + self.uv_face_mask = F.interpolate(mask, [model_cfg.uv_size, model_cfg.uv_size]).to(self.device) + # displacement correction + fixed_dis = np.load(model_cfg.fixed_displacement_path) + self.fixed_uv_dis = torch.tensor(fixed_dis).float().to(self.device) + # mean texture + mean_texture = imread(model_cfg.mean_tex_path).astype(np.float32)/255.; mean_texture = torch.from_numpy(mean_texture.transpose(2,0,1))[None,:,:,:].contiguous() + self.mean_texture = F.interpolate(mean_texture, [model_cfg.uv_size, model_cfg.uv_size]).to(self.device) + # dense mesh template, for save detail mesh + self.dense_template = np.load(model_cfg.dense_template_path, allow_pickle=True, encoding='latin1').item() + + def _create_model(self, model_cfg): + # set up parameters + self.n_param = model_cfg.n_shape+model_cfg.n_tex+model_cfg.n_exp+model_cfg.n_pose+model_cfg.n_cam+model_cfg.n_light + self.n_detail = model_cfg.n_detail + self.n_cond = model_cfg.n_exp + 3 # exp + jaw pose + self.num_list = [model_cfg.n_shape, model_cfg.n_tex, model_cfg.n_exp, model_cfg.n_pose, model_cfg.n_cam, model_cfg.n_light] + self.param_dict = {i:model_cfg.get('n_' + i) for i in model_cfg.param_list} + + # encoders + self.E_flame = ResnetEncoder(outsize=self.n_param).to(self.device) + self.E_detail = ResnetEncoder(outsize=self.n_detail).to(self.device) + # decoders + self.flame = FLAME(model_cfg).to(self.device) + if model_cfg.use_tex: + self.flametex = FLAMETex(model_cfg).to(self.device) + self.D_detail = Generator(latent_dim=self.n_detail+self.n_cond, out_channels=1, out_scale=model_cfg.max_z, sample_mode = 'bilinear').to(self.device) + # resume model + model_path = self.cfg.pretrained_modelpath + if os.path.exists(model_path): + print(f'trained model found. 
load {model_path}') + checkpoint = torch.load(model_path) + self.checkpoint = checkpoint + util.copy_state_dict(self.E_flame.state_dict(), checkpoint['E_flame']) + util.copy_state_dict(self.E_detail.state_dict(), checkpoint['E_detail']) + util.copy_state_dict(self.D_detail.state_dict(), checkpoint['D_detail']) + else: + print(f'please check model path: {model_path}') + exit() + # eval mode + self.E_flame.eval() + self.E_detail.eval() + self.D_detail.eval() + + def decompose_code(self, code, num_dict): + ''' Convert a flattened parameter vector to a dictionary of parameters + code_dict.keys() = ['shape', 'tex', 'exp', 'pose', 'cam', 'light'] + ''' + code_dict = {} + start = 0 + for key in num_dict: + end = start+int(num_dict[key]) + code_dict[key] = code[:, start:end] + start = end + if key == 'light': + code_dict[key] = code_dict[key].reshape(code_dict[key].shape[0], 9, 3) + return code_dict + + def displacement2normal(self, uv_z, coarse_verts, coarse_normals): + ''' Convert displacement map into detail normal map + ''' + batch_size = uv_z.shape[0] + uv_coarse_vertices = self.render.world2uv(coarse_verts).detach() + uv_coarse_normals = self.render.world2uv(coarse_normals).detach() + + uv_z = uv_z*self.uv_face_eye_mask + uv_detail_vertices = uv_coarse_vertices + uv_z*uv_coarse_normals + self.fixed_uv_dis[None,None,:,:]*uv_coarse_normals.detach() + dense_vertices = uv_detail_vertices.permute(0,2,3,1).reshape([batch_size, -1, 3]) + uv_detail_normals = util.vertex_normals(dense_vertices, self.render.dense_faces.expand(batch_size, -1, -1)) + uv_detail_normals = uv_detail_normals.reshape([batch_size, uv_coarse_vertices.shape[2], uv_coarse_vertices.shape[3], 3]).permute(0,3,1,2) + return uv_detail_normals + + def displacement2vertex(self, uv_z, coarse_verts, coarse_normals): + ''' Convert displacement map into detail vertices + ''' + batch_size = uv_z.shape[0] + uv_coarse_vertices = self.render.world2uv(coarse_verts).detach() + uv_coarse_normals = self.render.world2uv(coarse_normals).detach() + + uv_z = uv_z*self.uv_face_eye_mask + uv_detail_vertices = uv_coarse_vertices + uv_z*uv_coarse_normals + self.fixed_uv_dis[None,None,:,:]*uv_coarse_normals.detach() + dense_vertices = uv_detail_vertices.permute(0,2,3,1).reshape([batch_size, -1, 3]) + # uv_detail_normals = util.vertex_normals(dense_vertices, self.render.dense_faces.expand(batch_size, -1, -1)) + # uv_detail_normals = uv_detail_normals.reshape([batch_size, uv_coarse_vertices.shape[2], uv_coarse_vertices.shape[3], 3]).permute(0,3,1,2) + detail_faces = self.render.dense_faces + return dense_vertices, detail_faces + + def visofp(self, normals): + ''' visibility of keypoints, based on the normal direction + ''' + normals68 = self.flame.seletec_3d68(normals) + vis68 = (normals68[:,:,2:] < 0.1).float() + return vis68 + + @torch.no_grad() + def encode(self, images): + batch_size = images.shape[0] + parameters = self.E_flame(images) + detailcode = self.E_detail(images) + codedict = self.decompose_code(parameters, self.param_dict) + codedict['detail'] = detailcode + codedict['images'] = images + return codedict + + @torch.no_grad() + def decode(self, codedict, tform=None): + images = codedict['images'] + batch_size = images.shape[0] + + # pose = codedict['pose'] + # print(f'Pose: {pose}') + # camera = codedict['cam'] + # print(f'Camera: {camera}') + + ## decode + verts, landmarks2d, landmarks3d = self.flame(shape_params=codedict['shape'], expression_params=codedict['exp'], pose_params=codedict['pose']) + uv_z = 
self.D_detail(torch.cat([codedict['pose'][:,3:], codedict['exp'], codedict['detail']], dim=1)) + if self.cfg.model.use_tex: + albedo = self.flametex(codedict['tex']) + else: + albedo = torch.zeros([batch_size, 3, self.uv_size, self.uv_size], device=images.device) + + # pdb.set_trace() + + ## projection + landmarks2d = util.batch_orth_proj(landmarks2d, codedict['cam'])[:,:,:2]; landmarks2d[:,:,1:] = -landmarks2d[:,:,1:]; landmarks2d = landmarks2d*self.image_size/2 + self.image_size/2 + landmarks3d = util.batch_orth_proj(landmarks3d, codedict['cam']); landmarks3d[:,:,1:] = -landmarks3d[:,:,1:]; landmarks3d = landmarks3d*self.image_size/2 + self.image_size/2 + trans_verts = util.batch_orth_proj(verts, codedict['cam']); trans_verts[:,:,1:] = -trans_verts[:,:,1:] + + if tform is not None: + tform_tensor = torch.tensor(tform.params, dtype=torch.float32).cuda() + dst_image = warp(trans_verts[0,:,1:].cpu().numpy(), tform) + trans_verts = torch.cat((trans_verts[0,:,:1], torch.tensor(dst_image, dtype=torch.float32).cuda()), dim = 1) + trans_verts = torch.unsqueeze(trans_verts, dim=0) + + ## rendering + ops = self.render(verts, trans_verts, albedo, codedict['light']) + uv_detail_normals = self.displacement2normal(uv_z, verts, ops['normals']) + uv_shading = self.render.add_SHlight(uv_detail_normals, codedict['light']) + uv_texture = albedo*uv_shading + + landmarks3d_vis = self.visofp(ops['transformed_normals']) + landmarks3d = torch.cat([landmarks3d, landmarks3d_vis], dim=2) + + ## render shape + shape_images = self.render.render_shape(verts, trans_verts) + + # new_shape = shape_images[0].permute(1, 2, 0).cpu().numpy() + # plt.imshow(new_shape) + # plt.show() + + detail_normal_images = F.grid_sample(uv_detail_normals, ops['grid'], align_corners=False)*ops['alpha_images'] + shape_detail_images = self.render.render_shape(verts, trans_verts, detail_normal_images=detail_normal_images) + + ## extract texture + ## TODO: current resolution 256x256, support higher resolution, and add visibility + uv_pverts = self.render.world2uv(trans_verts) + uv_gt = F.grid_sample(images, uv_pverts.permute(0,2,3,1)[:,:,:,:2], mode='bilinear') + if self.cfg.model.use_tex: + ## TODO: poisson blending should give better-looking results + uv_texture_gt = uv_gt[:,:3,:,:]*self.uv_face_eye_mask + (uv_texture[:,:3,:,:]*(1-self.uv_face_eye_mask)*0.7) + else: + uv_texture_gt = uv_gt[:,:3,:,:]*self.uv_face_eye_mask + (torch.ones_like(uv_gt[:,:3,:,:])*(1-self.uv_face_eye_mask)*0.7) + + ## output + opdict = { + 'vertices': verts, + 'normals': ops['normals'], + 'grid': ops['grid'], + 'transformed_vertices': trans_verts, + 'landmarks2d': landmarks2d, + 'landmarks3d': landmarks3d, + 'uv_detail_normals': uv_detail_normals, + 'uv_texture_gt': uv_texture_gt, + 'displacement_map': uv_z+self.fixed_uv_dis[None,None,:,:], + 'detail_normal_images': detail_normal_images, + } + + if self.cfg.model.use_tex: + opdict['albedo'] = albedo + opdict['uv_texture'] = uv_texture + + visdict = { + 'inputs': images, + 'landmarks2d': util.tensor_vis_landmarks(images, landmarks2d, isScale=False), + 'landmarks3d': util.tensor_vis_landmarks(images, landmarks3d, isScale=False), + 'shape_images': shape_images, + 'shape_detail_images': shape_detail_images, + } + + if self.cfg.model.use_tex: + visdict['rendered_images'] = ops['images'] + + return opdict, visdict + + @torch.no_grad() + def decode_eyes(self, codedict, tform=None): + + images = codedict['images'] + batch_size = images.shape[0] + + pose = codedict['pose'] + print(f'Pose: {pose}') + camera = codedict['cam'] 
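+        # NOTE (added comment): the prints here are debug output, and the constant
+        # eye-pose tensor below is dead code, immediately overwritten by the
+        # -20 * pose heuristic, which appears to tie the eye pose to the head/jaw pose.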
+ print(f'Camera: {camera}') + + eye_pose_params = torch.tensor([[0, 0, 0, 0, 0, 0]], device='cuda') + eye_pose_params = -20 * pose + print(f'eye_pose_params: {eye_pose_params}') + + ## decode + verts, landmarks2d, landmarks3d = self.flame(shape_params=codedict['shape'], expression_params=codedict['exp'], + pose_params=codedict['pose'], eye_pose_params=eye_pose_params) + + uv_z = self.D_detail(torch.cat([codedict['pose'][:, 3:], codedict['exp'], codedict['detail']], dim=1)) + if self.cfg.model.use_tex: + albedo = self.flametex(codedict['tex']) + else: + albedo = torch.zeros([batch_size, 3, self.uv_size, self.uv_size], device=images.device) + + # pdb.set_trace() + + ## projection + landmarks2d = util.batch_orth_proj(landmarks2d, codedict['cam'])[:, :, :2]; + landmarks2d[:, :, 1:] = -landmarks2d[:, :, 1:]; + landmarks2d = landmarks2d * self.image_size / 2 + self.image_size / 2 + landmarks3d = util.batch_orth_proj(landmarks3d, codedict['cam']); + landmarks3d[:, :, 1:] = -landmarks3d[:, :, 1:]; + landmarks3d = landmarks3d * self.image_size / 2 + self.image_size / 2 + trans_verts = util.batch_orth_proj(verts, codedict['cam']); + trans_verts[:, :, 1:] = -trans_verts[:, :, 1:] + + if tform is not None: + tform_tensor = torch.tensor(tform.params, dtype=torch.float32).cuda() + dst_image = warp(trans_verts[0, :, 1:].cpu().numpy(), tform) + trans_verts = torch.cat((trans_verts[0, :, :1], torch.tensor(dst_image, dtype=torch.float32).cuda()), dim=1) + trans_verts = torch.unsqueeze(trans_verts, dim=0) + + ## rendering + ops = self.render(verts, trans_verts, albedo, codedict['light']) + uv_detail_normals = self.displacement2normal(uv_z, verts, ops['normals']) + uv_shading = self.render.add_SHlight(uv_detail_normals, codedict['light']) + uv_texture = albedo * uv_shading + + landmarks3d_vis = self.visofp(ops['transformed_normals']) + landmarks3d = torch.cat([landmarks3d, landmarks3d_vis], dim=2) + + ## render shape + shape_images = self.render.render_shape(verts, trans_verts) + + # new_shape = shape_images[0].permute(1, 2, 0).cpu().numpy() + # plt.imshow(new_shape) + # plt.show() + + detail_normal_images = F.grid_sample(uv_detail_normals, ops['grid'], align_corners=False) * ops['alpha_images'] + shape_detail_images = self.render.render_shape(verts, trans_verts, detail_normal_images=detail_normal_images) + + ## extract texture + ## TODO: current resolution 256x256, support higher resolution, and add visibility + uv_pverts = self.render.world2uv(trans_verts) + uv_gt = F.grid_sample(images, uv_pverts.permute(0, 2, 3, 1)[:, :, :, :2], mode='bilinear') + if self.cfg.model.use_tex: + ## TODO: poisson blending should give better-looking results + uv_texture_gt = uv_gt[:, :3, :, :] * self.uv_face_eye_mask + ( + uv_texture[:, :3, :, :] * (1 - self.uv_face_eye_mask) * 0.7) + else: + uv_texture_gt = uv_gt[:, :3, :, :] * self.uv_face_eye_mask + ( + torch.ones_like(uv_gt[:, :3, :, :]) * (1 - self.uv_face_eye_mask) * 0.7) + + ## output + opdict = { + 'vertices': verts, + 'normals': ops['normals'], + 'grid': ops['grid'], + 'transformed_vertices': trans_verts, + 'landmarks2d': landmarks2d, + 'landmarks3d': landmarks3d, + 'uv_detail_normals': uv_detail_normals, + 'uv_texture_gt': uv_texture_gt, + 'displacement_map': uv_z + self.fixed_uv_dis[None, None, :, :], + 'detail_normal_images': detail_normal_images, + + } + + if self.cfg.model.use_tex: + opdict['albedo'] = albedo + opdict['uv_texture'] = uv_texture + + visdict = { + 'inputs': images, + 'landmarks2d': util.tensor_vis_landmarks(images, landmarks2d, isScale=False), + 
'landmarks3d': util.tensor_vis_landmarks(images, landmarks3d, isScale=False), + 'shape_images': shape_images, + 'shape_detail_images': shape_detail_images, + } + + if self.cfg.model.use_tex: + visdict['rendered_images'] = ops['images'] + + return opdict, visdict + + def render_uv(self, codedict, exp, pose): + + images = codedict['images'] + batch_size = images.shape[0] + + ## decode + verts, landmarks2d, landmarks3d = self.flame(shape_params=codedict['shape'], expression_params=exp, + pose_params=pose) + + if self.cfg.model.use_tex: + albedo = self.flametex(codedict['tex']) + else: + albedo = torch.zeros([batch_size, 3, self.uv_size, self.uv_size], device=images.device) + + ## projection + trans_verts = util.batch_orth_proj(verts, codedict['cam']) + trans_verts[:, :, 1:] = -trans_verts[:, :, 1:] + + ## rendering + ops = self.render(verts, trans_verts, albedo, codedict['light']) + + return ops['grid'] + + def render_uv_details(self, codedict, exp, pose): + + images = codedict['images'] + batch_size = images.shape[0] + + ## decode + verts, landmarks2d, landmarks3d = self.flame(shape_params=codedict['shape'], expression_params=exp, + pose_params=pose) + uv_z = self.D_detail(torch.cat([pose[:, 3:], exp, codedict['detail']], dim=1)) + + if self.cfg.model.use_tex: + albedo = self.flametex(codedict['tex']) + else: + albedo = torch.zeros([batch_size, 3, self.uv_size, self.uv_size], device=images.device) + + ## projection + trans_verts = util.batch_orth_proj(verts, codedict['cam']); + trans_verts[:, :, 1:] = -trans_verts[:, :, 1:] + + ## rendering + ops = self.render(verts, trans_verts, albedo, codedict['light']) + uv_detail_normals = self.displacement2normal(uv_z, verts, ops['normals']) + + detail_normal_images = F.grid_sample(uv_detail_normals, ops['grid'], align_corners=False) * ops['alpha_images'] + + return ops['grid'], detail_normal_images + + def render_mask(self, grid): + + # image = Image.open('./third/DECA/data/mask_face.png') + # image = Image.open('./third/DECA/data/mask_3.png') + current_path = os.getcwd() + try : + head_tail = os.path.split(current_path) + image = Image.open(head_tail[0] + '/third/DECA/data/mask_mouth_2.png') + + except FileNotFoundError: + image = Image.open(current_path + '/third/DECA/data/mask_mouth_2.png') + + trans = transforms.ToTensor() + tmp = trans(image) + image_tensor = trans(image).cuda().unsqueeze(0) + mask_1 = (grid[:, :, :, 0:1] != 0.0) & (grid[:, :, :, 1:2] != 0.0) + mask_1 = mask_1.permute(0, 3, 1, 2) + + mask = F.grid_sample(image_tensor, grid, align_corners=False, padding_mode='zeros') + mask = mask_1 * mask + + return mask[0,0,:,:] + + def visualize(self, visdict, size=None): + grids = {} + if size is None: + size = self.image_size + for key in visdict: + grids[key] = torchvision.utils.make_grid(F.interpolate(visdict[key], [size, size])).detach().cpu() + grid = torch.cat(list(grids.values()), 2) + grid_image = (grid.numpy().transpose(1,2,0).copy()*255)[:,:,[2,1,0]] + grid_image = np.minimum(np.maximum(grid_image, 0), 255).astype(np.uint8) + return grid_image + + def save_obj(self, filename, opdict): + ''' + vertices: [nv, 3], tensor + texture: [3, h, w], tensor + ''' + i = 0 + vertices = opdict['vertices'][i].cpu().numpy() + faces = self.render.faces[0].cpu().numpy() + texture = util.tensor2image(opdict['uv_texture_gt'][i]) + uvcoords = self.render.raw_uvcoords[0].cpu().numpy() + uvfaces = self.render.uvfaces[0].cpu().numpy() + # save coarse mesh, with texture and normal map + normal_map = util.tensor2image(opdict['uv_detail_normals'][i]*0.5 + 0.5) 
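+        # (added comment) two OBJs are written: the coarse FLAME mesh with texture and
+        # normal map here, then a dense "detail" mesh with the displacement map baked in.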
+        util.write_obj(filename, vertices, faces,
+                        texture=texture,
+                        uvcoords=uvcoords,
+                        uvfaces=uvfaces,
+                        normal_map=normal_map)
+        # upsample mesh, save detailed mesh
+        texture = texture[:,:,[2,1,0]]
+        normals = opdict['normals'][i].cpu().numpy()
+        displacement_map = opdict['displacement_map'][i].cpu().numpy().squeeze()
+        dense_vertices, dense_colors, dense_faces = util.upsample_mesh(vertices, normals, faces, displacement_map,
+                                                                       texture, self.dense_template)
+        util.write_obj(filename.replace('.obj', '_detail.obj'),
+                        dense_vertices,
+                        dense_faces,
+                        colors=dense_colors,
+                        inverse_face_order=True)
\ No newline at end of file
diff --git a/motion-gan-pipeline/preprocessing/third/DECA/decalib/models/FLAME.py b/motion-gan-pipeline/preprocessing/third/DECA/decalib/models/FLAME.py
new file mode 100755
index 0000000..7e2821a
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/DECA/decalib/models/FLAME.py
@@ -0,0 +1,262 @@
+# -*- coding: utf-8 -*-
+#
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# Using this computer program means that you agree to the terms
+# in the LICENSE file included with this software distribution.
+# Any use not explicitly granted by the LICENSE is prohibited.
+#
+# Copyright©2019 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# For comments or questions, please email us at deca@tue.mpg.de
+# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de
+
+import torch
+import torch.nn as nn
+import numpy as np
+import pickle
+import torch.nn.functional as F
+
+from .lbs import lbs, batch_rodrigues, vertices2landmarks, rot_mat_to_euler
+
+def to_tensor(array, dtype=torch.float32):
+    if 'torch.tensor' not in str(type(array)):
+        return torch.tensor(array, dtype=dtype)
+    return array  # already a tensor; the old version fell through and returned None
+def to_np(array, dtype=np.float32):
+    if 'scipy.sparse' in str(type(array)):
+        array = array.todense()
+    return np.array(array, dtype=dtype)
+
+class Struct(object):
+    def __init__(self, **kwargs):
+        for key, val in kwargs.items():
+            setattr(self, key, val)
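+
+# Behaviour sketch (added comment): the helpers above normalise whatever the FLAME
+# pickle holds (numpy / scipy.sparse arrays) into torch-friendly types:
+#
+#     raw = {'v_template': np.zeros((5023, 3))}   # the pickle is a plain dict
+#     model = Struct(**raw)                       # dict keys -> attributes
+#     verts = to_tensor(to_np(model.v_template))  # float32 torch tensor, [5023, 3]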
+
+class FLAME(nn.Module):
+    """
+    borrowed from https://github.com/soubhiksanyal/FLAME_PyTorch/blob/master/FLAME.py
+    Given flame parameters this class generates a differentiable FLAME function
+    which outputs a mesh and 2D/3D facial landmarks
+    """
+    def __init__(self, config):
+        super(FLAME, self).__init__()
+        print("creating the FLAME Decoder")
+        with open(config.flame_model_path, 'rb') as f:
+            ss = pickle.load(f, encoding='latin1')
+            flame_model = Struct(**ss)
+
+        self.dtype = torch.float32
+        self.register_buffer('faces_tensor', to_tensor(to_np(flame_model.f, dtype=np.int64), dtype=torch.long))
+        # The vertices of the template model
+        self.register_buffer('v_template', to_tensor(to_np(flame_model.v_template), dtype=self.dtype))
+        # The shape components and expression
+        shapedirs = to_tensor(to_np(flame_model.shapedirs), dtype=self.dtype)
+        shapedirs = torch.cat([shapedirs[:,:,:config.n_shape], shapedirs[:,:,300:300+config.n_exp]], 2)
+        self.register_buffer('shapedirs', shapedirs)
+        # The pose components
+        num_pose_basis = flame_model.posedirs.shape[-1]
+        posedirs = np.reshape(flame_model.posedirs, [-1, num_pose_basis]).T
+        self.register_buffer('posedirs', to_tensor(to_np(posedirs), dtype=self.dtype))
+        #
+        self.register_buffer('J_regressor', to_tensor(to_np(flame_model.J_regressor), dtype=self.dtype))
+        parents = to_tensor(to_np(flame_model.kintree_table[0])).long(); parents[0] = -1
+        self.register_buffer('parents', parents)
+        self.register_buffer('lbs_weights', to_tensor(to_np(flame_model.weights), dtype=self.dtype))
+
+        # Fixing Eyeball and neck rotation
+        default_eyball_pose = torch.zeros([1, 6], dtype=self.dtype, requires_grad=False)
+        self.register_parameter('eye_pose', nn.Parameter(default_eyball_pose,
+                                                         requires_grad=False))
+        default_neck_pose = torch.zeros([1, 3], dtype=self.dtype, requires_grad=False)
+        self.register_parameter('neck_pose', nn.Parameter(default_neck_pose,
+                                                          requires_grad=False))
+
+        # Static and Dynamic Landmark embeddings for FLAME
+        lmk_embeddings = np.load(config.flame_lmk_embedding_path, allow_pickle=True, encoding='latin1')
+        lmk_embeddings = lmk_embeddings[()]
+        self.register_buffer('lmk_faces_idx', torch.from_numpy(lmk_embeddings['static_lmk_faces_idx']).long())
+        self.register_buffer('lmk_bary_coords', torch.from_numpy(lmk_embeddings['static_lmk_bary_coords']).to(self.dtype))
+        self.register_buffer('dynamic_lmk_faces_idx', lmk_embeddings['dynamic_lmk_faces_idx'].long())
+        self.register_buffer('dynamic_lmk_bary_coords', lmk_embeddings['dynamic_lmk_bary_coords'].to(self.dtype))
+        self.register_buffer('full_lmk_faces_idx', torch.from_numpy(lmk_embeddings['full_lmk_faces_idx']).long())
+        self.register_buffer('full_lmk_bary_coords', torch.from_numpy(lmk_embeddings['full_lmk_bary_coords']).to(self.dtype))
+
+        neck_kin_chain = []; NECK_IDX=1
+        curr_idx = torch.tensor(NECK_IDX, dtype=torch.long)
+        while curr_idx != -1:
+            neck_kin_chain.append(curr_idx)
+            curr_idx = self.parents[curr_idx]
+        self.register_buffer('neck_kin_chain', torch.stack(neck_kin_chain))
+
+    def _find_dynamic_lmk_idx_and_bcoords(self, pose, dynamic_lmk_faces_idx,
+                                          dynamic_lmk_b_coords,
+                                          neck_kin_chain, dtype=torch.float32):
+        """
+        Selects the face contour depending on the relative position of the head
+        Input:
+            vertices: N X num_of_vertices X 3
+            pose: N X full pose
+            dynamic_lmk_faces_idx: The list of contour face indexes
+            dynamic_lmk_b_coords: The list of contour barycentric weights
+            neck_kin_chain: The tree to consider for the relative rotation
+            dtype: Data type
+        return:
+            The contour face indexes and the corresponding barycentric weights
+        """
+
+        batch_size = pose.shape[0]
+
+        aa_pose = torch.index_select(pose.view(batch_size, -1, 3), 1,
+                                     neck_kin_chain)
+        rot_mats = batch_rodrigues(
+            aa_pose.view(-1, 3), dtype=dtype).view(batch_size, -1, 3, 3)
+
+        rel_rot_mat = torch.eye(3, device=pose.device,
+                                dtype=dtype).unsqueeze_(dim=0).expand(batch_size, -1, -1)
+        for idx in range(len(neck_kin_chain)):
+            rel_rot_mat = torch.bmm(rot_mats[:, idx], rel_rot_mat)
+
+        y_rot_angle = torch.round(
+            torch.clamp(rot_mat_to_euler(rel_rot_mat) * 180.0 / np.pi,
+                        max=39)).to(dtype=torch.long)
+
+        neg_mask = y_rot_angle.lt(0).to(dtype=torch.long)
+        mask = y_rot_angle.lt(-39).to(dtype=torch.long)
+        neg_vals = mask * 78 + (1 - mask) * (39 - y_rot_angle)
+        y_rot_angle = (neg_mask * neg_vals +
+                       (1 - neg_mask) * y_rot_angle)
+
+        dyn_lmk_faces_idx = torch.index_select(dynamic_lmk_faces_idx,
+                                               0, y_rot_angle)
+        dyn_lmk_b_coords = torch.index_select(dynamic_lmk_b_coords,
+                                              0, y_rot_angle)
+        return dyn_lmk_faces_idx, dyn_lmk_b_coords
+
+    def _vertices2landmarks(self, vertices, faces, lmk_faces_idx, lmk_bary_coords):
+        """
+        Calculates landmarks by barycentric interpolation
+        Input:
+            vertices: torch.tensor NxVx3, dtype = torch.float32
+                The tensor of input vertices
+            faces: torch.tensor (N*F)x3, dtype = torch.long
+                The faces of the mesh
+            lmk_faces_idx: torch.tensor N X L, dtype = torch.long
+                The tensor with the indices of the faces used to calculate the
+                landmarks.
+            lmk_bary_coords: torch.tensor N X L X 3, dtype = torch.float32
+                The tensor of barycentric coordinates that are used to interpolate
+                the landmarks
+
+        Returns:
+            landmarks: torch.tensor NxLx3, dtype = torch.float32
+                The coordinates of the landmarks for each mesh in the batch
+        """
+        # Extract the indices of the vertices for each face
+        # NxLx3
+        batch_size, num_verts = vertices.shape[:2]  # was `[:dd2]`, a typo that raised NameError
+        lmk_faces = torch.index_select(faces, 0, lmk_faces_idx.view(-1)).view(
+            1, -1, 3).view(batch_size, lmk_faces_idx.shape[1], -1)
+
+        lmk_faces += torch.arange(batch_size, dtype=torch.long).view(-1, 1, 1).to(
+            device=vertices.device) * num_verts
+
+        lmk_vertices = vertices.view(-1, 3)[lmk_faces]
+        landmarks = torch.einsum('blfi,blf->bli', [lmk_vertices, lmk_bary_coords])
+        return landmarks
+
+    def seletec_3d68(self, vertices):
+        landmarks3d = vertices2landmarks(vertices, self.faces_tensor,
+                                         self.full_lmk_faces_idx.repeat(vertices.shape[0], 1),
+                                         self.full_lmk_bary_coords.repeat(vertices.shape[0], 1, 1))
+        return landmarks3d
+
+    def forward(self, shape_params=None, expression_params=None, pose_params=None, eye_pose_params=None):
+        """
+        Input:
+            shape_params: N X number of shape parameters
+            expression_params: N X number of expression parameters
+            pose_params: N X number of pose parameters (6)
+        return:
+            vertices: N X V X 3
+            landmarks: N X number of landmarks X 3
+        """
+        batch_size = shape_params.shape[0]
+        if eye_pose_params is None:
+            eye_pose_params = self.eye_pose.expand(batch_size, -1)
+        betas = torch.cat([shape_params, expression_params], dim=1)
+        full_pose = torch.cat([pose_params[:, :3], self.neck_pose.expand(batch_size, -1), pose_params[:, 3:], eye_pose_params], dim=1)
+        template_vertices = self.v_template.unsqueeze(0).expand(batch_size, -1, -1)
+
+        vertices, _ = lbs(betas, full_pose, template_vertices,
+                          self.shapedirs, self.posedirs,
+                          self.J_regressor, self.parents,
+                          self.lbs_weights, dtype=self.dtype)
+
+        lmk_faces_idx = self.lmk_faces_idx.unsqueeze(dim=0).expand(batch_size, -1)
+        lmk_bary_coords = self.lmk_bary_coords.unsqueeze(dim=0).expand(batch_size, -1, -1)
+
+        dyn_lmk_faces_idx, dyn_lmk_bary_coords = self._find_dynamic_lmk_idx_and_bcoords(
+            full_pose, self.dynamic_lmk_faces_idx,
+            self.dynamic_lmk_bary_coords,
+            self.neck_kin_chain, dtype=self.dtype)
+        lmk_faces_idx = torch.cat([dyn_lmk_faces_idx, lmk_faces_idx], 1)
+        lmk_bary_coords = torch.cat([dyn_lmk_bary_coords, lmk_bary_coords], 1)
+
+        landmarks2d = vertices2landmarks(vertices, self.faces_tensor,
+                                         lmk_faces_idx,
+                                         lmk_bary_coords)
+        bz = vertices.shape[0]
+        landmarks3d = vertices2landmarks(vertices, self.faces_tensor,
+                                         self.full_lmk_faces_idx.repeat(bz, 1),
+                                         self.full_lmk_bary_coords.repeat(bz, 1, 1))
+        return vertices, landmarks2d, landmarks3d
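+
+# Usage sketch (added comment, not upstream DECA; assumes the FLAME assets referenced
+# by decalib/utils/config.py are on disk and sizes follow its defaults):
+#
+#     flame = FLAME(cfg.model)              # n_shape=100, n_exp=50, n_pose=6
+#     shape = torch.zeros(1, 100)           # identity coefficients
+#     exp = torch.zeros(1, 50)              # expression coefficients
+#     pose = torch.zeros(1, 6)              # global rotation (3) + jaw (3), axis-angle
+#     verts, lmk2d, lmk3d = flame(shape, exp, pose)
+#     # verts: [1, 5023, 3]; lmk2d / lmk3d: [1, 68, 3], model space (pre-projection)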
+
+class FLAMETex(nn.Module):
+    """
+    FLAME texture:
+    https://github.com/TimoBolkart/TF_FLAME/blob/ade0ab152300ec5f0e8555d6765411555c5ed43d/sample_texture.py#L64
+    FLAME texture converted from BFM:
+    https://github.com/TimoBolkart/BFM_to_FLAME
+    """
+    def __init__(self, config):
+        super(FLAMETex, self).__init__()
+        if config.tex_type == 'BFM':
+            mu_key = 'MU'
+            pc_key = 'PC'
+            n_pc = 199
+            tex_path = config.tex_path
+            tex_space = np.load(tex_path)
+            texture_mean = tex_space[mu_key].reshape(1, -1)
+            texture_basis = tex_space[pc_key].reshape(-1, n_pc)
+
+        elif config.tex_type == 'FLAME':
+            mu_key = 'mean'
+            pc_key = 'tex_dir'
+            n_pc = 200
+            tex_path = config.flame_tex_path
+            tex_space = np.load(tex_path)
+            texture_mean = tex_space[mu_key].reshape(1, -1)/255.
+            texture_basis = tex_space[pc_key].reshape(-1, n_pc)/255.
+        else:
+            print(f'texture type {config.tex_type} does not exist!')
+            raise NotImplementedError
+
+        n_tex = config.n_tex
+        num_components = texture_basis.shape[1]
+        texture_mean = torch.from_numpy(texture_mean).float()[None,...]
+        texture_basis = torch.from_numpy(texture_basis[:,:n_tex]).float()[None,...]
+        self.register_buffer('texture_mean', texture_mean)
+        self.register_buffer('texture_basis', texture_basis)
+
+    def forward(self, texcode):
+        '''
+        texcode: [batchsize, n_tex]
+        texture: [bz, 3, 256, 256], range: 0-1
+        '''
+        texture = self.texture_mean + (self.texture_basis*texcode[:,None,:]).sum(-1)
+        texture = texture.reshape(texcode.shape[0], 512, 512, 3).permute(0,3,1,2)
+        texture = F.interpolate(texture, [256, 256])
+        texture = texture[:,[2,1,0], :,:]
+        return texture
\ No newline at end of file
diff --git a/motion-gan-pipeline/preprocessing/third/DECA/decalib/models/decoders.py b/motion-gan-pipeline/preprocessing/third/DECA/decalib/models/decoders.py
new file mode 100755
index 0000000..b9af7e2
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/DECA/decalib/models/decoders.py
@@ -0,0 +1,56 @@
+# -*- coding: utf-8 -*-
+#
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# Using this computer program means that you agree to the terms
+# in the LICENSE file included with this software distribution.
+# Any use not explicitly granted by the LICENSE is prohibited.
+#
+# Copyright©2019 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+# +# For comments or questions, please email us at deca@tue.mpg.de +# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de + +import torch +import torch.nn as nn + +class Generator(nn.Module): + def __init__(self, latent_dim=100, out_channels=1, out_scale=0.01, sample_mode = 'bilinear'): + super(Generator, self).__init__() + self.out_scale = out_scale + + self.init_size = 32 // 4 # Initial size before upsampling + self.l1 = nn.Sequential(nn.Linear(latent_dim, 128 * self.init_size ** 2)) + self.conv_blocks = nn.Sequential( + nn.BatchNorm2d(128), + nn.Upsample(scale_factor=2, mode=sample_mode), #16 + nn.Conv2d(128, 128, 3, stride=1, padding=1), + nn.BatchNorm2d(128, 0.8), + nn.LeakyReLU(0.2, inplace=True), + nn.Upsample(scale_factor=2, mode=sample_mode), #32 + nn.Conv2d(128, 64, 3, stride=1, padding=1), + nn.BatchNorm2d(64, 0.8), + nn.LeakyReLU(0.2, inplace=True), + nn.Upsample(scale_factor=2, mode=sample_mode), #64 + nn.Conv2d(64, 64, 3, stride=1, padding=1), + nn.BatchNorm2d(64, 0.8), + nn.LeakyReLU(0.2, inplace=True), + nn.Upsample(scale_factor=2, mode=sample_mode), #128 + nn.Conv2d(64, 32, 3, stride=1, padding=1), + nn.BatchNorm2d(32, 0.8), + nn.LeakyReLU(0.2, inplace=True), + nn.Upsample(scale_factor=2, mode=sample_mode), #256 + nn.Conv2d(32, 16, 3, stride=1, padding=1), + nn.BatchNorm2d(16, 0.8), + nn.LeakyReLU(0.2, inplace=True), + nn.Conv2d(16, out_channels, 3, stride=1, padding=1), + nn.Tanh(), + ) + + def forward(self, noise): + out = self.l1(noise) + out = out.view(out.shape[0], 128, self.init_size, self.init_size) + img = self.conv_blocks(out) + return img*self.out_scale \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/DECA/decalib/models/encoders.py b/motion-gan-pipeline/preprocessing/third/DECA/decalib/models/encoders.py new file mode 100755 index 0000000..48937ad --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/DECA/decalib/models/encoders.py @@ -0,0 +1,40 @@ +# -*- coding: utf-8 -*- +# +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# Using this computer program means that you agree to the terms +# in the LICENSE file included with this software distribution. +# Any use not explicitly granted by the LICENSE is prohibited. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# For comments or questions, please email us at deca@tue.mpg.de +# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de + +import numpy as np +import torch.nn as nn +import torch +import torch.nn.functional as F +from . 
import resnet
+
+class ResnetEncoder(nn.Module):
+    def __init__(self, outsize, last_op=None):
+        super(ResnetEncoder, self).__init__()
+        feature_size = 2048
+        self.encoder = resnet.load_ResNet50Model() #out: 2048
+        ### regressor
+        self.layers = nn.Sequential(
+            nn.Linear(feature_size, 1024),
+            nn.ReLU(),
+            nn.Linear(1024, outsize)
+        )
+        self.last_op = last_op
+
+    def forward(self, inputs):
+        features = self.encoder(inputs)
+        parameters = self.layers(features)
+        if self.last_op:
+            parameters = self.last_op(parameters)
+        return parameters
diff --git a/motion-gan-pipeline/preprocessing/third/DECA/decalib/models/lbs.py b/motion-gan-pipeline/preprocessing/third/DECA/decalib/models/lbs.py
new file mode 100755
index 0000000..df55ab3
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/DECA/decalib/models/lbs.py
@@ -0,0 +1,378 @@
+# -*- coding: utf-8 -*-
+
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# You can only use this computer program if you have closed
+# a license agreement with MPG or you get the right to use the computer
+# program from someone who is authorized to grant you that right.
+# Any use of the computer program without a valid license is prohibited and
+# liable to prosecution.
+#
+# Copyright©2019 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# Contact: ps-license@tuebingen.mpg.de
+
+from __future__ import absolute_import
+from __future__ import print_function
+from __future__ import division
+
+import numpy as np
+
+import torch
+import torch.nn.functional as F
+
+def rot_mat_to_euler(rot_mats):
+    # Calculates rotation matrix to euler angles
+    # Careful for extreme cases of euler angles like [0.0, pi, 0.0]
+
+    sy = torch.sqrt(rot_mats[:, 0, 0] * rot_mats[:, 0, 0] +
+                    rot_mats[:, 1, 0] * rot_mats[:, 1, 0])
+    return torch.atan2(-rot_mats[:, 2, 0], sy)
+
+def find_dynamic_lmk_idx_and_bcoords(vertices, pose, dynamic_lmk_faces_idx,
+                                     dynamic_lmk_b_coords,
+                                     neck_kin_chain, dtype=torch.float32):
+    ''' Compute the faces, barycentric coordinates for the dynamic landmarks
+
+
+        To do so, we first compute the rotation of the neck around the y-axis
+        and then use a pre-computed look-up table to find the faces and the
+        barycentric coordinates that will be used.
+
+        Special thanks to Soubhik Sanyal (soubhik.sanyal@tuebingen.mpg.de)
+        for providing the original TensorFlow implementation and for the LUT.
+
+        Parameters
+        ----------
+        vertices: torch.tensor BxVx3, dtype = torch.float32
+            The tensor of input vertices
+        pose: torch.tensor Bx(Jx3), dtype = torch.float32
+            The current pose of the body model
+        dynamic_lmk_faces_idx: torch.tensor L, dtype = torch.long
+            The look-up table from neck rotation to faces
+        dynamic_lmk_b_coords: torch.tensor Lx3, dtype = torch.float32
+            The look-up table from neck rotation to barycentric coordinates
+        neck_kin_chain: list
+            A python list that contains the indices of the joints that form the
+            kinematic chain of the neck.
+        dtype: torch.dtype, optional
+
+        Returns
+        -------
+        dyn_lmk_faces_idx: torch.tensor, dtype = torch.long
+            A tensor of size BxL that contains the indices of the faces that
+            will be used to compute the current dynamic landmarks.
+        dyn_lmk_b_coords: torch.tensor, dtype = torch.float32
+            A tensor of size BxLx3 that contains the barycentric coordinates that
+            will be used to compute the current dynamic landmarks.
+ ''' + + batch_size = vertices.shape[0] + + aa_pose = torch.index_select(pose.view(batch_size, -1, 3), 1, + neck_kin_chain) + rot_mats = batch_rodrigues( + aa_pose.view(-1, 3), dtype=dtype).view(batch_size, -1, 3, 3) + + rel_rot_mat = torch.eye(3, device=vertices.device, + dtype=dtype).unsqueeze_(dim=0) + for idx in range(len(neck_kin_chain)): + rel_rot_mat = torch.bmm(rot_mats[:, idx], rel_rot_mat) + + y_rot_angle = torch.round( + torch.clamp(-rot_mat_to_euler(rel_rot_mat) * 180.0 / np.pi, + max=39)).to(dtype=torch.long) + neg_mask = y_rot_angle.lt(0).to(dtype=torch.long) + mask = y_rot_angle.lt(-39).to(dtype=torch.long) + neg_vals = mask * 78 + (1 - mask) * (39 - y_rot_angle) + y_rot_angle = (neg_mask * neg_vals + + (1 - neg_mask) * y_rot_angle) + + dyn_lmk_faces_idx = torch.index_select(dynamic_lmk_faces_idx, + 0, y_rot_angle) + dyn_lmk_b_coords = torch.index_select(dynamic_lmk_b_coords, + 0, y_rot_angle) + + return dyn_lmk_faces_idx, dyn_lmk_b_coords + + +def vertices2landmarks(vertices, faces, lmk_faces_idx, lmk_bary_coords): + ''' Calculates landmarks by barycentric interpolation + + Parameters + ---------- + vertices: torch.tensor BxVx3, dtype = torch.float32 + The tensor of input vertices + faces: torch.tensor Fx3, dtype = torch.long + The faces of the mesh + lmk_faces_idx: torch.tensor L, dtype = torch.long + The tensor with the indices of the faces used to calculate the + landmarks. + lmk_bary_coords: torch.tensor Lx3, dtype = torch.float32 + The tensor of barycentric coordinates that are used to interpolate + the landmarks + + Returns + ------- + landmarks: torch.tensor BxLx3, dtype = torch.float32 + The coordinates of the landmarks for each mesh in the batch + ''' + # Extract the indices of the vertices for each face + # BxLx3 + batch_size, num_verts = vertices.shape[:2] + device = vertices.device + + lmk_faces = torch.index_select(faces, 0, lmk_faces_idx.view(-1)).view( + batch_size, -1, 3) + + lmk_faces += torch.arange( + batch_size, dtype=torch.long, device=device).view(-1, 1, 1) * num_verts + + lmk_vertices = vertices.view(-1, 3)[lmk_faces].view( + batch_size, -1, 3, 3) + + landmarks = torch.einsum('blfi,blf->bli', [lmk_vertices, lmk_bary_coords]) + return landmarks + + +def lbs(betas, pose, v_template, shapedirs, posedirs, J_regressor, parents, + lbs_weights, pose2rot=True, dtype=torch.float32): + ''' Performs Linear Blend Skinning with the given shape and pose parameters + + Parameters + ---------- + betas : torch.tensor BxNB + The tensor of shape parameters + pose : torch.tensor Bx(J + 1) * 3 + The pose parameters in axis-angle format + v_template torch.tensor BxVx3 + The template mesh that will be deformed + shapedirs : torch.tensor 1xNB + The tensor of PCA shape displacements + posedirs : torch.tensor Px(V * 3) + The pose PCA coefficients + J_regressor : torch.tensor JxV + The regressor array that is used to calculate the joints from + the position of the vertices + parents: torch.tensor J + The array that describes the kinematic tree for the model + lbs_weights: torch.tensor N x V x (J + 1) + The linear blend skinning weights that represent how much the + rotation matrix of each part affects each vertex + pose2rot: bool, optional + Flag on whether to convert the input pose tensor to rotation + matrices. The default value is True. 
If False, then the pose tensor + should already contain rotation matrices and have a size of + Bx(J + 1)x9 + dtype: torch.dtype, optional + + Returns + ------- + verts: torch.tensor BxVx3 + The vertices of the mesh after applying the shape and pose + displacements. + joints: torch.tensor BxJx3 + The joints of the model + ''' + + batch_size = max(betas.shape[0], pose.shape[0]) + device = betas.device + + # Add shape contribution + v_shaped = v_template + blend_shapes(betas, shapedirs) + + # Get the joints + # NxJx3 array + J = vertices2joints(J_regressor, v_shaped) + + # 3. Add pose blend shapes + # N x J x 3 x 3 + ident = torch.eye(3, dtype=dtype, device=device) + if pose2rot: + rot_mats = batch_rodrigues( + pose.view(-1, 3), dtype=dtype).view([batch_size, -1, 3, 3]) + + pose_feature = (rot_mats[:, 1:, :, :] - ident).view([batch_size, -1]) + # (N x P) x (P, V * 3) -> N x V x 3 + pose_offsets = torch.matmul(pose_feature, posedirs) \ + .view(batch_size, -1, 3) + else: + pose_feature = pose[:, 1:].view(batch_size, -1, 3, 3) - ident + rot_mats = pose.view(batch_size, -1, 3, 3) + + pose_offsets = torch.matmul(pose_feature.view(batch_size, -1), + posedirs).view(batch_size, -1, 3) + + v_posed = pose_offsets + v_shaped + # 4. Get the global joint location + J_transformed, A = batch_rigid_transform(rot_mats, J, parents, dtype=dtype) + + # 5. Do skinning: + # W is N x V x (J + 1) + W = lbs_weights.unsqueeze(dim=0).expand([batch_size, -1, -1]) + # (N x V x (J + 1)) x (N x (J + 1) x 16) + num_joints = J_regressor.shape[0] + T = torch.matmul(W, A.view(batch_size, num_joints, 16)) \ + .view(batch_size, -1, 4, 4) + + homogen_coord = torch.ones([batch_size, v_posed.shape[1], 1], + dtype=dtype, device=device) + v_posed_homo = torch.cat([v_posed, homogen_coord], dim=2) + v_homo = torch.matmul(T, torch.unsqueeze(v_posed_homo, dim=-1)) + + verts = v_homo[:, :, :3, 0] + + return verts, J_transformed + + +def vertices2joints(J_regressor, vertices): + ''' Calculates the 3D joint locations from the vertices + + Parameters + ---------- + J_regressor : torch.tensor JxV + The regressor array that is used to calculate the joints from the + position of the vertices + vertices : torch.tensor BxVx3 + The tensor of mesh vertices + + Returns + ------- + torch.tensor BxJx3 + The location of the joints + ''' + + return torch.einsum('bik,ji->bjk', [vertices, J_regressor]) + + +def blend_shapes(betas, shape_disps): + ''' Calculates the per vertex displacement due to the blend shapes + + + Parameters + ---------- + betas : torch.tensor Bx(num_betas) + Blend shape coefficients + shape_disps: torch.tensor Vx3x(num_betas) + Blend shapes + + Returns + ------- + torch.tensor BxVx3 + The per-vertex displacement due to shape deformation + ''' + + # Displacement[b, m, k] = sum_{l} betas[b, l] * shape_disps[m, k, l] + # i.e. Multiply each shape displacement by its corresponding beta and + # then sum them. 
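+    # (added comment) einsum contracts the basis axis: betas [B, L] x shape_disps [V, 3, L] -> [B, V, 3]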
+    blend_shape = torch.einsum('bl,mkl->bmk', [betas, shape_disps])
+    return blend_shape
+
+
+def batch_rodrigues(rot_vecs, epsilon=1e-8, dtype=torch.float32):
+    ''' Calculates the rotation matrices for a batch of rotation vectors
+        Parameters
+        ----------
+        rot_vecs: torch.tensor Nx3
+            array of N axis-angle vectors
+        Returns
+        -------
+        R: torch.tensor Nx3x3
+            The rotation matrices for the given axis-angle parameters
+    '''
+
+    batch_size = rot_vecs.shape[0]
+    device = rot_vecs.device
+
+    angle = torch.norm(rot_vecs + 1e-8, dim=1, keepdim=True)
+    rot_dir = rot_vecs / angle
+
+    cos = torch.unsqueeze(torch.cos(angle), dim=1)
+    sin = torch.unsqueeze(torch.sin(angle), dim=1)
+
+    # Bx1 arrays
+    rx, ry, rz = torch.split(rot_dir, 1, dim=1)
+    K = torch.zeros((batch_size, 3, 3), dtype=dtype, device=device)
+
+    zeros = torch.zeros((batch_size, 1), dtype=dtype, device=device)
+    K = torch.cat([zeros, -rz, ry, rz, zeros, -rx, -ry, rx, zeros], dim=1) \
+        .view((batch_size, 3, 3))
+
+    ident = torch.eye(3, dtype=dtype, device=device).unsqueeze(dim=0)
+    rot_mat = ident + sin * K + (1 - cos) * torch.bmm(K, K)
+    return rot_mat
+
+
+def transform_mat(R, t):
+    ''' Creates a batch of transformation matrices
+        Args:
+            - R: Bx3x3 array of a batch of rotation matrices
+            - t: Bx3x1 array of a batch of translation vectors
+        Returns:
+            - T: Bx4x4 Transformation matrix
+    '''
+    # No padding left or right, only add an extra row
+    return torch.cat([F.pad(R, [0, 0, 0, 1]),
+                      F.pad(t, [0, 0, 0, 1], value=1)], dim=2)
+
+
+def batch_rigid_transform(rot_mats, joints, parents, dtype=torch.float32):
+    """
+    Applies a batch of rigid transformations to the joints
+
+    Parameters
+    ----------
+    rot_mats : torch.tensor BxNx3x3
+        Tensor of rotation matrices
+    joints : torch.tensor BxNx3
+        Locations of joints
+    parents : torch.tensor BxN
+        The kinematic tree of each object
+    dtype : torch.dtype, optional:
+        The data type of the created tensors, the default is torch.float32
+
+    Returns
+    -------
+    posed_joints : torch.tensor BxNx3
+        The locations of the joints after applying the pose rotations
+    rel_transforms : torch.tensor BxNx4x4
+        The relative (with respect to the root joint) rigid transformations
+        for all the joints
+    """
+
+    joints = torch.unsqueeze(joints, dim=-1)
+
+    rel_joints = joints.clone()
+    rel_joints[:, 1:] -= joints[:, parents[1:]]
+
+    # transforms_mat = transform_mat(
+    #     rot_mats.view(-1, 3, 3),
+    #     rel_joints.view(-1, 3, 1)).view(-1, joints.shape[1], 4, 4)
+    transforms_mat = transform_mat(
+        rot_mats.view(-1, 3, 3),
+        rel_joints.reshape(-1, 3, 1)).reshape(-1, joints.shape[1], 4, 4)
+
+    transform_chain = [transforms_mat[:, 0]]
+    for i in range(1, parents.shape[0]):
+        # Subtract the joint location at the rest pose
+        # No need for rotation, since it's identity when at rest
+        curr_res = torch.matmul(transform_chain[parents[i]],
+                                transforms_mat[:, i])
+        transform_chain.append(curr_res)
+
+    transforms = torch.stack(transform_chain, dim=1)
+
+    # The last column of the transformations contains the posed joints
+    posed_joints = transforms[:, :, :3, 3]
+
+    joints_homogen = F.pad(joints, [0, 0, 0, 1])
+
+    rel_transforms = transforms - F.pad(
+        torch.matmul(transforms, joints_homogen), [3, 0, 0, 0, 0, 0, 0, 0])
+
+    return posed_joints, rel_transforms
\ No newline at end of file
diff --git a/motion-gan-pipeline/preprocessing/third/DECA/decalib/models/resnet.py
b/motion-gan-pipeline/preprocessing/third/DECA/decalib/models/resnet.py new file mode 100755 index 0000000..039bab0 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/DECA/decalib/models/resnet.py @@ -0,0 +1,291 @@ +""" +Author: Soubhik Sanyal +Copyright (c) 2019, Soubhik Sanyal +All rights reserved. +Loads different resnet models +""" +''' + file: Resnet.py + date: 2018_05_02 + author: zhangxiong(1025679612@qq.com) + mark: copied from pytorch source code +''' + +import torch.nn as nn +import torch.nn.functional as F +import torch +from torch.nn.parameter import Parameter +import torch.optim as optim +import numpy as np +import math +import torchvision + +class ResNet(nn.Module): + def __init__(self, block, layers, num_classes=1000): + self.inplanes = 64 + super(ResNet, self).__init__() + self.conv1 = nn.Conv2d(3, 64, kernel_size=7, stride=2, padding=3, + bias=False) + self.bn1 = nn.BatchNorm2d(64) + self.relu = nn.ReLU(inplace=True) + self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1) + self.layer1 = self._make_layer(block, 64, layers[0]) + self.layer2 = self._make_layer(block, 128, layers[1], stride=2) + self.layer3 = self._make_layer(block, 256, layers[2], stride=2) + self.layer4 = self._make_layer(block, 512, layers[3], stride=2) + self.avgpool = nn.AvgPool2d(7, stride=1) + # self.fc = nn.Linear(512 * block.expansion, num_classes) + + for m in self.modules(): + if isinstance(m, nn.Conv2d): + n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels + m.weight.data.normal_(0, math.sqrt(2. / n)) + elif isinstance(m, nn.BatchNorm2d): + m.weight.data.fill_(1) + m.bias.data.zero_() + + def _make_layer(self, block, planes, blocks, stride=1): + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + nn.Conv2d(self.inplanes, planes * block.expansion, + kernel_size=1, stride=stride, bias=False), + nn.BatchNorm2d(planes * block.expansion), + ) + + layers = [] + layers.append(block(self.inplanes, planes, stride, downsample)) + self.inplanes = planes * block.expansion + for i in range(1, blocks): + layers.append(block(self.inplanes, planes)) + + return nn.Sequential(*layers) + + def forward(self, x): + x = self.conv1(x) + x = self.bn1(x) + x = self.relu(x) + x = self.maxpool(x) + + x = self.layer1(x) + x = self.layer2(x) + x = self.layer3(x) + x1 = self.layer4(x) + + x2 = self.avgpool(x1) + x2 = x2.view(x2.size(0), -1) + # x = self.fc(x) + ## x2: [bz, 2048] for shape + ## x1: [bz, 2048, 7, 7] for texture + return x2 + +class Bottleneck(nn.Module): + expansion = 4 + + def __init__(self, inplanes, planes, stride=1, downsample=None): + super(Bottleneck, self).__init__() + self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False) + self.bn1 = nn.BatchNorm2d(planes) + self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, + padding=1, bias=False) + self.bn2 = nn.BatchNorm2d(planes) + self.conv3 = nn.Conv2d(planes, planes * 4, kernel_size=1, bias=False) + self.bn3 = nn.BatchNorm2d(planes * 4) + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + +def conv3x3(in_planes, out_planes, stride=1): + """3x3 convolution with 
padding""" + return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, + padding=1, bias=False) + +class BasicBlock(nn.Module): + expansion = 1 + + def __init__(self, inplanes, planes, stride=1, downsample=None): + super(BasicBlock, self).__init__() + self.conv1 = conv3x3(inplanes, planes, stride) + self.bn1 = nn.BatchNorm2d(planes) + self.relu = nn.ReLU(inplace=True) + self.conv2 = conv3x3(planes, planes) + self.bn2 = nn.BatchNorm2d(planes) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + +def copy_parameter_from_resnet(model, resnet_dict): + cur_state_dict = model.state_dict() + # import ipdb; ipdb.set_trace() + for name, param in list(resnet_dict.items())[0:None]: + if name not in cur_state_dict: + print(name, ' not available in reconstructed resnet') + continue + if isinstance(param, Parameter): + param = param.data + try: + cur_state_dict[name].copy_(param) + except: + print(name, ' is inconsistent!') + continue + # print('copy resnet state dict finished!') + # import ipdb; ipdb.set_trace() + +def load_ResNet50Model(): + model = ResNet(Bottleneck, [3, 4, 6, 3]) + copy_parameter_from_resnet(model, torchvision.models.resnet50(pretrained = True).state_dict()) + return model + +def load_ResNet101Model(): + model = ResNet(Bottleneck, [3, 4, 23, 3]) + copy_parameter_from_resnet(model, torchvision.models.resnet101(pretrained = True).state_dict()) + return model + +def load_ResNet152Model(): + model = ResNet(Bottleneck, [3, 8, 36, 3]) + copy_parameter_from_resnet(model, torchvision.models.resnet152(pretrained = True).state_dict()) + return model + +# model.load_state_dict(checkpoint['model_state_dict']) + + +######## Unet + +class DoubleConv(nn.Module): + """(convolution => [BN] => ReLU) * 2""" + + def __init__(self, in_channels, out_channels): + super().__init__() + self.double_conv = nn.Sequential( + nn.Conv2d(in_channels, out_channels, kernel_size=3, padding=1), + nn.BatchNorm2d(out_channels), + nn.ReLU(inplace=True), + nn.Conv2d(out_channels, out_channels, kernel_size=3, padding=1), + nn.BatchNorm2d(out_channels), + nn.ReLU(inplace=True) + ) + + def forward(self, x): + return self.double_conv(x) + + +class Down(nn.Module): + """Downscaling with maxpool then double conv""" + + def __init__(self, in_channels, out_channels): + super().__init__() + self.maxpool_conv = nn.Sequential( + nn.MaxPool2d(2), + DoubleConv(in_channels, out_channels) + ) + + def forward(self, x): + return self.maxpool_conv(x) + + +class Up(nn.Module): + """Upscaling then double conv""" + + def __init__(self, in_channels, out_channels, bilinear=True): + super().__init__() + + # if bilinear, use the normal convolutions to reduce the number of channels + if bilinear: + self.up = nn.Upsample(scale_factor=2, mode='bilinear', align_corners=True) + else: + self.up = nn.ConvTranspose2d(in_channels // 2, in_channels // 2, kernel_size=2, stride=2) + + self.conv = DoubleConv(in_channels, out_channels) + + def forward(self, x1, x2): + x1 = self.up(x1) + # input is CHW + diffY = x2.size()[2] - x1.size()[2] + diffX = x2.size()[3] - x1.size()[3] + + x1 = F.pad(x1, [diffX // 2, diffX - diffX // 2, + diffY // 2, diffY - diffY // 2]) + # if you have padding issues, see + # 
https://github.com/HaiyongJiang/U-Net-Pytorch-Unstructured-Buggy/commit/0e854509c2cea854e247a9c615f175f76fbb2e3a + # https://github.com/xiaopeng-liao/Pytorch-UNet/commit/8ebac70e633bac59fc22bb5195e513d5832fb3bd + x = torch.cat([x2, x1], dim=1) + return self.conv(x) + + +class OutConv(nn.Module): + def __init__(self, in_channels, out_channels): + super(OutConv, self).__init__() + self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=1) + + def forward(self, x): + return self.conv(x) + +class UNet(nn.Module): + def __init__(self, n_channels, n_classes, bilinear=True): + super(UNet, self).__init__() + self.n_channels = n_channels + self.n_classes = n_classes + self.bilinear = bilinear + + self.inc = DoubleConv(n_channels, 64) + self.down1 = Down(64, 128) + self.down2 = Down(128, 256) + self.down3 = Down(256, 512) + self.down4 = Down(512, 512) + self.up1 = Up(1024, 256, bilinear) + self.up2 = Up(512, 128, bilinear) + self.up3 = Up(256, 64, bilinear) + self.up4 = Up(128, 64, bilinear) + self.outc = OutConv(64, n_classes) + + def forward(self, x): + x1 = self.inc(x) + x2 = self.down1(x1) + x3 = self.down2(x2) + x4 = self.down3(x3) + x5 = self.down4(x4) + x = self.up1(x5, x4) + x = self.up2(x, x3) + x = self.up3(x, x2) + x = self.up4(x, x1) + x = F.normalize(x) + return x \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/DECA/decalib/utils/config.py b/motion-gan-pipeline/preprocessing/third/DECA/decalib/utils/config.py new file mode 100644 index 0000000..a44d1e2 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/DECA/decalib/utils/config.py @@ -0,0 +1,79 @@ +''' +Default config for DECA +''' +from yacs.config import CfgNode as CN +import argparse +import yaml +import os + +cfg = CN() + +abs_deca_dir = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..')) +cfg.deca_dir = abs_deca_dir +cfg.device = 'cuda' +cfg.device_id = '0' + +cfg.pretrained_modelpath = os.path.join(cfg.deca_dir, 'data', 'deca_model.tar') + +# ---------------------------------------------------------------------------- # +# Options for Face model +# ---------------------------------------------------------------------------- # +cfg.model = CN() +cfg.model.topology_path = os.path.join(cfg.deca_dir, 'data', 'head_template.obj') +# texture data original from http://files.is.tue.mpg.de/tbolkart/FLAME/FLAME_texture_data.zip +cfg.model.dense_template_path = os.path.join(cfg.deca_dir, 'data', 'texture_data_256.npy') +cfg.model.fixed_displacement_path = os.path.join(cfg.deca_dir, 'data', 'fixed_displacement_256.npy') +cfg.model.flame_model_path = os.path.join(cfg.deca_dir, 'data', 'generic_model.pkl') +cfg.model.flame_lmk_embedding_path = os.path.join(cfg.deca_dir, 'data', 'landmark_embedding.npy') +cfg.model.face_mask_path = os.path.join(cfg.deca_dir, 'data', 'uv_face_mask.png') +cfg.model.face_eye_mask_path = os.path.join(cfg.deca_dir, 'data', 'uv_face_eye_mask.png') +cfg.model.mean_tex_path = os.path.join(cfg.deca_dir, 'data', 'mean_texture.jpg') +cfg.model.tex_path = os.path.join(cfg.deca_dir, 'data', 'FLAME_albedo_from_BFM.npz') +cfg.model.tex_type = 'BFM' # BFM, FLAME, albedoMM +cfg.model.uv_size = 256 +cfg.model.param_list = ['shape', 'tex', 'exp', 'pose', 'cam', 'light'] +cfg.model.n_shape = 100 +cfg.model.n_tex = 50 +cfg.model.n_exp = 50 +cfg.model.n_cam = 3 +cfg.model.n_pose = 6 +cfg.model.n_light = 27 +cfg.model.use_tex = False +cfg.model.jaw_type = 'aa' # default use axis angle, another option: euler + +## details +cfg.model.n_detail = 128 +cfg.model.max_z = 0.01 + 
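+# Example (illustrative only): the defaults above can be overridden from a
+# YAML file through update_cfg() defined below; a file containing e.g.
+# "model: {use_tex: True}" (file name hypothetical) would be applied as:
+#
+#   from decalib.utils.config import get_cfg_defaults, update_cfg
+#   my_cfg = update_cfg(get_cfg_defaults(), 'overrides.yml')
+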
+# ---------------------------------------------------------------------------- # +# Options for Dataset +# ---------------------------------------------------------------------------- # +cfg.dataset = CN() +cfg.dataset.batch_size = 24 +cfg.dataset.num_workers = 2 +cfg.dataset.image_size = 224 + +def get_cfg_defaults(): + """Get a yacs CfgNode object with default values for my_project.""" + # Return a clone so that the defaults will not be altered + # This is for the "local variable" use pattern + return cfg.clone() + +def update_cfg(cfg, cfg_file): + cfg.merge_from_file(cfg_file) + return cfg.clone() + +def parse_args(): + parser = argparse.ArgumentParser() + parser.add_argument('--cfg', type=str, help='cfg file path') + + args = parser.parse_args() + print(args, end='\n\n') + + cfg = get_cfg_defaults() + if args.cfg is not None: + cfg_file = args.cfg + cfg = update_cfg(cfg, args.cfg) + cfg.cfg_file = cfg_file + + return cfg diff --git a/motion-gan-pipeline/preprocessing/third/DECA/decalib/utils/renderer.py b/motion-gan-pipeline/preprocessing/third/DECA/decalib/utils/renderer.py new file mode 100755 index 0000000..e7a2155 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/DECA/decalib/utils/renderer.py @@ -0,0 +1,342 @@ +# -*- coding: utf-8 -*- +# +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# Using this computer program means that you agree to the terms +# in the LICENSE file included with this software distribution. +# Any use not explicitly granted by the LICENSE is prohibited. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# For comments or questions, please email us at deca@tue.mpg.de +# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de +import pdb +import matplotlib.pyplot as plt +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as F +from skimage.io import imread +import imageio +from pytorch3d.structures import Meshes +from pytorch3d.io import load_obj +from pytorch3d.renderer.mesh import rasterize_meshes +from . 
import util +# from .rasterizer.standard_rasterize_cuda import standard_rasterize + +class Pytorch3dRasterizer(nn.Module): + """ Borrowed from https://github.com/facebookresearch/pytorch3d + Notice: + x,y,z are in image space, normalized + can only render squared image now + """ + + def __init__(self, image_size=224): + """ + use fixed raster_settings for rendering faces + """ + super().__init__() + raster_settings = { + 'image_size': image_size, + 'blur_radius': 0.0, + 'faces_per_pixel': 1, + 'bin_size': None, + 'max_faces_per_bin': None, + 'perspective_correct': False, + } + raster_settings = util.dict2obj(raster_settings) + self.raster_settings = raster_settings + + def forward(self, vertices, faces, attributes=None): + fixed_vertices = vertices.clone() + fixed_vertices[...,:2] = -fixed_vertices[...,:2] + meshes_screen = Meshes(verts=fixed_vertices.float(), faces=faces.long()) + raster_settings = self.raster_settings + pix_to_face, zbuf, bary_coords, dists = rasterize_meshes( + meshes_screen, + image_size=raster_settings.image_size, + blur_radius=raster_settings.blur_radius, + faces_per_pixel=raster_settings.faces_per_pixel, + bin_size=raster_settings.bin_size, + max_faces_per_bin=raster_settings.max_faces_per_bin, + perspective_correct=raster_settings.perspective_correct, + ) + vismask = (pix_to_face > -1).float() + D = attributes.shape[-1] + attributes = attributes.clone(); attributes = attributes.view(attributes.shape[0]*attributes.shape[1], 3, attributes.shape[-1]) + N, H, W, K, _ = bary_coords.shape + mask = pix_to_face == -1 + pix_to_face = pix_to_face.clone() + pix_to_face[mask] = 0 + idx = pix_to_face.view(N * H * W * K, 1, 1).expand(N * H * W * K, 3, D) + pixel_face_vals = attributes.gather(0, idx).view(N, H, W, K, 3, D) + pixel_vals = (bary_coords[..., None] * pixel_face_vals).sum(dim=-2) + pixel_vals[mask] = 0 # Replace masked values in output. + pixel_vals = pixel_vals[:,:,:,0].permute(0,3,1,2) + pixel_vals = torch.cat([pixel_vals, vismask[:,:,:,0][:,None,:,:]], dim=1) + return pixel_vals + +class SRenderY(nn.Module): + def __init__(self, image_size, obj_filename, uv_size=256, rasterizer_type='pytorch3d'): + super(SRenderY, self).__init__() + self.image_size = image_size + self.uv_size = uv_size + + verts, faces, aux = load_obj(obj_filename) + uvcoords = aux.verts_uvs[None, ...] # (N, V, 2) + uvfaces = faces.textures_idx[None, ...] # (N, F, 3) + faces = faces.verts_idx[None,...] + + if rasterizer_type == 'pytorch3d': + self.rasterizer = Pytorch3dRasterizer(image_size) + self.uv_rasterizer = Pytorch3dRasterizer(uv_size) + + # faces + dense_triangles = util.generate_triangles(uv_size, uv_size) + self.register_buffer('dense_faces', torch.from_numpy(dense_triangles).long()[None,:,:]) + self.register_buffer('faces', faces) + self.register_buffer('raw_uvcoords', uvcoords) + + # uv coords + uvcoords = torch.cat([uvcoords, uvcoords[:,:,0:1]*0.+1.], -1) #[bz, ntv, 3] + uvcoords = uvcoords*2 - 1; uvcoords[...,1] = -uvcoords[...,1] + face_uvcoords = util.face_vertices(uvcoords, uvfaces) + self.register_buffer('uvcoords', uvcoords) + self.register_buffer('uvfaces', uvfaces) + self.register_buffer('face_uvcoords', face_uvcoords) + + # shape colors, for rendering shape overlay + colors = torch.tensor([180, 180, 180])[None, None, :].repeat(1, faces.max()+1, 1).float()/255. 
+ face_colors = util.face_vertices(colors, faces) + self.register_buffer('face_colors', face_colors) + + ## SH factors for lighting + pi = np.pi + constant_factor = torch.tensor([1/np.sqrt(4*pi), ((2*pi)/3)*(np.sqrt(3/(4*pi))), ((2*pi)/3)*(np.sqrt(3/(4*pi))),\ + ((2*pi)/3)*(np.sqrt(3/(4*pi))), (pi/4)*(3)*(np.sqrt(5/(12*pi))), (pi/4)*(3)*(np.sqrt(5/(12*pi))),\ + (pi/4)*(3)*(np.sqrt(5/(12*pi))), (pi/4)*(3/2)*(np.sqrt(5/(12*pi))), (pi/4)*(1/2)*(np.sqrt(5/(4*pi)))]).float() + self.register_buffer('constant_factor', constant_factor) + + def forward(self, vertices, transformed_vertices, albedos, lights=None, light_type='point'): + ''' + -- Texture Rendering + vertices: [batch_size, V, 3], vertices in world space, for calculating normals, then shading + transformed_vertices: [batch_size, V, 3], range:normalized to [-1,1], projected vertices in image space (that is aligned to the iamge pixel), for rasterization + albedos: [batch_size, 3, h, w], uv map + lights: + spherical homarnic: [N, 9(shcoeff), 3(rgb)] + points/directional lighting: [N, n_lights, 6(xyzrgb)] + light_type: + point or directional + ''' + batch_size = vertices.shape[0] + ## rasterizer near 0 far 100. move mesh so minz larger than 0 + transformed_vertices[:,:,2] = transformed_vertices[:,:,2] + 10 + # attributes + face_vertices = util.face_vertices(vertices, self.faces.expand(batch_size, -1, -1)) + normals = util.vertex_normals(vertices, self.faces.expand(batch_size, -1, -1)); face_normals = util.face_vertices(normals, self.faces.expand(batch_size, -1, -1)) + transformed_normals = util.vertex_normals(transformed_vertices, self.faces.expand(batch_size, -1, -1)); transformed_face_normals = util.face_vertices(transformed_normals, self.faces.expand(batch_size, -1, -1)) + + attributes = torch.cat([self.face_uvcoords.expand(batch_size, -1, -1, -1), + transformed_face_normals.detach(), + face_vertices.detach(), + face_normals], + -1) + # rasterize + rendering = self.rasterizer(transformed_vertices, self.faces.expand(batch_size, -1, -1), attributes) + + #### + # vis mask + alpha_images = rendering[:, -1, :, :][:, None, :, :].detach() + + # albedo + #pdb.set_trace() + uvcoords_images = rendering[:, :3, :, :]; grid = (uvcoords_images).permute(0, 2, 3, 1)[:, :, :, :2] + albedo_images = F.grid_sample(albedos, grid, align_corners=False) + + # visible mask for pixels with positive normal direction + transformed_normal_map = rendering[:, 3:6, :, :].detach() + pos_mask = (transformed_normal_map[:, 2:, :, :] < -0.05).float() + + # shading + normal_images = rendering[:, 9:12, :, :] + if lights is not None: + if lights.shape[1] == 9: + shading_images = self.add_SHlight(normal_images, lights) + else: + if light_type=='point': + vertice_images = rendering[:, 6:9, :, :].detach() + shading = self.add_pointlight(vertice_images.permute(0,2,3,1).reshape([batch_size, -1, 3]), normal_images.permute(0,2,3,1).reshape([batch_size, -1, 3]), lights) + shading_images = shading.reshape([batch_size, albedo_images.shape[2], albedo_images.shape[3], 3]).permute(0,3,1,2) + else: + shading = self.add_directionlight(normal_images.permute(0,2,3,1).reshape([batch_size, -1, 3]), lights) + shading_images = shading.reshape([batch_size, albedo_images.shape[2], albedo_images.shape[3], 3]).permute(0,3,1,2) + images = albedo_images*shading_images + else: + images = albedo_images + shading_images = images.detach()*0. 
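+        # channel layout of `rendering` follows the `attributes` concatenation
+        # above: 0:3 uv coords, 3:6 transformed normals, 6:9 world-space
+        # vertices, 9:12 normals; the last channel is the visibility mask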
+ + outputs = { + 'images': images*alpha_images, + 'albedo_images': albedo_images*alpha_images, + 'alpha_images': alpha_images, + 'pos_mask': pos_mask, + 'shading_images': shading_images, + 'grid': grid, + 'normals': normals, + 'normal_images': normal_images*alpha_images, + 'transformed_normals': transformed_normals, + } + + return outputs + + def add_SHlight(self, normal_images, sh_coeff): + ''' + sh_coeff: [bz, 9, 3] + ''' + N = normal_images + sh = torch.stack([ + N[:,0]*0.+1., N[:,0], N[:,1], \ + N[:,2], N[:,0]*N[:,1], N[:,0]*N[:,2], + N[:,1]*N[:,2], N[:,0]**2 - N[:,1]**2, 3*(N[:,2]**2) - 1 + ], + 1) # [bz, 9, h, w] + sh = sh*self.constant_factor[None,:,None,None] + shading = torch.sum(sh_coeff[:,:,:,None,None]*sh[:,:,None,:,:], 1) # [bz, 9, 3, h, w] + return shading + + def add_pointlight(self, vertices, normals, lights): + ''' + vertices: [bz, nv, 3] + lights: [bz, nlight, 6] + returns: + shading: [bz, nv, 3] + ''' + light_positions = lights[:,:,:3]; light_intensities = lights[:,:,3:] + directions_to_lights = F.normalize(light_positions[:,:,None,:] - vertices[:,None,:,:], dim=3) + # normals_dot_lights = torch.clamp((normals[:,None,:,:]*directions_to_lights).sum(dim=3), 0., 1.) + normals_dot_lights = (normals[:,None,:,:]*directions_to_lights).sum(dim=3) + shading = normals_dot_lights[:,:,:,None]*light_intensities[:,:,None,:] + return shading.mean(1) + + def add_directionlight(self, normals, lights): + ''' + normals: [bz, nv, 3] + lights: [bz, nlight, 6] + returns: + shading: [bz, nv, 3] + ''' + light_direction = lights[:,:,:3]; light_intensities = lights[:,:,3:] + directions_to_lights = F.normalize(light_direction[:,:,None,:].expand(-1,-1,normals.shape[1],-1), dim=3) + # normals_dot_lights = torch.clamp((normals[:,None,:,:]*directions_to_lights).sum(dim=3), 0., 1.) + # normals_dot_lights = (normals[:,None,:,:]*directions_to_lights).sum(dim=3) + normals_dot_lights = torch.clamp((normals[:,None,:,:]*directions_to_lights).sum(dim=3), 0., 1.) 
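+        # clamping N.L to [0, 1] gives standard Lambertian shading: surfaces
+        # facing away from a light receive no (negative) contribution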
+ shading = normals_dot_lights[:,:,:,None]*light_intensities[:,:,None,:] + return shading.mean(1) + + def render_shape(self, vertices, transformed_vertices, images=None, detail_normal_images=None, lights=None): + ''' + -- rendering shape with detail normal map + ''' + batch_size = vertices.shape[0] + # set lighting + if lights is None: + light_positions = torch.tensor( + [ + [-1,1,1], + [1,1,1], + [-1,-1,1], + [1,-1,1], + [0,0,1] + ] + )[None,:,:].expand(batch_size, -1, -1).float() + light_intensities = torch.ones_like(light_positions).float()*1.7 + lights = torch.cat((light_positions, light_intensities), 2).to(vertices.device) + transformed_vertices[:,:,2] = transformed_vertices[:,:,2] + 10 + + # Attributes + face_vertices = util.face_vertices(vertices, self.faces.expand(batch_size, -1, -1)) + normals = util.vertex_normals(vertices, self.faces.expand(batch_size, -1, -1)); face_normals = util.face_vertices(normals, self.faces.expand(batch_size, -1, -1)) + transformed_normals = util.vertex_normals(transformed_vertices, self.faces.expand(batch_size, -1, -1)); transformed_face_normals = util.face_vertices(transformed_normals, self.faces.expand(batch_size, -1, -1)) + attributes = torch.cat([self.face_colors.expand(batch_size, -1, -1, -1), + transformed_face_normals.detach(), + face_vertices.detach(), + face_normals], + -1) + # rasterize + rendering = self.rasterizer(transformed_vertices, self.faces.expand(batch_size, -1, -1), attributes) + + #### + alpha_images = rendering[:, -1, :, :][:, None, :, :].detach() + + # albedo + albedo_images = rendering[:, :3, :, :] + # mask + transformed_normal_map = rendering[:, 3:6, :, :].detach() + pos_mask = (transformed_normal_map[:, 2:, :, :] < 0.15).float() + + # shading + normal_images = rendering[:, 9:12, :, :].detach() + vertice_images = rendering[:, 6:9, :, :].detach() + if detail_normal_images is not None: + normal_images = detail_normal_images + + shading = self.add_directionlight(normal_images.permute(0,2,3,1).reshape([batch_size, -1, 3]), lights) + shading_images = shading.reshape([batch_size, albedo_images.shape[2], albedo_images.shape[3], 3]).permute(0,3,1,2).contiguous() + shaded_images = albedo_images*shading_images + + alpha_images = alpha_images*pos_mask + if images is None: + shape_images = shaded_images*alpha_images + torch.zeros_like(shaded_images).to(vertices.device)*(1-alpha_images) + else: + shape_images = shaded_images*alpha_images + images*(1-alpha_images) + return shape_images + + def render_depth(self, transformed_vertices): + ''' + -- rendering depth + ''' + batch_size = transformed_vertices.shape[0] + + transformed_vertices[:,:,2] = transformed_vertices[:,:,2] - transformed_vertices[:,:,2].min() + z = -transformed_vertices[:,:,2:].repeat(1,1,3).clone() + z = z-z.min() + z = z/z.max() + # Attributes + attributes = util.face_vertices(z, self.faces.expand(batch_size, -1, -1)) + # rasterize + transformed_vertices[:,:,2] = transformed_vertices[:,:,2] + 10 + rendering = self.rasterizer(transformed_vertices, self.faces.expand(batch_size, -1, -1), attributes) + + #### + alpha_images = rendering[:, -1, :, :][:, None, :, :].detach() + depth_images = rendering[:, :1, :, :] + return depth_images + + def render_normal(self, transformed_vertices, normals): + ''' + -- rendering normal + ''' + batch_size = normals.shape[0] + + # Attributes + attributes = util.face_vertices(normals, self.faces.expand(batch_size, -1, -1)) + # rasterize + rendering = self.rasterizer(transformed_vertices, self.faces.expand(batch_size, -1, -1), attributes) + #### + 
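+        # as in the other render_* helpers, the last rasterizer channel is
+        # the visibility mask and the leading channels carry the attributes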
alpha_images = rendering[:, -1, :, :][:, None, :, :].detach() + normal_images = rendering[:, :3, :, :] + return normal_images + + def world2uv(self, vertices): + ''' + warp vertices from world space to uv space + vertices: [bz, V, 3] + uv_vertices: [bz, 3, h, w] + ''' + batch_size = vertices.shape[0] + face_vertices = util.face_vertices(vertices, self.faces.expand(batch_size, -1, -1)) + uv_vertices = self.uv_rasterizer(self.uvcoords.expand(batch_size, -1, -1), self.uvfaces.expand(batch_size, -1, -1), face_vertices)[:, :3] + return uv_vertices \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/DECA/decalib/utils/rotation_converter.py b/motion-gan-pipeline/preprocessing/third/DECA/decalib/utils/rotation_converter.py new file mode 100644 index 0000000..89756d9 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/DECA/decalib/utils/rotation_converter.py @@ -0,0 +1,381 @@ +# -*- coding: utf-8 -*- +# +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# Using this computer program means that you agree to the terms +# in the LICENSE file included with this software distribution. +# Any use not explicitly granted by the LICENSE is prohibited. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# For comments or questions, please email us at deca@tue.mpg.de +# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de + +import torch +import pdb + +''' Rotation Converter +Representations: + euler angle(3), angle axis(3), rotation matrix(3x3), quaternion(4), continous repre +Ref: + https://kornia.readthedocs.io/en/v0.1.2/_modules/torchgeometry/core/conversions.html# + smplx/lbs +''' + +pi = torch.Tensor([3.14159265358979323846]) +def rad2deg(tensor): + """Function that converts angles from radians to degrees. + + See :class:`~torchgeometry.RadToDeg` for details. + + Args: + tensor (Tensor): Tensor of arbitrary shape. + + Returns: + Tensor: Tensor with same shape as input. + + Example: + >>> input = tgm.pi * torch.rand(1, 3, 3) + >>> output = tgm.rad2deg(input) + """ + if not torch.is_tensor(tensor): + raise TypeError("Input type is not a torch.Tensor. Got {}" + .format(type(tensor))) + + return 180. * tensor / pi.to(tensor.device).type(tensor.dtype) + +def deg2rad(tensor): + """Function that converts angles from degrees to radians. + + See :class:`~torchgeometry.DegToRad` for details. + + Args: + tensor (Tensor): Tensor of arbitrary shape. + + Returns: + Tensor: Tensor with same shape as input. + + Examples:: + + >>> input = 360. * torch.rand(1, 3, 3) + >>> output = tgm.deg2rad(input) + """ + if not torch.is_tensor(tensor): + raise TypeError("Input type is not a torch.Tensor. Got {}" + .format(type(tensor))) + + return tensor * pi.to(tensor.device).type(tensor.dtype) / 180. 
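+# Illustrative round trip (mirroring the doctest style above; batch_euler2axis
+# is defined further down in this module):
+#
+#   angles_deg = torch.tensor([[0., 45., 0.]])
+#   axis_angle = batch_euler2axis(deg2rad(angles_deg))  # (1, 3) axis-angle
+#   assert torch.allclose(rad2deg(deg2rad(angles_deg)), angles_deg)
+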
+ +######### to quaternion +def euler_to_quaternion(r): + x = r[..., 0] + y = r[..., 1] + z = r[..., 2] + + z = z/2.0 + y = y/2.0 + x = x/2.0 + cz = torch.cos(z) + sz = torch.sin(z) + cy = torch.cos(y) + sy = torch.sin(y) + cx = torch.cos(x) + sx = torch.sin(x) + quaternion = torch.zeros_like(r.repeat(1,2))[..., :4].to(r.device) + quaternion[..., 0] += cx*cy*cz - sx*sy*sz + quaternion[..., 1] += cx*sy*sz + cy*cz*sx + quaternion[..., 2] += cx*cz*sy - sx*cy*sz + quaternion[..., 3] += cx*cy*sz + sx*cz*sy + return quaternion + +def rotation_matrix_to_quaternion(rotation_matrix, eps=1e-6): + """Convert 3x4 rotation matrix to 4d quaternion vector + + This algorithm is based on algorithm described in + https://github.com/KieranWynn/pyquaternion/blob/master/pyquaternion/quaternion.py#L201 + + Args: + rotation_matrix (Tensor): the rotation matrix to convert. + + Return: + Tensor: the rotation in quaternion + + Shape: + - Input: :math:`(N, 3, 4)` + - Output: :math:`(N, 4)` + + Example: + >>> input = torch.rand(4, 3, 4) # Nx3x4 + >>> output = tgm.rotation_matrix_to_quaternion(input) # Nx4 + """ + if not torch.is_tensor(rotation_matrix): + raise TypeError("Input type is not a torch.Tensor. Got {}".format( + type(rotation_matrix))) + + if len(rotation_matrix.shape) > 3: + raise ValueError( + "Input size must be a three dimensional tensor. Got {}".format( + rotation_matrix.shape)) + # if not rotation_matrix.shape[-2:] == (3, 4): + # raise ValueError( + # "Input size must be a N x 3 x 4 tensor. Got {}".format( + # rotation_matrix.shape)) + + rmat_t = torch.transpose(rotation_matrix, 1, 2) + + mask_d2 = rmat_t[:, 2, 2] < eps + + mask_d0_d1 = rmat_t[:, 0, 0] > rmat_t[:, 1, 1] + mask_d0_nd1 = rmat_t[:, 0, 0] < -rmat_t[:, 1, 1] + + t0 = 1 + rmat_t[:, 0, 0] - rmat_t[:, 1, 1] - rmat_t[:, 2, 2] + q0 = torch.stack([rmat_t[:, 1, 2] - rmat_t[:, 2, 1], + t0, rmat_t[:, 0, 1] + rmat_t[:, 1, 0], + rmat_t[:, 2, 0] + rmat_t[:, 0, 2]], -1) + t0_rep = t0.repeat(4, 1).t() + + t1 = 1 - rmat_t[:, 0, 0] + rmat_t[:, 1, 1] - rmat_t[:, 2, 2] + q1 = torch.stack([rmat_t[:, 2, 0] - rmat_t[:, 0, 2], + rmat_t[:, 0, 1] + rmat_t[:, 1, 0], + t1, rmat_t[:, 1, 2] + rmat_t[:, 2, 1]], -1) + t1_rep = t1.repeat(4, 1).t() + + t2 = 1 - rmat_t[:, 0, 0] - rmat_t[:, 1, 1] + rmat_t[:, 2, 2] + q2 = torch.stack([rmat_t[:, 0, 1] - rmat_t[:, 1, 0], + rmat_t[:, 2, 0] + rmat_t[:, 0, 2], + rmat_t[:, 1, 2] + rmat_t[:, 2, 1], t2], -1) + t2_rep = t2.repeat(4, 1).t() + + t3 = 1 + rmat_t[:, 0, 0] + rmat_t[:, 1, 1] + rmat_t[:, 2, 2] + q3 = torch.stack([t3, rmat_t[:, 1, 2] - rmat_t[:, 2, 1], + rmat_t[:, 2, 0] - rmat_t[:, 0, 2], + rmat_t[:, 0, 1] - rmat_t[:, 1, 0]], -1) + t3_rep = t3.repeat(4, 1).t() + + mask_c0 = mask_d2 * mask_d0_d1.float() + mask_c1 = mask_d2 * (1 - mask_d0_d1.float()) + mask_c2 = (1 - mask_d2.float()) * mask_d0_nd1 + mask_c3 = (1 - mask_d2.float()) * (1 - mask_d0_nd1.float()) + mask_c0 = mask_c0.view(-1, 1).type_as(q0) + mask_c1 = mask_c1.view(-1, 1).type_as(q1) + mask_c2 = mask_c2.view(-1, 1).type_as(q2) + mask_c3 = mask_c3.view(-1, 1).type_as(q3) + + q = q0 * mask_c0 + q1 * mask_c1 + q2 * mask_c2 + q3 * mask_c3 + q /= torch.sqrt(t0_rep * mask_c0 + t1_rep * mask_c1 + # noqa + t2_rep * mask_c2 + t3_rep * mask_c3) # noqa + q *= 0.5 + return q + +# def angle_axis_to_quaternion(theta): +# batch_size = theta.shape[0] +# l1norm = torch.norm(theta + 1e-8, p=2, dim=1) +# angle = torch.unsqueeze(l1norm, -1) +# normalized = torch.div(theta, angle) +# angle = angle * 0.5 +# v_cos = torch.cos(angle) +# v_sin = torch.sin(angle) +# quat = 
torch.cat([v_cos, v_sin * normalized], dim=1)
+#     return quat
+
+def angle_axis_to_quaternion(angle_axis: torch.Tensor) -> torch.Tensor:
+    """Convert an angle axis to a quaternion.
+
+    Adapted from ceres C++ library: ceres-solver/include/ceres/rotation.h
+
+    Args:
+        angle_axis (torch.Tensor): tensor with angle axis.
+
+    Return:
+        torch.Tensor: tensor with quaternion.
+
+    Shape:
+        - Input: :math:`(*, 3)` where `*` means any number of dimensions
+        - Output: :math:`(*, 4)`
+
+    Example:
+        >>> angle_axis = torch.rand(2, 3)  # Nx3
+        >>> quaternion = tgm.angle_axis_to_quaternion(angle_axis)  # Nx4
+    """
+    if not torch.is_tensor(angle_axis):
+        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
+            type(angle_axis)))
+
+    if not angle_axis.shape[-1] == 3:
+        raise ValueError("Input must be a tensor of shape Nx3 or 3. Got {}"
+                         .format(angle_axis.shape))
+    # unpack input and compute conversion
+    a0: torch.Tensor = angle_axis[..., 0:1]
+    a1: torch.Tensor = angle_axis[..., 1:2]
+    a2: torch.Tensor = angle_axis[..., 2:3]
+    theta_squared: torch.Tensor = a0 * a0 + a1 * a1 + a2 * a2
+
+    theta: torch.Tensor = torch.sqrt(theta_squared)
+    half_theta: torch.Tensor = theta * 0.5
+
+    mask: torch.Tensor = theta_squared > 0.0
+    ones: torch.Tensor = torch.ones_like(half_theta)
+
+    k_neg: torch.Tensor = 0.5 * ones
+    k_pos: torch.Tensor = torch.sin(half_theta) / theta
+    k: torch.Tensor = torch.where(mask, k_pos, k_neg)
+    w: torch.Tensor = torch.where(mask, torch.cos(half_theta), ones)
+
+    quaternion: torch.Tensor = torch.zeros_like(angle_axis)
+    quaternion[..., 0:1] += a0 * k
+    quaternion[..., 1:2] += a1 * k
+    quaternion[..., 2:3] += a2 * k
+    return torch.cat([w, quaternion], dim=-1)
+
+#### quaternion to
+def quaternion_to_rotation_matrix(quat):
+    """Convert quaternion coefficients to rotation matrix.
+    Args:
+        quat: size = [B, 4]  4 <===> (w, x, y, z)
+    Returns:
+        Rotation matrix corresponding to the quaternion -- size = [B, 3, 3]
+    """
+    norm_quat = quat
+    norm_quat = norm_quat / norm_quat.norm(p=2, dim=1, keepdim=True)
+    w, x, y, z = norm_quat[:, 0], norm_quat[:, 1], norm_quat[:, 2], norm_quat[:, 3]
+
+    B = quat.size(0)
+
+    w2, x2, y2, z2 = w.pow(2), x.pow(2), y.pow(2), z.pow(2)
+    wx, wy, wz = w * x, w * y, w * z
+    xy, xz, yz = x * y, x * z, y * z
+
+    rotMat = torch.stack([w2 + x2 - y2 - z2, 2 * xy - 2 * wz, 2 * wy + 2 * xz,
+                          2 * wz + 2 * xy, w2 - x2 + y2 - z2, 2 * yz - 2 * wx,
+                          2 * xz - 2 * wy, 2 * wx + 2 * yz, w2 - x2 - y2 + z2], dim=1).view(B, 3, 3)
+    return rotMat
+
+def quaternion_to_angle_axis(quaternion: torch.Tensor):
+    """Convert quaternion vector to angle axis of rotation.
+
+    Adapted from ceres C++ library: ceres-solver/include/ceres/rotation.h
+
+    Args:
+        quaternion (torch.Tensor): tensor with quaternions.
+
+    Return:
+        torch.Tensor: tensor with angle axis of rotation.
+
+    Shape:
+        - Input: :math:`(*, 4)` where `*` means any number of dimensions
+        - Output: :math:`(*, 3)`
+
+    Example:
+        >>> quaternion = torch.rand(2, 4)  # Nx4
+        >>> angle_axis = tgm.quaternion_to_angle_axis(quaternion)  # Nx3
+    """
+    if not torch.is_tensor(quaternion):
+        raise TypeError("Input type is not a torch.Tensor. Got {}".format(
+            type(quaternion)))
+
+    if not quaternion.shape[-1] == 4:
+        raise ValueError("Input must be a tensor of shape Nx4 or 4. Got {}"
+                         .format(quaternion.shape))
+    # unpack input and compute conversion
+    q1: torch.Tensor = quaternion[..., 1]
+    q2: torch.Tensor = quaternion[..., 2]
+    q3: torch.Tensor = quaternion[..., 3]
+    sin_squared_theta: torch.Tensor = q1 * q1 + q2 * q2 + q3 * q3
+
+    sin_theta: torch.Tensor = torch.sqrt(sin_squared_theta)
+    cos_theta: torch.Tensor = quaternion[..., 0]
+    two_theta: torch.Tensor = 2.0 * torch.where(
+        cos_theta < 0.0,
+        torch.atan2(-sin_theta, -cos_theta),
+        torch.atan2(sin_theta, cos_theta))
+
+    k_pos: torch.Tensor = two_theta / sin_theta
+    k_neg: torch.Tensor = 2.0 * torch.ones_like(sin_theta).to(quaternion.device)
+    k: torch.Tensor = torch.where(sin_squared_theta > 0.0, k_pos, k_neg)
+
+    angle_axis: torch.Tensor = torch.zeros_like(quaternion).to(quaternion.device)[..., :3]
+    angle_axis[..., 0] += q1 * k
+    angle_axis[..., 1] += q2 * k
+    angle_axis[..., 2] += q3 * k
+    return angle_axis
+
+#### batch converter
+def batch_euler2axis(r):
+    return quaternion_to_angle_axis(euler_to_quaternion(r))
+
+def batch_euler2matrix(r):
+    return quaternion_to_rotation_matrix(euler_to_quaternion(r))
+
+def batch_matrix2euler(rot_mats):
+    # Calculates rotation matrix to euler angles
+    # Careful for extreme cases of euler angles like [0.0, pi, 0.0]
+    ### only y?
+    # TODO:
+    sy = torch.sqrt(rot_mats[:, 0, 0] * rot_mats[:, 0, 0] +
+                    rot_mats[:, 1, 0] * rot_mats[:, 1, 0])
+    return torch.atan2(-rot_mats[:, 2, 0], sy)
+
+def batch_matrix2axis(rot_mats):
+    return quaternion_to_angle_axis(rotation_matrix_to_quaternion(rot_mats))
+
+def batch_axis2matrix(theta):
+    # angle axis to rotation matrix
+    # theta N x 3
+    # return quat2mat(quat)
+    # batch_rodrigues
+    return quaternion_to_rotation_matrix(angle_axis_to_quaternion(theta))
+
+def batch_axis2euler(theta):
+    return batch_matrix2euler(batch_axis2matrix(theta))
+
+def batch_orth_proj(X, camera):
+    '''
+    X is N x num_points x 3
+    '''
+    camera = camera.clone().view(-1, 1, 3)
+    X_trans = X[:, :, :2] + camera[:, :, 1:]
+    X_trans = torch.cat([X_trans, X[:,:,2:]], 2)
+    Xn = (camera[:, :, 0:1] * X_trans)
+    return Xn
+
+def batch_rodrigues(rot_vecs, epsilon=1e-8, dtype=torch.float32):
+    ''' same as batch_axis2matrix
+    Calculates the rotation matrices for a batch of rotation vectors
+    Parameters
+    ----------
+    rot_vecs: torch.tensor Nx3
+        array of N axis-angle vectors
+    Returns
+    -------
+    R: torch.tensor Nx3x3
+        The rotation matrices for the given axis-angle parameters
+    '''
+
+    batch_size = rot_vecs.shape[0]
+    device = rot_vecs.device
+
+    angle = torch.norm(rot_vecs + 1e-8, dim=1, keepdim=True)
+    rot_dir = rot_vecs / angle
+
+    cos = torch.unsqueeze(torch.cos(angle), dim=1)
+    sin = torch.unsqueeze(torch.sin(angle), dim=1)
+
+    # Bx1 arrays
+    rx, ry, rz = torch.split(rot_dir, 1, dim=1)
+    K = torch.zeros((batch_size, 3, 3), dtype=dtype, device=device)
+
+    zeros = torch.zeros((batch_size, 1), dtype=dtype, device=device)
+    K = torch.cat([zeros, -rz, ry, rz, zeros, -rx, -ry, rx, zeros], dim=1) \
+        .view((batch_size, 3, 3))
+
+    ident = torch.eye(3, dtype=dtype, device=device).unsqueeze(dim=0)
+    rot_mat = ident + sin * K + (1 - cos) * torch.bmm(K, K)
+    return rot_mat
diff --git a/motion-gan-pipeline/preprocessing/third/DECA/decalib/utils/util.py b/motion-gan-pipeline/preprocessing/third/DECA/decalib/utils/util.py
new file mode 100755
index 0000000..42e3172
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/DECA/decalib/utils/util.py
@@ -0,0
+1,601 @@ +# -*- coding: utf-8 -*- +# +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# Using this computer program means that you agree to the terms +# in the LICENSE file included with this software distribution. +# Any use not explicitly granted by the LICENSE is prohibited. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. +# +# For comments or questions, please email us at deca@tue.mpg.de +# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de + +import numpy as np +import torch +import torch.nn.functional as F +import math +from collections import OrderedDict +import os +from scipy.ndimage import morphology +from skimage.io import imsave +import cv2 + +def upsample_mesh(vertices, normals, faces, displacement_map, texture_map, dense_template): + ''' upsampling coarse mesh (with displacment map) + vertices: vertices of coarse mesh, [nv, 3] + normals: vertex normals, [nv, 3] + faces: faces of coarse mesh, [nf, 3] + texture_map: texture map, [256, 256, 3] + displacement_map: displacment map, [256, 256] + dense_template: + Returns: + dense_vertices: upsampled vertices with details, [number of dense vertices, 3] + dense_colors: vertex color, [number of dense vertices, 3] + dense_faces: [number of dense faces, 3] + ''' + img_size = dense_template['img_size'] + dense_faces = dense_template['f'] + x_coords = dense_template['x_coords'] + y_coords = dense_template['y_coords'] + valid_pixel_ids = dense_template['valid_pixel_ids'] + valid_pixel_3d_faces = dense_template['valid_pixel_3d_faces'] + valid_pixel_b_coords = dense_template['valid_pixel_b_coords'] + + pixel_3d_points = vertices[valid_pixel_3d_faces[:, 0], :] * valid_pixel_b_coords[:, 0][:, np.newaxis] + \ + vertices[valid_pixel_3d_faces[:, 1], :] * valid_pixel_b_coords[:, 1][:, np.newaxis] + \ + vertices[valid_pixel_3d_faces[:, 2], :] * valid_pixel_b_coords[:, 2][:, np.newaxis] + vertex_normals = normals + pixel_3d_normals = vertex_normals[valid_pixel_3d_faces[:, 0], :] * valid_pixel_b_coords[:, 0][:, np.newaxis] + \ + vertex_normals[valid_pixel_3d_faces[:, 1], :] * valid_pixel_b_coords[:, 1][:, np.newaxis] + \ + vertex_normals[valid_pixel_3d_faces[:, 2], :] * valid_pixel_b_coords[:, 2][:, np.newaxis] + pixel_3d_normals = pixel_3d_normals / np.linalg.norm(pixel_3d_normals, axis=-1)[:, np.newaxis] + displacements = displacement_map[y_coords[valid_pixel_ids].astype(int), x_coords[valid_pixel_ids].astype(int)] + dense_colors = texture_map[y_coords[valid_pixel_ids].astype(int), x_coords[valid_pixel_ids].astype(int)] + offsets = np.einsum('i,ij->ij', displacements, pixel_3d_normals) + dense_vertices = pixel_3d_points + offsets + return dense_vertices, dense_colors, dense_faces + +# borrowed from https://github.com/YadiraF/PRNet/blob/master/utils/write.py +def write_obj(obj_name, + vertices, + faces, + colors=None, + texture=None, + uvcoords=None, + uvfaces=None, + inverse_face_order=False, + normal_map=None, + ): + ''' Save 3D face model with texture. 
+ Ref: https://github.com/patrikhuber/eos/blob/bd00155ebae4b1a13b08bf5a991694d682abbada/include/eos/core/Mesh.hpp + Args: + obj_name: str + vertices: shape = (nver, 3) + colors: shape = (nver, 3) + faces: shape = (ntri, 3) + texture: shape = (uv_size, uv_size, 3) + uvcoords: shape = (nver, 2) max value<=1 + ''' + if obj_name.split('.')[-1] != 'obj': + obj_name = obj_name + '.obj' + mtl_name = obj_name.replace('.obj', '.mtl') + texture_name = obj_name.replace('.obj', '.png') + material_name = 'FaceTexture' + + faces = faces.copy() + # mesh lab start with 1, python/c++ start from 0 + faces += 1 + if inverse_face_order: + faces = faces[:, [2, 1, 0]] + if uvfaces is not None: + uvfaces = uvfaces[:, [2, 1, 0]] + + # write obj + with open(obj_name, 'w') as f: + # first line: write mtlib(material library) + # f.write('# %s\n' % os.path.basename(obj_name)) + # f.write('#\n') + # f.write('\n') + if texture is not None: + f.write('mtllib %s\n\n' % os.path.basename(mtl_name)) + + # write vertices + if colors is None: + for i in range(vertices.shape[0]): + f.write('v {} {} {}\n'.format(vertices[i, 0], vertices[i, 1], vertices[i, 2])) + else: + for i in range(vertices.shape[0]): + f.write('v {} {} {} {} {} {}\n'.format(vertices[i, 0], vertices[i, 1], vertices[i, 2], colors[i, 0], colors[i, 1], colors[i, 2])) + + # write uv coords + if texture is None: + for i in range(faces.shape[0]): + f.write('f {} {} {}\n'.format(faces[i, 2], faces[i, 1], faces[i, 0])) + else: + for i in range(uvcoords.shape[0]): + f.write('vt {} {}\n'.format(uvcoords[i,0], uvcoords[i,1])) + f.write('usemtl %s\n' % material_name) + # write f: ver ind/ uv ind + uvfaces = uvfaces + 1 + for i in range(faces.shape[0]): + f.write('f {}/{} {}/{} {}/{}\n'.format( + # faces[i, 2], uvfaces[i, 2], + # faces[i, 1], uvfaces[i, 1], + # faces[i, 0], uvfaces[i, 0] + faces[i, 0], uvfaces[i, 0], + faces[i, 1], uvfaces[i, 1], + faces[i, 2], uvfaces[i, 2] + ) + ) + # write mtl + with open(mtl_name, 'w') as f: + f.write('newmtl %s\n' % material_name) + s = 'map_Kd {}\n'.format(os.path.basename(texture_name)) # map to image + f.write(s) + + if normal_map is not None: + name, _ = os.path.splitext(obj_name) + normal_name = f'{name}_normals.png' + f.write(f'disp {normal_name}') + # out_normal_map = normal_map / (np.linalg.norm( + # normal_map, axis=-1, keepdims=True) + 1e-9) + # out_normal_map = (out_normal_map + 1) * 0.5 + + cv2.imwrite( + normal_name, + # (out_normal_map * 255).astype(np.uint8)[:, :, ::-1] + normal_map + ) + cv2.imwrite(texture_name, texture) + +# ---------------------------- process/generate vertices, normals, faces +def generate_triangles(h, w, margin_x=2, margin_y=5, mask = None): + # quad layout: + # 0 1 ... w-1 + # w w+1 + #. 
+ # w*h + triangles = [] + for x in range(margin_x, w-1-margin_x): + for y in range(margin_y, h-1-margin_y): + triangle0 = [y*w + x, y*w + x + 1, (y+1)*w + x] + triangle1 = [y*w + x + 1, (y+1)*w + x + 1, (y+1)*w + x] + triangles.append(triangle0) + triangles.append(triangle1) + triangles = np.array(triangles) + triangles = triangles[:,[0,2,1]] + return triangles + +# borrowed from https://github.com/daniilidis-group/neural_renderer/blob/master/neural_renderer/vertices_to_faces.py +def face_vertices(vertices, faces): + """ + :param vertices: [batch size, number of vertices, 3] + :param faces: [batch size, number of faces, 3] + :return: [batch size, number of faces, 3, 3] + """ + assert (vertices.ndimension() == 3) + assert (faces.ndimension() == 3) + assert (vertices.shape[0] == faces.shape[0]) + assert (vertices.shape[2] == 3) + assert (faces.shape[2] == 3) + + bs, nv = vertices.shape[:2] + bs, nf = faces.shape[:2] + device = vertices.device + faces = faces + (torch.arange(bs, dtype=torch.int32).to(device) * nv)[:, None, None] + vertices = vertices.reshape((bs * nv, 3)) + # pytorch only supports long and byte tensors for indexing + return vertices[faces.long()] + +def vertex_normals(vertices, faces): + """ + :param vertices: [batch size, number of vertices, 3] + :param faces: [batch size, number of faces, 3] + :return: [batch size, number of vertices, 3] + """ + assert (vertices.ndimension() == 3) + assert (faces.ndimension() == 3) + assert (vertices.shape[0] == faces.shape[0]) + assert (vertices.shape[2] == 3) + assert (faces.shape[2] == 3) + bs, nv = vertices.shape[:2] + bs, nf = faces.shape[:2] + device = vertices.device + normals = torch.zeros(bs * nv, 3).to(device) + + faces = faces + (torch.arange(bs, dtype=torch.int32).to(device) * nv)[:, None, None] # expanded faces + vertices_faces = vertices.reshape((bs * nv, 3))[faces.long()] + + faces = faces.reshape(-1, 3) + vertices_faces = vertices_faces.reshape(-1, 3, 3) + + normals.index_add_(0, faces[:, 1].long(), + torch.cross(vertices_faces[:, 2] - vertices_faces[:, 1], vertices_faces[:, 0] - vertices_faces[:, 1])) + normals.index_add_(0, faces[:, 2].long(), + torch.cross(vertices_faces[:, 0] - vertices_faces[:, 2], vertices_faces[:, 1] - vertices_faces[:, 2])) + normals.index_add_(0, faces[:, 0].long(), + torch.cross(vertices_faces[:, 1] - vertices_faces[:, 0], vertices_faces[:, 2] - vertices_faces[:, 0])) + + normals = F.normalize(normals, eps=1e-6, dim=1) + normals = normals.reshape((bs, nv, 3)) + # pytorch only supports long and byte tensors for indexing + return normals + +def batch_orth_proj(X, camera): + ''' orthgraphic projection + X: 3d vertices, [bz, n_point, 3] + camera: scale and translation, [bz, 3], [scale, tx, ty] + ''' + camera = camera.clone().view(-1, 1, 3) + X_trans = X[:, :, :2] + camera[:, :, 1:] + X_trans = torch.cat([X_trans, X[:,:,2:]], 2) + shape = X_trans.shape + Xn = (camera[:, :, 0:1] * X_trans) + return Xn + +# -------------------------------------- image processing +# borrowed from: https://torchgeometry.readthedocs.io/en/latest/_modules/kornia/filters +def gaussian(window_size, sigma): + def gauss_fcn(x): + return -(x - window_size // 2)**2 / float(2 * sigma**2) + gauss = torch.stack( + [torch.exp(torch.tensor(gauss_fcn(x))) for x in range(window_size)]) + return gauss / gauss.sum() + +def get_gaussian_kernel(kernel_size: int, sigma: float): + r"""Function that returns Gaussian filter coefficients. + + Args: + kernel_size (int): filter size. It should be odd and positive. 
+ sigma (float): gaussian standard deviation. + + Returns: + Tensor: 1D tensor with gaussian filter coefficients. + + Shape: + - Output: :math:`(\text{kernel_size})` + + Examples:: + + >>> kornia.image.get_gaussian_kernel(3, 2.5) + tensor([0.3243, 0.3513, 0.3243]) + + >>> kornia.image.get_gaussian_kernel(5, 1.5) + tensor([0.1201, 0.2339, 0.2921, 0.2339, 0.1201]) + """ + if not isinstance(kernel_size, int) or kernel_size % 2 == 0 or \ + kernel_size <= 0: + raise TypeError("kernel_size must be an odd positive integer. " + "Got {}".format(kernel_size)) + window_1d = gaussian(kernel_size, sigma) + return window_1d + +def get_gaussian_kernel2d(kernel_size, sigma): + r"""Function that returns Gaussian filter matrix coefficients. + + Args: + kernel_size (Tuple[int, int]): filter sizes in the x and y direction. + Sizes should be odd and positive. + sigma (Tuple[int, int]): gaussian standard deviation in the x and y + direction. + + Returns: + Tensor: 2D tensor with gaussian filter matrix coefficients. + + Shape: + - Output: :math:`(\text{kernel_size}_x, \text{kernel_size}_y)` + + Examples:: + + >>> kornia.image.get_gaussian_kernel2d((3, 3), (1.5, 1.5)) + tensor([[0.0947, 0.1183, 0.0947], + [0.1183, 0.1478, 0.1183], + [0.0947, 0.1183, 0.0947]]) + + >>> kornia.image.get_gaussian_kernel2d((3, 5), (1.5, 1.5)) + tensor([[0.0370, 0.0720, 0.0899, 0.0720, 0.0370], + [0.0462, 0.0899, 0.1123, 0.0899, 0.0462], + [0.0370, 0.0720, 0.0899, 0.0720, 0.0370]]) + """ + if not isinstance(kernel_size, tuple) or len(kernel_size) != 2: + raise TypeError("kernel_size must be a tuple of length two. Got {}" + .format(kernel_size)) + if not isinstance(sigma, tuple) or len(sigma) != 2: + raise TypeError("sigma must be a tuple of length two. Got {}" + .format(sigma)) + ksize_x, ksize_y = kernel_size + sigma_x, sigma_y = sigma + kernel_x = get_gaussian_kernel(ksize_x, sigma_x) + kernel_y = get_gaussian_kernel(ksize_y, sigma_y) + kernel_2d = torch.matmul( + kernel_x.unsqueeze(-1), kernel_y.unsqueeze(-1).t()) + return kernel_2d + +def gaussian_blur(x, kernel_size=(3,3), sigma=(0.8,0.8)): + b, c, h, w = x.shape + kernel = get_gaussian_kernel2d(kernel_size, sigma).to(x.device).to(x.dtype) + kernel = kernel.repeat(c, 1, 1, 1) + padding = [(k - 1) // 2 for k in kernel_size] + return F.conv2d(x, kernel, padding=padding, stride=1, groups=c) + +def _compute_binary_kernel(window_size): + r"""Creates a binary kernel to extract the patches. If the window size + is HxW will create a (H*W)xHxW kernel. + """ + window_range = window_size[0] * window_size[1] + kernel: torch.Tensor = torch.zeros(window_range, window_range) + for i in range(window_range): + kernel[i, i] += 1.0 + return kernel.view(window_range, 1, window_size[0], window_size[1]) + +def median_blur(x, kernel_size=(3,3)): + b, c, h, w = x.shape + kernel = _compute_binary_kernel(kernel_size).to(x.device).to(x.dtype) + kernel = kernel.repeat(c, 1, 1, 1) + padding = [(k - 1) // 2 for k in kernel_size] + features = F.conv2d(x, kernel, padding=padding, stride=1, groups=c) + features = features.view(b,c,-1,h,w) + median = torch.median(features, dim=2)[0] + return median + +def get_laplacian_kernel2d(kernel_size: int): + r"""Function that returns Gaussian filter matrix coefficients. + + Args: + kernel_size (int): filter size should be odd. + + Returns: + Tensor: 2D tensor with laplacian filter matrix coefficients. 
+ + Shape: + - Output: :math:`(\text{kernel_size}_x, \text{kernel_size}_y)` + + Examples:: + + >>> kornia.image.get_laplacian_kernel2d(3) + tensor([[ 1., 1., 1.], + [ 1., -8., 1.], + [ 1., 1., 1.]]) + + >>> kornia.image.get_laplacian_kernel2d(5) + tensor([[ 1., 1., 1., 1., 1.], + [ 1., 1., 1., 1., 1.], + [ 1., 1., -24., 1., 1.], + [ 1., 1., 1., 1., 1.], + [ 1., 1., 1., 1., 1.]]) + + """ + if not isinstance(kernel_size, int) or kernel_size % 2 == 0 or \ + kernel_size <= 0: + raise TypeError("ksize must be an odd positive integer. Got {}" + .format(kernel_size)) + + kernel = torch.ones((kernel_size, kernel_size)) + mid = kernel_size // 2 + kernel[mid, mid] = 1 - kernel_size ** 2 + kernel_2d: torch.Tensor = kernel + return kernel_2d + +def laplacian(x): + # https://torchgeometry.readthedocs.io/en/latest/_modules/kornia/filters/laplacian.html + b, c, h, w = x.shape + kernel_size = 3 + kernel = get_laplacian_kernel2d(kernel_size).to(x.device).to(x.dtype) + kernel = kernel.repeat(c, 1, 1, 1) + padding = (kernel_size - 1) // 2 + return F.conv2d(x, kernel, padding=padding, stride=1, groups=c) + +def angle2matrix(angles): + ''' get rotation matrix from three rotation angles(degree). right-handed. + Args: + angles: [batch_size, 3] tensor containing X, Y, and Z angles. + x: pitch. positive for looking down. + y: yaw. positive for looking left. + z: roll. positive for tilting head right. + Returns: + R: [batch_size, 3, 3]. rotation matrices. + ''' + angles = angles*(np.pi)/180. + s = torch.sin(angles) + c = torch.cos(angles) + + cx, cy, cz = (c[:, 0], c[:, 1], c[:, 2]) + sx, sy, sz = (s[:, 0], s[:, 1], s[:, 2]) + + zeros = torch.zeros_like(s[:, 0]).to(angles.device) + ones = torch.ones_like(s[:, 0]).to(angles.device) + + # Rz.dot(Ry.dot(Rx)) + R_flattened = torch.stack( + [ + cz * cy, cz * sy * sx - sz * cx, cz * sy * cx + sz * sx, + sz * cy, sz * sy * sx + cz * cx, sz * sy * cx - cz * sx, + -sy, cy * sx, cy * cx, + ], + dim=0) #[batch_size, 9] + R = torch.reshape(R_flattened, (-1, 3, 3)) #[batch_size, 3, 3] + return R + +def binary_erosion(tensor, kernel_size=5): + # tensor: [bz, 1, h, w]. 
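+    # note: erosion runs on the CPU via scipy per batch element, then the
+    # result is moved back to the input tensor's device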
+    device = tensor.device
+    mask = tensor.cpu().numpy()
+    structure = np.ones((kernel_size, kernel_size))
+    new_mask = mask.copy()
+    for i in range(mask.shape[0]):
+        new_mask[i,0] = morphology.binary_erosion(mask[i,0], structure)
+    return torch.from_numpy(new_mask.astype(np.float32)).to(device)
+
+def flip_image(src_image, kps):
+    '''
+    purpose:
+        flip an image given by src_image and its 2d keypoints
+    flip_mode:
+        0: horizontal flip
+        >0: vertical flip
+        <0: horizontal & vertical flip
+    '''
+    h, w = src_image.shape[0], src_image.shape[1]
+    src_image = cv2.flip(src_image, 1)
+    if kps is not None:
+        kps[:, 0] = w - 1 - kps[:, 0]
+        kp_map = [5, 4, 3, 2, 1, 0, 11, 10, 9, 8, 7, 6, 12, 13]
+        kps[:, :] = kps[kp_map]
+
+    return src_image, kps
+
+# -------------------------------------- io
+def copy_state_dict(cur_state_dict, pre_state_dict, prefix='', load_name=None):
+    def _get_params(key):
+        key = prefix + key
+        if key in pre_state_dict:
+            return pre_state_dict[key]
+        return None
+    for k in cur_state_dict.keys():
+        if load_name is not None:
+            if load_name not in k:
+                continue
+        v = _get_params(k)
+        try:
+            if v is None:
+                # print('parameter {} not found'.format(k))
+                continue
+            cur_state_dict[k].copy_(v)
+        except Exception:
+            # print('copy param {} failed'.format(k))
+            continue
+
+def check_mkdir(path):
+    if not os.path.exists(path):
+        print('creating %s' % path)
+        os.makedirs(path)
+
+def check_mkdirlist(pathlist):
+    for path in pathlist:
+        if not os.path.exists(path):
+            print('creating %s' % path)
+            os.makedirs(path)
+
+def tensor2image(tensor):
+    image = tensor.detach().cpu().numpy()
+    image = image*255.
+    image = np.maximum(np.minimum(image, 255), 0)
+    image = image.transpose(1,2,0)[:,:,[2,1,0]]
+    return image.astype(np.uint8).copy()
+
+def dict2obj(d):
+    # if isinstance(d, list):
+    #     d = [dict2obj(x) for x in d]
+    if not isinstance(d, dict):
+        return d
+    class C(object):
+        pass
+    o = C()
+    for k in d:
+        o.__dict__[k] = dict2obj(d[k])
+    return o
+
+class Struct(object):
+    def __init__(self, **kwargs):
+        for key, val in kwargs.items():
+            setattr(self, key, val)
+
+# original saved file with DataParallel
+def remove_module(state_dict):
+    # create new OrderedDict that does not contain `module.`
+    new_state_dict = OrderedDict()
+    for k, v in state_dict.items():
+        name = k[7:] # remove `module.`
+        new_state_dict[name] = v
+    return new_state_dict
+
+def dict_tensor2npy(tensor_dict):
+    npy_dict = {}
+    for key in tensor_dict:
+        npy_dict[key] = tensor_dict[key][0].cpu().numpy()
+    return npy_dict
+
+# ---------------------------------- visualization
+end_list = np.array([17, 22, 27, 42, 48, 31, 36, 68], dtype = np.int32) - 1
+def plot_kpts(image, kpts, color = 'r'):
+    ''' Draw 68 key points
+    Args:
+        image: the input image
+        kpt: (68, 3).
+    '''
+    if color == 'r':
+        c = (255, 0, 0)
+    elif color == 'g':
+        c = (0, 255, 0)
+    elif color == 'b':
+        c = (0, 0, 255)
+    image = image.copy()
+    kpts = kpts.copy()
+
+    for i in range(kpts.shape[0]):
+        st = kpts[i, :2]
+        if kpts.shape[1]==4:
+            if kpts[i, 3] > 0.5:
+                c = (0, 255, 0)
+            else:
+                c = (0, 0, 255)
+        image = cv2.circle(image,(st[0], st[1]), 1, c, 2)
+        if i in end_list:
+            continue
+        ed = kpts[i + 1, :2]
+        image = cv2.line(image, (st[0], st[1]), (ed[0], ed[1]), (255, 255, 255), 1)
+
+    return image
+
+def plot_verts(image, kpts, color = 'r'):
+    ''' Draw 68 key points
+    Args:
+        image: the input image
+        kpt: (68, 3).
+ ''' + if color == 'r': + c = (255, 0, 0) + elif color == 'g': + c = (0, 255, 0) + elif color == 'b': + c = (0, 0, 255) + elif color == 'y': + c = (0, 255, 255) + image = image.copy() + + for i in range(kpts.shape[0]): + st = kpts[i, :2] + image = cv2.circle(image,(st[0], st[1]), 1, c, 2) + + return image + +def tensor_vis_landmarks(images, landmarks, gt_landmarks=None, color = 'g', isScale=True): + # visualize landmarks + vis_landmarks = [] + images = images.cpu().numpy() + predicted_landmarks = landmarks.detach().cpu().numpy() + if gt_landmarks is not None: + gt_landmarks_np = gt_landmarks.detach().cpu().numpy() + for i in range(images.shape[0]): + image = images[i] + image = image.transpose(1,2,0)[:,:,[2,1,0]].copy(); image = (image*255) + if isScale: + predicted_landmark = predicted_landmarks[i]*image.shape[0]/2 + image.shape[0]/2 + else: + predicted_landmark = predicted_landmarks[i] + if predicted_landmark.shape[0] == 68: + image_landmarks = plot_kpts(image, predicted_landmark, color) + if gt_landmarks is not None: + image_landmarks = plot_verts(image_landmarks, gt_landmarks_np[i]*image.shape[0]/2 + image.shape[0]/2, 'r') + else: + image_landmarks = plot_verts(image, predicted_landmark, color) + if gt_landmarks is not None: + image_landmarks = plot_verts(image_landmarks, gt_landmarks_np[i]*image.shape[0]/2 + image.shape[0]/2, 'r') + vis_landmarks.append(image_landmarks) + + vis_landmarks = np.stack(vis_landmarks) + vis_landmarks = torch.from_numpy(vis_landmarks[:,:,:,[2,1,0]].transpose(0,3,1,2))/255.#, dtype=torch.float32) + return vis_landmarks \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/DECA/demos/__init__.py b/motion-gan-pipeline/preprocessing/third/DECA/demos/__init__.py new file mode 100755 index 0000000..e69de29 diff --git a/motion-gan-pipeline/preprocessing/third/DECA/demos/demo_reconstruct.py b/motion-gan-pipeline/preprocessing/third/DECA/demos/demo_reconstruct.py new file mode 100755 index 0000000..16310f8 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/DECA/demos/demo_reconstruct.py @@ -0,0 +1,108 @@ +# -*- coding: utf-8 -*- +# +# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is +# holder of all proprietary rights on this computer program. +# Using this computer program means that you agree to the terms +# in the LICENSE file included with this software distribution. +# Any use not explicitly granted by the LICENSE is prohibited. +# +# Copyright©2019 Max-Planck-Gesellschaft zur Förderung +# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute +# for Intelligent Systems. All rights reserved. 
+#
+# For comments or questions, please email us at deca@tue.mpg.de
+# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de
+
+import os, sys
+import cv2
+import numpy as np
+from time import time
+from scipy.io import savemat
+import argparse
+from tqdm import tqdm
+
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+from decalib.deca import DECA
+from decalib.datasets import datasets
+from decalib.utils import util
+from decalib.utils.config import cfg as deca_cfg
+
+
+def main(args):
+    savefolder = args.savefolder
+    device = args.device
+    os.makedirs(savefolder, exist_ok=True)
+
+    # load test images
+    testdata = datasets.TestData(args.inputpath, iscrop=args.iscrop, face_detector=args.detector)
+
+    # run DECA
+    deca_cfg.model.use_tex = args.useTex
+    deca = DECA(config = deca_cfg, device=device)
+    # for i in range(len(testdata)):
+    for i in tqdm(range(len(testdata))):
+        name = testdata[i]['imagename']
+        images = testdata[i]['image'].to(device)[None,...]
+        codedict = deca.encode(images)
+        opdict, visdict = deca.decode(codedict) #tensor
+
+        if args.saveDepth or args.saveKpt or args.saveObj or args.saveMat or args.saveImages:
+            os.makedirs(os.path.join(savefolder, name), exist_ok=True)
+        # -- save results
+        if args.saveDepth:
+            depth_image = deca.render.render_depth(opdict['transformed_vertices']).repeat(1,3,1,1)
+            visdict['depth_images'] = depth_image
+            cv2.imwrite(os.path.join(savefolder, name, name + '_depth.jpg'), util.tensor2image(depth_image[0]))
+        if args.saveKpt:
+            np.savetxt(os.path.join(savefolder, name, name + '_kpt2d.txt'), opdict['landmarks2d'][0].cpu().numpy())
+            np.savetxt(os.path.join(savefolder, name, name + '_kpt3d.txt'), opdict['landmarks3d'][0].cpu().numpy())
+        if args.saveObj:
+            deca.save_obj(os.path.join(savefolder, name, name + '.obj'), opdict)
+        if args.saveMat:
+            opdict = util.dict_tensor2npy(opdict)
+            savemat(os.path.join(savefolder, name, name + '.mat'), opdict)
+        if args.saveVis:
+            cv2.imwrite(os.path.join(savefolder, name + '_vis.jpg'), deca.visualize(visdict))
+        if args.saveImages:
+            for vis_name in ['inputs', 'rendered_images', 'albedo_images', 'shape_images', 'shape_detail_images']:
+                if vis_name not in visdict.keys():
+                    continue
+                image = util.tensor2image(visdict[vis_name][0])
+                cv2.imwrite(os.path.join(savefolder, name, name + '_' + vis_name + '.jpg'), image)
+    print(f'-- please check the results in {savefolder}')
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='DECA: Detailed Expression Capture and Animation')
+
+    parser.add_argument('-i', '--inputpath', default='TestSamples/examples', type=str,
+                        help='path to the test data, can be image folder, image path, image list, video')
+    parser.add_argument('-s', '--savefolder', default='TestSamples/examples/results', type=str,
+                        help='path to the output directory, where results (obj, txt files) will be stored.')
+    parser.add_argument('--device', default='cuda', type=str,
+                        help='set device, cpu for using cpu' )
+    # process test images
+    parser.add_argument('--iscrop', default=True, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to crop input image, set false only when the test images are well cropped' )
+    parser.add_argument('--detector', default='fan', type=str,
+                        help='detector for cropping face, check decalib/detectors.py for details' )
+    # save
+    parser.add_argument('--useTex', default=False, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to use FLAME texture model to generate uv texture map, \
+                            set it to True only if you downloaded texture model' )
+    parser.add_argument('--saveVis', default=True, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to save visualization of output' )
+    parser.add_argument('--saveKpt', default=False, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to save 2D and 3D keypoints' )
+    parser.add_argument('--saveDepth', default=False, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to save depth image' )
+    parser.add_argument('--saveObj', default=False, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to save outputs as .obj, detail mesh will end with _detail.obj. \
+                            Note that saving objs could be slow' )
+    parser.add_argument('--saveMat', default=False, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to save outputs as .mat' )
+    parser.add_argument('--saveImages', default=True, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to save visualization output as separate images' )
+    main(parser.parse_args())
\ No newline at end of file
diff --git a/motion-gan-pipeline/preprocessing/third/DECA/demos/demo_teaser.py b/motion-gan-pipeline/preprocessing/third/DECA/demos/demo_teaser.py
new file mode 100755
index 0000000..bc0e630
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/DECA/demos/demo_teaser.py
@@ -0,0 +1,126 @@
+# -*- coding: utf-8 -*-
+#
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# Using this computer program means that you agree to the terms
+# in the LICENSE file included with this software distribution.
+# Any use not explicitly granted by the LICENSE is prohibited.
+#
+# Copyright©2019 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# For comments or questions, please email us at deca@tue.mpg.de
+# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de
+
+import os, sys
+import cv2
+import numpy as np
+from time import time
+from scipy.io import savemat
+import argparse
+import imageio
+from skimage.transform import rescale
+import torch
+
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+from decalib.deca import DECA
+from decalib.datasets import datasets
+from decalib.utils import util
+from decalib.utils.rotation_converter import batch_euler2axis, deg2rad
+
+def main(args):
+    savefolder = args.savefolder
+    device = args.device
+    os.makedirs(savefolder, exist_ok=True)
+
+    # load test images
+    testdata = datasets.TestData(args.inputpath, iscrop=args.iscrop, face_detector=args.detector)
+    expdata = datasets.TestData(args.exp_path, iscrop=args.iscrop, face_detector=args.detector)
+    # DECA
+    deca = DECA(device=device)
+
+    visdict_list_list = []
+    for i in range(len(testdata)):
+        name = testdata[i]['imagename']
+        images = testdata[i]['image'].to(device)[None,...]
+        codedict = deca.encode(images)
+        opdict, visdict = deca.decode(codedict)  # tensors
+        ### show shape with different views and expressions
+        visdict_list = []
+        max_yaw = 30
+        yaw_list = list(range(0, max_yaw, 5)) + list(range(max_yaw, -max_yaw, -5)) + list(range(-max_yaw, 0, 5))
+        for k in yaw_list:  # yaw angle from -30 to 30
+            ## yaw angle
+            euler_pose = torch.randn((1, 3))
+            euler_pose[:, 1] = k  # torch.rand((self.batch_size))*160 - 80
+            euler_pose[:, 0] = 0  # (torch.rand((self.batch_size))*60 - 30)*(2./euler_pose[:,1].abs())
+            euler_pose[:, 2] = 0  # (torch.rand((self.batch_size))*60 - 30)*(2./euler_pose[:,1].abs())
+            global_pose = batch_euler2axis(deg2rad(euler_pose[:, :3].cuda()))
+            codedict['pose'][:, :3] = global_pose
+            codedict['cam'][:, :] = 0.
+            codedict['cam'][:, 0] = 8
+            _, visdict_view = deca.decode(codedict)
+            visdict = {x: visdict[x] for x in ['inputs', 'shape_detail_images']}
+            visdict['pose'] = visdict_view['shape_detail_images']
+            visdict_list.append(visdict)
+
+        euler_pose = torch.zeros((1, 3))
+        global_pose = batch_euler2axis(deg2rad(euler_pose[:, :3].cuda()))
+        codedict['pose'][:, :3] = global_pose
+        for (i, k) in enumerate(range(0, 31, 2)):  # jaw angle from 0 to 30
+            # expression: jaw pose
+            euler_pose = torch.randn((1, 3))
+            euler_pose[:, 0] = k  # torch.rand((self.batch_size))*160 - 80
+            euler_pose[:, 1] = 0  # (torch.rand((self.batch_size))*60 - 30)*(2./euler_pose[:,1].abs())
+            euler_pose[:, 2] = 0  # (torch.rand((self.batch_size))*60 - 30)*(2./euler_pose[:,1].abs())
+            jaw_pose = batch_euler2axis(deg2rad(euler_pose[:, :3].cuda()))
+            codedict['pose'][:, 3:] = jaw_pose
+            _, visdict_view = deca.decode(codedict)
+            visdict_list[i]['exp'] = visdict_view['shape_detail_images']
+            count = i
+
+        for (i, k) in enumerate(range(len(expdata))):  # expressions transferred from the reference images
+            exp_images = expdata[i]['image'].to(device)[None, ...]
+            exp_codedict = deca.encode(exp_images)
+            # transfer exp code
+            codedict['pose'][:, 3:] = exp_codedict['pose'][:, 3:]
+            codedict['exp'] = exp_codedict['exp']
+            _, exp_visdict = deca.decode(codedict)
+            visdict_list[i + count]['exp'] = exp_visdict['shape_detail_images']
+
+        visdict_list_list.append(visdict_list)
+
+    ### write gif
+    writer = imageio.get_writer(os.path.join(savefolder, 'teaser.gif'), mode='I')
+    for i in range(len(yaw_list)):
+        grid_image_list = []
+        for j in range(len(testdata)):
+            grid_image = deca.visualize(visdict_list_list[j][i])
+            grid_image_list.append(grid_image)
+        grid_image_all = np.concatenate(grid_image_list, 0)
+        grid_image_all = rescale(grid_image_all, 0.6, multichannel=True)  # resize for showing in github
+        writer.append_data(grid_image_all[:, :, [2, 1, 0]])
+
+    print(f'-- please check the teaser figure in {savefolder}')
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='DECA: Detailed Expression Capture and Animation')
+
+    parser.add_argument('-i', '--inputpath', default='TestSamples/teaser', type=str,
+                        help='path to the test data, can be an image folder, image path, image list, or video')
+    parser.add_argument('-e', '--exp_path', default='TestSamples/exp', type=str,
+                        help='path to expression')
+    parser.add_argument('-s', '--savefolder', default='TestSamples/teaser/results', type=str,
+                        help='path to the output directory, where results (obj, txt files) will be stored')
+    parser.add_argument('--device', default='cuda', type=str,
+                        help='set device, cpu for using cpu')
+    # process test images
+    parser.add_argument('--iscrop', default=True, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to crop the input image; set False only when the test images are already well cropped')
+    parser.add_argument('--detector', default='fan', type=str,
+                        help='detector for cropping the face, check decalib/detectors.py for details')
+
+    main(parser.parse_args())
\ No newline at end of file
diff --git a/motion-gan-pipeline/preprocessing/third/DECA/demos/demo_transfer.py b/motion-gan-pipeline/preprocessing/third/DECA/demos/demo_transfer.py
new file mode 100755
index 0000000..4ea3037
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/DECA/demos/demo_transfer.py
@@ -0,0 +1,126 @@
+# -*- coding: utf-8 -*-
+#
+# Max-Planck-Gesellschaft zur Förderung der Wissenschaften e.V. (MPG) is
+# holder of all proprietary rights on this computer program.
+# Using this computer program means that you agree to the terms
+# in the LICENSE file included with this software distribution.
+# Any use not explicitly granted by the LICENSE is prohibited.
+#
+# Copyright©2019 Max-Planck-Gesellschaft zur Förderung
+# der Wissenschaften e.V. (MPG). acting on behalf of its Max Planck Institute
+# for Intelligent Systems. All rights reserved.
+#
+# For comments or questions, please email us at deca@tue.mpg.de
+# For commercial licensing contact, please contact ps-license@tuebingen.mpg.de
+
+import os, sys
+import cv2
+import numpy as np
+from time import time
+from scipy.io import savemat
+import argparse
+
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+from decalib.deca import DECA
+from decalib.datasets import datasets
+from decalib.utils import util
+from decalib.utils.config import cfg as deca_cfg
+
+def main(args):
+    savefolder = args.savefolder
+    device = args.device
+    os.makedirs(savefolder, exist_ok=True)
+
+    # load test images
+    testdata = datasets.TestData(args.image_path, iscrop=args.iscrop, face_detector=args.detector)
+    expdata = datasets.TestData(args.exp_path, iscrop=args.iscrop, face_detector=args.detector)
+
+    # run DECA
+    deca_cfg.model.use_tex = args.useTex
+    deca = DECA(config=deca_cfg, device=device)
+    # identity reference
+    i = 0
+    name = testdata[i]['imagename']
+    savepath = '{}/{}.jpg'.format(savefolder, name)
+    images = testdata[i]['image'].to(device)[None, ...]
+    id_codedict = deca.encode(images)
+    id_opdict, id_visdict = deca.decode(id_codedict)
+    id_visdict = {x: id_visdict[x] for x in ['inputs', 'shape_detail_images']}
+
+    # -- expression transfer
+    # exp code from image
+    exp_images = expdata[i]['image'].to(device)[None, ...]
+    exp_codedict = deca.encode(exp_images)
+    # transfer exp code
+    id_codedict['pose'][:, 3:] = exp_codedict['pose'][:, 3:]
+    id_codedict['exp'] = exp_codedict['exp']
+    transfer_opdict, transfer_visdict = deca.decode(id_codedict)
+    id_visdict['transferred_shape'] = transfer_visdict['shape_detail_images']
+    cv2.imwrite(os.path.join(savefolder, name + '_animation.jpg'), deca.visualize(id_visdict))
+
+    transfer_opdict['uv_texture_gt'] = id_opdict['uv_texture_gt']
+    if args.saveDepth or args.saveKpt or args.saveObj or args.saveMat or args.saveImages:
+        os.makedirs(os.path.join(savefolder, name, 'reconstruction'), exist_ok=True)
+        os.makedirs(os.path.join(savefolder, name, 'animation'), exist_ok=True)
+
+    # -- save results
+    image_name = name
+    for save_type in ['reconstruction', 'animation']:
+        if save_type == 'reconstruction':
+            visdict = id_visdict; opdict = id_opdict
+        else:
+            visdict = transfer_visdict; opdict = transfer_opdict
+        if args.saveDepth:
+            depth_image = deca.render.render_depth(opdict['transformed_vertices']).repeat(1, 3, 1, 1)
+            visdict['depth_images'] = depth_image
+            cv2.imwrite(os.path.join(savefolder, name, save_type, name + '_depth.jpg'), util.tensor2image(depth_image[0]))
+        if args.saveKpt:
+            np.savetxt(os.path.join(savefolder, name, save_type, name + '_kpt2d.txt'), opdict['landmarks2d'][0].cpu().numpy())
+            np.savetxt(os.path.join(savefolder, name, save_type, name + '_kpt3d.txt'), opdict['landmarks3d'][0].cpu().numpy())
+        if args.saveObj:
+            deca.save_obj(os.path.join(savefolder, name, save_type, name + '.obj'), opdict)
+        if args.saveMat:
+            opdict = util.dict_tensor2npy(opdict)
+            savemat(os.path.join(savefolder, name, save_type, name + '.mat'), opdict)
+        if args.saveImages:
+            for vis_name in ['inputs', 'rendered_images', 'albedo_images', 'shape_images', 'shape_detail_images']:
+                if vis_name not in visdict.keys():
+                    continue
+                image = util.tensor2image(visdict[vis_name][0])
+                cv2.imwrite(os.path.join(savefolder, name, save_type, name + '_' + vis_name + '.jpg'), image)
+    print(f'-- please check the results in {savefolder}')
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='DECA: Detailed Expression Capture and Animation')
+
+    parser.add_argument('-i', '--image_path', default='TestSamples/examples/IMG_0392_inputs.jpg', type=str,
+                        help='path to the input image')
+    parser.add_argument('-e', '--exp_path', default='TestSamples/exp/7.jpg', type=str,
+                        help='path to expression')
+    parser.add_argument('-s', '--savefolder', default='TestSamples/animation_results', type=str,
+                        help='path to the output directory, where results (obj, txt files) will be stored')
+    parser.add_argument('--device', default='cuda', type=str,
+                        help='set device, cpu for using cpu')
+    # process test images
+    parser.add_argument('--iscrop', default=True, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to crop the input image; set False only when the test images are already well cropped')
+    parser.add_argument('--detector', default='fan', type=str,
+                        help='detector for cropping the face, check decalib/detectors.py for details')
+    # save
+    parser.add_argument('--useTex', default=False, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to use the FLAME texture model to generate the uv texture map, \
+                            set it to True only if you downloaded the texture model')
+    parser.add_argument('--saveVis', default=True, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to save visualization of the output')
+    parser.add_argument('--saveKpt', default=True, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to save 2D and 3D keypoints')
+    parser.add_argument('--saveDepth', default=True, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to save the depth image')
+    parser.add_argument('--saveObj', default=True, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to save outputs as .obj')
+    parser.add_argument('--saveMat', default=False, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to save outputs as .mat')
+    parser.add_argument('--saveImages', default=True, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to save visualization outputs as separate images')
+    main(parser.parse_args())
diff --git a/motion-gan-pipeline/preprocessing/third/DECA/demos/run_on_img.py b/motion-gan-pipeline/preprocessing/third/DECA/demos/run_on_img.py
new file mode 100644
index 0000000..d75969a
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/DECA/demos/run_on_img.py
@@ -0,0 +1,126 @@
+
+import os, sys
+import cv2
+import numpy as np
+from time import time
+from scipy.io import savemat
+import argparse
+from tqdm import tqdm
+import matplotlib.pyplot as plt
+from skimage.transform import estimate_transform, warp, resize, rescale
+
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
+from decalib.deca import DECA
+from decalib.datasets import datasets
+from decalib.utils import util
+from decalib.utils.config import cfg as deca_cfg
+
+
+def warp_back(image, oldimage, tform):
+
+    alpha = 0.6
+
+    oldimage = oldimage.astype(np.float64) / 255.
+    new_size = oldimage.shape
+
+    dst_image = warp(image, tform, output_shape=new_size)
+
+    # Mask of non-black pixels.
+    mask = np.where(np.all(dst_image == [0, 0, 0], axis=-1))
+    dst_image[mask] = oldimage[mask]
+
+    res = cv2.addWeighted(oldimage, 1 - alpha, dst_image, alpha, 0)
+    res = res[:, :, ::-1]
+
+    return res
+
+
+def main(args):
+    savefolder = args.savefolder
+    device = args.device
+    os.makedirs(savefolder, exist_ok=True)
+
+    # load test images
+    testdata = datasets.TestData(args.inputpath, iscrop=args.iscrop, face_detector=args.detector)
+
+    # run DECA
+    deca_cfg.model.use_tex = args.useTex
+    deca = DECA(config=deca_cfg, device=device)
+    for i in tqdm(range(len(testdata))):
+
+        name = testdata[i]['imagename']
+        images = testdata[i]['image'].to(device)[None, ...]
+        original_images = testdata[i]['original_image'].to(device)[None, ...]
+
+        codedict = deca.encode(images)
+        opdict, visdict = deca.decode(codedict)  # tensors
+
+        images = util.tensor2image(images[0])
+        original_images = util.tensor2image(original_images[0])
+        image = util.tensor2image(visdict['shape_detail_images'][0])
+
+        # warp the rendered shape back into the original image and preview it
+        new = warp_back(image, original_images, testdata[i]['tform'])
+        plt.imshow(new)
+        plt.show()
+
+        break
+
+        # if args.saveDepth or args.saveKpt or args.saveObj or args.saveMat or args.saveImages:
+        #     os.makedirs(os.path.join(savefolder, name), exist_ok=True)
+        # # -- save results
+        # if args.saveObj:
+        #     deca.save_obj(os.path.join(savefolder, name, name + '.obj'), opdict)
+        # if args.saveMat:
+        #     opdict = util.dict_tensor2npy(opdict)
+        #     savemat(os.path.join(savefolder, name, name + '.mat'), opdict)
+        # if args.saveVis:
+        #     cv2.imwrite(os.path.join(savefolder, name + '_vis.jpg'), deca.visualize(visdict))
+        # if args.saveImages:
+        #     for vis_name in ['inputs', 'rendered_images', 'albedo_images', 'shape_images', 'shape_detail_images']:
+        #         if vis_name not in visdict.keys():
+        #             continue
+        #         image = util.tensor2image(visdict[vis_name][0])
+        #         cv2.imwrite(os.path.join(savefolder, name, name + '_' + vis_name + '.jpg'), image)
+    # print(f'-- please check the results in {savefolder}')
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description='DECA: Detailed Expression Capture and Animation')
+
+    parser.add_argument('-i', '--inputpath', default='TestSamples/SRF', type=str,
+                        help='path to the test data, can be an image folder, image path, image list, or video')
+    parser.add_argument('-s', '--savefolder', default='TestSamples/SRF/results', type=str,
+                        help='path to the output directory, where results (obj, txt files) will be stored')
+    parser.add_argument('--device', default='cuda', type=str,
+                        help='set device, cpu for using cpu')
+    # process test images
+    parser.add_argument('--iscrop', default=True, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to crop the input image; set False only when the test images are already well cropped')
+    parser.add_argument('--detector', default='fan', type=str,
+                        help='detector for cropping the face, check decalib/detectors.py for details')
+    # save
+    parser.add_argument('--useTex', default=False, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to use the FLAME texture model to generate the uv texture map, \
+                            set it to True only if you downloaded the texture model')
+    parser.add_argument('--saveVis', default=True, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to save visualization of the output')
+    parser.add_argument('--saveKpt', default=False, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to save 2D and 3D keypoints')
+    parser.add_argument('--saveDepth', default=False, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to save the depth image')
+    parser.add_argument('--saveObj', default=False, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to save outputs as .obj; the detail mesh will end with _detail.obj. \
+                            Note that saving objs can be slow')
+    parser.add_argument('--saveMat', default=False, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to save outputs as .mat')
+    parser.add_argument('--saveImages', default=True, type=lambda x: x.lower() in ['true', '1'],
+                        help='whether to save visualization outputs as separate images')
+    main(parser.parse_args())
\ No newline at end of file
diff --git a/motion-gan-pipeline/preprocessing/third/DECA/install_conda.sh b/motion-gan-pipeline/preprocessing/third/DECA/install_conda.sh
new file mode 100755
index 0000000..76c91f9
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/DECA/install_conda.sh
@@ -0,0 +1,10 @@
+#!/usr/bin/env bash
+
+export CONDA_ENV_NAME=deca-env
+echo $CONDA_ENV_NAME
+
+conda create -n $CONDA_ENV_NAME python=3.7
+
+eval "$(conda shell.bash hook)"
+conda activate $CONDA_ENV_NAME
+pip install -r requirements.txt
\ No newline at end of file
diff --git a/motion-gan-pipeline/preprocessing/third/DECA/install_pip.sh b/motion-gan-pipeline/preprocessing/third/DECA/install_pip.sh
new file mode 100755
index 0000000..7346c85
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/DECA/install_pip.sh
@@ -0,0 +1,8 @@
+#!/usr/bin/env bash
+
+echo "Creating virtual environment"
+python3.7 -m venv deca-env
+echo "Activating virtual environment"
+
+source $PWD/deca-env/bin/activate
+$PWD/deca-env/bin/pip install -r requirements.txt
\ No newline at end of file
diff --git a/motion-gan-pipeline/preprocessing/third/DECA/rendered_image_gaze.jpg b/motion-gan-pipeline/preprocessing/third/DECA/rendered_image_gaze.jpg
new file mode 100644
index 0000000..1af4837
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/DECA/rendered_image_gaze.jpg
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:84dbf5d84fc14f56baad37ed3a607f30110d79a8170ffb807273552b2fb2dfe2
+size 6204
diff --git a/motion-gan-pipeline/preprocessing/third/DECA/rendered_image_og.jpg b/motion-gan-pipeline/preprocessing/third/DECA/rendered_image_og.jpg
new file mode 100644
index 0000000..7451f00
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/DECA/rendered_image_og.jpg
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3cfad6a31c2a684887fc17092bca6037a398bddd25e672cffb2bd754284fc47f
+size 6176
diff --git a/motion-gan-pipeline/preprocessing/third/DECA/requirements.txt b/motion-gan-pipeline/preprocessing/third/DECA/requirements.txt
new file mode 100644
index 0000000..e931eb7
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/DECA/requirements.txt
@@ -0,0 +1,13 @@
+# To install all of these requirements:
+#   `pip install -r requirements.txt`
+numpy>=1.18.5
+scipy>=1.4.1
+chumpy>=0.69
+scikit-image>=0.15  # skimage
+opencv-python>=4.1.1
+PyYAML>=5.1.1
+torch==1.6.0  # for compatibility with pytorch3d
+torchvision==0.7.0
+face-alignment
+# pytorch3d
\ No newline at end of file
diff --git a/motion-gan-pipeline/preprocessing/third/DECA/shape.jpg b/motion-gan-pipeline/preprocessing/third/DECA/shape.jpg
new file mode 100644
index 0000000..8ddc9f8
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/DECA/shape.jpg
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d8a4c504331e5c6c93d3b62d79e30c8c2daf4821b49f5adc5f8dc41e339d3899
+size 5138
diff --git a/motion-gan-pipeline/preprocessing/third/DECA/test_expression.jpg b/motion-gan-pipeline/preprocessing/third/DECA/test_expression.jpg
new file mode 100644
index 0000000..99d17c8
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/DECA/test_expression.jpg
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:584255a499aa5bfa4a3511e98acb63af703b2bb2524ffb1d3aa8b11a9f99e547
+size 142515
diff --git a/motion-gan-pipeline/preprocessing/third/DECA/test_eye_gazing.py b/motion-gan-pipeline/preprocessing/third/DECA/test_eye_gazing.py
new file mode 100644
index 0000000..4efacc3
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/DECA/test_eye_gazing.py
@@ -0,0 +1,67 @@
+import sys
+import os
+from tqdm import tqdm
+import matplotlib.pyplot as plt
+import numpy as np
+from PIL import Image
+import torch.nn.functional as F
+
+
+sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '../third/DECA/')))
+from decalib.deca import DECA
+from decalib.datasets import datasets
+from decalib.utils import util
+from decalib.utils.config import cfg as deca_cfg
+
+def save_img(name, image):
+    to_save = image[0].permute(1, 2, 0).cpu().numpy()
+    to_save = to_save * 255.
+    to_save = to_save.astype(np.uint8)
+    filename = name + '.jpg'
+    img = Image.fromarray(to_save)
+    img.save(filename)
+
+deca_cfg.model.use_tex = False
+device = 'cuda'
+deca = DECA(config=deca_cfg, device=device)
+
+# load the expression reference image and keep its expression code
+testdata = datasets.TestData('test_expression.jpg', iscrop=True, face_detector='fan')
+images = testdata[0]['image'].to(device)[None, ...]
+codedict = deca.encode(images)
+expr = codedict['exp']
+
+# load the gaze test image
+testdata = datasets.TestData('test_gaze.jpg', iscrop=True, face_detector='fan')
+images = testdata[0]['image'].to(device)[None, ...]
+
+codedict = deca.encode(images)
+opdict, visdict = deca.decode(codedict)
+
+save_img('shape', visdict['shape_images'])
+
+verts_og = opdict['vertices']
+grid_og = opdict['grid']
+texture_og = opdict['uv_texture_gt']
+image = F.grid_sample(texture_og, grid_og, align_corners=False)
+save_img('rendered_image_og', image)
+
+
+# codedict['exp'] = expr
+opdict, visdict = deca.decode_eyes(codedict)
+verts_gaze = opdict['vertices']
+grid_gaze = opdict['grid']
+texture_gaze = opdict['uv_texture_gt']
+image = F.grid_sample(texture_og, grid_gaze, align_corners=False)
+save_img('rendered_image_gaze', image)
+
+
+euclidean_dist = sum(((verts_og - verts_gaze)**2).flatten())
+print('Mesh distance: ', euclidean_dist)
+
+grid_dist = sum(((grid_og - grid_gaze)**2).flatten())
+print('Grid distance: ', grid_dist)
+
+texture_dist = sum(((texture_og - texture_gaze)**2).flatten())
+print('Texture distance: ', texture_dist)
\ No newline at end of file
diff --git a/motion-gan-pipeline/preprocessing/third/DECA/test_gaze.jpg b/motion-gan-pipeline/preprocessing/third/DECA/test_gaze.jpg
new file mode 100644
index 0000000..a324dd6
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/DECA/test_gaze.jpg
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:181ff4fe3433b81cf338e31b9ec7b6ab603fc57dd4bc1c46d9f909dbcf85c33e
+size 121963
diff --git a/motion-gan-pipeline/preprocessing/third/DeepSpeech/models/output_graph.pb b/motion-gan-pipeline/preprocessing/third/DeepSpeech/models/output_graph.pb
new file mode 100644
index 0000000..da61e4e
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/DeepSpeech/models/output_graph.pb
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:309a4c24f861edca2a027e3d9ecd5ae34468d8b20ed4d9be694858e0bd6f0640
+size 490979273
diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/LICENSE b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/LICENSE
new file mode 100644
index 0000000..e72bfdd
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/LICENSE
@@ -0,0 +1,674 @@
+                    GNU GENERAL PUBLIC LICENSE
+                       Version 3, 29 June 2007
+
+ Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+                            Preamble
+
+  The GNU General Public License is a free, copyleft license for
+software and other kinds of works.
+
+  The licenses for most software and other practical works are designed
+to take away your freedom to share and change the works.  By contrast,
+the GNU General Public License is intended to guarantee your freedom to
+share and change all versions of a program--to make sure it remains free
+software for all its users.  We, the Free Software Foundation, use the
+GNU General Public License for most of our software; it applies also to
+any other work released this way by its authors.  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+them if you wish), that you receive source code or can get it if you
+want it, that you can change the software or use pieces of it in new
+free programs, and that you know you can do these things.
+
+  To protect your rights, we need to prevent others from denying you
+these rights or asking you to surrender the rights.
Therefore, you have +certain responsibilities if you distribute copies of the software, or if +you modify it: responsibilities to respect the freedom of others. + + For example, if you distribute copies of such a program, whether +gratis or for a fee, you must pass on to the recipients the same +freedoms that you received. You must make sure that they, too, receive +or can get the source code. And you must show them these terms so they +know their rights. + + Developers that use the GNU GPL protect your rights with two steps: +(1) assert copyright on the software, and (2) offer you this License +giving you legal permission to copy, distribute and/or modify it. + + For the developers' and authors' protection, the GPL clearly explains +that there is no warranty for this free software. For both users' and +authors' sake, the GPL requires that modified versions be marked as +changed, so that their problems will not be attributed erroneously to +authors of previous versions. + + Some devices are designed to deny users access to install or run +modified versions of the software inside them, although the manufacturer +can do so. This is fundamentally incompatible with the aim of +protecting users' freedom to change the software. The systematic +pattern of such abuse occurs in the area of products for individuals to +use, which is precisely where it is most unacceptable. Therefore, we +have designed this version of the GPL to prohibit the practice for those +products. If such problems arise substantially in other domains, we +stand ready to extend this provision to those domains in future versions +of the GPL, as needed to protect the freedom of users. + + Finally, every program is threatened constantly by software patents. +States should not allow patents to restrict development and use of +software on general-purpose computers, but in those that do, we wish to +avoid the special danger that patents applied to a free program could +make it effectively proprietary. To prevent this, the GPL assures that +patents cannot be used to render the program non-free. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. 
Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. + + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. 
You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. + + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. 
In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. + + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. 
If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. + + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). 
To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. + + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Use with the GNU Affero General Public License. 
+ + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU Affero General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the special requirements of the GNU Affero General Public License, +section 13, concerning interaction through a network will apply to the +combination as such. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU General Public License from time to time. Such new versions will +be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU General Public License, you may choose any version ever published +by the Free Software Foundation. + + If the Program specifies that a proxy can decide which future +versions of the GNU General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. + + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. 
+
+                     END OF TERMS AND CONDITIONS
+
+            How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+state the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software: you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation, either version 3 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+Also add information on how to contact you by electronic and paper mail.
+
+  If the program does terminal interaction, make it output a short
+notice like this when it starts in an interactive mode:
+
+    <program>  Copyright (C) <year>  <name of author>
+    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, your program's commands
+might be different; for a GUI interface, you would use an "about box".
+
+  You should also get your employer (if you work as a programmer) or school,
+if any, to sign a "copyright disclaimer" for the program, if necessary.
+For more information on this, and how to apply and follow the GNU GPL, see
+<https://www.gnu.org/licenses/>.
+
+  The GNU General Public License does not permit incorporating your program
+into proprietary programs.  If your program is a subroutine library, you
+may consider it more useful to permit linking proprietary applications with
+the library.  If this is what you want to do, use the GNU Lesser General
+Public License instead of this License.  But first, please read
+<https://www.gnu.org/licenses/why-not-lgpl.html>.
\ No newline at end of file
diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/README.md b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/README.md
new file mode 100644
index 0000000..262d439
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/README.md
@@ -0,0 +1,235 @@
+# Robust Video Matting (RVM)
+
+![Teaser](/documentation/image/teaser.gif)
+
+English | 中文
+
+
+Official repository for the paper [Robust High-Resolution Video Matting with Temporal Guidance](https://peterl1n.github.io/RobustVideoMatting/). RVM is specifically designed for robust human video matting. Unlike existing neural models that process frames as independent images, RVM uses a recurrent neural network to process videos with temporal memory. RVM can perform matting in real time on any video without additional inputs. It achieves **4K 76FPS** and **HD 104FPS** on an Nvidia GTX 1080 Ti GPU. The project was developed at [ByteDance Inc.](https://www.bytedance.com/)
+
+ +## News + +* [Sep 16 2021] Code is re-released under GPL-3.0 license. +* [Aug 25 2021] Source code and pretrained models are published. +* [Jul 27 2021] Paper is accepted by WACV 2022. + +
+ +## Showreel +Watch the showreel video ([YouTube](https://youtu.be/Jvzltozpbpk), [Bilibili](https://www.bilibili.com/video/BV1Z3411B7g7/)) to see the model's performance. + +

+ + + +

+
+All footage in the video is available in [Google Drive](https://drive.google.com/drive/folders/1VFnWwuu-YXDKG-N6vcjK_nL7YZMFapMU?usp=sharing) and [Baidu Pan](https://pan.baidu.com/s/1igMteDwN5rO1Sn7YIhBlvQ) (code: tb3w).
+
+
+
+## Demo
+* [Webcam Demo](https://peterl1n.github.io/RobustVideoMatting/#/demo): Run the model live in your browser and visualize the recurrent states.
+* [Colab Demo](https://colab.research.google.com/drive/10z-pNKRnVNsp0Lq9tH1J_XPZ7CBC_uHm?usp=sharing): Test our model on your own videos with a free GPU.
+
+
+## Download
+
+We recommend MobileNetV3 models for most use cases. ResNet50 models are the larger variant with small performance improvements. Our model is available on various inference frameworks. See [inference documentation](documentation/inference.md) for more instructions.
+
+| Framework | Download | Notes |
+| --- | --- | --- |
+| PyTorch | rvm_mobilenetv3.pth<br>rvm_resnet50.pth | Official weights for PyTorch. Doc |
+| TorchHub | Nothing to download. | Easiest way to use our model in your PyTorch project. Doc |
+| TorchScript | rvm_mobilenetv3_fp32.torchscript<br>rvm_mobilenetv3_fp16.torchscript<br>rvm_resnet50_fp32.torchscript<br>rvm_resnet50_fp16.torchscript | For inference on mobile, consider exporting int8 quantized models yourself. Doc |
+| ONNX | rvm_mobilenetv3_fp32.onnx<br>rvm_mobilenetv3_fp16.onnx<br>rvm_resnet50_fp32.onnx<br>rvm_resnet50_fp16.onnx | Tested on ONNX Runtime with CPU and CUDA backends. Provided models use opset 12. Doc, Exporter |
+| TensorFlow | rvm_mobilenetv3_tf.zip<br>rvm_resnet50_tf.zip | TensorFlow 2 SavedModel. Doc |
+| TensorFlow.js | rvm_mobilenetv3_tfjs_int8.zip | Run the model on the web. Demo, Starter Code |
+| CoreML | rvm_mobilenetv3_1280x720_s0.375_fp16.mlmodel<br>rvm_mobilenetv3_1280x720_s0.375_int8.mlmodel<br>rvm_mobilenetv3_1920x1080_s0.25_fp16.mlmodel<br>rvm_mobilenetv3_1920x1080_s0.25_int8.mlmodel | CoreML does not support dynamic resolution; other resolutions can be exported yourself. Models require iOS 13+. `s` denotes `downsample_ratio`. Doc, Exporter |
+ +All models are available in [Google Drive](https://drive.google.com/drive/folders/1pBsG-SCTatv-95SnEuxmnvvlRx208VKj?usp=sharing) and [Baidu Pan](https://pan.baidu.com/s/1puPSxQqgBFOVpW4W7AolkA) (code: gym7). + +
+ +## PyTorch Example + +1. Install dependencies: +```sh +pip install -r requirements_inference.txt +``` + +2. Load the model: + +```python +import torch +from model import MattingNetwork + +model = MattingNetwork('mobilenetv3').eval().cuda() # or "resnet50" +model.load_state_dict(torch.load('rvm_mobilenetv3.pth')) +``` + +3. To convert videos, we provide a simple conversion API: + +```python +from inference import convert_video + +convert_video( + model, # The model, can be on any device (cpu or cuda). + input_source='input.mp4', # A video file or an image sequence directory. + output_type='video', # Choose "video" or "png_sequence" + output_composition='output.mp4', # File path if video; directory path if png sequence. + output_video_mbps=4, # Output video mbps. Not needed for png sequence. + downsample_ratio=None, # A hyperparameter to adjust or use None for auto. + seq_chunk=12, # Process n frames at once for better parallelism. +) +``` + +4. Or write your own inference code: +```python +from torch.utils.data import DataLoader +from torchvision.transforms import ToTensor +from inference_utils import VideoReader, VideoWriter + +reader = VideoReader('input.mp4', transform=ToTensor()) +writer = VideoWriter('output.mp4', frame_rate=30) + +bgr = torch.tensor([.47, 1, .6]).view(3, 1, 1).cuda() # Green background. +rec = [None] * 4 # Initial recurrent states. +downsample_ratio = 0.25 # Adjust based on your video. + +with torch.no_grad(): + for src in DataLoader(reader): # RGB tensor normalized to 0 ~ 1. + fgr, pha, *rec = model(src.cuda(), *rec, downsample_ratio) # Cycle the recurrent states. + com = fgr * pha + bgr * (1 - pha) # Composite to green background. + writer.write(com) # Write frame. +``` + +5. The models and converter API are also available through TorchHub. + +```python +# Load the model. +model = torch.hub.load("PeterL1n/RobustVideoMatting", "mobilenetv3") # or "resnet50" + +# Converter API. +convert_video = torch.hub.load("PeterL1n/RobustVideoMatting", "converter") +``` + +Please see [inference documentation](documentation/inference.md) for details on `downsample_ratio` hyperparameter, more converter arguments, and more advanced usage. + +
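+
+As a further illustration, here is a minimal sketch of compositing onto a custom background image instead of a flat color. It is not part of the official API; `background.png` is a hypothetical RGB image that you must supply, pre-sized to your video resolution.
+
+```python
+import torch
+from torch.utils.data import DataLoader
+from torchvision.io import read_image
+from torchvision.transforms import ToTensor
+from inference_utils import VideoReader, VideoWriter
+from model import MattingNetwork
+
+model = MattingNetwork('mobilenetv3').eval().cuda()
+model.load_state_dict(torch.load('rvm_mobilenetv3.pth'))
+
+reader = VideoReader('input.mp4', transform=ToTensor())
+writer = VideoWriter('output.mp4', frame_rate=30)
+
+bgr = (read_image('background.png').float() / 255).cuda()  # [3, H, W], hypothetical file.
+rec = [None] * 4                                           # Initial recurrent states.
+
+with torch.no_grad():
+    for src in DataLoader(reader):
+        fgr, pha, *rec = model(src.cuda(), *rec, 0.25)     # Carry the states across frames.
+        writer.write(fgr * pha + bgr * (1 - pha))          # Composite over the image.
+```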
+ +## Training and Evaluation + +Please refer to the [training documentation](documentation/training.md) to train and evaluate your own model. + +
+
+## Speed
+
+Speed is measured with `inference_speed_test.py` for reference.
+
+| GPU | dType | HD (1920x1080) | 4K (3840x2160) |
+| -------------- | ----- | -------------- |----------------|
+| RTX 3090 | FP16 | 172 FPS | 154 FPS |
+| RTX 2060 Super | FP16 | 134 FPS | 108 FPS |
+| GTX 1080 Ti | FP32 | 104 FPS | 74 FPS |
+
+* Note 1: HD uses `downsample_ratio=0.25`, 4K uses `downsample_ratio=0.125`. All tests use batch size 1 and frame chunk 1.
+* Note 2: GPUs before the Turing architecture do not support FP16 inference, so the GTX 1080 Ti uses FP32.
+* Note 3: We only measure tensor throughput. The provided video conversion script in this repo is expected to be much slower, because it does not utilize hardware video encoding/decoding and does not perform the tensor transfers on parallel threads. If you are interested in implementing hardware video encoding/decoding in Python, please refer to [PyNvCodec](https://github.com/NVIDIA/VideoProcessingFramework).
+
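+
+For a rough sanity check of these numbers on your own hardware, here is a minimal timing sketch. It is a simplified stand-in for `inference_speed_test.py`, not the script itself, and assumes `model` is the network loaded in the PyTorch example above:
+
+```python
+import time
+import torch
+
+src = torch.randn(1, 3, 1080, 1920).cuda()    # One synthetic HD frame.
+rec = [None] * 4
+with torch.no_grad():
+    for _ in range(10):                       # Warm-up iterations.
+        fgr, pha, *rec = model(src, *rec, 0.25)
+    torch.cuda.synchronize()
+    start, n = time.time(), 100
+    for _ in range(n):
+        fgr, pha, *rec = model(src, *rec, 0.25)
+    torch.cuda.synchronize()                  # Wait for queued GPU work before stopping the clock.
+    print(f'{n / (time.time() - start):.1f} FPS')
+```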
+ +## Project Members +* [Shanchuan Lin](https://www.linkedin.com/in/shanchuanlin/) +* [Linjie Yang](https://sites.google.com/site/linjieyang89/) +* [Imran Saleemi](https://www.linkedin.com/in/imran-saleemi/) +* [Soumyadip Sengupta](https://homes.cs.washington.edu/~soumya91/) + +
+ +## Third-Party Projects + +* [NCNN C++ Android](https://github.com/FeiGeChuanShu/ncnn_Android_RobustVideoMatting) ([@FeiGeChuanShu](https://github.com/FeiGeChuanShu)) +* [lite.ai.toolkit](https://github.com/DefTruth/RobustVideoMatting.lite.ai.toolkit) ([@DefTruth](https://github.com/DefTruth)) +* [Gradio Web Demo](https://huggingface.co/spaces/akhaliq/Robust-Video-Matting) ([@AK391](https://github.com/AK391)) +* [Unity Engine demo with NatML](https://hub.natml.ai/@natsuite/robust-video-matting) ([@natsuite](https://github.com/natsuite)) \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/README_zh_Hans.md b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/README_zh_Hans.md new file mode 100644 index 0000000..f6a8886 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/README_zh_Hans.md @@ -0,0 +1,234 @@ +# 稳定视频抠像 (RVM) + +![Teaser](/documentation/image/teaser.gif) + +

English | 中文

+ +论文 [Robust High-Resolution Video Matting with Temporal Guidance](https://peterl1n.github.io/RobustVideoMatting/) 的官方 GitHub 库。RVM 专为稳定人物视频抠像设计。不同于现有神经网络将每一帧作为单独图片处理,RVM 使用循环神经网络,在处理视频流时有时间记忆。RVM 可在任意视频上做实时高清抠像。在 Nvidia GTX 1080Ti 上实现 **4K 76FPS** 和 **HD 104FPS**。此研究项目来自[字节跳动](https://www.bytedance.com/)。 + +
+ +## 更新 + +* [2021年9月16日] 代码重新以 GPL-3.0 许可发布。 +* [2021年8月25日] 公开代码和模型。 +* [2021年7月27日] 论文被 WACV 2022 收录。 + +
+ +## 展示视频 +观看展示视频 ([YouTube](https://youtu.be/Jvzltozpbpk), [Bilibili](https://www.bilibili.com/video/BV1Z3411B7g7/)),了解模型能力。 +

+ + + +

+ +视频中的所有素材都提供下载,可用于测试模型:[Google Drive](https://drive.google.com/drive/folders/1VFnWwuu-YXDKG-N6vcjK_nL7YZMFapMU?usp=sharing) 或[百度网盘](https://pan.baidu.com/s/1igMteDwN5rO1Sn7YIhBlvQ)(密码: tb3w) + +
+ + +## Demo +* [网页](https://peterl1n.github.io/RobustVideoMatting/#/demo): 在浏览器里看摄像头抠像效果,展示模型内部循环记忆值。 +* [Colab](https://colab.research.google.com/drive/10z-pNKRnVNsp0Lq9tH1J_XPZ7CBC_uHm?usp=sharing): 用我们的模型转换你的视频。 + +
+
+## 下载
+
+推荐在通常情况下使用 MobileNetV3 的模型。ResNet50 的模型大很多,效果稍有提高。我们的模型支持很多框架。详情请阅读[推断文档](documentation/inference_zh_Hans.md)。
+
+| 框架 | 下载 | 备注 |
+| --- | --- | --- |
+| PyTorch | rvm_mobilenetv3.pth<br>rvm_resnet50.pth | 官方 PyTorch 模型权值。文档 |
+| TorchHub | 无需手动下载。 | 更方便地在你的 PyTorch 项目里使用此模型。文档 |
+| TorchScript | rvm_mobilenetv3_fp32.torchscript<br>rvm_mobilenetv3_fp16.torchscript<br>rvm_resnet50_fp32.torchscript<br>rvm_resnet50_fp16.torchscript | 若需在移动端推断,可以考虑自行导出 int8 量化的模型。文档 |
+| ONNX | rvm_mobilenetv3_fp32.onnx<br>rvm_mobilenetv3_fp16.onnx<br>rvm_resnet50_fp32.onnx<br>rvm_resnet50_fp16.onnx | 在 ONNX Runtime 的 CPU 和 CUDA backend 上测试过。提供的模型用 opset 12。文档、导出 |
+| TensorFlow | rvm_mobilenetv3_tf.zip<br>rvm_resnet50_tf.zip | TensorFlow 2 SavedModel 格式。文档 |
+| TensorFlow.js | rvm_mobilenetv3_tfjs_int8.zip | 在网页上跑模型。展示、示范代码 |
+| CoreML | rvm_mobilenetv3_1280x720_s0.375_fp16.mlmodel<br>rvm_mobilenetv3_1280x720_s0.375_int8.mlmodel<br>rvm_mobilenetv3_1920x1080_s0.25_fp16.mlmodel<br>rvm_mobilenetv3_1920x1080_s0.25_int8.mlmodel | CoreML 只能导出固定分辨率,其他分辨率可自行导出。支持 iOS 13+。s 代表下采样比。文档、导出 |
+ +所有模型可在 [Google Drive](https://drive.google.com/drive/folders/1pBsG-SCTatv-95SnEuxmnvvlRx208VKj?usp=sharing) 或[百度网盘](https://pan.baidu.com/s/1puPSxQqgBFOVpW4W7AolkA)(密码: gym7)上下载。 + +
+ +## PyTorch 范例 + +1. 安装 Python 库: +```sh +pip install -r requirements_inference.txt +``` + +2. 加载模型: + +```python +import torch +from model import MattingNetwork + +model = MattingNetwork('mobilenetv3').eval().cuda() # 或 "resnet50" +model.load_state_dict(torch.load('rvm_mobilenetv3.pth')) +``` + +3. 若只需要做视频抠像处理,我们提供简单的 API: + +```python +from inference import convert_video + +convert_video( + model, # 模型,可以加载到任何设备(cpu 或 cuda) + input_source='input.mp4', # 视频文件,或图片序列文件夹 + output_type='video', # 可选 "video"(视频)或 "png_sequence"(PNG 序列) + output_composition='output.mp4', # 若导出视频,提供文件路径。若导出 PNG 序列,提供文件夹路径 + output_video_mbps=4, # 若导出视频,提供视频码率 + downsample_ratio=None, # 下采样比,可根据具体视频调节,或 None 选择自动 + seq_chunk=12, # 设置多帧并行计算 +) +``` + +4. 或自己写推断逻辑: +```python +from torch.utils.data import DataLoader +from torchvision.transforms import ToTensor +from inference_utils import VideoReader, VideoWriter + +reader = VideoReader('input.mp4', transform=ToTensor()) +writer = VideoWriter('output.mp4', frame_rate=30) + +bgr = torch.tensor([.47, 1, .6]).view(3, 1, 1).cuda() # 绿背景 +rec = [None] * 4 # 初始循环记忆(Recurrent States) +downsample_ratio = 0.25 # 下采样比,根据视频调节 + +with torch.no_grad(): + for src in DataLoader(reader): # 输入张量,RGB通道,范围为 0~1 + fgr, pha, *rec = model(src.cuda(), *rec, downsample_ratio) # 将上一帧的记忆给下一帧 + com = fgr * pha + bgr * (1 - pha) # 将前景合成到绿色背景 + writer.write(com) # 输出帧 +``` + +5. 模型和 API 也可通过 TorchHub 快速载入。 + +```python +# 加载模型 +model = torch.hub.load("PeterL1n/RobustVideoMatting", "mobilenetv3") # 或 "resnet50" + +# 转换 API +convert_video = torch.hub.load("PeterL1n/RobustVideoMatting", "converter") +``` + +[推断文档](documentation/inference_zh_Hans.md)里有对 `downsample_ratio` 参数,API 使用,和高阶使用的讲解。 + +
+ +## 训练和评估 + +请参照[训练文档(英文)](documentation/training.md)。 + +
+ +## 速度 + +速度用 `inference_speed_test.py` 测量以供参考。 + +| GPU | dType | HD (1920x1080) | 4K (3840x2160) | +| -------------- | ----- | -------------- |----------------| +| RTX 3090 | FP16 | 172 FPS | 154 FPS | +| RTX 2060 Super | FP16 | 134 FPS | 108 FPS | +| GTX 1080 Ti | FP32 | 104 FPS | 74 FPS | + +* 注释1:HD 使用 `downsample_ratio=0.25`,4K 使用 `downsample_ratio=0.125`。 所有测试都使用 batch size 1 和 frame chunk 1。 +* 注释2:图灵架构之前的 GPU 不支持 FP16 推理,所以 GTX 1080 Ti 使用 FP32。 +* 注释3:我们只测量张量吞吐量(tensor throughput)。 提供的视频转换脚本会慢得多,因为它不使用硬件视频编码/解码,也没有在并行线程上完成张量传输。如果您有兴趣在 Python 中实现硬件视频编码/解码,请参考 [PyNvCodec](https://github.com/NVIDIA/VideoProcessingFramework)。 + +
+ +## 项目成员 +* [Shanchuan Lin](https://www.linkedin.com/in/shanchuanlin/) +* [Linjie Yang](https://sites.google.com/site/linjieyang89/) +* [Imran Saleemi](https://www.linkedin.com/in/imran-saleemi/) +* [Soumyadip Sengupta](https://homes.cs.washington.edu/~soumya91/) + +
+ +## 第三方资源 + +* [NCNN C++ Android](https://github.com/FeiGeChuanShu/ncnn_Android_RobustVideoMatting) ([@FeiGeChuanShu](https://github.com/FeiGeChuanShu)) +* [lite.ai.toolkit](https://github.com/DefTruth/RobustVideoMatting.lite.ai.toolkit) ([@DefTruth](https://github.com/DefTruth)) +* [Gradio Web Demo](https://huggingface.co/spaces/akhaliq/Robust-Video-Matting) ([@AK391](https://github.com/AK391)) +* [带有 NatML 的 Unity 引擎](https://hub.natml.ai/@natsuite/robust-video-matting) ([@natsuite](https://github.com/natsuite)) \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/__init__.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/checkpoints/rvm_mobilenetv3.pth b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/checkpoints/rvm_mobilenetv3.pth new file mode 100644 index 0000000..bb67632 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/checkpoints/rvm_mobilenetv3.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:3c7c1d92033f7c38d6577c481d13a195d7d80a159b960f4f3119ac7b534cf4f8 +size 15217721 diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/checkpoints/rvm_resnet50.pth b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/checkpoints/rvm_resnet50.pth new file mode 100644 index 0000000..a94490c --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/checkpoints/rvm_resnet50.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c191a807251164c073dce5fa408e7a816070d539b882b2a3150330a9fec112ce +size 107905875 diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/dataset/augmentation.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/dataset/augmentation.py new file mode 100644 index 0000000..4a12869 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/dataset/augmentation.py @@ -0,0 +1,260 @@ +import easing_functions as ef +import random +import torch +from torchvision import transforms +from torchvision.transforms import functional as F + + +class MotionAugmentation: + def __init__(self, + size, + prob_fgr_affine, + prob_bgr_affine, + prob_noise, + prob_color_jitter, + prob_grayscale, + prob_sharpness, + prob_blur, + prob_hflip, + prob_pause, + static_affine=True, + aspect_ratio_range=(0.9, 1.1)): + self.size = size + self.prob_fgr_affine = prob_fgr_affine + self.prob_bgr_affine = prob_bgr_affine + self.prob_noise = prob_noise + self.prob_color_jitter = prob_color_jitter + self.prob_grayscale = prob_grayscale + self.prob_sharpness = prob_sharpness + self.prob_blur = prob_blur + self.prob_hflip = prob_hflip + self.prob_pause = prob_pause + self.static_affine = static_affine + self.aspect_ratio_range = aspect_ratio_range + + def __call__(self, fgrs, phas, bgrs): + # Foreground affine + if random.random() < self.prob_fgr_affine: + fgrs, phas = self._motion_affine(fgrs, phas) + + # Background affine + if random.random() < self.prob_bgr_affine / 2: + bgrs = self._motion_affine(bgrs) + if random.random() < self.prob_bgr_affine / 2: + fgrs, phas, bgrs = self._motion_affine(fgrs, phas, bgrs) + + # Still Affine + if self.static_affine: + fgrs, phas = self._static_affine(fgrs, phas, scale_ranges=(0.5, 1)) + bgrs = self._static_affine(bgrs, scale_ranges=(1, 1.5)) + + # To tensor + fgrs = torch.stack([F.to_tensor(fgr) for fgr in 
fgrs]) + phas = torch.stack([F.to_tensor(pha) for pha in phas]) + bgrs = torch.stack([F.to_tensor(bgr) for bgr in bgrs]) + + # Resize + params = transforms.RandomResizedCrop.get_params(fgrs, scale=(1, 1), ratio=self.aspect_ratio_range) + fgrs = F.resized_crop(fgrs, *params, self.size, interpolation=F.InterpolationMode.BILINEAR) + phas = F.resized_crop(phas, *params, self.size, interpolation=F.InterpolationMode.BILINEAR) + params = transforms.RandomResizedCrop.get_params(bgrs, scale=(1, 1), ratio=self.aspect_ratio_range) + bgrs = F.resized_crop(bgrs, *params, self.size, interpolation=F.InterpolationMode.BILINEAR) + + # Horizontal flip + if random.random() < self.prob_hflip: + fgrs = F.hflip(fgrs) + phas = F.hflip(phas) + if random.random() < self.prob_hflip: + bgrs = F.hflip(bgrs) + + # Noise + if random.random() < self.prob_noise: + fgrs, bgrs = self._motion_noise(fgrs, bgrs) + + # Color jitter + if random.random() < self.prob_color_jitter: + fgrs = self._motion_color_jitter(fgrs) + if random.random() < self.prob_color_jitter: + bgrs = self._motion_color_jitter(bgrs) + + # Grayscale + if random.random() < self.prob_grayscale: + fgrs = F.rgb_to_grayscale(fgrs, num_output_channels=3).contiguous() + bgrs = F.rgb_to_grayscale(bgrs, num_output_channels=3).contiguous() + + # Sharpen + if random.random() < self.prob_sharpness: + sharpness = random.random() * 8 + fgrs = F.adjust_sharpness(fgrs, sharpness) + phas = F.adjust_sharpness(phas, sharpness) + bgrs = F.adjust_sharpness(bgrs, sharpness) + + # Blur + if random.random() < self.prob_blur / 3: + fgrs, phas = self._motion_blur(fgrs, phas) + if random.random() < self.prob_blur / 3: + bgrs = self._motion_blur(bgrs) + if random.random() < self.prob_blur / 3: + fgrs, phas, bgrs = self._motion_blur(fgrs, phas, bgrs) + + # Pause + if random.random() < self.prob_pause: + fgrs, phas, bgrs = self._motion_pause(fgrs, phas, bgrs) + + return fgrs, phas, bgrs + + def _static_affine(self, *imgs, scale_ranges): + params = transforms.RandomAffine.get_params( + degrees=(-10, 10), translate=(0.1, 0.1), scale_ranges=scale_ranges, + shears=(-5, 5), img_size=imgs[0][0].size) + imgs = [[F.affine(t, *params, F.InterpolationMode.BILINEAR) for t in img] for img in imgs] + return imgs if len(imgs) > 1 else imgs[0] + + def _motion_affine(self, *imgs): + config = dict(degrees=(-10, 10), translate=(0.1, 0.1), + scale_ranges=(0.9, 1.1), shears=(-5, 5), img_size=imgs[0][0].size) + angleA, (transXA, transYA), scaleA, (shearXA, shearYA) = transforms.RandomAffine.get_params(**config) + angleB, (transXB, transYB), scaleB, (shearXB, shearYB) = transforms.RandomAffine.get_params(**config) + + T = len(imgs[0]) + easing = random_easing_fn() + for t in range(T): + percentage = easing(t / (T - 1)) + angle = lerp(angleA, angleB, percentage) + transX = lerp(transXA, transXB, percentage) + transY = lerp(transYA, transYB, percentage) + scale = lerp(scaleA, scaleB, percentage) + shearX = lerp(shearXA, shearXB, percentage) + shearY = lerp(shearYA, shearYB, percentage) + for img in imgs: + img[t] = F.affine(img[t], angle, (transX, transY), scale, (shearX, shearY), F.InterpolationMode.BILINEAR) + return imgs if len(imgs) > 1 else imgs[0] + + def _motion_noise(self, *imgs): + grain_size = random.random() * 3 + 1 # range 1 ~ 4 + monochrome = random.random() < 0.5 + for img in imgs: + T, C, H, W = img.shape + noise = torch.randn((T, 1 if monochrome else C, round(H / grain_size), round(W / grain_size))) + noise.mul_(random.random() * 0.2 / grain_size) + if grain_size != 1: + noise = 
F.resize(noise, (H, W))
+            img.add_(noise).clamp_(0, 1)
+        return imgs if len(imgs) > 1 else imgs[0]
+
+    def _motion_color_jitter(self, *imgs):
+        # Ease brightness, contrast, saturation, and hue between two random endpoints over time.
+        brightnessA, brightnessB, contrastA, contrastB, saturationA, saturationB, hueA, hueB \
+            = torch.randn(8).mul(0.1).tolist()
+        strength = random.random() * 0.2
+        easing = random_easing_fn()
+        T = len(imgs[0])
+        for t in range(T):
+            percentage = easing(t / (T - 1)) * strength
+            for img in imgs:
+                img[t] = F.adjust_brightness(img[t], max(1 + lerp(brightnessA, brightnessB, percentage), 0.1))
+                img[t] = F.adjust_contrast(img[t], max(1 + lerp(contrastA, contrastB, percentage), 0.1))
+                img[t] = F.adjust_saturation(img[t], max(1 + lerp(saturationA, saturationB, percentage), 0.1))
+                img[t] = F.adjust_hue(img[t], min(0.5, max(-0.5, lerp(hueA, hueB, percentage) * 0.1)))
+        return imgs if len(imgs) > 1 else imgs[0]
+
+    def _motion_blur(self, *imgs):
+        blurA = random.random() * 10
+        blurB = random.random() * 10
+
+        T = len(imgs[0])
+        easing = random_easing_fn()
+        for t in range(T):
+            percentage = easing(t / (T - 1))
+            blur = max(lerp(blurA, blurB, percentage), 0)
+            if blur != 0:
+                kernel_size = int(blur * 2)
+                if kernel_size % 2 == 0:
+                    kernel_size += 1  # Make kernel_size odd
+                for img in imgs:
+                    img[t] = F.gaussian_blur(img[t], kernel_size, sigma=blur)
+
+        return imgs if len(imgs) > 1 else imgs[0]
+
+    def _motion_pause(self, *imgs):
+        T = len(imgs[0])
+        pause_frame = random.choice(range(T - 1))
+        pause_length = random.choice(range(T - pause_frame))
+        for img in imgs:
+            img[pause_frame + 1 : pause_frame + pause_length] = img[pause_frame]
+        return imgs if len(imgs) > 1 else imgs[0]
+
+
+def lerp(a, b, percentage):
+    return a * (1 - percentage) + b * percentage
+
+
+def random_easing_fn():
+    if random.random() < 0.2:
+        return ef.LinearInOut()
+    else:
+        return random.choice([
+            ef.BackEaseIn,
+            ef.BackEaseOut,
+            ef.BackEaseInOut,
+            ef.BounceEaseIn,
+            ef.BounceEaseOut,
+            ef.BounceEaseInOut,
+            ef.CircularEaseIn,
+            ef.CircularEaseOut,
+            ef.CircularEaseInOut,
+            ef.CubicEaseIn,
+            ef.CubicEaseOut,
+            ef.CubicEaseInOut,
+            ef.ExponentialEaseIn,
+            ef.ExponentialEaseOut,
+            ef.ExponentialEaseInOut,
+            ef.ElasticEaseIn,
+            ef.ElasticEaseOut,
+            ef.ElasticEaseInOut,
+            ef.QuadEaseIn,
+            ef.QuadEaseOut,
+            ef.QuadEaseInOut,
+            ef.QuarticEaseIn,
+            ef.QuarticEaseOut,
+            ef.QuarticEaseInOut,
+            ef.QuinticEaseIn,
+            ef.QuinticEaseOut,
+            ef.QuinticEaseInOut,
+            ef.SineEaseIn,
+            ef.SineEaseOut,
+            ef.SineEaseInOut,
+            Step,
+        ])()
+
+class Step: # Custom easing function for sudden change.
+ def __call__(self, value): + return 0 if value < 0.5 else 1 + + +# ---------------------------- Frame Sampler ---------------------------- + + +class TrainFrameSampler: + def __init__(self, speed=[0.5, 1, 2, 3, 4, 5]): + self.speed = speed + + def __call__(self, seq_length): + frames = list(range(seq_length)) + + # Speed up + speed = random.choice(self.speed) + frames = [int(f * speed) for f in frames] + + # Shift + shift = random.choice(range(seq_length)) + frames = [f + shift for f in frames] + + # Reverse + if random.random() < 0.5: + frames = frames[::-1] + + return frames + +class ValidFrameSampler: + def __call__(self, seq_length): + return range(seq_length) diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/dataset/coco.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/dataset/coco.py new file mode 100644 index 0000000..3c183cb --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/dataset/coco.py @@ -0,0 +1,103 @@ +import os +import numpy as np +import random +import json +import os +from torch.utils.data import Dataset +from torchvision import transforms +from torchvision.transforms import functional as F +from PIL import Image + + +class CocoPanopticDataset(Dataset): + def __init__(self, + imgdir: str, + anndir: str, + annfile: str, + transform=None): + with open(annfile) as f: + self.data = json.load(f)['annotations'] + self.data = list(filter(lambda data: any(info['category_id'] == 1 for info in data['segments_info']), self.data)) + self.imgdir = imgdir + self.anndir = anndir + self.transform = transform + + def __len__(self): + return len(self.data) + + def __getitem__(self, idx): + data = self.data[idx] + img = self._load_img(data) + seg = self._load_seg(data) + + if self.transform is not None: + img, seg = self.transform(img, seg) + + return img, seg + + def _load_img(self, data): + with Image.open(os.path.join(self.imgdir, data['file_name'].replace('.png', '.jpg'))) as img: + return img.convert('RGB') + + def _load_seg(self, data): + with Image.open(os.path.join(self.anndir, data['file_name'])) as ann: + ann.load() + + ann = np.array(ann, copy=False).astype(np.int32) + ann = ann[:, :, 0] + 256 * ann[:, :, 1] + 256 * 256 * ann[:, :, 2] + seg = np.zeros(ann.shape, np.uint8) + + for segments_info in data['segments_info']: + if segments_info['category_id'] in [1, 27, 32]: # person, backpack, tie + seg[ann == segments_info['id']] = 255 + + return Image.fromarray(seg) + + +class CocoPanopticTrainAugmentation: + def __init__(self, size): + self.size = size + self.jitter = transforms.ColorJitter(0.1, 0.1, 0.1, 0.1) + + def __call__(self, img, seg): + # Affine + params = transforms.RandomAffine.get_params(degrees=(-20, 20), translate=(0.1, 0.1), + scale_ranges=(1, 1), shears=(-10, 10), img_size=img.size) + img = F.affine(img, *params, interpolation=F.InterpolationMode.BILINEAR) + seg = F.affine(seg, *params, interpolation=F.InterpolationMode.NEAREST) + + # Resize + params = transforms.RandomResizedCrop.get_params(img, scale=(0.5, 1), ratio=(0.7, 1.3)) + img = F.resized_crop(img, *params, self.size, interpolation=F.InterpolationMode.BILINEAR) + seg = F.resized_crop(seg, *params, self.size, interpolation=F.InterpolationMode.NEAREST) + + # Horizontal flip + if random.random() < 0.5: + img = F.hflip(img) + seg = F.hflip(seg) + + # Color jitter + img = self.jitter(img) + + # To tensor + img = F.to_tensor(img) + seg = F.to_tensor(seg) + + return img, seg + + +class CocoPanopticValidAugmentation: + def __init__(self, size): + 
self.size = size + + def __call__(self, img, seg): + # Resize + params = transforms.RandomResizedCrop.get_params(img, scale=(1, 1), ratio=(1., 1.)) + img = F.resized_crop(img, *params, self.size, interpolation=F.InterpolationMode.BILINEAR) + seg = F.resized_crop(seg, *params, self.size, interpolation=F.InterpolationMode.NEAREST) + + # To tensor + img = F.to_tensor(img) + seg = F.to_tensor(seg) + + return img, seg \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/dataset/imagematte.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/dataset/imagematte.py new file mode 100644 index 0000000..124faca --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/dataset/imagematte.py @@ -0,0 +1,98 @@ +import os +import random +from torch.utils.data import Dataset +from PIL import Image + +from .augmentation import MotionAugmentation + + +class ImageMatteDataset(Dataset): + def __init__(self, + imagematte_dir, + background_image_dir, + background_video_dir, + size, + seq_length, + seq_sampler, + transform): + self.imagematte_dir = imagematte_dir + self.imagematte_files = os.listdir(os.path.join(imagematte_dir, 'fgr')) + self.background_image_dir = background_image_dir + self.background_image_files = os.listdir(background_image_dir) + self.background_video_dir = background_video_dir + self.background_video_clips = os.listdir(background_video_dir) + self.background_video_frames = [sorted(os.listdir(os.path.join(background_video_dir, clip))) + for clip in self.background_video_clips] + self.seq_length = seq_length + self.seq_sampler = seq_sampler + self.size = size + self.transform = transform + + def __len__(self): + return max(len(self.imagematte_files), len(self.background_image_files) + len(self.background_video_clips)) + + def __getitem__(self, idx): + if random.random() < 0.5: + bgrs = self._get_random_image_background() + else: + bgrs = self._get_random_video_background() + + fgrs, phas = self._get_imagematte(idx) + + if self.transform is not None: + return self.transform(fgrs, phas, bgrs) + + return fgrs, phas, bgrs + + def _get_imagematte(self, idx): + with Image.open(os.path.join(self.imagematte_dir, 'fgr', self.imagematte_files[idx % len(self.imagematte_files)])) as fgr, \ + Image.open(os.path.join(self.imagematte_dir, 'pha', self.imagematte_files[idx % len(self.imagematte_files)])) as pha: + fgr = self._downsample_if_needed(fgr.convert('RGB')) + pha = self._downsample_if_needed(pha.convert('L')) + fgrs = [fgr] * self.seq_length + phas = [pha] * self.seq_length + return fgrs, phas + + def _get_random_image_background(self): + with Image.open(os.path.join(self.background_image_dir, self.background_image_files[random.choice(range(len(self.background_image_files)))])) as bgr: + bgr = self._downsample_if_needed(bgr.convert('RGB')) + bgrs = [bgr] * self.seq_length + return bgrs + + def _get_random_video_background(self): + clip_idx = random.choice(range(len(self.background_video_clips))) + frame_count = len(self.background_video_frames[clip_idx]) + frame_idx = random.choice(range(max(1, frame_count - self.seq_length))) + clip = self.background_video_clips[clip_idx] + bgrs = [] + for i in self.seq_sampler(self.seq_length): + frame_idx_t = frame_idx + i + frame = self.background_video_frames[clip_idx][frame_idx_t % frame_count] + with Image.open(os.path.join(self.background_video_dir, clip, frame)) as bgr: + bgr = self._downsample_if_needed(bgr.convert('RGB')) + bgrs.append(bgr) + return bgrs + + def 
_downsample_if_needed(self, img): + w, h = img.size + if min(w, h) > self.size: + scale = self.size / min(w, h) + w = int(scale * w) + h = int(scale * h) + img = img.resize((w, h)) + return img + +class ImageMatteAugmentation(MotionAugmentation): + def __init__(self, size): + super().__init__( + size=size, + prob_fgr_affine=0.95, + prob_bgr_affine=0.3, + prob_noise=0.05, + prob_color_jitter=0.3, + prob_grayscale=0.03, + prob_sharpness=0.05, + prob_blur=0.02, + prob_hflip=0.5, + prob_pause=0.03, + ) diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/dataset/spd.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/dataset/spd.py new file mode 100644 index 0000000..7f76b82 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/dataset/spd.py @@ -0,0 +1,27 @@ +import os +from torch.utils.data import Dataset +from PIL import Image + + +class SuperviselyPersonDataset(Dataset): + def __init__(self, imgdir, segdir, transform=None): + self.img_dir = imgdir + self.img_files = sorted(os.listdir(imgdir)) + self.seg_dir = segdir + self.seg_files = sorted(os.listdir(segdir)) + assert len(self.img_files) == len(self.seg_files) + self.transform = transform + + def __len__(self): + return len(self.img_files) + + def __getitem__(self, idx): + with Image.open(os.path.join(self.img_dir, self.img_files[idx])) as img, \ + Image.open(os.path.join(self.seg_dir, self.seg_files[idx])) as seg: + img = img.convert('RGB') + seg = seg.convert('L') + + if self.transform is not None: + img, seg = self.transform(img, seg) + + return img, seg diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/dataset/videomatte.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/dataset/videomatte.py new file mode 100644 index 0000000..555911b --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/dataset/videomatte.py @@ -0,0 +1,125 @@ +import os +import random +from torch.utils.data import Dataset +from PIL import Image + +from .augmentation import MotionAugmentation + + +class VideoMatteDataset(Dataset): + def __init__(self, + videomatte_dir, + background_image_dir, + background_video_dir, + size, + seq_length, + seq_sampler, + transform=None): + self.background_image_dir = background_image_dir + self.background_image_files = os.listdir(background_image_dir) + self.background_video_dir = background_video_dir + self.background_video_clips = sorted(os.listdir(background_video_dir)) + self.background_video_frames = [sorted(os.listdir(os.path.join(background_video_dir, clip))) + for clip in self.background_video_clips] + + self.videomatte_dir = videomatte_dir + self.videomatte_clips = sorted(os.listdir(os.path.join(videomatte_dir, 'fgr'))) + self.videomatte_frames = [sorted(os.listdir(os.path.join(videomatte_dir, 'fgr', clip))) + for clip in self.videomatte_clips] + self.videomatte_idx = [(clip_idx, frame_idx) + for clip_idx in range(len(self.videomatte_clips)) + for frame_idx in range(0, len(self.videomatte_frames[clip_idx]), seq_length)] + self.size = size + self.seq_length = seq_length + self.seq_sampler = seq_sampler + self.transform = transform + + def __len__(self): + return len(self.videomatte_idx) + + def __getitem__(self, idx): + if random.random() < 0.5: + bgrs = self._get_random_image_background() + else: + bgrs = self._get_random_video_background() + + fgrs, phas = self._get_videomatte(idx) + + if self.transform is not None: + return self.transform(fgrs, phas, bgrs) + + return fgrs, phas, bgrs + + def 
_get_random_image_background(self): + with Image.open(os.path.join(self.background_image_dir, random.choice(self.background_image_files))) as bgr: + bgr = self._downsample_if_needed(bgr.convert('RGB')) + bgrs = [bgr] * self.seq_length + return bgrs + + def _get_random_video_background(self): + clip_idx = random.choice(range(len(self.background_video_clips))) + frame_count = len(self.background_video_frames[clip_idx]) + frame_idx = random.choice(range(max(1, frame_count - self.seq_length))) + clip = self.background_video_clips[clip_idx] + bgrs = [] + for i in self.seq_sampler(self.seq_length): + frame_idx_t = frame_idx + i + frame = self.background_video_frames[clip_idx][frame_idx_t % frame_count] + with Image.open(os.path.join(self.background_video_dir, clip, frame)) as bgr: + bgr = self._downsample_if_needed(bgr.convert('RGB')) + bgrs.append(bgr) + return bgrs + + def _get_videomatte(self, idx): + clip_idx, frame_idx = self.videomatte_idx[idx] + clip = self.videomatte_clips[clip_idx] + frame_count = len(self.videomatte_frames[clip_idx]) + fgrs, phas = [], [] + for i in self.seq_sampler(self.seq_length): + frame = self.videomatte_frames[clip_idx][(frame_idx + i) % frame_count] + with Image.open(os.path.join(self.videomatte_dir, 'fgr', clip, frame)) as fgr, \ + Image.open(os.path.join(self.videomatte_dir, 'pha', clip, frame)) as pha: + fgr = self._downsample_if_needed(fgr.convert('RGB')) + pha = self._downsample_if_needed(pha.convert('L')) + fgrs.append(fgr) + phas.append(pha) + return fgrs, phas + + def _downsample_if_needed(self, img): + w, h = img.size + if min(w, h) > self.size: + scale = self.size / min(w, h) + w = int(scale * w) + h = int(scale * h) + img = img.resize((w, h)) + return img + +class VideoMatteTrainAugmentation(MotionAugmentation): + def __init__(self, size): + super().__init__( + size=size, + prob_fgr_affine=0.3, + prob_bgr_affine=0.3, + prob_noise=0.1, + prob_color_jitter=0.3, + prob_grayscale=0.02, + prob_sharpness=0.1, + prob_blur=0.02, + prob_hflip=0.5, + prob_pause=0.03, + ) + +class VideoMatteValidAugmentation(MotionAugmentation): + def __init__(self, size): + super().__init__( + size=size, + prob_fgr_affine=0, + prob_bgr_affine=0, + prob_noise=0, + prob_color_jitter=0, + prob_grayscale=0, + prob_sharpness=0, + prob_blur=0, + prob_hflip=0, + prob_pause=0, + ) diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/dataset/youtubevis.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/dataset/youtubevis.py new file mode 100644 index 0000000..babbf0e --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/dataset/youtubevis.py @@ -0,0 +1,123 @@ +import torch +import os +import json +import numpy as np +import random +from torch.utils.data import Dataset +from PIL import Image +from torchvision import transforms +from torchvision.transforms import functional as F + + +class YouTubeVISDataset(Dataset): + def __init__(self, videodir, annfile, size, seq_length, seq_sampler, transform=None): + self.videodir = videodir + self.size = size + self.seq_length = seq_length + self.seq_sampler = seq_sampler + self.transform = transform + + with open(annfile) as f: + data = json.load(f) + + self.masks = {} + for ann in data['annotations']: + if ann['category_id'] == 26: # person + video_id = ann['video_id'] + if video_id not in self.masks: + self.masks[video_id] = [[] for _ in range(len(ann['segmentations']))] + for frame, mask in zip(self.masks[video_id], ann['segmentations']): + if mask is not None: + frame.append(mask) + + 
self.videos = {} + for video in data['videos']: + video_id = video['id'] + if video_id in self.masks: + self.videos[video_id] = video + + self.index = [] + for video_id in self.videos.keys(): + for frame in range(len(self.videos[video_id])): + self.index.append((video_id, frame)) + + def __len__(self): + return len(self.index) + + def __getitem__(self, idx): + video_id, frame_id = self.index[idx] + video = self.videos[video_id] + frame_count = len(self.videos[video_id]['file_names']) + H, W = video['height'], video['width'] + + imgs, segs = [], [] + for t in self.seq_sampler(self.seq_length): + frame = (frame_id + t) % frame_count + + filename = video['file_names'][frame] + masks = self.masks[video_id][frame] + + with Image.open(os.path.join(self.videodir, filename)) as img: + imgs.append(self._downsample_if_needed(img.convert('RGB'), Image.BILINEAR)) + + seg = np.zeros((H, W), dtype=np.uint8) + for mask in masks: + seg |= self._decode_rle(mask) + segs.append(self._downsample_if_needed(Image.fromarray(seg), Image.NEAREST)) + + if self.transform is not None: + imgs, segs = self.transform(imgs, segs) + + return imgs, segs + + def _decode_rle(self, rle): + H, W = rle['size'] + msk = np.zeros(H * W, dtype=np.uint8) + encoding = rle['counts'] + skip = 0 + for i in range(0, len(encoding) - 1, 2): + skip += encoding[i] + draw = encoding[i + 1] + msk[skip : skip + draw] = 255 + skip += draw + return msk.reshape(W, H).transpose() + + def _downsample_if_needed(self, img, resample): + w, h = img.size + if min(w, h) > self.size: + scale = self.size / min(w, h) + w = int(scale * w) + h = int(scale * h) + img = img.resize((w, h), resample) + return img + + +class YouTubeVISAugmentation: + def __init__(self, size): + self.size = size + self.jitter = transforms.ColorJitter(0.3, 0.3, 0.3, 0.15) + + def __call__(self, imgs, segs): + + # To tensor + imgs = torch.stack([F.to_tensor(img) for img in imgs]) + segs = torch.stack([F.to_tensor(seg) for seg in segs]) + + # Resize + params = transforms.RandomResizedCrop.get_params(imgs, scale=(0.8, 1), ratio=(0.9, 1.1)) + imgs = F.resized_crop(imgs, *params, self.size, interpolation=F.InterpolationMode.BILINEAR) + segs = F.resized_crop(segs, *params, self.size, interpolation=F.InterpolationMode.BILINEAR) + + # Color jitter + imgs = self.jitter(imgs) + + # Grayscale + if random.random() < 0.05: + imgs = F.rgb_to_grayscale(imgs, num_output_channels=3) + + # Horizontal flip + if random.random() < 0.5: + imgs = F.hflip(imgs) + segs = F.hflip(segs) + + return imgs, segs \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/image/showreel.gif b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/image/showreel.gif new file mode 100644 index 0000000..5af4c45 Binary files /dev/null and b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/image/showreel.gif differ diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/image/teaser.gif b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/image/teaser.gif new file mode 100644 index 0000000..26c9ec2 Binary files /dev/null and b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/image/teaser.gif differ diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/inference.md b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/inference.md new file mode 100644 index 0000000..413d720 --- /dev/null +++ 
b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/inference.md @@ -0,0 +1,352 @@ +# Inference + +

English | 中文

+ +## Content + +* [Concepts](#concepts) + * [Downsample Ratio](#downsample-ratio) + * [Recurrent States](#recurrent-states) +* [PyTorch](#pytorch) +* [TorchHub](#torchhub) +* [TorchScript](#torchscript) +* [ONNX](#onnx) +* [TensorFlow](#tensorflow) +* [TensorFlow.js](#tensorflowjs) +* [CoreML](#coreml) + +
+
+
+## Concepts
+
+### Downsample Ratio
+
+The table provides a general guideline. Please adjust based on your video content.
+
+| Resolution | Portrait | Full-Body |
+| ------------- | ------------- | -------------- |
+| <= 512x512 | 1 | 1 |
+| 1280x720 | 0.375 | 0.6 |
+| 1920x1080 | 0.25 | 0.4 |
+| 3840x2160 | 0.125 | 0.2 |
+
+Internally, the model downsamples the input for stage 1, then refines at high resolution for stage 2.
+
+Set `downsample_ratio` so that the downsampled resolution is between 256 and 512. For example, for `1920x1080` input with `downsample_ratio=0.25`, the resized resolution `480x270` is between 256 and 512.
+
+Adjust `downsample_ratio` based on the video content. If the shot is portrait, a lower `downsample_ratio` is sufficient. If the shot contains the full human body, use a higher `downsample_ratio`. Note that a higher `downsample_ratio` is not always better.
+
+
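+As a worked example, here is a small helper in the spirit of the rule above. It is illustrative, not the official API; the converter's auto mode behaves similarly by capping the downsampled max size at 512px:
+
+```python
+def auto_downsample_ratio(h, w, target=512):
+    """Pick a ratio so the downsampled long side is at most `target` px."""
+    return min(target / max(h, w), 1.0)
+
+print(auto_downsample_ratio(1080, 1920))  # ~0.267 for HD, close to the 0.25 guideline.
+```
+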
+ +### Recurrent States +The model is a recurrent neural network. You must process frames sequentially and recycle its recurrent states. + +**Correct Way** + +The recurrent outputs are recycled back as input when processing the next frame. The states are essentially the model's memory. + +```python +rec = [None] * 4 # Initial recurrent states are None + +for frame in YOUR_VIDEO: + fgr, pha, *rec = model(frame, *rec, downsample_ratio) +``` + +**Wrong Way** + +The model does not utilize the recurrent states. Only use it to process independent images. + +```python +for frame in YOUR_VIDEO: + fgr, pha = model(frame, downsample_ratio)[:2] +``` + +More technical details are in the [paper](https://peterl1n.github.io/RobustVideoMatting/). + +


+
+
+## PyTorch
+
+Model loading:
+
+```python
+import torch
+from model import MattingNetwork
+
+model = MattingNetwork(variant='mobilenetv3').eval().cuda() # Or variant="resnet50"
+model.load_state_dict(torch.load('rvm_mobilenetv3.pth'))
+```
+
+Example inference loop:
+```python
+rec = [None] * 4 # Set initial recurrent states to None
+
+for src in YOUR_VIDEO: # src can be [B, C, H, W] or [B, T, C, H, W]
+    fgr, pha, *rec = model(src, *rec, downsample_ratio=0.25)
+```
+
+* `src`: Input frame.
+    * Can be of shape `[B, C, H, W]` or `[B, T, C, H, W]`.
+    * If `[B, T, C, H, W]`, a chunk of `T` frames can be given at once for better parallelism.
+    * RGB input is normalized to `0~1` range.
+
+* `fgr, pha`: Foreground and alpha predictions.
+    * Can be of shape `[B, C, H, W]` or `[B, T, C, H, W]` depending on `src`.
+    * `fgr` has `C=3` for RGB, `pha` has `C=1`.
+    * Outputs are normalized to `0~1` range.
+* `rec`: Recurrent states.
+    * Type of `List[Tensor, Tensor, Tensor, Tensor]`.
+    * Initial `rec` can be `List[None, None, None, None]`.
+    * It has 4 recurrent states because the model has 4 ConvGRU layers.
+    * All tensors are rank 4 regardless of `src` rank.
+    * If a chunk of `T` frames is given, only the last frame's recurrent states will be returned.
+
+To run inference on a video, here is a complete example:
+
+```python
+from torch.utils.data import DataLoader
+from torchvision.transforms import ToTensor
+from inference_utils import VideoReader, VideoWriter
+
+reader = VideoReader('input.mp4', transform=ToTensor())
+writer = VideoWriter('output.mp4', frame_rate=30)
+
+bgr = torch.tensor([.47, 1, .6]).view(3, 1, 1).cuda() # Green background.
+rec = [None] * 4 # Initial recurrent states.
+
+with torch.no_grad():
+    for src in DataLoader(reader):
+        fgr, pha, *rec = model(src.cuda(), *rec, downsample_ratio=0.25) # Cycle the recurrent states.
+        writer.write(fgr * pha + bgr * (1 - pha))
+```
+
+Or you can use the provided video converter:
+
+```python
+from inference import convert_video
+
+convert_video(
+    model,                           # The loaded model, can be on any device (cpu or cuda).
+    input_source='input.mp4',        # A video file or an image sequence directory.
+    input_resize=(1920, 1080),       # [Optional] Resize the input (also the output).
+    downsample_ratio=0.25,           # [Optional] If None, make downsampled max size be 512px.
+    output_type='video',             # Choose "video" or "png_sequence"
+    output_composition='com.mp4',    # File path if video; directory path if png sequence.
+    output_alpha="pha.mp4",          # [Optional] Output the raw alpha prediction.
+    output_foreground="fgr.mp4",     # [Optional] Output the raw foreground prediction.
+    output_video_mbps=4,             # Output video mbps. Not needed for png sequence.
+    seq_chunk=12,                    # Process n frames at once for better parallelism.
+    num_workers=1,                   # Only for image sequence input. Reader threads.
+    progress=True                    # Print conversion progress.
+)
+```
+
+The converter can also be invoked from the command line:
+
+```sh
+python inference.py \
+    --variant mobilenetv3 \
+    --checkpoint "CHECKPOINT" \
+    --device cuda \
+    --input-source "input.mp4" \
+    --downsample-ratio 0.25 \
+    --output-type video \
+    --output-composition "composition.mp4" \
+    --output-alpha "alpha.mp4" \
+    --output-foreground "foreground.mp4" \
+    --output-video-mbps 4 \
+    --seq-chunk 12
+```
+
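+As a further sketch, chunked inference over the `T` dimension. This assumes `frames` is a hypothetical `[N, C, H, W]` float tensor of the whole video already in memory; adapt the loading to your own pipeline:
+
+```python
+rec = [None] * 4
+outputs = []
+with torch.no_grad():
+    for chunk in frames.split(12):               # T = 12 frames per forward pass.
+        src = chunk.unsqueeze(0).cuda()          # [B=1, T, C, H, W]
+        fgr, pha, *rec = model(src, *rec, downsample_ratio=0.25)
+        outputs.append((fgr * pha).cpu())        # rec carries over between chunks.
+```
+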


+ +## TorchHub + +Model loading: + +```python +model = torch.hub.load("PeterL1n/RobustVideoMatting", "mobilenetv3") # or "resnet50" +``` + +Use the conversion function. Refer to the documentation for `convert_video` function above. + +```python +convert_video = torch.hub.load("PeterL1n/RobustVideoMatting", "converter") + +convert_video(model, ...args...) +``` + +
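+For instance, here is a minimal end-to-end sketch combining the two TorchHub entry points with the converter arguments documented above:
+
+```python
+import torch
+
+model = torch.hub.load("PeterL1n/RobustVideoMatting", "mobilenetv3")
+convert_video = torch.hub.load("PeterL1n/RobustVideoMatting", "converter")
+
+convert_video(
+    model,                           # Any device (cpu or cuda).
+    input_source='input.mp4',
+    output_type='video',
+    output_composition='com.mp4',
+    output_video_mbps=4,
+    downsample_ratio=None,           # Auto.
+    seq_chunk=12,
+)
+```
+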


+
+
+## TorchScript
+
+Model loading:
+
+```python
+import torch
+model = torch.jit.load('rvm_mobilenetv3.torchscript')
+```
+
+Optionally, freeze the model. This triggers graph optimizations, such as BatchNorm fusion. Frozen models are faster.
+
+```python
+model = torch.jit.freeze(model)
+```
+
+Then, you can use `model` exactly like a PyTorch model, with the exception that you must manually provide `device` and `dtype` to the converter API for frozen models. For example:
+
+```python
+convert_video(frozen_model, ...args..., device='cuda', dtype=torch.float32)
+```
+
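+Putting it together, here is a minimal sketch of running the frozen TorchScript model directly; it follows the same calling convention as the PyTorch model, and `YOUR_VIDEO` is a placeholder for your own frame source:
+
+```python
+import torch
+
+model = torch.jit.load('rvm_mobilenetv3.torchscript').cuda().eval()
+model = torch.jit.freeze(model)          # Optional graph optimization.
+
+rec = [None] * 4
+with torch.no_grad():
+    for src in YOUR_VIDEO:               # [B, C, H, W] tensors in 0~1.
+        fgr, pha, *rec = model(src.cuda(), *rec, 0.25)
+```
+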


+
+
+## ONNX
+
+Model spec:
+* Inputs: [`src`, `r1i`, `r2i`, `r3i`, `r4i`, `downsample_ratio`].
+    * `src` is the RGB input frame of shape `[B, C, H, W]` normalized to the `0~1` range.
+    * `rXi` are the recurrent state inputs. Initial recurrent states are zero-value tensors of shape `[1, 1, 1, 1]`.
+    * `downsample_ratio` is a tensor of shape `[1]`.
+    * Only `downsample_ratio` must have `dtype=FP32`. Other inputs must have `dtype` matching the loaded model's precision.
+* Outputs: [`fgr`, `pha`, `r1o`, `r2o`, `r3o`, `r4o`]
+    * `fgr, pha` are the foreground and alpha predictions, normalized to the `0~1` range.
+    * `rXo` are the recurrent state outputs.
+
+We only show examples of using the onnxruntime CUDA backend in Python.
+
+Model loading:
+
+```python
+import onnxruntime as ort
+
+sess = ort.InferenceSession('rvm_mobilenetv3_fp16.onnx')
+```
+
+Naive inference loop:
+
+```python
+import numpy as np
+
+rec = [ np.zeros([1, 1, 1, 1], dtype=np.float16) ] * 4 # Must match dtype of the model.
+downsample_ratio = np.array([0.25], dtype=np.float32) # dtype always FP32
+
+for src in YOUR_VIDEO: # src is of [B, C, H, W] with dtype of the model.
+    fgr, pha, *rec = sess.run([], {
+        'src': src,
+        'r1i': rec[0],
+        'r2i': rec[1],
+        'r3i': rec[2],
+        'r4i': rec[3],
+        'downsample_ratio': downsample_ratio
+    })
+```
+
+If you use the GPU version of ONNX Runtime, the above naive implementation transfers the recurrent states between CPU and GPU on every frame. They could have just stayed on the GPU for better performance. Below is an example using `iobinding` to eliminate useless transfers.
+
+```python
+import onnxruntime as ort
+import numpy as np
+
+# Load model.
+sess = ort.InferenceSession('rvm_mobilenetv3_fp16.onnx')
+
+# Create an io binding.
+io = sess.io_binding()
+
+# Create tensors on CUDA.
+rec = [ ort.OrtValue.ortvalue_from_numpy(np.zeros([1, 1, 1, 1], dtype=np.float16), 'cuda') ] * 4
+downsample_ratio = ort.OrtValue.ortvalue_from_numpy(np.asarray([0.25], dtype=np.float32), 'cuda')
+
+# Set output binding.
+for name in ['fgr', 'pha', 'r1o', 'r2o', 'r3o', 'r4o']:
+    io.bind_output(name, 'cuda')
+
+# Inference loop
+for src in YOUR_VIDEO:
+    io.bind_cpu_input('src', src)
+    io.bind_ortvalue_input('r1i', rec[0])
+    io.bind_ortvalue_input('r2i', rec[1])
+    io.bind_ortvalue_input('r3i', rec[2])
+    io.bind_ortvalue_input('r4i', rec[3])
+    io.bind_ortvalue_input('downsample_ratio', downsample_ratio)
+
+    sess.run_with_iobinding(io)
+
+    fgr, pha, *rec = io.get_outputs()
+
+    # Only transfer `fgr` and `pha` to CPU.
+    fgr = fgr.numpy()
+    pha = pha.numpy()
+```
+
+Note: depending on the inference tool you choose, it may not support all the operations in our official ONNX model. You are then responsible for modifying the model code and exporting your own ONNX model. You can refer to our exporter code in the [onnx branch](https://github.com/PeterL1n/RobustVideoMatting/tree/onnx).
+
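+For completeness, here is a small sketch of preparing one frame for the fp16 model. It assumes `frame` is a hypothetical `H x W x 3` uint8 RGB NumPy array:
+
+```python
+import numpy as np
+
+src = (frame / 255).astype(np.float16)   # Normalize to 0~1 and match the model dtype.
+src = src.transpose(2, 0, 1)[None]       # HWC -> [1, 3, H, W].
+```
+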


+
+
+## TensorFlow
+
+An example usage:
+
+```python
+import tensorflow as tf
+
+model = tf.keras.models.load_model('rvm_mobilenetv3_tf')
+model = tf.function(model)
+
+rec = [ tf.constant(0.) ] * 4 # Initial recurrent states.
+downsample_ratio = tf.constant(0.25) # Adjust based on your video.
+
+for src in YOUR_VIDEO: # src is of shape [B, H, W, C], not [B, C, H, W]!
+    out = model([src, *rec, downsample_ratio])
+    fgr, pha, *rec = out['fgr'], out['pha'], out['r1o'], out['r2o'], out['r3o'], out['r4o']
+```
+
+Note that the tensors are all channel-last. Otherwise, the inputs and outputs are exactly the same as PyTorch.
+
+We also provide the raw TensorFlow model code in the [tensorflow branch](https://github.com/PeterL1n/RobustVideoMatting/tree/tensorflow). You can transfer PyTorch checkpoint weights to TensorFlow models.
+
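+If your frames come from a channel-first pipeline, the conversion is a single transpose. A sketch, assuming `src_nchw` is a hypothetical `[B, C, H, W]` NumPy array:
+
+```python
+import numpy as np
+
+src = np.transpose(src_nchw, (0, 2, 3, 1))   # [B, C, H, W] -> [B, H, W, C]
+```
+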


+
+
+## TensorFlow.js
+
+We provide starter code in the [tfjs branch](https://github.com/PeterL1n/RobustVideoMatting/tree/tfjs). The example is self-explanatory and shows how to properly use the model.
+


+
+
+## CoreML
+
+We only show example usage of the CoreML models via the Python API using `coremltools`. In production, the same logic can be applied in Swift. When processing the first frame, do not provide recurrent states; CoreML will internally construct zero tensors of the correct shapes as the initial recurrent states.
+
+```python
+import coremltools as ct
+
+model = ct.models.model.MLModel('rvm_mobilenetv3_1920x1080_s0.25_int8.mlmodel')
+
+r1, r2, r3, r4 = None, None, None, None
+
+for src in YOUR_VIDEO:  # src is PIL.Image.
+
+    if r1 is None:
+        # Initial frame, do not provide recurrent states.
+        inputs = {'src': src}
+    else:
+        # Subsequent frames, provide recurrent states.
+        inputs = {'src': src, 'r1i': r1, 'r2i': r2, 'r3i': r3, 'r4i': r4}
+
+    outputs = model.predict(inputs)
+
+    fgr = outputs['fgr']  # PIL.Image.
+    pha = outputs['pha']  # PIL.Image.
+
+    r1 = outputs['r1o']  # Numpy array.
+    r2 = outputs['r2o']  # Numpy array.
+    r3 = outputs['r3o']  # Numpy array.
+    r4 = outputs['r4o']  # Numpy array.
+
+```
+
+Our CoreML models only support fixed resolutions. If you need other resolutions, you can export them yourself. See the [coreml branch](https://github.com/PeterL1n/RobustVideoMatting/tree/coreml) for model export.
\ No newline at end of file
diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/inference_zh_Hans.md b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/inference_zh_Hans.md
new file mode 100644
index 0000000..7995f40
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/inference_zh_Hans.md
@@ -0,0 +1,353 @@
+# 推断文档
+

English | 中文

+ +## 目录 + +* [概念](#概念) + * [下采样比](#下采样比) + * [循环记忆](#循环记忆) +* [PyTorch](#pytorch) +* [TorchHub](#torchhub) +* [TorchScript](#torchscript) +* [ONNX](#onnx) +* [TensorFlow](#tensorflow) +* [TensorFlow.js](#tensorflowjs) +* [CoreML](#coreml) + +
+ + +## 概念 + +### 下采样比 + +该表仅供参考。可根据视频内容进行调节。 + +| 分辨率 | 人像 | 全身 | +| ------------- | ------------- | -------------- | +| <= 512x512 | 1 | 1 | +| 1280x720 | 0.375 | 0.6 | +| 1920x1080 | 0.25 | 0.4 | +| 3840x2160 | 0.125 | 0.2 | + +模型在内部将高分辨率输入缩小做初步的处理,然后再放大做细分处理。 + +建议设置 `downsample_ratio` 使缩小后的分辨率维持在 256 到 512 像素之间. 例如,`1920x1080` 的输入用 `downsample_ratio=0.25`,缩小后的分辨率 `480x270` 在 256 到 512 像素之间。 + +根据视频内容调整 `downsample_ratio`。若视频是上身人像,低 `downsample_ratio` 足矣。若视频是全身像,建议尝试更高的 `downsample_ratio`。但注意,过高的 `downsample_ratio` 反而会降低效果。 + + +
+ +### 循环记忆 +此模型是循环神经网络(Recurrent Neural Network)。必须按顺序处理视频每帧,并提供网络循环记忆。 + +**正确用法** + +循环记忆输出被传递到下一帧做输入。 + +```python +rec = [None] * 4 # 初始值设置为 None + +for frame in YOUR_VIDEO: + fgr, pha, *rec = model(frame, *rec, downsample_ratio) +``` + +**错误用法** + +没有使用循环记忆。此方法仅可用于处理单独的图片。 + +```python +for frame in YOUR_VIDEO: + fgr, pha = model(frame, downsample_ratio)[:2] +``` + +更多技术细节见[论文](https://peterl1n.github.io/RobustVideoMatting/)。 + +


+ + +## PyTorch + +载入模型: + +```python +import torch +from model import MattingNetwork + +model = MattingNetwork(variant='mobilenetv3').eval().cuda() # 或 variant="resnet50" +model.load_state_dict(torch.load('rvm_mobilenetv3.pth')) +``` + +推断循环: +```python +rec = [None] * 4 # 初始值设置为 None + +for src in YOUR_VIDEO: # src 可以是 [B, C, H, W] 或 [B, T, C, H, W] + fgr, pha, *rec = model(src, *rec, downsample_ratio=0.25) +``` + +* `src`: 输入帧(Source)。 + * 可以是 `[B, C, H, W]` 或 `[B, T, C, H, W]` 的张量。 + * 若是 `[B, T, C, H, W]`,可给模型一次 `T` 帧,做一小段一小段地处理,用于更好的并行计算。 + * RGB 通道输入,范围为 `0~1`。 + +* `fgr, pha`: 前景(Foreground)和透明度通道(Alpha)的预测。 + * 根据`src`,可为 `[B, C, H, W]` 或 `[B, T, C, H, W]` 的输出。 + * `fgr` 是 RGB 三通道,`pha` 为一通道。 + * 输出范围为 `0~1`。 +* `rec`: 循环记忆(Recurrent States)。 + * `List[Tensor, Tensor, Tensor, Tensor]` 类型。 + * 初始 `rec` 为 `List[None, None, None, None]`。 + * 有四个记忆,因为网络使用四个 `ConvGRU` 层。 + * 无论 `src` 的 Rank,所有记忆张量的 Rank 为 4。 + * 若一次给予 `T` 帧,只返回处理完最后一帧后的记忆。 + +完整的推断例子: + +```python +from torch.utils.data import DataLoader +from torchvision.transforms import ToTensor +from inference_utils import VideoReader, VideoWriter + +reader = VideoReader('input.mp4', transform=ToTensor()) +writer = VideoWriter('output.mp4', frame_rate=30) + +bgr = torch.tensor([.47, 1, .6]).view(3, 1, 1).cuda() # 绿背景 +rec = [None] * 4 # 初始记忆 + +with torch.no_grad(): + for src in DataLoader(reader): + fgr, pha, *rec = model(src.cuda(), *rec, downsample_ratio=0.25) # 将上一帧的记忆给下一帧 + writer.write(fgr * pha + bgr * (1 - pha)) +``` + +或者使用提供的视频转换 API: + +```python +from inference import convert_video + +convert_video( + model, # 模型,可以加载到任何设备(cpu 或 cuda) + input_source='input.mp4', # 视频文件,或图片序列文件夹 + input_resize=(1920, 1080), # [可选项] 缩放视频大小 + downsample_ratio=0.25, # [可选项] 下采样比,若 None,自动下采样至 512px + output_type='video', # 可选 "video"(视频)或 "png_sequence"(PNG 序列) + output_composition='com.mp4', # 若导出视频,提供文件路径。若导出 PNG 序列,提供文件夹路径 + output_alpha="pha.mp4", # [可选项] 输出透明度预测 + output_foreground="fgr.mp4", # [可选项] 输出前景预测 + output_video_mbps=4, # 若导出视频,提供视频码率 + seq_chunk=12, # 设置多帧并行计算 + num_workers=1, # 只适用于图片序列输入,读取线程 + progress=True # 显示进度条 +) +``` + +也可通过命令行调用转换 API: + +```sh +python inference.py \ + --variant mobilenetv3 \ + --checkpoint "CHECKPOINT" \ + --device cuda \ + --input-source "input.mp4" \ + --downsample-ratio 0.25 \ + --output-type video \ + --output-composition "composition.mp4" \ + --output-alpha "alpha.mp4" \ + --output-foreground "foreground.mp4" \ + --output-video-mbps 4 \ + --seq-chunk 12 +``` + +


+ +## TorchHub + +载入模型: + +```python +model = torch.hub.load("PeterL1n/RobustVideoMatting", "mobilenetv3") # or "resnet50" +``` + +使用转换 API,具体请参考之前对 `convert_video` 的文档。 + +```python +convert_video = torch.hub.load("PeterL1n/RobustVideoMatting", "converter") + +convert_video(model, ...args...) +``` + +


+ +## TorchScript + +载入模型: + +```python +import torch +model = torch.jit.load('rvm_mobilenetv3.torchscript') +``` + +也可以可选的将模型固化(Freeze)。这会对模型进行优化,例如 BatchNorm Fusion 等。固化的模型更快。 + +```python +model = torch.jit.freeze(model) +``` + +然后,可以将 `model` 作为普通的 PyTorch 模型使用。但注意,若用固化模型调用转换 API,必须手动提供 `device` 和 `dtype`: + +```python +convert_video(frozen_model, ...args..., device='cuda', dtype=torch.float32) +``` + +


+ +## ONNX + +模型规格: +* 输入: [`src`, `r1i`, `r2i`, `r3i`, `r4i`, `downsample_ratio`]. + * `src`:输入帧,RGB 通道,形状为 `[B, C, H, W]`,范围为`0~1`。 + * `rXi`:记忆输入,初始值是是形状为 `[1, 1, 1, 1]` 的零张量。 + * `downsample_ratio` 下采样比,张量形状为 `[1]`。 + * 只有 `downsample_ratio` 必须是 `FP32`,其他输入必须和加载的模型使用一样的 `dtype`。 +* 输出: [`fgr`, `pha`, `r1o`, `r2o`, `r3o`, `r4o`] + * `fgr, pha`:前景和透明度通道输出,范围为 `0~1`。 + * `rXo`:记忆输出。 + +我们只展示用 ONNX Runtime CUDA Backend 在 Python 上的使用范例。 + +载入模型: + +```python +import onnxruntime as ort + +sess = ort.InferenceSession('rvm_mobilenetv3_fp16.onnx') +``` + +简单推断循环,但此方法不是最优化的: + +```python +import numpy as np + +rec = [ np.zeros([1, 1, 1, 1], dtype=np.float16) ] * 4 # 必须用模型一样的 dtype +downsample_ratio = np.array([0.25], dtype=np.float32) # 必须是 FP32 + +for src in YOUR_VIDEO: # src 张量是 [B, C, H, W] 形状,必须用模型一样的 dtype + fgr, pha, *rec = sess.run([], { + 'src': src, + 'r1i': rec[0], + 'r2i': rec[1], + 'r3i': rec[2], + 'r4i': rec[3], + 'downsample_ratio': downsample_ratio + }) +``` + +若使用 GPU,上例会将记忆输出传回到 CPU,再在下一帧时传回到 GPU。这种传输是无意义的,因为记忆值可以留在 GPU 上。下例使用 `iobinding` 来杜绝无用的传输。 + +```python +import onnxruntime as ort +import numpy as np + +# 载入模型 +sess = ort.InferenceSession('rvm_mobilenetv3_fp16.onnx') + +# 创建 io binding. +io = sess.io_binding() + +# 在 CUDA 上创建张量 +rec = [ ort.OrtValue.ortvalue_from_numpy(np.zeros([1, 1, 1, 1], dtype=np.float16), 'cuda') ] * 4 +downsample_ratio = ort.OrtValue.ortvalue_from_numpy(np.asarray([0.25], dtype=np.float32), 'cuda') + +# 设置输出项 +for name in ['fgr', 'pha', 'r1o', 'r2o', 'r3o', 'r4o']: + io.bind_output(name, 'cuda') + +# 推断 +for src in YOUR_VIDEO: + io.bind_cpu_input('src', src) + io.bind_ortvalue_input('r1i', rec[0]) + io.bind_ortvalue_input('r2i', rec[1]) + io.bind_ortvalue_input('r3i', rec[2]) + io.bind_ortvalue_input('r4i', rec[3]) + io.bind_ortvalue_input('downsample_ratio', downsample_ratio) + + sess.run_with_iobinding(io) + + fgr, pha, *rec = io.get_outputs() + + # 只将 `fgr` 和 `pha` 回传到 CPU + fgr = fgr.numpy() + pha = pha.numpy() +``` + +注:若你使用其他推断框架,可能有些 ONNX ops 不被支持,需被替换。可以参考 [onnx](https://github.com/PeterL1n/RobustVideoMatting/tree/onnx) 分支的代码做自行导出。 + +


+
+## TensorFlow
+
+An example:
+
+```python
+import tensorflow as tf
+
+model = tf.keras.models.load_model('rvm_mobilenetv3_tf')
+model = tf.function(model)
+
+rec = [ tf.constant(0.) ] * 4 # initial recurrent states
+downsample_ratio = tf.constant(0.25) # downsample ratio, adjust to your video
+
+for src in YOUR_VIDEO: # src is a tensor of shape [B, H, W, C], not [B, C, H, W]!
+    out = model([src, *rec, downsample_ratio])
+    fgr, pha, *rec = out['fgr'], out['pha'], out['r1o'], out['r2o'], out['r3o'], out['r4o']
+```
+
+Note that on TensorFlow, all tensors use the channel-last format.
+
+We provide the raw TensorFlow model code in the [tensorflow](https://github.com/PeterL1n/RobustVideoMatting/tree/tensorflow) branch. You can transfer the PyTorch weights to the TensorFlow model yourself.
+
+
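+Since the channel order differs from the PyTorch variants above, frames prepared as `[B, C, H, W]` arrays must be transposed first. A minimal sketch, assuming `frame_nchw` is a `[1, 3, H, W]` NumPy array in the `0~1` range:
+
+```python
+import numpy as np
+import tensorflow as tf
+
+# [B, C, H, W] -> [B, H, W, C] for TensorFlow's channel-last convention.
+src = tf.convert_to_tensor(np.transpose(frame_nchw, (0, 2, 3, 1)), dtype=tf.float32)
+```
+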


+
+## TensorFlow.js
+
+We provide example code in the [tfjs](https://github.com/PeterL1n/RobustVideoMatting/tree/tfjs) branch. The code is simple and easy to follow, and explains how to use the model correctly.
+


+
+## CoreML
+
+We only show how to use the CoreML model in Python via `coremltools`. For deployment, the same logic can be applied in Swift. The model's recurrent state inputs do not need to be provided for the first frame; CoreML internally creates zero tensors as the initial states automatically.
+
+```python
+import coremltools as ct
+
+model = ct.models.model.MLModel('rvm_mobilenetv3_1920x1080_s0.25_int8.mlmodel')
+
+r1, r2, r3, r4 = None, None, None, None
+
+for src in YOUR_VIDEO:  # src is a PIL.Image.
+
+    if r1 is None:
+        # Initial frame: no recurrent states needed.
+        inputs = {'src': src}
+    else:
+        # Subsequent frames: provide the recurrent states.
+        inputs = {'src': src, 'r1i': r1, 'r2i': r2, 'r3i': r3, 'r4i': r4}
+
+    outputs = model.predict(inputs)
+
+    fgr = outputs['fgr']  # PIL.Image
+    pha = outputs['pha']  # PIL.Image
+
+    r1 = outputs['r1o']  # Numpy array
+    r2 = outputs['r2o']  # Numpy array
+    r3 = outputs['r3o']  # Numpy array
+    r4 = outputs['r4o']  # Numpy array
+
+```
+
+Our CoreML models only support fixed resolutions. If you need another resolution, you can export it yourself. See the [coreml](https://github.com/PeterL1n/RobustVideoMatting/tree/coreml) branch for the export code.
\ No newline at end of file
diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/aim_test.txt b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/aim_test.txt
new file mode 100644
index 0000000..088006b
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/aim_test.txt
@@ -0,0 +1,11 @@
+boy-1518482_1920.png
+girl-1219339_1920.png
+girl-1467820_1280.png
+girl-beautiful-young-face-53000.png
+long-1245787_1920.png
+model-600238_1920.png
+pexels-photo-58463.png
+sea-sunny-person-beach.png
+wedding-dresses-1486260_1280.png
+woman-952506_1920 (1).png
+woman-morning-bathrobe-bathroom.png
diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/d646_test.txt b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/d646_test.txt
new file mode 100644
index 0000000..b5f02d5
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/d646_test.txt
@@ -0,0 +1,11 @@
+test_13.png
+test_16.png
+test_18.png
+test_22.png
+test_32.png
+test_35.png
+test_39.png
+test_42.png
+test_46.png
+test_4.png
+test_6.png
diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/dvm_background_test_clips.txt b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/dvm_background_test_clips.txt
new file mode 100644
index 0000000..a821c86
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/dvm_background_test_clips.txt
@@ -0,0 +1,162 @@
+0000
+0001
+0002
+0004
+0005
+0007
+0008
+0009
+0010
+0012
+0013
+0014
+0015
+0016
+0017
+0018
+0019
+0021
+0022
+0023
+0024
+0025
+0027
+0029
+0030
+0032
+0033
+0034
+0035
+0037
+0038
+0039
+0040
+0041
+0042
+0043
+0045
+0046
+0047
+0048
+0050
+0051
+0052
+0054
+0055
+0057
+0058
+0059
+0060
+0061
+0062
+0063
+0064
+0065
+0066
+0068
+0070
+0071
+0073
+0074
+0075
+0077
+0078
+0079
+0080
+0081
+0082
+0083
+0084
+0085
+0086
+0089
+0097
+0100
+0101
+0102
+0103
+0104
+0106
+0107
+0109
+0110
+0111
+0113
+0115
+0116
+0117
+0119
+0120
+0121
+0122
+0123
+0124
+0125
+0126
+0127
+0128
+0129
+0130
+0131
+0132
+0133
+0134
+0135
+0136
+0137
+0143
+0145
+0147
+0148
+0150
+0159
+0160
+0161
+0162
+0165
+0166
+0168
+0172
+0174
+0175
+0176
+0178
+0181
+0182
+0183
+0184
+0185
+0187
+0194
+0198
+0200
+0201
+0207
+0210
+0211
+0212
+0215
+0217
+0218
+0219
+0220
+0222
+0223
+0224
+0225
+0226
+0227
+0229
+0230
+0231
+0232
+0233
+0234
+0235
+0237
+0240
+0241
+0242
+0243
+0244
+0245
diff --git
a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/dvm_background_train_clips.txt b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/dvm_background_train_clips.txt new file mode 100644 index 0000000..6bdfffe --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/dvm_background_train_clips.txt @@ -0,0 +1,3117 @@ +0000 +0002 +0003 +0004 +0005 +0006 +0007 +0009 +0010 +0012 +0013 +0014 +0015 +0016 +0019 +0021 +0022 +0023 +0024 +0025 +0028 +0029 +0030 +0031 +0032 +0034 +0035 +0036 +0037 +0039 +0040 +0041 +0042 +0043 +0044 +0046 +0047 +0048 +0049 +0050 +0051 +0052 +0053 +0054 +0055 +0056 +0057 +0058 +0060 +0061 +0062 +0063 +0064 +0065 +0066 +0067 +0068 +0069 +0070 +0071 +0073 +0074 +0075 +0076 +0077 +0078 +0079 +0081 +0082 +0087 +0088 +0099 +0100 +0101 +0104 +0105 +0107 +0108 +0109 +0110 +0111 +0112 +0113 +0114 +0115 +0117 +0118 +0119 +0120 +0122 +0123 +0124 +0125 +0127 +0128 +0129 +0130 +0131 +0132 +0133 +0134 +0135 +0136 +0137 +0138 +0139 +0140 +0141 +0142 +0144 +0146 +0147 +0148 +0150 +0151 +0152 +0153 +0154 +0155 +0156 +0157 +0158 +0159 +0160 +0161 +0163 +0164 +0165 +0167 +0168 +0169 +0170 +0171 +0172 +0174 +0175 +0176 +0177 +0178 +0180 +0181 +0182 +0184 +0185 +0187 +0188 +0189 +0190 +0192 +0193 +0194 +0195 +0196 +0197 +0198 +0199 +0200 +0202 +0203 +0204 +0205 +0206 +0207 +0208 +0209 +0210 +0211 +0212 +0213 +0214 +0215 +0217 +0218 +0219 +0220 +0221 +0222 +0223 +0224 +0225 +0226 +0227 +0229 +0230 +0231 +0233 +0234 +0235 +0236 +0237 +0238 +0240 +0241 +0242 +0243 +0244 +0245 +0246 +0247 +0248 +0249 +0250 +0251 +0252 +0253 +0254 +0255 +0256 +0257 +0258 +0259 +0260 +0261 +0262 +0263 +0264 +0265 +0266 +0267 +0268 +0269 +0270 +0271 +0272 +0273 +0274 +0275 +0276 +0277 +0278 +0279 +0280 +0281 +0282 +0283 +0284 +0285 +0286 +0287 +0288 +0289 +0290 +0291 +0292 +0293 +0294 +0297 +0298 +0299 +0300 +0301 +0302 +0303 +0304 +0305 +0306 +0308 +0309 +0310 +0311 +0312 +0313 +0314 +0315 +0316 +0317 +0319 +0320 +0321 +0322 +0323 +0324 +0325 +0326 +0327 +0328 +0329 +0330 +0331 +0332 +0333 +0335 +0336 +0337 +0338 +0339 +0341 +0342 +0344 +0345 +0346 +0348 +0349 +0352 +0353 +0356 +0357 +0358 +0359 +0360 +0361 +0362 +0363 +0364 +0365 +0366 +0368 +0369 +0370 +0371 +0372 +0373 +0374 +0375 +0376 +0377 +0378 +0379 +0380 +0381 +0382 +0383 +0384 +0385 +0386 +0387 +0388 +0389 +0391 +0392 +0393 +0394 +0395 +0397 +0398 +0399 +0400 +0401 +0402 +0403 +0404 +0405 +0406 +0407 +0408 +0409 +0410 +0411 +0413 +0414 +0415 +0416 +0417 +0419 +0420 +0421 +0422 +0423 +0424 +0425 +0426 +0427 +0428 +0429 +0431 +0433 +0434 +0435 +0436 +0437 +0438 +0439 +0440 +0441 +0442 +0443 +0445 +0446 +0447 +0448 +0449 +0450 +0451 +0452 +0453 +0454 +0456 +0457 +0458 +0459 +0462 +0463 +0464 +0465 +0466 +0467 +0468 +0469 +0470 +0471 +0472 +0473 +0474 +0475 +0476 +0477 +0478 +0479 +0480 +0481 +0482 +0483 +0484 +0485 +0486 +0487 +0488 +0489 +0490 +0491 +0492 +0493 +0494 +0499 +0501 +0502 +0503 +0504 +0505 +0506 +0507 +0509 +0510 +0511 +0512 +0513 +0514 +0515 +0517 +0518 +0519 +0520 +0521 +0522 +0524 +0526 +0527 +0529 +0530 +0534 +0535 +0536 +0538 +0539 +0541 +0542 +0543 +0544 +0545 +0546 +0548 +0549 +0550 +0552 +0554 +0555 +0556 +0557 +0558 +0559 +0560 +0561 +0562 +0563 +0564 +0565 +0566 +0567 +0568 +0571 +0572 +0573 +0574 +0575 +0576 +0577 +0578 +0579 +0580 +0581 +0582 +0583 +0584 +0586 +0587 +0589 +0590 +0591 +0592 +0594 +0595 +0596 +0597 +0598 +0600 +0601 +0602 +0603 +0604 +0605 +0606 +0608 +0609 +0610 +0611 +0612 +0613 +0614 +0615 +0616 +0617 +0618 +0619 
+0620 +0624 +0625 +0626 +0627 +0628 +0629 +0630 +0631 +0634 +0635 +0636 +0637 +0638 +0639 +0640 +0641 +0642 +0643 +0644 +0645 +0646 +0647 +0648 +0650 +0651 +0652 +0654 +0655 +0656 +0658 +0659 +0660 +0661 +0662 +0663 +0664 +0665 +0666 +0667 +0669 +0670 +0671 +0672 +0673 +0674 +0675 +0676 +0677 +0678 +0679 +0680 +0681 +0682 +0683 +0684 +0685 +0686 +0687 +0689 +0690 +0691 +0692 +0693 +0694 +0695 +0696 +0697 +0698 +0699 +0700 +0701 +0702 +0703 +0704 +0705 +0706 +0707 +0708 +0709 +0710 +0711 +0712 +0713 +0714 +0715 +0716 +0717 +0718 +0719 +0720 +0721 +0723 +0724 +0725 +0726 +0727 +0729 +0730 +0731 +0732 +0733 +0734 +0735 +0736 +0738 +0740 +0741 +0742 +0743 +0744 +0746 +0747 +0748 +0749 +0750 +0752 +0753 +0754 +0755 +0756 +0757 +0758 +0759 +0760 +0762 +0763 +0764 +0765 +0766 +0767 +0768 +0770 +0771 +0772 +0773 +0774 +0775 +0776 +0777 +0778 +0779 +0780 +0781 +0782 +0783 +0784 +0786 +0787 +0788 +0789 +0790 +0791 +0792 +0793 +0794 +0795 +0796 +0797 +0798 +0800 +0801 +0804 +0806 +0808 +0809 +0811 +0812 +0813 +0814 +0815 +0816 +0817 +0819 +0823 +0824 +0825 +0827 +0828 +0829 +0830 +0831 +0832 +0833 +0834 +0835 +0836 +0837 +0840 +0841 +0842 +0847 +0848 +0850 +0851 +0852 +0853 +0854 +0855 +0856 +0857 +0858 +0859 +0860 +0861 +0862 +0864 +0867 +0868 +0869 +0870 +0871 +0872 +0873 +0874 +0876 +0877 +0878 +0879 +0880 +0881 +0882 +0883 +0885 +0886 +0887 +0889 +0890 +0891 +0892 +0893 +0894 +0895 +0896 +0899 +0900 +0901 +0902 +0903 +0904 +0905 +0906 +0907 +0908 +0909 +0910 +0911 +0912 +0913 +0914 +0915 +0916 +0917 +0918 +0919 +0921 +0922 +0923 +0924 +0925 +0926 +0927 +0929 +0930 +0931 +0932 +0933 +0934 +0935 +0936 +0937 +0939 +0940 +0941 +0942 +0945 +0946 +0947 +0948 +0949 +0950 +0951 +0952 +0953 +0954 +0955 +0956 +0957 +0958 +0960 +0962 +0963 +0964 +0965 +0966 +0967 +0968 +0969 +0970 +0971 +0973 +0974 +0976 +0977 +0978 +0979 +0980 +0981 +0982 +0983 +0984 +0985 +0986 +0987 +0988 +0989 +0990 +0991 +0992 +0993 +0994 +0995 +0996 +0997 +0998 +0999 +1000 +1001 +1002 +1003 +1005 +1006 +1008 +1009 +1010 +1011 +1012 +1013 +1014 +1016 +1017 +1018 +1019 +1020 +1021 +1022 +1023 +1024 +1025 +1026 +1028 +1029 +1030 +1032 +1035 +1036 +1037 +1038 +1039 +1040 +1041 +1042 +1043 +1044 +1045 +1046 +1047 +1048 +1049 +1050 +1052 +1053 +1054 +1055 +1056 +1057 +1058 +1061 +1062 +1063 +1064 +1065 +1066 +1067 +1068 +1069 +1072 +1075 +1076 +1077 +1078 +1079 +1081 +1082 +1083 +1084 +1087 +1088 +1089 +1090 +1096 +1097 +1098 +1099 +1101 +1102 +1103 +1104 +1105 +1106 +1107 +1108 +1109 +1110 +1111 +1112 +1113 +1115 +1116 +1117 +1118 +1119 +1120 +1121 +1122 +1123 +1124 +1128 +1129 +1130 +1131 +1132 +1134 +1137 +1138 +1139 +1140 +1141 +1142 +1143 +1144 +1145 +1146 +1147 +1148 +1149 +1150 +1151 +1152 +1153 +1154 +1155 +1156 +1157 +1158 +1159 +1160 +1161 +1162 +1163 +1165 +1168 +1169 +1170 +1171 +1172 +1173 +1174 +1175 +1176 +1177 +1178 +1179 +1180 +1181 +1182 +1183 +1184 +1185 +1186 +1187 +1188 +1189 +1190 +1191 +1192 +1193 +1194 +1195 +1196 +1197 +1199 +1200 +1201 +1202 +1203 +1204 +1206 +1207 +1208 +1211 +1212 +1213 +1215 +1216 +1217 +1219 +1220 +1221 +1222 +1223 +1224 +1225 +1226 +1227 +1228 +1229 +1230 +1231 +1232 +1233 +1234 +1235 +1237 +1238 +1239 +1240 +1241 +1242 +1245 +1246 +1248 +1249 +1252 +1253 +1255 +1256 +1257 +1258 +1259 +1260 +1261 +1262 +1263 +1264 +1265 +1266 +1267 +1268 +1269 +1270 +1271 +1272 +1273 +1274 +1275 +1277 +1278 +1279 +1280 +1281 +1282 +1283 +1284 +1287 +1288 +1289 +1290 +1291 +1293 +1294 +1295 +1296 +1297 +1299 +1300 +1301 +1302 +1303 +1304 +1305 +1306 +1307 +1308 +1309 +1310 +1311 +1312 +1313 +1316 +1317 +1318 
+1319 +1320 +1321 +1322 +1323 +1324 +1325 +1326 +1327 +1328 +1329 +1331 +1332 +1333 +1334 +1335 +1336 +1337 +1338 +1339 +1340 +1341 +1342 +1343 +1344 +1345 +1346 +1347 +1348 +1350 +1351 +1352 +1353 +1354 +1355 +1356 +1357 +1359 +1360 +1361 +1362 +1363 +1364 +1365 +1366 +1367 +1368 +1369 +1370 +1371 +1372 +1373 +1374 +1375 +1376 +1378 +1379 +1380 +1381 +1382 +1383 +1385 +1386 +1387 +1389 +1390 +1391 +1392 +1393 +1394 +1395 +1396 +1397 +1398 +1399 +1400 +1402 +1403 +1405 +1406 +1409 +1410 +1411 +1412 +1413 +1414 +1415 +1416 +1417 +1418 +1420 +1421 +1422 +1423 +1424 +1425 +1426 +1427 +1428 +1429 +1430 +1431 +1432 +1433 +1434 +1435 +1436 +1437 +1438 +1439 +1441 +1442 +1443 +1444 +1445 +1446 +1447 +1448 +1449 +1450 +1451 +1452 +1453 +1454 +1455 +1456 +1457 +1458 +1459 +1460 +1461 +1462 +1465 +1466 +1467 +1468 +1469 +1470 +1471 +1472 +1473 +1474 +1475 +1476 +1477 +1478 +1479 +1480 +1481 +1482 +1483 +1484 +1485 +1486 +1487 +1488 +1489 +1490 +1491 +1493 +1494 +1495 +1496 +1497 +1499 +1500 +1501 +1502 +1503 +1504 +1505 +1506 +1507 +1508 +1509 +1510 +1511 +1512 +1513 +1514 +1515 +1516 +1519 +1520 +1521 +1522 +1523 +1524 +1525 +1526 +1527 +1528 +1529 +1530 +1531 +1532 +1534 +1535 +1536 +1537 +1538 +1539 +1540 +1541 +1542 +1543 +1544 +1545 +1546 +1547 +1548 +1549 +1550 +1551 +1552 +1553 +1554 +1555 +1556 +1557 +1558 +1560 +1561 +1562 +1563 +1565 +1566 +1567 +1568 +1569 +1570 +1571 +1572 +1573 +1574 +1575 +1576 +1577 +1578 +1579 +1580 +1581 +1582 +1583 +1584 +1585 +1586 +1587 +1588 +1589 +1590 +1591 +1592 +1593 +1594 +1595 +1596 +1597 +1598 +1599 +1600 +1601 +1602 +1603 +1604 +1605 +1606 +1608 +1609 +1611 +1612 +1613 +1614 +1615 +1616 +1617 +1618 +1619 +1620 +1621 +1622 +1623 +1624 +1625 +1626 +1627 +1628 +1629 +1630 +1631 +1632 +1633 +1634 +1635 +1636 +1637 +1638 +1639 +1640 +1641 +1642 +1643 +1644 +1645 +1646 +1649 +1650 +1651 +1652 +1653 +1654 +1655 +1656 +1657 +1658 +1659 +1660 +1661 +1662 +1663 +1664 +1665 +1666 +1667 +1668 +1669 +1670 +1671 +1672 +1673 +1675 +1676 +1677 +1678 +1679 +1680 +1681 +1682 +1683 +1684 +1685 +1686 +1687 +1688 +1689 +1690 +1691 +1692 +1693 +1694 +1695 +1696 +1697 +1698 +1699 +1700 +1701 +1702 +1703 +1704 +1705 +1706 +1707 +1708 +1709 +1710 +1712 +1713 +1714 +1715 +1716 +1717 +1718 +1719 +1720 +1721 +1722 +1723 +1724 +1725 +1726 +1727 +1728 +1729 +1730 +1731 +1732 +1733 +1734 +1735 +1736 +1737 +1738 +1739 +1740 +1741 +1742 +1743 +1744 +1745 +1746 +1747 +1748 +1749 +1750 +1751 +1752 +1753 +1754 +1755 +1756 +1757 +1758 +1759 +1760 +1761 +1762 +1763 +1764 +1765 +1766 +1767 +1768 +1769 +1770 +1771 +1772 +1773 +1774 +1775 +1776 +1778 +1779 +1780 +1781 +1782 +1784 +1785 +1786 +1787 +1788 +1789 +1790 +1791 +1792 +1793 +1794 +1795 +1796 +1797 +1798 +1799 +1800 +1804 +1806 +1807 +1808 +1809 +1811 +1812 +1813 +1814 +1815 +1816 +1817 +1818 +1819 +1820 +1821 +1822 +1823 +1825 +1826 +1827 +1828 +1829 +1831 +1833 +1834 +1835 +1836 +1837 +1838 +1839 +1840 +1842 +1843 +1844 +1845 +1846 +1847 +1848 +1849 +1850 +1851 +1852 +1853 +1854 +1855 +1856 +1857 +1858 +1859 +1861 +1862 +1863 +1864 +1865 +1866 +1867 +1868 +1869 +1870 +1871 +1872 +1873 +1874 +1875 +1876 +1877 +1879 +1880 +1881 +1882 +1886 +1887 +1889 +1891 +1892 +1893 +1894 +1896 +1897 +1898 +1899 +1900 +1901 +1902 +1903 +1904 +1905 +1906 +1907 +1908 +1909 +1910 +1911 +1912 +1913 +1914 +1915 +1916 +1917 +1918 +1919 +1920 +1921 +1922 +1923 +1924 +1925 +1926 +1927 +1928 +1929 +1930 +1931 +1932 +1933 +1934 +1935 +1936 +1937 +1938 +1939 +1940 +1941 +1942 +1943 +1944 +1945 +1946 +1947 +1948 +1949 +1950 +1952 +1953 +1954 +1955 +1956 +1958 
+1959 +1960 +1961 +1962 +1963 +1964 +1965 +1966 +1967 +1968 +1969 +1970 +1971 +1972 +1973 +1974 +1975 +1976 +1977 +1978 +1980 +1981 +1982 +1983 +1984 +1985 +1986 +1988 +1989 +1990 +1991 +1992 +1993 +1994 +1995 +1997 +1998 +1999 +2000 +2002 +2003 +2004 +2005 +2007 +2008 +2010 +2011 +2012 +2013 +2014 +2015 +2016 +2017 +2018 +2019 +2020 +2021 +2022 +2024 +2025 +2026 +2027 +2028 +2029 +2030 +2031 +2032 +2035 +2036 +2037 +2038 +2039 +2040 +2041 +2042 +2043 +2045 +2046 +2047 +2049 +2050 +2052 +2053 +2054 +2056 +2057 +2059 +2060 +2061 +2064 +2066 +2068 +2069 +2070 +2071 +2072 +2073 +2074 +2075 +2077 +2078 +2079 +2080 +2081 +2082 +2083 +2084 +2085 +2086 +2087 +2088 +2089 +2090 +2091 +2092 +2093 +2094 +2096 +2097 +2098 +2099 +2100 +2101 +2102 +2103 +2104 +2105 +2107 +2108 +2109 +2111 +2112 +2113 +2114 +2115 +2116 +2117 +2119 +2120 +2121 +2122 +2123 +2124 +2125 +2126 +2127 +2128 +2129 +2130 +2131 +2132 +2133 +2134 +2135 +2136 +2137 +2138 +2139 +2140 +2141 +2143 +2144 +2145 +2146 +2147 +2148 +2149 +2152 +2153 +2155 +2158 +2159 +2160 +2161 +2162 +2163 +2164 +2165 +2166 +2167 +2168 +2169 +2170 +2171 +2174 +2176 +2177 +2178 +2179 +2180 +2181 +2182 +2183 +2184 +2186 +2187 +2188 +2189 +2190 +2191 +2192 +2193 +2195 +2197 +2198 +2199 +2200 +2201 +2202 +2203 +2204 +2205 +2206 +2207 +2208 +2209 +2210 +2211 +2212 +2213 +2214 +2215 +2216 +2217 +2218 +2219 +2220 +2221 +2222 +2223 +2225 +2226 +2227 +2228 +2229 +2230 +2231 +2232 +2233 +2234 +2235 +2236 +2238 +2239 +2240 +2241 +2242 +2243 +2244 +2245 +2246 +2247 +2248 +2249 +2250 +2251 +2252 +2253 +2255 +2256 +2257 +2258 +2259 +2260 +2261 +2263 +2264 +2265 +2267 +2268 +2269 +2270 +2271 +2272 +2273 +2274 +2275 +2276 +2277 +2278 +2279 +2280 +2281 +2282 +2283 +2284 +2285 +2286 +2287 +2288 +2290 +2291 +2292 +2293 +2294 +2295 +2297 +2299 +2300 +2301 +2302 +2303 +2304 +2305 +2311 +2312 +2313 +2314 +2315 +2316 +2317 +2318 +2319 +2320 +2322 +2324 +2325 +2326 +2329 +2331 +2332 +2334 +2335 +2336 +2337 +2338 +2339 +2340 +2341 +2342 +2343 +2344 +2345 +2347 +2349 +2350 +2351 +2352 +2353 +2355 +2356 +2358 +2359 +2360 +2361 +2362 +2363 +2364 +2365 +2367 +2368 +2369 +2370 +2372 +2373 +2374 +2375 +2376 +2377 +2378 +2379 +2380 +2381 +2382 +2383 +2384 +2386 +2389 +2390 +2391 +2392 +2393 +2394 +2395 +2396 +2397 +2398 +2399 +2400 +2401 +2402 +2403 +2404 +2406 +2407 +2408 +2409 +2410 +2411 +2412 +2413 +2414 +2416 +2417 +2418 +2419 +2420 +2421 +2422 +2423 +2424 +2425 +2426 +2427 +2428 +2429 +2431 +2432 +2433 +2434 +2437 +2439 +2440 +2441 +2442 +2443 +2444 +2445 +2446 +2448 +2449 +2450 +2451 +2452 +2453 +2454 +2455 +2456 +2457 +2460 +2461 +2462 +2463 +2464 +2465 +2466 +2467 +2468 +2469 +2470 +2472 +2474 +2475 +2479 +2480 +2481 +2482 +2483 +2484 +2485 +2486 +2487 +2488 +2489 +2490 +2491 +2492 +2493 +2494 +2495 +2496 +2497 +2499 +2500 +2501 +2502 +2503 +2504 +2505 +2506 +2507 +2508 +2509 +2510 +2511 +2512 +2513 +2514 +2515 +2516 +2517 +2518 +2519 +2520 +2521 +2522 +2523 +2524 +2525 +2530 +2534 +2536 +2537 +2539 +2540 +2541 +2542 +2543 +2544 +2545 +2546 +2548 +2549 +2550 +2551 +2553 +2554 +2555 +2556 +2559 +2560 +2562 +2567 +2570 +2573 +2575 +2576 +2578 +2579 +2582 +2585 +2588 +2590 +2593 +2594 +2595 +2596 +2597 +2598 +2600 +2601 +2602 +2603 +2604 +2605 +2606 +2607 +2612 +2613 +2615 +2616 +2617 +2618 +2619 +2620 +2622 +2623 +2624 +2625 +2629 +2630 +2633 +2634 +2635 +2637 +2638 +2639 +2641 +2643 +2644 +2646 +2648 +2649 +2650 +2652 +2654 +2656 +2659 +2661 +2662 +2663 +2664 +2665 +2667 +2669 +2670 +2671 +2672 +2674 +2675 +2677 +2679 +2680 +2682 +2684 +2685 +2687 +2689 +2691 +2692 +2693 +2694 
+2695 +2696 +2697 +2698 +2699 +2700 +2701 +2702 +2703 +2705 +2706 +2707 +2710 +2713 +2714 +2715 +2717 +2718 +2720 +2721 +2722 +2726 +2727 +2729 +2733 +2734 +2737 +2739 +2740 +2741 +2742 +2743 +2745 +2747 +2748 +2749 +2755 +2756 +2757 +2758 +2759 +2761 +2762 +2763 +2765 +2767 +2768 +2769 +2770 +2773 +2775 +2776 +2780 +2781 +2782 +2785 +2787 +2790 +2791 +2793 +2794 +2795 +2796 +2797 +2798 +2800 +2801 +2802 +2803 +2806 +2808 +2810 +2812 +2813 +2814 +2815 +2816 +2818 +2819 +2820 +2821 +2822 +2823 +2825 +2827 +2828 +2829 +2830 +2831 +2832 +2833 +2834 +2835 +2837 +2838 +2842 +2843 +2844 +2845 +2847 +2848 +2849 +2850 +2851 +2852 +2853 +2854 +2856 +2857 +2858 +2860 +2861 +2862 +2863 +2864 +2865 +2866 +2868 +2869 +2871 +2874 +2875 +2877 +2879 +2881 +2882 +2884 +2885 +2887 +2888 +2889 +2890 +2891 +2893 +2894 +2895 +2896 +2900 +2902 +2903 +2904 +2905 +2906 +2907 +2908 +2911 +2912 +2914 +2915 +2916 +2917 +2918 +2919 +2921 +2923 +2924 +2925 +2926 +2928 +2929 +2930 +2931 +2932 +2933 +2934 +2935 +2936 +2938 +2939 +2940 +2941 +2944 +2945 +2946 +2947 +2952 +2954 +2957 +2958 +2960 +2962 +2963 +2966 +2967 +2968 +2969 +2970 +2971 +2972 +2973 +2974 +2975 +2976 +2981 +2982 +2984 +2985 +2986 +2988 +2993 +2994 +2996 +2998 +2999 +3000 +3001 +3002 +3004 +3006 +3007 +3008 +3010 +3015 +3016 +3017 +3018 +3019 +3020 +3021 +3022 +3024 +3026 +3027 +3029 +3033 +3034 +3035 +3036 +3037 +3040 +3041 +3042 +3043 +3044 +3045 +3046 +3047 +3048 +3049 +3050 +3051 +3053 +3056 +3057 +3058 +3059 +3060 +3061 +3063 +3065 +3066 +3068 +3069 +3070 +3074 +3077 +3078 +3079 +3080 +3081 +3082 +3083 +3084 +3085 +3086 +3087 +3094 +3095 +3097 +3098 +3100 +3101 +3102 +3103 +3105 +3106 +3108 +3109 +3110 +3111 +3112 +3113 +3114 +3115 +3116 +3117 +3118 +3119 +3121 +3123 +3124 +3125 +3127 +3128 +3130 +3131 +3133 +3134 +3136 +3137 +3140 +3142 +3143 +3144 +3145 +3147 +3148 +3152 +3153 +3155 +3156 +3157 +3159 +3160 +3162 +3164 +3165 +3166 +3168 +3170 +3171 +3173 +3176 +3178 +3179 +3180 +3181 +3184 +3187 +3188 +3190 +3191 +3194 +3197 +3199 +3200 +3201 +3202 +3204 +3209 +3210 +3211 +3212 +3214 +3215 +3217 +3218 +3219 +3221 +3222 +3223 +3224 +3226 +3228 +3229 +3230 +3232 +3237 +3238 +3239 +3240 +3241 +3242 +3243 +3245 +3247 +3248 +3250 +3251 +3252 +3253 +3254 +3257 +3258 +3259 +3260 +3262 +3264 +3266 +3267 +3269 +3270 +3275 +3278 +3279 +3280 +3282 +3284 +3285 +3286 +3287 +3288 +3289 +3292 +3293 +3295 +3296 +3298 +3299 +3300 +3301 +3302 +3303 +3304 +3309 +3311 +3312 +3315 +3316 +3317 +3318 +3319 +3322 +3325 +3328 +3332 +3334 +3339 +3340 +3342 +3346 +3348 +3349 +3350 +3351 +3352 +3354 +3355 +3357 +3358 +3361 +3363 +3364 +3365 +3366 +3367 +3368 +3369 +3370 +3373 +3374 +3377 +3378 +3380 +3381 +3382 +3383 +3384 +3385 +3386 +3391 +3393 +3395 +3396 +3397 +3398 +3399 +3400 +3401 +3402 +3403 +3404 +3405 +3407 +3408 +3409 +3410 +3411 +3415 +3416 +3418 +3420 +3422 +3423 +3424 +3427 +3428 +3431 +3433 +3435 +3437 +3438 +3439 +3440 +3441 +3442 +3443 +3444 +3446 +3449 +3450 +3451 +3452 +3453 +3454 +3455 +3456 +3457 +3460 +3462 +3463 +3464 +3465 +3466 +3467 +3468 +3470 +3475 +3476 +3477 +3483 +3484 +3486 +3487 +3489 +3492 +3496 +3497 +3498 +3500 +3501 +3502 +3505 +3507 +3508 +3509 +3510 +3511 +3512 +3513 +3514 +3515 +3517 +3518 +3519 +3521 +3524 +3525 +3526 +3528 +3529 +3532 +3536 +3541 +3542 +3543 +3544 +3545 +3546 +3549 +3550 +3551 +3552 +3554 +3556 +3557 +3558 +3559 +3560 +3562 +3563 +3564 +3566 +3567 +3568 +3571 +3572 +3573 +3574 +3575 +3576 +3578 +3579 +3580 +3582 +3584 +3585 +3588 +3590 +3592 +3593 +3594 +3599 +3602 +3605 +3606 +3608 +3611 +3612 +3615 +3617 +3620 
+3621 +3622 +3623 +3624 +3625 +3626 +3629 +3630 +3632 +3633 +3636 +3637 +3638 +3641 +3644 +3646 +3647 +3648 +3649 +3654 +3655 +3656 +3657 +3660 +3662 +3667 +3671 +3672 +3673 +3674 +3675 +3676 +3678 +3679 +3680 +3681 +3682 +3683 +3685 +3687 +3691 +3692 +3694 +3695 +3697 +3698 +3699 +3700 +3701 +3703 +3704 +3705 +3707 +3709 +3711 +3712 +3715 +3717 +3718 +3719 +3720 +3721 +3723 +3724 +3725 +3726 +3728 +3729 +3733 +3734 +3735 +3737 +3738 +3739 +3741 +3743 +3746 +3748 +3750 +3753 +3754 +3756 +3757 +3759 +3760 +3762 +3764 +3765 +3766 +3768 +3772 +3773 +3774 +3776 +3777 +3779 +3780 +3782 +3783 +3784 +3785 +3786 +3790 +3792 +3793 +3794 +3798 +3799 +3800 +3801 +3802 +3803 +3804 +3807 +3809 +3813 +3814 +3816 +3819 +3822 +3823 +3824 +3826 +3827 +3828 +3829 +3830 +3831 +3832 +3833 +3834 +3836 +3837 +3838 +3839 +3840 +3841 +3842 +3844 +3845 +3847 +3848 +3849 +3850 +3852 +3854 +3855 +3856 +3857 +3858 +3861 +3862 +3863 +3865 +3869 +3872 +3873 +3874 +3879 +3880 +3883 +3885 +3886 +3887 +3888 +3889 +3890 +3891 +3893 +3894 +3895 +3897 +3898 +3899 +3900 +3901 +3903 +3904 +3906 +3914 +3915 +3916 +3920 +3923 +3924 +3925 +3926 +3928 +3929 +3931 +3932 +3934 +3935 +3937 +3939 +3942 +3943 +3945 +3949 +3950 +3952 +3955 +3956 +3963 +3965 +3966 +3967 +3969 +3970 +3971 +3974 +3976 +3977 +3978 +3980 +3981 +3982 +3983 +3984 +3987 +3988 +3992 +3995 +3996 +3997 +3999 diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/imagematte_train.txt b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/imagematte_train.txt new file mode 100644 index 0000000..314f673 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/imagematte_train.txt @@ -0,0 +1,420 @@ +10743257206_18e7f44f2e_b.jpg +10845279884_d2d4c7b4d1_b.jpg +1-1252426161dfXY.jpg +1-1255621189mTnS.jpg +1-1259162624NMFK.jpg +1-1259245823Un3j.jpg +11363165393_05d7a21d76_b.jpg +131686738165901828.jpg +13564741125_753939e9ce_o.jpg +14731860273_5b40b19b51_o.jpg +16087-a-young-woman-showing-a-bitten-green-apple-pv.jpg +1609484818_b9bb12b.jpg +17620-a-beautiful-woman-in-a-bikini-pv.jpg +20672673163_20c8467827_b.jpg +3262986095_2d5afe583c_b.jpg +3588101233_f91aa5e3a3.jpg +3858897226_cae5b75963_o.jpg +4889657410_2d503ca287_o.jpg +4981835627_c4e6c4ffa8_o.jpg +5025666458_576b974455_o.jpg +5149410930_3a943dc43f_b.jpg +539641011387760661.jpg +5892503248_4b882863c7_o.jpg +604673748289192179.jpg +606189768665464996.jpg +624753897218113578.jpg +657454154710122500.jpg +664308724952072193.jpg +7669262460_e4be408343_b.jpg +8244818049_dfa59a3eb8_b.jpg +8688417335_01f3bafbe5_o.jpg +9434599749_e7ccfc7812_b.jpg +Aaron_Friedman_Headshot.jpg +arrgh___r___28_by_mjranum_stock.jpg +arrgh___r___29_by_mjranum_stock.jpg +arrgh___r___30_by_mjranum_stock.jpg +a-single-person-1084191_960_720.jpg +ballerina-855652_1920.jpg +beautiful-19075_960_720.jpg +boy-454633_1920.jpg +bride-2819673_1920.jpg +bride-442894_1920.jpg +face-1223346_960_720.jpg +fashion-model-portrait.jpg +fashion-model-pose.jpg +girl-1535859_1920.jpg +Girl_in_front_of_a_green_background.jpg +goth_by_bugidifino-d4w7zms.jpg +h_0.jpg +h_100.jpg +h_101.jpg +h_102.jpg +h_103.jpg +h_104.jpg +h_105.jpg +h_106.jpg +h_107.jpg +h_108.jpg +h_109.jpg +h_10.jpg +h_111.jpg +h_112.jpg +h_113.jpg +h_114.jpg +h_115.jpg +h_116.jpg +h_117.jpg +h_118.jpg +h_119.jpg +h_11.jpg +h_120.jpg +h_121.jpg +h_122.jpg +h_123.jpg +h_124.jpg +h_125.jpg +h_126.jpg +h_127.jpg +h_128.jpg +h_129.jpg +h_12.jpg +h_130.jpg +h_131.jpg +h_132.jpg +h_133.jpg +h_134.jpg 
+h_135.jpg +h_136.jpg +h_137.jpg +h_138.jpg +h_139.jpg +h_13.jpg +h_140.jpg +h_141.jpg +h_142.jpg +h_143.jpg +h_144.jpg +h_145.jpg +h_146.jpg +h_147.jpg +h_148.jpg +h_149.jpg +h_14.jpg +h_151.jpg +h_152.jpg +h_153.jpg +h_154.jpg +h_155.jpg +h_156.jpg +h_157.jpg +h_158.jpg +h_159.jpg +h_15.jpg +h_160.jpg +h_161.jpg +h_162.jpg +h_163.jpg +h_164.jpg +h_165.jpg +h_166.jpg +h_167.jpg +h_168.jpg +h_169.jpg +h_170.jpg +h_171.jpg +h_172.jpg +h_173.jpg +h_174.jpg +h_175.jpg +h_176.jpg +h_177.jpg +h_178.jpg +h_179.jpg +h_17.jpg +h_180.jpg +h_181.jpg +h_182.jpg +h_183.jpg +h_184.jpg +h_185.jpg +h_186.jpg +h_187.jpg +h_188.jpg +h_189.jpg +h_18.jpg +h_190.jpg +h_191.jpg +h_192.jpg +h_193.jpg +h_194.jpg +h_195.jpg +h_196.jpg +h_197.jpg +h_198.jpg +h_199.jpg +h_19.jpg +h_1.jpg +h_200.jpg +h_201.jpg +h_202.jpg +h_204.jpg +h_205.jpg +h_206.jpg +h_207.jpg +h_208.jpg +h_209.jpg +h_20.jpg +h_210.jpg +h_211.jpg +h_212.jpg +h_213.jpg +h_214.jpg +h_215.jpg +h_216.jpg +h_217.jpg +h_218.jpg +h_219.jpg +h_21.jpg +h_220.jpg +h_221.jpg +h_222.jpg +h_223.jpg +h_224.jpg +h_225.jpg +h_226.jpg +h_227.jpg +h_228.jpg +h_229.jpg +h_22.jpg +h_230.jpg +h_231.jpg +h_232.jpg +h_233.jpg +h_234.jpg +h_235.jpg +h_236.jpg +h_237.jpg +h_238.jpg +h_239.jpg +h_23.jpg +h_240.jpg +h_241.jpg +h_242.jpg +h_243.jpg +h_244.jpg +h_245.jpg +h_247.jpg +h_248.jpg +h_249.jpg +h_24.jpg +h_250.jpg +h_251.jpg +h_252.jpg +h_253.jpg +h_254.jpg +h_255.jpg +h_256.jpg +h_257.jpg +h_258.jpg +h_259.jpg +h_25.jpg +h_260.jpg +h_261.jpg +h_262.jpg +h_263.jpg +h_264.jpg +h_265.jpg +h_266.jpg +h_268.jpg +h_269.jpg +h_26.jpg +h_270.jpg +h_271.jpg +h_272.jpg +h_273.jpg +h_274.jpg +h_276.jpg +h_277.jpg +h_278.jpg +h_279.jpg +h_27.jpg +h_280.jpg +h_281.jpg +h_282.jpg +h_283.jpg +h_284.jpg +h_285.jpg +h_286.jpg +h_287.jpg +h_288.jpg +h_289.jpg +h_28.jpg +h_290.jpg +h_291.jpg +h_292.jpg +h_293.jpg +h_294.jpg +h_295.jpg +h_296.jpg +h_297.jpg +h_298.jpg +h_299.jpg +h_29.jpg +h_300.jpg +h_301.jpg +h_302.jpg +h_303.jpg +h_304.jpg +h_305.jpg +h_307.jpg +h_308.jpg +h_309.jpg +h_30.jpg +h_310.jpg +h_311.jpg +h_312.jpg +h_313.jpg +h_314.jpg +h_315.jpg +h_316.jpg +h_317.jpg +h_318.jpg +h_319.jpg +h_31.jpg +h_320.jpg +h_321.jpg +h_322.jpg +h_323.jpg +h_324.jpg +h_325.jpg +h_326.jpg +h_327.jpg +h_329.jpg +h_32.jpg +h_33.jpg +h_34.jpg +h_35.jpg +h_36.jpg +h_37.jpg +h_38.jpg +h_39.jpg +h_3.jpg +h_40.jpg +h_41.jpg +h_42.jpg +h_43.jpg +h_44.jpg +h_45.jpg +h_46.jpg +h_47.jpg +h_48.jpg +h_49.jpg +h_4.jpg +h_50.jpg +h_51.jpg +h_52.jpg +h_53.jpg +h_54.jpg +h_55.jpg +h_56.jpg +h_57.jpg +h_58.jpg +h_59.jpg +h_5.jpg +h_60.jpg +h_61.jpg +h_62.jpg +h_63.jpg +h_65.jpg +h_67.jpg +h_68.jpg +h_69.jpg +h_6.jpg +h_70.jpg +h_71.jpg +h_72.jpg +h_73.jpg +h_74.jpg +h_75.jpg +h_76.jpg +h_77.jpg +h_78.jpg +h_79.jpg +h_7.jpg +h_80.jpg +h_81.jpg +h_82.jpg +h_83.jpg +h_84.jpg +h_85.jpg +h_86.jpg +h_87.jpg +h_88.jpg +h_89.jpg +h_8.jpg +h_90.jpg +h_91.jpg +h_92.jpg +h_93.jpg +h_94.jpg +h_95.jpg +h_96.jpg +h_97.jpg +h_98.jpg +h_99.jpg +h_9.jpg +hair-flying-142210_1920.jpg +headshotid_by_bokogreat_stock-d355xf3.jpg +lil_white_goth_grl___23_by_mjranum_stock.jpg +lil_white_goth_grl___26_by_mjranum_stock.jpg +man-388104_960_720.jpg +man_headshot.jpg +MFettes-headshot.jpg +model-429733_960_720.jpg +model-610352_960_720.jpg +model-858753_960_720.jpg +model-858755_960_720.jpg +model-873675_960_720.jpg +model-873678_960_720.jpg +model-873690_960_720.jpg +model-881425_960_720.jpg +model-881431_960_720.jpg +model-female-girl-beautiful-51969.jpg +Model_in_green_dress_3.jpg +Modern_shingle_bob_haircut.jpg 
+Motivate_(Fitness_model).jpg +Official_portrait_of_Barack_Obama.jpg +person-woman-eyes-face.jpg +pink-hair-855660_960_720.jpg +portrait-750774_1920.jpg +Professor_Steven_Chu_ForMemRS_headshot.jpg +sailor_flying_4_by_senshistock-d4k2wmr.jpg +skin-care-937667_960_720.jpg +sorcery___8_by_mjranum_stock.jpg +t_62.jpg +t_65.jpg +test_32.jpg +test_8.jpg +train_245.jpg +train_246.jpg +train_255.jpg +train_304.jpg +train_333.jpg +train_361.jpg +train_395.jpg +train_480.jpg +train_488.jpg +train_539.jpg +wedding-846926_1920.jpg +Wild_hair.jpg +with_wings___pose_reference_by_senshistock-d6by42n_2.jpg +with_wings___pose_reference_by_senshistock-d6by42n.jpg +woman-1138435_960_720.jpg +woman1.jpg +woman2.jpg +woman-659354_960_720.jpg +woman-804072_960_720.jpg +woman-868519_960_720.jpg +Woman_in_white_shirt_on_August_2009_02.jpg +women-878869_1920.jpg diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/imagematte_valid.txt b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/imagematte_valid.txt new file mode 100644 index 0000000..0444046 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/imagematte_valid.txt @@ -0,0 +1,15 @@ +13564741125_753939e9ce_o.jpg +3858897226_cae5b75963_o.jpg +538724499685900405.jpg +ballerina-855652_1920.jpg +boy-454633_1920.jpg +h_110.jpg +h_150.jpg +h_16.jpg +h_246.jpg +h_267.jpg +h_275.jpg +h_306.jpg +h_328.jpg +model-610352_960_720.jpg +t_66.jpg diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/spd_preprocess.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/spd_preprocess.py new file mode 100644 index 0000000..b9a309a --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/misc/spd_preprocess.py @@ -0,0 +1,45 @@ +# pip install supervisely +import supervisely_lib as sly +import numpy as np +import os +from PIL import Image +from tqdm import tqdm + +# Download dataset from +project_root = 'PATH_TO/Supervisely Person Dataset' # <-- Configure input +project = sly.Project(project_root, sly.OpenMode.READ) + +output_path = 'OUTPUT_DIR' # <-- Configure output +os.makedirs(os.path.join(output_path, 'train', 'src')) +os.makedirs(os.path.join(output_path, 'train', 'msk')) +os.makedirs(os.path.join(output_path, 'valid', 'src')) +os.makedirs(os.path.join(output_path, 'valid', 'msk')) + +max_size = 2048 # <-- Configure max size + +for dataset in project.datasets: + for item in tqdm(dataset): + ann = sly.Annotation.load_json_file(dataset.get_ann_path(item), project.meta) + msk = np.zeros(ann.img_size, dtype=np.uint8) + for label in ann.labels: + label.geometry.draw(msk, color=[255]) + msk = Image.fromarray(msk) + + img = Image.open(dataset.get_img_path(item)).convert('RGB') + if img.size[0] > max_size or img.size[1] > max_size: + scale = max_size / max(img.size) + img = img.resize((int(img.size[0] * scale), int(img.size[1] * scale)), Image.BILINEAR) + msk = msk.resize((int(msk.size[0] * scale), int(msk.size[1] * scale)), Image.NEAREST) + + img.save(os.path.join(output_path, 'train', 'src', item.replace('.png', '.jpg'))) + msk.save(os.path.join(output_path, 'train', 'msk', item.replace('.png', '.jpg'))) + +# Move first 100 to validation set +names = os.listdir(os.path.join(output_path, 'train', 'src')) +for name in tqdm(names[:100]): + os.rename( + os.path.join(output_path, 'train', 'src', name), + os.path.join(output_path, 'valid', 'src', name)) + os.rename( + 
os.path.join(output_path, 'train', 'msk', name),
+        os.path.join(output_path, 'valid', 'msk', name))
\ No newline at end of file
diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/training.md b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/training.md
new file mode 100644
index 0000000..a66ee8c
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/documentation/training.md
@@ -0,0 +1,158 @@
+# Training Documentation
+
+This documentation only covers how to reproduce our [paper](https://peterl1n.github.io/RobustVideoMatting/). If you would like to remove or add a dataset to the training, you are responsible for adapting the training code yourself.
+
+## Datasets
+
+The following datasets are used during our training.
+
+**IMPORTANT: If you choose to download our preprocessed versions, please avoid repeated downloads and cache the data locally. All traffic is at our expense. Please be responsible. We may only provide the preprocessed versions for a limited time.**
+
+### Matting Datasets
+* [VideoMatte240K](https://grail.cs.washington.edu/projects/background-matting-v2/#/datasets)
+  * Download the JPEG SD version (6G) for stages 1 and 2.
+  * Download the JPEG HD version (60G) for stages 3 and 4.
+  * Manually move clips `0000`, `0100`, `0200`, `0300` from the training set to a validation set (a sketch for this is shown below, after the dataset lists).
+* ImageMatte
+  * ImageMatte consists of the [Distinctions-646](https://wukaoliu.github.io/HAttMatting/) and [Adobe Image Matting](https://sites.google.com/view/deepimagematting) datasets.
+  * Only needed for stage 4.
+  * You need to contact their authors to acquire them.
+  * After downloading both datasets, merge their samples together to form the ImageMatte dataset.
+  * Only keep samples of humans.
+  * Full list of images we used in ImageMatte for training:
+    * [imagematte_train.txt](/documentation/misc/imagematte_train.txt)
+    * [imagematte_valid.txt](/documentation/misc/imagematte_valid.txt)
+  * Full list of images we used for evaluation:
+    * [aim_test.txt](/documentation/misc/aim_test.txt)
+    * [d646_test.txt](/documentation/misc/d646_test.txt)
+
+### Background Datasets
+* Video Backgrounds
+  * We processed the [DVM Background Set](https://github.com/nowsyn/DVM) by selecting clips without humans and extracting only the first 100 frames as JPEG sequences.
+  * Full list of clips we used:
+    * [dvm_background_train_clips.txt](/documentation/misc/dvm_background_train_clips.txt)
+    * [dvm_background_test_clips.txt](/documentation/misc/dvm_background_test_clips.txt)
+  * You can download our preprocessed versions:
+    * [Train set (14.6G)](https://robustvideomatting.blob.core.windows.net/data/BackgroundVideosTrain.tar) (manually move some clips to a validation set)
+    * [Test set (936M)](https://robustvideomatting.blob.core.windows.net/data/BackgroundVideosTest.tar) (not needed for training; only used for making synthetic test samples for evaluation)
+* Image Backgrounds
+  * Train set:
+    * We crawled 8000 suitable images from Google and Flickr.
+    * We will not publish these images.
+  * [Test set](https://grail.cs.washington.edu/projects/background-matting-v2/#/datasets)
+    * We use the validation background set from the [BGMv2](https://grail.cs.washington.edu/projects/background-matting-v2/) project.
+    * It contains about 200 images.
+    * It is not used in our training; it is only used for making synthetic test samples for evaluation.
+    * But if you just want to quickly try out training, you may use it as a temporary substitute for the train set.
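+
+A minimal sketch of the VideoMatte240K validation split mentioned above, assuming the extracted dataset follows a `videomatte/train/{fgr,pha}/<clip>` layout (the paths are hypothetical; adjust them to your setup):
+
+```python
+import os
+import shutil
+
+root = 'videomatte'  # hypothetical dataset root; adjust to your layout
+for clip in ['0000', '0100', '0200', '0300']:
+    for kind in ['fgr', 'pha']:
+        os.makedirs(os.path.join(root, 'valid', kind), exist_ok=True)
+        # Move the whole clip directory from train/ to valid/.
+        shutil.move(os.path.join(root, 'train', kind, clip),
+                    os.path.join(root, 'valid', kind, clip))
+```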
+
+### Segmentation Datasets
+
+* [COCO](https://cocodataset.org/#download)
+  * Download [train2017.zip (18G)](http://images.cocodataset.org/zips/train2017.zip)
+  * Download [panoptic_annotations_trainval2017.zip (821M)](http://images.cocodataset.org/annotations/panoptic_annotations_trainval2017.zip)
+  * Note that our train script expects the panoptic version.
+* [YouTubeVIS 2021](https://youtube-vos.org/dataset/vis/)
+  * Download the train set. No preprocessing needed.
+* [Supervisely Person Dataset](https://supervise.ly/explore/projects/supervisely-person-dataset-23304/datasets)
+  * We used the supervisely library to convert their encoding to bitmap masks before using our script. We also resized down some of the large images to avoid a disk-loading bottleneck.
+  * You can refer to [spd_preprocess.py](/documentation/misc/spd_preprocess.py)
+  * Or, you can download our [preprocessed version (800M)](https://robustvideomatting.blob.core.windows.net/data/SuperviselyPersonDataset.tar)
+
+## Training
+
+For reference, our training was done on data center machines with 48 CPU cores, 300G of CPU memory, and 4 Nvidia V100 32G GPUs.
+
+During our official training, the code contained custom logic for our infrastructure. For release, the script has been cleaned up. There may be bugs in this version of the code that were not present in our official training. If you find problems, please file an issue.
+
+After you have downloaded the datasets, please configure `train_config.py` to provide the paths to your datasets.
+
+The training consists of 4 stages. For details, please refer to the [paper](https://peterl1n.github.io/RobustVideoMatting/).
+
+### Stage 1
+```sh
+python train.py \
+    --model-variant mobilenetv3 \
+    --dataset videomatte \
+    --resolution-lr 512 \
+    --seq-length-lr 15 \
+    --learning-rate-backbone 0.0001 \
+    --learning-rate-aspp 0.0002 \
+    --learning-rate-decoder 0.0002 \
+    --learning-rate-refiner 0 \
+    --checkpoint-dir checkpoint/stage1 \
+    --log-dir log/stage1 \
+    --epoch-start 0 \
+    --epoch-end 20
+```
+
+### Stage 2
+```sh
+python train.py \
+    --model-variant mobilenetv3 \
+    --dataset videomatte \
+    --resolution-lr 512 \
+    --seq-length-lr 50 \
+    --learning-rate-backbone 0.00005 \
+    --learning-rate-aspp 0.0001 \
+    --learning-rate-decoder 0.0001 \
+    --learning-rate-refiner 0 \
+    --checkpoint checkpoint/stage1/epoch-19.pth \
+    --checkpoint-dir checkpoint/stage2 \
+    --log-dir log/stage2 \
+    --epoch-start 20 \
+    --epoch-end 22
+```
+
+### Stage 3
+```sh
+python train.py \
+    --model-variant mobilenetv3 \
+    --dataset videomatte \
+    --train-hr \
+    --resolution-lr 512 \
+    --resolution-hr 2048 \
+    --seq-length-lr 40 \
+    --seq-length-hr 6 \
+    --learning-rate-backbone 0.00001 \
+    --learning-rate-aspp 0.00001 \
+    --learning-rate-decoder 0.00001 \
+    --learning-rate-refiner 0.0002 \
+    --checkpoint checkpoint/stage2/epoch-21.pth \
+    --checkpoint-dir checkpoint/stage3 \
+    --log-dir log/stage3 \
+    --epoch-start 22 \
+    --epoch-end 23
+```
+
+### Stage 4
+```sh
+python train.py \
+    --model-variant mobilenetv3 \
+    --dataset imagematte \
+    --train-hr \
+    --resolution-lr 512 \
+    --resolution-hr 2048 \
+    --seq-length-lr 40 \
+    --seq-length-hr 6 \
+    --learning-rate-backbone 0.00001 \
+    --learning-rate-aspp 0.00001 \
+    --learning-rate-decoder 0.00005 \
+    --learning-rate-refiner 0.0002 \
+    --checkpoint checkpoint/stage3/epoch-22.pth \
+    --checkpoint-dir checkpoint/stage4 \
+    --log-dir log/stage4 \
+    --epoch-start 23 \
+    --epoch-end 28
+```
+


+
+## Evaluation
+
+We synthetically composite test samples onto both image and video backgrounds. Image samples (from D646, AIM) are augmented with synthetic motion.
+
+We only provide the composited VideoMatte240K test set, which is used in our paper's evaluation. For D646 and AIM, you need to acquire the data from their authors and composite the test samples yourself. The composition scripts we used are saved in the `/evaluation` folder as a reference backup. You need to modify them based on your setup.
+
+* [videomatte_512x288.tar (PNG 1.8G)](https://robustvideomatting.blob.core.windows.net/eval/videomatte_512x288.tar)
+* [videomatte_1920x1080.tar (JPG 2.2G)](https://robustvideomatting.blob.core.windows.net/eval/videomatte_1920x1080.tar)
+
+Evaluation scripts are provided in the `/evaluation` folder.
\ No newline at end of file
diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/evaluation/evaluate_hr.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/evaluation/evaluate_hr.py
new file mode 100644
index 0000000..cb5d146
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/evaluation/evaluate_hr.py
@@ -0,0 +1,216 @@
+"""
+HR (High-Resolution) evaluation. We found NumPy to be very slow at high resolutions, so we moved the computation to PyTorch on CUDA.
+
+Note: this script only does evaluation. You will need to run inference yourself first and save the results to disk.
+Expected directory format for both prediction and ground-truth is:
+
+    videomatte_1920x1080
+        ├── videomatte_motion
+          ├── pha
+            ├── 0000
+              ├── 0000.png
+          ├── fgr
+            ├── 0000
+              ├── 0000.png
+        ├── videomatte_static
+          ├── pha
+            ├── 0000
+              ├── 0000.png
+          ├── fgr
+            ├── 0000
+              ├── 0000.png
+
+Prediction must have the exact same file structure and file names as the ground-truth,
+meaning that if the ground-truth is png/jpg, the prediction should be png/jpg.
+
+Example usage:
+
+python evaluate_hr.py \
+    --pred-dir pred/videomatte_1920x1080 \
+    --true-dir true/videomatte_1920x1080
+
+An Excel sheet with evaluation results will be written to "pred/videomatte_1920x1080/videomatte_1920x1080.xlsx"
+"""
+
+
+import argparse
+import os
+import cv2
+import kornia
+import numpy as np
+import xlsxwriter
+import torch
+from concurrent.futures import ThreadPoolExecutor
+from tqdm import tqdm
+
+
+class Evaluator:
+    def __init__(self):
+        self.parse_args()
+        self.init_metrics()
+        self.evaluate()
+        self.write_excel()
+
+    def parse_args(self):
+        parser = argparse.ArgumentParser()
+        parser.add_argument('--pred-dir', type=str, required=True)
+        parser.add_argument('--true-dir', type=str, required=True)
+        parser.add_argument('--num-workers', type=int, default=48)
+        parser.add_argument('--metrics', type=str, nargs='+', default=[
+            'pha_mad', 'pha_mse', 'pha_grad', 'pha_dtssd', 'fgr_mse'])
+        self.args = parser.parse_args()
+
+    def init_metrics(self):
+        self.mad = MetricMAD()
+        self.mse = MetricMSE()
+        self.grad = MetricGRAD()
+        self.dtssd = MetricDTSSD()
+
+    def evaluate(self):
+        tasks = []
+        position = 0
+
+        with ThreadPoolExecutor(max_workers=self.args.num_workers) as executor:
+            for dataset in sorted(os.listdir(self.args.pred_dir)):
+                if os.path.isdir(os.path.join(self.args.pred_dir, dataset)):
+                    for clip in sorted(os.listdir(os.path.join(self.args.pred_dir, dataset))):
+                        future = executor.submit(self.evaluate_worker, dataset, clip, position)
+                        tasks.append((dataset, clip, future))
+                        position += 1
+
+        self.results = [(dataset, clip, future.result()) for dataset, clip, future in tasks]
+
+    def write_excel(self):
+        workbook = xlsxwriter.Workbook(os.path.join(self.args.pred_dir, f'{os.path.basename(self.args.pred_dir)}.xlsx'))
+        summarysheet = workbook.add_worksheet('summary')
+        metricsheets = [workbook.add_worksheet(metric) for metric in self.results[0][2].keys()]
+
+        for i, metric in enumerate(self.results[0][2].keys()):
+            summarysheet.write(i, 0, metric)
+            summarysheet.write(i, 1, f'={metric}!B2')
+
+        for row, (dataset, clip, metrics) in enumerate(self.results):
+            for metricsheet, metric in zip(metricsheets, metrics.values()):
+                # Write the header
+                if row == 0:
+                    metricsheet.write(1, 0, 'Average')
+                    metricsheet.write(1, 1, f'=AVERAGE(C2:ZZ2)')
+                    for col in range(len(metric)):
+                        metricsheet.write(0, col + 2, col)
+                        colname = xlsxwriter.utility.xl_col_to_name(col + 2)
+                        metricsheet.write(1, col + 2, f'=AVERAGE({colname}3:{colname}9999)')
+
+                metricsheet.write(row + 2, 0, dataset)
+                metricsheet.write(row + 2, 1, clip)
+                metricsheet.write_row(row + 2, 2, metric)
+
+        workbook.close()
+
+    def evaluate_worker(self, dataset, clip, position):
+        framenames = sorted(os.listdir(os.path.join(self.args.pred_dir, dataset, clip, 'pha')))
+        metrics = {metric_name : [] for metric_name in self.args.metrics}
+
+        pred_pha_tm1 = None
+        true_pha_tm1 = None
+
+        for i, framename in enumerate(tqdm(framenames, desc=f'{dataset} {clip}', position=position, dynamic_ncols=True)):
+            true_pha = cv2.imread(os.path.join(self.args.true_dir, dataset, clip, 'pha', framename), cv2.IMREAD_GRAYSCALE)
+            pred_pha = cv2.imread(os.path.join(self.args.pred_dir, dataset, clip, 'pha', framename), cv2.IMREAD_GRAYSCALE)
+
+            true_pha = torch.from_numpy(true_pha).cuda(non_blocking=True).float().div_(255)
+            pred_pha = torch.from_numpy(pred_pha).cuda(non_blocking=True).float().div_(255)
+
+            if 'pha_mad' in self.args.metrics:
+                metrics['pha_mad'].append(self.mad(pred_pha, true_pha))
+            if 'pha_mse' in self.args.metrics:
+                metrics['pha_mse'].append(self.mse(pred_pha, true_pha))
+            if 'pha_grad' in self.args.metrics:
+                metrics['pha_grad'].append(self.grad(pred_pha, true_pha))
+            # Note: the connectivity metric ('pha_conn') is only implemented in
+            # evaluate_lr.py and is not supported by this HR script.
+            if 'pha_dtssd' in self.args.metrics:
+                if i == 0:
+                    metrics['pha_dtssd'].append(0)
+                else:
+                    metrics['pha_dtssd'].append(self.dtssd(pred_pha, pred_pha_tm1, true_pha, true_pha_tm1))
+
+            pred_pha_tm1 = pred_pha
+            true_pha_tm1 = true_pha
+
+        if 'fgr_mse' in self.args.metrics:
+            true_fgr = cv2.imread(os.path.join(self.args.true_dir, dataset, clip, 'fgr', framename), cv2.IMREAD_COLOR)
+            pred_fgr = cv2.imread(os.path.join(self.args.pred_dir, dataset, clip, 'fgr', framename), cv2.IMREAD_COLOR)
+
+            # Move to CUDA so the mask indexing below matches the device of true_pha.
+            true_fgr = torch.from_numpy(true_fgr).cuda(non_blocking=True).float().div_(255)
+            pred_fgr = torch.from_numpy(pred_fgr).cuda(non_blocking=True).float().div_(255)
+
+            true_msk = true_pha > 0
+            metrics['fgr_mse'].append(self.mse(pred_fgr[true_msk], true_fgr[true_msk]))
+
+        return metrics
+
+
+class MetricMAD:
+    def __call__(self, pred, true):
+        return (pred - true).abs_().mean() * 1e3
+
+
+class MetricMSE:
+    def __call__(self, pred, true):
+        return ((pred - true) ** 2).mean() * 1e3
+
+
+class MetricGRAD:
+    def __init__(self, sigma=1.4):
+        self.filter_x, self.filter_y = self.gauss_filter(sigma)
+        self.filter_x = torch.from_numpy(self.filter_x).unsqueeze(0).cuda()
+        self.filter_y = torch.from_numpy(self.filter_y).unsqueeze(0).cuda()
+
+    def __call__(self, pred, true):
+        true_grad = self.gauss_gradient(true)
+        pred_grad = self.gauss_gradient(pred)
+        return ((true_grad - pred_grad) ** 2).sum() / 1000
+
+    def gauss_gradient(self, img):
+        img_filtered_x = kornia.filters.filter2D(img[None, None, :, :], self.filter_x, border_type='replicate')[0, 0]
+        img_filtered_y = kornia.filters.filter2D(img[None, None, :, :], self.filter_y, border_type='replicate')[0, 0]
+        return (img_filtered_x**2 + img_filtered_y**2).sqrt()
+
+    @staticmethod
+    def gauss_filter(sigma, epsilon=1e-2):
+        half_size = np.ceil(sigma * np.sqrt(-2 * np.log(np.sqrt(2 * np.pi) * sigma * epsilon)))
+        size = int(2 * half_size + 1)  # use the builtin int; np.int is removed in newer NumPy
+
+        # create filter in x axis
+        filter_x = np.zeros((size, size))
+        for i in range(size):
+            for j in range(size):
+                filter_x[i, j] = MetricGRAD.gaussian(i - half_size, sigma) * MetricGRAD.dgaussian(
+                    j - half_size, sigma)
+
+        # normalize filter
+        norm = np.sqrt((filter_x**2).sum())
+        filter_x = filter_x / norm
+        filter_y = np.transpose(filter_x)
+
+        return filter_x, filter_y
+
+    @staticmethod
+    def gaussian(x, sigma):
+        return np.exp(-x**2 / (2 * sigma**2)) / (sigma * np.sqrt(2 * np.pi))
+
+    @staticmethod
+    def dgaussian(x, sigma):
+        return -x * MetricGRAD.gaussian(x, sigma) / sigma**2
+
+
+class MetricDTSSD:
+    def __call__(self, pred_t, pred_tm1, true_t, true_tm1):
+        dtSSD = ((pred_t - pred_tm1) - (true_t - true_tm1)) ** 2
+        dtSSD = dtSSD.sum() / true_t.numel()
+        dtSSD = dtSSD.sqrt()
+        return dtSSD * 1e2
+
+
+if __name__ == '__main__':
+    Evaluator()
\ No newline at end of file
diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/evaluation/evaluate_lr.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/evaluation/evaluate_lr.py
new file mode 100644
index 0000000..92d671f
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/evaluation/evaluate_lr.py
@@ -0,0 +1,254 @@
+"""
+LR (Low-Resolution) evaluation.
+
+Note: this script only does evaluation.
You will need to run inference yourself first and save the results to disk.
+Expected directory format for both prediction and ground-truth is:
+
+    videomatte_512x288
+        ├── videomatte_motion
+          ├── pha
+            ├── 0000
+              ├── 0000.png
+          ├── fgr
+            ├── 0000
+              ├── 0000.png
+        ├── videomatte_static
+          ├── pha
+            ├── 0000
+              ├── 0000.png
+          ├── fgr
+            ├── 0000
+              ├── 0000.png
+
+Prediction must have the exact same file structure and file names as the ground-truth,
+meaning that if the ground-truth is png/jpg, the prediction should be png/jpg.
+
+Example usage:
+
+python evaluate_lr.py \
+    --pred-dir PATH_TO_PREDICTIONS/videomatte_512x288 \
+    --true-dir PATH_TO_GROUNDTRUTH/videomatte_512x288
+
+An Excel sheet with evaluation results will be written to "PATH_TO_PREDICTIONS/videomatte_512x288/videomatte_512x288.xlsx"
+"""
+
+
+import argparse
+import os
+import cv2
+import numpy as np
+import xlsxwriter
+from concurrent.futures import ThreadPoolExecutor
+from tqdm import tqdm
+
+
+class Evaluator:
+    def __init__(self):
+        self.parse_args()
+        self.init_metrics()
+        self.evaluate()
+        self.write_excel()
+
+    def parse_args(self):
+        parser = argparse.ArgumentParser()
+        parser.add_argument('--pred-dir', type=str, required=True)
+        parser.add_argument('--true-dir', type=str, required=True)
+        parser.add_argument('--num-workers', type=int, default=48)
+        parser.add_argument('--metrics', type=str, nargs='+', default=[
+            'pha_mad', 'pha_mse', 'pha_grad', 'pha_conn', 'pha_dtssd', 'fgr_mad', 'fgr_mse'])
+        self.args = parser.parse_args()
+
+    def init_metrics(self):
+        self.mad = MetricMAD()
+        self.mse = MetricMSE()
+        self.grad = MetricGRAD()
+        self.conn = MetricCONN()
+        self.dtssd = MetricDTSSD()
+
+    def evaluate(self):
+        tasks = []
+        position = 0
+
+        with ThreadPoolExecutor(max_workers=self.args.num_workers) as executor:
+            for dataset in sorted(os.listdir(self.args.pred_dir)):
+                if os.path.isdir(os.path.join(self.args.pred_dir, dataset)):
+                    for clip in sorted(os.listdir(os.path.join(self.args.pred_dir, dataset))):
+                        future = executor.submit(self.evaluate_worker, dataset, clip, position)
+                        tasks.append((dataset, clip, future))
+                        position += 1
+
+        self.results = [(dataset, clip, future.result()) for dataset, clip, future in tasks]
+
+    def write_excel(self):
+        workbook = xlsxwriter.Workbook(os.path.join(self.args.pred_dir, f'{os.path.basename(self.args.pred_dir)}.xlsx'))
+        summarysheet = workbook.add_worksheet('summary')
+        metricsheets = [workbook.add_worksheet(metric) for metric in self.results[0][2].keys()]
+
+        for i, metric in enumerate(self.results[0][2].keys()):
+            summarysheet.write(i, 0, metric)
+            summarysheet.write(i, 1, f'={metric}!B2')
+
+        for row, (dataset, clip, metrics) in enumerate(self.results):
+            for metricsheet, metric in zip(metricsheets, metrics.values()):
+                # Write the header
+                if row == 0:
+                    metricsheet.write(1, 0, 'Average')
+                    metricsheet.write(1, 1, f'=AVERAGE(C2:ZZ2)')
+                    for col in range(len(metric)):
+                        metricsheet.write(0, col + 2, col)
+                        colname = xlsxwriter.utility.xl_col_to_name(col + 2)
+                        metricsheet.write(1, col + 2, f'=AVERAGE({colname}3:{colname}9999)')
+
+                metricsheet.write(row + 2, 0, dataset)
+                metricsheet.write(row + 2, 1, clip)
+                metricsheet.write_row(row + 2, 2, metric)
+
+        workbook.close()
+
+    def evaluate_worker(self, dataset, clip, position):
+        framenames = sorted(os.listdir(os.path.join(self.args.pred_dir, dataset, clip, 'pha')))
+        metrics = {metric_name : [] for metric_name in self.args.metrics}
+
+        pred_pha_tm1 = None
+        true_pha_tm1 = None
+
+        for i, framename in enumerate(tqdm(framenames, desc=f'{dataset} {clip}', position=position, dynamic_ncols=True)):
+            true_pha = cv2.imread(os.path.join(self.args.true_dir, dataset, clip, 'pha', framename), cv2.IMREAD_GRAYSCALE).astype(np.float32) / 255
+            pred_pha = cv2.imread(os.path.join(self.args.pred_dir, dataset, clip, 'pha', framename), cv2.IMREAD_GRAYSCALE).astype(np.float32) / 255
+            if 'pha_mad' in self.args.metrics:
+                metrics['pha_mad'].append(self.mad(pred_pha, true_pha))
+            if 'pha_mse' in self.args.metrics:
+                metrics['pha_mse'].append(self.mse(pred_pha, true_pha))
+            if 'pha_grad' in self.args.metrics:
+                metrics['pha_grad'].append(self.grad(pred_pha, true_pha))
+            if 'pha_conn' in self.args.metrics:
+                metrics['pha_conn'].append(self.conn(pred_pha, true_pha))
+            if 'pha_dtssd' in self.args.metrics:
+                if i == 0:
+                    metrics['pha_dtssd'].append(0)
+                else:
+                    metrics['pha_dtssd'].append(self.dtssd(pred_pha, pred_pha_tm1, true_pha, true_pha_tm1))
+
+            pred_pha_tm1 = pred_pha
+            true_pha_tm1 = true_pha
+
+            if 'fgr_mse' in self.args.metrics or 'fgr_mad' in self.args.metrics:
+                true_fgr = cv2.imread(os.path.join(self.args.true_dir, dataset, clip, 'fgr', framename), cv2.IMREAD_COLOR).astype(np.float32) / 255
+                pred_fgr = cv2.imread(os.path.join(self.args.pred_dir, dataset, clip, 'fgr', framename), cv2.IMREAD_COLOR).astype(np.float32) / 255
+                true_msk = true_pha > 0
+
+                if 'fgr_mse' in self.args.metrics:
+                    metrics['fgr_mse'].append(self.mse(pred_fgr[true_msk], true_fgr[true_msk]))
+                if 'fgr_mad' in self.args.metrics:
+                    metrics['fgr_mad'].append(self.mad(pred_fgr[true_msk], true_fgr[true_msk]))
+
+        return metrics
+
+
+class MetricMAD:
+    def __call__(self, pred, true):
+        return np.abs(pred - true).mean() * 1e3
+
+
+class MetricMSE:
+    def __call__(self, pred, true):
+        return ((pred - true) ** 2).mean() * 1e3
+
+
+class MetricGRAD:
+    def __init__(self, sigma=1.4):
+        self.filter_x, self.filter_y = self.gauss_filter(sigma)
+
+    def __call__(self, pred, true):
+        pred_normed = np.zeros_like(pred)
+        true_normed = np.zeros_like(true)
+        cv2.normalize(pred, pred_normed, 1., 0., cv2.NORM_MINMAX)
+        cv2.normalize(true, true_normed, 1., 0., cv2.NORM_MINMAX)
+
+        true_grad = self.gauss_gradient(true_normed).astype(np.float32)
+        pred_grad = self.gauss_gradient(pred_normed).astype(np.float32)
+
+        grad_loss = ((true_grad - pred_grad) ** 2).sum()
+        return grad_loss / 1000
+
+    def gauss_gradient(self, img):
+        img_filtered_x = cv2.filter2D(img, -1, self.filter_x, borderType=cv2.BORDER_REPLICATE)
+        img_filtered_y = cv2.filter2D(img, -1, self.filter_y, borderType=cv2.BORDER_REPLICATE)
+        return np.sqrt(img_filtered_x**2 + img_filtered_y**2)
+
+    @staticmethod
+    def gauss_filter(sigma, epsilon=1e-2):
+        half_size = np.ceil(sigma * np.sqrt(-2 * np.log(np.sqrt(2 * np.pi) * sigma * epsilon)))
+        size = int(2 * half_size + 1)  # use the builtin int; np.int is removed in newer NumPy
+
+        # create filter in x axis
+        filter_x = np.zeros((size, size))
+        for i in range(size):
+            for j in range(size):
+                filter_x[i, j] = MetricGRAD.gaussian(i - half_size, sigma) * MetricGRAD.dgaussian(
+                    j - half_size, sigma)
+
+        # normalize filter
+        norm = np.sqrt((filter_x**2).sum())
+        filter_x = filter_x / norm
+        filter_y = np.transpose(filter_x)
+
+        return filter_x, filter_y
+
+    @staticmethod
+    def gaussian(x, sigma):
+        return np.exp(-x**2 / (2 * sigma**2)) / (sigma * np.sqrt(2 * np.pi))
+
+    @staticmethod
+    def dgaussian(x, sigma):
+        return -x * MetricGRAD.gaussian(x, sigma) / sigma**2
+
+
+class MetricCONN:
+    def __call__(self, pred, true):
+        step = 0.1
+        thresh_steps = np.arange(0, 1 + step, step)
+        round_down_map = -np.ones_like(true)
+        for i in range(1,
len(thresh_steps)): + true_thresh = true >= thresh_steps[i] + pred_thresh = pred >= thresh_steps[i] + intersection = (true_thresh & pred_thresh).astype(np.uint8) + + # connected components + _, output, stats, _ = cv2.connectedComponentsWithStats( + intersection, connectivity=4) + # start from 1 in dim 0 to exclude background + size = stats[1:, -1] + + # largest connected component of the intersection + omega = np.zeros_like(true) + if len(size) != 0: + max_id = np.argmax(size) + # plus one to include background + omega[output == max_id + 1] = 1 + + mask = (round_down_map == -1) & (omega == 0) + round_down_map[mask] = thresh_steps[i - 1] + round_down_map[round_down_map == -1] = 1 + + true_diff = true - round_down_map + pred_diff = pred - round_down_map + # only calculate difference larger than or equal to 0.15 + true_phi = 1 - true_diff * (true_diff >= 0.15) + pred_phi = 1 - pred_diff * (pred_diff >= 0.15) + + connectivity_error = np.sum(np.abs(true_phi - pred_phi)) + return connectivity_error / 1000 + + +class MetricDTSSD: + def __call__(self, pred_t, pred_tm1, true_t, true_tm1): + dtSSD = ((pred_t - pred_tm1) - (true_t - true_tm1)) ** 2 + dtSSD = np.sum(dtSSD) / true_t.size + dtSSD = np.sqrt(dtSSD) + return dtSSD * 1e2 + + + +if __name__ == '__main__': + Evaluator() \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/evaluation/generate_imagematte_with_background_image.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/evaluation/generate_imagematte_with_background_image.py new file mode 100644 index 0000000..9d836f1 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/evaluation/generate_imagematte_with_background_image.py @@ -0,0 +1,146 @@ +""" +python generate_imagematte_with_background_image.py \ + --imagematte-dir ../matting-data/Distinctions/test \ + --background-dir ../matting-data/Backgrounds/valid \ + --resolution 512 \ + --out-dir ../matting-data/evaluation/distinction_static_sd/ \ + --random-seed 10 + +Seed: + 10 - distinction-static + 11 - distinction-motion + 12 - adobe-static + 13 - adobe-motion + +""" + +import argparse +import os +import pims +import numpy as np +import random +from PIL import Image +from tqdm import tqdm +from tqdm.contrib.concurrent import process_map +from torchvision import transforms +from torchvision.transforms import functional as F + +parser = argparse.ArgumentParser() +parser.add_argument('--imagematte-dir', type=str, required=True) +parser.add_argument('--background-dir', type=str, required=True) +parser.add_argument('--num-samples', type=int, default=20) +parser.add_argument('--num-frames', type=int, default=100) +parser.add_argument('--resolution', type=int, required=True) +parser.add_argument('--out-dir', type=str, required=True) +parser.add_argument('--random-seed', type=int) +parser.add_argument('--extension', type=str, default='.png') +args = parser.parse_args() + +random.seed(args.random_seed) + +imagematte_filenames = os.listdir(os.path.join(args.imagematte_dir, 'fgr')) +background_filenames = os.listdir(args.background_dir) +random.shuffle(imagematte_filenames) +random.shuffle(background_filenames) + + +def lerp(a, b, percentage): + return a * (1 - percentage) + b * percentage + +def motion_affine(*imgs): + config = dict(degrees=(-10, 10), translate=(0.1, 0.1), + scale_ranges=(0.9, 1.1), shears=(-5, 5), img_size=imgs[0][0].size) + angleA, (transXA, transYA), scaleA, (shearXA, shearYA) = transforms.RandomAffine.get_params(**config) + angleB, (transXB, 
transYB), scaleB, (shearXB, shearYB) = transforms.RandomAffine.get_params(**config) + + T = len(imgs[0]) + variation_over_time = random.random() + for t in range(T): + percentage = (t / (T - 1)) * variation_over_time + angle = lerp(angleA, angleB, percentage) + transX = lerp(transXA, transXB, percentage) + transY = lerp(transYA, transYB, percentage) + scale = lerp(scaleA, scaleB, percentage) + shearX = lerp(shearXA, shearXB, percentage) + shearY = lerp(shearYA, shearYB, percentage) + for img in imgs: + img[t] = F.affine(img[t], angle, (transX, transY), scale, (shearX, shearY), F.InterpolationMode.BILINEAR) + return imgs + + + +def process(i): + imagematte_filename = imagematte_filenames[i % len(imagematte_filenames)] + background_filename = background_filenames[i % len(background_filenames)] + + out_path = os.path.join(args.out_dir, str(i).zfill(4)) + os.makedirs(os.path.join(out_path, 'fgr'), exist_ok=True) + os.makedirs(os.path.join(out_path, 'pha'), exist_ok=True) + os.makedirs(os.path.join(out_path, 'com'), exist_ok=True) + os.makedirs(os.path.join(out_path, 'bgr'), exist_ok=True) + + with Image.open(os.path.join(args.background_dir, background_filename)) as bgr: + bgr = bgr.convert('RGB') + + w, h = bgr.size + scale = args.resolution / min(h, w) + w, h = int(w * scale), int(h * scale) + bgr = bgr.resize((w, h)) + bgr = F.center_crop(bgr, (args.resolution, args.resolution)) + + with Image.open(os.path.join(args.imagematte_dir, 'fgr', imagematte_filename)) as fgr, \ + Image.open(os.path.join(args.imagematte_dir, 'pha', imagematte_filename)) as pha: + fgr = fgr.convert('RGB') + pha = pha.convert('L') + + fgrs = [fgr] * args.num_frames + phas = [pha] * args.num_frames + fgrs, phas = motion_affine(fgrs, phas) + + for t in tqdm(range(args.num_frames), desc=str(i).zfill(4)): + fgr = fgrs[t] + pha = phas[t] + + w, h = fgr.size + scale = args.resolution / max(h, w) + w, h = int(w * scale), int(h * scale) + + fgr = fgr.resize((w, h)) + pha = pha.resize((w, h)) + + if h < args.resolution: + pt = (args.resolution - h) // 2 + pb = args.resolution - h - pt + else: + pt = 0 + pb = 0 + + if w < args.resolution: + pl = (args.resolution - w) // 2 + pr = args.resolution - w - pl + else: + pl = 0 + pr = 0 + + fgr = F.pad(fgr, [pl, pt, pr, pb]) + pha = F.pad(pha, [pl, pt, pr, pb]) + + if i // len(imagematte_filenames) % 2 == 1: + fgr = fgr.transpose(Image.FLIP_LEFT_RIGHT) + pha = pha.transpose(Image.FLIP_LEFT_RIGHT) + + fgr.save(os.path.join(out_path, 'fgr', str(t).zfill(4) + args.extension)) + pha.save(os.path.join(out_path, 'pha', str(t).zfill(4) + args.extension)) + + if t == 0: + bgr.save(os.path.join(out_path, 'bgr', str(t).zfill(4) + args.extension)) + else: + os.symlink(str(0).zfill(4) + args.extension, os.path.join(out_path, 'bgr', str(t).zfill(4) + args.extension)) + + pha = np.asarray(pha).astype(float)[:, :, None] / 255 + com = Image.fromarray(np.uint8(np.asarray(fgr) * pha + np.asarray(bgr) * (1 - pha))) + com.save(os.path.join(out_path, 'com', str(t).zfill(4) + args.extension)) + + +if __name__ == '__main__': + r = process_map(process, range(args.num_samples), max_workers=32) \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/evaluation/generate_imagematte_with_background_video.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/evaluation/generate_imagematte_with_background_video.py new file mode 100644 index 0000000..6c67c7b --- /dev/null +++ 
b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/evaluation/generate_imagematte_with_background_video.py @@ -0,0 +1,189 @@ +""" +python generate_imagematte_with_background_video.py \ + --imagematte-dir ../matting-data/Distinctions/test \ + --background-dir ../matting-data/BackgroundVideos_mp4/test \ + --resolution 512 \ + --out-dir ../matting-data/evaluation/distinction_motion_sd/ \ + --random-seed 11 + +Seed: + 10 - distinction-static + 11 - distinction-motion + 12 - adobe-static + 13 - adobe-motion + +""" + +import argparse +import os +import pims +import numpy as np +import random +from multiprocessing import Pool +from PIL import Image +# from tqdm import tqdm +from tqdm.contrib.concurrent import process_map +from torchvision import transforms +from torchvision.transforms import functional as F + +parser = argparse.ArgumentParser() +parser.add_argument('--imagematte-dir', type=str, required=True) +parser.add_argument('--background-dir', type=str, required=True) +parser.add_argument('--num-samples', type=int, default=20) +parser.add_argument('--num-frames', type=int, default=100) +parser.add_argument('--resolution', type=int, required=True) +parser.add_argument('--out-dir', type=str, required=True) +parser.add_argument('--random-seed', type=int) +parser.add_argument('--extension', type=str, default='.png') +args = parser.parse_args() + +random.seed(args.random_seed) + +imagematte_filenames = os.listdir(os.path.join(args.imagematte_dir, 'fgr')) +random.shuffle(imagematte_filenames) + +background_filenames = [ + "0000.mp4", + "0007.mp4", + "0008.mp4", + "0010.mp4", + "0013.mp4", + "0015.mp4", + "0016.mp4", + "0018.mp4", + "0021.mp4", + "0029.mp4", + "0033.mp4", + "0035.mp4", + "0039.mp4", + "0050.mp4", + "0052.mp4", + "0055.mp4", + "0060.mp4", + "0063.mp4", + "0087.mp4", + "0086.mp4", + "0090.mp4", + "0101.mp4", + "0110.mp4", + "0117.mp4", + "0120.mp4", + "0122.mp4", + "0123.mp4", + "0125.mp4", + "0128.mp4", + "0131.mp4", + "0172.mp4", + "0176.mp4", + "0181.mp4", + "0187.mp4", + "0193.mp4", + "0198.mp4", + "0220.mp4", + "0221.mp4", + "0224.mp4", + "0229.mp4", + "0233.mp4", + "0238.mp4", + "0241.mp4", + "0245.mp4", + "0246.mp4" +] + +random.shuffle(background_filenames) + +def lerp(a, b, percentage): + return a * (1 - percentage) + b * percentage + +def motion_affine(*imgs): + config = dict(degrees=(-10, 10), translate=(0.1, 0.1), + scale_ranges=(0.9, 1.1), shears=(-5, 5), img_size=imgs[0][0].size) + angleA, (transXA, transYA), scaleA, (shearXA, shearYA) = transforms.RandomAffine.get_params(**config) + angleB, (transXB, transYB), scaleB, (shearXB, shearYB) = transforms.RandomAffine.get_params(**config) + + T = len(imgs[0]) + variation_over_time = random.random() + for t in range(T): + percentage = (t / (T - 1)) * variation_over_time + angle = lerp(angleA, angleB, percentage) + transX = lerp(transXA, transXB, percentage) + transY = lerp(transYA, transYB, percentage) + scale = lerp(scaleA, scaleB, percentage) + shearX = lerp(shearXA, shearXB, percentage) + shearY = lerp(shearYA, shearYB, percentage) + for img in imgs: + img[t] = F.affine(img[t], angle, (transX, transY), scale, (shearX, shearY), F.InterpolationMode.BILINEAR) + return imgs + + +def process(i): + imagematte_filename = imagematte_filenames[i % len(imagematte_filenames)] + background_filename = background_filenames[i % len(background_filenames)] + + bgrs = pims.PyAVVideoReader(os.path.join(args.background_dir, background_filename)) + + out_path = os.path.join(args.out_dir, str(i).zfill(4)) + 
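The motion_affine helper above is what turns a single still foreground/alpha pair into a moving clip: it samples two random affine poses A and B via torchvision's RandomAffine, then linearly interpolates every pose parameter over the T frames, scaled by a random variation_over_time factor so some clips barely move while others drift strongly towards pose B. A minimal standalone sketch of the same interpolation scheme, with hard-coded endpoint angles standing in for the sampled poses:

import random

def lerp(a, b, percentage):
    return a * (1 - percentage) + b * percentage

T = 5
angleA, angleB = -10.0, 10.0            # two endpoint poses (fixed here for illustration)
variation_over_time = random.random()    # 0 keeps pose A; 1 drifts fully to pose B by the last frame
for t in range(T):
    percentage = (t / (T - 1)) * variation_over_time
    print(t, round(lerp(angleA, angleB, percentage), 2))  # per-frame rotation angle
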
os.makedirs(os.path.join(out_path, 'fgr'), exist_ok=True) + os.makedirs(os.path.join(out_path, 'pha'), exist_ok=True) + os.makedirs(os.path.join(out_path, 'com'), exist_ok=True) + os.makedirs(os.path.join(out_path, 'bgr'), exist_ok=True) + + with Image.open(os.path.join(args.imagematte_dir, 'fgr', imagematte_filename)) as fgr, \ + Image.open(os.path.join(args.imagematte_dir, 'pha', imagematte_filename)) as pha: + fgr = fgr.convert('RGB') + pha = pha.convert('L') + + fgrs = [fgr] * args.num_frames + phas = [pha] * args.num_frames + fgrs, phas = motion_affine(fgrs, phas) + + for t in range(args.num_frames): + fgr = fgrs[t] + pha = phas[t] + + w, h = fgr.size + scale = args.resolution / max(h, w) + w, h = int(w * scale), int(h * scale) + + fgr = fgr.resize((w, h)) + pha = pha.resize((w, h)) + + if h < args.resolution: + pt = (args.resolution - h) // 2 + pb = args.resolution - h - pt + else: + pt = 0 + pb = 0 + + if w < args.resolution: + pl = (args.resolution - w) // 2 + pr = args.resolution - w - pl + else: + pl = 0 + pr = 0 + + fgr = F.pad(fgr, [pl, pt, pr, pb]) + pha = F.pad(pha, [pl, pt, pr, pb]) + + if i // len(imagematte_filenames) % 2 == 1: + fgr = fgr.transpose(Image.FLIP_LEFT_RIGHT) + pha = pha.transpose(Image.FLIP_LEFT_RIGHT) + + fgr.save(os.path.join(out_path, 'fgr', str(t).zfill(4) + args.extension)) + pha.save(os.path.join(out_path, 'pha', str(t).zfill(4) + args.extension)) + + bgr = Image.fromarray(bgrs[t]).convert('RGB') + w, h = bgr.size + scale = args.resolution / min(h, w) + w, h = int(w * scale), int(h * scale) + bgr = bgr.resize((w, h)) + bgr = F.center_crop(bgr, (args.resolution, args.resolution)) + bgr.save(os.path.join(out_path, 'bgr', str(t).zfill(4) + args.extension)) + + pha = np.asarray(pha).astype(float)[:, :, None] / 255 + com = Image.fromarray(np.uint8(np.asarray(fgr) * pha + np.asarray(bgr) * (1 - pha))) + com.save(os.path.join(out_path, 'com', str(t).zfill(4) + args.extension)) + +if __name__ == '__main__': + r = process_map(process, range(args.num_samples), max_workers=10) + diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/evaluation/generate_videomatte_with_background_image.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/evaluation/generate_videomatte_with_background_image.py new file mode 100644 index 0000000..1c9f349 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/evaluation/generate_videomatte_with_background_image.py @@ -0,0 +1,78 @@ +""" +python generate_videomatte_with_background_image.py \ + --videomatte-dir ../matting-data/VideoMatte240K_JPEG_HD/test \ + --background-dir ../matting-data/Backgrounds/valid \ + --num-samples 25 \ + --resize 512 288 \ + --out-dir ../matting-data/evaluation/vidematte_static_sd/ +""" + +import argparse +import os +import pims +import numpy as np +import random +from PIL import Image +from tqdm import tqdm + +parser = argparse.ArgumentParser() +parser.add_argument('--videomatte-dir', type=str, required=True) +parser.add_argument('--background-dir', type=str, required=True) +parser.add_argument('--num-samples', type=int, default=20) +parser.add_argument('--num-frames', type=int, default=100) +parser.add_argument('--resize', type=int, default=None, nargs=2) +parser.add_argument('--out-dir', type=str, required=True) +parser.add_argument('--extension', type=str, default='.png') +args = parser.parse_args() + +random.seed(10) + +videomatte_filenames = [(clipname, sorted(os.listdir(os.path.join(args.videomatte_dir, 'fgr', clipname)))) + for clipname in 
sorted(os.listdir(os.path.join(args.videomatte_dir, 'fgr')))] + +background_filenames = os.listdir(args.background_dir) +random.shuffle(background_filenames) + +for i in range(args.num_samples): + + clipname, framenames = videomatte_filenames[i % len(videomatte_filenames)] + + out_path = os.path.join(args.out_dir, str(i).zfill(4)) + os.makedirs(os.path.join(out_path, 'fgr'), exist_ok=True) + os.makedirs(os.path.join(out_path, 'pha'), exist_ok=True) + os.makedirs(os.path.join(out_path, 'com'), exist_ok=True) + os.makedirs(os.path.join(out_path, 'bgr'), exist_ok=True) + + with Image.open(os.path.join(args.background_dir, background_filenames[i])) as bgr: + bgr = bgr.convert('RGB') + + + base_t = random.choice(range(len(framenames) - args.num_frames)) + + for t in tqdm(range(args.num_frames), desc=str(i).zfill(4)): + with Image.open(os.path.join(args.videomatte_dir, 'fgr', clipname, framenames[base_t + t])) as fgr, \ + Image.open(os.path.join(args.videomatte_dir, 'pha', clipname, framenames[base_t + t])) as pha: + fgr = fgr.convert('RGB') + pha = pha.convert('L') + + if args.resize is not None: + fgr = fgr.resize(args.resize, Image.BILINEAR) + pha = pha.resize(args.resize, Image.BILINEAR) + + + if i // len(videomatte_filenames) % 2 == 1: + fgr = fgr.transpose(Image.FLIP_LEFT_RIGHT) + pha = pha.transpose(Image.FLIP_LEFT_RIGHT) + + fgr.save(os.path.join(out_path, 'fgr', str(t).zfill(4) + args.extension)) + pha.save(os.path.join(out_path, 'pha', str(t).zfill(4) + args.extension)) + + if t == 0: + bgr = bgr.resize(fgr.size, Image.BILINEAR) + bgr.save(os.path.join(out_path, 'bgr', str(t).zfill(4) + args.extension)) + else: + os.symlink(str(0).zfill(4) + args.extension, os.path.join(out_path, 'bgr', str(t).zfill(4) + args.extension)) + + pha = np.asarray(pha).astype(float)[:, :, None] / 255 + com = Image.fromarray(np.uint8(np.asarray(fgr) * pha + np.asarray(bgr) * (1 - pha))) + com.save(os.path.join(out_path, 'com', str(t).zfill(4) + args.extension)) diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/evaluation/generate_videomatte_with_background_video.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/evaluation/generate_videomatte_with_background_video.py new file mode 100644 index 0000000..63c24ce --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/evaluation/generate_videomatte_with_background_video.py @@ -0,0 +1,118 @@ +""" +python generate_videomatte_with_background_video.py \ + --videomatte-dir ../matting-data/VideoMatte240K_JPEG_HD/test \ + --background-dir ../matting-data/BackgroundVideos_mp4/test \ + --resize 512 288 \ + --out-dir ../matting-data/evaluation/vidematte_motion_sd/ +""" + +import argparse +import os +import pims +import numpy as np +import random +from PIL import Image +from tqdm import tqdm + +parser = argparse.ArgumentParser() +parser.add_argument('--videomatte-dir', type=str, required=True) +parser.add_argument('--background-dir', type=str, required=True) +parser.add_argument('--num-samples', type=int, default=20) +parser.add_argument('--num-frames', type=int, default=100) +parser.add_argument('--resize', type=int, default=None, nargs=2) +parser.add_argument('--out-dir', type=str, required=True) +args = parser.parse_args() + +# Hand selected a list of videos +background_filenames = [ + "0000.mp4", + "0007.mp4", + "0008.mp4", + "0010.mp4", + "0013.mp4", + "0015.mp4", + "0016.mp4", + "0018.mp4", + "0021.mp4", + "0029.mp4", + "0033.mp4", + "0035.mp4", + "0039.mp4", + "0050.mp4", + "0052.mp4", + "0055.mp4", + 
"0060.mp4", + "0063.mp4", + "0087.mp4", + "0086.mp4", + "0090.mp4", + "0101.mp4", + "0110.mp4", + "0117.mp4", + "0120.mp4", + "0122.mp4", + "0123.mp4", + "0125.mp4", + "0128.mp4", + "0131.mp4", + "0172.mp4", + "0176.mp4", + "0181.mp4", + "0187.mp4", + "0193.mp4", + "0198.mp4", + "0220.mp4", + "0221.mp4", + "0224.mp4", + "0229.mp4", + "0233.mp4", + "0238.mp4", + "0241.mp4", + "0245.mp4", + "0246.mp4" +] + +random.seed(10) + +videomatte_filenames = [(clipname, sorted(os.listdir(os.path.join(args.videomatte_dir, 'fgr', clipname)))) + for clipname in sorted(os.listdir(os.path.join(args.videomatte_dir, 'fgr')))] + +random.shuffle(background_filenames) + +for i in range(args.num_samples): + bgrs = pims.PyAVVideoReader(os.path.join(args.background_dir, background_filenames[i % len(background_filenames)])) + clipname, framenames = videomatte_filenames[i % len(videomatte_filenames)] + + out_path = os.path.join(args.out_dir, str(i).zfill(4)) + os.makedirs(os.path.join(out_path, 'fgr'), exist_ok=True) + os.makedirs(os.path.join(out_path, 'pha'), exist_ok=True) + os.makedirs(os.path.join(out_path, 'com'), exist_ok=True) + os.makedirs(os.path.join(out_path, 'bgr'), exist_ok=True) + + base_t = random.choice(range(len(framenames) - args.num_frames)) + + for t in tqdm(range(args.num_frames), desc=str(i).zfill(4)): + with Image.open(os.path.join(args.videomatte_dir, 'fgr', clipname, framenames[base_t + t])) as fgr, \ + Image.open(os.path.join(args.videomatte_dir, 'pha', clipname, framenames[base_t + t])) as pha: + fgr = fgr.convert('RGB') + pha = pha.convert('L') + + if args.resize is not None: + fgr = fgr.resize(args.resize, Image.BILINEAR) + pha = pha.resize(args.resize, Image.BILINEAR) + + + if i // len(videomatte_filenames) % 2 == 1: + fgr = fgr.transpose(Image.FLIP_LEFT_RIGHT) + pha = pha.transpose(Image.FLIP_LEFT_RIGHT) + + fgr.save(os.path.join(out_path, 'fgr', str(t).zfill(4) + '.png')) + pha.save(os.path.join(out_path, 'pha', str(t).zfill(4) + '.png')) + + bgr = Image.fromarray(bgrs[t]) + bgr = bgr.resize(fgr.size, Image.BILINEAR) + bgr.save(os.path.join(out_path, 'bgr', str(t).zfill(4) + '.png')) + + pha = np.asarray(pha).astype(float)[:, :, None] / 255 + com = Image.fromarray(np.uint8(np.asarray(fgr) * pha + np.asarray(bgr) * (1 - pha))) + com.save(os.path.join(out_path, 'com', str(t).zfill(4) + '.png')) diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/hubconf.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/hubconf.py new file mode 100644 index 0000000..9b1ceba --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/hubconf.py @@ -0,0 +1,39 @@ +""" +Loading model + model = torch.hub.load("PeterL1n/RobustVideoMatting", "mobilenetv3") + model = torch.hub.load("PeterL1n/RobustVideoMatting", "resnet50") + +Converter API + convert_video = torch.hub.load("PeterL1n/RobustVideoMatting", "converter") +""" + + +dependencies = ['torch', 'torchvision'] + +import torch +from model import MattingNetwork + + +def mobilenetv3(pretrained: bool = True, progress: bool = True): + model = MattingNetwork('mobilenetv3') + if pretrained: + url = 'https://github.com/PeterL1n/RobustVideoMatting/releases/download/v1.0.0/rvm_mobilenetv3.pth' + model.load_state_dict(torch.hub.load_state_dict_from_url(url, map_location='cpu', progress=progress)) + return model + + +def resnet50(pretrained: bool = True, progress: bool = True): + model = MattingNetwork('resnet50') + if pretrained: + url = 
'https://github.com/PeterL1n/RobustVideoMatting/releases/download/v1.0.0/rvm_resnet50.pth' + model.load_state_dict(torch.hub.load_state_dict_from_url(url, map_location='cpu', progress=progress)) + return model + + +def converter(): + try: + from inference import convert_video + return convert_video + except ModuleNotFoundError as error: + print(error) + print('Please run "pip install av tqdm pims"') diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/inference.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/inference.py new file mode 100644 index 0000000..1e5471c --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/inference.py @@ -0,0 +1,207 @@ +""" +python inference.py \ + --variant mobilenetv3 \ + --checkpoint "CHECKPOINT" \ + --device cuda \ + --input-source "input.mp4" \ + --output-type video \ + --output-composition "composition.mp4" \ + --output-alpha "alpha.mp4" \ + --output-foreground "foreground.mp4" \ + --output-video-mbps 4 \ + --seq-chunk 1 +""" + +import torch +import os +from torch.utils.data import DataLoader +from torchvision import transforms +from typing import Optional, Tuple +from tqdm.auto import tqdm + +from .inference_utils import * + +def convert_video(model, + input_source: str, + input_resize: Optional[Tuple[int, int]] = None, + downsample_ratio: Optional[float] = None, + output_type: str = 'video', + output_composition: Optional[str] = None, + output_alpha: Optional[str] = None, + output_foreground: Optional[str] = None, + output_video_mbps: Optional[float] = None, + seq_chunk: int = 1, + num_workers: int = 0, + progress: bool = True, + device: Optional[str] = None, + dtype: Optional[torch.dtype] = None): + + """ + Args: + input_source: A video file, or an image sequence directory. Images must be sorted in ascending order; png and jpg are supported. + input_resize: If provided, the input is first resized to (w, h). + downsample_ratio: The model's downsample_ratio hyperparameter. If not provided, the model automatically sets one. + output_type: Options: ["video", "png_sequence"]. + output_composition: + The composition output path. File path if output_type == 'video'. Directory path if output_type == 'png_sequence'. + If output_type == 'video', the composition has green screen background. + If output_type == 'png_sequence', the composition is RGBA png images. + output_alpha: The alpha output from the model. + output_foreground: The foreground output from the model. + seq_chunk: Number of frames to process at once. Increase it for better parallelism. + num_workers: PyTorch's DataLoader workers. Only use >0 for image input. + progress: Show progress bar. + device: Only needs to be provided manually if model is a TorchScript frozen model. + dtype: Only needs to be provided manually if model is a TorchScript frozen model. + """ + + assert downsample_ratio is None or (downsample_ratio > 0 and downsample_ratio <= 1), 'Downsample ratio must be between 0 (exclusive) and 1 (inclusive).' + assert any([output_composition, output_alpha, output_foreground]), 'Must provide at least one output.' + assert output_type in ['video', 'png_sequence'], 'Only "video" and "png_sequence" output modes are supported.' 
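hubconf.py above exposes both model variants and this converter through the Torch Hub API, so convert_video can be driven without cloning the repository. A minimal sketch of that route, assuming the upstream PeterL1n/RobustVideoMatting hub entry points are reachable (the input and output file names are placeholders):

import torch

model = torch.hub.load("PeterL1n/RobustVideoMatting", "mobilenetv3")      # pretrained matting network
convert_video = torch.hub.load("PeterL1n/RobustVideoMatting", "converter")
convert_video(model,
              input_source="input.mp4",            # placeholder clip
              output_type="video",
              output_composition="composition.mp4",  # green-screen composite
              output_video_mbps=4)
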
+ assert seq_chunk >= 1, 'Sequence chunk must be >= 1' + assert num_workers >= 0, 'Number of workers must be >= 0' + + # Initialize transform + if input_resize is not None: + transform = transforms.Compose([ + transforms.Resize(input_resize[::-1]), + transforms.ToTensor() + ]) + else: + transform = transforms.ToTensor() + + # Initialize reader + if os.path.isfile(input_source): + source = VideoReader(input_source, transform) + else: + source = ImageSequenceReader(input_source, transform) + reader = DataLoader(source, batch_size=seq_chunk, pin_memory=True, num_workers=num_workers) + + # Initialize writers + if output_type == 'video': + frame_rate = source.frame_rate if isinstance(source, VideoReader) else 30 + output_video_mbps = 1 if output_video_mbps is None else output_video_mbps + if output_composition is not None: + writer_com = VideoWriter( + path=output_composition, + frame_rate=frame_rate, + bit_rate=int(output_video_mbps * 1000000)) + if output_alpha is not None: + writer_pha = VideoWriter( + path=output_alpha, + frame_rate=frame_rate, + bit_rate=int(output_video_mbps * 1000000)) + if output_foreground is not None: + writer_fgr = VideoWriter( + path=output_foreground, + frame_rate=frame_rate, + bit_rate=int(output_video_mbps * 1000000)) + else: + if output_composition is not None: + writer_com = ImageSequenceWriter(output_composition, 'png') + if output_alpha is not None: + writer_pha = ImageSequenceWriter(output_alpha, 'png') + if output_foreground is not None: + writer_fgr = ImageSequenceWriter(output_foreground, 'png') + + # Inference + model = model.eval() + if device is None or dtype is None: + param = next(model.parameters()) + dtype = param.dtype + device = param.device + + if (output_composition is not None) and (output_type == 'video'): + bgr = torch.tensor([120, 255, 155], device=device, dtype=dtype).div(255).view(1, 1, 3, 1, 1) + + try: + with torch.no_grad(): + bar = tqdm(total=len(source), disable=not progress, dynamic_ncols=True) + rec = [None] * 4 + for src in reader: + + if downsample_ratio is None: + downsample_ratio = auto_downsample_ratio(*src.shape[2:]) + + src = src.to(device, dtype, non_blocking=True).unsqueeze(0) # [B, T, C, H, W] + fgr, pha, *rec = model(src, *rec, downsample_ratio) + + if output_foreground is not None: + writer_fgr.write(fgr[0]) + if output_alpha is not None: + writer_pha.write(pha[0]) + if output_composition is not None: + if output_type == 'video': + com = fgr * pha + bgr * (1 - pha) + else: + fgr = fgr * pha.gt(0) + com = torch.cat([fgr, pha], dim=-3) + writer_com.write(com[0]) + + bar.update(src.size(1)) + + finally: + # Clean up + if output_composition is not None: + writer_com.close() + if output_alpha is not None: + writer_pha.close() + if output_foreground is not None: + writer_fgr.close() + + +def auto_downsample_ratio(h, w): + """ + Automatically find a downsample ratio so that the largest side of the resolution be 512px. 
+ """ + return min(512 / max(h, w), 1) + + +class Converter: + def __init__(self, variant: str, checkpoint: str, device: str): + self.model = MattingNetwork(variant).eval().to(device) + self.model.load_state_dict(torch.load(checkpoint, map_location=device)) + self.model = torch.jit.script(self.model) + self.model = torch.jit.freeze(self.model) + self.device = device + + def convert(self, *args, **kwargs): + convert_video(self.model, device=self.device, dtype=torch.float32, *args, **kwargs) + +if __name__ == '__main__': + import argparse + from model import MattingNetwork + + parser = argparse.ArgumentParser() + parser.add_argument('--variant', type=str, required=True, choices=['mobilenetv3', 'resnet50']) + parser.add_argument('--checkpoint', type=str, required=True) + parser.add_argument('--device', type=str, required=True) + parser.add_argument('--input-source', type=str, required=True) + parser.add_argument('--input-resize', type=int, default=None, nargs=2) + parser.add_argument('--downsample-ratio', type=float) + parser.add_argument('--output-composition', type=str) + parser.add_argument('--output-alpha', type=str) + parser.add_argument('--output-foreground', type=str) + parser.add_argument('--output-type', type=str, required=True, choices=['video', 'png_sequence']) + parser.add_argument('--output-video-mbps', type=int, default=1) + parser.add_argument('--seq-chunk', type=int, default=1) + parser.add_argument('--num-workers', type=int, default=0) + parser.add_argument('--disable-progress', action='store_true') + args = parser.parse_args() + + converter = Converter(args.variant, args.checkpoint, args.device) + converter.convert( + input_source=args.input_source, + input_resize=args.input_resize, + downsample_ratio=args.downsample_ratio, + output_type=args.output_type, + output_composition=args.output_composition, + output_alpha=args.output_alpha, + output_foreground=args.output_foreground, + output_video_mbps=args.output_video_mbps, + seq_chunk=args.seq_chunk, + num_workers=args.num_workers, + progress=not args.disable_progress + ) + + diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/inference_speed_test.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/inference_speed_test.py new file mode 100644 index 0000000..2f6808f --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/inference_speed_test.py @@ -0,0 +1,50 @@ +""" +python inference_speed_test.py \ + --model-variant mobilenetv3 \ + --resolution 1920 1080 \ + --downsample-ratio 0.25 \ + --precision float32 +""" + +import argparse +import torch +from tqdm import tqdm + +from model.model import MattingNetwork + +torch.backends.cudnn.benchmark = True + +class InferenceSpeedTest: + def __init__(self): + self.parse_args() + self.init_model() + self.loop() + + def parse_args(self): + parser = argparse.ArgumentParser() + parser.add_argument('--model-variant', type=str, required=True) + parser.add_argument('--resolution', type=int, required=True, nargs=2) + parser.add_argument('--downsample-ratio', type=float, required=True) + parser.add_argument('--precision', type=str, default='float32') + parser.add_argument('--disable-refiner', action='store_true') + self.args = parser.parse_args() + + def init_model(self): + self.device = 'cuda' + self.precision = {'float32': torch.float32, 'float16': torch.float16}[self.args.precision] + self.model = MattingNetwork(self.args.model_variant) + self.model = self.model.to(device=self.device, dtype=self.precision).eval() + self.model = 
torch.jit.script(self.model) + self.model = torch.jit.freeze(self.model) + + def loop(self): + w, h = self.args.resolution + src = torch.randn((1, 3, h, w), device=self.device, dtype=self.precision) + with torch.no_grad(): + rec = None, None, None, None + for _ in tqdm(range(1000)): + fgr, pha, *rec = self.model(src, *rec, self.args.downsample_ratio) + torch.cuda.synchronize() + +if __name__ == '__main__': + InferenceSpeedTest() \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/inference_utils.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/inference_utils.py new file mode 100644 index 0000000..ceada29 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/inference_utils.py @@ -0,0 +1,88 @@ +import av +import os +import pims +import numpy as np +from torch.utils.data import Dataset +from torchvision.transforms.functional import to_pil_image +from PIL import Image + + +class VideoReader(Dataset): + def __init__(self, path, transform=None): + self.video = pims.PyAVVideoReader(path) + self.rate = self.video.frame_rate + self.transform = transform + + @property + def frame_rate(self): + return self.rate + + def __len__(self): + return len(self.video) + + def __getitem__(self, idx): + frame = self.video[idx] + frame = Image.fromarray(np.asarray(frame)) + if self.transform is not None: + frame = self.transform(frame) + return frame + + +class VideoWriter: + def __init__(self, path, frame_rate, bit_rate=1000000): + self.container = av.open(path, mode='w') + self.stream = self.container.add_stream('h264', rate=round(frame_rate)) + self.stream.pix_fmt = 'yuv420p' + self.stream.bit_rate = bit_rate + + def write(self, frames): + # frames: [T, C, H, W] + self.stream.width = frames.size(3) + self.stream.height = frames.size(2) + if frames.size(1) == 1: + frames = frames.repeat(1, 3, 1, 1) # convert grayscale to RGB + frames = frames.mul(255).byte().cpu().permute(0, 2, 3, 1).numpy() + for t in range(frames.shape[0]): + frame = frames[t] + frame = av.VideoFrame.from_ndarray(frame, format='rgb24') + self.container.mux(self.stream.encode(frame)) + + def close(self): + self.container.mux(self.stream.encode()) + self.container.close() + + +class ImageSequenceReader(Dataset): + def __init__(self, path, transform=None): + self.path = path + self.files = sorted(os.listdir(path)) + self.transform = transform + + def __len__(self): + return len(self.files) + + def __getitem__(self, idx): + with Image.open(os.path.join(self.path, self.files[idx])) as img: + img.load() + if self.transform is not None: + return self.transform(img) + return img + + +class ImageSequenceWriter: + def __init__(self, path, extension='jpg'): + self.path = path + self.extension = extension + self.counter = 0 + os.makedirs(path, exist_ok=True) + + def write(self, frames): + # frames: [T, C, H, W] + for t in range(frames.shape[0]): + to_pil_image(frames[t]).save(os.path.join( + self.path, '%05d' % self.counter + '.' 
+ self.extension)) + self.counter += 1 + + def close(self): + pass + \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/__init__.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/__init__.py new file mode 100644 index 0000000..ac047a1 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/__init__.py @@ -0,0 +1 @@ +from .model import MattingNetwork \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/decoder.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/decoder.py new file mode 100644 index 0000000..7307435 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/decoder.py @@ -0,0 +1,210 @@ +import torch +from torch import Tensor +from torch import nn +from torch.nn import functional as F +from typing import Tuple, Optional + +class RecurrentDecoder(nn.Module): + def __init__(self, feature_channels, decoder_channels): + super().__init__() + self.avgpool = AvgPool() + self.decode4 = BottleneckBlock(feature_channels[3]) + self.decode3 = UpsamplingBlock(feature_channels[3], feature_channels[2], 3, decoder_channels[0]) + self.decode2 = UpsamplingBlock(decoder_channels[0], feature_channels[1], 3, decoder_channels[1]) + self.decode1 = UpsamplingBlock(decoder_channels[1], feature_channels[0], 3, decoder_channels[2]) + self.decode0 = OutputBlock(decoder_channels[2], 3, decoder_channels[3]) + + def forward(self, + s0: Tensor, f1: Tensor, f2: Tensor, f3: Tensor, f4: Tensor, + r1: Optional[Tensor], r2: Optional[Tensor], + r3: Optional[Tensor], r4: Optional[Tensor]): + s1, s2, s3 = self.avgpool(s0) + x4, r4 = self.decode4(f4, r4) + x3, r3 = self.decode3(x4, f3, s3, r3) + x2, r2 = self.decode2(x3, f2, s2, r2) + x1, r1 = self.decode1(x2, f1, s1, r1) + x0 = self.decode0(x1, s0) + return x0, r1, r2, r3, r4 + + +class AvgPool(nn.Module): + def __init__(self): + super().__init__() + self.avgpool = nn.AvgPool2d(2, 2, count_include_pad=False, ceil_mode=True) + + def forward_single_frame(self, s0): + s1 = self.avgpool(s0) + s2 = self.avgpool(s1) + s3 = self.avgpool(s2) + return s1, s2, s3 + + def forward_time_series(self, s0): + B, T = s0.shape[:2] + s0 = s0.flatten(0, 1) + s1, s2, s3 = self.forward_single_frame(s0) + s1 = s1.unflatten(0, (B, T)) + s2 = s2.unflatten(0, (B, T)) + s3 = s3.unflatten(0, (B, T)) + return s1, s2, s3 + + def forward(self, s0): + if s0.ndim == 5: + return self.forward_time_series(s0) + else: + return self.forward_single_frame(s0) + + +class BottleneckBlock(nn.Module): + def __init__(self, channels): + super().__init__() + self.channels = channels + self.gru = ConvGRU(channels // 2) + + def forward(self, x, r: Optional[Tensor]): + a, b = x.split(self.channels // 2, dim=-3) + b, r = self.gru(b, r) + x = torch.cat([a, b], dim=-3) + return x, r + + +class UpsamplingBlock(nn.Module): + def __init__(self, in_channels, skip_channels, src_channels, out_channels): + super().__init__() + self.out_channels = out_channels + self.upsample = nn.Upsample(scale_factor=2, mode='bilinear', align_corners=False) + self.conv = nn.Sequential( + nn.Conv2d(in_channels + skip_channels + src_channels, out_channels, 3, 1, 1, bias=False), + nn.BatchNorm2d(out_channels), + nn.ReLU(True), + ) + self.gru = ConvGRU(out_channels // 2) + + def forward_single_frame(self, x, f, s, r: Optional[Tensor]): + x = self.upsample(x) + x = x[:, :, :s.size(2), :s.size(3)] + x = torch.cat([x, f, s], dim=1) + x = 
self.conv(x) + a, b = x.split(self.out_channels // 2, dim=1) + b, r = self.gru(b, r) + x = torch.cat([a, b], dim=1) + return x, r + + def forward_time_series(self, x, f, s, r: Optional[Tensor]): + B, T, _, H, W = s.shape + x = x.flatten(0, 1) + f = f.flatten(0, 1) + s = s.flatten(0, 1) + x = self.upsample(x) + x = x[:, :, :H, :W] + x = torch.cat([x, f, s], dim=1) + x = self.conv(x) + x = x.unflatten(0, (B, T)) + a, b = x.split(self.out_channels // 2, dim=2) + b, r = self.gru(b, r) + x = torch.cat([a, b], dim=2) + return x, r + + def forward(self, x, f, s, r: Optional[Tensor]): + if x.ndim == 5: + return self.forward_time_series(x, f, s, r) + else: + return self.forward_single_frame(x, f, s, r) + + +class OutputBlock(nn.Module): + def __init__(self, in_channels, src_channels, out_channels): + super().__init__() + self.upsample = nn.Upsample(scale_factor=2, mode='bilinear', align_corners=False) + self.conv = nn.Sequential( + nn.Conv2d(in_channels + src_channels, out_channels, 3, 1, 1, bias=False), + nn.BatchNorm2d(out_channels), + nn.ReLU(True), + nn.Conv2d(out_channels, out_channels, 3, 1, 1, bias=False), + nn.BatchNorm2d(out_channels), + nn.ReLU(True), + ) + + def forward_single_frame(self, x, s): + x = self.upsample(x) + x = x[:, :, :s.size(2), :s.size(3)] + x = torch.cat([x, s], dim=1) + x = self.conv(x) + return x + + def forward_time_series(self, x, s): + B, T, _, H, W = s.shape + x = x.flatten(0, 1) + s = s.flatten(0, 1) + x = self.upsample(x) + x = x[:, :, :H, :W] + x = torch.cat([x, s], dim=1) + x = self.conv(x) + x = x.unflatten(0, (B, T)) + return x + + def forward(self, x, s): + if x.ndim == 5: + return self.forward_time_series(x, s) + else: + return self.forward_single_frame(x, s) + + +class ConvGRU(nn.Module): + def __init__(self, + channels: int, + kernel_size: int = 3, + padding: int = 1): + super().__init__() + self.channels = channels + self.ih = nn.Sequential( + nn.Conv2d(channels * 2, channels * 2, kernel_size, padding=padding), + nn.Sigmoid() + ) + self.hh = nn.Sequential( + nn.Conv2d(channels * 2, channels, kernel_size, padding=padding), + nn.Tanh() + ) + + def forward_single_frame(self, x, h): + r, z = self.ih(torch.cat([x, h], dim=1)).split(self.channels, dim=1) + c = self.hh(torch.cat([x, r * h], dim=1)) + h = (1 - z) * h + z * c + return h, h + + def forward_time_series(self, x, h): + o = [] + for xt in x.unbind(dim=1): + ot, h = self.forward_single_frame(xt, h) + o.append(ot) + o = torch.stack(o, dim=1) + return o, h + + def forward(self, x, h: Optional[Tensor]): + if h is None: + h = torch.zeros((x.size(0), x.size(-3), x.size(-2), x.size(-1)), + device=x.device, dtype=x.dtype) + + if x.ndim == 5: + return self.forward_time_series(x, h) + else: + return self.forward_single_frame(x, h) + + +class Projection(nn.Module): + def __init__(self, in_channels, out_channels): + super().__init__() + self.conv = nn.Conv2d(in_channels, out_channels, 1) + + def forward_single_frame(self, x): + return self.conv(x) + + def forward_time_series(self, x): + B, T = x.shape[:2] + return self.conv(x.flatten(0, 1)).unflatten(0, (B, T)) + + def forward(self, x): + if x.ndim == 5: + return self.forward_time_series(x) + else: + return self.forward_single_frame(x) + \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/deep_guided_filter.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/deep_guided_filter.py new file mode 100644 index 0000000..a24b8c5 --- /dev/null +++ 
b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/deep_guided_filter.py @@ -0,0 +1,61 @@ +import torch +from torch import nn +from torch.nn import functional as F + +""" +Adapted from +""" + +class DeepGuidedFilterRefiner(nn.Module): + def __init__(self, hid_channels=16): + super().__init__() + self.box_filter = nn.Conv2d(4, 4, kernel_size=3, padding=1, bias=False, groups=4) + self.box_filter.weight.data[...] = 1 / 9 + self.conv = nn.Sequential( + nn.Conv2d(4 * 2 + hid_channels, hid_channels, kernel_size=1, bias=False), + nn.BatchNorm2d(hid_channels), + nn.ReLU(True), + nn.Conv2d(hid_channels, hid_channels, kernel_size=1, bias=False), + nn.BatchNorm2d(hid_channels), + nn.ReLU(True), + nn.Conv2d(hid_channels, 4, kernel_size=1, bias=True) + ) + + def forward_single_frame(self, fine_src, base_src, base_fgr, base_pha, base_hid): + fine_x = torch.cat([fine_src, fine_src.mean(1, keepdim=True)], dim=1) + base_x = torch.cat([base_src, base_src.mean(1, keepdim=True)], dim=1) + base_y = torch.cat([base_fgr, base_pha], dim=1) + + mean_x = self.box_filter(base_x) + mean_y = self.box_filter(base_y) + cov_xy = self.box_filter(base_x * base_y) - mean_x * mean_y + var_x = self.box_filter(base_x * base_x) - mean_x * mean_x + + A = self.conv(torch.cat([cov_xy, var_x, base_hid], dim=1)) + b = mean_y - A * mean_x + + H, W = fine_src.shape[2:] + A = F.interpolate(A, (H, W), mode='bilinear', align_corners=False) + b = F.interpolate(b, (H, W), mode='bilinear', align_corners=False) + + out = A * fine_x + b + fgr, pha = out.split([3, 1], dim=1) + return fgr, pha + + def forward_time_series(self, fine_src, base_src, base_fgr, base_pha, base_hid): + B, T = fine_src.shape[:2] + fgr, pha = self.forward_single_frame( + fine_src.flatten(0, 1), + base_src.flatten(0, 1), + base_fgr.flatten(0, 1), + base_pha.flatten(0, 1), + base_hid.flatten(0, 1)) + fgr = fgr.unflatten(0, (B, T)) + pha = pha.unflatten(0, (B, T)) + return fgr, pha + + def forward(self, fine_src, base_src, base_fgr, base_pha, base_hid): + if fine_src.ndim == 5: + return self.forward_time_series(fine_src, base_src, base_fgr, base_pha, base_hid) + else: + return self.forward_single_frame(fine_src, base_src, base_fgr, base_pha, base_hid) diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/fast_guided_filter.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/fast_guided_filter.py new file mode 100644 index 0000000..df9b4b2 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/fast_guided_filter.py @@ -0,0 +1,76 @@ +import torch +from torch import nn +from torch.nn import functional as F + +""" +Adapted from +""" + +class FastGuidedFilterRefiner(nn.Module): + def __init__(self, *args, **kwargs): + super().__init__() + self.guided_filter = FastGuidedFilter(1) + + def forward_single_frame(self, fine_src, base_src, base_fgr, base_pha): + fine_src_gray = fine_src.mean(1, keepdim=True) + base_src_gray = base_src.mean(1, keepdim=True) + + fgr, pha = self.guided_filter( + torch.cat([base_src, base_src_gray], dim=1), + torch.cat([base_fgr, base_pha], dim=1), + torch.cat([fine_src, fine_src_gray], dim=1)).split([3, 1], dim=1) + + return fgr, pha + + def forward_time_series(self, fine_src, base_src, base_fgr, base_pha): + B, T = fine_src.shape[:2] + fgr, pha = self.forward_single_frame( + fine_src.flatten(0, 1), + base_src.flatten(0, 1), + base_fgr.flatten(0, 1), + base_pha.flatten(0, 1)) + fgr = fgr.unflatten(0, (B, T)) + pha = pha.unflatten(0, (B, T)) + return fgr, pha + + def 
forward(self, fine_src, base_src, base_fgr, base_pha, base_hid): + if fine_src.ndim == 5: + return self.forward_time_series(fine_src, base_src, base_fgr, base_pha) + else: + return self.forward_single_frame(fine_src, base_src, base_fgr, base_pha) + + +class FastGuidedFilter(nn.Module): + def __init__(self, r: int, eps: float = 1e-5): + super().__init__() + self.r = r + self.eps = eps + self.boxfilter = BoxFilter(r) + + def forward(self, lr_x, lr_y, hr_x): + mean_x = self.boxfilter(lr_x) + mean_y = self.boxfilter(lr_y) + cov_xy = self.boxfilter(lr_x * lr_y) - mean_x * mean_y + var_x = self.boxfilter(lr_x * lr_x) - mean_x * mean_x + A = cov_xy / (var_x + self.eps) + b = mean_y - A * mean_x + A = F.interpolate(A, hr_x.shape[2:], mode='bilinear', align_corners=False) + b = F.interpolate(b, hr_x.shape[2:], mode='bilinear', align_corners=False) + return A * hr_x + b + + +class BoxFilter(nn.Module): + def __init__(self, r): + super(BoxFilter, self).__init__() + self.r = r + + def forward(self, x): + # Note: The original implementation at + # uses faster box blur. However, it may not be friendly for ONNX export. + # We are switching to use simple convolution for box blur. + kernel_size = 2 * self.r + 1 + kernel_x = torch.full((x.data.shape[1], 1, 1, kernel_size), 1 / kernel_size, device=x.device, dtype=x.dtype) + kernel_y = torch.full((x.data.shape[1], 1, kernel_size, 1), 1 / kernel_size, device=x.device, dtype=x.dtype) + x = F.conv2d(x, kernel_x, padding=(0, self.r), groups=x.data.shape[1]) + x = F.conv2d(x, kernel_y, padding=(self.r, 0), groups=x.data.shape[1]) + return x \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/lraspp.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/lraspp.py new file mode 100644 index 0000000..5fc7079 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/lraspp.py @@ -0,0 +1,29 @@ +from torch import nn + +class LRASPP(nn.Module): + def __init__(self, in_channels, out_channels): + super().__init__() + self.aspp1 = nn.Sequential( + nn.Conv2d(in_channels, out_channels, 1, bias=False), + nn.BatchNorm2d(out_channels), + nn.ReLU(True) + ) + self.aspp2 = nn.Sequential( + nn.AdaptiveAvgPool2d(1), + nn.Conv2d(in_channels, out_channels, 1, bias=False), + nn.Sigmoid() + ) + + def forward_single_frame(self, x): + return self.aspp1(x) * self.aspp2(x) + + def forward_time_series(self, x): + B, T = x.shape[:2] + x = self.forward_single_frame(x.flatten(0, 1)).unflatten(0, (B, T)) + return x + + def forward(self, x): + if x.ndim == 5: + return self.forward_time_series(x) + else: + return self.forward_single_frame(x) \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/mobilenetv3.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/mobilenetv3.py new file mode 100644 index 0000000..9e863d3 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/mobilenetv3.py @@ -0,0 +1,73 @@ +from torch import nn +from torchvision.models.mobilenetv3 import MobileNetV3, InvertedResidualConfig +# from torchvision.models.utils import load_state_dict_from_url +from torch.utils.model_zoo import load_url as load_state_dict_from_url +from torchvision.transforms.functional import normalize + +class MobileNetV3LargeEncoder(MobileNetV3): + def __init__(self, pretrained: bool = False): + super().__init__( + inverted_residual_setting=[ + InvertedResidualConfig( 16, 3, 16, 16, False, "RE", 1, 1, 1), + 
InvertedResidualConfig( 16, 3, 64, 24, False, "RE", 2, 1, 1), # C1 + InvertedResidualConfig( 24, 3, 72, 24, False, "RE", 1, 1, 1), + InvertedResidualConfig( 24, 5, 72, 40, True, "RE", 2, 1, 1), # C2 + InvertedResidualConfig( 40, 5, 120, 40, True, "RE", 1, 1, 1), + InvertedResidualConfig( 40, 5, 120, 40, True, "RE", 1, 1, 1), + InvertedResidualConfig( 40, 3, 240, 80, False, "HS", 2, 1, 1), # C3 + InvertedResidualConfig( 80, 3, 200, 80, False, "HS", 1, 1, 1), + InvertedResidualConfig( 80, 3, 184, 80, False, "HS", 1, 1, 1), + InvertedResidualConfig( 80, 3, 184, 80, False, "HS", 1, 1, 1), + InvertedResidualConfig( 80, 3, 480, 112, True, "HS", 1, 1, 1), + InvertedResidualConfig(112, 3, 672, 112, True, "HS", 1, 1, 1), + InvertedResidualConfig(112, 5, 672, 160, True, "HS", 2, 2, 1), # C4 + InvertedResidualConfig(160, 5, 960, 160, True, "HS", 1, 2, 1), + InvertedResidualConfig(160, 5, 960, 160, True, "HS", 1, 2, 1), + ], + last_channel=1280 + ) + + if pretrained: + self.load_state_dict(load_state_dict_from_url( + 'https://download.pytorch.org/models/mobilenet_v3_large-8738ca79.pth')) + + del self.avgpool + del self.classifier + + def forward_single_frame(self, x): + x = normalize(x, [0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) + + x = self.features[0](x) + x = self.features[1](x) + f1 = x + x = self.features[2](x) + x = self.features[3](x) + f2 = x + x = self.features[4](x) + x = self.features[5](x) + x = self.features[6](x) + f3 = x + x = self.features[7](x) + x = self.features[8](x) + x = self.features[9](x) + x = self.features[10](x) + x = self.features[11](x) + x = self.features[12](x) + x = self.features[13](x) + x = self.features[14](x) + x = self.features[15](x) + x = self.features[16](x) + f4 = x + return [f1, f2, f3, f4] + + def forward_time_series(self, x): + B, T = x.shape[:2] + features = self.forward_single_frame(x.flatten(0, 1)) + features = [f.unflatten(0, (B, T)) for f in features] + return features + + def forward(self, x): + if x.ndim == 5: + return self.forward_time_series(x) + else: + return self.forward_single_frame(x) diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/model.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/model.py new file mode 100644 index 0000000..71fc684 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/model.py @@ -0,0 +1,79 @@ +import torch +from torch import Tensor +from torch import nn +from torch.nn import functional as F +from typing import Optional, List + +from .mobilenetv3 import MobileNetV3LargeEncoder +from .resnet import ResNet50Encoder +from .lraspp import LRASPP +from .decoder import RecurrentDecoder, Projection +from .fast_guided_filter import FastGuidedFilterRefiner +from .deep_guided_filter import DeepGuidedFilterRefiner + +class MattingNetwork(nn.Module): + def __init__(self, + variant: str = 'mobilenetv3', + refiner: str = 'deep_guided_filter', + pretrained_backbone: bool = False): + super().__init__() + assert variant in ['mobilenetv3', 'resnet50'] + assert refiner in ['fast_guided_filter', 'deep_guided_filter'] + + if variant == 'mobilenetv3': + self.backbone = MobileNetV3LargeEncoder(pretrained_backbone) + self.aspp = LRASPP(960, 128) + self.decoder = RecurrentDecoder([16, 24, 40, 128], [80, 40, 32, 16]) + else: + self.backbone = ResNet50Encoder(pretrained_backbone) + self.aspp = LRASPP(2048, 256) + self.decoder = RecurrentDecoder([64, 256, 512, 256], [128, 64, 32, 16]) + + self.project_mat = Projection(16, 4) + self.project_seg = Projection(16, 1) + + 
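MattingNetwork's forward pass (just below) threads four recurrent states through time and returns them alongside the outputs, which is what makes streaming inference cheap: each chunk of frames reuses the states left behind by the previous chunk, exactly as the loop in inference.py earlier in this diff does. A minimal sketch with a random clip and untrained weights, assuming the default mobilenetv3 variant and that it is run from the RobustVideoMatting directory:

import torch
from model import MattingNetwork

model = MattingNetwork('mobilenetv3').eval()
src = torch.rand(1, 3, 3, 256, 256)   # [B, T, C, H, W] dummy clip of 3 frames
rec = [None] * 4                       # r1..r4 recurrent states, created lazily on first call
with torch.no_grad():
    fgr, pha, *rec = model(src, *rec, downsample_ratio=0.5)
print(fgr.shape, pha.shape)            # [1, 3, 3, 256, 256] and [1, 3, 1, 256, 256]
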
if refiner == 'deep_guided_filter': + self.refiner = DeepGuidedFilterRefiner() + else: + self.refiner = FastGuidedFilterRefiner() + + def forward(self, + src: Tensor, + r1: Optional[Tensor] = None, + r2: Optional[Tensor] = None, + r3: Optional[Tensor] = None, + r4: Optional[Tensor] = None, + downsample_ratio: float = 1, + segmentation_pass: bool = False): + + if downsample_ratio != 1: + src_sm = self._interpolate(src, scale_factor=downsample_ratio) + else: + src_sm = src + + f1, f2, f3, f4 = self.backbone(src_sm) + f4 = self.aspp(f4) + hid, *rec = self.decoder(src_sm, f1, f2, f3, f4, r1, r2, r3, r4) + + if not segmentation_pass: + fgr_residual, pha = self.project_mat(hid).split([3, 1], dim=-3) + if downsample_ratio != 1: + fgr_residual, pha = self.refiner(src, src_sm, fgr_residual, pha, hid) + fgr = fgr_residual + src + fgr = fgr.clamp(0., 1.) + pha = pha.clamp(0., 1.) + return [fgr, pha, *rec] + else: + seg = self.project_seg(hid) + return [seg, *rec] + + def _interpolate(self, x: Tensor, scale_factor: float): + if x.ndim == 5: + B, T = x.shape[:2] + x = F.interpolate(x.flatten(0, 1), scale_factor=scale_factor, + mode='bilinear', align_corners=False, recompute_scale_factor=False) + x = x.unflatten(0, (B, T)) + else: + x = F.interpolate(x, scale_factor=scale_factor, + mode='bilinear', align_corners=False, recompute_scale_factor=False) + return x diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/resnet.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/resnet.py new file mode 100644 index 0000000..ecb3304 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/model/resnet.py @@ -0,0 +1,45 @@ +from torch import nn +from torchvision.models.resnet import ResNet, Bottleneck +from torch.utils.model_zoo import load_url as load_state_dict_from_url + +class ResNet50Encoder(ResNet): + def __init__(self, pretrained: bool = False): + super().__init__( + block=Bottleneck, + layers=[3, 4, 6, 3], + replace_stride_with_dilation=[False, False, True], + norm_layer=None) + + if pretrained: + self.load_state_dict(load_state_dict_from_url( + 'https://download.pytorch.org/models/resnet50-0676ba61.pth')) + + del self.avgpool + del self.fc + + def forward_single_frame(self, x): + x = self.conv1(x) + x = self.bn1(x) + x = self.relu(x) + f1 = x # 1/2 + x = self.maxpool(x) + x = self.layer1(x) + f2 = x # 1/4 + x = self.layer2(x) + f3 = x # 1/8 + x = self.layer3(x) + x = self.layer4(x) + f4 = x # 1/16 + return [f1, f2, f3, f4] + + def forward_time_series(self, x): + B, T = x.shape[:2] + features = self.forward_single_frame(x.flatten(0, 1)) + features = [f.unflatten(0, (B, T)) for f in features] + return features + + def forward(self, x): + if x.ndim == 5: + return self.forward_time_series(x) + else: + return self.forward_single_frame(x) diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/requirements_inference.txt b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/requirements_inference.txt new file mode 100644 index 0000000..4b24a22 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/requirements_inference.txt @@ -0,0 +1,5 @@ +av==8.0.3 +torch==1.9.0 +torchvision==0.10.0 +tqdm==4.61.1 +pims==0.5 \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/requirements_training.txt b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/requirements_training.txt new file mode 100644 index 0000000..70fd4b1 --- /dev/null +++ 
b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/requirements_training.txt @@ -0,0 +1,5 @@ +easing_functions==1.0.4 +tensorboard==2.5.0 +torch==1.9.0 +torchvision==0.10.0 +tqdm==4.61.1 \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/test_matting.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/test_matting.py new file mode 100644 index 0000000..f5711d7 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/test_matting.py @@ -0,0 +1,24 @@ +import torch +import os +from model import MattingNetwork +from inference import convert_video + +input_video_path = '/home/alberto/data/nerf-videosynth/MTC_data/Clara/Clara.mp4' +checkpoint_path = 'checkpoints/' +results_path = '/home/alberto/data/nerf-videosynth/MTC_data/Clara/matting/' +os.makedirs(results_path, exist_ok=True) + +# load model +model = MattingNetwork('mobilenetv3').eval().cuda() # or "resnet50" +model.load_state_dict(torch.load(checkpoint_path + 'rvm_mobilenetv3.pth')) + + +convert_video( + model, # The model, can be on any device (cpu or cuda). + input_source=input_video_path, # A video file or an image sequence directory. + output_type='png_sequence', # Choose "video" or "png_sequence" + output_composition=results_path, # File path if video; directory path if png sequence. + output_video_mbps=4, # Output video mbps. Not needed for png sequence. + downsample_ratio=None, # A hyperparameter to adjust or use None for auto. + seq_chunk=12, # Process n frames at once for better parallelism. +) \ No newline at end of file diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/train.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/train.py new file mode 100644 index 0000000..e38918d --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/train.py @@ -0,0 +1,504 @@ +""" +# First update `train_config.py` to set paths to your dataset locations. + +# You may want to change `--num-workers` according to your machine's memory. +# The default num-workers=8 may cause the dataloader to exit unexpectedly when +# the machine is out of memory. 
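The four stage commands below resume from each other's checkpoints (epoch 19 into Stage 2, epoch 21 into Stage 3, epoch 22 into Stage 4), progressively raising sequence length and resolution while lowering the learning rates. The Trainer class further down takes (rank, world_size) and joins an NCCL process group per GPU, so each stage is launched with one process per device; a hedged sketch of such a launcher only, since the file's actual entry point sits at its bottom and may differ:

import torch
from torch import multiprocessing as mp
# from train import Trainer  # hypothetical import path when used outside this file

if __name__ == '__main__':
    world_size = torch.cuda.device_count()
    # mp.spawn calls Trainer(rank, world_size) once per GPU;
    # Trainer.__init__ runs the entire training loop for its rank.
    mp.spawn(Trainer, nprocs=world_size, args=(world_size,))
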
+ +# Stage 1 +python train.py \ + --model-variant mobilenetv3 \ + --dataset videomatte \ + --resolution-lr 512 \ + --seq-length-lr 15 \ + --learning-rate-backbone 0.0001 \ + --learning-rate-aspp 0.0002 \ + --learning-rate-decoder 0.0002 \ + --learning-rate-refiner 0 \ + --checkpoint-dir checkpoint/stage1 \ + --log-dir log/stage1 \ + --epoch-start 0 \ + --epoch-end 20 + +# Stage 2 +python train.py \ + --model-variant mobilenetv3 \ + --dataset videomatte \ + --resolution-lr 512 \ + --seq-length-lr 50 \ + --learning-rate-backbone 0.00005 \ + --learning-rate-aspp 0.0001 \ + --learning-rate-decoder 0.0001 \ + --learning-rate-refiner 0 \ + --checkpoint checkpoint/stage1/epoch-19.pth \ + --checkpoint-dir checkpoint/stage2 \ + --log-dir log/stage2 \ + --epoch-start 20 \ + --epoch-end 22 + +# Stage 3 +python train.py \ + --model-variant mobilenetv3 \ + --dataset videomatte \ + --train-hr \ + --resolution-lr 512 \ + --resolution-hr 2048 \ + --seq-length-lr 40 \ + --seq-length-hr 6 \ + --learning-rate-backbone 0.00001 \ + --learning-rate-aspp 0.00001 \ + --learning-rate-decoder 0.00001 \ + --learning-rate-refiner 0.0002 \ + --checkpoint checkpoint/stage2/epoch-21.pth \ + --checkpoint-dir checkpoint/stage3 \ + --log-dir log/stage3 \ + --epoch-start 22 \ + --epoch-end 23 + +# Stage 4 +python train.py \ + --model-variant mobilenetv3 \ + --dataset imagematte \ + --train-hr \ + --resolution-lr 512 \ + --resolution-hr 2048 \ + --seq-length-lr 40 \ + --seq-length-hr 6 \ + --learning-rate-backbone 0.00001 \ + --learning-rate-aspp 0.00001 \ + --learning-rate-decoder 0.00005 \ + --learning-rate-refiner 0.0002 \ + --checkpoint checkpoint/stage3/epoch-22.pth \ + --checkpoint-dir checkpoint/stage4 \ + --log-dir log/stage4 \ + --epoch-start 23 \ + --epoch-end 28 +""" + + +import argparse +import torch +import random +import os +from torch import nn +from torch import distributed as dist +from torch import multiprocessing as mp +from torch.nn import functional as F +from torch.nn.parallel import DistributedDataParallel as DDP +from torch.optim import Adam +from torch.cuda.amp import autocast, GradScaler +from torch.utils.data import DataLoader, ConcatDataset +from torch.utils.data.distributed import DistributedSampler +from torch.utils.tensorboard import SummaryWriter +from torchvision.utils import make_grid +from torchvision.transforms.functional import center_crop +from tqdm import tqdm + +from dataset.videomatte import ( + VideoMatteDataset, + VideoMatteTrainAugmentation, + VideoMatteValidAugmentation, +) +from dataset.imagematte import ( + ImageMatteDataset, + ImageMatteAugmentation +) +from dataset.coco import ( + CocoPanopticDataset, + CocoPanopticTrainAugmentation, +) +from dataset.spd import ( + SuperviselyPersonDataset +) +from dataset.youtubevis import ( + YouTubeVISDataset, + YouTubeVISAugmentation +) +from dataset.augmentation import ( + TrainFrameSampler, + ValidFrameSampler +) +from model import MattingNetwork +from train_config import DATA_PATHS +from train_loss import matting_loss, segmentation_loss + + +class Trainer: + def __init__(self, rank, world_size): + self.parse_args() + self.init_distributed(rank, world_size) + self.init_datasets() + self.init_model() + self.init_writer() + self.train() + self.cleanup() + + def parse_args(self): + parser = argparse.ArgumentParser() + # Model + parser.add_argument('--model-variant', type=str, required=True, choices=['mobilenetv3', 'resnet50']) + # Matting dataset + parser.add_argument('--dataset', type=str, required=True, choices=['videomatte', 
'imagematte']) + # Learning rate + parser.add_argument('--learning-rate-backbone', type=float, required=True) + parser.add_argument('--learning-rate-aspp', type=float, required=True) + parser.add_argument('--learning-rate-decoder', type=float, required=True) + parser.add_argument('--learning-rate-refiner', type=float, required=True) + # Training setting + parser.add_argument('--train-hr', action='store_true') + parser.add_argument('--resolution-lr', type=int, default=512) + parser.add_argument('--resolution-hr', type=int, default=2048) + parser.add_argument('--seq-length-lr', type=int, required=True) + parser.add_argument('--seq-length-hr', type=int, default=6) + parser.add_argument('--downsample-ratio', type=float, default=0.25) + parser.add_argument('--batch-size-per-gpu', type=int, default=1) + parser.add_argument('--num-workers', type=int, default=8) + parser.add_argument('--epoch-start', type=int, default=0) + parser.add_argument('--epoch-end', type=int, default=16) + # Tensorboard logging + parser.add_argument('--log-dir', type=str, required=True) + parser.add_argument('--log-train-loss-interval', type=int, default=20) + parser.add_argument('--log-train-images-interval', type=int, default=500) + # Checkpoint loading and saving + parser.add_argument('--checkpoint', type=str) + parser.add_argument('--checkpoint-dir', type=str, required=True) + parser.add_argument('--checkpoint-save-interval', type=int, default=500) + # Distributed + parser.add_argument('--distributed-addr', type=str, default='localhost') + parser.add_argument('--distributed-port', type=str, default='12355') + # Debugging + parser.add_argument('--disable-progress-bar', action='store_true') + parser.add_argument('--disable-validation', action='store_true') + parser.add_argument('--disable-mixed-precision', action='store_true') + self.args = parser.parse_args() + + def init_distributed(self, rank, world_size): + self.rank = rank + self.world_size = world_size + self.log('Initializing distributed') + os.environ['MASTER_ADDR'] = self.args.distributed_addr + os.environ['MASTER_PORT'] = self.args.distributed_port + dist.init_process_group("nccl", rank=rank, world_size=world_size) + + def init_datasets(self): + self.log('Initializing matting datasets') + size_hr = (self.args.resolution_hr, self.args.resolution_hr) + size_lr = (self.args.resolution_lr, self.args.resolution_lr) + + # Matting datasets: + if self.args.dataset == 'videomatte': + self.dataset_lr_train = VideoMatteDataset( + videomatte_dir=DATA_PATHS['videomatte']['train'], + background_image_dir=DATA_PATHS['background_images']['train'], + background_video_dir=DATA_PATHS['background_videos']['train'], + size=self.args.resolution_lr, + seq_length=self.args.seq_length_lr, + seq_sampler=TrainFrameSampler(), + transform=VideoMatteTrainAugmentation(size_lr)) + if self.args.train_hr: + self.dataset_hr_train = VideoMatteDataset( + videomatte_dir=DATA_PATHS['videomatte']['train'], + background_image_dir=DATA_PATHS['background_images']['train'], + background_video_dir=DATA_PATHS['background_videos']['train'], + size=self.args.resolution_hr, + seq_length=self.args.seq_length_hr, + seq_sampler=TrainFrameSampler(), + transform=VideoMatteTrainAugmentation(size_hr)) + self.dataset_valid = VideoMatteDataset( + videomatte_dir=DATA_PATHS['videomatte']['valid'], + background_image_dir=DATA_PATHS['background_images']['valid'], + background_video_dir=DATA_PATHS['background_videos']['valid'], + size=self.args.resolution_hr if self.args.train_hr else self.args.resolution_lr, + 
seq_length=self.args.seq_length_hr if self.args.train_hr else self.args.seq_length_lr, + seq_sampler=ValidFrameSampler(), + transform=VideoMatteValidAugmentation(size_hr if self.args.train_hr else size_lr)) + else: + self.dataset_lr_train = ImageMatteDataset( + imagematte_dir=DATA_PATHS['imagematte']['train'], + background_image_dir=DATA_PATHS['background_images']['train'], + background_video_dir=DATA_PATHS['background_videos']['train'], + size=self.args.resolution_lr, + seq_length=self.args.seq_length_lr, + seq_sampler=TrainFrameSampler(), + transform=ImageMatteAugmentation(size_lr)) + if self.args.train_hr: + self.dataset_hr_train = ImageMatteDataset( + imagematte_dir=DATA_PATHS['imagematte']['train'], + background_image_dir=DATA_PATHS['background_images']['train'], + background_video_dir=DATA_PATHS['background_videos']['train'], + size=self.args.resolution_hr, + seq_length=self.args.seq_length_hr, + seq_sampler=TrainFrameSampler(), + transform=ImageMatteAugmentation(size_hr)) + self.dataset_valid = ImageMatteDataset( + imagematte_dir=DATA_PATHS['imagematte']['valid'], + background_image_dir=DATA_PATHS['background_images']['valid'], + background_video_dir=DATA_PATHS['background_videos']['valid'], + size=self.args.resolution_hr if self.args.train_hr else self.args.resolution_lr, + seq_length=self.args.seq_length_hr if self.args.train_hr else self.args.seq_length_lr, + seq_sampler=ValidFrameSampler(), + transform=ImageMatteAugmentation(size_hr if self.args.train_hr else size_lr)) + + # Matting dataloaders: + self.datasampler_lr_train = DistributedSampler( + dataset=self.dataset_lr_train, + rank=self.rank, + num_replicas=self.world_size, + shuffle=True) + self.dataloader_lr_train = DataLoader( + dataset=self.dataset_lr_train, + batch_size=self.args.batch_size_per_gpu, + num_workers=self.args.num_workers, + sampler=self.datasampler_lr_train, + pin_memory=True) + if self.args.train_hr: + self.datasampler_hr_train = DistributedSampler( + dataset=self.dataset_hr_train, + rank=self.rank, + num_replicas=self.world_size, + shuffle=True) + self.dataloader_hr_train = DataLoader( + dataset=self.dataset_hr_train, + batch_size=self.args.batch_size_per_gpu, + num_workers=self.args.num_workers, + sampler=self.datasampler_hr_train, + pin_memory=True) + self.dataloader_valid = DataLoader( + dataset=self.dataset_valid, + batch_size=self.args.batch_size_per_gpu, + num_workers=self.args.num_workers, + pin_memory=True) + + # Segmentation datasets + self.log('Initializing image segmentation datasets') + self.dataset_seg_image = ConcatDataset([ + CocoPanopticDataset( + imgdir=DATA_PATHS['coco_panoptic']['imgdir'], + anndir=DATA_PATHS['coco_panoptic']['anndir'], + annfile=DATA_PATHS['coco_panoptic']['annfile'], + transform=CocoPanopticTrainAugmentation(size_lr)), + SuperviselyPersonDataset( + imgdir=DATA_PATHS['spd']['imgdir'], + segdir=DATA_PATHS['spd']['segdir'], + transform=CocoPanopticTrainAugmentation(size_lr)) + ]) + self.datasampler_seg_image = DistributedSampler( + dataset=self.dataset_seg_image, + rank=self.rank, + num_replicas=self.world_size, + shuffle=True) + self.dataloader_seg_image = DataLoader( + dataset=self.dataset_seg_image, + batch_size=self.args.batch_size_per_gpu * self.args.seq_length_lr, + num_workers=self.args.num_workers, + sampler=self.datasampler_seg_image, + pin_memory=True) + + self.log('Initializing video segmentation datasets') + self.dataset_seg_video = YouTubeVISDataset( + videodir=DATA_PATHS['youtubevis']['videodir'], + annfile=DATA_PATHS['youtubevis']['annfile'], +
size=self.args.resolution_lr, + seq_length=self.args.seq_length_lr, + seq_sampler=TrainFrameSampler(speed=[1]), + transform=YouTubeVISAugmentation(size_lr)) + self.datasampler_seg_video = DistributedSampler( + dataset=self.dataset_seg_video, + rank=self.rank, + num_replicas=self.world_size, + shuffle=True) + self.dataloader_seg_video = DataLoader( + dataset=self.dataset_seg_video, + batch_size=self.args.batch_size_per_gpu, + num_workers=self.args.num_workers, + sampler=self.datasampler_seg_video, + pin_memory=True) + + def init_model(self): + self.log('Initializing model') + self.model = MattingNetwork(self.args.model_variant, pretrained_backbone=True).to(self.rank) + + if self.args.checkpoint: + self.log(f'Restoring from checkpoint: {self.args.checkpoint}') + self.log(self.model.load_state_dict( + torch.load(self.args.checkpoint, map_location=f'cuda:{self.rank}'))) + + self.model = nn.SyncBatchNorm.convert_sync_batchnorm(self.model) + self.model_ddp = DDP(self.model, device_ids=[self.rank], broadcast_buffers=False, find_unused_parameters=True) + self.optimizer = Adam([ + {'params': self.model.backbone.parameters(), 'lr': self.args.learning_rate_backbone}, + {'params': self.model.aspp.parameters(), 'lr': self.args.learning_rate_aspp}, + {'params': self.model.decoder.parameters(), 'lr': self.args.learning_rate_decoder}, + {'params': self.model.refiner.parameters(), 'lr': self.args.learning_rate_refiner}, + ]) + self.scaler = GradScaler() + + def init_writer(self): + if self.rank == 0: + self.log('Initializing writer') + self.writer = SummaryWriter(self.args.log_dir) + + def train(self): + for epoch in range(self.args.epoch_start, self.args.epoch_end): + self.epoch = epoch + self.step = epoch * len(self.dataloader_lr_train) + + if not self.args.disable_validation: + self.validate() + + self.log(f'Training epoch: {epoch}') + for true_fgr, true_pha, true_bgr in tqdm(self.dataloader_lr_train, disable=self.args.disable_progress_bar, dynamic_ncols=True): + # Low resolution pass + self.train_mat(true_fgr, true_pha, true_bgr, downsample_ratio=1, tag='lr') + + # High resolution pass + if self.args.train_hr: + true_fgr, true_pha, true_bgr = self.load_next_mat_hr_sample() + self.train_mat(true_fgr, true_pha, true_bgr, downsample_ratio=self.args.downsample_ratio, tag='hr') + + # Segmentation pass + if self.step % 2 == 0: + true_img, true_seg = self.load_next_seg_video_sample() + self.train_seg(true_img, true_seg, log_label='seg_video') + else: + true_img, true_seg = self.load_next_seg_image_sample() + self.train_seg(true_img.unsqueeze(1), true_seg.unsqueeze(1), log_label='seg_image') + + if self.step % self.args.checkpoint_save_interval == 0: + self.save() + + self.step += 1 + + def train_mat(self, true_fgr, true_pha, true_bgr, downsample_ratio, tag): + true_fgr = true_fgr.to(self.rank, non_blocking=True) + true_pha = true_pha.to(self.rank, non_blocking=True) + true_bgr = true_bgr.to(self.rank, non_blocking=True) + true_fgr, true_pha, true_bgr = self.random_crop(true_fgr, true_pha, true_bgr) + true_src = true_fgr * true_pha + true_bgr * (1 - true_pha) + + with autocast(enabled=not self.args.disable_mixed_precision): + pred_fgr, pred_pha = self.model_ddp(true_src, downsample_ratio=downsample_ratio)[:2] + loss = matting_loss(pred_fgr, pred_pha, true_fgr, true_pha) + + self.scaler.scale(loss['total']).backward() + self.scaler.step(self.optimizer) + self.scaler.update() + self.optimizer.zero_grad() + + if self.rank == 0 and self.step % self.args.log_train_loss_interval == 0: + for loss_name, loss_value in 
loss.items(): + self.writer.add_scalar(f'train_{tag}_{loss_name}', loss_value, self.step) + + if self.rank == 0 and self.step % self.args.log_train_images_interval == 0: + self.writer.add_image(f'train_{tag}_pred_fgr', make_grid(pred_fgr.flatten(0, 1), nrow=pred_fgr.size(1)), self.step) + self.writer.add_image(f'train_{tag}_pred_pha', make_grid(pred_pha.flatten(0, 1), nrow=pred_pha.size(1)), self.step) + self.writer.add_image(f'train_{tag}_true_fgr', make_grid(true_fgr.flatten(0, 1), nrow=true_fgr.size(1)), self.step) + self.writer.add_image(f'train_{tag}_true_pha', make_grid(true_pha.flatten(0, 1), nrow=true_pha.size(1)), self.step) + self.writer.add_image(f'train_{tag}_true_src', make_grid(true_src.flatten(0, 1), nrow=true_src.size(1)), self.step) + + def train_seg(self, true_img, true_seg, log_label): + true_img = true_img.to(self.rank, non_blocking=True) + true_seg = true_seg.to(self.rank, non_blocking=True) + + true_img, true_seg = self.random_crop(true_img, true_seg) + + with autocast(enabled=not self.args.disable_mixed_precision): + pred_seg = self.model_ddp(true_img, segmentation_pass=True)[0] + loss = segmentation_loss(pred_seg, true_seg) + + self.scaler.scale(loss).backward() + self.scaler.step(self.optimizer) + self.scaler.update() + self.optimizer.zero_grad() + + if self.rank == 0 and (self.step - self.step % 2) % self.args.log_train_loss_interval == 0: + self.writer.add_scalar(f'{log_label}_loss', loss, self.step) + + if self.rank == 0 and (self.step - self.step % 2) % self.args.log_train_images_interval == 0: + self.writer.add_image(f'{log_label}_pred_seg', make_grid(pred_seg.flatten(0, 1).float().sigmoid(), nrow=self.args.seq_length_lr), self.step) + self.writer.add_image(f'{log_label}_true_seg', make_grid(true_seg.flatten(0, 1), nrow=self.args.seq_length_lr), self.step) + self.writer.add_image(f'{log_label}_true_img', make_grid(true_img.flatten(0, 1), nrow=self.args.seq_length_lr), self.step) + + def load_next_mat_hr_sample(self): + try: + sample = next(self.dataiterator_mat_hr) + except (AttributeError, StopIteration): # iterator not created yet (first call) or exhausted + self.datasampler_hr_train.set_epoch(self.datasampler_hr_train.epoch + 1) + self.dataiterator_mat_hr = iter(self.dataloader_hr_train) + sample = next(self.dataiterator_mat_hr) + return sample + + def load_next_seg_video_sample(self): + try: + sample = next(self.dataiterator_seg_video) + except (AttributeError, StopIteration): # iterator not created yet (first call) or exhausted + self.datasampler_seg_video.set_epoch(self.datasampler_seg_video.epoch + 1) + self.dataiterator_seg_video = iter(self.dataloader_seg_video) + sample = next(self.dataiterator_seg_video) + return sample + + def load_next_seg_image_sample(self): + try: + sample = next(self.dataiterator_seg_image) + except (AttributeError, StopIteration): # iterator not created yet (first call) or exhausted + self.datasampler_seg_image.set_epoch(self.datasampler_seg_image.epoch + 1) + self.dataiterator_seg_image = iter(self.dataloader_seg_image) + sample = next(self.dataiterator_seg_image) + return sample + + def validate(self): + if self.rank == 0: + self.log(f'Validating at the start of epoch: {self.epoch}') + self.model_ddp.eval() + total_loss, total_count = 0, 0 + with torch.no_grad(): + with autocast(enabled=not self.args.disable_mixed_precision): + for true_fgr, true_pha, true_bgr in tqdm(self.dataloader_valid, disable=self.args.disable_progress_bar, dynamic_ncols=True): + true_fgr = true_fgr.to(self.rank, non_blocking=True) + true_pha = true_pha.to(self.rank, non_blocking=True) + true_bgr = true_bgr.to(self.rank, non_blocking=True) + true_src = true_fgr * true_pha + true_bgr * (1 - true_pha) + batch_size = true_src.size(0) + pred_fgr, pred_pha = self.model(true_src)[:2] + total_loss +=
matting_loss(pred_fgr, pred_pha, true_fgr, true_pha)['total'].item() * batch_size + total_count += batch_size + avg_loss = total_loss / total_count + self.log(f'Validation set average loss: {avg_loss}') + self.writer.add_scalar('valid_loss', avg_loss, self.step) + self.model_ddp.train() + dist.barrier() + + def random_crop(self, *imgs): + h, w = imgs[0].shape[-2:] + # sample a crop size between half and full resolution + w = random.choice(range(w // 2, w)) + h = random.choice(range(h // 2, h)) + results = [] + for img in imgs: + B, T = img.shape[:2] + img = img.flatten(0, 1) + img = F.interpolate(img, (max(h, w), max(h, w)), mode='bilinear', align_corners=False) + img = center_crop(img, (h, w)) + img = img.reshape(B, T, *img.shape[1:]) + results.append(img) + return results + + def save(self): + if self.rank == 0: + os.makedirs(self.args.checkpoint_dir, exist_ok=True) + torch.save(self.model.state_dict(), os.path.join(self.args.checkpoint_dir, f'epoch-{self.epoch}.pth')) + self.log('Model saved') + dist.barrier() + + def cleanup(self): + dist.destroy_process_group() + + def log(self, msg): + print(f'[GPU{self.rank}] {msg}') + +if __name__ == '__main__': + world_size = torch.cuda.device_count() + mp.spawn( + Trainer, + nprocs=world_size, + args=(world_size,), + join=True) diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/train_config.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/train_config.py new file mode 100644 index 0000000..0792696 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/train_config.py @@ -0,0 +1,68 @@ +""" +Expected directory format: + +VideoMatte Train/Valid: + ├── fgr/ + ├── 0001/ + ├── 00000.jpg + ├── 00001.jpg + ├── pha/ + ├── 0001/ + ├── 00000.jpg + ├── 00001.jpg + +ImageMatte Train/Valid: + ├── fgr/ + ├── sample1.jpg + ├── sample2.jpg + ├── pha/ + ├── sample1.jpg + ├── sample2.jpg + +Background Image Train/Valid + ├── sample1.png + ├── sample2.png + +Background Video Train/Valid + ├── 0000/ + ├── 0000.jpg + ├── 0001.jpg + +""" + + +DATA_PATHS = { + + 'videomatte': { + 'train': '../matting-data/VideoMatte240K_JPEG_SD/train', + 'valid': '../matting-data/VideoMatte240K_JPEG_SD/valid', + }, + 'imagematte': { + 'train': '../matting-data/ImageMatte/train', + 'valid': '../matting-data/ImageMatte/valid', + }, + 'background_images': { + 'train': '../matting-data/Backgrounds/train', + 'valid': '../matting-data/Backgrounds/valid', + }, + 'background_videos': { + 'train': '../matting-data/BackgroundVideos/train', + 'valid': '../matting-data/BackgroundVideos/valid', + }, + + + 'coco_panoptic': { + 'imgdir': '../matting-data/coco/train2017/', + 'anndir': '../matting-data/coco/panoptic_train2017/', + 'annfile': '../matting-data/coco/annotations/panoptic_train2017.json', + }, + 'spd': { + 'imgdir': '../matting-data/SuperviselyPersonDataset/img', + 'segdir': '../matting-data/SuperviselyPersonDataset/seg', + }, + 'youtubevis': { + 'videodir': '../matting-data/YouTubeVIS/train/JPEGImages', + 'annfile': '../matting-data/YouTubeVIS/train/instances.json', + } + +} diff --git a/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/train_loss.py b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/train_loss.py new file mode 100644 index 0000000..01d8f1d --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/RobustVideoMatting/train_loss.py @@ -0,0 +1,101 @@ +import torch +from torch.nn import functional as F + +# --------------------------------------------------------------------------------- Train Loss + + +def matting_loss(pred_fgr, pred_pha, true_fgr,
true_pha): + """ + Args: + pred_fgr: Shape(B, T, 3, H, W) + pred_pha: Shape(B, T, 1, H, W) + true_fgr: Shape(B, T, 3, H, W) + true_pha: Shape(B, T, 1, H, W) + """ + loss = dict() + # Alpha losses + loss['pha_l1'] = F.l1_loss(pred_pha, true_pha) + loss['pha_laplacian'] = laplacian_loss(pred_pha.flatten(0, 1), true_pha.flatten(0, 1)) + loss['pha_coherence'] = F.mse_loss(pred_pha[:, 1:] - pred_pha[:, :-1], + true_pha[:, 1:] - true_pha[:, :-1]) * 5 + # Foreground losses + true_msk = true_pha.gt(0) + pred_fgr = pred_fgr * true_msk + true_fgr = true_fgr * true_msk + loss['fgr_l1'] = F.l1_loss(pred_fgr, true_fgr) + loss['fgr_coherence'] = F.mse_loss(pred_fgr[:, 1:] - pred_fgr[:, :-1], + true_fgr[:, 1:] - true_fgr[:, :-1]) * 5 + # Total + loss['total'] = loss['pha_l1'] + loss['pha_coherence'] + loss['pha_laplacian'] \ + + loss['fgr_l1'] + loss['fgr_coherence'] + return loss + +def segmentation_loss(pred_seg, true_seg): + """ + Args: + pred_seg: Shape(B, T, 1, H, W) + true_seg: Shape(B, T, 1, H, W) + """ + return F.binary_cross_entropy_with_logits(pred_seg, true_seg) + + +# ----------------------------------------------------------------------------- Laplacian Loss + + +def laplacian_loss(pred, true, max_levels=5): + kernel = gauss_kernel(device=pred.device, dtype=pred.dtype) + pred_pyramid = laplacian_pyramid(pred, kernel, max_levels) + true_pyramid = laplacian_pyramid(true, kernel, max_levels) + loss = 0 + for level in range(max_levels): + loss += (2 ** level) * F.l1_loss(pred_pyramid[level], true_pyramid[level]) + return loss / max_levels + +def laplacian_pyramid(img, kernel, max_levels): + current = img + pyramid = [] + for _ in range(max_levels): + current = crop_to_even_size(current) + down = downsample(current, kernel) + up = upsample(down, kernel) + diff = current - up + pyramid.append(diff) + current = down + return pyramid + +def gauss_kernel(device='cpu', dtype=torch.float32): + kernel = torch.tensor([[1, 4, 6, 4, 1], + [4, 16, 24, 16, 4], + [6, 24, 36, 24, 6], + [4, 16, 24, 16, 4], + [1, 4, 6, 4, 1]], device=device, dtype=dtype) + kernel /= 256 + kernel = kernel[None, None, :, :] + return kernel + +def gauss_convolution(img, kernel): + B, C, H, W = img.shape + img = img.reshape(B * C, 1, H, W) + img = F.pad(img, (2, 2, 2, 2), mode='reflect') + img = F.conv2d(img, kernel) + img = img.reshape(B, C, H, W) + return img + +def downsample(img, kernel): + img = gauss_convolution(img, kernel) + img = img[:, :, ::2, ::2] + return img + +def upsample(img, kernel): + B, C, H, W = img.shape + out = torch.zeros((B, C, H * 2, W * 2), device=img.device, dtype=img.dtype) + out[:, :, ::2, ::2] = img * 4 + out = gauss_convolution(out, kernel) + return out + +def crop_to_even_size(img): + H, W = img.shape[2:] + H = H - H % 2 + W = W - W % 2 + return img[:, :, :H, :W] + diff --git a/motion-gan-pipeline/preprocessing/third/densepose/Base-DensePose-RCNN-FPN.yaml b/motion-gan-pipeline/preprocessing/third/densepose/Base-DensePose-RCNN-FPN.yaml new file mode 100644 index 0000000..1579187 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/Base-DensePose-RCNN-FPN.yaml @@ -0,0 +1,48 @@ +VERSION: 2 +MODEL: + META_ARCHITECTURE: "GeneralizedRCNN" + BACKBONE: + NAME: "build_resnet_fpn_backbone" + RESNETS: + OUT_FEATURES: ["res2", "res3", "res4", "res5"] + FPN: + IN_FEATURES: ["res2", "res3", "res4", "res5"] + ANCHOR_GENERATOR: + SIZES: [[32], [64], [128], [256], [512]] # One size for each in feature map + ASPECT_RATIOS: [[0.5, 1.0, 2.0]] # Three aspect ratios (same for all in feature maps) + RPN: + 
IN_FEATURES: ["p2", "p3", "p4", "p5", "p6"] + PRE_NMS_TOPK_TRAIN: 2000 # Per FPN level + PRE_NMS_TOPK_TEST: 1000 # Per FPN level + # Detectron1 uses 2000 proposals per-batch, + # (See "modeling/rpn/rpn_outputs.py" for details of this legacy issue) + # which is approximately 1000 proposals per-image since the default batch size for FPN is 2. + POST_NMS_TOPK_TRAIN: 1000 + POST_NMS_TOPK_TEST: 1000 + + DENSEPOSE_ON: True + ROI_HEADS: + NAME: "DensePoseROIHeads" + IN_FEATURES: ["p2", "p3", "p4", "p5"] + NUM_CLASSES: 1 + ROI_BOX_HEAD: + NAME: "FastRCNNConvFCHead" + NUM_FC: 2 + POOLER_RESOLUTION: 7 + POOLER_SAMPLING_RATIO: 2 + POOLER_TYPE: "ROIAlign" + ROI_DENSEPOSE_HEAD: + NAME: "DensePoseV1ConvXHead" + POOLER_TYPE: "ROIAlign" + NUM_COARSE_SEGM_CHANNELS: 2 +DATASETS: + TRAIN: ("densepose_coco_2014_train", "densepose_coco_2014_valminusminival") + TEST: ("densepose_coco_2014_minival",) +SOLVER: + IMS_PER_BATCH: 16 + BASE_LR: 0.01 + STEPS: (60000, 80000) + MAX_ITER: 90000 + WARMUP_FACTOR: 0.1 +INPUT: + MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/__init__.py b/motion-gan-pipeline/preprocessing/third/densepose/__init__.py new file mode 100644 index 0000000..b50a3da --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/__init__.py @@ -0,0 +1,20 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from .data.datasets import builtin # just to register data +from .converters import builtin as builtin_converters # register converters +from .config import ( + add_densepose_config, + add_densepose_head_config, + add_hrnet_config, + add_dataset_category_config, + add_bootstrap_config, + load_bootstrap_config, +) +from .structures import DensePoseDataRelative, DensePoseList, DensePoseTransformData +from .evaluation import DensePoseCOCOEvaluator +from .modeling.roi_heads import DensePoseROIHeads +from .modeling.test_time_augmentation import ( + DensePoseGeneralizedRCNNWithTTA, + DensePoseDatasetMapperTTA, +) +from .utils.transform import load_from_cfg +from .modeling.hrfpn import build_hrfpn_backbone diff --git a/motion-gan-pipeline/preprocessing/third/densepose/config.py b/motion-gan-pipeline/preprocessing/third/densepose/config.py new file mode 100644 index 0000000..2a06a09 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/config.py @@ -0,0 +1,277 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates.
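+# Typical wiring (an illustrative sketch; the YAML path below is hypothetical): +# the add_*_config helpers in this module extend a detectron2 CfgNode with the +# DensePose defaults before a config file is merged, e.g. +# +# from detectron2.config import get_cfg +# from densepose import add_densepose_config +# +# cfg = get_cfg() +# add_densepose_config(cfg) +# cfg.merge_from_file("configs/densepose_rcnn_R_50_FPN_s1x.yaml")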
+# pyre-ignore-all-errors + +from detectron2.config import CfgNode as CN + + +def add_dataset_category_config(cfg: CN) -> None: + """ + Add config for additional category-related dataset options + - category whitelisting + - category mapping + """ + _C = cfg + _C.DATASETS.CATEGORY_MAPS = CN(new_allowed=True) + _C.DATASETS.WHITELISTED_CATEGORIES = CN(new_allowed=True) + # class to mesh mapping + _C.DATASETS.CLASS_TO_MESH_NAME_MAPPING = CN(new_allowed=True) + + +def add_evaluation_config(cfg: CN) -> None: + _C = cfg + _C.DENSEPOSE_EVALUATION = CN() + # evaluator type, possible values: + # - "iou": evaluator for models that produce iou data + # - "cse": evaluator for models that produce cse data + _C.DENSEPOSE_EVALUATION.TYPE = "iou" + # storage for DensePose results, possible values: + # - "none": no explicit storage, all the results are stored in the + # dictionary with predictions, memory intensive; + # historically the default storage type + # - "ram": RAM storage, uses per-process RAM storage, which is + # reduced to a single process storage on later stages, + # less memory intensive + # - "file": file storage, uses per-process file-based storage, + # the least memory intensive, but may create bottlenecks + # on file system accesses + _C.DENSEPOSE_EVALUATION.STORAGE = "none" + # minimum threshold for IOU values: the lower its value is, + # the more matches are produced (and the higher the AP score) + _C.DENSEPOSE_EVALUATION.MIN_IOU_THRESHOLD = 0.5 + # Non-distributed inference is slower (at inference time) but can avoid RAM OOM + _C.DENSEPOSE_EVALUATION.DISTRIBUTED_INFERENCE = True + # evaluate mesh alignment based on vertex embeddings, only makes sense in CSE context + _C.DENSEPOSE_EVALUATION.EVALUATE_MESH_ALIGNMENT = False + # meshes to compute mesh alignment for + _C.DENSEPOSE_EVALUATION.MESH_ALIGNMENT_MESH_NAMES = [] + + +def add_bootstrap_config(cfg: CN) -> None: + """Add config options for bootstrap datasets and the bootstrap model.""" + _C = cfg + _C.BOOTSTRAP_DATASETS = [] + _C.BOOTSTRAP_MODEL = CN() + _C.BOOTSTRAP_MODEL.WEIGHTS = "" + _C.BOOTSTRAP_MODEL.DEVICE = "cuda" + + +def get_bootstrap_dataset_config() -> CN: + _C = CN() + _C.DATASET = "" + # ratio used to mix data loaders + _C.RATIO = 0.1 + # image loader + _C.IMAGE_LOADER = CN(new_allowed=True) + _C.IMAGE_LOADER.TYPE = "" + _C.IMAGE_LOADER.BATCH_SIZE = 4 + _C.IMAGE_LOADER.NUM_WORKERS = 4 + _C.IMAGE_LOADER.CATEGORIES = [] + _C.IMAGE_LOADER.MAX_COUNT_PER_CATEGORY = 1_000_000 + _C.IMAGE_LOADER.CATEGORY_TO_CLASS_MAPPING = CN(new_allowed=True) + # inference + _C.INFERENCE = CN() + # batch size for model inputs + _C.INFERENCE.INPUT_BATCH_SIZE = 4 + # batch size to group model outputs + _C.INFERENCE.OUTPUT_BATCH_SIZE = 2 + # sampled data + _C.DATA_SAMPLER = CN(new_allowed=True) + _C.DATA_SAMPLER.TYPE = "" + _C.DATA_SAMPLER.USE_GROUND_TRUTH_CATEGORIES = False + # filter + _C.FILTER = CN(new_allowed=True) + _C.FILTER.TYPE = "" + return _C + + +def load_bootstrap_config(cfg: CN) -> None: + """ + Bootstrap datasets are given as a list of `dict` that are not automatically + converted into CfgNode.
This method processes all bootstrap dataset entries + and ensures that they are in CfgNode format and comply with the specification + """ + if not cfg.BOOTSTRAP_DATASETS: + return + + bootstrap_datasets_cfgnodes = [] + for dataset_cfg in cfg.BOOTSTRAP_DATASETS: + _C = get_bootstrap_dataset_config().clone() + _C.merge_from_other_cfg(CN(dataset_cfg)) + bootstrap_datasets_cfgnodes.append(_C) + cfg.BOOTSTRAP_DATASETS = bootstrap_datasets_cfgnodes + + +def add_densepose_head_cse_config(cfg: CN) -> None: + """ + Add configuration options for Continuous Surface Embeddings (CSE) + """ + _C = cfg + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE = CN() + # Dimensionality D of the embedding space + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBED_SIZE = 16 + # Embedder specifications for various mesh IDs + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBEDDERS = CN(new_allowed=True) + # normalization coefficient for embedding distances + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBEDDING_DIST_GAUSS_SIGMA = 0.01 + # normalization coefficient for geodesic distances + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.GEODESIC_DIST_GAUSS_SIGMA = 0.01 + # embedding loss weight + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBED_LOSS_WEIGHT = 0.6 + # embedding loss name, currently the following options are supported: + # - EmbeddingLoss: cross-entropy on vertex labels + # - SoftEmbeddingLoss: cross-entropy on vertex label combined with + # Gaussian penalty on distance between vertices + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBED_LOSS_NAME = "EmbeddingLoss" + # optimizer hyperparameters + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.FEATURES_LR_FACTOR = 1.0 + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBEDDING_LR_FACTOR = 1.0 + # Shape to shape cycle consistency loss parameters: + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.SHAPE_TO_SHAPE_CYCLE_LOSS = CN({"ENABLED": False}) + # shape to shape cycle consistency loss weight + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.SHAPE_TO_SHAPE_CYCLE_LOSS.WEIGHT = 0.025 + # norm type used for loss computation + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.SHAPE_TO_SHAPE_CYCLE_LOSS.NORM_P = 2 + # normalization term for embedding similarity matrices + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.SHAPE_TO_SHAPE_CYCLE_LOSS.TEMPERATURE = 0.05 + # maximum number of vertices to include into shape to shape cycle loss + # if negative or zero, all vertices are considered + # if positive, random subset of vertices of given size is considered + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.SHAPE_TO_SHAPE_CYCLE_LOSS.MAX_NUM_VERTICES = 4936 + # Pixel to shape cycle consistency loss parameters: + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.PIX_TO_SHAPE_CYCLE_LOSS = CN({"ENABLED": False}) + # pixel to shape cycle consistency loss weight + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.PIX_TO_SHAPE_CYCLE_LOSS.WEIGHT = 0.0001 + # norm type used for loss computation + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.PIX_TO_SHAPE_CYCLE_LOSS.NORM_P = 2 + # map images to all meshes and back (if false, use only gt meshes from the batch) + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.PIX_TO_SHAPE_CYCLE_LOSS.USE_ALL_MESHES_NOT_GT_ONLY = False + # Randomly select at most this number of pixels from every instance + # if negative or zero, all vertices are considered + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.PIX_TO_SHAPE_CYCLE_LOSS.NUM_PIXELS_TO_SAMPLE = 100 + # normalization factor for pixel to pixel distances (higher value = smoother distribution) + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.PIX_TO_SHAPE_CYCLE_LOSS.PIXEL_SIGMA = 5.0 + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.PIX_TO_SHAPE_CYCLE_LOSS.TEMPERATURE_PIXEL_TO_VERTEX = 0.05 + _C.MODEL.ROI_DENSEPOSE_HEAD.CSE.PIX_TO_SHAPE_CYCLE_LOSS.TEMPERATURE_VERTEX_TO_PIXEL = 0.05 + + +def 
add_densepose_head_config(cfg: CN) -> None: + """ + Add config for densepose head. + """ + _C = cfg + + _C.MODEL.DENSEPOSE_ON = True + + _C.MODEL.ROI_DENSEPOSE_HEAD = CN() + _C.MODEL.ROI_DENSEPOSE_HEAD.NAME = "" + _C.MODEL.ROI_DENSEPOSE_HEAD.NUM_STACKED_CONVS = 8 + # Number of parts used for point labels + _C.MODEL.ROI_DENSEPOSE_HEAD.NUM_PATCHES = 24 + _C.MODEL.ROI_DENSEPOSE_HEAD.DECONV_KERNEL = 4 + _C.MODEL.ROI_DENSEPOSE_HEAD.CONV_HEAD_DIM = 512 + _C.MODEL.ROI_DENSEPOSE_HEAD.CONV_HEAD_KERNEL = 3 + _C.MODEL.ROI_DENSEPOSE_HEAD.UP_SCALE = 2 + _C.MODEL.ROI_DENSEPOSE_HEAD.HEATMAP_SIZE = 112 + _C.MODEL.ROI_DENSEPOSE_HEAD.POOLER_TYPE = "ROIAlignV2" + _C.MODEL.ROI_DENSEPOSE_HEAD.POOLER_RESOLUTION = 28 + _C.MODEL.ROI_DENSEPOSE_HEAD.POOLER_SAMPLING_RATIO = 2 + _C.MODEL.ROI_DENSEPOSE_HEAD.NUM_COARSE_SEGM_CHANNELS = 2 # 15 or 2 + # Overlap threshold for an RoI to be considered foreground (if >= FG_IOU_THRESHOLD) + _C.MODEL.ROI_DENSEPOSE_HEAD.FG_IOU_THRESHOLD = 0.7 + # Loss weights for annotation masks.(14 Parts) + _C.MODEL.ROI_DENSEPOSE_HEAD.INDEX_WEIGHTS = 5.0 + # Loss weights for surface parts. (24 Parts) + _C.MODEL.ROI_DENSEPOSE_HEAD.PART_WEIGHTS = 1.0 + # Loss weights for UV regression. + _C.MODEL.ROI_DENSEPOSE_HEAD.POINT_REGRESSION_WEIGHTS = 0.01 + # Coarse segmentation is trained using instance segmentation task data + _C.MODEL.ROI_DENSEPOSE_HEAD.COARSE_SEGM_TRAINED_BY_MASKS = False + # For Decoder + _C.MODEL.ROI_DENSEPOSE_HEAD.DECODER_ON = True + _C.MODEL.ROI_DENSEPOSE_HEAD.DECODER_NUM_CLASSES = 256 + _C.MODEL.ROI_DENSEPOSE_HEAD.DECODER_CONV_DIMS = 256 + _C.MODEL.ROI_DENSEPOSE_HEAD.DECODER_NORM = "" + _C.MODEL.ROI_DENSEPOSE_HEAD.DECODER_COMMON_STRIDE = 4 + # For DeepLab head + _C.MODEL.ROI_DENSEPOSE_HEAD.DEEPLAB = CN() + _C.MODEL.ROI_DENSEPOSE_HEAD.DEEPLAB.NORM = "GN" + _C.MODEL.ROI_DENSEPOSE_HEAD.DEEPLAB.NONLOCAL_ON = 0 + # Predictor class name, must be registered in DENSEPOSE_PREDICTOR_REGISTRY + # Some registered predictors: + # "DensePoseChartPredictor": predicts segmentation and UV coordinates for predefined charts + # "DensePoseChartWithConfidencePredictor": predicts segmentation, UV coordinates + # and associated confidences for predefined charts (default) + # "DensePoseEmbeddingWithConfidencePredictor": predicts segmentation, embeddings + # and associated confidences for CSE + _C.MODEL.ROI_DENSEPOSE_HEAD.PREDICTOR_NAME = "DensePoseChartWithConfidencePredictor" + # Loss class name, must be registered in DENSEPOSE_LOSS_REGISTRY + # Some registered losses: + # "DensePoseChartLoss": loss for chart-based models that estimate + # segmentation and UV coordinates + # "DensePoseChartWithConfidenceLoss": loss for chart-based models that estimate + # segmentation, UV coordinates and the corresponding confidences (default) + _C.MODEL.ROI_DENSEPOSE_HEAD.LOSS_NAME = "DensePoseChartWithConfidenceLoss" + # Confidences + # Enable learning UV confidences (variances) along with the actual values + _C.MODEL.ROI_DENSEPOSE_HEAD.UV_CONFIDENCE = CN({"ENABLED": False}) + # UV confidence lower bound + _C.MODEL.ROI_DENSEPOSE_HEAD.UV_CONFIDENCE.EPSILON = 0.01 + # Enable learning segmentation confidences (variances) along with the actual values + _C.MODEL.ROI_DENSEPOSE_HEAD.SEGM_CONFIDENCE = CN({"ENABLED": False}) + # Segmentation confidence lower bound + _C.MODEL.ROI_DENSEPOSE_HEAD.SEGM_CONFIDENCE.EPSILON = 0.01 + # Statistical model type for confidence learning, possible values: + # - "iid_iso": statistically independent identically distributed residuals + # with isotropic covariance + # - "indep_aniso": 
statistically independent residuals with anisotropic + # covariances + _C.MODEL.ROI_DENSEPOSE_HEAD.UV_CONFIDENCE.TYPE = "iid_iso" + # List of angles for rotation in data augmentation during training + _C.INPUT.ROTATION_ANGLES = [0] + _C.TEST.AUG.ROTATION_ANGLES = () # Rotation TTA + + add_densepose_head_cse_config(cfg) + + +def add_hrnet_config(cfg: CN) -> None: + """ + Add config for HRNet backbone. + """ + _C = cfg + + # For HigherHRNet w32 + _C.MODEL.HRNET = CN() + _C.MODEL.HRNET.STEM_INPLANES = 64 + _C.MODEL.HRNET.STAGE2 = CN() + _C.MODEL.HRNET.STAGE2.NUM_MODULES = 1 + _C.MODEL.HRNET.STAGE2.NUM_BRANCHES = 2 + _C.MODEL.HRNET.STAGE2.BLOCK = "BASIC" + _C.MODEL.HRNET.STAGE2.NUM_BLOCKS = [4, 4] + _C.MODEL.HRNET.STAGE2.NUM_CHANNELS = [32, 64] + _C.MODEL.HRNET.STAGE2.FUSE_METHOD = "SUM" + _C.MODEL.HRNET.STAGE3 = CN() + _C.MODEL.HRNET.STAGE3.NUM_MODULES = 4 + _C.MODEL.HRNET.STAGE3.NUM_BRANCHES = 3 + _C.MODEL.HRNET.STAGE3.BLOCK = "BASIC" + _C.MODEL.HRNET.STAGE3.NUM_BLOCKS = [4, 4, 4] + _C.MODEL.HRNET.STAGE3.NUM_CHANNELS = [32, 64, 128] + _C.MODEL.HRNET.STAGE3.FUSE_METHOD = "SUM" + _C.MODEL.HRNET.STAGE4 = CN() + _C.MODEL.HRNET.STAGE4.NUM_MODULES = 3 + _C.MODEL.HRNET.STAGE4.NUM_BRANCHES = 4 + _C.MODEL.HRNET.STAGE4.BLOCK = "BASIC" + _C.MODEL.HRNET.STAGE4.NUM_BLOCKS = [4, 4, 4, 4] + _C.MODEL.HRNET.STAGE4.NUM_CHANNELS = [32, 64, 128, 256] + _C.MODEL.HRNET.STAGE4.FUSE_METHOD = "SUM" + + _C.MODEL.HRNET.HRFPN = CN() + _C.MODEL.HRNET.HRFPN.OUT_CHANNELS = 256 + + +def add_densepose_config(cfg: CN) -> None: + add_densepose_head_config(cfg) + add_hrnet_config(cfg) + add_bootstrap_config(cfg) + add_dataset_category_config(cfg) + add_evaluation_config(cfg) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/converters/__init__.py b/motion-gan-pipeline/preprocessing/third/densepose/converters/__init__.py new file mode 100644 index 0000000..930339e --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/converters/__init__.py @@ -0,0 +1,15 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from .hflip import HFlipConverter +from .to_mask import ToMaskConverter +from .to_chart_result import ToChartResultConverter, ToChartResultConverterWithConfidences +from .segm_to_mask import ( + predictor_output_with_fine_and_coarse_segm_to_mask, + predictor_output_with_coarse_segm_to_mask, + resample_fine_and_coarse_segm_to_bbox, +) +from .chart_output_to_chart_result import ( + densepose_chart_predictor_output_to_result, + densepose_chart_predictor_output_to_result_with_confidences, +) +from .chart_output_hflip import densepose_chart_predictor_output_hflip diff --git a/motion-gan-pipeline/preprocessing/third/densepose/converters/base.py b/motion-gan-pipeline/preprocessing/third/densepose/converters/base.py new file mode 100644 index 0000000..c9dbe56 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/converters/base.py @@ -0,0 +1,93 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from typing import Any, Tuple, Type +import torch + + +class BaseConverter: + """ + Converter base class to be reused by various converters. + Converter allows one to convert data from various source types to a particular + destination type. Each source type needs to register its converter. The + registration for each source type is valid for all descendants of that type. + """ + + @classmethod + def register(cls, from_type: Type, converter: Any = None): + """ + Registers a converter for the specified type. + Can be used as a decorator (if converter is None), or called as a method. 
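+ + A minimal sketch of both styles (illustrative; `SomeConverter`, `MyOutput` + and `my_output_converter` are hypothetical names): + + @SomeConverter.register(MyOutput) + def my_output_converter(instance, *args, **kwargs): + ... + + # or, equivalently, without the decorator: + SomeConverter.register(MyOutput, my_output_converter)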
+ + Args: + from_type (type): type to register the converter for; + all instances of this type will use the same converter + converter (callable): converter to be registered for the given + type; if None, this method is assumed to be a decorator for the converter + """ + + if converter is not None: + cls._do_register(from_type, converter) + + def wrapper(converter: Any) -> Any: + cls._do_register(from_type, converter) + return converter + + return wrapper + + @classmethod + def _do_register(cls, from_type: Type, converter: Any): + cls.registry[from_type] = converter # pyre-ignore[16] + + @classmethod + def _lookup_converter(cls, from_type: Type) -> Any: + """ + Perform recursive lookup for the given type + to find registered converter. If a converter was found for some base + class, it gets registered for this class to save on further lookups. + + Args: + from_type: type for which to find a converter + Return: + callable or None - registered converter or None + if no suitable entry was found in the registry + """ + if from_type in cls.registry: # pyre-ignore[16] + return cls.registry[from_type] + for base in from_type.__bases__: + converter = cls._lookup_converter(base) + if converter is not None: + cls._do_register(from_type, converter) + return converter + return None + + @classmethod + def convert(cls, instance: Any, *args, **kwargs): + """ + Convert an instance to the destination type using some registered + converter. Does recursive lookup for base classes, so there's no need + for explicit registration for derived classes. + + Args: + instance: source instance to convert to the destination type + Return: + An instance of the destination type obtained from the source instance + Raises KeyError, if no suitable converter found + """ + instance_type = type(instance) + converter = cls._lookup_converter(instance_type) + if converter is None: + if cls.dst_type is None: # pyre-ignore[16] + output_type_str = "itself" + else: + output_type_str = cls.dst_type + raise KeyError(f"Could not find converter from {instance_type} to {output_type_str}") + return converter(instance, *args, **kwargs) + + +IntTupleBox = Tuple[int, int, int, int] + + +def make_int_box(box: torch.Tensor) -> IntTupleBox: + int_box = [0, 0, 0, 0] + int_box[0], int_box[1], int_box[2], int_box[3] = tuple(box.long().tolist()) + return int_box[0], int_box[1], int_box[2], int_box[3] diff --git a/motion-gan-pipeline/preprocessing/third/densepose/converters/builtin.py b/motion-gan-pipeline/preprocessing/third/densepose/converters/builtin.py new file mode 100644 index 0000000..3bd48f8 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/converters/builtin.py @@ -0,0 +1,31 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from ..structures import DensePoseChartPredictorOutput, DensePoseEmbeddingPredictorOutput +from . 
import ( + HFlipConverter, + ToChartResultConverter, + ToChartResultConverterWithConfidences, + ToMaskConverter, + densepose_chart_predictor_output_hflip, + densepose_chart_predictor_output_to_result, + densepose_chart_predictor_output_to_result_with_confidences, + predictor_output_with_coarse_segm_to_mask, + predictor_output_with_fine_and_coarse_segm_to_mask, +) + +ToMaskConverter.register( + DensePoseChartPredictorOutput, predictor_output_with_fine_and_coarse_segm_to_mask +) +ToMaskConverter.register( + DensePoseEmbeddingPredictorOutput, predictor_output_with_coarse_segm_to_mask +) + +ToChartResultConverter.register( + DensePoseChartPredictorOutput, densepose_chart_predictor_output_to_result +) + +ToChartResultConverterWithConfidences.register( + DensePoseChartPredictorOutput, densepose_chart_predictor_output_to_result_with_confidences +) + +HFlipConverter.register(DensePoseChartPredictorOutput, densepose_chart_predictor_output_hflip) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/converters/chart_output_hflip.py b/motion-gan-pipeline/preprocessing/third/densepose/converters/chart_output_hflip.py new file mode 100644 index 0000000..17d2948 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/converters/chart_output_hflip.py @@ -0,0 +1,71 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from dataclasses import fields +import torch + +from densepose.structures import DensePoseChartPredictorOutput, DensePoseTransformData + + +def densepose_chart_predictor_output_hflip( + densepose_predictor_output: DensePoseChartPredictorOutput, + transform_data: DensePoseTransformData, +) -> DensePoseChartPredictorOutput: + """ + Adjust the predictor output to take a horizontal flip into account. + """ + if len(densepose_predictor_output) > 0: + + PredictorOutput = type(densepose_predictor_output) + output_dict = {} + + for field in fields(densepose_predictor_output): + field_value = getattr(densepose_predictor_output, field.name) + # flip tensors + if isinstance(field_value, torch.Tensor): + setattr(densepose_predictor_output, field.name, torch.flip(field_value, [3])) + + densepose_predictor_output = _flip_iuv_semantics_tensor( + densepose_predictor_output, transform_data + ) + densepose_predictor_output = _flip_segm_semantics_tensor( + densepose_predictor_output, transform_data + ) + + for field in fields(densepose_predictor_output): + output_dict[field.name] = getattr(densepose_predictor_output, field.name) + + return PredictorOutput(**output_dict) + else: + return densepose_predictor_output + + +def _flip_iuv_semantics_tensor( + densepose_predictor_output: DensePoseChartPredictorOutput, + dp_transform_data: DensePoseTransformData, +) -> DensePoseChartPredictorOutput: + point_label_symmetries = dp_transform_data.point_label_symmetries + uv_symmetries = dp_transform_data.uv_symmetries + + N, C, H, W = densepose_predictor_output.u.shape + u_loc = (densepose_predictor_output.u[:, 1:, :, :].clamp(0, 1) * 255).long() + v_loc = (densepose_predictor_output.v[:, 1:, :, :].clamp(0, 1) * 255).long() + Iindex = torch.arange(C - 1, device=densepose_predictor_output.u.device)[ + None, :, None, None + ].expand(N, C - 1, H, W) + densepose_predictor_output.u[:, 1:, :, :] = uv_symmetries["U_transforms"][Iindex, v_loc, u_loc] + densepose_predictor_output.v[:, 1:, :, :] = uv_symmetries["V_transforms"][Iindex, v_loc, u_loc] + + for el in ["fine_segm", "u", "v"]: + densepose_predictor_output.__dict__[el] = densepose_predictor_output.__dict__[el][ + :, point_label_symmetries, :, : + ] + return
densepose_predictor_output + + +def _flip_segm_semantics_tensor( + densepose_predictor_output: DensePoseChartPredictorOutput, dp_transform_data +): + if densepose_predictor_output.coarse_segm.shape[1] > 2: + densepose_predictor_output.coarse_segm = densepose_predictor_output.coarse_segm[ + :, dp_transform_data.mask_label_symmetries, :, : + ] + return densepose_predictor_output diff --git a/motion-gan-pipeline/preprocessing/third/densepose/converters/chart_output_to_chart_result.py b/motion-gan-pipeline/preprocessing/third/densepose/converters/chart_output_to_chart_result.py new file mode 100644 index 0000000..b589519 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/converters/chart_output_to_chart_result.py @@ -0,0 +1,189 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from typing import Dict +import torch +from torch.nn import functional as F + +from detectron2.structures.boxes import Boxes, BoxMode + +from ..structures import ( + DensePoseChartPredictorOutput, + DensePoseChartResult, + DensePoseChartResultWithConfidences, +) +from . import resample_fine_and_coarse_segm_to_bbox +from .base import IntTupleBox, make_int_box + + +def resample_uv_tensors_to_bbox( + u: torch.Tensor, + v: torch.Tensor, + labels: torch.Tensor, + box_xywh_abs: IntTupleBox, +) -> torch.Tensor: + """ + Resamples U and V coordinate estimates for the given bounding box + + Args: + u (tensor [1, C, H, W] of float): U coordinates + v (tensor [1, C, H, W] of float): V coordinates + labels (tensor [H, W] of long): labels obtained by resampling segmentation + outputs for the given bounding box + box_xywh_abs (tuple of 4 int): bounding box that corresponds to predictor outputs + Return: + Resampled U and V coordinates - a tensor [2, H, W] of float + """ + x, y, w, h = box_xywh_abs + w = max(int(w), 1) + h = max(int(h), 1) + # pyre-fixme[6]: Expected `Optional[int]` for 2nd param but got `Tuple[int, int]`. + u_bbox = F.interpolate(u, (h, w), mode="bilinear", align_corners=False) + # pyre-fixme[6]: Expected `Optional[int]` for 2nd param but got `Tuple[int, int]`. 
+ v_bbox = F.interpolate(v, (h, w), mode="bilinear", align_corners=False) + uv = torch.zeros([2, h, w], dtype=torch.float32, device=u.device) + for part_id in range(1, u_bbox.size(1)): + uv[0][labels == part_id] = u_bbox[0, part_id][labels == part_id] + uv[1][labels == part_id] = v_bbox[0, part_id][labels == part_id] + return uv + + +def resample_uv_to_bbox( + predictor_output: DensePoseChartPredictorOutput, + labels: torch.Tensor, + box_xywh_abs: IntTupleBox, +) -> torch.Tensor: + """ + Resamples U and V coordinate estimates for the given bounding box + + Args: + predictor_output (DensePoseChartPredictorOutput): DensePose predictor + output to be resampled + labels (tensor [H, W] of long): labels obtained by resampling segmentation + outputs for the given bounding box + box_xywh_abs (tuple of 4 int): bounding box that corresponds to predictor outputs + Return: + Resampled U and V coordinates - a tensor [2, H, W] of float + """ + return resample_uv_tensors_to_bbox( + predictor_output.u, + predictor_output.v, + labels, + box_xywh_abs, + ) + + +def densepose_chart_predictor_output_to_result( + predictor_output: DensePoseChartPredictorOutput, boxes: Boxes +) -> DensePoseChartResult: + """ + Convert densepose chart predictor outputs to results + + Args: + predictor_output (DensePoseChartPredictorOutput): DensePose predictor + output to be converted to results, must contain only 1 output + boxes (Boxes): bounding box that corresponds to the predictor output, + must contain only 1 bounding box + Return: + DensePose chart-based result (DensePoseChartResult) + """ + assert len(predictor_output) == 1 and len(boxes) == 1, ( + f"Predictor output to result conversion can operate only on single outputs" + f", got {len(predictor_output)} predictor outputs and {len(boxes)} boxes" + ) + + boxes_xyxy_abs = boxes.tensor.clone() + boxes_xywh_abs = BoxMode.convert(boxes_xyxy_abs, BoxMode.XYXY_ABS, BoxMode.XYWH_ABS) + box_xywh = make_int_box(boxes_xywh_abs[0]) + + labels = resample_fine_and_coarse_segm_to_bbox(predictor_output, box_xywh).squeeze(0) + uv = resample_uv_to_bbox(predictor_output, labels, box_xywh) + return DensePoseChartResult(labels=labels, uv=uv) + + +def resample_confidences_to_bbox( + predictor_output: DensePoseChartPredictorOutput, + labels: torch.Tensor, + box_xywh_abs: IntTupleBox, +) -> Dict[str, torch.Tensor]: + """ + Resamples confidences for the given bounding box + + Args: + predictor_output (DensePoseChartPredictorOutput): DensePose predictor + output to be resampled + labels (tensor [H, W] of long): labels obtained by resampling segmentation + outputs for the given bounding box + box_xywh_abs (tuple of 4 int): bounding box that corresponds to predictor outputs + Return: + Resampled confidences - a dict of [H, W] tensors of float + """ + + x, y, w, h = box_xywh_abs + w = max(int(w), 1) + h = max(int(h), 1) + + confidence_names = [ + "sigma_1", + "sigma_2", + "kappa_u", + "kappa_v", + "fine_segm_confidence", + "coarse_segm_confidence", + ] + confidence_results = {key: None for key in confidence_names} + confidence_names = [ + key for key in confidence_names if getattr(predictor_output, key) is not None + ] + confidence_base = torch.zeros([h, w], dtype=torch.float32, device=predictor_output.u.device) + + # assign data from channels that correspond to the labels + for key in confidence_names: + resampled_confidence = F.interpolate( + # pyre-fixme[6]: Expected `Optional[int]` for 2nd param but got + # `Tuple[int, int]`.
+ getattr(predictor_output, key), (h, w), mode="bilinear", align_corners=False + ) + result = confidence_base.clone() + for part_id in range(1, predictor_output.u.size(1)): + if resampled_confidence.size(1) != predictor_output.u.size(1): + # confidence is not part-based, don't try to fill it part by part + continue + result[labels == part_id] = resampled_confidence[0, part_id][labels == part_id] + + if resampled_confidence.size(1) != predictor_output.u.size(1): + # confidence is not part-based, fill the data with the first channel + # (targeted for segmentation confidences that have only 1 channel) + result = resampled_confidence[0, 0] + + confidence_results[key] = result + + return confidence_results # pyre-ignore[7] + + +def densepose_chart_predictor_output_to_result_with_confidences( + predictor_output: DensePoseChartPredictorOutput, boxes: Boxes +) -> DensePoseChartResultWithConfidences: + """ + Convert densepose chart predictor outputs to results + + Args: + predictor_output (DensePoseChartPredictorOutput): DensePose predictor + output with confidences to be converted to results, must contain only 1 output + boxes (Boxes): bounding box that corresponds to the predictor output, + must contain only 1 bounding box + Return: + DensePose chart-based result with confidences (DensePoseChartResultWithConfidences) + """ + assert len(predictor_output) == 1 and len(boxes) == 1, ( + f"Predictor output to result conversion can operate only on single outputs" + f", got {len(predictor_output)} predictor outputs and {len(boxes)} boxes" + ) + + boxes_xyxy_abs = boxes.tensor.clone() + boxes_xywh_abs = BoxMode.convert(boxes_xyxy_abs, BoxMode.XYXY_ABS, BoxMode.XYWH_ABS) + box_xywh = make_int_box(boxes_xywh_abs[0]) + + labels = resample_fine_and_coarse_segm_to_bbox(predictor_output, box_xywh).squeeze(0) + uv = resample_uv_to_bbox(predictor_output, labels, box_xywh) + confidences = resample_confidences_to_bbox(predictor_output, labels, box_xywh) + return DensePoseChartResultWithConfidences(labels=labels, uv=uv, **confidences) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/converters/hflip.py b/motion-gan-pipeline/preprocessing/third/densepose/converters/hflip.py new file mode 100644 index 0000000..092c50c --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/converters/hflip.py @@ -0,0 +1,32 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from typing import Any + +from .base import BaseConverter + + +class HFlipConverter(BaseConverter): + """ + Converts various DensePose predictor outputs to DensePose results. + Each DensePose predictor output type has to register its conversion strategy. + """ + + registry = {} + dst_type = None + + @classmethod + def convert(cls, predictor_outputs: Any, transform_data: Any, *args, **kwargs): + """ + Performs a horizontal flip on DensePose predictor outputs. + Does recursive lookup for base classes, so there's no need + for explicit registration for derived classes.
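+ + Example (an illustrative sketch; assumes `outputs` is a DensePose + predictor output and `transform_data` a `DensePoseTransformData`): + + flipped = HFlipConverter.convert(outputs, transform_data)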
+ + Args: + predictor_outputs: DensePose predictor output to be horizontally flipped + transform_data: Anything useful for the flip + Return: + An instance of the same type as predictor_outputs + """ + return super(HFlipConverter, cls).convert( + predictor_outputs, transform_data, *args, **kwargs + ) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/converters/segm_to_mask.py b/motion-gan-pipeline/preprocessing/third/densepose/converters/segm_to_mask.py new file mode 100644 index 0000000..e667b7f --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/converters/segm_to_mask.py @@ -0,0 +1,154 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from typing import Any +import torch +from torch.nn import functional as F + +from detectron2.structures import BitMasks, Boxes, BoxMode + +from .base import IntTupleBox, make_int_box +from .to_mask import ImageSizeType + + +def resample_coarse_segm_tensor_to_bbox(coarse_segm: torch.Tensor, box_xywh_abs: IntTupleBox): + """ + Resample coarse segmentation tensor to the given + bounding box and derive labels for each pixel of the bounding box + + Args: + coarse_segm: float tensor of shape [1, K, Hout, Wout] + box_xywh_abs (tuple of 4 int): bounding box given by its upper-left + corner coordinates, width (W) and height (H) + Return: + Labels for each pixel of the bounding box, a long tensor of size [1, H, W] + """ + x, y, w, h = box_xywh_abs + w = max(int(w), 1) + h = max(int(h), 1) + # pyre-fixme[16]: `Tensor` has no attribute `argmax`. + # pyre-fixme[6]: Expected `Optional[int]` for 2nd param but got `Tuple[int, int]`. + labels = F.interpolate(coarse_segm, (h, w), mode="bilinear", align_corners=False).argmax(dim=1) + return labels + + +def resample_fine_and_coarse_segm_tensors_to_bbox( + fine_segm: torch.Tensor, coarse_segm: torch.Tensor, box_xywh_abs: IntTupleBox +): + """ + Resample fine and coarse segmentation tensors to the given + bounding box and derive labels for each pixel of the bounding box + + Args: + fine_segm: float tensor of shape [1, C, Hout, Wout] + coarse_segm: float tensor of shape [1, K, Hout, Wout] + box_xywh_abs (tuple of 4 int): bounding box given by its upper-left + corner coordinates, width (W) and height (H) + Return: + Labels for each pixel of the bounding box, a long tensor of size [1, H, W] + """ + x, y, w, h = box_xywh_abs + w = max(int(w), 1) + h = max(int(h), 1) + # coarse segmentation + # pyre-fixme[16]: `Tensor` has no attribute `argmax`. + coarse_segm_bbox = F.interpolate( + # pyre-fixme[6]: Expected `Optional[int]` for 2nd param but got `Tuple[int, + # int]`. + coarse_segm, (h, w), mode="bilinear", align_corners=False + ).argmax(dim=1) + # combined coarse and fine segmentation + labels = ( + # pyre-fixme[6]: Expected `Optional[int]` for 2nd param but got `Tuple[int, + # int]`.
+ F.interpolate(fine_segm, (h, w), mode="bilinear", align_corners=False).argmax(dim=1) + * (coarse_segm_bbox > 0).long() + ) + return labels + + +def resample_fine_and_coarse_segm_to_bbox(predictor_output: Any, box_xywh_abs: IntTupleBox): + """ + Resample fine and coarse segmentation outputs from a predictor to the given + bounding box and derive labels for each pixel of the bounding box + + Args: + predictor_output: DensePose predictor output that contains segmentation + results to be resampled + box_xywh_abs (tuple of 4 int): bounding box given by its upper-left + corner coordinates, width (W) and height (H) + Return: + Labels for each pixel of the bounding box, a long tensor of size [1, H, W] + """ + return resample_fine_and_coarse_segm_tensors_to_bbox( + predictor_output.fine_segm, + predictor_output.coarse_segm, + box_xywh_abs, + ) + + +def predictor_output_with_coarse_segm_to_mask( + predictor_output: Any, boxes: Boxes, image_size_hw: ImageSizeType +) -> BitMasks: + """ + Convert predictor output with coarse segmentation to a mask. + Assumes that predictor output has the following attributes: + - coarse_segm (tensor of size [N, D, H, W]): coarse segmentation + unnormalized scores for N instances; D is the number of coarse + segmentation labels, H and W is the resolution of the estimate + + Args: + predictor_output: DensePose predictor output to be converted to mask + boxes (Boxes): bounding boxes that correspond to the DensePose + predictor outputs + image_size_hw (tuple [int, int]): image height Himg and width Wimg + Return: + BitMasks that contain a bool tensor of size [N, Himg, Wimg] with + a mask of the size of the image for each instance + """ + H, W = image_size_hw + boxes_xyxy_abs = boxes.tensor.clone() + boxes_xywh_abs = BoxMode.convert(boxes_xyxy_abs, BoxMode.XYXY_ABS, BoxMode.XYWH_ABS) + N = len(boxes_xywh_abs) + masks = torch.zeros((N, H, W), dtype=torch.bool, device=boxes.tensor.device) + for i in range(len(boxes_xywh_abs)): + box_xywh = make_int_box(boxes_xywh_abs[i]) + box_mask = resample_coarse_segm_tensor_to_bbox(predictor_output[i].coarse_segm, box_xywh) + x, y, w, h = box_xywh + masks[i, y : y + h, x : x + w] = box_mask + + return BitMasks(masks) + + +def predictor_output_with_fine_and_coarse_segm_to_mask( + predictor_output: Any, boxes: Boxes, image_size_hw: ImageSizeType +) -> BitMasks: + """ + Convert predictor output with coarse and fine segmentation to a mask.
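+ A pixel belongs to the mask when its fine segmentation argmax label is + non-zero and the coarse segmentation classifies it as foreground (see + `resample_fine_and_coarse_segm_tensors_to_bbox` above).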
+    Assumes that predictor output has the following attributes:
+     - coarse_segm (tensor of size [N, D, H, W]): coarse segmentation
+         unnormalized scores for N instances; D is the number of coarse
+         segmentation labels, H and W is the resolution of the estimate
+     - fine_segm (tensor of size [N, C, H, W]): fine segmentation
+         unnormalized scores for N instances; C is the number of fine
+         segmentation labels, H and W is the resolution of the estimate
+
+    Args:
+        predictor_output: DensePose predictor output to be converted to mask
+        boxes (Boxes): bounding boxes that correspond to the DensePose
+            predictor outputs
+        image_size_hw (tuple [int, int]): image height Himg and width Wimg
+    Return:
+        BitMasks that contain a bool tensor of size [N, Himg, Wimg] with
+        a mask of the size of the image for each instance
+    """
+    H, W = image_size_hw
+    boxes_xyxy_abs = boxes.tensor.clone()
+    boxes_xywh_abs = BoxMode.convert(boxes_xyxy_abs, BoxMode.XYXY_ABS, BoxMode.XYWH_ABS)
+    N = len(boxes_xywh_abs)
+    masks = torch.zeros((N, H, W), dtype=torch.bool, device=boxes.tensor.device)
+    for i in range(len(boxes_xywh_abs)):
+        box_xywh = make_int_box(boxes_xywh_abs[i])
+        labels_i = resample_fine_and_coarse_segm_to_bbox(predictor_output[i], box_xywh)
+        x, y, w, h = box_xywh
+        masks[i, y : y + h, x : x + w] = labels_i > 0
+    return BitMasks(masks)
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/converters/to_chart_result.py b/motion-gan-pipeline/preprocessing/third/densepose/converters/to_chart_result.py
new file mode 100644
index 0000000..f96da3c
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/converters/to_chart_result.py
@@ -0,0 +1,66 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+from typing import Any
+
+from detectron2.structures import Boxes
+
+from ..structures import DensePoseChartResult, DensePoseChartResultWithConfidences
+from .base import BaseConverter
+
+
+class ToChartResultConverter(BaseConverter):
+    """
+    Converts various DensePose predictor outputs to DensePose results.
+    Each DensePose predictor output type has to register its conversion strategy.
+    """
+
+    registry = {}
+    dst_type = DensePoseChartResult
+
+    @classmethod
+    def convert(cls, predictor_outputs: Any, boxes: Boxes, *args, **kwargs) -> DensePoseChartResult:
+        """
+        Convert DensePose predictor outputs to DensePoseChartResult using some registered
+        converter. Does recursive lookup for base classes, so there's no need
+        for explicit registration for derived classes.
+
+        Args:
+            predictor_outputs: DensePose predictor output to be
+                converted to a chart result
+            boxes (Boxes): bounding boxes that correspond to the DensePose
+                predictor outputs
+        Return:
+            An instance of DensePoseChartResult. If no suitable converter was found, raises KeyError
+        """
+        return super(ToChartResultConverter, cls).convert(predictor_outputs, boxes, *args, **kwargs)
+
+
+class ToChartResultConverterWithConfidences(BaseConverter):
+    """
+    Converts various DensePose predictor outputs to DensePose results.
+    Each DensePose predictor output type has to register its conversion strategy.
+    """
+
+    registry = {}
+    dst_type = DensePoseChartResultWithConfidences
+
+    @classmethod
+    def convert(
+        cls, predictor_outputs: Any, boxes: Boxes, *args, **kwargs
+    ) -> DensePoseChartResultWithConfidences:
+        """
+        Convert DensePose predictor outputs to DensePoseChartResultWithConfidences
+        using some registered converter. Does recursive lookup for base classes,
+        so there's no need for explicit registration for derived classes.
+
+        Args:
+            predictor_outputs: DensePose predictor output with confidences
+                to be converted to a chart result
+            boxes (Boxes): bounding boxes that correspond to the DensePose
+                predictor outputs
+        Return:
+            An instance of DensePoseChartResultWithConfidences. If no suitable converter
+            was found, raises KeyError
+        """
+        return super(ToChartResultConverterWithConfidences, cls).convert(
+            predictor_outputs, boxes, *args, **kwargs
+        )
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/converters/to_mask.py b/motion-gan-pipeline/preprocessing/third/densepose/converters/to_mask.py
new file mode 100644
index 0000000..9d8152b
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/converters/to_mask.py
@@ -0,0 +1,47 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+from typing import Any, Tuple
+
+from detectron2.structures import BitMasks, Boxes
+
+from .base import BaseConverter
+
+ImageSizeType = Tuple[int, int]
+
+
+class ToMaskConverter(BaseConverter):
+    """
+    Converts various DensePose predictor outputs to masks
+    in bit mask format (see `BitMasks`). Each DensePose predictor output type
+    has to register its conversion strategy.
+    """
+
+    registry = {}
+    dst_type = BitMasks
+
+    @classmethod
+    def convert(
+        cls,
+        densepose_predictor_outputs: Any,
+        boxes: Boxes,
+        image_size_hw: ImageSizeType,
+        *args,
+        **kwargs
+    ) -> BitMasks:
+        """
+        Convert DensePose predictor outputs to BitMasks using some registered
+        converter. Does recursive lookup for base classes, so there's no need
+        for explicit registration for derived classes.
+
+        Args:
+            densepose_predictor_outputs: DensePose predictor output to be
+                converted to BitMasks
+            boxes (Boxes): bounding boxes that correspond to the DensePose
+                predictor outputs
+            image_size_hw (tuple [int, int]): image height and width
+        Return:
+            An instance of `BitMasks`. If no suitable converter was found, raises KeyError
+        """
+        return super(ToMaskConverter, cls).convert(
+            densepose_predictor_outputs, boxes, image_size_hw, *args, **kwargs
+        )
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/__init__.py b/motion-gan-pipeline/preprocessing/third/densepose/data/__init__.py
new file mode 100644
index 0000000..bf21ba7
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/data/__init__.py
@@ -0,0 +1,25 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+from .meshes import builtin
+from .build import (
+    build_detection_test_loader,
+    build_detection_train_loader,
+    build_combined_loader,
+    build_frame_selector,
+    build_inference_based_loaders,
+    has_inference_based_loaders,
+    BootstrapDatasetFactoryCatalog,
+)
+from .combined_loader import CombinedDataLoader
+from .dataset_mapper import DatasetMapper
+from .inference_based_loader import InferenceBasedLoader, ScoreBasedFilter
+from .image_list_dataset import ImageListDataset
+from .utils import is_relative_local_path, maybe_prepend_base_path
+
+# ensure the builtin datasets are registered
+from . import datasets
+
+# ensure the bootstrap datasets builders are registered
+from . import build
+
+__all__ = [k for k in globals().keys() if not k.startswith("_")]
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/build.py b/motion-gan-pipeline/preprocessing/third/densepose/data/build.py
new file mode 100644
index 0000000..db709d5
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/data/build.py
@@ -0,0 +1,734 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
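+# Illustrative usage sketch (assumes the standard DensePose config helpers are
+# available and that a config file exists at the path below, which is only an
+# example):
+#
+#   from detectron2.config import get_cfg
+#   from densepose import add_densepose_config
+#   from densepose.data import build_detection_train_loader
+#
+#   cfg = get_cfg()
+#   add_densepose_config(cfg)
+#   cfg.merge_from_file("configs/densepose_rcnn_R_50_FPN_s1x.yaml")
+#   loader = build_detection_train_loader(cfg)  # infinite iterator over batches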
+ +import itertools +import logging +import numpy as np +from collections import UserDict, defaultdict +from dataclasses import dataclass +from typing import Any, Callable, Collection, Dict, Iterable, List, Optional, Sequence, Tuple +import torch +from torch.utils.data.dataset import Dataset + +from detectron2.config import CfgNode +from detectron2.data.build import build_detection_test_loader as d2_build_detection_test_loader +from detectron2.data.build import build_detection_train_loader as d2_build_detection_train_loader +from detectron2.data.build import ( + load_proposals_into_dataset, + print_instances_class_histogram, + trivial_batch_collator, + worker_init_reset_seed, +) +from detectron2.data.catalog import DatasetCatalog, Metadata, MetadataCatalog +from detectron2.data.samplers import TrainingSampler +from detectron2.utils.comm import get_world_size + +from densepose.config import get_bootstrap_dataset_config +from densepose.modeling import build_densepose_embedder + +from .combined_loader import CombinedDataLoader, Loader +from .dataset_mapper import DatasetMapper +from .datasets.coco import DENSEPOSE_CSE_KEYS_WITHOUT_MASK, DENSEPOSE_IUV_KEYS_WITHOUT_MASK +from .datasets.dataset_type import DatasetType +from .inference_based_loader import InferenceBasedLoader, ScoreBasedFilter +from .samplers import ( + DensePoseConfidenceBasedSampler, + DensePoseCSEConfidenceBasedSampler, + DensePoseCSEUniformSampler, + DensePoseUniformSampler, + MaskFromDensePoseSampler, + PredictionToGroundTruthSampler, +) +from .transform import ImageResizeTransform +from .utils import get_category_to_class_mapping, get_class_to_mesh_name_mapping +from .video import ( + FirstKFramesSelector, + FrameSelectionStrategy, + LastKFramesSelector, + RandomKFramesSelector, + VideoKeyframeDataset, + video_list_from_file, +) + +__all__ = ["build_detection_train_loader", "build_detection_test_loader"] + + +Instance = Dict[str, Any] +InstancePredicate = Callable[[Instance], bool] + + +def _compute_num_images_per_worker(cfg: CfgNode) -> int: + num_workers = get_world_size() + images_per_batch = cfg.SOLVER.IMS_PER_BATCH + assert ( + images_per_batch % num_workers == 0 + ), "SOLVER.IMS_PER_BATCH ({}) must be divisible by the number of workers ({}).".format( + images_per_batch, num_workers + ) + assert ( + images_per_batch >= num_workers + ), "SOLVER.IMS_PER_BATCH ({}) must be larger than the number of workers ({}).".format( + images_per_batch, num_workers + ) + images_per_worker = images_per_batch // num_workers + return images_per_worker + + +def _map_category_id_to_contiguous_id(dataset_name: str, dataset_dicts: Iterable[Instance]) -> None: + meta = MetadataCatalog.get(dataset_name) + for dataset_dict in dataset_dicts: + for ann in dataset_dict["annotations"]: + ann["category_id"] = meta.thing_dataset_id_to_contiguous_id[ann["category_id"]] + + +@dataclass +class _DatasetCategory: + """ + Class representing category data in a dataset: + - id: category ID, as specified in the dataset annotations file + - name: category name, as specified in the dataset annotations file + - mapped_id: category ID after applying category maps (DATASETS.CATEGORY_MAPS config option) + - mapped_name: category name after applying category maps + - dataset_name: dataset in which the category is defined + + For example, when training models in a class-agnostic manner, one could take LVIS 1.0 + dataset and map the animal categories to the same category as human data from COCO: + id = 225 + name = "cat" + mapped_id = 1 + mapped_name = "person" + 
dataset_name = "lvis_v1_animals_dp_train" + """ + + id: int + name: str + mapped_id: int + mapped_name: str + dataset_name: str + + +_MergedCategoriesT = Dict[int, List[_DatasetCategory]] + + +def _add_category_id_to_contiguous_id_maps_to_metadata(merged_categories: _MergedCategoriesT) -> None: + merged_categories_per_dataset = {} + for contiguous_cat_id, cat_id in enumerate(sorted(merged_categories.keys())): + for cat in merged_categories[cat_id]: + if cat.dataset_name not in merged_categories_per_dataset: + merged_categories_per_dataset[cat.dataset_name] = defaultdict(list) + merged_categories_per_dataset[cat.dataset_name][cat_id].append( + ( + contiguous_cat_id, + cat, + ) + ) + + logger = logging.getLogger(__name__) + for dataset_name, merged_categories in merged_categories_per_dataset.items(): + meta = MetadataCatalog.get(dataset_name) + if not hasattr(meta, "thing_classes"): + meta.thing_classes = [] + meta.thing_dataset_id_to_contiguous_id = {} + meta.thing_dataset_id_to_merged_id = {} + else: + meta.thing_classes.clear() + meta.thing_dataset_id_to_contiguous_id.clear() + meta.thing_dataset_id_to_merged_id.clear() + logger.info(f"Dataset {dataset_name}: category ID to contiguous ID mapping:") + for _cat_id, categories in sorted(merged_categories.items()): + added_to_thing_classes = False + for contiguous_cat_id, cat in categories: + if not added_to_thing_classes: + meta.thing_classes.append(cat.mapped_name) + added_to_thing_classes = True + meta.thing_dataset_id_to_contiguous_id[cat.id] = contiguous_cat_id + meta.thing_dataset_id_to_merged_id[cat.id] = cat.mapped_id + logger.info(f"{cat.id} ({cat.name}) -> {contiguous_cat_id}") + + +def _maybe_create_general_keep_instance_predicate(cfg: CfgNode) -> Optional[InstancePredicate]: + def has_annotations(instance: Instance) -> bool: + return "annotations" in instance + + def has_only_crowd_anotations(instance: Instance) -> bool: + for ann in instance["annotations"]: + if ann.get("is_crowd", 0) == 0: + return False + return True + + def general_keep_instance_predicate(instance: Instance) -> bool: + return has_annotations(instance) and not has_only_crowd_anotations(instance) + + if not cfg.DATALOADER.FILTER_EMPTY_ANNOTATIONS: + return None + return general_keep_instance_predicate + + +def _maybe_create_keypoints_keep_instance_predicate(cfg: CfgNode) -> Optional[InstancePredicate]: + + min_num_keypoints = cfg.MODEL.ROI_KEYPOINT_HEAD.MIN_KEYPOINTS_PER_IMAGE + + def has_sufficient_num_keypoints(instance: Instance) -> bool: + num_kpts = sum( + (np.array(ann["keypoints"][2::3]) > 0).sum() + for ann in instance["annotations"] + if "keypoints" in ann + ) + return num_kpts >= min_num_keypoints + + if cfg.MODEL.KEYPOINT_ON and (min_num_keypoints > 0): + return has_sufficient_num_keypoints + return None + + +def _maybe_create_mask_keep_instance_predicate(cfg: CfgNode) -> Optional[InstancePredicate]: + if not cfg.MODEL.MASK_ON: + return None + + def has_mask_annotations(instance: Instance) -> bool: + return any("segmentation" in ann for ann in instance["annotations"]) + + return has_mask_annotations + + +def _maybe_create_densepose_keep_instance_predicate(cfg: CfgNode) -> Optional[InstancePredicate]: + if not cfg.MODEL.DENSEPOSE_ON: + return None + + use_masks = cfg.MODEL.ROI_DENSEPOSE_HEAD.COARSE_SEGM_TRAINED_BY_MASKS + + def has_densepose_annotations(instance: Instance) -> bool: + for ann in instance["annotations"]: + if all(key in ann for key in DENSEPOSE_IUV_KEYS_WITHOUT_MASK) or all( + key in ann for key in DENSEPOSE_CSE_KEYS_WITHOUT_MASK + ): + 
return True + if use_masks and "segmentation" in ann: + return True + return False + + return has_densepose_annotations + + +def _maybe_create_specific_keep_instance_predicate(cfg: CfgNode) -> Optional[InstancePredicate]: + specific_predicate_creators = [ + _maybe_create_keypoints_keep_instance_predicate, + _maybe_create_mask_keep_instance_predicate, + _maybe_create_densepose_keep_instance_predicate, + ] + predicates = [creator(cfg) for creator in specific_predicate_creators] + predicates = [p for p in predicates if p is not None] + if not predicates: + return None + + def combined_predicate(instance: Instance) -> bool: + return any(p(instance) for p in predicates) + + return combined_predicate + + +def _get_train_keep_instance_predicate(cfg: CfgNode): + general_keep_predicate = _maybe_create_general_keep_instance_predicate(cfg) + combined_specific_keep_predicate = _maybe_create_specific_keep_instance_predicate(cfg) + + def combined_general_specific_keep_predicate(instance: Instance) -> bool: + return general_keep_predicate(instance) and combined_specific_keep_predicate(instance) + + if (general_keep_predicate is None) and (combined_specific_keep_predicate is None): + return None + if general_keep_predicate is None: + return combined_specific_keep_predicate + if combined_specific_keep_predicate is None: + return general_keep_predicate + return combined_general_specific_keep_predicate + + +def _get_test_keep_instance_predicate(cfg: CfgNode): + general_keep_predicate = _maybe_create_general_keep_instance_predicate(cfg) + return general_keep_predicate + + +def _maybe_filter_and_map_categories( + dataset_name: str, dataset_dicts: List[Instance] +) -> List[Instance]: + meta = MetadataCatalog.get(dataset_name) + category_id_map = meta.thing_dataset_id_to_contiguous_id + filtered_dataset_dicts = [] + for dataset_dict in dataset_dicts: + anns = [] + for ann in dataset_dict["annotations"]: + cat_id = ann["category_id"] + if cat_id not in category_id_map: + continue + ann["category_id"] = category_id_map[cat_id] + anns.append(ann) + dataset_dict["annotations"] = anns + filtered_dataset_dicts.append(dataset_dict) + return filtered_dataset_dicts + + +def _add_category_whitelists_to_metadata(cfg: CfgNode) -> None: + for dataset_name, whitelisted_cat_ids in cfg.DATASETS.WHITELISTED_CATEGORIES.items(): + meta = MetadataCatalog.get(dataset_name) + meta.whitelisted_categories = whitelisted_cat_ids + logger = logging.getLogger(__name__) + logger.info( + "Whitelisted categories for dataset {}: {}".format( + dataset_name, meta.whitelisted_categories + ) + ) + + +def _add_category_maps_to_metadata(cfg: CfgNode) -> None: + for dataset_name, category_map in cfg.DATASETS.CATEGORY_MAPS.items(): + category_map = { + int(cat_id_src): int(cat_id_dst) for cat_id_src, cat_id_dst in category_map.items() + } + meta = MetadataCatalog.get(dataset_name) + meta.category_map = category_map + logger = logging.getLogger(__name__) + logger.info("Category maps for dataset {}: {}".format(dataset_name, meta.category_map)) + + +def _add_category_info_to_bootstrapping_metadata(dataset_name: str, dataset_cfg: CfgNode) -> None: + meta = MetadataCatalog.get(dataset_name) + meta.category_to_class_mapping = get_category_to_class_mapping(dataset_cfg) + meta.categories = dataset_cfg.CATEGORIES + meta.max_count_per_category = dataset_cfg.MAX_COUNT_PER_CATEGORY + logger = logging.getLogger(__name__) + logger.info( + "Category to class mapping for dataset {}: {}".format( + dataset_name, meta.category_to_class_mapping + ) + ) + + +def 
_maybe_add_class_to_mesh_name_map_to_metadata(dataset_names: List[str], cfg: CfgNode) -> None:
+    for dataset_name in dataset_names:
+        meta = MetadataCatalog.get(dataset_name)
+        if not hasattr(meta, "class_to_mesh_name"):
+            meta.class_to_mesh_name = get_class_to_mesh_name_mapping(cfg)
+
+
+def _merge_categories(dataset_names: Collection[str]) -> _MergedCategoriesT:
+    merged_categories = defaultdict(list)
+    category_names = {}
+    for dataset_name in dataset_names:
+        meta = MetadataCatalog.get(dataset_name)
+        whitelisted_categories = meta.get("whitelisted_categories")
+        category_map = meta.get("category_map", {})
+        cat_ids = (
+            whitelisted_categories if whitelisted_categories is not None else meta.categories.keys()
+        )
+        for cat_id in cat_ids:
+            cat_name = meta.categories[cat_id]
+            cat_id_mapped = category_map.get(cat_id, cat_id)
+            if cat_id_mapped == cat_id or cat_id_mapped in cat_ids:
+                category_names[cat_id] = cat_name
+            else:
+                category_names[cat_id] = str(cat_id_mapped)
+            # assign temporary mapped category name, this name can be changed
+            # during the second pass, since mapped ID can correspond to a category
+            # from a different dataset
+            cat_name_mapped = meta.categories[cat_id_mapped]
+            merged_categories[cat_id_mapped].append(
+                _DatasetCategory(
+                    id=cat_id,
+                    name=cat_name,
+                    mapped_id=cat_id_mapped,
+                    mapped_name=cat_name_mapped,
+                    dataset_name=dataset_name,
+                )
+            )
+    # second pass to assign proper mapped category names
+    for cat_id, categories in merged_categories.items():
+        for cat in categories:
+            if cat_id in category_names and cat.mapped_name != category_names[cat_id]:
+                cat.mapped_name = category_names[cat_id]
+
+    return merged_categories
+
+
+def _warn_if_merged_different_categories(merged_categories: _MergedCategoriesT) -> None:
+    logger = logging.getLogger(__name__)
+    for cat_id in merged_categories:
+        merged_categories_i = merged_categories[cat_id]
+        first_cat_name = merged_categories_i[0].name
+        if len(merged_categories_i) > 1 and not all(
+            cat.name == first_cat_name for cat in merged_categories_i[1:]
+        ):
+            cat_summary_str = ", ".join(
+                [f"{cat.id} ({cat.name}) from {cat.dataset_name}" for cat in merged_categories_i]
+            )
+            logger.warning(
+                f"Merged category {cat_id} corresponds to the following categories: "
+                f"{cat_summary_str}"
+            )
+
+
+def combine_detection_dataset_dicts(
+    dataset_names: Collection[str],
+    keep_instance_predicate: Optional[InstancePredicate] = None,
+    proposal_files: Optional[Collection[str]] = None,
+) -> List[Instance]:
+    """
+    Load and prepare dataset dicts for training / testing
+
+    Args:
+        dataset_names (Collection[str]): a list of dataset names
+        keep_instance_predicate (Callable: Dict[str, Any] -> bool): predicate
+            applied to instance dicts which defines whether to keep the instance
+        proposal_files (Collection[str]): if given, a list of object proposal files
+            that match each dataset in `dataset_names`.
+    """
+    assert len(dataset_names)
+    if proposal_files is None:
+        proposal_files = [None] * len(dataset_names)
+    assert len(dataset_names) == len(proposal_files)
+    # load datasets and metadata
+    dataset_name_to_dicts = {}
+    for dataset_name in dataset_names:
+        dataset_name_to_dicts[dataset_name] = DatasetCatalog.get(dataset_name)
+        assert len(dataset_name_to_dicts[dataset_name]), f"Dataset '{dataset_name}' is empty!"
+    # merge categories, requires category metadata to be loaded
+    # cat_id -> [(orig_cat_id, cat_name, dataset_name)]
+    merged_categories = _merge_categories(dataset_names)
+    _warn_if_merged_different_categories(merged_categories)
+    merged_category_names = [
+        merged_categories[cat_id][0].mapped_name for cat_id in sorted(merged_categories)
+    ]
+    # map to contiguous category IDs
+    _add_category_id_to_contiguous_id_maps_to_metadata(merged_categories)
+    # load annotations and dataset metadata
+    for dataset_name, proposal_file in zip(dataset_names, proposal_files):
+        dataset_dicts = dataset_name_to_dicts[dataset_name]
+        assert len(dataset_dicts), f"Dataset '{dataset_name}' is empty!"
+        if proposal_file is not None:
+            dataset_dicts = load_proposals_into_dataset(dataset_dicts, proposal_file)
+        dataset_dicts = _maybe_filter_and_map_categories(dataset_name, dataset_dicts)
+        print_instances_class_histogram(dataset_dicts, merged_category_names)
+        dataset_name_to_dicts[dataset_name] = dataset_dicts
+
+    if keep_instance_predicate is not None:
+        all_datasets_dicts_plain = [
+            d
+            for d in itertools.chain.from_iterable(dataset_name_to_dicts.values())
+            if keep_instance_predicate(d)
+        ]
+    else:
+        all_datasets_dicts_plain = list(
+            itertools.chain.from_iterable(dataset_name_to_dicts.values())
+        )
+    return all_datasets_dicts_plain
+
+
+def build_detection_train_loader(cfg: CfgNode, mapper=None):
+    """
+    A data loader is created in a way similar to that of Detectron2.
+    The main differences are:
+     - it allows combining datasets with different but compatible object category sets
+
+    The data loader is created by the following steps:
+    1. Use the dataset names in config to query :class:`DatasetCatalog`, and obtain a list of dicts.
+    2. Start workers to work on the dicts. Each worker will:
+        * Map each metadata dict into another format to be consumed by the model.
+        * Batch them by simply putting dicts into a list.
+    The batched ``list[mapped_dict]`` is what this dataloader will return.
+
+    Args:
+        cfg (CfgNode): the config
+        mapper (callable): a callable which takes a sample (dict) from dataset and
+            returns the format to be consumed by the model.
+            By default it will be `DatasetMapper(cfg, True)`.
+
+    Returns:
+        an infinite iterator of training data
+    """
+
+    _add_category_whitelists_to_metadata(cfg)
+    _add_category_maps_to_metadata(cfg)
+    _maybe_add_class_to_mesh_name_map_to_metadata(cfg.DATASETS.TRAIN, cfg)
+    dataset_dicts = combine_detection_dataset_dicts(
+        cfg.DATASETS.TRAIN,
+        keep_instance_predicate=_get_train_keep_instance_predicate(cfg),
+        proposal_files=cfg.DATASETS.PROPOSAL_FILES_TRAIN if cfg.MODEL.LOAD_PROPOSALS else None,
+    )
+    if mapper is None:
+        mapper = DatasetMapper(cfg, True)
+    return d2_build_detection_train_loader(cfg, dataset=dataset_dicts, mapper=mapper)
+
+
+def build_detection_test_loader(cfg, dataset_name, mapper=None):
+    """
+    Similar to `build_detection_train_loader`.
+    But this function uses the given `dataset_name` argument (instead of the names in cfg),
+    and uses batch size 1.
+
+    Args:
+        cfg: a detectron2 CfgNode
+        dataset_name (str): a name of the dataset that's available in the DatasetCatalog
+        mapper (callable): a callable which takes a sample (dict) from dataset
+            and returns the format to be consumed by the model.
+            By default it will be `DatasetMapper(cfg, False)`.
+
+    Returns:
+        DataLoader: a torch DataLoader that loads the given detection
+        dataset, with test-time transformation and batching.
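+
+    A minimal usage sketch (assuming "densepose_coco_2014_minival" is a
+    registered dataset whose files are available, and `model` is a detectron2
+    model built from the same cfg):
+
+        loader = build_detection_test_loader(cfg, "densepose_coco_2014_minival")
+        for inputs in loader:  # batch size is 1
+            outputs = model(inputs)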
+ """ + _add_category_whitelists_to_metadata(cfg) + _add_category_maps_to_metadata(cfg) + _maybe_add_class_to_mesh_name_map_to_metadata([dataset_name], cfg) + dataset_dicts = combine_detection_dataset_dicts( + [dataset_name], + keep_instance_predicate=_get_test_keep_instance_predicate(cfg), + proposal_files=[ + cfg.DATASETS.PROPOSAL_FILES_TEST[list(cfg.DATASETS.TEST).index(dataset_name)] + ] + if cfg.MODEL.LOAD_PROPOSALS + else None, + ) + sampler = None + if not cfg.DENSEPOSE_EVALUATION.DISTRIBUTED_INFERENCE: + sampler = torch.utils.data.SequentialSampler(dataset_dicts) + if mapper is None: + mapper = DatasetMapper(cfg, False) + return d2_build_detection_test_loader( + dataset_dicts, mapper=mapper, num_workers=cfg.DATALOADER.NUM_WORKERS, sampler=sampler + ) + + +def build_frame_selector(cfg: CfgNode): + strategy = FrameSelectionStrategy(cfg.STRATEGY) + if strategy == FrameSelectionStrategy.RANDOM_K: + frame_selector = RandomKFramesSelector(cfg.NUM_IMAGES) + elif strategy == FrameSelectionStrategy.FIRST_K: + frame_selector = FirstKFramesSelector(cfg.NUM_IMAGES) + elif strategy == FrameSelectionStrategy.LAST_K: + frame_selector = LastKFramesSelector(cfg.NUM_IMAGES) + elif strategy == FrameSelectionStrategy.ALL: + frame_selector = None + # pyre-fixme[61]: `frame_selector` may not be initialized here. + return frame_selector + + +def build_transform(cfg: CfgNode, data_type: str): + if cfg.TYPE == "resize": + if data_type == "image": + return ImageResizeTransform(cfg.MIN_SIZE, cfg.MAX_SIZE) + raise ValueError(f"Unknown transform {cfg.TYPE} for data type {data_type}") + + +def build_combined_loader(cfg: CfgNode, loaders: Collection[Loader], ratios: Sequence[float]): + images_per_worker = _compute_num_images_per_worker(cfg) + return CombinedDataLoader(loaders, images_per_worker, ratios) + + +def build_bootstrap_dataset(dataset_name: str, cfg: CfgNode) -> Sequence[torch.Tensor]: + """ + Build dataset that provides data to bootstrap on + + Args: + dataset_name (str): Name of the dataset, needs to have associated metadata + to load the data + cfg (CfgNode): bootstrapping config + Returns: + Sequence[Tensor] - dataset that provides image batches, Tensors of size + [N, C, H, W] of type float32 + """ + logger = logging.getLogger(__name__) + _add_category_info_to_bootstrapping_metadata(dataset_name, cfg) + meta = MetadataCatalog.get(dataset_name) + factory = BootstrapDatasetFactoryCatalog.get(meta.dataset_type) + dataset = None + if factory is not None: + dataset = factory(meta, cfg) + if dataset is None: + logger.warning(f"Failed to create dataset {dataset_name} of type {meta.dataset_type}") + return dataset + + +def build_data_sampler(cfg: CfgNode, sampler_cfg: CfgNode, embedder: Optional[torch.nn.Module]): + if sampler_cfg.TYPE == "densepose_uniform": + data_sampler = PredictionToGroundTruthSampler() + # transform densepose pred -> gt + data_sampler.register_sampler( + "pred_densepose", + "gt_densepose", + DensePoseUniformSampler(count_per_class=sampler_cfg.COUNT_PER_CLASS), + ) + data_sampler.register_sampler("pred_densepose", "gt_masks", MaskFromDensePoseSampler()) + return data_sampler + elif sampler_cfg.TYPE == "densepose_UV_confidence": + data_sampler = PredictionToGroundTruthSampler() + # transform densepose pred -> gt + data_sampler.register_sampler( + "pred_densepose", + "gt_densepose", + DensePoseConfidenceBasedSampler( + confidence_channel="sigma_2", + count_per_class=sampler_cfg.COUNT_PER_CLASS, + search_proportion=0.5, + ), + ) + data_sampler.register_sampler("pred_densepose", 
"gt_masks", MaskFromDensePoseSampler()) + return data_sampler + elif sampler_cfg.TYPE == "densepose_fine_segm_confidence": + data_sampler = PredictionToGroundTruthSampler() + # transform densepose pred -> gt + data_sampler.register_sampler( + "pred_densepose", + "gt_densepose", + DensePoseConfidenceBasedSampler( + confidence_channel="fine_segm_confidence", + count_per_class=sampler_cfg.COUNT_PER_CLASS, + search_proportion=0.5, + ), + ) + data_sampler.register_sampler("pred_densepose", "gt_masks", MaskFromDensePoseSampler()) + return data_sampler + elif sampler_cfg.TYPE == "densepose_coarse_segm_confidence": + data_sampler = PredictionToGroundTruthSampler() + # transform densepose pred -> gt + data_sampler.register_sampler( + "pred_densepose", + "gt_densepose", + DensePoseConfidenceBasedSampler( + confidence_channel="coarse_segm_confidence", + count_per_class=sampler_cfg.COUNT_PER_CLASS, + search_proportion=0.5, + ), + ) + data_sampler.register_sampler("pred_densepose", "gt_masks", MaskFromDensePoseSampler()) + return data_sampler + elif sampler_cfg.TYPE == "densepose_cse_uniform": + assert embedder is not None + data_sampler = PredictionToGroundTruthSampler() + # transform densepose pred -> gt + data_sampler.register_sampler( + "pred_densepose", + "gt_densepose", + DensePoseCSEUniformSampler( + cfg=cfg, + use_gt_categories=sampler_cfg.USE_GROUND_TRUTH_CATEGORIES, + embedder=embedder, + count_per_class=sampler_cfg.COUNT_PER_CLASS, + ), + ) + data_sampler.register_sampler("pred_densepose", "gt_masks", MaskFromDensePoseSampler()) + return data_sampler + elif sampler_cfg.TYPE == "densepose_cse_coarse_segm_confidence": + assert embedder is not None + data_sampler = PredictionToGroundTruthSampler() + # transform densepose pred -> gt + data_sampler.register_sampler( + "pred_densepose", + "gt_densepose", + DensePoseCSEConfidenceBasedSampler( + cfg=cfg, + use_gt_categories=sampler_cfg.USE_GROUND_TRUTH_CATEGORIES, + embedder=embedder, + confidence_channel="coarse_segm_confidence", + count_per_class=sampler_cfg.COUNT_PER_CLASS, + search_proportion=0.5, + ), + ) + data_sampler.register_sampler("pred_densepose", "gt_masks", MaskFromDensePoseSampler()) + return data_sampler + + raise ValueError(f"Unknown data sampler type {sampler_cfg.TYPE}") + + +def build_data_filter(cfg: CfgNode): + if cfg.TYPE == "detection_score": + min_score = cfg.MIN_VALUE + return ScoreBasedFilter(min_score=min_score) + raise ValueError(f"Unknown data filter type {cfg.TYPE}") + + +def build_inference_based_loader( + cfg: CfgNode, + dataset_cfg: CfgNode, + model: torch.nn.Module, + embedder: Optional[torch.nn.Module] = None, +) -> InferenceBasedLoader: + """ + Constructs data loader based on inference results of a model. 
+ """ + dataset = build_bootstrap_dataset(dataset_cfg.DATASET, dataset_cfg.IMAGE_LOADER) + meta = MetadataCatalog.get(dataset_cfg.DATASET) + training_sampler = TrainingSampler(len(dataset)) + data_loader = torch.utils.data.DataLoader( + dataset, # pyre-ignore[6] + batch_size=dataset_cfg.IMAGE_LOADER.BATCH_SIZE, + sampler=training_sampler, + num_workers=dataset_cfg.IMAGE_LOADER.NUM_WORKERS, + collate_fn=trivial_batch_collator, + worker_init_fn=worker_init_reset_seed, + ) + return InferenceBasedLoader( + model, + data_loader=data_loader, + data_sampler=build_data_sampler(cfg, dataset_cfg.DATA_SAMPLER, embedder), + data_filter=build_data_filter(dataset_cfg.FILTER), + shuffle=True, + batch_size=dataset_cfg.INFERENCE.OUTPUT_BATCH_SIZE, + inference_batch_size=dataset_cfg.INFERENCE.INPUT_BATCH_SIZE, + category_to_class_mapping=meta.category_to_class_mapping, + ) + + +def has_inference_based_loaders(cfg: CfgNode) -> bool: + """ + Returns True, if at least one inferense-based loader must + be instantiated for training + """ + return len(cfg.BOOTSTRAP_DATASETS) > 0 + + +def build_inference_based_loaders( + cfg: CfgNode, model: torch.nn.Module +) -> Tuple[List[InferenceBasedLoader], List[float]]: + loaders = [] + ratios = [] + embedder = build_densepose_embedder(cfg).to(device=model.device) # pyre-ignore[16] + for dataset_spec in cfg.BOOTSTRAP_DATASETS: + dataset_cfg = get_bootstrap_dataset_config().clone() + dataset_cfg.merge_from_other_cfg(CfgNode(dataset_spec)) + loader = build_inference_based_loader(cfg, dataset_cfg, model, embedder) + loaders.append(loader) + ratios.append(dataset_cfg.RATIO) + return loaders, ratios + + +def build_video_list_dataset(meta: Metadata, cfg: CfgNode): + video_list_fpath = meta.video_list_fpath + video_base_path = meta.video_base_path + category = meta.category + if cfg.TYPE == "video_keyframe": + frame_selector = build_frame_selector(cfg.SELECT) + transform = build_transform(cfg.TRANSFORM, data_type="image") + video_list = video_list_from_file(video_list_fpath, video_base_path) + keyframe_helper_fpath = cfg.KEYFRAME_HELPER if hasattr(cfg, "KEYFRAME_HELPER") else None + return VideoKeyframeDataset( + video_list, category, frame_selector, transform, keyframe_helper_fpath + ) + + +class _BootstrapDatasetFactoryCatalog(UserDict): + """ + A global dictionary that stores information about bootstrapped datasets creation functions + from metadata and config, for diverse DatasetType + """ + + def register(self, dataset_type: DatasetType, factory: Callable[[Metadata, CfgNode], Dataset]): + """ + Args: + dataset_type (DatasetType): a DatasetType e.g. DatasetType.VIDEO_LIST + factory (Callable[Metadata, CfgNode]): a callable which takes Metadata and cfg + arguments and returns a dataset object. + """ + assert dataset_type not in self, "Dataset '{}' is already registered!".format(dataset_type) + self[dataset_type] = factory + + +BootstrapDatasetFactoryCatalog = _BootstrapDatasetFactoryCatalog() +BootstrapDatasetFactoryCatalog.register(DatasetType.VIDEO_LIST, build_video_list_dataset) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/combined_loader.py b/motion-gan-pipeline/preprocessing/third/densepose/data/combined_loader.py new file mode 100644 index 0000000..5bfbbde --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/data/combined_loader.py @@ -0,0 +1,44 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+
+import random
+from collections import deque
+from typing import Any, Collection, Deque, Iterable, Iterator, List, Sequence
+
+Loader = Iterable[Any]
+
+
+def _pooled_next(iterator: Iterator[Any], pool: Deque[Any]):
+    if not pool:
+        pool.extend(next(iterator))
+    return pool.popleft()
+
+
+class CombinedDataLoader:
+    """
+    Combines data loaders using the provided sampling ratios
+    """
+
+    BATCH_COUNT = 100
+
+    def __init__(self, loaders: Collection[Loader], batch_size: int, ratios: Sequence[float]):
+        self.loaders = loaders
+        self.batch_size = batch_size
+        self.ratios = ratios
+
+    def __iter__(self) -> Iterator[List[Any]]:
+        iters = [iter(loader) for loader in self.loaders]
+        indices = []
+        # a separate buffer per loader; a shared deque would mix their batches
+        pool = [deque() for _ in iters]
+        # infinite iterator, as in D2
+        while True:
+            if not indices:
+                # just a buffer of indices, its size doesn't matter
+                # as long as it's a multiple of batch_size
+                k = self.batch_size * self.BATCH_COUNT
+                indices = random.choices(range(len(self.loaders)), self.ratios, k=k)
+            try:
+                batch = [_pooled_next(iters[i], pool[i]) for i in indices[: self.batch_size]]
+            except StopIteration:
+                break
+            indices = indices[self.batch_size :]
+            yield batch
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/dataset_mapper.py b/motion-gan-pipeline/preprocessing/third/densepose/data/dataset_mapper.py
new file mode 100644
index 0000000..4f1c289
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/data/dataset_mapper.py
@@ -0,0 +1,168 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+import copy
+import logging
+from typing import Any, Dict, List, Tuple
+import torch
+
+from detectron2.data import MetadataCatalog
+from detectron2.data import detection_utils as utils
+from detectron2.data import transforms as T
+from detectron2.layers import ROIAlign
+from detectron2.structures import BoxMode
+from detectron2.utils.file_io import PathManager
+
+from densepose.structures import DensePoseDataRelative, DensePoseList, DensePoseTransformData
+
+
+def build_augmentation(cfg, is_train):
+    logger = logging.getLogger(__name__)
+    result = utils.build_augmentation(cfg, is_train)
+    if is_train:
+        random_rotation = T.RandomRotation(
+            cfg.INPUT.ROTATION_ANGLES, expand=False, sample_style="choice"
+        )
+        result.append(random_rotation)
+        logger.info("DensePose-specific augmentation used in training: " + str(random_rotation))
+    return result
+
+
+class DatasetMapper:
+    """
+    A customized version of `detectron2.data.DatasetMapper`
+    """
+
+    def __init__(self, cfg, is_train=True):
+        self.augmentation = build_augmentation(cfg, is_train)
+
+        # fmt: off
+        self.img_format = cfg.INPUT.FORMAT
+        self.mask_on = (
+            cfg.MODEL.MASK_ON or (
+                cfg.MODEL.DENSEPOSE_ON
+                and cfg.MODEL.ROI_DENSEPOSE_HEAD.COARSE_SEGM_TRAINED_BY_MASKS)
+        )
+        self.keypoint_on = cfg.MODEL.KEYPOINT_ON
+        self.densepose_on = cfg.MODEL.DENSEPOSE_ON
+        assert not cfg.MODEL.LOAD_PROPOSALS, "not supported yet"
+        # fmt: on
+        if self.keypoint_on and is_train:
+            # Flip only makes sense in training
+            self.keypoint_hflip_indices = utils.create_keypoint_hflip_indices(cfg.DATASETS.TRAIN)
+        else:
+            self.keypoint_hflip_indices = None
+
+        if self.densepose_on:
+            densepose_transform_srcs = [
+                MetadataCatalog.get(ds).densepose_transform_src
+                for ds in cfg.DATASETS.TRAIN + cfg.DATASETS.TEST
+            ]
+            assert len(densepose_transform_srcs) > 0
+            # TODO: check that DensePose transformation data is the same for
+            # all the datasets. Otherwise one would have to pass DB ID with
+            # each entry to select proper transformation data. For now, since
+            # all DensePose annotated data uses the same data semantics, we
+            # omit this check.
+            densepose_transform_data_fpath = PathManager.get_local_path(densepose_transform_srcs[0])
+            self.densepose_transform_data = DensePoseTransformData.load(
+                densepose_transform_data_fpath
+            )
+
+        self.is_train = is_train
+
+    def __call__(self, dataset_dict):
+        """
+        Args:
+            dataset_dict (dict): Metadata of one image, in Detectron2 Dataset format.
+
+        Returns:
+            dict: a format that builtin models in detectron2 accept
+        """
+        dataset_dict = copy.deepcopy(dataset_dict)  # it will be modified by code below
+        image = utils.read_image(dataset_dict["file_name"], format=self.img_format)
+        utils.check_image_size(dataset_dict, image)
+
+        image, transforms = T.apply_transform_gens(self.augmentation, image)
+        image_shape = image.shape[:2]  # h, w
+        dataset_dict["image"] = torch.as_tensor(image.transpose(2, 0, 1).astype("float32"))
+
+        if not self.is_train:
+            dataset_dict.pop("annotations", None)
+            return dataset_dict
+
+        for anno in dataset_dict["annotations"]:
+            if not self.mask_on:
+                anno.pop("segmentation", None)
+            if not self.keypoint_on:
+                anno.pop("keypoints", None)
+
+        # USER: Implement additional transformations if you have other types of data
+        # USER: Don't call transpose_densepose if you don't need it
+        annos = [
+            self._transform_densepose(
+                utils.transform_instance_annotations(
+                    obj, transforms, image_shape, keypoint_hflip_indices=self.keypoint_hflip_indices
+                ),
+                transforms,
+            )
+            for obj in dataset_dict.pop("annotations")
+            if obj.get("iscrowd", 0) == 0
+        ]
+
+        if self.mask_on:
+            self._add_densepose_masks_as_segmentation(annos, image_shape)
+
+        instances = utils.annotations_to_instances(annos, image_shape, mask_format="bitmask")
+        densepose_annotations = [obj.get("densepose") for obj in annos]
+        if densepose_annotations and not all(v is None for v in densepose_annotations):
+            instances.gt_densepose = DensePoseList(
+                densepose_annotations, instances.gt_boxes, image_shape
+            )
+
+        dataset_dict["instances"] = instances[instances.gt_boxes.nonempty()]
+        return dataset_dict
+
+    def _transform_densepose(self, annotation, transforms):
+        if not self.densepose_on:
+            return annotation
+
+        # Handle densepose annotations
+        is_valid, reason_not_valid = DensePoseDataRelative.validate_annotation(annotation)
+        if is_valid:
+            densepose_data = DensePoseDataRelative(annotation, cleanup=True)
+            densepose_data.apply_transform(transforms, self.densepose_transform_data)
+            annotation["densepose"] = densepose_data
+        else:
+            # logger = logging.getLogger(__name__)
+            # logger.debug("Could not load DensePose annotation: {}".format(reason_not_valid))
+            DensePoseDataRelative.cleanup_annotation(annotation)
+            # NOTE: annotations for certain instances may be unavailable.
+            # 'None' is accepted by the DensePoseList data structure.
+ annotation["densepose"] = None + return annotation + + def _add_densepose_masks_as_segmentation( + self, annotations: List[Dict[str, Any]], image_shape_hw: Tuple[int, int] + ): + for obj in annotations: + if ("densepose" not in obj) or ("segmentation" in obj): + continue + # DP segmentation: torch.Tensor [S, S] of float32, S=256 + segm_dp = torch.zeros_like(obj["densepose"].segm) + segm_dp[obj["densepose"].segm > 0] = 1 + segm_h, segm_w = segm_dp.shape + bbox_segm_dp = torch.tensor((0, 0, segm_h - 1, segm_w - 1), dtype=torch.float32) + # image bbox + x0, y0, x1, y1 = ( + v.item() for v in BoxMode.convert(obj["bbox"], obj["bbox_mode"], BoxMode.XYXY_ABS) + ) + segm_aligned = ( + ROIAlign((y1 - y0, x1 - x0), 1.0, 0, aligned=True) + .forward(segm_dp.view(1, 1, *segm_dp.shape), bbox_segm_dp) + .squeeze() + ) + image_mask = torch.zeros(*image_shape_hw, dtype=torch.float32) + image_mask[y0:y1, x0:x1] = segm_aligned + # segmentation for BitMask: np.array [H, W] of np.bool + obj["segmentation"] = image_mask >= 0.5 diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/datasets/__init__.py b/motion-gan-pipeline/preprocessing/third/densepose/data/datasets/__init__.py new file mode 100644 index 0000000..260ccb9 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/data/datasets/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from . import builtin # ensure the builtin datasets are registered + +__all__ = [k for k in globals().keys() if "builtin" not in k and not k.startswith("_")] diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/datasets/builtin.py b/motion-gan-pipeline/preprocessing/third/densepose/data/datasets/builtin.py new file mode 100644 index 0000000..7572cd6 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/data/datasets/builtin.py @@ -0,0 +1,16 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from .chimpnsee import register_dataset as register_chimpnsee_dataset +from .coco import BASE_DATASETS as BASE_COCO_DATASETS +from .coco import DATASETS as COCO_DATASETS +from .coco import register_datasets as register_coco_datasets +from .lvis import DATASETS as LVIS_DATASETS +from .lvis import register_datasets as register_lvis_datasets + +DEFAULT_DATASETS_ROOT = "datasets" + + +register_coco_datasets(COCO_DATASETS, DEFAULT_DATASETS_ROOT) +register_coco_datasets(BASE_COCO_DATASETS, DEFAULT_DATASETS_ROOT) +register_lvis_datasets(LVIS_DATASETS, DEFAULT_DATASETS_ROOT) + +register_chimpnsee_dataset(DEFAULT_DATASETS_ROOT) # pyre-ignore[19] diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/datasets/chimpnsee.py b/motion-gan-pipeline/preprocessing/third/densepose/data/datasets/chimpnsee.py new file mode 100644 index 0000000..61e0b50 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/data/datasets/chimpnsee.py @@ -0,0 +1,29 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+ +from typing import Optional + +from detectron2.data import DatasetCatalog, MetadataCatalog + +from ..utils import maybe_prepend_base_path +from .dataset_type import DatasetType + +CHIMPNSEE_DATASET_NAME = "chimpnsee" + + +def register_dataset(datasets_root: Optional[str] = None) -> None: + def empty_load_callback(): + pass + + video_list_fpath = maybe_prepend_base_path( + datasets_root, + "chimpnsee/cdna.eva.mpg.de/video_list.txt", + ) + video_base_path = maybe_prepend_base_path(datasets_root, "chimpnsee/cdna.eva.mpg.de") + + DatasetCatalog.register(CHIMPNSEE_DATASET_NAME, empty_load_callback) + MetadataCatalog.get(CHIMPNSEE_DATASET_NAME).set( + dataset_type=DatasetType.VIDEO_LIST, + video_list_fpath=video_list_fpath, + video_base_path=video_base_path, + category="chimpanzee", + ) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/datasets/coco.py b/motion-gan-pipeline/preprocessing/third/densepose/data/datasets/coco.py new file mode 100644 index 0000000..c19f7b0 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/data/datasets/coco.py @@ -0,0 +1,432 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import contextlib +import io +import logging +import os +from collections import defaultdict +from dataclasses import dataclass +from typing import Any, Dict, Iterable, List, Optional +from fvcore.common.timer import Timer + +from detectron2.data import DatasetCatalog, MetadataCatalog +from detectron2.structures import BoxMode +from detectron2.utils.file_io import PathManager + +from ..utils import maybe_prepend_base_path + +DENSEPOSE_MASK_KEY = "dp_masks" +DENSEPOSE_IUV_KEYS_WITHOUT_MASK = ["dp_x", "dp_y", "dp_I", "dp_U", "dp_V"] +DENSEPOSE_CSE_KEYS_WITHOUT_MASK = ["dp_x", "dp_y", "dp_vertex", "ref_model"] +DENSEPOSE_ALL_POSSIBLE_KEYS = set( + DENSEPOSE_IUV_KEYS_WITHOUT_MASK + DENSEPOSE_CSE_KEYS_WITHOUT_MASK + [DENSEPOSE_MASK_KEY] +) +DENSEPOSE_METADATA_URL_PREFIX = "https://dl.fbaipublicfiles.com/densepose/data/" + + +@dataclass +class CocoDatasetInfo: + name: str + images_root: str + annotations_fpath: str + + +DATASETS = [ + CocoDatasetInfo( + name="densepose_coco_2014_train", + images_root="coco/train2014", + annotations_fpath="coco/annotations/densepose_train2014.json", + ), + CocoDatasetInfo( + name="densepose_coco_2014_minival", + images_root="coco/val2014", + annotations_fpath="coco/annotations/densepose_minival2014.json", + ), + CocoDatasetInfo( + name="densepose_coco_2014_minival_100", + images_root="coco/val2014", + annotations_fpath="coco/annotations/densepose_minival2014_100.json", + ), + CocoDatasetInfo( + name="densepose_coco_2014_valminusminival", + images_root="coco/val2014", + annotations_fpath="coco/annotations/densepose_valminusminival2014.json", + ), + CocoDatasetInfo( + name="densepose_coco_2014_train_cse", + images_root="coco/train2014", + annotations_fpath="coco_cse/densepose_train2014_cse.json", + ), + CocoDatasetInfo( + name="densepose_coco_2014_minival_cse", + images_root="coco/val2014", + annotations_fpath="coco_cse/densepose_minival2014_cse.json", + ), + CocoDatasetInfo( + name="densepose_coco_2014_minival_100_cse", + images_root="coco/val2014", + annotations_fpath="coco_cse/densepose_minival2014_100_cse.json", + ), + CocoDatasetInfo( + name="densepose_coco_2014_valminusminival_cse", + images_root="coco/val2014", + annotations_fpath="coco_cse/densepose_valminusminival2014_cse.json", + ), + CocoDatasetInfo( + name="densepose_chimps", + images_root="densepose_chimps/images", + 
annotations_fpath="densepose_chimps/densepose_chimps_densepose.json", + ), + CocoDatasetInfo( + name="densepose_chimps_cse_train", + images_root="densepose_chimps/images", + annotations_fpath="densepose_chimps/densepose_chimps_cse_train.json", + ), + CocoDatasetInfo( + name="densepose_chimps_cse_val", + images_root="densepose_chimps/images", + annotations_fpath="densepose_chimps/densepose_chimps_cse_val.json", + ), + CocoDatasetInfo( + name="posetrack2017_train", + images_root="posetrack2017/posetrack_data_2017", + annotations_fpath="posetrack2017/densepose_posetrack_train2017.json", + ), + CocoDatasetInfo( + name="posetrack2017_val", + images_root="posetrack2017/posetrack_data_2017", + annotations_fpath="posetrack2017/densepose_posetrack_val2017.json", + ), + CocoDatasetInfo( + name="lvis_v05_train", + images_root="coco/train2017", + annotations_fpath="lvis/lvis_v0.5_plus_dp_train.json", + ), + CocoDatasetInfo( + name="lvis_v05_val", + images_root="coco/val2017", + annotations_fpath="lvis/lvis_v0.5_plus_dp_val.json", + ), +] + + +BASE_DATASETS = [ + CocoDatasetInfo( + name="base_coco_2017_train", + images_root="coco/train2017", + annotations_fpath="coco/annotations/instances_train2017.json", + ), + CocoDatasetInfo( + name="base_coco_2017_val", + images_root="coco/val2017", + annotations_fpath="coco/annotations/instances_val2017.json", + ), + CocoDatasetInfo( + name="base_coco_2017_val_100", + images_root="coco/val2017", + annotations_fpath="coco/annotations/instances_val2017_100.json", + ), +] + + +def get_metadata(base_path: Optional[str]) -> Dict[str, Any]: + """ + Returns metadata associated with COCO DensePose datasets + + Args: + base_path: Optional[str] + Base path used to load metadata from + + Returns: + Dict[str, Any] + Metadata in the form of a dictionary + """ + meta = { + "densepose_transform_src": maybe_prepend_base_path(base_path, "UV_symmetry_transforms.mat"), + "densepose_smpl_subdiv": maybe_prepend_base_path(base_path, "SMPL_subdiv.mat"), + "densepose_smpl_subdiv_transform": maybe_prepend_base_path( + base_path, + "SMPL_SUBDIV_TRANSFORM.mat", + ), + } + return meta + + +def _load_coco_annotations(json_file: str): + """ + Load COCO annotations from a JSON file + + Args: + json_file: str + Path to the file to load annotations from + Returns: + Instance of `pycocotools.coco.COCO` that provides access to annotations + data + """ + from pycocotools.coco import COCO + + logger = logging.getLogger(__name__) + timer = Timer() + with contextlib.redirect_stdout(io.StringIO()): + coco_api = COCO(json_file) + if timer.seconds() > 1: + logger.info("Loading {} takes {:.2f} seconds.".format(json_file, timer.seconds())) + return coco_api + + +def _add_categories_metadata(dataset_name: str, categories: List[Dict[str, Any]]): + meta = MetadataCatalog.get(dataset_name) + meta.categories = {c["id"]: c["name"] for c in categories} + logger = logging.getLogger(__name__) + logger.info("Dataset {} categories: {}".format(dataset_name, meta.categories)) + + +def _verify_annotations_have_unique_ids(json_file: str, anns: List[List[Dict[str, Any]]]): + if "minival" in json_file: + # Skip validation on COCO2014 valminusminival and minival annotations + # The ratio of buggy annotations there is tiny and does not affect accuracy + # Therefore we explicitly white-list them + return + ann_ids = [ann["id"] for anns_per_image in anns for ann in anns_per_image] + assert len(set(ann_ids)) == len(ann_ids), "Annotation ids in '{}' are not unique!".format( + json_file + ) + + +def _maybe_add_bbox(obj: Dict[str, 
Any], ann_dict: Dict[str, Any]): + if "bbox" not in ann_dict: + return + obj["bbox"] = ann_dict["bbox"] + obj["bbox_mode"] = BoxMode.XYWH_ABS + + +def _maybe_add_segm(obj: Dict[str, Any], ann_dict: Dict[str, Any]): + if "segmentation" not in ann_dict: + return + segm = ann_dict["segmentation"] + if not isinstance(segm, dict): + # filter out invalid polygons (< 3 points) + segm = [poly for poly in segm if len(poly) % 2 == 0 and len(poly) >= 6] + if len(segm) == 0: + return + obj["segmentation"] = segm + + +def _maybe_add_keypoints(obj: Dict[str, Any], ann_dict: Dict[str, Any]): + if "keypoints" not in ann_dict: + return + keypts = ann_dict["keypoints"] # list[int] + for idx, v in enumerate(keypts): + if idx % 3 != 2: + # COCO's segmentation coordinates are floating points in [0, H or W], + # but keypoint coordinates are integers in [0, H-1 or W-1] + # Therefore we assume the coordinates are "pixel indices" and + # add 0.5 to convert to floating point coordinates. + keypts[idx] = v + 0.5 + obj["keypoints"] = keypts + + +def _maybe_add_densepose(obj: Dict[str, Any], ann_dict: Dict[str, Any]): + for key in DENSEPOSE_ALL_POSSIBLE_KEYS: + if key in ann_dict: + obj[key] = ann_dict[key] + + +def _combine_images_with_annotations( + dataset_name: str, + image_root: str, + img_datas: Iterable[Dict[str, Any]], + ann_datas: Iterable[Iterable[Dict[str, Any]]], +): + + ann_keys = ["iscrowd", "category_id"] + dataset_dicts = [] + contains_video_frame_info = False + + for img_dict, ann_dicts in zip(img_datas, ann_datas): + record = {} + record["file_name"] = os.path.join(image_root, img_dict["file_name"]) + record["height"] = img_dict["height"] + record["width"] = img_dict["width"] + record["image_id"] = img_dict["id"] + record["dataset"] = dataset_name + if "frame_id" in img_dict: + record["frame_id"] = img_dict["frame_id"] + record["video_id"] = img_dict.get("vid_id", None) + contains_video_frame_info = True + objs = [] + for ann_dict in ann_dicts: + assert ann_dict["image_id"] == record["image_id"] + assert ann_dict.get("ignore", 0) == 0 + obj = {key: ann_dict[key] for key in ann_keys if key in ann_dict} + _maybe_add_bbox(obj, ann_dict) + _maybe_add_segm(obj, ann_dict) + _maybe_add_keypoints(obj, ann_dict) + _maybe_add_densepose(obj, ann_dict) + objs.append(obj) + record["annotations"] = objs + dataset_dicts.append(record) + if contains_video_frame_info: + create_video_frame_mapping(dataset_name, dataset_dicts) + return dataset_dicts + + +def get_contiguous_id_to_category_id_map(metadata): + cat_id_2_cont_id = metadata.thing_dataset_id_to_contiguous_id + cont_id_2_cat_id = {} + for cat_id, cont_id in cat_id_2_cont_id.items(): + if cont_id in cont_id_2_cat_id: + continue + cont_id_2_cat_id[cont_id] = cat_id + return cont_id_2_cat_id + + +def maybe_filter_categories_cocoapi(dataset_name, coco_api): + meta = MetadataCatalog.get(dataset_name) + cont_id_2_cat_id = get_contiguous_id_to_category_id_map(meta) + cat_id_2_cont_id = meta.thing_dataset_id_to_contiguous_id + # filter categories + cats = [] + for cat in coco_api.dataset["categories"]: + cat_id = cat["id"] + if cat_id not in cat_id_2_cont_id: + continue + cont_id = cat_id_2_cont_id[cat_id] + if (cont_id in cont_id_2_cat_id) and (cont_id_2_cat_id[cont_id] == cat_id): + cats.append(cat) + coco_api.dataset["categories"] = cats + # filter annotations, if multiple categories are mapped to a single + # contiguous ID, use only one category ID and map all annotations to that category ID + anns = [] + for ann in coco_api.dataset["annotations"]: + cat_id = 
ann["category_id"] + if cat_id not in cat_id_2_cont_id: + continue + cont_id = cat_id_2_cont_id[cat_id] + ann["category_id"] = cont_id_2_cat_id[cont_id] + anns.append(ann) + coco_api.dataset["annotations"] = anns + # recreate index + coco_api.createIndex() + + +def maybe_filter_and_map_categories_cocoapi(dataset_name, coco_api): + meta = MetadataCatalog.get(dataset_name) + category_id_map = meta.thing_dataset_id_to_contiguous_id + # map categories + cats = [] + for cat in coco_api.dataset["categories"]: + cat_id = cat["id"] + if cat_id not in category_id_map: + continue + cat["id"] = category_id_map[cat_id] + cats.append(cat) + coco_api.dataset["categories"] = cats + # map annotation categories + anns = [] + for ann in coco_api.dataset["annotations"]: + cat_id = ann["category_id"] + if cat_id not in category_id_map: + continue + ann["category_id"] = category_id_map[cat_id] + anns.append(ann) + coco_api.dataset["annotations"] = anns + # recreate index + coco_api.createIndex() + + +def create_video_frame_mapping(dataset_name, dataset_dicts): + mapping = defaultdict(dict) + for d in dataset_dicts: + video_id = d.get("video_id") + if video_id is None: + continue + mapping[video_id].update({d["frame_id"]: d["file_name"]}) + MetadataCatalog.get(dataset_name).set(video_frame_mapping=mapping) + + +def load_coco_json(annotations_json_file: str, image_root: str, dataset_name: str): + """ + Loads a JSON file with annotations in COCO instances format. + Replaces `detectron2.data.datasets.coco.load_coco_json` to handle metadata + in a more flexible way. Postpones category mapping to a later stage to be + able to combine several datasets with different (but coherent) sets of + categories. + + Args: + + annotations_json_file: str + Path to the JSON file with annotations in COCO instances format. + image_root: str + directory that contains all the images + dataset_name: str + the name that identifies a dataset, e.g. "densepose_coco_2014_train" + extra_annotation_keys: Optional[List[str]] + If provided, these keys are used to extract additional data from + the annotations. + """ + coco_api = _load_coco_annotations(PathManager.get_local_path(annotations_json_file)) + _add_categories_metadata(dataset_name, coco_api.loadCats(coco_api.getCatIds())) + # sort indices for reproducible results + img_ids = sorted(coco_api.imgs.keys()) + # imgs is a list of dicts, each looks something like: + # {'license': 4, + # 'url': 'http://farm6.staticflickr.com/5454/9413846304_881d5e5c3b_z.jpg', + # 'file_name': 'COCO_val2014_000000001268.jpg', + # 'height': 427, + # 'width': 640, + # 'date_captured': '2013-11-17 05:57:24', + # 'id': 1268} + imgs = coco_api.loadImgs(img_ids) + logger = logging.getLogger(__name__) + logger.info("Loaded {} images in COCO format from {}".format(len(imgs), annotations_json_file)) + # anns is a list[list[dict]], where each dict is an annotation + # record for an object. The inner list enumerates the objects in an image + # and the outer list enumerates over images. 
+    anns = [coco_api.imgToAnns[img_id] for img_id in img_ids]
+    _verify_annotations_have_unique_ids(annotations_json_file, anns)
+    dataset_records = _combine_images_with_annotations(dataset_name, image_root, imgs, anns)
+    return dataset_records
+
+
+def register_dataset(dataset_data: CocoDatasetInfo, datasets_root: Optional[str] = None):
+    """
+    Registers provided COCO DensePose dataset
+
+    Args:
+        dataset_data: CocoDatasetInfo
+            Dataset data
+        datasets_root: Optional[str]
+            Datasets root folder (default: None)
+    """
+    annotations_fpath = maybe_prepend_base_path(datasets_root, dataset_data.annotations_fpath)
+    images_root = maybe_prepend_base_path(datasets_root, dataset_data.images_root)
+
+    def load_annotations():
+        return load_coco_json(
+            annotations_json_file=annotations_fpath,
+            image_root=images_root,
+            dataset_name=dataset_data.name,
+        )
+
+    DatasetCatalog.register(dataset_data.name, load_annotations)
+    MetadataCatalog.get(dataset_data.name).set(
+        json_file=annotations_fpath,
+        image_root=images_root,
+        **get_metadata(DENSEPOSE_METADATA_URL_PREFIX)
+    )
+
+
+def register_datasets(
+    datasets_data: Iterable[CocoDatasetInfo], datasets_root: Optional[str] = None
+):
+    """
+    Registers provided COCO DensePose datasets
+
+    Args:
+        datasets_data: Iterable[CocoDatasetInfo]
+            An iterable of `CocoDatasetInfo` objects
+        datasets_root: Optional[str]
+            Datasets root folder (default: None)
+    """
+    for dataset_data in datasets_data:
+        register_dataset(dataset_data, datasets_root)
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/datasets/dataset_type.py b/motion-gan-pipeline/preprocessing/third/densepose/data/datasets/dataset_type.py
new file mode 100644
index 0000000..ed8f8f2
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/data/datasets/dataset_type.py
@@ -0,0 +1,11 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+from enum import Enum
+
+
+class DatasetType(Enum):
+    """
+    Dataset type, mostly used for datasets that contain data to bootstrap models on
+    """
+
+    VIDEO_LIST = "video_list"
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/datasets/lvis.py b/motion-gan-pipeline/preprocessing/third/densepose/data/datasets/lvis.py
new file mode 100644
index 0000000..b4af9fa
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/data/datasets/lvis.py
@@ -0,0 +1,257 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+import logging
+import os
+from typing import Any, Dict, Iterable, List, Optional
+from fvcore.common.timer import Timer
+
+from detectron2.data import DatasetCatalog, MetadataCatalog
+from detectron2.data.datasets.lvis import get_lvis_instances_meta
+from detectron2.structures import BoxMode
+from detectron2.utils.file_io import PathManager
+
+from ..utils import maybe_prepend_base_path
+from .coco import (
+    DENSEPOSE_ALL_POSSIBLE_KEYS,
+    DENSEPOSE_METADATA_URL_PREFIX,
+    CocoDatasetInfo,
+    get_metadata,
+)
+
+DATASETS = [
+    CocoDatasetInfo(
+        name="densepose_lvis_v1_ds1_train_v1",
+        images_root="coco_",
+        annotations_fpath="lvis/densepose_lvis_v1_ds1_train_v1.json",
+    ),
+    CocoDatasetInfo(
+        name="densepose_lvis_v1_ds1_val_v1",
+        images_root="coco_",
+        annotations_fpath="lvis/densepose_lvis_v1_ds1_val_v1.json",
+    ),
+    CocoDatasetInfo(
+        name="densepose_lvis_v1_ds2_train_v1",
+        images_root="coco_",
+        annotations_fpath="lvis/densepose_lvis_v1_ds2_train_v1.json",
+    ),
+    CocoDatasetInfo(
+        name="densepose_lvis_v1_ds2_val_v1",
+        images_root="coco_",
+        annotations_fpath="lvis/densepose_lvis_v1_ds2_val_v1.json",
+    ),
+    CocoDatasetInfo(
+        name="densepose_lvis_v1_ds1_val_animals_100",
+        images_root="coco_",
+        annotations_fpath="lvis/densepose_lvis_v1_val_animals_100_v2.json",
+    ),
+]
+
+
+def _load_lvis_annotations(json_file: str):
+    """
+    Load LVIS annotations from a JSON file
+
+    Args:
+        json_file: str
+            Path to the file to load annotations from
+    Returns:
+        Instance of `lvis.LVIS` that provides access to annotations data
+    """
+    from lvis import LVIS
+
+    json_file = PathManager.get_local_path(json_file)
+    logger = logging.getLogger(__name__)
+    timer = Timer()
+    lvis_api = LVIS(json_file)
+    if timer.seconds() > 1:
+        logger.info("Loading {} takes {:.2f} seconds.".format(json_file, timer.seconds()))
+    return lvis_api
+
+
+def _add_categories_metadata(dataset_name: str) -> None:
+    metadict = get_lvis_instances_meta(dataset_name)
+    categories = metadict["thing_classes"]
+    metadata = MetadataCatalog.get(dataset_name)
+    metadata.categories = {i + 1: categories[i] for i in range(len(categories))}
+    logger = logging.getLogger(__name__)
+    logger.info(f"Dataset {dataset_name} has {len(categories)} categories")
+
+
+def _verify_annotations_have_unique_ids(json_file: str, anns: List[List[Dict[str, Any]]]) -> None:
+    ann_ids = [ann["id"] for anns_per_image in anns for ann in anns_per_image]
+    assert len(set(ann_ids)) == len(ann_ids), "Annotation ids in '{}' are not unique!".format(
+        json_file
+    )
+
+
+def _maybe_add_bbox(obj: Dict[str, Any], ann_dict: Dict[str, Any]) -> None:
+    if "bbox" not in ann_dict:
+        return
+    obj["bbox"] = ann_dict["bbox"]
+    obj["bbox_mode"] = BoxMode.XYWH_ABS
+
+
+def _maybe_add_segm(obj: Dict[str, Any], ann_dict: Dict[str, Any]) -> None:
+    if "segmentation" not in ann_dict:
+        return
+    segm = ann_dict["segmentation"]
+    if not isinstance(segm, dict):
+        # filter out invalid polygons (< 3 points)
+        segm = [poly for poly in segm if len(poly) % 2 == 0 and len(poly) >= 6]
+        if len(segm) == 0:
+            return
+    obj["segmentation"] = segm
+
+
+def _maybe_add_keypoints(obj: Dict[str, Any], ann_dict: Dict[str, Any]) -> None:
+    if "keypoints" not in ann_dict:
+        return
+    keypts = ann_dict["keypoints"]  # list[int]
+    for idx, v in enumerate(keypts):
+        if idx % 3 != 2:
+            # COCO's segmentation coordinates are floating points in [0, H or W],
+            # but keypoint coordinates are integers in [0, H-1 or W-1]
+            # Therefore we assume the coordinates are "pixel indices" and
+            # add 0.5 to convert to
floating point coordinates.
+            keypts[idx] = v + 0.5
+    obj["keypoints"] = keypts
+
+
+def _maybe_add_densepose(obj: Dict[str, Any], ann_dict: Dict[str, Any]) -> None:
+    for key in DENSEPOSE_ALL_POSSIBLE_KEYS:
+        if key in ann_dict:
+            obj[key] = ann_dict[key]
+
+
+def _combine_images_with_annotations(
+    dataset_name: str,
+    image_root: str,
+    img_datas: Iterable[Dict[str, Any]],
+    ann_datas: Iterable[Iterable[Dict[str, Any]]],
+):
+
+    dataset_dicts = []
+
+    def get_file_name(img_root, img_dict):
+        # Determine the path including the split folder ("train2017", "val2017", "test2017") from
+        # the coco_url field. Example:
+        #   'coco_url': 'http://images.cocodataset.org/train2017/000000155379.jpg'
+        split_folder, file_name = img_dict["coco_url"].split("/")[-2:]
+        return os.path.join(img_root + split_folder, file_name)
+
+    for img_dict, ann_dicts in zip(img_datas, ann_datas):
+        record = {}
+        record["file_name"] = get_file_name(image_root, img_dict)
+        record["height"] = img_dict["height"]
+        record["width"] = img_dict["width"]
+        record["not_exhaustive_category_ids"] = img_dict.get("not_exhaustive_category_ids", [])
+        record["neg_category_ids"] = img_dict.get("neg_category_ids", [])
+        record["image_id"] = img_dict["id"]
+        record["dataset"] = dataset_name
+
+        objs = []
+        for ann_dict in ann_dicts:
+            assert ann_dict["image_id"] == record["image_id"]
+            obj = {}
+            _maybe_add_bbox(obj, ann_dict)
+            obj["iscrowd"] = ann_dict.get("iscrowd", 0)
+            obj["category_id"] = ann_dict["category_id"]
+            _maybe_add_segm(obj, ann_dict)
+            _maybe_add_keypoints(obj, ann_dict)
+            _maybe_add_densepose(obj, ann_dict)
+            objs.append(obj)
+        record["annotations"] = objs
+        dataset_dicts.append(record)
+    return dataset_dicts
+
+
+def load_lvis_json(annotations_json_file: str, image_root: str, dataset_name: str):
+    """
+    Loads a JSON file with annotations in LVIS instances format.
+    Replaces `detectron2.data.datasets.lvis.load_lvis_json` to handle metadata
+    in a more flexible way. Postpones category mapping to a later stage to be
+    able to combine several datasets with different (but coherent) sets of
+    categories.
+
+    Args:
+        annotations_json_file: str
+            Path to the JSON file with annotations in LVIS instances format.
+        image_root: str
+            Directory that contains all the images.
+        dataset_name: str
+            The name that identifies a dataset, e.g. "densepose_lvis_v1_ds1_train_v1".
+    """
+    lvis_api = _load_lvis_annotations(PathManager.get_local_path(annotations_json_file))
+
+    _add_categories_metadata(dataset_name)
+
+    # sort indices for reproducible results
+    img_ids = sorted(lvis_api.imgs.keys())
+    # imgs is a list of dicts, each looks something like:
+    # {'license': 4,
+    #  'url': 'http://farm6.staticflickr.com/5454/9413846304_881d5e5c3b_z.jpg',
+    #  'file_name': 'COCO_val2014_000000001268.jpg',
+    #  'height': 427,
+    #  'width': 640,
+    #  'date_captured': '2013-11-17 05:57:24',
+    #  'id': 1268}
+    imgs = lvis_api.load_imgs(img_ids)
+    logger = logging.getLogger(__name__)
+    logger.info("Loaded {} images in LVIS format from {}".format(len(imgs), annotations_json_file))
+    # anns is a list[list[dict]], where each dict is an annotation
+    # record for an object. The inner list enumerates the objects in an image
+    # and the outer list enumerates over images.
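+    # Note: unlike COCO, LVIS annotations may omit the "iscrowd" flag;
+    # _combine_images_with_annotations above defaults it to 0 in that case.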
+    anns = [lvis_api.img_ann_map[img_id] for img_id in img_ids]
+
+    _verify_annotations_have_unique_ids(annotations_json_file, anns)
+    dataset_records = _combine_images_with_annotations(dataset_name, image_root, imgs, anns)
+    return dataset_records
+
+
+def register_dataset(dataset_data: CocoDatasetInfo, datasets_root: Optional[str] = None) -> None:
+    """
+    Registers provided LVIS DensePose dataset
+
+    Args:
+        dataset_data: CocoDatasetInfo
+            Dataset data
+        datasets_root: Optional[str]
+            Datasets root folder (default: None)
+    """
+    annotations_fpath = maybe_prepend_base_path(datasets_root, dataset_data.annotations_fpath)
+    images_root = maybe_prepend_base_path(datasets_root, dataset_data.images_root)
+
+    def load_annotations():
+        return load_lvis_json(
+            annotations_json_file=annotations_fpath,
+            image_root=images_root,
+            dataset_name=dataset_data.name,
+        )
+
+    DatasetCatalog.register(dataset_data.name, load_annotations)
+    MetadataCatalog.get(dataset_data.name).set(
+        json_file=annotations_fpath,
+        image_root=images_root,
+        evaluator_type="lvis",
+        **get_metadata(DENSEPOSE_METADATA_URL_PREFIX),
+    )
+
+
+def register_datasets(
+    datasets_data: Iterable[CocoDatasetInfo], datasets_root: Optional[str] = None
+) -> None:
+    """
+    Registers provided LVIS DensePose datasets
+
+    Args:
+        datasets_data: Iterable[CocoDatasetInfo]
+            An iterable of `CocoDatasetInfo` objects
+        datasets_root: Optional[str]
+            Datasets root folder (default: None)
+    """
+    for dataset_data in datasets_data:
+        register_dataset(dataset_data, datasets_root)
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/image_list_dataset.py b/motion-gan-pipeline/preprocessing/third/densepose/data/image_list_dataset.py
new file mode 100644
index 0000000..92a95d3
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/data/image_list_dataset.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+import logging
+import numpy as np
+from typing import Any, Callable, Dict, List, Optional, Union
+import torch
+from torch.utils.data.dataset import Dataset
+
+from detectron2.data.detection_utils import read_image
+
+ImageTransform = Callable[[torch.Tensor], torch.Tensor]
+
+
+class ImageListDataset(Dataset):
+    """
+    Dataset that provides images from a list.
+    """
+
+    _EMPTY_IMAGE = torch.empty((0, 3, 1, 1))
+
+    def __init__(
+        self,
+        image_list: List[str],
+        category_list: Union[str, List[str], None] = None,
+        transform: Optional[ImageTransform] = None,
+    ):
+        """
+        Args:
+            image_list (List[str]): list of paths to image files
+            category_list (Union[str, List[str], None]): list of animal categories for
+                each image.
If it is a string, or None, this applies to all images
+            transform (Optional[ImageTransform]): transform applied to each
+                loaded image tensor
+        """
+        if isinstance(category_list, list):
+            self.category_list = category_list
+        else:
+            self.category_list = [category_list] * len(image_list)
+        assert len(image_list) == len(
+            self.category_list
+        ), "length of image and category lists must be equal"
+        self.image_list = image_list
+        self.transform = transform
+
+    def __getitem__(self, idx: int) -> Dict[str, Any]:
+        """
+        Gets selected images from the list
+
+        Args:
+            idx (int): image index in the image list
+        Returns:
+            A dictionary containing two keys:
+                images (torch.Tensor): tensor of size [N, 3, H, W] (N = 1, or 0 for _EMPTY_IMAGE)
+                categories (List[str]): categories of the loaded image (empty on failure)
+        """
+        categories = [self.category_list[idx]]
+        fpath = self.image_list[idx]
+        transform = self.transform
+
+        try:
+            image = torch.from_numpy(np.ascontiguousarray(read_image(fpath, format="BGR")))
+            image = image.permute(2, 0, 1).unsqueeze(0).float()  # HWC -> NCHW
+            if transform is not None:
+                image = transform(image)
+            return {"images": image, "categories": categories}
+        except (OSError, RuntimeError) as e:
+            logger = logging.getLogger(__name__)
+            logger.warning(f"Error opening image file {fpath}: {e}")
+
+        return {"images": self._EMPTY_IMAGE, "categories": []}
+
+    def __len__(self):
+        return len(self.image_list)
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/inference_based_loader.py b/motion-gan-pipeline/preprocessing/third/densepose/data/inference_based_loader.py
new file mode 100644
index 0000000..cb89544
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/data/inference_based_loader.py
@@ -0,0 +1,172 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+import random
+from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Tuple
+import torch
+from torch import nn
+
+SampledData = Any
+ModelOutput = Any
+
+
+def _grouper(iterable: Iterable[Any], n: int, fillvalue=None) -> Iterator[Tuple[Any]]:
+    """
+    Group elements of an iterable by chunks of size `n`, e.g.
+    grouper(range(9), 4) ->
+        (0, 1, 2, 3), (4, 5, 6, 7), (8, None, None, None)
+    """
+    it = iter(iterable)
+    while True:
+        values = []
+        for _ in range(n):
+            try:
+                value = next(it)
+            except StopIteration:
+                if values:
+                    values.extend([fillvalue] * (n - len(values)))
+                    yield tuple(values)
+                return
+            values.append(value)
+        yield tuple(values)
+
+
+class ScoreBasedFilter:
+    """
+    Filters entries in model output based on their scores
+    Discards all entries with score less than the specified minimum
+    """
+
+    def __init__(self, min_score: float = 0.8):
+        self.min_score = min_score
+
+    def __call__(self, model_output: ModelOutput) -> ModelOutput:
+        for model_output_i in model_output:
+            instances = model_output_i["instances"]
+            if not instances.has("scores"):
+                continue
+            instances_filtered = instances[instances.scores >= self.min_score]
+            model_output_i["instances"] = instances_filtered
+        return model_output
+
+
+class InferenceBasedLoader:
+    """
+    Data loader based on results inferred by a model.
Consists of: + - a data loader that provides batches of images + - a model that is used to infer the results + - a data sampler that converts inferred results to annotations + """ + + def __init__( + self, + model: nn.Module, + data_loader: Iterable[List[Dict[str, Any]]], + data_sampler: Optional[Callable[[ModelOutput], List[SampledData]]] = None, + data_filter: Optional[Callable[[ModelOutput], ModelOutput]] = None, + shuffle: bool = True, + batch_size: int = 4, + inference_batch_size: int = 4, + drop_last: bool = False, + category_to_class_mapping: Optional[dict] = None, + ): + """ + Constructor + + Args: + model (torch.nn.Module): model used to produce data + data_loader (Iterable[List[Dict[str, Any]]]): iterable that provides + dictionaries with "images" and "categories" fields to perform inference on + data_sampler (Callable: ModelOutput -> SampledData): functor + that produces annotation data from inference results; + (optional, default: None) + data_filter (Callable: ModelOutput -> ModelOutput): filter + that selects model outputs for further processing + (optional, default: None) + shuffle (bool): if True, the input images get shuffled + batch_size (int): batch size for the produced annotation data + inference_batch_size (int): batch size for input images + drop_last (bool): if True, drop the last batch if it is undersized + category_to_class_mapping (dict): category to class mapping + """ + self.model = model + self.model.eval() + self.data_loader = data_loader + self.data_sampler = data_sampler + self.data_filter = data_filter + self.shuffle = shuffle + self.batch_size = batch_size + self.inference_batch_size = inference_batch_size + self.drop_last = drop_last + if category_to_class_mapping is not None: + self.category_to_class_mapping = category_to_class_mapping + else: + self.category_to_class_mapping = {} + + def __iter__(self) -> Iterator[List[SampledData]]: + for batch in self.data_loader: + # batch : List[Dict[str: Tensor[N, C, H, W], str: Optional[str]]] + # images_batch : Tensor[N, C, H, W] + # image : Tensor[C, H, W] + images_and_categories = [ + {"image": image, "category": category} + for element in batch + for image, category in zip(element["images"], element["categories"]) + ] + if not images_and_categories: + continue + if self.shuffle: + random.shuffle(images_and_categories) + yield from self._produce_data(images_and_categories) # pyre-ignore[6] + + def _produce_data( + self, images_and_categories: List[Tuple[torch.Tensor, Optional[str]]] + ) -> Iterator[List[SampledData]]: + """ + Produce batches of data from images + + Args: + images_and_categories (List[Tuple[torch.Tensor, Optional[str]]]): + list of images and corresponding categories to process + + Returns: + Iterator over batches of data sampled from model outputs + """ + data_batches: List[SampledData] = [] + category_to_class_mapping = self.category_to_class_mapping + batched_images_and_categories = _grouper(images_and_categories, self.inference_batch_size) + for batch in batched_images_and_categories: + batch = [ + { + "image": image_and_category["image"].to(self.model.device), + "category": image_and_category["category"], + } + for image_and_category in batch + if image_and_category is not None + ] + if not batch: + continue + with torch.no_grad(): + model_output = self.model(batch) + for model_output_i, batch_i in zip(model_output, batch): + assert len(batch_i["image"].shape) == 3 + model_output_i["image"] = batch_i["image"] + instance_class = category_to_class_mapping.get(batch_i["category"], 0) + 
model_output_i["instances"].dataset_classes = torch.tensor( + [instance_class] * len(model_output_i["instances"]) + ) + model_output_filtered = ( + model_output if self.data_filter is None else self.data_filter(model_output) + ) + data = ( + model_output_filtered + if self.data_sampler is None + else self.data_sampler(model_output_filtered) + ) + for data_i in data: + if len(data_i["instances"]): + data_batches.append(data_i) + if len(data_batches) >= self.batch_size: + yield data_batches[: self.batch_size] + data_batches = data_batches[self.batch_size :] + if not self.drop_last and data_batches: + yield data_batches diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/meshes/__init__.py b/motion-gan-pipeline/preprocessing/third/densepose/data/meshes/__init__.py new file mode 100644 index 0000000..1e1f0d5 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/data/meshes/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +from . import builtin + +__all__ = [k for k in globals().keys() if "builtin" not in k and not k.startswith("_")] diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/meshes/builtin.py b/motion-gan-pipeline/preprocessing/third/densepose/data/meshes/builtin.py new file mode 100644 index 0000000..c0b2376 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/data/meshes/builtin.py @@ -0,0 +1,101 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +from .catalog import MeshInfo, register_meshes + +DENSEPOSE_MESHES_DIR = "https://dl.fbaipublicfiles.com/densepose/meshes/" + +MESHES = [ + MeshInfo( + name="smpl_27554", + data="smpl_27554.pkl", + geodists="geodists/geodists_smpl_27554.pkl", + symmetry="symmetry/symmetry_smpl_27554.pkl", + texcoords="texcoords/texcoords_smpl_27554.pkl", + ), + MeshInfo( + name="chimp_5029", + data="chimp_5029.pkl", + geodists="geodists/geodists_chimp_5029.pkl", + symmetry="symmetry/symmetry_chimp_5029.pkl", + texcoords="texcoords/texcoords_chimp_5029.pkl", + ), + MeshInfo( + name="cat_5001", + data="cat_5001.pkl", + geodists="geodists/geodists_cat_5001.pkl", + symmetry="symmetry/symmetry_cat_5001.pkl", + texcoords="texcoords/texcoords_cat_5001.pkl", + ), + MeshInfo( + name="cat_7466", + data="cat_7466.pkl", + geodists="geodists/geodists_cat_7466.pkl", + symmetry="symmetry/symmetry_cat_7466.pkl", + texcoords="texcoords/texcoords_cat_7466.pkl", + ), + MeshInfo( + name="sheep_5004", + data="sheep_5004.pkl", + geodists="geodists/geodists_sheep_5004.pkl", + symmetry="symmetry/symmetry_sheep_5004.pkl", + texcoords="texcoords/texcoords_sheep_5004.pkl", + ), + MeshInfo( + name="zebra_5002", + data="zebra_5002.pkl", + geodists="geodists/geodists_zebra_5002.pkl", + symmetry="symmetry/symmetry_zebra_5002.pkl", + texcoords="texcoords/texcoords_zebra_5002.pkl", + ), + MeshInfo( + name="horse_5004", + data="horse_5004.pkl", + geodists="geodists/geodists_horse_5004.pkl", + symmetry="symmetry/symmetry_horse_5004.pkl", + texcoords="texcoords/texcoords_zebra_5002.pkl", + ), + MeshInfo( + name="giraffe_5002", + data="giraffe_5002.pkl", + geodists="geodists/geodists_giraffe_5002.pkl", + symmetry="symmetry/symmetry_giraffe_5002.pkl", + texcoords="texcoords/texcoords_giraffe_5002.pkl", + ), + MeshInfo( + name="elephant_5002", + data="elephant_5002.pkl", + geodists="geodists/geodists_elephant_5002.pkl", + symmetry="symmetry/symmetry_elephant_5002.pkl", + texcoords="texcoords/texcoords_elephant_5002.pkl", + ), + MeshInfo( + 
name="dog_5002", + data="dog_5002.pkl", + geodists="geodists/geodists_dog_5002.pkl", + symmetry="symmetry/symmetry_dog_5002.pkl", + texcoords="texcoords/texcoords_dog_5002.pkl", + ), + MeshInfo( + name="dog_7466", + data="dog_7466.pkl", + geodists="geodists/geodists_dog_7466.pkl", + symmetry="symmetry/symmetry_dog_7466.pkl", + texcoords="texcoords/texcoords_dog_7466.pkl", + ), + MeshInfo( + name="cow_5002", + data="cow_5002.pkl", + geodists="geodists/geodists_cow_5002.pkl", + symmetry="symmetry/symmetry_cow_5002.pkl", + texcoords="texcoords/texcoords_cow_5002.pkl", + ), + MeshInfo( + name="bear_4936", + data="bear_4936.pkl", + geodists="geodists/geodists_bear_4936.pkl", + symmetry="symmetry/symmetry_bear_4936.pkl", + texcoords="texcoords/texcoords_bear_4936.pkl", + ), +] + +register_meshes(MESHES, DENSEPOSE_MESHES_DIR) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/meshes/catalog.py b/motion-gan-pipeline/preprocessing/third/densepose/data/meshes/catalog.py new file mode 100644 index 0000000..b258f3c --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/data/meshes/catalog.py @@ -0,0 +1,71 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import logging +from collections import UserDict +from dataclasses import dataclass +from typing import Iterable, Optional + +from ..utils import maybe_prepend_base_path + + +@dataclass +class MeshInfo: + name: str + data: str + geodists: Optional[str] = None + symmetry: Optional[str] = None + texcoords: Optional[str] = None + + +class _MeshCatalog(UserDict): + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + self.mesh_ids = {} + self.mesh_names = {} + self.max_mesh_id = -1 + + def __setitem__(self, key, value): + if key in self: + logger = logging.getLogger(__name__) + logger.warning( + f"Overwriting mesh catalog entry '{key}': old value {self[key]}" + f", new value {value}" + ) + mesh_id = self.mesh_ids[key] + else: + self.max_mesh_id += 1 + mesh_id = self.max_mesh_id + super().__setitem__(key, value) + self.mesh_ids[key] = mesh_id + self.mesh_names[mesh_id] = key + + def get_mesh_id(self, shape_name: str) -> int: + return self.mesh_ids[shape_name] + + def get_mesh_name(self, mesh_id: int) -> str: + return self.mesh_names[mesh_id] + + +MeshCatalog = _MeshCatalog() + + +def register_mesh(mesh_info: MeshInfo, base_path: Optional[str]) -> None: + geodists, symmetry, texcoords = mesh_info.geodists, mesh_info.symmetry, mesh_info.texcoords + if geodists: + geodists = maybe_prepend_base_path(base_path, geodists) + if symmetry: + symmetry = maybe_prepend_base_path(base_path, symmetry) + if texcoords: + texcoords = maybe_prepend_base_path(base_path, texcoords) + MeshCatalog[mesh_info.name] = MeshInfo( + name=mesh_info.name, + data=maybe_prepend_base_path(base_path, mesh_info.data), + geodists=geodists, + symmetry=symmetry, + texcoords=texcoords, + ) + + +def register_meshes(mesh_infos: Iterable[MeshInfo], base_path: Optional[str]) -> None: + for mesh_info in mesh_infos: + register_mesh(mesh_info, base_path) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/__init__.py b/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/__init__.py new file mode 100644 index 0000000..7dba87e --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/__init__.py @@ -0,0 +1,8 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+
+from .densepose_uniform import DensePoseUniformSampler
+from .densepose_confidence_based import DensePoseConfidenceBasedSampler
+from .densepose_cse_uniform import DensePoseCSEUniformSampler
+from .densepose_cse_confidence_based import DensePoseCSEConfidenceBasedSampler
+from .mask_from_densepose import MaskFromDensePoseSampler
+from .prediction_to_gt import PredictionToGroundTruthSampler
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/densepose_base.py b/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/densepose_base.py
new file mode 100644
index 0000000..eaf4780
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/densepose_base.py
@@ -0,0 +1,205 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+from typing import Any, Dict, List, Tuple
+import torch
+from torch.nn import functional as F
+
+from detectron2.structures import BoxMode, Instances
+
+from densepose.converters import ToChartResultConverter
+from densepose.converters.base import IntTupleBox, make_int_box
+from densepose.structures import DensePoseDataRelative, DensePoseList
+
+
+class DensePoseBaseSampler:
+    """
+    Base DensePose sampler to produce DensePose data from DensePose predictions.
+    Samples for each class are drawn according to some distribution over all pixels estimated
+    to belong to that class.
+    """
+
+    def __init__(self, count_per_class: int = 8):
+        """
+        Constructor
+
+        Args:
+            count_per_class (int): the sampler produces at most `count_per_class`
+                samples for each category
+        """
+        self.count_per_class = count_per_class
+
+    def __call__(self, instances: Instances) -> DensePoseList:
+        """
+        Convert DensePose predictions (an instance of `DensePoseChartPredictorOutput`)
+        into DensePose annotations data (an instance of `DensePoseList`)
+        """
+        boxes_xyxy_abs = instances.pred_boxes.tensor.clone().cpu()
+        boxes_xywh_abs = BoxMode.convert(boxes_xyxy_abs, BoxMode.XYXY_ABS, BoxMode.XYWH_ABS)
+        dp_datas = []
+        for i in range(len(boxes_xywh_abs)):
+            annotation_i = self._sample(instances[i], make_int_box(boxes_xywh_abs[i]))
+            annotation_i[DensePoseDataRelative.S_KEY] = self._resample_mask(  # pyre-ignore[6]
+                instances[i].pred_densepose
+            )
+            dp_datas.append(DensePoseDataRelative(annotation_i))
+        # create densepose annotations on CPU
+        dp_list = DensePoseList(dp_datas, boxes_xyxy_abs, instances.image_size)
+        return dp_list
+
+    def _sample(self, instance: Instances, bbox_xywh: IntTupleBox) -> Dict[str, List[Any]]:
+        """
+        Sample DensePoseDataRelative from estimation results
+        """
+        labels, dp_result = self._produce_labels_and_results(instance)
+        annotation = {
+            DensePoseDataRelative.X_KEY: [],
+            DensePoseDataRelative.Y_KEY: [],
+            DensePoseDataRelative.U_KEY: [],
+            DensePoseDataRelative.V_KEY: [],
+            DensePoseDataRelative.I_KEY: [],
+        }
+        n, h, w = dp_result.shape
+        for part_id in range(1, DensePoseDataRelative.N_PART_LABELS + 1):
+            # indices - tuple of 3 1D tensors of size k
+            # 0: index along the first dimension N
+            # 1: index along H dimension
+            # 2: index along W dimension
+            indices = torch.nonzero(labels.expand(n, h, w) == part_id, as_tuple=True)
+            # values - an array of size [n, k]
+            # n: number of channels (U, V, confidences)
+            # k: number of points labeled with part_id
+            values = dp_result[indices].view(n, -1)
+            k = values.shape[1]
+            count = min(self.count_per_class, k)
+            if count <= 0:
+                continue
+            index_sample = self._produce_index_sample(values, count)
+            sampled_values = values[:, index_sample]
+            sampled_y = indices[1][index_sample]
+ 0.5 + sampled_x = indices[2][index_sample] + 0.5 + # prepare / normalize data + x = (sampled_x / w * 256.0).cpu().tolist() + y = (sampled_y / h * 256.0).cpu().tolist() + u = sampled_values[0].clamp(0, 1).cpu().tolist() + v = sampled_values[1].clamp(0, 1).cpu().tolist() + fine_segm_labels = [part_id] * count + # extend annotations + annotation[DensePoseDataRelative.X_KEY].extend(x) + annotation[DensePoseDataRelative.Y_KEY].extend(y) + annotation[DensePoseDataRelative.U_KEY].extend(u) + annotation[DensePoseDataRelative.V_KEY].extend(v) + annotation[DensePoseDataRelative.I_KEY].extend(fine_segm_labels) + return annotation + + def _produce_index_sample(self, values: torch.Tensor, count: int): + """ + Abstract method to produce a sample of indices to select data + To be implemented in descendants + + Args: + values (torch.Tensor): an array of size [n, k] that contains + estimated values (U, V, confidences); + n: number of channels (U, V, confidences) + k: number of points labeled with part_id + count (int): number of samples to produce, should be positive and <= k + + Return: + list(int): indices of values (along axis 1) selected as a sample + """ + raise NotImplementedError + + def _produce_labels_and_results(self, instance: Instances) -> Tuple[torch.Tensor, torch.Tensor]: + """ + Method to get labels and DensePose results from an instance + + Args: + instance (Instances): an instance of `DensePoseChartPredictorOutput` + + Return: + labels (torch.Tensor): shape [H, W], DensePose segmentation labels + dp_result (torch.Tensor): shape [2, H, W], stacked DensePose results u and v + """ + converter = ToChartResultConverter + chart_result = converter.convert(instance.pred_densepose, instance.pred_boxes) + labels, dp_result = chart_result.labels.cpu(), chart_result.uv.cpu() + return labels, dp_result + + def _resample_mask(self, output: Any) -> torch.Tensor: + """ + Convert DensePose predictor output to segmentation annotation - tensors of size + (256, 256) and type `int64`. + + Args: + output: DensePose predictor output with the following attributes: + - coarse_segm: tensor of size [N, D, H, W] with unnormalized coarse + segmentation scores + - fine_segm: tensor of size [N, C, H, W] with unnormalized fine + segmentation scores + Return: + Tensor of size (S, S) and type `int64` with coarse segmentation annotations, + where S = DensePoseDataRelative.MASK_SIZE + """ + sz = DensePoseDataRelative.MASK_SIZE + S = ( + # pyre-fixme[16]: `Tensor` has no attribute `argmax`. + # pyre-fixme[6]: Expected `Optional[int]` for 2nd param but got + # `Tuple[int, int]`. + F.interpolate(output.coarse_segm, (sz, sz), mode="bilinear", align_corners=False) + .argmax(dim=1) + .long() + ) + I = ( + ( + F.interpolate( + # pyre-fixme[6]: Expected `Optional[int]` for 2nd param but got + # `Tuple[int, int]`. 
+ output.fine_segm, (sz, sz), mode="bilinear", align_corners=False + ).argmax(dim=1) + * (S > 0).long() + ) + .squeeze() + .cpu() + ) + # Map fine segmentation results to coarse segmentation ground truth + # TODO: extract this into separate classes + # coarse segmentation: 1 = Torso, 2 = Right Hand, 3 = Left Hand, + # 4 = Left Foot, 5 = Right Foot, 6 = Upper Leg Right, 7 = Upper Leg Left, + # 8 = Lower Leg Right, 9 = Lower Leg Left, 10 = Upper Arm Left, + # 11 = Upper Arm Right, 12 = Lower Arm Left, 13 = Lower Arm Right, + # 14 = Head + # fine segmentation: 1, 2 = Torso, 3 = Right Hand, 4 = Left Hand, + # 5 = Left Foot, 6 = Right Foot, 7, 9 = Upper Leg Right, + # 8, 10 = Upper Leg Left, 11, 13 = Lower Leg Right, + # 12, 14 = Lower Leg Left, 15, 17 = Upper Arm Left, + # 16, 18 = Upper Arm Right, 19, 21 = Lower Arm Left, + # 20, 22 = Lower Arm Right, 23, 24 = Head + FINE_TO_COARSE_SEGMENTATION = { + 1: 1, + 2: 1, + 3: 2, + 4: 3, + 5: 4, + 6: 5, + 7: 6, + 8: 7, + 9: 6, + 10: 7, + 11: 8, + 12: 9, + 13: 8, + 14: 9, + 15: 10, + 16: 11, + 17: 10, + 18: 11, + 19: 12, + 20: 13, + 21: 12, + 22: 13, + 23: 14, + 24: 14, + } + mask = torch.zeros((sz, sz), dtype=torch.int64, device=torch.device("cpu")) + for i in range(DensePoseDataRelative.N_PART_LABELS): + mask[I == i + 1] = FINE_TO_COARSE_SEGMENTATION[i + 1] + return mask diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/densepose_confidence_based.py b/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/densepose_confidence_based.py new file mode 100644 index 0000000..48e325b --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/densepose_confidence_based.py @@ -0,0 +1,108 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +import random +from typing import Optional, Tuple +import torch + +from densepose.converters import ToChartResultConverterWithConfidences + +from .densepose_base import DensePoseBaseSampler + + +class DensePoseConfidenceBasedSampler(DensePoseBaseSampler): + """ + Samples DensePose data from DensePose predictions. + Samples for each class are drawn using confidence value estimates. 
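+
+    A minimal usage sketch (illustrative only; assumes `instances` is a
+    detectron2 `Instances` object whose `pred_densepose` field carries the
+    requested confidence channel):
+
+        sampler = DensePoseConfidenceBasedSampler("sigma_2", count_per_class=8)
+        dp_list = sampler(instances)  # DensePoseList with sampled annotations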
+ """ + + def __init__( + self, + confidence_channel: str, + count_per_class: int = 8, + search_count_multiplier: Optional[float] = None, + search_proportion: Optional[float] = None, + ): + """ + Constructor + + Args: + confidence_channel (str): confidence channel to use for sampling; + possible values: + "sigma_2": confidences for UV values + "fine_segm_confidence": confidences for fine segmentation + "coarse_segm_confidence": confidences for coarse segmentation + (default: "sigma_2") + count_per_class (int): the sampler produces at most `count_per_class` + samples for each category (default: 8) + search_count_multiplier (float or None): if not None, the total number + of the most confident estimates of a given class to consider is + defined as `min(search_count_multiplier * count_per_class, N)`, + where `N` is the total number of estimates of the class; cannot be + specified together with `search_proportion` (default: None) + search_proportion (float or None): if not None, the total number of the + of the most confident estimates of a given class to consider is + defined as `min(max(search_proportion * N, count_per_class), N)`, + where `N` is the total number of estimates of the class; cannot be + specified together with `search_count_multiplier` (default: None) + """ + super().__init__(count_per_class) + self.confidence_channel = confidence_channel + self.search_count_multiplier = search_count_multiplier + self.search_proportion = search_proportion + assert (search_count_multiplier is None) or (search_proportion is None), ( + f"Cannot specify both search_count_multiplier (={search_count_multiplier})" + f"and search_proportion (={search_proportion})" + ) + + def _produce_index_sample(self, values: torch.Tensor, count: int): + """ + Produce a sample of indices to select data based on confidences + + Args: + values (torch.Tensor): an array of size [n, k] that contains + estimated values (U, V, confidences); + n: number of channels (U, V, confidences) + k: number of points labeled with part_id + count (int): number of samples to produce, should be positive and <= k + + Return: + list(int): indices of values (along axis 1) selected as a sample + """ + k = values.shape[1] + if k == count: + index_sample = list(range(k)) + else: + # take the best count * search_count_multiplier pixels, + # sample from them uniformly + # (here best = smallest variance) + _, sorted_confidence_indices = torch.sort(values[2]) + if self.search_count_multiplier is not None: + search_count = min(int(count * self.search_count_multiplier), k) + elif self.search_proportion is not None: + search_count = min(max(int(k * self.search_proportion), count), k) + else: + search_count = min(count, k) + sample_from_top = random.sample(range(search_count), count) + index_sample = sorted_confidence_indices[:search_count][sample_from_top] + return index_sample + + def _produce_labels_and_results(self, instance) -> Tuple[torch.Tensor, torch.Tensor]: + """ + Method to get labels and DensePose results from an instance, with confidences + + Args: + instance (Instances): an instance of `DensePoseChartPredictorOutputWithConfidences` + + Return: + labels (torch.Tensor): shape [H, W], DensePose segmentation labels + dp_result (torch.Tensor): shape [3, H, W], DensePose results u and v + stacked with the confidence channel + """ + converter = ToChartResultConverterWithConfidences + chart_result = converter.convert(instance.pred_densepose, instance.pred_boxes) + labels, dp_result = chart_result.labels.cpu(), chart_result.uv.cpu() + dp_result = 
torch.cat(
+            (dp_result, getattr(chart_result, self.confidence_channel)[None].cpu())
+        )
+
+        return labels, dp_result
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/densepose_cse_base.py b/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/densepose_cse_base.py
new file mode 100644
index 0000000..ca2b1b2
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/densepose_cse_base.py
@@ -0,0 +1,146 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+from typing import Any, Dict, List, Tuple
+import torch
+from torch.nn import functional as F
+
+from detectron2.config import CfgNode
+from detectron2.structures import Instances
+
+from densepose.converters.base import IntTupleBox
+from densepose.data.utils import get_class_to_mesh_name_mapping
+from densepose.modeling.cse.utils import squared_euclidean_distance_matrix
+from densepose.structures import DensePoseDataRelative
+
+from .densepose_base import DensePoseBaseSampler
+
+
+class DensePoseCSEBaseSampler(DensePoseBaseSampler):
+    """
+    Base DensePose sampler to produce DensePose data from DensePose predictions.
+    Samples for each class are drawn according to some distribution over all pixels estimated
+    to belong to that class.
+    """
+
+    def __init__(
+        self,
+        cfg: CfgNode,
+        use_gt_categories: bool,
+        embedder: torch.nn.Module,
+        count_per_class: int = 8,
+    ):
+        """
+        Constructor
+
+        Args:
+            cfg (CfgNode): the config of the model
+            use_gt_categories (bool): if True, take the instance category from the
+                ground truth (`dataset_classes`), otherwise from the predictions
+            embedder (torch.nn.Module): necessary to compute mesh vertex embeddings
+            count_per_class (int): the sampler produces at most `count_per_class`
+                samples for each category
+        """
+        super().__init__(count_per_class)
+        self.embedder = embedder
+        self.class_to_mesh_name = get_class_to_mesh_name_mapping(cfg)
+        self.use_gt_categories = use_gt_categories
+
+    def _sample(self, instance: Instances, bbox_xywh: IntTupleBox) -> Dict[str, List[Any]]:
+        """
+        Sample DensePoseDataRelative from estimation results
+        """
+        if self.use_gt_categories:
+            instance_class = instance.dataset_classes.tolist()[0]
+        else:
+            instance_class = instance.pred_classes.tolist()[0]
+        mesh_name = self.class_to_mesh_name[instance_class]
+
+        annotation = {
+            DensePoseDataRelative.X_KEY: [],
+            DensePoseDataRelative.Y_KEY: [],
+            DensePoseDataRelative.VERTEX_IDS_KEY: [],
+            DensePoseDataRelative.MESH_NAME_KEY: mesh_name,
+        }
+
+        mask, embeddings, other_values = self._produce_mask_and_results(instance, bbox_xywh)
+        indices = torch.nonzero(mask, as_tuple=True)
+        selected_embeddings = embeddings.permute(1, 2, 0)[indices].cpu()
+        values = other_values[:, indices[0], indices[1]]
+        k = values.shape[1]
+
+        count = min(self.count_per_class, k)
+        if count <= 0:
+            return annotation
+
+        index_sample = self._produce_index_sample(values, count)
+        closest_vertices = squared_euclidean_distance_matrix(
+            selected_embeddings[index_sample], self.embedder(mesh_name)
+        )
+        closest_vertices = torch.argmin(closest_vertices, dim=1)
+
+        sampled_y = indices[0][index_sample] + 0.5
+        sampled_x = indices[1][index_sample] + 0.5
+        # prepare / normalize data
+        _, _, w, h = bbox_xywh
+        x = (sampled_x / w * 256.0).cpu().tolist()
+        y = (sampled_y / h * 256.0).cpu().tolist()
+        # extend annotations
+        annotation[DensePoseDataRelative.X_KEY].extend(x)
+        annotation[DensePoseDataRelative.Y_KEY].extend(y)
+        annotation[DensePoseDataRelative.VERTEX_IDS_KEY].extend(closest_vertices.cpu().tolist())
+        return annotation
+
+    def _produce_mask_and_results(
+        self, instance: Instances, bbox_xywh: IntTupleBox
+    ) ->
Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: + """ + Method to get labels and DensePose results from an instance + + Args: + instance (Instances): an instance of `DensePoseEmbeddingPredictorOutput` + bbox_xywh (IntTupleBox): the corresponding bounding box + + Return: + mask (torch.Tensor): shape [H, W], DensePose segmentation mask + embeddings (Tuple[torch.Tensor]): a tensor of shape [D, H, W], + DensePose CSE Embeddings + other_values (Tuple[torch.Tensor]): a tensor of shape [0, H, W], + for potential other values + """ + densepose_output = instance.pred_densepose + S = densepose_output.coarse_segm + E = densepose_output.embedding + _, _, w, h = bbox_xywh + # pyre-fixme[6]: Expected `Optional[int]` for 2nd param but got `Tuple[int, + # int]`. + embeddings = F.interpolate(E, size=(h, w), mode="bilinear")[0] + # pyre-fixme[6]: Expected `Optional[int]` for 2nd param but got `Tuple[int, + # int]`. + coarse_segm_resized = F.interpolate(S, size=(h, w), mode="bilinear")[0] + mask = coarse_segm_resized.argmax(0) > 0 + other_values = torch.empty((0, h, w), device=E.device) + return mask, embeddings, other_values + + def _resample_mask(self, output: Any) -> torch.Tensor: + """ + Convert DensePose predictor output to segmentation annotation - tensors of size + (256, 256) and type `int64`. + + Args: + output: DensePose predictor output with the following attributes: + - coarse_segm: tensor of size [N, D, H, W] with unnormalized coarse + segmentation scores + Return: + Tensor of size (S, S) and type `int64` with coarse segmentation annotations, + where S = DensePoseDataRelative.MASK_SIZE + """ + sz = DensePoseDataRelative.MASK_SIZE + mask = ( + # pyre-fixme[16]: `Tensor` has no attribute `argmax`. + # pyre-fixme[6]: Expected `Optional[int]` for 2nd param but got + # `Tuple[int, int]`. + F.interpolate(output.coarse_segm, (sz, sz), mode="bilinear", align_corners=False) + .argmax(dim=1) + .long() + .squeeze() + .cpu() + ) + return mask diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/densepose_cse_confidence_based.py b/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/densepose_cse_confidence_based.py new file mode 100644 index 0000000..d664fd4 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/densepose_cse_confidence_based.py @@ -0,0 +1,119 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +import random +from typing import Optional, Tuple +import torch +from torch.nn import functional as F + +from detectron2.config import CfgNode +from detectron2.structures import Instances + +from densepose.converters.base import IntTupleBox + +from .densepose_cse_base import DensePoseCSEBaseSampler + + +class DensePoseCSEConfidenceBasedSampler(DensePoseCSEBaseSampler): + """ + Samples DensePose data from DensePose predictions. + Samples for each class are drawn using confidence value estimates. 
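+
+    A usage sketch (illustrative only; `cfg` and `embedder` are assumed to come
+    from the DensePose model build):
+
+        sampler = DensePoseCSEConfidenceBasedSampler(
+            cfg, use_gt_categories=False, embedder=embedder,
+            confidence_channel="coarse_segm_confidence",
+        )
+        dp_list = sampler(instances)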
+ """ + + def __init__( + self, + cfg: CfgNode, + use_gt_categories: bool, + embedder: torch.nn.Module, + confidence_channel: str, + count_per_class: int = 8, + search_count_multiplier: Optional[float] = None, + search_proportion: Optional[float] = None, + ): + """ + Constructor + + Args: + cfg (CfgNode): the config of the model + embedder (torch.nn.Module): necessary to compute mesh vertex embeddings + confidence_channel (str): confidence channel to use for sampling; + possible values: + "coarse_segm_confidence": confidences for coarse segmentation + (default: "coarse_segm_confidence") + count_per_class (int): the sampler produces at most `count_per_class` + samples for each category (default: 8) + search_count_multiplier (float or None): if not None, the total number + of the most confident estimates of a given class to consider is + defined as `min(search_count_multiplier * count_per_class, N)`, + where `N` is the total number of estimates of the class; cannot be + specified together with `search_proportion` (default: None) + search_proportion (float or None): if not None, the total number of the + of the most confident estimates of a given class to consider is + defined as `min(max(search_proportion * N, count_per_class), N)`, + where `N` is the total number of estimates of the class; cannot be + specified together with `search_count_multiplier` (default: None) + """ + super().__init__(cfg, use_gt_categories, embedder, count_per_class) + self.confidence_channel = confidence_channel + self.search_count_multiplier = search_count_multiplier + self.search_proportion = search_proportion + assert (search_count_multiplier is None) or (search_proportion is None), ( + f"Cannot specify both search_count_multiplier (={search_count_multiplier})" + f"and search_proportion (={search_proportion})" + ) + + def _produce_index_sample(self, values: torch.Tensor, count: int): + """ + Produce a sample of indices to select data based on confidences + + Args: + values (torch.Tensor): a tensor of length k that contains confidences + k: number of points labeled with part_id + count (int): number of samples to produce, should be positive and <= k + + Return: + list(int): indices of values (along axis 1) selected as a sample + """ + k = values.shape[1] + if k == count: + index_sample = list(range(k)) + else: + # take the best count * search_count_multiplier pixels, + # sample from them uniformly + # (here best = smallest variance) + _, sorted_confidence_indices = torch.sort(values[0]) + if self.search_count_multiplier is not None: + search_count = min(int(count * self.search_count_multiplier), k) + elif self.search_proportion is not None: + search_count = min(max(int(k * self.search_proportion), count), k) + else: + search_count = min(count, k) + sample_from_top = random.sample(range(search_count), count) + index_sample = sorted_confidence_indices[-search_count:][sample_from_top] + return index_sample + + def _produce_mask_and_results( + self, instance: Instances, bbox_xywh: IntTupleBox + ) -> Tuple[torch.Tensor, torch.Tensor, torch.Tensor]: + """ + Method to get labels and DensePose results from an instance + + Args: + instance (Instances): an instance of + `DensePoseEmbeddingPredictorOutputWithConfidences` + bbox_xywh (IntTupleBox): the corresponding bounding box + + Return: + mask (torch.Tensor): shape [H, W], DensePose segmentation mask + embeddings (Tuple[torch.Tensor]): a tensor of shape [D, H, W] + DensePose CSE Embeddings + other_values: a tensor of shape [1, H, W], DensePose CSE confidence + """ + _, _, 
w, h = bbox_xywh + densepose_output = instance.pred_densepose + mask, embeddings, _ = super()._produce_mask_and_results(instance, bbox_xywh) + other_values = F.interpolate( + # pyre-fixme[6]: Expected `Optional[int]` for 2nd param but got + # `Tuple[int, int]`. + getattr(densepose_output, self.confidence_channel), size=(h, w), mode="bilinear" + )[0].cpu() + return mask, embeddings, other_values diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/densepose_cse_uniform.py b/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/densepose_cse_uniform.py new file mode 100644 index 0000000..567636c --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/densepose_cse_uniform.py @@ -0,0 +1,12 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from .densepose_cse_base import DensePoseCSEBaseSampler +from .densepose_uniform import DensePoseUniformSampler + + +class DensePoseCSEUniformSampler(DensePoseCSEBaseSampler, DensePoseUniformSampler): + """ + Uniform Sampler for CSE + """ + + pass diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/densepose_uniform.py b/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/densepose_uniform.py new file mode 100644 index 0000000..0d72cc3 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/densepose_uniform.py @@ -0,0 +1,41 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +import random +import torch + +from .densepose_base import DensePoseBaseSampler + + +class DensePoseUniformSampler(DensePoseBaseSampler): + """ + Samples DensePose data from DensePose predictions. + Samples for each class are drawn uniformly over all pixels estimated + to belong to that class. + """ + + def __init__(self, count_per_class: int = 8): + """ + Constructor + + Args: + count_per_class (int): the sampler produces at most `count_per_class` + samples for each category + """ + super().__init__(count_per_class) + + def _produce_index_sample(self, values: torch.Tensor, count: int): + """ + Produce a uniform sample of indices to select data + + Args: + values (torch.Tensor): an array of size [n, k] that contains + estimated values (U, V, confidences); + n: number of channels (U, V, confidences) + k: number of points labeled with part_id + count (int): number of samples to produce, should be positive and <= k + + Return: + list(int): indices of values (along axis 1) selected as a sample + """ + k = values.shape[1] + return random.sample(range(k), count) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/mask_from_densepose.py b/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/mask_from_densepose.py new file mode 100644 index 0000000..0e6e812 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/mask_from_densepose.py @@ -0,0 +1,28 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
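+#
+# Illustrative use (one possible wiring, not necessarily the one used in this
+# repo): register this sampler with PredictionToGroundTruthSampler to derive
+# GT masks from DensePose predictions:
+#
+#     sampler.register_sampler("pred_densepose", "gt_masks", MaskFromDensePoseSampler())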
+
+from detectron2.structures import BitMasks, Instances
+
+from densepose.converters import ToMaskConverter
+
+
+class MaskFromDensePoseSampler:
+    """
+    Produce mask GT from DensePose predictions
+    This sampler simply converts DensePose predictions to BitMasks
+    that contain a bool tensor of the size of the input image
+    """
+
+    def __call__(self, instances: Instances) -> BitMasks:
+        """
+        Converts predicted data from `instances` into the GT mask data
+
+        Args:
+            instances (Instances): predicted results, expected to have `pred_densepose` field
+
+        Returns:
+            Boolean Tensor of the size of the input image that has non-zero
+            values at pixels that are estimated to belong to the detected object
+        """
+        return ToMaskConverter.convert(
+            instances.pred_densepose, instances.pred_boxes, instances.image_size
+        )
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/prediction_to_gt.py b/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/prediction_to_gt.py
new file mode 100644
index 0000000..3881fa5
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/data/samplers/prediction_to_gt.py
@@ -0,0 +1,98 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+from dataclasses import dataclass
+from typing import Any, Callable, Dict, List, Optional
+
+from detectron2.structures import Instances
+
+ModelOutput = Dict[str, Any]
+SampledData = Dict[str, Any]
+
+
+@dataclass
+class _Sampler:
+    """
+    Sampler registry entry that contains:
+     - src (str): source field to sample from (deleted after sampling)
+     - dst (Optional[str]): destination field to sample to, if not None
+     - func (Optional[Callable: Any -> Any]): function that performs sampling,
+       if None, reference copy is performed
+    """
+
+    src: str
+    dst: Optional[str]
+    func: Optional[Callable[[Any], Any]]
+
+
+class PredictionToGroundTruthSampler:
+    """
+    Sampler implementation that converts predictions to GT using registered
+    samplers for different fields of `Instances`.
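+
+    A small illustrative example (by default `pred_boxes` -> `gt_boxes` and
+    `pred_classes` -> `gt_classes` are copied and `scores` is dropped):
+
+        sampler = PredictionToGroundTruthSampler("my_dataset")
+        sampler.register_sampler("pred_densepose", "gt_densepose", DensePoseUniformSampler())
+        sampled = sampler(model_output)  # list of dicts with GT-style fields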
+ """ + + def __init__(self, dataset_name: str = ""): + self.dataset_name = dataset_name + self._samplers = {} + self.register_sampler("pred_boxes", "gt_boxes", None) + self.register_sampler("pred_classes", "gt_classes", None) + # delete scores + self.register_sampler("scores") + + def __call__(self, model_output: List[ModelOutput]) -> List[SampledData]: + """ + Transform model output into ground truth data through sampling + + Args: + model_output (Dict[str, Any]): model output + Returns: + Dict[str, Any]: sampled data + """ + for model_output_i in model_output: + instances: Instances = model_output_i["instances"] + # transform data in each field + for _, sampler in self._samplers.items(): + if not instances.has(sampler.src) or sampler.dst is None: + continue + if sampler.func is None: + instances.set(sampler.dst, instances.get(sampler.src)) + else: + instances.set(sampler.dst, sampler.func(instances)) + # delete model output data that was transformed + for _, sampler in self._samplers.items(): + if sampler.src != sampler.dst and instances.has(sampler.src): + instances.remove(sampler.src) + model_output_i["dataset"] = self.dataset_name + return model_output + + def register_sampler( + self, + prediction_attr: str, + gt_attr: Optional[str] = None, + func: Optional[Callable[[Any], Any]] = None, + ): + """ + Register sampler for a field + + Args: + prediction_attr (str): field to replace with a sampled value + gt_attr (Optional[str]): field to store the sampled value to, if not None + func (Optional[Callable: Any -> Any]): sampler function + """ + self._samplers[(prediction_attr, gt_attr)] = _Sampler( + src=prediction_attr, dst=gt_attr, func=func + ) + + def remove_sampler( + self, + prediction_attr: str, + gt_attr: Optional[str] = None, + ): + """ + Remove sampler for a field + + Args: + prediction_attr (str): field to replace with a sampled value + gt_attr (Optional[str]): field to store the sampled value to, if not None + """ + assert (prediction_attr, gt_attr) in self._samplers + del self._samplers[(prediction_attr, gt_attr)] diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/transform/__init__.py b/motion-gan-pipeline/preprocessing/third/densepose/data/transform/__init__.py new file mode 100644 index 0000000..369e1b2 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/data/transform/__init__.py @@ -0,0 +1,3 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from .image import ImageResizeTransform diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/transform/image.py b/motion-gan-pipeline/preprocessing/third/densepose/data/transform/image.py new file mode 100644 index 0000000..842865f --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/data/transform/image.py @@ -0,0 +1,38 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+
+import torch
+
+
+class ImageResizeTransform:
+    """
+    Transform that resizes images loaded from a dataset
+    (BGR data in NCHW channel order, typically uint8) to a format ready to be
+    consumed by DensePose training (BGR float32 data in NCHW channel order)
+    """
+
+    def __init__(self, min_size: int = 800, max_size: int = 1333):
+        self.min_size = min_size
+        self.max_size = max_size
+
+    def __call__(self, images: torch.Tensor) -> torch.Tensor:
+        """
+        Args:
+            images (torch.Tensor): tensor of size [N, 3, H, W] that contains
+                BGR data (typically in uint8)
+        Returns:
+            images (torch.Tensor): tensor of size [N, 3, H1, W1] where
+                H1 and W1 are chosen to respect the specified min and max sizes
+                and preserve the original aspect ratio, the data channels
+                follow BGR order and the data type is `torch.float32`
+        """
+        # resize with min size
+        images = images.float()
+        min_size = min(images.shape[-2:])
+        max_size = max(images.shape[-2:])
+        scale = min(self.min_size / min_size, self.max_size / max_size)
+        images = torch.nn.functional.interpolate(
+            # pyre-fixme[6]: Expected `Optional[typing.List[float]]` for 2nd param
+            #  but got `float`.
+            images, scale_factor=scale, mode="bilinear", align_corners=False
+        )
+        return images
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/utils.py b/motion-gan-pipeline/preprocessing/third/densepose/data/utils.py
new file mode 100644
index 0000000..9878c31
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/data/utils.py
@@ -0,0 +1,38 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+import os
+from typing import Dict, Optional
+
+from detectron2.config import CfgNode
+
+
+def is_relative_local_path(path: str) -> bool:
+    path_str = os.fsdecode(path)
+    return ("://" not in path_str) and not os.path.isabs(path)
+
+
+def maybe_prepend_base_path(base_path: Optional[str], path: str):
+    """
+    Prepends the provided path with a base path prefix if:
+    1) base path is not None;
+    2) path is a relative local path
+    """
+    if base_path is None:
+        return path
+    if is_relative_local_path(path):
+        return os.path.join(base_path, path)
+    return path
+
+
+def get_class_to_mesh_name_mapping(cfg: CfgNode) -> Dict[int, str]:
+    return {
+        int(class_id): mesh_name
+        for class_id, mesh_name in cfg.DATASETS.CLASS_TO_MESH_NAME_MAPPING.items()
+    }
+
+
+def get_category_to_class_mapping(dataset_cfg: CfgNode) -> Dict[str, int]:
+    return {
+        category: int(class_id)
+        for category, class_id in dataset_cfg.CATEGORY_TO_CLASS_MAPPING.items()
+    }
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/video/__init__.py b/motion-gan-pipeline/preprocessing/third/densepose/data/video/__init__.py
new file mode 100644
index 0000000..72406e1
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/data/video/__init__.py
@@ -0,0 +1,17 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+from .frame_selector import (
+    FrameSelectionStrategy,
+    RandomKFramesSelector,
+    FirstKFramesSelector,
+    LastKFramesSelector,
+    FrameTsList,
+    FrameSelector,
+)
+
+from .video_keyframe_dataset import (
+    VideoKeyframeDataset,
+    video_list_from_file,
+    list_keyframes,
+    read_keyframes,
+)
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/video/frame_selector.py b/motion-gan-pipeline/preprocessing/third/densepose/data/video/frame_selector.py
new file mode 100644
index 0000000..c28f0e9
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/data/video/frame_selector.py
@@ -0,0 +1,87 @@
+# Copyright (c) Facebook, Inc.
and its affiliates. + +import random +from collections.abc import Callable +from enum import Enum +from typing import Callable as TCallable +from typing import List + +FrameTsList = List[int] +FrameSelector = TCallable[[FrameTsList], FrameTsList] + + +class FrameSelectionStrategy(Enum): + """ + Frame selection strategy used with videos: + - "random_k": select k random frames + - "first_k": select k first frames + - "last_k": select k last frames + - "all": select all frames + """ + + # fmt: off + RANDOM_K = "random_k" + FIRST_K = "first_k" + LAST_K = "last_k" + ALL = "all" + # fmt: on + + +class RandomKFramesSelector(Callable): # pyre-ignore[39] + """ + Selector that retains at most `k` random frames + """ + + def __init__(self, k: int): + self.k = k + + def __call__(self, frame_tss: FrameTsList) -> FrameTsList: + """ + Select `k` random frames + + Args: + frames_tss (List[int]): timestamps of input frames + Returns: + List[int]: timestamps of selected frames + """ + return random.sample(frame_tss, min(self.k, len(frame_tss))) + + +class FirstKFramesSelector(Callable): # pyre-ignore[39] + """ + Selector that retains at most `k` first frames + """ + + def __init__(self, k: int): + self.k = k + + def __call__(self, frame_tss: FrameTsList) -> FrameTsList: + """ + Select `k` first frames + + Args: + frames_tss (List[int]): timestamps of input frames + Returns: + List[int]: timestamps of selected frames + """ + return frame_tss[: self.k] + + +class LastKFramesSelector(Callable): # pyre-ignore[39] + """ + Selector that retains at most `k` last frames from video data + """ + + def __init__(self, k: int): + self.k = k + + def __call__(self, frame_tss: FrameTsList) -> FrameTsList: + """ + Select `k` last frames + + Args: + frames_tss (List[int]): timestamps of input frames + Returns: + List[int]: timestamps of selected frames + """ + return frame_tss[-self.k :] diff --git a/motion-gan-pipeline/preprocessing/third/densepose/data/video/video_keyframe_dataset.py b/motion-gan-pipeline/preprocessing/third/densepose/data/video/video_keyframe_dataset.py new file mode 100644 index 0000000..214365c --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/data/video/video_keyframe_dataset.py @@ -0,0 +1,300 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. + +import csv +import logging +import numpy as np +from typing import Any, Callable, Dict, List, Optional, Union +import av +import torch +from torch.utils.data.dataset import Dataset + +from detectron2.utils.file_io import PathManager + +from ..utils import maybe_prepend_base_path +from .frame_selector import FrameSelector, FrameTsList + +FrameList = List[av.frame.Frame] # pyre-ignore[16] +FrameTransform = Callable[[torch.Tensor], torch.Tensor] + + +def list_keyframes(video_fpath: str, video_stream_idx: int = 0) -> FrameTsList: + """ + Traverses all keyframes of a video file. Returns a list of keyframe + timestamps. Timestamps are counts in timebase units. + + Args: + video_fpath (str): Video file path + video_stream_idx (int): Video stream index (default: 0) + Returns: + List[int]: list of keyframe timestaps (timestamp is a count in timebase + units) + """ + try: + with PathManager.open(video_fpath, "rb") as io: + container = av.open(io, mode="r") + stream = container.streams.video[video_stream_idx] + keyframes = [] + pts = -1 + # Note: even though we request forward seeks for keyframes, sometimes + # a keyframe in backwards direction is returned. 
We introduce tolerance + # as a max count of ignored backward seeks + tolerance_backward_seeks = 2 + while True: + try: + container.seek(pts + 1, backward=False, any_frame=False, stream=stream) + except av.AVError as e: + # the exception occurs when the video length is exceeded, + # we then return whatever data we've already collected + logger = logging.getLogger(__name__) + logger.debug( + f"List keyframes: Error seeking video file {video_fpath}, " + f"video stream {video_stream_idx}, pts {pts + 1}, AV error: {e}" + ) + return keyframes + except OSError as e: + logger = logging.getLogger(__name__) + logger.warning( + f"List keyframes: Error seeking video file {video_fpath}, " + f"video stream {video_stream_idx}, pts {pts + 1}, OS error: {e}" + ) + return [] + packet = next(container.demux(video=video_stream_idx)) + if packet.pts is not None and packet.pts <= pts: + logger = logging.getLogger(__name__) + logger.warning( + f"Video file {video_fpath}, stream {video_stream_idx}: " + f"bad seek for packet {pts + 1} (got packet {packet.pts}), " + f"tolerance {tolerance_backward_seeks}." + ) + tolerance_backward_seeks -= 1 + if tolerance_backward_seeks == 0: + return [] + pts += 1 + continue + tolerance_backward_seeks = 2 + pts = packet.pts + if pts is None: + return keyframes + if packet.is_keyframe: + keyframes.append(pts) + return keyframes + except OSError as e: + logger = logging.getLogger(__name__) + logger.warning( + f"List keyframes: Error opening video file container {video_fpath}, " f"OS error: {e}" + ) + except RuntimeError as e: + logger = logging.getLogger(__name__) + logger.warning( + f"List keyframes: Error opening video file container {video_fpath}, " + f"Runtime error: {e}" + ) + return [] + + +def read_keyframes( + video_fpath: str, keyframes: FrameTsList, video_stream_idx: int = 0 +) -> FrameList: # pyre-ignore[11] + """ + Reads keyframe data from a video file. 
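
For orientation, the keyframe helpers and the selectors defined earlier compose as in this sketch; the module path matches the package exports above, while the video path is a placeholder and PyAV must be installed:

    from densepose.data.video import (
        RandomKFramesSelector,
        list_keyframes,
        read_keyframes,
    )

    video = "/data/clip.mp4"                 # placeholder path, not part of this diff
    keyframe_pts = list_keyframes(video)     # keyframe timestamps in timebase units
    selector = RandomKFramesSelector(10)     # keep at most 10 random keyframes
    frames = read_keyframes(video, selector(keyframe_pts))
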
+ + Args: + video_fpath (str): Video file path + keyframes (List[int]): List of keyframe timestamps (as counts in + timebase units to be used in container seek operations) + video_stream_idx (int): Video stream index (default: 0) + Returns: + List[Frame]: list of frames that correspond to the specified timestamps + """ + try: + with PathManager.open(video_fpath, "rb") as io: + container = av.open(io) + stream = container.streams.video[video_stream_idx] + frames = [] + for pts in keyframes: + try: + container.seek(pts, any_frame=False, stream=stream) + frame = next(container.decode(video=0)) + frames.append(frame) + except av.AVError as e: + logger = logging.getLogger(__name__) + logger.warning( + f"Read keyframes: Error seeking video file {video_fpath}, " + f"video stream {video_stream_idx}, pts {pts}, AV error: {e}" + ) + container.close() + return frames + except OSError as e: + logger = logging.getLogger(__name__) + logger.warning( + f"Read keyframes: Error seeking video file {video_fpath}, " + f"video stream {video_stream_idx}, pts {pts}, OS error: {e}" + ) + container.close() + return frames + except StopIteration: + logger = logging.getLogger(__name__) + logger.warning( + f"Read keyframes: Error decoding frame from {video_fpath}, " + f"video stream {video_stream_idx}, pts {pts}" + ) + container.close() + return frames + + container.close() + return frames + except OSError as e: + logger = logging.getLogger(__name__) + logger.warning( + f"Read keyframes: Error opening video file container {video_fpath}, OS error: {e}" + ) + except RuntimeError as e: + logger = logging.getLogger(__name__) + logger.warning( + f"Read keyframes: Error opening video file container {video_fpath}, Runtime error: {e}" + ) + return [] + + +def video_list_from_file(video_list_fpath: str, base_path: Optional[str] = None): + """ + Create a list of paths to video files from a text file. + + Args: + video_list_fpath (str): path to a plain text file with the list of videos + base_path (str): base path for entries from the video list (default: None) + """ + video_list = [] + with PathManager.open(video_list_fpath, "r") as io: + for line in io: + video_list.append(maybe_prepend_base_path(base_path, str(line.strip()))) + return video_list + + +def read_keyframe_helper_data(fpath: str): + """ + Read keyframe data from a file in CSV format: the header should contain + "video_id" and "keyframes" fields. 
Value specifications are: + video_id: int + keyframes: list(int) + Example of contents: + video_id,keyframes + 2,"[1,11,21,31,41,51,61,71,81]" + + Args: + fpath (str): File containing keyframe data + + Return: + video_id_to_keyframes (dict: int -> list(int)): for a given video ID it + contains a list of keyframes for that video + """ + video_id_to_keyframes = {} + try: + with PathManager.open(fpath, "r") as io: + csv_reader = csv.reader(io) # pyre-ignore[6] + header = next(csv_reader) + video_id_idx = header.index("video_id") + keyframes_idx = header.index("keyframes") + for row in csv_reader: + video_id = int(row[video_id_idx]) + assert ( + video_id not in video_id_to_keyframes + ), f"Duplicate keyframes entry for video {fpath}" + video_id_to_keyframes[video_id] = ( + [int(v) for v in row[keyframes_idx][1:-1].split(",")] + if len(row[keyframes_idx]) > 2 + else [] + ) + except Exception as e: + logger = logging.getLogger(__name__) + logger.warning(f"Error reading keyframe helper data from {fpath}: {e}") + return video_id_to_keyframes + + +class VideoKeyframeDataset(Dataset): + """ + Dataset that provides keyframes for a set of videos. + """ + + _EMPTY_FRAMES = torch.empty((0, 3, 1, 1)) + + def __init__( + self, + video_list: List[str], + category_list: Union[str, List[str], None] = None, + frame_selector: Optional[FrameSelector] = None, + transform: Optional[FrameTransform] = None, + keyframe_helper_fpath: Optional[str] = None, + ): + """ + Dataset constructor + + Args: + video_list (List[str]): list of paths to video files + category_list (Union[str, List[str], None]): list of animal categories for each + video file. If it is a string, or None, this applies to all videos + frame_selector (Callable: KeyFrameList -> KeyFrameList): + selects keyframes to process, keyframes are given by + packet timestamps in timebase counts. If None, all keyframes + are selected (default: None) + transform (Callable: torch.Tensor -> torch.Tensor): + transforms a batch of RGB images (tensors of size [B, 3, H, W]), + returns a tensor of the same size. 
If None, no transform is + applied (default: None) + + """ + if type(category_list) == list: + self.category_list = category_list + else: + self.category_list = [category_list] * len(video_list) + assert len(video_list) == len( + self.category_list + ), "length of video and category lists must be equal" + self.video_list = video_list + self.frame_selector = frame_selector + self.transform = transform + self.keyframe_helper_data = ( + read_keyframe_helper_data(keyframe_helper_fpath) + if keyframe_helper_fpath is not None + else None + ) + + def __getitem__(self, idx: int) -> Dict[str, Any]: + """ + Gets selected keyframes from a given video + + Args: + idx (int): video index in the video list file + Returns: + A dictionary containing two keys: + images (torch.Tensor): tensor of size [N, H, W, 3] or of size + defined by the transform that contains keyframes data + categories (List[str]): categories of the frames + """ + categories = [self.category_list[idx]] + fpath = self.video_list[idx] + keyframes = ( + list_keyframes(fpath) + if self.keyframe_helper_data is None or idx not in self.keyframe_helper_data + else self.keyframe_helper_data[idx] + ) + transform = self.transform + frame_selector = self.frame_selector + if not keyframes: + return {"images": self._EMPTY_FRAMES, "categories": []} + if frame_selector is not None: + keyframes = frame_selector(keyframes) + frames = read_keyframes(fpath, keyframes) + if not frames: + return {"images": self._EMPTY_FRAMES, "categories": []} + frames = np.stack([frame.to_rgb().to_ndarray() for frame in frames]) + frames = torch.as_tensor(frames, device=torch.device("cpu")) + frames = frames[..., [2, 1, 0]] # RGB -> BGR + frames = frames.permute(0, 3, 1, 2).float() # NHWC -> NCHW + if transform is not None: + frames = transform(frames) + return {"images": frames, "categories": categories} + + def __len__(self): + return len(self.video_list) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/densepose_rcnn_R_50_FPN_s1x.yaml b/motion-gan-pipeline/preprocessing/third/densepose/densepose_rcnn_R_50_FPN_s1x.yaml new file mode 100644 index 0000000..d2dd14c --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/densepose_rcnn_R_50_FPN_s1x.yaml @@ -0,0 +1,8 @@ +_BASE_: "Base-DensePose-RCNN-FPN.yaml" +MODEL: + WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl" + RESNETS: + DEPTH: 50 +SOLVER: + MAX_ITER: 130000 + STEPS: (100000, 120000) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/engine/__init__.py b/motion-gan-pipeline/preprocessing/third/densepose/engine/__init__.py new file mode 100644 index 0000000..539b93a --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/engine/__init__.py @@ -0,0 +1,3 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from .trainer import Trainer diff --git a/motion-gan-pipeline/preprocessing/third/densepose/engine/trainer.py b/motion-gan-pipeline/preprocessing/third/densepose/engine/trainer.py new file mode 100644 index 0000000..1c25131 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/engine/trainer.py @@ -0,0 +1,257 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved + +import logging +import os +from collections import OrderedDict +from typing import List, Optional, Union +import torch +from torch import nn + +from detectron2.checkpoint import DetectionCheckpointer +from detectron2.config import CfgNode +from detectron2.engine import DefaultTrainer +from detectron2.evaluation import ( + DatasetEvaluator, + DatasetEvaluators, + inference_on_dataset, + print_csv_format, +) +from detectron2.solver.build import get_default_optimizer_params, maybe_add_gradient_clipping +from detectron2.utils import comm +from detectron2.utils.events import EventWriter, get_event_storage + +from densepose import DensePoseDatasetMapperTTA, DensePoseGeneralizedRCNNWithTTA, load_from_cfg +from densepose.data import ( + DatasetMapper, + build_combined_loader, + build_detection_test_loader, + build_detection_train_loader, + build_inference_based_loaders, + has_inference_based_loaders, +) +from densepose.evaluation.d2_evaluator_adapter import Detectron2COCOEvaluatorAdapter +from densepose.evaluation.evaluator import DensePoseCOCOEvaluator, build_densepose_evaluator_storage +from densepose.modeling.cse import Embedder + + +class SampleCountingLoader: + def __init__(self, loader): + self.loader = loader + + def __iter__(self): + it = iter(self.loader) + storage = get_event_storage() + while True: + try: + batch = next(it) + num_inst_per_dataset = {} + for data in batch: + dataset_name = data["dataset"] + if dataset_name not in num_inst_per_dataset: + num_inst_per_dataset[dataset_name] = 0 + num_inst = len(data["instances"]) + num_inst_per_dataset[dataset_name] += num_inst + for dataset_name in num_inst_per_dataset: + storage.put_scalar(f"batch/{dataset_name}", num_inst_per_dataset[dataset_name]) + yield batch + except StopIteration: + break + + +class SampleCountMetricPrinter(EventWriter): + def __init__(self): + self.logger = logging.getLogger(__name__) + + def write(self): + storage = get_event_storage() + batch_stats_strs = [] + for key, buf in storage.histories().items(): + if key.startswith("batch/"): + batch_stats_strs.append(f"{key} {buf.avg(20)}") + self.logger.info(", ".join(batch_stats_strs)) + + +class Trainer(DefaultTrainer): + @classmethod + def extract_embedder_from_model(cls, model: nn.Module) -> Optional[Embedder]: + if isinstance(model, nn.parallel.DistributedDataParallel): + model = model.module + if hasattr(model, "roi_heads") and hasattr(model.roi_heads, "embedder"): + return model.roi_heads.embedder + return None + + # TODO: the only reason to copy the base class code here is to pass the embedder from + # the model to the evaluator; that should be refactored to avoid unnecessary copy-pasting + @classmethod + def test( + cls, + cfg: CfgNode, + model: nn.Module, + evaluators: Optional[Union[DatasetEvaluator, List[DatasetEvaluator]]] = None, + ): + """ + Args: + cfg (CfgNode): + model (nn.Module): + evaluators (DatasetEvaluator, list[DatasetEvaluator] or None): if None, will call + :meth:`build_evaluator`. Otherwise, must have the same length as + ``cfg.DATASETS.TEST``. 
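
An illustrative call, assuming `cfg` is a configured CfgNode and `model` a built model (both names are placeholders, not part of this diff):

    results = Trainer.test(cfg, model)   # builds one evaluator per dataset in
                                         # cfg.DATASETS.TEST, passing along the
                                         # embedder extracted from the model
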
+ + Returns: + dict: a dict of result metrics + """ + logger = logging.getLogger(__name__) + if isinstance(evaluators, DatasetEvaluator): + evaluators = [evaluators] + if evaluators is not None: + assert len(cfg.DATASETS.TEST) == len(evaluators), "{} != {}".format( + len(cfg.DATASETS.TEST), len(evaluators) + ) + + results = OrderedDict() + for idx, dataset_name in enumerate(cfg.DATASETS.TEST): + data_loader = cls.build_test_loader(cfg, dataset_name) + # When evaluators are passed in as arguments, + # implicitly assume that evaluators can be created before data_loader. + if evaluators is not None: + evaluator = evaluators[idx] + else: + try: + embedder = cls.extract_embedder_from_model(model) + evaluator = cls.build_evaluator(cfg, dataset_name, embedder=embedder) + except NotImplementedError: + logger.warn( + "No evaluator found. Use `DefaultTrainer.test(evaluators=)`, " + "or implement its `build_evaluator` method." + ) + results[dataset_name] = {} + continue + if cfg.DENSEPOSE_EVALUATION.DISTRIBUTED_INFERENCE or comm.is_main_process(): + results_i = inference_on_dataset(model, data_loader, evaluator) + else: + results_i = {} + results[dataset_name] = results_i + if comm.is_main_process(): + assert isinstance( + results_i, dict + ), "Evaluator must return a dict on the main process. Got {} instead.".format( + results_i + ) + logger.info("Evaluation results for {} in csv format:".format(dataset_name)) + print_csv_format(results_i) + + if len(results) == 1: + results = list(results.values())[0] + return results + + @classmethod + def build_evaluator( + cls, + cfg: CfgNode, + dataset_name: str, + output_folder: Optional[str] = None, + embedder: Optional[Embedder] = None, + ) -> DatasetEvaluators: + if output_folder is None: + output_folder = os.path.join(cfg.OUTPUT_DIR, "inference") + evaluators = [] + distributed = cfg.DENSEPOSE_EVALUATION.DISTRIBUTED_INFERENCE + # Note: we currently use COCO evaluator for both COCO and LVIS datasets + # to have compatible metrics. 
LVIS bbox evaluator could also be used + # with an adapter to properly handle filtered / mapped categories + # evaluator_type = MetadataCatalog.get(dataset_name).evaluator_type + # if evaluator_type == "coco": + # evaluators.append(COCOEvaluator(dataset_name, output_dir=output_folder)) + # elif evaluator_type == "lvis": + # evaluators.append(LVISEvaluator(dataset_name, output_dir=output_folder)) + evaluators.append( + Detectron2COCOEvaluatorAdapter( + dataset_name, output_dir=output_folder, distributed=distributed + ) + ) + if cfg.MODEL.DENSEPOSE_ON: + storage = build_densepose_evaluator_storage(cfg, output_folder) + evaluators.append( + DensePoseCOCOEvaluator( + dataset_name, + distributed, + output_folder, + evaluator_type=cfg.DENSEPOSE_EVALUATION.TYPE, + min_iou_threshold=cfg.DENSEPOSE_EVALUATION.MIN_IOU_THRESHOLD, + storage=storage, + embedder=embedder, + should_evaluate_mesh_alignment=cfg.DENSEPOSE_EVALUATION.EVALUATE_MESH_ALIGNMENT, + mesh_alignment_mesh_names=cfg.DENSEPOSE_EVALUATION.MESH_ALIGNMENT_MESH_NAMES, + ) + ) + return DatasetEvaluators(evaluators) + + @classmethod + def build_optimizer(cls, cfg: CfgNode, model: nn.Module): + params = get_default_optimizer_params( + model, + base_lr=cfg.SOLVER.BASE_LR, + weight_decay_norm=cfg.SOLVER.WEIGHT_DECAY_NORM, + bias_lr_factor=cfg.SOLVER.BIAS_LR_FACTOR, + weight_decay_bias=cfg.SOLVER.WEIGHT_DECAY_BIAS, + overrides={ + "features": { + "lr": cfg.SOLVER.BASE_LR * cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.FEATURES_LR_FACTOR, + }, + "embeddings": { + "lr": cfg.SOLVER.BASE_LR * cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBEDDING_LR_FACTOR, + }, + }, + ) + optimizer = torch.optim.SGD( + params, + cfg.SOLVER.BASE_LR, + momentum=cfg.SOLVER.MOMENTUM, + nesterov=cfg.SOLVER.NESTEROV, + weight_decay=cfg.SOLVER.WEIGHT_DECAY, + ) + return maybe_add_gradient_clipping(cfg, optimizer) + + @classmethod + def build_test_loader(cls, cfg: CfgNode, dataset_name): + return build_detection_test_loader(cfg, dataset_name, mapper=DatasetMapper(cfg, False)) + + @classmethod + def build_train_loader(cls, cfg: CfgNode): + data_loader = build_detection_train_loader(cfg, mapper=DatasetMapper(cfg, True)) + if not has_inference_based_loaders(cfg): + return data_loader + model = cls.build_model(cfg) + model.to(cfg.BOOTSTRAP_MODEL.DEVICE) + DetectionCheckpointer(model).resume_or_load(cfg.BOOTSTRAP_MODEL.WEIGHTS, resume=False) + inference_based_loaders, ratios = build_inference_based_loaders(cfg, model) + loaders = [data_loader] + inference_based_loaders + ratios = [1.0] + ratios + combined_data_loader = build_combined_loader(cfg, loaders, ratios) + sample_counting_loader = SampleCountingLoader(combined_data_loader) + return sample_counting_loader + + def build_writers(self): + writers = super().build_writers() + writers.append(SampleCountMetricPrinter()) + return writers + + @classmethod + def test_with_TTA(cls, cfg: CfgNode, model): + logger = logging.getLogger("detectron2.trainer") + # In the end of training, run an evaluation with TTA + # Only support some R-CNN models. 
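
A note on `build_train_loader` above: the real data loader keeps weight 1.0 and each inference-based loader keeps its configured ratio before everything is handed to `build_combined_loader`. The sketch below only illustrates proportional sampling; the loader names and `random.choices` stand in for the actual combined-loader logic, which this diff does not show:

    import random

    loaders = ["real_data", "bootstrap_a", "bootstrap_b"]   # placeholder names
    ratios = [1.0, 0.3, 0.2]                                # [1.0] + ratios, as above
    # draw 5 batches, each loader chosen proportionally to its ratio
    print(random.choices(loaders, weights=ratios, k=5))
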
+ logger.info("Running inference with test-time augmentation ...") + transform_data = load_from_cfg(cfg) + model = DensePoseGeneralizedRCNNWithTTA( + cfg, model, transform_data, DensePoseDatasetMapperTTA(cfg) + ) + evaluators = [ + cls.build_evaluator( + cfg, name, output_folder=os.path.join(cfg.OUTPUT_DIR, "inference_TTA") + ) + for name in cfg.DATASETS.TEST + ] + res = cls.test(cfg, model, evaluators) # pyre-ignore[6] + res = OrderedDict({k + "_TTA": v for k, v in res.items()}) + return res diff --git a/motion-gan-pipeline/preprocessing/third/densepose/evaluation/__init__.py b/motion-gan-pipeline/preprocessing/third/densepose/evaluation/__init__.py new file mode 100644 index 0000000..e5ae1f2 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/evaluation/__init__.py @@ -0,0 +1,3 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from .evaluator import DensePoseCOCOEvaluator diff --git a/motion-gan-pipeline/preprocessing/third/densepose/evaluation/d2_evaluator_adapter.py b/motion-gan-pipeline/preprocessing/third/densepose/evaluation/d2_evaluator_adapter.py new file mode 100644 index 0000000..1fbc526 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/evaluation/d2_evaluator_adapter.py @@ -0,0 +1,50 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from detectron2.data.catalog import Metadata +from detectron2.evaluation import COCOEvaluator + +from densepose.data.datasets.coco import ( + get_contiguous_id_to_category_id_map, + maybe_filter_categories_cocoapi, +) + + +def _maybe_add_iscrowd_annotations(cocoapi) -> None: + for ann in cocoapi.dataset["annotations"]: + if "iscrowd" not in ann: + ann["iscrowd"] = 0 + + +class Detectron2COCOEvaluatorAdapter(COCOEvaluator): + def __init__( + self, + dataset_name, + output_dir=None, + distributed=True, + ): + super().__init__(dataset_name, output_dir=output_dir, distributed=distributed) + maybe_filter_categories_cocoapi(dataset_name, self._coco_api) + _maybe_add_iscrowd_annotations(self._coco_api) + # substitute category metadata to account for categories + # that are mapped to the same contiguous id + if hasattr(self._metadata, "thing_dataset_id_to_contiguous_id"): + self._maybe_substitute_metadata() + + def _maybe_substitute_metadata(self): + cont_id_2_cat_id = get_contiguous_id_to_category_id_map(self._metadata) + cat_id_2_cont_id = self._metadata.thing_dataset_id_to_contiguous_id + if len(cont_id_2_cat_id) == len(cat_id_2_cont_id): + return + + cat_id_2_cont_id_injective = {} + for cat_id, cont_id in cat_id_2_cont_id.items(): + if (cont_id in cont_id_2_cat_id) and (cont_id_2_cat_id[cont_id] == cat_id): + cat_id_2_cont_id_injective[cat_id] = cont_id + + metadata_new = Metadata(name=self._metadata.name) + for key, value in self._metadata.__dict__.items(): + if key == "thing_dataset_id_to_contiguous_id": + setattr(metadata_new, key, cat_id_2_cont_id_injective) + else: + setattr(metadata_new, key, value) + self._metadata = metadata_new diff --git a/motion-gan-pipeline/preprocessing/third/densepose/evaluation/densepose_coco_evaluation.py b/motion-gan-pipeline/preprocessing/third/densepose/evaluation/densepose_coco_evaluation.py new file mode 100644 index 0000000..5cd507f --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/evaluation/densepose_coco_evaluation.py @@ -0,0 +1,1302 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# All rights reserved. +# +# This source code is licensed under the license found in the +# LICENSE file in the root directory of this source tree. 
+# This is a modified version of cocoeval.py where we also have the densepose evaluation. + +__author__ = "tsungyi" + +import copy +import datetime +import logging +import numpy as np +import pickle +import time +from collections import defaultdict +from enum import Enum +from typing import Any, Dict, Tuple +import scipy.spatial.distance as ssd +import torch +import torch.nn.functional as F +from pycocotools import mask as maskUtils +from scipy.io import loadmat +from scipy.ndimage import zoom as spzoom + +from detectron2.utils.file_io import PathManager + +from densepose.converters.chart_output_to_chart_result import resample_uv_tensors_to_bbox +from densepose.converters.segm_to_mask import ( + resample_coarse_segm_tensor_to_bbox, + resample_fine_and_coarse_segm_tensors_to_bbox, +) +from densepose.modeling.cse.utils import squared_euclidean_distance_matrix +from densepose.structures import DensePoseDataRelative +from densepose.structures.mesh import create_mesh + +logger = logging.getLogger(__name__) + + +class DensePoseEvalMode(str, Enum): + # use both masks and geodesic distances (GPS * IOU) to compute scores + GPSM = "gpsm" + # use only geodesic distances (GPS) to compute scores + GPS = "gps" + # use only masks (IOU) to compute scores + IOU = "iou" + + +class DensePoseDataMode(str, Enum): + # use estimated IUV data (default mode) + IUV_DT = "iuvdt" + # use ground truth IUV data + IUV_GT = "iuvgt" + # use ground truth labels I and set UV to 0 + I_GT_UV_0 = "igtuv0" + # use ground truth labels I and estimated UV coordinates + I_GT_UV_DT = "igtuvdt" + # use estimated labels I and set UV to 0 + I_DT_UV_0 = "idtuv0" + + +class DensePoseCocoEval(object): + # Interface for evaluating detection on the Microsoft COCO dataset. + # + # The usage for CocoEval is as follows: + # cocoGt=..., cocoDt=... # load dataset and results + # E = CocoEval(cocoGt,cocoDt); # initialize CocoEval object + # E.params.recThrs = ...; # set parameters as desired + # E.evaluate(); # run per image evaluation + # E.accumulate(); # accumulate per image results + # E.summarize(); # display summary metrics of results + # For example usage see evalDemo.m and http://mscoco.org/. + # + # The evaluation parameters are as follows (defaults in brackets): + # imgIds - [all] N img ids to use for evaluation + # catIds - [all] K cat ids to use for evaluation + # iouThrs - [.5:.05:.95] T=10 IoU thresholds for evaluation + # recThrs - [0:.01:1] R=101 recall thresholds for evaluation + # areaRng - [...] A=4 object area ranges for evaluation + # maxDets - [1 10 100] M=3 thresholds on max detections per image + # iouType - ['segm'] set iouType to 'segm', 'bbox', 'keypoints' or 'densepose' + # iouType replaced the now DEPRECATED useSegm parameter. + # useCats - [1] if true use category labels for evaluation + # Note: if useCats=0 category labels are ignored as in proposal scoring. + # Note: multiple areaRngs [Ax2] and maxDets [Mx1] can be specified. 
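
The densepose iouType follows the same evaluate/accumulate/summarize protocol described above. A hedged sketch; the annotation and result file names are placeholders, and the ground truth is assumed to carry DensePose annotations:

    from pycocotools.coco import COCO

    cocoGt = COCO("densepose_gt.json")               # placeholder annotation file
    cocoDt = cocoGt.loadRes("densepose_preds.json")  # placeholder results file
    E = DensePoseCocoEval(cocoGt, cocoDt, iouType="densepose",
                          dpEvalMode=DensePoseEvalMode.GPSM)  # score by GPS * IoU
    E.evaluate()    # per-image, per-category matching
    E.accumulate()  # precision/recall tensors over thresholds
    E.summarize()   # prints AP/AR lines using the OGPS measure
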
+ # + # evaluate(): evaluates detections on every image and every category and + # concats the results into the "evalImgs" with fields: + # dtIds - [1xD] id for each of the D detections (dt) + # gtIds - [1xG] id for each of the G ground truths (gt) + # dtMatches - [TxD] matching gt id at each IoU or 0 + # gtMatches - [TxG] matching dt id at each IoU or 0 + # dtScores - [1xD] confidence of each dt + # gtIgnore - [1xG] ignore flag for each gt + # dtIgnore - [TxD] ignore flag for each dt at each IoU + # + # accumulate(): accumulates the per-image, per-category evaluation + # results in "evalImgs" into the dictionary "eval" with fields: + # params - parameters used for evaluation + # date - date evaluation was performed + # counts - [T,R,K,A,M] parameter dimensions (see above) + # precision - [TxRxKxAxM] precision for every evaluation setting + # recall - [TxKxAxM] max recall for every evaluation setting + # Note: precision and recall==-1 for settings with no gt objects. + # + # See also coco, mask, pycocoDemo, pycocoEvalDemo + # + # Microsoft COCO Toolbox. version 2.0 + # Data, paper, and tutorials available at: http://mscoco.org/ + # Code written by Piotr Dollar and Tsung-Yi Lin, 2015. + # Licensed under the Simplified BSD License [see coco/license.txt] + def __init__( + self, + cocoGt=None, + cocoDt=None, + iouType: str = "densepose", + multi_storage=None, + embedder=None, + dpEvalMode: DensePoseEvalMode = DensePoseEvalMode.GPS, + dpDataMode: DensePoseDataMode = DensePoseDataMode.IUV_DT, + ): + """ + Initialize CocoEval using coco APIs for gt and dt + :param cocoGt: coco object with ground truth annotations + :param cocoDt: coco object with detection results + :return: None + """ + self.cocoGt = cocoGt # ground truth COCO API + self.cocoDt = cocoDt # detections COCO API + self.multi_storage = multi_storage + self.embedder = embedder + self._dpEvalMode = dpEvalMode + self._dpDataMode = dpDataMode + self.evalImgs = defaultdict(list) # per-image per-category eval results [KxAxI] + self.eval = {} # accumulated evaluation results + self._gts = defaultdict(list) # gt for evaluation + self._dts = defaultdict(list) # dt for evaluation + self.params = Params(iouType=iouType) # parameters + self._paramsEval = {} # parameters for evaluation + self.stats = [] # result summarization + self.ious = {} # ious between all gts and dts + if cocoGt is not None: + self.params.imgIds = sorted(cocoGt.getImgIds()) + self.params.catIds = sorted(cocoGt.getCatIds()) + self.ignoreThrBB = 0.7 + self.ignoreThrUV = 0.9 + + def _loadGEval(self): + smpl_subdiv_fpath = PathManager.get_local_path( + "https://dl.fbaipublicfiles.com/densepose/data/SMPL_subdiv.mat" + ) + pdist_transform_fpath = PathManager.get_local_path( + "https://dl.fbaipublicfiles.com/densepose/data/SMPL_SUBDIV_TRANSFORM.mat" + ) + pdist_matrix_fpath = PathManager.get_local_path( + "https://dl.fbaipublicfiles.com/densepose/data/Pdist_matrix.pkl", timeout_sec=120 + ) + SMPL_subdiv = loadmat(smpl_subdiv_fpath) + self.PDIST_transform = loadmat(pdist_transform_fpath) + self.PDIST_transform = self.PDIST_transform["index"].squeeze() + UV = np.array([SMPL_subdiv["U_subdiv"], SMPL_subdiv["V_subdiv"]]).squeeze() + ClosestVertInds = np.arange(UV.shape[1]) + 1 + self.Part_UVs = [] + self.Part_ClosestVertInds = [] + for i in np.arange(24): + self.Part_UVs.append(UV[:, SMPL_subdiv["Part_ID_subdiv"].squeeze() == (i + 1)]) + self.Part_ClosestVertInds.append( + ClosestVertInds[SMPL_subdiv["Part_ID_subdiv"].squeeze() == (i + 1)] + ) + + with open(pdist_matrix_fpath, "rb") 
as hFile: + arrays = pickle.load(hFile, encoding="latin1") + self.Pdist_matrix = arrays["Pdist_matrix"] + self.Part_ids = np.array(SMPL_subdiv["Part_ID_subdiv"].squeeze()) + # Mean geodesic distances for parts. + self.Mean_Distances = np.array([0, 0.351, 0.107, 0.126, 0.237, 0.173, 0.142, 0.128, 0.150]) + # Coarse Part labels. + self.CoarseParts = np.array( + [0, 1, 1, 2, 2, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5, 6, 6, 6, 6, 7, 7, 7, 7, 8, 8] + ) + + def _prepare(self): + """ + Prepare ._gts and ._dts for evaluation based on params + :return: None + """ + + def _toMask(anns, coco): + # modify ann['segmentation'] by reference + for ann in anns: + # safeguard for invalid segmentation annotation; + # annotations containing empty lists exist in the posetrack + # dataset. This is not a correct segmentation annotation + # in terms of COCO format; we need to deal with it somehow + segm = ann["segmentation"] + if type(segm) == list and len(segm) == 0: + ann["segmentation"] = None + continue + rle = coco.annToRLE(ann) + ann["segmentation"] = rle + + def _getIgnoreRegion(iid, coco): + img = coco.imgs[iid] + + if "ignore_regions_x" not in img.keys(): + return None + + if len(img["ignore_regions_x"]) == 0: + return None + + rgns_merged = [ + [v for xy in zip(region_x, region_y) for v in xy] + for region_x, region_y in zip(img["ignore_regions_x"], img["ignore_regions_y"]) + ] + rles = maskUtils.frPyObjects(rgns_merged, img["height"], img["width"]) + rle = maskUtils.merge(rles) + return maskUtils.decode(rle) + + def _checkIgnore(dt, iregion): + if iregion is None: + return True + + bb = np.array(dt["bbox"]).astype(np.int) + x1, y1, x2, y2 = bb[0], bb[1], bb[0] + bb[2], bb[1] + bb[3] + x2 = min([x2, iregion.shape[1]]) + y2 = min([y2, iregion.shape[0]]) + + if bb[2] * bb[3] == 0: + return False + + crop_iregion = iregion[y1:y2, x1:x2] + + if crop_iregion.sum() == 0: + return True + + if "densepose" not in dt.keys(): # filtering boxes + return crop_iregion.sum() / bb[2] / bb[3] < self.ignoreThrBB + + # filtering UVs + ignoremask = np.require(crop_iregion, requirements=["F"]) + mask = self._extract_mask(dt) + uvmask = np.require(np.asarray(mask > 0), dtype=np.uint8, requirements=["F"]) + uvmask_ = maskUtils.encode(uvmask) + ignoremask_ = maskUtils.encode(ignoremask) + uviou = maskUtils.iou([uvmask_], [ignoremask_], [1])[0] + return uviou < self.ignoreThrUV + + p = self.params + + if p.useCats: + gts = self.cocoGt.loadAnns(self.cocoGt.getAnnIds(imgIds=p.imgIds, catIds=p.catIds)) + dts = self.cocoDt.loadAnns(self.cocoDt.getAnnIds(imgIds=p.imgIds, catIds=p.catIds)) + else: + gts = self.cocoGt.loadAnns(self.cocoGt.getAnnIds(imgIds=p.imgIds)) + dts = self.cocoDt.loadAnns(self.cocoDt.getAnnIds(imgIds=p.imgIds)) + + imns = self.cocoGt.loadImgs(p.imgIds) + self.size_mapping = {} + for im in imns: + self.size_mapping[im["id"]] = [im["height"], im["width"]] + + # if iouType == 'uv', add point gt annotations + if p.iouType == "densepose": + self._loadGEval() + + # convert ground truth to mask if iouType == 'segm' + if p.iouType == "segm": + _toMask(gts, self.cocoGt) + _toMask(dts, self.cocoDt) + + # set ignore flag + for gt in gts: + gt["ignore"] = gt["ignore"] if "ignore" in gt else 0 + gt["ignore"] = "iscrowd" in gt and gt["iscrowd"] + if p.iouType == "keypoints": + gt["ignore"] = (gt["num_keypoints"] == 0) or gt["ignore"] + if p.iouType == "densepose": + gt["ignore"] = ("dp_x" in gt) == 0 + if p.iouType == "segm": + gt["ignore"] = gt["segmentation"] is None + + self._gts = defaultdict(list) # gt for evaluation + self._dts = 
defaultdict(list) # dt for evaluation + self._igrgns = defaultdict(list) + + for gt in gts: + iid = gt["image_id"] + if iid not in self._igrgns.keys(): + self._igrgns[iid] = _getIgnoreRegion(iid, self.cocoGt) + if _checkIgnore(gt, self._igrgns[iid]): + self._gts[iid, gt["category_id"]].append(gt) + for dt in dts: + iid = dt["image_id"] + if (iid not in self._igrgns) or _checkIgnore(dt, self._igrgns[iid]): + self._dts[iid, dt["category_id"]].append(dt) + + self.evalImgs = defaultdict(list) # per-image per-category evaluation results + self.eval = {} # accumulated evaluation results + + def evaluate(self): + """ + Run per image evaluation on given images and store results (a list of dict) in self.evalImgs + :return: None + """ + tic = time.time() + logger.info("Running per image DensePose evaluation... {}".format(self.params.iouType)) + p = self.params + # add backward compatibility if useSegm is specified in params + if p.useSegm is not None: + p.iouType = "segm" if p.useSegm == 1 else "bbox" + logger.info("useSegm (deprecated) is not None. Running DensePose evaluation") + p.imgIds = list(np.unique(p.imgIds)) + if p.useCats: + p.catIds = list(np.unique(p.catIds)) + p.maxDets = sorted(p.maxDets) + self.params = p + + self._prepare() + # loop through images, area range, max detection number + catIds = p.catIds if p.useCats else [-1] + + if p.iouType in ["segm", "bbox"]: + computeIoU = self.computeIoU + elif p.iouType == "keypoints": + computeIoU = self.computeOks + elif p.iouType == "densepose": + computeIoU = self.computeOgps + if self._dpEvalMode in {DensePoseEvalMode.GPSM, DensePoseEvalMode.IOU}: + self.real_ious = { + (imgId, catId): self.computeDPIoU(imgId, catId) + for imgId in p.imgIds + for catId in catIds + } + + self.ious = { + (imgId, catId): computeIoU(imgId, catId) for imgId in p.imgIds for catId in catIds + } + + evaluateImg = self.evaluateImg + maxDet = p.maxDets[-1] + self.evalImgs = [ + evaluateImg(imgId, catId, areaRng, maxDet) + for catId in catIds + for areaRng in p.areaRng + for imgId in p.imgIds + ] + self._paramsEval = copy.deepcopy(self.params) + toc = time.time() + logger.info("DensePose evaluation DONE (t={:0.2f}s).".format(toc - tic)) + + def getDensePoseMask(self, polys): + maskGen = np.zeros([256, 256]) + stop = min(len(polys) + 1, 15) + for i in range(1, stop): + if polys[i - 1]: + currentMask = maskUtils.decode(polys[i - 1]) + maskGen[currentMask > 0] = i + return maskGen + + def _generate_rlemask_on_image(self, mask, imgId, data): + bbox_xywh = np.array(data["bbox"]) + x, y, w, h = bbox_xywh + im_h, im_w = self.size_mapping[imgId] + im_mask = np.zeros((im_h, im_w), dtype=np.uint8) + if mask is not None: + x0 = max(int(x), 0) + x1 = min(int(x + w), im_w, int(x) + mask.shape[1]) + y0 = max(int(y), 0) + y1 = min(int(y + h), im_h, int(y) + mask.shape[0]) + y = int(y) + x = int(x) + im_mask[y0:y1, x0:x1] = mask[y0 - y : y1 - y, x0 - x : x1 - x] + im_mask = np.require(np.asarray(im_mask > 0), dtype=np.uint8, requirements=["F"]) + rle_mask = maskUtils.encode(np.array(im_mask[:, :, np.newaxis], order="F"))[0] + return rle_mask + + def computeDPIoU(self, imgId, catId): + p = self.params + if p.useCats: + gt = self._gts[imgId, catId] + dt = self._dts[imgId, catId] + else: + gt = [_ for cId in p.catIds for _ in self._gts[imgId, cId]] + dt = [_ for cId in p.catIds for _ in self._dts[imgId, cId]] + if len(gt) == 0 and len(dt) == 0: + return [] + inds = np.argsort([-d["score"] for d in dt], kind="mergesort") + dt = [dt[i] for i in inds] + if len(dt) > p.maxDets[-1]: + dt = 
dt[0 : p.maxDets[-1]] + + gtmasks = [] + for g in gt: + if DensePoseDataRelative.S_KEY in g: + # convert DensePose mask to a binary mask + mask = np.minimum(self.getDensePoseMask(g[DensePoseDataRelative.S_KEY]), 1.0) + _, _, w, h = g["bbox"] + scale_x = float(max(w, 1)) / mask.shape[1] + scale_y = float(max(h, 1)) / mask.shape[0] + mask = spzoom(mask, (scale_y, scale_x), order=1, prefilter=False) + mask = np.array(mask > 0.5, dtype=np.uint8) + rle_mask = self._generate_rlemask_on_image(mask, imgId, g) + elif "segmentation" in g: + segmentation = g["segmentation"] + if isinstance(segmentation, list) and segmentation: + # polygons + im_h, im_w = self.size_mapping[imgId] + rles = maskUtils.frPyObjects(segmentation, im_h, im_w) + rle_mask = maskUtils.merge(rles) + elif isinstance(segmentation, dict): + if isinstance(segmentation["counts"], list): + # uncompressed RLE + im_h, im_w = self.size_mapping[imgId] + rle_mask = maskUtils.frPyObjects(segmentation, im_h, im_w) + else: + # compressed RLE + rle_mask = segmentation + else: + rle_mask = self._generate_rlemask_on_image(None, imgId, g) + else: + rle_mask = self._generate_rlemask_on_image(None, imgId, g) + gtmasks.append(rle_mask) + + dtmasks = [] + for d in dt: + mask = self._extract_mask(d) + mask = np.require(np.asarray(mask > 0), dtype=np.uint8, requirements=["F"]) + rle_mask = self._generate_rlemask_on_image(mask, imgId, d) + dtmasks.append(rle_mask) + + # compute iou between each dt and gt region + iscrowd = [int(o.get("iscrowd", 0)) for o in gt] + iousDP = maskUtils.iou(dtmasks, gtmasks, iscrowd) + return iousDP + + def computeIoU(self, imgId, catId): + p = self.params + if p.useCats: + gt = self._gts[imgId, catId] + dt = self._dts[imgId, catId] + else: + gt = [_ for cId in p.catIds for _ in self._gts[imgId, cId]] + dt = [_ for cId in p.catIds for _ in self._dts[imgId, cId]] + if len(gt) == 0 and len(dt) == 0: + return [] + inds = np.argsort([-d["score"] for d in dt], kind="mergesort") + dt = [dt[i] for i in inds] + if len(dt) > p.maxDets[-1]: + dt = dt[0 : p.maxDets[-1]] + + if p.iouType == "segm": + g = [g["segmentation"] for g in gt if g["segmentation"] is not None] + d = [d["segmentation"] for d in dt if d["segmentation"] is not None] + elif p.iouType == "bbox": + g = [g["bbox"] for g in gt] + d = [d["bbox"] for d in dt] + else: + raise Exception("unknown iouType for iou computation") + + # compute iou between each dt and gt region + iscrowd = [int(o.get("iscrowd", 0)) for o in gt] + ious = maskUtils.iou(d, g, iscrowd) + return ious + + def computeOks(self, imgId, catId): + p = self.params + # dimension here should be Nxm + gts = self._gts[imgId, catId] + dts = self._dts[imgId, catId] + inds = np.argsort([-d["score"] for d in dts], kind="mergesort") + dts = [dts[i] for i in inds] + if len(dts) > p.maxDets[-1]: + dts = dts[0 : p.maxDets[-1]] + # if len(gts) == 0 and len(dts) == 0: + if len(gts) == 0 or len(dts) == 0: + return [] + ious = np.zeros((len(dts), len(gts))) + sigmas = ( + np.array( + [ + 0.26, + 0.25, + 0.25, + 0.35, + 0.35, + 0.79, + 0.79, + 0.72, + 0.72, + 0.62, + 0.62, + 1.07, + 1.07, + 0.87, + 0.87, + 0.89, + 0.89, + ] + ) + / 10.0 + ) + vars = (sigmas * 2) ** 2 + k = len(sigmas) + # compute oks between each detection and ground truth object + for j, gt in enumerate(gts): + # create bounds for ignore regions(double the gt bbox) + g = np.array(gt["keypoints"]) + xg = g[0::3] + yg = g[1::3] + vg = g[2::3] + k1 = np.count_nonzero(vg > 0) + bb = gt["bbox"] + x0 = bb[0] - bb[2] + x1 = bb[0] + bb[2] * 2 + y0 = bb[1] - bb[3] + 
y1 = bb[1] + bb[3] * 2 + for i, dt in enumerate(dts): + d = np.array(dt["keypoints"]) + xd = d[0::3] + yd = d[1::3] + if k1 > 0: + # measure the per-keypoint distance if keypoints visible + dx = xd - xg + dy = yd - yg + else: + # measure minimum distance to keypoints in (x0,y0) & (x1,y1) + z = np.zeros(k) + dx = np.max((z, x0 - xd), axis=0) + np.max((z, xd - x1), axis=0) + dy = np.max((z, y0 - yd), axis=0) + np.max((z, yd - y1), axis=0) + e = (dx ** 2 + dy ** 2) / vars / (gt["area"] + np.spacing(1)) / 2 + if k1 > 0: + e = e[vg > 0] + ious[i, j] = np.sum(np.exp(-e)) / e.shape[0] + return ious + + def _extract_mask(self, dt: Dict[str, Any]) -> np.ndarray: + if "densepose" in dt: + densepose_results_quantized = dt["densepose"] + return densepose_results_quantized.labels_uv_uint8[0].numpy() + elif "cse_mask" in dt: + return dt["cse_mask"] + elif "coarse_segm" in dt: + dy = max(int(dt["bbox"][3]), 1) + dx = max(int(dt["bbox"][2]), 1) + return ( + # pyre-fixme[16]: `Tensor` has no attribute `argmax`. + F.interpolate( + # pyre-fixme[6]: Expected `Optional[int]` for 2nd param but got + # `Tuple[int, int]`. + dt["coarse_segm"].unsqueeze(0), (dy, dx), mode="bilinear", align_corners=False + ) + .squeeze(0) + .argmax(0) + .numpy() + .astype(np.uint8) + ) + elif "record_id" in dt: + assert ( + self.multi_storage is not None + ), f"Storage record id encountered in a detection {dt}, but no storage provided!" + record = self.multi_storage.get(dt["rank"], dt["record_id"]) + coarse_segm = record["coarse_segm"] + dy = max(int(dt["bbox"][3]), 1) + dx = max(int(dt["bbox"][2]), 1) + return ( + F.interpolate( + # pyre-fixme[6]: Expected `Optional[int]` for 2nd param but got + # `Tuple[int, int]`. + coarse_segm.unsqueeze(0), (dy, dx), mode="bilinear", align_corners=False + ) + .squeeze(0) + .argmax(0) + .numpy() + .astype(np.uint8) + ) + else: + raise Exception(f"No mask data in the detection: {dt}") + raise ValueError('The prediction dict needs to contain either "densepose" or "cse_mask"') + + def _extract_iuv( + self, densepose_data: np.ndarray, py: np.ndarray, px: np.ndarray, gt: Dict[str, Any] + ) -> Tuple[np.ndarray, np.ndarray, np.ndarray]: + """ + Extract arrays of I, U and V values at given points as numpy arrays + given the data mode stored in self._dpDataMode + """ + if self._dpDataMode == DensePoseDataMode.IUV_DT: + # estimated labels and UV (default) + ipoints = densepose_data[0, py, px] + upoints = densepose_data[1, py, px] / 255.0 # convert from uint8 by /255. + vpoints = densepose_data[2, py, px] / 255.0 + elif self._dpDataMode == DensePoseDataMode.IUV_GT: + # ground truth + ipoints = np.array(gt["dp_I"]) + upoints = np.array(gt["dp_U"]) + vpoints = np.array(gt["dp_V"]) + elif self._dpDataMode == DensePoseDataMode.I_GT_UV_0: + # ground truth labels, UV = 0 + ipoints = np.array(gt["dp_I"]) + upoints = upoints * 0.0 + vpoints = vpoints * 0.0 + elif self._dpDataMode == DensePoseDataMode.I_GT_UV_DT: + # ground truth labels, estimated UV + ipoints = np.array(gt["dp_I"]) + upoints = densepose_data[1, py, px] / 255.0 # convert from uint8 by /255. 
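
Two asides at this point. First, in the I_GT_UV_0 and I_DT_UV_0 branches of `_extract_iuv`, `upoints` and `vpoints` are read before they are assigned (a latent NameError); neither branch is reachable under the default IUV_DT mode set in the constructor. Second, a worked number for `computeOks` above, with the area value assumed purely for illustration:

    import numpy as np

    k = 0.26 / 10.0            # nose sigma after the /10 scaling in computeOks
    var = (2 * k) ** 2         # ~0.0027
    area = 10000.0             # assumed gt box area in pixels^2
    d2 = var * area            # squared pixel error chosen so that e = 0.5
    e = d2 / var / (area + np.spacing(1)) / 2
    print(np.exp(-e))          # ~0.6065: this keypoint's contribution to OKS
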
+ vpoints = densepose_data[2, py, px] / 255.0 + elif self._dpDataMode == DensePoseDataMode.I_DT_UV_0: + # estimated labels, UV = 0 + ipoints = densepose_data[0, py, px] + upoints = upoints * 0.0 + vpoints = vpoints * 0.0 + else: + raise ValueError(f"Unknown data mode: {self._dpDataMode}") + return ipoints, upoints, vpoints + + def computeOgps_single_pair(self, dt, gt, py, px, pt_mask): + if "densepose" in dt: + ipoints, upoints, vpoints = self.extract_iuv_from_quantized(dt, gt, py, px, pt_mask) + return self.computeOgps_single_pair_iuv(dt, gt, ipoints, upoints, vpoints) + elif "u" in dt: + ipoints, upoints, vpoints = self.extract_iuv_from_raw(dt, gt, py, px, pt_mask) + return self.computeOgps_single_pair_iuv(dt, gt, ipoints, upoints, vpoints) + elif "record_id" in dt: + assert ( + self.multi_storage is not None + ), f"Storage record id encountered in detection {dt}, but no storage provided!" + record = self.multi_storage.get(dt["rank"], dt["record_id"]) + record["bbox"] = dt["bbox"] + if "u" in record: + ipoints, upoints, vpoints = self.extract_iuv_from_raw(record, gt, py, px, pt_mask) + return self.computeOgps_single_pair_iuv(dt, gt, ipoints, upoints, vpoints) + elif "embedding" in record: + return self.computeOgps_single_pair_cse( + dt, + gt, + py, + px, + pt_mask, + record["coarse_segm"], + record["embedding"], + record["bbox"], + ) + else: + raise Exception(f"Unknown record format: {record}") + elif "embedding" in dt: + return self.computeOgps_single_pair_cse( + dt, gt, py, px, pt_mask, dt["coarse_segm"], dt["embedding"], dt["bbox"] + ) + raise Exception(f"Unknown detection format: {dt}") + + def extract_iuv_from_quantized(self, dt, gt, py, px, pt_mask): + densepose_results_quantized = dt["densepose"] + ipoints, upoints, vpoints = self._extract_iuv( + densepose_results_quantized.labels_uv_uint8.numpy(), py, px, gt + ) + ipoints[pt_mask == -1] = 0 + return ipoints, upoints, vpoints + + def extract_iuv_from_raw(self, dt, gt, py, px, pt_mask): + labels_dt = resample_fine_and_coarse_segm_tensors_to_bbox( + dt["fine_segm"].unsqueeze(0), + dt["coarse_segm"].unsqueeze(0), + dt["bbox"], + ) + uv = resample_uv_tensors_to_bbox( + dt["u"].unsqueeze(0), dt["v"].unsqueeze(0), labels_dt.squeeze(0), dt["bbox"] + ) + labels_uv_uint8 = torch.cat((labels_dt.byte(), (uv * 255).clamp(0, 255).byte())) + ipoints, upoints, vpoints = self._extract_iuv(labels_uv_uint8.numpy(), py, px, gt) + ipoints[pt_mask == -1] = 0 + return ipoints, upoints, vpoints + + def computeOgps_single_pair_iuv(self, dt, gt, ipoints, upoints, vpoints): + cVertsGT, ClosestVertsGTTransformed = self.findAllClosestVertsGT(gt) + cVerts = self.findAllClosestVertsUV(upoints, vpoints, ipoints) + # Get pairwise geodesic distances between gt and estimated mesh points. + dist = self.getDistancesUV(ClosestVertsGTTransformed, cVerts) + # Compute the Ogps measure. + # Find the mean geodesic normalization distance for + # each GT point, based on which part it is on. 
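
The normalization picked on the next line is the k in the GPS score computed in `computeOgps` below, ogps = exp(-d^2 / (2 k^2)) per matched point. A few reference values, using the 0.255 fallback constant that also appears in the CSE path:

    import numpy as np

    k = 0.255                          # fallback normalization constant
    for d in (0.0, 0.255, 0.51):       # geodesic errors between matched vertices
        print(d, np.exp(-(d ** 2) / (2 * k ** 2)))
    # 0.000 -> 1.00 (perfect match)
    # 0.255 -> 0.61 (error equal to k)
    # 0.510 -> 0.14 (error equal to 2k)
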
+ Current_Mean_Distances = self.Mean_Distances[ + self.CoarseParts[self.Part_ids[cVertsGT[cVertsGT > 0].astype(int) - 1]] + ] + return dist, Current_Mean_Distances + + def computeOgps_single_pair_cse( + self, dt, gt, py, px, pt_mask, coarse_segm, embedding, bbox_xywh_abs + ): + # 0-based mesh vertex indices + cVertsGT = torch.as_tensor(gt["dp_vertex"], dtype=torch.int64) + # label for each pixel of the bbox, [H, W] tensor of long + labels_dt = resample_coarse_segm_tensor_to_bbox( + coarse_segm.unsqueeze(0), bbox_xywh_abs + ).squeeze(0) + x, y, w, h = bbox_xywh_abs + # embedding for each pixel of the bbox, [D, H, W] tensor of float32 + embedding = F.interpolate( + embedding.unsqueeze(0), (int(h), int(w)), mode="bilinear", align_corners=False + ).squeeze(0) + # valid locations py, px + py_pt = torch.from_numpy(py[pt_mask > -1]) + px_pt = torch.from_numpy(px[pt_mask > -1]) + cVerts = torch.ones_like(cVertsGT) * -1 + cVerts[pt_mask > -1] = self.findClosestVertsCse( + embedding, py_pt, px_pt, labels_dt, gt["ref_model"] + ) + # Get pairwise geodesic distances between gt and estimated mesh points. + dist = self.getDistancesCse(cVertsGT, cVerts, gt["ref_model"]) + # normalize distances + if (gt["ref_model"] == "smpl_27554") and ("dp_I" in gt): + Current_Mean_Distances = self.Mean_Distances[ + self.CoarseParts[np.array(gt["dp_I"], dtype=int)] + ] + else: + Current_Mean_Distances = 0.255 + return dist, Current_Mean_Distances + + def computeOgps(self, imgId, catId): + p = self.params + # dimension here should be Nxm + g = self._gts[imgId, catId] + d = self._dts[imgId, catId] + inds = np.argsort([-d_["score"] for d_ in d], kind="mergesort") + d = [d[i] for i in inds] + if len(d) > p.maxDets[-1]: + d = d[0 : p.maxDets[-1]] + # if len(gts) == 0 and len(dts) == 0: + if len(g) == 0 or len(d) == 0: + return [] + ious = np.zeros((len(d), len(g))) + # compute opgs between each detection and ground truth object + # sigma = self.sigma #0.255 # dist = 0.3m corresponds to ogps = 0.5 + # 1 # dist = 0.3m corresponds to ogps = 0.96 + # 1.45 # dist = 1.7m (person height) corresponds to ogps = 0.5) + for j, gt in enumerate(g): + if not gt["ignore"]: + g_ = gt["bbox"] + for i, dt in enumerate(d): + # + dy = int(dt["bbox"][3]) + dx = int(dt["bbox"][2]) + dp_x = np.array(gt["dp_x"]) * g_[2] / 255.0 + dp_y = np.array(gt["dp_y"]) * g_[3] / 255.0 + py = (dp_y + g_[1] - dt["bbox"][1]).astype(np.int) + px = (dp_x + g_[0] - dt["bbox"][0]).astype(np.int) + # + pts = np.zeros(len(px)) + pts[px >= dx] = -1 + pts[py >= dy] = -1 + pts[px < 0] = -1 + pts[py < 0] = -1 + if len(pts) < 1: + ogps = 0.0 + elif np.max(pts) == -1: + ogps = 0.0 + else: + px[pts == -1] = 0 + py[pts == -1] = 0 + dists_between_matches, dist_norm_coeffs = self.computeOgps_single_pair( + dt, gt, py, px, pts + ) + # Compute gps + ogps_values = np.exp( + -(dists_between_matches ** 2) / (2 * (dist_norm_coeffs ** 2)) + ) + # + ogps = np.mean(ogps_values) if len(ogps_values) > 0 else 0.0 + ious[i, j] = ogps + + gbb = [gt["bbox"] for gt in g] + dbb = [dt["bbox"] for dt in d] + + # compute iou between each dt and gt region + iscrowd = [int(o.get("iscrowd", 0)) for o in g] + ious_bb = maskUtils.iou(dbb, gbb, iscrowd) + return ious, ious_bb + + def evaluateImg(self, imgId, catId, aRng, maxDet): + """ + perform evaluation for single category and image + :return: dict (single image results) + """ + + p = self.params + if p.useCats: + gt = self._gts[imgId, catId] + dt = self._dts[imgId, catId] + else: + gt = [_ for cId in p.catIds for _ in self._gts[imgId, cId]] + dt = [_ 
for cId in p.catIds for _ in self._dts[imgId, cId]] + if len(gt) == 0 and len(dt) == 0: + return None + + for g in gt: + # g['_ignore'] = g['ignore'] + if g["ignore"] or (g["area"] < aRng[0] or g["area"] > aRng[1]): + g["_ignore"] = True + else: + g["_ignore"] = False + + # sort dt highest score first, sort gt ignore last + gtind = np.argsort([g["_ignore"] for g in gt], kind="mergesort") + gt = [gt[i] for i in gtind] + dtind = np.argsort([-d["score"] for d in dt], kind="mergesort") + dt = [dt[i] for i in dtind[0:maxDet]] + iscrowd = [int(o.get("iscrowd", 0)) for o in gt] + # load computed ious + if p.iouType == "densepose": + # print('Checking the length', len(self.ious[imgId, catId])) + # if len(self.ious[imgId, catId]) == 0: + # print(self.ious[imgId, catId]) + ious = ( + self.ious[imgId, catId][0][:, gtind] + if len(self.ious[imgId, catId]) > 0 + else self.ious[imgId, catId] + ) + ioubs = ( + self.ious[imgId, catId][1][:, gtind] + if len(self.ious[imgId, catId]) > 0 + else self.ious[imgId, catId] + ) + if self._dpEvalMode in {DensePoseEvalMode.GPSM, DensePoseEvalMode.IOU}: + iousM = ( + self.real_ious[imgId, catId][:, gtind] + if len(self.real_ious[imgId, catId]) > 0 + else self.real_ious[imgId, catId] + ) + else: + ious = ( + self.ious[imgId, catId][:, gtind] + if len(self.ious[imgId, catId]) > 0 + else self.ious[imgId, catId] + ) + + T = len(p.iouThrs) + G = len(gt) + D = len(dt) + gtm = np.zeros((T, G)) + dtm = np.zeros((T, D)) + gtIg = np.array([g["_ignore"] for g in gt]) + dtIg = np.zeros((T, D)) + if np.all(gtIg) and p.iouType == "densepose": + dtIg = np.logical_or(dtIg, True) + + if len(ious) > 0: # and not p.iouType == 'densepose': + for tind, t in enumerate(p.iouThrs): + for dind, d in enumerate(dt): + # information about best match so far (m=-1 -> unmatched) + iou = min([t, 1 - 1e-10]) + m = -1 + for gind, _g in enumerate(gt): + # if this gt already matched, and not a crowd, continue + if gtm[tind, gind] > 0 and not iscrowd[gind]: + continue + # if dt matched to reg gt, and on ignore gt, stop + if m > -1 and gtIg[m] == 0 and gtIg[gind] == 1: + break + if p.iouType == "densepose": + if self._dpEvalMode == DensePoseEvalMode.GPSM: + new_iou = np.sqrt(iousM[dind, gind] * ious[dind, gind]) + elif self._dpEvalMode == DensePoseEvalMode.IOU: + new_iou = iousM[dind, gind] + elif self._dpEvalMode == DensePoseEvalMode.GPS: + new_iou = ious[dind, gind] + else: + new_iou = ious[dind, gind] + if new_iou < iou: + continue + if new_iou == 0.0: + continue + # if match successful and best so far, store appropriately + iou = new_iou + m = gind + # if match made store id of match for both dt and gt + if m == -1: + continue + dtIg[tind, dind] = gtIg[m] + dtm[tind, dind] = gt[m]["id"] + gtm[tind, m] = d["id"] + + if p.iouType == "densepose": + if not len(ioubs) == 0: + for dind, d in enumerate(dt): + # information about best match so far (m=-1 -> unmatched) + if dtm[tind, dind] == 0: + ioub = 0.8 + m = -1 + for gind, _g in enumerate(gt): + # if this gt already matched, and not a crowd, continue + if gtm[tind, gind] > 0 and not iscrowd[gind]: + continue + # continue to next gt unless better match made + if ioubs[dind, gind] < ioub: + continue + # if match successful and best so far, store appropriately + ioub = ioubs[dind, gind] + m = gind + # if match made store id of match for both dt and gt + if m > -1: + dtIg[:, dind] = gtIg[m] + if gtIg[m]: + dtm[tind, dind] = gt[m]["id"] + gtm[tind, m] = d["id"] + # set unmatched detections outside of area range to ignore + a = np.array([d["area"] < aRng[0] or 
d["area"] > aRng[1] for d in dt]).reshape((1, len(dt))) + dtIg = np.logical_or(dtIg, np.logical_and(dtm == 0, np.repeat(a, T, 0))) + # store results for given image and category + # print('Done with the function', len(self.ious[imgId, catId])) + return { + "image_id": imgId, + "category_id": catId, + "aRng": aRng, + "maxDet": maxDet, + "dtIds": [d["id"] for d in dt], + "gtIds": [g["id"] for g in gt], + "dtMatches": dtm, + "gtMatches": gtm, + "dtScores": [d["score"] for d in dt], + "gtIgnore": gtIg, + "dtIgnore": dtIg, + } + + def accumulate(self, p=None): + """ + Accumulate per image evaluation results and store the result in self.eval + :param p: input params for evaluation + :return: None + """ + logger.info("Accumulating evaluation results...") + tic = time.time() + if not self.evalImgs: + logger.info("Please run evaluate() first") + # allows input customized parameters + if p is None: + p = self.params + p.catIds = p.catIds if p.useCats == 1 else [-1] + T = len(p.iouThrs) + R = len(p.recThrs) + K = len(p.catIds) if p.useCats else 1 + A = len(p.areaRng) + M = len(p.maxDets) + precision = -(np.ones((T, R, K, A, M))) # -1 for the precision of absent categories + recall = -(np.ones((T, K, A, M))) + + # create dictionary for future indexing + logger.info("Categories: {}".format(p.catIds)) + _pe = self._paramsEval + catIds = _pe.catIds if _pe.useCats else [-1] + setK = set(catIds) + setA = set(map(tuple, _pe.areaRng)) + setM = set(_pe.maxDets) + setI = set(_pe.imgIds) + # get inds to evaluate + k_list = [n for n, k in enumerate(p.catIds) if k in setK] + m_list = [m for n, m in enumerate(p.maxDets) if m in setM] + a_list = [n for n, a in enumerate(map(lambda x: tuple(x), p.areaRng)) if a in setA] + i_list = [n for n, i in enumerate(p.imgIds) if i in setI] + I0 = len(_pe.imgIds) + A0 = len(_pe.areaRng) + # retrieve E at each category, area range, and max number of detections + for k, k0 in enumerate(k_list): + Nk = k0 * A0 * I0 + for a, a0 in enumerate(a_list): + Na = a0 * I0 + for m, maxDet in enumerate(m_list): + E = [self.evalImgs[Nk + Na + i] for i in i_list] + E = [e for e in E if e is not None] + if len(E) == 0: + continue + dtScores = np.concatenate([e["dtScores"][0:maxDet] for e in E]) + + # different sorting method generates slightly different results. + # mergesort is used to be consistent as Matlab implementation. 
+                    inds = np.argsort(-dtScores, kind="mergesort")
+
+                    dtm = np.concatenate([e["dtMatches"][:, 0:maxDet] for e in E], axis=1)[:, inds]
+                    dtIg = np.concatenate([e["dtIgnore"][:, 0:maxDet] for e in E], axis=1)[:, inds]
+                    gtIg = np.concatenate([e["gtIgnore"] for e in E])
+                    npig = np.count_nonzero(gtIg == 0)
+                    if npig == 0:
+                        continue
+                    tps = np.logical_and(dtm, np.logical_not(dtIg))
+                    fps = np.logical_and(np.logical_not(dtm), np.logical_not(dtIg))
+                    tp_sum = np.cumsum(tps, axis=1).astype(dtype=np.float64)
+                    fp_sum = np.cumsum(fps, axis=1).astype(dtype=np.float64)
+                    for t, (tp, fp) in enumerate(zip(tp_sum, fp_sum)):
+                        tp = np.array(tp)
+                        fp = np.array(fp)
+                        nd = len(tp)
+                        rc = tp / npig
+                        pr = tp / (fp + tp + np.spacing(1))
+                        q = np.zeros((R,))
+
+                        if nd:
+                            recall[t, k, a, m] = rc[-1]
+                        else:
+                            recall[t, k, a, m] = 0
+
+                        # numpy is slow without cython optimization for accessing elements;
+                        # using python lists gives a significant speed improvement
+                        pr = pr.tolist()
+                        q = q.tolist()
+
+                        for i in range(nd - 1, 0, -1):
+                            if pr[i] > pr[i - 1]:
+                                pr[i - 1] = pr[i]
+
+                        inds = np.searchsorted(rc, p.recThrs, side="left")
+                        try:
+                            for ri, pi in enumerate(inds):
+                                q[ri] = pr[pi]
+                        except Exception:
+                            pass
+                        precision[t, :, k, a, m] = np.array(q)
+        logger.info(
+            "Final: max precision {}, min precision {}".format(np.max(precision), np.min(precision))
+        )
+        self.eval = {
+            "params": p,
+            "counts": [T, R, K, A, M],
+            "date": datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
+            "precision": precision,
+            "recall": recall,
+        }
+        toc = time.time()
+        logger.info("DONE (t={:0.2f}s).".format(toc - tic))
+
+    def summarize(self):
+        """
+        Compute and display summary metrics for evaluation results.
+        Note this function can *only* be applied on the default parameter setting
+        """
+
+        def _summarize(ap=1, iouThr=None, areaRng="all", maxDets=100):
+            p = self.params
+            iStr = " {:<18} {} @[ {}={:<9} | area={:>6s} | maxDets={:>3d} ] = {:0.3f}"
+            titleStr = "Average Precision" if ap == 1 else "Average Recall"
+            typeStr = "(AP)" if ap == 1 else "(AR)"
+            measure = "IoU"
+            if self.params.iouType == "keypoints":
+                measure = "OKS"
+            elif self.params.iouType == "densepose":
+                measure = "OGPS"
+            iouStr = (
+                "{:0.2f}:{:0.2f}".format(p.iouThrs[0], p.iouThrs[-1])
+                if iouThr is None
+                else "{:0.2f}".format(iouThr)
+            )
+
+            aind = [i for i, aRng in enumerate(p.areaRngLbl) if aRng == areaRng]
+            mind = [i for i, mDet in enumerate(p.maxDets) if mDet == maxDets]
+            if ap == 1:
+                # dimension of precision: [TxRxKxAxM]
+                s = self.eval["precision"]
+                # IoU
+                if iouThr is not None:
+                    t = np.where(np.abs(iouThr - p.iouThrs) < 0.001)[0]
+                    s = s[t]
+                s = s[:, :, :, aind, mind]
+            else:
+                # dimension of recall: [TxKxAxM]
+                s = self.eval["recall"]
+                if iouThr is not None:
+                    t = np.where(np.abs(iouThr - p.iouThrs) < 0.001)[0]
+                    s = s[t]
+                s = s[:, :, aind, mind]
+            if len(s[s > -1]) == 0:
+                mean_s = -1
+            else:
+                mean_s = np.mean(s[s > -1])
+            logger.info(iStr.format(titleStr, typeStr, measure, iouStr, areaRng, maxDets, mean_s))
+            return mean_s
+
+        def _summarizeDets():
+            stats = np.zeros((12,))
+            stats[0] = _summarize(1)
+            stats[1] = _summarize(1, iouThr=0.5, maxDets=self.params.maxDets[2])
+            stats[2] = _summarize(1, iouThr=0.75, maxDets=self.params.maxDets[2])
+            stats[3] = _summarize(1, areaRng="small", maxDets=self.params.maxDets[2])
+            stats[4] = _summarize(1, areaRng="medium", maxDets=self.params.maxDets[2])
+            stats[5] = _summarize(1, areaRng="large", maxDets=self.params.maxDets[2])
+            stats[6] = _summarize(0, maxDets=self.params.maxDets[0])
+            stats[7] =
_summarize(0, maxDets=self.params.maxDets[1]) + stats[8] = _summarize(0, maxDets=self.params.maxDets[2]) + stats[9] = _summarize(0, areaRng="small", maxDets=self.params.maxDets[2]) + stats[10] = _summarize(0, areaRng="medium", maxDets=self.params.maxDets[2]) + stats[11] = _summarize(0, areaRng="large", maxDets=self.params.maxDets[2]) + return stats + + def _summarizeKps(): + stats = np.zeros((10,)) + stats[0] = _summarize(1, maxDets=20) + stats[1] = _summarize(1, maxDets=20, iouThr=0.5) + stats[2] = _summarize(1, maxDets=20, iouThr=0.75) + stats[3] = _summarize(1, maxDets=20, areaRng="medium") + stats[4] = _summarize(1, maxDets=20, areaRng="large") + stats[5] = _summarize(0, maxDets=20) + stats[6] = _summarize(0, maxDets=20, iouThr=0.5) + stats[7] = _summarize(0, maxDets=20, iouThr=0.75) + stats[8] = _summarize(0, maxDets=20, areaRng="medium") + stats[9] = _summarize(0, maxDets=20, areaRng="large") + return stats + + def _summarizeUvs(): + stats = [_summarize(1, maxDets=self.params.maxDets[0])] + min_threshold = self.params.iouThrs.min() + if min_threshold <= 0.201: + stats += [_summarize(1, maxDets=self.params.maxDets[0], iouThr=0.2)] + if min_threshold <= 0.301: + stats += [_summarize(1, maxDets=self.params.maxDets[0], iouThr=0.3)] + if min_threshold <= 0.401: + stats += [_summarize(1, maxDets=self.params.maxDets[0], iouThr=0.4)] + stats += [ + _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.5), + _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.75), + _summarize(1, maxDets=self.params.maxDets[0], areaRng="medium"), + _summarize(1, maxDets=self.params.maxDets[0], areaRng="large"), + _summarize(0, maxDets=self.params.maxDets[0]), + _summarize(0, maxDets=self.params.maxDets[0], iouThr=0.5), + _summarize(0, maxDets=self.params.maxDets[0], iouThr=0.75), + _summarize(0, maxDets=self.params.maxDets[0], areaRng="medium"), + _summarize(0, maxDets=self.params.maxDets[0], areaRng="large"), + ] + return np.array(stats) + + def _summarizeUvsOld(): + stats = np.zeros((18,)) + stats[0] = _summarize(1, maxDets=self.params.maxDets[0]) + stats[1] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.5) + stats[2] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.55) + stats[3] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.60) + stats[4] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.65) + stats[5] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.70) + stats[6] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.75) + stats[7] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.80) + stats[8] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.85) + stats[9] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.90) + stats[10] = _summarize(1, maxDets=self.params.maxDets[0], iouThr=0.95) + stats[11] = _summarize(1, maxDets=self.params.maxDets[0], areaRng="medium") + stats[12] = _summarize(1, maxDets=self.params.maxDets[0], areaRng="large") + stats[13] = _summarize(0, maxDets=self.params.maxDets[0]) + stats[14] = _summarize(0, maxDets=self.params.maxDets[0], iouThr=0.5) + stats[15] = _summarize(0, maxDets=self.params.maxDets[0], iouThr=0.75) + stats[16] = _summarize(0, maxDets=self.params.maxDets[0], areaRng="medium") + stats[17] = _summarize(0, maxDets=self.params.maxDets[0], areaRng="large") + return stats + + if not self.eval: + raise Exception("Please run accumulate() first") + iouType = self.params.iouType + if iouType in ["segm", "bbox"]: + summarize = _summarizeDets + elif iouType in ["keypoints"]: + summarize = _summarizeKps + 
elif iouType in ["densepose"]: + summarize = _summarizeUvs + self.stats = summarize() + + def __str__(self): + self.summarize() + + # ================ functions for dense pose ============================== + def findAllClosestVertsUV(self, U_points, V_points, Index_points): + ClosestVerts = np.ones(Index_points.shape) * -1 + for i in np.arange(24): + # + if (i + 1) in Index_points: + UVs = np.array( + [U_points[Index_points == (i + 1)], V_points[Index_points == (i + 1)]] + ) + Current_Part_UVs = self.Part_UVs[i] + Current_Part_ClosestVertInds = self.Part_ClosestVertInds[i] + D = ssd.cdist(Current_Part_UVs.transpose(), UVs.transpose()).squeeze() + ClosestVerts[Index_points == (i + 1)] = Current_Part_ClosestVertInds[ + np.argmin(D, axis=0) + ] + ClosestVertsTransformed = self.PDIST_transform[ClosestVerts.astype(int) - 1] + ClosestVertsTransformed[ClosestVerts < 0] = 0 + return ClosestVertsTransformed + + def findClosestVertsCse(self, embedding, py, px, mask, mesh_name): + mesh_vertex_embeddings = self.embedder(mesh_name) + pixel_embeddings = embedding[:, py, px].t().to(device="cuda") + mask_vals = mask[py, px] + edm = squared_euclidean_distance_matrix(pixel_embeddings, mesh_vertex_embeddings) + vertex_indices = edm.argmin(dim=1).cpu() + vertex_indices[mask_vals <= 0] = -1 + return vertex_indices + + def findAllClosestVertsGT(self, gt): + # + I_gt = np.array(gt["dp_I"]) + U_gt = np.array(gt["dp_U"]) + V_gt = np.array(gt["dp_V"]) + # + # print(I_gt) + # + ClosestVertsGT = np.ones(I_gt.shape) * -1 + for i in np.arange(24): + if (i + 1) in I_gt: + UVs = np.array([U_gt[I_gt == (i + 1)], V_gt[I_gt == (i + 1)]]) + Current_Part_UVs = self.Part_UVs[i] + Current_Part_ClosestVertInds = self.Part_ClosestVertInds[i] + D = ssd.cdist(Current_Part_UVs.transpose(), UVs.transpose()).squeeze() + ClosestVertsGT[I_gt == (i + 1)] = Current_Part_ClosestVertInds[np.argmin(D, axis=0)] + # + ClosestVertsGTTransformed = self.PDIST_transform[ClosestVertsGT.astype(int) - 1] + ClosestVertsGTTransformed[ClosestVertsGT < 0] = 0 + return ClosestVertsGT, ClosestVertsGTTransformed + + def getDistancesCse(self, cVertsGT, cVerts, mesh_name): + geodists_vertices = torch.ones_like(cVertsGT) * float("inf") + selected = (cVertsGT >= 0) * (cVerts >= 0) + mesh = create_mesh(mesh_name, "cpu") + geodists_vertices[selected] = mesh.geodists[cVertsGT[selected], cVerts[selected]] + return geodists_vertices.numpy() + + def getDistancesUV(self, cVertsGT, cVerts): + # + n = 27554 + dists = [] + for d in range(len(cVertsGT)): + if cVertsGT[d] > 0: + if cVerts[d] > 0: + i = cVertsGT[d] - 1 + j = cVerts[d] - 1 + if j == i: + dists.append(0) + elif j > i: + ccc = i + i = j + j = ccc + i = n - i - 1 + j = n - j - 1 + k = (n * (n - 1) / 2) - (n - i) * ((n - i) - 1) / 2 + j - i - 1 + k = (n * n - n) / 2 - k - 1 + dists.append(self.Pdist_matrix[int(k)][0]) + else: + i = n - i - 1 + j = n - j - 1 + k = (n * (n - 1) / 2) - (n - i) * ((n - i) - 1) / 2 + j - i - 1 + k = (n * n - n) / 2 - k - 1 + dists.append(self.Pdist_matrix[int(k)][0]) + else: + dists.append(np.inf) + return np.atleast_1d(np.array(dists).squeeze()) + + +class Params: + """ + Params for coco evaluation api + """ + + def setDetParams(self): + self.imgIds = [] + self.catIds = [] + # np.arange causes trouble. 
the data point on arange is slightly larger than the true value
+        self.iouThrs = np.linspace(0.5, 0.95, int(np.round((0.95 - 0.5) / 0.05)) + 1, endpoint=True)
+        self.recThrs = np.linspace(0.0, 1.00, int(np.round((1.00 - 0.0) / 0.01)) + 1, endpoint=True)
+        self.maxDets = [1, 10, 100]
+        self.areaRng = [
+            [0 ** 2, 1e5 ** 2],
+            [0 ** 2, 32 ** 2],
+            [32 ** 2, 96 ** 2],
+            [96 ** 2, 1e5 ** 2],
+        ]
+        self.areaRngLbl = ["all", "small", "medium", "large"]
+        self.useCats = 1
+
+    def setKpParams(self):
+        self.imgIds = []
+        self.catIds = []
+        # np.arange causes trouble. the data point on arange is slightly larger than the true value
+        self.iouThrs = np.linspace(0.5, 0.95, int(np.round((0.95 - 0.5) / 0.05)) + 1, endpoint=True)
+        self.recThrs = np.linspace(0.0, 1.00, int(np.round((1.00 - 0.0) / 0.01)) + 1, endpoint=True)
+        self.maxDets = [20]
+        self.areaRng = [[0 ** 2, 1e5 ** 2], [32 ** 2, 96 ** 2], [96 ** 2, 1e5 ** 2]]
+        self.areaRngLbl = ["all", "medium", "large"]
+        self.useCats = 1
+
+    def setUvParams(self):
+        self.imgIds = []
+        self.catIds = []
+        self.iouThrs = np.linspace(0.5, 0.95, int(np.round((0.95 - 0.5) / 0.05)) + 1, endpoint=True)
+        self.recThrs = np.linspace(0.0, 1.00, int(np.round((1.00 - 0.0) / 0.01)) + 1, endpoint=True)
+        self.maxDets = [20]
+        self.areaRng = [[0 ** 2, 1e5 ** 2], [32 ** 2, 96 ** 2], [96 ** 2, 1e5 ** 2]]
+        self.areaRngLbl = ["all", "medium", "large"]
+        self.useCats = 1
+
+    def __init__(self, iouType="segm"):
+        if iouType == "segm" or iouType == "bbox":
+            self.setDetParams()
+        elif iouType == "keypoints":
+            self.setKpParams()
+        elif iouType == "densepose":
+            self.setUvParams()
+        else:
+            raise Exception("iouType not supported")
+        self.iouType = iouType
+        # useSegm is deprecated
+        self.useSegm = None
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/evaluation/evaluator.py b/motion-gan-pipeline/preprocessing/third/densepose/evaluation/evaluator.py
new file mode 100644
index 0000000..0e0e9e3
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/evaluation/evaluator.py
@@ -0,0 +1,421 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Facebook, Inc. and its affiliates.
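+
+# Minimal usage sketch for the evaluation classes above (illustrative only;
+# `coco_gt` and `coco_dt` stand for pycocotools COCO objects holding ground
+# truth and detections):
+#
+#   coco_eval = DensePoseCocoEval(coco_gt, coco_dt, "densepose")
+#   coco_eval.evaluate()    # per-image matching (evaluateImg)
+#   coco_eval.accumulate()  # build precision/recall tensors
+#   coco_eval.summarize()   # fill coco_eval.stats with AP/AR metrics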
+ +import contextlib +import copy +import io +import itertools +import logging +import numpy as np +import os +from collections import OrderedDict +from typing import Dict, Iterable, List, Optional +import pycocotools.mask as mask_utils +import torch +from pycocotools.coco import COCO +from tabulate import tabulate + +from detectron2.config import CfgNode +from detectron2.data import MetadataCatalog +from detectron2.evaluation import DatasetEvaluator +from detectron2.structures import BoxMode +from detectron2.utils.comm import gather, get_rank, is_main_process, synchronize +from detectron2.utils.file_io import PathManager +from detectron2.utils.logger import create_small_table + +from densepose.converters import ToChartResultConverter, ToMaskConverter +from densepose.data.datasets.coco import maybe_filter_and_map_categories_cocoapi +from densepose.structures import ( + DensePoseChartPredictorOutput, + DensePoseEmbeddingPredictorOutput, + quantize_densepose_chart_result, +) + +from .densepose_coco_evaluation import DensePoseCocoEval, DensePoseEvalMode +from .mesh_alignment_evaluator import MeshAlignmentEvaluator +from .tensor_storage import ( + SingleProcessFileTensorStorage, + SingleProcessRamTensorStorage, + SingleProcessTensorStorage, + SizeData, + storage_gather, +) + + +class DensePoseCOCOEvaluator(DatasetEvaluator): + def __init__( + self, + dataset_name, + distributed, + output_dir=None, + evaluator_type: str = "iuv", + min_iou_threshold: float = 0.5, + storage: Optional[SingleProcessTensorStorage] = None, + embedder=None, + should_evaluate_mesh_alignment: bool = False, + mesh_alignment_mesh_names: Optional[List[str]] = None, + ): + self._embedder = embedder + self._distributed = distributed + self._output_dir = output_dir + self._evaluator_type = evaluator_type + self._storage = storage + self._should_evaluate_mesh_alignment = should_evaluate_mesh_alignment + + assert not ( + should_evaluate_mesh_alignment and embedder is None + ), "Mesh alignment evaluation is activated, but no vertex embedder provided!" + if should_evaluate_mesh_alignment: + self._mesh_alignment_evaluator = MeshAlignmentEvaluator( + embedder, + mesh_alignment_mesh_names, + ) + + self._cpu_device = torch.device("cpu") + self._logger = logging.getLogger(__name__) + + self._metadata = MetadataCatalog.get(dataset_name) + self._min_threshold = min_iou_threshold + json_file = PathManager.get_local_path(self._metadata.json_file) + with contextlib.redirect_stdout(io.StringIO()): + self._coco_api = COCO(json_file) + maybe_filter_and_map_categories_cocoapi(dataset_name, self._coco_api) + + def reset(self): + self._predictions = [] + + def process(self, inputs, outputs): + """ + Args: + inputs: the inputs to a COCO model (e.g., GeneralizedRCNN). + It is a list of dict. Each dict corresponds to an image and + contains keys like "height", "width", "file_name", "image_id". + outputs: the outputs of a COCO model. It is a list of dicts with key + "instances" that contains :class:`Instances`. + The :class:`Instances` object needs to have `densepose` field. 
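+            Each (input, output) pair is converted to COCO-style result dicts
+            via `prediction_to_dict` below and collected in self._predictions.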
+ """ + for input, output in zip(inputs, outputs): + instances = output["instances"].to(self._cpu_device) + if not instances.has("pred_densepose"): + continue + prediction_list = prediction_to_dict( + instances, + input["image_id"], + self._embedder, + self._metadata.class_to_mesh_name, + self._storage is not None, + ) + if self._storage is not None: + for prediction_dict in prediction_list: + dict_to_store = {} + for field_name in self._storage.data_schema: + dict_to_store[field_name] = prediction_dict[field_name] + record_id = self._storage.put(dict_to_store) + prediction_dict["record_id"] = record_id + prediction_dict["rank"] = get_rank() + for field_name in self._storage.data_schema: + del prediction_dict[field_name] + self._predictions.extend(prediction_list) + + def evaluate(self, img_ids=None): + if self._distributed: + synchronize() + predictions = gather(self._predictions) + predictions = list(itertools.chain(*predictions)) + else: + predictions = self._predictions + + multi_storage = storage_gather(self._storage) if self._storage is not None else None + + if not is_main_process(): + return + return copy.deepcopy(self._eval_predictions(predictions, multi_storage, img_ids)) + + def _eval_predictions(self, predictions, multi_storage=None, img_ids=None): + """ + Evaluate predictions on densepose. + Return results with the metrics of the tasks. + """ + self._logger.info("Preparing results for COCO format ...") + + if self._output_dir: + PathManager.mkdirs(self._output_dir) + file_path = os.path.join(self._output_dir, "coco_densepose_predictions.pth") + with PathManager.open(file_path, "wb") as f: + torch.save(predictions, f) + + self._logger.info("Evaluating predictions ...") + res = OrderedDict() + results_gps, results_gpsm, results_segm = _evaluate_predictions_on_coco( + self._coco_api, + predictions, + multi_storage, + self._embedder, + class_names=self._metadata.get("thing_classes"), + min_threshold=self._min_threshold, + img_ids=img_ids, + ) + res["densepose_gps"] = results_gps + res["densepose_gpsm"] = results_gpsm + res["densepose_segm"] = results_segm + if self._should_evaluate_mesh_alignment: + res["densepose_mesh_alignment"] = self._evaluate_mesh_alignment() + return res + + def _evaluate_mesh_alignment(self): + self._logger.info("Mesh alignment evaluation ...") + mean_ge, mean_gps, per_mesh_metrics = self._mesh_alignment_evaluator.evaluate() + results = { + "GE": mean_ge * 100, + "GPS": mean_gps * 100, + } + mesh_names = set() + for metric_name in per_mesh_metrics: + for mesh_name, value in per_mesh_metrics[metric_name].items(): + results[f"{metric_name}-{mesh_name}"] = value * 100 + mesh_names.add(mesh_name) + self._print_mesh_alignment_results(results, mesh_names) + return results + + def _print_mesh_alignment_results(self, results: Dict[str, float], mesh_names: Iterable[str]): + self._logger.info("Evaluation results for densepose, mesh alignment:") + self._logger.info(f'| {"Mesh":13s} | {"GErr":7s} | {"GPS":7s} |') + self._logger.info("| :-----------: | :-----: | :-----: |") + for mesh_name in mesh_names: + ge_key = f"GE-{mesh_name}" + ge_str = f"{results[ge_key]:.4f}" if ge_key in results else " " + gps_key = f"GPS-{mesh_name}" + gps_str = f"{results[gps_key]:.4f}" if gps_key in results else " " + self._logger.info(f"| {mesh_name:13s} | {ge_str:7s} | {gps_str:7s} |") + self._logger.info("| :-------------------------------: |") + ge_key = "GE" + ge_str = f"{results[ge_key]:.4f}" if ge_key in results else " " + gps_key = "GPS" + gps_str = f"{results[gps_key]:.4f}" if 
gps_key in results else " " + self._logger.info(f'| {"MEAN":13s} | {ge_str:7s} | {gps_str:7s} |') + + +def prediction_to_dict(instances, img_id, embedder, class_to_mesh_name, use_storage): + """ + Args: + instances (Instances): the output of the model + img_id (str): the image id in COCO + + Returns: + list[dict]: the results in densepose evaluation format + """ + scores = instances.scores.tolist() + classes = instances.pred_classes.tolist() + raw_boxes_xywh = BoxMode.convert( + instances.pred_boxes.tensor.clone(), BoxMode.XYXY_ABS, BoxMode.XYWH_ABS + ) + + if isinstance(instances.pred_densepose, DensePoseEmbeddingPredictorOutput): + results_densepose = densepose_cse_predictions_to_dict( + instances, embedder, class_to_mesh_name, use_storage + ) + elif isinstance(instances.pred_densepose, DensePoseChartPredictorOutput): + if not use_storage: + results_densepose = densepose_chart_predictions_to_dict(instances) + else: + results_densepose = densepose_chart_predictions_to_storage_dict(instances) + + results = [] + for k in range(len(instances)): + result = { + "image_id": img_id, + "category_id": classes[k], + "bbox": raw_boxes_xywh[k].tolist(), + "score": scores[k], + } + results.append({**result, **results_densepose[k]}) + return results + + +def densepose_chart_predictions_to_dict(instances): + segmentations = ToMaskConverter.convert( + instances.pred_densepose, instances.pred_boxes, instances.image_size + ) + + results = [] + for k in range(len(instances)): + densepose_results_quantized = quantize_densepose_chart_result( + ToChartResultConverter.convert(instances.pred_densepose[k], instances.pred_boxes[k]) + ) + densepose_results_quantized.labels_uv_uint8 = ( + densepose_results_quantized.labels_uv_uint8.cpu() + ) + segmentation = segmentations.tensor[k] + segmentation_encoded = mask_utils.encode( + np.require(segmentation.numpy(), dtype=np.uint8, requirements=["F"]) + ) + segmentation_encoded["counts"] = segmentation_encoded["counts"].decode("utf-8") + result = { + "densepose": densepose_results_quantized, + "segmentation": segmentation_encoded, + } + results.append(result) + return results + + +def densepose_chart_predictions_to_storage_dict(instances): + results = [] + for k in range(len(instances)): + densepose_predictor_output = instances.pred_densepose[k] + result = { + "coarse_segm": densepose_predictor_output.coarse_segm.squeeze(0).cpu(), + "fine_segm": densepose_predictor_output.fine_segm.squeeze(0).cpu(), + "u": densepose_predictor_output.u.squeeze(0).cpu(), + "v": densepose_predictor_output.v.squeeze(0).cpu(), + } + results.append(result) + return results + + +def densepose_cse_predictions_to_dict(instances, embedder, class_to_mesh_name, use_storage): + results = [] + for k in range(len(instances)): + cse = instances.pred_densepose[k] + results.append( + { + "coarse_segm": cse.coarse_segm[0].cpu(), + "embedding": cse.embedding[0].cpu(), + } + ) + return results + + +def _evaluate_predictions_on_coco( + coco_gt, + coco_results, + multi_storage=None, + embedder=None, + class_names=None, + min_threshold: float=0.5, + img_ids=None, +): + logger = logging.getLogger(__name__) + + densepose_metrics = _get_densepose_metrics(min_threshold) + if len(coco_results) == 0: # cocoapi does not handle empty results very well + logger.warn("No predictions from the model! 
Set scores to -1") + results_gps = {metric: -1 for metric in densepose_metrics} + results_gpsm = {metric: -1 for metric in densepose_metrics} + results_segm = {metric: -1 for metric in densepose_metrics} + return results_gps, results_gpsm, results_segm + + coco_dt = coco_gt.loadRes(coco_results) + + results = [] + for eval_mode_name in ["GPS", "GPSM", "IOU"]: + eval_mode = getattr(DensePoseEvalMode, eval_mode_name) + coco_eval = DensePoseCocoEval( + coco_gt, coco_dt, "densepose", multi_storage, embedder, dpEvalMode=eval_mode + ) + result = _derive_results_from_coco_eval( + coco_eval, eval_mode_name, densepose_metrics, class_names, min_threshold, img_ids + ) + results.append(result) + return results + + +def _get_densepose_metrics(min_threshold: float=0.5): + metrics = ["AP"] + if min_threshold <= 0.201: + metrics += ["AP20"] + if min_threshold <= 0.301: + metrics += ["AP30"] + if min_threshold <= 0.401: + metrics += ["AP40"] + metrics.extend(["AP50", "AP75", "APm", "APl", "AR", "AR50", "AR75", "ARm", "ARl"]) + return metrics + + +def _derive_results_from_coco_eval( + coco_eval, eval_mode_name, metrics, class_names, min_threshold: float, img_ids +): + if img_ids is not None: + coco_eval.params.imgIds = img_ids + coco_eval.params.iouThrs = np.linspace( + min_threshold, 0.95, int(np.round((0.95 - min_threshold) / 0.05)) + 1, endpoint=True + ) + coco_eval.evaluate() + coco_eval.accumulate() + coco_eval.summarize() + results = {metric: float(coco_eval.stats[idx] * 100) for idx, metric in enumerate(metrics)} + logger = logging.getLogger(__name__) + logger.info( + f"Evaluation results for densepose, {eval_mode_name} metric: \n" + + create_small_table(results) + ) + if class_names is None or len(class_names) <= 1: + return results + + # Compute per-category AP, the same way as it is done in D2 + # (see detectron2/evaluation/coco_evaluation.py): + precisions = coco_eval.eval["precision"] + # precision has dims (iou, recall, cls, area range, max dets) + assert len(class_names) == precisions.shape[2] + + results_per_category = [] + for idx, name in enumerate(class_names): + # area range index 0: all area ranges + # max dets index -1: typically 100 per image + precision = precisions[:, :, idx, 0, -1] + precision = precision[precision > -1] + ap = np.mean(precision) if precision.size else float("nan") + results_per_category.append((f"{name}", float(ap * 100))) + + # tabulate it + n_cols = min(6, len(results_per_category) * 2) + results_flatten = list(itertools.chain(*results_per_category)) + results_2d = itertools.zip_longest(*[results_flatten[i::n_cols] for i in range(n_cols)]) + table = tabulate( + results_2d, + tablefmt="pipe", + floatfmt=".3f", + headers=["category", "AP"] * (n_cols // 2), + numalign="left", + ) + logger.info(f"Per-category {eval_mode_name} AP: \n" + table) + + results.update({"AP-" + name: ap for name, ap in results_per_category}) + return results + + +def build_densepose_evaluator_storage(cfg: CfgNode, output_folder: str): + storage_spec = cfg.DENSEPOSE_EVALUATION.STORAGE + if storage_spec == "none": + return None + evaluator_type = cfg.DENSEPOSE_EVALUATION.TYPE + # common output tensor sizes + hout = cfg.MODEL.ROI_DENSEPOSE_HEAD.HEATMAP_SIZE + wout = cfg.MODEL.ROI_DENSEPOSE_HEAD.HEATMAP_SIZE + n_csc = cfg.MODEL.ROI_DENSEPOSE_HEAD.NUM_COARSE_SEGM_CHANNELS + # specific output tensors + if evaluator_type == "iuv": + n_fsc = cfg.MODEL.ROI_DENSEPOSE_HEAD.NUM_PATCHES + 1 + schema = { + "coarse_segm": SizeData(dtype="float32", shape=(n_csc, hout, wout)), + "fine_segm": 
SizeData(dtype="float32", shape=(n_fsc, hout, wout)), + "u": SizeData(dtype="float32", shape=(n_fsc, hout, wout)), + "v": SizeData(dtype="float32", shape=(n_fsc, hout, wout)), + } + elif evaluator_type == "cse": + embed_size = cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBED_SIZE + schema = { + "coarse_segm": SizeData(dtype="float32", shape=(n_csc, hout, wout)), + "embedding": SizeData(dtype="float32", shape=(embed_size, hout, wout)), + } + else: + raise ValueError(f"Unknown evaluator type: {evaluator_type}") + # storage types + if storage_spec == "ram": + storage = SingleProcessRamTensorStorage(schema, io.BytesIO()) + elif storage_spec == "file": + fpath = os.path.join(output_folder, f"DensePoseEvaluatorStorage.{get_rank()}.bin") + PathManager.mkdirs(output_folder) + storage = SingleProcessFileTensorStorage(schema, fpath, "wb") + else: + raise ValueError(f"Unknown storage specification: {storage_spec}") + return storage diff --git a/motion-gan-pipeline/preprocessing/third/densepose/evaluation/mesh_alignment_evaluator.py b/motion-gan-pipeline/preprocessing/third/densepose/evaluation/mesh_alignment_evaluator.py new file mode 100644 index 0000000..c5ef290 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/evaluation/mesh_alignment_evaluator.py @@ -0,0 +1,66 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import json +import logging +from typing import List, Optional +import torch +from torch import nn + +from detectron2.utils.file_io import PathManager + +from densepose.structures.mesh import create_mesh + + +class MeshAlignmentEvaluator: + """ + Class for evaluation of 3D mesh alignment based on the learned vertex embeddings + """ + + def __init__(self, embedder: nn.Module, mesh_names: Optional[List[str]]): + self.embedder = embedder + # use the provided mesh names if not None and not an empty list + self.mesh_names = mesh_names if mesh_names else embedder.mesh_names + self.logger = logging.getLogger(__name__) + with PathManager.open( + "https://dl.fbaipublicfiles.com/densepose/data/cse/mesh_keyvertices_v0.json", "r" + ) as f: + self.mesh_keyvertices = json.load(f) + + def evaluate(self): + ge_per_mesh = {} + gps_per_mesh = {} + for mesh_name_1 in self.mesh_names: + avg_errors = [] + avg_gps = [] + embeddings_1 = self.embedder(mesh_name_1) + keyvertices_1 = self.mesh_keyvertices[mesh_name_1] + keyvertex_names_1 = list(keyvertices_1.keys()) + keyvertex_indices_1 = [keyvertices_1[name] for name in keyvertex_names_1] + for mesh_name_2 in self.mesh_names: + if mesh_name_1 == mesh_name_2: + continue + embeddings_2 = self.embedder(mesh_name_2) + keyvertices_2 = self.mesh_keyvertices[mesh_name_2] + sim_matrix_12 = embeddings_1[keyvertex_indices_1].mm(embeddings_2.T) + vertices_2_matching_keyvertices_1 = sim_matrix_12.argmax(axis=1) + mesh_2 = create_mesh(mesh_name_2, embeddings_2.device) + geodists = mesh_2.geodists[ + vertices_2_matching_keyvertices_1, + [keyvertices_2[name] for name in keyvertex_names_1], + ] + Current_Mean_Distances = 0.255 + gps = (-(geodists ** 2) / (2 * (Current_Mean_Distances ** 2))).exp() + avg_errors.append(geodists.mean().item()) + avg_gps.append(gps.mean().item()) + + ge_mean = torch.as_tensor(avg_errors).mean().item() + gps_mean = torch.as_tensor(avg_gps).mean().item() + ge_per_mesh[mesh_name_1] = ge_mean + gps_per_mesh[mesh_name_1] = gps_mean + ge_mean_global = torch.as_tensor(list(ge_per_mesh.values())).mean().item() + gps_mean_global = torch.as_tensor(list(gps_per_mesh.values())).mean().item() + per_mesh_metrics = { + "GE": 
ge_per_mesh, + "GPS": gps_per_mesh, + } + return ge_mean_global, gps_mean_global, per_mesh_metrics diff --git a/motion-gan-pipeline/preprocessing/third/densepose/evaluation/tensor_storage.py b/motion-gan-pipeline/preprocessing/third/densepose/evaluation/tensor_storage.py new file mode 100644 index 0000000..ea0ece2 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/evaluation/tensor_storage.py @@ -0,0 +1,238 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +import io +import numpy as np +import os +from dataclasses import dataclass +from functools import reduce +from operator import mul +from typing import BinaryIO, Dict, Optional, Tuple +import torch + +from detectron2.utils.comm import gather, get_rank +from detectron2.utils.file_io import PathManager + + +@dataclass +class SizeData: + dtype: str + shape: Tuple[int] + + +def _calculate_record_field_size_b(data_schema: Dict[str, SizeData], field_name: str) -> int: + schema = data_schema[field_name] + element_size_b = np.dtype(schema.dtype).itemsize + record_field_size_b = reduce(mul, schema.shape) * element_size_b + return record_field_size_b + + +def _calculate_record_size_b(data_schema: Dict[str, SizeData]) -> int: + record_size_b = 0 + for field_name in data_schema: + record_field_size_b = _calculate_record_field_size_b(data_schema, field_name) + record_size_b += record_field_size_b + return record_size_b + + +def _calculate_record_field_sizes_b(data_schema: Dict[str, SizeData]) -> Dict[str, int]: + field_sizes_b = {} + for field_name in data_schema: + field_sizes_b[field_name] = _calculate_record_field_size_b(data_schema, field_name) + return field_sizes_b + + +class SingleProcessTensorStorage: + """ + Compact tensor storage to keep tensor data of predefined size and type. + """ + + def __init__(self, data_schema: Dict[str, SizeData], storage_impl: BinaryIO): + """ + Construct tensor storage based on information on data shape and size. + Internally uses numpy to interpret the type specification. + The storage must support operations `seek(offset, whence=os.SEEK_SET)` and + `read(size)` to be able to perform the `get` operation. + The storage must support operation `write(bytes)` to be able to perform + the `put` operation. + + Args: + data_schema (dict: str -> SizeData): dictionary which maps tensor name + to its size data (shape and data type), e.g. + ``` + { + "coarse_segm": SizeData(dtype="float32", shape=(112, 112)), + "embedding": SizeData(dtype="float32", shape=(16, 112, 112)), + } + ``` + storage_impl (BinaryIO): io instance that handles file-like seek, read + and write operations, e.g. 
a file handle or a memory buffer like io.BytesIO + """ + self.data_schema = data_schema + self.record_size_b = _calculate_record_size_b(data_schema) + self.record_field_sizes_b = _calculate_record_field_sizes_b(data_schema) + self.storage_impl = storage_impl + self.next_record_id = 0 + + def get(self, record_id: int) -> Dict[str, torch.Tensor]: + """ + Load tensors from the storage by record ID + + Args: + record_id (int): Record ID, for which to load the data + + Return: + dict: str -> tensor: tensor name mapped to tensor data, recorded under the provided ID + """ + self.storage_impl.seek(record_id * self.record_size_b, os.SEEK_SET) + data_bytes = self.storage_impl.read(self.record_size_b) + assert len(data_bytes) == self.record_size_b, ( + f"Expected data size {self.record_size_b} B could not be read: " + f"got {len(data_bytes)} B" + ) + record = {} + cur_idx = 0 + # it's important to read and write in the same order + for field_name in sorted(self.data_schema): + schema = self.data_schema[field_name] + field_size_b = self.record_field_sizes_b[field_name] + chunk = data_bytes[cur_idx : cur_idx + field_size_b] + data_np = np.frombuffer( + chunk, dtype=schema.dtype, count=reduce(mul, schema.shape) + ).reshape(schema.shape) + record[field_name] = torch.from_numpy(data_np) + cur_idx += field_size_b + return record + + def put(self, data: Dict[str, torch.Tensor]) -> int: + """ + Store tensors in the storage + + Args: + data (dict: str -> tensor): data to store, a dictionary which maps + tensor names into tensors; tensor shapes must match those specified + in data schema. + Return: + int: record ID, under which the data is stored + """ + # it's important to read and write in the same order + for field_name in sorted(self.data_schema): + assert ( + field_name in data + ), f"Field '{field_name}' not present in data: data keys are {data.keys()}" + value = data[field_name] + assert value.shape == self.data_schema[field_name].shape, ( + f"Mismatched tensor shapes for field '{field_name}': " + f"expected {self.data_schema[field_name].shape}, got {value.shape}" + ) + data_bytes = value.cpu().numpy().tobytes() + assert len(data_bytes) == self.record_field_sizes_b[field_name], ( + f"Expected field {field_name} to be of size " + f"{self.record_field_sizes_b[field_name]} B, got {len(data_bytes)} B" + ) + self.storage_impl.write(data_bytes) + record_id = self.next_record_id + self.next_record_id += 1 + return record_id + + +class SingleProcessFileTensorStorage(SingleProcessTensorStorage): + """ + Implementation of a single process tensor storage which stores data in a file + """ + + def __init__(self, data_schema: Dict[str, SizeData], fpath: str, mode: str): + self.fpath = fpath + assert "b" in mode, f"Tensor storage should be opened in binary mode, got '{mode}'" + if "w" in mode: + file_h = PathManager.open(fpath, mode) + elif "r" in mode: + local_fpath = PathManager.get_local_path(fpath) + file_h = open(local_fpath, mode) + else: + raise ValueError(f"Unsupported file mode {mode}, supported modes: rb, wb") + super().__init__(data_schema, file_h) # pyre-ignore[6] + + +class SingleProcessRamTensorStorage(SingleProcessTensorStorage): + """ + Implementation of a single process tensor storage which stores data in RAM + """ + + def __init__(self, data_schema: Dict[str, SizeData], buf: io.BytesIO): + super().__init__(data_schema, buf) + + +class MultiProcessTensorStorage: + """ + Representation of a set of tensor storages created by individual processes, + allows to access those storages from a single owner 
process. The storages + should either be shared or broadcasted to the owner process. + The processes are identified by their rank, data is uniquely defined by + the rank of the process and the record ID. + """ + + def __init__(self, rank_to_storage: Dict[int, SingleProcessTensorStorage]): + self.rank_to_storage = rank_to_storage + + def get(self, rank: int, record_id: int) -> Dict[str, torch.Tensor]: + storage = self.rank_to_storage[rank] + return storage.get(record_id) + + def put(self, rank: int, data: Dict[str, torch.Tensor]) -> int: + storage = self.rank_to_storage[rank] + return storage.put(data) + + +class MultiProcessFileTensorStorage(MultiProcessTensorStorage): + def __init__(self, data_schema: Dict[str, SizeData], rank_to_fpath: Dict[int, str], mode: str): + rank_to_storage = { + rank: SingleProcessFileTensorStorage(data_schema, fpath, mode) + for rank, fpath in rank_to_fpath.items() + } + super().__init__(rank_to_storage) # pyre-ignore[6] + + +class MultiProcessRamTensorStorage(MultiProcessTensorStorage): + def __init__(self, data_schema: Dict[str, SizeData], rank_to_buffer: Dict[int, io.BytesIO]): + rank_to_storage = { + rank: SingleProcessRamTensorStorage(data_schema, buf) + for rank, buf in rank_to_buffer.items() + } + super().__init__(rank_to_storage) # pyre-ignore[6] + + +def _ram_storage_gather( + storage: SingleProcessRamTensorStorage, dst_rank: int = 0 +) -> Optional[MultiProcessRamTensorStorage]: + storage.storage_impl.seek(0, os.SEEK_SET) + # TODO: overhead, pickling a bytes object, can just pass bytes in a tensor directly + # see detectron2/autils.comm.py + data_list = gather(storage.storage_impl.read(), dst=dst_rank) + if get_rank() != dst_rank: + return None + rank_to_buffer = {i: io.BytesIO(data_list[i]) for i in range(len(data_list))} + multiprocess_storage = MultiProcessRamTensorStorage(storage.data_schema, rank_to_buffer) + return multiprocess_storage + + +def _file_storage_gather( + storage: SingleProcessFileTensorStorage, + dst_rank: int = 0, + mode: str = "rb", +) -> Optional[MultiProcessFileTensorStorage]: + storage.storage_impl.close() + fpath_list = gather(storage.fpath, dst=dst_rank) + if get_rank() != dst_rank: + return None + rank_to_fpath = {i: fpath_list[i] for i in range(len(fpath_list))} + return MultiProcessFileTensorStorage(storage.data_schema, rank_to_fpath, mode) + + +def storage_gather( + storage: SingleProcessTensorStorage, dst_rank: int = 0 +) -> Optional[MultiProcessTensorStorage]: + if isinstance(storage, SingleProcessRamTensorStorage): + return _ram_storage_gather(storage, dst_rank) + elif isinstance(storage, SingleProcessFileTensorStorage): + return _file_storage_gather(storage, dst_rank) + raise Exception(f"Unsupported storage for gather operation: {storage}") diff --git a/motion-gan-pipeline/preprocessing/third/densepose/model_final_162be9.pkl b/motion-gan-pipeline/preprocessing/third/densepose/model_final_162be9.pkl new file mode 100644 index 0000000..1556c53 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/model_final_162be9.pkl @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b8a7382001b16e453bad95ca9dbc68ae8f2b839b304cf90eaf5c27fbdb4dae91 +size 255757821 diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/__init__.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/__init__.py new file mode 100644 index 0000000..4c49f6d --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/__init__.py @@ -0,0 +1,13 @@ +# Copyright (c) Facebook, Inc. 
and its affiliates. + +from .confidence import DensePoseConfidenceModelConfig, DensePoseUVConfidenceType +from .filter import DensePoseDataFilter +from .inference import densepose_inference +from .utils import initialize_module_params +from .build import ( + build_densepose_data_filter, + build_densepose_embedder, + build_densepose_head, + build_densepose_losses, + build_densepose_predictor, +) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/build.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/build.py new file mode 100644 index 0000000..bb7f54b --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/build.py @@ -0,0 +1,87 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from typing import Optional +from torch import nn + +from detectron2.config import CfgNode + +from .cse.embedder import Embedder +from .filter import DensePoseDataFilter + + +def build_densepose_predictor(cfg: CfgNode, input_channels: int): + """ + Create an instance of DensePose predictor based on configuration options. + + Args: + cfg (CfgNode): configuration options + input_channels (int): input tensor size along the channel dimension + Return: + An instance of DensePose predictor + """ + from .predictors import DENSEPOSE_PREDICTOR_REGISTRY + + predictor_name = cfg.MODEL.ROI_DENSEPOSE_HEAD.PREDICTOR_NAME + return DENSEPOSE_PREDICTOR_REGISTRY.get(predictor_name)(cfg, input_channels) + + +def build_densepose_data_filter(cfg: CfgNode): + """ + Build DensePose data filter which selects data for training + + Args: + cfg (CfgNode): configuration options + + Return: + Callable: list(Tensor), list(Instances) -> list(Tensor), list(Instances) + An instance of DensePose filter, which takes feature tensors and proposals + as an input and returns filtered features and proposals + """ + dp_filter = DensePoseDataFilter(cfg) + return dp_filter + + +def build_densepose_head(cfg: CfgNode, input_channels: int): + """ + Build DensePose head based on configurations options + + Args: + cfg (CfgNode): configuration options + input_channels (int): input tensor size along the channel dimension + Return: + An instance of DensePose head + """ + from .roi_heads.registry import ROI_DENSEPOSE_HEAD_REGISTRY + + head_name = cfg.MODEL.ROI_DENSEPOSE_HEAD.NAME + return ROI_DENSEPOSE_HEAD_REGISTRY.get(head_name)(cfg, input_channels) + + +def build_densepose_losses(cfg: CfgNode): + """ + Build DensePose loss based on configurations options + + Args: + cfg (CfgNode): configuration options + Return: + An instance of DensePose loss + """ + from .losses import DENSEPOSE_LOSS_REGISTRY + + loss_name = cfg.MODEL.ROI_DENSEPOSE_HEAD.LOSS_NAME + return DENSEPOSE_LOSS_REGISTRY.get(loss_name)(cfg) + + +def build_densepose_embedder(cfg: CfgNode) -> Optional[nn.Module]: + """ + Build embedder used to embed mesh vertices into an embedding space. + Embedder contains sub-embedders, one for each mesh ID. + + Args: + cfg (cfgNode): configuration options + Return: + Embedding module + """ + if cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBEDDERS: + return Embedder(cfg) + return None diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/confidence.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/confidence.py new file mode 100644 index 0000000..6f4a72e --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/confidence.py @@ -0,0 +1,73 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
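+
+# Illustrative construction of the config objects defined below (the values
+# are made up; `DensePoseConfidenceModelConfig.from_cfg` builds the same
+# structure from a detectron2 CfgNode):
+#
+#   conf = DensePoseConfidenceModelConfig(
+#       uv_confidence=DensePoseUVConfidenceConfig(
+#           enabled=True, epsilon=0.01, type=DensePoseUVConfidenceType.IID_ISO
+#       ),
+#       segm_confidence=DensePoseSegmConfidenceConfig(enabled=False),
+#   )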
+ +from dataclasses import dataclass +from enum import Enum + +from detectron2.config import CfgNode + + +class DensePoseUVConfidenceType(Enum): + """ + Statistical model type for confidence learning, possible values: + - "iid_iso": statistically independent identically distributed residuals + with anisotropic covariance + - "indep_aniso": statistically independent residuals with anisotropic + covariances + For details, see: + N. Neverova, D. Novotny, A. Vedaldi "Correlated Uncertainty for Learning + Dense Correspondences from Noisy Labels", p. 918--926, in Proc. NIPS 2019 + """ + + # fmt: off + IID_ISO = "iid_iso" + INDEP_ANISO = "indep_aniso" + # fmt: on + + +@dataclass +class DensePoseUVConfidenceConfig: + """ + Configuration options for confidence on UV data + """ + + enabled: bool = False + # lower bound on UV confidences + epsilon: float = 0.01 + type: DensePoseUVConfidenceType = DensePoseUVConfidenceType.IID_ISO + + +@dataclass +class DensePoseSegmConfidenceConfig: + """ + Configuration options for confidence on segmentation + """ + + enabled: bool = False + # lower bound on confidence values + epsilon: float = 0.01 + + +@dataclass +class DensePoseConfidenceModelConfig: + """ + Configuration options for confidence models + """ + + # confidence for U and V values + uv_confidence: DensePoseUVConfidenceConfig + # segmentation confidence + segm_confidence: DensePoseSegmConfidenceConfig + + @staticmethod + def from_cfg(cfg: CfgNode) -> "DensePoseConfidenceModelConfig": + return DensePoseConfidenceModelConfig( + uv_confidence=DensePoseUVConfidenceConfig( + enabled=cfg.MODEL.ROI_DENSEPOSE_HEAD.UV_CONFIDENCE.ENABLED, + epsilon=cfg.MODEL.ROI_DENSEPOSE_HEAD.UV_CONFIDENCE.EPSILON, + type=DensePoseUVConfidenceType(cfg.MODEL.ROI_DENSEPOSE_HEAD.UV_CONFIDENCE.TYPE), + ), + segm_confidence=DensePoseSegmConfidenceConfig( + enabled=cfg.MODEL.ROI_DENSEPOSE_HEAD.SEGM_CONFIDENCE.ENABLED, + epsilon=cfg.MODEL.ROI_DENSEPOSE_HEAD.SEGM_CONFIDENCE.EPSILON, + ), + ) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/cse/__init__.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/cse/__init__.py new file mode 100644 index 0000000..a227360 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/cse/__init__.py @@ -0,0 +1,5 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +from .vertex_direct_embedder import VertexDirectEmbedder +from .vertex_feature_embedder import VertexFeatureEmbedder +from .embedder import Embedder diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/cse/embedder.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/cse/embedder.py new file mode 100644 index 0000000..e326953 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/cse/embedder.py @@ -0,0 +1,130 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved + +import logging +import numpy as np +import pickle +from enum import Enum +from typing import Optional +import torch +from torch import nn + +from detectron2.config import CfgNode +from detectron2.utils.file_io import PathManager + +from .vertex_direct_embedder import VertexDirectEmbedder +from .vertex_feature_embedder import VertexFeatureEmbedder + + +class EmbedderType(Enum): + """ + Embedder type which defines how vertices are mapped into the embedding space: + - "vertex_direct": direct vertex embedding + - "vertex_feature": embedding vertex features + """ + + VERTEX_DIRECT = "vertex_direct" + VERTEX_FEATURE = "vertex_feature" + + +def create_embedder(embedder_spec: CfgNode, embedder_dim: int) -> nn.Module: + """ + Create an embedder based on the provided configuration + + Args: + embedder_spec (CfgNode): embedder configuration + embedder_dim (int): embedding space dimensionality + Return: + An embedder instance for the specified configuration + Raises ValueError, in case of unexpected embedder type + """ + embedder_type = EmbedderType(embedder_spec.TYPE) + if embedder_type == EmbedderType.VERTEX_DIRECT: + embedder = VertexDirectEmbedder( + num_vertices=embedder_spec.NUM_VERTICES, + embed_dim=embedder_dim, + ) + if embedder_spec.INIT_FILE != "": + embedder.load(embedder_spec.INIT_FILE) + elif embedder_type == EmbedderType.VERTEX_FEATURE: + embedder = VertexFeatureEmbedder( + num_vertices=embedder_spec.NUM_VERTICES, + feature_dim=embedder_spec.FEATURE_DIM, + embed_dim=embedder_dim, + train_features=embedder_spec.FEATURES_TRAINABLE, + ) + if embedder_spec.INIT_FILE != "": + embedder.load(embedder_spec.INIT_FILE) + else: + raise ValueError(f"Unexpected embedder type {embedder_type}") + + if not embedder_spec.IS_TRAINABLE: + embedder.requires_grad_(False) + + return embedder + + +class Embedder(nn.Module): + """ + Embedder module that serves as a container for embedders to use with different + meshes. Extends Module to automatically save / load state dict. + """ + + DEFAULT_MODEL_CHECKPOINT_PREFIX = "roi_heads.embedder." + + def __init__(self, cfg: CfgNode): + """ + Initialize mesh embedders. An embedder for mesh `i` is stored in a submodule + "embedder_{i}". 
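+        Vertex embeddings for a given mesh are then obtained by calling the
+        module with the mesh name, e.g. `embedder(mesh_name)` (see `forward`).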
+ + Args: + cfg (CfgNode): configuration options + """ + super(Embedder, self).__init__() + self.mesh_names = set() + embedder_dim = cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBED_SIZE + logger = logging.getLogger(__name__) + for mesh_name, embedder_spec in cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBEDDERS.items(): + logger.info(f"Adding embedder embedder_{mesh_name} with spec {embedder_spec}") + self.add_module(f"embedder_{mesh_name}", create_embedder(embedder_spec, embedder_dim)) + self.mesh_names.add(mesh_name) + if cfg.MODEL.WEIGHTS != "": + self.load_from_model_checkpoint(cfg.MODEL.WEIGHTS) + + def load_from_model_checkpoint(self, fpath: str, prefix: Optional[str] = None): + if prefix is None: + prefix = Embedder.DEFAULT_MODEL_CHECKPOINT_PREFIX + state_dict = None + if fpath.endswith(".pkl"): + with PathManager.open(fpath, "rb") as hFile: + state_dict = pickle.load(hFile, encoding="latin1") # pyre-ignore[6] + else: + with PathManager.open(fpath, "rb") as hFile: + state_dict = torch.load(hFile, map_location=torch.device("cpu")) + if state_dict is not None and "model" in state_dict: + state_dict_local = {} + for key in state_dict["model"]: + if key.startswith(prefix): + v_key = state_dict["model"][key] + if isinstance(v_key, np.ndarray): + v_key = torch.from_numpy(v_key) + state_dict_local[key[len(prefix) :]] = v_key + # non-strict loading to finetune on different meshes + # pyre-fixme[6]: Expected `OrderedDict[typing.Any, typing.Any]` for 1st + # param but got `Dict[typing.Any, typing.Any]`. + self.load_state_dict(state_dict_local, strict=False) + + def forward(self, mesh_name: str) -> torch.Tensor: + """ + Produce vertex embeddings for the specific mesh; vertex embeddings are + a tensor of shape [N, D] where: + N = number of vertices + D = number of dimensions in the embedding space + Args: + mesh_name (str): name of a mesh for which to obtain vertex embeddings + Return: + Vertex embeddings, a tensor of shape [N, D] + """ + return getattr(self, f"embedder_{mesh_name}")() + + def has_embeddings(self, mesh_name: str) -> bool: + return hasattr(self, f"embedder_{mesh_name}") diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/cse/utils.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/cse/utils.py new file mode 100644 index 0000000..18480db --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/cse/utils.py @@ -0,0 +1,85 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved + +import torch +from torch.nn import functional as F + + +def squared_euclidean_distance_matrix(pts1: torch.Tensor, pts2: torch.Tensor) -> torch.Tensor: + """ + Get squared Euclidean Distance Matrix + Computes pairwise squared Euclidean distances between points + + Args: + pts1: Tensor [M x D], M is the number of points, D is feature dimensionality + pts2: Tensor [N x D], N is the number of points, D is feature dimensionality + + Return: + Tensor [M, N]: matrix of squared Euclidean distances; at index (m, n) + it contains || pts1[m] - pts2[n] ||^2 + """ + edm = torch.mm(-2 * pts1, pts2.t()) + edm += (pts1 * pts1).sum(1, keepdim=True) + (pts2 * pts2).sum(1, keepdim=True).t() + return edm.contiguous() + + +def normalize_embeddings(embeddings: torch.Tensor, epsilon: float = 1e-6) -> torch.Tensor: + """ + Normalize N D-dimensional embedding vectors arranged in a tensor [N, D] + + Args: + embeddings (tensor [N, D]): N D-dimensional embedding vectors + epsilon (float): minimum value for a vector norm + Return: + Normalized embeddings (tensor [N, D]), such that L2 vector norms are all equal to 1. + """ + return embeddings / torch.clamp( + embeddings.norm(p=None, dim=1, keepdim=True), min=epsilon # pyre-ignore[6] + ) + + +def get_closest_vertices_mask_from_ES( + E: torch.Tensor, + S: torch.Tensor, + h: int, + w: int, + mesh_vertex_embeddings: torch.Tensor, + device: torch.device, +): + """ + Interpolate Embeddings and Segmentations to the size of a given bounding box, + and compute closest vertices and the segmentation mask + + Args: + E (tensor [1, D, H, W]): D-dimensional embedding vectors for every point of the + default-sized box + S (tensor [1, 2, H, W]): 2-dimensional segmentation mask for every point of the + default-sized box + h (int): height of the target bounding box + w (int): width of the target bounding box + mesh_vertex_embeddings (tensor [N, D]): vertex embeddings for a chosen mesh + N is the number of vertices in the mesh, D is feature dimensionality + device (torch.device): device to move the tensors to + Return: + Closest Vertices (tensor [h, w]), int, for every point of the resulting box + Segmentation mask (tensor [h, w]), boolean, for every point of the resulting box + """ + # pyre-fixme[6]: Expected `Optional[int]` for 2nd param but got `Tuple[int, int]`. + embedding_resized = F.interpolate(E, size=(h, w), mode="bilinear")[0].to(device) + # pyre-fixme[6]: Expected `Optional[int]` for 2nd param but got `Tuple[int, int]`. 
+    coarse_segm_resized = F.interpolate(S, size=(h, w), mode="bilinear")[0].to(device)
+    mask = coarse_segm_resized.argmax(0) > 0
+    closest_vertices = torch.zeros(mask.shape, dtype=torch.long, device=device)
+    all_embeddings = embedding_resized[:, mask].t()
+    size_chunk = 10_000  # Chunking to avoid possible OOM
+    edm = []
+    if len(all_embeddings) == 0:
+        return closest_vertices, mask
+    for chunk in range((len(all_embeddings) - 1) // size_chunk + 1):
+        chunk_embeddings = all_embeddings[size_chunk * chunk : size_chunk * (chunk + 1)]
+        edm.append(
+            torch.argmin(
+                squared_euclidean_distance_matrix(chunk_embeddings, mesh_vertex_embeddings), dim=1
+            )
+        )
+    closest_vertices[mask] = torch.cat(edm)
+    return closest_vertices, mask
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/cse/vertex_direct_embedder.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/cse/vertex_direct_embedder.py
new file mode 100644
index 0000000..60fba27
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/cse/vertex_direct_embedder.py
@@ -0,0 +1,64 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+
+import pickle
+import torch
+from torch import nn
+
+from detectron2.utils.file_io import PathManager
+
+from .utils import normalize_embeddings
+
+
+class VertexDirectEmbedder(nn.Module):
+    """
+    Class responsible for embedding vertices. Vertex embeddings take
+    the form of a tensor of size [N, D], where
+        N = number of vertices
+        D = number of dimensions in the embedding space
+    """
+
+    def __init__(self, num_vertices: int, embed_dim: int):
+        """
+        Initialize embedder; embeddings are zero-initialized (use `load`
+        to fill them with pretrained values)
+
+        Args:
+            num_vertices (int): number of vertices to embed
+            embed_dim (int): number of dimensions in the embedding space
+        """
+        super(VertexDirectEmbedder, self).__init__()
+        self.embeddings = nn.Parameter(torch.Tensor(num_vertices, embed_dim))
+        self.reset_parameters()
+
+    @torch.no_grad()
+    def reset_parameters(self):
+        """
+        Reset embeddings to zeros
+        """
+        self.embeddings.zero_()
+
+    def forward(self) -> torch.Tensor:
+        """
+        Produce vertex embeddings, a tensor of shape [N, D] where:
+            N = number of vertices
+            D = number of dimensions in the embedding space
+
+        Return:
+           Full vertex embeddings, a tensor of shape [N, D]
+        """
+        return normalize_embeddings(self.embeddings)
+
+    @torch.no_grad()
+    def load(self, fpath: str):
+        """
+        Load data from a file
+
+        Args:
+            fpath (str): file path to load data from
+        """
+        with PathManager.open(fpath, "rb") as hFile:
+            data = pickle.load(hFile)  # pyre-ignore[6]
+            for name in ["embeddings"]:
+                if name in data:
+                    getattr(self, name).copy_(
+                        torch.tensor(data[name]).float().to(device=getattr(self, name).device)
+                    )
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/cse/vertex_feature_embedder.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/cse/vertex_feature_embedder.py
new file mode 100644
index 0000000..dcb2f20
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/cse/vertex_feature_embedder.py
@@ -0,0 +1,75 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+
+import pickle
+import torch
+from torch import nn
+
+from detectron2.utils.file_io import PathManager
+
+from .utils import normalize_embeddings
+
+
+class VertexFeatureEmbedder(nn.Module):
+    """
+    Class responsible for embedding vertex features.
Mapping from
+    feature space to the embedding space is a tensor of size [K, D], where
+        K = number of dimensions in the feature space
+        D = number of dimensions in the embedding space
+    Vertex features are a tensor of size [N, K], where
+        N = number of vertices
+        K = number of dimensions in the feature space
+    Vertex embeddings are computed as F * E = tensor of size [N, D]
+    """
+
+    def __init__(
+        self, num_vertices: int, feature_dim: int, embed_dim: int, train_features: bool = False
+    ):
+        """
+        Initialize embedder; features and embeddings are zero-initialized
+        (use `load` to fill them with pretrained values)
+
+        Args:
+            num_vertices (int): number of vertices to embed
+            feature_dim (int): number of dimensions in the feature space
+            embed_dim (int): number of dimensions in the embedding space
+            train_features (bool): determines whether vertex features should
+                be trained (default: False)
+        """
+        super(VertexFeatureEmbedder, self).__init__()
+        if train_features:
+            self.features = nn.Parameter(torch.Tensor(num_vertices, feature_dim))
+        else:
+            self.register_buffer("features", torch.Tensor(num_vertices, feature_dim))
+        self.embeddings = nn.Parameter(torch.Tensor(feature_dim, embed_dim))
+        self.reset_parameters()
+
+    @torch.no_grad()
+    def reset_parameters(self):
+        self.features.zero_()
+        self.embeddings.zero_()
+
+    def forward(self) -> torch.Tensor:
+        """
+        Produce vertex embeddings, a tensor of shape [N, D] where:
+            N = number of vertices
+            D = number of dimensions in the embedding space
+
+        Return:
+           Full vertex embeddings, a tensor of shape [N, D]
+        """
+        return normalize_embeddings(torch.mm(self.features, self.embeddings))
+
+    @torch.no_grad()
+    def load(self, fpath: str):
+        """
+        Load data from a file
+
+        Args:
+            fpath (str): file path to load data from
+        """
+        with PathManager.open(fpath, "rb") as hFile:
+            data = pickle.load(hFile)  # pyre-ignore[6]
+            for name in ["features", "embeddings"]:
+                if name in data:
+                    getattr(self, name).copy_(
+                        torch.tensor(data[name]).float().to(device=getattr(self, name).device)
+                    )
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/densepose_checkpoint.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/densepose_checkpoint.py
new file mode 100644
index 0000000..8c2b4f2
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/densepose_checkpoint.py
@@ -0,0 +1,35 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+from collections import OrderedDict
+
+from detectron2.checkpoint import DetectionCheckpointer
+
+
+def _rename_HRNet_weights(weights):
+    # We detect and rename HRNet weights for DensePose. 1956 and 1716 are values that are
+    # common to all HRNet pretrained weights, and should be enough to accurately identify them
+    if (
+        len(weights["model"].keys()) == 1956
+        and len([k for k in weights["model"].keys() if k.startswith("stage")]) == 1716
+    ):
+        hrnet_weights = OrderedDict()
+        for k in weights["model"].keys():
+            hrnet_weights["backbone.bottom_up."
+ str(k)] = weights["model"][k] + return {"model": hrnet_weights} + else: + return weights + + +class DensePoseCheckpointer(DetectionCheckpointer): + """ + Same as :class:`DetectionCheckpointer`, but is able to handle HRNet weights + """ + + def __init__(self, model, save_dir="", *, save_to_disk=None, **checkpointables): + super().__init__(model, save_dir, save_to_disk=save_to_disk, **checkpointables) + + def _load_file(self, filename: str) -> object: + """ + Adding hrnet support + """ + weights = super()._load_file(filename) + return _rename_HRNet_weights(weights) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/filter.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/filter.py new file mode 100644 index 0000000..18a8567 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/filter.py @@ -0,0 +1,94 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from typing import List +import torch + +from detectron2.config import CfgNode +from detectron2.structures import Instances +from detectron2.structures.boxes import matched_pairwise_iou + + +class DensePoseDataFilter(object): + def __init__(self, cfg: CfgNode): + self.iou_threshold = cfg.MODEL.ROI_DENSEPOSE_HEAD.FG_IOU_THRESHOLD + self.keep_masks = cfg.MODEL.ROI_DENSEPOSE_HEAD.COARSE_SEGM_TRAINED_BY_MASKS + + @torch.no_grad() + def __call__(self, features: List[torch.Tensor], proposals_with_targets: List[Instances]): + """ + Filters proposals with targets to keep only the ones relevant for + DensePose training + + Args: + features (list[Tensor]): input data as a list of features, + each feature is a tensor. Axis 0 represents the number of + images `N` in the input data; axes 1-3 are channels, + height, and width, which may vary between features + (e.g., if a feature pyramid is used). + proposals_with_targets (list[Instances]): length `N` list of + `Instances`. The i-th `Instances` contains instances + (proposals, GT) for the i-th input image, + Returns: + list[Tensor]: filtered features + list[Instances]: filtered proposals + """ + proposals_filtered = [] + # TODO: the commented out code was supposed to correctly deal with situations + # where no valid DensePose GT is available for certain images. The corresponding + # image features were sliced and proposals were filtered. This led to performance + # deterioration, both in terms of runtime and in terms of evaluation results. 
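+        # (Illustrative example with assumed numbers: if FG_IOU_THRESHOLD is
+        # 0.7, a proposal whose matched GT box overlaps it with IoU 0.6 is
+        # dropped by the threshold check below, and a proposal carrying
+        # neither a DensePose annotation nor, when keep_masks is set, a mask
+        # annotation is filtered out afterwards.)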
+ # + # feature_mask = torch.ones( + # len(proposals_with_targets), + # dtype=torch.bool, + # device=features[0].device if len(features) > 0 else torch.device("cpu"), + # ) + for i, proposals_per_image in enumerate(proposals_with_targets): + if not proposals_per_image.has("gt_densepose") and ( + not proposals_per_image.has("gt_masks") or not self.keep_masks + ): + # feature_mask[i] = 0 + continue + gt_boxes = proposals_per_image.gt_boxes + est_boxes = proposals_per_image.proposal_boxes + # apply match threshold for densepose head + iou = matched_pairwise_iou(gt_boxes, est_boxes) + iou_select = iou > self.iou_threshold + proposals_per_image = proposals_per_image[iou_select] # pyre-ignore[6] + + N_gt_boxes = len(proposals_per_image.gt_boxes) + assert N_gt_boxes == len(proposals_per_image.proposal_boxes), ( + f"The number of GT boxes {N_gt_boxes} is different from the " + f"number of proposal boxes {len(proposals_per_image.proposal_boxes)}" + ) + # filter out any target without suitable annotation + if self.keep_masks: + gt_masks = ( + proposals_per_image.gt_masks + if hasattr(proposals_per_image, "gt_masks") + else [None] * N_gt_boxes + ) + else: + gt_masks = [None] * N_gt_boxes + gt_densepose = ( + proposals_per_image.gt_densepose + if hasattr(proposals_per_image, "gt_densepose") + else [None] * N_gt_boxes + ) + assert len(gt_masks) == N_gt_boxes + assert len(gt_densepose) == N_gt_boxes + selected_indices = [ + i + for i, (dp_target, mask_target) in enumerate(zip(gt_densepose, gt_masks)) + if (dp_target is not None) or (mask_target is not None) + ] + # if not len(selected_indices): + # feature_mask[i] = 0 + # continue + if len(selected_indices) != N_gt_boxes: + proposals_per_image = proposals_per_image[selected_indices] # pyre-ignore[6] + assert len(proposals_per_image.gt_boxes) == len(proposals_per_image.proposal_boxes) + proposals_filtered.append(proposals_per_image) + # features_filtered = [feature[feature_mask] for feature in features] + # return features_filtered, proposals_filtered + return features, proposals_filtered diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/hrfpn.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/hrfpn.py new file mode 100644 index 0000000..6e3c222 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/hrfpn.py @@ -0,0 +1,182 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +""" +MIT License +Copyright (c) 2019 Microsoft +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
+""" + +import torch +import torch.nn as nn +import torch.nn.functional as F + +from detectron2.layers import ShapeSpec +from detectron2.modeling.backbone import BACKBONE_REGISTRY +from detectron2.modeling.backbone.backbone import Backbone + +from .hrnet import build_pose_hrnet_backbone + + +class HRFPN(Backbone): + """HRFPN (High Resolution Feature Pyramids) + Transforms outputs of HRNet backbone so they are suitable for the ROI_heads + arXiv: https://arxiv.org/abs/1904.04514 + Adapted from https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/necks/hrfpn.py + Args: + bottom_up: (list) output of HRNet + in_features (list): names of the input features (output of HRNet) + in_channels (list): number of channels for each branch + out_channels (int): output channels of feature pyramids + n_out_features (int): number of output stages + pooling (str): pooling for generating feature pyramids (from {MAX, AVG}) + share_conv (bool): Have one conv per output, or share one with all the outputs + """ + + def __init__( + self, + bottom_up, + in_features, + n_out_features, + in_channels, + out_channels, + pooling="AVG", + share_conv=False, + ): + super(HRFPN, self).__init__() + assert isinstance(in_channels, list) + self.bottom_up = bottom_up + self.in_features = in_features + self.n_out_features = n_out_features + self.in_channels = in_channels + self.out_channels = out_channels + self.num_ins = len(in_channels) + self.share_conv = share_conv + + if self.share_conv: + self.fpn_conv = nn.Conv2d( + in_channels=out_channels, out_channels=out_channels, kernel_size=3, padding=1 + ) + else: + self.fpn_conv = nn.ModuleList() + for _ in range(self.n_out_features): + self.fpn_conv.append( + nn.Conv2d( + in_channels=out_channels, + out_channels=out_channels, + kernel_size=3, + padding=1, + ) + ) + + # Custom change: Replaces a simple bilinear interpolation + self.interp_conv = nn.ModuleList() + for i in range(len(self.in_features)): + self.interp_conv.append( + nn.Sequential( + nn.ConvTranspose2d( + in_channels=in_channels[i], + out_channels=in_channels[i], + kernel_size=4, + stride=2 ** i, + padding=0, + output_padding=0, + bias=False, + ), + nn.BatchNorm2d(in_channels[i], momentum=0.1), + nn.ReLU(inplace=True), + ) + ) + + # Custom change: Replaces a couple (reduction conv + pooling) by one conv + self.reduction_pooling_conv = nn.ModuleList() + for i in range(self.n_out_features): + self.reduction_pooling_conv.append( + nn.Sequential( + nn.Conv2d(sum(in_channels), out_channels, kernel_size=2 ** i, stride=2 ** i), + nn.BatchNorm2d(out_channels, momentum=0.1), + nn.ReLU(inplace=True), + ) + ) + + if pooling == "MAX": + self.pooling = F.max_pool2d + else: + self.pooling = F.avg_pool2d + + self._out_features = [] + self._out_feature_channels = {} + self._out_feature_strides = {} + + for i in range(self.n_out_features): + self._out_features.append("p%d" % (i + 1)) + self._out_feature_channels.update({self._out_features[-1]: self.out_channels}) + self._out_feature_strides.update({self._out_features[-1]: 2 ** (i + 2)}) + + # default init_weights for conv(msra) and norm in ConvModule + def init_weights(self): + for m in self.modules(): + if isinstance(m, nn.Conv2d): + nn.init.kaiming_normal_(m.weight, a=1) + nn.init.constant_(m.bias, 0) + + def forward(self, inputs): + bottom_up_features = self.bottom_up(inputs) + assert len(bottom_up_features) == len(self.in_features) + inputs = [bottom_up_features[f] for f in self.in_features] + + outs = [] + for i in range(len(inputs)): + 
outs.append(self.interp_conv[i](inputs[i])) + shape_2 = min(o.shape[2] for o in outs) + shape_3 = min(o.shape[3] for o in outs) + out = torch.cat([o[:, :, :shape_2, :shape_3] for o in outs], dim=1) + outs = [] + for i in range(self.n_out_features): + outs.append(self.reduction_pooling_conv[i](out)) + for i in range(len(outs)): # Make shapes consistent + outs[-1 - i] = outs[-1 - i][ + :, :, : outs[-1].shape[2] * 2 ** i, : outs[-1].shape[3] * 2 ** i + ] + outputs = [] + for i in range(len(outs)): + if self.share_conv: + outputs.append(self.fpn_conv(outs[i])) + else: + outputs.append(self.fpn_conv[i](outs[i])) + + assert len(self._out_features) == len(outputs) + return dict(zip(self._out_features, outputs)) + + +@BACKBONE_REGISTRY.register() +def build_hrfpn_backbone(cfg, input_shape: ShapeSpec) -> HRFPN: + + in_channels = cfg.MODEL.HRNET.STAGE4.NUM_CHANNELS + in_features = ["p%d" % (i + 1) for i in range(cfg.MODEL.HRNET.STAGE4.NUM_BRANCHES)] + n_out_features = len(cfg.MODEL.ROI_HEADS.IN_FEATURES) + out_channels = cfg.MODEL.HRNET.HRFPN.OUT_CHANNELS + hrnet = build_pose_hrnet_backbone(cfg, input_shape) + hrfpn = HRFPN( + hrnet, + in_features, + n_out_features, + in_channels, + out_channels, + pooling="AVG", + share_conv=False, + ) + + return hrfpn diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/hrnet.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/hrnet.py new file mode 100644 index 0000000..ca24671 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/hrnet.py @@ -0,0 +1,474 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +# ------------------------------------------------------------------------------ +# Copyright (c) Microsoft +# Licensed under the MIT License. +# Written by Bin Xiao (leoxiaobin@gmail.com) +# Modified by Bowen Cheng (bcheng9@illinois.edu) +# Adapted from https://github.com/HRNet/Higher-HRNet-Human-Pose-Estimation/blob/master/lib/models/pose_higher_hrnet.py # noqa +# ------------------------------------------------------------------------------ + +from __future__ import absolute_import, division, print_function +import logging +import torch.nn as nn + +from detectron2.layers import ShapeSpec +from detectron2.modeling.backbone import BACKBONE_REGISTRY +from detectron2.modeling.backbone.backbone import Backbone + +BN_MOMENTUM = 0.1 +logger = logging.getLogger(__name__) + +__all__ = ["build_pose_hrnet_backbone", "PoseHigherResolutionNet"] + + +def conv3x3(in_planes, out_planes, stride=1): + """3x3 convolution with padding""" + return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride, padding=1, bias=False) + + +class BasicBlock(nn.Module): + expansion = 1 + + def __init__(self, inplanes, planes, stride=1, downsample=None): + super(BasicBlock, self).__init__() + self.conv1 = conv3x3(inplanes, planes, stride) + self.bn1 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM) + self.relu = nn.ReLU(inplace=True) + self.conv2 = conv3x3(planes, planes) + self.bn2 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + + +class Bottleneck(nn.Module): + expansion = 4 + + def __init__(self, inplanes, planes, stride=1, downsample=None): + super(Bottleneck, self).__init__() + self.conv1 = 
nn.Conv2d(inplanes, planes, kernel_size=1, bias=False) + self.bn1 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM) + self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride, padding=1, bias=False) + self.bn2 = nn.BatchNorm2d(planes, momentum=BN_MOMENTUM) + self.conv3 = nn.Conv2d(planes, planes * self.expansion, kernel_size=1, bias=False) + self.bn3 = nn.BatchNorm2d(planes * self.expansion, momentum=BN_MOMENTUM) + self.relu = nn.ReLU(inplace=True) + self.downsample = downsample + self.stride = stride + + def forward(self, x): + residual = x + + out = self.conv1(x) + out = self.bn1(out) + out = self.relu(out) + + out = self.conv2(out) + out = self.bn2(out) + out = self.relu(out) + + out = self.conv3(out) + out = self.bn3(out) + + if self.downsample is not None: + residual = self.downsample(x) + + out += residual + out = self.relu(out) + + return out + + +class HighResolutionModule(nn.Module): + """HighResolutionModule + Building block of the PoseHigherResolutionNet (see lower) + arXiv: https://arxiv.org/abs/1908.10357 + Args: + num_branches (int): number of branches of the modyle + blocks (str): type of block of the module + num_blocks (int): number of blocks of the module + num_inchannels (int): number of input channels of the module + num_channels (list): number of channels of each branch + multi_scale_output (bool): only used by the last module of PoseHigherResolutionNet + """ + + def __init__( + self, + num_branches, + blocks, + num_blocks, + num_inchannels, + num_channels, + multi_scale_output=True, + ): + super(HighResolutionModule, self).__init__() + self._check_branches(num_branches, blocks, num_blocks, num_inchannels, num_channels) + + self.num_inchannels = num_inchannels + self.num_branches = num_branches + + self.multi_scale_output = multi_scale_output + + self.branches = self._make_branches(num_branches, blocks, num_blocks, num_channels) + self.fuse_layers = self._make_fuse_layers() + self.relu = nn.ReLU(True) + + def _check_branches(self, num_branches, blocks, num_blocks, num_inchannels, num_channels): + if num_branches != len(num_blocks): + error_msg = "NUM_BRANCHES({}) <> NUM_BLOCKS({})".format(num_branches, len(num_blocks)) + logger.error(error_msg) + raise ValueError(error_msg) + + if num_branches != len(num_channels): + error_msg = "NUM_BRANCHES({}) <> NUM_CHANNELS({})".format( + num_branches, len(num_channels) + ) + logger.error(error_msg) + raise ValueError(error_msg) + + if num_branches != len(num_inchannels): + error_msg = "NUM_BRANCHES({}) <> NUM_INCHANNELS({})".format( + num_branches, len(num_inchannels) + ) + logger.error(error_msg) + raise ValueError(error_msg) + + def _make_one_branch(self, branch_index, block, num_blocks, num_channels, stride=1): + downsample = None + if ( + stride != 1 + or self.num_inchannels[branch_index] != num_channels[branch_index] * block.expansion + ): + downsample = nn.Sequential( + nn.Conv2d( + self.num_inchannels[branch_index], + num_channels[branch_index] * block.expansion, + kernel_size=1, + stride=stride, + bias=False, + ), + nn.BatchNorm2d(num_channels[branch_index] * block.expansion, momentum=BN_MOMENTUM), + ) + + layers = [] + layers.append( + block(self.num_inchannels[branch_index], num_channels[branch_index], stride, downsample) + ) + self.num_inchannels[branch_index] = num_channels[branch_index] * block.expansion + for _ in range(1, num_blocks[branch_index]): + layers.append(block(self.num_inchannels[branch_index], num_channels[branch_index])) + + return nn.Sequential(*layers) + + def _make_branches(self, 
num_branches, block, num_blocks, num_channels): + branches = [] + + for i in range(num_branches): + branches.append(self._make_one_branch(i, block, num_blocks, num_channels)) + + return nn.ModuleList(branches) + + def _make_fuse_layers(self): + if self.num_branches == 1: + return None + + num_branches = self.num_branches + num_inchannels = self.num_inchannels + fuse_layers = [] + for i in range(num_branches if self.multi_scale_output else 1): + fuse_layer = [] + for j in range(num_branches): + if j > i: + fuse_layer.append( + nn.Sequential( + nn.Conv2d(num_inchannels[j], num_inchannels[i], 1, 1, 0, bias=False), + nn.BatchNorm2d(num_inchannels[i]), + nn.Upsample(scale_factor=2 ** (j - i), mode="nearest"), + ) + ) + elif j == i: + fuse_layer.append(None) + else: + conv3x3s = [] + for k in range(i - j): + if k == i - j - 1: + num_outchannels_conv3x3 = num_inchannels[i] + conv3x3s.append( + nn.Sequential( + nn.Conv2d( + num_inchannels[j], + num_outchannels_conv3x3, + 3, + 2, + 1, + bias=False, + ), + nn.BatchNorm2d(num_outchannels_conv3x3), + ) + ) + else: + num_outchannels_conv3x3 = num_inchannels[j] + conv3x3s.append( + nn.Sequential( + nn.Conv2d( + num_inchannels[j], + num_outchannels_conv3x3, + 3, + 2, + 1, + bias=False, + ), + nn.BatchNorm2d(num_outchannels_conv3x3), + nn.ReLU(True), + ) + ) + fuse_layer.append(nn.Sequential(*conv3x3s)) + fuse_layers.append(nn.ModuleList(fuse_layer)) + + return nn.ModuleList(fuse_layers) + + def get_num_inchannels(self): + return self.num_inchannels + + def forward(self, x): + if self.num_branches == 1: + return [self.branches[0](x[0])] + + for i in range(self.num_branches): + x[i] = self.branches[i](x[i]) + + x_fuse = [] + + for i in range(len(self.fuse_layers)): + y = x[0] if i == 0 else self.fuse_layers[i][0](x[0]) + for j in range(1, self.num_branches): + if i == j: + y = y + x[j] + else: + z = self.fuse_layers[i][j](x[j])[:, :, : y.shape[2], : y.shape[3]] + y = y + z + x_fuse.append(self.relu(y)) + + return x_fuse + + +blocks_dict = {"BASIC": BasicBlock, "BOTTLENECK": Bottleneck} + + +class PoseHigherResolutionNet(Backbone): + """PoseHigherResolutionNet + Composed of several HighResolutionModule tied together with ConvNets + Adapted from the GitHub version to fit with HRFPN and the Detectron2 infrastructure + arXiv: https://arxiv.org/abs/1908.10357 + """ + + def __init__(self, cfg, **kwargs): + self.inplanes = cfg.MODEL.HRNET.STEM_INPLANES + super(PoseHigherResolutionNet, self).__init__() + + # stem net + self.conv1 = nn.Conv2d(3, 64, kernel_size=3, stride=2, padding=1, bias=False) + self.bn1 = nn.BatchNorm2d(64, momentum=BN_MOMENTUM) + self.conv2 = nn.Conv2d(64, 64, kernel_size=3, stride=2, padding=1, bias=False) + self.bn2 = nn.BatchNorm2d(64, momentum=BN_MOMENTUM) + self.relu = nn.ReLU(inplace=True) + self.layer1 = self._make_layer(Bottleneck, 64, 4) + + self.stage2_cfg = cfg.MODEL.HRNET.STAGE2 + num_channels = self.stage2_cfg.NUM_CHANNELS + block = blocks_dict[self.stage2_cfg.BLOCK] + num_channels = [num_channels[i] * block.expansion for i in range(len(num_channels))] + self.transition1 = self._make_transition_layer([256], num_channels) + self.stage2, pre_stage_channels = self._make_stage(self.stage2_cfg, num_channels) + + self.stage3_cfg = cfg.MODEL.HRNET.STAGE3 + num_channels = self.stage3_cfg.NUM_CHANNELS + block = blocks_dict[self.stage3_cfg.BLOCK] + num_channels = [num_channels[i] * block.expansion for i in range(len(num_channels))] + self.transition2 = self._make_transition_layer(pre_stage_channels, num_channels) + self.stage3, 
pre_stage_channels = self._make_stage(self.stage3_cfg, num_channels) + + self.stage4_cfg = cfg.MODEL.HRNET.STAGE4 + num_channels = self.stage4_cfg.NUM_CHANNELS + block = blocks_dict[self.stage4_cfg.BLOCK] + num_channels = [num_channels[i] * block.expansion for i in range(len(num_channels))] + self.transition3 = self._make_transition_layer(pre_stage_channels, num_channels) + self.stage4, pre_stage_channels = self._make_stage( + self.stage4_cfg, num_channels, multi_scale_output=True + ) + + self._out_features = [] + self._out_feature_channels = {} + self._out_feature_strides = {} + + for i in range(cfg.MODEL.HRNET.STAGE4.NUM_BRANCHES): + self._out_features.append("p%d" % (i + 1)) + self._out_feature_channels.update( + {self._out_features[-1]: cfg.MODEL.HRNET.STAGE4.NUM_CHANNELS[i]} + ) + self._out_feature_strides.update({self._out_features[-1]: 1}) + + def _get_deconv_cfg(self, deconv_kernel): + if deconv_kernel == 4: + padding = 1 + output_padding = 0 + elif deconv_kernel == 3: + padding = 1 + output_padding = 1 + elif deconv_kernel == 2: + padding = 0 + output_padding = 0 + + return deconv_kernel, padding, output_padding + + def _make_transition_layer(self, num_channels_pre_layer, num_channels_cur_layer): + num_branches_cur = len(num_channels_cur_layer) + num_branches_pre = len(num_channels_pre_layer) + + transition_layers = [] + for i in range(num_branches_cur): + if i < num_branches_pre: + if num_channels_cur_layer[i] != num_channels_pre_layer[i]: + transition_layers.append( + nn.Sequential( + nn.Conv2d( + num_channels_pre_layer[i], + num_channels_cur_layer[i], + 3, + 1, + 1, + bias=False, + ), + nn.BatchNorm2d(num_channels_cur_layer[i]), + nn.ReLU(inplace=True), + ) + ) + else: + transition_layers.append(None) + else: + conv3x3s = [] + for j in range(i + 1 - num_branches_pre): + inchannels = num_channels_pre_layer[-1] + outchannels = ( + num_channels_cur_layer[i] if j == i - num_branches_pre else inchannels + ) + conv3x3s.append( + nn.Sequential( + nn.Conv2d(inchannels, outchannels, 3, 2, 1, bias=False), + nn.BatchNorm2d(outchannels), + nn.ReLU(inplace=True), + ) + ) + transition_layers.append(nn.Sequential(*conv3x3s)) + + return nn.ModuleList(transition_layers) + + def _make_layer(self, block, planes, blocks, stride=1): + downsample = None + if stride != 1 or self.inplanes != planes * block.expansion: + downsample = nn.Sequential( + nn.Conv2d( + self.inplanes, + planes * block.expansion, + kernel_size=1, + stride=stride, + bias=False, + ), + nn.BatchNorm2d(planes * block.expansion, momentum=BN_MOMENTUM), + ) + + layers = [] + layers.append(block(self.inplanes, planes, stride, downsample)) + self.inplanes = planes * block.expansion + for _ in range(1, blocks): + layers.append(block(self.inplanes, planes)) + + return nn.Sequential(*layers) + + def _make_stage(self, layer_config, num_inchannels, multi_scale_output=True): + num_modules = layer_config["NUM_MODULES"] + num_branches = layer_config["NUM_BRANCHES"] + num_blocks = layer_config["NUM_BLOCKS"] + num_channels = layer_config["NUM_CHANNELS"] + block = blocks_dict[layer_config["BLOCK"]] + + modules = [] + for i in range(num_modules): + # multi_scale_output is only used last module + if not multi_scale_output and i == num_modules - 1: + reset_multi_scale_output = False + else: + reset_multi_scale_output = True + + modules.append( + HighResolutionModule( + num_branches, + block, + num_blocks, + num_inchannels, + num_channels, + reset_multi_scale_output, + ) + ) + num_inchannels = modules[-1].get_num_inchannels() + + return 
nn.Sequential(*modules), num_inchannels + + def forward(self, x): + x = self.conv1(x) + x = self.bn1(x) + x = self.relu(x) + x = self.conv2(x) + x = self.bn2(x) + x = self.relu(x) + x = self.layer1(x) + + x_list = [] + for i in range(self.stage2_cfg.NUM_BRANCHES): + if self.transition1[i] is not None: + x_list.append(self.transition1[i](x)) + else: + x_list.append(x) + y_list = self.stage2(x_list) + + x_list = [] + for i in range(self.stage3_cfg.NUM_BRANCHES): + if self.transition2[i] is not None: + x_list.append(self.transition2[i](y_list[-1])) + else: + x_list.append(y_list[i]) + y_list = self.stage3(x_list) + + x_list = [] + for i in range(self.stage4_cfg.NUM_BRANCHES): + if self.transition3[i] is not None: + x_list.append(self.transition3[i](y_list[-1])) + else: + x_list.append(y_list[i]) + y_list = self.stage4(x_list) + + assert len(self._out_features) == len(y_list) + return dict(zip(self._out_features, y_list)) # final_outputs + + +@BACKBONE_REGISTRY.register() +def build_pose_hrnet_backbone(cfg, input_shape: ShapeSpec): + model = PoseHigherResolutionNet(cfg) + return model diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/inference.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/inference.py new file mode 100644 index 0000000..de486d5 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/inference.py @@ -0,0 +1,43 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from dataclasses import fields +from typing import Any, List +import torch + +from detectron2.structures import Instances + + +def densepose_inference(densepose_predictor_output: Any, detections: List[Instances]) -> None: + """ + Splits DensePose predictor outputs into chunks, each chunk corresponds to + detections on one image. Predictor output chunks are stored in `pred_densepose` + attribute of the corresponding `Instances` object. + + Args: + densepose_predictor_output: a dataclass instance (can be of different types, + depending on predictor used for inference). Each field can be `None` + (if the corresponding output was not inferred) or a tensor of size + [N, ...], where N = N_1 + N_2 + .. + N_k is a total number of + detections on all images, N_1 is the number of detections on image 1, + N_2 is the number of detections on image 2, etc. + detections: a list of objects of type `Instance`, k-th object corresponds + to detections on k-th image. + """ + k = 0 + for detection_i in detections: + if densepose_predictor_output is None: + # don't add `pred_densepose` attribute + continue + n_i = len(detection_i) + PredictorOutput = type(densepose_predictor_output) + output_i_dict = {} + # we assume here that `densepose_predictor_output` is a dataclass object + for field in fields(densepose_predictor_output): + field_value = getattr(densepose_predictor_output, field.name) + # slice tensors + if isinstance(field_value, torch.Tensor): + output_i_dict[field.name] = field_value[k : k + n_i] + # leave others as is + else: + output_i_dict[field.name] = field_value + detection_i.pred_densepose = PredictorOutput(**output_i_dict) + k += n_i diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/__init__.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/__init__.py new file mode 100644 index 0000000..e5c5937 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/__init__.py @@ -0,0 +1,14 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
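+
+# Minimal usage sketch (illustrative; assumes a detectron2 CfgNode `cfg` with
+# the standard MODEL.ROI_DENSEPOSE_HEAD options set). Losses are looked up by
+# class name in the registry and constructed from config:
+#
+#   from densepose.modeling.losses import DENSEPOSE_LOSS_REGISTRY
+#   loss_cls = DENSEPOSE_LOSS_REGISTRY.get("DensePoseChartLoss")
+#   loss = loss_cls(cfg)
+#   losses = loss(proposals_with_gt, densepose_predictor_outputs)  # dict of tensors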
+ +from .chart import DensePoseChartLoss +from .chart_with_confidences import DensePoseChartWithConfidenceLoss +from .cse import DensePoseCseLoss +from .registry import DENSEPOSE_LOSS_REGISTRY + + +__all__ = [ + "DensePoseChartLoss", + "DensePoseChartWithConfidenceLoss", + "DensePoseCseLoss", + "DENSEPOSE_LOSS_REGISTRY", +] diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/chart.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/chart.py new file mode 100644 index 0000000..a2007af --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/chart.py @@ -0,0 +1,290 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from typing import Any, List +import torch +from torch.nn import functional as F + +from detectron2.config import CfgNode +from detectron2.structures import Instances + +from .mask_or_segm import MaskOrSegmentationLoss +from .registry import DENSEPOSE_LOSS_REGISTRY +from .utils import ( + BilinearInterpolationHelper, + ChartBasedAnnotationsAccumulator, + LossDict, + extract_packed_annotations_from_matches, +) + + +@DENSEPOSE_LOSS_REGISTRY.register() +class DensePoseChartLoss: + """ + DensePose loss for chart-based training. A mesh is split into charts, + each chart is given a label (I) and parametrized by 2 coordinates referred to + as U and V. Ground truth consists of a number of points annotated with + I, U and V values and coarse segmentation S defined for all pixels of the + object bounding box. In some cases (see `COARSE_SEGM_TRAINED_BY_MASKS`), + semantic segmentation annotations can be used as ground truth inputs as well. + + Estimated values are tensors: + * U coordinates, tensor of shape [N, C, S, S] + * V coordinates, tensor of shape [N, C, S, S] + * fine segmentation estimates, tensor of shape [N, C, S, S] with raw unnormalized + scores for each fine segmentation label at each location + * coarse segmentation estimates, tensor of shape [N, D, S, S] with raw unnormalized + scores for each coarse segmentation label at each location + where N is the number of detections, C is the number of fine segmentation + labels, S is the estimate size ( = width = height) and D is the number of + coarse segmentation channels. 
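+
+    For example (illustrative numbers only): with N = 7 detections, C = 25
+    fine segmentation labels (24 body charts plus background), S = 112 and
+    D = 2, the `u`, `v` and `fine_segm` estimates each have shape
+    [7, 25, 112, 112] and `coarse_segm` has shape [7, 2, 112, 112].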
+ + The losses are: + * regression (smooth L1) loss for U and V coordinates + * cross entropy loss for fine (I) and coarse (S) segmentations + Each loss has an associated weight + """ + + def __init__(self, cfg: CfgNode): + """ + Initialize chart-based loss from configuration options + + Args: + cfg (CfgNode): configuration options + """ + # fmt: off + self.heatmap_size = cfg.MODEL.ROI_DENSEPOSE_HEAD.HEATMAP_SIZE + self.w_points = cfg.MODEL.ROI_DENSEPOSE_HEAD.POINT_REGRESSION_WEIGHTS + self.w_part = cfg.MODEL.ROI_DENSEPOSE_HEAD.PART_WEIGHTS + self.w_segm = cfg.MODEL.ROI_DENSEPOSE_HEAD.INDEX_WEIGHTS + self.n_segm_chan = cfg.MODEL.ROI_DENSEPOSE_HEAD.NUM_COARSE_SEGM_CHANNELS + # fmt: on + self.segm_trained_by_masks = cfg.MODEL.ROI_DENSEPOSE_HEAD.COARSE_SEGM_TRAINED_BY_MASKS + self.segm_loss = MaskOrSegmentationLoss(cfg) + + def __call__( + self, proposals_with_gt: List[Instances], densepose_predictor_outputs: Any, **kwargs + ) -> LossDict: + """ + Produce chart-based DensePose losses + + Args: + proposals_with_gt (list of Instances): detections with associated ground truth data + densepose_predictor_outputs: an object of a dataclass that contains predictor outputs + with estimated values; assumed to have the following attributes: + * coarse_segm - coarse segmentation estimates, tensor of shape [N, D, S, S] + * fine_segm - fine segmentation estimates, tensor of shape [N, C, S, S] + * u - U coordinate estimates per fine labels, tensor of shape [N, C, S, S] + * v - V coordinate estimates per fine labels, tensor of shape [N, C, S, S] + where N is the number of detections, C is the number of fine segmentation + labels, S is the estimate size ( = width = height) and D is the number of + coarse segmentation channels. + + Return: + dict: str -> tensor: dict of losses with the following entries: + * `loss_densepose_U`: smooth L1 loss for U coordinate estimates + * `loss_densepose_V`: smooth L1 loss for V coordinate estimates + * `loss_densepose_I`: cross entropy for raw unnormalized scores for fine + segmentation estimates given ground truth labels; + * `loss_densepose_S`: cross entropy for raw unnormalized scores for coarse + segmentation estimates given ground truth labels; + """ + # densepose outputs are computed for all images and all bounding boxes; + # i.e. if a batch has 4 images with (3, 1, 2, 1) proposals respectively, + # the outputs will have size(0) == 3+1+2+1 == 7 + + if not len(proposals_with_gt): + return self.produce_fake_densepose_losses(densepose_predictor_outputs) + + accumulator = ChartBasedAnnotationsAccumulator() + packed_annotations = extract_packed_annotations_from_matches(proposals_with_gt, accumulator) + + # NOTE: we need to keep the same computation graph on all the GPUs to + # perform reduction properly. Hence even if we have no data on one + # of the GPUs, we still need to generate the computation graph. 
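+        # (Illustrative: a term of the form `t.sum() * 0` evaluates to zero
+        # but keeps `t` in the autograd graph, so every rank reports
+        # gradients, zeros included, for the same set of parameters.)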
+ # Add fake (zero) loss in the form Tensor.sum() * 0 + if packed_annotations is None: + return self.produce_fake_densepose_losses(densepose_predictor_outputs) + + h, w = densepose_predictor_outputs.u.shape[2:] + interpolator = BilinearInterpolationHelper.from_matches( + packed_annotations, + (h, w), + ) + + j_valid_fg = interpolator.j_valid * ( # pyre-ignore[16] + packed_annotations.fine_segm_labels_gt > 0 + ) + if not torch.any(j_valid_fg): + return self.produce_fake_densepose_losses(densepose_predictor_outputs) + + losses_uv = self.produce_densepose_losses_uv( + proposals_with_gt, + densepose_predictor_outputs, + packed_annotations, + interpolator, + j_valid_fg, # pyre-ignore[6] + ) + + losses_segm = self.produce_densepose_losses_segm( + proposals_with_gt, + densepose_predictor_outputs, + packed_annotations, + interpolator, + j_valid_fg, # pyre-ignore[6] + ) + + return {**losses_uv, **losses_segm} + + def produce_fake_densepose_losses(self, densepose_predictor_outputs: Any) -> LossDict: + """ + Fake losses for fine segmentation and U/V coordinates. These are used when + no suitable ground truth data was found in a batch. The loss has a value 0 + and is primarily used to construct the computation graph, so that + `DistributedDataParallel` has similar graphs on all GPUs and can perform + reduction properly. + + Args: + densepose_predictor_outputs: DensePose predictor outputs, an object + of a dataclass that is assumed to have the following attributes: + * fine_segm - fine segmentation estimates, tensor of shape [N, C, S, S] + * u - U coordinate estimates per fine labels, tensor of shape [N, C, S, S] + * v - V coordinate estimates per fine labels, tensor of shape [N, C, S, S] + Return: + dict: str -> tensor: dict of losses with the following entries: + * `loss_densepose_U`: has value 0 + * `loss_densepose_V`: has value 0 + * `loss_densepose_I`: has value 0 + * `loss_densepose_S`: has value 0 + """ + losses_uv = self.produce_fake_densepose_losses_uv(densepose_predictor_outputs) + losses_segm = self.produce_fake_densepose_losses_segm(densepose_predictor_outputs) + return {**losses_uv, **losses_segm} + + def produce_fake_densepose_losses_uv(self, densepose_predictor_outputs: Any) -> LossDict: + """ + Fake losses for U/V coordinates. These are used when no suitable ground + truth data was found in a batch. The loss has a value 0 + and is primarily used to construct the computation graph, so that + `DistributedDataParallel` has similar graphs on all GPUs and can perform + reduction properly. + + Args: + densepose_predictor_outputs: DensePose predictor outputs, an object + of a dataclass that is assumed to have the following attributes: + * u - U coordinate estimates per fine labels, tensor of shape [N, C, S, S] + * v - V coordinate estimates per fine labels, tensor of shape [N, C, S, S] + Return: + dict: str -> tensor: dict of losses with the following entries: + * `loss_densepose_U`: has value 0 + * `loss_densepose_V`: has value 0 + """ + return { + "loss_densepose_U": densepose_predictor_outputs.u.sum() * 0, + "loss_densepose_V": densepose_predictor_outputs.v.sum() * 0, + } + + def produce_fake_densepose_losses_segm(self, densepose_predictor_outputs: Any) -> LossDict: + """ + Fake losses for fine / coarse segmentation. These are used when + no suitable ground truth data was found in a batch. The loss has a value 0 + and is primarily used to construct the computation graph, so that + `DistributedDataParallel` has similar graphs on all GPUs and can perform + reduction properly. 
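+
+        For example (illustrative): each returned entry is a differentiable
+        zero such as `fine_segm.sum() * 0`, rather than a detached constant
+        like `torch.zeros(())`, so the backward pass still reaches the
+        predictor outputs on ranks that had no usable ground truth.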
+ + Args: + densepose_predictor_outputs: DensePose predictor outputs, an object + of a dataclass that is assumed to have the following attributes: + * fine_segm - fine segmentation estimates, tensor of shape [N, C, S, S] + * coarse_segm - coarse segmentation estimates, tensor of shape [N, D, S, S] + Return: + dict: str -> tensor: dict of losses with the following entries: + * `loss_densepose_I`: has value 0 + * `loss_densepose_S`: has value 0, added only if `segm_trained_by_masks` is False + """ + losses = { + "loss_densepose_I": densepose_predictor_outputs.fine_segm.sum() * 0, + "loss_densepose_S": self.segm_loss.fake_value(densepose_predictor_outputs), + } + return losses + + def produce_densepose_losses_uv( + self, + proposals_with_gt: List[Instances], + densepose_predictor_outputs: Any, + packed_annotations: Any, + interpolator: BilinearInterpolationHelper, + j_valid_fg: torch.Tensor, + ) -> LossDict: + """ + Compute losses for U/V coordinates: smooth L1 loss between + estimated coordinates and the ground truth. + + Args: + proposals_with_gt (list of Instances): detections with associated ground truth data + densepose_predictor_outputs: DensePose predictor outputs, an object + of a dataclass that is assumed to have the following attributes: + * u - U coordinate estimates per fine labels, tensor of shape [N, C, S, S] + * v - V coordinate estimates per fine labels, tensor of shape [N, C, S, S] + Return: + dict: str -> tensor: dict of losses with the following entries: + * `loss_densepose_U`: smooth L1 loss for U coordinate estimates + * `loss_densepose_V`: smooth L1 loss for V coordinate estimates + """ + u_gt = packed_annotations.u_gt[j_valid_fg] + u_est = interpolator.extract_at_points(densepose_predictor_outputs.u)[j_valid_fg] + v_gt = packed_annotations.v_gt[j_valid_fg] + v_est = interpolator.extract_at_points(densepose_predictor_outputs.v)[j_valid_fg] + return { + "loss_densepose_U": F.smooth_l1_loss(u_est, u_gt, reduction="sum") * self.w_points, + "loss_densepose_V": F.smooth_l1_loss(v_est, v_gt, reduction="sum") * self.w_points, + } + + def produce_densepose_losses_segm( + self, + proposals_with_gt: List[Instances], + densepose_predictor_outputs: Any, + packed_annotations: Any, + interpolator: BilinearInterpolationHelper, + j_valid_fg: torch.Tensor, + ) -> LossDict: + """ + Losses for fine / coarse segmentation: cross-entropy + for segmentation unnormalized scores given ground truth labels at + annotated points for fine segmentation and dense mask annotations + for coarse segmentation. 
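+
+        Put differently (illustrative summary): the fine segmentation scores
+        are read out only at the sparsely annotated points (via bilinear
+        interpolation of the [N, C, S, S] estimates), while the coarse
+        segmentation term is computed densely over the S x S grid of each box.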
+ + Args: + proposals_with_gt (list of Instances): detections with associated ground truth data + densepose_predictor_outputs: DensePose predictor outputs, an object + of a dataclass that is assumed to have the following attributes: + * fine_segm - fine segmentation estimates, tensor of shape [N, C, S, S] + * coarse_segm - coarse segmentation estimates, tensor of shape [N, D, S, S] + Return: + dict: str -> tensor: dict of losses with the following entries: + * `loss_densepose_I`: cross entropy for raw unnormalized scores for fine + segmentation estimates given ground truth labels + * `loss_densepose_S`: cross entropy for raw unnormalized scores for coarse + segmentation estimates given ground truth labels; + may be included if coarse segmentation is only trained + using DensePose ground truth; if additional supervision through + instance segmentation data is performed (`segm_trained_by_masks` is True), + this loss is handled by `produce_mask_losses` instead + """ + fine_segm_gt = packed_annotations.fine_segm_labels_gt[ + interpolator.j_valid # pyre-ignore[16] + ] + fine_segm_est = interpolator.extract_at_points( + densepose_predictor_outputs.fine_segm, + slice_fine_segm=slice(None), + w_ylo_xlo=interpolator.w_ylo_xlo[:, None], # pyre-ignore[16] + w_ylo_xhi=interpolator.w_ylo_xhi[:, None], # pyre-ignore[16] + w_yhi_xlo=interpolator.w_yhi_xlo[:, None], # pyre-ignore[16] + w_yhi_xhi=interpolator.w_yhi_xhi[:, None], # pyre-ignore[16] + )[interpolator.j_valid, :] + return { + "loss_densepose_I": F.cross_entropy(fine_segm_est, fine_segm_gt.long()) * self.w_part, + "loss_densepose_S": self.segm_loss( + proposals_with_gt, densepose_predictor_outputs, packed_annotations + ) + * self.w_segm, + } diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/chart_with_confidences.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/chart_with_confidences.py new file mode 100644 index 0000000..39f76b4 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/chart_with_confidences.py @@ -0,0 +1,206 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import math +from typing import Any, List +import torch +from torch import nn +from torch.nn import functional as F + +from detectron2.config import CfgNode +from detectron2.structures import Instances + +from .. import DensePoseConfidenceModelConfig, DensePoseUVConfidenceType +from .chart import DensePoseChartLoss +from .registry import DENSEPOSE_LOSS_REGISTRY +from .utils import BilinearInterpolationHelper, LossDict + + +@DENSEPOSE_LOSS_REGISTRY.register() +class DensePoseChartWithConfidenceLoss(DensePoseChartLoss): + """ """ + + def __init__(self, cfg: CfgNode): + super().__init__(cfg) + self.confidence_model_cfg = DensePoseConfidenceModelConfig.from_cfg(cfg) + if self.confidence_model_cfg.uv_confidence.type == DensePoseUVConfidenceType.IID_ISO: + self.uv_loss_with_confidences = IIDIsotropicGaussianUVLoss( + self.confidence_model_cfg.uv_confidence.epsilon + ) + elif self.confidence_model_cfg.uv_confidence.type == DensePoseUVConfidenceType.INDEP_ANISO: + self.uv_loss_with_confidences = IndepAnisotropicGaussianUVLoss( + self.confidence_model_cfg.uv_confidence.epsilon + ) + + def produce_fake_densepose_losses_uv(self, densepose_predictor_outputs: Any) -> LossDict: + """ + Overrides fake losses for fine segmentation and U/V coordinates to + include computation graphs for additional confidence parameters. + These are used when no suitable ground truth data was found in a batch. 
+ The loss has a value 0 and is primarily used to construct the computation graph, + so that `DistributedDataParallel` has similar graphs on all GPUs and can + perform reduction properly. + + Args: + densepose_predictor_outputs: DensePose predictor outputs, an object + of a dataclass that is assumed to have the following attributes: + * fine_segm - fine segmentation estimates, tensor of shape [N, C, S, S] + * u - U coordinate estimates per fine labels, tensor of shape [N, C, S, S] + * v - V coordinate estimates per fine labels, tensor of shape [N, C, S, S] + Return: + dict: str -> tensor: dict of losses with the following entries: + * `loss_densepose_U`: has value 0 + * `loss_densepose_V`: has value 0 + * `loss_densepose_I`: has value 0 + """ + conf_type = self.confidence_model_cfg.uv_confidence.type + if self.confidence_model_cfg.uv_confidence.enabled: + loss_uv = ( + densepose_predictor_outputs.u.sum() + densepose_predictor_outputs.v.sum() + ) * 0 + if conf_type == DensePoseUVConfidenceType.IID_ISO: + loss_uv += densepose_predictor_outputs.sigma_2.sum() * 0 + elif conf_type == DensePoseUVConfidenceType.INDEP_ANISO: + loss_uv += ( + densepose_predictor_outputs.sigma_2.sum() + + densepose_predictor_outputs.kappa_u.sum() + + densepose_predictor_outputs.kappa_v.sum() + ) * 0 + return {"loss_densepose_UV": loss_uv} + else: + return super().produce_fake_densepose_losses_uv(densepose_predictor_outputs) + + def produce_densepose_losses_uv( + self, + proposals_with_gt: List[Instances], + densepose_predictor_outputs: Any, + packed_annotations: Any, + interpolator: BilinearInterpolationHelper, + j_valid_fg: torch.Tensor, + ) -> LossDict: + conf_type = self.confidence_model_cfg.uv_confidence.type + if self.confidence_model_cfg.uv_confidence.enabled: + u_gt = packed_annotations.u_gt[j_valid_fg] + u_est = interpolator.extract_at_points(densepose_predictor_outputs.u)[j_valid_fg] + v_gt = packed_annotations.v_gt[j_valid_fg] + v_est = interpolator.extract_at_points(densepose_predictor_outputs.v)[j_valid_fg] + sigma_2_est = interpolator.extract_at_points(densepose_predictor_outputs.sigma_2)[ + j_valid_fg + ] + if conf_type == DensePoseUVConfidenceType.IID_ISO: + return { + "loss_densepose_UV": ( + self.uv_loss_with_confidences(u_est, v_est, sigma_2_est, u_gt, v_gt) + * self.w_points + ) + } + elif conf_type in [DensePoseUVConfidenceType.INDEP_ANISO]: + kappa_u_est = interpolator.extract_at_points(densepose_predictor_outputs.kappa_u)[ + j_valid_fg + ] + kappa_v_est = interpolator.extract_at_points(densepose_predictor_outputs.kappa_v)[ + j_valid_fg + ] + return { + "loss_densepose_UV": ( + self.uv_loss_with_confidences( + u_est, v_est, sigma_2_est, kappa_u_est, kappa_v_est, u_gt, v_gt + ) + * self.w_points + ) + } + return super().produce_densepose_losses_uv( + proposals_with_gt, + densepose_predictor_outputs, + packed_annotations, + interpolator, + j_valid_fg, + ) + + +class IIDIsotropicGaussianUVLoss(nn.Module): + """ + Loss for the case of iid residuals with isotropic covariance: + $Sigma_i = sigma_i^2 I$ + The loss (negative log likelihood) is then: + $1/2 sum_{i=1}^n (log(2 pi) + 2 log sigma_i^2 + ||delta_i||^2 / sigma_i^2)$, + where $delta_i=(u - u', v - v')$ is a 2D vector containing UV coordinates + difference between estimated and ground truth UV values + For details, see: + N. Neverova, D. Novotny, A. Vedaldi "Correlated Uncertainty for Learning + Dense Correspondences from Noisy Labels", p. 918--926, in Proc. 
NIPS 2019
+    """
+
+    def __init__(self, sigma_lower_bound: float):
+        super(IIDIsotropicGaussianUVLoss, self).__init__()
+        self.sigma_lower_bound = sigma_lower_bound
+        self.log2pi = math.log(2 * math.pi)
+
+    def forward(
+        self,
+        u: torch.Tensor,
+        v: torch.Tensor,
+        sigma_u: torch.Tensor,
+        target_u: torch.Tensor,
+        target_v: torch.Tensor,
+    ):
+        # compute $\sigma_i^2$
+        # use sigma_lower_bound to avoid degenerate solution for variance
+        # (sigma -> 0)
+        sigma2 = F.softplus(sigma_u) + self.sigma_lower_bound
+        # compute \|delta_i\|^2
+        delta_t_delta = (u - target_u) ** 2 + (v - target_v) ** 2
+        # the total loss from the formula above:
+        loss = 0.5 * (self.log2pi + 2 * torch.log(sigma2) + delta_t_delta / sigma2)
+        # pyre-fixme[16]: `float` has no attribute `sum`.
+        return loss.sum()
+
+
+class IndepAnisotropicGaussianUVLoss(nn.Module):
+    """
+    Loss for the case of independent residuals with anisotropic covariances:
+    $Sigma_i = sigma_i^2 I + r_i r_i^T$
+    The loss (negative log likelihood) is then:
+    $1/2 sum_{i=1}^n (log(2 pi)
+        + log sigma_i^2 (sigma_i^2 + ||r_i||^2)
+        + ||delta_i||^2 / sigma_i^2
+        - <delta_i, r_i>^2 / (sigma_i^2 * (sigma_i^2 + ||r_i||^2)))$,
+    where $delta_i=(u - u', v - v')$ is a 2D vector containing UV coordinates
+    difference between estimated and ground truth UV values
+    For details, see:
+    N. Neverova, D. Novotny, A. Vedaldi "Correlated Uncertainty for Learning
+    Dense Correspondences from Noisy Labels", p. 918--926, in Proc. NIPS 2019
+    """
+
+    def __init__(self, sigma_lower_bound: float):
+        super(IndepAnisotropicGaussianUVLoss, self).__init__()
+        self.sigma_lower_bound = sigma_lower_bound
+        self.log2pi = math.log(2 * math.pi)
+
+    def forward(
+        self,
+        u: torch.Tensor,
+        v: torch.Tensor,
+        sigma_u: torch.Tensor,
+        kappa_u_est: torch.Tensor,
+        kappa_v_est: torch.Tensor,
+        target_u: torch.Tensor,
+        target_v: torch.Tensor,
+    ):
+        # compute $\sigma_i^2$
+        sigma2 = F.softplus(sigma_u) + self.sigma_lower_bound
+        # compute \|r_i\|^2
+        r_sqnorm2 = kappa_u_est ** 2 + kappa_v_est ** 2
+        delta_u = u - target_u
+        delta_v = v - target_v
+        # compute \|delta_i\|^2
+        delta_sqnorm = delta_u ** 2 + delta_v ** 2
+        delta_u_r_u = delta_u * kappa_u_est
+        delta_v_r_v = delta_v * kappa_v_est
+        # compute the scalar product <delta_i, r_i>
+        delta_r = delta_u_r_u + delta_v_r_v
+        # compute squared scalar product <delta_i, r_i>^2
+        delta_r_sqnorm = delta_r ** 2
+        denom2 = sigma2 * (sigma2 + r_sqnorm2)
+        loss = 0.5 * (
+            self.log2pi + torch.log(denom2) + delta_sqnorm / sigma2 - delta_r_sqnorm / denom2
+        )
+        return loss.sum()  # pyre-ignore[16]
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/cse.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/cse.py
new file mode 100644
index 0000000..dd561ad
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/cse.py
@@ -0,0 +1,115 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
All Rights Reserved + +from typing import Any, List +from torch import nn + +from detectron2.config import CfgNode +from detectron2.structures import Instances + +from .cycle_pix2shape import PixToShapeCycleLoss +from .cycle_shape2shape import ShapeToShapeCycleLoss +from .embed import EmbeddingLoss +from .embed_utils import CseAnnotationsAccumulator +from .mask_or_segm import MaskOrSegmentationLoss +from .registry import DENSEPOSE_LOSS_REGISTRY +from .soft_embed import SoftEmbeddingLoss +from .utils import BilinearInterpolationHelper, LossDict, extract_packed_annotations_from_matches + + +@DENSEPOSE_LOSS_REGISTRY.register() +class DensePoseCseLoss: + """ """ + + _EMBED_LOSS_REGISTRY = { + EmbeddingLoss.__name__: EmbeddingLoss, + SoftEmbeddingLoss.__name__: SoftEmbeddingLoss, + } + + def __init__(self, cfg: CfgNode): + """ + Initialize CSE loss from configuration options + + Args: + cfg (CfgNode): configuration options + """ + self.w_segm = cfg.MODEL.ROI_DENSEPOSE_HEAD.INDEX_WEIGHTS + self.w_embed = cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBED_LOSS_WEIGHT + self.segm_loss = MaskOrSegmentationLoss(cfg) + self.embed_loss = DensePoseCseLoss.create_embed_loss(cfg) + self.do_shape2shape = cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.SHAPE_TO_SHAPE_CYCLE_LOSS.ENABLED + if self.do_shape2shape: + self.w_shape2shape = cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.SHAPE_TO_SHAPE_CYCLE_LOSS.WEIGHT + self.shape2shape_loss = ShapeToShapeCycleLoss(cfg) + self.do_pix2shape = cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.PIX_TO_SHAPE_CYCLE_LOSS.ENABLED + if self.do_pix2shape: + self.w_pix2shape = cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.PIX_TO_SHAPE_CYCLE_LOSS.WEIGHT + self.pix2shape_loss = PixToShapeCycleLoss(cfg) + + @classmethod + def create_embed_loss(cls, cfg: CfgNode): + # registry not used here, since embedding losses are currently local + # and are not used anywhere else + return cls._EMBED_LOSS_REGISTRY[cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBED_LOSS_NAME](cfg) + + def __call__( + self, + proposals_with_gt: List[Instances], + densepose_predictor_outputs: Any, + embedder: nn.Module, + ) -> LossDict: + if not len(proposals_with_gt): + return self.produce_fake_losses(densepose_predictor_outputs, embedder) + accumulator = CseAnnotationsAccumulator() + packed_annotations = extract_packed_annotations_from_matches(proposals_with_gt, accumulator) + if packed_annotations is None: + return self.produce_fake_losses(densepose_predictor_outputs, embedder) + h, w = densepose_predictor_outputs.embedding.shape[2:] + interpolator = BilinearInterpolationHelper.from_matches( + packed_annotations, + (h, w), + ) + meshid_to_embed_losses = self.embed_loss( + proposals_with_gt, + densepose_predictor_outputs, + packed_annotations, + interpolator, + embedder, + ) + embed_loss_dict = { + f"loss_densepose_E{meshid}": self.w_embed * meshid_to_embed_losses[meshid] + for meshid in meshid_to_embed_losses + } + all_loss_dict = { + "loss_densepose_S": self.w_segm + * self.segm_loss(proposals_with_gt, densepose_predictor_outputs, packed_annotations), + **embed_loss_dict, + } + if self.do_shape2shape: + all_loss_dict["loss_shape2shape"] = self.w_shape2shape * self.shape2shape_loss(embedder) + if self.do_pix2shape: + all_loss_dict["loss_pix2shape"] = self.w_pix2shape * self.pix2shape_loss( + proposals_with_gt, densepose_predictor_outputs, packed_annotations, embedder + ) + return all_loss_dict + + def produce_fake_losses( + self, densepose_predictor_outputs: Any, embedder: nn.Module + ) -> LossDict: + meshname_to_embed_losses = self.embed_loss.fake_values( + densepose_predictor_outputs, 
embedder=embedder + ) + embed_loss_dict = { + f"loss_densepose_E{mesh_name}": meshname_to_embed_losses[mesh_name] + for mesh_name in meshname_to_embed_losses + } + all_loss_dict = { + "loss_densepose_S": self.segm_loss.fake_value(densepose_predictor_outputs), + **embed_loss_dict, + } + if self.do_shape2shape: + all_loss_dict["loss_shape2shape"] = self.shape2shape_loss.fake_value(embedder) + if self.do_pix2shape: + all_loss_dict["loss_pix2shape"] = self.pix2shape_loss.fake_value( + densepose_predictor_outputs, embedder + ) + return all_loss_dict diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/cycle_pix2shape.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/cycle_pix2shape.py new file mode 100644 index 0000000..4447ff4 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/cycle_pix2shape.py @@ -0,0 +1,156 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +from typing import Any, List +import torch +from torch import nn +from torch.nn import functional as F + +from detectron2.config import CfgNode +from detectron2.structures import Instances + +from densepose.data.meshes.catalog import MeshCatalog +from densepose.modeling.cse.utils import normalize_embeddings, squared_euclidean_distance_matrix + +from .embed_utils import PackedCseAnnotations +from .mask import extract_data_for_mask_loss_from_matches + + +def _create_pixel_dist_matrix(grid_size: int) -> torch.Tensor: + rows = torch.arange(grid_size) + cols = torch.arange(grid_size) + # at index `i` contains [row, col], where + # row = i // grid_size + # col = i % grid_size + pix_coords = ( + torch.stack(torch.meshgrid(rows, cols), -1).reshape((grid_size * grid_size, 2)).float() + ) + return squared_euclidean_distance_matrix(pix_coords, pix_coords) + + +def _sample_fg_pixels_randperm(fg_mask: torch.Tensor, sample_size: int) -> torch.Tensor: + fg_mask_flattened = fg_mask.reshape((-1,)) + num_pixels = int(fg_mask_flattened.sum().item()) + fg_pixel_indices = fg_mask_flattened.nonzero(as_tuple=True)[0] + if (sample_size <= 0) or (num_pixels <= sample_size): + return fg_pixel_indices + sample_indices = torch.randperm(num_pixels, device=fg_mask.device)[:sample_size] + return fg_pixel_indices[sample_indices] + + +def _sample_fg_pixels_multinomial(fg_mask: torch.Tensor, sample_size: int) -> torch.Tensor: + fg_mask_flattened = fg_mask.reshape((-1,)) + num_pixels = int(fg_mask_flattened.sum().item()) + if (sample_size <= 0) or (num_pixels <= sample_size): + return fg_mask_flattened.nonzero(as_tuple=True)[0] + return fg_mask_flattened.float().multinomial(sample_size, replacement=False) # pyre-ignore[16] + + +class PixToShapeCycleLoss(nn.Module): + """ + Cycle loss for pixel-vertex correspondence + """ + + def __init__(self, cfg: CfgNode): + super().__init__() + self.shape_names = list(cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBEDDERS.keys()) + self.embed_size = cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBED_SIZE + self.norm_p = cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.PIX_TO_SHAPE_CYCLE_LOSS.NORM_P + self.use_all_meshes_not_gt_only = ( + cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.PIX_TO_SHAPE_CYCLE_LOSS.USE_ALL_MESHES_NOT_GT_ONLY + ) + self.num_pixels_to_sample = ( + cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.PIX_TO_SHAPE_CYCLE_LOSS.NUM_PIXELS_TO_SAMPLE + ) + self.pix_sigma = cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.PIX_TO_SHAPE_CYCLE_LOSS.PIXEL_SIGMA + self.temperature_pix_to_vertex = ( + cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.PIX_TO_SHAPE_CYCLE_LOSS.TEMPERATURE_PIXEL_TO_VERTEX + ) + 
self.temperature_vertex_to_pix = ( + cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.PIX_TO_SHAPE_CYCLE_LOSS.TEMPERATURE_VERTEX_TO_PIXEL + ) + self.pixel_dists = _create_pixel_dist_matrix(cfg.MODEL.ROI_DENSEPOSE_HEAD.HEATMAP_SIZE) + + def forward( + self, + proposals_with_gt: List[Instances], + densepose_predictor_outputs: Any, + packed_annotations: PackedCseAnnotations, + embedder: nn.Module, + ): + """ + Args: + proposals_with_gt (list of Instances): detections with associated + ground truth data; each item corresponds to instances detected + on 1 image; the number of items corresponds to the number of + images in a batch + densepose_predictor_outputs: an object of a dataclass that contains predictor + outputs with estimated values; assumed to have the following attributes: + * embedding - embedding estimates, tensor of shape [N, D, S, S], where + N = number of instances (= sum N_i, where N_i is the number of + instances on image i) + D = embedding space dimensionality (MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBED_SIZE) + S = output size (width and height) + packed_annotations (PackedCseAnnotations): contains various data useful + for loss computation, each data is packed into a single tensor + embedder (nn.Module): module that computes vertex embeddings for different meshes + """ + pix_embeds = densepose_predictor_outputs.embedding + if self.pixel_dists.device != pix_embeds.device: + # should normally be done only once + self.pixel_dists = self.pixel_dists.to(device=pix_embeds.device) + with torch.no_grad(): + mask_loss_data = extract_data_for_mask_loss_from_matches( + proposals_with_gt, densepose_predictor_outputs.coarse_segm + ) + # GT masks - tensor of shape [N, S, S] of int64 + masks_gt = mask_loss_data.masks_gt.long() # pyre-ignore[16] + assert len(pix_embeds) == len(masks_gt), ( + f"Number of instances with embeddings {len(pix_embeds)} != " + f"number of instances with GT masks {len(masks_gt)}" + ) + losses = [] + mesh_names = ( + self.shape_names + if self.use_all_meshes_not_gt_only + else [ + MeshCatalog.get_mesh_name(mesh_id.item()) + for mesh_id in packed_annotations.vertex_mesh_ids_gt.unique() # pyre-ignore[16] + ] + ) + for pixel_embeddings, mask_gt in zip(pix_embeds, masks_gt): + # pixel_embeddings [D, S, S] + # mask_gt [S, S] + for mesh_name in mesh_names: + mesh_vertex_embeddings = embedder(mesh_name) + # pixel indices [M] + pixel_indices_flattened = _sample_fg_pixels_randperm( + mask_gt, self.num_pixels_to_sample + ) + # pixel distances [M, M] + pixel_dists = self.pixel_dists.to(pixel_embeddings.device)[ + torch.meshgrid(pixel_indices_flattened, pixel_indices_flattened) + ] + # pixel embeddings [M, D] + pixel_embeddings_sampled = normalize_embeddings( + pixel_embeddings.reshape((self.embed_size, -1))[:, pixel_indices_flattened].T + ) + # pixel-vertex similarity [M, K] + sim_matrix = pixel_embeddings_sampled.mm( # pyre-ignore[16] + mesh_vertex_embeddings.T + ) + c_pix_vertex = F.softmax(sim_matrix / self.temperature_pix_to_vertex, dim=1) + c_vertex_pix = F.softmax(sim_matrix.T / self.temperature_vertex_to_pix, dim=1) + c_cycle = c_pix_vertex.mm(c_vertex_pix) + loss_cycle = torch.norm(pixel_dists * c_cycle, p=self.norm_p) + losses.append(loss_cycle) + + if len(losses) == 0: + return pix_embeds.sum() * 0 + return torch.stack(losses, dim=0).mean() + + def fake_value(self, densepose_predictor_outputs: Any, embedder: nn.Module): + losses = [ + embedder(mesh_name).sum() * 0 for mesh_name in embedder.mesh_names # pyre-ignore[29] + ] + losses.append(densepose_predictor_outputs.embedding.sum() * 0) + 
return torch.mean(torch.stack(losses)) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/cycle_shape2shape.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/cycle_shape2shape.py new file mode 100644 index 0000000..d2adc13 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/cycle_shape2shape.py @@ -0,0 +1,117 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import random +from typing import Tuple +import torch +from torch import nn +from torch.nn import functional as F + +from detectron2.config import CfgNode + +from densepose.structures.mesh import create_mesh + +from .utils import sample_random_indices + + +class ShapeToShapeCycleLoss(nn.Module): + """ + Cycle Loss for Shapes. + Inspired by: + "Mapping in a Cycle: Sinkhorn Regularized Unsupervised Learning for Point Cloud Shapes". + """ + + def __init__(self, cfg: CfgNode): + super().__init__() + self.shape_names = list(cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBEDDERS.keys()) + self.all_shape_pairs = [ + (x, y) for i, x in enumerate(self.shape_names) for y in self.shape_names[i + 1 :] + ] + random.shuffle(self.all_shape_pairs) + self.cur_pos = 0 + self.norm_p = cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.SHAPE_TO_SHAPE_CYCLE_LOSS.NORM_P + self.temperature = cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.SHAPE_TO_SHAPE_CYCLE_LOSS.TEMPERATURE + self.max_num_vertices = ( + cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.SHAPE_TO_SHAPE_CYCLE_LOSS.MAX_NUM_VERTICES + ) + + def _sample_random_pair(self) -> Tuple[str, str]: + """ + Produce a random pair of different mesh names + + Return: + tuple(str, str): a pair of different mesh names + """ + if self.cur_pos >= len(self.all_shape_pairs): + random.shuffle(self.all_shape_pairs) + self.cur_pos = 0 + shape_pair = self.all_shape_pairs[self.cur_pos] + self.cur_pos += 1 + return shape_pair + + def forward(self, embedder: nn.Module): + """ + Do a forward pass with a random pair (src, dst) pair of shapes + Args: + embedder (nn.Module): module that computes vertex embeddings for different meshes + """ + src_mesh_name, dst_mesh_name = self._sample_random_pair() + return self._forward_one_pair(embedder, src_mesh_name, dst_mesh_name) + + def fake_value(self, embedder: nn.Module): + losses = [] + for mesh_name in embedder.mesh_names: # pyre-ignore[29] + losses.append(embedder(mesh_name).sum() * 0) + return torch.mean(torch.stack(losses)) + + def _get_embeddings_and_geodists_for_mesh( + self, embedder: nn.Module, mesh_name: str + ) -> Tuple[torch.Tensor, torch.Tensor]: + """ + Produces embeddings and geodesic distance tensors for a given mesh. May subsample + the mesh, if it contains too many vertices (controlled by + SHAPE_CYCLE_LOSS_MAX_NUM_VERTICES parameter). 
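+ For instance (illustrative numbers only): with MAX_NUM_VERTICES = 5000
+ and a mesh of 6890 vertices, 5000 vertex indices are sampled and both
+ the embeddings and the geodesic distance matrix are restricted to them.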
+ Args: + embedder (nn.Module): module that computes embeddings for mesh vertices + mesh_name (str): mesh name + Return: + embeddings (torch.Tensor of size [N, D]): embeddings for selected mesh + vertices (N = number of selected vertices, D = embedding space dim) + geodists (torch.Tensor of size [N, N]): geodesic distances for the selected + mesh vertices (N = number of selected vertices) + """ + embeddings = embedder(mesh_name) + indices = sample_random_indices( + embeddings.shape[0], self.max_num_vertices, embeddings.device + ) + mesh = create_mesh(mesh_name, embeddings.device) + geodists = mesh.geodists + if indices is not None: + embeddings = embeddings[indices] + geodists = geodists[torch.meshgrid(indices, indices)] + return embeddings, geodists + + def _forward_one_pair( + self, embedder: nn.Module, mesh_name_1: str, mesh_name_2: str + ) -> torch.Tensor: + """ + Do a forward pass with a selected pair of meshes + Args: + embedder (nn.Module): module that computes vertex embeddings for different meshes + mesh_name_1 (str): first mesh name + mesh_name_2 (str): second mesh name + Return: + Tensor containing the loss value + """ + embeddings_1, geodists_1 = self._get_embeddings_and_geodists_for_mesh(embedder, mesh_name_1) + embeddings_2, geodists_2 = self._get_embeddings_and_geodists_for_mesh(embedder, mesh_name_2) + sim_matrix_12 = embeddings_1.mm(embeddings_2.T) # pyre-ignore[16] + + c_12 = F.softmax(sim_matrix_12 / self.temperature, dim=1) + c_21 = F.softmax(sim_matrix_12.T / self.temperature, dim=1) + c_11 = c_12.mm(c_21) + c_22 = c_21.mm(c_12) + + loss_cycle_11 = torch.norm(geodists_1 * c_11, p=self.norm_p) + loss_cycle_22 = torch.norm(geodists_2 * c_22, p=self.norm_p) + + return loss_cycle_11 + loss_cycle_22 diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/embed.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/embed.py new file mode 100644 index 0000000..4feff03 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/embed.py @@ -0,0 +1,127 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +from typing import Any, Dict, List +import torch +from torch import nn +from torch.nn import functional as F + +from detectron2.config import CfgNode +from detectron2.structures import Instances + +from densepose.data.meshes.catalog import MeshCatalog +from densepose.modeling.cse.utils import normalize_embeddings, squared_euclidean_distance_matrix + +from .embed_utils import PackedCseAnnotations +from .utils import BilinearInterpolationHelper + + +class EmbeddingLoss: + """ + Computes losses for estimated embeddings given annotated vertices. + Instances in a minibatch that correspond to the same mesh are grouped + together. For each group, loss is computed as cross-entropy for + unnormalized scores given ground truth mesh vertex ids. + Scores are based on squared distances between estimated vertex embeddings + and mesh vertex embeddings. + """ + + def __init__(self, cfg: CfgNode): + """ + Initialize embedding loss from config + """ + self.embdist_gauss_sigma = cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBEDDING_DIST_GAUSS_SIGMA + + def __call__( + self, + proposals_with_gt: List[Instances], + densepose_predictor_outputs: Any, + packed_annotations: PackedCseAnnotations, + interpolator: BilinearInterpolationHelper, + embedder: nn.Module, + ) -> Dict[int, torch.Tensor]: + """ + Produces losses for estimated embeddings given annotated vertices. 
+ Embeddings for all the vertices of a mesh are computed by the embedder. + Embeddings for observed pixels are estimated by a predictor. + Losses are computed as cross-entropy for squared distances between + observed vertex embeddings and all mesh vertex embeddings given + ground truth vertex IDs. + + Args: + proposals_with_gt (list of Instances): detections with associated + ground truth data; each item corresponds to instances detected + on 1 image; the number of items corresponds to the number of + images in a batch + densepose_predictor_outputs: an object of a dataclass that contains predictor + outputs with estimated values; assumed to have the following attributes: + * embedding - embedding estimates, tensor of shape [N, D, S, S], where + N = number of instances (= sum N_i, where N_i is the number of + instances on image i) + D = embedding space dimensionality (MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBED_SIZE) + S = output size (width and height) + packed_annotations (PackedCseAnnotations): contains various data useful + for loss computation, each data is packed into a single tensor + interpolator (BilinearInterpolationHelper): bilinear interpolation helper + embedder (nn.Module): module that computes vertex embeddings for different meshes + Return: + dict(int -> tensor): losses for different mesh IDs + """ + losses = {} + for mesh_id_tensor in packed_annotations.vertex_mesh_ids_gt.unique(): # pyre-ignore[16] + mesh_id = mesh_id_tensor.item() + mesh_name = MeshCatalog.get_mesh_name(mesh_id) + # valid points are those that fall into estimated bbox + # and correspond to the current mesh + j_valid = interpolator.j_valid * ( # pyre-ignore[16] + packed_annotations.vertex_mesh_ids_gt == mesh_id + ) + if not torch.any(j_valid): + continue + # extract estimated embeddings for valid points + # -> tensor [J, D] + vertex_embeddings_i = normalize_embeddings( + interpolator.extract_at_points( + densepose_predictor_outputs.embedding, + slice_fine_segm=slice(None), + w_ylo_xlo=interpolator.w_ylo_xlo[:, None], # pyre-ignore[16] + w_ylo_xhi=interpolator.w_ylo_xhi[:, None], # pyre-ignore[16] + w_yhi_xlo=interpolator.w_yhi_xlo[:, None], # pyre-ignore[16] + w_yhi_xhi=interpolator.w_yhi_xhi[:, None], # pyre-ignore[16] + )[j_valid, :] + ) + # extract vertex ids for valid points + # -> tensor [J] + vertex_indices_i = packed_annotations.vertex_ids_gt[j_valid] + # embeddings for all mesh vertices + # -> tensor [K, D] + mesh_vertex_embeddings = embedder(mesh_name) + # unnormalized scores for valid points + # -> tensor [J, K] + scores = squared_euclidean_distance_matrix( + vertex_embeddings_i, mesh_vertex_embeddings + ) / (-self.embdist_gauss_sigma) + losses[mesh_name] = F.cross_entropy(scores, vertex_indices_i, ignore_index=-1) + + # pyre-fixme[29]: + # `Union[BoundMethod[typing.Callable(torch.Tensor.__iter__)[[Named(self, + # torch.Tensor)], typing.Iterator[typing.Any]], torch.Tensor], nn.Module, + # torch.Tensor]` is not a function. + for mesh_name in embedder.mesh_names: + if mesh_name not in losses: + losses[mesh_name] = self.fake_value( + densepose_predictor_outputs, embedder, mesh_name + ) + return losses + + def fake_values(self, densepose_predictor_outputs: Any, embedder: nn.Module): + losses = {} + # pyre-fixme[29]: + # `Union[BoundMethod[typing.Callable(torch.Tensor.__iter__)[[Named(self, + # torch.Tensor)], typing.Iterator[typing.Any]], torch.Tensor], nn.Module, + # torch.Tensor]` is not a function. 
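+ # Editorial summary (added note): in __call__ above, each annotated
+ # point j is scored against every vertex k of its mesh via
+ # -||e_j - E_k||^2 / embdist_gauss_sigma, and cross-entropy over these
+ # scores pulls the point embedding toward its GT vertex embedding.
+ # The loop below emits one zero-valued stand-in loss per mesh instead.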
+ for mesh_name in embedder.mesh_names: + losses[mesh_name] = self.fake_value(densepose_predictor_outputs, embedder, mesh_name) + return losses + + def fake_value(self, densepose_predictor_outputs: Any, embedder: nn.Module, mesh_name: str): + return densepose_predictor_outputs.embedding.sum() * 0 + embedder(mesh_name).sum() * 0 diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/embed_utils.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/embed_utils.py new file mode 100644 index 0000000..eb9492f --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/embed_utils.py @@ -0,0 +1,135 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +from dataclasses import dataclass +from typing import Any, Optional +import torch + +from detectron2.structures import BoxMode, Instances + +from .utils import AnnotationsAccumulator + + +@dataclass +class PackedCseAnnotations: + x_gt: torch.Tensor + y_gt: torch.Tensor + coarse_segm_gt: Optional[torch.Tensor] + vertex_mesh_ids_gt: torch.Tensor + vertex_ids_gt: torch.Tensor + bbox_xywh_gt: torch.Tensor + bbox_xywh_est: torch.Tensor + point_bbox_with_dp_indices: torch.Tensor + point_bbox_indices: torch.Tensor + bbox_indices: torch.Tensor + + +class CseAnnotationsAccumulator(AnnotationsAccumulator): + """ + Accumulates annotations by batches that correspond to objects detected on + individual images. Can pack them together into single tensors. + """ + + def __init__(self): + self.x_gt = [] + self.y_gt = [] + self.s_gt = [] + self.vertex_mesh_ids_gt = [] + self.vertex_ids_gt = [] + self.bbox_xywh_gt = [] + self.bbox_xywh_est = [] + self.point_bbox_with_dp_indices = [] + self.point_bbox_indices = [] + self.bbox_indices = [] + self.nxt_bbox_with_dp_index = 0 + self.nxt_bbox_index = 0 + + def accumulate(self, instances_one_image: Instances): + """ + Accumulate instances data for one image + + Args: + instances_one_image (Instances): instances data to accumulate + """ + boxes_xywh_est = BoxMode.convert( + instances_one_image.proposal_boxes.tensor.clone(), BoxMode.XYXY_ABS, BoxMode.XYWH_ABS + ) + boxes_xywh_gt = BoxMode.convert( + instances_one_image.gt_boxes.tensor.clone(), BoxMode.XYXY_ABS, BoxMode.XYWH_ABS + ) + n_matches = len(boxes_xywh_gt) + assert n_matches == len( + boxes_xywh_est + ), f"Got {len(boxes_xywh_est)} proposal boxes and {len(boxes_xywh_gt)} GT boxes" + if not n_matches: + # no detection - GT matches + return + if ( + not hasattr(instances_one_image, "gt_densepose") + or instances_one_image.gt_densepose is None + ): + # no densepose GT for the detections, just increase the bbox index + self.nxt_bbox_index += n_matches + return + for box_xywh_est, box_xywh_gt, dp_gt in zip( + boxes_xywh_est, boxes_xywh_gt, instances_one_image.gt_densepose + ): + if (dp_gt is not None) and (len(dp_gt.x) > 0): + self._do_accumulate(box_xywh_gt, box_xywh_est, dp_gt) + self.nxt_bbox_index += 1 + + def _do_accumulate(self, box_xywh_gt: torch.Tensor, box_xywh_est: torch.Tensor, dp_gt: Any): + """ + Accumulate instances data for one image, given that the data is not empty + + Args: + box_xywh_gt (tensor): GT bounding box + box_xywh_est (tensor): estimated bounding box + dp_gt: GT densepose data with the following attributes: + - x: normalized X coordinates + - y: normalized Y coordinates + - segm: tensor of size [S, S] with coarse segmentation + - + """ + self.x_gt.append(dp_gt.x) + self.y_gt.append(dp_gt.y) + if hasattr(dp_gt, "segm"): + 
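+ # coarse segmentation GT is optional: it is accumulated only when
+ # present, and pack() below keeps it only if every instance provided it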
self.s_gt.append(dp_gt.segm.unsqueeze(0)) + self.vertex_ids_gt.append(dp_gt.vertex_ids) + self.vertex_mesh_ids_gt.append(torch.full_like(dp_gt.vertex_ids, dp_gt.mesh_id)) + self.bbox_xywh_gt.append(box_xywh_gt.view(-1, 4)) + self.bbox_xywh_est.append(box_xywh_est.view(-1, 4)) + self.point_bbox_with_dp_indices.append( + torch.full_like(dp_gt.vertex_ids, self.nxt_bbox_with_dp_index) + ) + self.point_bbox_indices.append(torch.full_like(dp_gt.vertex_ids, self.nxt_bbox_index)) + self.bbox_indices.append(self.nxt_bbox_index) + self.nxt_bbox_with_dp_index += 1 + + def pack(self) -> Optional[PackedCseAnnotations]: + """ + Pack data into tensors + """ + if not len(self.x_gt): + # TODO: + # returning proper empty annotations would require + # creating empty tensors of appropriate shape and + # type on an appropriate device; + # we return None so far to indicate empty annotations + return None + return PackedCseAnnotations( + x_gt=torch.cat(self.x_gt, 0), + y_gt=torch.cat(self.y_gt, 0), + vertex_mesh_ids_gt=torch.cat(self.vertex_mesh_ids_gt, 0), + vertex_ids_gt=torch.cat(self.vertex_ids_gt, 0), + # ignore segmentation annotations, if not all the instances contain those + coarse_segm_gt=torch.cat(self.s_gt, 0) + if len(self.s_gt) == len(self.bbox_xywh_gt) + else None, + bbox_xywh_gt=torch.cat(self.bbox_xywh_gt, 0), + bbox_xywh_est=torch.cat(self.bbox_xywh_est, 0), + point_bbox_with_dp_indices=torch.cat(self.point_bbox_with_dp_indices, 0), + point_bbox_indices=torch.cat(self.point_bbox_indices, 0), + bbox_indices=torch.as_tensor( + self.bbox_indices, dtype=torch.long, device=self.x_gt[0].device + ), + ) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/mask.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/mask.py new file mode 100644 index 0000000..c16b15c --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/mask.py @@ -0,0 +1,125 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +from dataclasses import dataclass +from typing import Any, Iterable, List, Optional +import torch +from torch.nn import functional as F + +from detectron2.structures import Instances + + +@dataclass +class DataForMaskLoss: + """ + Contains mask GT and estimated data for proposals from multiple images: + """ + + # tensor of size (K, H, W) containing GT labels + masks_gt: Optional[torch.Tensor] = None + # tensor of size (K, C, H, W) containing estimated scores + masks_est: Optional[torch.Tensor] = None + + +def extract_data_for_mask_loss_from_matches( + proposals_targets: Iterable[Instances], estimated_segm: torch.Tensor +) -> DataForMaskLoss: + """ + Extract data for mask loss from instances that contain matched GT and + estimated bounding boxes. 
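+ GT masks are cropped to the matched proposal boxes and resized to the
+ S x S grid of the segmentation estimates before being concatenated.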
+ Args: + proposals_targets: Iterable[Instances] + matched GT and estimated results, each item in the iterable + corresponds to data in 1 image + estimated_segm: tensor(K, C, S, S) of float - raw unnormalized + segmentation scores, here S is the size to which GT masks are + to be resized + Return: + masks_est: tensor(K, C, S, S) of float - class scores + masks_gt: tensor(K, S, S) of int64 - labels + """ + data = DataForMaskLoss() + masks_gt = [] + offset = 0 + assert estimated_segm.shape[2] == estimated_segm.shape[3], ( + f"Expected estimated segmentation to have a square shape, " + f"but the actual shape is {estimated_segm.shape[2:]}" + ) + mask_size = estimated_segm.shape[2] + num_proposals = sum(inst.proposal_boxes.tensor.size(0) for inst in proposals_targets) + num_estimated = estimated_segm.shape[0] + assert ( + num_proposals == num_estimated + ), "The number of proposals {} must be equal to the number of estimates {}".format( + num_proposals, num_estimated + ) + + for proposals_targets_per_image in proposals_targets: + n_i = proposals_targets_per_image.proposal_boxes.tensor.size(0) + if not n_i: + continue + gt_masks_per_image = proposals_targets_per_image.gt_masks.crop_and_resize( + proposals_targets_per_image.proposal_boxes.tensor, mask_size + ).to(device=estimated_segm.device) + masks_gt.append(gt_masks_per_image) + offset += n_i + if masks_gt: + data.masks_est = estimated_segm + data.masks_gt = torch.cat(masks_gt, dim=0) + return data + + +class MaskLoss: + """ + Mask loss as cross-entropy for raw unnormalized scores given ground truth labels. + Mask ground truth labels are defined for the whole image and not only the + bounding box of interest. They are stored as objects that are assumed to implement + the `crop_and_resize` interface (e.g. BitMasks, PolygonMasks). + """ + + def __call__( + self, proposals_with_gt: List[Instances], densepose_predictor_outputs: Any + ) -> torch.Tensor: + """ + Computes segmentation loss as cross-entropy for raw unnormalized + scores given ground truth labels. + + Args: + proposals_with_gt (list of Instances): detections with associated ground truth data + densepose_predictor_outputs: an object of a dataclass that contains predictor outputs + with estimated values; assumed to have the following attribute: + * coarse_segm (tensor of shape [N, D, S, S]): coarse segmentation estimates + as raw unnormalized scores + where N is the number of detections, S is the estimate size ( = width = height) + and D is the number of coarse segmentation channels. + Return: + Cross entropy for raw unnormalized scores for coarse segmentation given + ground truth labels from masks + """ + if not len(proposals_with_gt): + return self.fake_value(densepose_predictor_outputs) + # densepose outputs are computed for all images and all bounding boxes; + # i.e. if a batch has 4 images with (3, 1, 2, 1) proposals respectively, + # the outputs will have size(0) == 3+1+2+1 == 7 + with torch.no_grad(): + mask_loss_data = extract_data_for_mask_loss_from_matches( + proposals_with_gt, densepose_predictor_outputs.coarse_segm + ) + if (mask_loss_data.masks_gt is None) or (mask_loss_data.masks_est is None): + return self.fake_value(densepose_predictor_outputs) + return F.cross_entropy(mask_loss_data.masks_est, mask_loss_data.masks_gt.long()) + + def fake_value(self, densepose_predictor_outputs: Any) -> torch.Tensor: + """ + Fake segmentation loss used when no suitable ground truth data + was found in a batch. 
The loss has a value 0 and is primarily used to + construct the computation graph, so that `DistributedDataParallel` + has similar graphs on all GPUs and can perform reduction properly. + + Args: + densepose_predictor_outputs: DensePose predictor outputs, an object + of a dataclass that is assumed to have `coarse_segm` + attribute + Return: + Zero value loss with proper computation graph + """ + return densepose_predictor_outputs.coarse_segm.sum() * 0 diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/mask_or_segm.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/mask_or_segm.py new file mode 100644 index 0000000..98b773d --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/mask_or_segm.py @@ -0,0 +1,72 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +from typing import Any, List +import torch + +from detectron2.config import CfgNode +from detectron2.structures import Instances + +from .mask import MaskLoss +from .segm import SegmentationLoss + + +class MaskOrSegmentationLoss: + """ + Mask or segmentation loss as cross-entropy for raw unnormalized scores + given ground truth labels. Ground truth labels are either defined by coarse + segmentation annotation, or by mask annotation, depending on the config + value MODEL.ROI_DENSEPOSE_HEAD.COARSE_SEGM_TRAINED_BY_MASKS + """ + + def __init__(self, cfg: CfgNode): + """ + Initialize segmentation loss from configuration options + + Args: + cfg (CfgNode): configuration options + """ + self.segm_trained_by_masks = cfg.MODEL.ROI_DENSEPOSE_HEAD.COARSE_SEGM_TRAINED_BY_MASKS + if self.segm_trained_by_masks: + self.mask_loss = MaskLoss() + self.segm_loss = SegmentationLoss(cfg) + + def __call__( + self, + proposals_with_gt: List[Instances], + densepose_predictor_outputs: Any, + packed_annotations: Any, + ) -> torch.Tensor: + """ + Compute segmentation loss as cross-entropy between aligned unnormalized + score estimates and ground truth; with ground truth given + either by masks, or by coarse segmentation annotations. + + Args: + proposals_with_gt (list of Instances): detections with associated ground truth data + densepose_predictor_outputs: an object of a dataclass that contains predictor outputs + with estimated values; assumed to have the following attributes: + * coarse_segm - coarse segmentation estimates, tensor of shape [N, D, S, S] + packed_annotations: packed annotations for efficient loss computation + Return: + tensor: loss value as cross-entropy for raw unnormalized scores + given ground truth labels + """ + if self.segm_trained_by_masks: + return self.mask_loss(proposals_with_gt, densepose_predictor_outputs) + return self.segm_loss(proposals_with_gt, densepose_predictor_outputs, packed_annotations) + + def fake_value(self, densepose_predictor_outputs: Any) -> torch.Tensor: + """ + Fake segmentation loss used when no suitable ground truth data + was found in a batch. The loss has a value 0 and is primarily used to + construct the computation graph, so that `DistributedDataParallel` + has similar graphs on all GPUs and can perform reduction properly. 
+ + Args: + densepose_predictor_outputs: DensePose predictor outputs, an object + of a dataclass that is assumed to have `coarse_segm` + attribute + Return: + Zero value loss with proper computation graph + """ + return densepose_predictor_outputs.coarse_segm.sum() * 0 diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/registry.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/registry.py new file mode 100644 index 0000000..d9c8817 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/registry.py @@ -0,0 +1,5 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from detectron2.utils.registry import Registry + +DENSEPOSE_LOSS_REGISTRY = Registry("DENSEPOSE_LOSS") diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/segm.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/segm.py new file mode 100644 index 0000000..1962b88 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/segm.py @@ -0,0 +1,83 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +from typing import Any, List +import torch +from torch.nn import functional as F + +from detectron2.config import CfgNode +from detectron2.structures import Instances + +from .utils import resample_data + + +class SegmentationLoss: + """ + Segmentation loss as cross-entropy for raw unnormalized scores given ground truth + labels. Segmentation ground truth labels are defined for the bounding box of + interest at some fixed resolution [S, S], where + S = MODEL.ROI_DENSEPOSE_HEAD.HEATMAP_SIZE. + """ + + def __init__(self, cfg: CfgNode): + """ + Initialize segmentation loss from configuration options + + Args: + cfg (CfgNode): configuration options + """ + self.heatmap_size = cfg.MODEL.ROI_DENSEPOSE_HEAD.HEATMAP_SIZE + self.n_segm_chan = cfg.MODEL.ROI_DENSEPOSE_HEAD.NUM_COARSE_SEGM_CHANNELS + + def __call__( + self, + proposals_with_gt: List[Instances], + densepose_predictor_outputs: Any, + packed_annotations: Any, + ) -> torch.Tensor: + """ + Compute segmentation loss as cross-entropy on aligned segmentation + ground truth and estimated scores. + + Args: + proposals_with_gt (list of Instances): detections with associated ground truth data + densepose_predictor_outputs: an object of a dataclass that contains predictor outputs + with estimated values; assumed to have the following attributes: + * coarse_segm - coarse segmentation estimates, tensor of shape [N, D, S, S] + packed_annotations: packed annotations for efficient loss computation; + the following attributes are used: + - coarse_segm_gt + - bbox_xywh_gt + - bbox_xywh_est + """ + if packed_annotations.coarse_segm_gt is None: + return self.fake_value(densepose_predictor_outputs) + coarse_segm_est = densepose_predictor_outputs.coarse_segm[packed_annotations.bbox_indices] + with torch.no_grad(): + coarse_segm_gt = resample_data( + packed_annotations.coarse_segm_gt.unsqueeze(1), + packed_annotations.bbox_xywh_gt, + packed_annotations.bbox_xywh_est, + self.heatmap_size, + self.heatmap_size, + mode="nearest", + padding_mode="zeros", + ).squeeze(1) + if self.n_segm_chan == 2: + coarse_segm_gt = coarse_segm_gt > 0 + return F.cross_entropy(coarse_segm_est, coarse_segm_gt.long()) + + def fake_value(self, densepose_predictor_outputs: Any) -> torch.Tensor: + """ + Fake segmentation loss used when no suitable ground truth data + was found in a batch. 
The loss has a value 0 and is primarily used to + construct the computation graph, so that `DistributedDataParallel` + has similar graphs on all GPUs and can perform reduction properly. + + Args: + densepose_predictor_outputs: DensePose predictor outputs, an object + of a dataclass that is assumed to have `coarse_segm` + attribute + Return: + Zero value loss with proper computation graph + """ + return densepose_predictor_outputs.coarse_segm.sum() * 0 diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/soft_embed.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/soft_embed.py new file mode 100644 index 0000000..7b055c4 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/soft_embed.py @@ -0,0 +1,141 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +from typing import Any, Dict, List +import torch +from torch import nn +from torch.nn import functional as F + +from detectron2.config import CfgNode +from detectron2.structures import Instances + +from densepose.data.meshes.catalog import MeshCatalog +from densepose.modeling.cse.utils import normalize_embeddings, squared_euclidean_distance_matrix +from densepose.structures.mesh import create_mesh + +from .embed_utils import PackedCseAnnotations +from .utils import BilinearInterpolationHelper + + +class SoftEmbeddingLoss: + """ + Computes losses for estimated embeddings given annotated vertices. + Instances in a minibatch that correspond to the same mesh are grouped + together. For each group, loss is computed as cross-entropy for + unnormalized scores given ground truth mesh vertex ids. + Scores are based on: + 1) squared distances between estimated vertex embeddings + and mesh vertex embeddings; + 2) geodesic distances between vertices of a mesh + """ + + def __init__(self, cfg: CfgNode): + """ + Initialize embedding loss from config + """ + self.embdist_gauss_sigma = cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBEDDING_DIST_GAUSS_SIGMA + self.geodist_gauss_sigma = cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.GEODESIC_DIST_GAUSS_SIGMA + + def __call__( + self, + proposals_with_gt: List[Instances], + densepose_predictor_outputs: Any, + packed_annotations: PackedCseAnnotations, + interpolator: BilinearInterpolationHelper, + embedder: nn.Module, + ) -> Dict[int, torch.Tensor]: + """ + Produces losses for estimated embeddings given annotated vertices. + Embeddings for all the vertices of a mesh are computed by the embedder. + Embeddings for observed pixels are estimated by a predictor. + Losses are computed as cross-entropy for unnormalized scores given + ground truth vertex IDs. 
+ Scores are based on:
+ 1) squared distances between estimated vertex embeddings + and mesh vertex embeddings; + 2) geodesic distances between vertices of a mesh + + Args: + proposals_with_gt (list of Instances): detections with associated + ground truth data; each item corresponds to instances detected + on 1 image; the number of items corresponds to the number of + images in a batch + densepose_predictor_outputs: an object of a dataclass that contains predictor + outputs with estimated values; assumed to have the following attributes: + * embedding - embedding estimates, tensor of shape [N, D, S, S], where + N = number of instances (= sum N_i, where N_i is the number of + instances on image i) + D = embedding space dimensionality (MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBED_SIZE) + S = output size (width and height) + packed_annotations (PackedCseAnnotations): contains various data useful + for loss computation, each data is packed into a single tensor + interpolator (BilinearInterpolationHelper): bilinear interpolation helper + embedder (nn.Module): module that computes vertex embeddings for different meshes + Return: + dict(int -> tensor): losses for different mesh IDs + """ + losses = {} + for mesh_id_tensor in packed_annotations.vertex_mesh_ids_gt.unique(): # pyre-ignore[16] + mesh_id = mesh_id_tensor.item() + mesh_name = MeshCatalog.get_mesh_name(mesh_id) + # valid points are those that fall into estimated bbox + # and correspond to the current mesh + j_valid = interpolator.j_valid * ( # pyre-ignore[16] + packed_annotations.vertex_mesh_ids_gt == mesh_id + ) + if not torch.any(j_valid): + continue + # extract estimated embeddings for valid points + # -> tensor [J, D] + vertex_embeddings_i = normalize_embeddings( + interpolator.extract_at_points( + densepose_predictor_outputs.embedding, + slice_fine_segm=slice(None), + w_ylo_xlo=interpolator.w_ylo_xlo[:, None], # pyre-ignore[16] + w_ylo_xhi=interpolator.w_ylo_xhi[:, None], # pyre-ignore[16] + w_yhi_xlo=interpolator.w_yhi_xlo[:, None], # pyre-ignore[16] + w_yhi_xhi=interpolator.w_yhi_xhi[:, None], # pyre-ignore[16] + )[j_valid, :] + ) + # extract vertex ids for valid points + # -> tensor [J] + vertex_indices_i = packed_annotations.vertex_ids_gt[j_valid] + # embeddings for all mesh vertices + # -> tensor [K, D] + mesh_vertex_embeddings = embedder(mesh_name) + # softmax values of geodesic distances for GT mesh vertices + # -> tensor [J, K] + mesh = create_mesh(mesh_name, mesh_vertex_embeddings.device) + geodist_softmax_values = F.softmax( + mesh.geodists[vertex_indices_i] / (-self.geodist_gauss_sigma), dim=1 + ) + # logsoftmax values for valid points + # -> tensor [J, K] + embdist_logsoftmax_values = F.log_softmax( + squared_euclidean_distance_matrix(vertex_embeddings_i, mesh_vertex_embeddings) + / (-self.embdist_gauss_sigma), + dim=1, + ) + losses[mesh_name] = (-geodist_softmax_values * embdist_logsoftmax_values).sum(1).mean() + + # pyre-fixme[29]: + # `Union[BoundMethod[typing.Callable(torch.Tensor.__iter__)[[Named(self, + # torch.Tensor)], typing.Iterator[typing.Any]], torch.Tensor], nn.Module, + # torch.Tensor]` is not a function. 
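+ # Editorial summary (added note): instead of a one-hot target, each
+ # annotated vertex j induces a soft target distribution
+ # softmax(-geodists[j] / geodist_gauss_sigma) over all K mesh vertices,
+ # and the loss is the cross-entropy between this target and
+ # softmax(-||e_j - E||^2 / embdist_gauss_sigma). The loop below
+ # back-fills zero losses for meshes with no annotations in the batch.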
+ for mesh_name in embedder.mesh_names: + if mesh_name not in losses: + losses[mesh_name] = self.fake_value( + densepose_predictor_outputs, embedder, mesh_name + ) + return losses + + def fake_values(self, densepose_predictor_outputs: Any, embedder: nn.Module): + losses = {} + # pyre-fixme[29]: + # `Union[BoundMethod[typing.Callable(torch.Tensor.__iter__)[[Named(self, + # torch.Tensor)], typing.Iterator[typing.Any]], torch.Tensor], nn.Module, + # torch.Tensor]` is not a function. + for mesh_name in embedder.mesh_names: + losses[mesh_name] = self.fake_value(densepose_predictor_outputs, embedder, mesh_name) + return losses + + def fake_value(self, densepose_predictor_outputs: Any, embedder: nn.Module, mesh_name: str): + return densepose_predictor_outputs.embedding.sum() * 0 + embedder(mesh_name).sum() * 0 diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/utils.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/utils.py new file mode 100644 index 0000000..346a2e0 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/losses/utils.py @@ -0,0 +1,441 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from abc import ABC, abstractmethod +from dataclasses import dataclass +from typing import Any, Dict, List, Optional, Tuple +import torch +from torch.nn import functional as F + +from detectron2.structures import BoxMode, Instances + +from densepose import DensePoseDataRelative + +LossDict = Dict[str, torch.Tensor] + + +def _linear_interpolation_utilities(v_norm, v0_src, size_src, v0_dst, size_dst, size_z): + """ + Computes utility values for linear interpolation at points v. + The points are given as normalized offsets in the source interval + (v0_src, v0_src + size_src), more precisely: + v = v0_src + v_norm * size_src / 256.0 + The computed utilities include lower points v_lo, upper points v_hi, + interpolation weights v_w and flags j_valid indicating whether the + points falls into the destination interval (v0_dst, v0_dst + size_dst). 
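+ A worked example (illustrative values): v_norm = 128, v0_src = 10 and
+ size_src = 20 give v = 10 + 128 * 20 / 256 = 20; with v0_dst = 0,
+ size_dst = 40 and size_z = 8 this maps to v_grid = 4.0, hence
+ v_lo = 4, v_hi = 5, v_w = 0.0 and j_valid = 1.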
+ + Args: + v_norm (:obj: `torch.Tensor`): tensor of size N containing + normalized point offsets + v0_src (:obj: `torch.Tensor`): tensor of size N containing + left bounds of source intervals for normalized points + size_src (:obj: `torch.Tensor`): tensor of size N containing + source interval sizes for normalized points + v0_dst (:obj: `torch.Tensor`): tensor of size N containing + left bounds of destination intervals + size_dst (:obj: `torch.Tensor`): tensor of size N containing + destination interval sizes + size_z (int): interval size for data to be interpolated + + Returns: + v_lo (:obj: `torch.Tensor`): int tensor of size N containing + indices of lower values used for interpolation, all values are + integers from [0, size_z - 1] + v_hi (:obj: `torch.Tensor`): int tensor of size N containing + indices of upper values used for interpolation, all values are + integers from [0, size_z - 1] + v_w (:obj: `torch.Tensor`): float tensor of size N containing + interpolation weights + j_valid (:obj: `torch.Tensor`): uint8 tensor of size N containing + 0 for points outside the estimation interval + (v0_est, v0_est + size_est) and 1 otherwise + """ + v = v0_src + v_norm * size_src / 256.0 + j_valid = (v - v0_dst >= 0) * (v - v0_dst < size_dst) + v_grid = (v - v0_dst) * size_z / size_dst + v_lo = v_grid.floor().long().clamp(min=0, max=size_z - 1) + v_hi = (v_lo + 1).clamp(max=size_z - 1) + v_grid = torch.min(v_hi.float(), v_grid) + v_w = v_grid - v_lo.float() + return v_lo, v_hi, v_w, j_valid + + +class BilinearInterpolationHelper: + """ + Args: + packed_annotations: object that contains packed annotations + j_valid (:obj: `torch.Tensor`): uint8 tensor of size M containing + 0 for points to be discarded and 1 for points to be selected + y_lo (:obj: `torch.Tensor`): int tensor of indices of upper values + in z_est for each point + y_hi (:obj: `torch.Tensor`): int tensor of indices of lower values + in z_est for each point + x_lo (:obj: `torch.Tensor`): int tensor of indices of left values + in z_est for each point + x_hi (:obj: `torch.Tensor`): int tensor of indices of right values + in z_est for each point + w_ylo_xlo (:obj: `torch.Tensor`): float tensor of size M; + contains upper-left value weight for each point + w_ylo_xhi (:obj: `torch.Tensor`): float tensor of size M; + contains upper-right value weight for each point + w_yhi_xlo (:obj: `torch.Tensor`): float tensor of size M; + contains lower-left value weight for each point + w_yhi_xhi (:obj: `torch.Tensor`): float tensor of size M; + contains lower-right value weight for each point + """ + + def __init__( + self, + packed_annotations: Any, + j_valid: torch.Tensor, + y_lo: torch.Tensor, + y_hi: torch.Tensor, + x_lo: torch.Tensor, + x_hi: torch.Tensor, + w_ylo_xlo: torch.Tensor, + w_ylo_xhi: torch.Tensor, + w_yhi_xlo: torch.Tensor, + w_yhi_xhi: torch.Tensor, + ): + for k, v in locals().items(): + if k != "self": + setattr(self, k, v) + + @staticmethod + def from_matches( + packed_annotations: Any, densepose_outputs_size_hw: Tuple[int, int] + ) -> "BilinearInterpolationHelper": + """ + Args: + packed_annotations: annotations packed into tensors, the following + attributes are required: + - bbox_xywh_gt + - bbox_xywh_est + - x_gt + - y_gt + - point_bbox_with_dp_indices + - point_bbox_indices + densepose_outputs_size_hw (tuple [int, int]): resolution of + DensePose predictor outputs (H, W) + Return: + An instance of `BilinearInterpolationHelper` used to perform + interpolation for the given annotation points and output resolution + """ + + 
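+ # Editorial note: the four bilinear weights computed below always sum
+ # to 1; e.g. for x_w = 0.25, y_w = 0.5 (illustrative values):
+ #     w_ylo_xlo = 0.75 * 0.5 = 0.375,  w_ylo_xhi = 0.25 * 0.5 = 0.125,
+ #     w_yhi_xlo = 0.75 * 0.5 = 0.375,  w_yhi_xhi = 0.25 * 0.5 = 0.125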
zh, zw = densepose_outputs_size_hw + x0_gt, y0_gt, w_gt, h_gt = packed_annotations.bbox_xywh_gt[ + packed_annotations.point_bbox_with_dp_indices + ].unbind(dim=1) + x0_est, y0_est, w_est, h_est = packed_annotations.bbox_xywh_est[ + packed_annotations.point_bbox_with_dp_indices + ].unbind(dim=1) + x_lo, x_hi, x_w, jx_valid = _linear_interpolation_utilities( + packed_annotations.x_gt, x0_gt, w_gt, x0_est, w_est, zw + ) + y_lo, y_hi, y_w, jy_valid = _linear_interpolation_utilities( + packed_annotations.y_gt, y0_gt, h_gt, y0_est, h_est, zh + ) + j_valid = jx_valid * jy_valid + + w_ylo_xlo = (1.0 - x_w) * (1.0 - y_w) + w_ylo_xhi = x_w * (1.0 - y_w) + w_yhi_xlo = (1.0 - x_w) * y_w + w_yhi_xhi = x_w * y_w + + return BilinearInterpolationHelper( + packed_annotations, + j_valid, + y_lo, + y_hi, + x_lo, + x_hi, + w_ylo_xlo, # pyre-ignore[6] + w_ylo_xhi, + # pyre-fixme[6]: Expected `Tensor` for 9th param but got `float`. + w_yhi_xlo, + w_yhi_xhi, + ) + + def extract_at_points( + self, + z_est, + slice_fine_segm=None, + w_ylo_xlo=None, + w_ylo_xhi=None, + w_yhi_xlo=None, + w_yhi_xhi=None, + ): + """ + Extract ground truth values z_gt for valid point indices and estimated + values z_est using bilinear interpolation over top-left (y_lo, x_lo), + top-right (y_lo, x_hi), bottom-left (y_hi, x_lo) and bottom-right + (y_hi, x_hi) values in z_est with corresponding weights: + w_ylo_xlo, w_ylo_xhi, w_yhi_xlo and w_yhi_xhi. + Use slice_fine_segm to slice dim=1 in z_est + """ + slice_fine_segm = ( + self.packed_annotations.fine_segm_labels_gt + if slice_fine_segm is None + else slice_fine_segm + ) + w_ylo_xlo = self.w_ylo_xlo if w_ylo_xlo is None else w_ylo_xlo + w_ylo_xhi = self.w_ylo_xhi if w_ylo_xhi is None else w_ylo_xhi + w_yhi_xlo = self.w_yhi_xlo if w_yhi_xlo is None else w_yhi_xlo + w_yhi_xhi = self.w_yhi_xhi if w_yhi_xhi is None else w_yhi_xhi + + index_bbox = self.packed_annotations.point_bbox_indices + z_est_sampled = ( + z_est[index_bbox, slice_fine_segm, self.y_lo, self.x_lo] * w_ylo_xlo + + z_est[index_bbox, slice_fine_segm, self.y_lo, self.x_hi] * w_ylo_xhi + + z_est[index_bbox, slice_fine_segm, self.y_hi, self.x_lo] * w_yhi_xlo + + z_est[index_bbox, slice_fine_segm, self.y_hi, self.x_hi] * w_yhi_xhi + ) + return z_est_sampled + + +def resample_data( + z, bbox_xywh_src, bbox_xywh_dst, wout, hout, mode: str="nearest", padding_mode: str="zeros" +): + """ + Args: + z (:obj: `torch.Tensor`): tensor of size (N,C,H,W) with data to be + resampled + bbox_xywh_src (:obj: `torch.Tensor`): tensor of size (N,4) containing + source bounding boxes in format XYWH + bbox_xywh_dst (:obj: `torch.Tensor`): tensor of size (N,4) containing + destination bounding boxes in format XYWH + Return: + zresampled (:obj: `torch.Tensor`): tensor of size (N, C, Hout, Wout) + with resampled values of z, where D is the discretization size + """ + n = bbox_xywh_src.size(0) + assert n == bbox_xywh_dst.size(0), ( + "The number of " + "source ROIs for resampling ({}) should be equal to the number " + "of destination ROIs ({})".format(bbox_xywh_src.size(0), bbox_xywh_dst.size(0)) + ) + x0src, y0src, wsrc, hsrc = bbox_xywh_src.unbind(dim=1) + x0dst, y0dst, wdst, hdst = bbox_xywh_dst.unbind(dim=1) + x0dst_norm = 2 * (x0dst - x0src) / wsrc - 1 + y0dst_norm = 2 * (y0dst - y0src) / hsrc - 1 + x1dst_norm = 2 * (x0dst + wdst - x0src) / wsrc - 1 + y1dst_norm = 2 * (y0dst + hdst - y0src) / hsrc - 1 + grid_w = torch.arange(wout, device=z.device, dtype=torch.float) / wout + grid_h = torch.arange(hout, device=z.device, dtype=torch.float) / hout + 
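+ # the grids built below place each destination pixel at its normalized
+ # [-1, 1] location inside the source box, the convention expected by
+ # F.grid_sample (x0dst_norm / x1dst_norm are the destination box edges
+ # expressed in the source box's normalized coordinates)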
grid_w_expanded = grid_w[None, None, :].expand(n, hout, wout) + grid_h_expanded = grid_h[None, :, None].expand(n, hout, wout) + dx_expanded = (x1dst_norm - x0dst_norm)[:, None, None].expand(n, hout, wout) + dy_expanded = (y1dst_norm - y0dst_norm)[:, None, None].expand(n, hout, wout) + x0_expanded = x0dst_norm[:, None, None].expand(n, hout, wout) + y0_expanded = y0dst_norm[:, None, None].expand(n, hout, wout) + grid_x = grid_w_expanded * dx_expanded + x0_expanded + grid_y = grid_h_expanded * dy_expanded + y0_expanded + grid = torch.stack((grid_x, grid_y), dim=3) + # resample Z from (N, C, H, W) into (N, C, Hout, Wout) + zresampled = F.grid_sample(z, grid, mode=mode, padding_mode=padding_mode, align_corners=True) + return zresampled + + +class AnnotationsAccumulator(ABC): + """ + Abstract class for an accumulator for annotations that can produce + dense annotations packed into tensors. + """ + + @abstractmethod + def accumulate(self, instances_one_image: Instances): + """ + Accumulate instances data for one image + + Args: + instances_one_image (Instances): instances data to accumulate + """ + pass + + @abstractmethod + def pack(self) -> Any: + """ + Pack data into tensors + """ + pass + + +@dataclass +class PackedChartBasedAnnotations: + """ + Packed annotations for chart-based model training. The following attributes + are defined: + - fine_segm_labels_gt (tensor [K] of `int64`): GT fine segmentation point labels + - x_gt (tensor [K] of `float32`): GT normalized X point coordinates + - y_gt (tensor [K] of `float32`): GT normalized Y point coordinates + - u_gt (tensor [K] of `float32`): GT point U values + - v_gt (tensor [K] of `float32`): GT point V values + - coarse_segm_gt (tensor [N, S, S] of `float32`): GT segmentation for bounding boxes + - bbox_xywh_gt (tensor [N, 4] of `float32`): selected GT bounding boxes in + XYWH format + - bbox_xywh_est (tensor [N, 4] of `float32`): selected matching estimated + bounding boxes in XYWH format + - point_bbox_with_dp_indices (tensor [K] of `int64`): indices of bounding boxes + with DensePose annotations that correspond to the point data + - point_bbox_indices (tensor [K] of `int64`): indices of bounding boxes + (not necessarily the selected ones with DensePose data) that correspond + to the point data + - bbox_indices (tensor [N] of `int64`): global indices of selected bounding + boxes with DensePose annotations; these indices could be used to access + features that are computed for all bounding boxes, not only the ones with + DensePose annotations. + Here K is the total number of points and N is the total number of instances + with DensePose annotations. + """ + + fine_segm_labels_gt: torch.Tensor + x_gt: torch.Tensor + y_gt: torch.Tensor + u_gt: torch.Tensor + v_gt: torch.Tensor + coarse_segm_gt: Optional[torch.Tensor] + bbox_xywh_gt: torch.Tensor + bbox_xywh_est: torch.Tensor + point_bbox_with_dp_indices: torch.Tensor + point_bbox_indices: torch.Tensor + bbox_indices: torch.Tensor + + +class ChartBasedAnnotationsAccumulator(AnnotationsAccumulator): + """ + Accumulates annotations by batches that correspond to objects detected on + individual images. Can pack them together into single tensors. 
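+ Intended use (see extract_packed_annotations_from_matches below):
+ call accumulate() once per image in the batch, then pack() once.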
+ """ + + def __init__(self): + self.i_gt = [] + self.x_gt = [] + self.y_gt = [] + self.u_gt = [] + self.v_gt = [] + self.s_gt = [] + self.bbox_xywh_gt = [] + self.bbox_xywh_est = [] + self.point_bbox_with_dp_indices = [] + self.point_bbox_indices = [] + self.bbox_indices = [] + self.nxt_bbox_with_dp_index = 0 + self.nxt_bbox_index = 0 + + def accumulate(self, instances_one_image: Instances): + """ + Accumulate instances data for one image + + Args: + instances_one_image (Instances): instances data to accumulate + """ + boxes_xywh_est = BoxMode.convert( + instances_one_image.proposal_boxes.tensor.clone(), BoxMode.XYXY_ABS, BoxMode.XYWH_ABS + ) + boxes_xywh_gt = BoxMode.convert( + instances_one_image.gt_boxes.tensor.clone(), BoxMode.XYXY_ABS, BoxMode.XYWH_ABS + ) + n_matches = len(boxes_xywh_gt) + assert n_matches == len( + boxes_xywh_est + ), f"Got {len(boxes_xywh_est)} proposal boxes and {len(boxes_xywh_gt)} GT boxes" + if not n_matches: + # no detection - GT matches + return + if ( + not hasattr(instances_one_image, "gt_densepose") + or instances_one_image.gt_densepose is None + ): + # no densepose GT for the detections, just increase the bbox index + self.nxt_bbox_index += n_matches + return + for box_xywh_est, box_xywh_gt, dp_gt in zip( + boxes_xywh_est, boxes_xywh_gt, instances_one_image.gt_densepose + ): + if (dp_gt is not None) and (len(dp_gt.x) > 0): + self._do_accumulate(box_xywh_gt, box_xywh_est, dp_gt) + self.nxt_bbox_index += 1 + + def _do_accumulate( + self, box_xywh_gt: torch.Tensor, box_xywh_est: torch.Tensor, dp_gt: DensePoseDataRelative + ): + """ + Accumulate instances data for one image, given that the data is not empty + + Args: + box_xywh_gt (tensor): GT bounding box + box_xywh_est (tensor): estimated bounding box + dp_gt (DensePoseDataRelative): GT densepose data + """ + self.i_gt.append(dp_gt.i) + self.x_gt.append(dp_gt.x) + self.y_gt.append(dp_gt.y) + self.u_gt.append(dp_gt.u) + self.v_gt.append(dp_gt.v) + if hasattr(dp_gt, "segm"): + self.s_gt.append(dp_gt.segm.unsqueeze(0)) + self.bbox_xywh_gt.append(box_xywh_gt.view(-1, 4)) + self.bbox_xywh_est.append(box_xywh_est.view(-1, 4)) + self.point_bbox_with_dp_indices.append( + torch.full_like(dp_gt.i, self.nxt_bbox_with_dp_index) + ) + self.point_bbox_indices.append(torch.full_like(dp_gt.i, self.nxt_bbox_index)) + self.bbox_indices.append(self.nxt_bbox_index) + self.nxt_bbox_with_dp_index += 1 + + def pack(self) -> Optional[PackedChartBasedAnnotations]: + """ + Pack data into tensors + """ + if not len(self.i_gt): + # TODO: + # returning proper empty annotations would require + # creating empty tensors of appropriate shape and + # type on an appropriate device; + # we return None so far to indicate empty annotations + return None + return PackedChartBasedAnnotations( + fine_segm_labels_gt=torch.cat(self.i_gt, 0).long(), + x_gt=torch.cat(self.x_gt, 0), + y_gt=torch.cat(self.y_gt, 0), + u_gt=torch.cat(self.u_gt, 0), + v_gt=torch.cat(self.v_gt, 0), + # ignore segmentation annotations, if not all the instances contain those + coarse_segm_gt=torch.cat(self.s_gt, 0) + if len(self.s_gt) == len(self.bbox_xywh_gt) + else None, + bbox_xywh_gt=torch.cat(self.bbox_xywh_gt, 0), + bbox_xywh_est=torch.cat(self.bbox_xywh_est, 0), + point_bbox_with_dp_indices=torch.cat(self.point_bbox_with_dp_indices, 0).long(), + point_bbox_indices=torch.cat(self.point_bbox_indices, 0).long(), + bbox_indices=torch.as_tensor( + self.bbox_indices, dtype=torch.long, device=self.x_gt[0].device + ).long(), + ) + + +def 
extract_packed_annotations_from_matches( + proposals_with_targets: List[Instances], accumulator: AnnotationsAccumulator +) -> Any: + for proposals_targets_per_image in proposals_with_targets: + accumulator.accumulate(proposals_targets_per_image) + return accumulator.pack() + + +def sample_random_indices( + n_indices: int, n_samples: int, device: Optional[torch.device] = None +) -> Optional[torch.Tensor]: + """ + Samples `n_samples` random indices from range `[0..n_indices - 1]`. + If `n_indices` is smaller than `n_samples`, returns `None` meaning that all indices + are selected. + Args: + n_indices (int): total number of indices + n_samples (int): number of indices to sample + device (torch.device): the desired device of returned tensor + Return: + Tensor of selected vertex indices, or `None`, if all vertices are selected + """ + if (n_samples <= 0) or (n_indices <= n_samples): + return None + indices = torch.randperm(n_indices, device=device)[:n_samples] + return indices diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/__init__.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/__init__.py new file mode 100644 index 0000000..1ece075 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/__init__.py @@ -0,0 +1,9 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from .chart import DensePoseChartPredictor +from .chart_confidence import DensePoseChartConfidencePredictorMixin +from .chart_with_confidence import DensePoseChartWithConfidencePredictor +from .cse import DensePoseEmbeddingPredictor +from .cse_confidence import DensePoseEmbeddingConfidencePredictorMixin +from .cse_with_confidence import DensePoseEmbeddingWithConfidencePredictor +from .registry import DENSEPOSE_PREDICTOR_REGISTRY diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/chart.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/chart.py new file mode 100644 index 0000000..3bcd13f --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/chart.py @@ -0,0 +1,94 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+ +import torch +from torch import nn + +from detectron2.config import CfgNode +from detectron2.layers import ConvTranspose2d, interpolate + +from ...structures import DensePoseChartPredictorOutput +from ..utils import initialize_module_params +from .registry import DENSEPOSE_PREDICTOR_REGISTRY + + +@DENSEPOSE_PREDICTOR_REGISTRY.register() +class DensePoseChartPredictor(nn.Module): + """ + Predictor (last layers of a DensePose model) that takes DensePose head outputs as an input + and produces 4 tensors which represent DensePose results for predefined body parts + (patches / charts): + * coarse segmentation, a tensor of shape [N, K, Hout, Wout] + * fine segmentation, a tensor of shape [N, C, Hout, Wout] + * U coordinates, a tensor of shape [N, C, Hout, Wout] + * V coordinates, a tensor of shape [N, C, Hout, Wout] + where + - N is the number of instances + - K is the number of coarse segmentation channels ( + 2 = foreground / background, + 15 = one of 14 body parts / background) + - C is the number of fine segmentation channels ( + 24 fine body parts / background) + - Hout and Wout are height and width of predictions + """ + + def __init__(self, cfg: CfgNode, input_channels: int): + """ + Initialize predictor using configuration options + + Args: + cfg (CfgNode): configuration options + input_channels (int): input tensor size along the channel dimension + """ + super().__init__() + dim_in = input_channels + n_segm_chan = cfg.MODEL.ROI_DENSEPOSE_HEAD.NUM_COARSE_SEGM_CHANNELS + dim_out_patches = cfg.MODEL.ROI_DENSEPOSE_HEAD.NUM_PATCHES + 1 + kernel_size = cfg.MODEL.ROI_DENSEPOSE_HEAD.DECONV_KERNEL + # coarse segmentation + self.ann_index_lowres = ConvTranspose2d( + dim_in, n_segm_chan, kernel_size, stride=2, padding=int(kernel_size / 2 - 1) + ) + # fine segmentation + self.index_uv_lowres = ConvTranspose2d( + dim_in, dim_out_patches, kernel_size, stride=2, padding=int(kernel_size / 2 - 1) + ) + # U + self.u_lowres = ConvTranspose2d( + dim_in, dim_out_patches, kernel_size, stride=2, padding=int(kernel_size / 2 - 1) + ) + # V + self.v_lowres = ConvTranspose2d( + dim_in, dim_out_patches, kernel_size, stride=2, padding=int(kernel_size / 2 - 1) + ) + self.scale_factor = cfg.MODEL.ROI_DENSEPOSE_HEAD.UP_SCALE + initialize_module_params(self) + + def interp2d(self, tensor_nchw: torch.Tensor): + """ + Bilinear interpolation method to be used for upscaling + + Args: + tensor_nchw (tensor): tensor of shape (N, C, H, W) + Return: + tensor of shape (N, C, Hout, Wout), where Hout and Wout are computed + by applying the scale factor to H and W + """ + return interpolate( + tensor_nchw, scale_factor=self.scale_factor, mode="bilinear", align_corners=False + ) + + def forward(self, head_outputs: torch.Tensor): + """ + Perform forward step on DensePose head outputs + + Args: + head_outputs (tensor): DensePose head outputs, tensor of shape [N, D, H, W] + Return: + An instance of DensePoseChartPredictorOutput + """ + return DensePoseChartPredictorOutput( + coarse_segm=self.interp2d(self.ann_index_lowres(head_outputs)), + fine_segm=self.interp2d(self.index_uv_lowres(head_outputs)), + u=self.interp2d(self.u_lowres(head_outputs)), + v=self.interp2d(self.v_lowres(head_outputs)), + ) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/chart_confidence.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/chart_confidence.py new file mode 100644 index 0000000..0c00999 --- /dev/null +++ 
b/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/chart_confidence.py
@@ -0,0 +1,174 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+from typing import Any
+import torch
+from torch.nn import functional as F
+
+from detectron2.config import CfgNode
+from detectron2.layers import ConvTranspose2d
+
+from ...structures import decorate_predictor_output_class_with_confidences
+from ..confidence import DensePoseConfidenceModelConfig, DensePoseUVConfidenceType
+from ..utils import initialize_module_params
+
+
+class DensePoseChartConfidencePredictorMixin:
+    """
+    Predictor contains the last layers of a DensePose model that take DensePose head
+    outputs as an input and produce model outputs. Confidence predictor mixin is used
+    to generate confidences for segmentation and UV tensors estimated by some
+    base predictor. Several assumptions need to hold for the base predictor:
+    1) the `forward` method must return SIUV tuple as the first result (
+       S = coarse segmentation, I = fine segmentation, U and V are intrinsic
+       chart coordinates)
+    2) `interp2d` method must be defined to perform bilinear interpolation;
+       the same method is typically used for SIUV and confidences
+    Confidence predictor mixin provides confidence estimates, as described in:
+    N. Neverova et al., Correlated Uncertainty for Learning Dense Correspondences
+        from Noisy Labels, NeurIPS 2019
+    A. Sanakoyeu et al., Transferring Dense Pose to Proximal Animal Classes, CVPR 2020
+    """
+
+    def __init__(self, cfg: CfgNode, input_channels: int):
+        """
+        Initialize confidence predictor using configuration options.
+
+        Args:
+            cfg (CfgNode): configuration options
+            input_channels (int): number of input channels
+        """
+        # we rely on base predictor to call nn.Module.__init__
+        super().__init__(cfg, input_channels)  # pyre-ignore[19]
+        self.confidence_model_cfg = DensePoseConfidenceModelConfig.from_cfg(cfg)
+        self._initialize_confidence_estimation_layers(cfg, input_channels)
+        self._registry = {}
+        initialize_module_params(self)  # pyre-ignore[6]
+
+    def _initialize_confidence_estimation_layers(self, cfg: CfgNode, dim_in: int):
+        """
+        Initialize confidence estimation layers based on configuration options
+
+        Args:
+            cfg (CfgNode): configuration options
+            dim_in (int): number of input channels
+        """
+        dim_out_patches = cfg.MODEL.ROI_DENSEPOSE_HEAD.NUM_PATCHES + 1
+        kernel_size = cfg.MODEL.ROI_DENSEPOSE_HEAD.DECONV_KERNEL
+        if self.confidence_model_cfg.uv_confidence.enabled:
+            if self.confidence_model_cfg.uv_confidence.type == DensePoseUVConfidenceType.IID_ISO:
+                self.sigma_2_lowres = ConvTranspose2d(  # pyre-ignore[16]
+                    dim_in, dim_out_patches, kernel_size, stride=2, padding=int(kernel_size / 2 - 1)
+                )
+            elif (
+                self.confidence_model_cfg.uv_confidence.type
+                == DensePoseUVConfidenceType.INDEP_ANISO
+            ):
+                self.sigma_2_lowres = ConvTranspose2d(
+                    dim_in, dim_out_patches, kernel_size, stride=2, padding=int(kernel_size / 2 - 1)
+                )
+                self.kappa_u_lowres = ConvTranspose2d(  # pyre-ignore[16]
+                    dim_in, dim_out_patches, kernel_size, stride=2, padding=int(kernel_size / 2 - 1)
+                )
+                self.kappa_v_lowres = ConvTranspose2d(  # pyre-ignore[16]
+                    dim_in, dim_out_patches, kernel_size, stride=2, padding=int(kernel_size / 2 - 1)
+                )
+            else:
+                raise ValueError(
+                    f"Unknown confidence model type: "
+                    f"{self.confidence_model_cfg.uv_confidence.type}"
+                )
+        if self.confidence_model_cfg.segm_confidence.enabled:
+            self.fine_segm_confidence_lowres = ConvTranspose2d(  # pyre-ignore[16]
+                dim_in, 1, kernel_size, stride=2, padding=int(kernel_size / 2 - 1)
+            )
+            self.coarse_segm_confidence_lowres = ConvTranspose2d(  # pyre-ignore[16]
+                dim_in, 1, kernel_size, stride=2, padding=int(kernel_size / 2 - 1)
+            )
+
+    def forward(self, head_outputs: torch.Tensor):
+        """
+        Perform forward operation on head outputs used as inputs for the predictor.
+        Calls forward method from the base predictor and uses its outputs to compute
+        confidences.
+
+        Args:
+            head_outputs (Tensor): head outputs used as predictor inputs
+        Return:
+            An instance of outputs with confidences,
+            see `decorate_predictor_output_class_with_confidences`
+        """
+        # assuming base class returns SIUV estimates in its first result
+        base_predictor_outputs = super().forward(head_outputs)  # pyre-ignore[16]
+
+        # create output instance by extending base predictor outputs:
+        output = self._create_output_instance(base_predictor_outputs)
+
+        if self.confidence_model_cfg.uv_confidence.enabled:
+            if self.confidence_model_cfg.uv_confidence.type == DensePoseUVConfidenceType.IID_ISO:
+                # assuming base class defines interp2d method for bilinear interpolation
+                output.sigma_2 = self.interp2d(self.sigma_2_lowres(head_outputs))  # pyre-ignore[16]
+            elif (
+                self.confidence_model_cfg.uv_confidence.type
+                == DensePoseUVConfidenceType.INDEP_ANISO
+            ):
+                # assuming base class defines interp2d method for bilinear interpolation
+                output.sigma_2 = self.interp2d(self.sigma_2_lowres(head_outputs))
+                output.kappa_u = self.interp2d(self.kappa_u_lowres(head_outputs))  # pyre-ignore[16]
+                output.kappa_v = self.interp2d(self.kappa_v_lowres(head_outputs))  # pyre-ignore[16]
+            else:
+                raise ValueError(
+                    f"Unknown confidence model type: "
+                    f"{self.confidence_model_cfg.uv_confidence.type}"
+                )
+        if self.confidence_model_cfg.segm_confidence.enabled:
+            # base predictor outputs are assumed to have `fine_segm` and `coarse_segm` attributes
+            # base predictor is assumed to define `interp2d` method for bilinear interpolation
+            output.fine_segm_confidence = (
+                F.softplus(
+                    self.interp2d(self.fine_segm_confidence_lowres(head_outputs))  # pyre-ignore[16]
+                )
+                + self.confidence_model_cfg.segm_confidence.epsilon
+            )
+            output.fine_segm = base_predictor_outputs.fine_segm * torch.repeat_interleave(
+                output.fine_segm_confidence, base_predictor_outputs.fine_segm.shape[1], dim=1
+            )
+            output.coarse_segm_confidence = (
+                F.softplus(
+                    self.interp2d(
+                        self.coarse_segm_confidence_lowres(head_outputs)  # pyre-ignore[16]
+                    )
+                )
+                + self.confidence_model_cfg.segm_confidence.epsilon
+            )
+            output.coarse_segm = base_predictor_outputs.coarse_segm * torch.repeat_interleave(
+                output.coarse_segm_confidence, base_predictor_outputs.coarse_segm.shape[1], dim=1
+            )
+
+        return output
+
+    def _create_output_instance(self, base_predictor_outputs: Any):
+        """
+        Create an instance of predictor outputs by copying the outputs from the
+        base predictor and initializing confidence
+
+        Args:
+            base_predictor_outputs: an instance of base predictor outputs
+                (the outputs type is assumed to be a dataclass)
+        Return:
+            An instance of outputs with confidences
+        """
+        PredictorOutput = decorate_predictor_output_class_with_confidences(
+            type(base_predictor_outputs)  # pyre-ignore[6]
+        )
+        # base_predictor_outputs is assumed to be a dataclass
+        # reassign all the fields from base_predictor_outputs (no deep copy!), add new fields
+        output = PredictorOutput(
+            **base_predictor_outputs.__dict__,
+            coarse_segm_confidence=None,
+            fine_segm_confidence=None,
+            sigma_1=None,
+            sigma_2=None,
+            kappa_u=None,
+            kappa_v=None,
+        )
+        return output
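Review note on the two predictor files above: below is a minimal smoke-test sketch, not part of the diff, for the chart predictor with confidences. It assumes detectron2 is installed, that the vendored tree is importable as `densepose` (other files in this diff import it that way), and that the package exposes `add_densepose_config` and the predictor classes the same way the upstream DensePose project does; the channel count and spatial size are arbitrary.

    # Sketch: run DensePoseChartWithConfidencePredictor on dummy head outputs.
    # Assumed imports/exports; config keys come from add_densepose_config defaults.
    import torch

    from detectron2.config import get_cfg
    from densepose import add_densepose_config  # assumed upstream-style export
    from densepose.modeling.predictors import DensePoseChartWithConfidencePredictor

    cfg = get_cfg()
    add_densepose_config(cfg)
    cfg.MODEL.ROI_DENSEPOSE_HEAD.UV_CONFIDENCE.ENABLED = True  # default TYPE is "iid_iso"

    predictor = DensePoseChartWithConfidencePredictor(cfg, input_channels=512)
    head_outputs = torch.randn(2, 512, 28, 28)  # dummy [N, D, H, W] head features

    out = predictor(head_outputs)
    # ConvTranspose2d (stride 2) followed by interp2d (UP_SCALE, 2 by default)
    # upsamples 28 -> 112 with the default DECONV_KERNEL of 4
    print(out.coarse_segm.shape, out.fine_segm.shape, out.u.shape, out.sigma_2.shape)

With the `iid_iso` confidence type only `sigma_2` is populated; `kappa_u` and `kappa_v` stay `None` unless the `indep_aniso` type is selected.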
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/chart_with_confidence.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/chart_with_confidence.py new file mode 100644 index 0000000..9c1cd6c --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/chart_with_confidence.py @@ -0,0 +1,15 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from . import DensePoseChartConfidencePredictorMixin, DensePoseChartPredictor +from .registry import DENSEPOSE_PREDICTOR_REGISTRY + + +@DENSEPOSE_PREDICTOR_REGISTRY.register() +class DensePoseChartWithConfidencePredictor( + DensePoseChartConfidencePredictorMixin, DensePoseChartPredictor +): + """ + Predictor that combines chart and chart confidence estimation + """ + + pass diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/cse.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/cse.py new file mode 100644 index 0000000..466a5ec --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/cse.py @@ -0,0 +1,70 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved + +import torch +from torch import nn + +from detectron2.config import CfgNode +from detectron2.layers import ConvTranspose2d, interpolate + +from ...structures import DensePoseEmbeddingPredictorOutput +from ..utils import initialize_module_params +from .registry import DENSEPOSE_PREDICTOR_REGISTRY + + +@DENSEPOSE_PREDICTOR_REGISTRY.register() +class DensePoseEmbeddingPredictor(nn.Module): + """ + Last layers of a DensePose model that take DensePose head outputs as an input + and produce model outputs for continuous surface embeddings (CSE). + """ + + def __init__(self, cfg: CfgNode, input_channels: int): + """ + Initialize predictor using configuration options + + Args: + cfg (CfgNode): configuration options + input_channels (int): input tensor size along the channel dimension + """ + super().__init__() + dim_in = input_channels + n_segm_chan = cfg.MODEL.ROI_DENSEPOSE_HEAD.NUM_COARSE_SEGM_CHANNELS + embed_size = cfg.MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBED_SIZE + kernel_size = cfg.MODEL.ROI_DENSEPOSE_HEAD.DECONV_KERNEL + # coarse segmentation + self.coarse_segm_lowres = ConvTranspose2d( + dim_in, n_segm_chan, kernel_size, stride=2, padding=int(kernel_size / 2 - 1) + ) + # embedding + self.embed_lowres = ConvTranspose2d( + dim_in, embed_size, kernel_size, stride=2, padding=int(kernel_size / 2 - 1) + ) + self.scale_factor = cfg.MODEL.ROI_DENSEPOSE_HEAD.UP_SCALE + initialize_module_params(self) + + def interp2d(self, tensor_nchw: torch.Tensor): + """ + Bilinear interpolation method to be used for upscaling + + Args: + tensor_nchw (tensor): tensor of shape (N, C, H, W) + Return: + tensor of shape (N, C, Hout, Wout), where Hout and Wout are computed + by applying the scale factor to H and W + """ + return interpolate( + tensor_nchw, scale_factor=self.scale_factor, mode="bilinear", align_corners=False + ) + + def forward(self, head_outputs): + """ + Perform forward step on DensePose head outputs + + Args: + head_outputs (tensor): DensePose head outputs, tensor of shape [N, D, H, W] + """ + embed_lowres = self.embed_lowres(head_outputs) + coarse_segm_lowres = self.coarse_segm_lowres(head_outputs) + embed = self.interp2d(embed_lowres) + coarse_segm = self.interp2d(coarse_segm_lowres) + return DensePoseEmbeddingPredictorOutput(embedding=embed, coarse_segm=coarse_segm) diff --git 
a/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/cse_confidence.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/cse_confidence.py new file mode 100644 index 0000000..8220337 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/cse_confidence.py @@ -0,0 +1,115 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from typing import Any +import torch +from torch.nn import functional as F + +from detectron2.config import CfgNode +from detectron2.layers import ConvTranspose2d + +from densepose.modeling.confidence import DensePoseConfidenceModelConfig +from densepose.modeling.utils import initialize_module_params +from densepose.structures import decorate_cse_predictor_output_class_with_confidences + + +class DensePoseEmbeddingConfidencePredictorMixin: + """ + Predictor contains the last layers of a DensePose model that take DensePose head + outputs as an input and produce model outputs. Confidence predictor mixin is used + to generate confidences for coarse segmentation estimated by some + base predictor. Several assumptions need to hold for the base predictor: + 1) the `forward` method must return CSE DensePose head outputs, + tensor of shape [N, D, H, W] + 2) `interp2d` method must be defined to perform bilinear interpolation; + the same method is typically used for masks and confidences + Confidence predictor mixin provides confidence estimates, as described in: + N. Neverova et al., Correlated Uncertainty for Learning Dense Correspondences + from Noisy Labels, NeurIPS 2019 + A. Sanakoyeu et al., Transferring Dense Pose to Proximal Animal Classes, CVPR 2020 + """ + + def __init__(self, cfg: CfgNode, input_channels: int): + """ + Initialize confidence predictor using configuration options. + + Args: + cfg (CfgNode): configuration options + input_channels (int): number of input channels + """ + # we rely on base predictor to call nn.Module.__init__ + super().__init__(cfg, input_channels) # pyre-ignore[19] + self.confidence_model_cfg = DensePoseConfidenceModelConfig.from_cfg(cfg) + self._initialize_confidence_estimation_layers(cfg, input_channels) + self._registry = {} + initialize_module_params(self) # pyre-ignore[6] + + def _initialize_confidence_estimation_layers(self, cfg: CfgNode, dim_in: int): + """ + Initialize confidence estimation layers based on configuration options + + Args: + cfg (CfgNode): configuration options + dim_in (int): number of input channels + """ + kernel_size = cfg.MODEL.ROI_DENSEPOSE_HEAD.DECONV_KERNEL + if self.confidence_model_cfg.segm_confidence.enabled: + self.coarse_segm_confidence_lowres = ConvTranspose2d( # pyre-ignore[16] + dim_in, 1, kernel_size, stride=2, padding=int(kernel_size / 2 - 1) + ) + + def forward(self, head_outputs: torch.Tensor): + """ + Perform forward operation on head outputs used as inputs for the predictor. + Calls forward method from the base predictor and uses its outputs to compute + confidences. 
+
+        Args:
+            head_outputs (Tensor): head outputs used as predictor inputs
+        Return:
+            An instance of outputs with confidences,
+            see `decorate_cse_predictor_output_class_with_confidences`
+        """
+        # assuming base class returns CSE predictor outputs in its first result
+        base_predictor_outputs = super().forward(head_outputs)  # pyre-ignore[16]
+
+        # create output instance by extending base predictor outputs:
+        output = self._create_output_instance(base_predictor_outputs)
+
+        if self.confidence_model_cfg.segm_confidence.enabled:
+            # base predictor outputs are assumed to have `coarse_segm` attribute
+            # base predictor is assumed to define `interp2d` method for bilinear interpolation
+            output.coarse_segm_confidence = (
+                F.softplus(
+                    self.interp2d(  # pyre-ignore[16]
+                        self.coarse_segm_confidence_lowres(head_outputs)  # pyre-ignore[16]
+                    )
+                )
+                + self.confidence_model_cfg.segm_confidence.epsilon
+            )
+            output.coarse_segm = base_predictor_outputs.coarse_segm * torch.repeat_interleave(
+                output.coarse_segm_confidence, base_predictor_outputs.coarse_segm.shape[1], dim=1
+            )
+
+        return output
+
+    def _create_output_instance(self, base_predictor_outputs: Any):
+        """
+        Create an instance of predictor outputs by copying the outputs from the
+        base predictor and initializing confidence
+
+        Args:
+            base_predictor_outputs: an instance of base predictor outputs
+                (the outputs type is assumed to be a dataclass)
+        Return:
+            An instance of outputs with confidences
+        """
+        PredictorOutput = decorate_cse_predictor_output_class_with_confidences(
+            type(base_predictor_outputs)  # pyre-ignore[6]
+        )
+        # base_predictor_outputs is assumed to be a dataclass
+        # reassign all the fields from base_predictor_outputs (no deep copy!), add new fields
+        output = PredictorOutput(
+            **base_predictor_outputs.__dict__,
+            coarse_segm_confidence=None,
+        )
+        return output
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/cse_with_confidence.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/cse_with_confidence.py
new file mode 100644
index 0000000..17ecef6
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/cse_with_confidence.py
@@ -0,0 +1,15 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+from . import DensePoseEmbeddingConfidencePredictorMixin, DensePoseEmbeddingPredictor
+from .registry import DENSEPOSE_PREDICTOR_REGISTRY
+
+
+@DENSEPOSE_PREDICTOR_REGISTRY.register()
+class DensePoseEmbeddingWithConfidencePredictor(
+    DensePoseEmbeddingConfidencePredictorMixin, DensePoseEmbeddingPredictor
+):
+    """
+    Predictor that combines CSE and CSE confidence estimation
+    """
+
+    pass
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/registry.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/registry.py
new file mode 100644
index 0000000..f96901d
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/predictors/registry.py
@@ -0,0 +1,5 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+from detectron2.utils.registry import Registry
+
+DENSEPOSE_PREDICTOR_REGISTRY = Registry("DENSEPOSE_PREDICTOR")
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/roi_heads/__init__.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/roi_heads/__init__.py
new file mode 100644
index 0000000..8403589
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/roi_heads/__init__.py
@@ -0,0 +1,6 @@
+# Copyright (c) Facebook, Inc. 
and its affiliates.
+
+from .v1convx import DensePoseV1ConvXHead
+from .deeplab import DensePoseDeepLabHead
+from .registry import ROI_DENSEPOSE_HEAD_REGISTRY
+from .roi_head import Decoder, DensePoseROIHeads
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/roi_heads/deeplab.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/roi_heads/deeplab.py
new file mode 100644
index 0000000..4e5cb48
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/roi_heads/deeplab.py
@@ -0,0 +1,263 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+import fvcore.nn.weight_init as weight_init
+import torch
+from torch import nn
+from torch.nn import functional as F
+
+from detectron2.config import CfgNode
+from detectron2.layers import Conv2d
+
+from .registry import ROI_DENSEPOSE_HEAD_REGISTRY
+
+
+@ROI_DENSEPOSE_HEAD_REGISTRY.register()
+class DensePoseDeepLabHead(nn.Module):
+    """
+    DensePose head using DeepLabV3 model from
+    "Rethinking Atrous Convolution for Semantic Image Segmentation"
+    <https://arxiv.org/pdf/1706.05587.pdf>.
+    """
+
+    def __init__(self, cfg: CfgNode, input_channels: int):
+        super(DensePoseDeepLabHead, self).__init__()
+        # fmt: off
+        hidden_dim           = cfg.MODEL.ROI_DENSEPOSE_HEAD.CONV_HEAD_DIM
+        kernel_size          = cfg.MODEL.ROI_DENSEPOSE_HEAD.CONV_HEAD_KERNEL
+        norm                 = cfg.MODEL.ROI_DENSEPOSE_HEAD.DEEPLAB.NORM
+        self.n_stacked_convs = cfg.MODEL.ROI_DENSEPOSE_HEAD.NUM_STACKED_CONVS
+        self.use_nonlocal    = cfg.MODEL.ROI_DENSEPOSE_HEAD.DEEPLAB.NONLOCAL_ON
+        # fmt: on
+        pad_size = kernel_size // 2
+        n_channels = input_channels
+
+        self.ASPP = ASPP(input_channels, [6, 12, 56], n_channels)  # 6, 12, 56
+        self.add_module("ASPP", self.ASPP)
+
+        if self.use_nonlocal:
+            self.NLBlock = NONLocalBlock2D(input_channels, bn_layer=True)
+            self.add_module("NLBlock", self.NLBlock)
+        # weight_init.c2_msra_fill(self.ASPP)
+
+        for i in range(self.n_stacked_convs):
+            norm_module = nn.GroupNorm(32, hidden_dim) if norm == "GN" else None
+            layer = Conv2d(
+                n_channels,
+                hidden_dim,
+                kernel_size,
+                stride=1,
+                padding=pad_size,
+                bias=not norm,
+                norm=norm_module,
+            )
+            weight_init.c2_msra_fill(layer)
+            n_channels = hidden_dim
+            layer_name = self._get_layer_name(i)
+            self.add_module(layer_name, layer)
+        self.n_out_channels = hidden_dim
+        # initialize_module_params(self)
+
+    def forward(self, features):
+        x0 = features
+        x = self.ASPP(x0)
+        if self.use_nonlocal:
+            x = self.NLBlock(x)
+        output = x
+        for i in range(self.n_stacked_convs):
+            layer_name = self._get_layer_name(i)
+            x = getattr(self, layer_name)(x)
+            x = F.relu(x)
+            output = x
+        return output
+
+    def _get_layer_name(self, i: int):
+        layer_name = "body_conv_fcn{}".format(i + 1)
+        return layer_name
+
+
+# Copied from
+# https://github.com/pytorch/vision/blob/master/torchvision/models/segmentation/deeplabv3.py
+# See https://arxiv.org/pdf/1706.05587.pdf for details
+class ASPPConv(nn.Sequential):
+    def __init__(self, in_channels, out_channels, dilation):
+        modules = [
+            nn.Conv2d(
+                in_channels, out_channels, 3, padding=dilation, dilation=dilation, bias=False
+            ),
+            nn.GroupNorm(32, out_channels),
+            nn.ReLU(),
+        ]
+        super(ASPPConv, self).__init__(*modules)
+
+
+class ASPPPooling(nn.Sequential):
+    def __init__(self, in_channels, out_channels):
+        super(ASPPPooling, self).__init__(
+            nn.AdaptiveAvgPool2d(1),
+            nn.Conv2d(in_channels, out_channels, 1, bias=False),
+            nn.GroupNorm(32, out_channels),
+            nn.ReLU(),
+        )
+
+    def forward(self, x):
+        size = x.shape[-2:]
+        x = super(ASPPPooling, self).forward(x)
+        return F.interpolate(x, size=size, 
mode="bilinear", align_corners=False) + + +class ASPP(nn.Module): + def __init__(self, in_channels, atrous_rates, out_channels): + super(ASPP, self).__init__() + modules = [] + modules.append( + nn.Sequential( + nn.Conv2d(in_channels, out_channels, 1, bias=False), + nn.GroupNorm(32, out_channels), + nn.ReLU(), + ) + ) + + rate1, rate2, rate3 = tuple(atrous_rates) + modules.append(ASPPConv(in_channels, out_channels, rate1)) + modules.append(ASPPConv(in_channels, out_channels, rate2)) + modules.append(ASPPConv(in_channels, out_channels, rate3)) + modules.append(ASPPPooling(in_channels, out_channels)) + + self.convs = nn.ModuleList(modules) + + self.project = nn.Sequential( + nn.Conv2d(5 * out_channels, out_channels, 1, bias=False), + # nn.BatchNorm2d(out_channels), + nn.ReLU() + # nn.Dropout(0.5) + ) + + def forward(self, x): + res = [] + for conv in self.convs: + res.append(conv(x)) + res = torch.cat(res, dim=1) + return self.project(res) + + +# copied from +# https://github.com/AlexHex7/Non-local_pytorch/blob/master/lib/non_local_embedded_gaussian.py +# See https://arxiv.org/abs/1711.07971 for details +class _NonLocalBlockND(nn.Module): + def __init__( + self, in_channels, inter_channels=None, dimension=3, sub_sample=True, bn_layer=True + ): + super(_NonLocalBlockND, self).__init__() + + assert dimension in [1, 2, 3] + + self.dimension = dimension + self.sub_sample = sub_sample + + self.in_channels = in_channels + self.inter_channels = inter_channels + + if self.inter_channels is None: + self.inter_channels = in_channels // 2 + if self.inter_channels == 0: + self.inter_channels = 1 + + if dimension == 3: + conv_nd = nn.Conv3d + max_pool_layer = nn.MaxPool3d(kernel_size=(1, 2, 2)) + bn = nn.GroupNorm # (32, hidden_dim) #nn.BatchNorm3d + elif dimension == 2: + conv_nd = nn.Conv2d + max_pool_layer = nn.MaxPool2d(kernel_size=(2, 2)) + bn = nn.GroupNorm # (32, hidden_dim)nn.BatchNorm2d + else: + conv_nd = nn.Conv1d + max_pool_layer = nn.MaxPool1d(kernel_size=2) + bn = nn.GroupNorm # (32, hidden_dim)nn.BatchNorm1d + + self.g = conv_nd( + in_channels=self.in_channels, + out_channels=self.inter_channels, + kernel_size=1, + stride=1, + padding=0, + ) + + if bn_layer: + self.W = nn.Sequential( + conv_nd( + in_channels=self.inter_channels, + out_channels=self.in_channels, + kernel_size=1, + stride=1, + padding=0, + ), + bn(32, self.in_channels), + ) + nn.init.constant_(self.W[1].weight, 0) + nn.init.constant_(self.W[1].bias, 0) + else: + self.W = conv_nd( + in_channels=self.inter_channels, + out_channels=self.in_channels, + kernel_size=1, + stride=1, + padding=0, + ) + nn.init.constant_(self.W.weight, 0) + nn.init.constant_(self.W.bias, 0) + + self.theta = conv_nd( + in_channels=self.in_channels, + out_channels=self.inter_channels, + kernel_size=1, + stride=1, + padding=0, + ) + self.phi = conv_nd( + in_channels=self.in_channels, + out_channels=self.inter_channels, + kernel_size=1, + stride=1, + padding=0, + ) + + if sub_sample: + self.g = nn.Sequential(self.g, max_pool_layer) + self.phi = nn.Sequential(self.phi, max_pool_layer) + + def forward(self, x): + """ + :param x: (b, c, t, h, w) + :return: + """ + + batch_size = x.size(0) + + g_x = self.g(x).view(batch_size, self.inter_channels, -1) + g_x = g_x.permute(0, 2, 1) + + theta_x = self.theta(x).view(batch_size, self.inter_channels, -1) + theta_x = theta_x.permute(0, 2, 1) + phi_x = self.phi(x).view(batch_size, self.inter_channels, -1) + f = torch.matmul(theta_x, phi_x) + f_div_C = F.softmax(f, dim=-1) + + y = torch.matmul(f_div_C, g_x) + y = 
y.permute(0, 2, 1).contiguous() + y = y.view(batch_size, self.inter_channels, *x.size()[2:]) + W_y = self.W(y) + z = W_y + x + + return z + + +class NONLocalBlock2D(_NonLocalBlockND): + def __init__(self, in_channels, inter_channels=None, sub_sample=True, bn_layer=True): + super(NONLocalBlock2D, self).__init__( + in_channels, + inter_channels=inter_channels, + dimension=2, + sub_sample=sub_sample, + bn_layer=bn_layer, + ) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/roi_heads/registry.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/roi_heads/registry.py new file mode 100644 index 0000000..e1cea43 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/roi_heads/registry.py @@ -0,0 +1,5 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from detectron2.utils.registry import Registry + +ROI_DENSEPOSE_HEAD_REGISTRY = Registry("ROI_DENSEPOSE_HEAD") diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/roi_heads/roi_head.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/roi_heads/roi_head.py new file mode 100644 index 0000000..8f9d9a6 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/roi_heads/roi_head.py @@ -0,0 +1,221 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +import numpy as np +from typing import Dict, List, Optional +import fvcore.nn.weight_init as weight_init +import torch +import torch.nn as nn +from torch.nn import functional as F + +from detectron2.layers import Conv2d, ShapeSpec, get_norm +from detectron2.modeling import ROI_HEADS_REGISTRY, StandardROIHeads +from detectron2.modeling.poolers import ROIPooler +from detectron2.modeling.roi_heads import select_foreground_proposals +from detectron2.structures import ImageList, Instances + +from .. import ( + build_densepose_data_filter, + build_densepose_embedder, + build_densepose_head, + build_densepose_losses, + build_densepose_predictor, + densepose_inference, +) + + +class Decoder(nn.Module): + """ + A semantic segmentation head described in detail in the Panoptic Feature Pyramid Networks paper + (https://arxiv.org/abs/1901.02446). It takes FPN features as input and merges information from + all levels of the FPN into single output. 
+ """ + + def __init__(self, cfg, input_shape: Dict[str, ShapeSpec], in_features): + super(Decoder, self).__init__() + + # fmt: off + self.in_features = in_features + feature_strides = {k: v.stride for k, v in input_shape.items()} + feature_channels = {k: v.channels for k, v in input_shape.items()} + num_classes = cfg.MODEL.ROI_DENSEPOSE_HEAD.DECODER_NUM_CLASSES + conv_dims = cfg.MODEL.ROI_DENSEPOSE_HEAD.DECODER_CONV_DIMS + self.common_stride = cfg.MODEL.ROI_DENSEPOSE_HEAD.DECODER_COMMON_STRIDE + norm = cfg.MODEL.ROI_DENSEPOSE_HEAD.DECODER_NORM + # fmt: on + + self.scale_heads = [] + for in_feature in self.in_features: + head_ops = [] + head_length = max( + 1, int(np.log2(feature_strides[in_feature]) - np.log2(self.common_stride)) + ) + for k in range(head_length): + conv = Conv2d( + feature_channels[in_feature] if k == 0 else conv_dims, + conv_dims, + kernel_size=3, + stride=1, + padding=1, + bias=not norm, + norm=get_norm(norm, conv_dims), + activation=F.relu, + ) + weight_init.c2_msra_fill(conv) + head_ops.append(conv) + if feature_strides[in_feature] != self.common_stride: + head_ops.append( + nn.Upsample(scale_factor=2, mode="bilinear", align_corners=False) + ) + self.scale_heads.append(nn.Sequential(*head_ops)) + self.add_module(in_feature, self.scale_heads[-1]) + self.predictor = Conv2d(conv_dims, num_classes, kernel_size=1, stride=1, padding=0) + weight_init.c2_msra_fill(self.predictor) + + def forward(self, features: List[torch.Tensor]): + for i, _ in enumerate(self.in_features): + if i == 0: + x = self.scale_heads[i](features[i]) + else: + x = x + self.scale_heads[i](features[i]) + x = self.predictor(x) + return x + + +@ROI_HEADS_REGISTRY.register() +class DensePoseROIHeads(StandardROIHeads): + """ + A Standard ROIHeads which contains an addition of DensePose head. + """ + + def __init__(self, cfg, input_shape): + super().__init__(cfg, input_shape) + self._init_densepose_head(cfg, input_shape) + + def _init_densepose_head(self, cfg, input_shape): + # fmt: off + self.densepose_on = cfg.MODEL.DENSEPOSE_ON + if not self.densepose_on: + return + self.densepose_data_filter = build_densepose_data_filter(cfg) + dp_pooler_resolution = cfg.MODEL.ROI_DENSEPOSE_HEAD.POOLER_RESOLUTION + dp_pooler_sampling_ratio = cfg.MODEL.ROI_DENSEPOSE_HEAD.POOLER_SAMPLING_RATIO + dp_pooler_type = cfg.MODEL.ROI_DENSEPOSE_HEAD.POOLER_TYPE + self.use_decoder = cfg.MODEL.ROI_DENSEPOSE_HEAD.DECODER_ON + # fmt: on + if self.use_decoder: + dp_pooler_scales = (1.0 / input_shape[self.in_features[0]].stride,) + else: + dp_pooler_scales = tuple(1.0 / input_shape[k].stride for k in self.in_features) + in_channels = [input_shape[f].channels for f in self.in_features][0] + + if self.use_decoder: + self.decoder = Decoder(cfg, input_shape, self.in_features) + + self.densepose_pooler = ROIPooler( + output_size=dp_pooler_resolution, + scales=dp_pooler_scales, + sampling_ratio=dp_pooler_sampling_ratio, + pooler_type=dp_pooler_type, + ) + self.densepose_head = build_densepose_head(cfg, in_channels) + self.densepose_predictor = build_densepose_predictor( + cfg, self.densepose_head.n_out_channels + ) + self.densepose_losses = build_densepose_losses(cfg) + self.embedder = build_densepose_embedder(cfg) + + def _forward_densepose(self, features: Dict[str, torch.Tensor], instances: List[Instances]): + """ + Forward logic of the densepose prediction branch. + + Args: + features (dict[str, Tensor]): input data as a mapping from feature + map name to tensor. 
Axis 0 represents the number of images `N` in + the input data; axes 1-3 are channels, height, and width, which may + vary between feature maps (e.g., if a feature pyramid is used). + instances (list[Instances]): length `N` list of `Instances`. The i-th + `Instances` contains instances for the i-th input image, + In training, they can be the proposals. + In inference, they can be the predicted boxes. + + Returns: + In training, a dict of losses. + In inference, update `instances` with new fields "densepose" and return it. + """ + if not self.densepose_on: + return {} if self.training else instances + + features_list = [features[f] for f in self.in_features] + if self.training: + proposals, _ = select_foreground_proposals(instances, self.num_classes) + features_list, proposals = self.densepose_data_filter(features_list, proposals) + if len(proposals) > 0: + proposal_boxes = [x.proposal_boxes for x in proposals] + + if self.use_decoder: + # pyre-fixme[29]: `Union[nn.Module, torch.Tensor]` is not a + # function. + features_list = [self.decoder(features_list)] + + features_dp = self.densepose_pooler(features_list, proposal_boxes) + densepose_head_outputs = self.densepose_head(features_dp) + densepose_predictor_outputs = self.densepose_predictor(densepose_head_outputs) + densepose_loss_dict = self.densepose_losses( + proposals, densepose_predictor_outputs, embedder=self.embedder + ) + return densepose_loss_dict + else: + pred_boxes = [x.pred_boxes for x in instances] + + if self.use_decoder: + # pyre-fixme[29]: `Union[nn.Module, torch.Tensor]` is not a function. + features_list = [self.decoder(features_list)] + + features_dp = self.densepose_pooler(features_list, pred_boxes) + if len(features_dp) > 0: + densepose_head_outputs = self.densepose_head(features_dp) + densepose_predictor_outputs = self.densepose_predictor(densepose_head_outputs) + else: + densepose_predictor_outputs = None + + densepose_inference(densepose_predictor_outputs, instances) + return instances + + def forward( + self, + images: ImageList, + features: Dict[str, torch.Tensor], + proposals: List[Instances], + targets: Optional[List[Instances]] = None, + ): + instances, losses = super().forward(images, features, proposals, targets) + del targets, images + + if self.training: + losses.update(self._forward_densepose(features, instances)) + return instances, losses + + def forward_with_given_boxes( + self, features: Dict[str, torch.Tensor], instances: List[Instances] + ): + """ + Use the given boxes in `instances` to produce other (non-box) per-ROI outputs. + + This is useful for downstream tasks where a box is known, but need to obtain + other attributes (outputs of other heads). + Test-time augmentation also uses this. + + Args: + features: same as in `forward()` + instances (list[Instances]): instances to predict other outputs. Expect the keys + "pred_boxes" and "pred_classes" to exist. + + Returns: + instances (list[Instances]): + the same `Instances` objects, with extra + fields such as `pred_masks` or `pred_keypoints`. 
+ """ + + instances = super().forward_with_given_boxes(features, instances) + instances = self._forward_densepose(features, instances) + return instances diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/roi_heads/v1convx.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/roi_heads/v1convx.py new file mode 100644 index 0000000..df79f65 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/roi_heads/v1convx.py @@ -0,0 +1,64 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +import torch +from torch import nn +from torch.nn import functional as F + +from detectron2.config import CfgNode +from detectron2.layers import Conv2d + +from ..utils import initialize_module_params +from .registry import ROI_DENSEPOSE_HEAD_REGISTRY + + +@ROI_DENSEPOSE_HEAD_REGISTRY.register() +class DensePoseV1ConvXHead(nn.Module): + """ + Fully convolutional DensePose head. + """ + + def __init__(self, cfg: CfgNode, input_channels: int): + """ + Initialize DensePose fully convolutional head + + Args: + cfg (CfgNode): configuration options + input_channels (int): number of input channels + """ + super(DensePoseV1ConvXHead, self).__init__() + # fmt: off + hidden_dim = cfg.MODEL.ROI_DENSEPOSE_HEAD.CONV_HEAD_DIM + kernel_size = cfg.MODEL.ROI_DENSEPOSE_HEAD.CONV_HEAD_KERNEL + self.n_stacked_convs = cfg.MODEL.ROI_DENSEPOSE_HEAD.NUM_STACKED_CONVS + # fmt: on + pad_size = kernel_size // 2 + n_channels = input_channels + for i in range(self.n_stacked_convs): + layer = Conv2d(n_channels, hidden_dim, kernel_size, stride=1, padding=pad_size) + layer_name = self._get_layer_name(i) + self.add_module(layer_name, layer) + n_channels = hidden_dim + self.n_out_channels = n_channels + initialize_module_params(self) + + def forward(self, features: torch.Tensor): + """ + Apply DensePose fully convolutional head to the input features + + Args: + features (tensor): input features + Result: + A tensor of DensePose head outputs + """ + x = features + output = x + for i in range(self.n_stacked_convs): + layer_name = self._get_layer_name(i) + x = getattr(self, layer_name)(x) + x = F.relu(x) + output = x + return output + + def _get_layer_name(self, i: int): + layer_name = "body_conv_fcn{}".format(i + 1) + return layer_name diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/test_time_augmentation.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/test_time_augmentation.py new file mode 100644 index 0000000..bf36022 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/test_time_augmentation.py @@ -0,0 +1,207 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+import copy +import numpy as np +import torch +from fvcore.transforms import HFlipTransform, TransformList +from torch.nn import functional as F + +from detectron2.data.transforms import RandomRotation, RotationTransform, apply_transform_gens +from detectron2.modeling.postprocessing import detector_postprocess +from detectron2.modeling.test_time_augmentation import DatasetMapperTTA, GeneralizedRCNNWithTTA + +from ..converters import HFlipConverter + + +class DensePoseDatasetMapperTTA(DatasetMapperTTA): + def __init__(self, cfg): + super().__init__(cfg=cfg) + self.angles = cfg.TEST.AUG.ROTATION_ANGLES + + def __call__(self, dataset_dict): + ret = super().__call__(dataset_dict=dataset_dict) + numpy_image = dataset_dict["image"].permute(1, 2, 0).numpy() + for angle in self.angles: + rotate = RandomRotation(angle=angle, expand=True) + new_numpy_image, tfms = apply_transform_gens([rotate], np.copy(numpy_image)) + torch_image = torch.from_numpy(np.ascontiguousarray(new_numpy_image.transpose(2, 0, 1))) + dic = copy.deepcopy(dataset_dict) + # In DatasetMapperTTA, there is a pre_tfm transform (resize or no-op) that is + # added at the beginning of each TransformList. That's '.transforms[0]'. + dic["transforms"] = TransformList( + [ret[-1]["transforms"].transforms[0]] + tfms.transforms + ) + dic["image"] = torch_image + ret.append(dic) + return ret + + +class DensePoseGeneralizedRCNNWithTTA(GeneralizedRCNNWithTTA): + def __init__(self, cfg, model, transform_data, tta_mapper=None, batch_size=1): + """ + Args: + cfg (CfgNode): + model (GeneralizedRCNN): a GeneralizedRCNN to apply TTA on. + transform_data (DensePoseTransformData): contains symmetry label + transforms used for horizontal flip + tta_mapper (callable): takes a dataset dict and returns a list of + augmented versions of the dataset dict. Defaults to + `DatasetMapperTTA(cfg)`. + batch_size (int): batch the augmented images into this batch size for inference. 
+ """ + self._transform_data = transform_data.to(model.device) + super().__init__(cfg=cfg, model=model, tta_mapper=tta_mapper, batch_size=batch_size) + + # the implementation follows closely the one from detectron2/modeling + def _inference_one_image(self, input): + """ + Args: + input (dict): one dataset dict with "image" field being a CHW tensor + + Returns: + dict: one output dict + """ + orig_shape = (input["height"], input["width"]) + # For some reason, resize with uint8 slightly increases box AP but decreases densepose AP + input["image"] = input["image"].to(torch.uint8) + augmented_inputs, tfms = self._get_augmented_inputs(input) + # Detect boxes from all augmented versions + with self._turn_off_roi_heads(["mask_on", "keypoint_on", "densepose_on"]): + # temporarily disable roi heads + all_boxes, all_scores, all_classes = self._get_augmented_boxes(augmented_inputs, tfms) + merged_instances = self._merge_detections(all_boxes, all_scores, all_classes, orig_shape) + + if self.cfg.MODEL.MASK_ON or self.cfg.MODEL.DENSEPOSE_ON: + # Use the detected boxes to obtain new fields + augmented_instances = self._rescale_detected_boxes( + augmented_inputs, merged_instances, tfms + ) + # run forward on the detected boxes + outputs = self._batch_inference(augmented_inputs, augmented_instances) + # Delete now useless variables to avoid being out of memory + del augmented_inputs, augmented_instances + # average the predictions + if self.cfg.MODEL.MASK_ON: + merged_instances.pred_masks = self._reduce_pred_masks(outputs, tfms) + if self.cfg.MODEL.DENSEPOSE_ON: + merged_instances.pred_densepose = self._reduce_pred_densepose(outputs, tfms) + # postprocess + merged_instances = detector_postprocess(merged_instances, *orig_shape) + return {"instances": merged_instances} + else: + return {"instances": merged_instances} + + def _get_augmented_boxes(self, augmented_inputs, tfms): + # Heavily based on detectron2/modeling/test_time_augmentation.py + # Only difference is that RotationTransform is excluded from bbox computation + # 1: forward with all augmented images + outputs = self._batch_inference(augmented_inputs) + # 2: union the results + all_boxes = [] + all_scores = [] + all_classes = [] + for output, tfm in zip(outputs, tfms): + # Need to inverse the transforms on boxes, to obtain results on original image + if not any(isinstance(t, RotationTransform) for t in tfm.transforms): + # Some transforms can't compute bbox correctly + pred_boxes = output.pred_boxes.tensor + original_pred_boxes = tfm.inverse().apply_box(pred_boxes.cpu().numpy()) + all_boxes.append(torch.from_numpy(original_pred_boxes).to(pred_boxes.device)) + all_scores.extend(output.scores) + all_classes.extend(output.pred_classes) + all_boxes = torch.cat(all_boxes, dim=0) + return all_boxes, all_scores, all_classes + + def _reduce_pred_densepose(self, outputs, tfms): + # Should apply inverse transforms on densepose preds. + # We assume only rotation, resize & flip are used. 
pred_masks is a scale-invariant + # representation, so we handle the other ones specially + for idx, (output, tfm) in enumerate(zip(outputs, tfms)): + for t in tfm.transforms: + for attr in ["coarse_segm", "fine_segm", "u", "v"]: + setattr( + output.pred_densepose, + attr, + _inverse_rotation( + getattr(output.pred_densepose, attr), output.pred_boxes.tensor, t + ), + ) + if any(isinstance(t, HFlipTransform) for t in tfm.transforms): + output.pred_densepose = HFlipConverter.convert( + output.pred_densepose, self._transform_data + ) + self._incremental_avg_dp(outputs[0].pred_densepose, output.pred_densepose, idx) + return outputs[0].pred_densepose + + # incrementally computed average: u_(n + 1) = u_n + (x_(n+1) - u_n) / (n + 1). + def _incremental_avg_dp(self, avg, new_el, idx): + for attr in ["coarse_segm", "fine_segm", "u", "v"]: + setattr(avg, attr, (getattr(avg, attr) * idx + getattr(new_el, attr)) / (idx + 1)) + if idx: + # Deletion of the > 0 index intermediary values to prevent GPU OOM + setattr(new_el, attr, None) + return avg + + +def _inverse_rotation(densepose_attrs, boxes, transform): + # resample outputs to image size and rotate back the densepose preds + # on the rotated images to the space of the original image + if len(boxes) == 0 or not isinstance(transform, RotationTransform): + return densepose_attrs + boxes = boxes.int().cpu().numpy() + wh_boxes = boxes[:, 2:] - boxes[:, :2] # bboxes in the rotated space + inv_boxes = rotate_box_inverse(transform, boxes).astype(int) # bboxes in original image + wh_diff = (inv_boxes[:, 2:] - inv_boxes[:, :2] - wh_boxes) // 2 # diff between new/old bboxes + rotation_matrix = torch.tensor([transform.rm_image]).to(device=densepose_attrs.device).float() + rotation_matrix[:, :, -1] = 0 + # To apply grid_sample for rotation, we need to have enough space to fit the original and + # rotated bboxes. l_bds and r_bds are the left/right bounds that will be used to + # crop the difference once the rotation is done + l_bds = np.maximum(0, -wh_diff) + for i in range(len(densepose_attrs)): + if min(wh_boxes[i]) <= 0: + continue + densepose_attr = densepose_attrs[[i]].clone() + # 1. Interpolate densepose attribute to size of the rotated bbox + densepose_attr = F.interpolate(densepose_attr, wh_boxes[i].tolist()[::-1], mode="bilinear") + # 2. Pad the interpolated attribute so it has room for the original + rotated bbox + densepose_attr = F.pad(densepose_attr, tuple(np.repeat(np.maximum(0, wh_diff[i]), 2))) + # 3. Compute rotation grid and transform + grid = F.affine_grid(rotation_matrix, size=densepose_attr.shape) + densepose_attr = F.grid_sample(densepose_attr, grid) + # 4. 
Compute right bounds and crop the densepose_attr to the size of the original bbox
+        r_bds = densepose_attr.shape[2:][::-1] - l_bds[i]
+        densepose_attr = densepose_attr[:, :, l_bds[i][1] : r_bds[1], l_bds[i][0] : r_bds[0]]
+        if min(densepose_attr.shape) > 0:
+            # Interpolate back to the original size of the densepose attribute
+            densepose_attr = F.interpolate(
+                densepose_attr, densepose_attrs.shape[-2:], mode="bilinear"
+            )
+            # Adding a very small probability to the background class to fill padded zones
+            densepose_attr[:, 0] += 1e-10
+            densepose_attrs[i] = densepose_attr
+    return densepose_attrs
+
+
+def rotate_box_inverse(rot_tfm, rotated_box):
+    """
+    rotated_box is a N * 4 array of [x0, y0, x1, y1] boxes
+    When a bbox is rotated, it gets bigger, because we need to surround the tilted bbox
+    So when a bbox is rotated then inverse-rotated, it is much bigger than the original
+    This function aims to invert the rotation on the box, but also resize it to its original size
+    """
+    # 1. Compute the inverse rotation of the rotated bboxes (bigger than the original box)
+    invrot_box = rot_tfm.inverse().apply_box(rotated_box)
+    h, w = rotated_box[:, 3] - rotated_box[:, 1], rotated_box[:, 2] - rotated_box[:, 0]
+    ih, iw = invrot_box[:, 3] - invrot_box[:, 1], invrot_box[:, 2] - invrot_box[:, 0]
+    assert 2 * rot_tfm.abs_sin ** 2 != 1, "45 degrees angle can't be inverted"
+    # 2. Inverse the corresponding computation in the rotation transform
+    # to get the original height/width of the rotated boxes
+    orig_h = (h * rot_tfm.abs_cos - w * rot_tfm.abs_sin) / (1 - 2 * rot_tfm.abs_sin ** 2)
+    orig_w = (w * rot_tfm.abs_cos - h * rot_tfm.abs_sin) / (1 - 2 * rot_tfm.abs_sin ** 2)
+    # 3. Resize the inverse-rotated bboxes to their original size
+    invrot_box[:, 0] += (iw - orig_w) / 2
+    invrot_box[:, 1] += (ih - orig_h) / 2
+    invrot_box[:, 2] -= (iw - orig_w) / 2
+    invrot_box[:, 3] -= (ih - orig_h) / 2
+
+    return invrot_box
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/modeling/utils.py b/motion-gan-pipeline/preprocessing/third/densepose/modeling/utils.py
new file mode 100644
index 0000000..2e76eb9
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/modeling/utils.py
@@ -0,0 +1,11 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+from torch import nn
+
+
+def initialize_module_params(module: nn.Module) -> None:
+    for name, param in module.named_parameters():
+        if "bias" in name:
+            nn.init.constant_(param, 0)
+        elif "weight" in name:
+            nn.init.kaiming_normal_(param, mode="fan_out", nonlinearity="relu")
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/structures/__init__.py b/motion-gan-pipeline/preprocessing/third/densepose/structures/__init__.py
new file mode 100644
index 0000000..ed32c5e
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/structures/__init__.py
@@ -0,0 +1,17 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
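A hedged round-trip check for `rotate_box_inverse` defined above (a sketch, not shipped code): rotating an axis-aligned box and then inverting should approximately recover it, except near 45 degrees, where the assert in the implementation fires. The image size, angle, and box values below are arbitrary; `RotationTransform` is detectron2's transform.

    # Sketch: rotate a box, undo it with rotate_box_inverse, compare.
    import numpy as np

    from detectron2.data.transforms import RotationTransform
    from densepose.modeling.test_time_augmentation import rotate_box_inverse

    tfm = RotationTransform(h=480, w=640, angle=20, expand=True)
    box = np.array([[100.0, 120.0, 300.0, 260.0]])  # [x0, y0, x1, y1]

    rotated = tfm.apply_box(box)  # enlarged box surrounding the tilted original
    recovered = rotate_box_inverse(tfm, rotated)

    print(np.abs(recovered - box).max())  # expected to be small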
+ +from .chart import DensePoseChartPredictorOutput +from .chart_confidence import decorate_predictor_output_class_with_confidences +from .cse_confidence import decorate_cse_predictor_output_class_with_confidences +from .chart_result import ( + DensePoseChartResult, + DensePoseChartResultWithConfidences, + quantize_densepose_chart_result, + compress_quantized_densepose_chart_result, + decompress_compressed_densepose_chart_result, +) +from .cse import DensePoseEmbeddingPredictorOutput +from .data_relative import DensePoseDataRelative +from .list import DensePoseList +from .mesh import Mesh, create_mesh +from .transform_data import DensePoseTransformData, normalized_coords_transform diff --git a/motion-gan-pipeline/preprocessing/third/densepose/structures/chart.py b/motion-gan-pipeline/preprocessing/third/densepose/structures/chart.py new file mode 100644 index 0000000..115cc08 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/structures/chart.py @@ -0,0 +1,70 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from dataclasses import dataclass +from typing import Union +import torch + + +@dataclass +class DensePoseChartPredictorOutput: + """ + Predictor output that contains segmentation and inner coordinates predictions for predefined + body parts: + * coarse segmentation, a tensor of shape [N, K, Hout, Wout] + * fine segmentation, a tensor of shape [N, C, Hout, Wout] + * U coordinates, a tensor of shape [N, C, Hout, Wout] + * V coordinates, a tensor of shape [N, C, Hout, Wout] + where + - N is the number of instances + - K is the number of coarse segmentation channels ( + 2 = foreground / background, + 15 = one of 14 body parts / background) + - C is the number of fine segmentation channels ( + 24 fine body parts / background) + - Hout and Wout are height and width of predictions + """ + + coarse_segm: torch.Tensor + fine_segm: torch.Tensor + u: torch.Tensor + v: torch.Tensor + + def __len__(self): + """ + Number of instances (N) in the output + """ + return self.coarse_segm.size(0) + + def __getitem__( + self, item: Union[int, slice, torch.BoolTensor] + ) -> "DensePoseChartPredictorOutput": + """ + Get outputs for the selected instance(s) + + Args: + item (int or slice or tensor): selected items + """ + if isinstance(item, int): + return DensePoseChartPredictorOutput( + coarse_segm=self.coarse_segm[item].unsqueeze(0), + fine_segm=self.fine_segm[item].unsqueeze(0), + u=self.u[item].unsqueeze(0), + v=self.v[item].unsqueeze(0), + ) + else: + return DensePoseChartPredictorOutput( + coarse_segm=self.coarse_segm[item], + fine_segm=self.fine_segm[item], + u=self.u[item], + v=self.v[item], + ) + + def to(self, device: torch.device): + """ + Transfers all tensors to the given device + """ + coarse_segm = self.coarse_segm.to(device) + fine_segm = self.fine_segm.to(device) + u = self.u.to(device) + v = self.v.to(device) + return DensePoseChartPredictorOutput(coarse_segm=coarse_segm, fine_segm=fine_segm, u=u, v=v) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/structures/chart_confidence.py b/motion-gan-pipeline/preprocessing/third/densepose/structures/chart_confidence.py new file mode 100644 index 0000000..01b54d7 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/structures/chart_confidence.py @@ -0,0 +1,98 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
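Downstream code indexes these predictor outputs per detected instance, so here is a small illustrative sketch of the `DensePoseChartPredictorOutput` semantics defined above (dummy shapes: K=2 coarse channels, C=25 fine channels; only torch and the vendored `densepose` package are assumed):

    # Sketch: int indexing keeps a singleton batch dim; slices and boolean
    # masks select subsets, mirroring detectron2's Instances indexing.
    import torch

    from densepose.structures import DensePoseChartPredictorOutput

    out = DensePoseChartPredictorOutput(
        coarse_segm=torch.randn(3, 2, 112, 112),
        fine_segm=torch.randn(3, 25, 112, 112),
        u=torch.rand(3, 25, 112, 112),
        v=torch.rand(3, 25, 112, 112),
    )

    assert len(out) == 3
    assert len(out[1]) == 1    # int -> unsqueeze keeps the batch dimension
    assert len(out[0:2]) == 2  # slice
    assert len(out[torch.tensor([True, False, True])]) == 2  # boolean mask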
+
+from dataclasses import make_dataclass
+from functools import lru_cache
+from typing import Any, Optional
+import torch
+
+
+@lru_cache(maxsize=None)
+def decorate_predictor_output_class_with_confidences(BasePredictorOutput: type) -> type:
+    """
+    Create a new output class from an existing one by adding new attributes
+    related to confidence estimation:
+    - sigma_1 (tensor)
+    - sigma_2 (tensor)
+    - kappa_u (tensor)
+    - kappa_v (tensor)
+    - fine_segm_confidence (tensor)
+    - coarse_segm_confidence (tensor)
+
+    Details on confidence estimation parameters can be found in:
+    N. Neverova, D. Novotny, A. Vedaldi "Correlated Uncertainty for Learning
+        Dense Correspondences from Noisy Labels", p. 918--926, in Proc. NIPS 2019
+    A. Sanakoyeu et al., Transferring Dense Pose to Proximal Animal Classes, CVPR 2020
+
+    The new class inherits the provided `BasePredictorOutput` class,
+    its name is composed of the name of the provided class and
+    "WithConfidences" suffix.
+
+    Args:
+        BasePredictorOutput (type): output type to which confidence data
+            is to be added, assumed to be a dataclass
+    Return:
+        New dataclass derived from the provided one that has attributes
+        for confidence estimation
+    """
+
+    PredictorOutput = make_dataclass(
+        BasePredictorOutput.__name__ + "WithConfidences",
+        fields=[  # pyre-ignore[6]
+            ("sigma_1", Optional[torch.Tensor], None),
+            ("sigma_2", Optional[torch.Tensor], None),
+            ("kappa_u", Optional[torch.Tensor], None),
+            ("kappa_v", Optional[torch.Tensor], None),
+            ("fine_segm_confidence", Optional[torch.Tensor], None),
+            ("coarse_segm_confidence", Optional[torch.Tensor], None),
+        ],
+        bases=(BasePredictorOutput,),
+    )
+
+    # add possibility to index PredictorOutput
+
+    def slice_if_not_none(data, item):
+        if data is None:
+            return None
+        if isinstance(item, int):
+            return data[item].unsqueeze(0)
+        return data[item]
+
+    def PredictorOutput_getitem(self, item):
+        PredictorOutput = type(self)
+        base_predictor_output_sliced = super(PredictorOutput, self).__getitem__(item)
+        return PredictorOutput(
+            **base_predictor_output_sliced.__dict__,
+            coarse_segm_confidence=slice_if_not_none(self.coarse_segm_confidence, item),
+            fine_segm_confidence=slice_if_not_none(self.fine_segm_confidence, item),
+            sigma_1=slice_if_not_none(self.sigma_1, item),
+            sigma_2=slice_if_not_none(self.sigma_2, item),
+            kappa_u=slice_if_not_none(self.kappa_u, item),
+            kappa_v=slice_if_not_none(self.kappa_v, item),
+        )
+
+    PredictorOutput.__getitem__ = PredictorOutput_getitem
+
+    def PredictorOutput_to(self, device: torch.device):
+        """
+        Transfers all tensors to the given device
+        """
+        PredictorOutput = type(self)
+        base_predictor_output_to = super(PredictorOutput, self).to(device)  # pyre-ignore[16]
+
+        def to_device_if_tensor(var: Any):
+            if isinstance(var, torch.Tensor):
+                return var.to(device)
+            return var
+
+        return PredictorOutput(
+            **base_predictor_output_to.__dict__,
+            sigma_1=to_device_if_tensor(self.sigma_1),
+            sigma_2=to_device_if_tensor(self.sigma_2),
+            kappa_u=to_device_if_tensor(self.kappa_u),
+            kappa_v=to_device_if_tensor(self.kappa_v),
+            fine_segm_confidence=to_device_if_tensor(self.fine_segm_confidence),
+            coarse_segm_confidence=to_device_if_tensor(self.coarse_segm_confidence),
+        )
+
+    PredictorOutput.to = PredictorOutput_to
+    return PredictorOutput
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/structures/chart_result.py b/motion-gan-pipeline/preprocessing/third/densepose/structures/chart_result.py
new file mode 100644
index 0000000..003933d
--- /dev/null
+++ 
b/motion-gan-pipeline/preprocessing/third/densepose/structures/chart_result.py @@ -0,0 +1,183 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from dataclasses import dataclass +from typing import Any, Optional, Tuple +import torch + + +@dataclass +class DensePoseChartResult: + """ + DensePose results for chart-based methods represented by labels and inner + coordinates (U, V) of individual charts. Each chart is a 2D manifold + that has an associated label and is parameterized by two coordinates U and V. + Both U and V take values in [0, 1]. + Thus the results are represented by two tensors: + - labels (tensor [H, W] of long): contains estimated label for each pixel of + the detection bounding box of size (H, W) + - uv (tensor [2, H, W] of float): contains estimated U and V coordinates + for each pixel of the detection bounding box of size (H, W) + """ + + labels: torch.Tensor + uv: torch.Tensor + + def to(self, device: torch.device): + """ + Transfers all tensors to the given device + """ + labels = self.labels.to(device) + uv = self.uv.to(device) + return DensePoseChartResult(labels=labels, uv=uv) + + +@dataclass +class DensePoseChartResultWithConfidences: + """ + We add confidence values to DensePoseChartResult + Thus the results are represented by two tensors: + - labels (tensor [H, W] of long): contains estimated label for each pixel of + the detection bounding box of size (H, W) + - uv (tensor [2, H, W] of float): contains estimated U and V coordinates + for each pixel of the detection bounding box of size (H, W) + Plus one [H, W] tensor of float for each confidence type + """ + + labels: torch.Tensor + uv: torch.Tensor + sigma_1: Optional[torch.Tensor] = None + sigma_2: Optional[torch.Tensor] = None + kappa_u: Optional[torch.Tensor] = None + kappa_v: Optional[torch.Tensor] = None + fine_segm_confidence: Optional[torch.Tensor] = None + coarse_segm_confidence: Optional[torch.Tensor] = None + + def to(self, device: torch.device): + """ + Transfers all tensors to the given device, except if their value is None + """ + + def to_device_if_tensor(var: Any): + if isinstance(var, torch.Tensor): + return var.to(device) + return var + + return DensePoseChartResultWithConfidences( + labels=self.labels.to(device), + uv=self.uv.to(device), + sigma_1=to_device_if_tensor(self.sigma_1), + sigma_2=to_device_if_tensor(self.sigma_2), + kappa_u=to_device_if_tensor(self.kappa_u), + kappa_v=to_device_if_tensor(self.kappa_v), + fine_segm_confidence=to_device_if_tensor(self.fine_segm_confidence), + coarse_segm_confidence=to_device_if_tensor(self.coarse_segm_confidence), + ) + + +@dataclass +class DensePoseChartResultQuantized: + """ + DensePose results for chart-based methods represented by labels and quantized + inner coordinates (U, V) of individual charts. Each chart is a 2D manifold + that has an associated label and is parameterized by two coordinates U and V. + Both U and V take values in [0, 1]. 
+ Quantized coordinates Uq and Vq have uint8 values which are obtained as: + Uq = U * 255 (hence 0 <= Uq <= 255) + Vq = V * 255 (hence 0 <= Vq <= 255) + Thus the results are represented by one tensor: + - labels_uv_uint8 (tensor [3, H, W] of uint8): contains estimated label + and quantized coordinates Uq and Vq for each pixel of the detection + bounding box of size (H, W) + """ + + labels_uv_uint8: torch.Tensor + + def to(self, device: torch.device): + """ + Transfers all tensors to the given device + """ + labels_uv_uint8 = self.labels_uv_uint8.to(device) + return DensePoseChartResultQuantized(labels_uv_uint8=labels_uv_uint8) + + +@dataclass +class DensePoseChartResultCompressed: + """ + DensePose results for chart-based methods represented by a PNG-encoded string. + The tensor of quantized DensePose results of size [3, H, W] is considered + as an image with 3 color channels. PNG compression is applied and the result + is stored as a Base64-encoded string. The following attributes are defined: + - shape_chw (tuple of 3 int): contains shape of the result tensor + (number of channels, height, width) + - labels_uv_str (str): contains Base64-encoded results tensor of size + [3, H, W] compressed with PNG compression methods + """ + + shape_chw: Tuple[int, int, int] + labels_uv_str: str + + +def quantize_densepose_chart_result(result: DensePoseChartResult) -> DensePoseChartResultQuantized: + """ + Applies quantization to DensePose chart-based result. + + Args: + result (DensePoseChartResult): DensePose chart-based result + Return: + Quantized DensePose chart-based result (DensePoseChartResultQuantized) + """ + h, w = result.labels.shape + labels_uv_uint8 = torch.zeros([3, h, w], dtype=torch.uint8, device=result.labels.device) + labels_uv_uint8[0] = result.labels + labels_uv_uint8[1:] = (result.uv * 255).clamp(0, 255).byte() + return DensePoseChartResultQuantized(labels_uv_uint8=labels_uv_uint8) + + +def compress_quantized_densepose_chart_result( + result: DensePoseChartResultQuantized, +) -> DensePoseChartResultCompressed: + """ + Compresses quantized DensePose chart-based result + + Args: + result (DensePoseChartResultQuantized): quantized DensePose chart-based result + Return: + Compressed DensePose chart-based result (DensePoseChartResultCompressed) + """ + import base64 + import numpy as np + from io import BytesIO + from PIL import Image + + labels_uv_uint8_np_chw = result.labels_uv_uint8.cpu().numpy() + labels_uv_uint8_np_hwc = np.moveaxis(labels_uv_uint8_np_chw, 0, -1) + im = Image.fromarray(labels_uv_uint8_np_hwc) + fstream = BytesIO() + im.save(fstream, format="png", optimize=True) + labels_uv_str = base64.encodebytes(fstream.getvalue()).decode() + shape_chw = labels_uv_uint8_np_chw.shape + return DensePoseChartResultCompressed(labels_uv_str=labels_uv_str, shape_chw=shape_chw) + + +def decompress_compressed_densepose_chart_result( + result: DensePoseChartResultCompressed, +) -> DensePoseChartResultQuantized: + """ + Decompresses DensePose chart-based result encoded into a base64 string + + Args: + result (DensePoseChartResultCompressed): compressed DensePose chart result + Return: + Quantized DensePose chart-based result (DensePoseChartResultQuantized) + """ + import base64 + import numpy as np + from io import BytesIO + from PIL import Image + + fstream = BytesIO(base64.decodebytes(result.labels_uv_str.encode())) + im = Image.open(fstream) + labels_uv_uint8_np_chw = np.moveaxis(np.array(im, dtype=np.uint8), -1, 0) + return DensePoseChartResultQuantized( + 
labels_uv_uint8=torch.from_numpy(labels_uv_uint8_np_chw.reshape(result.shape_chw))
+    )
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/structures/cse.py b/motion-gan-pipeline/preprocessing/third/densepose/structures/cse.py
new file mode 100644
index 0000000..9cd65da
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/structures/cse.py
@@ -0,0 +1,52 @@
+# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
+
+from dataclasses import dataclass
+from typing import Union
+import torch
+
+
+@dataclass
+class DensePoseEmbeddingPredictorOutput:
+    """
+    Predictor output that contains embedding and coarse segmentation data:
+     * embedding: float tensor of size [N, D, H, W], contains estimated embeddings
+     * coarse_segm: float tensor of size [N, K, H, W]
+    Here D = MODEL.ROI_DENSEPOSE_HEAD.CSE.EMBED_SIZE
+         K = MODEL.ROI_DENSEPOSE_HEAD.NUM_COARSE_SEGM_CHANNELS
+    """
+
+    embedding: torch.Tensor
+    coarse_segm: torch.Tensor
+
+    def __len__(self):
+        """
+        Number of instances (N) in the output
+        """
+        return self.coarse_segm.size(0)
+
+    def __getitem__(
+        self, item: Union[int, slice, torch.BoolTensor]
+    ) -> "DensePoseEmbeddingPredictorOutput":
+        """
+        Get outputs for the selected instance(s)
+
+        Args:
+            item (int or slice or tensor): selected items
+        """
+        if isinstance(item, int):
+            return DensePoseEmbeddingPredictorOutput(
+                coarse_segm=self.coarse_segm[item].unsqueeze(0),
+                embedding=self.embedding[item].unsqueeze(0),
+            )
+        else:
+            return DensePoseEmbeddingPredictorOutput(
+                coarse_segm=self.coarse_segm[item], embedding=self.embedding[item]
+            )
+
+    def to(self, device: torch.device):
+        """
+        Transfers all tensors to the given device
+        """
+        coarse_segm = self.coarse_segm.to(device)
+        embedding = self.embedding.to(device)
+        return DensePoseEmbeddingPredictorOutput(coarse_segm=coarse_segm, embedding=embedding)
diff --git a/motion-gan-pipeline/preprocessing/third/densepose/structures/cse_confidence.py b/motion-gan-pipeline/preprocessing/third/densepose/structures/cse_confidence.py
new file mode 100644
index 0000000..9a1319c
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/densepose/structures/cse_confidence.py
@@ -0,0 +1,78 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+from dataclasses import make_dataclass
+from functools import lru_cache
+from typing import Any, Optional
+import torch
+
+
+@lru_cache(maxsize=None)
+def decorate_cse_predictor_output_class_with_confidences(BasePredictorOutput: type) -> type:
+    """
+    Create a new output class from an existing one by adding new attributes
+    related to confidence estimation:
+    - coarse_segm_confidence (tensor)
+
+    Details on confidence estimation parameters can be found in:
+    N. Neverova, D. Novotny, A. Vedaldi "Correlated Uncertainty for Learning
+        Dense Correspondences from Noisy Labels", p. 918--926, in Proc. NIPS 2019
+    A. Sanakoyeu et al., Transferring Dense Pose to Proximal Animal Classes, CVPR 2020
+
+    The new class inherits the provided `BasePredictorOutput` class,
+    its name is composed of the name of the provided class and
+    "WithConfidences" suffix.
+ + Args: + BasePredictorOutput (type): output type to which confidence data + is to be added, assumed to be a dataclass + Return: + New dataclass derived from the provided one that has attributes + for confidence estimation + """ + + PredictorOutput = make_dataclass( + BasePredictorOutput.__name__ + "WithConfidences", + fields=[ # pyre-ignore[6] + ("coarse_segm_confidence", Optional[torch.Tensor], None), + ], + bases=(BasePredictorOutput,), + ) + + # add possibility to index PredictorOutput + + def slice_if_not_none(data, item): + if data is None: + return None + if isinstance(item, int): + return data[item].unsqueeze(0) + return data[item] + + def PredictorOutput_getitem(self, item): + PredictorOutput = type(self) + base_predictor_output_sliced = super(PredictorOutput, self).__getitem__(item) + return PredictorOutput( + **base_predictor_output_sliced.__dict__, + coarse_segm_confidence=slice_if_not_none(self.coarse_segm_confidence, item), + ) + + PredictorOutput.__getitem__ = PredictorOutput_getitem + + def PredictorOutput_to(self, device: torch.device): + """ + Transfers all tensors to the given device + """ + PredictorOutput = type(self) + base_predictor_output_to = super(PredictorOutput, self).to(device) # pyre-ignore[16] + + def to_device_if_tensor(var: Any): + if isinstance(var, torch.Tensor): + return var.to(device) + return var + + return PredictorOutput( + **base_predictor_output_to.__dict__, + coarse_segm_confidence=to_device_if_tensor(self.coarse_segm_confidence), + ) + + PredictorOutput.to = PredictorOutput_to + return PredictorOutput diff --git a/motion-gan-pipeline/preprocessing/third/densepose/structures/data_relative.py b/motion-gan-pipeline/preprocessing/third/densepose/structures/data_relative.py new file mode 100644 index 0000000..a148fa7 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/structures/data_relative.py @@ -0,0 +1,243 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import numpy as np +import torch +from torch.nn import functional as F + +from densepose.data.meshes.catalog import MeshCatalog +from densepose.structures.mesh import load_mesh_symmetry +from densepose.structures.transform_data import DensePoseTransformData + + +class DensePoseDataRelative(object): + """ + Dense pose relative annotations that can be applied to any bounding box: + x - normalized X coordinates [0, 255] of annotated points + y - normalized Y coordinates [0, 255] of annotated points + i - body part labels 0,...,24 for annotated points + u - body part U coordinates [0, 1] for annotated points + v - body part V coordinates [0, 1] for annotated points + segm - 256x256 segmentation mask with values 0,...,14 + To obtain absolute x and y data wrt some bounding box one needs to first + divide the data by 256, multiply by the respective bounding box size + and add bounding box offset: + x_img = x0 + x_norm * w / 256.0 + y_img = y0 + y_norm * h / 256.0 + Segmentation masks are typically sampled to get image-based masks. 
+ """ + + # Key for normalized X coordinates in annotation dict + X_KEY = "dp_x" + # Key for normalized Y coordinates in annotation dict + Y_KEY = "dp_y" + # Key for U part coordinates in annotation dict (used in chart-based annotations) + U_KEY = "dp_U" + # Key for V part coordinates in annotation dict (used in chart-based annotations) + V_KEY = "dp_V" + # Key for I point labels in annotation dict (used in chart-based annotations) + I_KEY = "dp_I" + # Key for segmentation mask in annotation dict + S_KEY = "dp_masks" + # Key for vertex ids (used in continuous surface embeddings annotations) + VERTEX_IDS_KEY = "dp_vertex" + # Key for mesh id (used in continuous surface embeddings annotations) + MESH_NAME_KEY = "ref_model" + # Number of body parts in segmentation masks + N_BODY_PARTS = 14 + # Number of parts in point labels + N_PART_LABELS = 24 + MASK_SIZE = 256 + + def __init__(self, annotation, cleanup=False): + self.x = torch.as_tensor(annotation[DensePoseDataRelative.X_KEY]) + self.y = torch.as_tensor(annotation[DensePoseDataRelative.Y_KEY]) + if ( + DensePoseDataRelative.I_KEY in annotation + and DensePoseDataRelative.U_KEY in annotation + and DensePoseDataRelative.V_KEY in annotation + ): + self.i = torch.as_tensor(annotation[DensePoseDataRelative.I_KEY]) + self.u = torch.as_tensor(annotation[DensePoseDataRelative.U_KEY]) + self.v = torch.as_tensor(annotation[DensePoseDataRelative.V_KEY]) + if ( + DensePoseDataRelative.VERTEX_IDS_KEY in annotation + and DensePoseDataRelative.MESH_NAME_KEY in annotation + ): + self.vertex_ids = torch.as_tensor( + annotation[DensePoseDataRelative.VERTEX_IDS_KEY], dtype=torch.long + ) + self.mesh_id = MeshCatalog.get_mesh_id(annotation[DensePoseDataRelative.MESH_NAME_KEY]) + if DensePoseDataRelative.S_KEY in annotation: + self.segm = DensePoseDataRelative.extract_segmentation_mask(annotation) + self.device = torch.device("cpu") + if cleanup: + DensePoseDataRelative.cleanup_annotation(annotation) + + def to(self, device): + if self.device == device: + return self + new_data = DensePoseDataRelative.__new__(DensePoseDataRelative) + new_data.x = self.x.to(device) + new_data.y = self.y.to(device) + for attr in ["i", "u", "v", "vertex_ids", "segm"]: + if hasattr(self, attr): + setattr(new_data, attr, getattr(self, attr).to(device)) + if hasattr(self, "mesh_id"): + new_data.mesh_id = self.mesh_id + new_data.device = device + return new_data + + @staticmethod + def extract_segmentation_mask(annotation): + import pycocotools.mask as mask_utils + + # TODO: annotation instance is accepted if it contains either + # DensePose segmentation or instance segmentation. 
However, here we + # only rely on DensePose segmentation + poly_specs = annotation[DensePoseDataRelative.S_KEY] + if isinstance(poly_specs, torch.Tensor): + # data is already given as mask tensors, no need to decode + return poly_specs + segm = torch.zeros((DensePoseDataRelative.MASK_SIZE,) * 2, dtype=torch.float32) + if isinstance(poly_specs, dict): + if poly_specs: + mask = mask_utils.decode(poly_specs) + segm[mask > 0] = 1 + else: + for i in range(len(poly_specs)): + poly_i = poly_specs[i] + if poly_i: + mask_i = mask_utils.decode(poly_i) + segm[mask_i > 0] = i + 1 + return segm + + @staticmethod + def validate_annotation(annotation): + for key in [ + DensePoseDataRelative.X_KEY, + DensePoseDataRelative.Y_KEY, + ]: + if key not in annotation: + return False, "no {key} data in the annotation".format(key=key) + valid_for_iuv_setting = all( + key in annotation + for key in [ + DensePoseDataRelative.I_KEY, + DensePoseDataRelative.U_KEY, + DensePoseDataRelative.V_KEY, + ] + ) + valid_for_cse_setting = all( + key in annotation + for key in [ + DensePoseDataRelative.VERTEX_IDS_KEY, + DensePoseDataRelative.MESH_NAME_KEY, + ] + ) + if not valid_for_iuv_setting and not valid_for_cse_setting: + return ( + False, + "expected either {} (IUV setting) or {} (CSE setting) annotations".format( + ", ".join( + [ + DensePoseDataRelative.I_KEY, + DensePoseDataRelative.U_KEY, + DensePoseDataRelative.V_KEY, + ] + ), + ", ".join( + [ + DensePoseDataRelative.VERTEX_IDS_KEY, + DensePoseDataRelative.MESH_NAME_KEY, + ] + ), + ), + ) + return True, None + + @staticmethod + def cleanup_annotation(annotation): + for key in [ + DensePoseDataRelative.X_KEY, + DensePoseDataRelative.Y_KEY, + DensePoseDataRelative.I_KEY, + DensePoseDataRelative.U_KEY, + DensePoseDataRelative.V_KEY, + DensePoseDataRelative.S_KEY, + DensePoseDataRelative.VERTEX_IDS_KEY, + DensePoseDataRelative.MESH_NAME_KEY, + ]: + if key in annotation: + del annotation[key] + + def apply_transform(self, transforms, densepose_transform_data): + self._transform_pts(transforms, densepose_transform_data) + if hasattr(self, "segm"): + self._transform_segm(transforms, densepose_transform_data) + + def _transform_pts(self, transforms, dp_transform_data): + import detectron2.data.transforms as T + + # NOTE: This assumes that HorizFlipTransform is the only one that does flip + do_hflip = sum(isinstance(t, T.HFlipTransform) for t in transforms.transforms) % 2 == 1 + if do_hflip: + self.x = self.MASK_SIZE - self.x + if hasattr(self, "i"): + self._flip_iuv_semantics(dp_transform_data) + if hasattr(self, "vertex_ids"): + self._flip_vertices() + + for t in transforms.transforms: + if isinstance(t, T.RotationTransform): + xy_scale = np.array((t.w, t.h)) / DensePoseDataRelative.MASK_SIZE + xy = t.apply_coords(np.stack((self.x, self.y), axis=1) * xy_scale) + self.x, self.y = torch.tensor(xy / xy_scale, dtype=self.x.dtype).T + + def _flip_iuv_semantics(self, dp_transform_data: DensePoseTransformData) -> None: + i_old = self.i.clone() + uv_symmetries = dp_transform_data.uv_symmetries + pt_label_symmetries = dp_transform_data.point_label_symmetries + for i in range(self.N_PART_LABELS): + if i + 1 in i_old: + annot_indices_i = i_old == i + 1 + if pt_label_symmetries[i + 1] != i + 1: + self.i[annot_indices_i] = pt_label_symmetries[i + 1] + u_loc = (self.u[annot_indices_i] * 255).long() + v_loc = (self.v[annot_indices_i] * 255).long() + self.u[annot_indices_i] = uv_symmetries["U_transforms"][i][v_loc, u_loc].to( + device=self.u.device + ) + self.v[annot_indices_i] = 
uv_symmetries["V_transforms"][i][v_loc, u_loc].to( + device=self.v.device + ) + + def _flip_vertices(self): + mesh_info = MeshCatalog[MeshCatalog.get_mesh_name(self.mesh_id)] + mesh_symmetry = ( + load_mesh_symmetry(mesh_info.symmetry) if mesh_info.symmetry is not None else None + ) + self.vertex_ids = mesh_symmetry["vertex_transforms"][self.vertex_ids] + + def _transform_segm(self, transforms, dp_transform_data): + import detectron2.data.transforms as T + + # NOTE: This assumes that HorizFlipTransform is the only one that does flip + do_hflip = sum(isinstance(t, T.HFlipTransform) for t in transforms.transforms) % 2 == 1 + if do_hflip: + self.segm = torch.flip(self.segm, [1]) + self._flip_segm_semantics(dp_transform_data) + + for t in transforms.transforms: + if isinstance(t, T.RotationTransform): + self._transform_segm_rotation(t) + + def _flip_segm_semantics(self, dp_transform_data): + old_segm = self.segm.clone() + mask_label_symmetries = dp_transform_data.mask_label_symmetries + for i in range(self.N_BODY_PARTS): + if mask_label_symmetries[i + 1] != i + 1: + self.segm[old_segm == i + 1] = mask_label_symmetries[i + 1] + + def _transform_segm_rotation(self, rotation): + self.segm = F.interpolate(self.segm[None, None, :], (rotation.h, rotation.w)).numpy() + self.segm = torch.tensor(rotation.apply_segmentation(self.segm[0, 0]))[None, None, :] + self.segm = F.interpolate(self.segm, [DensePoseDataRelative.MASK_SIZE] * 2)[0, 0] diff --git a/motion-gan-pipeline/preprocessing/third/densepose/structures/list.py b/motion-gan-pipeline/preprocessing/third/densepose/structures/list.py new file mode 100644 index 0000000..3dc40b0 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/structures/list.py @@ -0,0 +1,70 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+import torch + +from densepose.structures.data_relative import DensePoseDataRelative + + +class DensePoseList(object): + + _TORCH_DEVICE_CPU = torch.device("cpu") + + def __init__(self, densepose_datas, boxes_xyxy_abs, image_size_hw, device=_TORCH_DEVICE_CPU): + assert len(densepose_datas) == len( + boxes_xyxy_abs + ), "Attempt to initialize DensePoseList with {} DensePose datas " "and {} boxes".format( + len(densepose_datas), len(boxes_xyxy_abs) + ) + self.densepose_datas = [] + for densepose_data in densepose_datas: + assert isinstance(densepose_data, DensePoseDataRelative) or densepose_data is None, ( + "Attempt to initialize DensePoseList with DensePose datas " + "of type {}, expected DensePoseDataRelative".format(type(densepose_data)) + ) + densepose_data_ondevice = ( + densepose_data.to(device) if densepose_data is not None else None + ) + self.densepose_datas.append(densepose_data_ondevice) + self.boxes_xyxy_abs = boxes_xyxy_abs.to(device) + self.image_size_hw = image_size_hw + self.device = device + + def to(self, device): + if self.device == device: + return self + return DensePoseList(self.densepose_datas, self.boxes_xyxy_abs, self.image_size_hw, device) + + def __iter__(self): + return iter(self.densepose_datas) + + def __len__(self): + return len(self.densepose_datas) + + def __repr__(self): + s = self.__class__.__name__ + "(" + s += "num_instances={}, ".format(len(self.densepose_datas)) + s += "image_width={}, ".format(self.image_size_hw[1]) + s += "image_height={})".format(self.image_size_hw[0]) + return s + + def __getitem__(self, item): + if isinstance(item, int): + densepose_data_rel = self.densepose_datas[item] + return densepose_data_rel + elif isinstance(item, slice): + densepose_datas_rel = self.densepose_datas[item] + boxes_xyxy_abs = self.boxes_xyxy_abs[item] + return DensePoseList( + densepose_datas_rel, boxes_xyxy_abs, self.image_size_hw, self.device + ) + elif isinstance(item, torch.Tensor) and (item.dtype == torch.bool): + densepose_datas_rel = [self.densepose_datas[i] for i, x in enumerate(item) if x > 0] + boxes_xyxy_abs = self.boxes_xyxy_abs[item] + return DensePoseList( + densepose_datas_rel, boxes_xyxy_abs, self.image_size_hw, self.device + ) + else: + densepose_datas_rel = [self.densepose_datas[i] for i in item] + boxes_xyxy_abs = self.boxes_xyxy_abs[item] + return DensePoseList( + densepose_datas_rel, boxes_xyxy_abs, self.image_size_hw, self.device + ) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/structures/mesh.py b/motion-gan-pipeline/preprocessing/third/densepose/structures/mesh.py new file mode 100644 index 0000000..e3304e7 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/structures/mesh.py @@ -0,0 +1,170 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
All Rights Reserved + +import pickle +from functools import lru_cache +from typing import Dict, Optional, Tuple +import torch + +from detectron2.utils.file_io import PathManager + +from densepose.data.meshes.catalog import MeshCatalog, MeshInfo + + +def _maybe_copy_to_device( + attribute: Optional[torch.Tensor], device: torch.device +) -> Optional[torch.Tensor]: + if attribute is None: + return None + return attribute.to(device) + + +class Mesh: + def __init__( + self, + vertices: Optional[torch.Tensor] = None, + faces: Optional[torch.Tensor] = None, + geodists: Optional[torch.Tensor] = None, + symmetry: Optional[Dict[str, torch.Tensor]] = None, + texcoords: Optional[torch.Tensor] = None, + mesh_info: Optional[MeshInfo] = None, + device: Optional[torch.device] = None, + ): + """ + Args: + vertices (tensor [N, 3] of float32): vertex coordinates in 3D + faces (tensor [M, 3] of long): triangular face represented as 3 + vertex indices + geodists (tensor [N, N] of float32): geodesic distances from + vertex `i` to vertex `j` (optional, default: None) + symmetry (dict: str -> tensor): various mesh symmetry data: + - "vertex_transforms": vertex mapping under horizontal flip, + tensor of size [N] of type long; vertex `i` is mapped to + vertex `tensor[i]` (optional, default: None) + texcoords (tensor [N, 2] of float32): texture coordinates, i.e. global + and normalized mesh UVs (optional, default: None) + mesh_info (MeshInfo type): necessary to load the attributes on-the-go, + can be used instead of passing all the variables one by one + device (torch.device): device of the Mesh. If not provided, will use + the device of the vertices + """ + self._vertices = vertices + self._faces = faces + self._geodists = geodists + self._symmetry = symmetry + self._texcoords = texcoords + self.mesh_info = mesh_info + self.device = device + + assert self._vertices is not None or self.mesh_info is not None + + all_fields = [self._vertices, self._faces, self._geodists, self._texcoords] + + if self.device is None: + for field in all_fields: + if field is not None: + self.device = field.device + break + if self.device is None and symmetry is not None: + for key in symmetry: + self.device = symmetry[key].device + break + self.device = torch.device("cpu") if self.device is None else self.device + + assert all([var.device == self.device for var in all_fields if var is not None]) + if symmetry: + assert all(symmetry[key].device == self.device for key in symmetry) + if texcoords and vertices: + assert len(vertices) == len(texcoords) + + def to(self, device: torch.device): + device_symmetry = self._symmetry + if device_symmetry: + device_symmetry = {key: value.to(device) for key, value in device_symmetry.items()} + return Mesh( + _maybe_copy_to_device(self._vertices, device), + _maybe_copy_to_device(self._faces, device), + _maybe_copy_to_device(self._geodists, device), + device_symmetry, + _maybe_copy_to_device(self._texcoords, device), + self.mesh_info, + device, + ) + + @property + def vertices(self): + if self._vertices is None and self.mesh_info is not None: + self._vertices = load_mesh_data(self.mesh_info.data, "vertices", self.device) + return self._vertices + + @property + def faces(self): + if self._faces is None and self.mesh_info is not None: + self._faces = load_mesh_data(self.mesh_info.data, "faces", self.device) + return self._faces + + @property + def geodists(self): + if self._geodists is None and self.mesh_info is not None: + self._geodists = load_mesh_auxiliary_data(self.mesh_info.geodists, self.device) + 
return self._geodists + + @property + def symmetry(self): + if self._symmetry is None and self.mesh_info is not None: + self._symmetry = load_mesh_symmetry(self.mesh_info.symmetry, self.device) + return self._symmetry + + @property + def texcoords(self): + if self._texcoords is None and self.mesh_info is not None: + self._texcoords = load_mesh_auxiliary_data(self.mesh_info.texcoords, self.device) + return self._texcoords + + def get_geodists(self): + if self.geodists is None: + self.geodists = self._compute_geodists() + return self.geodists + + def _compute_geodists(self): + # TODO: compute using Laplace-Beltrami + geodists = None + return geodists + + +def load_mesh_data( + mesh_fpath: str, field: str, device: Optional[torch.device] = None +) -> Tuple[Optional[torch.Tensor], Optional[torch.Tensor]]: + with PathManager.open(mesh_fpath, "rb") as hFile: + return torch.as_tensor(pickle.load(hFile)[field], dtype=torch.float).to( # pyre-ignore[6] + device + ) + return None + + +def load_mesh_auxiliary_data( + fpath: str, device: Optional[torch.device] = None +) -> Optional[torch.Tensor]: + fpath_local = PathManager.get_local_path(fpath) + with PathManager.open(fpath_local, "rb") as hFile: + return torch.as_tensor(pickle.load(hFile), dtype=torch.float).to(device) # pyre-ignore[6] + return None + + +@lru_cache() +def load_mesh_symmetry( + symmetry_fpath: str, device: Optional[torch.device] = None +) -> Optional[Dict[str, torch.Tensor]]: + with PathManager.open(symmetry_fpath, "rb") as hFile: + symmetry_loaded = pickle.load(hFile) # pyre-ignore[6] + symmetry = { + "vertex_transforms": torch.as_tensor( + symmetry_loaded["vertex_transforms"], dtype=torch.long + ).to(device), + } + return symmetry + return None + + +@lru_cache() +def create_mesh(mesh_name: str, device: Optional[torch.device] = None) -> Mesh: + return Mesh(mesh_info=MeshCatalog[mesh_name], device=device) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/structures/transform_data.py b/motion-gan-pipeline/preprocessing/third/densepose/structures/transform_data.py new file mode 100644 index 0000000..7cac1bb --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/structures/transform_data.py @@ -0,0 +1,71 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from typing import BinaryIO, Dict, Union +import torch + + +def normalized_coords_transform(x0, y0, w, h): + """ + Coordinates transform that maps top left corner to (-1, -1) and bottom + right corner to (1, 1). 
Used for torch.grid_sample to initialize the + grid + """ + + def f(p): + return (2 * (p[0] - x0) / w - 1, 2 * (p[1] - y0) / h - 1) + + return f + + +class DensePoseTransformData(object): + + # Horizontal symmetry label transforms used for horizontal flip + MASK_LABEL_SYMMETRIES = [0, 1, 3, 2, 5, 4, 7, 6, 9, 8, 11, 10, 13, 12, 14] + # fmt: off + POINT_LABEL_SYMMETRIES = [ 0, 1, 2, 4, 3, 6, 5, 8, 7, 10, 9, 12, 11, 14, 13, 16, 15, 18, 17, 20, 19, 22, 21, 24, 23] # noqa + # fmt: on + + def __init__(self, uv_symmetries: Dict[str, torch.Tensor], device: torch.device): + self.mask_label_symmetries = DensePoseTransformData.MASK_LABEL_SYMMETRIES + self.point_label_symmetries = DensePoseTransformData.POINT_LABEL_SYMMETRIES + self.uv_symmetries = uv_symmetries + self.device = torch.device("cpu") + + def to(self, device: torch.device, copy: bool = False) -> "DensePoseTransformData": + """ + Convert transform data to the specified device + + Args: + device (torch.device): device to convert the data to + copy (bool): flag that specifies whether to copy or to reference the data + in case the device is the same + Return: + An instance of `DensePoseTransformData` with data stored on the specified device + """ + if self.device == device and not copy: + return self + uv_symmetry_map = {} + for key in self.uv_symmetries: + uv_symmetry_map[key] = self.uv_symmetries[key].to(device=device, copy=copy) + return DensePoseTransformData(uv_symmetry_map, device) + + @staticmethod + def load(io: Union[str, BinaryIO]): + """ + Args: + io: (str or binary file-like object): input file to load data from + Returns: + An instance of `DensePoseTransformData` with transforms loaded from the file + """ + import scipy.io + + uv_symmetry_map = scipy.io.loadmat(io) + uv_symmetry_map_torch = {} + for key in ["U_transforms", "V_transforms"]: + uv_symmetry_map_torch[key] = [] + map_src = uv_symmetry_map[key] + map_dst = uv_symmetry_map_torch[key] + for i in range(map_src.shape[1]): + map_dst.append(torch.from_numpy(map_src[0, i]).to(dtype=torch.float)) + uv_symmetry_map_torch[key] = torch.stack(map_dst, dim=0) + transform_data = DensePoseTransformData(uv_symmetry_map_torch, device=torch.device("cpu")) + return transform_data diff --git a/motion-gan-pipeline/preprocessing/third/densepose/utils/__init__.py b/motion-gan-pipeline/preprocessing/third/densepose/utils/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/motion-gan-pipeline/preprocessing/third/densepose/utils/dbhelper.py b/motion-gan-pipeline/preprocessing/third/densepose/utils/dbhelper.py new file mode 100644 index 0000000..65b6157 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/utils/dbhelper.py @@ -0,0 +1,147 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from typing import Any, Dict, Optional, Tuple + + +class EntrySelector(object): + """ + Base class for entry selectors + """ + + @staticmethod + def from_string(spec: str) -> "EntrySelector": + if spec == "*": + return AllEntrySelector() + return FieldEntrySelector(spec) + + +class AllEntrySelector(EntrySelector): + """ + Selector that accepts all entries + """ + + SPECIFIER = "*" + + def __call__(self, entry): + return True + + +class FieldEntrySelector(EntrySelector): + """ + Selector that accepts only entries that match provided field + specifier(s). 
Only a limited set of specifiers is supported for now: + ::=[] + ::=[] + is a valid identifier + ::= "int" | "str" + ::= "=" + ::= "," + ::= ":" + ::= | + ::= + ::= "-" + is a string without spaces and special symbols + (e.g. , , , ) + """ + + _SPEC_DELIM = "," + _TYPE_DELIM = ":" + _RANGE_DELIM = "-" + _EQUAL = "=" + _ERROR_PREFIX = "Invalid field selector specifier" + + class _FieldEntryValuePredicate(object): + """ + Predicate that checks strict equality for the specified entry field + """ + + def __init__(self, name: str, typespec: Optional[str], value: str): + import builtins + + self.name = name + self.type = getattr(builtins, typespec) if typespec is not None else str + self.value = value + + def __call__(self, entry): + return entry[self.name] == self.type(self.value) + + class _FieldEntryRangePredicate(object): + """ + Predicate that checks whether an entry field falls into the specified range + """ + + def __init__(self, name: str, typespec: Optional[str], vmin: str, vmax: str): + import builtins + + self.name = name + self.type = getattr(builtins, typespec) if typespec is not None else str + self.vmin = vmin + self.vmax = vmax + + def __call__(self, entry): + return (entry[self.name] >= self.type(self.vmin)) and ( + entry[self.name] <= self.type(self.vmax) + ) + + def __init__(self, spec: str): + self._predicates = self._parse_specifier_into_predicates(spec) + + def __call__(self, entry: Dict[str, Any]): + for predicate in self._predicates: + if not predicate(entry): + return False + return True + + def _parse_specifier_into_predicates(self, spec: str): + predicates = [] + specs = spec.split(self._SPEC_DELIM) + for subspec in specs: + eq_idx = subspec.find(self._EQUAL) + if eq_idx > 0: + field_name_with_type = subspec[:eq_idx] + field_name, field_type = self._parse_field_name_type(field_name_with_type) + field_value_or_range = subspec[eq_idx + 1 :] + if self._is_range_spec(field_value_or_range): + vmin, vmax = self._get_range_spec(field_value_or_range) + predicate = FieldEntrySelector._FieldEntryRangePredicate( + field_name, field_type, vmin, vmax + ) + else: + predicate = FieldEntrySelector._FieldEntryValuePredicate( + field_name, field_type, field_value_or_range + ) + predicates.append(predicate) + elif eq_idx == 0: + self._parse_error(f'"{subspec}", field name is empty!') + else: + self._parse_error(f'"{subspec}", should have format ' "=!") + return predicates + + def _parse_field_name_type(self, field_name_with_type: str) -> Tuple[str, Optional[str]]: + type_delim_idx = field_name_with_type.find(self._TYPE_DELIM) + if type_delim_idx > 0: + field_name = field_name_with_type[:type_delim_idx] + field_type = field_name_with_type[type_delim_idx + 1 :] + elif type_delim_idx == 0: + self._parse_error(f'"{field_name_with_type}", field name is empty!') + else: + field_name = field_name_with_type + field_type = None + # pyre-fixme[61]: `field_name` may not be initialized here. + # pyre-fixme[61]: `field_type` may not be initialized here. 
+ return field_name, field_type + + def _is_range_spec(self, field_value_or_range): + delim_idx = field_value_or_range.find(self._RANGE_DELIM) + return delim_idx > 0 + + def _get_range_spec(self, field_value_or_range): + if self._is_range_spec(field_value_or_range): + delim_idx = field_value_or_range.find(self._RANGE_DELIM) + vmin = field_value_or_range[:delim_idx] + vmax = field_value_or_range[delim_idx + 1 :] + return vmin, vmax + else: + self._parse_error('"field_value_or_range", range of values expected!') + + def _parse_error(self, msg): + raise ValueError(f"{self._ERROR_PREFIX}: {msg}") diff --git a/motion-gan-pipeline/preprocessing/third/densepose/utils/logger.py b/motion-gan-pipeline/preprocessing/third/densepose/utils/logger.py new file mode 100644 index 0000000..70cd3cb --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/utils/logger.py @@ -0,0 +1,13 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import logging + + +def verbosity_to_level(verbosity) -> int: + if verbosity is not None: + if verbosity == 0: + return logging.WARNING + elif verbosity == 1: + return logging.INFO + elif verbosity >= 2: + return logging.DEBUG + return logging.WARNING diff --git a/motion-gan-pipeline/preprocessing/third/densepose/utils/transform.py b/motion-gan-pipeline/preprocessing/third/densepose/utils/transform.py new file mode 100644 index 0000000..8dc4ae7 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/utils/transform.py @@ -0,0 +1,15 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from detectron2.data import MetadataCatalog +from detectron2.utils.file_io import PathManager + +from densepose import DensePoseTransformData + + +def load_for_dataset(dataset_name): + path = MetadataCatalog.get(dataset_name).densepose_transform_src + densepose_transform_data_fpath = PathManager.get_local_path(path) + return DensePoseTransformData.load(densepose_transform_data_fpath) + + +def load_from_cfg(cfg): + return load_for_dataset(cfg.DATASETS.TEST[0]) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/vis/__init__.py b/motion-gan-pipeline/preprocessing/third/densepose/vis/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/motion-gan-pipeline/preprocessing/third/densepose/vis/base.py b/motion-gan-pipeline/preprocessing/third/densepose/vis/base.py new file mode 100644 index 0000000..7b35397 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/vis/base.py @@ -0,0 +1,191 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
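# A short sketch of the field-selector mini-language implemented by
# FieldEntrySelector in utils/dbhelper.py above; the entry dict and field
# names are made up for illustration.
from densepose.utils.dbhelper import EntrySelector

selector = EntrySelector.from_string("image_id:int=1-100,subset=train")
entry = {"image_id": 42, "subset": "train"}
print(selector(entry))  # True: image_id lies in [1, 100] and subset matches

match_all = EntrySelector.from_string("*")
print(match_all(entry))  # True: "*" accepts every entry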
+import logging +import numpy as np +import cv2 +import torch + +Image = np.ndarray +Boxes = torch.Tensor + + +class MatrixVisualizer(object): + """ + Base visualizer for matrix data + """ + + def __init__( + self, + inplace=True, + cmap=cv2.COLORMAP_PARULA, + val_scale=1.0, + alpha=0.7, + interp_method_matrix=cv2.INTER_LINEAR, + interp_method_mask=cv2.INTER_NEAREST, + ): + self.inplace = inplace + self.cmap = cmap + self.val_scale = val_scale + self.alpha = alpha + self.interp_method_matrix = interp_method_matrix + self.interp_method_mask = interp_method_mask + + def visualize(self, image_bgr, mask, matrix, bbox_xywh): + self._check_image(image_bgr) + self._check_mask_matrix(mask, matrix) + if self.inplace: + image_target_bgr = image_bgr + else: + image_target_bgr = image_bgr * 0 + x, y, w, h = [int(v) for v in bbox_xywh] + if w <= 0 or h <= 0: + return image_bgr + mask, matrix = self._resize(mask, matrix, w, h) + mask_bg = np.tile((mask == 0)[:, :, np.newaxis], [1, 1, 3]) + matrix_scaled = matrix.astype(np.float32) * self.val_scale + _EPSILON = 1e-6 + if np.any(matrix_scaled > 255 + _EPSILON): + logger = logging.getLogger(__name__) + logger.warning( + f"Matrix has values > {255 + _EPSILON} after " f"scaling, clipping to [0..255]" + ) + matrix_scaled_8u = matrix_scaled.clip(0, 255).astype(np.uint8) + matrix_vis = cv2.applyColorMap(matrix_scaled_8u, self.cmap) + matrix_vis[mask_bg] = image_target_bgr[y : y + h, x : x + w, :][mask_bg] + image_target_bgr[y : y + h, x : x + w, :] = ( + image_target_bgr[y : y + h, x : x + w, :] * (1.0 - self.alpha) + matrix_vis * self.alpha + ) + return image_target_bgr.astype(np.uint8) + + def _resize(self, mask, matrix, w, h): + if (w != mask.shape[1]) or (h != mask.shape[0]): + mask = cv2.resize(mask, (w, h), self.interp_method_mask) + if (w != matrix.shape[1]) or (h != matrix.shape[0]): + matrix = cv2.resize(matrix, (w, h), self.interp_method_matrix) + return mask, matrix + + def _check_image(self, image_rgb): + assert len(image_rgb.shape) == 3 + assert image_rgb.shape[2] == 3 + assert image_rgb.dtype == np.uint8 + + def _check_mask_matrix(self, mask, matrix): + assert len(matrix.shape) == 2 + assert len(mask.shape) == 2 + assert mask.dtype == np.uint8 + + +class RectangleVisualizer(object): + + _COLOR_GREEN = (18, 127, 15) + + def __init__(self, color=_COLOR_GREEN, thickness=1): + self.color = color + self.thickness = thickness + + def visualize(self, image_bgr, bbox_xywh, color=None, thickness=None): + x, y, w, h = bbox_xywh + color = color or self.color + thickness = thickness or self.thickness + cv2.rectangle(image_bgr, (int(x), int(y)), (int(x + w), int(y + h)), color, thickness) + return image_bgr + + +class PointsVisualizer(object): + + _COLOR_GREEN = (18, 127, 15) + + def __init__(self, color_bgr=_COLOR_GREEN, r=5): + self.color_bgr = color_bgr + self.r = r + + def visualize(self, image_bgr, pts_xy, colors_bgr=None, rs=None): + for j, pt_xy in enumerate(pts_xy): + x, y = pt_xy + color_bgr = colors_bgr[j] if colors_bgr is not None else self.color_bgr + r = rs[j] if rs is not None else self.r + cv2.circle(image_bgr, (x, y), r, color_bgr, -1) + return image_bgr + + +class TextVisualizer(object): + + _COLOR_GRAY = (218, 227, 218) + _COLOR_WHITE = (255, 255, 255) + + def __init__( + self, + font_face=cv2.FONT_HERSHEY_SIMPLEX, + font_color_bgr=_COLOR_GRAY, + font_scale=0.35, + font_line_type=cv2.LINE_AA, + font_line_thickness=1, + fill_color_bgr=_COLOR_WHITE, + fill_color_transparency=1.0, + frame_color_bgr=_COLOR_WHITE, + frame_color_transparency=1.0, + 
frame_thickness=1, + ): + self.font_face = font_face + self.font_color_bgr = font_color_bgr + self.font_scale = font_scale + self.font_line_type = font_line_type + self.font_line_thickness = font_line_thickness + self.fill_color_bgr = fill_color_bgr + self.fill_color_transparency = fill_color_transparency + self.frame_color_bgr = frame_color_bgr + self.frame_color_transparency = frame_color_transparency + self.frame_thickness = frame_thickness + + def visualize(self, image_bgr, txt, topleft_xy): + txt_w, txt_h = self.get_text_size_wh(txt) + topleft_xy = tuple(map(int, topleft_xy)) + x, y = topleft_xy + if self.frame_color_transparency < 1.0: + t = self.frame_thickness + image_bgr[y - t : y + txt_h + t, x - t : x + txt_w + t, :] = ( + image_bgr[y - t : y + txt_h + t, x - t : x + txt_w + t, :] + * self.frame_color_transparency + + np.array(self.frame_color_bgr) * (1.0 - self.frame_color_transparency) + ).astype(np.float) + if self.fill_color_transparency < 1.0: + image_bgr[y : y + txt_h, x : x + txt_w, :] = ( + image_bgr[y : y + txt_h, x : x + txt_w, :] * self.fill_color_transparency + + np.array(self.fill_color_bgr) * (1.0 - self.fill_color_transparency) + ).astype(np.float) + cv2.putText( + image_bgr, + txt, + topleft_xy, + self.font_face, + self.font_scale, + self.font_color_bgr, + self.font_line_thickness, + self.font_line_type, + ) + return image_bgr + + def get_text_size_wh(self, txt): + ((txt_w, txt_h), _) = cv2.getTextSize( + txt, self.font_face, self.font_scale, self.font_line_thickness + ) + return txt_w, txt_h + + +class CompoundVisualizer(object): + def __init__(self, visualizers): + self.visualizers = visualizers + + def visualize(self, image_bgr, data): + assert len(data) == len( + self.visualizers + ), "The number of datas {} should match the number of visualizers" " {}".format( + len(data), len(self.visualizers) + ) + image = image_bgr + for i, visualizer in enumerate(self.visualizers): + image = visualizer.visualize(image, data[i]) + return image + + def __str__(self): + visualizer_str = ", ".join([str(v) for v in self.visualizers]) + return "Compound Visualizer [{}]".format(visualizer_str) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/vis/bounding_box.py b/motion-gan-pipeline/preprocessing/third/densepose/vis/bounding_box.py new file mode 100644 index 0000000..4f83957 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/vis/bounding_box.py @@ -0,0 +1,37 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
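# A minimal sketch of the MatrixVisualizer API from vis/base.py above, using
# random data; the image size, mask/matrix shapes, and bounding box are
# illustrative. Shapes are chosen to match the box so no resizing is triggered.
import numpy as np
from densepose.vis.base import MatrixVisualizer

image_bgr = np.zeros((480, 640, 3), dtype=np.uint8)
mask = np.ones((100, 80), dtype=np.uint8)  # 1 marks foreground pixels
matrix = np.random.randint(0, 256, (100, 80), dtype=np.uint8)  # values to colormap

visualizer = MatrixVisualizer(inplace=False, val_scale=1.0, alpha=0.7)
vis_bgr = visualizer.visualize(image_bgr, mask, matrix, [50, 60, 80, 100])  # bbox as x, y, w, h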
+from .base import RectangleVisualizer, TextVisualizer + + +class BoundingBoxVisualizer(object): + def __init__(self): + self.rectangle_visualizer = RectangleVisualizer() + + def visualize(self, image_bgr, boxes_xywh): + for bbox_xywh in boxes_xywh: + image_bgr = self.rectangle_visualizer.visualize(image_bgr, bbox_xywh) + return image_bgr + + +class ScoredBoundingBoxVisualizer(object): + def __init__(self, bbox_visualizer_params=None, score_visualizer_params=None, **kwargs): + if bbox_visualizer_params is None: + bbox_visualizer_params = {} + if score_visualizer_params is None: + score_visualizer_params = {} + self.visualizer_bbox = RectangleVisualizer(**bbox_visualizer_params) + self.visualizer_score = TextVisualizer(**score_visualizer_params) + + def visualize(self, image_bgr, scored_bboxes): + boxes_xywh, box_scores = scored_bboxes + assert len(boxes_xywh) == len( + box_scores + ), "Number of bounding boxes {} should be equal to the number of scores {}".format( + len(boxes_xywh), len(box_scores) + ) + for i, box_xywh in enumerate(boxes_xywh): + score_i = box_scores[i] + image_bgr = self.visualizer_bbox.visualize(image_bgr, box_xywh) + score_txt = "{0:6.4f}".format(score_i) + topleft_xy = box_xywh[0], box_xywh[1] + image_bgr = self.visualizer_score.visualize(image_bgr, score_txt, topleft_xy) + return image_bgr diff --git a/motion-gan-pipeline/preprocessing/third/densepose/vis/densepose_data_points.py b/motion-gan-pipeline/preprocessing/third/densepose/vis/densepose_data_points.py new file mode 100644 index 0000000..b6839a9 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/vis/densepose_data_points.py @@ -0,0 +1,106 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import numpy as np +from typing import Iterable, Optional, Tuple +import cv2 + +from densepose.structures import DensePoseDataRelative + +from .base import Boxes, Image, MatrixVisualizer, PointsVisualizer + + +class DensePoseDataCoarseSegmentationVisualizer(object): + """ + Visualizer for ground truth segmentation + """ + + def __init__(self, inplace=True, cmap=cv2.COLORMAP_PARULA, alpha=0.7, **kwargs): + self.mask_visualizer = MatrixVisualizer( + inplace=inplace, + cmap=cmap, + val_scale=255.0 / DensePoseDataRelative.N_BODY_PARTS, + alpha=alpha, + ) + + def visualize( + self, + image_bgr: Image, + bbox_densepose_datas: Optional[Tuple[Iterable[Boxes], Iterable[DensePoseDataRelative]]], + ) -> Image: + if bbox_densepose_datas is None: + return image_bgr + for bbox_xywh, densepose_data in zip(*bbox_densepose_datas): + matrix = densepose_data.segm.numpy() + mask = np.zeros(matrix.shape, dtype=np.uint8) + mask[matrix > 0] = 1 + image_bgr = self.mask_visualizer.visualize(image_bgr, mask, matrix, bbox_xywh.numpy()) + return image_bgr + + +class DensePoseDataPointsVisualizer(object): + def __init__(self, densepose_data_to_value_fn=None, cmap=cv2.COLORMAP_PARULA, **kwargs): + self.points_visualizer = PointsVisualizer() + self.densepose_data_to_value_fn = densepose_data_to_value_fn + self.cmap = cmap + + def visualize( + self, + image_bgr: Image, + bbox_densepose_datas: Optional[Tuple[Iterable[Boxes], Iterable[DensePoseDataRelative]]], + ) -> Image: + if bbox_densepose_datas is None: + return image_bgr + for bbox_xywh, densepose_data in zip(*bbox_densepose_datas): + x0, y0, w, h = bbox_xywh.numpy() + x = densepose_data.x.numpy() * w / 255.0 + x0 + y = densepose_data.y.numpy() * h / 255.0 + y0 + pts_xy = zip(x, y) + if self.densepose_data_to_value_fn is None: + image_bgr = 
self.points_visualizer.visualize(image_bgr, pts_xy) + else: + v = self.densepose_data_to_value_fn(densepose_data) + img_colors_bgr = cv2.applyColorMap(v, self.cmap) + colors_bgr = [ + [int(v) for v in img_color_bgr.ravel()] for img_color_bgr in img_colors_bgr + ] + image_bgr = self.points_visualizer.visualize(image_bgr, pts_xy, colors_bgr) + return image_bgr + + +def _densepose_data_u_for_cmap(densepose_data): + u = np.clip(densepose_data.u.numpy(), 0, 1) * 255.0 + return u.astype(np.uint8) + + +def _densepose_data_v_for_cmap(densepose_data): + v = np.clip(densepose_data.v.numpy(), 0, 1) * 255.0 + return v.astype(np.uint8) + + +def _densepose_data_i_for_cmap(densepose_data): + i = ( + np.clip(densepose_data.i.numpy(), 0.0, DensePoseDataRelative.N_PART_LABELS) + * 255.0 + / DensePoseDataRelative.N_PART_LABELS + ) + return i.astype(np.uint8) + + +class DensePoseDataPointsUVisualizer(DensePoseDataPointsVisualizer): + def __init__(self, **kwargs): + super(DensePoseDataPointsUVisualizer, self).__init__( + densepose_data_to_value_fn=_densepose_data_u_for_cmap, **kwargs + ) + + +class DensePoseDataPointsVVisualizer(DensePoseDataPointsVisualizer): + def __init__(self, **kwargs): + super(DensePoseDataPointsVVisualizer, self).__init__( + densepose_data_to_value_fn=_densepose_data_v_for_cmap, **kwargs + ) + + +class DensePoseDataPointsIVisualizer(DensePoseDataPointsVisualizer): + def __init__(self, **kwargs): + super(DensePoseDataPointsIVisualizer, self).__init__( + densepose_data_to_value_fn=_densepose_data_i_for_cmap, **kwargs + ) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/vis/densepose_outputs_iuv.py b/motion-gan-pipeline/preprocessing/third/densepose/vis/densepose_outputs_iuv.py new file mode 100644 index 0000000..a32a418 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/vis/densepose_outputs_iuv.py @@ -0,0 +1,101 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
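# A small sketch of ScoredBoundingBoxVisualizer from vis/bounding_box.py above,
# which draws each box and overlays its score; the boxes and scores are dummy values.
import numpy as np
from densepose.vis.bounding_box import ScoredBoundingBoxVisualizer

image_bgr = np.zeros((240, 320, 3), dtype=np.uint8)
boxes_xywh = [[10.0, 20.0, 100.0, 50.0], [150.0, 30.0, 60.0, 80.0]]
scores = [0.98, 0.61]
visualizer = ScoredBoundingBoxVisualizer()
image_bgr = visualizer.visualize(image_bgr, (boxes_xywh, scores))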
+import numpy as np +from typing import Optional, Tuple +import cv2 + +from densepose.structures import DensePoseDataRelative + +from ..structures import DensePoseChartPredictorOutput +from .base import Boxes, Image, MatrixVisualizer + + +class DensePoseOutputsVisualizer(object): + def __init__( + self, inplace=True, cmap=cv2.COLORMAP_PARULA, alpha=0.7, to_visualize=None, **kwargs + ): + assert to_visualize in "IUV", "can only visualize IUV" + self.to_visualize = to_visualize + + if self.to_visualize == "I": + val_scale = 255.0 / DensePoseDataRelative.N_PART_LABELS + else: + val_scale = 1.0 + self.mask_visualizer = MatrixVisualizer( + inplace=inplace, cmap=cmap, val_scale=val_scale, alpha=alpha + ) + + def visualize( + self, + image_bgr: Image, + dp_output_with_bboxes: Tuple[Optional[DensePoseChartPredictorOutput], Optional[Boxes]], + ) -> Image: + densepose_output, bboxes_xywh = dp_output_with_bboxes + if densepose_output is None or bboxes_xywh is None: + return image_bgr + + assert isinstance( + densepose_output, DensePoseChartPredictorOutput + ), "DensePoseChartPredictorOutput expected, {} encountered".format(type(densepose_output)) + + S = densepose_output.coarse_segm + I = densepose_output.fine_segm # noqa + U = densepose_output.u + V = densepose_output.v + N = S.size(0) + assert N == I.size( + 0 + ), "densepose outputs S {} and I {}" " should have equal first dim size".format( + S.size(), I.size() + ) + assert N == U.size( + 0 + ), "densepose outputs S {} and U {}" " should have equal first dim size".format( + S.size(), U.size() + ) + assert N == V.size( + 0 + ), "densepose outputs S {} and V {}" " should have equal first dim size".format( + S.size(), V.size() + ) + assert N == len( + bboxes_xywh + ), "number of bounding boxes {}" " should be equal to first dim size of outputs {}".format( + len(bboxes_xywh), N + ) + for n in range(N): + Sn = S[n].argmax(dim=0) + In = I[n].argmax(dim=0) * (Sn > 0).long() + segmentation = In.cpu().numpy().astype(np.uint8) + mask = np.zeros(segmentation.shape, dtype=np.uint8) + mask[segmentation > 0] = 1 + bbox_xywh = bboxes_xywh[n] + + if self.to_visualize == "I": + vis = segmentation + elif self.to_visualize in "UV": + U_or_Vn = {"U": U, "V": V}[self.to_visualize][n].cpu().numpy().astype(np.float32) + vis = np.zeros(segmentation.shape, dtype=np.float32) + for partId in range(U_or_Vn.shape[0]): + vis[segmentation == partId] = ( + U_or_Vn[partId][segmentation == partId].clip(0, 1) * 255 + ) + + # pyre-fixme[61]: `vis` may not be initialized here. 
+ image_bgr = self.mask_visualizer.visualize(image_bgr, mask, vis, bbox_xywh) + + return image_bgr + + +class DensePoseOutputsUVisualizer(DensePoseOutputsVisualizer): + def __init__(self, inplace=True, cmap=cv2.COLORMAP_PARULA, alpha=0.7, **kwargs): + super().__init__(inplace=inplace, cmap=cmap, alpha=alpha, to_visualize="U", **kwargs) + + +class DensePoseOutputsVVisualizer(DensePoseOutputsVisualizer): + def __init__(self, inplace=True, cmap=cv2.COLORMAP_PARULA, alpha=0.7, **kwargs): + super().__init__(inplace=inplace, cmap=cmap, alpha=alpha, to_visualize="V", **kwargs) + + +class DensePoseOutputsFineSegmentationVisualizer(DensePoseOutputsVisualizer): + def __init__(self, inplace=True, cmap=cv2.COLORMAP_PARULA, alpha=0.7, **kwargs): + super().__init__(inplace=inplace, cmap=cmap, alpha=alpha, to_visualize="I", **kwargs) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/vis/densepose_outputs_vertex.py b/motion-gan-pipeline/preprocessing/third/densepose/vis/densepose_outputs_vertex.py new file mode 100644 index 0000000..a9c0c56 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/vis/densepose_outputs_vertex.py @@ -0,0 +1,229 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +import json +import numpy as np +from functools import lru_cache +from typing import Dict, List, Optional, Tuple +import cv2 +import torch + +from detectron2.utils.file_io import PathManager + +from densepose.modeling import build_densepose_embedder +from densepose.modeling.cse.utils import get_closest_vertices_mask_from_ES + +from ..data.utils import get_class_to_mesh_name_mapping +from ..structures import DensePoseEmbeddingPredictorOutput +from ..structures.mesh import create_mesh +from .base import Boxes, Image, MatrixVisualizer +from .densepose_results_textures import get_texture_atlas + + +@lru_cache() +def get_xyz_vertex_embedding(mesh_name: str, device: torch.device): + if mesh_name == "smpl_27554": + embed_path = PathManager.get_local_path( + "https://dl.fbaipublicfiles.com/densepose/data/cse/mds_d=256.npy" + ) + embed_map, _ = np.load(embed_path, allow_pickle=True) + embed_map = torch.tensor(embed_map).float()[:, 0] + embed_map -= embed_map.min() + embed_map /= embed_map.max() + else: + mesh = create_mesh(mesh_name, device) + embed_map = mesh.vertices.sum(dim=1) + embed_map -= embed_map.min() + embed_map /= embed_map.max() + embed_map = embed_map ** 2 + return embed_map + + +class DensePoseOutputsVertexVisualizer(object): + def __init__( + self, + cfg, + inplace=True, + cmap=cv2.COLORMAP_JET, + alpha=0.7, + device="cuda", + default_class=0, + **kwargs, + ): + self.mask_visualizer = MatrixVisualizer( + inplace=inplace, cmap=cmap, val_scale=1.0, alpha=alpha + ) + self.class_to_mesh_name = get_class_to_mesh_name_mapping(cfg) + self.embedder = build_densepose_embedder(cfg) + self.device = torch.device(device) + self.default_class = default_class + + self.mesh_vertex_embeddings = { + mesh_name: self.embedder(mesh_name).to(self.device) + for mesh_name in self.class_to_mesh_name.values() + if self.embedder.has_embeddings(mesh_name) + } + + def visualize( + self, + image_bgr: Image, + outputs_boxes_xywh_classes: Tuple[ + Optional[DensePoseEmbeddingPredictorOutput], Optional[Boxes], Optional[List[int]] + ], + ) -> Image: + if outputs_boxes_xywh_classes[0] is None: + return image_bgr + + S, E, N, bboxes_xywh, pred_classes = self.extract_and_check_outputs_and_boxes( + outputs_boxes_xywh_classes + ) + + for n in range(N): + x, y, w, h = bboxes_xywh[n].int().tolist() + 
mesh_name = self.class_to_mesh_name[pred_classes[n]] + closest_vertices, mask = get_closest_vertices_mask_from_ES( + E[[n]], + S[[n]], + h, + w, + self.mesh_vertex_embeddings[mesh_name], + self.device, + ) + embed_map = get_xyz_vertex_embedding(mesh_name, self.device) + vis = (embed_map[closest_vertices].clip(0, 1) * 255.0).cpu().numpy() + mask_numpy = mask.cpu().numpy().astype(dtype=np.uint8) + image_bgr = self.mask_visualizer.visualize(image_bgr, mask_numpy, vis, [x, y, w, h]) + + return image_bgr + + def extract_and_check_outputs_and_boxes(self, outputs_boxes_xywh_classes): + + densepose_output, bboxes_xywh, pred_classes = outputs_boxes_xywh_classes + + if pred_classes is None: + pred_classes = [self.default_class] * len(bboxes_xywh) + + assert isinstance( + densepose_output, DensePoseEmbeddingPredictorOutput + ), "DensePoseEmbeddingPredictorOutput expected, {} encountered".format( + type(densepose_output) + ) + + S = densepose_output.coarse_segm + E = densepose_output.embedding + N = S.size(0) + assert N == E.size( + 0 + ), "CSE coarse_segm {} and embeddings {}" " should have equal first dim size".format( + S.size(), E.size() + ) + assert N == len( + bboxes_xywh + ), "number of bounding boxes {}" " should be equal to first dim size of outputs {}".format( + len(bboxes_xywh), N + ) + assert N == len(pred_classes), ( + "number of predicted classes {}" + " should be equal to first dim size of outputs {}".format(len(bboxes_xywh), N) + ) + + return S, E, N, bboxes_xywh, pred_classes + + +def get_texture_atlases(json_str: Optional[str]) -> Optional[Dict[str, Optional[np.ndarray]]]: + """ + json_str is a JSON string representing a mesh_name -> texture_atlas_path dictionary + """ + if json_str is None: + return None + + paths = json.loads(json_str) + return {mesh_name: get_texture_atlas(path) for mesh_name, path in paths.items()} + + +class DensePoseOutputsTextureVisualizer(DensePoseOutputsVertexVisualizer): + def __init__( + self, + cfg, + texture_atlases_dict, + device="cuda", + default_class=0, + **kwargs, + ): + self.embedder = build_densepose_embedder(cfg) + + self.texture_image_dict = {} + self.alpha_dict = {} + + for mesh_name in texture_atlases_dict.keys(): + if texture_atlases_dict[mesh_name].shape[-1] == 4: # Image with alpha channel + self.alpha_dict[mesh_name] = texture_atlases_dict[mesh_name][:, :, -1] / 255.0 + self.texture_image_dict[mesh_name] = texture_atlases_dict[mesh_name][:, :, :3] + else: + self.alpha_dict[mesh_name] = texture_atlases_dict[mesh_name].sum(axis=-1) > 0 + self.texture_image_dict[mesh_name] = texture_atlases_dict[mesh_name] + + self.device = torch.device(device) + self.class_to_mesh_name = get_class_to_mesh_name_mapping(cfg) + self.default_class = default_class + + self.mesh_vertex_embeddings = { + mesh_name: self.embedder(mesh_name).to(self.device) + for mesh_name in self.class_to_mesh_name.values() + } + + def visualize( + self, + image_bgr: Image, + outputs_boxes_xywh_classes: Tuple[ + Optional[DensePoseEmbeddingPredictorOutput], Optional[Boxes], Optional[List[int]] + ], + ) -> Image: + image_target_bgr = image_bgr.copy() + if outputs_boxes_xywh_classes[0] is None: + return image_target_bgr + + S, E, N, bboxes_xywh, pred_classes = self.extract_and_check_outputs_and_boxes( + outputs_boxes_xywh_classes + ) + + meshes = { + p: create_mesh(self.class_to_mesh_name[p], self.device) for p in np.unique(pred_classes) + } + + for n in range(N): + x, y, w, h = bboxes_xywh[n].int().cpu().numpy() + mesh_name = self.class_to_mesh_name[pred_classes[n]] + closest_vertices, 
mask = get_closest_vertices_mask_from_ES( + E[[n]], + S[[n]], + h, + w, + self.mesh_vertex_embeddings[mesh_name], + self.device, + ) + uv_array = meshes[pred_classes[n]].texcoords[closest_vertices].permute((2, 0, 1)) + uv_array = uv_array.cpu().numpy().clip(0, 1) + textured_image = self.generate_image_with_texture( + image_target_bgr[y : y + h, x : x + w], + uv_array, + mask.cpu().numpy(), + self.class_to_mesh_name[pred_classes[n]], + ) + if textured_image is None: + continue + image_target_bgr[y : y + h, x : x + w] = textured_image + + return image_target_bgr + + def generate_image_with_texture(self, bbox_image_bgr, uv_array, mask, mesh_name): + alpha = self.alpha_dict.get(mesh_name) + texture_image = self.texture_image_dict.get(mesh_name) + if alpha is None or texture_image is None: + return None + U, V = uv_array + x_index = (U * texture_image.shape[1]).astype(int) + y_index = (V * texture_image.shape[0]).astype(int) + local_texture = texture_image[y_index, x_index][mask] + local_alpha = np.expand_dims(alpha[y_index, x_index][mask], -1) + output_image = bbox_image_bgr.copy() + output_image[mask] = output_image[mask] * (1 - local_alpha) + local_texture * local_alpha + return output_image.astype(np.uint8) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/vis/densepose_results.py b/motion-gan-pipeline/preprocessing/third/densepose/vis/densepose_results.py new file mode 100644 index 0000000..f8382f8 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/vis/densepose_results.py @@ -0,0 +1,355 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import logging +import numpy as np +from typing import List, Optional, Tuple +import cv2 +import torch + +from densepose.structures import DensePoseDataRelative + +from ..structures import DensePoseChartResult +from .base import Boxes, Image, MatrixVisualizer + + +class DensePoseResultsVisualizer(object): + def visualize( + self, + image_bgr: Image, + results_and_boxes_xywh: Tuple[Optional[List[DensePoseChartResult]], Optional[Boxes]], + ) -> Image: + densepose_result, boxes_xywh = results_and_boxes_xywh + if densepose_result is None or boxes_xywh is None: + return image_bgr + + boxes_xywh = boxes_xywh.cpu().numpy() + context = self.create_visualization_context(image_bgr) + for i, result in enumerate(densepose_result): + iuv_array = torch.cat( + (result.labels[None].type(torch.float32), result.uv * 255.0) + ).type(torch.uint8) + self.visualize_iuv_arr(context, iuv_array.cpu().numpy(), boxes_xywh[i]) + image_bgr = self.context_to_image_bgr(context) + return image_bgr + + def create_visualization_context(self, image_bgr: Image): + return image_bgr + + def visualize_iuv_arr(self, context, iuv_arr: np.ndarray, bbox_xywh) -> None: + pass + + def context_to_image_bgr(self, context): + return context + + def get_image_bgr_from_context(self, context): + return context + + +class DensePoseMaskedColormapResultsVisualizer(DensePoseResultsVisualizer): + def __init__( + self, + data_extractor, + segm_extractor, + inplace=True, + cmap=cv2.COLORMAP_PARULA, + alpha=0.7, + val_scale=1.0, + **kwargs, + ): + self.mask_visualizer = MatrixVisualizer( + inplace=inplace, cmap=cmap, val_scale=val_scale, alpha=alpha + ) + self.data_extractor = data_extractor + self.segm_extractor = segm_extractor + + def context_to_image_bgr(self, context): + return context + + def visualize_iuv_arr(self, context, iuv_arr: np.ndarray, bbox_xywh) -> None: + image_bgr = self.get_image_bgr_from_context(context) + matrix = self.data_extractor(iuv_arr) + segm = 
self.segm_extractor(iuv_arr) + mask = np.zeros(matrix.shape, dtype=np.uint8) + mask[segm > 0] = 1 + image_bgr = self.mask_visualizer.visualize(image_bgr, mask, matrix, bbox_xywh) + + +def _extract_i_from_iuvarr(iuv_arr): + return iuv_arr[0, :, :] + + +def _extract_u_from_iuvarr(iuv_arr): + return iuv_arr[1, :, :] + + +def _extract_v_from_iuvarr(iuv_arr): + return iuv_arr[2, :, :] + + +class DensePoseResultsMplContourVisualizer(DensePoseResultsVisualizer): + def __init__(self, levels=10, **kwargs): + self.levels = levels + self.plot_args = kwargs + + def create_visualization_context(self, image_bgr: Image): + import matplotlib.pyplot as plt + from matplotlib.backends.backend_agg import FigureCanvasAgg as FigureCanvas + + context = {} + context["image_bgr"] = image_bgr + dpi = 100 + height_inches = float(image_bgr.shape[0]) / dpi + width_inches = float(image_bgr.shape[1]) / dpi + fig = plt.figure(figsize=(width_inches, height_inches), dpi=dpi) + plt.axes([0, 0, 1, 1]) + plt.axis("off") + context["fig"] = fig + canvas = FigureCanvas(fig) + context["canvas"] = canvas + extent = (0, image_bgr.shape[1], image_bgr.shape[0], 0) + plt.imshow(image_bgr[:, :, ::-1], extent=extent) + return context + + def context_to_image_bgr(self, context): + fig = context["fig"] + w, h = map(int, fig.get_size_inches() * fig.get_dpi()) + canvas = context["canvas"] + canvas.draw() + image_1d = np.fromstring(canvas.tostring_rgb(), dtype="uint8") + image_rgb = image_1d.reshape(h, w, 3) + image_bgr = image_rgb[:, :, ::-1].copy() + return image_bgr + + def visualize_iuv_arr(self, context, iuv_arr: np.ndarray, bbox_xywh: Boxes) -> None: + import matplotlib.pyplot as plt + + u = _extract_u_from_iuvarr(iuv_arr).astype(float) / 255.0 + v = _extract_v_from_iuvarr(iuv_arr).astype(float) / 255.0 + extent = ( + bbox_xywh[0], + bbox_xywh[0] + bbox_xywh[2], + bbox_xywh[1], + bbox_xywh[1] + bbox_xywh[3], + ) + plt.contour(u, self.levels, extent=extent, **self.plot_args) + plt.contour(v, self.levels, extent=extent, **self.plot_args) + + +class DensePoseResultsCustomContourVisualizer(DensePoseResultsVisualizer): + """ + Contour visualization using marching squares + """ + + def __init__(self, levels=10, **kwargs): + # TODO: colormap is hardcoded + cmap = cv2.COLORMAP_PARULA + if isinstance(levels, int): + self.levels = np.linspace(0, 1, levels) + else: + self.levels = levels + if "linewidths" in kwargs: + self.linewidths = kwargs["linewidths"] + else: + self.linewidths = [1] * len(self.levels) + self.plot_args = kwargs + img_colors_bgr = cv2.applyColorMap((self.levels * 255).astype(np.uint8), cmap) + self.level_colors_bgr = [ + [int(v) for v in img_color_bgr.ravel()] for img_color_bgr in img_colors_bgr + ] + + def visualize_iuv_arr(self, context, iuv_arr: np.ndarray, bbox_xywh: Boxes) -> None: + image_bgr = self.get_image_bgr_from_context(context) + segm = _extract_i_from_iuvarr(iuv_arr) + u = _extract_u_from_iuvarr(iuv_arr).astype(float) / 255.0 + v = _extract_v_from_iuvarr(iuv_arr).astype(float) / 255.0 + self._contours(image_bgr, u, segm, bbox_xywh) + self._contours(image_bgr, v, segm, bbox_xywh) + + def _contours(self, image_bgr, arr, segm, bbox_xywh): + for part_idx in range(1, DensePoseDataRelative.N_PART_LABELS + 1): + mask = segm == part_idx + if not np.any(mask): + continue + arr_min = np.amin(arr[mask]) + arr_max = np.amax(arr[mask]) + I, J = np.nonzero(mask) + i0 = np.amin(I) + i1 = np.amax(I) + 1 + j0 = np.amin(J) + j1 = np.amax(J) + 1 + if (j1 == j0 + 1) or (i1 == i0 + 1): + continue + Nw = arr.shape[1] - 1 + Nh = 
arr.shape[0] - 1 + for level_idx, level in enumerate(self.levels): + if (level < arr_min) or (level > arr_max): + continue + vp = arr[i0:i1, j0:j1] >= level + bin_codes = vp[:-1, :-1] + vp[1:, :-1] * 2 + vp[1:, 1:] * 4 + vp[:-1, 1:] * 8 + mp = mask[i0:i1, j0:j1] + bin_mask_codes = mp[:-1, :-1] + mp[1:, :-1] * 2 + mp[1:, 1:] * 4 + mp[:-1, 1:] * 8 + it = np.nditer(bin_codes, flags=["multi_index"]) + color_bgr = self.level_colors_bgr[level_idx] + linewidth = self.linewidths[level_idx] + while not it.finished: + if (it[0] != 0) and (it[0] != 15): + i, j = it.multi_index + if bin_mask_codes[i, j] != 0: + self._draw_line( + image_bgr, + arr, + mask, + level, + color_bgr, + linewidth, + it[0], + it.multi_index, + bbox_xywh, + Nw, + Nh, + (i0, j0), + ) + it.iternext() + + def _draw_line( + self, + image_bgr, + arr, + mask, + v, + color_bgr, + linewidth, + bin_code, + multi_idx, + bbox_xywh, + Nw, + Nh, + offset, + ): + lines = self._bin_code_2_lines(arr, v, bin_code, multi_idx, Nw, Nh, offset) + x0, y0, w, h = bbox_xywh + x1 = x0 + w + y1 = y0 + h + for line in lines: + x0r, y0r = line[0] + x1r, y1r = line[1] + pt0 = (int(x0 + x0r * (x1 - x0)), int(y0 + y0r * (y1 - y0))) + pt1 = (int(x0 + x1r * (x1 - x0)), int(y0 + y1r * (y1 - y0))) + cv2.line(image_bgr, pt0, pt1, color_bgr, linewidth) + + def _bin_code_2_lines(self, arr, v, bin_code, multi_idx, Nw, Nh, offset): + i0, j0 = offset + i, j = multi_idx + i += i0 + j += j0 + v0, v1, v2, v3 = arr[i, j], arr[i + 1, j], arr[i + 1, j + 1], arr[i, j + 1] + x0i = float(j) / Nw + y0j = float(i) / Nh + He = 1.0 / Nh + We = 1.0 / Nw + if (bin_code == 1) or (bin_code == 14): + a = (v - v0) / (v1 - v0) + b = (v - v0) / (v3 - v0) + pt1 = (x0i, y0j + a * He) + pt2 = (x0i + b * We, y0j) + return [(pt1, pt2)] + elif (bin_code == 2) or (bin_code == 13): + a = (v - v0) / (v1 - v0) + b = (v - v1) / (v2 - v1) + pt1 = (x0i, y0j + a * He) + pt2 = (x0i + b * We, y0j + He) + return [(pt1, pt2)] + elif (bin_code == 3) or (bin_code == 12): + a = (v - v0) / (v3 - v0) + b = (v - v1) / (v2 - v1) + pt1 = (x0i + a * We, y0j) + pt2 = (x0i + b * We, y0j + He) + return [(pt1, pt2)] + elif (bin_code == 4) or (bin_code == 11): + a = (v - v1) / (v2 - v1) + b = (v - v3) / (v2 - v3) + pt1 = (x0i + a * We, y0j + He) + pt2 = (x0i + We, y0j + b * He) + return [(pt1, pt2)] + elif (bin_code == 6) or (bin_code == 9): + a = (v - v0) / (v1 - v0) + b = (v - v3) / (v2 - v3) + pt1 = (x0i, y0j + a * He) + pt2 = (x0i + We, y0j + b * He) + return [(pt1, pt2)] + elif (bin_code == 7) or (bin_code == 8): + a = (v - v0) / (v3 - v0) + b = (v - v3) / (v2 - v3) + pt1 = (x0i + a * We, y0j) + pt2 = (x0i + We, y0j + b * He) + return [(pt1, pt2)] + elif bin_code == 5: + a1 = (v - v0) / (v1 - v0) + b1 = (v - v1) / (v2 - v1) + pt11 = (x0i, y0j + a1 * He) + pt12 = (x0i + b1 * We, y0j + He) + a2 = (v - v0) / (v3 - v0) + b2 = (v - v3) / (v2 - v3) + pt21 = (x0i + a2 * We, y0j) + pt22 = (x0i + We, y0j + b2 * He) + return [(pt11, pt12), (pt21, pt22)] + elif bin_code == 10: + a1 = (v - v0) / (v3 - v0) + b1 = (v - v0) / (v1 - v0) + pt11 = (x0i + a1 * We, y0j) + pt12 = (x0i, y0j + b1 * He) + a2 = (v - v1) / (v2 - v1) + b2 = (v - v3) / (v2 - v3) + pt21 = (x0i + a2 * We, y0j + He) + pt22 = (x0i + We, y0j + b2 * He) + return [(pt11, pt12), (pt21, pt22)] + return [] + + +try: + import matplotlib + + # matplotlib.use("Agg") + DensePoseResultsContourVisualizer = DensePoseResultsMplContourVisualizer +except ModuleNotFoundError: + logger = logging.getLogger(__name__) + logger.warning("Could not import matplotlib, using custom 
contour visualizer") + DensePoseResultsContourVisualizer = DensePoseResultsCustomContourVisualizer + + +class DensePoseResultsFineSegmentationVisualizer(DensePoseMaskedColormapResultsVisualizer): + def __init__(self, inplace=True, cmap=cv2.COLORMAP_PARULA, alpha=0.7, **kwargs): + super(DensePoseResultsFineSegmentationVisualizer, self).__init__( + _extract_i_from_iuvarr, + _extract_i_from_iuvarr, + inplace, + cmap, + alpha, + val_scale=255.0 / DensePoseDataRelative.N_PART_LABELS, + **kwargs, + ) + + +class DensePoseResultsUVisualizer(DensePoseMaskedColormapResultsVisualizer): + def __init__(self, inplace=True, cmap=cv2.COLORMAP_PARULA, alpha=0.7, **kwargs): + super(DensePoseResultsUVisualizer, self).__init__( + _extract_u_from_iuvarr, + _extract_i_from_iuvarr, + inplace, + cmap, + alpha, + val_scale=1.0, + **kwargs, + ) + + +class DensePoseResultsVVisualizer(DensePoseMaskedColormapResultsVisualizer): + def __init__(self, inplace=True, cmap=cv2.COLORMAP_PARULA, alpha=0.7, **kwargs): + super(DensePoseResultsVVisualizer, self).__init__( + _extract_v_from_iuvarr, + _extract_i_from_iuvarr, + inplace, + cmap, + alpha, + val_scale=1.0, + **kwargs, + ) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/vis/densepose_results_textures.py b/motion-gan-pipeline/preprocessing/third/densepose/vis/densepose_results_textures.py new file mode 100644 index 0000000..8b02f2b --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/vis/densepose_results_textures.py @@ -0,0 +1,91 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import numpy as np +from typing import List, Optional, Tuple +import torch + +from detectron2.data.detection_utils import read_image + +from ..structures import DensePoseChartResult +from .base import Boxes, Image +from .densepose_results import DensePoseResultsVisualizer + + +def get_texture_atlas(path: Optional[str]) -> Optional[np.ndarray]: + if path is None: + return None + + # Reading images like that downsamples 16-bit images to 8-bit + # If 16-bit images are needed, we can replace that by cv2.imread with the + # cv2.IMREAD_UNCHANGED flag (with cv2 we also need it to keep alpha channels) + # The rest of the pipeline would need to be adapted to 16-bit images too + bgr_image = read_image(path) + rgb_image = np.copy(bgr_image) # Convert BGR -> RGB + rgb_image[:, :, :3] = rgb_image[:, :, 2::-1] # Works with alpha channel + return rgb_image + + +class DensePoseResultsVisualizerWithTexture(DensePoseResultsVisualizer): + """ + texture_atlas: An image, size 6N * 4N, with N * N squares for each of the 24 body parts. 
+ It must follow the grid found at https://github.com/facebookresearch/DensePose/blob/master/DensePoseData/demo_data/texture_atlas_200.png # noqa + For each body part, U is proportional to the x coordinate, and (1 - V) to y + """ + + def __init__(self, texture_atlas, **kwargs): + self.texture_atlas = texture_atlas + self.body_part_size = texture_atlas.shape[0] // 6 + assert self.body_part_size == texture_atlas.shape[1] // 4 + + def visualize( + self, + image_bgr: Image, + results_and_boxes_xywh: Tuple[Optional[List[DensePoseChartResult]], Optional[Boxes]], + ) -> Image: + densepose_result, boxes_xywh = results_and_boxes_xywh + if densepose_result is None or boxes_xywh is None: + return image_bgr + + boxes_xywh = boxes_xywh.int().cpu().numpy() + texture_image, alpha = self.get_texture() + for i, result in enumerate(densepose_result): + iuv_array = torch.cat((result.labels[None], result.uv.clamp(0, 1))) + x, y, w, h = boxes_xywh[i] + bbox_image = image_bgr[y : y + h, x : x + w] + image_bgr[y : y + h, x : x + w] = self.generate_image_with_texture( + texture_image, alpha, bbox_image, iuv_array.cpu().numpy() + ) + return image_bgr + + def get_texture(self): + N = self.body_part_size + texture_image = np.zeros([24, N, N, self.texture_atlas.shape[-1]]) + for i in range(4): + for j in range(6): + texture_image[(6 * i + j), :, :, :] = self.texture_atlas[ + N * j : N * (j + 1), N * i : N * (i + 1), : + ] + + if texture_image.shape[-1] == 4: # Image with alpha channel + alpha = texture_image[:, :, :, -1] / 255.0 + texture_image = texture_image[:, :, :, :3] + else: + alpha = texture_image.sum(axis=-1) > 0 + + return texture_image, alpha + + def generate_image_with_texture(self, texture_image, alpha, bbox_image_bgr, iuv_array): + + I, U, V = iuv_array + generated_image_bgr = bbox_image_bgr.copy() + + for PartInd in range(1, 25): + x, y = np.where(I == PartInd) + x_index = (U[x, y] * (self.body_part_size - 1)).astype(int) + y_index = ((1 - V[x, y]) * (self.body_part_size - 1)).astype(int) + part_alpha = np.expand_dims(alpha[PartInd - 1, y_index, x_index], -1) + generated_image_bgr[I == PartInd] = ( + generated_image_bgr[I == PartInd] * (1 - part_alpha) + + texture_image[PartInd - 1, y_index, x_index] * part_alpha + ) + + return generated_image_bgr.astype(np.uint8) diff --git a/motion-gan-pipeline/preprocessing/third/densepose/vis/extractor.py b/motion-gan-pipeline/preprocessing/third/densepose/vis/extractor.py new file mode 100644 index 0000000..bfb2bdf --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/densepose/vis/extractor.py @@ -0,0 +1,199 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
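+# Editor's note (not part of the upstream file): the extractors below pull
+# (results, boxes) pairs out of detectron2 `Instances` objects so that the
+# visualizers in this package can consume them. A minimal sketch of the
+# intended pairing, assuming `outputs["instances"]` comes from a DensePose
+# predictor (the variable names here are illustrative only):
+#
+#     visualizer = DensePoseResultsFineSegmentationVisualizer()
+#     extractor = create_extractor(visualizer)          # DensePoseResultExtractor
+#     data = extractor(outputs["instances"].to("cpu"))  # (results, boxes_xywh)
+#     image_vis = visualizer.visualize(image_bgr, data)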
+import logging +from typing import List, Optional, Sequence, Tuple +import torch + +from detectron2.layers.nms import batched_nms +from detectron2.structures.instances import Instances + +from densepose.converters import ToChartResultConverterWithConfidences +from densepose.structures import ( + DensePoseChartResultWithConfidences, + DensePoseEmbeddingPredictorOutput, +) +from densepose.vis.bounding_box import BoundingBoxVisualizer, ScoredBoundingBoxVisualizer +from densepose.vis.densepose_outputs_vertex import DensePoseOutputsVertexVisualizer +from densepose.vis.densepose_results import DensePoseResultsVisualizer + +from .base import CompoundVisualizer + +Scores = Sequence[float] +DensePoseChartResultsWithConfidences = List[DensePoseChartResultWithConfidences] + + +def extract_scores_from_instances(instances: Instances, select=None): + if instances.has("scores"): + return instances.scores if select is None else instances.scores[select] + return None + + +def extract_boxes_xywh_from_instances(instances: Instances, select=None): + if instances.has("pred_boxes"): + boxes_xywh = instances.pred_boxes.tensor.clone() + boxes_xywh[:, 2] -= boxes_xywh[:, 0] + boxes_xywh[:, 3] -= boxes_xywh[:, 1] + return boxes_xywh if select is None else boxes_xywh[select] + return None + + +def create_extractor(visualizer: object): + """ + Create an extractor for the provided visualizer + """ + if isinstance(visualizer, CompoundVisualizer): + extractors = [create_extractor(v) for v in visualizer.visualizers] + return CompoundExtractor(extractors) + elif isinstance(visualizer, DensePoseResultsVisualizer): + return DensePoseResultExtractor() + elif isinstance(visualizer, ScoredBoundingBoxVisualizer): + return CompoundExtractor([extract_boxes_xywh_from_instances, extract_scores_from_instances]) + elif isinstance(visualizer, BoundingBoxVisualizer): + return extract_boxes_xywh_from_instances + elif isinstance(visualizer, DensePoseOutputsVertexVisualizer): + return DensePoseOutputsExtractor() + else: + logger = logging.getLogger(__name__) + logger.error(f"Could not create extractor for {visualizer}") + return None + + +class BoundingBoxExtractor(object): + """ + Extracts bounding boxes from instances + """ + + def __call__(self, instances: Instances): + boxes_xywh = extract_boxes_xywh_from_instances(instances) + return boxes_xywh + + +class ScoredBoundingBoxExtractor(object): + """ + Extracts bounding boxes from instances + """ + + def __call__(self, instances: Instances, select=None): + scores = extract_scores_from_instances(instances) + boxes_xywh = extract_boxes_xywh_from_instances(instances) + if (scores is None) or (boxes_xywh is None): + return (boxes_xywh, scores) + if select is not None: + scores = scores[select] + boxes_xywh = boxes_xywh[select] + return (boxes_xywh, scores) + + +class DensePoseResultExtractor(object): + """ + Extracts DensePose chart result with confidences from instances + """ + + def __call__( + self, instances: Instances, select=None + ) -> Tuple[Optional[DensePoseChartResultsWithConfidences], Optional[torch.Tensor]]: + if instances.has("pred_densepose") and instances.has("pred_boxes"): + dpout = instances.pred_densepose + boxes_xyxy = instances.pred_boxes + boxes_xywh = extract_boxes_xywh_from_instances(instances) + if select is not None: + dpout = dpout[select] + boxes_xyxy = boxes_xyxy[select] + converter = ToChartResultConverterWithConfidences() + results = [converter.convert(dpout[i], boxes_xyxy[[i]]) for i in range(len(dpout))] + return results, boxes_xywh + else: + return None, 
None + + +class DensePoseOutputsExtractor(object): + """ + Extracts DensePose result from instances + """ + + def __call__( + self, + instances: Instances, + select=None, + ) -> Tuple[ + Optional[DensePoseEmbeddingPredictorOutput], Optional[torch.Tensor], Optional[List[int]] + ]: + if not (instances.has("pred_densepose") and instances.has("pred_boxes")): + return None, None, None + + dpout = instances.pred_densepose + boxes_xyxy = instances.pred_boxes + boxes_xywh = extract_boxes_xywh_from_instances(instances) + + if instances.has("pred_classes"): + classes = instances.pred_classes.tolist() + else: + classes = None + + if select is not None: + dpout = dpout[select] + boxes_xyxy = boxes_xyxy[select] + if classes is not None: + classes = classes[select] + + return dpout, boxes_xywh, classes + + +class CompoundExtractor(object): + """ + Extracts data for CompoundVisualizer + """ + + def __init__(self, extractors): + self.extractors = extractors + + def __call__(self, instances: Instances, select=None): + datas = [] + for extractor in self.extractors: + data = extractor(instances, select) + datas.append(data) + return datas + + +class NmsFilteredExtractor(object): + """ + Extracts data in the format accepted by NmsFilteredVisualizer + """ + + def __init__(self, extractor, iou_threshold): + self.extractor = extractor + self.iou_threshold = iou_threshold + + def __call__(self, instances: Instances, select=None): + scores = extract_scores_from_instances(instances) + boxes_xywh = extract_boxes_xywh_from_instances(instances) + if boxes_xywh is None: + return None + select_local_idx = batched_nms( + boxes_xywh, + scores, + torch.zeros(len(scores), dtype=torch.int32), + iou_threshold=self.iou_threshold, + ).squeeze() + select_local = torch.zeros(len(boxes_xywh), dtype=torch.bool, device=boxes_xywh.device) + select_local[select_local_idx] = True + select = select_local if select is None else (select & select_local) + return self.extractor(instances, select=select) + + +class ScoreThresholdedExtractor(object): + """ + Extracts data in the format accepted by ScoreThresholdedVisualizer + """ + + def __init__(self, extractor, min_score): + self.extractor = extractor + self.min_score = min_score + + def __call__(self, instances: Instances, select=None): + scores = extract_scores_from_instances(instances) + if scores is None: + return None + select_local = scores > self.min_score + select = select_local if select is None else (select & select_local) + data = self.extractor(instances, select=select) + return data diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/__init__.py new file mode 100644 index 0000000..bdd994b --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/__init__.py @@ -0,0 +1,10 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from .utils.env import setup_environment + +setup_environment() + + +# This line will be programatically read/write by setup.py. +# Leave them at the bottom of this file and don't touch them. +__version__ = "0.6" diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/checkpoint/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/checkpoint/__init__.py new file mode 100644 index 0000000..99da046 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/checkpoint/__init__.py @@ -0,0 +1,10 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. +# File: + + +from . 
import catalog as _UNUSED # register the handler +from .detection_checkpoint import DetectionCheckpointer +from fvcore.common.checkpoint import Checkpointer, PeriodicCheckpointer + +__all__ = ["Checkpointer", "PeriodicCheckpointer", "DetectionCheckpointer"] diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/checkpoint/c2_model_loading.py b/motion-gan-pipeline/preprocessing/third/detectron2/checkpoint/c2_model_loading.py new file mode 100644 index 0000000..8c8d181 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/checkpoint/c2_model_loading.py @@ -0,0 +1,407 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import copy +import logging +import re +from typing import Dict, List +import torch +from tabulate import tabulate + + +def convert_basic_c2_names(original_keys): + """ + Apply some basic name conversion to names in C2 weights. + It only deals with typical backbone models. + + Args: + original_keys (list[str]): + Returns: + list[str]: The same number of strings matching those in original_keys. + """ + layer_keys = copy.deepcopy(original_keys) + layer_keys = [ + {"pred_b": "linear_b", "pred_w": "linear_w"}.get(k, k) for k in layer_keys + ] # some hard-coded mappings + + layer_keys = [k.replace("_", ".") for k in layer_keys] + layer_keys = [re.sub("\\.b$", ".bias", k) for k in layer_keys] + layer_keys = [re.sub("\\.w$", ".weight", k) for k in layer_keys] + # Uniform both bn and gn names to "norm" + layer_keys = [re.sub("bn\\.s$", "norm.weight", k) for k in layer_keys] + layer_keys = [re.sub("bn\\.bias$", "norm.bias", k) for k in layer_keys] + layer_keys = [re.sub("bn\\.rm", "norm.running_mean", k) for k in layer_keys] + layer_keys = [re.sub("bn\\.running.mean$", "norm.running_mean", k) for k in layer_keys] + layer_keys = [re.sub("bn\\.riv$", "norm.running_var", k) for k in layer_keys] + layer_keys = [re.sub("bn\\.running.var$", "norm.running_var", k) for k in layer_keys] + layer_keys = [re.sub("bn\\.gamma$", "norm.weight", k) for k in layer_keys] + layer_keys = [re.sub("bn\\.beta$", "norm.bias", k) for k in layer_keys] + layer_keys = [re.sub("gn\\.s$", "norm.weight", k) for k in layer_keys] + layer_keys = [re.sub("gn\\.bias$", "norm.bias", k) for k in layer_keys] + + # stem + layer_keys = [re.sub("^res\\.conv1\\.norm\\.", "conv1.norm.", k) for k in layer_keys] + # to avoid mis-matching with "conv1" in other components (e.g. 
detection head) + layer_keys = [re.sub("^conv1\\.", "stem.conv1.", k) for k in layer_keys] + + # layer1-4 is used by torchvision, however we follow the C2 naming strategy (res2-5) + # layer_keys = [re.sub("^res2.", "layer1.", k) for k in layer_keys] + # layer_keys = [re.sub("^res3.", "layer2.", k) for k in layer_keys] + # layer_keys = [re.sub("^res4.", "layer3.", k) for k in layer_keys] + # layer_keys = [re.sub("^res5.", "layer4.", k) for k in layer_keys] + + # blocks + layer_keys = [k.replace(".branch1.", ".shortcut.") for k in layer_keys] + layer_keys = [k.replace(".branch2a.", ".conv1.") for k in layer_keys] + layer_keys = [k.replace(".branch2b.", ".conv2.") for k in layer_keys] + layer_keys = [k.replace(".branch2c.", ".conv3.") for k in layer_keys] + + # DensePose substitutions + layer_keys = [re.sub("^body.conv.fcn", "body_conv_fcn", k) for k in layer_keys] + layer_keys = [k.replace("AnnIndex.lowres", "ann_index_lowres") for k in layer_keys] + layer_keys = [k.replace("Index.UV.lowres", "index_uv_lowres") for k in layer_keys] + layer_keys = [k.replace("U.lowres", "u_lowres") for k in layer_keys] + layer_keys = [k.replace("V.lowres", "v_lowres") for k in layer_keys] + return layer_keys + + +def convert_c2_detectron_names(weights): + """ + Map Caffe2 Detectron weight names to Detectron2 names. + + Args: + weights (dict): name -> tensor + + Returns: + dict: detectron2 names -> tensor + dict: detectron2 names -> C2 names + """ + logger = logging.getLogger(__name__) + logger.info("Renaming Caffe2 weights ......") + original_keys = sorted(weights.keys()) + layer_keys = copy.deepcopy(original_keys) + + layer_keys = convert_basic_c2_names(layer_keys) + + # -------------------------------------------------------------------------- + # RPN hidden representation conv + # -------------------------------------------------------------------------- + # FPN case + # In the C2 model, the RPN hidden layer conv is defined for FPN level 2 and then + # shared for all other levels, hence the appearance of "fpn2" + layer_keys = [ + k.replace("conv.rpn.fpn2", "proposal_generator.rpn_head.conv") for k in layer_keys + ] + # Non-FPN case + layer_keys = [k.replace("conv.rpn", "proposal_generator.rpn_head.conv") for k in layer_keys] + + # -------------------------------------------------------------------------- + # RPN box transformation conv + # -------------------------------------------------------------------------- + # FPN case (see note above about "fpn2") + layer_keys = [ + k.replace("rpn.bbox.pred.fpn2", "proposal_generator.rpn_head.anchor_deltas") + for k in layer_keys + ] + layer_keys = [ + k.replace("rpn.cls.logits.fpn2", "proposal_generator.rpn_head.objectness_logits") + for k in layer_keys + ] + # Non-FPN case + layer_keys = [ + k.replace("rpn.bbox.pred", "proposal_generator.rpn_head.anchor_deltas") for k in layer_keys + ] + layer_keys = [ + k.replace("rpn.cls.logits", "proposal_generator.rpn_head.objectness_logits") + for k in layer_keys + ] + + # -------------------------------------------------------------------------- + # Fast R-CNN box head + # -------------------------------------------------------------------------- + layer_keys = [re.sub("^bbox\\.pred", "bbox_pred", k) for k in layer_keys] + layer_keys = [re.sub("^cls\\.score", "cls_score", k) for k in layer_keys] + layer_keys = [re.sub("^fc6\\.", "box_head.fc1.", k) for k in layer_keys] + layer_keys = [re.sub("^fc7\\.", "box_head.fc2.", k) for k in layer_keys] + # 4conv1fc head tensor names: head_conv1_w, head_conv1_gn_s + layer_keys = 
[re.sub("^head\\.conv", "box_head.conv", k) for k in layer_keys] + + # -------------------------------------------------------------------------- + # FPN lateral and output convolutions + # -------------------------------------------------------------------------- + def fpn_map(name): + """ + Look for keys with the following patterns: + 1) Starts with "fpn.inner." + Example: "fpn.inner.res2.2.sum.lateral.weight" + Meaning: These are lateral pathway convolutions + 2) Starts with "fpn.res" + Example: "fpn.res2.2.sum.weight" + Meaning: These are FPN output convolutions + """ + splits = name.split(".") + norm = ".norm" if "norm" in splits else "" + if name.startswith("fpn.inner."): + # splits example: ['fpn', 'inner', 'res2', '2', 'sum', 'lateral', 'weight'] + stage = int(splits[2][len("res") :]) + return "fpn_lateral{}{}.{}".format(stage, norm, splits[-1]) + elif name.startswith("fpn.res"): + # splits example: ['fpn', 'res2', '2', 'sum', 'weight'] + stage = int(splits[1][len("res") :]) + return "fpn_output{}{}.{}".format(stage, norm, splits[-1]) + return name + + layer_keys = [fpn_map(k) for k in layer_keys] + + # -------------------------------------------------------------------------- + # Mask R-CNN mask head + # -------------------------------------------------------------------------- + # roi_heads.StandardROIHeads case + layer_keys = [k.replace(".[mask].fcn", "mask_head.mask_fcn") for k in layer_keys] + layer_keys = [re.sub("^\\.mask\\.fcn", "mask_head.mask_fcn", k) for k in layer_keys] + layer_keys = [k.replace("mask.fcn.logits", "mask_head.predictor") for k in layer_keys] + # roi_heads.Res5ROIHeads case + layer_keys = [k.replace("conv5.mask", "mask_head.deconv") for k in layer_keys] + + # -------------------------------------------------------------------------- + # Keypoint R-CNN head + # -------------------------------------------------------------------------- + # interestingly, the keypoint head convs have blob names that are simply "conv_fcnX" + layer_keys = [k.replace("conv.fcn", "roi_heads.keypoint_head.conv_fcn") for k in layer_keys] + layer_keys = [ + k.replace("kps.score.lowres", "roi_heads.keypoint_head.score_lowres") for k in layer_keys + ] + layer_keys = [k.replace("kps.score.", "roi_heads.keypoint_head.score.") for k in layer_keys] + + # -------------------------------------------------------------------------- + # Done with replacements + # -------------------------------------------------------------------------- + assert len(set(layer_keys)) == len(layer_keys) + assert len(original_keys) == len(layer_keys) + + new_weights = {} + new_keys_to_original_keys = {} + for orig, renamed in zip(original_keys, layer_keys): + new_keys_to_original_keys[renamed] = orig + if renamed.startswith("bbox_pred.") or renamed.startswith("mask_head.predictor."): + # remove the meaningless prediction weight for background class + new_start_idx = 4 if renamed.startswith("bbox_pred.") else 1 + new_weights[renamed] = weights[orig][new_start_idx:] + logger.info( + "Remove prediction weight for background class in {}. 
The shape changes from " + "{} to {}.".format( + renamed, tuple(weights[orig].shape), tuple(new_weights[renamed].shape) + ) + ) + elif renamed.startswith("cls_score."): + # move weights of bg class from original index 0 to last index + logger.info( + "Move classification weights for background class in {} from index 0 to " + "index {}.".format(renamed, weights[orig].shape[0] - 1) + ) + new_weights[renamed] = torch.cat([weights[orig][1:], weights[orig][:1]]) + else: + new_weights[renamed] = weights[orig] + + return new_weights, new_keys_to_original_keys + + +# Note the current matching is not symmetric. +# it assumes model_state_dict will have longer names. +def align_and_update_state_dicts(model_state_dict, ckpt_state_dict, c2_conversion=True): + """ + Match names between the two state-dict, and returns a new chkpt_state_dict with names + converted to match model_state_dict with heuristics. The returned dict can be later + loaded with fvcore checkpointer. + If `c2_conversion==True`, `ckpt_state_dict` is assumed to be a Caffe2 + model and will be renamed at first. + + Strategy: suppose that the models that we will create will have prefixes appended + to each of its keys, for example due to an extra level of nesting that the original + pre-trained weights from ImageNet won't contain. For example, model.state_dict() + might return backbone[0].body.res2.conv1.weight, while the pre-trained model contains + res2.conv1.weight. We thus want to match both parameters together. + For that, we look for each model weight, look among all loaded keys if there is one + that is a suffix of the current weight name, and use it if that's the case. + If multiple matches exist, take the one with longest size + of the corresponding name. For example, for the same model as before, the pretrained + weight file can contain both res2.conv1.weight, as well as conv1.weight. In this case, + we want to match backbone[0].body.conv1.weight to conv1.weight, and + backbone[0].body.res2.conv1.weight to res2.conv1.weight. + """ + model_keys = sorted(model_state_dict.keys()) + if c2_conversion: + ckpt_state_dict, original_keys = convert_c2_detectron_names(ckpt_state_dict) + # original_keys: the name in the original dict (before renaming) + else: + original_keys = {x: x for x in ckpt_state_dict.keys()} + ckpt_keys = sorted(ckpt_state_dict.keys()) + + def match(a, b): + # Matched ckpt_key should be a complete (starts with '.') suffix. + # For example, roi_heads.mesh_head.whatever_conv1 does not match conv1, + # but matches whatever_conv1 or mesh_head.whatever_conv1. + return a == b or a.endswith("." 
+ b) + + # get a matrix of string matches, where each (i, j) entry correspond to the size of the + # ckpt_key string, if it matches + match_matrix = [len(j) if match(i, j) else 0 for i in model_keys for j in ckpt_keys] + match_matrix = torch.as_tensor(match_matrix).view(len(model_keys), len(ckpt_keys)) + # use the matched one with longest size in case of multiple matches + max_match_size, idxs = match_matrix.max(1) + # remove indices that correspond to no-match + idxs[max_match_size == 0] = -1 + + logger = logging.getLogger(__name__) + # matched_pairs (matched checkpoint key --> matched model key) + matched_keys = {} + result_state_dict = {} + for idx_model, idx_ckpt in enumerate(idxs.tolist()): + if idx_ckpt == -1: + continue + key_model = model_keys[idx_model] + key_ckpt = ckpt_keys[idx_ckpt] + value_ckpt = ckpt_state_dict[key_ckpt] + shape_in_model = model_state_dict[key_model].shape + + if shape_in_model != value_ckpt.shape: + logger.warning( + "Shape of {} in checkpoint is {}, while shape of {} in model is {}.".format( + key_ckpt, value_ckpt.shape, key_model, shape_in_model + ) + ) + logger.warning( + "{} will not be loaded. Please double check and see if this is desired.".format( + key_ckpt + ) + ) + continue + + assert key_model not in result_state_dict + result_state_dict[key_model] = value_ckpt + if key_ckpt in matched_keys: # already added to matched_keys + logger.error( + "Ambiguity found for {} in checkpoint!" + "It matches at least two keys in the model ({} and {}).".format( + key_ckpt, key_model, matched_keys[key_ckpt] + ) + ) + raise ValueError("Cannot match one checkpoint key to multiple keys in the model.") + + matched_keys[key_ckpt] = key_model + + # logging: + matched_model_keys = sorted(matched_keys.values()) + if len(matched_model_keys) == 0: + logger.warning("No weights in checkpoint matched with model.") + return ckpt_state_dict + common_prefix = _longest_common_prefix(matched_model_keys) + rev_matched_keys = {v: k for k, v in matched_keys.items()} + original_keys = {k: original_keys[rev_matched_keys[k]] for k in matched_model_keys} + + model_key_groups = _group_keys_by_module(matched_model_keys, original_keys) + table = [] + memo = set() + for key_model in matched_model_keys: + if key_model in memo: + continue + if key_model in model_key_groups: + group = model_key_groups[key_model] + memo |= set(group) + shapes = [tuple(model_state_dict[k].shape) for k in group] + table.append( + ( + _longest_common_prefix([k[len(common_prefix) :] for k in group]) + "*", + _group_str([original_keys[k] for k in group]), + " ".join([str(x).replace(" ", "") for x in shapes]), + ) + ) + else: + key_checkpoint = original_keys[key_model] + shape = str(tuple(model_state_dict[key_model].shape)) + table.append((key_model[len(common_prefix) :], key_checkpoint, shape)) + table_str = tabulate( + table, tablefmt="pipe", headers=["Names in Model", "Names in Checkpoint", "Shapes"] + ) + logger.info( + "Following weights matched with " + + (f"submodule {common_prefix[:-1]}" if common_prefix else "model") + + ":\n" + + table_str + ) + + unmatched_ckpt_keys = [k for k in ckpt_keys if k not in set(matched_keys.keys())] + for k in unmatched_ckpt_keys: + result_state_dict[k] = ckpt_state_dict[k] + return result_state_dict + + +def _group_keys_by_module(keys: List[str], original_names: Dict[str, str]): + """ + Params in the same submodule are grouped together. 
+ + Args: + keys: names of all parameters + original_names: mapping from parameter name to their name in the checkpoint + + Returns: + dict[name -> all other names in the same group] + """ + + def _submodule_name(key): + pos = key.rfind(".") + if pos < 0: + return None + prefix = key[: pos + 1] + return prefix + + all_submodules = [_submodule_name(k) for k in keys] + all_submodules = [x for x in all_submodules if x] + all_submodules = sorted(all_submodules, key=len) + + ret = {} + for prefix in all_submodules: + group = [k for k in keys if k.startswith(prefix)] + if len(group) <= 1: + continue + original_name_lcp = _longest_common_prefix_str([original_names[k] for k in group]) + if len(original_name_lcp) == 0: + # don't group weights if original names don't share prefix + continue + + for k in group: + if k in ret: + continue + ret[k] = group + return ret + + +def _longest_common_prefix(names: List[str]) -> str: + """ + ["abc.zfg", "abc.zef"] -> "abc." + """ + names = [n.split(".") for n in names] + m1, m2 = min(names), max(names) + ret = [a for a, b in zip(m1, m2) if a == b] + ret = ".".join(ret) + "." if len(ret) else "" + return ret + + +def _longest_common_prefix_str(names: List[str]) -> str: + m1, m2 = min(names), max(names) + lcp = [a for a, b in zip(m1, m2) if a == b] + lcp = "".join(lcp) + return lcp + + +def _group_str(names: List[str]) -> str: + """ + Turn "common1", "common2", "common3" into "common{1,2,3}" + """ + lcp = _longest_common_prefix_str(names) + rest = [x[len(lcp) :] for x in names] + rest = "{" + ",".join(rest) + "}" + ret = lcp + rest + + # add some simplification for BN specifically + ret = ret.replace("bn_{beta,running_mean,running_var,gamma}", "bn_*") + ret = ret.replace("bn_beta,bn_running_mean,bn_running_var,bn_gamma", "bn_*") + return ret diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/checkpoint/catalog.py b/motion-gan-pipeline/preprocessing/third/detectron2/checkpoint/catalog.py new file mode 100644 index 0000000..9a85736 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/checkpoint/catalog.py @@ -0,0 +1,115 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import logging + +from detectron2.utils.file_io import PathHandler, PathManager + + +class ModelCatalog(object): + """ + Store mappings from names to third-party models. + """ + + S3_C2_DETECTRON_PREFIX = "https://dl.fbaipublicfiles.com/detectron" + + # MSRA models have STRIDE_IN_1X1=True. False otherwise. + # NOTE: all BN models here have fused BN into an affine layer. + # As a result, you should only load them to a model with "FrozenBN". + # Loading them to a model with regular BN or SyncBN is wrong. + # Even when loaded to FrozenBN, it is still different from affine by an epsilon, + # which should be negligible for training. + # NOTE: all models here uses PIXEL_STD=[1,1,1] + # NOTE: Most of the BN models here are no longer used. We use the + # re-converted pre-trained models under detectron2 model zoo instead. 
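+    # Editor's note (illustrative; derived from _get_c2_imagenet_pretrained
+    # below): ModelCatalog.get("ImageNetPretrained/MSRA/R-50") resolves to
+    # "https://dl.fbaipublicfiles.com/detectron/ImageNetPretrained/MSRA/R-50.pkl".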
+ C2_IMAGENET_MODELS = { + "MSRA/R-50": "ImageNetPretrained/MSRA/R-50.pkl", + "MSRA/R-101": "ImageNetPretrained/MSRA/R-101.pkl", + "FAIR/R-50-GN": "ImageNetPretrained/47261647/R-50-GN.pkl", + "FAIR/R-101-GN": "ImageNetPretrained/47592356/R-101-GN.pkl", + "FAIR/X-101-32x8d": "ImageNetPretrained/20171220/X-101-32x8d.pkl", + "FAIR/X-101-64x4d": "ImageNetPretrained/FBResNeXt/X-101-64x4d.pkl", + "FAIR/X-152-32x8d-IN5k": "ImageNetPretrained/25093814/X-152-32x8d-IN5k.pkl", + } + + C2_DETECTRON_PATH_FORMAT = ( + "{prefix}/{url}/output/train/{dataset}/{type}/model_final.pkl" # noqa B950 + ) + + C2_DATASET_COCO = "coco_2014_train%3Acoco_2014_valminusminival" + C2_DATASET_COCO_KEYPOINTS = "keypoints_coco_2014_train%3Akeypoints_coco_2014_valminusminival" + + # format: {model_name} -> part of the url + C2_DETECTRON_MODELS = { + "35857197/e2e_faster_rcnn_R-50-C4_1x": "35857197/12_2017_baselines/e2e_faster_rcnn_R-50-C4_1x.yaml.01_33_49.iAX0mXvW", # noqa B950 + "35857345/e2e_faster_rcnn_R-50-FPN_1x": "35857345/12_2017_baselines/e2e_faster_rcnn_R-50-FPN_1x.yaml.01_36_30.cUF7QR7I", # noqa B950 + "35857890/e2e_faster_rcnn_R-101-FPN_1x": "35857890/12_2017_baselines/e2e_faster_rcnn_R-101-FPN_1x.yaml.01_38_50.sNxI7sX7", # noqa B950 + "36761737/e2e_faster_rcnn_X-101-32x8d-FPN_1x": "36761737/12_2017_baselines/e2e_faster_rcnn_X-101-32x8d-FPN_1x.yaml.06_31_39.5MIHi1fZ", # noqa B950 + "35858791/e2e_mask_rcnn_R-50-C4_1x": "35858791/12_2017_baselines/e2e_mask_rcnn_R-50-C4_1x.yaml.01_45_57.ZgkA7hPB", # noqa B950 + "35858933/e2e_mask_rcnn_R-50-FPN_1x": "35858933/12_2017_baselines/e2e_mask_rcnn_R-50-FPN_1x.yaml.01_48_14.DzEQe4wC", # noqa B950 + "35861795/e2e_mask_rcnn_R-101-FPN_1x": "35861795/12_2017_baselines/e2e_mask_rcnn_R-101-FPN_1x.yaml.02_31_37.KqyEK4tT", # noqa B950 + "36761843/e2e_mask_rcnn_X-101-32x8d-FPN_1x": "36761843/12_2017_baselines/e2e_mask_rcnn_X-101-32x8d-FPN_1x.yaml.06_35_59.RZotkLKI", # noqa B950 + "48616381/e2e_mask_rcnn_R-50-FPN_2x_gn": "GN/48616381/04_2018_gn_baselines/e2e_mask_rcnn_R-50-FPN_2x_gn_0416.13_23_38.bTlTI97Q", # noqa B950 + "37697547/e2e_keypoint_rcnn_R-50-FPN_1x": "37697547/12_2017_baselines/e2e_keypoint_rcnn_R-50-FPN_1x.yaml.08_42_54.kdzV35ao", # noqa B950 + "35998355/rpn_R-50-C4_1x": "35998355/12_2017_baselines/rpn_R-50-C4_1x.yaml.08_00_43.njH5oD9L", # noqa B950 + "35998814/rpn_R-50-FPN_1x": "35998814/12_2017_baselines/rpn_R-50-FPN_1x.yaml.08_06_03.Axg0r179", # noqa B950 + "36225147/fast_R-50-FPN_1x": "36225147/12_2017_baselines/fast_rcnn_R-50-FPN_1x.yaml.08_39_09.L3obSdQ2", # noqa B950 + } + + @staticmethod + def get(name): + if name.startswith("Caffe2Detectron/COCO"): + return ModelCatalog._get_c2_detectron_baseline(name) + if name.startswith("ImageNetPretrained/"): + return ModelCatalog._get_c2_imagenet_pretrained(name) + raise RuntimeError("model not present in the catalog: {}".format(name)) + + @staticmethod + def _get_c2_imagenet_pretrained(name): + prefix = ModelCatalog.S3_C2_DETECTRON_PREFIX + name = name[len("ImageNetPretrained/") :] + name = ModelCatalog.C2_IMAGENET_MODELS[name] + url = "/".join([prefix, name]) + return url + + @staticmethod + def _get_c2_detectron_baseline(name): + name = name[len("Caffe2Detectron/COCO/") :] + url = ModelCatalog.C2_DETECTRON_MODELS[name] + if "keypoint_rcnn" in name: + dataset = ModelCatalog.C2_DATASET_COCO_KEYPOINTS + else: + dataset = ModelCatalog.C2_DATASET_COCO + + if "35998355/rpn_R-50-C4_1x" in name: + # this one model is somehow different from others .. 
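+            # Editor's note: with type="rpn" the catalog path below expands to
+            # {S3_C2_DETECTRON_PREFIX}/{url}/output/train/{dataset}/rpn/model_final.pkl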
+ type = "rpn" + else: + type = "generalized_rcnn" + + # Detectron C2 models are stored in the structure defined in `C2_DETECTRON_PATH_FORMAT`. + url = ModelCatalog.C2_DETECTRON_PATH_FORMAT.format( + prefix=ModelCatalog.S3_C2_DETECTRON_PREFIX, url=url, type=type, dataset=dataset + ) + return url + + +class ModelCatalogHandler(PathHandler): + """ + Resolve URL like catalog://. + """ + + PREFIX = "catalog://" + + def _get_supported_prefixes(self): + return [self.PREFIX] + + def _get_local_path(self, path, **kwargs): + logger = logging.getLogger(__name__) + catalog_path = ModelCatalog.get(path[len(self.PREFIX) :]) + logger.info("Catalog entry {} points to {}".format(path, catalog_path)) + return PathManager.get_local_path(catalog_path, **kwargs) + + def _open(self, path, mode="r", **kwargs): + return PathManager.open(self._get_local_path(path), mode, **kwargs) + + +PathManager.register_handler(ModelCatalogHandler()) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/checkpoint/detection_checkpoint.py b/motion-gan-pipeline/preprocessing/third/detectron2/checkpoint/detection_checkpoint.py new file mode 100644 index 0000000..82fd3b2 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/checkpoint/detection_checkpoint.py @@ -0,0 +1,120 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import logging +import os +import pickle +import torch +from fvcore.common.checkpoint import Checkpointer +from torch.nn.parallel import DistributedDataParallel + +import detectron2.utils.comm as comm +from detectron2.utils.file_io import PathManager + +from .c2_model_loading import align_and_update_state_dicts + + +class DetectionCheckpointer(Checkpointer): + """ + Same as :class:`Checkpointer`, but is able to: + 1. handle models in detectron & detectron2 model zoo, and apply conversions for legacy models. + 2. correctly load checkpoints that are only available on the master worker + """ + + def __init__(self, model, save_dir="", *, save_to_disk=None, **checkpointables): + is_main_process = comm.is_main_process() + super().__init__( + model, + save_dir, + save_to_disk=is_main_process if save_to_disk is None else save_to_disk, + **checkpointables, + ) + self.path_manager = PathManager + + def load(self, path, *args, **kwargs): + need_sync = False + + if path and isinstance(self.model, DistributedDataParallel): + logger = logging.getLogger(__name__) + path = self.path_manager.get_local_path(path) + has_file = os.path.isfile(path) + all_has_file = comm.all_gather(has_file) + if not all_has_file[0]: + raise OSError(f"File {path} not found on main worker.") + if not all(all_has_file): + logger.warning( + f"Not all workers can read checkpoint {path}. " + "Training may fail to fully resume." + ) + # TODO: broadcast the checkpoint file contents from main + # worker, and load from it instead. 
+ need_sync = True + if not has_file: + path = None # don't load if not readable + ret = super().load(path, *args, **kwargs) + + if need_sync: + logger.info("Broadcasting model states from main worker ...") + self.model._sync_params_and_buffers() + return ret + + def _load_file(self, filename): + if filename.endswith(".pkl"): + with PathManager.open(filename, "rb") as f: + data = pickle.load(f, encoding="latin1") + if "model" in data and "__author__" in data: + # file is in Detectron2 model zoo format + self.logger.info("Reading a file from '{}'".format(data["__author__"])) + return data + else: + # assume file is from Caffe2 / Detectron1 model zoo + if "blobs" in data: + # Detection models have "blobs", but ImageNet models don't + data = data["blobs"] + data = {k: v for k, v in data.items() if not k.endswith("_momentum")} + return {"model": data, "__author__": "Caffe2", "matching_heuristics": True} + elif filename.endswith(".pyth"): + # assume file is from pycls; no one else seems to use the ".pyth" extension + with PathManager.open(filename, "rb") as f: + data = torch.load(f) + assert ( + "model_state" in data + ), f"Cannot load .pyth file {filename}; pycls checkpoints must contain 'model_state'." + model_state = { + k: v + for k, v in data["model_state"].items() + if not k.endswith("num_batches_tracked") + } + return {"model": model_state, "__author__": "pycls", "matching_heuristics": True} + + loaded = super()._load_file(filename) # load native pth checkpoint + if "model" not in loaded: + loaded = {"model": loaded} + return loaded + + def _load_model(self, checkpoint): + if checkpoint.get("matching_heuristics", False): + self._convert_ndarray_to_tensor(checkpoint["model"]) + # convert weights by name-matching heuristics + checkpoint["model"] = align_and_update_state_dicts( + self.model.state_dict(), + checkpoint["model"], + c2_conversion=checkpoint.get("__author__", None) == "Caffe2", + ) + # for non-caffe2 models, use standard ways to load it + incompatible = super()._load_model(checkpoint) + + model_buffers = dict(self.model.named_buffers(recurse=False)) + for k in ["pixel_mean", "pixel_std"]: + # Ignore missing key message about pixel_mean/std. + # Though they may be missing in old checkpoints, they will be correctly + # initialized from config anyway. + if k in model_buffers: + try: + incompatible.missing_keys.remove(k) + except ValueError: + pass + for k in incompatible.unexpected_keys[:]: + # Ignore unexpected keys about cell anchors. They exist in old checkpoints + # but now they are non-persistent buffers and will not be in new checkpoints. + if "anchor_generator.cell_anchors" in k: + incompatible.unexpected_keys.remove(k) + return incompatible diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/config/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/config/__init__.py new file mode 100644 index 0000000..4e648e6 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/config/__init__.py @@ -0,0 +1,24 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
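+# Editor's note: a minimal, hedged usage sketch of the two config styles this
+# package re-exports (the config file names are hypothetical):
+#
+#     from detectron2.config import get_cfg, LazyConfig, instantiate
+#
+#     cfg = get_cfg()                        # yacs-style CfgNode with defaults
+#     cfg.merge_from_file("my_config.yaml")  # merges, auto-upgrading old VERSIONs
+#
+#     lazy = LazyConfig.load("my_config.py")  # python-based lazy config system
+#     model = instantiate(lazy.model)         # build objects from LazyCall nodes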
+from .compat import downgrade_config, upgrade_config +from .config import CfgNode, get_cfg, global_cfg, set_global_cfg, configurable +from .instantiate import instantiate +from .lazy import LazyCall, LazyConfig + +__all__ = [ + "CfgNode", + "get_cfg", + "global_cfg", + "set_global_cfg", + "downgrade_config", + "upgrade_config", + "configurable", + "instantiate", + "LazyCall", + "LazyConfig", +] + + +from detectron2.utils.env import fixup_module_metadata + +fixup_module_metadata(__name__, globals(), __all__) +del fixup_module_metadata diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/config/compat.py b/motion-gan-pipeline/preprocessing/third/detectron2/config/compat.py new file mode 100644 index 0000000..11a08c4 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/config/compat.py @@ -0,0 +1,229 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +""" +Backward compatibility of configs. + +Instructions to bump version: ++ It's not needed to bump version if new keys are added. + It's only needed when backward-incompatible changes happen + (i.e., some existing keys disappear, or the meaning of a key changes) ++ To bump version, do the following: + 1. Increment _C.VERSION in defaults.py + 2. Add a converter in this file. + + Each ConverterVX has a function "upgrade" which in-place upgrades config from X-1 to X, + and a function "downgrade" which in-place downgrades config from X to X-1 + + In each function, VERSION is left unchanged. + + Each converter assumes that its input has the relevant keys + (i.e., the input is not a partial config). + 3. Run the tests (test_config.py) to make sure the upgrade & downgrade + functions are consistent. +""" + +import logging +from typing import List, Optional, Tuple + +from .config import CfgNode as CN +from .defaults import _C + +__all__ = ["upgrade_config", "downgrade_config"] + + +def upgrade_config(cfg: CN, to_version: Optional[int] = None) -> CN: + """ + Upgrade a config from its current version to a newer version. + + Args: + cfg (CfgNode): + to_version (int): defaults to the latest version. + """ + cfg = cfg.clone() + if to_version is None: + to_version = _C.VERSION + + assert cfg.VERSION <= to_version, "Cannot upgrade from v{} to v{}!".format( + cfg.VERSION, to_version + ) + for k in range(cfg.VERSION, to_version): + converter = globals()["ConverterV" + str(k + 1)] + converter.upgrade(cfg) + cfg.VERSION = k + 1 + return cfg + + +def downgrade_config(cfg: CN, to_version: int) -> CN: + """ + Downgrade a config from its current version to an older version. + + Args: + cfg (CfgNode): + to_version (int): + + Note: + A general downgrade of arbitrary configs is not always possible due to the + different functionalities in different versions. + The purpose of downgrade is only to recover the defaults in old versions, + allowing it to load an old partial yaml config. + Therefore, the implementation only needs to fill in the default values + in the old version when a general downgrade is not possible. + """ + cfg = cfg.clone() + assert cfg.VERSION >= to_version, "Cannot downgrade from v{} to v{}!".format( + cfg.VERSION, to_version + ) + for k in range(cfg.VERSION, to_version, -1): + converter = globals()["ConverterV" + str(k)] + converter.downgrade(cfg) + cfg.VERSION = k - 1 + return cfg + + +def guess_version(cfg: CN, filename: str) -> int: + """ + Guess the version of a partial config where the VERSION field is not specified. + Returns the version, or the latest if cannot make a guess. 
+ + This makes it easier for users to migrate. + """ + logger = logging.getLogger(__name__) + + def _has(name: str) -> bool: + cur = cfg + for n in name.split("."): + if n not in cur: + return False + cur = cur[n] + return True + + # Most users' partial configs have "MODEL.WEIGHT", so guess on it + ret = None + if _has("MODEL.WEIGHT") or _has("TEST.AUG_ON"): + ret = 1 + + if ret is not None: + logger.warning("Config '{}' has no VERSION. Assuming it to be v{}.".format(filename, ret)) + else: + ret = _C.VERSION + logger.warning( + "Config '{}' has no VERSION. Assuming it to be compatible with latest v{}.".format( + filename, ret + ) + ) + return ret + + +def _rename(cfg: CN, old: str, new: str) -> None: + old_keys = old.split(".") + new_keys = new.split(".") + + def _set(key_seq: List[str], val: str) -> None: + cur = cfg + for k in key_seq[:-1]: + if k not in cur: + cur[k] = CN() + cur = cur[k] + cur[key_seq[-1]] = val + + def _get(key_seq: List[str]) -> CN: + cur = cfg + for k in key_seq: + cur = cur[k] + return cur + + def _del(key_seq: List[str]) -> None: + cur = cfg + for k in key_seq[:-1]: + cur = cur[k] + del cur[key_seq[-1]] + if len(cur) == 0 and len(key_seq) > 1: + _del(key_seq[:-1]) + + _set(new_keys, _get(old_keys)) + _del(old_keys) + + +class _RenameConverter: + """ + A converter that handles simple rename. + """ + + RENAME: List[Tuple[str, str]] = [] # list of tuples of (old name, new name) + + @classmethod + def upgrade(cls, cfg: CN) -> None: + for old, new in cls.RENAME: + _rename(cfg, old, new) + + @classmethod + def downgrade(cls, cfg: CN) -> None: + for old, new in cls.RENAME[::-1]: + _rename(cfg, new, old) + + +class ConverterV1(_RenameConverter): + RENAME = [("MODEL.RPN_HEAD.NAME", "MODEL.RPN.HEAD_NAME")] + + +class ConverterV2(_RenameConverter): + """ + A large bulk of rename, before public release. 
+ """ + + RENAME = [ + ("MODEL.WEIGHT", "MODEL.WEIGHTS"), + ("MODEL.PANOPTIC_FPN.SEMANTIC_LOSS_SCALE", "MODEL.SEM_SEG_HEAD.LOSS_WEIGHT"), + ("MODEL.PANOPTIC_FPN.RPN_LOSS_SCALE", "MODEL.RPN.LOSS_WEIGHT"), + ("MODEL.PANOPTIC_FPN.INSTANCE_LOSS_SCALE", "MODEL.PANOPTIC_FPN.INSTANCE_LOSS_WEIGHT"), + ("MODEL.PANOPTIC_FPN.COMBINE_ON", "MODEL.PANOPTIC_FPN.COMBINE.ENABLED"), + ( + "MODEL.PANOPTIC_FPN.COMBINE_OVERLAP_THRESHOLD", + "MODEL.PANOPTIC_FPN.COMBINE.OVERLAP_THRESH", + ), + ( + "MODEL.PANOPTIC_FPN.COMBINE_STUFF_AREA_LIMIT", + "MODEL.PANOPTIC_FPN.COMBINE.STUFF_AREA_LIMIT", + ), + ( + "MODEL.PANOPTIC_FPN.COMBINE_INSTANCES_CONFIDENCE_THRESHOLD", + "MODEL.PANOPTIC_FPN.COMBINE.INSTANCES_CONFIDENCE_THRESH", + ), + ("MODEL.ROI_HEADS.SCORE_THRESH", "MODEL.ROI_HEADS.SCORE_THRESH_TEST"), + ("MODEL.ROI_HEADS.NMS", "MODEL.ROI_HEADS.NMS_THRESH_TEST"), + ("MODEL.RETINANET.INFERENCE_SCORE_THRESHOLD", "MODEL.RETINANET.SCORE_THRESH_TEST"), + ("MODEL.RETINANET.INFERENCE_TOPK_CANDIDATES", "MODEL.RETINANET.TOPK_CANDIDATES_TEST"), + ("MODEL.RETINANET.INFERENCE_NMS_THRESHOLD", "MODEL.RETINANET.NMS_THRESH_TEST"), + ("TEST.DETECTIONS_PER_IMG", "TEST.DETECTIONS_PER_IMAGE"), + ("TEST.AUG_ON", "TEST.AUG.ENABLED"), + ("TEST.AUG_MIN_SIZES", "TEST.AUG.MIN_SIZES"), + ("TEST.AUG_MAX_SIZE", "TEST.AUG.MAX_SIZE"), + ("TEST.AUG_FLIP", "TEST.AUG.FLIP"), + ] + + @classmethod + def upgrade(cls, cfg: CN) -> None: + super().upgrade(cfg) + + if cfg.MODEL.META_ARCHITECTURE == "RetinaNet": + _rename( + cfg, "MODEL.RETINANET.ANCHOR_ASPECT_RATIOS", "MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS" + ) + _rename(cfg, "MODEL.RETINANET.ANCHOR_SIZES", "MODEL.ANCHOR_GENERATOR.SIZES") + del cfg["MODEL"]["RPN"]["ANCHOR_SIZES"] + del cfg["MODEL"]["RPN"]["ANCHOR_ASPECT_RATIOS"] + else: + _rename(cfg, "MODEL.RPN.ANCHOR_ASPECT_RATIOS", "MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS") + _rename(cfg, "MODEL.RPN.ANCHOR_SIZES", "MODEL.ANCHOR_GENERATOR.SIZES") + del cfg["MODEL"]["RETINANET"]["ANCHOR_SIZES"] + del cfg["MODEL"]["RETINANET"]["ANCHOR_ASPECT_RATIOS"] + del cfg["MODEL"]["RETINANET"]["ANCHOR_STRIDES"] + + @classmethod + def downgrade(cls, cfg: CN) -> None: + super().downgrade(cfg) + + _rename(cfg, "MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS", "MODEL.RPN.ANCHOR_ASPECT_RATIOS") + _rename(cfg, "MODEL.ANCHOR_GENERATOR.SIZES", "MODEL.RPN.ANCHOR_SIZES") + cfg.MODEL.RETINANET.ANCHOR_ASPECT_RATIOS = cfg.MODEL.RPN.ANCHOR_ASPECT_RATIOS + cfg.MODEL.RETINANET.ANCHOR_SIZES = cfg.MODEL.RPN.ANCHOR_SIZES + cfg.MODEL.RETINANET.ANCHOR_STRIDES = [] # this is not used anywhere in any version diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/config/config.py b/motion-gan-pipeline/preprocessing/third/detectron2/config/config.py new file mode 100644 index 0000000..49a55b1 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/config/config.py @@ -0,0 +1,265 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. + +import functools +import inspect +import logging +from fvcore.common.config import CfgNode as _CfgNode + +from detectron2.utils.file_io import PathManager + + +class CfgNode(_CfgNode): + """ + The same as `fvcore.common.config.CfgNode`, but different in: + + 1. Use unsafe yaml loading by default. + Note that this may lead to arbitrary code execution: you must not + load a config file from untrusted sources before manually inspecting + the content of the file. + 2. Support config versioning. + When attempting to merge an old config, it will convert the old config automatically. + + .. automethod:: clone + .. 
automethod:: freeze + .. automethod:: defrost + .. automethod:: is_frozen + .. automethod:: load_yaml_with_base + .. automethod:: merge_from_list + .. automethod:: merge_from_other_cfg + """ + + @classmethod + def _open_cfg(cls, filename): + return PathManager.open(filename, "r") + + # Note that the default value of allow_unsafe is changed to True + def merge_from_file(self, cfg_filename: str, allow_unsafe: bool = True) -> None: + """ + Load content from the given config file and merge it into self. + + Args: + cfg_filename: config filename + allow_unsafe: allow unsafe yaml syntax + """ + assert PathManager.isfile(cfg_filename), f"Config file '{cfg_filename}' does not exist!" + loaded_cfg = self.load_yaml_with_base(cfg_filename, allow_unsafe=allow_unsafe) + loaded_cfg = type(self)(loaded_cfg) + + # defaults.py needs to import CfgNode + from .defaults import _C + + latest_ver = _C.VERSION + assert ( + latest_ver == self.VERSION + ), "CfgNode.merge_from_file is only allowed on a config object of latest version!" + + logger = logging.getLogger(__name__) + + loaded_ver = loaded_cfg.get("VERSION", None) + if loaded_ver is None: + from .compat import guess_version + + loaded_ver = guess_version(loaded_cfg, cfg_filename) + assert loaded_ver <= self.VERSION, "Cannot merge a v{} config into a v{} config.".format( + loaded_ver, self.VERSION + ) + + if loaded_ver == self.VERSION: + self.merge_from_other_cfg(loaded_cfg) + else: + # compat.py needs to import CfgNode + from .compat import upgrade_config, downgrade_config + + logger.warning( + "Loading an old v{} config file '{}' by automatically upgrading to v{}. " + "See docs/CHANGELOG.md for instructions to update your files.".format( + loaded_ver, cfg_filename, self.VERSION + ) + ) + # To convert, first obtain a full config at an old version + old_self = downgrade_config(self, to_version=loaded_ver) + old_self.merge_from_other_cfg(loaded_cfg) + new_config = upgrade_config(old_self) + self.clear() + self.update(new_config) + + def dump(self, *args, **kwargs): + """ + Returns: + str: a yaml string representation of the config + """ + # to make it show up in docs + return super().dump(*args, **kwargs) + + +global_cfg = CfgNode() + + +def get_cfg() -> CfgNode: + """ + Get a copy of the default config. + + Returns: + a detectron2 CfgNode instance. + """ + from .defaults import _C + + return _C.clone() + + +def set_global_cfg(cfg: CfgNode) -> None: + """ + Let the global config point to the given cfg. + + Assume that the given "cfg" has the key "KEY", after calling + `set_global_cfg(cfg)`, the key can be accessed by: + :: + from detectron2.config import global_cfg + print(global_cfg.KEY) + + By using a hacky global config, you can access these configs anywhere, + without having to pass the config object or the values deep into the code. + This is a hacky feature introduced for quick prototyping / research exploration. + """ + global global_cfg + global_cfg.clear() + global_cfg.update(cfg) + + +def configurable(init_func=None, *, from_config=None): + """ + Decorate a function or a class's __init__ method so that it can be called + with a :class:`CfgNode` object using a :func:`from_config` function that translates + :class:`CfgNode` to arguments. 
+ + Examples: + :: + # Usage 1: Decorator on __init__: + class A: + @configurable + def __init__(self, a, b=2, c=3): + pass + + @classmethod + def from_config(cls, cfg): # 'cfg' must be the first argument + # Returns kwargs to be passed to __init__ + return {"a": cfg.A, "b": cfg.B} + + a1 = A(a=1, b=2) # regular construction + a2 = A(cfg) # construct with a cfg + a3 = A(cfg, b=3, c=4) # construct with extra overwrite + + # Usage 2: Decorator on any function. Needs an extra from_config argument: + @configurable(from_config=lambda cfg: {"a: cfg.A, "b": cfg.B}) + def a_func(a, b=2, c=3): + pass + + a1 = a_func(a=1, b=2) # regular call + a2 = a_func(cfg) # call with a cfg + a3 = a_func(cfg, b=3, c=4) # call with extra overwrite + + Args: + init_func (callable): a class's ``__init__`` method in usage 1. The + class must have a ``from_config`` classmethod which takes `cfg` as + the first argument. + from_config (callable): the from_config function in usage 2. It must take `cfg` + as its first argument. + """ + + if init_func is not None: + assert ( + inspect.isfunction(init_func) + and from_config is None + and init_func.__name__ == "__init__" + ), "Incorrect use of @configurable. Check API documentation for examples." + + @functools.wraps(init_func) + def wrapped(self, *args, **kwargs): + try: + from_config_func = type(self).from_config + except AttributeError as e: + raise AttributeError( + "Class with @configurable must have a 'from_config' classmethod." + ) from e + if not inspect.ismethod(from_config_func): + raise TypeError("Class with @configurable must have a 'from_config' classmethod.") + + if _called_with_cfg(*args, **kwargs): + explicit_args = _get_args_from_config(from_config_func, *args, **kwargs) + init_func(self, **explicit_args) + else: + init_func(self, *args, **kwargs) + + return wrapped + + else: + if from_config is None: + return configurable # @configurable() is made equivalent to @configurable + assert inspect.isfunction( + from_config + ), "from_config argument of configurable must be a function!" + + def wrapper(orig_func): + @functools.wraps(orig_func) + def wrapped(*args, **kwargs): + if _called_with_cfg(*args, **kwargs): + explicit_args = _get_args_from_config(from_config, *args, **kwargs) + return orig_func(**explicit_args) + else: + return orig_func(*args, **kwargs) + + wrapped.from_config = from_config + return wrapped + + return wrapper + + +def _get_args_from_config(from_config_func, *args, **kwargs): + """ + Use `from_config` to obtain explicit arguments. 
+ + Returns: + dict: arguments to be used for cls.__init__ + """ + signature = inspect.signature(from_config_func) + if list(signature.parameters.keys())[0] != "cfg": + if inspect.isfunction(from_config_func): + name = from_config_func.__name__ + else: + name = f"{from_config_func.__self__}.from_config" + raise TypeError(f"{name} must take 'cfg' as the first argument!") + support_var_arg = any( + param.kind in [param.VAR_POSITIONAL, param.VAR_KEYWORD] + for param in signature.parameters.values() + ) + if support_var_arg: # forward all arguments to from_config, if from_config accepts them + ret = from_config_func(*args, **kwargs) + else: + # forward supported arguments to from_config + supported_arg_names = set(signature.parameters.keys()) + extra_kwargs = {} + for name in list(kwargs.keys()): + if name not in supported_arg_names: + extra_kwargs[name] = kwargs.pop(name) + ret = from_config_func(*args, **kwargs) + # forward the other arguments to __init__ + ret.update(extra_kwargs) + return ret + + +def _called_with_cfg(*args, **kwargs): + """ + Returns: + bool: whether the arguments contain CfgNode and should be considered + forwarded to from_config. + """ + from omegaconf import DictConfig + + if len(args) and isinstance(args[0], (_CfgNode, DictConfig)): + return True + if isinstance(kwargs.pop("cfg", None), (_CfgNode, DictConfig)): + return True + # `from_config`'s first argument is forced to be "cfg". + # So the above check covers all cases. + return False diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/config/defaults.py b/motion-gan-pipeline/preprocessing/third/detectron2/config/defaults.py new file mode 100644 index 0000000..2a3a22a --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/config/defaults.py @@ -0,0 +1,637 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from .config import CfgNode as CN + +# NOTE: given the new config system +# (https://detectron2.readthedocs.io/en/latest/tutorials/lazyconfigs.html), +# we will stop adding new functionalities to default CfgNode. + +# ----------------------------------------------------------------------------- +# Convention about Training / Test specific parameters +# ----------------------------------------------------------------------------- +# Whenever an argument can be either used for training or for testing, the +# corresponding name will be post-fixed by a _TRAIN for a training parameter, +# or _TEST for a test-specific parameter. +# For example, the number of images during training will be +# IMAGES_PER_BATCH_TRAIN, while the number of images for testing will be +# IMAGES_PER_BATCH_TEST + +# ----------------------------------------------------------------------------- +# Config definition +# ----------------------------------------------------------------------------- + +_C = CN() + +# The version number, to upgrade from old configs to new ones if any +# changes happen. It's recommended to keep a VERSION in your config file. +_C.VERSION = 2 + +_C.MODEL = CN() +_C.MODEL.LOAD_PROPOSALS = False +_C.MODEL.MASK_ON = False +_C.MODEL.KEYPOINT_ON = False +_C.MODEL.DEVICE = "cuda" +_C.MODEL.META_ARCHITECTURE = "GeneralizedRCNN" + +# Path (a file path, or URL like detectron2://.., https://..) to a checkpoint file +# to be loaded to the model. You can find available models in the model zoo. +_C.MODEL.WEIGHTS = "" + +# Values to be used for image normalization (BGR order, since INPUT.FORMAT defaults to BGR). +# To train on images of different number of channels, just set different mean & std. 
+# Default values are the mean pixel value from ImageNet: [103.53, 116.28, 123.675]
+_C.MODEL.PIXEL_MEAN = [103.530, 116.280, 123.675]
+# When using pre-trained models in Detectron1 or any MSRA models,
+# std has been absorbed into its conv1 weights, so the std needs to be set to 1.
+# Otherwise, you can use [57.375, 57.120, 58.395] (ImageNet std)
+_C.MODEL.PIXEL_STD = [1.0, 1.0, 1.0]
+
+
+# -----------------------------------------------------------------------------
+# INPUT
+# -----------------------------------------------------------------------------
+_C.INPUT = CN()
+# By default, {MIN,MAX}_SIZE options are used in transforms.ResizeShortestEdge.
+# Please refer to ResizeShortestEdge for detailed definition.
+# Size of the smallest side of the image during training
+_C.INPUT.MIN_SIZE_TRAIN = (800,)
+# Sample size of smallest side by choice or random selection from range given by
+# INPUT.MIN_SIZE_TRAIN
+_C.INPUT.MIN_SIZE_TRAIN_SAMPLING = "choice"
+# Maximum size of the side of the image during training
+_C.INPUT.MAX_SIZE_TRAIN = 1333
+# Size of the smallest side of the image during testing. Set to zero to disable resize in testing.
+_C.INPUT.MIN_SIZE_TEST = 800
+# Maximum size of the side of the image during testing
+_C.INPUT.MAX_SIZE_TEST = 1333
+# Mode for flipping images used in data augmentation during training
+# choose one of ["horizontal", "vertical", "none"]
+_C.INPUT.RANDOM_FLIP = "horizontal"
+
+# `True` if cropping is used for data augmentation during training
+_C.INPUT.CROP = CN({"ENABLED": False})
+# Cropping type. See documentation of `detectron2.data.transforms.RandomCrop` for explanation.
+_C.INPUT.CROP.TYPE = "relative_range"
+# Size of crop in range (0, 1] if CROP.TYPE is "relative" or "relative_range" and in number of
+# pixels if CROP.TYPE is "absolute"
+_C.INPUT.CROP.SIZE = [0.9, 0.9]
+
+
+# Whether the model needs RGB, YUV, HSV etc.
+# Should be one of the modes defined here, as we use PIL to read the image:
+# https://pillow.readthedocs.io/en/stable/handbook/concepts.html#concept-modes
+# with BGR being the one exception. One can set the image format to BGR; we will
+# internally use RGB for conversion and flip the channels over
+_C.INPUT.FORMAT = "BGR"
+# The ground truth mask format that the model will use.
+# Mask R-CNN supports either "polygon" or "bitmask" as ground truth.
+_C.INPUT.MASK_FORMAT = "polygon"  # alternative: "bitmask"
+
+
+# -----------------------------------------------------------------------------
+# Dataset
+# -----------------------------------------------------------------------------
+_C.DATASETS = CN()
+# List of the dataset names for training. Must be registered in DatasetCatalog
+# Samples from these datasets will be merged and used as one dataset.
+_C.DATASETS.TRAIN = ()
+# List of the pre-computed proposal files for training, which must be consistent
+# with datasets listed in DATASETS.TRAIN.
+_C.DATASETS.PROPOSAL_FILES_TRAIN = ()
+# Number of top scoring precomputed proposals to keep for training
+_C.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TRAIN = 2000
+# List of the dataset names for testing. Must be registered in DatasetCatalog
+_C.DATASETS.TEST = ()
+# List of the pre-computed proposal files for test, which must be consistent
+# with datasets listed in DATASETS.TEST.
+_C.DATASETS.PROPOSAL_FILES_TEST = ()
+# Number of top scoring precomputed proposals to keep for test
+_C.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TEST = 1000
+
+# -----------------------------------------------------------------------------
+# DataLoader
+# -----------------------------------------------------------------------------
+_C.DATALOADER = CN()
+# Number of data loading threads
+_C.DATALOADER.NUM_WORKERS = 4
+# If True, each batch should contain only images for which the aspect ratio
+# is compatible. This groups portrait images together, and landscape images
+# are not batched with portrait images.
+_C.DATALOADER.ASPECT_RATIO_GROUPING = True
+# Options: TrainingSampler, RepeatFactorTrainingSampler
+_C.DATALOADER.SAMPLER_TRAIN = "TrainingSampler"
+# Repeat threshold for RepeatFactorTrainingSampler
+_C.DATALOADER.REPEAT_THRESHOLD = 0.0
+# If True, when working on datasets that have instance annotations, the
+# training dataloader will filter out images without associated annotations
+_C.DATALOADER.FILTER_EMPTY_ANNOTATIONS = True
+
+# ---------------------------------------------------------------------------- #
+# Backbone options
+# ---------------------------------------------------------------------------- #
+_C.MODEL.BACKBONE = CN()
+
+_C.MODEL.BACKBONE.NAME = "build_resnet_backbone"
+# Freeze the first several stages so they are not trained.
+# There are 5 stages in ResNet. The first is a convolution, and each of the
+# following stages is a group of residual blocks.
+_C.MODEL.BACKBONE.FREEZE_AT = 2
+
+
+# ---------------------------------------------------------------------------- #
+# FPN options
+# ---------------------------------------------------------------------------- #
+_C.MODEL.FPN = CN()
+# Names of the input feature maps to be used by FPN
+# They must have contiguous power of 2 strides
+# e.g., ["res2", "res3", "res4", "res5"]
+_C.MODEL.FPN.IN_FEATURES = []
+_C.MODEL.FPN.OUT_CHANNELS = 256
+
+# Options: "" (no norm), "GN"
+_C.MODEL.FPN.NORM = ""
+
+# Types for fusing the FPN top-down and lateral features. Can be either "sum" or "avg"
+_C.MODEL.FPN.FUSE_TYPE = "sum"
+
+
+# ---------------------------------------------------------------------------- #
+# Proposal generator options
+# ---------------------------------------------------------------------------- #
+_C.MODEL.PROPOSAL_GENERATOR = CN()
+# Current proposal generators include "RPN", "RRPN" and "PrecomputedProposals"
+_C.MODEL.PROPOSAL_GENERATOR.NAME = "RPN"
+# Proposal height and width both need to be greater than MIN_SIZE
+# (at the scale used during training or inference)
+_C.MODEL.PROPOSAL_GENERATOR.MIN_SIZE = 0
+
+
+# ---------------------------------------------------------------------------- #
+# Anchor generator options
+# ---------------------------------------------------------------------------- #
+_C.MODEL.ANCHOR_GENERATOR = CN()
+# The generator can be any name in the ANCHOR_GENERATOR registry
+_C.MODEL.ANCHOR_GENERATOR.NAME = "DefaultAnchorGenerator"
+# Anchor sizes (i.e. sqrt of area) in absolute pixels w.r.t. the network input.
+# Format: list[list[float]]. SIZES[i] specifies the list of sizes to use for
+# IN_FEATURES[i]; len(SIZES) must be equal to len(IN_FEATURES) or 1.
+# When len(SIZES) == 1, SIZES[0] is used for all IN_FEATURES.
+_C.MODEL.ANCHOR_GENERATOR.SIZES = [[32, 64, 128, 256, 512]]
+# Anchor aspect ratios. For each area given in `SIZES`, anchors with different aspect
+# ratios are generated by an anchor generator.
+# Format: list[list[float]]. ASPECT_RATIOS[i] specifies the list of aspect ratios (H/W)
+# to use for IN_FEATURES[i]; len(ASPECT_RATIOS) == len(IN_FEATURES) must be true,
+# or len(ASPECT_RATIOS) == 1 is true and aspect ratio list ASPECT_RATIOS[0] is used
+# for all IN_FEATURES.
+_C.MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS = [[0.5, 1.0, 2.0]]
+# Anchor angles.
+# list[list[float]], the angle in degrees, for each input feature map.
+# ANGLES[i] specifies the list of angles for IN_FEATURES[i].
+_C.MODEL.ANCHOR_GENERATOR.ANGLES = [[-90, 0, 90]]
+# Relative offset between the center of the first anchor and the top-left corner of the image
+# Value has to be in [0, 1). Using 0.5 is recommended, which means half stride.
+# The value is not expected to affect model accuracy.
+_C.MODEL.ANCHOR_GENERATOR.OFFSET = 0.0
+
+# ---------------------------------------------------------------------------- #
+# RPN options
+# ---------------------------------------------------------------------------- #
+_C.MODEL.RPN = CN()
+_C.MODEL.RPN.HEAD_NAME = "StandardRPNHead"  # used by RPN_HEAD_REGISTRY
+
+# Names of the input feature maps to be used by RPN
+# e.g., ["p2", "p3", "p4", "p5", "p6"] for FPN
+_C.MODEL.RPN.IN_FEATURES = ["res4"]
+# Remove RPN anchors that go outside the image by BOUNDARY_THRESH pixels
+# Set to -1 or a large value, e.g. 100000, to disable pruning anchors
+_C.MODEL.RPN.BOUNDARY_THRESH = -1
+# IOU overlap ratios [BG_IOU_THRESHOLD, FG_IOU_THRESHOLD]
+# Minimum overlap required between an anchor and ground-truth box for the
+# (anchor, gt box) pair to be a positive example (IoU >= FG_IOU_THRESHOLD
+# ==> positive RPN example: 1)
+# Maximum overlap allowed between an anchor and ground-truth box for the
+# (anchor, gt box) pair to be a negative example (IoU < BG_IOU_THRESHOLD
+# ==> negative RPN example: 0)
+# Anchors with overlap in between (BG_IOU_THRESHOLD <= IoU < FG_IOU_THRESHOLD)
+# are ignored (-1)
+_C.MODEL.RPN.IOU_THRESHOLDS = [0.3, 0.7]
+_C.MODEL.RPN.IOU_LABELS = [0, -1, 1]
+# Number of regions per image used to train RPN
+_C.MODEL.RPN.BATCH_SIZE_PER_IMAGE = 256
+# Target fraction of foreground (positive) examples per RPN minibatch
+_C.MODEL.RPN.POSITIVE_FRACTION = 0.5
+# Options are: "smooth_l1", "giou", "diou", "ciou"
+_C.MODEL.RPN.BBOX_REG_LOSS_TYPE = "smooth_l1"
+_C.MODEL.RPN.BBOX_REG_LOSS_WEIGHT = 1.0
+# Weights on (dx, dy, dw, dh) for normalizing RPN anchor regression targets
+_C.MODEL.RPN.BBOX_REG_WEIGHTS = (1.0, 1.0, 1.0, 1.0)
+# The transition point from L1 to L2 loss. Set to 0.0 to make the loss simply L1.
+_C.MODEL.RPN.SMOOTH_L1_BETA = 0.0
+_C.MODEL.RPN.LOSS_WEIGHT = 1.0
+# Number of top scoring RPN proposals to keep before applying NMS
+# When FPN is used, this is *per FPN level* (not total)
+_C.MODEL.RPN.PRE_NMS_TOPK_TRAIN = 12000
+_C.MODEL.RPN.PRE_NMS_TOPK_TEST = 6000
+# Number of top scoring RPN proposals to keep after applying NMS
+# When FPN is used, this limit is applied per level and then again to the union
+# of proposals from all levels
+# NOTE: When FPN is used, the meaning of this config is different from Detectron1.
+# It means per-batch topk in Detectron1, but per-image topk here.
+# See the "find_top_rpn_proposals" function for details.
+_C.MODEL.RPN.POST_NMS_TOPK_TRAIN = 2000
+_C.MODEL.RPN.POST_NMS_TOPK_TEST = 1000
+# NMS threshold used on RPN proposals
+_C.MODEL.RPN.NMS_THRESH = 0.7
+# Set this to -1 to use the same number of output channels as input channels.
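+# (Hedged note: in the standard RPN head the length of this list is the number
+# of stacked 3x3 convs, so a hypothetical [-1, -1] would stack two convs that
+# both keep the input channel count.)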
+_C.MODEL.RPN.CONV_DIMS = [-1]
+
+# ---------------------------------------------------------------------------- #
+# ROI HEADS options
+# ---------------------------------------------------------------------------- #
+_C.MODEL.ROI_HEADS = CN()
+_C.MODEL.ROI_HEADS.NAME = "Res5ROIHeads"
+# Number of foreground classes
+_C.MODEL.ROI_HEADS.NUM_CLASSES = 80
+# Names of the input feature maps to be used by ROI heads
+# Currently all heads (box, mask, ...) use the same input feature map list
+# e.g., ["p2", "p3", "p4", "p5"] is commonly used for FPN
+_C.MODEL.ROI_HEADS.IN_FEATURES = ["res4"]
+# IOU overlap ratios [IOU_THRESHOLD]
+# Overlap threshold for an RoI to be considered background (if < IOU_THRESHOLD)
+# Overlap threshold for an RoI to be considered foreground (if >= IOU_THRESHOLD)
+_C.MODEL.ROI_HEADS.IOU_THRESHOLDS = [0.5]
+_C.MODEL.ROI_HEADS.IOU_LABELS = [0, 1]
+# RoI minibatch size *per image* (number of regions of interest [ROIs]) during training
+# Total number of RoIs per training minibatch =
+#   ROI_HEADS.BATCH_SIZE_PER_IMAGE * SOLVER.IMS_PER_BATCH
+# E.g., a common configuration is: 512 * 16 = 8192
+_C.MODEL.ROI_HEADS.BATCH_SIZE_PER_IMAGE = 512
+# Target fraction of RoI minibatch that is labeled foreground (i.e. class > 0)
+_C.MODEL.ROI_HEADS.POSITIVE_FRACTION = 0.25
+
+# Only used in test mode
+
+# Minimum score threshold (assuming scores in a [0, 1] range); a value chosen to
+# balance obtaining high recall with not having too many low precision
+# detections that will slow down inference post processing steps (like NMS)
+# A default threshold of 0.0 increases AP by ~0.2-0.3 but significantly slows down
+# inference.
+_C.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.05
+# Overlap threshold used for non-maximum suppression (suppress boxes with
+# IoU >= this threshold)
+_C.MODEL.ROI_HEADS.NMS_THRESH_TEST = 0.5
+# If True, augment proposals with ground-truth boxes before sampling proposals to
+# train ROI heads.
+_C.MODEL.ROI_HEADS.PROPOSAL_APPEND_GT = True
+
+# ---------------------------------------------------------------------------- #
+# Box Head
+# ---------------------------------------------------------------------------- #
+_C.MODEL.ROI_BOX_HEAD = CN()
+# C4 doesn't use the head name option
+# Options for non-C4 models: FastRCNNConvFCHead,
+_C.MODEL.ROI_BOX_HEAD.NAME = ""
+# Options are: "smooth_l1", "giou", "diou", "ciou"
+_C.MODEL.ROI_BOX_HEAD.BBOX_REG_LOSS_TYPE = "smooth_l1"
+# The final scaling coefficient on the box regression loss, used to balance the magnitude of its
+# gradients with other losses in the model. See also `MODEL.ROI_KEYPOINT_HEAD.LOSS_WEIGHT`.
+_C.MODEL.ROI_BOX_HEAD.BBOX_REG_LOSS_WEIGHT = 1.0
+# Default weights on (dx, dy, dw, dh) for normalizing bbox regression targets
+# These are empirically chosen to approximately lead to unit variance targets
+_C.MODEL.ROI_BOX_HEAD.BBOX_REG_WEIGHTS = (10.0, 10.0, 5.0, 5.0)
+# The transition point from L1 to L2 loss. Set to 0.0 to make the loss simply L1.
+_C.MODEL.ROI_BOX_HEAD.SMOOTH_L1_BETA = 0.0
+_C.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION = 14
+_C.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO = 0
+# Type of pooling operation applied to the incoming feature map for each RoI
+_C.MODEL.ROI_BOX_HEAD.POOLER_TYPE = "ROIAlignV2"
+
+_C.MODEL.ROI_BOX_HEAD.NUM_FC = 0
+# Hidden layer dimension for FC layers in the RoI box head
+_C.MODEL.ROI_BOX_HEAD.FC_DIM = 1024
+_C.MODEL.ROI_BOX_HEAD.NUM_CONV = 0
+# Channel dimension for Conv layers in the RoI box head
+_C.MODEL.ROI_BOX_HEAD.CONV_DIM = 256
+# Normalization method for the convolution layers.
+# Options: "" (no norm), "GN", "SyncBN". +_C.MODEL.ROI_BOX_HEAD.NORM = "" +# Whether to use class agnostic for bbox regression +_C.MODEL.ROI_BOX_HEAD.CLS_AGNOSTIC_BBOX_REG = False +# If true, RoI heads use bounding boxes predicted by the box head rather than proposal boxes. +_C.MODEL.ROI_BOX_HEAD.TRAIN_ON_PRED_BOXES = False + +# ---------------------------------------------------------------------------- # +# Cascaded Box Head +# ---------------------------------------------------------------------------- # +_C.MODEL.ROI_BOX_CASCADE_HEAD = CN() +# The number of cascade stages is implicitly defined by the length of the following two configs. +_C.MODEL.ROI_BOX_CASCADE_HEAD.BBOX_REG_WEIGHTS = ( + (10.0, 10.0, 5.0, 5.0), + (20.0, 20.0, 10.0, 10.0), + (30.0, 30.0, 15.0, 15.0), +) +_C.MODEL.ROI_BOX_CASCADE_HEAD.IOUS = (0.5, 0.6, 0.7) + + +# ---------------------------------------------------------------------------- # +# Mask Head +# ---------------------------------------------------------------------------- # +_C.MODEL.ROI_MASK_HEAD = CN() +_C.MODEL.ROI_MASK_HEAD.NAME = "MaskRCNNConvUpsampleHead" +_C.MODEL.ROI_MASK_HEAD.POOLER_RESOLUTION = 14 +_C.MODEL.ROI_MASK_HEAD.POOLER_SAMPLING_RATIO = 0 +_C.MODEL.ROI_MASK_HEAD.NUM_CONV = 0 # The number of convs in the mask head +_C.MODEL.ROI_MASK_HEAD.CONV_DIM = 256 +# Normalization method for the convolution layers. +# Options: "" (no norm), "GN", "SyncBN". +_C.MODEL.ROI_MASK_HEAD.NORM = "" +# Whether to use class agnostic for mask prediction +_C.MODEL.ROI_MASK_HEAD.CLS_AGNOSTIC_MASK = False +# Type of pooling operation applied to the incoming feature map for each RoI +_C.MODEL.ROI_MASK_HEAD.POOLER_TYPE = "ROIAlignV2" + + +# ---------------------------------------------------------------------------- # +# Keypoint Head +# ---------------------------------------------------------------------------- # +_C.MODEL.ROI_KEYPOINT_HEAD = CN() +_C.MODEL.ROI_KEYPOINT_HEAD.NAME = "KRCNNConvDeconvUpsampleHead" +_C.MODEL.ROI_KEYPOINT_HEAD.POOLER_RESOLUTION = 14 +_C.MODEL.ROI_KEYPOINT_HEAD.POOLER_SAMPLING_RATIO = 0 +_C.MODEL.ROI_KEYPOINT_HEAD.CONV_DIMS = tuple(512 for _ in range(8)) +_C.MODEL.ROI_KEYPOINT_HEAD.NUM_KEYPOINTS = 17 # 17 is the number of keypoints in COCO. + +# Images with too few (or no) keypoints are excluded from training. +_C.MODEL.ROI_KEYPOINT_HEAD.MIN_KEYPOINTS_PER_IMAGE = 1 +# Normalize by the total number of visible keypoints in the minibatch if True. +# Otherwise, normalize by the total number of keypoints that could ever exist +# in the minibatch. +# The keypoint softmax loss is only calculated on visible keypoints. +# Since the number of visible keypoints can vary significantly between +# minibatches, this has the effect of up-weighting the importance of +# minibatches with few visible keypoints. (Imagine the extreme case of +# only one visible keypoint versus N: in the case of N, each one +# contributes 1/N to the gradient compared to the single keypoint +# determining the gradient direction). Instead, we can normalize the +# loss by the total number of keypoints, if it were the case that all +# keypoints were visible in a full minibatch. (Returning to the example, +# this means that the one visible keypoint contributes as much as each +# of the N keypoints.) 
+_C.MODEL.ROI_KEYPOINT_HEAD.NORMALIZE_LOSS_BY_VISIBLE_KEYPOINTS = True
+# Multi-task loss weight to use for keypoints
+# Recommended values:
+# - use 1.0 if NORMALIZE_LOSS_BY_VISIBLE_KEYPOINTS is True
+# - use 4.0 if NORMALIZE_LOSS_BY_VISIBLE_KEYPOINTS is False
+_C.MODEL.ROI_KEYPOINT_HEAD.LOSS_WEIGHT = 1.0
+# Type of pooling operation applied to the incoming feature map for each RoI
+_C.MODEL.ROI_KEYPOINT_HEAD.POOLER_TYPE = "ROIAlignV2"
+
+# ---------------------------------------------------------------------------- #
+# Semantic Segmentation Head
+# ---------------------------------------------------------------------------- #
+_C.MODEL.SEM_SEG_HEAD = CN()
+_C.MODEL.SEM_SEG_HEAD.NAME = "SemSegFPNHead"
+_C.MODEL.SEM_SEG_HEAD.IN_FEATURES = ["p2", "p3", "p4", "p5"]
+# Label in the semantic segmentation ground truth that is ignored, i.e., no loss is calculated for
+# the corresponding pixel.
+_C.MODEL.SEM_SEG_HEAD.IGNORE_VALUE = 255
+# Number of classes in the semantic segmentation head
+_C.MODEL.SEM_SEG_HEAD.NUM_CLASSES = 54
+# Number of channels in the 3x3 convs inside semantic-FPN heads.
+_C.MODEL.SEM_SEG_HEAD.CONVS_DIM = 128
+# Outputs from semantic-FPN heads are up-scaled to the COMMON_STRIDE stride.
+_C.MODEL.SEM_SEG_HEAD.COMMON_STRIDE = 4
+# Normalization method for the convolution layers. Options: "" (no norm), "GN".
+_C.MODEL.SEM_SEG_HEAD.NORM = "GN"
+_C.MODEL.SEM_SEG_HEAD.LOSS_WEIGHT = 1.0
+
+_C.MODEL.PANOPTIC_FPN = CN()
+# Scaling of all losses from instance detection / segmentation head.
+_C.MODEL.PANOPTIC_FPN.INSTANCE_LOSS_WEIGHT = 1.0
+
+# options when combining instance & semantic segmentation outputs
+_C.MODEL.PANOPTIC_FPN.COMBINE = CN({"ENABLED": True})  # "COMBINE.ENABLED" is deprecated & not used
+_C.MODEL.PANOPTIC_FPN.COMBINE.OVERLAP_THRESH = 0.5
+_C.MODEL.PANOPTIC_FPN.COMBINE.STUFF_AREA_LIMIT = 4096
+_C.MODEL.PANOPTIC_FPN.COMBINE.INSTANCES_CONFIDENCE_THRESH = 0.5
+
+
+# ---------------------------------------------------------------------------- #
+# RetinaNet Head
+# ---------------------------------------------------------------------------- #
+_C.MODEL.RETINANET = CN()
+
+# This is the number of foreground classes.
+_C.MODEL.RETINANET.NUM_CLASSES = 80
+
+_C.MODEL.RETINANET.IN_FEATURES = ["p3", "p4", "p5", "p6", "p7"]
+
+# Convolutions to use in the cls and bbox tower
+# NOTE: this doesn't include the last conv for logits
+_C.MODEL.RETINANET.NUM_CONVS = 4
+
+# IoU overlap ratio [bg, fg] for labeling anchors.
+# Anchors with < bg are labeled negative (0)
+# Anchors with >= bg and < fg are ignored (-1)
+# Anchors with >= fg are labeled positive (1)
+_C.MODEL.RETINANET.IOU_THRESHOLDS = [0.4, 0.5]
+_C.MODEL.RETINANET.IOU_LABELS = [0, -1, 1]
+
+# Prior prob for rare case (i.e. foreground) at the beginning of training.
+# This is used to set the bias for the logits layer of the classifier subnet.
+# This improves training stability in the case of heavy class imbalance.
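+# (Illustrative sketch of the usual initialization, not spelled out in this file:
+# the classifier bias is set to b = -log((1 - p) / p), so the default p = 0.01
+# gives b of about -4.6 and almost every anchor starts out predicted as background.)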
+_C.MODEL.RETINANET.PRIOR_PROB = 0.01
+
+# Inference cls score threshold, only anchors with score > INFERENCE_TH are
+# considered for inference (to improve speed)
+_C.MODEL.RETINANET.SCORE_THRESH_TEST = 0.05
+# Select topk candidates before NMS
+_C.MODEL.RETINANET.TOPK_CANDIDATES_TEST = 1000
+_C.MODEL.RETINANET.NMS_THRESH_TEST = 0.5
+
+# Weights on (dx, dy, dw, dh) for normalizing Retinanet anchor regression targets
+_C.MODEL.RETINANET.BBOX_REG_WEIGHTS = (1.0, 1.0, 1.0, 1.0)
+
+# Loss parameters
+_C.MODEL.RETINANET.FOCAL_LOSS_GAMMA = 2.0
+_C.MODEL.RETINANET.FOCAL_LOSS_ALPHA = 0.25
+_C.MODEL.RETINANET.SMOOTH_L1_LOSS_BETA = 0.1
+# Options are: "smooth_l1", "giou", "diou", "ciou"
+_C.MODEL.RETINANET.BBOX_REG_LOSS_TYPE = "smooth_l1"
+
+# One of BN, SyncBN, FrozenBN, GN
+# Only supports GN until unshared norm is implemented
+_C.MODEL.RETINANET.NORM = ""
+
+
+# ---------------------------------------------------------------------------- #
+# ResNe[X]t options (ResNets = {ResNet, ResNeXt})
+# Note that parts of a resnet may be used for both the backbone and the head
+# These options apply to both
+# ---------------------------------------------------------------------------- #
+_C.MODEL.RESNETS = CN()
+
+_C.MODEL.RESNETS.DEPTH = 50
+_C.MODEL.RESNETS.OUT_FEATURES = ["res4"]  # res4 for C4 backbone, res2..5 for FPN backbone
+
+# Number of groups to use; 1 ==> ResNet; > 1 ==> ResNeXt
+_C.MODEL.RESNETS.NUM_GROUPS = 1
+
+# Options: FrozenBN, GN, "SyncBN", "BN"
+_C.MODEL.RESNETS.NORM = "FrozenBN"
+
+# Baseline width of each group.
+# Scaling this parameter will scale the width of all bottleneck layers.
+_C.MODEL.RESNETS.WIDTH_PER_GROUP = 64
+
+# Place the stride 2 conv on the 1x1 filter
+# Use True only for the original MSRA ResNet; use False for C2 and Torch models
+_C.MODEL.RESNETS.STRIDE_IN_1X1 = True
+
+# Apply dilation in stage "res5"
+_C.MODEL.RESNETS.RES5_DILATION = 1
+
+# Output width of res2. Scaling this parameter will scale the width of all 1x1 convs in ResNet
+# For R18 and R34, this needs to be set to 64
+_C.MODEL.RESNETS.RES2_OUT_CHANNELS = 256
+_C.MODEL.RESNETS.STEM_OUT_CHANNELS = 64
+
+# Apply Deformable Convolution in stages
+# Specify whether to apply deform_conv on Res2, Res3, Res4, Res5
+_C.MODEL.RESNETS.DEFORM_ON_PER_STAGE = [False, False, False, False]
+# Use True to use modulated deform_conv (DeformableV2, https://arxiv.org/abs/1811.11168);
+# Use False for DeformableV1.
+_C.MODEL.RESNETS.DEFORM_MODULATED = False
+# Number of groups in deformable conv.
+_C.MODEL.RESNETS.DEFORM_NUM_GROUPS = 1
+
+
+# ---------------------------------------------------------------------------- #
+# Solver
+# ---------------------------------------------------------------------------- #
+_C.SOLVER = CN()
+
+# Options: WarmupMultiStepLR, WarmupCosineLR.
+# See detectron2/solver/build.py for definition.
+_C.SOLVER.LR_SCHEDULER_NAME = "WarmupMultiStepLR"
+
+_C.SOLVER.MAX_ITER = 40000
+
+_C.SOLVER.BASE_LR = 0.001
+# The end lr, only used by WarmupCosineLR
+_C.SOLVER.BASE_LR_END = 0.0
+
+_C.SOLVER.MOMENTUM = 0.9
+
+_C.SOLVER.NESTEROV = False
+
+_C.SOLVER.WEIGHT_DECAY = 0.0001
+# The weight decay that's applied to parameters of normalization layers
+# (typically the affine transformation)
+_C.SOLVER.WEIGHT_DECAY_NORM = 0.0
+
+_C.SOLVER.GAMMA = 0.1
+# The iteration numbers at which to decrease the learning rate by GAMMA.
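+# (Worked example: with BASE_LR = 0.001, GAMMA = 0.1 and STEPS = (30000,),
+# the learning rate is 0.001 until iteration 30000 and 0.0001 afterwards;
+# each milestone in STEPS multiplies the current LR by GAMMA once.)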
+_C.SOLVER.STEPS = (30000,)
+
+_C.SOLVER.WARMUP_FACTOR = 1.0 / 1000
+_C.SOLVER.WARMUP_ITERS = 1000
+_C.SOLVER.WARMUP_METHOD = "linear"
+
+# Save a checkpoint every this many iterations
+_C.SOLVER.CHECKPOINT_PERIOD = 5000
+
+# Number of images per batch across all machines. This is also the number
+# of training images per step (i.e. per iteration). If we use 16 GPUs
+# and IMS_PER_BATCH = 32, each GPU will see 2 images per batch.
+# May be adjusted automatically if REFERENCE_WORLD_SIZE is set.
+_C.SOLVER.IMS_PER_BATCH = 16
+
+# The reference number of workers (GPUs) this config is meant to train with.
+# It has no effect when set to 0.
+# With a non-zero value, it will be used by DefaultTrainer to compute a desired
+# per-worker batch size, and then scale the other related configs (total batch size,
+# learning rate, etc) to match the per-worker batch size.
+# See documentation of `DefaultTrainer.auto_scale_workers` for details.
+_C.SOLVER.REFERENCE_WORLD_SIZE = 0
+
+# Detectron v1 (and previous detection code) used a 2x higher LR and 0 WD for
+# biases. This is not useful (at least for recent models). You should avoid
+# changing these and they exist only to reproduce Detectron v1 training if
+# desired.
+_C.SOLVER.BIAS_LR_FACTOR = 1.0
+_C.SOLVER.WEIGHT_DECAY_BIAS = None  # None means following WEIGHT_DECAY
+
+# Gradient clipping
+_C.SOLVER.CLIP_GRADIENTS = CN({"ENABLED": False})
+# Type of gradient clipping, currently 2 values are supported:
+# - "value": the absolute values of elements of each gradient are clipped
+# - "norm": the norm of the gradient for each parameter is clipped thus
+#   affecting all elements in the parameter
+_C.SOLVER.CLIP_GRADIENTS.CLIP_TYPE = "value"
+# Maximum absolute value used for clipping gradients
+_C.SOLVER.CLIP_GRADIENTS.CLIP_VALUE = 1.0
+# Floating point number p for L-p norm to be used with the "norm"
+# gradient clipping type; for L-inf, please specify .inf
+_C.SOLVER.CLIP_GRADIENTS.NORM_TYPE = 2.0
+
+# Enable automatic mixed precision for training
+# Note that this does not change model's inference behavior.
+# To use AMP in inference, run inference under autocast()
+_C.SOLVER.AMP = CN({"ENABLED": False})
+
+# ---------------------------------------------------------------------------- #
+# Specific test options
+# ---------------------------------------------------------------------------- #
+_C.TEST = CN()
+# For end-to-end tests to verify the expected accuracy.
+# Each item is [task, metric, value, tolerance]
+# e.g.: [['bbox', 'AP', 38.5, 0.2]]
+_C.TEST.EXPECTED_RESULTS = []
+# The period (in terms of steps) to evaluate the model during training.
+# Set to 0 to disable.
+_C.TEST.EVAL_PERIOD = 0
+# The sigmas used to calculate keypoint OKS. See http://cocodataset.org/#keypoints-eval
+# When empty, it will use the defaults in COCO.
+# Otherwise it should be a list[float] with the same length as ROI_KEYPOINT_HEAD.NUM_KEYPOINTS.
+_C.TEST.KEYPOINT_OKS_SIGMAS = []
+# Maximum number of detections to return per image during inference (100 is
+# based on the limit established for the COCO dataset).
+_C.TEST.DETECTIONS_PER_IMAGE = 100 + +_C.TEST.AUG = CN({"ENABLED": False}) +_C.TEST.AUG.MIN_SIZES = (400, 500, 600, 700, 800, 900, 1000, 1100, 1200) +_C.TEST.AUG.MAX_SIZE = 4000 +_C.TEST.AUG.FLIP = True + +_C.TEST.PRECISE_BN = CN({"ENABLED": False}) +_C.TEST.PRECISE_BN.NUM_ITER = 200 + +# ---------------------------------------------------------------------------- # +# Misc options +# ---------------------------------------------------------------------------- # +# Directory where output files are written +_C.OUTPUT_DIR = "./output" +# Set seed to negative to fully randomize everything. +# Set seed to positive to use a fixed seed. Note that a fixed seed increases +# reproducibility but does not guarantee fully deterministic behavior. +# Disabling all parallelism further increases reproducibility. +_C.SEED = -1 +# Benchmark different cudnn algorithms. +# If input images have very different sizes, this option will have large overhead +# for about 10k iterations. It usually hurts total time, but can benefit for certain models. +# If input images have the same or similar sizes, benchmark is often helpful. +_C.CUDNN_BENCHMARK = False +# The period (in terms of steps) for minibatch visualization at train time. +# Set to 0 to disable. +_C.VIS_PERIOD = 0 + +# global config is for quick hack purposes. +# You can set them in command line or config files, +# and access it with: +# +# from detectron2.config import global_cfg +# print(global_cfg.HACK) +# +# Do not commit any configs into it. +_C.GLOBAL = CN() +_C.GLOBAL.HACK = 1.0 diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/config/instantiate.py b/motion-gan-pipeline/preprocessing/third/detectron2/config/instantiate.py new file mode 100644 index 0000000..93239a9 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/config/instantiate.py @@ -0,0 +1,82 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import dataclasses +import logging +from collections import abc +from typing import Any + +from detectron2.utils.registry import _convert_target_to_string, locate + +__all__ = ["dump_dataclass", "instantiate"] + + +def dump_dataclass(obj: Any): + """ + Dump a dataclass recursively into a dict that can be later instantiated. + + Args: + obj: a dataclass object + + Returns: + dict + """ + assert dataclasses.is_dataclass(obj) and not isinstance( + obj, type + ), "dump_dataclass() requires an instance of a dataclass." + ret = {"_target_": _convert_target_to_string(type(obj))} + for f in dataclasses.fields(obj): + v = getattr(obj, f.name) + if dataclasses.is_dataclass(v): + v = dump_dataclass(v) + if isinstance(v, (list, tuple)): + v = [dump_dataclass(x) if dataclasses.is_dataclass(x) else x for x in v] + ret[f.name] = v + return ret + + +def instantiate(cfg): + """ + Recursively instantiate objects defined in dictionaries by + "_target_" and arguments. 
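+
+    Example (a minimal sketch; any importable callable works as "_target_")::
+
+        cfg = {"_target_": "collections.Counter", "red": 2}
+        counter = instantiate(cfg)  # equivalent to collections.Counter(red=2)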
+
+    Args:
+        cfg: a dict-like object with "_target_" that defines the caller, and
+            other keys that define the arguments
+
+    Returns:
+        object instantiated by cfg
+    """
+    from omegaconf import ListConfig
+
+    if isinstance(cfg, ListConfig):
+        lst = [instantiate(x) for x in cfg]
+        return ListConfig(lst, flags={"allow_objects": True})
+    if isinstance(cfg, list):
+        # Specialize for list, because many classes take
+        # list[objects] as arguments, such as ResNet, DatasetMapper
+        return [instantiate(x) for x in cfg]
+
+    if isinstance(cfg, abc.Mapping) and "_target_" in cfg:
+        # conceptually equivalent to hydra.utils.instantiate(cfg) with _convert_=all,
+        # but faster: https://github.com/facebookresearch/hydra/issues/1200
+        cfg = {k: instantiate(v) for k, v in cfg.items()}
+        cls = cfg.pop("_target_")
+        cls = instantiate(cls)
+
+        if isinstance(cls, str):
+            cls_name = cls
+            cls = locate(cls_name)
+            assert cls is not None, cls_name
+        else:
+            try:
+                cls_name = cls.__module__ + "." + cls.__qualname__
+            except Exception:
+                # target could be anything, so the above could fail
+                cls_name = str(cls)
+        assert callable(cls), f"_target_ {cls} does not define a callable object"
+        try:
+            return cls(**cfg)
+        except TypeError:
+            logger = logging.getLogger(__name__)
+            logger.error(f"Error when instantiating {cls_name}!")
+            raise
+    return cfg  # return as-is if we don't know what to do
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/config/lazy.py b/motion-gan-pipeline/preprocessing/third/detectron2/config/lazy.py
new file mode 100644
index 0000000..fa5d86b
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/config/lazy.py
@@ -0,0 +1,399 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+import ast
+import builtins
+import importlib
+import inspect
+import logging
+import os
+import uuid
+from collections import abc
+from contextlib import contextmanager
+from copy import deepcopy
+from dataclasses import is_dataclass
+from typing import List, Tuple, Union
+import cloudpickle
+import yaml
+from omegaconf import DictConfig, ListConfig, OmegaConf
+
+from detectron2.utils.file_io import PathManager
+from detectron2.utils.registry import _convert_target_to_string
+
+__all__ = ["LazyCall", "LazyConfig"]
+
+
+class LazyCall:
+    """
+    Wrap a callable so that when it's called, the call will not be executed,
+    but returns a dict that describes the call.
+
+    LazyCall object has to be called with only keyword arguments. Positional
+    arguments are not yet supported.
+
+    Examples:
+    ::
+        from detectron2.config import instantiate, LazyCall
+
+        layer_cfg = LazyCall(nn.Conv2d)(in_channels=32, out_channels=32)
+        layer_cfg.out_channels = 64  # can edit it afterwards
+        layer = instantiate(layer_cfg)
+    """
+
+    def __init__(self, target):
+        if not (callable(target) or isinstance(target, (str, abc.Mapping))):
+            raise TypeError(
+                f"target of LazyCall must be a callable or define a callable! Got {target}"
+            )
+        self._target = target
+
+    def __call__(self, **kwargs):
+        if is_dataclass(self._target):
+            # omegaconf object cannot hold dataclass type
+            # https://github.com/omry/omegaconf/issues/784
+            target = _convert_target_to_string(self._target)
+        else:
+            target = self._target
+        kwargs["_target_"] = target
+
+        return DictConfig(content=kwargs, flags={"allow_objects": True})
+
+
+def _visit_dict_config(cfg, func):
+    """
+    Apply func recursively to all DictConfig in cfg.
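+
+    Example (hypothetical sketch; "_meta_" is a made-up key)::
+
+        # drop a bookkeeping key from every nested DictConfig
+        _visit_dict_config(cfg, lambda d: d.pop("_meta_", None))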
+ """ + if isinstance(cfg, DictConfig): + func(cfg) + for v in cfg.values(): + _visit_dict_config(v, func) + elif isinstance(cfg, ListConfig): + for v in cfg: + _visit_dict_config(v, func) + + +def _validate_py_syntax(filename): + # see also https://github.com/open-mmlab/mmcv/blob/master/mmcv/utils/config.py + with PathManager.open(filename, "r") as f: + content = f.read() + try: + ast.parse(content) + except SyntaxError as e: + raise SyntaxError(f"Config file {filename} has syntax error!") from e + + +def _cast_to_config(obj): + # if given a dict, return DictConfig instead + if isinstance(obj, dict): + return DictConfig(obj, flags={"allow_objects": True}) + return obj + + +_CFG_PACKAGE_NAME = "detectron2._cfg_loader" +""" +A namespace to put all imported config into. +""" + + +def _random_package_name(filename): + # generate a random package name when loading config files + return _CFG_PACKAGE_NAME + str(uuid.uuid4())[:4] + "." + os.path.basename(filename) + + +@contextmanager +def _patch_import(): + """ + Enhance relative import statements in config files, so that they: + 1. locate files purely based on relative location, regardless of packages. + e.g. you can import file without having __init__ + 2. do not cache modules globally; modifications of module states has no side effect + 3. support other storage system through PathManager + 4. imported dict are turned into omegaconf.DictConfig automatically + """ + old_import = builtins.__import__ + + def find_relative_file(original_file, relative_import_path, level): + cur_file = os.path.dirname(original_file) + for _ in range(level - 1): + cur_file = os.path.dirname(cur_file) + cur_name = relative_import_path.lstrip(".") + for part in cur_name.split("."): + cur_file = os.path.join(cur_file, part) + # NOTE: directory import is not handled. Because then it's unclear + # if such import should produce python module or DictConfig. This can + # be discussed further if needed. + if not cur_file.endswith(".py"): + cur_file += ".py" + if not PathManager.isfile(cur_file): + raise ImportError( + f"Cannot import name {relative_import_path} from " + f"{original_file}: {cur_file} has to exist." + ) + return cur_file + + def new_import(name, globals=None, locals=None, fromlist=(), level=0): + if ( + # Only deal with relative imports inside config files + level != 0 + and globals is not None + and (globals.get("__package__", "") or "").startswith(_CFG_PACKAGE_NAME) + ): + cur_file = find_relative_file(globals["__file__"], name, level) + _validate_py_syntax(cur_file) + spec = importlib.machinery.ModuleSpec( + _random_package_name(cur_file), None, origin=cur_file + ) + module = importlib.util.module_from_spec(spec) + module.__file__ = cur_file + with PathManager.open(cur_file) as f: + content = f.read() + exec(compile(content, cur_file, "exec"), module.__dict__) + for name in fromlist: # turn imported dict into DictConfig automatically + val = _cast_to_config(module.__dict__[name]) + module.__dict__[name] = val + return module + return old_import(name, globals, locals, fromlist=fromlist, level=level) + + builtins.__import__ = new_import + yield new_import + builtins.__import__ = old_import + + +class LazyConfig: + """ + Provide methods to save, load, and overrides an omegaconf config object + which may contain definition of lazily-constructed objects. + """ + + @staticmethod + def load_rel(filename: str, keys: Union[None, str, Tuple[str, ...]] = None): + """ + Similar to :meth:`load()`, but load path relative to the caller's + source file. 
+
+        This has the same functionality as a relative import, except that this method
+        accepts filename as a string, so more characters are allowed in the filename.
+        """
+        caller_frame = inspect.stack()[1]
+        caller_fname = caller_frame[0].f_code.co_filename
+        assert caller_fname != "<string>", "load_rel Unable to find caller"
+        caller_dir = os.path.dirname(caller_fname)
+        filename = os.path.join(caller_dir, filename)
+        return LazyConfig.load(filename, keys)
+
+    @staticmethod
+    def load(filename: str, keys: Union[None, str, Tuple[str, ...]] = None):
+        """
+        Load a config file.
+
+        Args:
+            filename: absolute path or relative path w.r.t. the current working directory
+            keys: keys to load and return. If not given, return all keys
+                (whose values are config objects) in a dict.
+        """
+        has_keys = keys is not None
+        filename = filename.replace("/./", "/")  # redundant
+        if os.path.splitext(filename)[1] not in [".py", ".yaml", ".yml"]:
+            raise ValueError(f"Config file {filename} has to be a python or yaml file.")
+        if filename.endswith(".py"):
+            _validate_py_syntax(filename)
+
+            with _patch_import():
+                # Record the filename
+                module_namespace = {
+                    "__file__": filename,
+                    "__package__": _random_package_name(filename),
+                }
+                with PathManager.open(filename) as f:
+                    content = f.read()
+                # Compile first with filename to:
+                # 1. make filename appear in stacktrace
+                # 2. make load_rel able to find its parent's (possibly remote) location
+                exec(compile(content, filename, "exec"), module_namespace)
+
+            ret = module_namespace
+        else:
+            with PathManager.open(filename) as f:
+                obj = yaml.unsafe_load(f)
+            ret = OmegaConf.create(obj, flags={"allow_objects": True})
+
+        if has_keys:
+            if isinstance(keys, str):
+                return _cast_to_config(ret[keys])
+            else:
+                return tuple(_cast_to_config(ret[a]) for a in keys)
+        else:
+            if filename.endswith(".py"):
+                # when not specified, only load those that are config objects
+                ret = DictConfig(
+                    {
+                        name: _cast_to_config(value)
+                        for name, value in ret.items()
+                        if isinstance(value, (DictConfig, ListConfig, dict))
+                        and not name.startswith("_")
+                    },
+                    flags={"allow_objects": True},
+                )
+            return ret
+
+    @staticmethod
+    def save(cfg, filename: str):
+        """
+        Save a config object to a yaml file.
+        Note that when the config dictionary contains complex objects (e.g. lambda),
+        it can't be saved to yaml. In that case we will print an error and
+        attempt to save to a pkl file instead.
+
+        Args:
+            cfg: an omegaconf config object
+            filename: yaml file name to save the config file
+        """
+        logger = logging.getLogger(__name__)
+        try:
+            cfg = deepcopy(cfg)
+        except Exception:
+            pass
+        else:
+            # if it's deep-copyable, then...
+            def _replace_type_by_name(x):
+                if "_target_" in x and callable(x._target_):
+                    try:
+                        x._target_ = _convert_target_to_string(x._target_)
+                    except AttributeError:
+                        pass
+
+            # not necessary, but makes the yaml look nicer
+            _visit_dict_config(cfg, _replace_type_by_name)
+
+        save_pkl = False
+        try:
+            dict = OmegaConf.to_container(cfg, resolve=False)
+            dumped = yaml.dump(dict, default_flow_style=None, allow_unicode=True, width=9999)
+            with PathManager.open(filename, "w") as f:
+                f.write(dumped)
+
+            try:
+                _ = yaml.unsafe_load(dumped)  # test that it is loadable
+            except Exception:
+                logger.warning(
+                    "The config contains objects that cannot serialize to a valid yaml. "
+                    f"{filename} is human-readable but cannot be loaded."
+                )
+                save_pkl = True
+        except Exception:
+            logger.exception("Unable to serialize the config to yaml. Error:")
Error:") + save_pkl = True + + if save_pkl: + new_filename = filename + ".pkl" + try: + # retry by pickle + with PathManager.open(new_filename, "wb") as f: + cloudpickle.dump(cfg, f) + logger.warning(f"Config is saved using cloudpickle at {new_filename}.") + except Exception: + pass + + @staticmethod + def apply_overrides(cfg, overrides: List[str]): + """ + In-place override contents of cfg. + + Args: + cfg: an omegaconf config object + overrides: list of strings in the format of "a=b" to override configs. + See https://hydra.cc/docs/next/advanced/override_grammar/basic/ + for syntax. + + Returns: + the cfg object + """ + + def safe_update(cfg, key, value): + parts = key.split(".") + for idx in range(1, len(parts)): + prefix = ".".join(parts[:idx]) + v = OmegaConf.select(cfg, prefix, default=None) + if v is None: + break + if not OmegaConf.is_config(v): + raise KeyError( + f"Trying to update key {key}, but {prefix} " + f"is not a config, but has type {type(v)}." + ) + OmegaConf.update(cfg, key, value, merge=True) + + from hydra.core.override_parser.overrides_parser import OverridesParser + + parser = OverridesParser.create() + overrides = parser.parse_overrides(overrides) + for o in overrides: + key = o.key_or_group + value = o.value() + if o.is_delete(): + # TODO support this + raise NotImplementedError("deletion is not yet a supported override") + safe_update(cfg, key, value) + return cfg + + @staticmethod + def to_py(cfg, prefix: str = "cfg."): + """ + Try to convert a config object into Python-like psuedo code. + + Note that perfect conversion is not always possible. So the returned + results are mainly meant to be human-readable, and not meant to be executed. + + Args: + cfg: an omegaconf config object + prefix: root name for the resulting code (default: "cfg.") + + + Returns: + str of formatted Python code + """ + import black + + cfg = OmegaConf.to_container(cfg, resolve=True) + + def _to_str(obj, prefix=None, inside_call=False): + if prefix is None: + prefix = [] + if isinstance(obj, abc.Mapping) and "_target_" in obj: + # Dict representing a function call + target = _convert_target_to_string(obj.pop("_target_")) + args = [] + for k, v in sorted(obj.items()): + args.append(f"{k}={_to_str(v, inside_call=True)}") + args = ", ".join(args) + call = f"{target}({args})" + return "".join(prefix) + call + elif isinstance(obj, abc.Mapping) and not inside_call: + # Dict that is not inside a call is a list of top-level config objects that we + # render as one object per line with dot separated prefixes + key_list = [] + for k, v in sorted(obj.items()): + if isinstance(v, abc.Mapping) and "_target_" not in v: + key_list.append(_to_str(v, prefix=prefix + [k + "."])) + else: + key = "".join(prefix) + k + key_list.append(f"{key}={_to_str(v)}") + return "\n".join(key_list) + elif isinstance(obj, abc.Mapping): + # Dict that is inside a call is rendered as a regular dict + return ( + "{" + + ",".join( + f"{repr(k)}: {_to_str(v, inside_call=inside_call)}" + for k, v in sorted(obj.items()) + ) + + "}" + ) + elif isinstance(obj, list): + return "[" + ",".join(_to_str(x, inside_call=inside_call) for x in obj) + "]" + else: + return repr(obj) + + py_str = _to_str(cfg, prefix=[prefix]) + try: + return black.format_str(py_str, mode=black.Mode()) + except black.InvalidInput: + return py_str diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/__init__.py new file mode 100644 index 0000000..259f669 --- /dev/null +++ 
b/motion-gan-pipeline/preprocessing/third/detectron2/data/__init__.py @@ -0,0 +1,19 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from . import transforms # isort:skip + +from .build import ( + build_batch_data_loader, + build_detection_test_loader, + build_detection_train_loader, + get_detection_dataset_dicts, + load_proposals_into_dataset, + print_instances_class_histogram, +) +from .catalog import DatasetCatalog, MetadataCatalog, Metadata +from .common import DatasetFromList, MapDataset, ToIterableDataset +from .dataset_mapper import DatasetMapper + +# ensure the builtin datasets are registered +from . import datasets, samplers # isort:skip + +__all__ = [k for k in globals().keys() if not k.startswith("_")] diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/benchmark.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/benchmark.py new file mode 100644 index 0000000..ac2f372 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/benchmark.py @@ -0,0 +1,225 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import logging +import numpy as np +from itertools import count +from typing import List, Tuple +import torch +import tqdm +from fvcore.common.timer import Timer + +from detectron2.utils import comm + +from .build import build_batch_data_loader +from .common import DatasetFromList, MapDataset +from .samplers import TrainingSampler + +logger = logging.getLogger(__name__) + + +class _EmptyMapDataset(torch.utils.data.Dataset): + """ + Map anything to emptiness. + """ + + def __init__(self, dataset): + self.ds = dataset + + def __len__(self): + return len(self.ds) + + def __getitem__(self, idx): + _ = self.ds[idx] + return [0] + + +def iter_benchmark( + iterator, num_iter: int, warmup: int = 5, max_time_seconds: float = 60 +) -> Tuple[float, List[float]]: + """ + Benchmark an iterator/iterable for `num_iter` iterations with an extra + `warmup` iterations of warmup. + End early if `max_time_seconds` time is spent on iterations. + + Returns: + float: average time (seconds) per iteration + list[float]: time spent on each iteration. Sometimes useful for further analysis. + """ + num_iter, warmup = int(num_iter), int(warmup) + + iterator = iter(iterator) + for _ in range(warmup): + next(iterator) + timer = Timer() + all_times = [] + for curr_iter in tqdm.trange(num_iter): + start = timer.seconds() + if start > max_time_seconds: + num_iter = curr_iter + break + next(iterator) + all_times.append(timer.seconds() - start) + avg = timer.seconds() / num_iter + return avg, all_times + + +class DataLoaderBenchmark: + """ + Some common benchmarks that help understand perf bottleneck of a standard dataloader + made of dataset, mapper and sampler. 
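+
+    Example (hypothetical usage; ``dataset_dicts`` and ``mapper`` are placeholders)::
+
+        benchmark = DataLoaderBenchmark(
+            dataset_dicts, mapper=mapper, total_batch_size=16, num_workers=4
+        )
+        benchmark.benchmark_dataset(100)
+        benchmark.benchmark_mapper(100)
+        benchmark.benchmark_workers(100)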
+ """ + + def __init__( + self, + dataset, + *, + mapper, + sampler=None, + total_batch_size, + num_workers=0, + max_time_seconds: int = 90, + ): + """ + Args: + max_time_seconds (int): maximum time to spent for each benchmark + other args: same as in `build.py:build_detection_train_loader` + """ + if isinstance(dataset, list): + dataset = DatasetFromList(dataset, copy=False, serialize=True) + if sampler is None: + sampler = TrainingSampler(len(dataset)) + + self.dataset = dataset + self.mapper = mapper + self.sampler = sampler + self.total_batch_size = total_batch_size + self.num_workers = num_workers + self.per_gpu_batch_size = self.total_batch_size // comm.get_world_size() + + self.max_time_seconds = max_time_seconds + + def _benchmark(self, iterator, num_iter, warmup, msg=None): + avg, all_times = iter_benchmark(iterator, num_iter, warmup, self.max_time_seconds) + if msg is not None: + self._log_time(msg, avg, all_times) + return avg, all_times + + def _log_time(self, msg, avg, all_times, distributed=False): + percentiles = [np.percentile(all_times, k, interpolation="nearest") for k in [1, 5, 95, 99]] + if not distributed: + logger.info( + f"{msg}: avg={1.0/avg:.1f} it/s, " + f"p1={percentiles[0]:.2g}s, p5={percentiles[1]:.2g}s, " + f"p95={percentiles[2]:.2g}s, p99={percentiles[3]:.2g}s." + ) + return + avg_per_gpu = comm.all_gather(avg) + percentiles_per_gpu = comm.all_gather(percentiles) + if comm.get_rank() > 0: + return + for idx, avg, percentiles in zip(count(), avg_per_gpu, percentiles_per_gpu): + logger.info( + f"GPU{idx} {msg}: avg={1.0/avg:.1f} it/s, " + f"p1={percentiles[0]:.2g}s, p5={percentiles[1]:.2g}s, " + f"p95={percentiles[2]:.2g}s, p99={percentiles[3]:.2g}s." + ) + + def benchmark_dataset(self, num_iter, warmup=5): + """ + Benchmark the speed of taking raw samples from the dataset. + """ + + def loader(): + while True: + for k in self.sampler: + yield self.dataset[k] + + self._benchmark(loader(), num_iter, warmup, "Dataset Alone") + + def benchmark_mapper(self, num_iter, warmup=5): + """ + Benchmark the speed of taking raw samples from the dataset and map + them in a single process. + """ + + def loader(): + while True: + for k in self.sampler: + yield self.mapper(self.dataset[k]) + + self._benchmark(loader(), num_iter, warmup, "Single Process Mapper (sec/sample)") + + def benchmark_workers(self, num_iter, warmup=10): + """ + Benchmark the dataloader by tuning num_workers to [0, 1, self.num_workers]. + """ + candidates = [0, 1] + if self.num_workers not in candidates: + candidates.append(self.num_workers) + + dataset = MapDataset(self.dataset, self.mapper) + for n in candidates: + loader = build_batch_data_loader( + dataset, + self.sampler, + self.total_batch_size, + num_workers=n, + ) + self._benchmark( + iter(loader), + num_iter * max(n, 1), + warmup * max(n, 1), + f"DataLoader ({n} workers, bs={self.per_gpu_batch_size})", + ) + del loader + + def benchmark_IPC(self, num_iter, warmup=10): + """ + Benchmark the dataloader where each worker outputs nothing. This + eliminates the IPC overhead compared to the regular dataloader. + + PyTorch multiprocessing's IPC only optimizes for torch tensors. + Large numpy arrays or other data structure may incur large IPC overhead. 
+ """ + n = self.num_workers + dataset = _EmptyMapDataset(MapDataset(self.dataset, self.mapper)) + loader = build_batch_data_loader( + dataset, self.sampler, self.total_batch_size, num_workers=n + ) + self._benchmark( + iter(loader), + num_iter * max(n, 1), + warmup * max(n, 1), + f"DataLoader ({n} workers, bs={self.per_gpu_batch_size}) w/o comm", + ) + + def benchmark_distributed(self, num_iter, warmup=10): + """ + Benchmark the dataloader in each distributed worker, and log results of + all workers. This helps understand the final performance as well as + the variances among workers. + + It also prints startup time (first iter) of the dataloader. + """ + gpu = comm.get_world_size() + dataset = MapDataset(self.dataset, self.mapper) + n = self.num_workers + loader = build_batch_data_loader( + dataset, self.sampler, self.total_batch_size, num_workers=n + ) + + timer = Timer() + loader = iter(loader) + next(loader) + startup_time = timer.seconds() + logger.info("Dataloader startup time: {:.2f} seconds".format(startup_time)) + + comm.synchronize() + + avg, all_times = self._benchmark(loader, num_iter * max(n, 1), warmup * max(n, 1)) + del loader + self._log_time( + f"DataLoader ({gpu} GPUs x {n} workers, total bs={self.total_batch_size})", + avg, + all_times, + True, + ) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/build.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/build.py new file mode 100644 index 0000000..cf3a79b --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/build.py @@ -0,0 +1,542 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import itertools +import logging +import numpy as np +import operator +import pickle +from typing import Any, Callable, Dict, List, Optional, Union +import torch +import torch.utils.data as torchdata +from tabulate import tabulate +from termcolor import colored + +from detectron2.config import configurable +from detectron2.structures import BoxMode +from detectron2.utils.comm import get_world_size +from detectron2.utils.env import seed_all_rng +from detectron2.utils.file_io import PathManager +from detectron2.utils.logger import _log_api_usage, log_first_n + +from .catalog import DatasetCatalog, MetadataCatalog +from .common import AspectRatioGroupedDataset, DatasetFromList, MapDataset, ToIterableDataset +from .dataset_mapper import DatasetMapper +from .detection_utils import check_metadata_consistency +from .samplers import ( + InferenceSampler, + RandomSubsetTrainingSampler, + RepeatFactorTrainingSampler, + TrainingSampler, +) + +""" +This file contains the default logic to build a dataloader for training or testing. +""" + +__all__ = [ + "build_batch_data_loader", + "build_detection_train_loader", + "build_detection_test_loader", + "get_detection_dataset_dicts", + "load_proposals_into_dataset", + "print_instances_class_histogram", +] + + +def filter_images_with_only_crowd_annotations(dataset_dicts): + """ + Filter out images with none annotations or only crowd annotations + (i.e., images without non-crowd annotations). + A common training-time preprocessing on COCO dataset. + + Args: + dataset_dicts (list[dict]): annotations in Detectron2 Dataset format. + + Returns: + list[dict]: the same format, but filtered. 
+ """ + num_before = len(dataset_dicts) + + def valid(anns): + for ann in anns: + if ann.get("iscrowd", 0) == 0: + return True + return False + + dataset_dicts = [x for x in dataset_dicts if valid(x["annotations"])] + num_after = len(dataset_dicts) + logger = logging.getLogger(__name__) + logger.info( + "Removed {} images with no usable annotations. {} images left.".format( + num_before - num_after, num_after + ) + ) + return dataset_dicts + + +def filter_images_with_few_keypoints(dataset_dicts, min_keypoints_per_image): + """ + Filter out images with too few number of keypoints. + + Args: + dataset_dicts (list[dict]): annotations in Detectron2 Dataset format. + + Returns: + list[dict]: the same format as dataset_dicts, but filtered. + """ + num_before = len(dataset_dicts) + + def visible_keypoints_in_image(dic): + # Each keypoints field has the format [x1, y1, v1, ...], where v is visibility + annotations = dic["annotations"] + return sum( + (np.array(ann["keypoints"][2::3]) > 0).sum() + for ann in annotations + if "keypoints" in ann + ) + + dataset_dicts = [ + x for x in dataset_dicts if visible_keypoints_in_image(x) >= min_keypoints_per_image + ] + num_after = len(dataset_dicts) + logger = logging.getLogger(__name__) + logger.info( + "Removed {} images with fewer than {} keypoints.".format( + num_before - num_after, min_keypoints_per_image + ) + ) + return dataset_dicts + + +def load_proposals_into_dataset(dataset_dicts, proposal_file): + """ + Load precomputed object proposals into the dataset. + + The proposal file should be a pickled dict with the following keys: + + - "ids": list[int] or list[str], the image ids + - "boxes": list[np.ndarray], each is an Nx4 array of boxes corresponding to the image id + - "objectness_logits": list[np.ndarray], each is an N sized array of objectness scores + corresponding to the boxes. + - "bbox_mode": the BoxMode of the boxes array. Defaults to ``BoxMode.XYXY_ABS``. + + Args: + dataset_dicts (list[dict]): annotations in Detectron2 Dataset format. + proposal_file (str): file path of pre-computed proposals, in pkl format. + + Returns: + list[dict]: the same format as dataset_dicts, but added proposal field. + """ + logger = logging.getLogger(__name__) + logger.info("Loading proposals from: {}".format(proposal_file)) + + with PathManager.open(proposal_file, "rb") as f: + proposals = pickle.load(f, encoding="latin1") + + # Rename the key names in D1 proposal files + rename_keys = {"indexes": "ids", "scores": "objectness_logits"} + for key in rename_keys: + if key in proposals: + proposals[rename_keys[key]] = proposals.pop(key) + + # Fetch the indexes of all proposals that are in the dataset + # Convert image_id to str since they could be int. 
+ img_ids = set({str(record["image_id"]) for record in dataset_dicts}) + id_to_index = {str(id): i for i, id in enumerate(proposals["ids"]) if str(id) in img_ids} + + # Assuming default bbox_mode of precomputed proposals are 'XYXY_ABS' + bbox_mode = BoxMode(proposals["bbox_mode"]) if "bbox_mode" in proposals else BoxMode.XYXY_ABS + + for record in dataset_dicts: + # Get the index of the proposal + i = id_to_index[str(record["image_id"])] + + boxes = proposals["boxes"][i] + objectness_logits = proposals["objectness_logits"][i] + # Sort the proposals in descending order of the scores + inds = objectness_logits.argsort()[::-1] + record["proposal_boxes"] = boxes[inds] + record["proposal_objectness_logits"] = objectness_logits[inds] + record["proposal_bbox_mode"] = bbox_mode + + return dataset_dicts + + +def print_instances_class_histogram(dataset_dicts, class_names): + """ + Args: + dataset_dicts (list[dict]): list of dataset dicts. + class_names (list[str]): list of class names (zero-indexed). + """ + num_classes = len(class_names) + hist_bins = np.arange(num_classes + 1) + histogram = np.zeros((num_classes,), dtype=np.int) + for entry in dataset_dicts: + annos = entry["annotations"] + classes = np.asarray( + [x["category_id"] for x in annos if not x.get("iscrowd", 0)], dtype=np.int + ) + if len(classes): + assert classes.min() >= 0, f"Got an invalid category_id={classes.min()}" + assert ( + classes.max() < num_classes + ), f"Got an invalid category_id={classes.max()} for a dataset of {num_classes} classes" + histogram += np.histogram(classes, bins=hist_bins)[0] + + N_COLS = min(6, len(class_names) * 2) + + def short_name(x): + # make long class names shorter. useful for lvis + if len(x) > 13: + return x[:11] + ".." + return x + + data = list( + itertools.chain(*[[short_name(class_names[i]), int(v)] for i, v in enumerate(histogram)]) + ) + total_num_instances = sum(data[1::2]) + data.extend([None] * (N_COLS - (len(data) % N_COLS))) + if num_classes > 1: + data.extend(["total", total_num_instances]) + data = itertools.zip_longest(*[data[i::N_COLS] for i in range(N_COLS)]) + table = tabulate( + data, + headers=["category", "#instances"] * (N_COLS // 2), + tablefmt="pipe", + numalign="left", + stralign="center", + ) + log_first_n( + logging.INFO, + "Distribution of instances among all {} categories:\n".format(num_classes) + + colored(table, "cyan"), + key="message", + ) + + +def get_detection_dataset_dicts( + names, + filter_empty=True, + min_keypoints=0, + proposal_files=None, + check_consistency=True, +): + """ + Load and prepare dataset dicts for instance detection/segmentation and semantic segmentation. + + Args: + names (str or list[str]): a dataset name or a list of dataset names + filter_empty (bool): whether to filter out images without instance annotations + min_keypoints (int): filter out images with fewer keypoints than + `min_keypoints`. Set to 0 to do nothing. + proposal_files (list[str]): if given, a list of object proposal files + that match each dataset in `names`. + check_consistency (bool): whether to check if datasets have consistent metadata. + + Returns: + list[dict]: a list of dicts following the standard dataset dict format. 
+ """ + if isinstance(names, str): + names = [names] + assert len(names), names + dataset_dicts = [DatasetCatalog.get(dataset_name) for dataset_name in names] + for dataset_name, dicts in zip(names, dataset_dicts): + assert len(dicts), "Dataset '{}' is empty!".format(dataset_name) + + if proposal_files is not None: + assert len(names) == len(proposal_files) + # load precomputed proposals from proposal files + dataset_dicts = [ + load_proposals_into_dataset(dataset_i_dicts, proposal_file) + for dataset_i_dicts, proposal_file in zip(dataset_dicts, proposal_files) + ] + + if isinstance(dataset_dicts[0], torchdata.Dataset): + return torchdata.ConcatDataset(dataset_dicts) + + dataset_dicts = list(itertools.chain.from_iterable(dataset_dicts)) + + has_instances = "annotations" in dataset_dicts[0] + if filter_empty and has_instances: + dataset_dicts = filter_images_with_only_crowd_annotations(dataset_dicts) + if min_keypoints > 0 and has_instances: + dataset_dicts = filter_images_with_few_keypoints(dataset_dicts, min_keypoints) + + if check_consistency and has_instances: + try: + class_names = MetadataCatalog.get(names[0]).thing_classes + check_metadata_consistency("thing_classes", names) + print_instances_class_histogram(dataset_dicts, class_names) + except AttributeError: # class names are not available for this dataset + pass + + assert len(dataset_dicts), "No valid data found in {}.".format(",".join(names)) + return dataset_dicts + + +def build_batch_data_loader( + dataset, + sampler, + total_batch_size, + *, + aspect_ratio_grouping=False, + num_workers=0, + collate_fn=None, +): + """ + Build a batched dataloader. The main differences from `torch.autils.data.DataLoader` are: + 1. support aspect ratio grouping options + 2. use no "batch collation", because this is common for detection training + + Args: + dataset (torch.autils.data.Dataset): a pytorch map-style or iterable dataset. + sampler (torch.autils.data.sampler.Sampler or None): a sampler that produces indices. + Must be provided iff. ``dataset`` is a map-style dataset. + total_batch_size, aspect_ratio_grouping, num_workers, collate_fn: see + :func:`build_detection_train_loader`. + + Returns: + iterable[list]. Length of each list is the batch size of the current + GPU. Each element in the list comes from the dataset. 
+ """ + world_size = get_world_size() + assert ( + total_batch_size > 0 and total_batch_size % world_size == 0 + ), "Total batch size ({}) must be divisible by the number of gpus ({}).".format( + total_batch_size, world_size + ) + batch_size = total_batch_size // world_size + + if isinstance(dataset, torchdata.IterableDataset): + assert sampler is None, "sampler must be None if dataset is IterableDataset" + else: + dataset = ToIterableDataset(dataset, sampler) + + if aspect_ratio_grouping: + data_loader = torchdata.DataLoader( + dataset, + num_workers=num_workers, + collate_fn=operator.itemgetter(0), # don't batch, but yield individual elements + worker_init_fn=worker_init_reset_seed, + ) # yield individual mapped dict + data_loader = AspectRatioGroupedDataset(data_loader, batch_size) + if collate_fn is None: + return data_loader + return MapDataset(data_loader, collate_fn) + else: + return torchdata.DataLoader( + dataset, + batch_size=batch_size, + drop_last=True, + num_workers=num_workers, + collate_fn=trivial_batch_collator if collate_fn is None else collate_fn, + worker_init_fn=worker_init_reset_seed, + ) + + +def _train_loader_from_config(cfg, mapper=None, *, dataset=None, sampler=None): + if dataset is None: + dataset = get_detection_dataset_dicts( + cfg.DATASETS.TRAIN, + filter_empty=cfg.DATALOADER.FILTER_EMPTY_ANNOTATIONS, + min_keypoints=cfg.MODEL.ROI_KEYPOINT_HEAD.MIN_KEYPOINTS_PER_IMAGE + if cfg.MODEL.KEYPOINT_ON + else 0, + proposal_files=cfg.DATASETS.PROPOSAL_FILES_TRAIN if cfg.MODEL.LOAD_PROPOSALS else None, + ) + _log_api_usage("dataset." + cfg.DATASETS.TRAIN[0]) + + if mapper is None: + mapper = DatasetMapper(cfg, True) + + if sampler is None: + sampler_name = cfg.DATALOADER.SAMPLER_TRAIN + logger = logging.getLogger(__name__) + logger.info("Using training sampler {}".format(sampler_name)) + if sampler_name == "TrainingSampler": + sampler = TrainingSampler(len(dataset)) + elif sampler_name == "RepeatFactorTrainingSampler": + repeat_factors = RepeatFactorTrainingSampler.repeat_factors_from_category_frequency( + dataset, cfg.DATALOADER.REPEAT_THRESHOLD + ) + sampler = RepeatFactorTrainingSampler(repeat_factors) + elif sampler_name == "RandomSubsetTrainingSampler": + sampler = RandomSubsetTrainingSampler(len(dataset), cfg.DATALOADER.RANDOM_SUBSET_RATIO) + else: + raise ValueError("Unknown training sampler: {}".format(sampler_name)) + + return { + "dataset": dataset, + "sampler": sampler, + "mapper": mapper, + "total_batch_size": cfg.SOLVER.IMS_PER_BATCH, + "aspect_ratio_grouping": cfg.DATALOADER.ASPECT_RATIO_GROUPING, + "num_workers": cfg.DATALOADER.NUM_WORKERS, + } + + +@configurable(from_config=_train_loader_from_config) +def build_detection_train_loader( + dataset, + *, + mapper, + sampler=None, + total_batch_size, + aspect_ratio_grouping=True, + num_workers=0, + collate_fn=None, +): + """ + Build a dataloader for object detection with some default features. + + Args: + dataset (list or torch.autils.data.Dataset): a list of dataset dicts, + or a pytorch dataset (either map-style or iterable). It can be obtained + by using :func:`DatasetCatalog.get` or :func:`get_detection_dataset_dicts`. + mapper (callable): a callable which takes a sample (dict) from dataset and + returns the format to be consumed by the model. + When using cfg, the default choice is ``DatasetMapper(cfg, is_train=True)``. + sampler (torch.autils.data.sampler.Sampler or None): a sampler that produces + indices to be applied on ``dataset``. 
+ If ``dataset`` is map-style, the default sampler is a :class:`TrainingSampler`, + which coordinates an infinite random shuffle sequence across all workers. + Sampler must be None if ``dataset`` is iterable. + total_batch_size (int): total batch size across all workers. + aspect_ratio_grouping (bool): whether to group images with similar + aspect ratio for efficiency. When enabled, it requires each + element in dataset be a dict with keys "width" and "height". + num_workers (int): number of parallel data loading workers + collate_fn: a function that determines how to do batching, same as the argument of + `torch.autils.data.DataLoader`. Defaults to do no collation and return a list of + data. No collation is OK for small batch size and simple data structures. + If your batch size is large and each sample contains too many small tensors, + it's more efficient to collate them in data loader. + + Returns: + torch.autils.data.DataLoader: + a dataloader. Each output from it is a ``list[mapped_element]`` of length + ``total_batch_size / num_workers``, where ``mapped_element`` is produced + by the ``mapper``. + """ + if isinstance(dataset, list): + dataset = DatasetFromList(dataset, copy=False) + if mapper is not None: + dataset = MapDataset(dataset, mapper) + + if isinstance(dataset, torchdata.IterableDataset): + assert sampler is None, "sampler must be None if dataset is IterableDataset" + else: + if sampler is None: + sampler = TrainingSampler(len(dataset)) + assert isinstance(sampler, torchdata.Sampler), f"Expect a Sampler but got {type(sampler)}" + return build_batch_data_loader( + dataset, + sampler, + total_batch_size, + aspect_ratio_grouping=aspect_ratio_grouping, + num_workers=num_workers, + collate_fn=collate_fn, + ) + + +def _test_loader_from_config(cfg, dataset_name, mapper=None): + """ + Uses the given `dataset_name` argument (instead of the names in cfg), because the + standard practice is to evaluate each test set individually (not combining them). + """ + if isinstance(dataset_name, str): + dataset_name = [dataset_name] + + dataset = get_detection_dataset_dicts( + dataset_name, + filter_empty=False, + proposal_files=[ + cfg.DATASETS.PROPOSAL_FILES_TEST[list(cfg.DATASETS.TEST).index(x)] for x in dataset_name + ] + if cfg.MODEL.LOAD_PROPOSALS + else None, + ) + if mapper is None: + mapper = DatasetMapper(cfg, False) + return { + "dataset": dataset, + "mapper": mapper, + "num_workers": cfg.DATALOADER.NUM_WORKERS, + "sampler": InferenceSampler(len(dataset)), + } + + +@configurable(from_config=_test_loader_from_config) +def build_detection_test_loader( + dataset: Union[List[Any], torchdata.Dataset], + *, + mapper: Callable[[Dict[str, Any]], Any], + sampler: Optional[torchdata.Sampler] = None, + batch_size: int = 1, + num_workers: int = 0, + collate_fn: Optional[Callable[[List[Any]], Any]] = None, +) -> torchdata.DataLoader: + """ + Similar to `build_detection_train_loader`, with default batch size = 1, + and sampler = :class:`InferenceSampler`. This sampler coordinates all workers + to produce the exact set of all samples. + + Args: + dataset: a list of dataset dicts, + or a pytorch dataset (either map-style or iterable). They can be obtained + by using :func:`DatasetCatalog.get` or :func:`get_detection_dataset_dicts`. + mapper: a callable which takes a sample (dict) from dataset + and returns the format to be consumed by the model. + When using cfg, the default choice is ``DatasetMapper(cfg, is_train=False)``. + sampler: a sampler that produces + indices to be applied on ``dataset``. 
+            which splits the dataset across all workers. Sampler must be None
+            if `dataset` is iterable.
+        batch_size: the batch size of the data loader to be created.
+            Default to 1 image per worker since this is the standard when reporting
+            inference time in papers.
+        num_workers: number of parallel data loading workers
+        collate_fn: same as the argument of `torch.utils.data.DataLoader`.
+            Defaults to do no collation and return a list of data.
+
+    Returns:
+        DataLoader: a torch DataLoader that loads the given detection
+        dataset, with test-time transformation and batching.
+
+    Examples:
+    ::
+        data_loader = build_detection_test_loader(
+            DatasetCatalog.get("my_test"),
+            mapper=DatasetMapper(...))
+
+        # or, instantiate with a CfgNode:
+        data_loader = build_detection_test_loader(cfg, "my_test")
+    """
+    if isinstance(dataset, list):
+        dataset = DatasetFromList(dataset, copy=False)
+    if mapper is not None:
+        dataset = MapDataset(dataset, mapper)
+    if isinstance(dataset, torchdata.IterableDataset):
+        assert sampler is None, "sampler must be None if dataset is IterableDataset"
+    else:
+        if sampler is None:
+            sampler = InferenceSampler(len(dataset))
+    return torchdata.DataLoader(
+        dataset,
+        batch_size=batch_size,
+        sampler=sampler,
+        drop_last=False,
+        num_workers=num_workers,
+        collate_fn=trivial_batch_collator if collate_fn is None else collate_fn,
+    )
+
+
+def trivial_batch_collator(batch):
+    """
+    A batch collator that does nothing.
+    """
+    return batch
+
+
+def worker_init_reset_seed(worker_id):
+    initial_seed = torch.initial_seed() % 2 ** 31
+    seed_all_rng(initial_seed + worker_id)
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/catalog.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/catalog.py
new file mode 100644
index 0000000..45c110c
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/catalog.py
@@ -0,0 +1,236 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+import copy
+import logging
+import types
+from collections import UserDict
+from typing import List
+
+from detectron2.utils.logger import log_first_n
+
+__all__ = ["DatasetCatalog", "MetadataCatalog", "Metadata"]
+
+
+class _DatasetCatalog(UserDict):
+    """
+    A global dictionary that stores information about the datasets and how to obtain them.
+
+    It contains a mapping from strings
+    (which are names that identify a dataset, e.g. "coco_2014_train")
+    to a function which parses the dataset and returns the samples in the
+    format of `list[dict]`.
+
+    The returned dicts should be in Detectron2 Dataset format (See DATASETS.md for details)
+    if used with the data loader functionalities in `data/build.py, data/detection_transform.py`.
+
+    The purpose of having this catalog is to make it easy to choose
+    different datasets, by just using the strings in the config.
+    """
+
+    def register(self, name, func):
+        """
+        Args:
+            name (str): the name that identifies a dataset, e.g. "coco_2014_train".
+            func (callable): a callable which takes no arguments and returns a list of dicts.
+                It must return the same results if called multiple times.
+        """
+        assert callable(func), "You must register a function with `DatasetCatalog.register`!"
+        assert name not in self, "Dataset '{}' is already registered!".format(name)
+        self[name] = func
+
+    def get(self, name):
+        """
+        Call the registered function and return its results.
+
+        Args:
+            name (str): the name that identifies a dataset, e.g. "coco_2014_train".
+
+        Returns:
+            list[dict]: dataset annotations.
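+
+        Examples:
+        ::
+            # illustrative sketch with a hypothetical dataset name
+            DatasetCatalog.register("my_dataset", lambda: [{"file_name": "a.jpg"}])
+            dicts = DatasetCatalog.get("my_dataset")  # invokes the registered function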
+ """ + try: + f = self[name] + except KeyError as e: + raise KeyError( + "Dataset '{}' is not registered! Available datasets are: {}".format( + name, ", ".join(list(self.keys())) + ) + ) from e + return f() + + def list(self) -> List[str]: + """ + List all registered datasets. + + Returns: + list[str] + """ + return list(self.keys()) + + def remove(self, name): + """ + Alias of ``pop``. + """ + self.pop(name) + + def __str__(self): + return "DatasetCatalog(registered datasets: {})".format(", ".join(self.keys())) + + __repr__ = __str__ + + +DatasetCatalog = _DatasetCatalog() +DatasetCatalog.__doc__ = ( + _DatasetCatalog.__doc__ + + """ + .. automethod:: detectron2.data.catalog.DatasetCatalog.register + .. automethod:: detectron2.data.catalog.DatasetCatalog.get +""" +) + + +class Metadata(types.SimpleNamespace): + """ + A class that supports simple attribute setter/getter. + It is intended for storing metadata of a dataset and make it accessible globally. + + Examples: + :: + # somewhere when you load the data: + MetadataCatalog.get("mydataset").thing_classes = ["person", "dog"] + + # somewhere when you print statistics or visualize: + classes = MetadataCatalog.get("mydataset").thing_classes + """ + + # the name of the dataset + # set default to N/A so that `self.name` in the errors will not trigger getattr again + name: str = "N/A" + + _RENAMED = { + "class_names": "thing_classes", + "dataset_id_to_contiguous_id": "thing_dataset_id_to_contiguous_id", + "stuff_class_names": "stuff_classes", + } + + def __getattr__(self, key): + if key in self._RENAMED: + log_first_n( + logging.WARNING, + "Metadata '{}' was renamed to '{}'!".format(key, self._RENAMED[key]), + n=10, + ) + return getattr(self, self._RENAMED[key]) + + # "name" exists in every metadata + if len(self.__dict__) > 1: + raise AttributeError( + "Attribute '{}' does not exist in the metadata of dataset '{}'. Available " + "keys are {}.".format(key, self.name, str(self.__dict__.keys())) + ) + else: + raise AttributeError( + f"Attribute '{key}' does not exist in the metadata of dataset '{self.name}': " + "metadata is empty." + ) + + def __setattr__(self, key, val): + if key in self._RENAMED: + log_first_n( + logging.WARNING, + "Metadata '{}' was renamed to '{}'!".format(key, self._RENAMED[key]), + n=10, + ) + setattr(self, self._RENAMED[key], val) + + # Ensure that metadata of the same name stays consistent + try: + oldval = getattr(self, key) + assert oldval == val, ( + "Attribute '{}' in the metadata of '{}' cannot be set " + "to a different value!\n{} != {}".format(key, self.name, oldval, val) + ) + except AttributeError: + super().__setattr__(key, val) + + def as_dict(self): + """ + Returns all the metadata as a dict. + Note that modifications to the returned dict will not reflect on the Metadata object. + """ + return copy.copy(self.__dict__) + + def set(self, **kwargs): + """ + Set multiple metadata with kwargs. + """ + for k, v in kwargs.items(): + setattr(self, k, v) + return self + + def get(self, key, default=None): + """ + Access an attribute and return its value if exists. + Otherwise return default. + """ + try: + return getattr(self, key) + except AttributeError: + return default + + +class _MetadataCatalog(UserDict): + """ + MetadataCatalog is a global dictionary that provides access to + :class:`Metadata` of a given dataset. + + The metadata associated with a certain name is a singleton: once created, the + metadata will stay alive and will be returned by future calls to ``get(name)``. 
+ + It's like global variables, so don't abuse it. + It's meant for storing knowledge that's constant and shared across the execution + of the program, e.g.: the class names in COCO. + """ + + def get(self, name): + """ + Args: + name (str): name of a dataset (e.g. coco_2014_train). + + Returns: + Metadata: The :class:`Metadata` instance associated with this name, + or create an empty one if none is available. + """ + assert len(name) + r = super().get(name, None) + if r is None: + r = self[name] = Metadata(name=name) + return r + + def list(self): + """ + List all registered metadata. + + Returns: + list[str]: keys (names of datasets) of all registered metadata + """ + return list(self.keys()) + + def remove(self, name): + """ + Alias of ``pop``. + """ + self.pop(name) + + def __str__(self): + return "MetadataCatalog(registered metadata: {})".format(", ".join(self.keys())) + + __repr__ = __str__ + + +MetadataCatalog = _MetadataCatalog() +MetadataCatalog.__doc__ = ( + _MetadataCatalog.__doc__ + + """ + .. automethod:: detectron2.data.catalog.MetadataCatalog.get +""" +) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/common.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/common.py new file mode 100644 index 0000000..22080a9 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/common.py @@ -0,0 +1,244 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import copy +import itertools +import logging +import numpy as np +import pickle +import random +import torch.utils.data as data +from torch.utils.data.sampler import Sampler + +from detectron2.utils.serialize import PicklableWrapper + +__all__ = ["MapDataset", "DatasetFromList", "AspectRatioGroupedDataset", "ToIterableDataset"] + + +def _shard_iterator_dataloader_worker(iterable): + # Shard the iterable if we're currently inside pytorch dataloader worker. + worker_info = data.get_worker_info() + if worker_info is None or worker_info.num_workers == 1: + # do nothing + yield from iterable + else: + yield from itertools.islice(iterable, worker_info.id, None, worker_info.num_workers) + + +class _MapIterableDataset(data.IterableDataset): + """ + Map a function over elements in an IterableDataset. + + Similar to pytorch's MapIterDataPipe, but support filtering when map_func + returns None. + + This class is not public-facing. Will be called by `MapDataset`. + """ + + def __init__(self, dataset, map_func): + self._dataset = dataset + self._map_func = PicklableWrapper(map_func) # wrap so that a lambda will work + + def __len__(self): + return len(self._dataset) + + def __iter__(self): + for x in map(self._map_func, self._dataset): + if x is not None: + yield x + + +class MapDataset(data.Dataset): + """ + Map a function over the elements in a dataset. + """ + + def __init__(self, dataset, map_func): + """ + Args: + dataset: a dataset where map function is applied. Can be either + map-style or iterable dataset. When given an iterable dataset, + the returned object will also be an iterable dataset. + map_func: a callable which maps the element in dataset. map_func can + return None to skip the data (e.g. in case of errors). + How None is handled depends on the style of `dataset`. + If `dataset` is map-style, it randomly tries other elements. + If `dataset` is iterable, it skips the data and tries the next. 
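+
+        Examples:
+        ::
+            # minimal sketch: double every element of a small list-backed dataset
+            ds = MapDataset(DatasetFromList([1, 2, 3], serialize=False), lambda x: x * 2)
+            assert ds[1] == 4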
+ """ + self._dataset = dataset + self._map_func = PicklableWrapper(map_func) # wrap so that a lambda will work + + self._rng = random.Random(42) + self._fallback_candidates = set(range(len(dataset))) + + def __new__(cls, dataset, map_func): + is_iterable = isinstance(dataset, data.IterableDataset) + if is_iterable: + return _MapIterableDataset(dataset, map_func) + else: + return super().__new__(cls) + + def __getnewargs__(self): + return self._dataset, self._map_func + + def __len__(self): + return len(self._dataset) + + def __getitem__(self, idx): + retry_count = 0 + cur_idx = int(idx) + + while True: + data = self._map_func(self._dataset[cur_idx]) + if data is not None: + self._fallback_candidates.add(cur_idx) + return data + + # _map_func fails for this idx, use a random new index from the pool + retry_count += 1 + self._fallback_candidates.discard(cur_idx) + cur_idx = self._rng.sample(self._fallback_candidates, k=1)[0] + + if retry_count >= 3: + logger = logging.getLogger(__name__) + logger.warning( + "Failed to apply `_map_func` for idx: {}, retry count: {}".format( + idx, retry_count + ) + ) + + +class DatasetFromList(data.Dataset): + """ + Wrap a list to a torch Dataset. It produces elements of the list as data. + """ + + def __init__(self, lst: list, copy: bool = True, serialize: bool = True): + """ + Args: + lst (list): a list which contains elements to produce. + copy (bool): whether to deepcopy the element when producing it, + so that the result can be modified in place without affecting the + source in the list. + serialize (bool): whether to hold memory using serialized objects, when + enabled, data loader workers can use shared RAM from master + process instead of making a copy. + """ + self._lst = lst + self._copy = copy + self._serialize = serialize + + def _serialize(data): + buffer = pickle.dumps(data, protocol=-1) + return np.frombuffer(buffer, dtype=np.uint8) + + if self._serialize: + logger = logging.getLogger(__name__) + logger.info( + "Serializing {} elements to byte tensors and concatenating them all ...".format( + len(self._lst) + ) + ) + self._lst = [_serialize(x) for x in self._lst] + self._addr = np.asarray([len(x) for x in self._lst], dtype=np.int64) + self._addr = np.cumsum(self._addr) + self._lst = np.concatenate(self._lst) + logger.info("Serialized dataset takes {:.2f} MiB".format(len(self._lst) / 1024 ** 2)) + + def __len__(self): + if self._serialize: + return len(self._addr) + else: + return len(self._lst) + + def __getitem__(self, idx): + if self._serialize: + start_addr = 0 if idx == 0 else self._addr[idx - 1].item() + end_addr = self._addr[idx].item() + bytes = memoryview(self._lst[start_addr:end_addr]) + return pickle.loads(bytes) + elif self._copy: + return copy.deepcopy(self._lst[idx]) + else: + return self._lst[idx] + + +class ToIterableDataset(data.IterableDataset): + """ + Convert an old indices-based (also called map-style) dataset + to an iterable-style dataset. + """ + + def __init__(self, dataset: data.Dataset, sampler: Sampler, shard_sampler: bool = True): + """ + Args: + dataset: an old-style dataset with ``__getitem__`` + sampler: a cheap iterable that produces indices to be applied on ``dataset``. + shard_sampler: whether to shard the sampler based on the current pytorch data loader + worker id. When an IterableDataset is forked by pytorch's DataLoader into multiple + workers, it is responsible for sharding its data based on worker id so that workers + don't produce identical data. 
+
+                Most samplers (like our TrainingSampler) do not shard based on dataloader worker id
+                and this argument should be set to True. But certain samplers may already be
+                sharded, in that case this argument should be set to False.
+        """
+        assert not isinstance(dataset, data.IterableDataset), dataset
+        assert isinstance(sampler, Sampler), sampler
+        self.dataset = dataset
+        self.sampler = sampler
+        self.shard_sampler = shard_sampler
+
+    def __iter__(self):
+        if not self.shard_sampler:
+            sampler = self.sampler
+        else:
+            # With map-style dataset, `DataLoader(dataset, sampler)` runs the
+            # sampler in main process only. But `DataLoader(ToIterableDataset(dataset, sampler))`
+            # will run the sampler in each of the N workers. So we should only keep 1/N of the
+            # ids on each worker. The assumption is that the sampler is cheap to iterate, so it's
+            # fine to discard ids in workers.
+            sampler = _shard_iterator_dataloader_worker(self.sampler)
+        for idx in sampler:
+            yield self.dataset[idx]
+
+    def __len__(self):
+        return len(self.sampler)
+
+
+class AspectRatioGroupedDataset(data.IterableDataset):
+    """
+    Batch data that have similar aspect ratio together.
+    In this implementation, images whose aspect ratio < (or >) 1 will
+    be batched together.
+    This improves training speed because the images then need less padding
+    to form a batch.
+
+    It assumes the underlying dataset produces dicts with "width" and "height" keys.
+    It will then produce a list of original dicts with length = batch_size,
+    all with similar aspect ratios.
+    """
+
+    def __init__(self, dataset, batch_size):
+        """
+        Args:
+            dataset: an iterable. Each element must be a dict with keys
+                "width" and "height", which will be used to batch data.
+            batch_size (int):
+        """
+        self.dataset = dataset
+        self.batch_size = batch_size
+        self._buckets = [[] for _ in range(2)]
+        # Hard-coded two aspect ratio groups: w > h and w < h.
+        # Can add support for more aspect ratio groups, but doesn't seem useful
+
+    def __iter__(self):
+        for d in self.dataset:
+            w, h = d["width"], d["height"]
+            bucket_id = 0 if w > h else 1
+            bucket = self._buckets[bucket_id]
+            bucket.append(d)
+            if len(bucket) == self.batch_size:
+                data = bucket[:]
+                # Clear bucket first, because code after yield is not
+                # guaranteed to execute
+                del bucket[:]
+                yield data
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/dataset_mapper.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/dataset_mapper.py
new file mode 100644
index 0000000..a8714f7
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/dataset_mapper.py
@@ -0,0 +1,191 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+import copy
+import logging
+import numpy as np
+from typing import List, Optional, Union
+import torch
+
+from detectron2.config import configurable
+
+from . import detection_utils as utils
+from . import transforms as T
+
+"""
+This file contains the default mapping that's applied to "dataset dicts".
+"""
+
+__all__ = ["DatasetMapper"]
+
+
+class DatasetMapper:
+    """
+    A callable which takes a dataset dict in Detectron2 Dataset format,
+    and maps it into a format used by the model.
+
+    This is the default callable to be used to map your dataset dict into training data.
+    You may need to follow it to implement your own version for customized logic,
+    such as a different way to read or transform images.
+    See :doc:`/tutorials/data_loading` for details.
+
+    The callable currently does the following:
+
+    1. Read the image from "file_name"
+    2. Apply cropping/geometric transforms to the image and annotations
+    3. Prepare data and annotations into Tensors and :class:`Instances`
+    """
+
+    @configurable
+    def __init__(
+        self,
+        is_train: bool,
+        *,
+        augmentations: List[Union[T.Augmentation, T.Transform]],
+        image_format: str,
+        use_instance_mask: bool = False,
+        use_keypoint: bool = False,
+        instance_mask_format: str = "polygon",
+        keypoint_hflip_indices: Optional[np.ndarray] = None,
+        precomputed_proposal_topk: Optional[int] = None,
+        recompute_boxes: bool = False,
+    ):
+        """
+        NOTE: this interface is experimental.
+
+        Args:
+            is_train: whether it's used in training or inference
+            augmentations: a list of augmentations or deterministic transforms to apply
+            image_format: an image format supported by :func:`detection_utils.read_image`.
+            use_instance_mask: whether to process instance segmentation annotations, if available
+            use_keypoint: whether to process keypoint annotations if available
+            instance_mask_format: one of "polygon" or "bitmask". Process instance segmentation
+                masks into this format.
+            keypoint_hflip_indices: see :func:`detection_utils.create_keypoint_hflip_indices`
+            precomputed_proposal_topk: if given, will load pre-computed
+                proposals from dataset_dict and keep the top k proposals for each image.
+            recompute_boxes: whether to overwrite bounding box annotations
+                by computing tight bounding boxes from instance mask annotations.
+        """
+        if recompute_boxes:
+            assert use_instance_mask, "recompute_boxes requires instance masks"
+        # fmt: off
+        self.is_train               = is_train
+        self.augmentations          = T.AugmentationList(augmentations)
+        self.image_format           = image_format
+        self.use_instance_mask      = use_instance_mask
+        self.instance_mask_format   = instance_mask_format
+        self.use_keypoint           = use_keypoint
+        self.keypoint_hflip_indices = keypoint_hflip_indices
+        self.proposal_topk          = precomputed_proposal_topk
+        self.recompute_boxes        = recompute_boxes
+        # fmt: on
+        logger = logging.getLogger(__name__)
+        mode = "training" if is_train else "inference"
+        logger.info(f"[DatasetMapper] Augmentations used in {mode}: {augmentations}")
+
+    @classmethod
+    def from_config(cls, cfg, is_train: bool = True):
+        augs = utils.build_augmentation(cfg, is_train)
+        if cfg.INPUT.CROP.ENABLED and is_train:
+            augs.insert(0, T.RandomCrop(cfg.INPUT.CROP.TYPE, cfg.INPUT.CROP.SIZE))
+            recompute_boxes = cfg.MODEL.MASK_ON
+        else:
+            recompute_boxes = False
+
+        ret = {
+            "is_train": is_train,
+            "augmentations": augs,
+            "image_format": cfg.INPUT.FORMAT,
+            "use_instance_mask": cfg.MODEL.MASK_ON,
+            "instance_mask_format": cfg.INPUT.MASK_FORMAT,
+            "use_keypoint": cfg.MODEL.KEYPOINT_ON,
+            "recompute_boxes": recompute_boxes,
+        }
+
+        if cfg.MODEL.KEYPOINT_ON:
+            ret["keypoint_hflip_indices"] = utils.create_keypoint_hflip_indices(cfg.DATASETS.TRAIN)
+
+        if cfg.MODEL.LOAD_PROPOSALS:
+            ret["precomputed_proposal_topk"] = (
+                cfg.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TRAIN
+                if is_train
+                else cfg.DATASETS.PRECOMPUTED_PROPOSAL_TOPK_TEST
+            )
+        return ret
+
+    def _transform_annotations(self, dataset_dict, transforms, image_shape):
+        # USER: Modify this if you want to keep them for some reason.
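+        # (The loop below drops mask/keypoint fields that the current config
+        # does not consume, so they are not needlessly transformed.)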
+ for anno in dataset_dict["annotations"]: + if not self.use_instance_mask: + anno.pop("segmentation", None) + if not self.use_keypoint: + anno.pop("keypoints", None) + + # USER: Implement additional transformations if you have other types of data + annos = [ + utils.transform_instance_annotations( + obj, transforms, image_shape, keypoint_hflip_indices=self.keypoint_hflip_indices + ) + for obj in dataset_dict.pop("annotations") + if obj.get("iscrowd", 0) == 0 + ] + instances = utils.annotations_to_instances( + annos, image_shape, mask_format=self.instance_mask_format + ) + + # After transforms such as cropping are applied, the bounding box may no longer + # tightly bound the object. As an example, imagine a triangle object + # [(0,0), (2,0), (0,2)] cropped by a box [(1,0),(2,2)] (XYXY format). The tight + # bounding box of the cropped triangle should be [(1,0),(2,1)], which is not equal to + # the intersection of original bounding box and the cropping box. + if self.recompute_boxes: + instances.gt_boxes = instances.gt_masks.get_bounding_boxes() + dataset_dict["instances"] = utils.filter_empty_instances(instances) + + def __call__(self, dataset_dict): + """ + Args: + dataset_dict (dict): Metadata of one image, in Detectron2 Dataset format. + + Returns: + dict: a format that builtin models in detectron2 accept + """ + dataset_dict = copy.deepcopy(dataset_dict) # it will be modified by code below + # USER: Write your own image loading if it's not from a file + image = utils.read_image(dataset_dict["file_name"], format=self.image_format) + utils.check_image_size(dataset_dict, image) + + # USER: Remove if you don't do semantic/panoptic segmentation. + if "sem_seg_file_name" in dataset_dict: + sem_seg_gt = utils.read_image(dataset_dict.pop("sem_seg_file_name"), "L").squeeze(2) + else: + sem_seg_gt = None + + aug_input = T.AugInput(image, sem_seg=sem_seg_gt) + transforms = self.augmentations(aug_input) + image, sem_seg_gt = aug_input.image, aug_input.sem_seg + + image_shape = image.shape[:2] # h, w + # Pytorch's dataloader is efficient on torch.Tensor due to shared-memory, + # but not efficient on large generic data structures due to the use of pickle & mp.Queue. + # Therefore it's important to use torch.Tensor. + dataset_dict["image"] = torch.as_tensor(np.ascontiguousarray(image.transpose(2, 0, 1))) + if sem_seg_gt is not None: + dataset_dict["sem_seg"] = torch.as_tensor(sem_seg_gt.astype("long")) + + # USER: Remove if you don't use pre-computed proposals. + # Most users would not need this feature. + if self.proposal_topk is not None: + utils.transform_proposals( + dataset_dict, image_shape, transforms, proposal_topk=self.proposal_topk + ) + + if not self.is_train: + # USER: Modify this if you want to keep them for some reason. + dataset_dict.pop("annotations", None) + dataset_dict.pop("sem_seg_file_name", None) + return dataset_dict + + if "annotations" in dataset_dict: + self._transform_annotations(dataset_dict, transforms, image_shape) + + return dataset_dict diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/README.md b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/README.md new file mode 100644 index 0000000..9fb3e4f --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/README.md @@ -0,0 +1,9 @@ + + +### Common Datasets + +The dataset implemented here do not need to load the data into the final format. +It should provide the minimal data structure needed to use the dataset, so it can be very efficient. 
+ +For example, for an image dataset, just provide the file names and labels, but don't read the images. +Let the downstream decide how to read. diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/__init__.py new file mode 100644 index 0000000..a44bedc --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/__init__.py @@ -0,0 +1,9 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from .coco import load_coco_json, load_sem_seg, register_coco_instances, convert_to_coco_json +from .coco_panoptic import register_coco_panoptic, register_coco_panoptic_separated +from .lvis import load_lvis_json, register_lvis_instances, get_lvis_instances_meta +from .pascal_voc import load_voc_instances, register_pascal_voc +from . import builtin as _builtin # ensure the builtin datasets are registered + + +__all__ = [k for k in globals().keys() if not k.startswith("_")] diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/builtin.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/builtin.py new file mode 100644 index 0000000..c3a68aa --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/builtin.py @@ -0,0 +1,259 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. + + +""" +This file registers pre-defined datasets at hard-coded paths, and their metadata. + +We hard-code metadata for common datasets. This will enable: +1. Consistency check when loading the datasets +2. Use models on these standard datasets directly and run demos, + without having to download the dataset annotations + +We hard-code some paths to the dataset that's assumed to +exist in "./datasets/". + +Users SHOULD NOT use this file to create new dataset / metadata for new dataset. +To add new dataset, refer to the tutorial "docs/DATASETS.md". 
+""" + +import os + +from detectron2.data import DatasetCatalog, MetadataCatalog + +from .builtin_meta import ADE20K_SEM_SEG_CATEGORIES, _get_builtin_metadata +from .cityscapes import load_cityscapes_instances, load_cityscapes_semantic +from .cityscapes_panoptic import register_all_cityscapes_panoptic +from .coco import load_sem_seg, register_coco_instances +from .coco_panoptic import register_coco_panoptic, register_coco_panoptic_separated +from .lvis import get_lvis_instances_meta, register_lvis_instances +from .pascal_voc import register_pascal_voc + +# ==== Predefined datasets and splits for COCO ========== + +_PREDEFINED_SPLITS_COCO = {} +_PREDEFINED_SPLITS_COCO["coco"] = { + "coco_2014_train": ("coco/train2014", "coco/annotations/instances_train2014.json"), + "coco_2014_val": ("coco/val2014", "coco/annotations/instances_val2014.json"), + "coco_2014_minival": ("coco/val2014", "coco/annotations/instances_minival2014.json"), + "coco_2014_valminusminival": ( + "coco/val2014", + "coco/annotations/instances_valminusminival2014.json", + ), + "coco_2017_train": ("coco/train2017", "coco/annotations/instances_train2017.json"), + "coco_2017_val": ("coco/val2017", "coco/annotations/instances_val2017.json"), + "coco_2017_test": ("coco/test2017", "coco/annotations/image_info_test2017.json"), + "coco_2017_test-dev": ("coco/test2017", "coco/annotations/image_info_test-dev2017.json"), + "coco_2017_val_100": ("coco/val2017", "coco/annotations/instances_val2017_100.json"), +} + +_PREDEFINED_SPLITS_COCO["coco_person"] = { + "keypoints_coco_2014_train": ( + "coco/train2014", + "coco/annotations/person_keypoints_train2014.json", + ), + "keypoints_coco_2014_val": ("coco/val2014", "coco/annotations/person_keypoints_val2014.json"), + "keypoints_coco_2014_minival": ( + "coco/val2014", + "coco/annotations/person_keypoints_minival2014.json", + ), + "keypoints_coco_2014_valminusminival": ( + "coco/val2014", + "coco/annotations/person_keypoints_valminusminival2014.json", + ), + "keypoints_coco_2017_train": ( + "coco/train2017", + "coco/annotations/person_keypoints_train2017.json", + ), + "keypoints_coco_2017_val": ("coco/val2017", "coco/annotations/person_keypoints_val2017.json"), + "keypoints_coco_2017_val_100": ( + "coco/val2017", + "coco/annotations/person_keypoints_val2017_100.json", + ), +} + + +_PREDEFINED_SPLITS_COCO_PANOPTIC = { + "coco_2017_train_panoptic": ( + # This is the original panoptic annotation directory + "coco/panoptic_train2017", + "coco/annotations/panoptic_train2017.json", + # This directory contains semantic annotations that are + # converted from panoptic annotations. + # It is used by PanopticFPN. + # You can use the script at detectron2/datasets/prepare_panoptic_fpn.py + # to create these directories. + "coco/panoptic_stuff_train2017", + ), + "coco_2017_val_panoptic": ( + "coco/panoptic_val2017", + "coco/annotations/panoptic_val2017.json", + "coco/panoptic_stuff_val2017", + ), + "coco_2017_val_100_panoptic": ( + "coco/panoptic_val2017_100", + "coco/annotations/panoptic_val2017_100.json", + "coco/panoptic_stuff_val2017_100", + ), +} + + +def register_all_coco(root): + for dataset_name, splits_per_dataset in _PREDEFINED_SPLITS_COCO.items(): + for key, (image_root, json_file) in splits_per_dataset.items(): + # Assume pre-defined datasets live in `./datasets`. 
+ register_coco_instances( + key, + _get_builtin_metadata(dataset_name), + os.path.join(root, json_file) if "://" not in json_file else json_file, + os.path.join(root, image_root), + ) + + for ( + prefix, + (panoptic_root, panoptic_json, semantic_root), + ) in _PREDEFINED_SPLITS_COCO_PANOPTIC.items(): + prefix_instances = prefix[: -len("_panoptic")] + instances_meta = MetadataCatalog.get(prefix_instances) + image_root, instances_json = instances_meta.image_root, instances_meta.json_file + # The "separated" version of COCO panoptic segmentation dataset, + # e.g. used by Panoptic FPN + register_coco_panoptic_separated( + prefix, + _get_builtin_metadata("coco_panoptic_separated"), + image_root, + os.path.join(root, panoptic_root), + os.path.join(root, panoptic_json), + os.path.join(root, semantic_root), + instances_json, + ) + # The "standard" version of COCO panoptic segmentation dataset, + # e.g. used by Panoptic-DeepLab + register_coco_panoptic( + prefix, + _get_builtin_metadata("coco_panoptic_standard"), + image_root, + os.path.join(root, panoptic_root), + os.path.join(root, panoptic_json), + instances_json, + ) + + +# ==== Predefined datasets and splits for LVIS ========== + + +_PREDEFINED_SPLITS_LVIS = { + "lvis_v1": { + "lvis_v1_train": ("coco/", "lvis/lvis_v1_train.json"), + "lvis_v1_val": ("coco/", "lvis/lvis_v1_val.json"), + "lvis_v1_test_dev": ("coco/", "lvis/lvis_v1_image_info_test_dev.json"), + "lvis_v1_test_challenge": ("coco/", "lvis/lvis_v1_image_info_test_challenge.json"), + }, + "lvis_v0.5": { + "lvis_v0.5_train": ("coco/", "lvis/lvis_v0.5_train.json"), + "lvis_v0.5_val": ("coco/", "lvis/lvis_v0.5_val.json"), + "lvis_v0.5_val_rand_100": ("coco/", "lvis/lvis_v0.5_val_rand_100.json"), + "lvis_v0.5_test": ("coco/", "lvis/lvis_v0.5_image_info_test.json"), + }, + "lvis_v0.5_cocofied": { + "lvis_v0.5_train_cocofied": ("coco/", "lvis/lvis_v0.5_train_cocofied.json"), + "lvis_v0.5_val_cocofied": ("coco/", "lvis/lvis_v0.5_val_cocofied.json"), + }, +} + + +def register_all_lvis(root): + for dataset_name, splits_per_dataset in _PREDEFINED_SPLITS_LVIS.items(): + for key, (image_root, json_file) in splits_per_dataset.items(): + register_lvis_instances( + key, + get_lvis_instances_meta(dataset_name), + os.path.join(root, json_file) if "://" not in json_file else json_file, + os.path.join(root, image_root), + ) + + +# ==== Predefined splits for raw cityscapes images =========== +_RAW_CITYSCAPES_SPLITS = { + "cityscapes_fine_{task}_train": ("cityscapes/leftImg8bit/train/", "cityscapes/gtFine/train/"), + "cityscapes_fine_{task}_val": ("cityscapes/leftImg8bit/val/", "cityscapes/gtFine/val/"), + "cityscapes_fine_{task}_test": ("cityscapes/leftImg8bit/test/", "cityscapes/gtFine/test/"), +} + + +def register_all_cityscapes(root): + for key, (image_dir, gt_dir) in _RAW_CITYSCAPES_SPLITS.items(): + meta = _get_builtin_metadata("cityscapes") + image_dir = os.path.join(root, image_dir) + gt_dir = os.path.join(root, gt_dir) + + inst_key = key.format(task="instance_seg") + DatasetCatalog.register( + inst_key, + lambda x=image_dir, y=gt_dir: load_cityscapes_instances( + x, y, from_json=True, to_polygons=True + ), + ) + MetadataCatalog.get(inst_key).set( + image_dir=image_dir, gt_dir=gt_dir, evaluator_type="cityscapes_instance", **meta + ) + + sem_key = key.format(task="sem_seg") + DatasetCatalog.register( + sem_key, lambda x=image_dir, y=gt_dir: load_cityscapes_semantic(x, y) + ) + MetadataCatalog.get(sem_key).set( + image_dir=image_dir, + gt_dir=gt_dir, + evaluator_type="cityscapes_sem_seg", + 
ignore_label=255, + **meta, + ) + + +# ==== Predefined splits for PASCAL VOC =========== +def register_all_pascal_voc(root): + SPLITS = [ + ("voc_2007_trainval", "VOC2007", "trainval"), + ("voc_2007_train", "VOC2007", "train"), + ("voc_2007_val", "VOC2007", "val"), + ("voc_2007_test", "VOC2007", "test"), + ("voc_2012_trainval", "VOC2012", "trainval"), + ("voc_2012_train", "VOC2012", "train"), + ("voc_2012_val", "VOC2012", "val"), + ] + for name, dirname, split in SPLITS: + year = 2007 if "2007" in name else 2012 + register_pascal_voc(name, os.path.join(root, dirname), split, year) + MetadataCatalog.get(name).evaluator_type = "pascal_voc" + + +def register_all_ade20k(root): + root = os.path.join(root, "ADEChallengeData2016") + for name, dirname in [("train", "training"), ("val", "validation")]: + image_dir = os.path.join(root, "images", dirname) + gt_dir = os.path.join(root, "annotations_detectron2", dirname) + name = f"ade20k_sem_seg_{name}" + DatasetCatalog.register( + name, lambda x=image_dir, y=gt_dir: load_sem_seg(y, x, gt_ext="png", image_ext="jpg") + ) + MetadataCatalog.get(name).set( + stuff_classes=ADE20K_SEM_SEG_CATEGORIES[:], + image_root=image_dir, + sem_seg_root=gt_dir, + evaluator_type="sem_seg", + ignore_label=255, + ) + + +# True for open source; +# Internally at fb, we register them elsewhere +if __name__.endswith(".builtin"): + # Assume pre-defined datasets live in `./datasets`. + _root = os.path.expanduser(os.getenv("DETECTRON2_DATASETS", "datasets")) + register_all_coco(_root) + register_all_lvis(_root) + register_all_cityscapes(_root) + register_all_cityscapes_panoptic(_root) + register_all_pascal_voc(_root) + register_all_ade20k(_root) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/builtin_meta.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/builtin_meta.py new file mode 100644 index 0000000..63c7a1a --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/builtin_meta.py @@ -0,0 +1,350 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. + +""" +Note: +For your custom dataset, there is no need to hard-code metadata anywhere in the code. +For example, for COCO-format dataset, metadata will be obtained automatically +when calling `load_coco_json`. For other dataset, metadata may also be obtained in other ways +during loading. + +However, we hard-coded metadata for a few common dataset here. +The only goal is to allow users who don't have these dataset to use pre-trained models. +Users don't have to download a COCO json (which contains metadata), in order to visualize a +COCO model (with correct class names and colors). 
+""" + + +# All coco categories, together with their nice-looking visualization colors +# It's from https://github.com/cocodataset/panopticapi/blob/master/panoptic_coco_categories.json +COCO_CATEGORIES = [ + {"color": [220, 20, 60], "isthing": 1, "id": 1, "name": "person"}, + {"color": [119, 11, 32], "isthing": 1, "id": 2, "name": "bicycle"}, + {"color": [0, 0, 142], "isthing": 1, "id": 3, "name": "car"}, + {"color": [0, 0, 230], "isthing": 1, "id": 4, "name": "motorcycle"}, + {"color": [106, 0, 228], "isthing": 1, "id": 5, "name": "airplane"}, + {"color": [0, 60, 100], "isthing": 1, "id": 6, "name": "bus"}, + {"color": [0, 80, 100], "isthing": 1, "id": 7, "name": "train"}, + {"color": [0, 0, 70], "isthing": 1, "id": 8, "name": "truck"}, + {"color": [0, 0, 192], "isthing": 1, "id": 9, "name": "boat"}, + {"color": [250, 170, 30], "isthing": 1, "id": 10, "name": "traffic light"}, + {"color": [100, 170, 30], "isthing": 1, "id": 11, "name": "fire hydrant"}, + {"color": [220, 220, 0], "isthing": 1, "id": 13, "name": "stop sign"}, + {"color": [175, 116, 175], "isthing": 1, "id": 14, "name": "parking meter"}, + {"color": [250, 0, 30], "isthing": 1, "id": 15, "name": "bench"}, + {"color": [165, 42, 42], "isthing": 1, "id": 16, "name": "bird"}, + {"color": [255, 77, 255], "isthing": 1, "id": 17, "name": "cat"}, + {"color": [0, 226, 252], "isthing": 1, "id": 18, "name": "dog"}, + {"color": [182, 182, 255], "isthing": 1, "id": 19, "name": "horse"}, + {"color": [0, 82, 0], "isthing": 1, "id": 20, "name": "sheep"}, + {"color": [120, 166, 157], "isthing": 1, "id": 21, "name": "cow"}, + {"color": [110, 76, 0], "isthing": 1, "id": 22, "name": "elephant"}, + {"color": [174, 57, 255], "isthing": 1, "id": 23, "name": "bear"}, + {"color": [199, 100, 0], "isthing": 1, "id": 24, "name": "zebra"}, + {"color": [72, 0, 118], "isthing": 1, "id": 25, "name": "giraffe"}, + {"color": [255, 179, 240], "isthing": 1, "id": 27, "name": "backpack"}, + {"color": [0, 125, 92], "isthing": 1, "id": 28, "name": "umbrella"}, + {"color": [209, 0, 151], "isthing": 1, "id": 31, "name": "handbag"}, + {"color": [188, 208, 182], "isthing": 1, "id": 32, "name": "tie"}, + {"color": [0, 220, 176], "isthing": 1, "id": 33, "name": "suitcase"}, + {"color": [255, 99, 164], "isthing": 1, "id": 34, "name": "frisbee"}, + {"color": [92, 0, 73], "isthing": 1, "id": 35, "name": "skis"}, + {"color": [133, 129, 255], "isthing": 1, "id": 36, "name": "snowboard"}, + {"color": [78, 180, 255], "isthing": 1, "id": 37, "name": "sports ball"}, + {"color": [0, 228, 0], "isthing": 1, "id": 38, "name": "kite"}, + {"color": [174, 255, 243], "isthing": 1, "id": 39, "name": "baseball bat"}, + {"color": [45, 89, 255], "isthing": 1, "id": 40, "name": "baseball glove"}, + {"color": [134, 134, 103], "isthing": 1, "id": 41, "name": "skateboard"}, + {"color": [145, 148, 174], "isthing": 1, "id": 42, "name": "surfboard"}, + {"color": [255, 208, 186], "isthing": 1, "id": 43, "name": "tennis racket"}, + {"color": [197, 226, 255], "isthing": 1, "id": 44, "name": "bottle"}, + {"color": [171, 134, 1], "isthing": 1, "id": 46, "name": "wine glass"}, + {"color": [109, 63, 54], "isthing": 1, "id": 47, "name": "cup"}, + {"color": [207, 138, 255], "isthing": 1, "id": 48, "name": "fork"}, + {"color": [151, 0, 95], "isthing": 1, "id": 49, "name": "knife"}, + {"color": [9, 80, 61], "isthing": 1, "id": 50, "name": "spoon"}, + {"color": [84, 105, 51], "isthing": 1, "id": 51, "name": "bowl"}, + {"color": [74, 65, 105], "isthing": 1, "id": 52, "name": "banana"}, + {"color": [166, 196, 
102], "isthing": 1, "id": 53, "name": "apple"}, + {"color": [208, 195, 210], "isthing": 1, "id": 54, "name": "sandwich"}, + {"color": [255, 109, 65], "isthing": 1, "id": 55, "name": "orange"}, + {"color": [0, 143, 149], "isthing": 1, "id": 56, "name": "broccoli"}, + {"color": [179, 0, 194], "isthing": 1, "id": 57, "name": "carrot"}, + {"color": [209, 99, 106], "isthing": 1, "id": 58, "name": "hot dog"}, + {"color": [5, 121, 0], "isthing": 1, "id": 59, "name": "pizza"}, + {"color": [227, 255, 205], "isthing": 1, "id": 60, "name": "donut"}, + {"color": [147, 186, 208], "isthing": 1, "id": 61, "name": "cake"}, + {"color": [153, 69, 1], "isthing": 1, "id": 62, "name": "chair"}, + {"color": [3, 95, 161], "isthing": 1, "id": 63, "name": "couch"}, + {"color": [163, 255, 0], "isthing": 1, "id": 64, "name": "potted plant"}, + {"color": [119, 0, 170], "isthing": 1, "id": 65, "name": "bed"}, + {"color": [0, 182, 199], "isthing": 1, "id": 67, "name": "dining table"}, + {"color": [0, 165, 120], "isthing": 1, "id": 70, "name": "toilet"}, + {"color": [183, 130, 88], "isthing": 1, "id": 72, "name": "tv"}, + {"color": [95, 32, 0], "isthing": 1, "id": 73, "name": "laptop"}, + {"color": [130, 114, 135], "isthing": 1, "id": 74, "name": "mouse"}, + {"color": [110, 129, 133], "isthing": 1, "id": 75, "name": "remote"}, + {"color": [166, 74, 118], "isthing": 1, "id": 76, "name": "keyboard"}, + {"color": [219, 142, 185], "isthing": 1, "id": 77, "name": "cell phone"}, + {"color": [79, 210, 114], "isthing": 1, "id": 78, "name": "microwave"}, + {"color": [178, 90, 62], "isthing": 1, "id": 79, "name": "oven"}, + {"color": [65, 70, 15], "isthing": 1, "id": 80, "name": "toaster"}, + {"color": [127, 167, 115], "isthing": 1, "id": 81, "name": "sink"}, + {"color": [59, 105, 106], "isthing": 1, "id": 82, "name": "refrigerator"}, + {"color": [142, 108, 45], "isthing": 1, "id": 84, "name": "book"}, + {"color": [196, 172, 0], "isthing": 1, "id": 85, "name": "clock"}, + {"color": [95, 54, 80], "isthing": 1, "id": 86, "name": "vase"}, + {"color": [128, 76, 255], "isthing": 1, "id": 87, "name": "scissors"}, + {"color": [201, 57, 1], "isthing": 1, "id": 88, "name": "teddy bear"}, + {"color": [246, 0, 122], "isthing": 1, "id": 89, "name": "hair drier"}, + {"color": [191, 162, 208], "isthing": 1, "id": 90, "name": "toothbrush"}, + {"color": [255, 255, 128], "isthing": 0, "id": 92, "name": "banner"}, + {"color": [147, 211, 203], "isthing": 0, "id": 93, "name": "blanket"}, + {"color": [150, 100, 100], "isthing": 0, "id": 95, "name": "bridge"}, + {"color": [168, 171, 172], "isthing": 0, "id": 100, "name": "cardboard"}, + {"color": [146, 112, 198], "isthing": 0, "id": 107, "name": "counter"}, + {"color": [210, 170, 100], "isthing": 0, "id": 109, "name": "curtain"}, + {"color": [92, 136, 89], "isthing": 0, "id": 112, "name": "door-stuff"}, + {"color": [218, 88, 184], "isthing": 0, "id": 118, "name": "floor-wood"}, + {"color": [241, 129, 0], "isthing": 0, "id": 119, "name": "flower"}, + {"color": [217, 17, 255], "isthing": 0, "id": 122, "name": "fruit"}, + {"color": [124, 74, 181], "isthing": 0, "id": 125, "name": "gravel"}, + {"color": [70, 70, 70], "isthing": 0, "id": 128, "name": "house"}, + {"color": [255, 228, 255], "isthing": 0, "id": 130, "name": "light"}, + {"color": [154, 208, 0], "isthing": 0, "id": 133, "name": "mirror-stuff"}, + {"color": [193, 0, 92], "isthing": 0, "id": 138, "name": "net"}, + {"color": [76, 91, 113], "isthing": 0, "id": 141, "name": "pillow"}, + {"color": [255, 180, 195], "isthing": 0, "id": 144, "name": 
"platform"}, + {"color": [106, 154, 176], "isthing": 0, "id": 145, "name": "playingfield"}, + {"color": [230, 150, 140], "isthing": 0, "id": 147, "name": "railroad"}, + {"color": [60, 143, 255], "isthing": 0, "id": 148, "name": "river"}, + {"color": [128, 64, 128], "isthing": 0, "id": 149, "name": "road"}, + {"color": [92, 82, 55], "isthing": 0, "id": 151, "name": "roof"}, + {"color": [254, 212, 124], "isthing": 0, "id": 154, "name": "sand"}, + {"color": [73, 77, 174], "isthing": 0, "id": 155, "name": "sea"}, + {"color": [255, 160, 98], "isthing": 0, "id": 156, "name": "shelf"}, + {"color": [255, 255, 255], "isthing": 0, "id": 159, "name": "snow"}, + {"color": [104, 84, 109], "isthing": 0, "id": 161, "name": "stairs"}, + {"color": [169, 164, 131], "isthing": 0, "id": 166, "name": "tent"}, + {"color": [225, 199, 255], "isthing": 0, "id": 168, "name": "towel"}, + {"color": [137, 54, 74], "isthing": 0, "id": 171, "name": "wall-brick"}, + {"color": [135, 158, 223], "isthing": 0, "id": 175, "name": "wall-stone"}, + {"color": [7, 246, 231], "isthing": 0, "id": 176, "name": "wall-tile"}, + {"color": [107, 255, 200], "isthing": 0, "id": 177, "name": "wall-wood"}, + {"color": [58, 41, 149], "isthing": 0, "id": 178, "name": "water-other"}, + {"color": [183, 121, 142], "isthing": 0, "id": 180, "name": "window-blind"}, + {"color": [255, 73, 97], "isthing": 0, "id": 181, "name": "window-other"}, + {"color": [107, 142, 35], "isthing": 0, "id": 184, "name": "tree-merged"}, + {"color": [190, 153, 153], "isthing": 0, "id": 185, "name": "fence-merged"}, + {"color": [146, 139, 141], "isthing": 0, "id": 186, "name": "ceiling-merged"}, + {"color": [70, 130, 180], "isthing": 0, "id": 187, "name": "sky-other-merged"}, + {"color": [134, 199, 156], "isthing": 0, "id": 188, "name": "cabinet-merged"}, + {"color": [209, 226, 140], "isthing": 0, "id": 189, "name": "table-merged"}, + {"color": [96, 36, 108], "isthing": 0, "id": 190, "name": "floor-other-merged"}, + {"color": [96, 96, 96], "isthing": 0, "id": 191, "name": "pavement-merged"}, + {"color": [64, 170, 64], "isthing": 0, "id": 192, "name": "mountain-merged"}, + {"color": [152, 251, 152], "isthing": 0, "id": 193, "name": "grass-merged"}, + {"color": [208, 229, 228], "isthing": 0, "id": 194, "name": "dirt-merged"}, + {"color": [206, 186, 171], "isthing": 0, "id": 195, "name": "paper-merged"}, + {"color": [152, 161, 64], "isthing": 0, "id": 196, "name": "food-other-merged"}, + {"color": [116, 112, 0], "isthing": 0, "id": 197, "name": "building-other-merged"}, + {"color": [0, 114, 143], "isthing": 0, "id": 198, "name": "rock-merged"}, + {"color": [102, 102, 156], "isthing": 0, "id": 199, "name": "wall-other-merged"}, + {"color": [250, 141, 255], "isthing": 0, "id": 200, "name": "rug-merged"}, +] + +# fmt: off +COCO_PERSON_KEYPOINT_NAMES = ( + "nose", + "left_eye", "right_eye", + "left_ear", "right_ear", + "left_shoulder", "right_shoulder", + "left_elbow", "right_elbow", + "left_wrist", "right_wrist", + "left_hip", "right_hip", + "left_knee", "right_knee", + "left_ankle", "right_ankle", +) +# fmt: on + +# Pairs of keypoints that should be exchanged under horizontal flipping +COCO_PERSON_KEYPOINT_FLIP_MAP = ( + ("left_eye", "right_eye"), + ("left_ear", "right_ear"), + ("left_shoulder", "right_shoulder"), + ("left_elbow", "right_elbow"), + ("left_wrist", "right_wrist"), + ("left_hip", "right_hip"), + ("left_knee", "right_knee"), + ("left_ankle", "right_ankle"), +) + +# rules for pairs of keypoints to draw a line between, and the line color to use. 
+KEYPOINT_CONNECTION_RULES = [ + # face + ("left_ear", "left_eye", (102, 204, 255)), + ("right_ear", "right_eye", (51, 153, 255)), + ("left_eye", "nose", (102, 0, 204)), + ("nose", "right_eye", (51, 102, 255)), + # upper-body + ("left_shoulder", "right_shoulder", (255, 128, 0)), + ("left_shoulder", "left_elbow", (153, 255, 204)), + ("right_shoulder", "right_elbow", (128, 229, 255)), + ("left_elbow", "left_wrist", (153, 255, 153)), + ("right_elbow", "right_wrist", (102, 255, 224)), + # lower-body + ("left_hip", "right_hip", (255, 102, 0)), + ("left_hip", "left_knee", (255, 255, 77)), + ("right_hip", "right_knee", (153, 255, 204)), + ("left_knee", "left_ankle", (191, 255, 128)), + ("right_knee", "right_ankle", (255, 195, 77)), +] + +# All Cityscapes categories, together with their nice-looking visualization colors +# It's from https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/helpers/labels.py # noqa +CITYSCAPES_CATEGORIES = [ + {"color": (128, 64, 128), "isthing": 0, "id": 7, "trainId": 0, "name": "road"}, + {"color": (244, 35, 232), "isthing": 0, "id": 8, "trainId": 1, "name": "sidewalk"}, + {"color": (70, 70, 70), "isthing": 0, "id": 11, "trainId": 2, "name": "building"}, + {"color": (102, 102, 156), "isthing": 0, "id": 12, "trainId": 3, "name": "wall"}, + {"color": (190, 153, 153), "isthing": 0, "id": 13, "trainId": 4, "name": "fence"}, + {"color": (153, 153, 153), "isthing": 0, "id": 17, "trainId": 5, "name": "pole"}, + {"color": (250, 170, 30), "isthing": 0, "id": 19, "trainId": 6, "name": "traffic light"}, + {"color": (220, 220, 0), "isthing": 0, "id": 20, "trainId": 7, "name": "traffic sign"}, + {"color": (107, 142, 35), "isthing": 0, "id": 21, "trainId": 8, "name": "vegetation"}, + {"color": (152, 251, 152), "isthing": 0, "id": 22, "trainId": 9, "name": "terrain"}, + {"color": (70, 130, 180), "isthing": 0, "id": 23, "trainId": 10, "name": "sky"}, + {"color": (220, 20, 60), "isthing": 1, "id": 24, "trainId": 11, "name": "person"}, + {"color": (255, 0, 0), "isthing": 1, "id": 25, "trainId": 12, "name": "rider"}, + {"color": (0, 0, 142), "isthing": 1, "id": 26, "trainId": 13, "name": "car"}, + {"color": (0, 0, 70), "isthing": 1, "id": 27, "trainId": 14, "name": "truck"}, + {"color": (0, 60, 100), "isthing": 1, "id": 28, "trainId": 15, "name": "bus"}, + {"color": (0, 80, 100), "isthing": 1, "id": 31, "trainId": 16, "name": "train"}, + {"color": (0, 0, 230), "isthing": 1, "id": 32, "trainId": 17, "name": "motorcycle"}, + {"color": (119, 11, 32), "isthing": 1, "id": 33, "trainId": 18, "name": "bicycle"}, +] + +# fmt: off +ADE20K_SEM_SEG_CATEGORIES = [ + "wall", "building", "sky", "floor", "tree", "ceiling", "road, route", "bed", "window ", "grass", "cabinet", "sidewalk, pavement", "person", "earth, ground", "door", "table", "mountain, mount", "plant", "curtain", "chair", "car", "water", "painting, picture", "sofa", "shelf", "house", "sea", "mirror", "rug", "field", "armchair", "seat", "fence", "desk", "rock, stone", "wardrobe, closet, press", "lamp", "tub", "rail", "cushion", "base, pedestal, stand", "box", "column, pillar", "signboard, sign", "chest of drawers, chest, bureau, dresser", "counter", "sand", "sink", "skyscraper", "fireplace", "refrigerator, icebox", "grandstand, covered stand", "path", "stairs", "runway", "case, display case, showcase, vitrine", "pool table, billiard table, snooker table", "pillow", "screen door, screen", "stairway, staircase", "river", "bridge, span", "bookcase", "blind, screen", "coffee table", "toilet, can, commode, crapper, pot, 
potty, stool, throne", "flower", "book", "hill", "bench", "countertop", "stove", "palm, palm tree", "kitchen island", "computer", "swivel chair", "boat", "bar", "arcade machine", "hovel, hut, hutch, shack, shanty", "bus", "towel", "light", "truck", "tower", "chandelier", "awning, sunshade, sunblind", "street lamp", "booth", "tv", "plane", "dirt track", "clothes", "pole", "land, ground, soil", "bannister, banister, balustrade, balusters, handrail", "escalator, moving staircase, moving stairway", "ottoman, pouf, pouffe, puff, hassock", "bottle", "buffet, counter, sideboard", "poster, posting, placard, notice, bill, card", "stage", "van", "ship", "fountain", "conveyer belt, conveyor belt, conveyer, conveyor, transporter", "canopy", "washer, automatic washer, washing machine", "plaything, toy", "pool", "stool", "barrel, cask", "basket, handbasket", "falls", "tent", "bag", "minibike, motorbike", "cradle", "oven", "ball", "food, solid food", "step, stair", "tank, storage tank", "trade name", "microwave", "pot", "animal", "bicycle", "lake", "dishwasher", "screen", "blanket, cover", "sculpture", "hood, exhaust hood", "sconce", "vase", "traffic light", "tray", "trash can", "fan", "pier", "crt screen", "plate", "monitor", "bulletin board", "shower", "radiator", "glass, drinking glass", "clock", "flag", # noqa +] +# After processed by `prepare_ade20k_sem_seg.py`, id 255 means ignore +# fmt: on + + +def _get_coco_instances_meta(): + thing_ids = [k["id"] for k in COCO_CATEGORIES if k["isthing"] == 1] + thing_colors = [k["color"] for k in COCO_CATEGORIES if k["isthing"] == 1] + assert len(thing_ids) == 80, len(thing_ids) + # Mapping from the incontiguous COCO category id to an id in [0, 79] + thing_dataset_id_to_contiguous_id = {k: i for i, k in enumerate(thing_ids)} + thing_classes = [k["name"] for k in COCO_CATEGORIES if k["isthing"] == 1] + ret = { + "thing_dataset_id_to_contiguous_id": thing_dataset_id_to_contiguous_id, + "thing_classes": thing_classes, + "thing_colors": thing_colors, + } + return ret + + +def _get_coco_panoptic_separated_meta(): + """ + Returns metadata for "separated" version of the panoptic segmentation dataset. + """ + stuff_ids = [k["id"] for k in COCO_CATEGORIES if k["isthing"] == 0] + assert len(stuff_ids) == 53, len(stuff_ids) + + # For semantic segmentation, this mapping maps from contiguous stuff id + # (in [0, 53], used in models) to ids in the dataset (used for processing results) + # The id 0 is mapped to an extra category "thing". 
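+    # Illustrative sketch of the comprehension below: the first stuff entries in
+    # COCO_CATEGORIES ("banner" 92, "blanket" 93, "bridge" 95) map to contiguous
+    # ids {92: 1, 93: 2, 95: 3}, leaving contiguous id 0 free for the merged
+    # "thing" category added right after.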
+    stuff_dataset_id_to_contiguous_id = {k: i + 1 for i, k in enumerate(stuff_ids)}
+    # When converting COCO panoptic annotations to semantic annotations
+    # we label the "thing" category as 0
+    stuff_dataset_id_to_contiguous_id[0] = 0
+
+    # 54 names for COCO stuff categories (including "things")
+    stuff_classes = ["things"] + [
+        k["name"].replace("-other", "").replace("-merged", "")
+        for k in COCO_CATEGORIES
+        if k["isthing"] == 0
+    ]
+
+    # NOTE: I randomly picked a color for things
+    stuff_colors = [[82, 18, 128]] + [k["color"] for k in COCO_CATEGORIES if k["isthing"] == 0]
+    ret = {
+        "stuff_dataset_id_to_contiguous_id": stuff_dataset_id_to_contiguous_id,
+        "stuff_classes": stuff_classes,
+        "stuff_colors": stuff_colors,
+    }
+    ret.update(_get_coco_instances_meta())
+    return ret
+
+
+def _get_builtin_metadata(dataset_name):
+    if dataset_name == "coco":
+        return _get_coco_instances_meta()
+    if dataset_name == "coco_panoptic_separated":
+        return _get_coco_panoptic_separated_meta()
+    elif dataset_name == "coco_panoptic_standard":
+        meta = {}
+        # The following metadata maps contiguous id from [0, #thing categories +
+        # #stuff categories) to their names and colors. We have to replicate the
+        # same name and color under "thing_*" and "stuff_*" because the current
+        # visualization function in D2 handles thing and stuff classes differently
+        # due to some heuristic used in Panoptic FPN. We keep the same naming to
+        # enable reusing existing visualization functions.
+        thing_classes = [k["name"] for k in COCO_CATEGORIES]
+        thing_colors = [k["color"] for k in COCO_CATEGORIES]
+        stuff_classes = [k["name"] for k in COCO_CATEGORIES]
+        stuff_colors = [k["color"] for k in COCO_CATEGORIES]
+
+        meta["thing_classes"] = thing_classes
+        meta["thing_colors"] = thing_colors
+        meta["stuff_classes"] = stuff_classes
+        meta["stuff_colors"] = stuff_colors
+
+        # Convert category ids for training:
+        # category id: like semantic segmentation, it is the class id for each
+        # pixel. Since there are some classes not used in evaluation, the category
+        # id is not always contiguous and thus we have two sets of category ids:
+        #   - original category id: category id in the original dataset, mainly
+        #     used for evaluation.
+        #   - contiguous category id: [0, #classes), in order to train the linear
+        #     softmax classifier.
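+        # Illustrative sketch (hypothetical ids, not the real COCO list): if
+        # COCO_CATEGORIES held dataset ids [1, 2, 4] with the last one a stuff
+        # class, the loop below would produce the thing map {1: 0, 2: 1} and the
+        # stuff map {4: 2}, i.e. each dataset id is sent to its index in the list.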
+ thing_dataset_id_to_contiguous_id = {} + stuff_dataset_id_to_contiguous_id = {} + + for i, cat in enumerate(COCO_CATEGORIES): + if cat["isthing"]: + thing_dataset_id_to_contiguous_id[cat["id"]] = i + else: + stuff_dataset_id_to_contiguous_id[cat["id"]] = i + + meta["thing_dataset_id_to_contiguous_id"] = thing_dataset_id_to_contiguous_id + meta["stuff_dataset_id_to_contiguous_id"] = stuff_dataset_id_to_contiguous_id + + return meta + elif dataset_name == "coco_person": + return { + "thing_classes": ["person"], + "keypoint_names": COCO_PERSON_KEYPOINT_NAMES, + "keypoint_flip_map": COCO_PERSON_KEYPOINT_FLIP_MAP, + "keypoint_connection_rules": KEYPOINT_CONNECTION_RULES, + } + elif dataset_name == "cityscapes": + # fmt: off + CITYSCAPES_THING_CLASSES = [ + "person", "rider", "car", "truck", + "bus", "train", "motorcycle", "bicycle", + ] + CITYSCAPES_STUFF_CLASSES = [ + "road", "sidewalk", "building", "wall", "fence", "pole", "traffic light", + "traffic sign", "vegetation", "terrain", "sky", "person", "rider", "car", + "truck", "bus", "train", "motorcycle", "bicycle", + ] + # fmt: on + return { + "thing_classes": CITYSCAPES_THING_CLASSES, + "stuff_classes": CITYSCAPES_STUFF_CLASSES, + } + raise KeyError("No built-in metadata for dataset {}".format(dataset_name)) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/cityscapes.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/cityscapes.py new file mode 100644 index 0000000..1e84a5b --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/cityscapes.py @@ -0,0 +1,329 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import functools +import json +import logging +import multiprocessing as mp +import numpy as np +import os +from itertools import chain +import pycocotools.mask as mask_util +from PIL import Image + +from detectron2.structures import BoxMode +from detectron2.utils.comm import get_world_size +from detectron2.utils.file_io import PathManager +from detectron2.utils.logger import setup_logger + +try: + import cv2 # noqa +except ImportError: + # OpenCV is an optional dependency at the moment + pass + + +logger = logging.getLogger(__name__) + + +def _get_cityscapes_files(image_dir, gt_dir): + files = [] + # scan through the directory + cities = PathManager.ls(image_dir) + logger.info(f"{len(cities)} cities found in '{image_dir}'.") + for city in cities: + city_img_dir = os.path.join(image_dir, city) + city_gt_dir = os.path.join(gt_dir, city) + for basename in PathManager.ls(city_img_dir): + image_file = os.path.join(city_img_dir, basename) + + suffix = "leftImg8bit.png" + assert basename.endswith(suffix), basename + basename = basename[: -len(suffix)] + + instance_file = os.path.join(city_gt_dir, basename + "gtFine_instanceIds.png") + label_file = os.path.join(city_gt_dir, basename + "gtFine_labelIds.png") + json_file = os.path.join(city_gt_dir, basename + "gtFine_polygons.json") + + files.append((image_file, instance_file, label_file, json_file)) + assert len(files), "No images found in {}".format(image_dir) + for f in files[0]: + assert PathManager.isfile(f), f + return files + + +def load_cityscapes_instances(image_dir, gt_dir, from_json=True, to_polygons=True): + """ + Args: + image_dir (str): path to the raw dataset. e.g., "~/cityscapes/leftImg8bit/train". + gt_dir (str): path to the raw annotations. e.g., "~/cityscapes/gtFine/train". + from_json (bool): whether to read annotations from the raw json file or the png files. 
+        to_polygons (bool): whether to represent the segmentation as polygons
+            (COCO's format) instead of masks (cityscapes's format).
+
+    Returns:
+        list[dict]: a list of dicts in Detectron2 standard format. (See
+        `Using Custom Datasets <https://detectron2.readthedocs.io/tutorials/datasets.html>`_ )
+    """
+    if from_json:
+        assert to_polygons, (
+            "Cityscapes's json annotations are in polygon format. "
+            "Converting to mask format is not supported now."
+        )
+    files = _get_cityscapes_files(image_dir, gt_dir)
+
+    logger.info("Preprocessing cityscapes annotations ...")
+    # This is still not fast: all workers will execute duplicate work and will
+    # take up to 10 minutes on an 8-GPU server.
+    pool = mp.Pool(processes=max(mp.cpu_count() // get_world_size() // 2, 4))
+
+    ret = pool.map(
+        functools.partial(_cityscapes_files_to_dict, from_json=from_json, to_polygons=to_polygons),
+        files,
+    )
+    logger.info("Loaded {} images from {}".format(len(ret), image_dir))
+
+    # Map cityscape ids to contiguous ids
+    from cityscapesscripts.helpers.labels import labels
+
+    labels = [l for l in labels if l.hasInstances and not l.ignoreInEval]
+    dataset_id_to_contiguous_id = {l.id: idx for idx, l in enumerate(labels)}
+    for dict_per_image in ret:
+        for anno in dict_per_image["annotations"]:
+            anno["category_id"] = dataset_id_to_contiguous_id[anno["category_id"]]
+    return ret
+
+
+def load_cityscapes_semantic(image_dir, gt_dir):
+    """
+    Args:
+        image_dir (str): path to the raw dataset. e.g., "~/cityscapes/leftImg8bit/train".
+        gt_dir (str): path to the raw annotations. e.g., "~/cityscapes/gtFine/train".
+
+    Returns:
+        list[dict]: a list of dicts, each with "file_name" and
+            "sem_seg_file_name".
+    """
+    ret = []
+    # gt_dir is small and contains many small files; it makes sense to fetch it locally first
+    gt_dir = PathManager.get_local_path(gt_dir)
+    for image_file, _, label_file, json_file in _get_cityscapes_files(image_dir, gt_dir):
+        label_file = label_file.replace("labelIds", "labelTrainIds")
+
+        with PathManager.open(json_file, "r") as f:
+            jsonobj = json.load(f)
+        ret.append(
+            {
+                "file_name": image_file,
+                "sem_seg_file_name": label_file,
+                "height": jsonobj["imgHeight"],
+                "width": jsonobj["imgWidth"],
+            }
+        )
+    assert len(ret), f"No images found in {image_dir}!"
+    assert PathManager.isfile(
+        ret[0]["sem_seg_file_name"]
+    ), "Please generate labelTrainIds.png with cityscapesscripts/preparation/createTrainIdLabelImgs.py"  # noqa
+    return ret
+
+
+def _cityscapes_files_to_dict(files, from_json, to_polygons):
+    """
+    Parse cityscapes annotation files to an instance segmentation dataset dict.
+
+    Args:
+        files (tuple): consists of (image_file, instance_id_file, label_id_file, json_file)
+        from_json (bool): whether to read annotations from the raw json file or the png files.
+        to_polygons (bool): whether to represent the segmentation as polygons
+            (COCO's format) instead of masks (cityscapes's format).
+
+    Returns:
+        A dict in Detectron2 Dataset format.
+    """
+    from cityscapesscripts.helpers.labels import id2label, name2label
+
+    image_file, instance_id_file, _, json_file = files
+
+    annos = []
+
+    if from_json:
+        from shapely.geometry import MultiPolygon, Polygon
+
+        with PathManager.open(json_file, "r") as f:
+            jsonobj = json.load(f)
+        ret = {
+            "file_name": image_file,
+            "image_id": os.path.basename(image_file),
+            "height": jsonobj["imgHeight"],
+            "width": jsonobj["imgWidth"],
+        }
+
+        # `polygons_union` contains the union of all valid polygons.
+        polygons_union = Polygon()
+
+        # CityscapesScripts draw the polygons in sequential order
+        # and each polygon *overwrites* existing ones. See
+        # (https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/preparation/json2instanceImg.py)  # noqa
+        # We use reverse order, and each polygon *avoids* early ones.
+        # This will resolve the polygon overlaps in the same way as CityscapesScripts.
+        for obj in jsonobj["objects"][::-1]:
+            if "deleted" in obj:  # cityscapes data format specific
+                continue
+            label_name = obj["label"]
+
+            try:
+                label = name2label[label_name]
+            except KeyError:
+                if label_name.endswith("group"):  # crowd area
+                    label = name2label[label_name[: -len("group")]]
+                else:
+                    raise
+            if label.id < 0:  # cityscapes data format
+                continue
+
+            # Cityscapes's raw annotations use integer coordinates
+            # Therefore +0.5 here
+            poly_coord = np.asarray(obj["polygon"], dtype="f4") + 0.5
+            # CityscapesScripts uses PIL.ImageDraw.polygon to rasterize
+            # polygons for evaluation. This function operates in integer space
+            # and draws each pixel whose center falls into the polygon.
+            # Therefore it draws a polygon which is 0.5 "fatter" in expectation.
+            # We therefore dilate the input polygon by 0.5 as our input.
+            poly = Polygon(poly_coord).buffer(0.5, resolution=4)
+
+            if not label.hasInstances or label.ignoreInEval:
+                # even if we won't store the polygon it still contributes to overlaps resolution
+                polygons_union = polygons_union.union(poly)
+                continue
+
+            # Take non-overlapping part of the polygon
+            poly_wo_overlaps = poly.difference(polygons_union)
+            if poly_wo_overlaps.is_empty:
+                continue
+            polygons_union = polygons_union.union(poly)
+
+            anno = {}
+            anno["iscrowd"] = label_name.endswith("group")
+            anno["category_id"] = label.id
+
+            if isinstance(poly_wo_overlaps, Polygon):
+                poly_list = [poly_wo_overlaps]
+            elif isinstance(poly_wo_overlaps, MultiPolygon):
+                poly_list = poly_wo_overlaps.geoms
+            else:
+                raise NotImplementedError("Unknown geometric structure {}".format(poly_wo_overlaps))
+
+            poly_coord = []
+            for poly_el in poly_list:
+                # COCO API can work only with exterior boundaries now, hence we store only them.
+                # TODO: store both exterior and interior boundaries once other parts of the
+                # codebase support holes in polygons.
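+                # Note: `chain(*poly_el.exterior.coords)` flattens the exterior
+                # ring [(x1, y1), (x2, y2), ...] into the COCO polygon layout
+                # [x1, y1, x2, y2, ...].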
+                poly_coord.append(list(chain(*poly_el.exterior.coords)))
+            anno["segmentation"] = poly_coord
+            (xmin, ymin, xmax, ymax) = poly_wo_overlaps.bounds
+
+            anno["bbox"] = (xmin, ymin, xmax, ymax)
+            anno["bbox_mode"] = BoxMode.XYXY_ABS
+
+            annos.append(anno)
+    else:
+        # See also the official annotation parsing scripts at
+        # https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/evaluation/instances2dict.py  # noqa
+        with PathManager.open(instance_id_file, "rb") as f:
+            inst_image = np.asarray(Image.open(f), order="F")
+        # ids < 24 are stuff labels (filtering them first is about 5% faster)
+        flattened_ids = np.unique(inst_image[inst_image >= 24])
+
+        ret = {
+            "file_name": image_file,
+            "image_id": os.path.basename(image_file),
+            "height": inst_image.shape[0],
+            "width": inst_image.shape[1],
+        }
+
+        for instance_id in flattened_ids:
+            # For non-crowd annotations, instance_id // 1000 is the label_id
+            # Crowd annotations have <1000 instance ids
+            label_id = instance_id // 1000 if instance_id >= 1000 else instance_id
+            label = id2label[label_id]
+            if not label.hasInstances or label.ignoreInEval:
+                continue
+
+            anno = {}
+            anno["iscrowd"] = instance_id < 1000
+            anno["category_id"] = label.id
+
+            mask = np.asarray(inst_image == instance_id, dtype=np.uint8, order="F")
+
+            inds = np.nonzero(mask)
+            ymin, ymax = inds[0].min(), inds[0].max()
+            xmin, xmax = inds[1].min(), inds[1].max()
+            anno["bbox"] = (xmin, ymin, xmax, ymax)
+            if xmax <= xmin or ymax <= ymin:
+                continue
+            anno["bbox_mode"] = BoxMode.XYXY_ABS
+            if to_polygons:
+                # This conversion comes from D4809743 and D5171122,
+                # when Mask-RCNN was first developed.
+                contours = cv2.findContours(mask.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_NONE)[
+                    -2
+                ]
+                polygons = [c.reshape(-1).tolist() for c in contours if len(c) >= 3]
+                # OpenCV can produce invalid polygons
+                if len(polygons) == 0:
+                    continue
+                anno["segmentation"] = polygons
+            else:
+                anno["segmentation"] = mask_util.encode(mask[:, :, None])[0]
+            annos.append(anno)
+    ret["annotations"] = annos
+    return ret
+
+
+if __name__ == "__main__":
+    """
+    Test the cityscapes dataset loader.
+ + Usage: + python -m detectron2.data.datasets.cityscapes \ + cityscapes/leftImg8bit/train cityscapes/gtFine/train + """ + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("image_dir") + parser.add_argument("gt_dir") + parser.add_argument("--type", choices=["instance", "semantic"], default="instance") + args = parser.parse_args() + from detectron2.data.catalog import Metadata + from detectron2.utils.visualizer import Visualizer + from cityscapesscripts.helpers.labels import labels + + logger = setup_logger(name=__name__) + + dirname = "cityscapes-data-vis" + os.makedirs(dirname, exist_ok=True) + + if args.type == "instance": + dicts = load_cityscapes_instances( + args.image_dir, args.gt_dir, from_json=True, to_polygons=True + ) + logger.info("Done loading {} samples.".format(len(dicts))) + + thing_classes = [k.name for k in labels if k.hasInstances and not k.ignoreInEval] + meta = Metadata().set(thing_classes=thing_classes) + + else: + dicts = load_cityscapes_semantic(args.image_dir, args.gt_dir) + logger.info("Done loading {} samples.".format(len(dicts))) + + stuff_classes = [k.name for k in labels if k.trainId != 255] + stuff_colors = [k.color for k in labels if k.trainId != 255] + meta = Metadata().set(stuff_classes=stuff_classes, stuff_colors=stuff_colors) + + for d in dicts: + img = np.array(Image.open(PathManager.open(d["file_name"], "rb"))) + visualizer = Visualizer(img, metadata=meta) + vis = visualizer.draw_dataset_dict(d) + # cv2.imshow("a", vis.get_image()[:, :, ::-1]) + # cv2.waitKey() + fpath = os.path.join(dirname, os.path.basename(d["file_name"])) + vis.save(fpath) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/cityscapes_panoptic.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/cityscapes_panoptic.py new file mode 100644 index 0000000..48c136f --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/cityscapes_panoptic.py @@ -0,0 +1,187 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import json +import logging +import os + +from detectron2.data import DatasetCatalog, MetadataCatalog +from detectron2.data.datasets.builtin_meta import CITYSCAPES_CATEGORIES +from detectron2.utils.file_io import PathManager + +""" +This file contains functions to register the Cityscapes panoptic dataset to the DatasetCatalog. 
+""" + + +logger = logging.getLogger(__name__) + + +def get_cityscapes_panoptic_files(image_dir, gt_dir, json_info): + files = [] + # scan through the directory + cities = PathManager.ls(image_dir) + logger.info(f"{len(cities)} cities found in '{image_dir}'.") + image_dict = {} + for city in cities: + city_img_dir = os.path.join(image_dir, city) + for basename in PathManager.ls(city_img_dir): + image_file = os.path.join(city_img_dir, basename) + + suffix = "_leftImg8bit.png" + assert basename.endswith(suffix), basename + basename = os.path.basename(basename)[: -len(suffix)] + + image_dict[basename] = image_file + + for ann in json_info["annotations"]: + image_file = image_dict.get(ann["image_id"], None) + assert image_file is not None, "No image {} found for annotation {}".format( + ann["image_id"], ann["file_name"] + ) + label_file = os.path.join(gt_dir, ann["file_name"]) + segments_info = ann["segments_info"] + + files.append((image_file, label_file, segments_info)) + + assert len(files), "No images found in {}".format(image_dir) + assert PathManager.isfile(files[0][0]), files[0][0] + assert PathManager.isfile(files[0][1]), files[0][1] + return files + + +def load_cityscapes_panoptic(image_dir, gt_dir, gt_json, meta): + """ + Args: + image_dir (str): path to the raw dataset. e.g., "~/cityscapes/leftImg8bit/train". + gt_dir (str): path to the raw annotations. e.g., + "~/cityscapes/gtFine/cityscapes_panoptic_train". + gt_json (str): path to the json file. e.g., + "~/cityscapes/gtFine/cityscapes_panoptic_train.json". + meta (dict): dictionary containing "thing_dataset_id_to_contiguous_id" + and "stuff_dataset_id_to_contiguous_id" to map category ids to + contiguous ids for training. + + Returns: + list[dict]: a list of dicts in Detectron2 standard format. (See + `Using Custom Datasets `_ ) + """ + + def _convert_category_id(segment_info, meta): + if segment_info["category_id"] in meta["thing_dataset_id_to_contiguous_id"]: + segment_info["category_id"] = meta["thing_dataset_id_to_contiguous_id"][ + segment_info["category_id"] + ] + else: + segment_info["category_id"] = meta["stuff_dataset_id_to_contiguous_id"][ + segment_info["category_id"] + ] + return segment_info + + assert os.path.exists( + gt_json + ), "Please run `python cityscapesscripts/preparation/createPanopticImgs.py` to generate label files." # noqa + with open(gt_json) as f: + json_info = json.load(f) + files = get_cityscapes_panoptic_files(image_dir, gt_dir, json_info) + ret = [] + for image_file, label_file, segments_info in files: + sem_label_file = ( + image_file.replace("leftImg8bit", "gtFine").split(".")[0] + "_labelTrainIds.png" + ) + segments_info = [_convert_category_id(x, meta) for x in segments_info] + ret.append( + { + "file_name": image_file, + "image_id": "_".join( + os.path.splitext(os.path.basename(image_file))[0].split("_")[:3] + ), + "sem_seg_file_name": sem_label_file, + "pan_seg_file_name": label_file, + "segments_info": segments_info, + } + ) + assert len(ret), f"No images found in {image_dir}!" 
+    assert PathManager.isfile(
+        ret[0]["sem_seg_file_name"]
+    ), "Please generate labelTrainIds.png with cityscapesscripts/preparation/createTrainIdLabelImgs.py"  # noqa
+    assert PathManager.isfile(
+        ret[0]["pan_seg_file_name"]
+    ), "Please generate panoptic annotation with python cityscapesscripts/preparation/createPanopticImgs.py"  # noqa
+    return ret
+
+
+_RAW_CITYSCAPES_PANOPTIC_SPLITS = {
+    "cityscapes_fine_panoptic_train": (
+        "cityscapes/leftImg8bit/train",
+        "cityscapes/gtFine/cityscapes_panoptic_train",
+        "cityscapes/gtFine/cityscapes_panoptic_train.json",
+    ),
+    "cityscapes_fine_panoptic_val": (
+        "cityscapes/leftImg8bit/val",
+        "cityscapes/gtFine/cityscapes_panoptic_val",
+        "cityscapes/gtFine/cityscapes_panoptic_val.json",
+    ),
+    # "cityscapes_fine_panoptic_test": not supported yet
+}
+
+
+def register_all_cityscapes_panoptic(root):
+    meta = {}
+    # The following metadata maps contiguous id from [0, #thing categories +
+    # #stuff categories) to their names and colors. We have to replicate the
+    # same name and color under "thing_*" and "stuff_*" because the current
+    # visualization function in D2 handles thing and stuff classes differently
+    # due to some heuristic used in Panoptic FPN. We keep the same naming to
+    # enable reusing existing visualization functions.
+    thing_classes = [k["name"] for k in CITYSCAPES_CATEGORIES]
+    thing_colors = [k["color"] for k in CITYSCAPES_CATEGORIES]
+    stuff_classes = [k["name"] for k in CITYSCAPES_CATEGORIES]
+    stuff_colors = [k["color"] for k in CITYSCAPES_CATEGORIES]
+
+    meta["thing_classes"] = thing_classes
+    meta["thing_colors"] = thing_colors
+    meta["stuff_classes"] = stuff_classes
+    meta["stuff_colors"] = stuff_colors
+
+    # There are three types of ids in cityscapes panoptic segmentation:
+    # (1) category id: like semantic segmentation, it is the class id for each
+    # pixel. Since there are some classes not used in evaluation, the category
+    # id is not always contiguous and thus we have two sets of category ids:
+    #   - original category id: category id in the original dataset, mainly
+    #     used for evaluation.
+    #   - contiguous category id: [0, #classes), in order to train the classifier
+    # (2) instance id: this id is used to differentiate different instances from
+    # the same category. For "stuff" classes, the instance id is always 0; for
+    # "thing" classes, the instance id starts from 1 and 0 is reserved for
+    # ignored instances (e.g. crowd annotation).
+    # (3) panoptic id: this is the compact id that encodes both category and
+    # instance id by: category_id * 1000 + instance_id.
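+    # Worked example of the encoding above, using ids from CITYSCAPES_CATEGORIES:
+    # a "car" pixel (id 26) belonging to instance 7 gets panoptic id
+    # 26 * 1000 + 7 = 26007, while a "sky" (stuff) pixel gets
+    # 23 * 1000 + 0 = 23000.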
+ thing_dataset_id_to_contiguous_id = {} + stuff_dataset_id_to_contiguous_id = {} + + for k in CITYSCAPES_CATEGORIES: + if k["isthing"] == 1: + thing_dataset_id_to_contiguous_id[k["id"]] = k["trainId"] + else: + stuff_dataset_id_to_contiguous_id[k["id"]] = k["trainId"] + + meta["thing_dataset_id_to_contiguous_id"] = thing_dataset_id_to_contiguous_id + meta["stuff_dataset_id_to_contiguous_id"] = stuff_dataset_id_to_contiguous_id + + for key, (image_dir, gt_dir, gt_json) in _RAW_CITYSCAPES_PANOPTIC_SPLITS.items(): + image_dir = os.path.join(root, image_dir) + gt_dir = os.path.join(root, gt_dir) + gt_json = os.path.join(root, gt_json) + + DatasetCatalog.register( + key, lambda x=image_dir, y=gt_dir, z=gt_json: load_cityscapes_panoptic(x, y, z, meta) + ) + MetadataCatalog.get(key).set( + panoptic_root=gt_dir, + image_root=image_dir, + panoptic_json=gt_json, + gt_dir=gt_dir.replace("cityscapes_panoptic_", ""), + evaluator_type="cityscapes_panoptic_seg", + ignore_label=255, + label_divisor=1000, + **meta, + ) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/coco.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/coco.py new file mode 100644 index 0000000..ed4f7cc --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/coco.py @@ -0,0 +1,539 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import contextlib +import datetime +import io +import json +import logging +import numpy as np +import os +import shutil +import pycocotools.mask as mask_util +from fvcore.common.timer import Timer +from iopath.common.file_io import file_lock +from PIL import Image + +from detectron2.structures import Boxes, BoxMode, PolygonMasks, RotatedBoxes +from detectron2.utils.file_io import PathManager + +from .. import DatasetCatalog, MetadataCatalog + +""" +This file contains functions to parse COCO-format annotations into dicts in "Detectron2 format". +""" + + +logger = logging.getLogger(__name__) + +__all__ = ["load_coco_json", "load_sem_seg", "convert_to_coco_json", "register_coco_instances"] + + +def load_coco_json(json_file, image_root, dataset_name=None, extra_annotation_keys=None): + """ + Load a json file with COCO's instances annotation format. + Currently supports instance detection, instance segmentation, + and person keypoints annotations. + + Args: + json_file (str): full path to the json file in COCO instances annotation format. + image_root (str or path-like): the directory where the images in this json file exists. + dataset_name (str or None): the name of the dataset (e.g., coco_2017_train). + When provided, this function will also do the following: + + * Put "thing_classes" into the metadata associated with this dataset. + * Map the category ids into a contiguous range (needed by standard dataset format), + and add "thing_dataset_id_to_contiguous_id" to the metadata associated + with this dataset. + + This option should usually be provided, unless users need to load + the original json content and apply more processing manually. + extra_annotation_keys (list[str]): list of per-annotation keys that should also be + loaded into the dataset dict (besides "iscrowd", "bbox", "keypoints", + "category_id", "segmentation"). The values for these keys will be returned as-is. + For example, the densepose annotations are loaded in this way. + + Returns: + list[dict]: a list of dicts in Detectron2 standard dataset dicts format (See + `Using Custom Datasets `_ ) when `dataset_name` is not None. 
+ If `dataset_name` is None, the returned `category_ids` may be + incontiguous and may not conform to the Detectron2 standard format. + + Notes: + 1. This function does not read the image files. + The results do not have the "image" field. + """ + from pycocotools.coco import COCO + + timer = Timer() + json_file = PathManager.get_local_path(json_file) + with contextlib.redirect_stdout(io.StringIO()): + coco_api = COCO(json_file) + if timer.seconds() > 1: + logger.info("Loading {} takes {:.2f} seconds.".format(json_file, timer.seconds())) + + id_map = None + if dataset_name is not None: + meta = MetadataCatalog.get(dataset_name) + cat_ids = sorted(coco_api.getCatIds()) + cats = coco_api.loadCats(cat_ids) + # The categories in a custom json file may not be sorted. + thing_classes = [c["name"] for c in sorted(cats, key=lambda x: x["id"])] + meta.thing_classes = thing_classes + + # In COCO, certain category ids are artificially removed, + # and by convention they are always ignored. + # We deal with COCO's id issue and translate + # the category ids to contiguous ids in [0, 80). + + # It works by looking at the "categories" field in the json, therefore + # if users' own json also have incontiguous ids, we'll + # apply this mapping as well but print a warning. + if not (min(cat_ids) == 1 and max(cat_ids) == len(cat_ids)): + if "coco" not in dataset_name: + logger.warning( + """ +Category ids in annotations are not in [1, #categories]! We'll apply a mapping for you. +""" + ) + id_map = {v: i for i, v in enumerate(cat_ids)} + meta.thing_dataset_id_to_contiguous_id = id_map + + # sort indices for reproducible results + img_ids = sorted(coco_api.imgs.keys()) + # imgs is a list of dicts, each looks something like: + # {'license': 4, + # 'url': 'http://farm6.staticflickr.com/5454/9413846304_881d5e5c3b_z.jpg', + # 'file_name': 'COCO_val2014_000000001268.jpg', + # 'height': 427, + # 'width': 640, + # 'date_captured': '2013-11-17 05:57:24', + # 'id': 1268} + imgs = coco_api.loadImgs(img_ids) + # anns is a list[list[dict]], where each dict is an annotation + # record for an object. The inner list enumerates the objects in an image + # and the outer list enumerates over images. Example of anns[0]: + # [{'segmentation': [[192.81, + # 247.09, + # ... + # 219.03, + # 249.06]], + # 'area': 1035.749, + # 'iscrowd': 0, + # 'image_id': 1268, + # 'bbox': [192.81, 224.8, 74.73, 33.43], + # 'category_id': 16, + # 'id': 42986}, + # ...] + anns = [coco_api.imgToAnns[img_id] for img_id in img_ids] + total_num_valid_anns = sum([len(x) for x in anns]) + total_num_anns = len(coco_api.anns) + if total_num_valid_anns < total_num_anns: + logger.warning( + f"{json_file} contains {total_num_anns} annotations, but only " + f"{total_num_valid_anns} of them match to images in the file." + ) + + if "minival" not in json_file: + # The popular valminusminival & minival annotations for COCO2014 contain this bug. + # However the ratio of buggy annotations there is tiny and does not affect accuracy. + # Therefore we explicitly white-list them. 
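+        # Illustrative (hypothetical) failure case for the check below: anns
+        # [[{"id": 1}, {"id": 2}], [{"id": 2}]] flattens to ids [1, 2, 2] and
+        # would trip the uniqueness assertion.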
+ ann_ids = [ann["id"] for anns_per_image in anns for ann in anns_per_image] + assert len(set(ann_ids)) == len(ann_ids), "Annotation ids in '{}' are not unique!".format( + json_file + ) + + imgs_anns = list(zip(imgs, anns)) + logger.info("Loaded {} images in COCO format from {}".format(len(imgs_anns), json_file)) + + dataset_dicts = [] + + ann_keys = ["iscrowd", "bbox", "keypoints", "category_id"] + (extra_annotation_keys or []) + + num_instances_without_valid_segmentation = 0 + + for (img_dict, anno_dict_list) in imgs_anns: + record = {} + record["file_name"] = os.path.join(image_root, img_dict["file_name"]) + record["height"] = img_dict["height"] + record["width"] = img_dict["width"] + image_id = record["image_id"] = img_dict["id"] + + objs = [] + for anno in anno_dict_list: + # Check that the image_id in this annotation is the same as + # the image_id we're looking at. + # This fails only when the data parsing logic or the annotation file is buggy. + + # The original COCO valminusminival2014 & minival2014 annotation files + # actually contains bugs that, together with certain ways of using COCO API, + # can trigger this assertion. + assert anno["image_id"] == image_id + + assert anno.get("ignore", 0) == 0, '"ignore" in COCO json file is not supported.' + + obj = {key: anno[key] for key in ann_keys if key in anno} + if "bbox" in obj and len(obj["bbox"]) == 0: + raise ValueError( + f"One annotation of image {image_id} contains empty 'bbox' value! " + "This json does not have valid COCO format." + ) + + segm = anno.get("segmentation", None) + if segm: # either list[list[float]] or dict(RLE) + if isinstance(segm, dict): + if isinstance(segm["counts"], list): + # convert to compressed RLE + segm = mask_util.frPyObjects(segm, *segm["size"]) + else: + # filter out invalid polygons (< 3 points) + segm = [poly for poly in segm if len(poly) % 2 == 0 and len(poly) >= 6] + if len(segm) == 0: + num_instances_without_valid_segmentation += 1 + continue # ignore this instance + obj["segmentation"] = segm + + keypts = anno.get("keypoints", None) + if keypts: # list[int] + for idx, v in enumerate(keypts): + if idx % 3 != 2: + # COCO's segmentation coordinates are floating points in [0, H or W], + # but keypoint coordinates are integers in [0, H-1 or W-1] + # Therefore we assume the coordinates are "pixel indices" and + # add 0.5 to convert to floating point coordinates. + keypts[idx] = v + 0.5 + obj["keypoints"] = keypts + + obj["bbox_mode"] = BoxMode.XYWH_ABS + if id_map: + annotation_category_id = obj["category_id"] + try: + obj["category_id"] = id_map[annotation_category_id] + except KeyError as e: + raise KeyError( + f"Encountered category_id={annotation_category_id} " + "but this id does not exist in 'categories' of the json file." + ) from e + objs.append(obj) + record["annotations"] = objs + dataset_dicts.append(record) + + if num_instances_without_valid_segmentation > 0: + logger.warning( + "Filtered out {} instances without valid segmentation. ".format( + num_instances_without_valid_segmentation + ) + + "There might be issues in your dataset generation process. Please " + "check https://detectron2.readthedocs.io/en/latest/tutorials/datasets.html carefully" + ) + return dataset_dicts + + +def load_sem_seg(gt_root, image_root, gt_ext="png", image_ext="jpg"): + """ + Load semantic segmentation datasets. All files under "gt_root" with "gt_ext" extension are + treated as ground truth annotations and all files under "image_root" with "image_ext" extension + as input images. 
Ground truth and input images are matched using file paths relative to
+    "gt_root" and "image_root" respectively, without taking into account file extensions.
+    This works for COCO as well as some other datasets.
+
+    Args:
+        gt_root (str): full path to ground truth semantic segmentation files. Semantic segmentation
+            annotations are stored as images with integer values in pixels that represent
+            corresponding semantic labels.
+        image_root (str): the directory where the input images are.
+        gt_ext (str): file extension for ground truth annotations.
+        image_ext (str): file extension for input images.
+
+    Returns:
+        list[dict]:
+            a list of dicts in detectron2 standard format without instance-level
+            annotation.
+
+    Notes:
+        1. This function does not read the image and ground truth files.
+           The results do not have the "image" and "sem_seg" fields.
+    """
+
+    # We match input images with ground truth based on their relative filepaths (without file
+    # extensions) starting from 'image_root' and 'gt_root' respectively.
+    def file2id(folder_path, file_path):
+        # extract relative path starting from `folder_path`
+        image_id = os.path.normpath(os.path.relpath(file_path, start=folder_path))
+        # remove file extension
+        image_id = os.path.splitext(image_id)[0]
+        return image_id
+
+    input_files = sorted(
+        (os.path.join(image_root, f) for f in PathManager.ls(image_root) if f.endswith(image_ext)),
+        key=lambda file_path: file2id(image_root, file_path),
+    )
+    gt_files = sorted(
+        (os.path.join(gt_root, f) for f in PathManager.ls(gt_root) if f.endswith(gt_ext)),
+        key=lambda file_path: file2id(gt_root, file_path),
+    )
+
+    assert len(gt_files) > 0, "No annotations found in {}.".format(gt_root)
+
+    # Use the intersection, so that val2017_100 annotations can run smoothly with val2017 images
+    if len(input_files) != len(gt_files):
+        logger.warn(
+            "Directories {} and {} have {} and {} files, respectively.".format(
+                image_root, gt_root, len(input_files), len(gt_files)
+            )
+        )
+        input_basenames = [os.path.basename(f)[: -len(image_ext)] for f in input_files]
+        gt_basenames = [os.path.basename(f)[: -len(gt_ext)] for f in gt_files]
+        intersect = list(set(input_basenames) & set(gt_basenames))
+        # sort, otherwise each worker may obtain a list[dict] in different order
+        intersect = sorted(intersect)
+        logger.warn("Will use their intersection of {} files.".format(len(intersect)))
+        input_files = [os.path.join(image_root, f + image_ext) for f in intersect]
+        gt_files = [os.path.join(gt_root, f + gt_ext) for f in intersect]
+
+    logger.info(
+        "Loaded {} images with semantic segmentation from {}".format(len(input_files), image_root)
+    )
+
+    dataset_dicts = []
+    for (img_path, gt_path) in zip(input_files, gt_files):
+        record = {}
+        record["file_name"] = img_path
+        record["sem_seg_file_name"] = gt_path
+        dataset_dicts.append(record)
+
+    return dataset_dicts
+
+
+def convert_to_coco_dict(dataset_name):
+    """
+    Convert an instance detection/segmentation or keypoint detection dataset
+    in detectron2's standard format into COCO json format.
+
+    Generic dataset description can be found here:
+    https://detectron2.readthedocs.io/tutorials/datasets.html#register-a-dataset
+
+    COCO data format description can be found here:
+    http://cocodataset.org/#format-data
+
+    Args:
+        dataset_name (str):
+            name of the source dataset
+            Must be registered in DatasetCatalog and in detectron2's standard format.
+            Must have corresponding metadata "thing_classes"
+    Returns:
+        coco_dict: serializable dict in COCO json format
+    """
+
+    dataset_dicts = DatasetCatalog.get(dataset_name)
+    metadata = MetadataCatalog.get(dataset_name)
+
+    # unmap the category mapping ids for COCO
+    if hasattr(metadata, "thing_dataset_id_to_contiguous_id"):
+        reverse_id_mapping = {v: k for k, v in metadata.thing_dataset_id_to_contiguous_id.items()}
+        reverse_id_mapper = lambda contiguous_id: reverse_id_mapping[contiguous_id]  # noqa
+    else:
+        reverse_id_mapper = lambda contiguous_id: contiguous_id  # noqa
+
+    categories = [
+        {"id": reverse_id_mapper(id), "name": name}
+        for id, name in enumerate(metadata.thing_classes)
+    ]
+
+    logger.info("Converting dataset dicts into COCO format")
+    coco_images = []
+    coco_annotations = []
+
+    for image_id, image_dict in enumerate(dataset_dicts):
+        coco_image = {
+            "id": image_dict.get("image_id", image_id),
+            "width": int(image_dict["width"]),
+            "height": int(image_dict["height"]),
+            "file_name": str(image_dict["file_name"]),
+        }
+        coco_images.append(coco_image)
+
+        anns_per_image = image_dict.get("annotations", [])
+        for annotation in anns_per_image:
+            # create a new dict with only COCO fields
+            coco_annotation = {}
+
+            # COCO requirement: XYWH box format for axis-aligned and XYWHA for rotated
+            bbox = annotation["bbox"]
+            if isinstance(bbox, np.ndarray):
+                if bbox.ndim != 1:
+                    raise ValueError(f"bbox has to be 1-dimensional. Got shape={bbox.shape}.")
+                bbox = bbox.tolist()
+            if len(bbox) not in [4, 5]:
+                raise ValueError(f"bbox has to have length 4 or 5. Got {bbox}.")
+            from_bbox_mode = annotation["bbox_mode"]
+            to_bbox_mode = BoxMode.XYWH_ABS if len(bbox) == 4 else BoxMode.XYWHA_ABS
+            bbox = BoxMode.convert(bbox, from_bbox_mode, to_bbox_mode)
+
+            # COCO requirement: instance area
+            if "segmentation" in annotation:
+                # Computing areas for instances by counting the pixels
+                segmentation = annotation["segmentation"]
+                # TODO: check segmentation type: RLE, BinaryMask or Polygon
+                if isinstance(segmentation, list):
+                    polygons = PolygonMasks([segmentation])
+                    area = polygons.area()[0].item()
+                elif isinstance(segmentation, dict):  # RLE
+                    area = mask_util.area(segmentation).item()
+                else:
+                    raise TypeError(f"Unknown segmentation type {type(segmentation)}!")
+            else:
+                # Computing areas using bounding boxes
+                if to_bbox_mode == BoxMode.XYWH_ABS:
+                    bbox_xy = BoxMode.convert(bbox, to_bbox_mode, BoxMode.XYXY_ABS)
+                    area = Boxes([bbox_xy]).area()[0].item()
+                else:
+                    area = RotatedBoxes([bbox]).area()[0].item()
+
+            if "keypoints" in annotation:
+                keypoints = annotation["keypoints"]  # list[int]
+                for idx, v in enumerate(keypoints):
+                    if idx % 3 != 2:
+                        # COCO's segmentation coordinates are floating points in [0, H or W],
+                        # but keypoint coordinates are integers in [0, H-1 or W-1]
+                        # For COCO format consistency we subtract 0.5
+                        # https://github.com/facebookresearch/detectron2/pull/175#issuecomment-551202163
+                        keypoints[idx] = v - 0.5
+                if "num_keypoints" in annotation:
+                    num_keypoints = annotation["num_keypoints"]
+                else:
+                    num_keypoints = sum(kp > 0 for kp in keypoints[2::3])
+
+            # COCO requirement:
+            #   linking annotations to images
+            #   "id" field must start with 1
+            coco_annotation["id"] = len(coco_annotations) + 1
+            coco_annotation["image_id"] = coco_image["id"]
+            coco_annotation["bbox"] = [round(float(x), 3) for x in bbox]
+            coco_annotation["area"] = float(area)
+            coco_annotation["iscrowd"] = int(annotation.get("iscrowd", 0))
+            coco_annotation["category_id"] = int(reverse_id_mapper(annotation["category_id"]))
+
+            # Add optional fields
+            if "keypoints" in annotation:
+                coco_annotation["keypoints"] = keypoints
+                coco_annotation["num_keypoints"] = num_keypoints
+
+            if "segmentation" in annotation:
+                seg = coco_annotation["segmentation"] = annotation["segmentation"]
+                if isinstance(seg, dict):  # RLE
+                    counts = seg["counts"]
+                    if not isinstance(counts, str):
+                        # make it json-serializable
+                        seg["counts"] = counts.decode("ascii")
+
+            coco_annotations.append(coco_annotation)
+
+    logger.info(
+        "Conversion finished, "
+        f"#images: {len(coco_images)}, #annotations: {len(coco_annotations)}"
+    )
+
+    info = {
+        "date_created": str(datetime.datetime.now()),
+        "description": "Automatically generated COCO json file for Detectron2.",
+    }
+    coco_dict = {"info": info, "images": coco_images, "categories": categories, "licenses": None}
+    if len(coco_annotations) > 0:
+        coco_dict["annotations"] = coco_annotations
+    return coco_dict
+
+
+def convert_to_coco_json(dataset_name, output_file, allow_cached=True):
+    """
+    Converts dataset into COCO format and saves it to a json file.
+    dataset_name must be registered in DatasetCatalog and in detectron2's standard format.
+
+    Args:
+        dataset_name:
+            reference from the config file to the catalogs
+            must be registered in DatasetCatalog and in detectron2's standard format
+        output_file: path of json file that will be saved to
+        allow_cached: if json file is already present then skip conversion
+    """
+
+    # TODO: The dataset or the conversion script *may* change,
+    # a checksum would be useful for validating the cached data
+
+    PathManager.mkdirs(os.path.dirname(output_file))
+    with file_lock(output_file):
+        if PathManager.exists(output_file) and allow_cached:
+            logger.warning(
+                f"Using previously cached COCO format annotations at '{output_file}'. "
+                "You need to clear the cache file if your dataset has been modified."
+            )
+        else:
+            logger.info(f"Converting annotations of dataset '{dataset_name}' to COCO format ...")
+            coco_dict = convert_to_coco_dict(dataset_name)
+
+            logger.info(f"Caching COCO format annotations at '{output_file}' ...")
+            tmp_file = output_file + ".tmp"
+            with PathManager.open(tmp_file, "w") as f:
+                json.dump(coco_dict, f)
+            shutil.move(tmp_file, output_file)
+
+
+def register_coco_instances(name, metadata, json_file, image_root):
+    """
+    Register a dataset in COCO's json annotation format for
+    instance detection, instance segmentation and keypoint detection.
+    (i.e., Type 1 and 2 in http://cocodataset.org/#format-data.
+    `instances*.json` and `person_keypoints*.json` in the dataset).
+
+    This is an example of how to register a new dataset.
+    You can do something similar to this function, to register new datasets.
+
+    Args:
+        name (str): the name that identifies a dataset, e.g. "coco_2014_train".
+        metadata (dict): extra metadata associated with this dataset. You can
+            leave it as an empty dict.
+        json_file (str): path to the json instance annotation file.
+        image_root (str or path-like): directory which contains all the images.
+    """
+    assert isinstance(name, str), name
+    assert isinstance(json_file, (str, os.PathLike)), json_file
+    assert isinstance(image_root, (str, os.PathLike)), image_root
+    # 1. register a function which returns dicts
+    DatasetCatalog.register(name, lambda: load_coco_json(json_file, image_root, name))
+
+    # 2.
Optionally, add metadata about this dataset, + # since they might be useful in evaluation, visualization or logging + MetadataCatalog.get(name).set( + json_file=json_file, image_root=image_root, evaluator_type="coco", **metadata + ) + + +if __name__ == "__main__": + """ + Test the COCO json dataset loader. + + Usage: + python -m detectron2.data.datasets.coco \ + path/to/json path/to/image_root dataset_name + + "dataset_name" can be "coco_2014_minival_100", or other + pre-registered ones + """ + from detectron2.utils.logger import setup_logger + from detectron2.utils.visualizer import Visualizer + import detectron2.data.datasets # noqa # add pre-defined metadata + import sys + + logger = setup_logger(name=__name__) + assert sys.argv[3] in DatasetCatalog.list() + meta = MetadataCatalog.get(sys.argv[3]) + + dicts = load_coco_json(sys.argv[1], sys.argv[2], sys.argv[3]) + logger.info("Done loading {} samples.".format(len(dicts))) + + dirname = "coco-data-vis" + os.makedirs(dirname, exist_ok=True) + for d in dicts: + img = np.array(Image.open(d["file_name"])) + visualizer = Visualizer(img, metadata=meta) + vis = visualizer.draw_dataset_dict(d) + fpath = os.path.join(dirname, os.path.basename(d["file_name"])) + vis.save(fpath) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/coco_panoptic.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/coco_panoptic.py new file mode 100644 index 0000000..b8dae44 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/coco_panoptic.py @@ -0,0 +1,228 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import copy +import json +import os + +from detectron2.data import DatasetCatalog, MetadataCatalog +from detectron2.utils.file_io import PathManager + +from .coco import load_coco_json, load_sem_seg + +__all__ = ["register_coco_panoptic", "register_coco_panoptic_separated"] + + +def load_coco_panoptic_json(json_file, image_dir, gt_dir, meta): + """ + Args: + image_dir (str): path to the raw dataset. e.g., "~/coco/train2017". + gt_dir (str): path to the raw annotations. e.g., "~/coco/panoptic_train2017". + json_file (str): path to the json file. e.g., "~/coco/annotations/panoptic_train2017.json". + + Returns: + list[dict]: a list of dicts in Detectron2 standard format. (See + `Using Custom Datasets `_ ) + """ + + def _convert_category_id(segment_info, meta): + if segment_info["category_id"] in meta["thing_dataset_id_to_contiguous_id"]: + segment_info["category_id"] = meta["thing_dataset_id_to_contiguous_id"][ + segment_info["category_id"] + ] + segment_info["isthing"] = True + else: + segment_info["category_id"] = meta["stuff_dataset_id_to_contiguous_id"][ + segment_info["category_id"] + ] + segment_info["isthing"] = False + return segment_info + + with PathManager.open(json_file) as f: + json_info = json.load(f) + + ret = [] + for ann in json_info["annotations"]: + image_id = int(ann["image_id"]) + # TODO: currently we assume image and label has the same filename but + # different extension, and images have extension ".jpg" for COCO. Need + # to make image extension a user-provided argument if we extend this + # function to support other COCO-like datasets. 
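+        # e.g. a panoptic annotation entry with file_name "000000001268.png"
+        # (hypothetical id) is paired with the input image
+        # "<image_dir>/000000001268.jpg" built below.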
+        image_file = os.path.join(image_dir, os.path.splitext(ann["file_name"])[0] + ".jpg")
+        label_file = os.path.join(gt_dir, ann["file_name"])
+        segments_info = [_convert_category_id(x, meta) for x in ann["segments_info"]]
+        ret.append(
+            {
+                "file_name": image_file,
+                "image_id": image_id,
+                "pan_seg_file_name": label_file,
+                "segments_info": segments_info,
+            }
+        )
+    assert len(ret), f"No images found in {image_dir}!"
+    assert PathManager.isfile(ret[0]["file_name"]), ret[0]["file_name"]
+    assert PathManager.isfile(ret[0]["pan_seg_file_name"]), ret[0]["pan_seg_file_name"]
+    return ret
+
+
+def register_coco_panoptic(
+    name, metadata, image_root, panoptic_root, panoptic_json, instances_json=None
+):
+    """
+    Register a "standard" version of COCO panoptic segmentation dataset named `name`.
+    The dictionaries in this registered dataset follow detectron2's standard format.
+    Hence it's called "standard".
+
+    Args:
+        name (str): the name that identifies a dataset,
+            e.g. "coco_2017_train_panoptic"
+        metadata (dict): extra metadata associated with this dataset.
+        image_root (str): directory which contains all the images
+        panoptic_root (str): directory which contains panoptic annotation images in COCO format
+        panoptic_json (str): path to the json panoptic annotation file in COCO format
+        sem_seg_root (none): not used, to be consistent with
+            `register_coco_panoptic_separated`.
+        instances_json (str): path to the json instance annotation file
+    """
+    panoptic_name = name
+    DatasetCatalog.register(
+        panoptic_name,
+        lambda: load_coco_panoptic_json(panoptic_json, image_root, panoptic_root, metadata),
+    )
+    MetadataCatalog.get(panoptic_name).set(
+        panoptic_root=panoptic_root,
+        image_root=image_root,
+        panoptic_json=panoptic_json,
+        json_file=instances_json,
+        evaluator_type="coco_panoptic_seg",
+        ignore_label=255,
+        label_divisor=1000,
+        **metadata,
+    )
+
+
+def register_coco_panoptic_separated(
+    name, metadata, image_root, panoptic_root, panoptic_json, sem_seg_root, instances_json
+):
+    """
+    Register a "separated" version of COCO panoptic segmentation dataset named `name`.
+    The annotations in this registered dataset will contain both instance annotations and
+    semantic annotations, each with its own contiguous ids. Hence it's called "separated".
+
+    It follows the setting used by the PanopticFPN paper:
+
+    1. The instance annotations directly come from polygons in the COCO
+       instances annotation task, rather than from the masks in the COCO panoptic annotations.
+
+       The two formats have small differences:
+       Polygons in the instance annotations may have overlaps.
+       The mask annotations are produced by labeling the overlapped polygons
+       with depth ordering.
+
+    2. The semantic annotations are converted from panoptic annotations, where
+       all "things" are assigned a semantic id of 0.
+       All semantic categories will therefore have ids in contiguous
+       range [1, #stuff_categories].
+
+    This function will also register a pure semantic segmentation dataset
+    named ``name + '_stuffonly'``.
+
+    Args:
+        name (str): the name that identifies a dataset,
+            e.g. "coco_2017_train_panoptic"
+        metadata (dict): extra metadata associated with this dataset.
+        image_root (str): directory which contains all the images
+        panoptic_root (str): directory which contains panoptic annotation images
+        panoptic_json (str): path to the json panoptic annotation file
+        sem_seg_root (str): directory which contains all the ground truth segmentation annotations.
+ instances_json (str): path to the json instance annotation file + """ + panoptic_name = name + "_separated" + DatasetCatalog.register( + panoptic_name, + lambda: merge_to_panoptic( + load_coco_json(instances_json, image_root, panoptic_name), + load_sem_seg(sem_seg_root, image_root), + ), + ) + MetadataCatalog.get(panoptic_name).set( + panoptic_root=panoptic_root, + image_root=image_root, + panoptic_json=panoptic_json, + sem_seg_root=sem_seg_root, + json_file=instances_json, # TODO rename + evaluator_type="coco_panoptic_seg", + ignore_label=255, + **metadata, + ) + + semantic_name = name + "_stuffonly" + DatasetCatalog.register(semantic_name, lambda: load_sem_seg(sem_seg_root, image_root)) + MetadataCatalog.get(semantic_name).set( + sem_seg_root=sem_seg_root, + image_root=image_root, + evaluator_type="sem_seg", + ignore_label=255, + **metadata, + ) + + +def merge_to_panoptic(detection_dicts, sem_seg_dicts): + """ + Create dataset dicts for panoptic segmentation, by + merging two dicts using "file_name" field to match their entries. + + Args: + detection_dicts (list[dict]): lists of dicts for object detection or instance segmentation. + sem_seg_dicts (list[dict]): lists of dicts for semantic segmentation. + + Returns: + list[dict] (one per input image): Each dict contains all (key, value) pairs from dicts in + both detection_dicts and sem_seg_dicts that correspond to the same image. + The function assumes that the same key in different dicts has the same value. + """ + results = [] + sem_seg_file_to_entry = {x["file_name"]: x for x in sem_seg_dicts} + assert len(sem_seg_file_to_entry) > 0 + + for det_dict in detection_dicts: + dic = copy.copy(det_dict) + dic.update(sem_seg_file_to_entry[dic["file_name"]]) + results.append(dic) + return results + + +if __name__ == "__main__": + """ + Test the COCO panoptic dataset loader. + + Usage: + python -m detectron2.data.datasets.coco_panoptic \ + path/to/image_root path/to/panoptic_root path/to/panoptic_json dataset_name 10 + + "dataset_name" can be "coco_2017_train_panoptic", or other + pre-registered ones + """ + from detectron2.utils.logger import setup_logger + from detectron2.utils.visualizer import Visualizer + import detectron2.data.datasets # noqa # add pre-defined metadata + import sys + from PIL import Image + import numpy as np + + logger = setup_logger(name=__name__) + assert sys.argv[4] in DatasetCatalog.list() + meta = MetadataCatalog.get(sys.argv[4]) + + dicts = load_coco_panoptic_json(sys.argv[3], sys.argv[1], sys.argv[2], meta.as_dict()) + logger.info("Done loading {} samples.".format(len(dicts))) + + dirname = "coco-data-vis" + os.makedirs(dirname, exist_ok=True) + num_imgs_to_vis = int(sys.argv[5]) + for i, d in enumerate(dicts): + img = np.array(Image.open(d["file_name"])) + visualizer = Visualizer(img, metadata=meta) + vis = visualizer.draw_dataset_dict(d) + fpath = os.path.join(dirname, os.path.basename(d["file_name"])) + vis.save(fpath) + if i + 1 >= num_imgs_to_vis: + break diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/lvis.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/lvis.py new file mode 100644 index 0000000..78b3965 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/lvis.py @@ -0,0 +1,240 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/lvis.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/lvis.py
new file mode 100644
index 0000000..78b3965
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/lvis.py
@@ -0,0 +1,240 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+import logging
+import os
+from fvcore.common.timer import Timer
+
+from detectron2.data import DatasetCatalog, MetadataCatalog
+from detectron2.structures import BoxMode
+from detectron2.utils.file_io import PathManager
+
+from .builtin_meta import _get_coco_instances_meta
+from .lvis_v0_5_categories import LVIS_CATEGORIES as LVIS_V0_5_CATEGORIES
+from .lvis_v1_categories import LVIS_CATEGORIES as LVIS_V1_CATEGORIES
+
+"""
+This file contains functions to parse LVIS-format annotations into dicts in the
+"Detectron2 format".
+"""
+
+logger = logging.getLogger(__name__)
+
+__all__ = ["load_lvis_json", "register_lvis_instances", "get_lvis_instances_meta"]
+
+
+def register_lvis_instances(name, metadata, json_file, image_root):
+    """
+    Register a dataset in LVIS's json annotation format for instance detection and segmentation.
+
+    Args:
+        name (str): a name that identifies the dataset, e.g. "lvis_v0.5_train".
+        metadata (dict): extra metadata associated with this dataset. It can be an empty dict.
+        json_file (str): path to the json instance annotation file.
+        image_root (str or path-like): directory which contains all the images.
+    """
+    DatasetCatalog.register(name, lambda: load_lvis_json(json_file, image_root, name))
+    MetadataCatalog.get(name).set(
+        json_file=json_file, image_root=image_root, evaluator_type="lvis", **metadata
+    )
+
+
+def load_lvis_json(json_file, image_root, dataset_name=None, extra_annotation_keys=None):
+    """
+    Load a json file in LVIS's annotation format.
+
+    Args:
+        json_file (str): full path to the LVIS json annotation file.
+        image_root (str): the directory where the images in this json file exist.
+        dataset_name (str): the name of the dataset (e.g., "lvis_v0.5_train").
+            If provided, this function will put "thing_classes" into the metadata
+            associated with this dataset.
+        extra_annotation_keys (list[str]): list of per-annotation keys that should also be
+            loaded into the dataset dict (besides "bbox", "bbox_mode", "category_id",
+            "segmentation"). The values for these keys will be returned as-is.
+
+    Returns:
+        list[dict]: a list of dicts in Detectron2 standard format. (See
+        `Using Custom Datasets </tutorials/datasets.html>`_ )
+
+    Notes:
+        1. This function does not read the image files.
+           The results do not have the "image" field.
+    """
+    from lvis import LVIS
+
+    json_file = PathManager.get_local_path(json_file)
+
+    timer = Timer()
+    lvis_api = LVIS(json_file)
+    if timer.seconds() > 1:
+        logger.info("Loading {} takes {:.2f} seconds.".format(json_file, timer.seconds()))
+
+    if dataset_name is not None:
+        meta = get_lvis_instances_meta(dataset_name)
+        MetadataCatalog.get(dataset_name).set(**meta)
+
+    # sort indices for reproducible results
+    img_ids = sorted(lvis_api.imgs.keys())
+    # imgs is a list of dicts, each looks something like:
+    # {'license': 4,
+    #  'url': 'http://farm6.staticflickr.com/5454/9413846304_881d5e5c3b_z.jpg',
+    #  'file_name': 'COCO_val2014_000000001268.jpg',
+    #  'height': 427,
+    #  'width': 640,
+    #  'date_captured': '2013-11-17 05:57:24',
+    #  'id': 1268}
+    imgs = lvis_api.load_imgs(img_ids)
+    # anns is a list[list[dict]], where each dict is an annotation
+    # record for an object. The inner list enumerates the objects in an image
+    # and the outer list enumerates over images. Example of anns[0]:
+    # [{'segmentation': [[192.81,
+    #     247.09,
+    #     ...
+    #     219.03,
+    #     249.06]],
+    #   'area': 1035.749,
+    #   'image_id': 1268,
+    #   'bbox': [192.81, 224.8, 74.73, 33.43],
+    #   'category_id': 16,
+    #   'id': 42986},
+    #  ...]
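+    # Note: `lvis_api.img_ann_map` (built by the lvis package's LVIS API) maps an
+    # image id to the list of its annotation dicts, so `anns` below stays
+    # index-aligned with `imgs`.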
+    anns = [lvis_api.img_ann_map[img_id] for img_id in img_ids]
+
+    # Sanity check that each annotation has a unique id
+    ann_ids = [ann["id"] for anns_per_image in anns for ann in anns_per_image]
+    assert len(set(ann_ids)) == len(ann_ids), "Annotation ids in '{}' are not unique".format(
+        json_file
+    )
+
+    imgs_anns = list(zip(imgs, anns))
+
+    logger.info("Loaded {} images in the LVIS format from {}".format(len(imgs_anns), json_file))
+
+    if extra_annotation_keys:
+        logger.info(
+            "The following extra annotation keys will be loaded: {} ".format(extra_annotation_keys)
+        )
+    else:
+        extra_annotation_keys = []
+
+    def get_file_name(img_root, img_dict):
+        # Determine the path including the split folder ("train2017", "val2017", "test2017") from
+        # the coco_url field. Example:
+        #   'coco_url': 'http://images.cocodataset.org/train2017/000000155379.jpg'
+        split_folder, file_name = img_dict["coco_url"].split("/")[-2:]
+        return os.path.join(img_root + split_folder, file_name)
+
+    dataset_dicts = []
+
+    for (img_dict, anno_dict_list) in imgs_anns:
+        record = {}
+        record["file_name"] = get_file_name(image_root, img_dict)
+        record["height"] = img_dict["height"]
+        record["width"] = img_dict["width"]
+        record["not_exhaustive_category_ids"] = img_dict.get("not_exhaustive_category_ids", [])
+        record["neg_category_ids"] = img_dict.get("neg_category_ids", [])
+        image_id = record["image_id"] = img_dict["id"]
+
+        objs = []
+        for anno in anno_dict_list:
+            # Check that the image_id in this annotation is the same as
+            # the image_id we're looking at.
+            # This fails only when the data parsing logic or the annotation file is buggy.
+            assert anno["image_id"] == image_id
+            obj = {"bbox": anno["bbox"], "bbox_mode": BoxMode.XYWH_ABS}
+            # The LVIS data loader can be used to load COCO dataset categories. In this case
+            # the `meta` variable will have a field with a COCO-specific category mapping.
+            if dataset_name is not None and "thing_dataset_id_to_contiguous_id" in meta:
+                obj["category_id"] = meta["thing_dataset_id_to_contiguous_id"][anno["category_id"]]
+            else:
+                obj["category_id"] = anno["category_id"] - 1  # Convert 1-indexed to 0-indexed
+            segm = anno["segmentation"]  # list[list[float]]
+            # filter out invalid polygons (< 3 points)
+            valid_segm = [poly for poly in segm if len(poly) % 2 == 0 and len(poly) >= 6]
+            assert len(segm) == len(
+                valid_segm
+            ), "Annotation contains an invalid polygon with < 3 points"
+            assert len(segm) > 0
+            obj["segmentation"] = segm
+            for extra_ann_key in extra_annotation_keys:
+                obj[extra_ann_key] = anno[extra_ann_key]
+            objs.append(obj)
+        record["annotations"] = objs
+        dataset_dicts.append(record)
+
+    return dataset_dicts
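+
+
+# A minimal sketch of wiring the helpers in this file together; the dataset
+# name and paths below are hypothetical. Note that `image_root` should end
+# with "/", because get_file_name() above joins it to the split folder by
+# plain string concatenation.
+def _example_register_lvis():  # hypothetical helper, for illustration only
+    register_lvis_instances(
+        "my_lvis_v0.5_train",  # hypothetical dataset name
+        get_lvis_instances_meta("lvis_v0.5"),  # defined below
+        "datasets/lvis/lvis_v0.5_train.json",  # hypothetical path
+        "datasets/coco/",  # trailing "/" matters, see get_file_name
+    )
+    # Loading is lazy: load_lvis_json only runs on the first DatasetCatalog.get().
+    return DatasetCatalog.get("my_lvis_v0.5_train")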
+
+
+def get_lvis_instances_meta(dataset_name):
+    """
+    Load LVIS metadata.
+
+    Args:
+        dataset_name (str): LVIS dataset name without the split name (e.g., "lvis_v0.5").
+
+    Returns:
+        dict: LVIS metadata with keys: thing_classes
+    """
+    if "cocofied" in dataset_name:
+        return _get_coco_instances_meta()
+    if "v0.5" in dataset_name:
+        return _get_lvis_instances_meta_v0_5()
+    elif "v1" in dataset_name:
+        return _get_lvis_instances_meta_v1()
+    raise ValueError("No built-in metadata for dataset {}".format(dataset_name))
+
+
+def _get_lvis_instances_meta_v0_5():
+    assert len(LVIS_V0_5_CATEGORIES) == 1230
+    cat_ids = [k["id"] for k in LVIS_V0_5_CATEGORIES]
+    assert min(cat_ids) == 1 and max(cat_ids) == len(
+        cat_ids
+    ), "Category ids are not in [1, #categories], as expected"
+    # Ensure that the category list is sorted by id
+    lvis_categories = sorted(LVIS_V0_5_CATEGORIES, key=lambda x: x["id"])
+    thing_classes = [k["synonyms"][0] for k in lvis_categories]
+    meta = {"thing_classes": thing_classes}
+    return meta
+
+
+def _get_lvis_instances_meta_v1():
+    assert len(LVIS_V1_CATEGORIES) == 1203
+    cat_ids = [k["id"] for k in LVIS_V1_CATEGORIES]
+    assert min(cat_ids) == 1 and max(cat_ids) == len(
+        cat_ids
+    ), "Category ids are not in [1, #categories], as expected"
+    # Ensure that the category list is sorted by id
+    lvis_categories = sorted(LVIS_V1_CATEGORIES, key=lambda x: x["id"])
+    thing_classes = [k["synonyms"][0] for k in lvis_categories]
+    meta = {"thing_classes": thing_classes}
+    return meta
+
+
+if __name__ == "__main__":
+    """
+    Test the LVIS json dataset loader.
+
+    Usage:
+        python -m detectron2.data.datasets.lvis \
+            path/to/json path/to/image_root dataset_name vis_limit
+    """
+    import sys
+    import numpy as np
+    from detectron2.utils.logger import setup_logger
+    from PIL import Image
+    import detectron2.data.datasets  # noqa # add pre-defined metadata
+    from detectron2.utils.visualizer import Visualizer
+
+    logger = setup_logger(name=__name__)
+    meta = MetadataCatalog.get(sys.argv[3])
+
+    dicts = load_lvis_json(sys.argv[1], sys.argv[2], sys.argv[3])
+    logger.info("Done loading {} samples.".format(len(dicts)))
+
+    dirname = "lvis-data-vis"
+    os.makedirs(dirname, exist_ok=True)
+    for d in dicts[: int(sys.argv[4])]:
+        img = np.array(Image.open(d["file_name"]))
+        visualizer = Visualizer(img, metadata=meta)
+        vis = visualizer.draw_dataset_dict(d)
+        fpath = os.path.join(dirname, os.path.basename(d["file_name"]))
+        vis.save(fpath)
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/lvis_v0_5_categories.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/lvis_v0_5_categories.py
new file mode 100644
index 0000000..d3dab61
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/lvis_v0_5_categories.py
@@ -0,0 +1,13 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
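+# (This data file is consumed by lvis.py above: _get_lvis_instances_meta_v0_5()
+# asserts exactly 1230 entries with contiguous ids 1..1230 and uses synonyms[0]
+# of each entry as the class name.)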
+# Autogen with +# with open("lvis_v0.5_val.json", "r") as f: +# a = json.load(f) +# c = a["categories"] +# for x in c: +# del x["image_count"] +# del x["instance_count"] +# LVIS_CATEGORIES = repr(c) + " # noqa" + +# fmt: off +LVIS_CATEGORIES = [{'frequency': 'r', 'id': 1, 'synset': 'acorn.n.01', 'synonyms': ['acorn'], 'def': 'nut from an oak tree', 'name': 'acorn'}, {'frequency': 'c', 'id': 2, 'synset': 'aerosol.n.02', 'synonyms': ['aerosol_can', 'spray_can'], 'def': 'a dispenser that holds a substance under pressure', 'name': 'aerosol_can'}, {'frequency': 'f', 'id': 3, 'synset': 'air_conditioner.n.01', 'synonyms': ['air_conditioner'], 'def': 'a machine that keeps air cool and dry', 'name': 'air_conditioner'}, {'frequency': 'f', 'id': 4, 'synset': 'airplane.n.01', 'synonyms': ['airplane', 'aeroplane'], 'def': 'an aircraft that has a fixed wing and is powered by propellers or jets', 'name': 'airplane'}, {'frequency': 'c', 'id': 5, 'synset': 'alarm_clock.n.01', 'synonyms': ['alarm_clock'], 'def': 'a clock that wakes a sleeper at some preset time', 'name': 'alarm_clock'}, {'frequency': 'c', 'id': 6, 'synset': 'alcohol.n.01', 'synonyms': ['alcohol', 'alcoholic_beverage'], 'def': 'a liquor or brew containing alcohol as the active agent', 'name': 'alcohol'}, {'frequency': 'r', 'id': 7, 'synset': 'alligator.n.02', 'synonyms': ['alligator', 'gator'], 'def': 'amphibious reptiles related to crocodiles but with shorter broader snouts', 'name': 'alligator'}, {'frequency': 'c', 'id': 8, 'synset': 'almond.n.02', 'synonyms': ['almond'], 'def': 'oval-shaped edible seed of the almond tree', 'name': 'almond'}, {'frequency': 'c', 'id': 9, 'synset': 'ambulance.n.01', 'synonyms': ['ambulance'], 'def': 'a vehicle that takes people to and from hospitals', 'name': 'ambulance'}, {'frequency': 'r', 'id': 10, 'synset': 'amplifier.n.01', 'synonyms': ['amplifier'], 'def': 'electronic equipment that increases strength of signals', 'name': 'amplifier'}, {'frequency': 'c', 'id': 11, 'synset': 'anklet.n.03', 'synonyms': ['anklet', 'ankle_bracelet'], 'def': 'an ornament worn around the ankle', 'name': 'anklet'}, {'frequency': 'f', 'id': 12, 'synset': 'antenna.n.01', 'synonyms': ['antenna', 'aerial', 'transmitting_aerial'], 'def': 'an electrical device that sends or receives radio or television signals', 'name': 'antenna'}, {'frequency': 'f', 'id': 13, 'synset': 'apple.n.01', 'synonyms': ['apple'], 'def': 'fruit with red or yellow or green skin and sweet to tart crisp whitish flesh', 'name': 'apple'}, {'frequency': 'r', 'id': 14, 'synset': 'apple_juice.n.01', 'synonyms': ['apple_juice'], 'def': 'the juice of apples', 'name': 'apple_juice'}, {'frequency': 'r', 'id': 15, 'synset': 'applesauce.n.01', 'synonyms': ['applesauce'], 'def': 'puree of stewed apples usually sweetened and spiced', 'name': 'applesauce'}, {'frequency': 'r', 'id': 16, 'synset': 'apricot.n.02', 'synonyms': ['apricot'], 'def': 'downy yellow to rosy-colored fruit resembling a small peach', 'name': 'apricot'}, {'frequency': 'f', 'id': 17, 'synset': 'apron.n.01', 'synonyms': ['apron'], 'def': 'a garment of cloth that is tied about the waist and worn to protect clothing', 'name': 'apron'}, {'frequency': 'c', 'id': 18, 'synset': 'aquarium.n.01', 'synonyms': ['aquarium', 'fish_tank'], 'def': 'a tank/pool/bowl filled with water for keeping live fish and underwater animals', 'name': 'aquarium'}, {'frequency': 'c', 'id': 19, 'synset': 'armband.n.02', 'synonyms': ['armband'], 'def': 'a band worn around the upper arm', 'name': 'armband'}, {'frequency': 'f', 'id': 20, 
'synset': 'armchair.n.01', 'synonyms': ['armchair'], 'def': 'chair with a support on each side for arms', 'name': 'armchair'}, {'frequency': 'r', 'id': 21, 'synset': 'armoire.n.01', 'synonyms': ['armoire'], 'def': 'a large wardrobe or cabinet', 'name': 'armoire'}, {'frequency': 'r', 'id': 22, 'synset': 'armor.n.01', 'synonyms': ['armor', 'armour'], 'def': 'protective covering made of metal and used in combat', 'name': 'armor'}, {'frequency': 'c', 'id': 23, 'synset': 'artichoke.n.02', 'synonyms': ['artichoke'], 'def': 'a thistlelike flower head with edible fleshy leaves and heart', 'name': 'artichoke'}, {'frequency': 'f', 'id': 24, 'synset': 'ashcan.n.01', 'synonyms': ['trash_can', 'garbage_can', 'wastebin', 'dustbin', 'trash_barrel', 'trash_bin'], 'def': 'a bin that holds rubbish until it is collected', 'name': 'trash_can'}, {'frequency': 'c', 'id': 25, 'synset': 'ashtray.n.01', 'synonyms': ['ashtray'], 'def': "a receptacle for the ash from smokers' cigars or cigarettes", 'name': 'ashtray'}, {'frequency': 'c', 'id': 26, 'synset': 'asparagus.n.02', 'synonyms': ['asparagus'], 'def': 'edible young shoots of the asparagus plant', 'name': 'asparagus'}, {'frequency': 'c', 'id': 27, 'synset': 'atomizer.n.01', 'synonyms': ['atomizer', 'atomiser', 'spray', 'sprayer', 'nebulizer', 'nebuliser'], 'def': 'a dispenser that turns a liquid (such as perfume) into a fine mist', 'name': 'atomizer'}, {'frequency': 'c', 'id': 28, 'synset': 'avocado.n.01', 'synonyms': ['avocado'], 'def': 'a pear-shaped fruit with green or blackish skin and rich yellowish pulp enclosing a single large seed', 'name': 'avocado'}, {'frequency': 'c', 'id': 29, 'synset': 'award.n.02', 'synonyms': ['award', 'accolade'], 'def': 'a tangible symbol signifying approval or distinction', 'name': 'award'}, {'frequency': 'f', 'id': 30, 'synset': 'awning.n.01', 'synonyms': ['awning'], 'def': 'a canopy made of canvas to shelter people or things from rain or sun', 'name': 'awning'}, {'frequency': 'r', 'id': 31, 'synset': 'ax.n.01', 'synonyms': ['ax', 'axe'], 'def': 'an edge tool with a heavy bladed head mounted across a handle', 'name': 'ax'}, {'frequency': 'f', 'id': 32, 'synset': 'baby_buggy.n.01', 'synonyms': ['baby_buggy', 'baby_carriage', 'perambulator', 'pram', 'stroller'], 'def': 'a small vehicle with four wheels in which a baby or child is pushed around', 'name': 'baby_buggy'}, {'frequency': 'c', 'id': 33, 'synset': 'backboard.n.01', 'synonyms': ['basketball_backboard'], 'def': 'a raised vertical board with basket attached; used to play basketball', 'name': 'basketball_backboard'}, {'frequency': 'f', 'id': 34, 'synset': 'backpack.n.01', 'synonyms': ['backpack', 'knapsack', 'packsack', 'rucksack', 'haversack'], 'def': 'a bag carried by a strap on your back or shoulder', 'name': 'backpack'}, {'frequency': 'f', 'id': 35, 'synset': 'bag.n.04', 'synonyms': ['handbag', 'purse', 'pocketbook'], 'def': 'a container used for carrying money and small personal items or accessories', 'name': 'handbag'}, {'frequency': 'f', 'id': 36, 'synset': 'bag.n.06', 'synonyms': ['suitcase', 'baggage', 'luggage'], 'def': 'cases used to carry belongings when traveling', 'name': 'suitcase'}, {'frequency': 'c', 'id': 37, 'synset': 'bagel.n.01', 'synonyms': ['bagel', 'beigel'], 'def': 'glazed yeast-raised doughnut-shaped roll with hard crust', 'name': 'bagel'}, {'frequency': 'r', 'id': 38, 'synset': 'bagpipe.n.01', 'synonyms': ['bagpipe'], 'def': 'a tubular wind instrument; the player blows air into a bag and squeezes it out', 'name': 'bagpipe'}, {'frequency': 'r', 
'id': 39, 'synset': 'baguet.n.01', 'synonyms': ['baguet', 'baguette'], 'def': 'narrow French stick loaf', 'name': 'baguet'}, {'frequency': 'r', 'id': 40, 'synset': 'bait.n.02', 'synonyms': ['bait', 'lure'], 'def': 'something used to lure fish or other animals into danger so they can be trapped or killed', 'name': 'bait'}, {'frequency': 'f', 'id': 41, 'synset': 'ball.n.06', 'synonyms': ['ball'], 'def': 'a spherical object used as a plaything', 'name': 'ball'}, {'frequency': 'r', 'id': 42, 'synset': 'ballet_skirt.n.01', 'synonyms': ['ballet_skirt', 'tutu'], 'def': 'very short skirt worn by ballerinas', 'name': 'ballet_skirt'}, {'frequency': 'f', 'id': 43, 'synset': 'balloon.n.01', 'synonyms': ['balloon'], 'def': 'large tough nonrigid bag filled with gas or heated air', 'name': 'balloon'}, {'frequency': 'c', 'id': 44, 'synset': 'bamboo.n.02', 'synonyms': ['bamboo'], 'def': 'woody tropical grass having hollow woody stems', 'name': 'bamboo'}, {'frequency': 'f', 'id': 45, 'synset': 'banana.n.02', 'synonyms': ['banana'], 'def': 'elongated crescent-shaped yellow fruit with soft sweet flesh', 'name': 'banana'}, {'frequency': 'r', 'id': 46, 'synset': 'band_aid.n.01', 'synonyms': ['Band_Aid'], 'def': 'trade name for an adhesive bandage to cover small cuts or blisters', 'name': 'Band_Aid'}, {'frequency': 'c', 'id': 47, 'synset': 'bandage.n.01', 'synonyms': ['bandage'], 'def': 'a piece of soft material that covers and protects an injured part of the body', 'name': 'bandage'}, {'frequency': 'c', 'id': 48, 'synset': 'bandanna.n.01', 'synonyms': ['bandanna', 'bandana'], 'def': 'large and brightly colored handkerchief; often used as a neckerchief', 'name': 'bandanna'}, {'frequency': 'r', 'id': 49, 'synset': 'banjo.n.01', 'synonyms': ['banjo'], 'def': 'a stringed instrument of the guitar family with a long neck and circular body', 'name': 'banjo'}, {'frequency': 'f', 'id': 50, 'synset': 'banner.n.01', 'synonyms': ['banner', 'streamer'], 'def': 'long strip of cloth or paper used for decoration or advertising', 'name': 'banner'}, {'frequency': 'r', 'id': 51, 'synset': 'barbell.n.01', 'synonyms': ['barbell'], 'def': 'a bar to which heavy discs are attached at each end; used in weightlifting', 'name': 'barbell'}, {'frequency': 'r', 'id': 52, 'synset': 'barge.n.01', 'synonyms': ['barge'], 'def': 'a flatbottom boat for carrying heavy loads (especially on canals)', 'name': 'barge'}, {'frequency': 'f', 'id': 53, 'synset': 'barrel.n.02', 'synonyms': ['barrel', 'cask'], 'def': 'a cylindrical container that holds liquids', 'name': 'barrel'}, {'frequency': 'c', 'id': 54, 'synset': 'barrette.n.01', 'synonyms': ['barrette'], 'def': "a pin for holding women's hair in place", 'name': 'barrette'}, {'frequency': 'c', 'id': 55, 'synset': 'barrow.n.03', 'synonyms': ['barrow', 'garden_cart', 'lawn_cart', 'wheelbarrow'], 'def': 'a cart for carrying small loads; has handles and one or more wheels', 'name': 'barrow'}, {'frequency': 'f', 'id': 56, 'synset': 'base.n.03', 'synonyms': ['baseball_base'], 'def': 'a place that the runner must touch before scoring', 'name': 'baseball_base'}, {'frequency': 'f', 'id': 57, 'synset': 'baseball.n.02', 'synonyms': ['baseball'], 'def': 'a ball used in playing baseball', 'name': 'baseball'}, {'frequency': 'f', 'id': 58, 'synset': 'baseball_bat.n.01', 'synonyms': ['baseball_bat'], 'def': 'an implement used in baseball by the batter', 'name': 'baseball_bat'}, {'frequency': 'f', 'id': 59, 'synset': 'baseball_cap.n.01', 'synonyms': ['baseball_cap', 'jockey_cap', 'golf_cap'], 'def': 'a cap with a 
bill', 'name': 'baseball_cap'}, {'frequency': 'f', 'id': 60, 'synset': 'baseball_glove.n.01', 'synonyms': ['baseball_glove', 'baseball_mitt'], 'def': 'the handwear used by fielders in playing baseball', 'name': 'baseball_glove'}, {'frequency': 'f', 'id': 61, 'synset': 'basket.n.01', 'synonyms': ['basket', 'handbasket'], 'def': 'a container that is usually woven and has handles', 'name': 'basket'}, {'frequency': 'c', 'id': 62, 'synset': 'basket.n.03', 'synonyms': ['basketball_hoop'], 'def': 'metal hoop supporting a net through which players try to throw the basketball', 'name': 'basketball_hoop'}, {'frequency': 'c', 'id': 63, 'synset': 'basketball.n.02', 'synonyms': ['basketball'], 'def': 'an inflated ball used in playing basketball', 'name': 'basketball'}, {'frequency': 'r', 'id': 64, 'synset': 'bass_horn.n.01', 'synonyms': ['bass_horn', 'sousaphone', 'tuba'], 'def': 'the lowest brass wind instrument', 'name': 'bass_horn'}, {'frequency': 'r', 'id': 65, 'synset': 'bat.n.01', 'synonyms': ['bat_(animal)'], 'def': 'nocturnal mouselike mammal with forelimbs modified to form membranous wings', 'name': 'bat_(animal)'}, {'frequency': 'f', 'id': 66, 'synset': 'bath_mat.n.01', 'synonyms': ['bath_mat'], 'def': 'a heavy towel or mat to stand on while drying yourself after a bath', 'name': 'bath_mat'}, {'frequency': 'f', 'id': 67, 'synset': 'bath_towel.n.01', 'synonyms': ['bath_towel'], 'def': 'a large towel; to dry yourself after a bath', 'name': 'bath_towel'}, {'frequency': 'c', 'id': 68, 'synset': 'bathrobe.n.01', 'synonyms': ['bathrobe'], 'def': 'a loose-fitting robe of towelling; worn after a bath or swim', 'name': 'bathrobe'}, {'frequency': 'f', 'id': 69, 'synset': 'bathtub.n.01', 'synonyms': ['bathtub', 'bathing_tub'], 'def': 'a large open container that you fill with water and use to wash the body', 'name': 'bathtub'}, {'frequency': 'r', 'id': 70, 'synset': 'batter.n.02', 'synonyms': ['batter_(food)'], 'def': 'a liquid or semiliquid mixture, as of flour, eggs, and milk, used in cooking', 'name': 'batter_(food)'}, {'frequency': 'c', 'id': 71, 'synset': 'battery.n.02', 'synonyms': ['battery'], 'def': 'a portable device that produces electricity', 'name': 'battery'}, {'frequency': 'r', 'id': 72, 'synset': 'beach_ball.n.01', 'synonyms': ['beachball'], 'def': 'large and light ball; for play at the seaside', 'name': 'beachball'}, {'frequency': 'c', 'id': 73, 'synset': 'bead.n.01', 'synonyms': ['bead'], 'def': 'a small ball with a hole through the middle used for ornamentation, jewellery, etc.', 'name': 'bead'}, {'frequency': 'r', 'id': 74, 'synset': 'beaker.n.01', 'synonyms': ['beaker'], 'def': 'a flatbottomed jar made of glass or plastic; used for chemistry', 'name': 'beaker'}, {'frequency': 'c', 'id': 75, 'synset': 'bean_curd.n.01', 'synonyms': ['bean_curd', 'tofu'], 'def': 'cheeselike food made of curdled soybean milk', 'name': 'bean_curd'}, {'frequency': 'c', 'id': 76, 'synset': 'beanbag.n.01', 'synonyms': ['beanbag'], 'def': 'a bag filled with dried beans or similar items; used in games or to sit on', 'name': 'beanbag'}, {'frequency': 'f', 'id': 77, 'synset': 'beanie.n.01', 'synonyms': ['beanie', 'beany'], 'def': 'a small skullcap; formerly worn by schoolboys and college freshmen', 'name': 'beanie'}, {'frequency': 'f', 'id': 78, 'synset': 'bear.n.01', 'synonyms': ['bear'], 'def': 'large carnivorous or omnivorous mammals with shaggy coats and claws', 'name': 'bear'}, {'frequency': 'f', 'id': 79, 'synset': 'bed.n.01', 'synonyms': ['bed'], 'def': 'a piece of furniture that provides a place to 
sleep', 'name': 'bed'}, {'frequency': 'c', 'id': 80, 'synset': 'bedspread.n.01', 'synonyms': ['bedspread', 'bedcover', 'bed_covering', 'counterpane', 'spread'], 'def': 'decorative cover for a bed', 'name': 'bedspread'}, {'frequency': 'f', 'id': 81, 'synset': 'beef.n.01', 'synonyms': ['cow'], 'def': 'cattle that are reared for their meat', 'name': 'cow'}, {'frequency': 'c', 'id': 82, 'synset': 'beef.n.02', 'synonyms': ['beef_(food)', 'boeuf_(food)'], 'def': 'meat from an adult domestic bovine', 'name': 'beef_(food)'}, {'frequency': 'r', 'id': 83, 'synset': 'beeper.n.01', 'synonyms': ['beeper', 'pager'], 'def': 'an device that beeps when the person carrying it is being paged', 'name': 'beeper'}, {'frequency': 'f', 'id': 84, 'synset': 'beer_bottle.n.01', 'synonyms': ['beer_bottle'], 'def': 'a bottle that holds beer', 'name': 'beer_bottle'}, {'frequency': 'c', 'id': 85, 'synset': 'beer_can.n.01', 'synonyms': ['beer_can'], 'def': 'a can that holds beer', 'name': 'beer_can'}, {'frequency': 'r', 'id': 86, 'synset': 'beetle.n.01', 'synonyms': ['beetle'], 'def': 'insect with hard wing covers', 'name': 'beetle'}, {'frequency': 'f', 'id': 87, 'synset': 'bell.n.01', 'synonyms': ['bell'], 'def': 'a hollow device made of metal that makes a ringing sound when struck', 'name': 'bell'}, {'frequency': 'f', 'id': 88, 'synset': 'bell_pepper.n.02', 'synonyms': ['bell_pepper', 'capsicum'], 'def': 'large bell-shaped sweet pepper in green or red or yellow or orange or black varieties', 'name': 'bell_pepper'}, {'frequency': 'f', 'id': 89, 'synset': 'belt.n.02', 'synonyms': ['belt'], 'def': 'a band to tie or buckle around the body (usually at the waist)', 'name': 'belt'}, {'frequency': 'f', 'id': 90, 'synset': 'belt_buckle.n.01', 'synonyms': ['belt_buckle'], 'def': 'the buckle used to fasten a belt', 'name': 'belt_buckle'}, {'frequency': 'f', 'id': 91, 'synset': 'bench.n.01', 'synonyms': ['bench'], 'def': 'a long seat for more than one person', 'name': 'bench'}, {'frequency': 'c', 'id': 92, 'synset': 'beret.n.01', 'synonyms': ['beret'], 'def': 'a cap with no brim or bill; made of soft cloth', 'name': 'beret'}, {'frequency': 'c', 'id': 93, 'synset': 'bib.n.02', 'synonyms': ['bib'], 'def': 'a napkin tied under the chin of a child while eating', 'name': 'bib'}, {'frequency': 'r', 'id': 94, 'synset': 'bible.n.01', 'synonyms': ['Bible'], 'def': 'the sacred writings of the Christian religions', 'name': 'Bible'}, {'frequency': 'f', 'id': 95, 'synset': 'bicycle.n.01', 'synonyms': ['bicycle', 'bike_(bicycle)'], 'def': 'a wheeled vehicle that has two wheels and is moved by foot pedals', 'name': 'bicycle'}, {'frequency': 'f', 'id': 96, 'synset': 'bill.n.09', 'synonyms': ['visor', 'vizor'], 'def': 'a brim that projects to the front to shade the eyes', 'name': 'visor'}, {'frequency': 'c', 'id': 97, 'synset': 'binder.n.03', 'synonyms': ['binder', 'ring-binder'], 'def': 'holds loose papers or magazines', 'name': 'binder'}, {'frequency': 'c', 'id': 98, 'synset': 'binoculars.n.01', 'synonyms': ['binoculars', 'field_glasses', 'opera_glasses'], 'def': 'an optical instrument designed for simultaneous use by both eyes', 'name': 'binoculars'}, {'frequency': 'f', 'id': 99, 'synset': 'bird.n.01', 'synonyms': ['bird'], 'def': 'animal characterized by feathers and wings', 'name': 'bird'}, {'frequency': 'r', 'id': 100, 'synset': 'bird_feeder.n.01', 'synonyms': ['birdfeeder'], 'def': 'an outdoor device that supplies food for wild birds', 'name': 'birdfeeder'}, {'frequency': 'r', 'id': 101, 'synset': 'birdbath.n.01', 'synonyms': ['birdbath'], 
'def': 'an ornamental basin (usually in a garden) for birds to bathe in', 'name': 'birdbath'}, {'frequency': 'c', 'id': 102, 'synset': 'birdcage.n.01', 'synonyms': ['birdcage'], 'def': 'a cage in which a bird can be kept', 'name': 'birdcage'}, {'frequency': 'c', 'id': 103, 'synset': 'birdhouse.n.01', 'synonyms': ['birdhouse'], 'def': 'a shelter for birds', 'name': 'birdhouse'}, {'frequency': 'f', 'id': 104, 'synset': 'birthday_cake.n.01', 'synonyms': ['birthday_cake'], 'def': 'decorated cake served at a birthday party', 'name': 'birthday_cake'}, {'frequency': 'r', 'id': 105, 'synset': 'birthday_card.n.01', 'synonyms': ['birthday_card'], 'def': 'a card expressing a birthday greeting', 'name': 'birthday_card'}, {'frequency': 'r', 'id': 106, 'synset': 'biscuit.n.01', 'synonyms': ['biscuit_(bread)'], 'def': 'small round bread leavened with baking-powder or soda', 'name': 'biscuit_(bread)'}, {'frequency': 'r', 'id': 107, 'synset': 'black_flag.n.01', 'synonyms': ['pirate_flag'], 'def': 'a flag usually bearing a white skull and crossbones on a black background', 'name': 'pirate_flag'}, {'frequency': 'c', 'id': 108, 'synset': 'black_sheep.n.02', 'synonyms': ['black_sheep'], 'def': 'sheep with a black coat', 'name': 'black_sheep'}, {'frequency': 'c', 'id': 109, 'synset': 'blackboard.n.01', 'synonyms': ['blackboard', 'chalkboard'], 'def': 'sheet of slate; for writing with chalk', 'name': 'blackboard'}, {'frequency': 'f', 'id': 110, 'synset': 'blanket.n.01', 'synonyms': ['blanket'], 'def': 'bedding that keeps a person warm in bed', 'name': 'blanket'}, {'frequency': 'c', 'id': 111, 'synset': 'blazer.n.01', 'synonyms': ['blazer', 'sport_jacket', 'sport_coat', 'sports_jacket', 'sports_coat'], 'def': 'lightweight jacket; often striped in the colors of a club or school', 'name': 'blazer'}, {'frequency': 'f', 'id': 112, 'synset': 'blender.n.01', 'synonyms': ['blender', 'liquidizer', 'liquidiser'], 'def': 'an electrically powered mixer that mix or chop or liquefy foods', 'name': 'blender'}, {'frequency': 'r', 'id': 113, 'synset': 'blimp.n.02', 'synonyms': ['blimp'], 'def': 'a small nonrigid airship used for observation or as a barrage balloon', 'name': 'blimp'}, {'frequency': 'c', 'id': 114, 'synset': 'blinker.n.01', 'synonyms': ['blinker', 'flasher'], 'def': 'a light that flashes on and off; used as a signal or to send messages', 'name': 'blinker'}, {'frequency': 'c', 'id': 115, 'synset': 'blueberry.n.02', 'synonyms': ['blueberry'], 'def': 'sweet edible dark-blue berries of blueberry plants', 'name': 'blueberry'}, {'frequency': 'r', 'id': 116, 'synset': 'boar.n.02', 'synonyms': ['boar'], 'def': 'an uncastrated male hog', 'name': 'boar'}, {'frequency': 'r', 'id': 117, 'synset': 'board.n.09', 'synonyms': ['gameboard'], 'def': 'a flat portable surface (usually rectangular) designed for board games', 'name': 'gameboard'}, {'frequency': 'f', 'id': 118, 'synset': 'boat.n.01', 'synonyms': ['boat', 'ship_(boat)'], 'def': 'a vessel for travel on water', 'name': 'boat'}, {'frequency': 'c', 'id': 119, 'synset': 'bobbin.n.01', 'synonyms': ['bobbin', 'spool', 'reel'], 'def': 'a thing around which thread/tape/film or other flexible materials can be wound', 'name': 'bobbin'}, {'frequency': 'r', 'id': 120, 'synset': 'bobby_pin.n.01', 'synonyms': ['bobby_pin', 'hairgrip'], 'def': 'a flat wire hairpin used to hold bobbed hair in place', 'name': 'bobby_pin'}, {'frequency': 'c', 'id': 121, 'synset': 'boiled_egg.n.01', 'synonyms': ['boiled_egg', 'coddled_egg'], 'def': 'egg cooked briefly in the shell in gently boiling water', 
'name': 'boiled_egg'}, {'frequency': 'r', 'id': 122, 'synset': 'bolo_tie.n.01', 'synonyms': ['bolo_tie', 'bolo', 'bola_tie', 'bola'], 'def': 'a cord fastened around the neck with an ornamental clasp and worn as a necktie', 'name': 'bolo_tie'}, {'frequency': 'c', 'id': 123, 'synset': 'bolt.n.03', 'synonyms': ['deadbolt'], 'def': 'the part of a lock that is engaged or withdrawn with a key', 'name': 'deadbolt'}, {'frequency': 'f', 'id': 124, 'synset': 'bolt.n.06', 'synonyms': ['bolt'], 'def': 'a screw that screws into a nut to form a fastener', 'name': 'bolt'}, {'frequency': 'r', 'id': 125, 'synset': 'bonnet.n.01', 'synonyms': ['bonnet'], 'def': 'a hat tied under the chin', 'name': 'bonnet'}, {'frequency': 'f', 'id': 126, 'synset': 'book.n.01', 'synonyms': ['book'], 'def': 'a written work or composition that has been published', 'name': 'book'}, {'frequency': 'r', 'id': 127, 'synset': 'book_bag.n.01', 'synonyms': ['book_bag'], 'def': 'a bag in which students carry their books', 'name': 'book_bag'}, {'frequency': 'c', 'id': 128, 'synset': 'bookcase.n.01', 'synonyms': ['bookcase'], 'def': 'a piece of furniture with shelves for storing books', 'name': 'bookcase'}, {'frequency': 'c', 'id': 129, 'synset': 'booklet.n.01', 'synonyms': ['booklet', 'brochure', 'leaflet', 'pamphlet'], 'def': 'a small book usually having a paper cover', 'name': 'booklet'}, {'frequency': 'r', 'id': 130, 'synset': 'bookmark.n.01', 'synonyms': ['bookmark', 'bookmarker'], 'def': 'a marker (a piece of paper or ribbon) placed between the pages of a book', 'name': 'bookmark'}, {'frequency': 'r', 'id': 131, 'synset': 'boom.n.04', 'synonyms': ['boom_microphone', 'microphone_boom'], 'def': 'a pole carrying an overhead microphone projected over a film or tv set', 'name': 'boom_microphone'}, {'frequency': 'f', 'id': 132, 'synset': 'boot.n.01', 'synonyms': ['boot'], 'def': 'footwear that covers the whole foot and lower leg', 'name': 'boot'}, {'frequency': 'f', 'id': 133, 'synset': 'bottle.n.01', 'synonyms': ['bottle'], 'def': 'a glass or plastic vessel used for storing drinks or other liquids', 'name': 'bottle'}, {'frequency': 'c', 'id': 134, 'synset': 'bottle_opener.n.01', 'synonyms': ['bottle_opener'], 'def': 'an opener for removing caps or corks from bottles', 'name': 'bottle_opener'}, {'frequency': 'c', 'id': 135, 'synset': 'bouquet.n.01', 'synonyms': ['bouquet'], 'def': 'an arrangement of flowers that is usually given as a present', 'name': 'bouquet'}, {'frequency': 'r', 'id': 136, 'synset': 'bow.n.04', 'synonyms': ['bow_(weapon)'], 'def': 'a weapon for shooting arrows', 'name': 'bow_(weapon)'}, {'frequency': 'f', 'id': 137, 'synset': 'bow.n.08', 'synonyms': ['bow_(decorative_ribbons)'], 'def': 'a decorative interlacing of ribbons', 'name': 'bow_(decorative_ribbons)'}, {'frequency': 'f', 'id': 138, 'synset': 'bow_tie.n.01', 'synonyms': ['bow-tie', 'bowtie'], 'def': "a man's tie that ties in a bow", 'name': 'bow-tie'}, {'frequency': 'f', 'id': 139, 'synset': 'bowl.n.03', 'synonyms': ['bowl'], 'def': 'a dish that is round and open at the top for serving foods', 'name': 'bowl'}, {'frequency': 'r', 'id': 140, 'synset': 'bowl.n.08', 'synonyms': ['pipe_bowl'], 'def': 'a small round container that is open at the top for holding tobacco', 'name': 'pipe_bowl'}, {'frequency': 'c', 'id': 141, 'synset': 'bowler_hat.n.01', 'synonyms': ['bowler_hat', 'bowler', 'derby_hat', 'derby', 'plug_hat'], 'def': 'a felt hat that is round and hard with a narrow brim', 'name': 'bowler_hat'}, {'frequency': 'r', 'id': 142, 'synset': 'bowling_ball.n.01', 
'synonyms': ['bowling_ball'], 'def': 'a large ball with finger holes used in the sport of bowling', 'name': 'bowling_ball'}, {'frequency': 'r', 'id': 143, 'synset': 'bowling_pin.n.01', 'synonyms': ['bowling_pin'], 'def': 'a club-shaped wooden object used in bowling', 'name': 'bowling_pin'}, {'frequency': 'r', 'id': 144, 'synset': 'boxing_glove.n.01', 'synonyms': ['boxing_glove'], 'def': 'large glove coverings the fists of a fighter worn for the sport of boxing', 'name': 'boxing_glove'}, {'frequency': 'c', 'id': 145, 'synset': 'brace.n.06', 'synonyms': ['suspenders'], 'def': 'elastic straps that hold trousers up (usually used in the plural)', 'name': 'suspenders'}, {'frequency': 'f', 'id': 146, 'synset': 'bracelet.n.02', 'synonyms': ['bracelet', 'bangle'], 'def': 'jewelry worn around the wrist for decoration', 'name': 'bracelet'}, {'frequency': 'r', 'id': 147, 'synset': 'brass.n.07', 'synonyms': ['brass_plaque'], 'def': 'a memorial made of brass', 'name': 'brass_plaque'}, {'frequency': 'c', 'id': 148, 'synset': 'brassiere.n.01', 'synonyms': ['brassiere', 'bra', 'bandeau'], 'def': 'an undergarment worn by women to support their breasts', 'name': 'brassiere'}, {'frequency': 'c', 'id': 149, 'synset': 'bread-bin.n.01', 'synonyms': ['bread-bin', 'breadbox'], 'def': 'a container used to keep bread or cake in', 'name': 'bread-bin'}, {'frequency': 'r', 'id': 150, 'synset': 'breechcloth.n.01', 'synonyms': ['breechcloth', 'breechclout', 'loincloth'], 'def': 'a garment that provides covering for the loins', 'name': 'breechcloth'}, {'frequency': 'c', 'id': 151, 'synset': 'bridal_gown.n.01', 'synonyms': ['bridal_gown', 'wedding_gown', 'wedding_dress'], 'def': 'a gown worn by the bride at a wedding', 'name': 'bridal_gown'}, {'frequency': 'c', 'id': 152, 'synset': 'briefcase.n.01', 'synonyms': ['briefcase'], 'def': 'a case with a handle; for carrying papers or files or books', 'name': 'briefcase'}, {'frequency': 'c', 'id': 153, 'synset': 'bristle_brush.n.01', 'synonyms': ['bristle_brush'], 'def': 'a brush that is made with the short stiff hairs of an animal or plant', 'name': 'bristle_brush'}, {'frequency': 'f', 'id': 154, 'synset': 'broccoli.n.01', 'synonyms': ['broccoli'], 'def': 'plant with dense clusters of tight green flower buds', 'name': 'broccoli'}, {'frequency': 'r', 'id': 155, 'synset': 'brooch.n.01', 'synonyms': ['broach'], 'def': 'a decorative pin worn by women', 'name': 'broach'}, {'frequency': 'c', 'id': 156, 'synset': 'broom.n.01', 'synonyms': ['broom'], 'def': 'bundle of straws or twigs attached to a long handle; used for cleaning', 'name': 'broom'}, {'frequency': 'c', 'id': 157, 'synset': 'brownie.n.03', 'synonyms': ['brownie'], 'def': 'square or bar of very rich chocolate cake usually with nuts', 'name': 'brownie'}, {'frequency': 'c', 'id': 158, 'synset': 'brussels_sprouts.n.01', 'synonyms': ['brussels_sprouts'], 'def': 'the small edible cabbage-like buds growing along a stalk', 'name': 'brussels_sprouts'}, {'frequency': 'r', 'id': 159, 'synset': 'bubble_gum.n.01', 'synonyms': ['bubble_gum'], 'def': 'a kind of chewing gum that can be blown into bubbles', 'name': 'bubble_gum'}, {'frequency': 'f', 'id': 160, 'synset': 'bucket.n.01', 'synonyms': ['bucket', 'pail'], 'def': 'a roughly cylindrical vessel that is open at the top', 'name': 'bucket'}, {'frequency': 'r', 'id': 161, 'synset': 'buggy.n.01', 'synonyms': ['horse_buggy'], 'def': 'a small lightweight carriage; drawn by a single horse', 'name': 'horse_buggy'}, {'frequency': 'c', 'id': 162, 'synset': 'bull.n.11', 'synonyms': ['bull'], 
'def': 'mature male cow', 'name': 'bull'}, {'frequency': 'r', 'id': 163, 'synset': 'bulldog.n.01', 'synonyms': ['bulldog'], 'def': 'a thickset short-haired dog with a large head and strong undershot lower jaw', 'name': 'bulldog'}, {'frequency': 'r', 'id': 164, 'synset': 'bulldozer.n.01', 'synonyms': ['bulldozer', 'dozer'], 'def': 'large powerful tractor; a large blade in front flattens areas of ground', 'name': 'bulldozer'}, {'frequency': 'c', 'id': 165, 'synset': 'bullet_train.n.01', 'synonyms': ['bullet_train'], 'def': 'a high-speed passenger train', 'name': 'bullet_train'}, {'frequency': 'c', 'id': 166, 'synset': 'bulletin_board.n.02', 'synonyms': ['bulletin_board', 'notice_board'], 'def': 'a board that hangs on a wall; displays announcements', 'name': 'bulletin_board'}, {'frequency': 'r', 'id': 167, 'synset': 'bulletproof_vest.n.01', 'synonyms': ['bulletproof_vest'], 'def': 'a vest capable of resisting the impact of a bullet', 'name': 'bulletproof_vest'}, {'frequency': 'c', 'id': 168, 'synset': 'bullhorn.n.01', 'synonyms': ['bullhorn', 'megaphone'], 'def': 'a portable loudspeaker with built-in microphone and amplifier', 'name': 'bullhorn'}, {'frequency': 'r', 'id': 169, 'synset': 'bully_beef.n.01', 'synonyms': ['corned_beef', 'corn_beef'], 'def': 'beef cured or pickled in brine', 'name': 'corned_beef'}, {'frequency': 'f', 'id': 170, 'synset': 'bun.n.01', 'synonyms': ['bun', 'roll'], 'def': 'small rounded bread either plain or sweet', 'name': 'bun'}, {'frequency': 'c', 'id': 171, 'synset': 'bunk_bed.n.01', 'synonyms': ['bunk_bed'], 'def': 'beds built one above the other', 'name': 'bunk_bed'}, {'frequency': 'f', 'id': 172, 'synset': 'buoy.n.01', 'synonyms': ['buoy'], 'def': 'a float attached by rope to the seabed to mark channels in a harbor or underwater hazards', 'name': 'buoy'}, {'frequency': 'r', 'id': 173, 'synset': 'burrito.n.01', 'synonyms': ['burrito'], 'def': 'a flour tortilla folded around a filling', 'name': 'burrito'}, {'frequency': 'f', 'id': 174, 'synset': 'bus.n.01', 'synonyms': ['bus_(vehicle)', 'autobus', 'charabanc', 'double-decker', 'motorbus', 'motorcoach'], 'def': 'a vehicle carrying many passengers; used for public transport', 'name': 'bus_(vehicle)'}, {'frequency': 'c', 'id': 175, 'synset': 'business_card.n.01', 'synonyms': ['business_card'], 'def': "a card on which are printed the person's name and business affiliation", 'name': 'business_card'}, {'frequency': 'c', 'id': 176, 'synset': 'butcher_knife.n.01', 'synonyms': ['butcher_knife'], 'def': 'a large sharp knife for cutting or trimming meat', 'name': 'butcher_knife'}, {'frequency': 'c', 'id': 177, 'synset': 'butter.n.01', 'synonyms': ['butter'], 'def': 'an edible emulsion of fat globules made by churning milk or cream; for cooking and table use', 'name': 'butter'}, {'frequency': 'c', 'id': 178, 'synset': 'butterfly.n.01', 'synonyms': ['butterfly'], 'def': 'insect typically having a slender body with knobbed antennae and broad colorful wings', 'name': 'butterfly'}, {'frequency': 'f', 'id': 179, 'synset': 'button.n.01', 'synonyms': ['button'], 'def': 'a round fastener sewn to shirts and coats etc to fit through buttonholes', 'name': 'button'}, {'frequency': 'f', 'id': 180, 'synset': 'cab.n.03', 'synonyms': ['cab_(taxi)', 'taxi', 'taxicab'], 'def': 'a car that takes passengers where they want to go in exchange for money', 'name': 'cab_(taxi)'}, {'frequency': 'r', 'id': 181, 'synset': 'cabana.n.01', 'synonyms': ['cabana'], 'def': 'a small tent used as a dressing room beside the sea or a swimming pool', 'name': 
'cabana'}, {'frequency': 'r', 'id': 182, 'synset': 'cabin_car.n.01', 'synonyms': ['cabin_car', 'caboose'], 'def': 'a car on a freight train for use of the train crew; usually the last car on the train', 'name': 'cabin_car'}, {'frequency': 'f', 'id': 183, 'synset': 'cabinet.n.01', 'synonyms': ['cabinet'], 'def': 'a piece of furniture resembling a cupboard with doors and shelves and drawers', 'name': 'cabinet'}, {'frequency': 'r', 'id': 184, 'synset': 'cabinet.n.03', 'synonyms': ['locker', 'storage_locker'], 'def': 'a storage compartment for clothes and valuables; usually it has a lock', 'name': 'locker'}, {'frequency': 'f', 'id': 185, 'synset': 'cake.n.03', 'synonyms': ['cake'], 'def': 'baked goods made from or based on a mixture of flour, sugar, eggs, and fat', 'name': 'cake'}, {'frequency': 'c', 'id': 186, 'synset': 'calculator.n.02', 'synonyms': ['calculator'], 'def': 'a small machine that is used for mathematical calculations', 'name': 'calculator'}, {'frequency': 'f', 'id': 187, 'synset': 'calendar.n.02', 'synonyms': ['calendar'], 'def': 'a list or register of events (appointments/social events/court cases, etc)', 'name': 'calendar'}, {'frequency': 'c', 'id': 188, 'synset': 'calf.n.01', 'synonyms': ['calf'], 'def': 'young of domestic cattle', 'name': 'calf'}, {'frequency': 'c', 'id': 189, 'synset': 'camcorder.n.01', 'synonyms': ['camcorder'], 'def': 'a portable television camera and videocassette recorder', 'name': 'camcorder'}, {'frequency': 'c', 'id': 190, 'synset': 'camel.n.01', 'synonyms': ['camel'], 'def': 'cud-chewing mammal used as a draft or saddle animal in desert regions', 'name': 'camel'}, {'frequency': 'f', 'id': 191, 'synset': 'camera.n.01', 'synonyms': ['camera'], 'def': 'equipment for taking photographs', 'name': 'camera'}, {'frequency': 'c', 'id': 192, 'synset': 'camera_lens.n.01', 'synonyms': ['camera_lens'], 'def': 'a lens that focuses the image in a camera', 'name': 'camera_lens'}, {'frequency': 'c', 'id': 193, 'synset': 'camper.n.02', 'synonyms': ['camper_(vehicle)', 'camping_bus', 'motor_home'], 'def': 'a recreational vehicle equipped for camping out while traveling', 'name': 'camper_(vehicle)'}, {'frequency': 'f', 'id': 194, 'synset': 'can.n.01', 'synonyms': ['can', 'tin_can'], 'def': 'airtight sealed metal container for food or drink or paint etc.', 'name': 'can'}, {'frequency': 'c', 'id': 195, 'synset': 'can_opener.n.01', 'synonyms': ['can_opener', 'tin_opener'], 'def': 'a device for cutting cans open', 'name': 'can_opener'}, {'frequency': 'r', 'id': 196, 'synset': 'candelabrum.n.01', 'synonyms': ['candelabrum', 'candelabra'], 'def': 'branched candlestick; ornamental; has several lights', 'name': 'candelabrum'}, {'frequency': 'f', 'id': 197, 'synset': 'candle.n.01', 'synonyms': ['candle', 'candlestick'], 'def': 'stick of wax with a wick in the middle', 'name': 'candle'}, {'frequency': 'f', 'id': 198, 'synset': 'candlestick.n.01', 'synonyms': ['candle_holder'], 'def': 'a holder with sockets for candles', 'name': 'candle_holder'}, {'frequency': 'r', 'id': 199, 'synset': 'candy_bar.n.01', 'synonyms': ['candy_bar'], 'def': 'a candy shaped as a bar', 'name': 'candy_bar'}, {'frequency': 'c', 'id': 200, 'synset': 'candy_cane.n.01', 'synonyms': ['candy_cane'], 'def': 'a hard candy in the shape of a rod (usually with stripes)', 'name': 'candy_cane'}, {'frequency': 'c', 'id': 201, 'synset': 'cane.n.01', 'synonyms': ['walking_cane'], 'def': 'a stick that people can lean on to help them walk', 'name': 'walking_cane'}, {'frequency': 'c', 'id': 202, 'synset': 'canister.n.02', 
'synonyms': ['canister', 'cannister'], 'def': 'metal container for storing dry foods such as tea or flour', 'name': 'canister'}, {'frequency': 'r', 'id': 203, 'synset': 'cannon.n.02', 'synonyms': ['cannon'], 'def': 'heavy gun fired from a tank', 'name': 'cannon'}, {'frequency': 'c', 'id': 204, 'synset': 'canoe.n.01', 'synonyms': ['canoe'], 'def': 'small and light boat; pointed at both ends; propelled with a paddle', 'name': 'canoe'}, {'frequency': 'r', 'id': 205, 'synset': 'cantaloup.n.02', 'synonyms': ['cantaloup', 'cantaloupe'], 'def': 'the fruit of a cantaloup vine; small to medium-sized melon with yellowish flesh', 'name': 'cantaloup'}, {'frequency': 'r', 'id': 206, 'synset': 'canteen.n.01', 'synonyms': ['canteen'], 'def': 'a flask for carrying water; used by soldiers or travelers', 'name': 'canteen'}, {'frequency': 'c', 'id': 207, 'synset': 'cap.n.01', 'synonyms': ['cap_(headwear)'], 'def': 'a tight-fitting headwear', 'name': 'cap_(headwear)'}, {'frequency': 'f', 'id': 208, 'synset': 'cap.n.02', 'synonyms': ['bottle_cap', 'cap_(container_lid)'], 'def': 'a top (as for a bottle)', 'name': 'bottle_cap'}, {'frequency': 'r', 'id': 209, 'synset': 'cape.n.02', 'synonyms': ['cape'], 'def': 'a sleeveless garment like a cloak but shorter', 'name': 'cape'}, {'frequency': 'c', 'id': 210, 'synset': 'cappuccino.n.01', 'synonyms': ['cappuccino', 'coffee_cappuccino'], 'def': 'equal parts of espresso and steamed milk', 'name': 'cappuccino'}, {'frequency': 'f', 'id': 211, 'synset': 'car.n.01', 'synonyms': ['car_(automobile)', 'auto_(automobile)', 'automobile'], 'def': 'a motor vehicle with four wheels', 'name': 'car_(automobile)'}, {'frequency': 'f', 'id': 212, 'synset': 'car.n.02', 'synonyms': ['railcar_(part_of_a_train)', 'railway_car_(part_of_a_train)', 'railroad_car_(part_of_a_train)'], 'def': 'a wheeled vehicle adapted to the rails of railroad', 'name': 'railcar_(part_of_a_train)'}, {'frequency': 'r', 'id': 213, 'synset': 'car.n.04', 'synonyms': ['elevator_car'], 'def': 'where passengers ride up and down', 'name': 'elevator_car'}, {'frequency': 'r', 'id': 214, 'synset': 'car_battery.n.01', 'synonyms': ['car_battery', 'automobile_battery'], 'def': 'a battery in a motor vehicle', 'name': 'car_battery'}, {'frequency': 'c', 'id': 215, 'synset': 'card.n.02', 'synonyms': ['identity_card'], 'def': 'a card certifying the identity of the bearer', 'name': 'identity_card'}, {'frequency': 'c', 'id': 216, 'synset': 'card.n.03', 'synonyms': ['card'], 'def': 'a rectangular piece of paper used to send messages (e.g. 
greetings or pictures)', 'name': 'card'}, {'frequency': 'r', 'id': 217, 'synset': 'cardigan.n.01', 'synonyms': ['cardigan'], 'def': 'knitted jacket that is fastened up the front with buttons or a zipper', 'name': 'cardigan'}, {'frequency': 'r', 'id': 218, 'synset': 'cargo_ship.n.01', 'synonyms': ['cargo_ship', 'cargo_vessel'], 'def': 'a ship designed to carry cargo', 'name': 'cargo_ship'}, {'frequency': 'r', 'id': 219, 'synset': 'carnation.n.01', 'synonyms': ['carnation'], 'def': 'plant with pink to purple-red spice-scented usually double flowers', 'name': 'carnation'}, {'frequency': 'c', 'id': 220, 'synset': 'carriage.n.02', 'synonyms': ['horse_carriage'], 'def': 'a vehicle with wheels drawn by one or more horses', 'name': 'horse_carriage'}, {'frequency': 'f', 'id': 221, 'synset': 'carrot.n.01', 'synonyms': ['carrot'], 'def': 'deep orange edible root of the cultivated carrot plant', 'name': 'carrot'}, {'frequency': 'c', 'id': 222, 'synset': 'carryall.n.01', 'synonyms': ['tote_bag'], 'def': 'a capacious bag or basket', 'name': 'tote_bag'}, {'frequency': 'c', 'id': 223, 'synset': 'cart.n.01', 'synonyms': ['cart'], 'def': 'a heavy open wagon usually having two wheels and drawn by an animal', 'name': 'cart'}, {'frequency': 'c', 'id': 224, 'synset': 'carton.n.02', 'synonyms': ['carton'], 'def': 'a box made of cardboard; opens by flaps on top', 'name': 'carton'}, {'frequency': 'c', 'id': 225, 'synset': 'cash_register.n.01', 'synonyms': ['cash_register', 'register_(for_cash_transactions)'], 'def': 'a cashbox with an adding machine to register transactions', 'name': 'cash_register'}, {'frequency': 'r', 'id': 226, 'synset': 'casserole.n.01', 'synonyms': ['casserole'], 'def': 'food cooked and served in a casserole', 'name': 'casserole'}, {'frequency': 'r', 'id': 227, 'synset': 'cassette.n.01', 'synonyms': ['cassette'], 'def': 'a container that holds a magnetic tape used for recording or playing sound or video', 'name': 'cassette'}, {'frequency': 'c', 'id': 228, 'synset': 'cast.n.05', 'synonyms': ['cast', 'plaster_cast', 'plaster_bandage'], 'def': 'bandage consisting of a firm covering that immobilizes broken bones while they heal', 'name': 'cast'}, {'frequency': 'f', 'id': 229, 'synset': 'cat.n.01', 'synonyms': ['cat'], 'def': 'a domestic house cat', 'name': 'cat'}, {'frequency': 'c', 'id': 230, 'synset': 'cauliflower.n.02', 'synonyms': ['cauliflower'], 'def': 'edible compact head of white undeveloped flowers', 'name': 'cauliflower'}, {'frequency': 'r', 'id': 231, 'synset': 'caviar.n.01', 'synonyms': ['caviar', 'caviare'], 'def': "salted roe of sturgeon or other large fish; usually served as an hors d'oeuvre", 'name': 'caviar'}, {'frequency': 'c', 'id': 232, 'synset': 'cayenne.n.02', 'synonyms': ['cayenne_(spice)', 'cayenne_pepper_(spice)', 'red_pepper_(spice)'], 'def': 'ground pods and seeds of pungent red peppers of the genus Capsicum', 'name': 'cayenne_(spice)'}, {'frequency': 'c', 'id': 233, 'synset': 'cd_player.n.01', 'synonyms': ['CD_player'], 'def': 'electronic equipment for playing compact discs (CDs)', 'name': 'CD_player'}, {'frequency': 'c', 'id': 234, 'synset': 'celery.n.01', 'synonyms': ['celery'], 'def': 'widely cultivated herb with aromatic leaf stalks that are eaten raw or cooked', 'name': 'celery'}, {'frequency': 'f', 'id': 235, 'synset': 'cellular_telephone.n.01', 'synonyms': ['cellular_telephone', 'cellular_phone', 'cellphone', 'mobile_phone', 'smart_phone'], 'def': 'a hand-held mobile telephone', 'name': 'cellular_telephone'}, {'frequency': 'r', 'id': 236, 'synset': 
'chain_mail.n.01', 'synonyms': ['chain_mail', 'ring_mail', 'chain_armor', 'chain_armour', 'ring_armor', 'ring_armour'], 'def': '(Middle Ages) flexible armor made of interlinked metal rings', 'name': 'chain_mail'}, {'frequency': 'f', 'id': 237, 'synset': 'chair.n.01', 'synonyms': ['chair'], 'def': 'a seat for one person, with a support for the back', 'name': 'chair'}, {'frequency': 'r', 'id': 238, 'synset': 'chaise_longue.n.01', 'synonyms': ['chaise_longue', 'chaise', 'daybed'], 'def': 'a long chair; for reclining', 'name': 'chaise_longue'}, {'frequency': 'r', 'id': 239, 'synset': 'champagne.n.01', 'synonyms': ['champagne'], 'def': 'a white sparkling wine produced in Champagne or resembling that produced there', 'name': 'champagne'}, {'frequency': 'f', 'id': 240, 'synset': 'chandelier.n.01', 'synonyms': ['chandelier'], 'def': 'branched lighting fixture; often ornate; hangs from the ceiling', 'name': 'chandelier'}, {'frequency': 'r', 'id': 241, 'synset': 'chap.n.04', 'synonyms': ['chap'], 'def': 'leather leggings without a seat; worn over trousers by cowboys to protect their legs', 'name': 'chap'}, {'frequency': 'r', 'id': 242, 'synset': 'checkbook.n.01', 'synonyms': ['checkbook', 'chequebook'], 'def': 'a book issued to holders of checking accounts', 'name': 'checkbook'}, {'frequency': 'r', 'id': 243, 'synset': 'checkerboard.n.01', 'synonyms': ['checkerboard'], 'def': 'a board having 64 squares of two alternating colors', 'name': 'checkerboard'}, {'frequency': 'c', 'id': 244, 'synset': 'cherry.n.03', 'synonyms': ['cherry'], 'def': 'a red fruit with a single hard stone', 'name': 'cherry'}, {'frequency': 'r', 'id': 245, 'synset': 'chessboard.n.01', 'synonyms': ['chessboard'], 'def': 'a checkerboard used to play chess', 'name': 'chessboard'}, {'frequency': 'r', 'id': 246, 'synset': 'chest_of_drawers.n.01', 'synonyms': ['chest_of_drawers_(furniture)', 'bureau_(furniture)', 'chest_(furniture)'], 'def': 'furniture with drawers for keeping clothes', 'name': 'chest_of_drawers_(furniture)'}, {'frequency': 'c', 'id': 247, 'synset': 'chicken.n.02', 'synonyms': ['chicken_(animal)'], 'def': 'a domestic fowl bred for flesh or eggs', 'name': 'chicken_(animal)'}, {'frequency': 'c', 'id': 248, 'synset': 'chicken_wire.n.01', 'synonyms': ['chicken_wire'], 'def': 'a galvanized wire network with a hexagonal mesh; used to build fences', 'name': 'chicken_wire'}, {'frequency': 'r', 'id': 249, 'synset': 'chickpea.n.01', 'synonyms': ['chickpea', 'garbanzo'], 'def': 'the seed of the chickpea plant; usually dried', 'name': 'chickpea'}, {'frequency': 'r', 'id': 250, 'synset': 'chihuahua.n.03', 'synonyms': ['Chihuahua'], 'def': 'an old breed of tiny short-haired dog with protruding eyes from Mexico', 'name': 'Chihuahua'}, {'frequency': 'r', 'id': 251, 'synset': 'chili.n.02', 'synonyms': ['chili_(vegetable)', 'chili_pepper_(vegetable)', 'chilli_(vegetable)', 'chilly_(vegetable)', 'chile_(vegetable)'], 'def': 'very hot and finely tapering pepper of special pungency', 'name': 'chili_(vegetable)'}, {'frequency': 'r', 'id': 252, 'synset': 'chime.n.01', 'synonyms': ['chime', 'gong'], 'def': 'an instrument consisting of a set of bells that are struck with a hammer', 'name': 'chime'}, {'frequency': 'r', 'id': 253, 'synset': 'chinaware.n.01', 'synonyms': ['chinaware'], 'def': 'dishware made of high quality porcelain', 'name': 'chinaware'}, {'frequency': 'c', 'id': 254, 'synset': 'chip.n.04', 'synonyms': ['crisp_(potato_chip)', 'potato_chip'], 'def': 'a thin crisp slice of potato fried in deep fat', 'name': 'crisp_(potato_chip)'}, 
{'frequency': 'r', 'id': 255, 'synset': 'chip.n.06', 'synonyms': ['poker_chip'], 'def': 'a small disk-shaped counter used to represent money when gambling', 'name': 'poker_chip'}, {'frequency': 'c', 'id': 256, 'synset': 'chocolate_bar.n.01', 'synonyms': ['chocolate_bar'], 'def': 'a bar of chocolate candy', 'name': 'chocolate_bar'}, {'frequency': 'c', 'id': 257, 'synset': 'chocolate_cake.n.01', 'synonyms': ['chocolate_cake'], 'def': 'cake containing chocolate', 'name': 'chocolate_cake'}, {'frequency': 'r', 'id': 258, 'synset': 'chocolate_milk.n.01', 'synonyms': ['chocolate_milk'], 'def': 'milk flavored with chocolate syrup', 'name': 'chocolate_milk'}, {'frequency': 'r', 'id': 259, 'synset': 'chocolate_mousse.n.01', 'synonyms': ['chocolate_mousse'], 'def': 'dessert mousse made with chocolate', 'name': 'chocolate_mousse'}, {'frequency': 'f', 'id': 260, 'synset': 'choker.n.03', 'synonyms': ['choker', 'collar', 'neckband'], 'def': 'necklace that fits tightly around the neck', 'name': 'choker'}, {'frequency': 'f', 'id': 261, 'synset': 'chopping_board.n.01', 'synonyms': ['chopping_board', 'cutting_board', 'chopping_block'], 'def': 'a wooden board where meats or vegetables can be cut', 'name': 'chopping_board'}, {'frequency': 'c', 'id': 262, 'synset': 'chopstick.n.01', 'synonyms': ['chopstick'], 'def': 'one of a pair of slender sticks used as oriental tableware to eat food with', 'name': 'chopstick'}, {'frequency': 'f', 'id': 263, 'synset': 'christmas_tree.n.05', 'synonyms': ['Christmas_tree'], 'def': 'an ornamented evergreen used as a Christmas decoration', 'name': 'Christmas_tree'}, {'frequency': 'c', 'id': 264, 'synset': 'chute.n.02', 'synonyms': ['slide'], 'def': 'sloping channel through which things can descend', 'name': 'slide'}, {'frequency': 'r', 'id': 265, 'synset': 'cider.n.01', 'synonyms': ['cider', 'cyder'], 'def': 'a beverage made from juice pressed from apples', 'name': 'cider'}, {'frequency': 'r', 'id': 266, 'synset': 'cigar_box.n.01', 'synonyms': ['cigar_box'], 'def': 'a box for holding cigars', 'name': 'cigar_box'}, {'frequency': 'c', 'id': 267, 'synset': 'cigarette.n.01', 'synonyms': ['cigarette'], 'def': 'finely ground tobacco wrapped in paper; for smoking', 'name': 'cigarette'}, {'frequency': 'c', 'id': 268, 'synset': 'cigarette_case.n.01', 'synonyms': ['cigarette_case', 'cigarette_pack'], 'def': 'a small flat case for holding cigarettes', 'name': 'cigarette_case'}, {'frequency': 'f', 'id': 269, 'synset': 'cistern.n.02', 'synonyms': ['cistern', 'water_tank'], 'def': 'a tank that holds the water used to flush a toilet', 'name': 'cistern'}, {'frequency': 'r', 'id': 270, 'synset': 'clarinet.n.01', 'synonyms': ['clarinet'], 'def': 'a single-reed instrument with a straight tube', 'name': 'clarinet'}, {'frequency': 'r', 'id': 271, 'synset': 'clasp.n.01', 'synonyms': ['clasp'], 'def': 'a fastener (as a buckle or hook) that is used to hold two things together', 'name': 'clasp'}, {'frequency': 'c', 'id': 272, 'synset': 'cleansing_agent.n.01', 'synonyms': ['cleansing_agent', 'cleanser', 'cleaner'], 'def': 'a preparation used in cleaning something', 'name': 'cleansing_agent'}, {'frequency': 'r', 'id': 273, 'synset': 'clementine.n.01', 'synonyms': ['clementine'], 'def': 'a variety of mandarin orange', 'name': 'clementine'}, {'frequency': 'c', 'id': 274, 'synset': 'clip.n.03', 'synonyms': ['clip'], 'def': 'any of various small fasteners used to hold loose articles together', 'name': 'clip'}, {'frequency': 'c', 'id': 275, 'synset': 'clipboard.n.01', 'synonyms': ['clipboard'], 'def': 'a small 
writing board with a clip at the top for holding papers', 'name': 'clipboard'}, {'frequency': 'f', 'id': 276, 'synset': 'clock.n.01', 'synonyms': ['clock', 'timepiece', 'timekeeper'], 'def': 'a timepiece that shows the time of day', 'name': 'clock'}, {'frequency': 'f', 'id': 277, 'synset': 'clock_tower.n.01', 'synonyms': ['clock_tower'], 'def': 'a tower with a large clock visible high up on an outside face', 'name': 'clock_tower'}, {'frequency': 'c', 'id': 278, 'synset': 'clothes_hamper.n.01', 'synonyms': ['clothes_hamper', 'laundry_basket', 'clothes_basket'], 'def': 'a hamper that holds dirty clothes to be washed or wet clothes to be dried', 'name': 'clothes_hamper'}, {'frequency': 'c', 'id': 279, 'synset': 'clothespin.n.01', 'synonyms': ['clothespin', 'clothes_peg'], 'def': 'wood or plastic fastener; for holding clothes on a clothesline', 'name': 'clothespin'}, {'frequency': 'r', 'id': 280, 'synset': 'clutch_bag.n.01', 'synonyms': ['clutch_bag'], 'def': "a woman's strapless purse that is carried in the hand", 'name': 'clutch_bag'}, {'frequency': 'f', 'id': 281, 'synset': 'coaster.n.03', 'synonyms': ['coaster'], 'def': 'a covering (plate or mat) that protects the surface of a table', 'name': 'coaster'}, {'frequency': 'f', 'id': 282, 'synset': 'coat.n.01', 'synonyms': ['coat'], 'def': 'an outer garment that has sleeves and covers the body from shoulder down', 'name': 'coat'}, {'frequency': 'c', 'id': 283, 'synset': 'coat_hanger.n.01', 'synonyms': ['coat_hanger', 'clothes_hanger', 'dress_hanger'], 'def': "a hanger that is shaped like a person's shoulders", 'name': 'coat_hanger'}, {'frequency': 'r', 'id': 284, 'synset': 'coatrack.n.01', 'synonyms': ['coatrack', 'hatrack'], 'def': 'a rack with hooks for temporarily holding coats and hats', 'name': 'coatrack'}, {'frequency': 'c', 'id': 285, 'synset': 'cock.n.04', 'synonyms': ['cock', 'rooster'], 'def': 'adult male chicken', 'name': 'cock'}, {'frequency': 'c', 'id': 286, 'synset': 'coconut.n.02', 'synonyms': ['coconut', 'cocoanut'], 'def': 'large hard-shelled brown oval nut with a fibrous husk', 'name': 'coconut'}, {'frequency': 'r', 'id': 287, 'synset': 'coffee_filter.n.01', 'synonyms': ['coffee_filter'], 'def': 'filter (usually of paper) that passes the coffee and retains the coffee grounds', 'name': 'coffee_filter'}, {'frequency': 'f', 'id': 288, 'synset': 'coffee_maker.n.01', 'synonyms': ['coffee_maker', 'coffee_machine'], 'def': 'a kitchen appliance for brewing coffee automatically', 'name': 'coffee_maker'}, {'frequency': 'f', 'id': 289, 'synset': 'coffee_table.n.01', 'synonyms': ['coffee_table', 'cocktail_table'], 'def': 'low table where magazines can be placed and coffee or cocktails are served', 'name': 'coffee_table'}, {'frequency': 'c', 'id': 290, 'synset': 'coffeepot.n.01', 'synonyms': ['coffeepot'], 'def': 'tall pot in which coffee is brewed', 'name': 'coffeepot'}, {'frequency': 'r', 'id': 291, 'synset': 'coil.n.05', 'synonyms': ['coil'], 'def': 'tubing that is wound in a spiral', 'name': 'coil'}, {'frequency': 'c', 'id': 292, 'synset': 'coin.n.01', 'synonyms': ['coin'], 'def': 'a flat metal piece (usually a disc) used as money', 'name': 'coin'}, {'frequency': 'r', 'id': 293, 'synset': 'colander.n.01', 'synonyms': ['colander', 'cullender'], 'def': 'bowl-shaped strainer; used to wash or drain foods', 'name': 'colander'}, {'frequency': 'c', 'id': 294, 'synset': 'coleslaw.n.01', 'synonyms': ['coleslaw', 'slaw'], 'def': 'basically shredded cabbage', 'name': 'coleslaw'}, {'frequency': 'r', 'id': 295, 'synset': 'coloring_material.n.01', 
'synonyms': ['coloring_material', 'colouring_material'], 'def': 'any material used for its color', 'name': 'coloring_material'}, {'frequency': 'r', 'id': 296, 'synset': 'combination_lock.n.01', 'synonyms': ['combination_lock'], 'def': 'lock that can be opened only by turning dials in a special sequence', 'name': 'combination_lock'}, {'frequency': 'c', 'id': 297, 'synset': 'comforter.n.04', 'synonyms': ['pacifier', 'teething_ring'], 'def': 'device used for an infant to suck or bite on', 'name': 'pacifier'}, {'frequency': 'r', 'id': 298, 'synset': 'comic_book.n.01', 'synonyms': ['comic_book'], 'def': 'a magazine devoted to comic strips', 'name': 'comic_book'}, {'frequency': 'f', 'id': 299, 'synset': 'computer_keyboard.n.01', 'synonyms': ['computer_keyboard', 'keyboard_(computer)'], 'def': 'a keyboard that is a data input device for computers', 'name': 'computer_keyboard'}, {'frequency': 'r', 'id': 300, 'synset': 'concrete_mixer.n.01', 'synonyms': ['concrete_mixer', 'cement_mixer'], 'def': 'a machine with a large revolving drum in which cement/concrete is mixed', 'name': 'concrete_mixer'}, {'frequency': 'f', 'id': 301, 'synset': 'cone.n.01', 'synonyms': ['cone', 'traffic_cone'], 'def': 'a cone-shaped object used to direct traffic', 'name': 'cone'}, {'frequency': 'f', 'id': 302, 'synset': 'control.n.09', 'synonyms': ['control', 'controller'], 'def': 'a mechanism that controls the operation of a machine', 'name': 'control'}, {'frequency': 'r', 'id': 303, 'synset': 'convertible.n.01', 'synonyms': ['convertible_(automobile)'], 'def': 'a car that has top that can be folded or removed', 'name': 'convertible_(automobile)'}, {'frequency': 'r', 'id': 304, 'synset': 'convertible.n.03', 'synonyms': ['sofa_bed'], 'def': 'a sofa that can be converted into a bed', 'name': 'sofa_bed'}, {'frequency': 'c', 'id': 305, 'synset': 'cookie.n.01', 'synonyms': ['cookie', 'cooky', 'biscuit_(cookie)'], 'def': "any of various small flat sweet cakes (`biscuit' is the British term)", 'name': 'cookie'}, {'frequency': 'r', 'id': 306, 'synset': 'cookie_jar.n.01', 'synonyms': ['cookie_jar', 'cooky_jar'], 'def': 'a jar in which cookies are kept (and sometimes money is hidden)', 'name': 'cookie_jar'}, {'frequency': 'r', 'id': 307, 'synset': 'cooking_utensil.n.01', 'synonyms': ['cooking_utensil'], 'def': 'a kitchen utensil made of material that does not melt easily; used for cooking', 'name': 'cooking_utensil'}, {'frequency': 'f', 'id': 308, 'synset': 'cooler.n.01', 'synonyms': ['cooler_(for_food)', 'ice_chest'], 'def': 'an insulated box for storing food often with ice', 'name': 'cooler_(for_food)'}, {'frequency': 'c', 'id': 309, 'synset': 'cork.n.04', 'synonyms': ['cork_(bottle_plug)', 'bottle_cork'], 'def': 'the plug in the mouth of a bottle (especially a wine bottle)', 'name': 'cork_(bottle_plug)'}, {'frequency': 'r', 'id': 310, 'synset': 'corkboard.n.01', 'synonyms': ['corkboard'], 'def': 'a sheet consisting of cork granules', 'name': 'corkboard'}, {'frequency': 'r', 'id': 311, 'synset': 'corkscrew.n.01', 'synonyms': ['corkscrew', 'bottle_screw'], 'def': 'a bottle opener that pulls corks', 'name': 'corkscrew'}, {'frequency': 'c', 'id': 312, 'synset': 'corn.n.03', 'synonyms': ['edible_corn', 'corn', 'maize'], 'def': 'ears of corn that can be prepared and served for human food', 'name': 'edible_corn'}, {'frequency': 'r', 'id': 313, 'synset': 'cornbread.n.01', 'synonyms': ['cornbread'], 'def': 'bread made primarily of cornmeal', 'name': 'cornbread'}, {'frequency': 'c', 'id': 314, 'synset': 'cornet.n.01', 'synonyms': ['cornet', 
'horn', 'trumpet'], 'def': 'a brass musical instrument with a narrow tube and a flared bell and many valves', 'name': 'cornet'}, {'frequency': 'c', 'id': 315, 'synset': 'cornice.n.01', 'synonyms': ['cornice', 'valance', 'valance_board', 'pelmet'], 'def': 'a decorative framework to conceal curtain fixtures at the top of a window casing', 'name': 'cornice'}, {'frequency': 'r', 'id': 316, 'synset': 'cornmeal.n.01', 'synonyms': ['cornmeal'], 'def': 'coarsely ground corn', 'name': 'cornmeal'}, {'frequency': 'r', 'id': 317, 'synset': 'corset.n.01', 'synonyms': ['corset', 'girdle'], 'def': "a woman's close-fitting foundation garment", 'name': 'corset'}, {'frequency': 'r', 'id': 318, 'synset': 'cos.n.02', 'synonyms': ['romaine_lettuce'], 'def': 'lettuce with long dark-green leaves in a loosely packed elongated head', 'name': 'romaine_lettuce'}, {'frequency': 'c', 'id': 319, 'synset': 'costume.n.04', 'synonyms': ['costume'], 'def': 'the attire characteristic of a country or a time or a social class', 'name': 'costume'}, {'frequency': 'r', 'id': 320, 'synset': 'cougar.n.01', 'synonyms': ['cougar', 'puma', 'catamount', 'mountain_lion', 'panther'], 'def': 'large American feline resembling a lion', 'name': 'cougar'}, {'frequency': 'r', 'id': 321, 'synset': 'coverall.n.01', 'synonyms': ['coverall'], 'def': 'a loose-fitting protective garment that is worn over other clothing', 'name': 'coverall'}, {'frequency': 'r', 'id': 322, 'synset': 'cowbell.n.01', 'synonyms': ['cowbell'], 'def': 'a bell hung around the neck of cow so that the cow can be easily located', 'name': 'cowbell'}, {'frequency': 'f', 'id': 323, 'synset': 'cowboy_hat.n.01', 'synonyms': ['cowboy_hat', 'ten-gallon_hat'], 'def': 'a hat with a wide brim and a soft crown; worn by American ranch hands', 'name': 'cowboy_hat'}, {'frequency': 'r', 'id': 324, 'synset': 'crab.n.01', 'synonyms': ['crab_(animal)'], 'def': 'decapod having eyes on short stalks and a broad flattened shell and pincers', 'name': 'crab_(animal)'}, {'frequency': 'c', 'id': 325, 'synset': 'cracker.n.01', 'synonyms': ['cracker'], 'def': 'a thin crisp wafer', 'name': 'cracker'}, {'frequency': 'r', 'id': 326, 'synset': 'crape.n.01', 'synonyms': ['crape', 'crepe', 'French_pancake'], 'def': 'small very thin pancake', 'name': 'crape'}, {'frequency': 'f', 'id': 327, 'synset': 'crate.n.01', 'synonyms': ['crate'], 'def': 'a rugged box (usually made of wood); used for shipping', 'name': 'crate'}, {'frequency': 'r', 'id': 328, 'synset': 'crayon.n.01', 'synonyms': ['crayon', 'wax_crayon'], 'def': 'writing or drawing implement made of a colored stick of composition wax', 'name': 'crayon'}, {'frequency': 'r', 'id': 329, 'synset': 'cream_pitcher.n.01', 'synonyms': ['cream_pitcher'], 'def': 'a small pitcher for serving cream', 'name': 'cream_pitcher'}, {'frequency': 'r', 'id': 330, 'synset': 'credit_card.n.01', 'synonyms': ['credit_card', 'charge_card', 'debit_card'], 'def': 'a card, usually plastic, used to pay for goods and services', 'name': 'credit_card'}, {'frequency': 'c', 'id': 331, 'synset': 'crescent_roll.n.01', 'synonyms': ['crescent_roll', 'croissant'], 'def': 'very rich flaky crescent-shaped roll', 'name': 'crescent_roll'}, {'frequency': 'c', 'id': 332, 'synset': 'crib.n.01', 'synonyms': ['crib', 'cot'], 'def': 'baby bed with high sides made of slats', 'name': 'crib'}, {'frequency': 'c', 'id': 333, 'synset': 'crock.n.03', 'synonyms': ['crock_pot', 'earthenware_jar'], 'def': 'an earthen jar (made of baked clay)', 'name': 'crock_pot'}, {'frequency': 'f', 'id': 334, 'synset': 
'crossbar.n.01', 'synonyms': ['crossbar'], 'def': 'a horizontal bar that goes across something', 'name': 'crossbar'}, {'frequency': 'r', 'id': 335, 'synset': 'crouton.n.01', 'synonyms': ['crouton'], 'def': 'a small piece of toasted or fried bread; served in soup or salads', 'name': 'crouton'}, {'frequency': 'r', 'id': 336, 'synset': 'crow.n.01', 'synonyms': ['crow'], 'def': 'black birds having a raucous call', 'name': 'crow'}, {'frequency': 'c', 'id': 337, 'synset': 'crown.n.04', 'synonyms': ['crown'], 'def': 'an ornamental jeweled headdress signifying sovereignty', 'name': 'crown'}, {'frequency': 'c', 'id': 338, 'synset': 'crucifix.n.01', 'synonyms': ['crucifix'], 'def': 'representation of the cross on which Jesus died', 'name': 'crucifix'}, {'frequency': 'c', 'id': 339, 'synset': 'cruise_ship.n.01', 'synonyms': ['cruise_ship', 'cruise_liner'], 'def': 'a passenger ship used commercially for pleasure cruises', 'name': 'cruise_ship'}, {'frequency': 'c', 'id': 340, 'synset': 'cruiser.n.01', 'synonyms': ['police_cruiser', 'patrol_car', 'police_car', 'squad_car'], 'def': 'a car in which policemen cruise the streets', 'name': 'police_cruiser'}, {'frequency': 'c', 'id': 341, 'synset': 'crumb.n.03', 'synonyms': ['crumb'], 'def': 'small piece of e.g. bread or cake', 'name': 'crumb'}, {'frequency': 'r', 'id': 342, 'synset': 'crutch.n.01', 'synonyms': ['crutch'], 'def': 'a wooden or metal staff that fits under the armpit and reaches to the ground', 'name': 'crutch'}, {'frequency': 'c', 'id': 343, 'synset': 'cub.n.03', 'synonyms': ['cub_(animal)'], 'def': 'the young of certain carnivorous mammals such as the bear or wolf or lion', 'name': 'cub_(animal)'}, {'frequency': 'r', 'id': 344, 'synset': 'cube.n.05', 'synonyms': ['cube', 'square_block'], 'def': 'a block in the (approximate) shape of a cube', 'name': 'cube'}, {'frequency': 'f', 'id': 345, 'synset': 'cucumber.n.02', 'synonyms': ['cucumber', 'cuke'], 'def': 'cylindrical green fruit with thin green rind and white flesh eaten as a vegetable', 'name': 'cucumber'}, {'frequency': 'c', 'id': 346, 'synset': 'cufflink.n.01', 'synonyms': ['cufflink'], 'def': 'jewelry consisting of linked buttons used to fasten the cuffs of a shirt', 'name': 'cufflink'}, {'frequency': 'f', 'id': 347, 'synset': 'cup.n.01', 'synonyms': ['cup'], 'def': 'a small open container usually used for drinking; usually has a handle', 'name': 'cup'}, {'frequency': 'c', 'id': 348, 'synset': 'cup.n.08', 'synonyms': ['trophy_cup'], 'def': 'a metal vessel with handles that is awarded as a trophy to a competition winner', 'name': 'trophy_cup'}, {'frequency': 'c', 'id': 349, 'synset': 'cupcake.n.01', 'synonyms': ['cupcake'], 'def': 'small cake baked in a muffin tin', 'name': 'cupcake'}, {'frequency': 'r', 'id': 350, 'synset': 'curler.n.01', 'synonyms': ['hair_curler', 'hair_roller', 'hair_crimper'], 'def': 'a cylindrical tube around which the hair is wound to curl it', 'name': 'hair_curler'}, {'frequency': 'r', 'id': 351, 'synset': 'curling_iron.n.01', 'synonyms': ['curling_iron'], 'def': 'a cylindrical home appliance that heats hair that has been curled around it', 'name': 'curling_iron'}, {'frequency': 'f', 'id': 352, 'synset': 'curtain.n.01', 'synonyms': ['curtain', 'drapery'], 'def': 'hanging cloth used as a blind (especially for a window)', 'name': 'curtain'}, {'frequency': 'f', 'id': 353, 'synset': 'cushion.n.03', 'synonyms': ['cushion'], 'def': 'a soft bag filled with air or padding such as feathers or foam rubber', 'name': 'cushion'}, {'frequency': 'r', 'id': 354, 'synset': 
'custard.n.01', 'synonyms': ['custard'], 'def': 'sweetened mixture of milk and eggs baked or boiled or frozen', 'name': 'custard'}, {'frequency': 'c', 'id': 355, 'synset': 'cutter.n.06', 'synonyms': ['cutting_tool'], 'def': 'a cutting implement; a tool for cutting', 'name': 'cutting_tool'}, {'frequency': 'r', 'id': 356, 'synset': 'cylinder.n.04', 'synonyms': ['cylinder'], 'def': 'a cylindrical container', 'name': 'cylinder'}, {'frequency': 'r', 'id': 357, 'synset': 'cymbal.n.01', 'synonyms': ['cymbal'], 'def': 'a percussion instrument consisting of a concave brass disk', 'name': 'cymbal'}, {'frequency': 'r', 'id': 358, 'synset': 'dachshund.n.01', 'synonyms': ['dachshund', 'dachsie', 'badger_dog'], 'def': 'small long-bodied short-legged breed of dog having a short sleek coat and long drooping ears', 'name': 'dachshund'}, {'frequency': 'r', 'id': 359, 'synset': 'dagger.n.01', 'synonyms': ['dagger'], 'def': 'a short knife with a pointed blade used for piercing or stabbing', 'name': 'dagger'}, {'frequency': 'r', 'id': 360, 'synset': 'dartboard.n.01', 'synonyms': ['dartboard'], 'def': 'a circular board of wood or cork used as the target in the game of darts', 'name': 'dartboard'}, {'frequency': 'r', 'id': 361, 'synset': 'date.n.08', 'synonyms': ['date_(fruit)'], 'def': 'sweet edible fruit of the date palm with a single long woody seed', 'name': 'date_(fruit)'}, {'frequency': 'f', 'id': 362, 'synset': 'deck_chair.n.01', 'synonyms': ['deck_chair', 'beach_chair'], 'def': 'a folding chair for use outdoors; a wooden frame supports a length of canvas', 'name': 'deck_chair'}, {'frequency': 'c', 'id': 363, 'synset': 'deer.n.01', 'synonyms': ['deer', 'cervid'], 'def': "distinguished from Bovidae by the male's having solid deciduous antlers", 'name': 'deer'}, {'frequency': 'c', 'id': 364, 'synset': 'dental_floss.n.01', 'synonyms': ['dental_floss', 'floss'], 'def': 'a soft thread for cleaning the spaces between the teeth', 'name': 'dental_floss'}, {'frequency': 'f', 'id': 365, 'synset': 'desk.n.01', 'synonyms': ['desk'], 'def': 'a piece of furniture with a writing surface and usually drawers or other compartments', 'name': 'desk'}, {'frequency': 'r', 'id': 366, 'synset': 'detergent.n.01', 'synonyms': ['detergent'], 'def': 'a surface-active chemical widely used in industry and laundering', 'name': 'detergent'}, {'frequency': 'c', 'id': 367, 'synset': 'diaper.n.01', 'synonyms': ['diaper'], 'def': 'garment consisting of a folded cloth drawn up between the legs and fastened at the waist', 'name': 'diaper'}, {'frequency': 'r', 'id': 368, 'synset': 'diary.n.01', 'synonyms': ['diary', 'journal'], 'def': 'a daily written record of (usually personal) experiences and observations', 'name': 'diary'}, {'frequency': 'r', 'id': 369, 'synset': 'die.n.01', 'synonyms': ['die', 'dice'], 'def': 'a small cube with 1 to 6 spots on the six faces; used in gambling', 'name': 'die'}, {'frequency': 'r', 'id': 370, 'synset': 'dinghy.n.01', 'synonyms': ['dinghy', 'dory', 'rowboat'], 'def': 'a small boat of shallow draft with seats and oars with which it is propelled', 'name': 'dinghy'}, {'frequency': 'f', 'id': 371, 'synset': 'dining_table.n.01', 'synonyms': ['dining_table'], 'def': 'a table at which meals are served', 'name': 'dining_table'}, {'frequency': 'r', 'id': 372, 'synset': 'dinner_jacket.n.01', 'synonyms': ['tux', 'tuxedo'], 'def': 'semiformal evening dress for men', 'name': 'tux'}, {'frequency': 'c', 'id': 373, 'synset': 'dish.n.01', 'synonyms': ['dish'], 'def': 'a piece of dishware normally used as a container for 
holding or serving food', 'name': 'dish'}, {'frequency': 'c', 'id': 374, 'synset': 'dish.n.05', 'synonyms': ['dish_antenna'], 'def': 'directional antenna consisting of a parabolic reflector', 'name': 'dish_antenna'}, {'frequency': 'c', 'id': 375, 'synset': 'dishrag.n.01', 'synonyms': ['dishrag', 'dishcloth'], 'def': 'a cloth for washing dishes', 'name': 'dishrag'}, {'frequency': 'c', 'id': 376, 'synset': 'dishtowel.n.01', 'synonyms': ['dishtowel', 'tea_towel'], 'def': 'a towel for drying dishes', 'name': 'dishtowel'}, {'frequency': 'f', 'id': 377, 'synset': 'dishwasher.n.01', 'synonyms': ['dishwasher', 'dishwashing_machine'], 'def': 'a machine for washing dishes', 'name': 'dishwasher'}, {'frequency': 'r', 'id': 378, 'synset': 'dishwasher_detergent.n.01', 'synonyms': ['dishwasher_detergent', 'dishwashing_detergent', 'dishwashing_liquid'], 'def': 'a low-sudsing detergent designed for use in dishwashers', 'name': 'dishwasher_detergent'}, {'frequency': 'r', 'id': 379, 'synset': 'diskette.n.01', 'synonyms': ['diskette', 'floppy', 'floppy_disk'], 'def': 'a small plastic magnetic disk enclosed in a stiff envelope used to store data', 'name': 'diskette'}, {'frequency': 'c', 'id': 380, 'synset': 'dispenser.n.01', 'synonyms': ['dispenser'], 'def': 'a container so designed that the contents can be used in prescribed amounts', 'name': 'dispenser'}, {'frequency': 'c', 'id': 381, 'synset': 'dixie_cup.n.01', 'synonyms': ['Dixie_cup', 'paper_cup'], 'def': 'a disposable cup made of paper; for holding drinks', 'name': 'Dixie_cup'}, {'frequency': 'f', 'id': 382, 'synset': 'dog.n.01', 'synonyms': ['dog'], 'def': 'a common domesticated dog', 'name': 'dog'}, {'frequency': 'f', 'id': 383, 'synset': 'dog_collar.n.01', 'synonyms': ['dog_collar'], 'def': 'a collar for a dog', 'name': 'dog_collar'}, {'frequency': 'c', 'id': 384, 'synset': 'doll.n.01', 'synonyms': ['doll'], 'def': 'a toy replica of a HUMAN (NOT AN ANIMAL)', 'name': 'doll'}, {'frequency': 'r', 'id': 385, 'synset': 'dollar.n.02', 'synonyms': ['dollar', 'dollar_bill', 'one_dollar_bill'], 'def': 'a piece of paper money worth one dollar', 'name': 'dollar'}, {'frequency': 'r', 'id': 386, 'synset': 'dolphin.n.02', 'synonyms': ['dolphin'], 'def': 'any of various small toothed whales with a beaklike snout; larger than porpoises', 'name': 'dolphin'}, {'frequency': 'c', 'id': 387, 'synset': 'domestic_ass.n.01', 'synonyms': ['domestic_ass', 'donkey'], 'def': 'domestic beast of burden descended from the African wild ass; patient but stubborn', 'name': 'domestic_ass'}, {'frequency': 'r', 'id': 388, 'synset': 'domino.n.03', 'synonyms': ['eye_mask'], 'def': 'a mask covering the upper part of the face but with holes for the eyes', 'name': 'eye_mask'}, {'frequency': 'r', 'id': 389, 'synset': 'doorbell.n.01', 'synonyms': ['doorbell', 'buzzer'], 'def': 'a button at an outer door that gives a ringing or buzzing signal when pushed', 'name': 'doorbell'}, {'frequency': 'f', 'id': 390, 'synset': 'doorknob.n.01', 'synonyms': ['doorknob', 'doorhandle'], 'def': "a knob used to open a door (often called `doorhandle' in Great Britain)", 'name': 'doorknob'}, {'frequency': 'c', 'id': 391, 'synset': 'doormat.n.02', 'synonyms': ['doormat', 'welcome_mat'], 'def': 'a mat placed outside an exterior door for wiping the shoes before entering', 'name': 'doormat'}, {'frequency': 'f', 'id': 392, 'synset': 'doughnut.n.02', 'synonyms': ['doughnut', 'donut'], 'def': 'a small ring-shaped friedcake', 'name': 'doughnut'}, {'frequency': 'r', 'id': 393, 'synset': 'dove.n.01', 'synonyms': ['dove'], 
'def': 'any of numerous small pigeons', 'name': 'dove'}, {'frequency': 'r', 'id': 394, 'synset': 'dragonfly.n.01', 'synonyms': ['dragonfly'], 'def': 'slender-bodied non-stinging insect having iridescent wings that are outspread at rest', 'name': 'dragonfly'}, {'frequency': 'f', 'id': 395, 'synset': 'drawer.n.01', 'synonyms': ['drawer'], 'def': 'a boxlike container in a piece of furniture; made so as to slide in and out', 'name': 'drawer'}, {'frequency': 'c', 'id': 396, 'synset': 'drawers.n.01', 'synonyms': ['underdrawers', 'boxers', 'boxershorts'], 'def': 'underpants worn by men', 'name': 'underdrawers'}, {'frequency': 'f', 'id': 397, 'synset': 'dress.n.01', 'synonyms': ['dress', 'frock'], 'def': 'a one-piece garment for a woman; has skirt and bodice', 'name': 'dress'}, {'frequency': 'c', 'id': 398, 'synset': 'dress_hat.n.01', 'synonyms': ['dress_hat', 'high_hat', 'opera_hat', 'silk_hat', 'top_hat'], 'def': "a man's hat with a tall crown; usually covered with silk or with beaver fur", 'name': 'dress_hat'}, {'frequency': 'c', 'id': 399, 'synset': 'dress_suit.n.01', 'synonyms': ['dress_suit'], 'def': 'formalwear consisting of full evening dress for men', 'name': 'dress_suit'}, {'frequency': 'c', 'id': 400, 'synset': 'dresser.n.05', 'synonyms': ['dresser'], 'def': 'a cabinet with shelves', 'name': 'dresser'}, {'frequency': 'c', 'id': 401, 'synset': 'drill.n.01', 'synonyms': ['drill'], 'def': 'a tool with a sharp rotating point for making holes in hard materials', 'name': 'drill'}, {'frequency': 'r', 'id': 402, 'synset': 'drinking_fountain.n.01', 'synonyms': ['drinking_fountain'], 'def': 'a public fountain to provide a jet of drinking water', 'name': 'drinking_fountain'}, {'frequency': 'r', 'id': 403, 'synset': 'drone.n.04', 'synonyms': ['drone'], 'def': 'an aircraft without a pilot that is operated by remote control', 'name': 'drone'}, {'frequency': 'r', 'id': 404, 'synset': 'dropper.n.01', 'synonyms': ['dropper', 'eye_dropper'], 'def': 'pipet consisting of a small tube with a vacuum bulb at one end for drawing liquid in and releasing it a drop at a time', 'name': 'dropper'}, {'frequency': 'c', 'id': 405, 'synset': 'drum.n.01', 'synonyms': ['drum_(musical_instrument)'], 'def': 'a musical percussion instrument; usually consists of a hollow cylinder with a membrane stretched across each end', 'name': 'drum_(musical_instrument)'}, {'frequency': 'r', 'id': 406, 'synset': 'drumstick.n.02', 'synonyms': ['drumstick'], 'def': 'a stick used for playing a drum', 'name': 'drumstick'}, {'frequency': 'f', 'id': 407, 'synset': 'duck.n.01', 'synonyms': ['duck'], 'def': 'small web-footed broad-billed swimming bird', 'name': 'duck'}, {'frequency': 'r', 'id': 408, 'synset': 'duckling.n.02', 'synonyms': ['duckling'], 'def': 'young duck', 'name': 'duckling'}, {'frequency': 'c', 'id': 409, 'synset': 'duct_tape.n.01', 'synonyms': ['duct_tape'], 'def': 'a wide silvery adhesive tape', 'name': 'duct_tape'}, {'frequency': 'f', 'id': 410, 'synset': 'duffel_bag.n.01', 'synonyms': ['duffel_bag', 'duffle_bag', 'duffel', 'duffle'], 'def': 'a large cylindrical bag of heavy cloth', 'name': 'duffel_bag'}, {'frequency': 'r', 'id': 411, 'synset': 'dumbbell.n.01', 'synonyms': ['dumbbell'], 'def': 'an exercising weight with two ball-like ends connected by a short handle', 'name': 'dumbbell'}, {'frequency': 'c', 'id': 412, 'synset': 'dumpster.n.01', 'synonyms': ['dumpster'], 'def': 'a container designed to receive and transport and dump waste', 'name': 'dumpster'}, {'frequency': 'r', 'id': 413, 'synset': 'dustpan.n.02', 
'synonyms': ['dustpan'], 'def': 'a short-handled receptacle into which dust can be swept', 'name': 'dustpan'}, {'frequency': 'r', 'id': 414, 'synset': 'dutch_oven.n.02', 'synonyms': ['Dutch_oven'], 'def': 'iron or earthenware cooking pot; used for stews', 'name': 'Dutch_oven'}, {'frequency': 'c', 'id': 415, 'synset': 'eagle.n.01', 'synonyms': ['eagle'], 'def': 'large birds of prey noted for their broad wings and strong soaring flight', 'name': 'eagle'}, {'frequency': 'f', 'id': 416, 'synset': 'earphone.n.01', 'synonyms': ['earphone', 'earpiece', 'headphone'], 'def': 'device for listening to audio that is held over or inserted into the ear', 'name': 'earphone'}, {'frequency': 'r', 'id': 417, 'synset': 'earplug.n.01', 'synonyms': ['earplug'], 'def': 'a soft plug that is inserted into the ear canal to block sound', 'name': 'earplug'}, {'frequency': 'f', 'id': 418, 'synset': 'earring.n.01', 'synonyms': ['earring'], 'def': 'jewelry to ornament the ear', 'name': 'earring'}, {'frequency': 'c', 'id': 419, 'synset': 'easel.n.01', 'synonyms': ['easel'], 'def': "an upright tripod for displaying something (usually an artist's canvas)", 'name': 'easel'}, {'frequency': 'r', 'id': 420, 'synset': 'eclair.n.01', 'synonyms': ['eclair'], 'def': 'oblong cream puff', 'name': 'eclair'}, {'frequency': 'r', 'id': 421, 'synset': 'eel.n.01', 'synonyms': ['eel'], 'def': 'an elongate fish with fatty flesh', 'name': 'eel'}, {'frequency': 'f', 'id': 422, 'synset': 'egg.n.02', 'synonyms': ['egg', 'eggs'], 'def': 'oval reproductive body of a fowl (especially a hen) used as food', 'name': 'egg'}, {'frequency': 'r', 'id': 423, 'synset': 'egg_roll.n.01', 'synonyms': ['egg_roll', 'spring_roll'], 'def': 'minced vegetables and meat wrapped in a pancake and fried', 'name': 'egg_roll'}, {'frequency': 'c', 'id': 424, 'synset': 'egg_yolk.n.01', 'synonyms': ['egg_yolk', 'yolk_(egg)'], 'def': 'the yellow spherical part of an egg', 'name': 'egg_yolk'}, {'frequency': 'c', 'id': 425, 'synset': 'eggbeater.n.02', 'synonyms': ['eggbeater', 'eggwhisk'], 'def': 'a mixer for beating eggs or whipping cream', 'name': 'eggbeater'}, {'frequency': 'c', 'id': 426, 'synset': 'eggplant.n.01', 'synonyms': ['eggplant', 'aubergine'], 'def': 'egg-shaped vegetable having a shiny skin typically dark purple', 'name': 'eggplant'}, {'frequency': 'r', 'id': 427, 'synset': 'electric_chair.n.01', 'synonyms': ['electric_chair'], 'def': 'a chair-shaped instrument of execution by electrocution', 'name': 'electric_chair'}, {'frequency': 'f', 'id': 428, 'synset': 'electric_refrigerator.n.01', 'synonyms': ['refrigerator'], 'def': 'a refrigerator in which the coolant is pumped around by an electric motor', 'name': 'refrigerator'}, {'frequency': 'f', 'id': 429, 'synset': 'elephant.n.01', 'synonyms': ['elephant'], 'def': 'a common elephant', 'name': 'elephant'}, {'frequency': 'r', 'id': 430, 'synset': 'elk.n.01', 'synonyms': ['elk', 'moose'], 'def': 'large northern deer with enormous flattened antlers in the male', 'name': 'elk'}, {'frequency': 'c', 'id': 431, 'synset': 'envelope.n.01', 'synonyms': ['envelope'], 'def': 'a flat (usually rectangular) container for a letter, thin package, etc.', 'name': 'envelope'}, {'frequency': 'c', 'id': 432, 'synset': 'eraser.n.01', 'synonyms': ['eraser'], 'def': 'an implement used to erase something', 'name': 'eraser'}, {'frequency': 'r', 'id': 433, 'synset': 'escargot.n.01', 'synonyms': ['escargot'], 'def': 'edible snail usually served in the shell with a sauce of melted butter and garlic', 'name': 'escargot'}, {'frequency': 'r', 
'id': 434, 'synset': 'eyepatch.n.01', 'synonyms': ['eyepatch'], 'def': 'a protective cloth covering for an injured eye', 'name': 'eyepatch'}, {'frequency': 'r', 'id': 435, 'synset': 'falcon.n.01', 'synonyms': ['falcon'], 'def': 'birds of prey having long pointed powerful wings adapted for swift flight', 'name': 'falcon'}, {'frequency': 'f', 'id': 436, 'synset': 'fan.n.01', 'synonyms': ['fan'], 'def': 'a device for creating a current of air by movement of a surface or surfaces', 'name': 'fan'}, {'frequency': 'f', 'id': 437, 'synset': 'faucet.n.01', 'synonyms': ['faucet', 'spigot', 'tap'], 'def': 'a regulator for controlling the flow of a liquid from a reservoir', 'name': 'faucet'}, {'frequency': 'r', 'id': 438, 'synset': 'fedora.n.01', 'synonyms': ['fedora'], 'def': 'a hat made of felt with a creased crown', 'name': 'fedora'}, {'frequency': 'r', 'id': 439, 'synset': 'ferret.n.02', 'synonyms': ['ferret'], 'def': 'domesticated albino variety of the European polecat bred for hunting rats and rabbits', 'name': 'ferret'}, {'frequency': 'c', 'id': 440, 'synset': 'ferris_wheel.n.01', 'synonyms': ['Ferris_wheel'], 'def': 'a large wheel with suspended seats that remain upright as the wheel rotates', 'name': 'Ferris_wheel'}, {'frequency': 'r', 'id': 441, 'synset': 'ferry.n.01', 'synonyms': ['ferry', 'ferryboat'], 'def': 'a boat that transports people or vehicles across a body of water and operates on a regular schedule', 'name': 'ferry'}, {'frequency': 'r', 'id': 442, 'synset': 'fig.n.04', 'synonyms': ['fig_(fruit)'], 'def': 'fleshy sweet pear-shaped yellowish or purple fruit eaten fresh or preserved or dried', 'name': 'fig_(fruit)'}, {'frequency': 'c', 'id': 443, 'synset': 'fighter.n.02', 'synonyms': ['fighter_jet', 'fighter_aircraft', 'attack_aircraft'], 'def': 'a high-speed military or naval airplane designed to destroy enemy targets', 'name': 'fighter_jet'}, {'frequency': 'f', 'id': 444, 'synset': 'figurine.n.01', 'synonyms': ['figurine'], 'def': 'a small carved or molded figure', 'name': 'figurine'}, {'frequency': 'c', 'id': 445, 'synset': 'file.n.03', 'synonyms': ['file_cabinet', 'filing_cabinet'], 'def': 'office furniture consisting of a container for keeping papers in order', 'name': 'file_cabinet'}, {'frequency': 'r', 'id': 446, 'synset': 'file.n.04', 'synonyms': ['file_(tool)'], 'def': 'a steel hand tool with small sharp teeth on some or all of its surfaces; used for smoothing wood or metal', 'name': 'file_(tool)'}, {'frequency': 'f', 'id': 447, 'synset': 'fire_alarm.n.02', 'synonyms': ['fire_alarm', 'smoke_alarm'], 'def': 'an alarm that is tripped off by fire or smoke', 'name': 'fire_alarm'}, {'frequency': 'c', 'id': 448, 'synset': 'fire_engine.n.01', 'synonyms': ['fire_engine', 'fire_truck'], 'def': 'large trucks that carry firefighters and equipment to the site of a fire', 'name': 'fire_engine'}, {'frequency': 'c', 'id': 449, 'synset': 'fire_extinguisher.n.01', 'synonyms': ['fire_extinguisher', 'extinguisher'], 'def': 'a manually operated device for extinguishing small fires', 'name': 'fire_extinguisher'}, {'frequency': 'c', 'id': 450, 'synset': 'fire_hose.n.01', 'synonyms': ['fire_hose'], 'def': 'a large hose that carries water from a fire hydrant to the site of the fire', 'name': 'fire_hose'}, {'frequency': 'f', 'id': 451, 'synset': 'fireplace.n.01', 'synonyms': ['fireplace'], 'def': 'an open recess in a wall at the base of a chimney where a fire can be built', 'name': 'fireplace'}, {'frequency': 'f', 'id': 452, 'synset': 'fireplug.n.01', 'synonyms': ['fireplug', 'fire_hydrant', 
'hydrant'], 'def': 'an upright hydrant for drawing water to use in fighting a fire', 'name': 'fireplug'}, {'frequency': 'c', 'id': 453, 'synset': 'fish.n.01', 'synonyms': ['fish'], 'def': 'any of various mostly cold-blooded aquatic vertebrates usually having scales and breathing through gills', 'name': 'fish'}, {'frequency': 'r', 'id': 454, 'synset': 'fish.n.02', 'synonyms': ['fish_(food)'], 'def': 'the flesh of fish used as food', 'name': 'fish_(food)'}, {'frequency': 'r', 'id': 455, 'synset': 'fishbowl.n.02', 'synonyms': ['fishbowl', 'goldfish_bowl'], 'def': 'a transparent bowl in which small fish are kept', 'name': 'fishbowl'}, {'frequency': 'r', 'id': 456, 'synset': 'fishing_boat.n.01', 'synonyms': ['fishing_boat', 'fishing_vessel'], 'def': 'a vessel for fishing', 'name': 'fishing_boat'}, {'frequency': 'c', 'id': 457, 'synset': 'fishing_rod.n.01', 'synonyms': ['fishing_rod', 'fishing_pole'], 'def': 'a rod that is used in fishing to extend the fishing line', 'name': 'fishing_rod'}, {'frequency': 'f', 'id': 458, 'synset': 'flag.n.01', 'synonyms': ['flag'], 'def': 'emblem usually consisting of a rectangular piece of cloth of distinctive design (do not include pole)', 'name': 'flag'}, {'frequency': 'f', 'id': 459, 'synset': 'flagpole.n.02', 'synonyms': ['flagpole', 'flagstaff'], 'def': 'a tall staff or pole on which a flag is raised', 'name': 'flagpole'}, {'frequency': 'c', 'id': 460, 'synset': 'flamingo.n.01', 'synonyms': ['flamingo'], 'def': 'large pink web-footed bird with down-bent bill', 'name': 'flamingo'}, {'frequency': 'c', 'id': 461, 'synset': 'flannel.n.01', 'synonyms': ['flannel'], 'def': 'a soft light woolen fabric; used for clothing', 'name': 'flannel'}, {'frequency': 'r', 'id': 462, 'synset': 'flash.n.10', 'synonyms': ['flash', 'flashbulb'], 'def': 'a lamp for providing momentary light to take a photograph', 'name': 'flash'}, {'frequency': 'c', 'id': 463, 'synset': 'flashlight.n.01', 'synonyms': ['flashlight', 'torch'], 'def': 'a small portable battery-powered electric lamp', 'name': 'flashlight'}, {'frequency': 'r', 'id': 464, 'synset': 'fleece.n.03', 'synonyms': ['fleece'], 'def': 'a soft bulky fabric with deep pile; used chiefly for clothing', 'name': 'fleece'}, {'frequency': 'f', 'id': 465, 'synset': 'flip-flop.n.02', 'synonyms': ['flip-flop_(sandal)'], 'def': 'a backless sandal held to the foot by a thong between two toes', 'name': 'flip-flop_(sandal)'}, {'frequency': 'c', 'id': 466, 'synset': 'flipper.n.01', 'synonyms': ['flipper_(footwear)', 'fin_(footwear)'], 'def': 'a shoe to aid a person in swimming', 'name': 'flipper_(footwear)'}, {'frequency': 'f', 'id': 467, 'synset': 'flower_arrangement.n.01', 'synonyms': ['flower_arrangement', 'floral_arrangement'], 'def': 'a decorative arrangement of flowers', 'name': 'flower_arrangement'}, {'frequency': 'c', 'id': 468, 'synset': 'flute.n.02', 'synonyms': ['flute_glass', 'champagne_flute'], 'def': 'a tall narrow wineglass', 'name': 'flute_glass'}, {'frequency': 'r', 'id': 469, 'synset': 'foal.n.01', 'synonyms': ['foal'], 'def': 'a young horse', 'name': 'foal'}, {'frequency': 'c', 'id': 470, 'synset': 'folding_chair.n.01', 'synonyms': ['folding_chair'], 'def': 'a chair that can be folded flat for storage', 'name': 'folding_chair'}, {'frequency': 'c', 'id': 471, 'synset': 'food_processor.n.01', 'synonyms': ['food_processor'], 'def': 'a kitchen appliance for shredding, blending, chopping, or slicing food', 'name': 'food_processor'}, {'frequency': 'c', 'id': 472, 'synset': 'football.n.02', 'synonyms': ['football_(American)'], 
'def': 'the inflated oblong ball used in playing American football', 'name': 'football_(American)'}, {'frequency': 'r', 'id': 473, 'synset': 'football_helmet.n.01', 'synonyms': ['football_helmet'], 'def': 'a padded helmet with a face mask to protect the head of football players', 'name': 'football_helmet'}, {'frequency': 'c', 'id': 474, 'synset': 'footstool.n.01', 'synonyms': ['footstool', 'footrest'], 'def': 'a low seat or a stool to rest the feet of a seated person', 'name': 'footstool'}, {'frequency': 'f', 'id': 475, 'synset': 'fork.n.01', 'synonyms': ['fork'], 'def': 'cutlery used for serving and eating food', 'name': 'fork'}, {'frequency': 'r', 'id': 476, 'synset': 'forklift.n.01', 'synonyms': ['forklift'], 'def': 'an industrial vehicle with a power operated fork in front that can be inserted under loads to lift and move them', 'name': 'forklift'}, {'frequency': 'r', 'id': 477, 'synset': 'freight_car.n.01', 'synonyms': ['freight_car'], 'def': 'a railway car that carries freight', 'name': 'freight_car'}, {'frequency': 'r', 'id': 478, 'synset': 'french_toast.n.01', 'synonyms': ['French_toast'], 'def': 'bread slice dipped in egg and milk and fried', 'name': 'French_toast'}, {'frequency': 'c', 'id': 479, 'synset': 'freshener.n.01', 'synonyms': ['freshener', 'air_freshener'], 'def': 'anything that freshens', 'name': 'freshener'}, {'frequency': 'f', 'id': 480, 'synset': 'frisbee.n.01', 'synonyms': ['frisbee'], 'def': 'a light, plastic disk propelled with a flip of the wrist for recreation or competition', 'name': 'frisbee'}, {'frequency': 'c', 'id': 481, 'synset': 'frog.n.01', 'synonyms': ['frog', 'toad', 'toad_frog'], 'def': 'a tailless stout-bodied amphibians with long hind limbs for leaping', 'name': 'frog'}, {'frequency': 'c', 'id': 482, 'synset': 'fruit_juice.n.01', 'synonyms': ['fruit_juice'], 'def': 'drink produced by squeezing or crushing fruit', 'name': 'fruit_juice'}, {'frequency': 'r', 'id': 483, 'synset': 'fruit_salad.n.01', 'synonyms': ['fruit_salad'], 'def': 'salad composed of fruits', 'name': 'fruit_salad'}, {'frequency': 'c', 'id': 484, 'synset': 'frying_pan.n.01', 'synonyms': ['frying_pan', 'frypan', 'skillet'], 'def': 'a pan used for frying foods', 'name': 'frying_pan'}, {'frequency': 'r', 'id': 485, 'synset': 'fudge.n.01', 'synonyms': ['fudge'], 'def': 'soft creamy candy', 'name': 'fudge'}, {'frequency': 'r', 'id': 486, 'synset': 'funnel.n.02', 'synonyms': ['funnel'], 'def': 'a cone-shaped utensil used to channel a substance into a container with a small mouth', 'name': 'funnel'}, {'frequency': 'c', 'id': 487, 'synset': 'futon.n.01', 'synonyms': ['futon'], 'def': 'a pad that is used for sleeping on the floor or on a raised frame', 'name': 'futon'}, {'frequency': 'r', 'id': 488, 'synset': 'gag.n.02', 'synonyms': ['gag', 'muzzle'], 'def': "restraint put into a person's mouth to prevent speaking or shouting", 'name': 'gag'}, {'frequency': 'r', 'id': 489, 'synset': 'garbage.n.03', 'synonyms': ['garbage'], 'def': 'a receptacle where waste can be discarded', 'name': 'garbage'}, {'frequency': 'c', 'id': 490, 'synset': 'garbage_truck.n.01', 'synonyms': ['garbage_truck'], 'def': 'a truck for collecting domestic refuse', 'name': 'garbage_truck'}, {'frequency': 'c', 'id': 491, 'synset': 'garden_hose.n.01', 'synonyms': ['garden_hose'], 'def': 'a hose used for watering a lawn or garden', 'name': 'garden_hose'}, {'frequency': 'c', 'id': 492, 'synset': 'gargle.n.01', 'synonyms': ['gargle', 'mouthwash'], 'def': 'a medicated solution used for gargling and rinsing the mouth', 'name': 
'gargle'}, {'frequency': 'r', 'id': 493, 'synset': 'gargoyle.n.02', 'synonyms': ['gargoyle'], 'def': 'an ornament consisting of a grotesquely carved figure of a person or animal', 'name': 'gargoyle'}, {'frequency': 'c', 'id': 494, 'synset': 'garlic.n.02', 'synonyms': ['garlic', 'ail'], 'def': 'aromatic bulb used as seasoning', 'name': 'garlic'}, {'frequency': 'r', 'id': 495, 'synset': 'gasmask.n.01', 'synonyms': ['gasmask', 'respirator', 'gas_helmet'], 'def': 'a protective face mask with a filter', 'name': 'gasmask'}, {'frequency': 'r', 'id': 496, 'synset': 'gazelle.n.01', 'synonyms': ['gazelle'], 'def': 'small swift graceful antelope of Africa and Asia having lustrous eyes', 'name': 'gazelle'}, {'frequency': 'c', 'id': 497, 'synset': 'gelatin.n.02', 'synonyms': ['gelatin', 'jelly'], 'def': 'an edible jelly made with gelatin and used as a dessert or salad base or a coating for foods', 'name': 'gelatin'}, {'frequency': 'r', 'id': 498, 'synset': 'gem.n.02', 'synonyms': ['gemstone'], 'def': 'a crystalline rock that can be cut and polished for jewelry', 'name': 'gemstone'}, {'frequency': 'c', 'id': 499, 'synset': 'giant_panda.n.01', 'synonyms': ['giant_panda', 'panda', 'panda_bear'], 'def': 'large black-and-white herbivorous mammal of bamboo forests of China and Tibet', 'name': 'giant_panda'}, {'frequency': 'c', 'id': 500, 'synset': 'gift_wrap.n.01', 'synonyms': ['gift_wrap'], 'def': 'attractive wrapping paper suitable for wrapping gifts', 'name': 'gift_wrap'}, {'frequency': 'c', 'id': 501, 'synset': 'ginger.n.03', 'synonyms': ['ginger', 'gingerroot'], 'def': 'the root of the common ginger plant; used fresh as a seasoning', 'name': 'ginger'}, {'frequency': 'f', 'id': 502, 'synset': 'giraffe.n.01', 'synonyms': ['giraffe'], 'def': 'tall animal having a spotted coat and small horns and very long neck and legs', 'name': 'giraffe'}, {'frequency': 'c', 'id': 503, 'synset': 'girdle.n.02', 'synonyms': ['cincture', 'sash', 'waistband', 'waistcloth'], 'def': 'a band of material around the waist that strengthens a skirt or trousers', 'name': 'cincture'}, {'frequency': 'f', 'id': 504, 'synset': 'glass.n.02', 'synonyms': ['glass_(drink_container)', 'drinking_glass'], 'def': 'a container for holding liquids while drinking', 'name': 'glass_(drink_container)'}, {'frequency': 'c', 'id': 505, 'synset': 'globe.n.03', 'synonyms': ['globe'], 'def': 'a sphere on which a map (especially of the earth) is represented', 'name': 'globe'}, {'frequency': 'f', 'id': 506, 'synset': 'glove.n.02', 'synonyms': ['glove'], 'def': 'handwear covering the hand', 'name': 'glove'}, {'frequency': 'c', 'id': 507, 'synset': 'goat.n.01', 'synonyms': ['goat'], 'def': 'a common goat', 'name': 'goat'}, {'frequency': 'f', 'id': 508, 'synset': 'goggles.n.01', 'synonyms': ['goggles'], 'def': 'tight-fitting spectacles worn to protect the eyes', 'name': 'goggles'}, {'frequency': 'r', 'id': 509, 'synset': 'goldfish.n.01', 'synonyms': ['goldfish'], 'def': 'small golden or orange-red freshwater fishes used as pond or aquarium pets', 'name': 'goldfish'}, {'frequency': 'r', 'id': 510, 'synset': 'golf_club.n.02', 'synonyms': ['golf_club', 'golf-club'], 'def': 'golf equipment used by a golfer to hit a golf ball', 'name': 'golf_club'}, {'frequency': 'c', 'id': 511, 'synset': 'golfcart.n.01', 'synonyms': ['golfcart'], 'def': 'a small motor vehicle in which golfers can ride between shots', 'name': 'golfcart'}, {'frequency': 'r', 'id': 512, 'synset': 'gondola.n.02', 'synonyms': ['gondola_(boat)'], 'def': 'long narrow flat-bottomed boat propelled by 
sculling; traditionally used on canals of Venice', 'name': 'gondola_(boat)'}, {'frequency': 'c', 'id': 513, 'synset': 'goose.n.01', 'synonyms': ['goose'], 'def': 'loud, web-footed long-necked aquatic birds usually larger than ducks', 'name': 'goose'}, {'frequency': 'r', 'id': 514, 'synset': 'gorilla.n.01', 'synonyms': ['gorilla'], 'def': 'largest ape', 'name': 'gorilla'}, {'frequency': 'r', 'id': 515, 'synset': 'gourd.n.02', 'synonyms': ['gourd'], 'def': 'any of numerous inedible fruits with hard rinds', 'name': 'gourd'}, {'frequency': 'r', 'id': 516, 'synset': 'gown.n.04', 'synonyms': ['surgical_gown', 'scrubs_(surgical_clothing)'], 'def': 'protective garment worn by surgeons during operations', 'name': 'surgical_gown'}, {'frequency': 'f', 'id': 517, 'synset': 'grape.n.01', 'synonyms': ['grape'], 'def': 'any of various juicy fruit with green or purple skins; grow in clusters', 'name': 'grape'}, {'frequency': 'r', 'id': 518, 'synset': 'grasshopper.n.01', 'synonyms': ['grasshopper'], 'def': 'plant-eating insect with hind legs adapted for leaping', 'name': 'grasshopper'}, {'frequency': 'c', 'id': 519, 'synset': 'grater.n.01', 'synonyms': ['grater'], 'def': 'utensil with sharp perforations for shredding foods (as vegetables or cheese)', 'name': 'grater'}, {'frequency': 'c', 'id': 520, 'synset': 'gravestone.n.01', 'synonyms': ['gravestone', 'headstone', 'tombstone'], 'def': 'a stone that is used to mark a grave', 'name': 'gravestone'}, {'frequency': 'r', 'id': 521, 'synset': 'gravy_boat.n.01', 'synonyms': ['gravy_boat', 'gravy_holder'], 'def': 'a dish (often boat-shaped) for serving gravy or sauce', 'name': 'gravy_boat'}, {'frequency': 'c', 'id': 522, 'synset': 'green_bean.n.02', 'synonyms': ['green_bean'], 'def': 'a common bean plant cultivated for its slender green edible pods', 'name': 'green_bean'}, {'frequency': 'c', 'id': 523, 'synset': 'green_onion.n.01', 'synonyms': ['green_onion', 'spring_onion', 'scallion'], 'def': 'a young onion before the bulb has enlarged', 'name': 'green_onion'}, {'frequency': 'r', 'id': 524, 'synset': 'griddle.n.01', 'synonyms': ['griddle'], 'def': 'cooking utensil consisting of a flat heated surface on which food is cooked', 'name': 'griddle'}, {'frequency': 'r', 'id': 525, 'synset': 'grillroom.n.01', 'synonyms': ['grillroom', 'grill_(restaurant)'], 'def': 'a restaurant where food is cooked on a grill', 'name': 'grillroom'}, {'frequency': 'r', 'id': 526, 'synset': 'grinder.n.04', 'synonyms': ['grinder_(tool)'], 'def': 'a machine tool that polishes metal', 'name': 'grinder_(tool)'}, {'frequency': 'r', 'id': 527, 'synset': 'grits.n.01', 'synonyms': ['grits', 'hominy_grits'], 'def': 'coarsely ground corn boiled as a breakfast dish', 'name': 'grits'}, {'frequency': 'c', 'id': 528, 'synset': 'grizzly.n.01', 'synonyms': ['grizzly', 'grizzly_bear'], 'def': 'powerful brownish-yellow bear of the uplands of western North America', 'name': 'grizzly'}, {'frequency': 'c', 'id': 529, 'synset': 'grocery_bag.n.01', 'synonyms': ['grocery_bag'], 'def': "a sack for holding customer's groceries", 'name': 'grocery_bag'}, {'frequency': 'r', 'id': 530, 'synset': 'guacamole.n.01', 'synonyms': ['guacamole'], 'def': 'a dip made of mashed avocado mixed with chopped onions and other seasonings', 'name': 'guacamole'}, {'frequency': 'f', 'id': 531, 'synset': 'guitar.n.01', 'synonyms': ['guitar'], 'def': 'a stringed instrument usually having six strings; played by strumming or plucking', 'name': 'guitar'}, {'frequency': 'c', 'id': 532, 'synset': 'gull.n.02', 'synonyms': ['gull', 'seagull'], 
'def': 'mostly white aquatic bird having long pointed wings and short legs', 'name': 'gull'}, {'frequency': 'c', 'id': 533, 'synset': 'gun.n.01', 'synonyms': ['gun'], 'def': 'a weapon that discharges a bullet at high velocity from a metal tube', 'name': 'gun'}, {'frequency': 'r', 'id': 534, 'synset': 'hair_spray.n.01', 'synonyms': ['hair_spray'], 'def': 'substance sprayed on the hair to hold it in place', 'name': 'hair_spray'}, {'frequency': 'c', 'id': 535, 'synset': 'hairbrush.n.01', 'synonyms': ['hairbrush'], 'def': "a brush used to groom a person's hair", 'name': 'hairbrush'}, {'frequency': 'c', 'id': 536, 'synset': 'hairnet.n.01', 'synonyms': ['hairnet'], 'def': 'a small net that someone wears over their hair to keep it in place', 'name': 'hairnet'}, {'frequency': 'c', 'id': 537, 'synset': 'hairpin.n.01', 'synonyms': ['hairpin'], 'def': "a double pronged pin used to hold women's hair in place", 'name': 'hairpin'}, {'frequency': 'f', 'id': 538, 'synset': 'ham.n.01', 'synonyms': ['ham', 'jambon', 'gammon'], 'def': 'meat cut from the thigh of a hog (usually smoked)', 'name': 'ham'}, {'frequency': 'c', 'id': 539, 'synset': 'hamburger.n.01', 'synonyms': ['hamburger', 'beefburger', 'burger'], 'def': 'a sandwich consisting of a patty of minced beef served on a bun', 'name': 'hamburger'}, {'frequency': 'c', 'id': 540, 'synset': 'hammer.n.02', 'synonyms': ['hammer'], 'def': 'a hand tool with a heavy head and a handle; used to deliver an impulsive force by striking', 'name': 'hammer'}, {'frequency': 'r', 'id': 541, 'synset': 'hammock.n.02', 'synonyms': ['hammock'], 'def': 'a hanging bed of canvas or rope netting (usually suspended between two trees)', 'name': 'hammock'}, {'frequency': 'r', 'id': 542, 'synset': 'hamper.n.02', 'synonyms': ['hamper'], 'def': 'a basket usually with a cover', 'name': 'hamper'}, {'frequency': 'r', 'id': 543, 'synset': 'hamster.n.01', 'synonyms': ['hamster'], 'def': 'short-tailed burrowing rodent with large cheek pouches', 'name': 'hamster'}, {'frequency': 'c', 'id': 544, 'synset': 'hand_blower.n.01', 'synonyms': ['hair_dryer'], 'def': 'a hand-held electric blower that can blow warm air onto the hair', 'name': 'hair_dryer'}, {'frequency': 'r', 'id': 545, 'synset': 'hand_glass.n.01', 'synonyms': ['hand_glass', 'hand_mirror'], 'def': 'a mirror intended to be held in the hand', 'name': 'hand_glass'}, {'frequency': 'f', 'id': 546, 'synset': 'hand_towel.n.01', 'synonyms': ['hand_towel', 'face_towel'], 'def': 'a small towel used to dry the hands or face', 'name': 'hand_towel'}, {'frequency': 'c', 'id': 547, 'synset': 'handcart.n.01', 'synonyms': ['handcart', 'pushcart', 'hand_truck'], 'def': 'wheeled vehicle that can be pushed by a person', 'name': 'handcart'}, {'frequency': 'r', 'id': 548, 'synset': 'handcuff.n.01', 'synonyms': ['handcuff'], 'def': 'shackle that consists of a metal loop that can be locked around the wrist', 'name': 'handcuff'}, {'frequency': 'c', 'id': 549, 'synset': 'handkerchief.n.01', 'synonyms': ['handkerchief'], 'def': 'a square piece of cloth used for wiping the eyes or nose or as a costume accessory', 'name': 'handkerchief'}, {'frequency': 'f', 'id': 550, 'synset': 'handle.n.01', 'synonyms': ['handle', 'grip', 'handgrip'], 'def': 'the appendage to an object that is designed to be held in order to use or move it', 'name': 'handle'}, {'frequency': 'r', 'id': 551, 'synset': 'handsaw.n.01', 'synonyms': ['handsaw', "carpenter's_saw"], 'def': 'a saw used with one hand for cutting wood', 'name': 'handsaw'}, {'frequency': 'r', 'id': 552, 'synset': 
'hardback.n.01', 'synonyms': ['hardback_book', 'hardcover_book'], 'def': 'a book with cardboard or cloth or leather covers', 'name': 'hardback_book'}, {'frequency': 'r', 'id': 553, 'synset': 'harmonium.n.01', 'synonyms': ['harmonium', 'organ_(musical_instrument)', 'reed_organ_(musical_instrument)'], 'def': 'a free-reed instrument in which air is forced through the reeds by bellows', 'name': 'harmonium'}, {'frequency': 'f', 'id': 554, 'synset': 'hat.n.01', 'synonyms': ['hat'], 'def': 'headwear that protects the head from bad weather, sun, or worn for fashion', 'name': 'hat'}, {'frequency': 'r', 'id': 555, 'synset': 'hatbox.n.01', 'synonyms': ['hatbox'], 'def': 'a round piece of luggage for carrying hats', 'name': 'hatbox'}, {'frequency': 'r', 'id': 556, 'synset': 'hatch.n.03', 'synonyms': ['hatch'], 'def': 'a movable barrier covering a hatchway', 'name': 'hatch'}, {'frequency': 'c', 'id': 557, 'synset': 'head_covering.n.01', 'synonyms': ['veil'], 'def': 'a garment that covers the head and face', 'name': 'veil'}, {'frequency': 'f', 'id': 558, 'synset': 'headband.n.01', 'synonyms': ['headband'], 'def': 'a band worn around or over the head', 'name': 'headband'}, {'frequency': 'f', 'id': 559, 'synset': 'headboard.n.01', 'synonyms': ['headboard'], 'def': 'a vertical board or panel forming the head of a bedstead', 'name': 'headboard'}, {'frequency': 'f', 'id': 560, 'synset': 'headlight.n.01', 'synonyms': ['headlight', 'headlamp'], 'def': 'a powerful light with reflector; attached to the front of an automobile or locomotive', 'name': 'headlight'}, {'frequency': 'c', 'id': 561, 'synset': 'headscarf.n.01', 'synonyms': ['headscarf'], 'def': 'a kerchief worn over the head and tied under the chin', 'name': 'headscarf'}, {'frequency': 'r', 'id': 562, 'synset': 'headset.n.01', 'synonyms': ['headset'], 'def': 'receiver consisting of a pair of headphones', 'name': 'headset'}, {'frequency': 'c', 'id': 563, 'synset': 'headstall.n.01', 'synonyms': ['headstall_(for_horses)', 'headpiece_(for_horses)'], 'def': "the band that is the part of a bridle that fits around a horse's head", 'name': 'headstall_(for_horses)'}, {'frequency': 'r', 'id': 564, 'synset': 'hearing_aid.n.02', 'synonyms': ['hearing_aid'], 'def': 'an acoustic device used to direct sound to the ear of a hearing-impaired person', 'name': 'hearing_aid'}, {'frequency': 'c', 'id': 565, 'synset': 'heart.n.02', 'synonyms': ['heart'], 'def': 'a muscular organ; its contractions move the blood through the body', 'name': 'heart'}, {'frequency': 'c', 'id': 566, 'synset': 'heater.n.01', 'synonyms': ['heater', 'warmer'], 'def': 'device that heats water or supplies warmth to a room', 'name': 'heater'}, {'frequency': 'c', 'id': 567, 'synset': 'helicopter.n.01', 'synonyms': ['helicopter'], 'def': 'an aircraft without wings that obtains its lift from the rotation of overhead blades', 'name': 'helicopter'}, {'frequency': 'f', 'id': 568, 'synset': 'helmet.n.02', 'synonyms': ['helmet'], 'def': 'a protective headgear made of hard material to resist blows', 'name': 'helmet'}, {'frequency': 'r', 'id': 569, 'synset': 'heron.n.02', 'synonyms': ['heron'], 'def': 'grey or white wading bird with long neck and long legs and (usually) long bill', 'name': 'heron'}, {'frequency': 'c', 'id': 570, 'synset': 'highchair.n.01', 'synonyms': ['highchair', 'feeding_chair'], 'def': 'a chair for feeding a very young child', 'name': 'highchair'}, {'frequency': 'f', 'id': 571, 'synset': 'hinge.n.01', 'synonyms': ['hinge'], 'def': 'a joint that holds two parts together so that one can swing 
relative to the other', 'name': 'hinge'}, {'frequency': 'r', 'id': 572, 'synset': 'hippopotamus.n.01', 'synonyms': ['hippopotamus'], 'def': 'massive thick-skinned animal living in or around rivers of tropical Africa', 'name': 'hippopotamus'}, {'frequency': 'r', 'id': 573, 'synset': 'hockey_stick.n.01', 'synonyms': ['hockey_stick'], 'def': 'sports implement consisting of a stick used by hockey players to move the puck', 'name': 'hockey_stick'}, {'frequency': 'c', 'id': 574, 'synset': 'hog.n.03', 'synonyms': ['hog', 'pig'], 'def': 'domestic swine', 'name': 'hog'}, {'frequency': 'f', 'id': 575, 'synset': 'home_plate.n.01', 'synonyms': ['home_plate_(baseball)', 'home_base_(baseball)'], 'def': '(baseball) a rubber slab where the batter stands; it must be touched by a base runner in order to score', 'name': 'home_plate_(baseball)'}, {'frequency': 'c', 'id': 576, 'synset': 'honey.n.01', 'synonyms': ['honey'], 'def': 'a sweet yellow liquid produced by bees', 'name': 'honey'}, {'frequency': 'f', 'id': 577, 'synset': 'hood.n.06', 'synonyms': ['fume_hood', 'exhaust_hood'], 'def': 'metal covering leading to a vent that exhausts smoke or fumes', 'name': 'fume_hood'}, {'frequency': 'f', 'id': 578, 'synset': 'hook.n.05', 'synonyms': ['hook'], 'def': 'a curved or bent implement for suspending or pulling something', 'name': 'hook'}, {'frequency': 'f', 'id': 579, 'synset': 'horse.n.01', 'synonyms': ['horse'], 'def': 'a common horse', 'name': 'horse'}, {'frequency': 'f', 'id': 580, 'synset': 'hose.n.03', 'synonyms': ['hose', 'hosepipe'], 'def': 'a flexible pipe for conveying a liquid or gas', 'name': 'hose'}, {'frequency': 'r', 'id': 581, 'synset': 'hot-air_balloon.n.01', 'synonyms': ['hot-air_balloon'], 'def': 'balloon for travel through the air in a basket suspended below a large bag of heated air', 'name': 'hot-air_balloon'}, {'frequency': 'r', 'id': 582, 'synset': 'hot_plate.n.01', 'synonyms': ['hotplate'], 'def': 'a portable electric appliance for heating or cooking or keeping food warm', 'name': 'hotplate'}, {'frequency': 'c', 'id': 583, 'synset': 'hot_sauce.n.01', 'synonyms': ['hot_sauce'], 'def': 'a pungent peppery sauce', 'name': 'hot_sauce'}, {'frequency': 'r', 'id': 584, 'synset': 'hourglass.n.01', 'synonyms': ['hourglass'], 'def': 'a sandglass timer that runs for sixty minutes', 'name': 'hourglass'}, {'frequency': 'r', 'id': 585, 'synset': 'houseboat.n.01', 'synonyms': ['houseboat'], 'def': 'a barge that is designed and equipped for use as a dwelling', 'name': 'houseboat'}, {'frequency': 'r', 'id': 586, 'synset': 'hummingbird.n.01', 'synonyms': ['hummingbird'], 'def': 'tiny American bird having brilliant iridescent plumage and long slender bills', 'name': 'hummingbird'}, {'frequency': 'r', 'id': 587, 'synset': 'hummus.n.01', 'synonyms': ['hummus', 'humus', 'hommos', 'hoummos', 'humous'], 'def': 'a thick spread made from mashed chickpeas', 'name': 'hummus'}, {'frequency': 'c', 'id': 588, 'synset': 'ice_bear.n.01', 'synonyms': ['polar_bear'], 'def': 'white bear of Arctic regions', 'name': 'polar_bear'}, {'frequency': 'c', 'id': 589, 'synset': 'ice_cream.n.01', 'synonyms': ['icecream'], 'def': 'frozen dessert containing cream and sugar and flavoring', 'name': 'icecream'}, {'frequency': 'r', 'id': 590, 'synset': 'ice_lolly.n.01', 'synonyms': ['popsicle'], 'def': 'ice cream or water ice on a small wooden stick', 'name': 'popsicle'}, {'frequency': 'c', 'id': 591, 'synset': 'ice_maker.n.01', 'synonyms': ['ice_maker'], 'def': 'an appliance included in some electric refrigerators for making ice cubes', 
'name': 'ice_maker'}, {'frequency': 'r', 'id': 592, 'synset': 'ice_pack.n.01', 'synonyms': ['ice_pack', 'ice_bag'], 'def': 'a waterproof bag filled with ice: applied to the body (especially the head) to cool or reduce swelling', 'name': 'ice_pack'}, {'frequency': 'r', 'id': 593, 'synset': 'ice_skate.n.01', 'synonyms': ['ice_skate'], 'def': 'skate consisting of a boot with a steel blade fitted to the sole', 'name': 'ice_skate'}, {'frequency': 'r', 'id': 594, 'synset': 'ice_tea.n.01', 'synonyms': ['ice_tea', 'iced_tea'], 'def': 'strong tea served over ice', 'name': 'ice_tea'}, {'frequency': 'c', 'id': 595, 'synset': 'igniter.n.01', 'synonyms': ['igniter', 'ignitor', 'lighter'], 'def': 'a substance or device used to start a fire', 'name': 'igniter'}, {'frequency': 'r', 'id': 596, 'synset': 'incense.n.01', 'synonyms': ['incense'], 'def': 'a substance that produces a fragrant odor when burned', 'name': 'incense'}, {'frequency': 'r', 'id': 597, 'synset': 'inhaler.n.01', 'synonyms': ['inhaler', 'inhalator'], 'def': 'a dispenser that produces a chemical vapor to be inhaled through mouth or nose', 'name': 'inhaler'}, {'frequency': 'c', 'id': 598, 'synset': 'ipod.n.01', 'synonyms': ['iPod'], 'def': 'a pocket-sized device used to play music files', 'name': 'iPod'}, {'frequency': 'c', 'id': 599, 'synset': 'iron.n.04', 'synonyms': ['iron_(for_clothing)', 'smoothing_iron_(for_clothing)'], 'def': 'home appliance consisting of a flat metal base that is heated and used to smooth cloth', 'name': 'iron_(for_clothing)'}, {'frequency': 'r', 'id': 600, 'synset': 'ironing_board.n.01', 'synonyms': ['ironing_board'], 'def': 'narrow padded board on collapsible supports; used for ironing clothes', 'name': 'ironing_board'}, {'frequency': 'f', 'id': 601, 'synset': 'jacket.n.01', 'synonyms': ['jacket'], 'def': 'a waist-length coat', 'name': 'jacket'}, {'frequency': 'r', 'id': 602, 'synset': 'jam.n.01', 'synonyms': ['jam'], 'def': 'preserve of crushed fruit', 'name': 'jam'}, {'frequency': 'f', 'id': 603, 'synset': 'jean.n.01', 'synonyms': ['jean', 'blue_jean', 'denim'], 'def': '(usually plural) close-fitting trousers of heavy denim for manual work or casual wear', 'name': 'jean'}, {'frequency': 'c', 'id': 604, 'synset': 'jeep.n.01', 'synonyms': ['jeep', 'landrover'], 'def': 'a car suitable for traveling over rough terrain', 'name': 'jeep'}, {'frequency': 'r', 'id': 605, 'synset': 'jelly_bean.n.01', 'synonyms': ['jelly_bean', 'jelly_egg'], 'def': 'sugar-glazed jellied candy', 'name': 'jelly_bean'}, {'frequency': 'f', 'id': 606, 'synset': 'jersey.n.03', 'synonyms': ['jersey', 'T-shirt', 'tee_shirt'], 'def': 'a close-fitting pullover shirt', 'name': 'jersey'}, {'frequency': 'c', 'id': 607, 'synset': 'jet.n.01', 'synonyms': ['jet_plane', 'jet-propelled_plane'], 'def': 'an airplane powered by one or more jet engines', 'name': 'jet_plane'}, {'frequency': 'c', 'id': 608, 'synset': 'jewelry.n.01', 'synonyms': ['jewelry', 'jewellery'], 'def': 'an adornment (as a bracelet or ring or necklace) made of precious metals and set with gems (or imitation gems)', 'name': 'jewelry'}, {'frequency': 'r', 'id': 609, 'synset': 'joystick.n.02', 'synonyms': ['joystick'], 'def': 'a control device for computers consisting of a vertical handle that can move freely in two directions', 'name': 'joystick'}, {'frequency': 'r', 'id': 610, 'synset': 'jump_suit.n.01', 'synonyms': ['jumpsuit'], 'def': "one-piece garment fashioned after a parachutist's uniform", 'name': 'jumpsuit'}, {'frequency': 'c', 'id': 611, 'synset': 'kayak.n.01', 'synonyms': 
['kayak'], 'def': 'a small canoe consisting of a light frame made watertight with animal skins', 'name': 'kayak'}, {'frequency': 'r', 'id': 612, 'synset': 'keg.n.02', 'synonyms': ['keg'], 'def': 'small cask or barrel', 'name': 'keg'}, {'frequency': 'r', 'id': 613, 'synset': 'kennel.n.01', 'synonyms': ['kennel', 'doghouse'], 'def': 'outbuilding that serves as a shelter for a dog', 'name': 'kennel'}, {'frequency': 'c', 'id': 614, 'synset': 'kettle.n.01', 'synonyms': ['kettle', 'boiler'], 'def': 'a metal pot for stewing or boiling; usually has a lid', 'name': 'kettle'}, {'frequency': 'f', 'id': 615, 'synset': 'key.n.01', 'synonyms': ['key'], 'def': 'metal instrument used to unlock a lock', 'name': 'key'}, {'frequency': 'r', 'id': 616, 'synset': 'keycard.n.01', 'synonyms': ['keycard'], 'def': 'a plastic card used to gain access typically to a door', 'name': 'keycard'}, {'frequency': 'r', 'id': 617, 'synset': 'kilt.n.01', 'synonyms': ['kilt'], 'def': 'a knee-length pleated tartan skirt worn by men as part of the traditional dress in the Highlands of northern Scotland', 'name': 'kilt'}, {'frequency': 'c', 'id': 618, 'synset': 'kimono.n.01', 'synonyms': ['kimono'], 'def': 'a loose robe; imitated from robes originally worn by Japanese', 'name': 'kimono'}, {'frequency': 'f', 'id': 619, 'synset': 'kitchen_sink.n.01', 'synonyms': ['kitchen_sink'], 'def': 'a sink in a kitchen', 'name': 'kitchen_sink'}, {'frequency': 'c', 'id': 620, 'synset': 'kitchen_table.n.01', 'synonyms': ['kitchen_table'], 'def': 'a table in the kitchen', 'name': 'kitchen_table'}, {'frequency': 'f', 'id': 621, 'synset': 'kite.n.03', 'synonyms': ['kite'], 'def': 'plaything consisting of a light frame covered with tissue paper; flown in wind at end of a string', 'name': 'kite'}, {'frequency': 'c', 'id': 622, 'synset': 'kitten.n.01', 'synonyms': ['kitten', 'kitty'], 'def': 'young domestic cat', 'name': 'kitten'}, {'frequency': 'c', 'id': 623, 'synset': 'kiwi.n.03', 'synonyms': ['kiwi_fruit'], 'def': 'fuzzy brown egg-shaped fruit with slightly tart green flesh', 'name': 'kiwi_fruit'}, {'frequency': 'f', 'id': 624, 'synset': 'knee_pad.n.01', 'synonyms': ['knee_pad'], 'def': 'protective garment consisting of a pad worn by football or baseball or hockey players', 'name': 'knee_pad'}, {'frequency': 'f', 'id': 625, 'synset': 'knife.n.01', 'synonyms': ['knife'], 'def': 'tool with a blade and point used as a cutting instrument', 'name': 'knife'}, {'frequency': 'r', 'id': 626, 'synset': 'knight.n.02', 'synonyms': ['knight_(chess_piece)', 'horse_(chess_piece)'], 'def': 'a chess game piece shaped to resemble the head of a horse', 'name': 'knight_(chess_piece)'}, {'frequency': 'r', 'id': 627, 'synset': 'knitting_needle.n.01', 'synonyms': ['knitting_needle'], 'def': 'needle consisting of a slender rod with pointed ends; usually used in pairs', 'name': 'knitting_needle'}, {'frequency': 'f', 'id': 628, 'synset': 'knob.n.02', 'synonyms': ['knob'], 'def': 'a round handle often found on a door', 'name': 'knob'}, {'frequency': 'r', 'id': 629, 'synset': 'knocker.n.05', 'synonyms': ['knocker_(on_a_door)', 'doorknocker'], 'def': 'a device (usually metal and ornamental) attached by a hinge to a door', 'name': 'knocker_(on_a_door)'}, {'frequency': 'r', 'id': 630, 'synset': 'koala.n.01', 'synonyms': ['koala', 'koala_bear'], 'def': 'sluggish tailless Australian marsupial with grey furry ears and coat', 'name': 'koala'}, {'frequency': 'r', 'id': 631, 'synset': 'lab_coat.n.01', 'synonyms': ['lab_coat', 'laboratory_coat'], 'def': 'a light coat worn to protect 
clothing from substances used while working in a laboratory', 'name': 'lab_coat'}, {'frequency': 'f', 'id': 632, 'synset': 'ladder.n.01', 'synonyms': ['ladder'], 'def': 'steps consisting of two parallel members connected by rungs', 'name': 'ladder'}, {'frequency': 'c', 'id': 633, 'synset': 'ladle.n.01', 'synonyms': ['ladle'], 'def': 'a spoon-shaped vessel with a long handle frequently used to transfer liquids', 'name': 'ladle'}, {'frequency': 'r', 'id': 634, 'synset': 'ladybug.n.01', 'synonyms': ['ladybug', 'ladybeetle', 'ladybird_beetle'], 'def': 'small round bright-colored and spotted beetle, typically red and black', 'name': 'ladybug'}, {'frequency': 'c', 'id': 635, 'synset': 'lamb.n.01', 'synonyms': ['lamb_(animal)'], 'def': 'young sheep', 'name': 'lamb_(animal)'}, {'frequency': 'r', 'id': 636, 'synset': 'lamb_chop.n.01', 'synonyms': ['lamb-chop', 'lambchop'], 'def': 'chop cut from a lamb', 'name': 'lamb-chop'}, {'frequency': 'f', 'id': 637, 'synset': 'lamp.n.02', 'synonyms': ['lamp'], 'def': 'a piece of furniture holding one or more electric light bulbs', 'name': 'lamp'}, {'frequency': 'f', 'id': 638, 'synset': 'lamppost.n.01', 'synonyms': ['lamppost'], 'def': 'a metal post supporting an outdoor lamp (such as a streetlight)', 'name': 'lamppost'}, {'frequency': 'f', 'id': 639, 'synset': 'lampshade.n.01', 'synonyms': ['lampshade'], 'def': 'a protective ornamental shade used to screen a light bulb from direct view', 'name': 'lampshade'}, {'frequency': 'c', 'id': 640, 'synset': 'lantern.n.01', 'synonyms': ['lantern'], 'def': 'light in a transparent protective case', 'name': 'lantern'}, {'frequency': 'f', 'id': 641, 'synset': 'lanyard.n.02', 'synonyms': ['lanyard', 'laniard'], 'def': 'a cord worn around the neck to hold a knife or whistle, etc.', 'name': 'lanyard'}, {'frequency': 'f', 'id': 642, 'synset': 'laptop.n.01', 'synonyms': ['laptop_computer', 'notebook_computer'], 'def': 'a portable computer small enough to use in your lap', 'name': 'laptop_computer'}, {'frequency': 'r', 'id': 643, 'synset': 'lasagna.n.01', 'synonyms': ['lasagna', 'lasagne'], 'def': 'baked dish of layers of lasagna pasta with sauce and cheese and meat or vegetables', 'name': 'lasagna'}, {'frequency': 'c', 'id': 644, 'synset': 'latch.n.02', 'synonyms': ['latch'], 'def': 'a bar that can be lowered or slid into a groove to fasten a door or gate', 'name': 'latch'}, {'frequency': 'r', 'id': 645, 'synset': 'lawn_mower.n.01', 'synonyms': ['lawn_mower'], 'def': 'garden tool for mowing grass on lawns', 'name': 'lawn_mower'}, {'frequency': 'r', 'id': 646, 'synset': 'leather.n.01', 'synonyms': ['leather'], 'def': 'an animal skin made smooth and flexible by removing the hair and then tanning', 'name': 'leather'}, {'frequency': 'c', 'id': 647, 'synset': 'legging.n.01', 'synonyms': ['legging_(clothing)', 'leging_(clothing)', 'leg_covering'], 'def': 'a garment covering the leg (usually extending from the knee to the ankle)', 'name': 'legging_(clothing)'}, {'frequency': 'c', 'id': 648, 'synset': 'lego.n.01', 'synonyms': ['Lego', 'Lego_set'], 'def': "a child's plastic construction set for making models from blocks", 'name': 'Lego'}, {'frequency': 'f', 'id': 649, 'synset': 'lemon.n.01', 'synonyms': ['lemon'], 'def': 'yellow oval fruit with juicy acidic flesh', 'name': 'lemon'}, {'frequency': 'r', 'id': 650, 'synset': 'lemonade.n.01', 'synonyms': ['lemonade'], 'def': 'sweetened beverage of diluted lemon juice', 'name': 'lemonade'}, {'frequency': 'f', 'id': 651, 'synset': 'lettuce.n.02', 'synonyms': ['lettuce'], 'def': 'leafy plant 
commonly eaten in salad or on sandwiches', 'name': 'lettuce'}, {'frequency': 'f', 'id': 652, 'synset': 'license_plate.n.01', 'synonyms': ['license_plate', 'numberplate'], 'def': "a plate mounted on the front and back of car and bearing the car's registration number", 'name': 'license_plate'}, {'frequency': 'f', 'id': 653, 'synset': 'life_buoy.n.01', 'synonyms': ['life_buoy', 'lifesaver', 'life_belt', 'life_ring'], 'def': 'a ring-shaped life preserver used to prevent drowning (NOT a life-jacket or vest)', 'name': 'life_buoy'}, {'frequency': 'f', 'id': 654, 'synset': 'life_jacket.n.01', 'synonyms': ['life_jacket', 'life_vest'], 'def': 'life preserver consisting of a sleeveless jacket of buoyant or inflatable design', 'name': 'life_jacket'}, {'frequency': 'f', 'id': 655, 'synset': 'light_bulb.n.01', 'synonyms': ['lightbulb'], 'def': 'glass bulb or tube shaped electric device that emits light (DO NOT MARK LAMPS AS A WHOLE)', 'name': 'lightbulb'}, {'frequency': 'r', 'id': 656, 'synset': 'lightning_rod.n.02', 'synonyms': ['lightning_rod', 'lightning_conductor'], 'def': 'a metallic conductor that is attached to a high point and leads to the ground', 'name': 'lightning_rod'}, {'frequency': 'c', 'id': 657, 'synset': 'lime.n.06', 'synonyms': ['lime'], 'def': 'the green acidic fruit of any of various lime trees', 'name': 'lime'}, {'frequency': 'r', 'id': 658, 'synset': 'limousine.n.01', 'synonyms': ['limousine'], 'def': 'long luxurious car; usually driven by a chauffeur', 'name': 'limousine'}, {'frequency': 'r', 'id': 659, 'synset': 'linen.n.02', 'synonyms': ['linen_paper'], 'def': 'a high-quality paper made of linen fibers or with a linen finish', 'name': 'linen_paper'}, {'frequency': 'c', 'id': 660, 'synset': 'lion.n.01', 'synonyms': ['lion'], 'def': 'large gregarious predatory cat of Africa and India', 'name': 'lion'}, {'frequency': 'c', 'id': 661, 'synset': 'lip_balm.n.01', 'synonyms': ['lip_balm'], 'def': 'a balm applied to the lips', 'name': 'lip_balm'}, {'frequency': 'c', 'id': 662, 'synset': 'lipstick.n.01', 'synonyms': ['lipstick', 'lip_rouge'], 'def': 'makeup that is used to color the lips', 'name': 'lipstick'}, {'frequency': 'r', 'id': 663, 'synset': 'liquor.n.01', 'synonyms': ['liquor', 'spirits', 'hard_liquor', 'liqueur', 'cordial'], 'def': 'an alcoholic beverage that is distilled rather than fermented', 'name': 'liquor'}, {'frequency': 'r', 'id': 664, 'synset': 'lizard.n.01', 'synonyms': ['lizard'], 'def': 'a reptile with usually two pairs of legs and a tapering tail', 'name': 'lizard'}, {'frequency': 'r', 'id': 665, 'synset': 'loafer.n.02', 'synonyms': ['Loafer_(type_of_shoe)'], 'def': 'a low leather step-in shoe', 'name': 'Loafer_(type_of_shoe)'}, {'frequency': 'f', 'id': 666, 'synset': 'log.n.01', 'synonyms': ['log'], 'def': 'a segment of the trunk of a tree when stripped of branches', 'name': 'log'}, {'frequency': 'c', 'id': 667, 'synset': 'lollipop.n.02', 'synonyms': ['lollipop'], 'def': 'hard candy on a stick', 'name': 'lollipop'}, {'frequency': 'c', 'id': 668, 'synset': 'lotion.n.01', 'synonyms': ['lotion'], 'def': 'any of various cosmetic preparations that are applied to the skin', 'name': 'lotion'}, {'frequency': 'f', 'id': 669, 'synset': 'loudspeaker.n.01', 'synonyms': ['speaker_(stero_equipment)'], 'def': 'electronic device that produces sound often as part of a stereo system', 'name': 'speaker_(stero_equipment)'}, {'frequency': 'c', 'id': 670, 'synset': 'love_seat.n.01', 'synonyms': ['loveseat'], 'def': 'small sofa that seats two people', 'name': 'loveseat'}, {'frequency': 
'r', 'id': 671, 'synset': 'machine_gun.n.01', 'synonyms': ['machine_gun'], 'def': 'a rapidly firing automatic gun', 'name': 'machine_gun'}, {'frequency': 'f', 'id': 672, 'synset': 'magazine.n.02', 'synonyms': ['magazine'], 'def': 'a paperback periodic publication', 'name': 'magazine'}, {'frequency': 'f', 'id': 673, 'synset': 'magnet.n.01', 'synonyms': ['magnet'], 'def': 'a device that attracts iron and produces a magnetic field', 'name': 'magnet'}, {'frequency': 'r', 'id': 674, 'synset': 'mail_slot.n.01', 'synonyms': ['mail_slot'], 'def': 'a slot (usually in a door) through which mail can be delivered', 'name': 'mail_slot'}, {'frequency': 'c', 'id': 675, 'synset': 'mailbox.n.01', 'synonyms': ['mailbox_(at_home)', 'letter_box_(at_home)'], 'def': 'a private box for delivery of mail', 'name': 'mailbox_(at_home)'}, {'frequency': 'r', 'id': 676, 'synset': 'mallet.n.01', 'synonyms': ['mallet'], 'def': 'a sports implement with a long handle and a hammer-like head used to hit a ball', 'name': 'mallet'}, {'frequency': 'r', 'id': 677, 'synset': 'mammoth.n.01', 'synonyms': ['mammoth'], 'def': 'any of numerous extinct elephants widely distributed in the Pleistocene', 'name': 'mammoth'}, {'frequency': 'c', 'id': 678, 'synset': 'mandarin.n.05', 'synonyms': ['mandarin_orange'], 'def': 'a somewhat flat reddish-orange loose skinned citrus of China', 'name': 'mandarin_orange'}, {'frequency': 'c', 'id': 679, 'synset': 'manger.n.01', 'synonyms': ['manger', 'trough'], 'def': 'a container (usually in a barn or stable) from which cattle or horses feed', 'name': 'manger'}, {'frequency': 'f', 'id': 680, 'synset': 'manhole.n.01', 'synonyms': ['manhole'], 'def': 'a hole (usually with a flush cover) through which a person can gain access to an underground structure', 'name': 'manhole'}, {'frequency': 'c', 'id': 681, 'synset': 'map.n.01', 'synonyms': ['map'], 'def': "a diagrammatic representation of the earth's surface (or part of it)", 'name': 'map'}, {'frequency': 'c', 'id': 682, 'synset': 'marker.n.03', 'synonyms': ['marker'], 'def': 'a writing implement for making a mark', 'name': 'marker'}, {'frequency': 'r', 'id': 683, 'synset': 'martini.n.01', 'synonyms': ['martini'], 'def': 'a cocktail made of gin (or vodka) with dry vermouth', 'name': 'martini'}, {'frequency': 'r', 'id': 684, 'synset': 'mascot.n.01', 'synonyms': ['mascot'], 'def': 'a person or animal that is adopted by a team or other group as a symbolic figure', 'name': 'mascot'}, {'frequency': 'c', 'id': 685, 'synset': 'mashed_potato.n.01', 'synonyms': ['mashed_potato'], 'def': 'potato that has been peeled and boiled and then mashed', 'name': 'mashed_potato'}, {'frequency': 'r', 'id': 686, 'synset': 'masher.n.02', 'synonyms': ['masher'], 'def': 'a kitchen utensil used for mashing (e.g. 
potatoes)', 'name': 'masher'}, {'frequency': 'f', 'id': 687, 'synset': 'mask.n.04', 'synonyms': ['mask', 'facemask'], 'def': 'a protective covering worn over the face', 'name': 'mask'}, {'frequency': 'f', 'id': 688, 'synset': 'mast.n.01', 'synonyms': ['mast'], 'def': 'a vertical spar for supporting sails', 'name': 'mast'}, {'frequency': 'c', 'id': 689, 'synset': 'mat.n.03', 'synonyms': ['mat_(gym_equipment)', 'gym_mat'], 'def': 'sports equipment consisting of a piece of thick padding on the floor for gymnastics', 'name': 'mat_(gym_equipment)'}, {'frequency': 'r', 'id': 690, 'synset': 'matchbox.n.01', 'synonyms': ['matchbox'], 'def': 'a box for holding matches', 'name': 'matchbox'}, {'frequency': 'f', 'id': 691, 'synset': 'mattress.n.01', 'synonyms': ['mattress'], 'def': 'a thick pad filled with resilient material used as a bed or part of a bed', 'name': 'mattress'}, {'frequency': 'c', 'id': 692, 'synset': 'measuring_cup.n.01', 'synonyms': ['measuring_cup'], 'def': 'graduated cup used to measure liquid or granular ingredients', 'name': 'measuring_cup'}, {'frequency': 'c', 'id': 693, 'synset': 'measuring_stick.n.01', 'synonyms': ['measuring_stick', 'ruler_(measuring_stick)', 'measuring_rod'], 'def': 'measuring instrument having a sequence of marks at regular intervals', 'name': 'measuring_stick'}, {'frequency': 'c', 'id': 694, 'synset': 'meatball.n.01', 'synonyms': ['meatball'], 'def': 'ground meat formed into a ball and fried or simmered in broth', 'name': 'meatball'}, {'frequency': 'c', 'id': 695, 'synset': 'medicine.n.02', 'synonyms': ['medicine'], 'def': 'something that treats or prevents or alleviates the symptoms of disease', 'name': 'medicine'}, {'frequency': 'r', 'id': 696, 'synset': 'melon.n.01', 'synonyms': ['melon'], 'def': 'fruit of the gourd family having a hard rind and sweet juicy flesh', 'name': 'melon'}, {'frequency': 'f', 'id': 697, 'synset': 'microphone.n.01', 'synonyms': ['microphone'], 'def': 'device for converting sound waves into electrical energy', 'name': 'microphone'}, {'frequency': 'r', 'id': 698, 'synset': 'microscope.n.01', 'synonyms': ['microscope'], 'def': 'magnifier of the image of small objects', 'name': 'microscope'}, {'frequency': 'f', 'id': 699, 'synset': 'microwave.n.02', 'synonyms': ['microwave_oven'], 'def': 'kitchen appliance that cooks food by passing an electromagnetic wave through it', 'name': 'microwave_oven'}, {'frequency': 'r', 'id': 700, 'synset': 'milestone.n.01', 'synonyms': ['milestone', 'milepost'], 'def': 'stone post at side of a road to show distances', 'name': 'milestone'}, {'frequency': 'c', 'id': 701, 'synset': 'milk.n.01', 'synonyms': ['milk'], 'def': 'a white nutritious liquid secreted by mammals and used as food by human beings', 'name': 'milk'}, {'frequency': 'f', 'id': 702, 'synset': 'minivan.n.01', 'synonyms': ['minivan'], 'def': 'a small box-shaped passenger van', 'name': 'minivan'}, {'frequency': 'r', 'id': 703, 'synset': 'mint.n.05', 'synonyms': ['mint_candy'], 'def': 'a candy that is flavored with a mint oil', 'name': 'mint_candy'}, {'frequency': 'f', 'id': 704, 'synset': 'mirror.n.01', 'synonyms': ['mirror'], 'def': 'polished surface that forms images by reflecting light', 'name': 'mirror'}, {'frequency': 'c', 'id': 705, 'synset': 'mitten.n.01', 'synonyms': ['mitten'], 'def': 'glove that encases the thumb separately and the other four fingers together', 'name': 'mitten'}, {'frequency': 'c', 'id': 706, 'synset': 'mixer.n.04', 'synonyms': ['mixer_(kitchen_tool)', 'stand_mixer'], 'def': 'a kitchen utensil that is used for mixing 
foods', 'name': 'mixer_(kitchen_tool)'}, {'frequency': 'c', 'id': 707, 'synset': 'money.n.03', 'synonyms': ['money'], 'def': 'the official currency issued by a government or national bank', 'name': 'money'}, {'frequency': 'f', 'id': 708, 'synset': 'monitor.n.04', 'synonyms': ['monitor_(computer_equipment) computer_monitor'], 'def': 'a computer monitor', 'name': 'monitor_(computer_equipment) computer_monitor'}, {'frequency': 'c', 'id': 709, 'synset': 'monkey.n.01', 'synonyms': ['monkey'], 'def': 'any of various long-tailed primates', 'name': 'monkey'}, {'frequency': 'f', 'id': 710, 'synset': 'motor.n.01', 'synonyms': ['motor'], 'def': 'machine that converts other forms of energy into mechanical energy and so imparts motion', 'name': 'motor'}, {'frequency': 'f', 'id': 711, 'synset': 'motor_scooter.n.01', 'synonyms': ['motor_scooter', 'scooter'], 'def': 'a wheeled vehicle with small wheels and a low-powered engine', 'name': 'motor_scooter'}, {'frequency': 'r', 'id': 712, 'synset': 'motor_vehicle.n.01', 'synonyms': ['motor_vehicle', 'automotive_vehicle'], 'def': 'a self-propelled wheeled vehicle that does not run on rails', 'name': 'motor_vehicle'}, {'frequency': 'r', 'id': 713, 'synset': 'motorboat.n.01', 'synonyms': ['motorboat', 'powerboat'], 'def': 'a boat propelled by an internal-combustion engine', 'name': 'motorboat'}, {'frequency': 'f', 'id': 714, 'synset': 'motorcycle.n.01', 'synonyms': ['motorcycle'], 'def': 'a motor vehicle with two wheels and a strong frame', 'name': 'motorcycle'}, {'frequency': 'f', 'id': 715, 'synset': 'mound.n.01', 'synonyms': ['mound_(baseball)', "pitcher's_mound"], 'def': '(baseball) the slight elevation on which the pitcher stands', 'name': 'mound_(baseball)'}, {'frequency': 'r', 'id': 716, 'synset': 'mouse.n.01', 'synonyms': ['mouse_(animal_rodent)'], 'def': 'a small rodent with pointed snouts and small ears on elongated bodies with slender usually hairless tails', 'name': 'mouse_(animal_rodent)'}, {'frequency': 'f', 'id': 717, 'synset': 'mouse.n.04', 'synonyms': ['mouse_(computer_equipment)', 'computer_mouse'], 'def': 'a computer input device that controls an on-screen pointer', 'name': 'mouse_(computer_equipment)'}, {'frequency': 'f', 'id': 718, 'synset': 'mousepad.n.01', 'synonyms': ['mousepad'], 'def': 'a small portable pad that provides an operating surface for a computer mouse', 'name': 'mousepad'}, {'frequency': 'c', 'id': 719, 'synset': 'muffin.n.01', 'synonyms': ['muffin'], 'def': 'a sweet quick bread baked in a cup-shaped pan', 'name': 'muffin'}, {'frequency': 'f', 'id': 720, 'synset': 'mug.n.04', 'synonyms': ['mug'], 'def': 'with handle and usually cylindrical', 'name': 'mug'}, {'frequency': 'f', 'id': 721, 'synset': 'mushroom.n.02', 'synonyms': ['mushroom'], 'def': 'a common mushroom', 'name': 'mushroom'}, {'frequency': 'r', 'id': 722, 'synset': 'music_stool.n.01', 'synonyms': ['music_stool', 'piano_stool'], 'def': 'a stool for piano players; usually adjustable in height', 'name': 'music_stool'}, {'frequency': 'r', 'id': 723, 'synset': 'musical_instrument.n.01', 'synonyms': ['musical_instrument', 'instrument_(musical)'], 'def': 'any of various devices or contrivances that can be used to produce musical tones or sounds', 'name': 'musical_instrument'}, {'frequency': 'r', 'id': 724, 'synset': 'nailfile.n.01', 'synonyms': ['nailfile'], 'def': 'a small flat file for shaping the nails', 'name': 'nailfile'}, {'frequency': 'r', 'id': 725, 'synset': 'nameplate.n.01', 'synonyms': ['nameplate'], 'def': 'a plate bearing a name', 'name': 'nameplate'}, 
{'frequency': 'f', 'id': 726, 'synset': 'napkin.n.01', 'synonyms': ['napkin', 'table_napkin', 'serviette'], 'def': 'a small piece of table linen or paper that is used to wipe the mouth and to cover the lap in order to protect clothing', 'name': 'napkin'}, {'frequency': 'r', 'id': 727, 'synset': 'neckerchief.n.01', 'synonyms': ['neckerchief'], 'def': 'a kerchief worn around the neck', 'name': 'neckerchief'}, {'frequency': 'f', 'id': 728, 'synset': 'necklace.n.01', 'synonyms': ['necklace'], 'def': 'jewelry consisting of a cord or chain (often bearing gems) worn about the neck as an ornament', 'name': 'necklace'}, {'frequency': 'f', 'id': 729, 'synset': 'necktie.n.01', 'synonyms': ['necktie', 'tie_(necktie)'], 'def': 'neckwear consisting of a long narrow piece of material worn under a collar and tied in knot at the front', 'name': 'necktie'}, {'frequency': 'r', 'id': 730, 'synset': 'needle.n.03', 'synonyms': ['needle'], 'def': 'a sharp pointed implement (usually metal)', 'name': 'needle'}, {'frequency': 'c', 'id': 731, 'synset': 'nest.n.01', 'synonyms': ['nest'], 'def': 'a structure in which animals lay eggs or give birth to their young', 'name': 'nest'}, {'frequency': 'r', 'id': 732, 'synset': 'newsstand.n.01', 'synonyms': ['newsstand'], 'def': 'a stall where newspapers and other periodicals are sold', 'name': 'newsstand'}, {'frequency': 'c', 'id': 733, 'synset': 'nightwear.n.01', 'synonyms': ['nightshirt', 'nightwear', 'sleepwear', 'nightclothes'], 'def': 'garments designed to be worn in bed', 'name': 'nightshirt'}, {'frequency': 'r', 'id': 734, 'synset': 'nosebag.n.01', 'synonyms': ['nosebag_(for_animals)', 'feedbag'], 'def': 'a canvas bag that is used to feed an animal (such as a horse); covers the muzzle and fastens at the top of the head', 'name': 'nosebag_(for_animals)'}, {'frequency': 'r', 'id': 735, 'synset': 'noseband.n.01', 'synonyms': ['noseband_(for_animals)', 'nosepiece_(for_animals)'], 'def': "a strap that is the part of a bridle that goes over the animal's nose", 'name': 'noseband_(for_animals)'}, {'frequency': 'f', 'id': 736, 'synset': 'notebook.n.01', 'synonyms': ['notebook'], 'def': 'a book with blank pages for recording notes or memoranda', 'name': 'notebook'}, {'frequency': 'c', 'id': 737, 'synset': 'notepad.n.01', 'synonyms': ['notepad'], 'def': 'a pad of paper for keeping notes', 'name': 'notepad'}, {'frequency': 'c', 'id': 738, 'synset': 'nut.n.03', 'synonyms': ['nut'], 'def': 'a small metal block (usually square or hexagonal) with internal screw thread to be fitted onto a bolt', 'name': 'nut'}, {'frequency': 'r', 'id': 739, 'synset': 'nutcracker.n.01', 'synonyms': ['nutcracker'], 'def': 'a hand tool used to crack nuts open', 'name': 'nutcracker'}, {'frequency': 'c', 'id': 740, 'synset': 'oar.n.01', 'synonyms': ['oar'], 'def': 'an implement used to propel or steer a boat', 'name': 'oar'}, {'frequency': 'r', 'id': 741, 'synset': 'octopus.n.01', 'synonyms': ['octopus_(food)'], 'def': 'tentacles of octopus prepared as food', 'name': 'octopus_(food)'}, {'frequency': 'r', 'id': 742, 'synset': 'octopus.n.02', 'synonyms': ['octopus_(animal)'], 'def': 'bottom-living cephalopod having a soft oval body with eight long tentacles', 'name': 'octopus_(animal)'}, {'frequency': 'c', 'id': 743, 'synset': 'oil_lamp.n.01', 'synonyms': ['oil_lamp', 'kerosene_lamp', 'kerosine_lamp'], 'def': 'a lamp that burns oil (as kerosine) for light', 'name': 'oil_lamp'}, {'frequency': 'c', 'id': 744, 'synset': 'olive_oil.n.01', 'synonyms': ['olive_oil'], 'def': 'oil from olives', 'name': 'olive_oil'}, 
{'frequency': 'r', 'id': 745, 'synset': 'omelet.n.01', 'synonyms': ['omelet', 'omelette'], 'def': 'beaten eggs cooked until just set; may be folded around e.g. ham or cheese or jelly', 'name': 'omelet'}, {'frequency': 'f', 'id': 746, 'synset': 'onion.n.01', 'synonyms': ['onion'], 'def': 'the bulb of an onion plant', 'name': 'onion'}, {'frequency': 'f', 'id': 747, 'synset': 'orange.n.01', 'synonyms': ['orange_(fruit)'], 'def': 'orange (FRUIT of an orange tree)', 'name': 'orange_(fruit)'}, {'frequency': 'c', 'id': 748, 'synset': 'orange_juice.n.01', 'synonyms': ['orange_juice'], 'def': 'bottled or freshly squeezed juice of oranges', 'name': 'orange_juice'}, {'frequency': 'r', 'id': 749, 'synset': 'oregano.n.01', 'synonyms': ['oregano', 'marjoram'], 'def': 'aromatic Eurasian perennial herb used in cooking and baking', 'name': 'oregano'}, {'frequency': 'c', 'id': 750, 'synset': 'ostrich.n.02', 'synonyms': ['ostrich'], 'def': 'fast-running African flightless bird with two-toed feet; largest living bird', 'name': 'ostrich'}, {'frequency': 'c', 'id': 751, 'synset': 'ottoman.n.03', 'synonyms': ['ottoman', 'pouf', 'pouffe', 'hassock'], 'def': 'thick cushion used as a seat', 'name': 'ottoman'}, {'frequency': 'c', 'id': 752, 'synset': 'overall.n.01', 'synonyms': ['overalls_(clothing)'], 'def': 'work clothing consisting of denim trousers usually with a bib and shoulder straps', 'name': 'overalls_(clothing)'}, {'frequency': 'c', 'id': 753, 'synset': 'owl.n.01', 'synonyms': ['owl'], 'def': 'nocturnal bird of prey with hawk-like beak and claws and large head with front-facing eyes', 'name': 'owl'}, {'frequency': 'c', 'id': 754, 'synset': 'packet.n.03', 'synonyms': ['packet'], 'def': 'a small package or bundle', 'name': 'packet'}, {'frequency': 'r', 'id': 755, 'synset': 'pad.n.03', 'synonyms': ['inkpad', 'inking_pad', 'stamp_pad'], 'def': 'absorbent material saturated with ink used to transfer ink evenly to a rubber stamp', 'name': 'inkpad'}, {'frequency': 'c', 'id': 756, 'synset': 'pad.n.04', 'synonyms': ['pad'], 'def': 'a flat mass of soft material used for protection, stuffing, or comfort', 'name': 'pad'}, {'frequency': 'c', 'id': 757, 'synset': 'paddle.n.04', 'synonyms': ['paddle', 'boat_paddle'], 'def': 'a short light oar used without an oarlock to propel a canoe or small boat', 'name': 'paddle'}, {'frequency': 'c', 'id': 758, 'synset': 'padlock.n.01', 'synonyms': ['padlock'], 'def': 'a detachable, portable lock', 'name': 'padlock'}, {'frequency': 'r', 'id': 759, 'synset': 'paintbox.n.01', 'synonyms': ['paintbox'], 'def': "a box containing a collection of cubes or tubes of artists' paint", 'name': 'paintbox'}, {'frequency': 'c', 'id': 760, 'synset': 'paintbrush.n.01', 'synonyms': ['paintbrush'], 'def': 'a brush used as an applicator to apply paint', 'name': 'paintbrush'}, {'frequency': 'f', 'id': 761, 'synset': 'painting.n.01', 'synonyms': ['painting'], 'def': 'graphic art consisting of an artistic composition made by applying paints to a surface', 'name': 'painting'}, {'frequency': 'c', 'id': 762, 'synset': 'pajama.n.02', 'synonyms': ['pajamas', 'pyjamas'], 'def': 'loose-fitting nightclothes worn for sleeping or lounging', 'name': 'pajamas'}, {'frequency': 'c', 'id': 763, 'synset': 'palette.n.02', 'synonyms': ['palette', 'pallet'], 'def': 'board that provides a flat surface on which artists mix paints and the range of colors used', 'name': 'palette'}, {'frequency': 'f', 'id': 764, 'synset': 'pan.n.01', 'synonyms': ['pan_(for_cooking)', 'cooking_pan'], 'def': 'cooking utensil consisting of a wide 
metal vessel', 'name': 'pan_(for_cooking)'}, {'frequency': 'r', 'id': 765, 'synset': 'pan.n.03', 'synonyms': ['pan_(metal_container)'], 'def': 'shallow container made of metal', 'name': 'pan_(metal_container)'}, {'frequency': 'c', 'id': 766, 'synset': 'pancake.n.01', 'synonyms': ['pancake'], 'def': 'a flat cake of thin batter fried on both sides on a griddle', 'name': 'pancake'}, {'frequency': 'r', 'id': 767, 'synset': 'pantyhose.n.01', 'synonyms': ['pantyhose'], 'def': "a woman's tights consisting of underpants and stockings", 'name': 'pantyhose'}, {'frequency': 'r', 'id': 768, 'synset': 'papaya.n.02', 'synonyms': ['papaya'], 'def': 'large oval melon-like tropical fruit with yellowish flesh', 'name': 'papaya'}, {'frequency': 'r', 'id': 769, 'synset': 'paper_clip.n.01', 'synonyms': ['paperclip'], 'def': 'a wire or plastic clip for holding sheets of paper together', 'name': 'paperclip'}, {'frequency': 'f', 'id': 770, 'synset': 'paper_plate.n.01', 'synonyms': ['paper_plate'], 'def': 'a disposable plate made of cardboard', 'name': 'paper_plate'}, {'frequency': 'f', 'id': 771, 'synset': 'paper_towel.n.01', 'synonyms': ['paper_towel'], 'def': 'a disposable towel made of absorbent paper', 'name': 'paper_towel'}, {'frequency': 'r', 'id': 772, 'synset': 'paperback_book.n.01', 'synonyms': ['paperback_book', 'paper-back_book', 'softback_book', 'soft-cover_book'], 'def': 'a book with paper covers', 'name': 'paperback_book'}, {'frequency': 'r', 'id': 773, 'synset': 'paperweight.n.01', 'synonyms': ['paperweight'], 'def': 'a weight used to hold down a stack of papers', 'name': 'paperweight'}, {'frequency': 'c', 'id': 774, 'synset': 'parachute.n.01', 'synonyms': ['parachute'], 'def': 'rescue equipment consisting of a device that fills with air and retards your fall', 'name': 'parachute'}, {'frequency': 'r', 'id': 775, 'synset': 'parakeet.n.01', 'synonyms': ['parakeet', 'parrakeet', 'parroket', 'paraquet', 'paroquet', 'parroquet'], 'def': 'any of numerous small slender long-tailed parrots', 'name': 'parakeet'}, {'frequency': 'c', 'id': 776, 'synset': 'parasail.n.01', 'synonyms': ['parasail_(sports)'], 'def': 'parachute that will lift a person up into the air when it is towed by a motorboat or a car', 'name': 'parasail_(sports)'}, {'frequency': 'r', 'id': 777, 'synset': 'parchment.n.01', 'synonyms': ['parchment'], 'def': 'a superior paper resembling sheepskin', 'name': 'parchment'}, {'frequency': 'r', 'id': 778, 'synset': 'parka.n.01', 'synonyms': ['parka', 'anorak'], 'def': "a kind of heavy jacket (`windcheater' is a British term)", 'name': 'parka'}, {'frequency': 'f', 'id': 779, 'synset': 'parking_meter.n.01', 'synonyms': ['parking_meter'], 'def': 'a coin-operated timer located next to a parking space', 'name': 'parking_meter'}, {'frequency': 'c', 'id': 780, 'synset': 'parrot.n.01', 'synonyms': ['parrot'], 'def': 'usually brightly colored tropical birds with short hooked beaks and the ability to mimic sounds', 'name': 'parrot'}, {'frequency': 'c', 'id': 781, 'synset': 'passenger_car.n.01', 'synonyms': ['passenger_car_(part_of_a_train)', 'coach_(part_of_a_train)'], 'def': 'a railcar where passengers ride', 'name': 'passenger_car_(part_of_a_train)'}, {'frequency': 'r', 'id': 782, 'synset': 'passenger_ship.n.01', 'synonyms': ['passenger_ship'], 'def': 'a ship built to carry passengers', 'name': 'passenger_ship'}, {'frequency': 'r', 'id': 783, 'synset': 'passport.n.02', 'synonyms': ['passport'], 'def': 'a document issued by a country to a citizen allowing that person to travel abroad and re-enter the home 
country', 'name': 'passport'}, {'frequency': 'f', 'id': 784, 'synset': 'pastry.n.02', 'synonyms': ['pastry'], 'def': 'any of various baked foods made of dough or batter', 'name': 'pastry'}, {'frequency': 'r', 'id': 785, 'synset': 'patty.n.01', 'synonyms': ['patty_(food)'], 'def': 'small flat mass of chopped food', 'name': 'patty_(food)'}, {'frequency': 'c', 'id': 786, 'synset': 'pea.n.01', 'synonyms': ['pea_(food)'], 'def': 'seed of a pea plant used for food', 'name': 'pea_(food)'}, {'frequency': 'c', 'id': 787, 'synset': 'peach.n.03', 'synonyms': ['peach'], 'def': 'downy juicy fruit with sweet yellowish or whitish flesh', 'name': 'peach'}, {'frequency': 'c', 'id': 788, 'synset': 'peanut_butter.n.01', 'synonyms': ['peanut_butter'], 'def': 'a spread made from ground peanuts', 'name': 'peanut_butter'}, {'frequency': 'c', 'id': 789, 'synset': 'pear.n.01', 'synonyms': ['pear'], 'def': 'sweet juicy gritty-textured fruit available in many varieties', 'name': 'pear'}, {'frequency': 'r', 'id': 790, 'synset': 'peeler.n.03', 'synonyms': ['peeler_(tool_for_fruit_and_vegetables)'], 'def': 'a device for peeling vegetables or fruits', 'name': 'peeler_(tool_for_fruit_and_vegetables)'}, {'frequency': 'r', 'id': 791, 'synset': 'pegboard.n.01', 'synonyms': ['pegboard'], 'def': 'a board perforated with regularly spaced holes into which pegs can be fitted', 'name': 'pegboard'}, {'frequency': 'c', 'id': 792, 'synset': 'pelican.n.01', 'synonyms': ['pelican'], 'def': 'large long-winged warm-water seabird having a large bill with a distensible pouch for fish', 'name': 'pelican'}, {'frequency': 'f', 'id': 793, 'synset': 'pen.n.01', 'synonyms': ['pen'], 'def': 'a writing implement with a point from which ink flows', 'name': 'pen'}, {'frequency': 'c', 'id': 794, 'synset': 'pencil.n.01', 'synonyms': ['pencil'], 'def': 'a thin cylindrical pointed writing implement made of wood and graphite', 'name': 'pencil'}, {'frequency': 'r', 'id': 795, 'synset': 'pencil_box.n.01', 'synonyms': ['pencil_box', 'pencil_case'], 'def': 'a box for holding pencils', 'name': 'pencil_box'}, {'frequency': 'r', 'id': 796, 'synset': 'pencil_sharpener.n.01', 'synonyms': ['pencil_sharpener'], 'def': 'a rotary implement for sharpening the point on pencils', 'name': 'pencil_sharpener'}, {'frequency': 'r', 'id': 797, 'synset': 'pendulum.n.01', 'synonyms': ['pendulum'], 'def': 'an apparatus consisting of an object mounted so that it swings freely under the influence of gravity', 'name': 'pendulum'}, {'frequency': 'c', 'id': 798, 'synset': 'penguin.n.01', 'synonyms': ['penguin'], 'def': 'short-legged flightless birds of cold southern regions having webbed feet and wings modified as flippers', 'name': 'penguin'}, {'frequency': 'r', 'id': 799, 'synset': 'pennant.n.02', 'synonyms': ['pennant'], 'def': 'a flag longer than it is wide (and often tapering)', 'name': 'pennant'}, {'frequency': 'r', 'id': 800, 'synset': 'penny.n.02', 'synonyms': ['penny_(coin)'], 'def': 'a coin worth one-hundredth of the value of the basic unit', 'name': 'penny_(coin)'}, {'frequency': 'c', 'id': 801, 'synset': 'pepper.n.03', 'synonyms': ['pepper', 'peppercorn'], 'def': 'pungent seasoning from the berry of the common pepper plant; whole or ground', 'name': 'pepper'}, {'frequency': 'c', 'id': 802, 'synset': 'pepper_mill.n.01', 'synonyms': ['pepper_mill', 'pepper_grinder'], 'def': 'a mill for grinding pepper', 'name': 'pepper_mill'}, {'frequency': 'c', 'id': 803, 'synset': 'perfume.n.02', 'synonyms': ['perfume'], 'def': 'a toiletry that emits and diffuses a fragrant odor', 
'name': 'perfume'}, {'frequency': 'r', 'id': 804, 'synset': 'persimmon.n.02', 'synonyms': ['persimmon'], 'def': 'orange fruit resembling a plum; edible when fully ripe', 'name': 'persimmon'}, {'frequency': 'f', 'id': 805, 'synset': 'person.n.01', 'synonyms': ['baby', 'child', 'boy', 'girl', 'man', 'woman', 'person', 'human'], 'def': 'a human being', 'name': 'baby'}, {'frequency': 'r', 'id': 806, 'synset': 'pet.n.01', 'synonyms': ['pet'], 'def': 'a domesticated animal kept for companionship or amusement', 'name': 'pet'}, {'frequency': 'r', 'id': 807, 'synset': 'petfood.n.01', 'synonyms': ['petfood', 'pet-food'], 'def': 'food prepared for animal pets', 'name': 'petfood'}, {'frequency': 'r', 'id': 808, 'synset': 'pew.n.01', 'synonyms': ['pew_(church_bench)', 'church_bench'], 'def': 'long bench with backs; used in church by the congregation', 'name': 'pew_(church_bench)'}, {'frequency': 'r', 'id': 809, 'synset': 'phonebook.n.01', 'synonyms': ['phonebook', 'telephone_book', 'telephone_directory'], 'def': 'a directory containing an alphabetical list of telephone subscribers and their telephone numbers', 'name': 'phonebook'}, {'frequency': 'c', 'id': 810, 'synset': 'phonograph_record.n.01', 'synonyms': ['phonograph_record', 'phonograph_recording', 'record_(phonograph_recording)'], 'def': 'sound recording consisting of a typically black disk with a continuous groove', 'name': 'phonograph_record'}, {'frequency': 'c', 'id': 811, 'synset': 'piano.n.01', 'synonyms': ['piano'], 'def': 'a keyboard instrument that is played by depressing keys that cause hammers to strike tuned strings and produce sounds', 'name': 'piano'}, {'frequency': 'f', 'id': 812, 'synset': 'pickle.n.01', 'synonyms': ['pickle'], 'def': 'vegetables (especially cucumbers) preserved in brine or vinegar', 'name': 'pickle'}, {'frequency': 'f', 'id': 813, 'synset': 'pickup.n.01', 'synonyms': ['pickup_truck'], 'def': 'a light truck with an open body and low sides and a tailboard', 'name': 'pickup_truck'}, {'frequency': 'c', 'id': 814, 'synset': 'pie.n.01', 'synonyms': ['pie'], 'def': 'dish baked in pastry-lined pan often with a pastry top', 'name': 'pie'}, {'frequency': 'c', 'id': 815, 'synset': 'pigeon.n.01', 'synonyms': ['pigeon'], 'def': 'wild and domesticated birds having a heavy body and short legs', 'name': 'pigeon'}, {'frequency': 'r', 'id': 816, 'synset': 'piggy_bank.n.01', 'synonyms': ['piggy_bank', 'penny_bank'], 'def': "a child's coin bank (often shaped like a pig)", 'name': 'piggy_bank'}, {'frequency': 'f', 'id': 817, 'synset': 'pillow.n.01', 'synonyms': ['pillow'], 'def': 'a cushion to support the head of a sleeping person', 'name': 'pillow'}, {'frequency': 'r', 'id': 818, 'synset': 'pin.n.09', 'synonyms': ['pin_(non_jewelry)'], 'def': 'a small slender (often pointed) piece of wood or metal used to support or fasten or attach things', 'name': 'pin_(non_jewelry)'}, {'frequency': 'f', 'id': 819, 'synset': 'pineapple.n.02', 'synonyms': ['pineapple'], 'def': 'large sweet fleshy tropical fruit with a tuft of stiff leaves', 'name': 'pineapple'}, {'frequency': 'c', 'id': 820, 'synset': 'pinecone.n.01', 'synonyms': ['pinecone'], 'def': 'the seed-producing cone of a pine tree', 'name': 'pinecone'}, {'frequency': 'r', 'id': 821, 'synset': 'ping-pong_ball.n.01', 'synonyms': ['ping-pong_ball'], 'def': 'light hollow ball used in playing table tennis', 'name': 'ping-pong_ball'}, {'frequency': 'r', 'id': 822, 'synset': 'pinwheel.n.03', 'synonyms': ['pinwheel'], 'def': 'a toy consisting of vanes of colored paper or plastic that is pinned to a 
stick and spins when it is pointed into the wind', 'name': 'pinwheel'}, {'frequency': 'r', 'id': 823, 'synset': 'pipe.n.01', 'synonyms': ['tobacco_pipe'], 'def': 'a tube with a small bowl at one end; used for smoking tobacco', 'name': 'tobacco_pipe'}, {'frequency': 'f', 'id': 824, 'synset': 'pipe.n.02', 'synonyms': ['pipe', 'piping'], 'def': 'a long tube made of metal or plastic that is used to carry water or oil or gas etc.', 'name': 'pipe'}, {'frequency': 'r', 'id': 825, 'synset': 'pistol.n.01', 'synonyms': ['pistol', 'handgun'], 'def': 'a firearm that is held and fired with one hand', 'name': 'pistol'}, {'frequency': 'r', 'id': 826, 'synset': 'pita.n.01', 'synonyms': ['pita_(bread)', 'pocket_bread'], 'def': 'usually small round bread that can open into a pocket for filling', 'name': 'pita_(bread)'}, {'frequency': 'f', 'id': 827, 'synset': 'pitcher.n.02', 'synonyms': ['pitcher_(vessel_for_liquid)', 'ewer'], 'def': 'an open vessel with a handle and a spout for pouring', 'name': 'pitcher_(vessel_for_liquid)'}, {'frequency': 'r', 'id': 828, 'synset': 'pitchfork.n.01', 'synonyms': ['pitchfork'], 'def': 'a long-handled hand tool with sharp widely spaced prongs for lifting and pitching hay', 'name': 'pitchfork'}, {'frequency': 'f', 'id': 829, 'synset': 'pizza.n.01', 'synonyms': ['pizza'], 'def': 'Italian open pie made of thin bread dough spread with a spiced mixture of e.g. tomato sauce and cheese', 'name': 'pizza'}, {'frequency': 'f', 'id': 830, 'synset': 'place_mat.n.01', 'synonyms': ['place_mat'], 'def': 'a mat placed on a table for an individual place setting', 'name': 'place_mat'}, {'frequency': 'f', 'id': 831, 'synset': 'plate.n.04', 'synonyms': ['plate'], 'def': 'dish on which food is served or from which food is eaten', 'name': 'plate'}, {'frequency': 'c', 'id': 832, 'synset': 'platter.n.01', 'synonyms': ['platter'], 'def': 'a large shallow dish used for serving food', 'name': 'platter'}, {'frequency': 'r', 'id': 833, 'synset': 'playing_card.n.01', 'synonyms': ['playing_card'], 'def': 'one of a pack of cards that are used to play card games', 'name': 'playing_card'}, {'frequency': 'r', 'id': 834, 'synset': 'playpen.n.01', 'synonyms': ['playpen'], 'def': 'a portable enclosure in which babies may be left to play', 'name': 'playpen'}, {'frequency': 'c', 'id': 835, 'synset': 'pliers.n.01', 'synonyms': ['pliers', 'plyers'], 'def': 'a gripping hand tool with two hinged arms and (usually) serrated jaws', 'name': 'pliers'}, {'frequency': 'r', 'id': 836, 'synset': 'plow.n.01', 'synonyms': ['plow_(farm_equipment)', 'plough_(farm_equipment)'], 'def': 'a farm tool having one or more heavy blades to break the soil and cut a furrow prior to sowing', 'name': 'plow_(farm_equipment)'}, {'frequency': 'r', 'id': 837, 'synset': 'pocket_watch.n.01', 'synonyms': ['pocket_watch'], 'def': 'a watch that is carried in a small watch pocket', 'name': 'pocket_watch'}, {'frequency': 'c', 'id': 838, 'synset': 'pocketknife.n.01', 'synonyms': ['pocketknife'], 'def': 'a knife with a blade that folds into the handle; suitable for carrying in the pocket', 'name': 'pocketknife'}, {'frequency': 'c', 'id': 839, 'synset': 'poker.n.01', 'synonyms': ['poker_(fire_stirring_tool)', 'stove_poker', 'fire_hook'], 'def': 'fire iron consisting of a metal rod with a handle; used to stir a fire', 'name': 'poker_(fire_stirring_tool)'}, {'frequency': 'f', 'id': 840, 'synset': 'pole.n.01', 'synonyms': ['pole', 'post'], 'def': 'a long (usually round) rod of wood or metal or plastic', 'name': 'pole'}, {'frequency': 'r', 'id': 841, 'synset': 
'police_van.n.01', 'synonyms': ['police_van', 'police_wagon', 'paddy_wagon', 'patrol_wagon'], 'def': 'van used by police to transport prisoners', 'name': 'police_van'}, {'frequency': 'f', 'id': 842, 'synset': 'polo_shirt.n.01', 'synonyms': ['polo_shirt', 'sport_shirt'], 'def': 'a shirt with short sleeves designed for comfort and casual wear', 'name': 'polo_shirt'}, {'frequency': 'r', 'id': 843, 'synset': 'poncho.n.01', 'synonyms': ['poncho'], 'def': 'a blanket-like cloak with a hole in the center for the head', 'name': 'poncho'}, {'frequency': 'c', 'id': 844, 'synset': 'pony.n.05', 'synonyms': ['pony'], 'def': 'any of various breeds of small gentle horses usually less than five feet high at the shoulder', 'name': 'pony'}, {'frequency': 'r', 'id': 845, 'synset': 'pool_table.n.01', 'synonyms': ['pool_table', 'billiard_table', 'snooker_table'], 'def': 'game equipment consisting of a heavy table on which pool is played', 'name': 'pool_table'}, {'frequency': 'f', 'id': 846, 'synset': 'pop.n.02', 'synonyms': ['pop_(soda)', 'soda_(pop)', 'tonic', 'soft_drink'], 'def': 'a sweet drink containing carbonated water and flavoring', 'name': 'pop_(soda)'}, {'frequency': 'r', 'id': 847, 'synset': 'portrait.n.02', 'synonyms': ['portrait', 'portrayal'], 'def': 'any likeness of a person, in any medium', 'name': 'portrait'}, {'frequency': 'c', 'id': 848, 'synset': 'postbox.n.01', 'synonyms': ['postbox_(public)', 'mailbox_(public)'], 'def': 'public box for deposit of mail', 'name': 'postbox_(public)'}, {'frequency': 'c', 'id': 849, 'synset': 'postcard.n.01', 'synonyms': ['postcard', 'postal_card', 'mailing-card'], 'def': 'a card for sending messages by post without an envelope', 'name': 'postcard'}, {'frequency': 'f', 'id': 850, 'synset': 'poster.n.01', 'synonyms': ['poster', 'placard'], 'def': 'a sign posted in a public place as an advertisement', 'name': 'poster'}, {'frequency': 'f', 'id': 851, 'synset': 'pot.n.01', 'synonyms': ['pot'], 'def': 'metal or earthenware cooking vessel that is usually round and deep; often has a handle and lid', 'name': 'pot'}, {'frequency': 'f', 'id': 852, 'synset': 'pot.n.04', 'synonyms': ['flowerpot'], 'def': 'a container in which plants are cultivated', 'name': 'flowerpot'}, {'frequency': 'f', 'id': 853, 'synset': 'potato.n.01', 'synonyms': ['potato'], 'def': 'an edible tuber native to South America', 'name': 'potato'}, {'frequency': 'c', 'id': 854, 'synset': 'potholder.n.01', 'synonyms': ['potholder'], 'def': 'an insulated pad for holding hot pots', 'name': 'potholder'}, {'frequency': 'c', 'id': 855, 'synset': 'pottery.n.01', 'synonyms': ['pottery', 'clayware'], 'def': 'ceramic ware made from clay and baked in a kiln', 'name': 'pottery'}, {'frequency': 'c', 'id': 856, 'synset': 'pouch.n.01', 'synonyms': ['pouch'], 'def': 'a small or medium size container for holding or carrying things', 'name': 'pouch'}, {'frequency': 'r', 'id': 857, 'synset': 'power_shovel.n.01', 'synonyms': ['power_shovel', 'excavator', 'digger'], 'def': 'a machine for excavating', 'name': 'power_shovel'}, {'frequency': 'c', 'id': 858, 'synset': 'prawn.n.01', 'synonyms': ['prawn', 'shrimp'], 'def': 'any of various edible decapod crustaceans', 'name': 'prawn'}, {'frequency': 'f', 'id': 859, 'synset': 'printer.n.03', 'synonyms': ['printer', 'printing_machine'], 'def': 'a machine that prints', 'name': 'printer'}, {'frequency': 'c', 'id': 860, 'synset': 'projectile.n.01', 'synonyms': ['projectile_(weapon)', 'missile'], 'def': 'a weapon that is forcibly thrown or projected at a target', 'name':
'projectile_(weapon)'}, {'frequency': 'c', 'id': 861, 'synset': 'projector.n.02', 'synonyms': ['projector'], 'def': 'an optical instrument that projects an enlarged image onto a screen', 'name': 'projector'}, {'frequency': 'f', 'id': 862, 'synset': 'propeller.n.01', 'synonyms': ['propeller', 'propellor'], 'def': 'a mechanical device that rotates to push against air or water', 'name': 'propeller'}, {'frequency': 'r', 'id': 863, 'synset': 'prune.n.01', 'synonyms': ['prune'], 'def': 'dried plum', 'name': 'prune'}, {'frequency': 'r', 'id': 864, 'synset': 'pudding.n.01', 'synonyms': ['pudding'], 'def': 'any of various soft thick unsweetened baked dishes', 'name': 'pudding'}, {'frequency': 'r', 'id': 865, 'synset': 'puffer.n.02', 'synonyms': ['puffer_(fish)', 'pufferfish', 'blowfish', 'globefish'], 'def': 'fishes whose elongated spiny body can inflate itself with water or air to form a globe', 'name': 'puffer_(fish)'}, {'frequency': 'r', 'id': 866, 'synset': 'puffin.n.01', 'synonyms': ['puffin'], 'def': 'seabirds having short necks and brightly colored compressed bills', 'name': 'puffin'}, {'frequency': 'r', 'id': 867, 'synset': 'pug.n.01', 'synonyms': ['pug-dog'], 'def': 'small compact smooth-coated breed of Asiatic origin having a tightly curled tail and broad flat wrinkled muzzle', 'name': 'pug-dog'}, {'frequency': 'c', 'id': 868, 'synset': 'pumpkin.n.02', 'synonyms': ['pumpkin'], 'def': 'usually large pulpy deep-yellow round fruit of the squash family maturing in late summer or early autumn', 'name': 'pumpkin'}, {'frequency': 'r', 'id': 869, 'synset': 'punch.n.03', 'synonyms': ['puncher'], 'def': 'a tool for making holes or indentations', 'name': 'puncher'}, {'frequency': 'r', 'id': 870, 'synset': 'puppet.n.01', 'synonyms': ['puppet', 'marionette'], 'def': 'a small figure of a person operated from above with strings by a puppeteer', 'name': 'puppet'}, {'frequency': 'r', 'id': 871, 'synset': 'puppy.n.01', 'synonyms': ['puppy'], 'def': 'a young dog', 'name': 'puppy'}, {'frequency': 'r', 'id': 872, 'synset': 'quesadilla.n.01', 'synonyms': ['quesadilla'], 'def': 'a tortilla that is filled with cheese and heated', 'name': 'quesadilla'}, {'frequency': 'r', 'id': 873, 'synset': 'quiche.n.02', 'synonyms': ['quiche'], 'def': 'a tart filled with rich unsweetened custard; often contains other ingredients (as cheese or ham or seafood or vegetables)', 'name': 'quiche'}, {'frequency': 'f', 'id': 874, 'synset': 'quilt.n.01', 'synonyms': ['quilt', 'comforter'], 'def': 'bedding made of two layers of cloth filled with stuffing and stitched together', 'name': 'quilt'}, {'frequency': 'c', 'id': 875, 'synset': 'rabbit.n.01', 'synonyms': ['rabbit'], 'def': 'any of various burrowing animals of the family Leporidae having long ears and short tails', 'name': 'rabbit'}, {'frequency': 'r', 'id': 876, 'synset': 'racer.n.02', 'synonyms': ['race_car', 'racing_car'], 'def': 'a fast car that competes in races', 'name': 'race_car'}, {'frequency': 'c', 'id': 877, 'synset': 'racket.n.04', 'synonyms': ['racket', 'racquet'], 'def': 'a sports implement used to strike a ball in various games', 'name': 'racket'}, {'frequency': 'r', 'id': 878, 'synset': 'radar.n.01', 'synonyms': ['radar'], 'def': 'measuring instrument in which the echo of a pulse of microwave radiation is used to detect and locate distant objects', 'name': 'radar'}, {'frequency': 'c', 'id': 879, 'synset': 'radiator.n.03', 'synonyms': ['radiator'], 'def': 'a mechanism consisting of a metal honeycomb through which hot fluids circulate', 'name': 'radiator'}, 
{'frequency': 'c', 'id': 880, 'synset': 'radio_receiver.n.01', 'synonyms': ['radio_receiver', 'radio_set', 'radio', 'tuner_(radio)'], 'def': 'an electronic receiver that detects and demodulates and amplifies transmitted radio signals', 'name': 'radio_receiver'}, {'frequency': 'c', 'id': 881, 'synset': 'radish.n.03', 'synonyms': ['radish', 'daikon'], 'def': 'pungent edible root of any of various cultivated radish plants', 'name': 'radish'}, {'frequency': 'c', 'id': 882, 'synset': 'raft.n.01', 'synonyms': ['raft'], 'def': 'a flat float (usually made of logs or planks) that can be used for transport or as a platform for swimmers', 'name': 'raft'}, {'frequency': 'r', 'id': 883, 'synset': 'rag_doll.n.01', 'synonyms': ['rag_doll'], 'def': 'a cloth doll that is stuffed and (usually) painted', 'name': 'rag_doll'}, {'frequency': 'c', 'id': 884, 'synset': 'raincoat.n.01', 'synonyms': ['raincoat', 'waterproof_jacket'], 'def': 'a water-resistant coat', 'name': 'raincoat'}, {'frequency': 'c', 'id': 885, 'synset': 'ram.n.05', 'synonyms': ['ram_(animal)'], 'def': 'uncastrated adult male sheep', 'name': 'ram_(animal)'}, {'frequency': 'c', 'id': 886, 'synset': 'raspberry.n.02', 'synonyms': ['raspberry'], 'def': 'red or black edible aggregate berries usually smaller than the related blackberries', 'name': 'raspberry'}, {'frequency': 'r', 'id': 887, 'synset': 'rat.n.01', 'synonyms': ['rat'], 'def': 'any of various long-tailed rodents similar to but larger than a mouse', 'name': 'rat'}, {'frequency': 'c', 'id': 888, 'synset': 'razorblade.n.01', 'synonyms': ['razorblade'], 'def': 'a blade that has very sharp edge', 'name': 'razorblade'}, {'frequency': 'c', 'id': 889, 'synset': 'reamer.n.01', 'synonyms': ['reamer_(juicer)', 'juicer', 'juice_reamer'], 'def': 'a squeezer with a conical ridged center that is used for squeezing juice from citrus fruit', 'name': 'reamer_(juicer)'}, {'frequency': 'f', 'id': 890, 'synset': 'rearview_mirror.n.01', 'synonyms': ['rearview_mirror'], 'def': 'car mirror that reflects the view out of the rear window', 'name': 'rearview_mirror'}, {'frequency': 'c', 'id': 891, 'synset': 'receipt.n.02', 'synonyms': ['receipt'], 'def': 'an acknowledgment (usually tangible) that payment has been made', 'name': 'receipt'}, {'frequency': 'c', 'id': 892, 'synset': 'recliner.n.01', 'synonyms': ['recliner', 'reclining_chair', 'lounger_(chair)'], 'def': 'an armchair whose back can be lowered and foot can be raised to allow the sitter to recline in it', 'name': 'recliner'}, {'frequency': 'r', 'id': 893, 'synset': 'record_player.n.01', 'synonyms': ['record_player', 'phonograph_(record_player)', 'turntable'], 'def': 'machine in which rotating records cause a stylus to vibrate and the vibrations are amplified acoustically or electronically', 'name': 'record_player'}, {'frequency': 'r', 'id': 894, 'synset': 'red_cabbage.n.02', 'synonyms': ['red_cabbage'], 'def': 'compact head of purplish-red leaves', 'name': 'red_cabbage'}, {'frequency': 'f', 'id': 895, 'synset': 'reflector.n.01', 'synonyms': ['reflector'], 'def': 'device that reflects light, radiation, etc.', 'name': 'reflector'}, {'frequency': 'f', 'id': 896, 'synset': 'remote_control.n.01', 'synonyms': ['remote_control'], 'def': 'a device that can be used to control a machine or apparatus from a distance', 'name': 'remote_control'}, {'frequency': 'c', 'id': 897, 'synset': 'rhinoceros.n.01', 'synonyms': ['rhinoceros'], 'def': 'massive powerful herbivorous odd-toed ungulate of southeast Asia and Africa having very thick skin and one or two horns on the 
snout', 'name': 'rhinoceros'}, {'frequency': 'r', 'id': 898, 'synset': 'rib.n.03', 'synonyms': ['rib_(food)'], 'def': 'cut of meat including one or more ribs', 'name': 'rib_(food)'}, {'frequency': 'r', 'id': 899, 'synset': 'rifle.n.01', 'synonyms': ['rifle'], 'def': 'a shoulder firearm with a long barrel', 'name': 'rifle'}, {'frequency': 'f', 'id': 900, 'synset': 'ring.n.08', 'synonyms': ['ring'], 'def': 'jewelry consisting of a circlet of precious metal (often set with jewels) worn on the finger', 'name': 'ring'}, {'frequency': 'r', 'id': 901, 'synset': 'river_boat.n.01', 'synonyms': ['river_boat'], 'def': 'a boat used on rivers or to ply a river', 'name': 'river_boat'}, {'frequency': 'r', 'id': 902, 'synset': 'road_map.n.02', 'synonyms': ['road_map'], 'def': '(NOT A ROAD) a MAP showing roads (for automobile travel)', 'name': 'road_map'}, {'frequency': 'c', 'id': 903, 'synset': 'robe.n.01', 'synonyms': ['robe'], 'def': 'any loose flowing garment', 'name': 'robe'}, {'frequency': 'c', 'id': 904, 'synset': 'rocking_chair.n.01', 'synonyms': ['rocking_chair'], 'def': 'a chair mounted on rockers', 'name': 'rocking_chair'}, {'frequency': 'r', 'id': 905, 'synset': 'roller_skate.n.01', 'synonyms': ['roller_skate'], 'def': 'a shoe with pairs of rollers (small hard wheels) fixed to the sole', 'name': 'roller_skate'}, {'frequency': 'r', 'id': 906, 'synset': 'rollerblade.n.01', 'synonyms': ['Rollerblade'], 'def': 'an in-line variant of a roller skate', 'name': 'Rollerblade'}, {'frequency': 'c', 'id': 907, 'synset': 'rolling_pin.n.01', 'synonyms': ['rolling_pin'], 'def': 'utensil consisting of a cylinder (usually of wood) with a handle at each end; used to roll out dough', 'name': 'rolling_pin'}, {'frequency': 'r', 'id': 908, 'synset': 'root_beer.n.01', 'synonyms': ['root_beer'], 'def': 'carbonated drink containing extracts of roots and herbs', 'name': 'root_beer'}, {'frequency': 'c', 'id': 909, 'synset': 'router.n.02', 'synonyms': ['router_(computer_equipment)'], 'def': 'a device that forwards data packets between computer networks', 'name': 'router_(computer_equipment)'}, {'frequency': 'f', 'id': 910, 'synset': 'rubber_band.n.01', 'synonyms': ['rubber_band', 'elastic_band'], 'def': 'a narrow band of elastic rubber used to hold things (such as papers) together', 'name': 'rubber_band'}, {'frequency': 'c', 'id': 911, 'synset': 'runner.n.08', 'synonyms': ['runner_(carpet)'], 'def': 'a long narrow carpet', 'name': 'runner_(carpet)'}, {'frequency': 'f', 'id': 912, 'synset': 'sack.n.01', 'synonyms': ['plastic_bag', 'paper_bag'], 'def': "a bag made of paper or plastic for holding customer's purchases", 'name': 'plastic_bag'}, {'frequency': 'f', 'id': 913, 'synset': 'saddle.n.01', 'synonyms': ['saddle_(on_an_animal)'], 'def': 'a seat for the rider of a horse or camel', 'name': 'saddle_(on_an_animal)'}, {'frequency': 'f', 'id': 914, 'synset': 'saddle_blanket.n.01', 'synonyms': ['saddle_blanket', 'saddlecloth', 'horse_blanket'], 'def': 'stable gear consisting of a blanket placed under the saddle', 'name': 'saddle_blanket'}, {'frequency': 'c', 'id': 915, 'synset': 'saddlebag.n.01', 'synonyms': ['saddlebag'], 'def': 'a large bag (or pair of bags) hung over a saddle', 'name': 'saddlebag'}, {'frequency': 'r', 'id': 916, 'synset': 'safety_pin.n.01', 'synonyms': ['safety_pin'], 'def': 'a pin in the form of a clasp; has a guard so the point of the pin will not stick the user', 'name': 'safety_pin'}, {'frequency': 'c', 'id': 917, 'synset': 'sail.n.01', 'synonyms': ['sail'], 'def': 'a large piece of fabric by means of 
which wind is used to propel a sailing vessel', 'name': 'sail'}, {'frequency': 'c', 'id': 918, 'synset': 'salad.n.01', 'synonyms': ['salad'], 'def': 'food mixtures either arranged on a plate or tossed and served with a moist dressing; usually consisting of or including greens', 'name': 'salad'}, {'frequency': 'r', 'id': 919, 'synset': 'salad_plate.n.01', 'synonyms': ['salad_plate', 'salad_bowl'], 'def': 'a plate or bowl for individual servings of salad', 'name': 'salad_plate'}, {'frequency': 'r', 'id': 920, 'synset': 'salami.n.01', 'synonyms': ['salami'], 'def': 'highly seasoned fatty sausage of pork and beef usually dried', 'name': 'salami'}, {'frequency': 'r', 'id': 921, 'synset': 'salmon.n.01', 'synonyms': ['salmon_(fish)'], 'def': 'any of various large food and game fishes of northern waters', 'name': 'salmon_(fish)'}, {'frequency': 'r', 'id': 922, 'synset': 'salmon.n.03', 'synonyms': ['salmon_(food)'], 'def': 'flesh of any of various marine or freshwater fish of the family Salmonidae', 'name': 'salmon_(food)'}, {'frequency': 'r', 'id': 923, 'synset': 'salsa.n.01', 'synonyms': ['salsa'], 'def': 'spicy sauce of tomatoes and onions and chili peppers to accompany Mexican foods', 'name': 'salsa'}, {'frequency': 'f', 'id': 924, 'synset': 'saltshaker.n.01', 'synonyms': ['saltshaker'], 'def': 'a shaker with a perforated top for sprinkling salt', 'name': 'saltshaker'}, {'frequency': 'f', 'id': 925, 'synset': 'sandal.n.01', 'synonyms': ['sandal_(type_of_shoe)'], 'def': 'a shoe consisting of a sole fastened by straps to the foot', 'name': 'sandal_(type_of_shoe)'}, {'frequency': 'f', 'id': 926, 'synset': 'sandwich.n.01', 'synonyms': ['sandwich'], 'def': 'two (or more) slices of bread with a filling between them', 'name': 'sandwich'}, {'frequency': 'r', 'id': 927, 'synset': 'satchel.n.01', 'synonyms': ['satchel'], 'def': 'luggage consisting of a small case with a flat bottom and (usually) a shoulder strap', 'name': 'satchel'}, {'frequency': 'r', 'id': 928, 'synset': 'saucepan.n.01', 'synonyms': ['saucepan'], 'def': 'a deep pan with a handle; used for stewing or boiling', 'name': 'saucepan'}, {'frequency': 'f', 'id': 929, 'synset': 'saucer.n.02', 'synonyms': ['saucer'], 'def': 'a small shallow dish for holding a cup at the table', 'name': 'saucer'}, {'frequency': 'f', 'id': 930, 'synset': 'sausage.n.01', 'synonyms': ['sausage'], 'def': 'highly seasoned minced meat stuffed in casings', 'name': 'sausage'}, {'frequency': 'r', 'id': 931, 'synset': 'sawhorse.n.01', 'synonyms': ['sawhorse', 'sawbuck'], 'def': 'a framework for holding wood that is being sawed', 'name': 'sawhorse'}, {'frequency': 'r', 'id': 932, 'synset': 'sax.n.02', 'synonyms': ['saxophone'], 'def': "a wind instrument with a `J'-shaped form typically made of brass", 'name': 'saxophone'}, {'frequency': 'f', 'id': 933, 'synset': 'scale.n.07', 'synonyms': ['scale_(measuring_instrument)'], 'def': 'a measuring instrument for weighing; shows amount of mass', 'name': 'scale_(measuring_instrument)'}, {'frequency': 'r', 'id': 934, 'synset': 'scarecrow.n.01', 'synonyms': ['scarecrow', 'strawman'], 'def': 'an effigy in the shape of a man to frighten birds away from seeds', 'name': 'scarecrow'}, {'frequency': 'f', 'id': 935, 'synset': 'scarf.n.01', 'synonyms': ['scarf'], 'def': 'a garment worn around the head or neck or shoulders for warmth or decoration', 'name': 'scarf'}, {'frequency': 'c', 'id': 936, 'synset': 'school_bus.n.01', 'synonyms': ['school_bus'], 'def': 'a bus used to transport children to or from school', 'name': 'school_bus'}, 
{'frequency': 'f', 'id': 937, 'synset': 'scissors.n.01', 'synonyms': ['scissors'], 'def': 'a tool having two crossed pivoting blades with looped handles', 'name': 'scissors'}, {'frequency': 'c', 'id': 938, 'synset': 'scoreboard.n.01', 'synonyms': ['scoreboard'], 'def': 'a large board for displaying the score of a contest (and some other information)', 'name': 'scoreboard'}, {'frequency': 'c', 'id': 939, 'synset': 'scrambled_eggs.n.01', 'synonyms': ['scrambled_eggs'], 'def': 'eggs beaten and cooked to a soft firm consistency while stirring', 'name': 'scrambled_eggs'}, {'frequency': 'r', 'id': 940, 'synset': 'scraper.n.01', 'synonyms': ['scraper'], 'def': 'any of various hand tools for scraping', 'name': 'scraper'}, {'frequency': 'r', 'id': 941, 'synset': 'scratcher.n.03', 'synonyms': ['scratcher'], 'def': 'a device used for scratching', 'name': 'scratcher'}, {'frequency': 'c', 'id': 942, 'synset': 'screwdriver.n.01', 'synonyms': ['screwdriver'], 'def': 'a hand tool for driving screws; has a tip that fits into the head of a screw', 'name': 'screwdriver'}, {'frequency': 'c', 'id': 943, 'synset': 'scrub_brush.n.01', 'synonyms': ['scrubbing_brush'], 'def': 'a brush with short stiff bristles for heavy cleaning', 'name': 'scrubbing_brush'}, {'frequency': 'c', 'id': 944, 'synset': 'sculpture.n.01', 'synonyms': ['sculpture'], 'def': 'a three-dimensional work of art', 'name': 'sculpture'}, {'frequency': 'r', 'id': 945, 'synset': 'seabird.n.01', 'synonyms': ['seabird', 'seafowl'], 'def': 'a bird that frequents coastal waters and the open ocean: gulls; pelicans; gannets; cormorants; albatrosses; petrels; etc.', 'name': 'seabird'}, {'frequency': 'r', 'id': 946, 'synset': 'seahorse.n.02', 'synonyms': ['seahorse'], 'def': 'small fish with horse-like heads bent sharply downward and curled tails', 'name': 'seahorse'}, {'frequency': 'r', 'id': 947, 'synset': 'seaplane.n.01', 'synonyms': ['seaplane', 'hydroplane'], 'def': 'an airplane that can land on or take off from water', 'name': 'seaplane'}, {'frequency': 'c', 'id': 948, 'synset': 'seashell.n.01', 'synonyms': ['seashell'], 'def': 'the shell of a marine organism', 'name': 'seashell'}, {'frequency': 'r', 'id': 949, 'synset': 'seedling.n.01', 'synonyms': ['seedling'], 'def': 'young plant or tree grown from a seed', 'name': 'seedling'}, {'frequency': 'c', 'id': 950, 'synset': 'serving_dish.n.01', 'synonyms': ['serving_dish'], 'def': 'a dish used for serving food', 'name': 'serving_dish'}, {'frequency': 'r', 'id': 951, 'synset': 'sewing_machine.n.01', 'synonyms': ['sewing_machine'], 'def': 'a textile machine used as a home appliance for sewing', 'name': 'sewing_machine'}, {'frequency': 'r', 'id': 952, 'synset': 'shaker.n.03', 'synonyms': ['shaker'], 'def': 'a container in which something can be shaken', 'name': 'shaker'}, {'frequency': 'c', 'id': 953, 'synset': 'shampoo.n.01', 'synonyms': ['shampoo'], 'def': 'cleansing agent consisting of soaps or detergents used for washing the hair', 'name': 'shampoo'}, {'frequency': 'r', 'id': 954, 'synset': 'shark.n.01', 'synonyms': ['shark'], 'def': 'typically large carnivorous fishes with sharp teeth', 'name': 'shark'}, {'frequency': 'r', 'id': 955, 'synset': 'sharpener.n.01', 'synonyms': ['sharpener'], 'def': 'any implement that is used to make something (an edge or a point) sharper', 'name': 'sharpener'}, {'frequency': 'r', 'id': 956, 'synset': 'sharpie.n.03', 'synonyms': ['Sharpie'], 'def': 'a pen with indelible ink that will write on any surface', 'name': 'Sharpie'}, {'frequency': 'r', 'id': 957, 'synset': 
'shaver.n.03', 'synonyms': ['shaver_(electric)', 'electric_shaver', 'electric_razor'], 'def': 'a razor powered by an electric motor', 'name': 'shaver_(electric)'}, {'frequency': 'c', 'id': 958, 'synset': 'shaving_cream.n.01', 'synonyms': ['shaving_cream', 'shaving_soap'], 'def': 'toiletry that forms a rich lather for softening the beard before shaving', 'name': 'shaving_cream'}, {'frequency': 'r', 'id': 959, 'synset': 'shawl.n.01', 'synonyms': ['shawl'], 'def': 'cloak consisting of an oblong piece of cloth used to cover the head and shoulders', 'name': 'shawl'}, {'frequency': 'r', 'id': 960, 'synset': 'shears.n.01', 'synonyms': ['shears'], 'def': 'large scissors with strong blades', 'name': 'shears'}, {'frequency': 'f', 'id': 961, 'synset': 'sheep.n.01', 'synonyms': ['sheep'], 'def': 'woolly usually horned ruminant mammal related to the goat', 'name': 'sheep'}, {'frequency': 'r', 'id': 962, 'synset': 'shepherd_dog.n.01', 'synonyms': ['shepherd_dog', 'sheepdog'], 'def': 'any of various usually long-haired breeds of dog reared to herd and guard sheep', 'name': 'shepherd_dog'}, {'frequency': 'r', 'id': 963, 'synset': 'sherbert.n.01', 'synonyms': ['sherbert', 'sherbet'], 'def': 'a frozen dessert made primarily of fruit juice and sugar', 'name': 'sherbert'}, {'frequency': 'r', 'id': 964, 'synset': 'shield.n.02', 'synonyms': ['shield'], 'def': 'armor carried on the arm to intercept blows', 'name': 'shield'}, {'frequency': 'f', 'id': 965, 'synset': 'shirt.n.01', 'synonyms': ['shirt'], 'def': 'a garment worn on the upper half of the body', 'name': 'shirt'}, {'frequency': 'f', 'id': 966, 'synset': 'shoe.n.01', 'synonyms': ['shoe', 'sneaker_(type_of_shoe)', 'tennis_shoe'], 'def': 'common footwear covering the foot', 'name': 'shoe'}, {'frequency': 'c', 'id': 967, 'synset': 'shopping_bag.n.01', 'synonyms': ['shopping_bag'], 'def': 'a bag made of plastic or strong paper (often with handles); used to transport goods after shopping', 'name': 'shopping_bag'}, {'frequency': 'c', 'id': 968, 'synset': 'shopping_cart.n.01', 'synonyms': ['shopping_cart'], 'def': 'a handcart that holds groceries or other goods while shopping', 'name': 'shopping_cart'}, {'frequency': 'f', 'id': 969, 'synset': 'short_pants.n.01', 'synonyms': ['short_pants', 'shorts_(clothing)', 'trunks_(clothing)'], 'def': 'trousers that end at or above the knee', 'name': 'short_pants'}, {'frequency': 'r', 'id': 970, 'synset': 'shot_glass.n.01', 'synonyms': ['shot_glass'], 'def': 'a small glass adequate to hold a single swallow of whiskey', 'name': 'shot_glass'}, {'frequency': 'c', 'id': 971, 'synset': 'shoulder_bag.n.01', 'synonyms': ['shoulder_bag'], 'def': 'a large handbag that can be carried by a strap looped over the shoulder', 'name': 'shoulder_bag'}, {'frequency': 'c', 'id': 972, 'synset': 'shovel.n.01', 'synonyms': ['shovel'], 'def': 'a hand tool for lifting loose material such as snow, dirt, etc.', 'name': 'shovel'}, {'frequency': 'f', 'id': 973, 'synset': 'shower.n.01', 'synonyms': ['shower_head'], 'def': 'a plumbing fixture that sprays water over you', 'name': 'shower_head'}, {'frequency': 'f', 'id': 974, 'synset': 'shower_curtain.n.01', 'synonyms': ['shower_curtain'], 'def': 'a curtain that keeps water from splashing out of the shower area', 'name': 'shower_curtain'}, {'frequency': 'r', 'id': 975, 'synset': 'shredder.n.01', 'synonyms': ['shredder_(for_paper)'], 'def': 'a device that shreds documents', 'name': 'shredder_(for_paper)'}, {'frequency': 'r', 'id': 976, 'synset': 'sieve.n.01', 'synonyms': ['sieve', 
'screen_(sieve)'], 'def': 'a strainer for separating lumps from powdered material or grading particles', 'name': 'sieve'}, {'frequency': 'f', 'id': 977, 'synset': 'signboard.n.01', 'synonyms': ['signboard'], 'def': 'structure displaying a board on which advertisements can be posted', 'name': 'signboard'}, {'frequency': 'c', 'id': 978, 'synset': 'silo.n.01', 'synonyms': ['silo'], 'def': 'a cylindrical tower used for storing goods', 'name': 'silo'}, {'frequency': 'f', 'id': 979, 'synset': 'sink.n.01', 'synonyms': ['sink'], 'def': 'plumbing fixture consisting of a water basin fixed to a wall or floor and having a drainpipe', 'name': 'sink'}, {'frequency': 'f', 'id': 980, 'synset': 'skateboard.n.01', 'synonyms': ['skateboard'], 'def': 'a board with wheels that is ridden in a standing or crouching position and propelled by foot', 'name': 'skateboard'}, {'frequency': 'c', 'id': 981, 'synset': 'skewer.n.01', 'synonyms': ['skewer'], 'def': 'a long pin for holding meat in position while it is being roasted', 'name': 'skewer'}, {'frequency': 'f', 'id': 982, 'synset': 'ski.n.01', 'synonyms': ['ski'], 'def': 'sports equipment for skiing on snow', 'name': 'ski'}, {'frequency': 'f', 'id': 983, 'synset': 'ski_boot.n.01', 'synonyms': ['ski_boot'], 'def': 'a stiff boot that is fastened to a ski with a ski binding', 'name': 'ski_boot'}, {'frequency': 'f', 'id': 984, 'synset': 'ski_parka.n.01', 'synonyms': ['ski_parka', 'ski_jacket'], 'def': 'a parka to be worn while skiing', 'name': 'ski_parka'}, {'frequency': 'f', 'id': 985, 'synset': 'ski_pole.n.01', 'synonyms': ['ski_pole'], 'def': 'a pole with metal points used as an aid in skiing', 'name': 'ski_pole'}, {'frequency': 'f', 'id': 986, 'synset': 'skirt.n.02', 'synonyms': ['skirt'], 'def': 'a garment hanging from the waist; worn mainly by girls and women', 'name': 'skirt'}, {'frequency': 'c', 'id': 987, 'synset': 'sled.n.01', 'synonyms': ['sled', 'sledge', 'sleigh'], 'def': 'a vehicle or flat object for transportation over snow by sliding or pulled by dogs, etc.', 'name': 'sled'}, {'frequency': 'c', 'id': 988, 'synset': 'sleeping_bag.n.01', 'synonyms': ['sleeping_bag'], 'def': 'large padded bag designed to be slept in outdoors', 'name': 'sleeping_bag'}, {'frequency': 'r', 'id': 989, 'synset': 'sling.n.05', 'synonyms': ['sling_(bandage)', 'triangular_bandage'], 'def': 'bandage to support an injured forearm; slung over the shoulder or neck', 'name': 'sling_(bandage)'}, {'frequency': 'c', 'id': 990, 'synset': 'slipper.n.01', 'synonyms': ['slipper_(footwear)', 'carpet_slipper_(footwear)'], 'def': 'low footwear that can be slipped on and off easily; usually worn indoors', 'name': 'slipper_(footwear)'}, {'frequency': 'r', 'id': 991, 'synset': 'smoothie.n.02', 'synonyms': ['smoothie'], 'def': 'a thick smooth drink consisting of fresh fruit pureed with ice cream or yoghurt or milk', 'name': 'smoothie'}, {'frequency': 'r', 'id': 992, 'synset': 'snake.n.01', 'synonyms': ['snake', 'serpent'], 'def': 'limbless scaly elongate reptile; some are venomous', 'name': 'snake'}, {'frequency': 'f', 'id': 993, 'synset': 'snowboard.n.01', 'synonyms': ['snowboard'], 'def': 'a board that resembles a broad ski or a small surfboard; used in a standing position to slide down snow-covered slopes', 'name': 'snowboard'}, {'frequency': 'c', 'id': 994, 'synset': 'snowman.n.01', 'synonyms': ['snowman'], 'def': 'a figure of a person made of packed snow', 'name': 'snowman'}, {'frequency': 'c', 'id': 995, 'synset': 'snowmobile.n.01', 'synonyms': ['snowmobile'], 'def': 'tracked vehicle for 
travel on snow having skis in front', 'name': 'snowmobile'}, {'frequency': 'f', 'id': 996, 'synset': 'soap.n.01', 'synonyms': ['soap'], 'def': 'a cleansing agent made from the salts of vegetable or animal fats', 'name': 'soap'}, {'frequency': 'f', 'id': 997, 'synset': 'soccer_ball.n.01', 'synonyms': ['soccer_ball'], 'def': "an inflated ball used in playing soccer (called `football' outside of the United States)", 'name': 'soccer_ball'}, {'frequency': 'f', 'id': 998, 'synset': 'sock.n.01', 'synonyms': ['sock'], 'def': 'cloth covering for the foot; worn inside the shoe; reaches to between the ankle and the knee', 'name': 'sock'}, {'frequency': 'r', 'id': 999, 'synset': 'soda_fountain.n.02', 'synonyms': ['soda_fountain'], 'def': 'an apparatus for dispensing soda water', 'name': 'soda_fountain'}, {'frequency': 'r', 'id': 1000, 'synset': 'soda_water.n.01', 'synonyms': ['carbonated_water', 'club_soda', 'seltzer', 'sparkling_water'], 'def': 'effervescent beverage artificially charged with carbon dioxide', 'name': 'carbonated_water'}, {'frequency': 'f', 'id': 1001, 'synset': 'sofa.n.01', 'synonyms': ['sofa', 'couch', 'lounge'], 'def': 'an upholstered seat for more than one person', 'name': 'sofa'}, {'frequency': 'r', 'id': 1002, 'synset': 'softball.n.01', 'synonyms': ['softball'], 'def': 'ball used in playing softball', 'name': 'softball'}, {'frequency': 'c', 'id': 1003, 'synset': 'solar_array.n.01', 'synonyms': ['solar_array', 'solar_battery', 'solar_panel'], 'def': 'electrical device consisting of a large array of connected solar cells', 'name': 'solar_array'}, {'frequency': 'r', 'id': 1004, 'synset': 'sombrero.n.02', 'synonyms': ['sombrero'], 'def': 'a straw hat with a tall crown and broad brim; worn in American southwest and in Mexico', 'name': 'sombrero'}, {'frequency': 'c', 'id': 1005, 'synset': 'soup.n.01', 'synonyms': ['soup'], 'def': 'liquid food especially of meat or fish or vegetable stock often containing pieces of solid food', 'name': 'soup'}, {'frequency': 'r', 'id': 1006, 'synset': 'soup_bowl.n.01', 'synonyms': ['soup_bowl'], 'def': 'a bowl for serving soup', 'name': 'soup_bowl'}, {'frequency': 'c', 'id': 1007, 'synset': 'soupspoon.n.01', 'synonyms': ['soupspoon'], 'def': 'a spoon with a rounded bowl for eating soup', 'name': 'soupspoon'}, {'frequency': 'c', 'id': 1008, 'synset': 'sour_cream.n.01', 'synonyms': ['sour_cream', 'soured_cream'], 'def': 'soured light cream', 'name': 'sour_cream'}, {'frequency': 'r', 'id': 1009, 'synset': 'soya_milk.n.01', 'synonyms': ['soya_milk', 'soybean_milk', 'soymilk'], 'def': 'a milk substitute containing soybean flour and water; used in some infant formulas and in making tofu', 'name': 'soya_milk'}, {'frequency': 'r', 'id': 1010, 'synset': 'space_shuttle.n.01', 'synonyms': ['space_shuttle'], 'def': "a reusable spacecraft with wings for a controlled descent through the Earth's atmosphere", 'name': 'space_shuttle'}, {'frequency': 'r', 'id': 1011, 'synset': 'sparkler.n.02', 'synonyms': ['sparkler_(fireworks)'], 'def': 'a firework that burns slowly and throws out a shower of sparks', 'name': 'sparkler_(fireworks)'}, {'frequency': 'f', 'id': 1012, 'synset': 'spatula.n.02', 'synonyms': ['spatula'], 'def': 'a hand tool with a thin flexible blade used to mix or spread soft substances', 'name': 'spatula'}, {'frequency': 'r', 'id': 1013, 'synset': 'spear.n.01', 'synonyms': ['spear', 'lance'], 'def': 'a long pointed rod used as a tool or weapon', 'name': 'spear'}, {'frequency': 'f', 'id': 1014, 'synset': 'spectacles.n.01', 'synonyms': ['spectacles', 'specs', 
'eyeglasses', 'glasses'], 'def': 'optical instrument consisting of a frame that holds a pair of lenses for correcting defective vision', 'name': 'spectacles'}, {'frequency': 'c', 'id': 1015, 'synset': 'spice_rack.n.01', 'synonyms': ['spice_rack'], 'def': 'a rack for displaying containers filled with spices', 'name': 'spice_rack'}, {'frequency': 'r', 'id': 1016, 'synset': 'spider.n.01', 'synonyms': ['spider'], 'def': 'predatory arachnid with eight legs, two poison fangs, two feelers, and usually two silk-spinning organs at the back end of the body', 'name': 'spider'}, {'frequency': 'c', 'id': 1017, 'synset': 'sponge.n.01', 'synonyms': ['sponge'], 'def': 'a porous mass usable to absorb water typically used for cleaning', 'name': 'sponge'}, {'frequency': 'f', 'id': 1018, 'synset': 'spoon.n.01', 'synonyms': ['spoon'], 'def': 'a piece of cutlery with a shallow bowl-shaped container and a handle', 'name': 'spoon'}, {'frequency': 'c', 'id': 1019, 'synset': 'sportswear.n.01', 'synonyms': ['sportswear', 'athletic_wear', 'activewear'], 'def': 'attire worn for sport or for casual wear', 'name': 'sportswear'}, {'frequency': 'c', 'id': 1020, 'synset': 'spotlight.n.02', 'synonyms': ['spotlight'], 'def': 'a lamp that produces a strong beam of light to illuminate a restricted area; used to focus attention of a stage performer', 'name': 'spotlight'}, {'frequency': 'r', 'id': 1021, 'synset': 'squirrel.n.01', 'synonyms': ['squirrel'], 'def': 'a kind of arboreal rodent having a long bushy tail', 'name': 'squirrel'}, {'frequency': 'c', 'id': 1022, 'synset': 'stapler.n.01', 'synonyms': ['stapler_(stapling_machine)'], 'def': 'a machine that inserts staples into sheets of paper in order to fasten them together', 'name': 'stapler_(stapling_machine)'}, {'frequency': 'r', 'id': 1023, 'synset': 'starfish.n.01', 'synonyms': ['starfish', 'sea_star'], 'def': 'echinoderms characterized by five arms extending from a central disk', 'name': 'starfish'}, {'frequency': 'f', 'id': 1024, 'synset': 'statue.n.01', 'synonyms': ['statue_(sculpture)'], 'def': 'a sculpture representing a human or animal', 'name': 'statue_(sculpture)'}, {'frequency': 'c', 'id': 1025, 'synset': 'steak.n.01', 'synonyms': ['steak_(food)'], 'def': 'a slice of meat cut from the fleshy part of an animal or large fish', 'name': 'steak_(food)'}, {'frequency': 'r', 'id': 1026, 'synset': 'steak_knife.n.01', 'synonyms': ['steak_knife'], 'def': 'a sharp table knife used in eating steak', 'name': 'steak_knife'}, {'frequency': 'r', 'id': 1027, 'synset': 'steamer.n.02', 'synonyms': ['steamer_(kitchen_appliance)'], 'def': 'a cooking utensil that can be used to cook food by steaming it', 'name': 'steamer_(kitchen_appliance)'}, {'frequency': 'f', 'id': 1028, 'synset': 'steering_wheel.n.01', 'synonyms': ['steering_wheel'], 'def': 'a handwheel that is used for steering', 'name': 'steering_wheel'}, {'frequency': 'r', 'id': 1029, 'synset': 'stencil.n.01', 'synonyms': ['stencil'], 'def': 'a sheet of material (metal, plastic, etc.) 
that has been perforated with a pattern; ink or paint can pass through the perforations to create the printed pattern on the surface below', 'name': 'stencil'}, {'frequency': 'r', 'id': 1030, 'synset': 'step_ladder.n.01', 'synonyms': ['stepladder'], 'def': 'a folding portable ladder hinged at the top', 'name': 'stepladder'}, {'frequency': 'c', 'id': 1031, 'synset': 'step_stool.n.01', 'synonyms': ['step_stool'], 'def': 'a stool that has one or two steps that fold under the seat', 'name': 'step_stool'}, {'frequency': 'c', 'id': 1032, 'synset': 'stereo.n.01', 'synonyms': ['stereo_(sound_system)'], 'def': 'electronic device for playing audio', 'name': 'stereo_(sound_system)'}, {'frequency': 'r', 'id': 1033, 'synset': 'stew.n.02', 'synonyms': ['stew'], 'def': 'food prepared by stewing especially meat or fish with vegetables', 'name': 'stew'}, {'frequency': 'r', 'id': 1034, 'synset': 'stirrer.n.02', 'synonyms': ['stirrer'], 'def': 'an implement used for stirring', 'name': 'stirrer'}, {'frequency': 'f', 'id': 1035, 'synset': 'stirrup.n.01', 'synonyms': ['stirrup'], 'def': "support consisting of metal loops into which rider's feet go", 'name': 'stirrup'}, {'frequency': 'c', 'id': 1036, 'synset': 'stocking.n.01', 'synonyms': ['stockings_(leg_wear)'], 'def': 'close-fitting hosiery to cover the foot and leg; come in matched pairs', 'name': 'stockings_(leg_wear)'}, {'frequency': 'f', 'id': 1037, 'synset': 'stool.n.01', 'synonyms': ['stool'], 'def': 'a simple seat without a back or arms', 'name': 'stool'}, {'frequency': 'f', 'id': 1038, 'synset': 'stop_sign.n.01', 'synonyms': ['stop_sign'], 'def': 'a traffic sign to notify drivers that they must come to a complete stop', 'name': 'stop_sign'}, {'frequency': 'f', 'id': 1039, 'synset': 'stoplight.n.01', 'synonyms': ['brake_light'], 'def': 'a red light on the rear of a motor vehicle that signals when the brakes are applied', 'name': 'brake_light'}, {'frequency': 'f', 'id': 1040, 'synset': 'stove.n.01', 'synonyms': ['stove', 'kitchen_stove', 'range_(kitchen_appliance)', 'kitchen_range', 'cooking_stove'], 'def': 'a kitchen appliance used for cooking food', 'name': 'stove'}, {'frequency': 'c', 'id': 1041, 'synset': 'strainer.n.01', 'synonyms': ['strainer'], 'def': 'a filter to retain larger pieces while smaller pieces and liquids pass through', 'name': 'strainer'}, {'frequency': 'f', 'id': 1042, 'synset': 'strap.n.01', 'synonyms': ['strap'], 'def': 'an elongated strip of material for binding things together or holding', 'name': 'strap'}, {'frequency': 'f', 'id': 1043, 'synset': 'straw.n.04', 'synonyms': ['straw_(for_drinking)', 'drinking_straw'], 'def': 'a thin paper or plastic tube used to suck liquids into the mouth', 'name': 'straw_(for_drinking)'}, {'frequency': 'f', 'id': 1044, 'synset': 'strawberry.n.01', 'synonyms': ['strawberry'], 'def': 'sweet fleshy red fruit', 'name': 'strawberry'}, {'frequency': 'f', 'id': 1045, 'synset': 'street_sign.n.01', 'synonyms': ['street_sign'], 'def': 'a sign visible from the street', 'name': 'street_sign'}, {'frequency': 'f', 'id': 1046, 'synset': 'streetlight.n.01', 'synonyms': ['streetlight', 'street_lamp'], 'def': 'a lamp supported on a lamppost; for illuminating a street', 'name': 'streetlight'}, {'frequency': 'r', 'id': 1047, 'synset': 'string_cheese.n.01', 'synonyms': ['string_cheese'], 'def': 'cheese formed in long strings twisted together', 'name': 'string_cheese'}, {'frequency': 'r', 'id': 1048, 'synset': 'stylus.n.02', 'synonyms': ['stylus'], 'def': 'a pointed tool for writing or drawing or engraving', 'name': 
'stylus'}, {'frequency': 'r', 'id': 1049, 'synset': 'subwoofer.n.01', 'synonyms': ['subwoofer'], 'def': 'a loudspeaker that is designed to reproduce very low bass frequencies', 'name': 'subwoofer'}, {'frequency': 'r', 'id': 1050, 'synset': 'sugar_bowl.n.01', 'synonyms': ['sugar_bowl'], 'def': 'a dish in which sugar is served', 'name': 'sugar_bowl'}, {'frequency': 'r', 'id': 1051, 'synset': 'sugarcane.n.01', 'synonyms': ['sugarcane_(plant)'], 'def': 'juicy canes whose sap is a source of molasses and commercial sugar; fresh canes are sometimes chewed for the juice', 'name': 'sugarcane_(plant)'}, {'frequency': 'c', 'id': 1052, 'synset': 'suit.n.01', 'synonyms': ['suit_(clothing)'], 'def': 'a set of garments (usually including a jacket and trousers or skirt) for outerwear all of the same fabric and color', 'name': 'suit_(clothing)'}, {'frequency': 'c', 'id': 1053, 'synset': 'sunflower.n.01', 'synonyms': ['sunflower'], 'def': 'any plant of the genus Helianthus having large flower heads with dark disk florets and showy yellow rays', 'name': 'sunflower'}, {'frequency': 'f', 'id': 1054, 'synset': 'sunglasses.n.01', 'synonyms': ['sunglasses'], 'def': 'spectacles that are darkened or polarized to protect the eyes from the glare of the sun', 'name': 'sunglasses'}, {'frequency': 'c', 'id': 1055, 'synset': 'sunhat.n.01', 'synonyms': ['sunhat'], 'def': 'a hat with a broad brim that protects the face from direct exposure to the sun', 'name': 'sunhat'}, {'frequency': 'r', 'id': 1056, 'synset': 'sunscreen.n.01', 'synonyms': ['sunscreen', 'sunblock'], 'def': 'a cream spread on the skin; contains a chemical to filter out ultraviolet light and so protect from sunburn', 'name': 'sunscreen'}, {'frequency': 'f', 'id': 1057, 'synset': 'surfboard.n.01', 'synonyms': ['surfboard'], 'def': 'a narrow buoyant board for riding surf', 'name': 'surfboard'}, {'frequency': 'c', 'id': 1058, 'synset': 'sushi.n.01', 'synonyms': ['sushi'], 'def': 'rice (with raw fish) wrapped in seaweed', 'name': 'sushi'}, {'frequency': 'c', 'id': 1059, 'synset': 'swab.n.02', 'synonyms': ['mop'], 'def': 'cleaning implement consisting of absorbent material fastened to a handle; for cleaning floors', 'name': 'mop'}, {'frequency': 'c', 'id': 1060, 'synset': 'sweat_pants.n.01', 'synonyms': ['sweat_pants'], 'def': 'loose-fitting trousers with elastic cuffs; worn by athletes', 'name': 'sweat_pants'}, {'frequency': 'c', 'id': 1061, 'synset': 'sweatband.n.02', 'synonyms': ['sweatband'], 'def': 'a band of material tied around the forehead or wrist to absorb sweat', 'name': 'sweatband'}, {'frequency': 'f', 'id': 1062, 'synset': 'sweater.n.01', 'synonyms': ['sweater'], 'def': 'a crocheted or knitted garment covering the upper part of the body', 'name': 'sweater'}, {'frequency': 'f', 'id': 1063, 'synset': 'sweatshirt.n.01', 'synonyms': ['sweatshirt'], 'def': 'cotton knit pullover with long sleeves worn during athletic activity', 'name': 'sweatshirt'}, {'frequency': 'c', 'id': 1064, 'synset': 'sweet_potato.n.02', 'synonyms': ['sweet_potato'], 'def': 'the edible tuberous root of the sweet potato vine', 'name': 'sweet_potato'}, {'frequency': 'f', 'id': 1065, 'synset': 'swimsuit.n.01', 'synonyms': ['swimsuit', 'swimwear', 'bathing_suit', 'swimming_costume', 'bathing_costume', 'swimming_trunks', 'bathing_trunks'], 'def': 'garment worn for swimming', 'name': 'swimsuit'}, {'frequency': 'c', 'id': 1066, 'synset': 'sword.n.01', 'synonyms': ['sword'], 'def': 'a cutting or thrusting weapon that has a long metal blade', 'name': 'sword'}, {'frequency': 'r', 'id': 1067, 
'synset': 'syringe.n.01', 'synonyms': ['syringe'], 'def': 'a medical instrument used to inject or withdraw fluids', 'name': 'syringe'}, {'frequency': 'r', 'id': 1068, 'synset': 'tabasco.n.02', 'synonyms': ['Tabasco_sauce'], 'def': 'very spicy sauce (trade name Tabasco) made from fully-aged red peppers', 'name': 'Tabasco_sauce'}, {'frequency': 'r', 'id': 1069, 'synset': 'table-tennis_table.n.01', 'synonyms': ['table-tennis_table', 'ping-pong_table'], 'def': 'a table used for playing table tennis', 'name': 'table-tennis_table'}, {'frequency': 'f', 'id': 1070, 'synset': 'table.n.02', 'synonyms': ['table'], 'def': 'a piece of furniture having a smooth flat top that is usually supported by one or more vertical legs', 'name': 'table'}, {'frequency': 'c', 'id': 1071, 'synset': 'table_lamp.n.01', 'synonyms': ['table_lamp'], 'def': 'a lamp that sits on a table', 'name': 'table_lamp'}, {'frequency': 'f', 'id': 1072, 'synset': 'tablecloth.n.01', 'synonyms': ['tablecloth'], 'def': 'a covering spread over a dining table', 'name': 'tablecloth'}, {'frequency': 'r', 'id': 1073, 'synset': 'tachometer.n.01', 'synonyms': ['tachometer'], 'def': 'measuring instrument for indicating speed of rotation', 'name': 'tachometer'}, {'frequency': 'r', 'id': 1074, 'synset': 'taco.n.02', 'synonyms': ['taco'], 'def': 'a small tortilla cupped around a filling', 'name': 'taco'}, {'frequency': 'f', 'id': 1075, 'synset': 'tag.n.02', 'synonyms': ['tag'], 'def': 'a label associated with something for the purpose of identification or information', 'name': 'tag'}, {'frequency': 'f', 'id': 1076, 'synset': 'taillight.n.01', 'synonyms': ['taillight', 'rear_light'], 'def': 'lamp (usually red) mounted at the rear of a motor vehicle', 'name': 'taillight'}, {'frequency': 'r', 'id': 1077, 'synset': 'tambourine.n.01', 'synonyms': ['tambourine'], 'def': 'a shallow drum with a single drumhead and with metallic disks in the sides', 'name': 'tambourine'}, {'frequency': 'r', 'id': 1078, 'synset': 'tank.n.01', 'synonyms': ['army_tank', 'armored_combat_vehicle', 'armoured_combat_vehicle'], 'def': 'an enclosed armored military vehicle; has a cannon and moves on caterpillar treads', 'name': 'army_tank'}, {'frequency': 'c', 'id': 1079, 'synset': 'tank.n.02', 'synonyms': ['tank_(storage_vessel)', 'storage_tank'], 'def': 'a large (usually metallic) vessel for holding gases or liquids', 'name': 'tank_(storage_vessel)'}, {'frequency': 'f', 'id': 1080, 'synset': 'tank_top.n.01', 'synonyms': ['tank_top_(clothing)'], 'def': 'a tight-fitting sleeveless shirt with wide shoulder straps and low neck and no front opening', 'name': 'tank_top_(clothing)'}, {'frequency': 'c', 'id': 1081, 'synset': 'tape.n.01', 'synonyms': ['tape_(sticky_cloth_or_paper)'], 'def': 'a long thin piece of cloth or paper as used for binding or fastening', 'name': 'tape_(sticky_cloth_or_paper)'}, {'frequency': 'c', 'id': 1082, 'synset': 'tape.n.04', 'synonyms': ['tape_measure', 'measuring_tape'], 'def': 'measuring instrument consisting of a narrow strip (cloth or metal) marked in inches or centimeters and used for measuring lengths', 'name': 'tape_measure'}, {'frequency': 'c', 'id': 1083, 'synset': 'tapestry.n.02', 'synonyms': ['tapestry'], 'def': 'a heavy textile with a woven design; used for curtains and upholstery', 'name': 'tapestry'}, {'frequency': 'f', 'id': 1084, 'synset': 'tarpaulin.n.01', 'synonyms': ['tarp'], 'def': 'waterproofed canvas', 'name': 'tarp'}, {'frequency': 'c', 'id': 1085, 'synset': 'tartan.n.01', 'synonyms': ['tartan', 'plaid'], 'def': 'a cloth having a 
crisscross design', 'name': 'tartan'}, {'frequency': 'c', 'id': 1086, 'synset': 'tassel.n.01', 'synonyms': ['tassel'], 'def': 'adornment consisting of a bunch of cords fastened at one end', 'name': 'tassel'}, {'frequency': 'r', 'id': 1087, 'synset': 'tea_bag.n.01', 'synonyms': ['tea_bag'], 'def': 'a measured amount of tea in a bag for an individual serving of tea', 'name': 'tea_bag'}, {'frequency': 'c', 'id': 1088, 'synset': 'teacup.n.02', 'synonyms': ['teacup'], 'def': 'a cup from which tea is drunk', 'name': 'teacup'}, {'frequency': 'c', 'id': 1089, 'synset': 'teakettle.n.01', 'synonyms': ['teakettle'], 'def': 'kettle for boiling water to make tea', 'name': 'teakettle'}, {'frequency': 'c', 'id': 1090, 'synset': 'teapot.n.01', 'synonyms': ['teapot'], 'def': 'pot for brewing tea; usually has a spout and handle', 'name': 'teapot'}, {'frequency': 'f', 'id': 1091, 'synset': 'teddy.n.01', 'synonyms': ['teddy_bear'], 'def': "plaything consisting of a child's toy bear (usually plush and stuffed with soft materials)", 'name': 'teddy_bear'}, {'frequency': 'f', 'id': 1092, 'synset': 'telephone.n.01', 'synonyms': ['telephone', 'phone', 'telephone_set'], 'def': 'electronic device for communicating by voice over long distances', 'name': 'telephone'}, {'frequency': 'c', 'id': 1093, 'synset': 'telephone_booth.n.01', 'synonyms': ['telephone_booth', 'phone_booth', 'call_box', 'telephone_box', 'telephone_kiosk'], 'def': 'booth for using a telephone', 'name': 'telephone_booth'}, {'frequency': 'f', 'id': 1094, 'synset': 'telephone_pole.n.01', 'synonyms': ['telephone_pole', 'telegraph_pole', 'telegraph_post'], 'def': 'tall pole supporting telephone wires', 'name': 'telephone_pole'}, {'frequency': 'r', 'id': 1095, 'synset': 'telephoto_lens.n.01', 'synonyms': ['telephoto_lens', 'zoom_lens'], 'def': 'a camera lens that magnifies the image', 'name': 'telephoto_lens'}, {'frequency': 'c', 'id': 1096, 'synset': 'television_camera.n.01', 'synonyms': ['television_camera', 'tv_camera'], 'def': 'television equipment for capturing and recording video', 'name': 'television_camera'}, {'frequency': 'f', 'id': 1097, 'synset': 'television_receiver.n.01', 'synonyms': ['television_set', 'tv', 'tv_set'], 'def': 'an electronic device that receives television signals and displays them on a screen', 'name': 'television_set'}, {'frequency': 'f', 'id': 1098, 'synset': 'tennis_ball.n.01', 'synonyms': ['tennis_ball'], 'def': 'ball about the size of a fist used in playing tennis', 'name': 'tennis_ball'}, {'frequency': 'f', 'id': 1099, 'synset': 'tennis_racket.n.01', 'synonyms': ['tennis_racket'], 'def': 'a racket used to play tennis', 'name': 'tennis_racket'}, {'frequency': 'r', 'id': 1100, 'synset': 'tequila.n.01', 'synonyms': ['tequila'], 'def': 'Mexican liquor made from fermented juices of an agave plant', 'name': 'tequila'}, {'frequency': 'c', 'id': 1101, 'synset': 'thermometer.n.01', 'synonyms': ['thermometer'], 'def': 'measuring instrument for measuring temperature', 'name': 'thermometer'}, {'frequency': 'c', 'id': 1102, 'synset': 'thermos.n.01', 'synonyms': ['thermos_bottle'], 'def': 'vacuum flask that preserves temperature of hot or cold drinks', 'name': 'thermos_bottle'}, {'frequency': 'c', 'id': 1103, 'synset': 'thermostat.n.01', 'synonyms': ['thermostat'], 'def': 'a regulator for automatically regulating temperature by starting or stopping the supply of heat', 'name': 'thermostat'}, {'frequency': 'r', 'id': 1104, 'synset': 'thimble.n.02', 'synonyms': ['thimble'], 'def': 'a small metal cap to protect the finger while sewing; 
can be used as a small container', 'name': 'thimble'}, {'frequency': 'c', 'id': 1105, 'synset': 'thread.n.01', 'synonyms': ['thread', 'yarn'], 'def': 'a fine cord of twisted fibers (of cotton or silk or wool or nylon etc.) used in sewing and weaving', 'name': 'thread'}, {'frequency': 'c', 'id': 1106, 'synset': 'thumbtack.n.01', 'synonyms': ['thumbtack', 'drawing_pin', 'pushpin'], 'def': 'a tack for attaching papers to a bulletin board or drawing board', 'name': 'thumbtack'}, {'frequency': 'c', 'id': 1107, 'synset': 'tiara.n.01', 'synonyms': ['tiara'], 'def': 'a jeweled headdress worn by women on formal occasions', 'name': 'tiara'}, {'frequency': 'c', 'id': 1108, 'synset': 'tiger.n.02', 'synonyms': ['tiger'], 'def': 'large feline of forests in most of Asia having a tawny coat with black stripes', 'name': 'tiger'}, {'frequency': 'c', 'id': 1109, 'synset': 'tights.n.01', 'synonyms': ['tights_(clothing)', 'leotards'], 'def': 'skintight knit hose covering the body from the waist to the feet worn by acrobats and dancers and as stockings by women and girls', 'name': 'tights_(clothing)'}, {'frequency': 'c', 'id': 1110, 'synset': 'timer.n.01', 'synonyms': ['timer', 'stopwatch'], 'def': 'a timepiece that measures a time interval and signals its end', 'name': 'timer'}, {'frequency': 'f', 'id': 1111, 'synset': 'tinfoil.n.01', 'synonyms': ['tinfoil'], 'def': 'foil made of tin or an alloy of tin and lead', 'name': 'tinfoil'}, {'frequency': 'r', 'id': 1112, 'synset': 'tinsel.n.01', 'synonyms': ['tinsel'], 'def': 'a showy decoration that is basically valueless', 'name': 'tinsel'}, {'frequency': 'f', 'id': 1113, 'synset': 'tissue.n.02', 'synonyms': ['tissue_paper'], 'def': 'a soft thin (usually translucent) paper', 'name': 'tissue_paper'}, {'frequency': 'c', 'id': 1114, 'synset': 'toast.n.01', 'synonyms': ['toast_(food)'], 'def': 'slice of bread that has been toasted', 'name': 'toast_(food)'}, {'frequency': 'f', 'id': 1115, 'synset': 'toaster.n.02', 'synonyms': ['toaster'], 'def': 'a kitchen appliance (usually electric) for toasting bread', 'name': 'toaster'}, {'frequency': 'c', 'id': 1116, 'synset': 'toaster_oven.n.01', 'synonyms': ['toaster_oven'], 'def': 'kitchen appliance consisting of a small electric oven for toasting or warming food', 'name': 'toaster_oven'}, {'frequency': 'f', 'id': 1117, 'synset': 'toilet.n.02', 'synonyms': ['toilet'], 'def': 'a plumbing fixture for defecation and urination', 'name': 'toilet'}, {'frequency': 'f', 'id': 1118, 'synset': 'toilet_tissue.n.01', 'synonyms': ['toilet_tissue', 'toilet_paper', 'bathroom_tissue'], 'def': 'a soft thin absorbent paper for use in toilets', 'name': 'toilet_tissue'}, {'frequency': 'f', 'id': 1119, 'synset': 'tomato.n.01', 'synonyms': ['tomato'], 'def': 'mildly acid red or yellow pulpy fruit eaten as a vegetable', 'name': 'tomato'}, {'frequency': 'c', 'id': 1120, 'synset': 'tongs.n.01', 'synonyms': ['tongs'], 'def': 'any of various devices for taking hold of objects; usually have two hinged legs with handles above and pointed hooks below', 'name': 'tongs'}, {'frequency': 'c', 'id': 1121, 'synset': 'toolbox.n.01', 'synonyms': ['toolbox'], 'def': 'a box or chest or cabinet for holding hand tools', 'name': 'toolbox'}, {'frequency': 'f', 'id': 1122, 'synset': 'toothbrush.n.01', 'synonyms': ['toothbrush'], 'def': 'small brush; has long handle; used to clean teeth', 'name': 'toothbrush'}, {'frequency': 'f', 'id': 1123, 'synset': 'toothpaste.n.01', 'synonyms': ['toothpaste'], 'def': 'a dentifrice in the form of a paste', 'name': 'toothpaste'}, 
{'frequency': 'c', 'id': 1124, 'synset': 'toothpick.n.01', 'synonyms': ['toothpick'], 'def': 'pick consisting of a small strip of wood or plastic; used to pick food from between the teeth', 'name': 'toothpick'}, {'frequency': 'c', 'id': 1125, 'synset': 'top.n.09', 'synonyms': ['cover'], 'def': 'covering for a hole (especially a hole in the top of a container)', 'name': 'cover'}, {'frequency': 'c', 'id': 1126, 'synset': 'tortilla.n.01', 'synonyms': ['tortilla'], 'def': 'thin unleavened pancake made from cornmeal or wheat flour', 'name': 'tortilla'}, {'frequency': 'c', 'id': 1127, 'synset': 'tow_truck.n.01', 'synonyms': ['tow_truck'], 'def': 'a truck equipped to hoist and pull wrecked cars (or to remove cars from no-parking zones)', 'name': 'tow_truck'}, {'frequency': 'f', 'id': 1128, 'synset': 'towel.n.01', 'synonyms': ['towel'], 'def': 'a rectangular piece of absorbent cloth (or paper) for drying or wiping', 'name': 'towel'}, {'frequency': 'f', 'id': 1129, 'synset': 'towel_rack.n.01', 'synonyms': ['towel_rack', 'towel_rail', 'towel_bar'], 'def': 'a rack consisting of one or more bars on which towels can be hung', 'name': 'towel_rack'}, {'frequency': 'f', 'id': 1130, 'synset': 'toy.n.03', 'synonyms': ['toy'], 'def': 'a device regarded as providing amusement', 'name': 'toy'}, {'frequency': 'c', 'id': 1131, 'synset': 'tractor.n.01', 'synonyms': ['tractor_(farm_equipment)'], 'def': 'a wheeled vehicle with large wheels; used in farming and other applications', 'name': 'tractor_(farm_equipment)'}, {'frequency': 'f', 'id': 1132, 'synset': 'traffic_light.n.01', 'synonyms': ['traffic_light'], 'def': 'a device to control vehicle traffic often consisting of three or more lights', 'name': 'traffic_light'}, {'frequency': 'r', 'id': 1133, 'synset': 'trail_bike.n.01', 'synonyms': ['dirt_bike'], 'def': 'a lightweight motorcycle equipped with rugged tires and suspension for off-road use', 'name': 'dirt_bike'}, {'frequency': 'c', 'id': 1134, 'synset': 'trailer_truck.n.01', 'synonyms': ['trailer_truck', 'tractor_trailer', 'trucking_rig', 'articulated_lorry', 'semi_truck'], 'def': 'a truck consisting of a tractor and trailer together', 'name': 'trailer_truck'}, {'frequency': 'f', 'id': 1135, 'synset': 'train.n.01', 'synonyms': ['train_(railroad_vehicle)', 'railroad_train'], 'def': 'public or private transport provided by a line of railway cars coupled together and drawn by a locomotive', 'name': 'train_(railroad_vehicle)'}, {'frequency': 'r', 'id': 1136, 'synset': 'trampoline.n.01', 'synonyms': ['trampoline'], 'def': 'gymnastic apparatus consisting of a strong canvas sheet attached with springs to a metal frame', 'name': 'trampoline'}, {'frequency': 'f', 'id': 1137, 'synset': 'tray.n.01', 'synonyms': ['tray'], 'def': 'an open receptacle for holding or displaying or serving articles or food', 'name': 'tray'}, {'frequency': 'r', 'id': 1138, 'synset': 'tree_house.n.01', 'synonyms': ['tree_house'], 'def': '(NOT A TREE) a PLAYHOUSE built in the branches of a tree', 'name': 'tree_house'}, {'frequency': 'r', 'id': 1139, 'synset': 'trench_coat.n.01', 'synonyms': ['trench_coat'], 'def': 'a military style raincoat; belted with deep pockets', 'name': 'trench_coat'}, {'frequency': 'r', 'id': 1140, 'synset': 'triangle.n.05', 'synonyms': ['triangle_(musical_instrument)'], 'def': 'a percussion instrument consisting of a metal bar bent in the shape of an open triangle', 'name': 'triangle_(musical_instrument)'}, {'frequency': 'r', 'id': 1141, 'synset': 'tricycle.n.01', 'synonyms': ['tricycle'], 'def': 'a vehicle with three 
wheels that is moved by foot pedals', 'name': 'tricycle'}, {'frequency': 'c', 'id': 1142, 'synset': 'tripod.n.01', 'synonyms': ['tripod'], 'def': 'a three-legged rack used for support', 'name': 'tripod'}, {'frequency': 'f', 'id': 1143, 'synset': 'trouser.n.01', 'synonyms': ['trousers', 'pants_(clothing)'], 'def': 'a garment extending from the waist to the knee or ankle, covering each leg separately', 'name': 'trousers'}, {'frequency': 'f', 'id': 1144, 'synset': 'truck.n.01', 'synonyms': ['truck'], 'def': 'an automotive vehicle suitable for hauling', 'name': 'truck'}, {'frequency': 'r', 'id': 1145, 'synset': 'truffle.n.03', 'synonyms': ['truffle_(chocolate)', 'chocolate_truffle'], 'def': 'creamy chocolate candy', 'name': 'truffle_(chocolate)'}, {'frequency': 'c', 'id': 1146, 'synset': 'trunk.n.02', 'synonyms': ['trunk'], 'def': 'luggage consisting of a large strong case used when traveling or for storage', 'name': 'trunk'}, {'frequency': 'r', 'id': 1147, 'synset': 'tub.n.02', 'synonyms': ['vat'], 'def': 'a large open vessel for holding or storing liquids', 'name': 'vat'}, {'frequency': 'c', 'id': 1148, 'synset': 'turban.n.01', 'synonyms': ['turban'], 'def': 'a traditional headdress consisting of a long scarf wrapped around the head', 'name': 'turban'}, {'frequency': 'r', 'id': 1149, 'synset': 'turkey.n.01', 'synonyms': ['turkey_(bird)'], 'def': 'large gallinaceous bird with fan-shaped tail; widely domesticated for food', 'name': 'turkey_(bird)'}, {'frequency': 'c', 'id': 1150, 'synset': 'turkey.n.04', 'synonyms': ['turkey_(food)'], 'def': 'flesh of large domesticated fowl usually roasted', 'name': 'turkey_(food)'}, {'frequency': 'r', 'id': 1151, 'synset': 'turnip.n.01', 'synonyms': ['turnip'], 'def': 'widely cultivated plant having a large fleshy edible white or yellow root', 'name': 'turnip'}, {'frequency': 'c', 'id': 1152, 'synset': 'turtle.n.02', 'synonyms': ['turtle'], 'def': 'any of various aquatic and land reptiles having a bony shell and flipper-like limbs for swimming', 'name': 'turtle'}, {'frequency': 'r', 'id': 1153, 'synset': 'turtleneck.n.01', 'synonyms': ['turtleneck_(clothing)', 'polo-neck'], 'def': 'a sweater or jersey with a high close-fitting collar', 'name': 'turtleneck_(clothing)'}, {'frequency': 'r', 'id': 1154, 'synset': 'typewriter.n.01', 'synonyms': ['typewriter'], 'def': 'hand-operated character printer for printing written messages one character at a time', 'name': 'typewriter'}, {'frequency': 'f', 'id': 1155, 'synset': 'umbrella.n.01', 'synonyms': ['umbrella'], 'def': 'a lightweight handheld collapsible canopy', 'name': 'umbrella'}, {'frequency': 'c', 'id': 1156, 'synset': 'underwear.n.01', 'synonyms': ['underwear', 'underclothes', 'underclothing', 'underpants'], 'def': 'undergarment worn next to the skin and under the outer garments', 'name': 'underwear'}, {'frequency': 'r', 'id': 1157, 'synset': 'unicycle.n.01', 'synonyms': ['unicycle'], 'def': 'a vehicle with a single wheel that is driven by pedals', 'name': 'unicycle'}, {'frequency': 'c', 'id': 1158, 'synset': 'urinal.n.01', 'synonyms': ['urinal'], 'def': 'a plumbing fixture (usually attached to the wall) used by men to urinate', 'name': 'urinal'}, {'frequency': 'r', 'id': 1159, 'synset': 'urn.n.01', 'synonyms': ['urn'], 'def': 'a large vase that usually has a pedestal or feet', 'name': 'urn'}, {'frequency': 'c', 'id': 1160, 'synset': 'vacuum.n.04', 'synonyms': ['vacuum_cleaner'], 'def': 'an electrical home appliance that cleans by suction', 'name': 'vacuum_cleaner'}, {'frequency': 'c', 'id': 1161, 'synset': 
'valve.n.03', 'synonyms': ['valve'], 'def': 'control consisting of a mechanical device for controlling the flow of a fluid', 'name': 'valve'}, {'frequency': 'f', 'id': 1162, 'synset': 'vase.n.01', 'synonyms': ['vase'], 'def': 'an open jar of glass or porcelain used as an ornament or to hold flowers', 'name': 'vase'}, {'frequency': 'c', 'id': 1163, 'synset': 'vending_machine.n.01', 'synonyms': ['vending_machine'], 'def': 'a slot machine for selling goods', 'name': 'vending_machine'}, {'frequency': 'f', 'id': 1164, 'synset': 'vent.n.01', 'synonyms': ['vent', 'blowhole', 'air_vent'], 'def': 'a hole for the escape of gas or air', 'name': 'vent'}, {'frequency': 'c', 'id': 1165, 'synset': 'videotape.n.01', 'synonyms': ['videotape'], 'def': 'a video recording made on magnetic tape', 'name': 'videotape'}, {'frequency': 'r', 'id': 1166, 'synset': 'vinegar.n.01', 'synonyms': ['vinegar'], 'def': 'sour-tasting liquid produced usually by oxidation of the alcohol in wine or cider and used as a condiment or food preservative', 'name': 'vinegar'}, {'frequency': 'r', 'id': 1167, 'synset': 'violin.n.01', 'synonyms': ['violin', 'fiddle'], 'def': 'bowed stringed instrument that is the highest member of the violin family', 'name': 'violin'}, {'frequency': 'r', 'id': 1168, 'synset': 'vodka.n.01', 'synonyms': ['vodka'], 'def': 'unaged colorless liquor originating in Russia', 'name': 'vodka'}, {'frequency': 'r', 'id': 1169, 'synset': 'volleyball.n.02', 'synonyms': ['volleyball'], 'def': 'an inflated ball used in playing volleyball', 'name': 'volleyball'}, {'frequency': 'r', 'id': 1170, 'synset': 'vulture.n.01', 'synonyms': ['vulture'], 'def': 'any of various large birds of prey having naked heads and weak claws and feeding chiefly on carrion', 'name': 'vulture'}, {'frequency': 'c', 'id': 1171, 'synset': 'waffle.n.01', 'synonyms': ['waffle'], 'def': 'pancake batter baked in a waffle iron', 'name': 'waffle'}, {'frequency': 'r', 'id': 1172, 'synset': 'waffle_iron.n.01', 'synonyms': ['waffle_iron'], 'def': 'a kitchen appliance for baking waffles', 'name': 'waffle_iron'}, {'frequency': 'c', 'id': 1173, 'synset': 'wagon.n.01', 'synonyms': ['wagon'], 'def': 'any of various kinds of wheeled vehicles drawn by an animal or a tractor', 'name': 'wagon'}, {'frequency': 'c', 'id': 1174, 'synset': 'wagon_wheel.n.01', 'synonyms': ['wagon_wheel'], 'def': 'a wheel of a wagon', 'name': 'wagon_wheel'}, {'frequency': 'c', 'id': 1175, 'synset': 'walking_stick.n.01', 'synonyms': ['walking_stick'], 'def': 'a stick carried in the hand for support in walking', 'name': 'walking_stick'}, {'frequency': 'c', 'id': 1176, 'synset': 'wall_clock.n.01', 'synonyms': ['wall_clock'], 'def': 'a clock mounted on a wall', 'name': 'wall_clock'}, {'frequency': 'f', 'id': 1177, 'synset': 'wall_socket.n.01', 'synonyms': ['wall_socket', 'wall_plug', 'electric_outlet', 'electrical_outlet', 'outlet', 'electric_receptacle'], 'def': 'receptacle providing a place in a wiring system where current can be taken to run electrical devices', 'name': 'wall_socket'}, {'frequency': 'c', 'id': 1178, 'synset': 'wallet.n.01', 'synonyms': ['wallet', 'billfold'], 'def': 'a pocket-size case for holding papers and paper money', 'name': 'wallet'}, {'frequency': 'r', 'id': 1179, 'synset': 'walrus.n.01', 'synonyms': ['walrus'], 'def': 'either of two large northern marine mammals having ivory tusks and tough hide over thick blubber', 'name': 'walrus'}, {'frequency': 'r', 'id': 1180, 'synset': 'wardrobe.n.01', 'synonyms': ['wardrobe'], 'def': 'a tall piece of furniture that provides 
storage space for clothes; has a door and rails or hooks for hanging clothes', 'name': 'wardrobe'}, {'frequency': 'r', 'id': 1181, 'synset': 'wasabi.n.02', 'synonyms': ['wasabi'], 'def': 'the thick green root of the wasabi plant that the Japanese use in cooking and that tastes like strong horseradish', 'name': 'wasabi'}, {'frequency': 'c', 'id': 1182, 'synset': 'washer.n.03', 'synonyms': ['automatic_washer', 'washing_machine'], 'def': 'a home appliance for washing clothes and linens automatically', 'name': 'automatic_washer'}, {'frequency': 'f', 'id': 1183, 'synset': 'watch.n.01', 'synonyms': ['watch', 'wristwatch'], 'def': 'a small, portable timepiece', 'name': 'watch'}, {'frequency': 'f', 'id': 1184, 'synset': 'water_bottle.n.01', 'synonyms': ['water_bottle'], 'def': 'a bottle for holding water', 'name': 'water_bottle'}, {'frequency': 'c', 'id': 1185, 'synset': 'water_cooler.n.01', 'synonyms': ['water_cooler'], 'def': 'a device for cooling and dispensing drinking water', 'name': 'water_cooler'}, {'frequency': 'c', 'id': 1186, 'synset': 'water_faucet.n.01', 'synonyms': ['water_faucet', 'water_tap', 'tap_(water_faucet)'], 'def': 'a faucet for drawing water from a pipe or cask', 'name': 'water_faucet'}, {'frequency': 'r', 'id': 1187, 'synset': 'water_filter.n.01', 'synonyms': ['water_filter'], 'def': 'a filter to remove impurities from the water supply', 'name': 'water_filter'}, {'frequency': 'r', 'id': 1188, 'synset': 'water_heater.n.01', 'synonyms': ['water_heater', 'hot-water_heater'], 'def': 'a heater and storage tank to supply heated water', 'name': 'water_heater'}, {'frequency': 'r', 'id': 1189, 'synset': 'water_jug.n.01', 'synonyms': ['water_jug'], 'def': 'a jug that holds water', 'name': 'water_jug'}, {'frequency': 'r', 'id': 1190, 'synset': 'water_pistol.n.01', 'synonyms': ['water_gun', 'squirt_gun'], 'def': 'plaything consisting of a toy pistol that squirts water', 'name': 'water_gun'}, {'frequency': 'c', 'id': 1191, 'synset': 'water_scooter.n.01', 'synonyms': ['water_scooter', 'sea_scooter', 'jet_ski'], 'def': 'a motorboat resembling a motor scooter (NOT A SURFBOARD OR WATER SKI)', 'name': 'water_scooter'}, {'frequency': 'c', 'id': 1192, 'synset': 'water_ski.n.01', 'synonyms': ['water_ski'], 'def': 'broad ski for skimming over water towed by a speedboat (DO NOT MARK WATER)', 'name': 'water_ski'}, {'frequency': 'c', 'id': 1193, 'synset': 'water_tower.n.01', 'synonyms': ['water_tower'], 'def': 'a large reservoir for water', 'name': 'water_tower'}, {'frequency': 'c', 'id': 1194, 'synset': 'watering_can.n.01', 'synonyms': ['watering_can'], 'def': 'a container with a handle and a spout with a perforated nozzle; used to sprinkle water over plants', 'name': 'watering_can'}, {'frequency': 'c', 'id': 1195, 'synset': 'watermelon.n.02', 'synonyms': ['watermelon'], 'def': 'large oblong or roundish melon with a hard green rind and sweet watery red or occasionally yellowish pulp', 'name': 'watermelon'}, {'frequency': 'f', 'id': 1196, 'synset': 'weathervane.n.01', 'synonyms': ['weathervane', 'vane_(weathervane)', 'wind_vane'], 'def': 'mechanical device attached to an elevated structure; rotates freely to show the direction of the wind', 'name': 'weathervane'}, {'frequency': 'c', 'id': 1197, 'synset': 'webcam.n.01', 'synonyms': ['webcam'], 'def': 'a digital camera designed to take digital photographs and transmit them over the internet', 'name': 'webcam'}, {'frequency': 'c', 'id': 1198, 'synset': 'wedding_cake.n.01', 'synonyms': ['wedding_cake', 'bridecake'], 'def': 'a rich cake with two or more 
tiers and covered with frosting and decorations; served at a wedding reception', 'name': 'wedding_cake'}, {'frequency': 'c', 'id': 1199, 'synset': 'wedding_ring.n.01', 'synonyms': ['wedding_ring', 'wedding_band'], 'def': 'a ring given to the bride and/or groom at the wedding', 'name': 'wedding_ring'}, {'frequency': 'f', 'id': 1200, 'synset': 'wet_suit.n.01', 'synonyms': ['wet_suit'], 'def': 'a close-fitting garment made of a permeable material; worn in cold water to retain body heat', 'name': 'wet_suit'}, {'frequency': 'f', 'id': 1201, 'synset': 'wheel.n.01', 'synonyms': ['wheel'], 'def': 'a circular frame with spokes (or a solid disc) that can rotate on a shaft or axle', 'name': 'wheel'}, {'frequency': 'c', 'id': 1202, 'synset': 'wheelchair.n.01', 'synonyms': ['wheelchair'], 'def': 'a movable chair mounted on large wheels', 'name': 'wheelchair'}, {'frequency': 'c', 'id': 1203, 'synset': 'whipped_cream.n.01', 'synonyms': ['whipped_cream'], 'def': 'cream that has been beaten until light and fluffy', 'name': 'whipped_cream'}, {'frequency': 'r', 'id': 1204, 'synset': 'whiskey.n.01', 'synonyms': ['whiskey'], 'def': 'a liquor made from fermented mash of grain', 'name': 'whiskey'}, {'frequency': 'r', 'id': 1205, 'synset': 'whistle.n.03', 'synonyms': ['whistle'], 'def': 'a small wind instrument that produces a whistling sound by blowing into it', 'name': 'whistle'}, {'frequency': 'r', 'id': 1206, 'synset': 'wick.n.02', 'synonyms': ['wick'], 'def': 'a loosely woven cord in a candle or oil lamp that is lit on fire', 'name': 'wick'}, {'frequency': 'c', 'id': 1207, 'synset': 'wig.n.01', 'synonyms': ['wig'], 'def': 'hairpiece covering the head and made of real or synthetic hair', 'name': 'wig'}, {'frequency': 'c', 'id': 1208, 'synset': 'wind_chime.n.01', 'synonyms': ['wind_chime'], 'def': 'a decorative arrangement of pieces of metal or glass or pottery that hang together loosely so the wind can cause them to tinkle', 'name': 'wind_chime'}, {'frequency': 'c', 'id': 1209, 'synset': 'windmill.n.01', 'synonyms': ['windmill'], 'def': 'a mill that is powered by the wind', 'name': 'windmill'}, {'frequency': 'c', 'id': 1210, 'synset': 'window_box.n.01', 'synonyms': ['window_box_(for_plants)'], 'def': 'a container for growing plants on a windowsill', 'name': 'window_box_(for_plants)'}, {'frequency': 'f', 'id': 1211, 'synset': 'windshield_wiper.n.01', 'synonyms': ['windshield_wiper', 'windscreen_wiper', 'wiper_(for_windshield/screen)'], 'def': 'a mechanical device that cleans the windshield', 'name': 'windshield_wiper'}, {'frequency': 'c', 'id': 1212, 'synset': 'windsock.n.01', 'synonyms': ['windsock', 'air_sock', 'air-sleeve', 'wind_sleeve', 'wind_cone'], 'def': 'a truncated cloth cone mounted on a mast/pole; shows wind direction', 'name': 'windsock'}, {'frequency': 'f', 'id': 1213, 'synset': 'wine_bottle.n.01', 'synonyms': ['wine_bottle'], 'def': 'a bottle for holding wine', 'name': 'wine_bottle'}, {'frequency': 'r', 'id': 1214, 'synset': 'wine_bucket.n.01', 'synonyms': ['wine_bucket', 'wine_cooler'], 'def': 'a bucket of ice used to chill a bottle of wine', 'name': 'wine_bucket'}, {'frequency': 'f', 'id': 1215, 'synset': 'wineglass.n.01', 'synonyms': ['wineglass'], 'def': 'a glass that has a stem and in which wine is served', 'name': 'wineglass'}, {'frequency': 'r', 'id': 1216, 'synset': 'wing_chair.n.01', 'synonyms': ['wing_chair'], 'def': 'easy chair having wings on each side of a high back', 'name': 'wing_chair'}, {'frequency': 'c', 'id': 1217, 'synset': 'winker.n.02', 'synonyms': ['blinder_(for_horses)'], 
'def': 'blinds that prevent a horse from seeing something on either side', 'name': 'blinder_(for_horses)'}, {'frequency': 'c', 'id': 1218, 'synset': 'wok.n.01', 'synonyms': ['wok'], 'def': 'pan with a convex bottom; used for frying in Chinese cooking', 'name': 'wok'}, {'frequency': 'r', 'id': 1219, 'synset': 'wolf.n.01', 'synonyms': ['wolf'], 'def': 'a wild carnivorous mammal of the dog family, living and hunting in packs', 'name': 'wolf'}, {'frequency': 'c', 'id': 1220, 'synset': 'wooden_spoon.n.02', 'synonyms': ['wooden_spoon'], 'def': 'a spoon made of wood', 'name': 'wooden_spoon'}, {'frequency': 'c', 'id': 1221, 'synset': 'wreath.n.01', 'synonyms': ['wreath'], 'def': 'an arrangement of flowers, leaves, or stems fastened in a ring', 'name': 'wreath'}, {'frequency': 'c', 'id': 1222, 'synset': 'wrench.n.03', 'synonyms': ['wrench', 'spanner'], 'def': 'a hand tool that is used to hold or twist a nut or bolt', 'name': 'wrench'}, {'frequency': 'c', 'id': 1223, 'synset': 'wristband.n.01', 'synonyms': ['wristband'], 'def': 'band consisting of a part of a sleeve that covers the wrist', 'name': 'wristband'}, {'frequency': 'f', 'id': 1224, 'synset': 'wristlet.n.01', 'synonyms': ['wristlet', 'wrist_band'], 'def': 'a band or bracelet worn around the wrist', 'name': 'wristlet'}, {'frequency': 'r', 'id': 1225, 'synset': 'yacht.n.01', 'synonyms': ['yacht'], 'def': 'an expensive vessel propelled by sail or power and used for cruising or racing', 'name': 'yacht'}, {'frequency': 'r', 'id': 1226, 'synset': 'yak.n.02', 'synonyms': ['yak'], 'def': 'large long-haired wild ox of Tibet often domesticated', 'name': 'yak'}, {'frequency': 'c', 'id': 1227, 'synset': 'yogurt.n.01', 'synonyms': ['yogurt', 'yoghurt', 'yoghourt'], 'def': 'a custard-like food made from curdled milk', 'name': 'yogurt'}, {'frequency': 'r', 'id': 1228, 'synset': 'yoke.n.07', 'synonyms': ['yoke_(animal_equipment)'], 'def': 'gear joining two animals at the neck; NOT egg yolk', 'name': 'yoke_(animal_equipment)'}, {'frequency': 'f', 'id': 1229, 'synset': 'zebra.n.01', 'synonyms': ['zebra'], 'def': 'any of several fleet black-and-white striped African equines', 'name': 'zebra'}, {'frequency': 'c', 'id': 1230, 'synset': 'zucchini.n.02', 'synonyms': ['zucchini', 'courgette'], 'def': 'small cucumber-shaped vegetable marrow; typically dark green', 'name': 'zucchini'}] # noqa +# fmt: on diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/lvis_v1_categories.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/lvis_v1_categories.py new file mode 100644 index 0000000..7374e69 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/lvis_v1_categories.py @@ -0,0 +1,16 @@ +# Copyright (c) Facebook, Inc. and its affiliates.
+# Autogen with +# with open("lvis_v1_val.json", "r") as f: +# a = json.load(f) +# c = a["categories"] +# for x in c: +# del x["image_count"] +# del x["instance_count"] +# LVIS_CATEGORIES = repr(c) + " # noqa" +# with open("/tmp/lvis_categories.py", "wt") as f: +# f.write(f"LVIS_CATEGORIES = {LVIS_CATEGORIES}") +# Then paste the contents of that file below + +# fmt: off +LVIS_CATEGORIES = [{'frequency': 'c', 'synset': 'aerosol.n.02', 'synonyms': ['aerosol_can', 'spray_can'], 'id': 1, 'def': 'a dispenser that holds a substance under pressure', 'name': 'aerosol_can'}, {'frequency': 'f', 'synset': 'air_conditioner.n.01', 'synonyms': ['air_conditioner'], 'id': 2, 'def': 'a machine that keeps air cool and dry', 'name': 'air_conditioner'}, {'frequency': 'f', 'synset': 'airplane.n.01', 'synonyms': ['airplane', 'aeroplane'], 'id': 3, 'def': 'an aircraft that has a fixed wing and is powered by propellers or jets', 'name': 'airplane'}, {'frequency': 'f', 'synset': 'alarm_clock.n.01', 'synonyms': ['alarm_clock'], 'id': 4, 'def': 'a clock that wakes a sleeper at some preset time', 'name': 'alarm_clock'}, {'frequency': 'c', 'synset': 'alcohol.n.01', 'synonyms': ['alcohol', 'alcoholic_beverage'], 'id': 5, 'def': 'a liquor or brew containing alcohol as the active agent', 'name': 'alcohol'}, {'frequency': 'c', 'synset': 'alligator.n.02', 'synonyms': ['alligator', 'gator'], 'id': 6, 'def': 'amphibious reptiles related to crocodiles but with shorter broader snouts', 'name': 'alligator'}, {'frequency': 'c', 'synset': 'almond.n.02', 'synonyms': ['almond'], 'id': 7, 'def': 'oval-shaped edible seed of the almond tree', 'name': 'almond'}, {'frequency': 'c', 'synset': 'ambulance.n.01', 'synonyms': ['ambulance'], 'id': 8, 'def': 'a vehicle that takes people to and from hospitals', 'name': 'ambulance'}, {'frequency': 'c', 'synset': 'amplifier.n.01', 'synonyms': ['amplifier'], 'id': 9, 'def': 'electronic equipment that increases strength of signals', 'name': 'amplifier'}, {'frequency': 'c', 'synset': 'anklet.n.03', 'synonyms': ['anklet', 'ankle_bracelet'], 'id': 10, 'def': 'an ornament worn around the ankle', 'name': 'anklet'}, {'frequency': 'f', 'synset': 'antenna.n.01', 'synonyms': ['antenna', 'aerial', 'transmitting_aerial'], 'id': 11, 'def': 'an electrical device that sends or receives radio or television signals', 'name': 'antenna'}, {'frequency': 'f', 'synset': 'apple.n.01', 'synonyms': ['apple'], 'id': 12, 'def': 'fruit with red or yellow or green skin and sweet to tart crisp whitish flesh', 'name': 'apple'}, {'frequency': 'r', 'synset': 'applesauce.n.01', 'synonyms': ['applesauce'], 'id': 13, 'def': 'puree of stewed apples usually sweetened and spiced', 'name': 'applesauce'}, {'frequency': 'r', 'synset': 'apricot.n.02', 'synonyms': ['apricot'], 'id': 14, 'def': 'downy yellow to rosy-colored fruit resembling a small peach', 'name': 'apricot'}, {'frequency': 'f', 'synset': 'apron.n.01', 'synonyms': ['apron'], 'id': 15, 'def': 'a garment of cloth that is tied about the waist and worn to protect clothing', 'name': 'apron'}, {'frequency': 'c', 'synset': 'aquarium.n.01', 'synonyms': ['aquarium', 'fish_tank'], 'id': 16, 'def': 'a tank/pool/bowl filled with water for keeping live fish and underwater animals', 'name': 'aquarium'}, {'frequency': 'r', 'synset': 'arctic.n.02', 'synonyms': ['arctic_(type_of_shoe)', 'galosh', 'golosh', 'rubber_(type_of_shoe)', 'gumshoe'], 'id': 17, 'def': 'a waterproof overshoe that protects shoes from water or snow', 'name': 'arctic_(type_of_shoe)'}, {'frequency': 'c', 'synset': 
'armband.n.02', 'synonyms': ['armband'], 'id': 18, 'def': 'a band worn around the upper arm', 'name': 'armband'}, {'frequency': 'f', 'synset': 'armchair.n.01', 'synonyms': ['armchair'], 'id': 19, 'def': 'chair with a support on each side for arms', 'name': 'armchair'}, {'frequency': 'r', 'synset': 'armoire.n.01', 'synonyms': ['armoire'], 'id': 20, 'def': 'a large wardrobe or cabinet', 'name': 'armoire'}, {'frequency': 'r', 'synset': 'armor.n.01', 'synonyms': ['armor', 'armour'], 'id': 21, 'def': 'protective covering made of metal and used in combat', 'name': 'armor'}, {'frequency': 'c', 'synset': 'artichoke.n.02', 'synonyms': ['artichoke'], 'id': 22, 'def': 'a thistlelike flower head with edible fleshy leaves and heart', 'name': 'artichoke'}, {'frequency': 'f', 'synset': 'ashcan.n.01', 'synonyms': ['trash_can', 'garbage_can', 'wastebin', 'dustbin', 'trash_barrel', 'trash_bin'], 'id': 23, 'def': 'a bin that holds rubbish until it is collected', 'name': 'trash_can'}, {'frequency': 'c', 'synset': 'ashtray.n.01', 'synonyms': ['ashtray'], 'id': 24, 'def': "a receptacle for the ash from smokers' cigars or cigarettes", 'name': 'ashtray'}, {'frequency': 'c', 'synset': 'asparagus.n.02', 'synonyms': ['asparagus'], 'id': 25, 'def': 'edible young shoots of the asparagus plant', 'name': 'asparagus'}, {'frequency': 'c', 'synset': 'atomizer.n.01', 'synonyms': ['atomizer', 'atomiser', 'spray', 'sprayer', 'nebulizer', 'nebuliser'], 'id': 26, 'def': 'a dispenser that turns a liquid (such as perfume) into a fine mist', 'name': 'atomizer'}, {'frequency': 'f', 'synset': 'avocado.n.01', 'synonyms': ['avocado'], 'id': 27, 'def': 'a pear-shaped fruit with green or blackish skin and rich yellowish pulp enclosing a single large seed', 'name': 'avocado'}, {'frequency': 'c', 'synset': 'award.n.02', 'synonyms': ['award', 'accolade'], 'id': 28, 'def': 'a tangible symbol signifying approval or distinction', 'name': 'award'}, {'frequency': 'f', 'synset': 'awning.n.01', 'synonyms': ['awning'], 'id': 29, 'def': 'a canopy made of canvas to shelter people or things from rain or sun', 'name': 'awning'}, {'frequency': 'r', 'synset': 'ax.n.01', 'synonyms': ['ax', 'axe'], 'id': 30, 'def': 'an edge tool with a heavy bladed head mounted across a handle', 'name': 'ax'}, {'frequency': 'r', 'synset': 'baboon.n.01', 'synonyms': ['baboon'], 'id': 31, 'def': 'large terrestrial monkeys having doglike muzzles', 'name': 'baboon'}, {'frequency': 'f', 'synset': 'baby_buggy.n.01', 'synonyms': ['baby_buggy', 'baby_carriage', 'perambulator', 'pram', 'stroller'], 'id': 32, 'def': 'a small vehicle with four wheels in which a baby or child is pushed around', 'name': 'baby_buggy'}, {'frequency': 'c', 'synset': 'backboard.n.01', 'synonyms': ['basketball_backboard'], 'id': 33, 'def': 'a raised vertical board with basket attached; used to play basketball', 'name': 'basketball_backboard'}, {'frequency': 'f', 'synset': 'backpack.n.01', 'synonyms': ['backpack', 'knapsack', 'packsack', 'rucksack', 'haversack'], 'id': 34, 'def': 'a bag carried by a strap on your back or shoulder', 'name': 'backpack'}, {'frequency': 'f', 'synset': 'bag.n.04', 'synonyms': ['handbag', 'purse', 'pocketbook'], 'id': 35, 'def': 'a container used for carrying money and small personal items or accessories', 'name': 'handbag'}, {'frequency': 'f', 'synset': 'bag.n.06', 'synonyms': ['suitcase', 'baggage', 'luggage'], 'id': 36, 'def': 'cases used to carry belongings when traveling', 'name': 'suitcase'}, {'frequency': 'c', 'synset': 'bagel.n.01', 'synonyms': ['bagel', 'beigel'], 'id': 
37, 'def': 'glazed yeast-raised doughnut-shaped roll with hard crust', 'name': 'bagel'}, {'frequency': 'r', 'synset': 'bagpipe.n.01', 'synonyms': ['bagpipe'], 'id': 38, 'def': 'a tubular wind instrument; the player blows air into a bag and squeezes it out', 'name': 'bagpipe'}, {'frequency': 'r', 'synset': 'baguet.n.01', 'synonyms': ['baguet', 'baguette'], 'id': 39, 'def': 'narrow French stick loaf', 'name': 'baguet'}, {'frequency': 'r', 'synset': 'bait.n.02', 'synonyms': ['bait', 'lure'], 'id': 40, 'def': 'something used to lure fish or other animals into danger so they can be trapped or killed', 'name': 'bait'}, {'frequency': 'f', 'synset': 'ball.n.06', 'synonyms': ['ball'], 'id': 41, 'def': 'a spherical object used as a plaything', 'name': 'ball'}, {'frequency': 'r', 'synset': 'ballet_skirt.n.01', 'synonyms': ['ballet_skirt', 'tutu'], 'id': 42, 'def': 'very short skirt worn by ballerinas', 'name': 'ballet_skirt'}, {'frequency': 'f', 'synset': 'balloon.n.01', 'synonyms': ['balloon'], 'id': 43, 'def': 'large tough nonrigid bag filled with gas or heated air', 'name': 'balloon'}, {'frequency': 'c', 'synset': 'bamboo.n.02', 'synonyms': ['bamboo'], 'id': 44, 'def': 'woody tropical grass having hollow woody stems', 'name': 'bamboo'}, {'frequency': 'f', 'synset': 'banana.n.02', 'synonyms': ['banana'], 'id': 45, 'def': 'elongated crescent-shaped yellow fruit with soft sweet flesh', 'name': 'banana'}, {'frequency': 'c', 'synset': 'band_aid.n.01', 'synonyms': ['Band_Aid'], 'id': 46, 'def': 'trade name for an adhesive bandage to cover small cuts or blisters', 'name': 'Band_Aid'}, {'frequency': 'c', 'synset': 'bandage.n.01', 'synonyms': ['bandage'], 'id': 47, 'def': 'a piece of soft material that covers and protects an injured part of the body', 'name': 'bandage'}, {'frequency': 'f', 'synset': 'bandanna.n.01', 'synonyms': ['bandanna', 'bandana'], 'id': 48, 'def': 'large and brightly colored handkerchief; often used as a neckerchief', 'name': 'bandanna'}, {'frequency': 'r', 'synset': 'banjo.n.01', 'synonyms': ['banjo'], 'id': 49, 'def': 'a stringed instrument of the guitar family with a long neck and circular body', 'name': 'banjo'}, {'frequency': 'f', 'synset': 'banner.n.01', 'synonyms': ['banner', 'streamer'], 'id': 50, 'def': 'long strip of cloth or paper used for decoration or advertising', 'name': 'banner'}, {'frequency': 'r', 'synset': 'barbell.n.01', 'synonyms': ['barbell'], 'id': 51, 'def': 'a bar to which heavy discs are attached at each end; used in weightlifting', 'name': 'barbell'}, {'frequency': 'r', 'synset': 'barge.n.01', 'synonyms': ['barge'], 'id': 52, 'def': 'a flatbottom boat for carrying heavy loads (especially on canals)', 'name': 'barge'}, {'frequency': 'f', 'synset': 'barrel.n.02', 'synonyms': ['barrel', 'cask'], 'id': 53, 'def': 'a cylindrical container that holds liquids', 'name': 'barrel'}, {'frequency': 'c', 'synset': 'barrette.n.01', 'synonyms': ['barrette'], 'id': 54, 'def': "a pin for holding women's hair in place", 'name': 'barrette'}, {'frequency': 'c', 'synset': 'barrow.n.03', 'synonyms': ['barrow', 'garden_cart', 'lawn_cart', 'wheelbarrow'], 'id': 55, 'def': 'a cart for carrying small loads; has handles and one or more wheels', 'name': 'barrow'}, {'frequency': 'f', 'synset': 'base.n.03', 'synonyms': ['baseball_base'], 'id': 56, 'def': 'a place that the runner must touch before scoring', 'name': 'baseball_base'}, {'frequency': 'f', 'synset': 'baseball.n.02', 'synonyms': ['baseball'], 'id': 57, 'def': 'a ball used in playing baseball', 'name': 'baseball'}, {'frequency': 
'f', 'synset': 'baseball_bat.n.01', 'synonyms': ['baseball_bat'], 'id': 58, 'def': 'an implement used in baseball by the batter', 'name': 'baseball_bat'}, {'frequency': 'f', 'synset': 'baseball_cap.n.01', 'synonyms': ['baseball_cap', 'jockey_cap', 'golf_cap'], 'id': 59, 'def': 'a cap with a bill', 'name': 'baseball_cap'}, {'frequency': 'f', 'synset': 'baseball_glove.n.01', 'synonyms': ['baseball_glove', 'baseball_mitt'], 'id': 60, 'def': 'the handwear used by fielders in playing baseball', 'name': 'baseball_glove'}, {'frequency': 'f', 'synset': 'basket.n.01', 'synonyms': ['basket', 'handbasket'], 'id': 61, 'def': 'a container that is usually woven and has handles', 'name': 'basket'}, {'frequency': 'c', 'synset': 'basketball.n.02', 'synonyms': ['basketball'], 'id': 62, 'def': 'an inflated ball used in playing basketball', 'name': 'basketball'}, {'frequency': 'r', 'synset': 'bass_horn.n.01', 'synonyms': ['bass_horn', 'sousaphone', 'tuba'], 'id': 63, 'def': 'the lowest brass wind instrument', 'name': 'bass_horn'}, {'frequency': 'c', 'synset': 'bat.n.01', 'synonyms': ['bat_(animal)'], 'id': 64, 'def': 'nocturnal mouselike mammal with forelimbs modified to form membranous wings', 'name': 'bat_(animal)'}, {'frequency': 'f', 'synset': 'bath_mat.n.01', 'synonyms': ['bath_mat'], 'id': 65, 'def': 'a heavy towel or mat to stand on while drying yourself after a bath', 'name': 'bath_mat'}, {'frequency': 'f', 'synset': 'bath_towel.n.01', 'synonyms': ['bath_towel'], 'id': 66, 'def': 'a large towel; to dry yourself after a bath', 'name': 'bath_towel'}, {'frequency': 'c', 'synset': 'bathrobe.n.01', 'synonyms': ['bathrobe'], 'id': 67, 'def': 'a loose-fitting robe of towelling; worn after a bath or swim', 'name': 'bathrobe'}, {'frequency': 'f', 'synset': 'bathtub.n.01', 'synonyms': ['bathtub', 'bathing_tub'], 'id': 68, 'def': 'a large open container that you fill with water and use to wash the body', 'name': 'bathtub'}, {'frequency': 'r', 'synset': 'batter.n.02', 'synonyms': ['batter_(food)'], 'id': 69, 'def': 'a liquid or semiliquid mixture, as of flour, eggs, and milk, used in cooking', 'name': 'batter_(food)'}, {'frequency': 'c', 'synset': 'battery.n.02', 'synonyms': ['battery'], 'id': 70, 'def': 'a portable device that produces electricity', 'name': 'battery'}, {'frequency': 'r', 'synset': 'beach_ball.n.01', 'synonyms': ['beachball'], 'id': 71, 'def': 'large and light ball; for play at the seaside', 'name': 'beachball'}, {'frequency': 'c', 'synset': 'bead.n.01', 'synonyms': ['bead'], 'id': 72, 'def': 'a small ball with a hole through the middle used for ornamentation, jewellery, etc.', 'name': 'bead'}, {'frequency': 'c', 'synset': 'bean_curd.n.01', 'synonyms': ['bean_curd', 'tofu'], 'id': 73, 'def': 'cheeselike food made of curdled soybean milk', 'name': 'bean_curd'}, {'frequency': 'c', 'synset': 'beanbag.n.01', 'synonyms': ['beanbag'], 'id': 74, 'def': 'a bag filled with dried beans or similar items; used in games or to sit on', 'name': 'beanbag'}, {'frequency': 'f', 'synset': 'beanie.n.01', 'synonyms': ['beanie', 'beany'], 'id': 75, 'def': 'a small skullcap; formerly worn by schoolboys and college freshmen', 'name': 'beanie'}, {'frequency': 'f', 'synset': 'bear.n.01', 'synonyms': ['bear'], 'id': 76, 'def': 'large carnivorous or omnivorous mammals with shaggy coats and claws', 'name': 'bear'}, {'frequency': 'f', 'synset': 'bed.n.01', 'synonyms': ['bed'], 'id': 77, 'def': 'a piece of furniture that provides a place to sleep', 'name': 'bed'}, {'frequency': 'r', 'synset': 'bedpan.n.01', 'synonyms': 
['bedpan'], 'id': 78, 'def': 'a shallow vessel used by a bedridden patient for defecation and urination', 'name': 'bedpan'}, {'frequency': 'f', 'synset': 'bedspread.n.01', 'synonyms': ['bedspread', 'bedcover', 'bed_covering', 'counterpane', 'spread'], 'id': 79, 'def': 'decorative cover for a bed', 'name': 'bedspread'}, {'frequency': 'f', 'synset': 'beef.n.01', 'synonyms': ['cow'], 'id': 80, 'def': 'cattle/cow', 'name': 'cow'}, {'frequency': 'f', 'synset': 'beef.n.02', 'synonyms': ['beef_(food)', 'boeuf_(food)'], 'id': 81, 'def': 'meat from an adult domestic bovine', 'name': 'beef_(food)'}, {'frequency': 'r', 'synset': 'beeper.n.01', 'synonyms': ['beeper', 'pager'], 'id': 82, 'def': 'an device that beeps when the person carrying it is being paged', 'name': 'beeper'}, {'frequency': 'f', 'synset': 'beer_bottle.n.01', 'synonyms': ['beer_bottle'], 'id': 83, 'def': 'a bottle that holds beer', 'name': 'beer_bottle'}, {'frequency': 'c', 'synset': 'beer_can.n.01', 'synonyms': ['beer_can'], 'id': 84, 'def': 'a can that holds beer', 'name': 'beer_can'}, {'frequency': 'r', 'synset': 'beetle.n.01', 'synonyms': ['beetle'], 'id': 85, 'def': 'insect with hard wing covers', 'name': 'beetle'}, {'frequency': 'f', 'synset': 'bell.n.01', 'synonyms': ['bell'], 'id': 86, 'def': 'a hollow device made of metal that makes a ringing sound when struck', 'name': 'bell'}, {'frequency': 'f', 'synset': 'bell_pepper.n.02', 'synonyms': ['bell_pepper', 'capsicum'], 'id': 87, 'def': 'large bell-shaped sweet pepper in green or red or yellow or orange or black varieties', 'name': 'bell_pepper'}, {'frequency': 'f', 'synset': 'belt.n.02', 'synonyms': ['belt'], 'id': 88, 'def': 'a band to tie or buckle around the body (usually at the waist)', 'name': 'belt'}, {'frequency': 'f', 'synset': 'belt_buckle.n.01', 'synonyms': ['belt_buckle'], 'id': 89, 'def': 'the buckle used to fasten a belt', 'name': 'belt_buckle'}, {'frequency': 'f', 'synset': 'bench.n.01', 'synonyms': ['bench'], 'id': 90, 'def': 'a long seat for more than one person', 'name': 'bench'}, {'frequency': 'c', 'synset': 'beret.n.01', 'synonyms': ['beret'], 'id': 91, 'def': 'a cap with no brim or bill; made of soft cloth', 'name': 'beret'}, {'frequency': 'c', 'synset': 'bib.n.02', 'synonyms': ['bib'], 'id': 92, 'def': 'a napkin tied under the chin of a child while eating', 'name': 'bib'}, {'frequency': 'r', 'synset': 'bible.n.01', 'synonyms': ['Bible'], 'id': 93, 'def': 'the sacred writings of the Christian religions', 'name': 'Bible'}, {'frequency': 'f', 'synset': 'bicycle.n.01', 'synonyms': ['bicycle', 'bike_(bicycle)'], 'id': 94, 'def': 'a wheeled vehicle that has two wheels and is moved by foot pedals', 'name': 'bicycle'}, {'frequency': 'f', 'synset': 'bill.n.09', 'synonyms': ['visor', 'vizor'], 'id': 95, 'def': 'a brim that projects to the front to shade the eyes', 'name': 'visor'}, {'frequency': 'f', 'synset': 'billboard.n.01', 'synonyms': ['billboard'], 'id': 96, 'def': 'large outdoor signboard', 'name': 'billboard'}, {'frequency': 'c', 'synset': 'binder.n.03', 'synonyms': ['binder', 'ring-binder'], 'id': 97, 'def': 'holds loose papers or magazines', 'name': 'binder'}, {'frequency': 'c', 'synset': 'binoculars.n.01', 'synonyms': ['binoculars', 'field_glasses', 'opera_glasses'], 'id': 98, 'def': 'an optical instrument designed for simultaneous use by both eyes', 'name': 'binoculars'}, {'frequency': 'f', 'synset': 'bird.n.01', 'synonyms': ['bird'], 'id': 99, 'def': 'animal characterized by feathers and wings', 'name': 'bird'}, {'frequency': 'c', 'synset': 
'bird_feeder.n.01', 'synonyms': ['birdfeeder'], 'id': 100, 'def': 'an outdoor device that supplies food for wild birds', 'name': 'birdfeeder'}, {'frequency': 'c', 'synset': 'birdbath.n.01', 'synonyms': ['birdbath'], 'id': 101, 'def': 'an ornamental basin (usually in a garden) for birds to bathe in', 'name': 'birdbath'}, {'frequency': 'c', 'synset': 'birdcage.n.01', 'synonyms': ['birdcage'], 'id': 102, 'def': 'a cage in which a bird can be kept', 'name': 'birdcage'}, {'frequency': 'c', 'synset': 'birdhouse.n.01', 'synonyms': ['birdhouse'], 'id': 103, 'def': 'a shelter for birds', 'name': 'birdhouse'}, {'frequency': 'f', 'synset': 'birthday_cake.n.01', 'synonyms': ['birthday_cake'], 'id': 104, 'def': 'decorated cake served at a birthday party', 'name': 'birthday_cake'}, {'frequency': 'r', 'synset': 'birthday_card.n.01', 'synonyms': ['birthday_card'], 'id': 105, 'def': 'a card expressing a birthday greeting', 'name': 'birthday_card'}, {'frequency': 'r', 'synset': 'black_flag.n.01', 'synonyms': ['pirate_flag'], 'id': 106, 'def': 'a flag usually bearing a white skull and crossbones on a black background', 'name': 'pirate_flag'}, {'frequency': 'c', 'synset': 'black_sheep.n.02', 'synonyms': ['black_sheep'], 'id': 107, 'def': 'sheep with a black coat', 'name': 'black_sheep'}, {'frequency': 'c', 'synset': 'blackberry.n.01', 'synonyms': ['blackberry'], 'id': 108, 'def': 'large sweet black or very dark purple edible aggregate fruit', 'name': 'blackberry'}, {'frequency': 'f', 'synset': 'blackboard.n.01', 'synonyms': ['blackboard', 'chalkboard'], 'id': 109, 'def': 'sheet of slate; for writing with chalk', 'name': 'blackboard'}, {'frequency': 'f', 'synset': 'blanket.n.01', 'synonyms': ['blanket'], 'id': 110, 'def': 'bedding that keeps a person warm in bed', 'name': 'blanket'}, {'frequency': 'c', 'synset': 'blazer.n.01', 'synonyms': ['blazer', 'sport_jacket', 'sport_coat', 'sports_jacket', 'sports_coat'], 'id': 111, 'def': 'lightweight jacket; often striped in the colors of a club or school', 'name': 'blazer'}, {'frequency': 'f', 'synset': 'blender.n.01', 'synonyms': ['blender', 'liquidizer', 'liquidiser'], 'id': 112, 'def': 'an electrically powered mixer that mix or chop or liquefy foods', 'name': 'blender'}, {'frequency': 'r', 'synset': 'blimp.n.02', 'synonyms': ['blimp'], 'id': 113, 'def': 'a small nonrigid airship used for observation or as a barrage balloon', 'name': 'blimp'}, {'frequency': 'f', 'synset': 'blinker.n.01', 'synonyms': ['blinker', 'flasher'], 'id': 114, 'def': 'a light that flashes on and off; used as a signal or to send messages', 'name': 'blinker'}, {'frequency': 'f', 'synset': 'blouse.n.01', 'synonyms': ['blouse'], 'id': 115, 'def': 'a top worn by women', 'name': 'blouse'}, {'frequency': 'f', 'synset': 'blueberry.n.02', 'synonyms': ['blueberry'], 'id': 116, 'def': 'sweet edible dark-blue berries of blueberry plants', 'name': 'blueberry'}, {'frequency': 'r', 'synset': 'board.n.09', 'synonyms': ['gameboard'], 'id': 117, 'def': 'a flat portable surface (usually rectangular) designed for board games', 'name': 'gameboard'}, {'frequency': 'f', 'synset': 'boat.n.01', 'synonyms': ['boat', 'ship_(boat)'], 'id': 118, 'def': 'a vessel for travel on water', 'name': 'boat'}, {'frequency': 'r', 'synset': 'bob.n.05', 'synonyms': ['bob', 'bobber', 'bobfloat'], 'id': 119, 'def': 'a small float usually made of cork; attached to a fishing line', 'name': 'bob'}, {'frequency': 'c', 'synset': 'bobbin.n.01', 'synonyms': ['bobbin', 'spool', 'reel'], 'id': 120, 'def': 'a thing around which thread/tape/film 
or other flexible materials can be wound', 'name': 'bobbin'}, {'frequency': 'c', 'synset': 'bobby_pin.n.01', 'synonyms': ['bobby_pin', 'hairgrip'], 'id': 121, 'def': 'a flat wire hairpin used to hold bobbed hair in place', 'name': 'bobby_pin'}, {'frequency': 'c', 'synset': 'boiled_egg.n.01', 'synonyms': ['boiled_egg', 'coddled_egg'], 'id': 122, 'def': 'egg cooked briefly in the shell in gently boiling water', 'name': 'boiled_egg'}, {'frequency': 'r', 'synset': 'bolo_tie.n.01', 'synonyms': ['bolo_tie', 'bolo', 'bola_tie', 'bola'], 'id': 123, 'def': 'a cord fastened around the neck with an ornamental clasp and worn as a necktie', 'name': 'bolo_tie'}, {'frequency': 'c', 'synset': 'bolt.n.03', 'synonyms': ['deadbolt'], 'id': 124, 'def': 'the part of a lock that is engaged or withdrawn with a key', 'name': 'deadbolt'}, {'frequency': 'f', 'synset': 'bolt.n.06', 'synonyms': ['bolt'], 'id': 125, 'def': 'a screw that screws into a nut to form a fastener', 'name': 'bolt'}, {'frequency': 'r', 'synset': 'bonnet.n.01', 'synonyms': ['bonnet'], 'id': 126, 'def': 'a hat tied under the chin', 'name': 'bonnet'}, {'frequency': 'f', 'synset': 'book.n.01', 'synonyms': ['book'], 'id': 127, 'def': 'a written work or composition that has been published', 'name': 'book'}, {'frequency': 'c', 'synset': 'bookcase.n.01', 'synonyms': ['bookcase'], 'id': 128, 'def': 'a piece of furniture with shelves for storing books', 'name': 'bookcase'}, {'frequency': 'c', 'synset': 'booklet.n.01', 'synonyms': ['booklet', 'brochure', 'leaflet', 'pamphlet'], 'id': 129, 'def': 'a small book usually having a paper cover', 'name': 'booklet'}, {'frequency': 'r', 'synset': 'bookmark.n.01', 'synonyms': ['bookmark', 'bookmarker'], 'id': 130, 'def': 'a marker (a piece of paper or ribbon) placed between the pages of a book', 'name': 'bookmark'}, {'frequency': 'r', 'synset': 'boom.n.04', 'synonyms': ['boom_microphone', 'microphone_boom'], 'id': 131, 'def': 'a pole carrying an overhead microphone projected over a film or tv set', 'name': 'boom_microphone'}, {'frequency': 'f', 'synset': 'boot.n.01', 'synonyms': ['boot'], 'id': 132, 'def': 'footwear that covers the whole foot and lower leg', 'name': 'boot'}, {'frequency': 'f', 'synset': 'bottle.n.01', 'synonyms': ['bottle'], 'id': 133, 'def': 'a glass or plastic vessel used for storing drinks or other liquids', 'name': 'bottle'}, {'frequency': 'c', 'synset': 'bottle_opener.n.01', 'synonyms': ['bottle_opener'], 'id': 134, 'def': 'an opener for removing caps or corks from bottles', 'name': 'bottle_opener'}, {'frequency': 'c', 'synset': 'bouquet.n.01', 'synonyms': ['bouquet'], 'id': 135, 'def': 'an arrangement of flowers that is usually given as a present', 'name': 'bouquet'}, {'frequency': 'r', 'synset': 'bow.n.04', 'synonyms': ['bow_(weapon)'], 'id': 136, 'def': 'a weapon for shooting arrows', 'name': 'bow_(weapon)'}, {'frequency': 'f', 'synset': 'bow.n.08', 'synonyms': ['bow_(decorative_ribbons)'], 'id': 137, 'def': 'a decorative interlacing of ribbons', 'name': 'bow_(decorative_ribbons)'}, {'frequency': 'f', 'synset': 'bow_tie.n.01', 'synonyms': ['bow-tie', 'bowtie'], 'id': 138, 'def': "a man's tie that ties in a bow", 'name': 'bow-tie'}, {'frequency': 'f', 'synset': 'bowl.n.03', 'synonyms': ['bowl'], 'id': 139, 'def': 'a dish that is round and open at the top for serving foods', 'name': 'bowl'}, {'frequency': 'r', 'synset': 'bowl.n.08', 'synonyms': ['pipe_bowl'], 'id': 140, 'def': 'a small round container that is open at the top for holding tobacco', 'name': 'pipe_bowl'}, {'frequency': 'c', 
'synset': 'bowler_hat.n.01', 'synonyms': ['bowler_hat', 'bowler', 'derby_hat', 'derby', 'plug_hat'], 'id': 141, 'def': 'a felt hat that is round and hard with a narrow brim', 'name': 'bowler_hat'}, {'frequency': 'r', 'synset': 'bowling_ball.n.01', 'synonyms': ['bowling_ball'], 'id': 142, 'def': 'a large ball with finger holes used in the sport of bowling', 'name': 'bowling_ball'}, {'frequency': 'f', 'synset': 'box.n.01', 'synonyms': ['box'], 'id': 143, 'def': 'a (usually rectangular) container; may have a lid', 'name': 'box'}, {'frequency': 'r', 'synset': 'boxing_glove.n.01', 'synonyms': ['boxing_glove'], 'id': 144, 'def': 'large glove coverings the fists of a fighter worn for the sport of boxing', 'name': 'boxing_glove'}, {'frequency': 'c', 'synset': 'brace.n.06', 'synonyms': ['suspenders'], 'id': 145, 'def': 'elastic straps that hold trousers up (usually used in the plural)', 'name': 'suspenders'}, {'frequency': 'f', 'synset': 'bracelet.n.02', 'synonyms': ['bracelet', 'bangle'], 'id': 146, 'def': 'jewelry worn around the wrist for decoration', 'name': 'bracelet'}, {'frequency': 'r', 'synset': 'brass.n.07', 'synonyms': ['brass_plaque'], 'id': 147, 'def': 'a memorial made of brass', 'name': 'brass_plaque'}, {'frequency': 'c', 'synset': 'brassiere.n.01', 'synonyms': ['brassiere', 'bra', 'bandeau'], 'id': 148, 'def': 'an undergarment worn by women to support their breasts', 'name': 'brassiere'}, {'frequency': 'c', 'synset': 'bread-bin.n.01', 'synonyms': ['bread-bin', 'breadbox'], 'id': 149, 'def': 'a container used to keep bread or cake in', 'name': 'bread-bin'}, {'frequency': 'f', 'synset': 'bread.n.01', 'synonyms': ['bread'], 'id': 150, 'def': 'food made from dough of flour or meal and usually raised with yeast or baking powder and then baked', 'name': 'bread'}, {'frequency': 'r', 'synset': 'breechcloth.n.01', 'synonyms': ['breechcloth', 'breechclout', 'loincloth'], 'id': 151, 'def': 'a garment that provides covering for the loins', 'name': 'breechcloth'}, {'frequency': 'f', 'synset': 'bridal_gown.n.01', 'synonyms': ['bridal_gown', 'wedding_gown', 'wedding_dress'], 'id': 152, 'def': 'a gown worn by the bride at a wedding', 'name': 'bridal_gown'}, {'frequency': 'c', 'synset': 'briefcase.n.01', 'synonyms': ['briefcase'], 'id': 153, 'def': 'a case with a handle; for carrying papers or files or books', 'name': 'briefcase'}, {'frequency': 'f', 'synset': 'broccoli.n.01', 'synonyms': ['broccoli'], 'id': 154, 'def': 'plant with dense clusters of tight green flower buds', 'name': 'broccoli'}, {'frequency': 'r', 'synset': 'brooch.n.01', 'synonyms': ['broach'], 'id': 155, 'def': 'a decorative pin worn by women', 'name': 'broach'}, {'frequency': 'c', 'synset': 'broom.n.01', 'synonyms': ['broom'], 'id': 156, 'def': 'bundle of straws or twigs attached to a long handle; used for cleaning', 'name': 'broom'}, {'frequency': 'c', 'synset': 'brownie.n.03', 'synonyms': ['brownie'], 'id': 157, 'def': 'square or bar of very rich chocolate cake usually with nuts', 'name': 'brownie'}, {'frequency': 'c', 'synset': 'brussels_sprouts.n.01', 'synonyms': ['brussels_sprouts'], 'id': 158, 'def': 'the small edible cabbage-like buds growing along a stalk', 'name': 'brussels_sprouts'}, {'frequency': 'r', 'synset': 'bubble_gum.n.01', 'synonyms': ['bubble_gum'], 'id': 159, 'def': 'a kind of chewing gum that can be blown into bubbles', 'name': 'bubble_gum'}, {'frequency': 'f', 'synset': 'bucket.n.01', 'synonyms': ['bucket', 'pail'], 'id': 160, 'def': 'a roughly cylindrical vessel that is open at the top', 'name': 'bucket'}, 
{'frequency': 'r', 'synset': 'buggy.n.01', 'synonyms': ['horse_buggy'], 'id': 161, 'def': 'a small lightweight carriage; drawn by a single horse', 'name': 'horse_buggy'}, {'frequency': 'c', 'synset': 'bull.n.11', 'synonyms': ['horned_cow'], 'id': 162, 'def': 'a cow with horns', 'name': 'bull'}, {'frequency': 'c', 'synset': 'bulldog.n.01', 'synonyms': ['bulldog'], 'id': 163, 'def': 'a thickset short-haired dog with a large head and strong undershot lower jaw', 'name': 'bulldog'}, {'frequency': 'r', 'synset': 'bulldozer.n.01', 'synonyms': ['bulldozer', 'dozer'], 'id': 164, 'def': 'large powerful tractor; a large blade in front flattens areas of ground', 'name': 'bulldozer'}, {'frequency': 'c', 'synset': 'bullet_train.n.01', 'synonyms': ['bullet_train'], 'id': 165, 'def': 'a high-speed passenger train', 'name': 'bullet_train'}, {'frequency': 'c', 'synset': 'bulletin_board.n.02', 'synonyms': ['bulletin_board', 'notice_board'], 'id': 166, 'def': 'a board that hangs on a wall; displays announcements', 'name': 'bulletin_board'}, {'frequency': 'r', 'synset': 'bulletproof_vest.n.01', 'synonyms': ['bulletproof_vest'], 'id': 167, 'def': 'a vest capable of resisting the impact of a bullet', 'name': 'bulletproof_vest'}, {'frequency': 'c', 'synset': 'bullhorn.n.01', 'synonyms': ['bullhorn', 'megaphone'], 'id': 168, 'def': 'a portable loudspeaker with built-in microphone and amplifier', 'name': 'bullhorn'}, {'frequency': 'f', 'synset': 'bun.n.01', 'synonyms': ['bun', 'roll'], 'id': 169, 'def': 'small rounded bread either plain or sweet', 'name': 'bun'}, {'frequency': 'c', 'synset': 'bunk_bed.n.01', 'synonyms': ['bunk_bed'], 'id': 170, 'def': 'beds built one above the other', 'name': 'bunk_bed'}, {'frequency': 'f', 'synset': 'buoy.n.01', 'synonyms': ['buoy'], 'id': 171, 'def': 'a float attached by rope to the seabed to mark channels in a harbor or underwater hazards', 'name': 'buoy'}, {'frequency': 'r', 'synset': 'burrito.n.01', 'synonyms': ['burrito'], 'id': 172, 'def': 'a flour tortilla folded around a filling', 'name': 'burrito'}, {'frequency': 'f', 'synset': 'bus.n.01', 'synonyms': ['bus_(vehicle)', 'autobus', 'charabanc', 'double-decker', 'motorbus', 'motorcoach'], 'id': 173, 'def': 'a vehicle carrying many passengers; used for public transport', 'name': 'bus_(vehicle)'}, {'frequency': 'c', 'synset': 'business_card.n.01', 'synonyms': ['business_card'], 'id': 174, 'def': "a card on which are printed the person's name and business affiliation", 'name': 'business_card'}, {'frequency': 'f', 'synset': 'butter.n.01', 'synonyms': ['butter'], 'id': 175, 'def': 'an edible emulsion of fat globules made by churning milk or cream; for cooking and table use', 'name': 'butter'}, {'frequency': 'c', 'synset': 'butterfly.n.01', 'synonyms': ['butterfly'], 'id': 176, 'def': 'insect typically having a slender body with knobbed antennae and broad colorful wings', 'name': 'butterfly'}, {'frequency': 'f', 'synset': 'button.n.01', 'synonyms': ['button'], 'id': 177, 'def': 'a round fastener sewn to shirts and coats etc to fit through buttonholes', 'name': 'button'}, {'frequency': 'f', 'synset': 'cab.n.03', 'synonyms': ['cab_(taxi)', 'taxi', 'taxicab'], 'id': 178, 'def': 'a car that takes passengers where they want to go in exchange for money', 'name': 'cab_(taxi)'}, {'frequency': 'r', 'synset': 'cabana.n.01', 'synonyms': ['cabana'], 'id': 179, 'def': 'a small tent used as a dressing room beside the sea or a swimming pool', 'name': 'cabana'}, {'frequency': 'c', 'synset': 'cabin_car.n.01', 'synonyms': ['cabin_car', 'caboose'], 
'id': 180, 'def': 'a car on a freight train for use of the train crew; usually the last car on the train', 'name': 'cabin_car'}, {'frequency': 'f', 'synset': 'cabinet.n.01', 'synonyms': ['cabinet'], 'id': 181, 'def': 'a piece of furniture resembling a cupboard with doors and shelves and drawers', 'name': 'cabinet'}, {'frequency': 'r', 'synset': 'cabinet.n.03', 'synonyms': ['locker', 'storage_locker'], 'id': 182, 'def': 'a storage compartment for clothes and valuables; usually it has a lock', 'name': 'locker'}, {'frequency': 'f', 'synset': 'cake.n.03', 'synonyms': ['cake'], 'id': 183, 'def': 'baked goods made from or based on a mixture of flour, sugar, eggs, and fat', 'name': 'cake'}, {'frequency': 'c', 'synset': 'calculator.n.02', 'synonyms': ['calculator'], 'id': 184, 'def': 'a small machine that is used for mathematical calculations', 'name': 'calculator'}, {'frequency': 'f', 'synset': 'calendar.n.02', 'synonyms': ['calendar'], 'id': 185, 'def': 'a list or register of events (appointments/social events/court cases, etc)', 'name': 'calendar'}, {'frequency': 'c', 'synset': 'calf.n.01', 'synonyms': ['calf'], 'id': 186, 'def': 'young of domestic cattle', 'name': 'calf'}, {'frequency': 'c', 'synset': 'camcorder.n.01', 'synonyms': ['camcorder'], 'id': 187, 'def': 'a portable television camera and videocassette recorder', 'name': 'camcorder'}, {'frequency': 'c', 'synset': 'camel.n.01', 'synonyms': ['camel'], 'id': 188, 'def': 'cud-chewing mammal used as a draft or saddle animal in desert regions', 'name': 'camel'}, {'frequency': 'f', 'synset': 'camera.n.01', 'synonyms': ['camera'], 'id': 189, 'def': 'equipment for taking photographs', 'name': 'camera'}, {'frequency': 'c', 'synset': 'camera_lens.n.01', 'synonyms': ['camera_lens'], 'id': 190, 'def': 'a lens that focuses the image in a camera', 'name': 'camera_lens'}, {'frequency': 'c', 'synset': 'camper.n.02', 'synonyms': ['camper_(vehicle)', 'camping_bus', 'motor_home'], 'id': 191, 'def': 'a recreational vehicle equipped for camping out while traveling', 'name': 'camper_(vehicle)'}, {'frequency': 'f', 'synset': 'can.n.01', 'synonyms': ['can', 'tin_can'], 'id': 192, 'def': 'airtight sealed metal container for food or drink or paint etc.', 'name': 'can'}, {'frequency': 'c', 'synset': 'can_opener.n.01', 'synonyms': ['can_opener', 'tin_opener'], 'id': 193, 'def': 'a device for cutting cans open', 'name': 'can_opener'}, {'frequency': 'f', 'synset': 'candle.n.01', 'synonyms': ['candle', 'candlestick'], 'id': 194, 'def': 'stick of wax with a wick in the middle', 'name': 'candle'}, {'frequency': 'f', 'synset': 'candlestick.n.01', 'synonyms': ['candle_holder'], 'id': 195, 'def': 'a holder with sockets for candles', 'name': 'candle_holder'}, {'frequency': 'r', 'synset': 'candy_bar.n.01', 'synonyms': ['candy_bar'], 'id': 196, 'def': 'a candy shaped as a bar', 'name': 'candy_bar'}, {'frequency': 'c', 'synset': 'candy_cane.n.01', 'synonyms': ['candy_cane'], 'id': 197, 'def': 'a hard candy in the shape of a rod (usually with stripes)', 'name': 'candy_cane'}, {'frequency': 'c', 'synset': 'cane.n.01', 'synonyms': ['walking_cane'], 'id': 198, 'def': 'a stick that people can lean on to help them walk', 'name': 'walking_cane'}, {'frequency': 'c', 'synset': 'canister.n.02', 'synonyms': ['canister', 'cannister'], 'id': 199, 'def': 'metal container for storing dry foods such as tea or flour', 'name': 'canister'}, {'frequency': 'c', 'synset': 'canoe.n.01', 'synonyms': ['canoe'], 'id': 200, 'def': 'small and light boat; pointed at both ends; propelled with a paddle', 
'name': 'canoe'}, {'frequency': 'c', 'synset': 'cantaloup.n.02', 'synonyms': ['cantaloup', 'cantaloupe'], 'id': 201, 'def': 'the fruit of a cantaloup vine; small to medium-sized melon with yellowish flesh', 'name': 'cantaloup'}, {'frequency': 'r', 'synset': 'canteen.n.01', 'synonyms': ['canteen'], 'id': 202, 'def': 'a flask for carrying water; used by soldiers or travelers', 'name': 'canteen'}, {'frequency': 'f', 'synset': 'cap.n.01', 'synonyms': ['cap_(headwear)'], 'id': 203, 'def': 'a tight-fitting headwear', 'name': 'cap_(headwear)'}, {'frequency': 'f', 'synset': 'cap.n.02', 'synonyms': ['bottle_cap', 'cap_(container_lid)'], 'id': 204, 'def': 'a top (as for a bottle)', 'name': 'bottle_cap'}, {'frequency': 'c', 'synset': 'cape.n.02', 'synonyms': ['cape'], 'id': 205, 'def': 'a sleeveless garment like a cloak but shorter', 'name': 'cape'}, {'frequency': 'c', 'synset': 'cappuccino.n.01', 'synonyms': ['cappuccino', 'coffee_cappuccino'], 'id': 206, 'def': 'equal parts of espresso and steamed milk', 'name': 'cappuccino'}, {'frequency': 'f', 'synset': 'car.n.01', 'synonyms': ['car_(automobile)', 'auto_(automobile)', 'automobile'], 'id': 207, 'def': 'a motor vehicle with four wheels', 'name': 'car_(automobile)'}, {'frequency': 'f', 'synset': 'car.n.02', 'synonyms': ['railcar_(part_of_a_train)', 'railway_car_(part_of_a_train)', 'railroad_car_(part_of_a_train)'], 'id': 208, 'def': 'a wheeled vehicle adapted to the rails of railroad (mark each individual railcar separately)', 'name': 'railcar_(part_of_a_train)'}, {'frequency': 'r', 'synset': 'car.n.04', 'synonyms': ['elevator_car'], 'id': 209, 'def': 'where passengers ride up and down', 'name': 'elevator_car'}, {'frequency': 'r', 'synset': 'car_battery.n.01', 'synonyms': ['car_battery', 'automobile_battery'], 'id': 210, 'def': 'a battery in a motor vehicle', 'name': 'car_battery'}, {'frequency': 'c', 'synset': 'card.n.02', 'synonyms': ['identity_card'], 'id': 211, 'def': 'a card certifying the identity of the bearer', 'name': 'identity_card'}, {'frequency': 'c', 'synset': 'card.n.03', 'synonyms': ['card'], 'id': 212, 'def': 'a rectangular piece of paper used to send messages (e.g. 
greetings or pictures)', 'name': 'card'}, {'frequency': 'c', 'synset': 'cardigan.n.01', 'synonyms': ['cardigan'], 'id': 213, 'def': 'knitted jacket that is fastened up the front with buttons or a zipper', 'name': 'cardigan'}, {'frequency': 'r', 'synset': 'cargo_ship.n.01', 'synonyms': ['cargo_ship', 'cargo_vessel'], 'id': 214, 'def': 'a ship designed to carry cargo', 'name': 'cargo_ship'}, {'frequency': 'r', 'synset': 'carnation.n.01', 'synonyms': ['carnation'], 'id': 215, 'def': 'plant with pink to purple-red spice-scented usually double flowers', 'name': 'carnation'}, {'frequency': 'c', 'synset': 'carriage.n.02', 'synonyms': ['horse_carriage'], 'id': 216, 'def': 'a vehicle with wheels drawn by one or more horses', 'name': 'horse_carriage'}, {'frequency': 'f', 'synset': 'carrot.n.01', 'synonyms': ['carrot'], 'id': 217, 'def': 'deep orange edible root of the cultivated carrot plant', 'name': 'carrot'}, {'frequency': 'f', 'synset': 'carryall.n.01', 'synonyms': ['tote_bag'], 'id': 218, 'def': 'a capacious bag or basket', 'name': 'tote_bag'}, {'frequency': 'c', 'synset': 'cart.n.01', 'synonyms': ['cart'], 'id': 219, 'def': 'a heavy open wagon usually having two wheels and drawn by an animal', 'name': 'cart'}, {'frequency': 'c', 'synset': 'carton.n.02', 'synonyms': ['carton'], 'id': 220, 'def': 'a container made of cardboard for holding food or drink', 'name': 'carton'}, {'frequency': 'c', 'synset': 'cash_register.n.01', 'synonyms': ['cash_register', 'register_(for_cash_transactions)'], 'id': 221, 'def': 'a cashbox with an adding machine to register transactions', 'name': 'cash_register'}, {'frequency': 'r', 'synset': 'casserole.n.01', 'synonyms': ['casserole'], 'id': 222, 'def': 'food cooked and served in a casserole', 'name': 'casserole'}, {'frequency': 'r', 'synset': 'cassette.n.01', 'synonyms': ['cassette'], 'id': 223, 'def': 'a container that holds a magnetic tape used for recording or playing sound or video', 'name': 'cassette'}, {'frequency': 'c', 'synset': 'cast.n.05', 'synonyms': ['cast', 'plaster_cast', 'plaster_bandage'], 'id': 224, 'def': 'bandage consisting of a firm covering that immobilizes broken bones while they heal', 'name': 'cast'}, {'frequency': 'f', 'synset': 'cat.n.01', 'synonyms': ['cat'], 'id': 225, 'def': 'a domestic house cat', 'name': 'cat'}, {'frequency': 'f', 'synset': 'cauliflower.n.02', 'synonyms': ['cauliflower'], 'id': 226, 'def': 'edible compact head of white undeveloped flowers', 'name': 'cauliflower'}, {'frequency': 'c', 'synset': 'cayenne.n.02', 'synonyms': ['cayenne_(spice)', 'cayenne_pepper_(spice)', 'red_pepper_(spice)'], 'id': 227, 'def': 'ground pods and seeds of pungent red peppers of the genus Capsicum', 'name': 'cayenne_(spice)'}, {'frequency': 'c', 'synset': 'cd_player.n.01', 'synonyms': ['CD_player'], 'id': 228, 'def': 'electronic equipment for playing compact discs (CDs)', 'name': 'CD_player'}, {'frequency': 'f', 'synset': 'celery.n.01', 'synonyms': ['celery'], 'id': 229, 'def': 'widely cultivated herb with aromatic leaf stalks that are eaten raw or cooked', 'name': 'celery'}, {'frequency': 'f', 'synset': 'cellular_telephone.n.01', 'synonyms': ['cellular_telephone', 'cellular_phone', 'cellphone', 'mobile_phone', 'smart_phone'], 'id': 230, 'def': 'a hand-held mobile telephone', 'name': 'cellular_telephone'}, {'frequency': 'r', 'synset': 'chain_mail.n.01', 'synonyms': ['chain_mail', 'ring_mail', 'chain_armor', 'chain_armour', 'ring_armor', 'ring_armour'], 'id': 231, 'def': '(Middle Ages) flexible armor made of interlinked metal rings', 'name': 
'chain_mail'}, {'frequency': 'f', 'synset': 'chair.n.01', 'synonyms': ['chair'], 'id': 232, 'def': 'a seat for one person, with a support for the back', 'name': 'chair'}, {'frequency': 'r', 'synset': 'chaise_longue.n.01', 'synonyms': ['chaise_longue', 'chaise', 'daybed'], 'id': 233, 'def': 'a long chair; for reclining', 'name': 'chaise_longue'}, {'frequency': 'r', 'synset': 'chalice.n.01', 'synonyms': ['chalice'], 'id': 234, 'def': 'a bowl-shaped drinking vessel; especially the Eucharistic cup', 'name': 'chalice'}, {'frequency': 'f', 'synset': 'chandelier.n.01', 'synonyms': ['chandelier'], 'id': 235, 'def': 'branched lighting fixture; often ornate; hangs from the ceiling', 'name': 'chandelier'}, {'frequency': 'r', 'synset': 'chap.n.04', 'synonyms': ['chap'], 'id': 236, 'def': 'leather leggings without a seat; worn over trousers by cowboys to protect their legs', 'name': 'chap'}, {'frequency': 'r', 'synset': 'checkbook.n.01', 'synonyms': ['checkbook', 'chequebook'], 'id': 237, 'def': 'a book issued to holders of checking accounts', 'name': 'checkbook'}, {'frequency': 'r', 'synset': 'checkerboard.n.01', 'synonyms': ['checkerboard'], 'id': 238, 'def': 'a board having 64 squares of two alternating colors', 'name': 'checkerboard'}, {'frequency': 'c', 'synset': 'cherry.n.03', 'synonyms': ['cherry'], 'id': 239, 'def': 'a red fruit with a single hard stone', 'name': 'cherry'}, {'frequency': 'r', 'synset': 'chessboard.n.01', 'synonyms': ['chessboard'], 'id': 240, 'def': 'a checkerboard used to play chess', 'name': 'chessboard'}, {'frequency': 'c', 'synset': 'chicken.n.02', 'synonyms': ['chicken_(animal)'], 'id': 241, 'def': 'a domestic fowl bred for flesh or eggs', 'name': 'chicken_(animal)'}, {'frequency': 'c', 'synset': 'chickpea.n.01', 'synonyms': ['chickpea', 'garbanzo'], 'id': 242, 'def': 'the seed of the chickpea plant; usually dried', 'name': 'chickpea'}, {'frequency': 'c', 'synset': 'chili.n.02', 'synonyms': ['chili_(vegetable)', 'chili_pepper_(vegetable)', 'chilli_(vegetable)', 'chilly_(vegetable)', 'chile_(vegetable)'], 'id': 243, 'def': 'very hot and finely tapering pepper of special pungency', 'name': 'chili_(vegetable)'}, {'frequency': 'r', 'synset': 'chime.n.01', 'synonyms': ['chime', 'gong'], 'id': 244, 'def': 'an instrument consisting of a set of bells that are struck with a hammer', 'name': 'chime'}, {'frequency': 'r', 'synset': 'chinaware.n.01', 'synonyms': ['chinaware'], 'id': 245, 'def': 'dishware made of high quality porcelain', 'name': 'chinaware'}, {'frequency': 'c', 'synset': 'chip.n.04', 'synonyms': ['crisp_(potato_chip)', 'potato_chip'], 'id': 246, 'def': 'a thin crisp slice of potato fried in deep fat', 'name': 'crisp_(potato_chip)'}, {'frequency': 'r', 'synset': 'chip.n.06', 'synonyms': ['poker_chip'], 'id': 247, 'def': 'a small disk-shaped counter used to represent money when gambling', 'name': 'poker_chip'}, {'frequency': 'c', 'synset': 'chocolate_bar.n.01', 'synonyms': ['chocolate_bar'], 'id': 248, 'def': 'a bar of chocolate candy', 'name': 'chocolate_bar'}, {'frequency': 'c', 'synset': 'chocolate_cake.n.01', 'synonyms': ['chocolate_cake'], 'id': 249, 'def': 'cake containing chocolate', 'name': 'chocolate_cake'}, {'frequency': 'r', 'synset': 'chocolate_milk.n.01', 'synonyms': ['chocolate_milk'], 'id': 250, 'def': 'milk flavored with chocolate syrup', 'name': 'chocolate_milk'}, {'frequency': 'r', 'synset': 'chocolate_mousse.n.01', 'synonyms': ['chocolate_mousse'], 'id': 251, 'def': 'dessert mousse made with chocolate', 'name': 'chocolate_mousse'}, {'frequency': 'f', 
'synset': 'choker.n.03', 'synonyms': ['choker', 'collar', 'neckband'], 'id': 252, 'def': 'shirt collar, animal collar, or tight-fitting necklace', 'name': 'choker'}, {'frequency': 'f', 'synset': 'chopping_board.n.01', 'synonyms': ['chopping_board', 'cutting_board', 'chopping_block'], 'id': 253, 'def': 'a wooden board where meats or vegetables can be cut', 'name': 'chopping_board'}, {'frequency': 'f', 'synset': 'chopstick.n.01', 'synonyms': ['chopstick'], 'id': 254, 'def': 'one of a pair of slender sticks used as oriental tableware to eat food with', 'name': 'chopstick'}, {'frequency': 'f', 'synset': 'christmas_tree.n.05', 'synonyms': ['Christmas_tree'], 'id': 255, 'def': 'an ornamented evergreen used as a Christmas decoration', 'name': 'Christmas_tree'}, {'frequency': 'c', 'synset': 'chute.n.02', 'synonyms': ['slide'], 'id': 256, 'def': 'sloping channel through which things can descend', 'name': 'slide'}, {'frequency': 'r', 'synset': 'cider.n.01', 'synonyms': ['cider', 'cyder'], 'id': 257, 'def': 'a beverage made from juice pressed from apples', 'name': 'cider'}, {'frequency': 'r', 'synset': 'cigar_box.n.01', 'synonyms': ['cigar_box'], 'id': 258, 'def': 'a box for holding cigars', 'name': 'cigar_box'}, {'frequency': 'f', 'synset': 'cigarette.n.01', 'synonyms': ['cigarette'], 'id': 259, 'def': 'finely ground tobacco wrapped in paper; for smoking', 'name': 'cigarette'}, {'frequency': 'c', 'synset': 'cigarette_case.n.01', 'synonyms': ['cigarette_case', 'cigarette_pack'], 'id': 260, 'def': 'a small flat case for holding cigarettes', 'name': 'cigarette_case'}, {'frequency': 'f', 'synset': 'cistern.n.02', 'synonyms': ['cistern', 'water_tank'], 'id': 261, 'def': 'a tank that holds the water used to flush a toilet', 'name': 'cistern'}, {'frequency': 'r', 'synset': 'clarinet.n.01', 'synonyms': ['clarinet'], 'id': 262, 'def': 'a single-reed instrument with a straight tube', 'name': 'clarinet'}, {'frequency': 'c', 'synset': 'clasp.n.01', 'synonyms': ['clasp'], 'id': 263, 'def': 'a fastener (as a buckle or hook) that is used to hold two things together', 'name': 'clasp'}, {'frequency': 'c', 'synset': 'cleansing_agent.n.01', 'synonyms': ['cleansing_agent', 'cleanser', 'cleaner'], 'id': 264, 'def': 'a preparation used in cleaning something', 'name': 'cleansing_agent'}, {'frequency': 'r', 'synset': 'cleat.n.02', 'synonyms': ['cleat_(for_securing_rope)'], 'id': 265, 'def': 'a fastener (usually with two projecting horns) around which a rope can be secured', 'name': 'cleat_(for_securing_rope)'}, {'frequency': 'r', 'synset': 'clementine.n.01', 'synonyms': ['clementine'], 'id': 266, 'def': 'a variety of mandarin orange', 'name': 'clementine'}, {'frequency': 'c', 'synset': 'clip.n.03', 'synonyms': ['clip'], 'id': 267, 'def': 'any of various small fasteners used to hold loose articles together', 'name': 'clip'}, {'frequency': 'c', 'synset': 'clipboard.n.01', 'synonyms': ['clipboard'], 'id': 268, 'def': 'a small writing board with a clip at the top for holding papers', 'name': 'clipboard'}, {'frequency': 'r', 'synset': 'clipper.n.03', 'synonyms': ['clippers_(for_plants)'], 'id': 269, 'def': 'shears for cutting grass or shrubbery (often used in the plural)', 'name': 'clippers_(for_plants)'}, {'frequency': 'r', 'synset': 'cloak.n.02', 'synonyms': ['cloak'], 'id': 270, 'def': 'a loose outer garment', 'name': 'cloak'}, {'frequency': 'f', 'synset': 'clock.n.01', 'synonyms': ['clock', 'timepiece', 'timekeeper'], 'id': 271, 'def': 'a timepiece that shows the time of day', 'name': 'clock'}, {'frequency': 'f', 'synset': 
'clock_tower.n.01', 'synonyms': ['clock_tower'], 'id': 272, 'def': 'a tower with a large clock visible high up on an outside face', 'name': 'clock_tower'}, {'frequency': 'c', 'synset': 'clothes_hamper.n.01', 'synonyms': ['clothes_hamper', 'laundry_basket', 'clothes_basket'], 'id': 273, 'def': 'a hamper that holds dirty clothes to be washed or wet clothes to be dried', 'name': 'clothes_hamper'}, {'frequency': 'c', 'synset': 'clothespin.n.01', 'synonyms': ['clothespin', 'clothes_peg'], 'id': 274, 'def': 'wood or plastic fastener; for holding clothes on a clothesline', 'name': 'clothespin'}, {'frequency': 'r', 'synset': 'clutch_bag.n.01', 'synonyms': ['clutch_bag'], 'id': 275, 'def': "a woman's strapless purse that is carried in the hand", 'name': 'clutch_bag'}, {'frequency': 'f', 'synset': 'coaster.n.03', 'synonyms': ['coaster'], 'id': 276, 'def': 'a covering (plate or mat) that protects the surface of a table', 'name': 'coaster'}, {'frequency': 'f', 'synset': 'coat.n.01', 'synonyms': ['coat'], 'id': 277, 'def': 'an outer garment that has sleeves and covers the body from shoulder down', 'name': 'coat'}, {'frequency': 'c', 'synset': 'coat_hanger.n.01', 'synonyms': ['coat_hanger', 'clothes_hanger', 'dress_hanger'], 'id': 278, 'def': "a hanger that is shaped like a person's shoulders", 'name': 'coat_hanger'}, {'frequency': 'c', 'synset': 'coatrack.n.01', 'synonyms': ['coatrack', 'hatrack'], 'id': 279, 'def': 'a rack with hooks for temporarily holding coats and hats', 'name': 'coatrack'}, {'frequency': 'c', 'synset': 'cock.n.04', 'synonyms': ['cock', 'rooster'], 'id': 280, 'def': 'adult male chicken', 'name': 'cock'}, {'frequency': 'r', 'synset': 'cockroach.n.01', 'synonyms': ['cockroach'], 'id': 281, 'def': 'any of numerous chiefly nocturnal insects; some are domestic pests', 'name': 'cockroach'}, {'frequency': 'r', 'synset': 'cocoa.n.01', 'synonyms': ['cocoa_(beverage)', 'hot_chocolate_(beverage)', 'drinking_chocolate'], 'id': 282, 'def': 'a beverage made from cocoa powder and milk and sugar; usually drunk hot', 'name': 'cocoa_(beverage)'}, {'frequency': 'c', 'synset': 'coconut.n.02', 'synonyms': ['coconut', 'cocoanut'], 'id': 283, 'def': 'large hard-shelled brown oval nut with a fibrous husk', 'name': 'coconut'}, {'frequency': 'f', 'synset': 'coffee_maker.n.01', 'synonyms': ['coffee_maker', 'coffee_machine'], 'id': 284, 'def': 'a kitchen appliance for brewing coffee automatically', 'name': 'coffee_maker'}, {'frequency': 'f', 'synset': 'coffee_table.n.01', 'synonyms': ['coffee_table', 'cocktail_table'], 'id': 285, 'def': 'low table where magazines can be placed and coffee or cocktails are served', 'name': 'coffee_table'}, {'frequency': 'c', 'synset': 'coffeepot.n.01', 'synonyms': ['coffeepot'], 'id': 286, 'def': 'tall pot in which coffee is brewed', 'name': 'coffeepot'}, {'frequency': 'r', 'synset': 'coil.n.05', 'synonyms': ['coil'], 'id': 287, 'def': 'tubing that is wound in a spiral', 'name': 'coil'}, {'frequency': 'c', 'synset': 'coin.n.01', 'synonyms': ['coin'], 'id': 288, 'def': 'a flat metal piece (usually a disc) used as money', 'name': 'coin'}, {'frequency': 'c', 'synset': 'colander.n.01', 'synonyms': ['colander', 'cullender'], 'id': 289, 'def': 'bowl-shaped strainer; used to wash or drain foods', 'name': 'colander'}, {'frequency': 'c', 'synset': 'coleslaw.n.01', 'synonyms': ['coleslaw', 'slaw'], 'id': 290, 'def': 'basically shredded cabbage', 'name': 'coleslaw'}, {'frequency': 'r', 'synset': 'coloring_material.n.01', 'synonyms': ['coloring_material', 'colouring_material'], 'id': 291, 
'def': 'any material used for its color', 'name': 'coloring_material'}, {'frequency': 'r', 'synset': 'combination_lock.n.01', 'synonyms': ['combination_lock'], 'id': 292, 'def': 'lock that can be opened only by turning dials in a special sequence', 'name': 'combination_lock'}, {'frequency': 'c', 'synset': 'comforter.n.04', 'synonyms': ['pacifier', 'teething_ring'], 'id': 293, 'def': 'device used for an infant to suck or bite on', 'name': 'pacifier'}, {'frequency': 'r', 'synset': 'comic_book.n.01', 'synonyms': ['comic_book'], 'id': 294, 'def': 'a magazine devoted to comic strips', 'name': 'comic_book'}, {'frequency': 'r', 'synset': 'compass.n.01', 'synonyms': ['compass'], 'id': 295, 'def': 'navigational instrument for finding directions', 'name': 'compass'}, {'frequency': 'f', 'synset': 'computer_keyboard.n.01', 'synonyms': ['computer_keyboard', 'keyboard_(computer)'], 'id': 296, 'def': 'a keyboard that is a data input device for computers', 'name': 'computer_keyboard'}, {'frequency': 'f', 'synset': 'condiment.n.01', 'synonyms': ['condiment'], 'id': 297, 'def': 'a preparation (a sauce or relish or spice) to enhance flavor or enjoyment', 'name': 'condiment'}, {'frequency': 'f', 'synset': 'cone.n.01', 'synonyms': ['cone', 'traffic_cone'], 'id': 298, 'def': 'a cone-shaped object used to direct traffic', 'name': 'cone'}, {'frequency': 'f', 'synset': 'control.n.09', 'synonyms': ['control', 'controller'], 'id': 299, 'def': 'a mechanism that controls the operation of a machine', 'name': 'control'}, {'frequency': 'r', 'synset': 'convertible.n.01', 'synonyms': ['convertible_(automobile)'], 'id': 300, 'def': 'a car that has top that can be folded or removed', 'name': 'convertible_(automobile)'}, {'frequency': 'r', 'synset': 'convertible.n.03', 'synonyms': ['sofa_bed'], 'id': 301, 'def': 'a sofa that can be converted into a bed', 'name': 'sofa_bed'}, {'frequency': 'r', 'synset': 'cooker.n.01', 'synonyms': ['cooker'], 'id': 302, 'def': 'a utensil for cooking', 'name': 'cooker'}, {'frequency': 'f', 'synset': 'cookie.n.01', 'synonyms': ['cookie', 'cooky', 'biscuit_(cookie)'], 'id': 303, 'def': "any of various small flat sweet cakes (`biscuit' is the British term)", 'name': 'cookie'}, {'frequency': 'r', 'synset': 'cooking_utensil.n.01', 'synonyms': ['cooking_utensil'], 'id': 304, 'def': 'a kitchen utensil made of material that does not melt easily; used for cooking', 'name': 'cooking_utensil'}, {'frequency': 'f', 'synset': 'cooler.n.01', 'synonyms': ['cooler_(for_food)', 'ice_chest'], 'id': 305, 'def': 'an insulated box for storing food often with ice', 'name': 'cooler_(for_food)'}, {'frequency': 'f', 'synset': 'cork.n.04', 'synonyms': ['cork_(bottle_plug)', 'bottle_cork'], 'id': 306, 'def': 'the plug in the mouth of a bottle (especially a wine bottle)', 'name': 'cork_(bottle_plug)'}, {'frequency': 'r', 'synset': 'corkboard.n.01', 'synonyms': ['corkboard'], 'id': 307, 'def': 'a sheet consisting of cork granules', 'name': 'corkboard'}, {'frequency': 'c', 'synset': 'corkscrew.n.01', 'synonyms': ['corkscrew', 'bottle_screw'], 'id': 308, 'def': 'a bottle opener that pulls corks', 'name': 'corkscrew'}, {'frequency': 'f', 'synset': 'corn.n.03', 'synonyms': ['edible_corn', 'corn', 'maize'], 'id': 309, 'def': 'ears or kernels of corn that can be prepared and served for human food (only mark individual ears or kernels)', 'name': 'edible_corn'}, {'frequency': 'r', 'synset': 'cornbread.n.01', 'synonyms': ['cornbread'], 'id': 310, 'def': 'bread made primarily of cornmeal', 'name': 'cornbread'}, {'frequency': 'c', 
'synset': 'cornet.n.01', 'synonyms': ['cornet', 'horn', 'trumpet'], 'id': 311, 'def': 'a brass musical instrument with a narrow tube and a flared bell and many valves', 'name': 'cornet'}, {'frequency': 'c', 'synset': 'cornice.n.01', 'synonyms': ['cornice', 'valance', 'valance_board', 'pelmet'], 'id': 312, 'def': 'a decorative framework to conceal curtain fixtures at the top of a window casing', 'name': 'cornice'}, {'frequency': 'r', 'synset': 'cornmeal.n.01', 'synonyms': ['cornmeal'], 'id': 313, 'def': 'coarsely ground corn', 'name': 'cornmeal'}, {'frequency': 'c', 'synset': 'corset.n.01', 'synonyms': ['corset', 'girdle'], 'id': 314, 'def': "a woman's close-fitting foundation garment", 'name': 'corset'}, {'frequency': 'c', 'synset': 'costume.n.04', 'synonyms': ['costume'], 'id': 315, 'def': 'the attire characteristic of a country or a time or a social class', 'name': 'costume'}, {'frequency': 'r', 'synset': 'cougar.n.01', 'synonyms': ['cougar', 'puma', 'catamount', 'mountain_lion', 'panther'], 'id': 316, 'def': 'large American feline resembling a lion', 'name': 'cougar'}, {'frequency': 'r', 'synset': 'coverall.n.01', 'synonyms': ['coverall'], 'id': 317, 'def': 'a loose-fitting protective garment that is worn over other clothing', 'name': 'coverall'}, {'frequency': 'c', 'synset': 'cowbell.n.01', 'synonyms': ['cowbell'], 'id': 318, 'def': 'a bell hung around the neck of cow so that the cow can be easily located', 'name': 'cowbell'}, {'frequency': 'f', 'synset': 'cowboy_hat.n.01', 'synonyms': ['cowboy_hat', 'ten-gallon_hat'], 'id': 319, 'def': 'a hat with a wide brim and a soft crown; worn by American ranch hands', 'name': 'cowboy_hat'}, {'frequency': 'c', 'synset': 'crab.n.01', 'synonyms': ['crab_(animal)'], 'id': 320, 'def': 'decapod having eyes on short stalks and a broad flattened shell and pincers', 'name': 'crab_(animal)'}, {'frequency': 'r', 'synset': 'crab.n.05', 'synonyms': ['crabmeat'], 'id': 321, 'def': 'the edible flesh of any of various crabs', 'name': 'crabmeat'}, {'frequency': 'c', 'synset': 'cracker.n.01', 'synonyms': ['cracker'], 'id': 322, 'def': 'a thin crisp wafer', 'name': 'cracker'}, {'frequency': 'r', 'synset': 'crape.n.01', 'synonyms': ['crape', 'crepe', 'French_pancake'], 'id': 323, 'def': 'small very thin pancake', 'name': 'crape'}, {'frequency': 'f', 'synset': 'crate.n.01', 'synonyms': ['crate'], 'id': 324, 'def': 'a rugged box (usually made of wood); used for shipping', 'name': 'crate'}, {'frequency': 'c', 'synset': 'crayon.n.01', 'synonyms': ['crayon', 'wax_crayon'], 'id': 325, 'def': 'writing or drawing implement made of a colored stick of composition wax', 'name': 'crayon'}, {'frequency': 'r', 'synset': 'cream_pitcher.n.01', 'synonyms': ['cream_pitcher'], 'id': 326, 'def': 'a small pitcher for serving cream', 'name': 'cream_pitcher'}, {'frequency': 'c', 'synset': 'crescent_roll.n.01', 'synonyms': ['crescent_roll', 'croissant'], 'id': 327, 'def': 'very rich flaky crescent-shaped roll', 'name': 'crescent_roll'}, {'frequency': 'c', 'synset': 'crib.n.01', 'synonyms': ['crib', 'cot'], 'id': 328, 'def': 'baby bed with high sides made of slats', 'name': 'crib'}, {'frequency': 'c', 'synset': 'crock.n.03', 'synonyms': ['crock_pot', 'earthenware_jar'], 'id': 329, 'def': 'an earthen jar (made of baked clay) or a modern electric crockpot', 'name': 'crock_pot'}, {'frequency': 'f', 'synset': 'crossbar.n.01', 'synonyms': ['crossbar'], 'id': 330, 'def': 'a horizontal bar that goes across something', 'name': 'crossbar'}, {'frequency': 'r', 'synset': 'crouton.n.01', 'synonyms': 
['crouton'], 'id': 331, 'def': 'a small piece of toasted or fried bread; served in soup or salads', 'name': 'crouton'}, {'frequency': 'c', 'synset': 'crow.n.01', 'synonyms': ['crow'], 'id': 332, 'def': 'black birds having a raucous call', 'name': 'crow'}, {'frequency': 'r', 'synset': 'crowbar.n.01', 'synonyms': ['crowbar', 'wrecking_bar', 'pry_bar'], 'id': 333, 'def': 'a heavy iron lever with one end forged into a wedge', 'name': 'crowbar'}, {'frequency': 'c', 'synset': 'crown.n.04', 'synonyms': ['crown'], 'id': 334, 'def': 'an ornamental jeweled headdress signifying sovereignty', 'name': 'crown'}, {'frequency': 'c', 'synset': 'crucifix.n.01', 'synonyms': ['crucifix'], 'id': 335, 'def': 'representation of the cross on which Jesus died', 'name': 'crucifix'}, {'frequency': 'c', 'synset': 'cruise_ship.n.01', 'synonyms': ['cruise_ship', 'cruise_liner'], 'id': 336, 'def': 'a passenger ship used commercially for pleasure cruises', 'name': 'cruise_ship'}, {'frequency': 'c', 'synset': 'cruiser.n.01', 'synonyms': ['police_cruiser', 'patrol_car', 'police_car', 'squad_car'], 'id': 337, 'def': 'a car in which policemen cruise the streets', 'name': 'police_cruiser'}, {'frequency': 'f', 'synset': 'crumb.n.03', 'synonyms': ['crumb'], 'id': 338, 'def': 'small piece of e.g. bread or cake', 'name': 'crumb'}, {'frequency': 'c', 'synset': 'crutch.n.01', 'synonyms': ['crutch'], 'id': 339, 'def': 'a wooden or metal staff that fits under the armpit and reaches to the ground', 'name': 'crutch'}, {'frequency': 'c', 'synset': 'cub.n.03', 'synonyms': ['cub_(animal)'], 'id': 340, 'def': 'the young of certain carnivorous mammals such as the bear or wolf or lion', 'name': 'cub_(animal)'}, {'frequency': 'c', 'synset': 'cube.n.05', 'synonyms': ['cube', 'square_block'], 'id': 341, 'def': 'a block in the (approximate) shape of a cube', 'name': 'cube'}, {'frequency': 'f', 'synset': 'cucumber.n.02', 'synonyms': ['cucumber', 'cuke'], 'id': 342, 'def': 'cylindrical green fruit with thin green rind and white flesh eaten as a vegetable', 'name': 'cucumber'}, {'frequency': 'c', 'synset': 'cufflink.n.01', 'synonyms': ['cufflink'], 'id': 343, 'def': 'jewelry consisting of linked buttons used to fasten the cuffs of a shirt', 'name': 'cufflink'}, {'frequency': 'f', 'synset': 'cup.n.01', 'synonyms': ['cup'], 'id': 344, 'def': 'a small open container usually used for drinking; usually has a handle', 'name': 'cup'}, {'frequency': 'c', 'synset': 'cup.n.08', 'synonyms': ['trophy_cup'], 'id': 345, 'def': 'a metal award or cup-shaped vessel with handles that is awarded as a trophy to a competition winner', 'name': 'trophy_cup'}, {'frequency': 'f', 'synset': 'cupboard.n.01', 'synonyms': ['cupboard', 'closet'], 'id': 346, 'def': 'a small room (or recess) or cabinet used for storage space', 'name': 'cupboard'}, {'frequency': 'f', 'synset': 'cupcake.n.01', 'synonyms': ['cupcake'], 'id': 347, 'def': 'small cake baked in a muffin tin', 'name': 'cupcake'}, {'frequency': 'r', 'synset': 'curler.n.01', 'synonyms': ['hair_curler', 'hair_roller', 'hair_crimper'], 'id': 348, 'def': 'a cylindrical tube around which the hair is wound to curl it', 'name': 'hair_curler'}, {'frequency': 'r', 'synset': 'curling_iron.n.01', 'synonyms': ['curling_iron'], 'id': 349, 'def': 'a cylindrical home appliance that heats hair that has been curled around it', 'name': 'curling_iron'}, {'frequency': 'f', 'synset': 'curtain.n.01', 'synonyms': ['curtain', 'drapery'], 'id': 350, 'def': 'hanging cloth used as a blind (especially for a window)', 'name': 'curtain'}, 
{'frequency': 'f', 'synset': 'cushion.n.03', 'synonyms': ['cushion'], 'id': 351, 'def': 'a soft bag filled with air or padding such as feathers or foam rubber', 'name': 'cushion'}, {'frequency': 'r', 'synset': 'cylinder.n.04', 'synonyms': ['cylinder'], 'id': 352, 'def': 'a cylindrical container', 'name': 'cylinder'}, {'frequency': 'r', 'synset': 'cymbal.n.01', 'synonyms': ['cymbal'], 'id': 353, 'def': 'a percussion instrument consisting of a concave brass disk', 'name': 'cymbal'}, {'frequency': 'r', 'synset': 'dagger.n.01', 'synonyms': ['dagger'], 'id': 354, 'def': 'a short knife with a pointed blade used for piercing or stabbing', 'name': 'dagger'}, {'frequency': 'r', 'synset': 'dalmatian.n.02', 'synonyms': ['dalmatian'], 'id': 355, 'def': 'a large breed having a smooth white coat with black or brown spots', 'name': 'dalmatian'}, {'frequency': 'c', 'synset': 'dartboard.n.01', 'synonyms': ['dartboard'], 'id': 356, 'def': 'a circular board of wood or cork used as the target in the game of darts', 'name': 'dartboard'}, {'frequency': 'r', 'synset': 'date.n.08', 'synonyms': ['date_(fruit)'], 'id': 357, 'def': 'sweet edible fruit of the date palm with a single long woody seed', 'name': 'date_(fruit)'}, {'frequency': 'f', 'synset': 'deck_chair.n.01', 'synonyms': ['deck_chair', 'beach_chair'], 'id': 358, 'def': 'a folding chair for use outdoors; a wooden frame supports a length of canvas', 'name': 'deck_chair'}, {'frequency': 'c', 'synset': 'deer.n.01', 'synonyms': ['deer', 'cervid'], 'id': 359, 'def': "distinguished from Bovidae by the male's having solid deciduous antlers", 'name': 'deer'}, {'frequency': 'c', 'synset': 'dental_floss.n.01', 'synonyms': ['dental_floss', 'floss'], 'id': 360, 'def': 'a soft thread for cleaning the spaces between the teeth', 'name': 'dental_floss'}, {'frequency': 'f', 'synset': 'desk.n.01', 'synonyms': ['desk'], 'id': 361, 'def': 'a piece of furniture with a writing surface and usually drawers or other compartments', 'name': 'desk'}, {'frequency': 'r', 'synset': 'detergent.n.01', 'synonyms': ['detergent'], 'id': 362, 'def': 'a surface-active chemical widely used in industry and laundering', 'name': 'detergent'}, {'frequency': 'c', 'synset': 'diaper.n.01', 'synonyms': ['diaper'], 'id': 363, 'def': 'garment consisting of a folded cloth drawn up between the legs and fastened at the waist', 'name': 'diaper'}, {'frequency': 'r', 'synset': 'diary.n.01', 'synonyms': ['diary', 'journal'], 'id': 364, 'def': 'yearly planner book', 'name': 'diary'}, {'frequency': 'r', 'synset': 'die.n.01', 'synonyms': ['die', 'dice'], 'id': 365, 'def': 'a small cube with 1 to 6 spots on the six faces; used in gambling', 'name': 'die'}, {'frequency': 'r', 'synset': 'dinghy.n.01', 'synonyms': ['dinghy', 'dory', 'rowboat'], 'id': 366, 'def': 'a small boat of shallow draft with seats and oars with which it is propelled', 'name': 'dinghy'}, {'frequency': 'f', 'synset': 'dining_table.n.01', 'synonyms': ['dining_table'], 'id': 367, 'def': 'a table at which meals are served', 'name': 'dining_table'}, {'frequency': 'r', 'synset': 'dinner_jacket.n.01', 'synonyms': ['tux', 'tuxedo'], 'id': 368, 'def': 'semiformal evening dress for men', 'name': 'tux'}, {'frequency': 'f', 'synset': 'dish.n.01', 'synonyms': ['dish'], 'id': 369, 'def': 'a piece of dishware normally used as a container for holding or serving food', 'name': 'dish'}, {'frequency': 'c', 'synset': 'dish.n.05', 'synonyms': ['dish_antenna'], 'id': 370, 'def': 'directional antenna consisting of a parabolic reflector', 'name': 'dish_antenna'}, 
{'frequency': 'c', 'synset': 'dishrag.n.01', 'synonyms': ['dishrag', 'dishcloth'], 'id': 371, 'def': 'a cloth for washing dishes or cleaning in general', 'name': 'dishrag'}, {'frequency': 'f', 'synset': 'dishtowel.n.01', 'synonyms': ['dishtowel', 'tea_towel'], 'id': 372, 'def': 'a towel for drying dishes', 'name': 'dishtowel'}, {'frequency': 'f', 'synset': 'dishwasher.n.01', 'synonyms': ['dishwasher', 'dishwashing_machine'], 'id': 373, 'def': 'a machine for washing dishes', 'name': 'dishwasher'}, {'frequency': 'r', 'synset': 'dishwasher_detergent.n.01', 'synonyms': ['dishwasher_detergent', 'dishwashing_detergent', 'dishwashing_liquid', 'dishsoap'], 'id': 374, 'def': 'dishsoap or dish detergent designed for use in dishwashers', 'name': 'dishwasher_detergent'}, {'frequency': 'f', 'synset': 'dispenser.n.01', 'synonyms': ['dispenser'], 'id': 375, 'def': 'a container so designed that the contents can be used in prescribed amounts', 'name': 'dispenser'}, {'frequency': 'r', 'synset': 'diving_board.n.01', 'synonyms': ['diving_board'], 'id': 376, 'def': 'a springboard from which swimmers can dive', 'name': 'diving_board'}, {'frequency': 'f', 'synset': 'dixie_cup.n.01', 'synonyms': ['Dixie_cup', 'paper_cup'], 'id': 377, 'def': 'a disposable cup made of paper; for holding drinks', 'name': 'Dixie_cup'}, {'frequency': 'f', 'synset': 'dog.n.01', 'synonyms': ['dog'], 'id': 378, 'def': 'a common domesticated dog', 'name': 'dog'}, {'frequency': 'f', 'synset': 'dog_collar.n.01', 'synonyms': ['dog_collar'], 'id': 379, 'def': 'a collar for a dog', 'name': 'dog_collar'}, {'frequency': 'f', 'synset': 'doll.n.01', 'synonyms': ['doll'], 'id': 380, 'def': 'a toy replica of a HUMAN (NOT AN ANIMAL)', 'name': 'doll'}, {'frequency': 'r', 'synset': 'dollar.n.02', 'synonyms': ['dollar', 'dollar_bill', 'one_dollar_bill'], 'id': 381, 'def': 'a piece of paper money worth one dollar', 'name': 'dollar'}, {'frequency': 'r', 'synset': 'dollhouse.n.01', 'synonyms': ['dollhouse', "doll's_house"], 'id': 382, 'def': "a house so small that it is likened to a child's plaything", 'name': 'dollhouse'}, {'frequency': 'c', 'synset': 'dolphin.n.02', 'synonyms': ['dolphin'], 'id': 383, 'def': 'any of various small toothed whales with a beaklike snout; larger than porpoises', 'name': 'dolphin'}, {'frequency': 'c', 'synset': 'domestic_ass.n.01', 'synonyms': ['domestic_ass', 'donkey'], 'id': 384, 'def': 'domestic beast of burden descended from the African wild ass; patient but stubborn', 'name': 'domestic_ass'}, {'frequency': 'f', 'synset': 'doorknob.n.01', 'synonyms': ['doorknob', 'doorhandle'], 'id': 385, 'def': "a knob used to open a door (often called `doorhandle' in Great Britain)", 'name': 'doorknob'}, {'frequency': 'c', 'synset': 'doormat.n.02', 'synonyms': ['doormat', 'welcome_mat'], 'id': 386, 'def': 'a mat placed outside an exterior door for wiping the shoes before entering', 'name': 'doormat'}, {'frequency': 'f', 'synset': 'doughnut.n.02', 'synonyms': ['doughnut', 'donut'], 'id': 387, 'def': 'a small ring-shaped friedcake', 'name': 'doughnut'}, {'frequency': 'r', 'synset': 'dove.n.01', 'synonyms': ['dove'], 'id': 388, 'def': 'any of numerous small pigeons', 'name': 'dove'}, {'frequency': 'r', 'synset': 'dragonfly.n.01', 'synonyms': ['dragonfly'], 'id': 389, 'def': 'slender-bodied non-stinging insect having iridescent wings that are outspread at rest', 'name': 'dragonfly'}, {'frequency': 'f', 'synset': 'drawer.n.01', 'synonyms': ['drawer'], 'id': 390, 'def': 'a boxlike container in a piece of furniture; made so as to slide in and 
out', 'name': 'drawer'}, {'frequency': 'c', 'synset': 'drawers.n.01', 'synonyms': ['underdrawers', 'boxers', 'boxershorts'], 'id': 391, 'def': 'underpants worn by men', 'name': 'underdrawers'}, {'frequency': 'f', 'synset': 'dress.n.01', 'synonyms': ['dress', 'frock'], 'id': 392, 'def': 'a one-piece garment for a woman; has skirt and bodice', 'name': 'dress'}, {'frequency': 'c', 'synset': 'dress_hat.n.01', 'synonyms': ['dress_hat', 'high_hat', 'opera_hat', 'silk_hat', 'top_hat'], 'id': 393, 'def': "a man's hat with a tall crown; usually covered with silk or with beaver fur", 'name': 'dress_hat'}, {'frequency': 'f', 'synset': 'dress_suit.n.01', 'synonyms': ['dress_suit'], 'id': 394, 'def': 'formalwear consisting of full evening dress for men', 'name': 'dress_suit'}, {'frequency': 'f', 'synset': 'dresser.n.05', 'synonyms': ['dresser'], 'id': 395, 'def': 'a cabinet with shelves', 'name': 'dresser'}, {'frequency': 'c', 'synset': 'drill.n.01', 'synonyms': ['drill'], 'id': 396, 'def': 'a tool with a sharp rotating point for making holes in hard materials', 'name': 'drill'}, {'frequency': 'r', 'synset': 'drone.n.04', 'synonyms': ['drone'], 'id': 397, 'def': 'an aircraft without a pilot that is operated by remote control', 'name': 'drone'}, {'frequency': 'r', 'synset': 'dropper.n.01', 'synonyms': ['dropper', 'eye_dropper'], 'id': 398, 'def': 'pipet consisting of a small tube with a vacuum bulb at one end for drawing liquid in and releasing it a drop at a time', 'name': 'dropper'}, {'frequency': 'c', 'synset': 'drum.n.01', 'synonyms': ['drum_(musical_instrument)'], 'id': 399, 'def': 'a musical percussion instrument; usually consists of a hollow cylinder with a membrane stretched across each end', 'name': 'drum_(musical_instrument)'}, {'frequency': 'r', 'synset': 'drumstick.n.02', 'synonyms': ['drumstick'], 'id': 400, 'def': 'a stick used for playing a drum', 'name': 'drumstick'}, {'frequency': 'f', 'synset': 'duck.n.01', 'synonyms': ['duck'], 'id': 401, 'def': 'small web-footed broad-billed swimming bird', 'name': 'duck'}, {'frequency': 'c', 'synset': 'duckling.n.02', 'synonyms': ['duckling'], 'id': 402, 'def': 'young duck', 'name': 'duckling'}, {'frequency': 'c', 'synset': 'duct_tape.n.01', 'synonyms': ['duct_tape'], 'id': 403, 'def': 'a wide silvery adhesive tape', 'name': 'duct_tape'}, {'frequency': 'f', 'synset': 'duffel_bag.n.01', 'synonyms': ['duffel_bag', 'duffle_bag', 'duffel', 'duffle'], 'id': 404, 'def': 'a large cylindrical bag of heavy cloth (does not include suitcases)', 'name': 'duffel_bag'}, {'frequency': 'r', 'synset': 'dumbbell.n.01', 'synonyms': ['dumbbell'], 'id': 405, 'def': 'an exercising weight with two ball-like ends connected by a short handle', 'name': 'dumbbell'}, {'frequency': 'c', 'synset': 'dumpster.n.01', 'synonyms': ['dumpster'], 'id': 406, 'def': 'a container designed to receive and transport and dump waste', 'name': 'dumpster'}, {'frequency': 'r', 'synset': 'dustpan.n.02', 'synonyms': ['dustpan'], 'id': 407, 'def': 'a short-handled receptacle into which dust can be swept', 'name': 'dustpan'}, {'frequency': 'c', 'synset': 'eagle.n.01', 'synonyms': ['eagle'], 'id': 408, 'def': 'large birds of prey noted for their broad wings and strong soaring flight', 'name': 'eagle'}, {'frequency': 'f', 'synset': 'earphone.n.01', 'synonyms': ['earphone', 'earpiece', 'headphone'], 'id': 409, 'def': 'device for listening to audio that is held over or inserted into the ear', 'name': 'earphone'}, {'frequency': 'r', 'synset': 'earplug.n.01', 'synonyms': ['earplug'], 'id': 410, 'def': 'a 
soft plug that is inserted into the ear canal to block sound', 'name': 'earplug'}, {'frequency': 'f', 'synset': 'earring.n.01', 'synonyms': ['earring'], 'id': 411, 'def': 'jewelry to ornament the ear', 'name': 'earring'}, {'frequency': 'c', 'synset': 'easel.n.01', 'synonyms': ['easel'], 'id': 412, 'def': "an upright tripod for displaying something (usually an artist's canvas)", 'name': 'easel'}, {'frequency': 'r', 'synset': 'eclair.n.01', 'synonyms': ['eclair'], 'id': 413, 'def': 'oblong cream puff', 'name': 'eclair'}, {'frequency': 'r', 'synset': 'eel.n.01', 'synonyms': ['eel'], 'id': 414, 'def': 'an elongate fish with fatty flesh', 'name': 'eel'}, {'frequency': 'f', 'synset': 'egg.n.02', 'synonyms': ['egg', 'eggs'], 'id': 415, 'def': 'oval reproductive body of a fowl (especially a hen) used as food', 'name': 'egg'}, {'frequency': 'r', 'synset': 'egg_roll.n.01', 'synonyms': ['egg_roll', 'spring_roll'], 'id': 416, 'def': 'minced vegetables and meat wrapped in a pancake and fried', 'name': 'egg_roll'}, {'frequency': 'c', 'synset': 'egg_yolk.n.01', 'synonyms': ['egg_yolk', 'yolk_(egg)'], 'id': 417, 'def': 'the yellow spherical part of an egg', 'name': 'egg_yolk'}, {'frequency': 'c', 'synset': 'eggbeater.n.02', 'synonyms': ['eggbeater', 'eggwhisk'], 'id': 418, 'def': 'a mixer for beating eggs or whipping cream', 'name': 'eggbeater'}, {'frequency': 'c', 'synset': 'eggplant.n.01', 'synonyms': ['eggplant', 'aubergine'], 'id': 419, 'def': 'egg-shaped vegetable having a shiny skin typically dark purple', 'name': 'eggplant'}, {'frequency': 'r', 'synset': 'electric_chair.n.01', 'synonyms': ['electric_chair'], 'id': 420, 'def': 'a chair-shaped instrument of execution by electrocution', 'name': 'electric_chair'}, {'frequency': 'f', 'synset': 'electric_refrigerator.n.01', 'synonyms': ['refrigerator'], 'id': 421, 'def': 'a refrigerator in which the coolant is pumped around by an electric motor', 'name': 'refrigerator'}, {'frequency': 'f', 'synset': 'elephant.n.01', 'synonyms': ['elephant'], 'id': 422, 'def': 'a common elephant', 'name': 'elephant'}, {'frequency': 'c', 'synset': 'elk.n.01', 'synonyms': ['elk', 'moose'], 'id': 423, 'def': 'large northern deer with enormous flattened antlers in the male', 'name': 'elk'}, {'frequency': 'c', 'synset': 'envelope.n.01', 'synonyms': ['envelope'], 'id': 424, 'def': 'a flat (usually rectangular) container for a letter, thin package, etc.', 'name': 'envelope'}, {'frequency': 'c', 'synset': 'eraser.n.01', 'synonyms': ['eraser'], 'id': 425, 'def': 'an implement used to erase something', 'name': 'eraser'}, {'frequency': 'r', 'synset': 'escargot.n.01', 'synonyms': ['escargot'], 'id': 426, 'def': 'edible snail usually served in the shell with a sauce of melted butter and garlic', 'name': 'escargot'}, {'frequency': 'r', 'synset': 'eyepatch.n.01', 'synonyms': ['eyepatch'], 'id': 427, 'def': 'a protective cloth covering for an injured eye', 'name': 'eyepatch'}, {'frequency': 'r', 'synset': 'falcon.n.01', 'synonyms': ['falcon'], 'id': 428, 'def': 'birds of prey having long pointed powerful wings adapted for swift flight', 'name': 'falcon'}, {'frequency': 'f', 'synset': 'fan.n.01', 'synonyms': ['fan'], 'id': 429, 'def': 'a device for creating a current of air by movement of a surface or surfaces', 'name': 'fan'}, {'frequency': 'f', 'synset': 'faucet.n.01', 'synonyms': ['faucet', 'spigot', 'tap'], 'id': 430, 'def': 'a regulator for controlling the flow of a liquid from a reservoir', 'name': 'faucet'}, {'frequency': 'r', 'synset': 'fedora.n.01', 'synonyms': ['fedora'], 'id': 
431, 'def': 'a hat made of felt with a creased crown', 'name': 'fedora'}, {'frequency': 'r', 'synset': 'ferret.n.02', 'synonyms': ['ferret'], 'id': 432, 'def': 'domesticated albino variety of the European polecat bred for hunting rats and rabbits', 'name': 'ferret'}, {'frequency': 'c', 'synset': 'ferris_wheel.n.01', 'synonyms': ['Ferris_wheel'], 'id': 433, 'def': 'a large wheel with suspended seats that remain upright as the wheel rotates', 'name': 'Ferris_wheel'}, {'frequency': 'c', 'synset': 'ferry.n.01', 'synonyms': ['ferry', 'ferryboat'], 'id': 434, 'def': 'a boat that transports people or vehicles across a body of water and operates on a regular schedule', 'name': 'ferry'}, {'frequency': 'r', 'synset': 'fig.n.04', 'synonyms': ['fig_(fruit)'], 'id': 435, 'def': 'fleshy sweet pear-shaped yellowish or purple fruit eaten fresh or preserved or dried', 'name': 'fig_(fruit)'}, {'frequency': 'c', 'synset': 'fighter.n.02', 'synonyms': ['fighter_jet', 'fighter_aircraft', 'attack_aircraft'], 'id': 436, 'def': 'a high-speed military or naval airplane designed to destroy enemy targets', 'name': 'fighter_jet'}, {'frequency': 'f', 'synset': 'figurine.n.01', 'synonyms': ['figurine'], 'id': 437, 'def': 'a small carved or molded figure', 'name': 'figurine'}, {'frequency': 'c', 'synset': 'file.n.03', 'synonyms': ['file_cabinet', 'filing_cabinet'], 'id': 438, 'def': 'office furniture consisting of a container for keeping papers in order', 'name': 'file_cabinet'}, {'frequency': 'r', 'synset': 'file.n.04', 'synonyms': ['file_(tool)'], 'id': 439, 'def': 'a steel hand tool with small sharp teeth on some or all of its surfaces; used for smoothing wood or metal', 'name': 'file_(tool)'}, {'frequency': 'f', 'synset': 'fire_alarm.n.02', 'synonyms': ['fire_alarm', 'smoke_alarm'], 'id': 440, 'def': 'an alarm that is tripped off by fire or smoke', 'name': 'fire_alarm'}, {'frequency': 'f', 'synset': 'fire_engine.n.01', 'synonyms': ['fire_engine', 'fire_truck'], 'id': 441, 'def': 'large trucks that carry firefighters and equipment to the site of a fire', 'name': 'fire_engine'}, {'frequency': 'f', 'synset': 'fire_extinguisher.n.01', 'synonyms': ['fire_extinguisher', 'extinguisher'], 'id': 442, 'def': 'a manually operated device for extinguishing small fires', 'name': 'fire_extinguisher'}, {'frequency': 'c', 'synset': 'fire_hose.n.01', 'synonyms': ['fire_hose'], 'id': 443, 'def': 'a large hose that carries water from a fire hydrant to the site of the fire', 'name': 'fire_hose'}, {'frequency': 'f', 'synset': 'fireplace.n.01', 'synonyms': ['fireplace'], 'id': 444, 'def': 'an open recess in a wall at the base of a chimney where a fire can be built', 'name': 'fireplace'}, {'frequency': 'f', 'synset': 'fireplug.n.01', 'synonyms': ['fireplug', 'fire_hydrant', 'hydrant'], 'id': 445, 'def': 'an upright hydrant for drawing water to use in fighting a fire', 'name': 'fireplug'}, {'frequency': 'r', 'synset': 'first-aid_kit.n.01', 'synonyms': ['first-aid_kit'], 'id': 446, 'def': 'kit consisting of a set of bandages and medicines for giving first aid', 'name': 'first-aid_kit'}, {'frequency': 'f', 'synset': 'fish.n.01', 'synonyms': ['fish'], 'id': 447, 'def': 'any of various mostly cold-blooded aquatic vertebrates usually having scales and breathing through gills', 'name': 'fish'}, {'frequency': 'c', 'synset': 'fish.n.02', 'synonyms': ['fish_(food)'], 'id': 448, 'def': 'the flesh of fish used as food', 'name': 'fish_(food)'}, {'frequency': 'r', 'synset': 'fishbowl.n.02', 'synonyms': ['fishbowl', 'goldfish_bowl'], 'id': 449, 'def': 'a 
transparent bowl in which small fish are kept', 'name': 'fishbowl'}, {'frequency': 'c', 'synset': 'fishing_rod.n.01', 'synonyms': ['fishing_rod', 'fishing_pole'], 'id': 450, 'def': 'a rod that is used in fishing to extend the fishing line', 'name': 'fishing_rod'}, {'frequency': 'f', 'synset': 'flag.n.01', 'synonyms': ['flag'], 'id': 451, 'def': 'emblem usually consisting of a rectangular piece of cloth of distinctive design (do not include pole)', 'name': 'flag'}, {'frequency': 'f', 'synset': 'flagpole.n.02', 'synonyms': ['flagpole', 'flagstaff'], 'id': 452, 'def': 'a tall staff or pole on which a flag is raised', 'name': 'flagpole'}, {'frequency': 'c', 'synset': 'flamingo.n.01', 'synonyms': ['flamingo'], 'id': 453, 'def': 'large pink web-footed bird with down-bent bill', 'name': 'flamingo'}, {'frequency': 'c', 'synset': 'flannel.n.01', 'synonyms': ['flannel'], 'id': 454, 'def': 'a soft light woolen fabric; used for clothing', 'name': 'flannel'}, {'frequency': 'c', 'synset': 'flap.n.01', 'synonyms': ['flap'], 'id': 455, 'def': 'any broad thin covering attached at one edge, such as a mud flap next to a wheel or a flap on an airplane wing', 'name': 'flap'}, {'frequency': 'r', 'synset': 'flash.n.10', 'synonyms': ['flash', 'flashbulb'], 'id': 456, 'def': 'a lamp for providing momentary light to take a photograph', 'name': 'flash'}, {'frequency': 'c', 'synset': 'flashlight.n.01', 'synonyms': ['flashlight', 'torch'], 'id': 457, 'def': 'a small portable battery-powered electric lamp', 'name': 'flashlight'}, {'frequency': 'r', 'synset': 'fleece.n.03', 'synonyms': ['fleece'], 'id': 458, 'def': 'a soft bulky fabric with deep pile; used chiefly for clothing', 'name': 'fleece'}, {'frequency': 'f', 'synset': 'flip-flop.n.02', 'synonyms': ['flip-flop_(sandal)'], 'id': 459, 'def': 'a backless sandal held to the foot by a thong between two toes', 'name': 'flip-flop_(sandal)'}, {'frequency': 'c', 'synset': 'flipper.n.01', 'synonyms': ['flipper_(footwear)', 'fin_(footwear)'], 'id': 460, 'def': 'a shoe to aid a person in swimming', 'name': 'flipper_(footwear)'}, {'frequency': 'f', 'synset': 'flower_arrangement.n.01', 'synonyms': ['flower_arrangement', 'floral_arrangement'], 'id': 461, 'def': 'a decorative arrangement of flowers', 'name': 'flower_arrangement'}, {'frequency': 'c', 'synset': 'flute.n.02', 'synonyms': ['flute_glass', 'champagne_flute'], 'id': 462, 'def': 'a tall narrow wineglass', 'name': 'flute_glass'}, {'frequency': 'c', 'synset': 'foal.n.01', 'synonyms': ['foal'], 'id': 463, 'def': 'a young horse', 'name': 'foal'}, {'frequency': 'c', 'synset': 'folding_chair.n.01', 'synonyms': ['folding_chair'], 'id': 464, 'def': 'a chair that can be folded flat for storage', 'name': 'folding_chair'}, {'frequency': 'c', 'synset': 'food_processor.n.01', 'synonyms': ['food_processor'], 'id': 465, 'def': 'a kitchen appliance for shredding, blending, chopping, or slicing food', 'name': 'food_processor'}, {'frequency': 'c', 'synset': 'football.n.02', 'synonyms': ['football_(American)'], 'id': 466, 'def': 'the inflated oblong ball used in playing American football', 'name': 'football_(American)'}, {'frequency': 'r', 'synset': 'football_helmet.n.01', 'synonyms': ['football_helmet'], 'id': 467, 'def': 'a padded helmet with a face mask to protect the head of football players', 'name': 'football_helmet'}, {'frequency': 'c', 'synset': 'footstool.n.01', 'synonyms': ['footstool', 'footrest'], 'id': 468, 'def': 'a low seat or a stool to rest the feet of a seated person', 'name': 'footstool'}, {'frequency': 'f', 'synset': 
'fork.n.01', 'synonyms': ['fork'], 'id': 469, 'def': 'cutlery used for serving and eating food', 'name': 'fork'}, {'frequency': 'c', 'synset': 'forklift.n.01', 'synonyms': ['forklift'], 'id': 470, 'def': 'an industrial vehicle with a power operated fork in front that can be inserted under loads to lift and move them', 'name': 'forklift'}, {'frequency': 'c', 'synset': 'freight_car.n.01', 'synonyms': ['freight_car'], 'id': 471, 'def': 'a railway car that carries freight', 'name': 'freight_car'}, {'frequency': 'c', 'synset': 'french_toast.n.01', 'synonyms': ['French_toast'], 'id': 472, 'def': 'bread slice dipped in egg and milk and fried', 'name': 'French_toast'}, {'frequency': 'c', 'synset': 'freshener.n.01', 'synonyms': ['freshener', 'air_freshener'], 'id': 473, 'def': 'anything that freshens air by removing or covering odor', 'name': 'freshener'}, {'frequency': 'f', 'synset': 'frisbee.n.01', 'synonyms': ['frisbee'], 'id': 474, 'def': 'a light, plastic disk propelled with a flip of the wrist for recreation or competition', 'name': 'frisbee'}, {'frequency': 'c', 'synset': 'frog.n.01', 'synonyms': ['frog', 'toad', 'toad_frog'], 'id': 475, 'def': 'a tailless stout-bodied amphibians with long hind limbs for leaping', 'name': 'frog'}, {'frequency': 'c', 'synset': 'fruit_juice.n.01', 'synonyms': ['fruit_juice'], 'id': 476, 'def': 'drink produced by squeezing or crushing fruit', 'name': 'fruit_juice'}, {'frequency': 'f', 'synset': 'frying_pan.n.01', 'synonyms': ['frying_pan', 'frypan', 'skillet'], 'id': 477, 'def': 'a pan used for frying foods', 'name': 'frying_pan'}, {'frequency': 'r', 'synset': 'fudge.n.01', 'synonyms': ['fudge'], 'id': 478, 'def': 'soft creamy candy', 'name': 'fudge'}, {'frequency': 'r', 'synset': 'funnel.n.02', 'synonyms': ['funnel'], 'id': 479, 'def': 'a cone-shaped utensil used to channel a substance into a container with a small mouth', 'name': 'funnel'}, {'frequency': 'r', 'synset': 'futon.n.01', 'synonyms': ['futon'], 'id': 480, 'def': 'a pad that is used for sleeping on the floor or on a raised frame', 'name': 'futon'}, {'frequency': 'r', 'synset': 'gag.n.02', 'synonyms': ['gag', 'muzzle'], 'id': 481, 'def': "restraint put into a person's mouth to prevent speaking or shouting", 'name': 'gag'}, {'frequency': 'r', 'synset': 'garbage.n.03', 'synonyms': ['garbage'], 'id': 482, 'def': 'a receptacle where waste can be discarded', 'name': 'garbage'}, {'frequency': 'c', 'synset': 'garbage_truck.n.01', 'synonyms': ['garbage_truck'], 'id': 483, 'def': 'a truck for collecting domestic refuse', 'name': 'garbage_truck'}, {'frequency': 'c', 'synset': 'garden_hose.n.01', 'synonyms': ['garden_hose'], 'id': 484, 'def': 'a hose used for watering a lawn or garden', 'name': 'garden_hose'}, {'frequency': 'c', 'synset': 'gargle.n.01', 'synonyms': ['gargle', 'mouthwash'], 'id': 485, 'def': 'a medicated solution used for gargling and rinsing the mouth', 'name': 'gargle'}, {'frequency': 'r', 'synset': 'gargoyle.n.02', 'synonyms': ['gargoyle'], 'id': 486, 'def': 'an ornament consisting of a grotesquely carved figure of a person or animal', 'name': 'gargoyle'}, {'frequency': 'c', 'synset': 'garlic.n.02', 'synonyms': ['garlic', 'ail'], 'id': 487, 'def': 'aromatic bulb used as seasoning', 'name': 'garlic'}, {'frequency': 'r', 'synset': 'gasmask.n.01', 'synonyms': ['gasmask', 'respirator', 'gas_helmet'], 'id': 488, 'def': 'a protective face mask with a filter', 'name': 'gasmask'}, {'frequency': 'c', 'synset': 'gazelle.n.01', 'synonyms': ['gazelle'], 'id': 489, 'def': 'small swift graceful antelope of 
Africa and Asia having lustrous eyes', 'name': 'gazelle'}, {'frequency': 'c', 'synset': 'gelatin.n.02', 'synonyms': ['gelatin', 'jelly'], 'id': 490, 'def': 'an edible jelly made with gelatin and used as a dessert or salad base or a coating for foods', 'name': 'gelatin'}, {'frequency': 'r', 'synset': 'gem.n.02', 'synonyms': ['gemstone'], 'id': 491, 'def': 'a crystalline rock that can be cut and polished for jewelry', 'name': 'gemstone'}, {'frequency': 'r', 'synset': 'generator.n.02', 'synonyms': ['generator'], 'id': 492, 'def': 'engine that converts mechanical energy into electrical energy by electromagnetic induction', 'name': 'generator'}, {'frequency': 'c', 'synset': 'giant_panda.n.01', 'synonyms': ['giant_panda', 'panda', 'panda_bear'], 'id': 493, 'def': 'large black-and-white herbivorous mammal of bamboo forests of China and Tibet', 'name': 'giant_panda'}, {'frequency': 'c', 'synset': 'gift_wrap.n.01', 'synonyms': ['gift_wrap'], 'id': 494, 'def': 'attractive wrapping paper suitable for wrapping gifts', 'name': 'gift_wrap'}, {'frequency': 'c', 'synset': 'ginger.n.03', 'synonyms': ['ginger', 'gingerroot'], 'id': 495, 'def': 'the root of the common ginger plant; used fresh as a seasoning', 'name': 'ginger'}, {'frequency': 'f', 'synset': 'giraffe.n.01', 'synonyms': ['giraffe'], 'id': 496, 'def': 'tall animal having a spotted coat and small horns and very long neck and legs', 'name': 'giraffe'}, {'frequency': 'c', 'synset': 'girdle.n.02', 'synonyms': ['cincture', 'sash', 'waistband', 'waistcloth'], 'id': 497, 'def': 'a band of material around the waist that strengthens a skirt or trousers', 'name': 'cincture'}, {'frequency': 'f', 'synset': 'glass.n.02', 'synonyms': ['glass_(drink_container)', 'drinking_glass'], 'id': 498, 'def': 'a container for holding liquids while drinking', 'name': 'glass_(drink_container)'}, {'frequency': 'c', 'synset': 'globe.n.03', 'synonyms': ['globe'], 'id': 499, 'def': 'a sphere on which a map (especially of the earth) is represented', 'name': 'globe'}, {'frequency': 'f', 'synset': 'glove.n.02', 'synonyms': ['glove'], 'id': 500, 'def': 'handwear covering the hand', 'name': 'glove'}, {'frequency': 'c', 'synset': 'goat.n.01', 'synonyms': ['goat'], 'id': 501, 'def': 'a common goat', 'name': 'goat'}, {'frequency': 'f', 'synset': 'goggles.n.01', 'synonyms': ['goggles'], 'id': 502, 'def': 'tight-fitting spectacles worn to protect the eyes', 'name': 'goggles'}, {'frequency': 'r', 'synset': 'goldfish.n.01', 'synonyms': ['goldfish'], 'id': 503, 'def': 'small golden or orange-red freshwater fishes used as pond or aquarium pets', 'name': 'goldfish'}, {'frequency': 'c', 'synset': 'golf_club.n.02', 'synonyms': ['golf_club', 'golf-club'], 'id': 504, 'def': 'golf equipment used by a golfer to hit a golf ball', 'name': 'golf_club'}, {'frequency': 'c', 'synset': 'golfcart.n.01', 'synonyms': ['golfcart'], 'id': 505, 'def': 'a small motor vehicle in which golfers can ride between shots', 'name': 'golfcart'}, {'frequency': 'r', 'synset': 'gondola.n.02', 'synonyms': ['gondola_(boat)'], 'id': 506, 'def': 'long narrow flat-bottomed boat propelled by sculling; traditionally used on canals of Venice', 'name': 'gondola_(boat)'}, {'frequency': 'c', 'synset': 'goose.n.01', 'synonyms': ['goose'], 'id': 507, 'def': 'loud, web-footed long-necked aquatic birds usually larger than ducks', 'name': 'goose'}, {'frequency': 'r', 'synset': 'gorilla.n.01', 'synonyms': ['gorilla'], 'id': 508, 'def': 'largest ape', 'name': 'gorilla'}, {'frequency': 'r', 'synset': 'gourd.n.02', 'synonyms': ['gourd'], 
'id': 509, 'def': 'any of numerous inedible fruits with hard rinds', 'name': 'gourd'}, {'frequency': 'f', 'synset': 'grape.n.01', 'synonyms': ['grape'], 'id': 510, 'def': 'any of various juicy fruit with green or purple skins; grow in clusters', 'name': 'grape'}, {'frequency': 'c', 'synset': 'grater.n.01', 'synonyms': ['grater'], 'id': 511, 'def': 'utensil with sharp perforations for shredding foods (as vegetables or cheese)', 'name': 'grater'}, {'frequency': 'c', 'synset': 'gravestone.n.01', 'synonyms': ['gravestone', 'headstone', 'tombstone'], 'id': 512, 'def': 'a stone that is used to mark a grave', 'name': 'gravestone'}, {'frequency': 'r', 'synset': 'gravy_boat.n.01', 'synonyms': ['gravy_boat', 'gravy_holder'], 'id': 513, 'def': 'a dish (often boat-shaped) for serving gravy or sauce', 'name': 'gravy_boat'}, {'frequency': 'f', 'synset': 'green_bean.n.02', 'synonyms': ['green_bean'], 'id': 514, 'def': 'a common bean plant cultivated for its slender green edible pods', 'name': 'green_bean'}, {'frequency': 'f', 'synset': 'green_onion.n.01', 'synonyms': ['green_onion', 'spring_onion', 'scallion'], 'id': 515, 'def': 'a young onion before the bulb has enlarged', 'name': 'green_onion'}, {'frequency': 'r', 'synset': 'griddle.n.01', 'synonyms': ['griddle'], 'id': 516, 'def': 'cooking utensil consisting of a flat heated surface on which food is cooked', 'name': 'griddle'}, {'frequency': 'f', 'synset': 'grill.n.02', 'synonyms': ['grill', 'grille', 'grillwork', 'radiator_grille'], 'id': 517, 'def': 'a framework of metal bars used as a partition or a grate', 'name': 'grill'}, {'frequency': 'r', 'synset': 'grits.n.01', 'synonyms': ['grits', 'hominy_grits'], 'id': 518, 'def': 'coarsely ground corn boiled as a breakfast dish', 'name': 'grits'}, {'frequency': 'c', 'synset': 'grizzly.n.01', 'synonyms': ['grizzly', 'grizzly_bear'], 'id': 519, 'def': 'powerful brownish-yellow bear of the uplands of western North America', 'name': 'grizzly'}, {'frequency': 'c', 'synset': 'grocery_bag.n.01', 'synonyms': ['grocery_bag'], 'id': 520, 'def': "a sack for holding customer's groceries", 'name': 'grocery_bag'}, {'frequency': 'f', 'synset': 'guitar.n.01', 'synonyms': ['guitar'], 'id': 521, 'def': 'a stringed instrument usually having six strings; played by strumming or plucking', 'name': 'guitar'}, {'frequency': 'c', 'synset': 'gull.n.02', 'synonyms': ['gull', 'seagull'], 'id': 522, 'def': 'mostly white aquatic bird having long pointed wings and short legs', 'name': 'gull'}, {'frequency': 'c', 'synset': 'gun.n.01', 'synonyms': ['gun'], 'id': 523, 'def': 'a weapon that discharges a bullet at high velocity from a metal tube', 'name': 'gun'}, {'frequency': 'f', 'synset': 'hairbrush.n.01', 'synonyms': ['hairbrush'], 'id': 524, 'def': "a brush used to groom a person's hair", 'name': 'hairbrush'}, {'frequency': 'c', 'synset': 'hairnet.n.01', 'synonyms': ['hairnet'], 'id': 525, 'def': 'a small net that someone wears over their hair to keep it in place', 'name': 'hairnet'}, {'frequency': 'c', 'synset': 'hairpin.n.01', 'synonyms': ['hairpin'], 'id': 526, 'def': "a double pronged pin used to hold women's hair in place", 'name': 'hairpin'}, {'frequency': 'r', 'synset': 'halter.n.03', 'synonyms': ['halter_top'], 'id': 527, 'def': "a woman's top that fastens behind the back and neck leaving the back and arms uncovered", 'name': 'halter_top'}, {'frequency': 'f', 'synset': 'ham.n.01', 'synonyms': ['ham', 'jambon', 'gammon'], 'id': 528, 'def': 'meat cut from the thigh of a hog (usually smoked)', 'name': 'ham'}, {'frequency': 'c', 
'synset': 'hamburger.n.01', 'synonyms': ['hamburger', 'beefburger', 'burger'], 'id': 529, 'def': 'a sandwich consisting of a patty of minced beef served on a bun', 'name': 'hamburger'}, {'frequency': 'c', 'synset': 'hammer.n.02', 'synonyms': ['hammer'], 'id': 530, 'def': 'a hand tool with a heavy head and a handle; used to deliver an impulsive force by striking', 'name': 'hammer'}, {'frequency': 'c', 'synset': 'hammock.n.02', 'synonyms': ['hammock'], 'id': 531, 'def': 'a hanging bed of canvas or rope netting (usually suspended between two trees)', 'name': 'hammock'}, {'frequency': 'r', 'synset': 'hamper.n.02', 'synonyms': ['hamper'], 'id': 532, 'def': 'a basket usually with a cover', 'name': 'hamper'}, {'frequency': 'c', 'synset': 'hamster.n.01', 'synonyms': ['hamster'], 'id': 533, 'def': 'short-tailed burrowing rodent with large cheek pouches', 'name': 'hamster'}, {'frequency': 'f', 'synset': 'hand_blower.n.01', 'synonyms': ['hair_dryer'], 'id': 534, 'def': 'a hand-held electric blower that can blow warm air onto the hair', 'name': 'hair_dryer'}, {'frequency': 'r', 'synset': 'hand_glass.n.01', 'synonyms': ['hand_glass', 'hand_mirror'], 'id': 535, 'def': 'a mirror intended to be held in the hand', 'name': 'hand_glass'}, {'frequency': 'f', 'synset': 'hand_towel.n.01', 'synonyms': ['hand_towel', 'face_towel'], 'id': 536, 'def': 'a small towel used to dry the hands or face', 'name': 'hand_towel'}, {'frequency': 'c', 'synset': 'handcart.n.01', 'synonyms': ['handcart', 'pushcart', 'hand_truck'], 'id': 537, 'def': 'wheeled vehicle that can be pushed by a person', 'name': 'handcart'}, {'frequency': 'r', 'synset': 'handcuff.n.01', 'synonyms': ['handcuff'], 'id': 538, 'def': 'shackle that consists of a metal loop that can be locked around the wrist', 'name': 'handcuff'}, {'frequency': 'c', 'synset': 'handkerchief.n.01', 'synonyms': ['handkerchief'], 'id': 539, 'def': 'a square piece of cloth used for wiping the eyes or nose or as a costume accessory', 'name': 'handkerchief'}, {'frequency': 'f', 'synset': 'handle.n.01', 'synonyms': ['handle', 'grip', 'handgrip'], 'id': 540, 'def': 'the appendage to an object that is designed to be held in order to use or move it', 'name': 'handle'}, {'frequency': 'r', 'synset': 'handsaw.n.01', 'synonyms': ['handsaw', "carpenter's_saw"], 'id': 541, 'def': 'a saw used with one hand for cutting wood', 'name': 'handsaw'}, {'frequency': 'r', 'synset': 'hardback.n.01', 'synonyms': ['hardback_book', 'hardcover_book'], 'id': 542, 'def': 'a book with cardboard or cloth or leather covers', 'name': 'hardback_book'}, {'frequency': 'r', 'synset': 'harmonium.n.01', 'synonyms': ['harmonium', 'organ_(musical_instrument)', 'reed_organ_(musical_instrument)'], 'id': 543, 'def': 'a free-reed instrument in which air is forced through the reeds by bellows', 'name': 'harmonium'}, {'frequency': 'f', 'synset': 'hat.n.01', 'synonyms': ['hat'], 'id': 544, 'def': 'headwear that protects the head from bad weather, sun, or worn for fashion', 'name': 'hat'}, {'frequency': 'r', 'synset': 'hatbox.n.01', 'synonyms': ['hatbox'], 'id': 545, 'def': 'a round piece of luggage for carrying hats', 'name': 'hatbox'}, {'frequency': 'c', 'synset': 'head_covering.n.01', 'synonyms': ['veil'], 'id': 546, 'def': 'a garment that covers the head OR face', 'name': 'veil'}, {'frequency': 'f', 'synset': 'headband.n.01', 'synonyms': ['headband'], 'id': 547, 'def': 'a band worn around or over the head', 'name': 'headband'}, {'frequency': 'f', 'synset': 'headboard.n.01', 'synonyms': ['headboard'], 'id': 548, 'def': 'a 
vertical board or panel forming the head of a bedstead', 'name': 'headboard'}, {'frequency': 'f', 'synset': 'headlight.n.01', 'synonyms': ['headlight', 'headlamp'], 'id': 549, 'def': 'a powerful light with reflector; attached to the front of an automobile or locomotive', 'name': 'headlight'}, {'frequency': 'c', 'synset': 'headscarf.n.01', 'synonyms': ['headscarf'], 'id': 550, 'def': 'a kerchief worn over the head and tied under the chin', 'name': 'headscarf'}, {'frequency': 'r', 'synset': 'headset.n.01', 'synonyms': ['headset'], 'id': 551, 'def': 'receiver consisting of a pair of headphones', 'name': 'headset'}, {'frequency': 'c', 'synset': 'headstall.n.01', 'synonyms': ['headstall_(for_horses)', 'headpiece_(for_horses)'], 'id': 552, 'def': "the band that is the part of a bridle that fits around a horse's head", 'name': 'headstall_(for_horses)'}, {'frequency': 'c', 'synset': 'heart.n.02', 'synonyms': ['heart'], 'id': 553, 'def': 'a muscular organ; its contractions move the blood through the body', 'name': 'heart'}, {'frequency': 'c', 'synset': 'heater.n.01', 'synonyms': ['heater', 'warmer'], 'id': 554, 'def': 'device that heats water or supplies warmth to a room', 'name': 'heater'}, {'frequency': 'c', 'synset': 'helicopter.n.01', 'synonyms': ['helicopter'], 'id': 555, 'def': 'an aircraft without wings that obtains its lift from the rotation of overhead blades', 'name': 'helicopter'}, {'frequency': 'f', 'synset': 'helmet.n.02', 'synonyms': ['helmet'], 'id': 556, 'def': 'a protective headgear made of hard material to resist blows', 'name': 'helmet'}, {'frequency': 'r', 'synset': 'heron.n.02', 'synonyms': ['heron'], 'id': 557, 'def': 'grey or white wading bird with long neck and long legs and (usually) long bill', 'name': 'heron'}, {'frequency': 'c', 'synset': 'highchair.n.01', 'synonyms': ['highchair', 'feeding_chair'], 'id': 558, 'def': 'a chair for feeding a very young child', 'name': 'highchair'}, {'frequency': 'f', 'synset': 'hinge.n.01', 'synonyms': ['hinge'], 'id': 559, 'def': 'a joint that holds two parts together so that one can swing relative to the other', 'name': 'hinge'}, {'frequency': 'r', 'synset': 'hippopotamus.n.01', 'synonyms': ['hippopotamus'], 'id': 560, 'def': 'massive thick-skinned animal living in or around rivers of tropical Africa', 'name': 'hippopotamus'}, {'frequency': 'r', 'synset': 'hockey_stick.n.01', 'synonyms': ['hockey_stick'], 'id': 561, 'def': 'sports implement consisting of a stick used by hockey players to move the puck', 'name': 'hockey_stick'}, {'frequency': 'c', 'synset': 'hog.n.03', 'synonyms': ['hog', 'pig'], 'id': 562, 'def': 'domestic swine', 'name': 'hog'}, {'frequency': 'f', 'synset': 'home_plate.n.01', 'synonyms': ['home_plate_(baseball)', 'home_base_(baseball)'], 'id': 563, 'def': '(baseball) a rubber slab where the batter stands; it must be touched by a base runner in order to score', 'name': 'home_plate_(baseball)'}, {'frequency': 'c', 'synset': 'honey.n.01', 'synonyms': ['honey'], 'id': 564, 'def': 'a sweet yellow liquid produced by bees', 'name': 'honey'}, {'frequency': 'f', 'synset': 'hood.n.06', 'synonyms': ['fume_hood', 'exhaust_hood'], 'id': 565, 'def': 'metal covering leading to a vent that exhausts smoke or fumes', 'name': 'fume_hood'}, {'frequency': 'f', 'synset': 'hook.n.05', 'synonyms': ['hook'], 'id': 566, 'def': 'a curved or bent implement for suspending or pulling something', 'name': 'hook'}, {'frequency': 'r', 'synset': 'hookah.n.01', 'synonyms': ['hookah', 'narghile', 'nargileh', 'sheesha', 'shisha', 'water_pipe'], 'id': 567, 
'def': 'a tobacco pipe with a long flexible tube connected to a container where the smoke is cooled by passing through water', 'name': 'hookah'}, {'frequency': 'r', 'synset': 'hornet.n.01', 'synonyms': ['hornet'], 'id': 568, 'def': 'large stinging wasp', 'name': 'hornet'}, {'frequency': 'f', 'synset': 'horse.n.01', 'synonyms': ['horse'], 'id': 569, 'def': 'a common horse', 'name': 'horse'}, {'frequency': 'f', 'synset': 'hose.n.03', 'synonyms': ['hose', 'hosepipe'], 'id': 570, 'def': 'a flexible pipe for conveying a liquid or gas', 'name': 'hose'}, {'frequency': 'r', 'synset': 'hot-air_balloon.n.01', 'synonyms': ['hot-air_balloon'], 'id': 571, 'def': 'balloon for travel through the air in a basket suspended below a large bag of heated air', 'name': 'hot-air_balloon'}, {'frequency': 'r', 'synset': 'hot_plate.n.01', 'synonyms': ['hotplate'], 'id': 572, 'def': 'a portable electric appliance for heating or cooking or keeping food warm', 'name': 'hotplate'}, {'frequency': 'c', 'synset': 'hot_sauce.n.01', 'synonyms': ['hot_sauce'], 'id': 573, 'def': 'a pungent peppery sauce', 'name': 'hot_sauce'}, {'frequency': 'r', 'synset': 'hourglass.n.01', 'synonyms': ['hourglass'], 'id': 574, 'def': 'a sandglass timer that runs for sixty minutes', 'name': 'hourglass'}, {'frequency': 'r', 'synset': 'houseboat.n.01', 'synonyms': ['houseboat'], 'id': 575, 'def': 'a barge that is designed and equipped for use as a dwelling', 'name': 'houseboat'}, {'frequency': 'c', 'synset': 'hummingbird.n.01', 'synonyms': ['hummingbird'], 'id': 576, 'def': 'tiny American bird having brilliant iridescent plumage and long slender bills', 'name': 'hummingbird'}, {'frequency': 'r', 'synset': 'hummus.n.01', 'synonyms': ['hummus', 'humus', 'hommos', 'hoummos', 'humous'], 'id': 577, 'def': 'a thick spread made from mashed chickpeas', 'name': 'hummus'}, {'frequency': 'f', 'synset': 'ice_bear.n.01', 'synonyms': ['polar_bear'], 'id': 578, 'def': 'white bear of Arctic regions', 'name': 'polar_bear'}, {'frequency': 'c', 'synset': 'ice_cream.n.01', 'synonyms': ['icecream'], 'id': 579, 'def': 'frozen dessert containing cream and sugar and flavoring', 'name': 'icecream'}, {'frequency': 'r', 'synset': 'ice_lolly.n.01', 'synonyms': ['popsicle'], 'id': 580, 'def': 'ice cream or water ice on a small wooden stick', 'name': 'popsicle'}, {'frequency': 'c', 'synset': 'ice_maker.n.01', 'synonyms': ['ice_maker'], 'id': 581, 'def': 'an appliance included in some electric refrigerators for making ice cubes', 'name': 'ice_maker'}, {'frequency': 'r', 'synset': 'ice_pack.n.01', 'synonyms': ['ice_pack', 'ice_bag'], 'id': 582, 'def': 'a waterproof bag filled with ice: applied to the body (especially the head) to cool or reduce swelling', 'name': 'ice_pack'}, {'frequency': 'r', 'synset': 'ice_skate.n.01', 'synonyms': ['ice_skate'], 'id': 583, 'def': 'skate consisting of a boot with a steel blade fitted to the sole', 'name': 'ice_skate'}, {'frequency': 'c', 'synset': 'igniter.n.01', 'synonyms': ['igniter', 'ignitor', 'lighter'], 'id': 584, 'def': 'a substance or device used to start a fire', 'name': 'igniter'}, {'frequency': 'r', 'synset': 'inhaler.n.01', 'synonyms': ['inhaler', 'inhalator'], 'id': 585, 'def': 'a dispenser that produces a chemical vapor to be inhaled through mouth or nose', 'name': 'inhaler'}, {'frequency': 'f', 'synset': 'ipod.n.01', 'synonyms': ['iPod'], 'id': 586, 'def': 'a pocket-sized device used to play music files', 'name': 'iPod'}, {'frequency': 'c', 'synset': 'iron.n.04', 'synonyms': ['iron_(for_clothing)', 
'smoothing_iron_(for_clothing)'], 'id': 587, 'def': 'home appliance consisting of a flat metal base that is heated and used to smooth cloth', 'name': 'iron_(for_clothing)'}, {'frequency': 'c', 'synset': 'ironing_board.n.01', 'synonyms': ['ironing_board'], 'id': 588, 'def': 'narrow padded board on collapsible supports; used for ironing clothes', 'name': 'ironing_board'}, {'frequency': 'f', 'synset': 'jacket.n.01', 'synonyms': ['jacket'], 'id': 589, 'def': 'a waist-length coat', 'name': 'jacket'}, {'frequency': 'c', 'synset': 'jam.n.01', 'synonyms': ['jam'], 'id': 590, 'def': 'preserve of crushed fruit', 'name': 'jam'}, {'frequency': 'f', 'synset': 'jar.n.01', 'synonyms': ['jar'], 'id': 591, 'def': 'a vessel (usually cylindrical) with a wide mouth and without handles', 'name': 'jar'}, {'frequency': 'f', 'synset': 'jean.n.01', 'synonyms': ['jean', 'blue_jean', 'denim'], 'id': 592, 'def': '(usually plural) close-fitting trousers of heavy denim for manual work or casual wear', 'name': 'jean'}, {'frequency': 'c', 'synset': 'jeep.n.01', 'synonyms': ['jeep', 'landrover'], 'id': 593, 'def': 'a car suitable for traveling over rough terrain', 'name': 'jeep'}, {'frequency': 'r', 'synset': 'jelly_bean.n.01', 'synonyms': ['jelly_bean', 'jelly_egg'], 'id': 594, 'def': 'sugar-glazed jellied candy', 'name': 'jelly_bean'}, {'frequency': 'f', 'synset': 'jersey.n.03', 'synonyms': ['jersey', 'T-shirt', 'tee_shirt'], 'id': 595, 'def': 'a close-fitting pullover shirt', 'name': 'jersey'}, {'frequency': 'c', 'synset': 'jet.n.01', 'synonyms': ['jet_plane', 'jet-propelled_plane'], 'id': 596, 'def': 'an airplane powered by one or more jet engines', 'name': 'jet_plane'}, {'frequency': 'r', 'synset': 'jewel.n.01', 'synonyms': ['jewel', 'gem', 'precious_stone'], 'id': 597, 'def': 'a precious or semiprecious stone incorporated into a piece of jewelry', 'name': 'jewel'}, {'frequency': 'c', 'synset': 'jewelry.n.01', 'synonyms': ['jewelry', 'jewellery'], 'id': 598, 'def': 'an adornment (as a bracelet or ring or necklace) made of precious metals and set with gems (or imitation gems)', 'name': 'jewelry'}, {'frequency': 'r', 'synset': 'joystick.n.02', 'synonyms': ['joystick'], 'id': 599, 'def': 'a control device for computers consisting of a vertical handle that can move freely in two directions', 'name': 'joystick'}, {'frequency': 'c', 'synset': 'jump_suit.n.01', 'synonyms': ['jumpsuit'], 'id': 600, 'def': "one-piece garment fashioned after a parachutist's uniform", 'name': 'jumpsuit'}, {'frequency': 'c', 'synset': 'kayak.n.01', 'synonyms': ['kayak'], 'id': 601, 'def': 'a small canoe consisting of a light frame made watertight with animal skins', 'name': 'kayak'}, {'frequency': 'r', 'synset': 'keg.n.02', 'synonyms': ['keg'], 'id': 602, 'def': 'small cask or barrel', 'name': 'keg'}, {'frequency': 'r', 'synset': 'kennel.n.01', 'synonyms': ['kennel', 'doghouse'], 'id': 603, 'def': 'outbuilding that serves as a shelter for a dog', 'name': 'kennel'}, {'frequency': 'c', 'synset': 'kettle.n.01', 'synonyms': ['kettle', 'boiler'], 'id': 604, 'def': 'a metal pot for stewing or boiling; usually has a lid', 'name': 'kettle'}, {'frequency': 'f', 'synset': 'key.n.01', 'synonyms': ['key'], 'id': 605, 'def': 'metal instrument used to unlock a lock', 'name': 'key'}, {'frequency': 'r', 'synset': 'keycard.n.01', 'synonyms': ['keycard'], 'id': 606, 'def': 'a plastic card used to gain access typically to a door', 'name': 'keycard'}, {'frequency': 'c', 'synset': 'kilt.n.01', 'synonyms': ['kilt'], 'id': 607, 'def': 'a knee-length pleated tartan 
skirt worn by men as part of the traditional dress in the Highlands of northern Scotland', 'name': 'kilt'}, {'frequency': 'c', 'synset': 'kimono.n.01', 'synonyms': ['kimono'], 'id': 608, 'def': 'a loose robe; imitated from robes originally worn by Japanese', 'name': 'kimono'}, {'frequency': 'f', 'synset': 'kitchen_sink.n.01', 'synonyms': ['kitchen_sink'], 'id': 609, 'def': 'a sink in a kitchen', 'name': 'kitchen_sink'}, {'frequency': 'r', 'synset': 'kitchen_table.n.01', 'synonyms': ['kitchen_table'], 'id': 610, 'def': 'a table in the kitchen', 'name': 'kitchen_table'}, {'frequency': 'f', 'synset': 'kite.n.03', 'synonyms': ['kite'], 'id': 611, 'def': 'plaything consisting of a light frame covered with tissue paper; flown in wind at end of a string', 'name': 'kite'}, {'frequency': 'c', 'synset': 'kitten.n.01', 'synonyms': ['kitten', 'kitty'], 'id': 612, 'def': 'young domestic cat', 'name': 'kitten'}, {'frequency': 'c', 'synset': 'kiwi.n.03', 'synonyms': ['kiwi_fruit'], 'id': 613, 'def': 'fuzzy brown egg-shaped fruit with slightly tart green flesh', 'name': 'kiwi_fruit'}, {'frequency': 'f', 'synset': 'knee_pad.n.01', 'synonyms': ['knee_pad'], 'id': 614, 'def': 'protective garment consisting of a pad worn by football or baseball or hockey players', 'name': 'knee_pad'}, {'frequency': 'f', 'synset': 'knife.n.01', 'synonyms': ['knife'], 'id': 615, 'def': 'tool with a blade and point used as a cutting instrument', 'name': 'knife'}, {'frequency': 'r', 'synset': 'knitting_needle.n.01', 'synonyms': ['knitting_needle'], 'id': 616, 'def': 'needle consisting of a slender rod with pointed ends; usually used in pairs', 'name': 'knitting_needle'}, {'frequency': 'f', 'synset': 'knob.n.02', 'synonyms': ['knob'], 'id': 617, 'def': 'a round handle often found on a door', 'name': 'knob'}, {'frequency': 'r', 'synset': 'knocker.n.05', 'synonyms': ['knocker_(on_a_door)', 'doorknocker'], 'id': 618, 'def': 'a device (usually metal and ornamental) attached by a hinge to a door', 'name': 'knocker_(on_a_door)'}, {'frequency': 'r', 'synset': 'koala.n.01', 'synonyms': ['koala', 'koala_bear'], 'id': 619, 'def': 'sluggish tailless Australian marsupial with grey furry ears and coat', 'name': 'koala'}, {'frequency': 'r', 'synset': 'lab_coat.n.01', 'synonyms': ['lab_coat', 'laboratory_coat'], 'id': 620, 'def': 'a light coat worn to protect clothing from substances used while working in a laboratory', 'name': 'lab_coat'}, {'frequency': 'f', 'synset': 'ladder.n.01', 'synonyms': ['ladder'], 'id': 621, 'def': 'steps consisting of two parallel members connected by rungs', 'name': 'ladder'}, {'frequency': 'c', 'synset': 'ladle.n.01', 'synonyms': ['ladle'], 'id': 622, 'def': 'a spoon-shaped vessel with a long handle frequently used to transfer liquids', 'name': 'ladle'}, {'frequency': 'c', 'synset': 'ladybug.n.01', 'synonyms': ['ladybug', 'ladybeetle', 'ladybird_beetle'], 'id': 623, 'def': 'small round bright-colored and spotted beetle, typically red and black', 'name': 'ladybug'}, {'frequency': 'f', 'synset': 'lamb.n.01', 'synonyms': ['lamb_(animal)'], 'id': 624, 'def': 'young sheep', 'name': 'lamb_(animal)'}, {'frequency': 'r', 'synset': 'lamb_chop.n.01', 'synonyms': ['lamb-chop', 'lambchop'], 'id': 625, 'def': 'chop cut from a lamb', 'name': 'lamb-chop'}, {'frequency': 'f', 'synset': 'lamp.n.02', 'synonyms': ['lamp'], 'id': 626, 'def': 'a piece of furniture holding one or more electric light bulbs', 'name': 'lamp'}, {'frequency': 'f', 'synset': 'lamppost.n.01', 'synonyms': ['lamppost'], 'id': 627, 'def': 'a metal post supporting 
an outdoor lamp (such as a streetlight)', 'name': 'lamppost'}, {'frequency': 'f', 'synset': 'lampshade.n.01', 'synonyms': ['lampshade'], 'id': 628, 'def': 'a protective ornamental shade used to screen a light bulb from direct view', 'name': 'lampshade'}, {'frequency': 'c', 'synset': 'lantern.n.01', 'synonyms': ['lantern'], 'id': 629, 'def': 'light in a transparent protective case', 'name': 'lantern'}, {'frequency': 'f', 'synset': 'lanyard.n.02', 'synonyms': ['lanyard', 'laniard'], 'id': 630, 'def': 'a cord worn around the neck to hold a knife or whistle, etc.', 'name': 'lanyard'}, {'frequency': 'f', 'synset': 'laptop.n.01', 'synonyms': ['laptop_computer', 'notebook_computer'], 'id': 631, 'def': 'a portable computer small enough to use in your lap', 'name': 'laptop_computer'}, {'frequency': 'r', 'synset': 'lasagna.n.01', 'synonyms': ['lasagna', 'lasagne'], 'id': 632, 'def': 'baked dish of layers of lasagna pasta with sauce and cheese and meat or vegetables', 'name': 'lasagna'}, {'frequency': 'f', 'synset': 'latch.n.02', 'synonyms': ['latch'], 'id': 633, 'def': 'a bar that can be lowered or slid into a groove to fasten a door or gate', 'name': 'latch'}, {'frequency': 'r', 'synset': 'lawn_mower.n.01', 'synonyms': ['lawn_mower'], 'id': 634, 'def': 'garden tool for mowing grass on lawns', 'name': 'lawn_mower'}, {'frequency': 'r', 'synset': 'leather.n.01', 'synonyms': ['leather'], 'id': 635, 'def': 'an animal skin made smooth and flexible by removing the hair and then tanning', 'name': 'leather'}, {'frequency': 'c', 'synset': 'legging.n.01', 'synonyms': ['legging_(clothing)', 'leging_(clothing)', 'leg_covering'], 'id': 636, 'def': 'a garment covering the leg (usually extending from the knee to the ankle)', 'name': 'legging_(clothing)'}, {'frequency': 'c', 'synset': 'lego.n.01', 'synonyms': ['Lego', 'Lego_set'], 'id': 637, 'def': "a child's plastic construction set for making models from blocks", 'name': 'Lego'}, {'frequency': 'r', 'synset': 'legume.n.02', 'synonyms': ['legume'], 'id': 638, 'def': 'the fruit or seed of bean or pea plants', 'name': 'legume'}, {'frequency': 'f', 'synset': 'lemon.n.01', 'synonyms': ['lemon'], 'id': 639, 'def': 'yellow oval fruit with juicy acidic flesh', 'name': 'lemon'}, {'frequency': 'r', 'synset': 'lemonade.n.01', 'synonyms': ['lemonade'], 'id': 640, 'def': 'sweetened beverage of diluted lemon juice', 'name': 'lemonade'}, {'frequency': 'f', 'synset': 'lettuce.n.02', 'synonyms': ['lettuce'], 'id': 641, 'def': 'leafy plant commonly eaten in salad or on sandwiches', 'name': 'lettuce'}, {'frequency': 'f', 'synset': 'license_plate.n.01', 'synonyms': ['license_plate', 'numberplate'], 'id': 642, 'def': "a plate mounted on the front and back of car and bearing the car's registration number", 'name': 'license_plate'}, {'frequency': 'f', 'synset': 'life_buoy.n.01', 'synonyms': ['life_buoy', 'lifesaver', 'life_belt', 'life_ring'], 'id': 643, 'def': 'a ring-shaped life preserver used to prevent drowning (NOT a life-jacket or vest)', 'name': 'life_buoy'}, {'frequency': 'f', 'synset': 'life_jacket.n.01', 'synonyms': ['life_jacket', 'life_vest'], 'id': 644, 'def': 'life preserver consisting of a sleeveless jacket of buoyant or inflatable design', 'name': 'life_jacket'}, {'frequency': 'f', 'synset': 'light_bulb.n.01', 'synonyms': ['lightbulb'], 'id': 645, 'def': 'lightblub/source of light', 'name': 'lightbulb'}, {'frequency': 'r', 'synset': 'lightning_rod.n.02', 'synonyms': ['lightning_rod', 'lightning_conductor'], 'id': 646, 'def': 'a metallic conductor that is attached to a 
high point and leads to the ground', 'name': 'lightning_rod'}, {'frequency': 'f', 'synset': 'lime.n.06', 'synonyms': ['lime'], 'id': 647, 'def': 'the green acidic fruit of any of various lime trees', 'name': 'lime'}, {'frequency': 'r', 'synset': 'limousine.n.01', 'synonyms': ['limousine'], 'id': 648, 'def': 'long luxurious car; usually driven by a chauffeur', 'name': 'limousine'}, {'frequency': 'c', 'synset': 'lion.n.01', 'synonyms': ['lion'], 'id': 649, 'def': 'large gregarious predatory cat of Africa and India', 'name': 'lion'}, {'frequency': 'c', 'synset': 'lip_balm.n.01', 'synonyms': ['lip_balm'], 'id': 650, 'def': 'a balm applied to the lips', 'name': 'lip_balm'}, {'frequency': 'r', 'synset': 'liquor.n.01', 'synonyms': ['liquor', 'spirits', 'hard_liquor', 'liqueur', 'cordial'], 'id': 651, 'def': 'liquor or beer', 'name': 'liquor'}, {'frequency': 'c', 'synset': 'lizard.n.01', 'synonyms': ['lizard'], 'id': 652, 'def': 'a reptile with usually two pairs of legs and a tapering tail', 'name': 'lizard'}, {'frequency': 'f', 'synset': 'log.n.01', 'synonyms': ['log'], 'id': 653, 'def': 'a segment of the trunk of a tree when stripped of branches', 'name': 'log'}, {'frequency': 'c', 'synset': 'lollipop.n.02', 'synonyms': ['lollipop'], 'id': 654, 'def': 'hard candy on a stick', 'name': 'lollipop'}, {'frequency': 'f', 'synset': 'loudspeaker.n.01', 'synonyms': ['speaker_(stero_equipment)'], 'id': 655, 'def': 'electronic device that produces sound often as part of a stereo system', 'name': 'speaker_(stero_equipment)'}, {'frequency': 'c', 'synset': 'love_seat.n.01', 'synonyms': ['loveseat'], 'id': 656, 'def': 'small sofa that seats two people', 'name': 'loveseat'}, {'frequency': 'r', 'synset': 'machine_gun.n.01', 'synonyms': ['machine_gun'], 'id': 657, 'def': 'a rapidly firing automatic gun', 'name': 'machine_gun'}, {'frequency': 'f', 'synset': 'magazine.n.02', 'synonyms': ['magazine'], 'id': 658, 'def': 'a paperback periodic publication', 'name': 'magazine'}, {'frequency': 'f', 'synset': 'magnet.n.01', 'synonyms': ['magnet'], 'id': 659, 'def': 'a device that attracts iron and produces a magnetic field', 'name': 'magnet'}, {'frequency': 'c', 'synset': 'mail_slot.n.01', 'synonyms': ['mail_slot'], 'id': 660, 'def': 'a slot (usually in a door) through which mail can be delivered', 'name': 'mail_slot'}, {'frequency': 'f', 'synset': 'mailbox.n.01', 'synonyms': ['mailbox_(at_home)', 'letter_box_(at_home)'], 'id': 661, 'def': 'a private box for delivery of mail', 'name': 'mailbox_(at_home)'}, {'frequency': 'r', 'synset': 'mallard.n.01', 'synonyms': ['mallard'], 'id': 662, 'def': 'wild dabbling duck from which domestic ducks are descended', 'name': 'mallard'}, {'frequency': 'r', 'synset': 'mallet.n.01', 'synonyms': ['mallet'], 'id': 663, 'def': 'a sports implement with a long handle and a hammer-like head used to hit a ball', 'name': 'mallet'}, {'frequency': 'r', 'synset': 'mammoth.n.01', 'synonyms': ['mammoth'], 'id': 664, 'def': 'any of numerous extinct elephants widely distributed in the Pleistocene', 'name': 'mammoth'}, {'frequency': 'r', 'synset': 'manatee.n.01', 'synonyms': ['manatee'], 'id': 665, 'def': 'sirenian mammal of tropical coastal waters of America', 'name': 'manatee'}, {'frequency': 'c', 'synset': 'mandarin.n.05', 'synonyms': ['mandarin_orange'], 'id': 666, 'def': 'a somewhat flat reddish-orange loose skinned citrus of China', 'name': 'mandarin_orange'}, {'frequency': 'c', 'synset': 'manger.n.01', 'synonyms': ['manger', 'trough'], 'id': 667, 'def': 'a container (usually in a barn or stable) 
from which cattle or horses feed', 'name': 'manger'}, {'frequency': 'f', 'synset': 'manhole.n.01', 'synonyms': ['manhole'], 'id': 668, 'def': 'a hole (usually with a flush cover) through which a person can gain access to an underground structure', 'name': 'manhole'}, {'frequency': 'f', 'synset': 'map.n.01', 'synonyms': ['map'], 'id': 669, 'def': "a diagrammatic representation of the earth's surface (or part of it)", 'name': 'map'}, {'frequency': 'f', 'synset': 'marker.n.03', 'synonyms': ['marker'], 'id': 670, 'def': 'a writing implement for making a mark', 'name': 'marker'}, {'frequency': 'r', 'synset': 'martini.n.01', 'synonyms': ['martini'], 'id': 671, 'def': 'a cocktail made of gin (or vodka) with dry vermouth', 'name': 'martini'}, {'frequency': 'r', 'synset': 'mascot.n.01', 'synonyms': ['mascot'], 'id': 672, 'def': 'a person or animal that is adopted by a team or other group as a symbolic figure', 'name': 'mascot'}, {'frequency': 'c', 'synset': 'mashed_potato.n.01', 'synonyms': ['mashed_potato'], 'id': 673, 'def': 'potato that has been peeled and boiled and then mashed', 'name': 'mashed_potato'}, {'frequency': 'r', 'synset': 'masher.n.02', 'synonyms': ['masher'], 'id': 674, 'def': 'a kitchen utensil used for mashing (e.g. potatoes)', 'name': 'masher'}, {'frequency': 'f', 'synset': 'mask.n.04', 'synonyms': ['mask', 'facemask'], 'id': 675, 'def': 'a protective covering worn over the face', 'name': 'mask'}, {'frequency': 'f', 'synset': 'mast.n.01', 'synonyms': ['mast'], 'id': 676, 'def': 'a vertical spar for supporting sails', 'name': 'mast'}, {'frequency': 'c', 'synset': 'mat.n.03', 'synonyms': ['mat_(gym_equipment)', 'gym_mat'], 'id': 677, 'def': 'sports equipment consisting of a piece of thick padding on the floor for gymnastics', 'name': 'mat_(gym_equipment)'}, {'frequency': 'r', 'synset': 'matchbox.n.01', 'synonyms': ['matchbox'], 'id': 678, 'def': 'a box for holding matches', 'name': 'matchbox'}, {'frequency': 'f', 'synset': 'mattress.n.01', 'synonyms': ['mattress'], 'id': 679, 'def': 'a thick pad filled with resilient material used as a bed or part of a bed', 'name': 'mattress'}, {'frequency': 'c', 'synset': 'measuring_cup.n.01', 'synonyms': ['measuring_cup'], 'id': 680, 'def': 'graduated cup used to measure liquid or granular ingredients', 'name': 'measuring_cup'}, {'frequency': 'c', 'synset': 'measuring_stick.n.01', 'synonyms': ['measuring_stick', 'ruler_(measuring_stick)', 'measuring_rod'], 'id': 681, 'def': 'measuring instrument having a sequence of marks at regular intervals', 'name': 'measuring_stick'}, {'frequency': 'c', 'synset': 'meatball.n.01', 'synonyms': ['meatball'], 'id': 682, 'def': 'ground meat formed into a ball and fried or simmered in broth', 'name': 'meatball'}, {'frequency': 'c', 'synset': 'medicine.n.02', 'synonyms': ['medicine'], 'id': 683, 'def': 'something that treats or prevents or alleviates the symptoms of disease', 'name': 'medicine'}, {'frequency': 'c', 'synset': 'melon.n.01', 'synonyms': ['melon'], 'id': 684, 'def': 'fruit of the gourd family having a hard rind and sweet juicy flesh', 'name': 'melon'}, {'frequency': 'f', 'synset': 'microphone.n.01', 'synonyms': ['microphone'], 'id': 685, 'def': 'device for converting sound waves into electrical energy', 'name': 'microphone'}, {'frequency': 'r', 'synset': 'microscope.n.01', 'synonyms': ['microscope'], 'id': 686, 'def': 'magnifier of the image of small objects', 'name': 'microscope'}, {'frequency': 'f', 'synset': 'microwave.n.02', 'synonyms': ['microwave_oven'], 'id': 687, 'def': 'kitchen appliance that 
cooks food by passing an electromagnetic wave through it', 'name': 'microwave_oven'}, {'frequency': 'r', 'synset': 'milestone.n.01', 'synonyms': ['milestone', 'milepost'], 'id': 688, 'def': 'stone post at side of a road to show distances', 'name': 'milestone'}, {'frequency': 'f', 'synset': 'milk.n.01', 'synonyms': ['milk'], 'id': 689, 'def': 'a white nutritious liquid secreted by mammals and used as food by human beings', 'name': 'milk'}, {'frequency': 'r', 'synset': 'milk_can.n.01', 'synonyms': ['milk_can'], 'id': 690, 'def': 'can for transporting milk', 'name': 'milk_can'}, {'frequency': 'r', 'synset': 'milkshake.n.01', 'synonyms': ['milkshake'], 'id': 691, 'def': 'frothy drink of milk and flavoring and sometimes fruit or ice cream', 'name': 'milkshake'}, {'frequency': 'f', 'synset': 'minivan.n.01', 'synonyms': ['minivan'], 'id': 692, 'def': 'a small box-shaped passenger van', 'name': 'minivan'}, {'frequency': 'r', 'synset': 'mint.n.05', 'synonyms': ['mint_candy'], 'id': 693, 'def': 'a candy that is flavored with a mint oil', 'name': 'mint_candy'}, {'frequency': 'f', 'synset': 'mirror.n.01', 'synonyms': ['mirror'], 'id': 694, 'def': 'polished surface that forms images by reflecting light', 'name': 'mirror'}, {'frequency': 'c', 'synset': 'mitten.n.01', 'synonyms': ['mitten'], 'id': 695, 'def': 'glove that encases the thumb separately and the other four fingers together', 'name': 'mitten'}, {'frequency': 'c', 'synset': 'mixer.n.04', 'synonyms': ['mixer_(kitchen_tool)', 'stand_mixer'], 'id': 696, 'def': 'a kitchen utensil that is used for mixing foods', 'name': 'mixer_(kitchen_tool)'}, {'frequency': 'c', 'synset': 'money.n.03', 'synonyms': ['money'], 'id': 697, 'def': 'the official currency issued by a government or national bank', 'name': 'money'}, {'frequency': 'f', 'synset': 'monitor.n.04', 'synonyms': ['monitor_(computer_equipment) computer_monitor'], 'id': 698, 'def': 'a computer monitor', 'name': 'monitor_(computer_equipment) computer_monitor'}, {'frequency': 'c', 'synset': 'monkey.n.01', 'synonyms': ['monkey'], 'id': 699, 'def': 'any of various long-tailed primates', 'name': 'monkey'}, {'frequency': 'f', 'synset': 'motor.n.01', 'synonyms': ['motor'], 'id': 700, 'def': 'machine that converts other forms of energy into mechanical energy and so imparts motion', 'name': 'motor'}, {'frequency': 'f', 'synset': 'motor_scooter.n.01', 'synonyms': ['motor_scooter', 'scooter'], 'id': 701, 'def': 'a wheeled vehicle with small wheels and a low-powered engine', 'name': 'motor_scooter'}, {'frequency': 'r', 'synset': 'motor_vehicle.n.01', 'synonyms': ['motor_vehicle', 'automotive_vehicle'], 'id': 702, 'def': 'a self-propelled wheeled vehicle that does not run on rails', 'name': 'motor_vehicle'}, {'frequency': 'f', 'synset': 'motorcycle.n.01', 'synonyms': ['motorcycle'], 'id': 703, 'def': 'a motor vehicle with two wheels and a strong frame', 'name': 'motorcycle'}, {'frequency': 'f', 'synset': 'mound.n.01', 'synonyms': ['mound_(baseball)', "pitcher's_mound"], 'id': 704, 'def': '(baseball) the slight elevation on which the pitcher stands', 'name': 'mound_(baseball)'}, {'frequency': 'f', 'synset': 'mouse.n.04', 'synonyms': ['mouse_(computer_equipment)', 'computer_mouse'], 'id': 705, 'def': 'a computer input device that controls an on-screen pointer (does not include trackpads / touchpads)', 'name': 'mouse_(computer_equipment)'}, {'frequency': 'f', 'synset': 'mousepad.n.01', 'synonyms': ['mousepad'], 'id': 706, 'def': 'a small portable pad that provides an operating surface for a computer mouse', 'name': 
'mousepad'}, {'frequency': 'c', 'synset': 'muffin.n.01', 'synonyms': ['muffin'], 'id': 707, 'def': 'a sweet quick bread baked in a cup-shaped pan', 'name': 'muffin'}, {'frequency': 'f', 'synset': 'mug.n.04', 'synonyms': ['mug'], 'id': 708, 'def': 'with handle and usually cylindrical', 'name': 'mug'}, {'frequency': 'f', 'synset': 'mushroom.n.02', 'synonyms': ['mushroom'], 'id': 709, 'def': 'a common mushroom', 'name': 'mushroom'}, {'frequency': 'r', 'synset': 'music_stool.n.01', 'synonyms': ['music_stool', 'piano_stool'], 'id': 710, 'def': 'a stool for piano players; usually adjustable in height', 'name': 'music_stool'}, {'frequency': 'c', 'synset': 'musical_instrument.n.01', 'synonyms': ['musical_instrument', 'instrument_(musical)'], 'id': 711, 'def': 'any of various devices or contrivances that can be used to produce musical tones or sounds', 'name': 'musical_instrument'}, {'frequency': 'r', 'synset': 'nailfile.n.01', 'synonyms': ['nailfile'], 'id': 712, 'def': 'a small flat file for shaping the nails', 'name': 'nailfile'}, {'frequency': 'f', 'synset': 'napkin.n.01', 'synonyms': ['napkin', 'table_napkin', 'serviette'], 'id': 713, 'def': 'a small piece of table linen or paper that is used to wipe the mouth and to cover the lap in order to protect clothing', 'name': 'napkin'}, {'frequency': 'r', 'synset': 'neckerchief.n.01', 'synonyms': ['neckerchief'], 'id': 714, 'def': 'a kerchief worn around the neck', 'name': 'neckerchief'}, {'frequency': 'f', 'synset': 'necklace.n.01', 'synonyms': ['necklace'], 'id': 715, 'def': 'jewelry consisting of a cord or chain (often bearing gems) worn about the neck as an ornament', 'name': 'necklace'}, {'frequency': 'f', 'synset': 'necktie.n.01', 'synonyms': ['necktie', 'tie_(necktie)'], 'id': 716, 'def': 'neckwear consisting of a long narrow piece of material worn under a collar and tied in knot at the front', 'name': 'necktie'}, {'frequency': 'c', 'synset': 'needle.n.03', 'synonyms': ['needle'], 'id': 717, 'def': 'a sharp pointed implement (usually metal)', 'name': 'needle'}, {'frequency': 'c', 'synset': 'nest.n.01', 'synonyms': ['nest'], 'id': 718, 'def': 'a structure in which animals lay eggs or give birth to their young', 'name': 'nest'}, {'frequency': 'f', 'synset': 'newspaper.n.01', 'synonyms': ['newspaper', 'paper_(newspaper)'], 'id': 719, 'def': 'a daily or weekly publication on folded sheets containing news, articles, and advertisements', 'name': 'newspaper'}, {'frequency': 'c', 'synset': 'newsstand.n.01', 'synonyms': ['newsstand'], 'id': 720, 'def': 'a stall where newspapers and other periodicals are sold', 'name': 'newsstand'}, {'frequency': 'c', 'synset': 'nightwear.n.01', 'synonyms': ['nightshirt', 'nightwear', 'sleepwear', 'nightclothes'], 'id': 721, 'def': 'garments designed to be worn in bed', 'name': 'nightshirt'}, {'frequency': 'r', 'synset': 'nosebag.n.01', 'synonyms': ['nosebag_(for_animals)', 'feedbag'], 'id': 722, 'def': 'a canvas bag that is used to feed an animal (such as a horse); covers the muzzle and fastens at the top of the head', 'name': 'nosebag_(for_animals)'}, {'frequency': 'c', 'synset': 'noseband.n.01', 'synonyms': ['noseband_(for_animals)', 'nosepiece_(for_animals)'], 'id': 723, 'def': "a strap that is the part of a bridle that goes over the animal's nose", 'name': 'noseband_(for_animals)'}, {'frequency': 'f', 'synset': 'notebook.n.01', 'synonyms': ['notebook'], 'id': 724, 'def': 'a book with blank pages for recording notes or memoranda', 'name': 'notebook'}, {'frequency': 'c', 'synset': 'notepad.n.01', 'synonyms': 
['notepad'], 'id': 725, 'def': 'a pad of paper for keeping notes', 'name': 'notepad'}, {'frequency': 'f', 'synset': 'nut.n.03', 'synonyms': ['nut'], 'id': 726, 'def': 'a small metal block (usually square or hexagonal) with internal screw thread to be fitted onto a bolt', 'name': 'nut'}, {'frequency': 'r', 'synset': 'nutcracker.n.01', 'synonyms': ['nutcracker'], 'id': 727, 'def': 'a hand tool used to crack nuts open', 'name': 'nutcracker'}, {'frequency': 'f', 'synset': 'oar.n.01', 'synonyms': ['oar'], 'id': 728, 'def': 'an implement used to propel or steer a boat', 'name': 'oar'}, {'frequency': 'r', 'synset': 'octopus.n.01', 'synonyms': ['octopus_(food)'], 'id': 729, 'def': 'tentacles of octopus prepared as food', 'name': 'octopus_(food)'}, {'frequency': 'r', 'synset': 'octopus.n.02', 'synonyms': ['octopus_(animal)'], 'id': 730, 'def': 'bottom-living cephalopod having a soft oval body with eight long tentacles', 'name': 'octopus_(animal)'}, {'frequency': 'c', 'synset': 'oil_lamp.n.01', 'synonyms': ['oil_lamp', 'kerosene_lamp', 'kerosine_lamp'], 'id': 731, 'def': 'a lamp that burns oil (as kerosine) for light', 'name': 'oil_lamp'}, {'frequency': 'c', 'synset': 'olive_oil.n.01', 'synonyms': ['olive_oil'], 'id': 732, 'def': 'oil from olives', 'name': 'olive_oil'}, {'frequency': 'r', 'synset': 'omelet.n.01', 'synonyms': ['omelet', 'omelette'], 'id': 733, 'def': 'beaten eggs cooked until just set; may be folded around e.g. ham or cheese or jelly', 'name': 'omelet'}, {'frequency': 'f', 'synset': 'onion.n.01', 'synonyms': ['onion'], 'id': 734, 'def': 'the bulb of an onion plant', 'name': 'onion'}, {'frequency': 'f', 'synset': 'orange.n.01', 'synonyms': ['orange_(fruit)'], 'id': 735, 'def': 'orange (FRUIT of an orange tree)', 'name': 'orange_(fruit)'}, {'frequency': 'c', 'synset': 'orange_juice.n.01', 'synonyms': ['orange_juice'], 'id': 736, 'def': 'bottled or freshly squeezed juice of oranges', 'name': 'orange_juice'}, {'frequency': 'c', 'synset': 'ostrich.n.02', 'synonyms': ['ostrich'], 'id': 737, 'def': 'fast-running African flightless bird with two-toed feet; largest living bird', 'name': 'ostrich'}, {'frequency': 'f', 'synset': 'ottoman.n.03', 'synonyms': ['ottoman', 'pouf', 'pouffe', 'hassock'], 'id': 738, 'def': 'a thick standalone cushion used as a seat or footrest, often next to a chair', 'name': 'ottoman'}, {'frequency': 'f', 'synset': 'oven.n.01', 'synonyms': ['oven'], 'id': 739, 'def': 'kitchen appliance used for baking or roasting', 'name': 'oven'}, {'frequency': 'c', 'synset': 'overall.n.01', 'synonyms': ['overalls_(clothing)'], 'id': 740, 'def': 'work clothing consisting of denim trousers usually with a bib and shoulder straps', 'name': 'overalls_(clothing)'}, {'frequency': 'c', 'synset': 'owl.n.01', 'synonyms': ['owl'], 'id': 741, 'def': 'nocturnal bird of prey with hawk-like beak and claws and large head with front-facing eyes', 'name': 'owl'}, {'frequency': 'c', 'synset': 'packet.n.03', 'synonyms': ['packet'], 'id': 742, 'def': 'a small package or bundle', 'name': 'packet'}, {'frequency': 'r', 'synset': 'pad.n.03', 'synonyms': ['inkpad', 'inking_pad', 'stamp_pad'], 'id': 743, 'def': 'absorbent material saturated with ink used to transfer ink evenly to a rubber stamp', 'name': 'inkpad'}, {'frequency': 'c', 'synset': 'pad.n.04', 'synonyms': ['pad'], 'id': 744, 'def': 'mostly arm/knee pads labeled', 'name': 'pad'}, {'frequency': 'f', 'synset': 'paddle.n.04', 'synonyms': ['paddle', 'boat_paddle'], 'id': 745, 'def': 'a short light oar used without an oarlock to propel a canoe or small 
boat', 'name': 'paddle'}, {'frequency': 'c', 'synset': 'padlock.n.01', 'synonyms': ['padlock'], 'id': 746, 'def': 'a detachable, portable lock', 'name': 'padlock'}, {'frequency': 'c', 'synset': 'paintbrush.n.01', 'synonyms': ['paintbrush'], 'id': 747, 'def': 'a brush used as an applicator to apply paint', 'name': 'paintbrush'}, {'frequency': 'f', 'synset': 'painting.n.01', 'synonyms': ['painting'], 'id': 748, 'def': 'graphic art consisting of an artistic composition made by applying paints to a surface', 'name': 'painting'}, {'frequency': 'f', 'synset': 'pajama.n.02', 'synonyms': ['pajamas', 'pyjamas'], 'id': 749, 'def': 'loose-fitting nightclothes worn for sleeping or lounging', 'name': 'pajamas'}, {'frequency': 'c', 'synset': 'palette.n.02', 'synonyms': ['palette', 'pallet'], 'id': 750, 'def': 'board that provides a flat surface on which artists mix paints and the range of colors used', 'name': 'palette'}, {'frequency': 'f', 'synset': 'pan.n.01', 'synonyms': ['pan_(for_cooking)', 'cooking_pan'], 'id': 751, 'def': 'cooking utensil consisting of a wide metal vessel', 'name': 'pan_(for_cooking)'}, {'frequency': 'r', 'synset': 'pan.n.03', 'synonyms': ['pan_(metal_container)'], 'id': 752, 'def': 'shallow container made of metal', 'name': 'pan_(metal_container)'}, {'frequency': 'c', 'synset': 'pancake.n.01', 'synonyms': ['pancake'], 'id': 753, 'def': 'a flat cake of thin batter fried on both sides on a griddle', 'name': 'pancake'}, {'frequency': 'r', 'synset': 'pantyhose.n.01', 'synonyms': ['pantyhose'], 'id': 754, 'def': "a woman's tights consisting of underpants and stockings", 'name': 'pantyhose'}, {'frequency': 'r', 'synset': 'papaya.n.02', 'synonyms': ['papaya'], 'id': 755, 'def': 'large oval melon-like tropical fruit with yellowish flesh', 'name': 'papaya'}, {'frequency': 'f', 'synset': 'paper_plate.n.01', 'synonyms': ['paper_plate'], 'id': 756, 'def': 'a disposable plate made of cardboard', 'name': 'paper_plate'}, {'frequency': 'f', 'synset': 'paper_towel.n.01', 'synonyms': ['paper_towel'], 'id': 757, 'def': 'a disposable towel made of absorbent paper', 'name': 'paper_towel'}, {'frequency': 'r', 'synset': 'paperback_book.n.01', 'synonyms': ['paperback_book', 'paper-back_book', 'softback_book', 'soft-cover_book'], 'id': 758, 'def': 'a book with paper covers', 'name': 'paperback_book'}, {'frequency': 'r', 'synset': 'paperweight.n.01', 'synonyms': ['paperweight'], 'id': 759, 'def': 'a weight used to hold down a stack of papers', 'name': 'paperweight'}, {'frequency': 'c', 'synset': 'parachute.n.01', 'synonyms': ['parachute'], 'id': 760, 'def': 'rescue equipment consisting of a device that fills with air and retards your fall', 'name': 'parachute'}, {'frequency': 'c', 'synset': 'parakeet.n.01', 'synonyms': ['parakeet', 'parrakeet', 'parroket', 'paraquet', 'paroquet', 'parroquet'], 'id': 761, 'def': 'any of numerous small slender long-tailed parrots', 'name': 'parakeet'}, {'frequency': 'c', 'synset': 'parasail.n.01', 'synonyms': ['parasail_(sports)'], 'id': 762, 'def': 'parachute that will lift a person up into the air when it is towed by a motorboat or a car', 'name': 'parasail_(sports)'}, {'frequency': 'c', 'synset': 'parasol.n.01', 'synonyms': ['parasol', 'sunshade'], 'id': 763, 'def': 'a handheld collapsible source of shade', 'name': 'parasol'}, {'frequency': 'r', 'synset': 'parchment.n.01', 'synonyms': ['parchment'], 'id': 764, 'def': 'a superior paper resembling sheepskin', 'name': 'parchment'}, {'frequency': 'c', 'synset': 'parka.n.01', 'synonyms': ['parka', 'anorak'], 'id': 765, 
'def': "a kind of heavy jacket (`windcheater' is a British term)", 'name': 'parka'}, {'frequency': 'f', 'synset': 'parking_meter.n.01', 'synonyms': ['parking_meter'], 'id': 766, 'def': 'a coin-operated timer located next to a parking space', 'name': 'parking_meter'}, {'frequency': 'c', 'synset': 'parrot.n.01', 'synonyms': ['parrot'], 'id': 767, 'def': 'usually brightly colored tropical birds with short hooked beaks and the ability to mimic sounds', 'name': 'parrot'}, {'frequency': 'c', 'synset': 'passenger_car.n.01', 'synonyms': ['passenger_car_(part_of_a_train)', 'coach_(part_of_a_train)'], 'id': 768, 'def': 'a railcar where passengers ride', 'name': 'passenger_car_(part_of_a_train)'}, {'frequency': 'r', 'synset': 'passenger_ship.n.01', 'synonyms': ['passenger_ship'], 'id': 769, 'def': 'a ship built to carry passengers', 'name': 'passenger_ship'}, {'frequency': 'c', 'synset': 'passport.n.02', 'synonyms': ['passport'], 'id': 770, 'def': 'a document issued by a country to a citizen allowing that person to travel abroad and re-enter the home country', 'name': 'passport'}, {'frequency': 'f', 'synset': 'pastry.n.02', 'synonyms': ['pastry'], 'id': 771, 'def': 'any of various baked foods made of dough or batter', 'name': 'pastry'}, {'frequency': 'r', 'synset': 'patty.n.01', 'synonyms': ['patty_(food)'], 'id': 772, 'def': 'small flat mass of chopped food', 'name': 'patty_(food)'}, {'frequency': 'c', 'synset': 'pea.n.01', 'synonyms': ['pea_(food)'], 'id': 773, 'def': 'seed of a pea plant used for food', 'name': 'pea_(food)'}, {'frequency': 'c', 'synset': 'peach.n.03', 'synonyms': ['peach'], 'id': 774, 'def': 'downy juicy fruit with sweet yellowish or whitish flesh', 'name': 'peach'}, {'frequency': 'c', 'synset': 'peanut_butter.n.01', 'synonyms': ['peanut_butter'], 'id': 775, 'def': 'a spread made from ground peanuts', 'name': 'peanut_butter'}, {'frequency': 'f', 'synset': 'pear.n.01', 'synonyms': ['pear'], 'id': 776, 'def': 'sweet juicy gritty-textured fruit available in many varieties', 'name': 'pear'}, {'frequency': 'c', 'synset': 'peeler.n.03', 'synonyms': ['peeler_(tool_for_fruit_and_vegetables)'], 'id': 777, 'def': 'a device for peeling vegetables or fruits', 'name': 'peeler_(tool_for_fruit_and_vegetables)'}, {'frequency': 'r', 'synset': 'peg.n.04', 'synonyms': ['wooden_leg', 'pegleg'], 'id': 778, 'def': 'a prosthesis that replaces a missing leg', 'name': 'wooden_leg'}, {'frequency': 'r', 'synset': 'pegboard.n.01', 'synonyms': ['pegboard'], 'id': 779, 'def': 'a board perforated with regularly spaced holes into which pegs can be fitted', 'name': 'pegboard'}, {'frequency': 'c', 'synset': 'pelican.n.01', 'synonyms': ['pelican'], 'id': 780, 'def': 'large long-winged warm-water seabird having a large bill with a distensible pouch for fish', 'name': 'pelican'}, {'frequency': 'f', 'synset': 'pen.n.01', 'synonyms': ['pen'], 'id': 781, 'def': 'a writing implement with a point from which ink flows', 'name': 'pen'}, {'frequency': 'f', 'synset': 'pencil.n.01', 'synonyms': ['pencil'], 'id': 782, 'def': 'a thin cylindrical pointed writing implement made of wood and graphite', 'name': 'pencil'}, {'frequency': 'r', 'synset': 'pencil_box.n.01', 'synonyms': ['pencil_box', 'pencil_case'], 'id': 783, 'def': 'a box for holding pencils', 'name': 'pencil_box'}, {'frequency': 'r', 'synset': 'pencil_sharpener.n.01', 'synonyms': ['pencil_sharpener'], 'id': 784, 'def': 'a rotary implement for sharpening the point on pencils', 'name': 'pencil_sharpener'}, {'frequency': 'r', 'synset': 'pendulum.n.01', 'synonyms': 
['pendulum'], 'id': 785, 'def': 'an apparatus consisting of an object mounted so that it swings freely under the influence of gravity', 'name': 'pendulum'}, {'frequency': 'c', 'synset': 'penguin.n.01', 'synonyms': ['penguin'], 'id': 786, 'def': 'short-legged flightless birds of cold southern regions having webbed feet and wings modified as flippers', 'name': 'penguin'}, {'frequency': 'r', 'synset': 'pennant.n.02', 'synonyms': ['pennant'], 'id': 787, 'def': 'a flag longer than it is wide (and often tapering)', 'name': 'pennant'}, {'frequency': 'r', 'synset': 'penny.n.02', 'synonyms': ['penny_(coin)'], 'id': 788, 'def': 'a coin worth one-hundredth of the value of the basic unit', 'name': 'penny_(coin)'}, {'frequency': 'f', 'synset': 'pepper.n.03', 'synonyms': ['pepper', 'peppercorn'], 'id': 789, 'def': 'pungent seasoning from the berry of the common pepper plant; whole or ground', 'name': 'pepper'}, {'frequency': 'c', 'synset': 'pepper_mill.n.01', 'synonyms': ['pepper_mill', 'pepper_grinder'], 'id': 790, 'def': 'a mill for grinding pepper', 'name': 'pepper_mill'}, {'frequency': 'c', 'synset': 'perfume.n.02', 'synonyms': ['perfume'], 'id': 791, 'def': 'a toiletry that emits and diffuses a fragrant odor', 'name': 'perfume'}, {'frequency': 'r', 'synset': 'persimmon.n.02', 'synonyms': ['persimmon'], 'id': 792, 'def': 'orange fruit resembling a plum; edible when fully ripe', 'name': 'persimmon'}, {'frequency': 'f', 'synset': 'person.n.01', 'synonyms': ['person', 'baby', 'child', 'boy', 'girl', 'man', 'woman', 'human'], 'id': 793, 'def': 'a human being', 'name': 'person'}, {'frequency': 'c', 'synset': 'pet.n.01', 'synonyms': ['pet'], 'id': 794, 'def': 'a domesticated animal kept for companionship or amusement', 'name': 'pet'}, {'frequency': 'c', 'synset': 'pew.n.01', 'synonyms': ['pew_(church_bench)', 'church_bench'], 'id': 795, 'def': 'long bench with backs; used in church by the congregation', 'name': 'pew_(church_bench)'}, {'frequency': 'r', 'synset': 'phonebook.n.01', 'synonyms': ['phonebook', 'telephone_book', 'telephone_directory'], 'id': 796, 'def': 'a directory containing an alphabetical list of telephone subscribers and their telephone numbers', 'name': 'phonebook'}, {'frequency': 'c', 'synset': 'phonograph_record.n.01', 'synonyms': ['phonograph_record', 'phonograph_recording', 'record_(phonograph_recording)'], 'id': 797, 'def': 'sound recording consisting of a typically black disk with a continuous groove', 'name': 'phonograph_record'}, {'frequency': 'f', 'synset': 'piano.n.01', 'synonyms': ['piano'], 'id': 798, 'def': 'a keyboard instrument that is played by depressing keys that cause hammers to strike tuned strings and produce sounds', 'name': 'piano'}, {'frequency': 'f', 'synset': 'pickle.n.01', 'synonyms': ['pickle'], 'id': 799, 'def': 'vegetables (especially cucumbers) preserved in brine or vinegar', 'name': 'pickle'}, {'frequency': 'f', 'synset': 'pickup.n.01', 'synonyms': ['pickup_truck'], 'id': 800, 'def': 'a light truck with an open body and low sides and a tailboard', 'name': 'pickup_truck'}, {'frequency': 'c', 'synset': 'pie.n.01', 'synonyms': ['pie'], 'id': 801, 'def': 'dish baked in pastry-lined pan often with a pastry top', 'name': 'pie'}, {'frequency': 'c', 'synset': 'pigeon.n.01', 'synonyms': ['pigeon'], 'id': 802, 'def': 'wild and domesticated birds having a heavy body and short legs', 'name': 'pigeon'}, {'frequency': 'r', 'synset': 'piggy_bank.n.01', 'synonyms': ['piggy_bank', 'penny_bank'], 'id': 803, 'def': "a child's coin bank (often shaped like a pig)", 'name': 
'piggy_bank'}, {'frequency': 'f', 'synset': 'pillow.n.01', 'synonyms': ['pillow'], 'id': 804, 'def': 'a cushion to support the head of a sleeping person', 'name': 'pillow'}, {'frequency': 'r', 'synset': 'pin.n.09', 'synonyms': ['pin_(non_jewelry)'], 'id': 805, 'def': 'a small slender (often pointed) piece of wood or metal used to support or fasten or attach things', 'name': 'pin_(non_jewelry)'}, {'frequency': 'f', 'synset': 'pineapple.n.02', 'synonyms': ['pineapple'], 'id': 806, 'def': 'large sweet fleshy tropical fruit with a tuft of stiff leaves', 'name': 'pineapple'}, {'frequency': 'c', 'synset': 'pinecone.n.01', 'synonyms': ['pinecone'], 'id': 807, 'def': 'the seed-producing cone of a pine tree', 'name': 'pinecone'}, {'frequency': 'r', 'synset': 'ping-pong_ball.n.01', 'synonyms': ['ping-pong_ball'], 'id': 808, 'def': 'light hollow ball used in playing table tennis', 'name': 'ping-pong_ball'}, {'frequency': 'r', 'synset': 'pinwheel.n.03', 'synonyms': ['pinwheel'], 'id': 809, 'def': 'a toy consisting of vanes of colored paper or plastic that is pinned to a stick and spins when it is pointed into the wind', 'name': 'pinwheel'}, {'frequency': 'r', 'synset': 'pipe.n.01', 'synonyms': ['tobacco_pipe'], 'id': 810, 'def': 'a tube with a small bowl at one end; used for smoking tobacco', 'name': 'tobacco_pipe'}, {'frequency': 'f', 'synset': 'pipe.n.02', 'synonyms': ['pipe', 'piping'], 'id': 811, 'def': 'a long tube made of metal or plastic that is used to carry water or oil or gas etc.', 'name': 'pipe'}, {'frequency': 'r', 'synset': 'pistol.n.01', 'synonyms': ['pistol', 'handgun'], 'id': 812, 'def': 'a firearm that is held and fired with one hand', 'name': 'pistol'}, {'frequency': 'c', 'synset': 'pita.n.01', 'synonyms': ['pita_(bread)', 'pocket_bread'], 'id': 813, 'def': 'usually small round bread that can open into a pocket for filling', 'name': 'pita_(bread)'}, {'frequency': 'f', 'synset': 'pitcher.n.02', 'synonyms': ['pitcher_(vessel_for_liquid)', 'ewer'], 'id': 814, 'def': 'an open vessel with a handle and a spout for pouring', 'name': 'pitcher_(vessel_for_liquid)'}, {'frequency': 'r', 'synset': 'pitchfork.n.01', 'synonyms': ['pitchfork'], 'id': 815, 'def': 'a long-handled hand tool with sharp widely spaced prongs for lifting and pitching hay', 'name': 'pitchfork'}, {'frequency': 'f', 'synset': 'pizza.n.01', 'synonyms': ['pizza'], 'id': 816, 'def': 'Italian open pie made of thin bread dough spread with a spiced mixture of e.g. 
tomato sauce and cheese', 'name': 'pizza'}, {'frequency': 'f', 'synset': 'place_mat.n.01', 'synonyms': ['place_mat'], 'id': 817, 'def': 'a mat placed on a table for an individual place setting', 'name': 'place_mat'}, {'frequency': 'f', 'synset': 'plate.n.04', 'synonyms': ['plate'], 'id': 818, 'def': 'dish on which food is served or from which food is eaten', 'name': 'plate'}, {'frequency': 'c', 'synset': 'platter.n.01', 'synonyms': ['platter'], 'id': 819, 'def': 'a large shallow dish used for serving food', 'name': 'platter'}, {'frequency': 'r', 'synset': 'playpen.n.01', 'synonyms': ['playpen'], 'id': 820, 'def': 'a portable enclosure in which babies may be left to play', 'name': 'playpen'}, {'frequency': 'c', 'synset': 'pliers.n.01', 'synonyms': ['pliers', 'plyers'], 'id': 821, 'def': 'a gripping hand tool with two hinged arms and (usually) serrated jaws', 'name': 'pliers'}, {'frequency': 'r', 'synset': 'plow.n.01', 'synonyms': ['plow_(farm_equipment)', 'plough_(farm_equipment)'], 'id': 822, 'def': 'a farm tool having one or more heavy blades to break the soil and cut a furrow prior to sowing', 'name': 'plow_(farm_equipment)'}, {'frequency': 'r', 'synset': 'plume.n.02', 'synonyms': ['plume'], 'id': 823, 'def': 'a feather or cluster of feathers worn as an ornament', 'name': 'plume'}, {'frequency': 'r', 'synset': 'pocket_watch.n.01', 'synonyms': ['pocket_watch'], 'id': 824, 'def': 'a watch that is carried in a small watch pocket', 'name': 'pocket_watch'}, {'frequency': 'c', 'synset': 'pocketknife.n.01', 'synonyms': ['pocketknife'], 'id': 825, 'def': 'a knife with a blade that folds into the handle; suitable for carrying in the pocket', 'name': 'pocketknife'}, {'frequency': 'c', 'synset': 'poker.n.01', 'synonyms': ['poker_(fire_stirring_tool)', 'stove_poker', 'fire_hook'], 'id': 826, 'def': 'fire iron consisting of a metal rod with a handle; used to stir a fire', 'name': 'poker_(fire_stirring_tool)'}, {'frequency': 'f', 'synset': 'pole.n.01', 'synonyms': ['pole', 'post'], 'id': 827, 'def': 'a long (usually round) rod of wood or metal or plastic', 'name': 'pole'}, {'frequency': 'f', 'synset': 'polo_shirt.n.01', 'synonyms': ['polo_shirt', 'sport_shirt'], 'id': 828, 'def': 'a shirt with short sleeves designed for comfort and casual wear', 'name': 'polo_shirt'}, {'frequency': 'r', 'synset': 'poncho.n.01', 'synonyms': ['poncho'], 'id': 829, 'def': 'a blanket-like cloak with a hole in the center for the head', 'name': 'poncho'}, {'frequency': 'c', 'synset': 'pony.n.05', 'synonyms': ['pony'], 'id': 830, 'def': 'any of various breeds of small gentle horses usually less than five feet high at the shoulder', 'name': 'pony'}, {'frequency': 'r', 'synset': 'pool_table.n.01', 'synonyms': ['pool_table', 'billiard_table', 'snooker_table'], 'id': 831, 'def': 'game equipment consisting of a heavy table on which pool is played', 'name': 'pool_table'}, {'frequency': 'f', 'synset': 'pop.n.02', 'synonyms': ['pop_(soda)', 'soda_(pop)', 'tonic', 'soft_drink'], 'id': 832, 'def': 'a sweet drink containing carbonated water and flavoring', 'name': 'pop_(soda)'}, {'frequency': 'c', 'synset': 'postbox.n.01', 'synonyms': ['postbox_(public)', 'mailbox_(public)'], 'id': 833, 'def': 'public box for deposit of mail', 'name': 'postbox_(public)'}, {'frequency': 'c', 'synset': 'postcard.n.01', 'synonyms': ['postcard', 'postal_card', 'mailing-card'], 'id': 834, 'def': 'a card for sending messages by post without an envelope', 'name': 'postcard'}, {'frequency': 'f', 'synset': 'poster.n.01', 'synonyms': ['poster', 'placard'], 'id': 
835, 'def': 'a sign posted in a public place as an advertisement', 'name': 'poster'}, {'frequency': 'f', 'synset': 'pot.n.01', 'synonyms': ['pot'], 'id': 836, 'def': 'metal or earthenware cooking vessel that is usually round and deep; often has a handle and lid', 'name': 'pot'}, {'frequency': 'f', 'synset': 'pot.n.04', 'synonyms': ['flowerpot'], 'id': 837, 'def': 'a container in which plants are cultivated', 'name': 'flowerpot'}, {'frequency': 'f', 'synset': 'potato.n.01', 'synonyms': ['potato'], 'id': 838, 'def': 'an edible tuber native to South America', 'name': 'potato'}, {'frequency': 'c', 'synset': 'potholder.n.01', 'synonyms': ['potholder'], 'id': 839, 'def': 'an insulated pad for holding hot pots', 'name': 'potholder'}, {'frequency': 'c', 'synset': 'pottery.n.01', 'synonyms': ['pottery', 'clayware'], 'id': 840, 'def': 'ceramic ware made from clay and baked in a kiln', 'name': 'pottery'}, {'frequency': 'c', 'synset': 'pouch.n.01', 'synonyms': ['pouch'], 'id': 841, 'def': 'a small or medium size container for holding or carrying things', 'name': 'pouch'}, {'frequency': 'c', 'synset': 'power_shovel.n.01', 'synonyms': ['power_shovel', 'excavator', 'digger'], 'id': 842, 'def': 'a machine for excavating', 'name': 'power_shovel'}, {'frequency': 'c', 'synset': 'prawn.n.01', 'synonyms': ['prawn', 'shrimp'], 'id': 843, 'def': 'any of various edible decapod crustaceans', 'name': 'prawn'}, {'frequency': 'c', 'synset': 'pretzel.n.01', 'synonyms': ['pretzel'], 'id': 844, 'def': 'glazed and salted cracker typically in the shape of a loose knot', 'name': 'pretzel'}, {'frequency': 'f', 'synset': 'printer.n.03', 'synonyms': ['printer', 'printing_machine'], 'id': 845, 'def': 'a machine that prints', 'name': 'printer'}, {'frequency': 'c', 'synset': 'projectile.n.01', 'synonyms': ['projectile_(weapon)', 'missile'], 'id': 846, 'def': 'a weapon that is forcibly thrown or projected at a target', 'name': 'projectile_(weapon)'}, {'frequency': 'c', 'synset': 'projector.n.02', 'synonyms': ['projector'], 'id': 847, 'def': 'an optical instrument that projects an enlarged image onto a screen', 'name': 'projector'}, {'frequency': 'f', 'synset': 'propeller.n.01', 'synonyms': ['propeller', 'propellor'], 'id': 848, 'def': 'a mechanical device that rotates to push against air or water', 'name': 'propeller'}, {'frequency': 'r', 'synset': 'prune.n.01', 'synonyms': ['prune'], 'id': 849, 'def': 'dried plum', 'name': 'prune'}, {'frequency': 'r', 'synset': 'pudding.n.01', 'synonyms': ['pudding'], 'id': 850, 'def': 'any of various soft thick unsweetened baked dishes', 'name': 'pudding'}, {'frequency': 'r', 'synset': 'puffer.n.02', 'synonyms': ['puffer_(fish)', 'pufferfish', 'blowfish', 'globefish'], 'id': 851, 'def': 'fishes whose elongated spiny body can inflate itself with water or air to form a globe', 'name': 'puffer_(fish)'}, {'frequency': 'r', 'synset': 'puffin.n.01', 'synonyms': ['puffin'], 'id': 852, 'def': 'seabirds having short necks and brightly colored compressed bills', 'name': 'puffin'}, {'frequency': 'r', 'synset': 'pug.n.01', 'synonyms': ['pug-dog'], 'id': 853, 'def': 'small compact smooth-coated breed of Asiatic origin having a tightly curled tail and broad flat wrinkled muzzle', 'name': 'pug-dog'}, {'frequency': 'c', 'synset': 'pumpkin.n.02', 'synonyms': ['pumpkin'], 'id': 854, 'def': 'usually large pulpy deep-yellow round fruit of the squash family maturing in late summer or early autumn', 'name': 'pumpkin'}, {'frequency': 'r', 'synset': 'punch.n.03', 'synonyms': ['puncher'], 'id': 855, 'def': 'a tool for 
making holes or indentations', 'name': 'puncher'}, {'frequency': 'r', 'synset': 'puppet.n.01', 'synonyms': ['puppet', 'marionette'], 'id': 856, 'def': 'a small figure of a person operated from above with strings by a puppeteer', 'name': 'puppet'}, {'frequency': 'c', 'synset': 'puppy.n.01', 'synonyms': ['puppy'], 'id': 857, 'def': 'a young dog', 'name': 'puppy'}, {'frequency': 'r', 'synset': 'quesadilla.n.01', 'synonyms': ['quesadilla'], 'id': 858, 'def': 'a tortilla that is filled with cheese and heated', 'name': 'quesadilla'}, {'frequency': 'r', 'synset': 'quiche.n.02', 'synonyms': ['quiche'], 'id': 859, 'def': 'a tart filled with rich unsweetened custard; often contains other ingredients (as cheese or ham or seafood or vegetables)', 'name': 'quiche'}, {'frequency': 'f', 'synset': 'quilt.n.01', 'synonyms': ['quilt', 'comforter'], 'id': 860, 'def': 'bedding made of two layers of cloth filled with stuffing and stitched together', 'name': 'quilt'}, {'frequency': 'c', 'synset': 'rabbit.n.01', 'synonyms': ['rabbit'], 'id': 861, 'def': 'any of various burrowing animals of the family Leporidae having long ears and short tails', 'name': 'rabbit'}, {'frequency': 'r', 'synset': 'racer.n.02', 'synonyms': ['race_car', 'racing_car'], 'id': 862, 'def': 'a fast car that competes in races', 'name': 'race_car'}, {'frequency': 'c', 'synset': 'racket.n.04', 'synonyms': ['racket', 'racquet'], 'id': 863, 'def': 'a sports implement used to strike a ball in various games', 'name': 'racket'}, {'frequency': 'r', 'synset': 'radar.n.01', 'synonyms': ['radar'], 'id': 864, 'def': 'measuring instrument in which the echo of a pulse of microwave radiation is used to detect and locate distant objects', 'name': 'radar'}, {'frequency': 'f', 'synset': 'radiator.n.03', 'synonyms': ['radiator'], 'id': 865, 'def': 'a mechanism consisting of a metal honeycomb through which hot fluids circulate', 'name': 'radiator'}, {'frequency': 'c', 'synset': 'radio_receiver.n.01', 'synonyms': ['radio_receiver', 'radio_set', 'radio', 'tuner_(radio)'], 'id': 866, 'def': 'an electronic receiver that detects and demodulates and amplifies transmitted radio signals', 'name': 'radio_receiver'}, {'frequency': 'c', 'synset': 'radish.n.03', 'synonyms': ['radish', 'daikon'], 'id': 867, 'def': 'pungent edible root of any of various cultivated radish plants', 'name': 'radish'}, {'frequency': 'c', 'synset': 'raft.n.01', 'synonyms': ['raft'], 'id': 868, 'def': 'a flat float (usually made of logs or planks) that can be used for transport or as a platform for swimmers', 'name': 'raft'}, {'frequency': 'r', 'synset': 'rag_doll.n.01', 'synonyms': ['rag_doll'], 'id': 869, 'def': 'a cloth doll that is stuffed and (usually) painted', 'name': 'rag_doll'}, {'frequency': 'c', 'synset': 'raincoat.n.01', 'synonyms': ['raincoat', 'waterproof_jacket'], 'id': 870, 'def': 'a water-resistant coat', 'name': 'raincoat'}, {'frequency': 'c', 'synset': 'ram.n.05', 'synonyms': ['ram_(animal)'], 'id': 871, 'def': 'uncastrated adult male sheep', 'name': 'ram_(animal)'}, {'frequency': 'c', 'synset': 'raspberry.n.02', 'synonyms': ['raspberry'], 'id': 872, 'def': 'red or black edible aggregate berries usually smaller than the related blackberries', 'name': 'raspberry'}, {'frequency': 'r', 'synset': 'rat.n.01', 'synonyms': ['rat'], 'id': 873, 'def': 'any of various long-tailed rodents similar to but larger than a mouse', 'name': 'rat'}, {'frequency': 'c', 'synset': 'razorblade.n.01', 'synonyms': ['razorblade'], 'id': 874, 'def': 'a blade that has a very sharp edge', 'name': 
'razorblade'}, {'frequency': 'c', 'synset': 'reamer.n.01', 'synonyms': ['reamer_(juicer)', 'juicer', 'juice_reamer'], 'id': 875, 'def': 'a squeezer with a conical ridged center that is used for squeezing juice from citrus fruit', 'name': 'reamer_(juicer)'}, {'frequency': 'f', 'synset': 'rearview_mirror.n.01', 'synonyms': ['rearview_mirror'], 'id': 876, 'def': 'vehicle mirror (side or rearview)', 'name': 'rearview_mirror'}, {'frequency': 'c', 'synset': 'receipt.n.02', 'synonyms': ['receipt'], 'id': 877, 'def': 'an acknowledgment (usually tangible) that payment has been made', 'name': 'receipt'}, {'frequency': 'c', 'synset': 'recliner.n.01', 'synonyms': ['recliner', 'reclining_chair', 'lounger_(chair)'], 'id': 878, 'def': 'an armchair whose back can be lowered and foot can be raised to allow the sitter to recline in it', 'name': 'recliner'}, {'frequency': 'c', 'synset': 'record_player.n.01', 'synonyms': ['record_player', 'phonograph_(record_player)', 'turntable'], 'id': 879, 'def': 'machine in which rotating records cause a stylus to vibrate and the vibrations are amplified acoustically or electronically', 'name': 'record_player'}, {'frequency': 'f', 'synset': 'reflector.n.01', 'synonyms': ['reflector'], 'id': 880, 'def': 'device that reflects light, radiation, etc.', 'name': 'reflector'}, {'frequency': 'f', 'synset': 'remote_control.n.01', 'synonyms': ['remote_control'], 'id': 881, 'def': 'a device that can be used to control a machine or apparatus from a distance', 'name': 'remote_control'}, {'frequency': 'c', 'synset': 'rhinoceros.n.01', 'synonyms': ['rhinoceros'], 'id': 882, 'def': 'massive powerful herbivorous odd-toed ungulate of southeast Asia and Africa having very thick skin and one or two horns on the snout', 'name': 'rhinoceros'}, {'frequency': 'r', 'synset': 'rib.n.03', 'synonyms': ['rib_(food)'], 'id': 883, 'def': 'cut of meat including one or more ribs', 'name': 'rib_(food)'}, {'frequency': 'c', 'synset': 'rifle.n.01', 'synonyms': ['rifle'], 'id': 884, 'def': 'a shoulder firearm with a long barrel', 'name': 'rifle'}, {'frequency': 'f', 'synset': 'ring.n.08', 'synonyms': ['ring'], 'id': 885, 'def': 'jewelry consisting of a circlet of precious metal (often set with jewels) worn on the finger', 'name': 'ring'}, {'frequency': 'r', 'synset': 'river_boat.n.01', 'synonyms': ['river_boat'], 'id': 886, 'def': 'a boat used on rivers or to ply a river', 'name': 'river_boat'}, {'frequency': 'r', 'synset': 'road_map.n.02', 'synonyms': ['road_map'], 'id': 887, 'def': '(NOT A ROAD) a MAP showing roads (for automobile travel)', 'name': 'road_map'}, {'frequency': 'c', 'synset': 'robe.n.01', 'synonyms': ['robe'], 'id': 888, 'def': 'any loose flowing garment', 'name': 'robe'}, {'frequency': 'c', 'synset': 'rocking_chair.n.01', 'synonyms': ['rocking_chair'], 'id': 889, 'def': 'a chair mounted on rockers', 'name': 'rocking_chair'}, {'frequency': 'r', 'synset': 'rodent.n.01', 'synonyms': ['rodent'], 'id': 890, 'def': 'relatively small placental mammals having a single pair of constantly growing incisor teeth specialized for gnawing', 'name': 'rodent'}, {'frequency': 'r', 'synset': 'roller_skate.n.01', 'synonyms': ['roller_skate'], 'id': 891, 'def': 'a shoe with pairs of rollers (small hard wheels) fixed to the sole', 'name': 'roller_skate'}, {'frequency': 'r', 'synset': 'rollerblade.n.01', 'synonyms': ['Rollerblade'], 'id': 892, 'def': 'an in-line variant of a roller skate', 'name': 'Rollerblade'}, {'frequency': 'c', 'synset': 'rolling_pin.n.01', 'synonyms': ['rolling_pin'], 'id': 893, 'def': 
'utensil consisting of a cylinder (usually of wood) with a handle at each end; used to roll out dough', 'name': 'rolling_pin'}, {'frequency': 'r', 'synset': 'root_beer.n.01', 'synonyms': ['root_beer'], 'id': 894, 'def': 'carbonated drink containing extracts of roots and herbs', 'name': 'root_beer'}, {'frequency': 'c', 'synset': 'router.n.02', 'synonyms': ['router_(computer_equipment)'], 'id': 895, 'def': 'a device that forwards data packets between computer networks', 'name': 'router_(computer_equipment)'}, {'frequency': 'f', 'synset': 'rubber_band.n.01', 'synonyms': ['rubber_band', 'elastic_band'], 'id': 896, 'def': 'a narrow band of elastic rubber used to hold things (such as papers) together', 'name': 'rubber_band'}, {'frequency': 'c', 'synset': 'runner.n.08', 'synonyms': ['runner_(carpet)'], 'id': 897, 'def': 'a long narrow carpet', 'name': 'runner_(carpet)'}, {'frequency': 'f', 'synset': 'sack.n.01', 'synonyms': ['plastic_bag', 'paper_bag'], 'id': 898, 'def': "a bag made of paper or plastic for holding customer's purchases", 'name': 'plastic_bag'}, {'frequency': 'f', 'synset': 'saddle.n.01', 'synonyms': ['saddle_(on_an_animal)'], 'id': 899, 'def': 'a seat for the rider of a horse or camel', 'name': 'saddle_(on_an_animal)'}, {'frequency': 'f', 'synset': 'saddle_blanket.n.01', 'synonyms': ['saddle_blanket', 'saddlecloth', 'horse_blanket'], 'id': 900, 'def': 'stable gear consisting of a blanket placed under the saddle', 'name': 'saddle_blanket'}, {'frequency': 'c', 'synset': 'saddlebag.n.01', 'synonyms': ['saddlebag'], 'id': 901, 'def': 'a large bag (or pair of bags) hung over a saddle', 'name': 'saddlebag'}, {'frequency': 'r', 'synset': 'safety_pin.n.01', 'synonyms': ['safety_pin'], 'id': 902, 'def': 'a pin in the form of a clasp; has a guard so the point of the pin will not stick the user', 'name': 'safety_pin'}, {'frequency': 'f', 'synset': 'sail.n.01', 'synonyms': ['sail'], 'id': 903, 'def': 'a large piece of fabric by means of which wind is used to propel a sailing vessel', 'name': 'sail'}, {'frequency': 'f', 'synset': 'salad.n.01', 'synonyms': ['salad'], 'id': 904, 'def': 'food mixtures either arranged on a plate or tossed and served with a moist dressing; usually consisting of or including greens', 'name': 'salad'}, {'frequency': 'r', 'synset': 'salad_plate.n.01', 'synonyms': ['salad_plate', 'salad_bowl'], 'id': 905, 'def': 'a plate or bowl for individual servings of salad', 'name': 'salad_plate'}, {'frequency': 'c', 'synset': 'salami.n.01', 'synonyms': ['salami'], 'id': 906, 'def': 'highly seasoned fatty sausage of pork and beef usually dried', 'name': 'salami'}, {'frequency': 'c', 'synset': 'salmon.n.01', 'synonyms': ['salmon_(fish)'], 'id': 907, 'def': 'any of various large food and game fishes of northern waters', 'name': 'salmon_(fish)'}, {'frequency': 'r', 'synset': 'salmon.n.03', 'synonyms': ['salmon_(food)'], 'id': 908, 'def': 'flesh of any of various marine or freshwater fish of the family Salmonidae', 'name': 'salmon_(food)'}, {'frequency': 'c', 'synset': 'salsa.n.01', 'synonyms': ['salsa'], 'id': 909, 'def': 'spicy sauce of tomatoes and onions and chili peppers to accompany Mexican foods', 'name': 'salsa'}, {'frequency': 'f', 'synset': 'saltshaker.n.01', 'synonyms': ['saltshaker'], 'id': 910, 'def': 'a shaker with a perforated top for sprinkling salt', 'name': 'saltshaker'}, {'frequency': 'f', 'synset': 'sandal.n.01', 'synonyms': ['sandal_(type_of_shoe)'], 'id': 911, 'def': 'a shoe consisting of a sole fastened by straps to the foot', 'name': 'sandal_(type_of_shoe)'}, 
{'frequency': 'f', 'synset': 'sandwich.n.01', 'synonyms': ['sandwich'], 'id': 912, 'def': 'two (or more) slices of bread with a filling between them', 'name': 'sandwich'}, {'frequency': 'r', 'synset': 'satchel.n.01', 'synonyms': ['satchel'], 'id': 913, 'def': 'luggage consisting of a small case with a flat bottom and (usually) a shoulder strap', 'name': 'satchel'}, {'frequency': 'r', 'synset': 'saucepan.n.01', 'synonyms': ['saucepan'], 'id': 914, 'def': 'a deep pan with a handle; used for stewing or boiling', 'name': 'saucepan'}, {'frequency': 'f', 'synset': 'saucer.n.02', 'synonyms': ['saucer'], 'id': 915, 'def': 'a small shallow dish for holding a cup at the table', 'name': 'saucer'}, {'frequency': 'f', 'synset': 'sausage.n.01', 'synonyms': ['sausage'], 'id': 916, 'def': 'highly seasoned minced meat stuffed in casings', 'name': 'sausage'}, {'frequency': 'r', 'synset': 'sawhorse.n.01', 'synonyms': ['sawhorse', 'sawbuck'], 'id': 917, 'def': 'a framework for holding wood that is being sawed', 'name': 'sawhorse'}, {'frequency': 'r', 'synset': 'sax.n.02', 'synonyms': ['saxophone'], 'id': 918, 'def': "a wind instrument with a `J'-shaped form typically made of brass", 'name': 'saxophone'}, {'frequency': 'f', 'synset': 'scale.n.07', 'synonyms': ['scale_(measuring_instrument)'], 'id': 919, 'def': 'a measuring instrument for weighing; shows amount of mass', 'name': 'scale_(measuring_instrument)'}, {'frequency': 'r', 'synset': 'scarecrow.n.01', 'synonyms': ['scarecrow', 'strawman'], 'id': 920, 'def': 'an effigy in the shape of a man to frighten birds away from seeds', 'name': 'scarecrow'}, {'frequency': 'f', 'synset': 'scarf.n.01', 'synonyms': ['scarf'], 'id': 921, 'def': 'a garment worn around the head or neck or shoulders for warmth or decoration', 'name': 'scarf'}, {'frequency': 'c', 'synset': 'school_bus.n.01', 'synonyms': ['school_bus'], 'id': 922, 'def': 'a bus used to transport children to or from school', 'name': 'school_bus'}, {'frequency': 'f', 'synset': 'scissors.n.01', 'synonyms': ['scissors'], 'id': 923, 'def': 'a tool having two crossed pivoting blades with looped handles', 'name': 'scissors'}, {'frequency': 'f', 'synset': 'scoreboard.n.01', 'synonyms': ['scoreboard'], 'id': 924, 'def': 'a large board for displaying the score of a contest (and some other information)', 'name': 'scoreboard'}, {'frequency': 'r', 'synset': 'scraper.n.01', 'synonyms': ['scraper'], 'id': 925, 'def': 'any of various hand tools for scraping', 'name': 'scraper'}, {'frequency': 'c', 'synset': 'screwdriver.n.01', 'synonyms': ['screwdriver'], 'id': 926, 'def': 'a hand tool for driving screws; has a tip that fits into the head of a screw', 'name': 'screwdriver'}, {'frequency': 'f', 'synset': 'scrub_brush.n.01', 'synonyms': ['scrubbing_brush'], 'id': 927, 'def': 'a brush with short stiff bristles for heavy cleaning', 'name': 'scrubbing_brush'}, {'frequency': 'c', 'synset': 'sculpture.n.01', 'synonyms': ['sculpture'], 'id': 928, 'def': 'a three-dimensional work of art', 'name': 'sculpture'}, {'frequency': 'c', 'synset': 'seabird.n.01', 'synonyms': ['seabird', 'seafowl'], 'id': 929, 'def': 'a bird that frequents coastal waters and the open ocean: gulls; pelicans; gannets; cormorants; albatrosses; petrels; etc.', 'name': 'seabird'}, {'frequency': 'c', 'synset': 'seahorse.n.02', 'synonyms': ['seahorse'], 'id': 930, 'def': 'small fish with horse-like heads bent sharply downward and curled tails', 'name': 'seahorse'}, {'frequency': 'r', 'synset': 'seaplane.n.01', 'synonyms': ['seaplane', 'hydroplane'], 'id': 931, 'def': 
'an airplane that can land on or take off from water', 'name': 'seaplane'}, {'frequency': 'c', 'synset': 'seashell.n.01', 'synonyms': ['seashell'], 'id': 932, 'def': 'the shell of a marine organism', 'name': 'seashell'}, {'frequency': 'c', 'synset': 'sewing_machine.n.01', 'synonyms': ['sewing_machine'], 'id': 933, 'def': 'a textile machine used as a home appliance for sewing', 'name': 'sewing_machine'}, {'frequency': 'c', 'synset': 'shaker.n.03', 'synonyms': ['shaker'], 'id': 934, 'def': 'a container in which something can be shaken', 'name': 'shaker'}, {'frequency': 'c', 'synset': 'shampoo.n.01', 'synonyms': ['shampoo'], 'id': 935, 'def': 'cleansing agent consisting of soaps or detergents used for washing the hair', 'name': 'shampoo'}, {'frequency': 'c', 'synset': 'shark.n.01', 'synonyms': ['shark'], 'id': 936, 'def': 'typically large carnivorous fishes with sharp teeth', 'name': 'shark'}, {'frequency': 'r', 'synset': 'sharpener.n.01', 'synonyms': ['sharpener'], 'id': 937, 'def': 'any implement that is used to make something (an edge or a point) sharper', 'name': 'sharpener'}, {'frequency': 'r', 'synset': 'sharpie.n.03', 'synonyms': ['Sharpie'], 'id': 938, 'def': 'a pen with indelible ink that will write on any surface', 'name': 'Sharpie'}, {'frequency': 'r', 'synset': 'shaver.n.03', 'synonyms': ['shaver_(electric)', 'electric_shaver', 'electric_razor'], 'id': 939, 'def': 'a razor powered by an electric motor', 'name': 'shaver_(electric)'}, {'frequency': 'c', 'synset': 'shaving_cream.n.01', 'synonyms': ['shaving_cream', 'shaving_soap'], 'id': 940, 'def': 'toiletry that forms a rich lather for softening the beard before shaving', 'name': 'shaving_cream'}, {'frequency': 'r', 'synset': 'shawl.n.01', 'synonyms': ['shawl'], 'id': 941, 'def': 'cloak consisting of an oblong piece of cloth used to cover the head and shoulders', 'name': 'shawl'}, {'frequency': 'r', 'synset': 'shears.n.01', 'synonyms': ['shears'], 'id': 942, 'def': 'large scissors with strong blades', 'name': 'shears'}, {'frequency': 'f', 'synset': 'sheep.n.01', 'synonyms': ['sheep'], 'id': 943, 'def': 'woolly usually horned ruminant mammal related to the goat', 'name': 'sheep'}, {'frequency': 'r', 'synset': 'shepherd_dog.n.01', 'synonyms': ['shepherd_dog', 'sheepdog'], 'id': 944, 'def': 'any of various usually long-haired breeds of dog reared to herd and guard sheep', 'name': 'shepherd_dog'}, {'frequency': 'r', 'synset': 'sherbert.n.01', 'synonyms': ['sherbert', 'sherbet'], 'id': 945, 'def': 'a frozen dessert made primarily of fruit juice and sugar', 'name': 'sherbert'}, {'frequency': 'c', 'synset': 'shield.n.02', 'synonyms': ['shield'], 'id': 946, 'def': 'armor carried on the arm to intercept blows', 'name': 'shield'}, {'frequency': 'f', 'synset': 'shirt.n.01', 'synonyms': ['shirt'], 'id': 947, 'def': 'a garment worn on the upper half of the body', 'name': 'shirt'}, {'frequency': 'f', 'synset': 'shoe.n.01', 'synonyms': ['shoe', 'sneaker_(type_of_shoe)', 'tennis_shoe'], 'id': 948, 'def': 'common footwear covering the foot', 'name': 'shoe'}, {'frequency': 'f', 'synset': 'shopping_bag.n.01', 'synonyms': ['shopping_bag'], 'id': 949, 'def': 'a bag made of plastic or strong paper (often with handles); used to transport goods after shopping', 'name': 'shopping_bag'}, {'frequency': 'c', 'synset': 'shopping_cart.n.01', 'synonyms': ['shopping_cart'], 'id': 950, 'def': 'a handcart that holds groceries or other goods while shopping', 'name': 'shopping_cart'}, {'frequency': 'f', 'synset': 'short_pants.n.01', 'synonyms': 
['short_pants', 'shorts_(clothing)', 'trunks_(clothing)'], 'id': 951, 'def': 'trousers that end at or above the knee', 'name': 'short_pants'}, {'frequency': 'r', 'synset': 'shot_glass.n.01', 'synonyms': ['shot_glass'], 'id': 952, 'def': 'a small glass adequate to hold a single swallow of whiskey', 'name': 'shot_glass'}, {'frequency': 'f', 'synset': 'shoulder_bag.n.01', 'synonyms': ['shoulder_bag'], 'id': 953, 'def': 'a large handbag that can be carried by a strap looped over the shoulder', 'name': 'shoulder_bag'}, {'frequency': 'c', 'synset': 'shovel.n.01', 'synonyms': ['shovel'], 'id': 954, 'def': 'a hand tool for lifting loose material such as snow, dirt, etc.', 'name': 'shovel'}, {'frequency': 'f', 'synset': 'shower.n.01', 'synonyms': ['shower_head'], 'id': 955, 'def': 'a plumbing fixture that sprays water over you', 'name': 'shower_head'}, {'frequency': 'r', 'synset': 'shower_cap.n.01', 'synonyms': ['shower_cap'], 'id': 956, 'def': 'a tight cap worn to keep hair dry while showering', 'name': 'shower_cap'}, {'frequency': 'f', 'synset': 'shower_curtain.n.01', 'synonyms': ['shower_curtain'], 'id': 957, 'def': 'a curtain that keeps water from splashing out of the shower area', 'name': 'shower_curtain'}, {'frequency': 'r', 'synset': 'shredder.n.01', 'synonyms': ['shredder_(for_paper)'], 'id': 958, 'def': 'a device that shreds documents', 'name': 'shredder_(for_paper)'}, {'frequency': 'f', 'synset': 'signboard.n.01', 'synonyms': ['signboard'], 'id': 959, 'def': 'structure displaying a board on which advertisements can be posted', 'name': 'signboard'}, {'frequency': 'c', 'synset': 'silo.n.01', 'synonyms': ['silo'], 'id': 960, 'def': 'a cylindrical tower used for storing goods', 'name': 'silo'}, {'frequency': 'f', 'synset': 'sink.n.01', 'synonyms': ['sink'], 'id': 961, 'def': 'plumbing fixture consisting of a water basin fixed to a wall or floor and having a drainpipe', 'name': 'sink'}, {'frequency': 'f', 'synset': 'skateboard.n.01', 'synonyms': ['skateboard'], 'id': 962, 'def': 'a board with wheels that is ridden in a standing or crouching position and propelled by foot', 'name': 'skateboard'}, {'frequency': 'c', 'synset': 'skewer.n.01', 'synonyms': ['skewer'], 'id': 963, 'def': 'a long pin for holding meat in position while it is being roasted', 'name': 'skewer'}, {'frequency': 'f', 'synset': 'ski.n.01', 'synonyms': ['ski'], 'id': 964, 'def': 'sports equipment for skiing on snow', 'name': 'ski'}, {'frequency': 'f', 'synset': 'ski_boot.n.01', 'synonyms': ['ski_boot'], 'id': 965, 'def': 'a stiff boot that is fastened to a ski with a ski binding', 'name': 'ski_boot'}, {'frequency': 'f', 'synset': 'ski_parka.n.01', 'synonyms': ['ski_parka', 'ski_jacket'], 'id': 966, 'def': 'a parka to be worn while skiing', 'name': 'ski_parka'}, {'frequency': 'f', 'synset': 'ski_pole.n.01', 'synonyms': ['ski_pole'], 'id': 967, 'def': 'a pole with metal points used as an aid in skiing', 'name': 'ski_pole'}, {'frequency': 'f', 'synset': 'skirt.n.02', 'synonyms': ['skirt'], 'id': 968, 'def': 'a garment hanging from the waist; worn mainly by girls and women', 'name': 'skirt'}, {'frequency': 'r', 'synset': 'skullcap.n.01', 'synonyms': ['skullcap'], 'id': 969, 'def': 'rounded brimless cap fitting the crown of the head', 'name': 'skullcap'}, {'frequency': 'c', 'synset': 'sled.n.01', 'synonyms': ['sled', 'sledge', 'sleigh'], 'id': 970, 'def': 'a vehicle or flat object for transportation over snow by sliding or pulled by dogs, etc.', 'name': 'sled'}, {'frequency': 'c', 'synset': 'sleeping_bag.n.01', 'synonyms': 
['sleeping_bag'], 'id': 971, 'def': 'large padded bag designed to be slept in outdoors', 'name': 'sleeping_bag'}, {'frequency': 'r', 'synset': 'sling.n.05', 'synonyms': ['sling_(bandage)', 'triangular_bandage'], 'id': 972, 'def': 'bandage to support an injured forearm; slung over the shoulder or neck', 'name': 'sling_(bandage)'}, {'frequency': 'c', 'synset': 'slipper.n.01', 'synonyms': ['slipper_(footwear)', 'carpet_slipper_(footwear)'], 'id': 973, 'def': 'low footwear that can be slipped on and off easily; usually worn indoors', 'name': 'slipper_(footwear)'}, {'frequency': 'r', 'synset': 'smoothie.n.02', 'synonyms': ['smoothie'], 'id': 974, 'def': 'a thick smooth drink consisting of fresh fruit pureed with ice cream or yoghurt or milk', 'name': 'smoothie'}, {'frequency': 'r', 'synset': 'snake.n.01', 'synonyms': ['snake', 'serpent'], 'id': 975, 'def': 'limbless scaly elongate reptile; some are venomous', 'name': 'snake'}, {'frequency': 'f', 'synset': 'snowboard.n.01', 'synonyms': ['snowboard'], 'id': 976, 'def': 'a board that resembles a broad ski or a small surfboard; used in a standing position to slide down snow-covered slopes', 'name': 'snowboard'}, {'frequency': 'c', 'synset': 'snowman.n.01', 'synonyms': ['snowman'], 'id': 977, 'def': 'a figure of a person made of packed snow', 'name': 'snowman'}, {'frequency': 'c', 'synset': 'snowmobile.n.01', 'synonyms': ['snowmobile'], 'id': 978, 'def': 'tracked vehicle for travel on snow having skis in front', 'name': 'snowmobile'}, {'frequency': 'f', 'synset': 'soap.n.01', 'synonyms': ['soap'], 'id': 979, 'def': 'a cleansing agent made from the salts of vegetable or animal fats', 'name': 'soap'}, {'frequency': 'f', 'synset': 'soccer_ball.n.01', 'synonyms': ['soccer_ball'], 'id': 980, 'def': "an inflated ball used in playing soccer (called `football' outside of the United States)", 'name': 'soccer_ball'}, {'frequency': 'f', 'synset': 'sock.n.01', 'synonyms': ['sock'], 'id': 981, 'def': 'cloth covering for the foot; worn inside the shoe; reaches to between the ankle and the knee', 'name': 'sock'}, {'frequency': 'f', 'synset': 'sofa.n.01', 'synonyms': ['sofa', 'couch', 'lounge'], 'id': 982, 'def': 'an upholstered seat for more than one person', 'name': 'sofa'}, {'frequency': 'r', 'synset': 'softball.n.01', 'synonyms': ['softball'], 'id': 983, 'def': 'ball used in playing softball', 'name': 'softball'}, {'frequency': 'c', 'synset': 'solar_array.n.01', 'synonyms': ['solar_array', 'solar_battery', 'solar_panel'], 'id': 984, 'def': 'electrical device consisting of a large array of connected solar cells', 'name': 'solar_array'}, {'frequency': 'r', 'synset': 'sombrero.n.02', 'synonyms': ['sombrero'], 'id': 985, 'def': 'a straw hat with a tall crown and broad brim; worn in American southwest and in Mexico', 'name': 'sombrero'}, {'frequency': 'f', 'synset': 'soup.n.01', 'synonyms': ['soup'], 'id': 986, 'def': 'liquid food especially of meat or fish or vegetable stock often containing pieces of solid food', 'name': 'soup'}, {'frequency': 'r', 'synset': 'soup_bowl.n.01', 'synonyms': ['soup_bowl'], 'id': 987, 'def': 'a bowl for serving soup', 'name': 'soup_bowl'}, {'frequency': 'c', 'synset': 'soupspoon.n.01', 'synonyms': ['soupspoon'], 'id': 988, 'def': 'a spoon with a rounded bowl for eating soup', 'name': 'soupspoon'}, {'frequency': 'c', 'synset': 'sour_cream.n.01', 'synonyms': ['sour_cream', 'soured_cream'], 'id': 989, 'def': 'soured light cream', 'name': 'sour_cream'}, {'frequency': 'r', 'synset': 'soya_milk.n.01', 'synonyms': ['soya_milk', 
'soybean_milk', 'soymilk'], 'id': 990, 'def': 'a milk substitute containing soybean flour and water; used in some infant formulas and in making tofu', 'name': 'soya_milk'}, {'frequency': 'r', 'synset': 'space_shuttle.n.01', 'synonyms': ['space_shuttle'], 'id': 991, 'def': "a reusable spacecraft with wings for a controlled descent through the Earth's atmosphere", 'name': 'space_shuttle'}, {'frequency': 'r', 'synset': 'sparkler.n.02', 'synonyms': ['sparkler_(fireworks)'], 'id': 992, 'def': 'a firework that burns slowly and throws out a shower of sparks', 'name': 'sparkler_(fireworks)'}, {'frequency': 'f', 'synset': 'spatula.n.02', 'synonyms': ['spatula'], 'id': 993, 'def': 'a hand tool with a thin flexible blade used to mix or spread soft substances', 'name': 'spatula'}, {'frequency': 'r', 'synset': 'spear.n.01', 'synonyms': ['spear', 'lance'], 'id': 994, 'def': 'a long pointed rod used as a tool or weapon', 'name': 'spear'}, {'frequency': 'f', 'synset': 'spectacles.n.01', 'synonyms': ['spectacles', 'specs', 'eyeglasses', 'glasses'], 'id': 995, 'def': 'optical instrument consisting of a frame that holds a pair of lenses for correcting defective vision', 'name': 'spectacles'}, {'frequency': 'c', 'synset': 'spice_rack.n.01', 'synonyms': ['spice_rack'], 'id': 996, 'def': 'a rack for displaying containers filled with spices', 'name': 'spice_rack'}, {'frequency': 'c', 'synset': 'spider.n.01', 'synonyms': ['spider'], 'id': 997, 'def': 'predatory arachnid with eight legs, two poison fangs, two feelers, and usually two silk-spinning organs at the back end of the body', 'name': 'spider'}, {'frequency': 'r', 'synset': 'spiny_lobster.n.02', 'synonyms': ['crawfish', 'crayfish'], 'id': 998, 'def': 'large edible marine crustacean having a spiny carapace but lacking the large pincers of true lobsters', 'name': 'crawfish'}, {'frequency': 'c', 'synset': 'sponge.n.01', 'synonyms': ['sponge'], 'id': 999, 'def': 'a porous mass usable to absorb water typically used for cleaning', 'name': 'sponge'}, {'frequency': 'f', 'synset': 'spoon.n.01', 'synonyms': ['spoon'], 'id': 1000, 'def': 'a piece of cutlery with a shallow bowl-shaped container and a handle', 'name': 'spoon'}, {'frequency': 'c', 'synset': 'sportswear.n.01', 'synonyms': ['sportswear', 'athletic_wear', 'activewear'], 'id': 1001, 'def': 'attire worn for sport or for casual wear', 'name': 'sportswear'}, {'frequency': 'c', 'synset': 'spotlight.n.02', 'synonyms': ['spotlight'], 'id': 1002, 'def': 'a lamp that produces a strong beam of light to illuminate a restricted area; used to focus attention of a stage performer', 'name': 'spotlight'}, {'frequency': 'r', 'synset': 'squid.n.01', 'synonyms': ['squid_(food)', 'calamari', 'calamary'], 'id': 1003, 'def': '(Italian cuisine) squid prepared as food', 'name': 'squid_(food)'}, {'frequency': 'c', 'synset': 'squirrel.n.01', 'synonyms': ['squirrel'], 'id': 1004, 'def': 'a kind of arboreal rodent having a long bushy tail', 'name': 'squirrel'}, {'frequency': 'r', 'synset': 'stagecoach.n.01', 'synonyms': ['stagecoach'], 'id': 1005, 'def': 'a large coach-and-four formerly used to carry passengers and mail on regular routes between towns', 'name': 'stagecoach'}, {'frequency': 'c', 'synset': 'stapler.n.01', 'synonyms': ['stapler_(stapling_machine)'], 'id': 1006, 'def': 'a machine that inserts staples into sheets of paper in order to fasten them together', 'name': 'stapler_(stapling_machine)'}, {'frequency': 'c', 'synset': 'starfish.n.01', 'synonyms': ['starfish', 'sea_star'], 'id': 1007, 'def': 'echinoderms characterized 
by five arms extending from a central disk', 'name': 'starfish'}, {'frequency': 'f', 'synset': 'statue.n.01', 'synonyms': ['statue_(sculpture)'], 'id': 1008, 'def': 'a sculpture representing a human or animal', 'name': 'statue_(sculpture)'}, {'frequency': 'c', 'synset': 'steak.n.01', 'synonyms': ['steak_(food)'], 'id': 1009, 'def': 'a slice of meat cut from the fleshy part of an animal or large fish', 'name': 'steak_(food)'}, {'frequency': 'r', 'synset': 'steak_knife.n.01', 'synonyms': ['steak_knife'], 'id': 1010, 'def': 'a sharp table knife used in eating steak', 'name': 'steak_knife'}, {'frequency': 'f', 'synset': 'steering_wheel.n.01', 'synonyms': ['steering_wheel'], 'id': 1011, 'def': 'a handwheel that is used for steering', 'name': 'steering_wheel'}, {'frequency': 'r', 'synset': 'step_ladder.n.01', 'synonyms': ['stepladder'], 'id': 1012, 'def': 'a folding portable ladder hinged at the top', 'name': 'stepladder'}, {'frequency': 'c', 'synset': 'step_stool.n.01', 'synonyms': ['step_stool'], 'id': 1013, 'def': 'a stool that has one or two steps that fold under the seat', 'name': 'step_stool'}, {'frequency': 'c', 'synset': 'stereo.n.01', 'synonyms': ['stereo_(sound_system)'], 'id': 1014, 'def': 'electronic device for playing audio', 'name': 'stereo_(sound_system)'}, {'frequency': 'r', 'synset': 'stew.n.02', 'synonyms': ['stew'], 'id': 1015, 'def': 'food prepared by stewing especially meat or fish with vegetables', 'name': 'stew'}, {'frequency': 'r', 'synset': 'stirrer.n.02', 'synonyms': ['stirrer'], 'id': 1016, 'def': 'an implement used for stirring', 'name': 'stirrer'}, {'frequency': 'f', 'synset': 'stirrup.n.01', 'synonyms': ['stirrup'], 'id': 1017, 'def': "support consisting of metal loops into which rider's feet go", 'name': 'stirrup'}, {'frequency': 'f', 'synset': 'stool.n.01', 'synonyms': ['stool'], 'id': 1018, 'def': 'a simple seat without a back or arms', 'name': 'stool'}, {'frequency': 'f', 'synset': 'stop_sign.n.01', 'synonyms': ['stop_sign'], 'id': 1019, 'def': 'a traffic sign to notify drivers that they must come to a complete stop', 'name': 'stop_sign'}, {'frequency': 'f', 'synset': 'stoplight.n.01', 'synonyms': ['brake_light'], 'id': 1020, 'def': 'a red light on the rear of a motor vehicle that signals when the brakes are applied', 'name': 'brake_light'}, {'frequency': 'f', 'synset': 'stove.n.01', 'synonyms': ['stove', 'kitchen_stove', 'range_(kitchen_appliance)', 'kitchen_range', 'cooking_stove'], 'id': 1021, 'def': 'a kitchen appliance used for cooking food', 'name': 'stove'}, {'frequency': 'c', 'synset': 'strainer.n.01', 'synonyms': ['strainer'], 'id': 1022, 'def': 'a filter to retain larger pieces while smaller pieces and liquids pass through', 'name': 'strainer'}, {'frequency': 'f', 'synset': 'strap.n.01', 'synonyms': ['strap'], 'id': 1023, 'def': 'an elongated strip of material for binding things together or holding', 'name': 'strap'}, {'frequency': 'f', 'synset': 'straw.n.04', 'synonyms': ['straw_(for_drinking)', 'drinking_straw'], 'id': 1024, 'def': 'a thin paper or plastic tube used to suck liquids into the mouth', 'name': 'straw_(for_drinking)'}, {'frequency': 'f', 'synset': 'strawberry.n.01', 'synonyms': ['strawberry'], 'id': 1025, 'def': 'sweet fleshy red fruit', 'name': 'strawberry'}, {'frequency': 'f', 'synset': 'street_sign.n.01', 'synonyms': ['street_sign'], 'id': 1026, 'def': 'a sign visible from the street', 'name': 'street_sign'}, {'frequency': 'f', 'synset': 'streetlight.n.01', 'synonyms': ['streetlight', 'street_lamp'], 'id': 1027, 'def': 'a lamp 
supported on a lamppost; for illuminating a street', 'name': 'streetlight'}, {'frequency': 'r', 'synset': 'string_cheese.n.01', 'synonyms': ['string_cheese'], 'id': 1028, 'def': 'cheese formed in long strings twisted together', 'name': 'string_cheese'}, {'frequency': 'r', 'synset': 'stylus.n.02', 'synonyms': ['stylus'], 'id': 1029, 'def': 'a pointed tool for writing or drawing or engraving, including pens', 'name': 'stylus'}, {'frequency': 'r', 'synset': 'subwoofer.n.01', 'synonyms': ['subwoofer'], 'id': 1030, 'def': 'a loudspeaker that is designed to reproduce very low bass frequencies', 'name': 'subwoofer'}, {'frequency': 'r', 'synset': 'sugar_bowl.n.01', 'synonyms': ['sugar_bowl'], 'id': 1031, 'def': 'a dish in which sugar is served', 'name': 'sugar_bowl'}, {'frequency': 'r', 'synset': 'sugarcane.n.01', 'synonyms': ['sugarcane_(plant)'], 'id': 1032, 'def': 'juicy canes whose sap is a source of molasses and commercial sugar; fresh canes are sometimes chewed for the juice', 'name': 'sugarcane_(plant)'}, {'frequency': 'f', 'synset': 'suit.n.01', 'synonyms': ['suit_(clothing)'], 'id': 1033, 'def': 'a set of garments (usually including a jacket and trousers or skirt) for outerwear all of the same fabric and color', 'name': 'suit_(clothing)'}, {'frequency': 'c', 'synset': 'sunflower.n.01', 'synonyms': ['sunflower'], 'id': 1034, 'def': 'any plant of the genus Helianthus having large flower heads with dark disk florets and showy yellow rays', 'name': 'sunflower'}, {'frequency': 'f', 'synset': 'sunglasses.n.01', 'synonyms': ['sunglasses'], 'id': 1035, 'def': 'spectacles that are darkened or polarized to protect the eyes from the glare of the sun', 'name': 'sunglasses'}, {'frequency': 'c', 'synset': 'sunhat.n.01', 'synonyms': ['sunhat'], 'id': 1036, 'def': 'a hat with a broad brim that protects the face from direct exposure to the sun', 'name': 'sunhat'}, {'frequency': 'f', 'synset': 'surfboard.n.01', 'synonyms': ['surfboard'], 'id': 1037, 'def': 'a narrow buoyant board for riding surf', 'name': 'surfboard'}, {'frequency': 'c', 'synset': 'sushi.n.01', 'synonyms': ['sushi'], 'id': 1038, 'def': 'rice (with raw fish) wrapped in seaweed', 'name': 'sushi'}, {'frequency': 'c', 'synset': 'swab.n.02', 'synonyms': ['mop'], 'id': 1039, 'def': 'cleaning implement consisting of absorbent material fastened to a handle; for cleaning floors', 'name': 'mop'}, {'frequency': 'c', 'synset': 'sweat_pants.n.01', 'synonyms': ['sweat_pants'], 'id': 1040, 'def': 'loose-fitting trousers with elastic cuffs; worn by athletes', 'name': 'sweat_pants'}, {'frequency': 'c', 'synset': 'sweatband.n.02', 'synonyms': ['sweatband'], 'id': 1041, 'def': 'a band of material tied around the forehead or wrist to absorb sweat', 'name': 'sweatband'}, {'frequency': 'f', 'synset': 'sweater.n.01', 'synonyms': ['sweater'], 'id': 1042, 'def': 'a crocheted or knitted garment covering the upper part of the body', 'name': 'sweater'}, {'frequency': 'f', 'synset': 'sweatshirt.n.01', 'synonyms': ['sweatshirt'], 'id': 1043, 'def': 'cotton knit pullover with long sleeves worn during athletic activity', 'name': 'sweatshirt'}, {'frequency': 'c', 'synset': 'sweet_potato.n.02', 'synonyms': ['sweet_potato'], 'id': 1044, 'def': 'the edible tuberous root of the sweet potato vine', 'name': 'sweet_potato'}, {'frequency': 'f', 'synset': 'swimsuit.n.01', 'synonyms': ['swimsuit', 'swimwear', 'bathing_suit', 'swimming_costume', 'bathing_costume', 'swimming_trunks', 'bathing_trunks'], 'id': 1045, 'def': 'garment worn for swimming', 'name': 'swimsuit'}, {'frequency': 
'c', 'synset': 'sword.n.01', 'synonyms': ['sword'], 'id': 1046, 'def': 'a cutting or thrusting weapon that has a long metal blade', 'name': 'sword'}, {'frequency': 'r', 'synset': 'syringe.n.01', 'synonyms': ['syringe'], 'id': 1047, 'def': 'a medical instrument used to inject or withdraw fluids', 'name': 'syringe'}, {'frequency': 'r', 'synset': 'tabasco.n.02', 'synonyms': ['Tabasco_sauce'], 'id': 1048, 'def': 'very spicy sauce (trade name Tabasco) made from fully-aged red peppers', 'name': 'Tabasco_sauce'}, {'frequency': 'r', 'synset': 'table-tennis_table.n.01', 'synonyms': ['table-tennis_table', 'ping-pong_table'], 'id': 1049, 'def': 'a table used for playing table tennis', 'name': 'table-tennis_table'}, {'frequency': 'f', 'synset': 'table.n.02', 'synonyms': ['table'], 'id': 1050, 'def': 'a piece of furniture having a smooth flat top that is usually supported by one or more vertical legs', 'name': 'table'}, {'frequency': 'c', 'synset': 'table_lamp.n.01', 'synonyms': ['table_lamp'], 'id': 1051, 'def': 'a lamp that sits on a table', 'name': 'table_lamp'}, {'frequency': 'f', 'synset': 'tablecloth.n.01', 'synonyms': ['tablecloth'], 'id': 1052, 'def': 'a covering spread over a dining table', 'name': 'tablecloth'}, {'frequency': 'r', 'synset': 'tachometer.n.01', 'synonyms': ['tachometer'], 'id': 1053, 'def': 'measuring instrument for indicating speed of rotation', 'name': 'tachometer'}, {'frequency': 'r', 'synset': 'taco.n.02', 'synonyms': ['taco'], 'id': 1054, 'def': 'a small tortilla cupped around a filling', 'name': 'taco'}, {'frequency': 'f', 'synset': 'tag.n.02', 'synonyms': ['tag'], 'id': 1055, 'def': 'a label associated with something for the purpose of identification or information', 'name': 'tag'}, {'frequency': 'f', 'synset': 'taillight.n.01', 'synonyms': ['taillight', 'rear_light'], 'id': 1056, 'def': 'lamp (usually red) mounted at the rear of a motor vehicle', 'name': 'taillight'}, {'frequency': 'r', 'synset': 'tambourine.n.01', 'synonyms': ['tambourine'], 'id': 1057, 'def': 'a shallow drum with a single drumhead and with metallic disks in the sides', 'name': 'tambourine'}, {'frequency': 'r', 'synset': 'tank.n.01', 'synonyms': ['army_tank', 'armored_combat_vehicle', 'armoured_combat_vehicle'], 'id': 1058, 'def': 'an enclosed armored military vehicle; has a cannon and moves on caterpillar treads', 'name': 'army_tank'}, {'frequency': 'f', 'synset': 'tank.n.02', 'synonyms': ['tank_(storage_vessel)', 'storage_tank'], 'id': 1059, 'def': 'a large (usually metallic) vessel for holding gases or liquids', 'name': 'tank_(storage_vessel)'}, {'frequency': 'f', 'synset': 'tank_top.n.01', 'synonyms': ['tank_top_(clothing)'], 'id': 1060, 'def': 'a tight-fitting sleeveless shirt with wide shoulder straps and low neck and no front opening', 'name': 'tank_top_(clothing)'}, {'frequency': 'f', 'synset': 'tape.n.01', 'synonyms': ['tape_(sticky_cloth_or_paper)'], 'id': 1061, 'def': 'a long thin piece of cloth or paper as used for binding or fastening', 'name': 'tape_(sticky_cloth_or_paper)'}, {'frequency': 'c', 'synset': 'tape.n.04', 'synonyms': ['tape_measure', 'measuring_tape'], 'id': 1062, 'def': 'measuring instrument consisting of a narrow strip (cloth or metal) marked in inches or centimeters and used for measuring lengths', 'name': 'tape_measure'}, {'frequency': 'c', 'synset': 'tapestry.n.02', 'synonyms': ['tapestry'], 'id': 1063, 'def': 'a heavy textile with a woven design; used for curtains and upholstery', 'name': 'tapestry'}, {'frequency': 'f', 'synset': 'tarpaulin.n.01', 'synonyms': ['tarp'], 
'id': 1064, 'def': 'waterproofed canvas', 'name': 'tarp'}, {'frequency': 'c', 'synset': 'tartan.n.01', 'synonyms': ['tartan', 'plaid'], 'id': 1065, 'def': 'a cloth having a crisscross design', 'name': 'tartan'}, {'frequency': 'c', 'synset': 'tassel.n.01', 'synonyms': ['tassel'], 'id': 1066, 'def': 'adornment consisting of a bunch of cords fastened at one end', 'name': 'tassel'}, {'frequency': 'c', 'synset': 'tea_bag.n.01', 'synonyms': ['tea_bag'], 'id': 1067, 'def': 'a measured amount of tea in a bag for an individual serving of tea', 'name': 'tea_bag'}, {'frequency': 'c', 'synset': 'teacup.n.02', 'synonyms': ['teacup'], 'id': 1068, 'def': 'a cup from which tea is drunk', 'name': 'teacup'}, {'frequency': 'c', 'synset': 'teakettle.n.01', 'synonyms': ['teakettle'], 'id': 1069, 'def': 'kettle for boiling water to make tea', 'name': 'teakettle'}, {'frequency': 'f', 'synset': 'teapot.n.01', 'synonyms': ['teapot'], 'id': 1070, 'def': 'pot for brewing tea; usually has a spout and handle', 'name': 'teapot'}, {'frequency': 'f', 'synset': 'teddy.n.01', 'synonyms': ['teddy_bear'], 'id': 1071, 'def': "plaything consisting of a child's toy bear (usually plush and stuffed with soft materials)", 'name': 'teddy_bear'}, {'frequency': 'f', 'synset': 'telephone.n.01', 'synonyms': ['telephone', 'phone', 'telephone_set'], 'id': 1072, 'def': 'electronic device for communicating by voice over long distances (includes wired and wireless/cell phones)', 'name': 'telephone'}, {'frequency': 'c', 'synset': 'telephone_booth.n.01', 'synonyms': ['telephone_booth', 'phone_booth', 'call_box', 'telephone_box', 'telephone_kiosk'], 'id': 1073, 'def': 'booth for using a telephone', 'name': 'telephone_booth'}, {'frequency': 'f', 'synset': 'telephone_pole.n.01', 'synonyms': ['telephone_pole', 'telegraph_pole', 'telegraph_post'], 'id': 1074, 'def': 'tall pole supporting telephone wires', 'name': 'telephone_pole'}, {'frequency': 'r', 'synset': 'telephoto_lens.n.01', 'synonyms': ['telephoto_lens', 'zoom_lens'], 'id': 1075, 'def': 'a camera lens that magnifies the image', 'name': 'telephoto_lens'}, {'frequency': 'c', 'synset': 'television_camera.n.01', 'synonyms': ['television_camera', 'tv_camera'], 'id': 1076, 'def': 'television equipment for capturing and recording video', 'name': 'television_camera'}, {'frequency': 'f', 'synset': 'television_receiver.n.01', 'synonyms': ['television_set', 'tv', 'tv_set'], 'id': 1077, 'def': 'an electronic device that receives television signals and displays them on a screen', 'name': 'television_set'}, {'frequency': 'f', 'synset': 'tennis_ball.n.01', 'synonyms': ['tennis_ball'], 'id': 1078, 'def': 'ball about the size of a fist used in playing tennis', 'name': 'tennis_ball'}, {'frequency': 'f', 'synset': 'tennis_racket.n.01', 'synonyms': ['tennis_racket'], 'id': 1079, 'def': 'a racket used to play tennis', 'name': 'tennis_racket'}, {'frequency': 'r', 'synset': 'tequila.n.01', 'synonyms': ['tequila'], 'id': 1080, 'def': 'Mexican liquor made from fermented juices of an agave plant', 'name': 'tequila'}, {'frequency': 'c', 'synset': 'thermometer.n.01', 'synonyms': ['thermometer'], 'id': 1081, 'def': 'measuring instrument for measuring temperature', 'name': 'thermometer'}, {'frequency': 'c', 'synset': 'thermos.n.01', 'synonyms': ['thermos_bottle'], 'id': 1082, 'def': 'vacuum flask that preserves temperature of hot or cold drinks', 'name': 'thermos_bottle'}, {'frequency': 'f', 'synset': 'thermostat.n.01', 'synonyms': ['thermostat'], 'id': 1083, 'def': 'a regulator for automatically regulating 
temperature by starting or stopping the supply of heat', 'name': 'thermostat'}, {'frequency': 'r', 'synset': 'thimble.n.02', 'synonyms': ['thimble'], 'id': 1084, 'def': 'a small metal cap to protect the finger while sewing; can be used as a small container', 'name': 'thimble'}, {'frequency': 'c', 'synset': 'thread.n.01', 'synonyms': ['thread', 'yarn'], 'id': 1085, 'def': 'a fine cord of twisted fibers (of cotton or silk or wool or nylon etc.) used in sewing and weaving', 'name': 'thread'}, {'frequency': 'c', 'synset': 'thumbtack.n.01', 'synonyms': ['thumbtack', 'drawing_pin', 'pushpin'], 'id': 1086, 'def': 'a tack for attaching papers to a bulletin board or drawing board', 'name': 'thumbtack'}, {'frequency': 'c', 'synset': 'tiara.n.01', 'synonyms': ['tiara'], 'id': 1087, 'def': 'a jeweled headdress worn by women on formal occasions', 'name': 'tiara'}, {'frequency': 'c', 'synset': 'tiger.n.02', 'synonyms': ['tiger'], 'id': 1088, 'def': 'large feline of forests in most of Asia having a tawny coat with black stripes', 'name': 'tiger'}, {'frequency': 'c', 'synset': 'tights.n.01', 'synonyms': ['tights_(clothing)', 'leotards'], 'id': 1089, 'def': 'skintight knit hose covering the body from the waist to the feet worn by acrobats and dancers and as stockings by women and girls', 'name': 'tights_(clothing)'}, {'frequency': 'c', 'synset': 'timer.n.01', 'synonyms': ['timer', 'stopwatch'], 'id': 1090, 'def': 'a timepiece that measures a time interval and signals its end', 'name': 'timer'}, {'frequency': 'f', 'synset': 'tinfoil.n.01', 'synonyms': ['tinfoil'], 'id': 1091, 'def': 'foil made of tin or an alloy of tin and lead', 'name': 'tinfoil'}, {'frequency': 'c', 'synset': 'tinsel.n.01', 'synonyms': ['tinsel'], 'id': 1092, 'def': 'a showy decoration that is basically valueless', 'name': 'tinsel'}, {'frequency': 'f', 'synset': 'tissue.n.02', 'synonyms': ['tissue_paper'], 'id': 1093, 'def': 'a soft thin (usually translucent) paper', 'name': 'tissue_paper'}, {'frequency': 'c', 'synset': 'toast.n.01', 'synonyms': ['toast_(food)'], 'id': 1094, 'def': 'slice of bread that has been toasted', 'name': 'toast_(food)'}, {'frequency': 'f', 'synset': 'toaster.n.02', 'synonyms': ['toaster'], 'id': 1095, 'def': 'a kitchen appliance (usually electric) for toasting bread', 'name': 'toaster'}, {'frequency': 'f', 'synset': 'toaster_oven.n.01', 'synonyms': ['toaster_oven'], 'id': 1096, 'def': 'kitchen appliance consisting of a small electric oven for toasting or warming food', 'name': 'toaster_oven'}, {'frequency': 'f', 'synset': 'toilet.n.02', 'synonyms': ['toilet'], 'id': 1097, 'def': 'a plumbing fixture for defecation and urination', 'name': 'toilet'}, {'frequency': 'f', 'synset': 'toilet_tissue.n.01', 'synonyms': ['toilet_tissue', 'toilet_paper', 'bathroom_tissue'], 'id': 1098, 'def': 'a soft thin absorbent paper for use in toilets', 'name': 'toilet_tissue'}, {'frequency': 'f', 'synset': 'tomato.n.01', 'synonyms': ['tomato'], 'id': 1099, 'def': 'mildly acid red or yellow pulpy fruit eaten as a vegetable', 'name': 'tomato'}, {'frequency': 'f', 'synset': 'tongs.n.01', 'synonyms': ['tongs'], 'id': 1100, 'def': 'any of various devices for taking hold of objects; usually have two hinged legs with handles above and pointed hooks below', 'name': 'tongs'}, {'frequency': 'c', 'synset': 'toolbox.n.01', 'synonyms': ['toolbox'], 'id': 1101, 'def': 'a box or chest or cabinet for holding hand tools', 'name': 'toolbox'}, {'frequency': 'f', 'synset': 'toothbrush.n.01', 'synonyms': ['toothbrush'], 'id': 1102, 'def': 'small brush; has 
long handle; used to clean teeth', 'name': 'toothbrush'}, {'frequency': 'f', 'synset': 'toothpaste.n.01', 'synonyms': ['toothpaste'], 'id': 1103, 'def': 'a dentifrice in the form of a paste', 'name': 'toothpaste'}, {'frequency': 'f', 'synset': 'toothpick.n.01', 'synonyms': ['toothpick'], 'id': 1104, 'def': 'pick consisting of a small strip of wood or plastic; used to pick food from between the teeth', 'name': 'toothpick'}, {'frequency': 'f', 'synset': 'top.n.09', 'synonyms': ['cover'], 'id': 1105, 'def': 'covering for a hole (especially a hole in the top of a container)', 'name': 'cover'}, {'frequency': 'c', 'synset': 'tortilla.n.01', 'synonyms': ['tortilla'], 'id': 1106, 'def': 'thin unleavened pancake made from cornmeal or wheat flour', 'name': 'tortilla'}, {'frequency': 'c', 'synset': 'tow_truck.n.01', 'synonyms': ['tow_truck'], 'id': 1107, 'def': 'a truck equipped to hoist and pull wrecked cars (or to remove cars from no-parking zones)', 'name': 'tow_truck'}, {'frequency': 'f', 'synset': 'towel.n.01', 'synonyms': ['towel'], 'id': 1108, 'def': 'a rectangular piece of absorbent cloth (or paper) for drying or wiping', 'name': 'towel'}, {'frequency': 'f', 'synset': 'towel_rack.n.01', 'synonyms': ['towel_rack', 'towel_rail', 'towel_bar'], 'id': 1109, 'def': 'a rack consisting of one or more bars on which towels can be hung', 'name': 'towel_rack'}, {'frequency': 'f', 'synset': 'toy.n.03', 'synonyms': ['toy'], 'id': 1110, 'def': 'a device regarded as providing amusement', 'name': 'toy'}, {'frequency': 'c', 'synset': 'tractor.n.01', 'synonyms': ['tractor_(farm_equipment)'], 'id': 1111, 'def': 'a wheeled vehicle with large wheels; used in farming and other applications', 'name': 'tractor_(farm_equipment)'}, {'frequency': 'f', 'synset': 'traffic_light.n.01', 'synonyms': ['traffic_light'], 'id': 1112, 'def': 'a device to control vehicle traffic often consisting of three or more lights', 'name': 'traffic_light'}, {'frequency': 'c', 'synset': 'trail_bike.n.01', 'synonyms': ['dirt_bike'], 'id': 1113, 'def': 'a lightweight motorcycle equipped with rugged tires and suspension for off-road use', 'name': 'dirt_bike'}, {'frequency': 'f', 'synset': 'trailer_truck.n.01', 'synonyms': ['trailer_truck', 'tractor_trailer', 'trucking_rig', 'articulated_lorry', 'semi_truck'], 'id': 1114, 'def': 'a truck consisting of a tractor and trailer together', 'name': 'trailer_truck'}, {'frequency': 'f', 'synset': 'train.n.01', 'synonyms': ['train_(railroad_vehicle)', 'railroad_train'], 'id': 1115, 'def': 'public or private transport provided by a line of railway cars coupled together and drawn by a locomotive', 'name': 'train_(railroad_vehicle)'}, {'frequency': 'r', 'synset': 'trampoline.n.01', 'synonyms': ['trampoline'], 'id': 1116, 'def': 'gymnastic apparatus consisting of a strong canvas sheet attached with springs to a metal frame', 'name': 'trampoline'}, {'frequency': 'f', 'synset': 'tray.n.01', 'synonyms': ['tray'], 'id': 1117, 'def': 'an open receptacle for holding or displaying or serving articles or food', 'name': 'tray'}, {'frequency': 'r', 'synset': 'trench_coat.n.01', 'synonyms': ['trench_coat'], 'id': 1118, 'def': 'a military style raincoat; belted with deep pockets', 'name': 'trench_coat'}, {'frequency': 'r', 'synset': 'triangle.n.05', 'synonyms': ['triangle_(musical_instrument)'], 'id': 1119, 'def': 'a percussion instrument consisting of a metal bar bent in the shape of an open triangle', 'name': 'triangle_(musical_instrument)'}, {'frequency': 'c', 'synset': 'tricycle.n.01', 'synonyms': ['tricycle'], 'id': 
1120, 'def': 'a vehicle with three wheels that is moved by foot pedals', 'name': 'tricycle'}, {'frequency': 'f', 'synset': 'tripod.n.01', 'synonyms': ['tripod'], 'id': 1121, 'def': 'a three-legged rack used for support', 'name': 'tripod'}, {'frequency': 'f', 'synset': 'trouser.n.01', 'synonyms': ['trousers', 'pants_(clothing)'], 'id': 1122, 'def': 'a garment extending from the waist to the knee or ankle, covering each leg separately', 'name': 'trousers'}, {'frequency': 'f', 'synset': 'truck.n.01', 'synonyms': ['truck'], 'id': 1123, 'def': 'an automotive vehicle suitable for hauling', 'name': 'truck'}, {'frequency': 'r', 'synset': 'truffle.n.03', 'synonyms': ['truffle_(chocolate)', 'chocolate_truffle'], 'id': 1124, 'def': 'creamy chocolate candy', 'name': 'truffle_(chocolate)'}, {'frequency': 'c', 'synset': 'trunk.n.02', 'synonyms': ['trunk'], 'id': 1125, 'def': 'luggage consisting of a large strong case used when traveling or for storage', 'name': 'trunk'}, {'frequency': 'r', 'synset': 'tub.n.02', 'synonyms': ['vat'], 'id': 1126, 'def': 'a large vessel for holding or storing liquids', 'name': 'vat'}, {'frequency': 'c', 'synset': 'turban.n.01', 'synonyms': ['turban'], 'id': 1127, 'def': 'a traditional headdress consisting of a long scarf wrapped around the head', 'name': 'turban'}, {'frequency': 'c', 'synset': 'turkey.n.04', 'synonyms': ['turkey_(food)'], 'id': 1128, 'def': 'flesh of large domesticated fowl usually roasted', 'name': 'turkey_(food)'}, {'frequency': 'r', 'synset': 'turnip.n.01', 'synonyms': ['turnip'], 'id': 1129, 'def': 'widely cultivated plant having a large fleshy edible white or yellow root', 'name': 'turnip'}, {'frequency': 'c', 'synset': 'turtle.n.02', 'synonyms': ['turtle'], 'id': 1130, 'def': 'any of various aquatic and land reptiles having a bony shell and flipper-like limbs for swimming', 'name': 'turtle'}, {'frequency': 'c', 'synset': 'turtleneck.n.01', 'synonyms': ['turtleneck_(clothing)', 'polo-neck'], 'id': 1131, 'def': 'a sweater or jersey with a high close-fitting collar', 'name': 'turtleneck_(clothing)'}, {'frequency': 'c', 'synset': 'typewriter.n.01', 'synonyms': ['typewriter'], 'id': 1132, 'def': 'hand-operated character printer for printing written messages one character at a time', 'name': 'typewriter'}, {'frequency': 'f', 'synset': 'umbrella.n.01', 'synonyms': ['umbrella'], 'id': 1133, 'def': 'a lightweight handheld collapsible canopy', 'name': 'umbrella'}, {'frequency': 'f', 'synset': 'underwear.n.01', 'synonyms': ['underwear', 'underclothes', 'underclothing', 'underpants'], 'id': 1134, 'def': 'undergarment worn next to the skin and under the outer garments', 'name': 'underwear'}, {'frequency': 'r', 'synset': 'unicycle.n.01', 'synonyms': ['unicycle'], 'id': 1135, 'def': 'a vehicle with a single wheel that is driven by pedals', 'name': 'unicycle'}, {'frequency': 'f', 'synset': 'urinal.n.01', 'synonyms': ['urinal'], 'id': 1136, 'def': 'a plumbing fixture (usually attached to the wall) used by men to urinate', 'name': 'urinal'}, {'frequency': 'c', 'synset': 'urn.n.01', 'synonyms': ['urn'], 'id': 1137, 'def': 'a large vase that usually has a pedestal or feet', 'name': 'urn'}, {'frequency': 'c', 'synset': 'vacuum.n.04', 'synonyms': ['vacuum_cleaner'], 'id': 1138, 'def': 'an electrical home appliance that cleans by suction', 'name': 'vacuum_cleaner'}, {'frequency': 'f', 'synset': 'vase.n.01', 'synonyms': ['vase'], 'id': 1139, 'def': 'an open jar of glass or porcelain used as an ornament or to hold flowers', 'name': 'vase'}, {'frequency': 'c', 'synset': 
'vending_machine.n.01', 'synonyms': ['vending_machine'], 'id': 1140, 'def': 'a slot machine for selling goods', 'name': 'vending_machine'}, {'frequency': 'f', 'synset': 'vent.n.01', 'synonyms': ['vent', 'blowhole', 'air_vent'], 'id': 1141, 'def': 'a hole for the escape of gas or air', 'name': 'vent'}, {'frequency': 'f', 'synset': 'vest.n.01', 'synonyms': ['vest', 'waistcoat'], 'id': 1142, 'def': "a man's sleeveless garment worn underneath a coat", 'name': 'vest'}, {'frequency': 'c', 'synset': 'videotape.n.01', 'synonyms': ['videotape'], 'id': 1143, 'def': 'a video recording made on magnetic tape', 'name': 'videotape'}, {'frequency': 'r', 'synset': 'vinegar.n.01', 'synonyms': ['vinegar'], 'id': 1144, 'def': 'sour-tasting liquid produced usually by oxidation of the alcohol in wine or cider and used as a condiment or food preservative', 'name': 'vinegar'}, {'frequency': 'r', 'synset': 'violin.n.01', 'synonyms': ['violin', 'fiddle'], 'id': 1145, 'def': 'bowed stringed instrument that is the highest member of the violin family', 'name': 'violin'}, {'frequency': 'r', 'synset': 'vodka.n.01', 'synonyms': ['vodka'], 'id': 1146, 'def': 'unaged colorless liquor originating in Russia', 'name': 'vodka'}, {'frequency': 'c', 'synset': 'volleyball.n.02', 'synonyms': ['volleyball'], 'id': 1147, 'def': 'an inflated ball used in playing volleyball', 'name': 'volleyball'}, {'frequency': 'r', 'synset': 'vulture.n.01', 'synonyms': ['vulture'], 'id': 1148, 'def': 'any of various large birds of prey having naked heads and weak claws and feeding chiefly on carrion', 'name': 'vulture'}, {'frequency': 'c', 'synset': 'waffle.n.01', 'synonyms': ['waffle'], 'id': 1149, 'def': 'pancake batter baked in a waffle iron', 'name': 'waffle'}, {'frequency': 'r', 'synset': 'waffle_iron.n.01', 'synonyms': ['waffle_iron'], 'id': 1150, 'def': 'a kitchen appliance for baking waffles', 'name': 'waffle_iron'}, {'frequency': 'c', 'synset': 'wagon.n.01', 'synonyms': ['wagon'], 'id': 1151, 'def': 'any of various kinds of wheeled vehicles drawn by an animal or a tractor', 'name': 'wagon'}, {'frequency': 'c', 'synset': 'wagon_wheel.n.01', 'synonyms': ['wagon_wheel'], 'id': 1152, 'def': 'a wheel of a wagon', 'name': 'wagon_wheel'}, {'frequency': 'c', 'synset': 'walking_stick.n.01', 'synonyms': ['walking_stick'], 'id': 1153, 'def': 'a stick carried in the hand for support in walking', 'name': 'walking_stick'}, {'frequency': 'c', 'synset': 'wall_clock.n.01', 'synonyms': ['wall_clock'], 'id': 1154, 'def': 'a clock mounted on a wall', 'name': 'wall_clock'}, {'frequency': 'f', 'synset': 'wall_socket.n.01', 'synonyms': ['wall_socket', 'wall_plug', 'electric_outlet', 'electrical_outlet', 'outlet', 'electric_receptacle'], 'id': 1155, 'def': 'receptacle providing a place in a wiring system where current can be taken to run electrical devices', 'name': 'wall_socket'}, {'frequency': 'f', 'synset': 'wallet.n.01', 'synonyms': ['wallet', 'billfold'], 'id': 1156, 'def': 'a pocket-size case for holding papers and paper money', 'name': 'wallet'}, {'frequency': 'r', 'synset': 'walrus.n.01', 'synonyms': ['walrus'], 'id': 1157, 'def': 'either of two large northern marine mammals having ivory tusks and tough hide over thick blubber', 'name': 'walrus'}, {'frequency': 'r', 'synset': 'wardrobe.n.01', 'synonyms': ['wardrobe'], 'id': 1158, 'def': 'a tall piece of furniture that provides storage space for clothes; has a door and rails or hooks for hanging clothes', 'name': 'wardrobe'}, {'frequency': 'r', 'synset': 'washbasin.n.01', 'synonyms': ['washbasin', 
'basin_(for_washing)', 'washbowl', 'washstand', 'handbasin'], 'id': 1159, 'def': 'a bathroom sink that is permanently installed and connected to a water supply and drainpipe; where you can wash your hands and face', 'name': 'washbasin'}, {'frequency': 'c', 'synset': 'washer.n.03', 'synonyms': ['automatic_washer', 'washing_machine'], 'id': 1160, 'def': 'a home appliance for washing clothes and linens automatically', 'name': 'automatic_washer'}, {'frequency': 'f', 'synset': 'watch.n.01', 'synonyms': ['watch', 'wristwatch'], 'id': 1161, 'def': 'a small, portable timepiece', 'name': 'watch'}, {'frequency': 'f', 'synset': 'water_bottle.n.01', 'synonyms': ['water_bottle'], 'id': 1162, 'def': 'a bottle for holding water', 'name': 'water_bottle'}, {'frequency': 'c', 'synset': 'water_cooler.n.01', 'synonyms': ['water_cooler'], 'id': 1163, 'def': 'a device for cooling and dispensing drinking water', 'name': 'water_cooler'}, {'frequency': 'c', 'synset': 'water_faucet.n.01', 'synonyms': ['water_faucet', 'water_tap', 'tap_(water_faucet)'], 'id': 1164, 'def': 'a faucet for drawing water from a pipe or cask', 'name': 'water_faucet'}, {'frequency': 'r', 'synset': 'water_heater.n.01', 'synonyms': ['water_heater', 'hot-water_heater'], 'id': 1165, 'def': 'a heater and storage tank to supply heated water', 'name': 'water_heater'}, {'frequency': 'c', 'synset': 'water_jug.n.01', 'synonyms': ['water_jug'], 'id': 1166, 'def': 'a jug that holds water', 'name': 'water_jug'}, {'frequency': 'r', 'synset': 'water_pistol.n.01', 'synonyms': ['water_gun', 'squirt_gun'], 'id': 1167, 'def': 'plaything consisting of a toy pistol that squirts water', 'name': 'water_gun'}, {'frequency': 'c', 'synset': 'water_scooter.n.01', 'synonyms': ['water_scooter', 'sea_scooter', 'jet_ski'], 'id': 1168, 'def': 'a motorboat resembling a motor scooter (NOT A SURFBOARD OR WATER SKI)', 'name': 'water_scooter'}, {'frequency': 'c', 'synset': 'water_ski.n.01', 'synonyms': ['water_ski'], 'id': 1169, 'def': 'broad ski for skimming over water towed by a speedboat (DO NOT MARK WATER)', 'name': 'water_ski'}, {'frequency': 'c', 'synset': 'water_tower.n.01', 'synonyms': ['water_tower'], 'id': 1170, 'def': 'a large reservoir for water', 'name': 'water_tower'}, {'frequency': 'c', 'synset': 'watering_can.n.01', 'synonyms': ['watering_can'], 'id': 1171, 'def': 'a container with a handle and a spout with a perforated nozzle; used to sprinkle water over plants', 'name': 'watering_can'}, {'frequency': 'f', 'synset': 'watermelon.n.02', 'synonyms': ['watermelon'], 'id': 1172, 'def': 'large oblong or roundish melon with a hard green rind and sweet watery red or occasionally yellowish pulp', 'name': 'watermelon'}, {'frequency': 'f', 'synset': 'weathervane.n.01', 'synonyms': ['weathervane', 'vane_(weathervane)', 'wind_vane'], 'id': 1173, 'def': 'mechanical device attached to an elevated structure; rotates freely to show the direction of the wind', 'name': 'weathervane'}, {'frequency': 'c', 'synset': 'webcam.n.01', 'synonyms': ['webcam'], 'id': 1174, 'def': 'a digital camera designed to take digital photographs and transmit them over the internet', 'name': 'webcam'}, {'frequency': 'c', 'synset': 'wedding_cake.n.01', 'synonyms': ['wedding_cake', 'bridecake'], 'id': 1175, 'def': 'a rich cake with two or more tiers and covered with frosting and decorations; served at a wedding reception', 'name': 'wedding_cake'}, {'frequency': 'c', 'synset': 'wedding_ring.n.01', 'synonyms': ['wedding_ring', 'wedding_band'], 'id': 1176, 'def': 'a ring given to the bride and/or groom at 
the wedding', 'name': 'wedding_ring'}, {'frequency': 'f', 'synset': 'wet_suit.n.01', 'synonyms': ['wet_suit'], 'id': 1177, 'def': 'a close-fitting garment made of a permeable material; worn in cold water to retain body heat', 'name': 'wet_suit'}, {'frequency': 'f', 'synset': 'wheel.n.01', 'synonyms': ['wheel'], 'id': 1178, 'def': 'a circular frame with spokes (or a solid disc) that can rotate on a shaft or axle', 'name': 'wheel'}, {'frequency': 'c', 'synset': 'wheelchair.n.01', 'synonyms': ['wheelchair'], 'id': 1179, 'def': 'a movable chair mounted on large wheels', 'name': 'wheelchair'}, {'frequency': 'c', 'synset': 'whipped_cream.n.01', 'synonyms': ['whipped_cream'], 'id': 1180, 'def': 'cream that has been beaten until light and fluffy', 'name': 'whipped_cream'}, {'frequency': 'c', 'synset': 'whistle.n.03', 'synonyms': ['whistle'], 'id': 1181, 'def': 'a small wind instrument that produces a whistling sound by blowing into it', 'name': 'whistle'}, {'frequency': 'c', 'synset': 'wig.n.01', 'synonyms': ['wig'], 'id': 1182, 'def': 'hairpiece covering the head and made of real or synthetic hair', 'name': 'wig'}, {'frequency': 'c', 'synset': 'wind_chime.n.01', 'synonyms': ['wind_chime'], 'id': 1183, 'def': 'a decorative arrangement of pieces of metal or glass or pottery that hang together loosely so the wind can cause them to tinkle', 'name': 'wind_chime'}, {'frequency': 'c', 'synset': 'windmill.n.01', 'synonyms': ['windmill'], 'id': 1184, 'def': 'A mill or turbine that is powered by wind', 'name': 'windmill'}, {'frequency': 'c', 'synset': 'window_box.n.01', 'synonyms': ['window_box_(for_plants)'], 'id': 1185, 'def': 'a container for growing plants on a windowsill', 'name': 'window_box_(for_plants)'}, {'frequency': 'f', 'synset': 'windshield_wiper.n.01', 'synonyms': ['windshield_wiper', 'windscreen_wiper', 'wiper_(for_windshield/screen)'], 'id': 1186, 'def': 'a mechanical device that cleans the windshield', 'name': 'windshield_wiper'}, {'frequency': 'c', 'synset': 'windsock.n.01', 'synonyms': ['windsock', 'air_sock', 'air-sleeve', 'wind_sleeve', 'wind_cone'], 'id': 1187, 'def': 'a truncated cloth cone mounted on a mast/pole; shows wind direction', 'name': 'windsock'}, {'frequency': 'f', 'synset': 'wine_bottle.n.01', 'synonyms': ['wine_bottle'], 'id': 1188, 'def': 'a bottle for holding wine', 'name': 'wine_bottle'}, {'frequency': 'c', 'synset': 'wine_bucket.n.01', 'synonyms': ['wine_bucket', 'wine_cooler'], 'id': 1189, 'def': 'a bucket of ice used to chill a bottle of wine', 'name': 'wine_bucket'}, {'frequency': 'f', 'synset': 'wineglass.n.01', 'synonyms': ['wineglass'], 'id': 1190, 'def': 'a glass that has a stem and in which wine is served', 'name': 'wineglass'}, {'frequency': 'f', 'synset': 'winker.n.02', 'synonyms': ['blinder_(for_horses)'], 'id': 1191, 'def': 'blinds that prevent a horse from seeing something on either side', 'name': 'blinder_(for_horses)'}, {'frequency': 'c', 'synset': 'wok.n.01', 'synonyms': ['wok'], 'id': 1192, 'def': 'pan with a convex bottom; used for frying in Chinese cooking', 'name': 'wok'}, {'frequency': 'r', 'synset': 'wolf.n.01', 'synonyms': ['wolf'], 'id': 1193, 'def': 'a wild carnivorous mammal of the dog family, living and hunting in packs', 'name': 'wolf'}, {'frequency': 'c', 'synset': 'wooden_spoon.n.02', 'synonyms': ['wooden_spoon'], 'id': 1194, 'def': 'a spoon made of wood', 'name': 'wooden_spoon'}, {'frequency': 'c', 'synset': 'wreath.n.01', 'synonyms': ['wreath'], 'id': 1195, 'def': 'an arrangement of flowers, leaves, or stems fastened in a ring', 
'name': 'wreath'}, {'frequency': 'c', 'synset': 'wrench.n.03', 'synonyms': ['wrench', 'spanner'], 'id': 1196, 'def': 'a hand tool that is used to hold or twist a nut or bolt', 'name': 'wrench'}, {'frequency': 'f', 'synset': 'wristband.n.01', 'synonyms': ['wristband'], 'id': 1197, 'def': 'band consisting of a part of a sleeve that covers the wrist', 'name': 'wristband'}, {'frequency': 'f', 'synset': 'wristlet.n.01', 'synonyms': ['wristlet', 'wrist_band'], 'id': 1198, 'def': 'a band or bracelet worn around the wrist', 'name': 'wristlet'}, {'frequency': 'c', 'synset': 'yacht.n.01', 'synonyms': ['yacht'], 'id': 1199, 'def': 'an expensive vessel propelled by sail or power and used for cruising or racing', 'name': 'yacht'}, {'frequency': 'c', 'synset': 'yogurt.n.01', 'synonyms': ['yogurt', 'yoghurt', 'yoghourt'], 'id': 1200, 'def': 'a custard-like food made from curdled milk', 'name': 'yogurt'}, {'frequency': 'c', 'synset': 'yoke.n.07', 'synonyms': ['yoke_(animal_equipment)'], 'id': 1201, 'def': 'gear joining two animals at the neck; NOT egg yolk', 'name': 'yoke_(animal_equipment)'}, {'frequency': 'f', 'synset': 'zebra.n.01', 'synonyms': ['zebra'], 'id': 1202, 'def': 'any of several fleet black-and-white striped African equines', 'name': 'zebra'}, {'frequency': 'c', 'synset': 'zucchini.n.02', 'synonyms': ['zucchini', 'courgette'], 'id': 1203, 'def': 'small cucumber-shaped vegetable marrow; typically dark green', 'name': 'zucchini'}]  # noqa
+# fmt: on
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/pascal_voc.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/pascal_voc.py
new file mode 100644
index 0000000..dbbf82c
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/pascal_voc.py
@@ -0,0 +1,82 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+import numpy as np
+import os
+import xml.etree.ElementTree as ET
+from typing import List, Tuple, Union
+
+from detectron2.data import DatasetCatalog, MetadataCatalog
+from detectron2.structures import BoxMode
+from detectron2.utils.file_io import PathManager
+
+__all__ = ["load_voc_instances", "register_pascal_voc"]
+
+
+# fmt: off
+CLASS_NAMES = (
+    "aeroplane", "bicycle", "bird", "boat", "bottle", "bus", "car", "cat",
+    "chair", "cow", "diningtable", "dog", "horse", "motorbike", "person",
+    "pottedplant", "sheep", "sofa", "train", "tvmonitor"
+)
+# fmt: on
+
+
+def load_voc_instances(dirname: str, split: str, class_names: Union[List[str], Tuple[str, ...]]):
+    """
+    Load Pascal VOC detection annotations to Detectron2 format.
+
+    Args:
+        dirname: Contains "Annotations", "ImageSets", "JPEGImages"
+        split (str): one of "train", "test", "val", "trainval"
+        class_names: list or tuple of class names
+    """
+    with PathManager.open(os.path.join(dirname, "ImageSets", "Main", split + ".txt")) as f:
+        fileids = np.loadtxt(f, dtype=str)
+
+    # Needs to read many small annotation files. Makes sense at local
+    annotation_dirname = PathManager.get_local_path(os.path.join(dirname, "Annotations/"))
+    dicts = []
+    for fileid in fileids:
+        anno_file = os.path.join(annotation_dirname, fileid + ".xml")
+        jpeg_file = os.path.join(dirname, "JPEGImages", fileid + ".jpg")
+
+        with PathManager.open(anno_file) as f:
+            tree = ET.parse(f)
+
+        r = {
+            "file_name": jpeg_file,
+            "image_id": fileid,
+            "height": int(tree.findall("./size/height")[0].text),
+            "width": int(tree.findall("./size/width")[0].text),
+        }
+        instances = []
+
+        for obj in tree.findall("object"):
+            cls = obj.find("name").text
+            # We include "difficult" samples in training.
+            # Based on limited experiments, they don't hurt accuracy.
+            # difficult = int(obj.find("difficult").text)
+            # if difficult == 1:
+            #     continue
+            bbox = obj.find("bndbox")
+            bbox = [float(bbox.find(x).text) for x in ["xmin", "ymin", "xmax", "ymax"]]
+            # Original annotations are integers in the range [1, W or H]
+            # Assuming they mean 1-based pixel indices (inclusive),
+            # a box with annotation (xmin=1, xmax=W) covers the whole image.
+            # In coordinate space this is represented by (xmin=0, xmax=W)
+            bbox[0] -= 1.0
+            bbox[1] -= 1.0
+            instances.append(
+                {"category_id": class_names.index(cls), "bbox": bbox, "bbox_mode": BoxMode.XYXY_ABS}
+            )
+        r["annotations"] = instances
+        dicts.append(r)
+    return dicts
+
+
+def register_pascal_voc(name, dirname, split, year, class_names=CLASS_NAMES):
+    DatasetCatalog.register(name, lambda: load_voc_instances(dirname, split, class_names))
+    MetadataCatalog.get(name).set(
+        thing_classes=list(class_names), dirname=dirname, year=year, split=split
+    )
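+
+
+# Usage sketch (illustrative only, not part of the upstream file; the dataset
+# name and path below are hypothetical):
+#
+#   register_pascal_voc("voc_2012_trainval_local", "/datasets/VOC2012", "trainval", 2012)
+#   dicts = DatasetCatalog.get("voc_2012_trainval_local")  # list of Detectron2-format dicts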
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/register_coco.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/register_coco.py
new file mode 100644
index 0000000..e564438
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/datasets/register_coco.py
@@ -0,0 +1,3 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+from .coco import register_coco_instances  # noqa
+from .coco_panoptic import register_coco_panoptic_separated  # noqa
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/detection_utils.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/detection_utils.py
new file mode 100644
index 0000000..2707eb4
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/detection_utils.py
@@ -0,0 +1,623 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+"""
+Common data processing utilities that are used in a
+typical object detection data pipeline.
+"""
+import logging
+import numpy as np
+from typing import List, Union
+import pycocotools.mask as mask_util
+import torch
+from PIL import Image
+
+from detectron2.structures import (
+    BitMasks,
+    Boxes,
+    BoxMode,
+    Instances,
+    Keypoints,
+    PolygonMasks,
+    RotatedBoxes,
+    polygons_to_bitmask,
+)
+from detectron2.utils.file_io import PathManager
+
+from . import transforms as T
+from .catalog import MetadataCatalog
+
+__all__ = [
+    "SizeMismatchError",
+    "convert_image_to_rgb",
+    "check_image_size",
+    "transform_proposals",
+    "transform_instance_annotations",
+    "annotations_to_instances",
+    "annotations_to_instances_rotated",
+    "build_augmentation",
+    "build_transform_gen",
+    "create_keypoint_hflip_indices",
+    "filter_empty_instances",
+    "read_image",
+]
+
+
+class SizeMismatchError(ValueError):
+    """
+    Raised when a loaded image's width/height differs from the annotation.
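+
+    (Clarifying note, not in the upstream docstring: `check_image_size` below
+    raises this when, for example, an annotation records 640x480 but the decoded
+    file is 480x640, a typical symptom of EXIF-rotated images; `read_image`
+    compensates by applying the EXIF orientation before conversion.)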
+ """ + + +# https://en.wikipedia.org/wiki/YUV#SDTV_with_BT.601 +_M_RGB2YUV = [[0.299, 0.587, 0.114], [-0.14713, -0.28886, 0.436], [0.615, -0.51499, -0.10001]] +_M_YUV2RGB = [[1.0, 0.0, 1.13983], [1.0, -0.39465, -0.58060], [1.0, 2.03211, 0.0]] + +# https://www.exiv2.org/tags.html +_EXIF_ORIENT = 274 # exif 'Orientation' tag + + +def convert_PIL_to_numpy(image, format): + """ + Convert PIL image to numpy array of target format. + + Args: + image (PIL.Image): a PIL image + format (str): the format of output image + + Returns: + (np.ndarray): also see `read_image` + """ + if format is not None: + # PIL only supports RGB, so convert to RGB and flip channels over below + conversion_format = format + if format in ["BGR", "YUV-BT.601"]: + conversion_format = "RGB" + image = image.convert(conversion_format) + image = np.asarray(image) + # PIL squeezes out the channel dimension for "L", so make it HWC + if format == "L": + image = np.expand_dims(image, -1) + + # handle formats not supported by PIL + elif format == "BGR": + # flip channels if needed + image = image[:, :, ::-1] + elif format == "YUV-BT.601": + image = image / 255.0 + image = np.dot(image, np.array(_M_RGB2YUV).T) + + return image + + +def convert_image_to_rgb(image, format): + """ + Convert an image from given format to RGB. + + Args: + image (np.ndarray or Tensor): an HWC image + format (str): the format of input image, also see `read_image` + + Returns: + (np.ndarray): (H,W,3) RGB image in 0-255 range, can be either float or uint8 + """ + if isinstance(image, torch.Tensor): + image = image.cpu().numpy() + if format == "BGR": + image = image[:, :, [2, 1, 0]] + elif format == "YUV-BT.601": + image = np.dot(image, np.array(_M_YUV2RGB).T) + image = image * 255.0 + else: + if format == "L": + image = image[:, :, 0] + image = image.astype(np.uint8) + image = np.asarray(Image.fromarray(image, mode=format).convert("RGB")) + return image + + +def _apply_exif_orientation(image): + """ + Applies the exif orientation correctly. + + This code exists per the bug: + https://github.com/python-pillow/Pillow/issues/3973 + with the function `ImageOps.exif_transpose`. The Pillow source raises errors with + various methods, especially `tobytes` + + Function based on: + https://github.com/wkentaro/labelme/blob/v4.5.4/labelme/utils/image.py#L59 + https://github.com/python-pillow/Pillow/blob/7.1.2/src/PIL/ImageOps.py#L527 + + Args: + image (PIL.Image): a PIL image + + Returns: + (PIL.Image): the PIL image with exif orientation applied, if applicable + """ + if not hasattr(image, "getexif"): + return image + + try: + exif = image.getexif() + except Exception: # https://github.com/facebookresearch/detectron2/issues/1885 + exif = None + + if exif is None: + return image + + orientation = exif.get(_EXIF_ORIENT) + + method = { + 2: Image.FLIP_LEFT_RIGHT, + 3: Image.ROTATE_180, + 4: Image.FLIP_TOP_BOTTOM, + 5: Image.TRANSPOSE, + 6: Image.ROTATE_270, + 7: Image.TRANSVERSE, + 8: Image.ROTATE_90, + }.get(orientation) + + if method is not None: + return image.transpose(method) + return image + + +def read_image(file_name, format=None): + """ + Read an image into the given format. + Will apply rotation and flipping if the image has such exif information. + + Args: + file_name (str): image file path + format (str): one of the supported image modes in PIL, or "BGR" or "YUV-BT.601". + + Returns: + image (np.ndarray): + an HWC image in the given format, which is 0-255, uint8 for + supported image modes in PIL or "BGR"; float (0-1 for Y) for YUV-BT.601. 
+ """ + with PathManager.open(file_name, "rb") as f: + image = Image.open(f) + + # work around this bug: https://github.com/python-pillow/Pillow/issues/3973 + image = _apply_exif_orientation(image) + return convert_PIL_to_numpy(image, format) + + +def check_image_size(dataset_dict, image): + """ + Raise an error if the image does not match the size specified in the dict. + """ + if "width" in dataset_dict or "height" in dataset_dict: + image_wh = (image.shape[1], image.shape[0]) + expected_wh = (dataset_dict["width"], dataset_dict["height"]) + if not image_wh == expected_wh: + raise SizeMismatchError( + "Mismatched image shape{}, got {}, expect {}.".format( + " for image " + dataset_dict["file_name"] + if "file_name" in dataset_dict + else "", + image_wh, + expected_wh, + ) + + " Please check the width/height in your annotation." + ) + + # To ensure bbox always remap to original image size + if "width" not in dataset_dict: + dataset_dict["width"] = image.shape[1] + if "height" not in dataset_dict: + dataset_dict["height"] = image.shape[0] + + +def transform_proposals(dataset_dict, image_shape, transforms, *, proposal_topk, min_box_size=0): + """ + Apply transformations to the proposals in dataset_dict, if any. + + Args: + dataset_dict (dict): a dict read from the dataset, possibly + contains fields "proposal_boxes", "proposal_objectness_logits", "proposal_bbox_mode" + image_shape (tuple): height, width + transforms (TransformList): + proposal_topk (int): only keep top-K scoring proposals + min_box_size (int): proposals with either side smaller than this + threshold are removed + + The input dict is modified in-place, with abovementioned keys removed. A new + key "proposals" will be added. Its value is an `Instances` + object which contains the transformed proposals in its field + "proposal_boxes" and "objectness_logits". + """ + if "proposal_boxes" in dataset_dict: + # Transform proposal boxes + boxes = transforms.apply_box( + BoxMode.convert( + dataset_dict.pop("proposal_boxes"), + dataset_dict.pop("proposal_bbox_mode"), + BoxMode.XYXY_ABS, + ) + ) + boxes = Boxes(boxes) + objectness_logits = torch.as_tensor( + dataset_dict.pop("proposal_objectness_logits").astype("float32") + ) + + boxes.clip(image_shape) + keep = boxes.nonempty(threshold=min_box_size) + boxes = boxes[keep] + objectness_logits = objectness_logits[keep] + + proposals = Instances(image_shape) + proposals.proposal_boxes = boxes[:proposal_topk] + proposals.objectness_logits = objectness_logits[:proposal_topk] + dataset_dict["proposals"] = proposals + + +def transform_instance_annotations( + annotation, transforms, image_size, *, keypoint_hflip_indices=None +): + """ + Apply transforms to box, segmentation and keypoints annotations of a single instance. + + It will use `transforms.apply_box` for the box, and + `transforms.apply_coords` for segmentation polygons & keypoints. + If you need anything more specially designed for each data structure, + you'll need to implement your own version of this function or the transforms. + + Args: + annotation (dict): dict of instance annotations for a single instance. + It will be modified in-place. + transforms (TransformList or list[Transform]): + image_size (tuple): the height, width of the transformed image + keypoint_hflip_indices (ndarray[int]): see `create_keypoint_hflip_indices`. + + Returns: + dict: + the same input dict with fields "bbox", "segmentation", "keypoints" + transformed according to `transforms`. + The "bbox_mode" field will be set to XYXY_ABS. 
+ """ + if isinstance(transforms, (tuple, list)): + transforms = T.TransformList(transforms) + # bbox is 1d (per-instance bounding box) + bbox = BoxMode.convert(annotation["bbox"], annotation["bbox_mode"], BoxMode.XYXY_ABS) + # clip transformed bbox to image size + bbox = transforms.apply_box(np.array([bbox]))[0].clip(min=0) + annotation["bbox"] = np.minimum(bbox, list(image_size + image_size)[::-1]) + annotation["bbox_mode"] = BoxMode.XYXY_ABS + + if "segmentation" in annotation: + # each instance contains 1 or more polygons + segm = annotation["segmentation"] + if isinstance(segm, list): + # polygons + polygons = [np.asarray(p).reshape(-1, 2) for p in segm] + annotation["segmentation"] = [ + p.reshape(-1) for p in transforms.apply_polygons(polygons) + ] + elif isinstance(segm, dict): + # RLE + mask = mask_util.decode(segm) + mask = transforms.apply_segmentation(mask) + assert tuple(mask.shape[:2]) == image_size + annotation["segmentation"] = mask + else: + raise ValueError( + "Cannot transform segmentation of type '{}'!" + "Supported types are: polygons as list[list[float] or ndarray]," + " COCO-style RLE as a dict.".format(type(segm)) + ) + + if "keypoints" in annotation: + keypoints = transform_keypoint_annotations( + annotation["keypoints"], transforms, image_size, keypoint_hflip_indices + ) + annotation["keypoints"] = keypoints + + return annotation + + +def transform_keypoint_annotations(keypoints, transforms, image_size, keypoint_hflip_indices=None): + """ + Transform keypoint annotations of an image. + If a keypoint is transformed out of image boundary, it will be marked "unlabeled" (visibility=0) + + Args: + keypoints (list[float]): Nx3 float in Detectron2's Dataset format. + Each point is represented by (x, y, visibility). + transforms (TransformList): + image_size (tuple): the height, width of the transformed image + keypoint_hflip_indices (ndarray[int]): see `create_keypoint_hflip_indices`. + When `transforms` includes horizontal flip, will use the index + mapping to flip keypoints. + """ + # (N*3,) -> (N, 3) + keypoints = np.asarray(keypoints, dtype="float64").reshape(-1, 3) + keypoints_xy = transforms.apply_coords(keypoints[:, :2]) + + # Set all out-of-boundary points to "unlabeled" + inside = (keypoints_xy >= np.array([0, 0])) & (keypoints_xy <= np.array(image_size[::-1])) + inside = inside.all(axis=1) + keypoints[:, :2] = keypoints_xy + keypoints[:, 2][~inside] = 0 + + # This assumes that HorizFlipTransform is the only one that does flip + do_hflip = sum(isinstance(t, T.HFlipTransform) for t in transforms.transforms) % 2 == 1 + + # Alternative way: check if probe points was horizontally flipped. 
+    # probe = np.asarray([[0.0, 0.0], [image_width, 0.0]])
+    # probe_aug = transforms.apply_coords(probe.copy())
+    # do_hflip = np.sign(probe[1][0] - probe[0][0]) != np.sign(probe_aug[1][0] - probe_aug[0][0])  # noqa
+
+    # If flipped, swap each keypoint with its opposite-handed equivalent
+    if do_hflip:
+        if keypoint_hflip_indices is None:
+            raise ValueError("Cannot flip keypoints without providing flip indices!")
+        if len(keypoints) != len(keypoint_hflip_indices):
+            raise ValueError(
+                "Keypoint data has {} points, but metadata "
+                "contains {} points!".format(len(keypoints), len(keypoint_hflip_indices))
+            )
+        keypoints = keypoints[np.asarray(keypoint_hflip_indices, dtype=np.int32), :]
+
+    # Maintain COCO convention that if visibility == 0 (unlabeled), then x, y = 0
+    keypoints[keypoints[:, 2] == 0] = 0
+    return keypoints
+
+
+def annotations_to_instances(annos, image_size, mask_format="polygon"):
+    """
+    Create an :class:`Instances` object used by the models,
+    from instance annotations in the dataset dict.
+
+    Args:
+        annos (list[dict]): a list of instance annotations in one image, each
+            element for one instance.
+        image_size (tuple): height, width
+
+    Returns:
+        Instances:
+            It will contain fields "gt_boxes", "gt_classes",
+            "gt_masks", "gt_keypoints", if they can be obtained from `annos`.
+            This is the format that builtin models expect.
+    """
+    boxes = (
+        np.stack(
+            [BoxMode.convert(obj["bbox"], obj["bbox_mode"], BoxMode.XYXY_ABS) for obj in annos]
+        )
+        if len(annos)
+        else np.zeros((0, 4))
+    )
+    target = Instances(image_size)
+    target.gt_boxes = Boxes(boxes)
+
+    classes = [int(obj["category_id"]) for obj in annos]
+    classes = torch.tensor(classes, dtype=torch.int64)
+    target.gt_classes = classes
+
+    if len(annos) and "segmentation" in annos[0]:
+        segms = [obj["segmentation"] for obj in annos]
+        if mask_format == "polygon":
+            try:
+                masks = PolygonMasks(segms)
+            except ValueError as e:
+                raise ValueError(
+                    "Failed to use mask_format=='polygon' from the given annotations!"
+                ) from e
+        else:
+            assert mask_format == "bitmask", mask_format
+            masks = []
+            for segm in segms:
+                if isinstance(segm, list):
+                    # polygon
+                    masks.append(polygons_to_bitmask(segm, *image_size))
+                elif isinstance(segm, dict):
+                    # COCO RLE
+                    masks.append(mask_util.decode(segm))
+                elif isinstance(segm, np.ndarray):
+                    assert segm.ndim == 2, "Expect segmentation of 2 dimensions, got {}.".format(
+                        segm.ndim
+                    )
+                    # mask array
+                    masks.append(segm)
+                else:
+                    raise ValueError(
+                        "Cannot convert segmentation of type '{}' to BitMasks!"
+                        "Supported types are: polygons as list[list[float] or ndarray],"
+                        " COCO-style RLE as a dict, or a binary segmentation mask "
+                        " in a 2D numpy array of shape HxW.".format(type(segm))
+                    )
+            # torch.from_numpy does not support array with negative stride.
+            masks = BitMasks(
+                torch.stack([torch.from_numpy(np.ascontiguousarray(x)) for x in masks])
+            )
+        target.gt_masks = masks
+
+    if len(annos) and "keypoints" in annos[0]:
+        kpts = [obj.get("keypoints", []) for obj in annos]
+        target.gt_keypoints = Keypoints(kpts)
+
+    return target
+
+
+def annotations_to_instances_rotated(annos, image_size):
+    """
+    Create an :class:`Instances` object used by the models,
+    from instance annotations in the dataset dict.
+    Compared to `annotations_to_instances`, this function is for rotated boxes only
+
+    Args:
+        annos (list[dict]): a list of instance annotations in one image, each
+            element for one instance.
+        image_size (tuple): height, width
+
+    Returns:
+        Instances:
+            Containing fields "gt_boxes", "gt_classes",
+            if they can be obtained from `annos`.
+            This is the format that builtin models expect.
+    """
+    boxes = [obj["bbox"] for obj in annos]
+    target = Instances(image_size)
+    boxes = target.gt_boxes = RotatedBoxes(boxes)
+    boxes.clip(image_size)
+
+    classes = [obj["category_id"] for obj in annos]
+    classes = torch.tensor(classes, dtype=torch.int64)
+    target.gt_classes = classes
+
+    return target
+
+
+def filter_empty_instances(
+    instances, by_box=True, by_mask=True, box_threshold=1e-5, return_mask=False
+):
+    """
+    Filter out empty instances in an `Instances` object.
+
+    Args:
+        instances (Instances):
+        by_box (bool): whether to filter out instances with empty boxes
+        by_mask (bool): whether to filter out instances with empty masks
+        box_threshold (float): minimum width and height to be considered non-empty
+        return_mask (bool): whether to return boolean mask of filtered instances
+
+    Returns:
+        Instances: the filtered instances.
+        tensor[bool], optional: boolean mask of filtered instances
+    """
+    assert by_box or by_mask
+    r = []
+    if by_box:
+        r.append(instances.gt_boxes.nonempty(threshold=box_threshold))
+    if instances.has("gt_masks") and by_mask:
+        r.append(instances.gt_masks.nonempty())
+
+    # TODO: can also filter visible keypoints
+
+    if not r:
+        return instances
+    m = r[0]
+    for x in r[1:]:
+        m = m & x
+    if return_mask:
+        return instances[m], m
+    return instances[m]
+
+
+def create_keypoint_hflip_indices(dataset_names: Union[str, List[str]]) -> List[int]:
+    """
+    Args:
+        dataset_names: list of dataset names
+
+    Returns:
+        list[int]: a list of size=#keypoints, storing the
+        horizontally-flipped keypoint indices.
+    """
+    if isinstance(dataset_names, str):
+        dataset_names = [dataset_names]
+
+    check_metadata_consistency("keypoint_names", dataset_names)
+    check_metadata_consistency("keypoint_flip_map", dataset_names)
+
+    meta = MetadataCatalog.get(dataset_names[0])
+    names = meta.keypoint_names
+    # TODO flip -> hflip
+    flip_map = dict(meta.keypoint_flip_map)
+    flip_map.update({v: k for k, v in flip_map.items()})
+    flipped_names = [i if i not in flip_map else flip_map[i] for i in names]
+    flip_indices = [names.index(i) for i in flipped_names]
+    return flip_indices
+
+
+def gen_crop_transform_with_instance(crop_size, image_size, instance):
+    """
+    Generate a CropTransform so that the cropping region contains
+    the center of the given instance.
+
+    Args:
+        crop_size (tuple): h, w in pixels
+        image_size (tuple): h, w
+        instance (dict): an annotation dict of one instance, in Detectron2's
+            dataset format.
+    """
+    crop_size = np.asarray(crop_size, dtype=np.int32)
+    bbox = BoxMode.convert(instance["bbox"], instance["bbox_mode"], BoxMode.XYXY_ABS)
+    center_yx = (bbox[1] + bbox[3]) * 0.5, (bbox[0] + bbox[2]) * 0.5
+    assert (
+        image_size[0] >= center_yx[0] and image_size[1] >= center_yx[1]
+    ), "The annotation bounding box is outside of the image!"
+    assert (
+        image_size[0] >= crop_size[0] and image_size[1] >= crop_size[1]
+    ), "Crop size is larger than image size!"
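+
+    # (Clarifying note, not in the upstream source: the crop's top-left corner y0
+    # must satisfy y0 <= center_y <= y0 + crop_h, so y0 is drawn uniformly from
+    # [center_y - crop_h, center_y] clamped to [0, image_h - crop_h]; same for x0.)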
+ + min_yx = np.maximum(np.floor(center_yx).astype(np.int32) - crop_size, 0) + max_yx = np.maximum(np.asarray(image_size, dtype=np.int32) - crop_size, 0) + max_yx = np.minimum(max_yx, np.ceil(center_yx).astype(np.int32)) + + y0 = np.random.randint(min_yx[0], max_yx[0] + 1) + x0 = np.random.randint(min_yx[1], max_yx[1] + 1) + return T.CropTransform(x0, y0, crop_size[1], crop_size[0]) + + +def check_metadata_consistency(key, dataset_names): + """ + Check that the datasets have consistent metadata. + + Args: + key (str): a metadata key + dataset_names (list[str]): a list of dataset names + + Raises: + AttributeError: if the key does not exist in the metadata + ValueError: if the given datasets do not have the same metadata values defined by key + """ + if len(dataset_names) == 0: + return + logger = logging.getLogger(__name__) + entries_per_dataset = [getattr(MetadataCatalog.get(d), key) for d in dataset_names] + for idx, entry in enumerate(entries_per_dataset): + if entry != entries_per_dataset[0]: + logger.error( + "Metadata '{}' for dataset '{}' is '{}'".format(key, dataset_names[idx], str(entry)) + ) + logger.error( + "Metadata '{}' for dataset '{}' is '{}'".format( + key, dataset_names[0], str(entries_per_dataset[0]) + ) + ) + raise ValueError("Datasets have different metadata '{}'!".format(key)) + + +def build_augmentation(cfg, is_train): + """ + Create a list of default :class:`Augmentation` from config. + Now it includes resizing and flipping. + + Returns: + list[Augmentation] + """ + if is_train: + min_size = cfg.INPUT.MIN_SIZE_TRAIN + max_size = cfg.INPUT.MAX_SIZE_TRAIN + sample_style = cfg.INPUT.MIN_SIZE_TRAIN_SAMPLING + else: + min_size = cfg.INPUT.MIN_SIZE_TEST + max_size = cfg.INPUT.MAX_SIZE_TEST + sample_style = "choice" + augmentation = [T.ResizeShortestEdge(min_size, max_size, sample_style)] + if is_train and cfg.INPUT.RANDOM_FLIP != "none": + augmentation.append( + T.RandomFlip( + horizontal=cfg.INPUT.RANDOM_FLIP == "horizontal", + vertical=cfg.INPUT.RANDOM_FLIP == "vertical", + ) + ) + return augmentation + + +build_transform_gen = build_augmentation +""" +Alias for backward-compatibility. +""" diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/samplers/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/samplers/__init__.py new file mode 100644 index 0000000..85c9f1a --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/samplers/__init__.py @@ -0,0 +1,17 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from .distributed_sampler import ( + InferenceSampler, + RandomSubsetTrainingSampler, + RepeatFactorTrainingSampler, + TrainingSampler, +) + +from .grouped_batch_sampler import GroupedBatchSampler + +__all__ = [ + "GroupedBatchSampler", + "TrainingSampler", + "RandomSubsetTrainingSampler", + "InferenceSampler", + "RepeatFactorTrainingSampler", +] diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/samplers/distributed_sampler.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/samplers/distributed_sampler.py new file mode 100644 index 0000000..a098e6a --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/samplers/distributed_sampler.py @@ -0,0 +1,278 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
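
For reference, the augmentation list that `build_augmentation(cfg, is_train=True)` assembles above can be reproduced by hand. A sketch with assumed config values (the 640/800 short-edge choices and 1333 max size are illustrative, not read from any real cfg):

```python
import numpy as np
import detectron2.data.transforms as T

# Roughly what build_augmentation(cfg, is_train=True) returns
# for a config using "choice" sampling (values are assumptions).
augs = T.AugmentationList([
    T.ResizeShortestEdge((640, 800), max_size=1333, sample_style="choice"),
    T.RandomFlip(horizontal=True),
])

image = np.random.randint(0, 256, (480, 640, 3), dtype=np.uint8)
aug_input = T.AugInput(image)
tfms = augs(aug_input)  # transforms aug_input in place, returns a TransformList

print(aug_input.image.shape)  # shorter edge is now 640 or 800
# The same deterministic transforms can be re-applied to associated data:
boxes = tfms.apply_box(np.array([[10.0, 20.0, 200.0, 240.0]], dtype=np.float32))
```
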
+import itertools
+import logging
+import math
+from collections import defaultdict
+from typing import Optional
+import torch
+from torch.utils.data.sampler import Sampler
+
+from detectron2.utils import comm
+
+logger = logging.getLogger(__name__)
+
+
+class TrainingSampler(Sampler):
+    """
+    In training, we only care about the "infinite stream" of training data.
+    So this sampler produces an infinite stream of indices and
+    all workers cooperate to correctly shuffle the indices and sample different indices.
+
+    The sampler in each worker effectively produces `indices[worker_id::num_workers]`
+    where `indices` is an infinite stream of indices consisting of
+    `shuffle(range(size)) + shuffle(range(size)) + ...` (if shuffle is True)
+    or `range(size) + range(size) + ...` (if shuffle is False)
+
+    Note that this sampler does not shard based on pytorch DataLoader worker id.
+    A sampler passed to pytorch DataLoader is used only with map-style dataset
+    and will not be executed inside workers.
+    But if this sampler is used in a way that it gets executed inside a dataloader
+    worker, then extra work needs to be done to shard its outputs based on worker id.
+    This is required so that workers don't produce identical data.
+    :class:`ToIterableDataset` implements this logic.
+    This note is true for all samplers in detectron2.
+    """
+
+    def __init__(self, size: int, shuffle: bool = True, seed: Optional[int] = None):
+        """
+        Args:
+            size (int): the total number of data of the underlying dataset to sample from
+            shuffle (bool): whether to shuffle the indices or not
+            seed (int): the initial seed of the shuffle. Must be the same
+                across all workers. If None, will use a random seed shared
+                among workers (require synchronization among all workers).
+        """
+        if not isinstance(size, int):
+            raise TypeError(f"TrainingSampler(size=) expects an int. Got type {type(size)}.")
+        if size <= 0:
+            raise ValueError(f"TrainingSampler(size=) expects a positive int. Got {size}.")
+        self._size = size
+        self._shuffle = shuffle
+        if seed is None:
+            seed = comm.shared_random_seed()
+        self._seed = int(seed)
+
+        self._rank = comm.get_rank()
+        self._world_size = comm.get_world_size()
+
+    def __iter__(self):
+        start = self._rank
+        yield from itertools.islice(self._infinite_indices(), start, None, self._world_size)
+
+    def _infinite_indices(self):
+        g = torch.Generator()
+        g.manual_seed(self._seed)
+        while True:
+            if self._shuffle:
+                yield from torch.randperm(self._size, generator=g).tolist()
+            else:
+                yield from torch.arange(self._size).tolist()
+
+
+class RandomSubsetTrainingSampler(TrainingSampler):
+    """
+    Similar to TrainingSampler, but only samples a random subset of indices.
+    This is useful when you want to estimate the accuracy vs data-number curves by
+    training the model with different subset_ratio.
+    """
+
+    def __init__(
+        self,
+        size: int,
+        subset_ratio: float,
+        shuffle: bool = True,
+        seed_shuffle: Optional[int] = None,
+        seed_subset: Optional[int] = None,
+    ):
+        """
+        Args:
+            size (int): the total number of data of the underlying dataset to sample from
+            subset_ratio (float): the ratio of subset data to sample from the underlying dataset
+            shuffle (bool): whether to shuffle the indices or not
+            seed_shuffle (int): the initial seed of the shuffle. Must be the same
+                across all workers. If None, will use a random seed shared
+                among workers (require synchronization among all workers).
+            seed_subset (int): the seed to randomize the subset to be sampled.
+                Must be the same across all workers. If None, will use a random seed shared
+                among workers (require synchronization among all workers).
+        """
+        super().__init__(size=size, shuffle=shuffle, seed=seed_shuffle)
+
+        assert 0.0 < subset_ratio <= 1.0
+        self._size_subset = int(size * subset_ratio)
+        assert self._size_subset > 0
+        if seed_subset is None:
+            seed_subset = comm.shared_random_seed()
+        self._seed_subset = int(seed_subset)
+
+        # randomly generate the subset indexes to be sampled from
+        g = torch.Generator()
+        g.manual_seed(self._seed_subset)
+        indexes_randperm = torch.randperm(self._size, generator=g)
+        self._indexes_subset = indexes_randperm[: self._size_subset]
+
+        logger.info("Using RandomSubsetTrainingSampler...")
+        logger.info(f"Randomly sample {self._size_subset} data from the original {self._size} data")
+
+    def _infinite_indices(self):
+        g = torch.Generator()
+        g.manual_seed(self._seed)  # self._seed equals seed_shuffle from __init__()
+        while True:
+            if self._shuffle:
+                # generate a random permutation to shuffle self._indexes_subset
+                randperm = torch.randperm(self._size_subset, generator=g)
+                yield from self._indexes_subset[randperm].tolist()
+            else:
+                yield from self._indexes_subset.tolist()
+
+
+class RepeatFactorTrainingSampler(Sampler):
+    """
+    Similar to TrainingSampler, but a sample may appear more times than others based
+    on its "repeat factor". This is suitable for training on class imbalanced datasets like LVIS.
+    """
+
+    def __init__(self, repeat_factors, *, shuffle=True, seed=None):
+        """
+        Args:
+            repeat_factors (Tensor): a float vector, the repeat factor for each index. When it's
+                full of ones, it is equivalent to ``TrainingSampler(len(repeat_factors), ...)``.
+            shuffle (bool): whether to shuffle the indices or not
+            seed (int): the initial seed of the shuffle. Must be the same
+                across all workers. If None, will use a random seed shared
+                among workers (require synchronization among all workers).
+        """
+        self._shuffle = shuffle
+        if seed is None:
+            seed = comm.shared_random_seed()
+        self._seed = int(seed)
+
+        self._rank = comm.get_rank()
+        self._world_size = comm.get_world_size()
+
+        # Split into whole number (_int_part) and fractional (_frac_part) parts.
+        self._int_part = torch.trunc(repeat_factors)
+        self._frac_part = repeat_factors - self._int_part
+
+    @staticmethod
+    def repeat_factors_from_category_frequency(dataset_dicts, repeat_thresh):
+        """
+        Compute (fractional) per-image repeat factors based on category frequency.
+        The repeat factor for an image is a function of the frequency of the rarest
+        category labeled in that image. The "frequency of category c" in [0, 1] is defined
+        as the fraction of images in the training set (without repeats) in which category c
+        appears.
+        See :paper:`lvis` (>= v2) Appendix B.2.
+
+        Args:
+            dataset_dicts (list[dict]): annotations in Detectron2 dataset format.
+            repeat_thresh (float): frequency threshold below which data is repeated.
+                If the frequency is half of `repeat_thresh`, the image will be
+                repeated twice.
+
+        Returns:
+            torch.Tensor:
+                the i-th element is the repeat factor for the dataset image at index i.
+        """
+        # 1. For each category c, compute the fraction of images that contain it: f(c)
+        category_freq = defaultdict(int)
+        for dataset_dict in dataset_dicts:  # For each image (without repeats)
+            cat_ids = {ann["category_id"] for ann in dataset_dict["annotations"]}
+            for cat_id in cat_ids:
+                category_freq[cat_id] += 1
+        num_images = len(dataset_dicts)
+        for k, v in category_freq.items():
+            category_freq[k] = v / num_images
+
+        # 2. For each category c, compute the category-level repeat factor:
+        #    r(c) = max(1, sqrt(t / f(c)))
+        category_rep = {
+            cat_id: max(1.0, math.sqrt(repeat_thresh / cat_freq))
+            for cat_id, cat_freq in category_freq.items()
+        }
+
+        # 3. For each image I, compute the image-level repeat factor:
+        #    r(I) = max_{c in I} r(c)
+        rep_factors = []
+        for dataset_dict in dataset_dicts:
+            cat_ids = {ann["category_id"] for ann in dataset_dict["annotations"]}
+            rep_factor = max({category_rep[cat_id] for cat_id in cat_ids}, default=1.0)
+            rep_factors.append(rep_factor)
+
+        return torch.tensor(rep_factors, dtype=torch.float32)
+
+    def _get_epoch_indices(self, generator):
+        """
+        Create a list of dataset indices (with repeats) to use for one epoch.
+
+        Args:
+            generator (torch.Generator): pseudo random number generator used for
+                stochastic rounding.
+
+        Returns:
+            torch.Tensor: list of dataset indices to use in one epoch. Each index
+                is repeated based on its calculated repeat factor.
+        """
+        # Since repeat factors are fractional, we use stochastic rounding so
+        # that the target repeat factor is achieved in expectation over the
+        # course of training
+        rands = torch.rand(len(self._frac_part), generator=generator)
+        rep_factors = self._int_part + (rands < self._frac_part).float()
+        # Construct a list of indices in which we repeat images as specified
+        indices = []
+        for dataset_index, rep_factor in enumerate(rep_factors):
+            indices.extend([dataset_index] * int(rep_factor.item()))
+        return torch.tensor(indices, dtype=torch.int64)
+
+    def __iter__(self):
+        start = self._rank
+        yield from itertools.islice(self._infinite_indices(), start, None, self._world_size)
+
+    def _infinite_indices(self):
+        g = torch.Generator()
+        g.manual_seed(self._seed)
+        while True:
+            # Sample indices with repeats determined by stochastic rounding; each
+            # "epoch" may have a slightly different size due to the rounding.
+            indices = self._get_epoch_indices(g)
+            if self._shuffle:
+                randperm = torch.randperm(len(indices), generator=g)
+                yield from indices[randperm].tolist()
+            else:
+                yield from indices.tolist()
+
+
+class InferenceSampler(Sampler):
+    """
+    Produce indices for inference across all workers.
+    Inference needs to run on the __exact__ set of samples,
+    therefore when the total number of samples is not divisible by the number of workers,
+    this sampler produces a different number of samples on different workers.
+ """ + + def __init__(self, size: int): + """ + Args: + size (int): the total number of data of the underlying dataset to sample from + """ + self._size = size + assert size > 0 + self._rank = comm.get_rank() + self._world_size = comm.get_world_size() + self._local_indices = self._get_local_indices(size, self._world_size, self._rank) + + @staticmethod + def _get_local_indices(total_size, world_size, rank): + shard_size = total_size // world_size + left = total_size % world_size + shard_sizes = [shard_size + int(r < left) for r in range(world_size)] + + begin = sum(shard_sizes[:rank]) + end = min(sum(shard_sizes[: rank + 1]), total_size) + return range(begin, end) + + def __iter__(self): + yield from self._local_indices + + def __len__(self): + return len(self._local_indices) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/samplers/grouped_batch_sampler.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/samplers/grouped_batch_sampler.py new file mode 100644 index 0000000..6ba3c01 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/samplers/grouped_batch_sampler.py @@ -0,0 +1,47 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import numpy as np +from torch.utils.data.sampler import BatchSampler, Sampler + + +class GroupedBatchSampler(BatchSampler): + """ + Wraps another sampler to yield a mini-batch of indices. + It enforces that the batch only contain elements from the same group. + It also tries to provide mini-batches which follows an ordering which is + as close as possible to the ordering from the original sampler. + """ + + def __init__(self, sampler, group_ids, batch_size): + """ + Args: + sampler (Sampler): Base sampler. + group_ids (list[int]): If the sampler produces indices in range [0, N), + `group_ids` must be a list of `N` ints which contains the group id of each sample. + The group ids must be a set of integers in the range [0, num_groups). + batch_size (int): Size of mini-batch. + """ + if not isinstance(sampler, Sampler): + raise ValueError( + "sampler should be an instance of " + "torch.autils.data.Sampler, but got sampler={}".format(sampler) + ) + self.sampler = sampler + self.group_ids = np.asarray(group_ids) + assert self.group_ids.ndim == 1 + self.batch_size = batch_size + groups = np.unique(self.group_ids).tolist() + + # buffer the indices of each group until batch size is reached + self.buffer_per_group = {k: [] for k in groups} + + def __iter__(self): + for idx in self.sampler: + group_id = self.group_ids[idx] + group_buffer = self.buffer_per_group[group_id] + group_buffer.append(idx) + if len(group_buffer) == self.batch_size: + yield group_buffer[:] # yield a copy of the list + del group_buffer[:] + + def __len__(self): + raise NotImplementedError("len() of GroupedBatchSampler is not well-defined.") diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/transforms/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/transforms/__init__.py new file mode 100644 index 0000000..ab3c63b --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/transforms/__init__.py @@ -0,0 +1,14 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+from fvcore.transforms.transform import Transform, TransformList  # order them first
+from fvcore.transforms.transform import *
+from .transform import *
+from .augmentation import *
+from .augmentation_impl import *
+
+__all__ = [k for k in globals().keys() if not k.startswith("_")]
+
+
+from detectron2.utils.env import fixup_module_metadata
+
+fixup_module_metadata(__name__, globals(), __all__)
+del fixup_module_metadata
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/transforms/augmentation.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/transforms/augmentation.py
new file mode 100644
index 0000000..48be5b1
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/transforms/augmentation.py
@@ -0,0 +1,377 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+import inspect
+import numpy as np
+import pprint
+from typing import Any, List, Optional, Tuple, Union
+from fvcore.transforms.transform import Transform, TransformList
+
+"""
+See "Data Augmentation" tutorial for an overview of the system:
+https://detectron2.readthedocs.io/tutorials/augmentation.html
+"""
+
+
+__all__ = [
+    "Augmentation",
+    "AugmentationList",
+    "AugInput",
+    "TransformGen",
+    "apply_transform_gens",
+    "StandardAugInput",
+    "apply_augmentations",
+]
+
+
+def _check_img_dtype(img):
+    assert isinstance(img, np.ndarray), "[Augmentation] Needs a numpy array, but got a {}!".format(
+        type(img)
+    )
+    assert not isinstance(img.dtype, np.integer) or (
+        img.dtype == np.uint8
+    ), "[Augmentation] Got image of type {}, use uint8 or floating points instead!".format(
+        img.dtype
+    )
+    assert img.ndim in [2, 3], img.ndim
+
+
+def _get_aug_input_args(aug, aug_input) -> List[Any]:
+    """
+    Get the arguments to be passed to ``aug.get_transform`` from the input ``aug_input``.
+    """
+    if aug.input_args is None:
+        # Decide what attributes are needed automatically
+        prms = list(inspect.signature(aug.get_transform).parameters.items())
+        # The default behavior is: if there is one parameter, then it's "image"
+        # (works automatically for the majority of use cases, and also avoids BC breaking),
+        # Otherwise, use the argument names.
+        if len(prms) == 1:
+            names = ("image",)
+        else:
+            names = []
+            for name, prm in prms:
+                if prm.kind in (inspect.Parameter.VAR_POSITIONAL, inspect.Parameter.VAR_KEYWORD):
+                    raise TypeError(
+                        f""" \
+The default implementation of `{type(aug)}.__call__` does not allow \
+`{type(aug)}.get_transform` to use variable-length arguments (*args, **kwargs)! \
+If arguments are unknown, reimplement `__call__` instead. \
+"""
+                    )
+                names.append(name)
+        aug.input_args = tuple(names)
+
+    args = []
+    for f in aug.input_args:
+        try:
+            args.append(getattr(aug_input, f))
+        except AttributeError as e:
+            raise AttributeError(
+                f"{type(aug)}.get_transform needs input attribute '{f}', "
+                f"but it is not an attribute of {type(aug_input)}!"
+            ) from e
+    return args
+
+
+class Augmentation:
+    """
+    Augmentation defines (often random) policies/strategies to generate :class:`Transform`
+    from data. It is often used for pre-processing of input data.
+
+    A "policy" that generates a :class:`Transform` may, in the most general case,
+    need arbitrary information from input data in order to determine what transforms
+    to apply. Therefore, each :class:`Augmentation` instance defines the arguments
+    needed by its :meth:`get_transform` method. When called with the positional arguments,
+    the :meth:`get_transform` method executes the policy.
+ + Note that :class:`Augmentation` defines the policies to create a :class:`Transform`, + but not how to execute the actual transform operations to those data. + Its :meth:`__call__` method will use :meth:`AugInput.transform` to execute the transform. + + The returned `Transform` object is meant to describe deterministic transformation, which means + it can be re-applied on associated data, e.g. the geometry of an image and its segmentation + masks need to be transformed together. + (If such re-application is not needed, then determinism is not a crucial requirement.) + """ + + input_args: Optional[Tuple[str]] = None + """ + Stores the attribute names needed by :meth:`get_transform`, e.g. ``("image", "sem_seg")``. + By default, it is just a tuple of argument names in :meth:`self.get_transform`, which often only + contain "image". As long as the argument name convention is followed, there is no need for + users to touch this attribute. + """ + + def _init(self, params=None): + if params: + for k, v in params.items(): + if k != "self" and not k.startswith("_"): + setattr(self, k, v) + + def get_transform(self, *args) -> Transform: + """ + Execute the policy based on input data, and decide what transform to apply to inputs. + + Args: + args: Any fixed-length positional arguments. By default, the name of the arguments + should exist in the :class:`AugInput` to be used. + + Returns: + Transform: Returns the deterministic transform to apply to the input. + + Examples: + :: + class MyAug: + # if a policy needs to know both image and semantic segmentation + def get_transform(image, sem_seg) -> T.Transform: + pass + tfm: Transform = MyAug().get_transform(image, sem_seg) + new_image = tfm.apply_image(image) + + Notes: + Users can freely use arbitrary new argument names in custom + :meth:`get_transform` method, as long as they are available in the + input data. In detectron2 we use the following convention: + + * image: (H,W) or (H,W,C) ndarray of type uint8 in range [0, 255], or + floating point in range [0, 1] or [0, 255]. + * boxes: (N,4) ndarray of float32. It represents the instance bounding boxes + of N instances. Each is in XYXY format in unit of absolute coordinates. + * sem_seg: (H,W) ndarray of type uint8. Each element is an integer label of pixel. + + We do not specify convention for other types and do not include builtin + :class:`Augmentation` that uses other types in detectron2. + """ + raise NotImplementedError + + def __call__(self, aug_input) -> Transform: + """ + Augment the given `aug_input` **in-place**, and return the transform that's used. + + This method will be called to apply the augmentation. In most augmentation, it + is enough to use the default implementation, which calls :meth:`get_transform` + using the inputs. But a subclass can overwrite it to have more complicated logic. + + Args: + aug_input (AugInput): an object that has attributes needed by this augmentation + (defined by ``self.get_transform``). Its ``transform`` method will be called + to in-place transform it. + + Returns: + Transform: the transform that is applied on the input. + """ + args = _get_aug_input_args(self, aug_input) + tfm = self.get_transform(*args) + assert isinstance(tfm, (Transform, TransformList)), ( + f"{type(self)}.get_transform must return an instance of Transform! " + f"Got {type(tfm)} instead." + ) + aug_input.transform(tfm) + return tfm + + def _rand_range(self, low=1.0, high=None, size=None): + """ + Uniform float random number between low and high. 
+ """ + if high is None: + low, high = 0, low + if size is None: + size = [] + return np.random.uniform(low, high, size) + + def __repr__(self): + """ + Produce something like: + "MyAugmentation(field1={self.field1}, field2={self.field2})" + """ + try: + sig = inspect.signature(self.__init__) + classname = type(self).__name__ + argstr = [] + for name, param in sig.parameters.items(): + assert ( + param.kind != param.VAR_POSITIONAL and param.kind != param.VAR_KEYWORD + ), "The default __repr__ doesn't support *args or **kwargs" + assert hasattr(self, name), ( + "Attribute {} not found! " + "Default __repr__ only works if attributes match the constructor.".format(name) + ) + attr = getattr(self, name) + default = param.default + if default is attr: + continue + attr_str = pprint.pformat(attr) + if "\n" in attr_str: + # don't show it if pformat decides to use >1 lines + attr_str = "..." + argstr.append("{}={}".format(name, attr_str)) + return "{}({})".format(classname, ", ".join(argstr)) + except AssertionError: + return super().__repr__() + + __str__ = __repr__ + + +def _transform_to_aug(tfm_or_aug): + """ + Wrap Transform into Augmentation. + Private, used internally to implement augmentations. + """ + assert isinstance(tfm_or_aug, (Transform, Augmentation)), tfm_or_aug + if isinstance(tfm_or_aug, Augmentation): + return tfm_or_aug + else: + + class _TransformToAug(Augmentation): + def __init__(self, tfm: Transform): + self.tfm = tfm + + def get_transform(self, *args): + return self.tfm + + def __repr__(self): + return repr(self.tfm) + + __str__ = __repr__ + + return _TransformToAug(tfm_or_aug) + + +class AugmentationList(Augmentation): + """ + Apply a sequence of augmentations. + + It has ``__call__`` method to apply the augmentations. + + Note that :meth:`get_transform` method is impossible (will throw error if called) + for :class:`AugmentationList`, because in order to apply a sequence of augmentations, + the kth augmentation must be applied first, to provide inputs needed by the (k+1)th + augmentation. + """ + + def __init__(self, augs): + """ + Args: + augs (list[Augmentation or Transform]): + """ + super().__init__() + self.augs = [_transform_to_aug(x) for x in augs] + + def __call__(self, aug_input) -> Transform: + tfms = [] + for x in self.augs: + tfm = x(aug_input) + tfms.append(tfm) + return TransformList(tfms) + + def __repr__(self): + msgs = [str(x) for x in self.augs] + return "AugmentationList[{}]".format(", ".join(msgs)) + + __str__ = __repr__ + + +class AugInput: + """ + Input that can be used with :meth:`Augmentation.__call__`. + This is a standard implementation for the majority of use cases. + This class provides the standard attributes **"image", "boxes", "sem_seg"** + defined in :meth:`__init__` and they may be needed by different augmentations. + Most augmentation policies do not need attributes beyond these three. + + After applying augmentations to these attributes (using :meth:`AugInput.transform`), + the returned transforms can then be used to transform other data structures that users have. + + Examples: + :: + input = AugInput(image, boxes=boxes) + tfms = augmentation(input) + transformed_image = input.image + transformed_boxes = input.boxes + transformed_other_data = tfms.apply_other(other_data) + + An extended project that works with new data types may implement augmentation policies + that need other inputs. An algorithm may need to transform inputs in a way different + from the standard approach defined in this class. 
In those rare situations, users can
+    implement a class similar to this class, that satisfies the following conditions:
+
+    * The input must provide access to these data in the form of attribute access
+      (``getattr``).  For example, if an :class:`Augmentation` to be applied needs "image"
+      and "sem_seg" arguments, its input must have the attributes "image" and "sem_seg".
+    * The input must have a ``transform(tfm: Transform) -> None`` method which
+      in-place transforms all its attributes.
+    """
+
+    # TODO maybe should support more builtin data types here
+    def __init__(
+        self,
+        image: np.ndarray,
+        *,
+        boxes: Optional[np.ndarray] = None,
+        sem_seg: Optional[np.ndarray] = None,
+    ):
+        """
+        Args:
+            image (ndarray): (H,W) or (H,W,C) ndarray of type uint8 in range [0, 255], or
+                floating point in range [0, 1] or [0, 255]. The meaning of C is up
+                to users.
+            boxes (ndarray or None): Nx4 float32 boxes in XYXY_ABS mode
+            sem_seg (ndarray or None): HxW uint8 semantic segmentation mask. Each element
+                is an integer label of pixel.
+        """
+        _check_img_dtype(image)
+        self.image = image
+        self.boxes = boxes
+        self.sem_seg = sem_seg
+
+    def transform(self, tfm: Transform) -> None:
+        """
+        In-place transform all attributes of this class.
+
+        By "in-place", it means after calling this method, accessing an attribute such
+        as ``self.image`` will return transformed data.
+        """
+        self.image = tfm.apply_image(self.image)
+        if self.boxes is not None:
+            self.boxes = tfm.apply_box(self.boxes)
+        if self.sem_seg is not None:
+            self.sem_seg = tfm.apply_segmentation(self.sem_seg)
+
+    def apply_augmentations(
+        self, augmentations: List[Union[Augmentation, Transform]]
+    ) -> TransformList:
+        """
+        Equivalent of ``AugmentationList(augmentations)(self)``
+        """
+        return AugmentationList(augmentations)(self)
+
+
+def apply_augmentations(augmentations: List[Union[Transform, Augmentation]], inputs):
+    """
+    Use ``T.AugmentationList(augmentations)(inputs)`` instead.
+    """
+    if isinstance(inputs, np.ndarray):
+        # handle the common case of image-only Augmentation, also for backward compatibility
+        image_only = True
+        inputs = AugInput(inputs)
+    else:
+        image_only = False
+    tfms = inputs.apply_augmentations(augmentations)
+    return inputs.image if image_only else inputs, tfms
+
+
+apply_transform_gens = apply_augmentations
+"""
+Alias for backward-compatibility.
+"""
+
+TransformGen = Augmentation
+"""
+Alias for Augmentation, since it is something that generates :class:`Transform`s
+"""
+
+StandardAugInput = AugInput
+"""
+Alias for compatibility. It's not worth the complexity to have two classes.
+"""
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/transforms/augmentation_impl.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/transforms/augmentation_impl.py
new file mode 100644
index 0000000..652a34a
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/transforms/augmentation_impl.py
@@ -0,0 +1,614 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Facebook, Inc. and its affiliates.
+"""
+Implement many useful :class:`Augmentation`.
+""" +import numpy as np +import sys +from typing import Tuple +import torch +from fvcore.transforms.transform import ( + BlendTransform, + CropTransform, + HFlipTransform, + NoOpTransform, + PadTransform, + Transform, + TransformList, + VFlipTransform, +) +from PIL import Image + +from .augmentation import Augmentation, _transform_to_aug +from .transform import ExtentTransform, ResizeTransform, RotationTransform + +__all__ = [ + "FixedSizeCrop", + "RandomApply", + "RandomBrightness", + "RandomContrast", + "RandomCrop", + "RandomExtent", + "RandomFlip", + "RandomSaturation", + "RandomLighting", + "RandomRotation", + "Resize", + "ResizeScale", + "ResizeShortestEdge", + "RandomCrop_CategoryAreaConstraint", +] + + +class RandomApply(Augmentation): + """ + Randomly apply an augmentation with a given probability. + """ + + def __init__(self, tfm_or_aug, prob=0.5): + """ + Args: + tfm_or_aug (Transform, Augmentation): the transform or augmentation + to be applied. It can either be a `Transform` or `Augmentation` + instance. + prob (float): probability between 0.0 and 1.0 that + the wrapper transformation is applied + """ + super().__init__() + self.aug = _transform_to_aug(tfm_or_aug) + assert 0.0 <= prob <= 1.0, f"Probablity must be between 0.0 and 1.0 (given: {prob})" + self.prob = prob + + def get_transform(self, *args): + do = self._rand_range() < self.prob + if do: + return self.aug.get_transform(*args) + else: + return NoOpTransform() + + def __call__(self, aug_input): + do = self._rand_range() < self.prob + if do: + return self.aug(aug_input) + else: + return NoOpTransform() + + +class RandomFlip(Augmentation): + """ + Flip the image horizontally or vertically with the given probability. + """ + + def __init__(self, prob=0.5, *, horizontal=True, vertical=False): + """ + Args: + prob (float): probability of flip. + horizontal (boolean): whether to apply horizontal flipping + vertical (boolean): whether to apply vertical flipping + """ + super().__init__() + + if horizontal and vertical: + raise ValueError("Cannot do both horiz and vert. Please use two Flip instead.") + if not horizontal and not vertical: + raise ValueError("At least one of horiz or vert has to be True!") + self._init(locals()) + + def get_transform(self, image): + h, w = image.shape[:2] + do = self._rand_range() < self.prob + if do: + if self.horizontal: + return HFlipTransform(w) + elif self.vertical: + return VFlipTransform(h) + else: + return NoOpTransform() + + +class Resize(Augmentation): + """Resize image to a fixed target size""" + + def __init__(self, shape, interp=Image.BILINEAR): + """ + Args: + shape: (h, w) tuple or a int + interp: PIL interpolation method + """ + if isinstance(shape, int): + shape = (shape, shape) + shape = tuple(shape) + self._init(locals()) + + def get_transform(self, image): + return ResizeTransform( + image.shape[0], image.shape[1], self.shape[0], self.shape[1], self.interp + ) + + +class ResizeShortestEdge(Augmentation): + """ + Resize the image while keeping the aspect ratio unchanged. + It attempts to scale the shorter edge to the given `short_edge_length`, + as long as the longer edge does not exceed `max_size`. + If `max_size` is reached, then downscale so that the longer edge does not exceed max_size. 
+ """ + + @torch.jit.unused + def __init__( + self, short_edge_length, max_size=sys.maxsize, sample_style="range", interp=Image.BILINEAR + ): + """ + Args: + short_edge_length (list[int]): If ``sample_style=="range"``, + a [min, max] interval from which to sample the shortest edge length. + If ``sample_style=="choice"``, a list of shortest edge lengths to sample from. + max_size (int): maximum allowed longest edge length. + sample_style (str): either "range" or "choice". + """ + super().__init__() + assert sample_style in ["range", "choice"], sample_style + + self.is_range = sample_style == "range" + if isinstance(short_edge_length, int): + short_edge_length = (short_edge_length, short_edge_length) + if self.is_range: + assert len(short_edge_length) == 2, ( + "short_edge_length must be two values using 'range' sample style." + f" Got {short_edge_length}!" + ) + self._init(locals()) + + @torch.jit.unused + def get_transform(self, image): + h, w = image.shape[:2] + if self.is_range: + size = np.random.randint(self.short_edge_length[0], self.short_edge_length[1] + 1) + else: + size = np.random.choice(self.short_edge_length) + if size == 0: + return NoOpTransform() + + newh, neww = ResizeShortestEdge.get_output_shape(h, w, size, self.max_size) + return ResizeTransform(h, w, newh, neww, self.interp) + + @staticmethod + def get_output_shape( + oldh: int, oldw: int, short_edge_length: int, max_size: int + ) -> Tuple[int, int]: + """ + Compute the output size given input size and target short edge length. + """ + h, w = oldh, oldw + size = short_edge_length * 1.0 + scale = size / min(h, w) + if h < w: + newh, neww = size, scale * w + else: + newh, neww = scale * h, size + if max(newh, neww) > max_size: + scale = max_size * 1.0 / max(newh, neww) + newh = newh * scale + neww = neww * scale + neww = int(neww + 0.5) + newh = int(newh + 0.5) + return (newh, neww) + + +class ResizeScale(Augmentation): + """ + Takes target size as input and randomly scales the given target size between `min_scale` + and `max_scale`. It then scales the input image such that it fits inside the scaled target + box, keeping the aspect ratio constant. + This implements the resize part of the Google's 'resize_and_crop' data augmentation: + https://github.com/tensorflow/tpu/blob/master/models/official/detection/utils/input_utils.py#L127 + """ + + def __init__( + self, + min_scale: float, + max_scale: float, + target_height: int, + target_width: int, + interp: int = Image.BILINEAR, + ): + """ + Args: + min_scale: minimum image scale range. + max_scale: maximum image scale range. + target_height: target image height. + target_width: target image width. + interp: image interpolation method. + """ + super().__init__() + self._init(locals()) + + def _get_resize(self, image: np.ndarray, scale: float) -> Transform: + input_size = image.shape[:2] + + # Compute new target size given a scale. + target_size = (self.target_height, self.target_width) + target_scale_size = np.multiply(target_size, scale) + + # Compute actual rescaling applied to input image and output size. 
+ output_scale = np.minimum( + target_scale_size[0] / input_size[0], target_scale_size[1] / input_size[1] + ) + output_size = np.round(np.multiply(input_size, output_scale)).astype(int) + + return ResizeTransform( + input_size[0], input_size[1], output_size[0], output_size[1], self.interp + ) + + def get_transform(self, image: np.ndarray) -> Transform: + random_scale = np.random.uniform(self.min_scale, self.max_scale) + return self._get_resize(image, random_scale) + + +class RandomRotation(Augmentation): + """ + This method returns a copy of this image, rotated the given + number of degrees counter clockwise around the given center. + """ + + def __init__(self, angle, expand=True, center=None, sample_style="range", interp=None): + """ + Args: + angle (list[float]): If ``sample_style=="range"``, + a [min, max] interval from which to sample the angle (in degrees). + If ``sample_style=="choice"``, a list of angles to sample from + expand (bool): choose if the image should be resized to fit the whole + rotated image (default), or simply cropped + center (list[[float, float]]): If ``sample_style=="range"``, + a [[minx, miny], [maxx, maxy]] relative interval from which to sample the center, + [0, 0] being the top left of the image and [1, 1] the bottom right. + If ``sample_style=="choice"``, a list of centers to sample from + Default: None, which means that the center of rotation is the center of the image + center has no effect if expand=True because it only affects shifting + """ + super().__init__() + assert sample_style in ["range", "choice"], sample_style + self.is_range = sample_style == "range" + if isinstance(angle, (float, int)): + angle = (angle, angle) + if center is not None and isinstance(center[0], (float, int)): + center = (center, center) + self._init(locals()) + + def get_transform(self, image): + h, w = image.shape[:2] + center = None + if self.is_range: + angle = np.random.uniform(self.angle[0], self.angle[1]) + if self.center is not None: + center = ( + np.random.uniform(self.center[0][0], self.center[1][0]), + np.random.uniform(self.center[0][1], self.center[1][1]), + ) + else: + angle = np.random.choice(self.angle) + if self.center is not None: + center = np.random.choice(self.center) + + if center is not None: + center = (w * center[0], h * center[1]) # Convert to absolute coordinates + + if angle % 360 == 0: + return NoOpTransform() + + return RotationTransform(h, w, angle, expand=self.expand, center=center, interp=self.interp) + + +class FixedSizeCrop(Augmentation): + """ + If `crop_size` is smaller than the input image size, then it uses a random crop of + the crop size. If `crop_size` is larger than the input image size, then it pads + the right and the bottom of the image to the crop size if `pad` is True, otherwise + it returns the smaller image. + """ + + def __init__(self, crop_size: Tuple[int], pad: bool = True, pad_value: float = 128.0): + """ + Args: + crop_size: target image (height, width). + pad: if True, will pad images smaller than `crop_size` up to `crop_size` + pad_value: the padding value. + """ + super().__init__() + self._init(locals()) + + def _get_crop(self, image: np.ndarray) -> Transform: + # Compute the image scale and scaled size. + input_size = image.shape[:2] + output_size = self.crop_size + + # Add random crop if the image is scaled up. 
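
The crop/pad pair that `FixedSizeCrop` composes always yields exactly `crop_size`: whichever dimension is too large is randomly cropped, and whichever is too small is padded. A quick sketch with an arbitrary synthetic input:

```python
import numpy as np
from detectron2.data.transforms import FixedSizeCrop

aug = FixedSizeCrop(crop_size=(512, 512), pad=True, pad_value=128.0)

# 300x700 input: width (700 > 512) is randomly cropped,
# height (300 < 512) is padded at the bottom.
image = np.zeros((300, 700, 3), dtype=np.uint8)
tfm = aug.get_transform(image)   # TransformList: CropTransform then PadTransform
out = tfm.apply_image(image)
print(out.shape)                 # (512, 512, 3)
```
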
+        max_offset = np.subtract(input_size, output_size)
+        max_offset = np.maximum(max_offset, 0)
+        offset = np.multiply(max_offset, np.random.uniform(0.0, 1.0))
+        offset = np.round(offset).astype(int)
+        return CropTransform(
+            offset[1], offset[0], output_size[1], output_size[0], input_size[1], input_size[0]
+        )
+
+    def _get_pad(self, image: np.ndarray) -> Transform:
+        # Compute the image scale and scaled size.
+        input_size = image.shape[:2]
+        output_size = self.crop_size
+
+        # Add padding if the image is scaled down.
+        pad_size = np.subtract(output_size, input_size)
+        pad_size = np.maximum(pad_size, 0)
+        original_size = np.minimum(input_size, output_size)
+        return PadTransform(
+            0, 0, pad_size[1], pad_size[0], original_size[1], original_size[0], self.pad_value
+        )
+
+    def get_transform(self, image: np.ndarray) -> TransformList:
+        transforms = [self._get_crop(image)]
+        if self.pad:
+            transforms.append(self._get_pad(image))
+        return TransformList(transforms)
+
+
+class RandomCrop(Augmentation):
+    """
+    Randomly crop a rectangle region out of an image.
+    """
+
+    def __init__(self, crop_type: str, crop_size):
+        """
+        Args:
+            crop_type (str): one of "relative_range", "relative", "absolute", "absolute_range".
+            crop_size (tuple[float, float]): two floats, explained below.
+
+            - "relative": crop a (H * crop_size[0], W * crop_size[1]) region from an input image of
+              size (H, W). crop size should be in (0, 1]
+            - "relative_range": uniformly sample two values from [crop_size[0], 1]
+              and [crop_size[1], 1], and use them as in "relative" crop type.
+            - "absolute": crop a (crop_size[0], crop_size[1]) region from the input image.
+              crop_size must be smaller than the input image size.
+            - "absolute_range": for an input of size (H, W), uniformly sample H_crop in
+              [crop_size[0], min(H, crop_size[1])] and W_crop in [crop_size[0], min(W, crop_size[1])].
+              Then crop a region (H_crop, W_crop).
+ """ + # TODO style of relative_range and absolute_range are not consistent: + # one takes (h, w) but another takes (min, max) + super().__init__() + assert crop_type in ["relative_range", "relative", "absolute", "absolute_range"] + self._init(locals()) + + def get_transform(self, image): + h, w = image.shape[:2] + croph, cropw = self.get_crop_size((h, w)) + assert h >= croph and w >= cropw, "Shape computation in {} has bugs.".format(self) + h0 = np.random.randint(h - croph + 1) + w0 = np.random.randint(w - cropw + 1) + return CropTransform(w0, h0, cropw, croph) + + def get_crop_size(self, image_size): + """ + Args: + image_size (tuple): height, width + + Returns: + crop_size (tuple): height, width in absolute pixels + """ + h, w = image_size + if self.crop_type == "relative": + ch, cw = self.crop_size + return int(h * ch + 0.5), int(w * cw + 0.5) + elif self.crop_type == "relative_range": + crop_size = np.asarray(self.crop_size, dtype=np.float32) + ch, cw = crop_size + np.random.rand(2) * (1 - crop_size) + return int(h * ch + 0.5), int(w * cw + 0.5) + elif self.crop_type == "absolute": + return (min(self.crop_size[0], h), min(self.crop_size[1], w)) + elif self.crop_type == "absolute_range": + assert self.crop_size[0] <= self.crop_size[1] + ch = np.random.randint(min(h, self.crop_size[0]), min(h, self.crop_size[1]) + 1) + cw = np.random.randint(min(w, self.crop_size[0]), min(w, self.crop_size[1]) + 1) + return ch, cw + else: + raise NotImplementedError("Unknown crop type {}".format(self.crop_type)) + + +class RandomCrop_CategoryAreaConstraint(Augmentation): + """ + Similar to :class:`RandomCrop`, but find a cropping window such that no single category + occupies a ratio of more than `single_category_max_area` in semantic segmentation ground + truth, which can cause unstability in training. The function attempts to find such a valid + cropping window for at most 10 times. + """ + + def __init__( + self, + crop_type: str, + crop_size, + single_category_max_area: float = 1.0, + ignored_category: int = None, + ): + """ + Args: + crop_type, crop_size: same as in :class:`RandomCrop` + single_category_max_area: the maximum allowed area ratio of a + category. Set to 1.0 to disable + ignored_category: allow this category in the semantic segmentation + ground truth to exceed the area ratio. Usually set to the category + that's ignored in training. + """ + self.crop_aug = RandomCrop(crop_type, crop_size) + self._init(locals()) + + def get_transform(self, image, sem_seg): + if self.single_category_max_area >= 1.0: + return self.crop_aug.get_transform(image) + else: + h, w = sem_seg.shape + for _ in range(10): + crop_size = self.crop_aug.get_crop_size((h, w)) + y0 = np.random.randint(h - crop_size[0] + 1) + x0 = np.random.randint(w - crop_size[1] + 1) + sem_seg_temp = sem_seg[y0 : y0 + crop_size[0], x0 : x0 + crop_size[1]] + labels, cnt = np.unique(sem_seg_temp, return_counts=True) + if self.ignored_category is not None: + cnt = cnt[labels != self.ignored_category] + if len(cnt) > 1 and np.max(cnt) < np.sum(cnt) * self.single_category_max_area: + break + crop_tfm = CropTransform(x0, y0, crop_size[1], crop_size[0]) + return crop_tfm + + +class RandomExtent(Augmentation): + """ + Outputs an image by cropping a random "subrect" of the source image. + + The subrect can be parameterized to include pixels outside the source image, + in which case they will be set to zeros (i.e. black). The size of the output + image will vary with the size of the random subrect. 
+ """ + + def __init__(self, scale_range, shift_range): + """ + Args: + output_size (h, w): Dimensions of output image + scale_range (l, h): Range of input-to-output size scaling factor + shift_range (x, y): Range of shifts of the cropped subrect. The rect + is shifted by [w / 2 * Uniform(-x, x), h / 2 * Uniform(-y, y)], + where (w, h) is the (width, height) of the input image. Set each + component to zero to crop at the image's center. + """ + super().__init__() + self._init(locals()) + + def get_transform(self, image): + img_h, img_w = image.shape[:2] + + # Initialize src_rect to fit the input image. + src_rect = np.array([-0.5 * img_w, -0.5 * img_h, 0.5 * img_w, 0.5 * img_h]) + + # Apply a random scaling to the src_rect. + src_rect *= np.random.uniform(self.scale_range[0], self.scale_range[1]) + + # Apply a random shift to the coordinates origin. + src_rect[0::2] += self.shift_range[0] * img_w * (np.random.rand() - 0.5) + src_rect[1::2] += self.shift_range[1] * img_h * (np.random.rand() - 0.5) + + # Map src_rect coordinates into image coordinates (center at corner). + src_rect[0::2] += 0.5 * img_w + src_rect[1::2] += 0.5 * img_h + + return ExtentTransform( + src_rect=(src_rect[0], src_rect[1], src_rect[2], src_rect[3]), + output_size=(int(src_rect[3] - src_rect[1]), int(src_rect[2] - src_rect[0])), + ) + + +class RandomContrast(Augmentation): + """ + Randomly transforms image contrast. + + Contrast intensity is uniformly sampled in (intensity_min, intensity_max). + - intensity < 1 will reduce contrast + - intensity = 1 will preserve the input image + - intensity > 1 will increase contrast + + See: https://pillow.readthedocs.io/en/3.0.x/reference/ImageEnhance.html + """ + + def __init__(self, intensity_min, intensity_max): + """ + Args: + intensity_min (float): Minimum augmentation + intensity_max (float): Maximum augmentation + """ + super().__init__() + self._init(locals()) + + def get_transform(self, image): + w = np.random.uniform(self.intensity_min, self.intensity_max) + return BlendTransform(src_image=image.mean(), src_weight=1 - w, dst_weight=w) + + +class RandomBrightness(Augmentation): + """ + Randomly transforms image brightness. + + Brightness intensity is uniformly sampled in (intensity_min, intensity_max). + - intensity < 1 will reduce brightness + - intensity = 1 will preserve the input image + - intensity > 1 will increase brightness + + See: https://pillow.readthedocs.io/en/3.0.x/reference/ImageEnhance.html + """ + + def __init__(self, intensity_min, intensity_max): + """ + Args: + intensity_min (float): Minimum augmentation + intensity_max (float): Maximum augmentation + """ + super().__init__() + self._init(locals()) + + def get_transform(self, image): + w = np.random.uniform(self.intensity_min, self.intensity_max) + return BlendTransform(src_image=0, src_weight=1 - w, dst_weight=w) + + +class RandomSaturation(Augmentation): + """ + Randomly transforms saturation of an RGB image. + Input images are assumed to have 'RGB' channel order. + + Saturation intensity is uniformly sampled in (intensity_min, intensity_max). + - intensity < 1 will reduce saturation (make the image more grayscale) + - intensity = 1 will preserve the input image + - intensity > 1 will increase saturation + + See: https://pillow.readthedocs.io/en/3.0.x/reference/ImageEnhance.html + """ + + def __init__(self, intensity_min, intensity_max): + """ + Args: + intensity_min (float): Minimum augmentation (1 preserves input). + intensity_max (float): Maximum augmentation (1 preserves input). 
+ """ + super().__init__() + self._init(locals()) + + def get_transform(self, image): + assert image.shape[-1] == 3, "RandomSaturation only works on RGB images" + w = np.random.uniform(self.intensity_min, self.intensity_max) + grayscale = image.dot([0.299, 0.587, 0.114])[:, :, np.newaxis] + return BlendTransform(src_image=grayscale, src_weight=1 - w, dst_weight=w) + + +class RandomLighting(Augmentation): + """ + The "lighting" augmentation described in AlexNet, using fixed PCA over ImageNet. + Input images are assumed to have 'RGB' channel order. + + The degree of color jittering is randomly sampled via a normal distribution, + with standard deviation given by the scale parameter. + """ + + def __init__(self, scale): + """ + Args: + scale (float): Standard deviation of principal component weighting. + """ + super().__init__() + self._init(locals()) + self.eigen_vecs = np.array( + [[-0.5675, 0.7192, 0.4009], [-0.5808, -0.0045, -0.8140], [-0.5836, -0.6948, 0.4203]] + ) + self.eigen_vals = np.array([0.2175, 0.0188, 0.0045]) + + def get_transform(self, image): + assert image.shape[-1] == 3, "RandomLighting only works on RGB images" + weights = np.random.normal(scale=self.scale, size=3) + return BlendTransform( + src_image=self.eigen_vecs.dot(weights * self.eigen_vals), src_weight=1.0, dst_weight=1.0 + ) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/data/transforms/transform.py b/motion-gan-pipeline/preprocessing/third/detectron2/data/transforms/transform.py new file mode 100644 index 0000000..de44b99 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/data/transforms/transform.py @@ -0,0 +1,351 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. + +""" +See "Data Augmentation" tutorial for an overview of the system: +https://detectron2.readthedocs.io/tutorials/augmentation.html +""" + +import numpy as np +import torch +import torch.nn.functional as F +from fvcore.transforms.transform import ( + CropTransform, + HFlipTransform, + NoOpTransform, + Transform, + TransformList, +) +from PIL import Image + +try: + import cv2 # noqa +except ImportError: + # OpenCV is an optional dependency at the moment + pass + +__all__ = [ + "ExtentTransform", + "ResizeTransform", + "RotationTransform", + "ColorTransform", + "PILColorTransform", +] + + +class ExtentTransform(Transform): + """ + Extracts a subregion from the source image and scales it to the output size. + + The fill color is used to map pixels from the source rect that fall outside + the source image. 
+ + See: https://pillow.readthedocs.io/en/latest/PIL.html#PIL.ImageTransform.ExtentTransform + """ + + def __init__(self, src_rect, output_size, interp=Image.LINEAR, fill=0): + """ + Args: + src_rect (x0, y0, x1, y1): src coordinates + output_size (h, w): dst image size + interp: PIL interpolation methods + fill: Fill color used when src_rect extends outside image + """ + super().__init__() + self._set_attributes(locals()) + + def apply_image(self, img, interp=None): + h, w = self.output_size + if len(img.shape) > 2 and img.shape[2] == 1: + pil_image = Image.fromarray(img[:, :, 0], mode="L") + else: + pil_image = Image.fromarray(img) + pil_image = pil_image.transform( + size=(w, h), + method=Image.EXTENT, + data=self.src_rect, + resample=interp if interp else self.interp, + fill=self.fill, + ) + ret = np.asarray(pil_image) + if len(img.shape) > 2 and img.shape[2] == 1: + ret = np.expand_dims(ret, -1) + return ret + + def apply_coords(self, coords): + # Transform image center from source coordinates into output coordinates + # and then map the new origin to the corner of the output image. + h, w = self.output_size + x0, y0, x1, y1 = self.src_rect + new_coords = coords.astype(np.float32) + new_coords[:, 0] -= 0.5 * (x0 + x1) + new_coords[:, 1] -= 0.5 * (y0 + y1) + new_coords[:, 0] *= w / (x1 - x0) + new_coords[:, 1] *= h / (y1 - y0) + new_coords[:, 0] += 0.5 * w + new_coords[:, 1] += 0.5 * h + return new_coords + + def apply_segmentation(self, segmentation): + segmentation = self.apply_image(segmentation, interp=Image.NEAREST) + return segmentation + + +class ResizeTransform(Transform): + """ + Resize the image to a target size. + """ + + def __init__(self, h, w, new_h, new_w, interp=None): + """ + Args: + h, w (int): original image size + new_h, new_w (int): new image size + interp: PIL interpolation methods, defaults to bilinear. 
+ """ + # TODO decide on PIL vs opencv + super().__init__() + if interp is None: + interp = Image.BILINEAR + self._set_attributes(locals()) + + def apply_image(self, img, interp=None): + assert img.shape[:2] == (self.h, self.w) + assert len(img.shape) <= 4 + interp_method = interp if interp is not None else self.interp + + if img.dtype == np.uint8: + if len(img.shape) > 2 and img.shape[2] == 1: + pil_image = Image.fromarray(img[:, :, 0], mode="L") + else: + pil_image = Image.fromarray(img) + pil_image = pil_image.resize((self.new_w, self.new_h), interp_method) + ret = np.asarray(pil_image) + if len(img.shape) > 2 and img.shape[2] == 1: + ret = np.expand_dims(ret, -1) + else: + # PIL only supports uint8 + if any(x < 0 for x in img.strides): + img = np.ascontiguousarray(img) + img = torch.from_numpy(img) + shape = list(img.shape) + shape_4d = shape[:2] + [1] * (4 - len(shape)) + shape[2:] + img = img.view(shape_4d).permute(2, 3, 0, 1) # hw(c) -> nchw + _PIL_RESIZE_TO_INTERPOLATE_MODE = { + Image.NEAREST: "nearest", + Image.BILINEAR: "bilinear", + Image.BICUBIC: "bicubic", + } + mode = _PIL_RESIZE_TO_INTERPOLATE_MODE[interp_method] + align_corners = None if mode == "nearest" else False + img = F.interpolate( + img, (self.new_h, self.new_w), mode=mode, align_corners=align_corners + ) + shape[:2] = (self.new_h, self.new_w) + ret = img.permute(2, 3, 0, 1).view(shape).numpy() # nchw -> hw(c) + + return ret + + def apply_coords(self, coords): + coords[:, 0] = coords[:, 0] * (self.new_w * 1.0 / self.w) + coords[:, 1] = coords[:, 1] * (self.new_h * 1.0 / self.h) + return coords + + def apply_segmentation(self, segmentation): + segmentation = self.apply_image(segmentation, interp=Image.NEAREST) + return segmentation + + def inverse(self): + return ResizeTransform(self.new_h, self.new_w, self.h, self.w, self.interp) + + +class RotationTransform(Transform): + """ + This method returns a copy of this image, rotated the given + number of degrees counter clockwise around its center. 
+ """ + + def __init__(self, h, w, angle, expand=True, center=None, interp=None): + """ + Args: + h, w (int): original image size + angle (float): degrees for rotation + expand (bool): choose if the image should be resized to fit the whole + rotated image (default), or simply cropped + center (tuple (width, height)): coordinates of the rotation center + if left to None, the center will be fit to the center of each image + center has no effect if expand=True because it only affects shifting + interp: cv2 interpolation method, default cv2.INTER_LINEAR + """ + super().__init__() + image_center = np.array((w / 2, h / 2)) + if center is None: + center = image_center + if interp is None: + interp = cv2.INTER_LINEAR + abs_cos, abs_sin = (abs(np.cos(np.deg2rad(angle))), abs(np.sin(np.deg2rad(angle)))) + if expand: + # find the new width and height bounds + bound_w, bound_h = np.rint( + [h * abs_sin + w * abs_cos, h * abs_cos + w * abs_sin] + ).astype(int) + else: + bound_w, bound_h = w, h + + self._set_attributes(locals()) + self.rm_coords = self.create_rotation_matrix() + # Needed because of this problem https://github.com/opencv/opencv/issues/11784 + self.rm_image = self.create_rotation_matrix(offset=-0.5) + + def apply_image(self, img, interp=None): + """ + img should be a numpy array, formatted as Height * Width * Nchannels + """ + if len(img) == 0 or self.angle % 360 == 0: + return img + assert img.shape[:2] == (self.h, self.w) + interp = interp if interp is not None else self.interp + return cv2.warpAffine(img, self.rm_image, (self.bound_w, self.bound_h), flags=interp) + + def apply_coords(self, coords): + """ + coords should be a N * 2 array-like, containing N couples of (x, y) points + """ + coords = np.asarray(coords, dtype=float) + if len(coords) == 0 or self.angle % 360 == 0: + return coords + return cv2.transform(coords[:, np.newaxis, :], self.rm_coords)[:, 0, :] + + def apply_segmentation(self, segmentation): + segmentation = self.apply_image(segmentation, interp=cv2.INTER_NEAREST) + return segmentation + + def create_rotation_matrix(self, offset=0): + center = (self.center[0] + offset, self.center[1] + offset) + rm = cv2.getRotationMatrix2D(tuple(center), self.angle, 1) + if self.expand: + # Find the coordinates of the center of rotation in the new image + # The only point for which we know the future coordinates is the center of the image + rot_im_center = cv2.transform(self.image_center[None, None, :] + offset, rm)[0, 0, :] + new_center = np.array([self.bound_w / 2, self.bound_h / 2]) + offset - rot_im_center + # shift the rotation center to the new coordinates + rm[:, 2] += new_center + return rm + + def inverse(self): + """ + The inverse is to rotate it back with expand, and crop to get the original shape. + """ + if not self.expand: # Not possible to inverse if a part of the image is lost + raise NotImplementedError() + rotation = RotationTransform( + self.bound_h, self.bound_w, -self.angle, True, None, self.interp + ) + crop = CropTransform( + (rotation.bound_w - self.w) // 2, (rotation.bound_h - self.h) // 2, self.w, self.h + ) + return TransformList([rotation, crop]) + + +class ColorTransform(Transform): + """ + Generic wrapper for any photometric transforms. + These transformations should only affect the color space and + not the coordinate space of the image (e.g. 
annotation + coordinates such as bounding boxes should not be changed) + """ + + def __init__(self, op): + """ + Args: + op (Callable): operation to be applied to the image, + which takes in an ndarray and returns an ndarray. + """ + if not callable(op): + raise ValueError("op parameter should be callable") + super().__init__() + self._set_attributes(locals()) + + def apply_image(self, img): + return self.op(img) + + def apply_coords(self, coords): + return coords + + def inverse(self): + return NoOpTransform() + + def apply_segmentation(self, segmentation): + return segmentation + + +class PILColorTransform(ColorTransform): + """ + Generic wrapper for PIL Photometric image transforms, + which affect the color space and not the coordinate + space of the image + """ + + def __init__(self, op): + """ + Args: + op (Callable): operation to be applied to the image, + which takes in a PIL Image and returns a transformed + PIL Image. + For reference on possible operations see: + - https://pillow.readthedocs.io/en/stable/ + """ + if not callable(op): + raise ValueError("op parameter should be callable") + super().__init__(op) + + def apply_image(self, img): + img = Image.fromarray(img) + return np.asarray(super().apply_image(img)) + + +def HFlip_rotated_box(transform, rotated_boxes): + """ + Apply the horizontal flip transform on rotated boxes. + + Args: + rotated_boxes (ndarray): Nx5 floating point array of + (x_center, y_center, width, height, angle_degrees) format + in absolute coordinates. + """ + # Transform x_center + rotated_boxes[:, 0] = transform.width - rotated_boxes[:, 0] + # Transform angle + rotated_boxes[:, 4] = -rotated_boxes[:, 4] + return rotated_boxes + + +def Resize_rotated_box(transform, rotated_boxes): + """ + Apply the resizing transform on rotated boxes. For details of how these (approximation) + formulas are derived, please refer to :meth:`RotatedBoxes.scale`. + + Args: + rotated_boxes (ndarray): Nx5 floating point array of + (x_center, y_center, width, height, angle_degrees) format + in absolute coordinates. + """ + scale_factor_x = transform.new_w * 1.0 / transform.w + scale_factor_y = transform.new_h * 1.0 / transform.h + rotated_boxes[:, 0] *= scale_factor_x + rotated_boxes[:, 1] *= scale_factor_y + theta = rotated_boxes[:, 4] * np.pi / 180.0 + c = np.cos(theta) + s = np.sin(theta) + rotated_boxes[:, 2] *= np.sqrt(np.square(scale_factor_x * c) + np.square(scale_factor_y * s)) + rotated_boxes[:, 3] *= np.sqrt(np.square(scale_factor_x * s) + np.square(scale_factor_y * c)) + rotated_boxes[:, 4] = np.arctan2(scale_factor_x * s, scale_factor_y * c) * 180 / np.pi + + return rotated_boxes + + +HFlipTransform.register_type("rotated_box", HFlip_rotated_box) +ResizeTransform.register_type("rotated_box", Resize_rotated_box) + +# not necessary any more with latest fvcore +NoOpTransform.register_type("rotated_box", lambda t, x: x) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/engine/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/engine/__init__.py new file mode 100644 index 0000000..08a6157 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/engine/__init__.py @@ -0,0 +1,12 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+ +from .launch import * +from .train_loop import * + +__all__ = [k for k in globals().keys() if not k.startswith("_")] + + +# prefer to let hooks and defaults live in separate namespaces (therefore not in __all__) +# but still make them available here +from .hooks import * +from .defaults import * diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/engine/defaults.py b/motion-gan-pipeline/preprocessing/third/detectron2/engine/defaults.py new file mode 100644 index 0000000..cc3faa1 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/engine/defaults.py @@ -0,0 +1,715 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. + +""" +This file contains components with some default boilerplate logic user may need +in training / testing. They will not work for everyone, but many users may find them useful. + +The behavior of functions/classes in this file is subject to change, +since they are meant to represent the "common default behavior" people need in their projects. +""" + +import argparse +import logging +import os +import sys +import weakref +from collections import OrderedDict +from typing import Optional +import torch +from fvcore.nn.precise_bn import get_bn_modules +from omegaconf import OmegaConf +from torch.nn.parallel import DistributedDataParallel + +import detectron2.data.transforms as T +from detectron2.checkpoint import DetectionCheckpointer +from detectron2.config import CfgNode, LazyConfig +from detectron2.data import ( + MetadataCatalog, + build_detection_test_loader, + build_detection_train_loader, +) +from detectron2.evaluation import ( + DatasetEvaluator, + inference_on_dataset, + print_csv_format, + verify_results, +) +from detectron2.modeling import build_model +from detectron2.solver import build_lr_scheduler, build_optimizer +from detectron2.utils import comm +from detectron2.utils.collect_env import collect_env_info +from detectron2.utils.env import seed_all_rng +from detectron2.utils.events import CommonMetricPrinter, JSONWriter, TensorboardXWriter +from detectron2.utils.file_io import PathManager +from detectron2.utils.logger import setup_logger + +from . import hooks +from .train_loop import AMPTrainer, SimpleTrainer, TrainerBase + +__all__ = [ + "create_ddp_model", + "default_argument_parser", + "default_setup", + "default_writers", + "DefaultPredictor", + "DefaultTrainer", +] + + +def create_ddp_model(model, *, fp16_compression=False, **kwargs): + """ + Create a DistributedDataParallel model if there are >1 processes. + + Args: + model: a torch.nn.Module + fp16_compression: add fp16 compression hooks to the ddp object. + See more at https://pytorch.org/docs/stable/ddp_comm_hooks.html#torch.distributed.algorithms.ddp_comm_hooks.default_hooks.fp16_compress_hook + kwargs: other arguments of :module:`torch.nn.parallel.DistributedDataParallel`. + """ # noqa + if comm.get_world_size() == 1: + return model + if "device_ids" not in kwargs: + kwargs["device_ids"] = [comm.get_local_rank()] + ddp = DistributedDataParallel(model, **kwargs) + if fp16_compression: + from torch.distributed.algorithms.ddp_comm_hooks import default as comm_hooks + + ddp.register_comm_hook(state=None, hook=comm_hooks.fp16_compress_hook) + return ddp + + +def default_argument_parser(epilog=None): + """ + Create a parser with some common arguments used by detectron2 users. + + Args: + epilog (str): epilog passed to ArgumentParser describing the usage. 
+
+    Returns:
+        argparse.ArgumentParser:
+    """
+    parser = argparse.ArgumentParser(
+        epilog=epilog
+        or f"""
+Examples:
+
+Run on single machine:
+    $ {sys.argv[0]} --num-gpus 8 --config-file cfg.yaml
+
+Change some config options:
+    $ {sys.argv[0]} --config-file cfg.yaml MODEL.WEIGHTS /path/to/weight.pth SOLVER.BASE_LR 0.001
+
+Run on multiple machines:
+    (machine0)$ {sys.argv[0]} --machine-rank 0 --num-machines 2 --dist-url <URL> [--other-flags]
+    (machine1)$ {sys.argv[0]} --machine-rank 1 --num-machines 2 --dist-url <URL> [--other-flags]
+""",
+        formatter_class=argparse.RawDescriptionHelpFormatter,
+    )
+    parser.add_argument("--config-file", default="", metavar="FILE", help="path to config file")
+    parser.add_argument(
+        "--resume",
+        action="store_true",
+        help="Whether to attempt to resume from the checkpoint directory. "
+        "See documentation of `DefaultTrainer.resume_or_load()` for what it means.",
+    )
+    parser.add_argument("--eval-only", action="store_true", help="perform evaluation only")
+    parser.add_argument("--num-gpus", type=int, default=1, help="number of gpus *per machine*")
+    parser.add_argument("--num-machines", type=int, default=1, help="total number of machines")
+    parser.add_argument(
+        "--machine-rank", type=int, default=0, help="the rank of this machine (unique per machine)"
+    )
+
+    # PyTorch still may leave orphan processes in multi-gpu training.
+    # Therefore we use a deterministic way to obtain port,
+    # so that users are aware of orphan processes by seeing the port occupied.
+    port = 2 ** 15 + 2 ** 14 + hash(os.getuid() if sys.platform != "win32" else 1) % 2 ** 14
+    parser.add_argument(
+        "--dist-url",
+        default="tcp://127.0.0.1:{}".format(port),
+        help="initialization URL for pytorch distributed backend. See "
+        "https://pytorch.org/docs/stable/distributed.html for details.",
+    )
+    parser.add_argument(
+        "opts",
+        help="""
+Modify config options at the end of the command. For Yacs configs, use
+space-separated "PATH.KEY VALUE" pairs.
+For python-based LazyConfig, use "path.key=value".
+        """.strip(),
+        default=None,
+        nargs=argparse.REMAINDER,
+    )
+    return parser
+
+
+def _try_get_key(cfg, *keys, default=None):
+    """
+    Try to select keys from cfg until the first key that exists; otherwise return default.
+    """
+    if isinstance(cfg, CfgNode):
+        cfg = OmegaConf.create(cfg.dump())
+    for k in keys:
+        none = object()
+        p = OmegaConf.select(cfg, k, default=none)
+        if p is not none:
+            return p
+    return default
+
+
+def _highlight(code, filename):
+    try:
+        import pygments
+    except ImportError:
+        return code
+
+    from pygments.lexers import Python3Lexer, YamlLexer
+    from pygments.formatters import Terminal256Formatter
+
+    lexer = Python3Lexer() if filename.endswith(".py") else YamlLexer()
+    code = pygments.highlight(code, lexer, Terminal256Formatter(style="monokai"))
+    return code
+
+
+def default_setup(cfg, args):
+    """
+    Perform some basic common setups at the beginning of a job, including:
+
+    1. Set up the detectron2 logger
+    2. Log basic information about environment, cmdline arguments, and config
+    3.
Backup the config to the output directory + + Args: + cfg (CfgNode or omegaconf.DictConfig): the full config to be used + args (argparse.NameSpace): the command line arguments to be logged + """ + output_dir = _try_get_key(cfg, "OUTPUT_DIR", "output_dir", "train.output_dir") + if comm.is_main_process() and output_dir: + PathManager.mkdirs(output_dir) + + rank = comm.get_rank() + setup_logger(output_dir, distributed_rank=rank, name="fvcore") + logger = setup_logger(output_dir, distributed_rank=rank) + + logger.info("Rank of current process: {}. World size: {}".format(rank, comm.get_world_size())) + logger.info("Environment info:\n" + collect_env_info()) + + logger.info("Command line arguments: " + str(args)) + if hasattr(args, "config_file") and args.config_file != "": + logger.info( + "Contents of args.config_file={}:\n{}".format( + args.config_file, + _highlight(PathManager.open(args.config_file, "r").read(), args.config_file), + ) + ) + + if comm.is_main_process() and output_dir: + # Note: some of our scripts may expect the existence of + # config.yaml in output directory + path = os.path.join(output_dir, "config.yaml") + if isinstance(cfg, CfgNode): + logger.info("Running with full config:\n{}".format(_highlight(cfg.dump(), ".yaml"))) + with PathManager.open(path, "w") as f: + f.write(cfg.dump()) + else: + LazyConfig.save(cfg, path) + logger.info("Full config saved to {}".format(path)) + + # make sure each worker has a different, yet deterministic seed if specified + seed = _try_get_key(cfg, "SEED", "train.seed", default=-1) + seed_all_rng(None if seed < 0 else seed + rank) + + # cudnn benchmark has large overhead. It shouldn't be used considering the small size of + # typical validation set. + if not (hasattr(args, "eval_only") and args.eval_only): + torch.backends.cudnn.benchmark = _try_get_key( + cfg, "CUDNN_BENCHMARK", "train.cudnn_benchmark", default=False + ) + + +def default_writers(output_dir: str, max_iter: Optional[int] = None): + """ + Build a list of :class:`EventWriter` to be used. + It now consists of a :class:`CommonMetricPrinter`, + :class:`TensorboardXWriter` and :class:`JSONWriter`. + + Args: + output_dir: directory to store JSON metrics and tensorboard events + max_iter: the total number of iterations + + Returns: + list[EventWriter]: a list of :class:`EventWriter` objects. + """ + PathManager.mkdirs(output_dir) + return [ + # It may not always print what you want to see, since it prints "common" metrics only. + CommonMetricPrinter(max_iter), + JSONWriter(os.path.join(output_dir, "metrics.json")), + TensorboardXWriter(output_dir), + ] + + +class DefaultPredictor: + """ + Create a simple end-to-end predictor with the given config that runs on + single device for a single input image. + + Compared to using the model directly, this class does the following additions: + + 1. Load checkpoint from `cfg.MODEL.WEIGHTS`. + 2. Always take BGR image as the input and apply conversion defined by `cfg.INPUT.FORMAT`. + 3. Apply resizing defined by `cfg.INPUT.{MIN,MAX}_SIZE_TEST`. + 4. Take one input image and produce a single output, instead of a batch. + + This is meant for simple demo purposes, so it does the above steps automatically. + This is not meant for benchmarks or running complicated inference logic. + If you'd like to do anything more complicated, please refer to its source code as + examples to build and use the model manually. + + Attributes: + metadata (Metadata): the metadata of the underlying dataset, obtained from + cfg.DATASETS.TEST. 
+ + Examples: + :: + pred = DefaultPredictor(cfg) + inputs = cv2.imread("input.jpg") + outputs = pred(inputs) + """ + + def __init__(self, cfg): + self.cfg = cfg.clone() # cfg can be modified by model + self.model = build_model(self.cfg) + self.model.eval() + if len(cfg.DATASETS.TEST): + self.metadata = MetadataCatalog.get(cfg.DATASETS.TEST[0]) + + checkpointer = DetectionCheckpointer(self.model) + checkpointer.load(cfg.MODEL.WEIGHTS) + + self.aug = T.ResizeShortestEdge( + [cfg.INPUT.MIN_SIZE_TEST, cfg.INPUT.MIN_SIZE_TEST], cfg.INPUT.MAX_SIZE_TEST + ) + + self.input_format = cfg.INPUT.FORMAT + assert self.input_format in ["RGB", "BGR"], self.input_format + + def __call__(self, original_image): + """ + Args: + original_image (np.ndarray): an image of shape (H, W, C) (in BGR order). + + Returns: + predictions (dict): + the output of the model for one image only. + See :doc:`/tutorials/models` for details about the format. + """ + with torch.no_grad(): # https://github.com/sphinx-doc/sphinx/issues/4258 + # Apply pre-processing to image. + if self.input_format == "RGB": + # whether the model expects BGR inputs or RGB + original_image = original_image[:, :, ::-1] + height, width = original_image.shape[:2] + image = self.aug.get_transform(original_image).apply_image(original_image) + image = torch.as_tensor(image.astype("float32").transpose(2, 0, 1)) + + inputs = {"image": image, "height": height, "width": width} + predictions = self.model([inputs])[0] + return predictions + + +class DefaultTrainer(TrainerBase): + """ + A trainer with default training logic. It does the following: + + 1. Create a :class:`SimpleTrainer` using model, optimizer, dataloader + defined by the given config. Create a LR scheduler defined by the config. + 2. Load the last checkpoint or `cfg.MODEL.WEIGHTS`, if exists, when + `resume_or_load` is called. + 3. Register a few common hooks defined by the config. + + It is created to simplify the **standard model training workflow** and reduce code boilerplate + for users who only need the standard training workflow, with standard features. + It means this class makes *many assumptions* about your training logic that + may easily become invalid in a new research. In fact, any assumptions beyond those made in the + :class:`SimpleTrainer` are too much for research. + + The code of this class has been annotated about restrictive assumptions it makes. + When they do not work for you, you're encouraged to: + + 1. Overwrite methods of this class, OR: + 2. Use :class:`SimpleTrainer`, which only does minimal SGD training and + nothing else. You can then add your own hooks if needed. OR: + 3. Write your own training loop similar to `tools/plain_train_net.py`. + + See the :doc:`/tutorials/training` tutorials for more details. + + Note that the behavior of this class, like other functions/classes in + this file, is not stable, since it is meant to represent the "common default behavior". + It is only guaranteed to work well with the standard models and training workflow in detectron2. + To obtain more stable behavior, write your own training logic with other public APIs. 
+ + Examples: + :: + trainer = DefaultTrainer(cfg) + trainer.resume_or_load() # load last checkpoint or MODEL.WEIGHTS + trainer.train() + + Attributes: + scheduler: + checkpointer (DetectionCheckpointer): + cfg (CfgNode): + """ + + def __init__(self, cfg): + """ + Args: + cfg (CfgNode): + """ + super().__init__() + logger = logging.getLogger("detectron2") + if not logger.isEnabledFor(logging.INFO): # setup_logger is not called for d2 + setup_logger() + cfg = DefaultTrainer.auto_scale_workers(cfg, comm.get_world_size()) + + # Assume these objects must be constructed in this order. + model = self.build_model(cfg) + optimizer = self.build_optimizer(cfg, model) + data_loader = self.build_train_loader(cfg) + + model = create_ddp_model(model, broadcast_buffers=False) + self._trainer = (AMPTrainer if cfg.SOLVER.AMP.ENABLED else SimpleTrainer)( + model, data_loader, optimizer + ) + + self.scheduler = self.build_lr_scheduler(cfg, optimizer) + self.checkpointer = DetectionCheckpointer( + # Assume you want to save checkpoints together with logs/statistics + model, + cfg.OUTPUT_DIR, + trainer=weakref.proxy(self), + ) + self.start_iter = 0 + self.max_iter = cfg.SOLVER.MAX_ITER + self.cfg = cfg + + self.register_hooks(self.build_hooks()) + + def resume_or_load(self, resume=True): + """ + If `resume==True` and `cfg.OUTPUT_DIR` contains the last checkpoint (defined by + a `last_checkpoint` file), resume from the file. Resuming means loading all + available states (eg. optimizer and scheduler) and update iteration counter + from the checkpoint. ``cfg.MODEL.WEIGHTS`` will not be used. + + Otherwise, this is considered as an independent training. The method will load model + weights from the file `cfg.MODEL.WEIGHTS` (but will not load other states) and start + from iteration 0. + + Args: + resume (bool): whether to do resume or not + """ + self.checkpointer.resume_or_load(self.cfg.MODEL.WEIGHTS, resume=resume) + if resume and self.checkpointer.has_checkpoint(): + # The checkpoint stores the training iteration that just finished, thus we start + # at the next iteration + self.start_iter = self.iter + 1 + + def build_hooks(self): + """ + Build a list of default hooks, including timing, evaluation, + checkpointing, lr scheduling, precise BN, writing events. + + Returns: + list[HookBase]: + """ + cfg = self.cfg.clone() + cfg.defrost() + cfg.DATALOADER.NUM_WORKERS = 0 # save some memory and time for PreciseBN + + ret = [ + hooks.IterationTimer(), + hooks.LRScheduler(), + hooks.PreciseBN( + # Run at the same freq as (but before) evaluation. + cfg.TEST.EVAL_PERIOD, + self.model, + # Build a new data loader to not affect training + self.build_train_loader(cfg), + cfg.TEST.PRECISE_BN.NUM_ITER, + ) + if cfg.TEST.PRECISE_BN.ENABLED and get_bn_modules(self.model) + else None, + ] + + # Do PreciseBN before checkpointer, because it updates the model and need to + # be saved by checkpointer. + # This is not always the best: if checkpointing has a different frequency, + # some checkpoints may have more precise statistics than others. + if comm.is_main_process(): + ret.append(hooks.PeriodicCheckpointer(self.checkpointer, cfg.SOLVER.CHECKPOINT_PERIOD)) + + def test_and_save_results(): + self._last_eval_results = self.test(self.cfg, self.model) + return self._last_eval_results + + # Do evaluation after checkpointer, because then if it fails, + # we can use the saved checkpoint to debug. 
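+        # EvalHook (see hooks.py) will call test_and_save_results every
+        # cfg.TEST.EVAL_PERIOD iterations and once more after the last iteration.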
+ ret.append(hooks.EvalHook(cfg.TEST.EVAL_PERIOD, test_and_save_results)) + + if comm.is_main_process(): + # Here the default print/log frequency of each writer is used. + # run writers in the end, so that evaluation metrics are written + ret.append(hooks.PeriodicWriter(self.build_writers(), period=20)) + return ret + + def build_writers(self): + """ + Build a list of writers to be used using :func:`default_writers()`. + If you'd like a different list of writers, you can overwrite it in + your trainer. + + Returns: + list[EventWriter]: a list of :class:`EventWriter` objects. + """ + return default_writers(self.cfg.OUTPUT_DIR, self.max_iter) + + def train(self): + """ + Run training. + + Returns: + OrderedDict of results, if evaluation is enabled. Otherwise None. + """ + super().train(self.start_iter, self.max_iter) + if len(self.cfg.TEST.EXPECTED_RESULTS) and comm.is_main_process(): + assert hasattr( + self, "_last_eval_results" + ), "No evaluation results obtained during training!" + verify_results(self.cfg, self._last_eval_results) + return self._last_eval_results + + def run_step(self): + self._trainer.iter = self.iter + self._trainer.run_step() + + def state_dict(self): + ret = super().state_dict() + ret["_trainer"] = self._trainer.state_dict() + return ret + + def load_state_dict(self, state_dict): + super().load_state_dict(state_dict) + self._trainer.load_state_dict(state_dict["_trainer"]) + + @classmethod + def build_model(cls, cfg): + """ + Returns: + torch.nn.Module: + + It now calls :func:`detectron2.modeling.build_model`. + Overwrite it if you'd like a different model. + """ + model = build_model(cfg) + logger = logging.getLogger(__name__) + logger.info("Model:\n{}".format(model)) + return model + + @classmethod + def build_optimizer(cls, cfg, model): + """ + Returns: + torch.optim.Optimizer: + + It now calls :func:`detectron2.solver.build_optimizer`. + Overwrite it if you'd like a different optimizer. + """ + return build_optimizer(cfg, model) + + @classmethod + def build_lr_scheduler(cls, cfg, optimizer): + """ + It now calls :func:`detectron2.solver.build_lr_scheduler`. + Overwrite it if you'd like a different scheduler. + """ + return build_lr_scheduler(cfg, optimizer) + + @classmethod + def build_train_loader(cls, cfg): + """ + Returns: + iterable + + It now calls :func:`detectron2.data.build_detection_train_loader`. + Overwrite it if you'd like a different data loader. + """ + return build_detection_train_loader(cfg) + + @classmethod + def build_test_loader(cls, cfg, dataset_name): + """ + Returns: + iterable + + It now calls :func:`detectron2.data.build_detection_test_loader`. + Overwrite it if you'd like a different data loader. + """ + return build_detection_test_loader(cfg, dataset_name) + + @classmethod + def build_evaluator(cls, cfg, dataset_name): + """ + Returns: + DatasetEvaluator or None + + It is not implemented by default. + """ + raise NotImplementedError( + """ +If you want DefaultTrainer to automatically run evaluation, +please implement `build_evaluator()` in subclasses (see train_net.py for example). +Alternatively, you can call evaluation functions yourself (see Colab balloon tutorial for example). +""" + ) + + @classmethod + def test(cls, cfg, model, evaluators=None): + """ + Evaluate the given model. The given model is expected to already contain + weights to evaluate. + + Args: + cfg (CfgNode): + model (nn.Module): + evaluators (list[DatasetEvaluator] or None): if None, will call + :meth:`build_evaluator`. 
Otherwise, must have the same length as + ``cfg.DATASETS.TEST``. + + Returns: + dict: a dict of result metrics + """ + logger = logging.getLogger(__name__) + if isinstance(evaluators, DatasetEvaluator): + evaluators = [evaluators] + if evaluators is not None: + assert len(cfg.DATASETS.TEST) == len(evaluators), "{} != {}".format( + len(cfg.DATASETS.TEST), len(evaluators) + ) + + results = OrderedDict() + for idx, dataset_name in enumerate(cfg.DATASETS.TEST): + data_loader = cls.build_test_loader(cfg, dataset_name) + # When evaluators are passed in as arguments, + # implicitly assume that evaluators can be created before data_loader. + if evaluators is not None: + evaluator = evaluators[idx] + else: + try: + evaluator = cls.build_evaluator(cfg, dataset_name) + except NotImplementedError: + logger.warn( + "No evaluator found. Use `DefaultTrainer.test(evaluators=)`, " + "or implement its `build_evaluator` method." + ) + results[dataset_name] = {} + continue + results_i = inference_on_dataset(model, data_loader, evaluator) + results[dataset_name] = results_i + if comm.is_main_process(): + assert isinstance( + results_i, dict + ), "Evaluator must return a dict on the main process. Got {} instead.".format( + results_i + ) + logger.info("Evaluation results for {} in csv format:".format(dataset_name)) + print_csv_format(results_i) + + if len(results) == 1: + results = list(results.values())[0] + return results + + @staticmethod + def auto_scale_workers(cfg, num_workers: int): + """ + When the config is defined for certain number of workers (according to + ``cfg.SOLVER.REFERENCE_WORLD_SIZE``) that's different from the number of + workers currently in use, returns a new cfg where the total batch size + is scaled so that the per-GPU batch size stays the same as the + original ``IMS_PER_BATCH // REFERENCE_WORLD_SIZE``. + + Other config options are also scaled accordingly: + * training steps and warmup steps are scaled inverse proportionally. + * learning rate are scaled proportionally, following :paper:`ImageNet in 1h`. + + For example, with the original config like the following: + + .. code-block:: yaml + + IMS_PER_BATCH: 16 + BASE_LR: 0.1 + REFERENCE_WORLD_SIZE: 8 + MAX_ITER: 5000 + STEPS: (4000,) + CHECKPOINT_PERIOD: 1000 + + When this config is used on 16 GPUs instead of the reference number 8, + calling this method will return a new config with: + + .. code-block:: yaml + + IMS_PER_BATCH: 32 + BASE_LR: 0.2 + REFERENCE_WORLD_SIZE: 16 + MAX_ITER: 2500 + STEPS: (2000,) + CHECKPOINT_PERIOD: 500 + + Note that both the original config and this new config can be trained on 16 GPUs. + It's up to user whether to enable this feature (by setting ``REFERENCE_WORLD_SIZE``). + + Returns: + CfgNode: a new config. Same as original if ``cfg.SOLVER.REFERENCE_WORLD_SIZE==0``. + """ + old_world_size = cfg.SOLVER.REFERENCE_WORLD_SIZE + if old_world_size == 0 or old_world_size == num_workers: + return cfg + cfg = cfg.clone() + frozen = cfg.is_frozen() + cfg.defrost() + + assert ( + cfg.SOLVER.IMS_PER_BATCH % old_world_size == 0 + ), "Invalid REFERENCE_WORLD_SIZE in config!" 
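+        # Worked example from the docstring above: REFERENCE_WORLD_SIZE=8 run on
+        # 16 workers gives scale=2.0, so IMS_PER_BATCH 16->32, BASE_LR 0.1->0.2,
+        # MAX_ITER 5000->2500, STEPS (4000,)->(2000,), CHECKPOINT_PERIOD 1000->500.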
+ scale = num_workers / old_world_size + bs = cfg.SOLVER.IMS_PER_BATCH = int(round(cfg.SOLVER.IMS_PER_BATCH * scale)) + lr = cfg.SOLVER.BASE_LR = cfg.SOLVER.BASE_LR * scale + max_iter = cfg.SOLVER.MAX_ITER = int(round(cfg.SOLVER.MAX_ITER / scale)) + warmup_iter = cfg.SOLVER.WARMUP_ITERS = int(round(cfg.SOLVER.WARMUP_ITERS / scale)) + cfg.SOLVER.STEPS = tuple(int(round(s / scale)) for s in cfg.SOLVER.STEPS) + cfg.TEST.EVAL_PERIOD = int(round(cfg.TEST.EVAL_PERIOD / scale)) + cfg.SOLVER.CHECKPOINT_PERIOD = int(round(cfg.SOLVER.CHECKPOINT_PERIOD / scale)) + cfg.SOLVER.REFERENCE_WORLD_SIZE = num_workers # maintain invariant + logger = logging.getLogger(__name__) + logger.info( + f"Auto-scaling the config to batch_size={bs}, learning_rate={lr}, " + f"max_iter={max_iter}, warmup={warmup_iter}." + ) + + if frozen: + cfg.freeze() + return cfg + + +# Access basic attributes from the underlying trainer +for _attr in ["model", "data_loader", "optimizer"]: + setattr( + DefaultTrainer, + _attr, + property( + # getter + lambda self, x=_attr: getattr(self._trainer, x), + # setter + lambda self, value, x=_attr: setattr(self._trainer, x, value), + ), + ) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/engine/hooks.py b/motion-gan-pipeline/preprocessing/third/detectron2/engine/hooks.py new file mode 100644 index 0000000..52c321f --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/engine/hooks.py @@ -0,0 +1,686 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. + +import datetime +import itertools +import logging +import math +import operator +import os +import tempfile +import time +import warnings +from collections import Counter +import torch +from fvcore.common.checkpoint import Checkpointer +from fvcore.common.checkpoint import PeriodicCheckpointer as _PeriodicCheckpointer +from fvcore.common.param_scheduler import ParamScheduler +from fvcore.common.timer import Timer +from fvcore.nn.precise_bn import get_bn_modules, update_bn_stats + +import detectron2.utils.comm as comm +from detectron2.evaluation.testing import flatten_results_dict +from detectron2.solver import LRMultiplier +from detectron2.utils.events import EventStorage, EventWriter +from detectron2.utils.file_io import PathManager + +from .train_loop import HookBase + +__all__ = [ + "CallbackHook", + "IterationTimer", + "PeriodicWriter", + "PeriodicCheckpointer", + "BestCheckpointer", + "LRScheduler", + "AutogradProfiler", + "EvalHook", + "PreciseBN", + "TorchProfiler", + "TorchMemoryStats", +] + + +""" +Implement some common hooks. +""" + + +class CallbackHook(HookBase): + """ + Create a hook using callback functions provided by the user. + """ + + def __init__(self, *, before_train=None, after_train=None, before_step=None, after_step=None): + """ + Each argument is a function that takes one argument: the trainer. + """ + self._before_train = before_train + self._before_step = before_step + self._after_step = after_step + self._after_train = after_train + + def before_train(self): + if self._before_train: + self._before_train(self.trainer) + + def after_train(self): + if self._after_train: + self._after_train(self.trainer) + # The functions may be closures that hold reference to the trainer + # Therefore, delete them to avoid circular reference. 
+ del self._before_train, self._after_train + del self._before_step, self._after_step + + def before_step(self): + if self._before_step: + self._before_step(self.trainer) + + def after_step(self): + if self._after_step: + self._after_step(self.trainer) + + +class IterationTimer(HookBase): + """ + Track the time spent for each iteration (each run_step call in the trainer). + Print a summary in the end of training. + + This hook uses the time between the call to its :meth:`before_step` + and :meth:`after_step` methods. + Under the convention that :meth:`before_step` of all hooks should only + take negligible amount of time, the :class:`IterationTimer` hook should be + placed at the beginning of the list of hooks to obtain accurate timing. + """ + + def __init__(self, warmup_iter=3): + """ + Args: + warmup_iter (int): the number of iterations at the beginning to exclude + from timing. + """ + self._warmup_iter = warmup_iter + self._step_timer = Timer() + self._start_time = time.perf_counter() + self._total_timer = Timer() + + def before_train(self): + self._start_time = time.perf_counter() + self._total_timer.reset() + self._total_timer.pause() + + def after_train(self): + logger = logging.getLogger(__name__) + total_time = time.perf_counter() - self._start_time + total_time_minus_hooks = self._total_timer.seconds() + hook_time = total_time - total_time_minus_hooks + + num_iter = self.trainer.storage.iter + 1 - self.trainer.start_iter - self._warmup_iter + + if num_iter > 0 and total_time_minus_hooks > 0: + # Speed is meaningful only after warmup + # NOTE this format is parsed by grep in some scripts + logger.info( + "Overall training speed: {} iterations in {} ({:.4f} s / it)".format( + num_iter, + str(datetime.timedelta(seconds=int(total_time_minus_hooks))), + total_time_minus_hooks / num_iter, + ) + ) + + logger.info( + "Total training time: {} ({} on hooks)".format( + str(datetime.timedelta(seconds=int(total_time))), + str(datetime.timedelta(seconds=int(hook_time))), + ) + ) + + def before_step(self): + self._step_timer.reset() + self._total_timer.resume() + + def after_step(self): + # +1 because we're in after_step, the current step is done + # but not yet counted + iter_done = self.trainer.storage.iter - self.trainer.start_iter + 1 + if iter_done >= self._warmup_iter: + sec = self._step_timer.seconds() + self.trainer.storage.put_scalars(time=sec) + else: + self._start_time = time.perf_counter() + self._total_timer.reset() + + self._total_timer.pause() + + +class PeriodicWriter(HookBase): + """ + Write events to EventStorage (by calling ``writer.write()``) periodically. + + It is executed every ``period`` iterations and after the last iteration. + Note that ``period`` does not affect how data is smoothed by each writer. + """ + + def __init__(self, writers, period=20): + """ + Args: + writers (list[EventWriter]): a list of EventWriter objects + period (int): + """ + self._writers = writers + for w in writers: + assert isinstance(w, EventWriter), w + self._period = period + + def after_step(self): + if (self.trainer.iter + 1) % self._period == 0 or ( + self.trainer.iter == self.trainer.max_iter - 1 + ): + for writer in self._writers: + writer.write() + + def after_train(self): + for writer in self._writers: + # If any new data is found (e.g. produced by other after_train), + # write them before closing + writer.write() + writer.close() + + +class PeriodicCheckpointer(_PeriodicCheckpointer, HookBase): + """ + Same as :class:`detectron2.checkpoint.PeriodicCheckpointer`, but as a hook. 
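+
+    Examples:
+    ::
+        # construction sketch, assuming a `checkpointer` and a `cfg` in scope,
+        # mirroring how DefaultTrainer.build_hooks wires it up:
+        PeriodicCheckpointer(checkpointer, cfg.SOLVER.CHECKPOINT_PERIOD)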
+ + Note that when used as a hook, + it is unable to save additional data other than what's defined + by the given `checkpointer`. + + It is executed every ``period`` iterations and after the last iteration. + """ + + def before_train(self): + self.max_iter = self.trainer.max_iter + + def after_step(self): + # No way to use **kwargs + self.step(self.trainer.iter) + + +class BestCheckpointer(HookBase): + """ + Checkpoints best weights based off given metric. + + This hook should be used in conjunction to and executed after the hook + that produces the metric, e.g. `EvalHook`. + """ + + def __init__( + self, + eval_period: int, + checkpointer: Checkpointer, + val_metric: str, + mode: str = "max", + file_prefix: str = "model_best", + ) -> None: + """ + Args: + eval_period (int): the period `EvalHook` is set to run. + checkpointer: the checkpointer object used to save checkpoints. + val_metric (str): validation metric to track for best checkpoint, e.g. "bbox/AP50" + mode (str): one of {'max', 'min'}. controls whether the chosen val metric should be + maximized or minimized, e.g. for "bbox/AP50" it should be "max" + file_prefix (str): the prefix of checkpoint's filename, defaults to "model_best" + """ + self._logger = logging.getLogger(__name__) + self._period = eval_period + self._val_metric = val_metric + assert mode in [ + "max", + "min", + ], f'Mode "{mode}" to `BestCheckpointer` is unknown. It should be one of {"max", "min"}.' + if mode == "max": + self._compare = operator.gt + else: + self._compare = operator.lt + self._checkpointer = checkpointer + self._file_prefix = file_prefix + self.best_metric = None + self.best_iter = None + + def _update_best(self, val, iteration): + if math.isnan(val) or math.isinf(val): + return False + self.best_metric = val + self.best_iter = iteration + return True + + def _best_checking(self): + metric_tuple = self.trainer.storage.latest().get(self._val_metric) + if metric_tuple is None: + self._logger.warning( + f"Given val metric {self._val_metric} does not seem to be computed/stored." + "Will not be checkpointing based on it." + ) + return + else: + latest_metric, metric_iter = metric_tuple + + if self.best_metric is None: + if self._update_best(latest_metric, metric_iter): + additional_state = {"iteration": metric_iter} + self._checkpointer.save(f"{self._file_prefix}", **additional_state) + self._logger.info( + f"Saved first model at {self.best_metric:0.5f} @ {self.best_iter} steps" + ) + elif self._compare(latest_metric, self.best_metric): + additional_state = {"iteration": metric_iter} + self._checkpointer.save(f"{self._file_prefix}", **additional_state) + self._logger.info( + f"Saved best model as latest eval score for {self._val_metric} is " + f"{latest_metric:0.5f}, better than last best score " + f"{self.best_metric:0.5f} @ iteration {self.best_iter}." + ) + self._update_best(latest_metric, metric_iter) + else: + self._logger.info( + f"Not saving as latest eval score for {self._val_metric} is {latest_metric:0.5f}, " + f"not better than best score {self.best_metric:0.5f} @ iteration {self.best_iter}." 
+ ) + + def after_step(self): + # same conditions as `EvalHook` + next_iter = self.trainer.iter + 1 + if ( + self._period > 0 + and next_iter % self._period == 0 + and next_iter != self.trainer.max_iter + ): + self._best_checking() + + def after_train(self): + # same conditions as `EvalHook` + if self.trainer.iter + 1 >= self.trainer.max_iter: + self._best_checking() + + +class LRScheduler(HookBase): + """ + A hook which executes a torch builtin LR scheduler and summarizes the LR. + It is executed after every iteration. + """ + + def __init__(self, optimizer=None, scheduler=None): + """ + Args: + optimizer (torch.optim.Optimizer): + scheduler (torch.optim.LRScheduler or fvcore.common.param_scheduler.ParamScheduler): + if a :class:`ParamScheduler` object, it defines the multiplier over the base LR + in the optimizer. + + If any argument is not given, will try to obtain it from the trainer. + """ + self._optimizer = optimizer + self._scheduler = scheduler + + def before_train(self): + self._optimizer = self._optimizer or self.trainer.optimizer + if isinstance(self.scheduler, ParamScheduler): + self._scheduler = LRMultiplier( + self._optimizer, + self.scheduler, + self.trainer.max_iter, + last_iter=self.trainer.iter - 1, + ) + self._best_param_group_id = LRScheduler.get_best_param_group_id(self._optimizer) + + @staticmethod + def get_best_param_group_id(optimizer): + # NOTE: some heuristics on what LR to summarize + # summarize the param group with most parameters + largest_group = max(len(g["params"]) for g in optimizer.param_groups) + + if largest_group == 1: + # If all groups have one parameter, + # then find the most common initial LR, and use it for summary + lr_count = Counter([g["lr"] for g in optimizer.param_groups]) + lr = lr_count.most_common()[0][0] + for i, g in enumerate(optimizer.param_groups): + if g["lr"] == lr: + return i + else: + for i, g in enumerate(optimizer.param_groups): + if len(g["params"]) == largest_group: + return i + + def after_step(self): + lr = self._optimizer.param_groups[self._best_param_group_id]["lr"] + self.trainer.storage.put_scalar("lr", lr, smoothing_hint=False) + self.scheduler.step() + + @property + def scheduler(self): + return self._scheduler or self.trainer.scheduler + + def state_dict(self): + if isinstance(self.scheduler, torch.optim.lr_scheduler._LRScheduler): + return self.scheduler.state_dict() + return {} + + def load_state_dict(self, state_dict): + if isinstance(self.scheduler, torch.optim.lr_scheduler._LRScheduler): + logger = logging.getLogger(__name__) + logger.info("Loading scheduler from state_dict ...") + self.scheduler.load_state_dict(state_dict) + + +class TorchProfiler(HookBase): + """ + A hook which runs `torch.profiler.profile`. + + Examples: + :: + hooks.TorchProfiler( + lambda trainer: 10 < trainer.iter < 20, self.cfg.OUTPUT_DIR + ) + + The above example will run the profiler for iteration 10~20 and dump + results to ``OUTPUT_DIR``. We did not profile the first few iterations + because they are typically slower than the rest. + The result files can be loaded in the ``chrome://tracing`` page in chrome browser, + and the tensorboard visualizations can be visualized using + ``tensorboard --logdir OUTPUT_DIR/log`` + """ + + def __init__(self, enable_predicate, output_dir, *, activities=None, save_tensorboard=True): + """ + Args: + enable_predicate (callable[trainer -> bool]): a function which takes a trainer, + and returns whether to enable the profiler. 
+ It will be called once every step, and can be used to select which steps to profile. + output_dir (str): the output directory to dump tracing files. + activities (iterable): same as in `torch.profiler.profile`. + save_tensorboard (bool): whether to save tensorboard visualizations at (output_dir)/log/ + """ + self._enable_predicate = enable_predicate + self._activities = activities + self._output_dir = output_dir + self._save_tensorboard = save_tensorboard + + def before_step(self): + if self._enable_predicate(self.trainer): + if self._save_tensorboard: + on_trace_ready = torch.profiler.tensorboard_trace_handler( + os.path.join( + self._output_dir, + "log", + "profiler-tensorboard-iter{}".format(self.trainer.iter), + ), + f"worker{comm.get_rank()}", + ) + else: + on_trace_ready = None + self._profiler = torch.profiler.profile( + activities=self._activities, + on_trace_ready=on_trace_ready, + record_shapes=True, + profile_memory=True, + with_stack=True, + with_flops=True, + ) + self._profiler.__enter__() + else: + self._profiler = None + + def after_step(self): + if self._profiler is None: + return + self._profiler.__exit__(None, None, None) + if not self._save_tensorboard: + PathManager.mkdirs(self._output_dir) + out_file = os.path.join( + self._output_dir, "profiler-trace-iter{}.json".format(self.trainer.iter) + ) + if "://" not in out_file: + self._profiler.export_chrome_trace(out_file) + else: + # Support non-posix filesystems + with tempfile.TemporaryDirectory(prefix="detectron2_profiler") as d: + tmp_file = os.path.join(d, "tmp.json") + self._profiler.export_chrome_trace(tmp_file) + with open(tmp_file) as f: + content = f.read() + with PathManager.open(out_file, "w") as f: + f.write(content) + + +class AutogradProfiler(TorchProfiler): + """ + A hook which runs `torch.autograd.profiler.profile`. + + Examples: + :: + hooks.AutogradProfiler( + lambda trainer: 10 < trainer.iter < 20, self.cfg.OUTPUT_DIR + ) + + The above example will run the profiler for iteration 10~20 and dump + results to ``OUTPUT_DIR``. We did not profile the first few iterations + because they are typically slower than the rest. + The result files can be loaded in the ``chrome://tracing`` page in chrome browser. + + Note: + When used together with NCCL on older version of GPUs, + autograd profiler may cause deadlock because it unnecessarily allocates + memory on every device it sees. The memory management calls, if + interleaved with NCCL calls, lead to deadlock on GPUs that do not + support ``cudaLaunchCooperativeKernelMultiDevice``. + """ + + def __init__(self, enable_predicate, output_dir, *, use_cuda=True): + """ + Args: + enable_predicate (callable[trainer -> bool]): a function which takes a trainer, + and returns whether to enable the profiler. + It will be called once every step, and can be used to select which steps to profile. + output_dir (str): the output directory to dump tracing files. + use_cuda (bool): same as in `torch.autograd.profiler.profile`. + """ + warnings.warn("AutogradProfiler has been deprecated in favor of TorchProfiler.") + self._enable_predicate = enable_predicate + self._use_cuda = use_cuda + self._output_dir = output_dir + + def before_step(self): + if self._enable_predicate(self.trainer): + self._profiler = torch.autograd.profiler.profile(use_cuda=self._use_cuda) + self._profiler.__enter__() + else: + self._profiler = None + + +class EvalHook(HookBase): + """ + Run an evaluation function periodically, and at the end of training. 
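+
+    Examples:
+    ::
+        # sketch: run evaluation every 5000 iterations; `do_test`, `cfg` and
+        # `model` are hypothetical, and the closure must return a dict of metrics
+        trainer.register_hooks([EvalHook(5000, lambda: do_test(cfg, model))])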
+ + It is executed every ``eval_period`` iterations and after the last iteration. + """ + + def __init__(self, eval_period, eval_function): + """ + Args: + eval_period (int): the period to run `eval_function`. Set to 0 to + not evaluate periodically (but still after the last iteration). + eval_function (callable): a function which takes no arguments, and + returns a nested dict of evaluation metrics. + + Note: + This hook must be enabled in all or none workers. + If you would like only certain workers to perform evaluation, + give other workers a no-op function (`eval_function=lambda: None`). + """ + self._period = eval_period + self._func = eval_function + + def _do_eval(self): + results = self._func() + + if results: + assert isinstance( + results, dict + ), "Eval function must return a dict. Got {} instead.".format(results) + + flattened_results = flatten_results_dict(results) + for k, v in flattened_results.items(): + try: + v = float(v) + except Exception as e: + raise ValueError( + "[EvalHook] eval_function should return a nested dict of float. " + "Got '{}: {}' instead.".format(k, v) + ) from e + self.trainer.storage.put_scalars(**flattened_results, smoothing_hint=False) + + # Evaluation may take different time among workers. + # A barrier make them start the next iteration together. + comm.synchronize() + + def after_step(self): + next_iter = self.trainer.iter + 1 + if self._period > 0 and next_iter % self._period == 0: + # do the last eval in after_train + if next_iter != self.trainer.max_iter: + self._do_eval() + + def after_train(self): + # This condition is to prevent the eval from running after a failed training + if self.trainer.iter + 1 >= self.trainer.max_iter: + self._do_eval() + # func is likely a closure that holds reference to the trainer + # therefore we clean it to avoid circular reference in the end + del self._func + + +class PreciseBN(HookBase): + """ + The standard implementation of BatchNorm uses EMA in inference, which is + sometimes suboptimal. + This class computes the true average of statistics rather than the moving average, + and put true averages to every BN layer in the given model. + + It is executed every ``period`` iterations and after the last iteration. + """ + + def __init__(self, period, model, data_loader, num_iter): + """ + Args: + period (int): the period this hook is run, or 0 to not run during training. + The hook will always run in the end of training. + model (nn.Module): a module whose all BN layers in training mode will be + updated by precise BN. + Note that user is responsible for ensuring the BN layers to be + updated are in training mode when this hook is triggered. + data_loader (iterable): it will produce data to be run by `model(data)`. + num_iter (int): number of iterations used to compute the precise + statistics. + """ + self._logger = logging.getLogger(__name__) + if len(get_bn_modules(model)) == 0: + self._logger.info( + "PreciseBN is disabled because model does not contain BN layers in training mode." + ) + self._disabled = True + return + + self._model = model + self._data_loader = data_loader + self._num_iter = num_iter + self._period = period + self._disabled = False + + self._data_iter = None + + def after_step(self): + next_iter = self.trainer.iter + 1 + is_final = next_iter == self.trainer.max_iter + if is_final or (self._period > 0 and next_iter % self._period == 0): + self.update_stats() + + def update_stats(self): + """ + Update the model with precise statistics. Users can manually call this method. 
+ """ + if self._disabled: + return + + if self._data_iter is None: + self._data_iter = iter(self._data_loader) + + def data_loader(): + for num_iter in itertools.count(1): + if num_iter % 100 == 0: + self._logger.info( + "Running precise-BN ... {}/{} iterations.".format(num_iter, self._num_iter) + ) + # This way we can reuse the same iterator + yield next(self._data_iter) + + with EventStorage(): # capture events in a new storage to discard them + self._logger.info( + "Running precise-BN for {} iterations... ".format(self._num_iter) + + "Note that this could produce different statistics every time." + ) + update_bn_stats(self._model, data_loader(), self._num_iter) + + +class TorchMemoryStats(HookBase): + """ + Writes pytorch's cuda memory statistics periodically. + """ + + def __init__(self, period=20, max_runs=10): + """ + Args: + period (int): Output stats each 'period' iterations + max_runs (int): Stop the logging after 'max_runs' + """ + + self._logger = logging.getLogger(__name__) + self._period = period + self._max_runs = max_runs + self._runs = 0 + + def after_step(self): + if self._runs > self._max_runs: + return + + if (self.trainer.iter + 1) % self._period == 0 or ( + self.trainer.iter == self.trainer.max_iter - 1 + ): + if torch.cuda.is_available(): + max_reserved_mb = torch.cuda.max_memory_reserved() / 1024.0 / 1024.0 + reserved_mb = torch.cuda.memory_reserved() / 1024.0 / 1024.0 + max_allocated_mb = torch.cuda.max_memory_allocated() / 1024.0 / 1024.0 + allocated_mb = torch.cuda.memory_allocated() / 1024.0 / 1024.0 + + self._logger.info( + ( + " iter: {} " + " max_reserved_mem: {:.0f}MB " + " reserved_mem: {:.0f}MB " + " max_allocated_mem: {:.0f}MB " + " allocated_mem: {:.0f}MB " + ).format( + self.trainer.iter, + max_reserved_mb, + reserved_mb, + max_allocated_mb, + allocated_mb, + ) + ) + + self._runs += 1 + if self._runs == self._max_runs: + mem_summary = torch.cuda.memory_summary() + self._logger.info("\n" + mem_summary) + + torch.cuda.reset_peak_memory_stats() diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/engine/launch.py b/motion-gan-pipeline/preprocessing/third/detectron2/engine/launch.py new file mode 100644 index 0000000..46f9869 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/engine/launch.py @@ -0,0 +1,126 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import logging +from datetime import timedelta +import torch +import torch.distributed as dist +import torch.multiprocessing as mp + +from detectron2.utils import comm + +__all__ = ["DEFAULT_TIMEOUT", "launch"] + +DEFAULT_TIMEOUT = timedelta(minutes=30) + + +def _find_free_port(): + import socket + + sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + # Binding to port 0 will cause the OS to find an available port for us + sock.bind(("", 0)) + port = sock.getsockname()[1] + sock.close() + # NOTE: there is still a chance the port could be taken by other processes. + return port + + +def launch( + main_func, + num_gpus_per_machine, + num_machines=1, + machine_rank=0, + dist_url=None, + args=(), + timeout=DEFAULT_TIMEOUT, +): + """ + Launch multi-gpu or distributed training. + This function must be called on all machines involved in the training. + It will spawn child processes (defined by ``num_gpus_per_machine``) on each machine. 
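+
+    Examples:
+    ::
+        # single-machine sketch with 2 GPUs; `main` and `args` are hypothetical:
+        launch(main, num_gpus_per_machine=2, dist_url="auto", args=(args,))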
+ + Args: + main_func: a function that will be called by `main_func(*args)` + num_gpus_per_machine (int): number of GPUs per machine + num_machines (int): the total number of machines + machine_rank (int): the rank of this machine + dist_url (str): url to connect to for distributed jobs, including protocol + e.g. "tcp://127.0.0.1:8686". + Can be set to "auto" to automatically select a free port on localhost + timeout (timedelta): timeout of the distributed workers + args (tuple): arguments passed to main_func + """ + world_size = num_machines * num_gpus_per_machine + if world_size > 1: + # https://github.com/pytorch/pytorch/pull/14391 + # TODO prctl in spawned processes + + if dist_url == "auto": + assert num_machines == 1, "dist_url=auto not supported in multi-machine jobs." + port = _find_free_port() + dist_url = f"tcp://127.0.0.1:{port}" + if num_machines > 1 and dist_url.startswith("file://"): + logger = logging.getLogger(__name__) + logger.warning( + "file:// is not a reliable init_method in multi-machine jobs. Prefer tcp://" + ) + + mp.spawn( + _distributed_worker, + nprocs=num_gpus_per_machine, + args=( + main_func, + world_size, + num_gpus_per_machine, + machine_rank, + dist_url, + args, + timeout, + ), + daemon=False, + ) + else: + main_func(*args) + + +def _distributed_worker( + local_rank, + main_func, + world_size, + num_gpus_per_machine, + machine_rank, + dist_url, + args, + timeout=DEFAULT_TIMEOUT, +): + assert torch.cuda.is_available(), "cuda is not available. Please check your installation." + global_rank = machine_rank * num_gpus_per_machine + local_rank + try: + dist.init_process_group( + backend="NCCL", + init_method=dist_url, + world_size=world_size, + rank=global_rank, + timeout=timeout, + ) + except Exception as e: + logger = logging.getLogger(__name__) + logger.error("Process group URL: {}".format(dist_url)) + raise e + + # Setup the local process group (which contains ranks within the same machine) + assert comm._LOCAL_PROCESS_GROUP is None + num_machines = world_size // num_gpus_per_machine + for i in range(num_machines): + ranks_on_i = list(range(i * num_gpus_per_machine, (i + 1) * num_gpus_per_machine)) + pg = dist.new_group(ranks_on_i) + if i == machine_rank: + comm._LOCAL_PROCESS_GROUP = pg + + assert num_gpus_per_machine <= torch.cuda.device_count() + torch.cuda.set_device(local_rank) + + # synchronize is needed here to prevent a possible timeout after calling init_process_group + # See: https://github.com/facebookresearch/maskrcnn-benchmark/issues/172 + comm.synchronize() + + main_func(*args) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/engine/train_loop.py b/motion-gan-pipeline/preprocessing/third/detectron2/engine/train_loop.py new file mode 100644 index 0000000..c4a86b5 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/engine/train_loop.py @@ -0,0 +1,417 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. + +import logging +import numpy as np +import time +import weakref +from typing import List, Mapping, Optional +import torch +from torch.nn.parallel import DataParallel, DistributedDataParallel + +import detectron2.utils.comm as comm +from detectron2.utils.events import EventStorage, get_event_storage +from detectron2.utils.logger import _log_api_usage + +__all__ = ["HookBase", "TrainerBase", "SimpleTrainer", "AMPTrainer"] + + +class HookBase: + """ + Base class for hooks that can be registered with :class:`TrainerBase`. + + Each hook can implement 4 methods. 
The way they are called is demonstrated + in the following snippet: + :: + hook.before_train() + for iter in range(start_iter, max_iter): + hook.before_step() + trainer.run_step() + hook.after_step() + iter += 1 + hook.after_train() + + Notes: + 1. In the hook method, users can access ``self.trainer`` to access more + properties about the context (e.g., model, current iteration, or config + if using :class:`DefaultTrainer`). + + 2. A hook that does something in :meth:`before_step` can often be + implemented equivalently in :meth:`after_step`. + If the hook takes non-trivial time, it is strongly recommended to + implement the hook in :meth:`after_step` instead of :meth:`before_step`. + The convention is that :meth:`before_step` should only take negligible time. + + Following this convention will allow hooks that do care about the difference + between :meth:`before_step` and :meth:`after_step` (e.g., timer) to + function properly. + + """ + + trainer: "TrainerBase" = None + """ + A weak reference to the trainer object. Set by the trainer when the hook is registered. + """ + + def before_train(self): + """ + Called before the first iteration. + """ + pass + + def after_train(self): + """ + Called after the last iteration. + """ + pass + + def before_step(self): + """ + Called before each iteration. + """ + pass + + def after_step(self): + """ + Called after each iteration. + """ + pass + + def state_dict(self): + """ + Hooks are stateless by default, but can be made checkpointable by + implementing `state_dict` and `load_state_dict`. + """ + return {} + + +class TrainerBase: + """ + Base class for iterative trainer with hooks. + + The only assumption we made here is: the training runs in a loop. + A subclass can implement what the loop is. + We made no assumptions about the existence of dataloader, optimizer, model, etc. + + Attributes: + iter(int): the current iteration. + + start_iter(int): The iteration to start with. + By convention the minimum possible value is 0. + + max_iter(int): The iteration to end training. + + storage(EventStorage): An EventStorage that's opened during the course of training. + """ + + def __init__(self) -> None: + self._hooks: List[HookBase] = [] + self.iter: int = 0 + self.start_iter: int = 0 + self.max_iter: int + self.storage: EventStorage + _log_api_usage("trainer." + self.__class__.__name__) + + def register_hooks(self, hooks: List[Optional[HookBase]]) -> None: + """ + Register hooks to the trainer. The hooks are executed in the order + they are registered. + + Args: + hooks (list[Optional[HookBase]]): list of hooks + """ + hooks = [h for h in hooks if h is not None] + for h in hooks: + assert isinstance(h, HookBase) + # To avoid circular reference, hooks and trainer cannot own each other. 
+ # This normally does not matter, but will cause memory leak if the + # involved objects contain __del__: + # See http://engineering.hearsaysocial.com/2013/06/16/circular-references-in-python/ + h.trainer = weakref.proxy(self) + self._hooks.extend(hooks) + + def train(self, start_iter: int, max_iter: int): + """ + Args: + start_iter, max_iter (int): See docs above + """ + logger = logging.getLogger(__name__) + logger.info("Starting training from iteration {}".format(start_iter)) + + self.iter = self.start_iter = start_iter + self.max_iter = max_iter + + with EventStorage(start_iter) as self.storage: + try: + self.before_train() + for self.iter in range(start_iter, max_iter): + self.before_step() + self.run_step() + self.after_step() + # self.iter == max_iter can be used by `after_train` to + # tell whether the training successfully finished or failed + # due to exceptions. + self.iter += 1 + except Exception: + logger.exception("Exception during training:") + raise + finally: + self.after_train() + + def before_train(self): + for h in self._hooks: + h.before_train() + + def after_train(self): + self.storage.iter = self.iter + for h in self._hooks: + h.after_train() + + def before_step(self): + # Maintain the invariant that storage.iter == trainer.iter + # for the entire execution of each step + self.storage.iter = self.iter + + for h in self._hooks: + h.before_step() + + def after_step(self): + for h in self._hooks: + h.after_step() + + def run_step(self): + raise NotImplementedError + + def state_dict(self): + ret = {"iteration": self.iter} + hooks_state = {} + for h in self._hooks: + sd = h.state_dict() + if sd: + name = type(h).__qualname__ + if name in hooks_state: + # TODO handle repetitive stateful hooks + continue + hooks_state[name] = sd + if hooks_state: + ret["hooks"] = hooks_state + return ret + + def load_state_dict(self, state_dict): + logger = logging.getLogger(__name__) + self.iter = state_dict["iteration"] + for key, value in state_dict.get("hooks", {}).items(): + for h in self._hooks: + try: + name = type(h).__qualname__ + except AttributeError: + continue + if name == key: + h.load_state_dict(value) + break + else: + logger.warning(f"Cannot find the hook '{key}', its state_dict is ignored.") + + +class SimpleTrainer(TrainerBase): + """ + A simple trainer for the most common type of task: + single-cost single-optimizer single-data-source iterative optimization, + optionally using data-parallelism. + It assumes that every step, you: + + 1. Compute the loss with a data from the data_loader. + 2. Compute the gradients with the above loss. + 3. Update the model with the optimizer. + + All other tasks during training (checkpointing, logging, evaluation, LR schedule) + are maintained by hooks, which can be registered by :meth:`TrainerBase.register_hooks`. + + If you want to do anything fancier than this, + either subclass TrainerBase and implement your own `run_step`, + or write your own training loop. + """ + + def __init__(self, model, data_loader, optimizer): + """ + Args: + model: a torch Module. Takes a data from data_loader and returns a + dict of losses. + data_loader: an iterable. Contains data to be used to call model. + optimizer: a torch optimizer. + """ + super().__init__() + + """ + We set the model to training mode in the trainer. + However it's valid to train a model that's in eval mode. + If you want your model (or a submodule of it) to behave + like evaluation during training, you can overwrite its train() method. 
+ """ + model.train() + + self.model = model + self.data_loader = data_loader + self._data_loader_iter = iter(data_loader) + self.optimizer = optimizer + + def run_step(self): + """ + Implement the standard training logic described above. + """ + assert self.model.training, "[SimpleTrainer] model was changed to eval mode!" + start = time.perf_counter() + """ + If you want to do something with the data, you can wrap the dataloader. + """ + data = next(self._data_loader_iter) + data_time = time.perf_counter() - start + + """ + If you want to do something with the losses, you can wrap the model. + """ + loss_dict = self.model(data) + if isinstance(loss_dict, torch.Tensor): + losses = loss_dict + loss_dict = {"total_loss": loss_dict} + else: + losses = sum(loss_dict.values()) + + """ + If you need to accumulate gradients or do something similar, you can + wrap the optimizer with your custom `zero_grad()` method. + """ + self.optimizer.zero_grad() + losses.backward() + + self._write_metrics(loss_dict, data_time) + + """ + If you need gradient clipping/scaling or other processing, you can + wrap the optimizer with your custom `step()` method. But it is + suboptimal as explained in https://arxiv.org/abs/2006.15704 Sec 3.2.4 + """ + self.optimizer.step() + + def _write_metrics( + self, + loss_dict: Mapping[str, torch.Tensor], + data_time: float, + prefix: str = "", + ) -> None: + SimpleTrainer.write_metrics(loss_dict, data_time, prefix) + + @staticmethod + def write_metrics( + loss_dict: Mapping[str, torch.Tensor], + data_time: float, + prefix: str = "", + ) -> None: + """ + Args: + loss_dict (dict): dict of scalar losses + data_time (float): time taken by the dataloader iteration + prefix (str): prefix for logging keys + """ + metrics_dict = {k: v.detach().cpu().item() for k, v in loss_dict.items()} + metrics_dict["data_time"] = data_time + + # Gather metrics among all workers for logging + # This assumes we do DDP-style training, which is currently the only + # supported method in detectron2. + all_metrics_dict = comm.gather(metrics_dict) + + if comm.is_main_process(): + storage = get_event_storage() + + # data_time among workers can have high variance. The actual latency + # caused by data_time is the maximum among workers. + data_time = np.max([x.pop("data_time") for x in all_metrics_dict]) + storage.put_scalar("data_time", data_time) + + # average the rest metrics + metrics_dict = { + k: np.mean([x[k] for x in all_metrics_dict]) for k in all_metrics_dict[0].keys() + } + total_losses_reduced = sum(metrics_dict.values()) + if not np.isfinite(total_losses_reduced): + raise FloatingPointError( + f"Loss became infinite or NaN at iteration={storage.iter}!\n" + f"loss_dict = {metrics_dict}" + ) + + storage.put_scalar("{}total_loss".format(prefix), total_losses_reduced) + if len(metrics_dict) > 1: + storage.put_scalars(**metrics_dict) + + def state_dict(self): + ret = super().state_dict() + ret["optimizer"] = self.optimizer.state_dict() + return ret + + def load_state_dict(self, state_dict): + super().load_state_dict(state_dict) + self.optimizer.load_state_dict(state_dict["optimizer"]) + + +class AMPTrainer(SimpleTrainer): + """ + Like :class:`SimpleTrainer`, but uses PyTorch's native automatic mixed precision + in the training loop. + """ + + def __init__(self, model, data_loader, optimizer, grad_scaler=None): + """ + Args: + model, data_loader, optimizer: same as in :class:`SimpleTrainer`. + grad_scaler: torch GradScaler to automatically scale gradients. 
+ """ + unsupported = "AMPTrainer does not support single-process multi-device training!" + if isinstance(model, DistributedDataParallel): + assert not (model.device_ids and len(model.device_ids) > 1), unsupported + assert not isinstance(model, DataParallel), unsupported + + super().__init__(model, data_loader, optimizer) + + if grad_scaler is None: + from torch.cuda.amp import GradScaler + + grad_scaler = GradScaler() + self.grad_scaler = grad_scaler + + def run_step(self): + """ + Implement the AMP training logic. + """ + assert self.model.training, "[AMPTrainer] model was changed to eval mode!" + assert torch.cuda.is_available(), "[AMPTrainer] CUDA is required for AMP training!" + from torch.cuda.amp import autocast + + start = time.perf_counter() + data = next(self._data_loader_iter) + data_time = time.perf_counter() - start + + with autocast(): + loss_dict = self.model(data) + if isinstance(loss_dict, torch.Tensor): + losses = loss_dict + loss_dict = {"total_loss": loss_dict} + else: + losses = sum(loss_dict.values()) + + self.optimizer.zero_grad() + self.grad_scaler.scale(losses).backward() + + self._write_metrics(loss_dict, data_time) + + self.grad_scaler.step(self.optimizer) + self.grad_scaler.update() + + def state_dict(self): + ret = super().state_dict() + ret["grad_scaler"] = self.grad_scaler.state_dict() + return ret + + def load_state_dict(self, state_dict): + super().load_state_dict(state_dict) + self.grad_scaler.load_state_dict(state_dict["grad_scaler"]) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/__init__.py new file mode 100644 index 0000000..d96609e --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/__init__.py @@ -0,0 +1,12 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from .cityscapes_evaluation import CityscapesInstanceEvaluator, CityscapesSemSegEvaluator +from .coco_evaluation import COCOEvaluator +from .rotated_coco_evaluation import RotatedCOCOEvaluator +from .evaluator import DatasetEvaluator, DatasetEvaluators, inference_context, inference_on_dataset +from .lvis_evaluation import LVISEvaluator +from .panoptic_evaluation import COCOPanopticEvaluator +from .pascal_voc_evaluation import PascalVOCDetectionEvaluator +from .sem_seg_evaluation import SemSegEvaluator +from .testing import print_csv_format, verify_results + +__all__ = [k for k in globals().keys() if not k.startswith("_")] diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/cityscapes_evaluation.py b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/cityscapes_evaluation.py new file mode 100644 index 0000000..9cc7888 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/cityscapes_evaluation.py @@ -0,0 +1,197 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import glob +import logging +import numpy as np +import os +import tempfile +from collections import OrderedDict +import torch +from PIL import Image + +from detectron2.data import MetadataCatalog +from detectron2.utils import comm +from detectron2.utils.file_io import PathManager + +from .evaluator import DatasetEvaluator + + +class CityscapesEvaluator(DatasetEvaluator): + """ + Base class for evaluation using cityscapes API. + """ + + def __init__(self, dataset_name): + """ + Args: + dataset_name (str): the name of the dataset. + It must have the following metadata associated with it: + "thing_classes", "gt_dir". 
+ """ + self._metadata = MetadataCatalog.get(dataset_name) + self._cpu_device = torch.device("cpu") + self._logger = logging.getLogger(__name__) + + def reset(self): + self._working_dir = tempfile.TemporaryDirectory(prefix="cityscapes_eval_") + self._temp_dir = self._working_dir.name + # All workers will write to the same results directory + # TODO this does not work in distributed training + assert ( + comm.get_local_size() == comm.get_world_size() + ), "CityscapesEvaluator currently do not work with multiple machines." + self._temp_dir = comm.all_gather(self._temp_dir)[0] + if self._temp_dir != self._working_dir.name: + self._working_dir.cleanup() + self._logger.info( + "Writing cityscapes results to temporary directory {} ...".format(self._temp_dir) + ) + + +class CityscapesInstanceEvaluator(CityscapesEvaluator): + """ + Evaluate instance segmentation results on cityscapes dataset using cityscapes API. + + Note: + * It does not work in multi-machine distributed training. + * It contains a synchronization, therefore has to be used on all ranks. + * Only the main process runs evaluation. + """ + + def process(self, inputs, outputs): + from cityscapesscripts.helpers.labels import name2label + + for input, output in zip(inputs, outputs): + file_name = input["file_name"] + basename = os.path.splitext(os.path.basename(file_name))[0] + pred_txt = os.path.join(self._temp_dir, basename + "_pred.txt") + + if "instances" in output: + output = output["instances"].to(self._cpu_device) + num_instances = len(output) + with open(pred_txt, "w") as fout: + for i in range(num_instances): + pred_class = output.pred_classes[i] + classes = self._metadata.thing_classes[pred_class] + class_id = name2label[classes].id + score = output.scores[i] + mask = output.pred_masks[i].numpy().astype("uint8") + png_filename = os.path.join( + self._temp_dir, basename + "_{}_{}.png".format(i, classes) + ) + + Image.fromarray(mask * 255).save(png_filename) + fout.write( + "{} {} {}\n".format(os.path.basename(png_filename), class_id, score) + ) + else: + # Cityscapes requires a prediction file for every ground truth image. + with open(pred_txt, "w") as fout: + pass + + def evaluate(self): + """ + Returns: + dict: has a key "segm", whose value is a dict of "AP" and "AP50". + """ + comm.synchronize() + if comm.get_rank() > 0: + return + import cityscapesscripts.evaluation.evalInstanceLevelSemanticLabeling as cityscapes_eval + + self._logger.info("Evaluating results under {} ...".format(self._temp_dir)) + + # set some global states in cityscapes evaluation API, before evaluating + cityscapes_eval.args.predictionPath = os.path.abspath(self._temp_dir) + cityscapes_eval.args.predictionWalk = None + cityscapes_eval.args.JSONOutput = False + cityscapes_eval.args.colorized = False + cityscapes_eval.args.gtInstancesFile = os.path.join(self._temp_dir, "gtInstances.json") + + # These lines are adopted from + # https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/evaluation/evalInstanceLevelSemanticLabeling.py # noqa + gt_dir = PathManager.get_local_path(self._metadata.gt_dir) + groundTruthImgList = glob.glob(os.path.join(gt_dir, "*", "*_gtFine_instanceIds.png")) + assert len( + groundTruthImgList + ), "Cannot find any ground truth images to use for evaluation. 
Searched for: {}".format( + cityscapes_eval.args.groundTruthSearch + ) + predictionImgList = [] + for gt in groundTruthImgList: + predictionImgList.append(cityscapes_eval.getPrediction(gt, cityscapes_eval.args)) + results = cityscapes_eval.evaluateImgLists( + predictionImgList, groundTruthImgList, cityscapes_eval.args + )["averages"] + + ret = OrderedDict() + ret["segm"] = {"AP": results["allAp"] * 100, "AP50": results["allAp50%"] * 100} + self._working_dir.cleanup() + return ret + + +class CityscapesSemSegEvaluator(CityscapesEvaluator): + """ + Evaluate semantic segmentation results on cityscapes dataset using cityscapes API. + + Note: + * It does not work in multi-machine distributed training. + * It contains a synchronization, therefore has to be used on all ranks. + * Only the main process runs evaluation. + """ + + def process(self, inputs, outputs): + from cityscapesscripts.helpers.labels import trainId2label + + for input, output in zip(inputs, outputs): + file_name = input["file_name"] + basename = os.path.splitext(os.path.basename(file_name))[0] + pred_filename = os.path.join(self._temp_dir, basename + "_pred.png") + + output = output["sem_seg"].argmax(dim=0).to(self._cpu_device).numpy() + pred = 255 * np.ones(output.shape, dtype=np.uint8) + for train_id, label in trainId2label.items(): + if label.ignoreInEval: + continue + pred[output == train_id] = label.id + Image.fromarray(pred).save(pred_filename) + + def evaluate(self): + comm.synchronize() + if comm.get_rank() > 0: + return + # Load the Cityscapes eval script *after* setting the required env var, + # since the script reads CITYSCAPES_DATASET into global variables at load time. + import cityscapesscripts.evaluation.evalPixelLevelSemanticLabeling as cityscapes_eval + + self._logger.info("Evaluating results under {} ...".format(self._temp_dir)) + + # set some global states in cityscapes evaluation API, before evaluating + cityscapes_eval.args.predictionPath = os.path.abspath(self._temp_dir) + cityscapes_eval.args.predictionWalk = None + cityscapes_eval.args.JSONOutput = False + cityscapes_eval.args.colorized = False + + # These lines are adopted from + # https://github.com/mcordts/cityscapesScripts/blob/master/cityscapesscripts/evaluation/evalPixelLevelSemanticLabeling.py # noqa + gt_dir = PathManager.get_local_path(self._metadata.gt_dir) + groundTruthImgList = glob.glob(os.path.join(gt_dir, "*", "*_gtFine_labelIds.png")) + assert len( + groundTruthImgList + ), "Cannot find any ground truth images to use for evaluation. Searched for: {}".format( + cityscapes_eval.args.groundTruthSearch + ) + predictionImgList = [] + for gt in groundTruthImgList: + predictionImgList.append(cityscapes_eval.getPrediction(cityscapes_eval.args, gt)) + results = cityscapes_eval.evaluateImgLists( + predictionImgList, groundTruthImgList, cityscapes_eval.args + ) + ret = OrderedDict() + ret["sem_seg"] = { + "IoU": 100.0 * results["averageScoreClasses"], + "iIoU": 100.0 * results["averageScoreInstClasses"], + "IoU_sup": 100.0 * results["averageScoreCategories"], + "iIoU_sup": 100.0 * results["averageScoreInstCategories"], + } + self._working_dir.cleanup() + return ret diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/coco_evaluation.py b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/coco_evaluation.py new file mode 100644 index 0000000..e3884d2 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/coco_evaluation.py @@ -0,0 +1,722 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+import contextlib +import copy +import io +import itertools +import json +import logging +import numpy as np +import os +import pickle +from collections import OrderedDict +import pycocotools.mask as mask_util +import torch +from pycocotools.coco import COCO +from pycocotools.cocoeval import COCOeval +from tabulate import tabulate + +import detectron2.utils.comm as comm +from detectron2.config import CfgNode +from detectron2.data import MetadataCatalog +from detectron2.data.datasets.coco import convert_to_coco_json +from detectron2.structures import Boxes, BoxMode, pairwise_iou +from detectron2.utils.file_io import PathManager +from detectron2.utils.logger import create_small_table + +from .evaluator import DatasetEvaluator + +try: + from detectron2.evaluation.fast_eval_api import COCOeval_opt +except ImportError: + COCOeval_opt = COCOeval + + +class COCOEvaluator(DatasetEvaluator): + """ + Evaluate AR for object proposals, AP for instance detection/segmentation, AP + for keypoint detection outputs using COCO's metrics. + See http://cocodataset.org/#detection-eval and + http://cocodataset.org/#keypoints-eval to understand its metrics. + The metrics range from 0 to 100 (instead of 0 to 1), where a -1 or NaN means + the metric cannot be computed (e.g. due to no predictions made). + + In addition to COCO, this evaluator is able to support any bounding box detection, + instance segmentation, or keypoint detection dataset. + """ + + def __init__( + self, + dataset_name, + tasks=None, + distributed=True, + output_dir=None, + *, + max_dets_per_image=None, + use_fast_impl=True, + kpt_oks_sigmas=(), + allow_cached_coco=True, + ): + """ + Args: + dataset_name (str): name of the dataset to be evaluated. + It must have either the following corresponding metadata: + + "json_file": the path to the COCO format annotation + + Or it must be in detectron2's standard dataset format + so it can be converted to COCO format automatically. + tasks (tuple[str]): tasks that can be evaluated under the given + configuration. A task is one of "bbox", "segm", "keypoints". + By default, will infer this automatically from predictions. + distributed (True): if True, will collect results from all ranks and run evaluation + in the main process. + Otherwise, will only evaluate the results in the current process. + output_dir (str): optional, an output directory to dump all + results predicted on the dataset. The dump contains two files: + + 1. "instances_predictions.pth" a file that can be loaded with `torch.load` and + contains all the results in the format they are produced by the model. + 2. "coco_instances_results.json" a json file in COCO's result format. + max_dets_per_image (int): limit on the maximum number of detections per image. + By default in COCO, this limit is to 100, but this can be customized + to be greater, as is needed in evaluation metrics AP fixed and AP pool + (see https://arxiv.org/pdf/2102.01066.pdf) + This doesn't affect keypoint evaluation. + use_fast_impl (bool): use a fast but **unofficial** implementation to compute AP. + Although the results should be very close to the official implementation in COCO + API, it is still recommended to compute results with the official API for use in + papers. The faster implementation also uses more RAM. + kpt_oks_sigmas (list[float]): The sigmas used to calculate keypoint OKS. + See http://cocodataset.org/#keypoints-eval + When empty, it will use the defaults in COCO. + Otherwise it should be the same length as ROI_KEYPOINT_HEAD.NUM_KEYPOINTS. 
+ allow_cached_coco (bool): Whether to use cached coco json from previous validation + runs. You should set this to False if you need to use different validation data. + Defaults to True. + """ + self._logger = logging.getLogger(__name__) + self._distributed = distributed + self._output_dir = output_dir + + if use_fast_impl and (COCOeval_opt is COCOeval): + self._logger.info("Fast COCO eval is not built. Falling back to official COCO eval.") + use_fast_impl = False + self._use_fast_impl = use_fast_impl + + # COCOeval requires the limit on the number of detections per image (maxDets) to be a list + # with at least 3 elements. The default maxDets in COCOeval is [1, 10, 100], in which the + # 3rd element (100) is used as the limit on the number of detections per image when + # evaluating AP. COCOEvaluator expects an integer for max_dets_per_image, so for COCOeval, + # we reformat max_dets_per_image into [1, 10, max_dets_per_image], based on the defaults. + if max_dets_per_image is None: + max_dets_per_image = [1, 10, 100] + else: + max_dets_per_image = [1, 10, max_dets_per_image] + self._max_dets_per_image = max_dets_per_image + + if tasks is not None and isinstance(tasks, CfgNode): + kpt_oks_sigmas = ( + tasks.TEST.KEYPOINT_OKS_SIGMAS if not kpt_oks_sigmas else kpt_oks_sigmas + ) + self._logger.warn( + "COCO Evaluator instantiated using config, this is deprecated behavior." + " Please pass in explicit arguments instead." + ) + self._tasks = None # Infering it from predictions should be better + else: + self._tasks = tasks + + self._cpu_device = torch.device("cpu") + + self._metadata = MetadataCatalog.get(dataset_name) + if not hasattr(self._metadata, "json_file"): + if output_dir is None: + raise ValueError( + "output_dir must be provided to COCOEvaluator " + "for datasets not in COCO format." + ) + self._logger.info(f"Trying to convert '{dataset_name}' to COCO format ...") + + cache_path = os.path.join(output_dir, f"{dataset_name}_coco_format.json") + self._metadata.json_file = cache_path + convert_to_coco_json(dataset_name, cache_path, allow_cached=allow_cached_coco) + + json_file = PathManager.get_local_path(self._metadata.json_file) + with contextlib.redirect_stdout(io.StringIO()): + self._coco_api = COCO(json_file) + + # Test set json files do not contain annotations (evaluation must be + # performed using the COCO evaluation server). + self._do_evaluation = "annotations" in self._coco_api.dataset + if self._do_evaluation: + self._kpt_oks_sigmas = kpt_oks_sigmas + + def reset(self): + self._predictions = [] + + def process(self, inputs, outputs): + """ + Args: + inputs: the inputs to a COCO model (e.g., GeneralizedRCNN). + It is a list of dict. Each dict corresponds to an image and + contains keys like "height", "width", "file_name", "image_id". + outputs: the outputs of a COCO model. It is a list of dicts with key + "instances" that contains :class:`Instances`. + """ + for input, output in zip(inputs, outputs): + prediction = {"image_id": input["image_id"]} + + if "instances" in output: + instances = output["instances"].to(self._cpu_device) + prediction["instances"] = instances_to_coco_json(instances, input["image_id"]) + if "proposals" in output: + prediction["proposals"] = output["proposals"].to(self._cpu_device) + if len(prediction) > 1: + self._predictions.append(prediction) + + def evaluate(self, img_ids=None): + """ + Args: + img_ids: a list of image IDs to evaluate on. 
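#
# ---- Editor's example: constructing COCOEvaluator ---------------------------
# A sketch, not part of this diff; the dataset name and output directory are
# hypothetical (registered names such as "coco_2017_val" come from detectron2's
# dataset registry).
#
evaluator = COCOEvaluator(
    "my_coco_val",             # must be registered with DatasetCatalog/MetadataCatalog
    output_dir="./coco_eval",  # receives instances_predictions.pth + json results
    max_dets_per_image=500,    # becomes maxDets=[1, 10, 500], routed through COCOevalMaxDets
)
evaluator.reset()
# ... call evaluator.process(inputs, outputs) for every inference batch ...
results = evaluator.evaluate()
# -----------------------------------------------------------------------------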
Default to None for the whole dataset + """ + if self._distributed: + comm.synchronize() + predictions = comm.gather(self._predictions, dst=0) + predictions = list(itertools.chain(*predictions)) + + if not comm.is_main_process(): + return {} + else: + predictions = self._predictions + + if len(predictions) == 0: + self._logger.warning("[COCOEvaluator] Did not receive valid predictions.") + return {} + + if self._output_dir: + PathManager.mkdirs(self._output_dir) + file_path = os.path.join(self._output_dir, "instances_predictions.pth") + with PathManager.open(file_path, "wb") as f: + torch.save(predictions, f) + + self._results = OrderedDict() + if "proposals" in predictions[0]: + self._eval_box_proposals(predictions) + if "instances" in predictions[0]: + self._eval_predictions(predictions, img_ids=img_ids) + # Copy so the caller can do whatever with results + return copy.deepcopy(self._results) + + def _tasks_from_predictions(self, predictions): + """ + Get COCO API "tasks" (i.e. iou_type) from COCO-format predictions. + """ + tasks = {"bbox"} + for pred in predictions: + if "segmentation" in pred: + tasks.add("segm") + if "keypoints" in pred: + tasks.add("keypoints") + return sorted(tasks) + + def _eval_predictions(self, predictions, img_ids=None): + """ + Evaluate predictions. Fill self._results with the metrics of the tasks. + """ + self._logger.info("Preparing results for COCO format ...") + coco_results = list(itertools.chain(*[x["instances"] for x in predictions])) + tasks = self._tasks or self._tasks_from_predictions(coco_results) + + # unmap the category ids for COCO + if hasattr(self._metadata, "thing_dataset_id_to_contiguous_id"): + dataset_id_to_contiguous_id = self._metadata.thing_dataset_id_to_contiguous_id + all_contiguous_ids = list(dataset_id_to_contiguous_id.values()) + num_classes = len(all_contiguous_ids) + assert min(all_contiguous_ids) == 0 and max(all_contiguous_ids) == num_classes - 1 + + reverse_id_mapping = {v: k for k, v in dataset_id_to_contiguous_id.items()} + for result in coco_results: + category_id = result["category_id"] + assert category_id < num_classes, ( + f"A prediction has class={category_id}, " + f"but the dataset only has {num_classes} classes and " + f"predicted class id should be in [0, {num_classes - 1}]." + ) + result["category_id"] = reverse_id_mapping[category_id] + + if self._output_dir: + file_path = os.path.join(self._output_dir, "coco_instances_results.json") + self._logger.info("Saving results to {}".format(file_path)) + with PathManager.open(file_path, "w") as f: + f.write(json.dumps(coco_results)) + f.flush() + + if not self._do_evaluation: + self._logger.info("Annotations are not available for evaluation.") + return + + self._logger.info( + "Evaluating predictions with {} COCO API...".format( + "unofficial" if self._use_fast_impl else "official" + ) + ) + for task in sorted(tasks): + assert task in {"bbox", "segm", "keypoints"}, f"Got unknown task: {task}!" + coco_eval = ( + _evaluate_predictions_on_coco( + self._coco_api, + coco_results, + task, + kpt_oks_sigmas=self._kpt_oks_sigmas, + use_fast_impl=self._use_fast_impl, + img_ids=img_ids, + max_dets_per_image=self._max_dets_per_image, + ) + if len(coco_results) > 0 + else None # cocoapi does not handle empty results very well + ) + + res = self._derive_coco_results( + coco_eval, task, class_names=self._metadata.get("thing_classes") + ) + self._results[task] = res + + def _eval_box_proposals(self, predictions): + """ + Evaluate the box proposals in predictions. 
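#
# ---- Editor's note: shape of the returned results ---------------------------
# Illustrative only, not part of this diff; the numbers are made up. The keys
# match the metrics tables in _derive_coco_results() below, plus per-category
# "AP-<class>" entries when class names are available.
#
example_results = {
    "bbox": {"AP": 39.4, "AP50": 61.0, "AP75": 43.2, "APs": 22.9, "APm": 43.1, "APl": 51.7},
    "segm": {"AP": 35.2, "AP50": 57.4, "AP75": 37.5, "APs": 16.9, "APm": 38.2, "APl": 50.3},
}
# -----------------------------------------------------------------------------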
+ Fill self._results with the metrics for "box_proposals" task. + """ + if self._output_dir: + # Saving generated box proposals to file. + # Predicted box_proposals are in XYXY_ABS mode. + bbox_mode = BoxMode.XYXY_ABS.value + ids, boxes, objectness_logits = [], [], [] + for prediction in predictions: + ids.append(prediction["image_id"]) + boxes.append(prediction["proposals"].proposal_boxes.tensor.numpy()) + objectness_logits.append(prediction["proposals"].objectness_logits.numpy()) + + proposal_data = { + "boxes": boxes, + "objectness_logits": objectness_logits, + "ids": ids, + "bbox_mode": bbox_mode, + } + with PathManager.open(os.path.join(self._output_dir, "box_proposals.pkl"), "wb") as f: + pickle.dump(proposal_data, f) + + if not self._do_evaluation: + self._logger.info("Annotations are not available for evaluation.") + return + + self._logger.info("Evaluating bbox proposals ...") + res = {} + areas = {"all": "", "small": "s", "medium": "m", "large": "l"} + for limit in [100, 1000]: + for area, suffix in areas.items(): + stats = _evaluate_box_proposals(predictions, self._coco_api, area=area, limit=limit) + key = "AR{}@{:d}".format(suffix, limit) + res[key] = float(stats["ar"].item() * 100) + self._logger.info("Proposal metrics: \n" + create_small_table(res)) + self._results["box_proposals"] = res + + def _derive_coco_results(self, coco_eval, iou_type, class_names=None): + """ + Derive the desired score numbers from summarized COCOeval. + + Args: + coco_eval (None or COCOEval): None represents no predictions from model. + iou_type (str): + class_names (None or list[str]): if provided, will use it to predict + per-category AP. + + Returns: + a dict of {metric name: score} + """ + + metrics = { + "bbox": ["AP", "AP50", "AP75", "APs", "APm", "APl"], + "segm": ["AP", "AP50", "AP75", "APs", "APm", "APl"], + "keypoints": ["AP", "AP50", "AP75", "APm", "APl"], + }[iou_type] + + if coco_eval is None: + self._logger.warn("No predictions from the model!") + return {metric: float("nan") for metric in metrics} + + # the standard metrics + results = { + metric: float(coco_eval.stats[idx] * 100 if coco_eval.stats[idx] >= 0 else "nan") + for idx, metric in enumerate(metrics) + } + self._logger.info( + "Evaluation results for {}: \n".format(iou_type) + create_small_table(results) + ) + if not np.isfinite(sum(results.values())): + self._logger.info("Some metrics cannot be computed and is shown as NaN.") + + if class_names is None or len(class_names) <= 1: + return results + # Compute per-category AP + # from https://github.com/facebookresearch/Detectron/blob/a6a835f5b8208c45d0dce217ce9bbda915f44df7/detectron/datasets/json_dataset_evaluator.py#L222-L252 # noqa + precisions = coco_eval.eval["precision"] + # precision has dims (iou, recall, cls, area range, max dets) + assert len(class_names) == precisions.shape[2] + + results_per_category = [] + for idx, name in enumerate(class_names): + # area range index 0: all area ranges + # max dets index -1: typically 100 per image + precision = precisions[:, :, idx, 0, -1] + precision = precision[precision > -1] + ap = np.mean(precision) if precision.size else float("nan") + results_per_category.append(("{}".format(name), float(ap * 100))) + + # tabulate it + N_COLS = min(6, len(results_per_category) * 2) + results_flatten = list(itertools.chain(*results_per_category)) + results_2d = itertools.zip_longest(*[results_flatten[i::N_COLS] for i in range(N_COLS)]) + table = tabulate( + results_2d, + tablefmt="pipe", + floatfmt=".3f", + headers=["category", "AP"] * 
(N_COLS // 2), + numalign="left", + ) + self._logger.info("Per-category {} AP: \n".format(iou_type) + table) + + results.update({"AP-" + name: ap for name, ap in results_per_category}) + return results + + +def instances_to_coco_json(instances, img_id): + """ + Dump an "Instances" object to a COCO-format json that's used for evaluation. + + Args: + instances (Instances): + img_id (int): the image id + + Returns: + list[dict]: list of json annotations in COCO format. + """ + num_instance = len(instances) + if num_instance == 0: + return [] + + boxes = instances.pred_boxes.tensor.numpy() + boxes = BoxMode.convert(boxes, BoxMode.XYXY_ABS, BoxMode.XYWH_ABS) + boxes = boxes.tolist() + scores = instances.scores.tolist() + classes = instances.pred_classes.tolist() + + has_mask = instances.has("pred_masks") + if has_mask: + # use RLE to encode the masks, because they are too large and takes memory + # since this evaluator stores outputs of the entire dataset + rles = [ + mask_util.encode(np.array(mask[:, :, None], order="F", dtype="uint8"))[0] + for mask in instances.pred_masks + ] + for rle in rles: + # "counts" is an array encoded by mask_util as a byte-stream. Python3's + # json writer which always produces strings cannot serialize a bytestream + # unless you decode it. Thankfully, utf-8 works out (which is also what + # the pycocotools/_mask.pyx does). + rle["counts"] = rle["counts"].decode("utf-8") + + has_keypoints = instances.has("pred_keypoints") + if has_keypoints: + keypoints = instances.pred_keypoints + + results = [] + for k in range(num_instance): + result = { + "image_id": img_id, + "category_id": classes[k], + "bbox": boxes[k], + "score": scores[k], + } + if has_mask: + result["segmentation"] = rles[k] + if has_keypoints: + # In COCO annotations, + # keypoints coordinates are pixel indices. + # However our predictions are floating point coordinates. + # Therefore we subtract 0.5 to be consistent with the annotation format. + # This is the inverse of data loading logic in `datasets/coco.py`. + keypoints[k][:, :2] -= 0.5 + result["keypoints"] = keypoints[k].flatten().tolist() + results.append(result) + return results + + +# inspired from Detectron: +# https://github.com/facebookresearch/Detectron/blob/a6a835f5b8208c45d0dce217ce9bbda915f44df7/detectron/datasets/json_dataset_evaluator.py#L255 # noqa +def _evaluate_box_proposals(dataset_predictions, coco_api, thresholds=None, area="all", limit=None): + """ + Evaluate detection proposal recall metrics. This function is a much + faster alternative to the official COCO API recall evaluation code. However, + it produces slightly different results. 
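#
# ---- Editor's note: one record from instances_to_coco_json() ----------------
# Illustrative only, not part of this diff; the values are made up, the keys
# mirror the function above.
#
example_record = {
    "image_id": 42,
    "category_id": 7,                  # contiguous id; remapped in _eval_predictions
    "bbox": [10.0, 20.0, 30.0, 40.0],  # XYWH_ABS after the BoxMode.convert call
    "score": 0.97,
    "segmentation": {"size": [480, 640], "counts": "<utf-8 decoded RLE>"},
}
# -----------------------------------------------------------------------------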
+ """ + # Record max overlap value for each gt box + # Return vector of overlap values + areas = { + "all": 0, + "small": 1, + "medium": 2, + "large": 3, + "96-128": 4, + "128-256": 5, + "256-512": 6, + "512-inf": 7, + } + area_ranges = [ + [0 ** 2, 1e5 ** 2], # all + [0 ** 2, 32 ** 2], # small + [32 ** 2, 96 ** 2], # medium + [96 ** 2, 1e5 ** 2], # large + [96 ** 2, 128 ** 2], # 96-128 + [128 ** 2, 256 ** 2], # 128-256 + [256 ** 2, 512 ** 2], # 256-512 + [512 ** 2, 1e5 ** 2], + ] # 512-inf + assert area in areas, "Unknown area range: {}".format(area) + area_range = area_ranges[areas[area]] + gt_overlaps = [] + num_pos = 0 + + for prediction_dict in dataset_predictions: + predictions = prediction_dict["proposals"] + + # sort predictions in descending order + # TODO maybe remove this and make it explicit in the documentation + inds = predictions.objectness_logits.sort(descending=True)[1] + predictions = predictions[inds] + + ann_ids = coco_api.getAnnIds(imgIds=prediction_dict["image_id"]) + anno = coco_api.loadAnns(ann_ids) + gt_boxes = [ + BoxMode.convert(obj["bbox"], BoxMode.XYWH_ABS, BoxMode.XYXY_ABS) + for obj in anno + if obj["iscrowd"] == 0 + ] + gt_boxes = torch.as_tensor(gt_boxes).reshape(-1, 4) # guard against no boxes + gt_boxes = Boxes(gt_boxes) + gt_areas = torch.as_tensor([obj["area"] for obj in anno if obj["iscrowd"] == 0]) + + if len(gt_boxes) == 0 or len(predictions) == 0: + continue + + valid_gt_inds = (gt_areas >= area_range[0]) & (gt_areas <= area_range[1]) + gt_boxes = gt_boxes[valid_gt_inds] + + num_pos += len(gt_boxes) + + if len(gt_boxes) == 0: + continue + + if limit is not None and len(predictions) > limit: + predictions = predictions[:limit] + + overlaps = pairwise_iou(predictions.proposal_boxes, gt_boxes) + + _gt_overlaps = torch.zeros(len(gt_boxes)) + for j in range(min(len(predictions), len(gt_boxes))): + # find which proposal box maximally covers each gt box + # and get the iou amount of coverage for each gt box + max_overlaps, argmax_overlaps = overlaps.max(dim=0) + + # find which gt box is 'best' covered (i.e. 'best' = most iou) + gt_ovr, gt_ind = max_overlaps.max(dim=0) + assert gt_ovr >= 0 + # find the proposal box that covers the best covered gt box + box_ind = argmax_overlaps[gt_ind] + # record the iou coverage of this gt box + _gt_overlaps[j] = overlaps[box_ind, gt_ind] + assert _gt_overlaps[j] == gt_ovr + # mark the proposal box and the gt box as used + overlaps[box_ind, :] = -1 + overlaps[:, gt_ind] = -1 + + # append recorded iou coverage level + gt_overlaps.append(_gt_overlaps) + gt_overlaps = ( + torch.cat(gt_overlaps, dim=0) if len(gt_overlaps) else torch.zeros(0, dtype=torch.float32) + ) + gt_overlaps, _ = torch.sort(gt_overlaps) + + if thresholds is None: + step = 0.05 + thresholds = torch.arange(0.5, 0.95 + 1e-5, step, dtype=torch.float32) + recalls = torch.zeros_like(thresholds) + # compute recall for each iou threshold + for i, t in enumerate(thresholds): + recalls[i] = (gt_overlaps >= t).float().sum() / float(num_pos) + # ar = 2 * np.trapz(recalls, thresholds) + ar = recalls.mean() + return { + "ar": ar, + "recalls": recalls, + "thresholds": thresholds, + "gt_overlaps": gt_overlaps, + "num_pos": num_pos, + } + + +def _evaluate_predictions_on_coco( + coco_gt, + coco_results, + iou_type, + kpt_oks_sigmas=None, + use_fast_impl=True, + img_ids=None, + max_dets_per_image=None, +): + """ + Evaluate the coco results using COCOEval API. 
+ """ + assert len(coco_results) > 0 + + if iou_type == "segm": + coco_results = copy.deepcopy(coco_results) + # When evaluating mask AP, if the results contain bbox, cocoapi will + # use the box area as the area of the instance, instead of the mask area. + # This leads to a different definition of small/medium/large. + # We remove the bbox field to let mask AP use mask area. + for c in coco_results: + c.pop("bbox", None) + + coco_dt = coco_gt.loadRes(coco_results) + coco_eval = (COCOeval_opt if use_fast_impl else COCOeval)(coco_gt, coco_dt, iou_type) + # For COCO, the default max_dets_per_image is [1, 10, 100]. + if max_dets_per_image is None: + max_dets_per_image = [1, 10, 100] # Default from COCOEval + else: + assert ( + len(max_dets_per_image) >= 3 + ), "COCOeval requires maxDets (and max_dets_per_image) to have length at least 3" + # In the case that user supplies a custom input for max_dets_per_image, + # apply COCOevalMaxDets to evaluate AP with the custom input. + if max_dets_per_image[2] != 100: + coco_eval = COCOevalMaxDets(coco_gt, coco_dt, iou_type) + if iou_type != "keypoints": + coco_eval.params.maxDets = max_dets_per_image + + if img_ids is not None: + coco_eval.params.imgIds = img_ids + + if iou_type == "keypoints": + # Use the COCO default keypoint OKS sigmas unless overrides are specified + if kpt_oks_sigmas: + assert hasattr(coco_eval.params, "kpt_oks_sigmas"), "pycocotools is too old!" + coco_eval.params.kpt_oks_sigmas = np.array(kpt_oks_sigmas) + # COCOAPI requires every detection and every gt to have keypoints, so + # we just take the first entry from both + num_keypoints_dt = len(coco_results[0]["keypoints"]) // 3 + num_keypoints_gt = len(next(iter(coco_gt.anns.values()))["keypoints"]) // 3 + num_keypoints_oks = len(coco_eval.params.kpt_oks_sigmas) + assert num_keypoints_oks == num_keypoints_dt == num_keypoints_gt, ( + f"[COCOEvaluator] Prediction contain {num_keypoints_dt} keypoints. " + f"Ground truth contains {num_keypoints_gt} keypoints. " + f"The length of cfg.TEST.KEYPOINT_OKS_SIGMAS is {num_keypoints_oks}. " + "They have to agree with each other. For meaning of OKS, please refer to " + "http://cocodataset.org/#keypoints-eval." 
+ ) + + coco_eval.evaluate() + coco_eval.accumulate() + coco_eval.summarize() + + return coco_eval + + +class COCOevalMaxDets(COCOeval): + """ + Modified version of COCOeval for evaluating AP with a custom + maxDets (by default for COCO, maxDets is 100) + """ + + def summarize(self): + """ + Compute and display summary metrics for evaluation results given + a custom value for max_dets_per_image + """ + + def _summarize(ap=1, iouThr=None, areaRng="all", maxDets=100): + p = self.params + iStr = " {:<18} {} @[ IoU={:<9} | area={:>6s} | maxDets={:>3d} ] = {:0.3f}" + titleStr = "Average Precision" if ap == 1 else "Average Recall" + typeStr = "(AP)" if ap == 1 else "(AR)" + iouStr = ( + "{:0.2f}:{:0.2f}".format(p.iouThrs[0], p.iouThrs[-1]) + if iouThr is None + else "{:0.2f}".format(iouThr) + ) + + aind = [i for i, aRng in enumerate(p.areaRngLbl) if aRng == areaRng] + mind = [i for i, mDet in enumerate(p.maxDets) if mDet == maxDets] + if ap == 1: + # dimension of precision: [TxRxKxAxM] + s = self.eval["precision"] + # IoU + if iouThr is not None: + t = np.where(iouThr == p.iouThrs)[0] + s = s[t] + s = s[:, :, :, aind, mind] + else: + # dimension of recall: [TxKxAxM] + s = self.eval["recall"] + if iouThr is not None: + t = np.where(iouThr == p.iouThrs)[0] + s = s[t] + s = s[:, :, aind, mind] + if len(s[s > -1]) == 0: + mean_s = -1 + else: + mean_s = np.mean(s[s > -1]) + print(iStr.format(titleStr, typeStr, iouStr, areaRng, maxDets, mean_s)) + return mean_s + + def _summarizeDets(): + stats = np.zeros((12,)) + # Evaluate AP using the custom limit on maximum detections per image + stats[0] = _summarize(1, maxDets=self.params.maxDets[2]) + stats[1] = _summarize(1, iouThr=0.5, maxDets=self.params.maxDets[2]) + stats[2] = _summarize(1, iouThr=0.75, maxDets=self.params.maxDets[2]) + stats[3] = _summarize(1, areaRng="small", maxDets=self.params.maxDets[2]) + stats[4] = _summarize(1, areaRng="medium", maxDets=self.params.maxDets[2]) + stats[5] = _summarize(1, areaRng="large", maxDets=self.params.maxDets[2]) + stats[6] = _summarize(0, maxDets=self.params.maxDets[0]) + stats[7] = _summarize(0, maxDets=self.params.maxDets[1]) + stats[8] = _summarize(0, maxDets=self.params.maxDets[2]) + stats[9] = _summarize(0, areaRng="small", maxDets=self.params.maxDets[2]) + stats[10] = _summarize(0, areaRng="medium", maxDets=self.params.maxDets[2]) + stats[11] = _summarize(0, areaRng="large", maxDets=self.params.maxDets[2]) + return stats + + def _summarizeKps(): + stats = np.zeros((10,)) + stats[0] = _summarize(1, maxDets=20) + stats[1] = _summarize(1, maxDets=20, iouThr=0.5) + stats[2] = _summarize(1, maxDets=20, iouThr=0.75) + stats[3] = _summarize(1, maxDets=20, areaRng="medium") + stats[4] = _summarize(1, maxDets=20, areaRng="large") + stats[5] = _summarize(0, maxDets=20) + stats[6] = _summarize(0, maxDets=20, iouThr=0.5) + stats[7] = _summarize(0, maxDets=20, iouThr=0.75) + stats[8] = _summarize(0, maxDets=20, areaRng="medium") + stats[9] = _summarize(0, maxDets=20, areaRng="large") + return stats + + if not self.eval: + raise Exception("Please run accumulate() first") + iouType = self.params.iouType + if iouType == "segm" or iouType == "bbox": + summarize = _summarizeDets + elif iouType == "keypoints": + summarize = _summarizeKps + self.stats = summarize() + + def __str__(self): + self.summarize() diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/evaluator.py b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/evaluator.py new file mode 100644 index 0000000..baf9960 --- 
/dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/evaluator.py @@ -0,0 +1,224 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import datetime +import logging +import time +from collections import OrderedDict, abc +from contextlib import ExitStack, contextmanager +from typing import List, Union +import torch +from torch import nn + +from detectron2.utils.comm import get_world_size, is_main_process +from detectron2.utils.logger import log_every_n_seconds + + +class DatasetEvaluator: + """ + Base class for a dataset evaluator. + + The function :func:`inference_on_dataset` runs the model over + all samples in the dataset, and have a DatasetEvaluator to process the inputs/outputs. + + This class will accumulate information of the inputs/outputs (by :meth:`process`), + and produce evaluation results in the end (by :meth:`evaluate`). + """ + + def reset(self): + """ + Preparation for a new round of evaluation. + Should be called before starting a round of evaluation. + """ + pass + + def process(self, inputs, outputs): + """ + Process the pair of inputs and outputs. + If they contain batches, the pairs can be consumed one-by-one using `zip`: + + .. code-block:: python + + for input_, output in zip(inputs, outputs): + # do evaluation on single input/output pair + ... + + Args: + inputs (list): the inputs that's used to call the model. + outputs (list): the return value of `model(inputs)` + """ + pass + + def evaluate(self): + """ + Evaluate/summarize the performance, after processing all input/output pairs. + + Returns: + dict: + A new evaluator class can return a dict of arbitrary format + as long as the user can process the results. + In our train_net.py, we expect the following format: + + * key: the name of the task (e.g., bbox) + * value: a dict of {metric name: score}, e.g.: {"AP50": 80} + """ + pass + + +class DatasetEvaluators(DatasetEvaluator): + """ + Wrapper class to combine multiple :class:`DatasetEvaluator` instances. + + This class dispatches every evaluation call to + all of its :class:`DatasetEvaluator`. + """ + + def __init__(self, evaluators): + """ + Args: + evaluators (list): the evaluators to combine. + """ + super().__init__() + self._evaluators = evaluators + + def reset(self): + for evaluator in self._evaluators: + evaluator.reset() + + def process(self, inputs, outputs): + for evaluator in self._evaluators: + evaluator.process(inputs, outputs) + + def evaluate(self): + results = OrderedDict() + for evaluator in self._evaluators: + result = evaluator.evaluate() + if is_main_process() and result is not None: + for k, v in result.items(): + assert ( + k not in results + ), "Different evaluators produce results with the same key {}".format(k) + results[k] = v + return results + + +def inference_on_dataset( + model, data_loader, evaluator: Union[DatasetEvaluator, List[DatasetEvaluator], None] +): + """ + Run model on the data_loader and evaluate the metrics with evaluator. + Also benchmark the inference speed of `model.__call__` accurately. + The model will be used in eval mode. + + Args: + model (callable): a callable which takes an object from + `data_loader` and returns some outputs. + + If it's an nn.Module, it will be temporarily set to `eval` mode. + If you wish to evaluate a model in `training` mode instead, you can + wrap the given model and override its behavior of `.eval()` and `.train()`. + data_loader: an iterable object with a length. + The elements it generates will be the inputs to the model. 
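#
# ---- Editor's example: a custom DatasetEvaluator ----------------------------
# A minimal sketch, not part of this diff; the class name is hypothetical. It
# shows the reset()/process()/evaluate() contract described above.
#
class ImageCounter(DatasetEvaluator):
    def reset(self):
        self._count = 0

    def process(self, inputs, outputs):
        self._count += len(inputs)

    def evaluate(self):
        return {"counting": {"num_images": self._count}}
# -----------------------------------------------------------------------------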
+ evaluator: the evaluator(s) to run. Use `None` if you only want to benchmark, + but don't want to do any evaluation. + + Returns: + The return value of `evaluator.evaluate()` + """ + num_devices = get_world_size() + logger = logging.getLogger(__name__) + logger.info("Start inference on {} batches".format(len(data_loader))) + + total = len(data_loader) # inference data loader must have a fixed length + if evaluator is None: + # create a no-op evaluator + evaluator = DatasetEvaluators([]) + if isinstance(evaluator, abc.MutableSequence): + evaluator = DatasetEvaluators(evaluator) + evaluator.reset() + + num_warmup = min(5, total - 1) + start_time = time.perf_counter() + total_data_time = 0 + total_compute_time = 0 + total_eval_time = 0 + with ExitStack() as stack: + if isinstance(model, nn.Module): + stack.enter_context(inference_context(model)) + stack.enter_context(torch.no_grad()) + + start_data_time = time.perf_counter() + for idx, inputs in enumerate(data_loader): + total_data_time += time.perf_counter() - start_data_time + if idx == num_warmup: + start_time = time.perf_counter() + total_data_time = 0 + total_compute_time = 0 + total_eval_time = 0 + + start_compute_time = time.perf_counter() + outputs = model(inputs) + if torch.cuda.is_available(): + torch.cuda.synchronize() + total_compute_time += time.perf_counter() - start_compute_time + + start_eval_time = time.perf_counter() + evaluator.process(inputs, outputs) + total_eval_time += time.perf_counter() - start_eval_time + + iters_after_start = idx + 1 - num_warmup * int(idx >= num_warmup) + data_seconds_per_iter = total_data_time / iters_after_start + compute_seconds_per_iter = total_compute_time / iters_after_start + eval_seconds_per_iter = total_eval_time / iters_after_start + total_seconds_per_iter = (time.perf_counter() - start_time) / iters_after_start + if idx >= num_warmup * 2 or compute_seconds_per_iter > 5: + eta = datetime.timedelta(seconds=int(total_seconds_per_iter * (total - idx - 1))) + log_every_n_seconds( + logging.INFO, + ( + f"Inference done {idx + 1}/{total}. " + f"Dataloading: {data_seconds_per_iter:.4f} s/iter. " + f"Inference: {compute_seconds_per_iter:.4f} s/iter. " + f"Eval: {eval_seconds_per_iter:.4f} s/iter. " + f"Total: {total_seconds_per_iter:.4f} s/iter. " + f"ETA={eta}" + ), + n=5, + ) + start_data_time = time.perf_counter() + + # Measure the time only for this worker (before the synchronization barrier) + total_time = time.perf_counter() - start_time + total_time_str = str(datetime.timedelta(seconds=total_time)) + # NOTE this format is parsed by grep + logger.info( + "Total inference time: {} ({:.6f} s / iter per device, on {} devices)".format( + total_time_str, total_time / (total - num_warmup), num_devices + ) + ) + total_compute_time_str = str(datetime.timedelta(seconds=int(total_compute_time))) + logger.info( + "Total inference pure compute time: {} ({:.6f} s / iter per device, on {} devices)".format( + total_compute_time_str, total_compute_time / (total - num_warmup), num_devices + ) + ) + + results = evaluator.evaluate() + # An evaluator may return None when not in main process. + # Replace it by an empty dict instead to make it easier for downstream code to handle + if results is None: + results = {} + return results + + +@contextmanager +def inference_context(model): + """ + A context where the model is temporarily changed to eval mode, + and restored to previous mode afterwards. 
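#
# ---- Editor's example: calling inference_on_dataset -------------------------
# A sketch, not part of this diff; `model` and `val_loader` are hypothetical.
# A list of evaluators is wrapped into DatasetEvaluators automatically, and
# passing None benchmarks the model without evaluating.
#
results = inference_on_dataset(
    model,
    val_loader,
    [ImageCounter(), COCOEvaluator("my_coco_val", output_dir="./coco_eval")],
)
# -----------------------------------------------------------------------------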
+ + Args: + model: a torch Module + """ + training_mode = model.training + model.eval() + yield + model.train(training_mode) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/fast_eval_api.py b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/fast_eval_api.py new file mode 100644 index 0000000..2eb202b --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/fast_eval_api.py @@ -0,0 +1,121 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import copy +import logging +import numpy as np +import time +from pycocotools.cocoeval import COCOeval + +from detectron2 import _C + +logger = logging.getLogger(__name__) + + +class COCOeval_opt(COCOeval): + """ + This is a slightly modified version of the original COCO API, where the functions evaluateImg() + and accumulate() are implemented in C++ to speedup evaluation + """ + + def evaluate(self): + """ + Run per image evaluation on given images and store results in self.evalImgs_cpp, a + datastructure that isn't readable from Python but is used by a c++ implementation of + accumulate(). Unlike the original COCO PythonAPI, we don't populate the datastructure + self.evalImgs because this datastructure is a computational bottleneck. + :return: None + """ + tic = time.time() + + p = self.params + # add backward compatibility if useSegm is specified in params + if p.useSegm is not None: + p.iouType = "segm" if p.useSegm == 1 else "bbox" + logger.info("Evaluate annotation type *{}*".format(p.iouType)) + p.imgIds = list(np.unique(p.imgIds)) + if p.useCats: + p.catIds = list(np.unique(p.catIds)) + p.maxDets = sorted(p.maxDets) + self.params = p + + self._prepare() # bottleneck + + # loop through images, area range, max detection number + catIds = p.catIds if p.useCats else [-1] + + if p.iouType == "segm" or p.iouType == "bbox": + computeIoU = self.computeIoU + elif p.iouType == "keypoints": + computeIoU = self.computeOks + self.ious = { + (imgId, catId): computeIoU(imgId, catId) for imgId in p.imgIds for catId in catIds + } # bottleneck + + maxDet = p.maxDets[-1] + + # <<<< Beginning of code differences with original COCO API + def convert_instances_to_cpp(instances, is_det=False): + # Convert annotations for a list of instances in an image to a format that's fast + # to access in C++ + instances_cpp = [] + for instance in instances: + instance_cpp = _C.InstanceAnnotation( + int(instance["id"]), + instance["score"] if is_det else instance.get("score", 0.0), + instance["area"], + bool(instance.get("iscrowd", 0)), + bool(instance.get("ignore", 0)), + ) + instances_cpp.append(instance_cpp) + return instances_cpp + + # Convert GT annotations, detections, and IOUs to a format that's fast to access in C++ + ground_truth_instances = [ + [convert_instances_to_cpp(self._gts[imgId, catId]) for catId in p.catIds] + for imgId in p.imgIds + ] + detected_instances = [ + [convert_instances_to_cpp(self._dts[imgId, catId], is_det=True) for catId in p.catIds] + for imgId in p.imgIds + ] + ious = [[self.ious[imgId, catId] for catId in catIds] for imgId in p.imgIds] + + if not p.useCats: + # For each image, flatten per-category lists into a single list + ground_truth_instances = [[[o for c in i for o in c]] for i in ground_truth_instances] + detected_instances = [[[o for c in i for o in c]] for i in detected_instances] + + # Call C++ implementation of self.evaluateImgs() + self._evalImgs_cpp = _C.COCOevalEvaluateImages( + p.areaRng, maxDet, p.iouThrs, ious, ground_truth_instances, detected_instances + ) + 
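#
# ---- Editor's example: COCOeval_opt as a drop-in ----------------------------
# A sketch, not part of this diff. COCOeval_opt keeps pycocotools' interface;
# coco_evaluation.py falls back to the official COCOeval automatically when
# the C++ extension (detectron2._C) is not built. `coco_gt`/`coco_dt` are
# hypothetical COCO API objects.
#
coco_eval = COCOeval_opt(coco_gt, coco_dt, "bbox")
coco_eval.evaluate()    # C++ per-image evaluation (fills self._evalImgs_cpp)
coco_eval.accumulate()  # C++ accumulation, defined below
coco_eval.summarize()   # unchanged pycocotools summarize()
# -----------------------------------------------------------------------------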
self._evalImgs = None + + self._paramsEval = copy.deepcopy(self.params) + toc = time.time() + logger.info("COCOeval_opt.evaluate() finished in {:0.2f} seconds.".format(toc - tic)) + # >>>> End of code differences with original COCO API + + def accumulate(self): + """ + Accumulate per image evaluation results and store the result in self.eval. Does not + support changing parameter settings from those used by self.evaluate() + """ + logger.info("Accumulating evaluation results...") + tic = time.time() + assert hasattr( + self, "_evalImgs_cpp" + ), "evaluate() must be called before accmulate() is called." + + self.eval = _C.COCOevalAccumulate(self._paramsEval, self._evalImgs_cpp) + + # recall is num_iou_thresholds X num_categories X num_area_ranges X num_max_detections + self.eval["recall"] = np.array(self.eval["recall"]).reshape( + self.eval["counts"][:1] + self.eval["counts"][2:] + ) + + # precision and scores are num_iou_thresholds X num_recall_thresholds X num_categories X + # num_area_ranges X num_max_detections + self.eval["precision"] = np.array(self.eval["precision"]).reshape(self.eval["counts"]) + self.eval["scores"] = np.array(self.eval["scores"]).reshape(self.eval["counts"]) + toc = time.time() + logger.info("COCOeval_opt.accumulate() finished in {:0.2f} seconds.".format(toc - tic)) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/lvis_evaluation.py b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/lvis_evaluation.py new file mode 100644 index 0000000..0604fea --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/lvis_evaluation.py @@ -0,0 +1,380 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import copy +import itertools +import json +import logging +import os +import pickle +from collections import OrderedDict +import torch + +import detectron2.utils.comm as comm +from detectron2.config import CfgNode +from detectron2.data import MetadataCatalog +from detectron2.structures import Boxes, BoxMode, pairwise_iou +from detectron2.utils.file_io import PathManager +from detectron2.utils.logger import create_small_table + +from .coco_evaluation import instances_to_coco_json +from .evaluator import DatasetEvaluator + + +class LVISEvaluator(DatasetEvaluator): + """ + Evaluate object proposal and instance detection/segmentation outputs using + LVIS's metrics and evaluation API. + """ + + def __init__( + self, + dataset_name, + tasks=None, + distributed=True, + output_dir=None, + *, + max_dets_per_image=None, + ): + """ + Args: + dataset_name (str): name of the dataset to be evaluated. + It must have the following corresponding metadata: + "json_file": the path to the LVIS format annotation + tasks (tuple[str]): tasks that can be evaluated under the given + configuration. A task is one of "bbox", "segm". + By default, will infer this automatically from predictions. + distributed (True): if True, will collect results from all ranks for evaluation. + Otherwise, will evaluate the results in the current process. + output_dir (str): optional, an output directory to dump results. + max_dets_per_image (None or int): limit on maximum detections per image in evaluating AP + This limit, by default of the LVIS dataset, is 300. + """ + from lvis import LVIS + + self._logger = logging.getLogger(__name__) + + if tasks is not None and isinstance(tasks, CfgNode): + self._logger.warn( + "COCO Evaluator instantiated using config, this is deprecated behavior." + " Please pass in explicit arguments instead." 
+ ) + self._tasks = None # Infering it from predictions should be better + else: + self._tasks = tasks + + self._distributed = distributed + self._output_dir = output_dir + self._max_dets_per_image = max_dets_per_image + + self._cpu_device = torch.device("cpu") + + self._metadata = MetadataCatalog.get(dataset_name) + json_file = PathManager.get_local_path(self._metadata.json_file) + self._lvis_api = LVIS(json_file) + # Test set json files do not contain annotations (evaluation must be + # performed using the LVIS evaluation server). + self._do_evaluation = len(self._lvis_api.get_ann_ids()) > 0 + + def reset(self): + self._predictions = [] + + def process(self, inputs, outputs): + """ + Args: + inputs: the inputs to a LVIS model (e.g., GeneralizedRCNN). + It is a list of dict. Each dict corresponds to an image and + contains keys like "height", "width", "file_name", "image_id". + outputs: the outputs of a LVIS model. It is a list of dicts with key + "instances" that contains :class:`Instances`. + """ + for input, output in zip(inputs, outputs): + prediction = {"image_id": input["image_id"]} + + if "instances" in output: + instances = output["instances"].to(self._cpu_device) + prediction["instances"] = instances_to_coco_json(instances, input["image_id"]) + if "proposals" in output: + prediction["proposals"] = output["proposals"].to(self._cpu_device) + self._predictions.append(prediction) + + def evaluate(self): + if self._distributed: + comm.synchronize() + predictions = comm.gather(self._predictions, dst=0) + predictions = list(itertools.chain(*predictions)) + + if not comm.is_main_process(): + return + else: + predictions = self._predictions + + if len(predictions) == 0: + self._logger.warning("[LVISEvaluator] Did not receive valid predictions.") + return {} + + if self._output_dir: + PathManager.mkdirs(self._output_dir) + file_path = os.path.join(self._output_dir, "instances_predictions.pth") + with PathManager.open(file_path, "wb") as f: + torch.save(predictions, f) + + self._results = OrderedDict() + if "proposals" in predictions[0]: + self._eval_box_proposals(predictions) + if "instances" in predictions[0]: + self._eval_predictions(predictions) + # Copy so the caller can do whatever with results + return copy.deepcopy(self._results) + + def _tasks_from_predictions(self, predictions): + for pred in predictions: + if "segmentation" in pred: + return ("bbox", "segm") + return ("bbox",) + + def _eval_predictions(self, predictions): + """ + Evaluate predictions. Fill self._results with the metrics of the tasks. + + Args: + predictions (list[dict]): list of outputs from the model + """ + self._logger.info("Preparing results in the LVIS format ...") + lvis_results = list(itertools.chain(*[x["instances"] for x in predictions])) + tasks = self._tasks or self._tasks_from_predictions(lvis_results) + + # LVIS evaluator can be used to evaluate results for COCO dataset categories. + # In this case `_metadata` variable will have a field with COCO-specific category mapping. 
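#
# ---- Editor's example: constructing LVISEvaluator ---------------------------
# A sketch, not part of this diff; the dataset name is hypothetical. Without a
# COCO-style category mapping in the metadata, the 0-indexed contiguous ids
# are shifted to LVIS's 1-indexed category ids just below.
#
evaluator = LVISEvaluator("my_lvis_val", output_dir="./lvis_eval", max_dets_per_image=300)
# -----------------------------------------------------------------------------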
+ if hasattr(self._metadata, "thing_dataset_id_to_contiguous_id"): + reverse_id_mapping = { + v: k for k, v in self._metadata.thing_dataset_id_to_contiguous_id.items() + } + for result in lvis_results: + result["category_id"] = reverse_id_mapping[result["category_id"]] + else: + # unmap the category ids for LVIS (from 0-indexed to 1-indexed) + for result in lvis_results: + result["category_id"] += 1 + + if self._output_dir: + file_path = os.path.join(self._output_dir, "lvis_instances_results.json") + self._logger.info("Saving results to {}".format(file_path)) + with PathManager.open(file_path, "w") as f: + f.write(json.dumps(lvis_results)) + f.flush() + + if not self._do_evaluation: + self._logger.info("Annotations are not available for evaluation.") + return + + self._logger.info("Evaluating predictions ...") + for task in sorted(tasks): + res = _evaluate_predictions_on_lvis( + self._lvis_api, + lvis_results, + task, + max_dets_per_image=self._max_dets_per_image, + class_names=self._metadata.get("thing_classes"), + ) + self._results[task] = res + + def _eval_box_proposals(self, predictions): + """ + Evaluate the box proposals in predictions. + Fill self._results with the metrics for "box_proposals" task. + """ + if self._output_dir: + # Saving generated box proposals to file. + # Predicted box_proposals are in XYXY_ABS mode. + bbox_mode = BoxMode.XYXY_ABS.value + ids, boxes, objectness_logits = [], [], [] + for prediction in predictions: + ids.append(prediction["image_id"]) + boxes.append(prediction["proposals"].proposal_boxes.tensor.numpy()) + objectness_logits.append(prediction["proposals"].objectness_logits.numpy()) + + proposal_data = { + "boxes": boxes, + "objectness_logits": objectness_logits, + "ids": ids, + "bbox_mode": bbox_mode, + } + with PathManager.open(os.path.join(self._output_dir, "box_proposals.pkl"), "wb") as f: + pickle.dump(proposal_data, f) + + if not self._do_evaluation: + self._logger.info("Annotations are not available for evaluation.") + return + + self._logger.info("Evaluating bbox proposals ...") + res = {} + areas = {"all": "", "small": "s", "medium": "m", "large": "l"} + for limit in [100, 1000]: + for area, suffix in areas.items(): + stats = _evaluate_box_proposals(predictions, self._lvis_api, area=area, limit=limit) + key = "AR{}@{:d}".format(suffix, limit) + res[key] = float(stats["ar"].item() * 100) + self._logger.info("Proposal metrics: \n" + create_small_table(res)) + self._results["box_proposals"] = res + + +# inspired from Detectron: +# https://github.com/facebookresearch/Detectron/blob/a6a835f5b8208c45d0dce217ce9bbda915f44df7/detectron/datasets/json_dataset_evaluator.py#L255 # noqa +def _evaluate_box_proposals(dataset_predictions, lvis_api, thresholds=None, area="all", limit=None): + """ + Evaluate detection proposal recall metrics. This function is a much + faster alternative to the official LVIS API recall evaluation code. However, + it produces slightly different results. 
+ """ + # Record max overlap value for each gt box + # Return vector of overlap values + areas = { + "all": 0, + "small": 1, + "medium": 2, + "large": 3, + "96-128": 4, + "128-256": 5, + "256-512": 6, + "512-inf": 7, + } + area_ranges = [ + [0 ** 2, 1e5 ** 2], # all + [0 ** 2, 32 ** 2], # small + [32 ** 2, 96 ** 2], # medium + [96 ** 2, 1e5 ** 2], # large + [96 ** 2, 128 ** 2], # 96-128 + [128 ** 2, 256 ** 2], # 128-256 + [256 ** 2, 512 ** 2], # 256-512 + [512 ** 2, 1e5 ** 2], + ] # 512-inf + assert area in areas, "Unknown area range: {}".format(area) + area_range = area_ranges[areas[area]] + gt_overlaps = [] + num_pos = 0 + + for prediction_dict in dataset_predictions: + predictions = prediction_dict["proposals"] + + # sort predictions in descending order + # TODO maybe remove this and make it explicit in the documentation + inds = predictions.objectness_logits.sort(descending=True)[1] + predictions = predictions[inds] + + ann_ids = lvis_api.get_ann_ids(img_ids=[prediction_dict["image_id"]]) + anno = lvis_api.load_anns(ann_ids) + gt_boxes = [ + BoxMode.convert(obj["bbox"], BoxMode.XYWH_ABS, BoxMode.XYXY_ABS) for obj in anno + ] + gt_boxes = torch.as_tensor(gt_boxes).reshape(-1, 4) # guard against no boxes + gt_boxes = Boxes(gt_boxes) + gt_areas = torch.as_tensor([obj["area"] for obj in anno]) + + if len(gt_boxes) == 0 or len(predictions) == 0: + continue + + valid_gt_inds = (gt_areas >= area_range[0]) & (gt_areas <= area_range[1]) + gt_boxes = gt_boxes[valid_gt_inds] + + num_pos += len(gt_boxes) + + if len(gt_boxes) == 0: + continue + + if limit is not None and len(predictions) > limit: + predictions = predictions[:limit] + + overlaps = pairwise_iou(predictions.proposal_boxes, gt_boxes) + + _gt_overlaps = torch.zeros(len(gt_boxes)) + for j in range(min(len(predictions), len(gt_boxes))): + # find which proposal box maximally covers each gt box + # and get the iou amount of coverage for each gt box + max_overlaps, argmax_overlaps = overlaps.max(dim=0) + + # find which gt box is 'best' covered (i.e. 'best' = most iou) + gt_ovr, gt_ind = max_overlaps.max(dim=0) + assert gt_ovr >= 0 + # find the proposal box that covers the best covered gt box + box_ind = argmax_overlaps[gt_ind] + # record the iou coverage of this gt box + _gt_overlaps[j] = overlaps[box_ind, gt_ind] + assert _gt_overlaps[j] == gt_ovr + # mark the proposal box and the gt box as used + overlaps[box_ind, :] = -1 + overlaps[:, gt_ind] = -1 + + # append recorded iou coverage level + gt_overlaps.append(_gt_overlaps) + gt_overlaps = ( + torch.cat(gt_overlaps, dim=0) if len(gt_overlaps) else torch.zeros(0, dtype=torch.float32) + ) + gt_overlaps, _ = torch.sort(gt_overlaps) + + if thresholds is None: + step = 0.05 + thresholds = torch.arange(0.5, 0.95 + 1e-5, step, dtype=torch.float32) + recalls = torch.zeros_like(thresholds) + # compute recall for each iou threshold + for i, t in enumerate(thresholds): + recalls[i] = (gt_overlaps >= t).float().sum() / float(num_pos) + # ar = 2 * np.trapz(recalls, thresholds) + ar = recalls.mean() + return { + "ar": ar, + "recalls": recalls, + "thresholds": thresholds, + "gt_overlaps": gt_overlaps, + "num_pos": num_pos, + } + + +def _evaluate_predictions_on_lvis( + lvis_gt, lvis_results, iou_type, max_dets_per_image=None, class_names=None +): + """ + Args: + iou_type (str): + max_dets_per_image (None or int): limit on maximum detections per image in evaluating AP + This limit, by default of the LVIS dataset, is 300. 
+ class_names (None or list[str]): if provided, will use it to predict + per-category AP. + + Returns: + a dict of {metric name: score} + """ + metrics = { + "bbox": ["AP", "AP50", "AP75", "APs", "APm", "APl", "APr", "APc", "APf"], + "segm": ["AP", "AP50", "AP75", "APs", "APm", "APl", "APr", "APc", "APf"], + }[iou_type] + + logger = logging.getLogger(__name__) + + if len(lvis_results) == 0: # TODO: check if needed + logger.warn("No predictions from the model!") + return {metric: float("nan") for metric in metrics} + + if iou_type == "segm": + lvis_results = copy.deepcopy(lvis_results) + # When evaluating mask AP, if the results contain bbox, LVIS API will + # use the box area as the area of the instance, instead of the mask area. + # This leads to a different definition of small/medium/large. + # We remove the bbox field to let mask AP use mask area. + for c in lvis_results: + c.pop("bbox", None) + + if max_dets_per_image is None: + max_dets_per_image = 300 # Default for LVIS dataset + + from lvis import LVISEval, LVISResults + + logger.info(f"Evaluating with max detections per image = {max_dets_per_image}") + lvis_results = LVISResults(lvis_gt, lvis_results, max_dets=max_dets_per_image) + lvis_eval = LVISEval(lvis_gt, lvis_results, iou_type) + lvis_eval.run() + lvis_eval.print_results() + + # Pull the standard metrics from the LVIS results + results = lvis_eval.get_results() + results = {metric: float(results[metric] * 100) for metric in metrics} + logger.info("Evaluation results for {}: \n".format(iou_type) + create_small_table(results)) + return results diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/panoptic_evaluation.py b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/panoptic_evaluation.py new file mode 100644 index 0000000..9fb3462 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/panoptic_evaluation.py @@ -0,0 +1,199 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import contextlib +import io +import itertools +import json +import logging +import numpy as np +import os +import tempfile +from collections import OrderedDict +from typing import Optional +from PIL import Image +from tabulate import tabulate + +from detectron2.data import MetadataCatalog +from detectron2.utils import comm +from detectron2.utils.file_io import PathManager + +from .evaluator import DatasetEvaluator + +logger = logging.getLogger(__name__) + + +class COCOPanopticEvaluator(DatasetEvaluator): + """ + Evaluate Panoptic Quality metrics on COCO using PanopticAPI. + It saves panoptic segmentation prediction in `output_dir` + + It contains a synchronize call and has to be called from all workers. + """ + + def __init__(self, dataset_name: str, output_dir: Optional[str] = None): + """ + Args: + dataset_name: name of the dataset + output_dir: output directory to save results for evaluation. + """ + self._metadata = MetadataCatalog.get(dataset_name) + self._thing_contiguous_id_to_dataset_id = { + v: k for k, v in self._metadata.thing_dataset_id_to_contiguous_id.items() + } + self._stuff_contiguous_id_to_dataset_id = { + v: k for k, v in self._metadata.stuff_dataset_id_to_contiguous_id.items() + } + + self._output_dir = output_dir + if self._output_dir is not None: + PathManager.mkdirs(self._output_dir) + + def reset(self): + self._predictions = [] + + def _convert_category_id(self, segment_info): + isthing = segment_info.pop("isthing", None) + if isthing is None: + # the model produces panoptic category id directly. 
No more conversion needed + return segment_info + if isthing is True: + segment_info["category_id"] = self._thing_contiguous_id_to_dataset_id[ + segment_info["category_id"] + ] + else: + segment_info["category_id"] = self._stuff_contiguous_id_to_dataset_id[ + segment_info["category_id"] + ] + return segment_info + + def process(self, inputs, outputs): + from panopticapi.utils import id2rgb + + for input, output in zip(inputs, outputs): + panoptic_img, segments_info = output["panoptic_seg"] + panoptic_img = panoptic_img.cpu().numpy() + if segments_info is None: + # If "segments_info" is None, we assume "panoptic_img" is a + # H*W int32 image storing the panoptic_id in the format of + # category_id * label_divisor + instance_id. We reserve -1 for + # VOID label, and add 1 to panoptic_img since the official + # evaluation script uses 0 for VOID label. + label_divisor = self._metadata.label_divisor + segments_info = [] + for panoptic_label in np.unique(panoptic_img): + if panoptic_label == -1: + # VOID region. + continue + pred_class = panoptic_label // label_divisor + isthing = ( + pred_class in self._metadata.thing_dataset_id_to_contiguous_id.values() + ) + segments_info.append( + { + "id": int(panoptic_label) + 1, + "category_id": int(pred_class), + "isthing": bool(isthing), + } + ) + # Official evaluation script uses 0 for VOID label. + panoptic_img += 1 + + file_name = os.path.basename(input["file_name"]) + file_name_png = os.path.splitext(file_name)[0] + ".png" + with io.BytesIO() as out: + Image.fromarray(id2rgb(panoptic_img)).save(out, format="PNG") + segments_info = [self._convert_category_id(x) for x in segments_info] + self._predictions.append( + { + "image_id": input["image_id"], + "file_name": file_name_png, + "png_string": out.getvalue(), + "segments_info": segments_info, + } + ) + + def evaluate(self): + comm.synchronize() + + self._predictions = comm.gather(self._predictions) + self._predictions = list(itertools.chain(*self._predictions)) + if not comm.is_main_process(): + return + + # PanopticApi requires local files + gt_json = PathManager.get_local_path(self._metadata.panoptic_json) + gt_folder = PathManager.get_local_path(self._metadata.panoptic_root) + + with tempfile.TemporaryDirectory(prefix="panoptic_eval") as pred_dir: + logger.info("Writing all panoptic predictions to {} ...".format(pred_dir)) + for p in self._predictions: + with open(os.path.join(pred_dir, p["file_name"]), "wb") as f: + f.write(p.pop("png_string")) + + with open(gt_json, "r") as f: + json_data = json.load(f) + json_data["annotations"] = self._predictions + + output_dir = self._output_dir or pred_dir + predictions_json = os.path.join(output_dir, "predictions.json") + with PathManager.open(predictions_json, "w") as f: + f.write(json.dumps(json_data)) + + from panopticapi.evaluation import pq_compute + + with contextlib.redirect_stdout(io.StringIO()): + pq_res = pq_compute( + gt_json, + PathManager.get_local_path(predictions_json), + gt_folder=gt_folder, + pred_folder=pred_dir, + ) + + res = {} + res["PQ"] = 100 * pq_res["All"]["pq"] + res["SQ"] = 100 * pq_res["All"]["sq"] + res["RQ"] = 100 * pq_res["All"]["rq"] + res["PQ_th"] = 100 * pq_res["Things"]["pq"] + res["SQ_th"] = 100 * pq_res["Things"]["sq"] + res["RQ_th"] = 100 * pq_res["Things"]["rq"] + res["PQ_st"] = 100 * pq_res["Stuff"]["pq"] + res["SQ_st"] = 100 * pq_res["Stuff"]["sq"] + res["RQ_st"] = 100 * pq_res["Stuff"]["rq"] + + results = OrderedDict({"panoptic_seg": res}) + _print_panoptic_results(pq_res) + + return results + + +def 
_print_panoptic_results(pq_res): + headers = ["", "PQ", "SQ", "RQ", "#categories"] + data = [] + for name in ["All", "Things", "Stuff"]: + row = [name] + [pq_res[name][k] * 100 for k in ["pq", "sq", "rq"]] + [pq_res[name]["n"]] + data.append(row) + table = tabulate( + data, headers=headers, tablefmt="pipe", floatfmt=".3f", stralign="center", numalign="center" + ) + logger.info("Panoptic Evaluation Results:\n" + table) + + +if __name__ == "__main__": + from detectron2.utils.logger import setup_logger + + logger = setup_logger() + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("--gt-json") + parser.add_argument("--gt-dir") + parser.add_argument("--pred-json") + parser.add_argument("--pred-dir") + args = parser.parse_args() + + from panopticapi.evaluation import pq_compute + + with contextlib.redirect_stdout(io.StringIO()): + pq_res = pq_compute( + args.gt_json, args.pred_json, gt_folder=args.gt_dir, pred_folder=args.pred_dir + ) + _print_panoptic_results(pq_res) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/pascal_voc_evaluation.py b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/pascal_voc_evaluation.py new file mode 100644 index 0000000..1d1abcd --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/pascal_voc_evaluation.py @@ -0,0 +1,300 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. + +import logging +import numpy as np +import os +import tempfile +import xml.etree.ElementTree as ET +from collections import OrderedDict, defaultdict +from functools import lru_cache +import torch + +from detectron2.data import MetadataCatalog +from detectron2.utils import comm +from detectron2.utils.file_io import PathManager + +from .evaluator import DatasetEvaluator + + +class PascalVOCDetectionEvaluator(DatasetEvaluator): + """ + Evaluate Pascal VOC style AP for Pascal VOC dataset. + It contains a synchronization, therefore has to be called from all ranks. + + Note that the concept of AP can be implemented in different ways and may not + produce identical results. This class mimics the implementation of the official + Pascal VOC Matlab API, and should produce similar but not identical results to the + official API. + """ + + def __init__(self, dataset_name): + """ + Args: + dataset_name (str): name of the dataset, e.g., "voc_2007_test" + """ + self._dataset_name = dataset_name + meta = MetadataCatalog.get(dataset_name) + + # Too many tiny files, download all to local for speed. 
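+        # Hypothetical example of the paths resolved below: with
+        # meta.dirname="datasets/VOC2007" and meta.split="test", the template
+        # maps "000001" -> "datasets/VOC2007/Annotations/000001.xml" and the
+        # image set path becomes "datasets/VOC2007/ImageSets/Main/test.txt".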
+        annotation_dir_local = PathManager.get_local_path(
+            os.path.join(meta.dirname, "Annotations/")
+        )
+        self._anno_file_template = os.path.join(annotation_dir_local, "{}.xml")
+        self._image_set_path = os.path.join(meta.dirname, "ImageSets", "Main", meta.split + ".txt")
+        self._class_names = meta.thing_classes
+        assert meta.year in [2007, 2012], meta.year
+        self._is_2007 = meta.year == 2007
+        self._cpu_device = torch.device("cpu")
+        self._logger = logging.getLogger(__name__)
+
+    def reset(self):
+        self._predictions = defaultdict(list)  # class name -> list of prediction strings
+
+    def process(self, inputs, outputs):
+        for input, output in zip(inputs, outputs):
+            image_id = input["image_id"]
+            instances = output["instances"].to(self._cpu_device)
+            boxes = instances.pred_boxes.tensor.numpy()
+            scores = instances.scores.tolist()
+            classes = instances.pred_classes.tolist()
+            for box, score, cls in zip(boxes, scores, classes):
+                xmin, ymin, xmax, ymax = box
+                # The inverse of data loading logic in `datasets/pascal_voc.py`
+                xmin += 1
+                ymin += 1
+                self._predictions[cls].append(
+                    f"{image_id} {score:.3f} {xmin:.1f} {ymin:.1f} {xmax:.1f} {ymax:.1f}"
+                )
+
+    def evaluate(self):
+        """
+        Returns:
+            dict: has a key "bbox", whose value is a dict of "AP", "AP50", and "AP75".
+        """
+        all_predictions = comm.gather(self._predictions, dst=0)
+        if not comm.is_main_process():
+            return
+        predictions = defaultdict(list)
+        for predictions_per_rank in all_predictions:
+            for clsid, lines in predictions_per_rank.items():
+                predictions[clsid].extend(lines)
+        del all_predictions
+
+        self._logger.info(
+            "Evaluating {} using {} metric. "
+            "Note that results do not use the official Matlab API.".format(
+                self._dataset_name, 2007 if self._is_2007 else 2012
+            )
+        )
+
+        with tempfile.TemporaryDirectory(prefix="pascal_voc_eval_") as dirname:
+            res_file_template = os.path.join(dirname, "{}.txt")
+
+            aps = defaultdict(list)  # iou -> ap per class
+            for cls_id, cls_name in enumerate(self._class_names):
+                lines = predictions.get(cls_id, [""])
+
+                with open(res_file_template.format(cls_name), "w") as f:
+                    f.write("\n".join(lines))
+
+                for thresh in range(50, 100, 5):
+                    rec, prec, ap = voc_eval(
+                        res_file_template,
+                        self._anno_file_template,
+                        self._image_set_path,
+                        cls_name,
+                        ovthresh=thresh / 100.0,
+                        use_07_metric=self._is_2007,
+                    )
+                    aps[thresh].append(ap * 100)
+
+        ret = OrderedDict()
+        mAP = {iou: np.mean(x) for iou, x in aps.items()}
+        ret["bbox"] = {"AP": np.mean(list(mAP.values())), "AP50": mAP[50], "AP75": mAP[75]}
+        return ret
+
+
+##############################################################################
+#
+# Below code is modified from
+# https://github.com/rbgirshick/py-faster-rcnn/blob/master/lib/datasets/voc_eval.py
+# --------------------------------------------------------
+# Fast/er R-CNN
+# Licensed under The MIT License [see LICENSE for details]
+# Written by Bharath Hariharan
+# --------------------------------------------------------
+
+"""Python implementation of the PASCAL VOC devkit's AP evaluation code."""
+
+
+@lru_cache(maxsize=None)
+def parse_rec(filename):
+    """Parse a PASCAL VOC xml file."""
+    with PathManager.open(filename) as f:
+        tree = ET.parse(f)
+    objects = []
+    for obj in tree.findall("object"):
+        obj_struct = {}
+        obj_struct["name"] = obj.find("name").text
+        obj_struct["pose"] = obj.find("pose").text
+        obj_struct["truncated"] = int(obj.find("truncated").text)
+        obj_struct["difficult"] = int(obj.find("difficult").text)
+        bbox = obj.find("bndbox")
+        obj_struct["bbox"] = [
int(bbox.find("xmin").text), + int(bbox.find("ymin").text), + int(bbox.find("xmax").text), + int(bbox.find("ymax").text), + ] + objects.append(obj_struct) + + return objects + + +def voc_ap(rec, prec, use_07_metric=False): + """Compute VOC AP given precision and recall. If use_07_metric is true, uses + the VOC 07 11-point method (default:False). + """ + if use_07_metric: + # 11 point metric + ap = 0.0 + for t in np.arange(0.0, 1.1, 0.1): + if np.sum(rec >= t) == 0: + p = 0 + else: + p = np.max(prec[rec >= t]) + ap = ap + p / 11.0 + else: + # correct AP calculation + # first append sentinel values at the end + mrec = np.concatenate(([0.0], rec, [1.0])) + mpre = np.concatenate(([0.0], prec, [0.0])) + + # compute the precision envelope + for i in range(mpre.size - 1, 0, -1): + mpre[i - 1] = np.maximum(mpre[i - 1], mpre[i]) + + # to calculate area under PR curve, look for points + # where X axis (recall) changes value + i = np.where(mrec[1:] != mrec[:-1])[0] + + # and sum (\Delta recall) * prec + ap = np.sum((mrec[i + 1] - mrec[i]) * mpre[i + 1]) + return ap + + +def voc_eval(detpath, annopath, imagesetfile, classname, ovthresh=0.5, use_07_metric=False): + """rec, prec, ap = voc_eval(detpath, + annopath, + imagesetfile, + classname, + [ovthresh], + [use_07_metric]) + + Top level function that does the PASCAL VOC evaluation. + + detpath: Path to detections + detpath.format(classname) should produce the detection results file. + annopath: Path to annotations + annopath.format(imagename) should be the xml annotations file. + imagesetfile: Text file containing the list of images, one image per line. + classname: Category name (duh) + [ovthresh]: Overlap threshold (default = 0.5) + [use_07_metric]: Whether to use VOC07's 11 point AP computation + (default False) + """ + # assumes detections are in detpath.format(classname) + # assumes annotations are in annopath.format(imagename) + # assumes imagesetfile is a text file with each line an image name + + # first load gt + # read list of images + with PathManager.open(imagesetfile, "r") as f: + lines = f.readlines() + imagenames = [x.strip() for x in lines] + + # load annots + recs = {} + for imagename in imagenames: + recs[imagename] = parse_rec(annopath.format(imagename)) + + # extract gt objects for this class + class_recs = {} + npos = 0 + for imagename in imagenames: + R = [obj for obj in recs[imagename] if obj["name"] == classname] + bbox = np.array([x["bbox"] for x in R]) + difficult = np.array([x["difficult"] for x in R]).astype(np.bool) + # difficult = np.array([False for x in R]).astype(np.bool) # treat all "difficult" as GT + det = [False] * len(R) + npos = npos + sum(~difficult) + class_recs[imagename] = {"bbox": bbox, "difficult": difficult, "det": det} + + # read dets + detfile = detpath.format(classname) + with open(detfile, "r") as f: + lines = f.readlines() + + splitlines = [x.strip().split(" ") for x in lines] + image_ids = [x[0] for x in splitlines] + confidence = np.array([float(x[1]) for x in splitlines]) + BB = np.array([[float(z) for z in x[2:]] for x in splitlines]).reshape(-1, 4) + + # sort by confidence + sorted_ind = np.argsort(-confidence) + BB = BB[sorted_ind, :] + image_ids = [image_ids[x] for x in sorted_ind] + + # go down dets and mark TPs and FPs + nd = len(image_ids) + tp = np.zeros(nd) + fp = np.zeros(nd) + for d in range(nd): + R = class_recs[image_ids[d]] + bb = BB[d, :].astype(float) + ovmax = -np.inf + BBGT = R["bbox"].astype(float) + + if BBGT.size > 0: + # compute overlaps + # intersection + ixmin = 
np.maximum(BBGT[:, 0], bb[0]) + iymin = np.maximum(BBGT[:, 1], bb[1]) + ixmax = np.minimum(BBGT[:, 2], bb[2]) + iymax = np.minimum(BBGT[:, 3], bb[3]) + iw = np.maximum(ixmax - ixmin + 1.0, 0.0) + ih = np.maximum(iymax - iymin + 1.0, 0.0) + inters = iw * ih + + # union + uni = ( + (bb[2] - bb[0] + 1.0) * (bb[3] - bb[1] + 1.0) + + (BBGT[:, 2] - BBGT[:, 0] + 1.0) * (BBGT[:, 3] - BBGT[:, 1] + 1.0) + - inters + ) + + overlaps = inters / uni + ovmax = np.max(overlaps) + jmax = np.argmax(overlaps) + + if ovmax > ovthresh: + if not R["difficult"][jmax]: + if not R["det"][jmax]: + tp[d] = 1.0 + R["det"][jmax] = 1 + else: + fp[d] = 1.0 + else: + fp[d] = 1.0 + + # compute precision recall + fp = np.cumsum(fp) + tp = np.cumsum(tp) + rec = tp / float(npos) + # avoid divide by zero in case the first detection matches a difficult + # ground truth + prec = tp / np.maximum(tp + fp, np.finfo(np.float64).eps) + ap = voc_ap(rec, prec, use_07_metric) + + return rec, prec, ap diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/rotated_coco_evaluation.py b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/rotated_coco_evaluation.py new file mode 100644 index 0000000..ea6d1b3 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/rotated_coco_evaluation.py @@ -0,0 +1,207 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import itertools +import json +import numpy as np +import os +import torch +from pycocotools.cocoeval import COCOeval, maskUtils + +from detectron2.structures import BoxMode, RotatedBoxes, pairwise_iou_rotated +from detectron2.utils.file_io import PathManager + +from .coco_evaluation import COCOEvaluator + + +class RotatedCOCOeval(COCOeval): + @staticmethod + def is_rotated(box_list): + if type(box_list) == np.ndarray: + return box_list.shape[1] == 5 + elif type(box_list) == list: + if box_list == []: # cannot decide the box_dim + return False + return np.all( + np.array( + [ + (len(obj) == 5) and ((type(obj) == list) or (type(obj) == np.ndarray)) + for obj in box_list + ] + ) + ) + return False + + @staticmethod + def boxlist_to_tensor(boxlist, output_box_dim): + if type(boxlist) == np.ndarray: + box_tensor = torch.from_numpy(boxlist) + elif type(boxlist) == list: + if boxlist == []: + return torch.zeros((0, output_box_dim), dtype=torch.float32) + else: + box_tensor = torch.FloatTensor(boxlist) + else: + raise Exception("Unrecognized boxlist type") + + input_box_dim = box_tensor.shape[1] + if input_box_dim != output_box_dim: + if input_box_dim == 4 and output_box_dim == 5: + box_tensor = BoxMode.convert(box_tensor, BoxMode.XYWH_ABS, BoxMode.XYWHA_ABS) + else: + raise Exception( + "Unable to convert from {}-dim box to {}-dim box".format( + input_box_dim, output_box_dim + ) + ) + return box_tensor + + def compute_iou_dt_gt(self, dt, gt, is_crowd): + if self.is_rotated(dt) or self.is_rotated(gt): + # TODO: take is_crowd into consideration + assert all(c == 0 for c in is_crowd) + dt = RotatedBoxes(self.boxlist_to_tensor(dt, output_box_dim=5)) + gt = RotatedBoxes(self.boxlist_to_tensor(gt, output_box_dim=5)) + return pairwise_iou_rotated(dt, gt) + else: + # This is the same as the classical COCO evaluation + return maskUtils.iou(dt, gt, is_crowd) + + def computeIoU(self, imgId, catId): + p = self.params + if p.useCats: + gt = self._gts[imgId, catId] + dt = self._dts[imgId, catId] + else: + gt = [_ for cId in p.catIds for _ in self._gts[imgId, cId]] + dt = [_ for cId in p.catIds for _ in self._dts[imgId, cId]] + if len(gt) == 0 and 
len(dt) == 0: + return [] + inds = np.argsort([-d["score"] for d in dt], kind="mergesort") + dt = [dt[i] for i in inds] + if len(dt) > p.maxDets[-1]: + dt = dt[0 : p.maxDets[-1]] + + assert p.iouType == "bbox", "unsupported iouType for iou computation" + + g = [g["bbox"] for g in gt] + d = [d["bbox"] for d in dt] + + # compute iou between each dt and gt region + iscrowd = [int(o["iscrowd"]) for o in gt] + + # Note: this function is copied from cocoeval.py in cocoapi + # and the major difference is here. + ious = self.compute_iou_dt_gt(d, g, iscrowd) + return ious + + +class RotatedCOCOEvaluator(COCOEvaluator): + """ + Evaluate object proposal/instance detection outputs using COCO-like metrics and APIs, + with rotated boxes support. + Note: this uses IOU only and does not consider angle differences. + """ + + def process(self, inputs, outputs): + """ + Args: + inputs: the inputs to a COCO model (e.g., GeneralizedRCNN). + It is a list of dict. Each dict corresponds to an image and + contains keys like "height", "width", "file_name", "image_id". + outputs: the outputs of a COCO model. It is a list of dicts with key + "instances" that contains :class:`Instances`. + """ + for input, output in zip(inputs, outputs): + prediction = {"image_id": input["image_id"]} + + if "instances" in output: + instances = output["instances"].to(self._cpu_device) + + prediction["instances"] = self.instances_to_json(instances, input["image_id"]) + if "proposals" in output: + prediction["proposals"] = output["proposals"].to(self._cpu_device) + self._predictions.append(prediction) + + def instances_to_json(self, instances, img_id): + num_instance = len(instances) + if num_instance == 0: + return [] + + boxes = instances.pred_boxes.tensor.numpy() + if boxes.shape[1] == 4: + boxes = BoxMode.convert(boxes, BoxMode.XYXY_ABS, BoxMode.XYWH_ABS) + boxes = boxes.tolist() + scores = instances.scores.tolist() + classes = instances.pred_classes.tolist() + + results = [] + for k in range(num_instance): + result = { + "image_id": img_id, + "category_id": classes[k], + "bbox": boxes[k], + "score": scores[k], + } + + results.append(result) + return results + + def _eval_predictions(self, predictions, img_ids=None): # img_ids: unused + """ + Evaluate predictions on the given tasks. + Fill self._results with the metrics of the tasks. 
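+
+        Note: only the "bbox" task is supported here (enforced by the assert
+        below); matching uses rotated IoU only and ignores angle differences.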
+ """ + self._logger.info("Preparing results for COCO format ...") + coco_results = list(itertools.chain(*[x["instances"] for x in predictions])) + + # unmap the category ids for COCO + if hasattr(self._metadata, "thing_dataset_id_to_contiguous_id"): + reverse_id_mapping = { + v: k for k, v in self._metadata.thing_dataset_id_to_contiguous_id.items() + } + for result in coco_results: + result["category_id"] = reverse_id_mapping[result["category_id"]] + + if self._output_dir: + file_path = os.path.join(self._output_dir, "coco_instances_results.json") + self._logger.info("Saving results to {}".format(file_path)) + with PathManager.open(file_path, "w") as f: + f.write(json.dumps(coco_results)) + f.flush() + + if not self._do_evaluation: + self._logger.info("Annotations are not available for evaluation.") + return + + self._logger.info("Evaluating predictions ...") + + assert self._tasks is None or set(self._tasks) == { + "bbox" + }, "[RotatedCOCOEvaluator] Only bbox evaluation is supported" + coco_eval = ( + self._evaluate_predictions_on_coco(self._coco_api, coco_results) + if len(coco_results) > 0 + else None # cocoapi does not handle empty results very well + ) + + task = "bbox" + res = self._derive_coco_results( + coco_eval, task, class_names=self._metadata.get("thing_classes") + ) + self._results[task] = res + + def _evaluate_predictions_on_coco(self, coco_gt, coco_results): + """ + Evaluate the coco results using COCOEval API. + """ + assert len(coco_results) > 0 + + coco_dt = coco_gt.loadRes(coco_results) + + # Only bbox is supported for now + coco_eval = RotatedCOCOeval(coco_gt, coco_dt, iouType="bbox") + + coco_eval.evaluate() + coco_eval.accumulate() + coco_eval.summarize() + + return coco_eval diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/sem_seg_evaluation.py b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/sem_seg_evaluation.py new file mode 100644 index 0000000..7a19db7 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/sem_seg_evaluation.py @@ -0,0 +1,184 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import itertools +import json +import logging +import numpy as np +import os +from collections import OrderedDict +import PIL.Image as Image +import pycocotools.mask as mask_util +import torch + +from detectron2.data import DatasetCatalog, MetadataCatalog +from detectron2.utils.comm import all_gather, is_main_process, synchronize +from detectron2.utils.file_io import PathManager + +from .evaluator import DatasetEvaluator + + +class SemSegEvaluator(DatasetEvaluator): + """ + Evaluate semantic segmentation metrics. + """ + + def __init__( + self, + dataset_name, + distributed=True, + output_dir=None, + *, + num_classes=None, + ignore_label=None, + ): + """ + Args: + dataset_name (str): name of the dataset to be evaluated. + distributed (bool): if True, will collect results from all ranks for evaluation. + Otherwise, will evaluate the results in the current process. + output_dir (str): an output directory to dump results. + num_classes, ignore_label: deprecated argument + """ + self._logger = logging.getLogger(__name__) + if num_classes is not None: + self._logger.warn( + "SemSegEvaluator(num_classes) is deprecated! It should be obtained from metadata." + ) + if ignore_label is not None: + self._logger.warn( + "SemSegEvaluator(ignore_label) is deprecated! It should be obtained from metadata." 
+            )
+        self._dataset_name = dataset_name
+        self._distributed = distributed
+        self._output_dir = output_dir
+
+        self._cpu_device = torch.device("cpu")
+
+        self.input_file_to_gt_file = {
+            dataset_record["file_name"]: dataset_record["sem_seg_file_name"]
+            for dataset_record in DatasetCatalog.get(dataset_name)
+        }
+
+        meta = MetadataCatalog.get(dataset_name)
+        # Dict that maps contiguous training ids to COCO category ids
+        try:
+            c2d = meta.stuff_dataset_id_to_contiguous_id
+            self._contiguous_id_to_dataset_id = {v: k for k, v in c2d.items()}
+        except AttributeError:
+            self._contiguous_id_to_dataset_id = None
+        self._class_names = meta.stuff_classes
+        self._num_classes = len(meta.stuff_classes)
+        if num_classes is not None:
+            assert self._num_classes == num_classes, f"{self._num_classes} != {num_classes}"
+        self._ignore_label = ignore_label if ignore_label is not None else meta.ignore_label
+
+    def reset(self):
+        self._conf_matrix = np.zeros((self._num_classes + 1, self._num_classes + 1), dtype=np.int64)
+        self._predictions = []
+
+    def process(self, inputs, outputs):
+        """
+        Args:
+            inputs: the inputs to a model.
+                It is a list of dicts. Each dict corresponds to an image and
+                contains keys like "height", "width", "file_name".
+            outputs: the outputs of a model. It is either a list of semantic segmentation
+                predictions (Tensor [H, W]) or a list of dicts with key "sem_seg" that
+                contains the semantic segmentation prediction in the same format.
+        """
+        for input, output in zip(inputs, outputs):
+            output = output["sem_seg"].argmax(dim=0).to(self._cpu_device)
+            pred = np.array(output, dtype=int)
+            with PathManager.open(self.input_file_to_gt_file[input["file_name"]], "rb") as f:
+                gt = np.array(Image.open(f), dtype=int)
+
+            gt[gt == self._ignore_label] = self._num_classes
+
+            self._conf_matrix += np.bincount(
+                (self._num_classes + 1) * pred.reshape(-1) + gt.reshape(-1),
+                minlength=self._conf_matrix.size,
+            ).reshape(self._conf_matrix.shape)
+
+            self._predictions.extend(self.encode_json_sem_seg(pred, input["file_name"]))
+
+    def evaluate(self):
+        """
+        Evaluates standard semantic segmentation metrics (http://cocodataset.org/#stuff-eval):
+
+        * Mean intersection-over-union averaged across classes (mIoU)
+        * Frequency Weighted IoU (fwIoU)
+        * Mean pixel accuracy averaged across classes (mACC)
+        * Pixel Accuracy (pACC)
+        """
+        if self._distributed:
+            synchronize()
+            conf_matrix_list = all_gather(self._conf_matrix)
+            self._predictions = all_gather(self._predictions)
+            self._predictions = list(itertools.chain(*self._predictions))
+            if not is_main_process():
+                return
+
+            self._conf_matrix = np.zeros_like(self._conf_matrix)
+            for conf_matrix in conf_matrix_list:
+                self._conf_matrix += conf_matrix
+
+        if self._output_dir:
+            PathManager.mkdirs(self._output_dir)
+            file_path = os.path.join(self._output_dir, "sem_seg_predictions.json")
+            with PathManager.open(file_path, "w") as f:
+                f.write(json.dumps(self._predictions))
+
+        acc = np.full(self._num_classes, np.nan, dtype=float)
+        iou = np.full(self._num_classes, np.nan, dtype=float)
+        tp = self._conf_matrix.diagonal()[:-1].astype(float)
+        pos_gt = np.sum(self._conf_matrix[:-1, :-1], axis=0).astype(float)
+        class_weights = pos_gt / np.sum(pos_gt)
+        pos_pred = np.sum(self._conf_matrix[:-1, :-1], axis=1).astype(float)
+        acc_valid = pos_gt > 0
+        acc[acc_valid] = tp[acc_valid] / pos_gt[acc_valid]
+        iou_valid = (pos_gt + pos_pred) > 0
+        union = pos_gt + pos_pred - tp
+        iou[acc_valid] = tp[acc_valid] / union[acc_valid]
+        macc = np.sum(acc[acc_valid]) / np.sum(acc_valid)
+        miou = np.sum(iou[acc_valid]) / np.sum(iou_valid)
+        fiou = np.sum(iou[acc_valid] * class_weights[acc_valid])
+        pacc = np.sum(tp) / np.sum(pos_gt)
+
+        res = {}
+        res["mIoU"] = 100 * miou
+        res["fwIoU"] = 100 * fiou
+        for i, name in enumerate(self._class_names):
+            res["IoU-{}".format(name)] = 100 * iou[i]
+        res["mACC"] = 100 * macc
+        res["pACC"] = 100 * pacc
+        for i, name in enumerate(self._class_names):
+            res["ACC-{}".format(name)] = 100 * acc[i]
+
+        if self._output_dir:
+            file_path = os.path.join(self._output_dir, "sem_seg_evaluation.pth")
+            with PathManager.open(file_path, "wb") as f:
+                torch.save(res, f)
+        results = OrderedDict({"sem_seg": res})
+        self._logger.info(results)
+        return results
+
+    def encode_json_sem_seg(self, sem_seg, input_file_name):
+        """
+        Convert semantic segmentation to COCO stuff format with segments encoded as RLEs.
+        See http://cocodataset.org/#format-results
+        """
+        json_list = []
+        for label in np.unique(sem_seg):
+            if self._contiguous_id_to_dataset_id is not None:
+                assert (
+                    label in self._contiguous_id_to_dataset_id
+                ), "Label {} is not in the metadata info for {}".format(label, self._dataset_name)
+                dataset_id = self._contiguous_id_to_dataset_id[label]
+            else:
+                dataset_id = int(label)
+            mask = (sem_seg == label).astype(np.uint8)
+            mask_rle = mask_util.encode(np.array(mask[:, :, None], order="F"))[0]
+            mask_rle["counts"] = mask_rle["counts"].decode("utf-8")
+            json_list.append(
+                {"file_name": input_file_name, "category_id": dataset_id, "segmentation": mask_rle}
+            )
+        return json_list
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/testing.py b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/testing.py
new file mode 100644
index 0000000..9e5ae62
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/evaluation/testing.py
@@ -0,0 +1,85 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+import logging
+import numpy as np
+import pprint
+import sys
+from collections.abc import Mapping
+
+
+def print_csv_format(results):
+    """
+    Print main metrics in a format similar to Detectron,
+    so that they are easy to copypaste into a spreadsheet.
+
+    Args:
+        results (OrderedDict[dict]): task_name -> {metric -> score}
+            unordered dict can also be printed, but in arbitrary order
+    """
+    assert isinstance(results, Mapping) or not len(results), results
+    logger = logging.getLogger(__name__)
+    for task, res in results.items():
+        if isinstance(res, Mapping):
+            # Don't print "AP-category" metrics since they are usually not tracked.
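+            # For example (illustrative numbers), res = {"AP": 40.1, "AP50": 60.2,
+            # "AP-person": 55.0} keeps only the keys without "-" and logs:
+            #   copypaste: Task: bbox
+            #   copypaste: AP,AP50
+            #   copypaste: 40.1000,60.2000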
+ important_res = [(k, v) for k, v in res.items() if "-" not in k] + logger.info("copypaste: Task: {}".format(task)) + logger.info("copypaste: " + ",".join([k[0] for k in important_res])) + logger.info("copypaste: " + ",".join(["{0:.4f}".format(k[1]) for k in important_res])) + else: + logger.info(f"copypaste: {task}={res}") + + +def verify_results(cfg, results): + """ + Args: + results (OrderedDict[dict]): task_name -> {metric -> score} + + Returns: + bool: whether the verification succeeds or not + """ + expected_results = cfg.TEST.EXPECTED_RESULTS + if not len(expected_results): + return True + + ok = True + for task, metric, expected, tolerance in expected_results: + actual = results[task].get(metric, None) + if actual is None: + ok = False + continue + if not np.isfinite(actual): + ok = False + continue + diff = abs(actual - expected) + if diff > tolerance: + ok = False + + logger = logging.getLogger(__name__) + if not ok: + logger.error("Result verification failed!") + logger.error("Expected Results: " + str(expected_results)) + logger.error("Actual Results: " + pprint.pformat(results)) + + sys.exit(1) + else: + logger.info("Results verification passed.") + return ok + + +def flatten_results_dict(results): + """ + Expand a hierarchical dict of scalars into a flat dict of scalars. + If results[k1][k2][k3] = v, the returned dict will have the entry + {"k1/k2/k3": v}. + + Args: + results (dict): + """ + r = {} + for k, v in results.items(): + if isinstance(v, Mapping): + v = flatten_results_dict(v) + for kk, vv in v.items(): + r[k + "/" + kk] = vv + else: + r[k] = v + return r diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/export/README.md b/motion-gan-pipeline/preprocessing/third/detectron2/export/README.md new file mode 100644 index 0000000..9fcd335 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/export/README.md @@ -0,0 +1,13 @@ + +This directory contains code to prepare a detectron2 model for deployment. +Currently it supports exporting a detectron2 model to Caffe2 format through ONNX. + +Please see [documentation](https://detectron2.readthedocs.io/tutorials/deployment.html) for its usage. + + +### Acknowledgements + +Thanks to Mobile Vision team at Facebook for developing the Caffe2 conversion tools. + +Thanks to Computing Platform Department - PAI team at Alibaba Group (@bddpqq, @chenbohua3) who +help export Detectron2 models to TorchScript. diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/export/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/export/__init__.py new file mode 100644 index 0000000..25e5c94 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/export/__init__.py @@ -0,0 +1,15 @@ +# -*- coding: utf-8 -*- + +try: + from caffe2.proto import caffe2_pb2 as _tmp + + # caffe2 is optional +except ImportError: + pass +else: + from .api import * + +from .flatten import TracingAdapter +from .torchscript import scripting_with_instances, dump_torchscript_IR + +__all__ = [k for k in globals().keys() if not k.startswith("_")] diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/export/api.py b/motion-gan-pipeline/preprocessing/third/detectron2/export/api.py new file mode 100644 index 0000000..ad42721 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/export/api.py @@ -0,0 +1,235 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
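+# A minimal usage sketch for this module (hypothetical cfg/model objects;
+# weights are assumed to be loaded already):
+#
+#   tracer = Caffe2Tracer(cfg, model, inputs)          # wrap the pytorch model
+#   c2_model = tracer.export_caffe2()                  # rewrite with caffe2 ops
+#   c2_model.save_protobuf("./deploy")                 # writes model.pb & friends
+#   reloaded = Caffe2Model.load_protobuf("./deploy")   # run via caffe2 runtime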
+import copy +import logging +import os +import torch +from caffe2.proto import caffe2_pb2 +from torch import nn + +from detectron2.config import CfgNode +from detectron2.utils.file_io import PathManager + +from .caffe2_inference import ProtobufDetectionModel +from .caffe2_modeling import META_ARCH_CAFFE2_EXPORT_TYPE_MAP, convert_batched_inputs_to_c2_format +from .shared import get_pb_arg_vali, get_pb_arg_vals, save_graph + +__all__ = [ + "add_export_config", + "Caffe2Model", + "Caffe2Tracer", +] + + +def add_export_config(cfg): + return cfg + + +class Caffe2Tracer: + """ + Make a detectron2 model traceable with Caffe2 operators. + This class creates a traceable version of a detectron2 model which: + + 1. Rewrite parts of the model using ops in Caffe2. Note that some ops do + not have GPU implementation in Caffe2. + 2. Remove post-processing and only produce raw layer outputs + + After making a traceable model, the class provide methods to export such a + model to different deployment formats. + Exported graph produced by this class take two input tensors: + + 1. (1, C, H, W) float "data" which is an image (usually in [0, 255]). + (H, W) often has to be padded to multiple of 32 (depend on the model + architecture). + 2. 1x3 float "im_info", each row of which is (height, width, 1.0). + Height and width are true image shapes before padding. + + The class currently only supports models using builtin meta architectures. + Batch inference is not supported, and contributions are welcome. + """ + + def __init__(self, cfg: CfgNode, model: nn.Module, inputs): + """ + Args: + cfg (CfgNode): a detectron2 config used to construct caffe2-compatible model. + model (nn.Module): An original pytorch model. Must be among a few official models + in detectron2 that can be converted to become caffe2-compatible automatically. + Weights have to be already loaded to this model. + inputs: sample inputs that the given model takes for inference. + Will be used to trace the model. For most models, random inputs with + no detected objects will not work as they lead to wrong traces. + """ + assert isinstance(cfg, CfgNode), cfg + assert isinstance(model, torch.nn.Module), type(model) + + # TODO make it support custom models, by passing in c2 model directly + C2MetaArch = META_ARCH_CAFFE2_EXPORT_TYPE_MAP[cfg.MODEL.META_ARCHITECTURE] + self.traceable_model = C2MetaArch(cfg, copy.deepcopy(model)) + self.inputs = inputs + self.traceable_inputs = self.traceable_model.get_caffe2_inputs(inputs) + + def export_caffe2(self): + """ + Export the model to Caffe2's protobuf format. + The returned object can be saved with its :meth:`.save_protobuf()` method. + The result can be loaded and executed using Caffe2 runtime. + + Returns: + :class:`Caffe2Model` + """ + from .caffe2_export import export_caffe2_detection_model + + predict_net, init_net = export_caffe2_detection_model( + self.traceable_model, self.traceable_inputs + ) + return Caffe2Model(predict_net, init_net) + + def export_onnx(self): + """ + Export the model to ONNX format. + Note that the exported model contains custom ops only available in caffe2, therefore it + cannot be directly executed by other runtime (such as onnxruntime or TensorRT). + Post-processing or transformation passes may be applied on the model to accommodate + different runtimes, but we currently do not provide support for them. + + Returns: + onnx.ModelProto: an onnx model. 
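+
+        Examples:
+        ::
+            # a sketch only; the wrapped model must already have weights loaded
+            tracer = Caffe2Tracer(cfg, torch_model, inputs)
+            onnx_model = tracer.export_onnx()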
+ """ + from .caffe2_export import export_onnx_model as export_onnx_model_impl + + return export_onnx_model_impl(self.traceable_model, (self.traceable_inputs,)) + + def export_torchscript(self): + """ + Export the model to a ``torch.jit.TracedModule`` by tracing. + The returned object can be saved to a file by ``.save()``. + + Returns: + torch.jit.TracedModule: a torch TracedModule + """ + logger = logging.getLogger(__name__) + logger.info("Tracing the model with torch.jit.trace ...") + with torch.no_grad(): + return torch.jit.trace(self.traceable_model, (self.traceable_inputs,)) + + +class Caffe2Model(nn.Module): + """ + A wrapper around the traced model in Caffe2's protobuf format. + The exported graph has different inputs/outputs from the original Pytorch + model, as explained in :class:`Caffe2Tracer`. This class wraps around the + exported graph to simulate the same interface as the original Pytorch model. + It also provides functions to save/load models in Caffe2's format.' + + Examples: + :: + c2_model = Caffe2Tracer(cfg, torch_model, inputs).export_caffe2() + inputs = [{"image": img_tensor_CHW}] + outputs = c2_model(inputs) + orig_outputs = torch_model(inputs) + """ + + def __init__(self, predict_net, init_net): + super().__init__() + self.eval() # always in eval mode + self._predict_net = predict_net + self._init_net = init_net + self._predictor = None + + __init__.__HIDE_SPHINX_DOC__ = True + + @property + def predict_net(self): + """ + caffe2.core.Net: the underlying caffe2 predict net + """ + return self._predict_net + + @property + def init_net(self): + """ + caffe2.core.Net: the underlying caffe2 init net + """ + return self._init_net + + def save_protobuf(self, output_dir): + """ + Save the model as caffe2's protobuf format. + It saves the following files: + + * "model.pb": definition of the graph. Can be visualized with + tools like `netron `_. + * "model_init.pb": model parameters + * "model.pbtxt": human-readable definition of the graph. Not + needed for deployment. + + Args: + output_dir (str): the output directory to save protobuf files. + """ + logger = logging.getLogger(__name__) + logger.info("Saving model to {} ...".format(output_dir)) + if not PathManager.exists(output_dir): + PathManager.mkdirs(output_dir) + + with PathManager.open(os.path.join(output_dir, "model.pb"), "wb") as f: + f.write(self._predict_net.SerializeToString()) + with PathManager.open(os.path.join(output_dir, "model.pbtxt"), "w") as f: + f.write(str(self._predict_net)) + with PathManager.open(os.path.join(output_dir, "model_init.pb"), "wb") as f: + f.write(self._init_net.SerializeToString()) + + def save_graph(self, output_file, inputs=None): + """ + Save the graph as SVG format. + + Args: + output_file (str): a SVG file + inputs: optional inputs given to the model. + If given, the inputs will be used to run the graph to record + shape of every tensor. The shape information will be + saved together with the graph. 
+ """ + from .caffe2_export import run_and_save_graph + + if inputs is None: + save_graph(self._predict_net, output_file, op_only=False) + else: + size_divisibility = get_pb_arg_vali(self._predict_net, "size_divisibility", 0) + device = get_pb_arg_vals(self._predict_net, "device", b"cpu").decode("ascii") + inputs = convert_batched_inputs_to_c2_format(inputs, size_divisibility, device) + inputs = [x.cpu().numpy() for x in inputs] + run_and_save_graph(self._predict_net, self._init_net, inputs, output_file) + + @staticmethod + def load_protobuf(dir): + """ + Args: + dir (str): a directory used to save Caffe2Model with + :meth:`save_protobuf`. + The files "model.pb" and "model_init.pb" are needed. + + Returns: + Caffe2Model: the caffe2 model loaded from this directory. + """ + predict_net = caffe2_pb2.NetDef() + with PathManager.open(os.path.join(dir, "model.pb"), "rb") as f: + predict_net.ParseFromString(f.read()) + + init_net = caffe2_pb2.NetDef() + with PathManager.open(os.path.join(dir, "model_init.pb"), "rb") as f: + init_net.ParseFromString(f.read()) + + return Caffe2Model(predict_net, init_net) + + def __call__(self, inputs): + """ + An interface that wraps around a Caffe2 model and mimics detectron2's models' + input/output format. See details about the format at :doc:`/tutorials/models`. + This is used to compare the outputs of caffe2 model with its original torch model. + + Due to the extra conversion between Pytorch/Caffe2, this method is not meant for + benchmark. Because of the conversion, this method also has dependency + on detectron2 in order to convert to detectron2's output format. + """ + if self._predictor is None: + self._predictor = ProtobufDetectionModel(self._predict_net, self._init_net) + return self._predictor(inputs) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/export/c10.py b/motion-gan-pipeline/preprocessing/third/detectron2/export/c10.py new file mode 100644 index 0000000..25ee230 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/export/c10.py @@ -0,0 +1,534 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +import math +import torch +import torch.nn.functional as F + +from detectron2.layers import cat +from detectron2.layers.roi_align_rotated import ROIAlignRotated +from detectron2.modeling import poolers +from detectron2.modeling.proposal_generator import rpn +from detectron2.modeling.roi_heads.mask_head import mask_rcnn_inference +from detectron2.structures import Boxes, ImageList, Instances, Keypoints + +from .shared import alias, to_device + + +""" +This file contains caffe2-compatible implementation of several detectron2 components. +""" + + +class Caffe2Boxes(Boxes): + """ + Representing a list of detectron2.structures.Boxes from minibatch, each box + is represented by a 5d vector (batch index + 4 coordinates), or a 6d vector + (batch index + 5 coordinates) for RotatedBoxes. + """ + + def __init__(self, tensor): + assert isinstance(tensor, torch.Tensor) + assert tensor.dim() == 2 and tensor.size(-1) in [4, 5, 6], tensor.size() + # TODO: make tensor immutable when dim is Nx5 for Boxes, + # and Nx6 for RotatedBoxes? + self.tensor = tensor + + +# TODO clean up this class, maybe just extend Instances +class InstancesList(object): + """ + Tensor representation of a list of Instances object for a batch of images. + + When dealing with a batch of images with Caffe2 ops, a list of bboxes + (instances) are usually represented by single Tensor with size + (sigma(Ni), 5) or (sigma(Ni), 4) plus a batch split Tensor. 
This class is + for providing common functions to convert between these two representations. + """ + + def __init__(self, im_info, indices, extra_fields=None): + # [N, 3] -> (H, W, Scale) + self.im_info = im_info + # [N,] -> indice of batch to which the instance belongs + self.indices = indices + # [N, ...] + self.batch_extra_fields = extra_fields or {} + + self.image_size = self.im_info + + def get_fields(self): + """like `get_fields` in the Instances object, + but return each field in tensor representations""" + ret = {} + for k, v in self.batch_extra_fields.items(): + # if isinstance(v, torch.Tensor): + # tensor_rep = v + # elif isinstance(v, (Boxes, Keypoints)): + # tensor_rep = v.tensor + # else: + # raise ValueError("Can't find tensor representation for: {}".format()) + ret[k] = v + return ret + + def has(self, name): + return name in self.batch_extra_fields + + def set(self, name, value): + data_len = len(value) + if len(self.batch_extra_fields): + assert ( + len(self) == data_len + ), "Adding a field of length {} to a Instances of length {}".format(data_len, len(self)) + self.batch_extra_fields[name] = value + + def __setattr__(self, name, val): + if name in ["im_info", "indices", "batch_extra_fields", "image_size"]: + super().__setattr__(name, val) + else: + self.set(name, val) + + def __getattr__(self, name): + if name not in self.batch_extra_fields: + raise AttributeError("Cannot find field '{}' in the given Instances!".format(name)) + return self.batch_extra_fields[name] + + def __len__(self): + return len(self.indices) + + def flatten(self): + ret = [] + for _, v in self.batch_extra_fields.items(): + if isinstance(v, (Boxes, Keypoints)): + ret.append(v.tensor) + else: + ret.append(v) + return ret + + @staticmethod + def to_d2_instances_list(instances_list): + """ + Convert InstancesList to List[Instances]. The input `instances_list` can + also be a List[Instances], in this case this method is a non-op. + """ + if not isinstance(instances_list, InstancesList): + assert all(isinstance(x, Instances) for x in instances_list) + return instances_list + + ret = [] + for i, info in enumerate(instances_list.im_info): + instances = Instances(torch.Size([int(info[0].item()), int(info[1].item())])) + + ids = instances_list.indices == i + for k, v in instances_list.batch_extra_fields.items(): + if isinstance(v, torch.Tensor): + instances.set(k, v[ids]) + continue + elif isinstance(v, Boxes): + instances.set(k, v[ids, -4:]) + continue + + target_type, tensor_source = v + assert isinstance(tensor_source, torch.Tensor) + assert tensor_source.shape[0] == instances_list.indices.shape[0] + tensor_source = tensor_source[ids] + + if issubclass(target_type, Boxes): + instances.set(k, Boxes(tensor_source[:, -4:])) + elif issubclass(target_type, Keypoints): + instances.set(k, Keypoints(tensor_source)) + elif issubclass(target_type, torch.Tensor): + instances.set(k, tensor_source) + else: + raise ValueError("Can't handle targe type: {}".format(target_type)) + + ret.append(instances) + return ret + + +class Caffe2Compatible(object): + """ + A model can inherit this class to indicate that it can be traced and deployed with caffe2. + """ + + def _get_tensor_mode(self): + return self._tensor_mode + + def _set_tensor_mode(self, v): + self._tensor_mode = v + + tensor_mode = property(_get_tensor_mode, _set_tensor_mode) + """ + If true, the model expects C2-style tensor only inputs/outputs format. 
+ """ + + +class Caffe2RPN(Caffe2Compatible, rpn.RPN): + def _generate_proposals( + self, images, objectness_logits_pred, anchor_deltas_pred, gt_instances=None + ): + assert isinstance(images, ImageList) + if self.tensor_mode: + im_info = images.image_sizes + else: + im_info = torch.tensor([[im_sz[0], im_sz[1], 1.0] for im_sz in images.image_sizes]).to( + images.tensor.device + ) + assert isinstance(im_info, torch.Tensor) + + rpn_rois_list = [] + rpn_roi_probs_list = [] + for scores, bbox_deltas, cell_anchors_tensor, feat_stride in zip( + objectness_logits_pred, + anchor_deltas_pred, + iter(self.anchor_generator.cell_anchors), + self.anchor_generator.strides, + ): + scores = scores.detach() + bbox_deltas = bbox_deltas.detach() + + rpn_rois, rpn_roi_probs = torch.ops._caffe2.GenerateProposals( + scores, + bbox_deltas, + im_info, + cell_anchors_tensor, + spatial_scale=1.0 / feat_stride, + pre_nms_topN=self.pre_nms_topk[self.training], + post_nms_topN=self.post_nms_topk[self.training], + nms_thresh=self.nms_thresh, + min_size=self.min_box_size, + # correct_transform_coords=True, # deprecated argument + angle_bound_on=True, # Default + angle_bound_lo=-180, + angle_bound_hi=180, + clip_angle_thresh=1.0, # Default + legacy_plus_one=False, + ) + rpn_rois_list.append(rpn_rois) + rpn_roi_probs_list.append(rpn_roi_probs) + + # For FPN in D2, in RPN all proposals from different levels are concated + # together, ranked and picked by top post_nms_topk. Then in ROIPooler + # it calculates level_assignments and calls the RoIAlign from + # the corresponding level. + + if len(objectness_logits_pred) == 1: + rpn_rois = rpn_rois_list[0] + rpn_roi_probs = rpn_roi_probs_list[0] + else: + assert len(rpn_rois_list) == len(rpn_roi_probs_list) + rpn_post_nms_topN = self.post_nms_topk[self.training] + + device = rpn_rois_list[0].device + input_list = [to_device(x, "cpu") for x in (rpn_rois_list + rpn_roi_probs_list)] + + # TODO remove this after confirming rpn_max_level/rpn_min_level + # is not needed in CollectRpnProposals. + feature_strides = list(self.anchor_generator.strides) + rpn_min_level = int(math.log2(feature_strides[0])) + rpn_max_level = int(math.log2(feature_strides[-1])) + assert (rpn_max_level - rpn_min_level + 1) == len( + rpn_rois_list + ), "CollectRpnProposals requires continuous levels" + + rpn_rois = torch.ops._caffe2.CollectRpnProposals( + input_list, + # NOTE: in current implementation, rpn_max_level and rpn_min_level + # are not needed, only the subtraction of two matters and it + # can be infer from the number of inputs. Keep them now for + # consistency. 
+ rpn_max_level=2 + len(rpn_rois_list) - 1, + rpn_min_level=2, + rpn_post_nms_topN=rpn_post_nms_topN, + ) + rpn_rois = to_device(rpn_rois, device) + rpn_roi_probs = [] + + proposals = self.c2_postprocess(im_info, rpn_rois, rpn_roi_probs, self.tensor_mode) + return proposals, {} + + def forward(self, images, features, gt_instances=None): + assert not self.training + features = [features[f] for f in self.in_features] + objectness_logits_pred, anchor_deltas_pred = self.rpn_head(features) + return self._generate_proposals( + images, + objectness_logits_pred, + anchor_deltas_pred, + gt_instances, + ) + + @staticmethod + def c2_postprocess(im_info, rpn_rois, rpn_roi_probs, tensor_mode): + proposals = InstancesList( + im_info=im_info, + indices=rpn_rois[:, 0], + extra_fields={ + "proposal_boxes": Caffe2Boxes(rpn_rois), + "objectness_logits": (torch.Tensor, rpn_roi_probs), + }, + ) + if not tensor_mode: + proposals = InstancesList.to_d2_instances_list(proposals) + else: + proposals = [proposals] + return proposals + + +class Caffe2ROIPooler(Caffe2Compatible, poolers.ROIPooler): + @staticmethod + def c2_preprocess(box_lists): + assert all(isinstance(x, Boxes) for x in box_lists) + if all(isinstance(x, Caffe2Boxes) for x in box_lists): + # input is pure-tensor based + assert len(box_lists) == 1 + pooler_fmt_boxes = box_lists[0].tensor + else: + pooler_fmt_boxes = poolers.convert_boxes_to_pooler_format(box_lists) + return pooler_fmt_boxes + + def forward(self, x, box_lists): + assert not self.training + + pooler_fmt_boxes = self.c2_preprocess(box_lists) + num_level_assignments = len(self.level_poolers) + + if num_level_assignments == 1: + if isinstance(self.level_poolers[0], ROIAlignRotated): + c2_roi_align = torch.ops._caffe2.RoIAlignRotated + aligned = True + else: + c2_roi_align = torch.ops._caffe2.RoIAlign + aligned = self.level_poolers[0].aligned + + x0 = x[0] + if x0.is_quantized: + x0 = x0.dequantize() + + out = c2_roi_align( + x0, + pooler_fmt_boxes, + order="NCHW", + spatial_scale=float(self.level_poolers[0].spatial_scale), + pooled_h=int(self.output_size[0]), + pooled_w=int(self.output_size[1]), + sampling_ratio=int(self.level_poolers[0].sampling_ratio), + aligned=aligned, + ) + return out + + device = pooler_fmt_boxes.device + assert ( + self.max_level - self.min_level + 1 == 4 + ), "Currently DistributeFpnProposals only support 4 levels" + fpn_outputs = torch.ops._caffe2.DistributeFpnProposals( + to_device(pooler_fmt_boxes, "cpu"), + roi_canonical_scale=self.canonical_box_size, + roi_canonical_level=self.canonical_level, + roi_max_level=self.max_level, + roi_min_level=self.min_level, + legacy_plus_one=False, + ) + fpn_outputs = [to_device(x, device) for x in fpn_outputs] + + rois_fpn_list = fpn_outputs[:-1] + rois_idx_restore_int32 = fpn_outputs[-1] + + roi_feat_fpn_list = [] + for roi_fpn, x_level, pooler in zip(rois_fpn_list, x, self.level_poolers): + if isinstance(pooler, ROIAlignRotated): + c2_roi_align = torch.ops._caffe2.RoIAlignRotated + aligned = True + else: + c2_roi_align = torch.ops._caffe2.RoIAlign + aligned = bool(pooler.aligned) + + if x_level.is_quantized: + x_level = x_level.dequantize() + + roi_feat_fpn = c2_roi_align( + x_level, + roi_fpn, + order="NCHW", + spatial_scale=float(pooler.spatial_scale), + pooled_h=int(self.output_size[0]), + pooled_w=int(self.output_size[1]), + sampling_ratio=int(pooler.sampling_ratio), + aligned=aligned, + ) + roi_feat_fpn_list.append(roi_feat_fpn) + + roi_feat_shuffled = cat(roi_feat_fpn_list, dim=0) + assert roi_feat_shuffled.numel() > 0 
and rois_idx_restore_int32.numel() > 0, ( + "Caffe2 export requires tracing with a model checkpoint + input that can produce valid" + " detections. But no detections were obtained with the given checkpoint and input!" + ) + roi_feat = torch.ops._caffe2.BatchPermutation(roi_feat_shuffled, rois_idx_restore_int32) + return roi_feat + + +class Caffe2FastRCNNOutputsInference: + def __init__(self, tensor_mode): + self.tensor_mode = tensor_mode # whether the output is caffe2 tensor mode + + def __call__(self, box_predictor, predictions, proposals): + """equivalent to FastRCNNOutputLayers.inference""" + num_classes = box_predictor.num_classes + score_thresh = box_predictor.test_score_thresh + nms_thresh = box_predictor.test_nms_thresh + topk_per_image = box_predictor.test_topk_per_image + is_rotated = len(box_predictor.box2box_transform.weights) == 5 + + if is_rotated: + box_dim = 5 + assert box_predictor.box2box_transform.weights[4] == 1, ( + "The weights for Rotated BBoxTransform in C2 have only 4 dimensions," + + " thus enforcing the angle weight to be 1 for now" + ) + box2box_transform_weights = box_predictor.box2box_transform.weights[:4] + else: + box_dim = 4 + box2box_transform_weights = box_predictor.box2box_transform.weights + + class_logits, box_regression = predictions + if num_classes + 1 == class_logits.shape[1]: + class_prob = F.softmax(class_logits, -1) + else: + assert num_classes == class_logits.shape[1] + class_prob = F.sigmoid(class_logits) + # BoxWithNMSLimit will infer num_classes from the shape of the class_prob + # So append a zero column as placeholder for the background class + class_prob = torch.cat((class_prob, torch.zeros(class_prob.shape[0], 1)), dim=1) + + assert box_regression.shape[1] % box_dim == 0 + cls_agnostic_bbox_reg = box_regression.shape[1] // box_dim == 1 + + input_tensor_mode = proposals[0].proposal_boxes.tensor.shape[1] == box_dim + 1 + + rois = type(proposals[0].proposal_boxes).cat([p.proposal_boxes for p in proposals]) + device, dtype = rois.tensor.device, rois.tensor.dtype + if input_tensor_mode: + im_info = proposals[0].image_size + rois = rois.tensor + else: + im_info = torch.tensor( + [[sz[0], sz[1], 1.0] for sz in [x.image_size for x in proposals]] + ) + batch_ids = cat( + [ + torch.full((b, 1), i, dtype=dtype, device=device) + for i, b in enumerate(len(p) for p in proposals) + ], + dim=0, + ) + rois = torch.cat([batch_ids, rois.tensor], dim=1) + + roi_pred_bbox, roi_batch_splits = torch.ops._caffe2.BBoxTransform( + to_device(rois, "cpu"), + to_device(box_regression, "cpu"), + to_device(im_info, "cpu"), + weights=box2box_transform_weights, + apply_scale=True, + rotated=is_rotated, + angle_bound_on=True, + angle_bound_lo=-180, + angle_bound_hi=180, + clip_angle_thresh=1.0, + legacy_plus_one=False, + ) + roi_pred_bbox = to_device(roi_pred_bbox, device) + roi_batch_splits = to_device(roi_batch_splits, device) + + nms_outputs = torch.ops._caffe2.BoxWithNMSLimit( + to_device(class_prob, "cpu"), + to_device(roi_pred_bbox, "cpu"), + to_device(roi_batch_splits, "cpu"), + score_thresh=float(score_thresh), + nms=float(nms_thresh), + detections_per_im=int(topk_per_image), + soft_nms_enabled=False, + soft_nms_method="linear", + soft_nms_sigma=0.5, + soft_nms_min_score_thres=0.001, + rotated=is_rotated, + cls_agnostic_bbox_reg=cls_agnostic_bbox_reg, + input_boxes_include_bg_cls=False, + output_classes_include_bg_cls=False, + legacy_plus_one=False, + ) + roi_score_nms = to_device(nms_outputs[0], device) + roi_bbox_nms = to_device(nms_outputs[1], device) + 
roi_class_nms = to_device(nms_outputs[2], device)
+        roi_batch_splits_nms = to_device(nms_outputs[3], device)
+        roi_keeps_nms = to_device(nms_outputs[4], device)
+        roi_keeps_size_nms = to_device(nms_outputs[5], device)
+        if not self.tensor_mode:
+            roi_class_nms = roi_class_nms.to(torch.int64)
+
+        roi_batch_ids = cat(
+            [
+                torch.full((b, 1), i, dtype=dtype, device=device)
+                for i, b in enumerate(int(x.item()) for x in roi_batch_splits_nms)
+            ],
+            dim=0,
+        )
+
+        roi_class_nms = alias(roi_class_nms, "class_nms")
+        roi_score_nms = alias(roi_score_nms, "score_nms")
+        roi_bbox_nms = alias(roi_bbox_nms, "bbox_nms")
+        roi_batch_splits_nms = alias(roi_batch_splits_nms, "batch_splits_nms")
+        roi_keeps_nms = alias(roi_keeps_nms, "keeps_nms")
+        roi_keeps_size_nms = alias(roi_keeps_size_nms, "keeps_size_nms")
+
+        results = InstancesList(
+            im_info=im_info,
+            indices=roi_batch_ids[:, 0],
+            extra_fields={
+                "pred_boxes": Caffe2Boxes(roi_bbox_nms),
+                "scores": roi_score_nms,
+                "pred_classes": roi_class_nms,
+            },
+        )
+
+        if not self.tensor_mode:
+            results = InstancesList.to_d2_instances_list(results)
+            batch_splits = roi_batch_splits_nms.int().tolist()
+            kept_indices = list(roi_keeps_nms.to(torch.int64).split(batch_splits))
+        else:
+            results = [results]
+            kept_indices = [roi_keeps_nms]
+
+        return results, kept_indices
+
+
+class Caffe2MaskRCNNInference:
+    def __call__(self, pred_mask_logits, pred_instances):
+        """equivalent to mask_head.mask_rcnn_inference"""
+        if all(isinstance(x, InstancesList) for x in pred_instances):
+            assert len(pred_instances) == 1
+            mask_probs_pred = pred_mask_logits.sigmoid()
+            mask_probs_pred = alias(mask_probs_pred, "mask_fcn_probs")
+            pred_instances[0].pred_masks = mask_probs_pred
+        else:
+            mask_rcnn_inference(pred_mask_logits, pred_instances)
+
+
+class Caffe2KeypointRCNNInference:
+    def __init__(self, use_heatmap_max_keypoint):
+        self.use_heatmap_max_keypoint = use_heatmap_max_keypoint
+
+    def __call__(self, pred_keypoint_logits, pred_instances):
+        # just return the keypoint heatmap for now,
+        # there will be an option to call HeatmapMaxKeypointOp
+        output = alias(pred_keypoint_logits, "kps_score")
+        if all(isinstance(x, InstancesList) for x in pred_instances):
+            assert len(pred_instances) == 1
+            if self.use_heatmap_max_keypoint:
+                device = output.device
+                output = torch.ops._caffe2.HeatmapMaxKeypoint(
+                    to_device(output, "cpu"),
+                    pred_instances[0].pred_boxes.tensor,
+                    should_output_softmax=True,  # worth making it configurable?
+                )
+                output = to_device(output, device)
+                output = alias(output, "keypoints_out")
+            pred_instances[0].pred_keypoints = output
+        return pred_keypoint_logits
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/export/caffe2_export.py b/motion-gan-pipeline/preprocessing/third/detectron2/export/caffe2_export.py
new file mode 100644
index 0000000..74ac123
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/export/caffe2_export.py
@@ -0,0 +1,207 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+import copy
+import io
+import logging
+import numpy as np
+from typing import List
+import onnx
+import torch
+from caffe2.proto import caffe2_pb2
+from caffe2.python import core
+from caffe2.python.onnx.backend import Caffe2Backend
+from tabulate import tabulate
+from termcolor import colored
+from torch.onnx import OperatorExportTypes
+
+from .shared import (
+    ScopedWS,
+    construct_init_net_from_params,
+    fuse_alias_placeholder,
+    fuse_copy_between_cpu_and_gpu,
+    get_params_from_init_net,
+    group_norm_replace_aten_with_caffe2,
+    infer_device_type,
+    remove_dead_end_ops,
+    remove_reshape_for_fc,
+    save_graph,
+)
+
+logger = logging.getLogger(__name__)
+
+
+def export_onnx_model(model, inputs):
+    """
+    Trace and export a model to onnx format.
+
+    Args:
+        model (nn.Module):
+        inputs (tuple[args]): the model will be called by `model(*inputs)`
+
+    Returns:
+        an onnx model
+    """
+    assert isinstance(model, torch.nn.Module)
+
+    # make sure all modules are in eval mode, onnx may change the training state
+    # of the module if the states are not consistent
+    def _check_eval(module):
+        assert not module.training
+
+    model.apply(_check_eval)
+
+    # Export the model to ONNX
+    with torch.no_grad():
+        with io.BytesIO() as f:
+            torch.onnx.export(
+                model,
+                inputs,
+                f,
+                operator_export_type=OperatorExportTypes.ONNX_ATEN_FALLBACK,
+                # verbose=True,  # NOTE: uncomment this for debugging
+                # export_params=True,
+            )
+            onnx_model = onnx.load_from_string(f.getvalue())
+
+    # Apply ONNX's optimization
+    all_passes = onnx.optimizer.get_available_passes()
+    passes = ["fuse_bn_into_conv"]
+    assert all(p in all_passes for p in passes)
+    onnx_model = onnx.optimizer.optimize(onnx_model, passes)
+    return onnx_model
+
+
+def _op_stats(net_def):
+    type_count = {}
+    for t in [op.type for op in net_def.op]:
+        type_count[t] = type_count.get(t, 0) + 1
+    type_count_list = sorted(type_count.items(), key=lambda kv: kv[0])  # alphabet
+    type_count_list = sorted(type_count_list, key=lambda kv: -kv[1])  # count
+    return "\n".join("{:>4}x {}".format(count, name) for name, count in type_count_list)
+
+
+def _assign_device_option(
+    predict_net: caffe2_pb2.NetDef, init_net: caffe2_pb2.NetDef, tensor_inputs: List[torch.Tensor]
+):
+    """
+    An ONNX-exported network doesn't have a concept of device; assign the necessary
+    device option to each op in order to make it runnable on a GPU runtime.
+ """ + + def _get_device_type(torch_tensor): + assert torch_tensor.device.type in ["cpu", "cuda"] + assert torch_tensor.device.index == 0 + return torch_tensor.device.type + + def _assign_op_device_option(net_proto, net_ssa, blob_device_types): + for op, ssa_i in zip(net_proto.op, net_ssa): + if op.type in ["CopyCPUToGPU", "CopyGPUToCPU"]: + op.device_option.CopyFrom(core.DeviceOption(caffe2_pb2.CUDA, 0)) + else: + devices = [blob_device_types[b] for b in ssa_i[0] + ssa_i[1]] + assert all(d == devices[0] for d in devices) + if devices[0] == "cuda": + op.device_option.CopyFrom(core.DeviceOption(caffe2_pb2.CUDA, 0)) + + # update ops in predict_net + predict_net_input_device_types = { + (name, 0): _get_device_type(tensor) + for name, tensor in zip(predict_net.external_input, tensor_inputs) + } + predict_net_device_types = infer_device_type( + predict_net, known_status=predict_net_input_device_types, device_name_style="pytorch" + ) + predict_net_ssa, _ = core.get_ssa(predict_net) + _assign_op_device_option(predict_net, predict_net_ssa, predict_net_device_types) + + # update ops in init_net + init_net_ssa, versions = core.get_ssa(init_net) + init_net_output_device_types = { + (name, versions[name]): predict_net_device_types[(name, 0)] + for name in init_net.external_output + } + init_net_device_types = infer_device_type( + init_net, known_status=init_net_output_device_types, device_name_style="pytorch" + ) + _assign_op_device_option(init_net, init_net_ssa, init_net_device_types) + + +def export_caffe2_detection_model(model: torch.nn.Module, tensor_inputs: List[torch.Tensor]): + """ + Export a caffe2-compatible Detectron2 model to caffe2 format via ONNX. + + Arg: + model: a caffe2-compatible version of detectron2 model, defined in caffe2_modeling.py + tensor_inputs: a list of tensors that caffe2 model takes as input. + """ + model = copy.deepcopy(model) + assert isinstance(model, torch.nn.Module) + assert hasattr(model, "encode_additional_info") + + # Export via ONNX + logger.info( + "Exporting a {} model via ONNX ...".format(type(model).__name__) + + " Some warnings from ONNX are expected and are usually not to worry about." + ) + onnx_model = export_onnx_model(model, (tensor_inputs,)) + # Convert ONNX model to Caffe2 protobuf + init_net, predict_net = Caffe2Backend.onnx_graph_to_caffe2_net(onnx_model) + ops_table = [[op.type, op.input, op.output] for op in predict_net.op] + table = tabulate(ops_table, headers=["type", "input", "output"], tablefmt="pipe") + logger.info( + "ONNX export Done. Exported predict_net (before optimizations):\n" + colored(table, "cyan") + ) + + # Apply protobuf optimization + fuse_alias_placeholder(predict_net, init_net) + if any(t.device.type != "cpu" for t in tensor_inputs): + fuse_copy_between_cpu_and_gpu(predict_net) + remove_dead_end_ops(init_net) + _assign_device_option(predict_net, init_net, tensor_inputs) + params, device_options = get_params_from_init_net(init_net) + predict_net, params = remove_reshape_for_fc(predict_net, params) + init_net = construct_init_net_from_params(params, device_options) + group_norm_replace_aten_with_caffe2(predict_net) + + # Record necessary information for running the pb model in Detectron2 system. 
+ model.encode_additional_info(predict_net, init_net) + + logger.info("Operators used in predict_net: \n{}".format(_op_stats(predict_net))) + logger.info("Operators used in init_net: \n{}".format(_op_stats(init_net))) + + return predict_net, init_net + + +def run_and_save_graph(predict_net, init_net, tensor_inputs, graph_save_path): + """ + Run the caffe2 model on given inputs, recording the shape and draw the graph. + + predict_net/init_net: caffe2 model. + tensor_inputs: a list of tensors that caffe2 model takes as input. + graph_save_path: path for saving graph of exported model. + """ + + logger.info("Saving graph of ONNX exported model to {} ...".format(graph_save_path)) + save_graph(predict_net, graph_save_path, op_only=False) + + # Run the exported Caffe2 net + logger.info("Running ONNX exported model ...") + with ScopedWS("__ws_tmp__", True) as ws: + ws.RunNetOnce(init_net) + initialized_blobs = set(ws.Blobs()) + uninitialized = [inp for inp in predict_net.external_input if inp not in initialized_blobs] + for name, blob in zip(uninitialized, tensor_inputs): + ws.FeedBlob(name, blob) + + try: + ws.RunNetOnce(predict_net) + except RuntimeError as e: + logger.warning("Encountered RuntimeError: \n{}".format(str(e))) + + ws_blobs = {b: ws.FetchBlob(b) for b in ws.Blobs()} + blob_sizes = {b: ws_blobs[b].shape for b in ws_blobs if isinstance(ws_blobs[b], np.ndarray)} + + logger.info("Saving graph with blob shapes to {} ...".format(graph_save_path)) + save_graph(predict_net, graph_save_path, op_only=False, blob_sizes=blob_sizes) + + return ws_blobs diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/export/caffe2_inference.py b/motion-gan-pipeline/preprocessing/third/detectron2/export/caffe2_inference.py new file mode 100644 index 0000000..deb886c --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/export/caffe2_inference.py @@ -0,0 +1,161 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +import logging +import numpy as np +from itertools import count +import torch +from caffe2.proto import caffe2_pb2 +from caffe2.python import core + +from .caffe2_modeling import META_ARCH_CAFFE2_EXPORT_TYPE_MAP, convert_batched_inputs_to_c2_format +from .shared import ScopedWS, get_pb_arg_vali, get_pb_arg_vals, infer_device_type + +logger = logging.getLogger(__name__) + + +# ===== ref: mobile-vision predictor's 'Caffe2Wrapper' class ====== +class ProtobufModel(torch.nn.Module): + """ + Wrapper of a caffe2's protobuf model. + It works just like nn.Module, but running caffe2 under the hood. + Input/Output are tuple[tensor] that match the caffe2 net's external_input/output. 
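+
+    Example (illustrative only; `data_tensor` and `im_info_tensor` stand for the
+    tensors produced by convert_batched_inputs_to_c2_format):
+    ::
+        wrapper = ProtobufModel(predict_net, init_net)
+        outputs = wrapper((data_tensor, im_info_tensor))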
+ """ + + _ids = count(0) + + def __init__(self, predict_net, init_net): + logger.info(f"Initializing ProtobufModel for: {predict_net.name} ...") + super().__init__() + assert isinstance(predict_net, caffe2_pb2.NetDef) + assert isinstance(init_net, caffe2_pb2.NetDef) + # create unique temporary workspace for each instance + self.ws_name = "__tmp_ProtobufModel_{}__".format(next(self._ids)) + self.net = core.Net(predict_net) + + logger.info("Running init_net once to fill the parameters ...") + with ScopedWS(self.ws_name, is_reset=True, is_cleanup=False) as ws: + ws.RunNetOnce(init_net) + uninitialized_external_input = [] + for blob in self.net.Proto().external_input: + if blob not in ws.Blobs(): + uninitialized_external_input.append(blob) + ws.CreateBlob(blob) + ws.CreateNet(self.net) + + self._error_msgs = set() + self._input_blobs = uninitialized_external_input + + def _infer_output_devices(self, inputs): + """ + Returns: + list[str]: list of device for each external output + """ + + def _get_device_type(torch_tensor): + assert torch_tensor.device.type in ["cpu", "cuda"] + assert torch_tensor.device.index == 0 + return torch_tensor.device.type + + predict_net = self.net.Proto() + input_device_types = { + (name, 0): _get_device_type(tensor) for name, tensor in zip(self._input_blobs, inputs) + } + device_type_map = infer_device_type( + predict_net, known_status=input_device_types, device_name_style="pytorch" + ) + ssa, versions = core.get_ssa(predict_net) + versioned_outputs = [(name, versions[name]) for name in predict_net.external_output] + output_devices = [device_type_map[outp] for outp in versioned_outputs] + return output_devices + + def forward(self, inputs): + """ + Args: + inputs (tuple[torch.Tensor]) + + Returns: + tuple[torch.Tensor] + """ + assert len(inputs) == len(self._input_blobs), ( + f"Length of inputs ({len(inputs)}) " + f"doesn't match the required input blobs: {self._input_blobs}" + ) + + with ScopedWS(self.ws_name, is_reset=False, is_cleanup=False) as ws: + for b, tensor in zip(self._input_blobs, inputs): + ws.FeedBlob(b, tensor) + + try: + ws.RunNet(self.net.Proto().name) + except RuntimeError as e: + if not str(e) in self._error_msgs: + self._error_msgs.add(str(e)) + logger.warning("Encountered new RuntimeError: \n{}".format(str(e))) + logger.warning("Catch the error and use partial results.") + + c2_outputs = [ws.FetchBlob(b) for b in self.net.Proto().external_output] + # Remove outputs of current run, this is necessary in order to + # prevent fetching the result from previous run if the model fails + # in the middle. + for b in self.net.Proto().external_output: + # Needs to create uninitialized blob to make the net runable. + # This is "equivalent" to: ws.RemoveBlob(b) then ws.CreateBlob(b), + # but there'no such API. 
+                ws.FeedBlob(b, f"{b}, a C++ native class of type nullptr (uninitialized).")
+
+        # Cast output to torch.Tensor on the desired device
+        output_devices = (
+            self._infer_output_devices(inputs)
+            if any(t.device.type != "cpu" for t in inputs)
+            else ["cpu" for _ in self.net.Proto().external_output]
+        )
+
+        outputs = []
+        for name, c2_output, device in zip(
+            self.net.Proto().external_output, c2_outputs, output_devices
+        ):
+            if not isinstance(c2_output, np.ndarray):
+                raise RuntimeError(
+                    "Invalid output for blob {}, received: {}".format(name, c2_output)
+                )
+            outputs.append(torch.tensor(c2_output).to(device=device))
+        return tuple(outputs)
+
+
+class ProtobufDetectionModel(torch.nn.Module):
+    """
+    A class that works just like a pytorch meta arch in terms of inference, but runs
+    a caffe2 model under the hood.
+    """
+
+    def __init__(self, predict_net, init_net, *, convert_outputs=None):
+        """
+        Args:
+            predict_net, init_net (core.Net): caffe2 nets
+            convert_outputs (callable): a function that converts caffe2
+                outputs to the same format of the original pytorch model.
+                By default, use the one defined in the caffe2 meta_arch.
+        """
+        super().__init__()
+        self.protobuf_model = ProtobufModel(predict_net, init_net)
+        self.size_divisibility = get_pb_arg_vali(predict_net, "size_divisibility", 0)
+        self.device = get_pb_arg_vals(predict_net, "device", b"cpu").decode("ascii")
+
+        if convert_outputs is None:
+            meta_arch = get_pb_arg_vals(predict_net, "meta_architecture", b"GeneralizedRCNN")
+            meta_arch = META_ARCH_CAFFE2_EXPORT_TYPE_MAP[meta_arch.decode("ascii")]
+            self._convert_outputs = meta_arch.get_outputs_converter(predict_net, init_net)
+        else:
+            self._convert_outputs = convert_outputs
+
+    def _convert_inputs(self, batched_inputs):
+        # currently all models convert inputs in the same way
+        return convert_batched_inputs_to_c2_format(
+            batched_inputs, self.size_divisibility, self.device
+        )
+
+    def forward(self, batched_inputs):
+        c2_inputs = self._convert_inputs(batched_inputs)
+        c2_results = self.protobuf_model(c2_inputs)
+        c2_results = dict(zip(self.protobuf_model.net.Proto().external_output, c2_results))
+        return self._convert_outputs(batched_inputs, c2_inputs, c2_results)
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/export/caffe2_modeling.py b/motion-gan-pipeline/preprocessing/third/detectron2/export/caffe2_modeling.py
new file mode 100644
index 0000000..e00de4a
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/export/caffe2_modeling.py
@@ -0,0 +1,419 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+import functools
+import io
+import struct
+import types
+import torch
+
+from detectron2.modeling import meta_arch
+from detectron2.modeling.box_regression import Box2BoxTransform
+from detectron2.modeling.roi_heads import keypoint_head
+from detectron2.structures import Boxes, ImageList, Instances, RotatedBoxes
+
+from .c10 import Caffe2Compatible
+from .caffe2_patch import ROIHeadsPatcher, patch_generalized_rcnn
+from .shared import (
+    alias,
+    check_set_pb_arg,
+    get_pb_arg_floats,
+    get_pb_arg_valf,
+    get_pb_arg_vali,
+    get_pb_arg_vals,
+    mock_torch_nn_functional_interpolate,
+)
+
+
+def assemble_rcnn_outputs_by_name(image_sizes, tensor_outputs, force_mask_on=False):
+    """
+    A function to assemble caffe2 model's outputs (i.e. Dict[str, Tensor])
+    to detectron2's format (i.e. list of Instances instance).
+    This only works when the model follows the Caffe2 detectron's naming convention.
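+    Expected blob names follow the convention used below: "bbox_nms", "score_nms"
+    and "class_nms", plus optionally "mask_fcn_probs", "keypoints_out" or "kps_score".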
+
+    Args:
+        image_sizes (List[List[int, int]]): [H, W] of every image.
+        tensor_outputs (Dict[str, Tensor]): external_output to its tensor.
+
+        force_mask_on (bool): if true, it makes sure there will be pred_masks even
+            if no masks are found in tensor_outputs (usually due to model crash)
+    """
+
+    results = [Instances(image_size) for image_size in image_sizes]
+
+    batch_splits = tensor_outputs.get("batch_splits", None)
+    if batch_splits:
+        raise NotImplementedError()
+    assert len(image_sizes) == 1
+    result = results[0]
+
+    bbox_nms = tensor_outputs["bbox_nms"]
+    score_nms = tensor_outputs["score_nms"]
+    class_nms = tensor_outputs["class_nms"]
+    # Detection will always succeed because Conv supports 0-batch
+    assert bbox_nms is not None
+    assert score_nms is not None
+    assert class_nms is not None
+    if bbox_nms.shape[1] == 5:
+        result.pred_boxes = RotatedBoxes(bbox_nms)
+    else:
+        result.pred_boxes = Boxes(bbox_nms)
+    result.scores = score_nms
+    result.pred_classes = class_nms.to(torch.int64)
+
+    mask_fcn_probs = tensor_outputs.get("mask_fcn_probs", None)
+    if mask_fcn_probs is not None:
+        # finish the mask pred
+        mask_probs_pred = mask_fcn_probs
+        num_masks = mask_probs_pred.shape[0]
+        class_pred = result.pred_classes
+        indices = torch.arange(num_masks, device=class_pred.device)
+        mask_probs_pred = mask_probs_pred[indices, class_pred][:, None]
+        result.pred_masks = mask_probs_pred
+    elif force_mask_on:
+        # NOTE: there's no way to know the height/width of mask here, it won't be
+        # used anyway when batch size is 0, so just set them to 0.
+        result.pred_masks = torch.zeros([0, 1, 0, 0], dtype=torch.uint8)
+
+    keypoints_out = tensor_outputs.get("keypoints_out", None)
+    kps_score = tensor_outputs.get("kps_score", None)
+    if keypoints_out is not None:
+        # keypoints_out: [N, 4, #keypoints], where 4 is in order of (x, y, score, prob)
+        keypoints_tensor = keypoints_out
+        # NOTE: it's possible that prob is not calculated if "should_output_softmax"
+        # is set to False in HeatmapMaxKeypoint, so just using raw score, seems
+        # it doesn't affect mAP. TODO: check more carefully.
+        keypoint_xyp = keypoints_tensor.transpose(1, 2)[:, :, [0, 1, 2]]
+        result.pred_keypoints = keypoint_xyp
+    elif kps_score is not None:
+        # keypoint heatmap to sparse data structure
+        pred_keypoint_logits = kps_score
+        keypoint_head.keypoint_rcnn_inference(pred_keypoint_logits, [result])
+
+    return results
+
+
+def _cast_to_f32(f64):
+    return struct.unpack("f", struct.pack("f", f64))[0]
+
+
+def set_caffe2_compatible_tensor_mode(model, enable=True):
+    def _fn(m):
+        if isinstance(m, Caffe2Compatible):
+            m.tensor_mode = enable
+
+    model.apply(_fn)
+
+
+def convert_batched_inputs_to_c2_format(batched_inputs, size_divisibility, device):
+    """
+    See get_caffe2_inputs() below.
+    """
+    assert all(isinstance(x, dict) for x in batched_inputs)
+    assert all(x["image"].dim() == 3 for x in batched_inputs)
+
+    images = [x["image"] for x in batched_inputs]
+    images = ImageList.from_tensors(images, size_divisibility)
+
+    im_info = []
+    for input_per_image, image_size in zip(batched_inputs, images.image_sizes):
+        target_height = input_per_image.get("height", image_size[0])
+        target_width = input_per_image.get("width", image_size[1])  # noqa
+        # NOTE: The scale inside im_info is kept as convention and for providing
+        # post-processing information if further processing is needed. For
+        # current Caffe2 model definitions that don't include post-processing inside
+        # the model, this number is not used.
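+        # For example (illustrative numbers): an image resized/padded to
+        # image_size[0] == 800 with a requested output "height" of 480 gets
+        # scale = 480 / 800 = 0.6 recorded in its im_info row.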
+        # NOTE: There can be a slight difference between width and height
+        # scales; using a single number can result in numerical differences
+        # compared with D2's post-processing.
+        scale = target_height / image_size[0]
+        im_info.append([image_size[0], image_size[1], scale])
+    im_info = torch.Tensor(im_info)
+
+    return images.tensor.to(device), im_info.to(device)
+
+
+class Caffe2MetaArch(Caffe2Compatible, torch.nn.Module):
+    """
+    Base class for caffe2-compatible implementation of a meta architecture.
+    The forward is traceable and its traced graph can be converted to caffe2
+    graph through ONNX.
+    """
+
+    def __init__(self, cfg, torch_model):
+        """
+        Args:
+            cfg (CfgNode):
+            torch_model (nn.Module): the detectron2 model (meta_arch) to be
+                converted.
+        """
+        super().__init__()
+        self._wrapped_model = torch_model
+        self.eval()
+        set_caffe2_compatible_tensor_mode(self, True)
+
+    def get_caffe2_inputs(self, batched_inputs):
+        """
+        Convert pytorch-style structured inputs to caffe2-style inputs that
+        are tuples of tensors.
+
+        Args:
+            batched_inputs (list[dict]): inputs to a detectron2 model
+                in its standard format. Each dict has "image" (CHW tensor), and optionally
+                "height" and "width".
+
+        Returns:
+            tuple[Tensor]:
+                tuple of tensors that will be the inputs to the
+                :meth:`forward` method. For existing models, the first
+                is an NCHW tensor (padded and batched); the second is
+                an im_info Nx3 tensor, where the rows are
+                (height, width, unused legacy parameter)
+        """
+        return convert_batched_inputs_to_c2_format(
+            batched_inputs,
+            self._wrapped_model.backbone.size_divisibility,
+            self._wrapped_model.device,
+        )
+
+    def encode_additional_info(self, predict_net, init_net):
+        """
+        Save extra metadata that will be used by inference in the output protobuf.
+        """
+        pass
+
+    def forward(self, inputs):
+        """
+        Run the forward in caffe2-style. It has to use caffe2-compatible ops
+        and the method will be used for tracing.
+
+        Args:
+            inputs (tuple[Tensor]): inputs defined by :meth:`get_caffe2_input`.
+                They will be the inputs of the converted caffe2 graph.
+
+        Returns:
+            tuple[Tensor]: output tensors. They will be the outputs of the
+                converted caffe2 graph.
+        """
+        raise NotImplementedError
+
+    def _caffe2_preprocess_image(self, inputs):
+        """
+        Caffe2 implementation of preprocess_image, which is called inside each MetaArch's forward.
+        It normalizes the input images, and the final caffe2 graph assumes the
+        inputs have been batched already.
+        """
+        data, im_info = inputs
+        data = alias(data, "data")
+        im_info = alias(im_info, "im_info")
+        mean, std = self._wrapped_model.pixel_mean, self._wrapped_model.pixel_std
+        normalized_data = (data - mean) / std
+        normalized_data = alias(normalized_data, "normalized_data")
+
+        # Pack (data, im_info) into ImageList which is recognized by self.inference.
+        images = ImageList(tensor=normalized_data, image_sizes=im_info)
+        return images
+
+    @staticmethod
+    def get_outputs_converter(predict_net, init_net):
+        """
+        Creates a function that converts outputs of the caffe2 model to
+        detectron2's standard format.
+        The function uses information in `predict_net` and `init_net` that are
+        available at inference time. Therefore the function logic can be used in inference.
+
+        The returned function has the following signature:
+
+            def convert(batched_inputs, c2_inputs, c2_results) -> detectron2_outputs
+
+        Where
+
+        * batched_inputs (list[dict]): the original input format of the meta arch
+        * c2_inputs (tuple[Tensor]): the caffe2 inputs.
+ * c2_results (dict[str, Tensor]): the caffe2 output format, + corresponding to the outputs of the :meth:`forward` function. + * detectron2_outputs: the original output format of the meta arch. + + This function can be used to compare the outputs of the original meta arch and + the converted caffe2 graph. + + Returns: + callable: a callable of the above signature. + """ + raise NotImplementedError + + +class Caffe2GeneralizedRCNN(Caffe2MetaArch): + def __init__(self, cfg, torch_model): + assert isinstance(torch_model, meta_arch.GeneralizedRCNN) + torch_model = patch_generalized_rcnn(torch_model) + super().__init__(cfg, torch_model) + + try: + use_heatmap_max_keypoint = cfg.EXPORT_CAFFE2.USE_HEATMAP_MAX_KEYPOINT + except AttributeError: + use_heatmap_max_keypoint = False + self.roi_heads_patcher = ROIHeadsPatcher( + self._wrapped_model.roi_heads, use_heatmap_max_keypoint + ) + + def encode_additional_info(self, predict_net, init_net): + size_divisibility = self._wrapped_model.backbone.size_divisibility + check_set_pb_arg(predict_net, "size_divisibility", "i", size_divisibility) + check_set_pb_arg( + predict_net, "device", "s", str.encode(str(self._wrapped_model.device), "ascii") + ) + check_set_pb_arg(predict_net, "meta_architecture", "s", b"GeneralizedRCNN") + + @mock_torch_nn_functional_interpolate() + def forward(self, inputs): + if not self.tensor_mode: + return self._wrapped_model.inference(inputs) + images = self._caffe2_preprocess_image(inputs) + features = self._wrapped_model.backbone(images.tensor) + proposals, _ = self._wrapped_model.proposal_generator(images, features) + with self.roi_heads_patcher.mock_roi_heads(): + detector_results, _ = self._wrapped_model.roi_heads(images, features, proposals) + return tuple(detector_results[0].flatten()) + + @staticmethod + def get_outputs_converter(predict_net, init_net): + def f(batched_inputs, c2_inputs, c2_results): + _, im_info = c2_inputs + image_sizes = [[int(im[0]), int(im[1])] for im in im_info] + results = assemble_rcnn_outputs_by_name(image_sizes, c2_results) + return meta_arch.GeneralizedRCNN._postprocess(results, batched_inputs, image_sizes) + + return f + + +class Caffe2RetinaNet(Caffe2MetaArch): + def __init__(self, cfg, torch_model): + assert isinstance(torch_model, meta_arch.RetinaNet) + super().__init__(cfg, torch_model) + + @mock_torch_nn_functional_interpolate() + def forward(self, inputs): + assert self.tensor_mode + images = self._caffe2_preprocess_image(inputs) + + # explicitly return the images sizes to avoid removing "im_info" by ONNX + # since it's not used in the forward path + return_tensors = [images.image_sizes] + + features = self._wrapped_model.backbone(images.tensor) + features = [features[f] for f in self._wrapped_model.head_in_features] + for i, feature_i in enumerate(features): + features[i] = alias(feature_i, "feature_{}".format(i), is_backward=True) + return_tensors.append(features[i]) + + pred_logits, pred_anchor_deltas = self._wrapped_model.head(features) + for i, (box_cls_i, box_delta_i) in enumerate(zip(pred_logits, pred_anchor_deltas)): + return_tensors.append(alias(box_cls_i, "box_cls_{}".format(i))) + return_tensors.append(alias(box_delta_i, "box_delta_{}".format(i))) + + return tuple(return_tensors) + + def encode_additional_info(self, predict_net, init_net): + size_divisibility = self._wrapped_model.backbone.size_divisibility + check_set_pb_arg(predict_net, "size_divisibility", "i", size_divisibility) + check_set_pb_arg( + predict_net, "device", "s", str.encode(str(self._wrapped_model.device), 
"ascii") + ) + check_set_pb_arg(predict_net, "meta_architecture", "s", b"RetinaNet") + + # Inference parameters: + check_set_pb_arg( + predict_net, "score_threshold", "f", _cast_to_f32(self._wrapped_model.test_score_thresh) + ) + check_set_pb_arg( + predict_net, "topk_candidates", "i", self._wrapped_model.test_topk_candidates + ) + check_set_pb_arg( + predict_net, "nms_threshold", "f", _cast_to_f32(self._wrapped_model.test_nms_thresh) + ) + check_set_pb_arg( + predict_net, + "max_detections_per_image", + "i", + self._wrapped_model.max_detections_per_image, + ) + + check_set_pb_arg( + predict_net, + "bbox_reg_weights", + "floats", + [_cast_to_f32(w) for w in self._wrapped_model.box2box_transform.weights], + ) + self._encode_anchor_generator_cfg(predict_net) + + def _encode_anchor_generator_cfg(self, predict_net): + # serialize anchor_generator for future use + serialized_anchor_generator = io.BytesIO() + torch.save(self._wrapped_model.anchor_generator, serialized_anchor_generator) + # Ideally we can put anchor generating inside the model, then we don't + # need to store this information. + bytes = serialized_anchor_generator.getvalue() + check_set_pb_arg(predict_net, "serialized_anchor_generator", "s", bytes) + + @staticmethod + def get_outputs_converter(predict_net, init_net): + self = types.SimpleNamespace() + serialized_anchor_generator = io.BytesIO( + get_pb_arg_vals(predict_net, "serialized_anchor_generator", None) + ) + self.anchor_generator = torch.load(serialized_anchor_generator) + bbox_reg_weights = get_pb_arg_floats(predict_net, "bbox_reg_weights", None) + self.box2box_transform = Box2BoxTransform(weights=tuple(bbox_reg_weights)) + self.test_score_thresh = get_pb_arg_valf(predict_net, "score_threshold", None) + self.test_topk_candidates = get_pb_arg_vali(predict_net, "topk_candidates", None) + self.test_nms_thresh = get_pb_arg_valf(predict_net, "nms_threshold", None) + self.max_detections_per_image = get_pb_arg_vali( + predict_net, "max_detections_per_image", None + ) + + # hack to reuse inference code from RetinaNet + for meth in [ + "forward_inference", + "inference_single_image", + "_transpose_dense_predictions", + "_decode_multi_level_predictions", + "_decode_per_level_predictions", + ]: + setattr(self, meth, functools.partial(getattr(meta_arch.RetinaNet, meth), self)) + + def f(batched_inputs, c2_inputs, c2_results): + _, im_info = c2_inputs + image_sizes = [[int(im[0]), int(im[1])] for im in im_info] + dummy_images = ImageList( + torch.randn( + ( + len(im_info), + 3, + ) + + tuple(image_sizes[0]) + ), + image_sizes, + ) + + num_features = len([x for x in c2_results.keys() if x.startswith("box_cls_")]) + pred_logits = [c2_results["box_cls_{}".format(i)] for i in range(num_features)] + pred_anchor_deltas = [c2_results["box_delta_{}".format(i)] for i in range(num_features)] + + # For each feature level, feature should have the same batch size and + # spatial dimension as the box_cls and box_delta. 
+            dummy_features = [x.clone()[:, 0:0, :, :] for x in pred_logits]
+            # self.num_classes can be inferred
+            self.num_classes = pred_logits[0].shape[1] // (pred_anchor_deltas[0].shape[1] // 4)
+
+            results = self.forward_inference(
+                dummy_images, dummy_features, [pred_logits, pred_anchor_deltas]
+            )
+            return meta_arch.GeneralizedRCNN._postprocess(results, batched_inputs, image_sizes)
+
+        return f
+
+
+META_ARCH_CAFFE2_EXPORT_TYPE_MAP = {
+    "GeneralizedRCNN": Caffe2GeneralizedRCNN,
+    "RetinaNet": Caffe2RetinaNet,
+}
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/export/caffe2_patch.py b/motion-gan-pipeline/preprocessing/third/detectron2/export/caffe2_patch.py
new file mode 100644
index 0000000..c9eee59
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/export/caffe2_patch.py
@@ -0,0 +1,152 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+import contextlib
+from unittest import mock
+import torch
+
+from detectron2.modeling import poolers
+from detectron2.modeling.proposal_generator import rpn
+from detectron2.modeling.roi_heads import keypoint_head, mask_head
+from detectron2.modeling.roi_heads.fast_rcnn import FastRCNNOutputLayers
+
+from .c10 import (
+    Caffe2Compatible,
+    Caffe2FastRCNNOutputsInference,
+    Caffe2KeypointRCNNInference,
+    Caffe2MaskRCNNInference,
+    Caffe2ROIPooler,
+    Caffe2RPN,
+)
+
+
+class GenericMixin(object):
+    pass
+
+
+class Caffe2CompatibleConverter(object):
+    """
+    A GenericUpdater which implements the `create_from` interface by modifying
+    the module object and assigning it another class, replaceCls.
+    """
+
+    def __init__(self, replaceCls):
+        self.replaceCls = replaceCls
+
+    def create_from(self, module):
+        # update module's class to the new class
+        assert isinstance(module, torch.nn.Module)
+        if issubclass(self.replaceCls, GenericMixin):
+            # replaceCls should act as mixin, create a new class on-the-fly
+            new_class = type(
+                "{}MixedWith{}".format(self.replaceCls.__name__, module.__class__.__name__),
+                (self.replaceCls, module.__class__),
+                {},  # {"new_method": lambda self: ...},
+            )
+            module.__class__ = new_class
+        else:
+            # replaceCls is a complete class, this allows arbitrary class swap
+            module.__class__ = self.replaceCls
+
+        # initialize Caffe2Compatible
+        if isinstance(module, Caffe2Compatible):
+            module.tensor_mode = False
+
+        return module
+
+
+def patch(model, target, updater, *args, **kwargs):
+    """
+    Recursively (post-order) update all modules with the target type and its
+    subclasses, making an initialization/composition/inheritance/... via the
+    updater.create_from.
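+
+    Example (illustrative; this is what patch_generalized_rcnn below does for RPN):
+    ::
+        model = patch(model, rpn.RPN, Caffe2CompatibleConverter(Caffe2RPN))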
+ """ + for name, module in model.named_children(): + model._modules[name] = patch(module, target, updater, *args, **kwargs) + if isinstance(model, target): + return updater.create_from(model, *args, **kwargs) + return model + + +def patch_generalized_rcnn(model): + ccc = Caffe2CompatibleConverter + model = patch(model, rpn.RPN, ccc(Caffe2RPN)) + model = patch(model, poolers.ROIPooler, ccc(Caffe2ROIPooler)) + + return model + + +@contextlib.contextmanager +def mock_fastrcnn_outputs_inference( + tensor_mode, check=True, box_predictor_type=FastRCNNOutputLayers +): + with mock.patch.object( + box_predictor_type, + "inference", + autospec=True, + side_effect=Caffe2FastRCNNOutputsInference(tensor_mode), + ) as mocked_func: + yield + if check: + assert mocked_func.call_count > 0 + + +@contextlib.contextmanager +def mock_mask_rcnn_inference(tensor_mode, patched_module, check=True): + with mock.patch( + "{}.mask_rcnn_inference".format(patched_module), side_effect=Caffe2MaskRCNNInference() + ) as mocked_func: + yield + if check: + assert mocked_func.call_count > 0 + + +@contextlib.contextmanager +def mock_keypoint_rcnn_inference(tensor_mode, patched_module, use_heatmap_max_keypoint, check=True): + with mock.patch( + "{}.keypoint_rcnn_inference".format(patched_module), + side_effect=Caffe2KeypointRCNNInference(use_heatmap_max_keypoint), + ) as mocked_func: + yield + if check: + assert mocked_func.call_count > 0 + + +class ROIHeadsPatcher: + def __init__(self, heads, use_heatmap_max_keypoint): + self.heads = heads + self.use_heatmap_max_keypoint = use_heatmap_max_keypoint + + @contextlib.contextmanager + def mock_roi_heads(self, tensor_mode=True): + """ + Patching several inference functions inside ROIHeads and its subclasses + + Args: + tensor_mode (bool): whether the inputs/outputs are caffe2's tensor + format or not. Default to True. + """ + # NOTE: this requries the `keypoint_rcnn_inference` and `mask_rcnn_inference` + # are called inside the same file as BaseXxxHead due to using mock.patch. + kpt_heads_mod = keypoint_head.BaseKeypointRCNNHead.__module__ + mask_head_mod = mask_head.BaseMaskRCNNHead.__module__ + + mock_ctx_managers = [ + mock_fastrcnn_outputs_inference( + tensor_mode=tensor_mode, + check=True, + box_predictor_type=type(self.heads.box_predictor), + ) + ] + if getattr(self.heads, "keypoint_on", False): + mock_ctx_managers += [ + mock_keypoint_rcnn_inference( + tensor_mode, kpt_heads_mod, self.use_heatmap_max_keypoint + ) + ] + if getattr(self.heads, "mask_on", False): + mock_ctx_managers += [mock_mask_rcnn_inference(tensor_mode, mask_head_mod)] + + with contextlib.ExitStack() as stack: # python 3.3+ + for mgr in mock_ctx_managers: + stack.enter_context(mgr) + yield diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/export/flatten.py b/motion-gan-pipeline/preprocessing/third/detectron2/export/flatten.py new file mode 100644 index 0000000..f5ba429 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/export/flatten.py @@ -0,0 +1,330 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+import collections +from dataclasses import dataclass +from typing import Callable, List, Optional, Tuple +import torch +from torch import nn + +from detectron2.structures import Boxes, Instances, ROIMasks +from detectron2.utils.registry import _convert_target_to_string, locate + +from .torchscript_patch import patch_builtin_len + + +@dataclass +class Schema: + """ + A Schema defines how to flatten a possibly hierarchical object into tuple of + primitive objects, so it can be used as inputs/outputs of PyTorch's tracing. + + PyTorch does not support tracing a function that produces rich output + structures (e.g. dict, Instances, Boxes). To trace such a function, we + flatten the rich object into tuple of tensors, and return this tuple of tensors + instead. Meanwhile, we also need to know how to "rebuild" the original object + from the flattened results, so we can evaluate the flattened results. + A Schema defines how to flatten an object, and while flattening it, it records + necessary schemas so that the object can be rebuilt using the flattened outputs. + + The flattened object and the schema object is returned by ``.flatten`` classmethod. + Then the original object can be rebuilt with the ``__call__`` method of schema. + + A Schema is a dataclass that can be serialized easily. + """ + + # inspired by FetchMapper in tensorflow/python/client/session.py + + @classmethod + def flatten(cls, obj): + raise NotImplementedError + + def __call__(self, values): + raise NotImplementedError + + @staticmethod + def _concat(values): + ret = () + sizes = [] + for v in values: + assert isinstance(v, tuple), "Flattened results must be a tuple" + ret = ret + v + sizes.append(len(v)) + return ret, sizes + + @staticmethod + def _split(values, sizes): + if len(sizes): + expected_len = sum(sizes) + assert ( + len(values) == expected_len + ), f"Values has length {len(values)} but expect length {expected_len}." + ret = [] + for k in range(len(sizes)): + begin, end = sum(sizes[:k]), sum(sizes[: k + 1]) + ret.append(values[begin:end]) + return ret + + +@dataclass +class ListSchema(Schema): + schemas: List[Schema] # the schemas that define how to flatten each element in the list + sizes: List[int] # the flattened length of each element + + def __call__(self, values): + values = self._split(values, self.sizes) + if len(values) != len(self.schemas): + raise ValueError( + f"Values has length {len(values)} but schemas " f"has length {len(self.schemas)}!" 
+ ) + values = [m(v) for m, v in zip(self.schemas, values)] + return list(values) + + @classmethod + def flatten(cls, obj): + res = [flatten_to_tuple(k) for k in obj] + values, sizes = cls._concat([k[0] for k in res]) + return values, cls([k[1] for k in res], sizes) + + +@dataclass +class TupleSchema(ListSchema): + def __call__(self, values): + return tuple(super().__call__(values)) + + +@dataclass +class IdentitySchema(Schema): + def __call__(self, values): + return values[0] + + @classmethod + def flatten(cls, obj): + return (obj,), cls() + + +@dataclass +class DictSchema(ListSchema): + keys: List[str] + + def __call__(self, values): + values = super().__call__(values) + return dict(zip(self.keys, values)) + + @classmethod + def flatten(cls, obj): + for k in obj.keys(): + if not isinstance(k, str): + raise KeyError("Only support flattening dictionaries if keys are str.") + keys = sorted(obj.keys()) + values = [obj[k] for k in keys] + ret, schema = ListSchema.flatten(values) + return ret, cls(schema.schemas, schema.sizes, keys) + + +@dataclass +class InstancesSchema(DictSchema): + def __call__(self, values): + image_size, fields = values[-1], values[:-1] + fields = super().__call__(fields) + return Instances(image_size, **fields) + + @classmethod + def flatten(cls, obj): + ret, schema = super().flatten(obj.get_fields()) + size = obj.image_size + if not isinstance(size, torch.Tensor): + size = torch.tensor(size) + return ret + (size,), schema + + +@dataclass +class TensorWrapSchema(Schema): + """ + For classes that are simple wrapper of tensors, e.g. + Boxes, RotatedBoxes, BitMasks + """ + + class_name: str + + def __call__(self, values): + return locate(self.class_name)(values[0]) + + @classmethod + def flatten(cls, obj): + return (obj.tensor,), cls(_convert_target_to_string(type(obj))) + + +# if more custom structures needed in the future, can allow +# passing in extra schemas for custom types +def flatten_to_tuple(obj): + """ + Flatten an object so it can be used for PyTorch tracing. + Also returns how to rebuild the original object from the flattened outputs. + + Returns: + res (tuple): the flattened results that can be used as tracing outputs + schema: an object with a ``__call__`` method such that ``schema(res) == obj``. + It is a pure dataclass that can be serialized. + """ + schemas = [ + ((str, bytes), IdentitySchema), + (list, ListSchema), + (tuple, TupleSchema), + (collections.abc.Mapping, DictSchema), + (Instances, InstancesSchema), + ((Boxes, ROIMasks), TensorWrapSchema), + ] + for klass, schema in schemas: + if isinstance(obj, klass): + F = schema + break + else: + F = IdentitySchema + + return F.flatten(obj) + + +class TracingAdapter(nn.Module): + """ + A model may take rich input/output format (e.g. dict or custom classes), + but `torch.jit.trace` requires tuple of tensors as input/output. + This adapter flattens input/output format of a model so it becomes traceable. + + It also records the necessary schema to rebuild model's inputs/outputs from flattened + inputs/outputs. 
+
+    Example:
+    ::
+        outputs = model(inputs)  # inputs/outputs may be rich structure
+        adapter = TracingAdapter(model, inputs)
+
+        # can now trace the model, with adapter.flattened_inputs, or another
+        # tuple of tensors with the same length and meaning
+        traced = torch.jit.trace(adapter, adapter.flattened_inputs)
+
+        # traced model can only produce flattened outputs (tuple of tensors)
+        flattened_outputs = traced(*adapter.flattened_inputs)
+        # adapter knows the schema to convert it back (new_outputs == outputs)
+        new_outputs = adapter.outputs_schema(flattened_outputs)
+    """
+
+    flattened_inputs: Tuple[torch.Tensor] = None
+    """
+    Flattened version of inputs given to this class's constructor.
+    """
+
+    inputs_schema: Schema = None
+    """
+    Schema of the inputs given to this class's constructor.
+    """
+
+    outputs_schema: Schema = None
+    """
+    Schema of the output produced by calling the given model with inputs.
+    """
+
+    def __init__(
+        self,
+        model: nn.Module,
+        inputs,
+        inference_func: Optional[Callable] = None,
+        allow_non_tensor: bool = False,
+    ):
+        """
+        Args:
+            model: an nn.Module
+            inputs: An input argument or a tuple of input arguments used to call model.
+                After flattening, it has to only consist of tensors.
+            inference_func: a callable that takes (model, *inputs), calls the
+                model with inputs, and return outputs. By default it
+                is ``lambda model, *inputs: model(*inputs)``. Can be overridden
+                if you need to call the model differently.
+            allow_non_tensor: allow inputs/outputs to contain non-tensor objects.
+                This option will filter out non-tensor objects to make the
+                model traceable, but ``inputs_schema``/``outputs_schema`` cannot be
+                used anymore because inputs/outputs cannot be rebuilt from pure tensors.
+                This is useful when you're only interested in the single trace of
+                execution (e.g. for flop count), but not interested in
+                generalizing the traced graph to new inputs.
+        """
+        super().__init__()
+        if isinstance(model, (nn.parallel.distributed.DistributedDataParallel, nn.DataParallel)):
+            model = model.module
+        self.model = model
+        if not isinstance(inputs, tuple):
+            inputs = (inputs,)
+        self.inputs = inputs
+        self.allow_non_tensor = allow_non_tensor
+
+        if inference_func is None:
+            inference_func = lambda model, *inputs: model(*inputs)  # noqa
+        self.inference_func = inference_func
+
+        self.flattened_inputs, self.inputs_schema = flatten_to_tuple(inputs)
+
+        if all(isinstance(x, torch.Tensor) for x in self.flattened_inputs):
+            return
+        if self.allow_non_tensor:
+            self.flattened_inputs = tuple(
+                [x for x in self.flattened_inputs if isinstance(x, torch.Tensor)]
+            )
+            self.inputs_schema = None
+        else:
+            for input in self.flattened_inputs:
+                if not isinstance(input, torch.Tensor):
+                    raise ValueError(
+                        "Inputs for tracing must only contain tensors. "
+                        f"Got a {type(input)} instead."
+                    )
+
+    def forward(self, *args: torch.Tensor):
+        with torch.no_grad(), patch_builtin_len():
+            if self.inputs_schema is not None:
+                inputs_orig_format = self.inputs_schema(args)
+            else:
+                if len(args) != len(self.flattened_inputs) or any(
+                    x is not y for x, y in zip(args, self.flattened_inputs)
+                ):
+                    raise ValueError(
+                        "TracingAdapter does not contain valid inputs_schema."
+                        " So it cannot generalize to other inputs and must be"
+                        " traced with `.flattened_inputs`."
+                    )
+                inputs_orig_format = self.inputs
+
+            outputs = self.inference_func(self.model, *inputs_orig_format)
+            flattened_outputs, schema = flatten_to_tuple(outputs)
+
+            flattened_output_tensors = tuple(
+                [x for x in flattened_outputs if isinstance(x, torch.Tensor)]
+            )
+            if len(flattened_output_tensors) < len(flattened_outputs):
+                if self.allow_non_tensor:
+                    flattened_outputs = flattened_output_tensors
+                    self.outputs_schema = None
+                else:
+                    raise ValueError(
+                        "Model cannot be traced because some model outputs "
+                        "cannot flatten to tensors."
+                    )
+            else:  # schema is valid
+                if self.outputs_schema is None:
+                    self.outputs_schema = schema
+                else:
+                    assert self.outputs_schema == schema, (
+                        "Model should always return outputs with the same "
+                        "structure so it can be traced!"
+                    )
+            return flattened_outputs
+
+    def _create_wrapper(self, traced_model):
+        """
+        Return a function that has an input/output interface the same as the
+        original model, but it calls the given traced model under the hood.
+        """
+
+        def forward(*args):
+            flattened_inputs, _ = flatten_to_tuple(args)
+            flattened_outputs = traced_model(*flattened_inputs)
+            return self.outputs_schema(flattened_outputs)
+
+        return forward
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/export/shared.py b/motion-gan-pipeline/preprocessing/third/detectron2/export/shared.py
new file mode 100644
index 0000000..2d0f7bf
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/export/shared.py
@@ -0,0 +1,1034 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+import collections
+import contextlib
+import copy
+import functools
+import logging
+import numpy as np
+import os
+from typing import Any, Callable, Dict, List, Optional, Tuple, Union
+from unittest import mock
+import caffe2.python.utils as putils
+import torch
+import torch.nn.functional as F
+from caffe2.proto import caffe2_pb2
+from caffe2.python import core, net_drawer, workspace
+from torch.nn.functional import interpolate as interp
+
+logger = logging.getLogger(__name__)
+
+
+# ==== torch/utils_toffee/cast.py =======================================
+
+
+def to_device(t, device_str):
+    """
+    This function is a replacement for .to(another_device) such that it allows the
+    casting to be traced properly by explicitly calling the underlying copy ops.
+    It also avoids introducing an unnecessary op when casting to the same device.
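+
+    Example (illustrative; assumes a CUDA-enabled build):
+    ::
+        t_gpu = to_device(torch.zeros(2, 3), "cuda")  # traced as CopyCPUToGPU
+        t_cpu = to_device(t_gpu, "cpu")               # traced as CopyGPUToCPU
+        t_same = to_device(t_cpu, "cpu")              # no op inserted, returns t_cpu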
+ """ + src = t.device + dst = torch.device(device_str) + + if src == dst: + return t + elif src.type == "cuda" and dst.type == "cpu": + return torch.ops._caffe2.CopyGPUToCPU(t) + elif src.type == "cpu" and dst.type == "cuda": + return torch.ops._caffe2.CopyCPUToGPU(t) + else: + raise RuntimeError("Can't cast tensor from device {} to device {}".format(src, dst)) + + +# ==== torch/utils_toffee/interpolate.py ======================================= + + +# Note: borrowed from vision/detection/fair/detectron/detectron/modeling/detector.py +def BilinearInterpolation(tensor_in, up_scale): + assert up_scale % 2 == 0, "Scale should be even" + + def upsample_filt(size): + factor = (size + 1) // 2 + if size % 2 == 1: + center = factor - 1 + else: + center = factor - 0.5 + + og = np.ogrid[:size, :size] + return (1 - abs(og[0] - center) / factor) * (1 - abs(og[1] - center) / factor) + + kernel_size = int(up_scale) * 2 + bil_filt = upsample_filt(kernel_size) + + dim = int(tensor_in.shape[1]) + kernel = np.zeros((dim, dim, kernel_size, kernel_size), dtype=np.float32) + kernel[range(dim), range(dim), :, :] = bil_filt + + tensor_out = F.conv_transpose2d( + tensor_in, + weight=to_device(torch.Tensor(kernel), tensor_in.device), + bias=None, + stride=int(up_scale), + padding=int(up_scale / 2), + ) + + return tensor_out + + +# NOTE: ONNX is incompatible with traced torch.nn.functional.interpolate if +# using dynamic `scale_factor` rather than static `size`. (T43166860) +# NOTE: Caffe2 Int8 conversion might not be able to quantize `size` properly. +def onnx_compatibale_interpolate( + input, size=None, scale_factor=None, mode="nearest", align_corners=None +): + # NOTE: The input dimensions are interpreted in the form: + # `mini-batch x channels x [optional depth] x [optional height] x width`. + if size is None and scale_factor is not None: + if input.dim() == 4: + if isinstance(scale_factor, (int, float)): + height_scale, width_scale = (scale_factor, scale_factor) + else: + assert isinstance(scale_factor, (tuple, list)) + assert len(scale_factor) == 2 + height_scale, width_scale = scale_factor + + assert not align_corners, "No matching C2 op for align_corners == True" + if mode == "nearest": + return torch.ops._caffe2.ResizeNearest( + input, order="NCHW", width_scale=width_scale, height_scale=height_scale + ) + elif mode == "bilinear": + logger.warning( + "Use F.conv_transpose2d for bilinear interpolate" + " because there's no such C2 op, this may cause significant" + " slowdown and the boundary pixels won't be as same as" + " using F.interpolate due to padding." 
+ ) + assert height_scale == width_scale + return BilinearInterpolation(input, up_scale=height_scale) + logger.warning("Output size is not static, it might cause ONNX conversion issue") + + return interp(input, size, scale_factor, mode, align_corners) + + +@contextlib.contextmanager +def mock_torch_nn_functional_interpolate(): + if torch.onnx.is_in_onnx_export(): + with mock.patch( + "torch.nn.functional.interpolate", side_effect=onnx_compatibale_interpolate + ): + yield + else: + yield + + +# ==== torch/utils_caffe2/ws_utils.py ========================================== + + +class ScopedWS(object): + def __init__(self, ws_name, is_reset, is_cleanup=False): + self.ws_name = ws_name + self.is_reset = is_reset + self.is_cleanup = is_cleanup + self.org_ws = "" + + def __enter__(self): + self.org_ws = workspace.CurrentWorkspace() + if self.ws_name is not None: + workspace.SwitchWorkspace(self.ws_name, True) + if self.is_reset: + workspace.ResetWorkspace() + + return workspace + + def __exit__(self, *args): + if self.is_cleanup: + workspace.ResetWorkspace() + if self.ws_name is not None: + workspace.SwitchWorkspace(self.org_ws) + + +def fetch_any_blob(name): + bb = None + try: + bb = workspace.FetchBlob(name) + except TypeError: + bb = workspace.FetchInt8Blob(name) + except Exception as e: + logger.error("Get blob {} error: {}".format(name, e)) + + return bb + + +# ==== torch/utils_caffe2/protobuf.py ========================================== + + +def get_pb_arg(pb, arg_name): + for x in pb.arg: + if x.name == arg_name: + return x + return None + + +def get_pb_arg_valf(pb, arg_name, default_val): + arg = get_pb_arg(pb, arg_name) + return arg.f if arg is not None else default_val + + +def get_pb_arg_floats(pb, arg_name, default_val): + arg = get_pb_arg(pb, arg_name) + return list(map(float, arg.floats)) if arg is not None else default_val + + +def get_pb_arg_ints(pb, arg_name, default_val): + arg = get_pb_arg(pb, arg_name) + return list(map(int, arg.ints)) if arg is not None else default_val + + +def get_pb_arg_vali(pb, arg_name, default_val): + arg = get_pb_arg(pb, arg_name) + return arg.i if arg is not None else default_val + + +def get_pb_arg_vals(pb, arg_name, default_val): + arg = get_pb_arg(pb, arg_name) + return arg.s if arg is not None else default_val + + +def get_pb_arg_valstrings(pb, arg_name, default_val): + arg = get_pb_arg(pb, arg_name) + return list(arg.strings) if arg is not None else default_val + + +def check_set_pb_arg(pb, arg_name, arg_attr, arg_value, allow_override=False): + arg = get_pb_arg(pb, arg_name) + if arg is None: + arg = putils.MakeArgument(arg_name, arg_value) + assert hasattr(arg, arg_attr) + pb.arg.extend([arg]) + if allow_override and getattr(arg, arg_attr) != arg_value: + logger.warning( + "Override argument {}: {} -> {}".format(arg_name, getattr(arg, arg_attr), arg_value) + ) + setattr(arg, arg_attr, arg_value) + else: + assert arg is not None + assert getattr(arg, arg_attr) == arg_value, "Existing value {}, new value {}".format( + getattr(arg, arg_attr), arg_value + ) + + +def _create_const_fill_op_from_numpy(name, tensor, device_option=None): + assert type(tensor) == np.ndarray + kTypeNameMapper = { + np.dtype("float32"): "GivenTensorFill", + np.dtype("int32"): "GivenTensorIntFill", + np.dtype("int64"): "GivenTensorInt64Fill", + np.dtype("uint8"): "GivenTensorStringFill", + } + + args_dict = {} + if tensor.dtype == np.dtype("uint8"): + args_dict.update({"values": [str(tensor.data)], "shape": [1]}) + else: + args_dict.update({"values": tensor, "shape": 
tensor.shape}) + + if device_option is not None: + args_dict["device_option"] = device_option + + return core.CreateOperator(kTypeNameMapper[tensor.dtype], [], [name], **args_dict) + + +def _create_const_fill_op_from_c2_int8_tensor(name, int8_tensor): + assert type(int8_tensor) == workspace.Int8Tensor + kTypeNameMapper = { + np.dtype("int32"): "Int8GivenIntTensorFill", + np.dtype("uint8"): "Int8GivenTensorFill", + } + + tensor = int8_tensor.data + assert tensor.dtype in [np.dtype("uint8"), np.dtype("int32")] + values = tensor.tobytes() if tensor.dtype == np.dtype("uint8") else tensor + + return core.CreateOperator( + kTypeNameMapper[tensor.dtype], + [], + [name], + values=values, + shape=tensor.shape, + Y_scale=int8_tensor.scale, + Y_zero_point=int8_tensor.zero_point, + ) + + +def create_const_fill_op( + name: str, + blob: Union[np.ndarray, workspace.Int8Tensor], + device_option: Optional[caffe2_pb2.DeviceOption] = None, +) -> caffe2_pb2.OperatorDef: + """ + Given a blob object, return the Caffe2 operator that creates this blob + as constant. Currently support NumPy tensor and Caffe2 Int8Tensor. + """ + + tensor_type = type(blob) + assert tensor_type in [ + np.ndarray, + workspace.Int8Tensor, + ], 'Error when creating const fill op for "{}", unsupported blob type: {}'.format( + name, type(blob) + ) + + if tensor_type == np.ndarray: + return _create_const_fill_op_from_numpy(name, blob, device_option) + elif tensor_type == workspace.Int8Tensor: + assert device_option is None + return _create_const_fill_op_from_c2_int8_tensor(name, blob) + + +def construct_init_net_from_params( + params: Dict[str, Any], device_options: Optional[Dict[str, caffe2_pb2.DeviceOption]] = None +) -> caffe2_pb2.NetDef: + """ + Construct the init_net from params dictionary + """ + init_net = caffe2_pb2.NetDef() + device_options = device_options or {} + for name, blob in params.items(): + if isinstance(blob, str): + logger.warning( + ( + "Blob {} with type {} is not supported in generating init net," + " skipped.".format(name, type(blob)) + ) + ) + continue + init_net.op.extend( + [create_const_fill_op(name, blob, device_option=device_options.get(name, None))] + ) + init_net.external_output.append(name) + return init_net + + +def get_producer_map(ssa): + """ + Return dict from versioned blob to (i, j), + where i is index of producer op, j is the index of output of that op. + """ + producer_map = {} + for i in range(len(ssa)): + outputs = ssa[i][1] + for j, outp in enumerate(outputs): + producer_map[outp] = (i, j) + return producer_map + + +def get_consumer_map(ssa): + """ + Return dict from versioned blob to list of (i, j), + where i is index of consumer op, j is the index of input of that op. + """ + consumer_map = collections.defaultdict(list) + for i in range(len(ssa)): + inputs = ssa[i][0] + for j, inp in enumerate(inputs): + consumer_map[inp].append((i, j)) + return consumer_map + + +def get_params_from_init_net( + init_net: caffe2_pb2.NetDef, +) -> [Dict[str, Any], Dict[str, caffe2_pb2.DeviceOption]]: + """ + Take the output blobs from init_net by running it. + Outputs: + params: dict from blob name to numpy array + device_options: dict from blob name to the device option of its creating op + """ + # NOTE: this assumes that the params is determined by producer op with the + # only exception be CopyGPUToCPU which is CUDA op but returns CPU tensor. 
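+ # Illustrative sketch (hypothetical blob name, not from the original code):
+ # for an init_net with external_output ["conv1_w"], this returns roughly
+ # params = {"conv1_w": <np.ndarray>} and
+ # device_options = {"conv1_w": <DeviceOption of the op producing "conv1_w">}.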
+ def _get_device_option(producer_op):
+ if producer_op.type == "CopyGPUToCPU":
+ return caffe2_pb2.DeviceOption()
+ else:
+ return producer_op.device_option
+
+ with ScopedWS("__get_params_from_init_net__", is_reset=True, is_cleanup=True) as ws:
+ ws.RunNetOnce(init_net)
+ params = {b: fetch_any_blob(b) for b in init_net.external_output}
+ ssa, versions = core.get_ssa(init_net)
+ producer_map = get_producer_map(ssa)
+ device_options = {
+ b: _get_device_option(init_net.op[producer_map[(b, versions[b])][0]])
+ for b in init_net.external_output
+ }
+ return params, device_options
+
+
+def _updater_raise(op, input_types, output_types):
+ raise RuntimeError(
+ "Failed to apply updater for op {} given input_types {} and"
+ " output_types {}".format(op, input_types, output_types)
+ )
+
+
+def _generic_status_identifier(
+ predict_net: caffe2_pb2.NetDef,
+ status_updater: Callable,
+ known_status: Dict[Tuple[str, int], Any],
+) -> Dict[Tuple[str, int], Any]:
+ """
+ Statically infer the status of each blob; the status can be, e.g., device type
+ (CPU/GPU), layout (NCHW/NHWC) or data type (float32/int8). "Blob" here
+ is a versioned blob (Tuple[str, int]) in the format compatible with ssa.
+ Inputs:
+ predict_net: the caffe2 network
+ status_updater: a callable; given an op and the status of its input/output,
+ it returns the updated status of input/output. `None` is used for
+ representing unknown status.
+ known_status: a dict containing known status, used as initialization.
+ Outputs:
+ A dict mapping from versioned blob to its status
+ """
+ ssa, versions = core.get_ssa(predict_net)
+ versioned_ext_input = [(b, 0) for b in predict_net.external_input]
+ versioned_ext_output = [(b, versions[b]) for b in predict_net.external_output]
+ all_versioned_blobs = set().union(*[set(x[0] + x[1]) for x in ssa])
+
+ allowed_vbs = all_versioned_blobs.union(versioned_ext_input).union(versioned_ext_output)
+ assert all(k in allowed_vbs for k in known_status)
+ assert all(v is not None for v in known_status.values())
+ _known_status = copy.deepcopy(known_status)
+
+ def _check_and_update(key, value):
+ assert value is not None
+ if key in _known_status:
+ if _known_status[key] != value:
+ raise RuntimeError(
+ "Conflicting status for {}, existing status {}, new status {}".format(
+ key, _known_status[key], value
+ )
+ )
+ _known_status[key] = value
+
+ def _update_i(op, ssa_i):
+ versioned_inputs = ssa_i[0]
+ versioned_outputs = ssa_i[1]
+
+ inputs_status = [_known_status.get(b, None) for b in versioned_inputs]
+ outputs_status = [_known_status.get(b, None) for b in versioned_outputs]
+
+ new_inputs_status, new_outputs_status = status_updater(op, inputs_status, outputs_status)
+
+ for versioned_blob, status in zip(
+ versioned_inputs + versioned_outputs, new_inputs_status + new_outputs_status
+ ):
+ if status is not None:
+ _check_and_update(versioned_blob, status)
+
+ for op, ssa_i in zip(predict_net.op, ssa):
+ _update_i(op, ssa_i)
+ for op, ssa_i in zip(reversed(predict_net.op), reversed(ssa)):
+ _update_i(op, ssa_i)
+
+ # NOTE: This strictly checks that every blob in predict_net is assigned
+ # a known status. However sometimes that's impossible (e.g. having a dead-end op),
+ # so we may relax this constraint if needed.
+ for k in all_versioned_blobs:
+ if k not in _known_status:
+ raise NotImplementedError(
+ "Cannot infer the status for {}.
Currently only support the case where" + " a single forward and backward pass can identify status for all blobs.".format(k) + ) + + return _known_status + + +def infer_device_type( + predict_net: caffe2_pb2.NetDef, + known_status: Dict[Tuple[str, int], Any], + device_name_style: str = "caffe2", +) -> Dict[Tuple[str, int], str]: + """Return the device type ("cpu" or "gpu"/"cuda") of each (versioned) blob""" + + assert device_name_style in ["caffe2", "pytorch"] + _CPU_STR = "cpu" + _GPU_STR = "gpu" if device_name_style == "caffe2" else "cuda" + + def _copy_cpu_to_gpu_updater(op, input_types, output_types): + if input_types[0] == _GPU_STR or output_types[0] == _CPU_STR: + _updater_raise(op, input_types, output_types) + return ([_CPU_STR], [_GPU_STR]) + + def _copy_gpu_to_cpu_updater(op, input_types, output_types): + if input_types[0] == _CPU_STR or output_types[0] == _GPU_STR: + _updater_raise(op, input_types, output_types) + return ([_GPU_STR], [_CPU_STR]) + + def _other_ops_updater(op, input_types, output_types): + non_none_types = [x for x in input_types + output_types if x is not None] + if len(non_none_types) > 0: + the_type = non_none_types[0] + if not all(x == the_type for x in non_none_types): + _updater_raise(op, input_types, output_types) + else: + the_type = None + return ([the_type for _ in op.input], [the_type for _ in op.output]) + + def _device_updater(op, *args, **kwargs): + return { + "CopyCPUToGPU": _copy_cpu_to_gpu_updater, + "CopyGPUToCPU": _copy_gpu_to_cpu_updater, + }.get(op.type, _other_ops_updater)(op, *args, **kwargs) + + return _generic_status_identifier(predict_net, _device_updater, known_status) + + +# ==== torch/utils_caffe2/vis.py =============================================== + + +def _modify_blob_names(ops, blob_rename_f): + ret = [] + + def _replace_list(blob_list, replaced_list): + del blob_list[:] + blob_list.extend(replaced_list) + + for x in ops: + cur = copy.deepcopy(x) + _replace_list(cur.input, list(map(blob_rename_f, cur.input))) + _replace_list(cur.output, list(map(blob_rename_f, cur.output))) + ret.append(cur) + + return ret + + +def _rename_blob(name, blob_sizes, blob_ranges): + def _list_to_str(bsize): + ret = ", ".join([str(x) for x in bsize]) + ret = "[" + ret + "]" + return ret + + ret = name + if blob_sizes is not None and name in blob_sizes: + ret += "\n" + _list_to_str(blob_sizes[name]) + if blob_ranges is not None and name in blob_ranges: + ret += "\n" + _list_to_str(blob_ranges[name]) + + return ret + + +# graph_name could not contain word 'graph' +def save_graph(net, file_name, graph_name="net", op_only=True, blob_sizes=None, blob_ranges=None): + blob_rename_f = functools.partial(_rename_blob, blob_sizes=blob_sizes, blob_ranges=blob_ranges) + return save_graph_base(net, file_name, graph_name, op_only, blob_rename_f) + + +def save_graph_base(net, file_name, graph_name="net", op_only=True, blob_rename_func=None): + graph = None + ops = net.op + if blob_rename_func is not None: + ops = _modify_blob_names(ops, blob_rename_func) + if not op_only: + graph = net_drawer.GetPydotGraph(ops, graph_name, rankdir="TB") + else: + graph = net_drawer.GetPydotGraphMinimal( + ops, graph_name, rankdir="TB", minimal_dependency=True + ) + + try: + par_dir = os.path.dirname(file_name) + if not os.path.exists(par_dir): + os.makedirs(par_dir) + + format = os.path.splitext(os.path.basename(file_name))[-1] + if format == ".png": + graph.write_png(file_name) + elif format == ".pdf": + graph.write_pdf(file_name) + elif format == ".svg": + graph.write_svg(file_name) 
+ else: + print("Incorrect format {}".format(format)) + except Exception as e: + print("Error when writing graph to image {}".format(e)) + + return graph + + +# ==== torch/utils_toffee/aten_to_caffe2.py ==================================== + + +def group_norm_replace_aten_with_caffe2(predict_net: caffe2_pb2.NetDef): + """ + For ONNX exported model, GroupNorm will be represented as ATen op, + this can be a drop in replacement from ATen to GroupNorm + """ + count = 0 + for op in predict_net.op: + if op.type == "ATen": + op_name = get_pb_arg_vals(op, "operator", None) # return byte in py3 + if op_name and op_name.decode() == "group_norm": + op.arg.remove(get_pb_arg(op, "operator")) + + if get_pb_arg_vali(op, "cudnn_enabled", None): + op.arg.remove(get_pb_arg(op, "cudnn_enabled")) + + num_groups = get_pb_arg_vali(op, "num_groups", None) + if num_groups is not None: + op.arg.remove(get_pb_arg(op, "num_groups")) + check_set_pb_arg(op, "group", "i", num_groups) + + op.type = "GroupNorm" + count += 1 + if count > 1: + logger.info("Replaced {} ATen operator to GroupNormOp".format(count)) + + +# ==== torch/utils_toffee/alias.py ============================================= + + +def alias(x, name, is_backward=False): + if not torch.onnx.is_in_onnx_export(): + return x + assert isinstance(x, torch.Tensor) + return torch.ops._caffe2.AliasWithName(x, name, is_backward=is_backward) + + +def fuse_alias_placeholder(predict_net, init_net): + """Remove AliasWithName placeholder and rename the input/output of it""" + # First we finish all the re-naming + for i, op in enumerate(predict_net.op): + if op.type == "AliasWithName": + assert len(op.input) == 1 + assert len(op.output) == 1 + name = get_pb_arg_vals(op, "name", None).decode() + is_backward = bool(get_pb_arg_vali(op, "is_backward", 0)) + rename_op_input(predict_net, init_net, i, 0, name, from_producer=is_backward) + rename_op_output(predict_net, i, 0, name) + + # Remove AliasWithName, should be very safe since it's a non-op + new_ops = [] + for op in predict_net.op: + if op.type != "AliasWithName": + new_ops.append(op) + else: + # safety check + assert op.input == op.output + assert op.input[0] == op.arg[0].s.decode() + del predict_net.op[:] + predict_net.op.extend(new_ops) + + +# ==== torch/utils_caffe2/graph_transform.py =================================== + + +class IllegalGraphTransformError(ValueError): + """When a graph transform function call can't be executed.""" + + +def _rename_versioned_blob_in_proto( + proto: caffe2_pb2.NetDef, + old_name: str, + new_name: str, + version: int, + ssa: List[Tuple[List[Tuple[str, int]], List[Tuple[str, int]]]], + start_versions: Dict[str, int], + end_versions: Dict[str, int], +): + """In given proto, rename all blobs with matched version""" + # Operater list + for op, i_th_ssa in zip(proto.op, ssa): + versioned_inputs, versioned_outputs = i_th_ssa + for i in range(len(op.input)): + if versioned_inputs[i] == (old_name, version): + op.input[i] = new_name + for i in range(len(op.output)): + if versioned_outputs[i] == (old_name, version): + op.output[i] = new_name + # external_input + if start_versions.get(old_name, 0) == version: + for i in range(len(proto.external_input)): + if proto.external_input[i] == old_name: + proto.external_input[i] = new_name + # external_output + if end_versions.get(old_name, 0) == version: + for i in range(len(proto.external_output)): + if proto.external_output[i] == old_name: + proto.external_output[i] = new_name + + +def rename_op_input( + predict_net: caffe2_pb2.NetDef, + init_net: 
caffe2_pb2.NetDef, + op_id: int, + input_id: int, + new_name: str, + from_producer: bool = False, +): + """ + Rename the op_id-th operator in predict_net, change it's input_id-th input's + name to the new_name. It also does automatic re-route and change + external_input and init_net if necessary. + - It requires the input is only consumed by this op. + - This function modifies predict_net and init_net in-place. + - When from_producer is enable, this also updates other operators that consumes + the same input. Be cautious because may trigger unintended behavior. + """ + assert isinstance(predict_net, caffe2_pb2.NetDef) + assert isinstance(init_net, caffe2_pb2.NetDef) + + init_net_ssa, init_net_versions = core.get_ssa(init_net) + predict_net_ssa, predict_net_versions = core.get_ssa( + predict_net, copy.deepcopy(init_net_versions) + ) + + versioned_inputs, versioned_outputs = predict_net_ssa[op_id] + old_name, version = versioned_inputs[input_id] + + if from_producer: + producer_map = get_producer_map(predict_net_ssa) + if not (old_name, version) in producer_map: + raise NotImplementedError( + "Can't find producer, the input {} is probably from" + " init_net, this is not supported yet.".format(old_name) + ) + producer = producer_map[(old_name, version)] + rename_op_output(predict_net, producer[0], producer[1], new_name) + return + + def contain_targets(op_ssa): + return (old_name, version) in op_ssa[0] + + is_consumer = [contain_targets(op_ssa) for op_ssa in predict_net_ssa] + if sum(is_consumer) > 1: + raise IllegalGraphTransformError( + ( + "Input '{}' of operator(#{}) are consumed by other ops, please use" + + " rename_op_output on the producer instead. Offending op: \n{}" + ).format(old_name, op_id, predict_net.op[op_id]) + ) + + # update init_net + _rename_versioned_blob_in_proto( + init_net, old_name, new_name, version, init_net_ssa, {}, init_net_versions + ) + # update predict_net + _rename_versioned_blob_in_proto( + predict_net, + old_name, + new_name, + version, + predict_net_ssa, + init_net_versions, + predict_net_versions, + ) + + +def rename_op_output(predict_net: caffe2_pb2.NetDef, op_id: int, output_id: int, new_name: str): + """ + Rename the op_id-th operator in predict_net, change it's output_id-th input's + name to the new_name. It also does automatic re-route and change + external_output and if necessary. + - It allows multiple consumers of its output. + - This function modifies predict_net in-place, doesn't need init_net. + """ + assert isinstance(predict_net, caffe2_pb2.NetDef) + + ssa, blob_versions = core.get_ssa(predict_net) + + versioned_inputs, versioned_outputs = ssa[op_id] + old_name, version = versioned_outputs[output_id] + + # update predict_net + _rename_versioned_blob_in_proto( + predict_net, old_name, new_name, version, ssa, {}, blob_versions + ) + + +def get_sub_graph_external_input_output( + predict_net: caffe2_pb2.NetDef, sub_graph_op_indices: List[int] +) -> Tuple[List[Tuple[str, int]], List[Tuple[str, int]]]: + """ + Return the list of external input/output of sub-graph, + each element is tuple of the name and corresponding version in predict_net. + + external input/output is defined the same way as caffe2 NetDef. 
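+
+ Illustrative example (hypothetical blob names, not from the original code):
+ for a sub-graph [#3: b = Mul(x, w), #4: c = Add(b, bias)] where only c is
+ used outside, the external inputs are the versioned blobs for x, w and bias,
+ and the external output is the versioned blob for c.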
+ """ + ssa, versions = core.get_ssa(predict_net) + + all_inputs = [] + all_outputs = [] + for op_id in sub_graph_op_indices: + all_inputs += [inp for inp in ssa[op_id][0] if inp not in all_inputs] + all_outputs += list(ssa[op_id][1]) # ssa output won't repeat + + # for versioned blobs, external inputs are just those blob in all_inputs + # but not in all_outputs + ext_inputs = [inp for inp in all_inputs if inp not in all_outputs] + + # external outputs are essentially outputs of this subgraph that are used + # outside of this sub-graph (including predict_net.external_output) + all_other_inputs = sum( + (ssa[i][0] for i in range(len(ssa)) if i not in sub_graph_op_indices), + [(outp, versions[outp]) for outp in predict_net.external_output], + ) + ext_outputs = [outp for outp in all_outputs if outp in set(all_other_inputs)] + + return ext_inputs, ext_outputs + + +class DiGraph: + """A DAG representation of caffe2 graph, each vertice is a versioned blob.""" + + def __init__(self): + self.vertices = set() + self.graph = collections.defaultdict(list) + + def add_edge(self, u, v): + self.graph[u].append(v) + self.vertices.add(u) + self.vertices.add(v) + + # grab from https://www.geeksforgeeks.org/find-paths-given-source-destination/ + def get_all_paths(self, s, d): + visited = {k: False for k in self.vertices} + path = [] + all_paths = [] + + def _get_all_paths_util(graph, u, d, visited, path): + visited[u] = True + path.append(u) + if u == d: + all_paths.append(copy.deepcopy(path)) + else: + for i in graph[u]: + if not visited[i]: + _get_all_paths_util(graph, i, d, visited, path) + path.pop() + visited[u] = False + + _get_all_paths_util(self.graph, s, d, visited, path) + return all_paths + + @staticmethod + def from_ssa(ssa): + graph = DiGraph() + for op_id in range(len(ssa)): + for inp in ssa[op_id][0]: + for outp in ssa[op_id][1]: + graph.add_edge(inp, outp) + return graph + + +def _get_dependency_chain(ssa, versioned_target, versioned_source): + """ + Return the index list of relevant operator to produce target blob from source blob, + if there's no dependency, return empty list. + """ + + # finding all paths between nodes can be O(N!), thus we can only search + # in the subgraph using the op starting from the first consumer of source blob + # to the producer of the target blob. + consumer_map = get_consumer_map(ssa) + producer_map = get_producer_map(ssa) + start_op = min(x[0] for x in consumer_map[versioned_source]) - 15 + end_op = ( + producer_map[versioned_target][0] + 15 if versioned_target in producer_map else start_op + ) + sub_graph_ssa = ssa[start_op : end_op + 1] + if len(sub_graph_ssa) > 30: + logger.warning( + "Subgraph bebetween {} and {} is large (from op#{} to op#{}), it" + " might take non-trival time to find all paths between them.".format( + versioned_source, versioned_target, start_op, end_op + ) + ) + + dag = DiGraph.from_ssa(sub_graph_ssa) + paths = dag.get_all_paths(versioned_source, versioned_target) # include two ends + ops_in_paths = [[producer_map[blob][0] for blob in path[1:]] for path in paths] + return sorted(set().union(*[set(ops) for ops in ops_in_paths])) + + +def identify_reshape_sub_graph(predict_net: caffe2_pb2.NetDef) -> List[List[int]]: + """ + Idenfity the reshape sub-graph in a protobuf. + The reshape sub-graph is defined as matching the following pattern: + + (input_blob) -> Op_1 -> ... 
-> Op_N -> (new_shape) -─┐
+ └-------------------------------------------> Reshape -> (output_blob)
+
+ Return:
+ List of sub-graphs, each sub-graph is represented as a list of indices
+ of the relevant ops, [Op_1, Op_2, ..., Op_N, Reshape]
+ """
+
+ ssa, _ = core.get_ssa(predict_net)
+
+ ret = []
+ for i, op in enumerate(predict_net.op):
+ if op.type == "Reshape":
+ assert len(op.input) == 2
+ input_ssa = ssa[i][0]
+ data_source = input_ssa[0]
+ shape_source = input_ssa[1]
+ op_indices = _get_dependency_chain(ssa, shape_source, data_source)
+ ret.append(op_indices + [i])
+ return ret
+
+
+def remove_reshape_for_fc(predict_net, params):
+ """
+ In PyTorch, nn.Linear has to take a 2D tensor; this often leads to reshaping
+ a 4D tensor to 2D by calling .view(). However, this (dynamic) reshaping
+ doesn't work well with ONNX and Int8 tools, and causes extra
+ ops (eg. ExpandDims) that might not be available on mobile.
+ Luckily Caffe2 supports 4D tensors for FC, so we can remove those reshapes
+ after exporting the ONNX model.
+ """
+ from caffe2.python import core
+
+ # find all reshape sub-graphs that can be removed, which for now means all
+ # Reshape sub-graphs whose output is only consumed by FC.
+ # TODO: to make it safer, we may need the actual values to better determine
+ # if a Reshape before FC is removable.
+ reshape_sub_graphs = identify_reshape_sub_graph(predict_net)
+ sub_graphs_to_remove = []
+ for reshape_sub_graph in reshape_sub_graphs:
+ reshape_op_id = reshape_sub_graph[-1]
+ assert predict_net.op[reshape_op_id].type == "Reshape"
+ ssa, _ = core.get_ssa(predict_net)
+ reshape_output = ssa[reshape_op_id][1][0]
+ consumers = [i for i in range(len(ssa)) if reshape_output in ssa[i][0]]
+ if all(predict_net.op[consumer].type == "FC" for consumer in consumers):
+ # safety check if the sub-graph is isolated: for this reshape sub-graph,
+ # it means it has one non-param external input and one external output.
+ ext_inputs, ext_outputs = get_sub_graph_external_input_output(
+ predict_net, reshape_sub_graph
+ )
+ non_params_ext_inputs = [inp for inp in ext_inputs if inp[1] != 0]
+ if len(non_params_ext_inputs) == 1 and len(ext_outputs) == 1:
+ sub_graphs_to_remove.append(reshape_sub_graph)
+
+ # perform the sub-graph removal by:
+ # 1: renaming the Reshape's output to its input, so the sub-graph can be
+ # seen as an in-place identity whose external input/output are the same.
+ # 2: simply removing those ops.
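+ # Illustrative sketch (hypothetical blob names): a pattern such as
+ # x -> Shape -> ... -> (new_shape) -> Reshape -> y -> FC
+ # effectively becomes x -> FC once Reshape's output is renamed to x and the
+ # ops of the reshape sub-graph are dropped.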
+ remove_op_ids = [] + params_to_remove = [] + for sub_graph in sub_graphs_to_remove: + logger.info( + "Remove Reshape sub-graph:\n{}".format( + "".join(["(#{:>4})\n{}".format(i, predict_net.op[i]) for i in sub_graph]) + ) + ) + reshape_op_id = sub_graph[-1] + new_reshap_output = predict_net.op[reshape_op_id].input[0] + rename_op_output(predict_net, reshape_op_id, 0, new_reshap_output) + ext_inputs, ext_outputs = get_sub_graph_external_input_output(predict_net, sub_graph) + non_params_ext_inputs = [inp for inp in ext_inputs if inp[1] != 0] + params_ext_inputs = [inp for inp in ext_inputs if inp[1] == 0] + assert len(non_params_ext_inputs) == 1 and len(ext_outputs) == 1 + assert ext_outputs[0][0] == non_params_ext_inputs[0][0] + assert ext_outputs[0][1] == non_params_ext_inputs[0][1] + 1 + remove_op_ids.extend(sub_graph) + params_to_remove.extend(params_ext_inputs) + + predict_net = copy.deepcopy(predict_net) + new_ops = [op for i, op in enumerate(predict_net.op) if i not in remove_op_ids] + del predict_net.op[:] + predict_net.op.extend(new_ops) + for versioned_params in params_to_remove: + name = versioned_params[0] + logger.info("Remove params: {} from init_net and predict_net.external_input".format(name)) + del params[name] + predict_net.external_input.remove(name) + + return predict_net, params + + +def fuse_copy_between_cpu_and_gpu(predict_net: caffe2_pb2.NetDef): + """ + In-place fuse extra copy ops between cpu/gpu for the following case: + a -CopyAToB-> b -CopyBToA> c1 -NextOp1-> d1 + -CopyBToA> c2 -NextOp2-> d2 + The fused network will look like: + a -NextOp1-> d1 + -NextOp2-> d2 + """ + + _COPY_OPS = ["CopyCPUToGPU", "CopyGPUToCPU"] + + def _fuse_once(predict_net): + ssa, blob_versions = core.get_ssa(predict_net) + consumer_map = get_consumer_map(ssa) + versioned_external_output = [ + (name, blob_versions[name]) for name in predict_net.external_output + ] + + for op_id, op in enumerate(predict_net.op): + if op.type in _COPY_OPS: + fw_copy_versioned_output = ssa[op_id][1][0] + consumer_ids = [x[0] for x in consumer_map[fw_copy_versioned_output]] + reverse_op_type = _COPY_OPS[1 - _COPY_OPS.index(op.type)] + + is_fusable = ( + len(consumer_ids) > 0 + and fw_copy_versioned_output not in versioned_external_output + and all( + predict_net.op[_op_id].type == reverse_op_type + and ssa[_op_id][1][0] not in versioned_external_output + for _op_id in consumer_ids + ) + ) + + if is_fusable: + for rv_copy_op_id in consumer_ids: + # making each NextOp uses "a" directly and removing Copy ops + rs_copy_versioned_output = ssa[rv_copy_op_id][1][0] + next_op_id, inp_id = consumer_map[rs_copy_versioned_output][0] + predict_net.op[next_op_id].input[inp_id] = op.input[0] + # remove CopyOps + new_ops = [ + op + for i, op in enumerate(predict_net.op) + if i != op_id and i not in consumer_ids + ] + del predict_net.op[:] + predict_net.op.extend(new_ops) + return True + + return False + + # _fuse_once returns False is nothing can be fused + while _fuse_once(predict_net): + pass + + +def remove_dead_end_ops(net_def: caffe2_pb2.NetDef): + """remove ops if its output is not used or not in external_output""" + ssa, versions = core.get_ssa(net_def) + versioned_external_output = [(name, versions[name]) for name in net_def.external_output] + consumer_map = get_consumer_map(ssa) + removed_op_ids = set() + + def _is_dead_end(versioned_blob): + return not ( + versioned_blob in versioned_external_output + or ( + len(consumer_map[versioned_blob]) > 0 + and all(x[0] not in removed_op_ids for x in 
consumer_map[versioned_blob])
+ )
+ )
+
+ for i, ssa_i in reversed(list(enumerate(ssa))):
+ versioned_outputs = ssa_i[1]
+ if all(_is_dead_end(outp) for outp in versioned_outputs):
+ removed_op_ids.add(i)
+
+ # simply removing those dead-end ops should have no effect on external_output
+ new_ops = [op for i, op in enumerate(net_def.op) if i not in removed_op_ids]
+ del net_def.op[:]
+ net_def.op.extend(new_ops)
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/export/torchscript.py b/motion-gan-pipeline/preprocessing/third/detectron2/export/torchscript.py
new file mode 100644
index 0000000..24fe59b
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/export/torchscript.py
@@ -0,0 +1,132 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+import os
+import torch
+
+from detectron2.utils.file_io import PathManager
+
+from .torchscript_patch import freeze_training_mode, patch_instances
+
+__all__ = ["scripting_with_instances", "dump_torchscript_IR"]
+
+
+def scripting_with_instances(model, fields):
+ """
+ Run :func:`torch.jit.script` on a model that uses the :class:`Instances` class. Since
+ attributes of :class:`Instances` are "dynamically" added in eager mode, it is difficult
+ for scripting to support it out of the box. This function is made to support scripting
+ a model that uses :class:`Instances`. It does the following:
+
+ 1. Create a scriptable ``new_Instances`` class which behaves similarly to ``Instances``,
+ but with all attributes being "static".
+ The attributes need to be statically declared in the ``fields`` argument.
+ 2. Register ``new_Instances``, and force the scripting compiler to
+ use it when trying to compile ``Instances``.
+
+ After this function returns, the patching is reverted, so the user is able to script
+ another model using different fields.
+
+ Example:
+ Assume that ``Instances`` in the model consists of two attributes named
+ ``proposal_boxes`` and ``objectness_logits`` with type :class:`Boxes` and
+ :class:`Tensor` respectively during inference. You can call this function like:
+ ::
+ fields = {"proposal_boxes": Boxes, "objectness_logits": torch.Tensor}
+ torchscript_model = scripting_with_instances(model, fields)
+
+ Note:
+ It only supports models in evaluation mode.
+
+ Args:
+ model (nn.Module): The input model to be exported by scripting.
+ fields (Dict[str, type]): Attribute names and corresponding types that
+ ``Instances`` will use in the model. Note that all attributes used in ``Instances``
+ need to be added, regardless of whether they are inputs/outputs of the model.
+ Data types not defined in detectron2 are not supported for now.
+
+ Returns:
+ torch.jit.ScriptModule: the model in torchscript format
+ """
+ assert (
+ not model.training
+ ), "Currently we only support exporting models in evaluation mode to torchscript"
+
+ with freeze_training_mode(model), patch_instances(fields):
+ scripted_model = torch.jit.script(model)
+ return scripted_model
+
+
+# alias for the old name
+export_torchscript_with_instances = scripting_with_instances
+
+
+def dump_torchscript_IR(model, dir):
+ """
+ Dump IR of a TracedModule/ScriptModule/Function in various formats (code, graph,
+ inlined graph). Useful for debugging.
+
+ Args:
+ model (TracedModule/ScriptModule/ScriptFunction): traced or scripted module
+ dir (str): output directory to dump files.
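+
+ Example (illustrative usage; ``model`` is a hypothetical eval-mode module,
+ the output file names are the ones written by this function):
+ ::
+ ts_model = torch.jit.script(model.eval())
+ dump_torchscript_IR(ts_model, "/tmp/ts_dump")
+ # -> model_ts_code.txt, model_ts_IR.txt, model_ts_IR_inlined.txt, model.txt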
+ """ + dir = os.path.expanduser(dir) + PathManager.mkdirs(dir) + + def _get_script_mod(mod): + if isinstance(mod, torch.jit.TracedModule): + return mod._actual_script_module + return mod + + # Dump pretty-printed code: https://pytorch.org/docs/stable/jit.html#inspecting-code + with PathManager.open(os.path.join(dir, "model_ts_code.txt"), "w") as f: + + def get_code(mod): + # Try a few ways to get code using private attributes. + try: + # This contains more information than just `mod.code` + return _get_script_mod(mod)._c.code + except AttributeError: + pass + try: + return mod.code + except AttributeError: + return None + + def dump_code(prefix, mod): + code = get_code(mod) + name = prefix or "root model" + if code is None: + f.write(f"Could not found code for {name} (type={mod.original_name})\n") + f.write("\n") + else: + f.write(f"\nCode for {name}, type={mod.original_name}:\n") + f.write(code) + f.write("\n") + f.write("-" * 80) + + for name, m in mod.named_children(): + dump_code(prefix + "." + name, m) + + if isinstance(model, torch.jit.ScriptFunction): + f.write(get_code(model)) + else: + dump_code("", model) + + def _get_graph(model): + try: + # Recursively dump IR of all modules + return _get_script_mod(model)._c.dump_to_str(True, False, False) + except AttributeError: + return model.graph.str() + + with PathManager.open(os.path.join(dir, "model_ts_IR.txt"), "w") as f: + f.write(_get_graph(model)) + + # Dump IR of the entire graph (all submodules inlined) + with PathManager.open(os.path.join(dir, "model_ts_IR_inlined.txt"), "w") as f: + f.write(str(model.inlined_graph)) + + if not isinstance(model, torch.jit.ScriptFunction): + # Dump the model structure in pytorch style + with PathManager.open(os.path.join(dir, "model.txt"), "w") as f: + f.write(str(model)) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/export/torchscript_patch.py b/motion-gan-pipeline/preprocessing/third/detectron2/export/torchscript_patch.py new file mode 100644 index 0000000..da9b324 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/export/torchscript_patch.py @@ -0,0 +1,406 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +import os +import sys +import tempfile +from contextlib import ExitStack, contextmanager +from copy import deepcopy +from unittest import mock +import torch +from torch import nn + +# need some explicit imports due to https://github.com/pytorch/pytorch/issues/38964 +import detectron2 # noqa F401 +from detectron2.structures import Boxes, Instances +from detectron2.utils.env import _import_file + +_counter = 0 + + +def _clear_jit_cache(): + from torch.jit._recursive import concrete_type_store + from torch.jit._state import _jit_caching_layer + + concrete_type_store.type_store.clear() # for modules + _jit_caching_layer.clear() # for free functions + + +def _add_instances_conversion_methods(newInstances): + """ + Add from_instances methods to the scripted Instances class. 
+ """ + cls_name = newInstances.__name__ + + @torch.jit.unused + def from_instances(instances: Instances): + """ + Create scripted Instances from original Instances + """ + fields = instances.get_fields() + image_size = instances.image_size + ret = newInstances(image_size) + for name, val in fields.items(): + assert hasattr(ret, f"_{name}"), f"No attribute named {name} in {cls_name}" + setattr(ret, name, deepcopy(val)) + return ret + + newInstances.from_instances = from_instances + + +@contextmanager +def patch_instances(fields): + """ + A contextmanager, under which the Instances class in detectron2 is replaced + by a statically-typed scriptable class, defined by `fields`. + See more in `scripting_with_instances`. + """ + + with tempfile.TemporaryDirectory(prefix="detectron2") as dir, tempfile.NamedTemporaryFile( + mode="w", encoding="utf-8", suffix=".py", dir=dir, delete=False + ) as f: + try: + # Objects that use Instances should not reuse previously-compiled + # results in cache, because `Instances` could be a new class each time. + _clear_jit_cache() + + cls_name, s = _gen_instance_module(fields) + f.write(s) + f.flush() + f.close() + + module = _import(f.name) + new_instances = getattr(module, cls_name) + _ = torch.jit.script(new_instances) + # let torchscript think Instances was scripted already + Instances.__torch_script_class__ = True + # let torchscript find new_instances when looking for the jit type of Instances + Instances._jit_override_qualname = torch._jit_internal._qualified_name(new_instances) + + _add_instances_conversion_methods(new_instances) + yield new_instances + finally: + try: + del Instances.__torch_script_class__ + del Instances._jit_override_qualname + except AttributeError: + pass + sys.modules.pop(module.__name__) + + +def _gen_instance_class(fields): + """ + Args: + fields (dict[name: type]) + """ + + class _FieldType: + def __init__(self, name, type_): + assert isinstance(name, str), f"Field name must be str, got {name}" + self.name = name + self.type_ = type_ + self.annotation = f"{type_.__module__}.{type_.__name__}" + + fields = [_FieldType(k, v) for k, v in fields.items()] + + def indent(level, s): + return " " * 4 * level + s + + lines = [] + + global _counter + _counter += 1 + + cls_name = "ScriptedInstances{}".format(_counter) + + field_names = tuple(x.name for x in fields) + extra_args = ", ".join([f"{f.name}: Optional[{f.annotation}] = None" for f in fields]) + lines.append( + f""" +class {cls_name}: + def __init__(self, image_size: Tuple[int, int], {extra_args}): + self.image_size = image_size + self._field_names = {field_names} +""" + ) + + for f in fields: + lines.append( + indent(2, f"self._{f.name} = torch.jit.annotate(Optional[{f.annotation}], {f.name})") + ) + + for f in fields: + lines.append( + f""" + @property + def {f.name}(self) -> {f.annotation}: + # has to use a local for type refinement + # https://pytorch.org/docs/stable/jit_language_reference.html#optional-type-refinement + t = self._{f.name} + assert t is not None, "{f.name} is None and cannot be accessed!" 
+ return t + + @{f.name}.setter + def {f.name}(self, value: {f.annotation}) -> None: + self._{f.name} = value +""" + ) + + # support method `__len__` + lines.append( + """ + def __len__(self) -> int: +""" + ) + for f in fields: + lines.append( + f""" + t = self._{f.name} + if t is not None: + return len(t) +""" + ) + lines.append( + """ + raise NotImplementedError("Empty Instances does not support __len__!") +""" + ) + + # support method `has` + lines.append( + """ + def has(self, name: str) -> bool: +""" + ) + for f in fields: + lines.append( + f""" + if name == "{f.name}": + return self._{f.name} is not None +""" + ) + lines.append( + """ + return False +""" + ) + + # support method `to` + none_args = ", None" * len(fields) + lines.append( + f""" + def to(self, device: torch.device) -> "{cls_name}": + ret = {cls_name}(self.image_size{none_args}) +""" + ) + for f in fields: + if hasattr(f.type_, "to"): + lines.append( + f""" + t = self._{f.name} + if t is not None: + ret._{f.name} = t.to(device) +""" + ) + else: + # For now, ignore fields that cannot be moved to devices. + # Maybe can support other tensor-like classes (e.g. __torch_function__) + pass + lines.append( + """ + return ret +""" + ) + + # support method `getitem` + none_args = ", None" * len(fields) + lines.append( + f""" + def __getitem__(self, item) -> "{cls_name}": + ret = {cls_name}(self.image_size{none_args}) +""" + ) + for f in fields: + lines.append( + f""" + t = self._{f.name} + if t is not None: + ret._{f.name} = t[item] +""" + ) + lines.append( + """ + return ret +""" + ) + + # support method `cat` + # this version does not contain checks that all instances have same size and fields + none_args = ", None" * len(fields) + lines.append( + f""" + def cat(self, instances: List["{cls_name}"]) -> "{cls_name}": + ret = {cls_name}(self.image_size{none_args}) +""" + ) + for f in fields: + lines.append( + f""" + t = self._{f.name} + if t is not None: + values: List[{f.annotation}] = [x.{f.name} for x in instances] + if torch.jit.isinstance(t, torch.Tensor): + ret._{f.name} = torch.cat(values, dim=0) + else: + ret._{f.name} = t.cat(values) +""" + ) + lines.append( + """ + return ret""" + ) + + # support method `get_fields()` + lines.append( + """ + def get_fields(self) -> Dict[str, Tensor]: + ret = {} + """ + ) + for f in fields: + if f.type_ == Boxes: + stmt = "t.tensor" + elif f.type_ == torch.Tensor: + stmt = "t" + else: + stmt = f'assert False, "unsupported type {str(f.type_)}"' + lines.append( + f""" + t = self._{f.name} + if t is not None: + ret["{f.name}"] = {stmt} + """ + ) + lines.append( + """ + return ret""" + ) + return cls_name, os.linesep.join(lines) + + +def _gen_instance_module(fields): + # TODO: find a more automatic way to enable import of other classes + s = """ +from copy import deepcopy +import torch +from torch import Tensor +import typing +from typing import * + +import detectron2 +from detectron2.structures import Boxes, Instances + +""" + + cls_name, cls_def = _gen_instance_class(fields) + s += cls_def + return cls_name, s + + +def _import(path): + return _import_file( + "{}{}".format(sys.modules[__name__].__name__, _counter), path, make_importable=True + ) + + +@contextmanager +def patch_builtin_len(modules=()): + """ + Patch the builtin len() function of a few detectron2 modules + to use __len__ instead, because __len__ does not convert values to + integers and therefore is friendly to tracing. 
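+
+ Illustrative usage (hypothetical ``model`` and ``inputs``, not from the
+ original code):
+ ::
+ with patch_builtin_len():
+ traced = torch.jit.trace(model, (inputs,))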
+ + Args: + modules (list[stsr]): names of extra modules to patch len(), in + addition to those in detectron2. + """ + + def _new_len(obj): + return obj.__len__() + + with ExitStack() as stack: + MODULES = [ + "detectron2.modeling.roi_heads.fast_rcnn", + "detectron2.modeling.roi_heads.mask_head", + "detectron2.modeling.roi_heads.keypoint_head", + ] + list(modules) + ctxs = [stack.enter_context(mock.patch(mod + ".len")) for mod in MODULES] + for m in ctxs: + m.side_effect = _new_len + yield + + +def patch_nonscriptable_classes(): + """ + Apply patches on a few nonscriptable detectron2 classes. + Should not have side-effects on eager usage. + """ + # __prepare_scriptable__ can also be added to models for easier maintenance. + # But it complicates the clean model code. + + from detectron2.modeling.backbone import ResNet, FPN + + # Due to https://github.com/pytorch/pytorch/issues/36061, + # we change backbone to use ModuleList for scripting. + # (note: this changes param names in state_dict) + + def prepare_resnet(self): + ret = deepcopy(self) + ret.stages = nn.ModuleList(ret.stages) + for k in self.stage_names: + delattr(ret, k) + return ret + + ResNet.__prepare_scriptable__ = prepare_resnet + + def prepare_fpn(self): + ret = deepcopy(self) + ret.lateral_convs = nn.ModuleList(ret.lateral_convs) + ret.output_convs = nn.ModuleList(ret.output_convs) + for name, _ in self.named_children(): + if name.startswith("fpn_"): + delattr(ret, name) + return ret + + FPN.__prepare_scriptable__ = prepare_fpn + + # Annotate some attributes to be constants for the purpose of scripting, + # even though they are not constants in eager mode. + from detectron2.modeling.roi_heads import StandardROIHeads + + if hasattr(StandardROIHeads, "__annotations__"): + # copy first to avoid editing annotations of base class + StandardROIHeads.__annotations__ = deepcopy(StandardROIHeads.__annotations__) + StandardROIHeads.__annotations__["mask_on"] = torch.jit.Final[bool] + StandardROIHeads.__annotations__["keypoint_on"] = torch.jit.Final[bool] + + +# These patches are not supposed to have side-effects. +patch_nonscriptable_classes() + + +@contextmanager +def freeze_training_mode(model): + """ + A context manager that annotates the "training" attribute of every submodule + to constant, so that the training codepath in these modules can be + meta-compiled away. Upon exiting, the annotations are reverted. + """ + classes = {type(x) for x in model.modules()} + # __constants__ is the old way to annotate constants and not compatible + # with __annotations__ . + classes = {x for x in classes if not hasattr(x, "__constants__")} + for cls in classes: + cls.__annotations__["training"] = torch.jit.Final[bool] + yield + for cls in classes: + cls.__annotations__["training"] = bool diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/layers/__init__.py new file mode 100644 index 0000000..3d015c5 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/__init__.py @@ -0,0 +1,24 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+from .batch_norm import FrozenBatchNorm2d, get_norm, NaiveSyncBatchNorm, CycleBatchNormList +from .deform_conv import DeformConv, ModulatedDeformConv +from .mask_ops import paste_masks_in_image +from .nms import batched_nms, batched_nms_rotated, nms, nms_rotated +from .roi_align import ROIAlign, roi_align +from .roi_align_rotated import ROIAlignRotated, roi_align_rotated +from .shape_spec import ShapeSpec +from .wrappers import ( + BatchNorm2d, + Conv2d, + ConvTranspose2d, + cat, + interpolate, + Linear, + nonzero_tuple, + cross_entropy, + shapes_to_tensor, +) +from .blocks import CNNBlockBase, DepthwiseSeparableConv2d +from .aspp import ASPP +from .losses import ciou_loss, diou_loss + +__all__ = [k for k in globals().keys() if not k.startswith("_")] diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/aspp.py b/motion-gan-pipeline/preprocessing/third/detectron2/layers/aspp.py new file mode 100644 index 0000000..14861aa --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/aspp.py @@ -0,0 +1,144 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from copy import deepcopy +import fvcore.nn.weight_init as weight_init +import torch +from torch import nn +from torch.nn import functional as F + +from .batch_norm import get_norm +from .blocks import DepthwiseSeparableConv2d +from .wrappers import Conv2d + + +class ASPP(nn.Module): + """ + Atrous Spatial Pyramid Pooling (ASPP). + """ + + def __init__( + self, + in_channels, + out_channels, + dilations, + *, + norm, + activation, + pool_kernel_size=None, + dropout: float = 0.0, + use_depthwise_separable_conv=False, + ): + """ + Args: + in_channels (int): number of input channels for ASPP. + out_channels (int): number of output channels. + dilations (list): a list of 3 dilations in ASPP. + norm (str or callable): normalization for all conv layers. + See :func:`layers.get_norm` for supported format. norm is + applied to all conv layers except the conv following + global average pooling. + activation (callable): activation function. + pool_kernel_size (tuple, list): the average pooling size (kh, kw) + for image pooling layer in ASPP. If set to None, it always + performs global average pooling. If not None, it must be + divisible by the shape of inputs in forward(). It is recommended + to use a fixed input feature size in training, and set this + option to match this size, so that it performs global average + pooling in training, and the size of the pooling window stays + consistent in inference. + dropout (float): apply dropout on the output of ASPP. It is used in + the official DeepLab implementation with a rate of 0.1: + https://github.com/tensorflow/models/blob/21b73d22f3ed05b650e85ac50849408dd36de32e/research/deeplab/model.py#L532 # noqa + use_depthwise_separable_conv (bool): use DepthwiseSeparableConv2d + for 3x3 convs in ASPP, proposed in :paper:`DeepLabV3+`. 
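+
+ Illustrative construction (hypothetical channel sizes, not from the
+ original code):
+ ::
+ aspp = ASPP(2048, 256, [6, 12, 18], norm="BN", activation=F.relu)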
+ """ + super(ASPP, self).__init__() + assert len(dilations) == 3, "ASPP expects 3 dilations, got {}".format(len(dilations)) + self.pool_kernel_size = pool_kernel_size + self.dropout = dropout + use_bias = norm == "" + self.convs = nn.ModuleList() + # conv 1x1 + self.convs.append( + Conv2d( + in_channels, + out_channels, + kernel_size=1, + bias=use_bias, + norm=get_norm(norm, out_channels), + activation=deepcopy(activation), + ) + ) + weight_init.c2_xavier_fill(self.convs[-1]) + # atrous convs + for dilation in dilations: + if use_depthwise_separable_conv: + self.convs.append( + DepthwiseSeparableConv2d( + in_channels, + out_channels, + kernel_size=3, + padding=dilation, + dilation=dilation, + norm1=norm, + activation1=deepcopy(activation), + norm2=norm, + activation2=deepcopy(activation), + ) + ) + else: + self.convs.append( + Conv2d( + in_channels, + out_channels, + kernel_size=3, + padding=dilation, + dilation=dilation, + bias=use_bias, + norm=get_norm(norm, out_channels), + activation=deepcopy(activation), + ) + ) + weight_init.c2_xavier_fill(self.convs[-1]) + # image pooling + # We do not add BatchNorm because the spatial resolution is 1x1, + # the original TF implementation has BatchNorm. + if pool_kernel_size is None: + image_pooling = nn.Sequential( + nn.AdaptiveAvgPool2d(1), + Conv2d(in_channels, out_channels, 1, bias=True, activation=deepcopy(activation)), + ) + else: + image_pooling = nn.Sequential( + nn.AvgPool2d(kernel_size=pool_kernel_size, stride=1), + Conv2d(in_channels, out_channels, 1, bias=True, activation=deepcopy(activation)), + ) + weight_init.c2_xavier_fill(image_pooling[1]) + self.convs.append(image_pooling) + + self.project = Conv2d( + 5 * out_channels, + out_channels, + kernel_size=1, + bias=use_bias, + norm=get_norm(norm, out_channels), + activation=deepcopy(activation), + ) + weight_init.c2_xavier_fill(self.project) + + def forward(self, x): + size = x.shape[-2:] + if self.pool_kernel_size is not None: + if size[0] % self.pool_kernel_size[0] or size[1] % self.pool_kernel_size[1]: + raise ValueError( + "`pool_kernel_size` must be divisible by the shape of inputs. " + "Input size: {} `pool_kernel_size`: {}".format(size, self.pool_kernel_size) + ) + res = [] + for conv in self.convs: + res.append(conv(x)) + res[-1] = F.interpolate(res[-1], size=size, mode="bilinear", align_corners=False) + res = torch.cat(res, dim=1) + res = self.project(res) + res = F.dropout(res, self.dropout, training=self.training) if self.dropout > 0 else res + return res diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/batch_norm.py b/motion-gan-pipeline/preprocessing/third/detectron2/layers/batch_norm.py new file mode 100644 index 0000000..09a6c66 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/batch_norm.py @@ -0,0 +1,276 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import torch +import torch.distributed as dist +from fvcore.nn.distributed import differentiable_all_reduce +from torch import nn +from torch.nn import functional as F + +from detectron2.utils import comm, env + +from .wrappers import BatchNorm2d + + +class FrozenBatchNorm2d(nn.Module): + """ + BatchNorm2d where the batch statistics and the affine parameters are fixed. + + It contains non-trainable buffers called + "weight" and "bias", "running_mean", "running_var", + initialized to perform identity transformation. + + The pre-trained backbone models from Caffe2 only contain "weight" and "bias", + which are computed from the original four parameters of BN. 
+ The affine transform `x * weight + bias` will perform the equivalent + computation of `(x - running_mean) / sqrt(running_var) * weight + bias`. + When loading a backbone model from Caffe2, "running_mean" and "running_var" + will be left unchanged as identity transformation. + + Other pre-trained backbone models may contain all 4 parameters. + + The forward is implemented by `F.batch_norm(..., training=False)`. + """ + + _version = 3 + + def __init__(self, num_features, eps=1e-5): + super().__init__() + self.num_features = num_features + self.eps = eps + self.register_buffer("weight", torch.ones(num_features)) + self.register_buffer("bias", torch.zeros(num_features)) + self.register_buffer("running_mean", torch.zeros(num_features)) + self.register_buffer("running_var", torch.ones(num_features) - eps) + + def forward(self, x): + if x.requires_grad: + # When gradients are needed, F.batch_norm will use extra memory + # because its backward op computes gradients for weight/bias as well. + scale = self.weight * (self.running_var + self.eps).rsqrt() + bias = self.bias - self.running_mean * scale + scale = scale.reshape(1, -1, 1, 1) + bias = bias.reshape(1, -1, 1, 1) + out_dtype = x.dtype # may be half + return x * scale.to(out_dtype) + bias.to(out_dtype) + else: + # When gradients are not needed, F.batch_norm is a single fused op + # and provide more optimization opportunities. + return F.batch_norm( + x, + self.running_mean, + self.running_var, + self.weight, + self.bias, + training=False, + eps=self.eps, + ) + + def _load_from_state_dict( + self, state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs + ): + version = local_metadata.get("version", None) + + if version is None or version < 2: + # No running_mean/var in early versions + # This will silent the warnings + if prefix + "running_mean" not in state_dict: + state_dict[prefix + "running_mean"] = torch.zeros_like(self.running_mean) + if prefix + "running_var" not in state_dict: + state_dict[prefix + "running_var"] = torch.ones_like(self.running_var) + + super()._load_from_state_dict( + state_dict, prefix, local_metadata, strict, missing_keys, unexpected_keys, error_msgs + ) + + def __repr__(self): + return "FrozenBatchNorm2d(num_features={}, eps={})".format(self.num_features, self.eps) + + @classmethod + def convert_frozen_batchnorm(cls, module): + """ + Convert all BatchNorm/SyncBatchNorm in module into FrozenBatchNorm. + + Args: + module (torch.nn.Module): + + Returns: + If module is BatchNorm/SyncBatchNorm, returns a new module. + Otherwise, in-place convert module and return it. 
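+
+ Illustrative usage (hypothetical ``model``):
+ ::
+ model = FrozenBatchNorm2d.convert_frozen_batchnorm(model)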
+ + Similar to convert_sync_batchnorm in + https://github.com/pytorch/pytorch/blob/master/torch/nn/modules/batchnorm.py + """ + bn_module = nn.modules.batchnorm + bn_module = (bn_module.BatchNorm2d, bn_module.SyncBatchNorm) + res = module + if isinstance(module, bn_module): + res = cls(module.num_features) + if module.affine: + res.weight.data = module.weight.data.clone().detach() + res.bias.data = module.bias.data.clone().detach() + res.running_mean.data = module.running_mean.data + res.running_var.data = module.running_var.data + res.eps = module.eps + else: + for name, child in module.named_children(): + new_child = cls.convert_frozen_batchnorm(child) + if new_child is not child: + res.add_module(name, new_child) + return res + + +def get_norm(norm, out_channels): + """ + Args: + norm (str or callable): either one of BN, SyncBN, FrozenBN, GN; + or a callable that takes a channel number and returns + the normalization layer as a nn.Module. + + Returns: + nn.Module or None: the normalization layer + """ + if norm is None: + return None + if isinstance(norm, str): + if len(norm) == 0: + return None + norm = { + "BN": BatchNorm2d, + # Fixed in https://github.com/pytorch/pytorch/pull/36382 + "SyncBN": NaiveSyncBatchNorm if env.TORCH_VERSION <= (1, 5) else nn.SyncBatchNorm, + "FrozenBN": FrozenBatchNorm2d, + "GN": lambda channels: nn.GroupNorm(32, channels), + # for debugging: + "nnSyncBN": nn.SyncBatchNorm, + "naiveSyncBN": NaiveSyncBatchNorm, + # expose stats_mode N as an option to caller, required for zero-len inputs + "naiveSyncBN_N": lambda channels: NaiveSyncBatchNorm(channels, stats_mode="N"), + }[norm] + return norm(out_channels) + + +class NaiveSyncBatchNorm(BatchNorm2d): + """ + In PyTorch<=1.5, ``nn.SyncBatchNorm`` has incorrect gradient + when the batch size on each worker is different. + (e.g., when scale augmentation is used, or when it is applied to mask head). + + This is a slower but correct alternative to `nn.SyncBatchNorm`. + + Note: + There isn't a single definition of Sync BatchNorm. + + When ``stats_mode==""``, this module computes overall statistics by using + statistics of each worker with equal weight. The result is true statistics + of all samples (as if they are all on one worker) only when all workers + have the same (N, H, W). This mode does not support inputs with zero batch size. + + When ``stats_mode=="N"``, this module computes overall statistics by weighting + the statistics of each worker by their ``N``. The result is true statistics + of all samples (as if they are all on one worker) only when all workers + have the same (H, W). It is slower than ``stats_mode==""``. + + Even though the result of this module may not be the true statistics of all samples, + it may still be reasonable because it might be preferrable to assign equal weights + to all workers, regardless of their (H, W) dimension, instead of putting larger weight + on larger images. From preliminary experiments, little difference is found between such + a simplified implementation and an accurate computation of overall mean & variance. 
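+
+ Illustrative usage (hypothetical channel count): a drop-in replacement
+ for ``BatchNorm2d``:
+ ::
+ norm = NaiveSyncBatchNorm(64, stats_mode="N")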
+ """ + + def __init__(self, *args, stats_mode="", **kwargs): + super().__init__(*args, **kwargs) + assert stats_mode in ["", "N"] + self._stats_mode = stats_mode + + def forward(self, input): + if comm.get_world_size() == 1 or not self.training: + return super().forward(input) + + B, C = input.shape[0], input.shape[1] + + half_input = input.dtype == torch.float16 + if half_input: + # fp16 does not have good enough numerics for the reduction here + input = input.float() + mean = torch.mean(input, dim=[0, 2, 3]) + meansqr = torch.mean(input * input, dim=[0, 2, 3]) + + if self._stats_mode == "": + assert B > 0, 'SyncBatchNorm(stats_mode="") does not support zero batch size.' + vec = torch.cat([mean, meansqr], dim=0) + vec = differentiable_all_reduce(vec) * (1.0 / dist.get_world_size()) + mean, meansqr = torch.split(vec, C) + momentum = self.momentum + else: + if B == 0: + vec = torch.zeros([2 * C + 1], device=mean.device, dtype=mean.dtype) + vec = vec + input.sum() # make sure there is gradient w.r.t input + else: + vec = torch.cat( + [mean, meansqr, torch.ones([1], device=mean.device, dtype=mean.dtype)], dim=0 + ) + vec = differentiable_all_reduce(vec * B) + + total_batch = vec[-1].detach() + momentum = total_batch.clamp(max=1) * self.momentum # no update if total_batch is 0 + mean, meansqr, _ = torch.split(vec / total_batch.clamp(min=1), C) # avoid div-by-zero + + var = meansqr - mean * mean + invstd = torch.rsqrt(var + self.eps) + scale = self.weight * invstd + bias = self.bias - mean * scale + scale = scale.reshape(1, -1, 1, 1) + bias = bias.reshape(1, -1, 1, 1) + + self.running_mean += momentum * (mean.detach() - self.running_mean) + self.running_var += momentum * (var.detach() - self.running_var) + ret = input * scale + bias + if half_input: + ret = ret.half() + return ret + + +class CycleBatchNormList(nn.ModuleList): + """ + Implement domain-specific BatchNorm by cycling. + + When a BatchNorm layer is used for multiple input domains or input + features, it might need to maintain a separate test-time statistics + for each domain. See Sec 5.2 in :paper:`rethinking-batchnorm`. + + This module implements it by using N separate BN layers + and it cycles through them every time a forward() is called. + + NOTE: The caller of this module MUST guarantee to always call + this module by multiple of N times. Otherwise its test-time statistics + will be incorrect. + """ + + def __init__(self, length: int, bn_class=nn.BatchNorm2d, **kwargs): + """ + Args: + length: number of BatchNorm layers to cycle. + bn_class: the BatchNorm class to use + kwargs: arguments of the BatchNorm class, such as num_features. 
+        """
+        self._affine = kwargs.pop("affine", True)
+        super().__init__([bn_class(**kwargs, affine=False) for k in range(length)])
+        if self._affine:
+            # shared affine, domain-specific BN
+            channels = self[0].num_features
+            self.weight = nn.Parameter(torch.ones(channels))
+            self.bias = nn.Parameter(torch.zeros(channels))
+        self._pos = 0
+
+    def forward(self, x):
+        ret = self[self._pos](x)
+        self._pos = (self._pos + 1) % len(self)
+
+        if self._affine:
+            w = self.weight.reshape(1, -1, 1, 1)
+            b = self.bias.reshape(1, -1, 1, 1)
+            return ret * w + b
+        else:
+            return ret
+
+    def extra_repr(self):
+        return f"affine={self._affine}"
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/blocks.py b/motion-gan-pipeline/preprocessing/third/detectron2/layers/blocks.py
new file mode 100644
index 0000000..1995a4b
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/blocks.py
@@ -0,0 +1,111 @@
+# -*- coding: utf-8 -*-
+# Copyright (c) Facebook, Inc. and its affiliates.
+
+import fvcore.nn.weight_init as weight_init
+from torch import nn
+
+from .batch_norm import FrozenBatchNorm2d, get_norm
+from .wrappers import Conv2d
+
+
+"""
+CNN building blocks.
+"""
+
+
+class CNNBlockBase(nn.Module):
+    """
+    A CNN block is assumed to have input channels, output channels and a stride.
+    The input and output of the `forward()` method must be NCHW tensors.
+    The method can perform arbitrary computation but must match the given
+    channels and stride specification.
+
+    Attributes:
+        in_channels (int):
+        out_channels (int):
+        stride (int):
+    """
+
+    def __init__(self, in_channels, out_channels, stride):
+        """
+        The `__init__` method of any subclass should also contain these arguments.
+
+        Args:
+            in_channels (int):
+            out_channels (int):
+            stride (int):
+        """
+        super().__init__()
+        self.in_channels = in_channels
+        self.out_channels = out_channels
+        self.stride = stride
+
+    def freeze(self):
+        """
+        Make this block not trainable.
+        This method sets all parameters to `requires_grad=False`,
+        and converts all BatchNorm layers to FrozenBatchNorm.
+
+        Returns:
+            the block itself
+        """
+        for p in self.parameters():
+            p.requires_grad = False
+        FrozenBatchNorm2d.convert_frozen_batchnorm(self)
+        return self
+
+
+class DepthwiseSeparableConv2d(nn.Module):
+    """
+    A kxk depthwise convolution + a 1x1 convolution.
+
+    In :paper:`xception`, norm & activation are applied on the second conv.
+    :paper:`mobilenet` uses norm & activation on both convs.
+    """
+
+    def __init__(
+        self,
+        in_channels,
+        out_channels,
+        kernel_size=3,
+        padding=1,
+        dilation=1,
+        *,
+        norm1=None,
+        activation1=None,
+        norm2=None,
+        activation2=None,
+    ):
+        """
+        Args:
+            norm1, norm2 (str or callable): normalization for the two conv layers.
+            activation1, activation2 (callable(Tensor) -> Tensor): activation
+                function for the two conv layers.
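+
+        Example (an illustrative sketch, not part of the original file;
+        assumes ``import torch.nn.functional as F``)::
+
+            # 3x3 depthwise + 1x1 pointwise with norm & activation on both
+            # convs, i.e. the MobileNet-style variant mentioned above
+            block = DepthwiseSeparableConv2d(
+                64, 128, norm1="BN", activation1=F.relu, norm2="BN", activation2=F.relu
+            )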
+        """
+        super().__init__()
+        self.depthwise = Conv2d(
+            in_channels,
+            in_channels,
+            kernel_size=kernel_size,
+            padding=padding,
+            dilation=dilation,
+            groups=in_channels,
+            bias=not norm1,
+            norm=get_norm(norm1, in_channels),
+            activation=activation1,
+        )
+        self.pointwise = Conv2d(
+            in_channels,
+            out_channels,
+            kernel_size=1,
+            bias=not norm2,
+            norm=get_norm(norm2, out_channels),
+            activation=activation2,
+        )
+
+        # default initialization
+        weight_init.c2_msra_fill(self.depthwise)
+        weight_init.c2_msra_fill(self.pointwise)
+
+    def forward(self, x):
+        return self.pointwise(self.depthwise(x))
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/README.md b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/README.md
new file mode 100644
index 0000000..778ed3d
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/README.md
@@ -0,0 +1,7 @@
+
+
+To add a new Op:
+
+1. Create a new directory
+2. Implement new ops there
+3. Declare its Python interface in `vision.cpp`.
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated.h b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated.h
new file mode 100644
index 0000000..03f4211
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated.h
@@ -0,0 +1,115 @@
+// Copyright (c) Facebook, Inc. and its affiliates.
+#pragma once
+#include <torch/types.h>
+
+namespace detectron2 {
+
+at::Tensor ROIAlignRotated_forward_cpu(
+    const at::Tensor& input,
+    const at::Tensor& rois,
+    const float spatial_scale,
+    const int pooled_height,
+    const int pooled_width,
+    const int sampling_ratio);
+
+at::Tensor ROIAlignRotated_backward_cpu(
+    const at::Tensor& grad,
+    const at::Tensor& rois,
+    const float spatial_scale,
+    const int pooled_height,
+    const int pooled_width,
+    const int batch_size,
+    const int channels,
+    const int height,
+    const int width,
+    const int sampling_ratio);
+
+#if defined(WITH_CUDA) || defined(WITH_HIP)
+at::Tensor ROIAlignRotated_forward_cuda(
+    const at::Tensor& input,
+    const at::Tensor& rois,
+    const float spatial_scale,
+    const int pooled_height,
+    const int pooled_width,
+    const int sampling_ratio);
+
+at::Tensor ROIAlignRotated_backward_cuda(
+    const at::Tensor& grad,
+    const at::Tensor& rois,
+    const float spatial_scale,
+    const int pooled_height,
+    const int pooled_width,
+    const int batch_size,
+    const int channels,
+    const int height,
+    const int width,
+    const int sampling_ratio);
+#endif
+
+// Interface for Python
+inline at::Tensor ROIAlignRotated_forward(
+    const at::Tensor& input,
+    const at::Tensor& rois,
+    const double spatial_scale,
+    const int64_t pooled_height,
+    const int64_t pooled_width,
+    const int64_t sampling_ratio) {
+  if (input.is_cuda()) {
+#if defined(WITH_CUDA) || defined(WITH_HIP)
+    return ROIAlignRotated_forward_cuda(
+        input,
+        rois,
+        spatial_scale,
+        pooled_height,
+        pooled_width,
+        sampling_ratio);
+#else
+    AT_ERROR("Detectron2 is not compiled with GPU support!");
+#endif
+  }
+  return ROIAlignRotated_forward_cpu(
+      input, rois, spatial_scale, pooled_height, pooled_width, sampling_ratio);
+}
+
+inline at::Tensor ROIAlignRotated_backward(
+    const at::Tensor& grad,
+    const at::Tensor& rois,
+    const double spatial_scale,
+    const int64_t pooled_height,
+    const int64_t pooled_width,
+    const int64_t batch_size,
+    const int64_t channels,
+    const int64_t height,
+    const int64_t width,
+    const int64_t sampling_ratio) {
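+  // Dispatch on device: use the CUDA kernel when the incoming gradient lives
+  // on a GPU, otherwise fall through to the CPU implementation below.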
+ if (grad.is_cuda()) { +#if defined(WITH_CUDA) || defined(WITH_HIP) + return ROIAlignRotated_backward_cuda( + grad, + rois, + spatial_scale, + pooled_height, + pooled_width, + batch_size, + channels, + height, + width, + sampling_ratio); +#else + AT_ERROR("Detectron2 is not compiled with GPU support!"); +#endif + } + return ROIAlignRotated_backward_cpu( + grad, + rois, + spatial_scale, + pooled_height, + pooled_width, + batch_size, + channels, + height, + width, + sampling_ratio); +} + +} // namespace detectron2 diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated_cpu.cpp b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated_cpu.cpp new file mode 100644 index 0000000..2a3d305 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated_cpu.cpp @@ -0,0 +1,522 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +#include +#include "ROIAlignRotated.h" + +// Note: this implementation originates from the Caffe2 ROIAlignRotated Op +// and PyTorch ROIAlign (non-rotated) Op implementations. +// The key difference between this implementation and those ones is +// we don't do "legacy offset" in this version, as there aren't many previous +// works, if any, using the "legacy" ROIAlignRotated Op. +// This would make the interface a bit cleaner. + +namespace detectron2 { + +namespace { +template +struct PreCalc { + int pos1; + int pos2; + int pos3; + int pos4; + T w1; + T w2; + T w3; + T w4; +}; + +template +void pre_calc_for_bilinear_interpolate( + const int height, + const int width, + const int pooled_height, + const int pooled_width, + const int iy_upper, + const int ix_upper, + T roi_start_h, + T roi_start_w, + T bin_size_h, + T bin_size_w, + int roi_bin_grid_h, + int roi_bin_grid_w, + T roi_center_h, + T roi_center_w, + T cos_theta, + T sin_theta, + std::vector>& pre_calc) { + int pre_calc_index = 0; + for (int ph = 0; ph < pooled_height; ph++) { + for (int pw = 0; pw < pooled_width; pw++) { + for (int iy = 0; iy < iy_upper; iy++) { + const T yy = roi_start_h + ph * bin_size_h + + static_cast(iy + .5f) * bin_size_h / + static_cast(roi_bin_grid_h); // e.g., 0.5, 1.5 + for (int ix = 0; ix < ix_upper; ix++) { + const T xx = roi_start_w + pw * bin_size_w + + static_cast(ix + .5f) * bin_size_w / + static_cast(roi_bin_grid_w); + + // Rotate by theta around the center and translate + // In image space, (y, x) is the order for Right Handed System, + // and this is essentially multiplying the point by a rotation matrix + // to rotate it counterclockwise through angle theta. + T y = yy * cos_theta - xx * sin_theta + roi_center_h; + T x = yy * sin_theta + xx * cos_theta + roi_center_w; + // deal with: inverse elements are out of feature map boundary + if (y < -1.0 || y > height || x < -1.0 || x > width) { + // empty + PreCalc pc; + pc.pos1 = 0; + pc.pos2 = 0; + pc.pos3 = 0; + pc.pos4 = 0; + pc.w1 = 0; + pc.w2 = 0; + pc.w3 = 0; + pc.w4 = 0; + pre_calc[pre_calc_index] = pc; + pre_calc_index += 1; + continue; + } + + if (y < 0) { + y = 0; + } + if (x < 0) { + x = 0; + } + + int y_low = (int)y; + int x_low = (int)x; + int y_high; + int x_high; + + if (y_low >= height - 1) { + y_high = y_low = height - 1; + y = (T)y_low; + } else { + y_high = y_low + 1; + } + + if (x_low >= width - 1) { + x_high = x_low = width - 1; + x = (T)x_low; + } else { + x_high = x_low + 1; + } + + T ly = y - y_low; + T lx = x - x_low; + T hy = 1. - ly, hx = 1. 
- lx; + T w1 = hy * hx, w2 = hy * lx, w3 = ly * hx, w4 = ly * lx; + + // save weights and indices + PreCalc pc; + pc.pos1 = y_low * width + x_low; + pc.pos2 = y_low * width + x_high; + pc.pos3 = y_high * width + x_low; + pc.pos4 = y_high * width + x_high; + pc.w1 = w1; + pc.w2 = w2; + pc.w3 = w3; + pc.w4 = w4; + pre_calc[pre_calc_index] = pc; + + pre_calc_index += 1; + } + } + } + } +} + +template +void bilinear_interpolate_gradient( + const int height, + const int width, + T y, + T x, + T& w1, + T& w2, + T& w3, + T& w4, + int& x_low, + int& x_high, + int& y_low, + int& y_high) { + // deal with cases that inverse elements are out of feature map boundary + if (y < -1.0 || y > height || x < -1.0 || x > width) { + // empty + w1 = w2 = w3 = w4 = 0.; + x_low = x_high = y_low = y_high = -1; + return; + } + + if (y < 0) { + y = 0; + } + + if (x < 0) { + x = 0; + } + + y_low = (int)y; + x_low = (int)x; + + if (y_low >= height - 1) { + y_high = y_low = height - 1; + y = (T)y_low; + } else { + y_high = y_low + 1; + } + + if (x_low >= width - 1) { + x_high = x_low = width - 1; + x = (T)x_low; + } else { + x_high = x_low + 1; + } + + T ly = y - y_low; + T lx = x - x_low; + T hy = 1. - ly, hx = 1. - lx; + + // reference in forward + // T v1 = input[y_low * width + x_low]; + // T v2 = input[y_low * width + x_high]; + // T v3 = input[y_high * width + x_low]; + // T v4 = input[y_high * width + x_high]; + // T val = (w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4); + + w1 = hy * hx, w2 = hy * lx, w3 = ly * hx, w4 = ly * lx; + + return; +} + +template +inline void add(T* address, const T& val) { + *address += val; +} + +} // namespace + +template +void ROIAlignRotatedForward( + const int nthreads, + const T* input, + const T& spatial_scale, + const int channels, + const int height, + const int width, + const int pooled_height, + const int pooled_width, + const int sampling_ratio, + const T* rois, + T* output) { + int n_rois = nthreads / channels / pooled_width / pooled_height; + // (n, c, ph, pw) is an element in the pooled output + // can be parallelized using omp + // #pragma omp parallel for num_threads(32) + for (int n = 0; n < n_rois; n++) { + int index_n = n * channels * pooled_width * pooled_height; + + const T* current_roi = rois + n * 6; + int roi_batch_ind = current_roi[0]; + + // Do not use rounding; this implementation detail is critical + // ROIAlignRotated supports align == true, i.e., continuous coordinate + // by default, thus the 0.5 offset + T offset = (T)0.5; + T roi_center_w = current_roi[1] * spatial_scale - offset; + T roi_center_h = current_roi[2] * spatial_scale - offset; + T roi_width = current_roi[3] * spatial_scale; + T roi_height = current_roi[4] * spatial_scale; + T theta = current_roi[5] * M_PI / 180.0; + T cos_theta = cos(theta); + T sin_theta = sin(theta); + + AT_ASSERTM( + roi_width >= 0 && roi_height >= 0, + "ROIs in ROIAlignRotated do not have non-negative size!"); + + T bin_size_h = static_cast(roi_height) / static_cast(pooled_height); + T bin_size_w = static_cast(roi_width) / static_cast(pooled_width); + + // We use roi_bin_grid to sample the grid and mimic integral + int roi_bin_grid_h = (sampling_ratio > 0) + ? sampling_ratio + : ceil(roi_height / pooled_height); // e.g., = 2 + int roi_bin_grid_w = + (sampling_ratio > 0) ? sampling_ratio : ceil(roi_width / pooled_width); + + // We do average (integral) pooling inside a bin + const T count = std::max(roi_bin_grid_h * roi_bin_grid_w, 1); // e.g. 
= 4 + + // we want to precalculate indices and weights shared by all channels, + // this is the key point of optimization + std::vector> pre_calc( + roi_bin_grid_h * roi_bin_grid_w * pooled_width * pooled_height); + + // roi_start_h and roi_start_w are computed wrt the center of RoI (x, y). + // Appropriate translation needs to be applied after. + T roi_start_h = -roi_height / 2.0; + T roi_start_w = -roi_width / 2.0; + + pre_calc_for_bilinear_interpolate( + height, + width, + pooled_height, + pooled_width, + roi_bin_grid_h, + roi_bin_grid_w, + roi_start_h, + roi_start_w, + bin_size_h, + bin_size_w, + roi_bin_grid_h, + roi_bin_grid_w, + roi_center_h, + roi_center_w, + cos_theta, + sin_theta, + pre_calc); + + for (int c = 0; c < channels; c++) { + int index_n_c = index_n + c * pooled_width * pooled_height; + const T* offset_input = + input + (roi_batch_ind * channels + c) * height * width; + int pre_calc_index = 0; + + for (int ph = 0; ph < pooled_height; ph++) { + for (int pw = 0; pw < pooled_width; pw++) { + int index = index_n_c + ph * pooled_width + pw; + + T output_val = 0.; + for (int iy = 0; iy < roi_bin_grid_h; iy++) { + for (int ix = 0; ix < roi_bin_grid_w; ix++) { + PreCalc pc = pre_calc[pre_calc_index]; + output_val += pc.w1 * offset_input[pc.pos1] + + pc.w2 * offset_input[pc.pos2] + + pc.w3 * offset_input[pc.pos3] + pc.w4 * offset_input[pc.pos4]; + + pre_calc_index += 1; + } + } + output_val /= count; + + output[index] = output_val; + } // for pw + } // for ph + } // for c + } // for n +} + +template +void ROIAlignRotatedBackward( + const int nthreads, + // may not be contiguous. should index using n_stride, etc + const T* grad_output, + const T& spatial_scale, + const int channels, + const int height, + const int width, + const int pooled_height, + const int pooled_width, + const int sampling_ratio, + T* grad_input, + const T* rois, + const int n_stride, + const int c_stride, + const int h_stride, + const int w_stride) { + for (int index = 0; index < nthreads; index++) { + // (n, c, ph, pw) is an element in the pooled output + int pw = index % pooled_width; + int ph = (index / pooled_width) % pooled_height; + int c = (index / pooled_width / pooled_height) % channels; + int n = index / pooled_width / pooled_height / channels; + + const T* current_roi = rois + n * 6; + int roi_batch_ind = current_roi[0]; + + // Do not use rounding; this implementation detail is critical + // ROIAlignRotated supports align == true, i.e., continuous coordinate + // by default, thus the 0.5 offset + T offset = (T)0.5; + T roi_center_w = current_roi[1] * spatial_scale - offset; + T roi_center_h = current_roi[2] * spatial_scale - offset; + T roi_width = current_roi[3] * spatial_scale; + T roi_height = current_roi[4] * spatial_scale; + T theta = current_roi[5] * M_PI / 180.0; + T cos_theta = cos(theta); + T sin_theta = sin(theta); + + AT_ASSERTM( + roi_width >= 0 && roi_height >= 0, + "ROIs in ROIAlignRotated do not have non-negative size!"); + + T bin_size_h = static_cast(roi_height) / static_cast(pooled_height); + T bin_size_w = static_cast(roi_width) / static_cast(pooled_width); + + T* offset_grad_input = + grad_input + ((roi_batch_ind * channels + c) * height * width); + + int output_offset = n * n_stride + c * c_stride; + const T* offset_grad_output = grad_output + output_offset; + const T grad_output_this_bin = + offset_grad_output[ph * h_stride + pw * w_stride]; + + // We use roi_bin_grid to sample the grid and mimic integral + int roi_bin_grid_h = (sampling_ratio > 0) + ? 
sampling_ratio + : ceil(roi_height / pooled_height); // e.g., = 2 + int roi_bin_grid_w = + (sampling_ratio > 0) ? sampling_ratio : ceil(roi_width / pooled_width); + + // roi_start_h and roi_start_w are computed wrt the center of RoI (x, y). + // Appropriate translation needs to be applied after. + T roi_start_h = -roi_height / 2.0; + T roi_start_w = -roi_width / 2.0; + + // We do average (integral) pooling inside a bin + const T count = roi_bin_grid_h * roi_bin_grid_w; // e.g. = 4 + + for (int iy = 0; iy < roi_bin_grid_h; iy++) { + const T yy = roi_start_h + ph * bin_size_h + + static_cast(iy + .5f) * bin_size_h / + static_cast(roi_bin_grid_h); // e.g., 0.5, 1.5 + for (int ix = 0; ix < roi_bin_grid_w; ix++) { + const T xx = roi_start_w + pw * bin_size_w + + static_cast(ix + .5f) * bin_size_w / + static_cast(roi_bin_grid_w); + + // Rotate by theta around the center and translate + T y = yy * cos_theta - xx * sin_theta + roi_center_h; + T x = yy * sin_theta + xx * cos_theta + roi_center_w; + + T w1, w2, w3, w4; + int x_low, x_high, y_low, y_high; + + bilinear_interpolate_gradient( + height, width, y, x, w1, w2, w3, w4, x_low, x_high, y_low, y_high); + + T g1 = grad_output_this_bin * w1 / count; + T g2 = grad_output_this_bin * w2 / count; + T g3 = grad_output_this_bin * w3 / count; + T g4 = grad_output_this_bin * w4 / count; + + if (x_low >= 0 && x_high >= 0 && y_low >= 0 && y_high >= 0) { + // atomic add is not needed for now since it is single threaded + add(offset_grad_input + y_low * width + x_low, static_cast(g1)); + add(offset_grad_input + y_low * width + x_high, static_cast(g2)); + add(offset_grad_input + y_high * width + x_low, static_cast(g3)); + add(offset_grad_input + y_high * width + x_high, static_cast(g4)); + } // if + } // ix + } // iy + } // for +} // ROIAlignRotatedBackward + +at::Tensor ROIAlignRotated_forward_cpu( + const at::Tensor& input, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int sampling_ratio) { + AT_ASSERTM(input.device().is_cpu(), "input must be a CPU tensor"); + AT_ASSERTM(rois.device().is_cpu(), "rois must be a CPU tensor"); + + at::TensorArg input_t{input, "input", 1}, rois_t{rois, "rois", 2}; + + at::CheckedFrom c = "ROIAlign_forward_cpu"; + at::checkAllSameType(c, {input_t, rois_t}); + + auto num_rois = rois.size(0); + auto channels = input.size(1); + auto height = input.size(2); + auto width = input.size(3); + + at::Tensor output = at::zeros( + {num_rois, channels, pooled_height, pooled_width}, input.options()); + + auto output_size = num_rois * pooled_height * pooled_width * channels; + + if (output.numel() == 0) { + return output; + } + + auto input_ = input.contiguous(), rois_ = rois.contiguous(); + AT_DISPATCH_FLOATING_TYPES_AND_HALF( + input.scalar_type(), "ROIAlignRotated_forward", [&] { + ROIAlignRotatedForward( + output_size, + input_.data_ptr(), + spatial_scale, + channels, + height, + width, + pooled_height, + pooled_width, + sampling_ratio, + rois_.data_ptr(), + output.data_ptr()); + }); + return output; +} + +at::Tensor ROIAlignRotated_backward_cpu( + const at::Tensor& grad, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int batch_size, + const int channels, + const int height, + const int width, + const int sampling_ratio) { + AT_ASSERTM(grad.device().is_cpu(), "grad must be a CPU tensor"); + AT_ASSERTM(rois.device().is_cpu(), "rois must be a CPU tensor"); + + at::TensorArg grad_t{grad, "grad", 1}, 
rois_t{rois, "rois", 2}; + + at::CheckedFrom c = "ROIAlignRotated_backward_cpu"; + at::checkAllSameType(c, {grad_t, rois_t}); + + at::Tensor grad_input = + at::zeros({batch_size, channels, height, width}, grad.options()); + + // handle possibly empty gradients + if (grad.numel() == 0) { + return grad_input; + } + + // get stride values to ensure indexing into gradients is correct. + int n_stride = grad.stride(0); + int c_stride = grad.stride(1); + int h_stride = grad.stride(2); + int w_stride = grad.stride(3); + + auto rois_ = rois.contiguous(); + AT_DISPATCH_FLOATING_TYPES_AND_HALF( + grad.scalar_type(), "ROIAlignRotated_forward", [&] { + ROIAlignRotatedBackward( + grad.numel(), + grad.data_ptr(), + spatial_scale, + channels, + height, + width, + pooled_height, + pooled_width, + sampling_ratio, + grad_input.data_ptr(), + rois_.data_ptr(), + n_stride, + c_stride, + h_stride, + w_stride); + }); + return grad_input; +} + +} // namespace detectron2 diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated_cuda.cu b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated_cuda.cu new file mode 100644 index 0000000..fca1865 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/ROIAlignRotated/ROIAlignRotated_cuda.cu @@ -0,0 +1,443 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +#include +#include +#include +#include + +// TODO make it in a common file +#define CUDA_1D_KERNEL_LOOP(i, n) \ + for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < n; \ + i += blockDim.x * gridDim.x) + +// Note: this implementation originates from the Caffe2 ROIAlignRotated Op +// and PyTorch ROIAlign (non-rotated) Op implementations. +// The key difference between this implementation and those ones is +// we don't do "legacy offset" in this version, as there aren't many previous +// works, if any, using the "legacy" ROIAlignRotated Op. +// This would make the interface a bit cleaner. + +namespace detectron2 { + +namespace { + +template +__device__ T bilinear_interpolate( + const T* input, + const int height, + const int width, + T y, + T x) { + // deal with cases that inverse elements are out of feature map boundary + if (y < -1.0 || y > height || x < -1.0 || x > width) { + // empty + return 0; + } + + if (y < 0) { + y = 0; + } + + if (x < 0) { + x = 0; + } + + int y_low = (int)y; + int x_low = (int)x; + int y_high; + int x_high; + + if (y_low >= height - 1) { + y_high = y_low = height - 1; + y = (T)y_low; + } else { + y_high = y_low + 1; + } + + if (x_low >= width - 1) { + x_high = x_low = width - 1; + x = (T)x_low; + } else { + x_high = x_low + 1; + } + + T ly = y - y_low; + T lx = x - x_low; + T hy = 1. - ly, hx = 1. 
- lx; + // do bilinear interpolation + T v1 = input[y_low * width + x_low]; + T v2 = input[y_low * width + x_high]; + T v3 = input[y_high * width + x_low]; + T v4 = input[y_high * width + x_high]; + T w1 = hy * hx, w2 = hy * lx, w3 = ly * hx, w4 = ly * lx; + + T val = (w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4); + + return val; +} + +template +__device__ void bilinear_interpolate_gradient( + const int height, + const int width, + T y, + T x, + T& w1, + T& w2, + T& w3, + T& w4, + int& x_low, + int& x_high, + int& y_low, + int& y_high) { + // deal with cases that inverse elements are out of feature map boundary + if (y < -1.0 || y > height || x < -1.0 || x > width) { + // empty + w1 = w2 = w3 = w4 = 0.; + x_low = x_high = y_low = y_high = -1; + return; + } + + if (y < 0) { + y = 0; + } + + if (x < 0) { + x = 0; + } + + y_low = (int)y; + x_low = (int)x; + + if (y_low >= height - 1) { + y_high = y_low = height - 1; + y = (T)y_low; + } else { + y_high = y_low + 1; + } + + if (x_low >= width - 1) { + x_high = x_low = width - 1; + x = (T)x_low; + } else { + x_high = x_low + 1; + } + + T ly = y - y_low; + T lx = x - x_low; + T hy = 1. - ly, hx = 1. - lx; + + // reference in forward + // T v1 = input[y_low * width + x_low]; + // T v2 = input[y_low * width + x_high]; + // T v3 = input[y_high * width + x_low]; + // T v4 = input[y_high * width + x_high]; + // T val = (w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4); + + w1 = hy * hx, w2 = hy * lx, w3 = ly * hx, w4 = ly * lx; + + return; +} + +} // namespace + +template +__global__ void RoIAlignRotatedForward( + const int nthreads, + const T* input, + const T spatial_scale, + const int channels, + const int height, + const int width, + const int pooled_height, + const int pooled_width, + const int sampling_ratio, + const T* rois, + T* top_data) { + CUDA_1D_KERNEL_LOOP(index, nthreads) { + // (n, c, ph, pw) is an element in the pooled output + int pw = index % pooled_width; + int ph = (index / pooled_width) % pooled_height; + int c = (index / pooled_width / pooled_height) % channels; + int n = index / pooled_width / pooled_height / channels; + + const T* current_roi = rois + n * 6; + int roi_batch_ind = current_roi[0]; + + // Do not use rounding; this implementation detail is critical + // ROIAlignRotated supports align == true, i.e., continuous coordinate + // by default, thus the 0.5 offset + T offset = (T)0.5; + T roi_center_w = current_roi[1] * spatial_scale - offset; + T roi_center_h = current_roi[2] * spatial_scale - offset; + T roi_width = current_roi[3] * spatial_scale; + T roi_height = current_roi[4] * spatial_scale; + T theta = current_roi[5] * M_PI / 180.0; + T cos_theta = cos(theta); + T sin_theta = sin(theta); + + T bin_size_h = static_cast(roi_height) / static_cast(pooled_height); + T bin_size_w = static_cast(roi_width) / static_cast(pooled_width); + + const T* offset_input = + input + (roi_batch_ind * channels + c) * height * width; + + // We use roi_bin_grid to sample the grid and mimic integral + int roi_bin_grid_h = (sampling_ratio > 0) + ? sampling_ratio + : ceil(roi_height / pooled_height); // e.g., = 2 + int roi_bin_grid_w = + (sampling_ratio > 0) ? sampling_ratio : ceil(roi_width / pooled_width); + + // roi_start_h and roi_start_w are computed wrt the center of RoI (x, y). + // Appropriate translation needs to be applied after. + T roi_start_h = -roi_height / 2.0; + T roi_start_w = -roi_width / 2.0; + + // We do average (inte gral) pooling inside a bin + const T count = max(roi_bin_grid_h * roi_bin_grid_w, 1); // e.g. 
= 4 + + T output_val = 0.; + for (int iy = 0; iy < roi_bin_grid_h; iy++) // e.g., iy = 0, 1 + { + const T yy = roi_start_h + ph * bin_size_h + + static_cast(iy + .5f) * bin_size_h / + static_cast(roi_bin_grid_h); // e.g., 0.5, 1.5 + for (int ix = 0; ix < roi_bin_grid_w; ix++) { + const T xx = roi_start_w + pw * bin_size_w + + static_cast(ix + .5f) * bin_size_w / + static_cast(roi_bin_grid_w); + + // Rotate by theta around the center and translate + T y = yy * cos_theta - xx * sin_theta + roi_center_h; + T x = yy * sin_theta + xx * cos_theta + roi_center_w; + + T val = bilinear_interpolate(offset_input, height, width, y, x); + output_val += val; + } + } + output_val /= count; + + top_data[index] = output_val; + } +} + +template +__global__ void RoIAlignRotatedBackwardFeature( + const int nthreads, + const T* top_diff, + const int num_rois, + const T spatial_scale, + const int channels, + const int height, + const int width, + const int pooled_height, + const int pooled_width, + const int sampling_ratio, + T* bottom_diff, + const T* rois) { + CUDA_1D_KERNEL_LOOP(index, nthreads) { + // (n, c, ph, pw) is an element in the pooled output + int pw = index % pooled_width; + int ph = (index / pooled_width) % pooled_height; + int c = (index / pooled_width / pooled_height) % channels; + int n = index / pooled_width / pooled_height / channels; + + const T* current_roi = rois + n * 6; + int roi_batch_ind = current_roi[0]; + + // Do not use rounding; this implementation detail is critical + // ROIAlignRotated supports align == true, i.e., continuous coordinate + // by default, thus the 0.5 offset + T offset = (T)0.5; + T roi_center_w = current_roi[1] * spatial_scale - offset; + T roi_center_h = current_roi[2] * spatial_scale - offset; + T roi_width = current_roi[3] * spatial_scale; + T roi_height = current_roi[4] * spatial_scale; + T theta = current_roi[5] * M_PI / 180.0; + T cos_theta = cos(theta); + T sin_theta = sin(theta); + + T bin_size_h = static_cast(roi_height) / static_cast(pooled_height); + T bin_size_w = static_cast(roi_width) / static_cast(pooled_width); + + T* offset_bottom_diff = + bottom_diff + (roi_batch_ind * channels + c) * height * width; + + int top_offset = (n * channels + c) * pooled_height * pooled_width; + const T* offset_top_diff = top_diff + top_offset; + const T top_diff_this_bin = offset_top_diff[ph * pooled_width + pw]; + + // We use roi_bin_grid to sample the grid and mimic integral + int roi_bin_grid_h = (sampling_ratio > 0) + ? sampling_ratio + : ceil(roi_height / pooled_height); // e.g., = 2 + int roi_bin_grid_w = + (sampling_ratio > 0) ? sampling_ratio : ceil(roi_width / pooled_width); + + // roi_start_h and roi_start_w are computed wrt the center of RoI (x, y). + // Appropriate translation needs to be applied after. + T roi_start_h = -roi_height / 2.0; + T roi_start_w = -roi_width / 2.0; + + // We do average (integral) pooling inside a bin + const T count = roi_bin_grid_h * roi_bin_grid_w; // e.g. 
= 4 + + for (int iy = 0; iy < roi_bin_grid_h; iy++) // e.g., iy = 0, 1 + { + const T yy = roi_start_h + ph * bin_size_h + + static_cast(iy + .5f) * bin_size_h / + static_cast(roi_bin_grid_h); // e.g., 0.5, 1.5 + for (int ix = 0; ix < roi_bin_grid_w; ix++) { + const T xx = roi_start_w + pw * bin_size_w + + static_cast(ix + .5f) * bin_size_w / + static_cast(roi_bin_grid_w); + + // Rotate by theta around the center and translate + T y = yy * cos_theta - xx * sin_theta + roi_center_h; + T x = yy * sin_theta + xx * cos_theta + roi_center_w; + + T w1, w2, w3, w4; + int x_low, x_high, y_low, y_high; + + bilinear_interpolate_gradient( + height, width, y, x, w1, w2, w3, w4, x_low, x_high, y_low, y_high); + + T g1 = top_diff_this_bin * w1 / count; + T g2 = top_diff_this_bin * w2 / count; + T g3 = top_diff_this_bin * w3 / count; + T g4 = top_diff_this_bin * w4 / count; + + if (x_low >= 0 && x_high >= 0 && y_low >= 0 && y_high >= 0) { + atomicAdd( + offset_bottom_diff + y_low * width + x_low, static_cast(g1)); + atomicAdd( + offset_bottom_diff + y_low * width + x_high, static_cast(g2)); + atomicAdd( + offset_bottom_diff + y_high * width + x_low, static_cast(g3)); + atomicAdd( + offset_bottom_diff + y_high * width + x_high, static_cast(g4)); + } // if + } // ix + } // iy + } // CUDA_1D_KERNEL_LOOP +} // RoIAlignRotatedBackward + +at::Tensor ROIAlignRotated_forward_cuda( + const at::Tensor& input, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int sampling_ratio) { + AT_ASSERTM(input.device().is_cuda(), "input must be a CUDA tensor"); + AT_ASSERTM(rois.device().is_cuda(), "rois must be a CUDA tensor"); + at::TensorArg input_t{input, "input", 1}, rois_t{rois, "rois", 2}; + + at::CheckedFrom c = "ROIAlignRotated_forward_cuda"; + at::checkAllSameGPU(c, {input_t, rois_t}); + at::checkAllSameType(c, {input_t, rois_t}); + at::cuda::CUDAGuard device_guard(input.device()); + + auto num_rois = rois.size(0); + auto channels = input.size(1); + auto height = input.size(2); + auto width = input.size(3); + + auto output = at::empty( + {num_rois, channels, pooled_height, pooled_width}, input.options()); + auto output_size = num_rois * pooled_height * pooled_width * channels; + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + dim3 grid(std::min( + at::cuda::ATenCeilDiv( + static_cast(output_size), static_cast(512)), + static_cast(4096))); + dim3 block(512); + + if (output.numel() == 0) { + AT_CUDA_CHECK(cudaGetLastError()); + return output; + } + + auto input_ = input.contiguous(), rois_ = rois.contiguous(); + AT_DISPATCH_FLOATING_TYPES( + input.scalar_type(), "ROIAlignRotated_forward", [&] { + RoIAlignRotatedForward<<>>( + output_size, + input_.data_ptr(), + spatial_scale, + channels, + height, + width, + pooled_height, + pooled_width, + sampling_ratio, + rois_.data_ptr(), + output.data_ptr()); + }); + cudaDeviceSynchronize(); + AT_CUDA_CHECK(cudaGetLastError()); + return output; +} + +// TODO remove the dependency on input and use instead its sizes -> save memory +at::Tensor ROIAlignRotated_backward_cuda( + const at::Tensor& grad, + const at::Tensor& rois, + const float spatial_scale, + const int pooled_height, + const int pooled_width, + const int batch_size, + const int channels, + const int height, + const int width, + const int sampling_ratio) { + AT_ASSERTM(grad.device().is_cuda(), "grad must be a CUDA tensor"); + AT_ASSERTM(rois.device().is_cuda(), "rois must be a CUDA tensor"); + + at::TensorArg grad_t{grad, "grad", 1}, 
      rois_t{rois, "rois", 2};
+  at::CheckedFrom c = "ROIAlignRotated_backward_cuda";
+  at::checkAllSameGPU(c, {grad_t, rois_t});
+  at::checkAllSameType(c, {grad_t, rois_t});
+  at::cuda::CUDAGuard device_guard(grad.device());
+
+  auto num_rois = rois.size(0);
+  auto grad_input =
+      at::zeros({batch_size, channels, height, width}, grad.options());
+
+  cudaStream_t stream = at::cuda::getCurrentCUDAStream();
+
+  dim3 grid(std::min(
+      at::cuda::ATenCeilDiv(
+          static_cast<int64_t>(grad.numel()), static_cast<int64_t>(512)),
+      static_cast<int64_t>(4096)));
+  dim3 block(512);
+
+  // handle possibly empty gradients
+  if (grad.numel() == 0) {
+    AT_CUDA_CHECK(cudaGetLastError());
+    return grad_input;
+  }
+
+  auto grad_ = grad.contiguous(), rois_ = rois.contiguous();
+  AT_DISPATCH_FLOATING_TYPES(
+      grad.scalar_type(), "ROIAlignRotated_backward", [&] {
+        RoIAlignRotatedBackwardFeature<scalar_t><<<grid, block, 0, stream>>>(
+            grad.numel(),
+            grad_.data_ptr<scalar_t>(),
+            num_rois,
+            spatial_scale,
+            channels,
+            height,
+            width,
+            pooled_height,
+            pooled_width,
+            sampling_ratio,
+            grad_input.data_ptr<scalar_t>(),
+            rois_.data_ptr<scalar_t>());
+      });
+  AT_CUDA_CHECK(cudaGetLastError());
+  return grad_input;
+}
+
+} // namespace detectron2
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated.h b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated.h
new file mode 100644
index 0000000..3bf383b
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated.h
@@ -0,0 +1,35 @@
+// Copyright (c) Facebook, Inc. and its affiliates.
+#pragma once
+#include <torch/types.h>
+
+namespace detectron2 {
+
+at::Tensor box_iou_rotated_cpu(
+    const at::Tensor& boxes1,
+    const at::Tensor& boxes2);
+
+#if defined(WITH_CUDA) || defined(WITH_HIP)
+at::Tensor box_iou_rotated_cuda(
+    const at::Tensor& boxes1,
+    const at::Tensor& boxes2);
+#endif
+
+// Interface for Python
+// inline is needed to prevent multiple function definitions when this header is
+// included by different cpps
+inline at::Tensor box_iou_rotated(
+    const at::Tensor& boxes1,
+    const at::Tensor& boxes2) {
+  assert(boxes1.device().is_cuda() == boxes2.device().is_cuda());
+  if (boxes1.device().is_cuda()) {
+#if defined(WITH_CUDA) || defined(WITH_HIP)
+    return box_iou_rotated_cuda(boxes1.contiguous(), boxes2.contiguous());
+#else
+    AT_ERROR("Detectron2 is not compiled with GPU support!");
+#endif
+  }
+
+  return box_iou_rotated_cpu(boxes1.contiguous(), boxes2.contiguous());
+}
+
+} // namespace detectron2
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_cpu.cpp b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_cpu.cpp
new file mode 100644
index 0000000..c843487
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_cpu.cpp
@@ -0,0 +1,39 @@
+// Copyright (c) Facebook, Inc. and its affiliates.
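+// CPU implementation of pairwise rotated-box IoU. The geometry helpers
+// (rotated vertices, polygon intersection, convex hull, area) live in
+// box_iou_rotated_utils.h; this file only loops over all box pairs.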
+#include "box_iou_rotated.h"
+#include "box_iou_rotated_utils.h"
+
+namespace detectron2 {
+
+template <typename T>
+void box_iou_rotated_cpu_kernel(
+    const at::Tensor& boxes1,
+    const at::Tensor& boxes2,
+    at::Tensor& ious) {
+  auto num_boxes1 = boxes1.size(0);
+  auto num_boxes2 = boxes2.size(0);
+
+  for (int i = 0; i < num_boxes1; i++) {
+    for (int j = 0; j < num_boxes2; j++) {
+      ious[i * num_boxes2 + j] = single_box_iou_rotated<T>(
+          boxes1[i].data_ptr<T>(), boxes2[j].data_ptr<T>());
+    }
+  }
+}
+
+at::Tensor box_iou_rotated_cpu(
+    // input must be contiguous:
+    const at::Tensor& boxes1,
+    const at::Tensor& boxes2) {
+  auto num_boxes1 = boxes1.size(0);
+  auto num_boxes2 = boxes2.size(0);
+  at::Tensor ious =
+      at::empty({num_boxes1 * num_boxes2}, boxes1.options().dtype(at::kFloat));
+
+  box_iou_rotated_cpu_kernel<float>(boxes1, boxes2, ious);
+
+  // reshape from 1d array to 2d array
+  auto shape = std::vector<int64_t>{num_boxes1, num_boxes2};
+  return ious.reshape(shape);
+}
+
+} // namespace detectron2
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_cuda.cu b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_cuda.cu
new file mode 100644
index 0000000..952710e
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_cuda.cu
@@ -0,0 +1,130 @@
+// Copyright (c) Facebook, Inc. and its affiliates.
+#include <ATen/ATen.h>
+#include <ATen/cuda/CUDAContext.h>
+#include <c10/cuda/CUDAGuard.h>
+#include <ATen/cuda/CUDAApplyUtils.cuh>
+#include "box_iou_rotated_utils.h"
+
+namespace detectron2 {
+
+// 2D block with 32 * 16 = 512 threads per block
+const int BLOCK_DIM_X = 32;
+const int BLOCK_DIM_Y = 16;
+
+template <typename T>
+__global__ void box_iou_rotated_cuda_kernel(
+    const int n_boxes1,
+    const int n_boxes2,
+    const T* dev_boxes1,
+    const T* dev_boxes2,
+    T* dev_ious) {
+  const int row_start = blockIdx.x * blockDim.x;
+  const int col_start = blockIdx.y * blockDim.y;
+
+  const int row_size = min(n_boxes1 - row_start, blockDim.x);
+  const int col_size = min(n_boxes2 - col_start, blockDim.y);
+
+  __shared__ float block_boxes1[BLOCK_DIM_X * 5];
+  __shared__ float block_boxes2[BLOCK_DIM_Y * 5];
+
+  // It's safe to copy using threadIdx.x since BLOCK_DIM_X >= BLOCK_DIM_Y
+  if (threadIdx.x < row_size && threadIdx.y == 0) {
+    block_boxes1[threadIdx.x * 5 + 0] =
+        dev_boxes1[(row_start + threadIdx.x) * 5 + 0];
+    block_boxes1[threadIdx.x * 5 + 1] =
+        dev_boxes1[(row_start + threadIdx.x) * 5 + 1];
+    block_boxes1[threadIdx.x * 5 + 2] =
+        dev_boxes1[(row_start + threadIdx.x) * 5 + 2];
+    block_boxes1[threadIdx.x * 5 + 3] =
+        dev_boxes1[(row_start + threadIdx.x) * 5 + 3];
+    block_boxes1[threadIdx.x * 5 + 4] =
+        dev_boxes1[(row_start + threadIdx.x) * 5 + 4];
+  }
+
+  if (threadIdx.x < col_size && threadIdx.y == 0) {
+    block_boxes2[threadIdx.x * 5 + 0] =
+        dev_boxes2[(col_start + threadIdx.x) * 5 + 0];
+    block_boxes2[threadIdx.x * 5 + 1] =
+        dev_boxes2[(col_start + threadIdx.x) * 5 + 1];
+    block_boxes2[threadIdx.x * 5 + 2] =
+        dev_boxes2[(col_start + threadIdx.x) * 5 + 2];
+    block_boxes2[threadIdx.x * 5 + 3] =
+        dev_boxes2[(col_start + threadIdx.x) * 5 + 3];
+    block_boxes2[threadIdx.x * 5 + 4] =
+        dev_boxes2[(col_start + threadIdx.x) * 5 + 4];
+  }
+  __syncthreads();
+
+  if (threadIdx.x < row_size && threadIdx.y < col_size) {
+    int offset = (row_start + threadIdx.x) * n_boxes2 + col_start + threadIdx.y;
+    dev_ious[offset] = single_box_iou_rotated<T>(
+        block_boxes1 + threadIdx.x * 5, block_boxes2 + threadIdx.y * 5);
+  }
+}
+
+at::Tensor box_iou_rotated_cuda(
+    // input must be contiguous
    const at::Tensor& boxes1,
+    const at::Tensor& boxes2) {
+  using scalar_t = float;
+  AT_ASSERTM(
+      boxes1.scalar_type() == at::kFloat, "boxes1 must be a float tensor");
+  AT_ASSERTM(
+      boxes2.scalar_type() == at::kFloat, "boxes2 must be a float tensor");
+  AT_ASSERTM(boxes1.is_cuda(), "boxes1 must be a CUDA tensor");
+  AT_ASSERTM(boxes2.is_cuda(), "boxes2 must be a CUDA tensor");
+  at::cuda::CUDAGuard device_guard(boxes1.device());
+
+  auto num_boxes1 = boxes1.size(0);
+  auto num_boxes2 = boxes2.size(0);
+
+  at::Tensor ious =
+      at::empty({num_boxes1 * num_boxes2}, boxes1.options().dtype(at::kFloat));
+
+  bool transpose = false;
+  if (num_boxes1 > 0 && num_boxes2 > 0) {
+    scalar_t *data1 = boxes1.data_ptr<scalar_t>(),
+             *data2 = boxes2.data_ptr<scalar_t>();
+
+    if (num_boxes2 > 65535 * BLOCK_DIM_Y) {
+      AT_ASSERTM(
+          num_boxes1 <= 65535 * BLOCK_DIM_Y,
+          "Too many boxes for box_iou_rotated_cuda!");
+      // x dim is allowed to be large, but y dim cannot,
+      // so we transpose the two to avoid "invalid configuration argument"
+      // error. We assume one of them is small. Otherwise the result is hard to
+      // fit in memory anyway.
+      std::swap(num_boxes1, num_boxes2);
+      std::swap(data1, data2);
+      transpose = true;
+    }
+
+    const int blocks_x =
+        at::cuda::ATenCeilDiv(static_cast<int>(num_boxes1), BLOCK_DIM_X);
+    const int blocks_y =
+        at::cuda::ATenCeilDiv(static_cast<int>(num_boxes2), BLOCK_DIM_Y);
+
+    dim3 blocks(blocks_x, blocks_y);
+    dim3 threads(BLOCK_DIM_X, BLOCK_DIM_Y);
+    cudaStream_t stream = at::cuda::getCurrentCUDAStream();
+
+    box_iou_rotated_cuda_kernel<scalar_t><<<blocks, threads, 0, stream>>>(
+        num_boxes1,
+        num_boxes2,
+        data1,
+        data2,
+        (scalar_t*)ious.data_ptr<scalar_t>());
+
+    AT_CUDA_CHECK(cudaGetLastError());
+  }
+
+  // reshape from 1d array to 2d array
+  auto shape = std::vector<int64_t>{num_boxes1, num_boxes2};
+  if (transpose) {
+    return ious.view(shape).t();
+  } else {
+    return ious.view(shape);
+  }
+}
+
+} // namespace detectron2
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_utils.h b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_utils.h
new file mode 100644
index 0000000..b54a5dd
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/box_iou_rotated/box_iou_rotated_utils.h
@@ -0,0 +1,370 @@
+// Copyright (c) Facebook, Inc. and its affiliates.
+#pragma once
+
+#include <cassert>
+#include <cmath>
+
+#if defined(__CUDACC__) || __HCC__ == 1 || __HIP__ == 1
+// Designates functions callable from the host (CPU) and the device (GPU)
+#define HOST_DEVICE __host__ __device__
+#define HOST_DEVICE_INLINE HOST_DEVICE __forceinline__
+#else
+#include <algorithm>
+#define HOST_DEVICE
+#define HOST_DEVICE_INLINE HOST_DEVICE inline
+#endif
+
+namespace detectron2 {
+
+namespace {
+
+template <typename T>
+struct RotatedBox {
+  T x_ctr, y_ctr, w, h, a;
+};
+
+template <typename T>
+struct Point {
+  T x, y;
+  HOST_DEVICE_INLINE Point(const T& px = 0, const T& py = 0) : x(px), y(py) {}
+  HOST_DEVICE_INLINE Point operator+(const Point& p) const {
+    return Point(x + p.x, y + p.y);
+  }
+  HOST_DEVICE_INLINE Point& operator+=(const Point& p) {
+    x += p.x;
+    y += p.y;
+    return *this;
+  }
+  HOST_DEVICE_INLINE Point operator-(const Point& p) const {
+    return Point(x - p.x, y - p.y);
+  }
+  HOST_DEVICE_INLINE Point operator*(const T coeff) const {
+    return Point(x * coeff, y * coeff);
+  }
+};
+
+template <typename T>
+HOST_DEVICE_INLINE T dot_2d(const Point<T>& A, const Point<T>& B) {
+  return A.x * B.x + A.y * B.y;
+}
+
+// R: result type.
can be different from input type +template +HOST_DEVICE_INLINE R cross_2d(const Point& A, const Point& B) { + return static_cast(A.x) * static_cast(B.y) - + static_cast(B.x) * static_cast(A.y); +} + +template +HOST_DEVICE_INLINE void get_rotated_vertices( + const RotatedBox& box, + Point (&pts)[4]) { + // M_PI / 180. == 0.01745329251 + double theta = box.a * 0.01745329251; + T cosTheta2 = (T)cos(theta) * 0.5f; + T sinTheta2 = (T)sin(theta) * 0.5f; + + // y: top --> down; x: left --> right + pts[0].x = box.x_ctr + sinTheta2 * box.h + cosTheta2 * box.w; + pts[0].y = box.y_ctr + cosTheta2 * box.h - sinTheta2 * box.w; + pts[1].x = box.x_ctr - sinTheta2 * box.h + cosTheta2 * box.w; + pts[1].y = box.y_ctr - cosTheta2 * box.h - sinTheta2 * box.w; + pts[2].x = 2 * box.x_ctr - pts[0].x; + pts[2].y = 2 * box.y_ctr - pts[0].y; + pts[3].x = 2 * box.x_ctr - pts[1].x; + pts[3].y = 2 * box.y_ctr - pts[1].y; +} + +template +HOST_DEVICE_INLINE int get_intersection_points( + const Point (&pts1)[4], + const Point (&pts2)[4], + Point (&intersections)[24]) { + // Line vector + // A line from p1 to p2 is: p1 + (p2-p1)*t, t=[0,1] + Point vec1[4], vec2[4]; + for (int i = 0; i < 4; i++) { + vec1[i] = pts1[(i + 1) % 4] - pts1[i]; + vec2[i] = pts2[(i + 1) % 4] - pts2[i]; + } + + // When computing the intersection area, it doesn't hurt if we have + // more (duplicated/approximate) intersections/vertices than needed, + // while it can cause drastic difference if we miss an intersection/vertex. + // Therefore, we add an epsilon to relax the comparisons between + // the float point numbers that decide the intersection points. + double EPS = 1e-5; + + // Line test - test all line combos for intersection + int num = 0; // number of intersections + for (int i = 0; i < 4; i++) { + for (int j = 0; j < 4; j++) { + // Solve for 2x2 Ax=b + T det = cross_2d(vec2[j], vec1[i]); + + // This takes care of parallel lines + if (fabs(det) <= 1e-14) { + continue; + } + + auto vec12 = pts2[j] - pts1[i]; + + T t1 = cross_2d(vec2[j], vec12) / det; + T t2 = cross_2d(vec1[i], vec12) / det; + + if (t1 > -EPS && t1 < 1.0f + EPS && t2 > -EPS && t2 < 1.0f + EPS) { + intersections[num++] = pts1[i] + vec1[i] * t1; + } + } + } + + // Check for vertices of rect1 inside rect2 + { + const auto& AB = vec2[0]; + const auto& DA = vec2[3]; + auto ABdotAB = dot_2d(AB, AB); + auto ADdotAD = dot_2d(DA, DA); + for (int i = 0; i < 4; i++) { + // assume ABCD is the rectangle, and P is the point to be judged + // P is inside ABCD iff. 
P's projection on AB lies within AB + // and P's projection on AD lies within AD + + auto AP = pts1[i] - pts2[0]; + + auto APdotAB = dot_2d(AP, AB); + auto APdotAD = -dot_2d(AP, DA); + + if ((APdotAB > -EPS) && (APdotAD > -EPS) && (APdotAB < ABdotAB + EPS) && + (APdotAD < ADdotAD + EPS)) { + intersections[num++] = pts1[i]; + } + } + } + + // Reverse the check - check for vertices of rect2 inside rect1 + { + const auto& AB = vec1[0]; + const auto& DA = vec1[3]; + auto ABdotAB = dot_2d(AB, AB); + auto ADdotAD = dot_2d(DA, DA); + for (int i = 0; i < 4; i++) { + auto AP = pts2[i] - pts1[0]; + + auto APdotAB = dot_2d(AP, AB); + auto APdotAD = -dot_2d(AP, DA); + + if ((APdotAB > -EPS) && (APdotAD > -EPS) && (APdotAB < ABdotAB + EPS) && + (APdotAD < ADdotAD + EPS)) { + intersections[num++] = pts2[i]; + } + } + } + + return num; +} + +template +HOST_DEVICE_INLINE int convex_hull_graham( + const Point (&p)[24], + const int& num_in, + Point (&q)[24], + bool shift_to_zero = false) { + assert(num_in >= 2); + + // Step 1: + // Find point with minimum y + // if more than 1 points have the same minimum y, + // pick the one with the minimum x. + int t = 0; + for (int i = 1; i < num_in; i++) { + if (p[i].y < p[t].y || (p[i].y == p[t].y && p[i].x < p[t].x)) { + t = i; + } + } + auto& start = p[t]; // starting point + + // Step 2: + // Subtract starting point from every points (for sorting in the next step) + for (int i = 0; i < num_in; i++) { + q[i] = p[i] - start; + } + + // Swap the starting point to position 0 + auto tmp = q[0]; + q[0] = q[t]; + q[t] = tmp; + + // Step 3: + // Sort point 1 ~ num_in according to their relative cross-product values + // (essentially sorting according to angles) + // If the angles are the same, sort according to their distance to origin + T dist[24]; +#if defined(__CUDACC__) || __HCC__ == 1 || __HIP__ == 1 + // compute distance to origin before sort, and sort them together with the + // points + for (int i = 0; i < num_in; i++) { + dist[i] = dot_2d(q[i], q[i]); + } + + // CUDA version + // In the future, we can potentially use thrust + // for sorting here to improve speed (though not guaranteed) + for (int i = 1; i < num_in - 1; i++) { + for (int j = i + 1; j < num_in; j++) { + T crossProduct = cross_2d(q[i], q[j]); + if ((crossProduct < -1e-6) || + (fabs(crossProduct) < 1e-6 && dist[i] > dist[j])) { + auto q_tmp = q[i]; + q[i] = q[j]; + q[j] = q_tmp; + auto dist_tmp = dist[i]; + dist[i] = dist[j]; + dist[j] = dist_tmp; + } + } + } +#else + // CPU version + std::sort( + q + 1, q + num_in, [](const Point& A, const Point& B) -> bool { + T temp = cross_2d(A, B); + if (fabs(temp) < 1e-6) { + return dot_2d(A, A) < dot_2d(B, B); + } else { + return temp > 0; + } + }); + // compute distance to origin after sort, since the points are now different. + for (int i = 0; i < num_in; i++) { + dist[i] = dot_2d(q[i], q[i]); + } +#endif + + // Step 4: + // Make sure there are at least 2 points (that don't overlap with each other) + // in the stack + int k; // index of the non-overlapped second point + for (k = 1; k < num_in; k++) { + if (dist[k] > 1e-8) { + break; + } + } + if (k == num_in) { + // We reach the end, which means the convex hull is just one point + q[0] = p[t]; + return 1; + } + q[1] = q[k]; + int m = 2; // 2 points in the stack + // Step 5: + // Finally we can start the scanning process. 
+ // When a non-convex relationship between the 3 points is found + // (either concave shape or duplicated points), + // we pop the previous point from the stack + // until the 3-point relationship is convex again, or + // until the stack only contains two points + for (int i = k + 1; i < num_in; i++) { + while (m > 1) { + auto q1 = q[i] - q[m - 2], q2 = q[m - 1] - q[m - 2]; + // cross_2d() uses FMA and therefore computes round(round(q1.x*q2.y) - + // q2.x*q1.y) So it may not return 0 even when q1==q2. Therefore we + // compare round(q1.x*q2.y) and round(q2.x*q1.y) directly. (round means + // round to nearest floating point). + if (q1.x * q2.y >= q2.x * q1.y) + m--; + else + break; + } + // Using double also helps, but float can solve the issue for now. + // while (m > 1 && cross_2d(q[i] - q[m - 2], q[m - 1] - q[m - 2]) + // >= 0) { + // m--; + // } + q[m++] = q[i]; + } + + // Step 6 (Optional): + // In general sense we need the original coordinates, so we + // need to shift the points back (reverting Step 2) + // But if we're only interested in getting the area/perimeter of the shape + // We can simply return. + if (!shift_to_zero) { + for (int i = 0; i < m; i++) { + q[i] += start; + } + } + + return m; +} + +template +HOST_DEVICE_INLINE T polygon_area(const Point (&q)[24], const int& m) { + if (m <= 2) { + return 0; + } + + T area = 0; + for (int i = 1; i < m - 1; i++) { + area += fabs(cross_2d(q[i] - q[0], q[i + 1] - q[0])); + } + + return area / 2.0; +} + +template +HOST_DEVICE_INLINE T rotated_boxes_intersection( + const RotatedBox& box1, + const RotatedBox& box2) { + // There are up to 4 x 4 + 4 + 4 = 24 intersections (including dups) returned + // from rotated_rect_intersection_pts + Point intersectPts[24], orderedPts[24]; + + Point pts1[4]; + Point pts2[4]; + get_rotated_vertices(box1, pts1); + get_rotated_vertices(box2, pts2); + + int num = get_intersection_points(pts1, pts2, intersectPts); + + if (num <= 2) { + return 0.0; + } + + // Convex Hull to order the intersection points in clockwise order and find + // the contour area. + int num_convex = convex_hull_graham(intersectPts, num, orderedPts, true); + return polygon_area(orderedPts, num_convex); +} + +} // namespace + +template +HOST_DEVICE_INLINE T +single_box_iou_rotated(T const* const box1_raw, T const* const box2_raw) { + // shift center to the middle point to achieve higher precision in result + RotatedBox box1, box2; + auto center_shift_x = (box1_raw[0] + box2_raw[0]) / 2.0; + auto center_shift_y = (box1_raw[1] + box2_raw[1]) / 2.0; + box1.x_ctr = box1_raw[0] - center_shift_x; + box1.y_ctr = box1_raw[1] - center_shift_y; + box1.w = box1_raw[2]; + box1.h = box1_raw[3]; + box1.a = box1_raw[4]; + box2.x_ctr = box2_raw[0] - center_shift_x; + box2.y_ctr = box2_raw[1] - center_shift_y; + box2.w = box2_raw[2]; + box2.h = box2_raw[3]; + box2.a = box2_raw[4]; + + T area1 = box1.w * box1.h; + T area2 = box2.w * box2.h; + if (area1 < 1e-14 || area2 < 1e-14) { + return 0.f; + } + + T intersection = rotated_boxes_intersection(box1, box2); + T iou = intersection / (area1 + area2 - intersection); + return iou; +} + +} // namespace detectron2 diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/cocoeval/cocoeval.cpp b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/cocoeval/cocoeval.cpp new file mode 100644 index 0000000..0a5b7b9 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/cocoeval/cocoeval.cpp @@ -0,0 +1,507 @@ +// Copyright (c) Facebook, Inc. 
and its affiliates.
+#include "cocoeval.h"
+#include <time.h>
+#include <algorithm>
+#include <cstdint>
+#include <numeric>
+
+using namespace pybind11::literals;
+
+namespace detectron2 {
+
+namespace COCOeval {
+
+// Sort detections from highest score to lowest, such that
+// detection_instances[detection_sorted_indices[t]] >=
+// detection_instances[detection_sorted_indices[t+1]]. Use stable_sort to match
+// original COCO API
+void SortInstancesByDetectionScore(
+    const std::vector<InstanceAnnotation>& detection_instances,
+    std::vector<uint64_t>* detection_sorted_indices) {
+  detection_sorted_indices->resize(detection_instances.size());
+  std::iota(
+      detection_sorted_indices->begin(), detection_sorted_indices->end(), 0);
+  std::stable_sort(
+      detection_sorted_indices->begin(),
+      detection_sorted_indices->end(),
+      [&detection_instances](size_t j1, size_t j2) {
+        return detection_instances[j1].score > detection_instances[j2].score;
+      });
+}
+
+// Partition the ground truth objects based on whether or not to ignore them
+// based on area
+void SortInstancesByIgnore(
+    const std::array<double, 2>& area_range,
+    const std::vector<InstanceAnnotation>& ground_truth_instances,
+    std::vector<uint64_t>* ground_truth_sorted_indices,
+    std::vector<bool>* ignores) {
+  ignores->clear();
+  ignores->reserve(ground_truth_instances.size());
+  for (auto o : ground_truth_instances) {
+    ignores->push_back(
+        o.ignore || o.area < area_range[0] || o.area > area_range[1]);
+  }
+
+  ground_truth_sorted_indices->resize(ground_truth_instances.size());
+  std::iota(
+      ground_truth_sorted_indices->begin(),
+      ground_truth_sorted_indices->end(),
+      0);
+  std::stable_sort(
+      ground_truth_sorted_indices->begin(),
+      ground_truth_sorted_indices->end(),
+      [&ignores](size_t j1, size_t j2) {
+        return (int)(*ignores)[j1] < (int)(*ignores)[j2];
+      });
+}
+
+// For each IOU threshold, greedily match each detected instance to a ground
+// truth instance (if possible) and store the results
+void MatchDetectionsToGroundTruth(
+    const std::vector<InstanceAnnotation>& detection_instances,
+    const std::vector<uint64_t>& detection_sorted_indices,
+    const std::vector<InstanceAnnotation>& ground_truth_instances,
+    const std::vector<uint64_t>& ground_truth_sorted_indices,
+    const std::vector<bool>& ignores,
+    const std::vector<std::vector<double>>& ious,
+    const std::vector<double>& iou_thresholds,
+    const std::array<double, 2>& area_range,
+    ImageEvaluation* results) {
+  // Initialize memory to store return data matches and ignore
+  const int num_iou_thresholds = iou_thresholds.size();
+  const int num_ground_truth = ground_truth_sorted_indices.size();
+  const int num_detections = detection_sorted_indices.size();
+  std::vector<uint64_t> ground_truth_matches(
+      num_iou_thresholds * num_ground_truth, 0);
+  std::vector<uint64_t>& detection_matches = results->detection_matches;
+  std::vector<bool>& detection_ignores = results->detection_ignores;
+  std::vector<bool>& ground_truth_ignores = results->ground_truth_ignores;
+  detection_matches.resize(num_iou_thresholds * num_detections, 0);
+  detection_ignores.resize(num_iou_thresholds * num_detections, false);
+  ground_truth_ignores.resize(num_ground_truth);
+  for (auto g = 0; g < num_ground_truth; ++g) {
+    ground_truth_ignores[g] = ignores[ground_truth_sorted_indices[g]];
+  }
+
+  for (auto t = 0; t < num_iou_thresholds; ++t) {
+    for (auto d = 0; d < num_detections; ++d) {
+      // information about best match so far (match=-1 -> unmatched)
+      double best_iou = std::min(iou_thresholds[t], 1 - 1e-10);
+      int match = -1;
+      for (auto g = 0; g < num_ground_truth; ++g) {
+        // if this ground truth instance is already matched and not a
+        // crowd, it cannot be matched to another detection
+        if (ground_truth_matches[t * num_ground_truth + g] > 0 &&
+            !ground_truth_instances[ground_truth_sorted_indices[g]].is_crowd) {
+          continue;
+        }
+
+        // if detected instance matched to a regular ground truth
+        // instance, we can break on the first ground truth instance
+        // tagged as ignore (because they are sorted by the ignore tag)
+        if (match >= 0 && !ground_truth_ignores[match] &&
+            ground_truth_ignores[g]) {
+          break;
+        }
+
+        // if IOU overlap is the best so far, store the match appropriately
+        if (ious[d][ground_truth_sorted_indices[g]] >= best_iou) {
+          best_iou = ious[d][ground_truth_sorted_indices[g]];
+          match = g;
+        }
+      }
+      // if match was made, store id of match for both detection and
+      // ground truth
+      if (match >= 0) {
+        detection_ignores[t * num_detections + d] = ground_truth_ignores[match];
+        detection_matches[t * num_detections + d] =
+            ground_truth_instances[ground_truth_sorted_indices[match]].id;
+        ground_truth_matches[t * num_ground_truth + match] =
+            detection_instances[detection_sorted_indices[d]].id;
+      }
+
+      // set unmatched detections outside of area range to ignore
+      const InstanceAnnotation& detection =
+          detection_instances[detection_sorted_indices[d]];
+      detection_ignores[t * num_detections + d] =
+          detection_ignores[t * num_detections + d] ||
+          (detection_matches[t * num_detections + d] == 0 &&
+           (detection.area < area_range[0] || detection.area > area_range[1]));
+    }
+  }
+
+  // store detection score results
+  results->detection_scores.resize(detection_sorted_indices.size());
+  for (size_t d = 0; d < detection_sorted_indices.size(); ++d) {
+    results->detection_scores[d] =
+        detection_instances[detection_sorted_indices[d]].score;
+  }
+}
+
+std::vector<ImageEvaluation> EvaluateImages(
+    const std::vector<std::array<double, 2>>& area_ranges,
+    int max_detections,
+    const std::vector<double>& iou_thresholds,
+    const ImageCategoryInstances<std::vector<double>>& image_category_ious,
+    const ImageCategoryInstances<InstanceAnnotation>&
+        image_category_ground_truth_instances,
+    const ImageCategoryInstances<InstanceAnnotation>&
+        image_category_detection_instances) {
+  const int num_area_ranges = area_ranges.size();
+  const int num_images = image_category_ground_truth_instances.size();
+  const int num_categories =
+      image_category_ious.size() > 0 ? image_category_ious[0].size() : 0;
+  std::vector<uint64_t> detection_sorted_indices;
+  std::vector<uint64_t> ground_truth_sorted_indices;
+  std::vector<bool> ignores;
+  std::vector<ImageEvaluation> results_all(
+      num_images * num_area_ranges * num_categories);
+
+  // Store results for each image, category, and area range combination. Results
+  // for each IOU threshold are packed into the same ImageEvaluation object
+  for (auto i = 0; i < num_images; ++i) {
+    for (auto c = 0; c < num_categories; ++c) {
+      const std::vector<InstanceAnnotation>& ground_truth_instances =
+          image_category_ground_truth_instances[i][c];
+      const std::vector<InstanceAnnotation>& detection_instances =
+          image_category_detection_instances[i][c];
+
+      SortInstancesByDetectionScore(
+          detection_instances, &detection_sorted_indices);
+      if ((int)detection_sorted_indices.size() > max_detections) {
+        detection_sorted_indices.resize(max_detections);
+      }
+
+      for (size_t a = 0; a < area_ranges.size(); ++a) {
+        SortInstancesByIgnore(
+            area_ranges[a],
+            ground_truth_instances,
+            &ground_truth_sorted_indices,
+            &ignores);
+
+        MatchDetectionsToGroundTruth(
+            detection_instances,
+            detection_sorted_indices,
+            ground_truth_instances,
+            ground_truth_sorted_indices,
+            ignores,
+            image_category_ious[i][c],
+            iou_thresholds,
+            area_ranges[a],
+            &results_all
+                [c * num_area_ranges * num_images + a * num_images + i]);
+      }
+    }
+  }
+
+  return results_all;
+}
+
+// Convert a python list to a vector
+template <typename T>
+std::vector<T> list_to_vec(const py::list& l) {
+  std::vector<T> v(py::len(l));
+  for (int i = 0; i < (int)py::len(l); ++i) {
+    v[i] = l[i].cast<T>();
+  }
+  return v;
+}
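+
+// (Typical usage, as in Accumulate() below: list_to_vec<double>(
+// params.attr("recThrs")) copies the COCO recall thresholds into a C++ vector.)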
+
+// Helper function to Accumulate()
+// Considers the evaluation results applicable to a particular category, area
+// range, and max_detections parameter setting, which begin at
+// evaluations[evaluation_index]. Extracts a sorted list of length n of all
+// applicable detection instances concatenated across all images in the dataset,
+// which are represented by the outputs evaluation_indices, detection_scores,
+// image_detection_indices, and detection_sorted_indices--all of which are
+// length n. evaluation_indices[i] stores the applicable index into
+// evaluations[] for instance i, which has detection score detection_scores[i],
+// and is the image_detection_indices[i]'th of the list of detections
+// for the image containing i. detection_sorted_indices[] defines a sorted
+// permutation of the 3 other outputs
+int BuildSortedDetectionList(
+    const std::vector<ImageEvaluation>& evaluations,
+    const int64_t evaluation_index,
+    const int64_t num_images,
+    const int max_detections,
+    std::vector<uint64_t>* evaluation_indices,
+    std::vector<double>* detection_scores,
+    std::vector<uint64_t>* detection_sorted_indices,
+    std::vector<uint64_t>* image_detection_indices) {
+  assert(evaluations.size() >= evaluation_index + num_images);
+
+  // Extract a list of object instances of the applicable category, area
+  // range, and max detections requirements such that they can be sorted
+  image_detection_indices->clear();
+  evaluation_indices->clear();
+  detection_scores->clear();
+  image_detection_indices->reserve(num_images * max_detections);
+  evaluation_indices->reserve(num_images * max_detections);
+  detection_scores->reserve(num_images * max_detections);
+  int num_valid_ground_truth = 0;
+  for (auto i = 0; i < num_images; ++i) {
+    const ImageEvaluation& evaluation = evaluations[evaluation_index + i];
+
+    for (int d = 0;
+         d < (int)evaluation.detection_scores.size() && d < max_detections;
+         ++d) { // detected instances
+      evaluation_indices->push_back(evaluation_index + i);
+      image_detection_indices->push_back(d);
+      detection_scores->push_back(evaluation.detection_scores[d]);
+    }
+    for (auto ground_truth_ignore : evaluation.ground_truth_ignores) {
+      if (!ground_truth_ignore) {
+        ++num_valid_ground_truth;
+      }
+    }
+  }
+
+  // Sort detections by decreasing score, using stable sort to match
+  // python implementation
+  detection_sorted_indices->resize(detection_scores->size());
+  std::iota(
+      detection_sorted_indices->begin(), detection_sorted_indices->end(), 0);
+  std::stable_sort(
+      detection_sorted_indices->begin(),
+      detection_sorted_indices->end(),
+      [&detection_scores](size_t j1, size_t j2) {
+        return (*detection_scores)[j1] > (*detection_scores)[j2];
+      });
+
+  return num_valid_ground_truth;
+}
+
+// Helper function to Accumulate()
+// Compute a precision recall curve given a sorted list of detected instances
+// encoded in evaluations, evaluation_indices, detection_scores,
+// detection_sorted_indices, image_detection_indices (see
+// BuildSortedDetectionList()). Using vectors precisions and recalls
+// and temporary storage, output the results into precisions_out, recalls_out,
+// and scores_out, which are large buffers containing many precision/recall
+// curves for all possible parameter settings, with precisions_out_index and
+// recalls_out_index defining the applicable indices to store results.
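+// (Concretely, the sample for recall threshold r of this curve lands at
+// precisions_out[precisions_out_index + r * precisions_out_stride], as
+// computed in the sampling loop at the end of this function.)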
+void ComputePrecisionRecallCurve(
+    const int64_t precisions_out_index,
+    const int64_t precisions_out_stride,
+    const int64_t recalls_out_index,
+    const std::vector<double>& recall_thresholds,
+    const int iou_threshold_index,
+    const int num_iou_thresholds,
+    const int num_valid_ground_truth,
+    const std::vector<ImageEvaluation>& evaluations,
+    const std::vector<uint64_t>& evaluation_indices,
+    const std::vector<double>& detection_scores,
+    const std::vector<uint64_t>& detection_sorted_indices,
+    const std::vector<uint64_t>& image_detection_indices,
+    std::vector<double>* precisions,
+    std::vector<double>* recalls,
+    std::vector<double>* precisions_out,
+    std::vector<double>* scores_out,
+    std::vector<double>* recalls_out) {
+  assert(recalls_out->size() > recalls_out_index);
+
+  // Compute precision/recall for each instance in the sorted list of detections
+  int64_t true_positives_sum = 0, false_positives_sum = 0;
+  precisions->clear();
+  recalls->clear();
+  precisions->reserve(detection_sorted_indices.size());
+  recalls->reserve(detection_sorted_indices.size());
+  assert(!evaluations.empty() || detection_sorted_indices.empty());
+  for (auto detection_sorted_index : detection_sorted_indices) {
+    const ImageEvaluation& evaluation =
+        evaluations[evaluation_indices[detection_sorted_index]];
+    const auto num_detections =
+        evaluation.detection_matches.size() / num_iou_thresholds;
+    const auto detection_index = iou_threshold_index * num_detections +
+        image_detection_indices[detection_sorted_index];
+    assert(evaluation.detection_matches.size() > detection_index);
+    assert(evaluation.detection_ignores.size() > detection_index);
+    const int64_t detection_match =
+        evaluation.detection_matches[detection_index];
+    const bool detection_ignores =
+        evaluation.detection_ignores[detection_index];
+    const auto true_positive = detection_match > 0 && !detection_ignores;
+    const auto false_positive = detection_match == 0 && !detection_ignores;
+    if (true_positive) {
+      ++true_positives_sum;
+    }
+    if (false_positive) {
+      ++false_positives_sum;
+    }
+
+    const double recall =
+        static_cast<double>(true_positives_sum) / num_valid_ground_truth;
+    recalls->push_back(recall);
+    const int64_t num_valid_detections =
+        true_positives_sum + false_positives_sum;
+    const double precision = num_valid_detections > 0
+        ? static_cast<double>(true_positives_sum) / num_valid_detections
+        : 0.0;
+    precisions->push_back(precision);
+  }
+
+  (*recalls_out)[recalls_out_index] = !recalls->empty() ? recalls->back() : 0;
+
+  for (int64_t i = static_cast<int64_t>(precisions->size()) - 1; i > 0; --i) {
+    if ((*precisions)[i] > (*precisions)[i - 1]) {
+      (*precisions)[i - 1] = (*precisions)[i];
+    }
+  }
+
+  // Sample the per instance precision/recall list at each recall threshold
+  for (size_t r = 0; r < recall_thresholds.size(); ++r) {
+    // first index in recalls >= recall_thresholds[r]
+    std::vector<double>::iterator low = std::lower_bound(
+        recalls->begin(), recalls->end(), recall_thresholds[r]);
+    size_t precisions_index = low - recalls->begin();
+
+    const auto results_ind = precisions_out_index + r * precisions_out_stride;
+    assert(results_ind < precisions_out->size());
+    assert(results_ind < scores_out->size());
+    if (precisions_index < precisions->size()) {
+      (*precisions_out)[results_ind] = (*precisions)[precisions_index];
+      (*scores_out)[results_ind] =
+          detection_scores[detection_sorted_indices[precisions_index]];
+    } else {
+      (*precisions_out)[results_ind] = 0;
+      (*scores_out)[results_ind] = 0;
+    }
+  }
+}
+py::dict Accumulate(
+    const py::object& params,
+    const std::vector<ImageEvaluation>& evaluations) {
+  const std::vector<double> recall_thresholds =
+      list_to_vec<double>(params.attr("recThrs"));
+  const std::vector<int> max_detections =
+      list_to_vec<int>(params.attr("maxDets"));
+  const int num_iou_thresholds = py::len(params.attr("iouThrs"));
+  const int num_recall_thresholds = py::len(params.attr("recThrs"));
+  const int num_categories = params.attr("useCats").cast<int>() == 1
+      ? py::len(params.attr("catIds"))
+      : 1;
+  const int num_area_ranges = py::len(params.attr("areaRng"));
+  const int num_max_detections = py::len(params.attr("maxDets"));
+  const int num_images = py::len(params.attr("imgIds"));
+
+  std::vector<double> precisions_out(
+      num_iou_thresholds * num_recall_thresholds * num_categories *
+          num_area_ranges * num_max_detections,
+      -1);
+  std::vector<double> recalls_out(
+      num_iou_thresholds * num_categories * num_area_ranges *
+          num_max_detections,
+      -1);
+  std::vector<double> scores_out(
+      num_iou_thresholds * num_recall_thresholds * num_categories *
+          num_area_ranges * num_max_detections,
+      -1);
+
+  // Consider the list of all detected instances in the entire dataset in one
+  // large list. evaluation_indices, detection_scores,
+  // image_detection_indices, and detection_sorted_indices all have the same
+  // length as this list, such that each entry corresponds to one detected
+  // instance
+  std::vector<uint64_t> evaluation_indices; // indices into evaluations[]
+  std::vector<double> detection_scores; // detection scores of each instance
+  std::vector<uint64_t> detection_sorted_indices; // sorted indices of all
+                                                  // instances in the dataset
+  std::vector<uint64_t>
+      image_detection_indices; // indices into the list of detected instances in
+                               // the same image as each instance
+  std::vector<double> precisions, recalls;
+
+  for (auto c = 0; c < num_categories; ++c) {
+    for (auto a = 0; a < num_area_ranges; ++a) {
+      for (auto m = 0; m < num_max_detections; ++m) {
+        // The COCO PythonAPI assumes evaluations[] (the return value of
+        // COCOeval::EvaluateImages()) is one long list storing results for each
+        // combination of category, area range, and image id, with categories in
+        // the outermost loop and images in the innermost loop.
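+        // (So the block for this (c, a) pair starts at
+        // evaluations[c * num_area_ranges * num_images + a * num_images],
+        // with one ImageEvaluation per image; this mirrors the results_all
+        // indexing used in EvaluateImages() above.)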
+        const int64_t evaluations_index =
+            c * num_area_ranges * num_images + a * num_images;
+        int num_valid_ground_truth = BuildSortedDetectionList(
+            evaluations,
+            evaluations_index,
+            num_images,
+            max_detections[m],
+            &evaluation_indices,
+            &detection_scores,
+            &detection_sorted_indices,
+            &image_detection_indices);
+
+        if (num_valid_ground_truth == 0) {
+          continue;
+        }
+
+        for (auto t = 0; t < num_iou_thresholds; ++t) {
+          // recalls_out is a flattened vector representing a
+          // num_iou_thresholds X num_categories X num_area_ranges X
+          // num_max_detections matrix
+          const int64_t recalls_out_index =
+              t * num_categories * num_area_ranges * num_max_detections +
+              c * num_area_ranges * num_max_detections +
+              a * num_max_detections + m;
+
+          // precisions_out and scores_out are flattened vectors
+          // representing a num_iou_thresholds X num_recall_thresholds X
+          // num_categories X num_area_ranges X num_max_detections matrix
+          const int64_t precisions_out_stride =
+              num_categories * num_area_ranges * num_max_detections;
+          const int64_t precisions_out_index = t * num_recall_thresholds *
+                  num_categories * num_area_ranges * num_max_detections +
+              c * num_area_ranges * num_max_detections +
+              a * num_max_detections + m;
+
+          ComputePrecisionRecallCurve(
+              precisions_out_index,
+              precisions_out_stride,
+              recalls_out_index,
+              recall_thresholds,
+              t,
+              num_iou_thresholds,
+              num_valid_ground_truth,
+              evaluations,
+              evaluation_indices,
+              detection_scores,
+              detection_sorted_indices,
+              image_detection_indices,
+              &precisions,
+              &recalls,
+              &precisions_out,
+              &scores_out,
+              &recalls_out);
+        }
+      }
+    }
+  }
+
+  time_t rawtime;
+  struct tm local_time;
+  std::array<char, 200> buffer;
+  time(&rawtime);
+#ifdef _WIN32
+  localtime_s(&local_time, &rawtime);
+#else
+  localtime_r(&rawtime, &local_time);
+#endif
+  strftime(buffer.data(), 200, "%Y-%m-%d %H:%M:%S", &local_time);
+  return py::dict(
+      "params"_a = params,
+      "counts"_a = std::vector<int64_t>(
+          {num_iou_thresholds,
+           num_recall_thresholds,
+           num_categories,
+           num_area_ranges,
+           num_max_detections}),
+      "date"_a = buffer,
+      "precision"_a = precisions_out,
+      "recall"_a = recalls_out,
+      "scores"_a = scores_out);
+}
+
+} // namespace COCOeval
+
+} // namespace detectron2
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/cocoeval/cocoeval.h b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/cocoeval/cocoeval.h
new file mode 100644
index 0000000..db246e4
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/cocoeval/cocoeval.h
@@ -0,0 +1,88 @@
+// Copyright (c) Facebook, Inc. and its affiliates.
+#pragma once
+
+#include <pybind11/numpy.h>
+#include <pybind11/pybind11.h>
+#include <pybind11/stl.h>
+#include <pybind11/stl_bind.h>
+#include <vector>
+
+namespace py = pybind11;
+
+namespace detectron2 {
+
+namespace COCOeval {
+
+// Annotation data for a single object instance in an image
+struct InstanceAnnotation {
+  InstanceAnnotation(
+      uint64_t id,
+      double score,
+      double area,
+      bool is_crowd,
+      bool ignore)
+      : id{id}, score{score}, area{area}, is_crowd{is_crowd}, ignore{ignore} {}
+  uint64_t id;
+  double score = 0.;
+  double area = 0.;
+  bool is_crowd = false;
+  bool ignore = false;
+};
+
+// Stores intermediate results for evaluating detection results for a single
+// image that has D detected instances and G ground truth instances. This stores
+// matches between detected and ground truth instances
+struct ImageEvaluation {
+  // For each of the D detected instances, the id of the matched ground truth
+  // instance, or 0 if unmatched
+  std::vector<uint64_t> detection_matches;
+
+  // The detection score of each of the D detected instances
+  std::vector<double> detection_scores;
+
+  // Marks whether or not each of G instances was ignored from evaluation (e.g.,
+  // because it's outside area_range)
+  std::vector<bool> ground_truth_ignores;
+
+  // Marks whether or not each of D instances was ignored from evaluation (e.g.,
+  // because it's outside aRng)
+  std::vector<bool> detection_ignores;
+};
+
+template <class T>
+using ImageCategoryInstances = std::vector<std::vector<std::vector<T>>>;
+
+// C++ implementation of COCO API cocoeval.py::COCOeval.evaluateImg(). For each
+// combination of image, category, area range settings, and IOU thresholds to
+// evaluate, it matches detected instances to ground truth instances and stores
+// the results into a vector of ImageEvaluation results, which will be
+// interpreted by the COCOeval::Accumulate() function to produce precision-recall
+// curves. The parameters of nested vectors have the following semantics:
+//   image_category_ious[i][c][d][g] is the intersection over union of the d'th
+//     detected instance and g'th ground truth instance of
+//     category category_ids[c] in image image_ids[i]
+//   image_category_ground_truth_instances[i][c] is a vector of ground truth
+//     instances in image image_ids[i] of category category_ids[c]
+//   image_category_detection_instances[i][c] is a vector of detected
+//     instances in image image_ids[i] of category category_ids[c]
+std::vector<ImageEvaluation> EvaluateImages(
+    const std::vector<std::array<double, 2>>& area_ranges, // vector of 2-tuples
+    int max_detections,
+    const std::vector<double>& iou_thresholds,
+    const ImageCategoryInstances<std::vector<double>>& image_category_ious,
+    const ImageCategoryInstances<InstanceAnnotation>&
+        image_category_ground_truth_instances,
+    const ImageCategoryInstances<InstanceAnnotation>&
+        image_category_detection_instances);
+
+// C++ implementation of COCOeval.accumulate(), which generates precision
+// recall curves for each set of category, IOU threshold, detection area range,
+// and max number of detections parameters. It is assumed that the parameter
+// evaluations is the return value of the function COCOeval::EvaluateImages(),
+// which was called with the same parameter settings params
+py::dict Accumulate(
+    const py::object& params,
+    const std::vector<ImageEvaluation>& evaluations);
+
+} // namespace COCOeval
+} // namespace detectron2
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/cuda_version.cu b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/cuda_version.cu
new file mode 100644
index 0000000..6dfe1b9
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/cuda_version.cu
@@ -0,0 +1,26 @@
+// Copyright (c) Facebook, Inc. and its affiliates.
+
+#include <cuda_runtime_api.h>
+
+namespace detectron2 {
+int get_cudart_version() {
+// Not a ROCM platform: Either HIP is not used, or
+// it is used, but platform is not ROCM (i.e. it is CUDA)
+#if !defined(__HIP_PLATFORM_HCC__)
+  return CUDART_VERSION;
+#else
+  int version = 0;
+
+#if HIP_VERSION_MAJOR != 0
+  // Create a convention similar to that of CUDA, as assumed by other
+  // parts of the code.
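+  // (For example, HIP 4.2 yields 402, i.e.
+  // HIP_VERSION_MAJOR * 100 + HIP_VERSION_MINOR.)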
+ + version = HIP_VERSION_MINOR; + version += (HIP_VERSION_MAJOR * 100); +#else + hipRuntimeGetVersion(&version); +#endif + return version; +#endif +} +} // namespace detectron2 diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/deformable/deform_conv.h b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/deformable/deform_conv.h new file mode 100644 index 0000000..965c1bf --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/deformable/deform_conv.h @@ -0,0 +1,377 @@ +// Copyright (c) Facebook, Inc. and its affiliates. +#pragma once +#include + +namespace detectron2 { + +#if defined(WITH_CUDA) || defined(WITH_HIP) +int deform_conv_forward_cuda( + at::Tensor input, + at::Tensor weight, + at::Tensor offset, + at::Tensor output, + at::Tensor columns, + at::Tensor ones, + int kW, + int kH, + int dW, + int dH, + int padW, + int padH, + int dilationW, + int dilationH, + int group, + int deformable_group, + int im2col_step); + +int deform_conv_backward_input_cuda( + at::Tensor input, + at::Tensor offset, + at::Tensor gradOutput, + at::Tensor gradInput, + at::Tensor gradOffset, + at::Tensor weight, + at::Tensor columns, + int kW, + int kH, + int dW, + int dH, + int padW, + int padH, + int dilationW, + int dilationH, + int group, + int deformable_group, + int im2col_step); + +int deform_conv_backward_parameters_cuda( + at::Tensor input, + at::Tensor offset, + at::Tensor gradOutput, + at::Tensor gradWeight, // at::Tensor gradBias, + at::Tensor columns, + at::Tensor ones, + int kW, + int kH, + int dW, + int dH, + int padW, + int padH, + int dilationW, + int dilationH, + int group, + int deformable_group, + float scale, + int im2col_step); + +void modulated_deform_conv_cuda_forward( + at::Tensor input, + at::Tensor weight, + at::Tensor bias, + at::Tensor ones, + at::Tensor offset, + at::Tensor mask, + at::Tensor output, + at::Tensor columns, + int kernel_h, + int kernel_w, + const int stride_h, + const int stride_w, + const int pad_h, + const int pad_w, + const int dilation_h, + const int dilation_w, + const int group, + const int deformable_group, + const bool with_bias); + +void modulated_deform_conv_cuda_backward( + at::Tensor input, + at::Tensor weight, + at::Tensor bias, + at::Tensor ones, + at::Tensor offset, + at::Tensor mask, + at::Tensor columns, + at::Tensor grad_input, + at::Tensor grad_weight, + at::Tensor grad_bias, + at::Tensor grad_offset, + at::Tensor grad_mask, + at::Tensor grad_output, + int kernel_h, + int kernel_w, + int stride_h, + int stride_w, + int pad_h, + int pad_w, + int dilation_h, + int dilation_w, + int group, + int deformable_group, + const bool with_bias); + +#endif + +inline int deform_conv_forward( + at::Tensor input, + at::Tensor weight, + at::Tensor offset, + at::Tensor output, + at::Tensor columns, + at::Tensor ones, + int kW, + int kH, + int dW, + int dH, + int padW, + int padH, + int dilationW, + int dilationH, + int group, + int deformable_group, + int im2col_step) { + if (input.is_cuda()) { +#if defined(WITH_CUDA) || defined(WITH_HIP) + TORCH_CHECK(weight.is_cuda(), "weight tensor is not on GPU!"); + TORCH_CHECK(offset.is_cuda(), "offset tensor is not on GPU!"); + return deform_conv_forward_cuda( + input, + weight, + offset, + output, + columns, + ones, + kW, + kH, + dW, + dH, + padW, + padH, + dilationW, + dilationH, + group, + deformable_group, + im2col_step); +#else + AT_ERROR("Detectron2 is not compiled with GPU support!"); +#endif + } + AT_ERROR("This operator is not implemented on CPU"); 
+} + +inline int deform_conv_backward_input( + at::Tensor input, + at::Tensor offset, + at::Tensor gradOutput, + at::Tensor gradInput, + at::Tensor gradOffset, + at::Tensor weight, + at::Tensor columns, + int kW, + int kH, + int dW, + int dH, + int padW, + int padH, + int dilationW, + int dilationH, + int group, + int deformable_group, + int im2col_step) { + if (gradOutput.is_cuda()) { +#if defined(WITH_CUDA) || defined(WITH_HIP) + TORCH_CHECK(input.is_cuda(), "input tensor is not on GPU!"); + TORCH_CHECK(weight.is_cuda(), "weight tensor is not on GPU!"); + TORCH_CHECK(offset.is_cuda(), "offset tensor is not on GPU!"); + return deform_conv_backward_input_cuda( + input, + offset, + gradOutput, + gradInput, + gradOffset, + weight, + columns, + kW, + kH, + dW, + dH, + padW, + padH, + dilationW, + dilationH, + group, + deformable_group, + im2col_step); +#else + AT_ERROR("Detectron2 is not compiled with GPU support!"); +#endif + } + AT_ERROR("This operator is not implemented on CPU"); +} + +inline int deform_conv_backward_filter( + at::Tensor input, + at::Tensor offset, + at::Tensor gradOutput, + at::Tensor gradWeight, // at::Tensor gradBias, + at::Tensor columns, + at::Tensor ones, + int kW, + int kH, + int dW, + int dH, + int padW, + int padH, + int dilationW, + int dilationH, + int group, + int deformable_group, + float scale, + int im2col_step) { + if (gradOutput.is_cuda()) { +#if defined(WITH_CUDA) || defined(WITH_HIP) + TORCH_CHECK(input.is_cuda(), "input tensor is not on GPU!"); + TORCH_CHECK(offset.is_cuda(), "offset tensor is not on GPU!"); + return deform_conv_backward_parameters_cuda( + input, + offset, + gradOutput, + gradWeight, + columns, + ones, + kW, + kH, + dW, + dH, + padW, + padH, + dilationW, + dilationH, + group, + deformable_group, + scale, + im2col_step); +#else + AT_ERROR("Detectron2 is not compiled with GPU support!"); +#endif + } + AT_ERROR("This operator is not implemented on CPU"); +} + +inline void modulated_deform_conv_forward( + at::Tensor input, + at::Tensor weight, + at::Tensor bias, + at::Tensor ones, + at::Tensor offset, + at::Tensor mask, + at::Tensor output, + at::Tensor columns, + int kernel_h, + int kernel_w, + const int stride_h, + const int stride_w, + const int pad_h, + const int pad_w, + const int dilation_h, + const int dilation_w, + const int group, + const int deformable_group, + const bool with_bias) { + if (input.is_cuda()) { +#if defined(WITH_CUDA) || defined(WITH_HIP) + TORCH_CHECK(weight.is_cuda(), "weight tensor is not on GPU!"); + TORCH_CHECK(bias.is_cuda(), "bias tensor is not on GPU!"); + TORCH_CHECK(offset.is_cuda(), "offset tensor is not on GPU!"); + return modulated_deform_conv_cuda_forward( + input, + weight, + bias, + ones, + offset, + mask, + output, + columns, + kernel_h, + kernel_w, + stride_h, + stride_w, + pad_h, + pad_w, + dilation_h, + dilation_w, + group, + deformable_group, + with_bias); +#else + AT_ERROR("Detectron2 is not compiled with GPU support!"); +#endif + } + AT_ERROR("This operator is not implemented on CPU"); +} + +inline void modulated_deform_conv_backward( + at::Tensor input, + at::Tensor weight, + at::Tensor bias, + at::Tensor ones, + at::Tensor offset, + at::Tensor mask, + at::Tensor columns, + at::Tensor grad_input, + at::Tensor grad_weight, + at::Tensor grad_bias, + at::Tensor grad_offset, + at::Tensor grad_mask, + at::Tensor grad_output, + int kernel_h, + int kernel_w, + int stride_h, + int stride_w, + int pad_h, + int pad_w, + int dilation_h, + int dilation_w, + int group, + int deformable_group, + const bool 
with_bias) { + if (grad_output.is_cuda()) { +#if defined(WITH_CUDA) || defined(WITH_HIP) + TORCH_CHECK(input.is_cuda(), "input tensor is not on GPU!"); + TORCH_CHECK(weight.is_cuda(), "weight tensor is not on GPU!"); + TORCH_CHECK(bias.is_cuda(), "bias tensor is not on GPU!"); + TORCH_CHECK(offset.is_cuda(), "offset tensor is not on GPU!"); + return modulated_deform_conv_cuda_backward( + input, + weight, + bias, + ones, + offset, + mask, + columns, + grad_input, + grad_weight, + grad_bias, + grad_offset, + grad_mask, + grad_output, + kernel_h, + kernel_w, + stride_h, + stride_w, + pad_h, + pad_w, + dilation_h, + dilation_w, + group, + deformable_group, + with_bias); +#else + AT_ERROR("Detectron2 is not compiled with GPU support!"); +#endif + } + AT_ERROR("This operator is not implemented on CPU"); +} + +} // namespace detectron2 diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/deformable/deform_conv_cuda.cu b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/deformable/deform_conv_cuda.cu new file mode 100644 index 0000000..2072bb8 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/deformable/deform_conv_cuda.cu @@ -0,0 +1,1223 @@ +// Copyright (c) Facebook, Inc. and its affiliates. + +// modified from +// https://github.com/open-mmlab/mmdetection/blob/master/mmdet/ops/dcn/src/deform_conv_cuda.cpp +// Original license: Apache 2.0 + +// modify from +// https://github.com/chengdazhi/Deformable-Convolution-V2-PyTorch/blob/mmdetection/mmdet/ops/dcn/src/deform_conv_cuda.c +// Original license: Apache 2.0 + +#include + +#include "deform_conv.h" + +#include +#include + +namespace detectron2 { + +void deformable_im2col( + const at::Tensor data_im, + const at::Tensor data_offset, + const int channels, + const int height, + const int width, + const int ksize_h, + const int ksize_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int parallel_imgs, + const int deformable_group, + at::Tensor data_col); + +void deformable_col2im( + const at::Tensor data_col, + const at::Tensor data_offset, + const int channels, + const int height, + const int width, + const int ksize_h, + const int ksize_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int parallel_imgs, + const int deformable_group, + at::Tensor grad_im); + +void deformable_col2im_coord( + const at::Tensor data_col, + const at::Tensor data_im, + const at::Tensor data_offset, + const int channels, + const int height, + const int width, + const int ksize_h, + const int ksize_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int parallel_imgs, + const int deformable_group, + at::Tensor grad_offset); + +void modulated_deformable_im2col_cuda( + const at::Tensor data_im, + const at::Tensor data_offset, + const at::Tensor data_mask, + const int batch_size, + const int channels, + const int height_im, + const int width_im, + const int height_col, + const int width_col, + const int kernel_h, + const int kenerl_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int deformable_group, + at::Tensor data_col); + +void modulated_deformable_col2im_cuda( + const at::Tensor data_col, + const at::Tensor data_offset, + const at::Tensor 
data_mask, + const int batch_size, + const int channels, + const int height_im, + const int width_im, + const int height_col, + const int width_col, + const int kernel_h, + const int kenerl_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int deformable_group, + at::Tensor grad_im); + +void modulated_deformable_col2im_coord_cuda( + const at::Tensor data_col, + const at::Tensor data_im, + const at::Tensor data_offset, + const at::Tensor data_mask, + const int batch_size, + const int channels, + const int height_im, + const int width_im, + const int height_col, + const int width_col, + const int kernel_h, + const int kenerl_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int deformable_group, + at::Tensor grad_offset, + at::Tensor grad_mask); + +void shape_check( + at::Tensor input, + at::Tensor offset, + at::Tensor* gradOutput, + at::Tensor weight, + int kH, + int kW, + int dH, + int dW, + int padH, + int padW, + int dilationH, + int dilationW, + int group, + int deformable_group) { + TORCH_CHECK( + weight.ndimension() == 4, + "4D weight tensor (nOutputPlane,nInputPlane,kH,kW) expected, " + "but got: %s", + weight.ndimension()); + + TORCH_CHECK(weight.is_contiguous(), "weight tensor has to be contiguous"); + + TORCH_CHECK( + kW > 0 && kH > 0, + "kernel size should be greater than zero, but got kH: %d kW: %d", + kH, + kW); + + TORCH_CHECK( + (weight.size(2) == kH && weight.size(3) == kW), + "kernel size should be consistent with weight, ", + "but got kH: %d kW: %d weight.size(2): %d, weight.size(3): %d", + kH, + kW, + weight.size(2), + weight.size(3)); + + TORCH_CHECK( + dW > 0 && dH > 0, + "stride should be greater than zero, but got dH: %d dW: %d", + dH, + dW); + + TORCH_CHECK( + dilationW > 0 && dilationH > 0, + "dilation should be greater than 0, but got dilationH: %d dilationW: %d", + dilationH, + dilationW); + + int ndim = input.ndimension(); + int dimf = 0; + int dimh = 1; + int dimw = 2; + + if (ndim == 4) { + dimf++; + dimh++; + dimw++; + } + + TORCH_CHECK( + ndim == 3 || ndim == 4, + "3D or 4D input tensor expected but got: %s", + ndim); + + long nInputPlane = weight.size(1) * group; + long inputHeight = input.size(dimh); + long inputWidth = input.size(dimw); + long nOutputPlane = weight.size(0); + long outputHeight = + (inputHeight + 2 * padH - (dilationH * (kH - 1) + 1)) / dH + 1; + long outputWidth = + (inputWidth + 2 * padW - (dilationW * (kW - 1) + 1)) / dW + 1; + + TORCH_CHECK( + nInputPlane % deformable_group == 0, + "input channels must divide deformable group size"); + + if (outputWidth < 1 || outputHeight < 1) + AT_ERROR( + "Given input size: (%ld x %ld x %ld). " + "Calculated output size: (%ld x %ld x %ld). 
Output size is too small", + nInputPlane, + inputHeight, + inputWidth, + nOutputPlane, + outputHeight, + outputWidth); + + TORCH_CHECK( + input.size(1) == nInputPlane, + "invalid number of input planes, expected: %d, but got: %d", + nInputPlane, + input.size(1)); + + TORCH_CHECK( + (inputHeight + 2 * padH >= kH && inputWidth + 2 * padW >= kW), + "input image is smaller than kernel"); + + TORCH_CHECK( + (offset.size(2) == outputHeight && offset.size(3) == outputWidth), + "invalid spatial size of offset, expected height: %d width: %d, but " + "got height: %d width: %d", + outputHeight, + outputWidth, + offset.size(2), + offset.size(3)); + + TORCH_CHECK( + (offset.size(1) == deformable_group * 2 * kH * kW), + "invalid number of channels of offset"); + + if (gradOutput != NULL) { + TORCH_CHECK( + gradOutput->size(dimf) == nOutputPlane, + "invalid number of gradOutput planes, expected: %d, but got: %d", + nOutputPlane, + gradOutput->size(dimf)); + + TORCH_CHECK( + (gradOutput->size(dimh) == outputHeight && + gradOutput->size(dimw) == outputWidth), + "invalid size of gradOutput, expected height: %d width: %d , but " + "got height: %d width: %d", + outputHeight, + outputWidth, + gradOutput->size(dimh), + gradOutput->size(dimw)); + } +} + +int deform_conv_forward_cuda( + at::Tensor input, + at::Tensor weight, + at::Tensor offset, + at::Tensor output, + at::Tensor columns, + at::Tensor ones, + int kW, + int kH, + int dW, + int dH, + int padW, + int padH, + int dilationW, + int dilationH, + int group, + int deformable_group, + int im2col_step) { + // todo: resize columns to include im2col: done + // todo: add im2col_step as input + // todo: add new output buffer and transpose it to output (or directly + // transpose output) todo: possibly change data indexing because of + // parallel_imgs + + shape_check( + input, + offset, + NULL, + weight, + kH, + kW, + dH, + dW, + padH, + padW, + dilationH, + dilationW, + group, + deformable_group); + + input = input.contiguous(); + offset = offset.contiguous(); + weight = weight.contiguous(); + + int batch = 1; + if (input.ndimension() == 3) { + // Force batch + batch = 0; + input.unsqueeze_(0); + offset.unsqueeze_(0); + } + + // todo: assert batchsize dividable by im2col_step + + long batchSize = input.size(0); + long nInputPlane = input.size(1); + long inputHeight = input.size(2); + long inputWidth = input.size(3); + + long nOutputPlane = weight.size(0); + + long outputWidth = + (inputWidth + 2 * padW - (dilationW * (kW - 1) + 1)) / dW + 1; + long outputHeight = + (inputHeight + 2 * padH - (dilationH * (kH - 1) + 1)) / dH + 1; + + TORCH_CHECK((offset.size(0) == batchSize), "invalid batch size of offset"); + + output = output.view( + {batchSize / im2col_step, + im2col_step, + nOutputPlane, + outputHeight, + outputWidth}); + columns = at::zeros( + {nInputPlane * kW * kH, im2col_step * outputHeight * outputWidth}, + input.options()); + + if (ones.ndimension() != 2 || + ones.size(0) * ones.size(1) < outputHeight * outputWidth) { + ones = at::ones({outputHeight, outputWidth}, input.options()); + } + + input = input.view( + {batchSize / im2col_step, + im2col_step, + nInputPlane, + inputHeight, + inputWidth}); + offset = offset.view( + {batchSize / im2col_step, + im2col_step, + deformable_group * 2 * kH * kW, + outputHeight, + outputWidth}); + + at::Tensor output_buffer = at::zeros( + {batchSize / im2col_step, + nOutputPlane, + im2col_step * outputHeight, + outputWidth}, + output.options()); + + output_buffer = output_buffer.view( + {output_buffer.size(0), + group, + 
output_buffer.size(1) / group, + output_buffer.size(2), + output_buffer.size(3)}); + + for (int elt = 0; elt < batchSize / im2col_step; elt++) { + deformable_im2col( + input[elt], + offset[elt], + nInputPlane, + inputHeight, + inputWidth, + kH, + kW, + padH, + padW, + dH, + dW, + dilationH, + dilationW, + im2col_step, + deformable_group, + columns); + + columns = columns.view({group, columns.size(0) / group, columns.size(1)}); + weight = weight.view( + {group, + weight.size(0) / group, + weight.size(1), + weight.size(2), + weight.size(3)}); + + for (int g = 0; g < group; g++) { + output_buffer[elt][g] = output_buffer[elt][g] + .flatten(1) + .addmm_(weight[g].flatten(1), columns[g]) + .view_as(output_buffer[elt][g]); + } + } + + output_buffer = output_buffer.view( + {output_buffer.size(0), + output_buffer.size(1) * output_buffer.size(2), + output_buffer.size(3), + output_buffer.size(4)}); + + output_buffer = output_buffer.view( + {batchSize / im2col_step, + nOutputPlane, + im2col_step, + outputHeight, + outputWidth}); + output_buffer.transpose_(1, 2); + output.copy_(output_buffer); + output = output.view({batchSize, nOutputPlane, outputHeight, outputWidth}); + + input = input.view({batchSize, nInputPlane, inputHeight, inputWidth}); + offset = offset.view( + {batchSize, deformable_group * 2 * kH * kW, outputHeight, outputWidth}); + + if (batch == 0) { + output = output.view({nOutputPlane, outputHeight, outputWidth}); + input = input.view({nInputPlane, inputHeight, inputWidth}); + offset = offset.view({offset.size(1), offset.size(2), offset.size(3)}); + } + + return 1; +} + +int deform_conv_backward_input_cuda( + at::Tensor input, + at::Tensor offset, + at::Tensor gradOutput, + at::Tensor gradInput, + at::Tensor gradOffset, + at::Tensor weight, + at::Tensor columns, + int kW, + int kH, + int dW, + int dH, + int padW, + int padH, + int dilationW, + int dilationH, + int group, + int deformable_group, + int im2col_step) { + shape_check( + input, + offset, + &gradOutput, + weight, + kH, + kW, + dH, + dW, + padH, + padW, + dilationH, + dilationW, + group, + deformable_group); + + input = input.contiguous(); + offset = offset.contiguous(); + gradOutput = gradOutput.contiguous(); + weight = weight.contiguous(); + + int batch = 1; + + if (input.ndimension() == 3) { + // Force batch + batch = 0; + input = input.view({1, input.size(0), input.size(1), input.size(2)}); + offset = offset.view({1, offset.size(0), offset.size(1), offset.size(2)}); + gradOutput = gradOutput.view( + {1, gradOutput.size(0), gradOutput.size(1), gradOutput.size(2)}); + } + + long batchSize = input.size(0); + long nInputPlane = input.size(1); + long inputHeight = input.size(2); + long inputWidth = input.size(3); + + long nOutputPlane = weight.size(0); + + long outputWidth = + (inputWidth + 2 * padW - (dilationW * (kW - 1) + 1)) / dW + 1; + long outputHeight = + (inputHeight + 2 * padH - (dilationH * (kH - 1) + 1)) / dH + 1; + + TORCH_CHECK((offset.size(0) == batchSize), 3, "invalid batch size of offset"); + gradInput = gradInput.view({batchSize, nInputPlane, inputHeight, inputWidth}); + columns = at::zeros( + {nInputPlane * kW * kH, im2col_step * outputHeight * outputWidth}, + input.options()); + + // change order of grad output + gradOutput = gradOutput.view( + {batchSize / im2col_step, + im2col_step, + nOutputPlane, + outputHeight, + outputWidth}); + gradOutput.transpose_(1, 2); + + gradInput = gradInput.view( + {batchSize / im2col_step, + im2col_step, + nInputPlane, + inputHeight, + inputWidth}); + input = input.view( + 
{batchSize / im2col_step, + im2col_step, + nInputPlane, + inputHeight, + inputWidth}); + gradOffset = gradOffset.view( + {batchSize / im2col_step, + im2col_step, + deformable_group * 2 * kH * kW, + outputHeight, + outputWidth}); + offset = offset.view( + {batchSize / im2col_step, + im2col_step, + deformable_group * 2 * kH * kW, + outputHeight, + outputWidth}); + + for (int elt = 0; elt < batchSize / im2col_step; elt++) { + // divide into groups + columns = columns.view({group, columns.size(0) / group, columns.size(1)}); + weight = weight.view( + {group, + weight.size(0) / group, + weight.size(1), + weight.size(2), + weight.size(3)}); + gradOutput = gradOutput.view( + {gradOutput.size(0), + group, + gradOutput.size(1) / group, + gradOutput.size(2), + gradOutput.size(3), + gradOutput.size(4)}); + + for (int g = 0; g < group; g++) { + columns[g] = columns[g].addmm_( + weight[g].flatten(1).transpose(0, 1), + gradOutput[elt][g].flatten(1), + 0.0f, + 1.0f); + } + + columns = + columns.view({columns.size(0) * columns.size(1), columns.size(2)}); + gradOutput = gradOutput.view( + {gradOutput.size(0), + gradOutput.size(1) * gradOutput.size(2), + gradOutput.size(3), + gradOutput.size(4), + gradOutput.size(5)}); + + deformable_col2im_coord( + columns, + input[elt], + offset[elt], + nInputPlane, + inputHeight, + inputWidth, + kH, + kW, + padH, + padW, + dH, + dW, + dilationH, + dilationW, + im2col_step, + deformable_group, + gradOffset[elt]); + + deformable_col2im( + columns, + offset[elt], + nInputPlane, + inputHeight, + inputWidth, + kH, + kW, + padH, + padW, + dH, + dW, + dilationH, + dilationW, + im2col_step, + deformable_group, + gradInput[elt]); + } + + gradOutput.transpose_(1, 2); + gradOutput = + gradOutput.view({batchSize, nOutputPlane, outputHeight, outputWidth}); + + gradInput = gradInput.view({batchSize, nInputPlane, inputHeight, inputWidth}); + input = input.view({batchSize, nInputPlane, inputHeight, inputWidth}); + gradOffset = gradOffset.view( + {batchSize, deformable_group * 2 * kH * kW, outputHeight, outputWidth}); + offset = offset.view( + {batchSize, deformable_group * 2 * kH * kW, outputHeight, outputWidth}); + + if (batch == 0) { + gradOutput = gradOutput.view({nOutputPlane, outputHeight, outputWidth}); + input = input.view({nInputPlane, inputHeight, inputWidth}); + gradInput = gradInput.view({nInputPlane, inputHeight, inputWidth}); + offset = offset.view({offset.size(1), offset.size(2), offset.size(3)}); + gradOffset = + gradOffset.view({offset.size(1), offset.size(2), offset.size(3)}); + } + + return 1; +} + +int deform_conv_backward_parameters_cuda( + at::Tensor input, + at::Tensor offset, + at::Tensor gradOutput, + at::Tensor gradWeight, // at::Tensor gradBias, + at::Tensor columns, + at::Tensor ones, + int kW, + int kH, + int dW, + int dH, + int padW, + int padH, + int dilationW, + int dilationH, + int group, + int deformable_group, + float scale, + int im2col_step) { + // todo: transpose and reshape outGrad + // todo: reshape columns + // todo: add im2col_step as input + + shape_check( + input, + offset, + &gradOutput, + gradWeight, + kH, + kW, + dH, + dW, + padH, + padW, + dilationH, + dilationW, + group, + deformable_group); + + input = input.contiguous(); + offset = offset.contiguous(); + gradOutput = gradOutput.contiguous(); + + int batch = 1; + + if (input.ndimension() == 3) { + // Force batch + batch = 0; + input = input.view( + at::IntList({1, input.size(0), input.size(1), input.size(2)})); + gradOutput = gradOutput.view( + {1, gradOutput.size(0), gradOutput.size(1), 
gradOutput.size(2)}); + } + + long batchSize = input.size(0); + long nInputPlane = input.size(1); + long inputHeight = input.size(2); + long inputWidth = input.size(3); + + long nOutputPlane = gradWeight.size(0); + + long outputWidth = + (inputWidth + 2 * padW - (dilationW * (kW - 1) + 1)) / dW + 1; + long outputHeight = + (inputHeight + 2 * padH - (dilationH * (kH - 1) + 1)) / dH + 1; + + TORCH_CHECK((offset.size(0) == batchSize), "invalid batch size of offset"); + + columns = at::zeros( + {nInputPlane * kW * kH, im2col_step * outputHeight * outputWidth}, + input.options()); + + gradOutput = gradOutput.view( + {batchSize / im2col_step, + im2col_step, + nOutputPlane, + outputHeight, + outputWidth}); + gradOutput.transpose_(1, 2); + + at::Tensor gradOutputBuffer = at::zeros_like(gradOutput); + gradOutputBuffer = gradOutputBuffer.view( + {batchSize / im2col_step, + nOutputPlane, + im2col_step, + outputHeight, + outputWidth}); + gradOutputBuffer.copy_(gradOutput); + // gradOutput is not contiguous, so we do reshape (instead of view) next + gradOutputBuffer = gradOutputBuffer.reshape( + {batchSize / im2col_step, + nOutputPlane, + im2col_step * outputHeight, + outputWidth}); + + gradOutput.transpose_(1, 2); + gradOutput = + gradOutput.view({batchSize, nOutputPlane, outputHeight, outputWidth}); + + input = input.view( + {batchSize / im2col_step, + im2col_step, + nInputPlane, + inputHeight, + inputWidth}); + offset = offset.view( + {batchSize / im2col_step, + im2col_step, + deformable_group * 2 * kH * kW, + outputHeight, + outputWidth}); + + for (int elt = 0; elt < batchSize / im2col_step; elt++) { + deformable_im2col( + input[elt], + offset[elt], + nInputPlane, + inputHeight, + inputWidth, + kH, + kW, + padH, + padW, + dH, + dW, + dilationH, + dilationW, + im2col_step, + deformable_group, + columns); + + // divide into group + gradOutputBuffer = gradOutputBuffer.view( + {gradOutputBuffer.size(0), + group, + gradOutputBuffer.size(1) / group, + gradOutputBuffer.size(2), + gradOutputBuffer.size(3)}); + columns = columns.view({group, columns.size(0) / group, columns.size(1)}); + gradWeight = gradWeight.view( + {group, + gradWeight.size(0) / group, + gradWeight.size(1), + gradWeight.size(2), + gradWeight.size(3)}); + + for (int g = 0; g < group; g++) { + gradWeight[g] = gradWeight[g] + .flatten(1) + .addmm_( + gradOutputBuffer[elt][g].flatten(1), + columns[g].transpose(1, 0), + 1.0, + scale) + .view_as(gradWeight[g]); + } + gradOutputBuffer = gradOutputBuffer.view( + {gradOutputBuffer.size(0), + gradOutputBuffer.size(1) * gradOutputBuffer.size(2), + gradOutputBuffer.size(3), + gradOutputBuffer.size(4)}); + columns = + columns.view({columns.size(0) * columns.size(1), columns.size(2)}); + gradWeight = gradWeight.view( + {gradWeight.size(0) * gradWeight.size(1), + gradWeight.size(2), + gradWeight.size(3), + gradWeight.size(4)}); + } + + input = input.view({batchSize, nInputPlane, inputHeight, inputWidth}); + offset = offset.view( + {batchSize, deformable_group * 2 * kH * kW, outputHeight, outputWidth}); + + if (batch == 0) { + gradOutput = gradOutput.view({nOutputPlane, outputHeight, outputWidth}); + input = input.view({nInputPlane, inputHeight, inputWidth}); + } + + return 1; +} + +void modulated_deform_conv_cuda_forward( + at::Tensor input, + at::Tensor weight, + at::Tensor bias, + at::Tensor ones, + at::Tensor offset, + at::Tensor mask, + at::Tensor output, + at::Tensor columns, + int kernel_h, + int kernel_w, + const int stride_h, + const int stride_w, + const int pad_h, + const int pad_w, + const 
int dilation_h, + const int dilation_w, + const int group, + const int deformable_group, + const bool with_bias) { + shape_check( + input, + offset, + NULL, + weight, + kernel_h, + kernel_w, + stride_h, + stride_w, + pad_h, + pad_w, + dilation_h, + dilation_w, + group, + deformable_group); + + TORCH_CHECK(input.is_contiguous(), "input tensor has to be contiguous"); + TORCH_CHECK(weight.is_contiguous(), "weight tensor has to be contiguous"); + + const int batch = input.size(0); + const int channels = input.size(1); + const int height = input.size(2); + const int width = input.size(3); + + const int channels_out = weight.size(0); + const int channels_kernel = weight.size(1); + const int kernel_h_ = weight.size(2); + const int kernel_w_ = weight.size(3); + + if (kernel_h_ != kernel_h || kernel_w_ != kernel_w) + AT_ERROR( + "Input shape and kernel shape wont match: (%d x %d vs %d x %d).", + kernel_h_, + kernel_w, + kernel_h_, + kernel_w_); + if (channels != channels_kernel * group) + AT_ERROR( + "Input shape and kernel channels wont match: (%d vs %d).", + channels, + channels_kernel * group); + + const int height_out = + (height + 2 * pad_h - (dilation_h * (kernel_h - 1) + 1)) / stride_h + 1; + const int width_out = + (width + 2 * pad_w - (dilation_w * (kernel_w - 1) + 1)) / stride_w + 1; + + // mask shape check + TORCH_CHECK( + (mask.size(2) == height_out && mask.size(3) == width_out), + "invalid spatial size of mask, expected height: %d width: %d, but " + "got height: %d width: %d", + height_out, + width_out, + mask.size(2), + mask.size(3)); + + TORCH_CHECK( + (mask.size(1) == deformable_group * kernel_h * kernel_w), + "invalid number of channels of mask"); + + if (ones.ndimension() != 2 || + ones.size(0) * ones.size(1) < height_out * width_out) { + // Resize plane and fill with ones... 
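+    // (The all-ones plane is not consumed by this forward pass, which adds the
+    // bias by broadcasting below; the matching backward pass accumulates the
+    // bias gradient against it via addmm_.)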
+ ones = at::ones({height_out, width_out}, input.options()); + } + + // resize output + output = output.view({batch, channels_out, height_out, width_out}).zero_(); + // resize temporary columns + columns = at::zeros( + {channels * kernel_h * kernel_w, 1 * height_out * width_out}, + input.options()); + + output = output.view( + {output.size(0), + group, + output.size(1) / group, + output.size(2), + output.size(3)}); + + for (int b = 0; b < batch; b++) { + modulated_deformable_im2col_cuda( + input[b], + offset[b], + mask[b], + 1, + channels, + height, + width, + height_out, + width_out, + kernel_h, + kernel_w, + pad_h, + pad_w, + stride_h, + stride_w, + dilation_h, + dilation_w, + deformable_group, + columns); + + // divide into group + weight = weight.view( + {group, + weight.size(0) / group, + weight.size(1), + weight.size(2), + weight.size(3)}); + columns = columns.view({group, columns.size(0) / group, columns.size(1)}); + + for (int g = 0; g < group; g++) { + output[b][g] = output[b][g] + .flatten(1) + .addmm_(weight[g].flatten(1), columns[g]) + .view_as(output[b][g]); + } + + weight = weight.view( + {weight.size(0) * weight.size(1), + weight.size(2), + weight.size(3), + weight.size(4)}); + columns = + columns.view({columns.size(0) * columns.size(1), columns.size(2)}); + } + + output = output.view( + {output.size(0), + output.size(1) * output.size(2), + output.size(3), + output.size(4)}); + + if (with_bias) { + output += bias.view({1, bias.size(0), 1, 1}); + } +} + +void modulated_deform_conv_cuda_backward( + at::Tensor input, + at::Tensor weight, + at::Tensor bias, + at::Tensor ones, + at::Tensor offset, + at::Tensor mask, + at::Tensor columns, + at::Tensor grad_input, + at::Tensor grad_weight, + at::Tensor grad_bias, + at::Tensor grad_offset, + at::Tensor grad_mask, + at::Tensor grad_output, + int kernel_h, + int kernel_w, + int stride_h, + int stride_w, + int pad_h, + int pad_w, + int dilation_h, + int dilation_w, + int group, + int deformable_group, + const bool with_bias) { + shape_check( + input, + offset, + &grad_output, + weight, + kernel_h, + kernel_w, + stride_h, + stride_w, + pad_h, + pad_w, + dilation_h, + dilation_w, + group, + deformable_group); + + TORCH_CHECK(input.is_contiguous(), "input tensor has to be contiguous"); + TORCH_CHECK(weight.is_contiguous(), "weight tensor has to be contiguous"); + + const int batch = input.size(0); + const int channels = input.size(1); + const int height = input.size(2); + const int width = input.size(3); + + const int channels_kernel = weight.size(1); + const int kernel_h_ = weight.size(2); + const int kernel_w_ = weight.size(3); + if (kernel_h_ != kernel_h || kernel_w_ != kernel_w) + AT_ERROR( + "Input shape and kernel shape wont match: (%d x %d vs %d x %d).", + kernel_h_, + kernel_w, + kernel_h_, + kernel_w_); + if (channels != channels_kernel * group) + AT_ERROR( + "Input shape and kernel channels wont match: (%d vs %d).", + channels, + channels_kernel * group); + + const int height_out = + (height + 2 * pad_h - (dilation_h * (kernel_h - 1) + 1)) / stride_h + 1; + const int width_out = + (width + 2 * pad_w - (dilation_w * (kernel_w - 1) + 1)) / stride_w + 1; + + // mask shape check + TORCH_CHECK( + (mask.size(2) == height_out && mask.size(3) == width_out), + "invalid spatial size of mask, expected height: %d width: %d, but " + "got height: %d width: %d", + height_out, + width_out, + mask.size(2), + mask.size(3)); + + TORCH_CHECK( + (mask.size(1) == deformable_group * kernel_h * kernel_w), + "invalid number of channels of mask"); + + if 
(ones.ndimension() != 2 || + ones.size(0) * ones.size(1) < height_out * width_out) { + // Resize plane and fill with ones... + ones = at::ones({height_out, width_out}, input.options()); + } + + grad_input = grad_input.view({batch, channels, height, width}); + columns = at::zeros( + {channels * kernel_h * kernel_w, height_out * width_out}, + input.options()); + + grad_output = grad_output.view( + {grad_output.size(0), + group, + grad_output.size(1) / group, + grad_output.size(2), + grad_output.size(3)}); + + for (int b = 0; b < batch; b++) { + // divide int group + columns = columns.view({group, columns.size(0) / group, columns.size(1)}); + weight = weight.view( + {group, + weight.size(0) / group, + weight.size(1), + weight.size(2), + weight.size(3)}); + + for (int g = 0; g < group; g++) { + columns[g].addmm_( + weight[g].flatten(1).transpose(0, 1), + grad_output[b][g].flatten(1), + 0.0f, + 1.0f); + } + + columns = + columns.view({columns.size(0) * columns.size(1), columns.size(2)}); + weight = weight.view( + {weight.size(0) * weight.size(1), + weight.size(2), + weight.size(3), + weight.size(4)}); + + // gradient w.r.t. input coordinate data + modulated_deformable_col2im_coord_cuda( + columns, + input[b], + offset[b], + mask[b], + 1, + channels, + height, + width, + height_out, + width_out, + kernel_h, + kernel_w, + pad_h, + pad_w, + stride_h, + stride_w, + dilation_h, + dilation_w, + deformable_group, + grad_offset[b], + grad_mask[b]); + // gradient w.r.t. input data + modulated_deformable_col2im_cuda( + columns, + offset[b], + mask[b], + 1, + channels, + height, + width, + height_out, + width_out, + kernel_h, + kernel_w, + pad_h, + pad_w, + stride_h, + stride_w, + dilation_h, + dilation_w, + deformable_group, + grad_input[b]); + + // gradient w.r.t. 
weight, dWeight should accumulate across the batch and + // group + modulated_deformable_im2col_cuda( + input[b], + offset[b], + mask[b], + 1, + channels, + height, + width, + height_out, + width_out, + kernel_h, + kernel_w, + pad_h, + pad_w, + stride_h, + stride_w, + dilation_h, + dilation_w, + deformable_group, + columns); + + columns = columns.view({group, columns.size(0) / group, columns.size(1)}); + grad_weight = grad_weight.view( + {group, + grad_weight.size(0) / group, + grad_weight.size(1), + grad_weight.size(2), + grad_weight.size(3)}); + if (with_bias) + grad_bias = grad_bias.view({group, grad_bias.size(0) / group}); + + for (int g = 0; g < group; g++) { + grad_weight[g] = + grad_weight[g] + .flatten(1) + .addmm_(grad_output[b][g].flatten(1), columns[g].transpose(0, 1)) + .view_as(grad_weight[g]); + if (with_bias) { + grad_bias[g] = + grad_bias[g] + .view({-1, 1}) + .addmm_(grad_output[b][g].flatten(1), ones.view({-1, 1})) + .view(-1); + } + } + + columns = + columns.view({columns.size(0) * columns.size(1), columns.size(2)}); + grad_weight = grad_weight.view( + {grad_weight.size(0) * grad_weight.size(1), + grad_weight.size(2), + grad_weight.size(3), + grad_weight.size(4)}); + if (with_bias) + grad_bias = grad_bias.view({grad_bias.size(0) * grad_bias.size(1)}); + } + grad_output = grad_output.view( + {grad_output.size(0) * grad_output.size(1), + grad_output.size(2), + grad_output.size(3), + grad_output.size(4)}); +} + +} // namespace detectron2 diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/deformable/deform_conv_cuda_kernel.cu b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/deformable/deform_conv_cuda_kernel.cu new file mode 100644 index 0000000..f299c7a --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/deformable/deform_conv_cuda_kernel.cu @@ -0,0 +1,1288 @@ +// Copyright (c) Facebook, Inc. and its affiliates. + +// modified from +// https://github.com/open-mmlab/mmdetection/blob/master/mmdet/ops/dcn/src/deform_conv_cuda_kernel.cu +// Original license: Apache 2.0 +// clang-format off + +// modify from +// https://github.com/chengdazhi/Deformable-Convolution-V2-PyTorch/blob/mmdetection/mmdet/ops/dcn/src/deform_conv_cuda_kernel.cu + +/*! + ******************* BEGIN Caffe Copyright Notice and Disclaimer ***************** + * + * COPYRIGHT + * + * All contributions by the University of California: + * Copyright (c) 2014-2017 The Regents of the University of California (Regents) + * All rights reserved. + * + * All other contributions: + * Copyright (c) 2014-2017, the respective contributors + * All rights reserved. + * + * Caffe uses a shared copyright model: each contributor holds copyright over + * their contributions to Caffe. The project versioning records all such + * contribution and copyright details. If a contributor wants to further mark + * their specific copyright on a particular contribution, they should indicate + * their copyright solely in the commit message of the change when it is + * committed. + * + * LICENSE + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * 2. 
Redistributions in binary form must reproduce the above copyright notice,
+ * this list of conditions and the following disclaimer in the documentation
+ * and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
+ *AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ *IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+ * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
+ *FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ *DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
+ *SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
+ *CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
+ *OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+ *OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+ *
+ * CONTRIBUTION AGREEMENT
+ *
+ * By contributing to the BVLC/caffe repository through pull-request, comment,
+ * or otherwise, the contributor releases their content to the
+ * license and copyright terms herein.
+ *
+ ***************** END Caffe Copyright Notice and Disclaimer *********************
+ *
+ * Copyright (c) 2018 Microsoft
+ * Licensed under The MIT License [see LICENSE for details]
+ * \file modulated_deformable_im2col.cuh
+ * \brief Function definitions of converting an image to
+ * column matrix based on kernel, padding, dilation, and offset.
+ * These functions are mainly used in deformable convolution operators.
+ * \ref: https://arxiv.org/abs/1703.06211
+ * \author Yuwen Xiong, Haozhi Qi, Jifeng Dai, Xizhou Zhu, Han Hu, Dazhi Cheng
+ */
+
+#include <ATen/ATen.h>
+#include <ATen/cuda/CUDAContext.h>
+#include <THC/THCAtomics.cuh>
+#include <stdio.h>
+#include <math.h>
+#include <float.h>
+
+using namespace at;
+
+#define CUDA_KERNEL_LOOP(i, n)                                 \
+  for (int i = blockIdx.x * blockDim.x + threadIdx.x; i < (n); \
+       i += blockDim.x * gridDim.x)
+
+
+namespace {
+
+const int CUDA_NUM_THREADS = 1024;
+const int kMaxGridNum = 65535;
+
+inline int GET_BLOCKS(const int N) {
+  return std::min(kMaxGridNum, (N + CUDA_NUM_THREADS - 1) / CUDA_NUM_THREADS);
+}
+
+}
+
+template <typename scalar_t>
+__device__ scalar_t deformable_im2col_bilinear(
+    const scalar_t* bottom_data,
+    const int data_width,
+    const int height,
+    const int width,
+    scalar_t h,
+    scalar_t w) {
+  int h_low = floor(h);
+  int w_low = floor(w);
+  int h_high = h_low + 1;
+  int w_high = w_low + 1;
+
+  scalar_t lh = h - h_low;
+  scalar_t lw = w - w_low;
+  scalar_t hh = 1 - lh, hw = 1 - lw;
+
+  scalar_t v1 = 0;
+  if (h_low >= 0 && w_low >= 0)
+    v1 = bottom_data[h_low * data_width + w_low];
+  scalar_t v2 = 0;
+  if (h_low >= 0 && w_high <= width - 1)
+    v2 = bottom_data[h_low * data_width + w_high];
+  scalar_t v3 = 0;
+  if (h_high <= height - 1 && w_low >= 0)
+    v3 = bottom_data[h_high * data_width + w_low];
+  scalar_t v4 = 0;
+  if (h_high <= height - 1 && w_high <= width - 1)
+    v4 = bottom_data[h_high * data_width + w_high];
+
+  scalar_t w1 = hh * hw, w2 = hh * lw, w3 = lh * hw, w4 = lh * lw;
+
+  scalar_t val = (w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4);
+  return val;
+}
+
+template <typename scalar_t>
+__device__ scalar_t get_gradient_weight(
+    scalar_t argmax_h,
+    scalar_t argmax_w,
+    const int h,
+    const int w,
+    const int height,
+    const int width) {
+  if (argmax_h <= -1 || argmax_h >= height || argmax_w <= -1 ||
+      argmax_w >= width) {
+    // empty
+    return 0;
+  }
+
+  int argmax_h_low = floor(argmax_h);
+  int argmax_w_low = floor(argmax_w);
+  int argmax_h_high = argmax_h_low + 1;
+  int argmax_w_high = argmax_w_low + 1;
+
+  scalar_t weight = 0;
+  if (h == argmax_h_low && w == argmax_w_low)
+    weight = (h + 1 - argmax_h) * (w + 1 - argmax_w);
+  if (h == argmax_h_low && w == argmax_w_high)
+    weight = (h + 1 - argmax_h) * (argmax_w + 1 - w);
+  if (h == argmax_h_high && w == argmax_w_low)
+    weight = (argmax_h + 1 - h) * (w + 1 - argmax_w);
+  if (h == argmax_h_high && w == argmax_w_high)
+    weight = (argmax_h + 1 - h) * (argmax_w + 1 - w);
+  return weight;
+}
+
+template <typename scalar_t>
+__device__ scalar_t get_coordinate_weight(
+    scalar_t argmax_h,
+    scalar_t argmax_w,
+    const int height,
+    const int width,
+    const scalar_t* im_data,
+    const int data_width,
+    const int bp_dir) {
+  if (argmax_h <= -1 || argmax_h >= height || argmax_w <= -1 ||
+      argmax_w >= width) {
+    // empty
+    return 0;
+  }
+
+  int argmax_h_low = floor(argmax_h);
+  int argmax_w_low = floor(argmax_w);
+  int argmax_h_high = argmax_h_low + 1;
+  int argmax_w_high = argmax_w_low + 1;
+
+  scalar_t weight = 0;
+
+  if (bp_dir == 0) {
+    if (argmax_h_low >= 0 && argmax_w_low >= 0)
+      weight += -1 * (argmax_w_low + 1 - argmax_w) *
+          im_data[argmax_h_low * data_width + argmax_w_low];
+    if (argmax_h_low >= 0 && argmax_w_high <= width - 1)
+      weight += -1 * (argmax_w - argmax_w_low) *
+          im_data[argmax_h_low * data_width + argmax_w_high];
+    if (argmax_h_high <= height - 1 && argmax_w_low >= 0)
+      weight += (argmax_w_low + 1 - argmax_w) *
+          im_data[argmax_h_high * data_width + argmax_w_low];
+    if (argmax_h_high <= height - 1 && argmax_w_high <= width - 1)
+      weight += (argmax_w - argmax_w_low) *
+          im_data[argmax_h_high * data_width + argmax_w_high];
+  } else if (bp_dir == 1) {
+    if (argmax_h_low >= 0 && argmax_w_low >= 0)
+      weight += -1 * (argmax_h_low + 1 - argmax_h) *
+          im_data[argmax_h_low * data_width + argmax_w_low];
+    if (argmax_h_low >= 0 && argmax_w_high <= width - 1)
+      weight += (argmax_h_low + 1 - argmax_h) *
+          im_data[argmax_h_low * data_width + argmax_w_high];
+    if (argmax_h_high <= height - 1 && argmax_w_low >= 0)
+      weight += -1 * (argmax_h - argmax_h_low) *
+          im_data[argmax_h_high * data_width + argmax_w_low];
+    if (argmax_h_high <= height - 1 && argmax_w_high <= width - 1)
+      weight += (argmax_h - argmax_h_low) *
+          im_data[argmax_h_high * data_width + argmax_w_high];
+  }
+
+  return weight;
+}
+
+template <typename scalar_t>
+__global__ void deformable_im2col_gpu_kernel(
+    const int n,
+    const scalar_t* data_im,
+    const scalar_t* data_offset,
+    const int height,
+    const int width,
+    const int kernel_h,
+    const int kernel_w,
+    const int pad_h,
+    const int pad_w,
+    const int stride_h,
+    const int stride_w,
+    const int dilation_h,
+    const int dilation_w,
+    const int channel_per_deformable_group,
+    const int batch_size,
+    const int num_channels,
+    const int deformable_group,
+    const int height_col,
+    const int width_col,
+    scalar_t* data_col) {
+  CUDA_KERNEL_LOOP(index, n) {
+    // index index of output matrix
+    const int w_col = index % width_col;
+    const int h_col = (index / width_col) % height_col;
+    const int b_col = (index / width_col / height_col) % batch_size;
+    const int c_im = (index / width_col / height_col) / batch_size;
+    const int c_col = c_im * kernel_h * kernel_w;
+
+    // compute deformable group index
+    const int deformable_group_index = c_im / channel_per_deformable_group;
+
+    const int h_in = h_col * stride_h - pad_h;
+    const int w_in = w_col * stride_w - pad_w;
+    scalar_t* data_col_ptr = data_col +
+        ((c_col * batch_size + b_col) * height_col + h_col) * width_col + w_col;
+    // const scalar_t* data_im_ptr = data_im +
((b_col * num_channels + c_im) * + // height + h_in) * width + w_in; + const scalar_t* data_im_ptr = + data_im + (b_col * num_channels + c_im) * height * width; + const scalar_t* data_offset_ptr = data_offset + + (b_col * deformable_group + deformable_group_index) * 2 * kernel_h * + kernel_w * height_col * width_col; + + for (int i = 0; i < kernel_h; ++i) { + for (int j = 0; j < kernel_w; ++j) { + const int data_offset_h_ptr = + ((2 * (i * kernel_w + j)) * height_col + h_col) * width_col + w_col; + const int data_offset_w_ptr = + ((2 * (i * kernel_w + j) + 1) * height_col + h_col) * width_col + + w_col; + const scalar_t offset_h = data_offset_ptr[data_offset_h_ptr]; + const scalar_t offset_w = data_offset_ptr[data_offset_w_ptr]; + scalar_t val = static_cast(0); + const scalar_t h_im = h_in + i * dilation_h + offset_h; + const scalar_t w_im = w_in + j * dilation_w + offset_w; + if (h_im > -1 && w_im > -1 && h_im < height && w_im < width) { + // const scalar_t map_h = i * dilation_h + offset_h; + // const scalar_t map_w = j * dilation_w + offset_w; + // const int cur_height = height - h_in; + // const int cur_width = width - w_in; + // val = deformable_im2col_bilinear(data_im_ptr, width, cur_height, + // cur_width, map_h, map_w); + val = deformable_im2col_bilinear( + data_im_ptr, width, height, width, h_im, w_im); + } + *data_col_ptr = val; + data_col_ptr += batch_size * height_col * width_col; + } + } + } +} + + +template +__global__ void deformable_col2im_gpu_kernel( + const int n, + const scalar_t* data_col, + const scalar_t* data_offset, + const int channels, + const int height, + const int width, + const int kernel_h, + const int kernel_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int channel_per_deformable_group, + const int batch_size, + const int deformable_group, + const int height_col, + const int width_col, + scalar_t* grad_im) { + CUDA_KERNEL_LOOP(index, n) { + const int j = (index / width_col / height_col / batch_size) % kernel_w; + const int i = + (index / width_col / height_col / batch_size / kernel_w) % kernel_h; + const int c = + index / width_col / height_col / batch_size / kernel_w / kernel_h; + // compute the start and end of the output + + const int deformable_group_index = c / channel_per_deformable_group; + + int w_out = index % width_col; + int h_out = (index / width_col) % height_col; + int b = (index / width_col / height_col) % batch_size; + int w_in = w_out * stride_w - pad_w; + int h_in = h_out * stride_h - pad_h; + + const scalar_t* data_offset_ptr = data_offset + + (b * deformable_group + deformable_group_index) * 2 * kernel_h * + kernel_w * height_col * width_col; + const int data_offset_h_ptr = + ((2 * (i * kernel_w + j)) * height_col + h_out) * width_col + w_out; + const int data_offset_w_ptr = + ((2 * (i * kernel_w + j) + 1) * height_col + h_out) * width_col + w_out; + const scalar_t offset_h = data_offset_ptr[data_offset_h_ptr]; + const scalar_t offset_w = data_offset_ptr[data_offset_w_ptr]; + const scalar_t cur_inv_h_data = h_in + i * dilation_h + offset_h; + const scalar_t cur_inv_w_data = w_in + j * dilation_w + offset_w; + + const scalar_t cur_top_grad = data_col[index]; + const int cur_h = (int)cur_inv_h_data; + const int cur_w = (int)cur_inv_w_data; + for (int dy = -2; dy <= 2; dy++) { + for (int dx = -2; dx <= 2; dx++) { + if (cur_h + dy >= 0 && cur_h + dy < height && cur_w + dx >= 0 && + cur_w + dx < width && abs(cur_inv_h_data - (cur_h + dy)) < 1 && + 
abs(cur_inv_w_data - (cur_w + dx)) < 1) { + int cur_bottom_grad_pos = + ((b * channels + c) * height + cur_h + dy) * width + cur_w + dx; + scalar_t weight = get_gradient_weight( + cur_inv_h_data, + cur_inv_w_data, + cur_h + dy, + cur_w + dx, + height, + width); + atomicAdd(grad_im + cur_bottom_grad_pos, weight * cur_top_grad); + } + } + } + } +} + + +template +__global__ void deformable_col2im_coord_gpu_kernel( + const int n, + const scalar_t* data_col, + const scalar_t* data_im, + const scalar_t* data_offset, + const int channels, + const int height, + const int width, + const int kernel_h, + const int kernel_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int channel_per_deformable_group, + const int batch_size, + const int offset_channels, + const int deformable_group, + const int height_col, + const int width_col, + scalar_t* grad_offset) { + CUDA_KERNEL_LOOP(index, n) { + scalar_t val = 0; + int w = index % width_col; + int h = (index / width_col) % height_col; + int c = (index / width_col / height_col) % offset_channels; + int b = (index / width_col / height_col) / offset_channels; + // compute the start and end of the output + + const int deformable_group_index = c / (2 * kernel_h * kernel_w); + const int col_step = kernel_h * kernel_w; + int cnt = 0; + const scalar_t* data_col_ptr = data_col + + deformable_group_index * channel_per_deformable_group * batch_size * + width_col * height_col; + const scalar_t* data_im_ptr = data_im + + (b * deformable_group + deformable_group_index) * + channel_per_deformable_group / kernel_h / kernel_w * height * width; + const scalar_t* data_offset_ptr = data_offset + + (b * deformable_group + deformable_group_index) * 2 * kernel_h * + kernel_w * height_col * width_col; + + const int offset_c = c - deformable_group_index * 2 * kernel_h * kernel_w; + + for (int col_c = (offset_c / 2); col_c < channel_per_deformable_group; + col_c += col_step) { + const int col_pos = + (((col_c * batch_size + b) * height_col) + h) * width_col + w; + const int bp_dir = offset_c % 2; + + int j = (col_pos / width_col / height_col / batch_size) % kernel_w; + int i = + (col_pos / width_col / height_col / batch_size / kernel_w) % kernel_h; + int w_out = col_pos % width_col; + int h_out = (col_pos / width_col) % height_col; + int w_in = w_out * stride_w - pad_w; + int h_in = h_out * stride_h - pad_h; + const int data_offset_h_ptr = + (((2 * (i * kernel_w + j)) * height_col + h_out) * width_col + w_out); + const int data_offset_w_ptr = + (((2 * (i * kernel_w + j) + 1) * height_col + h_out) * width_col + + w_out); + const scalar_t offset_h = data_offset_ptr[data_offset_h_ptr]; + const scalar_t offset_w = data_offset_ptr[data_offset_w_ptr]; + scalar_t inv_h = h_in + i * dilation_h + offset_h; + scalar_t inv_w = w_in + j * dilation_w + offset_w; + if (inv_h <= -1 || inv_w <= -1 || inv_h >= height || inv_w >= width) { + inv_h = inv_w = -2; + } + const scalar_t weight = get_coordinate_weight( + inv_h, + inv_w, + height, + width, + data_im_ptr + cnt * height * width, + width, + bp_dir); + val += weight * data_col_ptr[col_pos]; + cnt += 1; + } + + grad_offset[index] = val; + } +} + + +namespace detectron2 { + +void deformable_im2col( + const at::Tensor data_im, + const at::Tensor data_offset, + const int channels, + const int height, + const int width, + const int ksize_h, + const int ksize_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int 
dilation_h, + const int dilation_w, + const int parallel_imgs, + const int deformable_group, + at::Tensor data_col) { + // num_axes should be smaller than block size + // todo: check parallel_imgs is correctly passed in + int height_col = + (height + 2 * pad_h - (dilation_h * (ksize_h - 1) + 1)) / stride_h + 1; + int width_col = + (width + 2 * pad_w - (dilation_w * (ksize_w - 1) + 1)) / stride_w + 1; + int num_kernels = channels * height_col * width_col * parallel_imgs; + int channel_per_deformable_group = channels / deformable_group; + + at::cuda::CUDAGuard device_guard(data_im.device()); + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + AT_DISPATCH_FLOATING_TYPES_AND_HALF( + data_im.scalar_type(), "deformable_im2col_gpu", ([&] { + const scalar_t* data_im_ = data_im.data_ptr(); + const scalar_t* data_offset_ = data_offset.data_ptr(); + scalar_t* data_col_ = data_col.data_ptr(); + + deformable_im2col_gpu_kernel<<< + GET_BLOCKS(num_kernels), + CUDA_NUM_THREADS, + 0, + stream>>>( + num_kernels, + data_im_, + data_offset_, + height, + width, + ksize_h, + ksize_w, + pad_h, + pad_w, + stride_h, + stride_w, + dilation_h, + dilation_w, + channel_per_deformable_group, + parallel_imgs, + channels, + deformable_group, + height_col, + width_col, + data_col_); + })); + + cudaError_t err = cudaGetLastError(); + if (err != cudaSuccess) { + printf("error in deformable_im2col: %s\n", cudaGetErrorString(err)); + } +} + + +void deformable_col2im( + const at::Tensor data_col, + const at::Tensor data_offset, + const int channels, + const int height, + const int width, + const int ksize_h, + const int ksize_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int parallel_imgs, + const int deformable_group, + at::Tensor grad_im) { + // todo: make sure parallel_imgs is passed in correctly + int height_col = + (height + 2 * pad_h - (dilation_h * (ksize_h - 1) + 1)) / stride_h + 1; + int width_col = + (width + 2 * pad_w - (dilation_w * (ksize_w - 1) + 1)) / stride_w + 1; + int num_kernels = + channels * ksize_h * ksize_w * height_col * width_col * parallel_imgs; + int channel_per_deformable_group = channels / deformable_group; + + at::cuda::CUDAGuard device_guard(data_col.device()); + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + AT_DISPATCH_FLOATING_TYPES_AND_HALF( + data_col.scalar_type(), "deformable_col2im_gpu", ([&] { + const scalar_t* data_col_ = data_col.data_ptr(); + const scalar_t* data_offset_ = data_offset.data_ptr(); + scalar_t* grad_im_ = grad_im.data_ptr(); + + deformable_col2im_gpu_kernel<<< + GET_BLOCKS(num_kernels), + CUDA_NUM_THREADS, + 0, + stream>>>( + num_kernels, + data_col_, + data_offset_, + channels, + height, + width, + ksize_h, + ksize_w, + pad_h, + pad_w, + stride_h, + stride_w, + dilation_h, + dilation_w, + channel_per_deformable_group, + parallel_imgs, + deformable_group, + height_col, + width_col, + grad_im_); + })); + + cudaError_t err = cudaGetLastError(); + if (err != cudaSuccess) { + printf("error in deformable_col2im: %s\n", cudaGetErrorString(err)); + } +} + + +void deformable_col2im_coord( + const at::Tensor data_col, + const at::Tensor data_im, + const at::Tensor data_offset, + const int channels, + const int height, + const int width, + const int ksize_h, + const int ksize_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int parallel_imgs, + const int deformable_group, + 
at::Tensor grad_offset) { + int height_col = + (height + 2 * pad_h - (dilation_h * (ksize_h - 1) + 1)) / stride_h + 1; + int width_col = + (width + 2 * pad_w - (dilation_w * (ksize_w - 1) + 1)) / stride_w + 1; + int num_kernels = height_col * width_col * 2 * ksize_h * ksize_w * + deformable_group * parallel_imgs; + int channel_per_deformable_group = + channels * ksize_h * ksize_w / deformable_group; + + at::cuda::CUDAGuard device_guard(data_col.device()); + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + AT_DISPATCH_FLOATING_TYPES_AND_HALF( + data_col.scalar_type(), "deformable_col2im_coord_gpu", ([&] { + const scalar_t* data_col_ = data_col.data_ptr(); + const scalar_t* data_im_ = data_im.data_ptr(); + const scalar_t* data_offset_ = data_offset.data_ptr(); + scalar_t* grad_offset_ = grad_offset.data_ptr(); + + deformable_col2im_coord_gpu_kernel<<< + GET_BLOCKS(num_kernels), + CUDA_NUM_THREADS, + 0, + stream>>>( + num_kernels, + data_col_, + data_im_, + data_offset_, + channels, + height, + width, + ksize_h, + ksize_w, + pad_h, + pad_w, + stride_h, + stride_w, + dilation_h, + dilation_w, + channel_per_deformable_group, + parallel_imgs, + 2 * ksize_h * ksize_w * deformable_group, + deformable_group, + height_col, + width_col, + grad_offset_); + })); +} + +} // namespace detectron2 + + +template +__device__ scalar_t dmcn_im2col_bilinear( + const scalar_t* bottom_data, + const int data_width, + const int height, + const int width, + scalar_t h, + scalar_t w) { + int h_low = floor(h); + int w_low = floor(w); + int h_high = h_low + 1; + int w_high = w_low + 1; + + scalar_t lh = h - h_low; + scalar_t lw = w - w_low; + scalar_t hh = 1 - lh, hw = 1 - lw; + + scalar_t v1 = 0; + if (h_low >= 0 && w_low >= 0) + v1 = bottom_data[h_low * data_width + w_low]; + scalar_t v2 = 0; + if (h_low >= 0 && w_high <= width - 1) + v2 = bottom_data[h_low * data_width + w_high]; + scalar_t v3 = 0; + if (h_high <= height - 1 && w_low >= 0) + v3 = bottom_data[h_high * data_width + w_low]; + scalar_t v4 = 0; + if (h_high <= height - 1 && w_high <= width - 1) + v4 = bottom_data[h_high * data_width + w_high]; + + scalar_t w1 = hh * hw, w2 = hh * lw, w3 = lh * hw, w4 = lh * lw; + + scalar_t val = (w1 * v1 + w2 * v2 + w3 * v3 + w4 * v4); + return val; +} + +template +__device__ scalar_t dmcn_get_gradient_weight( + scalar_t argmax_h, + scalar_t argmax_w, + const int h, + const int w, + const int height, + const int width) { + if (argmax_h <= -1 || argmax_h >= height || argmax_w <= -1 || + argmax_w >= width) { + // empty + return 0; + } + + int argmax_h_low = floor(argmax_h); + int argmax_w_low = floor(argmax_w); + int argmax_h_high = argmax_h_low + 1; + int argmax_w_high = argmax_w_low + 1; + + scalar_t weight = 0; + if (h == argmax_h_low && w == argmax_w_low) + weight = (h + 1 - argmax_h) * (w + 1 - argmax_w); + if (h == argmax_h_low && w == argmax_w_high) + weight = (h + 1 - argmax_h) * (argmax_w + 1 - w); + if (h == argmax_h_high && w == argmax_w_low) + weight = (argmax_h + 1 - h) * (w + 1 - argmax_w); + if (h == argmax_h_high && w == argmax_w_high) + weight = (argmax_h + 1 - h) * (argmax_w + 1 - w); + return weight; +} + +template +__device__ scalar_t dmcn_get_coordinate_weight( + scalar_t argmax_h, + scalar_t argmax_w, + const int height, + const int width, + const scalar_t* im_data, + const int data_width, + const int bp_dir) { + if (argmax_h <= -1 || argmax_h >= height || argmax_w <= -1 || + argmax_w >= width) { + // empty + return 0; + } + + int argmax_h_low = floor(argmax_h); + int argmax_w_low = 
floor(argmax_w); + int argmax_h_high = argmax_h_low + 1; + int argmax_w_high = argmax_w_low + 1; + + scalar_t weight = 0; + + if (bp_dir == 0) { + if (argmax_h_low >= 0 && argmax_w_low >= 0) + weight += -1 * (argmax_w_low + 1 - argmax_w) * + im_data[argmax_h_low * data_width + argmax_w_low]; + if (argmax_h_low >= 0 && argmax_w_high <= width - 1) + weight += -1 * (argmax_w - argmax_w_low) * + im_data[argmax_h_low * data_width + argmax_w_high]; + if (argmax_h_high <= height - 1 && argmax_w_low >= 0) + weight += (argmax_w_low + 1 - argmax_w) * + im_data[argmax_h_high * data_width + argmax_w_low]; + if (argmax_h_high <= height - 1 && argmax_w_high <= width - 1) + weight += (argmax_w - argmax_w_low) * + im_data[argmax_h_high * data_width + argmax_w_high]; + } else if (bp_dir == 1) { + if (argmax_h_low >= 0 && argmax_w_low >= 0) + weight += -1 * (argmax_h_low + 1 - argmax_h) * + im_data[argmax_h_low * data_width + argmax_w_low]; + if (argmax_h_low >= 0 && argmax_w_high <= width - 1) + weight += (argmax_h_low + 1 - argmax_h) * + im_data[argmax_h_low * data_width + argmax_w_high]; + if (argmax_h_high <= height - 1 && argmax_w_low >= 0) + weight += -1 * (argmax_h - argmax_h_low) * + im_data[argmax_h_high * data_width + argmax_w_low]; + if (argmax_h_high <= height - 1 && argmax_w_high <= width - 1) + weight += (argmax_h - argmax_h_low) * + im_data[argmax_h_high * data_width + argmax_w_high]; + } + + return weight; +} + +template +__global__ void modulated_deformable_im2col_gpu_kernel( + const int n, + const scalar_t* data_im, + const scalar_t* data_offset, + const scalar_t* data_mask, + const int height, + const int width, + const int kernel_h, + const int kernel_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int channel_per_deformable_group, + const int batch_size, + const int num_channels, + const int deformable_group, + const int height_col, + const int width_col, + scalar_t* data_col) { + CUDA_KERNEL_LOOP(index, n) { + // index index of output matrix + const int w_col = index % width_col; + const int h_col = (index / width_col) % height_col; + const int b_col = (index / width_col / height_col) % batch_size; + const int c_im = (index / width_col / height_col) / batch_size; + const int c_col = c_im * kernel_h * kernel_w; + + // compute deformable group index + const int deformable_group_index = c_im / channel_per_deformable_group; + + const int h_in = h_col * stride_h - pad_h; + const int w_in = w_col * stride_w - pad_w; + + scalar_t* data_col_ptr = data_col + + ((c_col * batch_size + b_col) * height_col + h_col) * width_col + w_col; + // const float* data_im_ptr = data_im + ((b_col * num_channels + c_im) * + // height + h_in) * width + w_in; + const scalar_t* data_im_ptr = + data_im + (b_col * num_channels + c_im) * height * width; + const scalar_t* data_offset_ptr = data_offset + + (b_col * deformable_group + deformable_group_index) * 2 * kernel_h * + kernel_w * height_col * width_col; + + const scalar_t* data_mask_ptr = data_mask + + (b_col * deformable_group + deformable_group_index) * kernel_h * + kernel_w * height_col * width_col; + + for (int i = 0; i < kernel_h; ++i) { + for (int j = 0; j < kernel_w; ++j) { + const int data_offset_h_ptr = + ((2 * (i * kernel_w + j)) * height_col + h_col) * width_col + w_col; + const int data_offset_w_ptr = + ((2 * (i * kernel_w + j) + 1) * height_col + h_col) * width_col + + w_col; + const int data_mask_hw_ptr = + ((i * kernel_w + j) * height_col + h_col) * 
width_col + w_col; + const scalar_t offset_h = data_offset_ptr[data_offset_h_ptr]; + const scalar_t offset_w = data_offset_ptr[data_offset_w_ptr]; + const scalar_t mask = data_mask_ptr[data_mask_hw_ptr]; + scalar_t val = static_cast(0); + const scalar_t h_im = h_in + i * dilation_h + offset_h; + const scalar_t w_im = w_in + j * dilation_w + offset_w; + // if (h_im >= 0 && w_im >= 0 && h_im < height && w_im < width) { + if (h_im > -1 && w_im > -1 && h_im < height && w_im < width) { + // const float map_h = i * dilation_h + offset_h; + // const float map_w = j * dilation_w + offset_w; + // const int cur_height = height - h_in; + // const int cur_width = width - w_in; + // val = dmcn_im2col_bilinear(data_im_ptr, width, cur_height, + // cur_width, map_h, map_w); + val = dmcn_im2col_bilinear( + data_im_ptr, width, height, width, h_im, w_im); + } + *data_col_ptr = val * mask; + data_col_ptr += batch_size * height_col * width_col; + // data_col_ptr += height_col * width_col; + } + } + } +} + +template +__global__ void modulated_deformable_col2im_gpu_kernel( + const int n, + const scalar_t* data_col, + const scalar_t* data_offset, + const scalar_t* data_mask, + const int channels, + const int height, + const int width, + const int kernel_h, + const int kernel_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int channel_per_deformable_group, + const int batch_size, + const int deformable_group, + const int height_col, + const int width_col, + scalar_t* grad_im) { + CUDA_KERNEL_LOOP(index, n) { + const int j = (index / width_col / height_col / batch_size) % kernel_w; + const int i = + (index / width_col / height_col / batch_size / kernel_w) % kernel_h; + const int c = + index / width_col / height_col / batch_size / kernel_w / kernel_h; + // compute the start and end of the output + + const int deformable_group_index = c / channel_per_deformable_group; + + int w_out = index % width_col; + int h_out = (index / width_col) % height_col; + int b = (index / width_col / height_col) % batch_size; + int w_in = w_out * stride_w - pad_w; + int h_in = h_out * stride_h - pad_h; + + const scalar_t* data_offset_ptr = data_offset + + (b * deformable_group + deformable_group_index) * 2 * kernel_h * + kernel_w * height_col * width_col; + const scalar_t* data_mask_ptr = data_mask + + (b * deformable_group + deformable_group_index) * kernel_h * kernel_w * + height_col * width_col; + const int data_offset_h_ptr = + ((2 * (i * kernel_w + j)) * height_col + h_out) * width_col + w_out; + const int data_offset_w_ptr = + ((2 * (i * kernel_w + j) + 1) * height_col + h_out) * width_col + w_out; + const int data_mask_hw_ptr = + ((i * kernel_w + j) * height_col + h_out) * width_col + w_out; + const scalar_t offset_h = data_offset_ptr[data_offset_h_ptr]; + const scalar_t offset_w = data_offset_ptr[data_offset_w_ptr]; + const scalar_t mask = data_mask_ptr[data_mask_hw_ptr]; + const scalar_t cur_inv_h_data = h_in + i * dilation_h + offset_h; + const scalar_t cur_inv_w_data = w_in + j * dilation_w + offset_w; + + const scalar_t cur_top_grad = data_col[index] * mask; + const int cur_h = (int)cur_inv_h_data; + const int cur_w = (int)cur_inv_w_data; + for (int dy = -2; dy <= 2; dy++) { + for (int dx = -2; dx <= 2; dx++) { + if (cur_h + dy >= 0 && cur_h + dy < height && cur_w + dx >= 0 && + cur_w + dx < width && abs(cur_inv_h_data - (cur_h + dy)) < 1 && + abs(cur_inv_w_data - (cur_w + dx)) < 1) { + int cur_bottom_grad_pos = + ((b * channels + c) 
* height + cur_h + dy) * width + cur_w + dx; + scalar_t weight = dmcn_get_gradient_weight( + cur_inv_h_data, + cur_inv_w_data, + cur_h + dy, + cur_w + dx, + height, + width); + atomicAdd(grad_im + cur_bottom_grad_pos, weight * cur_top_grad); + } + } + } + } +} + +template +__global__ void modulated_deformable_col2im_coord_gpu_kernel( + const int n, + const scalar_t* data_col, + const scalar_t* data_im, + const scalar_t* data_offset, + const scalar_t* data_mask, + const int channels, + const int height, + const int width, + const int kernel_h, + const int kernel_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int channel_per_deformable_group, + const int batch_size, + const int offset_channels, + const int deformable_group, + const int height_col, + const int width_col, + scalar_t* grad_offset, + scalar_t* grad_mask) { + CUDA_KERNEL_LOOP(index, n) { + scalar_t val = 0, mval = 0; + int w = index % width_col; + int h = (index / width_col) % height_col; + int c = (index / width_col / height_col) % offset_channels; + int b = (index / width_col / height_col) / offset_channels; + // compute the start and end of the output + + const int deformable_group_index = c / (2 * kernel_h * kernel_w); + const int col_step = kernel_h * kernel_w; + int cnt = 0; + const scalar_t* data_col_ptr = data_col + + deformable_group_index * channel_per_deformable_group * batch_size * + width_col * height_col; + const scalar_t* data_im_ptr = data_im + + (b * deformable_group + deformable_group_index) * + channel_per_deformable_group / kernel_h / kernel_w * height * width; + const scalar_t* data_offset_ptr = data_offset + + (b * deformable_group + deformable_group_index) * 2 * kernel_h * + kernel_w * height_col * width_col; + const scalar_t* data_mask_ptr = data_mask + + (b * deformable_group + deformable_group_index) * kernel_h * kernel_w * + height_col * width_col; + + const int offset_c = c - deformable_group_index * 2 * kernel_h * kernel_w; + + for (int col_c = (offset_c / 2); col_c < channel_per_deformable_group; + col_c += col_step) { + const int col_pos = + (((col_c * batch_size + b) * height_col) + h) * width_col + w; + const int bp_dir = offset_c % 2; + + int j = (col_pos / width_col / height_col / batch_size) % kernel_w; + int i = + (col_pos / width_col / height_col / batch_size / kernel_w) % kernel_h; + int w_out = col_pos % width_col; + int h_out = (col_pos / width_col) % height_col; + int w_in = w_out * stride_w - pad_w; + int h_in = h_out * stride_h - pad_h; + const int data_offset_h_ptr = + (((2 * (i * kernel_w + j)) * height_col + h_out) * width_col + w_out); + const int data_offset_w_ptr = + (((2 * (i * kernel_w + j) + 1) * height_col + h_out) * width_col + + w_out); + const int data_mask_hw_ptr = + (((i * kernel_w + j) * height_col + h_out) * width_col + w_out); + const scalar_t offset_h = data_offset_ptr[data_offset_h_ptr]; + const scalar_t offset_w = data_offset_ptr[data_offset_w_ptr]; + const scalar_t mask = data_mask_ptr[data_mask_hw_ptr]; + scalar_t inv_h = h_in + i * dilation_h + offset_h; + scalar_t inv_w = w_in + j * dilation_w + offset_w; + if (inv_h <= -1 || inv_w <= -1 || inv_h >= height || inv_w >= width) { + inv_h = inv_w = -2; + } else { + mval += data_col_ptr[col_pos] * + dmcn_im2col_bilinear( + data_im_ptr + cnt * height * width, + width, + height, + width, + inv_h, + inv_w); + } + const scalar_t weight = dmcn_get_coordinate_weight( + inv_h, + inv_w, + height, + width, + data_im_ptr + cnt * 
height * width, + width, + bp_dir); + val += weight * data_col_ptr[col_pos] * mask; + cnt += 1; + } + // KERNEL_ASSIGN(grad_offset[index], offset_req, val); + grad_offset[index] = val; + if (offset_c % 2 == 0) + // KERNEL_ASSIGN(grad_mask[(((b * deformable_group + + // deformable_group_index) * kernel_h * kernel_w + offset_c / 2) * + // height_col + h) * width_col + w], mask_req, mval); + grad_mask + [(((b * deformable_group + deformable_group_index) * kernel_h * + kernel_w + + offset_c / 2) * + height_col + + h) * + width_col + + w] = mval; + } +} + + +namespace detectron2 { + +void modulated_deformable_im2col_cuda( + const at::Tensor data_im, + const at::Tensor data_offset, + const at::Tensor data_mask, + const int batch_size, + const int channels, + const int height_im, + const int width_im, + const int height_col, + const int width_col, + const int kernel_h, + const int kenerl_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int deformable_group, + at::Tensor data_col) { + // num_axes should be smaller than block size + const int channel_per_deformable_group = channels / deformable_group; + const int num_kernels = channels * batch_size * height_col * width_col; + + at::cuda::CUDAGuard device_guard(data_im.device()); + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + AT_DISPATCH_FLOATING_TYPES_AND_HALF( + data_im.scalar_type(), "modulated_deformable_im2col_gpu", ([&] { + const scalar_t* data_im_ = data_im.data_ptr(); + const scalar_t* data_offset_ = data_offset.data_ptr(); + const scalar_t* data_mask_ = data_mask.data_ptr(); + scalar_t* data_col_ = data_col.data_ptr(); + + modulated_deformable_im2col_gpu_kernel<<< + GET_BLOCKS(num_kernels), + CUDA_NUM_THREADS, + 0, + stream>>>( + num_kernels, + data_im_, + data_offset_, + data_mask_, + height_im, + width_im, + kernel_h, + kenerl_w, + pad_h, + pad_w, + stride_h, + stride_w, + dilation_h, + dilation_w, + channel_per_deformable_group, + batch_size, + channels, + deformable_group, + height_col, + width_col, + data_col_); + })); + + cudaError_t err = cudaGetLastError(); + if (err != cudaSuccess) { + printf( + "error in modulated_deformable_im2col_cuda: %s\n", + cudaGetErrorString(err)); + } +} + +void modulated_deformable_col2im_cuda( + const at::Tensor data_col, + const at::Tensor data_offset, + const at::Tensor data_mask, + const int batch_size, + const int channels, + const int height_im, + const int width_im, + const int height_col, + const int width_col, + const int kernel_h, + const int kernel_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int deformable_group, + at::Tensor grad_im) { + const int channel_per_deformable_group = channels / deformable_group; + const int num_kernels = + channels * kernel_h * kernel_w * batch_size * height_col * width_col; + + at::cuda::CUDAGuard device_guard(data_col.device()); + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + AT_DISPATCH_FLOATING_TYPES_AND_HALF( + data_col.scalar_type(), "modulated_deformable_col2im_gpu", ([&] { + const scalar_t* data_col_ = data_col.data_ptr(); + const scalar_t* data_offset_ = data_offset.data_ptr(); + const scalar_t* data_mask_ = data_mask.data_ptr(); + scalar_t* grad_im_ = grad_im.data_ptr(); + + modulated_deformable_col2im_gpu_kernel<<< + GET_BLOCKS(num_kernels), + CUDA_NUM_THREADS, + 0, + stream>>>( + num_kernels, + data_col_, + data_offset_, + data_mask_, + 
channels, + height_im, + width_im, + kernel_h, + kernel_w, + pad_h, + pad_w, + stride_h, + stride_w, + dilation_h, + dilation_w, + channel_per_deformable_group, + batch_size, + deformable_group, + height_col, + width_col, + grad_im_); + })); + + cudaError_t err = cudaGetLastError(); + if (err != cudaSuccess) { + printf( + "error in modulated_deformable_col2im_cuda: %s\n", + cudaGetErrorString(err)); + } +} + +void modulated_deformable_col2im_coord_cuda( + const at::Tensor data_col, + const at::Tensor data_im, + const at::Tensor data_offset, + const at::Tensor data_mask, + const int batch_size, + const int channels, + const int height_im, + const int width_im, + const int height_col, + const int width_col, + const int kernel_h, + const int kernel_w, + const int pad_h, + const int pad_w, + const int stride_h, + const int stride_w, + const int dilation_h, + const int dilation_w, + const int deformable_group, + at::Tensor grad_offset, + at::Tensor grad_mask) { + const int num_kernels = batch_size * height_col * width_col * 2 * kernel_h * + kernel_w * deformable_group; + const int channel_per_deformable_group = + channels * kernel_h * kernel_w / deformable_group; + + at::cuda::CUDAGuard device_guard(data_col.device()); + cudaStream_t stream = at::cuda::getCurrentCUDAStream(); + + AT_DISPATCH_FLOATING_TYPES_AND_HALF( + data_col.scalar_type(), "modulated_deformable_col2im_coord_gpu", ([&] { + const scalar_t* data_col_ = data_col.data_ptr(); + const scalar_t* data_im_ = data_im.data_ptr(); + const scalar_t* data_offset_ = data_offset.data_ptr(); + const scalar_t* data_mask_ = data_mask.data_ptr(); + scalar_t* grad_offset_ = grad_offset.data_ptr(); + scalar_t* grad_mask_ = grad_mask.data_ptr(); + + modulated_deformable_col2im_coord_gpu_kernel<<< + GET_BLOCKS(num_kernels), + CUDA_NUM_THREADS, + 0, + stream>>>( + num_kernels, + data_col_, + data_im_, + data_offset_, + data_mask_, + channels, + height_im, + width_im, + kernel_h, + kernel_w, + pad_h, + pad_w, + stride_h, + stride_w, + dilation_h, + dilation_w, + channel_per_deformable_group, + batch_size, + 2 * kernel_h * kernel_w * deformable_group, + deformable_group, + height_col, + width_col, + grad_offset_, + grad_mask_); + })); + cudaError_t err = cudaGetLastError(); + if (err != cudaSuccess) { + printf( + "error in modulated_deformable_col2im_coord_cuda: %s\n", + cudaGetErrorString(err)); + } +} + +} // namespace detectron2 diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/nms_rotated/nms_rotated.h b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/nms_rotated/nms_rotated.h new file mode 100644 index 0000000..12aca38 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/nms_rotated/nms_rotated.h @@ -0,0 +1,39 @@ +// Copyright (c) Facebook, Inc. and its affiliates. 
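+// Note (added): nms_rotated() below is the Python-facing entry point; it
+// dispatches to the CUDA implementation when the inputs live on the GPU and
+// to the CPU implementation otherwise.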
+#pragma once
+#include <torch/types.h>
+
+namespace detectron2 {
+
+at::Tensor nms_rotated_cpu(
+    const at::Tensor& dets,
+    const at::Tensor& scores,
+    const double iou_threshold);
+
+#if defined(WITH_CUDA) || defined(WITH_HIP)
+at::Tensor nms_rotated_cuda(
+    const at::Tensor& dets,
+    const at::Tensor& scores,
+    const double iou_threshold);
+#endif
+
+// Interface for Python
+// inline is needed to prevent multiple function definitions when this header is
+// included by different cpps
+inline at::Tensor nms_rotated(
+    const at::Tensor& dets,
+    const at::Tensor& scores,
+    const double iou_threshold) {
+  assert(dets.device().is_cuda() == scores.device().is_cuda());
+  if (dets.device().is_cuda()) {
+#if defined(WITH_CUDA) || defined(WITH_HIP)
+    return nms_rotated_cuda(
+        dets.contiguous(), scores.contiguous(), iou_threshold);
+#else
+    AT_ERROR("Detectron2 is not compiled with GPU support!");
+#endif
+  }
+
+  return nms_rotated_cpu(dets.contiguous(), scores.contiguous(), iou_threshold);
+}
+
+} // namespace detectron2
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/nms_rotated/nms_rotated_cpu.cpp b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/nms_rotated/nms_rotated_cpu.cpp
new file mode 100644
index 0000000..d7556e6
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/nms_rotated/nms_rotated_cpu.cpp
@@ -0,0 +1,75 @@
+// Copyright (c) Facebook, Inc. and its affiliates.
+#include "../box_iou_rotated/box_iou_rotated_utils.h"
+#include "nms_rotated.h"
+
+namespace detectron2 {
+
+template <typename scalar_t>
+at::Tensor nms_rotated_cpu_kernel(
+    const at::Tensor& dets,
+    const at::Tensor& scores,
+    const double iou_threshold) {
+  // nms_rotated_cpu_kernel is modified from torchvision's nms_cpu_kernel,
+  // however, the code in this function is much shorter because
+  // we delegate the IoU computation for rotated boxes to
+  // the single_box_iou_rotated function in box_iou_rotated_utils.h
+  AT_ASSERTM(dets.device().is_cpu(), "dets must be a CPU tensor");
+  AT_ASSERTM(scores.device().is_cpu(), "scores must be a CPU tensor");
+  AT_ASSERTM(
+      dets.scalar_type() == scores.scalar_type(),
+      "dets should have the same type as scores");
+
+  if (dets.numel() == 0) {
+    return at::empty({0}, dets.options().dtype(at::kLong));
+  }
+
+  auto order_t = std::get<1>(scores.sort(0, /* descending=*/true));
+
+  auto ndets = dets.size(0);
+  at::Tensor suppressed_t = at::zeros({ndets}, dets.options().dtype(at::kByte));
+  at::Tensor keep_t = at::zeros({ndets}, dets.options().dtype(at::kLong));
+
+  auto suppressed = suppressed_t.data_ptr<uint8_t>();
+  auto keep = keep_t.data_ptr<int64_t>();
+  auto order = order_t.data_ptr<int64_t>();
+
+  int64_t num_to_keep = 0;
+
+  for (int64_t _i = 0; _i < ndets; _i++) {
+    auto i = order[_i];
+    if (suppressed[i] == 1) {
+      continue;
+    }
+
+    keep[num_to_keep++] = i;
+
+    for (int64_t _j = _i + 1; _j < ndets; _j++) {
+      auto j = order[_j];
+      if (suppressed[j] == 1) {
+        continue;
+      }
+
+      auto ovr = single_box_iou_rotated<scalar_t>(
+          dets[i].data_ptr<scalar_t>(), dets[j].data_ptr<scalar_t>());
+      if (ovr >= iou_threshold) {
+        suppressed[j] = 1;
+      }
+    }
+  }
+  return keep_t.narrow(/*dim=*/0, /*start=*/0, /*length=*/num_to_keep);
+}
+
+at::Tensor nms_rotated_cpu(
+    // input must be contiguous
+    const at::Tensor& dets,
+    const at::Tensor& scores,
+    const double iou_threshold) {
+  auto result = at::empty({0}, dets.options());
+
+  AT_DISPATCH_FLOATING_TYPES(dets.scalar_type(), "nms_rotated", [&] {
+    result = nms_rotated_cpu_kernel<scalar_t>(dets, scores, iou_threshold);
+  });
+  return result;
+}
+
+} // namespace detectron2
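For reference, the greedy suppression loop implemented by nms_rotated_cpu_kernel above can be restated in a few lines of Python. This is an illustrative sketch, not part of the patch: nms_rotated_sketch is a hypothetical helper name, and iou_fn is a caller-supplied stand-in for the single_box_iou_rotated routine from box_iou_rotated_utils.h, which is not reproduced here.

    import numpy as np

    def nms_rotated_sketch(dets, scores, iou_threshold, iou_fn):
        # dets: (N, 5) boxes as (x_center, y_center, width, height, angle_degrees)
        # scores: (N,) confidences; iou_fn(a, b): rotated IoU of two boxes
        order = np.argsort(-scores)  # visit boxes from highest to lowest score
        suppressed = np.zeros(len(dets), dtype=bool)
        keep = []
        for rank, i in enumerate(order):
            if suppressed[i]:
                continue
            keep.append(i)  # box i survives; suppress later boxes it overlaps
            for j in order[rank + 1:]:
                if not suppressed[j] and iou_fn(dets[i], dets[j]) >= iou_threshold:
                    suppressed[j] = True
        return np.array(keep, dtype=np.int64)

A dummy iou_fn such as lambda a, b: 0.0 keeps every box in score order, which is a quick sanity check of the bookkeeping. The CUDA version in the next file computes the same keep set, but evaluates the pairwise IoU tests in 64-box tiles and stores the results as bitmasks.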
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/nms_rotated/nms_rotated_cuda.cu b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/nms_rotated/nms_rotated_cuda.cu
new file mode 100644
index 0000000..2a3db5c
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/nms_rotated/nms_rotated_cuda.cu
@@ -0,0 +1,145 @@
+// Copyright (c) Facebook, Inc. and its affiliates.
+#include <ATen/ATen.h>
+#include <ATen/cuda/CUDAContext.h>
+#include <c10/cuda/CUDAGuard.h>
+#include <ATen/cuda/CUDAApplyUtils.cuh>
+#ifdef WITH_CUDA
+#include "../box_iou_rotated/box_iou_rotated_utils.h"
+#endif
+// TODO avoid this when pytorch supports "same directory" hipification
+#ifdef WITH_HIP
+#include "box_iou_rotated/box_iou_rotated_utils.h"
+#endif
+
+using namespace detectron2;
+
+namespace {
+int const threadsPerBlock = sizeof(unsigned long long) * 8;
+}
+
+template <typename T>
+__global__ void nms_rotated_cuda_kernel(
+    const int n_boxes,
+    const double iou_threshold,
+    const T* dev_boxes,
+    unsigned long long* dev_mask) {
+  // nms_rotated_cuda_kernel is modified from torchvision's nms_cuda_kernel
+
+  const int row_start = blockIdx.y;
+  const int col_start = blockIdx.x;
+
+  // if (row_start > col_start) return;
+
+  const int row_size =
+      min(n_boxes - row_start * threadsPerBlock, threadsPerBlock);
+  const int col_size =
+      min(n_boxes - col_start * threadsPerBlock, threadsPerBlock);
+
+  // Compared to nms_cuda_kernel, where each box is represented with 4 values
+  // (x1, y1, x2, y2), each rotated box is represented with 5 values
+  // (x_center, y_center, width, height, angle_degrees) here.
+  __shared__ T block_boxes[threadsPerBlock * 5];
+  if (threadIdx.x < col_size) {
+    block_boxes[threadIdx.x * 5 + 0] =
+        dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 0];
+    block_boxes[threadIdx.x * 5 + 1] =
+        dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 1];
+    block_boxes[threadIdx.x * 5 + 2] =
+        dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 2];
+    block_boxes[threadIdx.x * 5 + 3] =
+        dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 3];
+    block_boxes[threadIdx.x * 5 + 4] =
+        dev_boxes[(threadsPerBlock * col_start + threadIdx.x) * 5 + 4];
+  }
+  __syncthreads();
+
+  if (threadIdx.x < row_size) {
+    const int cur_box_idx = threadsPerBlock * row_start + threadIdx.x;
+    const T* cur_box = dev_boxes + cur_box_idx * 5;
+    int i = 0;
+    unsigned long long t = 0;
+    int start = 0;
+    if (row_start == col_start) {
+      start = threadIdx.x + 1;
+    }
+    for (i = start; i < col_size; i++) {
+      // Instead of devIoU used by original horizontal nms, here
+      // we use the single_box_iou_rotated function from box_iou_rotated_utils.h
+      if (single_box_iou_rotated<T>(cur_box, block_boxes + i * 5) >
+          iou_threshold) {
+        t |= 1ULL << i;
+      }
+    }
+    const int col_blocks = at::cuda::ATenCeilDiv(n_boxes, threadsPerBlock);
+    dev_mask[cur_box_idx * col_blocks + col_start] = t;
+  }
+}
+
+namespace detectron2 {
+
+at::Tensor nms_rotated_cuda(
+    // input must be contiguous
+    const at::Tensor& dets,
+    const at::Tensor& scores,
+    double iou_threshold) {
+  // using scalar_t = float;
+  AT_ASSERTM(dets.is_cuda(), "dets must be a CUDA tensor");
+  AT_ASSERTM(scores.is_cuda(), "scores must be a CUDA tensor");
+  at::cuda::CUDAGuard device_guard(dets.device());
+
+  auto order_t = std::get<1>(scores.sort(0, /* descending=*/true));
+  auto dets_sorted = dets.index_select(0, order_t);
+
+  auto dets_num = dets.size(0);
+
+  const int col_blocks =
+      at::cuda::ATenCeilDiv(static_cast<int>(dets_num), threadsPerBlock);
+
+  at::Tensor mask =
+      at::empty({dets_num * col_blocks}, dets.options().dtype(at::kLong));
+
+  dim3 blocks(col_blocks, col_blocks);
+  dim3 threads(threadsPerBlock);
+  cudaStream_t stream = at::cuda::getCurrentCUDAStream();
+
+  AT_DISPATCH_FLOATING_TYPES(
+      dets_sorted.scalar_type(), "nms_rotated_kernel_cuda", [&] {
+        nms_rotated_cuda_kernel<scalar_t><<<blocks, threads, 0, stream>>>(
+            dets_num,
+            iou_threshold,
+            dets_sorted.data_ptr<scalar_t>(),
+            (unsigned long long*)mask.data_ptr<int64_t>());
+      });
+
+  at::Tensor mask_cpu = mask.to(at::kCPU);
+  unsigned long long* mask_host =
+      (unsigned long long*)mask_cpu.data_ptr<int64_t>();
+
+  std::vector<unsigned long long> remv(col_blocks);
+  memset(&remv[0], 0, sizeof(unsigned long long) * col_blocks);
+
+  at::Tensor keep =
+      at::empty({dets_num}, dets.options().dtype(at::kLong).device(at::kCPU));
+  int64_t* keep_out = keep.data_ptr<int64_t>();
+
+  int num_to_keep = 0;
+  for (int i = 0; i < dets_num; i++) {
+    int nblock = i / threadsPerBlock;
+    int inblock = i % threadsPerBlock;
+
+    if (!(remv[nblock] & (1ULL << inblock))) {
+      keep_out[num_to_keep++] = i;
+      unsigned long long* p = mask_host + i * col_blocks;
+      for (int j = nblock; j < col_blocks; j++) {
+        remv[j] |= p[j];
+      }
+    }
+  }
+
+  AT_CUDA_CHECK(cudaGetLastError());
+  return order_t.index(
+      {keep.narrow(/*dim=*/0, /*start=*/0, /*length=*/num_to_keep)
+           .to(order_t.device(), keep.scalar_type())});
+}
+
+} // namespace detectron2
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/vision.cpp b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/vision.cpp
new file mode 100644
index 0000000..c9a2cd4
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/csrc/vision.cpp
@@ -0,0 +1,117 @@
+// Copyright (c) Facebook, Inc. and its affiliates.
+
+#include <torch/extension.h>
+#include "ROIAlignRotated/ROIAlignRotated.h"
+#include "box_iou_rotated/box_iou_rotated.h"
+#include "cocoeval/cocoeval.h"
+#include "deformable/deform_conv.h"
+#include "nms_rotated/nms_rotated.h"
+
+namespace detectron2 {
+
+#if defined(WITH_CUDA) || defined(WITH_HIP)
+extern int get_cudart_version();
+#endif
+
+std::string get_cuda_version() {
+#if defined(WITH_CUDA) || defined(WITH_HIP)
+  std::ostringstream oss;
+
+#if defined(WITH_CUDA)
+  oss << "CUDA ";
+#else
+  oss << "HIP ";
+#endif
+
+  // copied from
+  // https://github.com/pytorch/pytorch/blob/master/aten/src/ATen/cuda/detail/CUDAHooks.cpp#L231
+  auto printCudaStyleVersion = [&](int v) {
+    oss << (v / 1000) << "." << (v / 10 % 100);
+    if (v % 10 != 0) {
+      oss << "." << (v % 10);
+    }
+  };
+  printCudaStyleVersion(get_cudart_version());
+  return oss.str();
+#else // neither CUDA nor HIP
+  return std::string("not available");
+#endif
+}
+
+bool has_cuda() {
+#if defined(WITH_CUDA)
+  return true;
+#else
+  return false;
+#endif
+}
+
+// similar to
+// https://github.com/pytorch/pytorch/blob/master/aten/src/ATen/Version.cpp
+std::string get_compiler_version() {
+  std::ostringstream ss;
+#if defined(__GNUC__)
+#ifndef __clang__
+
+#if ((__GNUC__ <= 4) && (__GNUC_MINOR__ <= 8))
+#error "GCC >= 4.9 is required!"
+#endif
+
+  { ss << "GCC " << __GNUC__ << "." << __GNUC_MINOR__; }
+#endif
+#endif
+
+#if defined(__clang_major__)
+  {
+    ss << "clang " << __clang_major__ << "." << __clang_minor__ << "."
+       << __clang_patchlevel__;
+  }
+#endif
+
+#if defined(_MSC_VER)
+  { ss << "MSVC " << _MSC_FULL_VER; }
+#endif
+  return ss.str();
+}
+
+PYBIND11_MODULE(TORCH_EXTENSION_NAME, m) {
+  m.def("get_compiler_version", &get_compiler_version, "get_compiler_version");
+  m.def("get_cuda_version", &get_cuda_version, "get_cuda_version");
+  m.def("has_cuda", &has_cuda, "has_cuda");
+
+  m.def("deform_conv_forward", &deform_conv_forward, "deform_conv_forward");
+  m.def(
+      "deform_conv_backward_input",
+      &deform_conv_backward_input,
+      "deform_conv_backward_input");
+  m.def(
+      "deform_conv_backward_filter",
+      &deform_conv_backward_filter,
+      "deform_conv_backward_filter");
+  m.def(
+      "modulated_deform_conv_forward",
+      &modulated_deform_conv_forward,
+      "modulated_deform_conv_forward");
+  m.def(
+      "modulated_deform_conv_backward",
+      &modulated_deform_conv_backward,
+      "modulated_deform_conv_backward");
+
+  m.def("COCOevalAccumulate", &COCOeval::Accumulate, "COCOeval::Accumulate");
+  m.def(
+      "COCOevalEvaluateImages",
+      &COCOeval::EvaluateImages,
+      "COCOeval::EvaluateImages");
+  pybind11::class_<COCOeval::InstanceAnnotation>(m, "InstanceAnnotation")
+      .def(pybind11::init<uint64_t, double, double, bool, bool>());
+  pybind11::class_<COCOeval::ImageEvaluation>(m, "ImageEvaluation")
+      .def(pybind11::init<>());
+}
+
+TORCH_LIBRARY(detectron2, m) {
+  m.def("nms_rotated", &nms_rotated);
+  m.def("box_iou_rotated", &box_iou_rotated);
+  m.def("roi_align_rotated_forward", &ROIAlignRotated_forward);
+  m.def("roi_align_rotated_backward", &ROIAlignRotated_backward);
+}
+} // namespace detectron2
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/deform_conv.py b/motion-gan-pipeline/preprocessing/third/detectron2/layers/deform_conv.py
new file mode 100644
index 0000000..dffb720
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/deform_conv.py
@@ -0,0 +1,514 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+import math
+from functools import lru_cache
+import torch
+from torch import nn
+from torch.autograd import Function
+from torch.autograd.function import once_differentiable
+from torch.nn.modules.utils import _pair
+from torchvision.ops import deform_conv2d
+
+from detectron2.utils.develop import create_dummy_class, create_dummy_func
+
+from .wrappers import _NewEmptyTensorOp
+
+
+class _DeformConv(Function):
+    @staticmethod
+    def forward(
+        ctx,
+        input,
+        offset,
+        weight,
+        stride=1,
+        padding=0,
+        dilation=1,
+        groups=1,
+        deformable_groups=1,
+        im2col_step=64,
+    ):
+        if input is not None and input.dim() != 4:
+            raise ValueError(
+                "Expected 4D tensor as input, got {}D tensor instead.".format(input.dim())
+            )
+        ctx.stride = _pair(stride)
+        ctx.padding = _pair(padding)
+        ctx.dilation = _pair(dilation)
+        ctx.groups = groups
+        ctx.deformable_groups = deformable_groups
+        ctx.im2col_step = im2col_step
+
+        ctx.save_for_backward(input, offset, weight)
+
+        output = input.new_empty(
+            _DeformConv._output_size(input, weight, ctx.padding, ctx.dilation, ctx.stride)
+        )
+
+        ctx.bufs_ = [input.new_empty(0), input.new_empty(0)]  # columns, ones
+
+        if not input.is_cuda:
+            # TODO: let torchvision support full features of our deformconv.
+            if deformable_groups != 1:
+                raise NotImplementedError(
+                    "Deformable Conv with deformable_groups != 1 is not supported on CPUs!"
+                )
+            return deform_conv2d(
+                input, offset, weight, stride=stride, padding=padding, dilation=dilation
+            )
+        else:
+            cur_im2col_step = _DeformConv._cal_im2col_step(input.shape[0], ctx.im2col_step)
+            assert (input.shape[0] % cur_im2col_step) == 0, "im2col step must divide batchsize"
+
+            _C.deform_conv_forward(
+                input,
+                weight,
+                offset,
+                output,
+                ctx.bufs_[0],
+                ctx.bufs_[1],
+                weight.size(3),
+                weight.size(2),
+                ctx.stride[1],
+                ctx.stride[0],
+                ctx.padding[1],
+                ctx.padding[0],
+                ctx.dilation[1],
+                ctx.dilation[0],
+                ctx.groups,
+                ctx.deformable_groups,
+                cur_im2col_step,
+            )
+        return output
+
+    @staticmethod
+    @once_differentiable
+    def backward(ctx, grad_output):
+        input, offset, weight = ctx.saved_tensors
+
+        grad_input = grad_offset = grad_weight = None
+
+        if not grad_output.is_cuda:
+            raise NotImplementedError("Deformable Conv is not supported on CPUs!")
+        else:
+            cur_im2col_step = _DeformConv._cal_im2col_step(input.shape[0], ctx.im2col_step)
+            assert (input.shape[0] % cur_im2col_step) == 0, "im2col step must divide batchsize"
+
+            if ctx.needs_input_grad[0] or ctx.needs_input_grad[1]:
+                grad_input = torch.zeros_like(input)
+                grad_offset = torch.zeros_like(offset)
+                _C.deform_conv_backward_input(
+                    input,
+                    offset,
+                    grad_output,
+                    grad_input,
+                    grad_offset,
+                    weight,
+                    ctx.bufs_[0],
+                    weight.size(3),
+                    weight.size(2),
+                    ctx.stride[1],
+                    ctx.stride[0],
+                    ctx.padding[1],
+                    ctx.padding[0],
+                    ctx.dilation[1],
+                    ctx.dilation[0],
+                    ctx.groups,
+                    ctx.deformable_groups,
+                    cur_im2col_step,
+                )
+
+            if ctx.needs_input_grad[2]:
+                grad_weight = torch.zeros_like(weight)
+                _C.deform_conv_backward_filter(
+                    input,
+                    offset,
+                    grad_output,
+                    grad_weight,
+                    ctx.bufs_[0],
+                    ctx.bufs_[1],
+                    weight.size(3),
+                    weight.size(2),
+                    ctx.stride[1],
+                    ctx.stride[0],
+                    ctx.padding[1],
+                    ctx.padding[0],
+                    ctx.dilation[1],
+                    ctx.dilation[0],
+                    ctx.groups,
+                    ctx.deformable_groups,
+                    1,
+                    cur_im2col_step,
+                )
+
+        return grad_input, grad_offset, grad_weight, None, None, None, None, None, None
+
+    @staticmethod
+    def _output_size(input, weight, padding, dilation, stride):
+        channels = weight.size(0)
+        output_size = (input.size(0), channels)
+        for d in range(input.dim() - 2):
+            in_size = input.size(d + 2)
+            pad = padding[d]
+            kernel = dilation[d] * (weight.size(d + 2) - 1) + 1
+            stride_ = stride[d]
+            output_size += ((in_size + (2 * pad) - kernel) // stride_ + 1,)
+        if not all(map(lambda s: s > 0, output_size)):
+            raise ValueError(
+                "convolution input is too small (output would be {})".format(
+                    "x".join(map(str, output_size))
+                )
+            )
+        return output_size
+
+    @staticmethod
+    @lru_cache(maxsize=128)
+    def _cal_im2col_step(input_size, default_size):
+        """
+        Calculate a proper im2col step size, which should divide input_size and not be
+        larger than default_size. Meanwhile the step size should be as large as possible
+        to be more efficient. So we choose the largest divisor of input_size that does
+        not exceed default_size.
+        :param input_size: input batch size.
+        :param default_size: default preferred im2col step size.
+        :return: the largest proper step size.
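+        Example (illustrative): for input_size=100 and default_size=64, the divisors
+        of 100 that do not exceed 64 are 1, 2, 4, 5, 10, 20, 25 and 50, so the step
+        returned is 50.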
+ """ + if input_size <= default_size: + return input_size + best_step = 1 + for step in range(2, min(int(math.sqrt(input_size)) + 1, default_size)): + if input_size % step == 0: + if input_size // step <= default_size: + return input_size // step + best_step = step + + return best_step + + +class _ModulatedDeformConv(Function): + @staticmethod + def forward( + ctx, + input, + offset, + mask, + weight, + bias=None, + stride=1, + padding=0, + dilation=1, + groups=1, + deformable_groups=1, + ): + ctx.stride = stride + ctx.padding = padding + ctx.dilation = dilation + ctx.groups = groups + ctx.deformable_groups = deformable_groups + ctx.with_bias = bias is not None + if not ctx.with_bias: + bias = input.new_empty(1) # fake tensor + if not input.is_cuda: + raise NotImplementedError("Deformable Conv is not supported on CPUs!") + if ( + weight.requires_grad + or mask.requires_grad + or offset.requires_grad + or input.requires_grad + ): + ctx.save_for_backward(input, offset, mask, weight, bias) + output = input.new_empty(_ModulatedDeformConv._infer_shape(ctx, input, weight)) + ctx._bufs = [input.new_empty(0), input.new_empty(0)] + _C.modulated_deform_conv_forward( + input, + weight, + bias, + ctx._bufs[0], + offset, + mask, + output, + ctx._bufs[1], + weight.shape[2], + weight.shape[3], + ctx.stride, + ctx.stride, + ctx.padding, + ctx.padding, + ctx.dilation, + ctx.dilation, + ctx.groups, + ctx.deformable_groups, + ctx.with_bias, + ) + return output + + @staticmethod + @once_differentiable + def backward(ctx, grad_output): + if not grad_output.is_cuda: + raise NotImplementedError("Deformable Conv is not supported on CPUs!") + input, offset, mask, weight, bias = ctx.saved_tensors + grad_input = torch.zeros_like(input) + grad_offset = torch.zeros_like(offset) + grad_mask = torch.zeros_like(mask) + grad_weight = torch.zeros_like(weight) + grad_bias = torch.zeros_like(bias) + _C.modulated_deform_conv_backward( + input, + weight, + bias, + ctx._bufs[0], + offset, + mask, + ctx._bufs[1], + grad_input, + grad_weight, + grad_bias, + grad_offset, + grad_mask, + grad_output, + weight.shape[2], + weight.shape[3], + ctx.stride, + ctx.stride, + ctx.padding, + ctx.padding, + ctx.dilation, + ctx.dilation, + ctx.groups, + ctx.deformable_groups, + ctx.with_bias, + ) + if not ctx.with_bias: + grad_bias = None + + return ( + grad_input, + grad_offset, + grad_mask, + grad_weight, + grad_bias, + None, + None, + None, + None, + None, + ) + + @staticmethod + def _infer_shape(ctx, input, weight): + n = input.size(0) + channels_out = weight.size(0) + height, width = input.shape[2:4] + kernel_h, kernel_w = weight.shape[2:4] + height_out = ( + height + 2 * ctx.padding - (ctx.dilation * (kernel_h - 1) + 1) + ) // ctx.stride + 1 + width_out = ( + width + 2 * ctx.padding - (ctx.dilation * (kernel_w - 1) + 1) + ) // ctx.stride + 1 + return n, channels_out, height_out, width_out + + +deform_conv = _DeformConv.apply +modulated_deform_conv = _ModulatedDeformConv.apply + + +class DeformConv(nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride=1, + padding=0, + dilation=1, + groups=1, + deformable_groups=1, + bias=False, + norm=None, + activation=None, + ): + """ + Deformable convolution from :paper:`deformconv`. + + Arguments are similar to :class:`Conv2D`. Extra arguments: + + Args: + deformable_groups (int): number of groups used in deformable convolution. 
+ norm (nn.Module, optional): a normalization layer + activation (callable(Tensor) -> Tensor): a callable activation function + """ + super(DeformConv, self).__init__() + + assert not bias + assert in_channels % groups == 0, "in_channels {} cannot be divisible by groups {}".format( + in_channels, groups + ) + assert ( + out_channels % groups == 0 + ), "out_channels {} cannot be divisible by groups {}".format(out_channels, groups) + + self.in_channels = in_channels + self.out_channels = out_channels + self.kernel_size = _pair(kernel_size) + self.stride = _pair(stride) + self.padding = _pair(padding) + self.dilation = _pair(dilation) + self.groups = groups + self.deformable_groups = deformable_groups + self.norm = norm + self.activation = activation + + self.weight = nn.Parameter( + torch.Tensor(out_channels, in_channels // self.groups, *self.kernel_size) + ) + self.bias = None + + nn.init.kaiming_uniform_(self.weight, nonlinearity="relu") + + def forward(self, x, offset): + if x.numel() == 0: + # When input is empty, we want to return a empty tensor with "correct" shape, + # So that the following operations will not panic + # if they check for the shape of the tensor. + # This computes the height and width of the output tensor + output_shape = [ + (i + 2 * p - (di * (k - 1) + 1)) // s + 1 + for i, p, di, k, s in zip( + x.shape[-2:], self.padding, self.dilation, self.kernel_size, self.stride + ) + ] + output_shape = [x.shape[0], self.weight.shape[0]] + output_shape + return _NewEmptyTensorOp.apply(x, output_shape) + + x = deform_conv( + x, + offset, + self.weight, + self.stride, + self.padding, + self.dilation, + self.groups, + self.deformable_groups, + ) + if self.norm is not None: + x = self.norm(x) + if self.activation is not None: + x = self.activation(x) + return x + + def extra_repr(self): + tmpstr = "in_channels=" + str(self.in_channels) + tmpstr += ", out_channels=" + str(self.out_channels) + tmpstr += ", kernel_size=" + str(self.kernel_size) + tmpstr += ", stride=" + str(self.stride) + tmpstr += ", padding=" + str(self.padding) + tmpstr += ", dilation=" + str(self.dilation) + tmpstr += ", groups=" + str(self.groups) + tmpstr += ", deformable_groups=" + str(self.deformable_groups) + tmpstr += ", bias=False" + return tmpstr + + +class ModulatedDeformConv(nn.Module): + def __init__( + self, + in_channels, + out_channels, + kernel_size, + stride=1, + padding=0, + dilation=1, + groups=1, + deformable_groups=1, + bias=True, + norm=None, + activation=None, + ): + """ + Modulated deformable convolution from :paper:`deformconv2`. + + Arguments are similar to :class:`Conv2D`. Extra arguments: + + Args: + deformable_groups (int): number of groups used in deformable convolution. 
+ norm (nn.Module, optional): a normalization layer + activation (callable(Tensor) -> Tensor): a callable activation function + """ + super(ModulatedDeformConv, self).__init__() + self.in_channels = in_channels + self.out_channels = out_channels + self.kernel_size = _pair(kernel_size) + self.stride = stride + self.padding = padding + self.dilation = dilation + self.groups = groups + self.deformable_groups = deformable_groups + self.with_bias = bias + self.norm = norm + self.activation = activation + + self.weight = nn.Parameter( + torch.Tensor(out_channels, in_channels // groups, *self.kernel_size) + ) + if bias: + self.bias = nn.Parameter(torch.Tensor(out_channels)) + else: + self.bias = None + + nn.init.kaiming_uniform_(self.weight, nonlinearity="relu") + if self.bias is not None: + nn.init.constant_(self.bias, 0) + + def forward(self, x, offset, mask): + if x.numel() == 0: + output_shape = [ + (i + 2 * p - (di * (k - 1) + 1)) // s + 1 + for i, p, di, k, s in zip( + x.shape[-2:], self.padding, self.dilation, self.kernel_size, self.stride + ) + ] + output_shape = [x.shape[0], self.weight.shape[0]] + output_shape + return _NewEmptyTensorOp.apply(x, output_shape) + + x = modulated_deform_conv( + x, + offset, + mask, + self.weight, + self.bias, + self.stride, + self.padding, + self.dilation, + self.groups, + self.deformable_groups, + ) + if self.norm is not None: + x = self.norm(x) + if self.activation is not None: + x = self.activation(x) + return x + + def extra_repr(self): + tmpstr = "in_channels=" + str(self.in_channels) + tmpstr += ", out_channels=" + str(self.out_channels) + tmpstr += ", kernel_size=" + str(self.kernel_size) + tmpstr += ", stride=" + str(self.stride) + tmpstr += ", padding=" + str(self.padding) + tmpstr += ", dilation=" + str(self.dilation) + tmpstr += ", groups=" + str(self.groups) + tmpstr += ", deformable_groups=" + str(self.deformable_groups) + tmpstr += ", bias=" + str(self.with_bias) + return tmpstr + + +try: + from detectron2 import _C +except ImportError: + # TODO: register ops natively so there is no need to import _C. + _msg = "detectron2 is not compiled successfully, please build following the instructions!" + _args = ("detectron2._C", _msg) + DeformConv = create_dummy_class("DeformConv", *_args) + ModulatedDeformConv = create_dummy_class("ModulatedDeformConv", *_args) + deform_conv = create_dummy_func("deform_conv", *_args) + modulated_deform_conv = create_dummy_func("modulated_deform_conv", *_args) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/losses.py b/motion-gan-pipeline/preprocessing/third/detectron2/layers/losses.py new file mode 100644 index 0000000..cf4d5e9 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/losses.py @@ -0,0 +1,133 @@ +import math +import torch + + +def diou_loss( + boxes1: torch.Tensor, + boxes2: torch.Tensor, + reduction: str = "none", + eps: float = 1e-7, +) -> torch.Tensor: + """ + Distance Intersection over Union Loss (Zhaohui Zheng et. al) + https://arxiv.org/abs/1911.08287 + Args: + boxes1, boxes2 (Tensor): box locations in XYXY format, shape (N, 4) or (4,). + reduction: 'none' | 'mean' | 'sum' + 'none': No reduction will be applied to the output. + 'mean': The output will be averaged. + 'sum': The output will be summed. 
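`ModulatedDeformConv` (DCNv2) additionally takes a per-tap modulation mask in [0, 1]. A common wiring, sketched under the same assumptions as above (CUDA device, compiled ops, hypothetical shapes), predicts offsets and mask with one plain conv and splits the result:

import torch
import torch.nn as nn

dg, k = 1, 3
pred = nn.Conv2d(64, dg * 3 * k * k, kernel_size=3, padding=1).cuda()
mdconv = ModulatedDeformConv(64, 128, kernel_size=3, padding=1, deformable_groups=dg).cuda()

x = torch.randn(2, 64, 32, 32, device="cuda")
o1, o2, mask = torch.chunk(pred(x), 3, dim=1)
offset = torch.cat((o1, o2), dim=1)       # dg * 2 * k * k channels
out = mdconv(x, offset, mask.sigmoid())   # mask: dg * k * k channels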
+ eps (float): small number to prevent division by zero + """ + + x1, y1, x2, y2 = boxes1.unbind(dim=-1) + x1g, y1g, x2g, y2g = boxes2.unbind(dim=-1) + + # TODO: use torch._assert_async() when pytorch 1.8 support is dropped + assert (x2 >= x1).all(), "bad box: x1 larger than x2" + assert (y2 >= y1).all(), "bad box: y1 larger than y2" + + # Intersection keypoints + xkis1 = torch.max(x1, x1g) + ykis1 = torch.max(y1, y1g) + xkis2 = torch.min(x2, x2g) + ykis2 = torch.min(y2, y2g) + + intsct = torch.zeros_like(x1) + mask = (ykis2 > ykis1) & (xkis2 > xkis1) + intsct[mask] = (xkis2[mask] - xkis1[mask]) * (ykis2[mask] - ykis1[mask]) + union = (x2 - x1) * (y2 - y1) + (x2g - x1g) * (y2g - y1g) - intsct + eps + iou = intsct / union + + # smallest enclosing box + xc1 = torch.min(x1, x1g) + yc1 = torch.min(y1, y1g) + xc2 = torch.max(x2, x2g) + yc2 = torch.max(y2, y2g) + diag_len = ((xc2 - xc1) ** 2) + ((yc2 - yc1) ** 2) + eps + + # centers of boxes + x_p = (x2 + x1) / 2 + y_p = (y2 + y1) / 2 + x_g = (x1g + x2g) / 2 + y_g = (y1g + y2g) / 2 + distance = ((x_p - x_g) ** 2) + ((y_p - y_g) ** 2) + + # Eqn. (7) + loss = 1 - iou + (distance / diag_len) + if reduction == "mean": + loss = loss.mean() if loss.numel() > 0 else 0.0 * loss.sum() + elif reduction == "sum": + loss = loss.sum() + + return loss + + +def ciou_loss( + boxes1: torch.Tensor, + boxes2: torch.Tensor, + reduction: str = "none", + eps: float = 1e-7, +) -> torch.Tensor: + """ + Complete Intersection over Union Loss (Zhaohui Zheng et. al) + https://arxiv.org/abs/1911.08287 + Args: + boxes1, boxes2 (Tensor): box locations in XYXY format, shape (N, 4) or (4,). + reduction: 'none' | 'mean' | 'sum' + 'none': No reduction will be applied to the output. + 'mean': The output will be averaged. + 'sum': The output will be summed. + eps (float): small number to prevent division by zero + """ + + x1, y1, x2, y2 = boxes1.unbind(dim=-1) + x1g, y1g, x2g, y2g = boxes2.unbind(dim=-1) + + # TODO: use torch._assert_async() when pytorch 1.8 support is dropped + assert (x2 >= x1).all(), "bad box: x1 larger than x2" + assert (y2 >= y1).all(), "bad box: y1 larger than y2" + + # Intersection keypoints + xkis1 = torch.max(x1, x1g) + ykis1 = torch.max(y1, y1g) + xkis2 = torch.min(x2, x2g) + ykis2 = torch.min(y2, y2g) + + intsct = torch.zeros_like(x1) + mask = (ykis2 > ykis1) & (xkis2 > xkis1) + intsct[mask] = (xkis2[mask] - xkis1[mask]) * (ykis2[mask] - ykis1[mask]) + union = (x2 - x1) * (y2 - y1) + (x2g - x1g) * (y2g - y1g) - intsct + eps + iou = intsct / union + + # smallest enclosing box + xc1 = torch.min(x1, x1g) + yc1 = torch.min(y1, y1g) + xc2 = torch.max(x2, x2g) + yc2 = torch.max(y2, y2g) + diag_len = ((xc2 - xc1) ** 2) + ((yc2 - yc1) ** 2) + eps + + # centers of boxes + x_p = (x2 + x1) / 2 + y_p = (y2 + y1) / 2 + x_g = (x1g + x2g) / 2 + y_g = (y1g + y2g) / 2 + distance = ((x_p - x_g) ** 2) + ((y_p - y_g) ** 2) + + # width and height of boxes + w_pred = x2 - x1 + h_pred = y2 - y1 + w_gt = x2g - x1g + h_gt = y2g - y1g + v = (4 / (math.pi ** 2)) * torch.pow((torch.atan(w_gt / h_gt) - torch.atan(w_pred / h_pred)), 2) + with torch.no_grad(): + alpha = v / (1 - iou + v + eps) + + # Eqn. 
(10) + loss = 1 - iou + (distance / diag_len) + alpha * v + if reduction == "mean": + loss = loss.mean() if loss.numel() > 0 else 0.0 * loss.sum() + elif reduction == "sum": + loss = loss.sum() + + return loss diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/mask_ops.py b/motion-gan-pipeline/preprocessing/third/detectron2/layers/mask_ops.py new file mode 100644 index 0000000..e7a9f3a --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/mask_ops.py @@ -0,0 +1,275 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import numpy as np +from typing import Tuple +import torch +from PIL import Image +from torch.nn import functional as F + +__all__ = ["paste_masks_in_image"] + + +BYTES_PER_FLOAT = 4 +# TODO: This memory limit may be too much or too little. It would be better to +# determine it based on available resources. +GPU_MEM_LIMIT = 1024 ** 3 # 1 GB memory limit + + +def _do_paste_mask(masks, boxes, img_h: int, img_w: int, skip_empty: bool = True): + """ + Args: + masks: N, 1, H, W + boxes: N, 4 + img_h, img_w (int): + skip_empty (bool): only paste masks within the region that + tightly bound all boxes, and returns the results this region only. + An important optimization for CPU. + + Returns: + if skip_empty == False, a mask of shape (N, img_h, img_w) + if skip_empty == True, a mask of shape (N, h', w'), and the slice + object for the corresponding region. + """ + # On GPU, paste all masks together (up to chunk size) + # by using the entire image to sample the masks + # Compared to pasting them one by one, + # this has more operations but is faster on COCO-scale dataset. + device = masks.device + + if skip_empty and not torch.jit.is_scripting(): + x0_int, y0_int = torch.clamp(boxes.min(dim=0).values.floor()[:2] - 1, min=0).to( + dtype=torch.int32 + ) + x1_int = torch.clamp(boxes[:, 2].max().ceil() + 1, max=img_w).to(dtype=torch.int32) + y1_int = torch.clamp(boxes[:, 3].max().ceil() + 1, max=img_h).to(dtype=torch.int32) + else: + x0_int, y0_int = 0, 0 + x1_int, y1_int = img_w, img_h + x0, y0, x1, y1 = torch.split(boxes, 1, dim=1) # each is Nx1 + + N = masks.shape[0] + + img_y = torch.arange(y0_int, y1_int, device=device, dtype=torch.float32) + 0.5 + img_x = torch.arange(x0_int, x1_int, device=device, dtype=torch.float32) + 0.5 + img_y = (img_y - y0) / (y1 - y0) * 2 - 1 + img_x = (img_x - x0) / (x1 - x0) * 2 - 1 + # img_x, img_y have shapes (N, w), (N, h) + + gx = img_x[:, None, :].expand(N, img_y.size(1), img_x.size(1)) + gy = img_y[:, :, None].expand(N, img_y.size(1), img_x.size(1)) + grid = torch.stack([gx, gy], dim=3) + + if not torch.jit.is_scripting(): + if not masks.dtype.is_floating_point: + masks = masks.float() + img_masks = F.grid_sample(masks, grid.to(masks.dtype), align_corners=False) + + if skip_empty and not torch.jit.is_scripting(): + return img_masks[:, 0], (slice(y0_int, y1_int), slice(x0_int, x1_int)) + else: + return img_masks[:, 0], () + + +# Annotate boxes as Tensor (but not Boxes) in order to use scripting +@torch.jit.script_if_tracing +def paste_masks_in_image( + masks: torch.Tensor, boxes: torch.Tensor, image_shape: Tuple[int, int], threshold: float = 0.5 +): + """ + Paste a set of masks that are of a fixed resolution (e.g., 28 x 28) into an image. + The location, height, and width for pasting each mask is determined by their + corresponding bounding boxes in boxes. + + Note: + This is a complicated but more accurate implementation. 
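Both losses are zero for identical boxes (IoU is 1 and the center distance vanishes) and grow as the centers drift apart relative to the enclosing-box diagonal. A small CPU-only sketch using the two functions above, with made-up boxes:

import torch

b1 = torch.tensor([[0.0, 0.0, 10.0, 10.0]])
b2 = torch.tensor([[2.0, 2.0, 12.0, 12.0]])
print(diou_loss(b1, b1))                    # ~0
print(diou_loss(b1, b2, reduction="mean"))  # 1 - IoU + distance / diag_len
print(ciou_loss(b1, b2, reduction="mean"))  # adds the aspect-ratio term alpha * v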
In actual deployment, it is + often enough to use a faster but less accurate implementation. + See :func:`paste_mask_in_image_old` in this file for an alternative implementation. + + Args: + masks (tensor): Tensor of shape (Bimg, Hmask, Wmask), where Bimg is the number of + detected object instances in the image and Hmask, Wmask are the mask width and mask + height of the predicted mask (e.g., Hmask = Wmask = 28). Values are in [0, 1]. + boxes (Boxes or Tensor): A Boxes of length Bimg or Tensor of shape (Bimg, 4). + boxes[i] and masks[i] correspond to the same object instance. + image_shape (tuple): height, width + threshold (float): A threshold in [0, 1] for converting the (soft) masks to + binary masks. + + Returns: + img_masks (Tensor): A tensor of shape (Bimg, Himage, Wimage), where Bimg is the + number of detected object instances and Himage, Wimage are the image width + and height. img_masks[i] is a binary mask for object instance i. + """ + + assert masks.shape[-1] == masks.shape[-2], "Only square mask predictions are supported" + N = len(masks) + if N == 0: + return masks.new_empty((0,) + image_shape, dtype=torch.uint8) + if not isinstance(boxes, torch.Tensor): + boxes = boxes.tensor + device = boxes.device + assert len(boxes) == N, boxes.shape + + img_h, img_w = image_shape + + # The actual implementation split the input into chunks, + # and paste them chunk by chunk. + if device.type == "cpu" or torch.jit.is_scripting(): + # CPU is most efficient when they are pasted one by one with skip_empty=True + # so that it performs minimal number of operations. + num_chunks = N + else: + # GPU benefits from parallelism for larger chunks, but may have memory issue + # int(img_h) because shape may be tensors in tracing + num_chunks = int(np.ceil(N * int(img_h) * int(img_w) * BYTES_PER_FLOAT / GPU_MEM_LIMIT)) + assert ( + num_chunks <= N + ), "Default GPU_MEM_LIMIT in mask_ops.py is too small; try increasing it" + chunks = torch.chunk(torch.arange(N, device=device), num_chunks) + + img_masks = torch.zeros( + N, img_h, img_w, device=device, dtype=torch.bool if threshold >= 0 else torch.uint8 + ) + for inds in chunks: + masks_chunk, spatial_inds = _do_paste_mask( + masks[inds, None, :, :], boxes[inds], img_h, img_w, skip_empty=device.type == "cpu" + ) + + if threshold >= 0: + masks_chunk = (masks_chunk >= threshold).to(dtype=torch.bool) + else: + # for visualization and debugging + masks_chunk = (masks_chunk * 255).to(dtype=torch.uint8) + + if torch.jit.is_scripting(): # Scripting does not use the optimized codepath + img_masks[inds] = masks_chunk + else: + img_masks[(inds,) + spatial_inds] = masks_chunk + return img_masks + + +# The below are the original paste function (from Detectron1) which has +# larger quantization error. +# It is faster on CPU, while the aligned one is faster on GPU thanks to grid_sample. + + +def paste_mask_in_image_old(mask, box, img_h, img_w, threshold): + """ + Paste a single mask in an image. + This is a per-box implementation of :func:`paste_masks_in_image`. + This function has larger quantization error due to incorrect pixel + modeling and is not used any more. + + Args: + mask (Tensor): A tensor of shape (Hmask, Wmask) storing the mask of a single + object instance. Values are in [0, 1]. + box (Tensor): A tensor of shape (4, ) storing the x0, y0, x1, y1 box corners + of the object instance. + img_h, img_w (int): Image height and width. + threshold (float): Mask binarization threshold in [0, 1]. 
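A sketch of the fast path above: pasting three hypothetical 28x28 soft masks into a 480x640 image (runs on CPU; the chunking logic only matters on GPU):

import torch

masks = torch.rand(3, 28, 28)  # soft masks in [0, 1]
boxes = torch.tensor([[ 10.0,  20.0, 100.0, 150.0],
                      [200.0,  50.0, 260.0, 120.0],
                      [300.0, 300.0, 400.0, 420.0]])
img_masks = paste_masks_in_image(masks, boxes, (480, 640), threshold=0.5)
print(img_masks.shape, img_masks.dtype)  # torch.Size([3, 480, 640]) torch.bool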
+ + Returns: + im_mask (Tensor): + The resized and binarized object mask pasted into the original + image plane (a tensor of shape (img_h, img_w)). + """ + # Conversion from continuous box coordinates to discrete pixel coordinates + # via truncation (cast to int32). This determines which pixels to paste the + # mask onto. + box = box.to(dtype=torch.int32) # Continuous to discrete coordinate conversion + # An example (1D) box with continuous coordinates (x0=0.7, x1=4.3) will map to + # a discrete coordinates (x0=0, x1=4). Note that box is mapped to 5 = x1 - x0 + 1 + # pixels (not x1 - x0 pixels). + samples_w = box[2] - box[0] + 1 # Number of pixel samples, *not* geometric width + samples_h = box[3] - box[1] + 1 # Number of pixel samples, *not* geometric height + + # Resample the mask from it's original grid to the new samples_w x samples_h grid + mask = Image.fromarray(mask.cpu().numpy()) + mask = mask.resize((samples_w, samples_h), resample=Image.BILINEAR) + mask = np.array(mask, copy=False) + + if threshold >= 0: + mask = np.array(mask > threshold, dtype=np.uint8) + mask = torch.from_numpy(mask) + else: + # for visualization and debugging, we also + # allow it to return an unmodified mask + mask = torch.from_numpy(mask * 255).to(torch.uint8) + + im_mask = torch.zeros((img_h, img_w), dtype=torch.uint8) + x_0 = max(box[0], 0) + x_1 = min(box[2] + 1, img_w) + y_0 = max(box[1], 0) + y_1 = min(box[3] + 1, img_h) + + im_mask[y_0:y_1, x_0:x_1] = mask[ + (y_0 - box[1]) : (y_1 - box[1]), (x_0 - box[0]) : (x_1 - box[0]) + ] + return im_mask + + +# Our pixel modeling requires extrapolation for any continuous +# coordinate < 0.5 or > length - 0.5. When sampling pixels on the masks, +# we would like this extrapolation to be an interpolation between boundary values and zero, +# instead of using absolute zero or boundary values. +# Therefore `paste_mask_in_image_old` is often used with zero padding around the masks like this: +# masks, scale = pad_masks(masks[:, 0, :, :], 1) +# boxes = scale_boxes(boxes.tensor, scale) + + +def pad_masks(masks, padding): + """ + Args: + masks (tensor): A tensor of shape (B, M, M) representing B masks. + padding (int): Number of cells to pad on all sides. + + Returns: + The padded masks and the scale factor of the padding size / original size. + """ + B = masks.shape[0] + M = masks.shape[-1] + pad2 = 2 * padding + scale = float(M + pad2) / M + padded_masks = masks.new_zeros((B, M + pad2, M + pad2)) + padded_masks[:, padding:-padding, padding:-padding] = masks + return padded_masks, scale + + +def scale_boxes(boxes, scale): + """ + Args: + boxes (tensor): A tensor of shape (B, 4) representing B boxes with 4 + coords representing the corners x0, y0, x1, y1, + scale (float): The box scaling factor. + + Returns: + Scaled boxes. + """ + w_half = (boxes[:, 2] - boxes[:, 0]) * 0.5 + h_half = (boxes[:, 3] - boxes[:, 1]) * 0.5 + x_c = (boxes[:, 2] + boxes[:, 0]) * 0.5 + y_c = (boxes[:, 3] + boxes[:, 1]) * 0.5 + + w_half *= scale + h_half *= scale + + scaled_boxes = torch.zeros_like(boxes) + scaled_boxes[:, 0] = x_c - w_half + scaled_boxes[:, 2] = x_c + w_half + scaled_boxes[:, 1] = y_c - h_half + scaled_boxes[:, 3] = y_c + h_half + return scaled_boxes + + +@torch.jit.script_if_tracing +def _paste_masks_tensor_shape( + masks: torch.Tensor, + boxes: torch.Tensor, + image_shape: Tuple[torch.Tensor, torch.Tensor], + threshold: float = 0.5, +): + """ + A wrapper of paste_masks_in_image where image_shape is Tensor. + During tracing, shapes might be tensors instead of ints. 
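The legacy path is meant to be combined with `pad_masks` and `scale_boxes`, exactly as the comment above describes. A hedged sketch (hypothetical shapes; per-box loop, CPU only):

import torch

masks = torch.rand(2, 1, 28, 28)  # (B, 1, M, M) soft masks
boxes = torch.tensor([[ 10.0, 20.0,  90.0, 140.0],
                      [200.0, 40.0, 280.0, 160.0]])

padded, scale = pad_masks(masks[:, 0, :, :], padding=1)
scaled = scale_boxes(boxes, scale)
img_masks = torch.stack([
    paste_mask_in_image_old(m, b, img_h=480, img_w=640, threshold=0.5)
    for m, b in zip(padded, scaled)
])
print(img_masks.shape)  # torch.Size([2, 480, 640])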
The Tensor->int + conversion should be scripted rather than traced. + """ + return paste_masks_in_image(masks, boxes, (int(image_shape[0]), int(image_shape[1])), threshold) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/nms.py b/motion-gan-pipeline/preprocessing/third/detectron2/layers/nms.py new file mode 100644 index 0000000..6b6be71 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/nms.py @@ -0,0 +1,139 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. + +import torch +from torchvision.ops import boxes as box_ops +from torchvision.ops import nms # noqa . for compatibility + + +def batched_nms( + boxes: torch.Tensor, scores: torch.Tensor, idxs: torch.Tensor, iou_threshold: float +): + """ + Same as torchvision.ops.boxes.batched_nms, but with float(). + """ + assert boxes.shape[-1] == 4 + # Note: Torchvision already has a strategy (https://github.com/pytorch/vision/issues/1311) + # to decide whether to use coordinate trick or for loop to implement batched_nms. So we + # just call it directly. + # Fp16 does not have enough range for batched NMS, so adding float(). + return box_ops.batched_nms(boxes.float(), scores, idxs, iou_threshold) + + +# Note: this function (nms_rotated) might be moved into +# torchvision/ops/boxes.py in the future +def nms_rotated(boxes, scores, iou_threshold): + """ + Performs non-maximum suppression (NMS) on the rotated boxes according + to their intersection-over-union (IoU). + + Rotated NMS iteratively removes lower scoring rotated boxes which have an + IoU greater than iou_threshold with another (higher scoring) rotated box. + + Note that RotatedBox (5, 3, 4, 2, -90) covers exactly the same region as + RotatedBox (5, 3, 4, 2, 90) does, and their IoU will be 1. However, they + can be representing completely different objects in certain tasks, e.g., OCR. + + As for the question of whether rotated-NMS should treat them as faraway boxes + even though their IOU is 1, it depends on the application and/or ground truth annotation. + + As an extreme example, consider a single character v and the square box around it. + + If the angle is 0 degree, the object (text) would be read as 'v'; + + If the angle is 90 degrees, the object (text) would become '>'; + + If the angle is 180 degrees, the object (text) would become '^'; + + If the angle is 270/-90 degrees, the object (text) would become '<' + + All of these cases have IoU of 1 to each other, and rotated NMS that only + uses IoU as criterion would only keep one of them with the highest score - + which, practically, still makes sense in most cases because typically + only one of theses orientations is the correct one. Also, it does not matter + as much if the box is only used to classify the object (instead of transcribing + them with a sequential OCR recognition model) later. + + On the other hand, when we use IoU to filter proposals that are close to the + ground truth during training, we should definitely take the angle into account if + we know the ground truth is labeled with the strictly correct orientation (as in, + upside-down words are annotated with -180 degrees even though they can be covered + with a 0/90/-90 degree box, etc.) + + The way the original dataset is annotated also matters. 
For example, if the dataset + is a 4-point polygon dataset that does not enforce ordering of vertices/orientation, + we can estimate a minimum rotated bounding box to this polygon, but there's no way + we can tell the correct angle with 100% confidence (as shown above, there could be 4 different + rotated boxes, with angles differed by 90 degrees to each other, covering the exactly + same region). In that case we have to just use IoU to determine the box + proximity (as many detection benchmarks (even for text) do) unless there're other + assumptions we can make (like width is always larger than height, or the object is not + rotated by more than 90 degrees CCW/CW, etc.) + + In summary, not considering angles in rotated NMS seems to be a good option for now, + but we should be aware of its implications. + + Args: + boxes (Tensor[N, 5]): Rotated boxes to perform NMS on. They are expected to be in + (x_center, y_center, width, height, angle_degrees) format. + scores (Tensor[N]): Scores for each one of the rotated boxes + iou_threshold (float): Discards all overlapping rotated boxes with IoU < iou_threshold + + Returns: + keep (Tensor): int64 tensor with the indices of the elements that have been kept + by Rotated NMS, sorted in decreasing order of scores + """ + return torch.ops.detectron2.nms_rotated(boxes, scores, iou_threshold) + + +# Note: this function (batched_nms_rotated) might be moved into +# torchvision/ops/boxes.py in the future +def batched_nms_rotated(boxes, scores, idxs, iou_threshold): + """ + Performs non-maximum suppression in a batched fashion. + + Each index value correspond to a category, and NMS + will not be applied between elements of different categories. + + Args: + boxes (Tensor[N, 5]): + boxes where NMS will be performed. They + are expected to be in (x_ctr, y_ctr, width, height, angle_degrees) format + scores (Tensor[N]): + scores for each one of the boxes + idxs (Tensor[N]): + indices of the categories for each one of the boxes. + iou_threshold (float): + discards all overlapping boxes + with IoU < iou_threshold + + Returns: + Tensor: + int64 tensor with the indices of the elements that have been kept + by NMS, sorted in decreasing order of scores + """ + assert boxes.shape[-1] == 5 + + if boxes.numel() == 0: + return torch.empty((0,), dtype=torch.int64, device=boxes.device) + boxes = boxes.float() # fp16 does not have enough range for batched NMS + # Strategy: in order to perform NMS independently per class, + # we add an offset to all the boxes. The offset is dependent + # only on the class idx, and is large enough so that boxes + # from different classes do not overlap + + # Note that batched_nms in torchvision/ops/boxes.py only uses max_coordinate, + # which won't handle negative coordinates correctly. + # Here by using min_coordinate we can make sure the negative coordinates are + # correctly handled. 
+ max_coordinate = ( + torch.max(boxes[:, 0], boxes[:, 1]) + torch.max(boxes[:, 2], boxes[:, 3]) / 2 + ).max() + min_coordinate = ( + torch.min(boxes[:, 0], boxes[:, 1]) - torch.max(boxes[:, 2], boxes[:, 3]) / 2 + ).min() + offsets = idxs.to(boxes) * (max_coordinate - min_coordinate + 1) + boxes_for_nms = boxes.clone() # avoid modifying the original values in boxes + boxes_for_nms[:, :2] += offsets[:, None] + keep = nms_rotated(boxes_for_nms, scores, iou_threshold) + return keep diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/roi_align.py b/motion-gan-pipeline/preprocessing/third/detectron2/layers/roi_align.py new file mode 100644 index 0000000..163462e --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/roi_align.py @@ -0,0 +1,74 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from torch import nn +from torchvision.ops import roi_align + + +# NOTE: torchvision's RoIAlign has a different default aligned=False +class ROIAlign(nn.Module): + def __init__(self, output_size, spatial_scale, sampling_ratio, aligned=True): + """ + Args: + output_size (tuple): h, w + spatial_scale (float): scale the input boxes by this number + sampling_ratio (int): number of inputs samples to take for each output + sample. 0 to take samples densely. + aligned (bool): if False, use the legacy implementation in + Detectron. If True, align the results more perfectly. + + Note: + The meaning of aligned=True: + + Given a continuous coordinate c, its two neighboring pixel indices (in our + pixel model) are computed by floor(c - 0.5) and ceil(c - 0.5). For example, + c=1.3 has pixel neighbors with discrete indices [0] and [1] (which are sampled + from the underlying signal at continuous coordinates 0.5 and 1.5). But the original + roi_align (aligned=False) does not subtract the 0.5 when computing neighboring + pixel indices and therefore it uses pixels with a slightly incorrect alignment + (relative to our pixel model) when performing bilinear interpolation. + + With `aligned=True`, + we first appropriately scale the ROI and then shift it by -0.5 + prior to calling roi_align. This produces the correct neighbors; see + detectron2/tests/test_roi_align.py for verification. + + The difference does not make a difference to the model's performance if + ROIAlign is used together with conv layers. + """ + super().__init__() + self.output_size = output_size + self.spatial_scale = spatial_scale + self.sampling_ratio = sampling_ratio + self.aligned = aligned + + from torchvision import __version__ + + version = tuple(int(x) for x in __version__.split(".")[:2]) + # https://github.com/pytorch/vision/pull/2438 + assert version >= (0, 7), "Require torchvision >= 0.7" + + def forward(self, input, rois): + """ + Args: + input: NCHW images + rois: Bx5 boxes. First column is the index into N. The other 4 columns are xyxy. 
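A sketch exercising both NMS variants above with made-up boxes. The axis-aligned call is pure torchvision; the rotated call needs detectron2's compiled ops (`torch.ops.detectron2.nms_rotated`):

import torch

boxes = torch.tensor([[0.0, 0.0, 10.0, 10.0],
                      [1.0, 1.0, 11.0, 11.0],   # IoU ~0.68 with the first box
                      [0.0, 0.0, 10.0, 10.0]])
scores = torch.tensor([0.9, 0.8, 0.7])
idxs = torch.tensor([0, 0, 1])                  # per-box category
print(batched_nms(boxes, scores, idxs, iou_threshold=0.5))  # tensor([0, 2])

# Rotated boxes are (x_ctr, y_ctr, w, h, angle); these two cover the same
# region (IoU 1), so only the higher-scoring one survives:
rboxes = torch.tensor([[5.0, 5.0, 10.0, 10.0,  0.0],
                       [5.0, 5.0, 10.0, 10.0, 90.0]])
print(batched_nms_rotated(rboxes, torch.tensor([0.9, 0.8]),
                          torch.tensor([0, 0]), iou_threshold=0.5))  # tensor([0])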
+ """ + assert rois.dim() == 2 and rois.size(1) == 5 + if input.is_quantized: + input = input.dequantize() + return roi_align( + input, + rois.to(dtype=input.dtype), + self.output_size, + self.spatial_scale, + self.sampling_ratio, + self.aligned, + ) + + def __repr__(self): + tmpstr = self.__class__.__name__ + "(" + tmpstr += "output_size=" + str(self.output_size) + tmpstr += ", spatial_scale=" + str(self.spatial_scale) + tmpstr += ", sampling_ratio=" + str(self.sampling_ratio) + tmpstr += ", aligned=" + str(self.aligned) + tmpstr += ")" + return tmpstr diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/roi_align_rotated.py b/motion-gan-pipeline/preprocessing/third/detectron2/layers/roi_align_rotated.py new file mode 100644 index 0000000..d097326 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/roi_align_rotated.py @@ -0,0 +1,91 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import torch +from torch import nn +from torch.autograd import Function +from torch.autograd.function import once_differentiable +from torch.nn.modules.utils import _pair + + +class _ROIAlignRotated(Function): + @staticmethod + def forward(ctx, input, roi, output_size, spatial_scale, sampling_ratio): + ctx.save_for_backward(roi) + ctx.output_size = _pair(output_size) + ctx.spatial_scale = spatial_scale + ctx.sampling_ratio = sampling_ratio + ctx.input_shape = input.size() + output = torch.ops.detectron2.roi_align_rotated_forward( + input, roi, spatial_scale, output_size[0], output_size[1], sampling_ratio + ) + return output + + @staticmethod + @once_differentiable + def backward(ctx, grad_output): + (rois,) = ctx.saved_tensors + output_size = ctx.output_size + spatial_scale = ctx.spatial_scale + sampling_ratio = ctx.sampling_ratio + bs, ch, h, w = ctx.input_shape + grad_input = torch.ops.detectron2.roi_align_rotated_backward( + grad_output, + rois, + spatial_scale, + output_size[0], + output_size[1], + bs, + ch, + h, + w, + sampling_ratio, + ) + return grad_input, None, None, None, None, None + + +roi_align_rotated = _ROIAlignRotated.apply + + +class ROIAlignRotated(nn.Module): + def __init__(self, output_size, spatial_scale, sampling_ratio): + """ + Args: + output_size (tuple): h, w + spatial_scale (float): scale the input boxes by this number + sampling_ratio (int): number of inputs samples to take for each output + sample. 0 to take samples densely. + + Note: + ROIAlignRotated supports continuous coordinate by default: + Given a continuous coordinate c, its two neighboring pixel indices (in our + pixel model) are computed by floor(c - 0.5) and ceil(c - 0.5). For example, + c=1.3 has pixel neighbors with discrete indices [0] and [1] (which are sampled + from the underlying signal at continuous coordinates 0.5 and 1.5). + """ + super(ROIAlignRotated, self).__init__() + self.output_size = output_size + self.spatial_scale = spatial_scale + self.sampling_ratio = sampling_ratio + + def forward(self, input, rois): + """ + Args: + input: NCHW images + rois: Bx6 boxes. First column is the index into N. + The other 5 columns are (x_ctr, y_ctr, width, height, angle_degrees). 
+ """ + assert rois.dim() == 2 and rois.size(1) == 6 + orig_dtype = input.dtype + if orig_dtype == torch.float16: + input = input.float() + rois = rois.float() + return roi_align_rotated( + input, rois, self.output_size, self.spatial_scale, self.sampling_ratio + ).to(dtype=orig_dtype) + + def __repr__(self): + tmpstr = self.__class__.__name__ + "(" + tmpstr += "output_size=" + str(self.output_size) + tmpstr += ", spatial_scale=" + str(self.spatial_scale) + tmpstr += ", sampling_ratio=" + str(self.sampling_ratio) + tmpstr += ")" + return tmpstr diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/rotated_boxes.py b/motion-gan-pipeline/preprocessing/third/detectron2/layers/rotated_boxes.py new file mode 100644 index 0000000..03f73b3 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/rotated_boxes.py @@ -0,0 +1,21 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from __future__ import absolute_import, division, print_function, unicode_literals +import torch + + +def pairwise_iou_rotated(boxes1, boxes2): + """ + Return intersection-over-union (Jaccard index) of boxes. + + Both sets of boxes are expected to be in + (x_center, y_center, width, height, angle) format. + + Arguments: + boxes1 (Tensor[N, 5]) + boxes2 (Tensor[M, 5]) + + Returns: + iou (Tensor[N, M]): the NxM matrix containing the pairwise + IoU values for every element in boxes1 and boxes2 + """ + return torch.ops.detectron2.box_iou_rotated(boxes1, boxes2) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/shape_spec.py b/motion-gan-pipeline/preprocessing/third/detectron2/layers/shape_spec.py new file mode 100644 index 0000000..fe7e8e2 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/shape_spec.py @@ -0,0 +1,20 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. +from collections import namedtuple + + +class ShapeSpec(namedtuple("_ShapeSpec", ["channels", "height", "width", "stride"])): + """ + A simple structure that contains basic shape specification about a tensor. + It is often used as the auxiliary inputs/outputs of models, + to complement the lack of shape inference ability among pytorch modules. + + Attributes: + channels: + height: + width: + stride: + """ + + def __new__(cls, channels=None, height=None, width=None, stride=None): + return super().__new__(cls, channels, height, width, stride) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/layers/wrappers.py b/motion-gan-pipeline/preprocessing/third/detectron2/layers/wrappers.py new file mode 100644 index 0000000..29d0ef9 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/layers/wrappers.py @@ -0,0 +1,132 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +""" +Wrappers around on some nn functions, mainly to support empty tensors. + +Ideally, add support directly in PyTorch to empty tensors in those functions. + +These can be removed once https://github.com/pytorch/pytorch/issues/12013 +is implemented +""" + +from typing import List, Optional +import torch +from torch.nn import functional as F + + +def shapes_to_tensor(x: List[int], device: Optional[torch.device] = None) -> torch.Tensor: + """ + Turn a list of integer scalars or integer Tensor scalars into a vector, + in a way that's both traceable and scriptable. + + In tracing, `x` should be a list of scalar Tensor, so the output can trace to the inputs. + In scripting or eager, `x` should be a list of int. 
+ """ + if torch.jit.is_scripting(): + return torch.as_tensor(x, device=device) + if torch.jit.is_tracing(): + assert all( + [isinstance(t, torch.Tensor) for t in x] + ), "Shape should be tensor during tracing!" + # as_tensor should not be used in tracing because it records a constant + ret = torch.stack(x) + if ret.device != device: # avoid recording a hard-coded device if not necessary + ret = ret.to(device=device) + return ret + return torch.as_tensor(x, device=device) + + +def cat(tensors: List[torch.Tensor], dim: int = 0): + """ + Efficient version of torch.cat that avoids a copy if there is only a single element in a list + """ + assert isinstance(tensors, (list, tuple)) + if len(tensors) == 1: + return tensors[0] + return torch.cat(tensors, dim) + + +def cross_entropy(input, target, *, reduction="mean", **kwargs): + """ + Same as `torch.nn.functional.cross_entropy`, but returns 0 (instead of nan) + for empty inputs. + """ + if target.numel() == 0 and reduction == "mean": + return input.sum() * 0.0 # connect the gradient + return F.cross_entropy(input, target, reduction=reduction, **kwargs) + + +class _NewEmptyTensorOp(torch.autograd.Function): + @staticmethod + def forward(ctx, x, new_shape): + ctx.shape = x.shape + return x.new_empty(new_shape) + + @staticmethod + def backward(ctx, grad): + shape = ctx.shape + return _NewEmptyTensorOp.apply(grad, shape), None + + +class Conv2d(torch.nn.Conv2d): + """ + A wrapper around :class:`torch.nn.Conv2d` to support empty inputs and more features. + """ + + def __init__(self, *args, **kwargs): + """ + Extra keyword arguments supported in addition to those in `torch.nn.Conv2d`: + + Args: + norm (nn.Module, optional): a normalization layer + activation (callable(Tensor) -> Tensor): a callable activation function + + It assumes that norm layer is used before activation. + """ + norm = kwargs.pop("norm", None) + activation = kwargs.pop("activation", None) + super().__init__(*args, **kwargs) + + self.norm = norm + self.activation = activation + + def forward(self, x): + # torchscript does not support SyncBatchNorm yet + # https://github.com/pytorch/pytorch/issues/40507 + # and we skip these codes in torchscript since: + # 1. currently we only support torchscript in evaluation mode + # 2. features needed by exporting module to torchscript are added in PyTorch 1.6 or + # later version, `Conv2d` in these PyTorch versions has already supported empty inputs. + if not torch.jit.is_scripting(): + if x.numel() == 0 and self.training: + # https://github.com/pytorch/pytorch/issues/12013 + assert not isinstance( + self.norm, torch.nn.SyncBatchNorm + ), "SyncBatchNorm does not support empty inputs!" + + x = F.conv2d( + x, self.weight, self.bias, self.stride, self.padding, self.dilation, self.groups + ) + if self.norm is not None: + x = self.norm(x) + if self.activation is not None: + x = self.activation(x) + return x + + +ConvTranspose2d = torch.nn.ConvTranspose2d +BatchNorm2d = torch.nn.BatchNorm2d +interpolate = F.interpolate +Linear = torch.nn.Linear + + +def nonzero_tuple(x): + """ + A 'as_tuple=True' version of torch.nonzero to support torchscript. 
+ because of https://github.com/pytorch/pytorch/issues/38718 + """ + if torch.jit.is_scripting(): + if x.dim() == 0: + return x.unsqueeze(0).nonzero().unbind(1) + return x.nonzero().unbind(1) + else: + return x.nonzero(as_tuple=True) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/model_zoo/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/model_zoo/__init__.py new file mode 100644 index 0000000..6204208 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/model_zoo/__init__.py @@ -0,0 +1,10 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +""" +Model Zoo API for Detectron2: a collection of functions to create common model architectures +listed in `MODEL_ZOO.md `_, +and optionally load their pre-trained weights. +""" + +from .model_zoo import get, get_config_file, get_checkpoint_url, get_config + +__all__ = ["get_checkpoint_url", "get", "get_config_file", "get_config"] diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/model_zoo/model_zoo.py b/motion-gan-pipeline/preprocessing/third/detectron2/model_zoo/model_zoo.py new file mode 100644 index 0000000..5b90bc9 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/model_zoo/model_zoo.py @@ -0,0 +1,213 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import os +from typing import Optional +import pkg_resources +import torch + +from detectron2.checkpoint import DetectionCheckpointer +from detectron2.config import CfgNode, LazyConfig, get_cfg, instantiate +from detectron2.modeling import build_model + + +class _ModelZooUrls(object): + """ + Mapping from names to officially released Detectron2 pre-trained models. + """ + + S3_PREFIX = "https://dl.fbaipublicfiles.com/detectron2/" + + # format: {config_path.yaml} -> model_id/model_final_{commit}.pkl + CONFIG_PATH_TO_URL_SUFFIX = { + # COCO Detection with Faster R-CNN + "COCO-Detection/faster_rcnn_R_50_C4_1x": "137257644/model_final_721ade.pkl", + "COCO-Detection/faster_rcnn_R_50_DC5_1x": "137847829/model_final_51d356.pkl", + "COCO-Detection/faster_rcnn_R_50_FPN_1x": "137257794/model_final_b275ba.pkl", + "COCO-Detection/faster_rcnn_R_50_C4_3x": "137849393/model_final_f97cb7.pkl", + "COCO-Detection/faster_rcnn_R_50_DC5_3x": "137849425/model_final_68d202.pkl", + "COCO-Detection/faster_rcnn_R_50_FPN_3x": "137849458/model_final_280758.pkl", + "COCO-Detection/faster_rcnn_R_101_C4_3x": "138204752/model_final_298dad.pkl", + "COCO-Detection/faster_rcnn_R_101_DC5_3x": "138204841/model_final_3e0943.pkl", + "COCO-Detection/faster_rcnn_R_101_FPN_3x": "137851257/model_final_f6e8b1.pkl", + "COCO-Detection/faster_rcnn_X_101_32x8d_FPN_3x": "139173657/model_final_68b088.pkl", + # COCO Detection with RetinaNet + "COCO-Detection/retinanet_R_50_FPN_1x": "190397773/model_final_bfca0b.pkl", + "COCO-Detection/retinanet_R_50_FPN_3x": "190397829/model_final_5bd44e.pkl", + "COCO-Detection/retinanet_R_101_FPN_3x": "190397697/model_final_971ab9.pkl", + # COCO Detection with RPN and Fast R-CNN + "COCO-Detection/rpn_R_50_C4_1x": "137258005/model_final_450694.pkl", + "COCO-Detection/rpn_R_50_FPN_1x": "137258492/model_final_02ce48.pkl", + "COCO-Detection/fast_rcnn_R_50_FPN_1x": "137635226/model_final_e5f7ce.pkl", + # COCO Instance Segmentation Baselines with Mask R-CNN + "COCO-InstanceSegmentation/mask_rcnn_R_50_C4_1x": "137259246/model_final_9243eb.pkl", + "COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_1x": "137260150/model_final_4f86c3.pkl", + "COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x": "137260431/model_final_a54504.pkl", + 
"COCO-InstanceSegmentation/mask_rcnn_R_50_C4_3x": "137849525/model_final_4ce675.pkl", + "COCO-InstanceSegmentation/mask_rcnn_R_50_DC5_3x": "137849551/model_final_84107b.pkl", + "COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x": "137849600/model_final_f10217.pkl", + "COCO-InstanceSegmentation/mask_rcnn_R_101_C4_3x": "138363239/model_final_a2914c.pkl", + "COCO-InstanceSegmentation/mask_rcnn_R_101_DC5_3x": "138363294/model_final_0464b7.pkl", + "COCO-InstanceSegmentation/mask_rcnn_R_101_FPN_3x": "138205316/model_final_a3ec72.pkl", + "COCO-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_3x": "139653917/model_final_2d9806.pkl", # noqa + # New baselines using Large-Scale Jitter and Longer Training Schedule + "new_baselines/mask_rcnn_R_50_FPN_100ep_LSJ": "42047764/model_final_bb69de.pkl", + "new_baselines/mask_rcnn_R_50_FPN_200ep_LSJ": "42047638/model_final_89a8d3.pkl", + "new_baselines/mask_rcnn_R_50_FPN_400ep_LSJ": "42019571/model_final_14d201.pkl", + "new_baselines/mask_rcnn_R_101_FPN_100ep_LSJ": "42025812/model_final_4f7b58.pkl", + "new_baselines/mask_rcnn_R_101_FPN_200ep_LSJ": "42131867/model_final_0bb7ae.pkl", + "new_baselines/mask_rcnn_R_101_FPN_400ep_LSJ": "42073830/model_final_f96b26.pkl", + "new_baselines/mask_rcnn_regnetx_4gf_dds_FPN_100ep_LSJ": "42047771/model_final_b7fbab.pkl", # noqa + "new_baselines/mask_rcnn_regnetx_4gf_dds_FPN_200ep_LSJ": "42132721/model_final_5d87c1.pkl", # noqa + "new_baselines/mask_rcnn_regnetx_4gf_dds_FPN_400ep_LSJ": "42025447/model_final_f1362d.pkl", # noqa + "new_baselines/mask_rcnn_regnety_4gf_dds_FPN_100ep_LSJ": "42047784/model_final_6ba57e.pkl", # noqa + "new_baselines/mask_rcnn_regnety_4gf_dds_FPN_200ep_LSJ": "42047642/model_final_27b9c1.pkl", # noqa + "new_baselines/mask_rcnn_regnety_4gf_dds_FPN_400ep_LSJ": "42045954/model_final_ef3a80.pkl", # noqa + # COCO Person Keypoint Detection Baselines with Keypoint R-CNN + "COCO-Keypoints/keypoint_rcnn_R_50_FPN_1x": "137261548/model_final_04e291.pkl", + "COCO-Keypoints/keypoint_rcnn_R_50_FPN_3x": "137849621/model_final_a6e10b.pkl", + "COCO-Keypoints/keypoint_rcnn_R_101_FPN_3x": "138363331/model_final_997cc7.pkl", + "COCO-Keypoints/keypoint_rcnn_X_101_32x8d_FPN_3x": "139686956/model_final_5ad38f.pkl", + # COCO Panoptic Segmentation Baselines with Panoptic FPN + "COCO-PanopticSegmentation/panoptic_fpn_R_50_1x": "139514544/model_final_dbfeb4.pkl", + "COCO-PanopticSegmentation/panoptic_fpn_R_50_3x": "139514569/model_final_c10459.pkl", + "COCO-PanopticSegmentation/panoptic_fpn_R_101_3x": "139514519/model_final_cafdb1.pkl", + # LVIS Instance Segmentation Baselines with Mask R-CNN + "LVISv0.5-InstanceSegmentation/mask_rcnn_R_50_FPN_1x": "144219072/model_final_571f7c.pkl", # noqa + "LVISv0.5-InstanceSegmentation/mask_rcnn_R_101_FPN_1x": "144219035/model_final_824ab5.pkl", # noqa + "LVISv0.5-InstanceSegmentation/mask_rcnn_X_101_32x8d_FPN_1x": "144219108/model_final_5e3439.pkl", # noqa + # Cityscapes & Pascal VOC Baselines + "Cityscapes/mask_rcnn_R_50_FPN": "142423278/model_final_af9cf5.pkl", + "PascalVOC-Detection/faster_rcnn_R_50_C4": "142202221/model_final_b1acc2.pkl", + # Other Settings + "Misc/mask_rcnn_R_50_FPN_1x_dconv_c3-c5": "138602867/model_final_65c703.pkl", + "Misc/mask_rcnn_R_50_FPN_3x_dconv_c3-c5": "144998336/model_final_821d0b.pkl", + "Misc/cascade_mask_rcnn_R_50_FPN_1x": "138602847/model_final_e9d89b.pkl", + "Misc/cascade_mask_rcnn_R_50_FPN_3x": "144998488/model_final_480dd8.pkl", + "Misc/mask_rcnn_R_50_FPN_3x_syncbn": "169527823/model_final_3b3c51.pkl", + "Misc/mask_rcnn_R_50_FPN_3x_gn": 
"138602888/model_final_dc5d9e.pkl", + "Misc/scratch_mask_rcnn_R_50_FPN_3x_gn": "138602908/model_final_01ca85.pkl", + "Misc/scratch_mask_rcnn_R_50_FPN_9x_gn": "183808979/model_final_da7b4c.pkl", + "Misc/scratch_mask_rcnn_R_50_FPN_9x_syncbn": "184226666/model_final_5ce33e.pkl", + "Misc/panoptic_fpn_R_101_dconv_cascade_gn_3x": "139797668/model_final_be35db.pkl", + "Misc/cascade_mask_rcnn_X_152_32x8d_FPN_IN5k_gn_dconv": "18131413/model_0039999_e76410.pkl", # noqa + # D1 Comparisons + "Detectron1-Comparisons/faster_rcnn_R_50_FPN_noaug_1x": "137781054/model_final_7ab50c.pkl", # noqa + "Detectron1-Comparisons/mask_rcnn_R_50_FPN_noaug_1x": "137781281/model_final_62ca52.pkl", # noqa + "Detectron1-Comparisons/keypoint_rcnn_R_50_FPN_1x": "137781195/model_final_cce136.pkl", + } + + @staticmethod + def query(config_path: str) -> Optional[str]: + """ + Args: + config_path: relative config filename + """ + name = config_path.replace(".yaml", "").replace(".py", "") + if name in _ModelZooUrls.CONFIG_PATH_TO_URL_SUFFIX: + suffix = _ModelZooUrls.CONFIG_PATH_TO_URL_SUFFIX[name] + return _ModelZooUrls.S3_PREFIX + name + "/" + suffix + return None + + +def get_checkpoint_url(config_path): + """ + Returns the URL to the model trained using the given config + + Args: + config_path (str): config file name relative to detectron2's "configs/" + directory, e.g., "COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml" + + Returns: + str: a URL to the model + """ + url = _ModelZooUrls.query(config_path) + if url is None: + raise RuntimeError("Pretrained model for {} is not available!".format(config_path)) + return url + + +def get_config_file(config_path): + """ + Returns path to a builtin config file. + + Args: + config_path (str): config file name relative to detectron2's "configs/" + directory, e.g., "COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml" + + Returns: + str: the real path to the config file. + """ + cfg_file = pkg_resources.resource_filename( + "detectron2.model_zoo", os.path.join("configs", config_path) + ) + if not os.path.exists(cfg_file): + raise RuntimeError("{} not available in Model Zoo!".format(config_path)) + return cfg_file + + +def get_config(config_path, trained: bool = False): + """ + Returns a config object for a model in model zoo. + + Args: + config_path (str): config file name relative to detectron2's "configs/" + directory, e.g., "COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml" + trained (bool): If True, will set ``MODEL.WEIGHTS`` to trained model zoo weights. + If False, the checkpoint specified in the config file's ``MODEL.WEIGHTS`` is used + instead; this will typically (though not always) initialize a subset of weights using + an ImageNet pre-trained model, while randomly initializing the other weights. + + Returns: + CfgNode or omegaconf.DictConfig: a config object + """ + cfg_file = get_config_file(config_path) + if cfg_file.endswith(".yaml"): + cfg = get_cfg() + cfg.merge_from_file(cfg_file) + if trained: + cfg.MODEL.WEIGHTS = get_checkpoint_url(config_path) + return cfg + elif cfg_file.endswith(".py"): + cfg = LazyConfig.load(cfg_file) + if trained: + url = get_checkpoint_url(config_path) + if "train" in cfg and "init_checkpoint" in cfg.train: + cfg.train.init_checkpoint = url + else: + raise NotImplementedError + return cfg + + +def get(config_path, trained: bool = False, device: Optional[str] = None): + """ + Get a model specified by relative path under Detectron2's official ``configs/`` directory. 
+ + Args: + config_path (str): config file name relative to detectron2's "configs/" + directory, e.g., "COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml" + trained (bool): see :func:`get_config`. + device (str or None): overwrite the device in config, if given. + + Returns: + nn.Module: a detectron2 model. Will be in training mode. + + Example: + :: + from detectron2 import model_zoo + model = model_zoo.get("COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_1x.yaml", trained=True) + """ + cfg = get_config(config_path, trained) + if device is None and not torch.cuda.is_available(): + device = "cpu" + if device is not None and isinstance(cfg, CfgNode): + cfg.MODEL.DEVICE = device + + if isinstance(cfg, CfgNode): + model = build_model(cfg) + DetectionCheckpointer(model).load(cfg.MODEL.WEIGHTS) + else: + model = instantiate(cfg.model) + if device is not None: + model = model.to(device) + if "train" in cfg and "init_checkpoint" in cfg.train: + DetectionCheckpointer(model).load(cfg.train.init_checkpoint) + return model diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/__init__.py new file mode 100644 index 0000000..576493d --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/__init__.py @@ -0,0 +1,59 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from detectron2.layers import ShapeSpec + +from .anchor_generator import build_anchor_generator, ANCHOR_GENERATOR_REGISTRY +from .backbone import ( + BACKBONE_REGISTRY, + FPN, + Backbone, + ResNet, + ResNetBlockBase, + build_backbone, + build_resnet_backbone, + make_stage, +) +from .meta_arch import ( + META_ARCH_REGISTRY, + SEM_SEG_HEADS_REGISTRY, + GeneralizedRCNN, + PanopticFPN, + ProposalNetwork, + RetinaNet, + SemanticSegmentor, + build_model, + build_sem_seg_head, + FCOS, +) +from .postprocessing import detector_postprocess +from .proposal_generator import ( + PROPOSAL_GENERATOR_REGISTRY, + build_proposal_generator, + RPN_HEAD_REGISTRY, + build_rpn_head, +) +from .roi_heads import ( + ROI_BOX_HEAD_REGISTRY, + ROI_HEADS_REGISTRY, + ROI_KEYPOINT_HEAD_REGISTRY, + ROI_MASK_HEAD_REGISTRY, + ROIHeads, + StandardROIHeads, + BaseMaskRCNNHead, + BaseKeypointRCNNHead, + FastRCNNOutputLayers, + build_box_head, + build_keypoint_head, + build_mask_head, + build_roi_heads, +) +from .test_time_augmentation import DatasetMapperTTA, GeneralizedRCNNWithTTA +from .mmdet_wrapper import MMDetBackbone, MMDetDetector + +_EXCLUDE = {"ShapeSpec"} +__all__ = [k for k in globals().keys() if k not in _EXCLUDE and not k.startswith("_")] + + +from detectron2.utils.env import fixup_module_metadata + +fixup_module_metadata(__name__, globals(), __all__) +del fixup_module_metadata diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/anchor_generator.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/anchor_generator.py new file mode 100644 index 0000000..ee4b988 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/anchor_generator.py @@ -0,0 +1,382 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+import collections +import math +from typing import List +import torch +from torch import nn + +from detectron2.config import configurable +from detectron2.layers import ShapeSpec +from detectron2.structures import Boxes, RotatedBoxes +from detectron2.utils.registry import Registry + +ANCHOR_GENERATOR_REGISTRY = Registry("ANCHOR_GENERATOR") +ANCHOR_GENERATOR_REGISTRY.__doc__ = """ +Registry for modules that creates object detection anchors for feature maps. + +The registered object will be called with `obj(cfg, input_shape)`. +""" + + +class BufferList(nn.Module): + """ + Similar to nn.ParameterList, but for buffers + """ + + def __init__(self, buffers): + super().__init__() + for i, buffer in enumerate(buffers): + # Use non-persistent buffer so the values are not saved in checkpoint + self.register_buffer(str(i), buffer, persistent=False) + + def __len__(self): + return len(self._buffers) + + def __iter__(self): + return iter(self._buffers.values()) + + +def _create_grid_offsets(size: List[int], stride: int, offset: float, device: torch.device): + grid_height, grid_width = size + shifts_x = torch.arange( + offset * stride, grid_width * stride, step=stride, dtype=torch.float32, device=device + ) + shifts_y = torch.arange( + offset * stride, grid_height * stride, step=stride, dtype=torch.float32, device=device + ) + + shift_y, shift_x = torch.meshgrid(shifts_y, shifts_x) + shift_x = shift_x.reshape(-1) + shift_y = shift_y.reshape(-1) + return shift_x, shift_y + + +def _broadcast_params(params, num_features, name): + """ + If one size (or aspect ratio) is specified and there are multiple feature + maps, we "broadcast" anchors of that single size (or aspect ratio) + over all feature maps. + + If params is list[float], or list[list[float]] with len(params) == 1, repeat + it num_features time. + + Returns: + list[list[float]]: param for each feature + """ + assert isinstance( + params, collections.abc.Sequence + ), f"{name} in anchor generator has to be a list! Got {params}." + assert len(params), f"{name} in anchor generator cannot be empty!" + if not isinstance(params[0], collections.abc.Sequence): # params is list[float] + return [params] * num_features + if len(params) == 1: + return list(params) * num_features + assert len(params) == num_features, ( + f"Got {name} of length {len(params)} in anchor generator, " + f"but the number of input features is {num_features}!" + ) + return params + + +@ANCHOR_GENERATOR_REGISTRY.register() +class DefaultAnchorGenerator(nn.Module): + """ + Compute anchors in the standard ways described in + "Faster R-CNN: Towards Real-Time Object Detection with Region Proposal Networks". + """ + + box_dim: torch.jit.Final[int] = 4 + """ + the dimension of each anchor box. + """ + + @configurable + def __init__(self, *, sizes, aspect_ratios, strides, offset=0.5): + """ + This interface is experimental. + + Args: + sizes (list[list[float]] or list[float]): + If ``sizes`` is list[list[float]], ``sizes[i]`` is the list of anchor sizes + (i.e. sqrt of anchor area) to use for the i-th feature map. + If ``sizes`` is list[float], ``sizes`` is used for all feature maps. + Anchor sizes are given in absolute lengths in units of + the input image; they do not dynamically scale if the input image size changes. + aspect_ratios (list[list[float]] or list[float]): list of aspect ratios + (i.e. height / width) to use for anchors. Same "broadcast" rule for `sizes` applies. + strides (list[int]): stride of each input feature. 
+ offset (float): Relative offset between the center of the first anchor and the top-left + corner of the image. Value has to be in [0, 1). + Recommend to use 0.5, which means half stride. + """ + super().__init__() + + self.strides = strides + self.num_features = len(self.strides) + sizes = _broadcast_params(sizes, self.num_features, "sizes") + aspect_ratios = _broadcast_params(aspect_ratios, self.num_features, "aspect_ratios") + self.cell_anchors = self._calculate_anchors(sizes, aspect_ratios) + + self.offset = offset + assert 0.0 <= self.offset < 1.0, self.offset + + @classmethod + def from_config(cls, cfg, input_shape: List[ShapeSpec]): + return { + "sizes": cfg.MODEL.ANCHOR_GENERATOR.SIZES, + "aspect_ratios": cfg.MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS, + "strides": [x.stride for x in input_shape], + "offset": cfg.MODEL.ANCHOR_GENERATOR.OFFSET, + } + + def _calculate_anchors(self, sizes, aspect_ratios): + cell_anchors = [ + self.generate_cell_anchors(s, a).float() for s, a in zip(sizes, aspect_ratios) + ] + return BufferList(cell_anchors) + + @property + @torch.jit.unused + def num_cell_anchors(self): + """ + Alias of `num_anchors`. + """ + return self.num_anchors + + @property + @torch.jit.unused + def num_anchors(self): + """ + Returns: + list[int]: Each int is the number of anchors at every pixel + location, on that feature map. + For example, if at every pixel we use anchors of 3 aspect + ratios and 5 sizes, the number of anchors is 15. + (See also ANCHOR_GENERATOR.SIZES and ANCHOR_GENERATOR.ASPECT_RATIOS in config) + + In standard RPN models, `num_anchors` on every feature map is the same. + """ + return [len(cell_anchors) for cell_anchors in self.cell_anchors] + + def _grid_anchors(self, grid_sizes: List[List[int]]): + """ + Returns: + list[Tensor]: #featuremap tensors, each is (#locations x #cell_anchors) x 4 + """ + anchors = [] + # buffers() not supported by torchscript. use named_buffers() instead + buffers: List[torch.Tensor] = [x[1] for x in self.cell_anchors.named_buffers()] + for size, stride, base_anchors in zip(grid_sizes, self.strides, buffers): + shift_x, shift_y = _create_grid_offsets(size, stride, self.offset, base_anchors.device) + shifts = torch.stack((shift_x, shift_y, shift_x, shift_y), dim=1) + + anchors.append((shifts.view(-1, 1, 4) + base_anchors.view(1, -1, 4)).reshape(-1, 4)) + + return anchors + + def generate_cell_anchors(self, sizes=(32, 64, 128, 256, 512), aspect_ratios=(0.5, 1, 2)): + """ + Generate a tensor storing canonical anchor boxes, which are all anchor + boxes of different sizes and aspect_ratios centered at (0, 0). + We can later build the set of anchors for a full feature map by + shifting and tiling these tensors (see `meth:_grid_anchors`). + + Args: + sizes (tuple[float]): + aspect_ratios (tuple[float]]): + + Returns: + Tensor of shape (len(sizes) * len(aspect_ratios), 4) storing anchor boxes + in XYXY format. + """ + + # This is different from the anchor generator defined in the original Faster R-CNN + # code or Detectron. They yield the same AP, however the old version defines cell + # anchors in a less natural way with a shift relative to the feature grid and + # quantization that results in slightly different sizes for different aspect ratios. + # See also https://github.com/facebookresearch/Detectron/issues/227 + + anchors = [] + for size in sizes: + area = size ** 2.0 + for aspect_ratio in aspect_ratios: + # s * s = w * h + # a = h / w + # ... some algebra ... 
+ # w = sqrt(s * s / a) + # h = a * w + w = math.sqrt(area / aspect_ratio) + h = aspect_ratio * w + x0, y0, x1, y1 = -w / 2.0, -h / 2.0, w / 2.0, h / 2.0 + anchors.append([x0, y0, x1, y1]) + return torch.tensor(anchors) + + def forward(self, features: List[torch.Tensor]): + """ + Args: + features (list[Tensor]): list of backbone feature maps on which to generate anchors. + + Returns: + list[Boxes]: a list of Boxes containing all the anchors for each feature map + (i.e. the cell anchors repeated over all locations in the feature map). + The number of anchors of each feature map is Hi x Wi x num_cell_anchors, + where Hi, Wi are resolution of the feature map divided by anchor stride. + """ + grid_sizes = [feature_map.shape[-2:] for feature_map in features] + anchors_over_all_feature_maps = self._grid_anchors(grid_sizes) + return [Boxes(x) for x in anchors_over_all_feature_maps] + + +@ANCHOR_GENERATOR_REGISTRY.register() +class RotatedAnchorGenerator(nn.Module): + """ + Compute rotated anchors used by Rotated RPN (RRPN), described in + "Arbitrary-Oriented Scene Text Detection via Rotation Proposals". + """ + + box_dim: int = 5 + """ + the dimension of each anchor box. + """ + + @configurable + def __init__(self, *, sizes, aspect_ratios, strides, angles, offset=0.5): + """ + This interface is experimental. + + Args: + sizes (list[list[float]] or list[float]): + If sizes is list[list[float]], sizes[i] is the list of anchor sizes + (i.e. sqrt of anchor area) to use for the i-th feature map. + If sizes is list[float], the sizes are used for all feature maps. + Anchor sizes are given in absolute lengths in units of + the input image; they do not dynamically scale if the input image size changes. + aspect_ratios (list[list[float]] or list[float]): list of aspect ratios + (i.e. height / width) to use for anchors. Same "broadcast" rule for `sizes` applies. + strides (list[int]): stride of each input feature. + angles (list[list[float]] or list[float]): list of angles (in degrees CCW) + to use for anchors. Same "broadcast" rule for `sizes` applies. + offset (float): Relative offset between the center of the first anchor and the top-left + corner of the image. Value has to be in [0, 1). + Recommend to use 0.5, which means half stride. + """ + super().__init__() + + self.strides = strides + self.num_features = len(self.strides) + sizes = _broadcast_params(sizes, self.num_features, "sizes") + aspect_ratios = _broadcast_params(aspect_ratios, self.num_features, "aspect_ratios") + angles = _broadcast_params(angles, self.num_features, "angles") + self.cell_anchors = self._calculate_anchors(sizes, aspect_ratios, angles) + + self.offset = offset + assert 0.0 <= self.offset < 1.0, self.offset + + @classmethod + def from_config(cls, cfg, input_shape: List[ShapeSpec]): + return { + "sizes": cfg.MODEL.ANCHOR_GENERATOR.SIZES, + "aspect_ratios": cfg.MODEL.ANCHOR_GENERATOR.ASPECT_RATIOS, + "strides": [x.stride for x in input_shape], + "offset": cfg.MODEL.ANCHOR_GENERATOR.OFFSET, + "angles": cfg.MODEL.ANCHOR_GENERATOR.ANGLES, + } + + def _calculate_anchors(self, sizes, aspect_ratios, angles): + cell_anchors = [ + self.generate_cell_anchors(size, aspect_ratio, angle).float() + for size, aspect_ratio, angle in zip(sizes, aspect_ratios, angles) + ] + return BufferList(cell_anchors) + + @property + def num_cell_anchors(self): + """ + Alias of `num_anchors`. 
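The algebra above keeps the anchor area fixed at s*s while the aspect ratio reshapes the box. A quick check, plus the resulting anchor count for the default 5 sizes x 3 ratios (direct keyword construction is supported by the `@configurable` decorator):

import math

s, a = 64.0, 0.5
w = math.sqrt(s * s / a)  # ~90.51
h = a * w                 # ~45.25
assert abs(w * h - s * s) < 1e-6

gen = DefaultAnchorGenerator(sizes=[[32, 64, 128, 256, 512]],
                             aspect_ratios=[[0.5, 1.0, 2.0]],
                             strides=[16])
print(gen.num_anchors)  # [15] anchors per feature-map location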
+ """ + return self.num_anchors + + @property + def num_anchors(self): + """ + Returns: + list[int]: Each int is the number of anchors at every pixel + location, on that feature map. + For example, if at every pixel we use anchors of 3 aspect + ratios, 2 sizes and 5 angles, the number of anchors is 30. + (See also ANCHOR_GENERATOR.SIZES, ANCHOR_GENERATOR.ASPECT_RATIOS + and ANCHOR_GENERATOR.ANGLES in config) + + In standard RRPN models, `num_anchors` on every feature map is the same. + """ + return [len(cell_anchors) for cell_anchors in self.cell_anchors] + + def _grid_anchors(self, grid_sizes): + anchors = [] + for size, stride, base_anchors in zip(grid_sizes, self.strides, self.cell_anchors): + shift_x, shift_y = _create_grid_offsets(size, stride, self.offset, base_anchors.device) + zeros = torch.zeros_like(shift_x) + shifts = torch.stack((shift_x, shift_y, zeros, zeros, zeros), dim=1) + + anchors.append((shifts.view(-1, 1, 5) + base_anchors.view(1, -1, 5)).reshape(-1, 5)) + + return anchors + + def generate_cell_anchors( + self, + sizes=(32, 64, 128, 256, 512), + aspect_ratios=(0.5, 1, 2), + angles=(-90, -60, -30, 0, 30, 60, 90), + ): + """ + Generate a tensor storing canonical anchor boxes, which are all anchor + boxes of different sizes, aspect_ratios, angles centered at (0, 0). + We can later build the set of anchors for a full feature map by + shifting and tiling these tensors (see `meth:_grid_anchors`). + + Args: + sizes (tuple[float]): + aspect_ratios (tuple[float]]): + angles (tuple[float]]): + + Returns: + Tensor of shape (len(sizes) * len(aspect_ratios) * len(angles), 5) + storing anchor boxes in (x_ctr, y_ctr, w, h, angle) format. + """ + anchors = [] + for size in sizes: + area = size ** 2.0 + for aspect_ratio in aspect_ratios: + # s * s = w * h + # a = h / w + # ... some algebra ... + # w = sqrt(s * s / a) + # h = a * w + w = math.sqrt(area / aspect_ratio) + h = aspect_ratio * w + anchors.extend([0, 0, w, h, a] for a in angles) + + return torch.tensor(anchors) + + def forward(self, features): + """ + Args: + features (list[Tensor]): list of backbone feature maps on which to generate anchors. + + Returns: + list[RotatedBoxes]: a list of Boxes containing all the anchors for each feature map + (i.e. the cell anchors repeated over all locations in the feature map). + The number of anchors of each feature map is Hi x Wi x num_cell_anchors, + where Hi, Wi are resolution of the feature map divided by anchor stride. + """ + grid_sizes = [feature_map.shape[-2:] for feature_map in features] + anchors_over_all_feature_maps = self._grid_anchors(grid_sizes) + return [RotatedBoxes(x) for x in anchors_over_all_feature_maps] + + +def build_anchor_generator(cfg, input_shape): + """ + Built an anchor generator from `cfg.MODEL.ANCHOR_GENERATOR.NAME`. + """ + anchor_generator = cfg.MODEL.ANCHOR_GENERATOR.NAME + return ANCHOR_GENERATOR_REGISTRY.get(anchor_generator)(cfg, input_shape) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/backbone/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/backbone/__init__.py new file mode 100644 index 0000000..55b265d --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/backbone/__init__.py @@ -0,0 +1,17 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+from .build import build_backbone, BACKBONE_REGISTRY # noqa F401 isort:skip + +from .backbone import Backbone +from .fpn import FPN +from .regnet import RegNet +from .resnet import ( + BasicStem, + ResNet, + ResNetBlockBase, + build_resnet_backbone, + make_stage, + BottleneckBlock, +) + +__all__ = [k for k in globals().keys() if not k.startswith("_")] +# TODO can expose more resnet blocks after careful consideration diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/backbone/backbone.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/backbone/backbone.py new file mode 100644 index 0000000..369fb88 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/backbone/backbone.py @@ -0,0 +1,53 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from abc import ABCMeta, abstractmethod +import torch.nn as nn + +from detectron2.layers import ShapeSpec + +__all__ = ["Backbone"] + + +class Backbone(nn.Module, metaclass=ABCMeta): + """ + Abstract base class for network backbones. + """ + + def __init__(self): + """ + The `__init__` method of any subclass can specify its own set of arguments. + """ + super().__init__() + + @abstractmethod + def forward(self): + """ + Subclasses must override this method, but adhere to the same return type. + + Returns: + dict[str->Tensor]: mapping from feature name (e.g., "res2") to tensor + """ + pass + + @property + def size_divisibility(self) -> int: + """ + Some backbones require the input height and width to be divisible by a + specific integer. This is typically true for encoder / decoder type networks + with lateral connection (e.g., FPN) for which feature maps need to match + dimension in the "bottom up" and "top down" paths. Set to 0 if no specific + input size divisibility is required. + """ + return 0 + + def output_shape(self): + """ + Returns: + dict[str->ShapeSpec] + """ + # this is a backward-compatible default + return { + name: ShapeSpec( + channels=self._out_feature_channels[name], stride=self._out_feature_strides[name] + ) + for name in self._out_features + } diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/backbone/build.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/backbone/build.py new file mode 100644 index 0000000..af02141 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/backbone/build.py @@ -0,0 +1,33 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from detectron2.layers import ShapeSpec +from detectron2.utils.registry import Registry + +from .backbone import Backbone + +BACKBONE_REGISTRY = Registry("BACKBONE") +BACKBONE_REGISTRY.__doc__ = """ +Registry for backbones, which extract feature maps from images + +The registered object must be a callable that accepts two arguments: + +1. A :class:`detectron2.config.CfgNode` +2. A :class:`detectron2.layers.ShapeSpec`, which contains the input shape specification. + +Registered object must return instance of :class:`Backbone`. +""" + + +def build_backbone(cfg, input_shape=None): + """ + Build a backbone from `cfg.MODEL.BACKBONE.NAME`. 
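+
+ Example (a sketch; assumes the default detectron2 config, where
+ cfg.MODEL.BACKBONE.NAME is "build_resnet_backbone")::
+
+ from detectron2.config import get_cfg
+ cfg = get_cfg()
+ backbone = build_backbone(cfg) # input channels inferred from cfg.MODEL.PIXEL_MEAN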
+
+ Returns:
+ an instance of :class:`Backbone`
+ """
+ if input_shape is None:
+ input_shape = ShapeSpec(channels=len(cfg.MODEL.PIXEL_MEAN))
+
+ backbone_name = cfg.MODEL.BACKBONE.NAME
+ backbone = BACKBONE_REGISTRY.get(backbone_name)(cfg, input_shape)
+ assert isinstance(backbone, Backbone)
+ return backbone
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/backbone/fpn.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/backbone/fpn.py
new file mode 100644
index 0000000..d0bdfc9
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/backbone/fpn.py
@@ -0,0 +1,255 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+import math
+import fvcore.nn.weight_init as weight_init
+import torch
+import torch.nn.functional as F
+from torch import nn
+
+from detectron2.layers import Conv2d, ShapeSpec, get_norm
+
+from .backbone import Backbone
+from .build import BACKBONE_REGISTRY
+from .resnet import build_resnet_backbone
+
+__all__ = ["build_resnet_fpn_backbone", "build_retinanet_resnet_fpn_backbone", "FPN"]
+
+
+class FPN(Backbone):
+ """
+ This module implements :paper:`FPN`.
+ It creates pyramid features built on top of some input feature maps.
+ """
+
+ _fuse_type: torch.jit.Final[str]
+
+ def __init__(
+ self, bottom_up, in_features, out_channels, norm="", top_block=None, fuse_type="sum"
+ ):
+ """
+ Args:
+ bottom_up (Backbone): module representing the bottom up subnetwork.
+ Must be a subclass of :class:`Backbone`. The multi-scale feature
+ maps generated by the bottom up network, and listed in `in_features`,
+ are used to generate FPN levels.
+ in_features (list[str]): names of the input feature maps coming
+ from the backbone to which FPN is attached. For example, if the
+ backbone produces ["res2", "res3", "res4"], any *contiguous* sublist
+ of these may be used; order must be from high to low resolution.
+ out_channels (int): number of channels in the output feature maps.
+ norm (str): the normalization to use.
+ top_block (nn.Module or None): if provided, an extra operation will
+ be performed on the output of the last (smallest resolution)
+ FPN output, and the result will extend the result list. The top_block
+ further downsamples the feature map. It must have an attribute
+ "num_levels", meaning the number of extra FPN levels added by
+ this block, and "in_feature", which is a string representing
+ its input feature (e.g., p5).
+ fuse_type (str): types for fusing the top down features and the lateral
+ ones. It can be "sum" (default), which sums up element-wise; or "avg",
+ which takes the element-wise mean of the two.
+ """
+ super(FPN, self).__init__()
+ assert isinstance(bottom_up, Backbone)
+ assert in_features, in_features
+
+ # Feature map strides and channels from the bottom up network (e.g. ResNet)
+ input_shapes = bottom_up.output_shape()
+ strides = [input_shapes[f].stride for f in in_features]
+ in_channels_per_feature = [input_shapes[f].channels for f in in_features]
+
+ _assert_strides_are_log2_contiguous(strides)
+ lateral_convs = []
+ output_convs = []
+
+ use_bias = norm == ""
+ for idx, in_channels in enumerate(in_channels_per_feature):
+ lateral_norm = get_norm(norm, out_channels)
+ output_norm = get_norm(norm, out_channels)
+
+ lateral_conv = Conv2d(
+ in_channels, out_channels, kernel_size=1, bias=use_bias, norm=lateral_norm
+ )
+ output_conv = Conv2d(
+ out_channels,
+ out_channels,
+ kernel_size=3,
+ stride=1,
+ padding=1,
+ bias=use_bias,
+ norm=output_norm,
+ )
+ weight_init.c2_xavier_fill(lateral_conv)
+ weight_init.c2_xavier_fill(output_conv)
+ stage = int(math.log2(strides[idx]))
+ self.add_module("fpn_lateral{}".format(stage), lateral_conv)
+ self.add_module("fpn_output{}".format(stage), output_conv)
+
+ lateral_convs.append(lateral_conv)
+ output_convs.append(output_conv)
+ # Place convs into top-down order (from low to high resolution)
+ # to make the top-down computation in forward clearer.
+ self.lateral_convs = lateral_convs[::-1]
+ self.output_convs = output_convs[::-1]
+ self.top_block = top_block
+ self.in_features = tuple(in_features)
+ self.bottom_up = bottom_up
+ # Return feature names are "p<stage>", like ["p2", "p3", ..., "p6"]
+ self._out_feature_strides = {"p{}".format(int(math.log2(s))): s for s in strides}
+ # top block output feature maps.
+ if self.top_block is not None:
+ for s in range(stage, stage + self.top_block.num_levels):
+ self._out_feature_strides["p{}".format(s + 1)] = 2 ** (s + 1)
+
+ self._out_features = list(self._out_feature_strides.keys())
+ self._out_feature_channels = {k: out_channels for k in self._out_features}
+ self._size_divisibility = strides[-1]
+ assert fuse_type in {"avg", "sum"}
+ self._fuse_type = fuse_type
+
+ @property
+ def size_divisibility(self):
+ return self._size_divisibility
+
+ def forward(self, x):
+ """
+ Args:
+ x (Tensor): input image tensor of shape (N, C, H, W); it is first passed
+ through the bottom-up network to produce the multi-scale feature maps.
+
+ Returns:
+ dict[str->Tensor]:
+ mapping from feature map name to FPN feature map tensor
+ in high to low resolution order. Returned feature names follow the FPN
+ paper convention: "p<stage>", where stage has stride = 2 ** stage, e.g.,
+ ["p2", "p3", ..., "p6"].
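+ For example, with a 1024x1024 input, "p2" (stride 4) is 256x256
+ and "p6" (stride 64) is 16x16.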
+ """ + bottom_up_features = self.bottom_up(x) + results = [] + prev_features = self.lateral_convs[0](bottom_up_features[self.in_features[-1]]) + results.append(self.output_convs[0](prev_features)) + + # Reverse feature maps into top-down order (from low to high resolution) + for idx, (lateral_conv, output_conv) in enumerate( + zip(self.lateral_convs, self.output_convs) + ): + # Slicing of ModuleList is not supported https://github.com/pytorch/pytorch/issues/47336 + # Therefore we loop over all modules but skip the first one + if idx > 0: + features = self.in_features[-idx - 1] + features = bottom_up_features[features] + top_down_features = F.interpolate(prev_features, scale_factor=2.0, mode="nearest") + lateral_features = lateral_conv(features) + prev_features = lateral_features + top_down_features + if self._fuse_type == "avg": + prev_features /= 2 + results.insert(0, output_conv(prev_features)) + + if self.top_block is not None: + if self.top_block.in_feature in bottom_up_features: + top_block_in_feature = bottom_up_features[self.top_block.in_feature] + else: + top_block_in_feature = results[self._out_features.index(self.top_block.in_feature)] + results.extend(self.top_block(top_block_in_feature)) + assert len(self._out_features) == len(results) + return {f: res for f, res in zip(self._out_features, results)} + + def output_shape(self): + return { + name: ShapeSpec( + channels=self._out_feature_channels[name], stride=self._out_feature_strides[name] + ) + for name in self._out_features + } + + +def _assert_strides_are_log2_contiguous(strides): + """ + Assert that each stride is 2x times its preceding stride, i.e. "contiguous in log2". + """ + for i, stride in enumerate(strides[1:], 1): + assert stride == 2 * strides[i - 1], "Strides {} {} are not log2 contiguous".format( + stride, strides[i - 1] + ) + + +class LastLevelMaxPool(nn.Module): + """ + This module is used in the original FPN to generate a downsampled + P6 feature from P5. + """ + + def __init__(self): + super().__init__() + self.num_levels = 1 + self.in_feature = "p5" + + def forward(self, x): + return [F.max_pool2d(x, kernel_size=1, stride=2, padding=0)] + + +class LastLevelP6P7(nn.Module): + """ + This module is used in RetinaNet to generate extra layers, P6 and P7 from + C5 feature. + """ + + def __init__(self, in_channels, out_channels, in_feature="res5"): + super().__init__() + self.num_levels = 2 + self.in_feature = in_feature + self.p6 = nn.Conv2d(in_channels, out_channels, 3, 2, 1) + self.p7 = nn.Conv2d(out_channels, out_channels, 3, 2, 1) + for module in [self.p6, self.p7]: + weight_init.c2_xavier_fill(module) + + def forward(self, c5): + p6 = self.p6(c5) + p7 = self.p7(F.relu(p6)) + return [p6, p7] + + +@BACKBONE_REGISTRY.register() +def build_resnet_fpn_backbone(cfg, input_shape: ShapeSpec): + """ + Args: + cfg: a detectron2 CfgNode + + Returns: + backbone (Backbone): backbone module, must be a subclass of :class:`Backbone`. + """ + bottom_up = build_resnet_backbone(cfg, input_shape) + in_features = cfg.MODEL.FPN.IN_FEATURES + out_channels = cfg.MODEL.FPN.OUT_CHANNELS + backbone = FPN( + bottom_up=bottom_up, + in_features=in_features, + out_channels=out_channels, + norm=cfg.MODEL.FPN.NORM, + top_block=LastLevelMaxPool(), + fuse_type=cfg.MODEL.FPN.FUSE_TYPE, + ) + return backbone + + +@BACKBONE_REGISTRY.register() +def build_retinanet_resnet_fpn_backbone(cfg, input_shape: ShapeSpec): + """ + Args: + cfg: a detectron2 CfgNode + + Returns: + backbone (Backbone): backbone module, must be a subclass of :class:`Backbone`. 
+ """ + bottom_up = build_resnet_backbone(cfg, input_shape) + in_features = cfg.MODEL.FPN.IN_FEATURES + out_channels = cfg.MODEL.FPN.OUT_CHANNELS + in_channels_p6p7 = bottom_up.output_shape()["res5"].channels + backbone = FPN( + bottom_up=bottom_up, + in_features=in_features, + out_channels=out_channels, + norm=cfg.MODEL.FPN.NORM, + top_block=LastLevelP6P7(in_channels_p6p7, out_channels), + fuse_type=cfg.MODEL.FPN.FUSE_TYPE, + ) + return backbone diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/backbone/regnet.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/backbone/regnet.py new file mode 100644 index 0000000..3533d63 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/backbone/regnet.py @@ -0,0 +1,452 @@ +# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved +""" +Implementation of RegNet models from :paper:`dds` and :paper:`scaling`. + +This code is adapted from https://github.com/facebookresearch/pycls with minimal modifications. +Some code duplication exists between RegNet and ResNets (e.g., ResStem) in order to simplify +model loading. +""" + +import numpy as np +from torch import nn + +from detectron2.layers import CNNBlockBase, ShapeSpec, get_norm + +from .backbone import Backbone + +__all__ = [ + "AnyNet", + "RegNet", + "ResStem", + "SimpleStem", + "VanillaBlock", + "ResBasicBlock", + "ResBottleneckBlock", +] + + +def conv2d(w_in, w_out, k, *, stride=1, groups=1, bias=False): + """Helper for building a conv2d layer.""" + assert k % 2 == 1, "Only odd size kernels supported to avoid padding issues." + s, p, g, b = stride, (k - 1) // 2, groups, bias + return nn.Conv2d(w_in, w_out, k, stride=s, padding=p, groups=g, bias=b) + + +def gap2d(): + """Helper for building a global average pooling layer.""" + return nn.AdaptiveAvgPool2d((1, 1)) + + +def pool2d(k, *, stride=1): + """Helper for building a pool2d layer.""" + assert k % 2 == 1, "Only odd size kernels supported to avoid padding issues." 
+ return nn.MaxPool2d(k, stride=stride, padding=(k - 1) // 2)
+
+
+def init_weights(m):
+ """Performs ResNet-style weight initialization."""
+ if isinstance(m, nn.Conv2d):
+ # Note that there is no bias due to BN
+ fan_out = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
+ m.weight.data.normal_(mean=0.0, std=np.sqrt(2.0 / fan_out))
+ elif isinstance(m, nn.BatchNorm2d):
+ m.weight.data.fill_(1.0)
+ m.bias.data.zero_()
+ elif isinstance(m, nn.Linear):
+ m.weight.data.normal_(mean=0.0, std=0.01)
+ m.bias.data.zero_()
+
+
+class ResStem(CNNBlockBase):
+ """ResNet stem for ImageNet: 7x7, BN, AF, MaxPool."""
+
+ def __init__(self, w_in, w_out, norm, activation_class):
+ super().__init__(w_in, w_out, 4)
+ self.conv = conv2d(w_in, w_out, 7, stride=2)
+ self.bn = get_norm(norm, w_out)
+ self.af = activation_class()
+ self.pool = pool2d(3, stride=2)
+
+ def forward(self, x):
+ for layer in self.children():
+ x = layer(x)
+ return x
+
+
+class SimpleStem(CNNBlockBase):
+ """Simple stem for ImageNet: 3x3, BN, AF."""
+
+ def __init__(self, w_in, w_out, norm, activation_class):
+ super().__init__(w_in, w_out, 2)
+ self.conv = conv2d(w_in, w_out, 3, stride=2)
+ self.bn = get_norm(norm, w_out)
+ self.af = activation_class()
+
+ def forward(self, x):
+ for layer in self.children():
+ x = layer(x)
+ return x
+
+
+class SE(nn.Module):
+ """Squeeze-and-Excitation (SE) block: AvgPool, FC, Act, FC, Sigmoid."""
+
+ def __init__(self, w_in, w_se, activation_class):
+ super().__init__()
+ self.avg_pool = gap2d()
+ self.f_ex = nn.Sequential(
+ conv2d(w_in, w_se, 1, bias=True),
+ activation_class(),
+ conv2d(w_se, w_in, 1, bias=True),
+ nn.Sigmoid(),
+ )
+
+ def forward(self, x):
+ return x * self.f_ex(self.avg_pool(x))
+
+
+class VanillaBlock(CNNBlockBase):
+ """Vanilla block: [3x3 conv, BN, Relu] x2."""
+
+ def __init__(self, w_in, w_out, stride, norm, activation_class, _params):
+ super().__init__(w_in, w_out, stride)
+ self.a = conv2d(w_in, w_out, 3, stride=stride)
+ self.a_bn = get_norm(norm, w_out)
+ self.a_af = activation_class()
+ self.b = conv2d(w_out, w_out, 3)
+ self.b_bn = get_norm(norm, w_out)
+ self.b_af = activation_class()
+
+ def forward(self, x):
+ for layer in self.children():
+ x = layer(x)
+ return x
+
+
+class BasicTransform(nn.Module):
+ """Basic transformation: [3x3 conv, BN, Relu] x2."""
+
+ def __init__(self, w_in, w_out, stride, norm, activation_class, _params):
+ super().__init__()
+ self.a = conv2d(w_in, w_out, 3, stride=stride)
+ self.a_bn = get_norm(norm, w_out)
+ self.a_af = activation_class()
+ self.b = conv2d(w_out, w_out, 3)
+ self.b_bn = get_norm(norm, w_out)
+ self.b_bn.final_bn = True
+
+ def forward(self, x):
+ for layer in self.children():
+ x = layer(x)
+ return x
+
+
+class ResBasicBlock(CNNBlockBase):
+ """Residual basic block: x + f(x), f = basic transform."""
+
+ def __init__(self, w_in, w_out, stride, norm, activation_class, params):
+ super().__init__(w_in, w_out, stride)
+ self.proj, self.bn = None, None
+ if (w_in != w_out) or (stride != 1):
+ self.proj = conv2d(w_in, w_out, 1, stride=stride)
+ self.bn = get_norm(norm, w_out)
+ self.f = BasicTransform(w_in, w_out, stride, norm, activation_class, params)
+ self.af = activation_class()
+
+ def forward(self, x):
+ x_p = self.bn(self.proj(x)) if self.proj else x
+ return self.af(x_p + self.f(x))
+
+
+class BottleneckTransform(nn.Module):
+ """Bottleneck transformation: 1x1, 3x3 [+SE], 1x1."""
+
+ def __init__(self, w_in, w_out, stride, norm, activation_class, params):
+ super().__init__()
+ w_b = int(round(w_out * params["bot_mul"]))
+ w_se = int(round(w_in * params["se_r"]))
+ groups = w_b // params["group_w"]
+ self.a = conv2d(w_in, w_b, 1)
+ self.a_bn = get_norm(norm, w_b)
+ self.a_af = activation_class()
+ self.b = conv2d(w_b, w_b, 3, stride=stride, groups=groups)
+ self.b_bn = get_norm(norm, w_b)
+ self.b_af = activation_class()
+ self.se = SE(w_b, w_se, activation_class) if w_se else None
+ self.c = conv2d(w_b, w_out, 1)
+ self.c_bn = get_norm(norm, w_out)
+ self.c_bn.final_bn = True
+
+ def forward(self, x):
+ for layer in self.children():
+ x = layer(x)
+ return x
+
+
+class ResBottleneckBlock(CNNBlockBase):
+ """Residual bottleneck block: x + f(x), f = bottleneck transform."""
+
+ def __init__(self, w_in, w_out, stride, norm, activation_class, params):
+ super().__init__(w_in, w_out, stride)
+ self.proj, self.bn = None, None
+ if (w_in != w_out) or (stride != 1):
+ self.proj = conv2d(w_in, w_out, 1, stride=stride)
+ self.bn = get_norm(norm, w_out)
+ self.f = BottleneckTransform(w_in, w_out, stride, norm, activation_class, params)
+ self.af = activation_class()
+
+ def forward(self, x):
+ x_p = self.bn(self.proj(x)) if self.proj else x
+ return self.af(x_p + self.f(x))
+
+
+class AnyStage(nn.Module):
+ """AnyNet stage (sequence of blocks w/ the same output shape)."""
+
+ def __init__(self, w_in, w_out, stride, d, block_class, norm, activation_class, params):
+ super().__init__()
+ for i in range(d):
+ block = block_class(w_in, w_out, stride, norm, activation_class, params)
+ self.add_module("b{}".format(i + 1), block)
+ stride, w_in = 1, w_out
+
+ def forward(self, x):
+ for block in self.children():
+ x = block(x)
+ return x
+
+
+class AnyNet(Backbone):
+ """AnyNet model. See :paper:`dds`."""
+
+ def __init__(
+ self,
+ *,
+ stem_class,
+ stem_width,
+ block_class,
+ depths,
+ widths,
+ group_widths,
+ strides,
+ bottleneck_ratios,
+ se_ratio,
+ activation_class,
+ freeze_at=0,
+ norm="BN",
+ out_features=None,
+ ):
+ """
+ Args:
+ stem_class (callable): A callable taking 4 arguments (channels in, channels out,
+ normalization, callable returning an activation function) that returns another
+ callable implementing the stem module.
+ stem_width (int): The number of output channels that the stem produces.
+ block_class (callable): A callable taking 6 arguments (channels in, channels out,
+ stride, normalization, callable returning an activation function, a dict of
+ block-specific parameters) that returns another callable implementing the repeated
+ block module.
+ depths (list[int]): Number of blocks in each stage.
+ widths (list[int]): For each stage, the number of output channels of each block.
+ group_widths (list[int]): For each stage, the number of channels per group in group
+ convolution, if the block uses group convolution.
+ strides (list[int]): The stride that each network stage applies to its input.
+ bottleneck_ratios (list[float]): For each stage, the ratio of the number of bottleneck
+ channels to the number of block input channels (or, equivalently, output channels),
+ if the block uses a bottleneck.
+ se_ratio (float): The ratio of the number of channels used inside the squeeze-excitation
+ (SE) module to its number of input channels, if the block uses SE.
+ activation_class (callable): A callable taking no arguments that returns another
+ callable implementing an activation function.
+ freeze_at (int): The number of stages at the beginning to freeze.
+ see :meth:`freeze` for detailed explanation.
+ norm (str or callable): normalization for all conv layers.
+ See :func:`layers.get_norm` for supported format. + out_features (list[str]): name of the layers whose outputs should + be returned in forward. RegNet's use "stem" and "s1", "s2", etc for the stages after + the stem. If None, will return the output of the last layer. + """ + super().__init__() + self.stem = stem_class(3, stem_width, norm, activation_class) + + current_stride = self.stem.stride + self._out_feature_strides = {"stem": current_stride} + self._out_feature_channels = {"stem": self.stem.out_channels} + self.stages_and_names = [] + prev_w = stem_width + + for i, (d, w, s, b, g) in enumerate( + zip(depths, widths, strides, bottleneck_ratios, group_widths) + ): + params = {"bot_mul": b, "group_w": g, "se_r": se_ratio} + stage = AnyStage(prev_w, w, s, d, block_class, norm, activation_class, params) + name = "s{}".format(i + 1) + self.add_module(name, stage) + self.stages_and_names.append((stage, name)) + self._out_feature_strides[name] = current_stride = int( + current_stride * np.prod([k.stride for k in stage.children()]) + ) + self._out_feature_channels[name] = list(stage.children())[-1].out_channels + prev_w = w + + self.apply(init_weights) + + if out_features is None: + out_features = [name] + self._out_features = out_features + assert len(self._out_features) + children = [x[0] for x in self.named_children()] + for out_feature in self._out_features: + assert out_feature in children, "Available children: {} does not include {}".format( + ", ".join(children), out_feature + ) + self.freeze(freeze_at) + + def forward(self, x): + """ + Args: + x: Tensor of shape (N,C,H,W). H, W must be a multiple of ``self.size_divisibility``. + + Returns: + dict[str->Tensor]: names and the corresponding features + """ + assert x.dim() == 4, f"Model takes an input of shape (N, C, H, W). Got {x.shape} instead!" + outputs = {} + x = self.stem(x) + if "stem" in self._out_features: + outputs["stem"] = x + for stage, name in self.stages_and_names: + x = stage(x) + if name in self._out_features: + outputs[name] = x + return outputs + + def output_shape(self): + return { + name: ShapeSpec( + channels=self._out_feature_channels[name], stride=self._out_feature_strides[name] + ) + for name in self._out_features + } + + def freeze(self, freeze_at=0): + """ + Freeze the first several stages of the model. Commonly used in fine-tuning. + + Layers that produce the same feature map spatial size are defined as one + "stage" by :paper:`FPN`. + + Args: + freeze_at (int): number of stages to freeze. + `1` means freezing the stem. `2` means freezing the stem and + one residual stage, etc. 
+ + Returns: + nn.Module: this model itself + """ + if freeze_at >= 1: + self.stem.freeze() + for idx, (stage, _) in enumerate(self.stages_and_names, start=2): + if freeze_at >= idx: + for block in stage.children(): + block.freeze() + return self + + +def adjust_block_compatibility(ws, bs, gs): + """Adjusts the compatibility of widths, bottlenecks, and groups.""" + assert len(ws) == len(bs) == len(gs) + assert all(w > 0 and b > 0 and g > 0 for w, b, g in zip(ws, bs, gs)) + vs = [int(max(1, w * b)) for w, b in zip(ws, bs)] + gs = [int(min(g, v)) for g, v in zip(gs, vs)] + ms = [np.lcm(g, b) if b > 1 else g for g, b in zip(gs, bs)] + vs = [max(m, int(round(v / m) * m)) for v, m in zip(vs, ms)] + ws = [int(v / b) for v, b in zip(vs, bs)] + assert all(w * b % g == 0 for w, b, g in zip(ws, bs, gs)) + return ws, bs, gs + + +def generate_regnet_parameters(w_a, w_0, w_m, d, q=8): + """Generates per stage widths and depths from RegNet parameters.""" + assert w_a >= 0 and w_0 > 0 and w_m > 1 and w_0 % q == 0 + # Generate continuous per-block ws + ws_cont = np.arange(d) * w_a + w_0 + # Generate quantized per-block ws + ks = np.round(np.log(ws_cont / w_0) / np.log(w_m)) + ws_all = w_0 * np.power(w_m, ks) + ws_all = np.round(np.divide(ws_all, q)).astype(int) * q + # Generate per stage ws and ds (assumes ws_all are sorted) + ws, ds = np.unique(ws_all, return_counts=True) + # Compute number of actual stages and total possible stages + num_stages, total_stages = len(ws), ks.max() + 1 + # Convert numpy arrays to lists and return + ws, ds, ws_all, ws_cont = (x.tolist() for x in (ws, ds, ws_all, ws_cont)) + return ws, ds, num_stages, total_stages, ws_all, ws_cont + + +class RegNet(AnyNet): + """RegNet model. See :paper:`dds`.""" + + def __init__( + self, + *, + stem_class, + stem_width, + block_class, + depth, + w_a, + w_0, + w_m, + group_width, + stride=2, + bottleneck_ratio=1.0, + se_ratio=0.0, + activation_class=None, + freeze_at=0, + norm="BN", + out_features=None, + ): + """ + Build a RegNet from the parameterization described in :paper:`dds` Section 3.3. + + Args: + See :class:`AnyNet` for arguments that are not listed here. + depth (int): Total number of blocks in the RegNet. + w_a (float): Factor by which block width would increase prior to quantizing block widths + by stage. See :paper:`dds` Section 3.3. + w_0 (int): Initial block width. See :paper:`dds` Section 3.3. + w_m (float): Parameter controlling block width quantization. + See :paper:`dds` Section 3.3. + group_width (int): Number of channels per group in group convolution, if the block uses + group convolution. + bottleneck_ratio (float): The ratio of the number of bottleneck channels to the number + of block input channels (or, equivalently, output channels), if the block uses a + bottleneck. + stride (int): The stride that each network stage applies to its input. 
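+
+ Example (a sketch; the parameter values are illustrative assumptions,
+ roughly in the range of a small RegNetX model, not a pinned configuration)::
+
+ net = RegNet(stem_class=SimpleStem, stem_width=32, block_class=ResBottleneckBlock,
+ depth=22, w_a=24.48, w_0=24, w_m=2.54, group_width=16)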
+ """ + ws, ds = generate_regnet_parameters(w_a, w_0, w_m, depth)[0:2] + ss = [stride for _ in ws] + bs = [bottleneck_ratio for _ in ws] + gs = [group_width for _ in ws] + ws, bs, gs = adjust_block_compatibility(ws, bs, gs) + + def default_activation_class(): + return nn.ReLU(inplace=True) + + super().__init__( + stem_class=stem_class, + stem_width=stem_width, + block_class=block_class, + depths=ds, + widths=ws, + strides=ss, + group_widths=gs, + bottleneck_ratios=bs, + se_ratio=se_ratio, + activation_class=default_activation_class + if activation_class is None + else activation_class, + freeze_at=freeze_at, + norm=norm, + out_features=out_features, + ) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/backbone/resnet.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/backbone/resnet.py new file mode 100644 index 0000000..5b8e842 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/backbone/resnet.py @@ -0,0 +1,694 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import numpy as np +import fvcore.nn.weight_init as weight_init +import torch +import torch.nn.functional as F +from torch import nn + +from detectron2.layers import ( + CNNBlockBase, + Conv2d, + DeformConv, + ModulatedDeformConv, + ShapeSpec, + get_norm, +) + +from .backbone import Backbone +from .build import BACKBONE_REGISTRY + +__all__ = [ + "ResNetBlockBase", + "BasicBlock", + "BottleneckBlock", + "DeformBottleneckBlock", + "BasicStem", + "ResNet", + "make_stage", + "build_resnet_backbone", +] + + +class BasicBlock(CNNBlockBase): + """ + The basic residual block for ResNet-18 and ResNet-34 defined in :paper:`ResNet`, + with two 3x3 conv layers and a projection shortcut if needed. + """ + + def __init__(self, in_channels, out_channels, *, stride=1, norm="BN"): + """ + Args: + in_channels (int): Number of input channels. + out_channels (int): Number of output channels. + stride (int): Stride for the first conv. + norm (str or callable): normalization for all conv layers. + See :func:`layers.get_norm` for supported format. + """ + super().__init__(in_channels, out_channels, stride) + + if in_channels != out_channels: + self.shortcut = Conv2d( + in_channels, + out_channels, + kernel_size=1, + stride=stride, + bias=False, + norm=get_norm(norm, out_channels), + ) + else: + self.shortcut = None + + self.conv1 = Conv2d( + in_channels, + out_channels, + kernel_size=3, + stride=stride, + padding=1, + bias=False, + norm=get_norm(norm, out_channels), + ) + + self.conv2 = Conv2d( + out_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1, + bias=False, + norm=get_norm(norm, out_channels), + ) + + for layer in [self.conv1, self.conv2, self.shortcut]: + if layer is not None: # shortcut can be None + weight_init.c2_msra_fill(layer) + + def forward(self, x): + out = self.conv1(x) + out = F.relu_(out) + out = self.conv2(out) + + if self.shortcut is not None: + shortcut = self.shortcut(x) + else: + shortcut = x + + out += shortcut + out = F.relu_(out) + return out + + +class BottleneckBlock(CNNBlockBase): + """ + The standard bottleneck residual block used by ResNet-50, 101 and 152 + defined in :paper:`ResNet`. It contains 3 conv layers with kernels + 1x1, 3x3, 1x1, and a projection shortcut if needed. 
+ """ + + def __init__( + self, + in_channels, + out_channels, + *, + bottleneck_channels, + stride=1, + num_groups=1, + norm="BN", + stride_in_1x1=False, + dilation=1, + ): + """ + Args: + bottleneck_channels (int): number of output channels for the 3x3 + "bottleneck" conv layers. + num_groups (int): number of groups for the 3x3 conv layer. + norm (str or callable): normalization for all conv layers. + See :func:`layers.get_norm` for supported format. + stride_in_1x1 (bool): when stride>1, whether to put stride in the + first 1x1 convolution or the bottleneck 3x3 convolution. + dilation (int): the dilation rate of the 3x3 conv layer. + """ + super().__init__(in_channels, out_channels, stride) + + if in_channels != out_channels: + self.shortcut = Conv2d( + in_channels, + out_channels, + kernel_size=1, + stride=stride, + bias=False, + norm=get_norm(norm, out_channels), + ) + else: + self.shortcut = None + + # The original MSRA ResNet models have stride in the first 1x1 conv + # The subsequent fb.torch.resnet and Caffe2 ResNe[X]t implementations have + # stride in the 3x3 conv + stride_1x1, stride_3x3 = (stride, 1) if stride_in_1x1 else (1, stride) + + self.conv1 = Conv2d( + in_channels, + bottleneck_channels, + kernel_size=1, + stride=stride_1x1, + bias=False, + norm=get_norm(norm, bottleneck_channels), + ) + + self.conv2 = Conv2d( + bottleneck_channels, + bottleneck_channels, + kernel_size=3, + stride=stride_3x3, + padding=1 * dilation, + bias=False, + groups=num_groups, + dilation=dilation, + norm=get_norm(norm, bottleneck_channels), + ) + + self.conv3 = Conv2d( + bottleneck_channels, + out_channels, + kernel_size=1, + bias=False, + norm=get_norm(norm, out_channels), + ) + + for layer in [self.conv1, self.conv2, self.conv3, self.shortcut]: + if layer is not None: # shortcut can be None + weight_init.c2_msra_fill(layer) + + # Zero-initialize the last normalization in each residual branch, + # so that at the beginning, the residual branch starts with zeros, + # and each residual block behaves like an identity. + # See Sec 5.1 in "Accurate, Large Minibatch SGD: Training ImageNet in 1 Hour": + # "For BN layers, the learnable scaling coefficient γ is initialized + # to be 1, except for each residual block's last BN + # where γ is initialized to be 0." + + # nn.init.constant_(self.conv3.norm.weight, 0) + # TODO this somehow hurts performance when training GN models from scratch. + # Add it as an option when we need to use this code to train a backbone. + + def forward(self, x): + out = self.conv1(x) + out = F.relu_(out) + + out = self.conv2(out) + out = F.relu_(out) + + out = self.conv3(out) + + if self.shortcut is not None: + shortcut = self.shortcut(x) + else: + shortcut = x + + out += shortcut + out = F.relu_(out) + return out + + +class DeformBottleneckBlock(CNNBlockBase): + """ + Similar to :class:`BottleneckBlock`, but with :paper:`deformable conv ` + in the 3x3 convolution. 
+ """ + + def __init__( + self, + in_channels, + out_channels, + *, + bottleneck_channels, + stride=1, + num_groups=1, + norm="BN", + stride_in_1x1=False, + dilation=1, + deform_modulated=False, + deform_num_groups=1, + ): + super().__init__(in_channels, out_channels, stride) + self.deform_modulated = deform_modulated + + if in_channels != out_channels: + self.shortcut = Conv2d( + in_channels, + out_channels, + kernel_size=1, + stride=stride, + bias=False, + norm=get_norm(norm, out_channels), + ) + else: + self.shortcut = None + + stride_1x1, stride_3x3 = (stride, 1) if stride_in_1x1 else (1, stride) + + self.conv1 = Conv2d( + in_channels, + bottleneck_channels, + kernel_size=1, + stride=stride_1x1, + bias=False, + norm=get_norm(norm, bottleneck_channels), + ) + + if deform_modulated: + deform_conv_op = ModulatedDeformConv + # offset channels are 2 or 3 (if with modulated) * kernel_size * kernel_size + offset_channels = 27 + else: + deform_conv_op = DeformConv + offset_channels = 18 + + self.conv2_offset = Conv2d( + bottleneck_channels, + offset_channels * deform_num_groups, + kernel_size=3, + stride=stride_3x3, + padding=1 * dilation, + dilation=dilation, + ) + self.conv2 = deform_conv_op( + bottleneck_channels, + bottleneck_channels, + kernel_size=3, + stride=stride_3x3, + padding=1 * dilation, + bias=False, + groups=num_groups, + dilation=dilation, + deformable_groups=deform_num_groups, + norm=get_norm(norm, bottleneck_channels), + ) + + self.conv3 = Conv2d( + bottleneck_channels, + out_channels, + kernel_size=1, + bias=False, + norm=get_norm(norm, out_channels), + ) + + for layer in [self.conv1, self.conv2, self.conv3, self.shortcut]: + if layer is not None: # shortcut can be None + weight_init.c2_msra_fill(layer) + + nn.init.constant_(self.conv2_offset.weight, 0) + nn.init.constant_(self.conv2_offset.bias, 0) + + def forward(self, x): + out = self.conv1(x) + out = F.relu_(out) + + if self.deform_modulated: + offset_mask = self.conv2_offset(out) + offset_x, offset_y, mask = torch.chunk(offset_mask, 3, dim=1) + offset = torch.cat((offset_x, offset_y), dim=1) + mask = mask.sigmoid() + out = self.conv2(out, offset, mask) + else: + offset = self.conv2_offset(out) + out = self.conv2(out, offset) + out = F.relu_(out) + + out = self.conv3(out) + + if self.shortcut is not None: + shortcut = self.shortcut(x) + else: + shortcut = x + + out += shortcut + out = F.relu_(out) + return out + + +class BasicStem(CNNBlockBase): + """ + The standard ResNet stem (layers before the first residual block), + with a conv, relu and max_pool. + """ + + def __init__(self, in_channels=3, out_channels=64, norm="BN"): + """ + Args: + norm (str or callable): norm after the first conv layer. + See :func:`layers.get_norm` for supported format. + """ + super().__init__(in_channels, out_channels, 4) + self.in_channels = in_channels + self.conv1 = Conv2d( + in_channels, + out_channels, + kernel_size=7, + stride=2, + padding=3, + bias=False, + norm=get_norm(norm, out_channels), + ) + weight_init.c2_msra_fill(self.conv1) + + def forward(self, x): + x = self.conv1(x) + x = F.relu_(x) + x = F.max_pool2d(x, kernel_size=3, stride=2, padding=1) + return x + + +class ResNet(Backbone): + """ + Implement :paper:`ResNet`. + """ + + def __init__(self, stem, stages, num_classes=None, out_features=None, freeze_at=0): + """ + Args: + stem (nn.Module): a stem module + stages (list[list[CNNBlockBase]]): several (typically 4) stages, + each contains multiple :class:`CNNBlockBase`. 
+ num_classes (None or int): if None, will not perform classification. + Otherwise, will create a linear layer. + out_features (list[str]): name of the layers whose outputs should + be returned in forward. Can be anything in "stem", "linear", or "res2" ... + If None, will return the output of the last layer. + freeze_at (int): The number of stages at the beginning to freeze. + see :meth:`freeze` for detailed explanation. + """ + super().__init__() + self.stem = stem + self.num_classes = num_classes + + current_stride = self.stem.stride + self._out_feature_strides = {"stem": current_stride} + self._out_feature_channels = {"stem": self.stem.out_channels} + + self.stage_names, self.stages = [], [] + + if out_features is not None: + # Avoid keeping unused layers in this module. They consume extra memory + # and may cause allreduce to fail + num_stages = max( + [{"res2": 1, "res3": 2, "res4": 3, "res5": 4}.get(f, 0) for f in out_features] + ) + stages = stages[:num_stages] + for i, blocks in enumerate(stages): + assert len(blocks) > 0, len(blocks) + for block in blocks: + assert isinstance(block, CNNBlockBase), block + + name = "res" + str(i + 2) + stage = nn.Sequential(*blocks) + + self.add_module(name, stage) + self.stage_names.append(name) + self.stages.append(stage) + + self._out_feature_strides[name] = current_stride = int( + current_stride * np.prod([k.stride for k in blocks]) + ) + self._out_feature_channels[name] = curr_channels = blocks[-1].out_channels + self.stage_names = tuple(self.stage_names) # Make it static for scripting + + if num_classes is not None: + self.avgpool = nn.AdaptiveAvgPool2d((1, 1)) + self.linear = nn.Linear(curr_channels, num_classes) + + # Sec 5.1 in "Accurate, Large Minibatch SGD: Training ImageNet in 1 Hour": + # "The 1000-way fully-connected layer is initialized by + # drawing weights from a zero-mean Gaussian with standard deviation of 0.01." + nn.init.normal_(self.linear.weight, std=0.01) + name = "linear" + + if out_features is None: + out_features = [name] + self._out_features = out_features + assert len(self._out_features) + children = [x[0] for x in self.named_children()] + for out_feature in self._out_features: + assert out_feature in children, "Available children: {}".format(", ".join(children)) + self.freeze(freeze_at) + + def forward(self, x): + """ + Args: + x: Tensor of shape (N,C,H,W). H, W must be a multiple of ``self.size_divisibility``. + + Returns: + dict[str->Tensor]: names and the corresponding features + """ + assert x.dim() == 4, f"ResNet takes an input of shape (N, C, H, W). Got {x.shape} instead!" + outputs = {} + x = self.stem(x) + if "stem" in self._out_features: + outputs["stem"] = x + for name, stage in zip(self.stage_names, self.stages): + x = stage(x) + if name in self._out_features: + outputs[name] = x + if self.num_classes is not None: + x = self.avgpool(x) + x = torch.flatten(x, 1) + x = self.linear(x) + if "linear" in self._out_features: + outputs["linear"] = x + return outputs + + def output_shape(self): + return { + name: ShapeSpec( + channels=self._out_feature_channels[name], stride=self._out_feature_strides[name] + ) + for name in self._out_features + } + + def freeze(self, freeze_at=0): + """ + Freeze the first several stages of the ResNet. Commonly used in + fine-tuning. + + Layers that produce the same feature map spatial size are defined as one + "stage" by :paper:`FPN`. + + Args: + freeze_at (int): number of stages to freeze. + `1` means freezing the stem. `2` means freezing the stem and + one residual stage, etc. 
+ + Returns: + nn.Module: this ResNet itself + """ + if freeze_at >= 1: + self.stem.freeze() + for idx, stage in enumerate(self.stages, start=2): + if freeze_at >= idx: + for block in stage.children(): + block.freeze() + return self + + @staticmethod + def make_stage(block_class, num_blocks, *, in_channels, out_channels, **kwargs): + """ + Create a list of blocks of the same type that forms one ResNet stage. + + Args: + block_class (type): a subclass of CNNBlockBase that's used to create all blocks in this + stage. A module of this type must not change spatial resolution of inputs unless its + stride != 1. + num_blocks (int): number of blocks in this stage + in_channels (int): input channels of the entire stage. + out_channels (int): output channels of **every block** in the stage. + kwargs: other arguments passed to the constructor of + `block_class`. If the argument name is "xx_per_block", the + argument is a list of values to be passed to each block in the + stage. Otherwise, the same argument is passed to every block + in the stage. + + Returns: + list[CNNBlockBase]: a list of block module. + + Examples: + :: + stage = ResNet.make_stage( + BottleneckBlock, 3, in_channels=16, out_channels=64, + bottleneck_channels=16, num_groups=1, + stride_per_block=[2, 1, 1], + dilations_per_block=[1, 1, 2] + ) + + Usually, layers that produce the same feature map spatial size are defined as one + "stage" (in :paper:`FPN`). Under such definition, ``stride_per_block[1:]`` should + all be 1. + """ + blocks = [] + for i in range(num_blocks): + curr_kwargs = {} + for k, v in kwargs.items(): + if k.endswith("_per_block"): + assert len(v) == num_blocks, ( + f"Argument '{k}' of make_stage should have the " + f"same length as num_blocks={num_blocks}." + ) + newk = k[: -len("_per_block")] + assert newk not in kwargs, f"Cannot call make_stage with both {k} and {newk}!" + curr_kwargs[newk] = v[i] + else: + curr_kwargs[k] = v + + blocks.append( + block_class(in_channels=in_channels, out_channels=out_channels, **curr_kwargs) + ) + in_channels = out_channels + return blocks + + @staticmethod + def make_default_stages(depth, block_class=None, **kwargs): + """ + Created list of ResNet stages from pre-defined depth (one of 18, 34, 50, 101, 152). + If it doesn't create the ResNet variant you need, please use :meth:`make_stage` + instead for fine-grained customization. + + Args: + depth (int): depth of ResNet + block_class (type): the CNN block class. Has to accept + `bottleneck_channels` argument for depth > 50. + By default it is BasicBlock or BottleneckBlock, based on the + depth. + kwargs: + other arguments to pass to `make_stage`. Should not contain + stride and channels, as they are predefined for each depth. + + Returns: + list[list[CNNBlockBase]]: modules in all stages; see arguments of + :class:`ResNet.__init__`. 
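+
+ Example (a sketch)::
+
+ stages = ResNet.make_default_stages(depth=50, norm="FrozenBN")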
+ """ + num_blocks_per_stage = { + 18: [2, 2, 2, 2], + 34: [3, 4, 6, 3], + 50: [3, 4, 6, 3], + 101: [3, 4, 23, 3], + 152: [3, 8, 36, 3], + }[depth] + if block_class is None: + block_class = BasicBlock if depth < 50 else BottleneckBlock + if depth < 50: + in_channels = [64, 64, 128, 256] + out_channels = [64, 128, 256, 512] + else: + in_channels = [64, 256, 512, 1024] + out_channels = [256, 512, 1024, 2048] + ret = [] + for (n, s, i, o) in zip(num_blocks_per_stage, [1, 2, 2, 2], in_channels, out_channels): + if depth >= 50: + kwargs["bottleneck_channels"] = o // 4 + ret.append( + ResNet.make_stage( + block_class=block_class, + num_blocks=n, + stride_per_block=[s] + [1] * (n - 1), + in_channels=i, + out_channels=o, + **kwargs, + ) + ) + return ret + + +ResNetBlockBase = CNNBlockBase +""" +Alias for backward compatibiltiy. +""" + + +def make_stage(*args, **kwargs): + """ + Deprecated alias for backward compatibiltiy. + """ + return ResNet.make_stage(*args, **kwargs) + + +@BACKBONE_REGISTRY.register() +def build_resnet_backbone(cfg, input_shape): + """ + Create a ResNet instance from config. + + Returns: + ResNet: a :class:`ResNet` instance. + """ + # need registration of new blocks/stems? + norm = cfg.MODEL.RESNETS.NORM + stem = BasicStem( + in_channels=input_shape.channels, + out_channels=cfg.MODEL.RESNETS.STEM_OUT_CHANNELS, + norm=norm, + ) + + # fmt: off + freeze_at = cfg.MODEL.BACKBONE.FREEZE_AT + out_features = cfg.MODEL.RESNETS.OUT_FEATURES + depth = cfg.MODEL.RESNETS.DEPTH + num_groups = cfg.MODEL.RESNETS.NUM_GROUPS + width_per_group = cfg.MODEL.RESNETS.WIDTH_PER_GROUP + bottleneck_channels = num_groups * width_per_group + in_channels = cfg.MODEL.RESNETS.STEM_OUT_CHANNELS + out_channels = cfg.MODEL.RESNETS.RES2_OUT_CHANNELS + stride_in_1x1 = cfg.MODEL.RESNETS.STRIDE_IN_1X1 + res5_dilation = cfg.MODEL.RESNETS.RES5_DILATION + deform_on_per_stage = cfg.MODEL.RESNETS.DEFORM_ON_PER_STAGE + deform_modulated = cfg.MODEL.RESNETS.DEFORM_MODULATED + deform_num_groups = cfg.MODEL.RESNETS.DEFORM_NUM_GROUPS + # fmt: on + assert res5_dilation in {1, 2}, "res5_dilation cannot be {}.".format(res5_dilation) + + num_blocks_per_stage = { + 18: [2, 2, 2, 2], + 34: [3, 4, 6, 3], + 50: [3, 4, 6, 3], + 101: [3, 4, 23, 3], + 152: [3, 8, 36, 3], + }[depth] + + if depth in [18, 34]: + assert out_channels == 64, "Must set MODEL.RESNETS.RES2_OUT_CHANNELS = 64 for R18/R34" + assert not any( + deform_on_per_stage + ), "MODEL.RESNETS.DEFORM_ON_PER_STAGE unsupported for R18/R34" + assert res5_dilation == 1, "Must set MODEL.RESNETS.RES5_DILATION = 1 for R18/R34" + assert num_groups == 1, "Must set MODEL.RESNETS.NUM_GROUPS = 1 for R18/R34" + + stages = [] + + for idx, stage_idx in enumerate(range(2, 6)): + # res5_dilation is used this way as a convention in R-FCN & Deformable Conv paper + dilation = res5_dilation if stage_idx == 5 else 1 + first_stride = 1 if idx == 0 or (stage_idx == 5 and dilation == 2) else 2 + stage_kargs = { + "num_blocks": num_blocks_per_stage[idx], + "stride_per_block": [first_stride] + [1] * (num_blocks_per_stage[idx] - 1), + "in_channels": in_channels, + "out_channels": out_channels, + "norm": norm, + } + # Use BasicBlock for R18 and R34. 
+ if depth in [18, 34]: + stage_kargs["block_class"] = BasicBlock + else: + stage_kargs["bottleneck_channels"] = bottleneck_channels + stage_kargs["stride_in_1x1"] = stride_in_1x1 + stage_kargs["dilation"] = dilation + stage_kargs["num_groups"] = num_groups + if deform_on_per_stage[idx]: + stage_kargs["block_class"] = DeformBottleneckBlock + stage_kargs["deform_modulated"] = deform_modulated + stage_kargs["deform_num_groups"] = deform_num_groups + else: + stage_kargs["block_class"] = BottleneckBlock + blocks = ResNet.make_stage(**stage_kargs) + in_channels = out_channels + out_channels *= 2 + bottleneck_channels *= 2 + stages.append(blocks) + return ResNet(stem, stages, out_features=out_features, freeze_at=freeze_at) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/box_regression.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/box_regression.py new file mode 100644 index 0000000..b24c123 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/box_regression.py @@ -0,0 +1,369 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import math +from typing import List, Tuple, Union +import torch +from fvcore.nn import giou_loss, smooth_l1_loss +from torch.nn import functional as F + +from detectron2.layers import cat, ciou_loss, diou_loss +from detectron2.structures import Boxes + +# Value for clamping large dw and dh predictions. The heuristic is that we clamp +# such that dw and dh are no larger than what would transform a 16px box into a +# 1000px box (based on a small anchor, 16px, and a typical image size, 1000px). +_DEFAULT_SCALE_CLAMP = math.log(1000.0 / 16) + + +__all__ = ["Box2BoxTransform", "Box2BoxTransformRotated", "Box2BoxTransformLinear"] + + +@torch.jit.script +class Box2BoxTransform(object): + """ + The box-to-box transform defined in R-CNN. The transformation is parameterized + by 4 deltas: (dx, dy, dw, dh). The transformation scales the box's width and height + by exp(dw), exp(dh) and shifts a box's center by the offset (dx * width, dy * height). + """ + + def __init__( + self, weights: Tuple[float, float, float, float], scale_clamp: float = _DEFAULT_SCALE_CLAMP + ): + """ + Args: + weights (4-element tuple): Scaling factors that are applied to the + (dx, dy, dw, dh) deltas. In Fast R-CNN, these were originally set + such that the deltas have unit variance; now they are treated as + hyperparameters of the system. + scale_clamp (float): When predicting deltas, the predicted box scaling + factors (dw and dh) are clamped such that they are <= scale_clamp. + """ + self.weights = weights + self.scale_clamp = scale_clamp + + def get_deltas(self, src_boxes, target_boxes): + """ + Get box regression transformation deltas (dx, dy, dw, dh) that can be used + to transform the `src_boxes` into the `target_boxes`. That is, the relation + ``target_boxes == self.apply_deltas(deltas, src_boxes)`` is true (unless + any delta is too large and is clamped). + + Args: + src_boxes (Tensor): source boxes, e.g., object proposals + target_boxes (Tensor): target of the transformation, e.g., ground-truth + boxes. 
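+
+ Example (a sketch; the weights shown are a commonly used setting, and
+ src_boxes/target_boxes stand for any (N, 4) XYXY tensors)::
+
+ tfm = Box2BoxTransform(weights=(10.0, 10.0, 5.0, 5.0))
+ deltas = tfm.get_deltas(src_boxes, target_boxes) # (N, 4): dx, dy, dw, dh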
+ """ + assert isinstance(src_boxes, torch.Tensor), type(src_boxes) + assert isinstance(target_boxes, torch.Tensor), type(target_boxes) + + src_widths = src_boxes[:, 2] - src_boxes[:, 0] + src_heights = src_boxes[:, 3] - src_boxes[:, 1] + src_ctr_x = src_boxes[:, 0] + 0.5 * src_widths + src_ctr_y = src_boxes[:, 1] + 0.5 * src_heights + + target_widths = target_boxes[:, 2] - target_boxes[:, 0] + target_heights = target_boxes[:, 3] - target_boxes[:, 1] + target_ctr_x = target_boxes[:, 0] + 0.5 * target_widths + target_ctr_y = target_boxes[:, 1] + 0.5 * target_heights + + wx, wy, ww, wh = self.weights + dx = wx * (target_ctr_x - src_ctr_x) / src_widths + dy = wy * (target_ctr_y - src_ctr_y) / src_heights + dw = ww * torch.log(target_widths / src_widths) + dh = wh * torch.log(target_heights / src_heights) + + deltas = torch.stack((dx, dy, dw, dh), dim=1) + assert (src_widths > 0).all().item(), "Input boxes to Box2BoxTransform are not valid!" + return deltas + + def apply_deltas(self, deltas, boxes): + """ + Apply transformation `deltas` (dx, dy, dw, dh) to `boxes`. + + Args: + deltas (Tensor): transformation deltas of shape (N, k*4), where k >= 1. + deltas[i] represents k potentially different class-specific + box transformations for the single box boxes[i]. + boxes (Tensor): boxes to transform, of shape (N, 4) + """ + deltas = deltas.float() # ensure fp32 for decoding precision + boxes = boxes.to(deltas.dtype) + + widths = boxes[:, 2] - boxes[:, 0] + heights = boxes[:, 3] - boxes[:, 1] + ctr_x = boxes[:, 0] + 0.5 * widths + ctr_y = boxes[:, 1] + 0.5 * heights + + wx, wy, ww, wh = self.weights + dx = deltas[:, 0::4] / wx + dy = deltas[:, 1::4] / wy + dw = deltas[:, 2::4] / ww + dh = deltas[:, 3::4] / wh + + # Prevent sending too large values into torch.exp() + dw = torch.clamp(dw, max=self.scale_clamp) + dh = torch.clamp(dh, max=self.scale_clamp) + + pred_ctr_x = dx * widths[:, None] + ctr_x[:, None] + pred_ctr_y = dy * heights[:, None] + ctr_y[:, None] + pred_w = torch.exp(dw) * widths[:, None] + pred_h = torch.exp(dh) * heights[:, None] + + x1 = pred_ctr_x - 0.5 * pred_w + y1 = pred_ctr_y - 0.5 * pred_h + x2 = pred_ctr_x + 0.5 * pred_w + y2 = pred_ctr_y + 0.5 * pred_h + pred_boxes = torch.stack((x1, y1, x2, y2), dim=-1) + return pred_boxes.reshape(deltas.shape) + + +@torch.jit.script +class Box2BoxTransformRotated(object): + """ + The box-to-box transform defined in Rotated R-CNN. The transformation is parameterized + by 5 deltas: (dx, dy, dw, dh, da). The transformation scales the box's width and height + by exp(dw), exp(dh), shifts a box's center by the offset (dx * width, dy * height), + and rotate a box's angle by da (radians). + Note: angles of deltas are in radians while angles of boxes are in degrees. + """ + + def __init__( + self, + weights: Tuple[float, float, float, float, float], + scale_clamp: float = _DEFAULT_SCALE_CLAMP, + ): + """ + Args: + weights (5-element tuple): Scaling factors that are applied to the + (dx, dy, dw, dh, da) deltas. These are treated as + hyperparameters of the system. + scale_clamp (float): When predicting deltas, the predicted box scaling + factors (dw and dh) are clamped such that they are <= scale_clamp. + """ + self.weights = weights + self.scale_clamp = scale_clamp + + def get_deltas(self, src_boxes, target_boxes): + """ + Get box regression transformation deltas (dx, dy, dw, dh, da) that can be used + to transform the `src_boxes` into the `target_boxes`. 
That is, the relation + ``target_boxes == self.apply_deltas(deltas, src_boxes)`` is true (unless + any delta is too large and is clamped). + + Args: + src_boxes (Tensor): Nx5 source boxes, e.g., object proposals + target_boxes (Tensor): Nx5 target of the transformation, e.g., ground-truth + boxes. + """ + assert isinstance(src_boxes, torch.Tensor), type(src_boxes) + assert isinstance(target_boxes, torch.Tensor), type(target_boxes) + + src_ctr_x, src_ctr_y, src_widths, src_heights, src_angles = torch.unbind(src_boxes, dim=1) + + target_ctr_x, target_ctr_y, target_widths, target_heights, target_angles = torch.unbind( + target_boxes, dim=1 + ) + + wx, wy, ww, wh, wa = self.weights + dx = wx * (target_ctr_x - src_ctr_x) / src_widths + dy = wy * (target_ctr_y - src_ctr_y) / src_heights + dw = ww * torch.log(target_widths / src_widths) + dh = wh * torch.log(target_heights / src_heights) + # Angles of deltas are in radians while angles of boxes are in degrees. + # the conversion to radians serve as a way to normalize the values + da = target_angles - src_angles + da = (da + 180.0) % 360.0 - 180.0 # make it in [-180, 180) + da *= wa * math.pi / 180.0 + + deltas = torch.stack((dx, dy, dw, dh, da), dim=1) + assert ( + (src_widths > 0).all().item() + ), "Input boxes to Box2BoxTransformRotated are not valid!" + return deltas + + def apply_deltas(self, deltas, boxes): + """ + Apply transformation `deltas` (dx, dy, dw, dh, da) to `boxes`. + + Args: + deltas (Tensor): transformation deltas of shape (N, k*5). + deltas[i] represents box transformation for the single box boxes[i]. + boxes (Tensor): boxes to transform, of shape (N, 5) + """ + assert deltas.shape[1] % 5 == 0 and boxes.shape[1] == 5 + + boxes = boxes.to(deltas.dtype).unsqueeze(2) + + ctr_x = boxes[:, 0] + ctr_y = boxes[:, 1] + widths = boxes[:, 2] + heights = boxes[:, 3] + angles = boxes[:, 4] + + wx, wy, ww, wh, wa = self.weights + + dx = deltas[:, 0::5] / wx + dy = deltas[:, 1::5] / wy + dw = deltas[:, 2::5] / ww + dh = deltas[:, 3::5] / wh + da = deltas[:, 4::5] / wa + + # Prevent sending too large values into torch.exp() + dw = torch.clamp(dw, max=self.scale_clamp) + dh = torch.clamp(dh, max=self.scale_clamp) + + pred_boxes = torch.zeros_like(deltas) + pred_boxes[:, 0::5] = dx * widths + ctr_x # x_ctr + pred_boxes[:, 1::5] = dy * heights + ctr_y # y_ctr + pred_boxes[:, 2::5] = torch.exp(dw) * widths # width + pred_boxes[:, 3::5] = torch.exp(dh) * heights # height + + # Following original RRPN implementation, + # angles of deltas are in radians while angles of boxes are in degrees. + pred_angle = da * 180.0 / math.pi + angles + pred_angle = (pred_angle + 180.0) % 360.0 - 180.0 # make it in [-180, 180) + + pred_boxes[:, 4::5] = pred_angle + + return pred_boxes + + +class Box2BoxTransformLinear(object): + """ + The linear box-to-box transform defined in FCOS. The transformation is parameterized + by the distance from the center of (square) src box to 4 edges of the target box. + """ + + def __init__(self, normalize_by_size=True): + """ + Args: + normalize_by_size: normalize deltas by the size of src (anchor) boxes. + """ + self.normalize_by_size = normalize_by_size + + def get_deltas(self, src_boxes, target_boxes): + """ + Get box regression transformation deltas (dx1, dy1, dx2, dy2) that can be used + to transform the `src_boxes` into the `target_boxes`. That is, the relation + ``target_boxes == self.apply_deltas(deltas, src_boxes)`` is true. + The center of src must be inside target boxes. 
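+ The deltas are the (left, top, right, bottom) distances from the center of
+ the src box to the four sides of the target box, which FCOS regresses directly.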
+ + Args: + src_boxes (Tensor): square source boxes, e.g., anchors + target_boxes (Tensor): target of the transformation, e.g., ground-truth + boxes. + """ + assert isinstance(src_boxes, torch.Tensor), type(src_boxes) + assert isinstance(target_boxes, torch.Tensor), type(target_boxes) + + src_ctr_x = 0.5 * (src_boxes[:, 0] + src_boxes[:, 2]) + src_ctr_y = 0.5 * (src_boxes[:, 1] + src_boxes[:, 3]) + + target_l = src_ctr_x - target_boxes[:, 0] + target_t = src_ctr_y - target_boxes[:, 1] + target_r = target_boxes[:, 2] - src_ctr_x + target_b = target_boxes[:, 3] - src_ctr_y + + deltas = torch.stack((target_l, target_t, target_r, target_b), dim=1) + if self.normalize_by_size: + stride_w = src_boxes[:, 2] - src_boxes[:, 0] + stride_h = src_boxes[:, 3] - src_boxes[:, 1] + strides = torch.stack([stride_w, stride_h, stride_w, stride_h], axis=1) + deltas = deltas / strides + + return deltas + + def apply_deltas(self, deltas, boxes): + """ + Apply transformation `deltas` (dx1, dy1, dx2, dy2) to `boxes`. + + Args: + deltas (Tensor): transformation deltas of shape (N, k*4), where k >= 1. + deltas[i] represents k potentially different class-specific + box transformations for the single box boxes[i]. + boxes (Tensor): boxes to transform, of shape (N, 4) + """ + # Ensure the output is a valid box. See Sec 2.1 of https://arxiv.org/abs/2006.09214 + deltas = F.relu(deltas) + boxes = boxes.to(deltas.dtype) + + ctr_x = 0.5 * (boxes[:, 0] + boxes[:, 2]) + ctr_y = 0.5 * (boxes[:, 1] + boxes[:, 3]) + if self.normalize_by_size: + stride_w = boxes[:, 2] - boxes[:, 0] + stride_h = boxes[:, 3] - boxes[:, 1] + strides = torch.stack([stride_w, stride_h, stride_w, stride_h], axis=1) + deltas = deltas * strides + + l = deltas[:, 0::4] + t = deltas[:, 1::4] + r = deltas[:, 2::4] + b = deltas[:, 3::4] + + pred_boxes = torch.zeros_like(deltas) + pred_boxes[:, 0::4] = ctr_x[:, None] - l # x1 + pred_boxes[:, 1::4] = ctr_y[:, None] - t # y1 + pred_boxes[:, 2::4] = ctr_x[:, None] + r # x2 + pred_boxes[:, 3::4] = ctr_y[:, None] + b # y2 + return pred_boxes + + +def _dense_box_regression_loss( + anchors: List[Union[Boxes, torch.Tensor]], + box2box_transform: Box2BoxTransform, + pred_anchor_deltas: List[torch.Tensor], + gt_boxes: List[torch.Tensor], + fg_mask: torch.Tensor, + box_reg_loss_type="smooth_l1", + smooth_l1_beta=0.0, +): + """ + Compute loss for dense multi-level box regression. + Loss is accumulated over ``fg_mask``. + + Args: + anchors: #lvl anchor boxes, each is (HixWixA, 4) + pred_anchor_deltas: #lvl predictions, each is (N, HixWixA, 4) + gt_boxes: N ground truth boxes, each has shape (R, 4) (R = sum(Hi * Wi * A)) + fg_mask: the foreground boolean mask of shape (N, R) to compute loss on + box_reg_loss_type (str): Loss type to use. Supported losses: "smooth_l1", "giou", + "diou", "ciou". + smooth_l1_beta (float): beta parameter for the smooth L1 regression loss. Default to + use L1 loss. 
Only used when `box_reg_loss_type` is "smooth_l1" + """ + if isinstance(anchors[0], Boxes): + anchors = type(anchors[0]).cat(anchors).tensor # (R, 4) + else: + anchors = cat(anchors) + if box_reg_loss_type == "smooth_l1": + gt_anchor_deltas = [box2box_transform.get_deltas(anchors, k) for k in gt_boxes] + gt_anchor_deltas = torch.stack(gt_anchor_deltas) # (N, R, 4) + loss_box_reg = smooth_l1_loss( + cat(pred_anchor_deltas, dim=1)[fg_mask], + gt_anchor_deltas[fg_mask], + beta=smooth_l1_beta, + reduction="sum", + ) + elif box_reg_loss_type == "giou": + pred_boxes = [ + box2box_transform.apply_deltas(k, anchors) for k in cat(pred_anchor_deltas, dim=1) + ] + loss_box_reg = giou_loss( + torch.stack(pred_boxes)[fg_mask], torch.stack(gt_boxes)[fg_mask], reduction="sum" + ) + elif box_reg_loss_type == "diou": + pred_boxes = [ + box2box_transform.apply_deltas(k, anchors) for k in cat(pred_anchor_deltas, dim=1) + ] + loss_box_reg = diou_loss( + torch.stack(pred_boxes)[fg_mask], torch.stack(gt_boxes)[fg_mask], reduction="sum" + ) + elif box_reg_loss_type == "ciou": + pred_boxes = [ + box2box_transform.apply_deltas(k, anchors) for k in cat(pred_anchor_deltas, dim=1) + ] + loss_box_reg = ciou_loss( + torch.stack(pred_boxes)[fg_mask], torch.stack(gt_boxes)[fg_mask], reduction="sum" + ) + else: + raise ValueError(f"Invalid dense box regression loss type '{box_reg_loss_type}'") + return loss_box_reg diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/matcher.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/matcher.py new file mode 100644 index 0000000..c7597ca --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/matcher.py @@ -0,0 +1,127 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from typing import List +import torch + +from detectron2.layers import nonzero_tuple + + +# TODO: the name is too general +class Matcher(object): + """ + This class assigns to each predicted "element" (e.g., a box) a ground-truth + element. Each predicted element will have exactly zero or one matches; each + ground-truth element may be matched to zero or more predicted elements. + + The matching is determined by the MxN match_quality_matrix, that characterizes + how well each (ground-truth, prediction)-pair match each other. For example, + if the elements are boxes, this matrix may contain box intersection-over-union + overlap values. + + The matcher returns (a) a vector of length N containing the index of the + ground-truth element m in [0, M) that matches to prediction n in [0, N). + (b) a vector of length N containing the labels for each prediction. + """ + + def __init__( + self, thresholds: List[float], labels: List[int], allow_low_quality_matches: bool = False + ): + """ + Args: + thresholds (list): a list of thresholds used to stratify predictions + into levels. + labels (list): a list of values to label predictions belonging at + each level. A label can be one of {-1, 0, 1} signifying + {ignore, negative class, positive class}, respectively. + allow_low_quality_matches (bool): if True, produce additional matches + for predictions with maximum match quality lower than high_threshold. + See set_low_quality_matches_ for more details. + + For example, + thresholds = [0.3, 0.5] + labels = [0, -1, 1] + All predictions with iou < 0.3 will be marked with 0 and + thus will be considered as false positives while training. + All predictions with 0.3 <= iou < 0.5 will be marked with -1 and + thus will be ignored. 
+                All predictions with 0.5 <= iou will be marked with 1 and
+                thus will be considered as true positives.
+        """
+        # Add -inf and +inf to first and last position in thresholds
+        thresholds = thresholds[:]
+        assert thresholds[0] > 0
+        thresholds.insert(0, -float("inf"))
+        thresholds.append(float("inf"))
+        # Currently torchscript does not support all + generator
+        assert all([low <= high for (low, high) in zip(thresholds[:-1], thresholds[1:])])
+        assert all([l in [-1, 0, 1] for l in labels])
+        assert len(labels) == len(thresholds) - 1
+        self.thresholds = thresholds
+        self.labels = labels
+        self.allow_low_quality_matches = allow_low_quality_matches
+
+    def __call__(self, match_quality_matrix):
+        """
+        Args:
+            match_quality_matrix (Tensor[float]): an MxN tensor, containing the
+                pairwise quality between M ground-truth elements and N predicted
+                elements. All elements must be >= 0 (due to the use of `torch.nonzero`
+                for selecting indices in :meth:`set_low_quality_matches_`).
+
+        Returns:
+            matches (Tensor[int64]): a vector of length N, where matches[i] is a matched
+                ground-truth index in [0, M)
+            match_labels (Tensor[int8]): a vector of length N, where match_labels[i] indicates
+                whether a prediction is a true or false positive or ignored
+        """
+        assert match_quality_matrix.dim() == 2
+        if match_quality_matrix.numel() == 0:
+            default_matches = match_quality_matrix.new_full(
+                (match_quality_matrix.size(1),), 0, dtype=torch.int64
+            )
+            # When no gt boxes exist, we define IOU = 0 and therefore set labels
+            # to `self.labels[0]`, which usually defaults to background class 0.
+            # To choose to ignore instead, one can use labels=[-1,0,-1,1] and set
+            # appropriate thresholds
+            default_match_labels = match_quality_matrix.new_full(
+                (match_quality_matrix.size(1),), self.labels[0], dtype=torch.int8
+            )
+            return default_matches, default_match_labels
+
+        assert torch.all(match_quality_matrix >= 0)
+
+        # match_quality_matrix is M (gt) x N (predicted)
+        # Max over gt elements (dim 0) to find best gt candidate for each prediction
+        matched_vals, matches = match_quality_matrix.max(dim=0)
+
+        match_labels = matches.new_full(matches.size(), 1, dtype=torch.int8)
+
+        for (l, low, high) in zip(self.labels, self.thresholds[:-1], self.thresholds[1:]):
+            low_high = (matched_vals >= low) & (matched_vals < high)
+            match_labels[low_high] = l
+
+        if self.allow_low_quality_matches:
+            self.set_low_quality_matches_(match_labels, match_quality_matrix)
+
+        return matches, match_labels
+
+    def set_low_quality_matches_(self, match_labels, match_quality_matrix):
+        """
+        Produce additional matches for predictions that have only low-quality matches.
+        Specifically, for each ground-truth G find the set of predictions that have
+        maximum overlap with it (including ties); for each prediction in that set, if
+        it is unmatched, then match it to the ground-truth G.
+
+        This function implements the RPN assignment case (i) in Sec. 3.1.2 of
+        :paper:`Faster R-CNN`.
+        """
+        # For each gt, find the prediction with which it has highest quality
+        highest_quality_foreach_gt, _ = match_quality_matrix.max(dim=1)
+        # Find the highest quality match available, even if it is low, including ties.
+        # Note that the match qualities must be positive due to the use of
+        # `torch.nonzero`.
+        _, pred_inds_with_highest_quality = nonzero_tuple(
+            match_quality_matrix == highest_quality_foreach_gt[:, None]
+        )
+        # If an anchor was labeled positive only due to a low-quality match
+        # with gt_A, but it has larger overlap with gt_B, its matched index will still be gt_B.
+ # This follows the implementation in Detectron, and is found to have no significant impact. + match_labels[pred_inds_with_highest_quality] = 1 diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/__init__.py new file mode 100644 index 0000000..6b06681 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/__init__.py @@ -0,0 +1,16 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. + +from .build import META_ARCH_REGISTRY, build_model # isort:skip + +from .panoptic_fpn import PanopticFPN + +# import all the meta_arch, so they will be registered +from .rcnn import GeneralizedRCNN, ProposalNetwork +from .dense_detector import DenseDetector +from .retinanet import RetinaNet +from .fcos import FCOS +from .semantic_seg import SEM_SEG_HEADS_REGISTRY, SemanticSegmentor, build_sem_seg_head + + +__all__ = list(globals().keys()) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/build.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/build.py new file mode 100644 index 0000000..3427215 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/build.py @@ -0,0 +1,25 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import torch + +from detectron2.utils.logger import _log_api_usage +from detectron2.utils.registry import Registry + +META_ARCH_REGISTRY = Registry("META_ARCH") # noqa F401 isort:skip +META_ARCH_REGISTRY.__doc__ = """ +Registry for meta-architectures, i.e. the whole model. + +The registered object will be called with `obj(cfg)` +and expected to return a `nn.Module` object. +""" + + +def build_model(cfg): + """ + Build the whole model architecture, defined by ``cfg.MODEL.META_ARCHITECTURE``. + Note that it does not load any weights from ``cfg``. + """ + meta_arch = cfg.MODEL.META_ARCHITECTURE + model = META_ARCH_REGISTRY.get(meta_arch)(cfg) + model.to(torch.device(cfg.MODEL.DEVICE)) + _log_api_usage("modeling.meta_arch." + meta_arch) + return model diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/dense_detector.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/dense_detector.py new file mode 100644 index 0000000..382eab9 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/dense_detector.py @@ -0,0 +1,282 @@ +import numpy as np +from typing import Dict, List, Optional, Tuple +import torch +from torch import Tensor, nn + +from detectron2.data.detection_utils import convert_image_to_rgb +from detectron2.modeling import Backbone +from detectron2.structures import Boxes, ImageList, Instances +from detectron2.utils.events import get_event_storage + +from ..postprocessing import detector_postprocess + + +def permute_to_N_HWA_K(tensor, K: int): + """ + Transpose/reshape a tensor from (N, (Ai x K), H, W) to (N, (HxWxAi), K) + """ + assert tensor.dim() == 4, tensor.shape + N, _, H, W = tensor.shape + tensor = tensor.view(N, -1, K, H, W) + tensor = tensor.permute(0, 3, 4, 1, 2) + tensor = tensor.reshape(N, -1, K) # Size=(N,HWA,K) + return tensor + + +class DenseDetector(nn.Module): + """ + Base class for dense detector. We define a dense detector as a fully-convolutional model that + makes per-pixel (i.e. dense) predictions. 
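+
+    A minimal inference sketch (illustrative only; assumes ``model`` is a built
+    subclass instance such as :class:`RetinaNet` and ``img`` is a (C, H, W) uint8
+    tensor in the model's input format)::
+
+        model.eval()
+        with torch.no_grad():
+            outputs = model([{"image": img, "height": 480, "width": 640}])
+        instances = outputs[0]["instances"]  # boxes, scores and classes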
+ """ + + def __init__( + self, + backbone: Backbone, + head: nn.Module, + head_in_features: Optional[List[str]] = None, + *, + pixel_mean, + pixel_std, + ): + """ + Args: + backbone: backbone module + head: head module + head_in_features: backbone features to use in head. Default to all backbone features. + pixel_mean (Tuple[float]): + Values to be used for image normalization (BGR order). + To train on images of different number of channels, set different mean & std. + Default values are the mean pixel value from ImageNet: [103.53, 116.28, 123.675] + pixel_std (Tuple[float]): + When using pre-trained models in Detectron1 or any MSRA models, + std has been absorbed into its conv1 weights, so the std needs to be set 1. + Otherwise, you can use [57.375, 57.120, 58.395] (ImageNet std) + """ + super().__init__() + + self.backbone = backbone + self.head = head + if head_in_features is None: + shapes = self.backbone.output_shape() + self.head_in_features = sorted(shapes.keys(), key=lambda x: shapes[x].stride) + else: + self.head_in_features = head_in_features + + self.register_buffer("pixel_mean", torch.tensor(pixel_mean).view(-1, 1, 1), False) + self.register_buffer("pixel_std", torch.tensor(pixel_std).view(-1, 1, 1), False) + + @property + def device(self): + return self.pixel_mean.device + + def forward(self, batched_inputs: List[Dict[str, Tensor]]): + """ + Args: + batched_inputs: a list, batched outputs of :class:`DatasetMapper` . + Each item in the list contains the inputs for one image. + For now, each item in the list is a dict that contains: + + * image: Tensor, image in (C, H, W) format. + * instances: Instances + + Other information that's included in the original dicts, such as: + + * "height", "width" (int): the output resolution of the model, used in inference. + See :meth:`postprocess` for details. + + Returns: + In training, dict[str, Tensor]: mapping from a named loss to a tensor storing the + loss. Used during training only. In inference, the standard output format, described + in :doc:`/tutorials/models`. + """ + images = self.preprocess_image(batched_inputs) + features = self.backbone(images.tensor) + features = [features[f] for f in self.head_in_features] + predictions = self.head(features) + + if self.training: + assert not torch.jit.is_scripting(), "Not supported" + assert "instances" in batched_inputs[0], "Instance annotations are missing in training!" + gt_instances = [x["instances"].to(self.device) for x in batched_inputs] + return self.forward_training(images, features, predictions, gt_instances) + else: + results = self.forward_inference(images, features, predictions) + if torch.jit.is_scripting(): + return results + + processed_results = [] + for results_per_image, input_per_image, image_size in zip( + results, batched_inputs, images.image_sizes + ): + height = input_per_image.get("height", image_size[0]) + width = input_per_image.get("width", image_size[1]) + r = detector_postprocess(results_per_image, height, width) + processed_results.append({"instances": r}) + return processed_results + + def forward_training(self, images, features, predictions, gt_instances): + raise NotImplementedError() + + def preprocess_image(self, batched_inputs: List[Dict[str, Tensor]]): + """ + Normalize, pad and batch the input images. 
+        """
+        images = [x["image"].to(self.device) for x in batched_inputs]
+        images = [(x - self.pixel_mean) / self.pixel_std for x in images]
+        images = ImageList.from_tensors(images, self.backbone.size_divisibility)
+        return images
+
+    def _transpose_dense_predictions(
+        self, predictions: List[List[Tensor]], dims_per_anchor: List[int]
+    ) -> List[List[Tensor]]:
+        """
+        Transpose the dense per-level predictions.
+
+        Args:
+            predictions: a list of outputs, each is a list of per-level
+                predictions with shape (N, Ai x K, Hi, Wi), where N is the
+                number of images, Ai is the number of anchors per location on
+                level i, K is the dimension of predictions per anchor.
+            dims_per_anchor: the value of K for each prediction, e.g. 4 for
+                box prediction, #classes for classification prediction.
+
+        Returns:
+            List[List[Tensor]]: each prediction is transposed to (N, Hi x Wi x Ai, K).
+        """
+        assert len(predictions) == len(dims_per_anchor)
+        res: List[List[Tensor]] = []
+        for pred, dim_per_anchor in zip(predictions, dims_per_anchor):
+            pred = [permute_to_N_HWA_K(x, dim_per_anchor) for x in pred]
+            res.append(pred)
+        return res
+
+    def _ema_update(self, name: str, value: float, initial_value: float, momentum: float = 0.9):
+        """
+        Apply EMA update to `self.name` using `value`.
+
+        This is mainly used for loss normalizer. In Detectron1, loss is normalized by number
+        of foreground samples in the batch. When batch size is 1 per GPU, #foreground has a
+        large variance and using it leads to lower performance. Therefore we maintain an EMA of
+        #foreground to stabilize the normalizer.
+
+        Args:
+            name: name of the normalizer
+            value: the new value to update
+            initial_value: the initial value to start with
+            momentum: momentum of EMA
+
+        Returns:
+            float: the updated EMA value
+        """
+        if hasattr(self, name):
+            old = getattr(self, name)
+        else:
+            old = initial_value
+        new = old * momentum + value * (1 - momentum)
+        setattr(self, name, new)
+        return new
+
+    def _decode_per_level_predictions(
+        self,
+        anchors: Boxes,
+        pred_scores: Tensor,
+        pred_deltas: Tensor,
+        score_thresh: float,
+        topk_candidates: int,
+        image_size: Tuple[int, int],
+    ) -> Instances:
+        """
+        Decode boxes and classification predictions of one feature level, by
+        the following steps:
+        1. filter the predictions based on score threshold and top K scores.
+        2. transform the box regression outputs
+        3. return the predicted scores, classes and boxes
+
+        Args:
+            anchors: Boxes, anchor for this feature level
+            pred_scores: HxWxA,K
+            pred_deltas: HxWxA,4
+
+        Returns:
+            Instances: with field "scores", "pred_boxes", "pred_classes".
+        """
+        # Apply two filtering steps to make NMS faster.
+        # 1. Keep boxes with confidence score higher than threshold
+        keep_idxs = pred_scores > score_thresh
+        pred_scores = pred_scores[keep_idxs]
+        topk_idxs = torch.nonzero(keep_idxs)  # Kx2
+
+        # 2. Keep only the top k scoring boxes
+        num_topk = min(topk_candidates, topk_idxs.size(0))
+        pred_scores, idxs = pred_scores.topk(num_topk)
+        topk_idxs = topk_idxs[idxs]
+
+        anchor_idxs, classes_idxs = topk_idxs.unbind(dim=1)
+
+        pred_boxes = self.box2box_transform.apply_deltas(
+            pred_deltas[anchor_idxs], anchors.tensor[anchor_idxs]
+        )
+        return Instances(
+            image_size, pred_boxes=Boxes(pred_boxes), scores=pred_scores, pred_classes=classes_idxs
+        )
+
+    def _decode_multi_level_predictions(
+        self,
+        anchors: List[Boxes],
+        pred_scores: List[Tensor],
+        pred_deltas: List[Tensor],
+        score_thresh: float,
+        topk_candidates: int,
+        image_size: Tuple[int, int],
+    ) -> Instances:
+        """
+        Run `_decode_per_level_predictions` for all feature levels and concat the results.
+        """
+        predictions = [
+            self._decode_per_level_predictions(
+                anchors_i,
+                box_cls_i,
+                box_reg_i,
+                self.test_score_thresh,
+                self.test_topk_candidates,
+                image_size,
+            )
+            # Iterate over every feature level
+            for box_cls_i, box_reg_i, anchors_i in zip(pred_scores, pred_deltas, anchors)
+        ]
+        return predictions[0].cat(predictions)  # 'Instances.cat' is not scriptable but this is
+
+    def visualize_training(self, batched_inputs, results):
+        """
+        A function used to visualize ground truth images and final network predictions.
+        It shows ground truth bounding boxes on the original image and up to 20
+        predicted object bounding boxes on the original image.
+
+        Args:
+            batched_inputs (list): a list that contains input to the model.
+            results (List[Instances]): a list of #images elements returned by forward_inference().
+        """
+        from detectron2.utils.visualizer import Visualizer
+
+        assert len(batched_inputs) == len(
+            results
+        ), "Cannot visualize inputs and results of different sizes"
+        storage = get_event_storage()
+        max_boxes = 20
+
+        image_index = 0  # only visualize a single image
+        img = batched_inputs[image_index]["image"]
+        img = convert_image_to_rgb(img.permute(1, 2, 0), self.input_format)
+        v_gt = Visualizer(img, None)
+        v_gt = v_gt.overlay_instances(boxes=batched_inputs[image_index]["instances"].gt_boxes)
+        anno_img = v_gt.get_image()
+        processed_results = detector_postprocess(results[image_index], img.shape[0], img.shape[1])
+        predicted_boxes = processed_results.pred_boxes.tensor.detach().cpu().numpy()
+
+        v_pred = Visualizer(img, None)
+        v_pred = v_pred.overlay_instances(boxes=predicted_boxes[0:max_boxes])
+        prop_img = v_pred.get_image()
+        vis_img = np.vstack((anno_img, prop_img))
+        vis_img = vis_img.transpose(2, 0, 1)
+        vis_name = f"Top: GT bounding boxes; Bottom: {max_boxes} Highest Scoring Results"
+        storage.put_image(vis_name, vis_img)
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/fcos.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/fcos.py
new file mode 100644
index 0000000..55cdb76
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/fcos.py
@@ -0,0 +1,303 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
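+
+# NOTE: illustrative construction sketch, not part of upstream detectron2. FCOS
+# below is anchor-free and is typically instantiated directly rather than via the
+# registry, e.g. (assuming `backbone` and per-level `shapes: List[ShapeSpec]` are
+# built elsewhere):
+#
+#   model = FCOS(
+#       backbone=backbone,
+#       head=FCOSHead(input_shape=shapes, conv_dims=[256] * 4, num_classes=80),
+#       num_classes=80,
+#       pixel_mean=[103.53, 116.28, 123.675],
+#       pixel_std=[1.0, 1.0, 1.0],
+#   )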
+ +import logging +from typing import List, Optional, Tuple +import torch +from fvcore.nn import sigmoid_focal_loss_jit +from torch import Tensor, nn +from torch.nn import functional as F + +from detectron2.layers import ShapeSpec, batched_nms +from detectron2.structures import Boxes, ImageList, Instances, pairwise_point_box_distance +from detectron2.utils.events import get_event_storage + +from ..anchor_generator import DefaultAnchorGenerator +from ..backbone import Backbone +from ..box_regression import Box2BoxTransformLinear, _dense_box_regression_loss +from .dense_detector import DenseDetector +from .retinanet import RetinaNetHead + +__all__ = ["FCOS"] + + +logger = logging.getLogger(__name__) + + +class FCOS(DenseDetector): + """ + Implement FCOS in :paper:`fcos`. + """ + + def __init__( + self, + *, + backbone: Backbone, + head: nn.Module, + head_in_features: Optional[List[str]] = None, + box2box_transform=None, + num_classes, + center_sampling_radius: float = 1.5, + focal_loss_alpha=0.25, + focal_loss_gamma=2.0, + test_score_thresh=0.2, + test_topk_candidates=1000, + test_nms_thresh=0.6, + max_detections_per_image=100, + pixel_mean, + pixel_std, + ): + """ + Args: + center_sampling_radius: radius of the "center" of a groundtruth box, + within which all anchor points are labeled positive. + Other arguments mean the same as in :class:`RetinaNet`. + """ + super().__init__( + backbone, head, head_in_features, pixel_mean=pixel_mean, pixel_std=pixel_std + ) + + self.num_classes = num_classes + + # FCOS uses one anchor point per location. + # We represent the anchor point by a box whose size equals the anchor stride. + feature_shapes = backbone.output_shape() + fpn_strides = [feature_shapes[k].stride for k in self.head_in_features] + self.anchor_generator = DefaultAnchorGenerator( + sizes=[[k] for k in fpn_strides], aspect_ratios=[1.0], strides=fpn_strides + ) + + # FCOS parameterizes box regression by a linear transform, + # where predictions are normalized by anchor stride (equal to anchor size). + if box2box_transform is None: + box2box_transform = Box2BoxTransformLinear(normalize_by_size=True) + self.box2box_transform = box2box_transform + + self.center_sampling_radius = float(center_sampling_radius) + + # Loss parameters: + self.focal_loss_alpha = focal_loss_alpha + self.focal_loss_gamma = focal_loss_gamma + + # Inference parameters: + self.test_score_thresh = test_score_thresh + self.test_topk_candidates = test_topk_candidates + self.test_nms_thresh = test_nms_thresh + self.max_detections_per_image = max_detections_per_image + + def forward_training(self, images, features, predictions, gt_instances): + # Transpose the Hi*Wi*A dimension to the middle: + pred_logits, pred_anchor_deltas, pred_centerness = self._transpose_dense_predictions( + predictions, [self.num_classes, 4, 1] + ) + anchors = self.anchor_generator(features) + gt_labels, gt_boxes = self.label_anchors(anchors, gt_instances) + return self.losses( + anchors, pred_logits, gt_labels, pred_anchor_deltas, gt_boxes, pred_centerness + ) + + @torch.no_grad() + def match_anchors(self, anchors: List[Boxes], gt_instances: List[Instances]): + """ + Match anchors with ground truth boxes. + + Args: + anchors: #level boxes, from the highest resolution to lower resolution + gt_instances: ground truth instances per image + + Returns: + List[Tensor]: + #image tensors, each is a vector of matched gt + indices (or -1 for unmatched anchors) for all anchors. 
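+
+                For example (illustrative), with two gt boxes in an image, one such
+                tensor could look like ``[-1, 0, -1, 1, ...]``: the second anchor
+                matched gt 0, the fourth matched gt 1, and the rest are unmatched.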
+ """ + num_anchors_per_level = [len(x) for x in anchors] + anchors = Boxes.cat(anchors) # Rx4 + anchor_centers = anchors.get_centers() # Rx2 + anchor_sizes = anchors.tensor[:, 2] - anchors.tensor[:, 0] # R + + lower_bound = anchor_sizes * 4 + lower_bound[: num_anchors_per_level[0]] = 0 + upper_bound = anchor_sizes * 8 + upper_bound[-num_anchors_per_level[-1] :] = float("inf") + + matched_indices = [] + for gt_per_image in gt_instances: + gt_centers = gt_per_image.gt_boxes.get_centers() # Nx2 + # FCOS with center sampling: anchor point must be close enough to gt center. + pairwise_match = (anchor_centers[:, None, :] - gt_centers[None, :, :]).abs_().max( + dim=2 + ).values < self.center_sampling_radius * anchor_sizes[:, None] + pairwise_dist = pairwise_point_box_distance(anchor_centers, gt_per_image.gt_boxes) + + # The original FCOS anchor matching rule: anchor point must be inside gt + pairwise_match &= pairwise_dist.min(dim=2).values > 0 + + # Multilevel anchor matching in FCOS: each anchor is only responsible + # for certain scale range. + pairwise_dist = pairwise_dist.max(dim=2).values + pairwise_match &= (pairwise_dist > lower_bound[:, None]) & ( + pairwise_dist < upper_bound[:, None] + ) + + # Match the GT box with minimum area, if there are multiple GT matches + gt_areas = gt_per_image.gt_boxes.area() # N + pairwise_match = pairwise_match.to(torch.float32) * (1e8 - gt_areas[None, :]) + min_values, matched_idx = pairwise_match.max(dim=1) # R, per-anchor match + matched_idx[min_values < 1e-5] = -1 # Unmatched anchors are assigned -1 + + matched_indices.append(matched_idx) + return matched_indices + + @torch.no_grad() + def label_anchors(self, anchors, gt_instances): + """ + Same interface as :meth:`RetinaNet.label_anchors`, but implemented with FCOS + anchor matching rule. + + Unlike RetinaNet, there are no ignored anchors. + """ + matched_indices = self.match_anchors(anchors, gt_instances) + + matched_labels, matched_boxes = [], [] + for gt_index, gt_per_image in zip(matched_indices, gt_instances): + label = gt_per_image.gt_classes[gt_index.clip(min=0)] + label[gt_index < 0] = self.num_classes # background + + matched_gt_boxes = gt_per_image.gt_boxes[gt_index.clip(min=0)] + + matched_labels.append(label) + matched_boxes.append(matched_gt_boxes) + return matched_labels, matched_boxes + + def losses( + self, anchors, pred_logits, gt_labels, pred_anchor_deltas, gt_boxes, pred_centerness + ): + """ + This method is almost identical to :meth:`RetinaNet.losses`, with an extra + "loss_centerness" in the returned dict. 
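+
+        The returned mapping has the form (illustrative)::
+
+            {"loss_fcos_cls": ..., "loss_fcos_loc": ..., "loss_fcos_ctr": ...}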
+ """ + num_images = len(gt_labels) + gt_labels = torch.stack(gt_labels) # (N, R) + + pos_mask = (gt_labels >= 0) & (gt_labels != self.num_classes) + num_pos_anchors = pos_mask.sum().item() + get_event_storage().put_scalar("num_pos_anchors", num_pos_anchors / num_images) + normalizer = self._ema_update("loss_normalizer", max(num_pos_anchors, 1), 300) + + # classification and regression loss + gt_labels_target = F.one_hot(gt_labels, num_classes=self.num_classes + 1)[ + :, :, :-1 + ] # no loss for the last (background) class + loss_cls = sigmoid_focal_loss_jit( + torch.cat(pred_logits, dim=1), + gt_labels_target.to(pred_logits[0].dtype), + alpha=self.focal_loss_alpha, + gamma=self.focal_loss_gamma, + reduction="sum", + ) + + loss_box_reg = _dense_box_regression_loss( + anchors, + self.box2box_transform, + pred_anchor_deltas, + [x.tensor for x in gt_boxes], + pos_mask, + box_reg_loss_type="giou", + ) + + ctrness_targets = self.compute_ctrness_targets(anchors, gt_boxes) # NxR + pred_centerness = torch.cat(pred_centerness, dim=1).squeeze(dim=2) # NxR + ctrness_loss = F.binary_cross_entropy_with_logits( + pred_centerness[pos_mask], ctrness_targets[pos_mask], reduction="sum" + ) + return { + "loss_fcos_cls": loss_cls / normalizer, + "loss_fcos_loc": loss_box_reg / normalizer, + "loss_fcos_ctr": ctrness_loss / normalizer, + } + + def compute_ctrness_targets(self, anchors, gt_boxes): # NxR + anchors = Boxes.cat(anchors).tensor # Rx4 + reg_targets = [self.box2box_transform.get_deltas(anchors, m.tensor) for m in gt_boxes] + reg_targets = torch.stack(reg_targets, dim=0) # NxRx4 + if len(reg_targets) == 0: + return reg_targets.new_zeros(len(reg_targets)) + left_right = reg_targets[:, :, [0, 2]] + top_bottom = reg_targets[:, :, [1, 3]] + ctrness = (left_right.min(dim=-1)[0] / left_right.max(dim=-1)[0]) * ( + top_bottom.min(dim=-1)[0] / top_bottom.max(dim=-1)[0] + ) + return torch.sqrt(ctrness) + + def forward_inference( + self, images: ImageList, features: List[Tensor], predictions: List[List[Tensor]] + ): + pred_logits, pred_anchor_deltas, pred_centerness = self._transpose_dense_predictions( + predictions, [self.num_classes, 4, 1] + ) + anchors = self.anchor_generator(features) + + results: List[Instances] = [] + for img_idx, image_size in enumerate(images.image_sizes): + scores_per_image = [ + # Multiply and sqrt centerness & classification scores + # (See eqn. 4 in https://arxiv.org/abs/2006.09214) + torch.sqrt(x[img_idx].sigmoid_() * y[img_idx].sigmoid_()) + for x, y in zip(pred_logits, pred_centerness) + ] + deltas_per_image = [x[img_idx] for x in pred_anchor_deltas] + results_per_image = self.inference_single_image( + anchors, scores_per_image, deltas_per_image, image_size + ) + results.append(results_per_image) + return results + + def inference_single_image( + self, + anchors: List[Boxes], + box_cls: List[Tensor], + box_delta: List[Tensor], + image_size: Tuple[int, int], + ): + """ + Identical to :meth:`RetinaNet.inference_single_image. + """ + pred = self._decode_multi_level_predictions( + anchors, + box_cls, + box_delta, + self.test_score_thresh, + self.test_topk_candidates, + image_size, + ) + keep = batched_nms( + pred.pred_boxes.tensor, pred.scores, pred.pred_classes, self.test_nms_thresh + ) + return pred[keep[: self.max_detections_per_image]] + + +class FCOSHead(RetinaNetHead): + """ + The head used in :paper:`fcos`. It adds an additional centerness + prediction branch on top of :class:`RetinaNetHead`. 
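+
+    Its forward pass returns three per-level lists (logits, bbox_reg, ctrness)
+    whose entries have shapes (N, K, Hi, Wi), (N, 4, Hi, Wi) and (N, 1, Hi, Wi)
+    respectively, since the number of anchors per location is fixed to 1.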
+ """ + + def __init__(self, *, input_shape: List[ShapeSpec], conv_dims: List[int], **kwargs): + super().__init__(input_shape=input_shape, conv_dims=conv_dims, num_anchors=1, **kwargs) + # Unlike original FCOS, we do not add an additional learnable scale layer + # because it's found to have no benefits after normalizing regression targets by stride. + self._num_features = len(input_shape) + self.ctrness = nn.Conv2d(conv_dims[-1], 1, kernel_size=3, stride=1, padding=1) + torch.nn.init.normal_(self.ctrness.weight, std=0.01) + torch.nn.init.constant_(self.ctrness.bias, 0) + + def forward(self, features): + assert len(features) == self._num_features + logits = [] + bbox_reg = [] + ctrness = [] + for feature in features: + logits.append(self.cls_score(self.cls_subnet(feature))) + bbox_feature = self.bbox_subnet(feature) + bbox_reg.append(self.bbox_pred(bbox_feature)) + ctrness.append(self.ctrness(bbox_feature)) + return logits, bbox_reg, ctrness diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/panoptic_fpn.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/panoptic_fpn.py new file mode 100644 index 0000000..13aeabc --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/panoptic_fpn.py @@ -0,0 +1,266 @@ +# -*- coding: utf-8 -*- +# Copyright (c) Facebook, Inc. and its affiliates. + +import logging +from typing import Dict, List +import torch +from torch import nn + +from detectron2.config import configurable +from detectron2.structures import ImageList + +from ..postprocessing import detector_postprocess, sem_seg_postprocess +from .build import META_ARCH_REGISTRY +from .rcnn import GeneralizedRCNN +from .semantic_seg import build_sem_seg_head + +__all__ = ["PanopticFPN"] + + +@META_ARCH_REGISTRY.register() +class PanopticFPN(GeneralizedRCNN): + """ + Implement the paper :paper:`PanopticFPN`. + """ + + @configurable + def __init__( + self, + *, + sem_seg_head: nn.Module, + combine_overlap_thresh: float = 0.5, + combine_stuff_area_thresh: float = 4096, + combine_instances_score_thresh: float = 0.5, + **kwargs, + ): + """ + NOTE: this interface is experimental. + + Args: + sem_seg_head: a module for the semantic segmentation head. + combine_overlap_thresh: combine masks into one instances if + they have enough overlap + combine_stuff_area_thresh: ignore stuff areas smaller than this threshold + combine_instances_score_thresh: ignore instances whose score is + smaller than this threshold + + Other arguments are the same as :class:`GeneralizedRCNN`. + """ + super().__init__(**kwargs) + self.sem_seg_head = sem_seg_head + # options when combining instance & semantic outputs + self.combine_overlap_thresh = combine_overlap_thresh + self.combine_stuff_area_thresh = combine_stuff_area_thresh + self.combine_instances_score_thresh = combine_instances_score_thresh + + @classmethod + def from_config(cls, cfg): + ret = super().from_config(cfg) + ret.update( + { + "combine_overlap_thresh": cfg.MODEL.PANOPTIC_FPN.COMBINE.OVERLAP_THRESH, + "combine_stuff_area_thresh": cfg.MODEL.PANOPTIC_FPN.COMBINE.STUFF_AREA_LIMIT, + "combine_instances_score_thresh": cfg.MODEL.PANOPTIC_FPN.COMBINE.INSTANCES_CONFIDENCE_THRESH, # noqa + } + ) + ret["sem_seg_head"] = build_sem_seg_head(cfg, ret["backbone"].output_shape()) + logger = logging.getLogger(__name__) + if not cfg.MODEL.PANOPTIC_FPN.COMBINE.ENABLED: + logger.warning( + "PANOPTIC_FPN.COMBINED.ENABLED is no longer used. 
" + " model.inference(do_postprocess=) should be used to toggle postprocessing." + ) + if cfg.MODEL.PANOPTIC_FPN.INSTANCE_LOSS_WEIGHT != 1.0: + w = cfg.MODEL.PANOPTIC_FPN.INSTANCE_LOSS_WEIGHT + logger.warning( + "PANOPTIC_FPN.INSTANCE_LOSS_WEIGHT should be replaced by weights on each ROI head." + ) + + def update_weight(x): + if isinstance(x, dict): + return {k: v * w for k, v in x.items()} + else: + return x * w + + roi_heads = ret["roi_heads"] + roi_heads.box_predictor.loss_weight = update_weight(roi_heads.box_predictor.loss_weight) + roi_heads.mask_head.loss_weight = update_weight(roi_heads.mask_head.loss_weight) + return ret + + def forward(self, batched_inputs): + """ + Args: + batched_inputs: a list, batched outputs of :class:`DatasetMapper`. + Each item in the list contains the inputs for one image. + + For now, each item in the list is a dict that contains: + + * "image": Tensor, image in (C, H, W) format. + * "instances": Instances + * "sem_seg": semantic segmentation ground truth. + * Other information that's included in the original dicts, such as: + "height", "width" (int): the output resolution of the model, used in inference. + See :meth:`postprocess` for details. + + Returns: + list[dict]: + each dict has the results for one image. The dict contains the following keys: + + * "instances": see :meth:`GeneralizedRCNN.forward` for its format. + * "sem_seg": see :meth:`SemanticSegmentor.forward` for its format. + * "panoptic_seg": See the return value of + :func:`combine_semantic_and_instance_outputs` for its format. + """ + if not self.training: + return self.inference(batched_inputs) + images = self.preprocess_image(batched_inputs) + features = self.backbone(images.tensor) + + assert "sem_seg" in batched_inputs[0] + gt_sem_seg = [x["sem_seg"].to(self.device) for x in batched_inputs] + gt_sem_seg = ImageList.from_tensors( + gt_sem_seg, self.backbone.size_divisibility, self.sem_seg_head.ignore_value + ).tensor + sem_seg_results, sem_seg_losses = self.sem_seg_head(features, gt_sem_seg) + + gt_instances = [x["instances"].to(self.device) for x in batched_inputs] + proposals, proposal_losses = self.proposal_generator(images, features, gt_instances) + detector_results, detector_losses = self.roi_heads( + images, features, proposals, gt_instances + ) + + losses = sem_seg_losses + losses.update(proposal_losses) + losses.update(detector_losses) + return losses + + def inference(self, batched_inputs: List[Dict[str, torch.Tensor]], do_postprocess: bool = True): + """ + Run inference on the given inputs. + + Args: + batched_inputs (list[dict]): same as in :meth:`forward` + do_postprocess (bool): whether to apply post-processing on the outputs. + + Returns: + When do_postprocess=True, see docs in :meth:`forward`. + Otherwise, returns a (list[Instances], list[Tensor]) that contains + the raw detector outputs, and raw semantic segmentation outputs. 
+ """ + images = self.preprocess_image(batched_inputs) + features = self.backbone(images.tensor) + sem_seg_results, sem_seg_losses = self.sem_seg_head(features, None) + proposals, _ = self.proposal_generator(images, features, None) + detector_results, _ = self.roi_heads(images, features, proposals, None) + + if do_postprocess: + processed_results = [] + for sem_seg_result, detector_result, input_per_image, image_size in zip( + sem_seg_results, detector_results, batched_inputs, images.image_sizes + ): + height = input_per_image.get("height", image_size[0]) + width = input_per_image.get("width", image_size[1]) + sem_seg_r = sem_seg_postprocess(sem_seg_result, image_size, height, width) + detector_r = detector_postprocess(detector_result, height, width) + + processed_results.append({"sem_seg": sem_seg_r, "instances": detector_r}) + + panoptic_r = combine_semantic_and_instance_outputs( + detector_r, + sem_seg_r.argmax(dim=0), + self.combine_overlap_thresh, + self.combine_stuff_area_thresh, + self.combine_instances_score_thresh, + ) + processed_results[-1]["panoptic_seg"] = panoptic_r + return processed_results + else: + return detector_results, sem_seg_results + + +def combine_semantic_and_instance_outputs( + instance_results, + semantic_results, + overlap_threshold, + stuff_area_thresh, + instances_score_thresh, +): + """ + Implement a simple combining logic following + "combine_semantic_and_instance_predictions.py" in panopticapi + to produce panoptic segmentation outputs. + + Args: + instance_results: output of :func:`detector_postprocess`. + semantic_results: an (H, W) tensor, each element is the contiguous semantic + category id + + Returns: + panoptic_seg (Tensor): of shape (height, width) where the values are ids for each segment. + segments_info (list[dict]): Describe each segment in `panoptic_seg`. + Each dict contains keys "id", "category_id", "isthing". 
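+
+    For example, a "thing" segment may be recorded as (illustrative values)::
+
+        {"id": 2, "isthing": True, "score": 0.97, "category_id": 0, "instance_id": 5}
+
+    The returned ``(panoptic_seg, segments_info)`` tuple is what
+    :meth:`PanopticFPN.inference` stores under ``processed_results[-1]["panoptic_seg"]``.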
+ """ + panoptic_seg = torch.zeros_like(semantic_results, dtype=torch.int32) + + # sort instance outputs by scores + sorted_inds = torch.argsort(-instance_results.scores) + + current_segment_id = 0 + segments_info = [] + + instance_masks = instance_results.pred_masks.to(dtype=torch.bool, device=panoptic_seg.device) + + # Add instances one-by-one, check for overlaps with existing ones + for inst_id in sorted_inds: + score = instance_results.scores[inst_id].item() + if score < instances_score_thresh: + break + mask = instance_masks[inst_id] # H,W + mask_area = mask.sum().item() + + if mask_area == 0: + continue + + intersect = (mask > 0) & (panoptic_seg > 0) + intersect_area = intersect.sum().item() + + if intersect_area * 1.0 / mask_area > overlap_threshold: + continue + + if intersect_area > 0: + mask = mask & (panoptic_seg == 0) + + current_segment_id += 1 + panoptic_seg[mask] = current_segment_id + segments_info.append( + { + "id": current_segment_id, + "isthing": True, + "score": score, + "category_id": instance_results.pred_classes[inst_id].item(), + "instance_id": inst_id.item(), + } + ) + + # Add semantic results to remaining empty areas + semantic_labels = torch.unique(semantic_results).cpu().tolist() + for semantic_label in semantic_labels: + if semantic_label == 0: # 0 is a special "thing" class + continue + mask = (semantic_results == semantic_label) & (panoptic_seg == 0) + mask_area = mask.sum().item() + if mask_area < stuff_area_thresh: + continue + + current_segment_id += 1 + panoptic_seg[mask] = current_segment_id + segments_info.append( + { + "id": current_segment_id, + "isthing": False, + "category_id": semantic_label, + "area": mask_area, + } + ) + + return panoptic_seg, segments_info diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/rcnn.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/rcnn.py new file mode 100644 index 0000000..7b45363 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/rcnn.py @@ -0,0 +1,327 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import logging +import numpy as np +from typing import Dict, List, Optional, Tuple +import torch +from torch import nn + +from detectron2.config import configurable +from detectron2.data.detection_utils import convert_image_to_rgb +from detectron2.structures import ImageList, Instances +from detectron2.utils.events import get_event_storage +from detectron2.utils.logger import log_first_n + +from ..backbone import Backbone, build_backbone +from ..postprocessing import detector_postprocess +from ..proposal_generator import build_proposal_generator +from ..roi_heads import build_roi_heads +from .build import META_ARCH_REGISTRY + +__all__ = ["GeneralizedRCNN", "ProposalNetwork"] + + +@META_ARCH_REGISTRY.register() +class GeneralizedRCNN(nn.Module): + """ + Generalized R-CNN. Any models that contains the following three components: + 1. Per-image feature extraction (aka backbone) + 2. Region proposal generation + 3. 
Per-region feature extraction and prediction + """ + + @configurable + def __init__( + self, + *, + backbone: Backbone, + proposal_generator: nn.Module, + roi_heads: nn.Module, + pixel_mean: Tuple[float], + pixel_std: Tuple[float], + input_format: Optional[str] = None, + vis_period: int = 0, + ): + """ + Args: + backbone: a backbone module, must follow detectron2's backbone interface + proposal_generator: a module that generates proposals using backbone features + roi_heads: a ROI head that performs per-region computation + pixel_mean, pixel_std: list or tuple with #channels element, representing + the per-channel mean and std to be used to normalize the input image + input_format: describe the meaning of channels of input. Needed by visualization + vis_period: the period to run visualization. Set to 0 to disable. + """ + super().__init__() + self.backbone = backbone + self.proposal_generator = proposal_generator + self.roi_heads = roi_heads + + self.input_format = input_format + self.vis_period = vis_period + if vis_period > 0: + assert input_format is not None, "input_format is required for visualization!" + + self.register_buffer("pixel_mean", torch.tensor(pixel_mean).view(-1, 1, 1), False) + self.register_buffer("pixel_std", torch.tensor(pixel_std).view(-1, 1, 1), False) + assert ( + self.pixel_mean.shape == self.pixel_std.shape + ), f"{self.pixel_mean} and {self.pixel_std} have different shapes!" + + @classmethod + def from_config(cls, cfg): + backbone = build_backbone(cfg) + return { + "backbone": backbone, + "proposal_generator": build_proposal_generator(cfg, backbone.output_shape()), + "roi_heads": build_roi_heads(cfg, backbone.output_shape()), + "input_format": cfg.INPUT.FORMAT, + "vis_period": cfg.VIS_PERIOD, + "pixel_mean": cfg.MODEL.PIXEL_MEAN, + "pixel_std": cfg.MODEL.PIXEL_STD, + } + + @property + def device(self): + return self.pixel_mean.device + + def visualize_training(self, batched_inputs, proposals): + """ + A function used to visualize images and proposals. It shows ground truth + bounding boxes on the original image and up to 20 top-scoring predicted + object proposals on the original image. Users can implement different + visualization functions for different models. + + Args: + batched_inputs (list): a list that contains input to the model. + proposals (list): a list that contains predicted proposals. Both + batched_inputs and proposals should have the same length. + """ + from detectron2.utils.visualizer import Visualizer + + storage = get_event_storage() + max_vis_prop = 20 + + for input, prop in zip(batched_inputs, proposals): + img = input["image"] + img = convert_image_to_rgb(img.permute(1, 2, 0), self.input_format) + v_gt = Visualizer(img, None) + v_gt = v_gt.overlay_instances(boxes=input["instances"].gt_boxes) + anno_img = v_gt.get_image() + box_size = min(len(prop.proposal_boxes), max_vis_prop) + v_pred = Visualizer(img, None) + v_pred = v_pred.overlay_instances( + boxes=prop.proposal_boxes[0:box_size].tensor.cpu().numpy() + ) + prop_img = v_pred.get_image() + vis_img = np.concatenate((anno_img, prop_img), axis=1) + vis_img = vis_img.transpose(2, 0, 1) + vis_name = "Left: GT bounding boxes; Right: Predicted proposals" + storage.put_image(vis_name, vis_img) + break # only visualize one image in a batch + + def forward(self, batched_inputs: List[Dict[str, torch.Tensor]]): + """ + Args: + batched_inputs: a list, batched outputs of :class:`DatasetMapper` . + Each item in the list contains the inputs for one image. 
+ For now, each item in the list is a dict that contains: + + * image: Tensor, image in (C, H, W) format. + * instances (optional): groundtruth :class:`Instances` + * proposals (optional): :class:`Instances`, precomputed proposals. + + Other information that's included in the original dicts, such as: + + * "height", "width" (int): the output resolution of the model, used in inference. + See :meth:`postprocess` for details. + + Returns: + list[dict]: + Each dict is the output for one input image. + The dict contains one key "instances" whose value is a :class:`Instances`. + The :class:`Instances` object has the following keys: + "pred_boxes", "pred_classes", "scores", "pred_masks", "pred_keypoints" + """ + if not self.training: + return self.inference(batched_inputs) + + images = self.preprocess_image(batched_inputs) + if "instances" in batched_inputs[0]: + gt_instances = [x["instances"].to(self.device) for x in batched_inputs] + else: + gt_instances = None + + features = self.backbone(images.tensor) + + if self.proposal_generator is not None: + proposals, proposal_losses = self.proposal_generator(images, features, gt_instances) + else: + assert "proposals" in batched_inputs[0] + proposals = [x["proposals"].to(self.device) for x in batched_inputs] + proposal_losses = {} + + _, detector_losses = self.roi_heads(images, features, proposals, gt_instances) + if self.vis_period > 0: + storage = get_event_storage() + if storage.iter % self.vis_period == 0: + self.visualize_training(batched_inputs, proposals) + + losses = {} + losses.update(detector_losses) + losses.update(proposal_losses) + return losses + + def inference( + self, + batched_inputs: List[Dict[str, torch.Tensor]], + detected_instances: Optional[List[Instances]] = None, + do_postprocess: bool = True, + ): + """ + Run inference on the given inputs. + + Args: + batched_inputs (list[dict]): same as in :meth:`forward` + detected_instances (None or list[Instances]): if not None, it + contains an `Instances` object per image. The `Instances` + object contains "pred_boxes" and "pred_classes" which are + known boxes in the image. + The inference will then skip the detection of bounding boxes, + and only predict other per-ROI outputs. + do_postprocess (bool): whether to apply post-processing on the outputs. + + Returns: + When do_postprocess=True, same as in :meth:`forward`. + Otherwise, a list[Instances] containing raw network outputs. + """ + assert not self.training + + images = self.preprocess_image(batched_inputs) + features = self.backbone(images.tensor) + + if detected_instances is None: + if self.proposal_generator is not None: + proposals, _ = self.proposal_generator(images, features, None) + else: + assert "proposals" in batched_inputs[0] + proposals = [x["proposals"].to(self.device) for x in batched_inputs] + + results, _ = self.roi_heads(images, features, proposals, None) + else: + detected_instances = [x.to(self.device) for x in detected_instances] + results = self.roi_heads.forward_with_given_boxes(features, detected_instances) + + if do_postprocess: + assert not torch.jit.is_scripting(), "Scripting is not supported for postprocess." + return GeneralizedRCNN._postprocess(results, batched_inputs, images.image_sizes) + else: + return results + + def preprocess_image(self, batched_inputs: List[Dict[str, torch.Tensor]]): + """ + Normalize, pad and batch the input images. 
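+
+        Each image is standardized as ``(x - pixel_mean) / pixel_std`` and the batch
+        is padded to ``self.backbone.size_divisibility``; e.g. (illustrative) images
+        of sizes 480x640 and 500x600 are batched into a single (2, 3, 512, 640)
+        tensor when the divisibility is 32.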
+ """ + images = [x["image"].to(self.device) for x in batched_inputs] + images = [(x - self.pixel_mean) / self.pixel_std for x in images] + images = ImageList.from_tensors(images, self.backbone.size_divisibility) + return images + + @staticmethod + def _postprocess(instances, batched_inputs: List[Dict[str, torch.Tensor]], image_sizes): + """ + Rescale the output instances to the target size. + """ + # note: private function; subject to changes + processed_results = [] + for results_per_image, input_per_image, image_size in zip( + instances, batched_inputs, image_sizes + ): + height = input_per_image.get("height", image_size[0]) + width = input_per_image.get("width", image_size[1]) + r = detector_postprocess(results_per_image, height, width) + processed_results.append({"instances": r}) + return processed_results + + +@META_ARCH_REGISTRY.register() +class ProposalNetwork(nn.Module): + """ + A meta architecture that only predicts object proposals. + """ + + @configurable + def __init__( + self, + *, + backbone: Backbone, + proposal_generator: nn.Module, + pixel_mean: Tuple[float], + pixel_std: Tuple[float], + ): + """ + Args: + backbone: a backbone module, must follow detectron2's backbone interface + proposal_generator: a module that generates proposals using backbone features + pixel_mean, pixel_std: list or tuple with #channels element, representing + the per-channel mean and std to be used to normalize the input image + """ + super().__init__() + self.backbone = backbone + self.proposal_generator = proposal_generator + self.register_buffer("pixel_mean", torch.tensor(pixel_mean).view(-1, 1, 1), False) + self.register_buffer("pixel_std", torch.tensor(pixel_std).view(-1, 1, 1), False) + + @classmethod + def from_config(cls, cfg): + backbone = build_backbone(cfg) + return { + "backbone": backbone, + "proposal_generator": build_proposal_generator(cfg, backbone.output_shape()), + "pixel_mean": cfg.MODEL.PIXEL_MEAN, + "pixel_std": cfg.MODEL.PIXEL_STD, + } + + @property + def device(self): + return self.pixel_mean.device + + def forward(self, batched_inputs): + """ + Args: + Same as in :class:`GeneralizedRCNN.forward` + + Returns: + list[dict]: + Each dict is the output for one input image. + The dict contains one key "proposals" whose value is a + :class:`Instances` with keys "proposal_boxes" and "objectness_logits". + """ + images = [x["image"].to(self.device) for x in batched_inputs] + images = [(x - self.pixel_mean) / self.pixel_std for x in images] + images = ImageList.from_tensors(images, self.backbone.size_divisibility) + features = self.backbone(images.tensor) + + if "instances" in batched_inputs[0]: + gt_instances = [x["instances"].to(self.device) for x in batched_inputs] + elif "targets" in batched_inputs[0]: + log_first_n( + logging.WARN, "'targets' in the model inputs is now renamed to 'instances'!", n=10 + ) + gt_instances = [x["targets"].to(self.device) for x in batched_inputs] + else: + gt_instances = None + proposals, proposal_losses = self.proposal_generator(images, features, gt_instances) + # In training, the proposals are not useful at all but we generate them anyway. + # This makes RPN-only models about 5% slower. 
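+        # (In that case a typical training loop simply consumes the returned dict,
+        # e.g. `sum(proposal_losses.values()).backward()`; illustrative usage.)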
+ if self.training: + return proposal_losses + + processed_results = [] + for results_per_image, input_per_image, image_size in zip( + proposals, batched_inputs, images.image_sizes + ): + height = input_per_image.get("height", image_size[0]) + width = input_per_image.get("width", image_size[1]) + r = detector_postprocess(results_per_image, height, width) + processed_results.append({"proposals": r}) + return processed_results diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/retinanet.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/retinanet.py new file mode 100644 index 0000000..3ea88f6 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/retinanet.py @@ -0,0 +1,439 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import logging +import math +from typing import List, Tuple +import torch +from fvcore.nn import sigmoid_focal_loss_jit +from torch import Tensor, nn +from torch.nn import functional as F + +from detectron2.config import configurable +from detectron2.layers import CycleBatchNormList, ShapeSpec, batched_nms, cat, get_norm +from detectron2.structures import Boxes, ImageList, Instances, pairwise_iou +from detectron2.utils.events import get_event_storage + +from ..anchor_generator import build_anchor_generator +from ..backbone import Backbone, build_backbone +from ..box_regression import Box2BoxTransform, _dense_box_regression_loss +from ..matcher import Matcher +from .build import META_ARCH_REGISTRY +from .dense_detector import DenseDetector, permute_to_N_HWA_K # noqa + +__all__ = ["RetinaNet"] + + +logger = logging.getLogger(__name__) + + +@META_ARCH_REGISTRY.register() +class RetinaNet(DenseDetector): + """ + Implement RetinaNet in :paper:`RetinaNet`. + """ + + @configurable + def __init__( + self, + *, + backbone: Backbone, + head: nn.Module, + head_in_features, + anchor_generator, + box2box_transform, + anchor_matcher, + num_classes, + focal_loss_alpha=0.25, + focal_loss_gamma=2.0, + smooth_l1_beta=0.0, + box_reg_loss_type="smooth_l1", + test_score_thresh=0.05, + test_topk_candidates=1000, + test_nms_thresh=0.5, + max_detections_per_image=100, + pixel_mean, + pixel_std, + vis_period=0, + input_format="BGR", + ): + """ + NOTE: this interface is experimental. + + Args: + backbone: a backbone module, must follow detectron2's backbone interface + head (nn.Module): a module that predicts logits and regression deltas + for each level from a list of per-level features + head_in_features (Tuple[str]): Names of the input feature maps to be used in head + anchor_generator (nn.Module): a module that creates anchors from a + list of features. Usually an instance of :class:`AnchorGenerator` + box2box_transform (Box2BoxTransform): defines the transform from anchors boxes to + instance boxes + anchor_matcher (Matcher): label the anchors by matching them with ground truth. + num_classes (int): number of classes. Used to label background proposals. 
+ + # Loss parameters: + focal_loss_alpha (float): focal_loss_alpha + focal_loss_gamma (float): focal_loss_gamma + smooth_l1_beta (float): smooth_l1_beta + box_reg_loss_type (str): Options are "smooth_l1", "giou", "diou", "ciou" + + # Inference parameters: + test_score_thresh (float): Inference cls score threshold, only anchors with + score > INFERENCE_TH are considered for inference (to improve speed) + test_topk_candidates (int): Select topk candidates before NMS + test_nms_thresh (float): Overlap threshold used for non-maximum suppression + (suppress boxes with IoU >= this threshold) + max_detections_per_image (int): + Maximum number of detections to return per image during inference + (100 is based on the limit established for the COCO dataset). + + pixel_mean, pixel_std: see :class:`DenseDetector`. + """ + super().__init__( + backbone, head, head_in_features, pixel_mean=pixel_mean, pixel_std=pixel_std + ) + self.num_classes = num_classes + + # Anchors + self.anchor_generator = anchor_generator + self.box2box_transform = box2box_transform + self.anchor_matcher = anchor_matcher + + # Loss parameters: + self.focal_loss_alpha = focal_loss_alpha + self.focal_loss_gamma = focal_loss_gamma + self.smooth_l1_beta = smooth_l1_beta + self.box_reg_loss_type = box_reg_loss_type + # Inference parameters: + self.test_score_thresh = test_score_thresh + self.test_topk_candidates = test_topk_candidates + self.test_nms_thresh = test_nms_thresh + self.max_detections_per_image = max_detections_per_image + # Vis parameters + self.vis_period = vis_period + self.input_format = input_format + + @classmethod + def from_config(cls, cfg): + backbone = build_backbone(cfg) + backbone_shape = backbone.output_shape() + feature_shapes = [backbone_shape[f] for f in cfg.MODEL.RETINANET.IN_FEATURES] + head = RetinaNetHead(cfg, feature_shapes) + anchor_generator = build_anchor_generator(cfg, feature_shapes) + return { + "backbone": backbone, + "head": head, + "anchor_generator": anchor_generator, + "box2box_transform": Box2BoxTransform(weights=cfg.MODEL.RETINANET.BBOX_REG_WEIGHTS), + "anchor_matcher": Matcher( + cfg.MODEL.RETINANET.IOU_THRESHOLDS, + cfg.MODEL.RETINANET.IOU_LABELS, + allow_low_quality_matches=True, + ), + "pixel_mean": cfg.MODEL.PIXEL_MEAN, + "pixel_std": cfg.MODEL.PIXEL_STD, + "num_classes": cfg.MODEL.RETINANET.NUM_CLASSES, + "head_in_features": cfg.MODEL.RETINANET.IN_FEATURES, + # Loss parameters: + "focal_loss_alpha": cfg.MODEL.RETINANET.FOCAL_LOSS_ALPHA, + "focal_loss_gamma": cfg.MODEL.RETINANET.FOCAL_LOSS_GAMMA, + "smooth_l1_beta": cfg.MODEL.RETINANET.SMOOTH_L1_LOSS_BETA, + "box_reg_loss_type": cfg.MODEL.RETINANET.BBOX_REG_LOSS_TYPE, + # Inference parameters: + "test_score_thresh": cfg.MODEL.RETINANET.SCORE_THRESH_TEST, + "test_topk_candidates": cfg.MODEL.RETINANET.TOPK_CANDIDATES_TEST, + "test_nms_thresh": cfg.MODEL.RETINANET.NMS_THRESH_TEST, + "max_detections_per_image": cfg.TEST.DETECTIONS_PER_IMAGE, + # Vis parameters + "vis_period": cfg.VIS_PERIOD, + "input_format": cfg.INPUT.FORMAT, + } + + def forward_training(self, images, features, predictions, gt_instances): + # Transpose the Hi*Wi*A dimension to the middle: + pred_logits, pred_anchor_deltas = self._transpose_dense_predictions( + predictions, [self.num_classes, 4] + ) + anchors = self.anchor_generator(features) + gt_labels, gt_boxes = self.label_anchors(anchors, gt_instances) + return self.losses(anchors, pred_logits, gt_labels, pred_anchor_deltas, gt_boxes) + + def losses(self, anchors, pred_logits, gt_labels, pred_anchor_deltas, 
gt_boxes): + """ + Args: + anchors (list[Boxes]): a list of #feature level Boxes + gt_labels, gt_boxes: see output of :meth:`RetinaNet.label_anchors`. + Their shapes are (N, R) and (N, R, 4), respectively, where R is + the total number of anchors across levels, i.e. sum(Hi x Wi x Ai) + pred_logits, pred_anchor_deltas: both are list[Tensor]. Each element in the + list corresponds to one level and has shape (N, Hi * Wi * Ai, K or 4). + Where K is the number of classes used in `pred_logits`. + + Returns: + dict[str, Tensor]: + mapping from a named loss to a scalar tensor storing the loss. + Used during training only. The dict keys are: "loss_cls" and "loss_box_reg" + """ + num_images = len(gt_labels) + gt_labels = torch.stack(gt_labels) # (N, R) + + valid_mask = gt_labels >= 0 + pos_mask = (gt_labels >= 0) & (gt_labels != self.num_classes) + num_pos_anchors = pos_mask.sum().item() + get_event_storage().put_scalar("num_pos_anchors", num_pos_anchors / num_images) + normalizer = self._ema_update("loss_normalizer", max(num_pos_anchors, 1), 100) + + # classification and regression loss + gt_labels_target = F.one_hot(gt_labels[valid_mask], num_classes=self.num_classes + 1)[ + :, :-1 + ] # no loss for the last (background) class + loss_cls = sigmoid_focal_loss_jit( + cat(pred_logits, dim=1)[valid_mask], + gt_labels_target.to(pred_logits[0].dtype), + alpha=self.focal_loss_alpha, + gamma=self.focal_loss_gamma, + reduction="sum", + ) + + loss_box_reg = _dense_box_regression_loss( + anchors, + self.box2box_transform, + pred_anchor_deltas, + gt_boxes, + pos_mask, + box_reg_loss_type=self.box_reg_loss_type, + smooth_l1_beta=self.smooth_l1_beta, + ) + + return { + "loss_cls": loss_cls / normalizer, + "loss_box_reg": loss_box_reg / normalizer, + } + + @torch.no_grad() + def label_anchors(self, anchors, gt_instances): + """ + Args: + anchors (list[Boxes]): A list of #feature level Boxes. + The Boxes contains anchors of this image on the specific feature level. + gt_instances (list[Instances]): a list of N `Instances`s. The i-th + `Instances` contains the ground-truth per-instance annotations + for the i-th input image. + + Returns: + list[Tensor]: List of #img tensors. i-th element is a vector of labels whose length is + the total number of anchors across all feature maps (sum(Hi * Wi * A)). + Label values are in {-1, 0, ..., K}, with -1 means ignore, and K means background. + + list[Tensor]: i-th element is a Rx4 tensor, where R is the total number of anchors + across feature maps. The values are the matched gt boxes for each anchor. + Values are undefined for those anchors not labeled as foreground. + """ + anchors = Boxes.cat(anchors) # Rx4 + + gt_labels = [] + matched_gt_boxes = [] + for gt_per_image in gt_instances: + match_quality_matrix = pairwise_iou(gt_per_image.gt_boxes, anchors) + matched_idxs, anchor_labels = self.anchor_matcher(match_quality_matrix) + del match_quality_matrix + + if len(gt_per_image) > 0: + matched_gt_boxes_i = gt_per_image.gt_boxes.tensor[matched_idxs] + + gt_labels_i = gt_per_image.gt_classes[matched_idxs] + # Anchors with label 0 are treated as background. + gt_labels_i[anchor_labels == 0] = self.num_classes + # Anchors with label -1 are ignored. 
+ gt_labels_i[anchor_labels == -1] = -1 + else: + matched_gt_boxes_i = torch.zeros_like(anchors.tensor) + gt_labels_i = torch.zeros_like(matched_idxs) + self.num_classes + + gt_labels.append(gt_labels_i) + matched_gt_boxes.append(matched_gt_boxes_i) + + return gt_labels, matched_gt_boxes + + def forward_inference( + self, images: ImageList, features: List[Tensor], predictions: List[List[Tensor]] + ): + pred_logits, pred_anchor_deltas = self._transpose_dense_predictions( + predictions, [self.num_classes, 4] + ) + anchors = self.anchor_generator(features) + + results: List[Instances] = [] + for img_idx, image_size in enumerate(images.image_sizes): + scores_per_image = [x[img_idx].sigmoid_() for x in pred_logits] + deltas_per_image = [x[img_idx] for x in pred_anchor_deltas] + results_per_image = self.inference_single_image( + anchors, scores_per_image, deltas_per_image, image_size + ) + results.append(results_per_image) + return results + + def inference_single_image( + self, + anchors: List[Boxes], + box_cls: List[Tensor], + box_delta: List[Tensor], + image_size: Tuple[int, int], + ): + """ + Single-image inference. Return bounding-box detection results by thresholding + on scores and applying non-maximum suppression (NMS). + + Arguments: + anchors (list[Boxes]): list of #feature levels. Each entry contains + a Boxes object, which contains all the anchors in that feature level. + box_cls (list[Tensor]): list of #feature levels. Each entry contains + tensor of size (H x W x A, K) + box_delta (list[Tensor]): Same shape as 'box_cls' except that K becomes 4. + image_size (tuple(H, W)): a tuple of the image height and width. + + Returns: + Same as `inference`, but for only one image. + """ + pred = self._decode_multi_level_predictions( + anchors, + box_cls, + box_delta, + self.test_score_thresh, + self.test_topk_candidates, + image_size, + ) + keep = batched_nms( # per-class NMS + pred.pred_boxes.tensor, pred.scores, pred.pred_classes, self.test_nms_thresh + ) + return pred[keep[: self.max_detections_per_image]] + + +class RetinaNetHead(nn.Module): + """ + The head used in RetinaNet for object classification and box regression. + It has two subnets for the two tasks, with a common structure but separate parameters. + """ + + @configurable + def __init__( + self, + *, + input_shape: List[ShapeSpec], + num_classes, + num_anchors, + conv_dims: List[int], + norm="", + prior_prob=0.01, + ): + """ + NOTE: this interface is experimental. + + Args: + input_shape (List[ShapeSpec]): input shape + num_classes (int): number of classes. Used to label background proposals. + num_anchors (int): number of generated anchors + conv_dims (List[int]): dimensions for each convolution layer + norm (str or callable): + Normalization for conv layers except for the two output layers. + See :func:`detectron2.layers.get_norm` for supported types. + prior_prob (float): Prior weight for computing bias + """ + super().__init__() + + self._num_features = len(input_shape) + if norm == "BN" or norm == "SyncBN": + logger.info( + f"Using domain-specific {norm} in RetinaNetHead with len={self._num_features}." + ) + bn_class = nn.BatchNorm2d if norm == "BN" else nn.SyncBatchNorm + + def norm(c): + return CycleBatchNormList( + length=self._num_features, bn_class=bn_class, num_features=c + ) + + else: + norm_name = str(type(get_norm(norm, 1))) + if "BN" in norm_name: + logger.warning( + f"Shared BatchNorm (type={norm_name}) may not work well in RetinaNetHead." 
+ ) + + cls_subnet = [] + bbox_subnet = [] + for in_channels, out_channels in zip( + [input_shape[0].channels] + list(conv_dims), conv_dims + ): + cls_subnet.append( + nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=1, padding=1) + ) + if norm: + cls_subnet.append(get_norm(norm, out_channels)) + cls_subnet.append(nn.ReLU()) + bbox_subnet.append( + nn.Conv2d(in_channels, out_channels, kernel_size=3, stride=1, padding=1) + ) + if norm: + bbox_subnet.append(get_norm(norm, out_channels)) + bbox_subnet.append(nn.ReLU()) + + self.cls_subnet = nn.Sequential(*cls_subnet) + self.bbox_subnet = nn.Sequential(*bbox_subnet) + self.cls_score = nn.Conv2d( + conv_dims[-1], num_anchors * num_classes, kernel_size=3, stride=1, padding=1 + ) + self.bbox_pred = nn.Conv2d( + conv_dims[-1], num_anchors * 4, kernel_size=3, stride=1, padding=1 + ) + + # Initialization + for modules in [self.cls_subnet, self.bbox_subnet, self.cls_score, self.bbox_pred]: + for layer in modules.modules(): + if isinstance(layer, nn.Conv2d): + torch.nn.init.normal_(layer.weight, mean=0, std=0.01) + torch.nn.init.constant_(layer.bias, 0) + + # Use prior in model initialization to improve stability + bias_value = -(math.log((1 - prior_prob) / prior_prob)) + torch.nn.init.constant_(self.cls_score.bias, bias_value) + + @classmethod + def from_config(cls, cfg, input_shape: List[ShapeSpec]): + num_anchors = build_anchor_generator(cfg, input_shape).num_cell_anchors + assert ( + len(set(num_anchors)) == 1 + ), "Using different number of anchors between levels is not currently supported!" + num_anchors = num_anchors[0] + + return { + "input_shape": input_shape, + "num_classes": cfg.MODEL.RETINANET.NUM_CLASSES, + "conv_dims": [input_shape[0].channels] * cfg.MODEL.RETINANET.NUM_CONVS, + "prior_prob": cfg.MODEL.RETINANET.PRIOR_PROB, + "norm": cfg.MODEL.RETINANET.NORM, + "num_anchors": num_anchors, + } + + def forward(self, features: List[Tensor]): + """ + Arguments: + features (list[Tensor]): FPN feature map tensors in high to low resolution. + Each tensor in the list correspond to different feature levels. + + Returns: + logits (list[Tensor]): #lvl tensors, each has shape (N, AxK, Hi, Wi). + The tensor predicts the classification probability + at each spatial position for each of the A anchors and K object + classes. + bbox_reg (list[Tensor]): #lvl tensors, each has shape (N, Ax4, Hi, Wi). + The tensor predicts 4-vector (dx,dy,dw,dh) box + regression values for every anchor. These values are the + relative offset between the anchor and the ground truth box. + """ + assert len(features) == self._num_features + logits = [] + bbox_reg = [] + for feature in features: + logits.append(self.cls_score(self.cls_subnet(feature))) + bbox_reg.append(self.bbox_pred(self.bbox_subnet(feature))) + return logits, bbox_reg diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/semantic_seg.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/semantic_seg.py new file mode 100644 index 0000000..6dd3dc2 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/meta_arch/semantic_seg.py @@ -0,0 +1,260 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
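The head above emits per-level tensors of shape (N, A*K, Hi, Wi), while the loss code works on (N, Hi*Wi*A, K). The `permute_to_N_HWA_K` helper imported at the top of retinanet.py performs that flattening; a self-contained sketch of the equivalent reshaping:

import torch

def permute_to_N_HWA_K(tensor, K):
    # (N, A*K, H, W) -> (N, H*W*A, K), keeping the per-anchor grouping intact
    N, _, H, W = tensor.shape
    tensor = tensor.view(N, -1, K, H, W).permute(0, 3, 4, 1, 2)  # N, H, W, A, K
    return tensor.reshape(N, -1, K)

x = torch.randn(2, 9 * 80, 32, 32)        # A=9 anchors, K=80 classes
print(permute_to_N_HWA_K(x, 80).shape)    # torch.Size([2, 9216, 80])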
+import numpy as np
+from typing import Callable, Dict, Optional, Tuple, Union
+import fvcore.nn.weight_init as weight_init
+import torch
+from torch import nn
+from torch.nn import functional as F
+
+from detectron2.config import configurable
+from detectron2.layers import Conv2d, ShapeSpec, get_norm
+from detectron2.structures import ImageList
+from detectron2.utils.registry import Registry
+
+from ..backbone import Backbone, build_backbone
+from ..postprocessing import sem_seg_postprocess
+from .build import META_ARCH_REGISTRY
+
+__all__ = [
+    "SemanticSegmentor",
+    "SEM_SEG_HEADS_REGISTRY",
+    "SemSegFPNHead",
+    "build_sem_seg_head",
+]
+
+
+SEM_SEG_HEADS_REGISTRY = Registry("SEM_SEG_HEADS")
+SEM_SEG_HEADS_REGISTRY.__doc__ = """
+Registry for semantic segmentation heads, which make semantic segmentation predictions
+from feature maps.
+"""
+
+
+@META_ARCH_REGISTRY.register()
+class SemanticSegmentor(nn.Module):
+    """
+    Main class for semantic segmentation architectures.
+    """
+
+    @configurable
+    def __init__(
+        self,
+        *,
+        backbone: Backbone,
+        sem_seg_head: nn.Module,
+        pixel_mean: Tuple[float],
+        pixel_std: Tuple[float],
+    ):
+        """
+        Args:
+            backbone: a backbone module, must follow detectron2's backbone interface
+            sem_seg_head: a module that predicts semantic segmentation from backbone features
+            pixel_mean, pixel_std: list or tuple with #channels elements, representing
+                the per-channel mean and std to be used to normalize the input image
+        """
+        super().__init__()
+        self.backbone = backbone
+        self.sem_seg_head = sem_seg_head
+        self.register_buffer("pixel_mean", torch.tensor(pixel_mean).view(-1, 1, 1), False)
+        self.register_buffer("pixel_std", torch.tensor(pixel_std).view(-1, 1, 1), False)
+
+    @classmethod
+    def from_config(cls, cfg):
+        backbone = build_backbone(cfg)
+        sem_seg_head = build_sem_seg_head(cfg, backbone.output_shape())
+        return {
+            "backbone": backbone,
+            "sem_seg_head": sem_seg_head,
+            "pixel_mean": cfg.MODEL.PIXEL_MEAN,
+            "pixel_std": cfg.MODEL.PIXEL_STD,
+        }
+
+    @property
+    def device(self):
+        return self.pixel_mean.device
+
+    def forward(self, batched_inputs):
+        """
+        Args:
+            batched_inputs: a list, batched outputs of :class:`DatasetMapper`.
+                Each item in the list contains the inputs for one image.
+
+                For now, each item in the list is a dict that contains:
+
+                   * "image": Tensor, image in (C, H, W) format.
+                   * "sem_seg": semantic segmentation ground truth
+                   * Other information that's included in the original dicts, such as:
+                     "height", "width" (int): the output resolution of the model (may be different
+                     from input resolution), used in inference.
+
+
+        Returns:
+            list[dict]:
+              Each dict is the output for one input image.
+              The dict contains one key "sem_seg" whose value is a
+              Tensor that represents the
+              per-pixel segmentation predicted by the head.
+              The prediction has shape KxHxW that represents the logits of
+              each class for each pixel.
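A hypothetical call illustrating this contract (assuming `model` is a built SemanticSegmentor with K classes, in inference mode):

import torch

batched_inputs = [{
    "image": torch.rand(3, 512, 512) * 255,  # (C, H, W), already resized for the model
    "height": 1024,                          # desired output resolution
    "width": 1024,
}]
# outputs = model(batched_inputs)            # model: a built SemanticSegmentor
# outputs[0]["sem_seg"].shape                # torch.Size([K, 1024, 1024]) per-pixel logits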
+ """ + images = [x["image"].to(self.device) for x in batched_inputs] + images = [(x - self.pixel_mean) / self.pixel_std for x in images] + images = ImageList.from_tensors(images, self.backbone.size_divisibility) + + features = self.backbone(images.tensor) + + if "sem_seg" in batched_inputs[0]: + targets = [x["sem_seg"].to(self.device) for x in batched_inputs] + targets = ImageList.from_tensors( + targets, self.backbone.size_divisibility, self.sem_seg_head.ignore_value + ).tensor + else: + targets = None + results, losses = self.sem_seg_head(features, targets) + + if self.training: + return losses + + processed_results = [] + for result, input_per_image, image_size in zip(results, batched_inputs, images.image_sizes): + height = input_per_image.get("height", image_size[0]) + width = input_per_image.get("width", image_size[1]) + r = sem_seg_postprocess(result, image_size, height, width) + processed_results.append({"sem_seg": r}) + return processed_results + + +def build_sem_seg_head(cfg, input_shape): + """ + Build a semantic segmentation head from `cfg.MODEL.SEM_SEG_HEAD.NAME`. + """ + name = cfg.MODEL.SEM_SEG_HEAD.NAME + return SEM_SEG_HEADS_REGISTRY.get(name)(cfg, input_shape) + + +@SEM_SEG_HEADS_REGISTRY.register() +class SemSegFPNHead(nn.Module): + """ + A semantic segmentation head described in :paper:`PanopticFPN`. + It takes a list of FPN features as input, and applies a sequence of + 3x3 convs and upsampling to scale all of them to the stride defined by + ``common_stride``. Then these features are added and used to make final + predictions by another 1x1 conv layer. + """ + + @configurable + def __init__( + self, + input_shape: Dict[str, ShapeSpec], + *, + num_classes: int, + conv_dims: int, + common_stride: int, + loss_weight: float = 1.0, + norm: Optional[Union[str, Callable]] = None, + ignore_value: int = -1, + ): + """ + NOTE: this interface is experimental. + + Args: + input_shape: shapes (channels and stride) of the input features + num_classes: number of classes to predict + conv_dims: number of output channels for the intermediate conv layers. + common_stride: the common stride that all features will be upscaled to + loss_weight: loss weight + norm (str or callable): normalization for all conv layers + ignore_value: category id to be ignored during training. 
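The depth of each scale head follows from the stride ratio: one 3x3 conv per factor-of-2 gap between the feature stride and `common_stride`, with upsampling in between. Assuming the typical FPN strides and a common stride of 4 (illustrative values, matching the usual config defaults):

import numpy as np

common_stride = 4
for stride in (4, 8, 16, 32):
    head_length = max(1, int(np.log2(stride) - np.log2(common_stride)))
    print(stride, "->", head_length)   # 4 -> 1, 8 -> 1, 16 -> 2, 32 -> 3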
+ """ + super().__init__() + input_shape = sorted(input_shape.items(), key=lambda x: x[1].stride) + if not len(input_shape): + raise ValueError("SemSegFPNHead(input_shape=) cannot be empty!") + self.in_features = [k for k, v in input_shape] + feature_strides = [v.stride for k, v in input_shape] + feature_channels = [v.channels for k, v in input_shape] + + self.ignore_value = ignore_value + self.common_stride = common_stride + self.loss_weight = loss_weight + + self.scale_heads = [] + for in_feature, stride, channels in zip( + self.in_features, feature_strides, feature_channels + ): + head_ops = [] + head_length = max(1, int(np.log2(stride) - np.log2(self.common_stride))) + for k in range(head_length): + norm_module = get_norm(norm, conv_dims) + conv = Conv2d( + channels if k == 0 else conv_dims, + conv_dims, + kernel_size=3, + stride=1, + padding=1, + bias=not norm, + norm=norm_module, + activation=F.relu, + ) + weight_init.c2_msra_fill(conv) + head_ops.append(conv) + if stride != self.common_stride: + head_ops.append( + nn.Upsample(scale_factor=2, mode="bilinear", align_corners=False) + ) + self.scale_heads.append(nn.Sequential(*head_ops)) + self.add_module(in_feature, self.scale_heads[-1]) + self.predictor = Conv2d(conv_dims, num_classes, kernel_size=1, stride=1, padding=0) + weight_init.c2_msra_fill(self.predictor) + + @classmethod + def from_config(cls, cfg, input_shape: Dict[str, ShapeSpec]): + return { + "input_shape": { + k: v for k, v in input_shape.items() if k in cfg.MODEL.SEM_SEG_HEAD.IN_FEATURES + }, + "ignore_value": cfg.MODEL.SEM_SEG_HEAD.IGNORE_VALUE, + "num_classes": cfg.MODEL.SEM_SEG_HEAD.NUM_CLASSES, + "conv_dims": cfg.MODEL.SEM_SEG_HEAD.CONVS_DIM, + "common_stride": cfg.MODEL.SEM_SEG_HEAD.COMMON_STRIDE, + "norm": cfg.MODEL.SEM_SEG_HEAD.NORM, + "loss_weight": cfg.MODEL.SEM_SEG_HEAD.LOSS_WEIGHT, + } + + def forward(self, features, targets=None): + """ + Returns: + In training, returns (None, dict of losses) + In inference, returns (CxHxW logits, {}) + """ + x = self.layers(features) + if self.training: + return None, self.losses(x, targets) + else: + x = F.interpolate( + x, scale_factor=self.common_stride, mode="bilinear", align_corners=False + ) + return x, {} + + def layers(self, features): + for i, f in enumerate(self.in_features): + if i == 0: + x = self.scale_heads[i](features[f]) + else: + x = x + self.scale_heads[i](features[f]) + x = self.predictor(x) + return x + + def losses(self, predictions, targets): + predictions = predictions.float() # https://github.com/pytorch/pytorch/issues/48163 + predictions = F.interpolate( + predictions, + scale_factor=self.common_stride, + mode="bilinear", + align_corners=False, + ) + loss = F.cross_entropy( + predictions, targets, reduction="mean", ignore_index=self.ignore_value + ) + losses = {"loss_sem_seg": loss * self.loss_weight} + return losses diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/mmdet_wrapper.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/mmdet_wrapper.py new file mode 100644 index 0000000..386e929 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/mmdet_wrapper.py @@ -0,0 +1,274 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
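A note on the `_to_container` helper defined below: mmdet asserts on config types, so OmegaConf nodes must first be flattened into plain Python containers before mmcv's ConfigDict wraps them. Roughly:

from omegaconf import OmegaConf

cfg = OmegaConf.create({"type": "ResNet", "depth": 50, "out_indices": [0, 1, 2, 3]})
plain = OmegaConf.to_container(cfg, resolve=True)  # plain dict; interpolations resolved
print(type(plain))                                 # <class 'dict'>
# mmcv.utils.ConfigDict(plain) is what _to_container returns, as in the code below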
+import itertools +import logging +import numpy as np +from collections import OrderedDict +from collections.abc import Mapping +from typing import Dict, List, Optional, Tuple, Union +import torch +from omegaconf import DictConfig, OmegaConf +from torch import Tensor, nn + +from detectron2.layers import ShapeSpec +from detectron2.structures import BitMasks, Boxes, ImageList, Instances +from detectron2.utils.events import get_event_storage + +from .backbone import Backbone + +logger = logging.getLogger(__name__) + + +def _to_container(cfg): + """ + mmdet will assert the type of dict/list. + So convert omegaconf objects to dict/list. + """ + if isinstance(cfg, DictConfig): + cfg = OmegaConf.to_container(cfg, resolve=True) + from mmcv.utils import ConfigDict + + return ConfigDict(cfg) + + +class MMDetBackbone(Backbone): + """ + Wrapper of mmdetection backbones to use in detectron2. + + mmdet backbones produce list/tuple of tensors, while detectron2 backbones + produce a dict of tensors. This class wraps the given backbone to produce + output in detectron2's convention, so it can be used in place of detectron2 + backbones. + """ + + def __init__( + self, + backbone: Union[nn.Module, Mapping], + neck: Union[nn.Module, Mapping, None] = None, + *, + output_shapes: List[ShapeSpec], + output_names: Optional[List[str]] = None, + ): + """ + Args: + backbone: either a backbone module or a mmdet config dict that defines a + backbone. The backbone takes a 4D image tensor and returns a + sequence of tensors. + neck: either a backbone module or a mmdet config dict that defines a + neck. The neck takes outputs of backbone and returns a + sequence of tensors. If None, no neck is used. + pretrained_backbone: defines the backbone weights that can be loaded by + mmdet, such as "torchvision://resnet50". + output_shapes: shape for every output of the backbone (or neck, if given). + stride and channels are often needed. + output_names: names for every output of the backbone (or neck, if given). + By default, will use "out0", "out1", ... + """ + super().__init__() + if isinstance(backbone, Mapping): + from mmdet.models import build_backbone + + backbone = build_backbone(_to_container(backbone)) + self.backbone = backbone + + if isinstance(neck, Mapping): + from mmdet.models import build_neck + + neck = build_neck(_to_container(neck)) + self.neck = neck + + # "Neck" weights, if any, are part of neck itself. This is the interface + # of mmdet so we follow it. Reference: + # https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/detectors/two_stage.py + logger.info("Initializing mmdet backbone weights...") + self.backbone.init_weights() + # train() in mmdet modules is non-trivial, and has to be explicitly + # called. Reference: + # https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/backbones/resnet.py + self.backbone.train() + if self.neck is not None: + logger.info("Initializing mmdet neck weights ...") + if isinstance(self.neck, nn.Sequential): + for m in self.neck: + m.init_weights() + else: + self.neck.init_weights() + self.neck.train() + + self._output_shapes = output_shapes + if not output_names: + output_names = [f"out{i}" for i in range(len(output_shapes))] + self._output_names = output_names + + def forward(self, x) -> Dict[str, Tensor]: + outs = self.backbone(x) + if self.neck is not None: + outs = self.neck(outs) + assert isinstance( + outs, (list, tuple) + ), "mmdet backbone should return a list/tuple of tensors!" 
+ if len(outs) != len(self._output_shapes): + raise ValueError( + "Length of output_shapes does not match outputs from the mmdet backbone: " + f"{len(outs)} != {len(self._output_shapes)}" + ) + return {k: v for k, v in zip(self._output_names, outs)} + + def output_shape(self) -> Dict[str, ShapeSpec]: + return {k: v for k, v in zip(self._output_names, self._output_shapes)} + + +class MMDetDetector(nn.Module): + """ + Wrapper of a mmdetection detector model, for detection and instance segmentation. + Input/output formats of this class follow detectron2's convention, so a + mmdetection model can be trained and evaluated in detectron2. + """ + + def __init__( + self, + detector: Union[nn.Module, Mapping], + *, + # Default is 32 regardless of model: + # https://github.com/open-mmlab/mmdetection/tree/master/configs/_base_/datasets + size_divisibility=32, + pixel_mean: Tuple[float], + pixel_std: Tuple[float], + ): + """ + Args: + detector: a mmdet detector, or a mmdet config dict that defines a detector. + size_divisibility: pad input images to multiple of this number + pixel_mean: per-channel mean to normalize input image + pixel_std: per-channel stddev to normalize input image + """ + super().__init__() + if isinstance(detector, Mapping): + from mmdet.models import build_detector + + detector = build_detector(_to_container(detector)) + self.detector = detector + self.size_divisibility = size_divisibility + + self.register_buffer("pixel_mean", torch.tensor(pixel_mean).view(-1, 1, 1), False) + self.register_buffer("pixel_std", torch.tensor(pixel_std).view(-1, 1, 1), False) + assert ( + self.pixel_mean.shape == self.pixel_std.shape + ), f"{self.pixel_mean} and {self.pixel_std} have different shapes!" + + def forward(self, batched_inputs: List[Dict[str, torch.Tensor]]): + images = [x["image"].to(self.device) for x in batched_inputs] + images = [(x - self.pixel_mean) / self.pixel_std for x in images] + images = ImageList.from_tensors(images, size_divisibility=self.size_divisibility).tensor + metas = [] + rescale = {"height" in x for x in batched_inputs} + if len(rescale) != 1: + raise ValueError("Some inputs have original height/width, but some don't!") + rescale = list(rescale)[0] + output_shapes = [] + for input in batched_inputs: + meta = {} + c, h, w = input["image"].shape + meta["img_shape"] = meta["ori_shape"] = (h, w, c) + if rescale: + scale_factor = np.array( + [w / input["width"], h / input["height"]] * 2, dtype="float32" + ) + ori_shape = (input["height"], input["width"]) + output_shapes.append(ori_shape) + meta["ori_shape"] = ori_shape + (c,) + else: + scale_factor = 1.0 + output_shapes.append((h, w)) + meta["scale_factor"] = scale_factor + meta["flip"] = False + padh, padw = images.shape[-2:] + meta["pad_shape"] = (padh, padw, c) + metas.append(meta) + + if self.training: + gt_instances = [x["instances"].to(self.device) for x in batched_inputs] + if gt_instances[0].has("gt_masks"): + from mmdet.core import PolygonMasks as mm_PolygonMasks, BitmapMasks as mm_BitMasks + + def convert_mask(m, shape): + # mmdet mask format + if isinstance(m, BitMasks): + return mm_BitMasks(m.tensor.cpu().numpy(), shape[0], shape[1]) + else: + return mm_PolygonMasks(m.polygons, shape[0], shape[1]) + + gt_masks = [convert_mask(x.gt_masks, x.image_size) for x in gt_instances] + losses_and_metrics = self.detector.forward_train( + images, + metas, + [x.gt_boxes.tensor for x in gt_instances], + [x.gt_classes for x in gt_instances], + gt_masks=gt_masks, + ) + else: + losses_and_metrics = 
self.detector.forward_train( + images, + metas, + [x.gt_boxes.tensor for x in gt_instances], + [x.gt_classes for x in gt_instances], + ) + return _parse_losses(losses_and_metrics) + else: + results = self.detector.simple_test(images, metas, rescale=rescale) + results = [ + {"instances": _convert_mmdet_result(r, shape)} + for r, shape in zip(results, output_shapes) + ] + return results + + @property + def device(self): + return self.pixel_mean.device + + +# Reference: show_result() in +# https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/detectors/base.py +def _convert_mmdet_result(result, shape: Tuple[int, int]) -> Instances: + if isinstance(result, tuple): + bbox_result, segm_result = result + if isinstance(segm_result, tuple): + segm_result = segm_result[0] + else: + bbox_result, segm_result = result, None + + bboxes = torch.from_numpy(np.vstack(bbox_result)) # Nx5 + bboxes, scores = bboxes[:, :4], bboxes[:, -1] + labels = [ + torch.full((bbox.shape[0],), i, dtype=torch.int32) for i, bbox in enumerate(bbox_result) + ] + labels = torch.cat(labels) + inst = Instances(shape) + inst.pred_boxes = Boxes(bboxes) + inst.scores = scores + inst.pred_classes = labels + + if segm_result is not None and len(labels) > 0: + segm_result = list(itertools.chain(*segm_result)) + segm_result = [torch.from_numpy(x) if isinstance(x, np.ndarray) else x for x in segm_result] + segm_result = torch.stack(segm_result, dim=0) + inst.pred_masks = segm_result + return inst + + +# reference: https://github.com/open-mmlab/mmdetection/blob/master/mmdet/models/detectors/base.py +def _parse_losses(losses: Dict[str, Tensor]) -> Dict[str, Tensor]: + log_vars = OrderedDict() + for loss_name, loss_value in losses.items(): + if isinstance(loss_value, torch.Tensor): + log_vars[loss_name] = loss_value.mean() + elif isinstance(loss_value, list): + log_vars[loss_name] = sum(_loss.mean() for _loss in loss_value) + else: + raise TypeError(f"{loss_name} is not a tensor or list of tensors") + + if "loss" not in loss_name: + # put metrics to storage; don't return them + storage = get_event_storage() + value = log_vars.pop(loss_name).cpu().item() + storage.put_scalar(loss_name, value) + return log_vars diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/poolers.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/poolers.py new file mode 100644 index 0000000..6bea77a --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/poolers.py @@ -0,0 +1,245 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import math +from typing import List +import torch +from torch import nn +from torchvision.ops import RoIPool + +from detectron2.layers import ROIAlign, ROIAlignRotated, cat, nonzero_tuple, shapes_to_tensor +from detectron2.structures import Boxes + +""" +To export ROIPooler to torchscript, in this file, variables that should be annotated with +`Union[List[Boxes], List[RotatedBoxes]]` are only annotated with `List[Boxes]`. + +TODO: Correct these annotations when torchscript support `Union`. +https://github.com/pytorch/pytorch/issues/41412 +""" + +__all__ = ["ROIPooler"] + + +def assign_boxes_to_levels( + box_lists: List[Boxes], + min_level: int, + max_level: int, + canonical_box_size: int, + canonical_level: int, +): + """ + Map each box in `box_lists` to a feature map level index and return the assignment + vector. + + Args: + box_lists (list[Boxes] | list[RotatedBoxes]): A list of N Boxes or N RotatedBoxes, + where N is the number of images in the batch. 
+        min_level (int): Smallest feature map level index. The input is considered index 0,
+            the output of stage 1 is index 1, and so on.
+        max_level (int): Largest feature map level index.
+        canonical_box_size (int): A canonical box size in pixels (sqrt(box area)).
+        canonical_level (int): The feature map level index on which a canonically-sized box
+            should be placed.
+
+    Returns:
+        A tensor of length M, where M is the total number of boxes aggregated over all
+            N batch images. The memory layout corresponds to the concatenation of boxes
+            from all images. Each element is the feature map index, as an offset from
+            `self.min_level`, for the corresponding box (so value i means the box is at
+            `self.min_level + i`).
+    """
+    box_sizes = torch.sqrt(cat([boxes.area() for boxes in box_lists]))
+    # Eqn.(1) in FPN paper
+    level_assignments = torch.floor(
+        canonical_level + torch.log2(box_sizes / canonical_box_size + 1e-8)
+    )
+    # clamp level to (min, max), in case the box size is too large or too small
+    # for the available feature maps
+    level_assignments = torch.clamp(level_assignments, min=min_level, max=max_level)
+    return level_assignments.to(torch.int64) - min_level
+
+
+def convert_boxes_to_pooler_format(box_lists: List[Boxes]):
+    """
+    Convert all boxes in `box_lists` to the low-level format used by ROI pooling ops
+    (see description under Returns).
+
+    Args:
+        box_lists (list[Boxes] | list[RotatedBoxes]):
+            A list of N Boxes or N RotatedBoxes, where N is the number of images in the batch.
+
+    Returns:
+        When input is list[Boxes]:
+            A tensor of shape (M, 5), where M is the total number of boxes aggregated over all
+            N batch images.
+            The 5 columns are (batch index, x0, y0, x1, y1), where batch index
+            is the index in [0, N) identifying which batch image the box with corners at
+            (x0, y0, x1, y1) comes from.
+        When input is list[RotatedBoxes]:
+            A tensor of shape (M, 6), where M is the total number of boxes aggregated over all
+            N batch images.
+            The 6 columns are (batch index, x_ctr, y_ctr, width, height, angle_degrees),
+            where batch index is the index in [0, N) identifying which batch image the
+            rotated box (x_ctr, y_ctr, width, height, angle_degrees) comes from.
+    """
+    boxes = torch.cat([x.tensor for x in box_lists], dim=0)
+    # __len__ returns Tensor in tracing.
+    sizes = shapes_to_tensor([x.__len__() for x in box_lists], device=boxes.device)
+    indices = torch.repeat_interleave(
+        torch.arange(len(box_lists), dtype=boxes.dtype, device=boxes.device), sizes
+    )
+    return cat([indices[:, None], boxes], dim=1)
+
+
+class ROIPooler(nn.Module):
+    """
+    Region of interest feature map pooler that supports pooling from one or more
+    feature maps.
+    """
+
+    def __init__(
+        self,
+        output_size,
+        scales,
+        sampling_ratio,
+        pooler_type,
+        canonical_box_size=224,
+        canonical_level=4,
+    ):
+        """
+        Args:
+            output_size (int, tuple[int] or list[int]): output size of the pooled region,
+                e.g., 14 x 14. If tuple or list is given, the length must be 2.
+            scales (list[float]): The scale for each low-level pooling op relative to
+                the input image. For a feature map with stride s relative to the input
+                image, scale is defined as 1/s. The stride must be power of 2.
+                When there are multiple scales, they must form a pyramid, i.e. they must be
+                a monotonically decreasing geometric sequence with a factor of 1/2.
+            sampling_ratio (int): The `sampling_ratio` parameter for the ROIAlign op.
+            pooler_type (string): Name of the type of pooling operation that should be applied.
+                For instance, "ROIPool" or "ROIAlignV2".
+ canonical_box_size (int): A canonical box size in pixels (sqrt(box area)). The default + is heuristically defined as 224 pixels in the FPN paper (based on ImageNet + pre-training). + canonical_level (int): The feature map level index from which a canonically-sized box + should be placed. The default is defined as level 4 (stride=16) in the FPN paper, + i.e., a box of size 224x224 will be placed on the feature with stride=16. + The box placement for all boxes will be determined from their sizes w.r.t + canonical_box_size. For example, a box whose area is 4x that of a canonical box + should be used to pool features from feature level ``canonical_level+1``. + + Note that the actual input feature maps given to this module may not have + sufficiently many levels for the input boxes. If the boxes are too large or too + small for the input feature maps, the closest level will be used. + """ + super().__init__() + + if isinstance(output_size, int): + output_size = (output_size, output_size) + assert len(output_size) == 2 + assert isinstance(output_size[0], int) and isinstance(output_size[1], int) + self.output_size = output_size + + if pooler_type == "ROIAlign": + self.level_poolers = nn.ModuleList( + ROIAlign( + output_size, spatial_scale=scale, sampling_ratio=sampling_ratio, aligned=False + ) + for scale in scales + ) + elif pooler_type == "ROIAlignV2": + self.level_poolers = nn.ModuleList( + ROIAlign( + output_size, spatial_scale=scale, sampling_ratio=sampling_ratio, aligned=True + ) + for scale in scales + ) + elif pooler_type == "ROIPool": + self.level_poolers = nn.ModuleList( + RoIPool(output_size, spatial_scale=scale) for scale in scales + ) + elif pooler_type == "ROIAlignRotated": + self.level_poolers = nn.ModuleList( + ROIAlignRotated(output_size, spatial_scale=scale, sampling_ratio=sampling_ratio) + for scale in scales + ) + else: + raise ValueError("Unknown pooler type: {}".format(pooler_type)) + + # Map scale (defined as 1 / stride) to its feature map level under the + # assumption that stride is a power of 2. + min_level = -(math.log2(scales[0])) + max_level = -(math.log2(scales[-1])) + assert math.isclose(min_level, int(min_level)) and math.isclose( + max_level, int(max_level) + ), "Featuremap stride is not power of 2!" + self.min_level = int(min_level) + self.max_level = int(max_level) + assert ( + len(scales) == self.max_level - self.min_level + 1 + ), "[ROIPooler] Sizes of input featuremaps do not form a pyramid!" + assert 0 <= self.min_level and self.min_level <= self.max_level + self.canonical_level = canonical_level + assert canonical_box_size > 0 + self.canonical_box_size = canonical_box_size + + def forward(self, x: List[torch.Tensor], box_lists: List[Boxes]): + """ + Args: + x (list[Tensor]): A list of feature maps of NCHW shape, with scales matching those + used to construct this module. + box_lists (list[Boxes] | list[RotatedBoxes]): + A list of N Boxes or N RotatedBoxes, where N is the number of images in the batch. + The box coordinates are defined on the original image and + will be scaled by the `scales` argument of :class:`ROIPooler`. + + Returns: + Tensor: + A tensor of shape (M, C, output_size, output_size) where M is the total number of + boxes aggregated over all N batch images and C is the number of channels in `x`. 
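To make the level-assignment arithmetic concrete, the FPN Eqn.(1) computation in `assign_boxes_to_levels` (defined at the top of this file) can be traced on toy box sizes, assuming min_level=2, max_level=5 and the canonical 224px/level-4 defaults:

import torch

canonical_level, canonical_box_size = 4, 224
min_level, max_level = 2, 5
box_sizes = torch.tensor([112.0, 224.0, 448.0, 896.0])   # sqrt(box area) in pixels
levels = torch.floor(canonical_level + torch.log2(box_sizes / canonical_box_size + 1e-8))
levels = torch.clamp(levels, min=min_level, max=max_level)
print(levels.to(torch.int64) - min_level)   # tensor([1, 2, 3, 3]): offsets from min_level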
+ """ + num_level_assignments = len(self.level_poolers) + + assert isinstance(x, list) and isinstance( + box_lists, list + ), "Arguments to pooler must be lists" + assert ( + len(x) == num_level_assignments + ), "unequal value, num_level_assignments={}, but x is list of {} Tensors".format( + num_level_assignments, len(x) + ) + + assert len(box_lists) == x[0].size( + 0 + ), "unequal value, x[0] batch dim 0 is {}, but box_list has length {}".format( + x[0].size(0), len(box_lists) + ) + if len(box_lists) == 0: + return torch.zeros( + (0, x[0].shape[1]) + self.output_size, device=x[0].device, dtype=x[0].dtype + ) + + pooler_fmt_boxes = convert_boxes_to_pooler_format(box_lists) + + if num_level_assignments == 1: + return self.level_poolers[0](x[0], pooler_fmt_boxes) + + level_assignments = assign_boxes_to_levels( + box_lists, self.min_level, self.max_level, self.canonical_box_size, self.canonical_level + ) + + num_boxes = pooler_fmt_boxes.size(0) + num_channels = x[0].shape[1] + output_size = self.output_size[0] + + dtype, device = x[0].dtype, x[0].device + output = torch.zeros( + (num_boxes, num_channels, output_size, output_size), dtype=dtype, device=device + ) + + for level, pooler in enumerate(self.level_poolers): + inds = nonzero_tuple(level_assignments == level)[0] + pooler_fmt_boxes_level = pooler_fmt_boxes[inds] + # Use index_put_ instead of advance indexing, to avoid pytorch/issues/49852 + output.index_put_((inds,), pooler(x[level], pooler_fmt_boxes_level)) + + return output diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/postprocessing.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/postprocessing.py new file mode 100644 index 0000000..52f273b --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/postprocessing.py @@ -0,0 +1,101 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import torch +from torch.nn import functional as F + +from detectron2.structures import Instances, ROIMasks + + +# perhaps should rename to "resize_instance" +def detector_postprocess( + results: Instances, output_height: int, output_width: int, mask_threshold: float = 0.5 +): + """ + Resize the output instances. + The input images are often resized when entering an object detector. + As a result, we often need the outputs of the detector in a different + resolution from its inputs. + + This function will resize the raw outputs of an R-CNN detector + to produce outputs according to the desired output resolution. + + Args: + results (Instances): the raw outputs from the detector. + `results.image_size` contains the input image resolution the detector sees. + This object might be modified in-place. + output_height, output_width: the desired output resolution. + + Returns: + Instances: the resized output from the model, based on the output resolution + """ + if isinstance(output_width, torch.Tensor): + # This shape might (but not necessarily) be tensors during tracing. + # Converts integer tensors to float temporaries to ensure true + # division is performed when computing scale_x and scale_y. 
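For a concrete sense of the rescaling computed in the lines that follow, here is the same transform on toy numbers (a hypothetical 800x1333 network input restored to a 480x640 original):

import torch
from detectron2.structures import Boxes

boxes = Boxes(torch.tensor([[100.0, 100.0, 300.0, 200.0]]))  # predicted at 800x1333
scale_x, scale_y = 640 / 1333, 480 / 800
boxes.scale(scale_x, scale_y)
print(boxes.tensor)   # roughly [[48.0, 60.0, 144.0, 120.0]] in 480x640 coordinates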
+        output_width_tmp = output_width.float()
+        output_height_tmp = output_height.float()
+        new_size = torch.stack([output_height, output_width])
+    else:
+        new_size = (output_height, output_width)
+        output_width_tmp = output_width
+        output_height_tmp = output_height
+
+    scale_x, scale_y = (
+        output_width_tmp / results.image_size[1],
+        output_height_tmp / results.image_size[0],
+    )
+    results = Instances(new_size, **results.get_fields())
+
+    if results.has("pred_boxes"):
+        output_boxes = results.pred_boxes
+    elif results.has("proposal_boxes"):
+        output_boxes = results.proposal_boxes
+    else:
+        output_boxes = None
+    assert output_boxes is not None, "Predictions must contain boxes!"
+
+    output_boxes.scale(scale_x, scale_y)
+    output_boxes.clip(results.image_size)
+
+    results = results[output_boxes.nonempty()]
+
+    if results.has("pred_masks"):
+        if isinstance(results.pred_masks, ROIMasks):
+            roi_masks = results.pred_masks
+        else:
+            # pred_masks is a tensor of shape (N, 1, M, M)
+            roi_masks = ROIMasks(results.pred_masks[:, 0, :, :])
+        results.pred_masks = roi_masks.to_bitmasks(
+            results.pred_boxes, output_height, output_width, mask_threshold
+        ).tensor  # TODO return ROIMasks/BitMask object in the future
+
+    if results.has("pred_keypoints"):
+        results.pred_keypoints[:, :, 0] *= scale_x
+        results.pred_keypoints[:, :, 1] *= scale_y
+
+    return results
+
+
+def sem_seg_postprocess(result, img_size, output_height, output_width):
+    """
+    Return semantic segmentation predictions in the original resolution.
+
+    The input images are often resized when entering the semantic segmentor. Moreover, in
+    some cases, they are also padded inside the segmentor to be divisible by the maximum
+    network stride. As a result, we often need the predictions of the segmentor in a
+    different resolution from its inputs.
+
+    Args:
+        result (Tensor): semantic segmentation prediction logits. A tensor of shape (C, H, W),
+            where C is the number of classes, and H, W are the height and width of the prediction.
+        img_size (tuple): image size that segmentor is taking as input.
+        output_height, output_width: the desired output resolution.
+
+    Returns:
+        semantic segmentation prediction (Tensor): A tensor of the shape
+            (C, output_height, output_width) that contains per-pixel soft predictions.
+    """
+    result = result[:, : img_size[0], : img_size[1]].expand(1, -1, -1, -1)
+    result = F.interpolate(
+        result, size=(output_height, output_width), mode="bilinear", align_corners=False
+    )[0]
+    return result
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/proposal_generator/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/proposal_generator/__init__.py
new file mode 100644
index 0000000..3f4e4df
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/proposal_generator/__init__.py
@@ -0,0 +1,5 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+from .build import PROPOSAL_GENERATOR_REGISTRY, build_proposal_generator
+from .rpn import RPN_HEAD_REGISTRY, build_rpn_head, RPN, StandardRPNHead
+
+__all__ = list(globals().keys())
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/proposal_generator/build.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/proposal_generator/build.py
new file mode 100644
index 0000000..34eb12d
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/proposal_generator/build.py
@@ -0,0 +1,24 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
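The registry pattern used by `build_proposal_generator` below (and by the other `build_*` helpers in this diff) resolves a config name to a registered class and instantiates it with `(cfg, input_shape)`. A toy illustration with hypothetical names:

from detectron2.utils.registry import Registry

TOY_REGISTRY = Registry("TOY_GENERATOR")

@TOY_REGISTRY.register()
class MyProposalGenerator:
    def __init__(self, cfg, input_shape):
        self.cfg, self.input_shape = cfg, input_shape

obj = TOY_REGISTRY.get("MyProposalGenerator")(cfg=None, input_shape=None)
print(type(obj).__name__)   # MyProposalGenerator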
+from detectron2.utils.registry import Registry + +PROPOSAL_GENERATOR_REGISTRY = Registry("PROPOSAL_GENERATOR") +PROPOSAL_GENERATOR_REGISTRY.__doc__ = """ +Registry for proposal generator, which produces object proposals from feature maps. + +The registered object will be called with `obj(cfg, input_shape)`. +The call should return a `nn.Module` object. +""" + +from . import rpn, rrpn # noqa F401 isort:skip + + +def build_proposal_generator(cfg, input_shape): + """ + Build a proposal generator from `cfg.MODEL.PROPOSAL_GENERATOR.NAME`. + The name can be "PrecomputedProposals" to use no proposal generator. + """ + name = cfg.MODEL.PROPOSAL_GENERATOR.NAME + if name == "PrecomputedProposals": + return None + + return PROPOSAL_GENERATOR_REGISTRY.get(name)(cfg, input_shape) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/proposal_generator/proposal_utils.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/proposal_generator/proposal_utils.py new file mode 100644 index 0000000..4703219 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/proposal_generator/proposal_utils.py @@ -0,0 +1,196 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import logging +import math +from typing import List, Tuple, Union +import torch + +from detectron2.layers import batched_nms, cat +from detectron2.structures import Boxes, Instances + +logger = logging.getLogger(__name__) + + +def _is_tracing(): + # (fixed in TORCH_VERSION >= 1.9) + if torch.jit.is_scripting(): + # https://github.com/pytorch/pytorch/issues/47379 + return False + else: + return torch.jit.is_tracing() + + +def find_top_rpn_proposals( + proposals: List[torch.Tensor], + pred_objectness_logits: List[torch.Tensor], + image_sizes: List[Tuple[int, int]], + nms_thresh: float, + pre_nms_topk: int, + post_nms_topk: int, + min_box_size: float, + training: bool, +): + """ + For each feature map, select the `pre_nms_topk` highest scoring proposals, + apply NMS, clip proposals, and remove small boxes. Return the `post_nms_topk` + highest scoring proposals among all the feature maps for each image. + + Args: + proposals (list[Tensor]): A list of L tensors. Tensor i has shape (N, Hi*Wi*A, 4). + All proposal predictions on the feature maps. + pred_objectness_logits (list[Tensor]): A list of L tensors. Tensor i has shape (N, Hi*Wi*A). + image_sizes (list[tuple]): sizes (h, w) for each image + nms_thresh (float): IoU threshold to use for NMS + pre_nms_topk (int): number of top k scoring proposals to keep before applying NMS. + When RPN is run on multiple feature maps (as in FPN) this number is per + feature map. + post_nms_topk (int): number of top k scoring proposals to keep after applying NMS. + When RPN is run on multiple feature maps (as in FPN) this number is total, + over all feature maps. + min_box_size (float): minimum proposal box side length in pixels (absolute units + wrt input images). + training (bool): True if proposals are to be used in training, otherwise False. + This arg exists only to support a legacy bug; look for the "NB: Legacy bug ..." + comment. + + Returns: + list[Instances]: list of N Instances. The i-th Instances + stores post_nms_topk object proposals for image i, sorted by their + objectness score in descending order. + """ + num_images = len(image_sizes) + device = proposals[0].device + + # 1. 
Select top-k anchor for every level and every image + topk_scores = [] # #lvl Tensor, each of shape N x topk + topk_proposals = [] + level_ids = [] # #lvl Tensor, each of shape (topk,) + batch_idx = torch.arange(num_images, device=device) + for level_id, (proposals_i, logits_i) in enumerate(zip(proposals, pred_objectness_logits)): + Hi_Wi_A = logits_i.shape[1] + if isinstance(Hi_Wi_A, torch.Tensor): # it's a tensor in tracing + num_proposals_i = torch.clamp(Hi_Wi_A, max=pre_nms_topk) + else: + num_proposals_i = min(Hi_Wi_A, pre_nms_topk) + + topk_scores_i, topk_idx = logits_i.topk(num_proposals_i, dim=1) + + # each is N x topk + topk_proposals_i = proposals_i[batch_idx[:, None], topk_idx] # N x topk x 4 + + topk_proposals.append(topk_proposals_i) + topk_scores.append(topk_scores_i) + level_ids.append(torch.full((num_proposals_i,), level_id, dtype=torch.int64, device=device)) + + # 2. Concat all levels together + topk_scores = cat(topk_scores, dim=1) + topk_proposals = cat(topk_proposals, dim=1) + level_ids = cat(level_ids, dim=0) + + # 3. For each image, run a per-level NMS, and choose topk results. + results: List[Instances] = [] + for n, image_size in enumerate(image_sizes): + boxes = Boxes(topk_proposals[n]) + scores_per_img = topk_scores[n] + lvl = level_ids + + valid_mask = torch.isfinite(boxes.tensor).all(dim=1) & torch.isfinite(scores_per_img) + if not valid_mask.all(): + if training: + raise FloatingPointError( + "Predicted boxes or scores contain Inf/NaN. Training has diverged." + ) + boxes = boxes[valid_mask] + scores_per_img = scores_per_img[valid_mask] + lvl = lvl[valid_mask] + boxes.clip(image_size) + + # filter empty boxes + keep = boxes.nonempty(threshold=min_box_size) + if _is_tracing() or keep.sum().item() != len(boxes): + boxes, scores_per_img, lvl = boxes[keep], scores_per_img[keep], lvl[keep] + + keep = batched_nms(boxes.tensor, scores_per_img, lvl, nms_thresh) + # In Detectron1, there was different behavior during training vs. testing. + # (https://github.com/facebookresearch/Detectron/issues/459) + # During training, topk is over the proposals from *all* images in the training batch. + # During testing, it is over the proposals for each image separately. + # As a result, the training behavior becomes batch-dependent, + # and the configuration "POST_NMS_TOPK_TRAIN" end up relying on the batch size. + # This bug is addressed in Detectron2 to make the behavior independent of batch size. + keep = keep[:post_nms_topk] # keep is already sorted + + res = Instances(image_size) + res.proposal_boxes = boxes[keep] + res.objectness_logits = scores_per_img[keep] + results.append(res) + return results + + +def add_ground_truth_to_proposals( + gt: Union[List[Instances], List[Boxes]], proposals: List[Instances] +) -> List[Instances]: + """ + Call `add_ground_truth_to_proposals_single_image` for all images. + + Args: + gt(Union[List[Instances], List[Boxes]): list of N elements. Element i is a Instances + representing the ground-truth for image i. + proposals (list[Instances]): list of N elements. Element i is a Instances + representing the proposals for image i. + + Returns: + list[Instances]: list of N Instances. Each is the proposals for the image, + with field "proposal_boxes" and "objectness_logits". 
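The single-image helper below gives every ground-truth box an objectness logit chosen so that sigmoid(logit) is numerically 1; the constant works out as follows:

import math

gt_logit_value = math.log((1.0 - 1e-10) / (1 - (1.0 - 1e-10)))
print(round(gt_logit_value, 2))                  # 23.03
print(1.0 / (1.0 + math.exp(-gt_logit_value)))   # 1.0 to float precision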
+ """ + assert gt is not None + + if len(proposals) != len(gt): + raise ValueError("proposals and gt should have the same length as the number of images!") + if len(proposals) == 0: + return proposals + + return [ + add_ground_truth_to_proposals_single_image(gt_i, proposals_i) + for gt_i, proposals_i in zip(gt, proposals) + ] + + +def add_ground_truth_to_proposals_single_image( + gt: Union[Instances, Boxes], proposals: Instances +) -> Instances: + """ + Augment `proposals` with `gt`. + + Args: + Same as `add_ground_truth_to_proposals`, but with gt and proposals + per image. + + Returns: + Same as `add_ground_truth_to_proposals`, but for only one image. + """ + if isinstance(gt, Boxes): + # convert Boxes to Instances + gt = Instances(proposals.image_size, gt_boxes=gt) + + gt_boxes = gt.gt_boxes + device = proposals.objectness_logits.device + # Assign all ground-truth boxes an objectness logit corresponding to + # P(object) = sigmoid(logit) =~ 1. + gt_logit_value = math.log((1.0 - 1e-10) / (1 - (1.0 - 1e-10))) + gt_logits = gt_logit_value * torch.ones(len(gt_boxes), device=device) + + # Concatenating gt_boxes with proposals requires them to have the same fields + gt_proposal = Instances(proposals.image_size, **gt.get_fields()) + gt_proposal.proposal_boxes = gt_boxes + gt_proposal.objectness_logits = gt_logits + + for key in proposals.get_fields().keys(): + assert gt_proposal.has( + key + ), "The attribute '{}' in `proposals` does not exist in `gt`".format(key) + + # NOTE: Instances.cat only use fields from the first item. Extra fields in latter items + # will be thrown away. + new_proposals = Instances.cat([proposals, gt_proposal]) + + return new_proposals diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/proposal_generator/rpn.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/proposal_generator/rpn.py new file mode 100644 index 0000000..99cd536 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/proposal_generator/rpn.py @@ -0,0 +1,533 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from typing import Dict, List, Optional, Tuple, Union +import torch +import torch.nn.functional as F +from torch import nn + +from detectron2.config import configurable +from detectron2.layers import Conv2d, ShapeSpec, cat +from detectron2.structures import Boxes, ImageList, Instances, pairwise_iou +from detectron2.utils.events import get_event_storage +from detectron2.utils.memory import retry_if_cuda_oom +from detectron2.utils.registry import Registry + +from ..anchor_generator import build_anchor_generator +from ..box_regression import Box2BoxTransform, _dense_box_regression_loss +from ..matcher import Matcher +from ..sampling import subsample_labels +from .build import PROPOSAL_GENERATOR_REGISTRY +from .proposal_utils import find_top_rpn_proposals + +RPN_HEAD_REGISTRY = Registry("RPN_HEAD") +RPN_HEAD_REGISTRY.__doc__ = """ +Registry for RPN heads, which take feature maps and perform +objectness classification and bounding box regression for anchors. + +The registered object will be called with `obj(cfg, input_shape)`. +The call should return a `nn.Module` object. 
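Heads registered here return, for each feature map, objectness logits of shape (N, A, Hi, Wi) and anchor deltas of shape (N, A*box_dim, Hi, Wi). A minimal toy head showing those shapes (an illustration, not the StandardRPNHead defined below):

import torch
from torch import nn

class ToyRPNHead(nn.Module):
    def __init__(self, in_channels=256, num_anchors=3, box_dim=4):
        super().__init__()
        self.objectness_logits = nn.Conv2d(in_channels, num_anchors, kernel_size=1)
        self.anchor_deltas = nn.Conv2d(in_channels, num_anchors * box_dim, kernel_size=1)

    def forward(self, features):
        return (
            [self.objectness_logits(x) for x in features],
            [self.anchor_deltas(x) for x in features],
        )

logits, deltas = ToyRPNHead()([torch.randn(2, 256, 32, 32)])
print(logits[0].shape, deltas[0].shape)   # (2, 3, 32, 32) and (2, 12, 32, 32)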
+""" + + +""" +Shape shorthand in this module: + + N: number of images in the minibatch + L: number of feature maps per image on which RPN is run + A: number of cell anchors (must be the same for all feature maps) + Hi, Wi: height and width of the i-th feature map + B: size of the box parameterization + +Naming convention: + + objectness: refers to the binary classification of an anchor as object vs. not object. + + deltas: refers to the 4-d (dx, dy, dw, dh) deltas that parameterize the box2box + transform (see :class:`box_regression.Box2BoxTransform`), or 5d for rotated boxes. + + pred_objectness_logits: predicted objectness scores in [-inf, +inf]; use + sigmoid(pred_objectness_logits) to estimate P(object). + + gt_labels: ground-truth binary classification labels for objectness + + pred_anchor_deltas: predicted box2box transform deltas + + gt_anchor_deltas: ground-truth box2box transform deltas +""" + + +def build_rpn_head(cfg, input_shape): + """ + Build an RPN head defined by `cfg.MODEL.RPN.HEAD_NAME`. + """ + name = cfg.MODEL.RPN.HEAD_NAME + return RPN_HEAD_REGISTRY.get(name)(cfg, input_shape) + + +@RPN_HEAD_REGISTRY.register() +class StandardRPNHead(nn.Module): + """ + Standard RPN classification and regression heads described in :paper:`Faster R-CNN`. + Uses a 3x3 conv to produce a shared hidden state from which one 1x1 conv predicts + objectness logits for each anchor and a second 1x1 conv predicts bounding-box deltas + specifying how to deform each anchor into an object proposal. + """ + + @configurable + def __init__( + self, *, in_channels: int, num_anchors: int, box_dim: int = 4, conv_dims: List[int] = (-1,) + ): + """ + NOTE: this interface is experimental. + + Args: + in_channels (int): number of input feature channels. When using multiple + input features, they must have the same number of channels. + num_anchors (int): number of anchors to predict for *each spatial position* + on the feature map. The total number of anchors for each + feature map will be `num_anchors * H * W`. + box_dim (int): dimension of a box, which is also the number of box regression + predictions to make for each anchor. An axis aligned box has + box_dim=4, while a rotated box has box_dim=5. + conv_dims (list[int]): a list of integers representing the output channels + of N conv layers. Set it to -1 to use the same number of output channels + as input channels. + """ + super().__init__() + cur_channels = in_channels + # Keeping the old variable names and structure for backwards compatiblity. + # Otherwise the old checkpoints will fail to load. + if len(conv_dims) == 1: + out_channels = cur_channels if conv_dims[0] == -1 else conv_dims[0] + # 3x3 conv for the hidden representation + self.conv = self._get_rpn_conv(cur_channels, out_channels) + cur_channels = out_channels + else: + self.conv = nn.Sequential() + for k, conv_dim in enumerate(conv_dims): + out_channels = cur_channels if conv_dim == -1 else conv_dim + if out_channels <= 0: + raise ValueError( + f"Conv output channels should be greater than 0. 
Got {out_channels}" + ) + conv = self._get_rpn_conv(cur_channels, out_channels) + self.conv.add_module(f"conv{k}", conv) + cur_channels = out_channels + # 1x1 conv for predicting objectness logits + self.objectness_logits = nn.Conv2d(cur_channels, num_anchors, kernel_size=1, stride=1) + # 1x1 conv for predicting box2box transform deltas + self.anchor_deltas = nn.Conv2d(cur_channels, num_anchors * box_dim, kernel_size=1, stride=1) + + # Keeping the order of weights initialization same for backwards compatiblility. + for layer in self.modules(): + if isinstance(layer, nn.Conv2d): + nn.init.normal_(layer.weight, std=0.01) + nn.init.constant_(layer.bias, 0) + + def _get_rpn_conv(self, in_channels, out_channels): + return Conv2d( + in_channels, + out_channels, + kernel_size=3, + stride=1, + padding=1, + activation=nn.ReLU(), + ) + + @classmethod + def from_config(cls, cfg, input_shape): + # Standard RPN is shared across levels: + in_channels = [s.channels for s in input_shape] + assert len(set(in_channels)) == 1, "Each level must have the same channel!" + in_channels = in_channels[0] + + # RPNHead should take the same input as anchor generator + # NOTE: it assumes that creating an anchor generator does not have unwanted side effect. + anchor_generator = build_anchor_generator(cfg, input_shape) + num_anchors = anchor_generator.num_anchors + box_dim = anchor_generator.box_dim + assert ( + len(set(num_anchors)) == 1 + ), "Each level must have the same number of anchors per spatial position" + return { + "in_channels": in_channels, + "num_anchors": num_anchors[0], + "box_dim": box_dim, + "conv_dims": cfg.MODEL.RPN.CONV_DIMS, + } + + def forward(self, features: List[torch.Tensor]): + """ + Args: + features (list[Tensor]): list of feature maps + + Returns: + list[Tensor]: A list of L elements. + Element i is a tensor of shape (N, A, Hi, Wi) representing + the predicted objectness logits for all anchors. A is the number of cell anchors. + list[Tensor]: A list of L elements. Element i is a tensor of shape + (N, A*box_dim, Hi, Wi) representing the predicted "deltas" used to transform anchors + to proposals. + """ + pred_objectness_logits = [] + pred_anchor_deltas = [] + for x in features: + t = self.conv(x) + pred_objectness_logits.append(self.objectness_logits(t)) + pred_anchor_deltas.append(self.anchor_deltas(t)) + return pred_objectness_logits, pred_anchor_deltas + + +@PROPOSAL_GENERATOR_REGISTRY.register() +class RPN(nn.Module): + """ + Region Proposal Network, introduced by :paper:`Faster R-CNN`. + """ + + @configurable + def __init__( + self, + *, + in_features: List[str], + head: nn.Module, + anchor_generator: nn.Module, + anchor_matcher: Matcher, + box2box_transform: Box2BoxTransform, + batch_size_per_image: int, + positive_fraction: float, + pre_nms_topk: Tuple[float, float], + post_nms_topk: Tuple[float, float], + nms_thresh: float = 0.7, + min_box_size: float = 0.0, + anchor_boundary_thresh: float = -1.0, + loss_weight: Union[float, Dict[str, float]] = 1.0, + box_reg_loss_type: str = "smooth_l1", + smooth_l1_beta: float = 0.0, + ): + """ + NOTE: this interface is experimental. + + Args: + in_features (list[str]): list of names of input features to use + head (nn.Module): a module that predicts logits and regression deltas + for each level from a list of per-level features + anchor_generator (nn.Module): a module that creates anchors from a + list of features. Usually an instance of :class:`AnchorGenerator` + anchor_matcher (Matcher): label the anchors by matching them with ground truth. 
+ box2box_transform (Box2BoxTransform): defines the transform from anchors boxes to + instance boxes + batch_size_per_image (int): number of anchors per image to sample for training + positive_fraction (float): fraction of foreground anchors to sample for training + pre_nms_topk (tuple[float]): (train, test) that represents the + number of top k proposals to select before NMS, in + training and testing. + post_nms_topk (tuple[float]): (train, test) that represents the + number of top k proposals to select after NMS, in + training and testing. + nms_thresh (float): NMS threshold used to de-duplicate the predicted proposals + min_box_size (float): remove proposal boxes with any side smaller than this threshold, + in the unit of input image pixels + anchor_boundary_thresh (float): legacy option + loss_weight (float|dict): weights to use for losses. Can be single float for weighting + all rpn losses together, or a dict of individual weightings. Valid dict keys are: + "loss_rpn_cls" - applied to classification loss + "loss_rpn_loc" - applied to box regression loss + box_reg_loss_type (str): Loss type to use. Supported losses: "smooth_l1", "giou". + smooth_l1_beta (float): beta parameter for the smooth L1 regression loss. Default to + use L1 loss. Only used when `box_reg_loss_type` is "smooth_l1" + """ + super().__init__() + self.in_features = in_features + self.rpn_head = head + self.anchor_generator = anchor_generator + self.anchor_matcher = anchor_matcher + self.box2box_transform = box2box_transform + self.batch_size_per_image = batch_size_per_image + self.positive_fraction = positive_fraction + # Map from self.training state to train/test settings + self.pre_nms_topk = {True: pre_nms_topk[0], False: pre_nms_topk[1]} + self.post_nms_topk = {True: post_nms_topk[0], False: post_nms_topk[1]} + self.nms_thresh = nms_thresh + self.min_box_size = float(min_box_size) + self.anchor_boundary_thresh = anchor_boundary_thresh + if isinstance(loss_weight, float): + loss_weight = {"loss_rpn_cls": loss_weight, "loss_rpn_loc": loss_weight} + self.loss_weight = loss_weight + self.box_reg_loss_type = box_reg_loss_type + self.smooth_l1_beta = smooth_l1_beta + + @classmethod + def from_config(cls, cfg, input_shape: Dict[str, ShapeSpec]): + in_features = cfg.MODEL.RPN.IN_FEATURES + ret = { + "in_features": in_features, + "min_box_size": cfg.MODEL.PROPOSAL_GENERATOR.MIN_SIZE, + "nms_thresh": cfg.MODEL.RPN.NMS_THRESH, + "batch_size_per_image": cfg.MODEL.RPN.BATCH_SIZE_PER_IMAGE, + "positive_fraction": cfg.MODEL.RPN.POSITIVE_FRACTION, + "loss_weight": { + "loss_rpn_cls": cfg.MODEL.RPN.LOSS_WEIGHT, + "loss_rpn_loc": cfg.MODEL.RPN.BBOX_REG_LOSS_WEIGHT * cfg.MODEL.RPN.LOSS_WEIGHT, + }, + "anchor_boundary_thresh": cfg.MODEL.RPN.BOUNDARY_THRESH, + "box2box_transform": Box2BoxTransform(weights=cfg.MODEL.RPN.BBOX_REG_WEIGHTS), + "box_reg_loss_type": cfg.MODEL.RPN.BBOX_REG_LOSS_TYPE, + "smooth_l1_beta": cfg.MODEL.RPN.SMOOTH_L1_BETA, + } + + ret["pre_nms_topk"] = (cfg.MODEL.RPN.PRE_NMS_TOPK_TRAIN, cfg.MODEL.RPN.PRE_NMS_TOPK_TEST) + ret["post_nms_topk"] = (cfg.MODEL.RPN.POST_NMS_TOPK_TRAIN, cfg.MODEL.RPN.POST_NMS_TOPK_TEST) + + ret["anchor_generator"] = build_anchor_generator(cfg, [input_shape[f] for f in in_features]) + ret["anchor_matcher"] = Matcher( + cfg.MODEL.RPN.IOU_THRESHOLDS, cfg.MODEL.RPN.IOU_LABELS, allow_low_quality_matches=True + ) + ret["head"] = build_rpn_head(cfg, [input_shape[f] for f in in_features]) + return ret + + def _subsample_labels(self, label): + """ + Randomly sample a subset of positive and 
negative examples, and overwrite + the label vector to the ignore value (-1) for all elements that are not + included in the sample. + + Args: + labels (Tensor): a vector of -1, 0, 1. Will be modified in-place and returned. + """ + pos_idx, neg_idx = subsample_labels( + label, self.batch_size_per_image, self.positive_fraction, 0 + ) + # Fill with the ignore label (-1), then set positive and negative labels + label.fill_(-1) + label.scatter_(0, pos_idx, 1) + label.scatter_(0, neg_idx, 0) + return label + + @torch.jit.unused + @torch.no_grad() + def label_and_sample_anchors( + self, anchors: List[Boxes], gt_instances: List[Instances] + ) -> Tuple[List[torch.Tensor], List[torch.Tensor]]: + """ + Args: + anchors (list[Boxes]): anchors for each feature map. + gt_instances: the ground-truth instances for each image. + + Returns: + list[Tensor]: + List of #img tensors. i-th element is a vector of labels whose length is + the total number of anchors across all feature maps R = sum(Hi * Wi * A). + Label values are in {-1, 0, 1}, with meanings: -1 = ignore; 0 = negative + class; 1 = positive class. + list[Tensor]: + i-th element is a Rx4 tensor. The values are the matched gt boxes for each + anchor. Values are undefined for those anchors not labeled as 1. + """ + anchors = Boxes.cat(anchors) + + gt_boxes = [x.gt_boxes for x in gt_instances] + image_sizes = [x.image_size for x in gt_instances] + del gt_instances + + gt_labels = [] + matched_gt_boxes = [] + for image_size_i, gt_boxes_i in zip(image_sizes, gt_boxes): + """ + image_size_i: (h, w) for the i-th image + gt_boxes_i: ground-truth boxes for i-th image + """ + + match_quality_matrix = retry_if_cuda_oom(pairwise_iou)(gt_boxes_i, anchors) + matched_idxs, gt_labels_i = retry_if_cuda_oom(self.anchor_matcher)(match_quality_matrix) + # Matching is memory-expensive and may result in CPU tensors. But the result is small + gt_labels_i = gt_labels_i.to(device=gt_boxes_i.device) + del match_quality_matrix + + if self.anchor_boundary_thresh >= 0: + # Discard anchors that go out of the boundaries of the image + # NOTE: This is legacy functionality that is turned off by default in Detectron2 + anchors_inside_image = anchors.inside_box(image_size_i, self.anchor_boundary_thresh) + gt_labels_i[~anchors_inside_image] = -1 + + # A vector of labels (-1, 0, 1) for each anchor + gt_labels_i = self._subsample_labels(gt_labels_i) + + if len(gt_boxes_i) == 0: + # These values won't be used anyway since the anchor is labeled as background + matched_gt_boxes_i = torch.zeros_like(anchors.tensor) + else: + # TODO wasted indexing computation for ignored boxes + matched_gt_boxes_i = gt_boxes_i[matched_idxs].tensor + + gt_labels.append(gt_labels_i) # N,AHW + matched_gt_boxes.append(matched_gt_boxes_i) + return gt_labels, matched_gt_boxes + + @torch.jit.unused + def losses( + self, + anchors: List[Boxes], + pred_objectness_logits: List[torch.Tensor], + gt_labels: List[torch.Tensor], + pred_anchor_deltas: List[torch.Tensor], + gt_boxes: List[torch.Tensor], + ) -> Dict[str, torch.Tensor]: + """ + Return the losses from a set of RPN predictions and their associated ground-truth. + + Args: + anchors (list[Boxes or RotatedBoxes]): anchors for each feature map, each + has shape (Hi*Wi*A, B), where B is box dimension (4 or 5). + pred_objectness_logits (list[Tensor]): A list of L elements. + Element i is a tensor of shape (N, Hi*Wi*A) representing + the predicted objectness logits for all anchors. + gt_labels (list[Tensor]): Output of :meth:`label_and_sample_anchors`. 
+ pred_anchor_deltas (list[Tensor]): A list of L elements. Element i is a tensor of shape + (N, Hi*Wi*A, 4 or 5) representing the predicted "deltas" used to transform anchors + to proposals. + gt_boxes (list[Tensor]): Output of :meth:`label_and_sample_anchors`. + + Returns: + dict[loss name -> loss value]: A dict mapping from loss name to loss value. + Loss names are: `loss_rpn_cls` for objectness classification and + `loss_rpn_loc` for proposal localization. + """ + num_images = len(gt_labels) + gt_labels = torch.stack(gt_labels) # (N, sum(Hi*Wi*Ai)) + + # Log the number of positive/negative anchors per-image that's used in training + pos_mask = gt_labels == 1 + num_pos_anchors = pos_mask.sum().item() + num_neg_anchors = (gt_labels == 0).sum().item() + storage = get_event_storage() + storage.put_scalar("rpn/num_pos_anchors", num_pos_anchors / num_images) + storage.put_scalar("rpn/num_neg_anchors", num_neg_anchors / num_images) + + localization_loss = _dense_box_regression_loss( + anchors, + self.box2box_transform, + pred_anchor_deltas, + gt_boxes, + pos_mask, + box_reg_loss_type=self.box_reg_loss_type, + smooth_l1_beta=self.smooth_l1_beta, + ) + + valid_mask = gt_labels >= 0 + objectness_loss = F.binary_cross_entropy_with_logits( + cat(pred_objectness_logits, dim=1)[valid_mask], + gt_labels[valid_mask].to(torch.float32), + reduction="sum", + ) + normalizer = self.batch_size_per_image * num_images + losses = { + "loss_rpn_cls": objectness_loss / normalizer, + # The original Faster R-CNN paper uses a slightly different normalizer + # for loc loss. But it doesn't matter in practice + "loss_rpn_loc": localization_loss / normalizer, + } + losses = {k: v * self.loss_weight.get(k, 1.0) for k, v in losses.items()} + return losses + + def forward( + self, + images: ImageList, + features: Dict[str, torch.Tensor], + gt_instances: Optional[List[Instances]] = None, + ): + """ + Args: + images (ImageList): input images of length `N` + features (dict[str, Tensor]): input data as a mapping from feature + map name to tensor. Axis 0 represents the number of images `N` in + the input data; axes 1-3 are channels, height, and width, which may + vary between feature maps (e.g., if a feature pyramid is used). + gt_instances (list[Instances], optional): a length `N` list of `Instances`s. + Each `Instances` stores ground-truth instances for the corresponding image. + + Returns: + proposals: list[Instances]: contains fields "proposal_boxes", "objectness_logits" + loss: dict[Tensor] or None + """ + features = [features[f] for f in self.in_features] + anchors = self.anchor_generator(features) + + pred_objectness_logits, pred_anchor_deltas = self.rpn_head(features) + # Transpose the Hi*Wi*A dimension to the middle: + pred_objectness_logits = [ + # (N, A, Hi, Wi) -> (N, Hi, Wi, A) -> (N, Hi*Wi*A) + score.permute(0, 2, 3, 1).flatten(1) + for score in pred_objectness_logits + ] + pred_anchor_deltas = [ + # (N, A*B, Hi, Wi) -> (N, A, B, Hi, Wi) -> (N, Hi, Wi, A, B) -> (N, Hi*Wi*A, B) + x.view(x.shape[0], -1, self.anchor_generator.box_dim, x.shape[-2], x.shape[-1]) + .permute(0, 3, 4, 1, 2) + .flatten(1, -2) + for x in pred_anchor_deltas + ] + + if self.training: + assert gt_instances is not None, "RPN requires gt_instances in training!" 
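+            # Training only: anchors are labeled against the ground truth and
+            # subsampled before computing the RPN losses; proposal decoding below
+            # runs in both training and inference.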
+ gt_labels, gt_boxes = self.label_and_sample_anchors(anchors, gt_instances) + losses = self.losses( + anchors, pred_objectness_logits, gt_labels, pred_anchor_deltas, gt_boxes + ) + else: + losses = {} + proposals = self.predict_proposals( + anchors, pred_objectness_logits, pred_anchor_deltas, images.image_sizes + ) + return proposals, losses + + def predict_proposals( + self, + anchors: List[Boxes], + pred_objectness_logits: List[torch.Tensor], + pred_anchor_deltas: List[torch.Tensor], + image_sizes: List[Tuple[int, int]], + ): + """ + Decode all the predicted box regression deltas to proposals. Find the top proposals + by applying NMS and removing boxes that are too small. + + Returns: + proposals (list[Instances]): list of N Instances. The i-th Instances + stores post_nms_topk object proposals for image i, sorted by their + objectness score in descending order. + """ + # The proposals are treated as fixed for joint training with roi heads. + # This approach ignores the derivative w.r.t. the proposal boxes’ coordinates that + # are also network responses. + with torch.no_grad(): + pred_proposals = self._decode_proposals(anchors, pred_anchor_deltas) + return find_top_rpn_proposals( + pred_proposals, + pred_objectness_logits, + image_sizes, + self.nms_thresh, + self.pre_nms_topk[self.training], + self.post_nms_topk[self.training], + self.min_box_size, + self.training, + ) + + def _decode_proposals(self, anchors: List[Boxes], pred_anchor_deltas: List[torch.Tensor]): + """ + Transform anchors into proposals by applying the predicted anchor deltas. + + Returns: + proposals (list[Tensor]): A list of L tensors. Tensor i has shape + (N, Hi*Wi*A, B) + """ + N = pred_anchor_deltas[0].shape[0] + proposals = [] + # For each feature map + for anchors_i, pred_anchor_deltas_i in zip(anchors, pred_anchor_deltas): + B = anchors_i.tensor.size(1) + pred_anchor_deltas_i = pred_anchor_deltas_i.reshape(-1, B) + # Expand anchors to shape (N*Hi*Wi*A, B) + anchors_i = anchors_i.tensor.unsqueeze(0).expand(N, -1, -1).reshape(-1, B) + proposals_i = self.box2box_transform.apply_deltas(pred_anchor_deltas_i, anchors_i) + # Append feature map proposals with shape (N, Hi*Wi*A, B) + proposals.append(proposals_i.view(N, -1, B)) + return proposals diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/proposal_generator/rrpn.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/proposal_generator/rrpn.py new file mode 100644 index 0000000..1a3cd28 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/proposal_generator/rrpn.py @@ -0,0 +1,209 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import itertools +import logging +from typing import Dict, List +import torch + +from detectron2.config import configurable +from detectron2.layers import ShapeSpec, batched_nms_rotated, cat +from detectron2.structures import Instances, RotatedBoxes, pairwise_iou_rotated +from detectron2.utils.memory import retry_if_cuda_oom + +from ..box_regression import Box2BoxTransformRotated +from .build import PROPOSAL_GENERATOR_REGISTRY +from .proposal_utils import _is_tracing +from .rpn import RPN + +logger = logging.getLogger(__name__) + + +def find_top_rrpn_proposals( + proposals, + pred_objectness_logits, + image_sizes, + nms_thresh, + pre_nms_topk, + post_nms_topk, + min_box_size, + training, +): + """ + For each feature map, select the `pre_nms_topk` highest scoring proposals, + apply NMS, clip proposals, and remove small boxes. 
Return the `post_nms_topk` + highest scoring proposals among all the feature maps if `training` is True, + otherwise, returns the highest `post_nms_topk` scoring proposals for each + feature map. + + Args: + proposals (list[Tensor]): A list of L tensors. Tensor i has shape (N, Hi*Wi*A, 5). + All proposal predictions on the feature maps. + pred_objectness_logits (list[Tensor]): A list of L tensors. Tensor i has shape (N, Hi*Wi*A). + image_sizes (list[tuple]): sizes (h, w) for each image + nms_thresh (float): IoU threshold to use for NMS + pre_nms_topk (int): number of top k scoring proposals to keep before applying NMS. + When RRPN is run on multiple feature maps (as in FPN) this number is per + feature map. + post_nms_topk (int): number of top k scoring proposals to keep after applying NMS. + When RRPN is run on multiple feature maps (as in FPN) this number is total, + over all feature maps. + min_box_size(float): minimum proposal box side length in pixels (absolute units wrt + input images). + training (bool): True if proposals are to be used in training, otherwise False. + This arg exists only to support a legacy bug; look for the "NB: Legacy bug ..." + comment. + + Returns: + proposals (list[Instances]): list of N Instances. The i-th Instances + stores post_nms_topk object proposals for image i. + """ + num_images = len(image_sizes) + device = proposals[0].device + + # 1. Select top-k anchor for every level and every image + topk_scores = [] # #lvl Tensor, each of shape N x topk + topk_proposals = [] + level_ids = [] # #lvl Tensor, each of shape (topk,) + batch_idx = torch.arange(num_images, device=device) + for level_id, proposals_i, logits_i in zip( + itertools.count(), proposals, pred_objectness_logits + ): + Hi_Wi_A = logits_i.shape[1] + if isinstance(Hi_Wi_A, torch.Tensor): # it's a tensor in tracing + num_proposals_i = torch.clamp(Hi_Wi_A, max=pre_nms_topk) + else: + num_proposals_i = min(Hi_Wi_A, pre_nms_topk) + + topk_scores_i, topk_idx = logits_i.topk(num_proposals_i, dim=1) + + # each is N x topk + topk_proposals_i = proposals_i[batch_idx[:, None], topk_idx] # N x topk x 5 + + topk_proposals.append(topk_proposals_i) + topk_scores.append(topk_scores_i) + level_ids.append(torch.full((num_proposals_i,), level_id, dtype=torch.int64, device=device)) + + # 2. Concat all levels together + topk_scores = cat(topk_scores, dim=1) + topk_proposals = cat(topk_proposals, dim=1) + level_ids = cat(level_ids, dim=0) + + # 3. For each image, run a per-level NMS, and choose topk results. + results = [] + for n, image_size in enumerate(image_sizes): + boxes = RotatedBoxes(topk_proposals[n]) + scores_per_img = topk_scores[n] + lvl = level_ids + + valid_mask = torch.isfinite(boxes.tensor).all(dim=1) & torch.isfinite(scores_per_img) + if not valid_mask.all(): + if training: + raise FloatingPointError( + "Predicted boxes or scores contain Inf/NaN. Training has diverged." + ) + boxes = boxes[valid_mask] + scores_per_img = scores_per_img[valid_mask] + lvl = lvl[valid_mask] + boxes.clip(image_size) + + # filter empty boxes + keep = boxes.nonempty(threshold=min_box_size) + if _is_tracing() or keep.sum().item() != len(boxes): + boxes, scores_per_img, lvl = (boxes[keep], scores_per_img[keep], lvl[keep]) + + keep = batched_nms_rotated(boxes.tensor, scores_per_img, lvl, nms_thresh) + # In Detectron1, there was different behavior during training vs. testing. + # (https://github.com/facebookresearch/Detectron/issues/459) + # During training, topk is over the proposals from *all* images in the training batch. 
+ # During testing, it is over the proposals for each image separately. + # As a result, the training behavior becomes batch-dependent, + # and the configuration "POST_NMS_TOPK_TRAIN" end up relying on the batch size. + # This bug is addressed in Detectron2 to make the behavior independent of batch size. + keep = keep[:post_nms_topk] + + res = Instances(image_size) + res.proposal_boxes = boxes[keep] + res.objectness_logits = scores_per_img[keep] + results.append(res) + return results + + +@PROPOSAL_GENERATOR_REGISTRY.register() +class RRPN(RPN): + """ + Rotated Region Proposal Network described in :paper:`RRPN`. + """ + + @configurable + def __init__(self, *args, **kwargs): + super().__init__(*args, **kwargs) + if self.anchor_boundary_thresh >= 0: + raise NotImplementedError( + "anchor_boundary_thresh is a legacy option not implemented for RRPN." + ) + + @classmethod + def from_config(cls, cfg, input_shape: Dict[str, ShapeSpec]): + ret = super().from_config(cfg, input_shape) + ret["box2box_transform"] = Box2BoxTransformRotated(weights=cfg.MODEL.RPN.BBOX_REG_WEIGHTS) + return ret + + @torch.no_grad() + def label_and_sample_anchors(self, anchors: List[RotatedBoxes], gt_instances: List[Instances]): + """ + Args: + anchors (list[RotatedBoxes]): anchors for each feature map. + gt_instances: the ground-truth instances for each image. + + Returns: + list[Tensor]: + List of #img tensors. i-th element is a vector of labels whose length is + the total number of anchors across feature maps. Label values are in {-1, 0, 1}, + with meanings: -1 = ignore; 0 = negative class; 1 = positive class. + list[Tensor]: + i-th element is a Nx5 tensor, where N is the total number of anchors across + feature maps. The values are the matched gt boxes for each anchor. + Values are undefined for those anchors not labeled as 1. + """ + anchors = RotatedBoxes.cat(anchors) + + gt_boxes = [x.gt_boxes for x in gt_instances] + del gt_instances + + gt_labels = [] + matched_gt_boxes = [] + for gt_boxes_i in gt_boxes: + """ + gt_boxes_i: ground-truth boxes for i-th image + """ + match_quality_matrix = retry_if_cuda_oom(pairwise_iou_rotated)(gt_boxes_i, anchors) + matched_idxs, gt_labels_i = retry_if_cuda_oom(self.anchor_matcher)(match_quality_matrix) + # Matching is memory-expensive and may result in CPU tensors. 
+            # But the result is small
+            gt_labels_i = gt_labels_i.to(device=gt_boxes_i.device)
+
+            # A vector of labels (-1, 0, 1) for each anchor
+            gt_labels_i = self._subsample_labels(gt_labels_i)
+
+            if len(gt_boxes_i) == 0:
+                # These values won't be used anyway since the anchor is labeled as background
+                matched_gt_boxes_i = torch.zeros_like(anchors.tensor)
+            else:
+                # TODO wasted indexing computation for ignored boxes
+                matched_gt_boxes_i = gt_boxes_i[matched_idxs].tensor
+
+            gt_labels.append(gt_labels_i)  # N,AHW
+            matched_gt_boxes.append(matched_gt_boxes_i)
+        return gt_labels, matched_gt_boxes
+
+    @torch.no_grad()
+    def predict_proposals(self, anchors, pred_objectness_logits, pred_anchor_deltas, image_sizes):
+        pred_proposals = self._decode_proposals(anchors, pred_anchor_deltas)
+        return find_top_rrpn_proposals(
+            pred_proposals,
+            pred_objectness_logits,
+            image_sizes,
+            self.nms_thresh,
+            self.pre_nms_topk[self.training],
+            self.post_nms_topk[self.training],
+            self.min_box_size,
+            self.training,
+        )
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/__init__.py
new file mode 100644
index 0000000..d13e9c5
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/__init__.py
@@ -0,0 +1,29 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+from .box_head import ROI_BOX_HEAD_REGISTRY, build_box_head, FastRCNNConvFCHead
+from .keypoint_head import (
+    ROI_KEYPOINT_HEAD_REGISTRY,
+    build_keypoint_head,
+    BaseKeypointRCNNHead,
+    KRCNNConvDeconvUpsampleHead,
+)
+from .mask_head import (
+    ROI_MASK_HEAD_REGISTRY,
+    build_mask_head,
+    BaseMaskRCNNHead,
+    MaskRCNNConvUpsampleHead,
+)
+from .roi_heads import (
+    ROI_HEADS_REGISTRY,
+    ROIHeads,
+    Res5ROIHeads,
+    StandardROIHeads,
+    build_roi_heads,
+    select_foreground_proposals,
+)
+from .cascade_rcnn import CascadeROIHeads
+from .rotated_fast_rcnn import RROIHeads
+from .fast_rcnn import FastRCNNOutputLayers
+
+from . import cascade_rcnn  # isort:skip
+
+__all__ = list(globals().keys())
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/box_head.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/box_head.py
new file mode 100644
index 0000000..5d0370b
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/box_head.py
@@ -0,0 +1,118 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+import numpy as np
+from typing import List
+import fvcore.nn.weight_init as weight_init
+import torch
+from torch import nn
+
+from detectron2.config import configurable
+from detectron2.layers import Conv2d, ShapeSpec, get_norm
+from detectron2.utils.registry import Registry
+
+__all__ = ["FastRCNNConvFCHead", "build_box_head", "ROI_BOX_HEAD_REGISTRY"]
+
+ROI_BOX_HEAD_REGISTRY = Registry("ROI_BOX_HEAD")
+ROI_BOX_HEAD_REGISTRY.__doc__ = """
+Registry for box heads, which make box predictions from per-region features.
+
+The registered object will be called with `obj(cfg, input_shape)`.
+"""
+
+
+# To get torchscript support, we make the head a subclass of `nn.Sequential`.
+# Therefore, to add new layers in this head class, please make sure they are
+# added in the order they will be used in forward().
+@ROI_BOX_HEAD_REGISTRY.register()
+class FastRCNNConvFCHead(nn.Sequential):
+    """
+    A head with several 3x3 conv layers (each followed by norm & relu) and then
+    several fc layers (each followed by relu).
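+
+    A minimal usage sketch (illustrative only; assumes a 256x7x7 pooled region
+    feature and builds the common "2 fc" Fast R-CNN head)::
+
+        head = FastRCNNConvFCHead(
+            ShapeSpec(channels=256, height=7, width=7),
+            conv_dims=[], fc_dims=[1024, 1024],
+        )
+        # head.output_shape.channels == 1024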
+ """ + + @configurable + def __init__( + self, input_shape: ShapeSpec, *, conv_dims: List[int], fc_dims: List[int], conv_norm="" + ): + """ + NOTE: this interface is experimental. + + Args: + input_shape (ShapeSpec): shape of the input feature. + conv_dims (list[int]): the output dimensions of the conv layers + fc_dims (list[int]): the output dimensions of the fc layers + conv_norm (str or callable): normalization for the conv layers. + See :func:`detectron2.layers.get_norm` for supported types. + """ + super().__init__() + assert len(conv_dims) + len(fc_dims) > 0 + + self._output_size = (input_shape.channels, input_shape.height, input_shape.width) + + self.conv_norm_relus = [] + for k, conv_dim in enumerate(conv_dims): + conv = Conv2d( + self._output_size[0], + conv_dim, + kernel_size=3, + padding=1, + bias=not conv_norm, + norm=get_norm(conv_norm, conv_dim), + activation=nn.ReLU(), + ) + self.add_module("conv{}".format(k + 1), conv) + self.conv_norm_relus.append(conv) + self._output_size = (conv_dim, self._output_size[1], self._output_size[2]) + + self.fcs = [] + for k, fc_dim in enumerate(fc_dims): + if k == 0: + self.add_module("flatten", nn.Flatten()) + fc = nn.Linear(int(np.prod(self._output_size)), fc_dim) + self.add_module("fc{}".format(k + 1), fc) + self.add_module("fc_relu{}".format(k + 1), nn.ReLU()) + self.fcs.append(fc) + self._output_size = fc_dim + + for layer in self.conv_norm_relus: + weight_init.c2_msra_fill(layer) + for layer in self.fcs: + weight_init.c2_xavier_fill(layer) + + @classmethod + def from_config(cls, cfg, input_shape): + num_conv = cfg.MODEL.ROI_BOX_HEAD.NUM_CONV + conv_dim = cfg.MODEL.ROI_BOX_HEAD.CONV_DIM + num_fc = cfg.MODEL.ROI_BOX_HEAD.NUM_FC + fc_dim = cfg.MODEL.ROI_BOX_HEAD.FC_DIM + return { + "input_shape": input_shape, + "conv_dims": [conv_dim] * num_conv, + "fc_dims": [fc_dim] * num_fc, + "conv_norm": cfg.MODEL.ROI_BOX_HEAD.NORM, + } + + def forward(self, x): + for layer in self: + x = layer(x) + return x + + @property + @torch.jit.unused + def output_shape(self): + """ + Returns: + ShapeSpec: the output feature shape + """ + o = self._output_size + if isinstance(o, int): + return ShapeSpec(channels=o) + else: + return ShapeSpec(channels=o[0], height=o[1], width=o[2]) + + +def build_box_head(cfg, input_shape): + """ + Build a box head defined by `cfg.MODEL.ROI_BOX_HEAD.NAME`. + """ + name = cfg.MODEL.ROI_BOX_HEAD.NAME + return ROI_BOX_HEAD_REGISTRY.get(name)(cfg, input_shape) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/cascade_rcnn.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/cascade_rcnn.py new file mode 100644 index 0000000..a0ca70f --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/cascade_rcnn.py @@ -0,0 +1,299 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
+from typing import List +import torch +from torch import nn +from torch.autograd.function import Function + +from detectron2.config import configurable +from detectron2.layers import ShapeSpec +from detectron2.structures import Boxes, Instances, pairwise_iou +from detectron2.utils.events import get_event_storage + +from ..box_regression import Box2BoxTransform +from ..matcher import Matcher +from ..poolers import ROIPooler +from .box_head import build_box_head +from .fast_rcnn import FastRCNNOutputLayers, fast_rcnn_inference +from .roi_heads import ROI_HEADS_REGISTRY, StandardROIHeads + + +class _ScaleGradient(Function): + @staticmethod + def forward(ctx, input, scale): + ctx.scale = scale + return input + + @staticmethod + def backward(ctx, grad_output): + return grad_output * ctx.scale, None + + +@ROI_HEADS_REGISTRY.register() +class CascadeROIHeads(StandardROIHeads): + """ + The ROI heads that implement :paper:`Cascade R-CNN`. + """ + + @configurable + def __init__( + self, + *, + box_in_features: List[str], + box_pooler: ROIPooler, + box_heads: List[nn.Module], + box_predictors: List[nn.Module], + proposal_matchers: List[Matcher], + **kwargs, + ): + """ + NOTE: this interface is experimental. + + Args: + box_pooler (ROIPooler): pooler that extracts region features from given boxes + box_heads (list[nn.Module]): box head for each cascade stage + box_predictors (list[nn.Module]): box predictor for each cascade stage + proposal_matchers (list[Matcher]): matcher with different IoU thresholds to + match boxes with ground truth for each stage. The first matcher matches + RPN proposals with ground truth, the other matchers use boxes predicted + by the previous stage as proposals and match them with ground truth. + """ + assert "proposal_matcher" not in kwargs, ( + "CascadeROIHeads takes 'proposal_matchers=' for each stage instead " + "of one 'proposal_matcher='." + ) + # The first matcher matches RPN proposals with ground truth, done in the base class + kwargs["proposal_matcher"] = proposal_matchers[0] + num_stages = self.num_cascade_stages = len(box_heads) + box_heads = nn.ModuleList(box_heads) + box_predictors = nn.ModuleList(box_predictors) + assert len(box_predictors) == num_stages, f"{len(box_predictors)} != {num_stages}!" + assert len(proposal_matchers) == num_stages, f"{len(proposal_matchers)} != {num_stages}!" + super().__init__( + box_in_features=box_in_features, + box_pooler=box_pooler, + box_head=box_heads, + box_predictor=box_predictors, + **kwargs, + ) + self.proposal_matchers = proposal_matchers + + @classmethod + def from_config(cls, cfg, input_shape): + ret = super().from_config(cfg, input_shape) + ret.pop("proposal_matcher") + return ret + + @classmethod + def _init_box_head(cls, cfg, input_shape): + # fmt: off + in_features = cfg.MODEL.ROI_HEADS.IN_FEATURES + pooler_resolution = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION + pooler_scales = tuple(1.0 / input_shape[k].stride for k in in_features) + sampling_ratio = cfg.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO + pooler_type = cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE + cascade_bbox_reg_weights = cfg.MODEL.ROI_BOX_CASCADE_HEAD.BBOX_REG_WEIGHTS + cascade_ious = cfg.MODEL.ROI_BOX_CASCADE_HEAD.IOUS + assert len(cascade_bbox_reg_weights) == len(cascade_ious) + assert cfg.MODEL.ROI_BOX_HEAD.CLS_AGNOSTIC_BBOX_REG, \ + "CascadeROIHeads only support class-agnostic regression now!" 
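+        # (class-specific regression would produce K boxes per proposal, which would be
+        # ambiguous as input proposals for the next cascade stage)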
+ assert cascade_ious[0] == cfg.MODEL.ROI_HEADS.IOU_THRESHOLDS[0] + # fmt: on + + in_channels = [input_shape[f].channels for f in in_features] + # Check all channel counts are equal + assert len(set(in_channels)) == 1, in_channels + in_channels = in_channels[0] + + box_pooler = ROIPooler( + output_size=pooler_resolution, + scales=pooler_scales, + sampling_ratio=sampling_ratio, + pooler_type=pooler_type, + ) + pooled_shape = ShapeSpec( + channels=in_channels, width=pooler_resolution, height=pooler_resolution + ) + + box_heads, box_predictors, proposal_matchers = [], [], [] + for match_iou, bbox_reg_weights in zip(cascade_ious, cascade_bbox_reg_weights): + box_head = build_box_head(cfg, pooled_shape) + box_heads.append(box_head) + box_predictors.append( + FastRCNNOutputLayers( + cfg, + box_head.output_shape, + box2box_transform=Box2BoxTransform(weights=bbox_reg_weights), + ) + ) + proposal_matchers.append(Matcher([match_iou], [0, 1], allow_low_quality_matches=False)) + return { + "box_in_features": in_features, + "box_pooler": box_pooler, + "box_heads": box_heads, + "box_predictors": box_predictors, + "proposal_matchers": proposal_matchers, + } + + def forward(self, images, features, proposals, targets=None): + del images + if self.training: + proposals = self.label_and_sample_proposals(proposals, targets) + + if self.training: + # Need targets to box head + losses = self._forward_box(features, proposals, targets) + losses.update(self._forward_mask(features, proposals)) + losses.update(self._forward_keypoint(features, proposals)) + return proposals, losses + else: + pred_instances = self._forward_box(features, proposals) + pred_instances = self.forward_with_given_boxes(features, pred_instances) + return pred_instances, {} + + def _forward_box(self, features, proposals, targets=None): + """ + Args: + features, targets: the same as in + Same as in :meth:`ROIHeads.forward`. + proposals (list[Instances]): the per-image object proposals with + their matching ground truth. + Each has fields "proposal_boxes", and "objectness_logits", + "gt_classes", "gt_boxes". + """ + features = [features[f] for f in self.box_in_features] + head_outputs = [] # (predictor, predictions, proposals) + prev_pred_boxes = None + image_sizes = [x.image_size for x in proposals] + for k in range(self.num_cascade_stages): + if k > 0: + # The output boxes of the previous stage are used to create the input + # proposals of the next stage. + proposals = self._create_proposals_from_boxes(prev_pred_boxes, image_sizes) + if self.training: + proposals = self._match_and_label_boxes(proposals, k, targets) + predictions = self._run_stage(features, proposals, k) + prev_pred_boxes = self.box_predictor[k].predict_boxes(predictions, proposals) + head_outputs.append((self.box_predictor[k], predictions, proposals)) + + if self.training: + losses = {} + storage = get_event_storage() + for stage, (predictor, predictions, proposals) in enumerate(head_outputs): + with storage.name_scope("stage{}".format(stage)): + stage_losses = predictor.losses(predictions, proposals) + losses.update({k + "_stage{}".format(stage): v for k, v in stage_losses.items()}) + return losses + else: + # Each is a list[Tensor] of length #image. 
+            # Each tensor is Ri x (K+1)
+            scores_per_stage = [h[0].predict_probs(h[1], h[2]) for h in head_outputs]
+
+            # Average the scores across heads
+            scores = [
+                sum(list(scores_per_image)) * (1.0 / self.num_cascade_stages)
+                for scores_per_image in zip(*scores_per_stage)
+            ]
+            # Use the boxes of the last head
+            predictor, predictions, proposals = head_outputs[-1]
+            boxes = predictor.predict_boxes(predictions, proposals)
+            pred_instances, _ = fast_rcnn_inference(
+                boxes,
+                scores,
+                image_sizes,
+                predictor.test_score_thresh,
+                predictor.test_nms_thresh,
+                predictor.test_topk_per_image,
+            )
+            return pred_instances
+
+    @torch.no_grad()
+    def _match_and_label_boxes(self, proposals, stage, targets):
+        """
+        Match proposals with groundtruth using the matcher at the given stage.
+        Label the proposals as foreground or background based on the match.
+
+        Args:
+            proposals (list[Instances]): One Instances for each image, with
+                the field "proposal_boxes".
+            stage (int): the current stage
+            targets (list[Instances]): the ground truth instances
+
+        Returns:
+            list[Instances]: the same proposals, but with fields "gt_classes" and "gt_boxes"
+        """
+        num_fg_samples, num_bg_samples = [], []
+        for proposals_per_image, targets_per_image in zip(proposals, targets):
+            match_quality_matrix = pairwise_iou(
+                targets_per_image.gt_boxes, proposals_per_image.proposal_boxes
+            )
+            # proposal_labels are 0 or 1
+            matched_idxs, proposal_labels = self.proposal_matchers[stage](match_quality_matrix)
+            if len(targets_per_image) > 0:
+                gt_classes = targets_per_image.gt_classes[matched_idxs]
+                # Label unmatched proposals (0 label from matcher) as background (label=num_classes)
+                gt_classes[proposal_labels == 0] = self.num_classes
+                gt_boxes = targets_per_image.gt_boxes[matched_idxs]
+            else:
+                gt_classes = torch.zeros_like(matched_idxs) + self.num_classes
+                gt_boxes = Boxes(
+                    targets_per_image.gt_boxes.tensor.new_zeros((len(proposals_per_image), 4))
+                )
+            proposals_per_image.gt_classes = gt_classes
+            proposals_per_image.gt_boxes = gt_boxes
+
+            num_fg_samples.append((proposal_labels == 1).sum().item())
+            num_bg_samples.append(proposal_labels.numel() - num_fg_samples[-1])
+
+        # Log the number of fg/bg samples in each stage
+        storage = get_event_storage()
+        storage.put_scalar(
+            "stage{}/roi_head/num_fg_samples".format(stage),
+            sum(num_fg_samples) / len(num_fg_samples),
+        )
+        storage.put_scalar(
+            "stage{}/roi_head/num_bg_samples".format(stage),
+            sum(num_bg_samples) / len(num_bg_samples),
+        )
+        return proposals
+
+    def _run_stage(self, features, proposals, stage):
+        """
+        Args:
+            features (list[Tensor]): #lvl input features to ROIHeads
+            proposals (list[Instances]): #image Instances, with the field "proposal_boxes"
+            stage (int): the current stage
+
+        Returns:
+            Same output as `FastRCNNOutputLayers.forward()`.
+        """
+        box_features = self.box_pooler(features, [x.proposal_boxes for x in proposals])
+        # The original implementation averages the losses among heads,
+        # but scale up the parameter gradients of the heads.
+        # This is equivalent to adding the losses among heads,
+        # but scale down the gradients on features.
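+        # (e.g. with 3 cascade stages, each stage's gradient w.r.t. the shared
+        # pooled features is scaled by 1/3, so their sum matches the average)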
+ if self.training: + box_features = _ScaleGradient.apply(box_features, 1.0 / self.num_cascade_stages) + box_features = self.box_head[stage](box_features) + return self.box_predictor[stage](box_features) + + def _create_proposals_from_boxes(self, boxes, image_sizes): + """ + Args: + boxes (list[Tensor]): per-image predicted boxes, each of shape Ri x 4 + image_sizes (list[tuple]): list of image shapes in (h, w) + + Returns: + list[Instances]: per-image proposals with the given boxes. + """ + # Just like RPN, the proposals should not have gradients + boxes = [Boxes(b.detach()) for b in boxes] + proposals = [] + for boxes_per_image, image_size in zip(boxes, image_sizes): + boxes_per_image.clip(image_size) + if self.training: + # do not filter empty boxes at inference time, + # because the scores from each stage need to be aligned and added later + boxes_per_image = boxes_per_image[boxes_per_image.nonempty()] + prop = Instances(image_size) + prop.proposal_boxes = boxes_per_image + proposals.append(prop) + return proposals diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/fast_rcnn.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/fast_rcnn.py new file mode 100644 index 0000000..42eba21 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/fast_rcnn.py @@ -0,0 +1,462 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import logging +from typing import Dict, List, Tuple, Union +import torch +from torch import nn +from torch.nn import functional as F + +from detectron2.config import configurable +from detectron2.layers import ShapeSpec, batched_nms, cat, cross_entropy, nonzero_tuple +from detectron2.modeling.box_regression import Box2BoxTransform, _dense_box_regression_loss +from detectron2.structures import Boxes, Instances +from detectron2.utils.events import get_event_storage + +__all__ = ["fast_rcnn_inference", "FastRCNNOutputLayers"] + + +logger = logging.getLogger(__name__) + +""" +Shape shorthand in this module: + + N: number of images in the minibatch + R: number of ROIs, combined over all images, in the minibatch + Ri: number of ROIs in image i + K: number of foreground classes. E.g.,there are 80 foreground classes in COCO. + +Naming convention: + + deltas: refers to the 4-d (dx, dy, dw, dh) deltas that parameterize the box2box + transform (see :class:`box_regression.Box2BoxTransform`). + + pred_class_logits: predicted class scores in [-inf, +inf]; use + softmax(pred_class_logits) to estimate P(class). + + gt_classes: ground-truth classification labels in [0, K], where [0, K) represent + foreground object classes and K represents the background class. + + pred_proposal_deltas: predicted box2box transform deltas for transforming proposals + to detection box predictions. + + gt_proposal_deltas: ground-truth box2box transform deltas +""" + + +def fast_rcnn_inference( + boxes: List[torch.Tensor], + scores: List[torch.Tensor], + image_shapes: List[Tuple[int, int]], + score_thresh: float, + nms_thresh: float, + topk_per_image: int, +): + """ + Call `fast_rcnn_inference_single_image` for all images. + + Args: + boxes (list[Tensor]): A list of Tensors of predicted class-specific or class-agnostic + boxes for each image. Element i has shape (Ri, K * 4) if doing + class-specific regression, or (Ri, 4) if doing class-agnostic + regression, where Ri is the number of predicted objects for image i. + This is compatible with the output of :meth:`FastRCNNOutputLayers.predict_boxes`. 
+ scores (list[Tensor]): A list of Tensors of predicted class scores for each image. + Element i has shape (Ri, K + 1), where Ri is the number of predicted objects + for image i. Compatible with the output of :meth:`FastRCNNOutputLayers.predict_probs`. + image_shapes (list[tuple]): A list of (width, height) tuples for each image in the batch. + score_thresh (float): Only return detections with a confidence score exceeding this + threshold. + nms_thresh (float): The threshold to use for box non-maximum suppression. Value in [0, 1]. + topk_per_image (int): The number of top scoring detections to return. Set < 0 to return + all detections. + + Returns: + instances: (list[Instances]): A list of N instances, one for each image in the batch, + that stores the topk most confidence detections. + kept_indices: (list[Tensor]): A list of 1D tensor of length of N, each element indicates + the corresponding boxes/scores index in [0, Ri) from the input, for image i. + """ + result_per_image = [ + fast_rcnn_inference_single_image( + boxes_per_image, scores_per_image, image_shape, score_thresh, nms_thresh, topk_per_image + ) + for scores_per_image, boxes_per_image, image_shape in zip(scores, boxes, image_shapes) + ] + return [x[0] for x in result_per_image], [x[1] for x in result_per_image] + + +def _log_classification_stats(pred_logits, gt_classes, prefix="fast_rcnn"): + """ + Log the classification metrics to EventStorage. + + Args: + pred_logits: Rx(K+1) logits. The last column is for background class. + gt_classes: R labels + """ + num_instances = gt_classes.numel() + if num_instances == 0: + return + pred_classes = pred_logits.argmax(dim=1) + bg_class_ind = pred_logits.shape[1] - 1 + + fg_inds = (gt_classes >= 0) & (gt_classes < bg_class_ind) + num_fg = fg_inds.nonzero().numel() + fg_gt_classes = gt_classes[fg_inds] + fg_pred_classes = pred_classes[fg_inds] + + num_false_negative = (fg_pred_classes == bg_class_ind).nonzero().numel() + num_accurate = (pred_classes == gt_classes).nonzero().numel() + fg_num_accurate = (fg_pred_classes == fg_gt_classes).nonzero().numel() + + storage = get_event_storage() + storage.put_scalar(f"{prefix}/cls_accuracy", num_accurate / num_instances) + if num_fg > 0: + storage.put_scalar(f"{prefix}/fg_cls_accuracy", fg_num_accurate / num_fg) + storage.put_scalar(f"{prefix}/false_negative", num_false_negative / num_fg) + + +def fast_rcnn_inference_single_image( + boxes, + scores, + image_shape: Tuple[int, int], + score_thresh: float, + nms_thresh: float, + topk_per_image: int, +): + """ + Single-image inference. Return bounding-box detection results by thresholding + on scores and applying non-maximum suppression (NMS). + + Args: + Same as `fast_rcnn_inference`, but with boxes, scores, and image shapes + per image. + + Returns: + Same as `fast_rcnn_inference`, but for only one image. + """ + valid_mask = torch.isfinite(boxes).all(dim=1) & torch.isfinite(scores).all(dim=1) + if not valid_mask.all(): + boxes = boxes[valid_mask] + scores = scores[valid_mask] + + scores = scores[:, :-1] + num_bbox_reg_classes = boxes.shape[1] // 4 + # Convert to Boxes to use the `clip` function ... + boxes = Boxes(boxes.reshape(-1, 4)) + boxes.clip(image_shape) + boxes = boxes.tensor.view(-1, num_bbox_reg_classes, 4) # R x C x 4 + + # 1. Filter results based on detection scores. It can make NMS more efficient + # by filtering out low-confidence detections. + filter_mask = scores > score_thresh # R x K + # R' x 2. 
+    # First column contains indices of the R predictions;
+    # Second column contains indices of classes.
+    filter_inds = filter_mask.nonzero()
+    if num_bbox_reg_classes == 1:
+        boxes = boxes[filter_inds[:, 0], 0]
+    else:
+        boxes = boxes[filter_mask]
+    scores = scores[filter_mask]
+
+    # 2. Apply NMS for each class independently.
+    keep = batched_nms(boxes, scores, filter_inds[:, 1], nms_thresh)
+    if topk_per_image >= 0:
+        keep = keep[:topk_per_image]
+    boxes, scores, filter_inds = boxes[keep], scores[keep], filter_inds[keep]
+
+    result = Instances(image_shape)
+    result.pred_boxes = Boxes(boxes)
+    result.scores = scores
+    result.pred_classes = filter_inds[:, 1]
+    return result, filter_inds[:, 0]
+
+
+class FastRCNNOutputLayers(nn.Module):
+    """
+    Two linear layers for predicting Fast R-CNN outputs:
+
+    1. proposal-to-detection box regression deltas
+    2. classification scores
+    """
+
+    @configurable
+    def __init__(
+        self,
+        input_shape: ShapeSpec,
+        *,
+        box2box_transform,
+        num_classes: int,
+        test_score_thresh: float = 0.0,
+        test_nms_thresh: float = 0.5,
+        test_topk_per_image: int = 100,
+        cls_agnostic_bbox_reg: bool = False,
+        smooth_l1_beta: float = 0.0,
+        box_reg_loss_type: str = "smooth_l1",
+        loss_weight: Union[float, Dict[str, float]] = 1.0,
+    ):
+        """
+        NOTE: this interface is experimental.
+
+        Args:
+            input_shape (ShapeSpec): shape of the input feature to this module
+            box2box_transform (Box2BoxTransform or Box2BoxTransformRotated):
+            num_classes (int): number of foreground classes
+            test_score_thresh (float): threshold to filter predictions results.
+            test_nms_thresh (float): NMS threshold for prediction results.
+            test_topk_per_image (int): number of top predictions to produce per image.
+            cls_agnostic_bbox_reg (bool): whether to use class agnostic for bbox regression
+            smooth_l1_beta (float): transition point from L1 to L2 loss. Only used if
+                `box_reg_loss_type` is "smooth_l1"
+            box_reg_loss_type (str): Box regression loss type. One of: "smooth_l1", "giou",
+                "diou", "ciou"
+            loss_weight (float|dict): weights to use for losses. Can be single float for weighting
+                all losses, or a dict of individual weightings.
Valid dict keys are: + * "loss_cls": applied to classification loss + * "loss_box_reg": applied to box regression loss + """ + super().__init__() + if isinstance(input_shape, int): # some backward compatibility + input_shape = ShapeSpec(channels=input_shape) + self.num_classes = num_classes + input_size = input_shape.channels * (input_shape.width or 1) * (input_shape.height or 1) + # prediction layer for num_classes foreground classes and one background class (hence + 1) + self.cls_score = nn.Linear(input_size, num_classes + 1) + num_bbox_reg_classes = 1 if cls_agnostic_bbox_reg else num_classes + box_dim = len(box2box_transform.weights) + self.bbox_pred = nn.Linear(input_size, num_bbox_reg_classes * box_dim) + + nn.init.normal_(self.cls_score.weight, std=0.01) + nn.init.normal_(self.bbox_pred.weight, std=0.001) + for l in [self.cls_score, self.bbox_pred]: + nn.init.constant_(l.bias, 0) + + self.box2box_transform = box2box_transform + self.smooth_l1_beta = smooth_l1_beta + self.test_score_thresh = test_score_thresh + self.test_nms_thresh = test_nms_thresh + self.test_topk_per_image = test_topk_per_image + self.box_reg_loss_type = box_reg_loss_type + if isinstance(loss_weight, float): + loss_weight = {"loss_cls": loss_weight, "loss_box_reg": loss_weight} + self.loss_weight = loss_weight + + @classmethod + def from_config(cls, cfg, input_shape): + return { + "input_shape": input_shape, + "box2box_transform": Box2BoxTransform(weights=cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_WEIGHTS), + # fmt: off + "num_classes" : cfg.MODEL.ROI_HEADS.NUM_CLASSES, + "cls_agnostic_bbox_reg" : cfg.MODEL.ROI_BOX_HEAD.CLS_AGNOSTIC_BBOX_REG, + "smooth_l1_beta" : cfg.MODEL.ROI_BOX_HEAD.SMOOTH_L1_BETA, + "test_score_thresh" : cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST, + "test_nms_thresh" : cfg.MODEL.ROI_HEADS.NMS_THRESH_TEST, + "test_topk_per_image" : cfg.TEST.DETECTIONS_PER_IMAGE, + "box_reg_loss_type" : cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_LOSS_TYPE, + "loss_weight" : {"loss_box_reg": cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_LOSS_WEIGHT}, + # fmt: on + } + + def forward(self, x): + """ + Args: + x: per-region features of shape (N, ...) for N bounding boxes to predict. + + Returns: + (Tensor, Tensor): + First tensor: shape (N,K+1), scores for each of the N box. Each row contains the + scores for K object categories and 1 background class. + + Second tensor: bounding box regression deltas for each box. Shape is shape (N,Kx4), + or (N,4) for class-agnostic regression. + """ + if x.dim() > 2: + x = torch.flatten(x, start_dim=1) + scores = self.cls_score(x) + proposal_deltas = self.bbox_pred(x) + return scores, proposal_deltas + + def losses(self, predictions, proposals): + """ + Args: + predictions: return values of :meth:`forward()`. + proposals (list[Instances]): proposals that match the features that were used + to compute predictions. The fields ``proposal_boxes``, ``gt_boxes``, + ``gt_classes`` are expected. + + Returns: + Dict[str, Tensor]: dict of losses + """ + scores, proposal_deltas = predictions + + # parse classification outputs + gt_classes = ( + cat([p.gt_classes for p in proposals], dim=0) if len(proposals) else torch.empty(0) + ) + _log_classification_stats(scores, gt_classes) + + # parse box regression outputs + if len(proposals): + proposal_boxes = cat([p.proposal_boxes.tensor for p in proposals], dim=0) # Nx4 + assert not proposal_boxes.requires_grad, "Proposals should not require gradients!" + # If "gt_boxes" does not exist, the proposals must be all negative and + # should not be included in regression loss computation. 
+ # Here we just use proposal_boxes as an arbitrary placeholder because its + # value won't be used in self.box_reg_loss(). + gt_boxes = cat( + [(p.gt_boxes if p.has("gt_boxes") else p.proposal_boxes).tensor for p in proposals], + dim=0, + ) + else: + proposal_boxes = gt_boxes = torch.empty((0, 4), device=proposal_deltas.device) + + losses = { + "loss_cls": cross_entropy(scores, gt_classes, reduction="mean"), + "loss_box_reg": self.box_reg_loss( + proposal_boxes, gt_boxes, proposal_deltas, gt_classes + ), + } + return {k: v * self.loss_weight.get(k, 1.0) for k, v in losses.items()} + + def box_reg_loss(self, proposal_boxes, gt_boxes, pred_deltas, gt_classes): + """ + Args: + proposal_boxes/gt_boxes are tensors with the same shape (R, 4 or 5). + pred_deltas has shape (R, 4 or 5), or (R, num_classes * (4 or 5)). + gt_classes is a long tensor of shape R, the gt class label of each proposal. + R shall be the number of proposals. + """ + box_dim = proposal_boxes.shape[1] # 4 or 5 + # Regression loss is only computed for foreground proposals (those matched to a GT) + fg_inds = nonzero_tuple((gt_classes >= 0) & (gt_classes < self.num_classes))[0] + if pred_deltas.shape[1] == box_dim: # cls-agnostic regression + fg_pred_deltas = pred_deltas[fg_inds] + else: + fg_pred_deltas = pred_deltas.view(-1, self.num_classes, box_dim)[ + fg_inds, gt_classes[fg_inds] + ] + + loss_box_reg = _dense_box_regression_loss( + [proposal_boxes[fg_inds]], + self.box2box_transform, + [fg_pred_deltas.unsqueeze(0)], + [gt_boxes[fg_inds]], + ..., + self.box_reg_loss_type, + self.smooth_l1_beta, + ) + + # The reg loss is normalized using the total number of regions (R), not the number + # of foreground regions even though the box regression loss is only defined on + # foreground regions. Why? Because doing so gives equal training influence to + # each foreground example. To see how, consider two different minibatches: + # (1) Contains a single foreground region + # (2) Contains 100 foreground regions + # If we normalize by the number of foreground regions, the single example in + # minibatch (1) will be given 100 times as much influence as each foreground + # example in minibatch (2). Normalizing by the total number of regions, R, + # means that the single example in minibatch (1) and each of the 100 examples + # in minibatch (2) are given equal influence. + return loss_box_reg / max(gt_classes.numel(), 1.0) # return 0 if empty + + def inference(self, predictions: Tuple[torch.Tensor, torch.Tensor], proposals: List[Instances]): + """ + Args: + predictions: return values of :meth:`forward()`. + proposals (list[Instances]): proposals that match the features that were + used to compute predictions. The ``proposal_boxes`` field is expected. + + Returns: + list[Instances]: same as `fast_rcnn_inference`. + list[Tensor]: same as `fast_rcnn_inference`. + """ + boxes = self.predict_boxes(predictions, proposals) + scores = self.predict_probs(predictions, proposals) + image_shapes = [x.image_size for x in proposals] + return fast_rcnn_inference( + boxes, + scores, + image_shapes, + self.test_score_thresh, + self.test_nms_thresh, + self.test_topk_per_image, + ) + + def predict_boxes_for_gt_classes(self, predictions, proposals): + """ + Args: + predictions: return values of :meth:`forward()`. + proposals (list[Instances]): proposals that match the features that were used + to compute predictions. The fields ``proposal_boxes``, ``gt_classes`` are expected. 
+ + Returns: + list[Tensor]: + A list of Tensors of predicted boxes for GT classes in case of + class-specific box head. Element i of the list has shape (Ri, B), where Ri is + the number of proposals for image i and B is the box dimension (4 or 5) + """ + if not len(proposals): + return [] + scores, proposal_deltas = predictions + proposal_boxes = cat([p.proposal_boxes.tensor for p in proposals], dim=0) + N, B = proposal_boxes.shape + predict_boxes = self.box2box_transform.apply_deltas( + proposal_deltas, proposal_boxes + ) # Nx(KxB) + + K = predict_boxes.shape[1] // B + if K > 1: + gt_classes = torch.cat([p.gt_classes for p in proposals], dim=0) + # Some proposals are ignored or have a background class. Their gt_classes + # cannot be used as index. + gt_classes = gt_classes.clamp_(0, K - 1) + + predict_boxes = predict_boxes.view(N, K, B)[ + torch.arange(N, dtype=torch.long, device=predict_boxes.device), gt_classes + ] + num_prop_per_image = [len(p) for p in proposals] + return predict_boxes.split(num_prop_per_image) + + def predict_boxes( + self, predictions: Tuple[torch.Tensor, torch.Tensor], proposals: List[Instances] + ): + """ + Args: + predictions: return values of :meth:`forward()`. + proposals (list[Instances]): proposals that match the features that were + used to compute predictions. The ``proposal_boxes`` field is expected. + + Returns: + list[Tensor]: + A list of Tensors of predicted class-specific or class-agnostic boxes + for each image. Element i has shape (Ri, K * B) or (Ri, B), where Ri is + the number of proposals for image i and B is the box dimension (4 or 5) + """ + if not len(proposals): + return [] + _, proposal_deltas = predictions + num_prop_per_image = [len(p) for p in proposals] + proposal_boxes = cat([p.proposal_boxes.tensor for p in proposals], dim=0) + predict_boxes = self.box2box_transform.apply_deltas( + proposal_deltas, + proposal_boxes, + ) # Nx(KxB) + return predict_boxes.split(num_prop_per_image) + + def predict_probs( + self, predictions: Tuple[torch.Tensor, torch.Tensor], proposals: List[Instances] + ): + """ + Args: + predictions: return values of :meth:`forward()`. + proposals (list[Instances]): proposals that match the features that were + used to compute predictions. + + Returns: + list[Tensor]: + A list of Tensors of predicted class probabilities for each image. + Element i has shape (Ri, K + 1), where Ri is the number of proposals for image i. + """ + scores, _ = predictions + num_inst_per_image = [len(p) for p in proposals] + probs = F.softmax(scores, dim=-1) + return probs.split(num_inst_per_image, dim=0) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/keypoint_head.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/keypoint_head.py new file mode 100644 index 0000000..e0acc13 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/keypoint_head.py @@ -0,0 +1,272 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
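+# A minimal usage sketch, assuming detectron2's standard config defaults
+# (illustrative only; `input_shape` is the per-region feature ShapeSpec):
+#
+#   cfg.MODEL.ROI_KEYPOINT_HEAD.NAME = "KRCNNConvDeconvUpsampleHead"
+#   cfg.MODEL.ROI_KEYPOINT_HEAD.NUM_KEYPOINTS = 17  # e.g. COCO person keypoints
+#   keypoint_head = build_keypoint_head(cfg, input_shape)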
+from typing import List +import torch +from torch import nn +from torch.nn import functional as F + +from detectron2.config import configurable +from detectron2.layers import Conv2d, ConvTranspose2d, cat, interpolate +from detectron2.structures import Instances, heatmaps_to_keypoints +from detectron2.utils.events import get_event_storage +from detectron2.utils.registry import Registry + +_TOTAL_SKIPPED = 0 + + +__all__ = [ + "ROI_KEYPOINT_HEAD_REGISTRY", + "build_keypoint_head", + "BaseKeypointRCNNHead", + "KRCNNConvDeconvUpsampleHead", +] + + +ROI_KEYPOINT_HEAD_REGISTRY = Registry("ROI_KEYPOINT_HEAD") +ROI_KEYPOINT_HEAD_REGISTRY.__doc__ = """ +Registry for keypoint heads, which make keypoint predictions from per-region features. + +The registered object will be called with `obj(cfg, input_shape)`. +""" + + +def build_keypoint_head(cfg, input_shape): + """ + Build a keypoint head from `cfg.MODEL.ROI_KEYPOINT_HEAD.NAME`. + """ + name = cfg.MODEL.ROI_KEYPOINT_HEAD.NAME + return ROI_KEYPOINT_HEAD_REGISTRY.get(name)(cfg, input_shape) + + +def keypoint_rcnn_loss(pred_keypoint_logits, instances, normalizer): + """ + Arguments: + pred_keypoint_logits (Tensor): A tensor of shape (N, K, S, S) where N is the total number + of instances in the batch, K is the number of keypoints, and S is the side length + of the keypoint heatmap. The values are spatial logits. + instances (list[Instances]): A list of M Instances, where M is the batch size. + These instances are predictions from the model + that are in 1:1 correspondence with pred_keypoint_logits. + Each Instances should contain a `gt_keypoints` field containing a `structures.Keypoint` + instance. + normalizer (float): Normalize the loss by this amount. + If not specified, we normalize by the number of visible keypoints in the minibatch. + + Returns a scalar tensor containing the loss. + """ + heatmaps = [] + valid = [] + + keypoint_side_len = pred_keypoint_logits.shape[2] + for instances_per_image in instances: + if len(instances_per_image) == 0: + continue + keypoints = instances_per_image.gt_keypoints + heatmaps_per_image, valid_per_image = keypoints.to_heatmap( + instances_per_image.proposal_boxes.tensor, keypoint_side_len + ) + heatmaps.append(heatmaps_per_image.view(-1)) + valid.append(valid_per_image.view(-1)) + + if len(heatmaps): + keypoint_targets = cat(heatmaps, dim=0) + valid = cat(valid, dim=0).to(dtype=torch.uint8) + valid = torch.nonzero(valid).squeeze(1) + + # torch.mean (in binary_cross_entropy_with_logits) doesn't + # accept empty tensors, so handle it separately + if len(heatmaps) == 0 or valid.numel() == 0: + global _TOTAL_SKIPPED + _TOTAL_SKIPPED += 1 + storage = get_event_storage() + storage.put_scalar("kpts_num_skipped_batches", _TOTAL_SKIPPED, smoothing_hint=False) + return pred_keypoint_logits.sum() * 0 + + N, K, H, W = pred_keypoint_logits.shape + pred_keypoint_logits = pred_keypoint_logits.view(N * K, H * W) + + keypoint_loss = F.cross_entropy( + pred_keypoint_logits[valid], keypoint_targets[valid], reduction="sum" + ) + + # If a normalizer isn't specified, normalize by the number of visible keypoints in the minibatch + if normalizer is None: + normalizer = valid.numel() + keypoint_loss /= normalizer + + return keypoint_loss + + +def keypoint_rcnn_inference(pred_keypoint_logits: torch.Tensor, pred_instances: List[Instances]): + """ + Post process each predicted keypoint heatmap in `pred_keypoint_logits` into (x, y, score) + and add it to the `pred_instances` as a `pred_keypoints` field. 
+
+    Args:
+        pred_keypoint_logits (Tensor): A tensor of shape (R, K, S, S) where R is the total number
+            of instances in the batch, K is the number of keypoints, and S is the side length of
+            the keypoint heatmap. The values are spatial logits.
+        pred_instances (list[Instances]): A list of N Instances, where N is the number of images.
+
+    Returns:
+        None. Each element in pred_instances will contain extra "pred_keypoints" and
+            "pred_keypoint_heatmaps" fields. "pred_keypoints" is a tensor of shape
+            (#instance, K, 3) where the last dimension corresponds to (x, y, score).
+            The scores are larger than 0. "pred_keypoint_heatmaps" contains the raw
+            keypoint logits as passed to this function.
+    """
+    # flatten all bboxes from all images together (list[Boxes] -> Rx4 tensor)
+    bboxes_flat = cat([b.pred_boxes.tensor for b in pred_instances], dim=0)
+
+    pred_keypoint_logits = pred_keypoint_logits.detach()
+    keypoint_results = heatmaps_to_keypoints(pred_keypoint_logits, bboxes_flat.detach())
+    num_instances_per_image = [len(i) for i in pred_instances]
+    keypoint_results = keypoint_results[:, :, [0, 1, 3]].split(num_instances_per_image, dim=0)
+    heatmap_results = pred_keypoint_logits.split(num_instances_per_image, dim=0)
+
+    for keypoint_results_per_image, heatmap_results_per_image, instances_per_image in zip(
+        keypoint_results, heatmap_results, pred_instances
+    ):
+        # keypoint_results_per_image is (num instances)x(num keypoints)x(x, y, score)
+        # heatmap_results_per_image is (num instances)x(num keypoints)x(side)x(side)
+        instances_per_image.pred_keypoints = keypoint_results_per_image
+        instances_per_image.pred_keypoint_heatmaps = heatmap_results_per_image
+
+
+class BaseKeypointRCNNHead(nn.Module):
+    """
+    Implement the basic Keypoint R-CNN losses and inference logic described in
+    Sec. 5 of :paper:`Mask R-CNN`.
+    """
+
+    @configurable
+    def __init__(self, *, num_keypoints, loss_weight=1.0, loss_normalizer=1.0):
+        """
+        NOTE: this interface is experimental.
+
+        Args:
+            num_keypoints (int): number of keypoints to predict
+            loss_weight (float): weight to multiply the keypoint loss by
+            loss_normalizer (float or str):
+                If float, divide the loss by `loss_normalizer * #images`.
+                If 'visible', the loss is normalized by the total number of
+                visible keypoints across images.
+        """
+        super().__init__()
+        self.num_keypoints = num_keypoints
+        self.loss_weight = loss_weight
+        assert loss_normalizer == "visible" or isinstance(loss_normalizer, float), loss_normalizer
+        self.loss_normalizer = loss_normalizer
+
+    @classmethod
+    def from_config(cls, cfg, input_shape):
+        ret = {
+            "loss_weight": cfg.MODEL.ROI_KEYPOINT_HEAD.LOSS_WEIGHT,
+            "num_keypoints": cfg.MODEL.ROI_KEYPOINT_HEAD.NUM_KEYPOINTS,
+        }
+        normalize_by_visible = (
+            cfg.MODEL.ROI_KEYPOINT_HEAD.NORMALIZE_LOSS_BY_VISIBLE_KEYPOINTS
+        )  # noqa
+        if not normalize_by_visible:
+            batch_size_per_image = cfg.MODEL.ROI_HEADS.BATCH_SIZE_PER_IMAGE
+            positive_sample_fraction = cfg.MODEL.ROI_HEADS.POSITIVE_FRACTION
+            ret["loss_normalizer"] = (
+                ret["num_keypoints"] * batch_size_per_image * positive_sample_fraction
+            )
+        else:
+            ret["loss_normalizer"] = "visible"
+        return ret
+
+    def forward(self, x, instances: List[Instances]):
+        """
+        Args:
+            x: input 4D region feature(s) provided by :class:`ROIHeads`.
+            instances (list[Instances]): contains the boxes & labels corresponding
+                to the input features.
+                Exact format is up to its caller to decide.
+                Typically, this is the foreground instances in training, with
+                "proposal_boxes" field and other gt annotations.
+                In inference, it contains boxes that are already predicted.
+
+        Returns:
+            A dict of losses if in training. The predicted "instances" if in inference.
+        """
+        x = self.layers(x)
+        if self.training:
+            num_images = len(instances)
+            normalizer = (
+                None if self.loss_normalizer == "visible" else num_images * self.loss_normalizer
+            )
+            return {
+                "loss_keypoint": keypoint_rcnn_loss(x, instances, normalizer=normalizer)
+                * self.loss_weight
+            }
+        else:
+            keypoint_rcnn_inference(x, instances)
+            return instances
+
+    def layers(self, x):
+        """
+        Neural network layers that make predictions from regional input features.
+        """
+        raise NotImplementedError
+
+
+# To get torchscript support, we make the head a subclass of `nn.Sequential`.
+# Therefore, to add new layers in this head class, please make sure they are
+# added in the order they will be used in forward().
+@ROI_KEYPOINT_HEAD_REGISTRY.register()
+class KRCNNConvDeconvUpsampleHead(BaseKeypointRCNNHead, nn.Sequential):
+    """
+    A standard keypoint head containing a series of 3x3 convs, followed by
+    a transpose convolution and bilinear interpolation for upsampling.
+    It is described in Sec. 5 of :paper:`Mask R-CNN`.
+    """
+
+    @configurable
+    def __init__(self, input_shape, *, num_keypoints, conv_dims, **kwargs):
+        """
+        NOTE: this interface is experimental.
+
+        Args:
+            input_shape (ShapeSpec): shape of the input feature
+            conv_dims: an iterable of output channel counts for each conv in the head
+                e.g. (512, 512, 512) for three convs outputting 512 channels.
+        """
+        super().__init__(num_keypoints=num_keypoints, **kwargs)
+
+        # default up_scale to 2.0 (this can be made an option)
+        up_scale = 2.0
+        in_channels = input_shape.channels
+
+        for idx, layer_channels in enumerate(conv_dims, 1):
+            module = Conv2d(in_channels, layer_channels, 3, stride=1, padding=1)
+            self.add_module("conv_fcn{}".format(idx), module)
+            self.add_module("conv_fcn_relu{}".format(idx), nn.ReLU())
+            in_channels = layer_channels
+
+        deconv_kernel = 4
+        self.score_lowres = ConvTranspose2d(
+            in_channels, num_keypoints, deconv_kernel, stride=2, padding=deconv_kernel // 2 - 1
+        )
+        self.up_scale = up_scale
+
+        for name, param in self.named_parameters():
+            if "bias" in name:
+                nn.init.constant_(param, 0)
+            elif "weight" in name:
+                # Caffe2 implementation uses MSRAFill, which in fact
+                # corresponds to kaiming_normal_ in PyTorch
+                nn.init.kaiming_normal_(param, mode="fan_out", nonlinearity="relu")
+
+    @classmethod
+    def from_config(cls, cfg, input_shape):
+        ret = super().from_config(cfg, input_shape)
+        ret["input_shape"] = input_shape
+        ret["conv_dims"] = cfg.MODEL.ROI_KEYPOINT_HEAD.CONV_DIMS
+        return ret
+
+    def layers(self, x):
+        for layer in self:
+            x = layer(x)
+        x = interpolate(x, scale_factor=self.up_scale, mode="bilinear", align_corners=False)
+        return x
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/mask_head.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/mask_head.py
new file mode 100644
index 0000000..5ac5c4b
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/mask_head.py
@@ -0,0 +1,292 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
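+# ---------------------------------------------------------------------------
+# [Editor's illustrative sketch -- not part of the upstream file.] A minimal
+# example of building a mask head from a config via the registry defined in
+# this module; the ShapeSpec numbers are assumed example values:
+#
+#     from detectron2.config import get_cfg
+#     from detectron2.layers import ShapeSpec
+#
+#     cfg = get_cfg()  # cfg.MODEL.ROI_MASK_HEAD.NAME picks the head class
+#     head = build_mask_head(cfg, ShapeSpec(channels=256, height=14, width=14))
+# ---------------------------------------------------------------------------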
+from typing import List
+import fvcore.nn.weight_init as weight_init
+import torch
+from torch import nn
+from torch.nn import functional as F
+
+from detectron2.config import configurable
+from detectron2.layers import Conv2d, ConvTranspose2d, ShapeSpec, cat, get_norm
+from detectron2.structures import Instances
+from detectron2.utils.events import get_event_storage
+from detectron2.utils.registry import Registry
+
+__all__ = [
+    "BaseMaskRCNNHead",
+    "MaskRCNNConvUpsampleHead",
+    "build_mask_head",
+    "ROI_MASK_HEAD_REGISTRY",
+]
+
+
+ROI_MASK_HEAD_REGISTRY = Registry("ROI_MASK_HEAD")
+ROI_MASK_HEAD_REGISTRY.__doc__ = """
+Registry for mask heads, which predict instance masks given
+per-region features.
+
+The registered object will be called with `obj(cfg, input_shape)`.
+"""
+
+
+@torch.jit.unused
+def mask_rcnn_loss(pred_mask_logits: torch.Tensor, instances: List[Instances], vis_period: int = 0):
+    """
+    Compute the mask prediction loss defined in the Mask R-CNN paper.
+
+    Args:
+        pred_mask_logits (Tensor): A tensor of shape (B, C, Hmask, Wmask) or (B, 1, Hmask, Wmask)
+            for class-specific or class-agnostic, where B is the total number of predicted masks
+            in all images, C is the number of foreground classes, and Hmask, Wmask are the height
+            and width of the mask predictions. The values are logits.
+        instances (list[Instances]): A list of N Instances, where N is the number of images
+            in the batch. These instances are in 1:1
+            correspondence with the pred_mask_logits. The ground-truth labels (class, box, mask,
+            ...) associated with each instance are stored in fields.
+        vis_period (int): the period (in steps) to dump visualization.
+
+    Returns:
+        mask_loss (Tensor): A scalar tensor containing the loss.
+    """
+    cls_agnostic_mask = pred_mask_logits.size(1) == 1
+    total_num_masks = pred_mask_logits.size(0)
+    mask_side_len = pred_mask_logits.size(2)
+    assert pred_mask_logits.size(2) == pred_mask_logits.size(3), "Mask prediction must be square!"
+
+    gt_classes = []
+    gt_masks = []
+    for instances_per_image in instances:
+        if len(instances_per_image) == 0:
+            continue
+        if not cls_agnostic_mask:
+            gt_classes_per_image = instances_per_image.gt_classes.to(dtype=torch.int64)
+            gt_classes.append(gt_classes_per_image)
+
+        gt_masks_per_image = instances_per_image.gt_masks.crop_and_resize(
+            instances_per_image.proposal_boxes.tensor, mask_side_len
+        ).to(device=pred_mask_logits.device)
+        # A tensor of shape (N, M, M), N=#instances in the image; M=mask_side_len
+        gt_masks.append(gt_masks_per_image)
+
+    if len(gt_masks) == 0:
+        return pred_mask_logits.sum() * 0
+
+    gt_masks = cat(gt_masks, dim=0)
+
+    if cls_agnostic_mask:
+        pred_mask_logits = pred_mask_logits[:, 0]
+    else:
+        indices = torch.arange(total_num_masks)
+        gt_classes = cat(gt_classes, dim=0)
+        pred_mask_logits = pred_mask_logits[indices, gt_classes]
+
+    if gt_masks.dtype == torch.bool:
+        gt_masks_bool = gt_masks
+    else:
+        # Here we allow gt_masks to be float as well (depend on the implementation of rasterize())
+        gt_masks_bool = gt_masks > 0.5
+    gt_masks = gt_masks.to(dtype=torch.float32)
+
+    # Log the training accuracy (using gt classes and 0.5 threshold)
+    mask_incorrect = (pred_mask_logits > 0.0) != gt_masks_bool
+    mask_accuracy = 1 - (mask_incorrect.sum().item() / max(mask_incorrect.numel(), 1.0))
+    num_positive = gt_masks_bool.sum().item()
+    false_positive = (mask_incorrect & ~gt_masks_bool).sum().item() / max(
+        gt_masks_bool.numel() - num_positive, 1.0
+    )
+    false_negative = (mask_incorrect & gt_masks_bool).sum().item() / max(num_positive, 1.0)
+
+    storage = get_event_storage()
+    storage.put_scalar("mask_rcnn/accuracy", mask_accuracy)
+    storage.put_scalar("mask_rcnn/false_positive", false_positive)
+    storage.put_scalar("mask_rcnn/false_negative", false_negative)
+    if vis_period > 0 and storage.iter % vis_period == 0:
+        pred_masks = pred_mask_logits.sigmoid()
+        vis_masks = torch.cat([pred_masks, gt_masks], axis=2)
+        name = "Left: mask prediction;   Right: mask GT"
+        for idx, vis_mask in enumerate(vis_masks):
+            vis_mask = torch.stack([vis_mask] * 3, axis=0)
+            storage.put_image(name + f" ({idx})", vis_mask)
+
+    mask_loss = F.binary_cross_entropy_with_logits(pred_mask_logits, gt_masks, reduction="mean")
+    return mask_loss
+
+
+def mask_rcnn_inference(pred_mask_logits: torch.Tensor, pred_instances: List[Instances]):
+    """
+    Convert pred_mask_logits to estimated foreground probability masks while also
+    extracting only the masks for the predicted classes in pred_instances. For each
+    predicted box, the mask of the same class is attached to the instance by adding a
+    new "pred_masks" field to pred_instances.
+
+    Args:
+        pred_mask_logits (Tensor): A tensor of shape (B, C, Hmask, Wmask) or (B, 1, Hmask, Wmask)
+            for class-specific or class-agnostic, where B is the total number of predicted masks
+            in all images, C is the number of foreground classes, and Hmask, Wmask are the height
+            and width of the mask predictions. The values are logits.
+        pred_instances (list[Instances]): A list of N Instances, where N is the number of images
+            in the batch. Each Instances must have field "pred_classes".
+
+    Returns:
+        None. pred_instances will contain an extra "pred_masks" field storing a mask of size (Hmask,
+            Wmask) for the predicted class. Note that the masks are returned as soft (non-quantized)
+            masks at the resolution predicted by the network; post-processing steps, such as resizing
+            the predicted masks to the original image resolution and/or binarizing them, are left
+            to the caller.
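+
+    Example (editor's illustrative sketch; names and shapes are assumed)::
+
+        # logits: (B, C, Hmask, Wmask) from a mask head; every Instances in
+        # `pred_instances` already has a "pred_classes" field.
+        mask_rcnn_inference(logits, pred_instances)
+        probs = pred_instances[0].pred_masks  # (R0, 1, Hmask, Wmask), in [0, 1]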
+ """ + cls_agnostic_mask = pred_mask_logits.size(1) == 1 + + if cls_agnostic_mask: + mask_probs_pred = pred_mask_logits.sigmoid() + else: + # Select masks corresponding to the predicted classes + num_masks = pred_mask_logits.shape[0] + class_pred = cat([i.pred_classes for i in pred_instances]) + indices = torch.arange(num_masks, device=class_pred.device) + mask_probs_pred = pred_mask_logits[indices, class_pred][:, None].sigmoid() + # mask_probs_pred.shape: (B, 1, Hmask, Wmask) + + num_boxes_per_image = [len(i) for i in pred_instances] + mask_probs_pred = mask_probs_pred.split(num_boxes_per_image, dim=0) + + for prob, instances in zip(mask_probs_pred, pred_instances): + instances.pred_masks = prob # (1, Hmask, Wmask) + + +class BaseMaskRCNNHead(nn.Module): + """ + Implement the basic Mask R-CNN losses and inference logic described in :paper:`Mask R-CNN` + """ + + @configurable + def __init__(self, *, loss_weight: float = 1.0, vis_period: int = 0): + """ + NOTE: this interface is experimental. + + Args: + loss_weight (float): multiplier of the loss + vis_period (int): visualization period + """ + super().__init__() + self.vis_period = vis_period + self.loss_weight = loss_weight + + @classmethod + def from_config(cls, cfg, input_shape): + return {"vis_period": cfg.VIS_PERIOD} + + def forward(self, x, instances: List[Instances]): + """ + Args: + x: input region feature(s) provided by :class:`ROIHeads`. + instances (list[Instances]): contains the boxes & labels corresponding + to the input features. + Exact format is up to its caller to decide. + Typically, this is the foreground instances in training, with + "proposal_boxes" field and other gt annotations. + In inference, it contains boxes that are already predicted. + + Returns: + A dict of losses in training. The predicted "instances" in inference. + """ + x = self.layers(x) + if self.training: + return {"loss_mask": mask_rcnn_loss(x, instances, self.vis_period) * self.loss_weight} + else: + mask_rcnn_inference(x, instances) + return instances + + def layers(self, x): + """ + Neural network layers that makes predictions from input features. + """ + raise NotImplementedError + + +# To get torchscript support, we make the head a subclass of `nn.Sequential`. +# Therefore, to add new layers in this head class, please make sure they are +# added in the order they will be used in forward(). +@ROI_MASK_HEAD_REGISTRY.register() +class MaskRCNNConvUpsampleHead(BaseMaskRCNNHead, nn.Sequential): + """ + A mask head with several conv layers, plus an upsample layer (with `ConvTranspose2d`). + Predictions are made with a final 1x1 conv layer. + """ + + @configurable + def __init__(self, input_shape: ShapeSpec, *, num_classes, conv_dims, conv_norm="", **kwargs): + """ + NOTE: this interface is experimental. + + Args: + input_shape (ShapeSpec): shape of the input feature + num_classes (int): the number of foreground classes (i.e. background is not + included). 1 if using class agnostic prediction. + conv_dims (list[int]): a list of N>0 integers representing the output dimensions + of N-1 conv layers and the last upsample layer. + conv_norm (str or callable): normalization for the conv layers. + See :func:`detectron2.layers.get_norm` for supported types. + """ + super().__init__(**kwargs) + assert len(conv_dims) >= 1, "conv_dims have to be non-empty!" 
+ + self.conv_norm_relus = [] + + cur_channels = input_shape.channels + for k, conv_dim in enumerate(conv_dims[:-1]): + conv = Conv2d( + cur_channels, + conv_dim, + kernel_size=3, + stride=1, + padding=1, + bias=not conv_norm, + norm=get_norm(conv_norm, conv_dim), + activation=nn.ReLU(), + ) + self.add_module("mask_fcn{}".format(k + 1), conv) + self.conv_norm_relus.append(conv) + cur_channels = conv_dim + + self.deconv = ConvTranspose2d( + cur_channels, conv_dims[-1], kernel_size=2, stride=2, padding=0 + ) + self.add_module("deconv_relu", nn.ReLU()) + cur_channels = conv_dims[-1] + + self.predictor = Conv2d(cur_channels, num_classes, kernel_size=1, stride=1, padding=0) + + for layer in self.conv_norm_relus + [self.deconv]: + weight_init.c2_msra_fill(layer) + # use normal distribution initialization for mask prediction layer + nn.init.normal_(self.predictor.weight, std=0.001) + if self.predictor.bias is not None: + nn.init.constant_(self.predictor.bias, 0) + + @classmethod + def from_config(cls, cfg, input_shape): + ret = super().from_config(cfg, input_shape) + conv_dim = cfg.MODEL.ROI_MASK_HEAD.CONV_DIM + num_conv = cfg.MODEL.ROI_MASK_HEAD.NUM_CONV + ret.update( + conv_dims=[conv_dim] * (num_conv + 1), # +1 for ConvTranspose + conv_norm=cfg.MODEL.ROI_MASK_HEAD.NORM, + input_shape=input_shape, + ) + if cfg.MODEL.ROI_MASK_HEAD.CLS_AGNOSTIC_MASK: + ret["num_classes"] = 1 + else: + ret["num_classes"] = cfg.MODEL.ROI_HEADS.NUM_CLASSES + return ret + + def layers(self, x): + for layer in self: + x = layer(x) + return x + + +def build_mask_head(cfg, input_shape): + """ + Build a mask head defined by `cfg.MODEL.ROI_MASK_HEAD.NAME`. + """ + name = cfg.MODEL.ROI_MASK_HEAD.NAME + return ROI_MASK_HEAD_REGISTRY.get(name)(cfg, input_shape) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/roi_heads.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/roi_heads.py new file mode 100644 index 0000000..13dd57a --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/roi_heads.py @@ -0,0 +1,877 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import inspect +import logging +import numpy as np +from typing import Dict, List, Optional, Tuple +import torch +from torch import nn + +from detectron2.config import configurable +from detectron2.layers import ShapeSpec, nonzero_tuple +from detectron2.structures import Boxes, ImageList, Instances, pairwise_iou +from detectron2.utils.events import get_event_storage +from detectron2.utils.registry import Registry + +from ..backbone.resnet import BottleneckBlock, ResNet +from ..matcher import Matcher +from ..poolers import ROIPooler +from ..proposal_generator.proposal_utils import add_ground_truth_to_proposals +from ..sampling import subsample_labels +from .box_head import build_box_head +from .fast_rcnn import FastRCNNOutputLayers +from .keypoint_head import build_keypoint_head +from .mask_head import build_mask_head + +ROI_HEADS_REGISTRY = Registry("ROI_HEADS") +ROI_HEADS_REGISTRY.__doc__ = """ +Registry for ROI heads in a generalized R-CNN model. +ROIHeads take feature maps and region proposals, and +perform per-region computation. + +The registered object will be called with `obj(cfg, input_shape)`. +The call is expected to return an :class:`ROIHeads`. +""" + +logger = logging.getLogger(__name__) + + +def build_roi_heads(cfg, input_shape): + """ + Build ROIHeads defined by `cfg.MODEL.ROI_HEADS.NAME`. 
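+
+    Example (editor's illustrative sketch, mirroring how the meta-architectures
+    typically call this)::
+
+        roi_heads = build_roi_heads(cfg, backbone.output_shape())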
+ """ + name = cfg.MODEL.ROI_HEADS.NAME + return ROI_HEADS_REGISTRY.get(name)(cfg, input_shape) + + +def select_foreground_proposals( + proposals: List[Instances], bg_label: int +) -> Tuple[List[Instances], List[torch.Tensor]]: + """ + Given a list of N Instances (for N images), each containing a `gt_classes` field, + return a list of Instances that contain only instances with `gt_classes != -1 && + gt_classes != bg_label`. + + Args: + proposals (list[Instances]): A list of N Instances, where N is the number of + images in the batch. + bg_label: label index of background class. + + Returns: + list[Instances]: N Instances, each contains only the selected foreground instances. + list[Tensor]: N boolean vector, correspond to the selection mask of + each Instances object. True for selected instances. + """ + assert isinstance(proposals, (list, tuple)) + assert isinstance(proposals[0], Instances) + assert proposals[0].has("gt_classes") + fg_proposals = [] + fg_selection_masks = [] + for proposals_per_image in proposals: + gt_classes = proposals_per_image.gt_classes + fg_selection_mask = (gt_classes != -1) & (gt_classes != bg_label) + fg_idxs = fg_selection_mask.nonzero().squeeze(1) + fg_proposals.append(proposals_per_image[fg_idxs]) + fg_selection_masks.append(fg_selection_mask) + return fg_proposals, fg_selection_masks + + +def select_proposals_with_visible_keypoints(proposals: List[Instances]) -> List[Instances]: + """ + Args: + proposals (list[Instances]): a list of N Instances, where N is the + number of images. + + Returns: + proposals: only contains proposals with at least one visible keypoint. + + Note that this is still slightly different from Detectron. + In Detectron, proposals for training keypoint head are re-sampled from + all the proposals with IOU>threshold & >=1 visible keypoint. + + Here, the proposals are first sampled from all proposals with + IOU>threshold, then proposals with no visible keypoint are filtered out. + This strategy seems to make no difference on Detectron and is easier to implement. + """ + ret = [] + all_num_fg = [] + for proposals_per_image in proposals: + # If empty/unannotated image (hard negatives), skip filtering for train + if len(proposals_per_image) == 0: + ret.append(proposals_per_image) + continue + gt_keypoints = proposals_per_image.gt_keypoints.tensor + # #fg x K x 3 + vis_mask = gt_keypoints[:, :, 2] >= 1 + xs, ys = gt_keypoints[:, :, 0], gt_keypoints[:, :, 1] + proposal_boxes = proposals_per_image.proposal_boxes.tensor.unsqueeze(dim=1) # #fg x 1 x 4 + kp_in_box = ( + (xs >= proposal_boxes[:, :, 0]) + & (xs <= proposal_boxes[:, :, 2]) + & (ys >= proposal_boxes[:, :, 1]) + & (ys <= proposal_boxes[:, :, 3]) + ) + selection = (kp_in_box & vis_mask).any(dim=1) + selection_idxs = nonzero_tuple(selection)[0] + all_num_fg.append(selection_idxs.numel()) + ret.append(proposals_per_image[selection_idxs]) + + storage = get_event_storage() + storage.put_scalar("keypoint_head/num_fg_samples", np.mean(all_num_fg)) + return ret + + +class ROIHeads(torch.nn.Module): + """ + ROIHeads perform all per-region computation in an R-CNN. + + It typically contains logic to + + 1. (in training only) match proposals with ground truth and sample them + 2. crop the regions and extract per-region features using proposals + 3. make per-region predictions with different heads + + It can have many variants, implemented as subclasses of this class. + This base class contains the logic to match/sample proposals. 
+ But it is not necessary to inherit this class if the sampling logic is not needed. + """ + + @configurable + def __init__( + self, + *, + num_classes, + batch_size_per_image, + positive_fraction, + proposal_matcher, + proposal_append_gt=True, + ): + """ + NOTE: this interface is experimental. + + Args: + num_classes (int): number of foreground classes (i.e. background is not included) + batch_size_per_image (int): number of proposals to sample for training + positive_fraction (float): fraction of positive (foreground) proposals + to sample for training. + proposal_matcher (Matcher): matcher that matches proposals and ground truth + proposal_append_gt (bool): whether to include ground truth as proposals as well + """ + super().__init__() + self.batch_size_per_image = batch_size_per_image + self.positive_fraction = positive_fraction + self.num_classes = num_classes + self.proposal_matcher = proposal_matcher + self.proposal_append_gt = proposal_append_gt + + @classmethod + def from_config(cls, cfg): + return { + "batch_size_per_image": cfg.MODEL.ROI_HEADS.BATCH_SIZE_PER_IMAGE, + "positive_fraction": cfg.MODEL.ROI_HEADS.POSITIVE_FRACTION, + "num_classes": cfg.MODEL.ROI_HEADS.NUM_CLASSES, + "proposal_append_gt": cfg.MODEL.ROI_HEADS.PROPOSAL_APPEND_GT, + # Matcher to assign box proposals to gt boxes + "proposal_matcher": Matcher( + cfg.MODEL.ROI_HEADS.IOU_THRESHOLDS, + cfg.MODEL.ROI_HEADS.IOU_LABELS, + allow_low_quality_matches=False, + ), + } + + def _sample_proposals( + self, matched_idxs: torch.Tensor, matched_labels: torch.Tensor, gt_classes: torch.Tensor + ) -> Tuple[torch.Tensor, torch.Tensor]: + """ + Based on the matching between N proposals and M groundtruth, + sample the proposals and set their classification labels. + + Args: + matched_idxs (Tensor): a vector of length N, each is the best-matched + gt index in [0, M) for each proposal. + matched_labels (Tensor): a vector of length N, the matcher's label + (one of cfg.MODEL.ROI_HEADS.IOU_LABELS) for each proposal. + gt_classes (Tensor): a vector of length M. + + Returns: + Tensor: a vector of indices of sampled proposals. Each is in [0, N). + Tensor: a vector of the same length, the classification label for + each sampled proposal. Each sample is labeled as either a category in + [0, num_classes) or the background (num_classes). + """ + has_gt = gt_classes.numel() > 0 + # Get the corresponding GT for each proposal + if has_gt: + gt_classes = gt_classes[matched_idxs] + # Label unmatched proposals (0 label from matcher) as background (label=num_classes) + gt_classes[matched_labels == 0] = self.num_classes + # Label ignore proposals (-1 label) + gt_classes[matched_labels == -1] = -1 + else: + gt_classes = torch.zeros_like(matched_idxs) + self.num_classes + + sampled_fg_idxs, sampled_bg_idxs = subsample_labels( + gt_classes, self.batch_size_per_image, self.positive_fraction, self.num_classes + ) + + sampled_idxs = torch.cat([sampled_fg_idxs, sampled_bg_idxs], dim=0) + return sampled_idxs, gt_classes[sampled_idxs] + + @torch.no_grad() + def label_and_sample_proposals( + self, proposals: List[Instances], targets: List[Instances] + ) -> List[Instances]: + """ + Prepare some proposals to be used to train the ROI heads. + It performs box matching between `proposals` and `targets`, and assigns + training labels to the proposals. + It returns ``self.batch_size_per_image`` random samples from proposals and groundtruth + boxes, with a fraction of positives that is no larger than + ``self.positive_fraction``. 
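+        For example, with ``batch_size_per_image=512`` and
+        ``positive_fraction=0.25`` (detectron2's defaults), at most 128 of the
+        512 sampled proposals per image are labeled foreground.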
+
+        Args:
+            See :meth:`ROIHeads.forward`
+
+        Returns:
+            list[Instances]:
+                length `N` list of `Instances`s containing the proposals
+                sampled for training. Each `Instances` has the following fields:
+
+                - proposal_boxes: the proposal boxes
+                - gt_boxes: the ground-truth box that the proposal is assigned to
+                  (this is only meaningful if the proposal has a label > 0; if label = 0
+                  then the ground-truth box is random)
+
+                Other fields such as "gt_classes" and "gt_masks" that are included in `targets`.
+        """
+        # Augment proposals with ground-truth boxes.
+        # In the case of learned proposals (e.g., RPN), when training starts
+        # the proposals will be low quality due to random initialization.
+        # It's possible that none of these initial
+        # proposals have high enough overlap with the gt objects to be used
+        # as positive examples for the second stage components (box head,
+        # cls head, mask head). Adding the gt boxes to the set of proposals
+        # ensures that the second stage components will have some positive
+        # examples from the start of training. For RPN, this augmentation improves
+        # convergence and empirically improves box AP on COCO by about 0.5
+        # points (under one tested configuration).
+        if self.proposal_append_gt:
+            proposals = add_ground_truth_to_proposals(targets, proposals)
+
+        proposals_with_gt = []
+
+        num_fg_samples = []
+        num_bg_samples = []
+        for proposals_per_image, targets_per_image in zip(proposals, targets):
+            has_gt = len(targets_per_image) > 0
+            match_quality_matrix = pairwise_iou(
+                targets_per_image.gt_boxes, proposals_per_image.proposal_boxes
+            )
+            matched_idxs, matched_labels = self.proposal_matcher(match_quality_matrix)
+            sampled_idxs, gt_classes = self._sample_proposals(
+                matched_idxs, matched_labels, targets_per_image.gt_classes
+            )
+
+            # Set target attributes of the sampled proposals:
+            proposals_per_image = proposals_per_image[sampled_idxs]
+            proposals_per_image.gt_classes = gt_classes
+
+            if has_gt:
+                sampled_targets = matched_idxs[sampled_idxs]
+                # We index all the attributes of targets that start with "gt_"
+                # and have not been added to proposals yet (="gt_classes").
+                # NOTE: here the indexing wastes some compute, because heads
+                # like masks, keypoints, etc, will filter the proposals again,
+                # (by foreground/background, or number of keypoints in the image, etc)
+                # so we essentially index the data twice.
+                for (trg_name, trg_value) in targets_per_image.get_fields().items():
+                    if trg_name.startswith("gt_") and not proposals_per_image.has(trg_name):
+                        proposals_per_image.set(trg_name, trg_value[sampled_targets])
+            # If no GT is given in the image, we don't know what a dummy gt value can be.
+            # Therefore the returned proposals won't have any gt_* fields, except for a
+            # gt_classes full of background label.
+
+            num_bg_samples.append((gt_classes == self.num_classes).sum().item())
+            num_fg_samples.append(gt_classes.numel() - num_bg_samples[-1])
+            proposals_with_gt.append(proposals_per_image)
+
+        # Log the number of fg/bg samples that are selected for training ROI heads
+        storage = get_event_storage()
+        storage.put_scalar("roi_head/num_fg_samples", np.mean(num_fg_samples))
+        storage.put_scalar("roi_head/num_bg_samples", np.mean(num_bg_samples))
+
+        return proposals_with_gt
+
+    def forward(
+        self,
+        images: ImageList,
+        features: Dict[str, torch.Tensor],
+        proposals: List[Instances],
+        targets: Optional[List[Instances]] = None,
+    ) -> Tuple[List[Instances], Dict[str, torch.Tensor]]:
+        """
+        Args:
+            images (ImageList):
+            features (dict[str,Tensor]): input data as a mapping from feature
+                map name to tensor. Axis 0 represents the number of images `N` in
+                the input data; axes 1-3 are channels, height, and width, which may
+                vary between feature maps (e.g., if a feature pyramid is used).
+            proposals (list[Instances]): length `N` list of `Instances`. The i-th
+                `Instances` contains object proposals for the i-th input image,
+                with fields "proposal_boxes" and "objectness_logits".
+            targets (list[Instances], optional): length `N` list of `Instances`. The i-th
+                `Instances` contains the ground-truth per-instance annotations
+                for the i-th input image. Specify `targets` during training only.
+                It may have the following fields:
+
+                - gt_boxes: the bounding box of each instance.
+                - gt_classes: the label for each instance with a category ranging in [0, #class].
+                - gt_masks: PolygonMasks or BitMasks, the ground-truth masks of each instance.
+                - gt_keypoints: NxKx3, the ground-truth keypoints for each instance.
+
+        Returns:
+            list[Instances]: length `N` list of `Instances` containing the
+            detected instances. Returned during inference only; may be [] during training.
+
+            dict[str->Tensor]:
+                mapping from a named loss to a tensor storing the loss. Used during training only.
+        """
+        raise NotImplementedError()
+
+
+@ROI_HEADS_REGISTRY.register()
+class Res5ROIHeads(ROIHeads):
+    """
+    The ROIHeads in a typical "C4" R-CNN model, where
+    the box and mask head share the cropping and
+    the per-region feature computation by a Res5 block.
+    See :paper:`ResNet` Appendix A.
+    """
+
+    @configurable
+    def __init__(
+        self,
+        *,
+        in_features: List[str],
+        pooler: ROIPooler,
+        res5: nn.Module,
+        box_predictor: nn.Module,
+        mask_head: Optional[nn.Module] = None,
+        **kwargs,
+    ):
+        """
+        NOTE: this interface is experimental.
+
+        Args:
+            in_features (list[str]): list of backbone feature map names to use for
+                feature extraction
+            pooler (ROIPooler): pooler to extract region features from backbone
+            res5 (nn.Sequential): a CNN to compute per-region features, to be used by
+                ``box_predictor`` and ``mask_head``. Typically this is a "res5"
+                block from a ResNet.
+            box_predictor (nn.Module): make box predictions from the feature.
+                Should have the same interface as :class:`FastRCNNOutputLayers`.
+            mask_head (nn.Module): transform features to make mask predictions
+        """
+        super().__init__(**kwargs)
+        self.in_features = in_features
+        self.pooler = pooler
+        if isinstance(res5, (list, tuple)):
+            res5 = nn.Sequential(*res5)
+        self.res5 = res5
+        self.box_predictor = box_predictor
+        self.mask_on = mask_head is not None
+        if self.mask_on:
+            self.mask_head = mask_head
+
+    @classmethod
+    def from_config(cls, cfg, input_shape):
+        # fmt: off
+        ret = super().from_config(cfg)
+        in_features = ret["in_features"] = cfg.MODEL.ROI_HEADS.IN_FEATURES
+        pooler_resolution = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION
+        pooler_type       = cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE
+        pooler_scales     = (1.0 / input_shape[in_features[0]].stride, )
+        sampling_ratio    = cfg.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO
+        mask_on           = cfg.MODEL.MASK_ON
+        # fmt: on
+        assert not cfg.MODEL.KEYPOINT_ON
+        assert len(in_features) == 1
+
+        ret["pooler"] = ROIPooler(
+            output_size=pooler_resolution,
+            scales=pooler_scales,
+            sampling_ratio=sampling_ratio,
+            pooler_type=pooler_type,
+        )
+
+        # Compatibility with old moco code. Might be useful.
+        # See notes in StandardROIHeads.from_config
+        if not inspect.ismethod(cls._build_res5_block):
+            logger.warning(
+                "The behavior of _build_res5_block may change. "
+                "Please do not depend on private methods."
+            )
+            cls._build_res5_block = classmethod(cls._build_res5_block)
+
+        ret["res5"], out_channels = cls._build_res5_block(cfg)
+        ret["box_predictor"] = FastRCNNOutputLayers(
+            cfg, ShapeSpec(channels=out_channels, height=1, width=1)
+        )
+
+        if mask_on:
+            ret["mask_head"] = build_mask_head(
+                cfg,
+                ShapeSpec(channels=out_channels, width=pooler_resolution, height=pooler_resolution),
+            )
+        return ret
+
+    @classmethod
+    def _build_res5_block(cls, cfg):
+        # fmt: off
+        stage_channel_factor = 2 ** 3  # res5 is 8x res2
+        num_groups           = cfg.MODEL.RESNETS.NUM_GROUPS
+        width_per_group      = cfg.MODEL.RESNETS.WIDTH_PER_GROUP
+        bottleneck_channels  = num_groups * width_per_group * stage_channel_factor
+        out_channels         = cfg.MODEL.RESNETS.RES2_OUT_CHANNELS * stage_channel_factor
+        stride_in_1x1        = cfg.MODEL.RESNETS.STRIDE_IN_1X1
+        norm                 = cfg.MODEL.RESNETS.NORM
+        assert not cfg.MODEL.RESNETS.DEFORM_ON_PER_STAGE[-1], \
+            "Deformable conv is not yet supported in res5 head."
+        # fmt: on
+
+        blocks = ResNet.make_stage(
+            BottleneckBlock,
+            3,
+            stride_per_block=[2, 1, 1],
+            in_channels=out_channels // 2,
+            bottleneck_channels=bottleneck_channels,
+            out_channels=out_channels,
+            num_groups=num_groups,
+            norm=norm,
+            stride_in_1x1=stride_in_1x1,
+        )
+        return nn.Sequential(*blocks), out_channels
+
+    def _shared_roi_transform(self, features: List[torch.Tensor], boxes: List[Boxes]):
+        x = self.pooler(features, boxes)
+        return self.res5(x)
+
+    def forward(
+        self,
+        images: ImageList,
+        features: Dict[str, torch.Tensor],
+        proposals: List[Instances],
+        targets: Optional[List[Instances]] = None,
+    ):
+        """
+        See :meth:`ROIHeads.forward`.
+ """ + del images + + if self.training: + assert targets + proposals = self.label_and_sample_proposals(proposals, targets) + del targets + + proposal_boxes = [x.proposal_boxes for x in proposals] + box_features = self._shared_roi_transform( + [features[f] for f in self.in_features], proposal_boxes + ) + predictions = self.box_predictor(box_features.mean(dim=[2, 3])) + + if self.training: + del features + losses = self.box_predictor.losses(predictions, proposals) + if self.mask_on: + proposals, fg_selection_masks = select_foreground_proposals( + proposals, self.num_classes + ) + # Since the ROI feature transform is shared between boxes and masks, + # we don't need to recompute features. The mask loss is only defined + # on foreground proposals, so we need to select out the foreground + # features. + mask_features = box_features[torch.cat(fg_selection_masks, dim=0)] + del box_features + losses.update(self.mask_head(mask_features, proposals)) + return [], losses + else: + pred_instances, _ = self.box_predictor.inference(predictions, proposals) + pred_instances = self.forward_with_given_boxes(features, pred_instances) + return pred_instances, {} + + def forward_with_given_boxes( + self, features: Dict[str, torch.Tensor], instances: List[Instances] + ) -> List[Instances]: + """ + Use the given boxes in `instances` to produce other (non-box) per-ROI outputs. + + Args: + features: same as in `forward()` + instances (list[Instances]): instances to predict other outputs. Expect the keys + "pred_boxes" and "pred_classes" to exist. + + Returns: + instances (Instances): + the same `Instances` object, with extra + fields such as `pred_masks` or `pred_keypoints`. + """ + assert not self.training + assert instances[0].has("pred_boxes") and instances[0].has("pred_classes") + + if self.mask_on: + feature_list = [features[f] for f in self.in_features] + x = self._shared_roi_transform(feature_list, [x.pred_boxes for x in instances]) + return self.mask_head(x, instances) + else: + return instances + + +@ROI_HEADS_REGISTRY.register() +class StandardROIHeads(ROIHeads): + """ + It's "standard" in a sense that there is no ROI transform sharing + or feature sharing between tasks. + Each head independently processes the input features by each head's + own pooler and head. + + This class is used by most models, such as FPN and C5. + To implement more models, you can subclass it and implement a different + :meth:`forward()` or a head. + """ + + @configurable + def __init__( + self, + *, + box_in_features: List[str], + box_pooler: ROIPooler, + box_head: nn.Module, + box_predictor: nn.Module, + mask_in_features: Optional[List[str]] = None, + mask_pooler: Optional[ROIPooler] = None, + mask_head: Optional[nn.Module] = None, + keypoint_in_features: Optional[List[str]] = None, + keypoint_pooler: Optional[ROIPooler] = None, + keypoint_head: Optional[nn.Module] = None, + train_on_pred_boxes: bool = False, + **kwargs, + ): + """ + NOTE: this interface is experimental. + + Args: + box_in_features (list[str]): list of feature names to use for the box head. + box_pooler (ROIPooler): pooler to extra region features for box head + box_head (nn.Module): transform features to make box predictions + box_predictor (nn.Module): make box predictions from the feature. + Should have the same interface as :class:`FastRCNNOutputLayers`. + mask_in_features (list[str]): list of feature names to use for the mask + pooler or mask head. None if not using mask head. 
+ mask_pooler (ROIPooler): pooler to extract region features from image features. + The mask head will then take region features to make predictions. + If None, the mask head will directly take the dict of image features + defined by `mask_in_features` + mask_head (nn.Module): transform features to make mask predictions + keypoint_in_features, keypoint_pooler, keypoint_head: similar to ``mask_*``. + train_on_pred_boxes (bool): whether to use proposal boxes or + predicted boxes from the box head to train other heads. + """ + super().__init__(**kwargs) + # keep self.in_features for backward compatibility + self.in_features = self.box_in_features = box_in_features + self.box_pooler = box_pooler + self.box_head = box_head + self.box_predictor = box_predictor + + self.mask_on = mask_in_features is not None + if self.mask_on: + self.mask_in_features = mask_in_features + self.mask_pooler = mask_pooler + self.mask_head = mask_head + + self.keypoint_on = keypoint_in_features is not None + if self.keypoint_on: + self.keypoint_in_features = keypoint_in_features + self.keypoint_pooler = keypoint_pooler + self.keypoint_head = keypoint_head + + self.train_on_pred_boxes = train_on_pred_boxes + + @classmethod + def from_config(cls, cfg, input_shape): + ret = super().from_config(cfg) + ret["train_on_pred_boxes"] = cfg.MODEL.ROI_BOX_HEAD.TRAIN_ON_PRED_BOXES + # Subclasses that have not been updated to use from_config style construction + # may have overridden _init_*_head methods. In this case, those overridden methods + # will not be classmethods and we need to avoid trying to call them here. + # We test for this with ismethod which only returns True for bound methods of cls. + # Such subclasses will need to handle calling their overridden _init_*_head methods. + if inspect.ismethod(cls._init_box_head): + ret.update(cls._init_box_head(cfg, input_shape)) + if inspect.ismethod(cls._init_mask_head): + ret.update(cls._init_mask_head(cfg, input_shape)) + if inspect.ismethod(cls._init_keypoint_head): + ret.update(cls._init_keypoint_head(cfg, input_shape)) + return ret + + @classmethod + def _init_box_head(cls, cfg, input_shape): + # fmt: off + in_features = cfg.MODEL.ROI_HEADS.IN_FEATURES + pooler_resolution = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION + pooler_scales = tuple(1.0 / input_shape[k].stride for k in in_features) + sampling_ratio = cfg.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO + pooler_type = cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE + # fmt: on + + # If StandardROIHeads is applied on multiple feature maps (as in FPN), + # then we share the same predictors and therefore the channel counts must be the same + in_channels = [input_shape[f].channels for f in in_features] + # Check all channel counts are equal + assert len(set(in_channels)) == 1, in_channels + in_channels = in_channels[0] + + box_pooler = ROIPooler( + output_size=pooler_resolution, + scales=pooler_scales, + sampling_ratio=sampling_ratio, + pooler_type=pooler_type, + ) + # Here we split "box head" and "box predictor", which is mainly due to historical reasons. + # They are used together so the "box predictor" layers should be part of the "box head". + # New subclasses of ROIHeads do not need "box predictor"s. 
+ box_head = build_box_head( + cfg, ShapeSpec(channels=in_channels, height=pooler_resolution, width=pooler_resolution) + ) + box_predictor = FastRCNNOutputLayers(cfg, box_head.output_shape) + return { + "box_in_features": in_features, + "box_pooler": box_pooler, + "box_head": box_head, + "box_predictor": box_predictor, + } + + @classmethod + def _init_mask_head(cls, cfg, input_shape): + if not cfg.MODEL.MASK_ON: + return {} + # fmt: off + in_features = cfg.MODEL.ROI_HEADS.IN_FEATURES + pooler_resolution = cfg.MODEL.ROI_MASK_HEAD.POOLER_RESOLUTION + pooler_scales = tuple(1.0 / input_shape[k].stride for k in in_features) + sampling_ratio = cfg.MODEL.ROI_MASK_HEAD.POOLER_SAMPLING_RATIO + pooler_type = cfg.MODEL.ROI_MASK_HEAD.POOLER_TYPE + # fmt: on + + in_channels = [input_shape[f].channels for f in in_features][0] + + ret = {"mask_in_features": in_features} + ret["mask_pooler"] = ( + ROIPooler( + output_size=pooler_resolution, + scales=pooler_scales, + sampling_ratio=sampling_ratio, + pooler_type=pooler_type, + ) + if pooler_type + else None + ) + if pooler_type: + shape = ShapeSpec( + channels=in_channels, width=pooler_resolution, height=pooler_resolution + ) + else: + shape = {f: input_shape[f] for f in in_features} + ret["mask_head"] = build_mask_head(cfg, shape) + return ret + + @classmethod + def _init_keypoint_head(cls, cfg, input_shape): + if not cfg.MODEL.KEYPOINT_ON: + return {} + # fmt: off + in_features = cfg.MODEL.ROI_HEADS.IN_FEATURES + pooler_resolution = cfg.MODEL.ROI_KEYPOINT_HEAD.POOLER_RESOLUTION + pooler_scales = tuple(1.0 / input_shape[k].stride for k in in_features) # noqa + sampling_ratio = cfg.MODEL.ROI_KEYPOINT_HEAD.POOLER_SAMPLING_RATIO + pooler_type = cfg.MODEL.ROI_KEYPOINT_HEAD.POOLER_TYPE + # fmt: on + + in_channels = [input_shape[f].channels for f in in_features][0] + + ret = {"keypoint_in_features": in_features} + ret["keypoint_pooler"] = ( + ROIPooler( + output_size=pooler_resolution, + scales=pooler_scales, + sampling_ratio=sampling_ratio, + pooler_type=pooler_type, + ) + if pooler_type + else None + ) + if pooler_type: + shape = ShapeSpec( + channels=in_channels, width=pooler_resolution, height=pooler_resolution + ) + else: + shape = {f: input_shape[f] for f in in_features} + ret["keypoint_head"] = build_keypoint_head(cfg, shape) + return ret + + def forward( + self, + images: ImageList, + features: Dict[str, torch.Tensor], + proposals: List[Instances], + targets: Optional[List[Instances]] = None, + ) -> Tuple[List[Instances], Dict[str, torch.Tensor]]: + """ + See :class:`ROIHeads.forward`. + """ + del images + if self.training: + assert targets, "'targets' argument is required during training" + proposals = self.label_and_sample_proposals(proposals, targets) + del targets + + if self.training: + losses = self._forward_box(features, proposals) + # Usually the original proposals used by the box head are used by the mask, keypoint + # heads. But when `self.train_on_pred_boxes is True`, proposals will contain boxes + # predicted by the box head. + losses.update(self._forward_mask(features, proposals)) + losses.update(self._forward_keypoint(features, proposals)) + return proposals, losses + else: + pred_instances = self._forward_box(features, proposals) + # During inference cascaded prediction is used: the mask and keypoints heads are only + # applied to the top scoring box detections. 
+            pred_instances = self.forward_with_given_boxes(features, pred_instances)
+            return pred_instances, {}
+
+    def forward_with_given_boxes(
+        self, features: Dict[str, torch.Tensor], instances: List[Instances]
+    ) -> List[Instances]:
+        """
+        Use the given boxes in `instances` to produce other (non-box) per-ROI outputs.
+
+        This is useful for downstream tasks where a box is known, but one needs to obtain
+        other attributes (outputs of other heads).
+        Test-time augmentation also uses this.
+
+        Args:
+            features: same as in `forward()`
+            instances (list[Instances]): instances to predict other outputs. Expect the keys
+                "pred_boxes" and "pred_classes" to exist.
+
+        Returns:
+            list[Instances]:
+                the same `Instances` objects, with extra
+                fields such as `pred_masks` or `pred_keypoints`.
+        """
+        assert not self.training
+        assert instances[0].has("pred_boxes") and instances[0].has("pred_classes")
+
+        instances = self._forward_mask(features, instances)
+        instances = self._forward_keypoint(features, instances)
+        return instances
+
+    def _forward_box(self, features: Dict[str, torch.Tensor], proposals: List[Instances]):
+        """
+        Forward logic of the box prediction branch. If `self.train_on_pred_boxes is True`,
+        the function puts predicted boxes in the `proposal_boxes` field of `proposals` argument.
+
+        Args:
+            features (dict[str, Tensor]): mapping from feature map names to tensor.
+                Same as in :meth:`ROIHeads.forward`.
+            proposals (list[Instances]): the per-image object proposals with
+                their matching ground truth.
+                Each has fields "proposal_boxes", and "objectness_logits",
+                "gt_classes", "gt_boxes".
+
+        Returns:
+            In training, a dict of losses.
+            In inference, a list of `Instances`, the predicted instances.
+        """
+        features = [features[f] for f in self.box_in_features]
+        box_features = self.box_pooler(features, [x.proposal_boxes for x in proposals])
+        box_features = self.box_head(box_features)
+        predictions = self.box_predictor(box_features)
+        del box_features
+
+        if self.training:
+            losses = self.box_predictor.losses(predictions, proposals)
+            # proposals is modified in-place below, so losses must be computed first.
+            if self.train_on_pred_boxes:
+                with torch.no_grad():
+                    pred_boxes = self.box_predictor.predict_boxes_for_gt_classes(
+                        predictions, proposals
+                    )
+                    for proposals_per_image, pred_boxes_per_image in zip(proposals, pred_boxes):
+                        proposals_per_image.proposal_boxes = Boxes(pred_boxes_per_image)
+            return losses
+        else:
+            pred_instances, _ = self.box_predictor.inference(predictions, proposals)
+            return pred_instances
+
+    def _forward_mask(self, features: Dict[str, torch.Tensor], instances: List[Instances]):
+        """
+        Forward logic of the mask prediction branch.
+
+        Args:
+            features (dict[str, Tensor]): mapping from feature map names to tensor.
+                Same as in :meth:`ROIHeads.forward`.
+            instances (list[Instances]): the per-image instances to train/predict masks.
+                In training, they can be the proposals.
+                In inference, they can be the boxes predicted by R-CNN box head.
+
+        Returns:
+            In training, a dict of losses.
+            In inference, update `instances` with new fields "pred_masks" and return it.
+        """
+        if not self.mask_on:
+            return {} if self.training else instances
+
+        if self.training:
+            # head is only trained on positive proposals.
+            instances, _ = select_foreground_proposals(instances, self.num_classes)
+
+        if self.mask_pooler is not None:
+            features = [features[f] for f in self.mask_in_features]
+            boxes = [x.proposal_boxes if self.training else x.pred_boxes for x in instances]
+            features = self.mask_pooler(features, boxes)
+        else:
+            features = {f: features[f] for f in self.mask_in_features}
+        return self.mask_head(features, instances)
+
+    def _forward_keypoint(self, features: Dict[str, torch.Tensor], instances: List[Instances]):
+        """
+        Forward logic of the keypoint prediction branch.
+
+        Args:
+            features (dict[str, Tensor]): mapping from feature map names to tensor.
+                Same as in :meth:`ROIHeads.forward`.
+            instances (list[Instances]): the per-image instances to train/predict keypoints.
+                In training, they can be the proposals.
+                In inference, they can be the boxes predicted by R-CNN box head.
+
+        Returns:
+            In training, a dict of losses.
+            In inference, update `instances` with new fields "pred_keypoints" and return it.
+        """
+        if not self.keypoint_on:
+            return {} if self.training else instances
+
+        if self.training:
+            # head is only trained on positive proposals with >=1 visible keypoints.
+            instances, _ = select_foreground_proposals(instances, self.num_classes)
+            instances = select_proposals_with_visible_keypoints(instances)
+
+        if self.keypoint_pooler is not None:
+            features = [features[f] for f in self.keypoint_in_features]
+            boxes = [x.proposal_boxes if self.training else x.pred_boxes for x in instances]
+            features = self.keypoint_pooler(features, boxes)
+        else:
+            features = {f: features[f] for f in self.keypoint_in_features}
+        return self.keypoint_head(features, instances)
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/rotated_fast_rcnn.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/rotated_fast_rcnn.py
new file mode 100644
index 0000000..b1eedee
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/roi_heads/rotated_fast_rcnn.py
@@ -0,0 +1,270 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+import logging
+import numpy as np
+import torch
+
+from detectron2.config import configurable
+from detectron2.layers import ShapeSpec, batched_nms_rotated
+from detectron2.structures import Instances, RotatedBoxes, pairwise_iou_rotated
+from detectron2.utils.events import get_event_storage
+
+from ..box_regression import Box2BoxTransformRotated
+from ..poolers import ROIPooler
+from ..proposal_generator.proposal_utils import add_ground_truth_to_proposals
+from .box_head import build_box_head
+from .fast_rcnn import FastRCNNOutputLayers
+from .roi_heads import ROI_HEADS_REGISTRY, StandardROIHeads
+
+logger = logging.getLogger(__name__)
+
+"""
+Shape shorthand in this module:
+
+    N: number of images in the minibatch
+    R: number of ROIs, combined over all images, in the minibatch
+    Ri: number of ROIs in image i
+    K: number of foreground classes. E.g., there are 80 foreground classes in COCO.
+
+Naming convention:
+
+    deltas: refers to the 5-d (dx, dy, dw, dh, da) deltas that parameterize the box2box
+    transform (see :class:`box_regression.Box2BoxTransformRotated`).
+
+    pred_class_logits: predicted class scores in [-inf, +inf]; use
+        softmax(pred_class_logits) to estimate P(class).
+
+    gt_classes: ground-truth classification labels in [0, K], where [0, K) represent
+        foreground object classes and K represents the background class.
+
+    pred_proposal_deltas: predicted rotated box2box transform deltas for transforming proposals
+        to detection box predictions.
+
+    gt_proposal_deltas: ground-truth rotated box2box transform deltas
+"""
+
+
+def fast_rcnn_inference_rotated(
+    boxes, scores, image_shapes, score_thresh, nms_thresh, topk_per_image
+):
+    """
+    Call `fast_rcnn_inference_single_image_rotated` for all images.
+
+    Args:
+        boxes (list[Tensor]): A list of Tensors of predicted class-specific or class-agnostic
+            boxes for each image. Element i has shape (Ri, K * 5) if doing
+            class-specific regression, or (Ri, 5) if doing class-agnostic
+            regression, where Ri is the number of predicted objects for image i.
+            This is compatible with the output of :meth:`FastRCNNOutputLayers.predict_boxes`.
+        scores (list[Tensor]): A list of Tensors of predicted class scores for each image.
+            Element i has shape (Ri, K + 1), where Ri is the number of predicted objects
+            for image i. Compatible with the output of :meth:`FastRCNNOutputLayers.predict_probs`.
+        image_shapes (list[tuple]): A list of (width, height) tuples for each image in the batch.
+        score_thresh (float): Only return detections with a confidence score exceeding this
+            threshold.
+        nms_thresh (float): The threshold to use for box non-maximum suppression. Value in [0, 1].
+        topk_per_image (int): The number of top scoring detections to return. Set < 0 to return
+            all detections.
+
+    Returns:
+        instances: (list[Instances]): A list of N instances, one for each image in the batch,
+            that stores the topk most confident detections.
+        kept_indices: (list[Tensor]): A list of 1D tensor of length of N, each element indicates
+            the corresponding boxes/scores index in [0, Ri) from the input, for image i.
+    """
+    result_per_image = [
+        fast_rcnn_inference_single_image_rotated(
+            boxes_per_image, scores_per_image, image_shape, score_thresh, nms_thresh, topk_per_image
+        )
+        for scores_per_image, boxes_per_image, image_shape in zip(scores, boxes, image_shapes)
+    ]
+    return [x[0] for x in result_per_image], [x[1] for x in result_per_image]
+
+
+def fast_rcnn_inference_single_image_rotated(
+    boxes, scores, image_shape, score_thresh, nms_thresh, topk_per_image
+):
+    """
+    Single-image inference. Return rotated bounding-box detection results by thresholding
+    on scores and applying rotated non-maximum suppression (Rotated NMS).
+
+    Args:
+        Same as `fast_rcnn_inference_rotated`, but with rotated boxes, scores, and image shapes
+        per image.
+
+    Returns:
+        Same as `fast_rcnn_inference_rotated`, but for only one image.
+    """
+    valid_mask = torch.isfinite(boxes).all(dim=1) & torch.isfinite(scores).all(dim=1)
+    if not valid_mask.all():
+        boxes = boxes[valid_mask]
+        scores = scores[valid_mask]
+
+    B = 5  # box dimension
+    scores = scores[:, :-1]
+    num_bbox_reg_classes = boxes.shape[1] // B
+    # Convert to Boxes to use the `clip` function ...
+    boxes = RotatedBoxes(boxes.reshape(-1, B))
+    boxes.clip(image_shape)
+    boxes = boxes.tensor.view(-1, num_bbox_reg_classes, B)  # R x C x B
+    # Filter results based on detection scores
+    filter_mask = scores > score_thresh  # R x K
+    # R' x 2. First column contains indices of the R predictions;
+    # Second column contains indices of classes.
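+    # [Editor's illustrative note:] e.g. with R = 3 proposals and K = 2 classes,
+    # filter_mask = [[True, False], [False, False], [True, True]] yields
+    # filter_inds = [[0, 0], [2, 0], [2, 1]] (row-major order).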
+    filter_inds = filter_mask.nonzero()
+    if num_bbox_reg_classes == 1:
+        boxes = boxes[filter_inds[:, 0], 0]
+    else:
+        boxes = boxes[filter_mask]
+    scores = scores[filter_mask]
+
+    # Apply per-class Rotated NMS
+    keep = batched_nms_rotated(boxes, scores, filter_inds[:, 1], nms_thresh)
+    if topk_per_image >= 0:
+        keep = keep[:topk_per_image]
+    boxes, scores, filter_inds = boxes[keep], scores[keep], filter_inds[keep]
+
+    result = Instances(image_shape)
+    result.pred_boxes = RotatedBoxes(boxes)
+    result.scores = scores
+    result.pred_classes = filter_inds[:, 1]
+
+    return result, filter_inds[:, 0]
+
+
+class RotatedFastRCNNOutputLayers(FastRCNNOutputLayers):
+    """
+    Two linear layers for predicting Rotated Fast R-CNN outputs.
+    """
+
+    @classmethod
+    def from_config(cls, cfg, input_shape):
+        args = super().from_config(cfg, input_shape)
+        args["box2box_transform"] = Box2BoxTransformRotated(
+            weights=cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_WEIGHTS
+        )
+        return args
+
+    def inference(self, predictions, proposals):
+        """
+        Returns:
+            list[Instances]: same as `fast_rcnn_inference_rotated`.
+            list[Tensor]: same as `fast_rcnn_inference_rotated`.
+        """
+        boxes = self.predict_boxes(predictions, proposals)
+        scores = self.predict_probs(predictions, proposals)
+        image_shapes = [x.image_size for x in proposals]
+
+        return fast_rcnn_inference_rotated(
+            boxes,
+            scores,
+            image_shapes,
+            self.test_score_thresh,
+            self.test_nms_thresh,
+            self.test_topk_per_image,
+        )
+
+
+@ROI_HEADS_REGISTRY.register()
+class RROIHeads(StandardROIHeads):
+    """
+    This class is used by Rotated Fast R-CNN to detect rotated boxes.
+    For now, it only supports box predictions but not mask or keypoints.
+    """
+
+    @configurable
+    def __init__(self, **kwargs):
+        """
+        NOTE: this interface is experimental.
+        """
+        super().__init__(**kwargs)
+        assert (
+            not self.mask_on and not self.keypoint_on
+        ), "Mask/Keypoints not supported in Rotated ROIHeads."
+        assert not self.train_on_pred_boxes, "train_on_pred_boxes not implemented for RROIHeads!"
+
+    @classmethod
+    def _init_box_head(cls, cfg, input_shape):
+        # fmt: off
+        in_features       = cfg.MODEL.ROI_HEADS.IN_FEATURES
+        pooler_resolution = cfg.MODEL.ROI_BOX_HEAD.POOLER_RESOLUTION
+        pooler_scales     = tuple(1.0 / input_shape[k].stride for k in in_features)
+        sampling_ratio    = cfg.MODEL.ROI_BOX_HEAD.POOLER_SAMPLING_RATIO
+        pooler_type       = cfg.MODEL.ROI_BOX_HEAD.POOLER_TYPE
+        # fmt: on
+        assert pooler_type in ["ROIAlignRotated"], pooler_type
+        # assume all channel counts are equal
+        in_channels = [input_shape[f].channels for f in in_features][0]
+
+        box_pooler = ROIPooler(
+            output_size=pooler_resolution,
+            scales=pooler_scales,
+            sampling_ratio=sampling_ratio,
+            pooler_type=pooler_type,
+        )
+        box_head = build_box_head(
+            cfg, ShapeSpec(channels=in_channels, height=pooler_resolution, width=pooler_resolution)
+        )
+        # This line is the only difference v.s. StandardROIHeads
+        box_predictor = RotatedFastRCNNOutputLayers(cfg, box_head.output_shape)
+        return {
+            "box_in_features": in_features,
+            "box_pooler": box_pooler,
+            "box_head": box_head,
+            "box_predictor": box_predictor,
+        }
+
+    @torch.no_grad()
+    def label_and_sample_proposals(self, proposals, targets):
+        """
+        Prepare some proposals to be used to train the RROI heads.
+        It performs box matching between `proposals` and `targets`, and assigns
+        training labels to the proposals.
+        It returns `self.batch_size_per_image` random samples from proposals and groundtruth boxes,
+        with a fraction of positives that is no larger than `self.positive_fraction`.
+
+        Args:
+            See :meth:`StandardROIHeads.forward`
+
+        Returns:
+            list[Instances]: length `N` list of `Instances`s containing the proposals
+            sampled for training. Each `Instances` has the following fields:
+            - proposal_boxes: the rotated proposal boxes
+            - gt_boxes: the ground-truth rotated boxes that the proposal is assigned to
+              (this is only meaningful if the proposal has a label > 0; if label = 0
+              then the ground-truth box is random)
+            - gt_classes: the ground-truth classification label for each proposal
+        """
+        if self.proposal_append_gt:
+            proposals = add_ground_truth_to_proposals(targets, proposals)
+
+        proposals_with_gt = []
+
+        num_fg_samples = []
+        num_bg_samples = []
+        for proposals_per_image, targets_per_image in zip(proposals, targets):
+            has_gt = len(targets_per_image) > 0
+            match_quality_matrix = pairwise_iou_rotated(
+                targets_per_image.gt_boxes, proposals_per_image.proposal_boxes
+            )
+            matched_idxs, matched_labels = self.proposal_matcher(match_quality_matrix)
+            sampled_idxs, gt_classes = self._sample_proposals(
+                matched_idxs, matched_labels, targets_per_image.gt_classes
+            )
+
+            proposals_per_image = proposals_per_image[sampled_idxs]
+            proposals_per_image.gt_classes = gt_classes
+
+            if has_gt:
+                sampled_targets = matched_idxs[sampled_idxs]
+                proposals_per_image.gt_boxes = targets_per_image.gt_boxes[sampled_targets]
+
+            num_bg_samples.append((gt_classes == self.num_classes).sum().item())
+            num_fg_samples.append(gt_classes.numel() - num_bg_samples[-1])
+            proposals_with_gt.append(proposals_per_image)
+
+        # Log the number of fg/bg samples that are selected for training ROI heads
+        storage = get_event_storage()
+        storage.put_scalar("roi_head/num_fg_samples", np.mean(num_fg_samples))
+        storage.put_scalar("roi_head/num_bg_samples", np.mean(num_bg_samples))
+
+        return proposals_with_gt
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/sampling.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/sampling.py
new file mode 100644
index 0000000..a2d0f66
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/sampling.py
@@ -0,0 +1,54 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+import torch
+
+from detectron2.layers import nonzero_tuple
+
+__all__ = ["subsample_labels"]
+
+
+def subsample_labels(
+    labels: torch.Tensor, num_samples: int, positive_fraction: float, bg_label: int
+):
+    """
+    Return `num_samples` (or fewer, if not enough found)
+    random samples from `labels` which is a mixture of positives & negatives.
+    It will try to return as many positives as possible without
+    exceeding `positive_fraction * num_samples`, and then try to
+    fill the remaining slots with negatives.
+
+    Args:
+        labels (Tensor): (N, ) label vector with values:
+            * -1: ignore
+            * bg_label: background ("negative") class
+            * otherwise: one or more foreground ("positive") classes
+        num_samples (int): The total number of labels with value >= 0 to return.
+            Values that are not sampled will be filled with -1 (ignore).
+        positive_fraction (float): The number of subsampled labels with values > 0
+            is `min(num_positives, int(positive_fraction * num_samples))`. The number
+            of negatives sampled is `min(num_negatives, num_samples - num_positives_sampled)`.
+            In other words, if there are not enough positives, the sample is filled with
+            negatives. If there are also not enough negatives, then as many elements are
+            sampled as is possible.
+        bg_label (int): label index of background ("negative") class.
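+
+    For example (a hypothetical sketch; values chosen only for illustration):
+    ::
+        labels = torch.tensor([1, 0, -1, 0, 1, 0])  # 2 fg, 3 bg (bg_label=0), 1 ignore
+        pos_idx, neg_idx = subsample_labels(labels, 4, 0.5, 0)
+        # len(pos_idx) == 2 and len(neg_idx) == 2 here: 2 positives allowed, rest negatives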
+ + Returns: + pos_idx, neg_idx (Tensor): + 1D vector of indices. The total length of both is `num_samples` or fewer. + """ + positive = nonzero_tuple((labels != -1) & (labels != bg_label))[0] + negative = nonzero_tuple(labels == bg_label)[0] + + num_pos = int(num_samples * positive_fraction) + # protect against not enough positive examples + num_pos = min(positive.numel(), num_pos) + num_neg = num_samples - num_pos + # protect against not enough negative examples + num_neg = min(negative.numel(), num_neg) + + # randomly select positive and negative examples + perm1 = torch.randperm(positive.numel(), device=positive.device)[:num_pos] + perm2 = torch.randperm(negative.numel(), device=negative.device)[:num_neg] + + pos_idx = positive[perm1] + neg_idx = negative[perm2] + return pos_idx, neg_idx diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/modeling/test_time_augmentation.py b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/test_time_augmentation.py new file mode 100644 index 0000000..373e6bf --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/modeling/test_time_augmentation.py @@ -0,0 +1,307 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import copy +import numpy as np +from contextlib import contextmanager +from itertools import count +from typing import List +import torch +from fvcore.transforms import HFlipTransform, NoOpTransform +from torch import nn +from torch.nn.parallel import DistributedDataParallel + +from detectron2.config import configurable +from detectron2.data.detection_utils import read_image +from detectron2.data.transforms import ( + RandomFlip, + ResizeShortestEdge, + ResizeTransform, + apply_augmentations, +) +from detectron2.structures import Boxes, Instances + +from .meta_arch import GeneralizedRCNN +from .postprocessing import detector_postprocess +from .roi_heads.fast_rcnn import fast_rcnn_inference_single_image + +__all__ = ["DatasetMapperTTA", "GeneralizedRCNNWithTTA"] + + +class DatasetMapperTTA: + """ + Implement test-time augmentation for detection data. + It is a callable which takes a dataset dict from a detection dataset, + and returns a list of dataset dicts where the images + are augmented from the input image by the transformations defined in the config. + This is used for test-time augmentation. + """ + + @configurable + def __init__(self, min_sizes: List[int], max_size: int, flip: bool): + """ + Args: + min_sizes: list of short-edge size to resize the image to + max_size: maximum height or width of resized images + flip: whether to apply flipping augmentation + """ + self.min_sizes = min_sizes + self.max_size = max_size + self.flip = flip + + @classmethod + def from_config(cls, cfg): + return { + "min_sizes": cfg.TEST.AUG.MIN_SIZES, + "max_size": cfg.TEST.AUG.MAX_SIZE, + "flip": cfg.TEST.AUG.FLIP, + } + + def __call__(self, dataset_dict): + """ + Args: + dict: a dict in standard model input format. See tutorials for details. + + Returns: + list[dict]: + a list of dicts, which contain augmented version of the input image. + The total number of dicts is ``len(min_sizes) * (2 if flip else 1)``. + Each dict has field "transforms" which is a TransformList, + containing the transforms that are used to generate this image. 
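+
+            For example (illustrative numbers): with 3 entries in ``min_sizes`` and
+            ``flip=True``, the call returns 3 * 2 = 6 augmented dicts.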
+ """ + numpy_image = dataset_dict["image"].permute(1, 2, 0).numpy() + shape = numpy_image.shape + orig_shape = (dataset_dict["height"], dataset_dict["width"]) + if shape[:2] != orig_shape: + # It transforms the "original" image in the dataset to the input image + pre_tfm = ResizeTransform(orig_shape[0], orig_shape[1], shape[0], shape[1]) + else: + pre_tfm = NoOpTransform() + + # Create all combinations of augmentations to use + aug_candidates = [] # each element is a list[Augmentation] + for min_size in self.min_sizes: + resize = ResizeShortestEdge(min_size, self.max_size) + aug_candidates.append([resize]) # resize only + if self.flip: + flip = RandomFlip(prob=1.0) + aug_candidates.append([resize, flip]) # resize + flip + + # Apply all the augmentations + ret = [] + for aug in aug_candidates: + new_image, tfms = apply_augmentations(aug, np.copy(numpy_image)) + torch_image = torch.from_numpy(np.ascontiguousarray(new_image.transpose(2, 0, 1))) + + dic = copy.deepcopy(dataset_dict) + dic["transforms"] = pre_tfm + tfms + dic["image"] = torch_image + ret.append(dic) + return ret + + +class GeneralizedRCNNWithTTA(nn.Module): + """ + A GeneralizedRCNN with test-time augmentation enabled. + Its :meth:`__call__` method has the same interface as :meth:`GeneralizedRCNN.forward`. + """ + + def __init__(self, cfg, model, tta_mapper=None, batch_size=3): + """ + Args: + cfg (CfgNode): + model (GeneralizedRCNN): a GeneralizedRCNN to apply TTA on. + tta_mapper (callable): takes a dataset dict and returns a list of + augmented versions of the dataset dict. Defaults to + `DatasetMapperTTA(cfg)`. + batch_size (int): batch the augmented images into this batch size for inference. + """ + super().__init__() + if isinstance(model, DistributedDataParallel): + model = model.module + assert isinstance( + model, GeneralizedRCNN + ), "TTA is only supported on GeneralizedRCNN. Got a model of type {}".format(type(model)) + self.cfg = cfg.clone() + assert not self.cfg.MODEL.KEYPOINT_ON, "TTA for keypoint is not supported yet" + assert ( + not self.cfg.MODEL.LOAD_PROPOSALS + ), "TTA for pre-computed proposals is not supported yet" + + self.model = model + + if tta_mapper is None: + tta_mapper = DatasetMapperTTA(cfg) + self.tta_mapper = tta_mapper + self.batch_size = batch_size + + @contextmanager + def _turn_off_roi_heads(self, attrs): + """ + Open a context where some heads in `model.roi_heads` are temporarily turned off. + Args: + attr (list[str]): the attribute in `model.roi_heads` which can be used + to turn off a specific head, e.g., "mask_on", "keypoint_on". + """ + roi_heads = self.model.roi_heads + old = {} + for attr in attrs: + try: + old[attr] = getattr(roi_heads, attr) + except AttributeError: + # The head may not be implemented in certain ROIHeads + pass + + if len(old.keys()) == 0: + yield + else: + for attr in old.keys(): + setattr(roi_heads, attr, False) + yield + for attr in old.keys(): + setattr(roi_heads, attr, old[attr]) + + def _batch_inference(self, batched_inputs, detected_instances=None): + """ + Execute inference on a list of inputs, + using batch size = self.batch_size, instead of the length of the list. 
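+        For example (illustrative): with ``batch_size=3``, seven augmented inputs are run
+        as three forward passes over batches of 3, 3, and 1 inputs.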
+ + Inputs & outputs have the same format as :meth:`GeneralizedRCNN.inference` + """ + if detected_instances is None: + detected_instances = [None] * len(batched_inputs) + + outputs = [] + inputs, instances = [], [] + for idx, input, instance in zip(count(), batched_inputs, detected_instances): + inputs.append(input) + instances.append(instance) + if len(inputs) == self.batch_size or idx == len(batched_inputs) - 1: + outputs.extend( + self.model.inference( + inputs, + instances if instances[0] is not None else None, + do_postprocess=False, + ) + ) + inputs, instances = [], [] + return outputs + + def __call__(self, batched_inputs): + """ + Same input/output format as :meth:`GeneralizedRCNN.forward` + """ + + def _maybe_read_image(dataset_dict): + ret = copy.copy(dataset_dict) + if "image" not in ret: + image = read_image(ret.pop("file_name"), self.model.input_format) + image = torch.from_numpy(np.ascontiguousarray(image.transpose(2, 0, 1))) # CHW + ret["image"] = image + if "height" not in ret and "width" not in ret: + ret["height"] = image.shape[1] + ret["width"] = image.shape[2] + return ret + + return [self._inference_one_image(_maybe_read_image(x)) for x in batched_inputs] + + def _inference_one_image(self, input): + """ + Args: + input (dict): one dataset dict with "image" field being a CHW tensor + + Returns: + dict: one output dict + """ + orig_shape = (input["height"], input["width"]) + augmented_inputs, tfms = self._get_augmented_inputs(input) + # Detect boxes from all augmented versions + with self._turn_off_roi_heads(["mask_on", "keypoint_on"]): + # temporarily disable roi heads + all_boxes, all_scores, all_classes = self._get_augmented_boxes(augmented_inputs, tfms) + # merge all detected boxes to obtain final predictions for boxes + merged_instances = self._merge_detections(all_boxes, all_scores, all_classes, orig_shape) + + if self.cfg.MODEL.MASK_ON: + # Use the detected boxes to obtain masks + augmented_instances = self._rescale_detected_boxes( + augmented_inputs, merged_instances, tfms + ) + # run forward on the detected boxes + outputs = self._batch_inference(augmented_inputs, augmented_instances) + # Delete now useless variables to avoid being out of memory + del augmented_inputs, augmented_instances + # average the predictions + merged_instances.pred_masks = self._reduce_pred_masks(outputs, tfms) + merged_instances = detector_postprocess(merged_instances, *orig_shape) + return {"instances": merged_instances} + else: + return {"instances": merged_instances} + + def _get_augmented_inputs(self, input): + augmented_inputs = self.tta_mapper(input) + tfms = [x.pop("transforms") for x in augmented_inputs] + return augmented_inputs, tfms + + def _get_augmented_boxes(self, augmented_inputs, tfms): + # 1: forward with all augmented images + outputs = self._batch_inference(augmented_inputs) + # 2: union the results + all_boxes = [] + all_scores = [] + all_classes = [] + for output, tfm in zip(outputs, tfms): + # Need to inverse the transforms on boxes, to obtain results on original image + pred_boxes = output.pred_boxes.tensor + original_pred_boxes = tfm.inverse().apply_box(pred_boxes.cpu().numpy()) + all_boxes.append(torch.from_numpy(original_pred_boxes).to(pred_boxes.device)) + + all_scores.extend(output.scores) + all_classes.extend(output.pred_classes) + all_boxes = torch.cat(all_boxes, dim=0) + return all_boxes, all_scores, all_classes + + def _merge_detections(self, all_boxes, all_scores, all_classes, shape_hw): + # select from the union of all results + num_boxes = 
len(all_boxes) + num_classes = self.cfg.MODEL.ROI_HEADS.NUM_CLASSES + # +1 because fast_rcnn_inference expects background scores as well + all_scores_2d = torch.zeros(num_boxes, num_classes + 1, device=all_boxes.device) + for idx, cls, score in zip(count(), all_classes, all_scores): + all_scores_2d[idx, cls] = score + + merged_instances, _ = fast_rcnn_inference_single_image( + all_boxes, + all_scores_2d, + shape_hw, + 1e-8, + self.cfg.MODEL.ROI_HEADS.NMS_THRESH_TEST, + self.cfg.TEST.DETECTIONS_PER_IMAGE, + ) + + return merged_instances + + def _rescale_detected_boxes(self, augmented_inputs, merged_instances, tfms): + augmented_instances = [] + for input, tfm in zip(augmented_inputs, tfms): + # Transform the target box to the augmented image's coordinate space + pred_boxes = merged_instances.pred_boxes.tensor.cpu().numpy() + pred_boxes = torch.from_numpy(tfm.apply_box(pred_boxes)) + + aug_instances = Instances( + image_size=input["image"].shape[1:3], + pred_boxes=Boxes(pred_boxes), + pred_classes=merged_instances.pred_classes, + scores=merged_instances.scores, + ) + augmented_instances.append(aug_instances) + return augmented_instances + + def _reduce_pred_masks(self, outputs, tfms): + # Should apply inverse transforms on masks. + # We assume only resize & flip are used. pred_masks is a scale-invariant + # representation, so we handle flip specially + for output, tfm in zip(outputs, tfms): + if any(isinstance(t, HFlipTransform) for t in tfm.transforms): + output.pred_masks = output.pred_masks.flip(dims=[3]) + all_pred_masks = torch.stack([o.pred_masks for o in outputs], dim=0) + avg_pred_masks = torch.mean(all_pred_masks, dim=0) + return avg_pred_masks diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/projects/README.md b/motion-gan-pipeline/preprocessing/third/detectron2/projects/README.md new file mode 100644 index 0000000..95afe7f --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/projects/README.md @@ -0,0 +1,2 @@ + +Projects live in the [`projects` directory](../../projects) under the root of this repository, but not here. diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/projects/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/projects/__init__.py new file mode 100644 index 0000000..a68207d --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/projects/__init__.py @@ -0,0 +1,31 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
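+# Usage sketch (hypothetical; only valid for in-place builds where the finder below
+# is installed): `from detectron2.projects import point_rend` resolves to
+# projects/PointRend/point_rend/__init__.py at the repository root.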
+import importlib.abc
+import importlib.util
+from pathlib import Path
+
+_PROJECTS = {
+    "point_rend": "PointRend",
+    "deeplab": "DeepLab",
+    "panoptic_deeplab": "Panoptic-DeepLab",
+}
+_PROJECT_ROOT = Path(__file__).resolve().parent.parent.parent / "projects"
+
+if _PROJECT_ROOT.is_dir():
+    # This is true only for in-place installation (pip install -e, setup.py develop),
+    # where setup(package_dir=) does not work: https://github.com/pypa/setuptools/issues/230
+
+    class _D2ProjectsFinder(importlib.abc.MetaPathFinder):
+        def find_spec(self, name, path, target=None):
+            if not name.startswith("detectron2.projects."):
+                return
+            project_name = name.split(".")[-1]
+            project_dir = _PROJECTS.get(project_name)
+            if not project_dir:
+                return
+            target_file = _PROJECT_ROOT / f"{project_dir}/{project_name}/__init__.py"
+            if not target_file.is_file():
+                return
+            return importlib.util.spec_from_file_location(name, target_file)
+
+    import sys
+
+    sys.meta_path.append(_D2ProjectsFinder())
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/solver/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/solver/__init__.py
new file mode 100644
index 0000000..9a2dbd3
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/solver/__init__.py
@@ -0,0 +1,5 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+from .build import build_lr_scheduler, build_optimizer, get_default_optimizer_params
+from .lr_scheduler import WarmupCosineLR, WarmupMultiStepLR, LRMultiplier, WarmupParamScheduler
+
+__all__ = [k for k in globals().keys() if not k.startswith("_")]
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/solver/build.py b/motion-gan-pipeline/preprocessing/third/detectron2/solver/build.py
new file mode 100644
index 0000000..42c6e88
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/solver/build.py
@@ -0,0 +1,287 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+import copy
+import itertools
+import logging
+from collections import defaultdict
+from enum import Enum
+from typing import Any, Callable, Dict, Iterable, List, Optional, Set, Type, Union
+import torch
+from fvcore.common.param_scheduler import CosineParamScheduler, MultiStepParamScheduler
+
+from detectron2.config import CfgNode
+
+from .lr_scheduler import LRMultiplier, WarmupParamScheduler
+
+_GradientClipperInput = Union[torch.Tensor, Iterable[torch.Tensor]]
+_GradientClipper = Callable[[_GradientClipperInput], None]
+
+
+class GradientClipType(Enum):
+    VALUE = "value"
+    NORM = "norm"
+
+
+def _create_gradient_clipper(cfg: CfgNode) -> _GradientClipper:
+    """
+    Creates a gradient clipping closure that clips by value or by norm,
+    according to the provided config.
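+
+    For example (hypothetical config values): ``CLIP_TYPE="norm"``, ``CLIP_VALUE=1.0``,
+    ``NORM_TYPE=2.0`` yields a closure that calls `torch.nn.utils.clip_grad_norm_` with
+    max_norm 1.0, while ``CLIP_TYPE="value"`` yields one that calls
+    `torch.nn.utils.clip_grad_value_`.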
+ """ + cfg = copy.deepcopy(cfg) + + def clip_grad_norm(p: _GradientClipperInput): + torch.nn.utils.clip_grad_norm_(p, cfg.CLIP_VALUE, cfg.NORM_TYPE) + + def clip_grad_value(p: _GradientClipperInput): + torch.nn.utils.clip_grad_value_(p, cfg.CLIP_VALUE) + + _GRADIENT_CLIP_TYPE_TO_CLIPPER = { + GradientClipType.VALUE: clip_grad_value, + GradientClipType.NORM: clip_grad_norm, + } + return _GRADIENT_CLIP_TYPE_TO_CLIPPER[GradientClipType(cfg.CLIP_TYPE)] + + +def _generate_optimizer_class_with_gradient_clipping( + optimizer: Type[torch.optim.Optimizer], + *, + per_param_clipper: Optional[_GradientClipper] = None, + global_clipper: Optional[_GradientClipper] = None, +) -> Type[torch.optim.Optimizer]: + """ + Dynamically creates a new type that inherits the type of a given instance + and overrides the `step` method to add gradient clipping + """ + assert ( + per_param_clipper is None or global_clipper is None + ), "Not allowed to use both per-parameter clipping and global clipping" + + def optimizer_wgc_step(self, closure=None): + if per_param_clipper is not None: + for group in self.param_groups: + for p in group["params"]: + per_param_clipper(p) + else: + # global clipper for future use with detr + # (https://github.com/facebookresearch/detr/pull/287) + all_params = itertools.chain(*[g["params"] for g in self.param_groups]) + global_clipper(all_params) + super(type(self), self).step(closure) + + OptimizerWithGradientClip = type( + optimizer.__name__ + "WithGradientClip", + (optimizer,), + {"step": optimizer_wgc_step}, + ) + return OptimizerWithGradientClip + + +def maybe_add_gradient_clipping( + cfg: CfgNode, optimizer: Type[torch.optim.Optimizer] +) -> Type[torch.optim.Optimizer]: + """ + If gradient clipping is enabled through config options, wraps the existing + optimizer type to become a new dynamically created class OptimizerWithGradientClip + that inherits the given optimizer and overrides the `step` method to + include gradient clipping. + + Args: + cfg: CfgNode, configuration options + optimizer: type. A subclass of torch.optim.Optimizer + + Return: + type: either the input `optimizer` (if gradient clipping is disabled), or + a subclass of it with gradient clipping included in the `step` method. + """ + if not cfg.SOLVER.CLIP_GRADIENTS.ENABLED: + return optimizer + if isinstance(optimizer, torch.optim.Optimizer): + optimizer_type = type(optimizer) + else: + assert issubclass(optimizer, torch.optim.Optimizer), optimizer + optimizer_type = optimizer + + grad_clipper = _create_gradient_clipper(cfg.SOLVER.CLIP_GRADIENTS) + OptimizerWithGradientClip = _generate_optimizer_class_with_gradient_clipping( + optimizer_type, per_param_clipper=grad_clipper + ) + if isinstance(optimizer, torch.optim.Optimizer): + optimizer.__class__ = OptimizerWithGradientClip # a bit hacky, not recommended + return optimizer + else: + return OptimizerWithGradientClip + + +def build_optimizer(cfg: CfgNode, model: torch.nn.Module) -> torch.optim.Optimizer: + """ + Build an optimizer from config. 
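+
+    A minimal usage sketch (assumes a populated ``cfg`` and a ``model``; names are
+    illustrative, not from this repo):
+    ::
+        opt = build_optimizer(cfg, model)     # SGD, optionally wrapped with grad clipping
+        sched = build_lr_scheduler(cfg, opt)  # see build_lr_scheduler below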
+ """ + params = get_default_optimizer_params( + model, + base_lr=cfg.SOLVER.BASE_LR, + weight_decay_norm=cfg.SOLVER.WEIGHT_DECAY_NORM, + bias_lr_factor=cfg.SOLVER.BIAS_LR_FACTOR, + weight_decay_bias=cfg.SOLVER.WEIGHT_DECAY_BIAS, + ) + return maybe_add_gradient_clipping(cfg, torch.optim.SGD)( + params, + lr=cfg.SOLVER.BASE_LR, + momentum=cfg.SOLVER.MOMENTUM, + nesterov=cfg.SOLVER.NESTEROV, + weight_decay=cfg.SOLVER.WEIGHT_DECAY, + ) + + +def get_default_optimizer_params( + model: torch.nn.Module, + base_lr: Optional[float] = None, + weight_decay: Optional[float] = None, + weight_decay_norm: Optional[float] = None, + bias_lr_factor: Optional[float] = 1.0, + weight_decay_bias: Optional[float] = None, + overrides: Optional[Dict[str, Dict[str, float]]] = None, +) -> List[Dict[str, Any]]: + """ + Get default param list for optimizer, with support for a few types of + overrides. If no overrides needed, this is equivalent to `model.parameters()`. + + Args: + base_lr: lr for every group by default. Can be omitted to use the one in optimizer. + weight_decay: weight decay for every group by default. Can be omitted to use the one + in optimizer. + weight_decay_norm: override weight decay for params in normalization layers + bias_lr_factor: multiplier of lr for bias parameters. + weight_decay_bias: override weight decay for bias parameters + overrides: if not `None`, provides values for optimizer hyperparameters + (LR, weight decay) for module parameters with a given name; e.g. + ``{"embedding": {"lr": 0.01, "weight_decay": 0.1}}`` will set the LR and + weight decay values for all module parameters named `embedding`. + + For common detection models, ``weight_decay_norm`` is the only option + needed to be set. ``bias_lr_factor,weight_decay_bias`` are legacy settings + from Detectron1 that are not found useful. + + Example: + :: + torch.optim.SGD(get_default_optimizer_params(model, weight_decay_norm=0), + lr=0.01, weight_decay=1e-4, momentum=0.9) + """ + if overrides is None: + overrides = {} + defaults = {} + if base_lr is not None: + defaults["lr"] = base_lr + if weight_decay is not None: + defaults["weight_decay"] = weight_decay + bias_overrides = {} + if bias_lr_factor is not None and bias_lr_factor != 1.0: + # NOTE: unlike Detectron v1, we now by default make bias hyperparameters + # exactly the same as regular weights. 
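+        # Illustrative arithmetic: with base_lr=0.01 and bias_lr_factor=2.0, every bias
+        # parameter is assigned lr=0.02 through the override built below.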
+ if base_lr is None: + raise ValueError("bias_lr_factor requires base_lr") + bias_overrides["lr"] = base_lr * bias_lr_factor + if weight_decay_bias is not None: + bias_overrides["weight_decay"] = weight_decay_bias + if len(bias_overrides): + if "bias" in overrides: + raise ValueError("Conflicting overrides for 'bias'") + overrides["bias"] = bias_overrides + + norm_module_types = ( + torch.nn.BatchNorm1d, + torch.nn.BatchNorm2d, + torch.nn.BatchNorm3d, + torch.nn.SyncBatchNorm, + # NaiveSyncBatchNorm inherits from BatchNorm2d + torch.nn.GroupNorm, + torch.nn.InstanceNorm1d, + torch.nn.InstanceNorm2d, + torch.nn.InstanceNorm3d, + torch.nn.LayerNorm, + torch.nn.LocalResponseNorm, + ) + params: List[Dict[str, Any]] = [] + memo: Set[torch.nn.parameter.Parameter] = set() + for module in model.modules(): + for module_param_name, value in module.named_parameters(recurse=False): + if not value.requires_grad: + continue + # Avoid duplicating parameters + if value in memo: + continue + memo.add(value) + + hyperparams = copy.copy(defaults) + if isinstance(module, norm_module_types) and weight_decay_norm is not None: + hyperparams["weight_decay"] = weight_decay_norm + hyperparams.update(overrides.get(module_param_name, {})) + params.append({"params": [value], **hyperparams}) + return reduce_param_groups(params) + + +def _expand_param_groups(params: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + # Transform parameter groups into per-parameter structure. + # Later items in `params` can overwrite parameters set in previous items. + ret = defaultdict(dict) + for item in params: + assert "params" in item + cur_params = {x: y for x, y in item.items() if x != "params"} + for param in item["params"]: + ret[param].update({"params": [param], **cur_params}) + return list(ret.values()) + + +def reduce_param_groups(params: List[Dict[str, Any]]) -> List[Dict[str, Any]]: + # Reorganize the parameter groups and merge duplicated groups. + # The number of parameter groups needs to be as small as possible in order + # to efficiently use the PyTorch multi-tensor optimizer. Therefore instead + # of using a parameter_group per single parameter, we reorganize the + # parameter groups and merge duplicated groups. This approach speeds + # up multi-tensor optimizer significantly. + params = _expand_param_groups(params) + groups = defaultdict(list) # re-group all parameter groups by their hyperparams + for item in params: + cur_params = tuple((x, y) for x, y in item.items() if x != "params") + groups[cur_params].extend(item["params"]) + ret = [] + for param_keys, param_values in groups.items(): + cur = {kv[0]: kv[1] for kv in param_keys} + cur["params"] = param_values + ret.append(cur) + return ret + + +def build_lr_scheduler( + cfg: CfgNode, optimizer: torch.optim.Optimizer +) -> torch.optim.lr_scheduler._LRScheduler: + """ + Build a LR scheduler from config. + """ + name = cfg.SOLVER.LR_SCHEDULER_NAME + + if name == "WarmupMultiStepLR": + steps = [x for x in cfg.SOLVER.STEPS if x <= cfg.SOLVER.MAX_ITER] + if len(steps) != len(cfg.SOLVER.STEPS): + logger = logging.getLogger(__name__) + logger.warning( + "SOLVER.STEPS contains values larger than SOLVER.MAX_ITER. " + "These values will be ignored." 
+ ) + sched = MultiStepParamScheduler( + values=[cfg.SOLVER.GAMMA ** k for k in range(len(steps) + 1)], + milestones=steps, + num_updates=cfg.SOLVER.MAX_ITER, + ) + elif name == "WarmupCosineLR": + end_value = cfg.SOLVER.BASE_LR_END / cfg.SOLVER.BASE_LR + assert end_value >= 0.0 and end_value <= 1.0, end_value + sched = CosineParamScheduler(1, end_value) + else: + raise ValueError("Unknown LR scheduler: {}".format(name)) + + sched = WarmupParamScheduler( + sched, + cfg.SOLVER.WARMUP_FACTOR, + min(cfg.SOLVER.WARMUP_ITERS / cfg.SOLVER.MAX_ITER, 1.0), + cfg.SOLVER.WARMUP_METHOD, + ) + return LRMultiplier(optimizer, multiplier=sched, max_iter=cfg.SOLVER.MAX_ITER) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/solver/lr_scheduler.py b/motion-gan-pipeline/preprocessing/third/detectron2/solver/lr_scheduler.py new file mode 100644 index 0000000..8803e87 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/solver/lr_scheduler.py @@ -0,0 +1,238 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import logging +import math +from bisect import bisect_right +from typing import List +import torch +from fvcore.common.param_scheduler import ( + CompositeParamScheduler, + ConstantParamScheduler, + LinearParamScheduler, + ParamScheduler, +) + +logger = logging.getLogger(__name__) + + +class WarmupParamScheduler(CompositeParamScheduler): + """ + Add an initial warmup stage to another scheduler. + """ + + def __init__( + self, + scheduler: ParamScheduler, + warmup_factor: float, + warmup_length: float, + warmup_method: str = "linear", + ): + """ + Args: + scheduler: warmup will be added at the beginning of this scheduler + warmup_factor: the factor w.r.t the initial value of ``scheduler``, e.g. 0.001 + warmup_length: the relative length (in [0, 1]) of warmup steps w.r.t the entire + training, e.g. 0.01 + warmup_method: one of "linear" or "constant" + """ + end_value = scheduler(warmup_length) # the value to reach when warmup ends + start_value = warmup_factor * scheduler(0.0) + if warmup_method == "constant": + warmup = ConstantParamScheduler(start_value) + elif warmup_method == "linear": + warmup = LinearParamScheduler(start_value, end_value) + else: + raise ValueError("Unknown warmup method: {}".format(warmup_method)) + super().__init__( + [warmup, scheduler], + interval_scaling=["rescaled", "fixed"], + lengths=[warmup_length, 1 - warmup_length], + ) + + +class LRMultiplier(torch.optim.lr_scheduler._LRScheduler): + """ + A LRScheduler which uses fvcore :class:`ParamScheduler` to multiply the + learning rate of each param in the optimizer. + Every step, the learning rate of each parameter becomes its initial value + multiplied by the output of the given :class:`ParamScheduler`. + + The absolute learning rate value of each parameter can be different. + This scheduler can be used as long as the relative scale among them do + not change during training. + + Examples: + :: + LRMultiplier( + opt, + WarmupParamScheduler( + MultiStepParamScheduler( + [1, 0.1, 0.01], + milestones=[60000, 80000], + num_updates=90000, + ), 0.001, 100 / 90000 + ), + max_iter=90000 + ) + """ + + # NOTES: in the most general case, every LR can use its own scheduler. + # Supporting this requires interaction with the optimizer when its parameter + # group is initialized. For example, classyvision implements its own optimizer + # that allows different schedulers for every parameter group. 
+    # To avoid this complexity, we use this class to support the most common cases
+    # where the relative scale among all LRs stays unchanged during training. In this
+    # case we only need a total of one scheduler that defines the relative LR multiplier.
+
+    def __init__(
+        self,
+        optimizer: torch.optim.Optimizer,
+        multiplier: ParamScheduler,
+        max_iter: int,
+        last_iter: int = -1,
+    ):
+        """
+        Args:
+            optimizer, last_iter: See ``torch.optim.lr_scheduler._LRScheduler``.
+                ``last_iter`` is the same as ``last_epoch``.
+            multiplier: a fvcore ParamScheduler that defines the multiplier on
+                every LR of the optimizer
+            max_iter: the total number of training iterations
+        """
+        if not isinstance(multiplier, ParamScheduler):
+            raise ValueError(
+                "LRMultiplier(multiplier=) must be an instance of fvcore "
+                f"ParamScheduler. Got {multiplier} instead."
+            )
+        self._multiplier = multiplier
+        self._max_iter = max_iter
+        super().__init__(optimizer, last_epoch=last_iter)
+
+    def state_dict(self):
+        # fvcore schedulers are stateless. Only keep pytorch scheduler states
+        return {"base_lrs": self.base_lrs, "last_epoch": self.last_epoch}
+
+    def get_lr(self) -> List[float]:
+        multiplier = self._multiplier(self.last_epoch / self._max_iter)
+        return [base_lr * multiplier for base_lr in self.base_lrs]
+
+
+"""
+Content below is no longer needed!
+"""
+
+# NOTE: PyTorch's LR scheduler interface uses names that assume the LR changes
+# only on epoch boundaries. We typically use iteration based schedules instead.
+# As a result, "epoch" (e.g., as in self.last_epoch) should be understood to mean
+# "iteration" instead.
+
+# FIXME: ideally this would be achieved with a CombinedLRScheduler, separating
+# MultiStepLR with WarmupLR but the current LRScheduler design doesn't allow it.
+
+
+class WarmupMultiStepLR(torch.optim.lr_scheduler._LRScheduler):
+    def __init__(
+        self,
+        optimizer: torch.optim.Optimizer,
+        milestones: List[int],
+        gamma: float = 0.1,
+        warmup_factor: float = 0.001,
+        warmup_iters: int = 1000,
+        warmup_method: str = "linear",
+        last_epoch: int = -1,
+    ):
+        logger.warning(
+            "WarmupMultiStepLR is deprecated! Use LRMultiplier with fvcore ParamScheduler instead!"
+        )
+        if not list(milestones) == sorted(milestones):
+            raise ValueError(
+                "Milestones should be a list of increasing integers. Got {}".format(milestones)
+            )
+        self.milestones = milestones
+        self.gamma = gamma
+        self.warmup_factor = warmup_factor
+        self.warmup_iters = warmup_iters
+        self.warmup_method = warmup_method
+        super().__init__(optimizer, last_epoch)
+
+    def get_lr(self) -> List[float]:
+        warmup_factor = _get_warmup_factor_at_iter(
+            self.warmup_method, self.last_epoch, self.warmup_iters, self.warmup_factor
+        )
+        return [
+            base_lr * warmup_factor * self.gamma ** bisect_right(self.milestones, self.last_epoch)
+            for base_lr in self.base_lrs
+        ]
+
+    def _compute_values(self) -> List[float]:
+        # The new interface
+        return self.get_lr()
+
+
+class WarmupCosineLR(torch.optim.lr_scheduler._LRScheduler):
+    def __init__(
+        self,
+        optimizer: torch.optim.Optimizer,
+        max_iters: int,
+        warmup_factor: float = 0.001,
+        warmup_iters: int = 1000,
+        warmup_method: str = "linear",
+        last_epoch: int = -1,
+    ):
+        logger.warning(
+            "WarmupCosineLR is deprecated! Use LRMultiplier with fvcore ParamScheduler instead!"
+ ) + self.max_iters = max_iters + self.warmup_factor = warmup_factor + self.warmup_iters = warmup_iters + self.warmup_method = warmup_method + super().__init__(optimizer, last_epoch) + + def get_lr(self) -> List[float]: + warmup_factor = _get_warmup_factor_at_iter( + self.warmup_method, self.last_epoch, self.warmup_iters, self.warmup_factor + ) + # Different definitions of half-cosine with warmup are possible. For + # simplicity we multiply the standard half-cosine schedule by the warmup + # factor. An alternative is to start the period of the cosine at warmup_iters + # instead of at 0. In the case that warmup_iters << max_iters the two are + # very close to each other. + return [ + base_lr + * warmup_factor + * 0.5 + * (1.0 + math.cos(math.pi * self.last_epoch / self.max_iters)) + for base_lr in self.base_lrs + ] + + def _compute_values(self) -> List[float]: + # The new interface + return self.get_lr() + + +def _get_warmup_factor_at_iter( + method: str, iter: int, warmup_iters: int, warmup_factor: float +) -> float: + """ + Return the learning rate warmup factor at a specific iteration. + See :paper:`ImageNet in 1h` for more details. + + Args: + method (str): warmup method; either "constant" or "linear". + iter (int): iteration at which to calculate the warmup factor. + warmup_iters (int): the number of warmup iterations. + warmup_factor (float): the base warmup factor (the meaning changes according + to the method used). + + Returns: + float: the effective warmup factor at the given iteration. + """ + if iter >= warmup_iters: + return 1.0 + + if method == "constant": + return warmup_factor + elif method == "linear": + alpha = iter / warmup_iters + return warmup_factor * (1 - alpha) + alpha + else: + raise ValueError("Unknown warmup method: {}".format(method)) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/structures/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/structures/__init__.py new file mode 100644 index 0000000..f3ee605 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/structures/__init__.py @@ -0,0 +1,17 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from .boxes import Boxes, BoxMode, pairwise_iou, pairwise_ioa, pairwise_point_box_distance +from .image_list import ImageList + +from .instances import Instances +from .keypoints import Keypoints, heatmaps_to_keypoints +from .masks import BitMasks, PolygonMasks, polygons_to_bitmask, ROIMasks +from .rotated_boxes import RotatedBoxes +from .rotated_boxes import pairwise_iou as pairwise_iou_rotated + +__all__ = [k for k in globals().keys() if not k.startswith("_")] + + +from detectron2.utils.env import fixup_module_metadata + +fixup_module_metadata(__name__, globals(), __all__) +del fixup_module_metadata diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/structures/boxes.py b/motion-gan-pipeline/preprocessing/third/detectron2/structures/boxes.py new file mode 100644 index 0000000..ae543c6 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/structures/boxes.py @@ -0,0 +1,423 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import math +import numpy as np +from enum import IntEnum, unique +from typing import List, Tuple, Union +import torch +from torch import device + +_RawBoxType = Union[List[float], Tuple[float, ...], torch.Tensor, np.ndarray] + + +@unique +class BoxMode(IntEnum): + """ + Enum of different ways to represent a box. + """ + + XYXY_ABS = 0 + """ + (x0, y0, x1, y1) in absolute floating points coordinates. 
+ The coordinates in range [0, width or height]. + """ + XYWH_ABS = 1 + """ + (x0, y0, w, h) in absolute floating points coordinates. + """ + XYXY_REL = 2 + """ + Not yet supported! + (x0, y0, x1, y1) in range [0, 1]. They are relative to the size of the image. + """ + XYWH_REL = 3 + """ + Not yet supported! + (x0, y0, w, h) in range [0, 1]. They are relative to the size of the image. + """ + XYWHA_ABS = 4 + """ + (xc, yc, w, h, a) in absolute floating points coordinates. + (xc, yc) is the center of the rotated box, and the angle a is in degrees ccw. + """ + + @staticmethod + def convert(box: _RawBoxType, from_mode: "BoxMode", to_mode: "BoxMode") -> _RawBoxType: + """ + Args: + box: can be a k-tuple, k-list or an Nxk array/tensor, where k = 4 or 5 + from_mode, to_mode (BoxMode) + + Returns: + The converted box of the same type. + """ + if from_mode == to_mode: + return box + + original_type = type(box) + is_numpy = isinstance(box, np.ndarray) + single_box = isinstance(box, (list, tuple)) + if single_box: + assert len(box) == 4 or len(box) == 5, ( + "BoxMode.convert takes either a k-tuple/list or an Nxk array/tensor," + " where k == 4 or 5" + ) + arr = torch.tensor(box)[None, :] + else: + # avoid modifying the input box + if is_numpy: + arr = torch.from_numpy(np.asarray(box)).clone() + else: + arr = box.clone() + + assert to_mode not in [BoxMode.XYXY_REL, BoxMode.XYWH_REL] and from_mode not in [ + BoxMode.XYXY_REL, + BoxMode.XYWH_REL, + ], "Relative mode not yet supported!" + + if from_mode == BoxMode.XYWHA_ABS and to_mode == BoxMode.XYXY_ABS: + assert ( + arr.shape[-1] == 5 + ), "The last dimension of input shape must be 5 for XYWHA format" + original_dtype = arr.dtype + arr = arr.double() + + w = arr[:, 2] + h = arr[:, 3] + a = arr[:, 4] + c = torch.abs(torch.cos(a * math.pi / 180.0)) + s = torch.abs(torch.sin(a * math.pi / 180.0)) + # This basically computes the horizontal bounding rectangle of the rotated box + new_w = c * w + s * h + new_h = c * h + s * w + + # convert center to top-left corner + arr[:, 0] -= new_w / 2.0 + arr[:, 1] -= new_h / 2.0 + # bottom-right corner + arr[:, 2] = arr[:, 0] + new_w + arr[:, 3] = arr[:, 1] + new_h + + arr = arr[:, :4].to(dtype=original_dtype) + elif from_mode == BoxMode.XYWH_ABS and to_mode == BoxMode.XYWHA_ABS: + original_dtype = arr.dtype + arr = arr.double() + arr[:, 0] += arr[:, 2] / 2.0 + arr[:, 1] += arr[:, 3] / 2.0 + angles = torch.zeros((arr.shape[0], 1), dtype=arr.dtype) + arr = torch.cat((arr, angles), axis=1).to(dtype=original_dtype) + else: + if to_mode == BoxMode.XYXY_ABS and from_mode == BoxMode.XYWH_ABS: + arr[:, 2] += arr[:, 0] + arr[:, 3] += arr[:, 1] + elif from_mode == BoxMode.XYXY_ABS and to_mode == BoxMode.XYWH_ABS: + arr[:, 2] -= arr[:, 0] + arr[:, 3] -= arr[:, 1] + else: + raise NotImplementedError( + "Conversion from BoxMode {} to {} is not supported yet".format( + from_mode, to_mode + ) + ) + + if single_box: + return original_type(arr.flatten().tolist()) + if is_numpy: + return arr.numpy() + else: + return arr + + +class Boxes: + """ + This structure stores a list of boxes as a Nx4 torch.Tensor. + It supports some common methods about boxes + (`area`, `clip`, `nonempty`, etc), + and also behaves like a Tensor + (support indexing, `to(device)`, `.device`, and iteration over all boxes) + + Attributes: + tensor (torch.Tensor): float matrix of Nx4. Each row is (x1, y1, x2, y2). + """ + + def __init__(self, tensor: torch.Tensor): + """ + Args: + tensor (Tensor[float]): a Nx4 matrix. Each row is (x1, y1, x2, y2). 
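+
+        For example, ``Boxes(torch.tensor([[10.0, 10.0, 50.0, 40.0]]))`` stores one box
+        whose ``area()`` is (50 - 10) * (40 - 10) = 1200.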
+ """ + device = tensor.device if isinstance(tensor, torch.Tensor) else torch.device("cpu") + tensor = torch.as_tensor(tensor, dtype=torch.float32, device=device) + if tensor.numel() == 0: + # Use reshape, so we don't end up creating a new tensor that does not depend on + # the inputs (and consequently confuses jit) + tensor = tensor.reshape((-1, 4)).to(dtype=torch.float32, device=device) + assert tensor.dim() == 2 and tensor.size(-1) == 4, tensor.size() + + self.tensor = tensor + + def clone(self) -> "Boxes": + """ + Clone the Boxes. + + Returns: + Boxes + """ + return Boxes(self.tensor.clone()) + + def to(self, device: torch.device): + # Boxes are assumed float32 and does not support to(dtype) + return Boxes(self.tensor.to(device=device)) + + def area(self) -> torch.Tensor: + """ + Computes the area of all the boxes. + + Returns: + torch.Tensor: a vector with areas of each box. + """ + box = self.tensor + area = (box[:, 2] - box[:, 0]) * (box[:, 3] - box[:, 1]) + return area + + def clip(self, box_size: Tuple[int, int]) -> None: + """ + Clip (in place) the boxes by limiting x coordinates to the range [0, width] + and y coordinates to the range [0, height]. + + Args: + box_size (height, width): The clipping box's size. + """ + assert torch.isfinite(self.tensor).all(), "Box tensor contains infinite or NaN!" + h, w = box_size + x1 = self.tensor[:, 0].clamp(min=0, max=w) + y1 = self.tensor[:, 1].clamp(min=0, max=h) + x2 = self.tensor[:, 2].clamp(min=0, max=w) + y2 = self.tensor[:, 3].clamp(min=0, max=h) + self.tensor = torch.stack((x1, y1, x2, y2), dim=-1) + + def nonempty(self, threshold: float = 0.0) -> torch.Tensor: + """ + Find boxes that are non-empty. + A box is considered empty, if either of its side is no larger than threshold. + + Returns: + Tensor: + a binary vector which represents whether each box is empty + (False) or non-empty (True). + """ + box = self.tensor + widths = box[:, 2] - box[:, 0] + heights = box[:, 3] - box[:, 1] + keep = (widths > threshold) & (heights > threshold) + return keep + + def __getitem__(self, item) -> "Boxes": + """ + Args: + item: int, slice, or a BoolTensor + + Returns: + Boxes: Create a new :class:`Boxes` by indexing. + + The following usage are allowed: + + 1. `new_boxes = boxes[3]`: return a `Boxes` which contains only one box. + 2. `new_boxes = boxes[2:10]`: return a slice of boxes. + 3. `new_boxes = boxes[vector]`, where vector is a torch.BoolTensor + with `length = len(boxes)`. Nonzero elements in the vector will be selected. + + Note that the returned Boxes might share storage with this Boxes, + subject to Pytorch's indexing semantics. + """ + if isinstance(item, int): + return Boxes(self.tensor[item].view(1, -1)) + b = self.tensor[item] + assert b.dim() == 2, "Indexing on Boxes with {} failed to return a matrix!".format(item) + return Boxes(b) + + def __len__(self) -> int: + return self.tensor.shape[0] + + def __repr__(self) -> str: + return "Boxes(" + str(self.tensor) + ")" + + def inside_box(self, box_size: Tuple[int, int], boundary_threshold: int = 0) -> torch.Tensor: + """ + Args: + box_size (height, width): Size of the reference box. + boundary_threshold (int): Boxes that extend beyond the reference box + boundary by more than boundary_threshold are considered "outside". + + Returns: + a binary vector, indicating whether each box is inside the reference box. 
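+
+            For example, with ``box_size=(480, 640)`` and ``boundary_threshold=0``, the box
+            (10, 10, 600, 400) is inside, while (630, 10, 650, 100) is not, since its
+            x2 = 650 is not < 640.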
+        """
+        height, width = box_size
+        inds_inside = (
+            (self.tensor[..., 0] >= -boundary_threshold)
+            & (self.tensor[..., 1] >= -boundary_threshold)
+            & (self.tensor[..., 2] < width + boundary_threshold)
+            & (self.tensor[..., 3] < height + boundary_threshold)
+        )
+        return inds_inside
+
+    def get_centers(self) -> torch.Tensor:
+        """
+        Returns:
+            The box centers in a Nx2 array of (x, y).
+        """
+        return (self.tensor[:, :2] + self.tensor[:, 2:]) / 2
+
+    def scale(self, scale_x: float, scale_y: float) -> None:
+        """
+        Scale the box with horizontal and vertical scaling factors
+        """
+        self.tensor[:, 0::2] *= scale_x
+        self.tensor[:, 1::2] *= scale_y
+
+    @classmethod
+    def cat(cls, boxes_list: List["Boxes"]) -> "Boxes":
+        """
+        Concatenates a list of Boxes into a single Boxes
+
+        Arguments:
+            boxes_list (list[Boxes])
+
+        Returns:
+            Boxes: the concatenated Boxes
+        """
+        assert isinstance(boxes_list, (list, tuple))
+        if len(boxes_list) == 0:
+            return cls(torch.empty(0))
+        assert all([isinstance(box, Boxes) for box in boxes_list])
+
+        # use torch.cat (vs. layers.cat) so the returned boxes never share storage with input
+        cat_boxes = cls(torch.cat([b.tensor for b in boxes_list], dim=0))
+        return cat_boxes
+
+    @property
+    def device(self) -> device:
+        return self.tensor.device
+
+    # type "Iterator[torch.Tensor]", yield, and iter() not supported by torchscript
+    # https://github.com/pytorch/pytorch/issues/18627
+    @torch.jit.unused
+    def __iter__(self):
+        """
+        Yield a box as a Tensor of shape (4,) at a time.
+        """
+        yield from self.tensor
+
+
+def pairwise_intersection(boxes1: Boxes, boxes2: Boxes) -> torch.Tensor:
+    """
+    Given two lists of boxes of size N and M,
+    compute the intersection area between __all__ N x M pairs of boxes.
+    The box order must be (xmin, ymin, xmax, ymax)
+
+    Args:
+        boxes1,boxes2 (Boxes): two `Boxes`. Contains N & M boxes, respectively.
+
+    Returns:
+        Tensor: intersection, sized [N,M].
+    """
+    boxes1, boxes2 = boxes1.tensor, boxes2.tensor
+    width_height = torch.min(boxes1[:, None, 2:], boxes2[:, 2:]) - torch.max(
+        boxes1[:, None, :2], boxes2[:, :2]
+    )  # [N,M,2]
+
+    width_height.clamp_(min=0)  # [N,M,2]
+    intersection = width_height.prod(dim=2)  # [N,M]
+    return intersection
+
+
+# implementation from https://github.com/kuangliu/torchcv/blob/master/torchcv/utils/box.py
+# with slight modifications
+def pairwise_iou(boxes1: Boxes, boxes2: Boxes) -> torch.Tensor:
+    """
+    Given two lists of boxes of size N and M, compute the IoU
+    (intersection over union) between **all** N x M pairs of boxes.
+    The box order must be (xmin, ymin, xmax, ymax).
+
+    Args:
+        boxes1,boxes2 (Boxes): two `Boxes`. Contains N & M boxes, respectively.
+
+    Returns:
+        Tensor: IoU, sized [N,M].
+    """
+    area1 = boxes1.area()  # [N]
+    area2 = boxes2.area()  # [M]
+    inter = pairwise_intersection(boxes1, boxes2)
+
+    # handle empty boxes
+    iou = torch.where(
+        inter > 0,
+        inter / (area1[:, None] + area2 - inter),
+        torch.zeros(1, dtype=inter.dtype, device=inter.device),
+    )
+    return iou
+
+
+def pairwise_ioa(boxes1: Boxes, boxes2: Boxes) -> torch.Tensor:
+    """
+    Similar to :func:`pairwise_iou` but computes the IoA (intersection over boxes2's area).
+
+    Args:
+        boxes1,boxes2 (Boxes): two `Boxes`. Contains N & M boxes, respectively.
+
+    Returns:
+        Tensor: IoA, sized [N,M].
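+
+        For example, if a box in boxes1 covers half of the area of a box in boxes2, the
+        corresponding entry is 0.5, regardless of how large the boxes1 box itself is.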
+    """
+    area2 = boxes2.area()  # [M]
+    inter = pairwise_intersection(boxes1, boxes2)
+
+    # handle empty boxes
+    ioa = torch.where(
+        inter > 0, inter / area2, torch.zeros(1, dtype=inter.dtype, device=inter.device)
+    )
+    return ioa
+
+
+def pairwise_point_box_distance(points: torch.Tensor, boxes: Boxes):
+    """
+    Pairwise distance between N points and M boxes. The distance between a
+    point and a box is represented by the distance from the point to 4 edges
+    of the box. Distances are all positive when the point is inside the box.
+
+    Args:
+        points: Nx2 coordinates. Each row is (x, y)
+        boxes: M boxes
+
+    Returns:
+        Tensor: distances of size (N, M, 4). The 4 values are distances from
+            the point to the left, top, right, bottom of the box.
+    """
+    x, y = points.unsqueeze(dim=2).unbind(dim=1)  # (N, 1)
+    x0, y0, x1, y1 = boxes.tensor.unsqueeze(dim=0).unbind(dim=2)  # (1, M)
+    return torch.stack([x - x0, y - y0, x1 - x, y1 - y], dim=2)
+
+
+def matched_pairwise_iou(boxes1: Boxes, boxes2: Boxes) -> torch.Tensor:
+    """
+    Compute pairwise intersection over union (IoU) of two sets of matched
+    boxes that have the same number of boxes.
+    Similar to :func:`pairwise_iou`, but computes only the diagonal elements of the matrix.
+
+    Args:
+        boxes1 (Boxes): bounding boxes, sized [N,4].
+        boxes2 (Boxes): same length as boxes1
+    Returns:
+        Tensor: iou, sized [N].
+    """
+    assert len(boxes1) == len(
+        boxes2
+    ), "boxlists should have the same number of entries, got {}, {}".format(
+        len(boxes1), len(boxes2)
+    )
+    area1 = boxes1.area()  # [N]
+    area2 = boxes2.area()  # [N]
+    box1, box2 = boxes1.tensor, boxes2.tensor
+    lt = torch.max(box1[:, :2], box2[:, :2])  # [N,2]
+    rb = torch.min(box1[:, 2:], box2[:, 2:])  # [N,2]
+    wh = (rb - lt).clamp(min=0)  # [N,2]
+    inter = wh[:, 0] * wh[:, 1]  # [N]
+    iou = inter / (area1 + area2 - inter)  # [N]
+    return iou
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/structures/image_list.py b/motion-gan-pipeline/preprocessing/third/detectron2/structures/image_list.py
new file mode 100644
index 0000000..b31b2d3
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/structures/image_list.py
@@ -0,0 +1,110 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+from __future__ import division
+from typing import Any, List, Tuple
+import torch
+from torch import device
+from torch.nn import functional as F
+
+from detectron2.layers.wrappers import shapes_to_tensor
+
+
+class ImageList(object):
+    """
+    Structure that holds a list of images (of possibly
+    varying sizes) as a single tensor.
+    This works by padding the images to the same size.
+    The original size of each image is stored in `image_sizes`.
+
+    Attributes:
+        image_sizes (list[tuple[int, int]]): each tuple is (h, w).
+            During tracing, it becomes list[Tensor] instead.
+    """
+
+    def __init__(self, tensor: torch.Tensor, image_sizes: List[Tuple[int, int]]):
+        """
+        Arguments:
+            tensor (Tensor): of shape (N, H, W) or (N, C_1, ..., C_K, H, W) where K >= 1
+            image_sizes (list[tuple[int, int]]): Each tuple is (h, w). It can
+                be smaller than (H, W) due to padding.
+        """
+        self.tensor = tensor
+        self.image_sizes = image_sizes
+
+    def __len__(self) -> int:
+        return len(self.image_sizes)
+
+    def __getitem__(self, idx) -> torch.Tensor:
+        """
+        Access the individual image in its original size.
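+        For example (illustrative sizes): if the batch was padded to 800x800 but
+        ``image_sizes[idx]`` is (600, 400), the returned tensor is sliced back to
+        height 600 and width 400.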
+ + Args: + idx: int or slice + + Returns: + Tensor: an image of shape (H, W) or (C_1, ..., C_K, H, W) where K >= 1 + """ + size = self.image_sizes[idx] + return self.tensor[idx, ..., : size[0], : size[1]] + + @torch.jit.unused + def to(self, *args: Any, **kwargs: Any) -> "ImageList": + cast_tensor = self.tensor.to(*args, **kwargs) + return ImageList(cast_tensor, self.image_sizes) + + @property + def device(self) -> device: + return self.tensor.device + + @staticmethod + def from_tensors( + tensors: List[torch.Tensor], size_divisibility: int = 0, pad_value: float = 0.0 + ) -> "ImageList": + """ + Args: + tensors: a tuple or list of `torch.Tensor`, each of shape (Hi, Wi) or + (C_1, ..., C_K, Hi, Wi) where K >= 1. The Tensors will be padded + to the same shape with `pad_value`. + size_divisibility (int): If `size_divisibility > 0`, add padding to ensure + the common height and width is divisible by `size_divisibility`. + This depends on the model and many models need a divisibility of 32. + pad_value (float): value to pad + + Returns: + an `ImageList`. + """ + assert len(tensors) > 0 + assert isinstance(tensors, (tuple, list)) + for t in tensors: + assert isinstance(t, torch.Tensor), type(t) + assert t.shape[:-2] == tensors[0].shape[:-2], t.shape + + image_sizes = [(im.shape[-2], im.shape[-1]) for im in tensors] + image_sizes_tensor = [shapes_to_tensor(x) for x in image_sizes] + max_size = torch.stack(image_sizes_tensor).max(0).values + + if size_divisibility > 1: + stride = size_divisibility + # the last two dims are H,W, both subject to divisibility requirement + max_size = (max_size + (stride - 1)).div(stride, rounding_mode="floor") * stride + + # handle weirdness of scripting and tracing ... + if torch.jit.is_scripting(): + max_size: List[int] = max_size.to(dtype=torch.long).tolist() + else: + if torch.jit.is_tracing(): + image_sizes = image_sizes_tensor + + if len(tensors) == 1: + # This seems slightly (2%) faster. + # TODO: check whether it's faster for multiple images as well + image_size = image_sizes[0] + padding_size = [0, max_size[-1] - image_size[1], 0, max_size[-2] - image_size[0]] + batched_imgs = F.pad(tensors[0], padding_size, value=pad_value).unsqueeze_(0) + else: + # max_size can be a tensor in tracing mode, therefore convert to list + batch_shape = [len(tensors)] + list(tensors[0].shape[:-2]) + list(max_size) + batched_imgs = tensors[0].new_full(batch_shape, pad_value) + for img, pad_img in zip(tensors, batched_imgs): + pad_img[..., : img.shape[-2], : img.shape[-1]].copy_(img) + + return ImageList(batched_imgs.contiguous(), image_sizes) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/structures/instances.py b/motion-gan-pipeline/preprocessing/third/detectron2/structures/instances.py new file mode 100644 index 0000000..612e66f --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/structures/instances.py @@ -0,0 +1,192 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import itertools +from typing import Any, Dict, List, Tuple, Union +import torch + + +class Instances: + """ + This class represents a list of instances in an image. + It stores the attributes of instances (e.g., boxes, masks, labels, scores) as "fields". + All fields must have the same ``__len__`` which is the number of instances. + + All other (non-field) attributes of this class are considered private: + they must start with '_' and are not modifiable by a user. + + Some basic usage: + + 1. Set/get/check a field: + + .. 
code-block:: python + + instances.gt_boxes = Boxes(...) + print(instances.pred_masks) # a tensor of shape (N, H, W) + print('gt_masks' in instances) + + 2. ``len(instances)`` returns the number of instances + 3. Indexing: ``instances[indices]`` will apply the indexing on all the fields + and returns a new :class:`Instances`. + Typically, ``indices`` is a integer vector of indices, + or a binary mask of length ``num_instances`` + + .. code-block:: python + + category_3_detections = instances[instances.pred_classes == 3] + confident_detections = instances[instances.scores > 0.9] + """ + + def __init__(self, image_size: Tuple[int, int], **kwargs: Any): + """ + Args: + image_size (height, width): the spatial size of the image. + kwargs: fields to add to this `Instances`. + """ + self._image_size = image_size + self._fields: Dict[str, Any] = {} + for k, v in kwargs.items(): + self.set(k, v) + + @property + def image_size(self) -> Tuple[int, int]: + """ + Returns: + tuple: height, width + """ + return self._image_size + + def __setattr__(self, name: str, val: Any) -> None: + if name.startswith("_"): + super().__setattr__(name, val) + else: + self.set(name, val) + + def __getattr__(self, name: str) -> Any: + if name == "_fields" or name not in self._fields: + raise AttributeError("Cannot find field '{}' in the given Instances!".format(name)) + return self._fields[name] + + def set(self, name: str, value: Any) -> None: + """ + Set the field named `name` to `value`. + The length of `value` must be the number of instances, + and must agree with other existing fields in this object. + """ + data_len = len(value) + if len(self._fields): + assert ( + len(self) == data_len + ), "Adding a field of length {} to a Instances of length {}".format(data_len, len(self)) + self._fields[name] = value + + def has(self, name: str) -> bool: + """ + Returns: + bool: whether the field called `name` exists. + """ + return name in self._fields + + def remove(self, name: str) -> None: + """ + Remove the field called `name`. + """ + del self._fields[name] + + def get(self, name: str) -> Any: + """ + Returns the field called `name`. + """ + return self._fields[name] + + def get_fields(self) -> Dict[str, Any]: + """ + Returns: + dict: a dict which maps names (str) to data of the fields + + Modifying the returned dict will modify this instance. + """ + return self._fields + + # Tensor-like methods + def to(self, *args: Any, **kwargs: Any) -> "Instances": + """ + Returns: + Instances: all fields are called with a `to(device)`, if the field has this method. + """ + ret = Instances(self._image_size) + for k, v in self._fields.items(): + if hasattr(v, "to"): + v = v.to(*args, **kwargs) + ret.set(k, v) + return ret + + def __getitem__(self, item: Union[int, slice, torch.BoolTensor]) -> "Instances": + """ + Args: + item: an index-like object and will be used to index all the fields. + + Returns: + If `item` is a string, return the data in the corresponding field. + Otherwise, returns an `Instances` where all fields are indexed by `item`. 
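+
+        For example, ``instances[instances.scores > 0.5]`` (assuming a ``scores`` field)
+        returns a new ``Instances`` whose fields are all filtered by the same mask.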
+ """ + if type(item) == int: + if item >= len(self) or item < -len(self): + raise IndexError("Instances index out of range!") + else: + item = slice(item, None, len(self)) + + ret = Instances(self._image_size) + for k, v in self._fields.items(): + ret.set(k, v[item]) + return ret + + def __len__(self) -> int: + for v in self._fields.values(): + # use __len__ because len() has to be int and is not friendly to tracing + return v.__len__() + raise NotImplementedError("Empty Instances does not support __len__!") + + def __iter__(self): + raise NotImplementedError("`Instances` object is not iterable!") + + @staticmethod + def cat(instance_lists: List["Instances"]) -> "Instances": + """ + Args: + instance_lists (list[Instances]) + + Returns: + Instances + """ + assert all(isinstance(i, Instances) for i in instance_lists) + assert len(instance_lists) > 0 + if len(instance_lists) == 1: + return instance_lists[0] + + image_size = instance_lists[0].image_size + if not isinstance(image_size, torch.Tensor): # could be a tensor in tracing + for i in instance_lists[1:]: + assert i.image_size == image_size + ret = Instances(image_size) + for k in instance_lists[0]._fields.keys(): + values = [i.get(k) for i in instance_lists] + v0 = values[0] + if isinstance(v0, torch.Tensor): + values = torch.cat(values, dim=0) + elif isinstance(v0, list): + values = list(itertools.chain(*values)) + elif hasattr(type(v0), "cat"): + values = type(v0).cat(values) + else: + raise ValueError("Unsupported type {} for concatenation".format(type(v0))) + ret.set(k, values) + return ret + + def __str__(self) -> str: + s = self.__class__.__name__ + "(" + s += "num_instances={}, ".format(len(self)) + s += "image_height={}, ".format(self._image_size[0]) + s += "image_width={}, ".format(self._image_size[1]) + s += "fields=[{}])".format(", ".join((f"{k}: {v}" for k, v in self._fields.items()))) + return s + + __repr__ = __str__ diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/structures/keypoints.py b/motion-gan-pipeline/preprocessing/third/detectron2/structures/keypoints.py new file mode 100644 index 0000000..d0ee872 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/structures/keypoints.py @@ -0,0 +1,239 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import numpy as np +from typing import Any, List, Tuple, Union +import torch +from torch.nn import functional as F + + +class Keypoints: + """ + Stores keypoint **annotation** data. GT Instances have a `gt_keypoints` property + containing the x,y location and visibility flag of each keypoint. This tensor has shape + (N, K, 3) where N is the number of instances and K is the number of keypoints per instance. + + The visibility flag follows the COCO format and must be one of three integers: + + * v=0: not labeled (in which case x=y=0) + * v=1: labeled but not visible + * v=2: labeled and visible + """ + + def __init__(self, keypoints: Union[torch.Tensor, np.ndarray, List[List[float]]]): + """ + Arguments: + keypoints: A Tensor, numpy array, or list of the x, y, and visibility of each keypoint. + The shape should be (N, K, 3) where N is the number of + instances, and K is the number of keypoints per instance. 
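+
+        A minimal construction sketch (the shapes are illustrative assumptions):
+
+        .. code-block:: python
+
+            import torch
+            # 2 instances, 5 keypoints each, stored as (x, y, visibility)
+            kpts = Keypoints(torch.zeros(2, 5, 3))
+            assert len(kpts) == 2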
+ """ + device = keypoints.device if isinstance(keypoints, torch.Tensor) else torch.device("cpu") + keypoints = torch.as_tensor(keypoints, dtype=torch.float32, device=device) + assert keypoints.dim() == 3 and keypoints.shape[2] == 3, keypoints.shape + self.tensor = keypoints + + def __len__(self) -> int: + return self.tensor.size(0) + + def to(self, *args: Any, **kwargs: Any) -> "Keypoints": + return type(self)(self.tensor.to(*args, **kwargs)) + + @property + def device(self) -> torch.device: + return self.tensor.device + + def to_heatmap(self, boxes: torch.Tensor, heatmap_size: int) -> torch.Tensor: + """ + Convert keypoint annotations to a heatmap of one-hot labels for training, + as described in :paper:`Mask R-CNN`. + + Arguments: + boxes: Nx4 tensor, the boxes to draw the keypoints to + + Returns: + heatmaps: + A tensor of shape (N, K), each element is integer spatial label + in the range [0, heatmap_size**2 - 1] for each keypoint in the input. + valid: + A tensor of shape (N, K) containing whether each keypoint is in the roi or not. + """ + return _keypoints_to_heatmap(self.tensor, boxes, heatmap_size) + + def __getitem__(self, item: Union[int, slice, torch.BoolTensor]) -> "Keypoints": + """ + Create a new `Keypoints` by indexing on this `Keypoints`. + + The following usage are allowed: + + 1. `new_kpts = kpts[3]`: return a `Keypoints` which contains only one instance. + 2. `new_kpts = kpts[2:10]`: return a slice of key points. + 3. `new_kpts = kpts[vector]`, where vector is a torch.ByteTensor + with `length = len(kpts)`. Nonzero elements in the vector will be selected. + + Note that the returned Keypoints might share storage with this Keypoints, + subject to Pytorch's indexing semantics. + """ + if isinstance(item, int): + return Keypoints([self.tensor[item]]) + return Keypoints(self.tensor[item]) + + def __repr__(self) -> str: + s = self.__class__.__name__ + "(" + s += "num_instances={})".format(len(self.tensor)) + return s + + @staticmethod + def cat(keypoints_list: List["Keypoints"]) -> "Keypoints": + """ + Concatenates a list of Keypoints into a single Keypoints + + Arguments: + keypoints_list (list[Keypoints]) + + Returns: + Keypoints: the concatenated Keypoints + """ + assert isinstance(keypoints_list, (list, tuple)) + assert len(keypoints_list) > 0 + assert all(isinstance(keypoints, Keypoints) for keypoints in keypoints_list) + + cat_kpts = type(keypoints_list[0])( + torch.cat([kpts.tensor for kpts in keypoints_list], dim=0) + ) + return cat_kpts + + +# TODO make this nicer, this is a direct translation from C2 (but removing the inner loop) +def _keypoints_to_heatmap( + keypoints: torch.Tensor, rois: torch.Tensor, heatmap_size: int +) -> Tuple[torch.Tensor, torch.Tensor]: + """ + Encode keypoint locations into a target heatmap for use in SoftmaxWithLoss across space. + + Maps keypoints from the half-open interval [x1, x2) on continuous image coordinates to the + closed interval [0, heatmap_size - 1] on discrete image coordinates. We use the + continuous-discrete conversion from Heckbert 1990 ("What is the coordinate of a pixel?"): + d = floor(c) and c = d + 0.5, where d is a discrete coordinate and c is a continuous coordinate. + + Arguments: + keypoints: tensor of keypoint locations in of shape (N, K, 3). + rois: Nx4 tensor of rois in xyxy format + heatmap_size: integer side length of square heatmap. + + Returns: + heatmaps: A tensor of shape (N, K) containing an integer spatial label + in the range [0, heatmap_size**2 - 1] for each keypoint in the input. 
+ valid: A tensor of shape (N, K) containing whether each keypoint is in + the roi or not. + """ + + if rois.numel() == 0: + return rois.new().long(), rois.new().long() + offset_x = rois[:, 0] + offset_y = rois[:, 1] + scale_x = heatmap_size / (rois[:, 2] - rois[:, 0]) + scale_y = heatmap_size / (rois[:, 3] - rois[:, 1]) + + offset_x = offset_x[:, None] + offset_y = offset_y[:, None] + scale_x = scale_x[:, None] + scale_y = scale_y[:, None] + + x = keypoints[..., 0] + y = keypoints[..., 1] + + x_boundary_inds = x == rois[:, 2][:, None] + y_boundary_inds = y == rois[:, 3][:, None] + + x = (x - offset_x) * scale_x + x = x.floor().long() + y = (y - offset_y) * scale_y + y = y.floor().long() + + x[x_boundary_inds] = heatmap_size - 1 + y[y_boundary_inds] = heatmap_size - 1 + + valid_loc = (x >= 0) & (y >= 0) & (x < heatmap_size) & (y < heatmap_size) + vis = keypoints[..., 2] > 0 + valid = (valid_loc & vis).long() + + lin_ind = y * heatmap_size + x + heatmaps = lin_ind * valid + + return heatmaps, valid + + +@torch.jit.script_if_tracing +def heatmaps_to_keypoints(maps: torch.Tensor, rois: torch.Tensor) -> torch.Tensor: + """ + Extract predicted keypoint locations from heatmaps. + + Args: + maps (Tensor): (#ROIs, #keypoints, POOL_H, POOL_W). The predicted heatmap of logits for + each ROI and each keypoint. + rois (Tensor): (#ROIs, 4). The box of each ROI. + + Returns: + Tensor of shape (#ROIs, #keypoints, 4) with the last dimension corresponding to + (x, y, logit, score) for each keypoint. + + When converting discrete pixel indices in an NxN image to a continuous keypoint coordinate, + we maintain consistency with :meth:`Keypoints.to_heatmap` by using the conversion from + Heckbert 1990: c = d + 0.5, where d is a discrete coordinate and c is a continuous coordinate. + """ + # The decorator use of torch.no_grad() was not supported by torchscript. 
+    # https://github.com/pytorch/pytorch/issues/44768
+    maps = maps.detach()
+    rois = rois.detach()
+
+    offset_x = rois[:, 0]
+    offset_y = rois[:, 1]
+
+    widths = (rois[:, 2] - rois[:, 0]).clamp(min=1)
+    heights = (rois[:, 3] - rois[:, 1]).clamp(min=1)
+    widths_ceil = widths.ceil()
+    heights_ceil = heights.ceil()
+
+    num_rois, num_keypoints = maps.shape[:2]
+    xy_preds = maps.new_zeros(rois.shape[0], num_keypoints, 4)
+
+    width_corrections = widths / widths_ceil
+    height_corrections = heights / heights_ceil
+
+    keypoints_idx = torch.arange(num_keypoints, device=maps.device)
+
+    for i in range(num_rois):
+        outsize = (int(heights_ceil[i]), int(widths_ceil[i]))
+        roi_map = F.interpolate(
+            maps[[i]], size=outsize, mode="bicubic", align_corners=False
+        ).squeeze(0)  # #keypoints x H x W
+
+        # softmax over the spatial region
+        max_score, _ = roi_map.view(num_keypoints, -1).max(1)
+        max_score = max_score.view(num_keypoints, 1, 1)
+        tmp_full_resolution = (roi_map - max_score).exp_()
+        tmp_pool_resolution = (maps[i] - max_score).exp_()
+        # Produce scores over the region H x W, but normalize with POOL_H x POOL_W,
+        # so that the scores of objects of different absolute sizes will be more comparable
+        roi_map_scores = tmp_full_resolution / tmp_pool_resolution.sum((1, 2), keepdim=True)
+
+        w = roi_map.shape[2]
+        pos = roi_map.view(num_keypoints, -1).argmax(1)
+
+        x_int = pos % w
+        y_int = (pos - x_int) // w
+
+        assert (
+            roi_map_scores[keypoints_idx, y_int, x_int]
+            == roi_map_scores.view(num_keypoints, -1).max(1)[0]
+        ).all()
+
+        x = (x_int.float() + 0.5) * width_corrections[i]
+        y = (y_int.float() + 0.5) * height_corrections[i]
+
+        xy_preds[i, :, 0] = x + offset_x[i]
+        xy_preds[i, :, 1] = y + offset_y[i]
+        xy_preds[i, :, 2] = roi_map[keypoints_idx, y_int, x_int]
+        xy_preds[i, :, 3] = roi_map_scores[keypoints_idx, y_int, x_int]
+
+    return xy_preds
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/structures/masks.py b/motion-gan-pipeline/preprocessing/third/detectron2/structures/masks.py
new file mode 100644
index 0000000..8f8e72d
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/structures/masks.py
@@ -0,0 +1,532 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+import copy
+import itertools
+import numpy as np
+from typing import Any, Iterator, List, Union
+import pycocotools.mask as mask_util
+import torch
+from torch import device
+
+from detectron2.layers.roi_align import ROIAlign
+from detectron2.utils.memory import retry_if_cuda_oom
+
+from .boxes import Boxes
+
+
+def polygon_area(x, y):
+    # Using the shoelace formula
+    # https://stackoverflow.com/questions/24467972/calculate-area-of-polygon-given-x-y-coordinates
+    return 0.5 * np.abs(np.dot(x, np.roll(y, 1)) - np.dot(y, np.roll(x, 1)))
+
+
+def polygons_to_bitmask(polygons: List[np.ndarray], height: int, width: int) -> np.ndarray:
+    """
+    Args:
+        polygons (list[ndarray]): each array has shape (Nx2,)
+        height, width (int)
+
+    Returns:
+        ndarray: a bool mask of shape (height, width)
+    """
+    if len(polygons) == 0:
+        # COCOAPI does not support empty polygons
+        return np.zeros((height, width)).astype(bool)
+    rles = mask_util.frPyObjects(polygons, height, width)
+    rle = mask_util.merge(rles)
+    return mask_util.decode(rle).astype(bool)
+
+
+def rasterize_polygons_within_box(
+    polygons: List[np.ndarray], box: np.ndarray, mask_size: int
+) -> torch.Tensor:
+    """
+    Rasterize the polygons into a mask image and
+    crop the mask content in the given box.
+ The cropped mask is resized to (mask_size, mask_size). + + This function is used when generating training targets for mask head in Mask R-CNN. + Given original ground-truth masks for an image, new ground-truth mask + training targets in the size of `mask_size x mask_size` + must be provided for each predicted box. This function will be called to + produce such targets. + + Args: + polygons (list[ndarray[float]]): a list of polygons, which represents an instance. + box: 4-element numpy array + mask_size (int): + + Returns: + Tensor: BoolTensor of shape (mask_size, mask_size) + """ + # 1. Shift the polygons w.r.t the boxes + w, h = box[2] - box[0], box[3] - box[1] + + polygons = copy.deepcopy(polygons) + for p in polygons: + p[0::2] = p[0::2] - box[0] + p[1::2] = p[1::2] - box[1] + + # 2. Rescale the polygons to the new box size + # max() to avoid division by small number + ratio_h = mask_size / max(h, 0.1) + ratio_w = mask_size / max(w, 0.1) + + if ratio_h == ratio_w: + for p in polygons: + p *= ratio_h + else: + for p in polygons: + p[0::2] *= ratio_w + p[1::2] *= ratio_h + + # 3. Rasterize the polygons with coco api + mask = polygons_to_bitmask(polygons, mask_size, mask_size) + mask = torch.from_numpy(mask) + return mask + + +class BitMasks: + """ + This class stores the segmentation masks for all objects in one image, in + the form of bitmaps. + + Attributes: + tensor: bool Tensor of N,H,W, representing N instances in the image. + """ + + def __init__(self, tensor: Union[torch.Tensor, np.ndarray]): + """ + Args: + tensor: bool Tensor of N,H,W, representing N instances in the image. + """ + device = tensor.device if isinstance(tensor, torch.Tensor) else torch.device("cpu") + tensor = torch.as_tensor(tensor, dtype=torch.bool, device=device) + assert tensor.dim() == 3, tensor.size() + self.image_size = tensor.shape[1:] + self.tensor = tensor + + @torch.jit.unused + def to(self, *args: Any, **kwargs: Any) -> "BitMasks": + return BitMasks(self.tensor.to(*args, **kwargs)) + + @property + def device(self) -> torch.device: + return self.tensor.device + + @torch.jit.unused + def __getitem__(self, item: Union[int, slice, torch.BoolTensor]) -> "BitMasks": + """ + Returns: + BitMasks: Create a new :class:`BitMasks` by indexing. + + The following usage are allowed: + + 1. `new_masks = masks[3]`: return a `BitMasks` which contains only one mask. + 2. `new_masks = masks[2:10]`: return a slice of masks. + 3. `new_masks = masks[vector]`, where vector is a torch.BoolTensor + with `length = len(masks)`. Nonzero elements in the vector will be selected. + + Note that the returned object might share storage with this object, + subject to Pytorch's indexing semantics. + """ + if isinstance(item, int): + return BitMasks(self.tensor[item].unsqueeze(0)) + m = self.tensor[item] + assert m.dim() == 3, "Indexing on BitMasks with {} returns a tensor with shape {}!".format( + item, m.shape + ) + return BitMasks(m) + + @torch.jit.unused + def __iter__(self) -> torch.Tensor: + yield from self.tensor + + @torch.jit.unused + def __repr__(self) -> str: + s = self.__class__.__name__ + "(" + s += "num_instances={})".format(len(self.tensor)) + return s + + def __len__(self) -> int: + return self.tensor.shape[0] + + def nonempty(self) -> torch.Tensor: + """ + Find masks that are non-empty. + + Returns: + Tensor: a BoolTensor which represents + whether each mask is empty (False) or non-empty (True). 
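+
+        A small illustrative sketch (the values are chosen for demonstration only):
+
+        .. code-block:: python
+
+            import torch
+            masks = BitMasks(torch.zeros(2, 4, 4, dtype=torch.bool))
+            masks.tensor[0, 1, 1] = True   # make the first mask non-empty
+            print(masks.nonempty())        # tensor([ True, False])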
+        """
+        return self.tensor.flatten(1).any(dim=1)
+
+    @staticmethod
+    def from_polygon_masks(
+        polygon_masks: Union["PolygonMasks", List[List[np.ndarray]]], height: int, width: int
+    ) -> "BitMasks":
+        """
+        Args:
+            polygon_masks (list[list[ndarray]] or PolygonMasks)
+            height, width (int)
+        """
+        if isinstance(polygon_masks, PolygonMasks):
+            polygon_masks = polygon_masks.polygons
+        masks = [polygons_to_bitmask(p, height, width) for p in polygon_masks]
+        if len(masks):
+            return BitMasks(torch.stack([torch.from_numpy(x) for x in masks]))
+        else:
+            return BitMasks(torch.empty(0, height, width, dtype=torch.bool))
+
+    @staticmethod
+    def from_roi_masks(roi_masks: "ROIMasks", height: int, width: int) -> "BitMasks":
+        """
+        Args:
+            roi_masks:
+            height, width (int):
+        """
+        return roi_masks.to_bitmasks(height, width)
+
+    def crop_and_resize(self, boxes: torch.Tensor, mask_size: int) -> torch.Tensor:
+        """
+        Crop each bitmask by the given box, and resize results to (mask_size, mask_size).
+        This can be used to prepare training targets for Mask R-CNN.
+        It has less reconstruction error compared to rasterization with polygons.
+        However, we observe no difference in accuracy,
+        and BitMasks requires more memory to store all the masks.
+
+        Args:
+            boxes (Tensor): Nx4 tensor storing the boxes for each mask
+            mask_size (int): the size of the rasterized mask.
+
+        Returns:
+            Tensor:
+                A bool tensor of shape (N, mask_size, mask_size), where
+                N is the number of predicted boxes for this image.
+        """
+        assert len(boxes) == len(self), "{} != {}".format(len(boxes), len(self))
+        device = self.tensor.device
+
+        batch_inds = torch.arange(len(boxes), device=device).to(dtype=boxes.dtype)[:, None]
+        rois = torch.cat([batch_inds, boxes], dim=1)  # Nx5
+
+        bit_masks = self.tensor.to(dtype=torch.float32)
+        rois = rois.to(device=device)
+        output = (
+            ROIAlign((mask_size, mask_size), 1.0, 0, aligned=True)
+            .forward(bit_masks[:, None, :, :], rois)
+            .squeeze(1)
+        )
+        output = output >= 0.5
+        return output
+
+    def get_bounding_boxes(self) -> Boxes:
+        """
+        Returns:
+            Boxes: tight bounding boxes around bitmasks.
+            If a mask is empty, its bounding box will be all zero.
+        """
+        boxes = torch.zeros(self.tensor.shape[0], 4, dtype=torch.float32)
+        x_any = torch.any(self.tensor, dim=1)
+        y_any = torch.any(self.tensor, dim=2)
+        for idx in range(self.tensor.shape[0]):
+            x = torch.where(x_any[idx, :])[0]
+            y = torch.where(y_any[idx, :])[0]
+            if len(x) > 0 and len(y) > 0:
+                boxes[idx, :] = torch.as_tensor(
+                    [x[0], y[0], x[-1] + 1, y[-1] + 1], dtype=torch.float32
+                )
+        return Boxes(boxes)
+
+    @staticmethod
+    def cat(bitmasks_list: List["BitMasks"]) -> "BitMasks":
+        """
+        Concatenates a list of BitMasks into a single BitMasks
+
+        Arguments:
+            bitmasks_list (list[BitMasks])
+
+        Returns:
+            BitMasks: the concatenated BitMasks
+        """
+        assert isinstance(bitmasks_list, (list, tuple))
+        assert len(bitmasks_list) > 0
+        assert all(isinstance(bitmask, BitMasks) for bitmask in bitmasks_list)
+
+        cat_bitmasks = type(bitmasks_list[0])(torch.cat([bm.tensor for bm in bitmasks_list], dim=0))
+        return cat_bitmasks
+
+
+class PolygonMasks:
+    """
+    This class stores the segmentation masks for all objects in one image, in the form of polygons.
+
+    Attributes:
+        polygons: list[list[ndarray]]. Each ndarray is a float64 vector representing a polygon.
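+
+    A minimal construction sketch (the coordinates are illustrative):
+
+    .. code-block:: python
+
+        import numpy as np
+        # one instance made of a single triangle [x0, y0, x1, y1, x2, y2]
+        masks = PolygonMasks([[np.array([0.0, 0.0, 4.0, 0.0, 4.0, 3.0])]])
+        assert len(masks) == 1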
+    """
+
+    def __init__(self, polygons: List[List[Union[torch.Tensor, np.ndarray]]]):
+        """
+        Arguments:
+            polygons (list[list[np.ndarray]]): The first
+                level of the list corresponds to individual instances,
+                the second level to all the polygons that compose the
+                instance, and the third level to the polygon coordinates.
+                The third level array should have the format of
+                [x0, y0, x1, y1, ..., xn, yn] (n >= 3).
+        """
+        if not isinstance(polygons, list):
+            raise ValueError(
+                "Cannot create PolygonMasks: Expect a list of list of polygons per image. "
+                "Got '{}' instead.".format(type(polygons))
+            )
+
+        def _make_array(t: Union[torch.Tensor, np.ndarray]) -> np.ndarray:
+            # Use float64 for higher precision, because why not?
+            # Always put polygons on CPU (self.to is a no-op) since they
+            # are supposed to be small tensors.
+            # May need to change this assumption if GPU placement becomes useful
+            if isinstance(t, torch.Tensor):
+                t = t.cpu().numpy()
+            return np.asarray(t).astype("float64")
+
+        def process_polygons(
+            polygons_per_instance: List[Union[torch.Tensor, np.ndarray]]
+        ) -> List[np.ndarray]:
+            if not isinstance(polygons_per_instance, list):
+                raise ValueError(
+                    "Cannot create polygons: Expect a list of polygons per instance. "
+                    "Got '{}' instead.".format(type(polygons_per_instance))
+                )
+            # transform each polygon to a numpy array
+            polygons_per_instance = [_make_array(p) for p in polygons_per_instance]
+            for polygon in polygons_per_instance:
+                if len(polygon) % 2 != 0 or len(polygon) < 6:
+                    raise ValueError(f"Cannot create a polygon from {len(polygon)} coordinates.")
+            return polygons_per_instance
+
+        self.polygons: List[List[np.ndarray]] = [
+            process_polygons(polygons_per_instance) for polygons_per_instance in polygons
+        ]
+
+    def to(self, *args: Any, **kwargs: Any) -> "PolygonMasks":
+        return self
+
+    @property
+    def device(self) -> torch.device:
+        return torch.device("cpu")
+
+    def get_bounding_boxes(self) -> Boxes:
+        """
+        Returns:
+            Boxes: tight bounding boxes around polygon masks.
+        """
+        boxes = torch.zeros(len(self.polygons), 4, dtype=torch.float32)
+        for idx, polygons_per_instance in enumerate(self.polygons):
+            minxy = torch.as_tensor([float("inf"), float("inf")], dtype=torch.float32)
+            maxxy = torch.zeros(2, dtype=torch.float32)
+            for polygon in polygons_per_instance:
+                coords = torch.from_numpy(polygon).view(-1, 2).to(dtype=torch.float32)
+                minxy = torch.min(minxy, torch.min(coords, dim=0).values)
+                maxxy = torch.max(maxxy, torch.max(coords, dim=0).values)
+            boxes[idx, :2] = minxy
+            boxes[idx, 2:] = maxxy
+        return Boxes(boxes)
+
+    def nonempty(self) -> torch.Tensor:
+        """
+        Find masks that are non-empty.
+
+        Returns:
+            Tensor:
+                a BoolTensor which represents whether each mask is empty (False) or not (True).
+        """
+        keep = [1 if len(polygon) > 0 else 0 for polygon in self.polygons]
+        return torch.from_numpy(np.asarray(keep, dtype=bool))
+
+    def __getitem__(self, item: Union[int, slice, List[int], torch.BoolTensor]) -> "PolygonMasks":
+        """
+        Support indexing over the instances and return a `PolygonMasks` object.
+        `item` can be:
+
+        1. An integer. It will return an object with only one instance.
+        2. A slice. It will return an object with the selected instances.
+        3. A list[int]. It will return an object with the selected instances,
+           corresponding to the indices in the list.
+        4. A vector mask of type BoolTensor, whose length is num_instances.
+           It will return an object with the instances whose mask is nonzero.
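+
+        For example (a sketch, assuming `masks` is a `PolygonMasks` with at
+        least three instances):
+
+        .. code-block:: python
+
+            first = masks[0]                # only instance 0
+            pair = masks[[0, 2]]            # instances 0 and 2
+            kept = masks[masks.nonempty()]  # drop empty instances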
+ """ + if isinstance(item, int): + selected_polygons = [self.polygons[item]] + elif isinstance(item, slice): + selected_polygons = self.polygons[item] + elif isinstance(item, list): + selected_polygons = [self.polygons[i] for i in item] + elif isinstance(item, torch.Tensor): + # Polygons is a list, so we have to move the indices back to CPU. + if item.dtype == torch.bool: + assert item.dim() == 1, item.shape + item = item.nonzero().squeeze(1).cpu().numpy().tolist() + elif item.dtype in [torch.int32, torch.int64]: + item = item.cpu().numpy().tolist() + else: + raise ValueError("Unsupported tensor dtype={} for indexing!".format(item.dtype)) + selected_polygons = [self.polygons[i] for i in item] + return PolygonMasks(selected_polygons) + + def __iter__(self) -> Iterator[List[np.ndarray]]: + """ + Yields: + list[ndarray]: the polygons for one instance. + Each Tensor is a float64 vector representing a polygon. + """ + return iter(self.polygons) + + def __repr__(self) -> str: + s = self.__class__.__name__ + "(" + s += "num_instances={})".format(len(self.polygons)) + return s + + def __len__(self) -> int: + return len(self.polygons) + + def crop_and_resize(self, boxes: torch.Tensor, mask_size: int) -> torch.Tensor: + """ + Crop each mask by the given box, and resize results to (mask_size, mask_size). + This can be used to prepare training targets for Mask R-CNN. + + Args: + boxes (Tensor): Nx4 tensor storing the boxes for each mask + mask_size (int): the size of the rasterized mask. + + Returns: + Tensor: A bool tensor of shape (N, mask_size, mask_size), where + N is the number of predicted boxes for this image. + """ + assert len(boxes) == len(self), "{} != {}".format(len(boxes), len(self)) + + device = boxes.device + # Put boxes on the CPU, as the polygon representation is not efficient GPU-wise + # (several small tensors for representing a single instance mask) + boxes = boxes.to(torch.device("cpu")) + + results = [ + rasterize_polygons_within_box(poly, box.numpy(), mask_size) + for poly, box in zip(self.polygons, boxes) + ] + """ + poly: list[list[float]], the polygons for one instance + box: a tensor of shape (4,) + """ + if len(results) == 0: + return torch.empty(0, mask_size, mask_size, dtype=torch.bool, device=device) + return torch.stack(results, dim=0).to(device=device) + + def area(self): + """ + Computes area of the mask. + Only works with Polygons, using the shoelace formula: + https://stackoverflow.com/questions/24467972/calculate-area-of-polygon-given-x-y-coordinates + + Returns: + Tensor: a vector, area for each instance + """ + + area = [] + for polygons_per_instance in self.polygons: + area_per_instance = 0 + for p in polygons_per_instance: + area_per_instance += polygon_area(p[0::2], p[1::2]) + area.append(area_per_instance) + + return torch.tensor(area) + + @staticmethod + def cat(polymasks_list: List["PolygonMasks"]) -> "PolygonMasks": + """ + Concatenates a list of PolygonMasks into a single PolygonMasks + + Arguments: + polymasks_list (list[PolygonMasks]) + + Returns: + PolygonMasks: the concatenated PolygonMasks + """ + assert isinstance(polymasks_list, (list, tuple)) + assert len(polymasks_list) > 0 + assert all(isinstance(polymask, PolygonMasks) for polymask in polymasks_list) + + cat_polymasks = type(polymasks_list[0])( + list(itertools.chain.from_iterable(pm.polygons for pm in polymasks_list)) + ) + return cat_polymasks + + +class ROIMasks: + """ + Represent masks by N smaller masks defined in some ROIs. 
Once ROI boxes are given, + full-image bitmask can be obtained by "pasting" the mask on the region defined + by the corresponding ROI box. + """ + + def __init__(self, tensor: torch.Tensor): + """ + Args: + tensor: (N, M, M) mask tensor that defines the mask within each ROI. + """ + if tensor.dim() != 3: + raise ValueError("ROIMasks must take a masks of 3 dimension.") + self.tensor = tensor + + def to(self, device: torch.device) -> "ROIMasks": + return ROIMasks(self.tensor.to(device)) + + @property + def device(self) -> device: + return self.tensor.device + + def __len__(self): + return self.tensor.shape[0] + + def __getitem__(self, item) -> "ROIMasks": + """ + Returns: + ROIMasks: Create a new :class:`ROIMasks` by indexing. + + The following usage are allowed: + + 1. `new_masks = masks[2:10]`: return a slice of masks. + 2. `new_masks = masks[vector]`, where vector is a torch.BoolTensor + with `length = len(masks)`. Nonzero elements in the vector will be selected. + + Note that the returned object might share storage with this object, + subject to Pytorch's indexing semantics. + """ + t = self.tensor[item] + if t.dim() != 3: + raise ValueError( + f"Indexing on ROIMasks with {item} returns a tensor with shape {t.shape}!" + ) + return ROIMasks(t) + + @torch.jit.unused + def __repr__(self) -> str: + s = self.__class__.__name__ + "(" + s += "num_instances={})".format(len(self.tensor)) + return s + + @torch.jit.unused + def to_bitmasks(self, boxes: torch.Tensor, height, width, threshold=0.5): + """ + Args: see documentation of :func:`paste_masks_in_image`. + """ + from detectron2.layers.mask_ops import paste_masks_in_image, _paste_masks_tensor_shape + + if torch.jit.is_tracing(): + if isinstance(height, torch.Tensor): + paste_func = _paste_masks_tensor_shape + else: + paste_func = paste_masks_in_image + else: + paste_func = retry_if_cuda_oom(paste_masks_in_image) + bitmasks = paste_func(self.tensor, boxes.tensor, (height, width), threshold=threshold) + return BitMasks(bitmasks) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/structures/rotated_boxes.py b/motion-gan-pipeline/preprocessing/third/detectron2/structures/rotated_boxes.py new file mode 100644 index 0000000..4ec8e4c --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/structures/rotated_boxes.py @@ -0,0 +1,503 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import math +from typing import List, Tuple +import torch + +from detectron2.layers.rotated_boxes import pairwise_iou_rotated + +from .boxes import Boxes + + +class RotatedBoxes(Boxes): + """ + This structure stores a list of rotated boxes as a Nx5 torch.Tensor. + It supports some common methods about boxes + (`area`, `clip`, `nonempty`, etc), + and also behaves like a Tensor + (support indexing, `to(device)`, `.device`, and iteration over all boxes) + """ + + def __init__(self, tensor: torch.Tensor): + """ + Args: + tensor (Tensor[float]): a Nx5 matrix. Each row is + (x_center, y_center, width, height, angle), + in which angle is represented in degrees. + While there's no strict range restriction for it, + the recommended principal range is between [-180, 180) degrees. + + Assume we have a horizontal box B = (x_center, y_center, width, height), + where width is along the x-axis and height is along the y-axis. + The rotated box B_rot (x_center, y_center, width, height, angle) + can be seen as: + + 1. When angle == 0: + B_rot == B + 2. When angle > 0: + B_rot is obtained by rotating B w.r.t its center by :math:`|angle|` degrees CCW; + 3. 
When angle < 0: + B_rot is obtained by rotating B w.r.t its center by :math:`|angle|` degrees CW. + + Mathematically, since the right-handed coordinate system for image space + is (y, x), where y is top->down and x is left->right, the 4 vertices of the + rotated rectangle :math:`(yr_i, xr_i)` (i = 1, 2, 3, 4) can be obtained from + the vertices of the horizontal rectangle :math:`(y_i, x_i)` (i = 1, 2, 3, 4) + in the following way (:math:`\\theta = angle*\\pi/180` is the angle in radians, + :math:`(y_c, x_c)` is the center of the rectangle): + + .. math:: + + yr_i = \\cos(\\theta) (y_i - y_c) - \\sin(\\theta) (x_i - x_c) + y_c, + + xr_i = \\sin(\\theta) (y_i - y_c) + \\cos(\\theta) (x_i - x_c) + x_c, + + which is the standard rigid-body rotation transformation. + + Intuitively, the angle is + (1) the rotation angle from y-axis in image space + to the height vector (top->down in the box's local coordinate system) + of the box in CCW, and + (2) the rotation angle from x-axis in image space + to the width vector (left->right in the box's local coordinate system) + of the box in CCW. + + More intuitively, consider the following horizontal box ABCD represented + in (x1, y1, x2, y2): (3, 2, 7, 4), + covering the [3, 7] x [2, 4] region of the continuous coordinate system + which looks like this: + + .. code:: none + + O--------> x + | + | A---B + | | | + | D---C + | + v y + + Note that each capital letter represents one 0-dimensional geometric point + instead of a 'square pixel' here. + + In the example above, using (x, y) to represent a point we have: + + .. math:: + + O = (0, 0), A = (3, 2), B = (7, 2), C = (7, 4), D = (3, 4) + + We name vector AB = vector DC as the width vector in box's local coordinate system, and + vector AD = vector BC as the height vector in box's local coordinate system. Initially, + when angle = 0 degree, they're aligned with the positive directions of x-axis and y-axis + in the image space, respectively. + + For better illustration, we denote the center of the box as E, + + .. code:: none + + O--------> x + | + | A---B + | | E | + | D---C + | + v y + + where the center E = ((3+7)/2, (2+4)/2) = (5, 3). + + Also, + + .. math:: + + width = |AB| = |CD| = 7 - 3 = 4, + height = |AD| = |BC| = 4 - 2 = 2. + + Therefore, the corresponding representation for the same shape in rotated box in + (x_center, y_center, width, height, angle) format is: + + (5, 3, 4, 2, 0), + + Now, let's consider (5, 3, 4, 2, 90), which is rotated by 90 degrees + CCW (counter-clockwise) by definition. It looks like this: + + .. code:: none + + O--------> x + | B-C + | | | + | |E| + | | | + | A-D + v y + + The center E is still located at the same point (5, 3), while the vertices + ABCD are rotated by 90 degrees CCW with regard to E: + A = (4, 5), B = (4, 1), C = (6, 1), D = (6, 5) + + Here, 90 degrees can be seen as the CCW angle to rotate from y-axis to + vector AD or vector BC (the top->down height vector in box's local coordinate system), + or the CCW angle to rotate from x-axis to vector AB or vector DC (the left->right + width vector in box's local coordinate system). + + .. math:: + + width = |AB| = |CD| = 5 - 1 = 4, + height = |AD| = |BC| = 6 - 4 = 2. + + Next, how about (5, 3, 4, 2, -90), which is rotated by 90 degrees CW (clockwise) + by definition? It looks like this: + + .. 
code:: none + + O--------> x + | D-A + | | | + | |E| + | | | + | C-B + v y + + The center E is still located at the same point (5, 3), while the vertices + ABCD are rotated by 90 degrees CW with regard to E: + A = (6, 1), B = (6, 5), C = (4, 5), D = (4, 1) + + .. math:: + + width = |AB| = |CD| = 5 - 1 = 4, + height = |AD| = |BC| = 6 - 4 = 2. + + This covers exactly the same region as (5, 3, 4, 2, 90) does, and their IoU + will be 1. However, these two will generate different RoI Pooling results and + should not be treated as an identical box. + + On the other hand, it's easy to see that (X, Y, W, H, A) is identical to + (X, Y, W, H, A+360N), for any integer N. For example (5, 3, 4, 2, 270) would be + identical to (5, 3, 4, 2, -90), because rotating the shape 270 degrees CCW is + equivalent to rotating the same shape 90 degrees CW. + + We could rotate further to get (5, 3, 4, 2, 180), or (5, 3, 4, 2, -180): + + .. code:: none + + O--------> x + | + | C---D + | | E | + | B---A + | + v y + + .. math:: + + A = (7, 4), B = (3, 4), C = (3, 2), D = (7, 2), + + width = |AB| = |CD| = 7 - 3 = 4, + height = |AD| = |BC| = 4 - 2 = 2. + + Finally, this is a very inaccurate (heavily quantized) illustration of + how (5, 3, 4, 2, 60) looks like in case anyone wonders: + + .. code:: none + + O--------> x + | B\ + | / C + | /E / + | A / + | `D + v y + + It's still a rectangle with center of (5, 3), width of 4 and height of 2, + but its angle (and thus orientation) is somewhere between + (5, 3, 4, 2, 0) and (5, 3, 4, 2, 90). + """ + device = tensor.device if isinstance(tensor, torch.Tensor) else torch.device("cpu") + tensor = torch.as_tensor(tensor, dtype=torch.float32, device=device) + if tensor.numel() == 0: + # Use reshape, so we don't end up creating a new tensor that does not depend on + # the inputs (and consequently confuses jit) + tensor = tensor.reshape((0, 5)).to(dtype=torch.float32, device=device) + assert tensor.dim() == 2 and tensor.size(-1) == 5, tensor.size() + + self.tensor = tensor + + def clone(self) -> "RotatedBoxes": + """ + Clone the RotatedBoxes. + + Returns: + RotatedBoxes + """ + return RotatedBoxes(self.tensor.clone()) + + def to(self, device: torch.device): + # Boxes are assumed float32 and does not support to(dtype) + return RotatedBoxes(self.tensor.to(device=device)) + + def area(self) -> torch.Tensor: + """ + Computes the area of all the boxes. + + Returns: + torch.Tensor: a vector with areas of each box. + """ + box = self.tensor + area = box[:, 2] * box[:, 3] + return area + + def normalize_angles(self) -> None: + """ + Restrict angles to the range of [-180, 180) degrees + """ + self.tensor[:, 4] = (self.tensor[:, 4] + 180.0) % 360.0 - 180.0 + + def clip(self, box_size: Tuple[int, int], clip_angle_threshold: float = 1.0) -> None: + """ + Clip (in place) the boxes by limiting x coordinates to the range [0, width] + and y coordinates to the range [0, height]. + + For RRPN: + Only clip boxes that are almost horizontal with a tolerance of + clip_angle_threshold to maintain backward compatibility. + + Rotated boxes beyond this threshold are not clipped for two reasons: + + 1. There are potentially multiple ways to clip a rotated box to make it + fit within the image. + 2. It's tricky to make the entire rectangular box fit within the image + and still be able to not leave out pixels of interest. + + Therefore we rely on ops like RoIAlignRotated to safely handle this. + + Args: + box_size (height, width): The clipping box's size. + clip_angle_threshold: + Iff. 
abs(normalized(angle)) <= clip_angle_threshold (in degrees),
+                we do the clipping as horizontal boxes.
+        """
+        h, w = box_size
+
+        # normalize angles to be within [-180, 180) degrees
+        self.normalize_angles()
+
+        idx = torch.where(torch.abs(self.tensor[:, 4]) <= clip_angle_threshold)[0]
+
+        # convert to (x1, y1, x2, y2)
+        x1 = self.tensor[idx, 0] - self.tensor[idx, 2] / 2.0
+        y1 = self.tensor[idx, 1] - self.tensor[idx, 3] / 2.0
+        x2 = self.tensor[idx, 0] + self.tensor[idx, 2] / 2.0
+        y2 = self.tensor[idx, 1] + self.tensor[idx, 3] / 2.0
+
+        # clip
+        x1.clamp_(min=0, max=w)
+        y1.clamp_(min=0, max=h)
+        x2.clamp_(min=0, max=w)
+        y2.clamp_(min=0, max=h)
+
+        # convert back to (xc, yc, w, h)
+        self.tensor[idx, 0] = (x1 + x2) / 2.0
+        self.tensor[idx, 1] = (y1 + y2) / 2.0
+        # make sure widths and heights do not increase due to numerical errors
+        self.tensor[idx, 2] = torch.min(self.tensor[idx, 2], x2 - x1)
+        self.tensor[idx, 3] = torch.min(self.tensor[idx, 3], y2 - y1)
+
+    def nonempty(self, threshold: float = 0.0) -> torch.Tensor:
+        """
+        Find boxes that are non-empty.
+        A box is considered empty if either of its sides is no larger than the threshold.
+
+        Returns:
+            Tensor: a binary vector which represents
+            whether each box is empty (False) or non-empty (True).
+        """
+        box = self.tensor
+        widths = box[:, 2]
+        heights = box[:, 3]
+        keep = (widths > threshold) & (heights > threshold)
+        return keep
+
+    def __getitem__(self, item) -> "RotatedBoxes":
+        """
+        Returns:
+            RotatedBoxes: Create a new :class:`RotatedBoxes` by indexing.
+
+        The following usages are allowed:
+
+        1. `new_boxes = boxes[3]`: return a `RotatedBoxes` which contains only one box.
+        2. `new_boxes = boxes[2:10]`: return a slice of boxes.
+        3. `new_boxes = boxes[vector]`, where vector is a torch.ByteTensor
+           with `length = len(boxes)`. Nonzero elements in the vector will be selected.
+
+        Note that the returned RotatedBoxes might share storage with this RotatedBoxes,
+        subject to Pytorch's indexing semantics.
+        """
+        if isinstance(item, int):
+            return RotatedBoxes(self.tensor[item].view(1, -1))
+        b = self.tensor[item]
+        assert b.dim() == 2, "Indexing on RotatedBoxes with {} failed to return a matrix!".format(
+            item
+        )
+        return RotatedBoxes(b)
+
+    def __len__(self) -> int:
+        return self.tensor.shape[0]
+
+    def __repr__(self) -> str:
+        return "RotatedBoxes(" + str(self.tensor) + ")"
+
+    def inside_box(self, box_size: Tuple[int, int], boundary_threshold: int = 0) -> torch.Tensor:
+        """
+        Args:
+            box_size (height, width): Size of the reference box covering
+                [0, width] x [0, height]
+            boundary_threshold (int): Boxes that extend beyond the reference box
+                boundary by more than boundary_threshold are considered "outside".
+
+        For RRPN, it might not be necessary to call this function since it's common
+        for rotated boxes to extend to outside of the image boundaries
+        (the clip function only clips the near-horizontal boxes)
+
+        Returns:
+            a binary vector, indicating whether each box is inside the reference box.
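+
+        A small usage sketch (the numbers are illustrative):
+
+        .. code-block:: python
+
+            import torch
+            # one box centered at (10, 10), 4x2, rotated 30 degrees CCW
+            boxes = RotatedBoxes(torch.tensor([[10.0, 10.0, 4.0, 2.0, 30.0]]))
+            print(boxes.inside_box((20, 20)))  # tensor([True])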
+        """
+        height, width = box_size
+
+        cnt_x = self.tensor[..., 0]
+        cnt_y = self.tensor[..., 1]
+        half_w = self.tensor[..., 2] / 2.0
+        half_h = self.tensor[..., 3] / 2.0
+        a = self.tensor[..., 4]
+        c = torch.abs(torch.cos(a * math.pi / 180.0))
+        s = torch.abs(torch.sin(a * math.pi / 180.0))
+        # This basically computes the horizontal bounding rectangle of the rotated box
+        max_rect_dx = c * half_w + s * half_h
+        max_rect_dy = c * half_h + s * half_w
+
+        inds_inside = (
+            (cnt_x - max_rect_dx >= -boundary_threshold)
+            & (cnt_y - max_rect_dy >= -boundary_threshold)
+            & (cnt_x + max_rect_dx < width + boundary_threshold)
+            & (cnt_y + max_rect_dy < height + boundary_threshold)
+        )
+
+        return inds_inside
+
+    def get_centers(self) -> torch.Tensor:
+        """
+        Returns:
+            The box centers in a Nx2 array of (x, y).
+        """
+        return self.tensor[:, :2]
+
+    def scale(self, scale_x: float, scale_y: float) -> None:
+        """
+        Scale the rotated box with horizontal and vertical scaling factors.
+        Note: when scale_factor_x != scale_factor_y,
+        the rotated box does not preserve the rectangular shape when the angle
+        is not a multiple of 90 degrees under resize transformation.
+        Instead, the shape is a parallelogram (that has skew).
+        Here we make an approximation by fitting a rotated rectangle to the parallelogram.
+        """
+        self.tensor[:, 0] *= scale_x
+        self.tensor[:, 1] *= scale_y
+        theta = self.tensor[:, 4] * math.pi / 180.0
+        c = torch.cos(theta)
+        s = torch.sin(theta)
+
+        # In image space, y is top->down and x is left->right
+        # Consider the local coordinate system for the rotated box,
+        # where the box center is located at (0, 0), and the four vertices ABCD are
+        # A(-w / 2, -h / 2), B(w / 2, -h / 2), C(w / 2, h / 2), D(-w / 2, h / 2)
+        # the midpoint of the left edge AD of the rotated box E is:
+        #   E = (A+D)/2 = (-w / 2, 0)
+        # the midpoint of the top edge AB of the rotated box F is:
+        #   F(0, -h / 2)
+        # To get the old coordinates in the global system, apply the rotation transformation
+        # (Note: the right-handed coordinate system for image space is yOx):
+        #   (old_x, old_y) = (s * y + c * x, c * y - s * x)
+        # E(old) = (s * 0 + c * (-w/2), c * 0 - s * (-w/2)) = (-c * w / 2, s * w / 2)
+        # F(old) = (s * (-h / 2) + c * 0, c * (-h / 2) - s * 0) = (-s * h / 2, -c * h / 2)
+        # After applying the scaling factor (sfx, sfy):
+        # E(new) = (-sfx * c * w / 2, sfy * s * w / 2)
+        # F(new) = (-sfx * s * h / 2, -sfy * c * h / 2)
+        # The new width after scaling transformation becomes:
+
+        # w(new) = |E(new) - O| * 2
+        #        = sqrt[(sfx * c * w / 2)^2 + (sfy * s * w / 2)^2] * 2
+        #        = sqrt[(sfx * c)^2 + (sfy * s)^2] * w
+        # i.e., scale_factor_w = sqrt[(sfx * c)^2 + (sfy * s)^2]
+        #
+        # For example,
+        # when angle = 0 or 180, |c| = 1, s = 0, scale_factor_w == scale_factor_x;
+        # when |angle| = 90, c = 0, |s| = 1, scale_factor_w == scale_factor_y
+        self.tensor[:, 2] *= torch.sqrt((scale_x * c) ** 2 + (scale_y * s) ** 2)
+
+        # h(new) = |F(new) - O| * 2
+        #        = sqrt[(sfx * s * h / 2)^2 + (sfy * c * h / 2)^2] * 2
+        #        = sqrt[(sfx * s)^2 + (sfy * c)^2] * h
+        # i.e., scale_factor_h = sqrt[(sfx * s)^2 + (sfy * c)^2]
+        #
+        # For example,
+        # when angle = 0 or 180, |c| = 1, s = 0, scale_factor_h == scale_factor_y;
+        # when |angle| = 90, c = 0, |s| = 1, scale_factor_h == scale_factor_x
+        self.tensor[:, 3] *= torch.sqrt((scale_x * s) ** 2 + (scale_y * c) ** 2)
+
+        # The angle is the rotation angle from y-axis in image space to the height
+        # vector (top->down in the box's local coordinate system) of the box in CCW.
+        #
+        # angle(new) = angle_yOx(O - F(new))
+        #            = angle_yOx( (sfx * s * h / 2, sfy * c * h / 2) )
+        #            = atan2(sfx * s * h / 2, sfy * c * h / 2)
+        #            = atan2(sfx * s, sfy * c)
+        #
+        # For example,
+        # when sfx == sfy, angle(new) == atan2(s, c) == angle(old)
+        self.tensor[:, 4] = torch.atan2(scale_x * s, scale_y * c) * 180 / math.pi
+
+    @classmethod
+    def cat(cls, boxes_list: List["RotatedBoxes"]) -> "RotatedBoxes":
+        """
+        Concatenates a list of RotatedBoxes into a single RotatedBoxes
+
+        Arguments:
+            boxes_list (list[RotatedBoxes])
+
+        Returns:
+            RotatedBoxes: the concatenated RotatedBoxes
+        """
+        assert isinstance(boxes_list, (list, tuple))
+        if len(boxes_list) == 0:
+            return cls(torch.empty(0))
+        assert all([isinstance(box, RotatedBoxes) for box in boxes_list])
+
+        # use torch.cat (v.s. layers.cat) so the returned boxes never share storage with input
+        cat_boxes = cls(torch.cat([b.tensor for b in boxes_list], dim=0))
+        return cat_boxes
+
+    @property
+    def device(self) -> torch.device:
+        return self.tensor.device
+
+    @torch.jit.unused
+    def __iter__(self):
+        """
+        Yield a box as a Tensor of shape (5,) at a time.
+        """
+        yield from self.tensor
+
+
+def pairwise_iou(boxes1: RotatedBoxes, boxes2: RotatedBoxes) -> torch.Tensor:
+    """
+    Given two lists of rotated boxes of size N and M,
+    compute the IoU (intersection over union)
+    between **all** N x M pairs of boxes.
+    The box order must be (x_center, y_center, width, height, angle).
+
+    Args:
+        boxes1, boxes2 (RotatedBoxes):
+            two `RotatedBoxes`. Contains N & M rotated boxes, respectively.
+
+    Returns:
+        Tensor: IoU, sized [N,M].
+    """
+
+    return pairwise_iou_rotated(boxes1.tensor, boxes2.tensor)
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/tracking/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/tracking/__init__.py
new file mode 100644
index 0000000..aaf477d
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/tracking/__init__.py
@@ -0,0 +1,15 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+from .base_tracker import (  # noqa
+    BaseTracker,
+    build_tracker_head,
+    TRACKER_HEADS_REGISTRY,
+)
+from .bbox_iou_tracker import BBoxIOUTracker  # noqa
+from .hungarian_tracker import BaseHungarianTracker  # noqa
+from .iou_weighted_hungarian_bbox_iou_tracker import (  # noqa
+    IOUWeightedHungarianBBoxIOUTracker,
+)
+from .utils import create_prediction_pairs  # noqa
+from .vanilla_hungarian_bbox_iou_tracker import VanillaHungarianBBoxIOUTracker  # noqa
+
+__all__ = [k for k in globals().keys() if not k.startswith("_")]
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/tracking/base_tracker.py b/motion-gan-pipeline/preprocessing/third/detectron2/tracking/base_tracker.py
new file mode 100644
index 0000000..e2dea77
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/tracking/base_tracker.py
@@ -0,0 +1,64 @@
+#!/usr/bin/env python3
+# Copyright 2004-present Facebook. All Rights Reserved.
+from ..structures import Instances
+from detectron2.utils.registry import Registry
+from ..config.config import CfgNode as CfgNode_
+from detectron2.config import configurable
+
+
+TRACKER_HEADS_REGISTRY = Registry("TRACKER_HEADS")
+TRACKER_HEADS_REGISTRY.__doc__ = """
+Registry for tracking classes.
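+
+Usage sketch (illustrative; it assumes the config carries a `TRACKER_HEADS`
+node, as consumed by `build_tracker_head` below):
+
+.. code-block:: python
+
+    name = cfg.TRACKER_HEADS.TRACKER_NAME        # e.g. "BBoxIOUTracker"
+    tracker_class = TRACKER_HEADS_REGISTRY.get(name)
+    tracker = tracker_class(cfg)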
+""" + + +class BaseTracker(object): + """ + A parent class for all trackers + """ + + @configurable + def __init__(self, **kwargs): + self._prev_instances = None # (D2)instances for previous frame + self._matched_idx = set() # indices in prev_instances found matching + self._matched_ID = set() # idendities in prev_instances found matching + self._untracked_prev_idx = set() # indices in prev_instances not found matching + self._id_count = 0 # used to assign new id + + @classmethod + def from_config(cls, cfg: CfgNode_): + raise NotImplementedError("Calling BaseTracker::from_config") + + def update(self, predictions: Instances) -> Instances: + """ + Args: + predictions: D2 Instances for predictions of the current frame + Return: + D2 Instances for predictions of the current frame with ID assigned + + _prev_instances and instances will have the following fields: + .pred_boxes (shape=[N, 4]) + .scores (shape=[N,]) + .pred_classes (shape=[N,]) + .pred_keypoints (shape=[N, M, 3], Optional) + .pred_masks (shape=List[2D_MASK], Optional) 2D_MASK: shape=[H, W] + .ID (shape=[N,]) + + N: # of detected bboxes + H and W: height and width of 2D mask + """ + raise NotImplementedError("Calling BaseTracker::update") + + +def build_tracker_head(cfg: CfgNode_) -> BaseTracker: + """ + Build a tracker head from `cfg.TRACKER_HEADS.TRACKER_NAME`. + + Args: + cfg: D2 CfgNode, config file with tracker information + Return: + tracker object + """ + name = cfg.TRACKER_HEADS.TRACKER_NAME + tracker_class = TRACKER_HEADS_REGISTRY.get(name) + return tracker_class(cfg) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/tracking/bbox_iou_tracker.py b/motion-gan-pipeline/preprocessing/third/detectron2/tracking/bbox_iou_tracker.py new file mode 100644 index 0000000..d7eb3aa --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/tracking/bbox_iou_tracker.py @@ -0,0 +1,260 @@ +#!/usr/bin/env python3 +# Copyright 2004-present Facebook. All Rights Reserved. 
+import copy
+from typing import List
+
+import numpy as np
+import torch
+from detectron2.config import configurable
+from detectron2.structures import Boxes, Instances
+from detectron2.structures.boxes import pairwise_iou
+
+from ..config.config import CfgNode as CfgNode_
+from .base_tracker import BaseTracker, TRACKER_HEADS_REGISTRY
+
+
+@TRACKER_HEADS_REGISTRY.register()
+class BBoxIOUTracker(BaseTracker):
+    """
+    A bounding box tracker to assign ID based on IoU between current and previous instances
+    """
+    @configurable
+    def __init__(
+        self,
+        *,
+        video_height: int,
+        video_width: int,
+        max_num_instances: int = 200,
+        max_lost_frame_count: int = 0,
+        min_box_rel_dim: float = 0.02,
+        min_instance_period: int = 1,
+        track_iou_threshold: float = 0.5,
+        **kwargs
+    ):
+        """
+        Args:
+            video_height: height of the video frame
+            video_width: width of the video frame
+            max_num_instances: maximum number of ids allowed to be tracked
+            max_lost_frame_count: maximum number of frames an id can be lost for;
+                once this number is exceeded, the id is considered lost forever
+            min_box_rel_dim: a relative dimension (as a fraction of the frame size);
+                bboxes smaller than this are removed from tracking
+            min_instance_period: an instance is only shown after being tracked for
+                this number of frames since it first appears in the video
+            track_iou_threshold: IoU threshold; bbox pairs below this number are
+                removed from tracking
+        """
+        super().__init__(**kwargs)
+        self._video_height = video_height
+        self._video_width = video_width
+        self._max_num_instances = max_num_instances
+        self._max_lost_frame_count = max_lost_frame_count
+        self._min_box_rel_dim = min_box_rel_dim
+        self._min_instance_period = min_instance_period
+        self._track_iou_threshold = track_iou_threshold
+
+    @classmethod
+    def from_config(cls, cfg: CfgNode_):
+        """
+        Old style initialization using CfgNode
+
+        Args:
+            cfg: D2 CfgNode, config file
+        Return:
+            dictionary storing arguments for __init__ method
+        """
+        assert "VIDEO_HEIGHT" in cfg.TRACKER_HEADS
+        assert "VIDEO_WIDTH" in cfg.TRACKER_HEADS
+        video_height = cfg.TRACKER_HEADS.get("VIDEO_HEIGHT")
+        video_width = cfg.TRACKER_HEADS.get("VIDEO_WIDTH")
+        max_num_instances = cfg.TRACKER_HEADS.get("MAX_NUM_INSTANCES", 200)
+        max_lost_frame_count = cfg.TRACKER_HEADS.get("MAX_LOST_FRAME_COUNT", 0)
+        min_box_rel_dim = cfg.TRACKER_HEADS.get("MIN_BOX_REL_DIM", 0.02)
+        min_instance_period = cfg.TRACKER_HEADS.get("MIN_INSTANCE_PERIOD", 1)
+        track_iou_threshold = cfg.TRACKER_HEADS.get("TRACK_IOU_THRESHOLD", 0.5)
+        return {
+            "_target_": "detectron2.tracking.bbox_iou_tracker.BBoxIOUTracker",
+            "video_height": video_height,
+            "video_width": video_width,
+            "max_num_instances": max_num_instances,
+            "max_lost_frame_count": max_lost_frame_count,
+            "min_box_rel_dim": min_box_rel_dim,
+            "min_instance_period": min_instance_period,
+            "track_iou_threshold": track_iou_threshold
+        }
+
+    def update(self, instances: Instances) -> Instances:
+        """
+        See BaseTracker description
+        """
+        if instances.has("pred_keypoints"):
+            raise NotImplementedError("Need to add support for keypoints")
+        instances = self._initialize_extra_fields(instances)
+        if self._prev_instances is not None:
+            # calculate IoU of all bbox pairs
+            iou_all = pairwise_iou(
+                boxes1=instances.pred_boxes,
+                boxes2=self._prev_instances.pred_boxes,
+            )
+            # sort IoU in descending order
+            bbox_pairs = self._create_prediction_pairs(instances, iou_all)
+            # assign previous ID to current bbox if IoU > track_iou_threshold
+            self._reset_fields()
+            for bbox_pair in bbox_pairs:
+                idx = bbox_pair["idx"]
+                prev_id = 
bbox_pair["prev_id"] + if idx in self._matched_idx \ + or prev_id in self._matched_ID \ + or bbox_pair["IoU"] < self._track_iou_threshold: + continue + instances.ID[idx] = prev_id + instances.ID_period[idx] = bbox_pair["prev_period"] + 1 + instances.lost_frame_count[idx] = 0 + self._matched_idx.add(idx) + self._matched_ID.add(prev_id) + self._untracked_prev_idx.remove(bbox_pair["prev_idx"]) + instances = self._assign_new_id(instances) + instances = self._merge_untracked_instances(instances) + self._prev_instances = copy.deepcopy(instances) + return instances + + def _create_prediction_pairs( + self, instances: Instances, iou_all: np.ndarray + ) -> List: + """ + For all instances in previous and current frames, create pairs. For each + pair, store index of the instance in current frame predcitions, index in + previous predictions, ID in previous predictions, IoU of the bboxes in this + pair, period in previous predictions. + + Args: + instances: D2 Instances, for predictions of the current frame + iou_all: IoU for all bboxes pairs + Return: + A list of IoU for all pairs + """ + bbox_pairs = [] + for i in range(len(instances)): + for j in range(len(self._prev_instances)): + bbox_pairs.append( + { + "idx": i, + "prev_idx": j, + "prev_id": self._prev_instances.ID[j], + "IoU": iou_all[i, j], + "prev_period": self._prev_instances.ID_period[j], + } + ) + return bbox_pairs + + def _initialize_extra_fields(self, instances: Instances) -> Instances: + """ + If input instances don't have ID, ID_period, lost_frame_count fields, + this method is used to initialize these fields. + + Args: + instances: D2 Instances, for predictions of the current frame + Return: + D2 Instances with extra fields added + """ + if not instances.has("ID"): + instances.set("ID", [None] * len(instances)) + if not instances.has("ID_period"): + instances.set("ID_period", [None] * len(instances)) + if not instances.has("lost_frame_count"): + instances.set("lost_frame_count", [None] * len(instances)) + if self._prev_instances is None: + instances.ID = list(range(len(instances))) + self._id_count += len(instances) + instances.ID_period = [1] * len(instances) + instances.lost_frame_count = [0] * len(instances) + return instances + + def _reset_fields(self): + """ + Before each uodate call, reset fields first + """ + self._matched_idx = set() + self._matched_ID = set() + self._untracked_prev_idx = set(range(len(self._prev_instances))) + + def _assign_new_id(self, instances: Instances) -> Instances: + """ + For each untracked instance, assign a new id + + Args: + instances: D2 Instances, for predictions of the current frame + Return: + D2 Instances with new ID assigned + """ + untracked_idx = set(range(len(instances))).difference(self._matched_idx) + for idx in untracked_idx: + instances.ID[idx] = self._id_count + self._id_count += 1 + instances.ID_period[idx] = 1 + instances.lost_frame_count[idx] = 0 + return instances + + def _merge_untracked_instances(self, instances: Instances) -> Instances: + """ + For untracked previous instances, under certain condition, still keep them + in tracking and merge with the current instances. 
+ + Args: + instances: D2 Instances, for predictions of the current frame + Return: + D2 Instances merging current instances and instances from previous + frame decided to keep tracking + """ + untracked_instances = Instances( + image_size=instances.image_size, + pred_boxes=[], + pred_masks=[], + pred_classes=[], + scores=[], + ID=[], + ID_period=[], + lost_frame_count=[], + ) + prev_bboxes = list(self._prev_instances.pred_boxes) + prev_classes = list(self._prev_instances.pred_classes) + prev_scores = list(self._prev_instances.scores) + prev_ID_period = self._prev_instances.ID_period + if instances.has("pred_masks"): + prev_masks = list(self._prev_instances.pred_masks) + for idx in self._untracked_prev_idx: + x_left, y_top, x_right, y_bot = prev_bboxes[idx] + if ( + (1.0 * (x_right - x_left) / self._video_width < self._min_box_rel_dim) + or (1.0 * (y_bot - y_top) / self._video_height < self._min_box_rel_dim) + or self._prev_instances.lost_frame_count[idx] >= self._max_lost_frame_count + or prev_ID_period[idx] <= self._min_instance_period + ): + continue + untracked_instances.pred_boxes.append(list(prev_bboxes[idx].numpy())) + untracked_instances.pred_classes.append(int(prev_classes[idx])) + untracked_instances.scores.append(float(prev_scores[idx])) + untracked_instances.ID.append(self._prev_instances.ID[idx]) + untracked_instances.ID_period.append(self._prev_instances.ID_period[idx]) + untracked_instances.lost_frame_count.append( + self._prev_instances.lost_frame_count[idx] + 1 + ) + if instances.has("pred_masks"): + untracked_instances.pred_masks.append(prev_masks[idx].numpy().astype(np.uint8)) + + untracked_instances.pred_boxes = Boxes(torch.FloatTensor(untracked_instances.pred_boxes)) + untracked_instances.pred_classes = torch.IntTensor(untracked_instances.pred_classes) + untracked_instances.scores = torch.FloatTensor(untracked_instances.scores) + if instances.has("pred_masks"): + untracked_instances.pred_masks = torch.IntTensor(untracked_instances.pred_masks) + else: + untracked_instances.remove("pred_masks") + + return Instances.cat( + [ + instances, + untracked_instances, + ] + ) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/tracking/hungarian_tracker.py b/motion-gan-pipeline/preprocessing/third/detectron2/tracking/hungarian_tracker.py new file mode 100644 index 0000000..a080157 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/tracking/hungarian_tracker.py @@ -0,0 +1,176 @@ +#!/usr/bin/env python3 +# Copyright 2004-present Facebook. All Rights Reserved. 
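+# Illustrative note (an assumption for clarity, not part of the original upstream
+# file): subclasses define `build_cost_matrix`, and `update` matches current to
+# previous detections by minimizing that cost with scipy's Hungarian solver, e.g.
+#
+#     import numpy as np
+#     from scipy.optimize import linear_sum_assignment
+#     cost = np.array([[0.1, 0.9], [0.8, 0.2]])  # rows: current, cols: previous
+#     rows, cols = linear_sum_assignment(cost)   # -> rows=[0, 1], cols=[0, 1]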
+import copy
+
+import numpy as np
+import torch
+from detectron2.structures import Boxes, Instances
+
+from .base_tracker import BaseTracker
+from scipy.optimize import linear_sum_assignment
+from ..config.config import CfgNode as CfgNode_
+from typing import Dict
+from detectron2.config import configurable
+
+
+class BaseHungarianTracker(BaseTracker):
+    """
+    A base class for all Hungarian trackers
+    """
+
+    @configurable
+    def __init__(
+        self,
+        video_height: int,
+        video_width: int,
+        max_num_instances: int = 200,
+        max_lost_frame_count: int = 0,
+        min_box_rel_dim: float = 0.02,
+        min_instance_period: int = 1,
+        **kwargs
+    ):
+        """
+        Args:
+            video_height: height of the video frame
+            video_width: width of the video frame
+            max_num_instances: maximum number of IDs allowed to be tracked
+            max_lost_frame_count: maximum number of frames an ID may stay lost;
+                                  beyond this number, the ID is considered lost
+                                  forever
+            min_box_rel_dim: a fraction of the frame size; a bbox smaller than
+                             this relative dimension is removed from tracking
+            min_instance_period: an instance is only shown after it has been
+                                 tracked for this many frames since it first
+                                 appeared in the video
+        """
+        super().__init__(**kwargs)
+        self._video_height = video_height
+        self._video_width = video_width
+        self._max_num_instances = max_num_instances
+        self._max_lost_frame_count = max_lost_frame_count
+        self._min_box_rel_dim = min_box_rel_dim
+        self._min_instance_period = min_instance_period
+
+    @classmethod
+    def from_config(cls, cfg: CfgNode_) -> Dict:
+        raise NotImplementedError("Calling HungarianTracker::from_config")
+
+    def build_cost_matrix(self, instances: Instances, prev_instances: Instances) -> np.ndarray:
+        raise NotImplementedError("Calling HungarianTracker::build_cost_matrix")
+
+    def update(self, instances: Instances) -> Instances:
+        if instances.has("pred_keypoints"):
+            raise NotImplementedError("Need to add support for keypoints")
+        instances = self._initialize_extra_fields(instances)
+        if self._prev_instances is not None:
+            self._untracked_prev_idx = set(range(len(self._prev_instances)))
+            cost_matrix = self.build_cost_matrix(instances, self._prev_instances)
+            matched_idx, matched_prev_idx = linear_sum_assignment(cost_matrix)
+            instances = self._process_matched_idx(instances, matched_idx, matched_prev_idx)
+            instances = self._process_unmatched_idx(instances, matched_idx)
+            instances = self._process_unmatched_prev_idx(instances, matched_prev_idx)
+        self._prev_instances = copy.deepcopy(instances)
+        return instances
+
+    def _initialize_extra_fields(self, instances: Instances) -> Instances:
+        """
+        If input instances don't have ID, ID_period, lost_frame_count fields,
+        this method is used to initialize these fields.
+ + Args: + instances: D2 Instances, for predictions of the current frame + Return: + D2 Instances with extra fields added + """ + if not instances.has("ID"): + instances.set("ID", [None] * len(instances)) + if not instances.has("ID_period"): + instances.set("ID_period", [None] * len(instances)) + if not instances.has("lost_frame_count"): + instances.set("lost_frame_count", [None] * len(instances)) + if self._prev_instances is None: + instances.ID = list(range(len(instances))) + self._id_count += len(instances) + instances.ID_period = [1] * len(instances) + instances.lost_frame_count = [0] * len(instances) + return instances + + def _process_matched_idx( + self, + instances: Instances, + matched_idx: np.ndarray, + matched_prev_idx: np.ndarray + ) -> Instances: + assert matched_idx.size == matched_prev_idx.size + for i in range(matched_idx.size): + instances.ID[matched_idx[i]] = self._prev_instances.ID[matched_prev_idx[i]] + instances.ID_period[matched_idx[i]] = \ + self._prev_instances.ID_period[matched_prev_idx[i]] + 1 + instances.lost_frame_count[matched_idx[i]] = 0 + return instances + + def _process_unmatched_idx(self, instances: Instances, matched_idx: np.ndarray) -> Instances: + untracked_idx = set(range(len(instances))).difference(set(matched_idx)) + for idx in untracked_idx: + instances.ID[idx] = self._id_count + self._id_count += 1 + instances.ID_period[idx] = 1 + instances.lost_frame_count[idx] = 0 + return instances + + def _process_unmatched_prev_idx( + self, + instances: Instances, + matched_prev_idx: + np.ndarray + ) -> Instances: + untracked_instances = Instances( + image_size=instances.image_size, + pred_boxes=[], + pred_masks=[], + pred_classes=[], + scores=[], + ID=[], + ID_period=[], + lost_frame_count=[], + ) + prev_bboxes = list(self._prev_instances.pred_boxes) + prev_classes = list(self._prev_instances.pred_classes) + prev_scores = list(self._prev_instances.scores) + prev_ID_period = self._prev_instances.ID_period + if instances.has("pred_masks"): + prev_masks = list(self._prev_instances.pred_masks) + untracked_prev_idx = set(range(len(self._prev_instances))).difference(set(matched_prev_idx)) + for idx in untracked_prev_idx: + x_left, y_top, x_right, y_bot = prev_bboxes[idx] + if ( + (1.0 * (x_right - x_left) / self._video_width < self._min_box_rel_dim) + or (1.0 * (y_bot - y_top) / self._video_height < self._min_box_rel_dim) + or self._prev_instances.lost_frame_count[idx] >= self._max_lost_frame_count + or prev_ID_period[idx] <= self._min_instance_period + ): + continue + untracked_instances.pred_boxes.append(list(prev_bboxes[idx].numpy())) + untracked_instances.pred_classes.append(int(prev_classes[idx])) + untracked_instances.scores.append(float(prev_scores[idx])) + untracked_instances.ID.append(self._prev_instances.ID[idx]) + untracked_instances.ID_period.append(self._prev_instances.ID_period[idx]) + untracked_instances.lost_frame_count.append( + self._prev_instances.lost_frame_count[idx] + 1 + ) + if instances.has("pred_masks"): + untracked_instances.pred_masks.append(prev_masks[idx].numpy().astype(np.uint8)) + + untracked_instances.pred_boxes = Boxes(torch.FloatTensor(untracked_instances.pred_boxes)) + untracked_instances.pred_classes = torch.IntTensor(untracked_instances.pred_classes) + untracked_instances.scores = torch.FloatTensor(untracked_instances.scores) + if instances.has("pred_masks"): + untracked_instances.pred_masks = torch.IntTensor(untracked_instances.pred_masks) + else: + untracked_instances.remove("pred_masks") + + return Instances.cat( + [ + 
instances,
+                untracked_instances,
+            ]
+        )
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/tracking/iou_weighted_hungarian_bbox_iou_tracker.py b/motion-gan-pipeline/preprocessing/third/detectron2/tracking/iou_weighted_hungarian_bbox_iou_tracker.py
new file mode 100644
index 0000000..94bf342
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/tracking/iou_weighted_hungarian_bbox_iou_tracker.py
@@ -0,0 +1,101 @@
+#!/usr/bin/env python3
+# Copyright 2004-present Facebook. All Rights Reserved.
+
+from typing import List
+
+import numpy as np
+
+from .base_tracker import TRACKER_HEADS_REGISTRY
+from .vanilla_hungarian_bbox_iou_tracker import VanillaHungarianBBoxIOUTracker
+from detectron2.config import configurable, CfgNode as CfgNode_
+
+
+@TRACKER_HEADS_REGISTRY.register()
+class IOUWeightedHungarianBBoxIOUTracker(VanillaHungarianBBoxIOUTracker):
+    """
+    A tracker that uses IoU as the weight in the Hungarian algorithm, also
+    known as the Munkres or Kuhn-Munkres algorithm
+    """
+
+    @configurable
+    def __init__(
+        self,
+        *,
+        video_height: int,
+        video_width: int,
+        max_num_instances: int = 200,
+        max_lost_frame_count: int = 0,
+        min_box_rel_dim: float = 0.02,
+        min_instance_period: int = 1,
+        track_iou_threshold: float = 0.5,
+        **kwargs
+    ):
+        """
+        Args:
+            video_height: height of the video frame
+            video_width: width of the video frame
+            max_num_instances: maximum number of IDs allowed to be tracked
+            max_lost_frame_count: maximum number of frames an ID may stay lost;
+                                  beyond this number, the ID is considered lost
+                                  forever
+            min_box_rel_dim: a fraction of the frame size; a bbox smaller than
+                             this relative dimension is removed from tracking
+            min_instance_period: an instance is only shown after it has been
+                                 tracked for this many frames since it first
+                                 appeared in the video
+            track_iou_threshold: IoU threshold; bbox pairs below this value are
+                                 removed from tracking
+        """
+        super().__init__(
+            video_height=video_height,
+            video_width=video_width,
+            max_num_instances=max_num_instances,
+            max_lost_frame_count=max_lost_frame_count,
+            min_box_rel_dim=min_box_rel_dim,
+            min_instance_period=min_instance_period,
+            track_iou_threshold=track_iou_threshold
+        )
+
+    @classmethod
+    def from_config(cls, cfg: CfgNode_):
+        """
+        Old style initialization using CfgNode
+
+        Args:
+            cfg: D2 CfgNode, config file
+        Return:
+            dictionary storing arguments for __init__ method
+        """
+        assert "VIDEO_HEIGHT" in cfg.TRACKER_HEADS
+        assert "VIDEO_WIDTH" in cfg.TRACKER_HEADS
+        video_height = cfg.TRACKER_HEADS.get("VIDEO_HEIGHT")
+        video_width = cfg.TRACKER_HEADS.get("VIDEO_WIDTH")
+        max_num_instances = cfg.TRACKER_HEADS.get("MAX_NUM_INSTANCES", 200)
+        max_lost_frame_count = cfg.TRACKER_HEADS.get("MAX_LOST_FRAME_COUNT", 0)
+        min_box_rel_dim = cfg.TRACKER_HEADS.get("MIN_BOX_REL_DIM", 0.02)
+        min_instance_period = cfg.TRACKER_HEADS.get("MIN_INSTANCE_PERIOD", 1)
+        track_iou_threshold = cfg.TRACKER_HEADS.get("TRACK_IOU_THRESHOLD", 0.5)
+        return {
+            "_target_": "detectron2.tracking.iou_weighted_hungarian_bbox_iou_tracker.IOUWeightedHungarianBBoxIOUTracker",  # noqa
+            "video_height": video_height,
+            "video_width": video_width,
+            "max_num_instances": max_num_instances,
+            "max_lost_frame_count": max_lost_frame_count,
+            "min_box_rel_dim": min_box_rel_dim,
+            "min_instance_period": min_instance_period,
+            "track_iou_threshold": track_iou_threshold
+        }
+
+    def assign_cost_matrix_values(self, cost_matrix: np.ndarray, bbox_pairs: List) -> np.ndarray:
+        """
+        Based on IoU for each pair of bbox, assign the associated value in cost matrix
+
+        Args:
cost_matrix: np.ndarray, initialized 2D array with target dimensions + bbox_pairs: list of bbox pair, in each pair, iou value is stored + Return: + np.ndarray, cost_matrix with assigned values + """ + for pair in bbox_pairs: + # assign (-1 * IoU) for above threshold pairs, algorithms will minimize cost + cost_matrix[pair["idx"]][pair["prev_idx"]] = -1 * pair["IoU"] + return cost_matrix diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/tracking/utils.py b/motion-gan-pipeline/preprocessing/third/detectron2/tracking/utils.py new file mode 100644 index 0000000..7dbcf2a --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/tracking/utils.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python3 +from detectron2.structures import Instances +import numpy as np +from typing import List + + +def create_prediction_pairs( + instances: Instances, + prev_instances: Instances, + iou_all: np.ndarray, + threshold: float = 0.5, +) -> List: + """ + Args: + instances: predictions from current frame + prev_instances: predictions from previous frame + iou_all: 2D numpy array containing iou for each bbox pair + threshold: below the threshold, doesn't consider the pair of bbox is valid + Return: + List of bbox pairs + """ + bbox_pairs = [] + for i in range(len(instances)): + for j in range(len(prev_instances)): + if iou_all[i, j] < threshold: + continue + bbox_pairs.append( + { + "idx": i, + "prev_idx": j, + "prev_id": prev_instances.ID[j], + "IoU": iou_all[i, j], + "prev_period": prev_instances.ID_period[j], + } + ) + return bbox_pairs + + +LARGE_COST_VALUE = 100000 diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/tracking/vanilla_hungarian_bbox_iou_tracker.py b/motion-gan-pipeline/preprocessing/third/detectron2/tracking/vanilla_hungarian_bbox_iou_tracker.py new file mode 100644 index 0000000..9c1bf5a --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/tracking/vanilla_hungarian_bbox_iou_tracker.py @@ -0,0 +1,131 @@ +#!/usr/bin/env python3 +# Copyright 2004-present Facebook. All Rights Reserved. 
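
The interaction between `create_prediction_pairs`, the `LARGE_COST_VALUE` sentinel, and the Hungarian solver can be illustrated standalone; a toy sketch with made-up IoU values (only numpy and scipy needed):

    import numpy as np
    from scipy.optimize import linear_sum_assignment

    LARGE_COST_VALUE = 100000  # same sentinel as in utils.py above
    iou = np.array([[0.8, 0.1],    # 3 current boxes vs. 2 previous boxes
                    [0.0, 0.3],
                    [0.2, 0.1]])
    threshold = 0.5

    # Pairs below the threshold keep the sentinel cost; valid pairs get a
    # negative cost (the vanilla tracker uses -1, the IoU-weighted one -IoU).
    cost = np.full(iou.shape, LARGE_COST_VALUE, dtype=float)
    cost[iou >= threshold] = -iou[iou >= threshold]

    rows, cols = linear_sum_assignment(cost)
    for r, c in zip(rows, cols):
        # A pair matched only through the sentinel is effectively unmatched.
        status = "tracked" if cost[r, c] < 0 else "new/lost"
        print(f"curr {r} <-> prev {c}: {status}")
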
+
+from typing import List
+
+import numpy as np
+from detectron2.structures import Instances
+from detectron2.structures.boxes import pairwise_iou
+from detectron2.tracking.utils import create_prediction_pairs, LARGE_COST_VALUE
+
+from .base_tracker import TRACKER_HEADS_REGISTRY
+from .hungarian_tracker import BaseHungarianTracker
+from detectron2.config import configurable, CfgNode as CfgNode_
+
+
+@TRACKER_HEADS_REGISTRY.register()
+class VanillaHungarianBBoxIOUTracker(BaseHungarianTracker):
+    """
+    Hungarian-algorithm-based tracker using bbox IoU as the matching metric
+    """
+
+    @configurable
+    def __init__(
+        self,
+        *,
+        video_height: int,
+        video_width: int,
+        max_num_instances: int = 200,
+        max_lost_frame_count: int = 0,
+        min_box_rel_dim: float = 0.02,
+        min_instance_period: int = 1,
+        track_iou_threshold: float = 0.5,
+        **kwargs
+    ):
+        """
+        Args:
+            video_height: height of the video frame
+            video_width: width of the video frame
+            max_num_instances: maximum number of IDs allowed to be tracked
+            max_lost_frame_count: maximum number of frames an ID may stay lost;
+                                  beyond this number, the ID is considered lost
+                                  forever
+            min_box_rel_dim: a fraction of the frame size; a bbox smaller than
+                             this relative dimension is removed from tracking
+            min_instance_period: an instance is only shown after it has been
+                                 tracked for this many frames since it first
+                                 appeared in the video
+            track_iou_threshold: IoU threshold; bbox pairs below this value are
+                                 removed from tracking
+        """
+        super().__init__(
+            video_height=video_height,
+            video_width=video_width,
+            max_num_instances=max_num_instances,
+            max_lost_frame_count=max_lost_frame_count,
+            min_box_rel_dim=min_box_rel_dim,
+            min_instance_period=min_instance_period
+        )
+        self._track_iou_threshold = track_iou_threshold
+
+    @classmethod
+    def from_config(cls, cfg: CfgNode_):
+        """
+        Old style initialization using CfgNode
+
+        Args:
+            cfg: D2 CfgNode, config file
+        Return:
+            dictionary storing arguments for __init__ method
+        """
+        assert "VIDEO_HEIGHT" in cfg.TRACKER_HEADS
+        assert "VIDEO_WIDTH" in cfg.TRACKER_HEADS
+        video_height = cfg.TRACKER_HEADS.get("VIDEO_HEIGHT")
+        video_width = cfg.TRACKER_HEADS.get("VIDEO_WIDTH")
+        max_num_instances = cfg.TRACKER_HEADS.get("MAX_NUM_INSTANCES", 200)
+        max_lost_frame_count = cfg.TRACKER_HEADS.get("MAX_LOST_FRAME_COUNT", 0)
+        min_box_rel_dim = cfg.TRACKER_HEADS.get("MIN_BOX_REL_DIM", 0.02)
+        min_instance_period = cfg.TRACKER_HEADS.get("MIN_INSTANCE_PERIOD", 1)
+        track_iou_threshold = cfg.TRACKER_HEADS.get("TRACK_IOU_THRESHOLD", 0.5)
+        return {
+            "_target_": "detectron2.tracking.vanilla_hungarian_bbox_iou_tracker.VanillaHungarianBBoxIOUTracker",  # noqa
+            "video_height": video_height,
+            "video_width": video_width,
+            "max_num_instances": max_num_instances,
+            "max_lost_frame_count": max_lost_frame_count,
+            "min_box_rel_dim": min_box_rel_dim,
+            "min_instance_period": min_instance_period,
+            "track_iou_threshold": track_iou_threshold
+        }
+
+    def build_cost_matrix(self, instances: Instances, prev_instances: Instances) -> np.ndarray:
+        """
+        Build the cost matrix for assignment problem
+        (https://en.wikipedia.org/wiki/Assignment_problem)
+
+        Args:
+            instances: D2 Instances, for current frame predictions
+            prev_instances: D2 Instances, for previous frame predictions
+
+        Return:
+            the cost matrix in numpy array
+        """
+        assert instances is not None and prev_instances is not None
+        # calculate IoU of all bbox pairs
+        iou_all = pairwise_iou(
+            boxes1=instances.pred_boxes,
+            boxes2=self._prev_instances.pred_boxes,
+        )
+        bbox_pairs = create_prediction_pairs(
+            instances,
self._prev_instances,
+            iou_all,
+            threshold=self._track_iou_threshold
+        )
+        # assign a large cost value to make sure pairs below the IoU threshold won't be matched
+        cost_matrix = np.full((len(instances), len(prev_instances)), LARGE_COST_VALUE)
+        return self.assign_cost_matrix_values(cost_matrix, bbox_pairs)
+
+    def assign_cost_matrix_values(self, cost_matrix: np.ndarray, bbox_pairs: List) -> np.ndarray:
+        """
+        Based on IoU for each pair of bbox, assign the associated value in cost matrix
+
+        Args:
+            cost_matrix: np.ndarray, initialized 2D array with target dimensions
+            bbox_pairs: list of bbox pairs; each pair stores its IoU value
+        Return:
+            np.ndarray, cost_matrix with assigned values
+        """
+        for pair in bbox_pairs:
+            # assign -1 to pairs above the IoU threshold; the algorithm will minimize cost
+            cost_matrix[pair["idx"]][pair["prev_idx"]] = -1
+        return cost_matrix
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/utils/README.md b/motion-gan-pipeline/preprocessing/third/detectron2/utils/README.md
new file mode 100644
index 0000000..9765b24
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/utils/README.md
@@ -0,0 +1,5 @@
+# Utility functions
+
+This folder contains utility functions that are not used in the
+core library, but are useful for building models or training
+code using the config system.
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/utils/__init__.py b/motion-gan-pipeline/preprocessing/third/detectron2/utils/__init__.py
new file mode 100644
index 0000000..9020c2d
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/utils/__init__.py
@@ -0,0 +1 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/utils/analysis.py b/motion-gan-pipeline/preprocessing/third/detectron2/utils/analysis.py
new file mode 100644
index 0000000..178da79
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/utils/analysis.py
@@ -0,0 +1,188 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+# -*- coding: utf-8 -*-
+
+import typing
+from typing import Any, List
+import fvcore
+from fvcore.nn import activation_count, flop_count, parameter_count, parameter_count_table
+from torch import nn
+
+from detectron2.export import TracingAdapter
+
+__all__ = [
+    "activation_count_operators",
+    "flop_count_operators",
+    "parameter_count_table",
+    "parameter_count",
+    "FlopCountAnalysis",
+]
+
+FLOPS_MODE = "flops"
+ACTIVATIONS_MODE = "activations"
+
+
+# Some extra ops to ignore from counting, including elementwise and reduction ops
+_IGNORED_OPS = {
+    "aten::add",
+    "aten::add_",
+    "aten::argmax",
+    "aten::argsort",
+    "aten::batch_norm",
+    "aten::constant_pad_nd",
+    "aten::div",
+    "aten::div_",
+    "aten::exp",
+    "aten::log2",
+    "aten::max_pool2d",
+    "aten::meshgrid",
+    "aten::mul",
+    "aten::mul_",
+    "aten::neg",
+    "aten::nonzero_numpy",
+    "aten::reciprocal",
+    "aten::repeat_interleave",
+    "aten::rsub",
+    "aten::sigmoid",
+    "aten::sigmoid_",
+    "aten::softmax",
+    "aten::sort",
+    "aten::sqrt",
+    "aten::sub",
+    "torchvision::nms",  # TODO estimate flop for nms
+}
+
+
+class FlopCountAnalysis(fvcore.nn.FlopCountAnalysis):
+    """
+    Same as :class:`fvcore.nn.FlopCountAnalysis`, but supports detectron2 models.
+    """
+
+    def __init__(self, model, inputs):
+        """
+        Args:
+            model (nn.Module):
+            inputs (Any): inputs of the given model. Does not have to be tuple of tensors.
+ """ + wrapper = TracingAdapter(model, inputs, allow_non_tensor=True) + super().__init__(wrapper, wrapper.flattened_inputs) + self.set_op_handle(**{k: None for k in _IGNORED_OPS}) + + +def flop_count_operators(model: nn.Module, inputs: list) -> typing.DefaultDict[str, float]: + """ + Implement operator-level flops counting using jit. + This is a wrapper of :func:`fvcore.nn.flop_count` and adds supports for standard + detection models in detectron2. + Please use :class:`FlopCountAnalysis` for more advanced functionalities. + + Note: + The function runs the input through the model to compute flops. + The flops of a detection model is often input-dependent, for example, + the flops of box & mask head depends on the number of proposals & + the number of detected objects. + Therefore, the flops counting using a single input may not accurately + reflect the computation cost of a model. It's recommended to average + across a number of inputs. + + Args: + model: a detectron2 model that takes `list[dict]` as input. + inputs (list[dict]): inputs to model, in detectron2's standard format. + Only "image" key will be used. + supported_ops (dict[str, Handle]): see documentation of :func:`fvcore.nn.flop_count` + + Returns: + Counter: Gflop count per operator + """ + old_train = model.training + model.eval() + ret = FlopCountAnalysis(model, inputs).by_operator() + model.train(old_train) + return {k: v / 1e9 for k, v in ret.items()} + + +def activation_count_operators( + model: nn.Module, inputs: list, **kwargs +) -> typing.DefaultDict[str, float]: + """ + Implement operator-level activations counting using jit. + This is a wrapper of fvcore.nn.activation_count, that supports standard detection models + in detectron2. + + Note: + The function runs the input through the model to compute activations. + The activations of a detection model is often input-dependent, for example, + the activations of box & mask head depends on the number of proposals & + the number of detected objects. + + Args: + model: a detectron2 model that takes `list[dict]` as input. + inputs (list[dict]): inputs to model, in detectron2's standard format. + Only "image" key will be used. 
+ + Returns: + Counter: activation count per operator + """ + return _wrapper_count_operators(model=model, inputs=inputs, mode=ACTIVATIONS_MODE, **kwargs) + + +def _wrapper_count_operators( + model: nn.Module, inputs: list, mode: str, **kwargs +) -> typing.DefaultDict[str, float]: + # ignore some ops + supported_ops = {k: lambda *args, **kwargs: {} for k in _IGNORED_OPS} + supported_ops.update(kwargs.pop("supported_ops", {})) + kwargs["supported_ops"] = supported_ops + + assert len(inputs) == 1, "Please use batch size=1" + tensor_input = inputs[0]["image"] + inputs = [{"image": tensor_input}] # remove other keys, in case there are any + + old_train = model.training + if isinstance(model, (nn.parallel.distributed.DistributedDataParallel, nn.DataParallel)): + model = model.module + wrapper = TracingAdapter(model, inputs) + wrapper.eval() + if mode == FLOPS_MODE: + ret = flop_count(wrapper, (tensor_input,), **kwargs) + elif mode == ACTIVATIONS_MODE: + ret = activation_count(wrapper, (tensor_input,), **kwargs) + else: + raise NotImplementedError("Count for mode {} is not supported yet.".format(mode)) + # compatible with change in fvcore + if isinstance(ret, tuple): + ret = ret[0] + model.train(old_train) + return ret + + +def find_unused_parameters(model: nn.Module, inputs: Any) -> List[str]: + """ + Given a model, find parameters that do not contribute + to the loss. + + Args: + model: a model in training mode that returns losses + inputs: argument or a tuple of arguments. Inputs of the model + + Returns: + list[str]: the name of unused parameters + """ + assert model.training + for _, prm in model.named_parameters(): + prm.grad = None + + if isinstance(inputs, tuple): + losses = model(*inputs) + else: + losses = model(inputs) + + if isinstance(losses, dict): + losses = sum(losses.values()) + losses.backward() + + unused: List[str] = [] + for name, prm in model.named_parameters(): + if prm.grad is None: + unused.append(name) + prm.grad = None + return unused diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/utils/collect_env.py b/motion-gan-pipeline/preprocessing/third/detectron2/utils/collect_env.py new file mode 100644 index 0000000..1b23548 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/utils/collect_env.py @@ -0,0 +1,242 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
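
To see what the flop counters above report, here is a small self-contained sketch against the fvcore base class (the toy conv layer and input shape are made up; for a detectron2 model one would pass the usual `list[dict]` inputs to the subclass defined above instead). Note that fvcore counts multiply-accumulate operations:

    import torch
    from torch import nn
    from fvcore.nn import FlopCountAnalysis

    model = nn.Conv2d(3, 8, kernel_size=3, padding=1)
    flops = FlopCountAnalysis(model, torch.randn(1, 3, 64, 64))
    print(flops.total())        # total multiply-accumulate count for this input
    print(flops.by_operator())  # e.g. Counter({'conv': ...})
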
+import importlib +import numpy as np +import os +import re +import subprocess +import sys +from collections import defaultdict +import PIL +import torch +import torchvision +from tabulate import tabulate + +__all__ = ["collect_env_info"] + + +def collect_torch_env(): + try: + import torch.__config__ + + return torch.__config__.show() + except ImportError: + # compatible with older versions of pytorch + from torch.utils.collect_env import get_pretty_env_info + + return get_pretty_env_info() + + +def get_env_module(): + var_name = "DETECTRON2_ENV_MODULE" + return var_name, os.environ.get(var_name, "") + + +def detect_compute_compatibility(CUDA_HOME, so_file): + try: + cuobjdump = os.path.join(CUDA_HOME, "bin", "cuobjdump") + if os.path.isfile(cuobjdump): + output = subprocess.check_output( + "'{}' --list-elf '{}'".format(cuobjdump, so_file), shell=True + ) + output = output.decode("utf-8").strip().split("\n") + arch = [] + for line in output: + line = re.findall(r"\.sm_([0-9]*)\.", line)[0] + arch.append(".".join(line)) + arch = sorted(set(arch)) + return ", ".join(arch) + else: + return so_file + "; cannot find cuobjdump" + except Exception: + # unhandled failure + return so_file + + +def collect_env_info(): + has_gpu = torch.cuda.is_available() # true for both CUDA & ROCM + torch_version = torch.__version__ + + # NOTE that CUDA_HOME/ROCM_HOME could be None even when CUDA runtime libs are functional + from torch.utils.cpp_extension import CUDA_HOME, ROCM_HOME + + has_rocm = False + if (getattr(torch.version, "hip", None) is not None) and (ROCM_HOME is not None): + has_rocm = True + has_cuda = has_gpu and (not has_rocm) + + data = [] + data.append(("sys.platform", sys.platform)) # check-template.yml depends on it + data.append(("Python", sys.version.replace("\n", ""))) + data.append(("numpy", np.__version__)) + + try: + import detectron2 # noqa + + data.append( + ("detectron2", detectron2.__version__ + " @" + os.path.dirname(detectron2.__file__)) + ) + except ImportError: + data.append(("detectron2", "failed to import")) + except AttributeError: + data.append(("detectron2", "imported a wrong installation")) + + try: + import detectron2._C as _C + except ImportError as e: + data.append(("detectron2._C", f"not built correctly: {e}")) + + # print system compilers when extension fails to build + if sys.platform != "win32": # don't know what to do for windows + try: + # this is how torch/autils/cpp_extensions.py choose compiler + cxx = os.environ.get("CXX", "c++") + cxx = subprocess.check_output("'{}' --version".format(cxx), shell=True) + cxx = cxx.decode("utf-8").strip().split("\n")[0] + except subprocess.SubprocessError: + cxx = "Not found" + data.append(("Compiler ($CXX)", cxx)) + + if has_cuda and CUDA_HOME is not None: + try: + nvcc = os.path.join(CUDA_HOME, "bin", "nvcc") + nvcc = subprocess.check_output("'{}' -V".format(nvcc), shell=True) + nvcc = nvcc.decode("utf-8").strip().split("\n")[-1] + except subprocess.SubprocessError: + nvcc = "Not found" + data.append(("CUDA compiler", nvcc)) + if has_cuda and sys.platform != "win32": + try: + so_file = importlib.util.find_spec("detectron2._C").origin + except (ImportError, AttributeError): + pass + else: + data.append( + ("detectron2 arch flags", detect_compute_compatibility(CUDA_HOME, so_file)) + ) + else: + # print compilers that are used to build extension + data.append(("Compiler", _C.get_compiler_version())) + data.append(("CUDA compiler", _C.get_cuda_version())) # cuda or hip + if has_cuda and getattr(_C, "has_cuda", lambda: True)(): + 
data.append( + ("detectron2 arch flags", detect_compute_compatibility(CUDA_HOME, _C.__file__)) + ) + + data.append(get_env_module()) + data.append(("PyTorch", torch_version + " @" + os.path.dirname(torch.__file__))) + data.append(("PyTorch debug build", torch.version.debug)) + + if not has_gpu: + has_gpu_text = "No: torch.cuda.is_available() == False" + else: + has_gpu_text = "Yes" + data.append(("GPU available", has_gpu_text)) + if has_gpu: + devices = defaultdict(list) + for k in range(torch.cuda.device_count()): + cap = ".".join((str(x) for x in torch.cuda.get_device_capability(k))) + name = torch.cuda.get_device_name(k) + f" (arch={cap})" + devices[name].append(str(k)) + for name, devids in devices.items(): + data.append(("GPU " + ",".join(devids), name)) + + if has_rocm: + msg = " - invalid!" if not (ROCM_HOME and os.path.isdir(ROCM_HOME)) else "" + data.append(("ROCM_HOME", str(ROCM_HOME) + msg)) + else: + try: + from torch.utils.collect_env import get_nvidia_driver_version, run as _run + + data.append(("Driver version", get_nvidia_driver_version(_run))) + except Exception: + pass + msg = " - invalid!" if not (CUDA_HOME and os.path.isdir(CUDA_HOME)) else "" + data.append(("CUDA_HOME", str(CUDA_HOME) + msg)) + + cuda_arch_list = os.environ.get("TORCH_CUDA_ARCH_LIST", None) + if cuda_arch_list: + data.append(("TORCH_CUDA_ARCH_LIST", cuda_arch_list)) + data.append(("Pillow", PIL.__version__)) + + try: + data.append( + ( + "torchvision", + str(torchvision.__version__) + " @" + os.path.dirname(torchvision.__file__), + ) + ) + if has_cuda: + try: + torchvision_C = importlib.util.find_spec("torchvision._C").origin + msg = detect_compute_compatibility(CUDA_HOME, torchvision_C) + data.append(("torchvision arch flags", msg)) + except (ImportError, AttributeError): + data.append(("torchvision._C", "Not found")) + except AttributeError: + data.append(("torchvision", "unknown")) + + try: + import fvcore + + data.append(("fvcore", fvcore.__version__)) + except (ImportError, AttributeError): + pass + + try: + import iopath + + data.append(("iopath", iopath.__version__)) + except (ImportError, AttributeError): + pass + + try: + import cv2 + + data.append(("cv2", cv2.__version__)) + except (ImportError, AttributeError): + data.append(("cv2", "Not found")) + env_str = tabulate(data) + "\n" + env_str += collect_torch_env() + return env_str + + +def test_nccl_ops(): + num_gpu = torch.cuda.device_count() + if os.access("/tmp", os.W_OK): + import torch.multiprocessing as mp + + dist_url = "file:///tmp/nccl_tmp_file" + print("Testing NCCL connectivity ... this should not hang.") + mp.spawn(_test_nccl_worker, nprocs=num_gpu, args=(num_gpu, dist_url), daemon=False) + print("NCCL succeeded.") + + +def _test_nccl_worker(rank, num_gpu, dist_url): + import torch.distributed as dist + + dist.init_process_group(backend="NCCL", init_method=dist_url, rank=rank, world_size=num_gpu) + dist.barrier(device_ids=[rank]) + + +if __name__ == "__main__": + try: + from detectron2.utils.collect_env import collect_env_info as f + + print(f()) + except ImportError: + print(collect_env_info()) + + if torch.cuda.is_available(): + num_gpu = torch.cuda.device_count() + for k in range(num_gpu): + device = f"cuda:{k}" + try: + x = torch.tensor([1, 2.0], dtype=torch.float32) + x = x.to(device) + except Exception as e: + print( + f"Unable to copy tensor to device={device}: {e}. " + "Your CUDA environment is broken." 
+ ) + if num_gpu > 1: + test_nccl_ops() diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/utils/colormap.py b/motion-gan-pipeline/preprocessing/third/detectron2/utils/colormap.py new file mode 100644 index 0000000..14ded16 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/utils/colormap.py @@ -0,0 +1,158 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +""" +An awesome colormap for really neat visualizations. +Copied from Detectron, and removed gray colors. +""" + +import numpy as np +import random + +__all__ = ["colormap", "random_color", "random_colors"] + +# fmt: off +# RGB: +_COLORS = np.array( + [ + 0.000, 0.447, 0.741, + 0.850, 0.325, 0.098, + 0.929, 0.694, 0.125, + 0.494, 0.184, 0.556, + 0.466, 0.674, 0.188, + 0.301, 0.745, 0.933, + 0.635, 0.078, 0.184, + 0.300, 0.300, 0.300, + 0.600, 0.600, 0.600, + 1.000, 0.000, 0.000, + 1.000, 0.500, 0.000, + 0.749, 0.749, 0.000, + 0.000, 1.000, 0.000, + 0.000, 0.000, 1.000, + 0.667, 0.000, 1.000, + 0.333, 0.333, 0.000, + 0.333, 0.667, 0.000, + 0.333, 1.000, 0.000, + 0.667, 0.333, 0.000, + 0.667, 0.667, 0.000, + 0.667, 1.000, 0.000, + 1.000, 0.333, 0.000, + 1.000, 0.667, 0.000, + 1.000, 1.000, 0.000, + 0.000, 0.333, 0.500, + 0.000, 0.667, 0.500, + 0.000, 1.000, 0.500, + 0.333, 0.000, 0.500, + 0.333, 0.333, 0.500, + 0.333, 0.667, 0.500, + 0.333, 1.000, 0.500, + 0.667, 0.000, 0.500, + 0.667, 0.333, 0.500, + 0.667, 0.667, 0.500, + 0.667, 1.000, 0.500, + 1.000, 0.000, 0.500, + 1.000, 0.333, 0.500, + 1.000, 0.667, 0.500, + 1.000, 1.000, 0.500, + 0.000, 0.333, 1.000, + 0.000, 0.667, 1.000, + 0.000, 1.000, 1.000, + 0.333, 0.000, 1.000, + 0.333, 0.333, 1.000, + 0.333, 0.667, 1.000, + 0.333, 1.000, 1.000, + 0.667, 0.000, 1.000, + 0.667, 0.333, 1.000, + 0.667, 0.667, 1.000, + 0.667, 1.000, 1.000, + 1.000, 0.000, 1.000, + 1.000, 0.333, 1.000, + 1.000, 0.667, 1.000, + 0.333, 0.000, 0.000, + 0.500, 0.000, 0.000, + 0.667, 0.000, 0.000, + 0.833, 0.000, 0.000, + 1.000, 0.000, 0.000, + 0.000, 0.167, 0.000, + 0.000, 0.333, 0.000, + 0.000, 0.500, 0.000, + 0.000, 0.667, 0.000, + 0.000, 0.833, 0.000, + 0.000, 1.000, 0.000, + 0.000, 0.000, 0.167, + 0.000, 0.000, 0.333, + 0.000, 0.000, 0.500, + 0.000, 0.000, 0.667, + 0.000, 0.000, 0.833, + 0.000, 0.000, 1.000, + 0.000, 0.000, 0.000, + 0.143, 0.143, 0.143, + 0.857, 0.857, 0.857, + 1.000, 1.000, 1.000 + ] +).astype(np.float32).reshape(-1, 3) +# fmt: on + + +def colormap(rgb=False, maximum=255): + """ + Args: + rgb (bool): whether to return RGB colors or BGR colors. + maximum (int): either 255 or 1 + + Returns: + ndarray: a float32 array of Nx3 colors, in range [0, 255] or [0, 1] + """ + assert maximum in [255, 1], maximum + c = _COLORS * maximum + if not rgb: + c = c[:, ::-1] + return c + + +def random_color(rgb=False, maximum=255): + """ + Args: + rgb (bool): whether to return RGB colors or BGR colors. + maximum (int): either 255 or 1 + + Returns: + ndarray: a vector of 3 numbers + """ + idx = np.random.randint(0, len(_COLORS)) + ret = _COLORS[idx] * maximum + if not rgb: + ret = ret[::-1] + return ret + + +def random_colors(N, rgb=False, maximum=255): + """ + Args: + N (int): number of unique colors needed + rgb (bool): whether to return RGB colors or BGR colors. 
+ maximum (int): either 255 or 1 + + Returns: + ndarray: a list of random_color + """ + indices = random.sample(range(len(_COLORS)), N) + ret = [_COLORS[i] * maximum for i in indices] + if not rgb: + ret = [x[::-1] for x in ret] + return ret + + +if __name__ == "__main__": + import cv2 + + size = 100 + H, W = 10, 10 + canvas = np.random.rand(H * size, W * size, 3).astype("float32") + for h in range(H): + for w in range(W): + idx = h * W + w + if idx >= len(_COLORS): + break + canvas[h * size : (h + 1) * size, w * size : (w + 1) * size] = _COLORS[idx] + cv2.imshow("a", canvas) + cv2.waitKey(0) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/utils/comm.py b/motion-gan-pipeline/preprocessing/third/detectron2/utils/comm.py new file mode 100644 index 0000000..7e2a0c4 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/utils/comm.py @@ -0,0 +1,199 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +""" +This file contains primitives for multi-gpu communication. +This is useful when doing distributed training. +""" + +import functools +import numpy as np +import torch +import torch.distributed as dist + +_LOCAL_PROCESS_GROUP = None +""" +A torch process group which only includes processes that on the same machine as the current process. +This variable is set when processes are spawned by `launch()` in "engine/launch.py". +""" + + +def get_world_size() -> int: + if not dist.is_available(): + return 1 + if not dist.is_initialized(): + return 1 + return dist.get_world_size() + + +def get_rank() -> int: + if not dist.is_available(): + return 0 + if not dist.is_initialized(): + return 0 + return dist.get_rank() + + +def get_local_rank() -> int: + """ + Returns: + The rank of the current process within the local (per-machine) process group. + """ + if not dist.is_available(): + return 0 + if not dist.is_initialized(): + return 0 + assert ( + _LOCAL_PROCESS_GROUP is not None + ), "Local process group is not created! Please use launch() to spawn processes!" + return dist.get_rank(group=_LOCAL_PROCESS_GROUP) + + +def get_local_size() -> int: + """ + Returns: + The size of the per-machine process group, + i.e. the number of processes per machine. + """ + if not dist.is_available(): + return 1 + if not dist.is_initialized(): + return 1 + return dist.get_world_size(group=_LOCAL_PROCESS_GROUP) + + +def is_main_process() -> bool: + return get_rank() == 0 + + +def synchronize(): + """ + Helper function to synchronize (barrier) among all processes when + using distributed training + """ + if not dist.is_available(): + return + if not dist.is_initialized(): + return + world_size = dist.get_world_size() + if world_size == 1: + return + if dist.get_backend() == dist.Backend.NCCL: + # This argument is needed to avoid warnings. + # It's valid only for NCCL backend. + dist.barrier(device_ids=[torch.cuda.current_device()]) + else: + dist.barrier() + + +@functools.lru_cache() +def _get_global_gloo_group(): + """ + Return a process group based on gloo backend, containing all the ranks + The result is cached. + """ + if dist.get_backend() == "nccl": + return dist.new_group(backend="gloo") + else: + return dist.group.WORLD + + +def all_gather(data, group=None): + """ + Run all_gather on arbitrary picklable data (not necessarily tensors). + + Args: + data: any picklable object + group: a torch process group. By default, will use a group which + contains all ranks on gloo backend. 
+
+    Returns:
+        list[data]: list of data gathered from each rank
+    """
+    if get_world_size() == 1:
+        return [data]
+    if group is None:
+        group = _get_global_gloo_group()  # use CPU group by default, to reduce GPU RAM usage.
+    world_size = dist.get_world_size(group)
+    if world_size == 1:
+        return [data]
+
+    output = [None for _ in range(world_size)]
+    dist.all_gather_object(output, data, group=group)
+    return output
+
+
+def gather(data, dst=0, group=None):
+    """
+    Run gather on arbitrary picklable data (not necessarily tensors).
+
+    Args:
+        data: any picklable object
+        dst (int): destination rank
+        group: a torch process group. By default, will use a group which
+            contains all ranks on gloo backend.
+
+    Returns:
+        list[data]: on dst, a list of data gathered from each rank. Otherwise,
+            an empty list.
+    """
+    if get_world_size() == 1:
+        return [data]
+    if group is None:
+        group = _get_global_gloo_group()
+    world_size = dist.get_world_size(group=group)
+    if world_size == 1:
+        return [data]
+    rank = dist.get_rank(group=group)
+
+    if rank == dst:
+        output = [None for _ in range(world_size)]
+        dist.gather_object(data, output, dst=dst, group=group)
+        return output
+    else:
+        dist.gather_object(data, None, dst=dst, group=group)
+        return []
+
+
+def shared_random_seed():
+    """
+    Returns:
+        int: a random number that is the same across all workers.
+            If workers need a shared RNG, they can use this shared seed to
+            create one.
+
+    All workers must call this function, otherwise it will deadlock.
+    """
+    ints = np.random.randint(2 ** 31)
+    all_ints = all_gather(ints)
+    return all_ints[0]
+
+
+def reduce_dict(input_dict, average=True):
+    """
+    Reduce the values in the dictionary from all processes so that process with rank
+    0 has the reduced results.
+
+    Args:
+        input_dict (dict): inputs to be reduced. All the values must be scalar CUDA Tensor.
+        average (bool): whether to do average or sum
+
+    Returns:
+        a dict with the same keys as input_dict, after reduction.
+    """
+    world_size = get_world_size()
+    if world_size < 2:
+        return input_dict
+    with torch.no_grad():
+        names = []
+        values = []
+        # sort the keys so that they are consistent across processes
+        for k in sorted(input_dict.keys()):
+            names.append(k)
+            values.append(input_dict[k])
+        values = torch.stack(values, dim=0)
+        dist.reduce(values, dst=0)
+        if dist.get_rank() == 0 and average:
+            # only main process gets accumulated, so only divide by
+            # world_size in this case
+            values /= world_size
+        reduced_dict = {k: v for k, v in zip(names, values)}
+    return reduced_dict
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/utils/develop.py b/motion-gan-pipeline/preprocessing/third/detectron2/utils/develop.py
new file mode 100644
index 0000000..7ff7bf1
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/utils/develop.py
@@ -0,0 +1,59 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+""" Utilities for developers only.
+These are not visible to users (not automatically imported). And they should
+not appear in docs."""
+# adapted from https://github.com/tensorpack/tensorpack/blob/master/tensorpack/utils/develop.py
+
+
+def create_dummy_class(klass, dependency, message=""):
+    """
+    When a dependency of a class is not available, create a dummy class which throws ImportError
+    when used.
+
+    Args:
+        klass (str): name of the class.
+        dependency (str): name of the dependency.
+ message: extra message to print + Returns: + class: a class object + """ + err = "Cannot import '{}', therefore '{}' is not available.".format(dependency, klass) + if message: + err = err + " " + message + + class _DummyMetaClass(type): + # throw error on class attribute access + def __getattr__(_, __): + raise ImportError(err) + + class _Dummy(object, metaclass=_DummyMetaClass): + # throw error on constructor + def __init__(self, *args, **kwargs): + raise ImportError(err) + + return _Dummy + + +def create_dummy_func(func, dependency, message=""): + """ + When a dependency of a function is not available, create a dummy function which throws + ImportError when used. + + Args: + func (str): name of the function. + dependency (str or list[str]): name(s) of the dependency. + message: extra message to print + Returns: + function: a function object + """ + err = "Cannot import '{}', therefore '{}' is not available.".format(dependency, func) + if message: + err = err + " " + message + + if isinstance(dependency, (list, tuple)): + dependency = ",".join(dependency) + + def _dummy(*args, **kwargs): + raise ImportError(err) + + return _dummy diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/utils/env.py b/motion-gan-pipeline/preprocessing/third/detectron2/utils/env.py new file mode 100644 index 0000000..2f2c0fb --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/utils/env.py @@ -0,0 +1,170 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import importlib +import importlib.util +import logging +import numpy as np +import os +import random +import sys +from datetime import datetime +import torch + +__all__ = ["seed_all_rng"] + + +TORCH_VERSION = tuple(int(x) for x in torch.__version__.split(".")[:2]) +""" +PyTorch version as a tuple of 2 ints. Useful for comparison. +""" + + +DOC_BUILDING = os.getenv("_DOC_BUILDING", False) # set in docs/conf.py +""" +Whether we're building documentation. +""" + + +def seed_all_rng(seed=None): + """ + Set the random seed for the RNG in torch, numpy and python. + + Args: + seed (int): if None, will use a strong random seed. + """ + if seed is None: + seed = ( + os.getpid() + + int(datetime.now().strftime("%S%f")) + + int.from_bytes(os.urandom(2), "big") + ) + logger = logging.getLogger(__name__) + logger.info("Using a generated random seed {}".format(seed)) + np.random.seed(seed) + torch.manual_seed(seed) + random.seed(seed) + os.environ["PYTHONHASHSEED"] = str(seed) + + +# from https://stackoverflow.com/questions/67631/how-to-import-a-module-given-the-full-path +def _import_file(module_name, file_path, make_importable=False): + spec = importlib.util.spec_from_file_location(module_name, file_path) + module = importlib.util.module_from_spec(spec) + spec.loader.exec_module(module) + if make_importable: + sys.modules[module_name] = module + return module + + +def _configure_libraries(): + """ + Configurations for some libraries. 
+ """ + # An environment option to disable `import cv2` globally, + # in case it leads to negative performance impact + disable_cv2 = int(os.environ.get("DETECTRON2_DISABLE_CV2", False)) + if disable_cv2: + sys.modules["cv2"] = None + else: + # Disable opencl in opencv since its interaction with cuda often has negative effects + # This envvar is supported after OpenCV 3.4.0 + os.environ["OPENCV_OPENCL_RUNTIME"] = "disabled" + try: + import cv2 + + if int(cv2.__version__.split(".")[0]) >= 3: + cv2.ocl.setUseOpenCL(False) + except ModuleNotFoundError: + # Other types of ImportError, if happened, should not be ignored. + # Because a failed opencv import could mess up address space + # https://github.com/skvark/opencv-python/issues/381 + pass + + def get_version(module, digit=2): + return tuple(map(int, module.__version__.split(".")[:digit])) + + # fmt: off + assert get_version(torch) >= (1, 4), "Requires torch>=1.4" + import fvcore + assert get_version(fvcore, 3) >= (0, 1, 2), "Requires fvcore>=0.1.2" + import yaml + assert get_version(yaml) >= (5, 1), "Requires pyyaml>=5.1" + # fmt: on + + +_ENV_SETUP_DONE = False + + +def setup_environment(): + """Perform environment setup work. The default setup is a no-op, but this + function allows the user to specify a Python source file or a module in + the $DETECTRON2_ENV_MODULE environment variable, that performs + custom setup work that may be necessary to their computing environment. + """ + global _ENV_SETUP_DONE + if _ENV_SETUP_DONE: + return + _ENV_SETUP_DONE = True + + _configure_libraries() + + custom_module_path = os.environ.get("DETECTRON2_ENV_MODULE") + + if custom_module_path: + setup_custom_environment(custom_module_path) + else: + # The default setup is a no-op + pass + + +def setup_custom_environment(custom_module): + """ + Load custom environment setup by importing a Python source file or a + module, and run the setup function. + """ + if custom_module.endswith(".py"): + module = _import_file("detectron2.autils.env.custom_module", custom_module) + else: + module = importlib.import_module(custom_module) + assert hasattr(module, "setup_environment") and callable(module.setup_environment), ( + "Custom environment module defined in {} does not have the " + "required callable attribute 'setup_environment'." + ).format(custom_module) + module.setup_environment() + + +def fixup_module_metadata(module_name, namespace, keys=None): + """ + Fix the __qualname__ of module members to be their exported api name, so + when they are referenced in docs, sphinx can find them. Reference: + https://github.com/python-trio/trio/blob/6754c74eacfad9cc5c92d5c24727a2f3b620624e/trio/_util.py#L216-L241 + """ + if not DOC_BUILDING: + return + seen_ids = set() + + def fix_one(qualname, name, obj): + # avoid infinite recursion (relevant when using + # typing.Generic, for example) + if id(obj) in seen_ids: + return + seen_ids.add(id(obj)) + + mod = getattr(obj, "__module__", None) + if mod is not None and (mod.startswith(module_name) or mod.startswith("fvcore.")): + obj.__module__ = module_name + # Modules, unlike everything else in Python, put fully-qualitied + # names into their __name__ attribute. We check for "." to avoid + # rewriting these. + if hasattr(obj, "__name__") and "." not in obj.__name__: + obj.__name__ = name + obj.__qualname__ = qualname + if isinstance(obj, type): + for attr_name, attr_value in obj.__dict__.items(): + fix_one(objname + "." 
+ attr_name, attr_name, attr_value) + + if keys is None: + keys = namespace.keys() + for objname in keys: + if not objname.startswith("_"): + obj = namespace[objname] + fix_one(objname, objname, obj) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/utils/events.py b/motion-gan-pipeline/preprocessing/third/detectron2/utils/events.py new file mode 100644 index 0000000..b696698 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/utils/events.py @@ -0,0 +1,486 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import datetime +import json +import logging +import os +import time +from collections import defaultdict +from contextlib import contextmanager +from typing import Optional +import torch +from fvcore.common.history_buffer import HistoryBuffer + +from detectron2.utils.file_io import PathManager + +__all__ = [ + "get_event_storage", + "JSONWriter", + "TensorboardXWriter", + "CommonMetricPrinter", + "EventStorage", +] + +_CURRENT_STORAGE_STACK = [] + + +def get_event_storage(): + """ + Returns: + The :class:`EventStorage` object that's currently being used. + Throws an error if no :class:`EventStorage` is currently enabled. + """ + assert len( + _CURRENT_STORAGE_STACK + ), "get_event_storage() has to be called inside a 'with EventStorage(...)' context!" + return _CURRENT_STORAGE_STACK[-1] + + +class EventWriter: + """ + Base class for writers that obtain events from :class:`EventStorage` and process them. + """ + + def write(self): + raise NotImplementedError + + def close(self): + pass + + +class JSONWriter(EventWriter): + """ + Write scalars to a json file. + + It saves scalars as one json per line (instead of a big json) for easy parsing. + + Examples parsing such a json file: + :: + $ cat metrics.json | jq -s '.[0:2]' + [ + { + "data_time": 0.008433341979980469, + "iteration": 19, + "loss": 1.9228371381759644, + "loss_box_reg": 0.050025828182697296, + "loss_classifier": 0.5316952466964722, + "loss_mask": 0.7236229181289673, + "loss_rpn_box": 0.0856662318110466, + "loss_rpn_cls": 0.48198649287223816, + "lr": 0.007173333333333333, + "time": 0.25401854515075684 + }, + { + "data_time": 0.007216215133666992, + "iteration": 39, + "loss": 1.282649278640747, + "loss_box_reg": 0.06222952902317047, + "loss_classifier": 0.30682939291000366, + "loss_mask": 0.6970193982124329, + "loss_rpn_box": 0.038663312792778015, + "loss_rpn_cls": 0.1471673548221588, + "lr": 0.007706666666666667, + "time": 0.2490077018737793 + } + ] + + $ cat metrics.json | jq '.loss_mask' + 0.7126231789588928 + 0.689423680305481 + 0.6776131987571716 + ... + + """ + + def __init__(self, json_file, window_size=20): + """ + Args: + json_file (str): path to the json file. New data will be appended if the file exists. + window_size (int): the window size of median smoothing for the scalars whose + `smoothing_hint` are True. 
+ """ + self._file_handle = PathManager.open(json_file, "a") + self._window_size = window_size + self._last_write = -1 + + def write(self): + storage = get_event_storage() + to_save = defaultdict(dict) + + for k, (v, iter) in storage.latest_with_smoothing_hint(self._window_size).items(): + # keep scalars that have not been written + if iter <= self._last_write: + continue + to_save[iter][k] = v + if len(to_save): + all_iters = sorted(to_save.keys()) + self._last_write = max(all_iters) + + for itr, scalars_per_iter in to_save.items(): + scalars_per_iter["iteration"] = itr + self._file_handle.write(json.dumps(scalars_per_iter, sort_keys=True) + "\n") + self._file_handle.flush() + try: + os.fsync(self._file_handle.fileno()) + except AttributeError: + pass + + def close(self): + self._file_handle.close() + + +class TensorboardXWriter(EventWriter): + """ + Write all scalars to a tensorboard file. + """ + + def __init__(self, log_dir: str, window_size: int = 20, **kwargs): + """ + Args: + log_dir (str): the directory to save the output events + window_size (int): the scalars will be median-smoothed by this window size + + kwargs: other arguments passed to `torch.autils.tensorboard.SummaryWriter(...)` + """ + self._window_size = window_size + from torch.utils.tensorboard import SummaryWriter + + self._writer = SummaryWriter(log_dir, **kwargs) + self._last_write = -1 + + def write(self): + storage = get_event_storage() + new_last_write = self._last_write + for k, (v, iter) in storage.latest_with_smoothing_hint(self._window_size).items(): + if iter > self._last_write: + self._writer.add_scalar(k, v, iter) + new_last_write = max(new_last_write, iter) + self._last_write = new_last_write + + # storage.put_{image,histogram} is only meant to be used by + # tensorboard writer. So we access its internal fields directly from here. + if len(storage._vis_data) >= 1: + for img_name, img, step_num in storage._vis_data: + self._writer.add_image(img_name, img, step_num) + # Storage stores all image data and rely on this writer to clear them. + # As a result it assumes only one writer will use its image data. + # An alternative design is to let storage store limited recent + # data (e.g. only the most recent image) that all writers can access. + # In that case a writer may not see all image data if its period is long. + storage.clear_images() + + if len(storage._histograms) >= 1: + for params in storage._histograms: + self._writer.add_histogram_raw(**params) + storage.clear_histograms() + + def close(self): + if hasattr(self, "_writer"): # doesn't exist when the code fails at import + self._writer.close() + + +class CommonMetricPrinter(EventWriter): + """ + Print **common** metrics to the terminal, including + iteration time, ETA, memory, all losses, and the learning rate. + It also applies smoothing using a window of 20 elements. + + It's meant to print common metrics in common ways. + To print something in more customized ways, please implement a similar printer by yourself. + """ + + def __init__(self, max_iter: Optional[int] = None, window_size: int = 20): + """ + Args: + max_iter: the maximum number of iterations to train. + Used to compute ETA. If not given, ETA will not be printed. + window_size (int): the losses will be median-smoothed by this window size + """ + self.logger = logging.getLogger(__name__) + self._max_iter = max_iter + self._window_size = window_size + self._last_write = None # (step, time) of last call to write(). 
Used to compute ETA + + def _get_eta(self, storage) -> Optional[str]: + if self._max_iter is None: + return "" + iteration = storage.iter + try: + eta_seconds = storage.history("time").median(1000) * (self._max_iter - iteration - 1) + storage.put_scalar("eta_seconds", eta_seconds, smoothing_hint=False) + return str(datetime.timedelta(seconds=int(eta_seconds))) + except KeyError: + # estimate eta on our own - more noisy + eta_string = None + if self._last_write is not None: + estimate_iter_time = (time.perf_counter() - self._last_write[1]) / ( + iteration - self._last_write[0] + ) + eta_seconds = estimate_iter_time * (self._max_iter - iteration - 1) + eta_string = str(datetime.timedelta(seconds=int(eta_seconds))) + self._last_write = (iteration, time.perf_counter()) + return eta_string + + def write(self): + storage = get_event_storage() + iteration = storage.iter + if iteration == self._max_iter: + # This hook only reports training progress (loss, ETA, etc) but not other data, + # therefore do not write anything after training succeeds, even if this method + # is called. + return + + try: + data_time = storage.history("data_time").avg(20) + except KeyError: + # they may not exist in the first few iterations (due to warmup) + # or when SimpleTrainer is not used + data_time = None + try: + iter_time = storage.history("time").global_avg() + except KeyError: + iter_time = None + try: + lr = "{:.5g}".format(storage.history("lr").latest()) + except KeyError: + lr = "N/A" + + eta_string = self._get_eta(storage) + + if torch.cuda.is_available(): + max_mem_mb = torch.cuda.max_memory_allocated() / 1024.0 / 1024.0 + else: + max_mem_mb = None + + # NOTE: max_mem is parsed by grep in "dev/parse_results.sh" + self.logger.info( + " {eta}iter: {iter} {losses} {time}{data_time}lr: {lr} {memory}".format( + eta=f"eta: {eta_string} " if eta_string else "", + iter=iteration, + losses=" ".join( + [ + "{}: {:.4g}".format(k, v.median(self._window_size)) + for k, v in storage.histories().items() + if "loss" in k + ] + ), + time="time: {:.4f} ".format(iter_time) if iter_time is not None else "", + data_time="data_time: {:.4f} ".format(data_time) if data_time is not None else "", + lr=lr, + memory="max_mem: {:.0f}M".format(max_mem_mb) if max_mem_mb is not None else "", + ) + ) + + +class EventStorage: + """ + The user-facing class that provides metric storage functionalities. + + In the future we may add support for storing / logging other types of data if needed. + """ + + def __init__(self, start_iter=0): + """ + Args: + start_iter (int): the iteration number to start with + """ + self._history = defaultdict(HistoryBuffer) + self._smoothing_hints = {} + self._latest_scalars = {} + self._iter = start_iter + self._current_prefix = "" + self._vis_data = [] + self._histograms = [] + + def put_image(self, img_name, img_tensor): + """ + Add an `img_tensor` associated with `img_name`, to be shown on + tensorboard. + + Args: + img_name (str): The name of the image to put into tensorboard. + img_tensor (torch.Tensor or numpy.array): An `uint8` or `float` + Tensor of shape `[channel, height, width]` where `channel` is + 3. The image format should be RGB. The elements in img_tensor + can either have values in [0, 1] (float32) or [0, 255] (uint8). + The `img_tensor` will be visualized in tensorboard. + """ + self._vis_data.append((img_name, img_tensor, self._iter)) + + def put_scalar(self, name, value, smoothing_hint=True): + """ + Add a scalar `value` to the `HistoryBuffer` associated with `name`. 
+ + Args: + smoothing_hint (bool): a 'hint' on whether this scalar is noisy and should be + smoothed when logged. The hint will be accessible through + :meth:`EventStorage.smoothing_hints`. A writer may ignore the hint + and apply custom smoothing rule. + + It defaults to True because most scalars we save need to be smoothed to + provide any useful signal. + """ + name = self._current_prefix + name + history = self._history[name] + value = float(value) + history.update(value, self._iter) + self._latest_scalars[name] = (value, self._iter) + + existing_hint = self._smoothing_hints.get(name) + if existing_hint is not None: + assert ( + existing_hint == smoothing_hint + ), "Scalar {} was put with a different smoothing_hint!".format(name) + else: + self._smoothing_hints[name] = smoothing_hint + + def put_scalars(self, *, smoothing_hint=True, **kwargs): + """ + Put multiple scalars from keyword arguments. + + Examples: + + storage.put_scalars(loss=my_loss, accuracy=my_accuracy, smoothing_hint=True) + """ + for k, v in kwargs.items(): + self.put_scalar(k, v, smoothing_hint=smoothing_hint) + + def put_histogram(self, hist_name, hist_tensor, bins=1000): + """ + Create a histogram from a tensor. + + Args: + hist_name (str): The name of the histogram to put into tensorboard. + hist_tensor (torch.Tensor): A Tensor of arbitrary shape to be converted + into a histogram. + bins (int): Number of histogram bins. + """ + ht_min, ht_max = hist_tensor.min().item(), hist_tensor.max().item() + + # Create a histogram with PyTorch + hist_counts = torch.histc(hist_tensor, bins=bins) + hist_edges = torch.linspace(start=ht_min, end=ht_max, steps=bins + 1, dtype=torch.float32) + + # Parameter for the add_histogram_raw function of SummaryWriter + hist_params = dict( + tag=hist_name, + min=ht_min, + max=ht_max, + num=len(hist_tensor), + sum=float(hist_tensor.sum()), + sum_squares=float(torch.sum(hist_tensor ** 2)), + bucket_limits=hist_edges[1:].tolist(), + bucket_counts=hist_counts.tolist(), + global_step=self._iter, + ) + self._histograms.append(hist_params) + + def history(self, name): + """ + Returns: + HistoryBuffer: the scalar history for name + """ + ret = self._history.get(name, None) + if ret is None: + raise KeyError("No history metric available for {}!".format(name)) + return ret + + def histories(self): + """ + Returns: + dict[name -> HistoryBuffer]: the HistoryBuffer for all scalars + """ + return self._history + + def latest(self): + """ + Returns: + dict[str -> (float, int)]: mapping from the name of each scalar to the most + recent value and the iteration number its added. + """ + return self._latest_scalars + + def latest_with_smoothing_hint(self, window_size=20): + """ + Similar to :meth:`latest`, but the returned values + are either the un-smoothed original latest value, + or a median of the given window_size, + depend on whether the smoothing_hint is True. + + This provides a default behavior that other writers can use. + """ + result = {} + for k, (v, itr) in self._latest_scalars.items(): + result[k] = ( + self._history[k].median(window_size) if self._smoothing_hints[k] else v, + itr, + ) + return result + + def smoothing_hints(self): + """ + Returns: + dict[name -> bool]: the user-provided hint on whether the scalar + is noisy and needs smoothing. + """ + return self._smoothing_hints + + def step(self): + """ + User should either: (1) Call this function to increment storage.iter when needed. Or + (2) Set `storage.iter` to the correct iteration number before each iteration. 
+ + The storage will then be able to associate the new data with an iteration number. + """ + self._iter += 1 + + @property + def iter(self): + """ + Returns: + int: The current iteration number. When used together with a trainer, + this is ensured to be the same as trainer.iter. + """ + return self._iter + + @iter.setter + def iter(self, val): + self._iter = int(val) + + @property + def iteration(self): + # for backward compatibility + return self._iter + + def __enter__(self): + _CURRENT_STORAGE_STACK.append(self) + return self + + def __exit__(self, exc_type, exc_val, exc_tb): + assert _CURRENT_STORAGE_STACK[-1] == self + _CURRENT_STORAGE_STACK.pop() + + @contextmanager + def name_scope(self, name): + """ + Yields: + A context within which all the events added to this storage + will be prefixed by the name scope. + """ + old_prefix = self._current_prefix + self._current_prefix = name.rstrip("/") + "/" + yield + self._current_prefix = old_prefix + + def clear_images(self): + """ + Delete all the stored images for visualization. This should be called + after images are written to tensorboard. + """ + self._vis_data = [] + + def clear_histograms(self): + """ + Delete all the stored histograms for visualization. + This should be called after histograms are written to tensorboard. + """ + self._histograms = [] diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/utils/file_io.py b/motion-gan-pipeline/preprocessing/third/detectron2/utils/file_io.py new file mode 100644 index 0000000..46ee4ec --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/utils/file_io.py @@ -0,0 +1,37 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +from iopath.common.file_io import HTTPURLHandler, OneDrivePathHandler, PathHandler +from iopath.common.file_io import PathManager as PathManagerBase + +__all__ = ["PathManager", "PathHandler"] + + +PathManager = PathManagerBase() +""" +This is a detectron2 project-specific PathManager. +We try to stay away from global PathManager in fvcore as it +introduces potential conflicts among other libraries. +""" + + +class Detectron2Handler(PathHandler): + """ + Resolve anything that's hosted under detectron2's namespace. + """ + + PREFIX = "detectron2://" + S3_DETECTRON2_PREFIX = "https://dl.fbaipublicfiles.com/detectron2/" + + def _get_supported_prefixes(self): + return [self.PREFIX] + + def _get_local_path(self, path, **kwargs): + name = path[len(self.PREFIX) :] + return PathManager.get_local_path(self.S3_DETECTRON2_PREFIX + name, **kwargs) + + def _open(self, path, mode="r", **kwargs): + return PathManager.open(self._get_local_path(path), mode, **kwargs) + + +PathManager.register_handler(HTTPURLHandler()) +PathManager.register_handler(OneDrivePathHandler()) +PathManager.register_handler(Detectron2Handler()) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/utils/logger.py b/motion-gan-pipeline/preprocessing/third/detectron2/utils/logger.py new file mode 100644 index 0000000..c0c19c4 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/utils/logger.py @@ -0,0 +1,237 @@ +# Copyright (c) Facebook, Inc. and its affiliates. 
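+#
+# Illustrative usage of this module (the output path is a placeholder):
+#
+#   from detectron2.utils.logger import setup_logger
+#   logger = setup_logger(output="./output")  # log file is written to ./output/log.txt
+#   logger.info("preprocessing started")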
+import atexit +import functools +import logging +import os +import sys +import time +from collections import Counter +import torch +from tabulate import tabulate +from termcolor import colored + +from detectron2.utils.file_io import PathManager + +__all__ = ["setup_logger", "log_first_n", "log_every_n", "log_every_n_seconds"] + + +class _ColorfulFormatter(logging.Formatter): + def __init__(self, *args, **kwargs): + self._root_name = kwargs.pop("root_name") + "." + self._abbrev_name = kwargs.pop("abbrev_name", "") + if len(self._abbrev_name): + self._abbrev_name = self._abbrev_name + "." + super(_ColorfulFormatter, self).__init__(*args, **kwargs) + + def formatMessage(self, record): + record.name = record.name.replace(self._root_name, self._abbrev_name) + log = super(_ColorfulFormatter, self).formatMessage(record) + if record.levelno == logging.WARNING: + prefix = colored("WARNING", "red", attrs=["blink"]) + elif record.levelno == logging.ERROR or record.levelno == logging.CRITICAL: + prefix = colored("ERROR", "red", attrs=["blink", "underline"]) + else: + return log + return prefix + " " + log + + +@functools.lru_cache() # so that calling setup_logger multiple times won't add many handlers +def setup_logger( + output=None, distributed_rank=0, *, color=True, name="detectron2", abbrev_name=None +): + """ + Initialize the detectron2 logger and set its verbosity level to "DEBUG". + + Args: + output (str): a file name or a directory to save log. If None, will not save log file. + If ends with ".txt" or ".log", assumed to be a file name. + Otherwise, logs will be saved to `output/log.txt`. + name (str): the root module name of this logger + abbrev_name (str): an abbreviation of the module, to avoid long names in logs. + Set to "" to not log the root module in logs. + By default, will abbreviate "detectron2" to "d2" and leave other + modules unchanged. + + Returns: + logging.Logger: a logger + """ + logger = logging.getLogger(name) + logger.setLevel(logging.DEBUG) + logger.propagate = False + + if abbrev_name is None: + abbrev_name = "d2" if name == "detectron2" else name + + plain_formatter = logging.Formatter( + "[%(asctime)s] %(name)s %(levelname)s: %(message)s", datefmt="%m/%d %H:%M:%S" + ) + # stdout logging: master only + if distributed_rank == 0: + ch = logging.StreamHandler(stream=sys.stdout) + ch.setLevel(logging.DEBUG) + if color: + formatter = _ColorfulFormatter( + colored("[%(asctime)s %(name)s]: ", "green") + "%(message)s", + datefmt="%m/%d %H:%M:%S", + root_name=name, + abbrev_name=str(abbrev_name), + ) + else: + formatter = plain_formatter + ch.setFormatter(formatter) + logger.addHandler(ch) + + # file logging: all workers + if output is not None: + if output.endswith(".txt") or output.endswith(".log"): + filename = output + else: + filename = os.path.join(output, "log.txt") + if distributed_rank > 0: + filename = filename + ".rank{}".format(distributed_rank) + PathManager.mkdirs(os.path.dirname(filename)) + + fh = logging.StreamHandler(_cached_log_stream(filename)) + fh.setLevel(logging.DEBUG) + fh.setFormatter(plain_formatter) + logger.addHandler(fh) + + return logger + + +# cache the opened file object, so that different calls to `setup_logger` +# with the same file name can safely write to the same file. 
+@functools.lru_cache(maxsize=None)
+def _cached_log_stream(filename):
+    # use 1K buffer if writing to cloud storage
+    io = PathManager.open(filename, "a", buffering=1024 if "://" in filename else -1)
+    atexit.register(io.close)
+    return io
+
+
+"""
+Below are some other convenient logging methods.
+They are mainly adopted from
+https://github.com/abseil/abseil-py/blob/master/absl/logging/__init__.py
+"""
+
+
+def _find_caller():
+    """
+    Returns:
+        str: module name of the caller
+        tuple: a hashable key to be used to identify different callers
+    """
+    frame = sys._getframe(2)
+    while frame:
+        code = frame.f_code
+        if os.path.join("utils", "logger.") not in code.co_filename:
+            mod_name = frame.f_globals["__name__"]
+            if mod_name == "__main__":
+                mod_name = "detectron2"
+            return mod_name, (code.co_filename, frame.f_lineno, code.co_name)
+        frame = frame.f_back
+
+
+_LOG_COUNTER = Counter()
+_LOG_TIMER = {}
+
+
+def log_first_n(lvl, msg, n=1, *, name=None, key="caller"):
+    """
+    Log only for the first n times.
+
+    Args:
+        lvl (int): the logging level
+        msg (str):
+        n (int):
+        name (str): name of the logger to use. Will use the caller's module by default.
+        key (str or tuple[str]): the string(s) can be one of "caller" or
+            "message", which defines how to identify duplicated logs.
+            For example, if called with `n=1, key="caller"`, this function
+            will only log the first call from the same caller, regardless of
+            the message content.
+            If called with `n=1, key="message"`, this function will log the
+            same content only once, even if they are called from different places.
+            If called with `n=1, key=("caller", "message")`, this function
+            will log only if the same caller has not logged the same message before.
+    """
+    if isinstance(key, str):
+        key = (key,)
+    assert len(key) > 0
+
+    caller_module, caller_key = _find_caller()
+    hash_key = ()
+    if "caller" in key:
+        hash_key = hash_key + caller_key
+    if "message" in key:
+        hash_key = hash_key + (msg,)
+
+    _LOG_COUNTER[hash_key] += 1
+    if _LOG_COUNTER[hash_key] <= n:
+        logging.getLogger(name or caller_module).log(lvl, msg)
+
+
+def log_every_n(lvl, msg, n=1, *, name=None):
+    """
+    Log once per n times.
+
+    Args:
+        lvl (int): the logging level
+        msg (str):
+        n (int):
+        name (str): name of the logger to use. Will use the caller's module by default.
+    """
+    caller_module, key = _find_caller()
+    _LOG_COUNTER[key] += 1
+    if n == 1 or _LOG_COUNTER[key] % n == 1:
+        logging.getLogger(name or caller_module).log(lvl, msg)
+
+
+def log_every_n_seconds(lvl, msg, n=1, *, name=None):
+    """
+    Log no more than once per n seconds.
+
+    Args:
+        lvl (int): the logging level
+        msg (str):
+        n (int):
+        name (str): name of the logger to use. Will use the caller's module by default.
+    """
+    caller_module, key = _find_caller()
+    last_logged = _LOG_TIMER.get(key, None)
+    current_time = time.time()
+    if last_logged is None or current_time - last_logged >= n:
+        logging.getLogger(name or caller_module).log(lvl, msg)
+        _LOG_TIMER[key] = current_time
+
+
+def create_small_table(small_dict):
+    """
+    Create a small table using the keys of small_dict as headers. This is only
+    suitable for small dictionaries.
+
+    Args:
+        small_dict (dict): a result dictionary of only a few items.
+
+    Returns:
+        str: the table as a string.
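+
+    Example (illustrative)::
+
+        create_small_table({"AP": 37.4, "AP50": 58.1})
+        # -> a markdown-style "pipe" table with AP and AP50 as centered columns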
+ """ + keys, values = tuple(zip(*small_dict.items())) + table = tabulate( + [values], + headers=keys, + tablefmt="pipe", + floatfmt=".3f", + stralign="center", + numalign="center", + ) + return table + + +def _log_api_usage(identifier: str): + """ + Internal function used to log the usage of different detectron2 components + inside facebook's infra. + """ + torch._C._log_api_usage_once("detectron2." + identifier) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/utils/memory.py b/motion-gan-pipeline/preprocessing/third/detectron2/utils/memory.py new file mode 100644 index 0000000..bd49478 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/utils/memory.py @@ -0,0 +1,84 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +import logging +from contextlib import contextmanager +from functools import wraps +import torch + +__all__ = ["retry_if_cuda_oom"] + + +@contextmanager +def _ignore_torch_cuda_oom(): + """ + A context which ignores CUDA OOM exception from pytorch. + """ + try: + yield + except RuntimeError as e: + # NOTE: the string may change? + if "CUDA out of memory. " in str(e): + pass + else: + raise + + +def retry_if_cuda_oom(func): + """ + Makes a function retry itself after encountering + pytorch's CUDA OOM error. + It will first retry after calling `torch.cuda.empty_cache()`. + + If that still fails, it will then retry by trying to convert inputs to CPUs. + In this case, it expects the function to dispatch to CPU implementation. + The return values may become CPU tensors as well and it's user's + responsibility to convert it back to CUDA tensor if needed. + + Args: + func: a stateless callable that takes tensor-like objects as arguments + + Returns: + a callable which retries `func` if OOM is encountered. + + Examples: + :: + output = retry_if_cuda_oom(some_torch_function)(input1, input2) + # output may be on CPU even if inputs are on GPU + + Note: + 1. When converting inputs to CPU, it will only look at each argument and check + if it has `.device` and `.to` for conversion. Nested structures of tensors + are not supported. + + 2. Since the function might be called more than once, it has to be + stateless. + """ + + def maybe_to_cpu(x): + try: + like_gpu_tensor = x.device.type == "cuda" and hasattr(x, "to") + except AttributeError: + like_gpu_tensor = False + if like_gpu_tensor: + return x.to(device="cpu") + else: + return x + + @wraps(func) + def wrapped(*args, **kwargs): + with _ignore_torch_cuda_oom(): + return func(*args, **kwargs) + + # Clear cache and retry + torch.cuda.empty_cache() + with _ignore_torch_cuda_oom(): + return func(*args, **kwargs) + + # Try on CPU. This slows down the code significantly, therefore print a notice. + logger = logging.getLogger(__name__) + logger.info("Attempting to copy inputs of {} to CPU due to CUDA OOM".format(str(func))) + new_args = (maybe_to_cpu(x) for x in args) + new_kwargs = {k: maybe_to_cpu(v) for k, v in kwargs.items()} + return func(*new_args, **new_kwargs) + + return wrapped diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/utils/registry.py b/motion-gan-pipeline/preprocessing/third/detectron2/utils/registry.py new file mode 100644 index 0000000..f67d809 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/utils/registry.py @@ -0,0 +1,60 @@ +# Copyright (c) Facebook, Inc. and its affiliates. + +from typing import Any +import pydoc +from fvcore.common.registry import Registry # for backward compatibility. 
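+# Illustrative use of the re-exported fvcore ``Registry`` (the names below are
+# placeholders, not part of this file):
+#
+#   BACKBONE_REGISTRY = Registry("BACKBONE")
+#
+#   @BACKBONE_REGISTRY.register()
+#   class ToyBackbone(Backbone):
+#       ...
+#
+#   backbone_cls = BACKBONE_REGISTRY.get("ToyBackbone")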
+
+"""
+``Registry`` and `locate` provide ways to map a string (typically found
+in config files) to callable objects.
+"""
+
+__all__ = ["Registry", "locate"]
+
+
+def _convert_target_to_string(t: Any) -> str:
+    """
+    Inverse of ``locate()``.
+
+    Args:
+        t: any object with ``__module__`` and ``__qualname__``
+    """
+    module, qualname = t.__module__, t.__qualname__
+
+    # Compress the path to this object, e.g. ``module.submodule._impl.class``
+    # may become ``module.submodule.class``, if the latter also resolves to the same
+    # object. This simplifies the string, and also is less affected by moving the
+    # class implementation.
+    module_parts = module.split(".")
+    for k in range(1, len(module_parts)):
+        prefix = ".".join(module_parts[:k])
+        candidate = f"{prefix}.{qualname}"
+        try:
+            if locate(candidate) is t:
+                return candidate
+        except ImportError:
+            pass
+    return f"{module}.{qualname}"
+
+
+def locate(name: str) -> Any:
+    """
+    Locate and return an object ``x`` using an input string ``{x.__module__}.{x.__qualname__}``,
+    such as "module.submodule.class_name".
+
+    Raise Exception if it cannot be found.
+    """
+    obj = pydoc.locate(name)
+
+    # Some cases (e.g. torch.optim.sgd.SGD) not handled correctly
+    # by pydoc.locate. Try a private function from hydra.
+    if obj is None:
+        try:
+            # from hydra.utils import get_method - will print many errors
+            from hydra.utils import _locate
+        except ImportError as e:
+            raise ImportError(f"Cannot dynamically locate object {name}!") from e
+        else:
+            obj = _locate(name)  # it raises if fails
+
+    return obj
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/utils/serialize.py b/motion-gan-pipeline/preprocessing/third/detectron2/utils/serialize.py
new file mode 100644
index 0000000..0b38862
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/utils/serialize.py
@@ -0,0 +1,32 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+import cloudpickle
+
+
+class PicklableWrapper(object):
+    """
+    Wrap an object to make it more picklable. Note that it uses
+    heavyweight serialization libraries that are slower than pickle.
+    It's best to use it only on closures (which are usually not picklable).
+
+    This is a simplified version of
+    https://github.com/joblib/joblib/blob/master/joblib/externals/loky/cloudpickle_wrapper.py
+    """
+
+    def __init__(self, obj):
+        while isinstance(obj, PicklableWrapper):
+            # Wrapping an object twice is a no-op
+            obj = obj._obj
+        self._obj = obj
+
+    def __reduce__(self):
+        s = cloudpickle.dumps(self._obj)
+        return cloudpickle.loads, (s,)
+
+    def __call__(self, *args, **kwargs):
+        return self._obj(*args, **kwargs)
+
+    def __getattr__(self, attr):
+        # Ensure that the wrapped object can be used seamlessly as the previous object.
+        if attr not in ["_obj"]:
+            return getattr(self._obj, attr)
+        return getattr(self, attr)
diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/utils/testing.py b/motion-gan-pipeline/preprocessing/third/detectron2/utils/testing.py
new file mode 100644
index 0000000..161fa6b
--- /dev/null
+++ b/motion-gan-pipeline/preprocessing/third/detectron2/utils/testing.py
@@ -0,0 +1,137 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
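+#
+# Illustrative usage of the helpers defined below (the config path is a real
+# model-zoo entry, shown only as an example):
+#
+#   boxes = random_boxes(5, max_coord=128)        # Nx4 XYXY boxes tensor
+#   img = get_sample_coco_image(tensor=True)      # 3xHxW BGR tensor
+#   model = get_model_no_weights("COCO-Detection/faster_rcnn_R_50_FPN_3x.yaml")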
+import io +import numpy as np +import torch + +from detectron2 import model_zoo +from detectron2.data import DatasetCatalog +from detectron2.data.detection_utils import read_image +from detectron2.modeling import build_model +from detectron2.structures import Boxes, Instances, ROIMasks +from detectron2.utils.file_io import PathManager + + +""" +Internal utilities for tests. Don't use except for writing tests. +""" + + +def get_model_no_weights(config_path): + """ + Like model_zoo.get, but do not load any weights (even pretrained) + """ + cfg = model_zoo.get_config(config_path) + if not torch.cuda.is_available(): + cfg.MODEL.DEVICE = "cpu" + return build_model(cfg) + + +def random_boxes(num_boxes, max_coord=100, device="cpu"): + """ + Create a random Nx4 boxes tensor, with coordinates < max_coord. + """ + boxes = torch.rand(num_boxes, 4, device=device) * (max_coord * 0.5) + boxes.clamp_(min=1.0) # tiny boxes cause numerical instability in box regression + # Note: the implementation of this function in torchvision is: + # boxes[:, 2:] += torch.rand(N, 2) * 100 + # but it does not guarantee non-negative widths/heights constraints: + # boxes[:, 2] >= boxes[:, 0] and boxes[:, 3] >= boxes[:, 1]: + boxes[:, 2:] += boxes[:, :2] + return boxes + + +def get_sample_coco_image(tensor=True): + """ + Args: + tensor (bool): if True, returns 3xHxW tensor. + else, returns a HxWx3 numpy array. + + Returns: + an image, in BGR color. + """ + try: + file_name = DatasetCatalog.get("coco_2017_val_100")[0]["file_name"] + if not PathManager.exists(file_name): + raise FileNotFoundError() + except IOError: + # for public CI to run + file_name = PathManager.get_local_path( + "http://images.cocodataset.org/train2017/000000000009.jpg" + ) + ret = read_image(file_name, format="BGR") + if tensor: + ret = torch.from_numpy(np.ascontiguousarray(ret.transpose(2, 0, 1))) + return ret + + +def convert_scripted_instances(instances): + """ + Convert a scripted Instances object to a regular :class:`Instances` object + """ + assert hasattr( + instances, "image_size" + ), f"Expect an Instances object, but got {type(instances)}!" + ret = Instances(instances.image_size) + for name in instances._field_names: + val = getattr(instances, "_" + name, None) + if val is not None: + ret.set(name, val) + return ret + + +def assert_instances_allclose(input, other, *, rtol=1e-5, msg="", size_as_tensor=False): + """ + Args: + input, other (Instances): + size_as_tensor: compare image_size of the Instances as tensors (instead of tuples). + Useful for comparing outputs of tracing. + """ + if not isinstance(input, Instances): + input = convert_scripted_instances(input) + if not isinstance(other, Instances): + other = convert_scripted_instances(other) + + if not msg: + msg = "Two Instances are different! " + else: + msg = msg.rstrip() + " " + + size_error_msg = msg + f"image_size is {input.image_size} vs. {other.image_size}!" + if size_as_tensor: + assert torch.equal( + torch.tensor(input.image_size), torch.tensor(other.image_size) + ), size_error_msg + else: + assert input.image_size == other.image_size, size_error_msg + fields = sorted(input.get_fields().keys()) + fields_other = sorted(other.get_fields().keys()) + assert fields == fields_other, msg + f"Fields are {fields} vs {fields_other}!" 
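+
+    # Field-by-field comparison below: Boxes/ROIMasks tensors use an absolute
+    # tolerance sized for O(100) coordinates, floating-point tensors scale the
+    # tolerance by their max magnitude, and all other tensors must match exactly.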
+ + for f in fields: + val1, val2 = input.get(f), other.get(f) + if isinstance(val1, (Boxes, ROIMasks)): + # boxes in the range of O(100) and can have a larger tolerance + assert torch.allclose(val1.tensor, val2.tensor, atol=100 * rtol), ( + msg + f"Field {f} differs too much!" + ) + elif isinstance(val1, torch.Tensor): + if val1.dtype.is_floating_point: + mag = torch.abs(val1).max().cpu().item() + assert torch.allclose(val1, val2, atol=mag * rtol), ( + msg + f"Field {f} differs too much!" + ) + else: + assert torch.equal(val1, val2), msg + f"Field {f} is different!" + else: + raise ValueError(f"Don't know how to compare type {type(val1)}") + + +def reload_script_model(module): + """ + Save a jit module and load it back. + Similar to the `getExportImportCopy` function in torch/testing/ + """ + buffer = io.BytesIO() + torch.jit.save(module, buffer) + buffer.seek(0) + return torch.jit.load(buffer) diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/utils/video_visualizer.py b/motion-gan-pipeline/preprocessing/third/detectron2/utils/video_visualizer.py new file mode 100644 index 0000000..5ec4de1 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/utils/video_visualizer.py @@ -0,0 +1,282 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import numpy as np +import pycocotools.mask as mask_util + +from detectron2.utils.visualizer import ( + ColorMode, + Visualizer, + _create_text_labels, + _PanopticPrediction, +) + +from .colormap import random_color, random_colors +from detectron2.structures import Instances +from typing import List + + +class _DetectedInstance: + """ + Used to store data about detected objects in video frame, + in order to transfer color to objects in the future frames. + + Attributes: + label (int): + bbox (tuple[float]): + mask_rle (dict): + color (tuple[float]): RGB colors in range (0, 1) + ttl (int): time-to-live for the instance. For example, if ttl=2, + the instance color can be transferred to objects in the next two frames. + """ + + __slots__ = ["label", "bbox", "mask_rle", "color", "ttl"] + + def __init__(self, label, bbox, mask_rle, color, ttl): + self.label = label + self.bbox = bbox + self.mask_rle = mask_rle + self.color = color + self.ttl = ttl + + +class VideoVisualizer: + def __init__(self, metadata, instance_mode=ColorMode.IMAGE): + """ + Args: + metadata (MetadataCatalog): image metadata. + """ + self.metadata = metadata + self._old_instances = [] + assert instance_mode in [ + ColorMode.IMAGE, + ColorMode.IMAGE_BW, + ], "Other mode not supported yet." + self._instance_mode = instance_mode + self._max_num_instances = self.metadata.get("max_num_instances", 74) + self._assigned_colors = {} + self._color_pool = random_colors(self._max_num_instances, rgb=True, maximum=1) + self._color_idx_set = set(range(len(self._color_pool))) + + def draw_instance_predictions(self, frame, predictions): + """ + Draw instance-level prediction results on an image. + + Args: + frame (ndarray): an RGB image of shape (H, W, C), in the range [0, 255]. + predictions (Instances): the output of an instance detection/segmentation + model. Following fields will be used to draw: + "pred_boxes", "pred_classes", "scores", "pred_masks" (or "pred_masks_rle"). + + Returns: + output (VisImage): image object with visualizations. 
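+
+        Example (illustrative; ``predictor`` is assumed to be a detectron2
+        ``DefaultPredictor`` and ``frame`` an RGB array)::
+
+            vis = VideoVisualizer(MetadataCatalog.get("coco_2017_val"))
+            out = vis.draw_instance_predictions(
+                frame, predictor(frame)["instances"].to("cpu")
+            )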
+ """ + frame_visualizer = Visualizer(frame, self.metadata) + num_instances = len(predictions) + if num_instances == 0: + return frame_visualizer.output + + boxes = predictions.pred_boxes.tensor.numpy() if predictions.has("pred_boxes") else None + scores = predictions.scores if predictions.has("scores") else None + classes = predictions.pred_classes.numpy() if predictions.has("pred_classes") else None + keypoints = predictions.pred_keypoints if predictions.has("pred_keypoints") else None + colors = predictions.COLOR if predictions.has("COLOR") else [None] * len(predictions) + periods = predictions.ID_period if predictions.has("ID_period") else None + period_threshold = self.metadata.get("period_threshold", 0) + visibilities = [True] * len(predictions) if periods is None else [ + x > period_threshold for x in periods] + + if predictions.has("pred_masks"): + masks = predictions.pred_masks + # mask IOU is not yet enabled + # masks_rles = mask_util.encode(np.asarray(masks.permute(1, 2, 0), order="F")) + # assert len(masks_rles) == num_instances + else: + masks = None + + if not predictions.has("COLOR"): + if predictions.has("ID"): + colors = self._assign_colors_by_id(predictions) + else: + # ToDo: clean old assign color method and use a default tracker to assign id + detected = [ + _DetectedInstance(classes[i], boxes[i], mask_rle=None, color=colors[i], ttl=8) + for i in range(num_instances) + ] + colors = self._assign_colors(detected) + + labels = _create_text_labels(classes, scores, self.metadata.get("thing_classes", None)) + + if self._instance_mode == ColorMode.IMAGE_BW: + # any() returns uint8 tensor + frame_visualizer.output.reset_image( + frame_visualizer._create_grayscale_image( + (masks.any(dim=0) > 0).numpy() if masks is not None else None + ) + ) + alpha = 0.3 + else: + alpha = 0.5 + + labels = ( + None + if labels is None + else [y[0] for y in filter(lambda x: x[1], zip(labels, visibilities))] + ) # noqa + assigned_colors = ( + None + if colors is None + else [y[0] for y in filter(lambda x: x[1], zip(colors, visibilities))] + ) # noqa + frame_visualizer.overlay_instances( + boxes=None if masks is not None else boxes[visibilities], # boxes are a bit distracting + masks=None if masks is None else masks[visibilities], + labels=labels, + keypoints=None if keypoints is None else keypoints[visibilities], + assigned_colors=assigned_colors, + alpha=alpha, + ) + + return frame_visualizer.output + + def draw_sem_seg(self, frame, sem_seg, area_threshold=None): + """ + Args: + sem_seg (ndarray or Tensor): semantic segmentation of shape (H, W), + each value is the integer label. + area_threshold (Optional[int]): only draw segmentations larger than the threshold + """ + # don't need to do anything special + frame_visualizer = Visualizer(frame, self.metadata) + frame_visualizer.draw_sem_seg(sem_seg, area_threshold=None) + return frame_visualizer.output + + def draw_panoptic_seg_predictions( + self, frame, panoptic_seg, segments_info, area_threshold=None, alpha=0.5 + ): + frame_visualizer = Visualizer(frame, self.metadata) + pred = _PanopticPrediction(panoptic_seg, segments_info, self.metadata) + + if self._instance_mode == ColorMode.IMAGE_BW: + frame_visualizer.output.reset_image( + frame_visualizer._create_grayscale_image(pred.non_empty_mask()) + ) + + # draw mask for all semantic segments first i.e. 
"stuff" + for mask, sinfo in pred.semantic_masks(): + category_idx = sinfo["category_id"] + try: + mask_color = [x / 255 for x in self.metadata.stuff_colors[category_idx]] + except AttributeError: + mask_color = None + + frame_visualizer.draw_binary_mask( + mask, + color=mask_color, + text=self.metadata.stuff_classes[category_idx], + alpha=alpha, + area_threshold=area_threshold, + ) + + all_instances = list(pred.instance_masks()) + if len(all_instances) == 0: + return frame_visualizer.output + # draw mask for all instances second + masks, sinfo = list(zip(*all_instances)) + num_instances = len(masks) + masks_rles = mask_util.encode( + np.asarray(np.asarray(masks).transpose(1, 2, 0), dtype=np.uint8, order="F") + ) + assert len(masks_rles) == num_instances + + category_ids = [x["category_id"] for x in sinfo] + detected = [ + _DetectedInstance(category_ids[i], bbox=None, mask_rle=masks_rles[i], color=None, ttl=8) + for i in range(num_instances) + ] + colors = self._assign_colors(detected) + labels = [self.metadata.thing_classes[k] for k in category_ids] + + frame_visualizer.overlay_instances( + boxes=None, + masks=masks, + labels=labels, + keypoints=None, + assigned_colors=colors, + alpha=alpha, + ) + return frame_visualizer.output + + def _assign_colors(self, instances): + """ + Naive tracking heuristics to assign same color to the same instance, + will update the internal state of tracked instances. + + Returns: + list[tuple[float]]: list of colors. + """ + + # Compute iou with either boxes or masks: + is_crowd = np.zeros((len(instances),), dtype=np.bool) + if instances[0].bbox is None: + assert instances[0].mask_rle is not None + # use mask iou only when box iou is None + # because box seems good enough + rles_old = [x.mask_rle for x in self._old_instances] + rles_new = [x.mask_rle for x in instances] + ious = mask_util.iou(rles_old, rles_new, is_crowd) + threshold = 0.5 + else: + boxes_old = [x.bbox for x in self._old_instances] + boxes_new = [x.bbox for x in instances] + ious = mask_util.iou(boxes_old, boxes_new, is_crowd) + threshold = 0.6 + if len(ious) == 0: + ious = np.zeros((len(self._old_instances), len(instances)), dtype="float32") + + # Only allow matching instances of the same label: + for old_idx, old in enumerate(self._old_instances): + for new_idx, new in enumerate(instances): + if old.label != new.label: + ious[old_idx, new_idx] = 0 + + matched_new_per_old = np.asarray(ious).argmax(axis=1) + max_iou_per_old = np.asarray(ious).max(axis=1) + + # Try to find match for each old instance: + extra_instances = [] + for idx, inst in enumerate(self._old_instances): + if max_iou_per_old[idx] > threshold: + newidx = matched_new_per_old[idx] + if instances[newidx].color is None: + instances[newidx].color = inst.color + continue + # If an old instance does not match any new instances, + # keep it for the next frame in case it is just missed by the detector + inst.ttl -= 1 + if inst.ttl > 0: + extra_instances.append(inst) + + # Assign random color to newly-detected instances: + for inst in instances: + if inst.color is None: + inst.color = random_color(rgb=True, maximum=1) + self._old_instances = instances[:] + extra_instances + return [d.color for d in instances] + + def _assign_colors_by_id(self, instances: Instances) -> List: + colors = [] + untracked_ids = set(self._assigned_colors.keys()) + for id in instances.ID: + if id in self._assigned_colors: + colors.append(self._color_pool[self._assigned_colors[id]]) + untracked_ids.remove(id) + else: + assert len(self._color_idx_set) >= 1, 
f"Number of id exceeded maximum, \ + max = {self._max_num_instances}" + idx = self._color_idx_set.pop() + color = self._color_pool[idx] + self._assigned_colors[id] = idx + colors.append(color) + for id in untracked_ids: + self._color_idx_set.add(self._assigned_colors[id]) + del self._assigned_colors[id] + return colors diff --git a/motion-gan-pipeline/preprocessing/third/detectron2/utils/visualizer.py b/motion-gan-pipeline/preprocessing/third/detectron2/utils/visualizer.py new file mode 100644 index 0000000..8e14518 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/detectron2/utils/visualizer.py @@ -0,0 +1,1267 @@ +# Copyright (c) Facebook, Inc. and its affiliates. +import colorsys +import logging +import math +import numpy as np +from enum import Enum, unique +import cv2 +import matplotlib as mpl +import matplotlib.colors as mplc +import matplotlib.figure as mplfigure +import pycocotools.mask as mask_util +import torch +from matplotlib.backends.backend_agg import FigureCanvasAgg +from PIL import Image + +from detectron2.data import MetadataCatalog +from detectron2.structures import BitMasks, Boxes, BoxMode, Keypoints, PolygonMasks, RotatedBoxes +from detectron2.utils.file_io import PathManager + +from .colormap import random_color + +logger = logging.getLogger(__name__) + +__all__ = ["ColorMode", "VisImage", "Visualizer"] + + +_SMALL_OBJECT_AREA_THRESH = 1000 +_LARGE_MASK_AREA_THRESH = 120000 +_OFF_WHITE = (1.0, 1.0, 240.0 / 255) +_BLACK = (0, 0, 0) +_RED = (1.0, 0, 0) + +_KEYPOINT_THRESHOLD = 0.05 + + +@unique +class ColorMode(Enum): + """ + Enum of different color modes to use for instance visualizations. + """ + + IMAGE = 0 + """ + Picks a random color for every instance and overlay segmentations with low opacity. + """ + SEGMENTATION = 1 + """ + Let instances of the same category have similar colors + (from metadata.thing_colors), and overlay them with + high opacity. This provides more attention on the quality of segmentation. + """ + IMAGE_BW = 2 + """ + Same as IMAGE, but convert all areas without masks to gray-scale. + Only available for drawing per-instance mask predictions. + """ + + +class GenericMask: + """ + Attribute: + polygons (list[ndarray]): list[ndarray]: polygons for this mask. + Each ndarray has format [x, y, x, y, ...] 
+ mask (ndarray): a binary mask + """ + + def __init__(self, mask_or_polygons, height, width): + self._mask = self._polygons = self._has_holes = None + self.height = height + self.width = width + + m = mask_or_polygons + if isinstance(m, dict): + # RLEs + assert "counts" in m and "size" in m + if isinstance(m["counts"], list): # uncompressed RLEs + h, w = m["size"] + assert h == height and w == width + m = mask_util.frPyObjects(m, h, w) + self._mask = mask_util.decode(m)[:, :] + return + + if isinstance(m, list): # list[ndarray] + self._polygons = [np.asarray(x).reshape(-1) for x in m] + return + + if isinstance(m, np.ndarray): # assumed to be a binary mask + assert m.shape[1] != 2, m.shape + assert m.shape == ( + height, + width, + ), f"mask shape: {m.shape}, target dims: {height}, {width}" + self._mask = m.astype("uint8") + return + + raise ValueError("GenericMask cannot handle object {} of type '{}'".format(m, type(m))) + + @property + def mask(self): + if self._mask is None: + self._mask = self.polygons_to_mask(self._polygons) + return self._mask + + @property + def polygons(self): + if self._polygons is None: + self._polygons, self._has_holes = self.mask_to_polygons(self._mask) + return self._polygons + + @property + def has_holes(self): + if self._has_holes is None: + if self._mask is not None: + self._polygons, self._has_holes = self.mask_to_polygons(self._mask) + else: + self._has_holes = False # if original format is polygon, does not have holes + return self._has_holes + + def mask_to_polygons(self, mask): + # cv2.RETR_CCOMP flag retrieves all the contours and arranges them to a 2-level + # hierarchy. External contours (boundary) of the object are placed in hierarchy-1. + # Internal contours (holes) are placed in hierarchy-2. + # cv2.CHAIN_APPROX_NONE flag gets vertices of polygons from contours. + mask = np.ascontiguousarray(mask) # some versions of cv2 does not support incontiguous arr + res = cv2.findContours(mask.astype("uint8"), cv2.RETR_CCOMP, cv2.CHAIN_APPROX_NONE) + hierarchy = res[-1] + if hierarchy is None: # empty mask + return [], False + has_holes = (hierarchy.reshape(-1, 4)[:, 3] >= 0).sum() > 0 + res = res[-2] + res = [x.flatten() for x in res] + # These coordinates from OpenCV are integers in range [0, W-1 or H-1]. + # We add 0.5 to turn them into real-value coordinate space. A better solution + # would be to first +0.5 and then dilate the returned polygon by 0.5. + res = [x + 0.5 for x in res if len(x) >= 6] + return res, has_holes + + def polygons_to_mask(self, polygons): + rle = mask_util.frPyObjects(polygons, self.height, self.width) + rle = mask_util.merge(rle) + return mask_util.decode(rle)[:, :] + + def area(self): + return self.mask.sum() + + def bbox(self): + p = mask_util.frPyObjects(self.polygons, self.height, self.width) + p = mask_util.merge(p) + bbox = mask_util.toBbox(p) + bbox[2] += bbox[0] + bbox[3] += bbox[1] + return bbox + + +class _PanopticPrediction: + """ + Unify different panoptic annotation/prediction formats + """ + + def __init__(self, panoptic_seg, segments_info, metadata=None): + if segments_info is None: + assert metadata is not None + # If "segments_info" is None, we assume "panoptic_img" is a + # H*W int32 image storing the panoptic_id in the format of + # category_id * label_divisor + instance_id. We reserve -1 for + # VOID label. + label_divisor = metadata.label_divisor + segments_info = [] + for panoptic_label in np.unique(panoptic_seg.numpy()): + if panoptic_label == -1: + # VOID region. 
+                    continue
+                pred_class = panoptic_label // label_divisor
+                isthing = pred_class in metadata.thing_dataset_id_to_contiguous_id.values()
+                segments_info.append(
+                    {
+                        "id": int(panoptic_label),
+                        "category_id": int(pred_class),
+                        "isthing": bool(isthing),
+                    }
+                )
+            del metadata
+
+        self._seg = panoptic_seg
+
+        self._sinfo = {s["id"]: s for s in segments_info}  # seg id -> seg info
+        segment_ids, areas = torch.unique(panoptic_seg, sorted=True, return_counts=True)
+        areas = areas.numpy()
+        sorted_idxs = np.argsort(-areas)
+        self._seg_ids, self._seg_areas = segment_ids[sorted_idxs], areas[sorted_idxs]
+        self._seg_ids = self._seg_ids.tolist()
+        for sid, area in zip(self._seg_ids, self._seg_areas):
+            if sid in self._sinfo:
+                self._sinfo[sid]["area"] = float(area)
+
+    def non_empty_mask(self):
+        """
+        Returns:
+            (H, W) array, a mask for all pixels that have a prediction
+        """
+        empty_ids = []
+        for id in self._seg_ids:
+            if id not in self._sinfo:
+                empty_ids.append(id)
+        if len(empty_ids) == 0:
+            return np.zeros(self._seg.shape, dtype=np.uint8)
+        assert (
+            len(empty_ids) == 1
+        ), ">1 ids correspond to no labels. This is currently not supported"
+        return (self._seg != empty_ids[0]).numpy().astype(bool)
+
+    def semantic_masks(self):
+        for sid in self._seg_ids:
+            sinfo = self._sinfo.get(sid)
+            if sinfo is None or sinfo["isthing"]:
+                # Some pixels (e.g. id 0 in PanopticFPN) have no instance or semantic predictions.
+                continue
+            yield (self._seg == sid).numpy().astype(bool), sinfo
+
+    def instance_masks(self):
+        for sid in self._seg_ids:
+            sinfo = self._sinfo.get(sid)
+            if sinfo is None or not sinfo["isthing"]:
+                continue
+            mask = (self._seg == sid).numpy().astype(bool)
+            if mask.sum() > 0:
+                yield mask, sinfo
+
+
+def _create_text_labels(classes, scores, class_names, is_crowd=None):
+    """
+    Args:
+        classes (list[int] or None):
+        scores (list[float] or None):
+        class_names (list[str] or None):
+        is_crowd (list[bool] or None):
+
+    Returns:
+        list[str] or None
+    """
+    labels = None
+    if classes is not None:
+        if class_names is not None and len(class_names) > 0:
+            labels = [class_names[i] for i in classes]
+        else:
+            labels = [str(i) for i in classes]
+    if scores is not None:
+        if labels is None:
+            labels = ["{:.0f}%".format(s * 100) for s in scores]
+        else:
+            labels = ["{} {:.0f}%".format(l, s * 100) for l, s in zip(labels, scores)]
+    if labels is not None and is_crowd is not None:
+        labels = [l + ("|crowd" if crowd else "") for l, crowd in zip(labels, is_crowd)]
+    return labels
+
+
+class VisImage:
+    def __init__(self, img, scale=1.0):
+        """
+        Args:
+            img (ndarray): an RGB image of shape (H, W, 3) in range [0, 255].
+            scale (float): scale the input image
+        """
+        self.img = img
+        self.scale = scale
+        self.width, self.height = img.shape[1], img.shape[0]
+        self._setup_figure(img)
+
+    def _setup_figure(self, img):
+        """
+        Args:
+            Same as in :meth:`__init__()`.
+
+        Returns:
+            fig (matplotlib.pyplot.figure): top level container for all the image plot elements.
+            ax (matplotlib.pyplot.Axes): contains figure elements and sets the coordinate system.
+ """ + fig = mplfigure.Figure(frameon=False) + self.dpi = fig.get_dpi() + # add a small 1e-2 to avoid precision lost due to matplotlib's truncation + # (https://github.com/matplotlib/matplotlib/issues/15363) + fig.set_size_inches( + (self.width * self.scale + 1e-2) / self.dpi, + (self.height * self.scale + 1e-2) / self.dpi, + ) + self.canvas = FigureCanvasAgg(fig) + # self.canvas = mpl.backends.backend_cairo.FigureCanvasCairo(fig) + ax = fig.add_axes([0.0, 0.0, 1.0, 1.0]) + ax.axis("off") + self.fig = fig + self.ax = ax + self.reset_image(img) + + def reset_image(self, img): + """ + Args: + img: same as in __init__ + """ + img = img.astype("uint8") + self.ax.imshow(img, extent=(0, self.width, self.height, 0), interpolation="nearest") + + def save(self, filepath): + """ + Args: + filepath (str): a string that contains the absolute path, including the file name, where + the visualized image will be saved. + """ + self.fig.savefig(filepath) + + def get_image(self): + """ + Returns: + ndarray: + the visualized image of shape (H, W, 3) (RGB) in uint8 type. + The shape is scaled w.r.t the input image using the given `scale` argument. + """ + canvas = self.canvas + s, (width, height) = canvas.print_to_buffer() + # buf = io.BytesIO() # works for cairo backend + # canvas.print_rgba(buf) + # width, height = self.width, self.height + # s = buf.getvalue() + + buffer = np.frombuffer(s, dtype="uint8") + + img_rgba = buffer.reshape(height, width, 4) + rgb, alpha = np.split(img_rgba, [3], axis=2) + return rgb.astype("uint8") + + +class Visualizer: + """ + Visualizer that draws data about detection/segmentation on images. + + It contains methods like `draw_{text,box,circle,line,binary_mask,polygon}` + that draw primitive objects to images, as well as high-level wrappers like + `draw_{instance_predictions,sem_seg,panoptic_seg_predictions,dataset_dict}` + that draw composite data in some pre-defined style. + + Note that the exact visualization style for the high-level wrappers are subject to change. + Style such as color, opacity, label contents, visibility of labels, or even the visibility + of objects themselves (e.g. when the object is too small) may change according + to different heuristics, as long as the results still look visually reasonable. + + To obtain a consistent style, you can implement custom drawing functions with the + abovementioned primitive methods instead. If you need more customized visualization + styles, you can process the data yourself following their format documented in + tutorials (:doc:`/tutorials/models`, :doc:`/tutorials/datasets`). This class does not + intend to satisfy everyone's preference on drawing styles. + + This visualizer focuses on high rendering quality rather than performance. It is not + designed to be used for real-time applications. + """ + + # TODO implement a fast, rasterized version using OpenCV + + def __init__(self, img_rgb, metadata=None, scale=1.0, instance_mode=ColorMode.IMAGE): + """ + Args: + img_rgb: a numpy array of shape (H, W, C), where H and W correspond to + the height and width of the image respectively. C is the number of + color channels. The image is required to be in RGB format since that + is a requirement of the Matplotlib library. The image is also expected + to be in the range [0, 255]. + metadata (Metadata): dataset metadata (e.g. class names and colors) + instance_mode (ColorMode): defines one of the pre-defined style for drawing + instances on an image. 
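+
+        Example (illustrative; ``outputs`` is assumed to come from a detection model)::
+
+            v = Visualizer(img_rgb, MetadataCatalog.get("coco_2017_val"), scale=1.2)
+            out = v.draw_instance_predictions(outputs["instances"].to("cpu"))
+            out.save("vis.jpg")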
+ """ + self.img = np.asarray(img_rgb).clip(0, 255).astype(np.uint8) + if metadata is None: + metadata = MetadataCatalog.get("__nonexist__") + self.metadata = metadata + self.output = VisImage(self.img, scale=scale) + self.cpu_device = torch.device("cpu") + + # too small texts are useless, therefore clamp to 9 + self._default_font_size = max( + np.sqrt(self.output.height * self.output.width) // 90, 10 // scale + ) + self._instance_mode = instance_mode + self.keypoint_threshold = _KEYPOINT_THRESHOLD + + def draw_instance_predictions(self, predictions): + """ + Draw instance-level prediction results on an image. + + Args: + predictions (Instances): the output of an instance detection/segmentation + model. Following fields will be used to draw: + "pred_boxes", "pred_classes", "scores", "pred_masks" (or "pred_masks_rle"). + + Returns: + output (VisImage): image object with visualizations. + """ + boxes = predictions.pred_boxes if predictions.has("pred_boxes") else None + scores = predictions.scores if predictions.has("scores") else None + classes = predictions.pred_classes.tolist() if predictions.has("pred_classes") else None + labels = _create_text_labels(classes, scores, self.metadata.get("thing_classes", None)) + keypoints = predictions.pred_keypoints if predictions.has("pred_keypoints") else None + + if predictions.has("pred_masks"): + masks = np.asarray(predictions.pred_masks) + masks = [GenericMask(x, self.output.height, self.output.width) for x in masks] + else: + masks = None + + if self._instance_mode == ColorMode.SEGMENTATION and self.metadata.get("thing_colors"): + colors = [ + self._jitter([x / 255 for x in self.metadata.thing_colors[c]]) for c in classes + ] + alpha = 0.8 + else: + colors = None + alpha = 0.5 + + if self._instance_mode == ColorMode.IMAGE_BW: + self.output.reset_image( + self._create_grayscale_image( + (predictions.pred_masks.any(dim=0) > 0).numpy() + if predictions.has("pred_masks") + else None + ) + ) + alpha = 0.3 + + self.overlay_instances( + masks=masks, + boxes=boxes, + labels=labels, + keypoints=keypoints, + assigned_colors=colors, + alpha=alpha, + ) + return self.output + + def draw_sem_seg(self, sem_seg, area_threshold=None, alpha=0.8): + """ + Draw semantic segmentation predictions/labels. + + Args: + sem_seg (Tensor or ndarray): the segmentation of shape (H, W). + Each value is the integer label of the pixel. + area_threshold (int): segments with less than `area_threshold` are not drawn. + alpha (float): the larger it is, the more opaque the segmentations are. + + Returns: + output (VisImage): image object with visualizations. + """ + if isinstance(sem_seg, torch.Tensor): + sem_seg = sem_seg.numpy() + labels, areas = np.unique(sem_seg, return_counts=True) + sorted_idxs = np.argsort(-areas).tolist() + labels = labels[sorted_idxs] + for label in filter(lambda l: l < len(self.metadata.stuff_classes), labels): + try: + mask_color = [x / 255 for x in self.metadata.stuff_colors[label]] + except (AttributeError, IndexError): + mask_color = None + + binary_mask = (sem_seg == label).astype(np.uint8) + text = self.metadata.stuff_classes[label] + self.draw_binary_mask( + binary_mask, + color=mask_color, + edge_color=_OFF_WHITE, + text=text, + alpha=alpha, + area_threshold=area_threshold, + ) + return self.output + + def draw_panoptic_seg(self, panoptic_seg, segments_info, area_threshold=None, alpha=0.7): + """ + Draw panoptic prediction annotations or results. + + Args: + panoptic_seg (Tensor): of shape (height, width) where the values are ids for each + segment. 
+ segments_info (list[dict] or None): Describe each segment in `panoptic_seg`. + If it is a ``list[dict]``, each dict contains keys "id", "category_id". + If None, category id of each pixel is computed by + ``pixel // metadata.label_divisor``. + area_threshold (int): stuff segments with less than `area_threshold` are not drawn. + + Returns: + output (VisImage): image object with visualizations. + """ + pred = _PanopticPrediction(panoptic_seg, segments_info, self.metadata) + + if self._instance_mode == ColorMode.IMAGE_BW: + self.output.reset_image(self._create_grayscale_image(pred.non_empty_mask())) + + # draw mask for all semantic segments first i.e. "stuff" + for mask, sinfo in pred.semantic_masks(): + category_idx = sinfo["category_id"] + try: + mask_color = [x / 255 for x in self.metadata.stuff_colors[category_idx]] + except AttributeError: + mask_color = None + + text = self.metadata.stuff_classes[category_idx] + self.draw_binary_mask( + mask, + color=mask_color, + edge_color=_OFF_WHITE, + text=text, + alpha=alpha, + area_threshold=area_threshold, + ) + + # draw mask for all instances second + all_instances = list(pred.instance_masks()) + if len(all_instances) == 0: + return self.output + masks, sinfo = list(zip(*all_instances)) + category_ids = [x["category_id"] for x in sinfo] + + try: + scores = [x["score"] for x in sinfo] + except KeyError: + scores = None + labels = _create_text_labels( + category_ids, scores, self.metadata.thing_classes, [x.get("iscrowd", 0) for x in sinfo] + ) + + try: + colors = [ + self._jitter([x / 255 for x in self.metadata.thing_colors[c]]) for c in category_ids + ] + except AttributeError: + colors = None + self.overlay_instances(masks=masks, labels=labels, assigned_colors=colors, alpha=alpha) + + return self.output + + draw_panoptic_seg_predictions = draw_panoptic_seg # backward compatibility + + def draw_dataset_dict(self, dic): + """ + Draw annotations/segmentaions in Detectron2 Dataset format. + + Args: + dic (dict): annotation/segmentation data of one image, in Detectron2 Dataset format. + + Returns: + output (VisImage): image object with visualizations. 
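+
+        Example (illustrative; ``metadata`` should match the dataset being drawn)::
+
+            dic = DatasetCatalog.get("coco_2017_train")[0]
+            img = np.asarray(Image.open(dic["file_name"]).convert("RGB"))
+            Visualizer(img, metadata).draw_dataset_dict(dic).save("gt.jpg")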
+ """ + annos = dic.get("annotations", None) + if annos: + if "segmentation" in annos[0]: + masks = [x["segmentation"] for x in annos] + else: + masks = None + if "keypoints" in annos[0]: + keypts = [x["keypoints"] for x in annos] + keypts = np.array(keypts).reshape(len(annos), -1, 3) + else: + keypts = None + + boxes = [ + BoxMode.convert(x["bbox"], x["bbox_mode"], BoxMode.XYXY_ABS) + if len(x["bbox"]) == 4 + else x["bbox"] + for x in annos + ] + + colors = None + category_ids = [x["category_id"] for x in annos] + if self._instance_mode == ColorMode.SEGMENTATION and self.metadata.get("thing_colors"): + colors = [ + self._jitter([x / 255 for x in self.metadata.thing_colors[c]]) + for c in category_ids + ] + names = self.metadata.get("thing_classes", None) + labels = _create_text_labels( + category_ids, + scores=None, + class_names=names, + is_crowd=[x.get("iscrowd", 0) for x in annos], + ) + self.overlay_instances( + labels=labels, boxes=boxes, masks=masks, keypoints=keypts, assigned_colors=colors + ) + + sem_seg = dic.get("sem_seg", None) + if sem_seg is None and "sem_seg_file_name" in dic: + with PathManager.open(dic["sem_seg_file_name"], "rb") as f: + sem_seg = Image.open(f) + sem_seg = np.asarray(sem_seg, dtype="uint8") + if sem_seg is not None: + self.draw_sem_seg(sem_seg, area_threshold=0, alpha=0.5) + + pan_seg = dic.get("pan_seg", None) + if pan_seg is None and "pan_seg_file_name" in dic: + with PathManager.open(dic["pan_seg_file_name"], "rb") as f: + pan_seg = Image.open(f) + pan_seg = np.asarray(pan_seg) + from panopticapi.utils import rgb2id + + pan_seg = rgb2id(pan_seg) + if pan_seg is not None: + segments_info = dic["segments_info"] + pan_seg = torch.tensor(pan_seg) + self.draw_panoptic_seg(pan_seg, segments_info, area_threshold=0, alpha=0.5) + return self.output + + def overlay_instances( + self, + *, + boxes=None, + labels=None, + masks=None, + keypoints=None, + assigned_colors=None, + alpha=0.5, + ): + """ + Args: + boxes (Boxes, RotatedBoxes or ndarray): either a :class:`Boxes`, + or an Nx4 numpy array of XYXY_ABS format for the N objects in a single image, + or a :class:`RotatedBoxes`, + or an Nx5 numpy array of (x_center, y_center, width, height, angle_degrees) format + for the N objects in a single image, + labels (list[str]): the text to be displayed for each instance. + masks (masks-like object): Supported types are: + + * :class:`detectron2.structures.PolygonMasks`, + :class:`detectron2.structures.BitMasks`. + * list[list[ndarray]]: contains the segmentation masks for all objects in one image. + The first level of the list corresponds to individual instances. The second + level to all the polygon that compose the instance, and the third level + to the polygon coordinates. The third level should have the format of + [x0, y0, x1, y1, ..., xn, yn] (n >= 3). + * list[ndarray]: each ndarray is a binary mask of shape (H, W). + * list[dict]: each dict is a COCO-style RLE. + keypoints (Keypoint or array like): an array-like object of shape (N, K, 3), + where the N is the number of instances and K is the number of keypoints. + The last dimension corresponds to (x, y, visibility or score). + assigned_colors (list[matplotlib.colors]): a list of colors, where each color + corresponds to each mask or box in the image. Refer to 'matplotlib.colors' + for full list of formats that the colors are accepted in. + Returns: + output (VisImage): image object with visualizations. 
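+
+        Example (illustrative)::
+
+            v.overlay_instances(
+                boxes=np.array([[10.0, 10.0, 80.0, 60.0]]),  # one XYXY_ABS box
+                labels=["person 97%"],
+                assigned_colors=[(0.0, 0.6, 1.0)],
+            )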
+ """ + num_instances = 0 + if boxes is not None: + boxes = self._convert_boxes(boxes) + num_instances = len(boxes) + if masks is not None: + masks = self._convert_masks(masks) + if num_instances: + assert len(masks) == num_instances + else: + num_instances = len(masks) + if keypoints is not None: + if num_instances: + assert len(keypoints) == num_instances + else: + num_instances = len(keypoints) + keypoints = self._convert_keypoints(keypoints) + if labels is not None: + assert len(labels) == num_instances + if assigned_colors is None: + assigned_colors = [random_color(rgb=True, maximum=1) for _ in range(num_instances)] + if num_instances == 0: + return self.output + if boxes is not None and boxes.shape[1] == 5: + return self.overlay_rotated_instances( + boxes=boxes, labels=labels, assigned_colors=assigned_colors + ) + + # Display in largest to smallest order to reduce occlusion. + areas = None + if boxes is not None: + areas = np.prod(boxes[:, 2:] - boxes[:, :2], axis=1) + elif masks is not None: + areas = np.asarray([x.area() for x in masks]) + + if areas is not None: + sorted_idxs = np.argsort(-areas).tolist() + # Re-order overlapped instances in descending order. + boxes = boxes[sorted_idxs] if boxes is not None else None + labels = [labels[k] for k in sorted_idxs] if labels is not None else None + masks = [masks[idx] for idx in sorted_idxs] if masks is not None else None + assigned_colors = [assigned_colors[idx] for idx in sorted_idxs] + keypoints = keypoints[sorted_idxs] if keypoints is not None else None + + for i in range(num_instances): + color = assigned_colors[i] + if boxes is not None: + self.draw_box(boxes[i], edge_color=color) + + if masks is not None: + for segment in masks[i].polygons: + self.draw_polygon(segment.reshape(-1, 2), color, alpha=alpha) + + if labels is not None: + # first get a box + if boxes is not None: + x0, y0, x1, y1 = boxes[i] + text_pos = (x0, y0) # if drawing boxes, put text on the box corner. + horiz_align = "left" + elif masks is not None: + # skip small mask without polygon + if len(masks[i].polygons) == 0: + continue + + x0, y0, x1, y1 = masks[i].bbox() + + # draw text in the center (defined by median) when box is not drawn + # median is less sensitive to outliers. + text_pos = np.median(masks[i].mask.nonzero(), axis=1)[::-1] + horiz_align = "center" + else: + continue # drawing the box confidence for keypoints isn't very useful. + # for small objects, draw text at the side to avoid occlusion + instance_area = (y1 - y0) * (x1 - x0) + if ( + instance_area < _SMALL_OBJECT_AREA_THRESH * self.output.scale + or y1 - y0 < 40 * self.output.scale + ): + if y1 >= self.output.height - 5: + text_pos = (x1, y0) + else: + text_pos = (x0, y1) + + height_ratio = (y1 - y0) / np.sqrt(self.output.height * self.output.width) + lighter_color = self._change_color_brightness(color, brightness_factor=0.7) + font_size = ( + np.clip((height_ratio - 0.02) / 0.08 + 1, 1.2, 2) + * 0.5 + * self._default_font_size + ) + self.draw_text( + labels[i], + text_pos, + color=lighter_color, + horizontal_alignment=horiz_align, + font_size=font_size, + ) + + # draw keypoints + if keypoints is not None: + for keypoints_per_instance in keypoints: + self.draw_and_connect_keypoints(keypoints_per_instance) + + return self.output + + def overlay_rotated_instances(self, boxes=None, labels=None, assigned_colors=None): + """ + Args: + boxes (ndarray): an Nx5 numpy array of + (x_center, y_center, width, height, angle_degrees) format + for the N objects in a single image. 
+ labels (list[str]): the text to be displayed for each instance. + assigned_colors (list[matplotlib.colors]): a list of colors, where each color + corresponds to each mask or box in the image. Refer to 'matplotlib.colors' + for full list of formats that the colors are accepted in. + + Returns: + output (VisImage): image object with visualizations. + """ + num_instances = len(boxes) + + if assigned_colors is None: + assigned_colors = [random_color(rgb=True, maximum=1) for _ in range(num_instances)] + if num_instances == 0: + return self.output + + # Display in largest to smallest order to reduce occlusion. + if boxes is not None: + areas = boxes[:, 2] * boxes[:, 3] + + sorted_idxs = np.argsort(-areas).tolist() + # Re-order overlapped instances in descending order. + boxes = boxes[sorted_idxs] + labels = [labels[k] for k in sorted_idxs] if labels is not None else None + colors = [assigned_colors[idx] for idx in sorted_idxs] + + for i in range(num_instances): + self.draw_rotated_box_with_label( + boxes[i], edge_color=colors[i], label=labels[i] if labels is not None else None + ) + + return self.output + + def draw_and_connect_keypoints(self, keypoints): + """ + Draws keypoints of an instance and follows the rules for keypoint connections + to draw lines between appropriate keypoints. This follows color heuristics for + line color. + + Args: + keypoints (Tensor): a tensor of shape (K, 3), where K is the number of keypoints + and the last dimension corresponds to (x, y, probability). + + Returns: + output (VisImage): image object with visualizations. + """ + visible = {} + keypoint_names = self.metadata.get("keypoint_names") + for idx, keypoint in enumerate(keypoints): + + # draw keypoint + x, y, prob = keypoint + if prob > self.keypoint_threshold: + self.draw_circle((x, y), color=_RED) + if keypoint_names: + keypoint_name = keypoint_names[idx] + visible[keypoint_name] = (x, y) + + if self.metadata.get("keypoint_connection_rules"): + for kp0, kp1, color in self.metadata.keypoint_connection_rules: + if kp0 in visible and kp1 in visible: + x0, y0 = visible[kp0] + x1, y1 = visible[kp1] + color = tuple(x / 255.0 for x in color) + self.draw_line([x0, x1], [y0, y1], color=color) + + # draw lines from nose to mid-shoulder and mid-shoulder to mid-hip + # Note that this strategy is specific to person keypoints. + # For other keypoints, it should just do nothing + try: + ls_x, ls_y = visible["left_shoulder"] + rs_x, rs_y = visible["right_shoulder"] + mid_shoulder_x, mid_shoulder_y = (ls_x + rs_x) / 2, (ls_y + rs_y) / 2 + except KeyError: + pass + else: + # draw line from nose to mid-shoulder + nose_x, nose_y = visible.get("nose", (None, None)) + if nose_x is not None: + self.draw_line([nose_x, mid_shoulder_x], [nose_y, mid_shoulder_y], color=_RED) + + try: + # draw line from mid-shoulder to mid-hip + lh_x, lh_y = visible["left_hip"] + rh_x, rh_y = visible["right_hip"] + except KeyError: + pass + else: + mid_hip_x, mid_hip_y = (lh_x + rh_x) / 2, (lh_y + rh_y) / 2 + self.draw_line([mid_hip_x, mid_shoulder_x], [mid_hip_y, mid_shoulder_y], color=_RED) + return self.output + + """ + Primitive drawing functions: + """ + + def draw_text( + self, + text, + position, + *, + font_size=None, + color="g", + horizontal_alignment="center", + rotation=0, + ): + """ + Args: + text (str): class label + position (tuple): a tuple of the x and y coordinates to place text on image. + font_size (int, optional): font of the text. If not provided, a font size + proportional to the image width is calculated and used. 
+ color: color of the text. Refer to `matplotlib.colors` for a full list
+ of formats that are accepted.
+ horizontal_alignment (str): see `matplotlib.text.Text`
+ rotation: rotation angle in degrees CCW
+
+ Returns:
+ output (VisImage): image object with text drawn.
+ """
+ if not font_size:
+ font_size = self._default_font_size
+
+ # since the text background is dark, we don't want the text to be dark
+ color = np.maximum(list(mplc.to_rgb(color)), 0.2)
+ color[np.argmax(color)] = max(0.8, np.max(color))
+
+ x, y = position
+ self.output.ax.text(
+ x,
+ y,
+ text,
+ size=font_size * self.output.scale,
+ family="sans-serif",
+ bbox={"facecolor": "black", "alpha": 0.8, "pad": 0.7, "edgecolor": "none"},
+ verticalalignment="top",
+ horizontalalignment=horizontal_alignment,
+ color=color,
+ zorder=10,
+ rotation=rotation,
+ )
+ return self.output
+
+ def draw_box(self, box_coord, alpha=0.5, edge_color="g", line_style="-"):
+ """
+ Args:
+ box_coord (tuple): a tuple containing x0, y0, x1, y1 coordinates, where (x0, y0)
+ is the box's top-left corner and (x1, y1) is the box's
+ bottom-right corner.
+ alpha (float): blending coefficient. Smaller values lead to a more transparent box.
+ edge_color: color of the outline of the box. Refer to `matplotlib.colors`
+ for a full list of formats that are accepted.
+ line_style (string): the string to use to create the outline of the boxes.
+
+ Returns:
+ output (VisImage): image object with box drawn.
+ """
+ x0, y0, x1, y1 = box_coord
+ width = x1 - x0
+ height = y1 - y0
+
+ linewidth = max(self._default_font_size / 4, 1)
+
+ self.output.ax.add_patch(
+ mpl.patches.Rectangle(
+ (x0, y0),
+ width,
+ height,
+ fill=False,
+ edgecolor=edge_color,
+ linewidth=linewidth * self.output.scale,
+ alpha=alpha,
+ linestyle=line_style,
+ )
+ )
+ return self.output
+
+ def draw_rotated_box_with_label(
+ self, rotated_box, alpha=0.5, edge_color="g", line_style="-", label=None
+ ):
+ """
+ Draw a rotated box with a label at its top-left corner.
+
+ Args:
+ rotated_box (tuple): a tuple containing (cnt_x, cnt_y, w, h, angle),
+ where cnt_x and cnt_y are the center coordinates of the box.
+ w and h are the width and height of the box. angle represents how
+ many degrees the box is rotated CCW with regard to the 0-degree box.
+ alpha (float): blending coefficient. Smaller values lead to a more transparent box.
+ edge_color: color of the outline of the box. Refer to `matplotlib.colors`
+ for a full list of formats that are accepted.
+ line_style (string): the string to use to create the outline of the boxes.
+ label (string): label for the rotated box. It will not be rendered when set to None.
+
+ Returns:
+ output (VisImage): image object with box drawn.
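+
+ Example (illustrative only; assumes ``vis`` is an existing ``Visualizer`` instance):
+
+ >>> # a 100x40 box centered at (200, 150), rotated 30 degrees CCW
+ >>> vis.draw_rotated_box_with_label(
+ ... (200, 150, 100, 40, 30), edge_color="r", label="car"
+ ... )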
+ """ + cnt_x, cnt_y, w, h, angle = rotated_box + area = w * h + # use thinner lines when the box is small + linewidth = self._default_font_size / ( + 6 if area < _SMALL_OBJECT_AREA_THRESH * self.output.scale else 3 + ) + + theta = angle * math.pi / 180.0 + c = math.cos(theta) + s = math.sin(theta) + rect = [(-w / 2, h / 2), (-w / 2, -h / 2), (w / 2, -h / 2), (w / 2, h / 2)] + # x: left->right ; y: top->down + rotated_rect = [(s * yy + c * xx + cnt_x, c * yy - s * xx + cnt_y) for (xx, yy) in rect] + for k in range(4): + j = (k + 1) % 4 + self.draw_line( + [rotated_rect[k][0], rotated_rect[j][0]], + [rotated_rect[k][1], rotated_rect[j][1]], + color=edge_color, + linestyle="--" if k == 1 else line_style, + linewidth=linewidth, + ) + + if label is not None: + text_pos = rotated_rect[1] # topleft corner + + height_ratio = h / np.sqrt(self.output.height * self.output.width) + label_color = self._change_color_brightness(edge_color, brightness_factor=0.7) + font_size = ( + np.clip((height_ratio - 0.02) / 0.08 + 1, 1.2, 2) * 0.5 * self._default_font_size + ) + self.draw_text(label, text_pos, color=label_color, font_size=font_size, rotation=angle) + + return self.output + + def draw_circle(self, circle_coord, color, radius=3): + """ + Args: + circle_coord (list(int) or tuple(int)): contains the x and y coordinates + of the center of the circle. + color: color of the polygon. Refer to `matplotlib.colors` for a full list of + formats that are accepted. + radius (int): radius of the circle. + + Returns: + output (VisImage): image object with box drawn. + """ + x, y = circle_coord + self.output.ax.add_patch( + mpl.patches.Circle(circle_coord, radius=radius, fill=True, color=color) + ) + return self.output + + def draw_line(self, x_data, y_data, color, linestyle="-", linewidth=None): + """ + Args: + x_data (list[int]): a list containing x values of all the points being drawn. + Length of list should match the length of y_data. + y_data (list[int]): a list containing y values of all the points being drawn. + Length of list should match the length of x_data. + color: color of the line. Refer to `matplotlib.colors` for a full list of + formats that are accepted. + linestyle: style of the line. Refer to `matplotlib.lines.Line2D` + for a full list of formats that are accepted. + linewidth (float or None): width of the line. When it's None, + a default value will be computed and used. + + Returns: + output (VisImage): image object with line drawn. + """ + if linewidth is None: + linewidth = self._default_font_size / 3 + linewidth = max(linewidth, 1) + self.output.ax.add_line( + mpl.lines.Line2D( + x_data, + y_data, + linewidth=linewidth * self.output.scale, + color=color, + linestyle=linestyle, + ) + ) + return self.output + + def draw_binary_mask( + self, binary_mask, color=None, *, edge_color=None, text=None, alpha=0.5, area_threshold=10 + ): + """ + Args: + binary_mask (ndarray): numpy array of shape (H, W), where H is the image height and + W is the image width. Each value in the array is either a 0 or 1 value of uint8 + type. + color: color of the mask. Refer to `matplotlib.colors` for a full list of + formats that are accepted. If None, will pick a random color. + edge_color: color of the polygon edges. Refer to `matplotlib.colors` for a + full list of formats that are accepted. + text (str): if None, will be drawn on the object + alpha (float): blending efficient. Smaller values lead to more transparent masks. + area_threshold (float): a connected component smaller than this area will not be shown. 
+
+ Returns:
+ output (VisImage): image object with mask drawn.
+ """
+ if color is None:
+ color = random_color(rgb=True, maximum=1)
+ color = mplc.to_rgb(color)
+
+ has_valid_segment = False
+ binary_mask = binary_mask.astype("uint8") # opencv needs uint8
+ mask = GenericMask(binary_mask, self.output.height, self.output.width)
+ shape2d = (binary_mask.shape[0], binary_mask.shape[1])
+
+ if not mask.has_holes:
+ # draw polygons for regular masks
+ for segment in mask.polygons:
+ area = mask_util.area(mask_util.frPyObjects([segment], shape2d[0], shape2d[1]))
+ if area < (area_threshold or 0):
+ continue
+ has_valid_segment = True
+ segment = segment.reshape(-1, 2)
+ self.draw_polygon(segment, color=color, edge_color=edge_color, alpha=alpha)
+ else:
+ # TODO: Use Path/PathPatch to draw vector graphics:
+ # https://stackoverflow.com/questions/8919719/how-to-plot-a-complex-polygon
+ rgba = np.zeros(shape2d + (4,), dtype="float32")
+ rgba[:, :, :3] = color
+ rgba[:, :, 3] = (mask.mask == 1).astype("float32") * alpha
+ has_valid_segment = True
+ self.output.ax.imshow(rgba, extent=(0, self.output.width, self.output.height, 0))
+
+ if text is not None and has_valid_segment:
+ lighter_color = self._change_color_brightness(color, brightness_factor=0.7)
+ self._draw_text_in_mask(binary_mask, text, lighter_color)
+ return self.output
+
+ def draw_soft_mask(self, soft_mask, color=None, *, text=None, alpha=0.5):
+ """
+ Args:
+ soft_mask (ndarray): float array of shape (H, W), each value in [0, 1].
+ color: color of the mask. Refer to `matplotlib.colors` for a full list of
+ formats that are accepted. If None, will pick a random color.
+ text (str): if not None, will be drawn on the object
+ alpha (float): blending coefficient. Smaller values lead to more transparent masks.
+
+ Returns:
+ output (VisImage): image object with mask drawn.
+ """
+ if color is None:
+ color = random_color(rgb=True, maximum=1)
+ color = mplc.to_rgb(color)
+
+ shape2d = (soft_mask.shape[0], soft_mask.shape[1])
+ rgba = np.zeros(shape2d + (4,), dtype="float32")
+ rgba[:, :, :3] = color
+ rgba[:, :, 3] = soft_mask * alpha
+ self.output.ax.imshow(rgba, extent=(0, self.output.width, self.output.height, 0))
+
+ if text is not None:
+ lighter_color = self._change_color_brightness(color, brightness_factor=0.7)
+ binary_mask = (soft_mask > 0.5).astype("uint8")
+ self._draw_text_in_mask(binary_mask, text, lighter_color)
+ return self.output
+
+ def draw_polygon(self, segment, color, edge_color=None, alpha=0.5):
+ """
+ Args:
+ segment: numpy array of shape Nx2, containing all the points in the polygon.
+ color: color of the polygon. Refer to `matplotlib.colors` for a full list of
+ formats that are accepted.
+ edge_color: color of the polygon edges. Refer to `matplotlib.colors` for a
+ full list of formats that are accepted. If not provided, a darker shade
+ of the polygon color will be used instead.
+ alpha (float): blending coefficient. Smaller values lead to more transparent polygons.
+
+ Returns:
+ output (VisImage): image object with polygon drawn.
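+
+ Example (illustrative only; assumes ``vis`` is an existing ``Visualizer`` instance):
+
+ >>> import numpy as np
+ >>> triangle = np.array([[10, 10], [60, 10], [35, 50]])
+ >>> vis.draw_polygon(triangle, color="b", alpha=0.5)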
+ """ + if edge_color is None: + # make edge color darker than the polygon color + if alpha > 0.8: + edge_color = self._change_color_brightness(color, brightness_factor=-0.7) + else: + edge_color = color + edge_color = mplc.to_rgb(edge_color) + (1,) + + polygon = mpl.patches.Polygon( + segment, + fill=True, + facecolor=mplc.to_rgb(color) + (alpha,), + edgecolor=edge_color, + linewidth=max(self._default_font_size // 15 * self.output.scale, 1), + ) + self.output.ax.add_patch(polygon) + return self.output + + """ + Internal methods: + """ + + def _jitter(self, color): + """ + Randomly modifies given color to produce a slightly different color than the color given. + + Args: + color (tuple[double]): a tuple of 3 elements, containing the RGB values of the color + picked. The values in the list are in the [0.0, 1.0] range. + + Returns: + jittered_color (tuple[double]): a tuple of 3 elements, containing the RGB values of the + color after being jittered. The values in the list are in the [0.0, 1.0] range. + """ + color = mplc.to_rgb(color) + vec = np.random.rand(3) + # better to do it in another color space + vec = vec / np.linalg.norm(vec) * 0.5 + res = np.clip(vec + color, 0, 1) + return tuple(res) + + def _create_grayscale_image(self, mask=None): + """ + Create a grayscale version of the original image. + The colors in masked area, if given, will be kept. + """ + img_bw = self.img.astype("f4").mean(axis=2) + img_bw = np.stack([img_bw] * 3, axis=2) + if mask is not None: + img_bw[mask] = self.img[mask] + return img_bw + + def _change_color_brightness(self, color, brightness_factor): + """ + Depending on the brightness_factor, gives a lighter or darker color i.e. a color with + less or more saturation than the original color. + + Args: + color: color of the polygon. Refer to `matplotlib.colors` for a full list of + formats that are accepted. + brightness_factor (float): a value in [-1.0, 1.0] range. A lightness factor of + 0 will correspond to no change, a factor in [-1.0, 0) range will result in + a darker color and a factor in (0, 1.0] range will result in a lighter color. + + Returns: + modified_color (tuple[double]): a tuple containing the RGB values of the + modified color. Each value in the tuple is in the [0.0, 1.0] range. + """ + assert brightness_factor >= -1.0 and brightness_factor <= 1.0 + color = mplc.to_rgb(color) + polygon_color = colorsys.rgb_to_hls(*mplc.to_rgb(color)) + modified_lightness = polygon_color[1] + (brightness_factor * polygon_color[1]) + modified_lightness = 0.0 if modified_lightness < 0.0 else modified_lightness + modified_lightness = 1.0 if modified_lightness > 1.0 else modified_lightness + modified_color = colorsys.hls_to_rgb(polygon_color[0], modified_lightness, polygon_color[2]) + return modified_color + + def _convert_boxes(self, boxes): + """ + Convert different format of boxes to an NxB array, where B = 4 or 5 is the box dimension. + """ + if isinstance(boxes, Boxes) or isinstance(boxes, RotatedBoxes): + return boxes.tensor.detach().numpy() + else: + return np.asarray(boxes) + + def _convert_masks(self, masks_or_polygons): + """ + Convert different format of masks or polygons to a tuple of masks and polygons. 
+ + Returns: + list[GenericMask]: + """ + + m = masks_or_polygons + if isinstance(m, PolygonMasks): + m = m.polygons + if isinstance(m, BitMasks): + m = m.tensor.numpy() + if isinstance(m, torch.Tensor): + m = m.numpy() + ret = [] + for x in m: + if isinstance(x, GenericMask): + ret.append(x) + else: + ret.append(GenericMask(x, self.output.height, self.output.width)) + return ret + + def _draw_text_in_mask(self, binary_mask, text, color): + """ + Find proper places to draw text given a binary mask. + """ + # TODO sometimes drawn on wrong objects. the heuristics here can improve. + _num_cc, cc_labels, stats, centroids = cv2.connectedComponentsWithStats(binary_mask, 8) + if stats[1:, -1].size == 0: + return + largest_component_id = np.argmax(stats[1:, -1]) + 1 + + # draw text on the largest component, as well as other very large components. + for cid in range(1, _num_cc): + if cid == largest_component_id or stats[cid, -1] > _LARGE_MASK_AREA_THRESH: + # median is more stable than centroid + # center = centroids[largest_component_id] + center = np.median((cc_labels == cid).nonzero(), axis=1)[::-1] + self.draw_text(text, center, color=color) + + def _convert_keypoints(self, keypoints): + if isinstance(keypoints, Keypoints): + keypoints = keypoints.tensor + keypoints = np.asarray(keypoints) + return keypoints + + def get_output(self): + """ + Returns: + output (VisImage): the image output containing the visualizations added + to the image. + """ + return self.output diff --git a/motion-gan-pipeline/preprocessing/third/mmflow/flownet2CS/flownet2cs_8x1_sfine_flyingthings3d_subset_384x768.pth b/motion-gan-pipeline/preprocessing/third/mmflow/flownet2CS/flownet2cs_8x1_sfine_flyingthings3d_subset_384x768.pth new file mode 100644 index 0000000..cf64f7f --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/mmflow/flownet2CS/flownet2cs_8x1_sfine_flyingthings3d_subset_384x768.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f97c353079f97b7801b8b4c6984703f0f0788be5543b974863e52248b771118f +size 311523031 diff --git a/motion-gan-pipeline/preprocessing/third/mmflow/flownet2CS/flownet2cs_8x1_sfine_flyingthings3d_subset_384x768.py b/motion-gan-pipeline/preprocessing/third/mmflow/flownet2CS/flownet2cs_8x1_sfine_flyingthings3d_subset_384x768.py new file mode 100644 index 0000000..4433c09 --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/mmflow/flownet2CS/flownet2cs_8x1_sfine_flyingthings3d_subset_384x768.py @@ -0,0 +1,466 @@ +FlowNetC_checkpoint = 'work_dir/upload_ready/flownet/flownetc_8x1_sfine_flyingthings3d_subset_384x768.pth' +model = dict( + type='FlowNetCSS', + flownetC=dict( + freeze_net=True, + type='FlowNetC', + encoder=dict( + type='FlowNetEncoder', + in_channels=3, + pyramid_levels=['level1', 'level2', 'level3'], + out_channels=(64, 128, 256), + kernel_size=(7, 5, 5), + strides=(2, 2, 2), + num_convs=(1, 1, 1), + dilations=(1, 1, 1), + act_cfg=dict(type='LeakyReLU', negative_slope=0.1)), + corr_level='level3', + corr_encoder=dict( + type='CorrEncoder', + in_channels=473, + pyramid_levels=['level3', 'level4', 'level5', 'level6'], + kernel_size=(3, 3, 3, 3), + num_convs=(1, 2, 2, 2), + out_channels=(256, 512, 512, 1024), + redir_in_channels=256, + redir_channels=32, + strides=(1, 2, 2, 2), + dilations=(1, 1, 1, 1), + corr_cfg=dict( + type='Correlation', + kernel_size=1, + max_displacement=10, + stride=1, + padding=0, + dilation_patch=2), + scaled=False, + conv_cfg=None, + norm_cfg=None, + act_cfg=dict(type='LeakyReLU', negative_slope=0.1)), + decoder=dict( + 
type='FlowNetCDecoder', + in_channels=dict( + level6=1024, level5=1026, level4=770, level3=386, level2=194), + out_channels=dict(level6=512, level5=256, level4=128, level3=64), + deconv_bias=True, + pred_bias=True, + upsample_bias=True, + norm_cfg=None, + act_cfg=dict(type='LeakyReLU', negative_slope=0.1)), + init_cfg=dict( + type='Pretrained', + checkpoint= + 'work_dir/upload_ready/flownet/flownetc_8x1_sfine_flyingthings3d_subset_384x768.pth' + ), + train_cfg=dict(), + test_cfg=dict()), + flownetS1=dict( + type='FlowNetS', + encoder=dict( + type='FlowNetEncoder', + in_channels=12, + pyramid_levels=[ + 'level1', 'level2', 'level3', 'level4', 'level5', 'level6' + ], + num_convs=(1, 1, 2, 2, 2, 2), + out_channels=(64, 128, 256, 512, 512, 1024), + kernel_size=(7, 5, (5, 3), 3, 3, 3), + strides=(2, 2, 2, 2, 2, 2), + dilations=(1, 1, 1, 1, 1, 1), + act_cfg=dict(type='LeakyReLU', negative_slope=0.1)), + decoder=dict( + type='FlowNetSDecoder', + in_channels=dict( + level6=1024, level5=1026, level4=770, level3=386, level2=194), + out_channels=dict(level6=512, level5=256, level4=128, level3=64), + deconv_bias=True, + pred_bias=True, + upsample_bias=False, + act_cfg=dict(type='LeakyReLU', negative_slope=0.1), + flow_loss=dict( + type='MultiLevelEPE', + p=2, + reduction='sum', + weights=dict( + level2=0.005, + level3=0.01, + level4=0.02, + level5=0.08, + level6=0.32))), + init_cfg=[ + dict( + type='Kaiming', + layer=['Conv2d', 'ConvTranspose2d'], + a=0.1, + mode='fan_in', + nonlinearity='leaky_relu', + bias=0), + dict(type='Constant', layer='BatchNorm2d', val=1, bias=0) + ], + train_cfg=dict(), + test_cfg=dict()), + link_cfg=dict(scale_factor=4, mode='bilinear'), + out_level='level2') +test_dataset_type = 'Sintel' +test_data_root = 'data/Sintel' +img_norm_cfg = dict( + mean=[0.0, 0.0, 0.0], std=[255.0, 255.0, 255.0], to_rgb=False) +global_transform = dict( + translates=(0.05, 0.05), + zoom=(1.0, 1.5), + shear=(0.86, 1.16), + rotate=(-10.0, 10.0)) +relative_transform = dict( + translates=(0.00375, 0.00375), + zoom=(0.985, 1.015), + shear=(1.0, 1.0), + rotate=(-1.0, 1.0)) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict( + type='ColorJitter', + brightness=0.5, + contrast=0.5, + saturation=0.5, + hue=0.5), + dict(type='RandomGamma', gamma_range=(0.7, 1.5)), + dict( + type='Normalize', + mean=[0.0, 0.0, 0.0], + std=[255.0, 255.0, 255.0], + to_rgb=False), + dict(type='GaussianNoise', sigma_range=(0, 0.04), clamp_range=(0.0, 1.0)), + dict(type='RandomFlip', prob=0.5, direction='horizontal'), + dict(type='RandomFlip', prob=0.5, direction='vertical'), + dict( + type='RandomAffine', + global_transform=dict( + translates=(0.05, 0.05), + zoom=(1.0, 1.5), + shear=(0.86, 1.16), + rotate=(-10.0, 10.0)), + relative_transform=dict( + translates=(0.00375, 0.00375), + zoom=(0.985, 1.015), + shear=(1.0, 1.0), + rotate=(-1.0, 1.0))), + dict(type='RandomCrop', crop_size=(384, 768)), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['imgs', 'flow_gt'], + meta_keys=[ + 'img_fields', 'ann_fields', 'filename1', 'filename2', + 'ori_filename1', 'ori_filename2', 'filename_flow', + 'ori_filename_flow', 'ori_shape', 'img_shape', 'img_norm_cfg' + ]) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict(type='InputResize', exponent=6), + dict( + type='Normalize', + mean=[0.0, 0.0, 0.0], + std=[255.0, 255.0, 255.0], + to_rgb=False), + dict(type='TestFormatBundle'), + dict( + type='Collect', + keys=['imgs'], + meta_keys=[ + 
'flow_gt', 'filename1', 'filename2', 'ori_filename1', + 'ori_filename2', 'ori_shape', 'img_shape', 'img_norm_cfg', + 'scale_factor', 'pad_shape' + ]) +] +flyingthings3d_subset_train = dict( + type='FlyingThings3DSubset', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict( + type='ColorJitter', + brightness=0.5, + contrast=0.5, + saturation=0.5, + hue=0.5), + dict(type='RandomGamma', gamma_range=(0.7, 1.5)), + dict( + type='Normalize', + mean=[0.0, 0.0, 0.0], + std=[255.0, 255.0, 255.0], + to_rgb=False), + dict( + type='GaussianNoise', + sigma_range=(0, 0.04), + clamp_range=(0.0, 1.0)), + dict(type='RandomFlip', prob=0.5, direction='horizontal'), + dict(type='RandomFlip', prob=0.5, direction='vertical'), + dict( + type='RandomAffine', + global_transform=dict( + translates=(0.05, 0.05), + zoom=(1.0, 1.5), + shear=(0.86, 1.16), + rotate=(-10.0, 10.0)), + relative_transform=dict( + translates=(0.00375, 0.00375), + zoom=(0.985, 1.015), + shear=(1.0, 1.0), + rotate=(-1.0, 1.0))), + dict(type='RandomCrop', crop_size=(384, 768)), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['imgs', 'flow_gt'], + meta_keys=[ + 'img_fields', 'ann_fields', 'filename1', 'filename2', + 'ori_filename1', 'ori_filename2', 'filename_flow', + 'ori_filename_flow', 'ori_shape', 'img_shape', 'img_norm_cfg' + ]) + ], + data_root='data/FlyingThings3D_subset', + test_mode=False, + direction='forward', + scene='left') +test_data_cleanpass = dict( + type='Sintel', + data_root='data/Sintel', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict(type='InputResize', exponent=6), + dict( + type='Normalize', + mean=[0.0, 0.0, 0.0], + std=[255.0, 255.0, 255.0], + to_rgb=False), + dict(type='TestFormatBundle'), + dict( + type='Collect', + keys=['imgs'], + meta_keys=[ + 'flow_gt', 'filename1', 'filename2', 'ori_filename1', + 'ori_filename2', 'ori_shape', 'img_shape', 'img_norm_cfg', + 'scale_factor', 'pad_shape' + ]) + ], + test_mode=True, + pass_style='clean') +test_data_finalpass = dict( + type='Sintel', + data_root='data/Sintel', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict(type='InputResize', exponent=6), + dict( + type='Normalize', + mean=[0.0, 0.0, 0.0], + std=[255.0, 255.0, 255.0], + to_rgb=False), + dict(type='TestFormatBundle'), + dict( + type='Collect', + keys=['imgs'], + meta_keys=[ + 'flow_gt', 'filename1', 'filename2', 'ori_filename1', + 'ori_filename2', 'ori_shape', 'img_shape', 'img_norm_cfg', + 'scale_factor', 'pad_shape' + ]) + ], + test_mode=True, + pass_style='final') +data = dict( + train_dataloader=dict( + samples_per_gpu=1, + workers_per_gpu=5, + drop_last=True, + persistent_workers=True), + val_dataloader=dict(samples_per_gpu=1, workers_per_gpu=5, shuffle=False), + test_dataloader=dict(samples_per_gpu=1, workers_per_gpu=5, shuffle=False), + train=dict( + type='FlyingThings3DSubset', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict( + type='ColorJitter', + brightness=0.5, + contrast=0.5, + saturation=0.5, + hue=0.5), + dict(type='RandomGamma', gamma_range=(0.7, 1.5)), + dict( + type='Normalize', + mean=[0.0, 0.0, 0.0], + std=[255.0, 255.0, 255.0], + to_rgb=False), + dict( + type='GaussianNoise', + sigma_range=(0, 0.04), + clamp_range=(0.0, 1.0)), + dict(type='RandomFlip', prob=0.5, direction='horizontal'), + dict(type='RandomFlip', prob=0.5, direction='vertical'), + dict( + type='RandomAffine', + global_transform=dict( + translates=(0.05, 0.05), + 
zoom=(1.0, 1.5), + shear=(0.86, 1.16), + rotate=(-10.0, 10.0)), + relative_transform=dict( + translates=(0.00375, 0.00375), + zoom=(0.985, 1.015), + shear=(1.0, 1.0), + rotate=(-1.0, 1.0))), + dict(type='RandomCrop', crop_size=(384, 768)), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['imgs', 'flow_gt'], + meta_keys=[ + 'img_fields', 'ann_fields', 'filename1', 'filename2', + 'ori_filename1', 'ori_filename2', 'filename_flow', + 'ori_filename_flow', 'ori_shape', 'img_shape', + 'img_norm_cfg' + ]) + ], + data_root='data/FlyingThings3D_subset', + test_mode=False, + direction='forward', + scene='left'), + val=dict( + type='ConcatDataset', + datasets=[ + dict( + type='Sintel', + data_root='data/Sintel', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict(type='InputResize', exponent=6), + dict( + type='Normalize', + mean=[0.0, 0.0, 0.0], + std=[255.0, 255.0, 255.0], + to_rgb=False), + dict(type='TestFormatBundle'), + dict( + type='Collect', + keys=['imgs'], + meta_keys=[ + 'flow_gt', 'filename1', 'filename2', + 'ori_filename1', 'ori_filename2', 'ori_shape', + 'img_shape', 'img_norm_cfg', 'scale_factor', + 'pad_shape' + ]) + ], + test_mode=True, + pass_style='clean'), + dict( + type='Sintel', + data_root='data/Sintel', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict(type='InputResize', exponent=6), + dict( + type='Normalize', + mean=[0.0, 0.0, 0.0], + std=[255.0, 255.0, 255.0], + to_rgb=False), + dict(type='TestFormatBundle'), + dict( + type='Collect', + keys=['imgs'], + meta_keys=[ + 'flow_gt', 'filename1', 'filename2', + 'ori_filename1', 'ori_filename2', 'ori_shape', + 'img_shape', 'img_norm_cfg', 'scale_factor', + 'pad_shape' + ]) + ], + test_mode=True, + pass_style='final') + ], + separate_eval=True), + test=dict( + type='ConcatDataset', + datasets=[ + dict( + type='Sintel', + data_root='data/Sintel', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict(type='InputResize', exponent=6), + dict( + type='Normalize', + mean=[0.0, 0.0, 0.0], + std=[255.0, 255.0, 255.0], + to_rgb=False), + dict(type='TestFormatBundle'), + dict( + type='Collect', + keys=['imgs'], + meta_keys=[ + 'flow_gt', 'filename1', 'filename2', + 'ori_filename1', 'ori_filename2', 'ori_shape', + 'img_shape', 'img_norm_cfg', 'scale_factor', + 'pad_shape' + ]) + ], + test_mode=True, + pass_style='clean'), + dict( + type='Sintel', + data_root='data/Sintel', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict(type='InputResize', exponent=6), + dict( + type='Normalize', + mean=[0.0, 0.0, 0.0], + std=[255.0, 255.0, 255.0], + to_rgb=False), + dict(type='TestFormatBundle'), + dict( + type='Collect', + keys=['imgs'], + meta_keys=[ + 'flow_gt', 'filename1', 'filename2', + 'ori_filename1', 'ori_filename2', 'ori_shape', + 'img_shape', 'img_norm_cfg', 'scale_factor', + 'pad_shape' + ]) + ], + test_mode=True, + pass_style='final') + ], + separate_eval=True)) +optimizer = dict( + type='Adam', lr=1e-05, weight_decay=0.0004, betas=(0.9, 0.999)) +optimizer_config = dict(grad_clip=None) +lr_config = dict( + policy='step', + by_epoch=False, + gamma=0.5, + step=[200000, 300000, 400000, 500000]) +runner = dict(type='IterBasedRunner', max_iters=600000) +checkpoint_config = dict(by_epoch=False, interval=50000) +evaluation = dict(interval=50000, metric='EPE') +log_config = dict( + interval=50, + hooks=[dict(type='TextLoggerHook'), + dict(type='TensorboardLoggerHook')]) +dist_params = 
dict(backend='nccl') +log_level = 'INFO' +load_from = 'work_dir/cs_c2/latest.pth' +resume_from = None +workflow = [('train', 1)] +work_dir = 'work_dir/cs_t2' +gpu_ids = range(0, 1) diff --git a/motion-gan-pipeline/preprocessing/third/mmflow/pwcnet/pwcnet_ft_4x1_300k_sintel_final_384x768.pth b/motion-gan-pipeline/preprocessing/third/mmflow/pwcnet/pwcnet_ft_4x1_300k_sintel_final_384x768.pth new file mode 100644 index 0000000..1385a0e --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/mmflow/pwcnet/pwcnet_ft_4x1_300k_sintel_final_384x768.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:48b53f3900969f13bd522e98c7ac8990850daa8a59312f01ed41a4cc31c0422a +size 112646841 diff --git a/motion-gan-pipeline/preprocessing/third/mmflow/pwcnet/pwcnet_ft_4x1_300k_sintel_final_384x768.py b/motion-gan-pipeline/preprocessing/third/mmflow/pwcnet/pwcnet_ft_4x1_300k_sintel_final_384x768.py new file mode 100644 index 0000000..f0360fd --- /dev/null +++ b/motion-gan-pipeline/preprocessing/third/mmflow/pwcnet/pwcnet_ft_4x1_300k_sintel_final_384x768.py @@ -0,0 +1,409 @@ +dataset_type = 'Sintel' +data_root = 'data/Sintel' +img_norm_cfg = dict( + mean=[0.0, 0.0, 0.0], std=[255.0, 255.0, 255.0], to_rgb=False) +crop_size = (384, 768) +global_transform = dict( + translates=(0.05, 0.05), + zoom=(1.0, 1.5), + shear=(0.86, 1.16), + rotate=(-10.0, 10.0)) +relative_transform = dict( + translates=(0.00375, 0.00375), + zoom=(0.985, 1.015), + shear=(1.0, 1.0), + rotate=(-1.0, 1.0)) +train_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_occ=True), + dict( + type='ColorJitter', + brightness=0.5, + contrast=0.5, + saturation=0.5, + hue=0.5), + dict(type='RandomGamma', gamma_range=(0.7, 1.5)), + dict( + type='Normalize', + mean=[0.0, 0.0, 0.0], + std=[255.0, 255.0, 255.0], + to_rgb=False), + dict(type='RandomFlip', prob=0.5, direction='horizontal'), + dict(type='RandomFlip', prob=0.5, direction='vertical'), + dict( + type='RandomAffine', + global_transform=dict( + translates=(0.05, 0.05), + zoom=(1.0, 1.5), + shear=(0.86, 1.16), + rotate=(-10.0, 10.0)), + relative_transform=dict( + translates=(0.00375, 0.00375), + zoom=(0.985, 1.015), + shear=(1.0, 1.0), + rotate=(-1.0, 1.0))), + dict(type='RandomCrop', crop_size=(384, 768)), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['imgs', 'flow_gt'], + meta_keys=[ + 'img_fields', 'ann_fields', 'filename1', 'filename2', + 'ori_filename1', 'ori_filename2', 'filename_flow', + 'ori_filename_flow', 'ori_shape', 'img_shape', 'img_norm_cfg' + ]) +] +test_pipeline = [ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict(type='InputResize', exponent=6), + dict( + type='Normalize', + mean=[0.0, 0.0, 0.0], + std=[255.0, 255.0, 255.0], + to_rgb=False), + dict(type='TestFormatBundle'), + dict( + type='Collect', + keys=['imgs'], + meta_keys=[ + 'flow_gt', 'filename1', 'filename2', 'ori_filename1', + 'ori_filename2', 'ori_shape', 'img_shape', 'img_norm_cfg', + 'scale_factor', 'pad_shape' + ]) +] +sintel_final_train = dict( + type='Sintel', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_occ=True), + dict( + type='ColorJitter', + brightness=0.5, + contrast=0.5, + saturation=0.5, + hue=0.5), + dict(type='RandomGamma', gamma_range=(0.7, 1.5)), + dict( + type='Normalize', + mean=[0.0, 0.0, 0.0], + std=[255.0, 255.0, 255.0], + to_rgb=False), + dict(type='RandomFlip', prob=0.5, direction='horizontal'), + dict(type='RandomFlip', prob=0.5, direction='vertical'), + 
dict( + type='RandomAffine', + global_transform=dict( + translates=(0.05, 0.05), + zoom=(1.0, 1.5), + shear=(0.86, 1.16), + rotate=(-10.0, 10.0)), + relative_transform=dict( + translates=(0.00375, 0.00375), + zoom=(0.985, 1.015), + shear=(1.0, 1.0), + rotate=(-1.0, 1.0))), + dict(type='RandomCrop', crop_size=(384, 768)), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['imgs', 'flow_gt'], + meta_keys=[ + 'img_fields', 'ann_fields', 'filename1', 'filename2', + 'ori_filename1', 'ori_filename2', 'filename_flow', + 'ori_filename_flow', 'ori_shape', 'img_shape', 'img_norm_cfg' + ]) + ], + data_root='data/Sintel', + test_mode=False, + pass_style='final') +sintel_clean_test = dict( + type='Sintel', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict(type='InputResize', exponent=6), + dict( + type='Normalize', + mean=[0.0, 0.0, 0.0], + std=[255.0, 255.0, 255.0], + to_rgb=False), + dict(type='TestFormatBundle'), + dict( + type='Collect', + keys=['imgs'], + meta_keys=[ + 'flow_gt', 'filename1', 'filename2', 'ori_filename1', + 'ori_filename2', 'ori_shape', 'img_shape', 'img_norm_cfg', + 'scale_factor', 'pad_shape' + ]) + ], + data_root='data/Sintel', + test_mode=True, + pass_style='clean') +sintel_final_test = dict( + type='Sintel', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict(type='InputResize', exponent=6), + dict( + type='Normalize', + mean=[0.0, 0.0, 0.0], + std=[255.0, 255.0, 255.0], + to_rgb=False), + dict(type='TestFormatBundle'), + dict( + type='Collect', + keys=['imgs'], + meta_keys=[ + 'flow_gt', 'filename1', 'filename2', 'ori_filename1', + 'ori_filename2', 'ori_shape', 'img_shape', 'img_norm_cfg', + 'scale_factor', 'pad_shape' + ]) + ], + data_root='data/Sintel', + test_mode=True, + pass_style='final') +data = dict( + train_dataloader=dict( + samples_per_gpu=1, + workers_per_gpu=5, + drop_last=True, + persistent_workers=True), + val_dataloader=dict( + samples_per_gpu=1, + workers_per_gpu=5, + shuffle=False, + persistent_workers=True), + test_dataloader=dict(samples_per_gpu=1, workers_per_gpu=5, shuffle=False), + train=[ + dict( + type='Sintel', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations', with_occ=True), + dict( + type='ColorJitter', + brightness=0.5, + contrast=0.5, + saturation=0.5, + hue=0.5), + dict(type='RandomGamma', gamma_range=(0.7, 1.5)), + dict( + type='Normalize', + mean=[0.0, 0.0, 0.0], + std=[255.0, 255.0, 255.0], + to_rgb=False), + dict(type='RandomFlip', prob=0.5, direction='horizontal'), + dict(type='RandomFlip', prob=0.5, direction='vertical'), + dict( + type='RandomAffine', + global_transform=dict( + translates=(0.05, 0.05), + zoom=(1.0, 1.5), + shear=(0.86, 1.16), + rotate=(-10.0, 10.0)), + relative_transform=dict( + translates=(0.00375, 0.00375), + zoom=(0.985, 1.015), + shear=(1.0, 1.0), + rotate=(-1.0, 1.0))), + dict(type='RandomCrop', crop_size=(384, 768)), + dict(type='DefaultFormatBundle'), + dict( + type='Collect', + keys=['imgs', 'flow_gt'], + meta_keys=[ + 'img_fields', 'ann_fields', 'filename1', 'filename2', + 'ori_filename1', 'ori_filename2', 'filename_flow', + 'ori_filename_flow', 'ori_shape', 'img_shape', + 'img_norm_cfg' + ]) + ], + data_root='data/Sintel', + test_mode=False, + pass_style='final') + ], + val=dict( + type='ConcatDataset', + datasets=[ + dict( + type='Sintel', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict(type='InputResize', exponent=6), + dict( + type='Normalize', + 
mean=[0.0, 0.0, 0.0], + std=[255.0, 255.0, 255.0], + to_rgb=False), + dict(type='TestFormatBundle'), + dict( + type='Collect', + keys=['imgs'], + meta_keys=[ + 'flow_gt', 'filename1', 'filename2', + 'ori_filename1', 'ori_filename2', 'ori_shape', + 'img_shape', 'img_norm_cfg', 'scale_factor', + 'pad_shape' + ]) + ], + data_root='data/Sintel', + test_mode=True, + pass_style='clean'), + dict( + type='Sintel', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict(type='InputResize', exponent=6), + dict( + type='Normalize', + mean=[0.0, 0.0, 0.0], + std=[255.0, 255.0, 255.0], + to_rgb=False), + dict(type='TestFormatBundle'), + dict( + type='Collect', + keys=['imgs'], + meta_keys=[ + 'flow_gt', 'filename1', 'filename2', + 'ori_filename1', 'ori_filename2', 'ori_shape', + 'img_shape', 'img_norm_cfg', 'scale_factor', + 'pad_shape' + ]) + ], + data_root='data/Sintel', + test_mode=True, + pass_style='final') + ], + separate_eval=True), + test=dict( + type='ConcatDataset', + datasets=[ + dict( + type='Sintel', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict(type='InputResize', exponent=6), + dict( + type='Normalize', + mean=[0.0, 0.0, 0.0], + std=[255.0, 255.0, 255.0], + to_rgb=False), + dict(type='TestFormatBundle'), + dict( + type='Collect', + keys=['imgs'], + meta_keys=[ + 'flow_gt', 'filename1', 'filename2', + 'ori_filename1', 'ori_filename2', 'ori_shape', + 'img_shape', 'img_norm_cfg', 'scale_factor', + 'pad_shape' + ]) + ], + data_root='data/Sintel', + test_mode=True, + pass_style='clean'), + dict( + type='Sintel', + pipeline=[ + dict(type='LoadImageFromFile'), + dict(type='LoadAnnotations'), + dict(type='InputResize', exponent=6), + dict( + type='Normalize', + mean=[0.0, 0.0, 0.0], + std=[255.0, 255.0, 255.0], + to_rgb=False), + dict(type='TestFormatBundle'), + dict( + type='Collect', + keys=['imgs'], + meta_keys=[ + 'flow_gt', 'filename1', 'filename2', + 'ori_filename1', 'ori_filename2', 'ori_shape', + 'img_shape', 'img_norm_cfg', 'scale_factor', + 'pad_shape' + ]) + ], + data_root='data/Sintel', + test_mode=True, + pass_style='final') + ], + separate_eval=True)) +optimizer = dict( + type='Adam', lr=3e-05, weight_decay=0.0004, betas=(0.9, 0.999)) +optimizer_config = dict(grad_clip=None) +lr_config = dict( + policy='MultiStage', + by_epoch=False, + gammas=[0.5, 0.5], + milestone_lrs=[3e-05, 2e-05], + milestone_iters=[0, 150000], + steps=[[ + 45000, 65000, 85000, 95000, 97500, 100000, 110000, 120000, 130000, + 140000 + ], + [ + 195000, 215000, 235000, 245000, 247500, 250000, 260000, 270000, + 280000, 290000 + ]]) +runner = dict(type='IterBasedRunner', max_iters=300000) +checkpoint_config = dict(by_epoch=False, interval=50000) +evaluation = dict(interval=50000, metric='EPE') +log_config = dict( + interval=50, + hooks=[dict(type='TextLoggerHook'), + dict(type='TensorboardLoggerHook')]) +dist_params = dict(backend='nccl') +log_level = 'INFO' +load_from = 'https://download.openmmlab.com/mmflow/pwcnet/pwcnet_ft_4x1_300k_sintel_384x768.pth' +resume_from = None +workflow = [('train', 1)] +model = dict( + type='PWCNet', + encoder=dict( + type='PWCNetEncoder', + in_channels=3, + net_type='Basic', + pyramid_levels=[ + 'level1', 'level2', 'level3', 'level4', 'level5', 'level6' + ], + out_channels=(16, 32, 64, 96, 128, 196), + strides=(2, 2, 2, 2, 2, 2), + dilations=(1, 1, 1, 1, 1, 1), + act_cfg=dict(type='LeakyReLU', negative_slope=0.1)), + decoder=dict( + type='PWCNetDecoder', + in_channels=dict( + level6=81, level5=213, level4=181, 
level3=149, level2=117), + flow_div=20.0, + corr_cfg=dict(type='Correlation', max_displacement=4, padding=0), + warp_cfg=dict(type='Warp', align_corners=True, use_mask=True), + act_cfg=dict(type='LeakyReLU', negative_slope=0.1), + scaled=False, + post_processor=dict(type='ContextNet', in_channels=565), + flow_loss=dict( + type='MultiLevelEPE', + p=1, + q=0.4, + eps=0.01, + reduction='sum', + weights=dict( + level2=0.005, + level3=0.01, + level4=0.02, + level5=0.08, + level6=0.32))), + train_cfg=dict(), + test_cfg=dict(), + init_cfg=dict( + type='Kaiming', + nonlinearity='leaky_relu', + layer=['Conv2d', 'ConvTranspose2d'], + mode='fan_in', + bias=0)) diff --git a/requirements.api.txt b/requirements.api.txt new file mode 100644 index 0000000..25810f0 --- /dev/null +++ b/requirements.api.txt @@ -0,0 +1,7 @@ +tinydb~=4.7.1 +python-multipart~=0.0.6 +httpx~=0.23.1 +chardet~=5.1.0 +fastapi~=0.95.0 + +git+https://github.com/mediatechnologycenter/api-utils.git@0.1.0 diff --git a/requirements.txt b/requirements.txt new file mode 100644 index 0000000..6125a92 --- /dev/null +++ b/requirements.txt @@ -0,0 +1,36 @@ +wavio +soundfile +opencv-python +scikit-image +tqdm +noisereduce +resampy +mediapipe +matplotlib +h5py +face_alignment +python_speech_features +moviepy +pydub +chumpy +av +pims +dominate +setuptools==59.5.0 +configargparse +wandb +pytorch-ignite +basicsr +facexlib +basicsr>=1.4.2 +facexlib>=0.2.5 +lmdb +pyyaml +scipy +tb-nightly +yapf +realesrgan +pytorch-fid +# tensorflow-gpu==1.15 +ninja +# git+https://github.com/facebookresearch/pytorch3d.git@stable \ No newline at end of file diff --git a/requirements_deepspeech.txt b/requirements_deepspeech.txt new file mode 100644 index 0000000..c7a94de --- /dev/null +++ b/requirements_deepspeech.txt @@ -0,0 +1,37 @@ +soundfile +librosa +wavio +opencv-python +scikit-image +tqdm +noisereduce +resampy +mediapipe +matplotlib +h5py +face_alignment +python_speech_features +moviepy +pydub +chumpy +av +pims +dominate +setuptools==59.5.0 +configargparse +wandb +pytorch-ignite +basicsr +facexlib +basicsr>=1.4.2 +facexlib>=0.2.5 +lmdb +pyyaml +scipy +tb-nightly +yapf +realesrgan +# pytorch-fid +tensorflow-gpu==1.15 +ninja +torchvision \ No newline at end of file
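Usage sketch: the mmflow config/checkpoint pairs added above can be loaded with
mmflow's high-level inference API. This is an illustrative example only; it
assumes the mmflow package is installed and that the two frame paths exist.

    from mmflow.apis import inference_model, init_model

    config = 'motion-gan-pipeline/preprocessing/third/mmflow/pwcnet/pwcnet_ft_4x1_300k_sintel_final_384x768.py'
    checkpoint = 'motion-gan-pipeline/preprocessing/third/mmflow/pwcnet/pwcnet_ft_4x1_300k_sintel_final_384x768.pth'

    # build the flow estimator from the config and load the LFS checkpoint
    model = init_model(config, checkpoint, device='cuda:0')

    # estimate optical flow between two consecutive frames -> (H, W, 2) ndarray
    flow = inference_model(model, 'frame_0000.png', 'frame_0001.png')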